[scala-2.10] 01/01: Imported Upstream version 2.10.4
Emmanuel Bourg
ebourg-guest at moszumanska.debian.org
Tue Nov 15 12:44:42 UTC 2016
This is an automated email from the git hooks/post-receive script.
ebourg-guest pushed a commit to annotated tag upstream/2.10.4
in repository scala-2.10.
commit 1b970a4a30a2bae3aff4a7ff7e8b5e0d271191b7
Author: Lucas Satabin <lucas.satabin at gnieh.org>
Date: Wed Apr 9 20:50:55 2014 +0200
Imported Upstream version 2.10.4
---
.gitattributes | 26 +
.gitignore | 49 +
.mailmap | 25 +
.project | 29 -
CONTRIBUTING.md | 66 +
META-INF/MANIFEST.MF | 8 +-
README | 202 -
README.rst | 207 +
bincompat-backward.whitelist.conf | 105 +
bincompat-forward.whitelist.conf | 178 +
build.examples.xml | 283 -
build.number | 4 +-
build.number.maven | 3 +
build.xml | 3590 +++--
classpath.SAMPLE | 13 -
docs/LICENSE | 86 +-
docs/README | 58 +-
docs/development/scala.dbc/SQLTypes.dot | 48 -
docs/examples/actors/auction.scala | 131 -
docs/examples/actors/boundedbuffer.scala | 37 -
docs/examples/actors/channels.scala | 32 -
docs/examples/actors/fringe.scala | 82 -
docs/examples/actors/links.scala | 47 -
docs/examples/actors/looping.scala | 26 -
docs/examples/actors/message.scala | 40 -
docs/examples/actors/pingpong.scala | 61 -
docs/examples/actors/producers.scala | 114 -
docs/examples/actors/seq.scala | 15 -
docs/examples/boundedbuffer.scala | 46 -
docs/examples/computeserver.scala | 53 -
.../examples/expressions/expressions-current.scala | 68 -
docs/examples/fors.scala | 112 -
docs/examples/futures.scala | 17 -
docs/examples/gadts.scala | 22 -
docs/examples/iterators.scala | 28 -
docs/examples/jolib/Ref.scala | 55 -
docs/examples/jolib/parallelOr.scala | 58 -
docs/examples/maps.scala | 187 -
docs/examples/monads/callccInterpreter.scala | 86 -
docs/examples/monads/directInterpreter.scala | 55 -
docs/examples/monads/errorInterpreter.scala | 86 -
docs/examples/monads/simpleInterpreter.scala | 75 -
docs/examples/monads/stateInterpreter.scala | 86 -
docs/examples/oneplacebuffer.scala | 60 -
docs/examples/parsing/ArithmeticParser.scala | 57 -
docs/examples/parsing/ArithmeticParsers.scala | 70 -
docs/examples/parsing/JSON.scala | 44 -
docs/examples/parsing/ListParser.scala | 33 -
docs/examples/parsing/ListParsers.scala | 30 -
docs/examples/parsing/MiniML.scala | 52 -
docs/examples/parsing/lambda/Main.scala | 34 -
docs/examples/parsing/lambda/TestParser.scala | 68 -
docs/examples/parsing/lambda/TestSyntax.scala | 86 -
docs/examples/parsing/lambda/test/test-01.kwi | 1 -
docs/examples/parsing/lambda/test/test-02.kwi | 1 -
docs/examples/parsing/lambda/test/test-03.kwi | 1 -
docs/examples/parsing/lambda/test/test-04.kwi | 1 -
docs/examples/parsing/lambda/test/test-05.kwi | 1 -
docs/examples/parsing/lambda/test/test-06.kwi | 1 -
docs/examples/parsing/lambda/test/test-07.kwi | 1 -
docs/examples/parsing/lambda/test/test-08.kwi | 1 -
docs/examples/patterns.scala | 36 -
docs/examples/pilib/elasticBuffer.scala | 77 -
docs/examples/pilib/handover.scala | 186 -
docs/examples/pilib/mobilePhoneProtocol.scala | 172 -
docs/examples/pilib/piNat.scala | 89 -
docs/examples/pilib/rwlock.scala | 329 -
docs/examples/pilib/scheduler.scala | 150 -
docs/examples/pilib/semaphore.scala | 72 -
docs/examples/pilib/twoPlaceBuffer.scala | 67 -
docs/examples/plugintemplate/.classpath | 11 -
docs/examples/plugintemplate/.project | 18 -
docs/examples/plugintemplate/build.xml | 265 -
docs/examples/plugintemplate/doc/README | 68 -
.../plugintemplate/examples/BasicExample.scala | 8 -
.../plugintemplate/lib/scalatest.jar.desired.sha1 | 1 -
.../examples/plugintemplate/misc/scalac-plugin.xml | 4 -
docs/examples/plugintemplate/plugin.properties | 10 -
.../src/plugintemplate/PluginProperties.scala | 61 -
.../plugintemplate/TemplateAnnotationChecker.scala | 20 -
.../src/plugintemplate/TemplateComponent.scala | 35 -
.../TemplateInfoTransformComponent.scala | 79 -
.../src/plugintemplate/TemplatePlugin.scala | 52 -
.../TemplateTransformComponent.scala | 58 -
.../plugintemplate/TemplateTraverseComponent.scala | 32 -
.../src/plugintemplate/standalone/Main.scala | 44 -
.../plugintemplate/standalone/PluginRunner.scala | 36 -
.../plugintemplate/PluginPropertiesSuite.scala | 14 -
.../test/plugintemplate/TemplatePluginSuite.scala | 22 -
docs/examples/sort.scala | 48 -
docs/examples/sort1.scala | 22 -
docs/examples/sort2.scala | 25 -
docs/examples/tcpoly/collection/HOSeq.scala | 167 -
docs/examples/tcpoly/monads/Monads.scala | 69 -
docs/examples/typeinf.scala | 253 -
docs/examples/xml/phonebook/embeddedBook.scala | 26 -
docs/examples/xml/phonebook/phonebook.scala | 38 -
docs/examples/xml/phonebook/phonebook1.scala | 21 -
docs/examples/xml/phonebook/phonebook2.scala | 25 -
docs/examples/xml/phonebook/phonebook3.scala | 81 -
docs/examples/xml/phonebook/verboseBook.scala | 24 -
docs/licenses/apache_android.txt | 16 -
docs/licenses/apache_ant.txt | 16 -
docs/licenses/apache_jansi.txt | 203 +
docs/licenses/bsd_asm.txt | 31 +
docs/licenses/bsd_jline.txt | 2 +-
docs/svn-to-sha1-map.txt | 14907 +++++++++++++++++++
docs/svn-to-sha1-missing.txt | 140 +
gitconfig.SAMPLE | 8 +
gitignore.SAMPLE | 25 -
lib/fjbg.jar | Bin 130721 -> 0 bytes
lib/fjbg.jar.desired.sha1 | 2 +-
lib/forkjoin.jar | Bin 47215 -> 0 bytes
lib/forkjoin.jar.desired.sha1 | 2 +-
lib/jline.jar | Bin 158705 -> 0 bytes
lib/jline.jar.desired.sha1 | 2 +-
lib/midpapi10.jar.desired.sha1 | 1 -
lib/msil.jar | Bin 299354 -> 0 bytes
lib/msil.jar.desired.sha1 | 2 +-
lib/scala-compiler-src.jar.desired.sha1 | 1 +
lib/scala-compiler.jar | Bin 11422560 -> 0 bytes
lib/scala-compiler.jar.desired.sha1 | 2 +-
lib/scala-library-src.jar | Bin 1306388 -> 0 bytes
lib/scala-library-src.jar.desired.sha1 | 2 +-
lib/scala-library.jar | Bin 9925007 -> 0 bytes
lib/scala-library.jar.desired.sha1 | 2 +-
lib/scala-reflect-src.jar.desired.sha1 | 1 +
lib/scala-reflect.jar.desired.sha1 | 1 +
project/Build.scala | 336 +
project/Layers.scala | 120 +
project/Packaging.scala | 129 +
project/Partest.scala | 141 +
project/Release.scala | 30 +
project/RemoteDependencies.scala | 53 +
project/Sametest.scala | 63 +
project/ScalaBuildKeys.scala | 23 +
project/ScalaToolRunner.scala | 21 +
project/ShaResolve.scala | 147 +
project/Testing.scala | 41 +
project/VerifyClassLoad.scala | 46 +
project/Versions.scala | 142 +
project/build.properties | 11 -
project/build/AdditionalResources.scala | 81 -
project/build/BasicLayer.scala | 296 -
project/build/BuildInfoEnvironment.scala | 21 -
project/build/Comparator.scala | 72 -
project/build/Compilation.scala | 104 -
project/build/CompilationStep.scala | 39 -
project/build/ForkSBT.scala | 49 -
project/build/Packer.scala | 122 -
project/build/Partest.scala | 370 -
project/build/PathConfig.scala | 43 -
project/build/SVN.scala | 36 -
project/build/ScalaBuildProject.scala | 36 -
project/build/ScalaSBTBuilder.scala | 362 -
project/build/ScalaTools.scala | 179 -
project/build/Scaladoc.scala | 48 -
project/plugins.sbt | 9 +
project/plugins/Plugins.scala | 6 -
project/project/Build.scala | 7 +
pull-binary-libs.sh | 12 +
push-binary-libs.sh | 0
src/actors/scala/actors/AbstractActor.scala | 5 +-
src/actors/scala/actors/Actor.scala | 726 +-
src/actors/scala/actors/ActorCanReply.scala | 4 +-
src/actors/scala/actors/ActorProxy.scala | 7 +-
src/actors/scala/actors/ActorRef.scala | 52 +
src/actors/scala/actors/ActorTask.scala | 23 +-
src/actors/scala/actors/CanReply.scala | 30 +-
src/actors/scala/actors/Channel.scala | 15 +-
src/actors/scala/actors/Combinators.scala | 6 +-
src/actors/scala/actors/DaemonActor.scala | 7 +-
src/actors/scala/actors/Debug.scala | 5 +-
src/actors/scala/actors/Future.scala | 16 +-
src/actors/scala/actors/IScheduler.scala | 29 +-
src/actors/scala/actors/InputChannel.scala | 2 +-
src/actors/scala/actors/InternalActor.scala | 544 +
src/actors/scala/actors/InternalReplyReactor.scala | 161 +
src/actors/scala/actors/KillActorControl.scala | 16 +
src/actors/scala/actors/MQueue.scala | 250 +
src/actors/scala/actors/MessageQueue.scala | 262 -
src/actors/scala/actors/OutputChannel.scala | 13 +-
src/actors/scala/actors/ReactChannel.scala | 32 +-
src/actors/scala/actors/Reaction.scala | 33 -
src/actors/scala/actors/Reactor.scala | 18 +-
src/actors/scala/actors/ReactorCanReply.scala | 4 +-
src/actors/scala/actors/ReactorTask.scala | 2 +-
src/actors/scala/actors/ReplyReactor.scala | 164 +-
src/actors/scala/actors/ReplyReactorTask.scala | 17 +-
src/actors/scala/actors/Scheduler.scala | 23 +-
src/actors/scala/actors/SchedulerAdapter.scala | 2 +-
src/actors/scala/actors/UncaughtException.scala | 4 +-
src/actors/scala/actors/package.scala | 27 +-
.../scala/actors/remote/FreshNameCreator.scala | 2 +-
.../scala/actors/remote/JavaSerializer.scala | 2 +-
src/actors/scala/actors/remote/NetKernel.scala | 12 +-
src/actors/scala/actors/remote/Proxy.scala | 18 +-
src/actors/scala/actors/remote/RemoteActor.scala | 20 +-
src/actors/scala/actors/remote/Serializer.scala | 2 +-
src/actors/scala/actors/remote/Service.scala | 2 +-
src/actors/scala/actors/remote/TcpService.scala | 41 +-
src/actors/scala/actors/scheduler/ActorGC.scala | 15 +-
.../scala/actors/scheduler/DaemonScheduler.scala | 2 +-
.../actors/scheduler/DelegatingScheduler.scala | 2 +-
.../actors/scheduler/DrainableForkJoinPool.scala | 4 +-
.../scala/actors/scheduler/ExecutorScheduler.scala | 2 +-
.../scala/actors/scheduler/ForkJoinScheduler.scala | 9 +-
.../scala/actors/scheduler/QuitControl.scala | 6 +-
.../scheduler/ResizableThreadPoolScheduler.scala | 13 +-
.../actors/scheduler/SingleThreadedScheduler.scala | 6 +-
.../actors/scheduler/TerminationMonitor.scala | 11 +-
.../actors/scheduler/TerminationService.scala | 2 +-
.../scala/actors/scheduler/ThreadPoolConfig.scala | 9 +-
src/android-library/scala/ScalaObject.scala | 13 -
src/asm/scala/tools/asm/AnnotationVisitor.java | 157 +
src/asm/scala/tools/asm/AnnotationWriter.java | 322 +
src/asm/scala/tools/asm/Attribute.java | 254 +
src/asm/scala/tools/asm/ByteVector.java | 293 +
src/asm/scala/tools/asm/ClassReader.java | 2216 +++
src/asm/scala/tools/asm/ClassVisitor.java | 277 +
src/asm/scala/tools/asm/ClassWriter.java | 1672 +++
src/asm/scala/tools/asm/CustomAttr.java | 20 +
src/asm/scala/tools/asm/Edge.java | 75 +
src/asm/scala/tools/asm/FieldVisitor.java | 115 +
src/asm/scala/tools/asm/FieldWriter.java | 271 +
src/asm/scala/tools/asm/Frame.java | 1435 ++
src/asm/scala/tools/asm/Handle.java | 159 +
src/asm/scala/tools/asm/Handler.java | 118 +
src/asm/scala/tools/asm/Item.java | 297 +
src/asm/scala/tools/asm/Label.java | 555 +
src/asm/scala/tools/asm/MethodVisitor.java | 588 +
src/asm/scala/tools/asm/MethodWriter.java | 2671 ++++
src/asm/scala/tools/asm/Opcodes.java | 358 +
src/asm/scala/tools/asm/Type.java | 865 ++
.../scala/tools/asm/signature/SignatureReader.java | 229 +
.../tools/asm/signature/SignatureVisitor.java | 228 +
.../scala/tools/asm/signature/SignatureWriter.java | 227 +
src/asm/scala/tools/asm/tree/AbstractInsnNode.java | 238 +
src/asm/scala/tools/asm/tree/AnnotationNode.java | 224 +
src/asm/scala/tools/asm/tree/ClassNode.java | 371 +
src/asm/scala/tools/asm/tree/FieldInsnNode.java | 106 +
src/asm/scala/tools/asm/tree/FieldNode.java | 243 +
src/asm/scala/tools/asm/tree/FrameNode.java | 211 +
src/asm/scala/tools/asm/tree/IincInsnNode.java | 80 +
src/asm/scala/tools/asm/tree/InnerClassNode.java | 101 +
src/asm/scala/tools/asm/tree/InsnList.java | 578 +
src/asm/scala/tools/asm/tree/InsnNode.java | 84 +
src/asm/scala/tools/asm/tree/IntInsnNode.java | 84 +
.../tools/asm/tree/InvokeDynamicInsnNode.java | 100 +
src/asm/scala/tools/asm/tree/JumpInsnNode.java | 92 +
src/asm/scala/tools/asm/tree/LabelNode.java | 78 +
src/asm/scala/tools/asm/tree/LdcInsnNode.java | 77 +
src/asm/scala/tools/asm/tree/LineNumberNode.java | 82 +
.../scala/tools/asm/tree/LocalVariableNode.java | 115 +
.../scala/tools/asm/tree/LookupSwitchInsnNode.java | 116 +
src/asm/scala/tools/asm/tree/MethodInsnNode.java | 107 +
src/asm/scala/tools/asm/tree/MethodNode.java | 645 +
.../tools/asm/tree/MultiANewArrayInsnNode.java | 81 +
.../scala/tools/asm/tree/TableSwitchInsnNode.java | 115 +
.../scala/tools/asm/tree/TryCatchBlockNode.java | 94 +
src/asm/scala/tools/asm/tree/TypeInsnNode.java | 87 +
src/asm/scala/tools/asm/tree/VarInsnNode.java | 90 +
.../scala/tools/asm/tree/analysis/Analyzer.java | 549 +
.../tools/asm/tree/analysis/AnalyzerException.java | 64 +
.../tools/asm/tree/analysis/BasicInterpreter.java | 365 +
.../scala/tools/asm/tree/analysis/BasicValue.java | 108 +
.../tools/asm/tree/analysis/BasicVerifier.java | 459 +
src/asm/scala/tools/asm/tree/analysis/Frame.java | 709 +
.../scala/tools/asm/tree/analysis/Interpreter.java | 204 +
.../tools/asm/tree/analysis/SimpleVerifier.java | 329 +
.../scala/tools/asm/tree/analysis/SmallSet.java | 134 +
.../tools/asm/tree/analysis/SourceInterpreter.java | 206 +
.../scala/tools/asm/tree/analysis/SourceValue.java | 97 +
.../scala/tools/asm/tree/analysis/Subroutine.java | 93 +
src/asm/scala/tools/asm/tree/analysis/Value.java | 45 +
src/asm/scala/tools/asm/util/ASMifiable.java | 53 +
src/asm/scala/tools/asm/util/ASMifier.java | 1238 ++
.../tools/asm/util/CheckAnnotationAdapter.java | 142 +
.../scala/tools/asm/util/CheckClassAdapter.java | 603 +
.../scala/tools/asm/util/CheckFieldAdapter.java | 97 +
.../scala/tools/asm/util/CheckMethodAdapter.java | 1668 +++
.../tools/asm/util/CheckSignatureAdapter.java | 329 +
src/asm/scala/tools/asm/util/Printer.java | 558 +
src/asm/scala/tools/asm/util/SignatureChecker.java | 47 +
src/asm/scala/tools/asm/util/Textifiable.java | 54 +
src/asm/scala/tools/asm/util/Textifier.java | 1286 ++
.../tools/asm/util/TraceAnnotationVisitor.java | 96 +
.../scala/tools/asm/util/TraceClassVisitor.java | 232 +
.../scala/tools/asm/util/TraceFieldVisitor.java | 78 +
.../scala/tools/asm/util/TraceMethodVisitor.java | 264 +
.../tools/asm/util/TraceSignatureVisitor.java | 318 +
src/attic/README | 2 -
src/attic/scala/tools/nsc/models/Models.scala | 419 -
.../scala/tools/nsc/models/SemanticTokens.scala | 702 -
src/attic/scala/tools/nsc/models/Signatures.scala | 85 -
.../scala/tools/nsc/symtab/SymbolWalker.scala | 253 -
src/build/InnerObjectTestGen.scala | 308 +
src/build/bnd/continuations.bnd | 5 +
src/build/bnd/scala-actors.bnd | 5 +
src/build/bnd/scala-compiler.bnd | 8 +
src/build/bnd/scala-library.bnd | 6 +
src/build/bnd/scala-reflect.bnd | 6 +
src/build/bnd/scala-swing.bnd | 5 +
src/build/genprod.scala | 284 +-
src/build/maven/continuations-plugin-pom.xml | 9 +-
src/build/maven/jline-pom.xml | 9 +-
src/build/maven/maven-deploy.xml | 55 +-
src/build/maven/scala-actors-pom.xml | 64 +
src/build/maven/scala-compiler-pom.xml | 14 +-
src/build/maven/scala-dbc-pom.xml | 62 -
src/build/maven/scala-dotnet-library-pom.xml | 9 +-
src/build/maven/scala-library-pom.xml | 41 +-
src/build/maven/scala-partest-pom.xml | 9 +-
src/build/maven/scala-reflect-pom.xml | 64 +
src/build/maven/scala-swing-pom.xml | 12 +-
src/build/maven/scalap-pom.xml | 9 +-
src/build/pack.xml | 221 +-
src/compiler/rootdoc.txt | 6 +
.../macros/runtime/AbortMacroException.scala | 7 +
.../scala/reflect/macros/runtime/Aliases.scala | 36 +
.../scala/reflect/macros/runtime/Context.scala | 28 +
.../scala/reflect/macros/runtime/Enclosures.scala | 24 +
.../scala/reflect/macros/runtime/Evals.scala | 18 +
.../scala/reflect/macros/runtime/ExprUtils.scala | 35 +
.../scala/reflect/macros/runtime/FrontEnds.scala | 20 +
.../reflect/macros/runtime/Infrastructure.scala | 16 +
.../scala/reflect/macros/runtime/Names.scala | 17 +
.../scala/reflect/macros/runtime/Parsers.scala | 24 +
.../scala/reflect/macros/runtime/Reifiers.scala | 77 +
.../scala/reflect/macros/runtime/Traces.scala | 8 +
.../scala/reflect/macros/runtime/Typers.scala | 50 +
.../scala/reflect/macros/util/Traces.scala | 11 +
src/compiler/scala/reflect/reify/Errors.scala | 84 +
src/compiler/scala/reflect/reify/Phases.scala | 44 +
src/compiler/scala/reflect/reify/Reifier.scala | 143 +
src/compiler/scala/reflect/reify/States.scala | 67 +
src/compiler/scala/reflect/reify/Taggers.scala | 102 +
.../reflect/reify/codegen/GenAnnotationInfos.scala | 55 +
.../scala/reflect/reify/codegen/GenNames.scala | 14 +
.../scala/reflect/reify/codegen/GenPositions.scala | 17 +
.../scala/reflect/reify/codegen/GenSymbols.scala | 180 +
.../scala/reflect/reify/codegen/GenTrees.scala | 239 +
.../scala/reflect/reify/codegen/GenTypes.scala | 197 +
.../scala/reflect/reify/codegen/GenUtils.scala | 148 +
src/compiler/scala/reflect/reify/package.scala | 93 +
.../scala/reflect/reify/phases/Calculate.scala | 61 +
.../scala/reflect/reify/phases/Metalevels.scala | 150 +
.../scala/reflect/reify/phases/Reify.scala | 62 +
.../scala/reflect/reify/phases/Reshape.scala | 371 +
.../scala/reflect/reify/utils/Extractors.scala | 256 +
.../scala/reflect/reify/utils/NodePrinters.scala | 110 +
.../scala/reflect/reify/utils/StdAttachments.scala | 18 +
.../scala/reflect/reify/utils/SymbolTables.scala | 217 +
src/compiler/scala/reflect/reify/utils/Utils.scala | 21 +
src/compiler/scala/tools/ant/ClassloadVerify.scala | 53 +
src/compiler/scala/tools/ant/FastScalac.scala | 225 +-
src/compiler/scala/tools/ant/Pack200Task.scala | 31 +-
src/compiler/scala/tools/ant/Same.scala | 29 +-
src/compiler/scala/tools/ant/ScalaBazaar.scala | 318 -
.../scala/tools/ant/ScalaMatchingTask.scala | 2 +-
src/compiler/scala/tools/ant/ScalaTool.scala | 73 +-
src/compiler/scala/tools/ant/Scalac.scala | 344 +-
src/compiler/scala/tools/ant/ScalacShared.scala | 2 +-
src/compiler/scala/tools/ant/Scaladoc.scala | 259 +-
src/compiler/scala/tools/ant/antlib.xml | 8 +-
src/compiler/scala/tools/ant/sabbus/Break.scala | 2 +-
.../tools/ant/sabbus/CompilationFailure.scala | 2 +-
src/compiler/scala/tools/ant/sabbus/Compiler.scala | 2 +-
.../scala/tools/ant/sabbus/Compilers.scala | 21 +-
.../scala/tools/ant/sabbus/ForeignCompiler.scala | 2 +-
src/compiler/scala/tools/ant/sabbus/Make.scala | 4 +-
.../scala/tools/ant/sabbus/ScalacFork.scala | 51 +-
src/compiler/scala/tools/ant/sabbus/Settings.scala | 15 +-
src/compiler/scala/tools/ant/sabbus/TaskArgs.scala | 21 +-
src/compiler/scala/tools/ant/sabbus/Use.scala | 2 +-
.../scala/tools/ant/templates/tool-unix.tmpl | 144 +-
.../scala/tools/ant/templates/tool-windows.tmpl | 226 +-
src/compiler/scala/tools/cmd/CommandLine.scala | 2 +-
src/compiler/scala/tools/cmd/Demo.scala | 2 +-
src/compiler/scala/tools/cmd/FromString.scala | 23 +-
src/compiler/scala/tools/cmd/Instance.scala | 2 +-
src/compiler/scala/tools/cmd/Interpolation.scala | 2 +-
src/compiler/scala/tools/cmd/Meta.scala | 2 +-
src/compiler/scala/tools/cmd/Opt.scala | 2 +-
src/compiler/scala/tools/cmd/Parser.scala | 2 +-
src/compiler/scala/tools/cmd/Property.scala | 4 +-
src/compiler/scala/tools/cmd/Reference.scala | 4 +-
src/compiler/scala/tools/cmd/Spec.scala | 2 +-
src/compiler/scala/tools/cmd/gen/AnyVals.scala | 315 +-
src/compiler/scala/tools/cmd/gen/Codegen.scala | 4 +-
src/compiler/scala/tools/cmd/gen/CodegenSpec.scala | 2 +-
src/compiler/scala/tools/cmd/package.scala | 6 +-
src/compiler/scala/tools/cmd/program/Scmp.scala | 60 -
src/compiler/scala/tools/cmd/program/Simple.scala | 81 -
src/compiler/scala/tools/cmd/program/Tokens.scala | 106 -
.../scala/tools/nsc/CompilationUnits.scala | 75 +-
src/compiler/scala/tools/nsc/CompileClient.scala | 2 +-
src/compiler/scala/tools/nsc/CompileServer.scala | 62 +-
src/compiler/scala/tools/nsc/CompileSocket.scala | 10 +-
src/compiler/scala/tools/nsc/CompilerCommand.scala | 13 +-
src/compiler/scala/tools/nsc/CompilerRun.scala | 2 +-
src/compiler/scala/tools/nsc/ConsoleWriter.scala | 2 +-
src/compiler/scala/tools/nsc/Driver.scala | 71 +
src/compiler/scala/tools/nsc/EvalLoop.scala | 4 +-
src/compiler/scala/tools/nsc/FatalError.scala | 19 -
.../scala/tools/nsc/GenericRunnerCommand.scala | 7 +-
.../scala/tools/nsc/GenericRunnerSettings.scala | 4 +-
src/compiler/scala/tools/nsc/Global.scala | 1120 +-
.../scala/tools/nsc/InterpreterCommand.scala | 8 -
src/compiler/scala/tools/nsc/Main.scala | 82 +-
src/compiler/scala/tools/nsc/MainBench.scala | 48 +
.../scala/tools/nsc/MainGenericRunner.scala | 34 +-
src/compiler/scala/tools/nsc/MainInterpreter.scala | 13 -
src/compiler/scala/tools/nsc/MainTokenMetric.scala | 2 +-
.../scala/tools/nsc/NewLinePrintWriter.scala | 2 +-
src/compiler/scala/tools/nsc/NoPhase.scala | 11 -
src/compiler/scala/tools/nsc/ObjectRunner.scala | 18 +-
.../scala/tools/nsc/OfflineCompilerCommand.scala | 4 +-
src/compiler/scala/tools/nsc/Phase.scala | 85 -
src/compiler/scala/tools/nsc/PhaseAssembly.scala | 5 +-
src/compiler/scala/tools/nsc/Phases.scala | 46 +
src/compiler/scala/tools/nsc/Properties.scala | 22 +-
src/compiler/scala/tools/nsc/ScalaDoc.scala | 15 +-
src/compiler/scala/tools/nsc/ScriptRunner.scala | 6 +-
src/compiler/scala/tools/nsc/Settings.scala | 8 +-
src/compiler/scala/tools/nsc/SubComponent.scala | 5 +-
src/compiler/scala/tools/nsc/ast/DocComments.scala | 245 +-
.../scala/tools/nsc/ast/NodePrinters.scala | 566 +-
src/compiler/scala/tools/nsc/ast/Positions.scala | 37 +
src/compiler/scala/tools/nsc/ast/Printers.scala | 296 +
.../scala/tools/nsc/ast/TreeBrowsers.scala | 179 +-
src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 72 +-
src/compiler/scala/tools/nsc/ast/TreeGen.scala | 508 +-
src/compiler/scala/tools/nsc/ast/TreeInfo.scala | 384 +-
.../scala/tools/nsc/ast/TreePrinters.scala | 708 -
src/compiler/scala/tools/nsc/ast/Trees.scala | 1333 +-
.../scala/tools/nsc/ast/parser/BracePair.scala | 2 +-
.../scala/tools/nsc/ast/parser/BracePatch.scala | 2 +-
.../scala/tools/nsc/ast/parser/Change.scala | 2 +-
.../scala/tools/nsc/ast/parser/MarkupParsers.scala | 35 +-
.../scala/tools/nsc/ast/parser/Parsers.scala | 1296 +-
.../scala/tools/nsc/ast/parser/Patch.scala | 2 +-
.../scala/tools/nsc/ast/parser/Scanners.scala | 558 +-
.../tools/nsc/ast/parser/SymbolicXMLBuilder.scala | 54 +-
.../tools/nsc/ast/parser/SyntaxAnalyzer.scala | 4 +-
.../scala/tools/nsc/ast/parser/Tokens.scala | 40 +-
.../scala/tools/nsc/ast/parser/TreeBuilder.scala | 187 +-
.../scala/tools/nsc/backend/JavaPlatform.scala | 56 +-
.../scala/tools/nsc/backend/MSILPlatform.scala | 45 +-
.../scala/tools/nsc/backend/Platform.scala | 32 +-
.../scala/tools/nsc/backend/ScalaPrimitives.scala | 42 +-
.../tools/nsc/backend/WorklistAlgorithm.scala | 2 +-
.../tools/nsc/backend/icode/BasicBlocks.scala | 260 +-
.../tools/nsc/backend/icode/CheckerException.scala | 2 +-
.../nsc/backend/icode/ExceptionHandlers.scala | 20 +-
.../scala/tools/nsc/backend/icode/GenICode.scala | 1147 +-
.../tools/nsc/backend/icode/ICodeCheckers.scala | 24 +-
.../scala/tools/nsc/backend/icode/ICodes.scala | 52 +-
.../tools/nsc/backend/icode/Linearizers.scala | 27 +-
.../scala/tools/nsc/backend/icode/Members.scala | 112 +-
.../scala/tools/nsc/backend/icode/Opcodes.scala | 399 +-
.../scala/tools/nsc/backend/icode/Primitives.scala | 74 +-
.../scala/tools/nsc/backend/icode/Printers.scala | 10 +-
.../scala/tools/nsc/backend/icode/Repository.scala | 16 +-
.../scala/tools/nsc/backend/icode/TypeKinds.scala | 83 +-
.../scala/tools/nsc/backend/icode/TypeStacks.scala | 4 +-
.../backend/icode/analysis/CopyPropagation.scala | 74 +-
.../backend/icode/analysis/DataFlowAnalysis.scala | 21 +-
.../nsc/backend/icode/analysis/Liveness.scala | 55 +-
.../nsc/backend/icode/analysis/LubException.scala | 2 +-
.../nsc/backend/icode/analysis/ProgramPoint.scala | 2 +-
.../icode/analysis/ReachingDefinitions.scala | 129 +-
.../nsc/backend/icode/analysis/SemiLattice.scala | 8 +-
.../backend/icode/analysis/TypeFlowAnalysis.scala | 823 +-
.../tools/nsc/backend/jvm/BytecodeWriters.scala | 87 +-
.../scala/tools/nsc/backend/jvm/GenASM.scala | 3355 +++++
.../scala/tools/nsc/backend/jvm/GenAndroid.scala | 22 +-
.../scala/tools/nsc/backend/jvm/GenJVM.scala | 1666 +--
.../scala/tools/nsc/backend/jvm/GenJVMASM.scala | 99 +
.../scala/tools/nsc/backend/jvm/GenJVMUtil.scala | 38 +-
.../scala/tools/nsc/backend/msil/GenMSIL.scala | 144 +-
.../tools/nsc/backend/opt/ClosureElimination.scala | 60 +-
.../nsc/backend/opt/DeadCodeElimination.scala | 278 +-
.../nsc/backend/opt/InlineExceptionHandlers.scala | 389 +
.../scala/tools/nsc/backend/opt/Inliners.scala | 1028 +-
.../scala/tools/nsc/dependencies/Changes.scala | 6 +-
.../nsc/dependencies/DependencyAnalysis.scala | 28 +-
src/compiler/scala/tools/nsc/doc/DocFactory.scala | 48 +-
src/compiler/scala/tools/nsc/doc/DocParser.scala | 4 +-
src/compiler/scala/tools/nsc/doc/Index.scala | 2 +-
src/compiler/scala/tools/nsc/doc/Settings.scala | 275 +-
.../scala/tools/nsc/doc/Uncompilable.scala | 9 +-
src/compiler/scala/tools/nsc/doc/Universe.scala | 2 +-
.../tools/nsc/doc/base/CommentFactoryBase.scala | 955 ++
src/compiler/scala/tools/nsc/doc/base/LinkTo.scala | 15 +
.../tools/nsc/doc/base/MemberLookupBase.scala | 206 +
.../scala/tools/nsc/doc/base/comment/Body.scala | 95 +
.../scala/tools/nsc/doc/base/comment/Comment.scala | 134 +
src/compiler/scala/tools/nsc/doc/html/Doclet.scala | 2 +-
.../scala/tools/nsc/doc/html/HtmlFactory.scala | 34 +-
.../scala/tools/nsc/doc/html/HtmlPage.scala | 110 +-
src/compiler/scala/tools/nsc/doc/html/Page.scala | 58 +-
.../scala/tools/nsc/doc/html/SyntaxHigh.scala | 34 +-
.../scala/tools/nsc/doc/html/page/Index.scala | 55 +-
.../tools/nsc/doc/html/page/IndexScript.scala | 18 +-
.../tools/nsc/doc/html/page/ReferenceIndex.scala | 6 +-
.../scala/tools/nsc/doc/html/page/Source.scala | 5 +-
.../scala/tools/nsc/doc/html/page/Template.scala | 635 +-
.../doc/html/page/diagram/DiagramGenerator.scala | 53 +
.../nsc/doc/html/page/diagram/DiagramStats.scala | 66 +
.../html/page/diagram/DotDiagramGenerator.scala | 511 +
.../nsc/doc/html/page/diagram/DotRunner.scala | 228 +
.../nsc/doc/html/resource/lib/class_diagram.png | Bin 0 -> 3910 bytes
.../nsc/doc/html/resource/lib/conversionbg.gif | Bin 0 -> 167 bytes
.../tools/nsc/doc/html/resource/lib/diagrams.css | 143 +
.../tools/nsc/doc/html/resource/lib/diagrams.js | 324 +
.../tools/nsc/doc/html/resource/lib/index.css | 304 +-
.../scala/tools/nsc/doc/html/resource/lib/index.js | 134 +-
.../tools/nsc/doc/html/resource/lib/jquery-ui.js | 407 +-
.../tools/nsc/doc/html/resource/lib/jquery.js | 156 +-
.../nsc/doc/html/resource/lib/jquery.layout.js | 5480 ++++++-
.../nsc/doc/html/resource/lib/modernizr.custom.js | 4 +
.../nsc/doc/html/resource/lib/object_diagram.png | Bin 0 -> 3903 bytes
.../doc/html/resource/lib/object_to_type_big.png | Bin 0 -> 9158 bytes
.../tools/nsc/doc/html/resource/lib/raphael-min.js | 10 +
.../tools/nsc/doc/html/resource/lib/ref-index.css | 0
.../tools/nsc/doc/html/resource/lib/rootdoc.txt | 27 -
.../doc/html/resource/lib/selected-implicits.png | Bin 0 -> 1150 bytes
.../html/resource/lib/selected-right-implicits.png | Bin 0 -> 646 bytes
.../tools/nsc/doc/html/resource/lib/template.css | 180 +-
.../tools/nsc/doc/html/resource/lib/template.js | 310 +-
.../nsc/doc/html/resource/lib/trait_diagram.png | Bin 0 -> 3882 bytes
.../scala/tools/nsc/doc/html/resource/lib/type.png | Bin 0 -> 1445 bytes
.../tools/nsc/doc/html/resource/lib/type_big.png | Bin 0 -> 4236 bytes
.../nsc/doc/html/resource/lib/type_diagram.png | Bin 0 -> 1841 bytes
.../doc/html/resource/lib/type_to_object_big.png | Bin 0 -> 4969 bytes
.../scala/tools/nsc/doc/model/CommentFactory.scala | 114 +
.../scala/tools/nsc/doc/model/Entity.scala | 314 +-
.../tools/nsc/doc/model/IndexModelFactory.scala | 13 +-
.../scala/tools/nsc/doc/model/MemberLookup.scala | 63 +
.../scala/tools/nsc/doc/model/ModelFactory.scala | 1130 +-
.../doc/model/ModelFactoryImplicitSupport.scala | 609 +
.../nsc/doc/model/ModelFactoryTypeSupport.scala | 326 +
.../scala/tools/nsc/doc/model/TreeEntity.scala | 2 +-
.../scala/tools/nsc/doc/model/TreeFactory.scala | 17 +-
.../scala/tools/nsc/doc/model/TypeEntity.scala | 6 +-
.../scala/tools/nsc/doc/model/ValueArgument.scala | 2 +-
.../scala/tools/nsc/doc/model/Visibility.scala | 2 +-
.../scala/tools/nsc/doc/model/comment/Body.scala | 88 -
.../tools/nsc/doc/model/comment/Comment.scala | 117 -
.../nsc/doc/model/comment/CommentFactory.scala | 955 --
.../tools/nsc/doc/model/diagram/Diagram.scala | 146 +
.../doc/model/diagram/DiagramDirectiveParser.scala | 261 +
.../nsc/doc/model/diagram/DiagramFactory.scala | 271 +
.../scala/tools/nsc/interactive/BuildManager.scala | 7 +-
.../tools/nsc/interactive/CompilerControl.scala | 144 +-
.../scala/tools/nsc/interactive/ContextTrees.scala | 69 +-
.../scala/tools/nsc/interactive/Global.scala | 296 +-
.../nsc/interactive/InteractiveReporter.scala | 6 +-
.../scala/tools/nsc/interactive/Picklers.scala | 25 +-
.../interactive/PresentationCompilerThread.scala | 4 +-
.../scala/tools/nsc/interactive/REPL.scala | 103 +-
.../tools/nsc/interactive/RangePositions.scala | 87 +-
.../nsc/interactive/RefinedBuildManager.scala | 43 +-
.../scala/tools/nsc/interactive/Response.scala | 2 +-
.../nsc/interactive/RichCompilationUnits.scala | 6 +-
.../tools/nsc/interactive/ScratchPadMaker.scala | 200 +
.../tools/nsc/interactive/SimpleBuildManager.scala | 6 +-
.../nsc/interactive/tests/InteractiveTest.scala | 301 +-
.../tests/InteractiveTestSettings.scala | 70 +
.../scala/tools/nsc/interactive/tests/Tester.scala | 8 +-
.../nsc/interactive/tests/core/AskCommand.scala | 122 +
.../nsc/interactive/tests/core/CoreTestDefs.scala | 133 +
.../tests/core/PresentationCompilerInstance.scala | 35 +
.../PresentationCompilerRequestsWorkingMode.scala | 62 +
.../tests/core/PresentationCompilerTestDef.scala | 19 +
.../nsc/interactive/tests/core/Reporter.scala | 15 +
.../interactive/tests/core/SourcesCollector.scala | 22 +
.../nsc/interactive/tests/core/TestMarker.scala | 29 +
.../nsc/interactive/tests/core/TestResources.scala | 12 +
.../nsc/interactive/tests/core/TestSettings.scala | 19 +
.../nsc/interpreter/AbstractFileClassLoader.scala | 70 +-
.../nsc/interpreter/AbstractOrMissingHandler.scala | 2 +-
.../scala/tools/nsc/interpreter/ByteCode.scala | 9 +-
.../scala/tools/nsc/interpreter/CodeHandlers.scala | 23 +-
.../scala/tools/nsc/interpreter/CommandLine.scala | 2 +-
.../scala/tools/nsc/interpreter/Completion.scala | 6 +-
.../tools/nsc/interpreter/CompletionAware.scala | 63 +-
.../tools/nsc/interpreter/CompletionOutput.scala | 24 +-
.../nsc/interpreter/ConsoleReaderHelper.scala | 2 +-
.../scala/tools/nsc/interpreter/Delimited.scala | 2 +-
.../scala/tools/nsc/interpreter/Dossiers.scala | 53 -
.../scala/tools/nsc/interpreter/Eval.scala | 33 -
.../scala/tools/nsc/interpreter/ExprTyper.scala | 130 +-
.../tools/nsc/interpreter/FileCompletion.scala | 56 -
.../scala/tools/nsc/interpreter/Formatting.scala | 2 +-
.../scala/tools/nsc/interpreter/ILoop.scala | 473 +-
.../scala/tools/nsc/interpreter/ILoopInit.scala | 74 +-
.../scala/tools/nsc/interpreter/IMain.scala | 674 +-
.../scala/tools/nsc/interpreter/ISettings.scala | 4 +-
.../scala/tools/nsc/interpreter/Imports.scala | 27 +-
.../tools/nsc/interpreter/InteractiveReader.scala | 3 +-
.../tools/nsc/interpreter/JLineCompletion.scala | 151 +-
.../scala/tools/nsc/interpreter/JLineReader.scala | 10 +-
.../scala/tools/nsc/interpreter/KeyBinding.scala | 39 -
.../scala/tools/nsc/interpreter/Line.scala | 108 -
.../scala/tools/nsc/interpreter/Logger.scala | 2 +-
.../scala/tools/nsc/interpreter/LoopCommands.scala | 7 +-
.../tools/nsc/interpreter/MemberHandlers.scala | 36 +-
.../scala/tools/nsc/interpreter/NamedParam.scala | 19 +-
.../scala/tools/nsc/interpreter/Naming.scala | 37 +-
.../scala/tools/nsc/interpreter/Parsed.scala | 2 +-
.../scala/tools/nsc/interpreter/Pasted.scala | 2 +-
.../scala/tools/nsc/interpreter/Phased.scala | 7 +-
.../scala/tools/nsc/interpreter/Power.scala | 310 +-
.../tools/nsc/interpreter/ProductCompletion.scala | 44 -
.../scala/tools/nsc/interpreter/ReplConfig.scala | 28 +-
.../scala/tools/nsc/interpreter/ReplGlobal.scala | 57 +
.../scala/tools/nsc/interpreter/ReplProps.scala | 19 +-
.../scala/tools/nsc/interpreter/ReplReporter.scala | 18 +-
.../scala/tools/nsc/interpreter/ReplStrings.scala | 14 +-
.../scala/tools/nsc/interpreter/ReplVals.scala | 87 +-
.../scala/tools/nsc/interpreter/Results.scala | 2 +-
.../scala/tools/nsc/interpreter/RichClass.scala | 17 +-
.../scala/tools/nsc/interpreter/Runner.scala | 11 -
.../scala/tools/nsc/interpreter/SimpleReader.scala | 3 +-
.../scala/tools/nsc/interpreter/TypeStrings.scala | 179 +-
.../tools/nsc/interpreter/XMLCompletion.scala | 44 -
.../scala/tools/nsc/interpreter/package.scala | 8 +-
.../interpreter/session/FileBackedHistory.scala | 2 +-
.../tools/nsc/interpreter/session/History.scala | 2 +-
.../nsc/interpreter/session/JLineHistory.scala | 12 +-
.../nsc/interpreter/session/SimpleHistory.scala | 2 +-
.../tools/nsc/interpreter/session/package.scala | 3 +-
src/compiler/scala/tools/nsc/io/AbstractFile.scala | 254 -
.../scala/tools/nsc/io/ClassAndJarInfo.scala | 39 -
.../scala/tools/nsc/io/DaemonThreadFactory.scala | 2 +-
src/compiler/scala/tools/nsc/io/Directory.scala | 73 -
src/compiler/scala/tools/nsc/io/File.scala | 191 -
.../tools/nsc/io/FileOperationException.scala | 13 -
src/compiler/scala/tools/nsc/io/Fileish.scala | 2 +-
src/compiler/scala/tools/nsc/io/Jar.scala | 97 +-
src/compiler/scala/tools/nsc/io/Lexer.scala | 2 +-
src/compiler/scala/tools/nsc/io/MsilFile.scala | 18 +
.../scala/tools/nsc/io/NullPrintStream.scala | 37 -
src/compiler/scala/tools/nsc/io/Path.scala | 285 -
src/compiler/scala/tools/nsc/io/Pickler.scala | 21 +-
src/compiler/scala/tools/nsc/io/PlainFile.scala | 101 -
src/compiler/scala/tools/nsc/io/Socket.scala | 2 +-
src/compiler/scala/tools/nsc/io/SourceReader.scala | 4 +-
src/compiler/scala/tools/nsc/io/Sources.scala | 73 -
src/compiler/scala/tools/nsc/io/Streamable.scala | 122 -
.../scala/tools/nsc/io/VirtualDirectory.scala | 70 -
src/compiler/scala/tools/nsc/io/VirtualFile.scala | 100 -
src/compiler/scala/tools/nsc/io/ZipArchive.scala | 215 -
src/compiler/scala/tools/nsc/io/package.scala | 40 +-
.../scala/tools/nsc/javac/JavaParsers.scala | 76 +-
.../scala/tools/nsc/javac/JavaScanners.scala | 17 +-
.../scala/tools/nsc/javac/JavaTokens.scala | 2 +-
.../scala/tools/nsc/matching/MatchSupport.scala | 20 +-
src/compiler/scala/tools/nsc/matching/Matrix.scala | 26 +-
.../scala/tools/nsc/matching/MatrixAdditions.scala | 33 +-
.../tools/nsc/matching/ParallelMatching.scala | 59 +-
.../scala/tools/nsc/matching/PatternBindings.scala | 13 +-
.../scala/tools/nsc/matching/Patterns.scala | 90 +-
src/compiler/scala/tools/nsc/package.scala | 18 +-
src/compiler/scala/tools/nsc/plugins/Plugin.scala | 38 +-
.../scala/tools/nsc/plugins/PluginComponent.scala | 2 +-
.../tools/nsc/plugins/PluginDescription.scala | 12 +-
.../tools/nsc/plugins/PluginLoadException.scala | 2 +-
src/compiler/scala/tools/nsc/plugins/Plugins.scala | 8 +-
.../tools/nsc/reporters/AbstractReporter.scala | 24 +-
.../tools/nsc/reporters/ConsoleReporter.scala | 53 +-
.../scala/tools/nsc/reporters/Reporter.scala | 46 +-
.../scala/tools/nsc/reporters/ReporterTimer.scala | 20 -
.../scala/tools/nsc/reporters/StoreReporter.scala | 6 +-
.../scala/tools/nsc/scratchpad/Mixer.scala | 102 +
.../tools/nsc/scratchpad/SourceInserter.scala | 23 +
.../tools/nsc/settings/AbsScalaSettings.scala | 4 +-
.../scala/tools/nsc/settings/AbsSettings.scala | 22 +-
.../tools/nsc/settings/AdvancedScalaSettings.scala | 3 +-
.../tools/nsc/settings/AestheticSettings.scala | 6 +-
.../scala/tools/nsc/settings/FscSettings.scala | 27 +-
.../tools/nsc/settings/ImmutableSettings.scala | 11 -
.../scala/tools/nsc/settings/MutableSettings.scala | 142 +-
.../scala/tools/nsc/settings/ScalaSettings.scala | 207 +-
.../scala/tools/nsc/settings/ScalaVersion.scala | 194 +
.../tools/nsc/settings/StandardScalaSettings.scala | 9 +-
.../scala/tools/nsc/settings/Warnings.scala | 13 +-
.../tools/nsc/symtab/AnnotationCheckers.scala | 120 -
.../scala/tools/nsc/symtab/AnnotationInfos.scala | 145 -
.../scala/tools/nsc/symtab/BaseTypeSeqs.scala | 251 -
.../scala/tools/nsc/symtab/BrowsingLoaders.scala | 26 +-
.../scala/tools/nsc/symtab/Definitions.scala | 908 --
src/compiler/scala/tools/nsc/symtab/Flags.scala | 220 -
.../scala/tools/nsc/symtab/InfoTransformers.scala | 47 -
.../scala/tools/nsc/symtab/NameManglers.scala | 171 -
src/compiler/scala/tools/nsc/symtab/Names.scala | 431 -
.../scala/tools/nsc/symtab/Positions.scala | 37 -
src/compiler/scala/tools/nsc/symtab/Scopes.scala | 350 -
src/compiler/scala/tools/nsc/symtab/StdNames.scala | 588 -
.../scala/tools/nsc/symtab/SymbolLoaders.scala | 316 +-
.../scala/tools/nsc/symtab/SymbolTable.scala | 157 +-
.../scala/tools/nsc/symtab/SymbolTrackers.scala | 6 +-
src/compiler/scala/tools/nsc/symtab/Symbols.scala | 2199 ---
.../scala/tools/nsc/symtab/TypeDebugging.scala | 95 -
src/compiler/scala/tools/nsc/symtab/Types.scala | 5940 --------
.../nsc/symtab/classfile/AbstractFileReader.scala | 2 +-
.../nsc/symtab/classfile/ClassfileConstants.scala | 333 -
.../nsc/symtab/classfile/ClassfileParser.scala | 902 +-
.../tools/nsc/symtab/classfile/ICodeReader.scala | 286 +-
.../tools/nsc/symtab/classfile/MetaParser.scala | 166 -
.../scala/tools/nsc/symtab/classfile/Pickler.scala | 84 +-
.../tools/nsc/symtab/classfile/UnPickler.scala | 103 -
.../scala/tools/nsc/symtab/classfile/package.scala | 7 +
.../scala/tools/nsc/symtab/clr/CLRTypes.scala | 22 +-
.../scala/tools/nsc/symtab/clr/TypeParser.scala | 75 +-
src/compiler/scala/tools/nsc/symtab/package.scala | 7 +
.../scala/tools/nsc/transform/AddInterfaces.scala | 393 +-
.../scala/tools/nsc/transform/CleanUp.scala | 686 +-
.../scala/tools/nsc/transform/Constructors.scala | 216 +-
.../scala/tools/nsc/transform/Erasure.scala | 1334 +-
.../scala/tools/nsc/transform/ExplicitOuter.scala | 291 +-
.../tools/nsc/transform/ExtensionMethods.scala | 274 +
.../scala/tools/nsc/transform/Flatten.scala | 95 +-
.../scala/tools/nsc/transform/InfoTransform.scala | 4 +-
.../scala/tools/nsc/transform/InlineErasure.scala | 9 +
.../scala/tools/nsc/transform/LambdaLift.scala | 375 +-
.../scala/tools/nsc/transform/LazyVals.scala | 137 +-
.../scala/tools/nsc/transform/LiftCode.scala | 148 -
src/compiler/scala/tools/nsc/transform/Mixin.scala | 1305 +-
.../tools/nsc/transform/OverridingPairs.scala | 58 +-
.../scala/tools/nsc/transform/PostErasure.scala | 71 +
.../scala/tools/nsc/transform/Reifiers.scala | 330 -
.../tools/nsc/transform/SampleTransform.scala | 20 +-
.../tools/nsc/transform/SpecializeTypes.scala | 1540 +-
.../scala/tools/nsc/transform/TailCalls.scala | 221 +-
.../scala/tools/nsc/transform/Transform.scala | 2 +-
.../tools/nsc/transform/TypingTransformers.scala | 9 +-
.../scala/tools/nsc/transform/UnCurry.scala | 1074 +-
.../scala/tools/nsc/transform/patmat/Logic.scala | 644 +
.../tools/nsc/transform/patmat/MatchAnalysis.scala | 709 +
.../tools/nsc/transform/patmat/MatchCodeGen.scala | 258 +
.../nsc/transform/patmat/MatchOptimization.scala | 615 +
.../nsc/transform/patmat/MatchTranslation.scala | 674 +
.../nsc/transform/patmat/MatchTreeMaking.scala | 614 +
.../nsc/transform/patmat/PatternMatching.scala | 256 +
.../scala/tools/nsc/transform/patmat/Solving.scala | 243 +
.../scala/tools/nsc/typechecker/Adaptations.scala | 83 +
.../scala/tools/nsc/typechecker/Analyzer.scala | 15 +-
.../tools/nsc/typechecker/AnalyzerPlugins.scala | 225 +
.../scala/tools/nsc/typechecker/Checkable.scala | 283 +
.../tools/nsc/typechecker/ConstantFolder.scala | 4 +-
.../tools/nsc/typechecker/ContextErrors.scala | 1372 ++
.../scala/tools/nsc/typechecker/Contexts.scala | 499 +-
.../scala/tools/nsc/typechecker/DeVirtualize.scala | 645 -
.../tools/nsc/typechecker/DestructureTypes.scala | 208 +
.../scala/tools/nsc/typechecker/Duplicators.scala | 228 +-
.../scala/tools/nsc/typechecker/EtaExpansion.scala | 48 +-
.../scala/tools/nsc/typechecker/Implicits.scala | 1191 +-
.../scala/tools/nsc/typechecker/Infer.scala | 1264 +-
.../scala/tools/nsc/typechecker/Macros.scala | 948 ++
.../tools/nsc/typechecker/MethodSynthesis.scala | 594 +
.../scala/tools/nsc/typechecker/Modes.scala | 11 +-
.../scala/tools/nsc/typechecker/Namers.scala | 2302 +--
.../tools/nsc/typechecker/NamesDefaults.scala | 471 +-
.../scala/tools/nsc/typechecker/RefChecks.scala | 1094 +-
.../tools/nsc/typechecker/StdAttachments.scala | 10 +
.../tools/nsc/typechecker/SuperAccessors.scala | 517 +-
.../tools/nsc/typechecker/SyntheticMethods.scala | 561 +-
.../scala/tools/nsc/typechecker/Tags.scala | 72 +
.../scala/tools/nsc/typechecker/TreeCheckers.scala | 109 +-
.../tools/nsc/typechecker/TypeDiagnostics.scala | 333 +-
.../scala/tools/nsc/typechecker/Typers.scala | 5593 ++++---
.../scala/tools/nsc/typechecker/Unapplies.scala | 194 +-
.../scala/tools/nsc/typechecker/Variances.scala | 20 +-
src/compiler/scala/tools/nsc/util/BitSet.scala | 164 -
.../scala/tools/nsc/util/CharArrayReader.scala | 4 +-
src/compiler/scala/tools/nsc/util/Chars.scala | 94 -
src/compiler/scala/tools/nsc/util/ClassPath.scala | 143 +-
.../scala/tools/nsc/util/CommandLineParser.scala | 5 +-
src/compiler/scala/tools/nsc/util/DocStrings.scala | 113 +-
.../scala/tools/nsc/util/Exceptional.scala | 134 +-
src/compiler/scala/tools/nsc/util/FlagsUtil.scala | 236 -
.../scala/tools/nsc/util/FreshNameCreator.scala | 10 +-
src/compiler/scala/tools/nsc/util/HashSet.scala | 107 -
.../scala/tools/nsc/util/InterruptReq.scala | 19 +-
.../scala/tools/nsc/util/JavaCharArrayReader.scala | 6 +-
.../scala/tools/nsc/util/JavaStackFrame.scala | 71 -
.../scala/tools/nsc/util/MsilClassPath.scala | 33 +-
.../scala/tools/nsc/util/MultiHashMap.scala | 5 +-
src/compiler/scala/tools/nsc/util/Origins.scala | 105 -
src/compiler/scala/tools/nsc/util/Position.scala | 285 -
src/compiler/scala/tools/nsc/util/RegexCache.scala | 40 -
.../scala/tools/nsc/util/ScalaClassLoader.scala | 162 +-
src/compiler/scala/tools/nsc/util/ScalaPrefs.scala | 25 -
src/compiler/scala/tools/nsc/util/Set.scala | 30 -
.../scala/tools/nsc/util/ShowPickled.scala | 44 +-
.../scala/tools/nsc/util/SimpleTracer.scala | 19 +
src/compiler/scala/tools/nsc/util/SourceFile.scala | 145 -
src/compiler/scala/tools/nsc/util/Statistics.scala | 280 -
.../scala/tools/nsc/util/StatisticsInfo.scala | 38 +
src/compiler/scala/tools/nsc/util/TableDef.scala | 95 -
src/compiler/scala/tools/nsc/util/Tracer.scala | 63 -
src/compiler/scala/tools/nsc/util/TreeSet.scala | 22 +-
.../scala/tools/nsc/util/WorkScheduler.scala | 19 +-
src/compiler/scala/tools/nsc/util/package.scala | 96 +-
src/compiler/scala/tools/nsc/util/trace.scala | 13 -
src/compiler/scala/tools/reflect/FastTrack.scala | 41 +
src/compiler/scala/tools/reflect/FrontEnd.scala | 50 +
src/compiler/scala/tools/reflect/Invoked.scala | 52 -
.../scala/tools/reflect/MacroImplementations.scala | 171 +
src/compiler/scala/tools/reflect/Mock.scala | 59 -
.../scala/tools/reflect/ReflectGlobal.scala | 40 +
src/compiler/scala/tools/reflect/ReflectMain.scala | 19 +
.../scala/tools/reflect/ReflectSetup.scala | 10 +
src/compiler/scala/tools/reflect/Shield.scala | 44 -
src/compiler/scala/tools/reflect/SigParser.scala | 42 -
src/compiler/scala/tools/reflect/StdTags.scala | 58 +
src/compiler/scala/tools/reflect/ToolBox.scala | 104 +
.../scala/tools/reflect/ToolBoxFactory.scala | 421 +
src/compiler/scala/tools/reflect/UniversalFn.scala | 59 -
.../scala/tools/reflect/WrappedProperties.scala | 16 +-
src/compiler/scala/tools/reflect/package.scala | 130 +-
src/compiler/scala/tools/util/AbstractTimer.scala | 53 -
.../scala/tools/util/ClassPathSettings.scala | 32 -
src/compiler/scala/tools/util/Javap.scala | 89 +-
src/compiler/scala/tools/util/PathResolver.scala | 38 +-
src/compiler/scala/tools/util/Profiling.scala | 52 -
src/compiler/scala/tools/util/SignalManager.scala | 275 -
src/compiler/scala/tools/util/Signallable.scala | 65 -
.../scala/tools/util/SocketConnection.scala | 52 -
src/compiler/scala/tools/util/SocketServer.scala | 6 +-
src/compiler/scala/tools/util/StringOps.scala | 89 -
src/compiler/scala/tools/util/VerifyClass.scala | 53 +
src/compiler/scala/tools/util/Which.scala | 38 -
.../scala/util/continuations/ControlContext.scala | 100 +-
.../library/scala/util/continuations/package.scala | 134 +-
.../tools/selectivecps/CPSAnnotationChecker.scala | 316 +-
.../plugin/scala/tools/selectivecps/CPSUtils.scala | 138 +-
.../tools/selectivecps/SelectiveANFTransform.scala | 348 +-
.../tools/selectivecps/SelectiveCPSPlugin.scala | 2 +
.../tools/selectivecps/SelectiveCPSTransform.scala | 114 +-
src/dbc/scala/dbc/DataType.scala | 69 -
src/dbc/scala/dbc/Database.scala | 187 -
src/dbc/scala/dbc/Syntax.scala | 47 -
src/dbc/scala/dbc/Utilities.scala | 28 -
src/dbc/scala/dbc/Value.scala | 27 -
src/dbc/scala/dbc/Vendor.scala | 41 -
.../scala/dbc/datatype/ApproximateNumeric.scala | 57 -
src/dbc/scala/dbc/datatype/Boolean.scala | 31 -
src/dbc/scala/dbc/datatype/Character.scala | 40 -
.../scala/dbc/datatype/CharacterLargeObject.scala | 31 -
src/dbc/scala/dbc/datatype/CharacterString.scala | 24 -
src/dbc/scala/dbc/datatype/CharacterVarying.scala | 41 -
src/dbc/scala/dbc/datatype/ExactNumeric.scala | 65 -
src/dbc/scala/dbc/datatype/Factory.scala | 250 -
src/dbc/scala/dbc/datatype/Numeric.scala | 32 -
src/dbc/scala/dbc/datatype/String.scala | 24 -
src/dbc/scala/dbc/datatype/Unknown.scala | 34 -
.../scala/dbc/exception/IncompatibleSchema.scala | 19 -
.../scala/dbc/exception/UnsupportedFeature.scala | 16 -
src/dbc/scala/dbc/package.scala | 6 -
src/dbc/scala/dbc/result/Field.scala | 63 -
src/dbc/scala/dbc/result/FieldMetadata.scala | 40 -
src/dbc/scala/dbc/result/Relation.scala | 82 -
src/dbc/scala/dbc/result/Status.scala | 28 -
src/dbc/scala/dbc/result/Tuple.scala | 42 -
src/dbc/scala/dbc/statement/AccessMode.scala | 26 -
src/dbc/scala/dbc/statement/DerivedColumn.scala | 38 -
src/dbc/scala/dbc/statement/Expression.scala | 28 -
src/dbc/scala/dbc/statement/Insert.scala | 31 -
src/dbc/scala/dbc/statement/InsertionData.scala | 40 -
src/dbc/scala/dbc/statement/IsolationLevel.scala | 32 -
src/dbc/scala/dbc/statement/JoinType.scala | 56 -
src/dbc/scala/dbc/statement/Jointure.scala | 45 -
src/dbc/scala/dbc/statement/Relation.scala | 55 -
src/dbc/scala/dbc/statement/Select.scala | 99 -
src/dbc/scala/dbc/statement/SetClause.scala | 21 -
src/dbc/scala/dbc/statement/SetQuantifier.scala | 38 -
src/dbc/scala/dbc/statement/Statement.scala | 16 -
src/dbc/scala/dbc/statement/Status.scala | 32 -
src/dbc/scala/dbc/statement/Table.scala | 38 -
src/dbc/scala/dbc/statement/Transaction.scala | 55 -
src/dbc/scala/dbc/statement/Update.scala | 47 -
.../scala/dbc/statement/expression/Aggregate.scala | 35 -
.../dbc/statement/expression/BinaryOperator.scala | 33 -
.../scala/dbc/statement/expression/Constant.scala | 23 -
.../scala/dbc/statement/expression/Default.scala | 22 -
src/dbc/scala/dbc/statement/expression/Field.scala | 40 -
.../dbc/statement/expression/FunctionCall.scala | 33 -
.../scala/dbc/statement/expression/Select.scala | 28 -
.../dbc/statement/expression/SetFunction.scala | 40 -
.../scala/dbc/statement/expression/TypeCast.scala | 32 -
.../dbc/statement/expression/UnaryOperator.scala | 33 -
src/dbc/scala/dbc/syntax/DataTypeUtil.scala | 98 -
src/dbc/scala/dbc/syntax/Database.scala | 33 -
src/dbc/scala/dbc/syntax/Statement.scala | 274 -
src/dbc/scala/dbc/syntax/StatementExpression.scala | 221 -
src/dbc/scala/dbc/value/ApproximateNumeric.scala | 28 -
src/dbc/scala/dbc/value/Boolean.scala | 27 -
src/dbc/scala/dbc/value/Character.scala | 35 -
src/dbc/scala/dbc/value/CharacterLargeObject.scala | 35 -
src/dbc/scala/dbc/value/CharacterVarying.scala | 35 -
src/dbc/scala/dbc/value/Conversion.scala | 156 -
src/dbc/scala/dbc/value/ExactNumeric.scala | 35 -
src/dbc/scala/dbc/value/Factory.scala | 95 -
src/dbc/scala/dbc/value/Unknown.scala | 27 -
src/dbc/scala/dbc/vendor/PostgreSQL.scala | 29 -
src/detach/library/scala/remoting/Channel.scala | 34 +-
src/detach/library/scala/remoting/Debug.scala | 2 +-
.../library/scala/remoting/ServerChannel.scala | 2 +-
src/detach/library/scala/remoting/detach.scala | 2 +-
src/detach/library/scala/runtime/RemoteRef.scala | 2 +-
.../library/scala/runtime/remoting/Debug.scala | 2 +-
.../scala/runtime/remoting/RegistryDelegate.scala | 2 +-
.../scala/runtime/remoting/RemoteBooleanRef.scala | 2 +-
.../scala/runtime/remoting/RemoteByteRef.scala | 2 +-
.../scala/runtime/remoting/RemoteCharRef.scala | 2 +-
.../scala/runtime/remoting/RemoteDoubleRef.scala | 2 +-
.../scala/runtime/remoting/RemoteFloatRef.scala | 2 +-
.../library/scala/runtime/remoting/RemoteGC.scala | 7 +-
.../scala/runtime/remoting/RemoteIntRef.scala | 2 +-
.../scala/runtime/remoting/RemoteLongRef.scala | 2 +-
.../scala/runtime/remoting/RemoteObjectRef.scala | 2 +-
.../scala/runtime/remoting/RemoteShortRef.scala | 2 +-
src/detach/plugin/scala/tools/detach/Detach.scala | 51 +-
.../plugin/scala/tools/detach/DetachPlugin.scala | 2 +-
src/eclipse/README.md | 55 +
src/eclipse/asm/.classpath | 6 +
src/eclipse/asm/.project | 29 +
src/eclipse/continuations-library/.classpath | 8 +
src/eclipse/continuations-library/.project | 30 +
.../.settings/org.scala-ide.sdt.core.prefs | 2 +
src/eclipse/fjbg/.classpath | 7 +
src/eclipse/fjbg/.project | 30 +
src/eclipse/partest/.classpath | 15 +
src/eclipse/partest/.project | 35 +
src/eclipse/reflect/.classpath | 8 +
src/eclipse/reflect/.project | 30 +
src/eclipse/scala-compiler/.classpath | 14 +
src/eclipse/scala-compiler/.project | 35 +
src/eclipse/scala-library/.classpath | 7 +
src/eclipse/scala-library/.project | 30 +
src/eclipse/scalap/.classpath | 13 +
src/eclipse/scalap/.project | 35 +
src/eclipse/test-junit/.classpath | 12 +
src/eclipse/test-junit/.project | 35 +
src/ensime/.ensime.SAMPLE | 17 +
src/ensime/README.md | 11 +
src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java | 3 +-
.../ch/epfl/lamp/fjbg/JBootstrapInvokeDynamic.java | 69 -
src/fjbg/ch/epfl/lamp/fjbg/JClass.java | 10 +-
src/fjbg/ch/epfl/lamp/fjbg/JCode.java | 19 +-
src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java | 2 +-
.../ch/epfl/lamp/fjbg/JConstantValueAttribute.java | 2 +-
.../epfl/lamp/fjbg/JEnclosingMethodAttribute.java | 2 +-
.../ch/epfl/lamp/fjbg/JExceptionsAttribute.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java | 12 +-
src/fjbg/ch/epfl/lamp/fjbg/JField.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java | 2 +-
.../ch/epfl/lamp/fjbg/JInnerClassesAttribute.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JLabel.java | 2 +-
.../epfl/lamp/fjbg/JLineNumberTableAttribute.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java | 2 +-
.../lamp/fjbg/JLocalVariableTableAttribute.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JMember.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JMethod.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java | 2 +-
.../ch/epfl/lamp/fjbg/JSourceFileAttribute.java | 2 +-
.../ch/epfl/lamp/fjbg/JStackMapTableAttribute.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/JType.java | 2 +-
src/fjbg/ch/epfl/lamp/fjbg/Main.java | 2 +-
src/fjbg/ch/epfl/lamp/util/ByteArray.java | 2 +-
.../scala/concurrent/forkjoin/ForkJoinPool.java | 4731 ++++--
.../scala/concurrent/forkjoin/ForkJoinTask.java | 1749 ++-
.../concurrent/forkjoin/ForkJoinWorkerThread.java | 756 +-
.../concurrent/forkjoin/LinkedTransferQueue.java | 1583 +-
.../scala/concurrent/forkjoin/RecursiveAction.java | 113 +-
.../scala/concurrent/forkjoin/RecursiveTask.java | 31 +-
.../concurrent/forkjoin/ThreadLocalRandom.java | 81 +-
.../scala/concurrent/forkjoin/TransferQueue.java | 85 +-
.../scala/concurrent/forkjoin/package-info.java | 5 +-
src/forkjoin/scala/concurrent/util/Unsafe.java | 35 +
src/intellij/README | 11 +-
src/intellij/actors.iml.SAMPLE | 4 +-
src/intellij/asm.iml.SAMPLE | 12 +
src/intellij/compiler.iml.SAMPLE | 8 +-
src/intellij/dbc.iml.SAMPLE | 23 -
src/intellij/fjbg.iml.SAMPLE | 12 +
src/intellij/forkjoin.iml.SAMPLE | 12 +
src/intellij/library.iml.SAMPLE | 6 +-
src/intellij/manual.iml.SAMPLE | 2 +-
src/intellij/msil.iml.SAMPLE | 24 +
src/intellij/partest.iml.SAMPLE | 6 +-
src/intellij/reflect.iml.SAMPLE | 25 +
src/intellij/scala-lang.ipr.SAMPLE | 32 +-
src/intellij/scala.iml.SAMPLE | 10 +
src/intellij/scalap.iml.SAMPLE | 3 +-
src/intellij/swing.iml.SAMPLE | 2 +-
src/intellij/test.iml.SAMPLE | 20 +
src/jline/TEST-NOTE.txt | 4 -
src/jline/build.sbt | 49 +
src/jline/manual-test.sh | 8 +
src/jline/project/build.properties | 8 -
src/jline/project/build/JlineProject.scala | 35 -
src/jline/project/plugins/Plugins.scala | 5 -
src/jline/project/plugins/build.sbt | 5 +
src/jline/project/plugins/project/build.properties | 3 -
.../scala/tools/jline/console/ConsoleReader.java | 9 +
.../jline/console/ConsoleReaderTestSupport.java | 1 +
src/library-aux/scala/Any.scala | 34 +-
src/library-aux/scala/AnyRef.scala | 5 +-
src/library/rootdoc.txt | 28 +
src/library/scala/AnyVal.scala | 43 +-
src/library/scala/AnyValCompanion.scala | 4 +-
src/library/scala/App.scala | 19 +-
src/library/scala/Application.scala | 92 +-
src/library/scala/Array.scala | 263 +-
src/library/scala/Boolean.scala | 108 +-
src/library/scala/Byte.scala | 718 +-
src/library/scala/Cell.scala | 21 -
src/library/scala/Char.scala | 717 +-
src/library/scala/Cloneable.scala | 14 +
src/library/scala/Console.scala | 48 +-
src/library/scala/CountedIterator.scala | 24 -
src/library/scala/DelayedInit.scala | 46 +-
src/library/scala/Double.scala | 453 +-
src/library/scala/Dynamic.scala | 35 +-
src/library/scala/Either.scala | 609 -
src/library/scala/Enumeration.scala | 156 +-
src/library/scala/Equals.scala | 4 +-
src/library/scala/Float.scala | 457 +-
src/library/scala/Function.scala | 71 +-
src/library/scala/Function0.scala | 16 +-
src/library/scala/Function1.scala | 18 +-
src/library/scala/Function10.scala | 9 +-
src/library/scala/Function11.scala | 9 +-
src/library/scala/Function12.scala | 9 +-
src/library/scala/Function13.scala | 9 +-
src/library/scala/Function14.scala | 9 +-
src/library/scala/Function15.scala | 9 +-
src/library/scala/Function16.scala | 9 +-
src/library/scala/Function17.scala | 9 +-
src/library/scala/Function18.scala | 9 +-
src/library/scala/Function19.scala | 9 +-
src/library/scala/Function2.scala | 21 +-
src/library/scala/Function20.scala | 9 +-
src/library/scala/Function21.scala | 9 +-
src/library/scala/Function22.scala | 9 +-
src/library/scala/Function3.scala | 9 +-
src/library/scala/Function4.scala | 9 +-
src/library/scala/Function5.scala | 9 +-
src/library/scala/Function6.scala | 9 +-
src/library/scala/Function7.scala | 9 +-
src/library/scala/Function8.scala | 9 +-
src/library/scala/Function9.scala | 9 +-
src/library/scala/Immutable.scala | 2 +-
src/library/scala/Int.scala | 716 +-
src/library/scala/Long.scala | 715 +-
src/library/scala/LowPriorityImplicits.scala | 24 +-
src/library/scala/MatchError.scala | 12 +-
src/library/scala/Math.scala | 95 -
src/library/scala/MathCommon.scala | 143 -
src/library/scala/Mutable.scala | 2 +-
src/library/scala/NotDefinedError.scala | 17 -
src/library/scala/NotImplementedError.scala | 19 +
src/library/scala/NotNull.scala | 6 +-
src/library/scala/Option.scala | 73 +-
src/library/scala/PartialFunction.scala | 233 +-
src/library/scala/Predef.scala | 230 +-
src/library/scala/Product.scala | 27 +-
src/library/scala/Product1.scala | 8 +-
src/library/scala/Product10.scala | 8 +-
src/library/scala/Product11.scala | 8 +-
src/library/scala/Product12.scala | 8 +-
src/library/scala/Product13.scala | 8 +-
src/library/scala/Product14.scala | 8 +-
src/library/scala/Product15.scala | 8 +-
src/library/scala/Product16.scala | 8 +-
src/library/scala/Product17.scala | 8 +-
src/library/scala/Product18.scala | 8 +-
src/library/scala/Product19.scala | 8 +-
src/library/scala/Product2.scala | 8 +-
src/library/scala/Product20.scala | 8 +-
src/library/scala/Product21.scala | 8 +-
src/library/scala/Product22.scala | 8 +-
src/library/scala/Product3.scala | 8 +-
src/library/scala/Product4.scala | 8 +-
src/library/scala/Product5.scala | 8 +-
src/library/scala/Product6.scala | 8 +-
src/library/scala/Product7.scala | 8 +-
src/library/scala/Product8.scala | 8 +-
src/library/scala/Product9.scala | 8 +-
src/library/scala/Proxy.scala | 23 +-
src/library/scala/Responder.scala | 23 +-
src/library/scala/ScalaObject.scala | 11 +-
src/library/scala/SerialVersionUID.scala | 9 +-
src/library/scala/Serializable.scala | 4 +-
src/library/scala/Short.scala | 717 +-
src/library/scala/Specializable.scala | 29 +
src/library/scala/SpecializableCompanion.scala | 3 +-
src/library/scala/StringContext.scala | 240 +
src/library/scala/Symbol.scala | 36 +-
src/library/scala/Tuple1.scala | 2 +-
src/library/scala/Tuple10.scala | 2 +-
src/library/scala/Tuple11.scala | 2 +-
src/library/scala/Tuple12.scala | 2 +-
src/library/scala/Tuple13.scala | 2 +-
src/library/scala/Tuple14.scala | 2 +-
src/library/scala/Tuple15.scala | 2 +-
src/library/scala/Tuple16.scala | 2 +-
src/library/scala/Tuple17.scala | 2 +-
src/library/scala/Tuple18.scala | 2 +-
src/library/scala/Tuple19.scala | 2 +-
src/library/scala/Tuple2.scala | 108 +-
src/library/scala/Tuple20.scala | 2 +-
src/library/scala/Tuple21.scala | 2 +-
src/library/scala/Tuple22.scala | 2 +-
src/library/scala/Tuple3.scala | 122 +-
src/library/scala/Tuple4.scala | 2 +-
src/library/scala/Tuple5.scala | 2 +-
src/library/scala/Tuple6.scala | 2 +-
src/library/scala/Tuple7.scala | 2 +-
src/library/scala/Tuple8.scala | 2 +-
src/library/scala/Tuple9.scala | 2 +-
src/library/scala/UninitializedError.scala | 2 +-
src/library/scala/UninitializedFieldError.scala | 9 +-
src/library/scala/Unit.scala | 13 +-
src/library/scala/annotation/Annotation.scala | 15 +-
.../scala/annotation/ClassfileAnnotation.scala | 10 +-
.../scala/annotation/StaticAnnotation.scala | 8 +-
src/library/scala/annotation/TypeConstraint.scala | 24 +-
src/library/scala/annotation/bridge.scala | 5 +-
src/library/scala/annotation/cloneable.scala | 15 +
src/library/scala/annotation/elidable.scala | 80 +-
.../scala/annotation/implicitNotFound.scala | 4 +-
src/library/scala/annotation/meta/beanGetter.scala | 13 +
src/library/scala/annotation/meta/beanSetter.scala | 13 +
.../scala/annotation/meta/companionClass.scala | 17 +
.../scala/annotation/meta/companionMethod.scala | 17 +
.../scala/annotation/meta/companionObject.scala | 14 +
src/library/scala/annotation/meta/field.scala | 13 +
src/library/scala/annotation/meta/getter.scala | 13 +
.../scala/annotation/meta/languageFeature.scala | 13 +
src/library/scala/annotation/meta/package.scala | 68 +
src/library/scala/annotation/meta/param.scala | 13 +
src/library/scala/annotation/meta/setter.scala | 13 +
src/library/scala/annotation/migration.scala | 11 +-
src/library/scala/annotation/serializable.scala | 4 +-
src/library/scala/annotation/strictfp.scala | 4 +-
src/library/scala/annotation/switch.scala | 7 +-
src/library/scala/annotation/tailrec.scala | 15 +-
.../scala/annotation/target/beanGetter.scala | 13 -
.../scala/annotation/target/beanSetter.scala | 13 -
src/library/scala/annotation/target/field.scala | 13 -
src/library/scala/annotation/target/getter.scala | 13 -
src/library/scala/annotation/target/package.scala | 93 +-
src/library/scala/annotation/target/param.scala | 13 -
src/library/scala/annotation/target/setter.scala | 13 -
.../annotation/unchecked/uncheckedStable.scala | 4 +-
.../annotation/unchecked/uncheckedVariance.scala | 4 +-
src/library/scala/annotation/unspecialized.scala | 17 +
src/library/scala/annotation/varargs.scala | 13 +-
src/library/scala/beans/BeanDescription.scala | 19 +
src/library/scala/beans/BeanDisplayName.scala | 18 +
src/library/scala/beans/BeanInfo.scala | 20 +
src/library/scala/beans/BeanInfoSkip.scala | 18 +
src/library/scala/beans/BeanProperty.scala | 26 +
src/library/scala/beans/BooleanBeanProperty.scala | 16 +
src/library/scala/beans/ScalaBeanInfo.scala | 46 +
src/library/scala/cloneable.scala | 16 -
src/library/scala/collection/BitSet.scala | 6 +-
src/library/scala/collection/BitSetLike.scala | 80 +-
.../scala/collection/BufferedIterator.scala | 2 +-
.../scala/collection/CustomParallelizable.scala | 4 +-
src/library/scala/collection/DefaultMap.scala | 32 +-
src/library/scala/collection/GenIterable.scala | 4 +-
src/library/scala/collection/GenIterableLike.scala | 89 +-
src/library/scala/collection/GenIterableView.scala | 2 +-
.../scala/collection/GenIterableViewLike.scala | 3 +-
src/library/scala/collection/GenMap.scala | 4 +-
src/library/scala/collection/GenMapLike.scala | 74 +-
src/library/scala/collection/GenSeq.scala | 4 +-
src/library/scala/collection/GenSeqLike.scala | 205 +-
src/library/scala/collection/GenSeqView.scala | 2 +-
src/library/scala/collection/GenSeqViewLike.scala | 2 +-
src/library/scala/collection/GenSet.scala | 4 +-
src/library/scala/collection/GenSetLike.scala | 21 +-
src/library/scala/collection/GenTraversable.scala | 4 +-
.../scala/collection/GenTraversableLike.scala | 199 +-
.../scala/collection/GenTraversableOnce.scala | 122 +-
.../scala/collection/GenTraversableView.scala | 2 +-
.../scala/collection/GenTraversableViewLike.scala | 2 +-
src/library/scala/collection/IndexedSeq.scala | 13 +-
src/library/scala/collection/IndexedSeqLike.scala | 18 +-
.../scala/collection/IndexedSeqOptimized.scala | 15 +-
src/library/scala/collection/Iterable.scala | 20 +-
src/library/scala/collection/IterableLike.scala | 65 +-
src/library/scala/collection/IterableProxy.scala | 10 +-
.../scala/collection/IterableProxyLike.scala | 6 +-
src/library/scala/collection/IterableView.scala | 2 +-
.../scala/collection/IterableViewLike.scala | 28 +-
src/library/scala/collection/Iterator.scala | 262 +-
src/library/scala/collection/JavaConversions.scala | 915 +-
src/library/scala/collection/JavaConverters.scala | 501 +-
src/library/scala/collection/LinearSeq.scala | 7 +-
src/library/scala/collection/LinearSeqLike.scala | 19 +-
.../scala/collection/LinearSeqOptimized.scala | 33 +-
src/library/scala/collection/Map.scala | 13 +-
src/library/scala/collection/MapLike.scala | 75 +-
src/library/scala/collection/MapProxy.scala | 7 +-
src/library/scala/collection/MapProxyLike.scala | 2 +-
src/library/scala/collection/Parallel.scala | 2 +-
src/library/scala/collection/Parallelizable.scala | 4 +-
src/library/scala/collection/Seq.scala | 19 +-
src/library/scala/collection/SeqExtractors.scala | 23 +
src/library/scala/collection/SeqLike.scala | 475 +-
src/library/scala/collection/SeqProxy.scala | 2 +-
src/library/scala/collection/SeqProxyLike.scala | 5 +-
src/library/scala/collection/SeqView.scala | 2 +-
src/library/scala/collection/SeqViewLike.scala | 43 +-
src/library/scala/collection/Set.scala | 9 +-
src/library/scala/collection/SetLike.scala | 30 +-
src/library/scala/collection/SetProxy.scala | 8 +-
src/library/scala/collection/SetProxyLike.scala | 2 +-
src/library/scala/collection/SortedMap.scala | 21 +-
src/library/scala/collection/SortedMapLike.scala | 25 +-
src/library/scala/collection/SortedSet.scala | 6 +-
src/library/scala/collection/SortedSetLike.scala | 2 +-
src/library/scala/collection/Traversable.scala | 15 +-
src/library/scala/collection/TraversableLike.scala | 165 +-
src/library/scala/collection/TraversableOnce.scala | 107 +-
.../scala/collection/TraversableProxy.scala | 2 +-
.../scala/collection/TraversableProxyLike.scala | 7 +-
src/library/scala/collection/TraversableView.scala | 4 +-
.../scala/collection/TraversableViewLike.scala | 52 +-
.../scala/collection/concurrent/BasicNode.java | 20 +
.../scala/collection/concurrent/CNodeBase.java | 35 +
src/library/scala/collection/concurrent/Gen.java | 18 +
.../scala/collection/concurrent/INodeBase.java | 35 +
.../scala/collection/concurrent/MainNode.java | 40 +
src/library/scala/collection/concurrent/Map.scala | 88 +
.../scala/collection/concurrent/TrieMap.scala | 1082 ++
.../scala/collection/convert/DecorateAsJava.scala | 318 +
.../scala/collection/convert/DecorateAsScala.scala | 209 +
.../scala/collection/convert/Decorators.scala | 46 +
.../scala/collection/convert/WrapAsJava.scala | 287 +
.../scala/collection/convert/WrapAsScala.scala | 236 +
.../scala/collection/convert/Wrappers.scala | 478 +
src/library/scala/collection/convert/package.scala | 18 +
src/library/scala/collection/generic/Addable.scala | 61 -
.../scala/collection/generic/BitOperations.scala | 2 +-
.../scala/collection/generic/BitSetFactory.scala | 4 +-
.../scala/collection/generic/CanBuildFrom.scala | 4 +-
.../scala/collection/generic/CanCombineFrom.scala | 20 +-
.../generic/ClassManifestTraversableFactory.scala | 19 -
.../generic/ClassTagTraversableFactory.scala | 32 +
.../scala/collection/generic/Clearable.scala | 26 +
.../scala/collection/generic/FilterMonadic.scala | 6 +-
.../scala/collection/generic/GenMapFactory.scala | 7 +-
.../scala/collection/generic/GenSeqFactory.scala | 15 +-
.../scala/collection/generic/GenSetFactory.scala | 5 +-
.../collection/generic/GenTraversableFactory.scala | 21 +-
.../generic/GenericClassManifestCompanion.scala | 31 -
.../GenericClassManifestTraversableTemplate.scala | 24 -
.../generic/GenericClassTagCompanion.scala | 33 +
.../GenericClassTagTraversableTemplate.scala | 31 +
.../collection/generic/GenericCompanion.scala | 11 +-
.../generic/GenericOrderedCompanion.scala | 12 +-
.../GenericOrderedTraversableTemplate.scala | 7 +-
.../collection/generic/GenericParCompanion.scala | 7 +-
.../collection/generic/GenericParTemplate.scala | 22 +-
.../collection/generic/GenericSeqCompanion.scala | 14 +-
.../collection/generic/GenericSetTemplate.scala | 6 +-
.../generic/GenericTraversableTemplate.scala | 48 +-
.../scala/collection/generic/Growable.scala | 10 +-
.../scala/collection/generic/HasNewBuilder.scala | 4 +-
.../scala/collection/generic/HasNewCombiner.scala | 24 +-
.../collection/generic/ImmutableMapFactory.scala | 4 +-
.../collection/generic/ImmutableSetFactory.scala | 3 +-
.../generic/ImmutableSortedMapFactory.scala | 6 +-
.../generic/ImmutableSortedSetFactory.scala | 10 +-
.../collection/generic/IsTraversableLike.scala | 129 +
.../collection/generic/IsTraversableOnce.scala | 62 +
.../collection/generic/IterableForwarder.scala | 29 +-
.../scala/collection/generic/MapFactory.scala | 6 +-
.../collection/generic/MutableMapFactory.scala | 3 +-
.../collection/generic/MutableSetFactory.scala | 3 +-
.../generic/MutableSortedSetFactory.scala | 34 +
.../generic/OrderedTraversableFactory.scala | 6 +-
.../scala/collection/generic/ParFactory.scala | 30 +-
.../scala/collection/generic/ParMapFactory.scala | 10 +-
.../scala/collection/generic/ParSetFactory.scala | 28 +-
.../scala/collection/generic/SeqFactory.scala | 16 +-
.../scala/collection/generic/SeqForwarder.scala | 37 +-
.../scala/collection/generic/SetFactory.scala | 13 +-
.../scala/collection/generic/Shrinkable.scala | 6 +-
.../scala/collection/generic/Signalling.scala | 2 +-
src/library/scala/collection/generic/Sizing.scala | 8 +
.../scala/collection/generic/SliceInterval.scala | 4 +-
src/library/scala/collection/generic/Sorted.scala | 22 +-
.../collection/generic/SortedMapFactory.scala | 3 +-
.../collection/generic/SortedSetFactory.scala | 3 +-
.../scala/collection/generic/Subtractable.scala | 8 +-
.../collection/generic/TraversableFactory.scala | 49 +-
.../collection/generic/TraversableForwarder.scala | 32 +-
src/library/scala/collection/generic/package.scala | 13 +-
.../scala/collection/immutable/BitSet.scala | 60 +-
.../scala/collection/immutable/DefaultMap.scala | 2 +-
.../immutable/GenIterable.scala.disabled | 4 +-
.../collection/immutable/GenMap.scala.disabled | 2 +-
.../collection/immutable/GenSeq.scala.disabled | 6 +-
.../collection/immutable/GenSet.scala.disabled | 6 +-
.../immutable/GenTraversable.scala.disabled | 4 +-
.../scala/collection/immutable/HashMap.scala | 258 +-
.../scala/collection/immutable/HashSet.scala | 97 +-
.../scala/collection/immutable/IndexedSeq.scala | 14 +-
.../scala/collection/immutable/IntMap.scala | 365 +-
.../scala/collection/immutable/Iterable.scala | 8 +-
.../scala/collection/immutable/LinearSeq.scala | 7 +-
src/library/scala/collection/immutable/List.scala | 339 +-
.../scala/collection/immutable/ListMap.scala | 63 +-
.../scala/collection/immutable/ListSet.scala | 48 +-
.../scala/collection/immutable/LongMap.scala | 345 +-
src/library/scala/collection/immutable/Map.scala | 28 +-
.../scala/collection/immutable/MapLike.scala | 33 +-
.../scala/collection/immutable/MapProxy.scala | 2 +-
.../scala/collection/immutable/NumericRange.scala | 45 +-
.../scala/collection/immutable/PagedSeq.scala | 20 +-
src/library/scala/collection/immutable/Queue.scala | 49 +-
src/library/scala/collection/immutable/Range.scala | 191 +-
.../scala/collection/immutable/RedBlack.scala | 27 +-
.../scala/collection/immutable/RedBlackTree.scala | 496 +
src/library/scala/collection/immutable/Seq.scala | 8 +-
src/library/scala/collection/immutable/Set.scala | 28 +-
.../scala/collection/immutable/SetProxy.scala | 4 +-
.../scala/collection/immutable/SortedMap.scala | 36 +-
.../scala/collection/immutable/SortedSet.scala | 10 +-
src/library/scala/collection/immutable/Stack.scala | 21 +-
.../scala/collection/immutable/Stream.scala | 148 +-
.../collection/immutable/StreamViewLike.scala | 37 +-
.../scala/collection/immutable/StringLike.scala | 104 +-
.../scala/collection/immutable/StringOps.scala | 8 +-
.../scala/collection/immutable/Traversable.scala | 6 +-
.../scala/collection/immutable/TreeMap.scala | 106 +-
.../scala/collection/immutable/TreeSet.scala | 97 +-
.../scala/collection/immutable/TrieIterator.scala | 14 +-
.../scala/collection/immutable/Vector.scala | 78 +-
.../scala/collection/immutable/WrappedString.scala | 7 +-
.../scala/collection/immutable/package.scala | 98 +-
.../collection/interfaces/IterableMethods.scala | 39 -
.../scala/collection/interfaces/MapMethods.scala | 45 -
.../scala/collection/interfaces/SeqMethods.scala | 71 -
.../scala/collection/interfaces/SetMethods.scala | 52 -
.../collection/interfaces/TraversableMethods.scala | 63 -
.../interfaces/TraversableOnceMethods.scala | 77 -
src/library/scala/collection/mutable/AVLTree.scala | 242 +
.../scala/collection/mutable/AddingBuilder.scala | 37 -
.../scala/collection/mutable/ArrayBuffer.scala | 23 +-
.../scala/collection/mutable/ArrayBuilder.scala | 30 +-
.../scala/collection/mutable/ArrayLike.scala | 26 +-
.../scala/collection/mutable/ArrayOps.scala | 85 +-
.../scala/collection/mutable/ArraySeq.scala | 18 +-
.../scala/collection/mutable/ArrayStack.scala | 55 +-
src/library/scala/collection/mutable/BitSet.scala | 26 +-
src/library/scala/collection/mutable/Buffer.scala | 14 +-
.../scala/collection/mutable/BufferLike.scala | 66 +-
.../scala/collection/mutable/BufferProxy.scala | 27 +-
src/library/scala/collection/mutable/Builder.scala | 27 +-
.../scala/collection/mutable/Cloneable.scala | 9 +-
.../scala/collection/mutable/ConcurrentMap.scala | 38 +-
.../scala/collection/mutable/DefaultEntry.scala | 6 +-
.../scala/collection/mutable/DefaultMapModel.scala | 2 +-
.../collection/mutable/DoubleLinkedList.scala | 18 +-
.../collection/mutable/DoubleLinkedListLike.scala | 6 +-
.../scala/collection/mutable/FlatHashTable.scala | 118 +-
.../collection/mutable/GenIterable.scala.disabled | 4 +-
.../scala/collection/mutable/GenMap.scala.disabled | 2 +-
.../scala/collection/mutable/GenSeq.scala.disabled | 6 +-
.../scala/collection/mutable/GenSet.scala.disabled | 6 +-
.../mutable/GenTraversable.scala.disabled | 4 +-
.../scala/collection/mutable/GrowingBuilder.scala | 4 +-
.../scala/collection/mutable/HashEntry.scala | 2 +-
src/library/scala/collection/mutable/HashMap.scala | 64 +-
src/library/scala/collection/mutable/HashSet.scala | 19 +-
.../scala/collection/mutable/HashTable.scala | 168 +-
src/library/scala/collection/mutable/History.scala | 6 +-
.../collection/mutable/ImmutableMapAdaptor.scala | 17 +-
.../collection/mutable/ImmutableSetAdaptor.scala | 10 +-
.../scala/collection/mutable/IndexedSeq.scala | 7 +-
.../scala/collection/mutable/IndexedSeqLike.scala | 8 +-
.../collection/mutable/IndexedSeqOptimized.scala | 4 +-
.../scala/collection/mutable/IndexedSeqView.scala | 18 +-
.../scala/collection/mutable/Iterable.scala | 8 +-
.../scala/collection/mutable/LazyBuilder.scala | 2 +-
.../scala/collection/mutable/LinearSeq.scala | 9 +-
.../scala/collection/mutable/LinkedEntry.scala | 2 +-
.../scala/collection/mutable/LinkedHashMap.scala | 85 +-
.../scala/collection/mutable/LinkedHashSet.scala | 96 +-
.../scala/collection/mutable/LinkedList.scala | 11 +-
.../scala/collection/mutable/LinkedListLike.scala | 18 +-
.../scala/collection/mutable/ListBuffer.scala | 174 +-
src/library/scala/collection/mutable/ListMap.scala | 30 +-
src/library/scala/collection/mutable/Map.scala | 9 +-
.../scala/collection/mutable/MapBuilder.scala | 2 +-
src/library/scala/collection/mutable/MapLike.scala | 41 +-
.../scala/collection/mutable/MapProxy.scala | 7 +-
.../scala/collection/mutable/MultiMap.scala | 42 +-
.../scala/collection/mutable/MutableList.scala | 46 +-
.../collection/mutable/ObservableBuffer.scala | 21 +-
.../scala/collection/mutable/ObservableMap.scala | 2 +-
.../scala/collection/mutable/ObservableSet.scala | 2 +-
.../scala/collection/mutable/OpenHashMap.scala | 152 +-
.../scala/collection/mutable/PriorityQueue.scala | 72 +-
.../collection/mutable/PriorityQueueProxy.scala | 2 +-
.../scala/collection/mutable/Publisher.scala | 6 +-
src/library/scala/collection/mutable/Queue.scala | 52 +-
.../scala/collection/mutable/QueueProxy.scala | 17 +-
.../scala/collection/mutable/ResizableArray.scala | 29 +-
.../collection/mutable/RevertibleHistory.scala | 2 +-
src/library/scala/collection/mutable/Seq.scala | 11 +-
src/library/scala/collection/mutable/SeqLike.scala | 6 +-
src/library/scala/collection/mutable/Set.scala | 8 +-
.../scala/collection/mutable/SetBuilder.scala | 4 +-
src/library/scala/collection/mutable/SetLike.scala | 14 +-
.../scala/collection/mutable/SetProxy.scala | 6 +-
.../scala/collection/mutable/SortedSet.scala | 49 +
src/library/scala/collection/mutable/Stack.scala | 46 +-
.../scala/collection/mutable/StackProxy.scala | 26 +-
.../scala/collection/mutable/StringBuilder.scala | 71 +-
.../scala/collection/mutable/Subscriber.scala | 12 +-
.../collection/mutable/SynchronizedBuffer.scala | 44 +-
.../scala/collection/mutable/SynchronizedMap.scala | 12 +-
.../mutable/SynchronizedPriorityQueue.scala | 4 +-
.../collection/mutable/SynchronizedQueue.scala | 21 +-
.../scala/collection/mutable/SynchronizedSet.scala | 4 +-
.../collection/mutable/SynchronizedStack.scala | 4 +-
.../scala/collection/mutable/Traversable.scala | 6 +-
src/library/scala/collection/mutable/TreeSet.scala | 123 +
.../scala/collection/mutable/Undoable.scala | 2 +-
.../scala/collection/mutable/UnrolledBuffer.scala | 58 +-
.../scala/collection/mutable/WeakHashMap.scala | 11 +-
.../scala/collection/mutable/WrappedArray.scala | 52 +-
.../collection/mutable/WrappedArrayBuilder.scala | 28 +-
src/library/scala/collection/package.scala | 16 +-
.../scala/collection/parallel/Combiner.scala | 35 +-
.../scala/collection/parallel/ParIterable.scala | 4 +-
.../collection/parallel/ParIterableLike.scala | 589 +-
.../collection/parallel/ParIterableView.scala | 2 +-
.../collection/parallel/ParIterableViewLike.scala | 6 +-
src/library/scala/collection/parallel/ParMap.scala | 52 +-
.../scala/collection/parallel/ParMapLike.scala | 99 +-
src/library/scala/collection/parallel/ParSeq.scala | 2 +-
.../scala/collection/parallel/ParSeqLike.scala | 175 +-
.../scala/collection/parallel/ParSeqView.scala | 2 +-
.../scala/collection/parallel/ParSeqViewLike.scala | 14 +-
src/library/scala/collection/parallel/ParSet.scala | 2 +-
.../scala/collection/parallel/ParSetLike.scala | 2 +-
.../collection/parallel/PreciseSplitter.scala | 2 +-
.../collection/parallel/RemainsIterator.scala | 72 +-
.../scala/collection/parallel/Splitter.scala | 2 +-
.../scala/collection/parallel/TaskSupport.scala | 82 +-
src/library/scala/collection/parallel/Tasks.scala | 283 +-
.../collection/parallel/immutable/ParHashMap.scala | 71 +-
.../collection/parallel/immutable/ParHashSet.scala | 82 +-
.../parallel/immutable/ParIterable.scala | 9 +-
.../collection/parallel/immutable/ParMap.scala | 70 +-
.../immutable/ParNumericRange.scala.disabled | 4 +-
.../collection/parallel/immutable/ParRange.scala | 27 +-
.../collection/parallel/immutable/ParSeq.scala | 12 +-
.../collection/parallel/immutable/ParSet.scala | 36 +-
.../collection/parallel/immutable/ParVector.scala | 21 +-
.../collection/parallel/immutable/package.scala | 32 +-
.../collection/parallel/mutable/LazyCombiner.scala | 15 +-
.../collection/parallel/mutable/ParArray.scala | 57 +-
.../parallel/mutable/ParFlatHashTable.scala | 10 +-
.../collection/parallel/mutable/ParHashMap.scala | 90 +-
.../collection/parallel/mutable/ParHashSet.scala | 69 +-
.../collection/parallel/mutable/ParHashTable.scala | 14 +-
.../collection/parallel/mutable/ParIterable.scala | 11 +-
.../scala/collection/parallel/mutable/ParMap.scala | 53 +-
.../collection/parallel/mutable/ParMapLike.scala | 22 +-
.../scala/collection/parallel/mutable/ParSeq.scala | 14 +-
.../scala/collection/parallel/mutable/ParSet.scala | 14 +-
.../collection/parallel/mutable/ParSetLike.scala | 13 +-
.../collection/parallel/mutable/ParTrieMap.scala | 195 +
.../mutable/ResizableParArrayCombiner.scala | 32 +-
.../mutable/UnrolledParArrayCombiner.scala | 26 +-
.../collection/parallel/mutable/package.scala | 19 +-
.../scala/collection/parallel/package.scala | 181 +-
.../readme-if-you-want-to-add-something.txt | 0
src/library/scala/collection/script/Location.scala | 9 +-
src/library/scala/collection/script/Message.scala | 15 +-
.../scala/collection/script/Scriptable.scala | 9 +-
src/library/scala/compat/Platform.scala | 4 +-
src/library/scala/concurrent/Awaitable.scala | 64 +
src/library/scala/concurrent/BlockContext.scala | 77 +
src/library/scala/concurrent/Channel.scala | 4 +-
src/library/scala/concurrent/DelayedLazyVal.scala | 25 +-
.../scala/concurrent/ExecutionContext.scala | 89 +
src/library/scala/concurrent/Future.scala | 706 +
.../scala/concurrent/FutureTaskRunner.scala | 14 +-
src/library/scala/concurrent/JavaConversions.scala | 23 +-
src/library/scala/concurrent/Lock.scala | 2 +-
src/library/scala/concurrent/MailBox.scala | 179 -
src/library/scala/concurrent/ManagedBlocker.scala | 14 +-
src/library/scala/concurrent/Promise.scala | 152 +
src/library/scala/concurrent/SyncChannel.scala | 11 +-
src/library/scala/concurrent/SyncVar.scala | 72 +-
src/library/scala/concurrent/TIMEOUT.scala | 21 -
src/library/scala/concurrent/TaskRunner.scala | 9 +-
src/library/scala/concurrent/TaskRunners.scala | 7 +-
.../scala/concurrent/ThreadPoolRunner.scala | 10 +-
src/library/scala/concurrent/ThreadRunner.scala | 9 +-
.../scala/concurrent/duration/Deadline.scala | 81 +
.../scala/concurrent/duration/Duration.scala | 698 +
.../concurrent/duration/DurationConversions.scala | 92 +
.../scala/concurrent/duration/package.scala | 75 +
.../scala/concurrent/impl/AbstractPromise.java | 40 +
.../concurrent/impl/ExecutionContextImpl.scala | 149 +
src/library/scala/concurrent/impl/Future.scala | 34 +
src/library/scala/concurrent/impl/Promise.scala | 341 +
src/library/scala/concurrent/ops.scala | 31 +-
src/library/scala/concurrent/package.scala | 109 +
.../scala/concurrent/package.scala.disabled | 108 -
src/library/scala/concurrent/pilib.scala | 208 -
src/library/scala/deprecated.scala | 6 +-
src/library/scala/deprecatedInheritance.scala | 22 +
src/library/scala/deprecatedName.scala | 23 +-
src/library/scala/deprecatedOverriding.scala | 21 +
src/library/scala/inline.scala | 4 +-
src/library/scala/io/BufferedSource.scala | 12 +-
src/library/scala/io/BytePickle.scala | 15 +-
src/library/scala/io/Codec.scala | 35 +-
src/library/scala/io/Position.scala | 29 +-
src/library/scala/io/Source.scala | 58 +-
src/library/scala/io/UTF8Codec.scala | 38 +-
src/library/scala/language.scala | 173 +
src/library/scala/languageFeature.scala | 37 +
src/library/scala/math/BigDecimal.scala | 52 +-
src/library/scala/math/BigInt.scala | 122 +-
src/library/scala/math/Equiv.scala | 4 +-
src/library/scala/math/Fractional.scala | 6 +-
src/library/scala/math/Integral.scala | 6 +-
src/library/scala/math/Numeric.scala | 8 +-
src/library/scala/math/Ordered.scala | 19 +-
src/library/scala/math/Ordering.scala | 52 +-
src/library/scala/math/PartialOrdering.scala | 73 +-
src/library/scala/math/PartiallyOrdered.scala | 14 +-
src/library/scala/math/ScalaNumber.java | 2 +-
.../scala/math/ScalaNumericConversions.scala | 23 +-
src/library/scala/math/package.scala | 170 +-
src/library/scala/native.scala | 4 +-
src/library/scala/noinline.scala | 4 +-
src/library/scala/package.scala | 107 +-
src/library/scala/parallel/Future.scala | 30 +-
src/library/scala/ref/PhantomReference.scala | 2 +-
src/library/scala/ref/Reference.scala | 7 +-
src/library/scala/ref/ReferenceQueue.scala | 2 +-
src/library/scala/ref/ReferenceWrapper.scala | 10 +-
src/library/scala/ref/SoftReference.scala | 2 +-
src/library/scala/ref/WeakReference.scala | 18 +-
src/library/scala/reflect/BeanDescription.scala | 19 -
src/library/scala/reflect/BeanDisplayName.scala | 18 -
src/library/scala/reflect/BeanInfo.scala | 21 -
src/library/scala/reflect/BeanInfoSkip.scala | 18 -
src/library/scala/reflect/BeanProperty.scala | 34 -
.../scala/reflect/BooleanBeanProperty.scala | 23 -
src/library/scala/reflect/ClassManifest.scala | 242 -
.../reflect/ClassManifestDeprecatedApis.scala | 240 +
src/library/scala/reflect/ClassTag.scala | 148 +
src/library/scala/reflect/Code.scala | 20 -
src/library/scala/reflect/Manifest.scala | 151 +-
src/library/scala/reflect/NameTransformer.scala | 19 +-
src/library/scala/reflect/NoManifest.scala | 10 +-
src/library/scala/reflect/OptManifest.scala | 16 +-
src/library/scala/reflect/Print.scala | 113 -
src/library/scala/reflect/ScalaBeanInfo.scala | 46 -
src/library/scala/reflect/ScalaLongSignature.java | 1 -
src/library/scala/reflect/ScalaSignature.java | 1 -
src/library/scala/reflect/Symbol.scala | 85 -
src/library/scala/reflect/Tree.scala | 52 -
src/library/scala/reflect/Type.scala | 67 -
.../scala/reflect/generic/AnnotationInfos.scala | 42 -
src/library/scala/reflect/generic/ByteCodecs.scala | 216 -
src/library/scala/reflect/generic/Constants.scala | 238 -
src/library/scala/reflect/generic/Flags.scala | 264 -
src/library/scala/reflect/generic/HasFlags.scala | 231 -
src/library/scala/reflect/generic/Names.scala | 24 -
.../scala/reflect/generic/PickleBuffer.scala | 187 -
.../scala/reflect/generic/PickleFormat.scala | 224 -
src/library/scala/reflect/generic/Scopes.scala | 15 -
.../reflect/generic/StandardDefinitions.scala | 66 -
src/library/scala/reflect/generic/StdNames.scala | 44 -
src/library/scala/reflect/generic/Symbols.scala | 187 -
src/library/scala/reflect/generic/Trees.scala | 730 -
src/library/scala/reflect/generic/Types.scala | 165 -
src/library/scala/reflect/generic/UnPickler.scala | 862 --
src/library/scala/reflect/generic/Universe.scala | 17 -
.../scala/reflect/macros/internal/macroImpl.scala | 18 +
src/library/scala/reflect/package.scala | 68 +
src/library/scala/remote.scala | 19 +-
src/library/scala/runtime/AbstractFunction0.scala | 2 +-
src/library/scala/runtime/AbstractFunction1.scala | 2 +-
.../scala/runtime/AbstractPartialFunction.scala | 34 +
src/library/scala/runtime/AnyValCompanion.scala | 14 -
src/library/scala/runtime/ArrayRuntime.java | 10 +
src/library/scala/runtime/BooleanRef.java | 2 +-
src/library/scala/runtime/Boxed.scala | 2 +-
src/library/scala/runtime/BoxedUnit.java | 4 +-
src/library/scala/runtime/BoxesRunTime.java | 378 +-
src/library/scala/runtime/ByteRef.java | 2 +-
src/library/scala/runtime/CharRef.java | 2 +-
src/library/scala/runtime/DoubleRef.java | 2 +-
.../scala/runtime/DynamicDispatch.java-notyet | 42 -
src/library/scala/runtime/FloatRef.java | 2 +-
src/library/scala/runtime/IntRef.java | 2 +-
src/library/scala/runtime/LongRef.java | 2 +-
src/library/scala/runtime/MethodCache.scala | 28 +-
.../scala/runtime/NonLocalReturnControl.scala | 8 +-
src/library/scala/runtime/Nothing$.scala | 10 +-
src/library/scala/runtime/Null$.scala | 14 +-
src/library/scala/runtime/ObjectRef.java | 8 +-
src/library/scala/runtime/RichBoolean.scala | 8 +-
src/library/scala/runtime/RichByte.scala | 9 +-
src/library/scala/runtime/RichChar.scala | 17 +-
src/library/scala/runtime/RichDouble.scala | 28 +-
src/library/scala/runtime/RichException.scala | 6 +-
src/library/scala/runtime/RichFloat.scala | 29 +-
src/library/scala/runtime/RichInt.scala | 35 +-
src/library/scala/runtime/RichLong.scala | 15 +-
src/library/scala/runtime/RichShort.scala | 7 +-
src/library/scala/runtime/ScalaNumberProxy.scala | 31 +-
src/library/scala/runtime/ScalaRunTime.scala | 266 +-
src/library/scala/runtime/SeqCharSequence.scala | 45 +
src/library/scala/runtime/ShortRef.java | 2 +-
src/library/scala/runtime/Statics.java | 89 +
src/library/scala/runtime/StringAdd.scala | 15 +-
src/library/scala/runtime/StringFormat.scala | 19 +
src/library/scala/runtime/TraitSetter.java | 0
src/library/scala/runtime/Tuple2Zipped.scala | 130 +
src/library/scala/runtime/Tuple3Zipped.scala | 140 +
src/library/scala/runtime/VolatileBooleanRef.java | 2 +-
src/library/scala/runtime/VolatileByteRef.java | 2 +-
src/library/scala/runtime/VolatileCharRef.java | 2 +-
src/library/scala/runtime/VolatileDoubleRef.java | 2 +-
src/library/scala/runtime/VolatileFloatRef.java | 2 +-
src/library/scala/runtime/VolatileIntRef.java | 2 +-
src/library/scala/runtime/VolatileLongRef.java | 2 +-
src/library/scala/runtime/VolatileObjectRef.java | 8 +-
src/library/scala/runtime/VolatileShortRef.java | 2 +-
src/library/scala/runtime/WorksheetSupport.scala | 94 +
src/library/scala/runtime/package.scala | 12 +-
src/library/scala/specialized.scala | 27 +-
src/library/scala/sys/BooleanProp.scala | 4 +-
src/library/scala/sys/Prop.scala | 7 +-
src/library/scala/sys/PropImpl.scala | 2 +-
src/library/scala/sys/ShutdownHookThread.scala | 3 +-
src/library/scala/sys/SystemProperties.scala | 10 +-
src/library/scala/sys/package.scala | 12 +-
src/library/scala/sys/process/BasicIO.scala | 6 +-
src/library/scala/sys/process/Process.scala | 3 +-
src/library/scala/sys/process/ProcessBuilder.scala | 8 +-
.../scala/sys/process/ProcessBuilderImpl.scala | 6 +-
src/library/scala/sys/process/ProcessIO.scala | 2 +-
src/library/scala/sys/process/ProcessImpl.scala | 9 +-
src/library/scala/sys/process/ProcessLogger.scala | 2 +-
src/library/scala/sys/process/package.scala | 4 +-
src/library/scala/testing/Benchmark.scala | 21 +-
src/library/scala/testing/SUnit.scala | 272 -
src/library/scala/testing/Show.scala | 28 +-
src/library/scala/text/Document.scala | 14 +-
src/library/scala/throws.scala | 16 +-
src/library/scala/transient.scala | 6 +-
src/library/scala/unchecked.scala | 46 +-
src/library/scala/util/DynamicVariable.scala | 12 +-
src/library/scala/util/Either.scala | 594 +
src/library/scala/util/Marshal.scala | 34 +-
src/library/scala/util/MurmurHash.scala | 7 +-
src/library/scala/util/Properties.scala | 80 +-
src/library/scala/util/Random.scala | 32 +-
src/library/scala/util/Sorting.scala | 43 +-
src/library/scala/util/Try.scala | 217 +
.../scala/util/automata/BaseBerrySethi.scala | 48 +-
src/library/scala/util/automata/DetWordAutom.scala | 3 +-
src/library/scala/util/automata/Inclusion.scala | 10 +-
.../scala/util/automata/NondetWordAutom.scala | 40 +-
.../scala/util/automata/SubsetConstruction.scala | 28 +-
.../scala/util/automata/WordBerrySethi.scala | 60 +-
src/library/scala/util/control/Breaks.scala | 15 +-
.../scala/util/control/ControlThrowable.scala | 36 +-
src/library/scala/util/control/Exception.scala | 139 +-
src/library/scala/util/control/NoStackTrace.scala | 11 +-
src/library/scala/util/control/NonFatal.scala | 45 +
src/library/scala/util/control/TailCalls.scala | 28 +-
src/library/scala/util/grammar/HedgeRHS.scala | 12 +-
src/library/scala/util/grammar/TreeRHS.scala | 9 +-
.../scala/util/hashing/ByteswapHashing.scala | 35 +
src/library/scala/util/hashing/Hashing.scala | 39 +
src/library/scala/util/hashing/MurmurHash3.scala | 285 +
src/library/scala/util/hashing/package.scala | 35 +
src/library/scala/util/logging/ConsoleLogger.scala | 9 +-
src/library/scala/util/logging/Logged.scala | 27 +-
src/library/scala/util/matching/Regex.scala | 77 +-
.../scala/util/parsing/ast/AbstractSyntax.scala | 5 +-
src/library/scala/util/parsing/ast/Binders.scala | 31 +-
.../parsing/combinator/ImplicitConversions.scala | 31 +-
.../util/parsing/combinator/JavaTokenParsers.scala | 18 +-
.../util/parsing/combinator/PackratParsers.scala | 94 +-
.../scala/util/parsing/combinator/Parsers.scala | 129 +-
.../util/parsing/combinator/RegexParsers.scala | 5 +-
.../util/parsing/combinator/lexical/Lexical.scala | 25 +-
.../util/parsing/combinator/lexical/Scanners.scala | 32 +-
.../parsing/combinator/lexical/StdLexical.scala | 29 +-
.../syntactical/StandardTokenParsers.scala | 5 +-
.../combinator/syntactical/StdTokenParsers.scala | 11 +-
.../combinator/syntactical/TokenParsers.scala | 9 +-
.../parsing/combinator/testing/RegexTest.scala | 5 +
.../util/parsing/combinator/testing/Tester.scala | 40 +-
.../util/parsing/combinator/token/StdTokens.scala | 7 +-
.../util/parsing/combinator/token/Tokens.scala | 16 +-
.../scala/util/parsing/input/CharArrayReader.scala | 19 +-
.../util/parsing/input/CharSequenceReader.scala | 2 +-
.../scala/util/parsing/input/NoPosition.scala | 7 +-
.../scala/util/parsing/input/OffsetPosition.scala | 33 +-
.../scala/util/parsing/input/PagedSeqReader.scala | 4 +-
.../scala/util/parsing/input/Position.scala | 56 +-
.../scala/util/parsing/input/Positional.scala | 4 +-
src/library/scala/util/parsing/input/Reader.scala | 23 +-
.../scala/util/parsing/input/StreamReader.scala | 17 +-
src/library/scala/util/parsing/json/JSON.scala | 65 +-
src/library/scala/util/parsing/json/Lexer.scala | 2 +-
src/library/scala/util/parsing/json/Parser.scala | 2 +-
.../scala/util/parsing/syntax/package.scala | 21 -
src/library/scala/util/regexp/Base.scala | 11 +-
.../scala/util/regexp/PointedHedgeExp.scala | 6 +-
src/library/scala/util/regexp/SyntaxError.scala | 3 +-
src/library/scala/util/regexp/WordExp.scala | 42 +-
src/library/scala/volatile.scala | 6 +-
src/library/scala/xml/Atom.scala | 25 +-
src/library/scala/xml/Attribute.scala | 35 +-
src/library/scala/xml/Comment.scala | 15 +-
src/library/scala/xml/Document.scala | 10 +-
src/library/scala/xml/Elem.scala | 75 +-
src/library/scala/xml/EntityRef.scala | 15 +-
src/library/scala/xml/Equality.scala | 36 +-
src/library/scala/xml/Group.scala | 7 +-
src/library/scala/xml/HasKeyValue.scala | 25 -
.../scala/xml/MalformedAttributeException.scala | 2 +-
src/library/scala/xml/MetaData.scala | 92 +-
src/library/scala/xml/NamespaceBinding.scala | 13 +-
src/library/scala/xml/Node.scala | 54 +-
src/library/scala/xml/NodeBuffer.scala | 29 +-
src/library/scala/xml/NodeSeq.scala | 50 +-
src/library/scala/xml/Null.scala | 24 +-
src/library/scala/xml/PCData.scala | 41 +-
src/library/scala/xml/PrefixedAttribute.scala | 25 +-
src/library/scala/xml/PrettyPrinter.scala | 46 +-
src/library/scala/xml/ProcInstr.scala | 6 +-
src/library/scala/xml/QNode.scala | 12 +-
src/library/scala/xml/SpecialNode.scala | 21 +-
src/library/scala/xml/Text.scala | 49 +-
src/library/scala/xml/TextBuffer.scala | 13 +-
src/library/scala/xml/TopScope.scala | 7 +-
src/library/scala/xml/TypeSymbol.scala | 2 +-
src/library/scala/xml/Unparsed.scala | 26 +-
src/library/scala/xml/UnprefixedAttribute.scala | 6 +-
src/library/scala/xml/Utility.scala | 200 +-
src/library/scala/xml/XML.scala | 48 +-
src/library/scala/xml/dtd/ContentModel.scala | 13 +-
src/library/scala/xml/dtd/ContentModelParser.scala | 17 +-
src/library/scala/xml/dtd/DTD.scala | 11 +-
src/library/scala/xml/dtd/Decl.scala | 75 +-
src/library/scala/xml/dtd/DocType.scala | 4 +-
src/library/scala/xml/dtd/ElementValidator.scala | 8 +-
src/library/scala/xml/dtd/ExternalID.scala | 8 +-
src/library/scala/xml/dtd/Scanner.scala | 6 +-
src/library/scala/xml/dtd/Tokens.scala | 2 +-
.../scala/xml/dtd/ValidationException.scala | 2 +-
src/library/scala/xml/factory/Binder.scala | 12 +-
.../scala/xml/factory/LoggedNodeFactory.scala | 4 +-
src/library/scala/xml/factory/NodeFactory.scala | 4 +-
src/library/scala/xml/factory/XMLLoader.scala | 2 +-
.../xml/include/CircularIncludeException.scala | 20 +-
.../xml/include/UnavailableResourceException.scala | 10 +-
.../scala/xml/include/XIncludeException.scala | 41 +-
.../scala/xml/include/sax/EncodingHeuristics.scala | 2 +-
src/library/scala/xml/include/sax/Main.scala | 4 +-
.../scala/xml/include/sax/XIncludeFilter.scala | 175 +-
src/library/scala/xml/include/sax/XIncluder.scala | 20 +-
src/library/scala/xml/package.scala | 10 +-
.../scala/xml/parsing/ConstructingHandler.scala | 6 +-
.../scala/xml/parsing/ConstructingParser.scala | 29 +-
.../scala/xml/parsing/DefaultMarkupHandler.scala | 6 +-
.../scala/xml/parsing/ExternalSources.scala | 7 +-
src/library/scala/xml/parsing/FactoryAdapter.scala | 6 +-
src/library/scala/xml/parsing/FatalError.scala | 2 +-
src/library/scala/xml/parsing/MarkupHandler.scala | 12 +-
src/library/scala/xml/parsing/MarkupParser.scala | 310 +-
.../scala/xml/parsing/MarkupParserCommon.scala | 16 +-
.../xml/parsing/NoBindingFactoryAdapter.scala | 2 +-
src/library/scala/xml/parsing/TokenTests.scala | 48 +-
.../xml/parsing/ValidatingMarkupHandler.scala | 2 +-
src/library/scala/xml/parsing/XhtmlEntities.scala | 9 +-
src/library/scala/xml/parsing/XhtmlParser.scala | 22 +-
.../scala/xml/persistent/CachedFileStorage.scala | 44 +-
src/library/scala/xml/persistent/Index.scala | 2 +-
src/library/scala/xml/persistent/SetStorage.scala | 2 +-
src/library/scala/xml/pull/XMLEvent.scala | 4 +-
src/library/scala/xml/pull/XMLEventReader.scala | 16 +-
.../scala/xml/transform/BasicTransformer.scala | 7 +-
src/library/scala/xml/transform/RewriteRule.scala | 2 +-
.../scala/xml/transform/RuleTransformer.scala | 2 +-
src/manual/scala/man1/Command.scala | 16 +-
src/manual/scala/man1/fsc.scala | 16 +-
src/manual/scala/man1/sbaz.scala | 205 -
src/manual/scala/man1/scala.scala | 27 +-
src/manual/scala/man1/scalac.scala | 231 +-
src/manual/scala/man1/scaladoc.scala | 21 +-
src/manual/scala/man1/scalap.scala | 11 +-
src/manual/scala/tools/docutil/EmitHtml.scala | 302 +-
src/manual/scala/tools/docutil/EmitManPage.scala | 96 +-
src/manual/scala/tools/docutil/ManMaker.scala | 12 +-
src/manual/scala/tools/docutil/ManPage.scala | 2 +-
.../scala/tools/docutil/resources/css/style.css | 15 +-
.../scala/tools/docutil/resources/index.html | 33 +-
.../epfl/lamp/compiler/msil/emit/ILGenerator.scala | 1 -
.../lamp/compiler/msil/emit/ILPrinterVisitor.scala | 2 +-
.../msil/emit/MultipleFilesILPrinterVisitor.scala | 8 +-
.../ch/epfl/lamp/compiler/msil/emit/OpCode.scala | 4 +-
.../ch/epfl/lamp/compiler/msil/emit/OpCodes.scala | 4 +-
.../msil/emit/SingleFileILPrinterVisitor.scala | 8 +-
.../epfl/lamp/compiler/msil/emit/TypeBuilder.scala | 6 +-
src/partest-alternative/README | 50 -
.../scala/tools/partest/Actions.scala | 189 -
.../scala/tools/partest/Alarms.scala | 86 -
.../scala/tools/partest/BuildContributors.scala | 102 -
.../scala/tools/partest/Categories.scala | 70 -
.../scala/tools/partest/Compilable.scala | 106 -
.../scala/tools/partest/Config.scala | 115 -
.../scala/tools/partest/Dispatcher.scala | 162 -
.../scala/tools/partest/Entities.scala | 74 -
.../scala/tools/partest/Housekeeping.scala | 187 -
.../scala/tools/partest/Partest.scala | 81 -
.../scala/tools/partest/PartestSpec.scala | 104 -
.../scala/tools/partest/Properties.scala | 17 -
.../scala/tools/partest/Results.scala | 121 -
.../scala/tools/partest/Runner.scala | 36 -
.../scala/tools/partest/Statistics.scala | 46 -
.../scala/tools/partest/Universe.scala | 96 -
.../scala/tools/partest/ant/JavaTask.scala | 57 -
.../scala/tools/partest/antlib.xml | 3 -
.../tools/partest/category/AllCategories.scala | 20 -
.../scala/tools/partest/category/Analysis.scala | 64 -
.../scala/tools/partest/category/Compiler.scala | 140 -
.../scala/tools/partest/category/Runner.scala | 108 -
.../scala/tools/partest/io/ANSIWriter.scala | 58 -
.../scala/tools/partest/io/Diff.java | 873 --
.../scala/tools/partest/io/DiffPrint.java | 606 -
.../scala/tools/partest/io/JUnitReport.scala | 38 -
.../scala/tools/partest/io/Logging.scala | 137 -
.../scala/tools/partest/package.scala | 45 -
.../scala/tools/partest/util/package.scala | 61 -
.../scala/tools/partest/ASMConverters.scala | 71 +
src/partest/scala/tools/partest/AsmNode.scala | 60 +
src/partest/scala/tools/partest/BytecodeTest.scala | 129 +
src/partest/scala/tools/partest/CompilerTest.scala | 61 +
src/partest/scala/tools/partest/DirectTest.scala | 131 +
src/partest/scala/tools/partest/IcodeTest.scala | 45 +
src/partest/scala/tools/partest/MemoryTest.scala | 38 +
.../scala/tools/partest/PartestDefaults.scala | 13 +-
src/partest/scala/tools/partest/PartestTask.scala | 182 +-
src/partest/scala/tools/partest/ReplTest.scala | 28 +-
.../scala/tools/partest/ScaladocModelTest.scala | 205 +
src/partest/scala/tools/partest/SecurityTest.scala | 2 +-
src/partest/scala/tools/partest/SigTest.scala | 27 +-
.../tools/partest/StoreReporterDirectTest.scala | 15 +
src/partest/scala/tools/partest/TestUtil.scala | 12 +-
.../partest/instrumented/Instrumentation.scala | 92 +
.../scala/tools/partest/instrumented/Profiler.java | 82 +
.../tools/partest/javaagent/ASMTransformer.java | 49 +
.../scala/tools/partest/javaagent/MANIFEST.MF | 1 +
.../tools/partest/javaagent/ProfilerVisitor.java | 59 +
.../tools/partest/javaagent/ProfilingAgent.java | 25 +
.../scala/tools/partest/nest/AntRunner.scala | 6 +-
.../scala/tools/partest/nest/CompileManager.scala | 132 +-
.../tools/partest/nest/ConsoleFileManager.scala | 43 +-
.../scala/tools/partest/nest/ConsoleRunner.scala | 32 +-
src/partest/scala/tools/partest/nest/Diff.java | 873 --
.../scala/tools/partest/nest/DiffPrint.java | 606 -
.../scala/tools/partest/nest/DirectRunner.scala | 78 +-
.../scala/tools/partest/nest/FileManager.scala | 47 +-
.../scala/tools/partest/nest/NestRunner.scala | 2 +-
src/partest/scala/tools/partest/nest/NestUI.scala | 6 +-
.../scala/tools/partest/nest/PathSettings.scala | 22 +-
.../tools/partest/nest/ReflectiveRunner.scala | 30 +-
.../scala/tools/partest/nest/RunnerManager.scala | 862 ++
.../scala/tools/partest/nest/RunnerUtils.scala | 2 +-
.../scala/tools/partest/nest/SBTRunner.scala | 83 +-
.../scala/tools/partest/nest/TestFile.scala | 44 +-
src/partest/scala/tools/partest/nest/Worker.scala | 1019 --
src/partest/scala/tools/partest/package.scala | 89 +-
.../scala/tools/partest/utils/PrintMgr.scala | 2 +-
.../scala/tools/partest/utils/Properties.scala | 4 +-
src/reflect/scala/reflect/api/Annotations.scala | 203 +
src/reflect/scala/reflect/api/BuildUtils.scala | 78 +
src/reflect/scala/reflect/api/Constants.scala | 223 +
src/reflect/scala/reflect/api/Exprs.scala | 177 +
src/reflect/scala/reflect/api/FlagSets.scala | 180 +
src/reflect/scala/reflect/api/Importers.scala | 103 +
src/reflect/scala/reflect/api/JavaMirrors.scala | 56 +
src/reflect/scala/reflect/api/JavaUniverse.scala | 46 +
src/reflect/scala/reflect/api/Mirror.scala | 122 +
src/reflect/scala/reflect/api/Mirrors.scala | 504 +
src/reflect/scala/reflect/api/Names.scala | 118 +
src/reflect/scala/reflect/api/Position.scala | 164 +
src/reflect/scala/reflect/api/Positions.scala | 54 +
src/reflect/scala/reflect/api/Printers.scala | 229 +
src/reflect/scala/reflect/api/Scopes.scala | 70 +
.../scala/reflect/api/StandardDefinitions.scala | 314 +
src/reflect/scala/reflect/api/StandardNames.scala | 107 +
src/reflect/scala/reflect/api/Symbols.scala | 1022 ++
src/reflect/scala/reflect/api/TagInterop.scala | 43 +
src/reflect/scala/reflect/api/TreeCreator.scala | 12 +
src/reflect/scala/reflect/api/Trees.scala | 3021 ++++
src/reflect/scala/reflect/api/TypeCreator.scala | 12 +
src/reflect/scala/reflect/api/TypeTags.scala | 354 +
src/reflect/scala/reflect/api/Types.scala | 997 ++
src/reflect/scala/reflect/api/Universe.scala | 96 +
src/reflect/scala/reflect/api/package.scala | 47 +
.../reflect/internal/AnnotationCheckers.scala | 144 +
.../scala/reflect/internal/AnnotationInfos.scala | 363 +
.../scala/reflect/internal/BaseTypeSeqs.scala | 239 +
.../scala/reflect/internal/BuildUtils.scala | 68 +
.../scala/reflect/internal/CapturedVariables.scala | 36 +
src/reflect/scala/reflect/internal/Chars.scala | 98 +
.../reflect/internal/ClassfileConstants.scala | 385 +
src/reflect/scala/reflect/internal/Constants.scala | 266 +
.../scala/reflect/internal/Definitions.scala | 1292 ++
.../reflect/internal/ExistentialsAndSkolems.scala | 119 +
.../scala/reflect/internal/FatalError.scala | 6 +
src/reflect/scala/reflect/internal/FlagSets.scala | 45 +
src/reflect/scala/reflect/internal/Flags.scala | 500 +
src/reflect/scala/reflect/internal/HasFlags.scala | 175 +
src/reflect/scala/reflect/internal/Importers.scala | 466 +
.../scala/reflect/internal/InfoTransformers.scala | 51 +
src/reflect/scala/reflect/internal/Kinds.scala | 233 +
src/reflect/scala/reflect/internal/Mirrors.scala | 295 +
.../reflect/internal/MissingRequirementError.scala | 24 +
src/reflect/scala/reflect/internal/Names.scala | 542 +
src/reflect/scala/reflect/internal/Phase.scala | 66 +
src/reflect/scala/reflect/internal/Positions.scala | 65 +
src/reflect/scala/reflect/internal/Printers.scala | 691 +
src/reflect/scala/reflect/internal/Required.scala | 15 +
src/reflect/scala/reflect/internal/Scopes.scala | 401 +
.../scala/reflect/internal/StdAttachments.scala | 76 +
.../scala/reflect/internal/StdCreators.scala | 21 +
src/reflect/scala/reflect/internal/StdNames.scala | 1241 ++
.../scala/reflect/internal/SymbolTable.scala | 366 +
src/reflect/scala/reflect/internal/Symbols.scala | 3385 +++++
src/reflect/scala/reflect/internal/TreeGen.scala | 298 +
src/reflect/scala/reflect/internal/TreeInfo.scala | 764 +
src/reflect/scala/reflect/internal/Trees.scala | 1701 +++
.../scala/reflect/internal/TypeDebugging.scala | 71 +
src/reflect/scala/reflect/internal/Types.scala | 7435 +++++++++
.../internal/annotations/compileTimeOnly.scala | 31 +
.../internal/annotations/uncheckedBounds.scala | 13 +
.../reflect/internal/pickling/ByteCodecs.scala | 221 +
.../reflect/internal/pickling/PickleBuffer.scala | 188 +
.../reflect/internal/pickling/PickleFormat.scala | 225 +
.../reflect/internal/pickling/UnPickler.scala | 883 ++
.../reflect/internal/settings/AbsSettings.scala | 23 +
.../internal/settings/MutableSettings.scala | 50 +
.../scala/reflect/internal/transform/Erasure.scala | 383 +
.../reflect/internal/transform/RefChecks.scala | 13 +
.../reflect/internal/transform/Transforms.scala | 41 +
.../scala/reflect/internal/transform/UnCurry.scala | 70 +
.../scala/reflect/internal/util/Collections.scala | 227 +
.../scala/reflect/internal/util/HashSet.scala | 106 +
.../scala/reflect/internal/util/Origins.scala | 119 +
.../scala/reflect/internal/util/Position.scala | 311 +
src/reflect/scala/reflect/internal/util/Set.scala | 28 +
.../scala/reflect/internal/util/SourceFile.scala | 171 +
.../scala/reflect/internal/util/Statistics.scala | 273 +
.../scala/reflect/internal/util/StringOps.scala | 99 +
.../internal/util/StripMarginInterpolator.scala | 40 +
.../scala/reflect/internal/util/TableDef.scala | 95 +
.../scala/reflect/internal/util/ThreeValues.scala | 14 +
.../internal/util/TraceSymbolActivity.scala | 174 +
.../scala/reflect/internal/util/WeakHashSet.scala | 430 +
.../scala/reflect/internal/util/package.scala | 9 +
src/reflect/scala/reflect/io/AbstractFile.scala | 264 +
src/reflect/scala/reflect/io/Directory.scala | 79 +
src/reflect/scala/reflect/io/File.scala | 197 +
.../scala/reflect/io/FileOperationException.scala | 13 +
src/reflect/scala/reflect/io/NoAbstractFile.scala | 33 +
src/reflect/scala/reflect/io/Path.scala | 289 +
src/reflect/scala/reflect/io/PlainFile.scala | 104 +
src/reflect/scala/reflect/io/Streamable.scala | 127 +
.../scala/reflect/io/VirtualDirectory.scala | 72 +
src/reflect/scala/reflect/io/VirtualFile.scala | 103 +
src/reflect/scala/reflect/io/ZipArchive.scala | 234 +
src/reflect/scala/reflect/macros/Aliases.scala | 112 +
src/reflect/scala/reflect/macros/Attachments.scala | 64 +
src/reflect/scala/reflect/macros/Context.scala | 90 +
src/reflect/scala/reflect/macros/Enclosures.scala | 66 +
src/reflect/scala/reflect/macros/Evals.scala | 57 +
src/reflect/scala/reflect/macros/ExprUtils.scala | 51 +
src/reflect/scala/reflect/macros/FrontEnds.scala | 47 +
.../scala/reflect/macros/Infrastructure.scala | 25 +
src/reflect/scala/reflect/macros/Names.scala | 23 +
src/reflect/scala/reflect/macros/Parsers.scala | 22 +
src/reflect/scala/reflect/macros/Reifiers.scala | 95 +
src/reflect/scala/reflect/macros/TreeBuilder.scala | 72 +
src/reflect/scala/reflect/macros/Typers.scala | 95 +
src/reflect/scala/reflect/macros/Universe.scala | 241 +
src/reflect/scala/reflect/macros/package.scala | 15 +
.../scala/reflect/runtime/JavaMirrors.scala | 1315 ++
.../scala/reflect/runtime/JavaUniverse.scala | 28 +
.../scala/reflect/runtime/ReflectSetup.scala | 12 +
.../scala/reflect/runtime/ReflectionUtils.scala | 85 +
src/reflect/scala/reflect/runtime/Settings.scala | 49 +
.../scala/reflect/runtime/SymbolLoaders.scala | 145 +
.../scala/reflect/runtime/SymbolTable.scala | 45 +
.../scala/reflect/runtime/SynchronizedOps.scala | 52 +
.../reflect/runtime/SynchronizedSymbols.scala | 140 +
.../scala/reflect/runtime/SynchronizedTypes.scala | 106 +
.../scala/reflect/runtime/TwoWayCache.scala | 66 +
src/reflect/scala/reflect/runtime/package.scala | 37 +
src/scalacheck/org/scalacheck/Arbitrary.scala | 47 +-
src/scalacheck/org/scalacheck/Arg.scala | 4 +-
src/scalacheck/org/scalacheck/Commands.scala | 35 +-
.../org/scalacheck/ConsoleReporter.scala | 53 +-
src/scalacheck/org/scalacheck/Gen.scala | 144 +-
src/scalacheck/org/scalacheck/Pretty.scala | 27 +-
src/scalacheck/org/scalacheck/Prop.scala | 70 +-
src/scalacheck/org/scalacheck/Properties.scala | 20 +-
src/scalacheck/org/scalacheck/Shrink.scala | 4 +-
src/scalacheck/org/scalacheck/Test.scala | 202 +-
src/scalacheck/org/scalacheck/util/Buildable.scala | 7 +-
.../org/scalacheck/util/CmdLineParser.scala | 13 +-
src/scalacheck/org/scalacheck/util/FreqMap.scala | 4 +-
src/scalacheck/org/scalacheck/util/StdRand.scala | 4 +-
src/scalap/scala/tools/scalap/Arguments.scala | 4 +-
.../scala/tools/scalap/ByteArrayReader.scala | 4 +-
src/scalap/scala/tools/scalap/Classfile.scala | 2 +-
src/scalap/scala/tools/scalap/Classfiles.scala | 28 +-
src/scalap/scala/tools/scalap/CodeWriter.scala | 8 +-
src/scalap/scala/tools/scalap/Decode.scala | 18 +-
src/scalap/scala/tools/scalap/JavaWriter.scala | 49 +-
src/scalap/scala/tools/scalap/Main.scala | 28 +-
src/scalap/scala/tools/scalap/MetaParser.scala | 4 +-
src/scalap/scala/tools/scalap/Names.scala | 96 -
src/scalap/scala/tools/scalap/Properties.scala | 2 +-
.../tools/scalap/scalax/rules/Memoisable.scala | 4 +-
.../scala/tools/scalap/scalax/rules/Rules.scala | 2 +-
.../scala/tools/scalap/scalax/rules/SeqRule.scala | 2 +-
.../scala/tools/scalap/scalax/rules/package.scala | 9 +
.../scalax/rules/scalasig/ClassFileParser.scala | 7 +-
.../scalap/scalax/rules/scalasig/ScalaSig.scala | 8 +-
.../scalax/rules/scalasig/ScalaSigPrinter.scala | 7 +-
src/swing/scala/swing/AbstractButton.scala | 2 +-
src/swing/scala/swing/Action.scala | 4 +-
src/swing/scala/swing/Alignment.scala | 2 +-
src/swing/scala/swing/Applet.scala | 2 +-
src/swing/scala/swing/BorderPanel.scala | 2 +-
src/swing/scala/swing/BoxPanel.scala | 2 +-
src/swing/scala/swing/BufferWrapper.scala | 2 +-
src/swing/scala/swing/Button.scala | 2 +-
src/swing/scala/swing/ButtonGroup.scala | 7 +-
src/swing/scala/swing/CheckBox.scala | 2 +-
src/swing/scala/swing/ComboBox.scala | 6 +-
src/swing/scala/swing/Component.scala | 50 +-
src/swing/scala/swing/Container.scala | 2 +-
src/swing/scala/swing/EditorPane.scala | 2 +-
src/swing/scala/swing/FileChooser.scala | 14 +-
src/swing/scala/swing/FlowPanel.scala | 2 +-
src/swing/scala/swing/Font.scala.disabled | 32 +-
src/swing/scala/swing/FormattedTextField.scala | 2 +-
src/swing/scala/swing/GUIApplication.scala | 30 -
src/swing/scala/swing/GridBagPanel.scala | 5 +-
src/swing/scala/swing/GridPanel.scala | 2 +-
src/swing/scala/swing/Label.scala | 2 +-
src/swing/scala/swing/LayoutContainer.scala | 22 +-
src/swing/scala/swing/ListView.scala | 18 +-
src/swing/scala/swing/MainFrame.scala | 4 +-
src/swing/scala/swing/Menu.scala | 2 +-
src/swing/scala/swing/Orientable.scala | 2 +-
src/swing/scala/swing/Orientation.scala | 2 +-
src/swing/scala/swing/Oriented.scala | 2 +-
src/swing/scala/swing/Panel.scala | 2 +-
src/swing/scala/swing/PasswordField.scala | 2 +-
src/swing/scala/swing/ProgressBar.scala | 2 +-
src/swing/scala/swing/Publisher.scala | 16 +-
src/swing/scala/swing/RadioButton.scala | 2 +-
src/swing/scala/swing/Reactions.scala | 2 +-
src/swing/scala/swing/Reactor.scala | 2 +-
src/swing/scala/swing/RichWindow.scala | 54 +-
src/swing/scala/swing/RootPanel.scala | 2 +-
src/swing/scala/swing/ScrollBar.scala | 2 +-
src/swing/scala/swing/ScrollPane.scala | 8 +-
src/swing/scala/swing/Scrollable.scala | 2 +-
src/swing/scala/swing/Separator.scala | 2 +-
src/swing/scala/swing/SequentialContainer.scala | 2 +-
src/swing/scala/swing/SimpleGUIApplication.scala | 47 -
src/swing/scala/swing/SimpleSwingApplication.scala | 19 +
src/swing/scala/swing/Slider.scala | 2 +-
src/swing/scala/swing/SplitPane.scala | 10 +-
src/swing/scala/swing/Swing.scala | 3 +-
src/swing/scala/swing/SwingActor.scala | 24 +-
src/swing/scala/swing/SwingApplication.scala | 8 +
src/swing/scala/swing/SwingWorker.scala | 4 +-
src/swing/scala/swing/TabbedPane.scala | 5 +-
src/swing/scala/swing/Table.scala | 7 +-
src/swing/scala/swing/TextArea.scala | 2 +-
src/swing/scala/swing/TextComponent.scala | 2 +-
src/swing/scala/swing/TextField.scala | 2 +-
src/swing/scala/swing/ToggleButton.scala | 2 +-
src/swing/scala/swing/UIElement.scala | 28 +-
src/swing/scala/swing/Window.scala | 14 +-
src/swing/scala/swing/event/ActionEvent.scala | 2 +-
src/swing/scala/swing/event/AdjustingEvent.scala | 15 +-
.../scala/swing/event/BackgroundChanged.scala | 2 +-
src/swing/scala/swing/event/ButtonClicked.scala | 2 +-
src/swing/scala/swing/event/CaretUpdate.scala | 2 +-
src/swing/scala/swing/event/ComponentEvent.scala | 11 +-
src/swing/scala/swing/event/ContainerEvent.scala | 2 +-
src/swing/scala/swing/event/EditDone.scala | 2 +-
src/swing/scala/swing/event/Event.scala | 2 +-
src/swing/scala/swing/event/FocusEvent.scala | 2 +-
src/swing/scala/swing/event/FontChanged.scala | 2 +-
.../scala/swing/event/ForegroundChanged.scala | 2 +-
src/swing/scala/swing/event/InputEvent.scala | 2 +-
src/swing/scala/swing/event/Key.scala | 2 +-
src/swing/scala/swing/event/KeyEvent.scala | 2 +-
src/swing/scala/swing/event/ListEvent.scala | 6 +-
src/swing/scala/swing/event/MouseEvent.scala | 2 +-
src/swing/scala/swing/event/SelectionEvent.scala | 2 +-
src/swing/scala/swing/event/TableEvent.scala | 2 +-
src/swing/scala/swing/event/UIEvent.scala | 2 +-
src/swing/scala/swing/event/ValueChanged.scala | 2 +-
src/swing/scala/swing/event/WindowActivated.scala | 2 +-
src/swing/scala/swing/event/WindowClosing.scala | 2 +-
.../scala/swing/event/WindowDeactivated.scala | 2 +-
.../scala/swing/event/WindowDeiconified.scala | 2 +-
src/swing/scala/swing/event/WindowEvent.scala | 2 +-
src/swing/scala/swing/event/WindowIconified.scala | 2 +-
src/swing/scala/swing/event/WindowOpened.scala | 2 +-
src/swing/scala/swing/model/Matrix.scala | 2 +-
src/swing/scala/swing/package.scala | 3 +
src/swing/scala/swing/test/ButtonApp.scala | 25 -
src/swing/scala/swing/test/CelsiusConverter.scala | 43 -
src/swing/scala/swing/test/CelsiusConverter2.scala | 37 -
src/swing/scala/swing/test/ComboBoxes.scala | 87 -
src/swing/scala/swing/test/CountButton.scala | 31 -
src/swing/scala/swing/test/Dialogs.scala | 177 -
src/swing/scala/swing/test/GridBagDemo.scala | 65 -
src/swing/scala/swing/test/HelloWorld.scala | 14 -
src/swing/scala/swing/test/LabelTest.scala | 20 -
src/swing/scala/swing/test/LinePainting.scala | 53 -
src/swing/scala/swing/test/ListViewDemo.scala | 18 -
src/swing/scala/swing/test/SimpleApplet.scala | 19 -
src/swing/scala/swing/test/SwingApp.scala | 30 -
src/swing/scala/swing/test/TableSelection.scala | 97 -
src/swing/scala/swing/test/UIDemo.scala | 148 -
src/swing/scala/swing/test/images/banana.jpg | Bin 6000 -> 0 bytes
src/swing/scala/swing/test/images/margarita1.jpg | Bin 14770 -> 0 bytes
src/swing/scala/swing/test/images/margarita2.jpg | Bin 17310 -> 0 bytes
src/swing/scala/swing/test/images/rose.jpg | Bin 13808 -> 0 bytes
.../scala/tools/util/YourkitProfiling.scala | 63 -
starr.number | 2 +
test/ant/test-basic/build.xml | 33 +
.../test-basic/src/test-1.scala} | 0
test/benchmarking/AVL-insert-random.scala | 67 +
test/benchmarking/AVL-insert.scala | 67 +
test/benchmarking/ParCtrie-bfs.scala | 73 +
test/benchmarking/ParCtrie-map.scala | 21 +
test/benchmarking/ParCtrie-nums.scala | 39 +
test/benchmarking/ParCtrie-size.scala | 37 +
test/benchmarking/ParHashMap.scala | 33 +
test/benchmarking/ParVector-reduce.scala | 33 +
test/benchmarking/TreeSetInsert.scala | 70 +
test/benchmarking/TreeSetInsertRandom.scala | 65 +
test/benchmarking/TreeSetIterator.scala | 69 +
test/benchmarking/TreeSetRemove.scala | 69 +
test/benchmarking/TreeSetRemoveRandom.scala | 66 +
test/benchmarking/t6726-patmat-analysis.scala | 4005 +++++
test/benchmarks/bench | 0
.../scala/collection/immutable/range-bench.scala | 61 +
.../scala/collection/mutable/hashtable-bench.scala | 61 +
.../parallel/benchmarks/arrays/Resetting.scala | 8 +-
.../parallel_array/MatrixMultiplication.scala | 26 +-
test/benchmarks/src/scala/util/HashSpeedTest.scala | 150 +-
test/{files/jvm/bug680.check => debug/OBSOLETE} | 0
.../bug680.check => debug/buildmanager/.gitignore} | 0
.../jvm/bug680.check => debug/jvm/.gitignore} | 0
.../jvm/bug680.check => debug/neg/.gitignore} | 0
.../jvm/bug680.check => debug/pos/.gitignore} | 0
.../jvm/bug680.check => debug/res/.gitignore} | 0
.../jvm/bug680.check => debug/run/.gitignore} | 0
.../bug680.check => debug/scalacheck/.gitignore} | 0
.../jvm/bug680.check => debug/scalap/.gitignore} | 0
.../jvm/bug680.check => debug/shootout/.gitignore} | 0
.../script/loadAndExecute/lAndE1.scala | 0
.../script/loadAndExecute/lAndE2.scala | 0
.../script/loadAndExecute/loadAndExecute.scala | 0
test/disabled-windows/script/utf8.bat | 22 +-
test/disabled-windows/script/utf8.scala | 0
.../{files => disabled}/buildmanager/t2652/A.scala | 0
.../{files => disabled}/buildmanager/t2652/B.scala | 0
.../buildmanager/t2652/t2652.changes/A2.scala | 0
test/disabled/buildmanager/t2652/t2652.check | 9 +
.../buildmanager/t2652/t2652.test | 0
.../continuations-neg/infer0.check | 0
test/disabled/continuations-neg/infer0.scala | 12 +
test/disabled/jvm/concurrent-future.check | 14 +
test/disabled/jvm/concurrent-future.scala | 122 +
test/disabled/lib/annotations.jar | Bin 2242 -> 0 bytes
test/disabled/lib/annotations.jar.desired.sha1 | 1 -
test/disabled/lib/enums.jar | Bin 1372 -> 0 bytes
test/disabled/lib/enums.jar.desired.sha1 | 1 -
test/disabled/lib/genericNest.jar | Bin 1136 -> 0 bytes
test/disabled/lib/genericNest.jar.desired.sha1 | 1 -
test/disabled/lib/methvsfield.jar | Bin 609 -> 0 bytes
test/disabled/lib/methvsfield.jar.desired.sha1 | 1 -
test/disabled/lib/nest.jar | Bin 2920 -> 0 bytes
test/disabled/lib/nest.jar.desired.sha1 | 1 -
test/disabled/lib/scalacheck.jar | Bin 746629 -> 0 bytes
test/disabled/lib/scalacheck.jar.desired.sha1 | 1 -
test/disabled/neg/abstract-report3.check | 39 +
test/disabled/neg/abstract-report3.scala | 1 +
.../pos/caseclass-parents.flags} | 0
test/disabled/pos/caseclass-parents.scala | 11 +
.../pos/caseclass-productN.flags} | 0
test/disabled/pos/caseclass-productN.scala | 20 +
test/disabled/pos/t1545.scala | 18 +
.../pos/bug1737 => disabled/pos/t1737}/A.java | 0
.../pos/bug1737 => disabled/pos/t1737}/B.java | 0
.../pos/bug1737 => disabled/pos/t1737}/c.scala | 0
test/disabled/pos/{bug2919.scala => t2919.scala} | 0
test/disabled/presentation/akka.check | 492 +
test/disabled/presentation/akka.flags | 18 +
test/disabled/presentation/akka/Runner.scala | 3 +
.../presentation/akka/src/akka/AkkaException.scala | 40 +
.../presentation/akka/src/akka/actor/Actor.scala | 503 +
.../akka/src/akka/actor/ActorRef.scala | 1433 ++
.../akka/src/akka/actor/ActorRegistry.scala | 389 +
.../presentation/akka/src/akka/actor/Actors.java | 108 +
.../akka/actor/BootableActorLoaderService.scala | 60 +
.../presentation/akka/src/akka/actor/FSM.scala | 527 +
.../akka/src/akka/actor/Scheduler.scala | 133 +
.../akka/src/akka/actor/Supervisor.scala | 176 +
.../akka/src/akka/actor/UntypedActor.scala | 134 +
.../presentation/akka/src/akka/actor/package.scala | 23 +
.../presentation/akka/src/akka/config/Config.scala | 93 +
.../akka/src/akka/config/ConfigParser.scala | 74 +
.../akka/src/akka/config/Configuration.scala | 166 +
.../akka/src/akka/config/Configurator.scala | 21 +
.../akka/src/akka/config/Importer.scala | 64 +
.../akka/src/akka/config/SupervisionConfig.scala | 134 +
.../akka/src/akka/dataflow/DataFlow.scala | 165 +
.../akka/src/akka/dispatch/Dispatchers.scala | 227 +
.../ExecutorBasedEventDrivenDispatcher.scala | 305 +
...torBasedEventDrivenWorkStealingDispatcher.scala | 165 +
.../akka/src/akka/dispatch/MailboxHandling.scala | 68 +
.../akka/src/akka/dispatch/MessageHandling.scala | 260 +
.../src/akka/dispatch/ThreadBasedDispatcher.scala | 52 +
.../akka/src/akka/dispatch/ThreadPoolBuilder.scala | 259 +
.../akka/src/akka/event/EventHandler.scala | 235 +
.../akka/src/akka/event/JavaEventHandler.java | 35 +
.../presentation/akka/src/akka/japi/JavaAPI.scala | 108 +
.../akka/remoteinterface/RemoteEventHandler.scala | 43 +
.../src/akka/remoteinterface/RemoteInterface.scala | 493 +
.../akka/src/akka/routing/Iterators.scala | 49 +
.../akka/src/akka/routing/Listeners.scala | 37 +
.../presentation/akka/src/akka/routing/Pool.scala | 292 +
.../akka/src/akka/routing/Routers.scala | 87 +
.../akka/src/akka/routing/Routing.scala | 64 +
.../presentation/akka/src/akka/util/Address.scala | 29 +
.../akka/src/akka/util/AkkaLoader.scala | 94 +
.../presentation/akka/src/akka/util/Bootable.scala | 10 +
.../akka/src/akka/util/BoundedBlockingQueue.scala | 326 +
.../presentation/akka/src/akka/util/Crypt.scala | 44 +
.../presentation/akka/src/akka/util/Duration.scala | 437 +
.../presentation/akka/src/akka/util/HashCode.scala | 57 +
.../presentation/akka/src/akka/util/Helpers.scala | 99 +
.../akka/src/akka/util/ListenerManagement.scala | 81 +
.../presentation/akka/src/akka/util/LockUtil.scala | 197 +
.../akka/src/akka/util/ReflectiveAccess.scala | 232 +
.../presentation/akka/src/akka/util/package.scala | 27 +
.../akka/src/com/eaio/util/lang/Hex.java | 215 +
.../akka/src/com/eaio/uuid/MACAddressParser.java | 116 +
.../presentation/akka/src/com/eaio/uuid/UUID.java | 311 +
.../akka/src/com/eaio/uuid/UUIDGen.java | 364 +
.../akka/src/com/eaio/uuid/UUIDHelper.java | 86 +
.../akka/src/com/eaio/uuid/UUIDHolder.java | 42 +
test/disabled/presentation/doc.check | 1 +
test/disabled/presentation/doc/doc.scala | 139 +
test/disabled/presentation/doc/src/Class.scala | 1 +
test/disabled/presentation/doc/src/p/Base.scala | 11 +
test/disabled/presentation/doc/src/p/Derived.scala | 9 +
.../presentation/ide-bug-1000450.check} | 0
.../presentation/ide-bug-1000450/Runner.scala | 6 +
.../presentation/ide-bug-1000450/src/Ranges.scala | 5 +
test/disabled/presentation/ide-bug-1000508.check | 163 +
.../presentation/ide-bug-1000508/Runner.scala | 3 +
.../presentation/ide-bug-1000508/src/Foo.scala | 3 +
.../presentation/ide-bug-1000545.check} | 0
.../presentation/ide-bug-1000545/Runner.scala | 3 +
.../ide-bug-1000545/src/CompletionFails.scala | 25 +
test/disabled/presentation/ide-t1000620.check | 37 +
.../presentation/ide-t1000620/Runner.scala | 3 +
.../presentation/ide-t1000620/src/a/A.scala | 5 +
.../presentation/ide-t1000620/src/b/B.scala | 8 +
test/disabled/presentation/shutdown-deadlock.check | 3 +
.../shutdown-deadlock/ShutdownDeadlockTest.scala | 45 +
.../shutdown-deadlock/src/arrays.scala | 937 ++
test/disabled/presentation/simple-tests.check | 2 +-
test/disabled/presentation/timeofday.check | 100 +
test/disabled/presentation/timeofday/Runner.scala | 3 +
.../presentation/timeofday/src/timeofday.scala | 35 +
test/disabled/properties.check | 158 +
test/disabled/properties/Runner.scala | 3 +
test/disabled/properties/src/properties.scala | 54 +
test/{files => disabled}/run/lisp.check | 0
test/{files => disabled}/run/lisp.scala | 0
test/{files => disabled}/run/syncchannel.check | 0
test/{files => disabled}/run/syncchannel.scala | 0
test/{files => disabled}/run/t2886.scala | 0
test/disabled/run/{bug4279.scala => t4279.scala} | 0
test/disabled/run/t4532.check | 15 +
test/disabled/run/t4532.scala | 34 +
test/disabled/run/t6026.check | 9 +
test/disabled/run/t6026.scala | 9 +
test/disabled/run/t6987.check | 1 +
test/disabled/run/t6987.scala | 43 +
test/disabled/scalacheck/redblack.scala | 157 -
test/disabled/script/fact.bat | 34 +-
test/disabled/script/fact.scala | 0
test/disabled/script/second.bat | 6 +-
test/disabled/script/second.scala | 0
test/disabled/script/t1015.bat | 24 +-
test/disabled/script/t1015.scala | 0
test/disabled/script/t1017.bat | 30 +-
test/disabled/script/t1017.scala | 0
test/files/{gitignore.SAMPLE => .gitignore} | 0
test/files/android/HelloAndroid.scala | 16 -
test/files/android/HelloAndroid.xml | 11 -
test/files/ant/README | 42 +
test/files/ant/fsc-build.xml | 31 -
test/files/ant/fsc.check | 13 -
test/files/ant/fsc001-build.check | 14 +
test/files/ant/fsc001-build.xml | 26 +
test/files/ant/fsc001.scala | 7 +
test/files/ant/fsc002-build.check | 14 +
test/files/ant/fsc002-build.xml | 28 +
test/files/ant/{fsc.scala => fsc002.scala} | 0
test/files/ant/fsc003-build.check | 14 +
test/files/ant/fsc003-build.xml | 25 +
test/files/ant/fsc003.scala | 7 +
test/files/ant/imported.xml | 111 +-
test/files/ant/scalac-build.xml | 31 -
test/files/ant/scalac.check | 13 -
test/files/ant/scalac001-build.check | 14 +
test/files/ant/scalac001-build.xml | 26 +
test/files/ant/{scalac.scala => scalac001.scala} | 0
test/files/ant/scalac002-build.check | 14 +
test/files/ant/scalac002-build.xml | 28 +
test/files/ant/scalac002.scala | 7 +
test/files/ant/scalac003-build.check | 14 +
test/files/ant/scalac003-build.xml | 25 +
test/files/ant/scalac003.scala | 7 +
test/files/ant/scalac004-build.check | 24 +
test/files/ant/scalac004-build.xml | 26 +
test/files/ant/scalac004.scala | 11 +
test/files/ant/scaladoc-build.check | 15 +
test/files/ant/scaladoc-build.xml | 11 +-
test/files/ant/scaladoc.check | 13 -
test/files/ant/scaladoc.scala | 5 +-
test/files/bench/equality/eq.scala | 68 +-
test/files/bench/equality/eqeq.scala | 92 +-
test/files/buildmanager/overloaded_1/A.scala | 11 -
test/files/buildmanager/t2556_1/t2556_1.check | 6 +-
test/files/buildmanager/t2556_2/t2556_2.check | 8 +-
test/files/buildmanager/t2556_3/t2556_3.check | 8 +-
test/files/buildmanager/t2557/t2557.check | 6 +-
test/files/buildmanager/t2559/D.scala | 6 +-
test/files/buildmanager/t2559/t2559.check | 5 -
test/files/buildmanager/t2562/t2562.check | 10 +-
test/files/buildmanager/t2649/t2649.check | 4 +-
test/files/buildmanager/t2650_1/t2650_1.check | 1 +
test/files/buildmanager/t2650_2/t2650_2.check | 1 +
test/files/buildmanager/t2650_3/t2650_3.check | 1 +
test/files/buildmanager/t2650_4/t2650_4.check | 1 +
test/files/buildmanager/t2651_3/t2651_3.check | 2 +-
test/files/buildmanager/t2651_4/t2651_4.check | 4 +-
test/files/buildmanager/t2652/t2652.check | 9 -
test/files/buildmanager/t2653/t2653.check | 2 +-
test/files/buildmanager/t2655/t2655.check | 6 +-
test/files/buildmanager/t2657/t2657.check | 5 +-
test/files/buildmanager/t2789/t2789.check | 4 +-
test/files/buildmanager/t2790/t2790.check | 5 +-
test/files/buildmanager/t2792/t2792.check | 3 +
test/files/codelib/code.jar.desired.sha1 | 1 +
test/files/continuations-neg/function2.check | 2 +-
test/files/continuations-neg/infer0.scala | 14 -
test/files/continuations-neg/lazy.check | 8 +-
test/files/continuations-neg/t1929.check | 4 +-
test/files/continuations-neg/t3628.check | 4 +
.../continuations-neg/t3628.scala | 0
test/files/continuations-neg/t3718.check | 4 +
test/files/continuations-neg/t3718.scala | 3 +
.../t5314-missing-result-type.check | 4 +
.../t5314-missing-result-type.scala | 13 +
test/files/continuations-neg/t5314-npe.check | 4 +
test/files/continuations-neg/t5314-npe.scala | 3 +
.../continuations-neg/t5314-return-reset.check | 4 +
.../continuations-neg/t5314-return-reset.scala | 21 +
.../files/continuations-neg/t5314-type-error.check | 6 +
.../files/continuations-neg/t5314-type-error.scala | 17 +
test/files/continuations-neg/trycatch2.scala | 6 +-
test/files/continuations-run/basics.check | 0
test/files/continuations-run/basics.scala | 0
test/files/continuations-run/match2.scala | 2 +-
test/files/continuations-run/shift-pct.check | 25 +
test/files/continuations-run/shift-pct.scala | 30 +
test/files/continuations-run/t5314-2.check | 5 +
test/files/continuations-run/t5314-2.scala | 44 +
test/files/continuations-run/t5314-3.check | 4 +
test/files/continuations-run/t5314-3.scala | 27 +
test/files/continuations-run/t5314-with-if.check | 1 +
test/files/continuations-run/t5314-with-if.scala | 17 +
test/files/continuations-run/t5314.check | 8 +
test/files/continuations-run/t5314.scala | 52 +
test/files/continuations-run/t5472.check | 1 +
test/files/continuations-run/t5472.scala | 90 +
.../bug680.check => continuations-run/z1673.check} | 0
test/files/continuations-run/z1673.scala | 31 +
test/files/detach-run/actor/Client.scala | 18 +-
test/files/detach-run/actor/Server.scala | 8 +-
test/files/detach-run/basic/Client.scala | 26 +-
test/files/detach-run/basic/Server.scala | 14 +-
test/files/detach-run/basic/ServerConsole.scala | 44 +-
test/files/disabled/A.scala | 11 +
.../overloaded_1 => disabled}/overloaded_1.check | 0
.../overloaded_1 => disabled}/overloaded_1.test | 0
test/files/disabled/run/t4602.scala | 57 +
.../files/{buildmanager => disabled}/t4245/A.scala | 0
.../{buildmanager => disabled}/t4245/t4245.check | 0
.../{buildmanager => disabled}/t4245/t4245.test | 0
test/files/disabled/t7020.check | 17 +
.../caseinherit.flags => disabled/t7020.flags} | 0
test/files/disabled/t7020.scala | 30 +
test/files/instrumented/InstrumentationTest.check | 8 +
test/files/instrumented/InstrumentationTest.scala | 30 +
test/files/instrumented/README | 15 +
.../instrumented/inline-in-constructors.check | 3 +
.../instrumented/inline-in-constructors.flags | 1 +
.../inline-in-constructors/assert_1.scala | 13 +
.../inline-in-constructors/bar_2.scala | 7 +
.../inline-in-constructors/test_3.scala | 15 +
test/files/instrumented/t6611.check | 1 +
test/files/instrumented/t6611.scala | 35 +
test/files/jvm/annotations.check | 12 +
test/files/jvm/annotations.scala | 26 +-
test/files/jvm/backendBugUnapply.scala | 9 +-
test/files/jvm/bigints.check | 9 +
test/files/jvm/bigints.scala | 51 +-
test/files/jvm/bytecode-test-example.check | 1 +
test/files/jvm/bytecode-test-example/Foo_1.scala | 9 +
test/files/jvm/bytecode-test-example/Test.scala | 32 +
test/files/jvm/deprecation.cmds | 3 -
test/files/jvm/deprecation/Test_1.scala | 6 +-
test/files/jvm/duration-java.check | 364 +
test/files/jvm/duration-java/Test.java | 46 +
test/files/jvm/duration-tck.scala | 198 +
test/files/jvm/future-spec/FutureTests.scala | 553 +
test/files/jvm/future-spec/PromiseTests.scala | 246 +
test/files/jvm/future-spec/TryTests.scala | 130 +
test/files/jvm/future-spec/main.scala | 110 +
test/files/jvm/inner.scala | 8 +-
test/files/jvm/interpreter.check | 26 +-
test/files/jvm/interpreter.scala | 11 +-
test/files/jvm/libnatives.jnilib | Bin
test/files/jvm/manifests-new.check | 58 +
test/files/jvm/manifests-new.scala | 111 +
.../jvm/{manifests.check => manifests-old.check} | 0
test/files/jvm/manifests-old.scala | 109 +
test/files/jvm/manifests.scala | 119 -
test/files/jvm/mkLibNatives.bat | 140 +-
test/files/jvm/mkLibNatives.sh | 0
test/files/jvm/non-fatal-tests.scala | 47 +
test/files/jvm/patmat_opt_ignore_underscore.check | 1 +
test/files/jvm/patmat_opt_ignore_underscore.flags | 1 +
.../patmat_opt_ignore_underscore/Analyzed_1.scala | 29 +
.../jvm/patmat_opt_ignore_underscore/test.scala | 15 +
test/files/jvm/patmat_opt_no_nullcheck.check | 1 +
test/files/jvm/patmat_opt_no_nullcheck.flags | 1 +
.../jvm/patmat_opt_no_nullcheck/Analyzed_1.scala | 24 +
test/files/jvm/patmat_opt_no_nullcheck/test.scala | 8 +
test/files/jvm/patmat_opt_primitive_typetest.check | 1 +
test/files/jvm/patmat_opt_primitive_typetest.flags | 1 +
.../patmat_opt_primitive_typetest/Analyzed_1.scala | 24 +
.../jvm/patmat_opt_primitive_typetest/test.scala | 8 +
test/files/jvm/protectedacc.scala | 2 +-
test/files/jvm/scala-concurrent-tck.scala | 780 +
test/files/jvm/serialization-new.check | 337 +
test/files/jvm/serialization-new.scala | 671 +
test/files/jvm/serialization.check | 61 +-
test/files/jvm/serialization.scala | 66 +-
test/files/jvm/si5471.check | 2 +
test/files/jvm/si5471.scala | 17 +
test/files/jvm/signum.scala | 21 +
test/files/jvm/stringbuilder.check | 17 +
test/files/jvm/stringbuilder.scala | 84 +-
test/files/jvm/t0632.check | 24 +-
test/files/jvm/t1118.check | 11 +
test/files/jvm/t1118.scala | 21 +
test/files/jvm/t1652.check | 2 -
test/files/jvm/t2104.scala | 2 +-
test/files/jvm/{bug560bis.check => t560bis.check} | 0
test/files/jvm/{bug560bis.scala => t560bis.scala} | 0
test/files/jvm/t6172.scala | 3005 ++++
test/files/jvm/{bug676.check => t676.check} | 0
test/files/jvm/{bug676.scala => t676.scala} | 0
test/files/jvm/{bug680.check => t680.check} | 0
test/files/jvm/{bug680.scala => t680.scala} | 0
test/files/jvm/t6941.check | 1 +
test/files/jvm/t6941.flags | 1 +
test/files/jvm/t6941/Analyzed_1.scala | 11 +
test/files/jvm/t6941/test.scala | 15 +
test/files/jvm/t7146.check | 5 +
test/files/jvm/t7146.scala | 23 +
test/files/jvm/t7253.check | 1 +
test/files/jvm/t7253/Base_1.scala | 5 +
test/files/jvm/t7253/JavaClient_1.java | 9 +
test/files/jvm/t7253/ScalaClient_1.scala | 9 +
test/files/jvm/t7253/test.scala | 28 +
test/files/jvm/throws-annot-from-java.check | 47 +
.../PolymorphicException_1.scala | 3 +
test/files/jvm/throws-annot-from-java/Test_3.scala | 29 +
.../ThrowsDeclaration_2.java | 6 +
test/files/jvm/try-type-tests.scala | 144 +
test/files/jvm/typerep.scala | 2 +-
test/files/jvm/unittest_io_Jvm.check | 6 +
test/files/jvm/unittest_io_Jvm.scala | 31 +-
test/files/jvm/unittest_xml.scala | 126 +-
test/files/jvm/xml01.check | 4 +-
test/files/jvm/xml01.scala | 169 +-
test/files/jvm/{bug680.check => xml02.check} | 0
test/files/jvm/xml02.scala | 62 +-
test/files/jvm/xml03syntax.check | 13 +-
test/files/jvm/xml03syntax.scala | 33 +-
test/files/jvm/xml04embed.check | 3 +
test/files/jvm/xml04embed.scala | 13 +-
test/files/jvm/xml05.check | 11 +
test/files/jvm/xml05.scala | 7 +
test/files/jvm/xmlattr.check | 18 +
test/files/jvm/xmlattr.scala | 67 +-
test/files/jvm/xmlstuff.scala | 302 +-
test/files/lib/annotations.jar | Bin 2242 -> 0 bytes
test/files/lib/annotations.jar.desired.sha1 | 2 +-
test/files/lib/enums.jar | Bin 1372 -> 0 bytes
test/files/lib/enums.jar.desired.sha1 | 2 +-
test/files/lib/genericNest.jar | Bin 1136 -> 0 bytes
test/files/lib/genericNest.jar.desired.sha1 | 2 +-
test/files/lib/methvsfield.jar | Bin 609 -> 0 bytes
test/files/lib/methvsfield.jar.desired.sha1 | 2 +-
test/files/lib/nest.jar | Bin 2920 -> 0 bytes
test/files/lib/nest.jar.desired.sha1 | 2 +-
test/files/lib/scalacheck.jar | Bin 746629 -> 0 bytes
test/files/lib/scalacheck.jar.desired.sha1 | 2 +-
test/files/neg/abstract-concrete-methods.check | 5 +
test/files/neg/abstract-concrete-methods.scala | 10 +
test/files/neg/abstract-report.check | 24 +
test/files/neg/abstract-report.scala | 1 +
test/files/neg/abstract-report2.check | 103 +
test/files/neg/abstract-report2.scala | 11 +
.../neg/abstraction-from-volatile-type-error.check | 4 +
.../neg/abstraction-from-volatile-type-error.scala | 11 +
test/files/neg/ambiguous-float-dots.check | 16 +
test/files/neg/ambiguous-float-dots.flags | 1 +
test/files/neg/ambiguous-float-dots.scala | 14 +
test/files/neg/ambiguous-float-dots2.check | 10 +
test/files/neg/ambiguous-float-dots2.flags | 1 +
test/files/neg/ambiguous-float-dots2.scala | 14 +
test/files/neg/annot-nonconst.check | 4 +-
test/files/neg/annot-nonconst.scala | 4 +-
test/files/neg/any-vs-anyref.check | 64 +
test/files/neg/any-vs-anyref.scala | 29 +
test/files/neg/anytrait.check | 7 +
test/files/neg/anytrait.scala | 10 +
test/files/neg/anyval-anyref-parent.check | 23 +
test/files/neg/anyval-anyref-parent.scala | 12 +
test/files/neg/anyval-sealed.check | 12 -
test/files/neg/anyval-sealed.scala | 6 -
test/files/neg/applydynamic_sip.check | 58 +
test/files/neg/applydynamic_sip.flags | 1 +
test/files/neg/applydynamic_sip.scala | 33 +
test/files/neg/array-not-seq.check | 12 +-
test/files/neg/bug0418.check | 7 -
test/files/neg/bug1010.check | 6 -
test/files/neg/bug1011.check | 4 -
test/files/neg/bug1017.check | 4 -
test/files/neg/bug1038.check | 5 -
test/files/neg/bug1041.check | 6 -
test/files/neg/bug1106.check | 7 -
test/files/neg/bug1112.check | 4 -
test/files/neg/bug112706A.check | 6 -
test/files/neg/bug1181.check | 4 -
test/files/neg/bug1183.check | 17 -
test/files/neg/bug1183.scala | 38 -
test/files/neg/bug1224.check | 4 -
test/files/neg/bug1241.check | 4 -
test/files/neg/bug1275.check | 6 -
test/files/neg/bug1286.check | 7 -
test/files/neg/bug1364.check | 5 -
test/files/neg/bug1523.check | 4 -
test/files/neg/bug1623.check | 4 -
test/files/neg/bug1838.check | 7 -
test/files/neg/bug1845.check | 4 -
test/files/neg/bug1845.scala | 10 -
test/files/neg/bug1872.check | 4 -
test/files/neg/bug1878.check | 15 -
test/files/neg/bug1878.scala | 15 -
test/files/neg/bug1909b.check | 4 -
test/files/neg/bug1960.check | 4 -
test/files/neg/bug200.check | 4 -
test/files/neg/bug2102.check | 6 -
test/files/neg/bug2144.check | 4 -
test/files/neg/bug2148.check | 4 -
test/files/neg/bug2206.check | 5 -
test/files/neg/bug2213.check | 15 -
test/files/neg/bug2275a.check | 13 -
test/files/neg/bug2275b.check | 10 -
test/files/neg/bug2441.check | 4 -
test/files/neg/bug276.check | 5 -
test/files/neg/bug278.check | 10 -
test/files/neg/bug278.scala | 6 -
test/files/neg/bug284.check | 8 -
test/files/neg/bug3209.check | 4 -
test/files/neg/bug343.check | 4 -
test/files/neg/bug3631.check | 4 -
test/files/neg/bug3683a.check | 6 -
test/files/neg/bug3683b.check | 8 -
test/files/neg/bug3714-neg.check | 13 -
test/files/neg/bug3714-neg.scala | 41 -
test/files/neg/bug3736.check | 16 -
test/files/neg/bug3909.check | 4 -
test/files/neg/bug391.check | 13 -
test/files/neg/bug3913.check | 4 -
test/files/neg/bug4069.check | 16 -
test/files/neg/bug409.check | 4 -
test/files/neg/bug412.check | 5 -
test/files/neg/bug414.check | 11 -
test/files/neg/bug4158.check | 19 -
test/files/neg/bug4166.check | 4 -
test/files/neg/bug4166.scala | 11 -
test/files/neg/bug4174.check | 4 -
test/files/neg/bug418.check | 7 -
test/files/neg/bug4196.check | 4 -
test/files/neg/bug421.check | 4 -
test/files/neg/bug4217.check | 4 -
test/files/neg/bug4221.check | 6 -
test/files/neg/bug4302.check | 4 -
test/files/neg/bug4419.check | 4 -
test/files/neg/bug4419.scala | 3 -
test/files/neg/bug452.check | 6 -
test/files/neg/bug4584.check | 4 -
test/files/neg/bug464-neg.check | 16 -
test/files/neg/bug4727.check | 11 -
test/files/neg/bug473.check | 4 -
test/files/neg/bug500.check | 4 -
test/files/neg/bug501.check | 4 -
test/files/neg/bug510.check | 4 -
test/files/neg/bug512.check | 4 -
test/files/neg/bug515.check | 6 -
test/files/neg/bug520.check | 4 -
test/files/neg/bug521.check | 15 -
test/files/neg/bug545.check | 7 -
test/files/neg/bug550.check | 7 -
test/files/neg/bug556.check | 4 -
test/files/neg/bug558.check | 4 -
test/files/neg/bug562.check | 4 -
test/files/neg/bug563.check | 4 -
test/files/neg/bug563.scala | 7 -
test/files/neg/bug565.check | 5 -
test/files/neg/bug576.check | 4 -
test/files/neg/bug585.check | 4 -
test/files/neg/bug588.check | 13 -
test/files/neg/bug591.check | 4 -
test/files/neg/bug593.check | 4 -
test/files/neg/bug608.check | 6 -
test/files/neg/bug630.check | 5 -
test/files/neg/bug631.check | 4 -
test/files/neg/bug633.check | 4 -
test/files/neg/bug639.check | 4 -
test/files/neg/bug649.check | 4 -
test/files/neg/bug650.check | 4 -
test/files/neg/bug663.check | 7 -
test/files/neg/bug664.check | 7 -
test/files/neg/bug667.check | 4 -
test/files/neg/bug668.check | 4 -
test/files/neg/bug677.check | 6 -
test/files/neg/bug691.check | 4 -
test/files/neg/bug692.check | 24 -
test/files/neg/bug693.check | 4 -
test/files/neg/bug696.check | 5 -
test/files/neg/bug700.check | 4 -
test/files/neg/bug708.check | 5 -
test/files/neg/bug712.check | 4 -
test/files/neg/bug715.check | 4 -
test/files/neg/bug729.check | 6 -
test/files/neg/bug752.check | 6 -
test/files/neg/bug765.check | 4 -
test/files/neg/bug766.check | 4 -
test/files/neg/bug779.check | 4 -
test/files/neg/bug783.check | 6 -
test/files/neg/bug798.check | 4 -
test/files/neg/bug800.check | 13 -
test/files/neg/bug835.check | 9 -
test/files/neg/bug836.check | 6 -
test/files/neg/bug845.check | 4 -
test/files/neg/bug846.check | 6 -
test/files/neg/bug856.check | 6 -
test/files/neg/bug875.check | 17 -
test/files/neg/bug876.check | 4 -
test/files/neg/bug877.check | 7 -
test/files/neg/bug882.check | 4 -
test/files/neg/bug900.check | 10 -
test/files/neg/bug908.check | 4 -
test/files/neg/bug909.check | 6 -
test/files/neg/bug910.check | 6 -
test/files/neg/bug935.check | 7 -
test/files/neg/bug944.check | 4 -
test/files/neg/bug960.check | 10 -
test/files/neg/bug960.scala | 20 -
test/files/neg/bug961.check | 4 -
test/files/neg/bug987.check | 19 -
test/files/neg/bug997.check | 13 -
test/files/neg/bug997.scala | 15 -
test/files/neg/case-collision.check | 10 +
.../{anyval-sealed.flags => case-collision.flags} | 0
test/files/neg/case-collision.scala | 11 +
test/files/neg/caseinherit.check | 18 +-
test/files/neg/caseinherit.scala | 17 +-
test/files/neg/catch-all.check | 10 +
test/files/neg/{bug3683a.flags => catch-all.flags} | 0
test/files/neg/catch-all.scala | 31 +
test/files/neg/checksensible.check | 53 +-
test/files/neg/checksensible.scala | 11 +-
.../neg/classmanifests_new_deprecations.check | 25 +
.../neg/classmanifests_new_deprecations.flags | 1 +
.../neg/classmanifests_new_deprecations.scala | 37 +
test/files/neg/classtags_contextbound_a.check | 4 +
test/files/neg/classtags_contextbound_a.scala | 4 +
test/files/neg/classtags_contextbound_b.check | 4 +
test/files/neg/classtags_contextbound_b.scala | 7 +
test/files/neg/classtags_contextbound_c.check | 4 +
test/files/neg/classtags_contextbound_c.scala | 7 +
test/files/neg/classtags_dont_use_typetags.check | 4 +
test/files/neg/classtags_dont_use_typetags.scala | 5 +
test/files/neg/constructor-prefix-error.check | 4 +
test/files/neg/constructor-prefix-error.scala | 7 +
test/files/neg/cyclics-import.check | 15 +
test/files/neg/cyclics-import.scala | 17 +
test/{pending => files}/neg/dbldef.check | 0
test/{pending => files}/neg/dbldef.scala | 0
test/files/neg/deadline-inf-illegal.check | 15 +
test/files/neg/deadline-inf-illegal.scala | 8 +
test/files/neg/delayed-init-ref.check | 10 +
test/files/neg/delayed-init-ref.flags | 1 +
test/files/neg/delayed-init-ref.scala | 42 +
test/files/neg/depmet_1.flags | 1 -
...or_dependentMethodTpeConversionToFunction.check | 4 +
...or_dependentMethodTpeConversionToFunction.scala | 5 +
test/files/neg/error_tooManyArgsPattern.check | 4 +
test/files/neg/error_tooManyArgsPattern.scala | 5 +
test/files/neg/eta-expand-star.check | 4 +
test/files/neg/eta-expand-star.scala | 8 +
test/files/neg/exhausting.check | 34 +-
test/files/neg/exhausting.flags | 2 +-
test/files/neg/exhausting.scala | 26 +-
test/files/neg/finitary-error.check | 4 +
test/files/neg/finitary-error.scala | 3 +
test/files/neg/for-comprehension-old.check | 26 +
test/files/neg/for-comprehension-old.flags | 1 +
test/files/neg/for-comprehension-old.scala | 11 +
test/files/neg/found-req-variance.check | 10 +-
test/files/neg/gadts1.check | 4 +-
test/files/neg/hk-bad-bounds.check | 4 +
test/files/neg/hk-bad-bounds.scala | 5 +
test/files/neg/implicit-shadow.check | 11 +
test/files/neg/implicit-shadow.flags | 1 +
test/files/neg/implicit-shadow.scala | 13 +
test/files/neg/implicits.check | 7 +-
test/files/neg/implicits.scala | 16 +
...interop_abstypetags_arenot_classmanifests.check | 4 +
...interop_abstypetags_arenot_classmanifests.scala | 11 +
.../neg/interop_abstypetags_arenot_classtags.check | 4 +
.../neg/interop_abstypetags_arenot_classtags.scala | 12 +
.../neg/interop_abstypetags_arenot_manifests.check | 4 +
.../neg/interop_abstypetags_arenot_manifests.scala | 11 +
.../interop_classmanifests_arenot_typetags.check | 4 +
.../interop_classmanifests_arenot_typetags.scala | 11 +
.../neg/interop_classtags_arenot_manifests.check | 4 +
.../neg/interop_classtags_arenot_manifests.scala | 11 +
.../interop_typetags_arenot_classmanifests.check | 4 +
.../interop_typetags_arenot_classmanifests.scala | 11 +
.../neg/interop_typetags_arenot_classtags.check | 4 +
.../neg/interop_typetags_arenot_classtags.scala | 12 +
...petags_without_classtags_arenot_manifests.check | 6 +
...petags_without_classtags_arenot_manifests.scala | 12 +
.../files/neg/javaConversions-2.10-ambiguity.check | 6 +
.../files/neg/javaConversions-2.10-ambiguity.scala | 10 +
test/files/neg/logImplicits.check | 19 +
test/files/neg/logImplicits.flags | 1 +
test/files/neg/logImplicits.scala | 25 +
test/files/neg/macro-abort.check | 4 +
test/files/neg/macro-abort/Macros_1.scala | 9 +
test/files/neg/macro-abort/Test_2.scala | 3 +
test/files/neg/macro-basic-mamdmi.check | 4 +
test/files/neg/macro-basic-mamdmi.flags | 1 +
.../macro-basic-mamdmi/Impls_Macros_Test_1.scala | 37 +
test/files/neg/macro-cyclic.check | 4 +
test/files/neg/macro-cyclic.flags | 1 +
test/files/neg/macro-cyclic/Impls_Macros_1.scala | 25 +
test/files/neg/macro-deprecate-idents.check | 52 +
test/files/neg/macro-deprecate-idents.flags | 1 +
test/files/neg/macro-deprecate-idents.scala | 56 +
test/files/neg/macro-divergence-controlled.check | 4 +
.../Impls_Macros_1.scala | 23 +
.../neg/macro-divergence-controlled/Test_2.scala | 3 +
test/files/neg/macro-exception.check | 7 +
test/files/neg/macro-exception/Macros_1.scala | 9 +
test/files/neg/macro-exception/Test_2.scala | 3 +
.../neg/macro-false-deprecation-warning.check | 4 +
.../neg/macro-false-deprecation-warning.flags | 1 +
.../Impls_Macros_1.scala | 15 +
test/files/neg/macro-invalidimpl-a.check | 4 +
test/files/neg/macro-invalidimpl-a.flags | 1 +
test/files/neg/macro-invalidimpl-a/Impls_1.scala | 5 +
.../neg/macro-invalidimpl-a/Macros_Test_2.scala | 9 +
test/files/neg/macro-invalidimpl-b.check | 4 +
test/files/neg/macro-invalidimpl-b.flags | 1 +
test/files/neg/macro-invalidimpl-b/Impls_1.scala | 5 +
.../neg/macro-invalidimpl-b/Macros_Test_2.scala | 9 +
test/files/neg/macro-invalidimpl-c.check | 4 +
test/files/neg/macro-invalidimpl-c.flags | 1 +
.../neg/macro-invalidimpl-c/Impls_Macros_1.scala | 9 +
test/files/neg/macro-invalidimpl-c/Test_2.scala | 3 +
test/files/neg/macro-invalidimpl-d.check | 4 +
test/files/neg/macro-invalidimpl-d.flags | 1 +
test/files/neg/macro-invalidimpl-d/Impls_1.scala | 7 +
.../neg/macro-invalidimpl-d/Macros_Test_2.scala | 7 +
test/files/neg/macro-invalidimpl-e.check | 13 +
test/files/neg/macro-invalidimpl-e.flags | 1 +
test/files/neg/macro-invalidimpl-e/Impls_1.scala | 6 +
.../neg/macro-invalidimpl-e/Macros_Test_2.scala | 9 +
test/files/neg/macro-invalidimpl-f.check | 7 +
test/files/neg/macro-invalidimpl-f.flags | 1 +
test/files/neg/macro-invalidimpl-f/Impls_1.scala | 11 +
.../neg/macro-invalidimpl-f/Macros_Test_2.scala | 9 +
test/files/neg/macro-invalidimpl-g.check | 7 +
test/files/neg/macro-invalidimpl-g.flags | 1 +
test/files/neg/macro-invalidimpl-g/Impls_1.scala | 11 +
.../neg/macro-invalidimpl-g/Macros_Test_2.scala | 8 +
test/files/neg/macro-invalidimpl-h.check | 4 +
test/files/neg/macro-invalidimpl-h.flags | 1 +
test/files/neg/macro-invalidimpl-h/Impls_1.scala | 5 +
.../neg/macro-invalidimpl-h/Macros_Test_2.scala | 8 +
test/files/neg/macro-invalidimpl-i.check | 4 +
test/files/neg/macro-invalidimpl-i.flags | 1 +
test/files/neg/macro-invalidimpl-i/Impls_1.scala | 7 +
.../neg/macro-invalidimpl-i/Macros_Test_2.scala | 5 +
test/files/neg/macro-invalidret-nontree.check | 7 +
test/files/neg/macro-invalidret-nontree.flags | 1 +
.../neg/macro-invalidret-nontree/Impls_1.scala | 5 +
.../macro-invalidret-nontree/Macros_Test_2.scala | 8 +
.../neg/macro-invalidret-nonuniversetree.check | 7 +
.../neg/macro-invalidret-nonuniversetree.flags | 1 +
.../macro-invalidret-nonuniversetree/Impls_1.scala | 6 +
.../Macros_Test_2.scala | 8 +
test/files/neg/macro-invalidshape-a.check | 5 +
test/files/neg/macro-invalidshape-a.flags | 1 +
test/files/neg/macro-invalidshape-a/Impls_1.scala | 5 +
.../neg/macro-invalidshape-a/Macros_Test_2.scala | 8 +
test/files/neg/macro-invalidshape-b.check | 5 +
test/files/neg/macro-invalidshape-b.flags | 1 +
test/files/neg/macro-invalidshape-b/Impls_1.scala | 5 +
.../neg/macro-invalidshape-b/Macros_Test_2.scala | 8 +
test/files/neg/macro-invalidshape-c.check | 9 +
test/files/neg/macro-invalidshape-c.flags | 1 +
test/files/neg/macro-invalidshape-c/Impls_1.scala | 5 +
.../neg/macro-invalidshape-c/Macros_Test_2.scala | 8 +
test/files/neg/macro-invalidshape-d.check | 8 +
test/files/neg/macro-invalidshape-d.flags | 1 +
test/files/neg/macro-invalidshape-d/Impls_1.scala | 5 +
.../neg/macro-invalidshape-d/Macros_Test_2.scala | 8 +
.../neg/macro-invalidsig-context-bounds.check | 4 +
.../neg/macro-invalidsig-context-bounds.flags | 1 +
.../macro-invalidsig-context-bounds/Impls_1.scala | 9 +
.../Macros_Test_1.scala | 8 +
test/files/neg/macro-invalidsig-ctx-badargc.check | 7 +
test/files/neg/macro-invalidsig-ctx-badargc.flags | 1 +
.../neg/macro-invalidsig-ctx-badargc/Impls_1.scala | 3 +
.../Macros_Test_2.scala | 8 +
test/files/neg/macro-invalidsig-ctx-badtype.check | 7 +
test/files/neg/macro-invalidsig-ctx-badtype.flags | 1 +
.../neg/macro-invalidsig-ctx-badtype/Impls_1.scala | 5 +
.../Macros_Test_2.scala | 8 +
.../neg/macro-invalidsig-ctx-badvarargs.check | 7 +
.../neg/macro-invalidsig-ctx-badvarargs.flags | 1 +
.../macro-invalidsig-ctx-badvarargs/Impls_1.scala | 5 +
.../Macros_Test_2.scala | 8 +
test/files/neg/macro-invalidsig-ctx-noctx.check | 7 +
test/files/neg/macro-invalidsig-ctx-noctx.flags | 1 +
.../neg/macro-invalidsig-ctx-noctx/Impls_1.scala | 5 +
.../macro-invalidsig-ctx-noctx/Macros_Test_2.scala | 8 +
.../neg/macro-invalidsig-implicit-params.check | 4 +
.../neg/macro-invalidsig-implicit-params.flags | 1 +
.../Impls_Macros_1.scala | 19 +
.../macro-invalidsig-implicit-params/Test_2.scala | 4 +
.../neg/macro-invalidsig-params-badargc.check | 7 +
.../neg/macro-invalidsig-params-badargc.flags | 1 +
.../Impls_Macros_1.scala | 9 +
.../macro-invalidsig-params-badargc/Test_2.scala | 4 +
.../neg/macro-invalidsig-params-badtype.check | 7 +
.../neg/macro-invalidsig-params-badtype.flags | 1 +
.../Impls_Macros_1.scala | 9 +
.../macro-invalidsig-params-badtype/Test_2.scala | 4 +
.../neg/macro-invalidsig-params-badvarargs.check | 7 +
.../neg/macro-invalidsig-params-badvarargs.flags | 1 +
.../Impls_Macros_1.scala | 9 +
.../Test_2.scala | 4 +
.../neg/macro-invalidsig-params-namemismatch.check | 7 +
.../neg/macro-invalidsig-params-namemismatch.flags | 1 +
.../Impls_Macros_1.scala | 9 +
.../Test_2.scala | 4 +
.../neg/macro-invalidsig-tparams-badtype.check | 7 +
.../neg/macro-invalidsig-tparams-badtype.flags | 1 +
.../macro-invalidsig-tparams-badtype/Impls_1.scala | 5 +
.../Macros_Test_2.scala | 8 +
.../neg/macro-invalidsig-tparams-bounds-a.check | 4 +
.../neg/macro-invalidsig-tparams-bounds-a.flags | 1 +
.../Impls_1.scala | 5 +
.../Macros_Test_2.scala | 8 +
.../neg/macro-invalidsig-tparams-bounds-b.check | 4 +
.../neg/macro-invalidsig-tparams-bounds-b.flags | 1 +
.../Impls_1.scala | 5 +
.../Macros_Test_2.scala | 8 +
.../neg/macro-invalidsig-tparams-notparams-a.check | 4 +
.../neg/macro-invalidsig-tparams-notparams-a.flags | 1 +
.../Impls_1.scala | 6 +
.../Macros_Test_2.scala | 8 +
.../neg/macro-invalidsig-tparams-notparams-b.check | 4 +
.../neg/macro-invalidsig-tparams-notparams-b.flags | 1 +
.../Impls_1.scala | 11 +
.../Macros_Test_2.scala | 11 +
.../neg/macro-invalidsig-tparams-notparams-c.check | 4 +
.../neg/macro-invalidsig-tparams-notparams-c.flags | 1 +
.../Impls_1.scala | 12 +
.../Macros_Test_2.scala | 11 +
test/files/neg/macro-invalidusage-badargs.check | 6 +
test/files/neg/macro-invalidusage-badargs.flags | 1 +
.../neg/macro-invalidusage-badargs/Impls_1.scala | 5 +
.../macro-invalidusage-badargs/Macros_Test_2.scala | 8 +
.../files/neg/macro-invalidusage-badbounds-a.check | 4 +
.../files/neg/macro-invalidusage-badbounds-a.flags | 1 +
.../macro-invalidusage-badbounds-a/Impls_1.scala | 5 +
.../Macros_Test_2.scala | 8 +
test/files/neg/macro-invalidusage-badtargs.check | 4 +
test/files/neg/macro-invalidusage-badtargs.flags | 1 +
.../neg/macro-invalidusage-badtargs/Impls_1.scala | 5 +
.../Macros_Test_2.scala | 8 +
.../neg/macro-invalidusage-methodvaluesyntax.check | 4 +
.../neg/macro-invalidusage-methodvaluesyntax.flags | 1 +
.../Impls_1.scala | 9 +
.../Macros_Test_2.scala | 8 +
test/files/neg/macro-noexpand.check | 4 +
test/files/neg/macro-noexpand.flags | 1 +
test/files/neg/macro-noexpand/Impls_1.scala | 5 +
test/files/neg/macro-noexpand/Macros_Test_2.scala | 8 +
test/files/neg/macro-nontypeablebody.check | 4 +
test/files/neg/macro-nontypeablebody.flags | 1 +
test/files/neg/macro-nontypeablebody/Impls_1.scala | 5 +
.../neg/macro-nontypeablebody/Macros_Test_2.scala | 8 +
...verride-macro-overrides-abstract-method-a.check | 5 +
...verride-macro-overrides-abstract-method-a.flags | 1 +
.../Impls_Macros_1.scala | 13 +
.../Test_2.scala | 4 +
...verride-macro-overrides-abstract-method-b.check | 11 +
...verride-macro-overrides-abstract-method-b.flags | 1 +
.../Impls_Macros_1.scala | 8 +
.../Test_2.scala | 6 +
.../macro-override-method-overrides-macro.check | 5 +
.../macro-override-method-overrides-macro.flags | 1 +
.../Impls_1.scala | 15 +
.../Macros_Test_2.scala | 15 +
test/files/neg/macro-qmarkqmarkqmark.check | 13 +
test/files/neg/macro-qmarkqmarkqmark.scala | 13 +
.../macro-reify-typetag-hktypeparams-notags.check | 7 +
.../Test.scala | 9 +
.../macro-reify-typetag-typeparams-notags.check | 7 +
.../Test.scala | 9 +
.../neg/macro-reify-typetag-useabstypetag.check | 7 +
.../macro-reify-typetag-useabstypetag/Test.scala | 9 +
test/files/neg/macro-without-xmacros-a.check | 17 +
.../neg/macro-without-xmacros-a/Impls_1.scala | 18 +
.../neg/macro-without-xmacros-a/Macros_2.scala | 12 +
.../files/neg/macro-without-xmacros-a/Test_3.scala | 4 +
test/files/neg/macro-without-xmacros-b.check | 17 +
.../neg/macro-without-xmacros-b/Impls_1.scala | 18 +
.../neg/macro-without-xmacros-b/Macros_2.scala | 10 +
.../files/neg/macro-without-xmacros-b/Test_3.scala | 4 +
test/files/neg/main1.check | 26 +
test/files/{run/bug4317.flags => neg/main1.flags} | 0
test/files/neg/main1.scala | 45 +
test/files/neg/migration28.check | 13 +-
test/files/neg/migration28.scala | 5 +-
test/files/neg/multi-array.check | 6 +-
test/files/neg/multi-array.scala | 13 +-
test/files/neg/names-defaults-neg.check | 71 +-
test/files/neg/names-defaults-neg.flags | 1 +
test/files/neg/nested-fn-print.check | 6 +-
test/files/neg/newpat_unreachable.check | 27 +
...nyval-sealed.flags => newpat_unreachable.flags} | 0
test/files/neg/newpat_unreachable.scala | 29 +
test/files/neg/no-implicit-to-anyref.check | 28 +
test/files/neg/no-implicit-to-anyref.scala | 29 +
test/files/neg/no-predef.check | 14 +
test/files/neg/no-predef.flags | 1 +
test/files/neg/no-predef.scala | 5 +
test/files/neg/not-possible-cause.check | 9 +
test/files/neg/not-possible-cause.scala | 3 +
test/files/neg/object-not-a-value.check | 4 +
test/files/neg/object-not-a-value.scala | 7 +
test/files/neg/overloaded-implicit.flags | 2 +-
test/files/neg/overloaded-unapply.check | 14 +
test/files/neg/overloaded-unapply.scala | 24 +
test/files/neg/override-object-flag.check | 5 +
test/files/neg/override-object-flag.scala | 3 +
test/files/neg/override-object-no.check | 23 +
test/files/neg/override-object-no.flags | 1 +
test/files/neg/override-object-no.scala | 45 +
test/files/neg/override.check | 4 +-
test/files/neg/override.scala | 2 +-
test/files/neg/package-ob-case.check | 5 +
test/files/neg/package-ob-case.flags | 1 +
test/files/neg/package-ob-case.scala | 5 +
test/files/neg/parent-inherited-twice-error.check | 7 +
test/files/neg/parent-inherited-twice-error.scala | 2 +
test/files/neg/pat_unreachable.check | 8 +-
test/files/neg/pat_unreachable.flags | 1 +
test/files/neg/pat_unreachable.scala | 8 +-
test/files/neg/patmat-type-check.check | 12 +-
test/files/neg/patmatexhaust.check | 56 +-
test/files/neg/patmatexhaust.flags | 2 +-
test/files/neg/patmatexhaust.scala | 62 +-
test/files/neg/permanent-blindness.check | 10 +
...caseinherit.flags => permanent-blindness.flags} | 0
test/files/neg/permanent-blindness.scala | 22 +
test/files/neg/primitive-sigs-1.check | 8 +-
.../neg/primitive-sigs-1/{A_1.scala => A.scala} | 0
test/files/neg/primitive-sigs-1/A_3.scala | 5 -
test/files/neg/primitive-sigs-1/B.scala | 5 +
test/files/neg/primitive-sigs-1/J.java | 8 +
test/files/neg/primitive-sigs-1/J_2.java | 8 -
test/files/neg/protected-constructors.check | 8 +-
test/files/neg/protected-static-fail.check | 16 +
test/files/neg/protected-static-fail/J.java | 7 +
test/files/neg/protected-static-fail/S.scala | 10 +
test/files/neg/protected-static-fail/S0.scala | 9 +
test/files/neg/qualifying-class-error-1.check | 4 +
test/files/neg/qualifying-class-error-1.scala | 2 +
test/files/neg/qualifying-class-error-2.check | 4 +
test/files/neg/qualifying-class-error-2.scala | 11 +
test/files/neg/reassignment.check | 13 +
test/files/neg/reassignment.scala | 7 +
test/files/neg/reflection-names-neg.check | 10 +
test/files/neg/reflection-names-neg.scala | 6 +
test/files/neg/reify_ann2b.check | 4 +
test/files/neg/reify_ann2b.scala | 28 +
.../reify_metalevel_breach_+0_refers_to_1.check | 7 +
.../reify_metalevel_breach_+0_refers_to_1.scala | 16 +
.../reify_metalevel_breach_-1_refers_to_0_a.check | 7 +
.../reify_metalevel_breach_-1_refers_to_0_a.scala | 14 +
.../reify_metalevel_breach_-1_refers_to_0_b.check | 7 +
.../reify_metalevel_breach_-1_refers_to_0_b.scala | 18 +
.../reify_metalevel_breach_-1_refers_to_1.check | 7 +
.../reify_metalevel_breach_-1_refers_to_1.scala | 16 +
.../neg/reify_nested_inner_refers_to_local.check | 7 +
.../neg/reify_nested_inner_refers_to_local.scala | 15 +
test/files/neg/saferJavaConversions.check | 6 +
test/files/neg/saferJavaConversions.scala | 20 +
test/files/neg/scopes.check | 4 +
test/files/neg/sealed-java-enums.check | 7 +-
test/files/neg/sensitive2.check | 10 +
test/files/neg/sensitive2.scala | 8 +
test/files/neg/specification-scopes.check | 12 +
test/files/neg/specification-scopes/P_1.scala | 6 +
test/files/neg/specification-scopes/P_2.scala | 21 +
test/files/neg/stmt-expr-discard.check | 7 +
.../{caseinherit.flags => stmt-expr-discard.flags} | 0
test/files/neg/stmt-expr-discard.scala | 5 +
test/files/neg/stringinterpolation_macro-neg.check | 70 +
test/files/neg/stringinterpolation_macro-neg.scala | 31 +
test/files/neg/structural.check | 23 +-
test/files/neg/suggest-similar.check | 10 +
test/files/neg/suggest-similar.scala | 11 +
test/files/neg/switch.check | 9 +-
test/files/neg/{caseinherit.flags => switch.flags} | 0
test/files/neg/switch.scala | 4 +-
test/files/neg/t0152.check | 2 +-
test/files/neg/t0418.check | 7 +
test/files/neg/{bug0418.scala => t0418.scala} | 0
test/files/neg/t0565.check | 2 +-
test/files/neg/t0673.check | 2 +-
test/files/neg/t0764.check | 2 +-
test/files/neg/t0816.check | 4 +
test/files/{pos => neg}/t0816.scala | 0
test/files/neg/t0903.check | 2 +-
test/files/neg/t1010.check | 6 +
test/files/neg/{bug1010.scala => t1010.scala} | 0
test/files/neg/t1011.check | 4 +
test/files/neg/{bug1011.scala => t1011.scala} | 0
test/files/neg/t1017.check | 4 +
test/files/neg/{bug1017.scala => t1017.scala} | 0
test/files/neg/t1038.check | 5 +
test/files/neg/{bug1038.scala => t1038.scala} | 0
test/files/neg/t1041.check | 6 +
test/files/neg/{bug1041.scala => t1041.scala} | 0
test/files/neg/t1106.check | 7 +
test/files/neg/{bug1106.scala => t1106.scala} | 0
test/files/neg/t1112.check | 4 +
test/files/neg/{bug1112.scala => t1112.scala} | 0
test/files/neg/t112706A.check | 6 +
.../files/neg/{bug112706A.scala => t112706A.scala} | 0
test/files/neg/t1181.check | 8 +
test/files/neg/{bug1181.scala => t1181.scala} | 0
test/files/neg/t1183.check | 17 +
test/files/neg/t1183.scala | 34 +
test/files/neg/t1224.check | 4 +
test/files/neg/{bug1224.scala => t1224.scala} | 0
test/files/neg/t1241.check | 4 +
test/files/neg/{bug1241.scala => t1241.scala} | 0
test/files/neg/t1275.check | 6 +
test/files/neg/{bug1275.scala => t1275.scala} | 0
test/files/neg/t1286.check | 5 +
test/files/neg/{bug1286 => t1286}/a.scala | 0
test/files/neg/{bug1286 => t1286}/b.scala | 0
test/files/neg/t1364.check | 5 +
test/files/neg/{bug1364.scala => t1364.scala} | 0
test/files/neg/t1422.check | 7 +-
test/files/neg/t1422.scala | 2 +-
test/files/neg/t1431.check | 4 +
test/files/neg/t1431.scala | 10 +
test/files/neg/t1432.check | 6 +
test/files/neg/t1432.scala | 12 +
test/files/neg/t1477.check | 2 +-
test/files/neg/t1523.check | 4 +
test/files/neg/{bug1523.scala => t1523.scala} | 0
test/files/neg/{bug1548.check => t1548.check} | 0
test/files/neg/{bug1548 => t1548}/J.java | 0
test/files/neg/{bug1548 => t1548}/S.scala | 0
test/files/neg/t1623.check | 4 +
test/files/neg/{bug1623.scala => t1623.scala} | 0
test/files/neg/t1672b.check | 16 +
test/files/neg/t1672b.scala | 52 +
test/files/neg/t1701.check | 2 +-
test/files/neg/t1838.check | 7 +
test/files/neg/{bug1838.scala => t1838.scala} | 0
test/files/neg/t1845.check | 6 +
test/files/neg/t1845.scala | 10 +
test/files/neg/t1872.check | 8 +
test/files/neg/{bug1872.scala => t1872.scala} | 0
test/files/neg/t1878-typer.check | 4 +
test/files/neg/t1878-typer.scala | 6 +
test/files/neg/t1878.check | 7 +
test/files/neg/t1878.scala | 17 +
test/files/neg/t1909b.check | 4 +
test/files/neg/{bug1909b.scala => t1909b.scala} | 0
test/files/neg/t1960.check | 4 +
test/files/neg/{bug1960.scala => t1960.scala} | 0
test/files/neg/t200.check | 5 +
test/files/neg/{bug200.scala => t200.scala} | 0
test/files/neg/t2070.check | 3 +-
test/files/neg/t2078.check | 2 +-
test/files/neg/t2078.scala | 2 +-
test/files/neg/t2102.check | 6 +
test/files/neg/{bug2102.scala => t2102.scala} | 0
test/files/neg/t2144.check | 4 +
test/files/neg/{bug2144.scala => t2144.scala} | 0
test/files/neg/t2148.check | 4 +
test/files/neg/{bug2148.scala => t2148.scala} | 0
test/files/neg/t2206.check | 5 +
test/files/neg/{bug2206.scala => t2206.scala} | 0
test/files/neg/t2208.check | 2 +-
test/files/neg/t2213.check | 25 +
test/files/neg/{bug2213.scala => t2213.scala} | 0
test/files/neg/t2275a.check | 13 +
test/files/neg/{bug2275a.scala => t2275a.scala} | 0
test/files/neg/t2275b.check | 10 +
test/files/neg/{bug2275b.scala => t2275b.scala} | 0
test/files/neg/t2296a.check | 5 +
test/files/neg/t2296a/J.java | 7 +
test/files/neg/t2296a/S.scala | 18 +
test/files/neg/t2296b.check | 5 +
test/files/neg/t2296b/J_1.java | 7 +
test/files/neg/t2296b/S_2.scala | 18 +
test/files/neg/t2336.check | 2 +-
test/files/neg/t2336.scala | 0
test/files/neg/t2386.check | 4 -
test/files/neg/t2386.scala | 3 -
test/files/neg/t2388.check | 4 +
test/files/neg/t2388.scala | 4 +
test/files/neg/t2405.check | 8 +
test/files/neg/t2405.scala | 10 +
test/files/neg/t2441.check | 4 +
test/files/neg/{bug2441.scala => t2441.scala} | 0
test/files/neg/t2442.check | 9 +
test/files/neg/{caseinherit.flags => t2442.flags} | 0
test/files/neg/t2442/MyEnum.java | 3 +
test/files/neg/t2442/MySecondEnum.java | 6 +
test/files/neg/t2442/t2442.scala | 15 +
test/files/neg/t2488.check | 31 +
test/files/neg/t2488.scala | 12 +
test/files/neg/t2494.scala | 0
test/files/neg/t2641.check | 30 +-
test/files/neg/t2641.scala | 3 +-
test/files/neg/t276.check | 5 +
test/files/neg/{bug276.scala => t276.scala} | 0
test/files/neg/t2773.check | 2 +-
test/files/neg/t2773.scala | 0
test/files/neg/t2775.check | 2 +-
test/files/neg/t2779.check | 1 +
test/files/neg/t2779.scala | 0
test/files/neg/t278.check | 11 +
test/files/neg/t278.scala | 6 +
test/files/neg/t2796.check | 4 +
test/files/neg/{caseinherit.flags => t2796.flags} | 0
test/files/neg/t2796.scala | 28 +
test/files/neg/t284.check | 8 +
test/files/neg/{bug284.scala => t284.scala} | 0
test/files/neg/t2870.check | 8 +-
test/files/neg/t2870.scala | 0
test/files/neg/t2910.check | 2 +-
test/files/neg/t2910.scala | 1 +
test/files/neg/t2918.check | 9 +-
test/files/neg/t2918.scala | 2 +-
test/files/neg/t2968.check | 10 +
test/files/neg/t2968.scala | 26 +
test/files/neg/t2968b.check | 4 +
test/files/neg/t2968b.scala | 7 +
test/files/neg/t3006.check | 2 +-
test/files/neg/t3006.scala | 0
test/files/neg/t3015.check | 11 +-
test/files/neg/t3098.check | 5 +
.../files/neg/{anyval-sealed.flags => t3098.flags} | 0
test/files/neg/t3098/a.scala | 6 +
test/files/neg/t3098/b.scala | 8 +
test/files/neg/t3115.check | 12 -
test/files/neg/t3115.scala | 9 -
test/files/neg/t3189.check | 4 +
.../neg/bug3189.scala => files/neg/t3189.scala} | 0
test/files/neg/t3209.check | 4 +
test/files/neg/{bug3209.scala => t3209.scala} | 0
test/files/neg/t3224.scala | 0
test/files/neg/t3234.check | 2 +
test/files/{pos/bug3234.flags => neg/t3234.flags} | 0
test/files/{pos/bug3234.scala => neg/t3234.scala} | 0
test/files/neg/t3240.check | 4 +
test/files/neg/t3240.scala | 8 +
test/files/neg/t3275.check | 4 +
test/files/neg/t3275.scala | 3 +
test/files/neg/t3392.check | 4 +
test/files/neg/t3392.scala | 11 +
test/files/neg/t343.check | 4 +
test/files/neg/{bug343.scala => t343.scala} | 0
test/files/neg/t3481.check | 29 +
test/files/neg/t3481.scala | 28 +
test/files/neg/t3507-old.check | 4 +
test/files/neg/{t3507.scala => t3507-old.scala} | 0
test/files/neg/t3507.check | 4 -
test/files/neg/t3614.check | 4 +
test/files/neg/t3614.scala | 3 +
test/files/neg/t3631.check | 4 +
test/files/neg/{bug3631.scala => t3631.scala} | 0
test/files/neg/t3683a.check | 5 +
.../neg/{anyval-sealed.flags => t3683a.flags} | 0
test/files/neg/{bug3683a.scala => t3683a.scala} | 0
test/files/neg/t3683b.check | 8 +
test/files/neg/{bug3683b.scala => t3683b.scala} | 0
test/files/neg/t3691.check | 6 +-
test/files/neg/t3692-new.check | 14 +
test/files/neg/t3692-new.flags | 1 +
test/files/neg/t3692-new.scala | 20 +
test/files/neg/t3692-old.check | 14 +
test/files/neg/t3692-old.flags | 1 +
test/files/neg/{t3692.scala => t3692-old.scala} | 0
test/files/neg/t3692.check | 4 -
test/files/neg/t3714-neg.check | 13 +
test/files/neg/t3714-neg.scala | 41 +
test/files/neg/t3736.check | 16 +
test/files/neg/{bug3736.scala => t3736.scala} | 0
test/files/neg/t3761-overload-byname.check | 13 +
test/files/neg/t3761-overload-byname.scala | 13 +
test/files/neg/t3773.check | 4 -
test/files/neg/t3773.flags | 1 -
test/files/neg/t3773.scala | 5 -
test/files/neg/t3816.check | 2 +-
test/files/neg/t3836.check | 13 +
test/files/neg/t3836.scala | 28 +
test/files/neg/t3854.check | 5 +
test/files/neg/t3854.scala | 15 +
test/files/neg/t3873.flags | 1 -
test/files/neg/t3909.check | 4 +
test/files/neg/{bug3909.scala => t3909.scala} | 0
test/files/neg/t391.check | 13 +
test/files/neg/{bug391.scala => t391.scala} | 0
test/files/neg/t3913.check | 4 +
test/files/neg/{bug3913.scala => t3913.scala} | 0
test/files/neg/t3934.check | 2 +-
test/files/neg/t3987.check | 1 +
test/files/neg/t3995.check | 6 +
test/files/neg/t3995.scala | 32 +
test/files/neg/t4044.check | 12 +-
test/files/neg/t4069.check | 16 +
test/files/neg/{bug4069.scala => t4069.scala} | 0
test/files/neg/t409.check | 4 +
test/files/neg/{bug409.scala => t409.scala} | 0
test/files/neg/t4098.check | 13 +
test/files/neg/t4098.scala | 22 +
test/files/neg/t412.check | 5 +
test/files/neg/{bug412.scala => t412.scala} | 0
test/files/neg/t4134.check | 4 +
test/files/neg/t4134.scala | 30 +
test/files/neg/t414.check | 12 +
test/files/neg/{bug414.scala => t414.scala} | 0
test/files/neg/t4158.check | 19 +
test/files/neg/{bug4158.scala => t4158.scala} | 0
test/files/neg/t4163.check | 6 +-
test/files/neg/t4163.scala | 10 +-
test/files/neg/t4166.check | 4 +
test/files/neg/t4166.scala | 11 +
test/files/neg/t4174.check | 4 +
test/files/neg/{bug4174.scala => t4174.scala} | 0
test/files/neg/t418.check | 7 +
test/files/neg/{bug418.scala => t418.scala} | 0
test/files/neg/t4196.check | 4 +
test/files/neg/{bug4196.scala => t4196.scala} | 0
test/files/neg/t421.check | 4 +
test/files/neg/{bug421.scala => t421.scala} | 0
test/files/neg/t4217.check | 4 +
test/files/neg/{bug4217.scala => t4217.scala} | 0
test/files/neg/t4221.check | 6 +
test/files/neg/{bug4221.scala => t4221.scala} | 0
test/files/neg/t425.check | 4 +
test/files/{pos => neg}/t425.scala | 0
test/files/neg/t4270.check | 4 +
test/files/neg/t4270.scala | 6 +
test/files/neg/t4271.check | 10 +
test/files/neg/t4271.scala | 12 +
test/files/neg/t4283b.check | 4 +
test/files/neg/t4283b/AbstractFoo.java | 5 +
.../run/t4283 => files/neg/t4283b}/ScalaBipp.scala | 0
test/files/neg/t4283b/Test.scala | 3 +
test/files/neg/t4302.check | 4 +
test/files/neg/{bug4302.flags => t4302.flags} | 0
test/files/neg/{bug4302.scala => t4302.scala} | 0
test/files/neg/t4417.check | 7 +
test/files/neg/t4417.scala | 17 +
test/files/neg/t4419.check | 4 +
test/files/neg/t4419.scala | 3 +
test/files/neg/t4425.check | 4 +
test/files/neg/t4425.flags | 1 +
test/files/neg/t4425.scala | 4 +
test/files/neg/t4440.check | 13 +
test/files/{pos/bug1439.flags => neg/t4440.flags} | 0
test/files/neg/t4440.scala | 19 +
test/files/neg/t4515.check | 6 +
test/files/neg/t4515.scala | 41 +
test/files/neg/t452.check | 6 +
test/files/neg/{bug452.scala => t452.scala} | 0
test/files/neg/t4537.check | 4 +
test/files/neg/t4537/a.scala | 5 +
test/files/neg/t4537/b.scala | 5 +
test/files/neg/t4537/c.scala | 8 +
test/files/neg/t4541.check | 7 +
test/files/neg/t4541.scala | 16 +
test/files/neg/t4541b.check | 7 +
test/files/neg/t4541b.scala | 16 +
test/files/neg/t4568.check | 4 +
test/files/neg/t4568.scala | 13 +
test/files/neg/t4584.check | 7 +
test/files/neg/{bug4584.scala => t4584.scala} | 0
test/files/neg/t464-neg.check | 16 +
.../files/neg/{bug464-neg.scala => t464-neg.scala} | 0
test/files/neg/t4691_exhaust_extractor.check | 13 +
...inherit.flags => t4691_exhaust_extractor.flags} | 0
test/files/neg/t4691_exhaust_extractor.scala | 33 +
test/files/neg/t4727.check | 11 +
test/files/neg/{bug4727.scala => t4727.scala} | 0
test/files/neg/t473.check | 4 +
test/files/neg/{bug473.scala => t473.scala} | 0
test/files/neg/t4749.check | 28 +
test/files/neg/{caseinherit.flags => t4749.flags} | 0
test/files/neg/t4749.scala | 44 +
test/files/neg/t4762.check | 7 +
test/files/neg/t4762.flags | 1 +
test/files/neg/t4762.scala | 51 +
test/files/neg/t4818.check | 6 +
test/files/neg/t4818.scala | 7 +
test/files/neg/t4831.check | 7 +
test/files/neg/t4831.scala | 11 +
test/files/neg/t4842.check | 7 +
test/files/neg/t4842.scala | 7 +
test/files/neg/t4851.check | 49 +
test/files/neg/t4851.flags | 1 +
test/files/neg/t4851/J.java | 15 +
test/files/neg/t4851/J2.java | 11 +
test/files/neg/t4851/S.scala | 28 +
test/files/neg/t4877.check | 22 +
test/files/neg/t4877.flags | 1 +
test/files/neg/t4877.scala | 22 +
test/files/neg/t4879.check | 13 +
test/files/neg/t4879.scala | 15 +
test/files/neg/t4882.check | 4 +
test/files/neg/t4882.scala | 3 +
test/files/neg/t4928.check | 5 +
test/files/neg/t4928.scala | 4 +
test/files/neg/t4987.check | 4 +
test/files/neg/t4987.scala | 2 +
test/files/neg/t4989.check | 7 +
test/files/neg/t4989.scala | 68 +
test/files/neg/t500.check | 4 +
test/files/neg/{bug500.scala => t500.scala} | 0
test/files/neg/t501.check | 4 +
test/files/neg/{bug501.scala => t501.scala} | 0
test/files/neg/t5031.check | 5 +
test/files/neg/t5031/Id.scala | 4 +
test/files/neg/t5031/package.scala | 3 +
test/files/neg/t5031b.check | 5 +
test/files/neg/t5031b/a.scala | 3 +
test/files/neg/t5031b/b.scala | 3 +
test/files/neg/t5044.check | 9 +
test/files/neg/t5044.scala | 9 +
test/files/neg/t5060.check | 7 +
test/files/neg/t5060.scala | 19 +
test/files/neg/t5063.check | 4 +
test/files/neg/t5063.scala | 3 +
test/files/neg/t5078.check | 13 +
test/files/neg/t5078.scala | 11 +
test/files/neg/t5093.check | 10 +
test/files/neg/t5093.scala | 3 +
test/files/neg/t510.check | 4 +
test/files/neg/{bug510.scala => t510.scala} | 0
test/files/neg/t5106.check | 11 +
test/files/neg/t5106.scala | 5 +
test/files/neg/t512.check | 4 +
test/files/neg/{bug512.scala => t512.scala} | 0
test/files/neg/t5120.check | 12 +
test/files/neg/t5120.scala | 29 +
test/files/neg/t5148.check | 9 +
test/files/neg/t5148.scala | 4 +
test/files/neg/t515.check | 6 +
test/files/neg/{bug515.scala => t515.scala} | 0
test/files/neg/t5152.check | 11 +
test/files/neg/t5152.scala | 17 +
test/files/neg/t5189.check | 6 +
test/files/neg/t5189.scala | 5 +
test/files/neg/t5189_inferred.check | 6 +
test/files/neg/t5189_inferred.scala | 8 +
test/files/neg/t5189b.check | 11 +
test/files/neg/t5189b.scala | 80 +
test/files/neg/t520.check | 4 +
test/files/neg/{bug520.scala => t520.scala} | 0
test/files/neg/t521.check | 15 +
test/files/neg/{bug521.scala => t521.scala} | 0
test/files/neg/t5318.check | 5 +
test/files/neg/t5318.scala | 8 +
test/files/neg/t5318b.check | 5 +
test/files/neg/t5318b.scala | 8 +
test/files/neg/t5318c.check | 5 +
test/files/neg/t5318c.scala | 14 +
test/files/neg/t5340.check | 6 +
test/files/neg/t5340.scala | 29 +
test/files/neg/t5352.check | 13 +
test/files/neg/{caseinherit.flags => t5352.flags} | 0
test/files/neg/t5352.scala | 15 +
test/files/neg/t5354.check | 7 +
test/files/neg/t5354.scala | 15 +
test/files/neg/t5357.check | 4 +
test/files/neg/t5357.scala | 9 +
test/files/neg/t5358.check | 7 +
test/files/neg/t5358.scala | 4 +
test/files/neg/t5361.check | 4 +
test/files/neg/t5361.scala | 3 +
test/files/neg/t5376.check | 11 +
test/files/neg/t5376.scala | 24 +
test/files/neg/t5378.check | 31 +
test/files/neg/t5378.scala | 54 +
test/files/neg/t5390.check | 4 +
test/files/neg/t5390.scala | 10 +
test/files/neg/t5390b.check | 4 +
test/files/neg/t5390b.scala | 10 +
test/files/neg/t5390c.check | 4 +
test/files/neg/t5390c.scala | 10 +
test/files/neg/t5390d.check | 4 +
test/files/neg/t5390d.scala | 10 +
test/files/neg/t5426.check | 13 +
test/files/neg/{caseinherit.flags => t5426.flags} | 0
test/files/neg/t5426.scala | 10 +
test/files/neg/t5429.check | 142 +
test/files/neg/t5429.scala | 93 +
test/files/neg/t5440.check | 5 +
test/files/neg/{caseinherit.flags => t5440.flags} | 0
test/files/neg/t5440.scala | 7 +
test/files/neg/t545.check | 7 +
test/files/neg/{bug545.scala => t545.scala} | 0
test/files/neg/t5452-new.check | 8 +
test/files/neg/t5452-new.scala | 31 +
test/files/neg/t5452-old.check | 8 +
test/files/neg/t5452-old.scala | 29 +
test/files/neg/t5455.check | 4 +
test/files/neg/t5455.scala | 16 +
test/files/neg/t5493.check | 4 +
test/files/neg/t5493.scala | 3 +
test/files/neg/t5497.check | 4 +
test/files/neg/t5497.scala | 5 +
test/files/neg/t550.check | 7 +
test/files/neg/{bug550.scala => t550.scala} | 0
test/files/neg/t5510.check | 19 +
test/files/neg/t5510.scala | 7 +
test/files/neg/t5529.check | 10 +
test/files/neg/t5529.scala | 13 +
test/files/neg/t5543.check | 10 +
test/files/neg/t5543.scala | 19 +
test/files/neg/t5544.check | 4 +
test/files/neg/t5544/Api_1.scala | 8 +
test/files/neg/t5544/Test_2.scala | 3 +
test/files/neg/t5553_1.check | 54 +
test/files/neg/t5553_1.scala | 34 +
test/files/neg/t5553_2.check | 50 +
test/files/neg/t5553_2.scala | 59 +
test/files/neg/t5554.check | 67 +
test/files/neg/t5554.scala | 39 +
test/files/neg/t556.check | 4 +
test/files/neg/{bug556.scala => t556.scala} | 0
test/files/neg/t5564.check | 4 +
test/files/neg/t5564.scala | 9 +
test/files/neg/t5572.check | 11 +
test/files/neg/t5572.scala | 23 +
test/files/neg/t5578.check | 4 +
test/files/neg/t5578.scala | 39 +
test/files/neg/t558.check | 4 +
test/files/neg/{bug558.scala => t558.scala} | 0
test/files/neg/t5580a.check | 6 +
test/files/neg/t5580a.scala | 11 +
test/files/neg/t5589neg.flags | 1 +
test/files/neg/t5589neg2.check | 9 +
test/files/neg/t5617.check | 8 +
test/files/neg/t5617.scala | 14 +
test/files/neg/t562.check | 4 +
test/files/neg/{bug562.scala => t562.scala} | 0
test/files/neg/t563.check | 4 +
test/files/neg/t563.scala | 7 +
test/files/neg/t565.check | 5 +
test/files/neg/{bug565.scala => t565.scala} | 0
test/files/neg/t5663-badwarneq.check | 40 +
.../{anyval-sealed.flags => t5663-badwarneq.flags} | 0
test/files/neg/t5663-badwarneq.scala | 94 +
test/files/neg/t5666.check | 37 +
test/files/neg/t5666.scala | 14 +
test/files/neg/t5675.check | 2 +
test/files/neg/{caseinherit.flags => t5675.flags} | 0
test/files/neg/t5675.scala | 7 +
test/files/neg/t5683.check | 16 +
test/files/neg/t5683.scala | 23 +
test/files/neg/t5687.check | 8 +
test/files/neg/t5687.scala | 55 +
test/files/neg/t5689.check | 7 +
test/files/neg/t5689.flags | 1 +
test/files/neg/t5689.scala | 6 +
test/files/neg/t5696.check | 19 +
test/files/neg/t5696.scala | 47 +
test/files/neg/t5702-neg-bad-and-wild.check | 28 +
test/files/neg/t5702-neg-bad-and-wild.scala | 29 +
test/files/neg/t5702-neg-bad-brace.check | 10 +
test/files/neg/t5702-neg-bad-brace.scala | 17 +
test/files/neg/t5702-neg-bad-xbrace.check | 7 +
test/files/neg/t5702-neg-bad-xbrace.scala | 31 +
test/files/neg/t5702-neg-ugly-xbrace.check | 19 +
test/files/neg/t5702-neg-ugly-xbrace.scala | 14 +
test/files/neg/t5728.check | 4 +
test/files/neg/t5728.scala | 7 +
test/files/neg/t5735.check | 6 +
test/files/neg/t5735.scala | 7 +
test/files/neg/t5753.check | 4 +
test/files/neg/t5753.flags | 1 +
test/files/neg/t5753/Impls_Macros_1.scala | 6 +
test/files/neg/t5753/Test_2.scala | 11 +
test/files/neg/t576.check | 4 +
test/files/neg/{bug576.scala => t576.scala} | 0
test/files/neg/t5760-pkgobj-warn.check | 4 +
test/files/neg/t5760-pkgobj-warn/stalepkg_1.scala | 11 +
test/files/neg/t5760-pkgobj-warn/stalepkg_2.scala | 11 +
test/files/neg/t5761.check | 16 +
test/files/neg/t5761.scala | 16 +
test/files/neg/t5762.check | 13 +
.../files/neg/{anyval-sealed.flags => t5762.flags} | 0
test/files/neg/t5762.scala | 24 +
test/files/neg/t5799.check | 4 +
test/files/neg/t5799.scala | 8 +
test/files/neg/t5801.check | 22 +
test/files/neg/t5801.scala | 16 +
test/files/neg/t5803.check | 4 +
test/files/neg/t5803.scala | 4 +
test/files/neg/t5821.check | 4 +
test/files/neg/t5821.scala | 8 +
test/files/neg/t5830.check | 7 +
test/files/neg/{caseinherit.flags => t5830.flags} | 0
test/files/neg/t5830.scala | 9 +
test/files/neg/t5839.check | 6 +
test/files/neg/t5839.scala | 7 +
test/files/neg/t5845.check | 7 +
test/files/neg/t5845.scala | 16 +
test/files/neg/t585.check | 4 +
test/files/neg/{bug585.scala => t585.scala} | 0
test/files/neg/t5856.check | 31 +
test/files/neg/t5856.scala | 11 +
test/files/neg/t5878.check | 13 +
test/files/neg/t5878.scala | 6 +
test/files/neg/t588.check | 13 +
test/files/neg/{bug588.scala => t588.scala} | 0
test/files/neg/t5882.check | 9 +
test/files/neg/t5882.scala | 6 +
test/files/neg/t5892.check | 17 +
test/files/neg/t5892.scala | 25 +
test/files/neg/t591.check | 5 +
test/files/neg/{bug591.scala => t591.scala} | 0
test/files/neg/t593.check | 4 +
test/files/neg/{bug593.scala => t593.scala} | 0
test/files/neg/t5956.check | 20 +
test/files/neg/t5956.flags | 1 +
test/files/neg/t5956.scala | 2 +
test/files/neg/t5969.check | 7 +
test/files/neg/t5969.scala | 11 +
test/files/neg/t6011.check | 10 +
test/files/neg/{caseinherit.flags => t6011.flags} | 0
test/files/neg/t6011.scala | 23 +
test/files/neg/t6013.check | 7 +
test/files/neg/t6013/Abstract.java | 7 +
test/files/neg/t6013/Base.java | 10 +
test/files/neg/t6013/DerivedScala.scala | 7 +
test/files/neg/t6040.check | 9 +
test/files/neg/t6040.scala | 1 +
test/files/neg/t6042.check | 4 +
test/files/neg/t6042.scala | 8 +
test/files/neg/t6048.check | 13 +
test/files/neg/{caseinherit.flags => t6048.flags} | 0
test/files/neg/t6048.scala | 22 +
test/files/neg/t6074.check | 4 +
test/files/neg/t6074.scala | 6 +
test/files/neg/t608.check | 6 +
test/files/neg/{bug608.scala => t608.scala} | 0
test/files/neg/t6082.check | 13 +
test/files/neg/t6082.scala | 2 +
test/files/neg/t6138.check | 7 +
test/files/neg/t6138.scala | 5 +
test/files/neg/t6162-inheritance.check | 10 +
test/files/neg/t6162-inheritance.flags | 1 +
test/files/neg/t6162-inheritance.scala | 19 +
test/files/neg/t6162-overriding.check | 7 +
test/files/neg/t6162-overriding.flags | 1 +
test/files/neg/t6162-overriding.scala | 17 +
test/files/neg/t6214.check | 4 +
test/files/neg/t6214.scala | 7 +
test/files/neg/t6227.check | 4 +
test/files/neg/t6227.scala | 6 +
test/files/neg/t6231.check | 6 +
test/files/neg/t6231.scala | 15 +
test/files/neg/t6258.check | 16 +
test/files/neg/t6258.scala | 25 +
test/files/neg/t6260.check | 13 +
test/files/neg/t6260.scala | 17 +
test/files/neg/t6263.check | 9 +
test/files/neg/t6263.scala | 6 +
test/files/neg/t6264.check | 4 +
.../files/neg/{anyval-sealed.flags => t6264.flags} | 0
test/files/neg/t6264.scala | 6 +
test/files/neg/t6276.check | 19 +
.../files/neg/{anyval-sealed.flags => t6276.flags} | 0
test/files/neg/t6276.scala | 44 +
test/files/neg/t6283.check | 4 +
test/files/neg/t6283.scala | 1 +
test/files/neg/t630.check | 5 +
test/files/neg/{bug630.scala => t630.scala} | 0
test/files/neg/t631.check | 4 +
test/files/neg/{bug631.scala => t631.scala} | 0
test/files/neg/t6323a.check | 15 +
test/files/neg/t6323a.flags | 1 +
test/files/neg/t6323a.scala | 21 +
test/files/neg/t633.check | 4 +
test/files/neg/{bug633.scala => t633.scala} | 0
test/files/neg/t6335.check | 9 +
test/files/neg/t6335.scala | 7 +
test/files/neg/t6336.check | 7 +
test/files/neg/t6336.scala | 12 +
test/files/neg/t6337.check | 7 +
test/files/neg/t6337.scala | 21 +
test/files/neg/t6340.check | 10 +
test/files/neg/t6340.scala | 21 +
test/files/neg/t6357.check | 4 +
test/files/neg/t6357.scala | 6 +
test/files/neg/t6359.check | 9 +
test/files/neg/t6359.scala | 8 +
test/files/neg/t6385.check | 7 +
test/files/neg/t6385.scala | 13 +
test/files/neg/t639.check | 7 +
test/files/neg/{bug639.scala => t639.scala} | 0
test/files/neg/t6436.check | 10 +
test/files/neg/t6436.scala | 9 +
test/files/neg/t6436b.check | 10 +
test/files/neg/t6436b.scala | 9 +
test/files/neg/t6443c.check | 7 +
test/files/neg/t6443c.scala | 21 +
test/files/neg/t6483.check | 9 +
test/files/neg/t6483.scala | 24 +
test/files/neg/t649.check | 4 +
test/files/neg/{bug649.scala => t649.scala} | 0
test/files/neg/t650.check | 4 +
test/files/neg/{bug650.scala => t650.scala} | 0
test/files/neg/t6526.check | 16 +
test/files/neg/t6526.scala | 41 +
test/files/neg/t6534.check | 17 +
test/files/neg/t6534.flags | 1 +
test/files/neg/t6534.scala | 10 +
test/files/neg/t6535.check | 6 +
test/files/neg/t6535.scala | 15 +
test/files/neg/t6539.check | 10 +
test/files/neg/t6539/Macro_1.scala | 10 +
test/files/neg/t6539/Test_2.scala | 12 +
test/files/neg/t6558.check | 10 +
test/files/neg/t6558.scala | 12 +
test/files/neg/t6558b.check | 7 +
test/files/neg/t6558b.scala | 15 +
test/files/neg/t6563.check | 4 +
test/files/neg/t6563.scala | 8 +
test/files/neg/t6567.check | 7 +
test/files/neg/t6567.flags | 1 +
test/files/neg/t6567.scala | 11 +
test/files/neg/t6597.check | 4 +
test/files/neg/t6597.scala | 5 +
test/files/neg/t663.check | 7 +
test/files/neg/{bug663.scala => t663.scala} | 0
test/files/neg/t664.check | 7 +
test/files/neg/{bug664.scala => t664.scala} | 0
test/files/neg/t6663.check | 6 +
test/files/neg/t6663.scala | 19 +
test/files/neg/t6666.check | 37 +
test/files/neg/t6666.scala | 121 +
test/files/neg/t6666b.check | 7 +
test/files/neg/t6666b.scala | 27 +
test/files/neg/t6666c.check | 10 +
test/files/neg/t6666c.scala | 8 +
test/files/neg/t6666d.check | 4 +
test/files/neg/t6666d.scala | 18 +
test/files/neg/t6666e.check | 4 +
test/files/neg/t6666e.scala | 9 +
test/files/neg/t6667.check | 14 +
test/files/neg/t6667.flags | 1 +
test/files/neg/t6667.scala | 10 +
test/files/neg/t6667b.check | 14 +
test/files/neg/t6667b.flags | 1 +
test/files/neg/t6667b.scala | 25 +
test/files/neg/t667.check | 4 +
test/files/neg/{bug667.scala => t667.scala} | 0
test/files/neg/t6675-old-patmat.check | 4 +
test/files/neg/t6675-old-patmat.flags | 1 +
test/files/neg/t6675-old-patmat.scala | 13 +
test/files/neg/t6675.check | 4 +
test/files/neg/t6675.flags | 1 +
test/files/neg/t6675.scala | 13 +
test/files/neg/t668.check | 4 +
test/files/neg/{bug668.scala => t668.scala} | 0
test/files/neg/t6728.check | 4 +
test/files/neg/t6728.scala | 5 +
test/files/neg/t6758.check | 28 +
test/files/neg/t6758.scala | 43 +
test/files/neg/t677.check | 6 +
test/files/neg/{bug677.scala => t677.scala} | 0
test/files/neg/t6771b.check | 6 +
test/files/neg/t6771b.scala | 16 +
test/files/neg/t6788.check | 5 +
test/files/neg/t6788.scala | 7 +
test/files/neg/t6795.check | 4 +
test/files/neg/t6795.scala | 3 +
test/files/neg/t6829.check | 36 +
test/files/neg/t6829.scala | 64 +
test/files/neg/t6902.check | 10 +
test/files/neg/{caseinherit.flags => t6902.flags} | 0
test/files/neg/t6902.scala | 23 +
test/files/neg/t691.check | 4 +
test/files/neg/{bug691.scala => t691.scala} | 0
test/files/neg/t6912.check | 4 +
test/files/neg/t6912.scala | 9 +
test/files/neg/t692.check | 19 +
test/files/neg/{bug692.scala => t692.scala} | 0
test/files/neg/t6928.check | 7 +
test/files/neg/t6928.scala | 10 +
test/files/neg/t693.check | 4 +
test/files/neg/{bug693.scala => t693.scala} | 0
test/files/neg/t6952.check | 13 +
test/files/neg/t6952.scala | 4 +
test/files/neg/t6963a.check | 5 +
test/files/neg/t6963a.flags | 1 +
test/files/neg/t6963a.scala | 5 +
test/files/neg/t6963b.check | 13 +
test/files/neg/t6963b.flags | 1 +
test/files/neg/t6963b.scala | 20 +
test/files/neg/t696a.check | 5 +
test/files/{jvm/bug680.check => neg/t696a.flags} | 0
test/files/neg/{bug696.scala => t696a.scala} | 0
test/files/neg/t696b.check | 9 +
test/files/neg/t696b.flags | 1 +
test/files/neg/t696b.scala | 7 +
test/files/neg/t700.check | 4 +
test/files/neg/{bug700.scala => t700.scala} | 0
test/files/neg/t708.check | 5 +
test/files/neg/{bug708.scala => t708.scala} | 0
test/files/neg/t712.check | 4 +
test/files/neg/{bug712.scala => t712.scala} | 0
test/files/neg/t715.check | 4 +
test/files/neg/{bug715.scala => t715.scala} | 0
test/files/neg/t7166.check | 4 +
test/files/neg/t7166/Impls_Macros_1.scala | 26 +
test/files/neg/t7166/Test_2.scala | 3 +
test/files/neg/t7171.check | 7 +
test/files/neg/t7171.flags | 1 +
test/files/neg/t7171.scala | 11 +
test/files/neg/t7171b.check | 10 +
test/files/neg/t7171b.flags | 1 +
test/files/neg/t7171b.scala | 15 +
test/files/neg/t7185.check | 7 +
test/files/neg/t7185.scala | 3 +
test/files/neg/t7235.check | 4 +
test/files/neg/t7235.scala | 14 +
test/files/neg/t7238.check | 6 +
test/files/neg/t7238.scala | 7 +
test/files/neg/t7251.check | 4 +
test/files/neg/t7251/A_1.scala | 10 +
test/files/neg/t7251/B_2.scala | 7 +
test/files/neg/t7259.check | 7 +
test/files/neg/t7259.scala | 9 +
test/files/neg/t7285.check | 13 +
test/files/neg/{caseinherit.flags => t7285.flags} | 0
test/files/neg/t7285.scala | 55 +
test/files/neg/t7289.check | 4 +
test/files/neg/t7289.scala | 39 +
test/files/neg/t7289_status_quo.check | 22 +
test/files/neg/t7289_status_quo.scala | 23 +
test/files/neg/t729.check | 6 +
test/files/neg/{bug729.scala => t729.scala} | 0
test/files/neg/t7290.check | 10 +
test/files/neg/{caseinherit.flags => t7290.flags} | 0
test/files/neg/t7290.scala | 10 +
test/files/neg/t7299.check | 7 +
test/files/neg/t7299.scala | 6 +
test/files/neg/t7325.check | 19 +
test/files/neg/t7325.scala | 25 +
test/files/neg/t7330.check | 5 +
test/files/neg/t7330.scala | 5 +
test/files/neg/t7369.check | 13 +
test/files/neg/{caseinherit.flags => t7369.flags} | 0
test/files/neg/t7369.scala | 43 +
test/files/neg/t7385.check | 10 +
test/files/neg/t7385.scala | 7 +
test/files/neg/t7388.check | 4 +
test/files/neg/t7388.scala | 1 +
test/files/neg/t742.check | 3 +-
test/files/neg/t7473.check | 7 +
test/files/neg/t7473.scala | 7 +
test/files/neg/t750.check | 15 +
test/files/neg/t750/AO_1.java | 5 +
test/files/neg/t750/Test_2.scala | 6 +
test/files/neg/t7507.check | 4 +
test/files/neg/t7507.scala | 7 +
test/files/neg/t7509.check | 12 +
test/files/neg/t7509.scala | 4 +
test/files/neg/t750b.check | 15 +
test/files/neg/t750b/AO.java | 5 +
test/files/neg/t750b/Test.scala | 6 +
test/files/neg/t7519-b.check | 4 +
test/files/neg/t7519-b/Mac_1.scala | 14 +
test/files/neg/t7519-b/Use_2.scala | 8 +
test/files/neg/t7519.check | 7 +
test/files/neg/t7519.scala | 18 +
test/files/neg/t752.check | 6 +
test/files/neg/{bug752.scala => t752.scala} | 0
test/files/neg/t7636.check | 10 +
test/files/neg/t7636.scala | 7 +
test/files/neg/t765.check | 4 +
test/files/neg/{bug765.scala => t765.scala} | 0
test/files/neg/t766.check | 4 +
test/files/neg/{bug766.scala => t766.scala} | 0
test/files/neg/t7694b.check | 7 +
test/files/neg/t771.scala | 0
test/files/neg/t7752.check | 27 +
test/files/neg/t7752.scala | 26 +
test/files/neg/t7783.check | 16 +
test/files/neg/{t3115.flags => t7783.flags} | 0
test/files/neg/t7783.scala | 15 +
test/files/neg/t779.check | 4 +
test/files/neg/{bug779.scala => t779.scala} | 0
test/files/neg/t783.check | 6 +
test/files/neg/{bug783.scala => t783.scala} | 0
test/files/neg/t798.check | 4 +
test/files/neg/{bug798.scala => t798.scala} | 0
test/files/neg/t800.check | 16 +
test/files/neg/{bug800.scala => t800.scala} | 0
test/files/neg/t8104a.check | 4 +
test/files/neg/t8104a/Macros_1.scala | 23 +
test/files/neg/t8104a/Test_2.scala | 20 +
test/files/neg/t8104b.check | 4 +
test/files/neg/t8104b/Macros_1.scala | 23 +
test/files/neg/t8104b/Test_2.scala | 24 +
test/files/neg/t8146-non-finitary-2.check | 9 +
test/files/neg/t8146-non-finitary-2.scala | 8 +
test/files/neg/t8146-non-finitary.check | 9 +
test/files/neg/t8146-non-finitary.scala | 7 +
test/files/neg/t835.check | 9 +
test/files/neg/{bug835.scala => t835.scala} | 0
test/files/neg/t836.check | 7 +
test/files/neg/{bug836.scala => t836.scala} | 0
test/files/neg/t845.check | 4 +
test/files/neg/{bug845.scala => t845.scala} | 0
test/files/neg/t846.check | 6 +
test/files/neg/{bug846.scala => t846.scala} | 0
test/files/neg/t856.check | 14 +
test/files/neg/{bug856.scala => t856.scala} | 0
test/files/neg/t875.check | 17 +
test/files/neg/{bug875.scala => t875.scala} | 0
test/files/neg/t876.check | 4 +
test/files/neg/{bug876.scala => t876.scala} | 0
test/files/neg/t877.check | 7 +
test/files/neg/{bug877.scala => t877.scala} | 0
test/files/neg/t882.check | 4 +
test/files/neg/{bug882.scala => t882.scala} | 0
test/files/neg/t900.check | 9 +
test/files/neg/{bug900.scala => t900.scala} | 0
test/files/neg/t908.check | 4 +
test/files/neg/{bug908.scala => t908.scala} | 0
test/files/neg/t909.check | 6 +
test/files/neg/{bug909.scala => t909.scala} | 0
test/files/neg/t910.check | 6 +
test/files/neg/{bug910.scala => t910.scala} | 0
test/files/neg/t935.check | 10 +
test/files/neg/{bug935.scala => t935.scala} | 0
test/files/neg/t944.check | 4 +
test/files/neg/{bug944.scala => t944.scala} | 0
test/files/neg/t961.check | 4 +
test/files/neg/{bug961.scala => t961.scala} | 0
test/files/neg/t963.check | 12 +
test/files/neg/t963.scala | 18 +
test/files/neg/t963b.check | 6 +
test/files/neg/t963b.scala | 26 +
test/files/neg/t987.check | 19 +
test/files/neg/{bug987.scala => t987.scala} | 0
test/files/neg/t997.check | 7 +
test/files/neg/t997.scala | 15 +
test/files/neg/tailrec-2.check | 7 +-
test/files/neg/tailrec-2.scala | 3 +
test/files/neg/tailrec.check | 6 +-
test/files/neg/tcpoly_override.check | 3 +-
test/files/neg/tcpoly_override.scala | 2 +-
test/files/neg/tcpoly_ticket2101.check | 2 +-
test/files/neg/tcpoly_typealias.check | 9 +-
test/files/neg/tcpoly_variance.check | 2 +-
test/files/neg/tcpoly_variance_enforce.check | 34 +-
test/files/neg/unchecked-abstract.check | 25 +
...nyval-sealed.flags => unchecked-abstract.flags} | 0
test/files/neg/unchecked-abstract.scala | 93 +
test/files/neg/unchecked-impossible.check | 4 +
...val-sealed.flags => unchecked-impossible.flags} | 0
test/files/neg/unchecked-impossible.scala | 16 +
test/files/neg/unchecked-knowable.check | 7 +
...nyval-sealed.flags => unchecked-knowable.flags} | 0
test/files/neg/unchecked-knowable.scala | 22 +
test/files/neg/unchecked-refinement.check | 13 +
...val-sealed.flags => unchecked-refinement.flags} | 0
test/files/neg/unchecked-refinement.scala | 27 +
test/files/neg/unchecked-suppress.check | 10 +
...{caseinherit.flags => unchecked-suppress.flags} | 0
test/files/neg/unchecked-suppress.scala | 10 +
test/files/neg/unchecked.check | 19 +
test/files/neg/unchecked.flags | 1 +
test/files/neg/unchecked.scala | 74 +
test/files/neg/unchecked2.check | 43 +
.../{pos/bug3097.flags => neg/unchecked2.flags} | 0
test/files/neg/unchecked2.scala | 33 +
test/files/neg/unchecked3.check | 37 +
.../neg/{anyval-sealed.flags => unchecked3.flags} | 0
test/files/neg/unchecked3.scala | 83 +
test/files/neg/unicode-unterminated-quote.check | 7 +
test/files/neg/unicode-unterminated-quote.scala | 2 +
test/files/neg/unit-returns-value.check | 5 +-
test/files/neg/unit2anyref.check | 2 -
test/files/neg/unreachablechar.flags | 1 +
test/files/neg/valueclasses-doubledefs.check | 7 +
test/files/neg/valueclasses-doubledefs.scala | 6 +
.../files/neg/valueclasses-impl-restrictions.check | 13 +
.../files/neg/valueclasses-impl-restrictions.scala | 29 +
test/files/neg/valueclasses-pavlov.check | 7 +
test/files/neg/valueclasses-pavlov.scala | 23 +
test/files/neg/valueclasses.check | 43 +
test/files/neg/valueclasses.scala | 31 +
test/files/neg/varargs.check | 4 +-
test/files/neg/variances.check | 19 +-
test/files/neg/variances.scala | 31 +-
test/files/neg/virtpatmat_reach_null.check | 4 +
...seinherit.flags => virtpatmat_reach_null.flags} | 0
test/files/neg/virtpatmat_reach_null.scala | 19 +
.../neg/virtpatmat_reach_sealed_unsealed.check | 14 +
...lags => virtpatmat_reach_sealed_unsealed.flags} | 0
.../neg/virtpatmat_reach_sealed_unsealed.scala | 21 +
test/files/neg/virtpatmat_unreach_select.check | 4 +
...ealed.flags => virtpatmat_unreach_select.flags} | 0
test/files/neg/virtpatmat_unreach_select.scala | 12 +
test/files/neg/wrong-args-for-none.check | 4 +
test/files/neg/wrong-args-for-none.scala | 6 +
test/files/pos/CustomGlobal.scala | 33 +
test/files/pos/MailBox.scala | 3 +-
test/files/pos/SI-4012-a.scala | 7 +
test/files/pos/SI-4012-b.scala | 15 +
test/files/pos/SI-5788.scala | 3 +
test/files/pos/SI-7060.flags | 1 +
test/files/pos/SI-7060.scala | 11 +
test/files/pos/SI-7100.scala | 6 +
test/files/pos/SI-7638.scala | 51 +
test/files/pos/Transactions.scala | 2 +-
test/files/pos/annot-inner.scala | 2 +-
test/files/pos/annotDepMethType.scala | 2 +-
test/files/pos/annotated-original/C_2.scala | 7 +
test/files/pos/annotated-original/M_1.scala | 7 +
.../bug680.check => pos/annotated-treecopy.check} | 0
test/files/pos/annotated-treecopy.flags | 1 +
.../pos/annotated-treecopy/Impls_Macros_1.scala | 53 +
test/files/pos/annotated-treecopy/Test_2.scala | 5 +
test/files/pos/annotations.scala | 24 +-
test/files/pos/arrays3.scala | 11 +
.../attachments-typed-ident.check} | 0
test/files/pos/attachments-typed-ident.flags | 1 +
.../pos/attachments-typed-ident/Impls_1.scala | 17 +
.../attachments-typed-ident/Macros_Test_2.scala | 4 +
test/files/pos/attributes.scala | 14 +-
test/files/pos/bug0029.scala | 3 -
test/files/pos/bug1000.scala | 4 -
test/files/pos/bug1001.scala | 105 -
test/files/pos/bug1014.scala | 13 -
test/files/pos/bug1049.scala | 7 -
test/files/pos/bug1050.scala | 10 -
test/files/pos/bug1070.scala | 4 -
test/files/pos/bug1203.scala | 7 -
test/files/pos/bug1279a.scala | 40 -
test/files/pos/bug1439.scala | 8 -
test/files/pos/bug1560.scala | 11 -
test/files/pos/bug2023.scala | 16 -
test/files/pos/bug211.scala | 8 -
test/files/pos/bug2261.scala | 6 -
test/files/pos/bug252.scala | 17 -
test/files/pos/bug2619.scala | 80 -
test/files/pos/bug2691.scala | 10 -
test/files/pos/bug3097.scala | 31 -
test/files/pos/bug3568.scala | 46 -
test/files/pos/bug3688.scala | 7 -
test/files/pos/bug531.scala | 10 -
test/files/pos/bug532.scala | 10 -
test/files/pos/bug715/meredith_1.scala | 98 -
test/files/pos/bug839.scala | 26 -
test/files/pos/bug927.scala | 11 -
test/files/pos/classtag-pos.flags | 1 +
test/files/pos/classtag-pos.scala | 5 +
test/files/pos/code.scala | 3 -
test/files/pos/collectGenericCC.scala | 8 +-
test/files/pos/contextbounds-implicits-new.scala | 10 +
test/files/pos/contextbounds-implicits-old.scala | 8 +
test/files/pos/depexists.scala | 2 +-
test/files/pos/depmet_1_pos.flags | 1 -
test/files/pos/depmet_implicit_chaining_zw.flags | 1 -
test/files/pos/depmet_implicit_norm_ret.flags | 1 -
.../files/pos/depmet_implicit_oopsla_session.flags | 1 -
.../pos/depmet_implicit_oopsla_session_2.flags | 1 -
.../depmet_implicit_oopsla_session_simpler.flags | 1 -
.../files/pos/depmet_implicit_oopsla_zipwith.flags | 1 -
test/files/pos/depmet_implicit_tpbetareduce.flags | 1 -
.../exhaust_alternatives.flags} | 0
test/files/pos/exhaust_alternatives.scala | 10 +
test/files/pos/exhaustive_heuristics.scala | 26 +
test/files/pos/existentials-harmful.scala | 54 +
test/files/pos/existentials.scala | 22 +
test/files/pos/exponential-spec.scala | 47 +
test/files/pos/five-dot-f.flags | 1 +
test/files/pos/five-dot-f.scala | 5 +
test/files/pos/gen-traversable-methods.scala | 20 +
test/files/pos/generic-sigs.scala | 4 +-
test/files/pos/getClassType.scala | 16 +
test/files/pos/hk-match/a.scala | 5 +
test/files/pos/hk-match/b.scala | 1 +
test/files/pos/hkarray.flags | 2 +-
test/files/pos/hkrange.scala | 5 +
test/files/pos/implicit-unwrap-tc.scala | 10 +
test/files/pos/implicits-new.scala | 92 +
.../pos/{implicits.scala => implicits-old.scala} | 0
.../{annotDepMethType.flags => infersingle.flags} | 0
test/files/pos/infersingle.scala | 5 +
test/files/pos/inline-access-levels.flags | 1 +
test/files/pos/inline-access-levels/A_1.scala | 10 +
test/files/pos/inline-access-levels/Test_2.scala | 11 +
test/files/pos/{bug3420.flags => inliner2.flags} | 0
test/files/pos/inliner2.scala | 57 +
test/files/pos/irrefutable.scala | 22 +
test/files/pos/isApplicableSafe.scala | 8 +
.../pos/javaConversions-2.10-regression.scala | 17 +
test/files/pos/javaReadsSigs/fromjava.java | 4 +-
test/files/pos/lexical.scala | 0
test/files/pos/liftcode_polymorphic.scala | 12 +
test/files/pos/list-extractor.scala | 8 +
test/{pending => files}/pos/local-objects.scala | 0
test/files/pos/lookupswitch.scala | 37 +
test/files/pos/lub-dealias-widen.scala | 34 +
test/files/pos/lubs.scala | 3 +
...ro-deprecate-dont-touch-backquotedidents.flags} | 0
...cro-deprecate-dont-touch-backquotedidents.scala | 56 +
.../macro-qmarkqmarkqmark.check} | 0
test/files/pos/macro-qmarkqmarkqmark.scala | 7 +
test/files/pos/manifest1-new.scala | 21 +
.../pos/{manifest1.scala => manifest1-old.scala} | 0
test/files/pos/native-warning.flags | 1 +
test/files/pos/native-warning.scala | 3 +
.../{bug3097.flags => nonlocal-unchecked.flags} | 0
test/files/pos/nonlocal-unchecked.scala | 6 +
test/files/pos/nothing_manifest_disambig-new.scala | 12 +
...g.scala => nothing_manifest_disambig-old.scala} | 0
.../pos/overloaded_extractor_and_regular_def.scala | 32 +
test/files/pos/override-object-yes.flags | 1 +
test/files/pos/override-object-yes.scala | 40 +
test/files/pos/package-case.scala | 4 +
test/files/pos/package-implicit/ActorRef.scala | 7 +
test/files/pos/package-implicit/DataFlow.scala | 7 +
test/files/pos/package-implicit/package.scala | 6 +
test/files/pos/packageobjs.scala | 0
test/files/pos/patmat.scala | 163 +
test/files/pos/polymorphic-case-class.flags | 1 +
test/files/pos/polymorphic-case-class.scala | 2 +
test/files/pos/presuperContext.scala | 13 +
test/files/pos/protected-static/J.java | 7 +
test/files/pos/protected-static/JavaClass.java | 6 +
test/files/pos/protected-static/S.scala | 7 +
test/files/pos/protected-static/ScalaClass.scala | 6 +
test/files/pos/rangepos-anonapply.flags | 1 +
test/files/pos/rangepos-anonapply.scala | 9 +
test/files/pos/rangepos-patmat.flags | 1 +
test/files/pos/rangepos-patmat.scala | 4 +
test/files/pos/rangepos.flags | 1 +
test/files/pos/rangepos.scala | 5 +
test/files/pos/raw-map/J_1.java | 4 +
test/files/pos/raw-map/S_2.scala | 6 +
test/files/pos/setter-not-implicit.flags | 1 +
test/files/pos/setter-not-implicit.scala | 3 +
test/files/pos/spec-Function1.scala | 2 +-
test/files/pos/spec-annotations.scala | 4 +-
test/files/pos/spec-constr-new.scala | 9 +
.../{spec-constr.scala => spec-constr-old.scala} | 0
test/files/pos/spec-doubledef-new.scala | 30 +
...ec-doubledef.scala => spec-doubledef-old.scala} | 0
test/files/pos/spec-fields-new.scala | 12 +
.../{spec-fields.scala => spec-fields-old.scala} | 0
test/files/pos/spec-funs.scala | 4 +-
test/files/pos/spec-groups.scala | 65 +
test/files/pos/spec-params-new.scala | 34 +
.../{spec-params.scala => spec-params-old.scala} | 0
test/files/pos/spec-sparsearray-new.scala | 25 +
...parsearray.scala => spec-sparsearray-old.scala} | 0
test/files/pos/spec-t6286.scala | 10 +
test/files/pos/specialize10.scala | 7 +
test/files/pos/specializes-sym-crash.scala | 26 +
test/files/pos/spurious-overload.scala | 32 +
test/files/pos/strip-tvars-for-lubbasetypes.scala | 25 +
test/files/pos/switch-small.scala | 8 +
test/files/pos/{bug0002.scala => t0002.scala} | 0
test/files/pos/{bug0017.scala => t0017.scala} | 0
test/files/pos/{bug0020.scala => t0020.scala} | 0
test/files/pos/t0029.scala | 3 +
test/files/pos/{bug0030.scala => t0030.scala} | 0
test/files/pos/{bug0031.scala => t0031.scala} | 0
test/files/pos/{bug0032.scala => t0032.scala} | 0
test/files/pos/{bug0036.scala => t0036.scala} | 0
test/files/pos/{bug0039.scala => t0039.scala} | 0
test/files/pos/{bug0049.scala => t0049.scala} | 0
test/files/pos/{bug0053.scala => t0053.scala} | 0
test/files/pos/{bug0054.scala => t0054.scala} | 0
test/files/pos/{bug0061.scala => t0061.scala} | 0
test/files/pos/{bug0064.scala => t0064.scala} | 0
test/files/pos/{bug0066.scala => t0066.scala} | 0
test/files/pos/{bug0068.scala => t0068.scala} | 0
test/files/pos/{bug0069.scala => t0069.scala} | 0
test/files/pos/{bug0076.scala => t0076.scala} | 0
test/files/pos/{bug0081.scala => t0081.scala} | 0
test/files/pos/{bug0082.scala => t0082.scala} | 0
test/files/pos/{bug0085.scala => t0085.scala} | 0
test/files/pos/{bug0091.scala => t0091.scala} | 0
test/files/pos/{bug0093.scala => t0093.scala} | 0
test/files/pos/{bug0095.scala => t0095.scala} | 0
test/files/pos/{bug0123.scala => t0123.scala} | 0
test/files/pos/{bug0204.scala => t0204.scala} | 0
test/files/pos/{bug0304.scala => t0304.scala} | 0
test/files/pos/{bug0305.scala => t0305.scala} | 0
test/files/pos/{bug0422.scala => t0422.scala} | 0
test/files/pos/{bug0599.scala => t0599.scala} | 0
test/files/pos/{bug0646.scala => t0646.scala} | 0
test/files/pos/t0651.scala | 4 -
test/files/pos/t1000.scala | 5 +
test/files/pos/t1001.scala | 109 +
test/files/pos/{bug1006.scala => t1006.scala} | 0
test/files/pos/t1014.scala | 15 +
test/files/pos/t1029/Test_1.scala | 2 +-
test/files/pos/{bug1034.scala => t1034.scala} | 0
test/files/pos/{bug1048.scala => t1048.scala} | 0
test/files/pos/t1049.scala | 7 +
test/files/pos/t1050.scala | 10 +
test/files/pos/{bug1056.scala => t1056.scala} | 0
test/files/pos/t1070.scala | 4 +
test/files/pos/{bug1071.scala => t1071.scala} | 0
test/files/pos/{bug1075.scala => t1075.scala} | 0
test/files/pos/{bug1085.scala => t1085.scala} | 0
test/files/pos/{bug1090.scala => t1090.scala} | 0
test/files/pos/{bug1107.scala => t1107.scala} | 0
test/files/pos/{t1107 => t1107b}/O.scala | 0
test/files/pos/{t1107 => t1107b}/T.scala | 0
test/files/pos/{bug1119.scala => t1119.scala} | 0
test/files/pos/{bug1123.scala => t1123.scala} | 0
.../files/pos/{bug112606A.scala => t112606A.scala} | 0
test/files/pos/t1133.scala | 32 +
test/files/pos/{bug1136.scala => t1136.scala} | 0
test/files/pos/{bug115.scala => t115.scala} | 0
test/files/pos/{bug116.scala => t116.scala} | 0
test/files/pos/{bug1168.scala => t1168.scala} | 0
test/files/pos/{bug1185.scala => t1185.scala} | 0
test/files/pos/{bug119.scala => t119.scala} | 0
test/files/pos/t1203.scala | 7 +
test/files/pos/{t1203 => t1203b}/J.java | 0
test/files/pos/{t1203 => t1203b}/S.scala | 0
test/files/pos/{bug1208.scala => t1208.scala} | 0
test/files/pos/{bug121.scala => t121.scala} | 0
test/files/pos/{bug1210a.scala => t1210a.scala} | 0
test/files/pos/{bug122.scala => t122.scala} | 0
test/files/pos/{bug1237.scala => t1237.scala} | 0
test/files/pos/{bug124.scala => t124.scala} | 0
test/files/pos/t1263/Test.java | 1 +
test/files/pos/{bug1272.scala => t1272.scala} | 0
test/files/pos/t1279a.scala | 39 +
test/files/pos/{bug1292.scala => t1292.scala} | 0
test/files/pos/t1318.scala | 31 +
.../pos/bug1357.scala => files/pos/t1357.scala} | 0
test/files/pos/t1381-new.scala | 31 +
test/files/pos/{bug1381.scala => t1381-old.scala} | 0
test/files/pos/{bug1385.scala => t1385.scala} | 0
test/files/pos/t1439.flags | 1 +
test/files/pos/t1439.scala | 8 +
test/files/pos/t1459/AbstractBase.java | 0
test/files/pos/t1459/App.scala | 6 +-
test/files/pos/t1459/Caller.java | 0
test/files/pos/{bug151.scala => t151.scala} | 0
test/files/pos/t1545.scala | 16 -
test/files/pos/{bug1565.scala => t1565.scala} | 0
test/files/pos/t1569.flags | 1 -
test/files/pos/{bug159.scala => t159.scala} | 0
test/files/pos/{bug160.scala => t160.scala} | 0
test/files/pos/{bug1626.scala => t1626.scala} | 0
test/files/pos/t1672.scala | 10 +
test/files/pos/t1722/Test.scala | 0
test/files/pos/t1722/Top.scala | 0
test/files/pos/{bug175.scala => t175.scala} | 0
test/files/pos/t1756.scala | 0
test/files/pos/{bug177.scala => t177.scala} | 0
test/files/pos/t1782/Test_1.scala | 16 -
test/files/pos/t1785.scala | 7 +
test/files/pos/t1803.flags | 1 +
test/files/pos/t1803.scala | 2 +
test/files/pos/{bug183.scala => t183.scala} | 0
test/files/pos/t1832.scala | 8 +
test/files/pos/{bug1843.scala => t1843.scala} | 0
test/files/pos/{bug1858.scala => t1858.scala} | 0
test/files/pos/{bug1909.scala => t1909.scala} | 0
.../pos/{bug1909b-pos.scala => t1909b-pos.scala} | 0
test/files/pos/t1942/A_1.scala | 2 +-
.../pos/bug1957.scala => files/pos/t1957.scala} | 0
test/files/pos/{bug1974.scala => t1974.scala} | 0
test/files/pos/{t1987.scala => t1987a.scala} | 0
test/files/pos/t1987b/a.scala | 7 +
test/files/pos/t1987b/b.scala | 10 +
test/files/pos/{bug201.scala => t201.scala} | 0
test/files/pos/{bug2018.scala => t2018.scala} | 0
test/files/pos/t2038.scala | 5 +
test/files/pos/t2060.scala | 0
test/files/pos/{bug2081.scala => t2081.scala} | 0
test/files/pos/t2082.scala | 0
test/files/pos/{bug2094.scala => t2094.scala} | 0
test/files/pos/{bug210.scala => t210.scala} | 0
test/files/pos/t211.scala | 8 +
test/files/pos/{bug2119.scala => t2119.scala} | 0
test/files/pos/{bug2127.scala => t2127.scala} | 0
test/files/pos/{bug2130-1.scala => t2130-1.scala} | 0
test/files/pos/{bug2130-2.scala => t2130-2.scala} | 0
test/files/pos/{bug2168.scala => t2168.scala} | 0
test/files/pos/{bug3252.flags => t2171.flags} | 0
test/files/pos/t2171.scala | 7 +
test/files/pos/t2179.scala | 0
test/files/pos/{bug2187-2.scala => t2187-2.scala} | 0
test/files/pos/{bug2187.scala => t2187.scala} | 0
test/{pending => files}/pos/t2194.scala | 0
test/files/pos/{bug2260.scala => t2260.scala} | 0
test/files/pos/t2281.scala | 41 +
test/files/pos/{bug229.scala => t229.scala} | 0
test/files/pos/{bug2310.scala => t2310.scala} | 0
test/files/pos/{bug2399.scala => t2399.scala} | 0
test/files/pos/t2405.scala | 23 +
test/files/pos/{bug2409 => t2409}/J.java | 0
.../{bug2409/bug2409.scala => t2409/t2409.scala} | 0
test/files/pos/t2425.scala | 0
test/files/pos/t2429.scala | 0
test/files/pos/t2433/A.java | 6 +-
test/files/pos/t2433/B.java | 6 +-
test/files/pos/t2433/Test.scala | 6 +-
test/files/pos/t2435.scala | 27 +
.../files/pos/{bug2441pos.scala => t2441pos.scala} | 0
test/files/pos/{bug245.scala => t245.scala} | 0
test/files/pos/{bug247.scala => t247.scala} | 0
test/files/pos/t2484.scala | 34 +-
test/files/pos/{bug2486.scala => t2486.scala} | 0
test/files/pos/t2504.scala | 0
test/files/pos/t252.scala | 17 +
test/files/pos/t2545.scala | 0
test/files/pos/{bug261-ab.scala => t261-ab.scala} | 0
test/files/pos/{bug261-ba.scala => t261-ba.scala} | 0
test/files/pos/{bug262.scala => t262.scala} | 0
test/files/pos/t2635.scala | 0
test/files/pos/{bug267.scala => t267.scala} | 0
test/files/pos/t2683.scala | 0
test/files/pos/t2691.scala | 9 +-
test/files/pos/{bug2693.scala => t2693.scala} | 0
.../pos/t2726/{bug2726_2.scala => t2726_2.scala} | 0
test/files/pos/t2764/Ann.java | 5 +
test/files/pos/t2764/Enum.java | 5 +
test/files/pos/t2764/Use.scala | 6 +
test/files/pos/t2782.scala | 18 +
test/files/pos/t2795-new.scala | 19 +
test/files/pos/t2795-old.scala | 17 +
test/files/pos/t2795.scala | 17 -
.../files/pos/{bug284-pos.scala => t284-pos.scala} | 0
test/files/pos/t2868/pick_1.scala | 7 -
test/files/pos/{bug287.scala => t287.scala} | 0
test/files/pos/{bug289.scala => t289.scala} | 0
test/files/pos/t2913.scala | 0
test/files/pos/{bug2939.scala => t2939.scala} | 0
test/files/pos/{bug2945.scala => t2945.scala} | 0
test/files/pos/{bug295.scala => t295.scala} | 0
test/files/pos/t2956/t2956.scala | 0
test/files/pos/{bug296.scala => t296.scala} | 0
test/files/pos/{bug3020.scala => t3020.scala} | 0
test/files/pos/{bug304.scala => t304.scala} | 0
test/files/pos/{bug3048.scala => t3048.scala} | 0
test/files/pos/{bug3106.scala => t3106.scala} | 0
test/files/pos/t3120/J1.java | 4 +
test/files/pos/t3120/J2.java | 4 +
test/files/pos/t3120/Q.java | 3 +
test/files/pos/t3120/Test.scala | 3 +
test/files/pos/{bug3136.scala => t3136.scala} | 0
test/files/pos/{bug3137.scala => t3137.scala} | 0
test/files/pos/t3174.scala | 0
test/files/pos/t3174b.scala | 0
.../pos/{bug3175-pos.scala => t3175-pos.scala} | 0
test/files/pos/{bug318.scala => t318.scala} | 0
test/files/pos/{bug319.scala => t319.scala} | 0
test/files/pos/{bug3430.flags => t3252.flags} | 0
test/files/pos/{bug3252.scala => t3252.scala} | 0
test/files/pos/t3272.scala | 8 +
test/files/pos/{bug3278.scala => t3278.scala} | 0
test/files/pos/{bug3312.scala => t3312.scala} | 0
test/files/pos/{bug3343.scala => t3343.scala} | 0
test/files/pos/t3363-new.scala | 20 +
test/files/pos/t3363-old.scala | 18 +
test/files/pos/t3363.scala | 18 -
test/files/pos/t3371.scala | 9 +
test/files/pos/{bug3411.scala => t3411.scala} | 0
test/files/pos/{bug342.scala => t342.scala} | 0
test/files/pos/{bug3420.flags => t3420.flags} | 0
test/files/pos/{bug3420.scala => t3420.scala} | 0
test/files/pos/{bug4840.flags => t3430.flags} | 0
test/files/pos/{bug3430.scala => t3430.scala} | 0
test/files/pos/{bug344.scala => t344.scala} | 0
test/files/pos/{bug3440.scala => t3440.scala} | 0
test/files/pos/{bug3480.scala => t3480.scala} | 0
.../files/pos/{bug348plus.scala => t348plus.scala} | 0
test/files/pos/{bug3495.flags => t3495.flags} | 0
test/files/pos/{bug3495.scala => t3495.scala} | 0
test/files/pos/t3498-new.scala | 17 +
test/files/pos/{bug3498.scala => t3498-old.scala} | 0
test/files/pos/{bug3521 => t3521}/DoubleValue.java | 0
test/files/pos/{bug3521 => t3521}/a.scala | 0
test/files/pos/{bug3528.scala => t3528.scala} | 0
test/files/pos/{bug3534.scala => t3534.scala} | 0
test/files/pos/{3567 => t3567}/Foo.scala | 0
test/files/pos/{3567 => t3567}/Outer.java | 0
test/files/pos/t3568.scala | 0
test/files/pos/{bug3570.scala => t3570.scala} | 0
test/files/pos/t3577.scala | 29 +
test/files/pos/{bug3578.scala => t3578.scala} | 0
test/files/pos/{bug359.scala => t359.scala} | 0
test/files/pos/{bug360.scala => t360.scala} | 0
test/files/pos/{bug361.scala => t361.scala} | 0
test/files/pos/{bug3636.scala => t3636.scala} | 0
test/files/pos/{bug3642 => t3642}/Tuppel_1.java | 0
.../bug3642_2.scala => t3642/t3642_2.scala} | 0
test/files/pos/{bug3671.scala => t3671.scala} | 0
.../pos/{bug3688-redux.scala => t3688-redux.scala} | 0
test/files/pos/t3688.scala | 5 +
test/files/pos/{bug372.scala => t372.scala} | 0
test/files/pos/{bug374.scala => t374.scala} | 0
test/files/pos/t3800.scala | 2 +-
test/files/pos/t3836.scala | 14 +
test/files/pos/t3856.scala | 1 +
test/files/pos/{bug3861.scala => t3861.scala} | 0
test/files/pos/t3880.scala | 16 +
test/files/pos/{bug3883.scala => t3883.scala} | 0
test/files/pos/{bug389.scala => t389.scala} | 0
test/files/pos/t3898.scala | 6 +
test/files/pos/{bug3938 => t3938}/Parent.java | 0
test/files/pos/{bug3938 => t3938}/UseParent.scala | 0
test/files/pos/t3951/Coll_1.scala | 2 +-
test/files/pos/t3960.flags | 1 +
test/files/pos/t3960.scala | 7 +
test/files/pos/{bug397.scala => t397.scala} | 0
test/files/pos/{bug3972.scala => t3972.scala} | 0
test/files/pos/t3999/a_1.scala | 9 +
test/files/pos/t3999/b_2.scala | 7 +
test/files/pos/t3999b.scala | 20 +
test/files/pos/{bug4018.scala => t4018.scala} | 0
test/files/pos/{bug402.scala => t402.scala} | 0
.../{neg/anyval-sealed.flags => pos/t4020.flags} | 0
test/files/pos/t4020.scala | 25 +
test/files/pos/{bug404.scala => t404.scala} | 0
test/files/pos/t4063.scala | 39 +
test/files/pos/t4070.scala | 37 +
test/files/pos/t4070b.scala | 35 +
test/files/pos/{bug415.scala => t415.scala} | 0
test/files/pos/t4176.scala | 6 +
test/files/pos/t4176b.scala | 5 +
test/files/pos/{bug4188.scala => t4188.scala} | 0
test/files/pos/{bug419.scala => t419.scala} | 0
test/files/pos/{bug422.scala => t422.scala} | 0
test/files/pos/{bug4220.scala => t4220.scala} | 0
test/files/pos/{bug4237.scala => t4237.scala} | 0
test/files/pos/{bug4269.scala => t4269.scala} | 0
test/files/pos/t4273.scala | 8 +
test/files/pos/{bug4275.scala => t4275.scala} | 0
.../pos/{bug430-feb09.scala => t430-feb09.scala} | 0
test/files/pos/{bug430.scala => t430.scala} | 0
test/files/pos/{bug4305.scala => t4305.scala} | 0
test/files/pos/{bug432.scala => t432.scala} | 0
test/files/pos/t4336.scala | 19 +
test/files/pos/t4351.check | 1 +
test/files/pos/t4351.scala | 20 +
test/files/pos/{bug439.scala => t439.scala} | 0
test/files/pos/{bug443.scala => t443.scala} | 0
test/files/pos/t4430.scala | 11 +
test/files/pos/t4494.flags | 1 +
test/files/pos/t4494.scala | 3 +
test/files/pos/{bug4501.scala => t4501.scala} | 0
test/files/pos/t4502.scala | 12 +
test/files/pos/t4524.scala | 9 +
test/files/pos/t4545.scala | 14 +
test/files/pos/{bug4553.scala => t4553.scala} | 0
test/files/pos/t4579.flags | 1 +
test/files/pos/t4579.scala | 518 +
test/files/pos/{bug460.scala => t460.scala} | 0
test/files/pos/{bug4603 => t4603}/J.java | 0
test/files/pos/{bug4603 => t4603}/S.scala | 0
test/files/pos/{bug464.scala => t464.scala} | 0
test/files/pos/t4651.scala | 12 +
test/files/pos/t4716.scala | 10 +
test/files/pos/t4717.scala | 35 +
test/files/pos/{bug4731.scala => t4731.scala} | 0
test/files/pos/t4737/J_1.java | 9 +
test/files/pos/t4737/S_2.scala | 10 +
test/files/pos/{bug4757 => t4757}/A_2.scala | 0
test/files/pos/{bug4757 => t4757}/B_3.scala | 0
test/files/pos/{bug4757 => t4757}/P_1.scala | 0
test/files/pos/t4758.scala | 17 +
test/files/pos/t4760.scala | 34 +
test/files/pos/t4812.scala | 4 +
test/files/pos/t4831.scala | 11 +
test/files/{run/bug2106.flags => pos/t4840.flags} | 0
test/files/pos/{bug4840.scala => t4840.scala} | 0
test/files/pos/t4842.scala | 26 +
test/files/pos/t4853.scala | 12 +
test/files/pos/t4869.scala | 8 +
test/files/pos/t4910.scala | 6 +
test/files/{neg/bug4302.flags => pos/t4911.flags} | 0
test/files/pos/t4911.scala | 16 +
test/files/pos/t4938.scala | 4 +
test/files/pos/t4957.scala | 89 +
test/files/pos/t4970.scala | 13 +
test/files/pos/t4975.scala | 12 +
test/files/pos/t5012.scala | 12 +
test/files/pos/t5020.scala | 19 +
.../{neg/caseinherit.flags => pos/t5029.flags} | 0
test/files/pos/t5029.scala | 3 +
test/files/pos/t5031/Id.scala | 4 +
test/files/pos/t5031/package.scala | 3 +
test/files/pos/t5031_2.scala | 7 +
test/files/pos/t5031_3/Foo_1.scala | 5 +
test/files/pos/t5031_3/Main_2.scala | 6 +
test/files/pos/t5031_3/package.scala | 6 +
test/files/pos/t5033.scala | 15 +
test/files/pos/t5041.scala | 9 +
test/files/pos/t5071.scala | 18 +
test/files/pos/t5082.scala | 14 +
test/files/pos/t5084.scala | 5 +
test/files/pos/t5099.scala | 14 +
test/files/pos/t5119.scala | 13 +
test/files/pos/t5120.scala | 26 +
test/files/pos/t5127.scala | 8 +
test/files/pos/t5130.scala | 46 +
test/files/pos/t5137.scala | 17 +
test/files/pos/{bug514.scala => t514.scala} | 0
test/files/pos/t5156.scala | 21 +
test/files/pos/{bug516.scala => t516.scala} | 0
test/files/pos/t5165/TestAnnotation.java | 11 +
test/files/pos/t5165/TestObject.scala | 3 +
test/files/pos/t5165/TestTrait.scala | 3 +
.../{neg/caseinherit.flags => pos/t5175.flags} | 0
test/files/pos/t5175.scala | 9 +
test/files/pos/t5178.scala | 11 +
test/files/pos/t5198.scala | 15 +
test/files/pos/t5210.scala | 10 +
test/files/pos/{bug522.scala => t522.scala} | 0
test/files/pos/t5223.scala | 6 +
test/files/pos/t5240.scala | 11 +
test/files/pos/t5245.scala | 3 +
test/files/pos/t5259.scala | 21 +
test/files/pos/{bug530.scala => t530.scala} | 0
test/files/pos/t5305.scala | 13 +
test/files/pos/t531.scala | 11 +
test/files/pos/t5313.scala | 30 +
test/files/pos/t5317.scala | 12 +
test/files/pos/t532.scala | 11 +
test/files/pos/{bug533.scala => t533.scala} | 0
test/files/pos/t5330.scala | 22 +
test/files/pos/t5330b.scala | 6 +
test/files/pos/t5330c.scala | 5 +
test/files/pos/t5359.scala | 17 +
test/files/pos/t5384.scala | 7 +
test/files/pos/t5390.scala | 11 +
test/files/pos/t5399.scala | 45 +
test/files/pos/t5406.scala | 4 +
test/files/pos/t5444.scala | 42 +
test/files/pos/t5504/s_1.scala | 4 +
test/files/pos/t5504/s_2.scala | 8 +
test/files/pos/{t5534.scala => t5541.scala} | 0
test/files/pos/t5542.flags | 1 +
test/files/pos/t5542.scala | 3 +
test/files/pos/t5545/S_1.scala | 4 +
test/files/pos/t5545/S_2.scala | 4 +
test/files/pos/t5546.scala | 1 +
test/files/pos/t5580b.scala | 19 +
test/files/pos/t5604/ReplConfig.scala | 53 +
test/files/pos/t5604/ReplReporter.scala | 30 +
test/files/pos/t5604b/T_1.scala | 6 +
test/files/pos/t5604b/T_2.scala | 6 +
test/files/pos/t5604b/Test_1.scala | 7 +
test/files/pos/t5604b/Test_2.scala | 7 +
test/files/pos/t5604b/pack_1.scala | 5 +
test/files/pos/t5626.scala | 12 +
test/files/pos/t5644/BoxesRunTime.java | 836 ++
test/files/pos/t5644/other.scala | 3 +
test/files/pos/t5654.scala | 13 +
test/files/pos/{bug566.scala => t566.scala} | 0
test/files/pos/t5667.scala | 4 +
test/files/pos/t5692a.check | 4 +
test/files/pos/t5692a.flags | 1 +
test/files/pos/t5692a/Macros_1.scala | 6 +
test/files/pos/t5692a/Test_2.scala | 3 +
test/files/pos/t5692b.check | 4 +
test/files/pos/t5692b.flags | 1 +
test/files/pos/t5692b/Macros_1.scala | 6 +
test/files/pos/t5692b/Test_2.scala | 3 +
test/files/{jvm/bug680.check => pos/t5692c.check} | 0
test/files/pos/t5692c.scala | 4 +
test/files/pos/t5702-pos-infix-star.scala | 15 +
test/files/pos/t5703/Base.java | 3 +
test/files/pos/t5703/Impl.scala | 3 +
test/files/pos/t5706.flags | 1 +
test/files/pos/t5706.scala | 10 +
test/files/pos/t5720-ownerous.scala | 56 +
test/files/pos/t5726.scala | 17 +
test/files/pos/t5727.scala | 31 +
test/files/pos/t5729.scala | 6 +
test/files/pos/{bug573.scala => t573.scala} | 0
test/files/pos/t5738.scala | 8 +
test/files/pos/t5742.scala | 8 +
test/files/pos/t5744/Macros_1.scala | 22 +
test/files/pos/t5744/Test_2.scala | 6 +
test/files/pos/t5756.scala | 6 +
test/files/pos/t5769.scala | 9 +
test/files/pos/{bug577.scala => t577.scala} | 0
test/files/pos/t5777.scala | 45 +
test/files/pos/t5779-numeq-warn.scala | 13 +
test/files/pos/{bug578.scala => t578.scala} | 0
test/files/pos/t5796.scala | 8 +
test/files/pos/t5809.flags | 1 +
test/files/pos/t5809.scala | 5 +
test/files/pos/t5829.scala | 18 +
test/files/pos/t5846.scala | 10 +
test/files/pos/t5853.scala | 55 +
test/files/pos/t5859.scala | 15 +
test/files/pos/t5862.scala | 38 +
test/files/pos/t5877.scala | 14 +
test/files/pos/t5877b.scala | 13 +
test/files/pos/t5886.scala | 18 +
test/files/pos/t5892.scala | 5 +
.../{neg/caseinherit.flags => pos/t5897.flags} | 0
test/files/pos/t5897.scala | 6 +
.../{neg/caseinherit.flags => pos/t5899.flags} | 0
test/files/pos/t5899.scala | 19 +
test/files/pos/t5910.java | 2 +
test/files/pos/{bug592.scala => t592.scala} | 0
test/files/pos/t5930.flags | 1 +
test/files/pos/t5930.scala | 4 +
.../{neg/caseinherit.flags => pos/t5932.flags} | 0
test/files/pos/t5932.scala | 15 +
test/files/pos/{bug595.scala => t595.scala} | 0
test/files/pos/t5953.scala | 16 +
test/files/pos/t5957/T_1.scala | 8 +
test/files/pos/t5957/Test.java | 11 +
test/files/pos/t5958.scala | 15 +
test/files/pos/{bug596.scala => t596.scala} | 0
test/files/pos/t5967.scala | 6 +
.../{neg/caseinherit.flags => pos/t5968.flags} | 0
test/files/pos/t5968.scala | 8 +
test/files/pos/{bug599.scala => t599.scala} | 0
.../{neg/caseinherit.flags => pos/t6008.flags} | 0
test/files/pos/t6008.scala | 12 +
test/files/pos/t6014.scala | 13 +
test/files/pos/{bug602.scala => t602.scala} | 0
.../{neg/caseinherit.flags => pos/t6022.flags} | 0
test/files/pos/t6022.scala | 7 +
test/files/pos/t6022b.scala | 20 +
test/files/pos/t6028/t6028_1.scala | 3 +
test/files/pos/t6028/t6028_2.scala | 4 +
test/files/pos/t6029.scala | 3 +
test/files/pos/t6033.scala | 5 +
test/files/pos/t6034.scala | 1 +
test/files/pos/{bug604.scala => t604.scala} | 0
test/files/pos/t6040.scala | 3 +
test/files/pos/t6047.flags | 1 +
test/files/pos/t6047.scala | 20 +
test/files/pos/{bug607.scala => t607.scala} | 0
test/files/pos/t6072.scala | 3 +
test/files/pos/t6084.scala | 15 +
test/files/pos/t6089b.scala | 18 +
test/files/pos/t6091.flags | 1 +
test/files/pos/t6091.scala | 10 +
test/files/pos/{bug611.scala => t611.scala} | 0
test/files/pos/t6117.scala | 19 +
test/files/pos/{bug613.scala => t613.scala} | 0
test/files/pos/t6145.scala | 11 +
.../{neg/caseinherit.flags => pos/t6146.flags} | 0
test/files/pos/t6146.scala | 60 +
test/files/pos/{bug615.scala => t615.scala} | 0
test/files/pos/t6157.flags | 1 +
test/files/pos/t6157.scala | 25 +
test/files/pos/{bug616.scala => t616.scala} | 0
test/files/pos/t6184.scala | 7 +
test/files/pos/t6201.scala | 13 +
test/files/pos/t6204-a.scala | 9 +
test/files/pos/t6204-b.scala | 10 +
test/files/pos/t6205.scala | 18 +
test/files/pos/t6208.scala | 4 +
.../{neg/caseinherit.flags => pos/t6210.flags} | 0
test/files/pos/t6210.scala | 21 +
test/files/pos/t6215.scala | 1 +
test/files/pos/t6225.scala | 20 +
test/files/pos/t6245/Base.java | 5 +
test/files/pos/t6245/Foo.scala | 9 +
test/files/pos/t6245/Vis.java | 3 +
test/files/pos/t6274.scala | 13 +
.../{neg/anyval-sealed.flags => pos/t6275.flags} | 0
test/files/pos/t6275.scala | 11 +
test/files/pos/t6278-synth-def.scala | 30 +
test/files/pos/{bug628.scala => t628.scala} | 0
test/files/pos/t6311.scala | 5 +
test/files/pos/t6335.scala | 25 +
test/files/pos/t6358.scala | 6 +
test/files/pos/t6358_2.scala | 6 +
test/files/pos/t6367.scala | 34 +
test/files/pos/t6386.scala | 5 +
test/files/pos/{bug640.scala => t640.scala} | 0
test/files/pos/t6479.scala | 56 +
test/files/pos/t6482.scala | 11 +
test/files/pos/t6485a/Macros_1.scala | 5 +
test/files/pos/t6485a/Test_2.scala | 5 +
test/files/pos/t6485b/Test.scala | 10 +
test/files/pos/t6499.scala | 3 +
test/files/pos/{bug651.scala => t651.scala} | 0
test/files/pos/t6514.scala | 11 +
test/files/pos/t6516.scala | 19 +
.../{neg/anyval-sealed.flags => pos/t6537.flags} | 0
test/files/pos/t6537.scala | 16 +
test/files/pos/t6547.flags | 1 +
test/files/pos/t6547.scala | 6 +
test/files/pos/t6551.scala | 13 +
test/files/pos/t6552.scala | 8 +
test/files/pos/t6556.scala | 32 +
test/files/pos/t6562.scala | 14 +
test/files/pos/t6575a.scala | 15 +
test/files/pos/t6575b.scala | 17 +
.../{neg/anyval-sealed.flags => pos/t6595.flags} | 0
test/files/pos/t6595.scala | 18 +
test/files/pos/t6600.scala | 8 +
test/files/pos/t6601/PrivateValueClass_1.scala | 1 +
test/files/pos/t6601/UsePrivateValueClass_2.scala | 10 +
test/files/pos/{bug661.scala => t661.scala} | 0
test/files/pos/t6624.scala | 28 +
test/files/pos/t6648.scala | 24 +
test/files/pos/t6651.scala | 33 +
.../{neg/caseinherit.flags => pos/t6675.flags} | 0
test/files/pos/t6675.scala | 20 +
test/files/pos/t6712.scala | 5 +
test/files/pos/t6722.scala | 11 +
test/files/pos/{bug675.scala => t675.scala} | 0
.../{neg/caseinherit.flags => pos/t6771.flags} | 0
test/files/pos/t6771.scala | 9 +
test/files/pos/{bug684.scala => t684.scala} | 0
test/files/pos/t6846.scala | 28 +
test/files/pos/t6891.flags | 1 +
test/files/pos/t6891.scala | 26 +
.../{neg/anyval-sealed.flags => pos/t6896.flags} | 0
test/files/pos/t6896.scala | 7 +
test/files/pos/t6897.scala | 6 +
test/files/pos/{bug690.scala => t690.scala} | 0
test/files/pos/t6921.scala | 11 +
test/files/pos/t6925.scala | 9 +
test/files/pos/t6925b.scala | 18 +
test/files/pos/{bug694.scala => t694.scala} | 0
.../{neg/caseinherit.flags => pos/t6942.flags} | 0
test/files/pos/t6942/Bar.java | 235 +
test/files/pos/t6942/t6942.scala | 64 +
test/files/pos/t6963c.flags | 1 +
test/files/pos/t6963c.scala | 25 +
test/files/pos/{bug697.scala => t697.scala} | 0
test/files/pos/t6976/Exts_1.scala | 10 +
test/files/pos/t6976/ImplicitBug_1.scala | 27 +
test/files/pos/t6976/ImplicitBug_2.scala | 7 +
test/files/pos/{bug698.scala => t698.scala} | 0
.../{neg/caseinherit.flags => pos/t6994.flags} | 0
test/files/pos/t6994.scala | 8 +
test/files/pos/t7011.flags | 1 +
test/files/pos/t7011.scala | 7 +
test/files/pos/t7014/ThreadSafety.java | 9 +
test/files/pos/t7014/ThreadSafetyLevel.java | 8 +
test/files/pos/t7014/t7014.scala | 4 +
test/files/pos/t7022.scala | 9 +
test/files/pos/{bug703.scala => t703.scala} | 0
test/files/pos/t7033.scala | 15 +
test/files/pos/t7035.scala | 15 +
test/files/pos/{bug704.scala => t704.scala} | 0
test/files/pos/t7091.scala | 7 +
test/files/pos/{bug711.scala => t711.scala} | 0
test/files/pos/t7126.scala | 11 +
test/files/pos/{bug715.cmds => t715.cmds} | 0
test/files/pos/t715/meredith_1.scala | 98 +
test/files/pos/{bug715 => t715}/runner_2.scala | 0
test/files/pos/t7180.scala | 13 +
.../{neg/caseinherit.flags => pos/t7183.flags} | 0
test/files/pos/t7183.scala | 13 +
test/files/pos/t7190.scala | 26 +
test/files/pos/{bug720.scala => t720.scala} | 0
test/files/pos/t7200b.scala | 50 +
test/files/pos/t7226.scala | 26 +
.../{neg/caseinherit.flags => pos/t7232.flags} | 0
test/files/pos/t7232/Foo.java | 9 +
test/files/pos/t7232/List.java | 4 +
test/files/pos/t7232/Test.scala | 5 +
.../{neg/caseinherit.flags => pos/t7232b.flags} | 0
test/files/pos/t7232b/Foo.java | 8 +
test/files/pos/t7232b/List.java | 5 +
test/files/pos/t7232b/Test.scala | 5 +
.../{neg/caseinherit.flags => pos/t7232c.flags} | 0
test/files/pos/t7232c/Foo.java | 10 +
test/files/pos/t7232c/Test.scala | 4 +
.../{neg/caseinherit.flags => pos/t7232d.flags} | 0
test/files/pos/t7232d/Entry.java | 4 +
test/files/pos/t7232d/Foo.java | 8 +
test/files/pos/t7232d/Test.scala | 4 +
test/files/pos/t7233.scala | 14 +
test/files/pos/t7233b.scala | 8 +
test/files/pos/t7239.scala | 38 +
.../{neg/caseinherit.flags => pos/t7285a.flags} | 0
test/files/pos/t7285a.scala | 83 +
test/files/pos/t7329.scala | 1 +
.../{neg/anyval-sealed.flags => pos/t7369.flags} | 0
test/files/pos/t7369.scala | 37 +
test/files/pos/t7377/Client_2.scala | 11 +
test/files/pos/t7377/Macro_1.scala | 7 +
test/files/pos/t7377b.flags | 1 +
test/files/pos/t7377b.scala | 13 +
test/files/pos/t7426.scala | 3 +
test/files/{jvm/bug680.check => pos/t7461.check} | 0
test/files/pos/t7461/Macros_1.scala | 13 +
test/files/pos/t7461/Test_2.scala | 3 +
test/files/pos/t7486-named.scala | 8 +
test/files/pos/t7486.scala | 8 +
test/files/pos/t7505.scala | 16 +
test/files/pos/t7516/A_1.scala | 9 +
test/files/pos/t7516/B_2.scala | 4 +
test/files/pos/t7517.scala | 22 +
test/files/pos/t7532/A_1.java | 6 +
test/files/pos/t7532/B_2.scala | 5 +
test/files/pos/t7532b/A_1.scala | 7 +
test/files/pos/t7532b/B_2.scala | 8 +
test/files/pos/{bug756.scala => t756.scala} | 0
test/files/pos/{bug757.scala => t757.scala} | 0
test/files/pos/{bug757a.scala => t757a.scala} | 0
test/files/pos/{bug758.scala => t758.scala} | 0
test/files/pos/{bug759.scala => t759.scala} | 0
test/files/pos/{bug762.scala => t762.scala} | 0
test/files/pos/t7649.flags | 1 +
test/files/pos/t7649.scala | 20 +
test/files/pos/{bug767.scala => t767.scala} | 0
test/files/pos/t7694.scala | 40 +
test/files/pos/t7716.scala | 16 +
test/files/{jvm/bug680.check => pos/t7776.check} | 0
test/files/pos/t7776.scala | 12 +
test/files/pos/t7782.scala | 25 +
test/files/pos/t7782b.scala | 25 +
test/files/pos/{bug780.scala => t780.scala} | 0
test/files/pos/t7815.scala | 30 +
test/files/pos/t7818.scala | 10 +
test/files/pos/{bug788.scala => t788.scala} | 0
test/files/pos/{bug789.scala => t789.scala} | 0
test/files/pos/t7902.scala | 17 +
test/files/pos/{bug796.scala => t796.scala} | 0
test/files/pos/{bug802.scala => t802.scala} | 0
test/files/pos/{bug803.scala => t803.scala} | 0
test/files/pos/{bug805.scala => t805.scala} | 0
test/files/pos/t8060.scala | 11 +
test/files/pos/t8062.flags | 1 +
test/files/pos/t8062/A_1.scala | 5 +
test/files/pos/t8062/B_2.scala | 3 +
test/files/pos/{bug807.scala => t807.scala} | 0
test/files/pos/t8111.scala | 24 +
test/files/pos/{bug812.scala => t812.scala} | 0
test/files/pos/t8138.scala | 24 +
test/files/pos/t8146a.scala | 9 +
test/files/pos/t8146b.scala | 77 +
test/files/pos/t8152-performance.scala | 13 +
test/files/pos/t839.scala | 26 +
test/files/pos/{bug851.scala => t851.scala} | 0
test/files/pos/{bug873.scala => t873.scala} | 0
test/files/pos/{bug880.scala => t880.scala} | 0
test/files/pos/{bug892.scala => t892.scala} | 0
test/files/pos/{bug911.scala => t911.scala} | 0
test/files/pos/t927.scala | 11 +
test/files/pos/t942/Amount_1.java | 5 +
test/files/pos/t942/Test_2.scala | 3 +
test/files/pos/{bug946.scala => t946.scala} | 0
test/files/pos/ticket2251.scala | 2 +-
.../bug4285.flags => pos/trait-force-info.flags} | 0
test/files/pos/trait-force-info.scala | 18 +
test/files/pos/trait-parents.scala | 16 +
test/files/pos/typetags.scala | 16 +
test/files/pos/unapplyComplex.scala | 6 +-
.../{neg/bug4302.flags => pos/unchecked-a.flags} | 0
test/files/pos/unchecked-a.scala | 15 +
test/files/pos/value-class-override-no-spec.flags | 1 +
test/files/pos/value-class-override-no-spec.scala | 9 +
test/files/pos/value-class-override-spec.scala | 9 +
test/files/pos/virtpatmat_alts_subst.flags | 1 +
test/files/pos/virtpatmat_alts_subst.scala | 6 +
.../virtpatmat_anonfun_for.flags} | 0
test/files/pos/virtpatmat_anonfun_for.scala | 8 +
test/files/pos/virtpatmat_binding_opt.flags | 1 +
test/files/pos/virtpatmat_binding_opt.scala | 11 +
test/files/pos/virtpatmat_castbinder.flags | 1 +
test/files/pos/virtpatmat_castbinder.scala | 15 +
test/files/pos/virtpatmat_exhaust.scala | 24 +
.../virtpatmat_exhaust_unchecked.flags} | 0
test/files/pos/virtpatmat_exhaust_unchecked.scala | 24 +
test/files/pos/virtpatmat_exist1.flags | 1 +
test/files/pos/virtpatmat_exist1.scala | 24 +
test/files/pos/virtpatmat_exist2.flags | 1 +
test/files/pos/virtpatmat_exist2.scala | 20 +
test/files/pos/virtpatmat_exist3.flags | 1 +
test/files/pos/virtpatmat_exist3.scala | 12 +
test/files/pos/virtpatmat_exist4.scala | 35 +
test/files/pos/virtpatmat_exist_uncurry.scala | 6 +
test/files/pos/virtpatmat_gadt_array.flags | 1 +
test/files/pos/virtpatmat_gadt_array.scala | 15 +
test/files/pos/virtpatmat_infer_single_1.flags | 1 +
test/files/pos/virtpatmat_infer_single_1.scala | 7 +
test/files/pos/virtpatmat_instof_valuetype.flags | 1 +
test/files/pos/virtpatmat_instof_valuetype.scala | 8 +
test/files/pos/virtpatmat_obj_in_case.flags | 1 +
test/files/pos/virtpatmat_obj_in_case.scala | 5 +
test/files/pos/virtpatmat_partialfun_nsdnho.scala | 18 +
test/files/pos/virtpatmat_reach_const.scala | 11 +
test/files/pos/xlint1.flags | 1 +
test/files/pos/xlint1.scala | 13 +
test/files/pos/z1720.scala | 16 +
test/files/pos/z1730.flags | 1 +
test/files/pos/z1730.scala | 13 +
test/files/positions/Unpositioned1.scala | 2 +-
test/files/presentation/callcc-interpreter.check | 94 +
.../presentation/callcc-interpreter/Runner.scala | 3 +
.../callcc-interpreter/src/CallccInterpreter.scala | 86 +
.../presentation/completion-implicit-chained.check | 29 +
.../completion-implicit-chained/Test.scala | 3 +
.../src/Completions.scala | 12 +
test/files/presentation/forgotten-ask.scala | 33 +
test/files/presentation/hyperlinks.check | 181 +
test/files/presentation/hyperlinks/Runner.scala | 11 +
.../hyperlinks/src/NameDefaultTests.scala | 16 +
.../presentation/hyperlinks/src/PatMatTests.scala | 28 +
.../presentation/hyperlinks/src/SuperTypes.scala | 32 +
test/files/presentation/ide-bug-1000349.check | 40 +
.../presentation/ide-bug-1000349/Runner.scala | 3 +
.../src/CompletionOnEmptyArgMethod.scala | 7 +
test/files/presentation/ide-bug-1000469.check | 1 +
.../presentation/ide-bug-1000469/Runner.scala | 3 +
.../ide-bug-1000469/src/java/JavaEventHandler.java | 3 +
.../ide-bug-1000469/src/scala/EventHandler.scala | 5 +
test/files/presentation/ide-bug-1000475.check | 115 +
.../presentation/ide-bug-1000475/Runner.scala | 3 +
.../presentation/ide-bug-1000475/src/Foo.scala | 9 +
test/files/presentation/ide-bug-1000531.check | 129 +
.../presentation/ide-bug-1000531/Runner.scala | 3 +
.../ide-bug-1000531/src/CrashOnLoad.scala | 7 +
test/files/presentation/ide-t1000567.check | 1 +
test/files/presentation/ide-t1000567/Runner.scala | 15 +
test/files/presentation/ide-t1000567/src/a/a.scala | 5 +
test/files/presentation/ide-t1000567/src/b/b.scala | 5 +
test/files/presentation/ide-t1000609.check | 6 +
test/files/presentation/ide-t1000609/Runner.scala | 3 +
.../ide-t1000609/src/NoHyperlinking.scala | 8 +
test/files/presentation/ide-t1000976.check | 1 +
test/files/presentation/ide-t1000976.flags | 1 +
test/files/presentation/ide-t1000976/Test.scala | 30 +
test/files/presentation/ide-t1000976/src/a/A.scala | 7 +
test/files/presentation/ide-t1000976/src/b/B.scala | 7 +
test/files/presentation/ide-t1000976/src/c/C.scala | 3 +
test/files/presentation/ide-t1000976/src/d/D.scala | 7 +
test/files/presentation/ide-t1001326.check | 4 +
test/files/presentation/ide-t1001326/Test.scala | 91 +
test/files/presentation/ide-t1001326/src/a/A.scala | 5 +
test/files/presentation/ide-t1001388.check | 1 +
test/files/presentation/ide-t1001388/Test.scala | 28 +
test/files/presentation/ide-t1001388/src/a/A.scala | 6 +
test/files/presentation/implicit-member.check | 42 +
.../presentation/implicit-member/Runner.scala | 3 +
.../implicit-member/src/ImplicitMember.scala | 8 +
test/files/presentation/memory-leaks.check | 54 +
.../memory-leaks/MemoryLeaksTest.scala | 126 +
test/files/presentation/ping-pong.check | 102 +
test/files/presentation/ping-pong/Runner.scala | 3 +
.../presentation/ping-pong/src/PingPong.scala | 22 +
test/files/presentation/random.check | 27 +
test/files/presentation/random/Runner.scala | 3 +
test/files/presentation/random/src/Random.scala | 106 +
test/files/presentation/recursive-ask.check | 4 +
.../presentation/recursive-ask/RecursiveAsk.scala | 20 +
test/files/presentation/scope-completion-1.check | 19 +
.../presentation/scope-completion-1/Test.scala | 3 +
.../scope-completion-1/src/Completions.scala | 12 +
test/files/presentation/scope-completion-2.check | 33 +
.../presentation/scope-completion-2/Test.scala | 3 +
.../scope-completion-2/src/Completions.scala | 31 +
test/files/presentation/scope-completion-3.check | 111 +
.../presentation/scope-completion-3/Test.scala | 3 +
.../scope-completion-3/src/Completions.scala | 106 +
test/files/presentation/scope-completion-4.check | 293 +
.../presentation/scope-completion-4/Test.scala | 3 +
.../scope-completion-4/src/Completions.scala | 84 +
.../presentation/scope-completion-import.check | 193 +
.../scope-completion-import/Test.scala | 3 +
.../scope-completion-import/src/Completions.scala | 72 +
test/files/presentation/t5708.check | 47 +
test/files/presentation/t5708/Test.scala | 3 +
.../files/presentation/t5708/src/Completions.scala | 18 +
test/files/presentation/t7548.check | 1 +
test/files/presentation/t7548/Test.scala | 17 +
test/files/presentation/t7548/src/Foo.scala | 7 +
test/files/presentation/t7548b.check | 1 +
test/files/presentation/t7548b/Test.scala | 17 +
test/files/presentation/t7548b/src/Foo.scala | 12 +
test/files/presentation/t7915.check | 11 +
test/files/presentation/t7915/Test.scala | 8 +
test/files/presentation/t7915/src/Foo.scala | 9 +
test/files/presentation/t8085.check | 3 +
test/files/presentation/t8085.flags | 1 +
test/files/presentation/t8085/Test.scala | 27 +
.../presentation/t8085/src/nodescala/Foo.scala | 3 +
.../t8085/src/nodescala/NodeScalaSuite.scala | 10 +
.../presentation/t8085/src/nodescala/package.scala | 7 +
test/files/presentation/t8085b.check | 3 +
test/files/presentation/t8085b.flags | 1 +
test/files/presentation/t8085b/Test.scala | 27 +
.../presentation/t8085b/src/p1/nodescala/Foo.scala | 4 +
.../t8085b/src/p1/nodescala/NodeScalaSuite.scala | 11 +
.../t8085b/src/p1/nodescala/package.scala | 9 +
test/files/presentation/visibility.check | 221 +
test/files/presentation/visibility/Test.scala | 3 +
.../presentation/visibility/src/Completions.scala | 40 +
test/files/res/bug597.res | 2 -
test/files/res/bug687.check | 8 -
test/files/res/bug687.res | 3 -
test/files/res/bug687/QueryA.scala | 4 -
test/files/res/bug687/QueryB.scala | 4 -
test/files/res/bug722.res | 2 -
test/files/res/bug722/IfElse.scala | 4 -
test/files/res/bug722/Parser.scala | 8 -
test/files/res/bug722/ScanBased.scala | 10 -
test/files/res/bug735.res | 2 -
test/files/res/bug735/ScalaExpressions.scala | 11 -
test/files/res/bug735/ScalaTyper.scala | 5 -
test/files/res/bug743.res | 2 -
test/files/res/bug743/BracesXXX.scala | 6 -
test/files/res/bug743/ParserXXX.scala | 15 -
test/files/res/bug785.res | 2 -
test/files/res/bug785/ScalaNewTyper.scala | 10 -
test/files/res/bug785/ScalaTrees.scala | 7 -
test/files/res/bug831.res | 2 -
test/files/res/bug831/NewScalaParserXXX.scala | 43 -
test/files/res/bug831/NewScalaTestXXX.scala | 2 -
test/files/res/{bug597.check => t5167.check} | 0
test/files/res/t5167.res | 2 +
test/files/res/t5167/t5167_1.scala | 12 +
test/files/res/t5167/t5167_2.scala | 7 +
test/files/res/{bug722.check => t5489.check} | 0
test/files/res/t5489.res | 2 +
test/files/res/t5489/t5489.scala | 14 +
test/files/res/{bug735.check => t597.check} | 0
test/files/res/t597.res | 2 +
test/files/res/{bug597 => t597}/Main.scala | 0
test/files/res/{bug597 => t597}/Test.scala | 0
test/files/res/t687.check | 8 +
test/files/res/t687.res | 3 +
test/files/res/t687/QueryA.scala | 4 +
test/files/res/t687/QueryB.scala | 4 +
test/files/res/{bug743.check => t722.check} | 0
test/files/res/t722.res | 2 +
test/files/res/t722/IfElse.scala | 4 +
test/files/res/t722/Parser.scala | 8 +
test/files/res/t722/ScanBased.scala | 10 +
test/files/res/{bug785.check => t735.check} | 0
test/files/res/t735.res | 2 +
test/files/res/t735/ScalaExpressions.scala | 11 +
test/files/res/t735/ScalaTyper.scala | 5 +
test/files/res/{bug831.check => t743.check} | 0
test/files/res/t743.res | 2 +
test/files/res/t743/BracesXXX.scala | 6 +
test/files/res/t743/ParserXXX.scala | 15 +
test/files/res/{bug597.check => t785.check} | 0
test/files/res/t785.res | 2 +
test/files/res/t785/ScalaNewTyper.scala | 10 +
test/files/res/t785/ScalaTrees.scala | 7 +
test/files/res/{bug597.check => t831.check} | 0
test/files/res/t831.res | 2 +
test/files/res/t831/NewScalaParserXXX.scala | 43 +
test/files/res/t831/NewScalaTestXXX.scala | 2 +
test/files/run/Course-2002-01-msil.check | 34 -
test/files/run/Course-2002-02-msil.check | 187 -
test/files/run/Course-2002-03-msil.check | 67 -
test/files/run/Course-2002-04-msil.check | 64 -
test/files/run/Course-2002-08-msil.check | 171 -
test/files/run/Course-2002-08.scala | 12 +-
test/files/run/Course-2002-09-msil.check | 50 -
test/files/run/Course-2002-10-msil.check | 46 -
test/files/run/Course-2002-10.scala | 2 +-
test/files/run/Course-2002-13.scala | 11 +-
test/files/run/Meter.check | 13 +
test/files/run/Meter.scala | 109 +
test/files/run/MeterCaseClass.check | 13 +
test/files/run/MeterCaseClass.scala | 106 +
test/files/run/MutableListTest.scala | 252 +-
test/files/run/Predef.readLine.check | 3 +
test/files/run/Predef.readLine.scala | 10 +
test/files/run/QueueTest.scala | 594 +-
test/files/run/SymbolsTest.scala | 566 +-
test/files/run/WeakHashSetTest.scala | 174 +
test/files/run/absoverride-msil.check | 20 -
test/files/run/abstypetags_core.check | 30 +
test/files/run/abstypetags_core.scala | 34 +
test/files/run/abstypetags_serialize.check | 2 +
test/files/run/abstypetags_serialize.scala | 33 +
test/files/run/adding-growing-set.scala | 2 +-
test/files/run/analyzerPlugins.check | 196 +
test/files/run/analyzerPlugins.scala | 121 +
test/files/run/annotatedRetyping.check | 6 +
test/files/run/annotatedRetyping.scala | 62 +
test/files/run/applydynamic_sip.check | 29 +
test/files/run/applydynamic_sip.flags | 1 +
test/files/run/applydynamic_sip.scala | 66 +
test/files/run/array-charSeq.check | 248 +
test/files/run/array-charSeq.scala | 27 +
test/files/run/array-existential-bound.check | 4 +
test/files/run/array-existential-bound.scala | 17 +
test/files/run/arrayclone-new.scala | 108 +
.../run/{arrayclone.scala => arrayclone-old.scala} | 0
test/files/run/arrays.scala | 30 +-
.../run/{bug4062.check => backreferences.check} | 0
test/files/run/backreferences.scala | 13 +
test/files/run/bigDecimalTest.check | 6 +
test/files/run/bigDecimalTest.scala | 35 +
test/files/run/bitsets-msil.check | 33 -
test/files/run/bitsets.check | 46 +
test/files/run/bitsets.scala | 85 +
test/files/run/boolord-msil.check | 4 -
test/files/run/buffer-slice.check | 1 +
test/files/run/buffer-slice.scala | 5 +
test/files/run/bug1044.scala | 4 -
test/files/run/bug1192.scala | 7 -
test/files/run/bug1309.scala | 7 -
test/files/run/bug2005.scala | 10 -
test/files/run/bug2124.check | 1 -
test/files/run/bug2125.check | 1 -
test/files/run/bug3613.scala | 22 -
test/files/run/bug4110.scala | 11 -
test/files/run/bug4317/S_1.scala | 9 -
test/files/run/bug4671.check | 46 -
test/files/run/bug4671.scala | 13 -
test/files/run/bug4710.check | 7 -
test/files/run/bug576.scala | 45 -
test/files/run/bugs-msil.check | 96 -
test/files/run/bugs.scala | 3 +-
test/files/run/byname.scala | 6 +-
test/files/run/bytecodecs.scala | 2 +-
test/files/run/caseClassHash.check | 9 +
test/files/run/caseClassHash.scala | 37 +
test/files/run/class-symbol-contravariant.check | 36 +
test/files/run/class-symbol-contravariant.scala | 15 +
test/files/run/classfile-format-51.scala | 126 +
test/files/run/classfile-format-52.check | 2 +
test/files/run/classfile-format-52.scala | 77 +
test/files/run/classmanifests_new_alias.check | 2 +
test/files/run/classmanifests_new_alias.scala | 5 +
test/files/run/classmanifests_new_core.check | 2 +
test/files/run/classmanifests_new_core.scala | 4 +
test/files/run/classtags_contextbound.check | 1 +
test/files/run/classtags_contextbound.scala | 7 +
test/files/run/classtags_core.check | 30 +
test/files/run/classtags_core.scala | 34 +
test/files/run/classtags_multi.check | 5 +
test/files/run/classtags_multi.scala | 9 +
test/files/run/collection-conversions.check | 126 +
test/files/run/collection-conversions.scala | 64 +
test/files/run/collections.check | 8 +
test/files/run/collections.scala | 2 +
test/files/run/colltest.check | 1 +
test/files/run/colltest.scala | 3 +-
test/files/run/colltest1.check | 2 +
test/files/run/colltest1.scala | 2 +
test/files/run/compiler-asSeenFrom.check | 323 +
test/files/run/compiler-asSeenFrom.scala | 122 +
test/files/run/concurrent-map-conversions.scala | 36 +
test/files/run/constant-type.check | 30 +
test/files/run/constant-type.scala | 17 +
test/files/run/constrained-types.check | 18 +-
test/files/run/ctries-new/DumbHash.scala | 14 +
test/files/run/ctries-new/Wrap.scala | 9 +
test/files/run/ctries-new/concmap.scala | 188 +
test/files/run/ctries-new/iterator.scala | 289 +
test/files/run/ctries-new/lnode.scala | 61 +
test/files/run/ctries-new/main.scala | 47 +
test/files/run/ctries-new/snapshot.scala | 267 +
test/files/run/ctries-old/DumbHash.scala | 14 +
test/files/run/ctries-old/Wrap.scala | 9 +
test/files/run/ctries-old/concmap.scala | 188 +
test/files/run/ctries-old/iterator.scala | 289 +
test/files/run/ctries-old/lnode.scala | 61 +
test/files/run/ctries-old/main.scala | 45 +
test/files/run/ctries-old/snapshot.scala | 267 +
.../dead-code-elimination.check} | 0
test/files/run/dead-code-elimination.flags | 1 +
test/files/run/dead-code-elimination.scala | 33 +
test/files/run/delay-bad.check | 47 +
test/files/run/delay-bad.scala | 77 +
test/files/run/delay-good.check | 41 +
test/files/run/delay-good.scala | 77 +
test/files/run/dynamic-anyval.check | 4 +
test/files/run/dynamic-anyval.scala | 22 +
test/files/run/dynamic-applyDynamic.check | 14 +
test/files/run/dynamic-applyDynamic.scala | 26 +
test/files/run/dynamic-applyDynamicNamed.check | 14 +
test/files/run/dynamic-applyDynamicNamed.scala | 26 +
test/files/run/dynamic-selectDynamic.check | 13 +
test/files/run/dynamic-selectDynamic.scala | 25 +
test/files/run/dynamic-updateDynamic.check | 14 +
test/files/run/dynamic-updateDynamic.scala | 28 +
test/files/run/elidable-opt.check | 14 +
test/files/run/elidable-opt.flags | 1 +
test/files/run/elidable-opt.scala | 85 +
test/files/run/elidable.check | 15 +-
test/files/run/elidable.scala | 73 +-
test/files/run/empty-array.check | 3 +
test/files/run/empty-array.scala | 8 +
test/files/run/emptypf.check | 3 +
test/files/run/emptypf.scala | 14 +
test/files/run/enrich-gentraversable.check | 8 +
test/files/run/enrich-gentraversable.scala | 67 +
test/files/run/enums.check | 10 +
test/files/run/enums.scala | 55 +
test/files/run/eta-expand-star.check | 1 +
test/files/run/eta-expand-star.scala | 8 +
test/files/run/eta-expand-star2.check | 1 +
test/files/run/eta-expand-star2.flags | 1 +
test/files/run/eta-expand-star2.scala | 8 +
test/files/run/existentials-in-compiler.check | 156 +
test/files/run/existentials-in-compiler.scala | 84 +
test/files/run/existentials3-new.check | 24 +
test/files/run/existentials3-new.scala | 80 +
test/files/run/existentials3-old.check | 22 +
test/files/run/existentials3-old.scala | 73 +
test/files/run/exprs_serialize.check | 2 +
test/files/run/exprs_serialize.scala | 29 +
test/files/run/fail-non-value-types.check | 3 +
test/files/run/fail-non-value-types.scala | 40 +
test/files/run/finalvar.check | 6 +
test/files/run/finalvar.flags | 1 +
test/files/run/finalvar.scala | 37 +
test/files/run/fors.scala | 4 +-
test/files/run/freetypes_false_alarm1.check | 1 +
test/files/run/freetypes_false_alarm1.scala | 10 +
test/files/run/freetypes_false_alarm2.check | 1 +
test/files/run/freetypes_false_alarm2.scala | 8 +
test/files/run/future-flatmap-exec-count.check | 6 +
test/files/run/future-flatmap-exec-count.scala | 61 +
test/files/run/genericValueClass.check | 2 +
test/files/run/genericValueClass.scala | 17 +
.../{getClassTest.check => getClassTest-new.check} | 0
test/files/run/getClassTest-new.scala | 68 +
.../{getClassTest.check => getClassTest-old.check} | 0
test/files/run/getClassTest-old.scala | 66 +
test/files/run/getClassTest-valueClass.check | 2 +
test/files/run/getClassTest-valueClass.scala | 10 +
test/files/run/getClassTest.scala | 66 -
test/files/run/global-showdef.check | 4 +-
test/files/run/hashhash.scala | 13 +
test/files/run/idempotency-case-classes.check | 55 +
test/files/run/idempotency-case-classes.scala | 22 +
test/files/run/idempotency-extractors.check | 5 +
test/files/run/idempotency-extractors.scala | 22 +
test/files/run/idempotency-labels.check | 15 +
test/files/run/idempotency-labels.scala | 22 +
test/files/run/idempotency-lazy-vals.check | 23 +
test/files/run/idempotency-lazy-vals.scala | 27 +
test/files/run/idempotency-partial-functions.check | 2 +
test/files/run/idempotency-this.check | 4 +
test/files/run/idempotency-this.scala | 22 +
test/files/run/imain.check | 1 +
test/files/run/imain.scala | 17 +
test/files/run/impconvtimes-msil.check | 1 -
test/files/run/implicitclasses.scala | 10 +
.../run/{bug216.check => indexedSeq-apply.check} | 0
test/files/run/indexedSeq-apply.scala | 15 +
test/files/run/infix-msil.check | 2 -
test/files/run/inline-ex-handlers.check | 490 +
test/files/run/inline-ex-handlers.scala | 329 +
test/files/run/inner-obj-auto.check | 65 +
test/files/run/inner-obj-auto.scala | 2092 +++
.../run/interop_classtags_are_classmanifests.check | 3 +
.../run/interop_classtags_are_classmanifests.scala | 11 +
.../run/interop_manifests_are_abstypetags.check | 3 +
.../run/interop_manifests_are_abstypetags.scala | 11 +
.../run/interop_manifests_are_classtags.check | 18 +
.../run/interop_manifests_are_classtags.scala | 23 +
.../files/run/interop_manifests_are_typetags.check | 3 +
.../files/run/interop_manifests_are_typetags.scala | 11 +
.../files/run/interop_typetags_are_manifests.check | 3 +
.../files/run/interop_typetags_are_manifests.scala | 12 +
test/files/run/interpolation.check | 32 +
test/files/run/interpolation.flags | 1 +
test/files/run/interpolation.scala | 32 +
test/files/run/interpolationArgs.check | 2 +
.../interpolationArgs.flags} | 0
test/files/run/interpolationArgs.scala | 5 +
test/files/run/interpolationMultiline1.check | 26 +
test/files/run/interpolationMultiline1.flags | 1 +
test/files/run/interpolationMultiline1.scala | 26 +
test/files/run/interpolationMultiline2.check | 26 +
.../interpolationMultiline2.flags} | 0
test/files/run/interpolationMultiline2.scala | 21 +
test/files/run/iq-msil.check | 12 -
test/files/run/iq.scala | 4 +-
test/files/run/is-valid-num.scala | 312 +
test/files/run/iterators.scala | 10 +-
test/files/run/java-erasure.check | 1 +
test/files/run/java-erasure.scala | 10 +
test/files/run/json.scala | 8 +-
test/files/run/kmpSliceSearch.check | 4 +
test/files/run/kmpSliceSearch.scala | 60 +
test/files/run/large_code.check | 3 +
test/files/run/large_code.scala | 24 +
test/files/run/lazy-leaks.scala | 2 +-
test/files/run/lazy-locals.check | 41 +-
test/files/run/lazy-locals.scala | 85 +-
test/files/run/lift-and-unlift.scala | 6 +-
test/files/run/lists-run.scala | 177 +-
test/files/run/lub-visibility.check | 3 +-
test/files/run/macro-abort-fresh.check | 6 +
test/files/run/macro-abort-fresh.flags | 1 +
test/files/run/macro-abort-fresh/Macros_1.scala | 15 +
test/files/run/macro-abort-fresh/Test_2.scala | 8 +
...yncchannel.check => macro-auto-duplicate.check} | 0
test/files/run/macro-auto-duplicate/Macros_1.scala | 17 +
test/files/run/macro-auto-duplicate/Test_2.scala | 3 +
test/files/run/macro-basic-ma-md-mi.check | 1 +
test/files/run/macro-basic-ma-md-mi.flags | 1 +
test/files/run/macro-basic-ma-md-mi/Impls_1.scala | 21 +
test/files/run/macro-basic-ma-md-mi/Macros_2.scala | 10 +
test/files/run/macro-basic-ma-md-mi/Test_3.scala | 4 +
test/files/run/macro-basic-ma-mdmi.check | 1 +
test/files/run/macro-basic-ma-mdmi.flags | 1 +
.../run/macro-basic-ma-mdmi/Impls_Macros_1.scala | 32 +
test/files/run/macro-basic-ma-mdmi/Test_2.scala | 4 +
test/files/run/macro-basic-mamd-mi.check | 1 +
test/files/run/macro-basic-mamd-mi.flags | 1 +
test/files/run/macro-basic-mamd-mi/Impls_1.scala | 19 +
.../run/macro-basic-mamd-mi/Macros_Test_2.scala | 15 +
test/files/run/macro-bodyexpandstoimpl.check | 1 +
test/files/run/macro-bodyexpandstoimpl.flags | 1 +
.../run/macro-bodyexpandstoimpl/Impls_1.scala | 12 +
.../macro-bodyexpandstoimpl/Macros_Test_2.scala | 10 +
test/files/run/macro-declared-in-annotation.check | 1 +
test/files/run/macro-declared-in-annotation.flags | 1 +
.../run/macro-declared-in-annotation/Impls_1.scala | 11 +
.../macro-declared-in-annotation/Macros_2.scala | 8 +
.../run/macro-declared-in-annotation/Test_3.scala | 3 +
test/files/run/macro-declared-in-anonymous.check | 2 +
test/files/run/macro-declared-in-anonymous.flags | 1 +
.../run/macro-declared-in-anonymous/Impls_1.scala | 11 +
.../Macros_Test_2.scala | 4 +
test/files/run/macro-declared-in-block.check | 2 +
test/files/run/macro-declared-in-block.flags | 1 +
.../run/macro-declared-in-block/Impls_1.scala | 11 +
.../macro-declared-in-block/Macros_Test_2.scala | 6 +
test/files/run/macro-declared-in-class-class.check | 2 +
test/files/run/macro-declared-in-class-class.flags | 1 +
.../macro-declared-in-class-class/Impls_1.scala | 11 +
.../Macros_Test_2.scala | 10 +
.../files/run/macro-declared-in-class-object.check | 2 +
.../files/run/macro-declared-in-class-object.flags | 1 +
.../macro-declared-in-class-object/Impls_1.scala | 11 +
.../Macros_Test_2.scala | 10 +
test/files/run/macro-declared-in-class.check | 2 +
test/files/run/macro-declared-in-class.flags | 1 +
.../run/macro-declared-in-class/Impls_1.scala | 11 +
.../macro-declared-in-class/Macros_Test_2.scala | 7 +
.../run/macro-declared-in-default-param.check | 5 +
.../run/macro-declared-in-default-param.flags | 1 +
.../macro-declared-in-default-param/Impls_1.scala | 11 +
.../Macros_Test_2.scala | 7 +
.../run/macro-declared-in-implicit-class.check | 2 +
.../run/macro-declared-in-implicit-class.flags | 1 +
.../Impls_Macros_1.scala | 19 +
.../macro-declared-in-implicit-class/Test_2.scala | 4 +
test/files/run/macro-declared-in-method.check | 2 +
test/files/run/macro-declared-in-method.flags | 1 +
.../run/macro-declared-in-method/Impls_1.scala | 11 +
.../macro-declared-in-method/Macros_Test_2.scala | 8 +
.../files/run/macro-declared-in-object-class.check | 2 +
.../files/run/macro-declared-in-object-class.flags | 1 +
.../macro-declared-in-object-class/Impls_1.scala | 11 +
.../Macros_Test_2.scala | 10 +
.../run/macro-declared-in-object-object.check | 2 +
.../run/macro-declared-in-object-object.flags | 1 +
.../macro-declared-in-object-object/Impls_1.scala | 11 +
.../Macros_Test_2.scala | 10 +
test/files/run/macro-declared-in-object.check | 2 +
test/files/run/macro-declared-in-object.flags | 1 +
.../run/macro-declared-in-object/Impls_1.scala | 11 +
.../macro-declared-in-object/Macros_Test_2.scala | 7 +
.../run/macro-declared-in-package-object.check | 2 +
.../run/macro-declared-in-package-object.flags | 1 +
.../macro-declared-in-package-object/Impls_1.scala | 11 +
.../Macros_Test_2.scala | 8 +
test/files/run/macro-declared-in-refinement.check | 2 +
test/files/run/macro-declared-in-refinement.flags | 1 +
.../run/macro-declared-in-refinement/Impls_1.scala | 11 +
.../Macros_Test_2.scala | 6 +
test/files/run/macro-declared-in-trait.check | 15 +
test/files/run/macro-declared-in-trait.flags | 1 +
.../run/macro-declared-in-trait/Impls_1.scala | 11 +
.../macro-declared-in-trait/Macros_Test_2.scala | 13 +
test/files/run/macro-def-infer-return-type-a.check | 1 +
test/files/run/macro-def-infer-return-type-a.flags | 1 +
.../macro-def-infer-return-type-a/Impls_1.scala | 5 +
.../Macros_Test_2.scala | 4 +
test/files/run/macro-def-infer-return-type-b.check | 6 +
test/files/run/macro-def-infer-return-type-b.flags | 1 +
.../Impls_Macros_1.scala | 10 +
.../run/macro-def-infer-return-type-b/Test_2.scala | 8 +
test/files/run/macro-def-infer-return-type-c.check | 1 +
test/files/run/macro-def-infer-return-type-c.flags | 1 +
.../macro-def-infer-return-type-c/Impls_1.scala | 5 +
.../Macros_Test_2.scala | 4 +
test/files/run/macro-def-path-dependent-a.check | 1 +
test/files/run/macro-def-path-dependent-a.flags | 1 +
.../Impls_Macros_1.scala | 21 +
.../run/macro-def-path-dependent-a/Test_2.scala | 3 +
test/files/run/macro-def-path-dependent-b.check | 1 +
test/files/run/macro-def-path-dependent-b.flags | 1 +
.../Impls_Macros_1.scala | 20 +
.../run/macro-def-path-dependent-b/Test_2.scala | 3 +
test/files/run/macro-def-path-dependent-c.check | 1 +
test/files/run/macro-def-path-dependent-c.flags | 1 +
.../Impls_Macros_1.scala | 20 +
.../run/macro-def-path-dependent-c/Test_2.scala | 3 +
test/files/run/macro-def-path-dependent-d1.check | 1 +
test/files/run/macro-def-path-dependent-d1.flags | 1 +
.../Impls_Macros_1.scala | 9 +
.../run/macro-def-path-dependent-d1/Test_2.scala | 3 +
test/files/run/macro-def-path-dependent-d2.check | 1 +
test/files/run/macro-def-path-dependent-d2.flags | 1 +
.../run/macro-def-path-dependent-d2/Impls_1.scala | 7 +
.../run/macro-def-path-dependent-d2/Macros_2.scala | 7 +
.../run/macro-def-path-dependent-d2/Test_3.scala | 3 +
test/files/run/macro-divergence-spurious.check | 1 +
.../macro-divergence-spurious/Impls_Macros_1.scala | 23 +
.../run/macro-divergence-spurious/Test_2.scala | 3 +
.../bug680.check => run/macro-duplicate.check} | 0
test/files/run/macro-duplicate.flags | 1 +
.../files/run/macro-duplicate/Impls_Macros_1.scala | 29 +
test/files/run/macro-duplicate/Test_2.scala | 6 +
....check => macro-expand-implicit-argument.check} | 0
.../files/run/macro-expand-implicit-argument.flags | 1 +
.../macro-expand-implicit-argument/Macros_1.scala | 59 +
.../macro-expand-implicit-argument/Test_2.scala | 4 +
...macro-expand-implicit-macro-has-implicit.check} | 0
.../macro-expand-implicit-macro-has-implicit.flags | 1 +
.../Impls_1.scala | 9 +
.../Macros_Test_2.scala | 5 +
.../macro-expand-implicit-macro-is-implicit.check | 2 +
.../macro-expand-implicit-macro-is-implicit.flags | 1 +
.../Impls_1.scala | 9 +
.../Macros_Test_2.scala | 10 +
.../run/macro-expand-implicit-macro-is-val.check | 1 +
.../run/macro-expand-implicit-macro-is-val.flags | 1 +
.../Impls_1.scala | 9 +
.../Macros_Test_2.scala | 5 +
.../run/macro-expand-implicit-macro-is-view.check | 1 +
.../run/macro-expand-implicit-macro-is-view.flags | 1 +
.../Impls_1.scala | 9 +
.../Macros_Test_2.scala | 9 +
.../files/run/macro-expand-multiple-arglists.check | 1 +
.../files/run/macro-expand-multiple-arglists.flags | 1 +
.../macro-expand-multiple-arglists/Impls_1.scala | 10 +
.../Macros_Test_2.scala | 4 +
test/files/run/macro-expand-nullary-generic.check | 6 +
test/files/run/macro-expand-nullary-generic.flags | 1 +
.../run/macro-expand-nullary-generic/Impls_1.scala | 15 +
.../Macros_Test_2.scala | 15 +
.../run/macro-expand-nullary-nongeneric.check | 6 +
.../run/macro-expand-nullary-nongeneric.flags | 1 +
.../macro-expand-nullary-nongeneric/Impls_1.scala | 14 +
.../Macros_Test_2.scala | 15 +
test/files/run/macro-expand-overload.check | 6 +
test/files/run/macro-expand-overload.flags | 1 +
test/files/run/macro-expand-overload/Impls_1.scala | 15 +
.../run/macro-expand-overload/Macros_Test_2.scala | 20 +
test/files/run/macro-expand-override.check | 15 +
test/files/run/macro-expand-override.flags | 1 +
test/files/run/macro-expand-override/Impls_1.scala | 15 +
.../run/macro-expand-override/Macros_Test_2.scala | 43 +
test/files/run/macro-expand-recursive.check | 1 +
test/files/run/macro-expand-recursive.flags | 1 +
.../files/run/macro-expand-recursive/Impls_1.scala | 15 +
.../run/macro-expand-recursive/Macros_Test_2.scala | 8 +
.../macro-expand-tparams-bounds-a.check} | 0
test/files/run/macro-expand-tparams-bounds-a.flags | 1 +
.../macro-expand-tparams-bounds-a/Impls_1.scala | 5 +
.../Macros_Test_2.scala | 8 +
.../macro-expand-tparams-bounds-b.check} | 0
test/files/run/macro-expand-tparams-bounds-b.flags | 1 +
.../macro-expand-tparams-bounds-b/Impls_1.scala | 7 +
.../Macros_Test_2.scala | 10 +
test/files/run/macro-expand-tparams-explicit.check | 1 +
test/files/run/macro-expand-tparams-explicit.flags | 1 +
.../macro-expand-tparams-explicit/Impls_1.scala | 11 +
.../Macros_Test_2.scala | 4 +
test/files/run/macro-expand-tparams-implicit.check | 2 +
test/files/run/macro-expand-tparams-implicit.flags | 1 +
.../macro-expand-tparams-implicit/Impls_1.scala | 11 +
.../Macros_Test_2.scala | 5 +
.../run/macro-expand-tparams-only-in-impl.flags | 1 +
.../Impls_1.scala | 5 +
.../Macros_Test_2.scala | 8 +
test/files/run/macro-expand-tparams-optional.check | 1 +
test/files/run/macro-expand-tparams-optional.flags | 1 +
.../macro-expand-tparams-optional/Impls_1.scala | 9 +
.../Macros_Test_2.scala | 4 +
test/files/run/macro-expand-tparams-prefix-a.check | 4 +
test/files/run/macro-expand-tparams-prefix-a.flags | 1 +
.../macro-expand-tparams-prefix-a/Impls_1.scala | 11 +
.../Macros_Test_2.scala | 10 +
test/files/run/macro-expand-tparams-prefix-b.check | 2 +
test/files/run/macro-expand-tparams-prefix-b.flags | 1 +
.../macro-expand-tparams-prefix-b/Impls_1.scala | 12 +
.../Macros_Test_2.scala | 10 +
.../files/run/macro-expand-tparams-prefix-c1.check | 3 +
.../files/run/macro-expand-tparams-prefix-c1.flags | 1 +
.../macro-expand-tparams-prefix-c1/Impls_1.scala | 13 +
.../Macros_Test_2.scala | 11 +
.../files/run/macro-expand-tparams-prefix-c2.check | 3 +
.../files/run/macro-expand-tparams-prefix-c2.flags | 1 +
.../Impls_Macros_1.scala | 19 +
.../macro-expand-tparams-prefix-c2/Test_2.scala | 5 +
.../files/run/macro-expand-tparams-prefix-d1.check | 3 +
.../files/run/macro-expand-tparams-prefix-d1.flags | 1 +
.../macro-expand-tparams-prefix-d1/Impls_1.scala | 13 +
.../Macros_Test_2.scala | 11 +
...pand-varargs-explicit-over-nonvarargs-bad.check | 4 +
...pand-varargs-explicit-over-nonvarargs-bad.flags | 1 +
.../Impls_1.scala | 9 +
.../Macros_Test_2.scala | 12 +
...and-varargs-explicit-over-nonvarargs-good.check | 1 +
...and-varargs-explicit-over-nonvarargs-good.flags | 1 +
.../Impls_1.scala | 13 +
.../Macros_Test_2.scala | 8 +
...acro-expand-varargs-explicit-over-varargs.check | 1 +
...acro-expand-varargs-explicit-over-varargs.flags | 1 +
.../Impls_1.scala | 13 +
.../Macros_Test_2.scala | 8 +
...o-expand-varargs-implicit-over-nonvarargs.check | 1 +
...o-expand-varargs-implicit-over-nonvarargs.flags | 1 +
.../Impls_1.scala | 9 +
.../Macros_Test_2.scala | 7 +
...acro-expand-varargs-implicit-over-varargs.check | 1 +
...acro-expand-varargs-implicit-over-varargs.flags | 1 +
.../Impls_1.scala | 13 +
.../Macros_Test_2.scala | 7 +
test/files/run/macro-impl-default-params.check | 5 +
test/files/run/macro-impl-default-params.flags | 1 +
.../macro-impl-default-params/Impls_Macros_1.scala | 21 +
.../run/macro-impl-default-params/Test_2.scala | 4 +
test/files/run/macro-impl-rename-context.check | 2 +
test/files/run/macro-impl-rename-context.flags | 1 +
.../macro-impl-rename-context/Impls_Macros_1.scala | 15 +
.../run/macro-impl-rename-context/Test_2.scala | 4 +
...-invalidret-doesnt-conform-to-def-rettype.check | 5 +
...-invalidret-doesnt-conform-to-def-rettype.flags | 1 +
.../Impls_Macros_1.scala | 12 +
.../Test_2.scala | 8 +
test/files/run/macro-invalidret-nontypeable.check | 3 +
test/files/run/macro-invalidret-nontypeable.flags | 1 +
.../Impls_Macros_1.scala | 13 +
.../run/macro-invalidret-nontypeable/Test_2.scala | 8 +
test/files/run/macro-invalidusage-badret.check | 5 +
test/files/run/macro-invalidusage-badret.flags | 1 +
.../macro-invalidusage-badret/Impls_Macros_1.scala | 9 +
.../run/macro-invalidusage-badret/Test_2.scala | 8 +
...alidusage-partialapplication-with-tparams.check | 3 +
...alidusage-partialapplication-with-tparams.flags | 1 +
.../Impls_Macros_1.scala | 13 +
.../Test_2.scala | 8 +
.../macro-invalidusage-partialapplication.check | 3 +
.../macro-invalidusage-partialapplication.flags | 1 +
.../Impls_Macros_1.scala | 14 +
.../Test_2.scala | 8 +
test/files/run/macro-openmacros.check | 3 +
test/files/run/macro-openmacros.flags | 1 +
.../run/macro-openmacros/Impls_Macros_1.scala | 25 +
test/files/run/macro-openmacros/Test_2.scala | 3 +
test/files/run/macro-quasiinvalidbody-c.check | 1 +
test/files/run/macro-quasiinvalidbody-c.flags | 1 +
.../macro-quasiinvalidbody-c/Impls_Macros_1.scala | 9 +
.../run/macro-quasiinvalidbody-c/Test_2.scala | 4 +
test/files/run/macro-range.check | 9 +
test/files/run/macro-range.flags | 1 +
test/files/run/macro-range/Common_1.scala | 47 +
.../run/macro-range/Expansion_Impossible_2.scala | 53 +
.../run/macro-range/Expansion_Possible_3.scala | 7 +
.../run/macro-reflective-ma-normal-mdmi.check | 1 +
.../run/macro-reflective-ma-normal-mdmi.flags | 1 +
.../Impls_Macros_1.scala | 13 +
.../macro-reflective-ma-normal-mdmi/Test_2.scala | 7 +
.../run/macro-reflective-mamd-normal-mi.check | 1 +
.../macro-reflective-mamd-normal-mi/Impls_1.scala | 9 +
.../Macros_Test_2.scala | 20 +
.../run/macro-reify-abstypetag-notypeparams.check | 2 +
.../macro-reify-abstypetag-notypeparams/Test.scala | 6 +
.../macro-reify-abstypetag-typeparams-notags.check | 2 +
.../Test.scala | 9 +
.../macro-reify-abstypetag-typeparams-tags.check | 2 +
.../Test.scala | 9 +
.../run/macro-reify-abstypetag-usetypetag.check | 2 +
.../macro-reify-abstypetag-usetypetag/Test.scala | 9 +
test/files/run/macro-reify-basic.check | 1 +
test/files/run/macro-reify-basic.flags | 1 +
test/files/run/macro-reify-basic/Macros_1.scala | 11 +
test/files/run/macro-reify-basic/Test_2.scala | 3 +
test/files/run/macro-reify-freevars.check | 3 +
test/files/run/macro-reify-freevars.flags | 1 +
test/files/run/macro-reify-freevars/Macros_1.scala | 20 +
test/files/run/macro-reify-freevars/Test_2.scala | 11 +
.../macro-reify-nested-a.check} | 0
test/files/run/macro-reify-nested-a.flags | 1 +
.../run/macro-reify-nested-a/Impls_Macros_1.scala | 46 +
test/files/run/macro-reify-nested-a/Test_2.scala | 4 +
.../macro-reify-nested-b.check} | 0
test/files/run/macro-reify-nested-b.flags | 1 +
.../run/macro-reify-nested-b/Impls_Macros_1.scala | 46 +
test/files/run/macro-reify-nested-b/Test_2.scala | 4 +
....check => macro-reify-ref-to-packageless.check} | 0
.../files/run/macro-reify-ref-to-packageless.flags | 1 +
.../macro-reify-ref-to-packageless/Impls_1.scala | 6 +
.../macro-reify-ref-to-packageless/Test_2.scala | 4 +
...heck => macro-reify-splice-outside-reify.check} | 0
.../run/macro-reify-splice-outside-reify.flags | 1 +
.../Impls_Macros_1.scala | 13 +
.../macro-reify-splice-outside-reify/Test_2.scala | 8 +
test/files/run/macro-reify-splice-splice.check | 1 +
test/files/run/macro-reify-splice-splice.flags | 1 +
.../run/macro-reify-splice-splice/Macros_1.scala | 11 +
.../run/macro-reify-splice-splice/Test_2.scala | 3 +
test/files/run/macro-reify-staticXXX.check | 12 +
test/files/run/macro-reify-staticXXX.flags | 1 +
.../files/run/macro-reify-staticXXX/Macros_1.scala | 48 +
test/files/run/macro-reify-staticXXX/Test_2.scala | 4 +
test/files/run/macro-reify-tagful-a.check | 1 +
test/files/run/macro-reify-tagful-a.flags | 1 +
test/files/run/macro-reify-tagful-a/Macros_1.scala | 12 +
test/files/run/macro-reify-tagful-a/Test_2.scala | 4 +
test/files/run/macro-reify-tagless-a.check | 3 +
test/files/run/macro-reify-tagless-a.flags | 1 +
.../run/macro-reify-tagless-a/Impls_Macros_1.scala | 11 +
test/files/run/macro-reify-tagless-a/Test_2.scala | 14 +
test/files/run/macro-reify-type.check | 1 +
test/files/run/macro-reify-type.flags | 1 +
test/files/run/macro-reify-type/Macros_1.scala | 27 +
test/files/run/macro-reify-type/Test_2.scala | 21 +
.../run/macro-reify-typetag-notypeparams.check | 2 +
.../macro-reify-typetag-notypeparams/Test.scala | 6 +
.../run/macro-reify-typetag-typeparams-tags.check | 2 +
.../macro-reify-typetag-typeparams-tags/Test.scala | 9 +
test/files/run/macro-reify-unreify.check | 1 +
test/files/run/macro-reify-unreify.flags | 1 +
test/files/run/macro-reify-unreify/Macros_1.scala | 20 +
test/files/run/macro-reify-unreify/Test_2.scala | 3 +
test/files/run/macro-repl-basic.check | 54 +
test/files/run/macro-repl-basic.scala | 39 +
test/files/run/macro-repl-dontexpand.check | 12 +
test/files/run/macro-repl-dontexpand.scala | 9 +
test/files/run/macro-settings.check | 1 +
test/files/run/macro-settings.flags | 1 +
test/files/run/macro-settings/Impls_Macros_1.scala | 11 +
test/files/run/macro-settings/Test_2.scala | 3 +
test/files/run/macro-sip19-revised.check | 5 +
test/files/run/macro-sip19-revised.flags | 1 +
.../run/macro-sip19-revised/Impls_Macros_1.scala | 34 +
test/files/run/macro-sip19-revised/Test_2.scala | 12 +
test/files/run/macro-sip19.check | 5 +
test/files/run/macro-sip19.flags | 1 +
test/files/run/macro-sip19/Impls_Macros_1.scala | 25 +
test/files/run/macro-sip19/Test_2.scala | 16 +
test/files/run/macro-system-properties.check | 26 +
test/files/run/macro-system-properties.scala | 16 +
.../run/macro-typecheck-implicitsdisabled.check | 2 +
.../run/macro-typecheck-implicitsdisabled.flags | 1 +
.../Impls_Macros_1.scala | 28 +
.../macro-typecheck-implicitsdisabled/Test_2.scala | 4 +
.../files/run/macro-typecheck-macrosdisabled.check | 32 +
.../files/run/macro-typecheck-macrosdisabled.flags | 1 +
.../Impls_Macros_1.scala | 30 +
.../macro-typecheck-macrosdisabled/Test_2.scala | 4 +
.../run/macro-typecheck-macrosdisabled2.check | 32 +
.../run/macro-typecheck-macrosdisabled2.flags | 1 +
.../Impls_Macros_1.scala | 30 +
.../macro-typecheck-macrosdisabled2/Test_2.scala | 4 +
test/files/run/macro-undetparams-consfromsls.check | 5 +
test/files/run/macro-undetparams-consfromsls.flags | 1 +
.../Impls_Macros_1.scala | 18 +
.../run/macro-undetparams-consfromsls/Test_2.scala | 7 +
test/files/run/macro-undetparams-implicitval.check | 1 +
test/files/run/macro-undetparams-implicitval.flags | 1 +
.../run/macro-undetparams-implicitval/Test.scala | 6 +
test/files/run/macro-undetparams-macroitself.check | 2 +
test/files/run/macro-undetparams-macroitself.flags | 1 +
.../Impls_Macros_1.scala | 8 +
.../run/macro-undetparams-macroitself/Test_2.scala | 4 +
test/files/run/manifests-new.scala | 149 +
test/files/run/manifests-old.scala | 147 +
.../manifests-undeprecated-in-2.10.0.flags} | 0
.../run/manifests-undeprecated-in-2.10.0.scala | 15 +
test/files/run/manifests.scala | 147 -
test/files/run/map_java_conversions.scala | 2 +-
test/files/run/matchonseq.check | 2 +
test/files/run/matchonseq.scala | 8 +
test/files/run/misc-msil.check | 33 -
test/files/run/mock.check | 3 -
test/files/run/mock.scala | 29 -
test/files/run/multi-array.scala | 4 +-
test/files/run/names-defaults.check | 2 +-
test/files/run/names-defaults.scala | 9 +-
test/files/run/newTags.check | 3 +
test/files/run/newTags.scala | 11 +
test/files/run/nonlocalreturn.check | 1 +
test/files/run/nonlocalreturn.scala | 15 +
test/files/run/nullable-lazyvals.check | 3 +
test/files/run/nullable-lazyvals.scala | 36 +
test/files/run/numbereq.scala | 35 +-
test/files/run/optimizer-array-load.check | 6 +
.../optimizer-array-load.flags} | 0
test/files/run/optimizer-array-load.scala | 16 +
test/files/run/option-fold.check | 5 +
test/files/run/option-fold.scala | 19 +
test/files/run/origins.check | 2 +-
test/files/run/origins.scala | 4 +-
test/files/run/outertest.scala | 57 +
test/files/run/packrat1.scala | 2 +-
test/files/run/packrat2.scala | 2 +-
test/files/run/packrat3.check | 2 +-
test/files/run/packrat3.scala | 2 +-
test/files/run/parmap-ops.scala | 48 +
test/files/run/parserFilter.check | 9 +
test/files/run/parserFilter.scala | 15 +
test/files/run/parserForFilter.check | 1 +
test/files/run/parserForFilter.scala | 12 +
test/files/run/parserJavaIdent.check | 26 +
test/files/run/parserJavaIdent.scala | 26 +
test/files/run/parserNoSuccessMessage.check | 20 +
test/files/run/parserNoSuccessMessage.scala | 19 +
test/files/run/partialfun.check | 6 +
test/files/run/partialfun.scala | 86 +
test/files/run/patmat-finally.scala | 25 +
test/files/run/patmat_unapp_abstype-new.check | 4 +
test/files/run/patmat_unapp_abstype-new.scala | 76 +
test/files/run/patmat_unapp_abstype-old.check | 4 +
test/files/run/patmat_unapp_abstype-old.flags | 1 +
test/files/run/patmat_unapp_abstype-old.scala | 83 +
test/files/run/patmatnew.scala | 922 +-
test/files/run/phantomValueClass.check | 1 +
test/files/run/phantomValueClass.scala | 10 +
test/files/run/predef-cycle.scala | 71 +
test/files/run/primitive-sigs-2-new.check | 7 +
test/files/run/primitive-sigs-2-new.scala | 32 +
test/files/run/primitive-sigs-2-old.check | 7 +
test/files/run/primitive-sigs-2-old.scala | 39 +
test/files/run/primitive-sigs-2.check | 3 -
test/files/run/primitive-sigs-2.scala | 20 -
test/files/run/private-inline.check | 1 +
.../bug3252.flags => run/private-inline.flags} | 0
test/files/run/private-inline.scala | 52 +
test/files/run/programmatic-main.check | 57 +-
test/files/run/promotion-msil.check | 4 -
test/files/run/proxy.check | 2 +
test/files/run/proxy.scala | 8 +
test/files/run/pure-args-byname-noinline.check | 12 +
test/files/run/pure-args-byname-noinline.scala | 33 +
test/files/run/randomAccessSeq-apply.scala | 15 -
test/files/run/range-unit.check | 4178 ++++++
test/files/run/range-unit.scala | 55 +
test/files/run/rawstrings.check | 1 +
test/files/run/rawstrings.scala | 3 +
.../files/run/reflection-allmirrors-tostring.check | 14 +
.../files/run/reflection-allmirrors-tostring.scala | 43 +
...eflection-constructormirror-inner-badpath.check | 2 +
...eflection-constructormirror-inner-badpath.scala | 32 +
.../reflection-constructormirror-inner-good.check | 1 +
.../reflection-constructormirror-inner-good.scala | 26 +
...flection-constructormirror-nested-badpath.check | 2 +
...flection-constructormirror-nested-badpath.scala | 32 +
.../reflection-constructormirror-nested-good.check | 1 +
.../reflection-constructormirror-nested-good.scala | 26 +
...ection-constructormirror-toplevel-badpath.check | 2 +
...ection-constructormirror-toplevel-badpath.scala | 33 +
...eflection-constructormirror-toplevel-good.check | 1 +
...eflection-constructormirror-toplevel-good.scala | 27 +
test/files/run/reflection-enclosed-basic.check | 18 +
test/files/run/reflection-enclosed-basic.scala | 46 +
.../run/reflection-enclosed-inner-basic.check | 20 +
.../run/reflection-enclosed-inner-basic.scala | 52 +
.../reflection-enclosed-inner-inner-basic.check | 20 +
.../reflection-enclosed-inner-inner-basic.scala | 58 +
.../reflection-enclosed-inner-nested-basic.check | 20 +
.../reflection-enclosed-inner-nested-basic.scala | 55 +
.../run/reflection-enclosed-nested-basic.check | 20 +
.../run/reflection-enclosed-nested-basic.scala | 52 +
.../reflection-enclosed-nested-inner-basic.check | 20 +
.../reflection-enclosed-nested-inner-basic.scala | 54 +
.../reflection-enclosed-nested-nested-basic.check | 20 +
.../reflection-enclosed-nested-nested-basic.scala | 54 +
test/files/run/reflection-equality.check | 53 +
test/files/run/reflection-equality.scala | 22 +
.../reflection-fieldmirror-accessorsareokay.check | 6 +
.../reflection-fieldmirror-accessorsareokay.scala | 29 +
.../run/reflection-fieldmirror-ctorparam.check | 3 +
.../run/reflection-fieldmirror-ctorparam.scala | 21 +
.../run/reflection-fieldmirror-getsetval.check | 2 +
.../run/reflection-fieldmirror-getsetval.scala | 18 +
.../run/reflection-fieldmirror-getsetvar.check | 2 +
.../run/reflection-fieldmirror-getsetvar.scala | 18 +
...flection-fieldmirror-nmelocalsuffixstring.check | 1 +
...flection-fieldmirror-nmelocalsuffixstring.scala | 16 +
.../run/reflection-fieldmirror-privatethis.check | 3 +
.../run/reflection-fieldmirror-privatethis.scala | 19 +
.../run/reflection-fieldsymbol-navigation.check | 6 +
.../run/reflection-fieldsymbol-navigation.scala | 15 +
test/files/run/reflection-implClass.scala | 40 +
test/files/run/reflection-implicit.check | 2 +
test/files/run/reflection-implicit.scala | 15 +
test/files/run/reflection-java-annotations.check | 1 +
.../JavaAnnottee_1.java | 47 +
.../JavaComplexAnnotation_1.java | 34 +
.../JavaSimpleAnnotation_1.java | 21 +
.../JavaSimpleEnumeration_1.java | 4 +
.../run/reflection-java-annotations/Test_2.scala | 7 +
test/files/run/reflection-java-crtp.check | 1 +
.../JavaSimpleEnumeration_1.java | 4 +
test/files/run/reflection-java-crtp/Main_2.scala | 8 +
.../files/run/reflection-magicsymbols-invoke.check | 124 +
.../files/run/reflection-magicsymbols-invoke.scala | 100 +
test/files/run/reflection-magicsymbols-repl.check | 39 +
test/files/run/reflection-magicsymbols-repl.scala | 23 +
.../run/reflection-magicsymbols-vanilla.check | 8 +
.../run/reflection-magicsymbols-vanilla.scala | 20 +
test/files/run/reflection-mem-glbs.scala | 13 +
test/files/run/reflection-mem-tags.scala | 17 +
test/files/run/reflection-mem-typecheck.scala | 26 +
.../files/run/reflection-methodsymbol-params.check | 8 +
.../files/run/reflection-methodsymbol-params.scala | 24 +
.../run/reflection-methodsymbol-returntype.check | 8 +
.../run/reflection-methodsymbol-returntype.scala | 24 +
.../run/reflection-methodsymbol-typeparams.check | 8 +
.../run/reflection-methodsymbol-typeparams.scala | 24 +
.../reflection-modulemirror-inner-badpath.check | 2 +
.../reflection-modulemirror-inner-badpath.scala | 24 +
.../run/reflection-modulemirror-inner-good.check | 1 +
.../run/reflection-modulemirror-inner-good.scala | 23 +
.../reflection-modulemirror-nested-badpath.check | 2 +
.../reflection-modulemirror-nested-badpath.scala | 26 +
.../run/reflection-modulemirror-nested-good.check | 1 +
.../run/reflection-modulemirror-nested-good.scala | 24 +
.../reflection-modulemirror-toplevel-badpath.check | 2 +
.../reflection-modulemirror-toplevel-badpath.scala | 26 +
.../reflection-modulemirror-toplevel-good.check | 1 +
.../reflection-modulemirror-toplevel-good.scala | 20 +
test/files/run/reflection-names.check | 4 +
test/files/run/reflection-names.scala | 15 +
test/files/run/reflection-repl-classes.check | 35 +
test/files/run/reflection-repl-classes.scala | 22 +
test/files/run/reflection-repl-elementary.check | 9 +
test/files/run/reflection-repl-elementary.scala | 7 +
test/files/run/reflection-sanitychecks.check | 30 +
test/files/run/reflection-sanitychecks.scala | 49 +
test/files/run/reflection-sorted-decls.check | 3 +
test/files/run/reflection-sorted-decls.scala | 8 +
test/files/run/reflection-sorted-members.check | 4 +
test/files/run/reflection-sorted-members.scala | 11 +
.../run/reflection-valueclasses-derived.check | 3 +
.../run/reflection-valueclasses-derived.scala | 12 +
test/files/run/reflection-valueclasses-magic.check | 1456 ++
test/files/run/reflection-valueclasses-magic.scala | 116 +
.../run/reflection-valueclasses-standard.check | 27 +
.../run/reflection-valueclasses-standard.scala | 21 +
test/files/run/reflinit.check | 1 +
test/files/run/reflinit.scala | 6 +
test/files/run/reify-aliases.check | 1 +
test/files/run/reify-aliases.scala | 5 +
test/files/run/reify-repl-fail-gracefully.check | 21 +
test/files/run/reify-repl-fail-gracefully.scala | 10 +
test/files/run/reify-staticXXX.check | 24 +
test/files/run/reify-staticXXX.scala | 56 +
test/files/run/reify_ann1a.check | 30 +
test/files/run/reify_ann1a.scala | 28 +
test/files/run/reify_ann1b.check | 30 +
test/files/run/reify_ann1b.scala | 28 +
test/files/run/reify_ann2a.check | 44 +
test/files/run/reify_ann2a.scala | 28 +
test/files/run/reify_ann3.check | 21 +
test/files/run/reify_ann3.scala | 22 +
test/files/run/reify_ann4.check | 32 +
test/files/run/reify_ann4.scala | 26 +
test/files/run/reify_ann5.check | 22 +
test/files/run/reify_ann5.scala | 23 +
test/files/run/reify_anonymous.check | 1 +
test/files/run/reify_anonymous.scala | 8 +
test/files/run/reify_classfileann_a.check | 18 +
test/files/run/reify_classfileann_a.scala | 22 +
test/files/run/reify_classfileann_b.check | 20 +
test/files/run/reify_classfileann_b.scala | 26 +
test/files/run/reify_closure1.check | 2 +
test/files/run/reify_closure1.scala | 19 +
test/files/run/reify_closure2a.check | 2 +
test/files/run/reify_closure2a.scala | 19 +
test/files/run/reify_closure3a.check | 2 +
test/files/run/reify_closure3a.scala | 21 +
test/files/run/reify_closure4a.check | 2 +
test/files/run/reify_closure4a.scala | 21 +
test/files/run/reify_closure5a.check | 2 +
test/files/run/reify_closure5a.scala | 21 +
test/files/run/reify_closure6.check | 7 +
test/files/run/reify_closure6.scala | 29 +
test/files/run/reify_closure7.check | 6 +
test/files/run/reify_closure7.scala | 33 +
test/files/run/reify_closure8a.check | 1 +
test/files/run/reify_closure8a.scala | 15 +
test/files/run/reify_closure8b.check | 3 +
test/files/run/reify_closure8b.scala | 21 +
.../run/{t4560.check => reify_closures10.check} | 0
test/files/run/reify_closures10.scala | 13 +
test/files/run/reify_complex.check | 1 +
test/files/run/reify_complex.scala | 25 +
test/files/run/reify_copypaste1.check | 2 +
test/files/run/reify_copypaste1.scala | 19 +
test/files/run/reify_copypaste2.check | 1 +
test/files/run/reify_copypaste2.scala | 10 +
test/files/run/reify_csv.check | 10 +
test/files/run/reify_csv.scala | 36 +
test/files/run/reify_extendbuiltins.check | 1 +
test/files/run/reify_extendbuiltins.scala | 15 +
.../{jvm/bug680.check => run/reify_for1.check} | 0
test/files/run/reify_for1.scala | 10 +
test/files/run/reify_fors_newpatmat.check | 5 +
test/files/run/reify_fors_newpatmat.scala | 101 +
test/files/run/reify_fors_oldpatmat.check | 5 +
.../reify_fors_oldpatmat.flags} | 0
test/files/run/reify_fors_oldpatmat.scala | 101 +
test/files/run/reify_generic.check | 1 +
test/files/run/reify_generic.scala | 9 +
test/files/run/reify_generic2.check | 1 +
test/files/run/reify_generic2.scala | 10 +
test/files/run/reify_getter.check | 1 +
test/files/run/reify_getter.scala | 18 +
test/files/run/reify_inheritance.check | 1 +
test/files/run/reify_inheritance.scala | 17 +
test/files/run/reify_inner1.check | 1 +
test/files/run/reify_inner1.scala | 16 +
test/files/run/reify_inner2.check | 1 +
test/files/run/reify_inner2.scala | 16 +
test/files/run/reify_inner3.check | 1 +
test/files/run/reify_inner3.scala | 16 +
test/files/run/reify_inner4.check | 1 +
test/files/run/reify_inner4.scala | 16 +
test/files/run/reify_lazyevaluation.check | 8 +
test/files/run/reify_lazyevaluation.scala | 59 +
test/files/run/reify_lazyunit.check | 3 +
test/files/run/reify_lazyunit.scala | 13 +
test/files/run/reify_magicsymbols.check | 13 +
test/files/run/reify_magicsymbols.scala | 17 +
test/files/run/reify_maps_newpatmat.check | 4 +
test/files/run/reify_maps_newpatmat.scala | 20 +
test/files/run/reify_maps_oldpatmat.check | 4 +
.../reify_maps_oldpatmat.flags} | 0
test/files/run/reify_maps_oldpatmat.scala | 20 +
.../reify_metalevel_breach_+0_refers_to_1.check | 1 +
.../reify_metalevel_breach_+0_refers_to_1.scala | 18 +
.../reify_metalevel_breach_-1_refers_to_0_a.check | 1 +
.../reify_metalevel_breach_-1_refers_to_0_a.scala | 16 +
.../reify_metalevel_breach_-1_refers_to_0_b.check | 1 +
.../reify_metalevel_breach_-1_refers_to_0_b.scala | 21 +
.../reify_metalevel_breach_-1_refers_to_1.check | 1 +
.../reify_metalevel_breach_-1_refers_to_1.scala | 18 +
.../run/reify_nested_inner_refers_to_global.check | 1 +
.../run/reify_nested_inner_refers_to_global.scala | 17 +
.../run/reify_nested_inner_refers_to_local.check | 1 +
.../run/reify_nested_inner_refers_to_local.scala | 17 +
.../run/reify_nested_outer_refers_to_global.check | 1 +
.../run/reify_nested_outer_refers_to_global.scala | 19 +
.../run/reify_nested_outer_refers_to_local.check | 1 +
.../run/reify_nested_outer_refers_to_local.scala | 19 +
test/files/run/reify_newimpl_01.check | 1 +
test/files/run/reify_newimpl_01.scala | 13 +
test/files/run/reify_newimpl_02.check | 1 +
test/files/run/reify_newimpl_02.scala | 13 +
test/files/run/reify_newimpl_03.check | 1 +
test/files/run/reify_newimpl_03.scala | 13 +
test/files/run/reify_newimpl_04.check | 1 +
test/files/run/reify_newimpl_04.scala | 13 +
test/files/run/reify_newimpl_05.check | 1 +
test/files/run/reify_newimpl_05.scala | 14 +
test/files/run/reify_newimpl_06.check | 1 +
test/files/run/reify_newimpl_06.scala | 13 +
test/files/run/reify_newimpl_11.check | 2 +
test/files/run/reify_newimpl_11.scala | 19 +
test/files/run/reify_newimpl_12.check | 1 +
test/files/run/reify_newimpl_12.scala | 14 +
test/files/run/reify_newimpl_13.check | 2 +
test/files/run/reify_newimpl_13.scala | 21 +
test/files/run/reify_newimpl_14.check | 1 +
test/files/run/reify_newimpl_14.scala | 16 +
test/files/run/reify_newimpl_15.check | 1 +
test/files/run/reify_newimpl_15.scala | 15 +
test/files/run/reify_newimpl_18.check | 1 +
test/files/run/reify_newimpl_18.scala | 15 +
test/files/run/reify_newimpl_19.check | 2 +
test/files/run/reify_newimpl_19.scala | 20 +
test/files/run/reify_newimpl_20.check | 1 +
test/files/run/reify_newimpl_20.scala | 16 +
test/files/run/reify_newimpl_21.check | 1 +
test/files/run/reify_newimpl_21.scala | 20 +
test/files/run/reify_newimpl_22.check | 29 +
test/files/run/reify_newimpl_22.scala | 17 +
test/files/run/reify_newimpl_23.check | 28 +
test/files/run/reify_newimpl_23.scala | 16 +
test/files/run/reify_newimpl_25.check | 19 +
test/files/run/reify_newimpl_25.scala | 13 +
test/files/run/reify_newimpl_26.check | 21 +
test/files/run/reify_newimpl_26.scala | 13 +
test/files/run/reify_newimpl_27.check | 1 +
test/files/run/reify_newimpl_27.scala | 15 +
test/files/run/reify_newimpl_29.check | 1 +
test/files/run/reify_newimpl_29.scala | 15 +
test/files/run/reify_newimpl_30.check | 2 +
test/files/run/reify_newimpl_30.scala | 18 +
test/files/run/reify_newimpl_31.check | 1 +
test/files/run/reify_newimpl_31.scala | 15 +
test/files/run/reify_newimpl_33.check | 1 +
test/files/run/reify_newimpl_33.scala | 16 +
test/files/run/reify_newimpl_35.check | 17 +
test/files/run/reify_newimpl_35.scala | 10 +
.../{syncchannel.check => reify_newimpl_36.check} | 0
test/files/run/reify_newimpl_36.scala | 15 +
.../{syncchannel.check => reify_newimpl_37.check} | 0
test/files/run/reify_newimpl_37.scala | 16 +
.../{syncchannel.check => reify_newimpl_38.check} | 0
test/files/run/reify_newimpl_38.scala | 15 +
.../{syncchannel.check => reify_newimpl_39.check} | 0
test/files/run/reify_newimpl_39.scala | 16 +
test/files/run/reify_newimpl_40.check | 1 +
test/files/run/reify_newimpl_40.scala | 16 +
test/files/run/reify_newimpl_41.check | 3 +
test/files/run/reify_newimpl_41.scala | 18 +
test/files/run/reify_newimpl_42.check | 3 +
test/files/run/reify_newimpl_42.scala | 17 +
test/files/run/reify_newimpl_43.check | 2 +
test/files/run/reify_newimpl_43.scala | 16 +
test/files/run/reify_newimpl_44.check | 2 +
test/files/run/reify_newimpl_44.scala | 16 +
test/files/run/reify_newimpl_45.check | 2 +
test/files/run/reify_newimpl_45.scala | 16 +
test/files/run/reify_newimpl_47.check | 1 +
test/files/run/reify_newimpl_47.scala | 17 +
test/files/run/reify_newimpl_48.check | 1 +
test/files/run/reify_newimpl_48.scala | 22 +
test/files/run/reify_newimpl_49.check | 3 +
test/files/run/reify_newimpl_49.scala | 16 +
test/files/run/reify_newimpl_50.check | 3 +
test/files/run/reify_newimpl_50.scala | 15 +
test/files/run/reify_newimpl_51.check | 3 +
test/files/run/reify_newimpl_51.scala | 18 +
test/files/run/reify_newimpl_52.check | 3 +
test/files/run/reify_newimpl_52.scala | 18 +
.../{jvm/bug680.check => run/reify_printf.check} | 0
test/files/run/reify_printf.scala | 71 +
test/files/run/reify_properties.check | 2 +
test/files/run/reify_properties.scala | 57 +
test/files/run/reify_renamed_term_basic.check | 1 +
test/files/run/reify_renamed_term_basic.scala | 20 +
.../run/reify_renamed_term_local_to_reifee.check | 1 +
.../run/reify_renamed_term_local_to_reifee.scala | 20 +
.../run/reify_renamed_term_overloaded_method.check | 1 +
.../run/reify_renamed_term_overloaded_method.scala | 17 +
test/files/run/reify_renamed_term_si5841.check | 1 +
test/files/run/reify_renamed_term_si5841.scala | 7 +
test/files/run/reify_renamed_type_basic.check | 1 +
test/files/run/reify_renamed_type_basic.scala | 16 +
.../run/reify_renamed_type_local_to_reifee.check | 1 +
.../run/reify_renamed_type_local_to_reifee.scala | 24 +
test/files/run/reify_renamed_type_spliceable.check | 1 +
test/files/run/reify_renamed_type_spliceable.scala | 21 +
test/files/run/reify_sort.check | 2 +
test/files/run/reify_sort.scala | 51 +
test/files/run/reify_sort1.check | 2 +
test/files/run/reify_sort1.scala | 21 +
test/files/run/reify_this.check | 5 +
test/files/run/reify_this.scala | 20 +
test/files/run/reify_timeofday.check | 1 +
test/files/run/reify_timeofday.scala | 42 +
test/files/run/reify_typerefs_1a.check | 1 +
test/files/run/reify_typerefs_1a.scala | 18 +
test/files/run/reify_typerefs_1b.check | 1 +
test/files/run/reify_typerefs_1b.scala | 18 +
test/files/run/reify_typerefs_2a.check | 1 +
test/files/run/reify_typerefs_2a.scala | 20 +
test/files/run/reify_typerefs_2b.check | 1 +
test/files/run/reify_typerefs_2b.scala | 20 +
test/files/run/reify_typerefs_3a.check | 1 +
test/files/run/reify_typerefs_3a.scala | 20 +
test/files/run/reify_typerefs_3b.check | 1 +
test/files/run/reify_typerefs_3b.scala | 20 +
test/files/run/reify_varargs.check | 1 +
test/files/run/reify_varargs.scala | 11 +
test/files/run/repl-backticks.scala | 2 +-
test/files/run/repl-bare-expr.check | 18 +
test/files/run/repl-colon-type.check | 178 +-
test/files/run/repl-colon-type.scala | 12 +
test/files/run/repl-parens.check | 27 +-
test/files/run/repl-parens.scala | 6 +-
test/files/run/repl-paste-2.check | 8 +-
test/files/run/repl-paste-2.scala | 2 +-
test/files/run/repl-paste-3.check | 14 +
test/files/run/repl-paste-3.scala | 8 +
test/files/run/repl-power.check | 22 +-
test/files/run/repl-power.scala | 5 +-
test/files/run/repl-reset.check | 55 +
test/files/run/repl-reset.scala | 22 +
test/files/run/repl-type-verbose.check | 194 +
test/files/run/repl-type-verbose.scala | 20 +
test/files/run/resetattrs-this.check | 1 +
test/files/run/resetattrs-this.scala | 11 +
test/files/run/richWrapperEquals.scala | 4 -
test/files/run/richs-msil.check | 66 -
test/files/run/runtime-msil.check | 64 -
test/files/run/runtimeEval1.check | 1 +
test/files/run/runtimeEval1.scala | 9 +
test/files/run/runtimeEval2.check | 1 +
test/files/run/runtimeEval2.scala | 11 +
test/files/run/sequenceComparisons.scala | 4 +-
test/files/run/serialize-stream.check | 6 +
test/files/run/serialize-stream.scala | 20 +
test/files/run/showraw_aliases.check | 2 +
test/files/run/showraw_aliases.scala | 17 +
test/files/run/showraw_mods.check | 1 +
test/files/run/showraw_mods.scala | 6 +
test/files/run/showraw_nosymbol.check | 1 +
test/files/run/showraw_nosymbol.scala | 5 +
test/files/run/showraw_tree.check | 2 +
test/files/run/showraw_tree.scala | 8 +
test/files/run/showraw_tree_ids.check | 2 +
test/files/run/showraw_tree_ids.scala | 9 +
test/files/run/showraw_tree_kinds.check | 2 +
test/files/run/showraw_tree_kinds.scala | 8 +
test/files/run/showraw_tree_types_ids.check | 12 +
test/files/run/showraw_tree_types_ids.scala | 11 +
test/files/run/showraw_tree_types_typed.check | 12 +
test/files/run/showraw_tree_types_typed.scala | 10 +
test/files/run/showraw_tree_types_untyped.check | 2 +
test/files/run/showraw_tree_types_untyped.scala | 8 +
test/files/run/showraw_tree_ultimate.check | 12 +
test/files/run/showraw_tree_ultimate.scala | 11 +
test/files/run/shutdownhooks.check | 3 +
test/files/run/shutdownhooks.scala | 37 +
test/files/run/si5045.check | 6 +
test/files/run/si5045.scala | 46 +
test/files/run/sm-interpolator.scala | 41 +
test/files/run/sort.scala | 12 +-
test/files/run/spec-nlreturn.check | 2 +
test/files/run/spec-nlreturn.scala | 16 +
test/files/run/spec-self.check | 2 +
test/files/run/spec-self.scala | 14 +
.../run/stream-stack-overflow-filter-map.scala | 44 +
test/files/run/stream_length.scala | 2 +-
test/files/run/streams.check | 14 +
test/files/run/streams.scala | 20 +
test/files/run/stringinterpolation_macro-run.check | 62 +
test/files/run/stringinterpolation_macro-run.scala | 103 +
test/files/run/synchronized.check | 128 +
test/files/run/synchronized.flags | 1 +
test/files/run/synchronized.scala | 449 +
test/files/run/t0017.scala | 2 +-
test/files/run/{bug0325.check => t0325.check} | 0
test/files/run/{bug0325.scala => t0325.scala} | 0
test/files/run/{t0421.check => t0421-new.check} | 0
test/files/run/t0421-new.scala | 32 +
test/files/run/{t0421.check => t0421-old.check} | 0
test/files/run/t0421-old.scala | 30 +
test/files/run/t0421.scala | 30 -
test/files/run/t0663.check | 2 +-
test/files/run/t0677-new.scala | 10 +
test/files/run/t0677-old.scala | 8 +
test/files/run/t0677.scala | 9 -
test/files/run/{bug1005.check => t1005.check} | 0
test/files/run/{bug1005.scala => t1005.scala} | 0
test/files/run/t102.check | 2 +
test/files/run/t102.scala | 24 +
test/files/run/{bug1042.check => t1042.check} | 0
test/files/run/{bug1042.scala => t1042.scala} | 0
test/files/run/t1044.scala | 4 +
test/files/run/{bug1048.check => t1048.check} | 0
test/files/run/{bug1048.scala => t1048.scala} | 0
test/files/run/{bug1074.check => t1074.check} | 0
test/files/run/{bug1074.scala => t1074.scala} | 0
test/files/run/{bug1079.check => t1079.check} | 0
test/files/run/{bug1079.scala => t1079.scala} | 0
test/files/run/t1100.check | 4 +
test/files/run/t1100.scala | 17 +
test/files/run/{bug1110.scala => t1110.scala} | 0
test/files/run/{bug1141.check => t1141.check} | 0
test/files/run/{bug1141.scala => t1141.scala} | 0
test/files/run/{bug1192.check => t1192.check} | 0
test/files/run/t1192.scala | 7 +
test/files/run/t1195-new.check | 6 +
test/files/run/t1195-new.scala | 28 +
test/files/run/t1195-old.check | 6 +
test/files/run/t1195-old.scala | 26 +
test/files/run/{bug1220.scala => t1220.scala} | 0
test/files/run/t1247.check | 1 +
test/files/run/t1247.scala | 11 +
test/files/run/{bug1300.check => t1300.check} | 0
test/files/run/{bug1300.scala => t1300.scala} | 0
test/files/run/t1309.scala | 7 +
test/files/run/{bug1333.check => t1333.check} | 0
test/files/run/{bug1333.scala => t1333.scala} | 0
test/files/run/{bug1360.check => t1360.check} | 0
test/files/run/{bug1360.scala => t1360.scala} | 0
test/files/run/{bug1373.scala => t1373.scala} | 0
test/files/run/{bug1427.scala => t1427.scala} | 0
test/files/run/t1430.check | 1 +
test/files/run/t1430/Bar_1.java | 8 +
test/files/run/t1430/Test_2.scala | 16 +
test/files/run/{bug1434.scala => t1434.scala} | 0
test/files/run/{bug1466.scala => t1466.scala} | 0
test/files/run/t1500.scala | 6 +-
test/files/run/t1501.scala | 4 +-
test/files/run/t1620.check | 4 +-
test/files/run/t1672.scala | 28 +
.../run/bug1697.scala => files/run/t1697.scala} | 0
test/files/run/{bug1766.scala => t1766.scala} | 0
test/files/run/t1987.check | 16 +
.../{neg/caseinherit.flags => run/t1987.flags} | 0
test/files/run/t1987.scala | 62 +
test/files/run/t1987b.check | 1 +
test/files/run/t1987b/PullIteratees.scala | 17 +
test/files/run/t1987b/a.scala | 6 +
test/files/run/t1987b/cce_test.scala | 15 +
test/files/run/t1987b/pkg1.scala | 4 +
test/files/run/t1987b/pkg2.scala | 3 +
test/files/run/t2005.scala | 10 +
test/files/run/{bug2029.check => t2029.check} | 0
test/files/run/{bug2029.scala => t2029.scala} | 0
test/files/run/{bug2075.scala => t2075.scala} | 0
...ugs2087-and-2400.scala => t2087-and-2400.scala} | 0
test/files/{pos/bug3252.flags => run/t2106.flags} | 0
test/files/run/{bug2106.scala => t2106.scala} | 0
test/files/run/t2124.check | 1 +
test/files/run/{bug2124.scala => t2124.scala} | 0
test/files/run/t2125.check | 1 +
test/files/run/{bug2125.scala => t2125.scala} | 0
test/files/run/t2127.scala | 0
test/files/run/{bug629.check => t216.check} | 0
test/files/run/{bug216.scala => t216.scala} | 0
test/files/run/{bug2162.check => t2162.check} | 0
test/files/run/{bug2162.scala => t2162.scala} | 0
test/files/run/{bug2175.scala => t2175.scala} | 0
test/files/run/t2236-new.scala | 19 +
test/files/run/{t2236.scala => t2236-old.scala} | 0
test/files/run/{bug2241.scala => t2241.scala} | 0
test/files/run/{bug2250.scala => t2250.scala} | 0
test/files/run/{bug2276.check => t2276.check} | 0
test/files/run/{bug2276.scala => t2276.scala} | 0
test/files/run/t2296c.check | 1 +
test/files/run/t2296c/Action.java | 21 +
test/files/run/t2296c/Display.java | 9 +
test/files/run/t2296c/Global.java | 29 +
test/files/run/t2296c/ScalaActivity.scala | 18 +
test/files/run/t2296c/Test.scala | 15 +
test/files/run/t2296c/a.scala | 5 +
test/files/run/{bug2308a.check => t2308a.check} | 0
test/files/run/{bug2308a.scala => t2308a.scala} | 0
test/files/run/t2337.check | 4 +
test/files/run/t2337.scala | 21 +
test/files/run/{bug2354.scala => t2354.scala} | 0
test/files/run/{bug2378.scala => t2378.scala} | 0
test/files/run/t2386-new.check | 2 +
test/files/run/t2386-new.scala | 5 +
test/files/run/t2417.scala | 6 +-
test/files/run/t2418.check | 1 +
test/files/run/t2418.scala | 10 +
test/files/run/t2488.check | 4 +
test/files/run/t2488.scala | 11 +
test/files/run/t2503.scala | 0
test/files/run/{bug2512.scala => t2512.scala} | 0
test/files/run/{bug2514.scala => t2514.scala} | 0
test/files/run/t2544.check | 4 +-
test/files/run/t2544.scala | 22 +-
test/files/run/{bug2552.check => t2552.check} | 0
test/files/run/{bug2552.scala => t2552.scala} | 0
test/files/run/{bug2636.scala => t2636.scala} | 0
test/files/run/{bug266.scala => t266.scala} | 0
test/files/run/{bug2721.check => t2721.check} | 0
test/files/run/{bug2721.scala => t2721.scala} | 0
test/files/run/{bug2755.check => t2755.check} | 0
test/files/run/{bug2755.scala => t2755.scala} | 0
test/files/run/{bug2800.check => t2800.check} | 0
test/files/run/{bug2800.scala => t2800.scala} | 0
test/files/run/t2813.2.scala | 78 +-
test/files/run/t2818.check | 4 +
test/files/run/t2818.scala | 6 +
test/files/run/t2873.check | 1 +
test/files/run/t2873.scala | 5 +
test/files/run/{bug2876.scala => t2876.scala} | 0
test/files/run/t2886.check | 6 +-
test/files/run/t2886.scala | 6 +-
test/files/run/{bug2958.scala => t2958.scala} | 0
test/files/run/{bug298.check => t298.check} | 0
test/files/run/{bug298.scala => t298.scala} | 0
test/files/run/{bug3004.scala => t3004.scala} | 0
test/files/run/t3026.scala | 0
test/files/run/{bug3050.scala => t3050.scala} | 0
test/files/run/{bug3088.scala => t3088.scala} | 0
test/files/run/t3097.check | 1 +
test/files/run/t3097.scala | 18 +
test/files/run/{bug3126.scala => t3126.scala} | 0
test/files/run/{bug3150.scala => t3150.scala} | 0
test/files/run/{bug3175.check => t3175.check} | 0
test/files/run/{bug3175.scala => t3175.scala} | 0
test/files/run/{bug3232.scala => t3232.scala} | 0
test/files/run/{bug3269.check => t3269.check} | 0
test/files/run/{bug3269.scala => t3269.scala} | 0
test/files/run/{bug3273.scala => t3273.scala} | 0
test/files/run/t3326.check | 8 +
test/files/run/t3326.scala | 74 +
test/files/run/{bug3327.check => t3327.check} | 0
test/files/run/{bug3327.scala => t3327.scala} | 0
test/files/run/t3353.check | 1 +
test/files/run/t3353.scala | 10 +
test/files/run/{bug3376.check => t3376.check} | 0
test/files/run/{bug3376.scala => t3376.scala} | 0
test/files/run/{bug3395.check => t3395.check} | 0
test/files/run/{bug3395.scala => t3395.scala} | 0
test/files/run/{bug3397.scala => t3397.scala} | 0
test/files/run/{bug3487.scala => t3487.scala} | 0
test/files/run/t3488.check | 2 +
test/files/run/t3488.scala | 6 +
test/files/run/t3507-new.check | 1 +
test/files/run/t3507-new.scala | 17 +
test/files/run/{bug3509.flags => t3509.flags} | 0
test/files/run/{bug3509.scala => t3509.scala} | 0
test/files/run/{bug3516.check => t3516.check} | 0
test/files/run/{bug3516.scala => t3516.scala} | 0
test/files/run/{bug3518.scala => t3518.scala} | 0
test/files/run/{bug3529.scala => t3529.scala} | 0
test/files/run/{bug3530.check => t3530.check} | 0
test/files/run/{bug3530.scala => t3530.scala} | 0
test/files/run/{bug3540.scala => t3540.scala} | 0
test/files/run/{bug3563.scala => t3563.scala} | 0
test/files/run/t3569.check | 16 +
test/files/run/{bug3509.flags => t3569.flags} | 0
test/files/run/t3569.scala | 32 +
test/files/run/t3575.check | 20 +
test/files/run/t3575.scala | 55 +
test/files/run/t3613.scala | 22 +
test/files/run/{bug3616.check => t3616.check} | 0
test/files/run/{bug3616.scala => t3616.scala} | 0
test/files/run/{bug363.check => t363.check} | 0
test/files/run/{bug363.scala => t363.scala} | 0
test/files/run/{bug3699.scala => t3699.scala} | 0
test/files/run/t3702.check | 2 +
test/files/run/t3702.scala | 11 +
test/files/run/t3705.scala | 17 +
test/files/run/{bug3714.scala => t3714.scala} | 0
test/files/run/t3758-old.scala | 10 +
test/files/run/{bug3760.scala => t3760.scala} | 0
test/files/run/t3761-overload-byname.check | 12 +
test/files/run/t3761-overload-byname.scala | 39 +
test/files/run/t3798.check | 1 +
test/files/run/t3798.scala | 10 +
test/files/run/{bug3822.scala => t3822.scala} | 0
test/files/run/t3835.scala | 9 +-
test/files/run/{bug3855.scala => t3855.scala} | 0
test/files/run/{bug3923.scala => t3923.scala} | 0
test/files/run/{bug3964.check => t3964.check} | 0
test/files/run/{bug3964.scala => t3964.scala} | 0
test/files/run/{bug3984.scala => t3984.scala} | 0
test/files/run/t3994.scala | 20 +
test/files/run/t4024.scala | 11 +
test/files/run/t4025.check | 19 +
test/files/run/t4025.scala | 12 +
test/files/run/t4027.check | 12 +
test/files/run/t4027.scala | 27 +
test/files/run/{bug405.scala => t405.scala} | 0
test/files/run/{bug4062.check => t4062.check} | 0
test/files/run/{bug4062.scala => t4062.scala} | 0
test/files/run/{bug408.scala => t408.scala} | 0
test/files/run/{bug4080.check => t4080.check} | 0
test/files/run/{bug4080.scala => t4080.scala} | 0
test/files/run/t4110-new.check | 2 +
test/files/run/t4110-new.scala | 13 +
test/files/run/{bug4110.check => t4110-old.check} | 0
test/files/run/t4110-old.scala | 11 +
test/files/run/{bug4119 => t4119}/J.java | 0
test/files/run/{bug4119 => t4119}/S.scala | 0
test/files/run/{bug4122.scala => t4122.scala} | 0
test/files/run/t4124.check | 4 +
test/files/run/t4124.scala | 24 +
test/files/run/t4138.check | 2 +
test/files/run/t4138.scala | 6 +
test/files/run/t4147.scala | 36 +
test/files/run/{bug4148.check => t4148.check} | 0
test/files/run/{bug4148.scala => t4148.scala} | 0
test/files/run/t4171.check | 3 +
test/files/run/t4171.scala | 11 +
test/files/run/t4172.check | 12 +
test/files/run/t4172.scala | 7 +
test/files/run/t4190.check | 3 +
test/files/run/t4190.scala | 6 +
test/files/run/{bug4201.scala => t4201.scala} | 0
test/files/run/t4216.check | 37 +
test/files/run/t4216.scala | 19 +
test/files/run/{bug4238 => t4238}/J_1.java | 0
test/files/run/{bug4238 => t4238}/s_2.scala | 0
test/files/run/t4283.check | 5 +
test/{pending => files}/run/t4283/AbstractFoo.java | 0
test/{pending => files}/run/t4283/ScalaBipp.scala | 0
test/files/run/t4283/Test.scala | 16 +
test/files/run/{bug4285.check => t4285.check} | 0
test/files/{pos/bug3252.flags => run/t4285.flags} | 0
test/files/run/{bug4285.scala => t4285.scala} | 0
test/files/run/{bug4288.scala => t4288.scala} | 0
test/files/run/{bug429.check => t429.check} | 0
test/files/run/{bug429.scala => t429.scala} | 0
test/files/run/{bug4297.scala => t4297.scala} | 0
test/files/run/{bug4317.check => t4317.check} | 0
.../{neg/caseinherit.flags => run/t4317.flags} | 0
test/files/run/{bug4317 => t4317}/J_2.java | 0
test/files/run/t4317/S_1.scala | 11 +
test/files/run/{bug4317 => t4317}/S_3.scala | 0
test/files/run/{bug4387.scala => t4387.scala} | 0
test/files/run/t4398.scala | 11 +
test/files/run/t4415.scala | 86 +
test/files/run/t4461.check | 11 +
test/files/run/t4461.scala | 23 +
test/files/run/t4482.check | 1 +
test/files/run/t4482.scala | 15 +
test/files/run/t4535.check | 3 +
test/files/run/t4535.scala | 30 +
test/files/run/t4536.check | 8 +
test/files/run/t4536.flags | 1 +
test/files/run/t4536.scala | 46 +
test/files/run/t4542.check | 19 +
test/files/run/t4542.scala | 11 +
test/files/run/{bug4558.scala => t4558.scala} | 0
test/files/run/t4560.check | 8 +-
test/files/run/t4560.scala | 65 +-
test/files/run/t4560b.check | 2 +
test/files/run/t4560b.scala | 28 +
test/files/run/t4565_2.check | 189 -
test/files/run/t4565_2.scala | 6717 ---------
test/files/run/{bug4570.check => t4570.check} | 0
test/files/run/{bug4570.scala => t4570.scala} | 0
test/files/run/t4574.check | 2 +
test/files/run/t4582.scala | 11 +
test/files/run/t4592.check | 3 +
test/files/run/t4592.scala | 10 +
test/files/run/t4601.check | 1 +
test/files/run/t4601.scala | 15 +
test/files/run/t4608.scala | 3 -
test/files/run/{bug4617.check => t4617.check} | 0
test/files/run/{bug4617.scala => t4617.scala} | 0
test/files/run/{bug4656.check => t4656.check} | 0
test/files/run/{bug4656.scala => t4656.scala} | 0
test/files/run/t4658.check | 80 +
test/files/run/t4658.scala | 33 +
test/files/run/{bug4660.scala => t4660.scala} | 0
test/files/run/t4671.check | 46 +
test/files/run/t4671.scala | 13 +
test/files/run/t4680.check | 60 +
test/files/run/t4680.scala | 71 +
test/files/run/{bug4697.check => t4697.check} | 0
test/files/run/{bug4697.scala => t4697.scala} | 0
test/files/run/t4709.scala | 10 +
test/files/run/t4710.check | 8 +
test/files/run/{bug4710.scala => t4710.scala} | 0
test/files/run/t4723.scala | 9 +
test/files/run/t4729.check | 4 +
test/files/run/t4729/J_1.java | 4 +
test/files/run/t4729/S_2.scala | 29 +
test/files/run/{bug4752.scala => t4752.scala} | 0
test/files/run/t4753.check | 1 +
test/files/run/t4753.scala | 12 +
test/files/run/t4761.check | 4 +
test/files/run/t4761.scala | 11 +
test/files/run/t4766.check | 1 +
test/files/run/t4766.scala | 7 +
test/files/run/t4770.check | 2 +
test/files/run/t4770.scala | 15 +
test/files/run/t4777.check | 2 +
test/files/run/t4777.scala | 8 +
test/files/run/t4794.check | 1 +
test/files/run/t4794.scala | 12 +
test/files/run/t4809.scala | 34 +
test/files/run/t4813.scala | 37 +
test/files/run/t4827.scala | 15 +
test/files/run/t4827b.scala | 18 +
test/files/run/t4835.check | 14 +-
test/files/run/t4835.scala | 76 +-
test/files/run/t4871.check | 2 +
test/files/run/t4871.scala | 12 +
test/files/run/t4891.check | 7 +
test/files/run/t4891/J_2.java | 13 +
test/files/run/t4891/S_1.scala | 26 +
test/files/run/t4891/S_3.scala | 5 +
test/files/run/t4894.scala | 27 +
test/files/run/t4895.scala | 10 +-
test/files/run/t4897.check | 1 +
test/files/run/t4897.scala | 10 +
test/files/run/t4929.check | 1 +
test/files/run/t4929.scala | 42 +
test/files/run/t4930.check | 2 +
test/files/run/t4930.scala | 11 +
test/files/run/t4935.check | 1 +
test/files/run/t4935.flags | 1 +
test/files/run/t4935.scala | 9 +
test/files/run/t4954.scala | 45 +
test/files/run/t5009.check | 5 +
test/files/run/t5009.scala | 14 +
test/files/run/t5018.scala | 37 +
test/files/run/t5037.check | 2 +
test/files/run/t5037.scala | 18 +
test/files/run/t5040.check | 1 +
test/files/run/t5040.flags | 1 +
test/files/run/t5040.scala | 11 +
test/files/run/t5053.check | 6 +
test/files/run/t5053.scala | 20 +
test/files/run/t5064.check | 25 +
test/files/run/t5064.scala | 23 +
test/files/run/t5072.check | 14 +
test/files/run/t5072.scala | 8 +
test/files/run/t5080.check | 1 +
test/files/run/t5080.scala | 24 +
test/files/run/t5105.check | 1 +
test/files/run/t5105.scala | 14 +
test/files/run/t5125.check | 4 +
test/files/run/t5125.scala | 24 +
test/files/run/t5125b.check | 7 +
test/files/run/t5125b.scala | 37 +
test/files/run/t5158.check | 1 +
test/files/run/t5158.scala | 17 +
test/files/run/t5162.scala | 19 +
test/files/run/t5171.check | 1 +
test/files/run/t5171.scala | 7 +
test/files/run/t5201.check | 1 +
test/files/run/t5201.scala | 8 +
test/files/run/t5224.check | 9 +
test/files/run/t5224.scala | 8 +
test/files/run/t5225_1.check | 4 +
test/files/run/t5225_1.scala | 6 +
test/files/run/t5225_2.check | 4 +
test/files/run/t5225_2.scala | 6 +
test/files/{jvm/bug680.check => run/t5229_1.check} | 0
test/files/run/t5229_1.scala | 8 +
test/files/run/t5229_2.check | 2 +
test/files/run/t5229_2.scala | 18 +
test/files/run/t5230.check | 2 +
test/files/run/t5230.scala | 18 +
test/files/run/t5256a.check | 6 +
test/files/run/t5256a.scala | 11 +
test/files/run/t5256b.check | 6 +
test/files/run/t5256b.scala | 10 +
test/files/run/t5256c.check | 6 +
test/files/run/t5256c.scala | 12 +
test/files/run/t5256d.check | 32 +
test/files/run/t5256d.scala | 13 +
test/files/run/t5256e.check | 6 +
test/files/run/t5256e.scala | 10 +
test/files/run/t5256f.check | 12 +
test/files/run/t5256f.scala | 22 +
test/files/run/t5256g.check | 3 +
test/files/run/t5256g.scala | 13 +
test/files/run/t5256h.check | 7 +
test/files/run/t5256h.scala | 10 +
test/files/run/t5258a.check | 1 +
test/files/run/t5258a.scala | 8 +
test/files/run/t5262.check | 2 +
test/files/run/t5262.scala | 26 +
test/files/run/t5266_1.check | 2 +
test/files/run/t5266_1.scala | 15 +
test/files/run/t5266_2.check | 2 +
test/files/run/t5266_2.scala | 16 +
test/files/run/t5269.check | 1 +
test/files/run/t5269.scala | 16 +
test/files/run/t5270.check | 1 +
test/files/run/t5270.scala | 20 +
test/files/run/t5271_1.check | 12 +
test/files/run/t5271_1.scala | 15 +
test/files/run/t5271_2.check | 14 +
test/files/run/t5271_2.scala | 17 +
test/files/run/t5271_3.check | 21 +
test/files/run/t5271_3.scala | 18 +
test/files/{jvm/bug680.check => run/t5271_4.check} | 0
test/files/run/t5271_4.scala | 8 +
test/files/run/t5272_1_newpatmat.check | 1 +
test/files/run/t5272_1_newpatmat.scala | 11 +
test/files/run/t5272_1_oldpatmat.check | 1 +
.../bug680.check => run/t5272_1_oldpatmat.flags} | 0
test/files/run/t5272_1_oldpatmat.scala | 11 +
test/files/run/t5272_2_newpatmat.check | 1 +
test/files/run/t5272_2_newpatmat.scala | 10 +
test/files/run/t5272_2_oldpatmat.check | 1 +
.../bug680.check => run/t5272_2_oldpatmat.flags} | 0
test/files/run/t5272_2_oldpatmat.scala | 10 +
test/files/run/t5273_1_newpatmat.check | 1 +
test/files/run/t5273_1_newpatmat.scala | 11 +
test/files/run/t5273_1_oldpatmat.check | 1 +
.../bug680.check => run/t5273_1_oldpatmat.flags} | 0
test/files/run/t5273_1_oldpatmat.scala | 11 +
test/files/run/t5273_2a_newpatmat.check | 1 +
test/files/run/t5273_2a_newpatmat.scala | 9 +
test/files/run/t5273_2a_oldpatmat.check | 1 +
.../bug680.check => run/t5273_2a_oldpatmat.flags} | 0
test/files/run/t5273_2a_oldpatmat.scala | 9 +
test/files/run/t5273_2b_newpatmat.check | 1 +
test/files/run/t5273_2b_newpatmat.scala | 10 +
test/files/run/t5273_2b_oldpatmat.check | 1 +
.../bug680.check => run/t5273_2b_oldpatmat.flags} | 0
test/files/run/t5273_2b_oldpatmat.scala | 10 +
test/files/run/t5274_1.check | 3 +
test/files/run/t5274_1.scala | 14 +
test/files/run/t5274_2.check | 2 +
test/files/run/t5274_2.scala | 51 +
test/files/run/t5275.check | 1 +
test/files/run/t5275.scala | 9 +
test/files/run/t5276_1a.check | 1 +
test/files/run/t5276_1a.scala | 9 +
test/files/run/t5276_1b.check | 1 +
test/files/run/t5276_1b.scala | 9 +
test/files/run/t5276_2a.check | 1 +
test/files/run/t5276_2a.scala | 12 +
test/files/run/t5276_2b.check | 1 +
test/files/run/t5276_2b.scala | 13 +
test/files/run/t5277_1.check | 1 +
test/files/run/t5277_1.scala | 15 +
test/files/run/t5277_2.check | 2 +
test/files/run/t5277_2.scala | 12 +
test/files/run/t5279.check | 1 +
test/files/run/t5279.scala | 8 +
test/files/run/t5284.check | 1 +
test/files/run/t5284.scala | 25 +
test/files/run/t5284b.check | 1 +
test/files/run/t5284b.scala | 28 +
test/files/run/t5284c.check | 1 +
test/files/run/t5284c.scala | 30 +
test/files/run/t5300.scala | 7 +
test/files/run/t5313.check | 12 +
test/files/run/t5313.scala | 54 +
test/files/run/t5328.check | 3 +
test/files/run/t5328.scala | 5 +
test/files/run/t5334_1.check | 1 +
test/files/run/t5334_1.scala | 15 +
test/files/run/t5334_2.check | 1 +
test/files/run/t5334_2.scala | 15 +
test/files/run/t5335.check | 1 +
test/files/run/t5335.scala | 8 +
test/files/run/t5356.check | 6 +
test/files/run/t5356.scala | 12 +
test/files/run/t5375.check | 1 +
test/files/run/t5375.scala | 19 +
test/files/run/t5377.check | 18 +
test/files/run/t5377.scala | 47 +
test/files/run/{si5380.scala => t5380.scala} | 0
test/files/run/t5385.check | 8 +
test/files/run/t5385.scala | 16 +
test/files/run/t5387.scala | 15 +
test/files/run/t5394.scala | 4 +
test/files/run/t5407.check | 2 +
test/files/run/t5407.scala | 17 +
test/files/{jvm/bug680.check => run/t5415.check} | 0
test/files/run/t5415.scala | 12 +
test/files/run/t5418a.check | 1 +
test/files/run/t5418a.scala | 3 +
test/files/run/t5418b.check | 2 +
test/files/run/t5418b.scala | 11 +
test/files/run/t5419.check | 1 +
test/files/run/t5419.scala | 8 +
test/files/run/t5423.check | 1 +
test/files/run/t5423.scala | 11 +
test/files/run/t5428.check | 1 +
test/files/run/t5428.scala | 29 +
test/files/run/t5488-fn.check | 17 +
test/files/run/t5488-fn.scala | 27 +
test/files/run/t5488.check | 14 +
test/files/run/t5488.scala | 26 +
test/files/run/t5500.check | 2 +
test/files/run/t5500.scala | 12 +
test/files/run/t5500b.check | 28 +
test/files/run/t5500b.scala | 51 +
test/files/run/t5514.check | 19 +
test/files/run/t5514.scala | 35 +
test/files/run/t5527.check | 99 +
test/files/run/t5527.scala | 107 +
test/files/run/t5530.check | 2 +
.../annotDepMethType.flags => run/t5530.flags} | 0
test/files/run/t5530.scala | 4 +
.../annotDepMethType.flags => run/t5532.flags} | 0
test/files/run/t5532.scala | 4 +
test/files/run/t5535.check | 20 +
test/files/run/t5535.scala | 10 +
test/files/run/t5537.check | 20 +
test/files/run/t5537.scala | 10 +
test/files/run/t5543.check | 9 +
test/files/run/t5543.scala | 45 +
test/files/run/{bug4570.check => t5544.check} | 0
test/files/run/t5544/Api_1.scala | 8 +
test/files/run/t5544/Test_2.scala | 3 +
test/files/{jvm/bug680.check => run/t5545.check} | 0
test/files/run/t5545.scala | 27 +
test/files/run/t5552.check | 2 +
test/files/run/t5552.scala | 10 +
test/files/run/t5568.check | 9 +
test/files/run/t5568.scala | 16 +
test/files/run/t5577.check | 11 +
test/files/run/t5577.scala | 27 +
test/files/run/t5583.check | 20 +
test/files/run/t5583.scala | 11 +
test/files/run/t5588.check | 2 +
test/files/run/t5588.scala | 14 +
test/files/run/t5590.check | 4 +
test/files/run/t5590.scala | 31 +
test/files/run/t5603.check | 29 +
test/files/run/t5603.scala | 42 +
test/files/run/t5604.check | 8 +
test/files/run/t5604.scala | 50 +
test/files/run/t5608.check | 1 +
test/files/run/t5608.scala | 12 +
test/files/run/t5610.check | 6 +
test/files/run/t5610.scala | 30 +
test/files/run/t5612.check | 4 +
test/files/run/t5612.scala | 28 +
test/files/run/t5614.check | 3 +
test/files/run/t5614.flags | 1 +
test/files/run/t5614.scala | 5 +
test/files/run/t5629.check | 2 +
test/files/run/t5629.scala | 36 +
test/files/run/t5629b.check | 10 +
test/files/run/t5629b.scala | 41 +
test/files/run/t5648.check | 4 +
.../{neg/caseinherit.flags => run/t5648.flags} | 0
test/files/run/t5648.scala | 10 +
test/files/run/t5652.check | 8 +
test/files/run/t5652/t5652_1.scala | 6 +
test/files/run/t5652/t5652_2.scala | 9 +
test/files/run/t5652b.check | 4 +
test/files/run/t5652b/t5652b_1.scala | 3 +
test/files/run/t5652b/t5652b_2.scala | 9 +
test/files/run/t5652c.check | 6 +
test/files/run/t5652c/t5652c.scala | 10 +
test/files/run/t5655.check | 30 +
test/files/run/t5655.scala | 10 +
test/files/run/t5656.check | 1 +
test/files/run/t5656.scala | 11 +
test/files/run/t5676.check | 3 +
.../annotDepMethType.flags => run/t5676.flags} | 0
test/files/run/t5676.scala | 24 +
test/files/run/t5680.check | 3 +
test/files/run/t5680.scala | 7 +
test/files/run/t5688.check | 1 +
test/files/run/t5688.scala | 23 +
test/files/run/t5699.check | 11 +
test/files/run/t5699.scala | 24 +
test/files/run/t5704.check | 1 +
test/files/run/t5704.flags | 1 +
test/files/run/t5704.scala | 19 +
test/files/run/t5710-1.check | 1 +
test/files/run/t5710-1.scala | 15 +
test/files/run/t5710-2.check | 1 +
test/files/run/t5710-2.scala | 15 +
test/files/run/t5713.check | 1 +
test/files/run/t5713.flags | 1 +
test/files/run/t5713/Impls_Macros_1.scala | 28 +
test/files/run/t5713/Test_2.scala | 5 +
test/files/run/t5733.check | 2 +
test/files/run/t5733.scala | 53 +
test/files/run/t5753_1.check | 1 +
test/files/run/t5753_1.flags | 1 +
test/files/run/t5753_1/Impls_Macros_1.scala | 10 +
test/files/run/t5753_1/Test_2.scala | 4 +
test/files/run/t5753_2.check | 1 +
test/files/run/t5753_2.flags | 1 +
test/files/run/t5753_2/Impls_Macros_1.scala | 10 +
test/files/run/t5753_2/Test_2.scala | 4 +
test/files/run/{bug576.check => t576.check} | 0
test/files/run/t576.scala | 45 +
test/files/run/t5770.check | 10 +
test/files/run/t5770.scala | 25 +
test/files/run/t5789.check | 14 +
test/files/run/t5789.scala | 14 +
test/files/run/t5804.check | 4 +
test/files/run/t5804.scala | 32 +
test/files/run/t5816.check | 1 +
test/files/run/t5816.scala | 17 +
test/files/run/t5824.check | 1 +
test/files/run/t5824.scala | 8 +
test/files/run/t5830.check | 6 +
.../{neg/caseinherit.flags => run/t5830.flags} | 0
test/files/run/t5830.scala | 56 +
test/files/run/t5840.scala | 7 +
test/files/run/t5843.check | 9 +
test/files/run/t5843.scala | 15 +
test/files/run/t5856.scala | 10 +
test/files/run/t5857.scala | 45 +
test/files/run/t5866.check | 2 +
test/files/run/t5866.scala | 11 +
test/files/run/t5867.check | 1 +
test/files/run/t5867.scala | 14 +
test/files/run/t5879.check | 16 +
test/files/run/t5879.scala | 74 +
test/files/run/t5880.scala | 41 +
test/files/run/t5881.check | 2 +
test/files/run/t5881.scala | 6 +
test/files/run/t5894.scala | 17 +
test/files/run/t5907.check | 31 +
test/files/run/t5907.scala | 118 +
test/files/run/t5912.scala | 6 +
test/files/run/t5914.check | 1 +
test/files/run/t5914.scala | 23 +
test/files/run/t5923a.check | 3 +
test/files/run/t5923a/Macros_1.scala | 14 +
test/files/run/t5923a/Test_2.scala | 5 +
test/files/run/t5923b.check | 3 +
test/files/run/t5923b/Test.scala | 7 +
test/files/run/t5937.scala | 12 +
test/files/run/{bug594.check => t594.check} | 0
test/files/run/{bug594.scala => t594.scala} | 0
test/files/run/t5940.scala | 41 +
test/files/{jvm/bug680.check => run/t5942.check} | 0
test/files/run/t5942.scala | 10 +
test/files/run/t5943a1.check | 1 +
test/files/run/t5943a1.scala | 9 +
test/files/run/t5943a2.check | 1 +
test/files/run/t5943a2.scala | 9 +
test/files/run/t5966.check | 3 +
test/files/run/t5966.scala | 9 +
test/files/run/t5971.check | 4 +
test/files/run/t5971.scala | 23 +
test/files/run/t5974.check | 1 +
test/files/run/t5974.scala | 10 +
test/files/run/t5986.check | 15 +
test/files/run/t5986.scala | 36 +
test/files/run/{bug601.check => t601.check} | 0
test/files/run/{bug601.scala => t601.scala} | 0
test/files/run/t6011b.check | 1 +
test/files/run/t6011b.scala | 11 +
test/files/run/t6011c.scala | 13 +
test/files/run/t6023.check | 12 +
test/files/run/t6023.scala | 17 +
test/files/run/t6028.check | 84 +
test/files/run/t6028.scala | 21 +
test/files/run/{bug603.check => t603.check} | 0
test/files/run/{bug603.scala => t603.scala} | 0
test/files/run/t6052.scala | 21 +
test/files/run/t6063.check | 1 +
test/files/run/t6063/S_1.scala | 11 +
test/files/run/t6063/S_2.scala | 8 +
test/files/run/t6070.check | 1 +
test/files/run/t6070.scala | 36 +
test/files/run/t6077_patmat_cse_irrefutable.check | 1 +
test/files/run/t6077_patmat_cse_irrefutable.scala | 13 +
test/files/run/t6086-repl.check | 12 +
test/files/run/t6086-repl.scala | 8 +
test/files/run/t6086-vanilla.check | 1 +
test/files/run/t6086-vanilla.scala | 6 +
test/files/run/t6089.check | 1 +
test/files/run/t6089.scala | 13 +
test/files/run/t6090.scala | 6 +
test/files/run/t6102.check | 1 +
test/files/run/t6102.flags | 1 +
test/files/run/t6102.scala | 13 +
test/files/run/t6104.check | 1 +
test/files/run/t6104.scala | 8 +
test/files/run/t6111.check | 2 +
test/files/run/t6111.scala | 26 +
test/files/run/t6113.check | 1 +
test/files/run/t6113.scala | 6 +
test/files/run/t6114.scala | 61 +
test/files/run/t6126.scala | 8 +
test/files/run/t6135.scala | 13 +
test/files/run/t6146b.check | 59 +
test/files/run/t6146b.scala | 40 +
test/files/run/t6150.scala | 44 +
test/files/run/t6154.check | 1 +
test/files/run/t6154.scala | 10 +
test/files/run/t6175.scala | 5 +
test/files/run/t6178.check | 1 +
test/files/run/t6178.scala | 7 +
test/files/run/t6181.check | 1 +
test/files/run/t6181.scala | 8 +
test/files/run/t6187.check | 32 +
test/files/run/t6187.scala | 18 +
test/files/run/t6187b.scala | 5 +
test/files/run/t6188.check | 1 +
test/files/run/t6188.flags | 1 +
test/files/run/t6188.scala | 12 +
test/files/run/t6194.check | 1 +
test/files/run/t6194.scala | 8 +
test/files/{jvm/bug680.check => run/t6197.check} | 0
test/files/run/t6197.scala | 21 +
test/files/{jvm/bug680.check => run/t6198.check} | 0
test/files/run/t6198.scala | 24 +
test/files/run/t6199-mirror.check | 1 +
test/files/run/t6199-mirror.scala | 7 +
test/files/run/t6199-toolbox.check | 1 +
test/files/run/t6199-toolbox.scala | 8 +
test/files/run/t6206.check | 4 +
test/files/run/t6206.scala | 37 +
test/files/run/t6220.scala | 92 +
test/files/run/t6223.check | 4 +
test/files/run/t6223.scala | 11 +
test/files/run/t6246.check | 90 +
test/files/run/t6246.scala | 26 +
test/files/run/t6259.scala | 56 +
test/files/run/t6260.check | 1 +
test/files/run/t6260.scala | 12 +
test/files/run/t6261.scala | 130 +
test/files/run/{bug627.check => t627.check} | 0
test/files/run/{bug627.scala => t627.scala} | 0
test/files/run/t6271.scala | 32 +
test/files/run/t6272.check | 10 +
test/files/run/t6272.scala | 62 +
test/files/run/t6273.check | 19 +
test/files/run/t6273.scala | 11 +
test/files/run/t6277.check | 1 +
test/files/run/t6277.scala | 9 +
test/files/run/t6287.check | 3 +
test/files/run/t6287.scala | 11 +
test/files/run/t6288.check | 79 +
test/files/run/t6288.scala | 41 +
test/files/run/t6288b-jump-position.check | 76 +
test/files/run/t6288b-jump-position.scala | 22 +
.../{randomAccessSeq-apply.check => t629.check} | 0
test/files/run/{bug629.scala => t629.scala} | 0
test/files/run/t6290.scala | 4 +
test/files/run/t6292.scala | 18 +
test/files/run/t6318_derived.check | 3 +
test/files/run/t6318_derived.scala | 15 +
test/files/run/t6318_primitives.check | 36 +
test/files/run/t6318_primitives.scala | 71 +
test/files/run/t6320.check | 17 +
test/files/run/t6320.scala | 9 +
test/files/run/t6323b.check | 1 +
test/files/run/t6323b.scala | 21 +
test/files/run/t6327.check | 4 +
test/files/run/t6327.scala | 22 +
test/files/run/t6329_repl.check | 13 +
test/files/run/t6329_repl.scala | 8 +
test/files/run/t6329_repl_bug.check | 13 +
test/files/run/t6329_repl_bug.pending | 10 +
test/files/run/t6329_vanilla.check | 2 +
test/files/run/t6329_vanilla.scala | 4 +
test/files/run/t6329_vanilla_bug.check | 2 +
test/files/run/t6329_vanilla_bug.pending | 7 +
test/files/run/t6331.check | 23 +
test/files/run/t6331.scala | 71 +
test/files/run/t6331b.check | 30 +
test/files/run/t6331b.scala | 20 +
test/files/run/t6333.scala | 29 +
test/files/run/t6337a.scala | 16 +
test/files/run/t6344.check | 132 +
test/files/run/t6344.scala | 106 +
test/files/run/t6353.check | 1 +
test/files/run/t6353.scala | 12 +
test/files/run/t6370.scala | 12 +
test/files/run/t6380.check | 7 +
test/files/run/t6380.scala | 20 +
test/files/run/t6392a.check | 1 +
test/files/run/t6392a.scala | 9 +
test/files/run/t6392b.check | 1 +
test/files/run/t6392b.scala | 9 +
test/files/run/t6394a.check | 1 +
test/files/run/t6394a.flags | 1 +
test/files/run/t6394a/Macros_1.scala | 12 +
test/files/run/t6394a/Test_2.scala | 4 +
test/files/run/t6394b.check | 1 +
test/files/run/t6394b.flags | 1 +
test/files/run/t6394b/Macros_1.scala | 12 +
test/files/run/t6394b/Test_2.scala | 4 +
test/files/run/t6410.check | 2 +
test/files/run/t6410.scala | 9 +
test/files/run/t6434.check | 10 +
test/files/run/t6434.scala | 8 +
test/files/run/t6439.check | 66 +
test/files/run/t6439.scala | 22 +
test/files/run/t6440.check | 4 +
test/files/run/t6440.scala | 48 +
test/files/run/t6440b.check | 4 +
test/files/run/t6440b.scala | 61 +
test/files/run/t6443-by-name.check | 3 +
test/files/run/t6443-by-name.scala | 18 +
.../run/{bug4570.check => t6443-varargs.check} | 0
test/files/run/t6443-varargs.scala | 16 +
test/files/run/t6443.scala | 15 +
test/files/run/t6443b.scala | 16 +
test/files/run/t6481.check | 4 +
test/files/run/t6481.scala | 13 +
test/files/run/t6488.check | 1 +
test/files/run/t6488.scala | 11 +
test/files/run/t6500.scala | 13 +
test/files/run/t6506.scala | 8 +
test/files/run/t6534.scala | 14 +
test/files/{pos/bug3252.flags => run/t6546.flags} | 0
test/files/run/t6546/A_1.scala | 6 +
test/files/run/t6546/B_2.scala | 8 +
test/files/run/t6549.check | 32 +
test/files/run/t6549.scala | 22 +
test/files/run/t6555.check | 22 +
test/files/run/t6555.scala | 15 +
test/files/run/t6559.scala | 17 +
test/files/run/{bug657.check => t657.check} | 0
test/files/run/{bug657.scala => t657.scala} | 0
test/files/run/t6572/bar_1.scala | 19 +
test/files/run/t6572/foo_2.scala | 17 +
test/files/run/t6584.check | 8 +
test/files/run/t6584.scala | 16 +
test/files/run/t6591_1.check | 1 +
test/files/run/t6591_1.scala | 19 +
test/files/run/t6591_2.check | 1 +
test/files/run/t6591_2.scala | 19 +
test/files/run/t6591_3.check | 1 +
test/files/run/t6591_3.scala | 17 +
test/files/run/t6591_5.check | 1 +
test/files/run/t6591_5.scala | 23 +
test/files/run/t6591_6.check | 1 +
test/files/run/t6591_6.scala | 24 +
test/files/run/t6591_7.check | 4 +
test/files/run/t6591_7.scala | 26 +
test/files/run/t6611.scala | 61 +
test/files/run/t6614.check | 11 +
test/files/run/t6614.scala | 8 +
test/files/run/t6628.check | 2 +
test/files/run/t6628.scala | 11 +
test/files/run/t6631.scala | 18 +
test/files/run/t6632.check | 3 +
test/files/run/t6632.scala | 29 +
test/files/run/t6633.check | 3 +
test/files/run/t6633.scala | 33 +
test/files/run/t6634.check | 31 +
test/files/run/t6634.scala | 80 +
test/files/run/t6637.check | 1 +
test/files/run/t6637.scala | 8 +
test/files/run/t6644.scala | 8 +
test/files/run/t6646.check | 5 +
test/files/run/t6646.scala | 19 +
test/files/run/t6662.check | 1 +
test/files/run/t6662/Macro_1.scala | 8 +
test/files/run/t6662/Test_2.scala | 8 +
test/files/run/{syncchannel.check => t6663.check} | 0
test/files/run/t6663.scala | 17 +
test/files/run/t6666a.scala | 16 +
test/files/run/t6669.scala | 26 +
test/files/run/t6673.check | 1 +
test/files/run/t6673.scala | 5 +
test/files/run/t6677.scala | 28 +
test/files/run/t6677b.scala | 33 +
test/files/run/t6687.scala | 10 +
test/files/run/t6690.scala | 62 +
test/files/run/t6695.scala | 18 +
test/files/run/t6706.scala | 14 +
test/files/run/t6715.scala | 15 +
test/files/run/t6725-1.check | 2 +
test/files/run/t6725-1.scala | 5 +
test/files/run/t6725-2.check | 8 +
test/files/run/t6725-2.scala | 6 +
test/files/run/t6731.check | 40 +
test/files/run/t6731.scala | 143 +
test/files/run/t6793.scala | 9 +
test/files/run/t6793b.scala | 11 +
test/files/run/t6793c.scala | 11 +
test/files/run/t6827.check | 15 +
test/files/run/t6827.scala | 34 +
test/files/run/t6853.scala | 18 +
test/files/run/t6863.scala | 114 +
test/files/run/t6888.check | 3 +
test/files/run/t6888.scala | 19 +
test/files/run/t6900.scala | 36 +
test/files/run/t6911.scala | 24 +
test/files/run/t6928-run.check | 1 +
test/files/run/t6928-run.scala | 10 +
test/files/run/t6935.scala | 14 +
test/files/run/t6937.check | 26 +
test/files/run/t6937.scala | 12 +
test/files/run/t6955.check | 1 +
test/files/run/t6955.scala | 26 +
test/files/run/t6956.check | 1 +
test/files/run/t6956.scala | 26 +
test/files/run/t6957.scala | 8 +
test/files/run/t6968.check | 1 +
test/files/run/t6968.scala | 7 +
test/files/run/t6969.check | 1 +
test/files/run/t6969.scala | 28 +
test/files/run/t6989.check | 240 +
test/files/run/t6989/JavaClass_1.java | 43 +
test/files/run/t6989/Test_2.scala | 42 +
test/files/run/t7008-scala-defined.check | 7 +
.../run/t7008-scala-defined/Impls_Macros_2.scala | 12 +
.../ScalaClassWithCheckedExceptions_1.scala | 6 +
test/files/run/t7008-scala-defined/Test_3.scala | 9 +
test/files/run/t7008.check | 9 +
test/files/run/t7008/Impls_Macros_2.scala | 12 +
.../t7008/JavaClassWithCheckedExceptions_1.java | 7 +
test/files/run/t7008/Test_3.scala | 9 +
test/files/run/t7039.check | 1 +
test/files/run/t7039.scala | 11 +
test/files/run/t7046.check | 2 +
test/files/run/t7046.scala | 13 +
test/files/{jvm/bug680.check => run/t7047.check} | 0
test/files/run/t7047/Impls_Macros_1.scala | 19 +
test/files/run/t7047/Test_2.scala | 3 +
test/files/run/t7064-old-style-supercalls.check | 1 +
test/files/run/t7064-old-style-supercalls.scala | 48 +
test/files/run/t7074.check | 9 +
test/files/run/t7074.scala | 15 +
test/files/run/t7096.check | 2 +
test/files/run/t7096.scala | 36 +
test/files/run/t7106.check | 6 +
test/files/run/t7106/Analyzed_1.scala | 14 +
test/files/run/t7106/test.scala | 10 +
test/files/run/t7171.scala | 22 +
test/files/run/t7185.check | 34 +
test/files/run/t7185.scala | 12 +
test/files/run/t7200.scala | 34 +
test/files/run/t7214.scala | 57 +
test/files/run/t7215.scala | 6 +
test/files/run/t7235.check | 4 +
test/files/run/t7235.scala | 14 +
test/files/{jvm/bug680.check => run/t7240.check} | 0
test/files/run/t7240/Macros_1.scala | 48 +
test/files/run/t7240/Test_2.scala | 3 +
test/files/run/t7242.scala | 71 +
test/files/run/t7246.check | 1 +
test/files/run/t7246/Outer.java | 4 +
test/files/run/t7246/Test.scala | 16 +
test/files/run/t7246b.check | 2 +
test/files/run/t7246b/Base.scala | 7 +
test/files/run/t7246b/Outer.java | 4 +
test/files/run/t7246b/Test.scala | 14 +
test/files/run/t7249.check | 1 +
test/files/run/t7249.scala | 7 +
test/files/run/t7265.scala | 27 +
test/files/run/t7269.scala | 32 +
test/files/run/t7271.check | 12 +
test/files/run/t7271.scala | 34 +
test/files/run/t7290.scala | 9 +
test/files/run/t7291a.check | 1 +
test/files/{jvm/bug680.check => run/t7291a.flags} | 0
test/files/run/t7291a.scala | 19 +
test/files/run/t7291b.check | 2 +
test/files/run/t7291b.flags | 1 +
test/files/run/t7291b.scala | 19 +
test/files/run/t7319.check | 38 +
test/files/run/t7319.scala | 13 +
test/files/run/t7325.check | 19 +
test/files/run/t7325.scala | 25 +
test/files/run/t7331a.check | 2 +
test/files/run/t7331a.scala | 10 +
test/files/run/t7331b.check | 3 +
test/files/run/t7331b.scala | 11 +
test/files/run/t7331c.check | 3 +
test/files/run/t7331c.scala | 11 +
test/files/run/t7336.scala | 31 +
test/files/{jvm/bug680.check => run/t7341.check} | 0
test/files/run/t7341.flags | 1 +
test/files/run/t7341.scala | 15 +
test/files/run/t7359.check | 1 +
test/files/run/t7359/Cyclic_1.java | 3 +
test/files/run/t7359/Test_2.scala | 6 +
test/files/run/t7375a.check | 4 +
test/files/run/t7375a.scala | 16 +
test/files/run/t7375b.check | 4 +
test/files/run/t7375b/Macros_1.scala | 18 +
test/files/run/t7375b/Test_2.scala | 3 +
test/files/run/t7398.scala | 26 +
test/files/run/t7439.check | 1 +
test/files/run/t7439/A_1.java | 3 +
test/files/run/t7439/B_1.java | 3 +
test/files/run/t7439/Test_2.scala | 31 +
test/files/run/{bug744.check => t744.check} | 0
test/files/run/{bug744.scala => t744.scala} | 0
test/files/run/t7455.check | 4 +
test/files/run/t7455/Outer.java | 31 +
test/files/run/t7455/Test.scala | 30 +
test/files/run/t7498.scala | 20 +
test/files/run/t7507.scala | 31 +
test/files/run/{bug751.scala => t751.scala} | 0
test/files/{jvm/bug680.check => run/t7510.check} | 0
test/files/run/t7510/Ann_1.java | 4 +
test/files/run/t7510/Test_2.scala | 9 +
test/files/run/t7556.check | 2 +
test/files/run/t7556/Test_2.scala | 11 +
test/files/run/t7556/mega-class_1.scala | 3002 ++++
test/files/run/t7558.scala | 9 +
test/files/run/t7569.check | 12 +
test/files/run/t7569.scala | 19 +
test/files/run/t7571.scala | 12 +
test/files/run/t7617a.check | 2 +
test/files/run/t7617a/Macros_1.scala | 22 +
test/files/run/t7617a/Test_2.scala | 5 +
test/files/run/t7617b.check | 1 +
test/files/run/t7617b/Macros_1.scala | 8 +
test/files/run/t7617b/Test_2.scala | 11 +
test/files/run/t7657.check | 3 +
test/files/run/t7657/Macros_1.scala | 8 +
test/files/run/t7657/Test_2.scala | 6 +
test/files/run/t7733.check | 1 +
test/files/run/t7733/Separate_1.scala | 5 +
test/files/run/t7733/Test_2.scala | 9 +
test/files/run/t7775.scala | 17 +
test/files/run/t7779.scala | 67 +
test/files/run/t7825.scala | 34 +
test/files/run/t7912.scala | 16 +
test/files/run/t8010.scala | 22 +
test/files/run/t8029.scala | 57 +
test/files/run/t8114.scala | 15 +
test/files/run/{bug874.check => t874.check} | 0
test/files/run/{bug874.scala => t874.scala} | 0
test/files/run/{bug889.check => t889.check} | 0
test/files/run/{bug889.scala => t889.scala} | 0
test/files/run/{bug920.check => t920.check} | 0
test/files/run/{bug920.scala => t920.scala} | 0
test/files/run/{bug949.scala => t949.scala} | 0
test/files/run/{bug978.scala => t978.scala} | 0
test/files/run/tailcalls.check | 3 +-
test/files/run/tailcalls.scala | 2 +-
test/files/run/test-cpp.check | 81 +
test/files/run/test-cpp.scala | 104 +
test/files/run/toolbox_console_reporter.check | 8 +
test/files/run/toolbox_console_reporter.scala | 29 +
test/files/run/toolbox_current_run_compiles.check | 2 +
test/files/run/toolbox_current_run_compiles.scala | 28 +
.../run/toolbox_default_reporter_is_silent.check | 1 +
.../run/toolbox_default_reporter_is_silent.scala | 16 +
test/files/run/toolbox_rangepos.check | 1 +
test/files/run/toolbox_rangepos.scala | 8 +
test/files/run/toolbox_silent_reporter.check | 4 +
test/files/run/toolbox_silent_reporter.scala | 19 +
.../run/toolbox_typecheck_implicitsdisabled.check | 5 +
.../run/toolbox_typecheck_implicitsdisabled.scala | 27 +
.../run/toolbox_typecheck_inferimplicitvalue.check | 1 +
.../run/toolbox_typecheck_inferimplicitvalue.scala | 13 +
.../run/toolbox_typecheck_macrosdisabled.check | 41 +
.../run/toolbox_typecheck_macrosdisabled.scala | 25 +
.../run/toolbox_typecheck_macrosdisabled2.check | 41 +
.../run/toolbox_typecheck_macrosdisabled2.scala | 25 +
test/files/run/trait-renaming.check | 2 +
test/files/run/trait-renaming/A_1.scala | 15 +
test/files/run/trait-renaming/B_2.scala | 5 +
test/files/run/treePrint.scala | 11 +-
test/files/run/triemap-hash.scala | 46 +
test/files/run/triple-quoted-expr.scala | 8 +-
test/files/run/try-catch-unify.check | 4 +
test/files/run/try-catch-unify.scala | 16 +
test/files/run/tuple-zipped.scala | 4 +-
test/files/run/tuples-msil.check | 2 -
test/files/run/type-currying.check | 27 +
test/files/run/type-currying.scala | 58 +
.../{syncchannel.check => typed-annotated.check} | 0
test/files/run/typed-annotated/Macros_1.scala | 17 +
test/files/run/typed-annotated/Test_2.scala | 3 +
test/files/run/typetags_core.check | 30 +
test/files/run/typetags_core.scala | 34 +
test/files/run/typetags_multi.check | 5 +
test/files/run/typetags_multi.scala | 9 +
test/files/run/typetags_serialize.check | 2 +
test/files/run/typetags_serialize.scala | 29 +
...gs_without_scala_reflect_manifest_lookup.check} | 0
...ags_without_scala_reflect_manifest_lookup.scala | 29 +
...tags_without_scala_reflect_typetag_lookup.check | 2 +
...tags_without_scala_reflect_typetag_lookup.scala | 43 +
...ut_scala_reflect_typetag_manifest_interop.check | 2 +
...ut_scala_reflect_typetag_manifest_interop.scala | 47 +
test/files/run/unapply.scala | 81 +-
test/files/run/unittest_collection.scala | 123 +-
test/files/run/unittest_io.scala | 54 +-
.../run/valueclasses-classmanifest-basic.check | 1 +
.../run/valueclasses-classmanifest-basic.scala | 5 +
.../valueclasses-classmanifest-existential.check | 1 +
.../valueclasses-classmanifest-existential.scala | 5 +
.../run/valueclasses-classmanifest-generic.check | 1 +
.../run/valueclasses-classmanifest-generic.scala | 5 +
test/files/run/valueclasses-classtag-basic.check | 1 +
test/files/run/valueclasses-classtag-basic.scala | 5 +
.../run/valueclasses-classtag-existential.check | 1 +
.../run/valueclasses-classtag-existential.scala | 5 +
test/files/run/valueclasses-classtag-generic.check | 1 +
test/files/run/valueclasses-classtag-generic.scala | 5 +
test/files/run/valueclasses-constr.check | 10 +
test/files/run/valueclasses-constr.scala | 79 +
test/files/run/valueclasses-manifest-basic.check | 1 +
test/files/run/valueclasses-manifest-basic.scala | 5 +
.../run/valueclasses-manifest-existential.check | 1 +
.../run/valueclasses-manifest-existential.scala | 5 +
test/files/run/valueclasses-manifest-generic.check | 1 +
test/files/run/valueclasses-manifest-generic.scala | 5 +
test/files/run/valueclasses-pavlov.check | 2 +
test/files/run/valueclasses-pavlov.scala | 26 +
test/files/run/valueclasses-typetag-basic.check | 1 +
test/files/run/valueclasses-typetag-basic.scala | 5 +
.../run/valueclasses-typetag-existential.check | 1 +
.../run/valueclasses-typetag-existential.scala | 5 +
test/files/run/valueclasses-typetag-generic.check | 1 +
test/files/run/valueclasses-typetag-generic.scala | 5 +
test/files/run/viewtest.scala | 0
test/files/run/virtpatmat_alts.check | 1 +
test/files/run/virtpatmat_alts.flags | 1 +
test/files/run/virtpatmat_alts.scala | 12 +
test/files/run/virtpatmat_apply.check | 1 +
test/files/run/virtpatmat_apply.flags | 1 +
test/files/run/virtpatmat_apply.scala | 7 +
test/files/run/virtpatmat_casting.check | 1 +
test/files/run/virtpatmat_casting.flags | 1 +
test/files/run/virtpatmat_casting.scala | 8 +
test/files/run/virtpatmat_extends_product.check | 1 +
test/files/run/virtpatmat_extends_product.flags | 1 +
test/files/run/virtpatmat_extends_product.scala | 14 +
test/files/run/virtpatmat_literal.check | 3 +
test/files/run/virtpatmat_literal.flags | 1 +
test/files/run/virtpatmat_literal.scala | 22 +
test/files/run/virtpatmat_nested_lists.check | 1 +
test/files/run/virtpatmat_nested_lists.flags | 1 +
test/files/run/virtpatmat_nested_lists.scala | 3 +
test/files/run/virtpatmat_npe.check | 1 +
test/files/run/virtpatmat_npe.flags | 1 +
test/files/run/virtpatmat_npe.scala | 10 +
test/files/run/virtpatmat_opt_sharing.check | 1 +
test/files/run/virtpatmat_opt_sharing.flags | 1 +
test/files/run/virtpatmat_opt_sharing.scala | 10 +
test/files/run/virtpatmat_partial.check | 17 +
test/files/run/virtpatmat_partial.flags | 1 +
test/files/run/virtpatmat_partial.scala | 181 +
test/files/run/virtpatmat_partial_backquoted.check | 1 +
test/files/run/virtpatmat_partial_backquoted.scala | 12 +
test/files/run/virtpatmat_staging.check | 1 +
test/files/run/virtpatmat_staging.flags | 1 +
test/files/run/virtpatmat_staging.scala | 52 +
test/files/run/virtpatmat_stringinterp.check | 1 +
.../virtpatmat_stringinterp.flags} | 0
test/files/run/virtpatmat_stringinterp.scala | 13 +
test/files/run/virtpatmat_switch.check | 7 +
test/files/run/virtpatmat_switch.flags | 1 +
test/files/run/virtpatmat_switch.scala | 38 +
.../run/virtpatmat_tailcalls_verifyerror.check} | 0
.../run/virtpatmat_tailcalls_verifyerror.flags | 1 +
.../run/virtpatmat_tailcalls_verifyerror.scala | 14 +
test/files/run/virtpatmat_try.check | 2 +
test/files/run/virtpatmat_try.flags | 1 +
test/files/run/virtpatmat_try.scala | 47 +
test/files/run/virtpatmat_typed.check | 1 +
test/files/run/virtpatmat_typed.flags | 1 +
test/files/run/virtpatmat_typed.scala | 7 +
test/files/run/virtpatmat_typetag.check | 10 +
.../virtpatmat_typetag.flags} | 0
test/files/run/virtpatmat_typetag.scala | 36 +
test/files/run/virtpatmat_unapply.check | 2 +
test/files/run/virtpatmat_unapply.flags | 1 +
test/files/run/virtpatmat_unapply.scala | 32 +
test/files/run/virtpatmat_unapplyprod.check | 4 +
test/files/run/virtpatmat_unapplyprod.flags | 1 +
test/files/run/virtpatmat_unapplyprod.scala | 23 +
test/files/run/virtpatmat_unapplyseq.check | 1 +
test/files/run/virtpatmat_unapplyseq.flags | 1 +
test/files/run/virtpatmat_unapplyseq.scala | 5 +
test/files/run/virtpatmat_valdef.check | 1 +
test/files/run/virtpatmat_valdef.scala | 6 +
test/files/run/weakconform.scala | 0
test/files/run/xml-attribute.check | 12 +
test/files/run/xml-attribute.scala | 37 +
test/files/run/xml-loop-bug.scala | 4 +-
test/files/scalacheck/CheckEither.scala | 9 +-
test/files/scalacheck/Ctrie.scala | 199 +
test/files/scalacheck/array-new.scala | 37 +
.../scalacheck/{array.scala => array-old.scala} | 0
test/files/scalacheck/avl.scala | 114 +
test/files/scalacheck/duration.scala | 69 +
test/files/scalacheck/nan-ordering.scala | 130 +
.../parallel-collections/ParallelCtrieCheck.scala | 98 +
.../ParallelIterableCheck.scala | 32 +-
.../parallel-collections/ParallelMapCheck1.scala | 2 +-
.../files/scalacheck/parallel-collections/pc.scala | 3 +
test/files/scalacheck/range.scala | 485 +-
test/files/scalacheck/redblack.scala | 213 +
test/files/scalacheck/redblacktree.scala | 258 +
test/files/scalacheck/si4147.scala | 67 +
test/files/scalacheck/t2460.scala | 32 +
test/files/scalacheck/treemap.scala | 154 +
test/files/scalacheck/treeset.scala | 152 +
test/files/scalap/abstractClass/result.test | 2 +-
test/files/scalap/abstractMethod/result.test | 2 +-
test/files/scalap/caseClass/result.test | 19 +-
test/files/scalap/caseObject/result.test | 14 +-
test/files/scalap/cbnParam/result.test | 2 +-
test/files/scalap/classPrivate/result.test | 6 +-
test/files/scalap/classWithExistential/result.test | 4 +-
.../scalap/classWithSelfAnnotation/result.test | 2 +-
test/files/scalap/covariantParam/result.test | 2 +-
test/files/scalap/defaultParameter/result.test | 4 +-
test/files/scalap/implicitParam/result.test | 2 +-
test/files/scalap/packageObject/result.test | 2 +-
test/files/scalap/paramClauses/result.test | 2 +-
test/files/scalap/paramNames/result.test | 2 +-
test/files/scalap/sequenceParam/result.test | 2 +-
test/files/scalap/simpleClass/result.test | 2 +-
test/files/scalap/traitObject/result.test | 4 +-
test/files/scalap/typeAnnotations/result.test | 4 +-
test/files/scalap/valAndVar/result.test | 2 +-
test/files/scalap/wildcardType/result.test | 2 +-
test/files/specialized/SI-7343.scala | 55 +
test/files/specialized/SI-7344.scala | 53 +
test/files/specialized/arrays-traits.check | 8 +-
test/files/specialized/arrays-traits.scala | 17 +-
test/files/specialized/arrays.check | 6 +-
test/files/specialized/fft.check | 2 +-
test/files/specialized/spec-ame.check | 2 +-
test/files/specialized/spec-ame.scala | 3 +
test/files/specialized/spec-hlists.check | 2 +
test/files/specialized/spec-hlists.scala | 29 +
.../{spec-matrix.check => spec-matrix-new.check} | 0
test/files/specialized/spec-matrix-new.scala | 82 +
.../{spec-matrix.check => spec-matrix-old.check} | 0
test/files/specialized/spec-matrix-old.scala | 80 +
test/files/specialized/spec-matrix.scala | 80 -
test/files/specialized/spec-patmatch.check | 2 +-
test/files/specialized/t6035.check | 1 +
test/files/specialized/t6035/first_1.scala | 5 +
test/files/specialized/t6035/second_2.scala | 13 +
test/files/speclib/instrumented.jar | Bin 24221 -> 0 bytes
test/files/speclib/instrumented.jar.desired.sha1 | 2 +-
test/{files => flaky}/pos/t2868.cmds | 0
test/{files => flaky}/pos/t2868/Jann.java | 0
test/{files => flaky}/pos/t2868/Nest.java | 0
test/flaky/pos/t2868/pick_1.scala | 7 +
test/{files => flaky}/pos/t2868/t2868_src_2.scala | 0
test/instrumented/boxes.patch | 29 +
.../library/scala/runtime/BoxesRunTime.java | 491 +-
.../library/scala/runtime/ScalaRunTime.scala | 291 +-
test/instrumented/mkinstrumented | 46 -
test/instrumented/mkinstrumented.sh | 51 +
test/instrumented/srt.patch | 10 +
.../scala/concurrent/impl/DefaultPromiseTest.scala | 344 +
.../reflect/internal/util/SourceFileTest.scala | 58 +
test/junit/scala/runtime/ScalaRunTimeTest.scala | 70 +
test/junit/scala/tools/nsc/SampleTest.scala | 17 +
test/osgi/src/BasicLibrary.scala | 37 +
test/osgi/src/BasicReflection.scala | 66 +
test/osgi/src/BasicTest.scala | 33 +
test/osgi/src/ReflectionToolboxTest.scala | 49 +
test/osgi/src/ScalaOsgiHelper.scala | 36 +
test/partest | 20 +-
test/partest.bat | 203 +-
test/pending/continuations-neg/t3628.check | 3 -
test/pending/junit/scala/util/t7265.scala | 46 +
test/pending/jvm/interpreter.scala | 158 -
test/pending/neg/bug3189.check | 7 -
test/pending/neg/bug963.scala | 26 -
test/pending/neg/dot-classpath.flags | 1 +
test/pending/neg/dot-classpath/S_1.scala | 3 +
test/pending/neg/dot-classpath/S_2.scala | 3 +
.../neg/macro-invalidusage-badbounds-b.check | 4 +
.../neg/macro-invalidusage-badbounds-b.flags | 1 +
.../macro-invalidusage-badbounds-b/Impls_1.scala | 5 +
.../Macros_Test_2.scala | 8 +
.../neg/plugin-after-terminal/misc/build.sh | 0
.../pending/neg/plugin-before-parser/misc/build.sh | 0
.../neg/plugin-cyclic-dependency/misc/build.sh | 0
.../neg/plugin-multiple-rafter/misc/build.sh | 0
.../neg/plugin-rafter-before-1/misc/build.sh | 0
.../neg/plugin-rightafter-terminal/misc/build.sh | 0
test/pending/neg/reify_packed.check | 4 +
test/pending/neg/reify_packed.scala | 15 +
test/pending/neg/t1557.scala | 18 +
test/pending/neg/t2066.scala | 16 +
test/pending/neg/t3633/test/Test.scala | 8 +-
test/pending/neg/t5008.scala | 165 +
test/pending/neg/t5353.check | 4 +
test/pending/neg/t5353.scala | 3 +
test/pending/neg/t5589neg.check | 37 +
test/pending/neg/t5589neg.scala | 6 +
test/pending/neg/t5589neg2.scala | 13 +
test/pending/neg/t5618.check | 7 +
test/pending/neg/t5618.scala | 27 +
test/pending/neg/t7441.check | 6 +
test/pending/neg/t7441.scala | 7 +
test/pending/neg/t796.scala | 20 -
test/pending/pos/bug1987/bug1987a.scala | 7 -
test/pending/pos/bug1987/bug1987b.scala | 10 -
test/pending/pos/bug4704.scala | 36 +
test/pending/pos/exhaust_2.scala | 54 +
test/pending/pos/inference.scala | 41 +
test/pending/pos/nothing.scala | 24 +
test/pending/pos/t1336.scala | 10 +
test/pending/pos/t1380/gnujaxp.jar.desired.sha1 | 1 -
test/pending/pos/t1380/hallo.scala | 3 -
test/pending/pos/t1476.scala | 23 +
test/{files => pending}/pos/t1751.cmds | 0
test/{files => pending}/pos/t1751/A1_2.scala | 0
test/{files => pending}/pos/t1751/A2_1.scala | 0
.../{files => pending}/pos/t1751/SuiteClasses.java | 0
test/{files => pending}/pos/t1782.cmds | 0
test/{files => pending}/pos/t1782/Ann.java | 0
test/{files => pending}/pos/t1782/Days.java | 0
.../pos/t1782/ImplementedBy.java | 0
test/pending/pos/t1782/Test_1.scala | 16 +
test/pending/pos/t1832.scala | 10 +
test/{files => pending}/pos/t294.cmds | 0
test/{files => pending}/pos/t294/Ann.java | 0
test/{files => pending}/pos/t294/Ann2.java | 0
test/{files => pending}/pos/t294/Test_1.scala | 0
test/{files => pending}/pos/t294/Test_2.scala | 0
test/pending/pos/t3439.scala | 2 +
test/pending/pos/t3943/Outer_1.java | 14 +
test/pending/pos/t3943/test_2.scala | 8 +
test/pending/pos/t4012.scala | 7 +
test/pending/pos/t4123.scala | 14 +
test/pending/pos/t4436.scala | 3 +
test/pending/pos/t4541.scala | 10 +
test/pending/pos/t4606.scala | 29 +
.../caseinherit.flags => pending/pos/t4649.flags} | 0
test/pending/pos/t4649.scala | 6 +
test/pending/pos/t4683.scala | 11 +
test/pending/pos/t4717.scala | 7 +
test/pending/pos/t4786.scala | 24 +
test/pending/pos/t4787.scala | 4 +
test/pending/pos/t4790.scala | 4 +
test/pending/pos/t4859.scala | 15 +
test/pending/pos/t5091.scala | 11 +
test/pending/pos/t5231.scala | 18 +
test/pending/pos/t5259.scala | 14 +
test/pending/pos/t5265.scala | 21 +
test/pending/pos/t5399.scala | 8 +
test/pending/pos/t5399a.scala | 19 +
test/pending/pos/t5400.scala | 14 +
.../caseinherit.flags => pending/pos/t5503.flags} | 0
test/pending/pos/t5503.scala | 18 +
test/pending/pos/t5521.scala | 3 +
test/pending/pos/t5534.scala | 11 +
test/pending/pos/t5559.scala | 23 +
test/pending/pos/t5564.scala | 5 +
test/pending/pos/t5579.scala | 29 +
test/pending/pos/t5585.scala | 18 +
test/pending/pos/t5589.scala | 22 +
test/pending/pos/t5606.scala | 9 +
test/pending/pos/t5626.scala | 12 +
test/pending/pos/t5639/Bar.scala | 7 +
test/pending/pos/t5639/Foo.scala | 7 +
test/pending/pos/t5654.scala | 4 +
test/pending/pos/t5712.scala | 14 +
test/pending/pos/t7234.scala | 15 +
test/pending/pos/t7234b.scala | 20 +
test/pending/pos/t7778/Foo_1.java | 6 +
test/pending/pos/t7778/Test_2.scala | 3 +
test/pending/pos/those-kinds-are-high.scala | 18 +-
test/pending/pos/unapplyGeneric.scala | 11 -
test/pending/pos/z1720.scala | 16 +
test/pending/run/bug2318.scala | 38 -
test/pending/run/bug4704run.scala | 10 +
test/pending/run/hk-lub-fail.scala | 37 +
.../run/idempotency-partial-functions.scala | 28 +
test/pending/run/implicit-classes.scala | 17 +
test/pending/run/jar-version.scala | 11 +
test/pending/run/macro-expand-default.flags | 1 +
.../pending/run/macro-expand-default/Impls_1.scala | 10 +
.../run/macro-expand-default/Macros_Test_2.scala | 8 +
...and-implicit-macro-defeats-type-inference.check | 6 +
...and-implicit-macro-defeats-type-inference.flags | 1 +
.../Impls_1.scala | 10 +
.../Macros_Test_2.scala | 6 +
.../run/macro-expand-macro-has-context-bound.check | 1 +
.../run/macro-expand-macro-has-context-bound.flags | 1 +
.../Impls_1.scala | 10 +
.../Macros_Test_2.scala | 4 +
test/pending/run/macro-expand-named.flags | 1 +
test/pending/run/macro-expand-named/Impls_1.scala | 10 +
.../run/macro-expand-named/Macros_Test_2.scala | 5 +
.../run/macro-expand-tparams-prefix-e1.check | 3 +
.../run/macro-expand-tparams-prefix-e1.flags | 1 +
.../macro-expand-tparams-prefix-e1/Impls_1.scala | 12 +
.../Macros_Test_2.scala | 13 +
.../run/macro-expand-tparams-prefix-f1.check | 3 +
.../run/macro-expand-tparams-prefix-f1.flags | 1 +
.../macro-expand-tparams-prefix-f1/Impls_1.scala | 12 +
.../Macros_Test_2.scala | 13 +
test/pending/run/macro-quasiinvalidbody-a.check | 1 +
test/pending/run/macro-quasiinvalidbody-a.flags | 1 +
.../run/macro-quasiinvalidbody-a/Impls_1.scala | 5 +
.../macro-quasiinvalidbody-a/Macros_Test_2.scala | 10 +
test/pending/run/macro-quasiinvalidbody-b.check | 1 +
test/pending/run/macro-quasiinvalidbody-b.flags | 1 +
.../run/macro-quasiinvalidbody-b/Impls_1.scala | 7 +
.../macro-quasiinvalidbody-b/Macros_Test_2.scala | 10 +
test/pending/run/macro-reify-array.flags | 1 +
test/pending/run/macro-reify-array/Macros_1.scala | 11 +
test/pending/run/macro-reify-array/Test_2.scala | 4 +
...cro-reify-groundtypetag-hktypeparams-tags.check | 2 +
.../Test.scala | 9 +
test/pending/run/macro-reify-tagful-b.check | 1 +
test/pending/run/macro-reify-tagful-b.flags | 1 +
.../run/macro-reify-tagful-b/Macros_1.scala | 11 +
test/pending/run/macro-reify-tagful-b/Test_2.scala | 4 +
test/pending/run/macro-reify-tagless-b.check | 3 +
test/pending/run/macro-reify-tagless-b.flags | 1 +
.../run/macro-reify-tagless-b/Impls_Macros_1.scala | 11 +
.../pending/run/macro-reify-tagless-b/Test_2.scala | 13 +
.../macro-reify-typetag-hktypeparams-notags.check | 2 +
.../Test.scala | 9 +
.../macro-reify-typetag-hktypeparams-tags.check | 2 +
.../Test.scala | 9 +
test/pending/run/partial-anyref-spec.check | 13 +
test/pending/run/partial-anyref-spec.scala | 31 +
test/pending/run/reflection-mem-eval.scala | 26 +
test/pending/run/reify_addressbook.check | 30 +
test/pending/run/reify_addressbook.scala | 65 +
test/pending/run/reify_brainf_ck.check | 4 +
test/pending/run/reify_brainf_ck.scala | 79 +
test/pending/run/reify_callccinterpreter.check | 3 +
test/pending/run/reify_callccinterpreter.scala | 88 +
test/pending/run/reify_closure2b.check | 2 +
test/pending/run/reify_closure2b.scala | 21 +
test/pending/run/reify_closure3b.check | 2 +
test/pending/run/reify_closure3b.scala | 23 +
test/pending/run/reify_closure4b.check | 2 +
test/pending/run/reify_closure4b.scala | 23 +
test/pending/run/reify_closure5b.check | 2 +
test/pending/run/reify_closure5b.scala | 21 +
test/pending/run/reify_closure9a.check | 1 +
test/pending/run/reify_closure9a.scala | 18 +
test/pending/run/reify_closure9b.check | 1 +
test/pending/run/reify_closure9b.scala | 18 +
test/pending/run/reify_closures11.check | 1 +
test/pending/run/reify_closures11.scala | 16 +
.../run/reify_gadts.check} | 0
test/pending/run/reify_gadts.scala | 39 +
test/pending/run/reify_implicits-new.check | 1 +
test/pending/run/reify_implicits-new.scala | 16 +
test/pending/run/reify_implicits-old.check | 1 +
test/pending/run/reify_implicits-old.scala | 15 +
test/pending/run/reify_newimpl_07.scala | 14 +
test/pending/run/reify_newimpl_08.scala | 16 +
test/pending/run/reify_newimpl_09.scala | 13 +
test/pending/run/reify_newimpl_09a.scala | 13 +
test/pending/run/reify_newimpl_09b.scala | 14 +
test/pending/run/reify_newimpl_09c.scala | 20 +
test/pending/run/reify_newimpl_10.scala | 14 +
test/pending/run/reify_newimpl_16.scala | 17 +
test/pending/run/reify_newimpl_17.scala | 20 +
test/pending/run/reify_newimpl_28.scala | 17 +
test/pending/run/reify_newimpl_32.scala | 17 +
test/pending/run/reify_newimpl_34.scala | 18 +
test/pending/run/reify_newimpl_46.scala | 15 +
test/pending/run/reify_newimpl_53.scala | 18 +
test/pending/run/reify_simpleinterpreter.check | 2 +
test/pending/run/reify_simpleinterpreter.scala | 75 +
.../run/structural-types-vs-anon-classes.scala | 17 +
test/pending/run/subarray.check | 2 -
test/pending/run/t0446.check | 2 -
test/pending/run/{bug2318.check => t2318.check} | 0
test/pending/run/t2318.scala | 38 +
test/pending/run/{bug2364.check => t2364.check} | 0
test/pending/run/{bug2364.scala => t2364.scala} | 0
test/pending/run/t2897.scala | 22 +
test/pending/run/t3609.scala | 0
test/pending/run/t3669.scala | 22 +
test/pending/run/t3832.scala | 7 +
test/{files => pending}/run/t3897.check | 0
test/{files => pending}/run/t3897/J_2.java | 0
test/{files => pending}/run/t3897/a_1.scala | 0
test/{files => pending}/run/t3897/a_2.scala | 0
test/pending/run/t3899.check | 4 +
test/pending/run/t3899/Base_1.java | 5 +
test/pending/run/t3899/Derived_2.scala | 30 +
test/pending/run/t4098.scala | 9 +
test/pending/run/t4283/IllegalAccess.scala | 17 -
test/pending/run/{bug4291.check => t4291.check} | 0
test/pending/run/{bug4291.scala => t4291.scala} | 0
test/pending/run/t4460.scala | 12 +
test/pending/run/t4511.scala | 10 +
test/pending/run/t4511b.scala | 25 +
test/pending/run/t4574.scala | 13 +
test/pending/run/t4713/JavaAnnots.java | 14 +
test/pending/run/t4713/Problem.scala | 5 +
test/pending/run/t4728.check | 2 +
test/pending/run/t4728.scala | 11 +
test/pending/run/t4971.scala | 16 +
test/pending/run/t4996.scala | 15 +
test/pending/run/t5258b.check | 1 +
test/pending/run/t5258b.scala | 9 +
test/pending/run/t5258c.check | 1 +
test/pending/run/t5258c.scala | 9 +
test/pending/run/t5284.scala | 14 +
test/pending/run/t5293-map.scala | 88 +
test/pending/run/t5293.scala | 83 +
test/pending/run/t5334_1.scala | 9 +
test/pending/run/t5334_2.scala | 9 +
.../jvm/bug680.check => pending/run/t5418.check} | 0
test/pending/run/t5418.scala | 8 +
test/pending/run/t5427a.check | 1 +
test/pending/run/t5427a.scala | 10 +
test/pending/run/t5427b.check | 1 +
test/pending/run/t5427b.scala | 11 +
test/pending/run/t5427c.check | 1 +
test/pending/run/t5427c.scala | 13 +
test/pending/run/t5427d.check | 1 +
test/pending/run/t5427d.scala | 11 +
test/pending/run/t5610a.check | 1 +
test/pending/run/t5610a.scala | 19 +
test/pending/run/t5610b.check | 1 +
test/pending/run/t5610b.scala | 21 +
test/pending/run/t5692.flags | 1 +
test/pending/run/t5692/Impls_Macros_1.scala | 9 +
test/pending/run/t5692/Test_2.scala | 4 +
test/pending/run/t5698/client.scala | 9 +
test/pending/run/t5698/server.scala | 22 +
test/pending/run/t5698/testmsg.scala | 5 +
test/pending/run/t5722.scala | 6 +
test/pending/run/t5726a.scala | 17 +
test/pending/run/t5726b.scala | 16 +
test/pending/run/t5882.scala | 14 +
test/pending/run/t5943b1.scala | 10 +
test/pending/run/t5943b2.scala | 10 +
test/pending/run/t6387.check | 1 +
test/pending/run/t6387.scala | 16 +
test/pending/run/t6408.scala | 11 +
test/pending/run/t6591_4.check | 1 +
test/pending/run/t6591_4.scala | 17 +
.../run/virtpatmat_anonfun_underscore.check} | 0
.../run/virtpatmat_anonfun_underscore.flags | 1 +
.../run/virtpatmat_anonfun_underscore.scala | 4 +
test/pending/script/dashi.check | 1 +
test/pending/script/dashi.flags | 1 +
test/pending/script/dashi/a.scala | 2 +
.../script/{bug2365.javaopts => t2365.javaopts} | 0
test/pending/script/{bug2365.sh => t2365.sh} | 0
test/pending/script/{bug2365 => t2365}/Test.scala | 0
.../{bug2365/bug2365.scala => t2365/runner.scala} | 0
test/pending/shootout/meteor.scala | 67 +-
test/pending/specialized/SI-5005.check | 33 +
test/pending/specialized/SI-5005.scala | 36 +
test/review | 0
test/scaladoc/resources/SI-3314-diagrams.scala | 78 +
test/scaladoc/resources/SI-3314.scala | 85 +
test/scaladoc/resources/SI-4360.scala | 42 +
test/scaladoc/resources/SI-5558.scala | 6 +
test/scaladoc/resources/SI-5784.scala | 28 +
test/scaladoc/resources/SI-6509.scala | 24 +
test/scaladoc/resources/SI-6511.scala | 24 +
test/scaladoc/resources/SI_4676.scala | 4 -
test/scaladoc/resources/SI_4898.scala | 9 +
test/scaladoc/resources/SI_5054_q1.scala | 9 +
test/scaladoc/resources/SI_5054_q2.scala | 9 +
test/scaladoc/resources/SI_5054_q3.scala | 9 +
test/scaladoc/resources/SI_5054_q4.scala | 9 +
test/scaladoc/resources/SI_5054_q5.scala | 9 +
test/scaladoc/resources/SI_5054_q6.scala | 9 +
test/scaladoc/resources/SI_5054_q7.scala | 22 +
test/scaladoc/resources/SI_5287.scala | 17 +
test/scaladoc/resources/Trac3484.scala | 27 -
test/scaladoc/resources/basic.scala | 27 +
test/scaladoc/resources/code-indent.scala | 37 +
test/scaladoc/resources/doc-root/Any.scala | 114 +
test/scaladoc/resources/doc-root/AnyRef.scala | 132 +
test/scaladoc/resources/doc-root/Nothing.scala | 23 +
test/scaladoc/resources/doc-root/Null.scala | 17 +
.../resources/explicit-inheritance-override.scala | 48 +
.../resources/explicit-inheritance-usecase.scala | 47 +
.../resources/implicit-inheritance-override.scala | 41 +
.../resources/implicit-inheritance-usecase.scala | 57 +
.../resources/implicits-ambiguating-res.scala | 72 +
test/scaladoc/resources/implicits-base-res.scala | 152 +
.../resources/implicits-chaining-res.scala | 50 +
.../resources/implicits-elimination-res.scala | 14 +
.../implicits-known-type-classes-res.scala | 39 +
test/scaladoc/resources/implicits-scopes-res.scala | 52 +
.../resources/implicits-shadowing-res.scala | 64 +
.../resources/inheritdoc-corner-cases.scala | 78 +
test/scaladoc/resources/links.scala | 74 +
test/scaladoc/resources/package-object-res.scala | 14 +
test/scaladoc/run/SI-191-deprecated.check | 1 +
test/scaladoc/run/SI-191-deprecated.scala | 72 +
test/scaladoc/run/SI-191.check | 1 +
test/scaladoc/run/SI-191.scala | 77 +
test/scaladoc/run/SI-3314-diagrams.check | 1 +
test/scaladoc/run/SI-3314-diagrams.scala | 28 +
test/scaladoc/run/SI-3314.check | 1 +
test/scaladoc/run/SI-3314.scala | 92 +
test/scaladoc/run/SI-3448.check | 1 +
test/scaladoc/run/SI-3448.scala | 38 +
test/scaladoc/run/SI-3484.check | 1 +
test/scaladoc/run/SI-3484.scala | 52 +
test/scaladoc/run/SI-4324.check | 1 +
test/scaladoc/run/SI-4324.scala | 24 +
test/scaladoc/run/SI-4360.check | 1 +
test/scaladoc/run/SI-4360.scala | 48 +
test/scaladoc/run/SI-4676.check | 1 +
test/scaladoc/run/SI-4676.scala | 26 +
test/scaladoc/run/SI-4887.check | 1 +
test/scaladoc/run/SI-4887.scala | 46 +
test/scaladoc/run/SI-5235.check | 4 +
test/scaladoc/run/SI-5235.scala | 88 +
test/scaladoc/run/SI-5373.check | 1 +
test/scaladoc/run/SI-5373.scala | 34 +
test/scaladoc/run/SI-5533.check | 1 +
test/scaladoc/run/SI-5533.scala | 39 +
test/scaladoc/run/SI-5780.check | 1 +
test/scaladoc/run/SI-5780.scala | 25 +
test/scaladoc/run/SI-5784.check | 1 +
test/scaladoc/run/SI-5784.scala | 44 +
test/scaladoc/run/SI-5933.check | 1 +
test/scaladoc/run/SI-5933.scala | 43 +
test/scaladoc/run/SI-5965.check | 1 +
test/scaladoc/run/SI-5965.scala | 24 +
test/scaladoc/run/SI-6017.check | 1 +
test/scaladoc/run/SI-6017.scala | 28 +
test/scaladoc/run/SI-6140.check | 1 +
test/scaladoc/run/SI-6140.scala | 18 +
test/scaladoc/run/SI-6509.check | 1 +
test/scaladoc/run/SI-6509.scala | 30 +
test/scaladoc/run/SI-6511.check | 1 +
test/scaladoc/run/SI-6511.scala | 22 +
test/scaladoc/run/SI-6580.check | 11 +
test/scaladoc/run/SI-6580.scala | 32 +
test/scaladoc/run/SI-6715.check | 1 +
test/scaladoc/run/SI-6715.scala | 15 +
test/scaladoc/run/SI-6812.check | 1 +
test/scaladoc/run/SI-6812.scala | 24 +
test/scaladoc/run/SI-7367.check | 1 +
test/scaladoc/run/SI-7367.scala | 25 +
test/scaladoc/run/diagrams-base.check | 1 +
test/scaladoc/run/diagrams-base.scala | 73 +
test/scaladoc/run/diagrams-determinism.check | 1 +
test/scaladoc/run/diagrams-determinism.scala | 67 +
test/scaladoc/run/diagrams-filtering.check | 1 +
test/scaladoc/run/diagrams-filtering.scala | 93 +
test/scaladoc/run/diagrams-inherited-nodes.check | 1 +
test/scaladoc/run/diagrams-inherited-nodes.scala | 69 +
test/scaladoc/run/groups.check | 1 +
test/scaladoc/run/groups.scala | 127 +
test/scaladoc/run/implicits-ambiguating.check | 1 +
test/scaladoc/run/implicits-ambiguating.scala | 114 +
test/scaladoc/run/implicits-base.check | 1 +
test/scaladoc/run/implicits-base.scala | 209 +
test/scaladoc/run/implicits-chaining.check | 1 +
test/scaladoc/run/implicits-chaining.scala | 65 +
.../run/implicits-known-type-classes.check | 1 +
.../run/implicits-known-type-classes.scala | 33 +
test/scaladoc/run/implicits-scopes.check | 1 +
test/scaladoc/run/implicits-scopes.scala | 79 +
test/scaladoc/run/implicits-shadowing.check | 1 +
test/scaladoc/run/implicits-shadowing.scala | 59 +
test/scaladoc/run/implicits-var-exp.check | 1 +
test/scaladoc/run/implicits-var-exp.scala | 56 +
test/scaladoc/run/links.check | 1 +
test/scaladoc/run/links.scala | 32 +
test/scaladoc/run/package-object.check | 4 +
test/scaladoc/run/package-object.scala | 17 +
test/scaladoc/run/t4922.check | 1 +
test/scaladoc/run/t4922.scala | 32 +
test/scaladoc/run/t7767.check | 1 +
test/scaladoc/run/t7767.scala | 18 +
test/scaladoc/run/usecase-var-expansion.check | 4 +
test/scaladoc/run/usecase-var-expansion.scala | 26 +
test/scaladoc/scala/IndexScriptTest.scala | 52 -
test/scaladoc/scala/IndexTest.scala | 82 -
test/scaladoc/scala/html/HtmlFactoryTest.scala | 375 -
test/scaladoc/scala/model/CommentFactoryTest.scala | 155 -
test/scaladoc/scalacheck/CommentFactoryTest.scala | 169 +
test/scaladoc/scalacheck/HtmlFactoryTest.flags | 1 +
test/scaladoc/scalacheck/HtmlFactoryTest.scala | 697 +
test/scaladoc/scalacheck/IndexScriptTest.scala | 52 +
test/scaladoc/scalacheck/IndexTest.scala | 82 +
test/script-tests/README | 8 +
.../jar-manifest/resources/MANIFEST.MF | 3 +
test/script-tests/jar-manifest/run-test | 41 +
test/script-tests/jar-manifest/run-test.check | 30 +
test/script-tests/jar-manifest/src/jar-test.scala | 34 +
test/support/annotations/mkAnnotationsJar.sh | 0
tools/abspath | 0
tools/binary-repo-lib.sh | 89 +-
tools/buildcp | 11 +
tools/class-dump | 6 +
tools/cleanup-commit | 130 +
tools/codegen | 2 +-
tools/codegen-anyvals | 0
tools/cpof | 28 +-
tools/deploy-local-maven-snapshot | 0
tools/diffPickled | 0
tools/epfl-build | 0
tools/epfl-build-2.x.x | 35 -
tools/epfl-publish | 2 +-
tools/get-scala-commit-date | 0
tools/get-scala-commit-date.bat | 29 +-
tools/get-scala-commit-sha | 0
tools/get-scala-commit-sha.bat | 26 +-
tools/jar-dump | 4 +
tools/locker_scala | 8 +-
tools/locker_scalac | 8 +-
tools/lockercp | 8 +-
tools/make-release-notes | 0
tools/new-starr | 6 +
tools/packcp | 4 +-
tools/pathResolver | 0
tools/profile_scala | 17 -
tools/profile_scalac | 25 -
tools/push.jar | Bin 9096808 -> 0 bytes
tools/push.jar.desired.sha1 | 2 +-
tools/quick_scala | 6 +
tools/quick_scalac | 6 +
tools/quickcp | 8 +-
tools/remotetest | 0
tools/rm-orphan-checkfiles | 18 +
tools/scaladoc-compare | 50 +
tools/scalawhich | 4 -
tools/scmp | 4 -
tools/showPickled | 0
tools/stability-test.sh | 29 +
tools/starr_scala | 6 +
tools/starr_scalac | 6 +
tools/starrcp | 4 +-
tools/strapcp | 12 +-
tools/test-renamer | 82 +
tools/tokens | 4 -
tools/truncate | 7 -
tools/updatescalacheck | 0
tools/verify-jar-cache | 33 +
7932 files changed, 288586 insertions(+), 105602 deletions(-)
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..958b0b9
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,26 @@
+# These files are text and should be normalized (convert crlf => lf)
+*.c text
+*.check text
+*.css text
+*.html text
+*.java text
+*.js text
+*.sbt text
+*.scala text
+*.sh text
+*.txt text
+*.xml text
+
+# Windows-specific files get windows endings
+*.bat eol=crlf
+*.cmd eol=crlf
+*-windows.tmpl eol=crlf
+
+# Some binary file types for completeness
+# (binary is a macro for -text -diff)
+*.dll binary
+*.gif binary
+*.jpg binary
+*.png binary
+*.class -text diff=class
+*.jar -text diff=jar
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..4329fce
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,49 @@
+#
+# Are you tempted to edit this file?
+#
+# First consider if the changes make sense for all,
+# or if they are specific to your workflow/system.
+# If it is the latter, you can augment this list with
+# entries in .git/info/excludes
+#
+# see also test/files/.gitignore
+#
+
+#
+# JARs aren't checked in, they are fetched by Ant / pull_binary_libs.sh
+#
+# We could be more concise with /lib/**/*.jar but that assumes
+# a late-model git.
+#
+/lib/ant/*.jar
+/lib/*.jar
+/test/files/codelib/*.jar
+/test/files/lib/*.jar
+/test/files/speclib/instrumented.jar
+/tools/*.jar
+
+# Developer specific Ant properties
+/build.properties
+/buildcharacter.properties
+
+# target directories for ant build
+/build/
+/dists/
+
+# other
+/out/
+/bin/
+/sandbox/
+
+# eclipse, intellij
+/.classpath
+/.project
+/src/intellij/*.iml
+/src/intellij/*.ipr
+/src/intellij/*.iws
+/.cache
+/.idea
+/.settings
+
+# Standard symbolic link to build/quick/bin
+/qbin
diff --git a/.mailmap b/.mailmap
new file mode 100644
index 0000000..49d5dc6
--- /dev/null
+++ b/.mailmap
@@ -0,0 +1,25 @@
+Aleksandar Prokopec <aleksandar at aleksandar-Latitude-E6500.(none)>
+Aleksandar Prokopec <aleksandar at htpc.(none)>
+Aleksandar Prokopec <aleksandar at htpc-axel22.(none)>
+Aleksandar Prokopec <aleksandar at lampmac14.epfl.ch>
+Aleksandar Prokopec <aleksandar.prokopec at epfl.ch>
+Antonio Cunei <antonio.cunei at typesafe.com>
+Caoyuan Deng <dcaoyuan at epfl.ch>
+Chris Hodapp <clhodapp1 at gmail.com>
+Chris James <chrisJames at epfl.ch>
+Christopher Vogt <vogt at epfl.ch>
+Damien Obrist <damien.obrist at gmail.com>
+Daniel C. Sobral <dcs at dcs-132-CK-NF79.(none)>
+Ilya Sergei <ilyas at epfl.ch>
+Ingo Maier <ingoem at gmail.com>
+Kenji Yoshida <6b656e6a69 at gmail.com>
+Luc Bourlier <skyluc at epfl.ch>
+Martin Odersky <odersky at gamil.com>
+Nada Amin <amin at epfl.ch>
+Nada Amin <nada.amin at epfl.ch>
+Natallie Baikevich <lu-a-jalla at ya.ru>
+Pavel Pavlov <pavel.e.pavlov at gmail.com>
+Philipp Haller <philipp.haller at typesafe.com>
+Roland Kuhn <rk at rkuhn.info>
+Rüdiger Klaehn <rklaehn at gmail.com>
+Stéphane Micheloud <michelou at epfl.ch>
diff --git a/.project b/.project
deleted file mode 100644
index b1f7386..0000000
--- a/.project
+++ /dev/null
@@ -1,29 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
- <name>scala</name>
- <comment></comment>
- <projects>
- </projects>
- <buildSpec>
- <buildCommand>
- <name>org.eclipse.pde.ManifestBuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.pde.SchemaBuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.scala-ide.sdt.core.scalabuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- </buildSpec>
- <natures>
- <nature>org.scala-ide.sdt.core.scalanature</nature>
- <nature>org.eclipse.pde.PluginNature</nature>
- <nature>org.eclipse.jdt.core.javanature</nature>
- </natures>
-</projectDescription>
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..2451a52
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,66 @@
+# Scala Project & Developer Guidelines
+
+These guidelines are meant to be a living document that should be changed and adapted as needed. We encourage changes that make it easier to achieve our goals in an efficient way.
+
+## General Workflow
+
+This is the process for committing code to the Scala project. There are of course exceptions to these rules, for example minor changes to comments and documentation, fixing a broken build etc.
+
+1. Make sure you have signed the [Scala CLA](http://typesafe.com/contribute/cla/scala), if not, sign it.
+2. Before starting to work on a feature or a fix, it's good practice to ensure that:
+ 1. There is a ticket for your work in the project's issue tracker. If not, create it first (perhaps given a thumbs up from the scala-internals mailing list first).
+ 2. The ticket has been discussed and prioritized by the team.
+3. You should always perform your work in its own Git branch. The branch should be given a descriptive name that explains its intent. Some teams also like adding the ticket number and/or the [GitHub](http://github.com) user ID to the branch name, these details is up to each of the individual teams. (See below for more details on branch naming.)
+4. When the feature or fix is completed you should open a [Pull Request](https://help.github.com/articles/using-pull-requests) on GitHub.
+5. The Pull Request should be reviewed by other maintainers (as many as feasible/practical). Note that a reviewer can also be an outside contributor-- members of Typesafe and independent contributors are encouraged to participate in the review process. It is not a closed process. Please try to avoid conflict of interest -- the spirit of the review process is to evenly distribute the understanding of our code base across its maintainers as well as to load balance quality assurance. Assign [...]
+6. After the review, you should resolve issues brought up by the reviewers as needed (pushing a new commit to address reviewers' comments), iterating until the reviewers give their thumbs up, the "LGTM" (acronym for "Looks Good To Me").
+7. Once the code has passed review the Pull Request can be merged into the distribution.
+
+## Pull Request Requirements
+
+First, please have a look at and follow the [Pull Request Policy](https://github.com/scala/scala/wiki/Pull-Request-Policy) for guidelines on submitting a pull request to the Scala project.
+
+In order for a Pull Request to be considered, it has to meet these requirements:
+
+1. Live up to the current code standard:
+ - Not violate [DRY](http://programmer.97things.oreilly.com/wiki/index.php/Don%27t_Repeat_Yourself).
+ - [Boy Scout Rule](http://programmer.97things.oreilly.com/wiki/index.php/The_Boy_Scout_Rule) should be applied.
+2. Tests are of paramount importance.
+3. The code must be well documented in the project's standard documentation format (see the ‘Documentation’ section below).
+
+If *all* of these requirements are not met then the code should **not** be merged into the distribution, and need not even be reviewed.
+
+## Documentation
+
+All contributed code should come accompanied with documentation. Pull requests containing undocumented code will not be accepted. Both user-facing Scaladoc comments, as well as committer-facing internal documentation (i.e. important design decisions that other maintainers should know about should be placed inline with line comments `//`) should be accompanying all contributed code where possible.
+
+
+## Work In Progress
+
+It is ok to work on a public feature branch in the GitHub repository. Something that can sometimes be useful for early feedback etc. If so, then it is preferable to name the branch accordingly. This can be done by either prefixing the name with ``wip-`` as in ‘Work In Progress’, or use hierarchical names like ``wip/..``, ``feature/..`` or ``topic/..``. Either way is fine as long as it is clear that it is work in progress and not ready for merge. This work can temporarily have a lower sta [...]
+
+Also, to facilitate both well-formed commits and working together, the ``wip`` and ``feature``/``topic`` identifiers also have special meaning. Any branch labelled with ``wip`` is considered “git-unstable” and may be rebased and have its history rewritten. Any branch with ``feature``/``topic`` in the name is considered “stable” enough for others to depend on when a group is working on a feature.
+
+## Creating Commits And Writing Commit Messages
+
+Follow these guidelines when creating public commits and writing commit messages.
+
+1. If your work spans multiple local commits (for example; if you do safe point commits while working in a feature branch or work in a branch for long time doing merges/rebases etc.) then please do not commit it all but rewrite the history by squashing the commits into one large commit which is accompanied by a detailed commit message for (as discussed in the following sections). For more info, see the article: [Git Workflow](http://sandofsky.com/blog/git-workflow.html). Additionally, ev [...]
+2. The first line should be a descriptive sentence about what the commit is doing. It should be possible to fully understand what the commit does by just reading this single line. It is **not ok** to only list the ticket number, type "minor fix" or similar. If the commit has a corresponding ticket, include a reference to the ticket number, prefixed with "SI-", at the beginning of the first line followed by the title of the ticket, assuming that it aptly and concisely summarizes the commi [...]
+3. Following the single line description (ideally no more than 70 characters long) should be a blank line followed by an enumerated list with the details of the commit.
+4. Add keywords for your commit (depending on the degree of automation we reach, the list may change over time):
+ * ``Review by @githubuser`` - will notify the reviewer via GitHub. Everyone is encouraged to give feedback, however. (Remember that @-mentions will result in notifications also when pushing to a WIP branch, so please only include this in your commit message when you're ready for your pull request to be reviewed. Alternatively, you may request a review in the pull request's description.)
+ * ``Fix/Fixing/Fixes/Close/Closing/Refs #ticket`` - if you want to mark the ticket as fixed in the issue tracker (Assembla understands this).
+ * ``backport to _branch name_`` - if the fix needs to be cherry-picked to another branch (like 2.9.x, 2.10.x, etc)
+
+Example:
+
+ SI-4032 Implicit conversion visibility affected by presence of "this"
+
+ - Details 1
+ - Details 2
+ - Details 3
+
+## The Scala Improvement Process
+A new language feature requires a SIP (Scala Improvement Process) proposal. Note that significant additions to the standard library are also considered candidates for a SIP proposal.
+For more details on submitting SIPs, see [how to submit a SIP](http://docs.scala-lang.org/sips/sip-submission.html).
diff --git a/META-INF/MANIFEST.MF b/META-INF/MANIFEST.MF
index 83c20b5..4d5573d 100644
--- a/META-INF/MANIFEST.MF
+++ b/META-INF/MANIFEST.MF
@@ -2,7 +2,7 @@ Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Scala Distribution
Bundle-SymbolicName: org.scala-ide.scala.compiler;singleton:=true
-Bundle-Version: 2.9.2.alpha
+Bundle-Version: 2.10.0.alpha
Eclipse-LazyStart: true
Bundle-ClassPath:
.,
@@ -40,7 +40,13 @@ Export-Package:
scala.tools.nsc.util,
scala.tools.util,
scala.reflect.internal,
+ scala.reflect.internal.pickling,
scala.reflect.internal.settings,
+ scala.reflect.internal.util,
+ scala.reflect.macros,
+ scala.reflect.runtime,
+ scala.reflect.internal.transform,
+ scala.reflect.api,
ch.epfl.lamp.compiler.msil,
ch.epfl.lamp.compiler.msil.emit,
ch.epfl.lamp.compiler.msil.util,
diff --git a/README b/README
deleted file mode 100644
index 69b0c75..0000000
--- a/README
+++ /dev/null
@@ -1,202 +0,0 @@
-================================================================================
- THE SCALA REPOSITORY
- Structure and build system
-================================================================================
-
-This document describes the Scala core (core library and compiler) repository
-and how to build it. For information about Scala as a language, you can visit
-the web site http://www.scala-lang.org/
-
-Part I. The repository layout
---------------------------------------------------------------------------------
-
-Follows the file layout of the Scala repository. Files marked with a † are not
-part of the Subversion repository but are either automatically generated by the
-build script or user-created if needed. This is not a complete listing.
-
-scala/
- bin/ Developer utilities.
- build/ † Temporary staging area for build products.
- build.excludes † An optional build configuration file.
- build.number The version number of the current distribution.
- build.properties † An optional build configuration file.
- build.xml The main Ant build script.
- dist/ † The destination folder of Scala distributions.
- docs/ Documentation of Scala. More in its own module.
- development/ Developer documentation.
- examples/ Scala source code examples.
- lib/ Pre-compiled libraries for the build.
- fjbg.jar The Java byte-code generation library.
- scala-compiler.jar The stable reference version (aka 'starr')
- of the Scala compiler
- scala-library.jar The stable reference version (aka 'starr')
- of the Scala library.
- scala-library-src.jar A snapshot of the source code which was used
- to build starr.
- ant/ Support libraries for the build tool.
- ant-contrib.jar Provides additional features for Ant
- vizant.jar Provides DOT graph generation for Ant
- project/ All that is needed to use SBT for building
- boot/ † SBT storage (for download,...)
- build/ The class files that defines the build project
- build.properties Various SBT project variables
- README The file you are currently reading.
- sandbox/ † A folder to test code etc.
- src/ All the source files of Scala.
- actors/ The sources of the Actor library.
- compiler/ The sources of the Scala compiler.
- library/ The sources of the core Scala library.
- swing/ The sources of the Swing library.
- target/ † Temporary staging area for build products from the SBT script.
- test/ The Scala test suite.
-
-Part II. Building Scala with SABBUS
---------------------------------------------------------------------------------
-
-SABBUS is the name of the Ant build script used to compile Scala. It is mostly
-automated and takes care of managing the dependencies.
-
-LAYERS:
-
-In order to guarantee the bootstrapping of the Scala compiler, SABBUS builds
-Scala in layers. Each layer is a complete compiled Scala compiler and library.
-A superior layer is always compiled by the layer just below it. Here is a short
-description of the four layers that SABBUS uses, from bottom to top:
-
-'starr': the stable reference Scala release which is shared by all the
-developers. It is found in the repository as 'lib/scala-compiler.jar' and
-'lib/scala-library.jar'. Any committable source code must be compiled directly
-by starr to guarantee the bootstrapping of the compiler.
-
-'locker': the local reference which is compiled by starr and is the work
-compiler in a typical development cycle. When it has been built once, it is
-“frozen” in this state. Updating it to fit the current source code must be
-explicitly required (see below).
-
-'quick': the layer which is incrementally built when testing changes in the
-compiler or library. This is considered an actual new version when locker is
-up-to-date in relation to the source code.
-
-'strap': a test layer used to check stability of the build.
-
-DEPENDANT CHANGES:
-
-SABBUS compiles, for each layer, the Scala library first and the compiler next.
-That means that any changes in the library can immediately be used in the
-compiler without an intermediate build. On the other hand, if building the
-library requires changes in the compiler, a new locker must be built if
-bootstrapping is still possible, or a new starr if it is not.
-
-REQUIREMENTS FOR SABBUS:
-
-The Scala build system is based on Apache Ant. Most required pre-compiled
-libraries are part of the repository (in 'lib/'). The following however is
-assumed to be installed on the build machine:
- - A Java runtime environment (JRE) or SDK 1.6 or above.
- - Apache Ant version 1.7.0 or above.
-
-
-Part III. Using SBT as an alternative
---------------------------------------------------------------------------------
-It is now possible to use SBT as an alternative to build Scala. This is still in beta stage.
-More informations and usage instruction at http://lampsvn.epfl.ch/trac/scala/wiki/SBT
-
-Part IV. Common use-cases
---------------------------------------------------------------------------------
-
-'ant -p'
- Prints out information about the commonly used ant targets. The interested
- developer can find the rest in the XML files.
-
-'ant' or 'ant build'
- A quick compilation (to quick) of your changes using the locker compiler.
- - This will rebuild all quick if locker changed.
- - This will also rebuild locker if starr changed.
-
-'ln -s build/quick/bin qbin' (once)
-'ant && qbin/scalac -d sandbox sandbox/test.scala && qbin/scala -cp sandbox Test'
- Incrementally builds quick, and then uses it to compile and run the file
- 'sandbox/test.scala'. This is a typical debug cycle.
-
-'ant replacelocker'
- "unfreezes" locker by updating it to match the current source code.
- - This will delete quick so as not to mix classes compiled with different
- versions of locker.
-
-'ant test'
- Tests that your code is working and fit to be committed.
- - Runs the test suite and bootstrapping test on quick.
- - You can run the suite only (skipping strap) with 'ant test.suite'.
-
-'ant docs'
- Generates the HTML documentation for the library from the sources using the
- scaladoc tool in quick. Note: on most machines this requires more heap than
- is allocate by default. You can adjust the parameters with ANT_OPTS.
- Example command line:
- ANT_OPTS="-Xms512M -Xmx2048M -Xss1M -XX:MaxPermSize=128M" ant docs
-
-'ant dist'
- Builds a distribution.
- - Rebuilds locker from scratch (to make sure it bootstraps).
- - Builds everything twice more and compares bit-to-bit the two builds (to
- make sure it is stable).
- - Runs the test suite (and refuses to build a distribution if it fails).
- - Creates a local distribution in 'dists/latest'.
-
-'ant clean'
- Removes all temporary build files (locker is preserved).
-
-'ant locker.clean'
- Removes all build files.
-
-'ant all.clean'
- Removes all build files (including locker) and all distributions.
-
-Many of these targets offer a variant which runs with -optimise enabled.
-Optimized targets include build-opt, test-opt, dist-opt, fastdist-opt,
-replacestarr-opt, replacelocker-opt, and distpack-opt.
-
-Part V. Contributing to Scala
---------------------------------------------------------------------------------
-
-If you wish to contribute, you can find all of the necessary information on
-the official Scala website: www.scala-lang.org.
-
-Specifically, you can subscribe to the Scala mailing lists, read all of the
-available documentation, and browse the live SVN repository. You can contact
-the Scala team by sending us a message on one of the mailing lists, or by using
-the available contact form.
-
-In detail:
-
-- Scala website (links to everything else):
- http://www.scala-lang.org
-
-- Scala documentation:
- http://www.scala-lang.org/node/197
-
-- Scala mailing lists:
- http://www.scala-lang.org/node/199
-
-- Scala Trac bug and issue tracker:
- https://lampsvn.epfl.ch/trac/scala
-
-- Scala live SVN source tree:
- http://www.scala-lang.org/node/213
-
-- Building Scala from the source code:
- http://www.scala-lang.org/node/217
-
-- Contact form:
- http://www.scala-lang.org/node/188
-
-
-If you are interested in contributing code, we ask you to complete and submit
-to us the Scala Contributor License Agreement, which allows us to ensure that
-all code submitted to the project is unencumbered by copyrights or patents.
-The form is available at:
-http://www.scala-lang.org/sites/default/files/contributor_agreement.pdf
-
-Thank you!
-The Scala Team
-
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..4ed283d
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,207 @@
+################################################################################
+ THE SCALA REPOSITORY
+################################################################################
+
+This document describes the Scala core (core library and compiler) repository
+and how to build it. For information about Scala as a language, you can visit
+the web site http://www.scala-lang.org/
+
+Part I. The repository layout
+--------------------------------------------------------------------------------
+
+Follows the file layout of the Scala repository. Files marked with a † are not
+part of the repository but are either automatically generated by the
+build script or user-created if needed. This is not a complete listing. ::
+ scala/
+ +--build/ Build products output directory for ant.
+ +--build.xml The main Ant build script.
+ +--dist/ The destination folder for Scala distributions.
+ +--docs/ Documentation and sample code.
+ +--lib/ Pre-compiled libraries for the build.
+ | +--fjbg.jar The Java byte-code generation library.
+ | +--scala-compiler.jar The stable reference ('starr') compiler jar
+ | +--scala-library.jar The stable reference ('starr') library jar
+ | +--scala-library-src.jar A snapshot of the source used to build starr.
+ | ---ant/ Support libraries for ant.
+ +--pull-binary-libs.sh Pulls binary artifacts from remote repository.
+ +--push-binary-libs.sh Pushes new binary artifacts and creates sha.
+ +--README.rst The file you are currently reading.
+ +--src/ All the source files of Scala.
+ | +--actors/ The sources of the Actor library.
+ | +--compiler/ The sources of the Scala compiler.
+ | +--library/ The sources of the core Scala library.
+ | ---swing/ The sources of the Swing library.
+ +--target/ † Build products output directory for sbt.
+ +--test/ The Scala test suite.
+ ---tools/ Developer utilities.
+
+
+
+Part II. Building Scala with SABBUS
+--------------------------------------------------------------------------------
+
+SABBUS is the name of the Ant build script used to compile Scala. It is mostly
+automated and takes care of managing the dependencies.
+
+^^^^^^^^^^^^^^^^^^^^^^^^
+ LAYERS:
+^^^^^^^^^^^^^^^^^^^^^^^^
+In order to guarantee the bootstrapping of the Scala compiler, SABBUS builds
+Scala in layers. Each layer is a complete compiled Scala compiler and library.
+A superior layer is always compiled by the layer just below it. Here is a short
+description of the four layers that SABBUS uses, from bottom to top:
+
+- ``starr``: the stable reference Scala release which is shared by all the
+ developers. It is found in the repository as 'lib/scala-compiler.jar' and
+ 'lib/scala-library.jar'. Any committable source code must be compiled directly
+ by starr to guarantee the bootstrapping of the compiler.
+
+- ``locker``: the local reference which is compiled by starr and is the work
+ compiler in a typical development cycle. When it has been built once, it is
+ “frozen” in this state. Updating it to fit the current source code must be
+ explicitly requested (see below).
+
+- ``quick``: the layer which is incrementally built when testing changes in the
+ compiler or library. This is considered an actual new version when locker is
+ up-to-date in relation to the source code.
+
+- ``strap``: a test layer used to check stability of the build.
+
+^^^^^^^^^^^^^^^^^^^^^^^^
+ DEPENDENT CHANGES:
+^^^^^^^^^^^^^^^^^^^^^^^^
+SABBUS compiles, for each layer, the Scala library first and the compiler next.
+That means that any changes in the library can immediately be used in the
+compiler without an intermediate build. On the other hand, if building the
+library requires changes in the compiler, a new locker must be built if
+bootstrapping is still possible, or a new starr if it is not.
+
+
+^^^^^^^^^^^^^^^^^^^^^^^^
+REQUIREMENTS FOR SABBUS:
+^^^^^^^^^^^^^^^^^^^^^^^^
+The Scala build system is based on Apache Ant. Most required pre-compiled
+libraries are part of the repository (in 'lib/'). The following however is
+assumed to be installed on the build machine:
+
+- A Java runtime environment (JRE) or SDK 1.6 or above.
+- Apache Ant version 1.7.0 or above.
+- bash (via cygwin for windows)
+- curl
+
+
+Part III. Common use-cases
+--------------------------------------------------------------------------------
+- ``./pull-binary-libs.sh``
+
+ Downloads all binary artifacts associated with this commit. This requires
+ internet access to http://typesafe.artifactoryonline.com/typesafe.
+
+- ``ant -p``
+
+ Prints out information about the commonly used ant targets. The interested
+ developer can find the rest in the XML files.
+
+- ``ant`` or ``ant build``
+
+ A quick compilation (to quick) of your changes using the locker compiler.
+
+ - This will rebuild all quick if locker changed.
+ - This will also rebuild locker if starr changed.
+
+- ``ln -s build/quick/bin qbin`` (once):
+- ``ant && qbin/scalac -d sandbox sandbox/test.scala && qbin/scala -cp sandbox Test``
+
+ Incrementally builds quick, and then uses it to compile and run the file
+ ``sandbox/test.scala``. This is a typical debug cycle.
+
+- ``ant replacelocker``
+
+ "unfreezes" locker by updating it to match the current source code.
+
+ - This will delete quick so as not to mix classes compiled with different
+ versions of locker.
+
+- ``ant test``
+
+ Tests that your code is working and fit to be committed.
+
+ - Runs the test suite and bootstrapping test on quick.
+ - You can run the suite only (skipping strap) with 'ant test.suite'.
+
+- ``ant docs``
+ Generates the HTML documentation for the library from the sources using the
+ scaladoc tool in quick. Note: on most machines this requires more heap than
+ is allocated by default. You can adjust the parameters with ANT_OPTS.
+ Example command line::
+ ANT_OPTS="-Xms512M -Xmx2048M -Xss1M -XX:MaxPermSize=128M" ant docs
+
+- ``ant dist``
+
+ Builds a distribution.
+
+ - Rebuilds locker from scratch (to make sure it bootstraps).
+ - Builds everything twice more and compares bit-to-bit the two builds (to
+ make sure it is stable).
+ - Runs the test suite (and refuses to build a distribution if it fails).
+ - Creates a local distribution in 'dists/latest'.
+
+- ``ant clean``
+
+ Removes all temporary build files (locker is preserved).
+
+- ``ant locker.clean``
+
+ Removes all build files.
+
+- ``ant all.clean``
+
+ Removes all build files (including locker) and all distributions.
+
+Many of these targets offer a variant which runs with -optimise enabled.
+Optimized targets include build-opt, test-opt, dist-opt, fastdist-opt,
+replacestarr-opt, replacelocker-opt, and distpack-opt.
+
+Part IV. Contributing to Scala
+--------------------------------------------------------------------------------
+
+If you wish to contribute, you can find all of the necessary information on
+the official Scala website: www.scala-lang.org.
+
+Specifically, you can subscribe to the Scala mailing lists, read all of the
+available documentation, and browse the live github repository. You can contact
+the Scala team by sending us a message on one of the mailing lists, or by using
+the available contact form.
+
+In detail:
+
+- Scala website (links to everything else):
+ http://www.scala-lang.org
+
+- Scala documentation:
+ http://docs.scala-lang.org
+
+- Scala mailing lists:
+ http://www.scala-lang.org/node/199
+
+- Scala bug and issue tracker:
+ https://issues.scala-lang.org
+
+- Scala live git source tree:
+ http://github.com/scala/scala
+
+If you are interested in contributing code, we ask you to sign the
+[Scala Contributor License Agreement](http://typesafe.com/contribute/cla/scala),
+which allows us to ensure that all code submitted to the project is
+unencumbered by copyrights or patents.
+
+Before submitting a pull-request, please make sure you have followed the guidelines
+outlined in our `Pull Request Policy <https://github.com/scala/scala/wiki/Pull-Request-Policy>`_.
+
+------------------
+
+
+
+Thank you!
+
+The Scala Team
diff --git a/bincompat-backward.whitelist.conf b/bincompat-backward.whitelist.conf
new file mode 100644
index 0000000..2d3c203
--- /dev/null
+++ b/bincompat-backward.whitelist.conf
@@ -0,0 +1,105 @@
+filter {
+ packages = [
+ "scala.reflect.internal"
+ ]
+ problems=[
+ # Scala library
+ {
+ # can only be called from Stream::distinct, which cannot itself be inlined, so distinct is the only feasible call-site
+ matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$4"
+ problemName=MissingMethodProblem
+ },
+ {
+ # can only be called from Stream::distinct, which cannot itself be inlined, so distinct is the only feasible call-site
+ matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$5"
+ problemName=MissingMethodProblem
+ },
+ # {
+ # # private[scala]
+ # matchName="scala.collection.immutable.ListSerializeStart$"
+ # problemName=MissingClassProblem
+ # },
+ # {
+ # # private[scala]
+ # matchName="scala.collection.immutable.ListSerializeStart"
+ # problemName=MissingClassProblem
+ # },
+ {
+ # private nested class became private top-level class to fix SI-7018
+ matchName="scala.reflect.macros.Attachments$NonemptyAttachments"
+ problemName=MissingClassProblem
+ },
+
+ # scala.reflect.runtime
+ # {
+ # matchName="scala.reflect.runtime.JavaUniverse.createClassModule"
+ # problemName=MissingMethodProblem
+ # },
+ # {
+ # matchName="scala.reflect.runtime.JavaUniverse.initClassModule"
+ # problemName=MissingMethodProblem
+ # },
+ # {
+ # matchName="scala.reflect.runtime.SymbolLoaders.createClassModule"
+ # problemName=MissingMethodProblem
+ # },
+ # {
+ # matchName="scala.reflect.runtime.SymbolLoaders.initClassModule"
+ # problemName=MissingMethodProblem
+ # },
+ # {
+ # matchName="scala.reflect.runtime.SymbolLoaders.initClassAndModule"
+ # problemName=MissingMethodProblem
+ # },
+ # {
+ # matchName="scala.reflect.runtime.SymbolLoaders.initAndEnterClassAndModule"
+ # problemName=MissingMethodProblem
+ # },
+ # {
+ # matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$jclassAsScala"
+ # problemName=MissingMethodProblem
+ # },
+ # {
+ # matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$jclassAsScala"
+ # problemName=IncompatibleResultTypeProblem
+ # },
+
+ # scala.concurrent.forkjoin (SI-7442)
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinTask.internalGetCompleter"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinPool.registerWorker"
+ problemName=IncompatibleMethTypeProblem
+ },
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinPool.nextWorkerName"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinPool.signalWork"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinPool.idlePerActive"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinPool.tryCompensate"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinPool.helpJoinOnce"
+ problemName=IncompatibleResultTypeProblem
+ },
+ {
+ matchName="scala.reflect.runtime.JavaUniverse.isInvalidClassName"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.SymbolLoaders.isInvalidClassName"
+ problemName=MissingMethodProblem
+ }
+ ]
+}
diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf
new file mode 100644
index 0000000..087fa07
--- /dev/null
+++ b/bincompat-forward.whitelist.conf
@@ -0,0 +1,178 @@
+filter {
+ packages = [
+ "scala.reflect.internal"
+ ]
+ problems=[
+ # rework d526f8bd74 to duplicate tailImpl as a private method
+ # {
+ # matchName="scala.collection.mutable.MutableList.tailImpl"
+ # problemName=MissingMethodProblem
+ # },
+ {
+ # can only be called from Stream::distinct, which cannot itself be inlined, so distinct is the only feasible call-site
+ matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$6"
+ problemName=MissingMethodProblem
+ },
+ {
+ # can only be called from Stream::distinct, which cannot itself be inlined, so distinct is the only feasible call-site
+ matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$4"
+ problemName=MissingMethodProblem
+ },
+ {
+ # can only be called from Stream::distinct, which cannot itself be inlined, so distinct is the only feasible call-site
+ matchName="scala.collection.immutable.Stream.scala$collection$immutable$Stream$$loop$5"
+ problemName=MissingMethodProblem
+ },
+ # TODO: revert a557a97360: bridge method appeared because result is now Int but the super-method's result type erases to Object
+ # {
+ # matchName="scala.collection.immutable.Range.head"
+ # problemName=IncompatibleResultTypeProblem
+ # },
+ # revert 0b92073a38 2aa66bec86: SI-4664 [Make scala.util.Random Serializable] Add test case
+ # {
+ # matchName="scala.util.Random"
+ # problemName=MissingTypesProblem
+ # },
+ # {
+ # matchName="scala.util.Random$"
+ # problemName=MissingTypesProblem
+ # },
+ # {
+ # # private[concurrent]
+ # matchName="scala.concurrent.BatchingExecutor$Batch"
+ # problemName=MissingClassProblem
+ # },
+ # {
+ # # private[concurrent]
+ # matchName="scala.concurrent.BatchingExecutor"
+ # problemName=MissingClassProblem
+ # },
+ # {
+ # # private[concurrent]
+ # matchName="scala.concurrent.impl.ExecutionContextImpl$AdaptedForkJoinTask"
+ # problemName=MissingClassProblem
+ # },
+ # {
+ # # private[concurrent]
+ # matchName="scala.concurrent.impl.ExecutionContextImpl.scala$concurrent$impl$ExecutionContextImpl$$uncaughtExceptionHandler"
+ # problemName=MissingMethodProblem
+ # },
+ {
+ # private nested class became private top-level class to fix SI-7018
+ matchName="scala.reflect.macros.NonemptyAttachments"
+ problemName=MissingClassProblem
+ },
+
+ # scala.reflect.runtime
+ # {
+ # matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$jclassAsScala"
+ # problemName=IncompatibleResultTypeProblem
+ # },
+ # {
+ # matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$jclassAsScala1"
+ # problemName=MissingMethodProblem
+ # },
+ # {
+ # matchName="scala.reflect.runtime.SymbolLoaders.initClassAndModule"
+ # problemName=MissingMethodProblem
+ # },
+ # {
+ # matchName="scala.reflect.runtime.SymbolLoaders.initAndEnterClassAndModule"
+ # problemName=MissingMethodProblem
+ # },
+ # {
+ # matchName="scala.reflect.runtime.SymbolLoaders.createClassModule"
+ # problemName=MissingMethodProblem
+ # },
+ # {
+ # matchName="scala.reflect.runtime.SymbolLoaders.initClassModule"
+ # problemName=MissingMethodProblem
+ # },
+ # {
+ # matchName="scala.reflect.runtime.JavaUniverse"
+ # problemName=MissingTypesProblem
+ # },
+ # {
+ # matchName="scala.reflect.runtime.JavaUniverse.initClassAndModule"
+ # problemName=MissingMethodProblem
+ # },
+ # {
+ # matchName="scala.reflect.runtime.JavaUniverse.initAndEnterClassAndModule"
+ # problemName=MissingMethodProblem
+ # },
+
+ # scala.concurrent.forkjoin (SI-7442)
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinPool.registerWorker"
+ problemName=IncompatibleMethTypeProblem
+ },
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinPool.externalPush"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinPool.this"
+ problemName=IncompatibleMethTypeProblem
+ },
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinPool.signalWork"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinPool.awaitQuiescence"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinPool.tryCompensate"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinTask.recordExceptionalCompletion"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinTask.internalPropagateException"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.concurrent.forkjoin.ForkJoinPool.helpJoinOnce"
+ problemName=IncompatibleResultTypeProblem
+ },
+ {
+ matchName="scala.concurrent.impl.Promise$CompletionLatch"
+ problemName=MissingClassProblem
+ },
+ {
+ matchName="scala.concurrent.impl.Promise#DefaultPromise.linkRootOf"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.concurrent.impl.Promise#DefaultPromise.scala$concurrent$impl$Promise$DefaultPromise$$dispatchOrAddCallback"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.JavaMirrors#JavaMirror#FromJavaClassCompleter.scala$reflect$runtime$JavaMirrors$JavaMirror$FromJavaClassCompleter$$enterEmptyCtorIfNecessary$1"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.ReflectionUtils.scalacShouldntLoadClass"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.ReflectionUtils.scalacShouldntLoadClassfile"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.ReflectionUtils.isTraitImplementation"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.JavaMirrors#JavaMirror.scala$reflect$runtime$JavaMirrors$JavaMirror$$PackageAndClassPattern"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.runtime.SymbolLoaders.isInvalidClassName"
+ problemName=MissingMethodProblem
+ }
+ ]
+}
diff --git a/build.examples.xml b/build.examples.xml
deleted file mode 100644
index 62210d5..0000000
--- a/build.examples.xml
+++ /dev/null
@@ -1,283 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scala-examples" default="build">
-
- <description>
- I am SABBUS for ${ant.project.name}, the build system for the Scala compiler
- and core library. Please check the 'docs/README' file for more information
- about me.
- </description>
-
- <echo level="info" message="Running SABBUS for ${ant.project.name}..."/>
-
-<!-- ===========================================================================
-PROPERTIES
-============================================================================ -->
-
- <property environment="env"/>
-
- <!-- Prevents system classpath from being used -->
- <property name="build.sysclasspath" value="ignore"/>
-
- <!-- Base properties -->
- <property name="src.dir" value="${basedir}/docs"/>
- <property name="src.examples.dir" value="${src.dir}/examples"/>
- <property name="lib.dir" value="${basedir}/lib"/>
- <!-- User properties -->
- <property file="${basedir}/build.examples.properties"/>
- <!-- Location of pre-compiled libraries properties -->
- <property name="scala.lib.jar" value="${lib.dir}/scala-library.jar"/>
- <property name="scala.comp.jar" value="${lib.dir}/scala-compiler.jar"/>
- <property name="fjbg.name" value="fjbg.jar"/>
- <property name="fjbg.jar" value="${lib.dir}/${fjbg.name}"/>
- <property name="msil.name" value="msil.jar"/>
- <property name="msil.jar" value="${lib.dir}/${msil.name}"/>
- <property name="ant.jar" value="${ant.home}/lib/ant.jar"/>
- <property name="ant-contrib.jar" value="${lib.dir}/ant/ant-contrib.jar"/>
- <!-- -->
- <property name="build.dir" value="${basedir}/build"/>
- <property name="build.examples.dir" value="${build.dir}/examples"/>
- <!-- tbd -->
- <property name="excludes.compile"
- value="**/*current.*,**/parsers2.*,**/typeinf.*"/>
- <property name="excludes.run"
- value="**/Parsers.*"/>
-
-<!-- ===========================================================================
-INITIALISATION
-============================================================================ -->
-
- <target name="init" unless="init.avail">
- <tstamp prefix="start">
- <format
- property="human-time"
- pattern="EEEE, d MMMM yyyy, HH:mm:ss (zz)"
- />
- </tstamp>
- <!-- Testing if everything is in place -->
- <echo level="verbose" message="scala.lib.jar=${scala.lib.jar}"/>
- <fail message="Scala library in '${lib.dir}/' is not available">
- <condition><not><and>
- <available
- classname="scala.Predef"
- classpath="${scala.lib.jar}"
- />
- <available
- classname="scala.collection.immutable.List"
- classpath="${scala.lib.jar}"
- />
- <available
- classname="scala.runtime.ObjectRef"
- classpath="${scala.lib.jar}"
- />
- </and></not></condition>
- </fail>
- <echo level="verbose" message="scala.comp.jar=${scala.comp.jar}"/>
- <fail message="Scala compiler in '${lib.dir}/' is not available">
- <condition><not>
- <available
- classname="scala.tools.ant.Scalac"
- classpath="${scala.comp.jar}:${scala.lib.jar}"
- />
- </not></condition>
- </fail>
- <echo level="verbose" message="fjbg.jar=${fjbg.jar}"/>
- <fail message="FJBG library in '${lib.dir}/' is not available">
- <condition><not>
- <available
- classname="ch.epfl.lamp.fjbg.JCode"
- classpath="${fjbg.jar}"
- />
- </not></condition>
- </fail>
- <echo level="verbose" message="msil.jar=${msil.jar}"/>
- <fail message="MSIL library in '${lib.dir}/' is not available">
- <condition><not>
- <available
- classname="ch.epfl.lamp.compiler.msil.MemberInfo"
- classpath="${msil.jar}"
- />
- </not></condition>
- </fail>
- <echo level="verbose" message="ant.jar=${ant.jar}"/>
- <echo level="verbose" message="ant-contrib.jar=${ant-contrib.jar}"/>
- <fail message="Additional Ant tasks in '${lib.dir}/' is not available">
- <condition><not>
- <available
- classname="net.sf.antcontrib.AntContribVersion"
- classpath="${ant-contrib.jar}"
- />
- </not></condition>
- </fail>
- <!-- Creating class-pathes -->
- <path id="common.classpath">
- <pathelement location="${fjbg.jar}"/>
- <pathelement location="${msil.jar}"/>
- </path>
- <path id="scala.classpath">
- <pathelement location="${scala.lib.jar}"/>
- <pathelement location="${scala.comp.jar}"/>
- <path refid="common.classpath"/>
- </path>
- <!-- Creating boot-level tasks -->
- <taskdef resource="net/sf/antcontrib/antlib.xml">
- <classpath>
- <pathelement location="${ant-contrib.jar}"/>
- </classpath>
- </taskdef>
- <!-- Finding out what system architecture is being used -->
- <condition property="os.win">
- <os family="windows"/>
- </condition>
- <if><isset property="os.win"/>
- <then>
- <exec
- executable="cygpath"
- vmlauncher="no"
- errorproperty="cygpath.err"
- outputproperty="cygpath.out"
- >
- <arg value="--windir"/>
- </exec>
- <condition property="os.cygwin">
- <equals arg1="${cygpath.err}" arg2=""/>
- </condition>
- </then>
- </if>
- <condition property="os.unix">
- <or>
- <os family="unix"/>
- <os family="mac"/>
- <isset property="os.cygwin"/>
- </or>
- </condition>
- <if><isset property="os.cygwin"/>
- <then><property name="os.type" value="Cygwin"/></then>
- <elseif><isset property="os.win"/>
- <then><property name="os.type" value="Windows"/></then>
- </elseif>
- <elseif><isset property="os.unix"/>
- <then><property name="os.type" value="UNIX"/></then>
- </elseif>
- <else>
- <fail>System environment could not be determined</fail>
- </else>
- </if>
- <echo
- level="info"
- message="OS: ${os.type} (${os.name} ${os.arch} ${os.version});"
- />
- <echo
- level="info"
- message="Java: ${java.version} ('${java.home}');"
- />
- <echo
- level="info"
- message="JVM: ${java.vm.name} ${java.vm.version};"
- />
- <echo
- level="info"
- message="Ant: ${ant.version}."
- />
- <property name="init.avail" value="yes"/>
- </target>
-
- <target name="setup" depends="init">
- <!-- Creating boot-level tasks -->
- <taskdef
- name="scalac"
- classname="scala.tools.ant.Scalac"
- classpathref="scala.classpath"
- />
- <property name="init.avail" value="yes"/>
- </target>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="setup">
- <mkdir dir="${build.dir}"/>
- <echo level="verbose">src.dir=${src.dir}</echo>
- <pathconvert property="list" pathsep=",">
- <path>
- <fileset
- dir="${src.dir}"
- includes="examples/*.scala"
- excludes="${excludes.compile}"
- />
- </path>
- <mapper>
- <globmapper from="${src.dir}/*" to="*"/>
- </mapper>
- </pathconvert>
- <for list="${list}" param="file">
- <sequential>
- <scalac srcdir="${src.dir}"
- destdir="${build.dir}" deprecation="true">
- <classpath>
- <pathelement location="${scala.lib.jar}"/>
- <pathelement location="${build.dir}"/>
- </classpath>
- <include name="@{file}"/>
- </scalac>
- </sequential>
- </for>
- </target>
-
-<!-- ===========================================================================
-RUN
-============================================================================ -->
-
- <target name="run" depends="build">
- <pathconvert property="list" pathsep=",">
- <path>
- <fileset
- dir="${src.dir}"
- includes="examples/*.scala"
- excludes="${excludes.run}"
- />
- </path>
- <mapper>
- <globmapper from="${src.dir}/*" to="*"/>
- </mapper>
- </pathconvert>
- <for list="${list}" param="file">
- <sequential>
- <var name="classname" unset="true"/>
- <basename property="classname" file="@{file}" suffix=".scala"/>
- <echo>Executing Scala program examples.${classname}..</echo>
- <java classname="examples.${classname}" fork="true"
- output="${build.examples.dir}/${classname}.log"
- error="${build.examples.dir}/${classname}.log"
- append="true">
- <classpath>
- <pathelement location="${scala.lib.jar}"/>
- <pathelement location="${build.dir}"/>
- </classpath>
- </java>
- </sequential>
- </for>
- </target>
-
-<!-- ===========================================================================
-CLEAN
-============================================================================ -->
-
- <macrodef name="remove">
- <attribute name="dir"/>
- <sequential>
- <delete
- dir="@{dir}"
- includeemptydirs="yes"
- quiet="yes"
- failonerror="no"
- />
- </sequential>
- </macrodef>
-
- <target name="clean" description="Removes all build examples">
- <remove dir="${build.examples.dir}"/>
- </target>
-
-</project>
diff --git a/build.number b/build.number
index e5a19b9..7c027e7 100644
--- a/build.number
+++ b/build.number
@@ -1,7 +1,7 @@
#Tue Sep 11 19:21:09 CEST 2007
version.major=2
-version.minor=9
-version.patch=2
+version.minor=10
+version.patch=4
# This is the -N part of a version. if it's 0, it's dropped from maven versions.
version.bnum=0
diff --git a/build.number.maven b/build.number.maven
new file mode 100644
index 0000000..eed9f38
--- /dev/null
+++ b/build.number.maven
@@ -0,0 +1,3 @@
+version.major=2
+version.minor=10
+version.patch=0
diff --git a/build.xml b/build.xml
index 02da531..a54b033 100644
--- a/build.xml
+++ b/build.xml
@@ -1,299 +1,406 @@
<?xml version="1.0" encoding="UTF-8"?>
-<project name="sabbus" default="build">
+<project name="sabbus" default="build" xmlns:artifact="urn:maven-artifact-ant">
<description>
SuperSabbus for Scala core, builds the scala library and compiler. It can also package it as a simple distribution, tests it for stable bootstrapping and against the Scala test suite.
</description>
-<!-- ===========================================================================
-END-USER TARGETS
-============================================================================ -->
-
- <target name="build" depends="pack.done"
- description="Builds the Scala compiler and library. Executables are in 'build/pack/bin'."/>
-
- <target name="build-opt"
- description="Builds the optimised Scala compiler and library. Executables are in 'build/pack/bin'.">
- <antcall target="build">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
+<!-- HINTS
- <target name="clean" depends="quick.clean"
- description="Removes binaries of compiler and library. Distributions are untouched."/>
+ - for faster builds, have a build.properties in the same directory as build.xml that says:
+ locker.skip=1
+ starr.use.released=1
- <target name="test" depends="test.done"
- description="Runs test suite and bootstrapping test on Scala compiler and library."/>
+-->
- <target name="test-opt"
- description="Runs test suite and bootstrapping test, everything is optimised (compiler, library, tests).">
- <antcall target="test">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
+<!-- USAGE FROM JENKINS SCRIPTS IS (CURRENTLY) AS FOLLOWS:
+ant $antArgs $scalacArgs $targets
- <target name="docs" depends="docs.done"
- description="Builds documentation for the Scala library. Scaladoc is in 'build/scaladoc/library'."/>
-
- <target name="docscomp" depends="docs.comp"
- description="Builds documentation for the Scala compiler and library. Scaladoc is in 'build/scaladoc'."/>
-
- <target name="docsclean" depends="docs.clean"
- description="Removes generated documentation. Distributions are untouched."/>
-
- <target name="dist"
- description="Makes a new distribution and tests it. Will remove existing binaries and documentation.">
- <antcall target="locker.clean"/>
- <antcall target="docs.clean"/>
- <antcall target="all.done"/>
- </target>
-
- <target name="dist-opt"
- description="Makes a new optimised distribution and tests it. Will remove existing binaries and documentation.">
- <antcall target="dist">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
+antArgs tend to be:
+ -Darchives.skipxz=true
+ -Dscalac.args.optimise=-optimise
- <target name="fastdist" depends="dist.done"
- description="Makes a new distribution without testing it or removing partially build elements."/>
+scalacArgs examples:
+ "-Dscalac.args=\"-Yrangepos\" -Dpartest.scalac_opts=\"-Yrangepos\""
- <target name="fastdist-opt"
- description="Makes a new optimised distribution without testing it or removing partially build elements.">
- <antcall target="fastdist">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
+targets exercised:
+ locker.done build-opt nightly test.suite test.continuations.suite test.scaladoc
+-->
- <target name="distclean" depends="dist.clean"
- description="Removes all distributions. Binaries and documentation are untouched."/>
-
- <target name="replacestarr"
- description="Replaces the Starr compiler and library by fresh ones built from current sources and tests them.">
- <fail message="This target is not available on Windows. Use 'ant replacestarrwin' instead.">
- <condition>
- <os family="windows"/>
- </condition>
- </fail>
- <antcall target="locker.clean"/>
- <antcall target="pack.done"/>
- <antcall target="starr.done"/>
- <antcall target="locker.clean"/>
- <antcall target="test.done"/>
- </target>
-
- <target name="replacestarr-opt"
- description="Replaces the Starr compiler and library by fresh, optimised ones built from current sources and tests them.">
- <antcall target="replacestarr">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
+<!-- To use Zinc with the ant build:
+ - install zinc and symlink the installed zinc script to ${basedir}/tools/zinc (${basedir} is where build.xml and the rest of your checkout resides)
+ - make sure to set ZINC_OPTS to match ANT_OPTS!
+-->
- <!-- Ant on Windows is not able to delete jar files that are referenced in any <path>.
- See ticket 1290 on trac. -->
- <target name="replacestarrwin"
- description="Creates a new Starr on Windows. Manually execute 'ant locker.clean build' first!">
- <fail message="This target is only available on Windows. Use 'ant replacestarr' instead.">
- <condition>
- <not><os family="windows"/></not>
- </condition>
- </fail>
- <echo message="CAUTION: Make sure to execute 'ant locker.clean build' prior to calling 'replacestarrwin'."/>
- <antcall target="starr.done"/>
- <antcall target="locker.clean"/>
- <antcall target="test.done"/>
- </target>
-
- <target name="replacelocker"
- description="Replaces the Locker compiler and library by fresh ones built from current sources.">
- <antcall target="palo.clean"/>
- <antcall target="unlocklocker"/>
- </target>
+<!--
+TODO:
+ - detect zinc anywhere on PATH
+ - automatically set ZINC_OPTS
+ - skip locker (and test.stability) by default to speed up PR validation, still do full build & testing during nightly
+ - (rework the build to pack locker and build using that when using zinc)
+-->
- <target name="replacelocker-opt"
- description="Replaces the Locker compiler and library by fresh, optimised ones built from current sources.">
- <antcall target="replacelocker">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
- <target name="unlocklocker"
- description="Unlocks Locker, allowing its compiler and library to be rebuilt">
- <antcall target="locker.unlock"/>
- <antcall target="palo.done"/>
- </target>
+<!-- ===========================================================================
+ END-USER TARGETS
+============================================================================ -->
+ <target name="build" depends="pack.done" description="Builds the Scala compiler and library. Executables are in 'build/pack/bin'."/>
+ <target name="test" depends="test.done" description="Runs test suite and bootstrapping test on Scala compiler and library."/>
+ <target name="docs" depends="docs.done" description="Builds documentation for the Scala library. Scaladoc is in 'build/scaladoc/library'."/>
+ <target name="docscomp" depends="docs.comp" description="Builds documentation for the Scala compiler and library. Scaladoc is in 'build/scaladoc'."/>
+ <target name="dist" depends="all.clean, all.done" description="Cleans all and builds and tests a new distribution."/>
+ <target name="partialdist" depends="dist.partial" description="Makes a new distribution without documentation, so just for testing."/>
+ <target name="fastdist" depends="dist.done" description="Makes a new distribution without testing it or removing partially built elements."/>
+
+ <target name="build-opt" description="Optimized version of build."> <optimized name="build"/></target>
+ <target name="test-opt" description="Optimized version of test."> <optimized name="test"/></target>
+ <target name="dist-opt" description="Optimized version of dist."> <optimized name="dist"/></target>
+ <target name="partialdist-opt" description="Optimized version of partialdist."> <optimized name="partialdist"/></target>
+ <target name="fastdist-opt" description="Optimized version of fastdist."> <optimized name="fastdist"/></target>
+
+ <!-- packaging -->
+ <target name="distpack" depends="dist.done, docs.done">
+ <ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/></target>
+
+ <target name="distpack-maven" depends="dist.done, docs.done">
+ <ant antfile="${src.dir}/build/pack.xml" target="pack-maven.done" inheritall="yes" inheritrefs="yes"/></target>
+
+ <target name="distpack-opt" description="Builds an optimised distribution."> <optimized name="distpack"/></target>
+ <target name="distpack-maven-opt" description="Builds an optimised maven distribution."><optimized name="distpack-maven"/></target>
- <target name="newlibs"
- description="Requires libraries (MSIL, FJBG) to be rebuilt. Add this target before any other if class file format is incompatible.">
- <property name="libs.outdated" value="yes"/>
- </target>
+ <target name="all.done" depends="dist.done, test.done"/>
+
+ <!-- must use depends for all.done, not antcall: need the properties defined in there (dist.dir) -->
+ <target name="nightly-nopt" depends="all.done, docs.done">
+ <ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/></target>
+ <target name="nightly"><optimized name="nightly-nopt"/></target>
+
+ <target name="nightly.checkall">
+ <antcall target="nightly-nopt"> <param name="partest.scalac_opts" value="-Ycheck:all"/></antcall></target>
+
+ <target name="clean" depends="quick.clean" description="Removes binaries of compiler and library. Locker and distributions are untouched."/>
+ <target name="docsclean" depends="docs.clean" description="Removes generated documentation. Distributions are untouched."/>
+ <target name="distclean" depends="dist.clean" description="Removes all distributions. Binaries and documentation are untouched."/>
+
+ <macrodef name="optimized" >
+ <attribute name="name"/>
+ <sequential>
+ <antcall target="@{name}"><param name="scalac.args.optimise" value="-optimise"/></antcall>
+ </sequential>
+ </macrodef>
- <target name="newforkjoin"
- description="Requires forkjoin library to be rebuilt. Add this target before any other if class file format is incompatible.">
- <property name="forkjoin.outdated" value="yes"/>
- </target>
-
<!-- ===========================================================================
-PROPERTIES
+ PROPERTIES
============================================================================ -->
<property environment="env"/>
<!-- Prevents system classpath from being used -->
- <property name="build.sysclasspath" value="ignore"/>
+ <property name="build.sysclasspath" value="ignore"/>
<!-- Defines the repository layout -->
- <property name="docs.dir" value="${basedir}/docs"/>
- <property name="lib.dir" value="${basedir}/lib"/>
- <property name="lib-ant.dir" value="${lib.dir}/ant"/>
- <property name="src.dir" value="${basedir}/src"/>
- <property name="partest.dir" value="${basedir}/test"/>
-
+ <property name="docs.dir" value="${basedir}/docs"/>
+ <property name="lib.dir" value="${basedir}/lib"/>
+ <property name="src.dir" value="${basedir}/src"/>
+ <property name="partest.dir" value="${basedir}/test"/>
+
+ <property name="lib-ant.dir" value="${lib.dir}/ant"/>
<!-- For developers: any jars placed in this dir will be added to the classpath
of all targets and copied into quick/pack/etc builds. -->
- <property name="lib-extra.dir" value="${lib.dir}/extra"/>
+ <property name="lib-extra.dir" value="${lib.dir}/extra"/>
<!-- Loads custom properties definitions -->
<property file="${basedir}/build.properties"/>
+
<!-- Generating version number -->
<property file="${basedir}/build.number"/>
- <!-- Additional command line arguments for scalac. They are added to all build targets -->
- <property name="scalac.args" value=""/>
- <property name="javac.args" value=""/>
+ <!-- read starr.version -->
+ <property file="${basedir}/starr.number"/>
<!-- Sets location of pre-compiled libraries -->
- <property name="lib.starr.jar" value="${lib.dir}/scala-library.jar"/>
- <property name="comp.starr.jar" value="${lib.dir}/scala-compiler.jar"/>
- <property name="jline.jar" value="${lib.dir}/jline.jar"/>
- <property name="ant.jar" value="${ant.home}/lib/ant.jar"/>
- <property name="scalacheck.jar" value="${lib.dir}/scalacheck.jar"/>
+ <property name="library.starr.jar" value="${lib.dir}/scala-library.jar"/>
+ <property name="reflect.starr.jar" value="${lib.dir}/scala-reflect.jar"/>
+ <property name="compiler.starr.jar" value="${lib.dir}/scala-compiler.jar"/>
+ <property name="msil.starr.jar" value="${lib.dir}/msil.jar"/>
+ <property name="jline.jar" value="${lib.dir}/jline.jar"/>
+ <property name="ant.jar" value="${ant.home}/lib/ant.jar"/>
+ <property name="scalacheck.jar" value="${lib.dir}/scalacheck.jar"/>
<!-- Sets location of build folders -->
- <property name="build.dir" value="${basedir}/build"/>
- <property name="build-locker.dir" value="${build.dir}/locker"/>
- <property name="build-palo.dir" value="${build.dir}/palo"/>
- <property name="build-quick.dir" value="${build.dir}/quick"/>
- <property name="build-pack.dir" value="${build.dir}/pack"/>
- <property name="build-strap.dir" value="${build.dir}/strap"/>
- <property name="build-docs.dir" value="${build.dir}/scaladoc"/>
- <property name="build-libs.dir" value="${build.dir}/libs"/>
-
- <property name="dists.dir" value="${basedir}/dists"/>
-
- <property name="copyright.string" value="Copyright 2002-2011, LAMP/EPFL"/>
- <property name="partest.version.number" value="0.9.2"/>
+ <property name="build.dir" value="${basedir}/build"/>
+ <property name="build-libs.dir" value="${build.dir}/libs"/>
+ <property name="build-asm.dir" value="${build.dir}/asm"/>
+ <property name="build-fjbg.dir" value="${build-libs.dir}"/>
+ <property name="build-forkjoin.dir" value="${build-libs.dir}"/>
+ <property name="build-locker.dir" value="${build.dir}/locker"/>
+ <property name="build-palo.dir" value="${build.dir}/palo"/>
+ <property name="build-quick.dir" value="${build.dir}/quick"/>
+ <property name="build-pack.dir" value="${build.dir}/pack"/>
+ <property name="build-osgi.dir" value="${build.dir}/osgi"/>
+ <property name="build-junit.dir" value="${build.dir}/junit"/>
+ <property name="build-strap.dir" value="${build.dir}/strap"/>
+ <property name="build-docs.dir" value="${build.dir}/scaladoc"/>
+ <property name="build-sbt.dir" value="${build.dir}/sbt-interface"/>
+
+ <property name="test.osgi.src" value="${partest.dir}/osgi/src"/>
+ <property name="test.osgi.classes" value="${build-osgi.dir}/classes"/>
+
+ <property name="test.junit.src" value="${partest.dir}/junit"/>
+ <property name="test.junit.classes" value="${build-junit.dir}/classes"/>
+
+ <property name="dists.dir" value="${basedir}/dists"/>
+
+ <property name="copyright.string" value="Copyright 2002-2013, LAMP/EPFL"/>
+ <property name="partest.version.number" value="0.9.2"/>
<!-- These are NOT the flags used to run SuperSabbus, but the ones written
into the script runners created with scala.tools.ant.ScalaTool -->
- <property name="java.flags" value="-Xmx256M -Xms32M"/>
- <property name="jvm.opts" value=""/>
+ <property name="java.flags" value="-Xmx256M -Xms32M"/>
+ <property name="jvm.opts" value=""/>
<!-- if ANT_OPTS is already set by the environment, it will be unaltered,
but if it is unset it will take this default value. -->
- <property name="env.ANT_OPTS" value="-Xms1536M -Xmx1536M -Xss1M -XX:MaxPermSize=192M -XX:+UseParallelGC" />
-
- <!-- to find max heap usage: -Xaprof ; currently at 980M for locker.comp -->
- <echo message="Forking with JVM opts: ${env.ANT_OPTS} ${jvm.opts}" />
+ <property name="env.ANT_OPTS" value="-Xms1536M -Xmx1536M -Xss1M -XX:MaxPermSize=192M -XX:+UseParallelGC" />
- <property
- name="scalacfork.jvmargs"
- value="${env.ANT_OPTS} ${jvm.opts}"/>
+ <property name="scalacfork.jvmargs" value="${env.ANT_OPTS} ${jvm.opts}"/>
<!-- ===========================================================================
-INITIALISATION
+ INITIALIZATION
============================================================================ -->
+ <target name="desired.jars.uptodate">
+ <patternset id="desired.jars">
+ <include name="lib/**/*.desired.sha1"/>
+ <include name="test/files/**/*.desired.sha1"/>
+ <include name="tools/**/*.desired.sha1"/>
+ </patternset>
+
+ <uptodate property="lib.jars.uptodate">
+ <srcfiles dir="${basedir}"><patternset refid="desired.jars"/></srcfiles>
+ <mapper type="glob" from="*.desired.sha1" to="*"/>
+ </uptodate>
+ </target>
- <condition property="starr.absent">
- <not><available file="${lib.dir}/scala-library.jar"/></not>
- </condition>
-
- <target name="init.starr" if="starr.absent">
- <echo level="warn" message="Downloading bootstrap libs. (To do this by hand, run ./pull-binary-libs.sh)"/>
+ <target name="boot" depends="desired.jars.uptodate" unless="lib.jars.uptodate">
+ <echo level="warn" message="Updating bootstrap libs. (To do this by hand, run ./pull-binary-libs.sh)"/>
<exec osfamily="unix" vmlauncher="false" executable="./pull-binary-libs.sh" failifexecutionfails="true" />
<exec osfamily="windows" vmlauncher="false" executable="pull-binary-libs.sh" failifexecutionfails="true" />
- </target>
-
- <!-- Determines OSGi string + maven extension. -->
- <target name="init.hasbuildnum">
- <condition property="version.hasbuildnum">
- <not><equals arg1="${version.bnum}" arg2="0"/></not>
- </condition>
- </target>
- <target name="init.build.snapshot" unless="build.release">
- <property name="maven.version.suffix" value="-SNAPSHOT"/>
- </target>
- <target name="init.build.release" if="build.release" depends="init.hasbuildnum, init.build.snapshot">
- <property name="maven.version.suffix" value=""/>
- </target>
- <target name="init.build.nopatch.release" unless="version.hasbuildnum" depends="init.hasbuildnum">
- <property name="version.suffix" value=""/>
- </target>
- <!-- funny thing, ant is. Can only specify *one* property in if check. Guaranteed that both are true here,
- since properties are immutable. -->
- <target name="init.build.patch.release" if="version.hasbuildnum" depends="init.build.nopatch.release">
- <property name="version.suffix" value="-${version.bnum}"/>
- </target>
+ <!-- uptodate task needs to know these are what's in the sha. -->
+ <touch>
+ <fileset dir="${basedir}"><patternset refid="desired.jars"/></fileset>
+ <mapper type="glob" from="*.desired.sha1" to="*"/>
+ </touch>
+ </target>
+
+ <target name="init" depends="boot">
+ <!-- Set up Ant contrib tasks so we can use <if><then><else> instead of the clunky `unless` attribute -->
+ <taskdef resource="net/sf/antcontrib/antlib.xml" classpath="${lib-ant.dir}/ant-contrib.jar"/>
+
+ <!-- Add our maven ant tasks -->
+ <path id="maven-ant-tasks.classpath" path="${lib-ant.dir}/maven-ant-tasks-2.1.1.jar" />
+ <typedef resource="org/apache/maven/artifact/ant/antlib.xml" uri="urn:maven-artifact-ant" classpathref="maven-ant-tasks.classpath" />
+
+ <!-- Resolve maven dependencies -->
+
+ <!-- work around http://jira.codehaus.org/browse/MANTTASKS-203:
+ java.lang.ClassCastException: org.codehaus.plexus.DefaultPlexusContainer cannot be cast to org.codehaus.plexus.PlexusContainer
+ on repeated use of artifact:dependencies
+ -->
+ <if><not><isset property="maven-deps-done"></isset></not><then>
+ <mkdir dir="${user.home}/.m2/repository"/>
+ <!-- This task has an issue where if the user directory does not exist, so we create it above. UGH. -->
+ <artifact:dependencies pathId="extra.tasks.classpath" filesetId="extra.tasks.fileset">
+ <dependency groupId="biz.aQute" artifactId="bnd" version="1.50.0"/>
+ </artifact:dependencies>
+
+ <!-- JUnit -->
+ <property name="junit.version" value="4.10"/>
+ <artifact:dependencies pathId="junit.classpath" filesetId="junit.fileset">
+ <dependency groupId="junit" artifactId="junit" version="${junit.version}"/>
+ </artifact:dependencies>
+
+ <!-- Pax runner -->
+ <property name="pax.exam.version" value="2.6.0"/>
+ <artifact:dependencies pathId="pax.exam.classpath" filesetId="pax.exam.fileset">
+ <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-container-native" version="${pax.exam.version}"/>
+ <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-junit4" version="${pax.exam.version}"/>
+ <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-link-assembly" version="${pax.exam.version}"/>
+ <!-- upgraded to 1.6.0 to get fix for https://ops4j1.jira.com/browse/PAXURL-217
+ https://ops4j1.jira.com/browse/PAXURL-138 is still unresolved... -->
+ <dependency groupId="org.ops4j.pax.url" artifactId="pax-url-aether" version="1.6.0"/>
+ <dependency groupId="org.ops4j.pax.swissbox" artifactId="pax-swissbox-framework" version="1.5.1"/>
+ <dependency groupId="ch.qos.logback" artifactId="logback-core" version="0.9.20"/>
+ <dependency groupId="ch.qos.logback" artifactId="logback-classic" version="0.9.20"/>
+ <dependency groupId="junit" artifactId="junit" version="${junit.version}"/>
+ <dependency groupId="org.apache.felix" artifactId="org.apache.felix.framework" version="3.2.2"/>
+ </artifact:dependencies>
+
+
+ <artifact:dependencies pathId="partest.extras.classpath" filesetId="partest.extras.fileset" versionsId="partest.extras.versions">
+ <dependency groupId="com.googlecode.java-diff-utils" artifactId="diffutils" version="1.3.0"/>
+ </artifact:dependencies>
+
+ <!-- BND support -->
+ <typedef resource="aQute/bnd/ant/taskdef.properties" classpathref="extra.tasks.classpath" />
+
+ <artifact:remoteRepository id="extra-repo" url="${extra.repo.url}"/>
+
+ <!-- Download STARR via maven if `starr.use.released` is set,
+ and `starr.version` is specified (see the starr.number properties file).
+ Want to slow down STARR changes, using only released versions. -->
+ <if><isset property="starr.use.released"/><then>
+ <echo message="Using Scala ${starr.version} for STARR."/>
+ <artifact:dependencies pathId="starr.core.path">
+ <artifact:remoteRepository refid="extra-repo"/>
+ <dependency groupId="org.scala-lang" artifactId="scala-library" version="${starr.version}"/>
+ <dependency groupId="org.scala-lang" artifactId="scala-reflect" version="${starr.version}"/>
+ <dependency groupId="org.scala-lang" artifactId="scala-compiler" version="${starr.version}"/>
+ </artifact:dependencies></then>
+ <else>
+ <path id="starr.core.path">
+ <pathelement location="${library.starr.jar}"/>
+ <pathelement location="${reflect.starr.jar}"/>
+ <pathelement location="${compiler.starr.jar}"/>
+ <pathelement location="${msil.starr.jar}"/>
+ </path></else>
+ </if>
+
+ <property name="maven-deps-done" value="yep!"/>
+ </then></if>
+
+ <!-- NOTE: ant properties are write-once: second writes are silently discarded; the logic below relies on this -->
+
+ <!-- Compute defaults (i.e., if not specified on command-line) for OSGi/maven version suffixes.
+ Try to establish the invariant (verified below):
+ `version.suffix == maven.version.suffix == osgi.version.suffix`,
+ except for:
+ - snapshot builds, where:
+ - `maven.suffix == "-SNAPSHOT"`
+ - `version.suffix == osgi.version.suffix == ""`
+ - final builds, where:
+ - `osgi.suffix == "-VFINAL"`
+ - `version.suffix == maven.version.suffix == ""`
+ -->
+ <if><not><equals arg1="${version.bnum}" arg2="0"/></not><then>
+ <property name="version.suffix" value="-${version.bnum}"/>
+ </then></if>
+
+ <if><or><not><isset property="version.suffix"/></not><equals arg1="${version.suffix}" arg2=""/></or><then>
+ <if><isset property="build.release"/><then>
+ <property name="maven.version.suffix" value=""/>
+ <property name="version.suffix" value="${maven.version.suffix}"/>
+ <if><equals arg1="${maven.version.suffix}" arg2=""/><then>
+ <property name="osgi.version.suffix" value="-VFINAL"/></then>
+ <else>
+ <property name="osgi.version.suffix" value="${maven.version.suffix}"/></else></if></then></if></then>
+ <else> <!-- version.suffix set and not empty -->
+ <property name="maven.version.suffix" value="${version.suffix}"/>
+ <property name="osgi.version.suffix" value="${version.suffix}"/></else></if>
+
+ <!-- if a maven version suffix was set (or inferred), assume we're building a release -->
+ <if><isset property="maven.version.suffix"/><then>
+ <property name="build.release" value="1"/></then></if>
+
+ <!-- not building a release and no version.suffix specified -->
+ <property name="maven.version.suffix" value="-SNAPSHOT"/>
+
+ <if><equals arg1="${maven.version.suffix}" arg2="-SNAPSHOT"/><then>
+ <property name="osgi.version.suffix" value=""/>
+ <property name="version.suffix" value=""/></then>
+ <else>
+ <property name="osgi.version.suffix" value="${maven.version.suffix}"/>
+ <property name="version.suffix" value="${maven.version.suffix}"/></else></if>
- <target name="init.build.suffix.done" depends="init.build.release, init.build.patch.release"/>
- <target name="init.version.git" depends="init.build.suffix.done">
- <!-- Find out whether we are running on Windows -->
- <condition property="os.win">
- <os family="windows"/>
- </condition>
-
<exec osfamily="unix" executable="tools/get-scala-commit-sha" outputproperty="git.commit.sha" failifexecutionfails="false" />
+ <exec osfamily="windows" executable="cmd.exe" outputproperty="git.commit.sha" failifexecutionfails="false">
+ <arg value="/c"/>
+ <arg value="tools\get-scala-commit-sha.bat"/>
+ <arg value="-p"/>
+ </exec>
<exec osfamily="unix" executable="tools/get-scala-commit-date" outputproperty="git.commit.date" failifexecutionfails="false" />
+ <exec osfamily="windows" executable="cmd.exe" outputproperty="git.commit.date" failifexecutionfails="false">
+ <arg value="/c"/>
+ <arg value="tools\get-scala-commit-date.bat"/>
+ <arg value="-p"/>
+ </exec>
+
<!-- some default in case something went wrong getting the revision -->
- <property name="git.commit.sha" value="unknown"/>
- <property name="git.commit.date" value="unknown"/>
+ <property name="git.commit.sha" value="unknown"/>
+ <property name="git.commit.date" value="unknown"/>
<!-- We use the git describe to determine the OSGi modifier for our build. -->
- <property
- name="maven.version.number"
- value="${version.major}.${version.minor}.${version.patch}${version.suffix}${maven.version.suffix}"/>
- <property
- name="osgi.version.number"
- value="${version.major}.${version.minor}.${version.patch}.v${git.commit.date}${version.suffix}-${git.commit.sha}"/>
- </target>
+ <property name="maven.version.number"
+ value="${version.major}.${version.minor}.${version.patch}${maven.version.suffix}"/>
+ <property name="osgi.version.number"
+ value="${version.major}.${version.minor}.${version.patch}.v${git.commit.date}${osgi.version.suffix}-${git.commit.sha}"/>
+
+ <if><isset property="build.release"/><then>
+ <property name="version.number" value="${maven.version.number}"/>
+ </then><else>
+ <property name="version.number" value="${version.major}.${version.minor}.${version.patch}${version.suffix}-${git.commit.date}-${git.commit.sha}"/>
+ </else></if>
+
+ <condition property="has.java6">
+ <equals arg1="${ant.java.version}" arg2="1.6"/>
+ </condition>
+ <condition property="has.java7">
+ <equals arg1="${ant.java.version}" arg2="1.7"/>
+ </condition>
+ <condition property="has.unsupported.jdk">
+ <not><or>
+ <isset property="has.java7" />
+ <isset property="has.java6" />
+ </or></not>
+ </condition>
- <target name="init.version.snapshot" unless="build.release" depends="init.version.git">
- <property
- name="version.number"
- value="${version.major}.${version.minor}.${version.patch}${version.suffix}-${git.commit.date}-${git.commit.sha}"/>
- </target>
+ <fail if="has.unsupported.jdk" message="JDK ${ant.java.version} is not supported by this build!"/>
+
+ <if><isset property="has.java7"/><then>
+ <echo level="warning"> You are using JDK7 for this build.
+ While this will be able to build most of Scala, it will not build the Swing project.
+ You will be unable to create a distribution.
+ </echo>
+ </then></if>
+
+ <!-- Allow this to be overridden simply -->
+ <property name="sbt.latest.version" value="0.12.2"/>
+
+ <property name="sbt.src.dir" value="${build-sbt.dir}/${sbt.latest.version}/src"/>
+ <property name="sbt.lib.dir" value="${build-sbt.dir}/${sbt.latest.version}/lib"/>
+
+ <property name="sbt.interface.jar" value="${sbt.lib.dir}/interface.jar"/>
+ <property name="sbt.interface.url" value="http://private-repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/interface/${sbt.latest.version}/jars/interface.jar"/>
+ <property name="sbt.interface.src.jar" value="${sbt.src.dir}/compiler-interface-src.jar"/>
+ <property name="sbt.interface.src.url" value="http://private-repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/compiler-interface/${sbt.latest.version}/jars/compiler-interface-src.jar"/>
- <target name="init.version.release" if="build.release" depends="init.version.git">
- <property
- name="version.number"
- value="${maven.version.number}"/>
- </target>
- <target name="init.version.done" depends="init.version.release, init.version.snapshot"/>
-
- <target name="init" depends="init.starr, init.version.done">
- <!-- scalac.args.optimise is selectively overridden in certain antcall tasks. -->
- <property name="scalac.args.optimise" value=""/>
- <!-- scalac.args.quickonly are added to quick.* targets but not others (particularly, locker.)
- This is to facilitate testing new command line options which do not yet exist in starr. -->
- <property name="scalac.args.quickonly" value=""/>
- <property name="scalac.args.all" value="${scalac.args} ${scalac.args.optimise}"/>
- <property name="scalac.args.quick" value="${scalac.args.all} ${scalac.args.quickonly}"/>
- <!-- Setting-up Ant contrib tasks -->
- <taskdef resource="net/sf/antcontrib/antlib.xml" classpath="${lib.dir}/ant/ant-contrib.jar"/>
+ <!-- Additional command line arguments for scalac. They are added to all build targets -->
+ <property name="scalac.args" value=""/>
+ <property name="javac.args" value=""/>
+
+ <property name="scalac.args.always" value="" />
+ <property name="scalac.args.optimise" value=""/> <!-- scalac.args.optimise is selectively overridden in certain antcall tasks. -->
+ <property name="scalac.args.all" value="${scalac.args.always} ${scalac.args} ${scalac.args.optimise}"/>
+ <property name="scalac.args.locker" value="${scalac.args.all}"/>
+ <property name="scalac.args.quick" value="${scalac.args.all}"/>
+ <property name="scalac.args.strap" value="${scalac.args.quick}"/>
+
<!-- This is the start time for the distribution -->
<tstamp prefix="time">
- <format property="human" pattern="d MMMM yyyy, HH:mm:ss"/>
+ <format property="human" pattern="d MMMM yyyy, HH:mm:ss" locale="en,US"/>
<format property="short" pattern="yyyyMMddHHmmss"/>
</tstamp>
- <property name="init.avail" value="yes"/>
-
+ <!-- some default in case something went wrong getting the revision -->
+ <property name="version.number" value="-unknown-"/>
+ <property name="init.avail" value="yes"/>
<!-- Local libs (developer use.) -->
<mkdir dir="${lib-extra.dir}"/>
-
- <path id="lib.extra">
+
+ <!-- Auxiliary libs placed on every classpath. -->
+ <path id="aux.libs">
+ <pathelement location="${ant.jar}"/>
<!-- needs ant 1.7.1 -->
<!-- <fileset dir="${lib-extra.dir}" erroronmissingdir="false"> -->
<fileset dir="${lib-extra.dir}">
@@ -301,1451 +408,1137 @@ INITIALISATION
</fileset>
</path>
- <!-- Auxiliary libs placed on every classpath. -->
- <path id="aux.libs">
- <fileset dir="${lib.dir}">
- <include name="fjbg.jar"/>
- <include name="msil.jar"/>
- <include name="forkjoin.jar"/>
- </fileset>
- <pathelement location="${ant.jar}"/>
- <path refid="lib.extra"/>
- </path>
-
<!-- And print-out what we are building -->
<echo message=" build time: ${time.human}" />
- <echo message=" java version: ${java.vm.name} ${java.version}" />
+ <echo message=" java version: ${java.vm.name} ${java.version} (${ant.java.version})" />
<echo message=" java args: ${env.ANT_OPTS} ${jvm.opts}" />
<echo message=" javac args: ${javac.args}" />
- <echo message=" scalac args: ${scalac.args}" />
+ <echo message=" scalac args: ${scalac.args.all}" />
+ <echo message="scalac quick args: ${scalac.args.quick}" />
<echo message=" git date: ${git.commit.date}" />
<echo message=" git hash: ${git.commit.sha}" />
<echo message=" maven version: ${maven.version.number}"/>
<echo message=" OSGi version: ${osgi.version.number}" />
<echo message="canonical version: ${version.number}" />
- <!-- Define tasks that can be run with Starr -->
- <path id="starr.classpath">
- <pathelement location="${lib.starr.jar}"/>
- <pathelement location="${comp.starr.jar}"/>
- <path refid="aux.libs"/>
- </path>
- <!-- What to have on the compilation path when compiling during certain phases -->
- <path id="quick.compilation.path">
- <pathelement location="${build-quick.dir}/classes/library"/>
+ <echoproperties destfile="buildcharacter.properties">
+ <propertyset>
+ <propertyref regex="time.*" />
+ <propertyref regex="git.*" />
+ <propertyref name="java.vm.name" />
+ <propertyref regex=".*version.*" />
+ <propertyref regex="scalac.args.*" />
+ <propertyref name="scalacfork.jvmargs" />
+ </propertyset>
+ </echoproperties>
+
+ <!-- validate version suffixes -->
+ <if><equals arg1="${maven.version.suffix}" arg2="-SNAPSHOT"/><then>
+ <condition property="version.suffixes.consistent"><and>
+ <equals arg1="${osgi.version.suffix}" arg2=""/>
+ <equals arg1="${version.suffix}" arg2=""/>
+ </and></condition></then>
+ <else>
+ <if><equals arg1="${osgi.version.suffix}" arg2="-VFINAL"/><then>
+ <condition property="version.suffixes.consistent"><and>
+ <equals arg1="${maven.version.suffix}" arg2=""/>
+ <equals arg1="${version.suffix}" arg2=""/>
+ </and></condition></then>
+ <else>
+ <condition property="version.suffixes.consistent"><and>
+ <equals arg1="${osgi.version.suffix}" arg2="${maven.version.suffix}"/>
+ <equals arg1="${version.suffix}" arg2="${maven.version.suffix}"/>
+ </and></condition></else></if></else></if>
+
+ <!-- <echo message=" maven suffix: ${maven.version.suffix}"/>
+ <echo message=" OSGi suffix: ${osgi.version.suffix}" />
+ <echo message="canonical suffix: ${version.suffix}" /> -->
+ <fail unless="version.suffixes.consistent" message="Version suffixes inconsistent!"/>
+
+ <path id="forkjoin.classpath" path="${build-libs.dir}/classes/forkjoin"/>
+ <path id="fjbg.classpath" path="${build-libs.dir}/classes/fjbg"/>
+ <path id="asm.classpath" path="${build-asm.dir}/classes"/>
+ <property name="forkjoin-classes" refid="forkjoin.classpath"/>
+ <property name="fjbg-classes" refid="fjbg.classpath"/>
+ <property name="asm-classes" refid="asm.classpath"/>
+
+ <!-- Compilers to use for the various stages.
+
+ There must be a variable of the shape @{stage}.compiler.path for all @{stage} in starr, locker, quick, strap.
+ -->
+ <path id="starr.compiler.path">
+ <path refid="starr.core.path"/>
<pathelement location="${lib.dir}/forkjoin.jar"/>
- <path refid="lib.extra"/>
+ <path refid="aux.libs"/>
</path>
- <path id="strap.compilation.path">
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- <path refid="lib.extra"/>
- </path>
- <taskdef resource="scala/tools/ant/sabbus/antlib.xml" classpathref="starr.classpath"/>
- </target>
-<!-- ===========================================================================
-LOCAL REFERENCE BUILD (LOCKER)
-============================================================================ -->
+ <!-- To skip locker, use -Dlocker.skip=1 -->
+ <if><isset property="locker.skip"/><then>
+ <echo message="Using STARR to build the quick stage (skipping locker)."/>
+ <path id="locker.compiler.path" refid="starr.compiler.path"/>
+ <property name="locker.locked" value="locker skipped"/></then>
+ <else>
+ <path id="locker.compiler.path"><path refid="locker.compiler.build.path"/></path></else></if>
- <target name="locker.start" depends="init">
- <condition property="locker.available">
- <available file="${build-locker.dir}/all.complete"/>
- </condition>
- </target>
-
- <target name="locker.pre-lib" depends="locker.start" unless="locker.available">
- <condition property="locker.lib.needed">
- <not><available file="${build-locker.dir}/library.complete"/></not>
- </condition>
- </target>
+ <!-- compilerpathref for compiling with quick -->
+ <path id="quick.compiler.path"> <path refid="quick.compiler.build.path"/></path>
- <target name="locker.lib" depends="locker.pre-lib" if="locker.lib.needed">
- <stopwatch name="locker.lib.timer"/>
- <mkdir dir="${build-locker.dir}/classes/library"/>
- <javac
- srcdir="${src.dir}/library"
- destdir="${build-locker.dir}/classes/library"
- classpath="${build-locker.dir}/classes/library"
- includes="**/*.java"
- target="1.5" source="1.5">
- <compilerarg line="${javac.args}"/>
- </javac>
- <!-- NOTE: Potential problem with maximal command line length on Windows
- (32768 characters for XP, since executed with Java's "exec"). See
- src/build/msil.xml in msil branch for more details. -->
- <scalacfork
- destdir="${build-locker.dir}/classes/library"
- compilerpathref="starr.classpath"
- srcpath="${src.dir}/library"
- params="${scalac.args.all}"
- srcdir="${src.dir}/library"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-locker.dir}/classes/library"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- </compilationpath>
- </scalacfork>
- <propertyfile file="${build-locker.dir}/classes/library/library.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-locker.dir}/classes/library">
- <fileset dir="${src.dir}/library">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.css"/>
- </fileset>
- </copy>
- <touch file="${build-locker.dir}/library.complete" verbose="no"/>
- <stopwatch name="locker.lib.timer" action="total"/>
- </target>
-
- <target name="locker.pre-comp" depends="locker.lib" unless="locker.available">
- <condition property="locker.comp.needed">
- <not><available file="${build-locker.dir}/compiler.complete"/></not>
- </condition>
- </target>
- <target name="locker.comp" depends="locker.pre-comp" if="locker.comp.needed">
- <stopwatch name="locker.comp.timer"/>
- <mkdir dir="${build-locker.dir}/classes/compiler"/>
- <scalacfork
- destdir="${build-locker.dir}/classes/compiler"
- compilerpathref="starr.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/compiler"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-locker.dir}/classes/library"/>
- <pathelement location="${build-locker.dir}/classes/compiler"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
- <propertyfile file="${build-locker.dir}/classes/compiler/compiler.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-locker.dir}/classes/compiler">
- <fileset dir="${src.dir}/compiler">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.html"/>
- <include name="**/*.css"/>
- <include name="**/*.properties"/>
- <include name="**/*.swf"/>
- <include name="**/*.png"/>
- <include name="**/*.gif"/>
- </fileset>
- </copy>
- <touch file="${build-locker.dir}/compiler.complete" verbose="no"/>
- <stopwatch name="locker.comp.timer" action="total"/>
- </target>
+ <!-- What to have on the compilation path when compiling during certain phases.
- <target name="locker.done" depends="locker.comp">
- <touch file="${build-locker.dir}/all.complete" verbose="no"/>
- <path id="locker.classpath">
+ There must be a variable of the shape @{stage}.@{project}.build.path
+ for all @{stage} in locker, quick, strap
+ and all @{project} in library, reflect, compiler
+ when stage is quick, @{project} also includes: actors, swing, plugins, scalacheck, partest, scalap
+ -->
+
+ <!-- LOCKER -->
+ <path id="locker.library.build.path">
<pathelement location="${build-locker.dir}/classes/library"/>
- <pathelement location="${build-locker.dir}/classes/compiler"/>
+ <path refid="forkjoin.classpath"/>
<path refid="aux.libs"/>
</path>
- </target>
-
- <target name="locker.clean" depends="palo.clean">
- <delete dir="${build-locker.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </target>
-
- <target name="locker.unlock">
- <delete file="${build-locker.dir}/all.complete"/>
- <delete file="${build-locker.dir}/library.complete"/>
- <delete file="${build-locker.dir}/compiler.complete"/>
- </target>
-<!-- ===========================================================================
-PACKED LOCKER BUILD (PALO)
-============================================================================ -->
-
- <target name="palo.start" depends="locker.done"/>
-
- <target name="palo.pre-lib" depends="palo.start">
- <uptodate
- property="palo.lib.available"
- targetfile="${build-palo.dir}/lib/scala-library.jar"
- srcfile="${build-locker.dir}/library.complete"/>
- </target>
-
- <target name="palo.lib" depends="palo.pre-lib" unless="palo.lib.available">
- <mkdir dir="${build-palo.dir}/lib"/>
- <jar destfile="${build-palo.dir}/lib/scala-library.jar">
- <fileset dir="${build-locker.dir}/classes/library"/>
- </jar>
- </target>
-
- <target name="palo.pre-comp" depends="palo.lib">
- <uptodate
- property="palo.comp.available"
- targetfile="${build-palo.dir}/lib/scala-compiler.jar"
- srcfile="${build-locker.dir}/compiler.complete"/>
- </target>
-
- <target name="palo.comp" depends="palo.pre-comp" unless="palo.comp.available">
- <mkdir dir="${build-palo.dir}/lib"/>
- <jar destfile="${build-palo.dir}/lib/scala-compiler.jar" manifest="${basedir}/META-INF/MANIFEST.MF">
- <fileset dir="${build-locker.dir}/classes/compiler"/>
- <!-- filemode / dirmode: see trac ticket #1294 -->
- </jar>
- </target>
+ <path id="locker.actors.build.path">
+ <path refid="locker.library.build.path"/>
+ <pathelement location="${build-locker.dir}/classes/actors"/>
+ </path>
- <target name="palo.done" depends="palo.comp">
- </target>
+ <path id="locker.msil.build.path">
+ <path refid="locker.compiler.build.path"/>
+ <pathelement location="${build-locker.dir}/classes/msil"/>
+ </path>
- <target name="palo.clean" depends="quick.clean">
- <delete dir="${build-palo.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </target>
+ <path id="locker.reflect.build.path">
+ <path refid="locker.library.build.path"/>
+ <pathelement location="${build-locker.dir}/classes/reflect"/>
+ </path>
-<!-- ===========================================================================
-QUICK BUILD (QUICK)
-============================================================================ -->
-
- <target name="quick.start" depends="locker.done"/>
+ <path id="locker.compiler.build.path">
+ <path refid="locker.reflect.build.path"/>
+ <pathelement location="${build-locker.dir}/classes/compiler"/>
+ <pathelement location="${build-locker.dir}/classes/msil"/>
+ <path refid="asm.classpath"/>
+ <path refid="fjbg.classpath"/>
+ <pathelement location="${jline.jar}"/>
+ </path>
- <target name="quick.pre-lib" depends="quick.start">
- <uptodate property="quick.lib.available" targetfile="${build-quick.dir}/library.complete">
- <srcfiles dir="${src.dir}">
- <include name="library/**"/>
- <include name="dbc/**"/>
- <include name="actors/**"/>
- <include name="continuations/**"/>
- <include name="swing/**"/>
- </srcfiles>
- </uptodate>
- </target>
+ <path id="locker.msil.build.path" refid="locker.compiler.build.path"/>
- <target name="quick.lib" depends="quick.pre-lib" unless="quick.lib.available">
- <stopwatch name="quick.lib.timer"/>
- <mkdir dir="${build-quick.dir}/classes/library"/>
- <javac
- srcdir="${src.dir}/library"
- destdir="${build-quick.dir}/classes/library"
- classpath="${build-quick.dir}/classes/library"
- includes="**/*.java"
- target="1.5" source="1.5">
- <compilerarg line="${javac.args}"/>
- </javac>
- <javac
- srcdir="${src.dir}/actors"
- destdir="${build-quick.dir}/classes/library"
- classpath="${build-quick.dir}/classes/library"
- includes="**/*.java"
- target="1.5" source="1.5">
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-quick.dir}/classes/library"
- compilerpathref="locker.classpath"
- srcpath="${src.dir}/library"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/library"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
- </scalacfork>
- <scalacfork
- destdir="${build-quick.dir}/classes/library"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/actors"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
- </scalacfork>
- <scalacfork
- destdir="${build-quick.dir}/classes/library"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/dbc"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
- </scalacfork>
- <scalacfork
- destdir="${build-quick.dir}/classes/library"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/swing"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
- </scalacfork>
- <propertyfile file="${build-quick.dir}/classes/library/library.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-quick.dir}/classes/library">
- <fileset dir="${src.dir}/library">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.css"/>
- </fileset>
- </copy>
- <touch file="${build-quick.dir}/library.complete" verbose="no"/>
- <stopwatch name="quick.lib.timer" action="total"/>
- </target>
-
- <target name="quick.newlibs" depends="quick.lib" if="libs.outdated">
- <antcall target="libs.done" inheritRefs="true"/>
- <property name="fjbg.jar" value="${build-libs.dir}/fjbg.jar"/>
- <property name="msil.jar" value="${build-libs.dir}/msil.jar"/>
- </target>
-
- <target name="quick.libs" depends="quick.newlibs" unless="libs.outdated">
- <property name="fjbg.jar" value="${lib.dir}/fjbg.jar"/>
- <property name="msil.jar" value="${lib.dir}/msil.jar"/>
- </target>
+ <!-- QUICK -->
+ <path id="quick.library.build.path">
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <path refid="forkjoin.classpath"/>
+ <path refid="aux.libs"/>
+ </path>
- <target name="quick.newforkjoin" depends="quick.libs" if="forkjoin.outdated">
- <antcall target="forkjoin.done" inheritRefs="true"/>
- <property name="forkjoin.jar" value="${build-libs.dir}/forkjoin.jar"/>
- </target>
+ <path id="quick.actors.build.path">
+ <path refid="quick.library.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/actors"/>
+ </path>
- <target name="quick.forkjoin" depends="quick.newforkjoin" unless="forkjoin.outdated">
- <property name="forkjoin.jar" value="${lib.dir}/forkjoin.jar"/>
- </target>
+ <path id="quick.reflect.build.path">
+ <path refid="quick.library.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/reflect"/>
+ </path>
- <target name="quick.pre-comp" depends="quick.forkjoin">
- <uptodate property="quick.comp.available" targetfile="${build-quick.dir}/compiler.complete">
- <srcfiles dir="${src.dir}/compiler"/>
- </uptodate>
- </target>
+ <path id="quick.msil.build.path">
+ <path refid="quick.compiler.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/msil"/>
+ </path>
- <target name="quick.comp" depends="quick.pre-comp" unless="quick.comp.available">
- <stopwatch name="quick.comp.timer"/>
- <mkdir dir="${build-quick.dir}/classes/compiler"/>
- <scalacfork
- destdir="${build-quick.dir}/classes/compiler"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/compiler"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
- <propertyfile file="${build-quick.dir}/classes/compiler/compiler.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-quick.dir}/classes/compiler">
- <fileset dir="${src.dir}/compiler">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.css"/>
- <include name="**/*.html"/>
- <include name="**/*.properties"/>
- <include name="**/*.swf"/>
- <include name="**/*.png"/>
- <include name="**/*.gif"/>
- <include name="**/*.txt"/>
- </fileset>
- </copy>
- <touch file="${build-quick.dir}/compiler.complete" verbose="no"/>
- <stopwatch name="quick.comp.timer" action="total"/>
- </target>
+ <path id="quick.compiler.build.path">
+ <path refid="quick.reflect.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/msil"/>
+ <path refid="asm.classpath"/>
+ <path refid="fjbg.classpath"/>
+ <pathelement location="${jline.jar}"/>
+ </path>
- <target name="quick.pre-plugins" depends="quick.comp">
- <uptodate property="quick.plugins.available" targetfile="${build-quick.dir}/plugins.complete">
- <srcfiles dir="${src.dir}/continuations"/>
- </uptodate>
- </target>
+ <path id="quick.swing.build.path">
+ <path refid="quick.library.build.path"/>
+ <path refid="quick.actors.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/swing"/>
+ </path>
- <target name="quick.plugins" depends="quick.pre-plugins" unless="quick.plugins.available">
- <stopwatch name="quick.plugins.timer"/>
- <mkdir dir="${build-quick.dir}/classes/continuations-plugin"/>
- <scalacfork
- destdir="${build-quick.dir}/classes/continuations-plugin"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/continuations/plugin"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <pathelement location="${build-quick.dir}/classes/continuations-plugin"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- </compilationpath>
- </scalacfork>
- <copy
- file="${src.dir}/continuations/plugin/scalac-plugin.xml"
- todir="${build-quick.dir}/classes/continuations-plugin"/>
- <!-- not very nice to create jar here but needed to load plugin -->
- <mkdir dir="${build-quick.dir}/misc/scala-devel/plugins"/>
- <jar destfile="${build-quick.dir}/misc/scala-devel/plugins/continuations.jar">
- <fileset dir="${build-quick.dir}/classes/continuations-plugin"/>
- </jar>
- <!-- might split off library part into its own ant target -->
- <scalacfork
- destdir="${build-quick.dir}/classes/library"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick} -Xpluginsdir ${build-quick.dir}/misc/scala-devel/plugins -Xplugin-require:continuations -P:continuations:enable"
- srcdir="${src.dir}/continuations/library"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
- </scalacfork>
- <touch file="${build-quick.dir}/plugins.complete" verbose="no"/>
- <stopwatch name="quick.plugins.timer" action="total"/>
- </target>
-
- <target name="quick.scalacheck" depends="quick.plugins">
- <mkdir dir="${build-quick.dir}/classes/scalacheck"/>
- <scalacfork
- destdir="${build-quick.dir}/classes/scalacheck"
- compilerpathref="locker.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/scalacheck"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/scalacheck"/>
- </compilationpath>
- </scalacfork>
- </target>
-
- <target name="quick.pre-scalap" depends="quick.scalacheck">
- <uptodate property="quick.scalap.available" targetfile="${build-quick.dir}/scalap.complete">
- <srcfiles dir="${src.dir}/scalap"/>
- </uptodate>
- </target>
+ <path id="quick.plugins.build.path">
+ <path refid="quick.compiler.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/continuations-plugin"/>
+ </path>
- <target name="quick.scalap" depends="quick.pre-scalap" unless="quick.scalap.available">
- <stopwatch name="quick.scalap.timer"/>
- <mkdir dir="${build-quick.dir}/classes/scalap"/>
- <scalacfork
- destdir="${build-quick.dir}/classes/scalap"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/scalap"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <pathelement location="${build-quick.dir}/classes/scalap"/>
- <pathelement location="${build-quick.dir}/classes/partest"/>
- <pathelement location="${ant.jar}"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- </compilationpath>
- </scalacfork>
- <touch file="${build-quick.dir}/scalap.complete" verbose="no"/>
- <stopwatch name="quick.scalap.timer" action="total"/>
- </target>
+ <path id="quick.scalacheck.build.path">
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${build-quick.dir}/classes/actors"/>
+ <pathelement location="${build-quick.dir}/classes/scalacheck"/>
+ </path>
- <target name="quick.pre-partest" depends="quick.scalap">
- <uptodate property="quick.partest.available" targetfile="${build-quick.dir}/partest.complete">
- <srcfiles dir="${src.dir}/partest"/>
- </uptodate>
- </target>
-
- <target name="quick.partest" depends="quick.pre-partest" unless="quick.partest.available">
- <stopwatch name="quick.partest.timer"/>
- <mkdir dir="${build-quick.dir}/classes/partest"/>
- <javac
- srcdir="${src.dir}/partest"
- destdir="${build-quick.dir}/classes/partest"
- target="1.5" source="1.5">
- <classpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <pathelement location="${build-quick.dir}/classes/scalap"/>
- <pathelement location="${build-quick.dir}/classes/partest"/>
- </classpath>
- <include name="**/*.java"/>
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-quick.dir}/classes/partest"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/partest"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <pathelement location="${build-quick.dir}/classes/scalap"/>
- <pathelement location="${build-quick.dir}/classes/partest"/>
- <pathelement location="${ant.jar}"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- <pathelement location="${scalacheck.jar}"/>
- </compilationpath>
- </scalacfork>
- <propertyfile file="${build-quick.dir}/classes/partest/partest.properties">
- <entry key="version.number" value="${partest.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-quick.dir}/classes/partest">
- <fileset dir="${src.dir}/partest">
- <include name="**/*.xml"/>
- </fileset>
- </copy>
- <touch file="${build-quick.dir}/partest.complete" verbose="no"/>
- <stopwatch name="quick.partest.timer" action="total"/>
- </target>
+ <path id="quick.scalap.build.path">
+ <path refid="quick.compiler.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/scalap"/>
+ <pathelement location="${build-quick.dir}/classes/partest"/>
+ </path>
- <target name="quick.pre-bin" depends="quick.partest">
- <condition property="quick.bin.available">
- <isset property="quick.comp.available"/>
- </condition>
- </target>
+ <path id="quick.partest.build.path">
+ <path refid="quick.scalap.build.path"/>
+ <path refid="partest.extras.classpath"/>
+ <pathelement location="${scalacheck.jar}"/>
+ </path>
- <target name="quick.bin" depends="quick.pre-bin" unless="quick.bin.available">
- <path id="quick.bin.classpath">
+ <path id="quick.bin.tool.path">
<pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${build-quick.dir}/classes/actors"/>
+ <pathelement location="${build-quick.dir}/classes/reflect"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/msil"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
+ <pathelement location="${build-quick.dir}/classes/continuations-library"/>
+ <pathelement location="${jline.jar}"/>
+ <path refid="asm.classpath"/>
+ <path refid="forkjoin.classpath"/>
<path refid="aux.libs"/>
+ </path>
+
+ <!-- PACK -->
+ <!-- also used for docs.* targets TODO: use separate paths for those -->
+ <path id="pack.compiler.path">
+ <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scalap.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
+ <pathelement location="${ant.jar}"/>
<pathelement location="${jline.jar}"/>
+ <path refid="partest.extras.classpath"/>
+ <path refid="aux.libs"/>
</path>
- <taskdef name="quick-bin" classname="scala.tools.ant.ScalaTool" classpathref="quick.bin.classpath"/>
- <mkdir dir="${build-quick.dir}/bin"/>
- <quick-bin
- file="${build-quick.dir}/bin/scala"
- class="scala.tools.nsc.MainGenericRunner"
- javaFlags="${java.flags}"
- classpathref="quick.bin.classpath"/>
- <quick-bin
- file="${build-quick.dir}/bin/scalac"
- class="scala.tools.nsc.Main"
- javaFlags="${java.flags}"
- classpathref="quick.bin.classpath"/>
- <quick-bin
- file="${build-quick.dir}/bin/scaladoc"
- class="scala.tools.nsc.ScalaDoc"
- javaFlags="${java.flags}"
- classpathref="quick.bin.classpath"/>
- <quick-bin
- file="${build-quick.dir}/bin/fsc"
- class="scala.tools.nsc.CompileClient"
- javaFlags="${java.flags}"
- classpathref="quick.bin.classpath"/>
- <quick-bin
- file="${build-quick.dir}/bin/scalap"
- class="scala.tools.scalap.Main"
- javaFlags="${java.flags}"
- classpathref="quick.bin.classpath"/>
- <chmod perm="ugo+rx" file="${build-quick.dir}/bin/scala"/>
- <chmod perm="ugo+rx" file="${build-quick.dir}/bin/scalac"/>
- <chmod perm="ugo+rx" file="${build-quick.dir}/bin/scaladoc"/>
- <chmod perm="ugo+rx" file="${build-quick.dir}/bin/fsc"/>
- <chmod perm="ugo+rx" file="${build-quick.dir}/bin/scalap"/>
- <touch file="${build-quick.dir}/bin.complete" verbose="no"/>
- </target>
-
- <target name="quick.done" depends="quick.bin">
- <path id="quick.classpath">
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
+
+ <path id="pack.bin.tool.path">
+ <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scalap.jar"/>
+ <pathelement location="${build-pack.dir}/lib/jline.jar"/>
<path refid="aux.libs"/>
</path>
- </target>
- <target name="quick.clean" depends="libs.clean">
- <delete dir="${build-quick.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </target>
+ <path id="pack.library.files">
+ <fileset dir="${build-quick.dir}/classes/library"/>
+ <fileset dir="${build-quick.dir}/classes/continuations-library"/>
+ <fileset dir="${forkjoin-classes}"/>
+ </path>
-<!-- ===========================================================================
-PACKED QUICK BUILD (PACK)
-============================================================================ -->
-
- <target name="pack.start" depends="quick.done"/>
-
- <target name="pack.pre-lib" depends="pack.start">
- <uptodate
- property="pack.lib.available"
- targetfile="${build-pack.dir}/lib/scala-library.jar"
- srcfile="${build-quick.dir}/library.complete"/>
- </target>
+ <path id="pack.actors.files">
+ <fileset dir="${build-quick.dir}/classes/actors"/>
+ </path>
- <target name="pack.lib" depends="pack.pre-lib" unless="pack.lib.available">
- <mkdir dir="${build-pack.dir}/lib"/>
- <jar destfile="${build-pack.dir}/lib/scala-library.jar">
- <fileset dir="${build-quick.dir}/classes/library">
- <exclude name="scala/dbc/**"/>
- <exclude name="scala/swing/**"/>
- </fileset>
- <zipfileset dirmode="755" filemode="644" src="${forkjoin.jar}"/>
- </jar>
- <jar destfile="${build-pack.dir}/lib/scala-dbc.jar">
- <fileset dir="${build-quick.dir}/classes/library">
- <include name="scala/dbc/**"/>
- </fileset>
- </jar>
- <jar destfile="${build-pack.dir}/lib/scala-swing.jar">
- <fileset dir="${build-quick.dir}/classes/library">
- <include name="scala/swing/**"/>
- </fileset>
- <fileset dir="${src.dir}/swing">
- <include name="scala/swing/test/images/**"/>
+ <path id="pack.compiler.files">
+ <fileset dir="${build-quick.dir}/classes/compiler"/>
+ <fileset dir="${build-quick.dir}/classes/msil"/>
+ <fileset dir="${asm-classes}"/>
+ <fileset dir="${fjbg-classes}"/>
+ </path>
+
+ <path id="pack.swing.files"> <fileset dir="${build-quick.dir}/classes/swing"/> </path>
+ <path id="pack.reflect.files"> <fileset dir="${build-quick.dir}/classes/reflect"/> </path>
+ <path id="pack.plugins.files"> <fileset dir="${build-quick.dir}/classes/continuations-plugin"/> </path>
+ <path id="pack.scalacheck.files"> <fileset dir="${build-quick.dir}/classes/scalacheck"/> </path>
+ <path id="pack.scalap.files"> <fileset dir="${build-quick.dir}/classes/scalap"/>
+ <fileset file="${src.dir}/scalap/decoder.properties"/> </path>
+
+ <path id="pack.partest.files">
+ <fileset dir="${build-quick.dir}/classes/partest">
+ <exclude name="scala/tools/partest/javaagent/**"/>
</fileset>
- </jar>
- </target>
-
- <target name="pack.pre-comp" depends="pack.lib">
- <uptodate
- property="pack.comp.available"
- targetfile="${build-pack.dir}/lib/scala-compiler.jar"
- srcfile="${build-quick.dir}/compiler.complete"/>
- </target>
+ </path>
- <target name="pack.comp" depends="pack.pre-comp" unless="pack.comp.available">
- <mkdir dir="${build-pack.dir}/META-INF"/>
- <copy file="META-INF/MANIFEST.MF" toDir="${build-pack.dir}/META-INF"/>
- <manifest file="${build-pack.dir}/META-INF/MANIFEST.MF" mode="update">
- <attribute name="Bundle-Version" value="${version.number}"/>
- </manifest>
- <mkdir dir="${build-pack.dir}/lib"/>
- <jar destfile="${build-pack.dir}/lib/scala-compiler.jar" manifest="${build-pack.dir}/META-INF/MANIFEST.MF">
- <fileset dir="${build-quick.dir}/classes/compiler"/>
- <!-- filemode / dirmode: see trac ticket #1294 -->
- <zipfileset dirmode="755" filemode="644" src="${fjbg.jar}"/>
- <zipfileset dirmode="755" filemode="644" src="${msil.jar}"/>
- </jar>
- <copy file="${jline.jar}" toDir="${build-pack.dir}/lib"/>
- <copy todir="${build-pack.dir}/lib">
- <fileset dir="${lib-extra.dir}">
- <include name="**/*.jar"/>
+ <path id="pack.partest-javaagent.files">
+ <fileset dir="${build-quick.dir}/classes/partest">
+ <include name="scala/tools/partest/javaagent/**"/>
</fileset>
- </copy>
- </target>
-
- <target name="pack.pre-plugins" depends="pack.comp">
- <uptodate
- property="pack.plugins.available"
- targetfile="${build-pack.dir}/misc/scala-devel/plugins/continuations.jar"
- srcfile="${build-quick.dir}/plugins.complete"/>
- </target>
+ </path>
- <target name="pack.plugins" depends="pack.pre-plugins" unless="pack.plugins.available">
- <mkdir dir="${build-pack.dir}/misc/scala-devel/plugins"/>
- <jar destfile="${build-pack.dir}/misc/scala-devel/plugins/continuations.jar">
- <fileset dir="${build-quick.dir}/classes/continuations-plugin"/>
- </jar>
- </target>
-
- <target name="pack.scalacheck" depends="pack.plugins">
- <jar destfile="${build-pack.dir}/lib/scalacheck.jar">
- <fileset dir="${build-quick.dir}/classes/scalacheck"/>
- </jar>
- </target>
-
- <target name="pack.pre-partest" depends="pack.scalacheck">
- <uptodate
- property="pack.partest.available"
- targetfile="${build-pack.dir}/lib/scala-partest.jar"
- srcfile="${build-quick.dir}/partest.complete"/>
- </target>
+ <!-- STRAP -->
+ <path id="strap.library.build.path">
+ <pathelement location="${build-strap.dir}/classes/library"/>
+ <path refid="forkjoin.classpath"/>
+ <path refid="aux.libs"/>
+ </path>
- <target name="pack.partest" depends="pack.pre-partest" unless="pack.partest.available">
- <mkdir dir="${build-pack.dir}/lib"/>
- <jar destfile="${build-pack.dir}/lib/scala-partest.jar">
- <fileset dir="${build-quick.dir}/classes/partest"/>
- </jar>
- </target>
-
- <target name="pack.pre-scalap" depends="pack.partest">
- <uptodate
- property="pack.scalap.available"
- targetfile="${build-pack.dir}/lib/scalap.jar"
- srcfile="${build-quick.dir}/scalap.complete"/>
- </target>
+ <path id="strap.msil.build.path">
+ <path refid="strap.compiler.build.path"/>
+ <pathelement location="${build-strap.dir}/classes/msil"/>
+ </path>
- <target name="pack.scalap" depends="pack.pre-scalap" unless="pack.scalap.available">
- <mkdir dir="${build-pack.dir}/lib"/>
- <jar destfile="${build-pack.dir}/lib/scalap.jar">
- <fileset dir="${build-quick.dir}/classes/scalap"/>
- <fileset file="${src.dir}/scalap/decoder.properties"/>
- </jar>
- </target>
-
- <target name="pack.pre-bin" depends="pack.scalap">
- <uptodate
- property="pack.bin.available"
- srcfile="${build-pack.dir}/lib/scala-compiler.jar"
- targetfile="${build-pack.dir}/bin.complete"/>
- </target>
+ <path id="strap.reflect.build.path">
+ <path refid="strap.library.build.path"/>
+ <pathelement location="${build-strap.dir}/classes/reflect"/>
+ </path>
- <target name="pack.bin" depends="pack.pre-bin" unless="pack.bin.available">
- <taskdef name="pack-bin" classname="scala.tools.ant.ScalaTool">
- <classpath>
- <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
- <pathelement location="${build-pack.dir}/lib/jline.jar"/>
- </classpath>
- </taskdef>
- <mkdir dir="${build-pack.dir}/bin"/>
- <pack-bin
- file="${build-pack.dir}/bin/scala"
- class="scala.tools.nsc.MainGenericRunner"
- javaFlags="${java.flags}"/>
- <pack-bin
- file="${build-pack.dir}/bin/scalac"
- class="scala.tools.nsc.Main"
- javaFlags="${java.flags}"/>
- <pack-bin
- file="${build-pack.dir}/bin/scaladoc"
- class="scala.tools.nsc.ScalaDoc"
- javaFlags="${java.flags}"/>
- <pack-bin
- file="${build-pack.dir}/bin/fsc"
- class="scala.tools.nsc.CompileClient"
- javaFlags="${java.flags}"/>
- <pack-bin
- file="${build-pack.dir}/bin/scalap"
- class="scala.tools.scalap.Main"
- javaFlags="${java.flags}"/>
- <chmod perm="ugo+rx" file="${build-pack.dir}/bin/scala"/>
- <chmod perm="ugo+rx" file="${build-pack.dir}/bin/scalac"/>
- <chmod perm="ugo+rx" file="${build-pack.dir}/bin/scaladoc"/>
- <chmod perm="ugo+rx" file="${build-pack.dir}/bin/fsc"/>
- <chmod perm="ugo+rx" file="${build-pack.dir}/bin/scalap"/>
- <touch file="${build-pack.dir}/bin.complete" verbose="no"/>
- </target>
-
- <target name="pack.done" depends="pack.bin">
- <path id="pack.classpath">
- <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
- <pathelement location="${build-pack.dir}/lib/scalap.jar"/>
- <pathelement location="${ant.jar}"/>
+ <path id="strap.compiler.build.path">
+ <path refid="strap.reflect.build.path"/>
+ <pathelement location="${build-strap.dir}/classes/compiler"/>
+ <pathelement location="${build-strap.dir}/classes/msil"/>
+ <path refid="asm.classpath"/>
+ <path refid="fjbg.classpath"/>
<pathelement location="${jline.jar}"/>
- <path refid="lib.extra"/>
</path>
- <taskdef resource="scala/tools/ant/antlib.xml" classpathref="pack.classpath"/>
- <taskdef resource="scala/tools/partest/antlib.xml" classpathref="pack.classpath"/>
- </target>
- <target name="pack.clean" depends="strap.clean">
- <delete dir="${build-pack.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
+ <!-- MISC -->
+ <path id="sbt.compile.build.path">
+ <path refid="quick.compiler.build.path"/>
+ <pathelement location="${sbt.interface.jar}"/>
+ </path>
+
+ <path id="manual.classpath">
+ <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
+ <pathelement location="${build.dir}/manmaker/classes"/>
+ </path>
+
+ <path id="partest.classpath">
+ <path refid="pack.compiler.path"/>
+ <path refid="partest.extras.classpath"/>
+ </path>
+
+ <path id="partest.build.path">
+ <path refid="pack.compiler.path"/>
+ <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
+ <pathelement location="${pack.dir}/lib/scala-swing.jar"/> <!-- TODO - segregate swing tests (there can't be many) -->
+ </path>
+
+ <path id="test.junit.compiler.build.path">
+ <pathelement location="${test.junit.classes}"/>
+ <path refid="quick.compiler.build.path"/>
+ <path refid="junit.classpath"/>
+ </path>
+
+ <path id="test.osgi.compiler.build.path">
+ <pathelement location="${test.osgi.classes}"/>
+ <pathelement location="${build-osgi.dir}/org.scala-lang.scala-library.jar"/>
+ <pathelement location="${build-osgi.dir}/org.scala-lang.scala-reflect.jar"/>
+ <pathelement location="${build-osgi.dir}/org.scala-lang.scala-compiler.jar"/>
+ <pathelement location="${build-osgi.dir}/org.scala-lang.scala-actors.jar"/>
+ <path refid="pax.exam.classpath"/>
+ <path refid="forkjoin.classpath"/>
+ </path>
+
+ <path id="palo.bin.tool.path">
+ <pathelement location="${build-palo.dir}/lib/scala-library.jar"/>
+ <pathelement location="${build-palo.dir}/lib/scala-reflect.jar"/>
+ <pathelement location="${build-palo.dir}/lib/scala-compiler.jar"/>
+ <pathelement location="${build-palo.dir}/lib/jline.jar"/>
+ </path>
+
+ <path id="test.positions.sub.build.path" path="${build-quick.dir}/classes/library"/>
+
+ <!-- TODO: consolidate *.includes -->
+ <patternset id="partest.includes">
+ <include name="**/*.xml"/>
+ </patternset>
+
+ <patternset id="lib.includes">
+ <include name="**/*.tmpl"/>
+ <include name="**/*.xml"/>
+ <include name="**/*.js"/>
+ <include name="**/*.css"/>
+ </patternset>
+
+ <patternset id="lib.rootdoc.includes">
+ <include name="**/*.tmpl"/>
+ <include name="**/*.xml"/>
+ <include name="**/*.js"/>
+ <include name="**/*.css"/>
+ <include name="rootdoc.txt"/>
+ </patternset>
+
+ <patternset id="comp.includes">
+ <include name="**/*.tmpl"/>
+ <include name="**/*.xml"/>
+ <include name="**/*.js"/>
+ <include name="**/*.css"/>
+ <include name="**/*.html"/>
+ <include name="**/*.properties"/>
+ <include name="**/*.swf"/>
+ <include name="**/*.png"/>
+ <include name="**/*.gif"/>
+ <include name="**/*.txt"/>
+ </patternset>
+
+ <taskdef resource="scala/tools/ant/sabbus/antlib.xml" classpathref="starr.compiler.path"/>
</target>
<!-- ===========================================================================
-BOOTSTRAPPING BUILD (STRAP)
+ CLEANLINESS
+=============================================================================-->
+ <target name="libs.clean"> <clean build="libs"/> <clean build="asm"/> </target>
+ <target name="quick.clean" depends="libs.clean"> <clean build="quick"/> <clean build="pack"/> <clean build="strap"/> </target>
+ <target name="palo.clean" depends="quick.clean"> <clean build="palo"/> </target>
+ <target name="locker.clean" depends="palo.clean"> <clean build="locker"/> </target>
+
+ <target name="docs.clean"> <clean build="docs"/> <delete dir="${build.dir}/manmaker" includeemptydirs="yes" quiet="yes" failonerror="no"/> </target>
+ <target name="dist.clean"> <delete dir="${dists.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/> </target>
+
+ <target name="all.clean" depends="locker.clean, docs.clean"> <clean build="sbt"/> <clean build="osgi"/> </target>
+
+ <!-- Used by the scala-installer script -->
+ <target name="allallclean" depends="all.clean, dist.clean"/>
+
+ <macrodef name="clean">
+ <attribute name="build"/>
+ <sequential>
+ <delete dir="${build-@{build}.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
+ </sequential>
+ </macrodef>
+
+<!-- ===========================================================================
+ LOCAL DEPENDENCIES
============================================================================ -->
-
- <target name="strap.start" depends="pack.done"/>
-
- <target name="strap.pre-lib" depends="strap.start">
- <uptodate property="strap.lib.available" targetfile="${build-strap.dir}/library.complete">
- <srcfiles dir="${src.dir}">
- <include name="library/**"/>
- <include name="dbc/**"/>
- <include name="actors/**"/>
- <include name="swing/**"/>
- </srcfiles>
- </uptodate>
- </target>
+ <macrodef name="simple-javac" >
+ <attribute name="project"/> <!-- project: fjbg/asm/forkjoin -->
+ <attribute name="args" default=""/>
+ <attribute name="jar" default="yes"/>
+ <sequential>
+ <uptodate property="@{project}.available" targetfile="${build-libs.dir}/@{project}.complete">
+ <srcfiles dir="${src.dir}/@{project}"/></uptodate>
+ <if><not><isset property="@{project}.available"/></not><then>
+ <stopwatch name="@{project}.timer"/>
+ <mkdir dir="${@{project}-classes}"/>
+ <javac
+ debug="true"
+ srcdir="${src.dir}/@{project}"
+ destdir="${@{project}-classes}"
+ classpath="${@{project}-classes}"
+ includes="**/*.java"
+ target="1.6" source="1.5"
+ compiler="javac1.6">
+ <compilerarg line="${javac.args} @{args}"/>
+ </javac>
+ <if><equals arg1="@{jar}" arg2="yes"/><then>
+ <jar whenmanifestonly="fail" destfile="${build-libs.dir}/@{project}.jar" basedir="${@{project}-classes}"/></then></if>
+ <stopwatch name="@{project}.timer" action="total"/>
+ <mkdir dir="${build-libs.dir}"/>
+ <touch file="${build-libs.dir}/@{project}.complete" verbose="no"/>
+ </then></if>
+ </sequential>
+ </macrodef>
+
+ <target name="asm.done" depends="init"> <simple-javac project="asm" jar="no"/> </target>
+ <target name="fjbg.done" depends="init"> <simple-javac project="fjbg"/> </target>
+ <target name="forkjoin.done" depends="init"> <simple-javac project="forkjoin" args="-XDignore.symbol.file"/></target>
- <target name="strap.lib" depends="strap.pre-lib" unless="strap.lib.available">
- <stopwatch name="strap.lib.timer"/>
- <mkdir dir="${build-strap.dir}/classes/library"/>
- <javac
- srcdir="${src.dir}/library"
- destdir="${build-strap.dir}/classes/library"
- classpath="${build-strap.dir}/classes/library"
- includes="**/*.java"
- target="1.5" source="1.5">
- <compilerarg line="${javac.args}"/>
- </javac>
- <javac
- srcdir="${src.dir}/actors"
- destdir="${build-strap.dir}/classes/library"
- classpath="${build-strap.dir}/classes/library"
- includes="**/*.java"
- target="1.5" source="1.5">
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- srcpath="${src.dir}/library"
- params="${scalac.args.all}"
- srcdir="${src.dir}/library"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- </scalacfork>
- <scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/actors"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- </scalacfork>
- <scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/dbc"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- </scalacfork>
- <scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/swing"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- </scalacfork>
- <propertyfile file="${build-strap.dir}/classes/library/library.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-strap.dir}/classes/library">
- <fileset dir="${src.dir}/library">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.css"/>
- </fileset>
- </copy>
- <touch file="${build-strap.dir}/library.complete" verbose="no"/>
- <stopwatch name="strap.lib.timer" action="total"/>
- </target>
-
- <target name="strap.pre-comp" depends="strap.lib">
- <uptodate property="strap.comp.available" targetfile="${build-strap.dir}/compiler.complete">
- <srcfiles dir="${src.dir}/compiler"/>
- </uptodate>
- </target>
+<!-- ===========================================================================
+ STAGED COMPILATION MACROS
+============================================================================ -->
+ <macrodef name="staged-javac" >
+ <attribute name="stage"/> <!-- current stage (locker, quick, strap) -->
+ <attribute name="project"/> <!-- project: library/reflect/compiler/actors -->
+ <attribute name="destproject" default="@{project}"/> <!-- overrides the output directory; used when building multiple projects into the same directory-->
+ <attribute name="args" default=""/>
+ <attribute name="excludes" default=""/>
+
+ <sequential>
+ <javac
+ debug="true"
+ srcdir="${src.dir}/@{project}"
+ destdir="${build-@{stage}.dir}/classes/@{destproject}"
+ includes="**/*.java"
+ excludes="@{excludes}"
+ target="1.6" source="1.5">
+ <compilerarg line="${javac.args} @{args}"/>
+ <classpath refid="@{stage}.@{destproject}.build.path"/>
+ </javac>
+ </sequential>
+ </macrodef>
+
+ <!-- Zinc assumes a one-to-one correspondence of output folder to set of source files.
+ When compiling different sets of source files in multiple compilations to the same output directory,
+ Zinc thinks source files that appeared in an earlier compilation but are absent in the current one,
+ were deleted and thus deletes the corresponding output files.
+
+ Note that zinc also requires each arg to scalac to be prefixed by -S.
+ -->
+ <macrodef name="zinc">
+ <attribute name="compilerpathref" />
+ <attribute name="destdir" />
+ <attribute name="srcdir" />
+ <attribute name="srcpath" default="NOT SET"/> <!-- needed to compile the library, "NOT SET" is just a convention to denote an optional attribute -->
+ <attribute name="buildpathref" />
+ <attribute name="params" default="" />
+ <attribute name="java-excludes" default=""/>
+
+ <sequential>
+ <local name="sources"/>
+ <pathconvert pathsep=" " property="sources">
+ <fileset dir="@{srcdir}">
+ <include name="**/*.java"/>
+ <include name="**/*.scala"/>
+ <exclude name="@{java-excludes}"/>
+ </fileset>
+ </pathconvert>
+ <local name="args"/>
+ <local name="sargs"/>
+ <if><not><equals arg1="@{srcpath}" arg2="NOT SET"/></not><then>
+ <property name="args" value="@{params} -sourcepath @{srcpath}"/>
+ </then></if>
+ <property name="args" value="@{params}"/> <!-- default -->
+
+ <!-- HACK: prefix scalac args by -S -->
+ <script language="javascript">
+ project.setProperty("sargs", project.getProperty("args").trim().replaceAll(" ", " -S"));
+ </script>
+
+ <exec osfamily="unix" executable="tools/zinc" failifexecutionfails="true" failonerror="true">
+ <arg line="-nailed -compile-order JavaThenScala -scala-path ${ant.refid:@{compilerpathref}} -d @{destdir} -classpath ${toString:@{buildpathref}} ${sargs} ${sources}"/>
+ </exec>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="staged-scalac" >
+ <attribute name="with"/> <!-- will use path `@{with}.compiler.path` to locate scalac -->
+ <attribute name="stage"/> <!-- current stage (locker, quick, strap) -->
+ <attribute name="project"/> <!-- project: library/reflect/compiler/actors -->
+ <attribute name="srcpath" default="NOT SET"/> <!-- needed to compile the library -->
+ <attribute name="args" default=""/> <!-- additional args -->
+ <attribute name="destproject" default="@{project}"/> <!-- overrides the output directory; used when building multiple projects into the same directory-->
+ <attribute name="srcdir" default="@{project}"/>
+ <attribute name="java-excludes" default=""/>
+
+ <sequential>
+ <!-- TODO: detect zinc anywhere on PATH
+ use zinc for the quick stage if it's available;
+ would use it for locker but something is iffy in sbt: get a class cast error on global phase -->
+ <if><and> <available file="tools/zinc"/>
+ <equals arg1="@{stage}" arg2="quick"/>
+ <not><equals arg1="@{project}" arg2="plugins"/></not> <!-- doesn't work in zinc because it requires the quick compiler, which isn't jarred up-->
+ </and><then>
+ <zinc taskname="Z.@{stage}.@{project}"
+ compilerpathref="@{with}.compiler.path"
+ destdir="${build-@{stage}.dir}/classes/@{destproject}"
+ srcdir="${src.dir}/@{srcdir}"
+ srcpath="@{srcpath}"
+ buildpathref="@{stage}.@{project}.build.path"
+ params="${scalac.args.@{stage}} @{args}"
+ java-excludes="@{java-excludes}"/></then>
+ <else>
+ <if><equals arg1="@{srcpath}" arg2="NOT SET"/><then>
+ <scalacfork taskname="@{stage}.@{project}"
+ jvmargs="${scalacfork.jvmargs}"
+ compilerpathref="@{with}.compiler.path"
+ destdir="${build-@{stage}.dir}/classes/@{destproject}"
+ srcdir="${src.dir}/@{srcdir}"
+ params="${scalac.args.@{stage}} @{args}">
+ <include name="**/*.scala"/>
+ <compilationpath refid="@{stage}.@{project}.build.path"/></scalacfork></then>
+ <else>
+ <scalacfork taskname="@{stage}.@{project}"
+ jvmargs="${scalacfork.jvmargs}"
+ compilerpathref="@{with}.compiler.path"
+ destdir="${build-@{stage}.dir}/classes/@{destproject}"
+ srcdir="${src.dir}/@{srcdir}"
+ srcpath="@{srcpath}"
+ params="${scalac.args.@{stage}} @{args}">
+ <include name="**/*.scala"/>
+ <compilationpath refid="@{stage}.@{project}.build.path"/></scalacfork></else>
+ </if>
+ </else></if>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="staged-uptodate" >
+ <attribute name="stage"/>
+ <attribute name="project"/>
+ <element name="check"/>
+ <element name="do"/>
+
+ <sequential>
+ <uptodate property="@{stage}.@{project}.available" targetfile="${build-@{stage}.dir}/@{project}.complete">
+ <check/>
+ </uptodate>
+ <if><not><isset property="@{stage}.@{project}.available"/></not><then>
+ <do/>
+ <touch file="${build-@{stage}.dir}/@{project}.complete" verbose="no"/>
+ </then></if>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="staged-build" >
+ <attribute name="with"/> <!-- will use path `@{with}.compiler.path` to locate scalac -->
+ <attribute name="stage"/> <!-- current stage (locker, quick, strap) -->
+ <attribute name="project"/> <!-- project: library/reflect/compiler/actors -->
+ <attribute name="srcpath" default="NOT SET"/> <!-- needed to compile the library -->
+ <attribute name="args" default=""/> <!-- additional args -->
+ <attribute name="includes" default="comp.includes"/>
+ <attribute name="java-excludes" default=""/>
+ <attribute name="version" default=""/> <!-- non-empty for partest and scaladoc: use @{version}.version.number in property file-->
+
+ <sequential>
+ <staged-uptodate stage="@{stage}" project="@{project}">
+ <check><srcfiles dir="${src.dir}/@{project}"/></check>
+ <do>
+ <stopwatch name="@{stage}.@{project}.timer"/>
+ <mkdir dir="${build-@{stage}.dir}/classes/@{project}"/>
+ <staged-javac stage="@{stage}" project="@{project}" excludes="@{java-excludes}"/> <!-- always compile with javac for simplicity and regularity; it's cheap -->
+ <staged-scalac with="@{with}" stage="@{stage}" project="@{project}" srcpath="@{srcpath}" args="@{args}" java-excludes="@{java-excludes}"/>
+ <if><equals arg1="@{version}" arg2=""/><then>
+ <propertyfile file = "${build-@{stage}.dir}/classes/@{project}/@{project}.properties">
+ <entry key = "version.number" value="${version.number}"/>
+ <entry key = "maven.version.number" value="${maven.version.number}"/>
+ <entry key = "osgi.version.number" value="${osgi.version.number}"/>
+ <entry key = "copyright.string" value="${copyright.string}"/>
+ </propertyfile>
+ </then><else>
+ <propertyfile file = "${build-@{stage}.dir}/classes/@{project}/@{project}.properties">
+ <entry key = "version.number" value="${@{version}.version.number}"/>
+ <entry key = "copyright.string" value="${copyright.string}"/>
+ </propertyfile>
+ </else></if>
+ <copy todir="${build-@{stage}.dir}/classes/@{project}">
+ <fileset dir="${src.dir}/@{project}">
+ <patternset refid="@{includes}"/>
+ </fileset>
+ </copy>
+ <stopwatch name="@{stage}.@{project}.timer" action="total"/>
+ </do>
+ </staged-uptodate>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="staged-bin">
+ <attribute name="stage"/>
+ <attribute name="classpathref" default="NOT SET"/>
+ <sequential>
+ <staged-uptodate stage="@{stage}" project="bin">
+ <check>
+ <srcfiles dir="${src.dir}">
+ <include name="compiler/scala/tools/ant/templates/**"/>
+ </srcfiles>
+ </check>
+ <do>
+ <taskdef name="mk-bin" classname="scala.tools.ant.ScalaTool" classpathref="@{stage}.bin.tool.path"/>
+ <mkdir dir="${build-@{stage}.dir}/bin"/>
+ <if><equals arg1="@{classpathref}" arg2="NOT SET"/><then>
+ <mk-bin file="${build-@{stage}.dir}/bin/scala" class="scala.tools.nsc.MainGenericRunner" javaFlags="${java.flags}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/scalac" class="scala.tools.nsc.Main" javaFlags="${java.flags}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/scaladoc" class="scala.tools.nsc.ScalaDoc" javaFlags="${java.flags}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/fsc" class="scala.tools.nsc.CompileClient" javaFlags="${java.flags}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/scalap" class="scala.tools.scalap.Main" javaFlags="${java.flags}"/>
+ </then><else>
+ <mk-bin file="${build-@{stage}.dir}/bin/scala" class="scala.tools.nsc.MainGenericRunner" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/scalac" class="scala.tools.nsc.Main" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/scaladoc" class="scala.tools.nsc.ScalaDoc" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/fsc" class="scala.tools.nsc.CompileClient" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/scalap" class="scala.tools.scalap.Main" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
+ </else></if>
+ <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scala"/>
+ <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scalac"/>
+ <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scaladoc"/>
+ <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/fsc"/>
+ <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scalap"/>
+ </do>
+ </staged-uptodate>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="staged-pack">
+ <attribute name="project"/>
+ <attribute name="targetdir" default="lib"/>
+ <attribute name="targetjar" default="scala-@{project}.jar"/>
+ <attribute name="destfile" default="${build-pack.dir}/@{targetdir}/@{targetjar}"/>
+ <attribute name="manifest" default=""/>
+ <element name="pre" optional="true"/>
+ <element name="jar-opts" optional="true"/>
+
+ <sequential>
+ <uptodate property="pack.@{project}.available" targetfile="@{destfile}">
+ <srcresources>
+ <resources refid="pack.@{project}.files"/>
+ <!-- <path><pathelement location="${build-quick.dir}/@{project}.complete"/></path> -->
+ </srcresources>
+ </uptodate>
+ <if><not><isset property="pack.@{project}.available"/></not><then>
+ <mkdir dir="${build-pack.dir}/@{targetdir}"/>
+ <pre/>
+
+ <if><not><equals arg1="@{manifest}" arg2=""/></not><then>
+ <jar whenmanifestonly="fail" destfile="@{destfile}" manifest="@{manifest}"> <!-- update="true" makes no difference on my machine, so starting from scratch-->
+ <jar-opts/>
+ <path refid="pack.@{project}.files"/>
+ </jar></then>
+ <else>
+ <jar whenmanifestonly="fail" destfile="@{destfile}">
+ <jar-opts/>
+ <path refid="pack.@{project}.files"/>
+ </jar>
+ </else></if>
+ </then></if>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="staged-docs">
+ <attribute name="project"/>
+ <attribute name="dir" default="@{project}"/>
+ <attribute name="title"/>
+ <attribute name="docroot" default="NOT SET"/>
+ <element name="includes" implicit="true"/>
+
+ <sequential>
+ <staged-uptodate stage="docs" project="@{project}">
+ <check><srcfiles dir="${src.dir}/@{dir}"/></check>
+ <do>
+ <stopwatch name="docs.@{project}.timer"/>
+ <mkdir dir="${build-docs.dir}/@{project}"/>
+ <if><equals arg1="@{docroot}" arg2="NOT SET"/><then>
+ <!-- TODO: introduce docs.@{project}.build.path for classpathref -->
+ <scaladoc
+ destdir="${build-docs.dir}/@{project}"
+ doctitle="@{title}"
+ docversion="${version.number}"
+ sourcepath="${src.dir}"
+ classpathref="pack.compiler.path"
+ srcdir="${src.dir}/@{dir}"
+ addparams="${scalac.args.all}"
+ implicits="on"
+ diagrams="on"
+ groups="on"
+ rawOutput="${scaladoc.raw.output}"
+ noPrefixes="${scaladoc.no.prefixes}">
+ <includes/>
+ </scaladoc>
+ </then><else>
+ <scaladoc
+ destdir="${build-docs.dir}/@{project}"
+ doctitle="@{title}"
+ docversion="${version.number}"
+ sourcepath="${src.dir}"
+ classpathref="pack.compiler.path"
+ srcdir="${src.dir}/@{dir}"
+ docRootContent="${src.dir}/@{project}/@{docroot}"
+ addparams="${scalac.args.all}"
+ implicits="on"
+ diagrams="on"
+ groups="on"
+ rawOutput="${scaladoc.raw.output}"
+ noPrefixes="${scaladoc.no.prefixes}">
+ <includes/>
+ </scaladoc>
+ </else></if>
+ <stopwatch name="docs.@{project}.timer" action="total"/>
+ </do>
+ </staged-uptodate>
+ </sequential>
+ </macrodef>
- <target name="strap.comp" depends="strap.pre-comp" unless="strap.comp.available">
- <stopwatch name="strap.comp.timer"/>
- <mkdir dir="${build-strap.dir}/classes/compiler"/>
- <scalacfork
- destdir="${build-strap.dir}/classes/compiler"
- compilerpathref="pack.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/compiler"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
- <propertyfile file="${build-strap.dir}/classes/compiler/compiler.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-strap.dir}/classes/compiler">
- <fileset dir="${src.dir}/compiler">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.css"/>
- <include name="**/*.html"/>
- <include name="**/*.properties"/>
- <include name="**/*.swf"/>
- <include name="**/*.png"/>
- <include name="**/*.gif"/>
- <include name="**/*.txt"/>
- </fileset>
- </copy>
- <touch file="${build-strap.dir}/compiler.complete" verbose="no"/>
- <stopwatch name="strap.comp.timer" action="total"/>
- </target>
+<!-- ===========================================================================
+ LOCAL REFERENCE BUILD (LOCKER)
+============================================================================ -->
+ <target name="locker.start" depends="asm.done, forkjoin.done, fjbg.done">
+ <condition property="locker.locked"><available file="${build-locker.dir}/locker.locked"/></condition></target>
- <target name="strap.pre-plugins" depends="strap.comp">
- <uptodate property="strap.plugins.available" targetfile="${build-strap.dir}/plugins.complete">
- <srcfiles dir="${src.dir}/continuations"/>
- </uptodate>
- </target>
+ <target name="locker.lib" depends="locker.start" unless="locker.locked">
+ <staged-build with="starr" stage="locker" project="library" srcpath="${src.dir}/library" includes="lib.includes"/></target>
- <target name="strap.plugins" depends="strap.pre-plugins" unless="strap.plugins.available">
- <stopwatch name="strap.plugins.timer"/>
- <mkdir dir="${build-strap.dir}/classes/continuations-plugin"/>
- <scalacfork
- destdir="${build-strap.dir}/classes/continuations-plugin"
- compilerpathref="pack.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/continuations/plugin"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/continuations-plugin"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- </compilationpath>
- </scalacfork>
- <copy
- file="${src.dir}/continuations/plugin/scalac-plugin.xml"
- todir="${build-strap.dir}/classes/continuations-plugin"/>
- <!-- not very nice to create jar here but needed to load plugin -->
- <mkdir dir="${build-strap.dir}/misc/scala-devel/plugins"/>
- <jar destfile="${build-strap.dir}/misc/scala-devel/plugins/continuations.jar">
- <fileset dir="${build-strap.dir}/classes/continuations-plugin"/>
- </jar>
- <!-- might split off library part into its own ant target -->
- <scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- params="${scalac.args.all} -Xpluginsdir ${build-strap.dir}/misc/scala-devel/plugins -Xplugin-require:continuations -P:continuations:enable"
- srcdir="${src.dir}/continuations/library"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- </scalacfork>
- <touch file="${build-strap.dir}/plugins.complete" verbose="no"/>
- <stopwatch name="strap.plugins.timer" action="total"/>
- </target>
-
- <target name="strap.scalacheck" depends="strap.plugins">
- <mkdir dir="${build-strap.dir}/classes/scalacheck"/>
- <scalacfork
- destdir="${build-strap.dir}/classes/scalacheck"
- compilerpathref="pack.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/scalacheck"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- </compilationpath>
- </scalacfork>
- </target>
+ <target name="locker.actors" depends="locker.lib" unless="locker.locked">
+ <staged-build with="starr" stage="locker" project="actors"/> </target>
- <target name="strap.pre-scalap" depends="strap.scalacheck">
- <uptodate property="strap.scalap.available" targetfile="${build-strap.dir}/scalap.complete">
- <srcfiles dir="${src.dir}/scalap"/>
- </uptodate>
- </target>
+ <target name="locker.msil" depends="locker.lib" unless="locker.locked">
+ <staged-build with="starr" stage="locker" project="msil" java-excludes="**/tests/**"/> </target>
- <target name="strap.scalap" depends="strap.pre-scalap" unless="strap.scalap.available">
- <stopwatch name="strap.scalap.timer"/>
- <mkdir dir="${build-strap.dir}/classes/scalap"/>
- <scalacfork
- destdir="${build-strap.dir}/classes/scalap"
- compilerpathref="pack.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/scalap"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/scalap"/>
- <pathelement location="${build-strap.dir}/classes/partest"/>
- <pathelement location="${ant.jar}"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- </compilationpath>
- </scalacfork>
- <touch file="${build-strap.dir}/scalap.complete" verbose="no"/>
- <stopwatch name="strap.scalap.timer" action="total"/>
- </target>
-
- <target name="strap.pre-partest" depends="strap.scalap">
- <uptodate property="strap.partest.available" targetfile="${build-strap.dir}/partest.complete">
- <srcfiles dir="${src.dir}/partest"/>
- </uptodate>
- </target>
-
- <target name="strap.partest" depends="strap.pre-partest" unless="strap.partest.available">
- <stopwatch name="strap.partest.timer"/>
- <mkdir dir="${build-strap.dir}/classes/partest"/>
- <javac
- srcdir="${src.dir}/partest"
- destdir="${build-strap.dir}/classes/partest"
- target="1.5" source="1.5">
- <classpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/scalap"/>
- <pathelement location="${build-strap.dir}/classes/partest"/>
- </classpath>
- <include name="**/*.java"/>
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-strap.dir}/classes/partest"
- compilerpathref="pack.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/partest"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/scalap"/>
- <pathelement location="${build-strap.dir}/classes/partest"/>
- <pathelement location="${ant.jar}"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
- <pathelement location="${scalacheck.jar}"/>
- </compilationpath>
- </scalacfork>
- <copy todir="${build-strap.dir}/classes/partest">
- <fileset dir="${src.dir}/partest">
- <include name="**/*.xml"/>
- </fileset>
- </copy>
- <touch file="${build-strap.dir}/partest.complete" verbose="no"/>
- <stopwatch name="strap.partest.timer" action="total"/>
- </target>
+ <target name="locker.reflect" depends="locker.lib" unless="locker.locked">
+ <staged-build with="starr" stage="locker" project="reflect"/></target>
- <target name="strap.done" depends="strap.partest"/>
+ <target name="locker.comp" depends="locker.reflect, locker.msil" unless="locker.locked">
+ <staged-build with="starr" stage="locker" project="compiler"/></target>
- <target name="strap.clean">
- <delete dir="${build-strap.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
+ <target name="locker.done" depends="locker.comp">
+ <mkdir dir="${build-locker.dir}"/>
+ <touch file="${build-locker.dir}/locker.locked" verbose="no"/>
</target>
+ <target name="locker.unlock"> <delete file="${build-locker.dir}/locker.locked"/>
+ <delete file="${build-locker.dir}/*.complete"/></target>
<!-- ===========================================================================
-LIBRARIES (MSIL, FJBG maybe later)
+ QUICK BUILD (QUICK)
============================================================================ -->
-
- <target name="libs.start"/>
-
- <target name="libs.pre-forkjoin" depends="libs.start">
- <property name="java6.home" value="/home/linuxsoft/apps/java-1.6"/>
- <fail message="Compiling forkjoin.jar requires java 1.6. Please set the property `java6.home` in build.properties or using `-Djava6.home=/path/to/java6`">
- <condition><not>
- <available file="${java6.home}/bin/javac"/>
- </not></condition>
- </fail>
+ <target name="quick.start" depends="locker.done"/>
- <uptodate property="libs.forkjoin.available" targetfile="${build-libs.dir}/forkjoin.complete">
- <srcfiles dir="${src.dir}/forkjoin">
- <include name="**/*.java"/>
- <include name="**/*.scala"/>
- </srcfiles>
- </uptodate>
- </target>
-
- <target name="libs.forkjoin" depends="libs.pre-forkjoin" unless="libs.forkjoin.available">
- <mkdir dir="${build-libs.dir}/classes/forkjoin"/>
- <javac
- executable="${java6.home}/bin/javac"
- fork="yes"
- compiler="javac1.6"
- srcdir="${src.dir}/forkjoin"
- destdir="${build-libs.dir}/classes/forkjoin"
- classpath="${build-libs.dir}/classes/forkjoin"
- includes="**/*.java"
- debug="true"
- target="1.5" source="1.5">
- <compilerarg line="${javac.args}"/>
- </javac>
- <touch file="${build-libs.dir}/forkjoin.complete" verbose="no"/>
- </target>
-
- <target name="libs.pre-forkjoinpack" depends="libs.forkjoin">
- </target>
-
- <target name="libs.forkjoinpack" depends="libs.pre-forkjoinpack" unless="libs.forkjoinpack.available">
- <jar destfile="${build-libs.dir}/forkjoin.jar">
- <fileset dir="${build-libs.dir}/classes/forkjoin"/>
- </jar>
- </target>
-
- <target name="libs.pre-msil" depends="libs.start">
- <uptodate property="libs.msil.available" targetfile="${build-libs.dir}/msil.complete">
- <srcfiles dir="${src.dir}/msil">
- <include name="**/*.java"/>
- <include name="**/*.scala"/>
- </srcfiles>
- </uptodate>
- </target>
-
- <target name="libs.msil" depends="libs.pre-msil" unless="libs.msil.available">
- <mkdir dir="${build-libs.dir}/classes/msil"/>
- <javac
- srcdir="${src.dir}/msil"
- destdir="${build-libs.dir}/classes/msil"
- classpath="${build-libs.dir}/classes/msil"
- includes="**/*.java"
- excludes="**/tests/**"
- debug="true"
- target="1.5" source="1.4">
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-libs.dir}/classes/msil"
- compilerpathref="locker.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/msil"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-libs.dir}/classes/msil"/>
- </compilationpath>
- </scalacfork>
- <touch file="${build-libs.dir}/msil.complete" verbose="no"/>
- </target>
-
- <target name="libs.pre-msilpack" depends="libs.msil">
- </target>
-
- <target name="libs.msilpack" depends="libs.pre-msilpack" unless="libs.msilpack.available">
- <jar destfile="${build-libs.dir}/msil.jar">
- <fileset dir="${build-libs.dir}/classes/msil"/>
- </jar>
+ <target name="quick.lib" depends="quick.start">
+ <staged-build with="locker" stage="quick" project="library" srcpath="${src.dir}/library" includes="lib.rootdoc.includes"/></target>
+
+ <target name="quick.actors" depends="quick.lib">
+ <staged-build with="locker" stage="quick" project="actors"/> </target>
+
+ <target name="quick.msil" depends="quick.lib">
+ <staged-build with="locker" stage="quick" project="msil" java-excludes="**/tests/**"/> </target>
+
+ <target name="quick.reflect" depends="quick.lib">
+ <staged-build with="locker" stage="quick" project="reflect"/> </target>
+
+ <target name="quick.comp" depends="quick.reflect, quick.msil">
+ <staged-build with="locker" stage="quick" project="compiler"/> </target>
+
+ <target name="quick.scalacheck" depends="quick.actors, quick.lib">
+ <staged-build with="locker" stage="quick" project="scalacheck" args="-nowarn"/> </target>
+
+ <target name="quick.scalap" depends="quick.comp">
+ <staged-build with="locker" stage="quick" project="scalap"/> </target>
+
+ <target name="quick.partest" depends="quick.scalap, quick.comp, asm.done">
+ <staged-build with="locker" stage="quick" project="partest" version="partest"/> </target>
+
+ <target name="quick.swing" depends="quick.actors, quick.lib" if="has.java6">
+ <staged-build with="locker" stage="quick" project="swing"/> </target>
+
+ <target name="quick.plugins" depends="quick.comp">
+ <staged-uptodate stage="quick" project="plugins">
+ <check><srcfiles dir="${src.dir}/continuations"/></check>
+ <do>
+ <stopwatch name="quick.plugins.timer"/>
+
+ <mkdir dir="${build-quick.dir}/classes/continuations-plugin"/>
+ <staged-scalac with="locker" stage="quick" project="plugins" srcdir="continuations/plugin" destproject="continuations-plugin"/>
+ <copy
+ file="${src.dir}/continuations/plugin/scalac-plugin.xml"
+ todir="${build-quick.dir}/classes/continuations-plugin"/>
+
+ <!-- not very nice to create jar here but needed to load plugin -->
+ <mkdir dir="${build-quick.dir}/misc/scala-devel/plugins"/>
+ <jar whenmanifestonly="fail" destfile="${build-quick.dir}/misc/scala-devel/plugins/continuations.jar" basedir="${build-quick.dir}/classes/continuations-plugin"/>
+
+ <!-- might split off library part into its own ant target -->
+ <mkdir dir="${build-quick.dir}/classes/continuations-library"/>
+ <!-- TODO: must build with quick to avoid
+ [quick.plugins] error: java.lang.NoClassDefFoundError: scala/tools/nsc/transform/patmat/PatternMatching
+ [quick.plugins] at scala.tools.selectivecps.SelectiveCPSTransform.newTransformer(SelectiveCPSTransform.scala:29)
+
+ WHY OH WHY!? scala/tools/nsc/transform/patmat/PatternMatching should be on locker.compiler.path
+ -->
+ <staged-scalac with="quick" stage="quick" project="plugins"
+ srcdir="continuations/library" destproject="continuations-library"
+ args="-Xplugin-require:continuations -P:continuations:enable -Xpluginsdir ${build-quick.dir}/misc/scala-devel/plugins"/>
+
+ <stopwatch name="quick.plugins.timer" action="total"/>
+ </do>
+ </staged-uptodate>
</target>
-
- <target name="libs.pre-fjbg" depends="libs.start">
- <uptodate property="libs.fjbg.available" targetfile="${build-libs.dir}/fjbg.complete">
- <srcfiles dir="${src.dir}/fjbg">
- <include name="**/*.java"/>
- <include name="**/*.scala"/>
- </srcfiles>
- </uptodate>
+
+ <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.scalacheck, quick.scalap, quick.swing, quick.plugins, quick.partest">
+ <staged-bin stage="quick" classpathref="quick.bin.tool.path"/>
</target>
-
- <target name="libs.fjbg" depends="libs.pre-fjbg" unless="libs.fjbg.available">
- <mkdir dir="${build-libs.dir}/classes/fjbg"/>
- <javac
- srcdir="${src.dir}/fjbg"
- destdir="${build-libs.dir}/classes/fjbg"
- classpath="${build-libs.dir}/classes/fjbg"
- includes="**/*.java"
- debug="true"
- target="1.5" source="1.4">
- <compilerarg line="${javac.args}"/>
- </javac>
- <touch file="${build-libs.dir}/fjbg.complete" verbose="no"/>
+
+ <target name="quick.done" depends="quick.bin"/>
+ <target name="quick-opt" description="Optimized version of quick.done."> <optimized name="quick.done"/></target>
+
+
+<!-- ===========================================================================
+ PACKED QUICK BUILD (PACK)
+============================================================================ -->
+ <target name="pack.lib" depends="quick.lib, quick.plugins, forkjoin.done">
+ <staged-pack project="library"/></target>
+
+ <target name="pack.actors" depends="quick.lib"> <staged-pack project="actors"/> </target>
+ <target name="pack.swing" if="has.java6" depends="quick.swing"> <staged-pack project="swing"/> </target>
+ <target name="pack.reflect" depends="quick.reflect"> <staged-pack project="reflect"/> </target>
+
+ <target name="pack.comp" depends="quick.comp, asm.done">
+ <staged-pack project="compiler" manifest="${build-pack.dir}/META-INF/MANIFEST.MF">
+ <pre> <!-- TODO the files copied here do not influence actuality of this target (nor does the manifest) -->
+ <copy file="${jline.jar}" toDir="${build-pack.dir}/lib"/>
+ <copy todir="${build-pack.dir}/lib">
+ <fileset dir="${lib-extra.dir}">
+ <include name="**/*.jar"/>
+ </fileset>
+ </copy>
+ <mkdir dir="${build-pack.dir}/META-INF"/>
+ <copy file="${basedir}/META-INF/MANIFEST.MF" toDir="${build-pack.dir}/META-INF"/>
+ <manifest file="${build-pack.dir}/META-INF/MANIFEST.MF" mode="update">
+ <attribute name="Bundle-Version" value="${version.number}"/>
+ <attribute name="Class-Path" value="scala-reflect.jar scala-library.jar"/>
+ </manifest>
+ </pre>
+ <!-- script api is 2.11-only so far
+ <jar-opts>
+ <service type="javax.script.ScriptEngineFactory" provider="scala.tools.nsc.interpreter.IMain$Factory"/>
+ </jar-opts>
+ -->
+ </staged-pack>
+ </target>
+
+ <target name="pack.plugins" depends="quick.plugins"> <staged-pack project="plugins" targetdir="misc/scala-devel/plugins" targetjar="continuations.jar"/> </target>
+ <target name="pack.scalacheck" depends="quick.scalacheck"> <staged-pack project="scalacheck" targetjar="scalacheck.jar"/> </target>
+
+ <target name="pack.partest" depends="quick.partest">
+ <staged-pack project="partest"/>
+ <!-- TODO the manifest should influence actuality of this target -->
+ <staged-pack project="partest-javaagent" manifest="${src.dir}/partest/scala/tools/partest/javaagent/MANIFEST.MF"/>
+ </target>
+
+ <target name="pack.scalap" depends="quick.scalap"> <staged-pack project="scalap" targetjar="scalap.jar"/> </target>
+
+ <target name="pack.bin" depends="pack.comp, pack.lib, pack.actors, pack.partest, pack.plugins, pack.reflect, pack.scalacheck, pack.scalap, pack.swing">
+ <staged-bin stage="pack"/>
+ </target>
+
+ <!-- depend on quick.done so quick.bin is run when pack.done is -->
+ <target name="pack.done" depends="quick.done, pack.bin">
+ <!-- copy dependencies to build/pack/lib, it only takes a second so don't bother with uptodate checks -->
+ <copy todir="${build-pack.dir}/lib">
+ <resources refid="partest.extras.fileset"/>
+ <mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper"
+ from="${partest.extras.versions}" to="flatten"/>
+ </copy>
+
+ <taskdef resource="scala/tools/ant/antlib.xml" classpathref="pack.compiler.path"/>
+ <taskdef resource="scala/tools/partest/antlib.xml" classpathref="partest.classpath"/>
</target>
-
- <target name="libs.pre-fjbgpack" depends="libs.fjbg">
+
+
+<!-- ===========================================================================
+ BOOTSTRAPPING BUILD (STRAP)
+============================================================================ -->
+ <target name="strap.done" depends="pack.done">
+ <staged-build with="pack" stage="strap" project="library" srcpath="${src.dir}/library" includes="lib.rootdoc.includes"/>
+ <staged-build with="pack" stage="strap" project="msil" java-excludes="**/tests/**"/>
+ <staged-build with="pack" stage="strap" project="reflect"/>
+ <staged-build with="pack" stage="strap" project="compiler"/>
</target>
-
- <target name="libs.fjbgpack" depends="libs.pre-fjbgpack" unless="libs.fjbgpack.available">
- <jar destfile="${build-libs.dir}/fjbg.jar">
- <fileset dir="${build-libs.dir}/classes/fjbg"/>
+
+ <target name="strap-opt" description="Optimized version of strap.done."> <optimized name="strap.done"/></target>
+
+
+<!-- ===========================================================================
+ PACKED LOCKER BUILD (PALO)
+============================================================================ -->
+ <target name="palo.done" depends="locker.done">
+ <mkdir dir="${build-palo.dir}/lib"/>
+ <jar whenmanifestonly="fail" destfile="${build-palo.dir}/lib/scala-library.jar">
+ <fileset dir="${build-locker.dir}/classes/library"/>
+ <fileset dir="${forkjoin-classes}"/>
+ </jar>
+ <jar whenmanifestonly="fail" destfile="${build-palo.dir}/lib/scala-reflect.jar" manifest="${basedir}/META-INF/MANIFEST.MF"
+ basedir="${build-locker.dir}/classes/reflect"/>
+ <jar whenmanifestonly="fail" destfile="${build-palo.dir}/lib/scala-compiler.jar" manifest="${basedir}/META-INF/MANIFEST.MF">
+ <fileset dir="${build-locker.dir}/classes/compiler"/>
+ <fileset dir="${asm-classes}"/>
+ <fileset dir="${fjbg-classes}"/>
</jar>
+ <copy file="${jline.jar}" toDir="${build-palo.dir}/lib"/>
</target>
- <target name="libs.done" depends="libs.msilpack, libs.fjbgpack"/>
+ <target name="palo.bin" depends="palo.done"> <staged-bin stage="palo"/></target>
- <target name="forkjoin.done" depends="libs.forkjoinpack"/>
-
- <target name="libs.clean" depends="pack.clean">
- <delete dir="${build-libs.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </target>
<!-- ===========================================================================
-DOCUMENTATION
+ OSGi Artifacts
============================================================================ -->
-
- <target name="docs.start" depends="pack.done">
- <macrodef name="doc-uptodate-check">
+ <target name="osgi.done" depends="pack.done">
+ <mkdir dir="${build-osgi.dir}"/>
+
+ <!-- simplify fixing pom versions -->
+ <macrodef name="make-bundle">
<attribute name="name" />
- <attribute name="srcdir" />
- <element name="source-includes" optional="yes" />
+ <attribute name="version" />
<sequential>
- <uptodate property="docs.@{name}.available" targetfile="${build-docs.dir}/@{name}.complete">
- <srcfiles dir="@{srcdir}">
- <source-includes/>
- </srcfiles>
- </uptodate>
+ <copy file="${src.dir}/build/bnd/@{name}.bnd" tofile="${build-osgi.dir}/@{name}.bnd" overwrite="true">
+ <filterset>
+ <filter token="VERSION" value="@{version}" />
+ </filterset>
+ </copy>
+ <bnd classpath="${build-pack.dir}/lib/@{name}.jar"
+ eclipse="false"
+ failok="false"
+ exceptions="true"
+ files="${build-osgi.dir}/@{name}.bnd"
+ output="${build-osgi.dir}"/>
+ </sequential>
+ </macrodef>
+ <macrodef name="make-plugin-bundle">
+ <attribute name="name" />
+ <attribute name="version" />
+ <sequential>
+ <copy file="${src.dir}/build/bnd/@{name}.bnd" tofile="${build-osgi.dir}/@{name}.bnd" overwrite="true">
+ <filterset>
+ <filter token="VERSION" value="@{version}" />
+ </filterset>
+ </copy>
+ <bnd classpath="${build-pack.dir}/misc/scala-devel/plugins/@{name}.jar"
+ eclipse="false"
+ failok="false"
+ exceptions="true"
+ files="${build-osgi.dir}/@{name}.bnd"
+ output="${build-osgi.dir}"/>
</sequential>
</macrodef>
- <!-- Set the github commit scaladoc sources point to -->
- <!-- For releases, look for the tag with the same name as the maven version -->
- <condition property="scaladoc.git.commit" value="v${maven.version.number}">
- <isset property="build.release"/>
- </condition>
- <!-- For snapshots, if we know the commit, point scaladoc to that particular commit instead of master -->
- <condition property="scaladoc.git.commit" value="${git.commit.sha}">
- <not><equals arg1="${git.commit.sha}" arg2="unknown"/></not>
- </condition>
- <!-- Fallback: point scaladoc to master -->
- <property name="scaladoc.git.commit" value="master"/>
- <!-- Compute the URL and show it -->
- <property name="scaladoc.url" value="https://github.com/scala/scala/tree/${scaladoc.git.commit}/src"/>
- <echo message="Scaladoc will point to ${scaladoc.url} for source files."/>
- </target>
-
- <target name="docs.pre-lib" depends="docs.start">
- <doc-uptodate-check name="library" srcdir="${src.dir}">
- <source-includes>
- <include name="library/**"/>
- <include name="dbc/**"/>
- <include name="actors/**"/>
- <include name="swing/**"/>
- </source-includes>
- </doc-uptodate-check>
- </target>
-
- <target name="docs.lib" depends="docs.pre-lib" unless="docs.library.available">
- <stopwatch name="docs.lib.timer"/>
- <mkdir dir="${build-docs.dir}/library"/>
- <scaladoc
- destdir="${build-docs.dir}/library"
- doctitle="Scala Standard Library"
- docversion="${version.number}"
- docfooter="epfl"
- docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
- docUncompilable="${src.dir}/library-aux"
- sourcepath="${src.dir}"
- classpathref="pack.classpath"
- docRootContent="${build-docs.dir}/library/lib/rootdoc.txt">
- <src>
- <files includes="${src.dir}/actors"/>
- <files includes="${src.dir}/library/scala"/>
- <files includes="${src.dir}/swing"/>
- <files includes="${src.dir}/continuations/library"/>
- </src>
- <include name="**/*.scala"/>
- <exclude name="reflect/Code.scala"/>
- <exclude name="reflect/Print.scala"/>
- <exclude name="reflect/Symbol.scala"/>
- <exclude name="reflect/Tree.scala"/>
- <exclude name="reflect/Type.scala"/>
- <exclude name="runtime/*$.scala"/>
- <exclude name="runtime/ScalaRunTime.scala"/>
- <exclude name="runtime/StringAdd.scala"/>
- <exclude name="scala/swing/test/**"/>
- </scaladoc>
- <touch file="${build-docs.dir}/library.complete" verbose="no"/>
- <stopwatch name="docs.lib.timer" action="total"/>
- </target>
-
- <target name="docs.pre-man" depends="docs.lib">
- <doc-uptodate-check name="manual" srcdir="${src.dir}/manual"/>
- </target>
+ <uptodate property="osgi.bundles.available" targetfile="${build-osgi.dir}/bundles.complete">
+ <srcfiles dir="${basedir}">
+ <include name="build.xml"/>
+ <include name="src/build/bnd/*.bnd"/>
+ </srcfiles>
+ </uptodate>
- <target name="docs.manmaker" depends="docs.pre-man" unless="docs.manual.available">
- <mkdir dir="${build.dir}/manmaker/classes"/>
- <scalac
- destdir="${build.dir}/manmaker/classes"
- classpathref="pack.classpath"
- srcdir="${src.dir}/manual"
- includes="**/*.scala"
- addparams="${scalac.args.all}"/>
- <path id="manual.classpath">
- <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
- <pathelement location="${build.dir}/manmaker/classes"/>
- </path>
+ <if><not><isset property="osgi.bundles.available"/></not><then>
+ <stopwatch name="osgi.bundle.timer"/>
+ <make-bundle name="scala-library" version="${osgi.version.number}" />
+ <make-bundle name="scala-actors" version="${osgi.version.number}" />
+ <make-bundle name="scala-reflect" version="${osgi.version.number}" />
+ <make-bundle name="scala-compiler" version="${osgi.version.number}" />
+ <make-plugin-bundle name="continuations" version="${osgi.version.number}" />
+ <touch file="${build-osgi.dir}/bundles.complete" verbose="no"/>
+
+ <if><isset property="has.java6"/><then>
+ <make-bundle name="scala-swing" version="${osgi.version.number}"/></then>
+ </if>
+ <stopwatch name="osgi.bundle.timer" action="total"/></then>
+ </if>
</target>
- <target name="docs.man" depends="docs.manmaker" unless="docs.manual.available">
- <mkdir dir="${build-docs.dir}/manual/man/man1"/>
- <mkdir dir="${build-docs.dir}/manual/html"/>
- <mkdir dir="${build-docs.dir}/manual/genman/man1"/>
- <taskdef name="genman"
- classname="scala.tools.docutil.ManMaker"
- classpathref="manual.classpath"/>
- <genman command="fsc, scala, scalac, scaladoc, scalap"
- htmlout="${build-docs.dir}/manual/html"
- manout="${build-docs.dir}/manual/genman"/>
- <!-- On Windows source and target files can't be the same ! -->
- <fixcrlf
- srcdir="${build-docs.dir}/manual/genman"
- destdir="${build-docs.dir}/manual/man"
- eol="unix" includes="**/*.1"/>
- <copy todir="${build-docs.dir}/manual/html">
- <fileset dir="${src.dir}/manual/scala/tools/docutil/resources">
- <include name="**/*.html"/>
- <include name="**/*.css"/>
- <include name="**/*.gif"/>
- <include name="**/*.png"/>
- </fileset>
- </copy>
- <touch file="${build-docs.dir}/manual.complete" verbose="no"/>
- </target>
- <target name="docs.pre-comp" depends="docs.man">
- <doc-uptodate-check name="compiler" srcdir="${src.dir}/compiler"/>
+<!-- ===========================================================================
+ TEST SUITE
+============================================================================ -->
+ <!-- bootstrapping stability: compare {quick,strap}/(lib|reflect|comp) -->
+ <target name="test.stability" depends="strap.done">
+ <exec osfamily="unix" vmlauncher="false" executable="${basedir}/tools/stability-test.sh" failonerror="true" />
+ <!-- I think doing it this way means it will auto-pass on windows... that's the idea. If not, something like this. -->
+ <!-- <exec osfamily="windows" executable="foo" failonerror="false" failifexecutionfails="false" /> -->
</target>
- <target name="docs.comp" depends="docs.pre-comp" unless="docs.compiler.available">
- <stopwatch name="docs.comp.timer"/>
- <mkdir dir="${build-docs.dir}/compiler"/>
- <scaladoc
- destdir="${build-docs.dir}/compiler"
- doctitle="Scala Compiler"
- docversion="${version.number}"
- docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
- sourcepath="${src.dir}"
- classpathref="pack.classpath"
- srcdir="${src.dir}/compiler">
- <include name="**/*.scala"/>
- </scaladoc>
- <touch file="${build-docs.dir}/compiler.complete" verbose="no"/>
- <stopwatch name="docs.comp.timer" action="total"/>
- </target>
+ <target name="test.stability-opt" description="Optimized version of test.stability."> <optimized name="test.stability"/></target>
- <target name="docs.pre-jline" depends="docs.start">
- <doc-uptodate-check name="jline" srcdir="${src.dir}/jline/src/main/java" />
+ <target name="test.osgi.init" depends="osgi.done">
+ <uptodate property="test.osgi.available" targetfile="${build-osgi.dir}/test-compile.complete">
+ <srcfiles dir="${test.osgi.src}">
+ <include name="**/*.scala"/>
+ </srcfiles>
+ </uptodate>
</target>
- <target name="docs.jline" depends="docs.pre-jline" unless="docs.jline.available">
- <stopwatch name="docs.jline.timer"/>
- <mkdir dir="${build-docs.dir}/jline"/>
- <scaladoc
- destdir="${build-docs.dir}/jline"
- doctitle="Scala JLine"
- docversion="${version.number}"
- sourcepath="${src.dir}"
- classpathref="pack.classpath"
- srcdir="${src.dir}/jline/src/main/java">
+ <target name="test.osgi.comp" depends="test.osgi.init, quick.done" unless="test.osgi.available">
+ <stopwatch name="test.osgi.compiler.timer"/>
+ <mkdir dir="${test.osgi.classes}"/>
+ <scalacfork
+ destdir="${test.osgi.classes}"
+ compilerpathref="quick.compiler.path"
+ params="${scalac.args.quick}"
+ srcdir="${test.osgi.src}"
+ jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <include name="**/*.java"/>
- </scaladoc>
- <touch file="${build-docs.dir}/jline.complete" verbose="no"/>
- <stopwatch name="docs.jline.timer" action="total"/>
- </target>
-
- <target name="docs.pre-scalap" depends="docs.start">
- <doc-uptodate-check name="scalap" srcdir="${src.dir}/scalap" />
+ <compilationpath refid="test.osgi.compiler.build.path"/>
+ </scalacfork>
+ <touch file="${build-osgi.dir}/test-compile.complete" verbose="no"/>
+ <stopwatch name="test.osgi.compiler.timer" action="total"/>
</target>
- <target name="docs.scalap" depends="docs.pre-scalap" unless="docs.scalap.available">
- <stopwatch name="docs.scalap.timer"/>
- <mkdir dir="${build-docs.dir}/scalap"/>
- <scaladoc
- destdir="${build-docs.dir}/scalap"
- doctitle="Scalap"
- docversion="${version.number}"
- docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
- sourcepath="${src.dir}"
- classpathref="pack.classpath"
- srcdir="${src.dir}/scalap">
- <include name="**/*.scala"/>
- </scaladoc>
- <touch file="${build-docs.dir}/scalap.complete" verbose="no"/>
- <stopwatch name="docs.scalap.timer" action="total"/>
+ <target name="test.osgi" depends="test.osgi.comp">
+ <stopwatch name="test.osgi.timer"/>
+ <mkdir dir="${test.osgi.classes}"/>
+ <junit fork="yes" haltonfailure="yes">
+ <classpath refid="test.osgi.compiler.build.path"/>
+ <batchtest fork="yes" todir="${build-osgi.dir}">
+ <fileset dir="${test.osgi.classes}">
+ <include name="**/*Test.class"/>
+ </fileset>
+ </batchtest>
+ <formatter type="brief" /> <!-- silenced by having it use a file; I tried for an hour to use other formatters but classpath issues drove me to this usefile="false" -->
+ </junit>
+ <stopwatch name="test.osgi.timer" action="total"/>
</target>
- <target name="docs.pre-partest" depends="docs.start">
- <doc-uptodate-check name="partest" srcdir="${src.dir}/partest" />
- </target>
- <target name="docs.partest" depends="docs.pre-partest" unless="docs.partest.available">
- <stopwatch name="docs.partest.timer"/>
- <mkdir dir="${build-docs.dir}/scala-partest"/>
- <scaladoc
- destdir="${build-docs.dir}/scala-partest"
- doctitle="Scala Parallel Testing Framework"
- docversion="${version.number}"
- sourcepath="${src.dir}"
- classpathref="pack.classpath"
- srcdir="${src.dir}/partest">
+<!-- ===========================================================================
+ SBT Compiler Interface
+============================================================================ -->
+ <target name="test.sbt" depends="quick.done">
+ <if><not><and>
+ <available file="${sbt.interface.jar}"/>
+ <available file="${sbt.interface.src.jar}"/></and></not>
+ <then>
+ <!-- Ensure directories exist -->
+ <mkdir dir="${sbt.src.dir}"/>
+ <mkdir dir="${sbt.lib.dir}"/>
+
+ <get src="${sbt.interface.url}" dest="${sbt.interface.jar}"/>
+ <get src="${sbt.interface.src.url}" dest="${sbt.interface.src.jar}"/>
+
+ <!-- Explode sources -->
+ <unzip src="${sbt.interface.src.jar}" dest="${sbt.src.dir}"/>
+ </then></if>
+
+ <stopwatch name="quick.sbt-interface.timer"/>
+ <mkdir dir="${build-sbt.dir}/classes"/>
+ <scalacfork
+ destdir="${build-sbt.dir}/classes"
+ compilerpathref="quick.compiler.path"
+ params="${scalac.args.quick}"
+ srcdir="${sbt.src.dir}"
+ jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- </scaladoc>
- <touch file="${build-docs.dir}/partest.complete" verbose="no"/>
- <stopwatch name="docs.partest.timer" action="total"/>
+ <compilationpath refid="sbt.compile.build.path"/>
+ </scalacfork>
+ <touch file="${build-sbt.dir}/sbt-interface.complete" verbose="no"/>
+ <stopwatch name="quick.sbt-interface.timer" action="total"/>
</target>
- <target name="docs.pre-continuations-plugin" depends="docs.start">
- <doc-uptodate-check name="continuations-plugin" srcdir="${src.dir}/continuations/plugin" />
+ <target name="test.junit.init" depends="quick.done">
+ <uptodate property="test.junit.available" targetfile="${build-junit.dir}/test-compile.complete">
+ <srcfiles dir="${test.junit.src}">
+ <include name="**/*.scala"/>
+ </srcfiles>
+ </uptodate>
</target>
- <target name="docs.continuations-plugin" depends="docs.pre-continuations-plugin" unless="docs.continuations-plugin.available">
- <stopwatch name="docs.continuations-plugin.timer"/>
- <mkdir dir="${build-docs.dir}/continuations-plugin"/>
- <scaladoc
- destdir="${build-docs.dir}/continuations-plugin"
- doctitle="Delimited Continuations Compiler Plugin"
- docversion="${version.number}"
- sourcepath="${src.dir}"
- classpathref="pack.classpath"
- srcdir="${src.dir}/continuations/plugin">
+ <target name="test.junit.comp" depends="test.junit.init, quick.done" unless="test.junit.available">
+ <stopwatch name="test.junit.compiler.timer"/>
+ <mkdir dir="${test.junit.classes}"/>
+ <scalacfork
+ destdir="${test.junit.classes}"
+ compilerpathref="quick.compiler.path"
+ params="${scalac.args.quick}"
+ srcdir="${test.junit.src}"
+ jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- </scaladoc>
- <touch file="${build-docs.dir}/continuations-plugin.complete" verbose="no"/>
- <stopwatch name="docs.continuations-plugin.timer" action="total"/>
+ <compilationpath refid="test.junit.compiler.build.path"/>
+ </scalacfork>
+ <touch file="${build-junit.dir}/test-compile.complete" verbose="no"/>
+ <stopwatch name="test.junit.compiler.timer" action="total"/>
</target>
- <target name="docs.done" depends="docs.man"/>
-
- <target name="docs.all" depends="docs.jline, docs.comp, docs.man, docs.lib, docs.scalap, docs.partest, docs.continuations-plugin"/>
-
- <target name="docs.clean">
- <delete dir="${build-docs.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- <delete dir="${build.dir}/manmaker" includeemptydirs="yes" quiet="yes" failonerror="no"/>
+ <target name="test.junit" depends="test.junit.comp">
+ <stopwatch name="test.junit.timer"/>
+ <mkdir dir="${test.junit.classes}"/>
+ <junit fork="yes" haltonfailure="yes" showoutput="yes" printsummary="on">
+ <classpath refid="test.junit.compiler.build.path"/>
+ <batchtest fork="yes" todir="${build-junit.dir}">
+ <fileset dir="${test.junit.classes}">
+ <include name="**/*Test.class"/>
+ </fileset>
+ </batchtest>
+ <formatter type="plain"/>
+ </junit>
+ <stopwatch name="test.junit.timer" action="total"/>
</target>
-<!-- ===========================================================================
-BOOTRAPING TEST AND TEST SUITE
-============================================================================ -->
-
- <target name="test.stability" depends="strap.done">
- <same dir="${build-quick.dir}" todir="${build-strap.dir}" failondifferent="yes">
- <exclude name="**/*.properties"/>
- <exclude name="bin/**"/>
- <exclude name="*.complete"/>
- <exclude name="misc/scala-devel/plugins/*.jar"/>
- </same>
- </target>
-
- <!-- this target will run only those tests found in test/debug -->
- <target name="test.debug">
- <antcall target="test.suite">
- <param name="partest.srcdir" value="debug" />
- </antcall>
- </target>
+ <property name="partest.srcdir" value="files" /> <!-- TODO: make targets for `pending` and other subdirs -->
<target name="test.run" depends="pack.done">
- <property name="partest.srcdir" value="files" />
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
timeout="1200000"
srcdir="${partest.srcdir}"
scalacopts="${scalac.args.optimise}">
- <compilationpath>
- <path refid="pack.classpath"/>
- <pathelement location="${pack.dir}/lib/scala-swing.jar"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+
+ <compilationpath refid="partest.build.path"/>
<runtests dir="${partest.dir}/${partest.srcdir}/run" includes="*.scala"/>
<jvmtests dir="${partest.dir}/${partest.srcdir}/jvm" includes="*.scala"/>
</partest>
</target>
<target name="test.suite" depends="pack.done">
- <property name="partest.srcdir" value="files" />
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
timeout="2400000"
srcdir="${partest.srcdir}"
scalacopts="${scalac.args.optimise}">
- <compilationpath>
- <path refid="pack.classpath"/>
- <pathelement location="${pack.dir}/lib/scala-swing.jar"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+ <compilationpath refid="partest.build.path"/>
<postests dir="${partest.dir}/${partest.srcdir}/pos" includes="*.scala"/>
<negtests dir="${partest.dir}/${partest.srcdir}/neg" includes="*.scala"/>
<runtests dir="${partest.dir}/${partest.srcdir}/run" includes="*.scala"/>
@@ -1759,55 +1552,303 @@ BOOTRAPING TEST AND TEST SUITE
<specializedtests dir="${partest.dir}/${partest.srcdir}/specialized">
<include name="*.scala"/>
</specializedtests>
- <presentationtests dir="${partest.dir}/${partest.srcdir}/presentation">
- <include name="*/*.scala"/>
- </presentationtests>
- <!-- <scripttests dir="${partest.dir}/${partest.srcdir}/script" includes="*.scala"/> -->
+ <instrumentedtests dir="${partest.dir}/${partest.srcdir}/instrumented">
+ <include name="*.scala"/>
+ </instrumentedtests>
</partest>
</target>
<target name="test.continuations.suite" depends="pack.done">
- <property name="partest.srcdir" value="files" />
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
timeout="2400000"
srcdir="${partest.srcdir}"
- scalacopts="${scalac.args.optimise} -Xpluginsdir ${build-quick.dir}/misc/scala-devel/plugins -Xplugin-require:continuations -P:continuations:enable">
- <compilationpath>
- <path refid="pack.classpath"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+ scalacopts="${scalac.args.optimise} -Xplugin-require:continuations -P:continuations:enable">
+ <compilerarg value="-Xpluginsdir"/>
+ <compilerarg file="${build-quick.dir}/misc/scala-devel/plugins"/>
+ <compilationpath refid="partest.build.path"/>
<negtests dir="${partest.dir}/${partest.srcdir}/continuations-neg" includes="*.scala"/>
<runtests dir="${partest.dir}/${partest.srcdir}/continuations-run" includes="*.scala"/>
</partest>
</target>
<target name="test.scaladoc" depends="pack.done">
- <partest erroronfailed="yes" scalacopts="${scalac.args.optimise}">
- <compilationpath>
- <path refid="pack.classpath"/>
- </compilationpath>
- <scalachecktests dir="test/scaladoc/scala">
- <include name="*.scala"/>
- </scalachecktests>
+ <partest erroronfailed="yes" scalacopts="${scalac.args.optimise}" showlog="yes">
+ <compilationpath refid="partest.build.path"/>
+ <runtests dir="${partest.dir}/scaladoc/run" includes="*.scala" />
+ <scalachecktests dir="${partest.dir}/scaladoc/scalacheck" includes="*.scala" />
+ </partest>
+ </target>
+
+ <target name="test.interactive" depends="pack.done">
+ <partest erroronfailed="yes" scalacopts="${scalac.args.optimise}" showlog="yes">
+ <compilationpath refid="partest.build.path"/>
+ <presentationtests dir="${partest.dir}/${partest.srcdir}/presentation">
+ <include name="*/*.scala"/>
+ </presentationtests>
</partest>
</target>
- <target name="test.done" depends="test.suite, test.continuations.suite, test.scaladoc, test.stability"/>
+ <!-- for use in PR validation, where stability is rarely broken, so we're going to use starr for locker,
+ and skip test.stability (which requires locker == quick) -->
+ <target name="test.core" depends="test.osgi, test.sbt, test.bc, test.junit, test.interactive, test.continuations.suite, test.scaladoc, test.suite"/>
+ <target name="test.done" depends="test.core, test.stability"/>
+
+
+<!-- ===========================================================================
+ BINARY COMPATIBILITY TESTING
+============================================================================ -->
+ <target name="bc.init" depends="init" unless="maven-deps-done-mima">
+ <property name="bc-reference-version" value="2.10.0"/>
+
+ <property name="bc-build.dir" value="${build.dir}/bc"/>
+ <!-- Obtain mima -->
+ <mkdir dir="${bc-build.dir}"/>
+ <!-- Pull down MIMA -->
+ <artifact:dependencies pathId="mima.classpath">
+ <dependency groupId="com.typesafe" artifactId="mima-reporter_2.10" version="0.1.6"/>
+ </artifact:dependencies>
+ <artifact:dependencies pathId="old.bc.classpath">
+ <dependency groupId="org.scala-lang" artifactId="scala-swing" version="${bc-reference-version}"/>
+ <dependency groupId="org.scala-lang" artifactId="scala-library" version="${bc-reference-version}"/>
+ <dependency groupId="org.scala-lang" artifactId="scala-reflect" version="${bc-reference-version}"/>
+ </artifact:dependencies>
+ <property name="maven-deps-done-mima" value="true"/>
+ </target>
+
+ <macrodef name="bc.run-mima">
+ <attribute name="jar-name"/>
+ <attribute name="prev"/>
+ <attribute name="curr"/>
+ <attribute name="direction"/>
+ <sequential>
+ <echo message="Checking @{direction} binary compatibility for @{jar-name} (against ${bc-reference-version})"/>
+ <java taskname="mima"
+ fork="true"
+ failonerror="true"
+ classname="com.typesafe.tools.mima.cli.Main">
+ <arg value="--prev"/>
+ <arg value="@{prev}"/>
+ <arg value="--curr"/>
+ <arg value="@{curr}"/>
+ <arg value="--filters"/>
+ <arg value="${basedir}/bincompat-@{direction}.whitelist.conf"/>
+ <arg value="--generate-filters"/>
+ <classpath>
+ <path refid="mima.classpath"/>
+ </classpath>
+ </java>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="bc.check">
+ <attribute name="jar-name"/>
+ <sequential>
+ <bc.run-mima
+ jar-name="@{jar-name}"
+ prev="${org.scala-lang:@{jar-name}:jar}"
+ curr="${build-pack.dir}/lib/@{jar-name}.jar"
+ direction="backward"/>
+ <bc.run-mima
+ jar-name="@{jar-name}"
+ prev="${build-pack.dir}/lib/@{jar-name}.jar"
+ curr="${org.scala-lang:@{jar-name}:jar}"
+ direction="forward"/>
+ </sequential>
+ </macrodef>
+
+ <target name="test.bc-opt" description="Optimized version of test.bc."> <optimized name="test.bc"/></target>
+ <target name="test.bc" depends="bc.init, pack.lib, pack.reflect, pack.swing">
+ <bc.check jar-name="scala-library"/>
+ <bc.check jar-name="scala-reflect"/>
+ <bc.check jar-name="scala-swing"/>
+ </target>
<!-- ===========================================================================
-DISTRIBUTION
+ DOCUMENTATION
============================================================================ -->
-
- <target name="dist.start" depends="docs.done, pack.done">
- <property name="dist.dir" value="${dists.dir}/scala-${version.number}"/>
+ <target name="docs.start" depends="pack.done">
+ <!-- Set the github commit scaladoc sources point to -->
+ <!-- For releases, look for the tag with the same name as the maven version -->
+ <condition property="scaladoc.git.commit" value="v${maven.version.number}">
+ <isset property="build.release"/>
+ </condition>
+ <!-- For snapshots, if we know the commit, point scaladoc to that particular commit instead of master -->
+ <condition property="scaladoc.git.commit" value="${git.commit.sha}">
+ <not><equals arg1="${git.commit.sha}" arg2="unknown"/></not>
+ </condition>
+ <!-- Fallback: point scaladoc to master -->
+ <property name="scaladoc.git.commit" value="master"/>
+ <!-- Compute the URL and show it -->
+ <property name="scaladoc.url" value="https://github.com/scala/scala/tree/${scaladoc.git.commit}/src"/>
+ <echo message="Scaladoc will point to ${scaladoc.url} for source files."/>
+
+ <!-- Unless set with -Dscaladoc.<...>, these won't be activated -->
+ <property name="scaladoc.raw.output" value="no"/>
+ <property name="scaladoc.no.prefixes" value="no"/>
+ </target>
+
+ <target name="docs.lib" depends="docs.start">
+ <staged-uptodate stage="docs" project="library">
+ <check><srcfiles dir="${src.dir}">
+ <include name="library/**"/>
+ <include name="swing/**"/>
+ <include name="actors/**"/>
+ <include name="reflect/**"/>
+ <include name="continuations/library/**"/>
+ </srcfiles></check>
+ <do>
+ <stopwatch name="docs.lib.timer"/>
+ <mkdir dir="${build-docs.dir}/library"/>
+ <!-- last three attributes not supported by staged-docs: -->
+ <scaladoc
+ destdir="${build-docs.dir}/library"
+ doctitle="Scala Standard Library API (Scaladoc)"
+ docversion="${version.number}"
+ docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
+ sourcepath="${src.dir}"
+ classpathref="pack.compiler.path"
+ addparams="${scalac.args.all}"
+ docRootContent="${src.dir}/library/rootdoc.txt"
+ implicits="on"
+ diagrams="on"
+ groups="on"
+ rawOutput="${scaladoc.raw.output}"
+ noPrefixes="${scaladoc.no.prefixes}"
+ docfooter="epfl"
+ docUncompilable="${src.dir}/library-aux"
+ skipPackages="scala.reflect.macros.internal:scala.reflect.internal:scala.reflect.io:scala.concurrent.impl">
+ <src>
+ <files includes="${src.dir}/actors"/>
+ <files includes="${src.dir}/library"/>
+ <files includes="${src.dir}/reflect"/>
+ <files includes="${src.dir}/swing"/>
+ <files includes="${src.dir}/continuations/library"/>
+ </src>
+ <include name="**/*.scala"/>
+ <exclude name="reflect/Code.scala"/>
+ <exclude name="reflect/Print.scala"/>
+ <exclude name="reflect/Symbol.scala"/>
+ <exclude name="reflect/Tree.scala"/>
+ <exclude name="reflect/Type.scala"/>
+ <exclude name="runtime/*$.scala"/>
+ <exclude name="runtime/ScalaRunTime.scala"/>
+ <exclude name="runtime/StringAdd.scala"/>
+ </scaladoc>
+ <stopwatch name="docs.lib.timer" action="total"/>
+ </do>
+ </staged-uptodate>
+ </target>
+
+ <target name="docs.comp" depends="docs.start">
+ <staged-docs project="compiler" title="Scala Compiler" docroot="rootdoc.txt">
+ <include name="**/*.scala"/>
+ </staged-docs>
+ </target>
+
+ <target name="docs.jline" depends="docs.start">
+ <staged-docs project="jline" dir="jline/src/main/java" title="Scala JLine">
+ <include name="**/*.scala"/>
+ <include name="**/*.java"/>
+ </staged-docs>
+ </target>
+
+ <target name="docs.scalap" depends="docs.start">
+ <staged-docs project="scalap" title="Scalap">
+ <include name="**/*.scala"/>
+ </staged-docs>
+ </target>
+
+ <target name="docs.partest" depends="docs.start">
+ <staged-docs project="partest" title="Scala Parallel Testing Framework">
+ <include name="**/*.scala"/>
+ </staged-docs>
</target>
-
- <target name="dist.base" depends="dist.start">
+
+ <target name="docs.continuations-plugin" depends="docs.start">
+ <staged-docs project="continuations-plugin" dir="continuations/plugin" title="Delimited Continuations Compiler Plugin">
+ <include name="**/*.scala"/>
+ </staged-docs>
+ </target>
+
+ <target name="docs.man" depends="docs.start">
+ <staged-uptodate stage="docs" project="manual">
+ <check><srcfiles dir="${src.dir}/manual"/></check>
+ <do>
+ <mkdir dir="${build.dir}/manmaker/classes"/>
+ <scalac
+ destdir="${build.dir}/manmaker/classes"
+ classpathref="pack.compiler.path"
+ srcdir="${src.dir}/manual"
+ includes="**/*.scala"
+ addparams="${scalac.args.all}"/>
+ <mkdir dir="${build-docs.dir}/manual/man/man1"/>
+ <mkdir dir="${build-docs.dir}/manual/html"/>
+ <mkdir dir="${build-docs.dir}/manual/genman/man1"/>
+ <taskdef name="genman"
+ classname="scala.tools.docutil.ManMaker"
+ classpathref="manual.classpath"/>
+ <genman command="fsc, scala, scalac, scaladoc, scalap"
+ htmlout="${build-docs.dir}/manual/html"
+ manout="${build-docs.dir}/manual/genman"/>
+ <!-- On Windows source and target files can't be the same ! -->
+ <fixcrlf
+ srcdir="${build-docs.dir}/manual/genman"
+ destdir="${build-docs.dir}/manual/man"
+ eol="unix" includes="**/*.1"/>
+ <copy todir="${build-docs.dir}/manual/html">
+ <fileset dir="${src.dir}/manual/scala/tools/docutil/resources">
+ <include name="**/*.html"/>
+ <include name="**/*.css"/>
+ <include name="**/*.gif"/>
+ <include name="**/*.png"/>
+ </fileset>
+ </copy>
+ </do>
+ </staged-uptodate>
+ </target>
+
+ <target name="docs.done" depends="docs.jline, docs.comp, docs.man, docs.lib, docs.scalap, docs.partest, docs.continuations-plugin"/>
+
+
+<!-- ===========================================================================
+ DISTRIBUTION
+============================================================================ -->
+ <target name="dist.base" depends="pack.done, osgi.done">
+ <property name="dist.name" value="scala-${version.number}"/>
+ <property name="dist.dir" value="${dists.dir}/${dist.name}"/>
+
+ <macrodef name="copy-bundle">
+ <attribute name="name" />
+ <sequential>
+ <copy file="${build-osgi.dir}/org.scala-lang.@{name}.jar"
+ tofile="${dist.dir}/lib/@{name}.jar"/>
+ </sequential>
+ </macrodef>
+ <macrodef name="copy-plugin-bundle">
+ <attribute name="name" />
+ <sequential>
+ <copy file="${build-osgi.dir}/org.scala-lang.plugins.@{name}.jar"
+ tofile="${dist.dir}/misc/scala-devel/plugins/@{name}.jar"
+ overwrite="yes"/>
+ </sequential>
+ </macrodef>
+
<mkdir dir="${dist.dir}/lib"/>
<copy toDir="${dist.dir}/lib">
- <fileset dir="${build-pack.dir}/lib"/>
+ <fileset dir="${build-pack.dir}/lib">
+ <include name="jline.jar"/>
+ <include name="scala-partest.jar"/> <!-- needed for maven publish -->
+ <include name="scalap.jar"/>
+ </fileset>
</copy>
+
<mkdir dir="${dist.dir}/bin"/>
+ <!-- TODO - Stop being inefficient and don't copy OSGi bundles overtop other jars. -->
+ <copy-bundle name="scala-library"/>
+ <copy-bundle name="scala-reflect"/>
+ <copy-bundle name="scala-swing"/>
+ <copy-bundle name="scala-actors"/>
+ <copy-bundle name="scala-compiler"/>
<copy toDir="${dist.dir}/bin">
<fileset dir="${build-pack.dir}/bin"/>
</copy>
@@ -1817,91 +1858,109 @@ DISTRIBUTION
<chmod perm="ugo+rx" file="${dist.dir}/bin/fsc"/>
<chmod perm="ugo+rx" file="${dist.dir}/bin/scalap"/>
<mkdir dir="${dist.dir}/misc/scala-devel/plugins"/>
- <copy toDir="${dist.dir}/misc/scala-devel/plugins">
- <fileset dir="${build-pack.dir}/misc/scala-devel/plugins"/>
- </copy>
+ <copy-plugin-bundle name="continuations"/>
</target>
- <target name="dist.doc" depends="dist.base">
- <mkdir dir="${dist.dir}/doc/scala-devel-docs"/>
+ <target name="dist.doc" depends="dist.base, docs.done">
+ <mkdir dir="${dist.dir}/doc"/>
+ <mkdir dir="${dist.dir}/doc/licenses"/>
+ <mkdir dir="${dist.dir}/doc/tools"/>
<copy file="${docs.dir}/LICENSE" toDir="${dist.dir}/doc"/>
- <copy file="${docs.dir}/README" toDir="${dist.dir}/doc"/>
- <mkdir dir="${dist.dir}/doc/scala-devel-docs/api"/>
- <copy toDir="${dist.dir}/doc/scala-devel-docs/api">
- <fileset dir="${build-docs.dir}/library"/>
- </copy>
- <mkdir dir="${dist.dir}/doc/scala-devel-docs/examples"/>
- <copy toDir="${dist.dir}/doc/scala-devel-docs/examples">
- <fileset dir="${docs.dir}/examples"/>
+ <copy file="${docs.dir}/README" toDir="${dist.dir}/doc"/>
+ <copy toDir="${dist.dir}/doc/licenses">
+ <fileset dir="${docs.dir}/licenses"/>
</copy>
- <mkdir dir="${dist.dir}/doc/scala-devel-docs/tools"/>
- <copy toDir="${dist.dir}/doc/scala-devel-docs/tools">
+ <copy toDir="${dist.dir}/doc/tools">
<fileset dir="${build-docs.dir}/manual/html"/>
</copy>
+
+ <mkdir dir="${dist.dir}/api"/>
+ <copy toDir="${dist.dir}/api">
+ <fileset dir="${build-docs.dir}/library"/>
+ </copy>
<copy file="${src.dir}/swing/doc/README"
- toFile="${dist.dir}/doc/scala-devel-docs/README.scala-swing"/>
+ toFile="${dist.dir}/api/README.scala-swing"/>
</target>
- <target name="dist.man" depends="dist.doc">
+
+ <target name="dist.man" depends="dist.base">
<mkdir dir="${dist.dir}/man"/>
<copy toDir="${dist.dir}/man">
<fileset dir="${build-docs.dir}/manual/man"/>
</copy>
</target>
- <target name="dist.src" depends="dist.man">
+ <!--
+ A jar-like task that creates an OSGi source bundle. It adds the required MANIFEST.MF headers that allow
+ Eclipse to match sources with the corresponding binaries.
+ -->
+ <macrodef name="osgi.source.bundle">
+ <attribute name="destfile" description="The jar file name"/>
+ <attribute name="symbolicName" description="The original bundle symbolic name (without .source at the end)"/>
+ <attribute name="bundleName" description="A value for Bundle-Name, usually a textual description"/>
+ <element name="file-sets" description="A sequence of fileset elements to be included in the jar" optional="true" implicit="true"/>
+
+ <sequential>
+ <jar whenmanifestonly="fail" destfile="@{destFile}">
+ <file-sets/>
+ <manifest>
+ <attribute name="Manifest-Version" value="1.0"/>
+ <attribute name="Bundle-Name" value="@{bundleName}"/>
+ <attribute name="Bundle-SymbolicName" value="@{symbolicName}.source"/>
+ <attribute name="Bundle-Version" value="${osgi.version.number}"/>
+          <attribute name="Eclipse-SourceBundle" value="@{symbolicName};version=&quot;${osgi.version.number}&quot;;roots:=&quot;.&quot;" />
+ </manifest>
+ </jar>
+ </sequential>
+ </macrodef>
+
+ <target name="dist.src" depends="dist.base">
<mkdir dir="${dist.dir}/src"/>
- <jar destfile="${dist.dir}/src/scala-library-src.jar">
+ <osgi.source.bundle destfile="${dist.dir}/src/scala-library-src.jar"
+ symbolicName="org.scala-lang.scala-library"
+ bundleName="Scala Library Sources">
<fileset dir="${src.dir}/library"/>
- <fileset dir="${src.dir}/actors"/>
<fileset dir="${src.dir}/continuations/library"/>
- </jar>
- <jar destfile="${dist.dir}/src/scala-dbc-src.jar">
- <fileset dir="${src.dir}/dbc"/>
- </jar>
- <jar destfile="${dist.dir}/src/scala-swing-src.jar">
+ </osgi.source.bundle>
+ <osgi.source.bundle destfile="${dist.dir}/src/scala-reflect-src.jar"
+ symbolicName="org.scala-lang.scala-reflect"
+ bundleName="Scala Reflect Sources">
+ <fileset dir="${src.dir}/reflect"/>
+ </osgi.source.bundle>
+ <osgi.source.bundle destfile="${dist.dir}/src/scala-swing-src.jar"
+ symbolicName="org.scala-lang.scala-swing"
+ bundleName="Scala Swing Sources">
<fileset dir="${src.dir}/swing"/>
- </jar>
- <jar destfile="${dist.dir}/src/scala-compiler-src.jar">
+ </osgi.source.bundle>
+ <osgi.source.bundle destfile="${dist.dir}/src/scala-compiler-src.jar"
+ symbolicName="org.scala-lang.scala-compiler"
+ bundleName="Scala Compiler Sources">
<fileset dir="${src.dir}/compiler"/>
- </jar>
- <jar destfile="${dist.dir}/src/scalap-src.jar">
- <fileset dir="${src.dir}/scalap"/>
- </jar>
- <!-- Needed for Maven distribution -->
- <jar destfile="${dist.dir}/src/scala-partest-src.jar">
- <fileset dir="${src.dir}/partest"/>
- </jar>
- </target>
-
- <target name="dist.latest.unix" depends="dist.src" unless="os.win">
- <symlink link="${dists.dir}/latest" resource="${dist.dir}" overwrite="yes"/>
- </target>
-
- <target name="dist.latest.win" depends="dist.src" if="os.win">
- <copy todir="${dists.dir}/latest">
- <fileset dir="${dist.dir}"/>
- </copy>
+ </osgi.source.bundle>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/fjbg-src.jar" basedir="${src.dir}/fjbg"/>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/msil-src.jar" basedir="${src.dir}/msil"/>
+ <osgi.source.bundle destfile="${dist.dir}/src/scala-actors-src.jar"
+ symbolicName="org.scala-lang.scala-actors"
+ bundleName="Scala Actors Sources">
+ <fileset dir="${src.dir}/actors"/>
+ </osgi.source.bundle>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scalap-src.jar" basedir="${src.dir}/scalap"/>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-partest-src.jar" basedir="${src.dir}/partest"/>
</target>
- <target name="dist.latest" depends="dist.latest.unix,dist.latest.win"/>
-
- <target name="dist.done" depends="dist.latest"/>
-
- <target name="dist.clean">
- <delete dir="${dists.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
+ <target name="dist.partial" depends="dist.base">
+ <if><not><os family="windows"/></not><then>
+ <symlink link="${dists.dir}/latest" resource="${dist.name}" overwrite="yes"/>
+ </then><else> <!-- XXX THIS PROBABLY DOES NOT WORK: copying must happen last during dist.done! is this guaranteed? -->
+ <copydir dest="${dists.dir}/latest" src="${dist.dir}"/>
+ </else></if>
</target>
-<!-- ===========================================================================
-TEST AND DISTRIBUTION BUNDLE (ALL)
-============================================================================ -->
-
- <target name="all.done" depends="dist.done, test.done"/>
+ <target name="dist.done" depends="dist.doc, dist.man, dist.src, dist.partial"/>
- <target name="all.clean" depends="locker.clean, docs.clean, dist.clean"/>
<!-- ===========================================================================
-STABLE REFERENCE (STARR)
+ STABLE REFERENCE (STARR)
============================================================================ -->
<!-- Does not use any properties other than ${basedir}, so that it can
run without 'init' (when using 'replacestarrwin') -->
@@ -1909,251 +1968,122 @@ STABLE REFERENCE (STARR)
<target name="starr.start">
<fail message="Library in build/pack not available">
<condition><not><and>
- <available file="${basedir}/build/pack/lib/scala-library.jar"/>
+ <available file="${build-pack.dir}/lib/scala-library.jar"/>
</and></not></condition>
</fail>
<fail message="Compiler in build/quick not available">
<condition><not><and>
- <available file="${basedir}/build/quick/classes/compiler"/>
- <available file="${basedir}/build/quick/compiler.complete"/>
+ <available file="${build-quick.dir}/classes/compiler"/>
+ <available file="${build-quick.dir}/compiler.complete"/>
</and></not></condition>
</fail>
</target>
- <target name="starr.clean" depends="starr.start">
- <delete file="${basedir}/lib/scala-library.jar"/>
- <delete file="${basedir}/lib/scala-compiler.jar"/>
- <delete file="${basedir}/lib/scala-library-src.jar"/>
- </target>
-
- <target name="starr.lib" depends="starr.start">
- <jar destfile="${basedir}/lib/scala-library.jar">
- <fileset dir="${basedir}/build/quick/classes/library"/>
- </jar>
+ <target name="starr.jars" depends="starr.start">
+ <copy toDir="${lib.dir}" overwrite="yes">
+ <fileset dir="${build-pack.dir}/lib">
+ <include name="scala-library.jar"/>
+ <include name="scala-reflect.jar"/>
+ <include name="scala-compiler.jar"/>
+ </fileset>
+ </copy>
</target>
- <target name="starr.comp" depends="starr.lib">
- <jar destfile="${basedir}/lib/scala-compiler.jar">
- <fileset dir="${basedir}/build/quick/classes/compiler"/>
+ <target name="starr.src" depends="starr.jars">
+ <jar whenmanifestonly="fail" destfile="${lib.dir}/scala-library-src.jar">
+ <fileset dir="${src.dir}/library"/>
+ <fileset dir="${src.dir}/swing"/>
+ <fileset dir="${src.dir}/actors"/>
+ <fileset dir="${src.dir}/forkjoin"/>
</jar>
- </target>
-
- <target name="starr.src" depends="starr.comp">
- <jar destfile="${basedir}/lib/scala-library-src.jar">
- <fileset dir="${basedir}/src/library"/>
- <fileset dir="${basedir}/src/actors"/>
- <fileset dir="${basedir}/src/swing"/>
- <fileset dir="${basedir}/src/dbc"/>
+ <jar whenmanifestonly="fail" destfile="${lib.dir}/scala-reflect-src.jar" basedir="${src.dir}/reflect"/>
+ <jar whenmanifestonly="fail" destfile="${lib.dir}/scala-compiler-src.jar">
+ <fileset dir="${src.dir}/compiler"/>
+ <fileset dir="${src.dir}/asm"/>
</jar>
</target>
- <target name="starr.libs" depends="starr.src" if="libs.outdated">
- <copy toDir="${lib.dir}" overwrite="yes">
- <fileset dir="${build-libs.dir}">
- <include name="fjbg.jar"/>
- <include name="msil.jar"/>
- <include name="forkjoin.jar"/>
+ <target name="starr.removesha1" depends="starr.src">
+    <!-- remove SHA1 files for no starr, so we don't lose artifacts. -->
+ <delete>
+ <fileset dir="${lib.dir}">
+ <include name="scala-compiler.jar.desired.sha1"/>
+ <include name="scala-reflect.jar.desired.sha1"/>
+ <include name="scala-library.jar.desired.sha1"/>
+ <include name="scala-library-src.jar.desired.sha1"/>
+ <include name="scala-reflect-src.jar.desired.sha1"/>
+ <include name="scala-compiler-src.jar.desired.sha1"/>
</fileset>
- </copy>
+ </delete>
</target>
- <target name="starr.done" depends="starr.libs"/>
+ <target name="starr.done" depends="starr.jars, starr.removesha1"/>
-<!-- ===========================================================================
-FORWARDED TARGETS FOR PACKAGING
-============================================================================ -->
-
- <target name="distpack" depends="dist.done, docs.all">
- <ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/>
+ <target name="replacestarr" description="Replaces the Starr compiler and library by fresh ones built from current sources and tests them.">
+ <fail message="This target is not available on Windows. Use 'ant replacestarrwin' instead.">
+ <condition>
+ <os family="windows"/>
+ </condition>
+ </fail>
+ <!-- needs antcall to enforce ordering -->
+ <antcall target="locker.clean"/>
+ <antcall target="pack.done"/>
+ <antcall target="starr.done"/>
+ <antcall target="locker.clean"/>
+ <antcall target="test.done"/>
</target>
- <target name="distpack-maven" depends="dist.done, docs.all">
- <ant antfile="${src.dir}/build/pack.xml" target="pack-maven.done" inheritall="yes" inheritrefs="yes"/>
- </target>
+ <target name="replacestarr-opt" description="Replaces the Starr compiler and library by fresh, optimised ones built from current sources and tests them.">
+ <optimized name="replacestarr"/></target>
- <target name="distpack-opt"
- description="Builds an optimised distribution.">
- <antcall target="distpack">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
-
- <target name="distpack-maven-opt"
- description="Builds an optimised maven distribution.">
- <antcall target="distpack-maven">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
+ <!-- Ant on Windows is not able to delete jar files that are referenced in any <path>.
+ See ticket 1290 on trac. -->
+ <target name="replacestarrwin" description="Creates a new Starr on Windows. Manually execute 'ant locker.clean build' first!">
+ <fail message="This target is only available on Windows. Use 'ant replacestarr' instead.">
+ <condition>
+ <not><os family="windows"/></not>
+ </condition>
+ </fail>
+ <echo message="CAUTION: Make sure to execute 'ant locker.clean build' prior to calling 'replacestarrwin'."/>
+ <antcall target="starr.done"/>
+ <antcall target="locker.clean"/>
+ <antcall target="test.done"/>
</target>
- <!-- Used by the scala-installer script -->
- <target name="allallclean" depends="all.clean"/>
+ <target name="replacestarrwin-opt" description="Creates a new Starr on Windows. Manually execute 'ant locker.clean build' first!">
+ <optimized name="replacestarrwin"/></target>
-<!-- ===========================================================================
-FORWARDED TARGETS FOR NIGHTLY BUILDS
-============================================================================ -->
+ <target name="replacelocker" description="Replaces the Locker compiler and library by fresh ones built from current sources."
+ depends="palo.clean, locker.unlock, palo.done"/>
- <target name="nightly">
- <antcall target="nightly-nopt">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
+ <target name="replacelocker-opt" description="Replaces the Locker compiler and library by fresh, optimised ones built from current sources.">
+ <optimized name="replacelocker"/></target>
- <target name="nightly-nopt" depends="all.done, docs.all">
- <!-- cannot antcall all.done, the properties defined in there (dist.dir) are not returned. need depends. -->
- <ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/>
- </target>
+ <target name="buildlocker" description="Does the same for locker as build does for quick." depends="locker.unlock, palo.bin"/>
+ <target name="unlocklocker" description="Same as buildlocker." depends="buildlocker"/> <!-- REMOVE -->
- <target name="nightly.checkinit">
- <antcall target="nightly-nopt">
- <param name="scalac.args.optimise" value="-Xcheckinit"/>
- </antcall>
+ <target name="fastlocker.lib" description="Buildlocker without extra fuss" depends="locker.unlock, locker.lib">
+ <property name="fastlocker" value="true"/>
</target>
- <target name="nightly.checkall">
- <antcall target="nightly-nopt">
- <param name="partest.scalacopts" value="-Ycheck:all"/>
- </antcall>
+ <target name="fastlocker.reflect" description="Buildlocker without extra fuss" depends="locker.unlock, locker.reflect">
+ <property name="fastlocker" value="true"/>
</target>
-<!-- ===========================================================================
-POSITIONS
-============================================================================ -->
-
- <target name="test.positions" depends="quick.comp">
- <antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${partest.dir}/files/positions"/>
- </antcall>
- <antcall target="test.positions.sub" inheritRefs="true">
- <param name="test.srcs" value="${src.dir}/compiler"/>
- </antcall>
- <antcall target="test.positions.sub" inheritRefs="true">
- <param name="test.srcs" value="${src.dir}/library"/>
- </antcall>
- <antcall target="test.positions.sub" inheritRefs="true">
- <param name="test.srcs" value="${src.dir}/actors"/>
- </antcall>
- <antcall target="test.positions.sub" inheritRefs="true">
- <param name="test.srcs" value="${src.dir}/dbc"/>
- </antcall>
- <antcall target="test.positions.sub" inheritRefs="true">
- <param name="test.srcs" value="${src.dir}/swing"/>
- </antcall>
- <antcall target="test.positions.sub" inheritRefs="true">
- <param name="test.srcs" value="${src.dir}/partest"/>
- </antcall>
- <antcall target="test.positions.sub" inheritRefs="true">
- <param name="test.srcs" value="${src.dir}/scalap"/>
- </antcall>
- <antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${partest.dir}/files/pos"/>
- </antcall>
- <antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${partest.dir}/files/run"/>
- </antcall>
- <antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${partest.dir}/files/neg"/>
- </antcall>
- </target>
-
- <target name="test.positions.sub">
- <echo message="Validating positions for: ${test.srcs}"/>
- <if>
- <isfileselected file="${test.srcs}">
- <type type="dir"/>
- </isfileselected>
- <then>
- <property name="srcdir" value="${test.srcs}"/>
- <property name="srcs" value="**/*.scala"/>
- </then>
- <else>
- <dirname property="srcdir" file="${test.srcs}"/>
- <basename property="srcs" file="${test.srcs}"/>
- </else>
- </if>
- <scalacfork
- destdir=""
- compilerpathref="locker.classpath"
- srcpath="${srcdir}"
- params="-Xprint-pos -Yide-debug"
- srcdir="${srcdir}"
- jvmargs="${scalacfork.jvmargs}">
- <include name="${srcs}"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- </compilationpath>
- </scalacfork>
+ <target name="fastlocker.comp" description="Buildlocker without extra fuss" depends="locker.unlock, locker.comp">
+ <property name="fastlocker" value="true"/>
</target>
- <target name="test.positions.tests.sub">
- <foreach target="test.positions.sub"
- inheritAll="true"
- inheritRefs="true"
- param="test.srcs">
- <path>
- <fileset dir="${test.tests.srcs}" includes="*.scala"/>
- <dirset dir="${test.tests.srcs}">
- <include name="*"/>
- </dirset>
- </path>
- </foreach>
- </target>
-
+ <target name="fastlocker" description="Buildlocker without extra fuss" depends="fastlocker.comp"/>
+
<!-- ===========================================================================
-MISCELLANEOUS
+ VISUALIZATION
============================================================================ -->
- <target name="yourkit.init">
- <property name="yourkit.home" value="/Applications/YourKit.app"/>
- <property name="yourkit.api.jar" value="${yourkit.home}/lib/yjp-controller-api-redist.jar"/>
- <property name="yourkit.agent" value="${yourkit.home}/bin/mac/libyjpagent.jnilib"/>
- <property name="yourkit.jvm.opts" value="-agentpath:${yourkit.agent}"/>
- <property name="yourkit.scalac.opts" value="-Yprofile:all"/>
- </target>
-
- <!-- Builds yourkit wrapper/jar and copies into lib/extra. -->
- <target name="yourkit.build" depends="locker.done,yourkit.init">
- <copy file="${yourkit.api.jar}" todir="${lib-extra.dir}"/>
- <property name="yourkit.build.dir" value="${build-quick.dir}/classes/yourkit"/>
- <mkdir dir="${yourkit.build.dir}"/>
-
- <scalacfork
- destdir="${yourkit.build.dir}"
- compilerpathref="locker.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/yourkit"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <path refid="locker.classpath"/>
- </compilationpath>
- </scalacfork>
- <jar destfile="${lib-extra.dir}/scalac-yourkit.jar">
- <fileset dir="${yourkit.build.dir}"/>
- </jar>
- </target>
-
- <!-- Builds quick.lib/comp with profiling enabled. -->
- <target name="yourkit.run" depends="yourkit.build">
- <antcall target="clean"/>
- <ant target="quick.lib" inheritall="false" inheritrefs="false">
- <property name="jvm.opts" value="${yourkit.jvm.opts}"/>
- <property name="scalac.args" value="${yourkit.scalac.opts}"/>
- </ant>
- <ant target="quick.comp" inheritall="false" inheritrefs="false">
- <property name="jvm.opts" value="${yourkit.jvm.opts}"/>
- <property name="scalac.args" value="${yourkit.scalac.opts}"/>
- </ant>
- <antcall target="build"/>
- </target>
-
<target name="graph.init">
<taskdef name="vizant" classname="vizant.Vizant" classpath="${lib-ant.dir}/vizant.jar"/>
</target>
- <target name="graph.clean" depends="graph.init">
- <vizant antfile="${ant.file}" outfile="${ant.project.name}.dot" from="starr.clean"/>
- </target>
-
<target name="graph.all" depends="graph.init">
<vizant antfile="${ant.file}" outfile="${ant.project.name}.dot" from="all.done"/>
</target>
diff --git a/classpath.SAMPLE b/classpath.SAMPLE
deleted file mode 100644
index 69c2bae..0000000
--- a/classpath.SAMPLE
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="src" path="src/compiler"/>
- <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry exported="true" kind="lib" path="lib/msil.jar"/>
- <classpathentry kind="lib" path="lib/jline.jar"/>
- <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
- <classpathentry exported="true" kind="lib" path="lib/fjbg.jar"/>
- <classpathentry kind="lib" path="lib/forkjoin.jar"/>
- <classpathentry kind="lib" path="lib/ant/ant.jar"/>
- <classpathentry kind="output" path="build/quick/classes/compiler"/>
-</classpath>
diff --git a/docs/LICENSE b/docs/LICENSE
index 38d1636..4daedef 100644
--- a/docs/LICENSE
+++ b/docs/LICENSE
@@ -1,35 +1,63 @@
-SCALA LICENSE
+Scala is licensed under the [BSD 3-Clause License](http://opensource.org/licenses/BSD-3-Clause).
-Copyright (c) 2002-2011 EPFL, Lausanne, unless otherwise specified.
-All rights reserved.
-
-This software was developed by the Programming Methods Laboratory of the
-Swiss Federal Institute of Technology (EPFL), Lausanne, Switzerland.
+## Scala License
-Permission to use, copy, modify, and distribute this software in source
-or binary form for any purpose with or without fee is hereby granted,
-provided that the following conditions are met:
+Copyright (c) 2002-2013 EPFL
- 1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
+Copyright (c) 2011-2013 Typesafe, Inc.
- 2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
- 3. Neither the name of the EPFL nor the names of its contributors
- may be used to endorse or promote products derived from this
- software without specific prior written permission.
+All rights reserved.
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of the EPFL nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Other Licenses
+
+This software includes projects with the following licenses,
+which are also included in the `licenses/` directory:
+
+### [Apache License](http://www.apache.org/licenses/LICENSE-2.0.html)
+This license is used by the following third-party libraries:
+ * jansi
+
+### [BSD License](http://www.opensource.org/licenses/bsd-license.php)
+This license is used by the following third-party libraries:
+ * jline
+
+### [BSD 3-Clause License](http://opensource.org/licenses/BSD-3-Clause)
+This license is used by the following third-party libraries:
+ * asm
+
+### [MIT License](http://www.opensource.org/licenses/MIT)
+This license is used by the following third-party libraries:
+ * jquery
+ * jquery-ui
+ * jquery-layout
+ * sizzle
+ * tools tooltip
+
+### Public Domain
+The following libraries are freely available in the public domain:
+ * forkjoin
-THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
-OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGE.
diff --git a/docs/README b/docs/README
index 6af6334..1d5f553 100644
--- a/docs/README
+++ b/docs/README
@@ -1,58 +1,36 @@
+Scala Distribution
+------------------
-Scala Software Distributions
-----------------------------
-
-- scala-<major>.<minor>.<patch>.tgz Unix distribution
-- scala-<major>.<minor>.<patch>.zip Windows distribution
-
-The standard distributions require Java 1.5 or above. If you don't
-know which version of Java you have, run the command "java -version".
+The Scala distribution requires Java 1.6 or above.
+Please report bugs at https://issues.scala-lang.org/.
+We welcome contributions at https://github.com/scala/scala!
Scala Tools
-----------
-- fsc Scala offline compiler
+- scala Scala interactive interpreter
- scalac Scala compiler
+- fsc Scala resident compiler
- scaladoc Scala API documentation generator
-- scala Scala interactive interpreter
- scalap Scala classfile decoder
Run the command "scalac -help" to display the list of available
compiler options.
-Unix Installation
------------------
-
-Untar the archive. All Scala tools are located in the "bin" directory.
-Adding that directory to the PATH variable will make the Scala commands
-directly accessible.
-
-You may test the distribution by running the following commands:
-
-$ ./bin/scala
-scala> Array(4,3,2,1).sorted
-res0: Array[Int] = Array(1, 2, 3, 4)
-
-scala>:quit
-$
-
-
-Windows Installation
---------------------
-
-Unzip the archive. All Scala tools are located in the "bin" directory.
-Adding that directory to the PATH variable will make the Scala commands
-directly accessible.
+Installation
+------------
-On Windows 95/98/Me, you must define the environment variable SCALA_HOME
-to point to the home directory of the Scala distribution to run any of
-these tools. This can be done by adding the following command to your
-AUTOEXEC.BAT file and then rebooting your machine.
+Decompress the archive and run the above commands directly from the `bin` directory.
+We recommend adding the full path of the `bin` directory to the `PATH`
+environment variable.
-set SCALA_HOME=<install-directory>\scala-<major>.<minor>.<patch>
-On Windows NT/2000/XP, you do not need to define any variable in order
-for the Scala commands to run correctly.
+Licenses
+--------
+Scala is licensed under the standard 3-clause BSD license,
+included in the distribution as the file `doc/LICENSE`.
+The licenses of the software included in the Scala distribution can
+be found in the `doc/licenses` directory.
\ No newline at end of file
diff --git a/docs/development/scala.dbc/SQLTypes.dot b/docs/development/scala.dbc/SQLTypes.dot
deleted file mode 100644
index 77c27cf..0000000
--- a/docs/development/scala.dbc/SQLTypes.dot
+++ /dev/null
@@ -1,48 +0,0 @@
-digraph SQLTypes {
- node [shape = record]
- DataType [label = "{DataType|nullable: Boolean}"]
- StringType [label = "{StringType|maxLength: Int}"]
- CharacterStringType [label = "{CharacterStringType|encoding: String}"]
- CharacterType [label = "{CharacterType|length: Int}"]
- CharacterVaryingType [label = "{CharacterVaryingType|length: Int}"]
- CharacterLargeObjectType [label = "CharacterLargeObjectType"]
- BinaryStringType [label = "BinaryStringType"]
- BinaryType [label = "{BinaryType|length: Int}"]
- BinaryVaryingType [label = "{BinaryVaryingType|length: Int}"]
- BinaryLargeObjectType [label = "BinaryLargeObjectType"]
- BooleanType [label = "BooleanType"]
- NumericType [label = "{NumericType|precisionRadix: Int\nprecision: Int}"]
- ExactNumericType [label = "{ExactNumericType|scale: Int}"]
- ApproximateNumericType [label = "ApproximateNumericType"]
- DateTimeType [label = "DateTimeType"]
- TimeType [label = "{TimeType|precisionRadix: Int\nsecondScale: Int}"]
- TimeWithoutTimezoneType [label = "TimeWithoutTimezoneType"]
- TimeWithTimezoneType [label = "TimeWithTimezoneType"]
- TimestampType [label = "{TimestampType|precisionRadix: Int\nsecondScale: Int}"]
- TimestampWithoutTimezoneType [label = "TimestampWithoutTimezoneType"]
- TimestampWithTimezoneType [label = "TimestampWithTimezoneType"]
- DateType [label = "DateType"]
- IntervalType [label = "{IntervalType|precisionRadix: Int\nsecondScale: Int}"]
- StringType -> DataType
- BooleanType -> DataType
- NumericType -> DataType
- DateTimeType -> DataType
- IntervalType -> DataType
- CharacterStringType -> StringType
- BinaryStringType -> StringType
- ExactNumericType -> NumericType
- ApproximateNumericType -> NumericType
- TimeType -> DateTimeType
- TimestampType -> DateTimeType
- DateType -> DateTimeType
- CharacterType -> CharacterStringType
- CharacterVaryingType -> CharacterStringType
- CharacterLargeObjectType -> CharacterStringType
- BinaryType -> BinaryStringType
- BinaryVaryingType -> BinaryStringType
- BinaryLargeObjectType -> BinaryStringType
- TimeWithoutTimezoneType -> TimeType
- TimeWithTimezoneType -> TimeType
- TimestampWithoutTimezoneType -> TimestampType
- TimestampWithTimezoneType -> TimestampType
-}
diff --git a/docs/examples/actors/auction.scala b/docs/examples/actors/auction.scala
deleted file mode 100644
index c3124c6..0000000
--- a/docs/examples/actors/auction.scala
+++ /dev/null
@@ -1,131 +0,0 @@
-package examples.actors
-
-import java.util.Date
-
-import scala.actors._
-import scala.actors.Actor._
-
-/** A simple demonstrator program implementing an online auction service
- * The example uses the actor abstraction defined in the API of
- * package scala.actors.
- */
-
-trait AuctionMessage
-case class Offer(bid: Int, client: Actor) extends AuctionMessage // make a bid
-case class Inquire(client: Actor) extends AuctionMessage // inquire status
-
-trait AuctionReply
-case class Status(asked: Int, expiration: Date) // asked sum, expiration date
- extends AuctionReply
-case object BestOffer extends AuctionReply // yours is the best offer
-case class BeatenOffer(maxBid: Int) extends AuctionReply // offer beaten by maxBid
-case class AuctionConcluded(seller: Actor, client: Actor) // auction concluded
- extends AuctionReply
-case object AuctionFailed extends AuctionReply // failed with no bids
-case object AuctionOver extends AuctionReply // bidding is closed
-
-class AuctionActor(seller: Actor, minBid: Int, closing: Date) extends Actor {
- val timeToShutdown = 3000 // msec
- val bidIncrement = 10
-
- def act() {
- var maxBid = minBid - bidIncrement
- var maxBidder: Actor = null
-
- loop {
- reactWithin (closing.getTime() - new Date().getTime()) {
-
- case Offer(bid, client) =>
- if (bid >= maxBid + bidIncrement) {
- if (maxBid >= minBid)
- maxBidder ! BeatenOffer(bid)
- maxBid = bid
- maxBidder = client
- client ! BestOffer
- }
- else
- client ! BeatenOffer(maxBid)
-
- case Inquire(client) =>
- client ! Status(maxBid, closing)
-
- case TIMEOUT =>
- if (maxBid >= minBid) {
- val reply = AuctionConcluded(seller, maxBidder)
- maxBidder ! reply
- seller ! reply
- } else {
- seller ! AuctionFailed
- }
- reactWithin(timeToShutdown) {
- case Offer(_, client) => client ! AuctionOver
- case TIMEOUT => exit()
- }
-
- }
- }
- }
-}
-
-object auction {
-
- val random = new scala.util.Random
-
- val minBid = 100
- val closing = new Date(new Date().getTime() + 4000)
-
- val seller = Actor.actor { }
- val auction = new AuctionActor(seller, minBid, closing)
-
- def client(i: Int, increment: Int, top: Int) = new Actor {
- val name = "Client " + i
- def log(msg: String) = Console.println(name + ": " + msg)
- var max: Int = _
- var current: Int = 0
- def act() {
- log("started")
- auction ! Inquire(this)
- receive {
- case Status(maxBid, _) =>
- log("status(" + maxBid + ")")
- max = maxBid
- }
- loop {
- if (max >= top) {
- log("too high for me")
- }
- else if (current < max) {
- current = max + increment
- Thread.sleep(1 + random.nextInt(1000))
- auction ! Offer(current, this)
- }
-
- reactWithin(3000) {
- case BestOffer =>
- log("bestOffer(" + current + ")")
-
- case BeatenOffer(maxBid) =>
- log("beatenOffer(" + maxBid + ")")
- max = maxBid
-
- case AuctionConcluded(seller, maxBidder) =>
- log("auctionConcluded"); exit()
-
- case AuctionOver =>
- log("auctionOver"); exit()
-
- case TIMEOUT =>
- exit()
- }
- }
- }
- }
-
- def main(args: Array[String]) {
- seller.start()
- auction.start()
- client(1, 20, 200).start()
- client(2, 10, 300).start()
- }
-
-}
diff --git a/docs/examples/actors/boundedbuffer.scala b/docs/examples/actors/boundedbuffer.scala
deleted file mode 100644
index c65bb77..0000000
--- a/docs/examples/actors/boundedbuffer.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-package examples.actors
-
-import scala.actors.Actor._
-
-object boundedbuffer {
- class BoundedBuffer[T](N: Int)(implicit m: Manifest[T]) {
- private case class Put(x: T)
- private case object Get
- private case object Stop
-
- private val buffer = actor {
- val buf = new Array[T](N)
- var in, out, n = 0
- loop {
- react {
- case Put(x) if n < N =>
- buf(in) = x; in = (in + 1) % N; n += 1; reply()
- case Get if n > 0 =>
- val r = buf(out); out = (out + 1) % N; n -= 1; reply(r)
- case Stop =>
- reply(); exit("stopped")
- }
- }
- }
-
- def put(x: T) { buffer !? Put(x) }
- def get: T = (buffer !? Get).asInstanceOf[T]
- def stop() { buffer !? Stop }
- }
-
- def main(args: Array[String]) {
- val buf = new BoundedBuffer[Int](1)
- buf.put(42)
- println("" + buf.get)
- buf.stop()
- }
-}
diff --git a/docs/examples/actors/channels.scala b/docs/examples/actors/channels.scala
deleted file mode 100644
index 3c0dbf2..0000000
--- a/docs/examples/actors/channels.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package examples.actors
-
-import scala.actors._
-import scala.actors.Actor._
-
-object channels extends Application {
- case class Msg(ch1: Channel[Int], ch2: Channel[String])
-
- val a = actor {
- val Ch1 = new Channel[Int]
- val Ch2 = new Channel[String]
-
- b ! Msg(Ch1, Ch2)
-
- val ICh1 = Ch1.asInstanceOf[InputChannel[Int]]
- val ICh2 = Ch2.asInstanceOf[InputChannel[String]]
-
- react {
- case ICh1 ! (x: Int) =>
- val r = x + 21
- println("result: "+r)
- case ICh2 ! y =>
- println("received: "+y)
- }
- }
-
- val b = actor {
- react {
- case Msg(ch1, ch2) => ch1 ! 21
- }
- }
-}
diff --git a/docs/examples/actors/fringe.scala b/docs/examples/actors/fringe.scala
deleted file mode 100644
index 2026628..0000000
--- a/docs/examples/actors/fringe.scala
+++ /dev/null
@@ -1,82 +0,0 @@
-package examples.actors
-
-import scala.actors.Actor._
-import scala.actors.{Channel, OutputChannel}
-
-/**
- @author Philipp Haller
- @version 1.1, 09/21/2007
- */
-object fringe extends Application {
-
- abstract class Tree
- case class Node(left: Tree, right: Tree) extends Tree
- case class Leaf(v: Int) extends Tree
-
- case class CompareFringe(t1: Tree, t2: Tree)
- case class ComputeFringe(t1: Tree, atoms: OutputChannel[Option[Leaf]])
- case class Equal(atom1: Option[Leaf], atom2: Option[Leaf])
- case class Extract(tree: Tree)
-
- val comparator = actor {
- val extractor1 = actor(extractorBehavior())
- val extractor2 = actor(extractorBehavior())
- val ch1 = new Channel[Option[Leaf]]
- val ch2 = new Channel[Option[Leaf]]
- loop {
- react {
- case CompareFringe(tree1, tree2) =>
- extractor1 ! ComputeFringe(tree1, ch1)
- extractor2 ! ComputeFringe(tree2, ch2)
- self ! Equal(ch1.?, ch2.?)
-
- case Equal(atom1, atom2) =>
- if (atom1 == atom2) atom1 match {
- case None =>
- println("same fringe")
- exit()
- case _ =>
- self ! Equal(ch1.?, ch2.?)
- } else {
- println("fringes differ")
- exit()
- }
- }
- }
- }
-
- val extractorBehavior = () => {
- var output: OutputChannel[Option[Leaf]] = null
- loop {
- react {
- case ComputeFringe(tree, leafSink) =>
- output = leafSink
- self ! Extract(tree)
-
- case Extract(tree) => tree match {
- case atom @ Leaf(_) =>
- output ! Some(atom)
- sender ! 'Continue
-
- case Node(left, right) =>
- val outer = self
- val outerCont = sender
- val cont = actor {
- react {
- case 'Continue =>
- outer.send(Extract(right), outerCont)
- }
- }
- self.send(Extract(left), cont)
- }
-
- case 'Continue =>
- output ! None
- exit()
- }
- }
- }
-
- comparator ! CompareFringe(Node(Leaf(5), Node(Leaf(7), Leaf(3))),
- Node(Leaf(5), Node(Leaf(7), Leaf(3))))
-}
diff --git a/docs/examples/actors/links.scala b/docs/examples/actors/links.scala
deleted file mode 100644
index 373e6b0..0000000
--- a/docs/examples/actors/links.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package examples.actors
-
-import scala.actors.{Actor, Exit}
-import scala.actors.Actor._
-
-object links extends Application {
-
- case object Stop
-
- actor {
- val start = link(p(2))
- start ! Stop
- }
-
- def p(n: Int): Actor =
- if (n == 0) top1()
- else top(p(n-1), n)
-
- def top(a: Actor, n: Int): Actor = actor {
- println("starting actor " + n + " (" + self + ")")
- self.trapExit = true
- link(a)
- loop {
- receive {
- case ex @ Exit(from, reason) =>
- println("Actor " + n + " received " + ex)
- exit('finished)
- case any => {
- println("Actor " + n + " received " + any)
- a ! any
- }
- }
- }
- }
-
- def top1(): Actor = actor {
- println("starting last actor" + " (" + self + ")")
- receive {
- case Stop =>
- println("Last actor now exiting")
- exit('abnormal)
- case any =>
- println("Last actor received " + any)
- top1()
- }
- }
-}
diff --git a/docs/examples/actors/looping.scala b/docs/examples/actors/looping.scala
deleted file mode 100644
index 1ce2e2e..0000000
--- a/docs/examples/actors/looping.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-package examples.actors
-
-import scala.actors.Actor._
-
-object looping extends Application {
- case object A
-
- val a = actor {
- var cnt = 0
- loop {
- react {
- case A =>
- cnt += 1
- if (cnt % 2 != 0) continue
- if (cnt < 10)
- println("received A")
- else {
- println("received last A")
- exit()
- }
- }
- }
- }
-
- for (i <- 0 until 10) a ! A
-}
diff --git a/docs/examples/actors/message.scala b/docs/examples/actors/message.scala
deleted file mode 100644
index d385543..0000000
--- a/docs/examples/actors/message.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-package examples.actors
-
-import scala.actors.{Actor, Scheduler}
-import scala.actors.Actor._
-import scala.actors.scheduler.SingleThreadedScheduler
-
-object message {
- def main(args: Array[String]) {
- val n = try { args(0).toInt }
- catch {
- case _ =>
- println("Usage: examples.actors.message <n>")
- Predef.exit
- }
- val nActors = 500
- val finalSum = n * nActors
- Scheduler.impl = new SingleThreadedScheduler
-
- def beh(next: Actor, sum: Int) {
- react {
- case value: Int =>
- val j = value + 1; val nsum = sum + j
- if (next == null && nsum >= n * j)
- println(nsum)
- else {
- if (next != null) next ! j
- if (nsum < n * j) beh(next, nsum)
- }
- }
- }
-
- def actorChain(i: Int, a: Actor): Actor =
- if (i > 0) actorChain(i-1, actor(beh(a, 0))) else a
-
- val firstActor = actorChain(nActors, null)
- var i = n; while (i > 0) { firstActor ! 0; i -= 1 }
-
- Scheduler.shutdown()
- }
-}
diff --git a/docs/examples/actors/pingpong.scala b/docs/examples/actors/pingpong.scala
deleted file mode 100644
index 4ed225c..0000000
--- a/docs/examples/actors/pingpong.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package examples.actors
-
-import scala.actors.Actor
-import scala.actors.Actor._
-
-case object Ping
-case object Pong
-case object Stop
-
-/**
- * Ping pong example.
- *
- * @author Philipp Haller
- * @version 1.1
- */
-object pingpong extends Application {
- val pong = new Pong
- val ping = new Ping(100000, pong)
- ping.start
- pong.start
-}
-
-class Ping(count: Int, pong: Actor) extends Actor {
- def act() {
- var pingsLeft = count - 1
- pong ! Ping
- loop {
- react {
- case Pong =>
- if (pingsLeft % 1000 == 0)
- println("Ping: pong")
- if (pingsLeft > 0) {
- pong ! Ping
- pingsLeft -= 1
- } else {
- println("Ping: stop")
- pong ! Stop
- exit()
- }
- }
- }
- }
-}
-
-class Pong extends Actor {
- def act() {
- var pongCount = 0
- loop {
- react {
- case Ping =>
- if (pongCount % 1000 == 0)
- println("Pong: ping "+pongCount)
- sender ! Pong
- pongCount += 1
- case Stop =>
- println("Pong: stop")
- exit()
- }
- }
- }
-}
diff --git a/docs/examples/actors/producers.scala b/docs/examples/actors/producers.scala
deleted file mode 100644
index 80e5ae3..0000000
--- a/docs/examples/actors/producers.scala
+++ /dev/null
@@ -1,114 +0,0 @@
-package examples.actors
-
-import scala.actors.Actor
-import scala.actors.Actor._
-
-abstract class Producer[T] {
-
- /** A signal that the next value should be produced. */
- private val Next = new Object
-
- /** A label for an undefined state of the iterators. */
- private val Undefined = new Object
-
- /** A signal to stop the coordinator. */
- private val Stop = new Object
-
- protected def produce(x: T) {
- coordinator ! Some(x)
- receive { case Next => }
- }
-
- protected def produceValues: Unit
-
- def iterator = new Iterator[T] {
- private var current: Any = Undefined
- private def lookAhead = {
- if (current == Undefined) current = coordinator !? Next
- current
- }
-
- def hasNext: Boolean = lookAhead match {
- case Some(x) => true
- case None => { coordinator ! Stop; false }
- }
-
- def next: T = lookAhead match {
- case Some(x) => current = Undefined; x.asInstanceOf[T]
- }
- }
-
- private val coordinator: Actor = actor {
- loop {
- react {
- case Next =>
- producer ! Next
- reply {
- receive { case x: Option[_] => x }
- }
- case Stop =>
- exit('stop)
- }
- }
- }
-
- private val producer: Actor = actor {
- receive {
- case Next =>
- produceValues
- coordinator ! None
- }
- }
-}
-
-object producers extends Application {
-
- class Tree(val left: Tree, val elem: Int, val right: Tree)
- def node(left: Tree, elem: Int, right: Tree): Tree = new Tree(left, elem, right)
- def node(elem: Int): Tree = node(null, elem, null)
-
- def tree = node(node(node(3), 4, node(6)), 8, node(node(9), 10, node(11)))
-
- class PreOrder(n: Tree) extends Producer[Int] {
- def produceValues = traverse(n)
- def traverse(n: Tree) {
- if (n != null) {
- produce(n.elem)
- traverse(n.left)
- traverse(n.right)
- }
- }
- }
-
- class PostOrder(n: Tree) extends Producer[Int] {
- def produceValues = traverse(n)
- def traverse(n: Tree) {
- if (n != null) {
- traverse(n.left)
- traverse(n.right)
- produce(n.elem)
- }
- }
- }
-
- class InOrder(n: Tree) extends Producer[Int] {
- def produceValues = traverse(n)
- def traverse(n: Tree) {
- if (n != null) {
- traverse(n.left)
- produce(n.elem)
- traverse(n.right)
- }
- }
- }
-
- actor {
- print("PreOrder:")
- for (x <- new PreOrder(tree).iterator) print(" "+x)
- print("\nPostOrder:")
- for (x <- new PostOrder(tree).iterator) print(" "+x)
- print("\nInOrder:")
- for (x <- new InOrder(tree).iterator) print(" "+x)
- print("\n")
- }
-}
diff --git a/docs/examples/actors/seq.scala b/docs/examples/actors/seq.scala
deleted file mode 100644
index 816c969..0000000
--- a/docs/examples/actors/seq.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package examples.actors
-
-object seq extends Application {
- import scala.actors.Actor._
- val a = actor {
- { react {
- case 'A => println("received 1st message")
- }; ()
- } andThen react {
- case 'A => println("received 2nd message")
- }
- }
- a ! 'A
- a ! 'A
-}
diff --git a/docs/examples/boundedbuffer.scala b/docs/examples/boundedbuffer.scala
deleted file mode 100644
index 359bfd8..0000000
--- a/docs/examples/boundedbuffer.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package examples
-
-object boundedbuffer {
-
- import concurrent.ops._
-
- class BoundedBuffer[A](N: Int)(implicit m: ClassManifest[A]) {
- var in, out, n = 0
- val elems = new Array[A](N)
-
- def await(cond: => Boolean) = while (!cond) { wait() }
-
- def put(x: A) = synchronized {
- await (n < N)
- elems(in) = x; in = (in + 1) % N; n += 1
- if (n == 1) notifyAll()
- }
-
- def get: A = synchronized {
- await (n != 0)
- val x = elems(out); out = (out + 1) % N ; n -= 1
- if (n == N - 1) notifyAll()
- x
- }
- }
-
- def kill(delay: Int) = new java.util.Timer().schedule(
- new java.util.TimerTask {
- override def run() = {
- println("[killed]")
- System.exit(0)
- }
- },
- delay) // in milliseconds
-
- def main(args: Array[String]) {
- val buf = new BoundedBuffer[String](10)
- var cnt = 0
- def produceString = { cnt += 1; cnt.toString() }
- def consumeString(ss: String) = println(ss)
- spawn { while (true) { val ssss = produceString; buf.put(ssss) } }
- spawn { while (true) { val s = buf.get; consumeString(s) } }
- kill(1000)
- }
-
-}
diff --git a/docs/examples/computeserver.scala b/docs/examples/computeserver.scala
deleted file mode 100644
index bd6f536..0000000
--- a/docs/examples/computeserver.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-package examples
-
-import concurrent._, concurrent.ops._
-
-class ComputeServer(n: Int) {
-
- private trait Job {
- type t
- def task: t
- def ret(x: t): Unit
- }
-
- private val openJobs = new Channel[Job]()
-
- private def processor(i: Int): Unit = {
- while (true) {
- val job = openJobs.read
- println("read a job")
- job.ret(job.task)
- }
- }
-
- def future[a](p: => a): () => a = {
- val reply = new SyncVar[a]()
- openJobs.write{
- new Job {
- type t = a
- def task = p
- def ret(x: a) = reply.set(x)
- }
- }
- () => reply.get
- }
-
- spawn(replicate(0, n) { processor })
-}
-
-object computeserver extends Application {
-
- def kill(delay: Int) = new java.util.Timer().schedule(
- new java.util.TimerTask {
- override def run() = {
- println("[killed]")
- System.exit(0)
- }
- },
- delay) // in milliseconds
-
- val server = new ComputeServer(1)
- val f = server.future(42)
- println(f())
- kill(10000)
-}
diff --git a/docs/examples/expressions/expressions-current.scala b/docs/examples/expressions/expressions-current.scala
deleted file mode 100644
index 660a417..0000000
--- a/docs/examples/expressions/expressions-current.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-package examples.expressions
-
-class Ref[a](var elem:a) {}
-
-abstract class Lang {
- trait Visitor {
- def caseNum(n: int): unit
- }
-
- abstract class Exp {
- def visit(v: visitor): unit
- }
-
- type visitor <: Visitor
-
- class Num(n: int) extends Exp {
- def visit(v: visitor): unit = v.caseNum(n)
- }
-
- class Eval(result: Ref[int]) requires visitor extends Visitor {
- def caseNum(n: int) = result.elem = n
- }
-}
-
-abstract class Lang2 extends Lang {
- trait Visitor2 extends Visitor {
- def casePlus(left: Exp, right: Exp): unit
- }
-
- type visitor <: Visitor2
-
- class Plus(l: Exp, r: Exp) extends Exp {
- def visit(v: visitor): unit = v.casePlus(l, r)
- }
-
- // class Eval2(result: Ref[int]): visitor extends Eval(result) with Visitor2 {
- class Eval2(result: Ref[int]) requires visitor extends Eval(result) with Visitor2 {
- def casePlus(l: Exp, r: Exp) =
- result.elem = { l.visit(this); result.elem } + { r.visit(this); result.elem }
- }
-
- class Show2(result: Ref[String]) requires visitor extends Visitor2 {
- def caseNum(n: int) = result.elem = n.toString()
- def casePlus(l: Exp, r: Exp) =
- result.elem =
- "(" + { l.visit(this); result.elem } +
- "+" + { r.visit(this); result.elem } + ")"
- }
-}
-
-object Main {
- def main(args: Array[String]): unit = {
- //val l1 = new Lang { type visitor = Visitor } // not yet implemented
- object l1 extends Lang { type visitor = Visitor } // workaround
- val e1: l1.Exp = new l1.Num(42)
- val iref = new Ref(0)
- Console.println("eval: " + { e1.visit(new l1.Eval(iref)); iref.elem })
-
- //val l2 = new Lang2 { type visitor = Visitor2 } // not yet implemented
- object l2 extends Lang2 { type visitor = Visitor2 } // workaround
- val e2: l2.Exp = new l2.Plus(new l2.Num(5), new l2.Num(37))
- val sref = new Ref("")
- Console.println("eval: " + { e2.visit(new l2.Eval2(iref)); iref.elem })
- Console.println("show: " + { e2.visit(new l2.Show2(sref)); sref.elem })
- e1.visit(new l1.Eval(iref))
- e2.visit(new l2.Show2(sref))
- }
-}
diff --git a/docs/examples/fors.scala b/docs/examples/fors.scala
deleted file mode 100644
index b937e53..0000000
--- a/docs/examples/fors.scala
+++ /dev/null
@@ -1,112 +0,0 @@
-package examples
-
-import scala.xml._
-
-
-object fors {
-
- val e = Node.NoAttributes
-
- class Person(_name: String, _age: Int) {
- val name = _name
- val age = _age
- }
-
- def printOlderThan20(xs: Seq[Person]): Iterator[String] =
- printOlderThan20(xs.iterator)
-
- def printOlderThan20(xs: Iterator[Person]): Iterator[String] =
- for (p <- xs if p.age > 20) yield p.name
-
- val persons = List(
- new Person("John", 40),
- new Person("Richard", 68)
- )
-
- def divisors(n: Int): List[Int] =
- for (i <- List.range(1, n+1) if n % i == 0) yield i
-
- def isPrime(n: Int) = divisors(n).length == 2
-
- def findNums(n: Int): Iterable[(Int, Int)] =
- for (i <- 1 until n;
- j <- 1 until (i-1);
- if isPrime(i+j)) yield (i, j)
-
- def sum(xs: List[Double]): Double =
- xs.foldLeft(0.0) { (x, y) => x + y }
-
- def scalProd(xs: List[Double], ys: List[Double]) =
- sum(for((x, y) <- xs zip ys) yield x * y)
-
- type Lst = List[Any]
-
- val prefix = null
- val scope = TopScope
-
- val books = List(
- Elem(prefix, "book", e, scope,
- Elem(prefix, "title", e, scope,
- Text("Structure and Interpretation of Computer Programs")),
- Elem(prefix, "author", e, scope,
- Text("Abelson, Harald")),
- Elem(prefix, "author", e, scope,
- Text("Sussman, Gerald J."))),
- Elem(prefix, "book", e, scope,
- Elem(prefix, "title", e, scope,
- Text("Principles of Compiler Design")),
- Elem(prefix, "author", e, scope,
- Text("Aho, Alfred")),
- Elem(prefix, "author", e, scope,
- Text("Ullman, Jeffrey"))),
- Elem(prefix, "book", e, scope,
- Elem(prefix, "title", e, scope,
- Text("Programming in Modula-2")),
- Elem(prefix, "author", e, scope,
- Text("Wirth, Niklaus")))
- )
-
- def findAuthor(books: Lst) =
- for (Elem(_, "book", _, _, book @ _*) <- books;
- Elem(_, "title", _, _, Text(title)) <- book.toList;
- if (title indexOf "Program") >= 0;
- Elem(_, "author", _, _, Text(author)) <- List(book)) yield author
-
- for (Elem(_, "book", _, _, book @ _*) <- books;
- Elem(_, "author", _, _, Text(author)) <- book.toList;
- if author startsWith "Ullman";
- Elem(_, "title", _, _, Text(title)) <- List(book)) yield title
-
- removeDuplicates(
- for (Elem(_, "book", _, _, b1 @ _* ) <- books;
- Elem(_, "book", _, _, b2 @ _*) <- books;
- if b1 != b2;
- Elem(_, "author", _, _, Text(a1)) <- b1.toList;
- Elem(_, "author", _, _, Text(a2)) <- b2.toList;
- if a1 == a2) yield Pair(a1, a2))
-
- def removeDuplicates[a](xs: List[a]): List[a] =
- if (xs.isEmpty)
- xs
- else
- xs.head :: removeDuplicates(for (x <- xs.tail if x != xs.head) yield x)
-
- def main(args: Array[String]) {
- print("Persons over 20:")
- printOlderThan20(persons) foreach { x => print(" " + x) }
- println
-
- println("divisors(34) = " + divisors(34))
-
- print("findNums(15) =");
- findNums(15) foreach { x => print(" " + x); }
- println
-
- val xs = List(3.5, 5.0, 4.5)
- println("average(" + xs + ") = " + sum(xs) / xs.length)
-
- val ys = List(2.0, 1.0, 3.0)
- println("scalProd(" + xs + ", " + ys +") = " + scalProd(xs, ys))
- }
-
-}
diff --git a/docs/examples/futures.scala b/docs/examples/futures.scala
deleted file mode 100644
index e05b6b3..0000000
--- a/docs/examples/futures.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-package examples
-
-import concurrent.ops._
-
-object futures {
- def someLengthyComputation = 1
- def anotherLengthyComputation = 2
- def f(x: Int) = x + x
- def g(x: Int) = x * x
-
- def main(args: Array[String]) {
- val x = future(someLengthyComputation)
- anotherLengthyComputation
- val y = f(x()) + g(x())
- println(y)
- }
-}
diff --git a/docs/examples/gadts.scala b/docs/examples/gadts.scala
deleted file mode 100644
index d2792f4..0000000
--- a/docs/examples/gadts.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package examples
-
-object gadts extends Application {
-
- abstract class Term[T]
- case class Lit(x: Int) extends Term[Int]
- case class Succ(t: Term[Int]) extends Term[Int]
- case class IsZero(t: Term[Int]) extends Term[Boolean]
- case class If[T](c: Term[Boolean],
- t1: Term[T],
- t2: Term[T]) extends Term[T]
-
- def eval[T](t: Term[T]): T = t match {
- case Lit(n) => n
- case Succ(u) => eval(u) + 1
- case IsZero(u) => eval(u) == 0
- case If(c, u1, u2) => eval(if (eval(c)) u1 else u2)
- }
- println(
- eval(If(IsZero(Lit(1)), Lit(41), Succ(Lit(41)))))
-}
-
diff --git a/docs/examples/iterators.scala b/docs/examples/iterators.scala
deleted file mode 100644
index e2e5e05..0000000
--- a/docs/examples/iterators.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package examples
-
-object iterators {
-
- def Array(elems: Double*): Array[Double] = {
- val ar = new Array[Double](elems.length)
- for (i <- 0 until elems.length)
- ar(i) = elems(i)
- ar
- }
-
- def printArray(xs: Array[Double]) =
- xs.iterator foreach { x => println(x) }
-
- def findGreater(xs: Array[Double], limit: Double) =
- xs.iterator
- .zip(Iterator.from(0))
- .filter{case Pair(x, i) => x > limit }
- .map{case Pair(x, i) => i}
-
- def main(args: Array[String]) {
- val ar = Array/*[Double]*/(6, 2, 8, 5, 1)
- printArray(ar)
- println("Elements greater than 3.0:")
- findGreater(ar, 3.0) foreach { x => Console.println(ar(x)) }
- }
-
-}
diff --git a/docs/examples/jolib/Ref.scala b/docs/examples/jolib/Ref.scala
deleted file mode 100644
index 5f655f1..0000000
--- a/docs/examples/jolib/Ref.scala
+++ /dev/null
@@ -1,55 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package examples.jolib;
-/*
-import concurrent.SyncVar;
-import concurrent.jolib._;
-
-class Ref[a](init: a) extends Join {
-
- object get extends Synchr[a](this) { case class C() extends SyncVar[a]; }
- object set extends Synchr[unit](this) { case class C(x: a) extends SyncVar[unit]; }
- object state extends Asynchr(this) { case class C(x: a); }
-
- rules (
- Pair(List(get, state), { case List(g @ get.C(), state.C(x) ) =>
- { g.set(x); state(state.C(x)) } }),
- Pair(List(set, state), { case List(s @ set.C(x), state.C(y) ) =>
- { s.set(()); state(state.C(x)) } })
- );
-
- state(state.C(init));
-
- def Get: a = get(get.C());
- def Set(x: a): unit = set(set.C(x));
-}
-*/
-object RefTest {
-
- def main(args: Array[String]) = {
- System.out.println("Started.");
-/*
- concurrent.ops.spawn({
- val r1 = new Ref(0);
- System.out.println("Reference r1 created.");
- System.out.println("Value r1 (first time) = " + r1.Get);
- r1.Set(42);
- System.out.println("Value r1 (second time) = " + r1.Get);
- });
- concurrent.ops.spawn({
- val r2 = new Ref(100);
- System.out.println("Reference r2 created.");
- System.out.println("Value r2 (first time) = " + r2.Get);
- r2.Set(89);
- System.out.println("Value r2 (second time) = " + r2.Get);
- });
-*/
- }
-
-}
diff --git a/docs/examples/jolib/parallelOr.scala b/docs/examples/jolib/parallelOr.scala
deleted file mode 100644
index 72d282b..0000000
--- a/docs/examples/jolib/parallelOr.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package examples.jolib;
-/*
-import concurrent.jolib._;
-import concurrent.SyncVar;
-
-/** Implementation in the join-calculus of a parallel OR. */
-object or extends Join {
-
- object res extends Synchr[boolean](this) { case class C() extends SyncVar[boolean] };
- object res1 extends Asynchr(this) { case class C(b: boolean); }
- object res2 extends Asynchr(this) { case class C(b: boolean); }
- object res1False extends Synchr[boolean](this) { case class C() extends SyncVar[boolean] };
- object res2False extends Synchr[boolean](this) { case class C() extends SyncVar[boolean] };
-
- rules(
- Pair(List(res, res1), { case List(r @ res.C(), res1.C(b)) =>
- if (b) r.set(b) else r.set(res1False(res1False.C())) }),
-
- Pair(List(res, res2), { case List(r @ res.C(), res2.C(b)) =>
- if (b) r.set(b) else r.set(res2False(res2False.C())) }),
-
- Pair(List(res1False, res2), { case List(r @ res1False.C(), res2.C(b)) =>
- r.set(b) }),
-
- Pair(List(res2False, res1), { case List(r @ res2False.C(), res1.C(b)) =>
- r.set(b) })
- );
-
- def apply(b1: => boolean, b2: => boolean): boolean = {
- concurrent.ops.spawn(res1(res1.C(b1)));
- concurrent.ops.spawn(res2(res2.C(b2)));
- res(res.C())
- }
-}
-*/
-object parallelOr {
-
- def main(args: Array[String]): unit = {
- def loop: boolean = { while (true) {}; true };
-/*
- System.out.println("true || true = " + or(true, true));
- System.out.println("false || false = " + or(false, false));
- System.out.println("false || true = " + or(false, true));
- System.out.println("true || false = " + or(true, false));
- System.out.println("true || loop = " + or(true, loop));
- System.out.println("loop || true = " + or(loop, true));
-*/
- }
-
-}
diff --git a/docs/examples/maps.scala b/docs/examples/maps.scala
deleted file mode 100644
index d545b50..0000000
--- a/docs/examples/maps.scala
+++ /dev/null
@@ -1,187 +0,0 @@
-package examples
-
-object maps {
-
- import scala.collection.immutable._
-
- trait MapStruct[kt, vt] {
- trait Map extends Function1[kt, vt] {
- def extend(key: kt, value: vt): Map
- def remove(key: kt): Map
- def domain: Stream[kt]
- def range: Stream[vt]
- }
- type map <: Map
- val empty: map
- }
-
- class AlgBinTree[kt >: Null <: Ordered[kt], vt >: Null <: AnyRef]() extends MapStruct[kt, vt] {
- type map = AlgMap
-
- val empty: AlgMap = Empty()
-
- private case class Empty() extends AlgMap {}
- private case class Node(key: kt, value: vt, l: map, r: map) extends AlgMap {}
-
- trait AlgMap extends Map {
- def apply(key: kt): vt = this match {
- case Empty() => null
- case Node(k, v, l, r) =>
- if (key < k) l.apply(key)
- else if (key > k) r.apply(key)
- else v
- }
-
- def extend(key: kt, value: vt): map = this match {
- case Empty()=> Node(key, value, empty, empty)
- case Node(k, v, l, r) =>
- if (key < k) Node(k, v, l.extend(key, value), r)
- else if (key > k) Node(k, v, l, r.extend(key, value))
- else Node(k, value, l, r)
- }
-
- def remove(key: kt): map = this match {
- case Empty()=> empty
- case Node(k, v, l, r) =>
- if (key < k) Node(k, v, l.remove(key), r)
- else if (key > k) Node(k, v, l, r.remove(key))
- else if (l == empty) r
- else if (r == empty) l
- else {
- val midKey = r.domain.head
- Node(midKey, r.apply(midKey), l, r.remove(midKey))
- }
- }
-
- def domain: Stream[kt] = this match {
- case Empty()=> Stream.empty
- case Node(k, v, l, r) => l.domain append Stream.cons(k, r.domain)
- }
-
- def range: Stream[vt] = this match {
- case Empty()=> Stream.empty
- case Node(k, v, l, r) => l.range append Stream.cons(v, r.range)
- }
- }
- }
-
- class OOBinTree[kt >: Null <: Ordered[kt], vt >: Null <: AnyRef]() extends MapStruct[kt, vt] {
- type map = OOMap
-
- trait OOMap extends Map {
- def apply(key: kt): vt
- def extend(key: kt, value: vt): map
- def remove(key: kt): map
- def domain: Stream[kt]
- def range: Stream[vt]
- }
- val empty: OOMap = new OOMap {
- def apply(key: kt): vt = null
- def extend(key: kt, value: vt) = new Node(key, value, empty, empty)
- def remove(key: kt) = empty
- def domain: Stream[kt] = Stream.empty
- def range: Stream[vt] = Stream.empty
- }
- private class Node(k: kt, v: vt, l: map, r: map) extends OOMap {
- def apply(key: kt): vt =
- if (key < k) l.apply(key)
- else if (key > k) r.apply(key)
- else v;
- def extend(key: kt, value: vt): map =
- if (key < k) new Node(k, v, l.extend(key, value), r)
- else if (key > k) new Node(k, v, l, r.extend(key, value))
- else new Node(k, value, l, r)
- def remove(key: kt): map =
- if (key < k) new Node(k, v, l.remove(key), r)
- else if (key > k) new Node(k, v, l, r.remove(key))
- else if (l == empty) r
- else if (r == empty) l
- else {
- val midKey = r.domain.head
- new Node(midKey, r(midKey), l, r.remove(midKey))
- }
- def domain: Stream[kt] = l.domain append Stream.cons(k, r.domain)
- def range: Stream[vt] = l.range append Stream.cons(v, r.range)
- }
- }
-
- class MutBinTree[kt >: Null <: Ordered[kt], vt >: Null <: AnyRef]() extends MapStruct[kt, vt] {
- type map = MutMap
- class MutMap(key: kt, value: vt) extends Map {
- val k = key
- var v = value
- var l, r = empty
-
- def apply(key: kt): vt =
- if (this == empty) null
- else if (key < k) l.apply(key)
- else if (key > k) r.apply(key)
- else v
-
- def extend(key: kt, value: vt): map =
- if (this == empty) new MutMap(key, value)
- else {
- if (key < k) l = l.extend(key, value)
- else if (key > k) r = r.extend(key, value)
- else v = value
- this
- }
-
- def remove(key: kt): map =
- if (this == empty) this
- else if (key < k) { l = l.remove(key); this }
- else if (key > k) { r = r.remove(key); this }
- else if (l == empty) r
- else if (r == empty) l
- else {
- var mid = r
- while (!(mid.l == empty)) { mid = mid.l }
- mid.r = r.remove(mid.k)
- mid.l = l
- mid
- }
-
- def domain: Stream[kt] =
- if (this == empty) Stream.empty
- else l.domain append Stream.cons(k, r.domain)
-
- def range: Stream[vt] =
- if (this == empty) Stream.empty
- else l.range append Stream.cons(v, r.range)
- }
- val empty = new MutMap(null, null)
- }
-
- class Date(y: Int, m: Int, d: Int) extends Ordered[Date] {
- def year = y
- def month = m
- def day = d
-
- def compare(other: Date): Int =
- if (year == other.year &&
- month == other.month &&
- day == other.day)
- 0
- else if (year < other.year ||
- year == other.year && month < other.month ||
- month == other.month && day < other.day)
- -1
- else
- 1
-
- override def equals(that: Any): Boolean =
- that.isInstanceOf[Date] && {
- val o = that.asInstanceOf[Date];
- day == o.day && month == o.month && year == o.year
- }
- }
-
- def main(args: Array[String]) {
- val t = new OOBinTree[Date, String]()
- ()
- }
-
-}
-
-
-
diff --git a/docs/examples/monads/callccInterpreter.scala b/docs/examples/monads/callccInterpreter.scala
deleted file mode 100644
index 5b556bd..0000000
--- a/docs/examples/monads/callccInterpreter.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-object callccInterpreter {
-
- def id[a](x: a) = x
-
- type Answer = Value;
-
- case class M[A](in: (A => Answer) => Answer) {
- def bind[B](k: A => M[B]) = M[B](c => in (a => k(a) in c));
- def map[B](f: A => B): M[B] = bind(x => unitM(f(x)));
- def flatMap[B](f: A => M[B]): M[B] = bind(f);
- }
-
- def unitM[A](a: A) = M[A](c => c(a));
-
- def showM(m: M[Value]): String = (m in id).toString();
-
- def callCC[A](h: (A => M[A]) => M[A]) =
- M[A](c => h(a => M[A](d => c(a))) in c);
-
- type Name = String;
-
- trait Term;
- case class Var(x: Name) extends Term;
- case class Con(n: Int) extends Term;
- case class Add(l: Term, r: Term) extends Term;
- case class Lam(x: Name, body: Term) extends Term;
- case class App(fun: Term, arg: Term) extends Term;
- case class Ccc(x: Name, t: Term) extends Term;
-
- trait Value;
- case object Wrong extends Value {
- override def toString() = "wrong"
- }
- case class Num(n: Int) extends Value {
- override def toString() = n.toString();
- }
- case class Fun(f: Value => M[Value]) extends Value {
- override def toString() = "<function>"
- }
-
- type Environment = List[Pair[Name, Value]];
-
- def lookup(x: Name, e: Environment): M[Value] = e match {
- case List() => unitM(Wrong)
- case Pair(y, b) :: e1 => if (x == y) unitM(b) else lookup(x, e1)
- }
-
- def add(a: Value, b: Value): M[Value] = Pair(a, b) match {
- case Pair(Num(m), Num(n)) => unitM(Num(m + n))
- case _ => unitM(Wrong)
- }
-
- def apply(a: Value, b: Value): M[Value] = a match {
- case Fun(k) => k(b)
- case _ => unitM(Wrong)
- }
-
- def interp(t: Term, e: Environment): M[Value] = t match {
- case Var(x) => lookup(x, e)
- case Con(n) => unitM(Num(n))
- case Add(l, r) => for (a <- interp(l, e);
- b <- interp(r, e);
- c <- add(a, b))
- yield c
- case Lam(x, t) => unitM(Fun(a => interp(t, Pair(x, a) :: e)))
- case App(f, t) => for (a <- interp(f, e);
- b <- interp(t, e);
- c <- apply(a, b))
- yield c
- case Ccc(x, t) => callCC(k => interp(t, Pair(x, Fun(k)) :: e))
- }
-
- def test(t: Term): String =
- showM(interp(t, List()));
-
- val term0 = App(Lam("x", Add(Var("x"), Var("x"))), Add(Con(10), Con(11)));
- val term1 = App(Con(1), Con(2));
- val term2 = Add(Con(1), Ccc("k", Add(Con(2), App(Var("k"), Con(4)))));
-
- def main(args: Array[String]) = {
- System.out.println(test(term0));
- System.out.println(test(term1));
- System.out.println(test(term2));
- }
-}
-
diff --git a/docs/examples/monads/directInterpreter.scala b/docs/examples/monads/directInterpreter.scala
deleted file mode 100644
index 06fffba..0000000
--- a/docs/examples/monads/directInterpreter.scala
+++ /dev/null
@@ -1,55 +0,0 @@
-object directInterpreter {
-
- type Name = String;
-
- trait Term;
- case class Var(x: Name) extends Term;
- case class Con(n: Int) extends Term;
- case class Add(l: Term, r: Term) extends Term;
- case class Lam(x: Name, body: Term) extends Term;
- case class App(fun: Term, arg: Term) extends Term;
-
- trait Value;
- case object Wrong extends Value;
- case class Num(n: Int) extends Value;
- case class Fun(f: Value => Value)extends Value;
-
- def showval(v: Value): String = v match {
- case Wrong => "<wrong>"
- case Num(n) => n.toString()
- case Fun(f) => "<function>"
- }
-
- type Environment = List[Pair[Name, Value]];
-
- def lookup(x: Name, e: Environment): Value = e match {
- case List() => Wrong
- case Pair(y, b) :: e1 => if (x == y) b else lookup(x, e1)
- }
-
- def add(a: Value, b: Value): Value = Pair(a, b) match {
- case Pair(Num(m), Num(n)) => Num(m + n)
- case _ => Wrong
- }
-
- def apply(a: Value, b: Value) = a match {
- case Fun(k) => k(b)
- case _ => Wrong
- }
-
- def interp(t: Term, e: Environment): Value = t match {
- case Var(x) => lookup(x, e)
- case Con(n) => Num(n)
- case Add(l, r) => add(interp(l, e), interp(r, e))
- case Lam(x, t) => Fun(a => interp(t, Pair(x, a) :: e))
- case App(f, t) => apply(interp(f, e), interp(t, e))
- }
-
- def test(t: Term): String =
- showval(interp(t, List()));
-
- val term0 = App(Lam("x", Add(Var("x"), Var("x"))), Add(Con(10), Con(11)));
-
- def main(args: Array[String]) =
- System.out.println(test(term0));
-}
diff --git a/docs/examples/monads/errorInterpreter.scala b/docs/examples/monads/errorInterpreter.scala
deleted file mode 100644
index d3cc456..0000000
--- a/docs/examples/monads/errorInterpreter.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-object errorInterpreter {
-
- trait M[A] {
- def show: String
- def bind[B](k: A => M[B]): M[B]
- def map[B](f: A => B): M[B] = bind(x => unitM(f(x)))
- def flatMap[B](f: A => M[B]): M[B] = bind(f)
- }
-
- def unitM[A](a: A): M[A] = new Suc(a)
- def errorM[A](msg: String): M[A] = new Err(msg)
-
- def showM(m: M[Value]): String = m.show
-
- class Suc[A](x: A) extends M[A] {
- def bind[B](k: A => M[B]): M[B] = k(x)
- def show: String = "Success: " + x
- }
- class Err[A](msg: String) extends M[A] {
- def bind[B](k: A => M[B]): M[B] = new Err(msg)
- def show: String = "Error: " + msg
- }
-
- type Name = String
-
- trait Term
- case class Var(x: Name) extends Term
- case class Con(n: Int) extends Term
- case class Add(l: Term, r: Term) extends Term
- case class Lam(x: Name, body: Term) extends Term
- case class App(fun: Term, arg: Term) extends Term
-
- trait Value
- case object Wrong extends Value {
- override def toString() = "wrong"
- }
- case class Num(n: Int) extends Value {
- override def toString() = n.toString()
- }
- case class Fun(f: Value => M[Value]) extends Value {
- override def toString() = "<function>"
- }
-
- type Environment = List[Pair[Name, Value]]
-
- def lookup(x: Name, e: Environment): M[Value] = e match {
- case List() => errorM("unbound variable: " + x);
- case Pair(y, b) :: e1 => if (x == y) unitM(b) else lookup(x, e1)
- }
-
- def add(a: Value, b: Value): M[Value] = Pair(a, b) match {
- case Pair(Num(m), Num(n)) => unitM(Num(m + n))
- case _ => errorM("should be numbers: " + a + "," + b)
- }
-
- def apply(a: Value, b: Value): M[Value] = a match {
- case Fun(k) => k(b)
- case _ => errorM("should be function: " + a)
- }
-
- def interp(t: Term, e: Environment): M[Value] = t match {
- case Var(x) => lookup(x, e)
- case Con(n) => unitM(Num(n))
- case Add(l, r) => for (a <- interp(l, e);
- b <- interp(r, e);
- c <- add(a, b))
- yield c
- case Lam(x, t) => unitM(Fun(a => interp(t, Pair(x, a) :: e)))
- case App(f, t) => for (a <- interp(f, e);
- b <- interp(t, e);
- c <- apply(a, b))
- yield c
- }
-
- def test(t: Term): String =
- showM(interp(t, List()))
-
- val term0 = App(Lam("x", Add(Var("x"), Var("x"))), Add(Con(10), Con(11)))
- val term1 = App(Con(1), Con(2))
-
- def main(args: Array[String]) = {
- System.out.println(test(term0))
- System.out.println(test(term1))
- }
-}
-
diff --git a/docs/examples/monads/simpleInterpreter.scala b/docs/examples/monads/simpleInterpreter.scala
deleted file mode 100644
index cde3a92..0000000
--- a/docs/examples/monads/simpleInterpreter.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-object simpleInterpreter {
-
- case class M[A](value: A) {
- def bind[B](k: A => M[B]): M[B] = k(value);
- def map[B](f: A => B): M[B] = bind(x => unitM(f(x)));
- def flatMap[B](f: A => M[B]): M[B] = bind(f);
- }
-
- def unitM[A](a: A): M[A] = M(a);
-
- def showM(m: M[Value]): String = m.value.toString();
-
- type Name = String;
-
- trait Term;
- case class Var(x: Name) extends Term;
- case class Con(n: Int) extends Term;
- case class Add(l: Term, r: Term) extends Term;
- case class Lam(x: Name, body: Term) extends Term;
- case class App(fun: Term, arg: Term) extends Term;
-
- trait Value;
- case object Wrong extends Value {
- override def toString() = "wrong"
- }
- case class Num(n: Int) extends Value {
- override def toString() = n.toString();
- }
- case class Fun(f: Value => M[Value]) extends Value {
- override def toString() = "<function>"
- }
-
- type Environment = List[Pair[Name, Value]];
-
- def lookup(x: Name, e: Environment): M[Value] = e match {
- case List() => unitM(Wrong)
- case Pair(y, b) :: e1 => if (x == y) unitM(b) else lookup(x, e1)
- }
-
- def add(a: Value, b: Value): M[Value] = Pair(a, b) match {
- case Pair(Num(m), Num(n)) => unitM(Num(m + n))
- case _ => unitM(Wrong)
- }
-
- def apply(a: Value, b: Value): M[Value] = a match {
- case Fun(k) => k(b)
- case _ => unitM(Wrong)
- }
-
- def interp(t: Term, e: Environment): M[Value] = t match {
- case Var(x) => lookup(x, e)
- case Con(n) => unitM(Num(n))
- case Add(l, r) => for (a <- interp(l, e);
- b <- interp(r, e);
- c <- add(a, b))
- yield c
- case Lam(x, t) => unitM(Fun(a => interp(t, Pair(x, a) :: e)))
- case App(f, t) => for (a <- interp(f, e);
- b <- interp(t, e);
- c <- apply(a, b))
- yield c
- }
-
- def test(t: Term): String =
- showM(interp(t, List()));
-
- val term0 = App(Lam("x", Add(Var("x"), Var("x"))), Add(Con(10), Con(11)));
- val term1 = App(Con(1), Con(2));
-
- def main(args: Array[String]) = {
- System.out.println(test(term0));
- System.out.println(test(term1));
- }
-}
-
diff --git a/docs/examples/monads/stateInterpreter.scala b/docs/examples/monads/stateInterpreter.scala
deleted file mode 100644
index 97f3335..0000000
--- a/docs/examples/monads/stateInterpreter.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-package examples.monads;
-
-object stateInterpreter {
-
- type State = Int;
-
- val tickS = new M(s => Pair((), s + 1));
-
- case class M[A](in: State => Pair[A, State]) {
- def bind[B](k: A => M[B]) = M[B]{ s0 =>
- val Pair(a, s1) = this in s0; k(a) in s1
- }
- def map[B](f: A => B): M[B] = bind(x => unitM(f(x)));
- def flatMap[B](f: A => M[B]): M[B] = bind(f);
- }
-
- def unitM[A](a: A) = M[A](s => Pair(a, s));
-
- def showM(m: M[Value]): String = {
- val Pair(a, s1) = m in 0;
- "Value: " + a + "; Count: " + s1
- }
-
- type Name = String;
-
- trait Term;
- case class Var(x: Name) extends Term;
- case class Con(n: Int) extends Term;
- case class Add(l: Term, r: Term) extends Term;
- case class Lam(x: Name, body: Term) extends Term;
- case class App(fun: Term, arg: Term) extends Term;
-
- trait Value;
- case object Wrong extends Value {
- override def toString() = "wrong"
- }
- case class Num(n: Int) extends Value {
- override def toString() = n.toString();
- }
- case class Fun(f: Value => M[Value]) extends Value {
- override def toString() = "<function>"
- }
-
- type Environment = List[Pair[Name, Value]];
-
- def lookup(x: Name, e: Environment): M[Value] = e match {
- case List() => unitM(Wrong)
- case Pair(y, b) :: e1 => if (x == y) unitM(b) else lookup(x, e1)
- }
-
- def add(a: Value, b: Value): M[Value] = Pair(a, b) match {
- case Pair(Num(m), Num(n)) => for (_ <- tickS) yield Num(m + n)
- case _ => unitM(Wrong)
- }
-
- def apply(a: Value, b: Value): M[Value] = a match {
- case Fun(k) => for (_ <- tickS; c <- k(b)) yield c
- case _ => unitM(Wrong)
- }
-
- def interp(t: Term, e: Environment): M[Value] = t match {
- case Var(x) => lookup(x, e)
- case Con(n) => unitM(Num(n))
- case Add(l, r) => for (a <- interp(l, e);
- b <- interp(r, e);
- c <- add(a, b))
- yield c
- case Lam(x, t) => unitM(Fun(a => interp(t, Pair(x, a) :: e)))
- case App(f, t) => for (a <- interp(f, e);
- b <- interp(t, e);
- c <- apply(a, b))
- yield c
- }
-
- def test(t: Term): String =
- showM(interp(t, List()));
-
- val term0 = App(Lam("x", Add(Var("x"), Var("x"))), Add(Con(10), Con(11)));
- val term1 = App(Con(1), Con(2));
-
- def main(args: Array[String]) = {
- System.out.println(test(term0));
- System.out.println(test(term1));
- }
-}
-
diff --git a/docs/examples/oneplacebuffer.scala b/docs/examples/oneplacebuffer.scala
deleted file mode 100644
index 02b8a9c..0000000
--- a/docs/examples/oneplacebuffer.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-package examples
-
-object oneplacebuffer {
-
- import scala.concurrent.{MailBox, ops}
-
- class OnePlaceBuffer {
- private val m = new MailBox() {} // An internal mailbox
- private case class Empty() // Types of messages we deal with
- private case class Full(x: Int)
-
- m send Empty() // Initialization
-
- def write(x: Int) {
- m receive {
- case Empty() =>
- println("put " + x)
- m send Full(x)
- }
- }
-
- def read: Int = m receive {
- case Full(x) =>
- println("get " + x)
- m send Empty(); x
- }
- }
-
- def kill(delay: Int) = new java.util.Timer().schedule(
- new java.util.TimerTask {
- override def run() = {
- println("[killed]")
- exit(0)
- }
- },
- delay) // in milliseconds
-
- def main(args: Array[String]) {
- val buf = new OnePlaceBuffer
- val random = new java.util.Random()
-
- def producer(n: Int) {
- Thread.sleep(random.nextInt(1000))
- buf.write(n)
- producer(n + 1)
- }
-
- def consumer {
- Thread.sleep(random.nextInt(1000))
- val n = buf.read
- consumer
- }
-
- ops.spawn(producer(0))
- ops.spawn(consumer)
- kill(10000)
- }
-
-}
-
diff --git a/docs/examples/parsing/ArithmeticParser.scala b/docs/examples/parsing/ArithmeticParser.scala
deleted file mode 100644
index 99cf7a5..0000000
--- a/docs/examples/parsing/ArithmeticParser.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package examples.parsing
-
-import scala.util.parsing.combinator.lexical.StdLexical
-import scala.util.parsing.combinator.syntactical.StdTokenParsers
-
-/** Parse and evaluate a numeric expression as a sequence of terms, separated by + or -
- * a term is a sequence of factors, separated by * or /
- * a factor is a parenthesized expression or a number
- *
- * @author Adriaan Moors
- */
-object arithmeticParser extends StdTokenParsers {
- type Tokens = StdLexical ; val lexical = new StdLexical
- lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
-
- lazy val expr = term*("+" ^^^ {(x: int, y: int) => x + y} | "-" ^^^ {(x: int, y: int) => x - y})
- lazy val term = factor*("*" ^^^ {(x: int, y: int) => x * y} | "/" ^^^ {(x: int, y: int) => x / y})
- lazy val factor: Parser[int] = "(" ~> expr <~ ")" | numericLit ^^ (_.toInt)
-
- def main(args: Array[String]) {
- println(
- if (args.length == 1) {
- expr(new lexical.Scanner(args(0)))
- }
- else
- "usage: scala examples.parsing.arithmeticParser <expr-string>"
- )
- }
-}
-
-
-object arithmeticParserDesugared extends StdTokenParsers {
- type Tokens = StdLexical ; val lexical = new StdLexical
- lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
-
- lazy val expr = chainl1(term, (keyword("+").^^^{(x: int, y: int) => x + y}).|(keyword("-").^^^{(x: int, y: int) => x - y}))
- lazy val term = chainl1(factor, (keyword("*").^^^{(x: int, y: int) => x * y}).|(keyword("/").^^^{(x: int, y: int) => x / y}))
- lazy val factor: Parser[int] = keyword("(").~>(expr.<~(keyword(")"))).|(numericLit.^^(x => x.toInt))
-
- def main(args: Array[String]) {
- println(
- if (args.length == 1) {
- expr(new lexical.Scanner(args(0)))
- }
- else
- "usage: scala examples.parsing.arithmeticParser <expr-string>"
- )
- }
-}
diff --git a/docs/examples/parsing/ArithmeticParsers.scala b/docs/examples/parsing/ArithmeticParsers.scala
deleted file mode 100644
index 62d7a61..0000000
--- a/docs/examples/parsing/ArithmeticParsers.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-package examples.parsing
-
-import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-
-object ArithmeticParsers extends StandardTokenParsers {
- lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
-
- def expr: Parser[Any] = term ~ rep("+" ~ term | "-" ~ term)
- def term = factor ~ rep("*" ~ factor | "/" ~ factor)
- def factor: Parser[Any] = "(" ~ expr ~ ")" | numericLit
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(expr)(tokens))
- }
-}
-
-object ArithmeticParsers1 extends StandardTokenParsers {
- lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
-
- val reduceList: Int ~ List[String ~ Int] => Int = {
- case i ~ ps => (i /: ps)(reduce)
- }
-
- def reduce(x: Int, r: String ~ Int) = (r: @unchecked) match {
- case "+" ~ y => x + y
- case "-" ~ y => x - y
- case "*" ~ y => x * y
- case "/" ~ y => x / y
- }
-
- def expr : Parser[Int] = term ~ rep ("+" ~ term | "-" ~ term) ^^ reduceList
- def term : Parser[Int] = factor ~ rep ("*" ~ factor | "/" ~ factor) ^^ reduceList
- def factor: Parser[Int] = "(" ~> expr <~ ")" | numericLit ^^ (_.toInt)
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(expr)(tokens))
- }
-}
-
-class Expr
-case class BinOp(op: String, l: Expr, r: Expr) extends Expr
-case class Num(n: Int) extends Expr
-
-object ArithmeticParsers2 extends StandardTokenParsers {
- lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
-
- val reduceList: Expr ~ List[String ~ Expr] => Expr = {
- case i ~ ps => (i /: ps)(reduce)
- }
-
- def reduce(l: Expr, r: String ~ Expr) = BinOp(r._1, l, r._2)
- def mkNum(s: String) = Num(s.toInt)
-
- def expr : Parser[Expr] = term ~ rep ("+" ~ term | "-" ~ term) ^^ reduceList
- def term : Parser[Expr] = factor ~ rep ("*" ~ factor | "/" ~ factor) ^^ reduceList
- def factor: Parser[Expr] = "(" ~> expr <~ ")" | numericLit ^^ ((s: String) => Num(s.toInt))
-
- def main(args: Array[String]) {
- val parse = phrase(expr)
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(parse(tokens))
- }
-}
-
-
diff --git a/docs/examples/parsing/JSON.scala b/docs/examples/parsing/JSON.scala
deleted file mode 100644
index abfa242..0000000
--- a/docs/examples/parsing/JSON.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package examples.parsing
-
-import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-
-object JSON extends StandardTokenParsers {
- lexical.delimiters += ("{", "}", "[", "]", ":", ",")
- lexical.reserved += ("null", "true", "false")
-
- def obj : Parser[Any] = "{" ~ repsep(member, ",") ~ "}"
- def arr : Parser[Any] = "[" ~ repsep(value, ",") ~ "]"
- def member: Parser[Any] = ident ~ ":" ~ value
- def value : Parser[Any] = ident | numericLit | obj | arr |
- "null" | "true" | "false"
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(value)(tokens))
- }
-}
-object JSON1 extends StandardTokenParsers {
- lexical.delimiters += ("{", "}", "[", "]", ":", ",")
- lexical.reserved += ("null", "true", "false")
-
- def obj: Parser[Map[String, Any]] =
- "{" ~> repsep(member, ",") <~ "}" ^^ (Map() ++ _)
-
- def arr: Parser[List[Any]] =
- "[" ~> repsep(value, ",") <~ "]"
-
- def member: Parser[(String, Any)] =
- ident ~ ":" ~ value ^^ { case name ~ ":" ~ value => (name -> value) }
-
- def value: Parser[Any] =
- ident | numericLit ^^ (_.toInt) | obj | arr |
- "null" ^^^ null | "true" ^^^ true | "false" ^^^ false
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(value)(tokens))
- }
-}
-
diff --git a/docs/examples/parsing/ListParser.scala b/docs/examples/parsing/ListParser.scala
deleted file mode 100644
index 59fc292..0000000
--- a/docs/examples/parsing/ListParser.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-package examples.parsing
-
-import scala.util.parsing.combinator.{Parsers, ImplicitConversions, ~, mkTilde}
-import scala.util.parsing.input.CharArrayReader
-
-object listParser {
- abstract class Tree
- case class Id(s: String) extends Tree
- case class Num(n: Int) extends Tree
- case class Lst(elems: List[Tree]) extends Tree
-
- import Character.{isLetter, isLetterOrDigit, isDigit}
- def mkString(cs: List[Any]) = cs.mkString("")
-
- class ListParsers extends Parsers {
- type Elem = Char
-
- lazy val ident = rep1(elem("letter", isLetter), elem("letter or digit", isLetterOrDigit)) ^^ {cs => Id(mkString(cs))}
- lazy val number = chainl1(elem("digit", isDigit) ^^ (_ - '0'), success{(accum: Int, d: Int) => accum * 10 + d}) ^^ Num
- lazy val list = '(' ~> repsep(expr, ',') <~ ')' ^^ Lst
- lazy val expr: Parser[Tree] = list | ident | number
- }
-
- def main(args: Array[String]) {
- println(
- if (args.length == 1) {
- (new ListParsers).expr(new CharArrayReader(args(0).toCharArray()))
- }
- else
- "usage: scala examples.parsing.listParser <list-string>"
- )
- }
-}
diff --git a/docs/examples/parsing/ListParsers.scala b/docs/examples/parsing/ListParsers.scala
deleted file mode 100644
index b449c4a..0000000
--- a/docs/examples/parsing/ListParsers.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package examples.parsing
-
-import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-
-object ListParsers extends StandardTokenParsers {
- lexical.delimiters ++= List("(", ")", ",")
-
- def expr: Parser[Any] = "(" ~ exprs ~ ")" | ident | numericLit
- def exprs: Parser[Any] = expr ~ rep ("," ~ expr)
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(expr)(tokens))
- }
-}
-
-object ListParsers1 extends StandardTokenParsers {
- lexical.delimiters ++= List("(", ")", ",")
-
- def expr: Parser[Any] = "(" ~> exprs <~ ")" | ident | numericLit
-
- def exprs: Parser[List[Any]] = expr ~ rep ("," ~> expr) ^^ { case x ~ y => x :: y }
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(expr)(tokens))
- }
-}
diff --git a/docs/examples/parsing/MiniML.scala b/docs/examples/parsing/MiniML.scala
deleted file mode 100644
index f7f7172..0000000
--- a/docs/examples/parsing/MiniML.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-package examples.parsing
-
-import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-import scala.util.parsing.combinator1.syntactical.StandardTokenParsers
-
-object MiniML extends StandardTokenParsers {
- lexical.delimiters += ("(", ")", ".", "=")
- lexical.reserved += ("lambda", "let", "in")
-
- def expr: Parser[Any] = (
- "let" ~ ident ~ "=" ~ expr ~ "in" ~ expr
- | "lambda" ~ ident ~ "." ~ expr
- | simpleExpr ~ rep(expr)
- )
- def simpleExpr: Parser[Any] = (
- ident
- | "(" ~ expr ~ ")"
- )
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(expr)(tokens))
- }
-}
-
-class Expr
-case class Let(x: String, expr: Expr, body: Expr) extends Expr
-case class Lambda(x: String, expr: Expr) extends Expr
-case class Apply(fun: Expr, arg: Expr) extends Expr
-case class Var(x: String) extends Expr
-
-object MiniML1 extends StandardTokenParsers {
- lexical.delimiters += ("(", ")", ".", "=")
- lexical.reserved += ("lambda", "let", "in")
-
- def expr: Parser[Expr] = (
- "let" ~ ident ~ "=" ~ expr ~ "in" ~ expr ^^ { case "let" ~ x ~ "=" ~ e ~ "in" ~ b => Let(x, e, b) }
- | "lambda" ~ ident ~ "." ~ expr ^^ { case "lambda" ~ x ~ "." ~ e => Lambda(x, e) }
- | simpleExpr ~ rep(expr) ^^ { case f ~ as => (f /: as) (Apply) }
- )
- def simpleExpr: Parser[Expr] = (
- ident ^^ { Var }
- | "(" ~> expr <~ ")"
- )
-
- def main(args: Array[String]) {
- val tokens = new lexical.Scanner(args(0))
- println(args(0))
- println(phrase(expr)(tokens))
- }
-}
diff --git a/docs/examples/parsing/lambda/Main.scala b/docs/examples/parsing/lambda/Main.scala
deleted file mode 100644
index 165e82b..0000000
--- a/docs/examples/parsing/lambda/Main.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-package examples.parsing.lambda
-
-import scala.util.parsing.combinator.Parsers
-import scala.util.parsing.input.StreamReader
-
-import java.io.File
-import java.io.FileInputStream
-import java.io.InputStreamReader
-
-/**
- * Parser for an untyped lambda calculus
- *
- * Usage: scala examples.parsing.lambda.Main <file>
- *
- * (example files: see test/ *.kwi)
- *
- * @author Miles Sabin (adapted slightly by Adriaan Moors)
- */
-object Main extends Application with TestParser
-{
- override def main(args: Array[String]) =
- {
- val in = StreamReader(new InputStreamReader(new FileInputStream(new File(args(0))), "ISO-8859-1"))
- parse(in) match
- {
- case Success(term, _) =>
- {
- Console.println("Term: \n"+term)
- }
- case Failure(msg, remainder) => Console.println("Failure: "+msg+"\n"+"Remainder: \n"+remainder.pos.longString)
- case Error(msg, remainder) => Console.println("Error: "+msg+"\n"+"Remainder: \n"+remainder.pos.longString)
- }
- }
-}
diff --git a/docs/examples/parsing/lambda/TestParser.scala b/docs/examples/parsing/lambda/TestParser.scala
deleted file mode 100644
index d26589d..0000000
--- a/docs/examples/parsing/lambda/TestParser.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-package examples.parsing.lambda
-
-import scala.util.parsing.input.Reader
-import scala.util.parsing.combinator.lexical.StdLexical
-import scala.util.parsing.combinator.syntactical.StdTokenParsers
-import scala.util.parsing.combinator.ImplicitConversions
-
-/**
- * Parser for an untyped lambda calculus
- *
- * @author Miles Sabin (adapted slightly by Adriaan Moors)
- */
-trait TestParser extends StdTokenParsers with ImplicitConversions with TestSyntax
-{
- type Tokens = StdLexical
- val lexical = new StdLexical
- lexical.reserved ++= List("unit", "let", "in", "if", "then", "else")
- lexical.delimiters ++= List("=>", "->", "==", "(", ")", "=", "\\", "+", "-", "*", "/")
-
-
- def name : Parser[Name] = ident ^^ Name
-
- // meaning of the arguments to the closure during subsequent iterations
- // (...(expr2 op1 expr1) ... op1 expr1)
- // ^a^^^ ^o^ ^b^^^
- // ^^^^^^^a^^^^^^^ ^o^ ^^b^^
- def expr1 : Parser[Term] =
- chainl1(expr2, expr1, op1 ^^ {o => (a: Term, b: Term) => App(App(o, a), b)})
-
- def expr2 : Parser[Term] =
- chainl1(expr3, expr2, op2 ^^ {o => (a: Term, b: Term) => App(App(o, a), b)})
-
- def expr3 : Parser[Term] =
- chainl1(expr4, expr3, op3 ^^ {o => (a: Term, b: Term) => App(App(o, a), b)})
-
- def expr4 : Parser[Term] =
- ( "\\" ~> lambdas
- | ("let" ~> name) ~ ("=" ~> expr1) ~ ("in" ~> expr1) ^^ flatten3(Let)
- | ("if" ~> expr1) ~ ("then" ~> expr1) ~ ("else" ~> expr1) ^^ flatten3(If)
- | chainl1(aexpr, success(App(_: Term, _: Term)))
- )
-
- def lambdas : Parser[Term] =
- name ~ ("->" ~> expr1 | lambdas) ^^ flatten2(Lam)
-
- def aexpr : Parser[Term] =
- ( numericLit ^^ (_.toInt) ^^ Lit
- | name ^^ Ref
- | "unit" ^^^ Unit()
- | "(" ~> expr1 <~ ")"
- )
-
- def op1 : Parser[Term] =
- "==" ^^^ Ref(Name("=="))
-
- def op2 : Parser[Term] =
- ( "+" ^^^ Ref(Name("+"))
- | "-" ^^^ Ref(Name("-"))
- )
-
- def op3 : Parser[Term] =
- ( "*" ^^^ Ref(Name("*"))
- | "/" ^^^ Ref(Name("/"))
- )
-
- def parse(r: Reader[char]) : ParseResult[Term] =
- phrase(expr1)(new lexical.Scanner(r))
-}
diff --git a/docs/examples/parsing/lambda/TestSyntax.scala b/docs/examples/parsing/lambda/TestSyntax.scala
deleted file mode 100644
index 7edca6c..0000000
--- a/docs/examples/parsing/lambda/TestSyntax.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-package examples.parsing.lambda
-
-/**
- * Parser for an untyped lambda calculus: abstract syntax tree
- *
- * @author Miles Sabin (adapted slightly by Adriaan Moors)
- */
-trait TestSyntax
-{
- trait Term
-
- case class Unit extends Term
- {
- override def toString = "unit"
- }
-
- case class Lit(n: int) extends Term
- {
- override def toString = n.toString
- }
-
- case class Bool(b: boolean) extends Term
- {
- override def toString = b.toString
- }
-
- case class Name(name: String) extends Term
- {
- override def toString = name
- }
-
- case class Ref(n: Name) extends Term
- {
- def value = n
- }
-
- case class Lam(n: Name, l: Term) extends Term
- {
- override def toString = "(\\ "+n+" -> "+l+")"
- }
-
- case class App(t1: Term, t2: Term) extends Term
- {
- override def toString = "("+t1+" "+t2+")"
- }
-
- case class Let(n: Name, t1: Term, t2: Term) extends Term
- {
- override def toString = "let "+n+" = "+t1+" in "+t2
- }
-
- case class If(c: Term, t1: Term, t2: Term) extends Term
- {
- override def toString = "if "+c+" then "+t1+" else "+t2
- }
-
- trait PrimTerm extends Term
- {
- def apply(n: Lit) : Term
- }
-
- case class PrimPlus extends PrimTerm
- {
- def apply(x: Lit) = new PrimTerm { def apply(y: Lit) = Lit(x.n+y.n) }
- }
-
- case class PrimMinus extends PrimTerm
- {
- def apply(x: Lit) = new PrimTerm { def apply(y: Lit) = Lit(x.n-y.n) }
- }
-
- case class PrimMultiply extends PrimTerm
- {
- def apply(x: Lit) = new PrimTerm { def apply(y: Lit) = Lit(x.n*y.n) }
- }
-
- case class PrimDivide extends PrimTerm
- {
- def apply(x: Lit) = new PrimTerm { def apply(y: Lit) = Lit(x.n/y.n) }
- }
-
- case class PrimEquals extends PrimTerm
- {
- def apply(x: Lit) = new PrimTerm { def apply(y: Lit) = Bool(x.n == y.n) }
- }
-}
diff --git a/docs/examples/parsing/lambda/test/test-01.kwi b/docs/examples/parsing/lambda/test/test-01.kwi
deleted file mode 100644
index 9833d10..0000000
--- a/docs/examples/parsing/lambda/test/test-01.kwi
+++ /dev/null
@@ -1 +0,0 @@
-let x = 23 in (\y z -> x+y+z) 1 2
diff --git a/docs/examples/parsing/lambda/test/test-02.kwi b/docs/examples/parsing/lambda/test/test-02.kwi
deleted file mode 100644
index 11198c6..0000000
--- a/docs/examples/parsing/lambda/test/test-02.kwi
+++ /dev/null
@@ -1 +0,0 @@
-let f = (\x y -> x*y) in f 2 3
diff --git a/docs/examples/parsing/lambda/test/test-03.kwi b/docs/examples/parsing/lambda/test/test-03.kwi
deleted file mode 100644
index d4515d7..0000000
--- a/docs/examples/parsing/lambda/test/test-03.kwi
+++ /dev/null
@@ -1 +0,0 @@
-let f = (\x y -> x*y) in f (f 1 2) 3
diff --git a/docs/examples/parsing/lambda/test/test-04.kwi b/docs/examples/parsing/lambda/test/test-04.kwi
deleted file mode 100644
index e54c454..0000000
--- a/docs/examples/parsing/lambda/test/test-04.kwi
+++ /dev/null
@@ -1 +0,0 @@
-let fact = \x -> if x == 0 then 1 else x*(fact (x-1)) in unit
diff --git a/docs/examples/parsing/lambda/test/test-05.kwi b/docs/examples/parsing/lambda/test/test-05.kwi
deleted file mode 100644
index 0b95d67..0000000
--- a/docs/examples/parsing/lambda/test/test-05.kwi
+++ /dev/null
@@ -1 +0,0 @@
-let fact = \x -> if x == 0 then 1 else x*(fact (x-1)) in fact 6
diff --git a/docs/examples/parsing/lambda/test/test-06.kwi b/docs/examples/parsing/lambda/test/test-06.kwi
deleted file mode 100644
index 47723dc..0000000
--- a/docs/examples/parsing/lambda/test/test-06.kwi
+++ /dev/null
@@ -1 +0,0 @@
-2*3+4*5 == 26
diff --git a/docs/examples/parsing/lambda/test/test-07.kwi b/docs/examples/parsing/lambda/test/test-07.kwi
deleted file mode 100644
index 14fba0d..0000000
--- a/docs/examples/parsing/lambda/test/test-07.kwi
+++ /dev/null
@@ -1 +0,0 @@
-let fix = \f -> f(fix f) in unit
diff --git a/docs/examples/parsing/lambda/test/test-08.kwi b/docs/examples/parsing/lambda/test/test-08.kwi
deleted file mode 100644
index 7166d15..0000000
--- a/docs/examples/parsing/lambda/test/test-08.kwi
+++ /dev/null
@@ -1 +0,0 @@
-let fix = (\f -> f(fix f)) in (fix (\g n -> if n == 0 then 1 else n*(g(n-1)))) 5
diff --git a/docs/examples/patterns.scala b/docs/examples/patterns.scala
deleted file mode 100644
index 738deab..0000000
--- a/docs/examples/patterns.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-package examples
-
-object patterns {
-
- trait Tree
- case class Branch(left: Tree, right: Tree) extends Tree
- case class Leaf(x: Int) extends Tree
-
- val tree1 = Branch(Branch(Leaf(1), Leaf(2)), Branch(Leaf(3), Leaf(4)))
-
- def sumLeaves(t: Tree): Int = t match {
- case Branch(l, r) => sumLeaves(l) + sumLeaves(r)
- case Leaf(x) => x
- }
-
- def find[a,b](it: Iterator[Pair[a, b]], x: a): Option[b] = {
- var result: Option[b] = None
- var found = false
- while (it.hasNext && !found) {
- val Pair(x1, y) = it.next
- if (x == x1) { found = true; result = Some(y) }
- }
- result
- }
-
- def printFinds[a](xs: List[Pair[a, String]], x: a) =
- find(xs.iterator, x) match {
- case Some(y) => System.out.println(y)
- case None => System.out.println("no match")
- }
-
- def main(args: Array[String]) {
- println("sum of leafs=" + sumLeaves(tree1))
- printFinds(List(Pair(3, "three"), Pair(4, "four")), 4)
- }
-}
diff --git a/docs/examples/pilib/elasticBuffer.scala b/docs/examples/pilib/elasticBuffer.scala
deleted file mode 100644
index 5fec96a..0000000
--- a/docs/examples/pilib/elasticBuffer.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-package examples.pilib
-
-object elasticBuffer {
-
- import scala.concurrent.pilib._
-
- /**
- * Recursive type for channels that carry a "String" channel and
- * an object of the type we define.
- */
- class MetaChan extends Chan[Pair[Chan[String], MetaChan]]
-
- def Buffer(put: Chan[String], get: Chan[String]): Unit = {
-
- /**
- * An empty buffer cell, ready to pass on (o,r) to the left.
- */
- def Bl(i:Chan[String], l: MetaChan,
- o: Chan[String], r: MetaChan): unit =
- choice (
- l(Pair(o,r)) * (System.out.println("Removed one cell.")),
- i * (inp => Cl(i, l, o, r, inp))
- )
-
- /**
- * A buffer cell containing a value, ready to receive (o,r) from the right.
- */
- def Cl(i: Chan[String], l: MetaChan,
- o: Chan[String], r: MetaChan, content: String): Unit =
- choice (
- o(content) * (Bl(i,l,o,r)),
- i * (inp => Dl(i,l,o,r,content, inp)),
- r * ( { case Pair(newo, newr) => Cl(i,l,newo,newr,content) })
- )
-
- /**
- * Two joined buffer cells, of type Cl
- */
- def Dl(i: Chan[String], l: MetaChan,
- o: Chan[String], r: MetaChan,
- content: String, inp: String): Unit = {
- val newlr = new MetaChan
- val newio = new Chan[String]
- spawn < Cl(i, l, newio, newlr, inp) | Cl(newio, newlr, o, r, content) >
- }
-
- // l and r channels for the leftmost and rightmost cell, respectively.
- val unused1 = new MetaChan
- val unused2 = new MetaChan
-
- Bl(put, unused1, get, unused2)
- }
-
- val random = new java.util.Random()
-
- def Producer(n: int, put: Chan[String]): Unit = {
- Thread.sleep(1 + random.nextInt(1000))
- val msg = "object " + n
- put.write(msg)
- System.out.println("Producer gave " + msg)
- Producer(n + 1, put)
- }
-
- def Consumer(get: Chan[String]): Unit = {
- Thread.sleep(1 + random.nextInt(1000))
- val msg = get.read
- System.out.println("Consumer took " + msg)
- Consumer(get)
- }
-
- def main(args: Array[String]): Unit = {
- val put = new Chan[String]
- val get = new Chan[String]
- spawn < Producer(0, put) | Consumer(get) | Buffer(put, get) >
- }
-
-}
diff --git a/docs/examples/pilib/handover.scala b/docs/examples/pilib/handover.scala
deleted file mode 100644
index c9b6156..0000000
--- a/docs/examples/pilib/handover.scala
+++ /dev/null
@@ -1,186 +0,0 @@
-package examples.pilib
-
-/**
- * Handover example with recursive types for channels.
- */
-object handoverRecursive {
-
- import concurrent.pilib._
-
- val random = new java.util.Random()
-
- /**
- * Recursive type for channels that carry a channel "unit" and
- * an object of the type we define.
- */
- class Switch extends Chan[Pair[Chan[unit], Switch]]
-
- /**
- * Car.
- */
- def Car(talk: Chan[unit], switch: Switch): unit =
- choice (
- switch * ({ case Pair(t,s) => Car(t, s) }),
- talk(()) * ( {
- Thread.sleep(1 + random.nextInt(1000));
- System.out.println("Car emitted a message.");
- Car(talk, switch)
- })
- );
-
- /**
- * Control center.
- */
- def Control(talk1: Chan[unit], switch1: Switch,
- gain1: Switch, lose1: Switch,
- talk2: Chan[unit], switch2: Switch,
- gain2: Switch, lose2: Switch): unit
- = {
- def Control1: unit= {
- Thread.sleep(1 + random.nextInt(1000));
- lose1.write(Pair(talk2, switch2));
- gain2.write(Pair(talk2, switch2));
- Control2
- }
- def Control2: unit = {
- Thread.sleep(1 + random.nextInt(1000));
- lose2.write(Pair(talk1, switch1));
- gain1.write(Pair(talk1, switch1));
- Control1
- }
- Control1
- }
-
- /**
- * Active transmitter.
- */
- def ActiveTransmitter(id: String, talk: Chan[unit], switch: Switch,
- gain: Switch, lose: Switch): unit
- =
- choice (
- talk * (x => {
- System.out.println(id + " received a message.")
- ActiveTransmitter(id, talk, switch, gain, lose)
- }),
- lose * ({ case Pair(t, s) => {
- switch.write(Pair(t, s))
- IdleTransmitter(id, gain, lose)
- }})
- );
-
- /**
- * Idle transmitter.
- */
- def IdleTransmitter(id: String, gain: Switch, lose: Switch): unit = {
- val Pair(t, s) = gain.read;
- ActiveTransmitter(id, t, s, gain, lose)
- }
-
- def main(args: Array[String]): unit = {
- val talk1 = new Chan[unit]
- val switch1 = new Switch
- val gain1 = new Switch
- val lose1 = new Switch
- val talk2 = new Chan[unit]
- val switch2 = new Switch
- val gain2 = new Switch
- val lose2 = new Switch
- spawn <
- Car(talk1, switch1) |
- ActiveTransmitter("Transmitter 1", talk1, switch1, gain1, lose1) |
- IdleTransmitter("Transmitter 2", gain2, lose2) |
- Control(talk1, switch1, gain1, lose1, talk2, switch2, gain2, lose2) >
- }
-}
-
-/**
-* Handover example with type casts.
-*/
-object handoverCast {
-
- import concurrent.pilib._;
-
- val random = new java.util.Random();
-
- /**
- * Car.
- */
- def Car(talk: Chan[Any], switch: Chan[Any]): unit =
- choice (
- switch * (o => {
- val Pair(t,s) = o.asInstanceOf[Pair[Chan[Any],Chan[Any]]];
- Car(t, s)
- }),
- talk(()) * ( {
- Thread.sleep(1 + random.nextInt(1000));
- System.out.println("Car emitted a message.");
- Car(talk, switch)
- })
- );
-
- /**
- * Control center.
- */
- def Control(talk1: Chan[Any], switch1: Chan[Any],
- gain1: Chan[Any], lose1: Chan[Any],
- talk2: Chan[Any], switch2: Chan[Any],
- gain2: Chan[Any], lose2: Chan[Any]): unit
- = {
- def Control1: unit = {
- Thread.sleep(1 + random.nextInt(1000));
- lose1.write(Pair(talk2, switch2));
- gain2.write(Pair(talk2, switch2));
- Control2
- }
- def Control2: unit = {
- Thread.sleep(1 + random.nextInt(1000));
- lose2.write(Pair(talk1, switch1));
- gain1.write(Pair(talk1, switch1));
- Control1
- }
- Control1
- }
-
- /**
- * Active transmitter.
- */
- def ActiveTransmitter(id: String, talk: Chan[Any], switch: Chan[Any],
- gain: Chan[Any], lose: Chan[Any]): unit
- =
- choice (
- talk * (x => {
- System.out.println(id + " received a message.")
- ActiveTransmitter(id, talk, switch, gain, lose)
- }),
- lose * (o => {
- val Pair(t, s) = o.asInstanceOf[Pair[Chan[Any],Chan[Any]]]
- switch.write(Pair(t, s))
- IdleTransmitter(id, gain, lose)
- })
- )
-
- /**
- * Idle transmitter.
- */
- def IdleTransmitter(id: String, gain: Chan[Any], lose: Chan[Any]): unit = {
- val Pair(t, s) = gain.read.asInstanceOf[Pair[Chan[Any],Chan[Any]]]
- ActiveTransmitter(id, t, s, gain, lose)
- }
-
- def main(args: Array[String]): unit = {
- val talk1 = new Chan[Any]
- val switch1 = new Chan[Any]
- val gain1 = new Chan[Any]
- val lose1 = new Chan[Any]
- val talk2 = new Chan[Any]
- val switch2 = new Chan[Any]
- val gain2 = new Chan[Any]
- val lose2 = new Chan[Any]
- spawn <
- Car(talk1, switch1) |
- ActiveTransmitter("Transmitter 1", talk1, switch1, gain1, lose1) |
- IdleTransmitter("Transmitter 2", gain2, lose2) |
- Control(talk1, switch1, gain1, lose1, talk2, switch2, gain2, lose2) >
- }
-
-}
diff --git a/docs/examples/pilib/mobilePhoneProtocol.scala b/docs/examples/pilib/mobilePhoneProtocol.scala
deleted file mode 100644
index e8c0ac1..0000000
--- a/docs/examples/pilib/mobilePhoneProtocol.scala
+++ /dev/null
@@ -1,172 +0,0 @@
-package examples.pilib
-
-/**
-* Mobile phone protocol.
-* Equivalent to a three-place buffer.
-* @see Bjoern Victor "A verification tool for the polyadic pi-calculus".
-*/
-object mobilePhoneProtocol {
-
- import concurrent.pilib._
-
- val random = new java.util.Random()
-
- // Internal messages exchanged by the protocol.
- trait Message
-
- // Predefined messages used by the protocol.
- case class Data() extends Message;
- case class HoCmd() extends Message; // handover command
- case class HoAcc() extends Message; // handover access
- case class HoCom() extends Message; // handover complete
- case class HoFail() extends Message; // handover fail
- case class ChRel() extends Message; // release
- case class Voice(s: String) extends Message; // voice
- case class Channel(n: Chan[Message]) extends Message; // channel
-
- def MobileSystem(in: Chan[String], out: Chan[String]): unit = {
-
- def CC(fa: Chan[Message], fp: Chan[Message], l: Chan[Channel]): unit =
- choice (
- in * (v => { fa.write(Data()); fa.write(Voice(v)); CC(fa, fp, l) })
- ,
- l * (m_new => {
- fa.write(HoCmd());
- fa.write(m_new);
- choice (
- fp * ({ case HoCom() => {
- System.out.println("Mobile has moved from one cell to another");
- fa.write(ChRel());
- val Channel(m_old) = fa.read;
- l.write(Channel(m_old));
- CC(fp, fa, l)
- }})
- ,
- fa * ({ case HoFail() => {
- System.out.println("Mobile has failed to move from one cell to another");
- l.write(m_new);
- CC(fa, fp, l)
- }})
- )
- })
- );
-
- /*
- * Continuously orders the MSC to switch the MS to the non-used BS.
- */
- def HC(l: Chan[Channel], m: Chan[Message]): unit = {
- Thread.sleep(1 + random.nextInt(1000));
- l.write(Channel(m));
- val Channel(m_new) = l.read;
- HC(l, m_new)
- }
-
- /**
- * Mobile switching center.
- */
- def MSC(fa: Chan[Message], fp: Chan[Message], m: Chan[Message]): unit = {
- val l = new Chan[Channel];
- spawn < HC(l, m) | CC(fa, fp, l) >
- }
-
- /**
- * Active base station.
- */
- def BSa(f: Chan[Message], m: Chan[Message]): unit =
- (f.read) match {
- case Data() => {
- val v = f.read;
- m.write(Data());
- m.write(v);
- BSa(f, m)
- }
- case HoCmd() => {
- val v = f.read;
- m.write(HoCmd());
- m.write(v);
- choice (
- f * ({ case ChRel() => {
- f.write(Channel(m));
- BSp(f, m)
- }})
- ,
- m * ({ case HoFail() => {
- f.write(HoFail());
- BSa(f, m)
- }})
- )
- }
- };
-
- /**
- * Passive base station.
- */
- def BSp(f: Chan[Message], m: Chan[Message]): unit = {
- val HoAcc = m.read
- f.write(HoCom())
- BSa(f, m)
- }
-
- /**
- * Mobile station.
- */
- def MS(m: Chan[Message]): unit =
- (m.read) match {
- case Data() => {
- val Voice(v) = m.read;
- out.write(v);
- MS(m)
- }
- case HoCmd() =>
- (m.read) match {
- case Channel(m_new) => {
- if (random.nextInt(1) == 0)
- choice ( m_new(HoAcc()) * (MS(m_new)) );
- else
- choice ( m(HoFail()) * (MS(m)) );
- }
- }
- };
-
- def P(fa: Chan[Message], fp: Chan[Message]): unit = {
- val m = new Chan[Message];
- spawn < MSC(fa, fp, m) | BSp(fp, m) >
- }
-
- def Q(fa: Chan[Message]): unit = {
- val m = new Chan[Message];
- spawn < BSa(fa, m) | MS(m) >
- }
-
- val fa = new Chan[Message];
- val fp = new Chan[Message];
- spawn < Q(fa) | P(fa, fp) >;
- }
-
- //***************** Entry function ******************//
-
- def main(args: Array[String]): unit = {
-
- def Producer(n: Int, put: Chan[String]): unit = {
- Thread.sleep(1 + random.nextInt(1000));
- val msg = "object " + n;
- put.write(msg);
- System.out.println("Producer gave " + msg);
- Producer(n + 1, put)
- }
-
- def Consumer(get: Chan[String]): unit = {
- Thread.sleep(1 + random.nextInt(1000));
- val msg = get.read;
- System.out.println("Consumer took " + msg);
- Consumer(get)
- }
-
- val put = new Chan[String];
- val get = new Chan[String];
- spawn < Producer(0, put) | Consumer(get) | MobileSystem(put, get) >
- }
-
-}
-
-
diff --git a/docs/examples/pilib/piNat.scala b/docs/examples/pilib/piNat.scala
deleted file mode 100644
index a1a0e68..0000000
--- a/docs/examples/pilib/piNat.scala
+++ /dev/null
@@ -1,89 +0,0 @@
-package examples.pilib
-
-import scala.concurrent.pilib._
-
-/** Church encoding of naturals in the Pi-calculus */
-object piNat extends Application {
-
- /** Locations of Pi-calculus natural */
- class NatChan extends Chan[Triple[Chan[Unit], Chan[NatChan], Chan[NatChan]]]
-
- /** Zero */
- def Z(l: NatChan): Unit = choice (
- l * { case Triple(z, sd, d) => z.write(()) }
- )
-
- /** Successor of Double */
- def SD(n: NatChan, l: NatChan): Unit = choice (
- l * { case Triple(z, sd, d) => sd.write(n) }
- )
-
- /** Double */
- def D(n: NatChan, l: NatChan): Unit = choice (
- l * { case Triple(z, sd, d) => d.write(n) }
- )
-
- /** Make "l" a location representing the natural "n" */
- def make(n: Int, l: NatChan): Unit =
- if (n == 0) Z(l)
- else if (n % 2 == 0) { val l1 = new NatChan; spawn < D(l1, l) >; make(n/2, l1) }
- else { val l1 = new NatChan; spawn < SD(l1, l) >; make(n/2, l1) }
-
- /** Consume the natural "m" and put it successor at location "n" */
- def Succ(m: NatChan, n: NatChan) {
- val z = new Chan[Unit]
- val sd = new Chan[NatChan]
- val d = new Chan[NatChan]
- spawn < m.write(Triple(z, sd, d)) >;
- choice (
- z * { x => make(1, n) },
- sd * { m1 => { val n1 = new NatChan; spawn < D(n1, n) >; Succ(m1, n1) } },
- d * { m1 => SD(m1, n) }
- )
- }
-
- /** Consume the natural "l" and put two copies at locations "m" and "n" */
- def Copy(l: NatChan, m: NatChan, n: NatChan) {
- val z = new Chan[Unit]
- val sd = new Chan[NatChan]
- val d = new Chan[NatChan]
- spawn < l.write(Triple(z, sd, d)) >;
- choice (
- z * { x => spawn < Z(m) >; Z(n) },
- sd * { l1 => { val m1 = new NatChan; val n1 = new NatChan;
- spawn < SD(m1, m) | SD(n1, n) >;
- Copy(l1, m1, n1) } },
- d * { l1 => { val m1 = new NatChan; val n1 = new NatChan;
- spawn < D(m1, m) | D(n1, n) >;
- Copy(l1, m1, n1) } }
- )
- }
-
- /** Consume the natural at location "n" and return its value */
- def value(n: NatChan): Int = {
- val z = new Chan[Unit]
- val sd = new Chan[NatChan]
- val d = new Chan[NatChan]
- spawn < n.write(Triple(z, sd, d)) >;
- choice (
- z * { x => 0 },
- sd * { n1 => 2 * value(n1) + 1 },
- d * { n1 => 2 * value(n1) }
- )
- }
-
- // Test
- val i = 42
- val l = new NatChan
- val l1 = new NatChan
- val l2 = new NatChan
- val l3 = new NatChan
-
- spawn <
- make(i, l) |
- Copy(l, l1, l2) |
- Succ(l2, l3) |
- println("" + i + " = " + value(l1)) |
- println("succ " + i + " = " + value(l3)) >
-
-}
diff --git a/docs/examples/pilib/rwlock.scala b/docs/examples/pilib/rwlock.scala
deleted file mode 100644
index bb1c26b..0000000
--- a/docs/examples/pilib/rwlock.scala
+++ /dev/null
@@ -1,329 +0,0 @@
-package examples.pilib
-
-/**
-* From Pi to Scala: Semaphores, monitors, read/write locks.
-* Readers/writers locks.
-*/
-object rwlock {
-
- import scala.concurrent.pilib._
-
- class Signal extends Chan[unit] {
- def send = write(())
- def receive = read
- }
-
- class CountLock {
- private val busy = new Signal
- def get = busy.send
- def release = busy.receive
- spawn < release >
- }
-
- /** A binary semaphore
- */
- class Lock {
- private val busy = new Signal;
- private val free = new Signal;
- def get = busy.send;
- def release = free.send;
- spawn < (while (true) {
- choice (
- busy * (x => free.receive),
- free * (x => ())
- )
- }) >
- }
-
- /** A monitor a la Java
- */
- class JavaMonitor {
-
- private val lock = new Lock
-
- private var waiting: List[Signal] = Nil
-
- def Wait = {
- val s = new Signal
- waiting = s :: waiting
- lock.release
- s.receive
- lock.get
- }
-
- def Notify =
- if (!waiting.isEmpty) {
- waiting.head.send
- waiting = waiting.tail
- }
-
- def NotifyAll =
- while (!waiting.isEmpty) {
- waiting.head.send
- waiting = waiting.tail
- }
-
- def await(cond: => boolean): unit =
- while (false == cond) (Wait)
- }
-
- /*
- class Buffer[a](size: Int) extends JavaMonitor with {
- var in = 0, out = 0, n = 0;
- val elems = new Array[a](size);
- def put(x: a) = synchronized {
- await(n < size);
- elems(out) = x;
- out = (out + 1) % size;
- }
- def get: a = synchronized {
- await(n > 0);
- val x = elems(in);
- in = (in + 1) % size;
- x
- }
- }
- */
-
- /** A readers/writers lock. */
- trait ReadWriteLock {
- def startRead: unit
- def startWrite: unit
- def endRead: unit
- def endWrite: unit
- }
-
- /**
- * A readers/writers lock, using monitor abstractions.
- */
- class ReadWriteLock1 extends JavaMonitor with ReadWriteLock {
-
- private var nactive: int = 0
- private var nwriters: int = 0
-
- def status =
- System.out.println(nactive + " active, " + nwriters + " writers");
-
- def startRead = synchronized {
- await(nwriters == 0)
- nactive = nactive + 1
- status
- }
-
- def startWrite = synchronized {
- nwriters = nwriters + 1
- await(nactive == 0)
- nactive = 1
- status
- }
-
- def endRead = synchronized {
- nactive = nactive - 1
- if (nactive == 0) NotifyAll
- status
- }
-
- def endWrite = synchronized {
- nwriters = nwriters - 1
- nactive = 0
- NotifyAll
- status
- }
- }
-
- /** A readers/writers lock, using semaphores
- */
- class ReadWriteLock2 extends ReadWriteLock {
-
- private var rc: int = 0 // reading readers
- private var wc: int = 0 // writing writers
- private var rwc: int = 0 // waiting readers
- private var wwc: int = 0 // waiting writers
- private val mutex = new Lock
- private val rsem = new Lock
- private val wsem = new Lock
-
- def startRead = {
- mutex.get;
- if (wwc > 0 || wc > 0) {
- rwc = rwc + 1;
- mutex.release;
- rsem.get;
- rwc = rwc - 1
- }
- rc = rc + 1;
- if (rwc > 0) rsem.release;
- mutex.release
- }
-
- def startWrite = {
- mutex.get;
- if (rc > 0 || wc > 0) {
- wwc = wwc + 1;
- mutex.release;
- wsem.get;
- wwc = wwc - 1
- }
- wc = wc + 1;
- mutex.release
- }
-
- def endRead = {
- mutex.get;
- rc = rc - 1;
- if (rc == 0 && wwc > 0) wsem.release;
- mutex.release
- }
-
- def endWrite = {
- mutex.get;
- wc = wc - 1;
- if (rwc > 0)
- rsem.release
- else if (wwc > 0) wsem.release;
- mutex.release
- }
- }
-
- /** A readers/writers lock, using channels, without priortities
- */
- class ReadWriteLock3 extends ReadWriteLock {
-
- private val sr = new Signal
- private val er = new Signal
- private val sw = new Signal
- private val ew = new Signal
-
- def startRead = sr.send
- def startWrite = sw.send
- def endRead = er.send
- def endWrite = ew.send
-
- private def rwlock: unit = choice (
- sr * (x => reading(1)),
- sw * (x => { ew.receive; rwlock })
- )
-
- private def reading(n: int): unit = choice (
- sr * (x => reading(n+1)),
- er * (x => if (n == 1) rwlock else reading(n-1))
- )
-
- spawn < rwlock >
- }
-
- /** Same, with sequencing
- */
- class ReadWriteLock4 extends ReadWriteLock {
-
- private val rwlock = new ReadWriteLock3
-
- private val sr = new Signal
- private val ww = new Signal
- private val sw = new Signal
-
- def startRead = sr.send
- def startWrite = { ww.send; sw.send }
- def endRead = rwlock.endRead
- def endWrite = rwlock.endWrite
-
- private def queue: unit = choice (
- sr * (x => { rwlock.startRead ; queue }),
- ww * (x => { rwlock.startWrite; sw.receive; queue })
- )
-
- spawn < queue >;
- }
-
- /** Readwritelock where writers always have priority over readers
- */
- class ReadWriteLock5 extends ReadWriteLock {
-
- private val sr = new Signal
- private val er = new Signal
- private val ww = new Signal
- private val sw = new Signal
- private val ew = new Signal
-
- def startRead = sr.send
- def startWrite = { ww.send; sw.send }
- def endRead = er.send
- def endWrite = ew.send
-
- private def Reading(nr: int, nw: int): unit =
- if (nr == 0 && nw == 0)
- choice (
- sr * (x => Reading(1, 0)),
- ww * (x => Reading(0, 1))
- )
- else if (nr == 0 && nw != 0) {
- sw.receive;
- Writing(nw);
- }
- else if (nr != 0 && nw == 0)
- choice (
- sr * (x => Reading(nr + 1, 0)),
- er * (x => Reading(nr - 1, 0)),
- ww * (x => Reading(nr, 1))
- )
- else if (nr != 0 && nw != 0)
- choice (
- ww * (x => Reading(nr, nw + 1)),
- er * (x => Reading(nr - 1, nw))
- );
-
- private def Writing(nw: int): unit = choice (
- ew * (x => Reading(0, nw - 1)),
- ww * (x => Writing(nw + 1))
- );
-
- spawn < Reading(0, 0) >;
-
- }
-
- /**
- * Main function.
- */
- def main(args: Array[String]): unit = {
- val random = new java.util.Random()
-
- def reader(i: int, rwlock: ReadWriteLock): unit = {
- Thread.sleep(1 + random.nextInt(100))
- System.err.println("Reader " + i + " wants to read.")
- rwlock.startRead
- System.err.println("Reader " + i + " is reading.")
- Thread.sleep(1 + random.nextInt(100))
- rwlock.endRead
- System.err.println("Reader " + i + " has read.")
- reader(i, rwlock)
- }
-
- def writer(i: int, rwlock: ReadWriteLock): unit = {
- Thread.sleep(1 + random.nextInt(100))
- System.err.println("Writer " + i + " wants to write.")
- rwlock.startWrite
- System.err.println("Writer " + i + " is writing.")
- Thread.sleep(1 + random.nextInt(100))
- rwlock.endWrite
- System.err.println("Writer " + i + " has written.")
- writer(i, rwlock)
- }
-
- val n = try { Integer.parseInt(args(0)) } catch { case _ => 0 }
- if (n < 1 || 5 < n) {
- Console.println("Usage: scala examples.pilib.rwlock <n> (n=1..5)")
- exit
- }
- val rwlock = n match {
- case 1 => new ReadWriteLock1
- case 2 => new ReadWriteLock2
- case 3 => new ReadWriteLock3
- case 4 => new ReadWriteLock4
- case 5 => new ReadWriteLock5
- }
- List.range(0, 5) foreach (i => spawn < reader(i, rwlock) >)
- List.range(0, 5) foreach (i => spawn < writer(i, rwlock) >)
- }
-
-}
-
diff --git a/docs/examples/pilib/scheduler.scala b/docs/examples/pilib/scheduler.scala
deleted file mode 100644
index fd8fd52..0000000
--- a/docs/examples/pilib/scheduler.scala
+++ /dev/null
@@ -1,150 +0,0 @@
-package examples.pilib
-
-import scala.concurrent.pilib._
-
-object scheduler {
-
- /**
- * Random number generator.
- */
- val random = new util.Random()
-
- //***************** Scheduler ******************//
-
- /**
- * A cell of the scheduler whose attached agent is allowed to start.
- */
- def A(a: Chan[Unit], b: Chan[Unit])(d: Chan[Unit], c: Chan[Unit]) {
- ///- ... complete here ...
- choice ( a * { x => C(a, b)(d, c) })
- ///+
- }
-
- /**
- * A cell of the scheduler in another intermediate state.
- */
- def C(a: Chan[Unit], b: Chan[Unit])(d: Chan[Unit], c: Chan[Unit]) {
- ///- ... complete here ...
- choice (c * { x => B(a, b)(d, c) })
- ///+
- }
-
- /**
- * A cell of the scheduler whose attached agent is allowed to finish.
- */
- def B(a: Chan[Unit], b: Chan[Unit])(d: Chan[Unit], c: Chan[Unit]) {
- ///- ... complete here ...
- // choice (b * { x => D(a, b)(d, c) }) // incorrect naive solution
- choice (
- b * { x => choice ( d(()) * A(a, b)(d, c) ) }, // b.'d.A
- d(()) * (choice (b * { x => A(a, b)(d, c) })) // 'd.b.A
- )
- ///+
- }
-
- /**
- * A cell of the scheduler whose attached agent is not yet allowed to start.
- */
- def D(a: Chan[Unit], b: Chan[Unit])(d: Chan[Unit], c: Chan[Unit]) {
- ///- ... complete here ...
- choice (d(()) * A(a, b)(d, c))
- ///+
- }
-
- //***************** Agents ******************//
-
- def agent(i: Int)(a: Chan[Unit], b: Chan[Unit]) {
- // 50% chance that we sleep forever
- if (i == 0 && random.nextInt(10) < 5) {
- a.attach(x => println("Start and sleeps ----> " + i))
- Thread.sleep(random.nextInt(1000))
- a.write(())
- }
- else {
- a.attach(x => println("Start ----> " + i))
- b.attach(x => println("Stop -> " + i))
- Thread.sleep(random.nextInt(1000))
- a.write(())
- Thread.sleep(random.nextInt(1000))
- b.write(())
- agent(i)(a, b)
- }
- }
-
- //***************** Entry function ******************//
-
- /**
- * Creates a scheduler for five agents (programs).
- */
-
- def main(args: Array[String]) {
- val agentNb = 5
- val agents = List.range(0, agentNb) map agent
- scheduleAgents(agents)
- }
-
- //***************** Infrastructure *****************//
-
- /**
- * A cell is modelled as a function that takes as parameters
- * input and output channels and which returns nothing.
- */
- type Cell = (Chan[Unit], Chan[Unit]) => Unit
-
- /**
- * Creates a cell composed of two cells linked together.
- */
- def join(cell1: Cell, cell2: Cell): Cell =
- (l: Chan[Unit], r: Chan[Unit]) => {
- val link = new Chan[Unit];
- spawn < cell1(l, link) | cell2(link, r) >
- };
-
- /**
- * Links the output of a cell to its input.
- */
- def close(cell: Cell) {
- val a = new Chan[Unit]
- cell(a, a)
- }
-
- /**
- * Creates a cell consisting of a chain of cells.
- */
- def chain(cells: List[Cell]): Cell =
- cells reduceLeft join
-
- /**
- * Creates a cell consisting of a chain of cells.
- */
- def makeRing(cells: List[Cell]): Unit =
- close(chain(cells))
-
- /**
- * An agent is modelled as a function that takes as parameters channels to
- * signal that it has started or finished.
- */
- type Agent = (Chan[Unit], Chan[Unit]) => Unit
-
- /**
- * Takes a list of agents and schedules them.
- */
- def scheduleAgents(agents: List[Agent]) {
- var firstAgent = true;
- val cells = agents map (ag => {
- val a = new Chan[Unit];
- val b = new Chan[Unit];
- spawn < ag(a, b) >;
- (d: Chan[Unit], c: Chan[Unit]) => if (firstAgent) {
- firstAgent = false;
- A(a, b)(d, c)
- }
- else
- D(a, b)(d, c)
- });
- makeRing(cells)
- }
-
-}
-
-
diff --git a/docs/examples/pilib/semaphore.scala b/docs/examples/pilib/semaphore.scala
deleted file mode 100644
index 951c90e..0000000
--- a/docs/examples/pilib/semaphore.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-package examples.pilib
-
-import scala.concurrent.pilib._
-
-/** Solution of exercise session 6 (first question). */
-object semaphore {
-
- class Signal extends Chan[Unit] {
- def send = write(())
- def receive = read
- }
-
- /** Interface. */
- trait Semaphore {
- def get: Unit
- def release: Unit
- }
-
- /** First implementation. */
- class Sem1 extends Semaphore {
-
- private val g = new Signal
- private val r = new Signal
-
- def get: Unit = g.send
- def release: Unit = r.send
-
- private def Sched: Unit = choice (
- g * (x => { r.receive; Sched }),
- r * (x => Sched)
- )
- spawn< Sched >
- }
-
- /** Second implementation. */
- class Sem2 extends Semaphore {
-
- private val a = new Signal
- private val na = new Signal
-
- def get { a.receive; spawn< na.send > }
- def release: Unit = choice (
- a * (x => spawn< a.send >),
- na * (x => spawn< a.send >)
- )
- spawn< a.send >
- }
-
- /** Test program. */
- def main(args: Array[String]) {
- val random = new util.Random()
- val sem = new Sem2
- def mutex(p: => Unit) { sem.get; p; sem.release }
-
- spawn< {
- Thread.sleep(1 + random.nextInt(100));
- mutex( {
- println("a1");
- Thread.sleep(1 + random.nextInt(100));
- println("a2")
- } )
- } | {
- Thread.sleep(1 + random.nextInt(100));
- mutex( {
- println("b1");
- Thread.sleep(1 + random.nextInt(100));
- println("b2")
- } )
- } >;
- }
-}
-
diff --git a/docs/examples/pilib/twoPlaceBuffer.scala b/docs/examples/pilib/twoPlaceBuffer.scala
deleted file mode 100644
index 255f70c..0000000
--- a/docs/examples/pilib/twoPlaceBuffer.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-package examples.pilib
-
-import scala.concurrent.pilib._
-
-/** Two-place buffer specification and implementation. */
-object twoPlaceBuffer extends Application {
-
- /**
- * Specification.
- */
- def Spec[A](in: Chan[A], out: Chan[A]) {
-
- def B0: Unit = choice (
- in * (x => B1(x))
- )
-
- def B1(x: A): Unit = choice (
- out(x) * (B0),
- in * (y => B2(x, y))
- )
-
- def B2(x: A, y: A): Unit = choice (
- out(x) * (B1(y))
- )
-
- B0
- }
-
- /**
- * Implementation using two one-place buffers.
- */
- def Impl[A](in: Chan[A], out: Chan[A]) {
- ///- ... complete here ...
- // one-place buffer
- def OnePlaceBuffer[A](in: Chan[A], out: Chan[A]) {
- def B0: Unit = choice ( in * (x => B1(x)) )
- def B1(x: A): Unit = choice ( out(x) * (B0))
- B0
- }
- val hidden = new Chan[A]
- spawn < OnePlaceBuffer(in, hidden) | OnePlaceBuffer(hidden, out) >
- ///+
- }
-
- val random = new util.Random()
-
- def Producer(n: Int, in: Chan[String]) {
- Thread.sleep(random.nextInt(1000))
- val msg = "" + n
- choice (in(msg) * {})
- Producer(n + 1, in)
- }
-
- def Consumer(out: Chan[String]) {
- Thread.sleep(random.nextInt(1000))
- choice (out * { msg => () })
- Consumer(out)
- }
-
- val in = new Chan[String]
- in.attach(s => println("put " + s))
- val out = new Chan[String]
- out.attach(s => println("get " + s))
- //spawn < Producer(0, in) | Consumer(out) | Spec(in, out) >
- spawn < Producer(0, in) | Consumer(out) | Impl(in, out) >
-
-}
diff --git a/docs/examples/plugintemplate/.classpath b/docs/examples/plugintemplate/.classpath
deleted file mode 100644
index e906963..0000000
--- a/docs/examples/plugintemplate/.classpath
+++ /dev/null
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="src" path="src"/>
- <classpathentry kind="src" path="doc/examples"/>
- <classpathentry kind="src" path="test"/>
- <classpathentry kind="con" path="ch.epfl.lamp.sdt.launching.SCALA_CONTAINER"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="lib" path="/Applications/eclipse-3.4M6a/plugins/ch.epfl.lamp.sdt.core_2.7.1.r14724-b20080421111118/lib/scala-compiler.jar"/>
- <classpathentry kind="lib" path="lib/scalatest.jar"/>
- <classpathentry kind="output" path="build/eclipse"/>
-</classpath>
diff --git a/docs/examples/plugintemplate/.project b/docs/examples/plugintemplate/.project
deleted file mode 100644
index 075b0c5..0000000
--- a/docs/examples/plugintemplate/.project
+++ /dev/null
@@ -1,18 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
- <name>plugintemplate</name>
- <comment></comment>
- <projects>
- </projects>
- <buildSpec>
- <buildCommand>
- <name>ch.epfl.lamp.sdt.core.scalabuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- </buildSpec>
- <natures>
- <nature>ch.epfl.lamp.sdt.core.scalanature</nature>
- <nature>org.eclipse.jdt.core.javanature</nature>
- </natures>
-</projectDescription>
diff --git a/docs/examples/plugintemplate/build.xml b/docs/examples/plugintemplate/build.xml
deleted file mode 100644
index 37c8441..0000000
--- a/docs/examples/plugintemplate/build.xml
+++ /dev/null
@@ -1,265 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="plugintemplate" default="pack">
- <!-- Edit the file plugin.properties to set
- - scala.home
- - plugin.name
- - plugin.commandname
- - plugin.description
- - plugin.mainclass
- - version numbers
- -->
- <property file="${basedir}/plugin.properties"/>
-
- <property name="version" value="${version.major}.${version.minor}"/>
-
- <taskdef resource="scala/tools/ant/antlib.xml">
- <classpath>
- <pathelement location="${scala.home}/lib/scala-compiler.jar"/>
- <pathelement location="${scala.home}/lib/scala-library.jar"/>
- </classpath>
- </taskdef>
-
-
- <!-- =================================================================
- End-user targets
- ================================================================== -->
-
- <target name="build" depends="build.main"/>
- <target name="build.all" depends="build.done"/>
- <target name="test" depends="test.all"/>
- <target name="guitest" depends="guitest.all"/>
- <target name="doc" depends="doc.done"/>
- <target name="pack" depends="pack.done"/>
- <target name="dist" depends="dist.done"/>
- <target name="clean" depends="clean.all"/>
-
- <!-- =================================================================
- Build targets
- ================================================================== -->
-
- <target name="build.main">
- <mkdir dir="build/build.main"/>
- <scalac srcdir="src"
- destdir="build/build.main"
- includes="**/*.scala">
- <classpath>
- <pathelement location="${scala.home}/lib/scala-library.jar"/>
- <pathelement location="${scala.home}/lib/scala-compiler.jar"/>
- </classpath>
- </scalac>
- </target>
-
- <target name="build.test" depends="build.main">
- <mkdir dir="build/build.test"/>
- <scalac srcdir="test"
- destdir="build/build.test"
- includes="**/*.scala">
- <classpath>
- <pathelement location="${scala.home}/lib/scala-library.jar"/>
- <pathelement location="${scala.home}/lib/scala-compiler.jar"/>
- <pathelement location="lib/scalatest.jar"/>
- <pathelement location="build/build.main"/>
- </classpath>
- </scalac>
- </target>
-
- <target name="build.examples" depends="build.test">
- <mkdir dir="build/build.examples"/>
- <scalac srcdir="doc/examples"
- destdir="build/build.examples"
- includes="**/*.scala">
- <classpath>
- <pathelement location="${scala.home}/lib/scala-library.jar"/>
- <pathelement location="${scala.home}/lib/scala-compiler.jar"/>
- <pathelement location="build/build.main"/>
- </classpath>
- </scalac>
- </target>
-
- <target name="build.done" depends="build.examples"/>
-
- <!-- =================================================================
- Test targets
- ================================================================== -->
-
- <target name="test.all" depends="build.done">
- <!-- fork="true" is good for running the graphical mode -->
- <java classname="org.scalatest.tools.Runner" fork="true">
- <classpath>
- <pathelement location="${scala.home}/lib/scala-compiler.jar" />
- <pathelement location="${scala.home}/lib/scala-library.jar" />
- <pathelement location="lib/scalatest.jar" />
- <pathelement location="build/build.main" />
- <pathelement location="build/build.test" />
- </classpath>
-
- <arg value="-p" />
- <arg value="build/build.test" />
-
- <arg value="-o" />
- </java>
- </target>
-
- <target name="guitest.all" depends="build.done">
- <!-- fork="true" is good for running the graphical mode -->
- <java classname="org.scalatest.tools.Runner" fork="true">
- <classpath>
- <pathelement location="${scala.home}/lib/scala-compiler.jar" />
- <pathelement location="${scala.home}/lib/scala-library.jar" />
- <pathelement location="lib/scalatest.jar" />
- <pathelement location="build/build.main" />
- <pathelement location="build/build.test" />
- </classpath>
-
- <arg value="-p" />
- <arg value="build/build.test" />
-
- <arg value="-g" />
- </java>
- </target>
-
- <!-- =================================================================
- Doc targets
- ================================================================== -->
-
- <target name="doc.main">
- <mkdir dir="doc/api"/>
- <scaladoc srcdir="src"
- destdir="doc/api"
- includes="**/*.scala">
- <classpath>
- <pathelement location="${scala.home}/lib/scala-library.jar"/>
- <pathelement location="${scala.home}/lib/scala-compiler.jar"/>
- </classpath>
- </scaladoc>
- </target>
-
- <target name="doc.done" depends="doc.main"/>
-
- <!-- =================================================================
- Pack targets
- ================================================================== -->
-
- <target name="pack.main" depends="build.done">
- <mkdir dir="build/pack"/>
- <jar destfile="build/pack/${plugin.name}.jar">
- <fileset dir="build/build.main"/>
- <fileset file="plugin.properties"/>
- <fileset file="misc/scalac-plugin.xml"/>
- </jar>
- </target>
-
- <target name="pack.src" depends="pack.main">
- <jar destfile="build/pack/${plugin.name}-src.jar"
- basedir="src"
- includes="**/*.scala"/>
- </target>
-
- <target name="pack.done" depends="pack.src"/>
-
- <!-- =================================================================
- Dist targets
- ================================================================== -->
-
- <target name="dist.bin" depends="test.all,pack.done,doc.done">
- <mkdir dir="build/toolscript"/>
- <scalascript
- file="build/toolscript/${plugin.commandname}"
- class="${plugin.mainclass}"/>
- <chmod dir="build/toolscript" perm="a+rx" excludes="*.bat"/>
- </target>
-
- <target name="dist.archive" depends="dist.bin">
- <mkdir dir="build/dist"/>
- <tar destfile="build/dist/${plugin.name}-${version}.tgz"
- compression="gzip">
- <tarfileset prefix="lib" dir="build/pack"
- includes="${plugin.name}.jar"/>
- <tarfileset prefix="src" dir="build/pack"
- includes="${plugin.name}-src.jar"/>
- <tarfileset prefix="doc/${plugin.name}" dir="doc"
- includes="README"/>
- <tarfileset prefix="doc/${plugin.name}" dir="doc"
- includes="examples/**"/>
- <tarfileset prefix="doc/${plugin.name}" dir="doc"
- includes="api/**"/>
- <tarfileset prefix="bin" dir="build/toolscript"
- includes="${plugin.commandname}" mode="755"/>
- <tarfileset prefix="bin" dir="build/toolscript"
- includes="${plugin.commandname}.bat"/>
- <tarfileset prefix="misc/scala-devel/plugins" dir="build/pack"
- includes="${plugin.name}.jar"/>
- </tar>
- </target>
-
- <target name="dist.sbaz" depends="dist.archive">
- <sbaz file="build/dist/${plugin.name}-${version}.sbp"
- adfile="build/dist/${plugin.name}-${version}.advert"
- name="${plugin.name}"
- version="${version}"
- depends="scala-devel"
- desc="${plugin.description}"
- link="${plugin.sbazbaseurl}/${plugin.name}-${version}.sbp">
- <libset dir="build/pack" includes="${plugin.name}.jar"/>
- <srcset dir="build/pack" includes="${plugin.name}-src.jar"/>
- <docset dir="doc" includes="README"/>
- <docset dir="doc" includes="examples/**"/>
- <docset dir="doc" includes="api/**"/>
- <binset dir="build/toolscript"/>
- <looseset destination="misc/scala-devel/plugins">
- <fileset file="build/pack/${plugin.name}.jar"/>
- </looseset>
- </sbaz>
- </target>
-
- <target name="dist.done" depends="dist.sbaz"/>
-
- <!-- =================================================================
- Local installation
- ================================================================== -->
-
- <target name="install" depends="dist.done">
- <exec executable="sbaz">
- <arg line="-v install -f build/dist/${plugin.name}-${version}.sbp"/>
- </exec>
- </target>
-
- <target name="uninstall">
- <exec executable="sbaz">
- <arg line="remove ${plugin.name}"/>
- </exec>
- </target>
-
- <!-- =================================================================
- Clean targets
- ================================================================== -->
-
- <target name="clean.build">
- <delete dir="build/build.main" includeemptydirs="yes"
- quiet="yes" failonerror="no"/>
- <delete dir="build/build.test" includeemptydirs="yes"
- quiet="yes" failonerror="no"/>
- <delete dir="build/build.examples" includeemptydirs="yes"
- quiet="yes" failonerror="no"/>
- </target>
-
- <target name="clean.pack" depends="clean.build">
- <delete dir="build/pack" includeemptydirs="yes"
- quiet="yes" failonerror="no"/>
- </target>
-
- <target name="clean.dist" depends="clean.pack">
- <delete dir="build/dist" includeemptydirs="yes"
- quiet="yes" failonerror="no"/>
- <delete dir="build/toolscript" includeemptydirs="yes"
- quiet="yes" failonerror="no"/>
- </target>
-
- <target name="clean.doc">
- <delete dir="doc/api" includeemptydirs="yes"
- quiet="yes" failonerror="no"/>
- </target>
-
- <target name="clean.all" depends="clean.dist,clean.doc"/>
-</project>
diff --git a/docs/examples/plugintemplate/doc/README b/docs/examples/plugintemplate/doc/README
deleted file mode 100644
index 958f712..0000000
--- a/docs/examples/plugintemplate/doc/README
+++ /dev/null
@@ -1,68 +0,0 @@
-Scala compiler plugin template
-------------------------------
-
-This project is a template that can be used for creating compiler
-plugins for the Scala compiler.
-
-
-Installation
-------------
-To install the compiler plugin, run "ant install". This will create
-an sbaz package for the template plugin and install it in the scala
-installation available in your PATH.
-The install command will also create a script for running the plugin
-as standalone application. The command is called "runplugintemplate"
-and available in your scala installation as well.
-To uninstall the plugin again, run "and uninstall".
-
-Alternatively, copy the file build/pack/plugintemplate.jar, generated
-by "ant pack", to the directory misc/scala-devel/plugins of your
-scala distribution. The scala compiler will then find and integrate
-the new plugin.
-
-Customization
--------------
-The following files need to be edited when creating a new plugin
-- plugin.properties
-- misc/scalac-plugin.xml
-- src / test / doc/examples: The source files of the template plugin
- are located a package called "plugintemplate". This will most likely
- be changed for new plugins.
-
-When using eclipse for development, make sure "scala-compiler.jar" is
-in the Java Build Path: Right-Click the project and select "Properties".
-Then go to "Java Build Path" -> "Libraries" and add the jar file
-"eclipse/plugins/ch.epfl.lamp.sdt.core[..]/lib/scala-compiler.jar".
-
-Traverse, Transform, Check
---------------------------
-There are several pre-defined components that can be used to code
-the behavior of a plugin:
-- TemplateTraverseComponent: a template for tree traversers. Used
- to analyze and collect data about compiler trees.
- -> implement the "check" method
-
-- TemplateTransformComponent: a template for tree transformers.
- -> implement "preTransform" and / or "postTransform"
-
-- TemplateInfoTransformComponent: also a tree transformer, which
- additionally is an InfoTransformer. Allows changing the type
- of some symbols for later phases.
- -> implement "preTransform" and / or "postTransform", and the
- "apply" method of the "infoTransformer".
-
-- TemplateAnnotationChecker: a plugin for the typechecker, useful
- for pluggable type systems. Computes the subtyping between two
- annotated types, and allows providing inferred type information.
- -> implement "annotationsConform" and optionally "addAnnotations"
-
-- TemplateComponent: the most general component. Can do anything it
- likes with the CompilationUnits.
- -> implement the "run" method
-
-Ant tasks
----------
-"build.xml" defines Ant tasks for building, testing and packing a
-plugin. The tests are written using the ScalaTest framework
-(http://www.artima.com/scalatest/).
-Run the tests using "ant test", or "ant guitest".
diff --git a/docs/examples/plugintemplate/doc/examples/plugintemplate/examples/BasicExample.scala b/docs/examples/plugintemplate/doc/examples/plugintemplate/examples/BasicExample.scala
deleted file mode 100644
index d1f6c91..0000000
--- a/docs/examples/plugintemplate/doc/examples/plugintemplate/examples/BasicExample.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package plugintemplate.examples
-
-/** An example demonstrating the fancy features of the new
- * compiler plugin.
- */
-class BasicExample {
- def foo = ()
-}
diff --git a/docs/examples/plugintemplate/lib/scalatest.jar.desired.sha1 b/docs/examples/plugintemplate/lib/scalatest.jar.desired.sha1
deleted file mode 100644
index 14c20f8..0000000
--- a/docs/examples/plugintemplate/lib/scalatest.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8b6ba65c8146217333f0762087fe2340d572e832 ?scalatest.jar
diff --git a/docs/examples/plugintemplate/misc/scalac-plugin.xml b/docs/examples/plugintemplate/misc/scalac-plugin.xml
deleted file mode 100644
index bad4e87..0000000
--- a/docs/examples/plugintemplate/misc/scalac-plugin.xml
+++ /dev/null
@@ -1,4 +0,0 @@
-<plugin>
- <name>plugintemplate</name>
- <classname>plugintemplate.TemplatePlugin</classname>
-</plugin>
diff --git a/docs/examples/plugintemplate/plugin.properties b/docs/examples/plugintemplate/plugin.properties
deleted file mode 100644
index 131f961..0000000
--- a/docs/examples/plugintemplate/plugin.properties
+++ /dev/null
@@ -1,10 +0,0 @@
-scala.home=../../../build/pack
-
-plugin.name=plugintemplate
-plugin.commandname=runplugintemplate
-plugin.description=A template compiler plugin saying hello to the World
-plugin.mainclass=plugintemplate.standalone.Main
-plugin.sbazbaseurl=http://scala.epfl.ch/downloads/packages
-
-version.major=0
-version.minor=1
diff --git a/docs/examples/plugintemplate/src/plugintemplate/PluginProperties.scala b/docs/examples/plugintemplate/src/plugintemplate/PluginProperties.scala
deleted file mode 100644
index ed078a0..0000000
--- a/docs/examples/plugintemplate/src/plugintemplate/PluginProperties.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package plugintemplate
-
-import java.util.Properties
-
-/** A utility to load properties of this plugin via the property
- * file "plugin.properties"
- */
-object PluginProperties {
- private val propFilename = "plugin.properties"
-
- val pluginName = getOrElse("plugin.name", "(name_unknown)")
- val pluginDescription = getOrElse("plugin.description", "(plugin description not found)")
- val pluginCommand = getOrElse("plugin.commandname", "(command_unknown)")
- val versionString = {
- val default = "(version_unknown)"
- props match {
- case Some(p) =>
- val major = p.getProperty("version.major")
- val minor = p.getProperty("version.minor")
- if ((major eq null) || (minor eq null)) default
- else major +"."+ minor
- case None => default
- }
- }
-
- private def getOrElse(property: String, default: String) = {
- props match {
- case Some(p) if (p.getProperty(property) != null) =>
- p.getProperty(property)
- case _ =>
- default
- }
- }
-
- private lazy val props: Option[Properties] = {
- /** Running from JAR file: the properties file should be in the
- * jar as well
- */
- var stream = this.getClass.getResourceAsStream("/"+ propFilename)
- if (stream == null) {
- /** Running from .class files: expect classfiles to be in
- * directory [...]/build/build.main, and [...] to contain
- * the properties file.
- */
- try {
- val current = this.getClass.getClassLoader.getResource(".")
- val dir = new java.io.File(current.toURI)
- // dir will be [...]/build/build.main/
- stream = new java.io.FileInputStream(dir.getParentFile.getParent +"/"+ propFilename)
- } catch {
- case _ => ()
- }
- }
- if (stream == null) None
- else {
- val p = new Properties
- p.load(stream)
- Some(p)
- }
- }
-}
diff --git a/docs/examples/plugintemplate/src/plugintemplate/TemplateAnnotationChecker.scala b/docs/examples/plugintemplate/src/plugintemplate/TemplateAnnotationChecker.scala
deleted file mode 100644
index 6cd3472..0000000
--- a/docs/examples/plugintemplate/src/plugintemplate/TemplateAnnotationChecker.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-package plugintemplate
-
-import scala.tools.nsc.Global
-
-abstract class TemplateAnnotationChecker {
- val global: Global
- import global._
-
- object checker extends AnnotationChecker {
- def annotationsConform(tpe1: Type, tpe2: Type): Boolean = {
- println("checking: "+ tpe1 +" <: "+ tpe2)
- true
- }
-
- override def addAnnotations(tree: Tree, tpe: Type): Type = {
- println("adding annot to "+ tree.symbol)
- tpe
- }
- }
-}
diff --git a/docs/examples/plugintemplate/src/plugintemplate/TemplateComponent.scala b/docs/examples/plugintemplate/src/plugintemplate/TemplateComponent.scala
deleted file mode 100644
index b63f320..0000000
--- a/docs/examples/plugintemplate/src/plugintemplate/TemplateComponent.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-package plugintemplate
-
-import scala.tools.nsc._
-import scala.tools.nsc.plugins.PluginComponent
-
-/** This class shows how to implement a compiler component that
- * can be used in a compiler plugin. If the plugin uses a tree
- * transformer and / or an InfoTransformer, look at the two
- * classes <code>TemplateTransformComponent</code> and
- * <code>TemplateInfoTransformComponent</code>.
- *
- * @todo Adapt the name of this class to the plugin, and implement it.
- */
-class TemplateComponent(val global: Global) extends PluginComponent {
- import global._
-
- val runsAfter = List[String]("refchecks")
-
- /** The name of this plugin phase
- * @todo Adapt to specific plugin.
- */
- val phaseName = "plugintemplate"
-
- def newPhase(prev: Phase) = new Phase(prev) {
- def name = phaseName
-
- /** The implementation of this Phase's behavior
- *
- * @todo Implementation.
- */
- def run {
- println("Hello from phase "+ name)
- }
- }
-}
diff --git a/docs/examples/plugintemplate/src/plugintemplate/TemplateInfoTransformComponent.scala b/docs/examples/plugintemplate/src/plugintemplate/TemplateInfoTransformComponent.scala
deleted file mode 100644
index 71069ae..0000000
--- a/docs/examples/plugintemplate/src/plugintemplate/TemplateInfoTransformComponent.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-package plugintemplate
-
-import scala.tools.nsc._
-import scala.tools.nsc.plugins.PluginComponent
-import scala.tools.nsc.transform.InfoTransform
-// import scala.tools.nsc.transform.TypingTransformers
-
-/** This class implements a plugin component using tree transformers and
- * InfoTransformer. An InfoTransformer will be automatically created
- * and registered in <code>SymbolTable.infoTransformers</code>. If
- * a <code>Typer</code> is needed during transformation, the component
- * should mix in <code>TypingTransformers</code>. This provides a local
- * variable <code>localTyper: Typer</code> that is always updated to
- * the current context.
- *
- * @todo Adapt the name of this class to the plugin, and implement it.
- */
-class TemplateInfoTransformComponent(val global: Global) extends PluginComponent
- // with TypingTransformers
- with InfoTransform {
-
- import global._
- import global.definitions._
-
- val runsAfter = List[String]("refchecks")
- /** The phase name of the compiler plugin
- * @todo Adapt to specific plugin.
- */
- val phaseName = "plugintemplateinfotransform"
-
- def transformInfo(sym: Symbol, tp: Type): Type = infoTransformer.mapOver(tp)
-
- def newTransformer(unit: CompilationUnit) = new TemplateTransformer
-
- /** The type transformation applied by this component. The trait InfoTransform
- * will create an instance of InfoTransformer applying this TypeMap. The type
- * map will be applied when computing a symbol's type in all phases
- * <em>after</em> "plugintemplateinfotransform".
- *
- * @todo Implement.
- */
- private val infoTransformer = new TypeMap {
- def apply(tp: Type): Type = tp match {
- case MethodType(pts, rt) =>
- println("methodType (_, _, ..) => "+ rt)
- tp
- case _ => mapOver(tp)
- }
- }
-
- /** The tree transformer that implements the behavior of this
- * component. Change the superclass to <code>TypingTransformer</code>
- * to make a local typechecker <code>localTyper</code> available.
- *
- * @todo Implement.
- */
- class TemplateTransformer extends /*Typing*/ Transformer {
- /** When using <code>preTransform</code>, each node is
- * visited before its children.
- */
- def preTransform(tree: Tree): Tree = tree match {
- case ValDef(_, name, _, _) =>
- println("pre-info-transforming valdef "+ name)
- tree
- case _ => tree
- }
-
- /** When using <code>postTransform</code>, each node is
- * visited after its children.
- */
- def postTransform(tree: Tree): Tree = tree match {
- case _ => tree
- }
-
- override def transform(tree: Tree): Tree = {
- postTransform(super.transform(preTransform(tree)))
- }
- }
-}
diff --git a/docs/examples/plugintemplate/src/plugintemplate/TemplatePlugin.scala b/docs/examples/plugintemplate/src/plugintemplate/TemplatePlugin.scala
deleted file mode 100644
index 6cda37d..0000000
--- a/docs/examples/plugintemplate/src/plugintemplate/TemplatePlugin.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-package plugintemplate
-
-import scala.tools.nsc.Global
-import scala.tools.nsc.plugins.Plugin
-
-/** A class describing the compiler plugin
- *
- * @todo Adapt the name of this class to the plugin being
- * implemented
- */
-class TemplatePlugin(val global: Global) extends Plugin {
- /** The name of this plugin. Extracted from the properties file. */
- val name = PluginProperties.pluginName
-
- val runsAfter = List[String]("refchecks")
-
- /** A short description of the plugin, read from the properties file */
- val description = PluginProperties.pluginDescription
-
- /** @todo A description of the plugin's options */
- override val optionsHelp = Some(
- " -P:"+ name +":option sets some option for this plugin")
-
- /** @todo Implement parsing of plugin options */
- override def processOptions(options: List[String], error: String => Unit) {
- super.processOptions(options, error)
- }
-
- /** The compiler components that will be applied when running
- * this plugin
- *
- * @todo Adapt to the plugin being implemented
- */
- val components = TemplatePlugin.components(global)
-
- val checker = new TemplateAnnotationChecker {
- val global: TemplatePlugin.this.global.type = TemplatePlugin.this.global
- }
- global.addAnnotationChecker(checker.checker)
-}
-
-object TemplatePlugin {
- /** Yields the list of Components to be executed in this plugin
- *
- * @todo: Adapt to specific implementation.
- */
- def components(global: Global) =
- List(new TemplateComponent(global),
- new TemplateTraverseComponent(global),
- new TemplateTransformComponent(global),
- new TemplateInfoTransformComponent(global))
-}
diff --git a/docs/examples/plugintemplate/src/plugintemplate/TemplateTransformComponent.scala b/docs/examples/plugintemplate/src/plugintemplate/TemplateTransformComponent.scala
deleted file mode 100644
index 7c2630d..0000000
--- a/docs/examples/plugintemplate/src/plugintemplate/TemplateTransformComponent.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-package plugintemplate
-
-import scala.tools.nsc._
-import scala.tools.nsc.plugins.PluginComponent
-import scala.tools.nsc.transform.Transform
-// import scala.tools.nsc.transform.TypingTransformers
-
-/** This class implements a plugin component using tree transformers. If
- * a <code>Typer</code> is needed during transformation, the component
- * should mix in <code>TypingTransformers</code>. This provides a local
- * variable <code>localTyper: Typer</code> that is always updated to
- * the current context.
- *
- * @todo Adapt the name of this class to the plugin, and implement it.
- */
-class TemplateTransformComponent(val global: Global) extends PluginComponent
- // with TypingTransformers
- with Transform {
- import global._
- import global.definitions._
-
- val runsAfter = List[String]("refchecks")
- /** The phase name of the compiler plugin
- * @todo Adapt to specific plugin.
- */
- val phaseName = "plugintemplatetransform"
-
- def newTransformer(unit: CompilationUnit) = new TemplateTransformer
-
- /** The tree transformer that implements the behavior of this
- * component. Change the superclass to <code>TypingTransformer</code>
- * to make a local typechecker <code>localTyper</code> available.
- *
- * @todo Implement.
- */
- class TemplateTransformer extends /*Typing*/ Transformer {
- /** When using <code>preTransform</code>, each node is
- * visited before its children.
- */
- def preTransform(tree: Tree): Tree = tree match {
- case _ => tree
- }
-
- /** When using <code>postTransform</code>, each node is
- * visited after its children.
- */
- def postTransform(tree: Tree): Tree = tree match {
- case New(tpt) =>
- println("post-transforming new "+ tpt)
- tree
- case _ => tree
- }
-
- override def transform(tree: Tree): Tree = {
- postTransform(super.transform(preTransform(tree)))
- }
- }
-}
diff --git a/docs/examples/plugintemplate/src/plugintemplate/TemplateTraverseComponent.scala b/docs/examples/plugintemplate/src/plugintemplate/TemplateTraverseComponent.scala
deleted file mode 100644
index 400daf7..0000000
--- a/docs/examples/plugintemplate/src/plugintemplate/TemplateTraverseComponent.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package plugintemplate
-
-import scala.tools.nsc._
-import scala.tools.nsc.plugins.PluginComponent
-
-/** This class implements a plugin component using a tree
- * traverser */
-class TemplateTraverseComponent (val global: Global) extends PluginComponent {
- import global._
- import global.definitions._
-
- val runsAfter = List[String]("refchecks")
- /** The phase name of the compiler plugin
- * @todo Adapt to specific plugin.
- */
- val phaseName = "plugintemplatetraverse"
-
- def newPhase(prev: Phase): Phase = new TraverserPhase(prev)
- class TraverserPhase(prev: Phase) extends StdPhase(prev) {
- def apply(unit: CompilationUnit) {
- newTraverser().traverse(unit.body)
- }
- }
-
- def newTraverser(): Traverser = new ForeachTreeTraverser(check)
-
- def check(tree: Tree): Unit = tree match {
- case Apply(fun, args) =>
- println("traversing application of "+ fun)
- case _ => ()
- }
-}
diff --git a/docs/examples/plugintemplate/src/plugintemplate/standalone/Main.scala b/docs/examples/plugintemplate/src/plugintemplate/standalone/Main.scala
deleted file mode 100644
index 19d2613..0000000
--- a/docs/examples/plugintemplate/src/plugintemplate/standalone/Main.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package plugintemplate.standalone
-
-import plugintemplate.PluginProperties
-import scala.tools.nsc.CompilerCommand
-import scala.tools.nsc.Settings
-
-/** An object for running the plugin as standalone application.
- *
- * @todo: print, parse and apply plugin options !!!
- * ideally re-use the TemplatePlugin (-> runsAfter, optionsHelp,
- * processOptions, components, annotationChecker) instead of
- * duplicating it here and in PluginRunner.
- */
-object Main {
- def main(args: Array[String]) {
- val settings = new Settings
-
- val command = new CompilerCommand(args.toList, settings) {
- /** The command name that will be printed in in the usage message.
- * This is automatically set to the value of 'plugin.commandname' in the
- * file build.properties.
- */
- override val cmdName = PluginProperties.pluginCommand
- }
-
- if (!command.ok)
- return()
-
- /** The version number of this plugin is read from the properties file
- */
- if (settings.version.value) {
- println(command.cmdName +" version "+ PluginProperties.versionString)
- return()
- }
- if (settings.help.value) {
- println(command.usageMsg)
- return()
- }
-
- val runner = new PluginRunner(settings)
- val run = new runner.Run
- run.compile(command.files)
- }
-}
diff --git a/docs/examples/plugintemplate/src/plugintemplate/standalone/PluginRunner.scala b/docs/examples/plugintemplate/src/plugintemplate/standalone/PluginRunner.scala
deleted file mode 100644
index 786d72d..0000000
--- a/docs/examples/plugintemplate/src/plugintemplate/standalone/PluginRunner.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-package plugintemplate.standalone
-
-import plugintemplate.{TemplateAnnotationChecker, TemplatePlugin}
-import scala.tools.nsc.{Global, Settings, SubComponent}
-import scala.tools.nsc.reporters.{ConsoleReporter, Reporter}
-
-/** This class is a compiler that will be used for running
- * the plugin in standalone mode.
- */
-class PluginRunner(settings: Settings, reporter: Reporter)
-extends Global(settings, reporter) {
- def this(settings: Settings) = this(settings, new ConsoleReporter(settings))
-
- val annotChecker = new TemplateAnnotationChecker {
- val global: PluginRunner.this.type = PluginRunner.this
- }
- addAnnotationChecker(annotChecker.checker)
-
- /** The phases to be run.
- *
- * @todo: Adapt to specific plugin implementation
- */
- override protected def computeInternalPhases() {
- phasesSet += syntaxAnalyzer
- phasesSet += analyzer.namerFactory
- phasesSet += analyzer.typerFactory
- phasesSet += superAccessors // add super accessors
- phasesSet += pickler // serialize symbol tables
- phasesSet += refchecks // perform reference and override checking, translate nested objects
-
- for (phase <- TemplatePlugin.components(this)) {
- phasesSet += phase
- }
- }
-
-}
diff --git a/docs/examples/plugintemplate/test/plugintemplate/PluginPropertiesSuite.scala b/docs/examples/plugintemplate/test/plugintemplate/PluginPropertiesSuite.scala
deleted file mode 100644
index a07796a..0000000
--- a/docs/examples/plugintemplate/test/plugintemplate/PluginPropertiesSuite.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-package plugintemplate
-
-import org.scalatest.Suite
-
-class PluginPropertiesSuite extends Suite {
- def testProperties() {
- expect("A template compiler plugin saying hello to the World") {
- PluginProperties.pluginDescription
- }
- expect("0.1") {
- PluginProperties.versionString
- }
- }
-}
diff --git a/docs/examples/plugintemplate/test/plugintemplate/TemplatePluginSuite.scala b/docs/examples/plugintemplate/test/plugintemplate/TemplatePluginSuite.scala
deleted file mode 100644
index 06916f9..0000000
--- a/docs/examples/plugintemplate/test/plugintemplate/TemplatePluginSuite.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package plugintemplate
-
-import org.scalatest.Suite
-import org.scalatest.Ignore
-
-class TemplatePluginSuite extends Suite {
- def testName() {
- import scala.tools.nsc.{Global, Settings}
- import scala.tools.nsc.reporters.ConsoleReporter
- val settings = new Settings
- val compiler = new Global(settings, new ConsoleReporter(settings))
- val plugin = new TemplatePlugin(compiler)
- expect("plugintemplate") {
- plugin.name
- }
- }
-
- @Ignore
- def testFail() {
- expect(1) { 2 }
- }
-}
diff --git a/docs/examples/sort.scala b/docs/examples/sort.scala
deleted file mode 100644
index 9a928f1..0000000
--- a/docs/examples/sort.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-package examples
-
-object sort {
-
- def sort(a: Array[Int]) {
-
- def swap(i: Int, j: Int) {
- val t = a(i); a(i) = a(j); a(j) = t
- }
-
- def sort1(l: Int, r: Int) {
- val pivot = a((l + r) / 2)
- var i = l
- var j = r
- while (i <= j) {
- while (a(i) < pivot) { i += 1 }
- while (a(j) > pivot) { j -= 1 }
- if (i <= j) {
- swap(i, j)
- i += 1
- j -= 1
- }
- }
- if (l < j) sort1(l, j)
- if (j < r) sort1(i, r)
- }
-
- if (a.length > 0)
- sort1(0, a.length - 1)
- }
-
- def println(ar: Array[Int]) {
- def print1 = {
- def iter(i: Int): String =
- ar(i) + (if (i < ar.length-1) "," + iter(i+1) else "")
- if (ar.length == 0) "" else iter(0)
- }
- Console.println("[" + print1 + "]")
- }
-
- def main(args: Array[String]) {
- val ar = Array(6, 2, 8, 5, 1)
- println(ar)
- sort(ar)
- println(ar)
- }
-
-}
diff --git a/docs/examples/sort1.scala b/docs/examples/sort1.scala
deleted file mode 100644
index 39e5519..0000000
--- a/docs/examples/sort1.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package examples
-
-object sort1 {
-
- def sort(a: List[Int]): List[Int] = {
- if (a.length < 2)
- a
- else {
- val pivot = a(a.length / 2)
- sort(a.filter(x => x < pivot)) :::
- a.filter(x => x == pivot) :::
- sort(a.filter(x => x > pivot))
- }
- }
-
- def main(args: Array[String]) {
- val xs = List(6, 2, 8, 5, 1)
- println(xs)
- println(sort(xs))
- }
-
-}
diff --git a/docs/examples/sort2.scala b/docs/examples/sort2.scala
deleted file mode 100644
index 8e0b070..0000000
--- a/docs/examples/sort2.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-package examples
-
-object sort2 {
-
- def sort(a: List[Int]): List[Int] = {
- if (a.length < 2)
- a
- else {
- val pivot = a(a.length / 2)
- def lePivot(x: Int) = x < pivot
- def gtPivot(x: Int) = x > pivot
- def eqPivot(x: Int) = x == pivot
- sort(a filter lePivot) :::
- sort(a filter eqPivot) :::
- sort(a filter gtPivot)
- }
- }
-
- def main(args: Array[String]) {
- val xs = List(6, 2, 8, 5, 1)
- println(xs)
- println(sort(xs))
- }
-
-}
diff --git a/docs/examples/tcpoly/collection/HOSeq.scala b/docs/examples/tcpoly/collection/HOSeq.scala
deleted file mode 100644
index a6757b9..0000000
--- a/docs/examples/tcpoly/collection/HOSeq.scala
+++ /dev/null
@@ -1,167 +0,0 @@
-package examples.tcpoly.collection;
-
-trait HOSeq {
- // an internal interface that encapsulates the accumulation of elements (of type elT) to produce
- // a structure of type coll[elT] -- different kinds of collections should provide different implicit
- // values implementing this interface, in order to provide more performant ways of building that structure
- trait Accumulator[+coll[x], elT] {
- def += (el: elT): Unit
- def result: coll[elT]
- }
-
-
- // Iterable abstracts over the type of its structure as well as its elements (see PolyP's Bifunctor)
- // m[x] is intentionally unbounded: fold can then be defined nicely
- // variance: if we write m[+x] instead of +m[+x], x is an invariant position because its enclosing type
- // is an invariant position -- should probably rule that out?
- trait Iterable[+m[+x], +t] {
- //def unit[a](orig: a): m[a]
- def iterator: Iterator[t]
-
- // construct an empty accumulator that will produce the same structure as this iterable, with elements of type t
- def accumulator[t]: Accumulator[m, t]
-
- def filter(p: t => Boolean): m[t] = {
- val buf = accumulator[t]
- val elems = elements
- while (elems.hasNext) { val x = elems.next; if (p(x)) buf += x }
- buf.result
- }
-
- def map[s](f: t => s): m[s] = {
- val buf = accumulator[s]
- val elems = elements
- while (elems.hasNext) buf += f(elems.next)
- buf.result
- }
-
- // flatMap is a more specialized map, it only works if the mapped function produces Iterable values,
- // which are then added to the result one by one
- // the compiler should be able to find the right accumulator (implicit buf) to build the result
- // to get concat, resColl = SingletonIterable, f = unit for SingletonIterable
- def flatMap[resColl[x] <: Iterable[resColl, x], s](f: t => resColl[s])(implicit buf: Accumulator[resColl, s]): resColl[s] = {
- // TODO: would a viewbound for resColl[x] be better?
- // -- 2nd-order type params are not yet in scope in view bound
- val elems = elements
- while (elems.hasNext) {
- val elemss: Iterator[s] = f(elems.next).iterator
- while (elemss.hasNext) buf += elemss.next
- }
- buf.result
- }
- }
-
- final class ListBuffer[A] {
- private var start: List[A] = Nil
- private var last: ::[A] = _
- private var exported: boolean = false
-
- /** Appends a single element to this buffer.
- *
- * @param x the element to append.
- */
- def += (x: A): unit = {
- if (exported) copy
- if (start.isEmpty) {
- last = new HOSeq.this.:: (x, Nil)
- start = last
- } else {
- val last1 = last
- last = new HOSeq.this.:: (x, null) // hack: ::'s tail will actually be last
- //last1.tl = last
- }
- }
-
- /** Converts this buffer to a list
- */
- def toList: List[A] = {
- exported = !start.isEmpty
- start
- }
-
- /** Clears the buffer contents.
- */
- def clear: unit = {
- start = Nil
- exported = false
- }
-
- /** Copy contents of this buffer */
- private def copy = {
- var cursor = start
- val limit = last.tail
- clear
- while (cursor ne limit) {
- this += cursor.head
- cursor = cursor.tail
- }
- }
- }
-
- implicit def listAccumulator[elT]: Accumulator[List, elT] = new Accumulator[List, elT] {
- private[this] val buff = new ListBuffer[elT]
- def += (el: elT): Unit = buff += el
- def result: List[elT] = buff.toList
- }
-
- trait List[+t] extends Iterable[List, t] {
- def head: t
- def tail: List[t]
- def isEmpty: Boolean
- def iterator: Iterator[t] = error("TODO")
-
- // construct an empty accumulator that will produce the same structure as this iterable, with elements of type t
- def accumulator[t]: Accumulator[List, t] = error("TODO")
- }
-
- // TODO: the var tl approach does not seem to work because subtyping isn't fully working yet
- final case class ::[+b](hd: b, private val tl: List[b]) extends List[b] {
- def head = hd
- def tail = if(tl==null) this else tl // hack
- override def isEmpty: boolean = false
- }
-
- case object Nil extends List[Nothing] {
- def isEmpty = true
- def head: Nothing =
- throw new NoSuchElementException("head of empty list")
- def tail: List[Nothing] =
- throw new NoSuchElementException("tail of empty list")
- }
-}
-
-
-
-// misc signatures collected from mailing list / library code:
- /*override def flatMap[B](f: A => Iterable[B]): Set[B]
- final override def flatMap[b](f: Any => Iterable[b]): Array[b]
- def flatMap[b](f: a => Parser[b]) = new Parser[b]
- override def flatMap[b](f: a => Iterable[b]): List[b]
-
-
- MapResult[K] <: Seq[K]
- FilterResult <: Seq[T]
- Concat <: Seq[T]
- Subseq <: Seq[T]
-
-
- def map[K](f: T=>K): MapResult[K]
- def filter(f: T=>Boolean): FilterResult
- def subseq(from: int, to: int): Subseq
- def flatMap[S <: Seq[K], K](f: T => S): S#Concat // legal?
- def concat(others: Seq[T]): Concat
- */
-
-/*trait Iterator[t] {
- // @post hasAdvanced implies hasNext
- // model def hasAdvanced: Boolean
-
- def hasNext: Boolean // pure
-
- // @pre hasAdvanced
- def current: t // pure
-
- // @pre hasNext
- // @post hasAdvanced
- def advance: Unit
-}*/
diff --git a/docs/examples/tcpoly/monads/Monads.scala b/docs/examples/tcpoly/monads/Monads.scala
deleted file mode 100644
index b6e3d5b..0000000
--- a/docs/examples/tcpoly/monads/Monads.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-package examples.tcpoly.monad;
-
-trait Monads {
- /**
- * class Monad m where
- * (>>=) :: m a -> (a -> m b) -> m b
- * return :: a -> m a
- *
- * MonadTC encodes the above Haskell type class,
- * an instance of MonadTC corresponds to a method dictionary.
- * (see http://lampwww.epfl.ch/~odersky/talks/wg2.8-boston06.pdf)
- *
- * Note that the identity (`this') of the method dictionary does not really correspond
- * to the instance of m[x] (`self') that is `wrapped': e.g., unit does not use `self' (which
- * corresponds to the argument of the implicit conversion that encodes an instance of this type class)
- */
- // Option =:= [x] => Option[x] <: [x] => Any
-// trait MonadTC[m <: [x] => Any, a] {
- // MonadTC[m[x], a] x is a type parameter too -- should not write e.g., m[Int] here
- trait MonadTC[m[x], a] {
- def unit[a](orig: a): m[a]
-
- // >>='s first argument comes from the implicit definition constructing this "method dictionary"
- def >>=[b](fun: a => m[b]): m[b]
- }
-}
-
-/**
- * instance Monad Maybe where
- * (Just x) >>= k = k x
- * Nothing >>= _ = Nothing
- */
-trait OptionMonad extends Monads {
- // this implicit method encodes the Monad type class instance for Option
- implicit def OptionInstOfMonad[a](self: Option[a]): MonadTC[Option, a]
- = new MonadTC[Option, a] {
- def unit[a](orig: a) = Some(orig)
- def >>=[b](fun: a => Option[b]): Option[b] = self match {
- case Some(x) => fun(x)
- case None => None
- }
- }
-}
-
-object main extends OptionMonad with Application {
- Console.println(Some("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa") >>= (x => Some(x.length)))
-}
-
-
-/*
-trait MonadTC[m[x], a] requires m[x] {
- def unit[a](orig: a): m[a]
-
- // >>='s first argument comes from the implicit definition constructing this "method dictionary"
- def >>=[b](fun: a => m[b]): m[b]
-}
-
-abstract class OptionIsMonad[t[x] <: Option[x], a] implicit extends MonadTC[t, a] {
- def unit[a](orig: a) = Some(orig) // TODO: problematic.. is a meta-member: not invoked on this
-}
-
-class SomeIsMonad[a] extends OptionIsMonad[Some, a] {
- def >>=[b](fun: a => Option[b]): Option[b] = fun(x)
-}
-
-class NoneIsMonad[a] extends OptionIsMonad[None, a] {
- def >>=[b](fun: a => Option[b]): Option[b] = None
-}
-*/
diff --git a/docs/examples/typeinf.scala b/docs/examples/typeinf.scala
deleted file mode 100644
index d4bc8bf..0000000
--- a/docs/examples/typeinf.scala
+++ /dev/null
@@ -1,253 +0,0 @@
-package examples
-
-object typeinf {
-
-trait Term {}
-
-case class Var(x: String) extends Term {
- override def toString() = x
-}
-case class Lam(x: String, e: Term) extends Term {
- override def toString() = "(\\" + x + "." + e + ")"
-}
-case class App(f: Term, e: Term) extends Term {
- override def toString() = "(" + f + " " + e + ")"
-}
-case class Let(x: String, e: Term, f: Term) extends Term {
- override def toString() = "let " + x + " = " + e + " in " + f
-}
-
-sealed trait Type {}
-case class Tyvar(a: String) extends Type {
- override def toString() = a
-}
-case class Arrow(t1: Type, t2: Type) extends Type {
- override def toString() = "(" + t1 + "->" + t2 + ")"
-}
-case class Tycon(k: String, ts: List[Type]) extends Type {
- override def toString() =
- k + (if (ts.isEmpty) "" else ts.mkString("[", ",", "]"))
-}
-
-object typeInfer {
-
- private var n: Int = 0
- def newTyvar(): Type = { n += 1; Tyvar("a" + n) }
-
- trait Subst extends Function1[Type, Type] {
- def lookup(x: Tyvar): Type
- def apply(t: Type): Type = t match {
- case tv @ Tyvar(a) => val u = lookup(tv); if (t == u) t else apply(u)
- case Arrow(t1, t2) => Arrow(apply(t1), apply(t2))
- case Tycon(k, ts) => Tycon(k, ts map apply)
- }
- def extend(x: Tyvar, t: Type) = new Subst {
- def lookup(y: Tyvar): Type = if (x == y) t else Subst.this.lookup(y)
- }
- }
-
- val emptySubst = new Subst { def lookup(t: Tyvar): Type = t }
-
- case class TypeScheme(tyvars: List[Tyvar], tpe: Type) {
- def newInstance: Type =
- (emptySubst /: tyvars) ((s, tv) => s.extend(tv, newTyvar())) (tpe)
- }
-
- type Env = List[Pair[String, TypeScheme]]
-
- def lookup(env: Env, x: String): TypeScheme = env match {
- case List() => null
- case Pair(y, t) :: env1 => if (x == y) t else lookup(env1, x)
- }
-
- def gen(env: Env, t: Type): TypeScheme =
- TypeScheme(tyvars(t) diff tyvars(env), t)
-
- def tyvars(t: Type): List[Tyvar] = t match {
- case tv @ Tyvar(a) => List(tv)
- case Arrow(t1, t2) => tyvars(t1) union tyvars(t2)
- case Tycon(k, ts) => (List[Tyvar]() /: ts) ((tvs, t) => tvs union tyvars(t))
- }
-
- def tyvars(ts: TypeScheme): List[Tyvar] =
- tyvars(ts.tpe) diff ts.tyvars;
-
- def tyvars(env: Env): List[Tyvar] =
- (List[Tyvar]() /: env) ((tvs, nt) => tvs union tyvars(nt._2))
-
- def mgu(t: Type, u: Type, s: Subst): Subst = Pair(s(t), s(u)) match {
- case Pair(Tyvar(a), Tyvar(b)) if (a == b) =>
- s
- case Pair(Tyvar(a), _) if !(tyvars(u) contains a) =>
- s.extend(Tyvar(a), u)
- case Pair(_, Tyvar(a)) =>
- mgu(u, t, s)
- case Pair(Arrow(t1, t2), Arrow(u1, u2)) =>
- mgu(t1, u1, mgu(t2, u2, s))
- case Pair(Tycon(k1, ts), Tycon(k2, us)) if (k1 == k2) =>
- (s /: (ts zip us)) ((s, tu) => mgu(tu._1, tu._2, s))
- case _ =>
- throw new TypeError("cannot unify " + s(t) + " with " + s(u))
- }
-
- case class TypeError(s: String) extends Exception(s) {}
-
- def tp(env: Env, e: Term, t: Type, s: Subst): Subst = {
- current = e
- e match {
- case Var(x) =>
- val u = lookup(env, x)
- if (u == null) throw new TypeError("undefined: " + x)
- else mgu(u.newInstance, t, s)
-
- case Lam(x, e1) =>
- val a, b = newTyvar()
- val s1 = mgu(t, Arrow(a, b), s)
- val env1 = Pair(x, TypeScheme(List(), a)) :: env
- tp(env1, e1, b, s1)
-
- case App(e1, e2) =>
- val a = newTyvar()
- val s1 = tp(env, e1, Arrow(a, t), s)
- tp(env, e2, a, s1)
-
- case Let(x, e1, e2) =>
- val a = newTyvar()
- val s1 = tp(env, e1, a, s)
- tp(Pair(x, gen(env, s1(a))) :: env, e2, t, s1)
- }
- }
- var current: Term = null
-
- def typeOf(env: Env, e: Term): Type = {
- val a = newTyvar()
- tp(env, e, a, emptySubst)(a)
- }
-}
-
- object predefined {
- val booleanType = Tycon("Boolean", List())
- val intType = Tycon("Int", List())
- def listType(t: Type) = Tycon("List", List(t))
-
- private def gen(t: Type): typeInfer.TypeScheme = typeInfer.gen(List(), t)
- private val a = typeInfer.newTyvar()
- val env = List(
-/*
- Pair("true", gen(booleanType)),
- Pair("false", gen(booleanType)),
- Pair("if", gen(Arrow(booleanType, Arrow(a, Arrow(a, a))))),
- Pair("zero", gen(intType)),
- Pair("succ", gen(Arrow(intType, intType))),
- Pair("nil", gen(listType(a))),
- Pair("cons", gen(Arrow(a, Arrow(listType(a), listType(a))))),
- Pair("isEmpty", gen(Arrow(listType(a), booleanType))),
- Pair("head", gen(Arrow(listType(a), a))),
- Pair("tail", gen(Arrow(listType(a), listType(a)))),
-*/
- Pair("fix", gen(Arrow(Arrow(a, a), a)))
- )
- }
-
- trait MiniMLParsers extends CharParsers {
-
- /** whitespace */
- def whitespace = rep{chr(' ') ||| chr('\t') ||| chr('\n')}
-
- /** A given character, possible preceded by whitespace */
- def wschr(ch: char) = whitespace &&& chr(ch)
-
- def isLetter = (c: char) => Character.isLetter(c)
- def isLetterOrDigit: char => boolean = Character.isLetterOrDigit
-
- /** identifiers or keywords */
- def id: Parser[String] =
- for (
- c: char <- rep(chr(' ')) &&& chr(isLetter);
- cs: List[char] <- rep(chr(isLetterOrDigit))
- ) yield (c :: cs).mkString("", "", "")
-
- /** Non-keyword identifiers */
- def ident: Parser[String] =
- for (s <- id if s != "let" && s != "in") yield s
-
- /** term = '\' ident '.' term | term1 {term1} | let ident "=" term in term */
- def term: Parser[Term] = (
- ( for (
- _ <- wschr('\\');
- x <- ident;
- _ <- wschr('.');
- t <- term)
- yield Lam(x, t): Term )
- |||
- ( for (
- letid <- id if letid == "let";
- x <- ident;
- _ <- wschr('=');
- t <- term;
- inid <- id; if inid == "in";
- c <- term)
- yield Let(x, t, c) )
- |||
- ( for (
- t <- term1;
- ts <- rep(term1))
- yield (t /: ts)((f, arg) => App(f, arg)) )
- )
-
- /** term1 = ident | '(' term ')' */
- def term1: Parser[Term] = (
- ( for (s <- ident)
- yield Var(s): Term )
- |||
- ( for (
- _ <- wschr('(');
- t <- term;
- _ <- wschr(')'))
- yield t )
- )
-
- /** all = term ';' */
- def all: Parser[Term] =
- for (
- t <- term;
- _ <- wschr(';'))
- yield t
- }
-
- class ParseString(s: String) extends Parsers {
- type inputType = int
- val input = 0
- def any = new Parser[char] {
- def apply(in: int): Parser[char]#Result =
- if (in < s.length()) Some(Pair(s charAt in, in + 1)) else None
- }
- }
-
- def showType(e: Term): String =
- try {
- typeInfer.typeOf(predefined.env, e).toString()
- }
- catch {
- case typeInfer.TypeError(msg) =>
- "\n cannot type: " + typeInfer.current +
- "\n reason: " + msg
- }
-
- def main(args: Array[String]) {
- Console.println(
- if (args.length == 1) {
- val ps = new ParseString(args(0)) with MiniMLParsers
- ps.all(ps.input) match {
- case Some(Pair(term, _)) =>
- "" + term + ": " + showType(term)
- case None =>
- "syntax error"
- }
- }
- else
- "usage: java examples.typeinf <expr-string>"
- )
- }
-
-}
diff --git a/docs/examples/xml/phonebook/embeddedBook.scala b/docs/examples/xml/phonebook/embeddedBook.scala
deleted file mode 100644
index 3286485..0000000
--- a/docs/examples/xml/phonebook/embeddedBook.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/* examples/phonebook/embeddedBook.scala */
-package phonebook
-
-object embeddedBook {
-
- val company = <a href="http://acme.org">ACME</a>
- val first = "Burak"
- val last = "Emir"
- val location = "work"
-
- val embBook =
- <phonebook>
- <descr>
- This is the <b>phonebook</b> of the
- {company} corporation.
- </descr>
- <entry>
- <name>{ first+" "+last }</name>
- <phone where={ location }>+41 21 693 68 {val x = 60 + 7; x}</phone>
- </entry>
- </phonebook>;
-
- def main(args: Array[String]) =
- Console.println( embBook )
-
-}
diff --git a/docs/examples/xml/phonebook/phonebook.scala b/docs/examples/xml/phonebook/phonebook.scala
deleted file mode 100644
index 3c0dfbd..0000000
--- a/docs/examples/xml/phonebook/phonebook.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-package phonebook ;
-
-object phonebook {
-
- val labPhoneBook =
- <phonebook>
- <descr>
- This is the <b>phonebook</b> of the
- <a href="http://acme.org">ACME</a> corporation.
- </descr>
- <entry>
- <name>Burak</name>
- <phone where="work"> +41 21 693 68 67</phone>
- <phone where="mobile">+41 79 602 23 23</phone>
- </entry>
- </phonebook>;
-
- Console.println( labPhoneBook );
-
- // XML is immutable - adding an element
-
- import scala.xml.{ Node, Text };
-
- def add( phonebook:Node, newEntry:Node ):Node = phonebook match {
- case <phonebook>{ ch @ _* }</phonebook> =>
- <phonebook>{ ch }{ newEntry }</phonebook>
- }
-
- val pb2 =
- add( labPhoneBook,
- <entry>
- <name>Kim</name>
- <phone where="work"> +41 21 111 11 11</phone>
- </entry> );
-
- def main(args:Array[String]) = Console.println( pb2 );
-
-}
diff --git a/docs/examples/xml/phonebook/phonebook1.scala b/docs/examples/xml/phonebook/phonebook1.scala
deleted file mode 100644
index 316c6c1..0000000
--- a/docs/examples/xml/phonebook/phonebook1.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* examples/phonebook/phonebook1.scala */
-package phonebook
-
-object phonebook1 {
-
- val labPhoneBook =
- <phonebook>
- <descr>
- This is the <b>phonebook</b> of the
- <a href="http://acme.org">ACME</a> corporation.
- </descr>
- <entry>
- <name>Burak Emir</name>
- <phone where="work">+41 21 693 68 67</phone>
- </entry>
- </phonebook>;
-
- def main(args: Array[String]) =
- Console.println( labPhoneBook )
-
-}
diff --git a/docs/examples/xml/phonebook/phonebook2.scala b/docs/examples/xml/phonebook/phonebook2.scala
deleted file mode 100644
index 2a708da..0000000
--- a/docs/examples/xml/phonebook/phonebook2.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/* examples/xml/phonebook/phonebook2.scala */
-package phonebook;
-
-object phonebook2 {
-
- import scala.xml.Node
-
- /** adds an entry to a phonebook */
- def add( p: Node, newEntry: Node ): Node = p match {
-
- case <phonebook>{ ch @ _* }</phonebook> =>
-
- <phonebook>{ ch }{ newEntry }</phonebook>
- }
-
- val pb2 =
- add( phonebook1.labPhoneBook,
- <entry>
- <name>Kim</name>
- <phone where="work">+41 21 111 11 11</phone>
- </entry> );
-
- def main( args: Array[String] ) =
- Console.println( pb2 )
-}
diff --git a/docs/examples/xml/phonebook/phonebook3.scala b/docs/examples/xml/phonebook/phonebook3.scala
deleted file mode 100644
index 12f2dea..0000000
--- a/docs/examples/xml/phonebook/phonebook3.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-package phonebook;
-
-object phonebook3 {
-
- import scala.xml.{Elem, Node, Text} ;
- import scala.xml.PrettyPrinter ;
- import Node.NoAttributes ;
-
- /* this method "changes" (returns an updated copy) of the phonebook when the
- * entry for Name exists. If it has an attribute "where" whose value is equal to the
- * parameter Where, it is changed, otherwise, it is added.
- */
- def change ( phonebook:Node, Name:String, Where:String, newPhone:String ) = {
-
- /** this nested function walks through tree, and returns an updated copy of it */
- def copyOrChange ( ch: Iterator[Node] ) = {
-
- import xml.Utility.{trim,trimProper} //removes whitespace nodes, which are annoying in matches
-
- for( val c <- ch ) yield
- trimProper(c) match {
-
- // if the node is the particular entry we are looking for, return an updated copy
-
- case x @ <entry><name>{ Text(Name) }</name>{ ch1 @ _* }</entry> =>
-
- var updated = false;
- val ch2 = for(c <- ch1) yield c match { // does it have the phone number?
-
- case y @ <phone>{ _* }</phone> if y \ "@where" == Where =>
- updated = true
- <phone where={ Where }>{ newPhone }</phone>
-
- case y => y
-
- }
- if( !updated ) { // no, so we add as first entry
-
- <entry>
- <name>{ Name }</name>
- <phone where={ Where }>{ newPhone }</phone>
- { ch1 }
- </entry>
-
- } else { // yes, and we changed it as we should
-
- <entry>
- { ch2 }
- </entry>
-
- }
- // end case x @ <entry>...
-
- // other entries are copied without changing them
-
- case x =>
- x
-
- }
- } ; // for ... yield ... returns an Iterator[Node]
-
- // decompose phonebook, apply updates
- phonebook match {
- case <phonebook>{ ch @ _* }</phonebook> =>
- <phonebook>{ copyOrChange( ch.iterator ) }</phonebook>
- }
-
- }
-
- val pb2 =
- change( phonebook1.labPhoneBook, "John", "work", "+41 55 555 55 55" );
-
- val pp = new PrettyPrinter( 80, 5 );
-
- def main( args:Array[String] ) = {
- Console.println("---before---");
- Console.println( pp.format( phonebook1.labPhoneBook ));
- Console.println("---after---");
- Console.println( pp.format( pb2 ));
- }
-}
diff --git a/docs/examples/xml/phonebook/verboseBook.scala b/docs/examples/xml/phonebook/verboseBook.scala
deleted file mode 100644
index 2dcb155..0000000
--- a/docs/examples/xml/phonebook/verboseBook.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/* examples/xml/phonebook/verboseBook.scala */
-package phonebook
-
-object verboseBook {
-
- import scala.xml.{ UnprefixedAttribute, Elem, Node, Null, Text, TopScope }
-
- val pbookVerbose =
- Elem(null, "phonebook", Null, TopScope,
- Elem(null, "descr", Null, TopScope,
- Text("This is a "),
- Elem(null, "b", Null, TopScope, Text("sample")),
- Text("description")
- ),
- Elem(null, "entry", Null, TopScope,
- Elem(null, "name", Null, TopScope, Text("Burak Emir")),
- Elem(null, "phone", new UnprefixedAttribute("where","work", Null), TopScope,
- Text("+41 21 693 68 67"))
- )
- )
-
- def main(args: Array[String]) =
- Console.println( pbookVerbose )
-}
diff --git a/docs/licenses/apache_android.txt b/docs/licenses/apache_android.txt
deleted file mode 100644
index 00f3396..0000000
--- a/docs/licenses/apache_android.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-Scala includes various example files for Android:
-
-Copyright (c) 2005-2009, The Android Open Source Project
-Copyright (c) 2007, Steven Osborn
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
\ No newline at end of file
diff --git a/docs/licenses/apache_ant.txt b/docs/licenses/apache_ant.txt
deleted file mode 100644
index ac637d7..0000000
--- a/docs/licenses/apache_ant.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-Scala includes Ant as a library needed for build with sbt
-
-Copyright © 1999-2010, The Apache Software Foundation.
- http://ant.apache.org/
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/docs/licenses/apache_jansi.txt b/docs/licenses/apache_jansi.txt
new file mode 100644
index 0000000..067a5a6
--- /dev/null
+++ b/docs/licenses/apache_jansi.txt
@@ -0,0 +1,203 @@
+Scala includes the JLine library, which includes the Jansi library.
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/docs/licenses/bsd_asm.txt b/docs/licenses/bsd_asm.txt
new file mode 100644
index 0000000..8613cd3
--- /dev/null
+++ b/docs/licenses/bsd_asm.txt
@@ -0,0 +1,31 @@
+Scala includes the ASM library.
+
+Copyright (c) 2000-2011 INRIA, France Telecom
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holders nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/docs/licenses/bsd_jline.txt b/docs/licenses/bsd_jline.txt
index 4ac4a37..3e5dba7 100644
--- a/docs/licenses/bsd_jline.txt
+++ b/docs/licenses/bsd_jline.txt
@@ -1,4 +1,4 @@
-Scala includes the jLine library:
+Scala includes the JLine library:
Copyright (c) 2002-2006, Marc Prud'hommeaux <mwp1 at cornell.edu>
All rights reserved.
diff --git a/docs/svn-to-sha1-map.txt b/docs/svn-to-sha1-map.txt
new file mode 100644
index 0000000..e192ac2
--- /dev/null
+++ b/docs/svn-to-sha1-map.txt
@@ -0,0 +1,14907 @@
+r216 e566ca34a3
+r217 33d6e170c9
+r218 4177daab2f
+r219 073294fbba
+r220 23d2bfbeb2
+r221 fd3f10df3c
+r222 21b147f7ca
+r223 51f6f363f0
+r224 0ef73bcf85
+r225 413b4edac3
+r226 71da7497b0
+r227 8001992607
+r228 faca8cb93f
+r229 4bb5759c29
+r230 bf9a101fb5
+r231 7abd4f84e2
+r232 04e7b8d053
+r233 672f970631
+r234 48e7aa8296
+r235 934da996ba
+r236 1b970f6fb4
+r237 1af5e67569
+r238 20f7e75afe
+r239 19470c9c41
+r240 5253396420
+r241 a1f09f8344
+r242 9ed4c257ab
+r243 1726bf7568
+r244 df427a25f1
+r245 bd7715e8dd
+r246 85c1f5afc3
+r247 ae4ce8d3c4
+r248 e0b8cd4966
+r249 517c132d72
+r250 d95d9cb156
+r251 f7f0da0fd1
+r252 11450dbc4f
+r253 6cb8bc84c9
+r254 8ab0ae13ce
+r255 5f531ab2e6
+r256 66ca81e66f
+r257 ceb16f7fea
+r258 7d1e4e92ca
+r259 ee984f7f47
+r260 6ea3ab4665
+r261 325edcd705
+r262 b63203c5b5
+r263 b8509a08f1
+r264 affdf7ee9c
+r265 ee273f5e73
+r266 eac21ad76d
+r267 de0a87e4a0
+r268 77ef6d4279
+r269 bf1f3aa029
+r270 7e7310ca12
+r271 942bac76c3
+r272 7a1fdc1453
+r273 e5c5cc620d
+r274 2fc8c8dc20
+r275 17bd66e3cf
+r276 f9517d6754
+r277 2b83d80577
+r278 0aa5a94bb6
+r279 7394e750cb
+r280 af8181e6b3
+r281 168da72d52
+r282 1b4875af97
+r283 dc22952ef4
+r284 2c49076945
+r285 6f6ef48204
+r286 68fabb7cc6
+r287 685a3ccd27
+r288 55c2ee3d49
+r289 ee9191bbf0
+r290 c00e8c765a
+r291 bde5d21715
+r292 0b68bd30b1
+r293 5d47aa2f77
+r294 b81d58dbc3
+r295 6b2fcfb659
+r296 89161f84fd
+r297 4c58302ea3
+r298 3efc6463c1
+r299 0d9486124a
+r300 3c1b85f91e
+r301 b5a8069651
+r302 83e1bd9b50
+r303 ddfa3561ca
+r304 d316462efa
+r305 9454221e70
+r306 647a30b9bf
+r307 6a4a9f9e93
+r308 e1fb3fb655
+r309
+r310 6749e5dd65
+r311 fe773c088d
+r312 6290560c08
+r313 1be73bee0e
+r314 e8b06e776b
+r315 4cd3c13b5d
+r316 99565a58dd
+r317 6f00b2f558
+r318 7d4e995581
+r319 1d2a33a1c2
+r320 fe9d7cc9ec
+r321 de976b2afa
+r322 95a5ffa201
+r323 9700a2088f
+r324 9427388e5a
+r325 e5583b7c11
+r326 fc497536ed
+r327 91c9a415e3
+r328 1fb1bf6d27
+r329 208bd5ee9e
+r330 d382fa3fa4
+r331 f119eaa798
+r332 7732779b26
+r333 20813b9555
+r334 c92e218894
+r335 e9e6e2ee0d
+r336 6bd6a0b409
+r337 59ed04e4f2
+r338 f5c16175c8
+r339 1956c53007
+r340 2afca5bd49
+r341 bfe8564103
+r342 013290fbda
+r343 65b8549607
+r344 c5ffb069fa
+r345 4a44cf6531
+r346 3d7e4fa518
+r347 a005880219
+r348 8503fe1a88
+r349 f00a69459a
+r350 dc5897f483
+r351 efa9d346d4
+r352 c371d05bd6
+r353 37666f9377
+r354 675b4262a2
+r355 2522593cfd
+r356 bcc3899778
+r357 a16dd265fd
+r358 65f127aaa2
+r359 0c3c430ecd
+r360 ca3af56fc2
+r361 bb0968e953
+r362 aa82c43f10
+r363 d0e2fb4b34
+r364 67b84045bf
+r365 3ef8b49d5e
+r366 b2410c68a9
+r367 efeadee8bb
+r368 2666bf0515
+r369 6a6d53bb15
+r370 a275c7c9fa
+r371 0c12c1623d
+r372 de6d589d7f
+r373 0e938416e8
+r374 b1276c1eca
+r375 a6e2444478
+r376 4d43c508f3
+r377 be7a96e1b5
+r378 14bc0c4f0d
+r379 aac15cfe1c
+r380 2531b91feb
+r381 ce0cb58ff3
+r382 1fb5a195b5
+r383 d5da7d9aa5
+r384 b5308c3f44
+r385 3dd969e98d
+r386 c3ad24e873
+r387 7dcbfdfdf1
+r388 9447d90bd7
+r389 ace3aba1de
+r390 2ad302331f
+r391 3fc1840211
+r392 c773be407e
+r393 0318d97b8c
+r394 66046dcef9
+r395 32920909df
+r396 9046cab361
+r397 b1f3fad210
+r398 83ae0d91c2
+r399 aecf76e848
+r400 6cdcb93df4
+r401 7a553aba4c
+r402 453461f798
+r403 86beea21be
+r404 0f07bf588c
+r405 eab692bf1f
+r406 e2a4a9dff4
+r407 78d30c2813
+r408 28eec741b3
+r409 be91eb10bc
+r410 b6c9458943
+r411 7ba32e7eef
+r412 ff7d11e0c1
+r413 0bc479de95
+r414 d7bb5a3038
+r415 974cf85afb
+r416 9ab44e5b8c
+r417 b094b0ef63
+r418 fafd175ca9
+r419 7254471b0b
+r420 2142b86ece
+r421 2dc20eb9c8
+r422 ad60428ffd
+r423 8246e726ae
+r424 00e8b20d83
+r425 b078b78ebd
+r426 766aece314
+r427 6656a7bed7
+r428 32d7050253
+r429 e9314e4358
+r430 2301c181a8
+r431 1501b629e8
+r432 76466c44df
+r433 0f9346336d
+r434 9e6cc7fa40
+r435 d6cc02f92d
+r436 fa5c556780
+r437 38ec9ea7d1
+r438 6e1b224b20
+r439 1faf3fbd77
+r440 8e1ff11b1c
+r441 3d3fae031a
+r442 a3cceb2ddf
+r443 b8ae1b5fd8
+r444 7c50acd7bc
+r445 66ce41098c
+r446 4147525455
+r447 ab6e0b35fe
+r448 b6568d57a4
+r449
+r450 5d7eda1d9c
+r451 449b38c265
+r452 37acb0f1dd
+r453 8a4a9a9809
+r454 b4b5355b6b
+r455 23f2da8615
+r456 68e734d000
+r457 1a44c882dc
+r458 f4a43858e8
+r459 188dd82f86
+r460 cc86341145
+r461 2c9a95dbe5
+r462 70dfa262b3
+r463 684a5d4d0b
+r464 c9d34467cd
+r465 82cd3d4d23
+r466 7b6238d54b
+r467 16e81343ba
+r468 6f805930c9
+r469 1c07a3cfef
+r470 cee76a7329
+r471 341cb486e8
+r472 4244c4f10a
+r473 9bf8922877
+r474 b4d9609411
+r475 0eb7d01302
+r476 579d815bfa
+r477 9a4819a033
+r478 9d8a37ee5c
+r479 bca74f068d
+r480 4b69de24fd
+r481 3b822a8f07
+r482 e4adf08ce2
+r483 1cbb1ee373
+r484 8d16dc3a98
+r485 78b2ff42fc
+r486 22c472cff5
+r487 6dfc1be517
+r488 818eca7c39
+r489 acd1b06b4e
+r490 19458ed8e2
+r491 bbea05c3f7
+r492 31b5dceeb1
+r493 3307717e4e
+r494 ed5dbe8475
+r495 60218d9ef8
+r496 ed86cb4106
+r497 955981999c
+r498 0cc202c85b
+r499 db1ad8a9e0
+r500 820c818d4e
+r501 611eb370fa
+r502 c6ce203b92
+r503 890f4fc1b3
+r504 374fe54282
+r505 58cad3d1ce
+r506 04577625cb
+r507 0d66e06ff4
+r508 dd1df4c41e
+r509 7452fd4769
+r510 b68d6aba80
+r511 73cf6d4754
+r512 4afc1d1c27
+r513 c995209f7e
+r514 6440a65cbe
+r515 f449cd95e9
+r516 3be5b4361a
+r517 644e5bdf87
+r518 1bb9e69a30
+r519 6a7bec093b
+r520 5e7f6d941d
+r521 0947087d29
+r522 940c7755d3
+r523 e6ebbe6ab4
+r524 746cf42fd3
+r525 6326a9e379
+r526 dab45b752f
+r527 d891fd9474
+r528 394aef1a7f
+r529 5f8e5c235e
+r530 b80dcfe38a
+r531 1c311b1828
+r532 54952ba17e
+r533 787d4bb9db
+r534 e2a09f258a
+r535 0aa9fd3d2e
+r536 d4992a09ec
+r537 61150fa8ae
+r538 1a2828c106
+r539 4d1b718b13
+r540 8b716cefd3
+r541 7722c1b044
+r542 26caccbea4
+r543 51627d9425
+r544 e0cfd0011b
+r545 856b1b4355
+r546 bbd53b7ccb
+r547 9cfe96647b
+r548 e1dcdf1a7b
+r549 b5a3e6b734
+r550 e189c7bacc
+r551 5c24c95533
+r552 2ed373a5c3
+r553 5ee5a01aad
+r554 277c7242d0
+r555 c33226ad82
+r556 85c73ba918
+r557 efd06d74f1
+r558 9ba1d49533
+r559 379a56669b
+r560 19da03df20
+r561 a8f9240799
+r562 5c510296ee
+r563 5092735baa
+r564 7104fcb442
+r565 15aeb5fd48
+r566 d8284d61f2
+r567 f115eda9c9
+r568 d7c9373e85
+r569 fee56a7201
+r570 d91518092e
+r571 868b0f94f0
+r572 fcae0e84b5
+r573 3ceaf4b02d
+r574 a3d34c650a
+r575 bfcbdb5f90
+r576 e360fb4095
+r577 6ffa9f1636
+r578 5e49a57244
+r579 7acb9ba822
+r580 a7846c5f8e
+r581 2ff2f6e029
+r582 00699895d9
+r583 fae0e93a6a
+r584 a715104520
+r585 eb4833b12e
+r586 0c9d5eb8c3
+r587 5557a63792
+r588 009ca753a5
+r589 1bcbe1244a
+r590 53e9038cd0
+r591 6bb5add14b
+r592 44eba4f61b
+r593 03a24d7345
+r594 cee6c10b74
+r595 cc931f87ac
+r596 8bfdf09fe8
+r597 6b71c4960a
+r598 8f51cb5a38
+r599 0aa5643808
+r600 e38818336a
+r601 793f61a0a2
+r602 dd65ae6e73
+r603 54f148e1ee
+r604 1e7ea9f9b7
+r605 d872259f55
+r606 2c230e23ac
+r607 46b0b6bad4
+r608 79c7c73561
+r609 217d42413b
+r610 4503263fda
+r611 e51cf921ec
+r612 c8bea29c67
+r613 64861914be
+r614 bcad96f5ad
+r615 f9534fc128
+r616 09402976e7
+r617 8ed70b27d7
+r618 e403c76450
+r619 272e832a97
+r620 d28eae9101
+r621 4d64e59a55
+r622 660d5315db
+r623 1e6f940bd9
+r624 46034e790c
+r625 45d391977c
+r626 8bde4b7721
+r627 9a6a334729
+r628 609593beeb
+r629 d5d9d56f49
+r630 6208a4f530
+r631 faf079fc79
+r632 84de17250f
+r633 62df669297
+r634 4d51076c62
+r635 17a647a740
+r636 d20bbb416e
+r637 bd60b6057c
+r638 2b05eb0cc4
+r639 c3feacc621
+r640 63815a24d6
+r641 2a5b63b2a0
+r642 e644be0706
+r643 fd4d0f8fe9
+r644 a5aa3c8f66
+r645 28cbd95ca3
+r646 3599b6d086
+r647 e1cdc3fe30
+r648 f7308846bb
+r649 791909eab2
+r650 3ab93af939
+r651 336eabe34a
+r652 544dd4f57e
+r653 8e76d1283e
+r654 c397f80f8b
+r655 06238329c5
+r656 3f3e6accb7
+r657 4d1dfaffed
+r658 fa72586d0b
+r659 e0d3451834
+r660 21f24de326
+r661 81a8fae3a6
+r662 a9e68909d6
+r663 d02f69f602
+r664 a5d85a9e96
+r665 7871c81399
+r666 42fe3b7da7
+r667 49a63cbfb4
+r668 f3aeae44c2
+r669 0478f7197f
+r670 88143accb0
+r671 014a47d565
+r672 e8dc487e70
+r673 99becce923
+r674 3db933967d
+r675 7099e17fb2
+r676 f6ca275318
+r677 723503c1c8
+r678 6f062616e2
+r679 51b150938e
+r680 ce9a82d638
+r681 1b110634b1
+r682 2d62f04fb4
+r683 89fb9fd615
+r684 bfe4d0dff9
+r685 ae221d1e85
+r686 dfb6cb93cc
+r687 932bc98741
+r688 b9bd1fbde7
+r689 bd6ee62da0
+r690 5571c34f79
+r691 bbb471bf1a
+r692 52874b143e
+r693 2b22c5eb6a
+r694 c7d24b1e47
+r695 23d5c3f804
+r696 135fc297cb
+r697 5eecad0f93
+r698 ceda0125a9
+r699 92e745e537
+r700 bd6c059264
+r701 47fbf9d2e9
+r702 b3896b2e39
+r703 2a6f701d05
+r704 a575f59c3b
+r705 16b7be07c6
+r706 4d8caab2e6
+r707 de98513298
+r708 9de54c7671
+r709 fdd7ca356b
+r710 d5f8a13cd7
+r711 b9ff893fdf
+r712 7f08642a0a
+r713 c55bc91171
+r714 ca14451a52
+r715 74be7e83e5
+r716 974fe6069d
+r717 6be0c19d9e
+r718 2c2c1a4e17
+r719 b0c97ff489
+r720 e15b1ae55a
+r721 c7b62d7913
+r722 9b2e927cd8
+r723 4686a2d6f6
+r724 bdc7125ab5
+r725 89cec93a5d
+r726 4071a56256
+r727 3096d1674f
+r728 b4cfef2557
+r729 9c66a1e5b6
+r730 7da0997328
+r731 911a4a65f1
+r732 969e41ca39
+r733 2300aac76a
+r734 f7f1500768
+r735 f5f7f30a43
+r736 7b6a46d75a
+r737 3efb3a279e
+r738 259221ca99
+r739 82bedc921b
+r740 fb71c50b8f
+r741 8f1264daa9
+r742 7eda0b9cfc
+r743 a766b31106
+r744 22d0a607cd
+r745 2cc25288dd
+r746 d62458f59a
+r747 703ab37f59
+r748 5e26ba92f6
+r749 fa4d10ee2b
+r750 be99001f72
+r751 ace7fee429
+r752 15321b1641
+r753 edce97ab20
+r754 60fe35a72b
+r755 639e009fd9
+r756 47843c835d
+r757 c76223a9a2
+r758 ba71b42902
+r759 9bad87da03
+r760 5745978304
+r761 cb5e82737f
+r762 3fb5e2ade5
+r763 336e1acd4f
+r764 416062aa91
+r765 6af6dae0df
+r766 3a593c580c
+r767 c481e95b2f
+r768 be858b38fe
+r769 6a6b914be9
+r770 8290fa5c45
+r771 15e29208a4
+r772 469714eafe
+r773 528c521f9d
+r774 d7d26ea960
+r775 1fbc4f6561
+r776 a55f14b464
+r777 34cdd069a1
+r778 c055dc83e3
+r779 d8aceb9d8d
+r780 24259833eb
+r781 2fc1837fcc
+r782 39f22e7351
+r783 62fc094c20
+r784 914d29f889
+r785 da93e36d8f
+r786 5c348d28da
+r787 9dc6d5fd22
+r788 ada273a1ca
+r789 e06aeaebbd
+r790 329c70cae6
+r791 f69094bc71
+r792 ca1cba5b06
+r793 1ab2519887
+r794 dfcf91626f
+r795 bacea50d7a
+r796 43a8b154ed
+r797 84af8bf38d
+r798 a00409bd98
+r799 64621b6363
+r800 4269eb620a
+r801 ee7107b4ab
+r802 b23289c5da
+r803 52e2b941b1
+r804 46517a47bc
+r805 05deaeec74
+r806 8cfce062de
+r807 aa579de50f
+r808 8044852c6f
+r809 6533142379
+r810 be4f8d7916
+r811 97e75ddc91
+r812 9c9dfb24a4
+r813 ba5d59e9f6
+r814 44ca12f55b
+r815 0494d60bfd
+r816 da838048c9
+r817 152934349f
+r818 a495f88f49
+r819 c4335d55bc
+r820 85d4773be7
+r821 1e180e451c
+r822 5021943900
+r823 099c17cf13
+r824 2fd2dfeeb3
+r825 563e00ffc7
+r826 6734a441e8
+r827 1b049a090b
+r828 c75bafbbbc
+r829 537442e3dc
+r830 ead39262eb
+r831 ecc6226a4d
+r832 d647b1e479
+r833 4a809abfa5
+r834 f770cdac70
+r835 b74ad75078
+r836 7dc050f17d
+r837 11622662c8
+r838 5d1b310ad7
+r839 e99f07aac3
+r840 23f124d305
+r841 0e1e141430
+r842 c7392f4c45
+r843 82f0cb3c2c
+r844 5f6f1f7aa7
+r845 0df5ec7521
+r846 1583a2afb2
+r847 e7609c9d0e
+r848 88cb90bf6d
+r849 8edcd12a55
+r850 cefb352f0f
+r851 7454e3a009
+r852 072b5480f9
+r853 ec5989695e
+r854 9ee7224289
+r855 184e92e447
+r856 d82f770754
+r857 70ae99e7ea
+r858 f29ec2158b
+r859 3102d7d40f
+r860 9753961477
+r861 d8d2c7f502
+r862 c2c93468eb
+r863 0720197b32
+r864 cc296d5b5c
+r865 b8f86bb95c
+r866 8b6079a283
+r867 ee836661ce
+r868 1f97bdd390
+r869 a424426552
+r870 9114fea991
+r871 68c5a76acb
+r872 ce103c2f95
+r873 6b4b085c7c
+r874 efd426fe23
+r875 a8722061ee
+r876 6a0cdb5821
+r877 4826669acc
+r878 1066a7cf01
+r879 4827da4894
+r880 b80391a805
+r881 f1a6676465
+r882 b95c08c879
+r883 0145ce34b5
+r884 06a671299a
+r885 c7f30e40c0
+r886 5a0ab443e5
+r887 0e53b38aed
+r888 ecd251a20e
+r889 f03a35b6c3
+r890 1a094d97cb
+r891 ff386d78cf
+r892 2cc211bc73
+r893 ec3b6d9bbc
+r894 ad92319573
+r895 478c334b56
+r896 5bcdedd615
+r897 a461a7982b
+r898 f0e3edad2c
+r899 dc0594eee9
+r900 ba84abf44d
+r901 b814f5d2ce
+r902 3084ef6b79
+r903 26388aa8b6
+r904 d5f5419249
+r905 a6389e9170
+r906 a0361ef7c1
+r907 6958133baa
+r908 ddf59687e3
+r909 55424e716c
+r910 ee7a23f3fb
+r911 05d7f7c3b5
+r912 94cc5fb398
+r913 bf8fd4c5b3
+r914 00abd39f96
+r915 e2a375174c
+r916 8e9836f531
+r917 38b5376903
+r918 68f54db833
+r919 335a4e9588
+r920 3ef2334f34
+r921 a4392e6d75
+r922 fe7e260075
+r923 1481659b35
+r924 c5f1b804dd
+r925 0d359a148e
+r926 3c256cfb74
+r927 ad4c87c5af
+r928 4912b7dd53
+r929 1554123d30
+r930 48dbc5e78c
+r931 4b1f4936e2
+r932 55ebf641a9
+r933 006b8ed3a1
+r934 5615207c16
+r935 9d78319bec
+r936 aa4085f651
+r937 35173713d1
+r938 1d24dc9093
+r939 d2df7c9c9a
+r940 b7f7cddf7c
+r941 d58dc0f186
+r942 3edab36b89
+r943 a72fdbec0d
+r944 e7e6cc4243
+r945 e5770ffd30
+r946 4bd86410e4
+r947 8eead5dedd
+r948 6ad472567e
+r949 639f108441
+r950 fedbced652
+r951 2aec262f78
+r952 1ec3e2c664
+r953 981a0d142c
+r954 bf64b80e8e
+r955 df8999d77a
+r956 57830a98fc
+r957 76f378175a
+r958 dd34727fc7
+r959 a9d2d11892
+r960 d4555e92d1
+r961 933de9aa03
+r962 04e4c7ee18
+r963 c3a8d9f143
+r964 b5f8932a9b
+r965 62656923de
+r966 428dce2175
+r967 720e381fd8
+r968 32d99afd50
+r969 4bcea1cf5c
+r970 209dd1ab44
+r971 05350a4a9d
+r972 2f2e78b7c1
+r973 1203341cb2
+r974 916bc3b9cd
+r975 3f3eab9278
+r976 796f281527
+r977 c2b559a9b2
+r978 22e7c20e90
+r979 af52fe5e14
+r980 4e426a6298
+r981 4df9f3a89b
+r982 09ad15e15a
+r983 808974e349
+r984 0e5eaf6fbd
+r985 eca1e7ffa8
+r986 6139351009
+r987 bdf7315e7f
+r988 37d9d1b979
+r989 7a4d11c997
+r990 3b96193f16
+r991 7c77d7dcf6
+r992 6cef26d980
+r993 8b54bfd4f6
+r994 c9f7644026
+r995 c64fa43afa
+r996 87d3cc2997
+r997 dbda2fc17d
+r998 c637a7f0de
+r999 2afcc06484
+r1000 0ef074e5fb
+r1001 f01c39c755
+r1002 bc36095d0e
+r1003 77bbd22d07
+r1004 cda6f17ef0
+r1005 58ed80c61d
+r1006 319090d57b
+r1007 ca9f4fbb7b
+r1008 6802b7f420
+r1009 47326f67ee
+r1010 8e54f08fa4
+r1011 195efaee57
+r1012 a943d3cf95
+r1013 1935d7178d
+r1014 e96d1be7b6
+r1015 e31cc564d5
+r1016 3ad0a509fc
+r1017 709b56fe8a
+r1018 c66ad962e6
+r1019 becb3c22d6
+r1020 1805e699a0
+r1021 ae9eeb9372
+r1022 e90fe22dc3
+r1023 05b3783bba
+r1024 7477cf8c1c
+r1025 b5b28969c5
+r1026 be547c5450
+r1027 6391473b0d
+r1028 697691c3b3
+r1029 6f65660583
+r1030 c0a66221a6
+r1031 1be5d460df
+r1032 8b025da064
+r1033 3279825ba3
+r1034 13885930be
+r1035 42ebd9cb4c
+r1036 f56a073205
+r1037 177dba42d5
+r1038 98fbeebaa5
+r1039 be1376dcac
+r1040 57b45faedf
+r1041 28db3bba9b
+r1042 da378d9a6d
+r1043 40eddc459e
+r1044 b82944e86b
+r1045 b3ad694a43
+r1046 36fed7ddbb
+r1047 308cd9b2f6
+r1048 bb98463dc1
+r1049 1277a5e94e
+r1050 db2914e723
+r1051 81dbbfa8d6
+r1052 280d025c7e
+r1053 9aaa79cdba
+r1054 0a0595a1c7
+r1055 08ba2872c4
+r1056 8ddba4dded
+r1057 e00deae3e5
+r1058 a5fdf3ec18
+r1059 316f425492
+r1060 7ccd1ed473
+r1061 b0b2440892
+r1062 0c5b3ad66e
+r1063 8f1ab98b77
+r1064 d4945a881b
+r1065 086e26c6bb
+r1066 14143d5b3e
+r1067 0715852a2e
+r1068 71dba047af
+r1069 52afd6d1da
+r1070 9efa993106
+r1071 9500f0c78c
+r1072 85a93fa145
+r1073 5a64e1706c
+r1074 5f77ce3a39
+r1075 30309b2ba2
+r1076 e9c280e68e
+r1077 323f6c8961
+r1078 5df0cb2c74
+r1079 511713e0f4
+r1080 c1bcad868c
+r1081 bb9cfcedf1
+r1082 7afa1692c9
+r1083 a56f482825
+r1084 336bb52e43
+r1085 7c0c7a1f49
+r1086 def6806d93
+r1087 9b09c3e8d9
+r1088 a146e0762d
+r1089 016c1d51aa
+r1090 1651493c7e
+r1091 74d350a2ba
+r1092 e570d189e0
+r1093 4ff4623f2e
+r1094 22f3db43a7
+r1095 6d4a913e0f
+r1096 4c8016c62b
+r1097 a6a3c78743
+r1098 53efe4c369
+r1099 b08af12a36
+r1100 aaf811cc09
+r1101 34c22f876f
+r1102 09797356a0
+r1103 640680faba
+r1104 b68cc17788
+r1105 d75d9c0d07
+r1106 be905bb7cb
+r1107 e52bd69509
+r1108 673eec6972
+r1109 ac54718edb
+r1110 7dc9bd0f1c
+r1111 4fdf2ee3ca
+r1112 63c9056e69
+r1113 fc4121d4cc
+r1114 71557bc2da
+r1115 c5d9799308
+r1116 69d94c439c
+r1117 d73289451b
+r1118 e39c6c0e62
+r1119 056a15a7e8
+r1120 60ec6920d9
+r1121 40e05d7679
+r1122 115b836500
+r1123 6b56b4b590
+r1124 59f320de1d
+r1125 b7378219e2
+r1126 ed86a8f6b3
+r1127 9877ad4b2c
+r1128 ef53216099
+r1129 011db07a5b
+r1130 20410a6d32
+r1131 5107585f17
+r1132 3765cc0c11
+r1133 2c9c03c154
+r1134 86e5e65288
+r1135 4d18dc9f7d
+r1136 c6a3849966
+r1137 4b03e0bc46
+r1138 30e3b26eee
+r1139 9b9660252e
+r1140 3016ae3a59
+r1141 90b4108f45
+r1142 c1c06996b1
+r1143 41e6216426
+r1144 5850ec1c8b
+r1145 2d01fbe908
+r1146 3a4c181e03
+r1147 8684be678d
+r1148 728ab1f19f
+r1149 be21ca1267
+r1150 03449ed20a
+r1151 8c0786c2f1
+r1152 97b01f58e9
+r1153 5a67796e02
+r1154 e41aa28a33
+r1155 8ccfe152e0
+r1156 9b9ce37073
+r1157 ea1bcd09ef
+r1158 f014b416aa
+r1159 5cbecc3b89
+r1160 863a5f0add
+r1161 bb672e7f07
+r1162 b25aa75bcb
+r1163 01b58f124d
+r1164 0502ed783e
+r1165 bc7faf76c7
+r1166 6fa7aaec76
+r1167 9c38388db3
+r1168 5c9050c6b5
+r1169 4997e2ee05
+r1170 a6a049520a
+r1171 a045106086
+r1172 8c0290713c
+r1173 d27a593dc1
+r1174 8f8b0efb39
+r1175 8a3fd993d8
+r1176 d809159c0f
+r1177 aa4c7a9ca2
+r1178 8dc5a3d907
+r1179 45be55750d
+r1180 57fdd41099
+r1181 e1d1b2d9b8
+r1182 cd257c40d1
+r1183 36a3ab03ef
+r1184 f0398407c7
+r1185 4019f76676
+r1186 e73d2649b1
+r1187 62ea09a680
+r1188 3db90fcd88
+r1189 154d2e27a1
+r1190 59f37b3fec
+r1191 d0da6a1fd0
+r1192 7e214f1547
+r1193 57e6418abf
+r1194 e07f1d2146
+r1195 044392dffe
+r1196 69e9c38b4f
+r1197 34ddfde6bd
+r1198 3efa683e96
+r1199 7cef1c5c75
+r1200 17ec08ec2f
+r1201 f1d35e8588
+r1202 7dc777e619
+r1203 912a3dcbea
+r1204 14cf526996
+r1205 c513a75367
+r1206 5a3dead77f
+r1207 a89d27dea0
+r1208 1732d4ec94
+r1209 7a1154824c
+r1210 6150a5b04e
+r1211 5ea9e55829
+r1212 dd32ecc6bd
+r1213 7c3f5b1123
+r1214 5893d5b55b
+r1215 6e5ee79778
+r1216 6bd09d1151
+r1217 9ed9970ee4
+r1218 cecd6833be
+r1219 fe0cd4ccf9
+r1220 50cfa1ce61
+r1221 32f01ba87a
+r1222 eda495f66d
+r1223 20e31b0d76
+r1224 ca32e4de8e
+r1225 b515ce4596
+r1226 de98c6562a
+r1227 32cef67832
+r1228 d24f7cda21
+r1229 abd8bae0a2
+r1230 d61afba2c5
+r1231 7cd27574a6
+r1232 562f1f62e3
+r1233 da74821b08
+r1234 183d279b2c
+r1235 9d675361a3
+r1236 a3654375f6
+r1237 101992b2d7
+r1238 1bbbb4c44f
+r1239 b56a6d699c
+r1240 5d58eac358
+r1241 ab3ad145b7
+r1242 43eaf5cb64
+r1243 f37b3d25f8
+r1244 5aefaf0289
+r1245 f91ce5d110
+r1246 71ef5f593c
+r1247 72e4181a21
+r1248 417db2c895
+r1249 c635da58a6
+r1250 f92d38c415
+r1251 df43fa3f64
+r1252 fb39bdf496
+r1253 396a60a22c
+r1254 2607570861
+r1255 4678d29bef
+r1256 c99331efe7
+r1257 cce804c34f
+r1258 5fdf691280
+r1259 73b8c5b039
+r1260 83b0601c69
+r1261 8dbaa5dfc0
+r1262 0386aaf8b9
+r1263 e7d85e45d6
+r1264 1cd03ac6fc
+r1265 0e43757819
+r1266 c4e1967d6c
+r1267 87210b8f10
+r1268 b7dd9ed9a2
+r1269 73e8019358
+r1270 4cdff61887
+r1271 eae9ff36d8
+r1272 1832dd1036
+r1273 8222cb50fb
+r1274 a6b1f467d9
+r1275 596976749d
+r1276 1fd3a2beb2
+r1277 16f6896733
+r1278 67a3af7360
+r1279 8497662b95
+r1280 b0a6581fe6
+r1281 a79210890a
+r1282 10842143de
+r1283 da5c361c7a
+r1284 8341c5c36e
+r1285 7b1200a4f4
+r1286 b227b27211
+r1287 d1d13f56f1
+r1288 83f7f3a758
+r1289 14b1a37788
+r1290 71cd6f0484
+r1291 1203bc5ed8
+r1292 261f125a04
+r1293 a6cccc16e3
+r1294 31e4cd7266
+r1295 062981ee6a
+r1296 ef8c355694
+r1297 048a89ecb9
+r1298 20aa76ad3a
+r1299 54886f8012
+r1300 8a94b49aab
+r1301 d50c39952e
+r1302 cc29221639
+r1303 eb893b68fa
+r1304 633f7316ae
+r1305 f0cf135c58
+r1306 20543e1606
+r1307 dc2dd01c6d
+r1308 e7e41951af
+r1309 b41bb0cfaa
+r1310 1d4933eab0
+r1311 b0a00e8558
+r1312 40fde0de91
+r1313 690d5b8ee1
+r1314 c68f3a0c00
+r1315 8224188368
+r1316 c9f081e345
+r1317 ba17480ab2
+r1318 5a25b6cfc1
+r1319 4f8b58c0ae
+r1320 1cfdffddd1
+r1321 8246648ff1
+r1322 c4e4065bfe
+r1323 6d891c5063
+r1324 c8f4c60282
+r1325 bc25825b42
+r1326 6dbb85aa03
+r1327 7590404f80
+r1328 ca6bfb0f68
+r1329 20b0001740
+r1330 f029f8f1ba
+r1331 904390c640
+r1332 24884fed2f
+r1333 079d579bfe
+r1334 508e62c581
+r1335 c6dafd9c9c
+r1336 c8c10445bf
+r1337 b04a4e1a21
+r1338 93c3bce1fa
+r1339 288ba9925e
+r1340 4c10e8515b
+r1341 80d3a625a7
+r1342 2b1afe846e
+r1343 d7b4fc3e69
+r1344 191ff46a27
+r1345 330db276e6
+r1346 33bb8c9531
+r1347 d36d1e0e4c
+r1348 2b4c3ffd81
+r1349 16058f3be3
+r1350 c040897705
+r1351 d19300beff
+r1352 2549ba1c55
+r1353 7ebf3abe37
+r1354 194a0cfcbf
+r1355 c6bfe08b2e
+r1356 03a8443eea
+r1357 2fd58d0430
+r1358 f69ebea872
+r1359 376b97626f
+r1360 a2bc132e04
+r1361 bbbecb8a61
+r1362 5d5d6d1763
+r1363 65981fc712
+r1364 3cda488d5a
+r1365 07493a2465
+r1366 4409444f49
+r1367 f10b65baef
+r1368 7a9bbd21f0
+r1369 1f02ae1368
+r1370 1ba1b5f0d6
+r1371 cef4819a20
+r1372 03552d1859
+r1373 9ed2cdba69
+r1374 06a5f2627e
+r1375 108c95de63
+r1376 41af0bf85b
+r1377 6ba693de02
+r1378 eb89bf0481
+r1379 10f1c3abfb
+r1380 9cf507cee3
+r1381 cc58ab3a7f
+r1382 e6d8b58497
+r1383 79b7bfc473
+r1384 325b15e759
+r1385 8ac36547ea
+r1386 3c896b4d73
+r1387 2d1a404d9a
+r1388 cdbd9750f4
+r1389 860d5686c0
+r1390 003528200c
+r1391 f548eaa205
+r1392 1fc44135a1
+r1393 3228df8eaf
+r1394 ec46a90f5c
+r1395 0c5225a4af
+r1396 fbb6cebf1d
+r1397 155189bcfa
+r1398 40bdb6bee6
+r1399 627a239ed9
+r1400 fc682c4406
+r1401 9769a4d244
+r1402 a290cbe0a1
+r1403 3cb7eb8fcd
+r1404 7d98030490
+r1405 69d4d1a118
+r1406 513514d066
+r1407 5a7daecfa2
+r1408 a69e4e5995
+r1409 dd1ebac2aa
+r1410 d8a3d0acaa
+r1411 d1746306e4
+r1412 7e8423ed47
+r1413 c52494a7e0
+r1414 af26097134
+r1415 638f6e8e07
+r1416 045f856bac
+r1417 4212f1b8c0
+r1418 5d956bda6b
+r1419 e2b146bbef
+r1420 d107eb40f1
+r1421 7e8533ec42
+r1422 97d8a84895
+r1423 dcf7886f78
+r1424 c85fd22375
+r1425 43c5c82eb9
+r1426 70d78cbfc8
+r1427 a9af998cdc
+r1428 bb6372b1c9
+r1429 129deca8fd
+r1430 139d9a3f87
+r1431 e9a7b01df1
+r1432 78c05c5995
+r1433 0fd76c61fd
+r1434 e60924767e
+r1435 52c7c80485
+r1436 13c7c02fbe
+r1437 151cca035b
+r1438 5600ac92e6
+r1439 3ea157ef07
+r1440 77e079a5e1
+r1441 8395399f4b
+r1442 026c357349
+r1443 636ded2b48
+r1444 9b9e16dd39
+r1445 86451906a5
+r1446 957c42dadf
+r1447 7d2cf8f17d
+r1448 8e10a1c93c
+r1449 86fa7e4536
+r1450 e3aa358f3c
+r1451 e46d223383
+r1452 c015c50dd2
+r1453 2be75c2c42
+r1454 271e180836
+r1455 731b678500
+r1456 3551973214
+r1457 c4b7a33f58
+r1458 0eec3d4087
+r1459 d14fd54e1b
+r1460 239d97850a
+r1461 0f69f89f76
+r1462 37846a9955
+r1463 e7b222d3fa
+r1464 e47e2de37e
+r1465 ba1b334040
+r1466 97ad2ad9fe
+r1467 a5764c4b45
+r1468 9207360ce2
+r1469 66807fa7e2
+r1470 a04578330d
+r1471 606b414ee1
+r1472 3029d91bf2
+r1473 499216593c
+r1474 874773fde6
+r1475 fcbd0e6400
+r1476 0aa1cfd521
+r1477 a6cc836dda
+r1478 bda0fb8228
+r1479 5ff566c77f
+r1480 19f1bccb17
+r1481 f42db99fd1
+r1482 ed300578cc
+r1483 9fae257875
+r1484 3c0b747908
+r1485 33fa93d62b
+r1486 8c482d22eb
+r1487 6e78409268
+r1488 01d4668fc8
+r1489 1b77651f90
+r1490 dc6ec50a08
+r1491 d8af1f7d53
+r1492 5b9b535641
+r1493 c0de8fd882
+r1494 b77cc54fa8
+r1495 8c65092474
+r1496 f7a0696413
+r1497 83737b19d1
+r1498 c8f0a7b6bd
+r1499 409a65421c
+r1500 ec5d770a7c
+r1501 7af685862e
+r1502 51a5386fa3
+r1503 810aefd0aa
+r1504 191c921e2e
+r1505 423ecdde9b
+r1506 d564a5473c
+r1507 156cb20b17
+r1508 d9bddc2fce
+r1509 9b05a390f1
+r1510 4d46f95c8e
+r1511 9638946662
+r1512 eb2f292cf9
+r1513 ff834c078d
+r1514 820f0b7226
+r1515 2b811578d4
+r1516 50fc9d84a0
+r1517 909b51e1da
+r1518 7a10026f29
+r1519 bb0022e6f6
+r1520 dc3fd344db
+r1521 419261187e
+r1522 066d81e7b6
+r1523 561f5efc25
+r1524 7f76c81a3e
+r1525 5d8b5d80bb
+r1526 b66879588f
+r1527 6282d0a5b0
+r1528 179b3f7892
+r1529 3ec4228daf
+r1530 d853b5d4d4
+r1531 807f9e4fb7
+r1532 4b3c76ddc4
+r1533 95ced83e5a
+r1534 49fae7d6e4
+r1535 0ff59624ef
+r1536 b870b4d3c9
+r1537 e2aba2c2ad
+r1538 26f6e93446
+r1539 154770da0b
+r1540 20918420a8
+r1541 14b3e240da
+r1542 fe809d3e73
+r1543 89f87cd020
+r1544 6f759ab9ca
+r1545 dd78e43d8f
+r1546 64d947d0e2
+r1547 7449ae53ec
+r1548 57a845d676
+r1549 615be6cee2
+r1550 f1182273dd
+r1551 d08dff3b18
+r1552 4500aea224
+r1553 d39fa1bb47
+r1554 3c30f6a1e6
+r1555 2d87b80967
+r1556 ae0b5fd298
+r1557 041659f9cc
+r1558 201f7eceea
+r1559 b6ad6a1bc9
+r1560 6ca43bcd97
+r1561 afabca6131
+r1562 fa256a1af8
+r1563 169b9a7ebe
+r1564 c12c3d3856
+r1565 dd6c158469
+r1566 82f735e5d5
+r1567 4f7353b447
+r1568 fba7c6afa2
+r1569 75d0b4a55f
+r1570 9baa6069ce
+r1571 f805b1683f
+r1572 2a1c7b3076
+r1573 84bdc646dd
+r1574 aa4eeeadec
+r1575 8de05b9366
+r1576 5718f84fdd
+r1577 8870ac88ff
+r1578 2052b68d97
+r1579 3338ca09b8
+r1580 4c20ac9650
+r1581 35342050b6
+r1582 84b6d995fd
+r1583 c6a4f7ec60
+r1584 65f0b02c89
+r1585 24c93d6416
+r1586 0e0aa61d20
+r1587 d49b034739
+r1588 f1d658c71e
+r1589 185bb897da
+r1590 ec98152cb2
+r1591 923c969e57
+r1592 0d9f013e96
+r1593 d113a4ca43
+r1594 8a265077a0
+r1595 f70f8574e4
+r1596 3e7a9d63ef
+r1597 51fb00e99f
+r1598 791345238b
+r1599 0dffd904b0
+r1600 041c512b32
+r1601 febb62721c
+r1602 ed28110153
+r1603 9d803bdc8a
+r1604 66077bf0c6
+r1605 8ee55188d8
+r1606 9c45685549
+r1607 55e40e1fdf
+r1608 a54029cbf9
+r1609 c17ef940fd
+r1610 10ce3e7c80
+r1611 dfc5cdeeb7
+r1612 d91729e50c
+r1613 497bfa3ea7
+r1614 1df7849ad7
+r1615 fc5e4bae74
+r1616 e2a6ec40b4
+r1617 cbf2cf2dca
+r1618 da160bfd73
+r1619 9b76838e75
+r1620 b70c49d2cd
+r1621 2de2bfc08e
+r1622 9cd9808b13
+r1623 3e764c63bd
+r1624 1ec30351bf
+r1625 2bb320eee9
+r1626 5dc0be3990
+r1627 fa73acda7c
+r1628 9e75e356d9
+r1629 094b1778ce
+r1630 5328404a62
+r1631 7191c8db6a
+r1632 dcd1796051
+r1633 a87e39db1f
+r1634 774bd9179e
+r1635 cd57b4ea44
+r1636 971ea727e7
+r1637 1726af0c47
+r1638 04e430874f
+r1639 30e1c738b9
+r1640 3242f383e0
+r1641 ecb8e40fb5
+r1642 7e20b9677d
+r1643 110211dfcc
+r1644 785aa26ab6
+r1645 67f1003ff6
+r1646 0f26e2f6ed
+r1647 08e04389de
+r1648 fbfe5ca0ba
+r1649 f7d10e2442
+r1650 339f51f314
+r1651 cc2a5f0399
+r1652 46781834bf
+r1653 f52ca3cc46
+r1654 1f454cd1cb
+r1655 2755e0794f
+r1656 96eb45c701
+r1657 e9b5eabdb5
+r1658 3ba71965ef
+r1659 0432dd179a
+r1660 607e9ec3f1
+r1661 9b3424de03
+r1662 53a5a8b254
+r1663 e006340aeb
+r1664 1a3084b209
+r1665 99b4e7dc35
+r1666 85ecdee41a
+r1667 79d406a6e9
+r1668 a9b7800360
+r1669 a887198e59
+r1670 3a8034f03a
+r1671 9cf2d7a56a
+r1672 fdf807a9fc
+r1673 67d1375a9b
+r1674 c40946712e
+r1675 a25300aed4
+r1676 a544dd4512
+r1677 767fba6cd1
+r1678 2e5258021f
+r1679 2c1ac0cc2a
+r1680 abee72fd55
+r1681 d5488e582a
+r1682 9c16bdcb8e
+r1683
+r1684 8490d8db14
+r1685 dff11cda58
+r1686 a6e102a5a1
+r1687 453e6a6db7
+r1688 d1a6514fb1
+r1689 be83a67054
+r1690 907dd4a4c7
+r1691 724ebb9791
+r1692 17e61a1faa
+r1693 afc36c22f4
+r1694 bbea46f3c3
+r1695 aba90f1964
+r1696 351971e83a
+r1697 82f6be34ee
+r1698 47a3af351e
+r1699 e1e0fa0c7b
+r1700 5fe89984bf
+r1701 a95be0a530
+r1702 b374c47114
+r1703 fe8f946e87
+r1704 1be7ad1e4d
+r1705 0c125b263d
+r1706 60205bccb6
+r1707 eb0304192b
+r1708 afdd2ae37b
+r1709 98f8b715ca
+r1710 3b888fff88
+r1711 0590ef07a2
+r1712 2543b1f362
+r1713 34d1e011d0
+r1714 93cb87cc1a
+r1715 8cf9f1c09c
+r1716 1e58e5873d
+r1717 fa86012919
+r1718 ca433daf1e
+r1719 ba5d4bc0ba
+r1720 9efff672d7
+r1721 39e04cd56d
+r1722 c5684228f0
+r1723 ff81c53907
+r1724 18c6124caa
+r1725 47ebc88769
+r1726 cc14c3fd9f
+r1727 9060ea504a
+r1728 6393b5b089
+r1729 f270a39315
+r1730 1e13dcd54b
+r1731 d625849898
+r1732 8422906b95
+r1733 71d2d7d978
+r1734 c3dd593e0d
+r1735 ca4f0683b1
+r1736 22601538e7
+r1737 7a7fd08c62
+r1738 e9b85b2806
+r1739 40c6285921
+r1740 6b900ad98d
+r1741 30ebdd6a33
+r1742 2f0b15f0e8
+r1743 36cde37b4a
+r1744 3e967ea8a6
+r1745 5a6459c987
+r1746 8f86ae48c3
+r1747 8f8507d071
+r1748 bf1f22df3f
+r1749 3b6074552a
+r1750 49f9d70b50
+r1751 5ec41c878f
+r1752 95fb97c1d2
+r1753 e231ecf228
+r1754 093023c653
+r1755 0e7948f042
+r1756 243531187d
+r1757 7a740005ac
+r1758 ff2fdd7bf9
+r1759 9739f7b7b1
+r1760 6f239df8e7
+r1761 256df827c2
+r1762 17e5c50d20
+r1763 71288c3d5e
+r1764 6502b10931
+r1765 da10615b3f
+r1766 4c58fa7b64
+r1767 95ed9ff085
+r1768 76da137f37
+r1769 b960d0b0e5
+r1770 f6dab0da8d
+r1771 63035c10a8
+r1772 a42f5acee1
+r1773 6191a1cea7
+r1774 b0cd565a51
+r1775 05e2b718cd
+r1776 f381bdba78
+r1777 2a4fe8cf43
+r1778 90c25ce9bb
+r1779 9aa73f7072
+r1780 d8beafde50
+r1781 813005cff3
+r1782 ea9add9f3d
+r1783 6e7a634da7
+r1784 7885501dc1
+r1785 bf54552f98
+r1786 3be1b3ad50
+r1787 480141c85a
+r1788 f6c0572ee8
+r1789 df1f2259cb
+r1790 d1f3dd8f8c
+r1791 0d71e3976b
+r1792 8f3e64bfcd
+r1793 8c06f155be
+r1794 96c18e0bf4
+r1795 390da638ae
+r1796 c48e8b69eb
+r1797 eb7da0de46
+r1798 4d69afd9eb
+r1799 fb814bd992
+r1800 7bfe816d3d
+r1801 4430371143
+r1802 29f2b9e84c
+r1803 4764fc5555
+r1804 d23d0a9c73
+r1805 53b2044393
+r1806 50db43a6e4
+r1807 c84e4be5ce
+r1808 1e46957a4f
+r1809 7d5d0d08ca
+r1810 44c0c70c5d
+r1811 b39d559fcf
+r1812 21d6879077
+r1813 4171a0e4a4
+r1814 8ff5e6c0e5
+r1815 8c3432973c
+r1816 32512b8609
+r1817 999b431955
+r1818 e1389174de
+r1819 81288e4e3e
+r1820 1115a0305c
+r1821 a884cbd15f
+r1822 a87a5ed43e
+r1823 f2edc84853
+r1824 33d19305e4
+r1825 26801b88cd
+r1826 aa3d610138
+r1827 8566e05662
+r1828 51f791416e
+r1829 58a79d27b3
+r1830 b587800cb7
+r1831 35bbfac32e
+r1832 5c70a41406
+r1833 a4d3dba63b
+r1834 76ff2cfcc5
+r1835 3a6b4792cb
+r1836 08cc6583cf
+r1837 7347b4ef10
+r1838 64c34f2009
+r1839 2cdffdee79
+r1840 7c52bed1a6
+r1841 9c20935fb6
+r1842 412f0dee7e
+r1843 d172e5ef70
+r1844 9bcc8b562f
+r1845 d37c08ba93
+r1846 ca1fb5b2ea
+r1847 263b33d07e
+r1848 e592008b31
+r1849 6be0cda04a
+r1850 aa8b75a4cb
+r1851 eb2a2e9310
+r1852 bdaca26661
+r1853 70245d6924
+r1854 c811babc88
+r1855 49625177f1
+r1856 57875e8033
+r1857 93fc1b0b63
+r1858 b877736780
+r1859 653445deeb
+r1860 4063ce9617
+r1861 394a775723
+r1862 e3e27c8785
+r1863 ea5ed7d4b2
+r1864 c2d445c46a
+r1865 ff67e2865f
+r1866 be5f005c3a
+r1867 302a8dfa19
+r1868 300a10fbe4
+r1869 560262c902
+r1870 8e697fc00d
+r1871 e721ad85bb
+r1872 cc00fa9f43
+r1873 9bf060b7c9
+r1874 fc7e1bce49
+r1875 4bab79034d
+r1876 de0a7b2297
+r1877 6ef31a0569
+r1878 c38b0a7fd3
+r1879 8d29db9496
+r1880 17638ef00f
+r1881 7363ca6d17
+r1882 97043a3bd4
+r1883 da10e84d85
+r1884 20e65c3ad8
+r1885 2ba1bbb103
+r1886 cc0c421327
+r1887 7122907653
+r1888 6a5131fc32
+r1889 2521f5270d
+r1890 8f12698280
+r1891 ab3ba403ef
+r1892 3cc09cdf0a
+r1893 ced2ba5fa0
+r1894 8dcce18a84
+r1895 83d1bae3f6
+r1896 fa70dcb1a5
+r1897 18fa82639a
+r1898 2093f9a082
+r1899 cf86b70560
+r1900 4f86e73bfe
+r1901 c743c68faa
+r1902 4f7571ec6b
+r1903 73b40d05db
+r1904 a5737137ab
+r1905 32d380ac6a
+r1906 0f6629c829
+r1907 54313dd4d0
+r1908 8da7c2b08d
+r1909 f8ed082d80
+r1910 f5437e9a8b
+r1911 a61eb89370
+r1912 9d52498406
+r1913 4cdb15a19e
+r1914 70ed6bea27
+r1915 cebcce6b16
+r1916 d71d7bb6f1
+r1917 1ce2b54384
+r1918 5c81900dec
+r1919 b9035ad31a
+r1920 02e1901894
+r1921 859704d7d6
+r1922 8e28c8583d
+r1923 4cf8078dab
+r1924 012bb63042
+r1925 63e0282966
+r1926 9a63043f7c
+r1927 7318a7e03d
+r1928 1bb18c95ae
+r1929 ddfcb6ad98
+r1930 3d150a5c2b
+r1931 0da94e1a1b
+r1932 e5ae9a3ec8
+r1933 7396b95892
+r1934 34615d4a1a
+r1935 516d5e2e31
+r1936 3c051855fc
+r1937 7597b1d5bb
+r1938 e5d1984c53
+r1939 1f99f743ae
+r1940 b072c8ee42
+r1941 7beb013c4d
+r1942 013b0ec718
+r1943 64913ef749
+r1944 bcd8a97b88
+r1945 056ce01ce5
+r1946 6a72d316aa
+r1947 f28a8a337e
+r1948 35ff40f25b
+r1949 319d4a304f
+r1950 3ad5854650
+r1951 79dfd483eb
+r1952 3b343cbf53
+r1953 0d064c5f91
+r1954 67c0850080
+r1955 e914e7a9de
+r1956 5fb655da1e
+r1957 34806cbc47
+r1958 cf31deaa19
+r1959 862f5badaa
+r1960 dfba31919a
+r1961 0f287203ac
+r1962 e37834d2eb
+r1963 e641ecb4dd
+r1964 7834c94e2d
+r1965 83e2c23071
+r1966 9f261a9240
+r1967 c7b74a41f1
+r1968 826b2fe47b
+r1969 182dce41f7
+r1970 15d66b518f
+r1971 29aa887026
+r1972 da7c6e4094
+r1973 0b4f31189a
+r1974 24b5f2f352
+r1975 2618e4550d
+r1976 c738ff1ae8
+r1977 2c435db44a
+r1978 3284c3e19f
+r1979 58657deaa2
+r1980 c69637585f
+r1981 d9fad519e8
+r1982 1bd13a8a2a
+r1983 5c34a951da
+r1984 aff70280b8
+r1985 ef7ab5ba91
+r1986 b35e4689cf
+r1987 e81d53a7e6
+r1988 ed02ff19e9
+r1989 b29d2c5234
+r1990 f81bbb4560
+r1991 0591bfabfb
+r1992 4d6fdfccca
+r1993 febd795beb
+r1994 b4997e3245
+r1995 d5bb139c0c
+r1996 7ce4434052
+r1997 63f7a4026f
+r1998 f936b14dd7
+r1999 6e64ba463c
+r2000 bcfd14b3f3
+r2001 986cda8cfc
+r2002 ed337a0a04
+r2003 858b174325
+r2004 60f05e6378
+r2005 90e43b5df7
+r2006 6289ffbd91
+r2007 d4acacd8bf
+r2008 399bb06cf0
+r2009 c9bb06052e
+r2010 28d3e984f7
+r2011 a3a5e047a6
+r2012 8faa7e1826
+r2013 bb03dbdd47
+r2014 93fea4d99c
+r2015 3e30fefb9d
+r2016 9a387fe59f
+r2017 164e2d8283
+r2018 35cfb1d88b
+r2019 e8de562d27
+r2020 9d6b317310
+r2021 41d7105a22
+r2022 4a5e0ea95c
+r2023 c8f278f400
+r2024 0c15dac9e9
+r2025 5045628572
+r2026 35edf3c230
+r2027 406679c2e6
+r2028 daf8afbdbb
+r2029 25016938dc
+r2030 bfe5383a1e
+r2031 24349248b1
+r2032 ca506ab133
+r2033 b1465f1f22
+r2034 f3fa114104
+r2035 2b7eaff322
+r2036 b68be7fedf
+r2037 2fd1face7f
+r2038 cbbb75f1bd
+r2039 7871d529b6
+r2040 746baf5411
+r2041 9b39818185
+r2042 18b13aadb5
+r2043 b72b96eace
+r2044 8c48250df5
+r2045 82f98b6f03
+r2046 cb6381bedc
+r2047 5fd5896c14
+r2048 e40307b850
+r2049 0212d5e04a
+r2050 4c626e1062
+r2051
+r2052 4ef1371308
+r2053 3317f76bbd
+r2054 33c3ea3b03
+r2055 377337eb8c
+r2056 8bb7f3d835
+r2057 890d729569
+r2058 30dae67575
+r2059 79c146cc2a
+r2060 50f7a66ed0
+r2061 db9d5a4f8b
+r2062 18be2fe9d8
+r2063 21a4dcc99c
+r2064 6b8d116ec9
+r2065 daea8b76a5
+r2066 ee3559b8bd
+r2067 44f38bde65
+r2068 ed0a728933
+r2069 345c562684
+r2070 6a1db626b6
+r2071 6c9deb38e1
+r2072 c926654a82
+r2073 0ab1c86696
+r2074 8550ca1591
+r2075 75b2c96112
+r2076 e37e8692e0
+r2077 a23dcbc444
+r2078 52d21a8546
+r2079 c6c820e8c5
+r2080 64ab1bd6b6
+r2081 8bec111856
+r2082 34501279e2
+r2083 a54b3188ed
+r2084 4a2e6b4e9e
+r2085 142bcb34f7
+r2086 3a4e72367e
+r2087 de8b8417f9
+r2088 b9fb541ab2
+r2089 a24fb5cd32
+r2090 bfde8ef1fe
+r2091 56e2a32dc3
+r2092 dcf5824694
+r2093 5a966687d2
+r2094 240bba50f0
+r2095 cb84910e87
+r2096 26fcd4c7cd
+r2097 f20b622e6a
+r2098 16d29a74a0
+r2099 18f69a76c2
+r2100 c8437e055e
+r2101 38d21f571c
+r2102 0861b9b399
+r2103 6ab80e73d3
+r2104 e6769e5ed9
+r2105 f4eb9e9cf9
+r2106 5488f9b4ae
+r2107 dec4538a46
+r2108 d773ded52f
+r2109 3743c70592
+r2110 bdb4c6d897
+r2111 2a0a8d29e1
+r2112 99a4612af7
+r2113 8f37d5e80f
+r2114 dda82d5eb2
+r2115 dcbe9fae57
+r2116 56945b9d09
+r2117 619bbf9b85
+r2118 d305f5fbe6
+r2119 0c3462a399
+r2120 e9b099b381
+r2121 26630285cd
+r2122 6d14e4da5a
+r2123 a1e8115baa
+r2124 62747ac614
+r2125 6dac101d48
+r2126 a85cabb4c9
+r2127 673cc92764
+r2128 1e1222b707
+r2129 7a4b5c1072
+r2130 4840576349
+r2131 4000080b8a
+r2132 f662fe1a35
+r2133 082d612f19
+r2134 9370a1e001
+r2135 9dce7827b2
+r2136 e4a37a2f11
+r2137 3b81bb39eb
+r2138 dbbab2f7f8
+r2139 8796df1360
+r2140 aa8590e42b
+r2141 ab08cd252b
+r2142 5e6295d2f1
+r2143 ee81efca19
+r2144 0c7c3c6d75
+r2145 be3f31b34a
+r2146 8a675351cf
+r2147 5d861db0fc
+r2148 08dea16b70
+r2149 7feba1480e
+r2150 b0d1c8d146
+r2151 15c5be6f3d
+r2152 d56b51f38d
+r2153 2bda1797dc
+r2154 9ff862a955
+r2155 178ae73888
+r2156 3edd611a2c
+r2157 336268483f
+r2158 00915ce954
+r2159 e516933250
+r2160 22b5c4c0bf
+r2161 5137f0a3ad
+r2162 accaee1ce5
+r2163 17b8ac4bf4
+r2164 4931ca3059
+r2165 cea1921b50
+r2166 8d7d9f8df5
+r2167 829cdf1f81
+r2168 6b8ceb50e3
+r2169 6e1ccede35
+r2170 1f4151cc03
+r2171 605ff15c1e
+r2172 2aa1444f81
+r2173 486a8c2f7d
+r2174 e4687a8913
+r2175 613a52d58f
+r2176 6e7244f1c0
+r2177 709ba6a8fe
+r2178 1935bd3e53
+r2179 2d473fd67a
+r2180 35e4fb5175
+r2181 8dda7b0466
+r2182 40508d0a02
+r2183 8d9a50e63a
+r2184 6cc7254805
+r2185 103888d458
+r2186 5e87c33e2a
+r2187 86f01a5276
+r2188 039d3b3c86
+r2189 68a9768777
+r2190 255be1e85a
+r2191 1efee7453f
+r2192 28a8f644f0
+r2193 6047e1e259
+r2194 fab2ebadf0
+r2195 e6ed073577
+r2196 fa15a3d866
+r2197
+r2198 cd15a69869
+r2199 7e748928cb
+r2200 03e0decc57
+r2201 93da4f9341
+r2202 df9d6b1edc
+r2203 2458b5ce59
+r2204 44e74c6381
+r2205 904d31853d
+r2206 d0ffbd2412
+r2207 d87359dbd9
+r2208 21cf884cc7
+r2209 b550531ef9
+r2210 806aab5f09
+r2211 da6aa22fc8
+r2212 644a9f0d71
+r2213 bd139b1e9e
+r2214 d8c9cf366c
+r2215 f36f1385f4
+r2216 9b0529c56f
+r2217 07627136f8
+r2218 5b88042e49
+r2219 68ed8693e9
+r2220 2694a9cda4
+r2221 063e9a81fa
+r2222 58d053ebed
+r2223 adf175ac26
+r2224 bcc3423f85
+r2225 933984df2c
+r2226 4b5620b2f1
+r2227 de574928fe
+r2228 6eba51241f
+r2229 a7c75c09c6
+r2230 eaedb73aa5
+r2231 910667e39a
+r2232 144f8735b7
+r2233 681290f866
+r2234 787f3ff992
+r2235 f2de9c44a8
+r2236 d29c108139
+r2237 161661cf29
+r2238 15d8dae21d
+r2239 0602da2bfe
+r2240 7534129fe0
+r2241 687adfac11
+r2242 67bb1e7543
+r2243 76d02d660b
+r2244 0310ff02f3
+r2245 aa19b7dead
+r2246 f5ccd18bd6
+r2247 fd5b71760e
+r2248 14bd516c52
+r2249 8acc04c7d3
+r2250 373f590537
+r2251 b1d1e01908
+r2252 110310e52a
+r2253 c5d12428eb
+r2254 b9bce038b1
+r2255 b1b0574170
+r2256 ff8ce7198a
+r2257 3351f53801
+r2258 7c0e0f3ca3
+r2259 1dcdd042ac
+r2260 d6cb921038
+r2261 183040ae17
+r2262 81ed64fd4d
+r2263 e15d8d316b
+r2264 77eea4abf2
+r2265 f22dc6124d
+r2266 5f8752e96c
+r2267 77895f73d5
+r2268 2eed730f5f
+r2269 3d2b827dcc
+r2270 782063cf85
+r2271 83f5597196
+r2272 946aa12519
+r2273 3b1253891b
+r2274 0adfc8d42a
+r2275 ab7815a4ab
+r2276 7b8b6d0adf
+r2277 22499e81b5
+r2278 fec2e00d09
+r2279 72e96acd7e
+r2280 783f68c2ac
+r2281 5f628d0664
+r2282 2c8a91239d
+r2283 da4189d103
+r2284 68b2298f83
+r2285 71cd266cd4
+r2286 a1c71f9157
+r2287 8b4b869302
+r2288 5090a8faa6
+r2289 dcac982fd6
+r2290 836f5fbd90
+r2291 b05601a61b
+r2292 3590dd484a
+r2293 497e073783
+r2294 03399790a4
+r2295 3186eaed67
+r2296 84f921cf1c
+r2297 edf7c7a74b
+r2298 5598e28509
+r2299 3f4bdb54a2
+r2300 fd033d227b
+r2301 3fcadde1cd
+r2302 88ec34baba
+r2303 5ab98b10ad
+r2304 c8eb73357f
+r2305 5059979f35
+r2306 d6e4037c7b
+r2307 cc195672a2
+r2308 abdb5cc6bb
+r2309 d8888a99cf
+r2310 3f6a2d9a54
+r2311 16fca155f2
+r2312 9b1c72bc8a
+r2313 25d392bbcc
+r2314 b8d2c4e065
+r2315 9d7f21f573
+r2316 eee708d519
+r2317 084de2477e
+r2318 5e749cea9d
+r2319 c5dcb8d01f
+r2320 d9eef6e144
+r2321 e3a34d5bee
+r2322 2f487fd928
+r2323 f5919ef574
+r2324 64c98ed139
+r2325 57bf1138b8
+r2326 253a192ede
+r2327 2f88fe7918
+r2328 dc13a90b2b
+r2329 ae638b7fc0
+r2330 6a29f17c21
+r2331 74a2351508
+r2332 ad1bbdca7e
+r2333 000632827a
+r2334 e3981e4bbf
+r2335 7ba607db86
+r2336 87cb480434
+r2337 8698d99b93
+r2338 5665f6b29c
+r2339 39d3d2c894
+r2340 c0b473a235
+r2341 cfcba70201
+r2342 dcb9b69a64
+r2343 fdfbbfd640
+r2344 94d3acbf63
+r2345 35259d1028
+r2346 4ba19f6141
+r2347 84f0da94d5
+r2348 5e6ded3a4a
+r2349 33d36a45eb
+r2350 bf1d9d46d0
+r2351 ca5b2ccfb2
+r2352 b37cbcac6f
+r2353 7b0cb5b0f3
+r2354 ffe249b10d
+r2355 21dfb196b2
+r2356 3ce1703938
+r2357 2209925d31
+r2358 f7e5579e4f
+r2359 ca3b44fb2d
+r2360 fb144c8d45
+r2361 3f89d6837c
+r2362 fbbe896c2c
+r2363 4a9bfff8fb
+r2364 c788c8898c
+r2365 d9c1452ff8
+r2366 ad1e0f4cc3
+r2367 6024fffaf8
+r2368 c474f7cb36
+r2369 8a9f354696
+r2370 512a32f9e2
+r2371 4464fd3c97
+r2372 0362d6e255
+r2373 de408cadfb
+r2374 b629bde913
+r2375 cbecd2ab52
+r2376 2d4a2223b1
+r2377 08ab698c37
+r2378 399482a6ba
+r2379 b62bc67911
+r2380 e22c2ff60a
+r2381 53e08f348e
+r2382 6f0bb4891c
+r2383 a15110d883
+r2384 a7fc16dfe6
+r2385 1dbc00126b
+r2386 94d7bcd7ab
+r2387 3ea1b00f74
+r2388 59a98600d2
+r2389 4e215f6791
+r2390 c72f7b292f
+r2391 1be73373fa
+r2392 d1624f0e58
+r2393 4baa04cfb6
+r2394 67da7e0b9c
+r2395 5b0dce5f2f
+r2396 f34373f436
+r2397 5a98f27b77
+r2398 643a9f3e2c
+r2399 f31ddb4271
+r2400 c1af5293fc
+r2401 b877bd4e6e
+r2402 a63c581ec0
+r2403 b35f58c4f2
+r2404 1d821aee2f
+r2405 2733181352
+r2406 0572255cf1
+r2407 79fca26698
+r2408 d53c0dadb9
+r2410 9108260633
+r2411 752abae338
+r2412 cebef56475
+r2413 dfb4b3d88b
+r2414 39aeb78b15
+r2415 e5901f3310
+r2416 3927bcf1cc
+r2417 f2ae3da0a7
+r2418 61cd59dc29
+r2419 f2d05be35c
+r2420 8109d288cd
+r2421 bbadab7e72
+r2422 f8865bfa85
+r2423 2102c85d8d
+r2424 0c2f94986a
+r2425 4ae2a110b2
+r2426 c1344232ad
+r2428 350dae616e
+r2429 2c14e0fd96
+r2430 ec8b875fec
+r2431 ed4861b3f3
+r2432 00bd0b0b03
+r2433 2c067ee54f
+r2434 b011f55379
+r2435 1c3bde7437
+r2436 7c8f4490a3
+r2437 e0302c3f4a
+r2438 cd4de247e0
+r2439 a2a20e4cc2
+r2440 b411d98cb9
+r2441 8822af3c41
+r2442 5421ec6d05
+r2443 d9059f96dc
+r2444 e6bcb618fa
+r2445 9694e01a39
+r2446 bba5b99fcf
+r2447 0c5398b922
+r2448 af6b02cfe0
+r2449 bc787f22d3
+r2450 783d20556d
+r2451 7fab748c79
+r2452 fd419e96a7
+r2453 6688f9d3e1
+r2454 b711111204
+r2455 25412bcee8
+r2456 098eeb4af8
+r2457 ccaf171196
+r2458 77eeea0708
+r2459 97626f9df6
+r2460 34a75235f6
+r2461 642fe7790b
+r2462 56457e5b4f
+r2463 e72cb8c981
+r2464 24c538e634
+r2465 10ab89ae44
+r2466 d2d2db6b51
+r2467 7d75758247
+r2468 f525d895f4
+r2469 640950adab
+r2470 398f4e52a4
+r2471 aa23e3e1a2
+r2472 a386c6b2f4
+r2473 a14f030d44
+r2474 ae2cba7319
+r2475 328063bbe5
+r2476 05b798c3d1
+r2477 7a9f373473
+r2478 17ea384cb3
+r2479 3cb16fdb40
+r2480 4209d6c888
+r2481 5069b94720
+r2482 c8842d2ece
+r2483 2aef35c1c9
+r2484 7c6d191387
+r2485 d3aeb53f30
+r2486 30d9763761
+r2487 364a11eaee
+r2488 fc07fab722
+r2489 3dc7c479c1
+r2490 ee9aea08d4
+r2491 4a61569db4
+r2492 73b6fcf337
+r2493 4e8adb9edd
+r2494 9c37599cf6
+r2495 24549f229e
+r2496 67b86b9e8d
+r2497 94c44549ef
+r2498 41f787d1f5
+r2499 91945ebb95
+r2500 3d7fe86ae7
+r2501 ff4e274396
+r2502 0134764630
+r2503 4c01efeee5
+r2504 244e701074
+r2505 95bd5979f6
+r2506 170091b655
+r2507 4f93a0fb9d
+r2508 0bc48e99d9
+r2509 bec9884b00
+r2510 c9e045f5c6
+r2511 e473193158
+r2512 b95957de6c
+r2513 43318b75bd
+r2514 131fc7ff56
+r2515 06bad88d6c
+r2516 c86863e436
+r2517 b8f8fb77bb
+r2518 204c95bb5e
+r2519 53f396c70e
+r2520 ec2cf46df2
+r2521 4801729114
+r2522 8f71bdfa4e
+r2523 e6ad5066a8
+r2524 08c65b09ef
+r2525 37cfcbc4f5
+r2526 b5d47b164f
+r2527 c11a8632c4
+r2528 982254cf56
+r2529 bc2b4c14e4
+r2530 f412400f06
+r2531 b2847d5516
+r2532 24e7b23949
+r2533 7c34b69259
+r2534 49b2a7e6b9
+r2535 0e15eaa854
+r2536 9441412e0c
+r2537 2f18309e79
+r2538 5b1555e72e
+r2539 e414d903e3
+r2540 1c315aa623
+r2541 f40e29b44c
+r2542 d2d7a7ed16
+r2543 f5fc87e968
+r2544 9d0a383fa1
+r2545 f9d951b4e6
+r2546 39a7f8363f
+r2547 7735e5b993
+r2548 d68d41ec0a
+r2549 8d6a1e3cfe
+r2550 0fe104ec43
+r2551 3a273d52ed
+r2552 6157d53787
+r2553 d6963262b4
+r2554 df78dc64f7
+r2555 d05ea282a1
+r2556 0c20540ebe
+r2557 0b38cbc3c5
+r2558 2629b94686
+r2559 3a657c3f26
+r2560 466ef4d121
+r2561 bd2cb9d56f
+r2562 da6966888b
+r2563 d266b00a2d
+r2564 5cf09c3b1b
+r2565 990b79b76d
+r2566 3fedc714db
+r2567 a10fed035d
+r2568 dd76054657
+r2569 6a930f9ca6
+r2570 c9ced67aa4
+r2571 fb462ea1b3
+r2572 a0ae30f323
+r2573 9de41d8e77
+r2574 196d85658b
+r2575 1f5810a6e8
+r2576 b62de8dc4f
+r2577 2014d1feee
+r2578 02424acb23
+r2579 08299566b2
+r2580 1da04b88fc
+r2581 14ea14e71b
+r2582 7861176c22
+r2583 9c50901f93
+r2584 b549b7bc7b
+r2585 07f96aac39
+r2586 e1f634c04c
+r2587 f145a03da3
+r2588 2f8a23ed07
+r2589 7cf98e704a
+r2590 d6261e9cd3
+r2591 0f58b769c4
+r2592 a1f0c5d00b
+r2593 d437649e1f
+r2594 6e033e8d2d
+r2595 429b2299ae
+r2596 d5d867cc1c
+r2597 f69df6a87d
+r2599 1ceb5de993
+r2600 0ec87d7eb2
+r2601 819c49c7f3
+r2602 3c2c7c93c6
+r2603 0434561cee
+r2604 27203be4cd
+r2605 8bb7d00387
+r2606 66202c13c9
+r2607 9742dffcb5
+r2608 9810b4372a
+r2609 2d6d5a41e2
+r2610 d5f12adbfd
+r2611 f84a1e2955
+r2612 470b27d49a
+r2613 16ef657d46
+r2614 24a50b5e81
+r2615 40e9aaf193
+r2616 3b4e70e1bd
+r2617 d19cd4e679
+r2618 ffc44a5c91
+r2619 04121e51e8
+r2620 f405b980ba
+r2621 4fa1acc175
+r2622 192afdc3ca
+r2623 c2e3c0f366
+r2624 a45c078ec7
+r2625 f6fa10b19b
+r2626 b1e0f11836
+r2627 6a574075fc
+r2628 911f51efb7
+r2629 d72362d233
+r2630 669a7e4704
+r2631 949cbfa341
+r2632 5e430d9bf6
+r2633 8895d4c283
+r2634 c46335ac1a
+r2635 b8d11d03ea
+r2636 a634b2280f
+r2637 333d2fd8ba
+r2638 7b9dbbfaf5
+r2639 df05d14290
+r2640 d15a4148ef
+r2641 ba3daff2aa
+r2642 b52895234d
+r2643 e24b4f134f
+r2644 646bedd83c
+r2645 6c399e8273
+r2646 c56fa94244
+r2647 b28470ad0e
+r2648 2fae19f844
+r2649 5b778f324f
+r2650 76506bbb73
+r2651 cfefa04006
+r2652 31238c61f5
+r2653 f4308ff5f3
+r2654 3eb734d2b4
+r2655 a28376d5bd
+r2656 0b75ded56f
+r2657 01599fa37b
+r2658 12bd290e16
+r2659 180d7c2fec
+r2660 fffd640953
+r2661 531b370021
+r2662 45715e4289
+r2663 2f390afd17
+r2664 181f366139
+r2665 16ec5b5482
+r2666 94109ffcbe
+r2667 c1e6d28227
+r2668 e2d5017493
+r2669 7ff87b6dc3
+r2670 4342030b00
+r2671 124944fb5b
+r2672 05632168c1
+r2673 826af8cfd0
+r2674 e27bc7f5e6
+r2675 a6cbb7ee0f
+r2676 3f86c7c501
+r2677 09d5285df3
+r2678 38ad1eeb91
+r2679 5bcf3d3f6f
+r2680 c81ec5f07f
+r2681 8cf49a6284
+r2682 9308bfb939
+r2683 a8431a8613
+r2684 56747fd2de
+r2685 810d031614
+r2686 00478513fc
+r2687 4c74885f5b
+r2688 142fa4545b
+r2689 593554425b
+r2690 420ab4bb9c
+r2691 045c22769d
+r2692 1807482906
+r2693 b96ad4aaa3
+r2694 6034828756
+r2695 dc15aa8a27
+r2696 b3d9ef7126
+r2697 4066bd9c15
+r2698 f909d73594
+r2699 d2bf0e1ddb
+r2700 fda2eeab2e
+r2701 cda9593740
+r2702 ffea5d8f78
+r2703 ebd6149d9c
+r2704 5c4179270f
+r2705 c3dad6eaf6
+r2706 3610314d5c
+r2707 b3c7876018
+r2708 f117a23cbc
+r2709 483b35519a
+r2710 4b14bbab34
+r2711 63e5a79c2b
+r2712 dbb4b1b89d
+r2713 94ce263ccb
+r2714 67089f9e05
+r2715 5ff59b4a7a
+r2716 ef077db69b
+r2717 0da441a4ca
+r2718 90feb7ffbd
+r2719 3d5478d4e1
+r2720 95146d1ee5
+r2721 1d27f61a15
+r2722 756d7e4741
+r2723 65fc22f072
+r2724 0bb65de0e0
+r2725 ec81919033
+r2726 ef1bd748b8
+r2727 4c4bc2c147
+r2728 50f5fcf7d6
+r2729 2d8126de26
+r2730 c1c3bc8b5a
+r2731 92d93e58ce
+r2732 00f558fd79
+r2733 6d53026841
+r2734 b1562509b0
+r2735 5aa1b9d168
+r2736 04aea0295e
+r2737 0f9736d449
+r2738 6a448198f8
+r2739 dbd4d89103
+r2740 22f8b2e70d
+r2741 4d14aa915e
+r2742 46e374a5c0
+r2743 45df364c3b
+r2744 b674983475
+r2745 dc1e6dd949
+r2746 5f19071110
+r2747 c06bdb684a
+r2748 88a9af0734
+r2749 72a496a3c4
+r2750 8ba6023e7a
+r2751 ce039b7db1
+r2752 b57a08994f
+r2753 fae54c38a7
+r2754 2dedb4dd2b
+r2755 79ab139d58
+r2756 286ab9ba98
+r2757 e9201a7193
+r2758 21e809f6cb
+r2759 a4737b5704
+r2760 fce53bc99b
+r2761 1e9a5c8fa3
+r2762 41fc64111c
+r2763 da9c179a47
+r2764 d0f5e90b5b
+r2765 b918f65c2e
+r2766 bf4d9f29a6
+r2767 829ff49f1c
+r2768 07c291484e
+r2769 a736bd4140
+r2770 774209bb21
+r2771 b93f7b2512
+r2772 78ea6ddc4c
+r2773 8f6a248ace
+r2774 1e478c2c6e
+r2775 70d535ae7b
+r2776 98bd45db83
+r2777 982187f1d3
+r2778 b524ace93f
+r2779 b7210674f8
+r2780 a0846e3ecf
+r2781 de42629d73
+r2782 f6f7e50bfd
+r2783 5998eb1012
+r2784 bd9f74861e
+r2785 5412ad4a1c
+r2786 2ca6f3cc99
+r2787 7c81b118ae
+r2788 aa96bcae32
+r2789 0aa10646c7
+r2790 26d14cf7cf
+r2791 e688c54bea
+r2792 b29bcf9f0e
+r2793 95f6a43b4c
+r2794 6bee9bc8b0
+r2795 61d5e9b411
+r2796 cce47063a6
+r2797 d95cab4184
+r2798 952ee03cca
+r2799 ddc26de6b2
+r2800 e7bb2275e3
+r2801 b40e2e6879
+r2802 247c8b081e
+r2803 37be4bd4a8
+r2804 db24f5b0d6
+r2805 c39826e69e
+r2806 4a8d2fa214
+r2807 bb70bf9e77
+r2808 04741a8f8a
+r2809 315baae74d
+r2810 c1df3809c6
+r2811 6c1888cb45
+r2812 63f1bdd100
+r2813 6c9e15bea0
+r2814 72523cc253
+r2815 354a08de0d
+r2816 848d9a68a9
+r2817 d61be478ed
+r2818 6d5be0aba4
+r2819 29c8420e04
+r2820 f893e29c2f
+r2821 417033fd0a
+r2822 f108d5429f
+r2823 7155dffc81
+r2824 6d13331746
+r2825 35338a6399
+r2826 f56e421f4f
+r2827 4f00279941
+r2828 0bdcdc7c9f
+r2829 435fe5da69
+r2830 2ebbfcd94b
+r2831 7814682f95
+r2832 d58b852b5c
+r2833 ff313793ab
+r2834 82bd6e4326
+r2835 10090487be
+r2836 58dc39185c
+r2837 7417f70cc6
+r2838 2e3a472e95
+r2839 1b56122b74
+r2840 f410167a75
+r2841 8e21b1ec26
+r2842 4b1688cfd4
+r2843 b5d1f0a2f4
+r2844 8a2115f360
+r2845 9928e41df8
+r2846 57808a09a8
+r2847 f6c38a0331
+r2848 dd1a0dff0f
+r2849 6ef9088488
+r2850 5b2ecea0ec
+r2851 4ed93830ba
+r2852 8a4add814e
+r2853 32fb9e583a
+r2854 d94678566b
+r2855 647a8836c5
+r2856 a231200e62
+r2857 0b43b2e82d
+r2858 a37819d7be
+r2859 7b19a9f333
+r2860 672a2b4b11
+r2861 65f20e3f1a
+r2862 737ba5b937
+r2863 bf4737b364
+r2864 a49360db4e
+r2865 6f6fae0e87
+r2866 09b226cf9d
+r2867 069839fa6c
+r2868 577d475284
+r2869 2bea6271b4
+r2870 dacc0190d5
+r2871 47e6548915
+r2872 0af8d12102
+r2873 3869143cba
+r2874 0a10a202bb
+r2875 f6835d10b6
+r2876 29d6bb1eb3
+r2877 164f433132
+r2878 5db349a7bd
+r2879 8517e8ce45
+r2880 c94a990938
+r2881 c5ca08e53f
+r2882 3cd77e2c4f
+r2883 a4eb56b88c
+r2884 a32de8bd0c
+r2885 2cfc33e42c
+r2886 0f9240b197
+r2887 e18aa1f949
+r2888 5d81251857
+r2889 05f0493156
+r2890 d84ed1d80f
+r2891 fa228978e0
+r2892 e272f2dc11
+r2893 9be9bb3626
+r2894 0522bc5751
+r2895 bf519a01e3
+r2896 45028dc737
+r2897 92763237f3
+r2898 ca196dd13c
+r2899 49332fe728
+r2900 100718a811
+r2901 f8d7d0b5a5
+r2902 0180171652
+r2903 9cfde36da8
+r2904 7465e94917
+r2905 f57010499b
+r2906 5ed2fb0f5d
+r2907 1e69dfd777
+r2908 61bf0c8f1d
+r2909 430c5dbe56
+r2910 c86bcd0630
+r2911 25ebed6d59
+r2912 834473088e
+r2913 e0ae9dedb0
+r2914 ef1bee05f0
+r2915 7ad11edbe9
+r2916 6aa8f52864
+r2917 71ac5a4ad2
+r2918 a70044860b
+r2919 da995cbaec
+r2920 51cc72085e
+r2921 8408bce1b7
+r2922 071bc69d4d
+r2923 c6526ff17d
+r2924 4fdc1318cc
+r2925 d188fb525f
+r2926 0ee73f9bb5
+r2927 0643b2df51
+r2928 4206abe0ca
+r2929 feb87f51f3
+r2930 944d6aec55
+r2931 302643672d
+r2932 1a380153a0
+r2933 e54a33c950
+r2934 95749d947c
+r2935 d7541a389a
+r2936 224c54733e
+r2937 360cd14a72
+r2938 9c24883918
+r2939 bb5e2de28e
+r2940 cf4fd3eeea
+r2941 3657ec24df
+r2942 227d56fc06
+r2943 b4745afc19
+r2944 d88a6cb1e4
+r2945 ae8b367bfe
+r2946 1300597627
+r2947 c44e8bb3c3
+r2948 b929563659
+r2949 56835ce139
+r2950 93102f73c8
+r2951 c262e44a2f
+r2952 6b60fc73e6
+r2953 70e9690e72
+r2954 dd33f4d02b
+r2955 04d78098f0
+r2956 4e3a699d7f
+r2957 3b5c08c007
+r2958 7847f3cf0f
+r2959 653b1117a2
+r2960 e52e120e4b
+r2961 6e1747c335
+r2962 bce606fb00
+r2963 381f20a04b
+r2964 2b714fefd1
+r2965 8bd0505b31
+r2966 dc77b955f8
+r2967 9e04e5e0a9
+r2968 42ae44afed
+r2969 5073bab4d6
+r2970 8a549256ab
+r2971 41872ffb3b
+r2972 9278a377fd
+r2973 7a5770aa1e
+r2974 c83874f3a2
+r2975 1731e5bd87
+r2976 8cbb56700d
+r2977 4931414ab4
+r2978 938d635c43
+r2979 bf2c43a88b
+r2980 b88fd07ae6
+r2981 dbbff1f3e4
+r2982 789d2abd99
+r2983 1b604c5f4a
+r2984 8127c2eeef
+r2985 6b35acd807
+r2986 556ac3b679
+r2987 245b2c3eb3
+r2988 b604e761bc
+r2989 5f69afd077
+r2990 5027368303
+r2991 a28216b0e1
+r2992 784644a919
+r2993 b33c785dbb
+r2994 43505887a3
+r2995 5dc5083345
+r2996 17c857d22e
+r2997 35f72d0a59
+r2998 86b56b80e1
+r2999 7c7bb3f6e7
+r3000 39d7ffe546
+r3001 645f87a5a8
+r3002 98a03600e0
+r3003 64d2fb73cd
+r3004 99ec3e8abc
+r3005 d963cc312e
+r3006 4004f3c9c8
+r3007 b8e65e4dfb
+r3008 c17db339dc
+r3009 d194fb8cea
+r3010 a4642adf15
+r3011 b19820ffbe
+r3012 34dca6ad93
+r3013 8dd1635f7f
+r3014 2a309487c5
+r3015 1a83c87e7e
+r3016 adfc51e14b
+r3017 a743b99a00
+r3018 0c3b2c8af0
+r3019 9fa2048e5c
+r3020 bcf98e6de1
+r3021 70c6897197
+r3022 118ba73f3a
+r3023 acbb83de85
+r3024 8bc6f7c187
+r3025 988633e286
+r3026 a5fef07308
+r3027 82a62ec95a
+r3028 483f42e9ab
+r3029 fbd9b93cc4
+r3030 3ec2af2548
+r3031 a55fdce899
+r3032 c4098caf33
+r3033 b9d0a59aad
+r3034 05468b3b04
+r3035 c1d2e4fa48
+r3036 e884c5b471
+r3037 9050b0828e
+r3038 915155182f
+r3039 4a2c2ffedc
+r3040 bae29995f2
+r3041 68d72320e3
+r3042 ce0c39c85e
+r3043 d540d32e90
+r3044 e5d0859a89
+r3045 76606401f9
+r3046 4d40926c1e
+r3047 0de069d640
+r3048 d57f01bdef
+r3049 acbf344574
+r3050 5b782ac56a
+r3051 222b71d54f
+r3052 8ff3a97381
+r3053 77f339b101
+r3054 bda037d7c6
+r3055 ef5b5ca41a
+r3056 fb2baaca32
+r3057 deb8c2dbee
+r3058 ad169885b0
+r3059 d8631cf668
+r3060 13000c076c
+r3061 2c4e04f759
+r3062 880c57e2e9
+r3063 07c4fae621
+r3064 f78573782b
+r3065 09ce120614
+r3066 2a3901a657
+r3067 141324d825
+r3068 0193c6d2d5
+r3069 278d0ef80e
+r3070 6ab8129e58
+r3071 266937fda1
+r3072 abe707d00a
+r3073 92fcc53be9
+r3074 873dd15e74
+r3075 229917fca2
+r3076 9422bf63f7
+r3077 ef7e4e5a67
+r3078 7ff8b2396f
+r3079 91a1d60c0d
+r3080 3da2cbe475
+r3081 e329fb0ec7
+r3082 62ba1d3b91
+r3083 f988ff0675
+r3084 84ff0a4c40
+r3085 f28c845709
+r3086 f962498141
+r3087 cd2030986e
+r3088 05062b76d8
+r3089 65d12219ef
+r3090 e691366550
+r3091 70e76c73dc
+r3092 d9944e2b51
+r3093 c7ce40c3c7
+r3094 0c42b4a80b
+r3095 927dadef10
+r3096 7db35370fe
+r3097 cfcd524e69
+r3098 e377d5cd76
+r3099 26f8a264be
+r3100 687c2be6d7
+r3101 7cb6cbfa0a
+r3102 4b1ad364d5
+r3103 89cd6790e5
+r3104 e4642b1cf5
+r3105 9d24efb389
+r3106 61bfff7453
+r3107 eeab29703e
+r3108 ef7348057f
+r3109 ce49391c0b
+r3110 5d65d5689a
+r3111 f8791e07ec
+r3112 c88601425d
+r3113 fa257bfab3
+r3114 011b49957d
+r3115 3d80e28b90
+r3116 a91be3f08a
+r3117 9711cb5539
+r3118 5fef5ac208
+r3119 c2bac2fa23
+r3120 cb2627b3cc
+r3121 0c2b5967e0
+r3122 bd07456f92
+r3123 34ae4f9fba
+r3124 c5287e6ce5
+r3125 1389f3407e
+r3126 92659885e3
+r3127 e339aa20e8
+r3128 bebd7cb4b6
+r3129 1bca8c5072
+r3130 b85cbeed4f
+r3131 0214953367
+r3132 1b9f47f3e3
+r3133 4fefd6bb11
+r3134 1e724a3d46
+r3135 bb2e5cbb9c
+r3136 8837d66ac4
+r3137 a405a10c05
+r3138 f475e1a49a
+r3139 2a5dfa5220
+r3140 e744fbb15d
+r3141 536d087fb8
+r3142 f152ba6f9d
+r3143 ee45148951
+r3144 6f2455dd9f
+r3145 8571291ea2
+r3146 8f463de49f
+r3147 21f7a05322
+r3148 54cb878b8b
+r3149 987b57f6b4
+r3150 c2dfcba328
+r3151 492ef88167
+r3152 24e43faec4
+r3153 2ebc9ea1d6
+r3154 5ddd74a408
+r3155 4db594575a
+r3156 6e8fe0a8c7
+r3157 7432218075
+r3158 00048f2901
+r3159 425f0d4461
+r3160 20bae1c9fc
+r3161 d9e9decf57
+r3162 60f6069405
+r3163 b524342e8f
+r3164 18d2dda29a
+r3165 a6b356f4a5
+r3166 b618729497
+r3167 2aab9b99cd
+r3168 14c64d8e10
+r3169 7de863e85c
+r3170 1b9da8e38c
+r3171 12ee4a22bf
+r3172 c9c91c98bc
+r3173 de2f5cdf57
+r3174 81091404c9
+r3175 e6d2aa4047
+r3176 af92d37f45
+r3177 0349ad65d8
+r3178 4daaa21895
+r3179 0cb02ad504
+r3180 308ed786b8
+r3181 9efd259519
+r3182 d7e5c0f81c
+r3183 f698557737
+r3184 e0cb1d2184
+r3185 02e928fd36
+r3186 0371fea50f
+r3187 bab61a5c3f
+r3188 1f7970f3c6
+r3189 65788124d7
+r3190 c10e42f319
+r3191 5e5ff4d592
+r3192 c3168553c4
+r3193 ca09668e88
+r3194 45f3196c8f
+r3195 77609a89df
+r3196 02a6574294
+r3197 8dcb4da871
+r3198 e90524b771
+r3199 32a9ad2c6a
+r3200 d7c89ac1b6
+r3201 872ffbd907
+r3202 a832a47df4
+r3203 1e1dfb7c8c
+r3204 ba2568edf4
+r3205 359ccf8501
+r3206 828b051bf4
+r3207 2cdb40e1ef
+r3208 401f49d066
+r3209 a1ae43c145
+r3210 b1a561d119
+r3211 3d3273ecae
+r3212 904fd95252
+r3213 7e04abe185
+r3214 f25e5dee76
+r3215 668e8ae268
+r3216 3b1dca4a7f
+r3217 c49fcd1023
+r3218 aefc959799
+r3219 989713ac26
+r3220 108910dcf6
+r3221 9f33609a68
+r3222 6af09c2f22
+r3223 18d6311803
+r3224 0cf6ebc16d
+r3225 b56ca3254d
+r3226 27a522996d
+r3227 e62db728e8
+r3228 06c5b6bf94
+r3229 b4f40a720c
+r3230 501082e638
+r3231 a8254eef65
+r3232 65518842d4
+r3233 76255b83a2
+r3234 3f84ccaa23
+r3235 3f137861e9
+r3236 e3deada17d
+r3237 446d90a2b0
+r3238 53ee2c0a66
+r3239 e5a10b5d5f
+r3240 b45360c49e
+r3241 7569c085bc
+r3242 d0ecd06a51
+r3243 d94a30d347
+r3244 682856e062
+r3245 805cd03fcd
+r3246 f36b4fc607
+r3247 efb7dc68db
+r3248 7b29157404
+r3249 608e922cbc
+r3250 1e59ef7fe0
+r3251 3b537582a6
+r3252 790ea6458a
+r3253 41ccf7eea1
+r3254 7f8e3d286e
+r3255 ce4346489c
+r3256 4ff7dbf5b9
+r3257 8b5b896060
+r3258 b14785e208
+r3259 74a305485a
+r3260 53445e748a
+r3261 4c6e4e319b
+r3262 3668fbec35
+r3263 d2fbc9ec5a
+r3264 940f327765
+r3265 43d9d996ff
+r3266 239e60890f
+r3267 47f5adf267
+r3268 61b0435b64
+r3269 706cd4cf87
+r3270 794a8601bf
+r3271 b0b5b5fc12
+r3272 368d511247
+r3273 dea41a5aab
+r3274 2c7b4a9d13
+r3275 4a3559d005
+r3276 f9042a2c42
+r3277 fceea28c22
+r3278 3bf3156272
+r3279 960da5806c
+r3280 b33917d779
+r3281 0602ac4d0b
+r3282 b96d7fa0a9
+r3283 5c8234107d
+r3284 7b6ab58713
+r3285 ad0b57d983
+r3286 5dacc66587
+r3287 e73cc0dbf5
+r3288 1b9180c273
+r3289 aa86bdc415
+r3290 d03b5fd70e
+r3291 87b12a1040
+r3292 1fef47e7b0
+r3293 e56821baaf
+r3294 a278f79961
+r3295 3b26120ff8
+r3296 2ce4da7402
+r3297 43f2d69e0e
+r3298 4c1a09cbc9
+r3299 f37c79282a
+r3300 bae111e875
+r3301 bb777251ab
+r3302 f020b6c5ba
+r3303 3cf6799f12
+r3304 1da220d96b
+r3305 2090a468ef
+r3306 fa64b1f6b2
+r3307 b64f685feb
+r3308 5e263118d0
+r3309 3fb2be2e19
+r3310 146510051f
+r3311 a86e0b90d8
+r3312 53e1782c71
+r3313 4761c43895
+r3314 910d3045ec
+r3315 0a4f68e681
+r3316 51a3f4687b
+r3317 d4014963a3
+r3318 f339e45758
+r3319 218dfd17b1
+r3320 d7060af8bb
+r3321 0c69d76b6c
+r3322 bf6a12295f
+r3323 12f31726de
+r3324 5a1bdae350
+r3325 2416fb7416
+r3326 498e4de99d
+r3327 93944e71f3
+r3328 fee5e824a9
+r3329 8d57fd5731
+r3330 c48a6091ee
+r3331 7be461e3ec
+r3332 26fe188f82
+r3333 1ed6c90451
+r3334 f3129f0da6
+r3335 d4e3c78e73
+r3336 d2db0dc89d
+r3337 b47b66ba0c
+r3338 a7c611df65
+r3339 424c55c4a7
+r3340 d62f52e2f9
+r3341 be579df2ed
+r3342 c806592747
+r3343 cffaae5651
+r3344 563faf882f
+r3345 02f1b571ce
+r3346 1c5ee40dab
+r3347 45541e41cb
+r3348 6eab12dda6
+r3349 19a0b7bf76
+r3350 5325bdaaf2
+r3351 417eeecba6
+r3352 e667e3d3d6
+r3353 f0462d8921
+r3354 eb5957859c
+r3355 379107dc6e
+r3356 bd56492ebd
+r3357 b3714201db
+r3358 e2885f986f
+r3359 b5127bbfea
+r3360 40db5ce741
+r3361 50b1b01c8e
+r3362 5c93f175aa
+r3363 313fb0a317
+r3364 e6b4b5bb09
+r3365 944b0908bc
+r3366 e2711857ee
+r3367 97875c8e2f
+r3368 5b86f497ec
+r3369 c1cf10de40
+r3370 c6bafd19a0
+r3371 cd51f95257
+r3372 87ba0c3692
+r3373 82fac1f8d8
+r3374 bc7e8ae564
+r3375 ce3243d0a4
+r3376 faa6d5c4a6
+r3377 d301ceffc9
+r3378 2eeda36287
+r3379 d89ef849b3
+r3380 c42214f9a3
+r3381 9e6bdbf4d8
+r3382 65cd38fb8b
+r3383 8d5573b5a0
+r3384 9686e20774
+r3385 9b4accd226
+r3386 e0e30084fb
+r3387 de1938de8f
+r3388 81b3c99632
+r3389 6607c9043b
+r3390 b49b44f0f2
+r3391 a7e0b49793
+r3392 196fb61c6f
+r3393 74946c736c
+r3394 c2505b8e5e
+r3395 62bb07c8a5
+r3396 501341ca37
+r3397 d30eb65e9d
+r3398 ed98c812a5
+r3399 cbf9e4a901
+r3400 5a1117d93a
+r3401 932f642e9e
+r3402 b0f0428e9a
+r3403 14163d11e5
+r3404 b53d38fdcd
+r3405 15bccea34e
+r3406 000f4bea97
+r3407 2a33fa039b
+r3408 f4e913aa03
+r3409 49123a49a1
+r3410 1982d7c0e5
+r3411 0adfa22f70
+r3412 514b9f68e1
+r3413 50ca1789d3
+r3414 755fcb9a66
+r3415 7262baec37
+r3416 9f3e2b2a0f
+r3417 5c1a325f05
+r3418 83f49b9beb
+r3419 9633437d12
+r3420 efb7b042ee
+r3421 96ff31936c
+r3422 548a1b758f
+r3423 395ad8ef2a
+r3424 147b761cea
+r3425 e27e0cf399
+r3426 259f4d2745
+r3427 b1b396567e
+r3428 8e297c9a6e
+r3429 036c29404e
+r3430 cf71c30d3c
+r3431 42cdcee6a3
+r3432 9393649522
+r3433 9ed892ea8f
+r3434 8cfefad21f
+r3435 f36f539cc2
+r3436 ba6a39aa67
+r3437 f2db31c140
+r3438 ba643c72df
+r3439 8eab4b5a28
+r3440 946d299889
+r3441 90d52624b9
+r3442 da852d8ff2
+r3443 8991585adc
+r3444 fbed2284e1
+r3445 96d69778b6
+r3446 62bde31335
+r3447 2136372ed7
+r3448 1d90bcabca
+r3449 8d92c23ba2
+r3450 57aef02daa
+r3451 05e63cf5e6
+r3452 41803c1c21
+r3453 52cbb7e9a7
+r3454 9c9c620615
+r3455 d5783a0e75
+r3456 b84faf9252
+r3457 e42693c168
+r3458 92ed802ce4
+r3459 8df9fca462
+r3460 3d71c05ad2
+r3461 7ddd0a6021
+r3462 4bd55b04d9
+r3463 77542c4f6a
+r3464 b4ae478e11
+r3465 ca1842d677
+r3466 c7010a9995
+r3467 9309cf418f
+r3468 63f1dcdd14
+r3469 1fb60c2cb0
+r3470 96aaa10303
+r3471 c377a704ca
+r3472 e23c51b0c4
+r3473 0437311aa1
+r3474 979587afe1
+r3475 e624082970
+r3476 2ce38016a8
+r3477 a746827473
+r3478 37742d3e76
+r3479 d2f969bff5
+r3480 09dba51a9a
+r3481 1c023c5345
+r3482 52d69b2abd
+r3483 8f5fdee46a
+r3484 49ee0198cf
+r3485 39178d7bfc
+r3486 acde04b2cd
+r3487 b6078ccf17
+r3488 cbe17005ad
+r3489 f2fdd942f9
+r3490 a14f094cf5
+r3491 8ac6b33927
+r3492 20de82010b
+r3493 66e469b904
+r3494 ebfda5b516
+r3495 05dd3314d6
+r3496 6274b6d50a
+r3497 365eb2d10f
+r3498 c812ada36f
+r3499 1129ed2878
+r3500 3db7494096
+r3501 a0b4532024
+r3502 dc580cf37e
+r3503 cb7783485b
+r3504 0c2274120c
+r3505 dea91c4e75
+r3506 e5cd07a0e8
+r3507 8912797e9b
+r3508 33d3b46b98
+r3509 4ab231d693
+r3510 cb1b811c02
+r3511 e23a24bb9f
+r3512 c7ccac906a
+r3513 9802b472cc
+r3514 ce53d0dc9c
+r3515 8621368703
+r3516 32013363bc
+r3517 19c9ffaa82
+r3518 07c7a31297
+r3519 c5a53a3a06
+r3520 31c6c0a62d
+r3521 5f9cb270e8
+r3522 05b722f3be
+r3523 751b5fef76
+r3524 9b178df043
+r3525 d2bb978499
+r3526 801009bb55
+r3527 9674b1514d
+r3528 6e4d83438b
+r3529 663ba495b4
+r3530 98f97d8e30
+r3531 b586442ff3
+r3532 6cc9d353da
+r3533 ba35c9553c
+r3534 4a1a36b344
+r3535 596f4af6a8
+r3536 c8a563c9a6
+r3537 3302ff7a20
+r3538 af125e6f83
+r3539 d53ff4ce6a
+r3540 e976f28a28
+r3541 bcde7a4406
+r3542 8da050118d
+r3543 d93bfce648
+r3544 2f30b9e5cf
+r3545 01e4da3b3b
+r3546 624d9f1198
+r3547 53fab22ccc
+r3548 4a94d26165
+r3549 97fcb93af1
+r3550 80cee61ed3
+r3551 a1acbca2a4
+r3552 99d2c0a5db
+r3553 09c6eecd08
+r3554 31d7bbf0f5
+r3555 6f74136951
+r3556 09415a6af5
+r3557 84a4f81380
+r3558 1d35cb0258
+r3559 1a6515ccef
+r3560 652272e16f
+r3561 89942c7a7f
+r3562 5c259cbc76
+r3563 7320ca34aa
+r3564 fb32a6880b
+r3565 23984e79ff
+r3566 72e388e281
+r3567 93796dd69d
+r3568 8adac706a6
+r3569 65a7eff371
+r3570 de650b49b7
+r3571 4cdcb6dbae
+r3572 ea60f46077
+r3573 bb58768c2c
+r3574 5c2695aedc
+r3575 dc7b49d56d
+r3576 25339d1762
+r3577 ad12814977
+r3578 388a7262cb
+r3579 befce84f58
+r3580 cdf59d7873
+r3581 2df00e9062
+r3582 71da85dba6
+r3583 af375eabc6
+r3584 906348dd30
+r3585 c54ece4ae0
+r3586 92e05fabc9
+r3587 c69d97edc4
+r3588 8e283c9e3c
+r3589 b6cc6b0e57
+r3590 913e6bd36f
+r3591 0516acad01
+r3592 42ea1b6956
+r3593 902ced470f
+r3594 99fe4d41dc
+r3595 01409a254a
+r3596 2cbdc0ba3b
+r3597 eed5ff3582
+r3598 5f09d8f587
+r3599 246717e05e
+r3600 6a31538686
+r3601 780d8d55b1
+r3602 b6ae5c66e2
+r3603 badb4d8cd4
+r3604 5fa2459117
+r3605 e8ba62bd8a
+r3606 c1dcdba537
+r3607 26d3537617
+r3608 a28ac70198
+r3609 c2e80c44ac
+r3610 218f76a292
+r3611 f614ac93d2
+r3612 3fe1910a3f
+r3613 80109112f9
+r3614 4fad1254ef
+r3615 c2c1e5db00
+r3616 3bd3a5d239
+r3617 cbf71d88fd
+r3618 364ef1fd07
+r3619 025f26c3d4
+r3620 5cc5811736
+r3621 42fedfeb61
+r3622 e0fa1563de
+r3623 f381097446
+r3624 7fffc7b84c
+r3625 93aab3cf13
+r3626 4c09cb76be
+r3627 3cf459cf6a
+r3628 225d4cca51
+r3629 0579072405
+r3630 d59e2e7baf
+r3631 659b759965
+r3632 f0309dff80
+r3633 92432c2148
+r3634 d229755836
+r3635 ac5afb16a5
+r3636 a1f8145d48
+r3637 085cfba242
+r3638 2dd10de725
+r3639 4c98fce602
+r3640 c66e04d863
+r3641 1e107ea04d
+r3642 6f574e4004
+r3643 af63f742e8
+r3644 11f42cf102
+r3645 7701a98e41
+r3646 e5d611e411
+r3647 d214dd6c6c
+r3648 e6a955c2fc
+r3649 a7474d56c8
+r3650 728d05b388
+r3651 5d37e0e315
+r3652 c885bb4472
+r3653 4b5ad66372
+r3654 a7d877a4ef
+r3655 006505fd59
+r3656 24b907a640
+r3657 99b207b1d7
+r3658 52877fa8cb
+r3659 f9cda0d53a
+r3660 6b99c42b61
+r3661 8673513033
+r3662 b9f91af85b
+r3663 88ad975120
+r3664 3dd173c8ed
+r3665 8233d97107
+r3666 8bf7aa51bf
+r3667 633ee309f1
+r3668 acf705fe9d
+r3669 57d20057ab
+r3670 fa2236790c
+r3671 1fbf1add8e
+r3672 032410ce2f
+r3673 ac9e42deb3
+r3674 d0ac66f6d5
+r3675 6c23d94763
+r3676 cd96887579
+r3677 5c8b65d6d0
+r3678 b29f29c850
+r3679 f01e57a6f6
+r3680 d3e1bf2e08
+r3681 1c08fd5be7
+r3682 e86b5f81f9
+r3683 d361bcb23c
+r3684 14414226a9
+r3685 4ffc505e68
+r3686 12905b5fc0
+r3687 7f63832946
+r3688 8ae023e876
+r3689 5b0cf6f9f1
+r3690 02e58d8f1c
+r3691 71643852e2
+r3692 543531f94c
+r3693 a0702e16f1
+r3694 b3461701e7
+r3695 1050dd4533
+r3696 e1ee4a54c0
+r3697 98fd27c10e
+r3698 edd9c3b808
+r3699 5b80c0ad5d
+r3700 60e78ebb8c
+r3701 b687aa1883
+r3702 31f3132b17
+r3703 534204a7ee
+r3704 24b9bbe78b
+r3705 8df067b25b
+r3706 0b4c2c7563
+r3707 a2b63875b5
+r3708 e864209014
+r3709 ea57d9e40d
+r3710 cb785fad2f
+r3711 96bc1b2e6f
+r3712 dd012e5461
+r3713 66ab84dd8c
+r3714 8541c3cfb1
+r3715 87a4e43ba8
+r3716 1a3fffe3c6
+r3717 d67d3c2eba
+r3718 bb73b04148
+r3719 f609e1d7cd
+r3720 4e7330335e
+r3721 c824d58e10
+r3722 e9fd9059f2
+r3723 a9664dbf3d
+r3724 55dc942618
+r3725 5cedd7f04e
+r3726 f749c05183
+r3727 5ba5cce463
+r3728 d50af5d833
+r3729 35612e02fc
+r3730 5e1103c409
+r3731 4368c52950
+r3732 41cd79472f
+r3733 a8332ccd34
+r3734 f0429d8a6f
+r3735 8b802f68a6
+r3736 48d8539087
+r3737 6386db1a6d
+r3738 ab3bc54b20
+r3739 f99e4b1e18
+r3740 25b24ddd28
+r3741 09c3cc4c36
+r3742 4ba5a222f5
+r3743 fec3fd9ee6
+r3744 7457a6092e
+r3745 f56aef22e8
+r3746 734dbe0e1e
+r3747 74a30a3f52
+r3748 622167df9a
+r3749 829eb95ee2
+r3750 6e325ca26c
+r3751 0dcfb955d4
+r3752 8d054a3f01
+r3753 e8a800d31f
+r3754 87de8ee438
+r3755 8e4b8c4d58
+r3756 251d24e244
+r3757 bfa877d7e4
+r3758 27410be753
+r3759 18b44350ef
+r3760 358371050d
+r3761 c78c1e3efd
+r3762 1deb28f000
+r3763 89f45612e8
+r3764 afbe00bbad
+r3765 9d65aea9a9
+r3766 2968ffe5e0
+r3767 35c612c5c2
+r3768 5fc13b102f
+r3769 86dd00a81c
+r3770 d34f161678
+r3771 f91cf5ddfc
+r3772 4bd7cf5b63
+r3773 a8731f5c35
+r3774 55fb705ed9
+r3775 499b0279b7
+r3776 016e76d9c2
+r3777 d2b5a0ad16
+r3778 233229a0f8
+r3779 88e246ba2a
+r3780 10c29b9c5b
+r3781 172de146a8
+r3782 d2b9c55e12
+r3783 02dc24e068
+r3784 c9e33b2023
+r3785 dff9023c16
+r3786 4d14ec1b71
+r3787 7108592b2b
+r3788 0610ba492f
+r3789 d8e3e31836
+r3790 c3d9d5ed52
+r3791 0a45f37896
+r3792 db7ba7d051
+r3793 d953b81b54
+r3794 92bbd46102
+r3795 49f7b6b403
+r3796 21b0b406b5
+r3797 4cc5d62ce1
+r3798 41b5050ad1
+r3799 a21098b9cb
+r3800 e35884ed02
+r3801 e18433d52e
+r3802 9ea32651f7
+r3803 f66f43a1be
+r3804 0f7b4d28a1
+r3805 b8186b906d
+r3806 66db83df88
+r3807 ac6bf7a571
+r3808 70394e1ca5
+r3809 7142247463
+r3810 ab2a6493bd
+r3811 72d99c95e9
+r3812 3ef7b2660e
+r3813 f617efc24e
+r3814 fae754c81a
+r3815 6862dacb9f
+r3816 84094a0101
+r3817 e485893f01
+r3818 85733d4b2e
+r3819 cd7dcb372b
+r3820 c1fa420d34
+r3821 74d2ffc0b9
+r3822 6d35dedf60
+r3823 2facf37679
+r3824 6b243c5e3d
+r3825 f9cc4a054b
+r3826 0baefc44bc
+r3827 a9b53b7c86
+r3828 23f795a322
+r3829 e3198c669c
+r3830 4e79c400f4
+r3831 a88516e6a9
+r3832 d6f4a87a85
+r3833 0c75fe7c17
+r3834
+r3835 9eb2d3fa77
+r3836 efe04a5215
+r3837 a78d745dbd
+r3838 19158d78f8
+r3839 2080c5a1cc
+r3840 162a5f7755
+r3841 4fdab72617
+r3842 ebe2c4bf3c
+r3843 b8c700cd8f
+r3844 cbd30cf21c
+r3845 08661fd29f
+r3846 1aa40dd9e3
+r3847 a0a569dfb7
+r3848 436a4363f7
+r3849 1a333dbf5f
+r3850 5d070472ca
+r3851 2dd7fe52f6
+r3852 d5e8f67ade
+r3853 e4a6367b05
+r3854 35f02f5fc8
+r3855 4a2bd066c9
+r3856 8332a1e9d8
+r3857 99847828c7
+r3858 0f6081c0bd
+r3859 95381cac9e
+r3860 8aa1f96c45
+r3861 6b93dced8a
+r3862 4ec12fd076
+r3863 bc2421cd19
+r3864 89d9f33d8f
+r3865 bd170a6e74
+r3866 88a2e8af94
+r3867 986b87a3be
+r3868 6e578cf8bf
+r3869 e7f0aaf5c3
+r3870 a7e9b25308
+r3871 45a2a1519b
+r3872 f45ce87202
+r3873 896b9e9783
+r3874 eb3d3eeb7e
+r3875 fc1ed2a188
+r3876 096ab28f3c
+r3877 4fd6b0098e
+r3878 f1bf4d646d
+r3879 1f2e15f4e5
+r3880 2c5022f9da
+r3881 71010e2f3f
+r3882 9b6cd96846
+r3883 5c3266e3d1
+r3884 5e80a7ac2d
+r3885 75f09b2c8f
+r3886 03f635fcec
+r3887 3620f945d1
+r3888 d475960786
+r3889 1098308d1a
+r3890 0dce46b648
+r3891 5f956146db
+r3892 6b7136abff
+r3893 5d450c4999
+r3894 da9f329d84
+r3895 f9ccc84517
+r3896 d5e85ef0cf
+r3897 fcc306f42a
+r3898 042b3c3978
+r3899 402ee86303
+r3900 9d73819ae7
+r3901 16856ead74
+r3902 5de62f994f
+r3903 80c6300d10
+r3904 2cd85f1d31
+r3905 9d8942df91
+r3906 0b6ef8dc59
+r3907 0afb3068da
+r3908 c003c37092
+r3909 2bde64168d
+r3910 edf4302bff
+r3911 d0cf4e00d7
+r3912 816c3d5001
+r3913 4a519eb7b1
+r3914 d435f4e8d7
+r3915 54c7abb0d0
+r3916 6f55f1053b
+r3917 757caf9ec6
+r3918 01a9d76f59
+r3919 21204727d1
+r3920 cc64c24f2e
+r3921 0cf94fe12d
+r3922 93f05e44fd
+r3923 0f88183f98
+r3924 67b84cefdb
+r3925 b08c2c22a6
+r3926 2ce58118dd
+r3927 160c05843d
+r3928 524918c134
+r3929 204dbd6dac
+r3930 4ab12055ef
+r3931 8442cdcfca
+r3932 8281ca3993
+r3933 8c930dea2f
+r3934 5722c3dd69
+r3935 15e8b9c25b
+r3936 e0411a5c21
+r3937 e1b655d6ae
+r3938 bda1e6ab23
+r3939 f177bb3215
+r3940 390e2599eb
+r3941 c053c8af00
+r3942 f8ee6ef857
+r3943 594fd59916
+r3944 64cff6a0e3
+r3945 74c76637aa
+r3946 d554c8332b
+r3947 1addfa71cf
+r3948 c05c10e3fa
+r3949 863714d6cc
+r3950 e3e53e2bda
+r3951 d439857e2f
+r3952 4c6438417d
+r3953 851321621a
+r3954 5dfd488748
+r3955 4f59c83f13
+r3956 431abf42bd
+r3957 28c2394d01
+r3958 9d110b32d0
+r3959 1fe84bcc45
+r3960 b2dc4a4233
+r3961 f714a29dd6
+r3962 491b4c50a8
+r3963 7f8e2cec8f
+r3964 9b8b0e477e
+r3965 008f8f063c
+r3966 4d7916df75
+r3967 951667d5ee
+r3968 ee4c236bcf
+r3969 ded727e045
+r3970 a8a9dfda09
+r3971 b81c202d9d
+r3972 ff2538e649
+r3973 a7dfe53e15
+r3974 737ceb1e9a
+r3975 4fccc2395b
+r3976 12b7df185b
+r3977 bd9b58dd62
+r3978 2655bd72e0
+r3979 1b7d5dbc1f
+r3980 a50c723119
+r3981 5323096a43
+r3982 47f009d34f
+r3983 2f7726cbc0
+r3984 51a21634fe
+r3985 273a9c720c
+r3986 7c9853df4c
+r3987 434f79ad15
+r3988 78dedbcfe8
+r3989 3a11fb5be6
+r3990 d389d62497
+r3991 f8c47c369e
+r3992 9acfa7693d
+r3993 820a2d3a60
+r3994 e6072321ea
+r3995 ac954ccd10
+r3996 52696417c6
+r3997 aa77b6d1ec
+r3998 2f69f39176
+r3999 e8b87c676d
+r4000 0c3c16e037
+r4001 718ff58ca1
+r4002 89de292795
+r4003 98447d6dd2
+r4004 7501dbe6ea
+r4005 ca46e0cc97
+r4006 b52ba30891
+r4007 5363f24d1d
+r4008 c8c857382d
+r4009 39b3d0aaf4
+r4010 1d22852044
+r4011 e657ee6136
+r4012 26743f690b
+r4013 105ddb769e
+r4014 90a3814707
+r4015 beea6fa18e
+r4016 014b73dd9a
+r4017 e1d244645f
+r4018 6a7c67314a
+r4019 a3488a2195
+r4020 1cd1331b29
+r4021 0cc197de4e
+r4022 c21090e6a8
+r4023 b2ee76bdc5
+r4024 f0e63b8bcb
+r4025 7179a093ef
+r4026 9e67e8eb2a
+r4027 baf9a278a4
+r4028 28d2afb09c
+r4029 d5dd908810
+r4030 75398c1c57
+r4031 528c8d1450
+r4032 424f8b40d5
+r4033 90b4dc0509
+r4034 22d6d7b652
+r4035 9917c66801
+r4036 a274f949c3
+r4037 9602bf11e9
+r4038 2e064cb574
+r4039 a95c0558aa
+r4040 9e2006a60e
+r4041 713aadc739
+r4042 2879da2391
+r4043 0d0172cb82
+r4044 f0663f5fd7
+r4045 8cefd2b4b3
+r4046 a29d908bb3
+r4047 37a3e2201b
+r4048 852bece973
+r4049 b8c5798b5c
+r4050 87ea8ccb1a
+r4051 36d0dca50b
+r4052 fd4e74823e
+r4053 fa99242159
+r4054 e46aab9c0c
+r4055 38c5a6b5ca
+r4056 5860530cce
+r4057 bca179b895
+r4058 51fcef17d6
+r4059 72ced8be62
+r4060 ebf8f4f181
+r4061 21d00c2acf
+r4062 a994adf6e1
+r4063 715423971f
+r4064 60e9413f4a
+r4065 51dfe805f4
+r4066 0246e1e74c
+r4067 1bee42b554
+r4068 5b2c183efb
+r4069 477b790692
+r4070 c009286f50
+r4071 eff6111eea
+r4072 061a14c274
+r4073 a68b994bdb
+r4074 9e4dfe2668
+r4075 32bc7086c6
+r4076 ed7f01e165
+r4077 9201f823b0
+r4078 6508005cfa
+r4079 d02399bd06
+r4080 5662d8f94e
+r4081 2dfa8272da
+r4082 8d4cadf3d9
+r4083 956b9aa3fc
+r4084 b0876f8e35
+r4085 250399c9e1
+r4086 6f7a94d6e4
+r4087 278cb7cc7b
+r4088 4582381b8a
+r4089 8802442bde
+r4090 48073005b9
+r4091 b937dc9918
+r4092 5dec2b451b
+r4093 379f7c1f8c
+r4094 a3fbf70b2a
+r4095 041681054f
+r4096 68562d06e3
+r4097 e922fce3e6
+r4098 6d081b3c4c
+r4099 67290d0879
+r4100 040ca6168b
+r4101 07af0f5eb5
+r4102 9a33a267d9
+r4103 ad7e262eb8
+r4104 5c5a13fc7e
+r4105 96cf49a321
+r4106 8bb23af6b6
+r4107 2554f8b5f6
+r4108 badd1338a0
+r4109 c0f530cfa0
+r4110 31b680f267
+r4111 427e592c27
+r4112 bdf2e9f702
+r4113 6a415fa5ce
+r4114 b630d0e2d9
+r4115 8e8f155893
+r4116 0ff3b181b6
+r4117 8cce5ad64a
+r4118 6d81466523
+r4119 0baff379fd
+r4120 5a6a7cf01a
+r4121 32947cc0c3
+r4122 09dde3d0fb
+r4123 204ec80b8f
+r4124 680392e3ea
+r4125 d6a1e148ac
+r4126 472e16fbec
+r4127 74b9d73234
+r4128 de8fc1e7de
+r4129 c808e1b5c1
+r4130 7febddefc6
+r4131 e08284c96a
+r4132 b3e4299f66
+r4133 d86d471f88
+r4134 1832eb5f83
+r4135 73ef58a544
+r4136 60e0d4dea6
+r4137 63bd290c91
+r4138 e5af480b99
+r4139 da0dcd1188
+r4140 05ac4be4a3
+r4141 5a665f0654
+r4142 2e5c8d22e4
+r4143 ea57a524be
+r4144 8cb91759c7
+r4145 9081d7c2be
+r4146 9bd5e8507d
+r4147 edbac1669b
+r4148 171b8ec351
+r4149 540fe94ec0
+r4150 cb6e13ecc4
+r4151 88a54be387
+r4152 27ea2ec908
+r4153 737dfff4c7
+r4154 ece0d0ed89
+r4155 d1b4a12b05
+r4156 57d313ef7e
+r4157 a636876294
+r4158 91a11635eb
+r4159 c718a6bce6
+r4160 89a3ecc15e
+r4161 a1c834fea8
+r4162 85b2ef7fac
+r4163 ea94e14951
+r4164 860077ec57
+r4165 4c8b6bac74
+r4166 d1a3ad162d
+r4167 0adb68921a
+r4168 12e8a96c2b
+r4169 3f5f7682e4
+r4170 f53185a333
+r4171 507568e72c
+r4172 6ba18e0059
+r4173 cb4fd03782
+r4174 e67937da14
+r4175 5e7ea748c3
+r4176 2c5078a2ee
+r4177 329705355e
+r4178 e34cd16629
+r4179 5865b39955
+r4180 b232d5005c
+r4181 28a0f4147f
+r4182 61badf43b9
+r4183 e215fbc8cf
+r4184 535c7e54fc
+r4185 9907ade461
+r4186 194eaecc00
+r4187 b021e998f8
+r4188 67282530f6
+r4189 d9e3c133db
+r4190 242b37e9b8
+r4191 676fbe45e3
+r4192 0f61edd914
+r4193 1af5b9aeed
+r4194 8bdf158f08
+r4195 11f1938e73
+r4196 2ab6994175
+r4197 6e45b64b7c
+r4198 b5c5916958
+r4199 7ef2731a78
+r4200 de1ca7103e
+r4201 2a99a8010f
+r4202 e389932a09
+r4203 e39e84e8f2
+r4204 0562f3653e
+r4205 5c39c6a1a9
+r4206 0eabdfe72a
+r4207 ef910b836e
+r4208 5ba805cbfc
+r4209 cb0e7af1e8
+r4210 08caefd4e0
+r4211 6e33a303fe
+r4212 6f9c2ac007
+r4213 af1a7619f6
+r4214 3371e4627e
+r4215 8c6e72f8ea
+r4216 ce836de569
+r4217 f1c0882880
+r4218 9b45ca7391
+r4219 bb6caf035a
+r4220 0ea3313c31
+r4221 b691398a82
+r4222 22dc160a9f
+r4223 4c593d00f6
+r4224 c20c973f9f
+r4225 958dd64c52
+r4226 a50fb39267
+r4227 08d6815870
+r4228 2fa90340dd
+r4229 d7268ca89a
+r4230 0dfe89ce41
+r4231 23f5623d54
+r4232 29f5328623
+r4233 21eab08db3
+r4234 7fb5a2b969
+r4235 8ae660b5ce
+r4236 ec21929876
+r4237 aab9d8db07
+r4238 3d20038cd6
+r4239 dc4938928d
+r4240 d3cc2c2216
+r4241 4e274a8232
+r4242 23e00d0a92
+r4243 e31007e594
+r4244 1631e00c3c
+r4245 364559e233
+r4246 2b80c3e689
+r4247 4aa2414f56
+r4248 9966a10dc9
+r4249 99ee96571c
+r4250 4751d12774
+r4251 336f08db48
+r4252 bfbc23fa63
+r4253 b9bb52ea34
+r4254 1979f56bb0
+r4255 7c023507ab
+r4256 82365dd142
+r4257 abf0edeaf3
+r4258 fd154fbd77
+r4259 5da06c813f
+r4260 12be3aab0d
+r4261 ce80365a9d
+r4262 3e24518770
+r4263 537b80d752
+r4264 faf9183089
+r4265 d7499538cc
+r4266 4ae459ef75
+r4267 6ad31934e9
+r4268 20e2019647
+r4269 b72243eb88
+r4270 3577a16ffe
+r4271 ca5b2cba22
+r4272 f2a6a86bb2
+r4273 612132fd58
+r4274 c04ff15055
+r4275 8c69c7617a
+r4276 ed271f4379
+r4277 c27b04348a
+r4278 869e14b718
+r4279 72128a7a5a
+r4280 1f3355d714
+r4281 1ec9209a8d
+r4282 7fe5ed6df8
+r4283 ebe1c8f272
+r4284 3cabc3d6df
+r4285 1ea7ccc409
+r4286 95bafdf4ea
+r4287 7fd0b4b8c8
+r4288 d8f34726bc
+r4289 a9b4163417
+r4290 97b285c569
+r4291 dd9c59cc23
+r4292 eee9ffbb4a
+r4293 4824341905
+r4294 4eac31b0ff
+r4295 51168b223a
+r4296 b0190b575c
+r4297 1cd6878c34
+r4298 555612e072
+r4299 c5b684607c
+r4300 c8573fd5df
+r4301 0caa21c155
+r4302 7b78918132
+r4303 b04cea15bc
+r4304 944cdf5c60
+r4305 7ad58e693c
+r4306 df6b358dcb
+r4307 bc84a838e5
+r4308 1cb144f5e8
+r4309 ce41129d96
+r4310 7d4c3a7052
+r4311 fdd8c6597f
+r4312 5704ccb048
+r4313 fcafb7bed6
+r4314 2c62148021
+r4315 8c15cfa189
+r4316 00e3092afa
+r4317 b2dbde8066
+r4318 a93bb8d43f
+r4319 43e1f829ef
+r4320 5271830578
+r4321 6308575a9e
+r4322 7999556902
+r4323 85d13f716b
+r4324 f683124427
+r4325 1de8fefb18
+r4326 3f2b3db06d
+r4327 94da2c3d36
+r4328 6152efdbc1
+r4329 a98c6f20f8
+r4330 c77239218d
+r4331 ebb096e96f
+r4332 63bb8df947
+r4333 ec061b1605
+r4334 bca043774f
+r4335 b4ba0b8045
+r4336 6d4bae44bf
+r4337 8e1c13bc2a
+r4338 b0142d0b0b
+r4339 fbe14f7330
+r4340 c09c5c4c75
+r4341 1b61b60d0e
+r4342 74fa0daa1a
+r4343 6dd54e71a1
+r4344 cd6a645300
+r4345 2393804085
+r4346 a4e5d4a1d7
+r4347 35b8aa2237
+r4348 a81b05fe54
+r4349 7a3a636e9d
+r4350 98fd985ca3
+r4351 ac9e7dcde2
+r4352 b900a9491d
+r4353 6e9b46d532
+r4354 ed607f9e00
+r4355 b3c92d8d92
+r4356 eab8ef5475
+r4357 a779e34b04
+r4358 bdfec77a20
+r4359 7ca0b11f15
+r4360 1e6dd6bf67
+r4361 d145b661e1
+r4362 4139c127a7
+r4363 1e33553484
+r4364 5e728c60b7
+r4365 a481860c64
+r4366 3abec2c182
+r4367 c0a2895a71
+r4368 957609904b
+r4369 409252cb26
+r4370 20851c9a02
+r4371 5b1141d3e7
+r4372 98d76b37bb
+r4373 9bebec47fd
+r4374 43f25bbed9
+r4375 f750bc83b4
+r4376 a6b903c195
+r4377 2317a36563
+r4378 170cb99b47
+r4379 2b073f0a00
+r4380 b23d885feb
+r4381 3e90b7175a
+r4382 5cf7d39061
+r4383 aa78f8ed21
+r4384 84f48521b8
+r4385 ea4a4fd3b2
+r4386 503767d7b5
+r4387 998e8e3f6f
+r4388 f5633fe404
+r4389 2aa41fcee1
+r4390 9be1f597f2
+r4391 2f19f317f4
+r4392 c8b79c9ee7
+r4393 5f5d61e408
+r4394 99aa6cd9ed
+r4395 5e19bd9b04
+r4396 8ed7d96bde
+r4397 64f1cbe7dd
+r4398 9a5375373b
+r4399 adde8def57
+r4400 f505a2d5a2
+r4401 6113fda697
+r4402 7df39b24cf
+r4403 5269174866
+r4404 adf2ae34ae
+r4405 4fe7cba490
+r4406 84bc4d62b2
+r4407 ee16845bd4
+r4408 03f703627a
+r4409 e59ae197eb
+r4410 83ffad38a2
+r4411 f833e14198
+r4412 dfd98cb40a
+r4413 b09ad43fbf
+r4414 db7efc544c
+r4415 0ebb260f0a
+r4416 e12958a079
+r4417 2a5f62338c
+r4418 56b6b545dd
+r4419 80a2ef51f1
+r4420 7e92e642b9
+r4421 2f441aeb70
+r4422 6b0fcaab0e
+r4423 ec4245fc4e
+r4424 163fd22846
+r4425 fe6d934763
+r4426 09a1cca14e
+r4427 15ed0b070e
+r4428 d5fec7cd48
+r4429 5354118e13
+r4430 8de006ed70
+r4431 1e497c553d
+r4432 eb2601d5af
+r4433 3d0bf84e9b
+r4434 e4ce06a933
+r4435 7e26a89aec
+r4436 a33babfcf1
+r4437 bc6f997f0a
+r4438 7d50bd127a
+r4439 184a284ccc
+r4440 2ce85ef7ee
+r4441 86ed57937a
+r4442 9418aa6b6f
+r4443 33f0d7c7e0
+r4444 a500d671a4
+r4445 5cad7d9a1d
+r4446 35dd7bad5e
+r4447 2e0a2d41cd
+r4448 573e3db24e
+r4449 6c2eeae273
+r4450 efcdf64997
+r4451 05928a2653
+r4452 f30e2cdae7
+r4453 a6fb796e0e
+r4454 5105a3cd57
+r4455 d527c775db
+r4456 ae5a9701ae
+r4457 611894900f
+r4458 338d1dece1
+r4459 7edb15bf5f
+r4460 c43de12f1e
+r4461 1715eca785
+r4462 2c5d9fc10d
+r4463 6a173f47a6
+r4464 3fe0c855d6
+r4465 813a8805de
+r4466 e4c22e287b
+r4467 16632c98c6
+r4468 7fa7c9317a
+r4469 0d4dfff1a0
+r4470 e2e975778f
+r4471 a84b3fba65
+r4472 47e47b4a12
+r4473 2be434ad7f
+r4474 0bf95c4e3e
+r4475 02746d1257
+r4476 7517bd975a
+r4477 5d7078f6b8
+r4478 fdcaec1742
+r4479
+r4480 8cf263bf21
+r4481 01cd680dee
+r4482 e8c5ff7bae
+r4483 441a24642b
+r4484 2bcd0daa54
+r4485 ce8cd951e7
+r4486 9294a4771f
+r4487 675b73f5c4
+r4488 c188ae171c
+r4489 4d5aa89e14
+r4490 703297ef51
+r4491 ec5c9dff4b
+r4492 b6f8d5a603
+r4493 b058c90501
+r4494 747d62e43c
+r4495 f18f51cb99
+r4496 26ae505805
+r4497 0c89a9d1a2
+r4498 2f8d5228ca
+r4499 90942ba061
+r4500 4d3f8e6a98
+r4501 9e3c3c9731
+r4502 dc4422b5c6
+r4503 ffbd367ed4
+r4504 a0f177b57b
+r4505 437b69de00
+r4506 ae80c2257e
+r4507 92c43defc4
+r4508 10b4d730b8
+r4509 d0126c1ff4
+r4510 a2231f55a0
+r4511 3761cb4b3a
+r4512 8ef0c9bfc7
+r4513 65c1d826b2
+r4514 14c330159a
+r4515 fcc3a4867d
+r4516 1b62046e2e
+r4517 f730f48c1f
+r4518 c7cf81fcb5
+r4519 7554cbeb65
+r4520 4a72b68fe3
+r4521 cb95310d86
+r4522 bd16fac899
+r4523 ef7b23f9d8
+r4524 097a86f213
+r4525 d8d8d98d36
+r4526 48bd238a90
+r4527 b18e6b9a5a
+r4528 5b8594a6be
+r4529 dcc928609e
+r4530 6b71c24b1d
+r4531 7bcb0076ad
+r4532 88aad851bf
+r4533 d47ab5bff5
+r4534 97cf075c99
+r4535 159d71afbe
+r4536 37a09ef5c2
+r4537 485957378e
+r4538 cebbca73fb
+r4539 6b793b53ef
+r4540 5f6f5f723b
+r4541 ff21a4fbaf
+r4542 288e0c04ac
+r4543 a23a5c8b04
+r4544 0af18b6efc
+r4545 ec620e79d0
+r4546 8565ad9661
+r4547 e14a1532ef
+r4548 4e800def5b
+r4549 1b8f5a109e
+r4550 2b8b774ea6
+r4551 4fd9ff44db
+r4552 6313864bba
+r4553 cc3cdec920
+r4554 b65ef22c4d
+r4555 9055a919a6
+r4556 cc54f72704
+r4557 7314eaba5e
+r4558 0085ecb2f4
+r4559 e23e263d51
+r4560 4be0964120
+r4561 5a7a0b1dcd
+r4562 6e9fcf31c2
+r4563 50b1206218
+r4564 9cbbfa3ae3
+r4565 43b0ce3c5d
+r4566 e572f2935c
+r4567 b8b10d4207
+r4568 41a4692089
+r4569 cd0fe627cb
+r4570 27a039bf41
+r4571 72937e8473
+r4572 159a3633b5
+r4573 2994973970
+r4574 abcd2d2f11
+r4575 0f11b56fdc
+r4576 b8356d0569
+r4577 7deca20d7c
+r4578 ce5f59f920
+r4579 0c5513d5fc
+r4580 47278930d1
+r4581 5c8e9c28ec
+r4582 a4796d889d
+r4583 4c83b5e7d2
+r4584 77464f58b8
+r4585 8fa3a68fa3
+r4586 526506ee0d
+r4587 71186b0815
+r4588 9202c01342
+r4589 2941c83b95
+r4590 fba39a9328
+r4591 0e4a5a46d1
+r4592 4b24405a51
+r4593 120d1f6d1d
+r4594 c420d1b4b6
+r4595 88445e5c92
+r4596 5318e01060
+r4597 22a82cff38
+r4598 c1f0a81530
+r4599 eb6ce946a2
+r4600 2a09259c9c
+r4601 a4d45a4908
+r4602 b1c5fc5475
+r4603 1d7cdd713c
+r4604 8baf2c8492
+r4605 380429bc95
+r4606 2f697bbee2
+r4607 5c27a53649
+r4608 f13923cb2a
+r4609 c9305ff74f
+r4610 b57983c013
+r4611 85218bf8a6
+r4612 add8bf8d68
+r4613 3a28c9b0a3
+r4614 78a88d95aa
+r4615 738348f88d
+r4616 041a971eb7
+r4617 0a6b2c44cb
+r4618 018bd93918
+r4619 7a23facb88
+r4620 897ffc2114
+r4621 a4409bd62f
+r4622 4dff479674
+r4623 f3198962b8
+r4624 3b81e0cbac
+r4625 25a98964b5
+r4626 8c7d8bd610
+r4627 8a666daa5c
+r4628 e21ba6a461
+r4629 307cda5cad
+r4630 3d3787b6d4
+r4631 5da73c7fd8
+r4632 32cabb1c30
+r4633 ce8279816d
+r4634 391ec16407
+r4635 ecda78ddf1
+r4636 c64152bc3e
+r4637 527e849cbf
+r4638 e46029a572
+r4639 2c1956c282
+r4640 9ac7819931
+r4641 6772d17cbd
+r4642 c18f8a9b2d
+r4643 16317e63bf
+r4644 7c11786a48
+r4645 72b4cec44a
+r4646 269e0a0579
+r4647 265f05b5d7
+r4648 5af15214f1
+r4649 99369b6820
+r4650 bd6070ae78
+r4651 e093d72b2f
+r4652 60b24c0671
+r4653 1da91ff38f
+r4654 90948bf331
+r4655 7af69ba79d
+r4656 45084b98fc
+r4657 8fd901f748
+r4658 36795d2e4c
+r4659 082ab859ac
+r4660 27103aafc3
+r4661 013bdae337
+r4662 20af4df51a
+r4663 c141a84b49
+r4664 dd918cc2b8
+r4665 ecd89b556f
+r4666 3632df227d
+r4667 2214cdeaef
+r4668 4cb8dc8cc3
+r4669 cc49e611aa
+r4670 9a7eb6466c
+r4671 6f850988f4
+r4672 59a434de1b
+r4673 3f12c15fc0
+r4674 1a3ba334d7
+r4675 e4ce6b57c2
+r4676 7f208e2a13
+r4677 8e4ce216bd
+r4678 57a460675a
+r4679 1c2a65c287
+r4680 bb79f90e83
+r4681
+r4682 23f8c69b0b
+r4683
+r4684 8cd7fcc2ab
+r4685 620b8cedeb
+r4686 c7a32d9079
+r4687 74dabb6ec9
+r4688 7762de74a5
+r4689 4b2d79b21c
+r4690 924b0f3039
+r4691 899e2bf1b2
+r4692 76993fa93b
+r4693 21766465c5
+r4694 c7f9cb3d7d
+r4695 8970fdfe03
+r4696 9272651e53
+r4697 2826766917
+r4698 66527219ab
+r4699 6f66105f7d
+r4700 5db8ce56f5
+r4701 218871311d
+r4702 1adcbe66f6
+r4703 9910af693a
+r4704 6e1ef09bdc
+r4705 f8beba5270
+r4706 e142eae2eb
+r4707 b47c6e1f7a
+r4708 3080077eb7
+r4709 1814e8a373
+r4710 5e4a5b0270
+r4711 e82f10b501
+r4712 ad4be6739a
+r4713 d2c7c98291
+r4714 90b1ff4a62
+r4715 2e445f65c0
+r4716 eb8147e124
+r4717 7332181fcd
+r4718 6091cca8a5
+r4719 67dc2eddbc
+r4720 dae93b66ed
+r4721 135a6a67b7
+r4722 41433ad630
+r4723 5354ca48d8
+r4724 a5a299eecb
+r4725 ac14ced855
+r4726 90595610c6
+r4727 aa62dc1ac2
+r4728 fecc6c4d1f
+r4729 3ae2484310
+r4730 0954e0acf5
+r4731 a2a1b7b1d8
+r4732 6a6d7b7f49
+r4733 0cd27125ec
+r4734 9cb190e882
+r4735 7a10e3170d
+r4736 a37e1b6309
+r4737 321c9c4240
+r4738 4c9144de76
+r4739 11a9eecb4d
+r4740 d8522ed174
+r4741 36a6c00e93
+r4742 0efba3ab03
+r4743 50e9847ce5
+r4744 4024e57526
+r4745 e80b0f535e
+r4746 ad601a2680
+r4747 252505f3bd
+r4748 db3bf9a78a
+r4749 b8818bf292
+r4750 b10fe9805e
+r4751 89fdedf629
+r4752 e06547121d
+r4753 61e926fa20
+r4754 a628fcb21e
+r4755 2d9c5a2419
+r4756 207f4257b3
+r4757 c8a1b33655
+r4758 70e481806b
+r4759 e7991261bd
+r4760 df9d094d27
+r4761 5ae9ab371e
+r4762 0188db141f
+r4763 68b225d73b
+r4764 5a5a3eb0e1
+r4765 471bb9d011
+r4766 9cbac19bd6
+r4767 c24210160e
+r4768 e96181b4d8
+r4769 f029fc6649
+r4770 d603b33c53
+r4771 61e06202c0
+r4772 0c9b6c2e46
+r4773 de663567a2
+r4774 de4256056a
+r4775 3ae63b5ccd
+r4776 fc8a16405c
+r4777 1903902243
+r4778 fd9ebbc82c
+r4779 db20991e47
+r4780 15956fc33e
+r4781 0b87051d35
+r4782 9e1ed62536
+r4783 177e09a431
+r4784 e1a8cf0ba7
+r4785 f2141da88e
+r4786 ef6771bfc8
+r4787 f4d80be80f
+r4788 e74f7af55c
+r4789 23c574d163
+r4790 7adc109576
+r4791 daa5460faf
+r4792 ddfe8474cd
+r4793 7ebd3268f7
+r4794 917a34ff65
+r4795 b2846fa014
+r4796 528a6580ed
+r4797 f49c6bd79b
+r4798 083c4b354e
+r4799 f6f24bd8f5
+r4800 b2857eddb0
+r4801 1806bcbab4
+r4802 5ffdc57de9
+r4803 6401f14a5c
+r4804 0d9289b101
+r4805 33cce75063
+r4806 9c7d881883
+r4807 0e1461926a
+r4808 f70518013d
+r4809 ba2e6f61e8
+r4810 9f6d1325c7
+r4811 8398b563c7
+r4812 f2a21aafec
+r4813 aab12e76a3
+r4814 d17278ec0b
+r4815 e4f6a24702
+r4816 75971d2afe
+r4817 56d62194cd
+r4818 4eb2ccaed2
+r4819 b09684a187
+r4820 25152f0884
+r4821 b5bb25e418
+r4822 9e8ee50e5e
+r4823 7a65551686
+r4824 d35e16dea3
+r4825 3616845062
+r4826 63b346bd6f
+r4827 0cf7c3be89
+r4828 e57dc927b5
+r4829 427dfba905
+r4830 ddbc132632
+r4831 7aa7e0b239
+r4832 66bf262e01
+r4833 ec5c988d61
+r4834 ca015f2887
+r4835 45edd7984a
+r4836 7836c40fcd
+r4837 c3244c0d69
+r4838 54671fce28
+r4839 2eb46ac9dd
+r4840 21363864e8
+r4841 aa7d8d3ffc
+r4842 1901db1ef0
+r4843 d466616dd4
+r4844 0b22f20283
+r4845 acfa296358
+r4846 771f3479c1
+r4847 f11fca9389
+r4848 a41b58e5a1
+r4849 feaeff1c3c
+r4850 f4fb89d6d6
+r4851 6df648d403
+r4852 e2bffd2133
+r4853 6bf26b5b78
+r4854 78441751ad
+r4855 630679a8b6
+r4856 0cde435cdf
+r4857 0b24f5797d
+r4858 871771f410
+r4859 ec1c69a32b
+r4860 65814d93ac
+r4861 387dd38c1e
+r4862 2f369fd348
+r4863 08b8ef29f3
+r4864 b8627f4782
+r4865 4aa7f95c0c
+r4866 b9461febf4
+r4867 eceee57a25
+r4868 bd7c67a541
+r4869 029493a5ec
+r4870 dfe0ebc86a
+r4871 a444240d9d
+r4872 3291d4cb2d
+r4873 bc4c24f8ee
+r4874 8aedd8beea
+r4875 d523187556
+r4876 f3b767e870
+r4877 9df28816ef
+r4878 f2b9ba819a
+r4879 607db199f0
+r4880 73fff1f47e
+r4881 1634d380f6
+r4882 bcd7ead349
+r4883 11bd0d6186
+r4884 fabdc86271
+r4885 14203ea9e9
+r4886 eba1c026d1
+r4887 0f97e0f90d
+r4888 83282ce687
+r4889 4047801c1e
+r4890 e416b5a276
+r4891 5e03512552
+r4892 58dc9b6ad4
+r4893 8800f2781e
+r4894 977cbd4ef5
+r4895 90b93c790c
+r4896 071be391c1
+r4897 8a426ccf5f
+r4898 3ee9201e2f
+r4899 52e169b789
+r4900 d888c78872
+r4901 222cbc2dea
+r4902 47f1199b5c
+r4903 97e86af1a9
+r4904 e2b9df1250
+r4905 7fa8d8b098
+r4906 c3a4c7ee6e
+r4907 d11a5ec080
+r4908 fb1795a8b9
+r4909 d75e14f947
+r4910 44ec9c5d1e
+r4911 87f227fedd
+r4912 0beee8af0c
+r4913 161eca2526
+r4914 f4823a2c46
+r4915 d1fbd50cc3
+r4916 36f6311a1d
+r4917 a34d33eecb
+r4918 da82206648
+r4919 a1a44d9fc9
+r4920 7d38b7501c
+r4921 26d7ba2f85
+r4922 c3acfba197
+r4923 d7d3c75f70
+r4924 ea98167b27
+r4925 b58c45a425
+r4926 6a9ac9e4eb
+r4927 98378efcc3
+r4929 85477b8726
+r4930 f89520449e
+r4931 1986671899
+r4932 306e0e4e7a
+r4933 b1944462af
+r4934 83aef81458
+r4935 5535664a2a
+r4936 da547cc724
+r4937 cbd29e3627
+r4938 a03c63c2a3
+r4939 59eea769bb
+r4940 f7ba3e8bbe
+r4941 f8e80a4464
+r4942 599345037c
+r4943 b83bbad311
+r4944 fb67524a83
+r4945 12c007cda6
+r4946 d4de06d53a
+r4947 858ca46c6e
+r4948 87878dd860
+r4949 39b388ce8a
+r4950 e0afb879a8
+r4951 657c0cb4f1
+r4952 05228439f3
+r4953 a47b13bd5f
+r4954 d8e21c3162
+r4955 273a7ad59a
+r4956 029c7504a5
+r4957 b7e1ffda48
+r4958 3a863546b1
+r4959 61befc9bde
+r4960 1d6a8505af
+r4961 4b4aa8e21f
+r4962 ad017dcfba
+r4963 a92ce124f5
+r4964 6a9da72893
+r4965 3f7799f8c6
+r4966 c32643ee1b
+r4967 6f3451e92f
+r4968 bcf48fb54e
+r4969 33e0b0964a
+r4970 e99a5c79c4
+r4971 6beb9d699f
+r4972 959a8f0520
+r4973 653d8ffab2
+r4974 83e70dd503
+r4975 990c85f22f
+r4976 535febedaf
+r4977 1d2b98eaa1
+r4978 e528160f31
+r4979 fdeedc59a9
+r4980 9bcec1fcbd
+r4981 630b3717fc
+r4982 115c008334
+r4983 4d9a521222
+r4984 4cf6770e38
+r4985 15724bed1b
+r4986 97d4a23fa6
+r4987 6e137742b1
+r4988 0b6923d694
+r4989 06f66337c3
+r4990 81592cfd53
+r4991 c037162241
+r4992 634e743658
+r4993 31168656d7
+r4994 89c583a548
+r4995 47d41ea48d
+r4996 2ff070d879
+r4997 d0b1b0f44e
+r4998 0be4dbe484
+r4999 b22fc5ff5e
+r5000 b72a0cd2ed
+r5001 bbc77264aa
+r5002 c2967e39e1
+r5003 0a69feac8c
+r5004 0aba785404
+r5005 57ec040fbc
+r5006 0a8b8f9b90
+r5007 09e5446bd3
+r5008 1ddf7e4b15
+r5009 bc5923e2a9
+r5010 854954dc3a
+r5011 0ca9ad8078
+r5012 4720d45a83
+r5013 d4a7e14e41
+r5014 a84e0a9b9e
+r5015 505451a22c
+r5016 7cd71254b0
+r5017 1d724260bd
+r5018 7612d651c6
+r5019 db6216578f
+r5020 0da6b57884
+r5021 b98f463833
+r5022 30e4902b3d
+r5023 fc0af27421
+r5024 8bbd5b9c94
+r5025 e9caaa6ac5
+r5026 bcedaa4549
+r5027 7ba39195a5
+r5028 5318cffed3
+r5029 87052b61f5
+r5030 060f551348
+r5031 53cfb59269
+r5032 3d141a0130
+r5033 c057cb9d00
+r5034 e0d7aeaa9d
+r5035 2d91f011f2
+r5036 386cb01afd
+r5037 d5d245559d
+r5038 f21a820859
+r5039 a0855e0e7b
+r5040 d1ad2bf521
+r5041 a88a30cdbc
+r5042 515d0ff480
+r5043 04fe66b306
+r5044 5dbdf2cc8c
+r5045 54d61d5149
+r5046 31f89d2888
+r5047 cb13c4597b
+r5048 2bf04d01db
+r5049 03698af2fe
+r5050 41c615a461
+r5051 6ff6a40689
+r5052 95dbf1955f
+r5053 354a2566de
+r5054 58375d932a
+r5055 f11d4d6216
+r5056 f87ec7b728
+r5057 3c7879dea0
+r5058 9b60de91ba
+r5059 676477e2f5
+r5060 849943209e
+r5061 65e8e4cd1c
+r5062 31a5aa6eca
+r5063 b6f86e98f9
+r5064 4f4d28f2d5
+r5065 e7f8ed8b62
+r5066 4e8414de05
+r5067 b32abd3724
+r5335 eca144a9ce
+r5336 3c876ae544
+r5337 5da6acde68
+r5338 bf6dcc4e92
+r5340 0a27645cd5
+r5344 79c0c5404d
+r5345 6eef38afc1
+r5347 f88572e6dd
+r5348 b68121ff0e
+r5349 62df5b4f60
+r5350 203e2f5549
+r5351 5a8157ab26
+r5352 ca957c397d
+r5353 b0d216d7da
+r5354 bc1714113b
+r5355 db7046b4e1
+r5356 8ef485ab17
+r5357 2eba60d641
+r5358 aa5ba627f3
+r5359 3ef0d0f9e0
+r5361 3478adbbd4
+r5363 13a89c1778
+r5366 2c0f7659ec
+r5367 e70a1a24ef
+r5368 17e2b1c2a6
+r5369 df50e05006
+r5370 53a3cc7b17
+r5371 0669cf647f
+r5372 c0d0e8f685
+r5373 b2695e9489
+r5374 9ff3d91d01
+r5375 3bb43d3862
+r5376 227e616d4b
+r5377 7afcf99c5a
+r5386 0e82079908
+r5387 d3819b93ab
+r5388 2f7430a277
+r5389 d6c0efe5b4
+r5390 ac84922849
+r5391 9821f70bc7
+r5393 d8fdc6daf9
+r5394 341c62a27b
+r5395 f7f19a8883
+r5396 ec2227a060
+r5397 7ccea812b7
+r5399 99b6474dab
+r5400 34e7849596
+r5401 713b176bd2
+r5402 10322415ae
+r5403 212ae85d01
+r5404 518f51eb79
+r5405 e50dcb9e2a
+r5406 fe815b63e9
+r5407 5faf35dbd6
+r5408 2ec5c04244
+r5409 35915d3420
+r5410 eb94d965c9
+r5426 b846b44bb7
+r5427 4f8cb21ef3
+r5441 ec25a32375
+r5442 dbf2424c54
+r5443 4e176bc3d2
+r5446 776ecad2a3
+r5447 02752ec5eb
+r5448 e30e2a3304
+r5466 5d4d8b21ce
+r5469 ee5a600ff4
+r5470 d85b12fb07
+r5471 281a73cdd5
+r5478 156a21e266
+r5479 956a04062a
+r5480 331d8814dc
+r5481 58175ab809
+r5482 04b5daba99
+r5483 87863bb42c
+r5484 c189860619
+r5485 400a4aca0a
+r5486 8bde6043d6
+r5487 b839a4f9b3
+r5488 5854add893
+r5489 4c9d99666d
+r5490 9d4a545cd0
+r5491 5dfb1f07ad
+r5494 cfd33de807
+r5497 163ea78358
+r5498 65d00d8084
+r5507 67855156d8
+r5508 a948905244
+r5509 ccb7b56e5e
+r5510 eb15d28974
+r5519 18e106e8d0
+r5528 d8d15e9700
+r5529
+r5530 f7a382d513
+r5531 b0cdfa157a
+r5533 15431dfb40
+r5534 52a762c84e
+r5535
+r5538 1b2637c4ef
+r5539 5a34f0b7a7
+r5540 891506606d
+r5541 401bb8a56f
+r5542 84523838fc
+r5543 1a2b324edf
+r5544 a637905c84
+r5545 33efb08a90
+r5546 cb5094082a
+r5547 124760ce04
+r5548 60ee99e4ad
+r5549 8ecff3568d
+r5550 c0578852eb
+r5551 e81a5c8074
+r5552 1ae15a9a30
+r5553 d9ed348810
+r5554 c4b0b7f476
+r5556 b169da1399
+r5557 e6d5f93be6
+r5558
+r5565 39d0d659e7
+r5566 c79184934b
+r5567 ae23ef2344
+r5568 792fe29856
+r5572 65fa4b2806
+r5574 ac90ad939c
+r5575 a6d825e5af
+r5578 445d2630c2
+r5581 9d5475d9db
+r5582 d3eec69c33
+r5583 64b3256bbb
+r5584 2360b7b207
+r5585 c89ce42f40
+r5586 d89f328f14
+r5587
+r5588 487f837c81
+r5589 8a41146ae3
+r5590 b9a2da1e41
+r5591 5748364adc
+r5592 e885bf6a4b
+r5593 cacf5a2b6a
+r5599 9eac2bedc6
+r5602 628f5c1eab
+r5603 6fc1fe1d66
+r5604 79fab58946
+r5606 3ba2f2b49e
+r5610 f1314e373a
+r5611 e0a29566c2
+r5612 a61449bc64
+r5613 e95af789da
+r5614 b945b6d398
+r5615 4f707afb75
+r5616 6960178399
+r5617 4a08aae226
+r5618 6dc1abb28a
+r5619 9007f0b447
+r5620 91cb19d568
+r5621 049fd9e20d
+r5622 c904321df0
+r5623 be2558167a
+r5624 f0f49df473
+r5625 fa129e84e8
+r5626 73892507bc
+r5627 26dd3fc05f
+r5628 e649e5a07c
+r5629 a8735d44aa
+r5630 78c5bde4ca
+r5631 ccc4c81ec3
+r5632 f8336becda
+r5633 5953fca1fe
+r5634 ab90a0a69c
+r5635 09ff70349d
+r5636 3d222bdcde
+r5637 dceda6e347
+r5638 902f698abb
+r5639 e475dfe83d
+r5640 dcedaaead7
+r5642 bc13888586
+r5643
+r5644 a5cffcb687
+r5645 c57219d240
+r5646 0d6dd01058
+r5647 05a91221bd
+r5653 c717ffa0fd
+r5655 44af599687
+r5656 cb6e500214
+r5657 d18d3c6368
+r5658 88dbab4afb
+r5659 60a0f3e591
+r5660 3ebac4319b
+r5661 38b3c42eba
+r5662 03c4d117bd
+r5663 432ea8895b
+r5664 3fedd27585
+r5666 7748d5fd7f
+r5667 4306480044
+r5668 a3ec956b66
+r5669 55baf42acb
+r5670 dc4e5a3fbd
+r5675 c9a4b1fd73
+r5676 0ec22a89f2
+r5677 dd7e035a5d
+r5695 1577ce588c
+r5702 fa9b107e81
+r5704 c9919d1be6
+r5705 67fa247c22
+r5707 b55ce89f72
+r5711 9547dd90c0
+r5712 b8f52d4664
+r5713 9668bd2204
+r5714 7cb7e12fa1
+r5715
+r5716 90c4181708
+r5717 bc15df9265
+r5718 da05ce41a5
+r5719 1d7dd9a70a
+r5721 25eb10e214
+r5722 7fc1dcd161
+r5723 8adbe6a585
+r5724 5c4c36dc47
+r5725 c904af67ce
+r5726 14a08beabf
+r5727 9d212568da
+r5729 4d92b553e2
+r5730 0bdfe0b5e6
+r5731 6b0d6745a4
+r5732 5ea297c2be
+r5735 c19726b180
+r5741 8f7db2818a
+r5742 f292079705
+r5743 62dcdfbe3f
+r5744 641aa219e7
+r5745 9392e58298
+r5746 2197e9485a
+r5747 28f84fae2b
+r5748 b499d07e91
+r5749 9640cab2cc
+r5750 12517352e0
+r5753 6fa3674c30
+r5754 8bb1d77089
+r5755 2b8adb6ba8
+r5763 dbc6ef023c
+r5764 a831beb540
+r5765 4f6c6e57cb
+r5768 e195c21436
+r5769 15d7da7d90
+r5770 01443e42ed
+r5771 71d0e5a229
+r5772 302186ad6e
+r5773 074eba93ed
+r5774 22245600a5
+r5775 6b1d01b1b2
+r5776 2aafa8639f
+r5782 ed96cbb6a1
+r5783 2821949f43
+r5784 05c7c3c6e8
+r5785 05dd1909d2
+r5786 287ffda0a6
+r5792 1e23b870ca
+r5794 bbad3c86f9
+r5795 46a4e2f698
+r5796 f5d48370ee
+r5797 97b9dcf588
+r5798 73a8597fde
+r5799 b78ee4f8b8
+r5800 c8db5e2c18
+r5801 108e80e28e
+r5802 5380d49e4e
+r5803 f5f37e4062
+r5805 15fea20ac4
+r5806 710c9301a3
+r5817 acdffcce39
+r5818 2526f54f64
+r5820 89c682981b
+r5821 5bd4ed60ee
+r5822 c1e184a365
+r5826 96ae92e4f6
+r5827 7320a81b8a
+r5828 96578a7c29
+r5829 a7991b7024
+r5830
+r5831 25ed8431be
+r5832 806b26a007
+r5833 d3607fa712
+r5834 9272c30317
+r5835 787f4bce33
+r5836 b47d0130f6
+r5843 cce4e3e625
+r5846 bf6be46075
+r5847 a51f26e639
+r5848 f205be7a60
+r5849 ad5e5a343d
+r5850 45371e8792
+r5851 b2793f3496
+r5852 eb73a9886d
+r5859 5a1d969571
+r5860 007f4346d0
+r5861 11e3b59f8f
+r5862 55b91a4680
+r5863 261195377f
+r5864 40dc432b5e
+r5865 dc92346c81
+r5867 bbcf2deba1
+r5868 e8384f4f32
+r5869 ba2010fcad
+r5870 3427c16568
+r5871 0b2d0a6c5d
+r5877 7d7e144e98
+r5878
+r5880 91a9821f91
+r5883 d7007f7a96
+r5884 19cd1641c1
+r5885 f9fed3d5ce
+r5886 a081275eeb
+r5887 0d35144e70
+r5888 4f42f5b49b
+r5889 208bb6e42d
+r5890 d0266a1a7e
+r5891 31b6aecca7
+r5892 750b48f091
+r5893 eb9f31482b
+r5897 3cc6245389
+r5898 9c599f5f90
+r5903 f8b72f6122
+r5904 3e27d741d1
+r6619 ba72a27f4a
+r6620 277dcc3571
+r6621 389e6d3afe
+r6622 a190c204e0
+r6623 8a9572b96b
+r6624 c44a597469
+r6625 e588e23b94
+r6626 c899305fa7
+r6630 27b35faf27
+r6631 2534d32a6e
+r6632 c7e1b5449f
+r6633 d969657ce2
+r6634 3d41a873e2
+r6635 c36fefb5da
+r6636 b0c609cf01
+r6637 d7919a1a9e
+r6638 1169c34d29
+r6643 ca9017c139
+r6644 083f4dd55a
+r6646 1e3992f63a
+r6647 57edf1ab5e
+r6648 b5c077e394
+r6649 5698c4850c
+r6650 95ebbaa43e
+r6651 647c85991c
+r6653 f9377afa2b
+r6654 719588d174
+r6655 718cc9060c
+r6656 33bcd27ccd
+r6657 5478a64f23
+r6658 cfcb34f4e3
+r6659 99fce48f6c
+r6660 b283f88a6f
+r6661 285389fb4d
+r6662 1aa3839d75
+r6663 ff46b04fc9
+r6664
+r6667 c0c963afaf
+r6668 0bef86d8e8
+r6669 963530c26e
+r6670 1c43d5e932
+r6671 8c8b95f3ed
+r6675 b9863c050b
+r6679 f857dea44a
+r6680 4d0b6e97c4
+r6681 d22d800a3d
+r6682 fb7e30141f
+r6683 58b08a3b64
+r6685 cb156c0843
+r6687 661aade979
+r6690 561a1e4f3f
+r6691 dea10c9898
+r6693 74d770b456
+r6701 7cb7defbd4
+r6704 9beb585e55
+r6705 74e31661ce
+r6708 6d022ea683
+r6722 78c4deeb63
+r6727 71fa860544
+r6728 9e745473dc
+r6730 d3d7b7ce01
+r6731 197e25fa59
+r6732 045dba5466
+r6733 eb5bdf5ed6
+r6734 739ba95896
+r6742 6bd2f4b698
+r6744 c09c5f39bc
+r6747 03f3c2af8c
+r6748 8533be1a96
+r6750 496ed79cbb
+r6751 7ede3d70d2
+r6752 803caf64ee
+r6753 bdc6a260fb
+r6754 bb158a6c62
+r6755 9765bb08ad
+r6756 8b4c6ca107
+r6759 7e5198183b
+r6760 b3acb71544
+r6762 b2dbba9927
+r6766 37e705bd66
+r6767 51565df038
+r6768 c516a44630
+r6770 886e009e11
+r6772 88abe6a1e9
+r6773 8e3135cf74
+r6774 aa33f16c7d
+r6781 91ff3e0a6d
+r6782 15433cf438
+r6783 bbfac7615b
+r6784 2b54dff2c2
+r6788 4bf7da4f43
+r6804 3baeaef8b8
+r6808 14cdf97737
+r6812 039933c86a
+r6816 bfbe346421
+r6819 42aa095ac4
+r6821 bf39025ae7
+r6823 1eb8db0dc6
+r6835 578b9226a6
+r6840 322068fb8a
+r6841 0a1598f285
+r6842 0404ac212b
+r6844 7e4339ca70
+r6847 4ddf81c218
+r6848 5459db1226
+r6849 47f417a4a2
+r6850 3cc6197142
+r6852 b656cd6c83
+r6853 0a5eb2c599
+r6854 1ef57837fb
+r6855 5c15a9a9d5
+r6858 e7bdebbdf6
+r6859 1f57a0e06e
+r6862 5725b720cc
+r6864 1d147fed1e
+r6865 357c6349ec
+r6866 887bf2afd5
+r6868 36e6a5a203
+r6869 e5864c02f0
+r6870 0140bb0b4a
+r6871 7863b8edad
+r6872 516ec524e5
+r6873 c1978a3507
+r6874 0558b4ffd9
+r6875 23b23e99f8
+r6876 246dc68a9b
+r6877 095970154d
+r6880 c4f1e1c3fe
+r6882 15a115e5bb
+r6884 d7a3d1a070
+r6890 bcc8c5b3f4
+r6891 1e93a4694f
+r6892 e97babe022
+r6897 5b854aa343
+r6898 8515d4a5ab
+r6899 ce7646c79b
+r6900 c7e98a8e00
+r6901 f15cab9b7f
+r6902 705747005f
+r6903 7d8791d5c5
+r6904 beefcf0c9e
+r6905 5d8738edb4
+r6906 9f7ee056ca
+r6907 be7541e2f4
+r6908 b007bacd9a
+r6911 5cd5436fc1
+r6918 d0d3ec6098
+r6920 eccdddcc73
+r6921 76f0380dd7
+r6924 f9874202d8
+r6925 95921f1ad9
+r6930 20978ce7ae
+r6931 a959828b60
+r6933 6b46664e63
+r6937 f64d8a594c
+r6938 626a6597f7
+r6939 d746f73c9d
+r6945 2b4f591221
+r6946 e858e292e5
+r6947 abc7c2c51c
+r6948
+r6949 4bbc472029
+r6950 18ef3d1b68
+r6951 e5af62215a
+r6952
+r6958 fc24e7abd4
+r6959 d9942ba66f
+r6962 65f9252a9a
+r6963 99c2f95fcf
+r6965 02e3db6b22
+r6973 98071e6518
+r6984 650c4f0627
+r6985 c732b72618
+r6986 a75bf119d5
+r6987 a315aa92b5
+r6988 ed3fdfcb39
+r6989 53725c9b96
+r6990 a56b5bc795
+r6991 5b3eaa1547
+r6992 2608a0b3ec
+r6993 6e3e914fa8
+r6994 05cde95442
+r6996 542401df8d
+r6997 f9ea70db10
+r6998 7423e836f2
+r6999 d5fd750f81
+r7000 a2647adc11
+r7001 e9b3fa3701
+r7002 8d86347882
+r7003 4f0d8b24a1
+r7004 198624e412
+r7005 d66ace258d
+r7006 e9ea3247c6
+r7007 7ff239d7a9
+r7008 3049afc7ec
+r7009 5993e28ec5
+r7013 fde7c4cb46
+r7015 b178e4658b
+r7017 315ba402be
+r7020 e7a7b15c8b
+r7029 6fbb495aad
+r7035 dd40ea8aeb
+r7036 2163b93a51
+r7039 3b40ebd0cb
+r7040 67627dd409
+r7041 4bbe6ea1dc
+r7043 2b8d5f6485
+r7044 d1007862ed
+r7045 5013567324
+r7047 811abc564c
+r7048 56645fa25d
+r7049 486042e89a
+r7052 ac8b46abda
+r7053 ce508d2ea1
+r7054 1c4335808d
+r7055 2c18390628
+r7059 af265b8b1d
+r7060 c058627550
+r7061 87185f9844
+r7063 13aeb49173
+r7071 512b362d73
+r7072 e59dc955e3
+r7073 99a204f187
+r7074 9ce18b19b6
+r7076 40f1882abe
+r7077 9f328e4c8d
+r7078 fa84d50fb8
+r7087 bc10a1dc26
+r7088 264a2ef48a
+r7089 2c4293b449
+r7090 b8da7c77d6
+r7093 8fc98a03c2
+r7099 0f46fe4ca5
+r7116 8b6c8a3c07
+r7117 a0c48ce649
+r7118 e3fc3506c7
+r7120 73ff6fcfc2
+r7121 99a8527292
+r7123 e205301999
+r7124 e29a183a64
+r7130 2f626674d0
+r7139 6900fbac1a
+r7155 e22bb2b053
+r7161 0a5724a195
+r7162 6c633ce6bb
+r7164 acc947a63b
+r7165 95fa0a32b3
+r7180 5fe735b785
+r7182 aec0da2ead
+r7192 1e768684d1
+r7193 a13de6568b
+r7205 d8cb3b071d
+r7206 ce72df2c02
+r7220 78d3bf3364
+r7227 1819fb81bf
+r7228 bf78330b04
+r7233 c495fbf365
+r7237 bccf5e8958
+r7238 63f4d51181
+r7244 8e1da29a68
+r7249 88cd71a283
+r7250 84d3c4385e
+r7251 2313191913
+r7252 ffa1aaad1b
+r7253 b6f7fcc730
+r7256 692ce7bc6b
+r7257 34b47d2a0b
+r7258 ef8d203f26
+r7259 a9595d49f7
+r7260 dab0d42953
+r7265 e84e21716c
+r7266 3cb424ab59
+r7267 0fb74cd584
+r7272 0b47ca3e5b
+r7273 ca8dccb135
+r7274 90451da3b1
+r7289 103fc2b13d
+r7290 8243b2dd2d
+r7291 62fb3c42e4
+r7292 2c7b0f4ced
+r7294 f4cefb4318
+r7295 5c41ae07d5
+r7309 365acfe04b
+r7310 274d395e6b
+r7318 c0f698a7c0
+r7319 ecf482f69e
+r7335 e8b399400f
+r7336 7435339ba7
+r7337 6b474101b9
+r7338 f8de30e27e
+r7341 5a94352a62
+r7344 1f5bd8a590
+r7345 12a9f76471
+r7347 b1e41df94d
+r7348 bffeaa0e04
+r7352 099e903658
+r7354 5a5f6faf05
+r7355 026286b7aa
+r7360 0015af7171
+r7363 ff1c68655a
+r7364 00afa24fb6
+r7365 dcb432cd6e
+r7371 f20335d190
+r7373 3379165fc1
+r7374 960380abbf
+r7377 2c77b8a0af
+r7379 8b8d0f844c
+r7384 fa472df87d
+r7387 3225458545
+r7405 6ce297f44c
+r7406 88a1448f33
+r7408 de29ef0ac4
+r7409 92dcada606
+r7415 7199ea34ab
+r7420 ea5e13cb94
+r7421 ef93d319a6
+r7422 4723a7ea5c
+r7423 8d5dc2f990
+r7424 5d3c21e6c7
+r7425 5911c61bf5
+r7426 8d547276dc
+r7427 bc4bd901b1
+r7428 703ba993c3
+r7429 bc46a1b536
+r7431 ddfe2e74ec
+r7432 332ab9f485
+r7433 5c11f952af
+r7436 8ab0305de7
+r7437 98e286c197
+r7439 c98f8ec742
+r7440 ac5aa786a0
+r7446 b9f274691a
+r7449 1685264f55
+r7451 f60573811d
+r7452 63c4d30252
+r7454 79432ad37e
+r7455 3f638fc27d
+r7456 4a0d4f42ce
+r7457 183bcec0b6
+r7458 45ccffe15d
+r7459 a31e6c4000
+r7460 953466de7c
+r7461 47d6dff4eb
+r7462 dbe346af1c
+r7463 c05a58bd34
+r7464 16b00da844
+r7467 f746ce36d8
+r7468 ef2de304b1
+r7469 6870553eff
+r7470 2aea310f9a
+r7472 541b260c65
+r7479 06ab9264e8
+r7481 ffffaf4910
+r7482 5cfcf82f51
+r7483 c039ddddee
+r7484 d83476284e
+r7563 696b5a858f
+r7564 07724cb4b0
+r7565 eec07f7431
+r7566 911ce1e4a5
+r7573 90bed7c3b6
+r7574 288d0481e4
+r7575 b7ff021e08
+r7576 673fb7e02b
+r7577 2a2f543db6
+r7581 f4ad01e291
+r7584 4311ae53e7
+r7585 4e6e4e17d5
+r7586 50d5f79bd7
+r7588 0ecead295c
+r7589 ed292b2b9b
+r7590 3a349e70a1
+r7597 5da7b0a395
+r7608 e3b1cc9130
+r7609 fa80c56a42
+r7610 757086a40b
+r7616 1918e7230b
+r7623 335de89b82
+r7625 f7a989f23a
+r7637 549c8a2a44
+r7638 cc2a602aa5
+r7639 cab784ad14
+r7640 23904f6355
+r7641 213addb673
+r7642 af9cd28006
+r7647 c49cb64a8a
+r7655 4150f7e6fc
+r7677 9040a1ea86
+r7678 8f660e3dda
+r7679 5e34cf4f88
+r7683 a15d1d617a
+r7684 69584d1e2f
+r7692 31adfc6cf4
+r7695 3be616edcf
+r7704 95ff3d2928
+r7705 0ab820501a
+r7708 a2f0ad4b7e
+r7710 f6bdc80cf2
+r7711 61441aa3be
+r7712 df73352fea
+r7717 b43c857900
+r7719 4d15dfcb12
+r7720 6e81dcdd8a
+r7721 715c838ebb
+r7722 a93415ff65
+r7723 52f4d88651
+r7724 ddbd7463f2
+r7726 e06f68204c
+r7728 78871179ee
+r7729 a8df0271a0
+r7730 4825d24dac
+r7731 fe6c954429
+r7732 cefd4bfbd5
+r7733
+r7734 8b2f809290
+r7735 cc71492e8b
+r7736 f79c3b7566
+r7739 682413c930
+r7744 b9a54c2751
+r7748 8b6eba1a9c
+r7754 7427ad1127
+r7762 28264ad218
+r7767 046c97346e
+r7768 4ba746e97c
+r7769 d8ee617600
+r7770 eee023674e
+r7771 de843e4a74
+r7772 1c43cfe216
+r7774 333b75ec32
+r7775 ae11503b40
+r7777 6e756ebf32
+r7778 016ff4c9ec
+r7807 66adf79008
+r7809 e5556bbbe0
+r7824 150014366e
+r7833 faf05d692e
+r7835 24bbfba338
+r7836 c024e21764
+r7838 5976124d73
+r7847 9ae456c484
+r7848 37cb08de40
+r7849 102c5ae99d
+r7850 72db375a73
+r7851 bae76d1be3
+r7852 7b6693a2a2
+r7856 ab1b5de53f
+r7857 f451a2fc8d
+r7859 39a1658065
+r7863 a605ab716e
+r7864 1b68ef970c
+r7865 e8f45e9296
+r7866 5bae313f42
+r7870 ca712dacc6
+r7871 5f49bdadcf
+r7872 dcf5715bee
+r7873 2de072f99b
+r7874 af68b2f871
+r7875 42bd0dce6c
+r7876 4857648d27
+r7877 d8dd12a551
+r7878 4f69e5325d
+r7881 8f94fcf948
+r7882 d6f40f58a9
+r7883 a00b0c60a7
+r7884 975a608b36
+r7885 8599693b3c
+r7886 37e0008c4e
+r7888 0f99d908cb
+r7895 8714d194ab
+r7900 769b33953d
+r7901 86a6c4afff
+r7902 b142c4376d
+r7907 28c125d3b9
+r7908 77b063b003
+r7909 001ce2371b
+r7911 7718b24e9d
+r7912 c4ad0fba91
+r7913 35adc1c48a
+r7914 e0f22af441
+r7915 adab52e289
+r7916 68159e91ab
+r7917 0be36c00e4
+r7918 5dd59f4127
+r7919 e670a7bb76
+r7920 ddad4e40ef
+r7921 8249292424
+r7923 e10bdf2f82
+r7925 913b2c9b3a
+r7928 a4e074308b
+r7929 640ea6fc45
+r7931 4d929158ef
+r7932 c1cc40e97d
+r7935 b444420b5b
+r7936
+r7937 2933e3f3cc
+r7938 fe05247881
+r7939 3fe40a93ff
+r7941 0d8b14c605
+r7942 8446c6c767
+r7943 590af0e4be
+r7946 a2dc3dd2c5
+r7948 37f32b6907
+r7949 013b381743
+r7950 a833d535ec
+r7951 189cd283fb
+r7952 b113b640be
+r7953 7aceef658a
+r7954 5da65359b5
+r7955 b1be740f87
+r7956 d0ff5e5680
+r7957 1ab98be85b
+r7958 f704035418
+r7959 511aa6f2e4
+r7960 6e372ca477
+r7961 9d39ff267e
+r7962 c3426a231b
+r7963 0282dda201
+r7964 059cda57f0
+r7967 48dd2c26dd
+r7968 4642751e0e
+r7969 777381a305
+r7970 7309056770
+r7971 b9e7cf28ee
+r7978 ef34b6a65b
+r7979 499580a1ed
+r7980 b39db081ff
+r7984 b301f8e867
+r7985 096390023e
+r7994 5a17c91819
+r7999 972ecebb27
+r8001 0b424f22db
+r8002 74c681cb2d
+r8004 aabf6ed2ab
+r8013 a31a1a0c7e
+r8014 e8a989b914
+r8015 af8c15ce25
+r8024 7fa3172f1a
+r8025 4757cf7f35
+r8028 b8a3d27064
+r8029 d16491f730
+r8030 df6069ed29
+r8031 4006064a64
+r8032 47e617d962
+r8034 af9961b0ec
+r8035 93da925b0d
+r8037 1df3ef081a
+r8041 53366074ae
+r8042 8aaebe5639
+r8043 51eb57e0ea
+r8044 cf9459eefd
+r8045 f467096ce4
+r8046 599eb475e4
+r8047 998bc939d7
+r8048 3860412af7
+r8049 b0e949a3cb
+r8050 f24e379577
+r8051 6204bc36f0
+r8052 cadfccc331
+r8053 fc17292454
+r8054 5a6a763157
+r8055 1292086fa5
+r8056 6f4b3a93cc
+r8058 e4f63ce252
+r8059 fe0436c6f9
+r8060 2568aebb5a
+r8061 4ac8886e43
+r8063 b34c9f524f
+r8064 3d7e84e6fa
+r8065 401d5eda44
+r8066 7d9e8c17bf
+r8067 fb129da251
+r8068 4308d3971b
+r8069 be158db7ec
+r8070 ce9000fb3a
+r8071 9024aa4405
+r8072 ac20cbb480
+r8073 f670f31553
+r8081 49ea811d41
+r8082 4b1eef7cf4
+r8083 6865be2978
+r8085 fa92e7b7e3
+r8088 b705622061
+r8089 9a39e68be1
+r8090 7632879f2c
+r8092 047b0657af
+r8096 2b77dc7e1c
+r8097 e6ef9af62f
+r8098 087920b5e3
+r8099 5d6cd01850
+r8100 8d6cbbead8
+r8101 f3c1d397f9
+r8102 41c92929fe
+r8103 02ab294283
+r8104 a78b5c7699
+r8105 02fb5be2df
+r8106 2906c10f80
+r8107 6147fc43c8
+r8108 ad9ac5a296
+r8109 ac87e36fdd
+r8110 e0c336f21b
+r8111 e4fc9bd2fc
+r8113 b53dced121
+r8114 8592375f95
+r8116 856c86e29d
+r8117 4080a760cb
+r8118 bafe025128
+r8120 cc8ee691af
+r8121 ed1dfe18cb
+r8122 87447d7723
+r8123 2caf315455
+r8124 2c430022e5
+r8125 6374945139
+r8126 1b41a79cb7
+r8128 a35c89a5e9
+r8129 57e11c6b35
+r8130 5c97c9e85c
+r8136 5e37103071
+r8137 ac83eb5c94
+r8139 5d3674cbab
+r8140 0b09e1d2e4
+r8141 5e3e15d1dd
+r8142 be488e78a9
+r8143 31ea434a66
+r8144 0f456bcbb0
+r8146 52b71a5564
+r8147 3f17e1b36f
+r8151 23e9172c99
+r8152 701558d924
+r8153 d31085b750
+r8154 cdc4595aed
+r8155 04d69300ed
+r8156 56ea4526d3
+r8157 56c803d9c5
+r8158 9d95c090f4
+r8159 7796d36f0b
+r8160 52ce2fb174
+r8161 c755b6a62e
+r8163 f964ab66d6
+r8165 5c25a05118
+r8166 78f9cc60cf
+r8167 90f48c4fbe
+r8168 bb6dc39a5d
+r8169 71158d0b59
+r8170 a39f873983
+r8173 3a0c15973d
+r8178 3e41f705d1
+r8182 ccbd600259
+r8184 068aa4e25a
+r8185 0f21d47d79
+r8188 b56a24bbc7
+r8189 cedd6024fb
+r8190 2146b9187e
+r8191
+r8192 ab1b368720
+r8196 d21d4888b3
+r8199 3fc6cbcbfb
+r8204 d0bc4a20d2
+r8205 43a5a26967
+r8206 1d6f2ff511
+r8207 045652a12b
+r8208 8e2649846a
+r8216 8ac5a042ec
+r8222 ab9c3ee71d
+r8226 4146327bbd
+r8230 30161140e9
+r8246 343c15fe83
+r8247 5bdedbd453
+r8248 3f8fefbe72
+r8249 b2455fcc38
+r8250 f901816b3f
+r8251 7b8adeb8ff
+r8253 adebb89dfa
+r8254 1f7c3208a5
+r8255 7eac52f2c1
+r8256 e44c3c3606
+r8259 982fab6e30
+r8260 88ba68ac7e
+r8261 d30f004a81
+r8262 e538d9afa1
+r8263 e753bc53ac
+r8264 b41132eeb3
+r8265 2edbb8c633
+r8266 1ab39df4af
+r8267 5b74d5d555
+r8268 1c873c520f
+r8269 9b7fbdfe7f
+r8270 f2211e34b8
+r8271 43109af479
+r8272 29fd527461
+r8273 dc344b2fd6
+r8275 cb62884e39
+r8276 a3be604378
+r8277 261ff3d0ab
+r8278 82fddf62e8
+r8279 198f0d25d0
+r8283 5363217748
+r8291 7e65f43f82
+r8292 9934175fad
+r8294 55561538cd
+r8296 8e569f7fb4
+r8300 474c32c2fd
+r8303 73fc9aef16
+r8304 58749ce64b
+r8305 89dba633f0
+r8306 793151ef07
+r8307 65c14d6dc7
+r8308 e40c9ebc96
+r8309 e87657e617
+r8310 d16fd45df7
+r8316 8ad24113ea
+r8317 c5c18aa57a
+r8324 4f25b17e9f
+r8325 96bf7d6c80
+r8350 33a9262563
+r8362 a035658a13
+r8366 eb9c91332c
+r8369 28113d4604
+r8370 9a73f4c8d4
+r8371 1bedeb3b33
+r8373 a959d0cd10
+r8376 c840d9f58c
+r8377 eb79135b97
+r8378 6f141280bf
+r8379 6d236c4abd
+r8380 9b88ad1f3c
+r8381 d03714058c
+r8382 dcc092f2ad
+r8385 8c39831d83
+r8388 67bdd4e52b
+r8391 0cad3ffca7
+r8392 ec74d7c7ec
+r8394 f6b48ea106
+r8395 279f7b6674
+r8397 1b39181c37
+r8401 75ee284f25
+r8403 49ee6e4ec4
+r8404 840911b8e3
+r8405 22a098bf7e
+r8406 80bfcf9e75
+r8407 1e9090374d
+r8414 e84cda0299
+r8415 a2cd7999f5
+r8420 d5aee9e7a1
+r8422 d283455a24
+r8423 ad4905c0ff
+r8429 5a90f0aebd
+r8432 8d9a6bb9b2
+r8433 7f3d535727
+r8435 1536b1c67e
+r8436 c4bc5bc26a
+r8437 87494e1323
+r8438 3197c82a56
+r8439 e15e544b09
+r8440 d75abefffa
+r8445 8e0d30f85c
+r8446 b795edec92
+r8454 9954eafffd
+r8455 4a8bcedf9b
+r8458 01dfdb2e3c
+r8466 9e4302fbea
+r8467 dd535c3645
+r8468 9050da7108
+r8470 1c6e546027
+r8474 7430aa1b4c
+r8475 796ed03186
+r8478 23992437cf
+r8485 0d2ad77479
+r8491 cd98806a35
+r8492 b7fdd69780
+r8493 0093ff481c
+r8495 94591f74bc
+r8496 111bd4483b
+r8497 e852d77293
+r8498 2c0c8cced1
+r8499 30da384983
+r8500 8a4c664b33
+r8502 da84919a84
+r8503 a8a2bc7ff2
+r8504 296bcdfcb2
+r8507 b5f66bdd72
+r8514 6d9e1774b9
+r8516 5a4ad1c3ff
+r8518 5e6c4e77af
+r8522 d156f34b93
+r8525 90a4be3747
+r8526 f52e6a6a8b
+r8527 8eaac02ce0
+r8531 bd0e709a7b
+r8532
+r8534 7cb834d07b
+r8536 927abec3b0
+r8537 30ed1a3702
+r8540 33637d5c2f
+r8546 5a8391bb88
+r8547 98c1cc7d1a
+r8548 31c48dcbf1
+r8549 c216472d2f
+r8553 cda2954e7b
+r8557 d82e9fa4d7
+r8559 3a4a6a3b66
+r8561 def54abfbd
+r8563 fe5b7a11c5
+r8564 fb7021c177
+r8565 b2079c3e22
+r8566 2119e3945b
+r8567 a89814eaf3
+r8568 bacd5d56f4
+r8569 132637e42e
+r8570 9ea0d2b4bc
+r8572 f81fd55cf6
+r8574 423649a208
+r8575 7936eb95cc
+r8578 93cb4fff0f
+r8579 082d6d6ac0
+r8582 4ba05a16c5
+r8583 4ed3ac6323
+r8585 82654dbf8a
+r8586 a202a68496
+r8587 cd2cfe1999
+r8588 b0399bd45b
+r8589 a131363221
+r8594 e5154da769
+r8597 f914e325dc
+r8598 0a4e7a8116
+r8599 238f90bea8
+r8600 2a73bd0f46
+r8601 d4c7abb9d0
+r8602 ba8044fafd
+r8603 da1c8faef9
+r8604 6cd505dba5
+r8605 d921798f07
+r8606 55f38ed459
+r8607 0482a0c416
+r8608 75df1caebc
+r8610 643711f83a
+r8611 f2b8a606c1
+r8613 206233021b
+r8616 f011bcedf3
+r8617 fe6e0afa5c
+r8621 ff389f693c
+r8622 5e60e37eb4
+r8623 82a4d4a8a1
+r8624 bd649c76b1
+r8625 d81428a287
+r8626 97980ef322
+r8627 3d449d9f66
+r8628 d0798c6b85
+r8631 a6279c2d91
+r8632 c0f1af1705
+r8639 3818926f90
+r8641 9a8e9075dc
+r8642 1237c52026
+r8643 540f1b3922
+r8644 9abe4fe735
+r8651 b4ea568bb3
+r8652 7c6c9c0847
+r8653 7165e8d40d
+r8656 dc97215ec9
+r8657 6387971d97
+r8658 91412ea3d4
+r8659 d1f14a8b11
+r8660 1874b9eba4
+r8662 3f3634c6d0
+r8663 29ac82f32a
+r8667 3f64a5e88e
+r8670 2e01209742
+r8671 0f3a8b5a8e
+r8673 01d4e3645a
+r8674 97257e8e6d
+r8679 13a369ce8d
+r8689 f72b4dfe46
+r8690 e51237b7cc
+r8691 d9be3828b7
+r8692 b3d9e27b95
+r8693 cc43126a20
+r8694 bc80f0fd79
+r8696 6cbc253b9b
+r8707 aafc72b3df
+r8710 5a5eb8196c
+r8711 a3b6a1de07
+r8715 5508808ef7
+r8717 a4b7c29804
+r8718 54a8dae948
+r8720 bc14c4aa87
+r8721 0e61f9c37e
+r8722 00ee529f42
+r8723 8bb69c4fa8
+r8724 2282fe8e8d
+r8726 3b48a0dbda
+r8728 3101d1577e
+r8729 655a7f3801
+r8730 39e6150851
+r8731 7bc38cea93
+r8732 e452b69c0e
+r8733 75beea5dd9
+r8735 5fab489bd5
+r8737 bfe7706220
+r8738 bf98eebc6c
+r8741 e40402803f
+r8742 9a45bd5bdb
+r8743 920e6a2e5a
+r8744 427c400d0e
+r8745 04871d8ee1
+r8747 f5934f7970
+r8748 c8964378fb
+r8750 8ee34d4036
+r8755 e3efde8ea0
+r8756 a2d886a301
+r8757 5656170f7c
+r8758 c12786087f
+r8761 d58bf70442
+r8762 96e5dc3d89
+r8763 4f4ce3a4f1
+r8764 d12123f57d
+r8765 ce2affc166
+r8768 e898539e93
+r8769 20aa9911d0
+r8770 40396d951e
+r8771 b628076f05
+r8773 b03888b4da
+r8775 9643a7ddc2
+r8778 8d98363504
+r8779 c6d2de5a15
+r8781 a3a8628edb
+r8784 2511000652
+r8796 0586e1b756
+r8797 8abd909119
+r8802 499d7f10e2
+r8803 60b3d90f81
+r8804 53cb459ecf
+r8805 942bb16fc5
+r8813 6c710d403e
+r8814 8d3d085f4b
+r8823 6ce056f31e
+r8827 1450735f97
+r8831 4831a456ff
+r8832 59b5c7d568
+r8833 e1327fc474
+r8834 dc398c6645
+r8835 3e985a3e8d
+r8837 9f013a7ccd
+r8838 02bf8fff18
+r8839 1c15235511
+r8840 f4f4e71387
+r8841 76faa29bb7
+r8842 7d72618b37
+r8843 93275f2d34
+r8845 7233c24d3c
+r8846 8b2e339813
+r8847 054f9fcc98
+r8855 e627aa0456
+r8856 6f6036e0d3
+r8857 02afba3bf8
+r8858 2404604f2d
+r8859 ac49199ed2
+r8861 9c0102e568
+r8862 c23c5ee74c
+r8869 6aba5aeae5
+r8870 137654bb3e
+r8871 5a4c34e338
+r8872 af995b1f8f
+r8874 0f6e140435
+r8875 dc2f206668
+r8878 2901639c75
+r8880 d7a4f76d25
+r8881 83b51eccb8
+r8882 5c21476c57
+r8883 717d95c978
+r8884 fa37aa44cc
+r8885 24284feee5
+r8886 42dc44dd52
+r8887 6a20eed594
+r8889 86c028b6fa
+r8890 fbc3a71a1e
+r8891 c986830f3c
+r8892 3863cb69ce
+r8893 705d9f23d3
+r8895 bff27eb916
+r8897 5f951ae316
+r8898 7096ee3e73
+r8899 bf18c37320
+r8900 64ed2090a3
+r8901 00a2c044eb
+r8902 7cd471c223
+r8904 c012f92306
+r8906 c1a76844ed
+r8907 b1b11f7221
+r8908 bd7a56937a
+r8909 6fe33e19fb
+r8910 97efa1560f
+r8911 2995f1a6a4
+r8912 de4eb301bc
+r8915 dcbcc29383
+r8917 2f0f432ebc
+r8919 507ce7c6b9
+r8920 8322730ecf
+r8922 61622e4255
+r8923 543c22858f
+r8925 aa9629c845
+r8926 9fc39d7b60
+r8927 096ef34f8e
+r8928 3bc241d399
+r8929 47f4077d2a
+r8930 1eb482f817
+r8931 deb79f8dd8
+r8944 472b09e0aa
+r8945 7dd216cef2
+r8948 a094bf3c2e
+r8949 27de825580
+r8950 4a26ab7d81
+r8952 1a3ed197d1
+r8953 bff6517f57
+r8954 dcfd04956a
+r8955 ec04190880
+r8958 af511469a6
+r8961 f1208fc000
+r8962 470f990722
+r8964 48946a261d
+r8968 5331ce87dd
+r8969 c95aa7344c
+r8970 4490aaef07
+r8971 0618391c55
+r8973 e909afb83f
+r8974 bcf35077a2
+r8975 2be267a788
+r8976 7cadf17a75
+r8977 b7ccb47d14
+r8978 59e15fd5f1
+r8982 07033117c9
+r8984 4af96ffd7a
+r8986 3475351c46
+r8988 00db012c72
+r8990 2cf278b25b
+r8992 c10e1f0cab
+r8993 4b0a5966df
+r8996 458d3d10cf
+r8997 bcac3c45b2
+r8998 f44bc6e066
+r8999 d1053b01cd
+r9000 f3f8f974bf
+r9004 f9e5afd36a
+r9005 118050a7d7
+r9007 42744ffda0
+r9008 0e0a4aa446
+r9009 61d092d582
+r9010 615d92649f
+r9015 52a66ee1f7
+r9016 c5af8e01c6
+r9019 54a3755e36
+r9022 d6753d1eda
+r9036 2f2e82a9c3
+r9037 d3462e7f50
+r9038 83fcb4da4e
+r9040 2f5a1ddcde
+r9041 7e705baa34
+r9043 505644abe4
+r9045 8526940f15
+r9049 50788d5fff
+r9050 44b5456706
+r9051 2fd723d1cd
+r9053 3f8b526dd8
+r9054 2738fdc2ed
+r9055 43949e44b7
+r9056 4a56a364a4
+r9057 21808a3d77
+r9058 91eb4a0982
+r9059 fab8b6d5c1
+r9060 17aff1c1fb
+r9061 bd1dd90121
+r9062 d42b02b092
+r9065 f7df490b13
+r9066 a2912abc26
+r9067 3554798475
+r9068 31e93255cb
+r9069 a7a95ea3de
+r9070 009442ef0b
+r9071 5c642cbca2
+r9072 d8e8ab6a9e
+r9074 61723a8f72
+r9075 948b1a53ea
+r9076 f28285cee7
+r9077 640ecf38b7
+r9091 7ebc41cda2
+r9092 247577e966
+r9094 dd9a27c37f
+r9095 e02fed8e7d
+r9097 8d82ebbe36
+r9098 ee7252af47
+r9099 5352638cee
+r9100 37b3648e30
+r9101 1ccd9b6ced
+r9102 6c9163849c
+r9104 68c6e531f4
+r9105 c0d0290379
+r9106 39ac777cdd
+r9107 1c1e6d0fd8
+r9108 82ee25df5d
+r9109 8b0cd5cf7c
+r9110 257a1845d3
+r9111 40990494c7
+r9112 79705f3dbd
+r9113 4749c3d31d
+r9115 f187d8d473
+r9116 3e1e1e91bd
+r9117 b6f68a6bcc
+r9118 6aa668e0f4
+r9119 d9ba6d6db9
+r9120
+r9121 6b142c2217
+r9122 12ef0e1272
+r9123 a071695837
+r9125 2b3c8f9449
+r9126 d433eb14c8
+r9127
+r9128 f25687d77f
+r9132 e7042a30c6
+r9133 69b4ee3b28
+r9136 8d006d8cba
+r9138 f03c47f101
+r9139 bc752a61fe
+r9140 bce3c6001f
+r9141 0f2a6c8bba
+r9143 f8680fc2b1
+r9144 2c670cb8a2
+r9145 4819d0a6a4
+r9146 c3351baaa2
+r9147 23f68d5b13
+r9148 09369019c7
+r9149 9d507e381c
+r9150 1e23988361
+r9151 8e56e0e55b
+r9152 a4e49ea5ac
+r9153 afb51786ac
+r9161 8b5680aa83
+r9162 69583d89bc
+r9163 516f06d7bd
+r9164 e2e0a9488d
+r9165 d8de14d630
+r9166 3125604fb0
+r9167 7632c7172d
+r9168 63d618b20c
+r9169 84089c19ec
+r9170 5c2004c074
+r9171 1e1a2160bc
+r9172 4fb358b4ae
+r9173 69a0c3e30a
+r9175 c3ff16d17e
+r9176 c8b7f16b10
+r9178 939774370e
+r9185 f18a26d8b9
+r9187 ea64259fa0
+r9189 aa93e6f3b8
+r9190 f7e5d9d0af
+r9191 398e777ecd
+r9193 aecb341d73
+r9197 064217d20c
+r9198 c7a3100b08
+r9199 a90beca18e
+r9200 12014a82a3
+r9209 46ff81bfd5
+r9210 8ad9636a32
+r9213 39a00243c5
+r9214 e46598c089
+r9215 61f333639f
+r9216 25b1b24c08
+r9217 de92a193eb
+r9218 9a326616b4
+r9220 0d7fcb4422
+r9221 7faacc7b75
+r9222 f165c87a43
+r9223 166fc61a6e
+r9224 7b06546f88
+r9226 e008a32fb9
+r9228 6889ff9726
+r9229 ac255eaf85
+r9235 f3047df95f
+r9236 bb30761427
+r9238 1a98bd7b47
+r9239 97f3e8050e
+r9240 b00a1198aa
+r9241 f1bac69903
+r9242 dff1d96421
+r9243 96c144a972
+r9245 a18c1441c6
+r9246 8e2cb2b07a
+r9247 d0dd6b7eee
+r9248 258064826d
+r9249 66b7fe95d2
+r9254 15a20c680c
+r9255 b15e16ecc5
+r9256 cc9e329bff
+r9260 25896b2d55
+r9261 17c14587cb
+r9262 1ef41016b0
+r9263 4a530112eb
+r9264 41b2863d8d
+r9266 d26dfbdf59
+r9267 821551dd7f
+r9270 cb7711db82
+r9272 466db7220a
+r9273 9f54fe8bd0
+r9274 23c02cb584
+r9275 2538bfa058
+r9276 837661837e
+r9279 aecb355ecc
+r9289 cbd2f9d216
+r9290 7106a3e0e1
+r9294 f6183ef4b0
+r9295 5131de0a0b
+r9300 02a20e546d
+r9301 4aeee87b5d
+r9309 f05f4846f1
+r9310 63ceabef32
+r9311 54ad97b77d
+r9312 216f8bf4c2
+r9313 4a2b662fa8
+r9314 87d1a060ea
+r9316 1b1040e91d
+r9317 6e5b3945dd
+r9321 2a19832b23
+r9323 f7e598a6a9
+r9324 4af77453d4
+r9327 5cfd4f2b9e
+r9328 25133fac5d
+r9330 adf238e0db
+r9331 663b3ae5b8
+r9333 b72b10f883
+r9334 8b0dd2aa7b
+r9344 ee04f80964
+r9346 5baf3e447f
+r9359 f814b3162e
+r9361 ee8ff73b74
+r9362 b09e4cd1c6
+r9363 327b87d1c6
+r9364 75327922b4
+r9367 51d3afbb1a
+r9368 90da470006
+r9369 fb4eff8638
+r9370 54bb9363cd
+r9371 24561d55b0
+r9372 086f1209bf
+r9373 41d22eefca
+r9374
+r9375 9eb3282d5e
+r9376 2bf8bc108b
+r9377 e150c1dd7e
+r9379 722d82d18a
+r9381 23a59cf629
+r9382 2cd214e5fe
+r9384 6538ff2bea
+r9386 ccf513fe44
+r9387 e56d401f6b
+r9388 1e1dcc614b
+r9389 c8a05b45e0
+r9390 61b77f31e7
+r9391 06303e5d5b
+r9392 0774603396
+r9393 686571753a
+r9394 61ef5c893f
+r9395 c5e9360725
+r9398 6c468e8927
+r9399 77708ded5e
+r9400 899a7adfe5
+r9403 0f20a51754
+r9404 42f868bcea
+r9405 5a2f21ce9a
+r9406 6981bc62d7
+r9407 c50a0d2295
+r9408 bc94a338c0
+r9409 9629051686
+r9411 04fe2f9bde
+r9412 50c411759b
+r9414 f9da023c4e
+r9415 cddb243ff6
+r9416 a72d88c271
+r9417 f8b32f27f6
+r9418 8809b3edf2
+r9419 e566bd874b
+r9421 6337248afe
+r9422 10213bc9e7
+r9423 78db4cf7fe
+r9425 ca3a272ce6
+r9426 f34865ed00
+r9427 1c33235d8c
+r9429 959f3bc8f8
+r9431 a42ba53668
+r9435 cfe4c7ffe6
+r9436 18a55e8190
+r9437 6474945c60
+r9438 6090bd2328
+r9441 4fe80dadef
+r9443 e7b3599186
+r9444 9924a48040
+r9447 8a9719c222
+r9448 4f6a14b33d
+r9449 4d85fb1278
+r9450 4cb43c7788
+r9451 6c347e3971
+r9452 a3ffb394a4
+r9453 6cffd12cb9
+r9454 ccb5bd1da8
+r9455 40a4987f45
+r9456 f1f6f2b233
+r9457 db6ceead4b
+r9458 98f71af784
+r9459 525018c3ca
+r9460 67dfced37f
+r9461 0988bfd2e3
+r9462 52bb1b64db
+r9463 80eb08f5a1
+r9464 7806f3f00f
+r9466 7eadbd48c7
+r9472 3654e5334b
+r9473 fdb2a89495
+r9483 8a193daf23
+r9486 a0f6d27d54
+r9487 4b8520e5ef
+r9489 cb3adcfb6d
+r9490 1e5fd9b56a
+r9491 af8af21c94
+r9492 e794df0beb
+r9493 593deb5d50
+r9494 64c81890a5
+r9495 0c657593da
+r9500 a64a94ca52
+r9502 5916f8929a
+r9503 9551ed8f58
+r9504 8de712dd91
+r9506 8f3171f840
+r9507 3bb7dbfe4d
+r9509 8de6f6fe13
+r9510 0d16edd1ce
+r9514 f1e0492155
+r9515 60231d47f3
+r9516 f50f40c2df
+r9518 95c592e4b7
+r9519 39eba8638c
+r9520 d26f9ec822
+r9522 e74806422b
+r9525 dd230b0b1f
+r9526 635b88be42
+r9529 eabd3ebf0c
+r9530 5384cea22b
+r9533 44348b4eb4
+r9534 b360756b02
+r9535 c633e28b40
+r9536 7ed033caf3
+r9539 2820d1ff44
+r9540 8c2a69d14e
+r9541 8c84ecf771
+r9542 9e3b5c094b
+r9543 bfea9f20d2
+r9544 0ca21a0653
+r9545 02a45e20bb
+r9546 a961d3dcd6
+r9547 5b72bfcf91
+r9548 ce6fd61e24
+r9549 344ba095e1
+r9550 d0193043d9
+r9551 fcec4e056e
+r9552 d1042e7f42
+r9553 78d2e50495
+r9554 29da7050a8
+r9557 d4b2af5aaf
+r9558 3f748a40b1
+r9560 735573067a
+r9561 a3d868bf57
+r9562 114bfa60ec
+r9564 96248ae593
+r9565 279cdcb498
+r9566 2f6d0cf0fd
+r9569 dae92a5589
+r9571 7931d3dbaf
+r9573 210fdccbfb
+r9574 114aeb4622
+r9575 6835f1377b
+r9578 f75cbd338f
+r9580 1828ef4310
+r9581 b6df86923f
+r9583 8b51007563
+r9587 181cefa872
+r9588 d1d980fd2b
+r9589 cfb8a3bb3e
+r9603 003f7e2b70
+r9604 f3cf054432
+r9605 6f5749c792
+r9606 e1bfe57368
+r9610 3f41a604a3
+r9611 8190c6b5da
+r9612 8bb851f21e
+r9614 f41ccda10b
+r9615 9453e0350e
+r9616 96376cd154
+r9617 6093bbedc0
+r9618 cf5b53633e
+r9619 4c0d1ef392
+r9620 767bb1b875
+r9621 81d2963d4c
+r9624 0d6d353412
+r9626 d3cc8c2190
+r9628 6b0dcb2052
+r9632 2bd3ff37df
+r9633 01f4bb38e9
+r9635 1c2ab2bf73
+r9636 a27223c2f1
+r9637 aeb2770ea0
+r9638 4aa9c242f1
+r9639 990a28f37c
+r9640 cc4427befb
+r9644 e5a7cc3149
+r9646 509d09ebaa
+r9647 8efcc63042
+r9648 69001ca4f9
+r9649 e1d945a2ed
+r9650 e97fb47f7c
+r9652 d932455a65
+r9654 903fc11979
+r9655 9e27208eae
+r9656 e4282e0148
+r9659 9e58ed4d39
+r9660 cf7c5917c9
+r9661 ec85d6ce0c
+r9662 2836cba04c
+r9664 0e974bb373
+r9669 6c4b4f89c8
+r9670 e3e918acdb
+r9671 9e5f776d68
+r9672 dd7f9edbf1
+r9673 ea260cc63c
+r9677 d429702dc5
+r9678 4cc8ccb5f3
+r9680 634c658057
+r9681 18e6056e06
+r9682 635a7663d7
+r9684 76d0d7ad84
+r9685 8acb41bd0a
+r9687 cfe333853f
+r9690 016ff2d134
+r9692 c9f419ea7c
+r9703 634195f784
+r9716 e6fe93e5b4
+r9718 d915a97c87
+r9719 6d62e86ec4
+r9720 453fdea8ba
+r9721 a8835495d4
+r9722 251f5ce1a6
+r9723 1cbef2171c
+r9724 0ef0f40ae3
+r9725 b7b7d30add
+r9726 57dd329199
+r9727 f8a6425c9c
+r9728 ea6777a4ea
+r9729 3020baca77
+r9730 dd50828fda
+r9732 d169735abb
+r9733 11bcf8ef29
+r9734 10f7cd80e3
+r9735 44d630b0ce
+r9737 803488f3e2
+r9740 273be26c40
+r9741 8c752c3af8
+r9753 3178d341be
+r9786 d684e5c071
+r9788 5833fcd8c7
+r9789 ebdcd61b65
+r9790 2937f4ebca
+r9791 0e14716756
+r9792 fba3480e73
+r9795 8c38668c95
+r9798 c1822e42d2
+r9799 434d460454
+r9800 d3d12d547f
+r9802 cf5d275c67
+r9803 2f4c6a2eb8
+r9804 4a64ac9c7b
+r9807 2aee8120ee
+r9817 e3099c24bd
+r9818 9e9adeedf0
+r9819 eb0969baed
+r9820 607c9b39ae
+r9821 97e6e4eb27
+r9822 bf075cd7bd
+r9823 0ecbad689c
+r9824 cc77b573c3
+r9825 f6f011d167
+r9826 32e3f2cafb
+r9827 e566c7126c
+r9830 485a79aa79
+r9833 a116937649
+r9835 47fd02fe68
+r9836 d69bbfb031
+r9837 8a7e78ded3
+r9838 0d9b416b66
+r9839 919caa4646
+r9845 1a605eefa6
+r9848 039e982182
+r9849 29f933b60a
+r9850 df3c09479e
+r9856 6a440b960c
+r9857 9edda0088d
+r9858 07c368dcdf
+r9859 8c1bbafee4
+r9860 7cc5c06947
+r9861 ffa9da234d
+r9866 828377d9c0
+r9870 3eae42f4cc
+r9874 e92807e312
+r9875 4077f04935
+r9876 100951d187
+r9877 39d6b1f81e
+r9878 50ce776c18
+r9879 611f454168
+r9880 fd8dff6dd8
+r9881 15fc37931a
+r9882 195dc6ba17
+r9883 7482239527
+r9884 9304e2a7a6
+r9886 912077c5f8
+r9888 2f4f3d3db7
+r9889 b277d15d25
+r9892 89e9d67df8
+r9896 f54efe4dc3
+r9897 56f672fec1
+r9899 a27f9a3b43
+r9900 f1c170f25f
+r9907 658bc3c447
+r9908 31365c2ab0
+r9910 e8df51ba07
+r9912 108db60672
+r9913 e3b4286533
+r9914 852ff5c25c
+r9915 15d4afe9eb
+r9916 29162dae26
+r9917 60b6ba084f
+r9919 9be1288dec
+r9925 67cf4f5e32
+r9926 f045549b48
+r9927 17f1716229
+r9928 b20668b85a
+r9934 7adcd11916
+r9936 152563b963
+r9937 408c6fe6c5
+r9939 04cbd87417
+r9940 cc20f5fbb5
+r9941 176e869db3
+r9942 107e2e6a5b
+r9944 3c8bde9170
+r9945 242afcdafd
+r9946 9674b0e81d
+r9951 c470f8cca0
+r9953 110a1d0cde
+r9954 f2ccc14292
+r9955 37dd5e78a7
+r9956 c96ed0ccb8
+r9957 38522bbe95
+r9958 d7da5b7e4f
+r9959 258591edca
+r9960 d7dc0ad355
+r9962 94e3a13f24
+r9965 b3a20024cb
+r9967 ed30031b5c
+r9969 41fefebc73
+r9973 78ac90b85a
+r9974 bd426ab6f9
+r9980 e5b3a8a6b4
+r9981
+r9982 979180ca5f
+r9990 0af30e1b77
+r9996 d1cc9d42c9
+r9997 142560739a
+r9999 100b76d0f5
+r10002 5c8c46664d
+r10005 6e23c62953
+r10016 0e94771489
+r10017 77ca805c39
+r10020 8799272ad2
+r10021 5585e3de50
+r10028 6c26499a9e
+r10030 1614f42a20
+r10031 2a27ffb80e
+r10032 d710f4e615
+r10033 969f759310
+r10035 ce7fe41d5f
+r10036 68508bdd74
+r10037 0647a263be
+r10038 7b006dc22e
+r10039 f1e1fcc733
+r10041 53c115ff4c
+r10044 fabe192ccb
+r10048 603ef144ed
+r10058 c71d5e24e6
+r10059 4362112a7e
+r10060 1d856cb047
+r10061 5db82b1e48
+r10070 45bcd02f6b
+r10071 199cec961a
+r10079 3e829735e9
+r10082 56483c663b
+r10083 5c7809eab4
+r10085 cb7f66a22c
+r10086 914932babf
+r10087 316228912b
+r10088
+r10089 b4a6ccf033
+r10091 fca1d7499a
+r10092 c90bd2b598
+r10095 790842fe30
+r10097 b31ceb487d
+r10101 f55b965036
+r10103 b94b6f9af6
+r10104 853b9424e5
+r10105 02e108bcf2
+r10106 7be3105727
+r10112 016811518a
+r10113 8c8bc695b7
+r10114 9f926a9e1e
+r10116 e30503f100
+r10117 8cd3a8fcd5
+r10119 afbcca7ccf
+r10121 5b971182c0
+r10122 f14c3081b4
+r10123 3faf31102b
+r10128 5a435856c7
+r10131 02488b6797
+r10133 54f0202e29
+r10134 0b433a78b4
+r10136 79e3814ced
+r10137 e0dde41aec
+r10142 28f747a2c1
+r10145 08373d4e92
+r10147 a2fced5b2c
+r10149 e37a942060
+r10150 27c0faf35a
+r10151 a13f7c0a01
+r10152 2867ff421b
+r10154 3ab5889983
+r10158 f341b97e0b
+r10159 e7c9deb053
+r10161 48d8a9ffdb
+r10167 32176ac4d3
+r10168 614ebd7eea
+r10169 327f88d168
+r10172 12a3b4c5ff
+r10175 2357b792b4
+r10177 83d75b3bdb
+r10178 e63cc07f6d
+r10181 a1c8763976
+r10184 61b2debaa0
+r10186 d3d697a9c5
+r10187 cac2dae195
+r10188 a5abaf7350
+r10189 df922513e5
+r10192 e46e66a019
+r10193 c5455747a9
+r10194 3a352874f5
+r10200 6fab83741b
+r10201 c09dd61567
+r10202 0d03cad115
+r10203 2c11ab6c75
+r10205 3f05775fad
+r10206 9529a25ac7
+r10210 2d80ade773
+r10213 93119cb1e7
+r10216 69a8cebb64
+r10218 9c97b8998b
+r10221 70e2162afe
+r10222 4ba667134f
+r10223 b0d5f49209
+r10225 198906fb11
+r10231 687e65fb3c
+r10236 e69db0d97f
+r10237 76ed03005d
+r10238 e46fafa41e
+r10239 a41182e5fd
+r10241 5303be8340
+r10242 3269ad2aff
+r10248 acacbf69ba
+r10253 a0476f0882
+r10254 b213b89017
+r10258 60d600e1a1
+r10259 8514f85695
+r10260 f7fd780877
+r10261 1693661295
+r10264 fe174ed6ed
+r10265 3e35bb3888
+r10268 0790935d10
+r10270 c054287dd8
+r10271 f7567ab635
+r10292 ab63846918
+r10295 87db2fe784
+r10297 e1d57aae22
+r10307 439588db95
+r10310 661c602630
+r10311 d8448a3b19
+r10313 31af03b80e
+r10316 c0ab376dd8
+r10322 bc89afad4f
+r10323 0eb1877191
+r10324 f947c1f55f
+r10329 2bca03b1f5
+r10334 a1e615c925
+r10338 6e53e14f4d
+r10339 0ad5e129f3
+r10340 e8540b5d78
+r10342 611228e4e8
+r10345 8a799ca985
+r10357 16bbef1243
+r10358 30b12a8240
+r10359 53dedee225
+r10362 73d2dd4ed4
+r10363 5b99970b27
+r10364 e3cba876b8
+r10365 6d93465512
+r10366 3d4d7ce3ef
+r10367 5de3ead55f
+r10369 00a38096af
+r10370 5015b73da1
+r10387 bf280fbf45
+r10388 3ee224f431
+r10390 cb08c06766
+r10391 2b00fe2592
+r10394 50bcf69e3f
+r10396 a600ff64fb
+r10397 a694dd57cc
+r10401 bce0953662
+r10404 33098727a1
+r10405 d848220eca
+r10407 df63d8e2f8
+r10411 3c1e6d6ce3
+r10417 9715d09e80
+r10420 699c6045ff
+r10436 1052ad2f1e
+r10437 6a2134b1b0
+r10439 8e890c848f
+r10440 c61121a813
+r10441 0c96403c27
+r10442 f70a92677c
+r10443 8d3c44cfb9
+r10448 06e94608cd
+r10449 2dcc3776f9
+r10455 0196b0e057
+r10461 800ce668ac
+r10462 18bf680266
+r10463 058227a666
+r10465 4c5b8cd11c
+r10468 44678c37b1
+r10469 05db77de0d
+r10475 f0fb641bf6
+r10491 f0a0e0cbe6
+r10492 b809bf2730
+r10495 e06381565d
+r10496 156137e600
+r10497 16a3288cce
+r10498 96fd088973
+r10499 2464205e53
+r10502 22fa993389
+r10503 b7cd34eda4
+r10504 98f2f761c7
+r10512 7afac73a71
+r10513 9347b21b29
+r10514 ebde4dd2e1
+r10515 4827f6b33f
+r10516 48eef96556
+r10517 78e8d1aef2
+r10518 5752dd02e2
+r10519 5fc1ae9658
+r10521
+r10522
+r10523
+r10525 c5ebdc8ee5
+r10531 735025859b
+r10532 e0e0326182
+r10533 e2ec34436e
+r10534 d27455c099
+r10537 1ce961f61e
+r10538 831cb380f1
+r10541 dae0f5a9ef
+r10547 ed847eaf75
+r10548 31a6f4e932
+r10555 a4d94d427a
+r10556 8062384325
+r10557 43185f20f4
+r10558 bccb84e1e4
+r10559 7ace623b84
+r10560 eabe0b477d
+r10561 1ab4fbc3b9
+r10565 b592e914f2
+r10567 207d07dae7
+r10572 3b317f928b
+r10573 c1d1fec365
+r10574 b739c4a2ec
+r10575 208678a0c1
+r10576 4b37b5a01c
+r10577 098db0fd0b
+r10579 b1a3187949
+r10580 8eafa3496a
+r10583 7e5c5cdec0
+r10584 ce525b28b0
+r10585 8d4f8da5c9
+r10586 571734806b
+r10587 6cf170624d
+r10588 31458cbaed
+r10590 9cfe96ba63
+r10591 4d8b3694b3
+r10592 3bf0245b79
+r10595 43933f0a88
+r10604 6948de6a3d
+r10606 0769f64764
+r10607 db913d614d
+r10608 8e54a0f8c7
+r10609 7f3c7c3924
+r10625 51d9edec14
+r10635 7674f974c3
+r10636 b5b3ce4df6
+r10639 289fd3d730
+r10642 e1c732db44
+r10643 10a651a13c
+r10644 f96b6beefc
+r10648 9f27eacd5c
+r10649 4ae344ed1c
+r10650 1f2a73f36b
+r10652 c5861d8243
+r10655 1f2b7055e4
+r10657 10cbf9df8d
+r10658 88a5144bb6
+r10659 e732589d1d
+r10660 28d40d21d0
+r10661
+r10663 ba3e6db5b8
+r10665 9c90fcb0a5
+r10666 01191c193f
+r10667 ef8581a8f1
+r10669 34856ebaec
+r10670 5bb26aa18d
+r10671 b519e9c792
+r10672 837c8180bd
+r10673 5d449bfbc1
+r10675 eecb5e5c4c
+r10677 ca330cfd2f
+r10680 0ddd974516
+r10681 9ea852e2a5
+r10682 6da6345af2
+r10683 a4bc6dfce1
+r10686 62cb8572fa
+r10688 d08a0445c1
+r10689 3d9d369b71
+r10696 24eb581d80
+r10697 f0cde59118
+r10701 6f84d4088e
+r10703 c0ace80be3
+r10705 17227e1b2e
+r10708
+r10710 2383a5b941
+r10711 44a06ff6ab
+r10712 02550b2be3
+r10713 b66389b2f2
+r10714 9a17c7fb08
+r10715 7f0f5d6586
+r10716 cac4c47b3a
+r10719 14c88ba747
+r10722 dd8c18716a
+r10724 c744cf80a6
+r10725 755fb899e3
+r10726 b1c47f7bfa
+r10727 8625a87820
+r10729 46a32e94ff
+r10730 a7da970fa8
+r10731 2b00a2580c
+r10732 d3529d9a6e
+r10733 5298d7cde0
+r10736 b92ecfcbd0
+r10737 ea0c3a7ce9
+r10738 81cc9a834c
+r10739 e43c7bef06
+r10740 2ef5d4c6d8
+r10741 d934e21d46
+r10742 4efd6eb726
+r10743 43b3b98924
+r10744 807b7a27ed
+r10746 5b834e801c
+r10748 28edfc1109
+r10751 a87d9a20e0
+r10752 2f4064efbe
+r10753 7b2bdb4e75
+r10754 ed8f3f0b9b
+r10755 5cc62b4e5c
+r10758 8efd925c73
+r10759 ddaba1cd98
+r10760 2e68f5404a
+r10761 5daae64bc6
+r10762 b33aca6a2f
+r10763 3fb252a904
+r10764 c98ed47ebb
+r10765 af87cfc074
+r10767
+r10768 30cac1fb06
+r10769 444b8a7d2e
+r10770 27176e165d
+r10771 78c3aae673
+r10772 9b21354635
+r10773 d6969c4b5d
+r10774 5c8a5ba86a
+r10775 96ac0066d7
+r10777 821fbc5a87
+r10778 223060bfa9
+r10780 6efa3eee11
+r10781 b0c55e3bf3
+r10782 e91bb354f4
+r10783 97f23516de
+r10784 a9cc141f19
+r10786 dd225e9a25
+r10787 90c68e1914
+r10788 bd7866c794
+r10790 b8fc3bed09
+r10792 b1936ece49
+r10793 50c0a1ee2f
+r10794 e4c282d9ef
+r10795 1f5dfbd7a6
+r10796 5e8888920f
+r10797 532c0efeb8
+r10799 e0eb99500c
+r10800 55dfd6ad55
+r10801 24cbbd1ede
+r10802 fef68d7c3f
+r10803 2647716232
+r10804 437535a2de
+r10805 4707025099
+r10806 9577df98ab
+r10807 9015f58e12
+r10808 2c2a0807ed
+r10809 fba880aba9
+r10810 7039753da9
+r10811 0484e57e04
+r10812 5c21feb3a0
+r10813 a11f6935e0
+r10814
+r10815 26f25f8d88
+r10818
+r10819 e9bd1a35e7
+r10820 58c64abc66
+r10824 04034834f5
+r10828 789bf97c72
+r10829 218c5a8223
+r10832 775cd7b80e
+r10835 b7e87847c7
+r10838 99630e2937
+r10846 8d2349581f
+r10862 b1d8840877
+r10865 21c8ba1830
+r10868 a7f0266287
+r10876 a08e8f2d88
+r10878 cc8d4298d7
+r10880 f9454ad5ce
+r10885 12a2b3b7eb
+r10887 7f27845c6d
+r10888 9227a0f86a
+r10890 10aa201700
+r10891 750e57765b
+r10892 03f09f244e
+r10893 7f42043da3
+r10894 c90d5a4256
+r10895 838b1dea8d
+r10896 cfffbfed68
+r10897 4c272b0d3e
+r10898 2aa6c12894
+r10899 cf626598ea
+r10901 86e18d84dc
+r10902 28a1d779aa
+r10903 1cc06bb697
+r10904 2043c0ab21
+r10905 6041bbcabc
+r10906 cc7c6431d5
+r10907 99792adaf6
+r10909 034bc4be40
+r10910 427c20e5e0
+r10911 d6369095cf
+r10913 dbce4463e8
+r10914 d0ac19940d
+r10915 d977e2f2fa
+r10916 61da956077
+r10918 58bbb60b30
+r10919 c6c0b840e0
+r10920 22cd83b16b
+r10921 8feb3e75bc
+r10922 b5adf7938c
+r10923 f48b473795
+r10924 c57c0d9c77
+r10925 c74fb39638
+r10927 879b7baeb0
+r10940 41b90d5719
+r10944 d1c4f9e32b
+r10946 7cf6a80044
+r10949 97caf77483
+r10951 b927a915b0
+r10953 3c13a0fe5f
+r10956 93d985632f
+r10959 e70118f238
+r10960 c126ff44ca
+r10962 e5813a6b34
+r10963 5be9ee0305
+r10965 9a0804557c
+r10966 917449a634
+r10967 4f41a69e99
+r10968 c1e09aa0b3
+r10971 86eaf4945b
+r10975 fbccd6a318
+r10977 96a817da9a
+r10979 058d18cdf1
+r10980 cc89987935
+r10981 dfa271755f
+r10982 a39d99f668
+r10987 fb248a8ec1
+r10989 ae0a3254e1
+r10990 48c9a76728
+r10994 b40e3b35ce
+r10995 fb649f4f34
+r10996 306a954005
+r10998 f6c3ded42b
+r11010 59ab197fef
+r11012 95d627ef59
+r11013 fbb5dde4e9
+r11014 328e57f430
+r11020 f54b2bded5
+r11023 bb7d698d97
+r11025 1e07cd1829
+r11026 e20a23f7e4
+r11030 ebc5e580fa
+r11031 690f288e45
+r11032 6d23621bb9
+r11033 d893f45b6a
+r11034 312aeb8639
+r11035 8de5ae2b13
+r11037 9450c16f19
+r11038 47c5f0f3ec
+r11040 8b952a85bb
+r11042 aa5655211c
+r11047 578b99d3a6
+r11048 d83897f0af
+r11052 25ac436d71
+r11054 bbe0f5e228
+r11055 c4181de5eb
+r11056 8d6dced8a0
+r11058 28972eb1cb
+r11060 cf9f9a663f
+r11062 58f003be77
+r11063 dfb9046387
+r11064 73b2db5db4
+r11067 1f65685c96
+r11071 31cb1f9613
+r11072 6a33d831d2
+r11073 6014888a9d
+r11074 c2f7d03d50
+r11075 82d419c00c
+r11076 00736e1092
+r11079 4c1f8c3810
+r11081 0e8ad86aa1
+r11082 28cd5c6e5e
+r11083 b0e9768e07
+r11084 b367d6e32d
+r11085 e2e090d4e2
+r11086 0bdaec07d8
+r11092 9ddd12335e
+r11093 d8e5596950
+r11095 a43e6b1242
+r11096 72b474d620
+r11098 77863427ae
+r11100 ef2279df3d
+r11101 c4ad383220
+r11103 5e9a42a481
+r11105 6c4a4d5de8
+r11110 e8447205a8
+r11111 6df0408f3c
+r11112 d7ebd599b9
+r11124 1cc0156eb6
+r11125 34289c430a
+r11126 0be9c5a52c
+r11127 9c91674927
+r11132 22a8618b48
+r11133 fe55fa336b
+r11134 02332d4a07
+r11135 9d76f980c5
+r11136 2cab50f0f0
+r11140 b9cfe254ac
+r11141 01e1d5902b
+r11142 d13cbc73c3
+r11787 c4df28255a
+r11788 02a1f9afa9
+r11789 9ff91b5986
+r11792 e9002c674d
+r11793 0ceb9c1c8e
+r11794 977d703857
+r11796 fcbd0bfa8b
+r11798 f6eb33a216
+r11804 8813209807
+r11808 c5b9e36ca3
+r11809 377310315a
+r11810 eeeb68228f
+r11811 e639f232ec
+r11819 f800661f1d
+r11820 4dc7be4b10
+r11821 e9dcc56639
+r11826 cd0434d530
+r11830 fcc4d182dd
+r11831 6ea7d123d3
+r11832 c8ce38cb14
+r11833 0d18ad8861
+r11835 6018a932ce
+r11838 8397c7b73c
+r11839 d6be8533ee
+r11840 c2a6b222c1
+r11841 64bd32b141
+r11842 dcabbf90df
+r11843 57a569ba3c
+r11845 e45535592a
+r11846 fa4aaf9bcb
+r11847 6712cfd277
+r11854 31d539218a
+r11855 ca6b2dcd81
+r11856 661a599ed6
+r11857 546d98a2ba
+r11858 430e9f4c47
+r11859 6e961f3b74
+r11860 b0e5eeb119
+r11861 2dcbfa7d08
+r11863 1cc6a768e0
+r11864 e78dcdc4c5
+r11869 aac8bba0c2
+r11875 96d7374b9b
+r11876 2fb330d244
+r11878 2d6d68fb6d
+r11889 1f166dfd3a
+r11891 5bc19dd5f6
+r11892 ae8da76c01
+r11893 724e68bab0
+r11894 450979f775
+r11895 609af01c6e
+r11898 ecca1a73d8
+r11899 26400b7b32
+r11900 a31e57a3e7
+r11901 e92dd1b674
+r11909 e060c61b61
+r11911 e51207992f
+r11924 34ec899267
+r11926 cea527a9dc
+r11927 ee41bd58d4
+r11928 f324c3aa07
+r11930 83d0d76b12
+r11931 cfb62d0b27
+r11934 458adadcaf
+r11935 6739cacb9d
+r11936 d1aed7012a
+r11938 90fed9c484
+r11939 c66f4b8162
+r11944 278f89bf2f
+r11950 540c308ca6
+r11951 e182625e51
+r11954 d7b39e3597
+r11955 11c26aa228
+r11960 7d89506e35
+r11961 6ad83dae69
+r11963 8414ebada9
+r11964 d4cc633ec9
+r11965 80d1c7de2a
+r11966 908decebd0
+r11967 d2d5fb166c
+r11968 39fbdc73ae
+r11970 7cf62c8a32
+r11974 b2d9d9096a
+r11979 9578a9df03
+r11980 1cf6fcfbfa
+r11983 a7a87af828
+r11984 835fab5224
+r11985 a31e3c23a1
+r11986 625d525491
+r11987 f1eb98a9ec
+r11989 84bed4cf43
+r11990 5740c65d5f
+r11992 f587ec7c8f
+r11994 3aad376baf
+r11995 0335d0cf63
+r11996 35b2dad1fe
+r11997 067694f36c
+r11998 273405850c
+r11999 54b23b8394
+r12000 989c80bcad
+r12001 63d5f0c247
+r12002 2d28f7fcc3
+r12003 a384720d2c
+r12004 9934c835a7
+r12005 2a52c9f3ab
+r12006 8bde15629b
+r12019 61349a9191
+r12020 fc5d8ffdb0
+r12021 2140a3b04a
+r12022
+r12023 d394b0b1c1
+r12024 1e6f4c280d
+r12026 52759489db
+r12033 a2db8d932a
+r12040 1d8e1b2d22
+r12041 4b7298f02f
+r12042 40c6ed3501
+r12043 d0a8963618
+r12045 e0f606ac4c
+r12047 9128040ab1
+r12048 960ce31287
+r12050 37222ddfae
+r12052 715774d4b2
+r12053 ba3b4ba405
+r12054 225fac5af5
+r12055 6bc98cf8af
+r12056 d675971454
+r12057 41d984037a
+r12059 a6ffdf6992
+r12060 2cae4689eb
+r12061 52ccdc5627
+r12065 2ec348815b
+r12066 4dc5918462
+r12067 46285f4309
+r12068 f16995458c
+r12069 54e04e4085
+r12070 d63942fe1a
+r12071 4ce287ec39
+r12075 7620e2d34b
+r12078 9867746f9a
+r12079 6d5979b714
+r12080 c184cc7096
+r12081 6476819ce3
+r12082 1edd1f1db1
+r12083 7b6fe636f8
+r12086 c378489a95
+r12087 542c248d61
+r12088 627257dfbb
+r12089 09dd9eb7ef
+r12090 177505fcb9
+r12093 d1b12f2a86
+r12094 ff5d9c9afa
+r12095 a4faf44171
+r12096 b0da26356e
+r12097 7329219d88
+r12098 7dfd2d5822
+r12099 08fc901f4c
+r12101 72c1d53647
+r12103 aba747cf8d
+r12105 30f41d643a
+r12111 ed3f1d101d
+r12112 4e18a7e8d4
+r12113 67915c6694
+r12114 ed89b34938
+r12117 b5df8e3517
+r12120 c717018a84
+r12124 8f93b9f009
+r12126 490050f689
+r12127 1b887be0a1
+r12129 40a5b9d61c
+r12136 66eb76d374
+r12138 94220cb34e
+r12139 5081021e56
+r12141 b0745f84a3
+r12142 0b4d9de1dc
+r12146 a1ec75c264
+r12148 b4b91dcb58
+r12151 088d4aef3f
+r12152 eea125fb1d
+r12158 2836a47041
+r12159 97664fd90f
+r12160 ecc878bb26
+r12161 2096c06222
+r12162 674015b30b
+r12164 a8a692413d
+r12169 fed30dbea8
+r12170 665a2748f0
+r12171 d618e1f898
+r12173 8eed99684f
+r12175 81a4d20bf3
+r12176 b0cee5943f
+r12177 510f983351
+r12178 6f5102c26b
+r12182 01292462be
+r12183 1219180c5f
+r12185 5eb0d12474
+r12187 72597908f8
+r12191 e4bc488dea
+r12192 842391cb5c
+r12193 9b5d61596c
+r12194 1287e33524
+r12197 308f93f8ed
+r12198 b75dec4cf4
+r12199 f0da69b725
+r12200 8a42f2f146
+r12203 c7345c8a95
+r12205 288b766d4e
+r12206 af32136a17
+r12207 5fa0bb8d42
+r12208 633354bc2d
+r12209 0cd9d09355
+r12210 16e1b57be1
+r12211 2754c44988
+r12212 7f4894c8ba
+r12213 a694448355
+r12214 ad89e1d2ff
+r12215 6b9c024750
+r12216 1c53987588
+r12217 34732d0895
+r12218 b656480d91
+r12222 d6dd8c3fb0
+r12223 00c12b4d00
+r12224 f974621afd
+r12225 17d7758bba
+r12226 846fec4ed0
+r12227 f581be6a67
+r12228 ab013e7071
+r12229 c08f1700ca
+r12230 40430c44da
+r12231 f2e4362a5a
+r12232 b98eb1d741
+r12233 573b5e2c86
+r12234 32cd1ac7b8
+r12235 ee232043b0
+r12236 a9f599f975
+r12237 2ad1a3c218
+r12238 33ec0ad1d7
+r12239 7aeca0f163
+r12240 787bb041fe
+r12241 03408a6c02
+r12242 819c89544a
+r12244 887f9515f7
+r12246 4e9b6b48a7
+r12247 2e35175f47
+r12249 ede9c3921e
+r12250 d46c58d0f7
+r12251 7ef97b2993
+r12253 b766a0baf3
+r12254 b0b847f1eb
+r12255 9260b90c00
+r12256 c2c019c73d
+r12257 ab51bb3e06
+r12260 01d6a088da
+r12261 212f89bcc6
+r12262 9f8daa47ff
+r12263 302612f334
+r12264 85272be21d
+r12265 2345df9ba2
+r12267 726eff2779
+r12268 802a3e3a8f
+r12269 9f2ea1b342
+r12270 5540988eb4
+r12271 95a9b8dc2e
+r12274 76e2ea8895
+r12275 4358c5019d
+r12276 c5bacffe8d
+r12277 bd02f63597
+r12278 54bdbd7c5a
+r12279 c2cd1c0ece
+r12280 a31348e99a
+r12281 27afc3c269
+r12282 7ccd176538
+r12283 a874f35109
+r12285 080802c84d
+r12286 8b78ce0012
+r12287 6d2449f066
+r12288 c2c439dc6d
+r12291 edacf9f434
+r12292 4428dd2a4e
+r12295 84224545d9
+r12296 ca623645fa
+r12297 d93096ce92
+r12298 255435b4f2
+r12299 76223e85e2
+r12300 eeff0aed80
+r12301 5fe375ba62
+r12304 07833a931f
+r12305 6d9221f765
+r12306 13369d36fa
+r12307 7529035f6d
+r12308 72105be047
+r12309 f6a9176308
+r12310 7e617efa8f
+r12311 fcf17f5bec
+r12312 6bd1cbb28f
+r12313 6d98fcf8ef
+r12314 5c473c90d8
+r12315 8a5b14e856
+r12316 145902b170
+r12317 e48c773647
+r12318 511a3ff39a
+r12319 d8116e7abd
+r12320 a5b37442c3
+r12322 2b7574b14f
+r12323 90eda0dfdb
+r12324 3bac46a9ea
+r12327 7ce9a2d992
+r12328 07d14d9712
+r12329 9f1345730a
+r12330 a840917b32
+r12332 51ff43f811
+r12333
+r12335 379dacdac3
+r12336 51242965f3
+r12337 1d8d942932
+r12338 74f167520e
+r12339 8da5cd2cf0
+r12340 c739e595a3
+r12341 4b121fc9bb
+r12342 d701620429
+r12344 f9c34f8d3b
+r12347 31b6dbf1c5
+r12348 c5c6d4a8ce
+r12349 02189a8d5b
+r12350 3d8003db99
+r12354 14ea3ab51e
+r12355 efb6db4cc9
+r12356 92cb82a6da
+r12357 bf32e7d4a8
+r12358 7c57443945
+r12359 63c0c73ba7
+r12360 e00276d4b1
+r12361 1019c3a8ef
+r12362 368d3a7129
+r12363 5f07516f6a
+r12365 e0fa88b729
+r12367 247ec20681
+r12368 1adb565b6e
+r12370 d33a20f067
+r12371 6b9a5f149a
+r12372 475937a041
+r12373 62d7f4c35a
+r12375 bd9874055f
+r12377 f3c134a70b
+r12378 444991fd00
+r12379 0f1add0d0b
+r12381 c633afd891
+r12382 b37372ea5c
+r12383 a41f6eefc5
+r12384 d5d4f71448
+r12385 1fd4a4b280
+r12386 46b5d81c6b
+r12387 2ec28d164c
+r12390 08a42368c0
+r12393 11f1b34dde
+r12394 2d3a640e0b
+r12395 16d3cf1f8f
+r12396 6348be1535
+r12397 7f39e4a1fd
+r12399 399cfa2a08
+r12400 c2bab2c122
+r12401 0a719124c9
+r12402 551e31ec7d
+r12403 d8504784b8
+r12404 5355f3c732
+r12405 d6f27a8d9c
+r12406 a64786e441
+r12407 81442501d0
+r12409 39b0a1fbf3
+r12410 7f2f04c2f8
+r12411 e2bcca722e
+r12412 e175239fd3
+r12413 fd3697ed00
+r12414 95eaa29b50
+r12415 538e22b80c
+r12416 c89a1811df
+r12417 a21a258fe6
+r12419 a06edbf12a
+r12420 4b973bfb25
+r12421 b1498443ca
+r12422 1da3a45955
+r12423 e517b3b183
+r12424 edff72ec73
+r12428 d73c9b51b8
+r12429 c61bd2d85c
+r12431 0b34dfbcfe
+r12433 7c236d237c
+r12434 e7ea8f8598
+r12439 1c87f4dd46
+r12440 dbcfaeb07e
+r12441 9f95026c8e
+r12443 d3f33a44f8
+r12445 5327a60d20
+r12456 eb21be47d8
+r12473 4574bcbd67
+r12474 d340c57d7e
+r12475 de47e4d7a9
+r12476 53f715896d
+r12477 17ddb5c33c
+r12478 d5dceac54c
+r12479 6b182eb903
+r12480 7de02030ad
+r12481 eadf9387e2
+r12482 e114becbc9
+r12483 a00c8f75f1
+r12484 fe133c86f4
+r12485 a19af644d2
+r12486 d50a009591
+r12487 dc373d09bb
+r12488 972725421c
+r12489 09db2edcd9
+r12490 e822b909c2
+r12491 42f11c0aff
+r12493 d725f4a6d2
+r12494 c9fa2fb215
+r12497 d71a8cd2f7
+r12502 7ff9dec674
+r12506 74b464b1c5
+r12510 2cc1c6799b
+r12511 b9232101bd
+r12514 af7db4f3c5
+r12515 d0655ebe76
+r12516 974e1a2f9e
+r12521 3cebbd7cea
+r12527 0fdee8b11c
+r12528 58b7571f72
+r12529 145c188d55
+r12530 564bc566d3
+r12532 4357e79096
+r12533 2465b7e2aa
+r12534 7d793f6ff5
+r12536 6b573ed92f
+r12540 cbba5153da
+r12546 300caee15c
+r12557 b47ed354cf
+r12558 65d20f0d9d
+r12560 b63f70743f
+r12564 f507e25804
+r12565 53e0d8562a
+r12566 60718f6da0
+r12569 e8844dd919
+r12571 538a43fb6e
+r12574 9e118bbf6a
+r12575 5eadca1408
+r12576 102aadc5f5
+r12578 748a2f87b2
+r12582 4da4d32840
+r12591 62c04ef6b9
+r12592 f69c8e975a
+r12604 f8b2b21050
+r12605 ecbe1cdd17
+r12607 93e7d7fe4d
+r12608 a1b189d8ad
+r12610 082e881d0a
+r12611 2ddb10dfa4
+r12613 649289cb68
+r12616 1e350595d8
+r12619 e313d9651a
+r12620 ea8c405c26
+r12621 d1dcdba7ee
+r12623 ff9592bd51
+r12624 5c301870f6
+r12625 b696185eec
+r12627 83767dc5e2
+r12628 d0310bece6
+r12629 4192a98136
+r12630 73a1346edb
+r12631 e69edec6c7
+r12633 20caac2bac
+r12634 cb77b69a42
+r12635 4976d17863
+r12636 83d4a1b304
+r12639 e731089080
+r12641 1ce5ecc912
+r12646 62cd29a178
+r12649 8d96aea0a2
+r12651 466df8f4b7
+r12669 36a7ca2d54
+r12671 0e32440936
+r12675 7440758377
+r12682 e07c5695f3
+r12686 e032ccba0e
+r12694 6d8a7e7376
+r12699 44c08fe2e4
+r12704 22f1be16fb
+r12705 649de80693
+r12707 25acfe6cc7
+r12708 79cda8f630
+r12711 9d12fe104d
+r12712 b0f070a6aa
+r12713 042cce8cfc
+r12714 71b3f784a3
+r12715 0fd37e790a
+r12716 eba18a359e
+r12717 c61168109e
+r12719 52ab7acfbf
+r12721 840202e705
+r12724 10bd9e9c8e
+r12727 c8f68128c1
+r12728 57a9a56fa9
+r12729 7896c66827
+r12730 658fc638ac
+r12734 3527c51675
+r12737 71ba470de3
+r12738 97946f9d60
+r12740 d32deafeb2
+r12741 7c7ea4b57e
+r12747 26109cdb6b
+r12754 77de72ce86
+r12758 8af1dfade7
+r12760 6ac1007149
+r12762 8d61f90ec5
+r12763 623a1e50fb
+r12764 33770714c3
+r12765 6b630a80aa
+r12766 2e1e65ee5b
+r12767 dfa2cf1c11
+r12769 5a9fbd9d95
+r12771 8d8bbecc08
+r12772 809ffd2c15
+r12773 1f486461f7
+r12774 27261144ee
+r12775 0d022af194
+r12779 1a8874d472
+r12780 1828b005b0
+r12781 d65e422032
+r12784 dca3a04243
+r12785 7bb91bbfbd
+r12786 8ab3c6b56d
+r12787 10bec64595
+r12788 c8740e98dc
+r12789 5960d43f3d
+r12791 259528cdf7
+r12792 92629629ab
+r12793 4d9354ae14
+r12795 24943dad3c
+r12798 98b3d5254f
+r12800 c0983451c5
+r12812 d932b6cb1e
+r12814 aa6cc51acb
+r12815 4fdaad4888
+r12817 1ec3ba2ab4
+r12818 49d86b0f87
+r12827 c9e92bfc89
+r12832 b907c8eb59
+r12843 7b405d5b02
+r12844 302e8dbfca
+r12845 8d1cf73c03
+r12847 cf471e6091
+r12848 385b899a0c
+r12860 c1ce17b264
+r12864 6900f59041
+r12868 d2c1b74f0f
+r12869 d2671e65de
+r12870 9f996ddaf6
+r12872 ee3213739c
+r12874 d0893187a2
+r12875 6b801413fd
+r12876 ab7d775228
+r12877 4c74083c14
+r12878 0b2f65aa6c
+r12879 7fe7bace8a
+r12880 2353ddb22a
+r12881 6eb0e65691
+r12882 78906ce9f9
+r12884 1c1b5e88fb
+r12885 3f9b82c88d
+r12886 a205b6b06e
+r12904 95c231ae31
+r12905 f6d48a08ca
+r12906 712ffcabe5
+r12907 00eed346f2
+r12909 d3b1c7957e
+r12910 6c815e5740
+r12911 87fed8f410
+r12912 151acf12ef
+r12914 18b2e053ae
+r12917 c310233042
+r12920 fffae18775
+r12921 afa0379466
+r12925 a20315a54e
+r12926 de68205eba
+r12927 a272e9b460
+r12928 ff6a4630be
+r12929 ddae8fd220
+r12931 b77116fea1
+r12932 7845ce31b8
+r12933 8df9996c16
+r12934 b7a2b46a73
+r12937 aee3d8d52d
+r12938 b979e14d6e
+r12939 f25732f7d1
+r12940 34f6ea9cab
+r12942 67717605c8
+r12946 648556baef
+r12949 1b41795f51
+r12957 9f847abf34
+r12959 72639626f7
+r12960 a2d610b1d7
+r12966 e4a89c3bd0
+r12971 22aa3cc49b
+r12972 a15a2bed93
+r12973 3e458ce8dd
+r12974 2fef21d33e
+r12975 cb0a5a45a1
+r12976 2f38118b94
+r12977 0b00cb9fc3
+r12978 40884972d9
+r12979 2a22d4156b
+r12980 56fa78c91d
+r12984 96906f755f
+r12985 082a3edc21
+r12986 c373bdc3b8
+r12990 7f37fa01a4
+r12993 08704a195e
+r12994 49592a353d
+r12996 2b040ce0fd
+r12997 d708dde778
+r12999 9d44ea69f8
+r13001 bbcd575ed7
+r13002 ead965a337
+r13003 b8b85aa1c5
+r13006 6761dc14b7
+r13007 41865ed001
+r13009 4f2d5e4808
+r13012 9ce1dd8d50
+r13014 c4181f656d
+r13015 dd8fbb7c36
+r13016 3ef75fa07a
+r13018 d93d566e08
+r13032 f91bc93ad4
+r13034 b10fe799a8
+r13035 ef106dc59c
+r13036 853a0a5433
+r13037 9db671d702
+r13038 8090763f46
+r13039 3a28527889
+r13040 515ab49a70
+r13041 ab093d847c
+r13042 417417ef71
+r13043 07f4b0f821
+r13044 eeb6eb3873
+r13045 43daceac47
+r13046 eadef15613
+r13047 1487307edc
+r13048 57a7a38526
+r13052 ab477e33c3
+r13053 e2565c0356
+r13054 825e4447d6
+r13062 96eb13400a
+r13063 34112093ef
+r13065 05672898a1
+r13068 363a042442
+r13089 c3f4ef6104
+r13098 853e53f117
+r13101 7a8dc411ac
+r13106 476606f848
+r13109 15ffd68390
+r13112 7305b72eb8
+r13113 a49cbca4e9
+r13114 0ff28e0305
+r13115 810a709dd7
+r13116 5a17de87ec
+r13125 333e924d5c
+r13147 74c60ffa67
+r13150 e97eb8f50e
+r13151 f5aa270473
+r13169 c1cb43d928
+r13175 3124ea5658
+r13176 85f19da7d2
+r13177 9e8c022640
+r13180 bff42682bc
+r13182 b14c210bab
+r13186 3a4750825e
+r13189 b9d874ba4e
+r13191 42b43e8b38
+r13192 b91088da8d
+r13198 99a7957c4f
+r13199 14553bf9fe
+r13200 ff082b58c6
+r13202 db75a1c922
+r13203 64177deffa
+r13205 491d263ce2
+r13206 b2ed8091cf
+r13207 db0c13fdad
+r13208 98d92d4659
+r13214 05b59f2c7d
+r13215 bf83b15cad
+r13220 9feddc6cb4
+r13222 d3b764f220
+r13223 35bd7a8255
+r13224 a49aba6e82
+r13226 08f096df8c
+r13228 01ef5d562c
+r13232 6c52710e56
+r13233 5bd7d8413a
+r13234 9c0994d031
+r13235 17f18b46d5
+r13238 16f241cfe7
+r13239 5438ab13a9
+r13242 6ff4542f8b
+r13243 969384da70
+r13244 768a93028f
+r13245 35c966b024
+r13246 4cd5e4812e
+r13247 bdc8a6a607
+r13249 96f925078f
+r13250 fbd2eae173
+r13253 bd931b3fcf
+r13254 4ca92ff83c
+r13261 7886881b34
+r13262 ac791fa286
+r13266 8fc8fb71ac
+r13268 a7cd73f5f5
+r13274 0903ca6b21
+r13277 fa369bcf65
+r13279 7b61cfa3e4
+r13280 41be228d1a
+r13281 d18ce48ac2
+r13282 ed73e93b10
+r13283 509410ff39
+r13285 3818b76a0d
+r13288 ac55b8a3c3
+r13289 78940081ab
+r13290 e1f5fa089b
+r13291 82c5f83abc
+r13294 d0c65dcd15
+r13295 44b2aab804
+r13296 1c3653233e
+r13298 afbc3fedec
+r13299 60aced6ae6
+r13300 201ee07f10
+r13301 7444097917
+r13303 6590cc3936
+r13304 122ff46948
+r13305 a98fe88f2e
+r13306 e117099d3d
+r13307 07235ebcd3
+r13309 62bf8d67c0
+r13310 48e6aca343
+r13311 a6354053e0
+r13312 dca86389ac
+r13313 b574ca87cc
+r13314 7ae1ce1e8d
+r13315 893b03bebd
+r13316 c5ef189ab9
+r13317 c8fab9ec7d
+r13320 1bdf2c4ebf
+r13321 e32400681a
+r13323 96792348fa
+r13324 062fedaefa
+r13328 a9a877ea24
+r13330 3c14e1a072
+r13331 cbc9b3c3ba
+r13332 aab21c2dc8
+r13334 b6f9e1752c
+r13336 7e4f1a8b53
+r13337 db83d6f46e
+r13338 cad2ace82f
+r13339 8e5450bb09
+r13341 61bfaa4b28
+r13342 79842acc1a
+r13343 2ee9e59b35
+r13344 ccb860372f
+r13345 50a757c947
+r13348 639adcce01
+r13349 83ac2b5669
+r13350 e4d31aed1f
+r13352 090482dae2
+r13355 59a0cce0c0
+r13356 f5c98713de
+r13360 08a4772207
+r13362 1999c1bdc3
+r13363 b7af5e53d1
+r13365 25258b3d6d
+r13366 8c9e9f7b7d
+r13367 dfda38550a
+r13369 b8681839ed
+r13370 924c77e79b
+r13371 db60c0207b
+r13372 8f305d4959
+r13376 5e175852a7
+r13377 0d95261bbc
+r13378 9b2cb3887b
+r13379 060239a436
+r13380 1d4bc5dea5
+r13381 43319c2b2c
+r13382 e1aa90e7f3
+r13383 3c9cf29c59
+r13384 493ab4a848
+r13398 c73986baa5
+r13401 532d7a9f35
+r13403 7f8c296130
+r13404 5d4605b693
+r13405 cde40bd9bd
+r13406 6eacab84a9
+r13407 8647f299b0
+r13408 de4e67acfb
+r13415 db0cba8350
+r13416 c4a1857e8b
+r13418 52ccc329cb
+r13422 cc843b480d
+r13423 c68abba08e
+r13425 1120eaf953
+r13427 387f98cfde
+r13428 8f1bc80367
+r13454 5ecc494306
+r13455 0faae9a228
+r13456 26fb6b5395
+r13460 1360c0b1ac
+r13482 b7a804423f
+r13483 31e30179cb
+r13487 b097e75380
+r13490 da79f1b5fb
+r13491 8ed122db80
+r13495 65e20dbbf9
+r13498 81c78662cb
+r13517 98e9995ddd
+r13518 bd42999939
+r13519 a891b5b274
+r13533 4ddadc9fb8
+r13536 dc6d7a0973
+r13537 9752b4da2a
+r13540 0d5c56f023
+r13553 6459ab6923
+r13577 ed8326a398
+r13580 97b34f3cd1
+r13582
+r13588 e5d6f338de
+r13589 2ed9387915
+r13591 74dc1a8453
+r13592 7d3efa0e19
+r13593 422a46993c
+r13595 999d30eea7
+r13607 8ad43107f5
+r13612 a703d69eab
+r13615 9aaf456f48
+r13616
+r13619 b186613b3e
+r13620 c6f96a7ef3
+r13621 c9658ac006
+r13622 a210986884
+r13623 5fbcd57e96
+r13624 4e45e9e07b
+r13628 9a9ab66963
+r13629 cc4c5f64d1
+r13630 c84f9f9226
+r13631 6d544011e9
+r13632 74168e4184
+r13633 fc9a9f9334
+r13634 58283d2f54
+r13635 eb1e54b1e8
+r13636 3aa48de96a
+r13638 319dda9041
+r13639 50f39cd160
+r13640 c81cb36f85
+r13641 a6d2b80b53
+r13646 a86b9aedb9
+r13648 0fd867b5ed
+r13655 73d091062d
+r13656 6d37bf097d
+r13657 a99670b344
+r13662 e054b90b63
+r13664 7da478591f
+r13667 5327f1188a
+r13668 f6cd01e01f
+r13669 f95bfb97f4
+r13671 80f280c545
+r13674 5ca37b791e
+r13675 a315748a73
+r13676 13148cc2ae
+r13677 064ff0081d
+r13678 5b273b4327
+r13679 779aec9f38
+r13684 4c05b14a71
+r13688 7ceeb1e609
+r13689 dda8f67ce0
+r13694 a85358f76a
+r13695 d27d64aa30
+r13699 1a87dcf96b
+r13700 d9f2401cdb
+r13701 abbcc46aca
+r13702 b9461d35c4
+r13703 36a6313540
+r13705 5d32ba1ca5
+r13706
+r13707 078b598234
+r13709 13fc5575c5
+r13713
+r13716 0f73d8ae86
+r13718 e5ca1a3906
+r13719 76c06b4661
+r13720 3cad8b66e8
+r13721 b9af696f62
+r13722 18d3961cbe
+r13723 06b9a2a9c8
+r13724 4eb322b878
+r13726 2bd1b6a760
+r13727 f21693b632
+r13728 5747a2d98a
+r13730 fa1837c8f7
+r13731 bf23fbb746
+r13733 799f20c50c
+r13735 52d136c332
+r13737 e56b12033d
+r13738 456729b845
+r13739 7e1a139a35
+r13740 f614f2eb68
+r13741 3267a516f9
+r13742 0cbafac8af
+r13743 6c0a1ca198
+r13744
+r13745 255766e149
+r13746 cb5425e58c
+r13747 fdcae0b7eb
+r13749 07e22566c1
+r13750 d890aa1a5c
+r13751 ba2bb0f732
+r13752 94a67b3673
+r13753 e5237247c9
+r13754 966d503017
+r13755 fdd9bd04ed
+r13756 9c723bc385
+r13760 e8558ed48a
+r13762 3e1f51aad2
+r13763 cddc4c3cf5
+r13764 51e784b215
+r13765 a17c545086
+r13766 dfb7134aec
+r13767 b3f0e4bf9f
+r13768 0580641b2e
+r13769 a478609e1b
+r13770 687e21d160
+r13773 cb1daed658
+r13775 2bb757ae59
+r13777 9090138f76
+r13778 e45c740e23
+r13780 602a62d1fb
+r13783 1f6eb71e42
+r13784 a8c4885e88
+r13786 a5692b1f40
+r13787 fe9a3a692e
+r13789 2f56eefee4
+r13790 c3c207c8f1
+r13791 b355de5a08
+r13792 628029724b
+r13794 0dd548a5ea
+r13795 1aa0693bf9
+r13796 ed48d258dc
+r13797 cfa21f44a0
+r13798 fb51361c65
+r13799 23b18671a2
+r13800 185ed95432
+r13801 6f0e4a2001
+r13802 45f7a9228a
+r13803 67176e0d6e
+r13804 8941b3fa60
+r13805 3e249d85e4
+r13806 8d886ca8fb
+r13807 bb99cc66c9
+r13809 2fd65e0fd3
+r13813 322c980230
+r13816 c4cd5137d2
+r13817 ca0ffaa0ee
+r13818 2113259af4
+r13819 4c3fd9db2a
+r13820 fc09487480
+r13821 9d7b414f6c
+r13822 f913d79677
+r13826 0799efb893
+r13827 d855e45442
+r13828 ca64494053
+r13834 345d649bb2
+r13835 1bb3f81b2e
+r13836 1dda54121e
+r13837 817317824a
+r13838 eb71465d1d
+r13839 7bee443fb8
+r13841 62c8424646
+r13842 fb77e16411
+r13853 3ab4a7b749
+r13854 fcc91d2f8d
+r13855 856ffa7572
+r13856 222998874f
+r13858 1e00b1bd3b
+r13860 2e9f2110cc
+r13861 123760edeb
+r13862 3272a4bfb3
+r13863 1bb174dd34
+r13866 d45f68252a
+r13870 17688db317
+r13871 d8e9f6cd93
+r13873 5295a1a8ca
+r13876 83641e762f
+r13878 4ce201a6f4
+r13879 d02988125b
+r13881 11297162d1
+r13882 fa2d95497d
+r13884 1f945242de
+r13885 2a1e4cc575
+r13886 644350e3ca
+r13887 87609b4241
+r13888 2388b54ba3
+r13889 c295622baf
+r13890 b7ff333ead
+r13891 e9d163ad64
+r13892 b7470d8225
+r13893 864c5a385a
+r13894 a09af55ae3
+r13895 120253e251
+r13896 09f0838d07
+r13898 df55a8175a
+r13899 1021800b39
+r13900 411793e1ba
+r13901 02c5a32843
+r13902 51e901a8c3
+r13905 9b379d01bf
+r13906 11d8e2c487
+r13907 5ffe50c3df
+r13908 0c453c1a3a
+r13909 f3e42a50ab
+r13911 0eae959935
+r13912 984c3fb306
+r13913 b3d232dbbe
+r13914 481741edaa
+r13917 264b9c05a2
+r13930 87e7a42076
+r13932 baa83f11ee
+r13933 f45ea36183
+r13934 183c469b21
+r13936 a176556bea
+r13939 d3a71dbd88
+r13940 613ee6e299
+r13942 7f6e39f86e
+r13943 4ba8aa0dfa
+r13947 6b74adde4a
+r13948 8c53284280
+r13949 fb3b62bc0f
+r13950 614a08f31d
+r13951 01e533c0c8
+r13952 a3dcb88cad
+r13955 c9861cd198
+r13962 50af342498
+r13964 433db019ec
+r13965 9481a6f181
+r13966 bdf8585f76
+r13971 928dce3cfa
+r13973 a9ce750946
+r13975 0fb6357fa6
+r13978 8125e64385
+r13981 b08f3e3e9d
+r13982 37eb010197
+r13983 67729af8d5
+r13984 c8fcd5202e
+r13988
+r14001 2ba73ce97c
+r14009 ba31aaae83
+r14010 b206d8933c
+r14012 92f5905cc6
+r14014 1aad4cb651
+r14016 fa3861528d
+r14017 e1ffc05b10
+r14019 3cb61dc106
+r14020 25fd82c6dd
+r14022 787b0264db
+r14024 e086a4440b
+r14027 9289284717
+r14029 42bd578320
+r14030 575f3023b5
+r14031 1b1b7d6515
+r14033 2e91b45194
+r14036 8b0df2f59e
+r14037 c0fd2c90d0
+r14040 5879e8e98b
+r14042 bc940a8471
+r14043 288e240875
+r14051 a3d11d3121
+r14052 1769b68a6d
+r14054 41dc722508
+r14055 b3c3d1a638
+r14056 2e1386f375
+r14057 aafaaef961
+r14059 9c79f8e32d
+r14061 0eb7d42b3a
+r14065 7231cf881a
+r14066 c1f27b70c6
+r14067 8c5352dc3a
+r14071 3a6ce7d18a
+r14073 151cbc5c27
+r14074 dbd98be39e
+r14076 70ea2a549c
+r14079 524405a9b6
+r14080 bb1dd8165a
+r14081 add615a76f
+r14082 e715cdd0c4
+r14083 2e68a9c580
+r14084 3b07bbaa4b
+r14085 d46b616171
+r14086 05096f361b
+r14087 9495c8bcd1
+r14089 573e90041e
+r14090 fb6fcaa6d6
+r14092 8bd9521d8a
+r14093 af87ca7166
+r14094 7e34adcfa1
+r14096 1565699e2d
+r14097 2f0b80463d
+r14102 fb914227c5
+r14103 07c5d167ad
+r14104 ecca8e2e67
+r14105 0c48c80ce9
+r14106 fc8593d4eb
+r14107 2dcea3f029
+r14108 ae85da1eac
+r14110 bb37eb067b
+r14111 9342a6e7c4
+r14113 21221df100
+r14114 a8f9f01d5e
+r14115 c11c657d05
+r14116 cad235ff62
+r14117 dbf12a761a
+r14118 12a0200eae
+r14119 0053d374d6
+r14121 e690f4cc38
+r14122 b4916be877
+r14125 befbc9d2c1
+r14127 2d39db44e2
+r14128 e5029f8266
+r14130 1bfbf4d63c
+r14131 0c6ab69119
+r14133 bcbeab7456
+r14134 e869cd3410
+r14135 cf5f84719b
+r14136 f41ab7c062
+r14137 54df3c451c
+r14140 fe0b578001
+r14141 cf6f492cc7
+r14142 a447f3b97d
+r14143 e2d790348a
+r14144 6b1bf0c0c9
+r14145 e7b7a10fe3
+r14146 6ff45c6dca
+r14147 79740bedb4
+r14149 8c86276228
+r14152 18e3e2ad5b
+r14153 ba065b5e68
+r14154 63f65cfaf2
+r14155 f97742d562
+r14156 1ed83a9ccd
+r14157 3b35b45b6f
+r14158 d650015537
+r14160 8bc588cbbe
+r14161 2110b51b9c
+r14174 ff089cef43
+r14180 d01b65f281
+r14181 8332f80b24
+r14182 03005a71d1
+r14183 95c1e8dacd
+r14184 13731d7f32
+r14185 6213bbc492
+r14189 fc13dfb1f7
+r14190 d97eea1aa1
+r14191 8224431116
+r14192 c399ad282f
+r14204 db7be2b068
+r14206 28b6ccdc83
+r14218 4d9208cfb0
+r14223 d0cfad449e
+r14224 c596fbe6f5
+r14225 50d638aa63
+r14226 941b8cc560
+r14227 faf3c9732d
+r14228 e902d4a048
+r14231 ac08734864
+r14233 dd1a28887d
+r14234 5b6bfd19de
+r14235 6473f2d851
+r14236 ca2cc573ac
+r14237 7fa4bf6084
+r14240 9797470c1c
+r14241 0b67c4eaa0
+r14242 746658d274
+r14243 94339301cf
+r14244 3d62db6fdd
+r14245 6bdf9ef7f1
+r14246 bd4a42d791
+r14247 f1a96095b1
+r14248 adcc39fca8
+r14249 c91f5ac73f
+r14250 f99dfe54c4
+r14251 822c99821c
+r14252 ad49bc9daf
+r14253 b04e01b374
+r14256 0e7908665b
+r14257 7eadd5453d
+r14259 9aacd6ddc4
+r14260 5ab72025a7
+r14262 5a019e4c52
+r14263 a62078efe9
+r14265 84e704d8b9
+r14266 921bc499d0
+r14267 c80f666566
+r14268 23d9e5717e
+r14269 3b8407a9ae
+r14270 1fcc24dd92
+r14271 b41231402d
+r14272 445cb840b9
+r14285 ebd7d295f4
+r14289 f19c2f31b8
+r14291 e97e8daa09
+r14295 bee89ecede
+r14315 f507f0ac4c
+r14316
+r14317 456e209662
+r14318 e5fb1da91a
+r14319 e1e48d78a9
+r14320 2d763549c0
+r14321 e0047ee119
+r14322 2d819d201e
+r14323 36894f5639
+r14324 fb31f764a2
+r14325 060068379f
+r14326 c19b67566e
+r14327 87dd9cb139
+r14328 190093c4ea
+r14329 e8e46b9fe0
+r14341 6131229601
+r14342 d817beea39
+r14343 9ee330b57d
+r14344 55fca86261
+r14346 8295360048
+r14347 83d3f475da
+r14348 2c0a9d6348
+r14349 a311262c67
+r14350 6137ba4276
+r14351 efb71c1e44
+r14354 83ab996aac
+r14357 993a987bd3
+r14358 5e0d16ad0c
+r14360 4b798d9b34
+r14363 e717d05c2e
+r14364 a29fd9c861
+r14365 3e72397413
+r14366 deab63a2db
+r14367 1e01637f89
+r14368 bb1cc87c92
+r14369 da97d90a01
+r14371 328363d628
+r14372 a0eb2af811
+r14374 44c4ab87bd
+r14375 10b30e9d22
+r14378 d0a90c7c4a
+r14379 e0f1c57dcc
+r14380 02c904f51d
+r14381 87c7cde2aa
+r14382 e9aec18ddf
+r14384 f8a14831d8
+r14385 9543096582
+r14389 a480c3afdb
+r14391 06b17eb97f
+r14394 c32ee91e83
+r14396 647c6d8d3c
+r14398 86ddfebfbd
+r14399 1211909cc9
+r14400 c0ce58e5e7
+r14401 c6d7eeb63f
+r14405 acfef3a473
+r14406 babcbb325c
+r14407 df542644b4
+r14408 e76edeb541
+r14409 0b15f0e5fe
+r14410 d5a928e927
+r14411 2f6f349a16
+r14412 cf299d7bbd
+r14415 ea617bd0bb
+r14416 2c36f5cad2
+r14418 8177b1fbfd
+r14419 37a34b327f
+r14420 c58bc06b10
+r14426 4f8a818c72
+r14427 6854959bc2
+r14428 75b4429e15
+r14430 53d25a4ed0
+r14432 1eaa352ec8
+r14433 c0705fc670
+r14435 827c7e32c3
+r14437 8696e223ac
+r14440 3a76532277
+r14443 5d91c77938
+r14444 c5e0179c22
+r14446 266c5326a3
+r14451 ef488e9e39
+r14461 54f611edb3
+r14465 75ea6c9f2a
+r14466 55eb30f54c
+r14467 b59e5237c1
+r14469 cb817b3253
+r14470 e700865476
+r14471 32c6de2b24
+r14472 c181450310
+r14473 6a93c709ad
+r14477 22d46fbded
+r14478 66515781fa
+r14480 2facac90e8
+r14481 a2ee2a5913
+r14482 694b5caf29
+r14483 fd417cfa96
+r14485 1258a12712
+r14486 70ac4996ae
+r14487 fcb2ea2ebd
+r14488 4ec9c8abe1
+r14489 279da87f48
+r14491 d055ff17c3
+r14492 783b6a672d
+r14493 2677581b24
+r14494 b5dae30241
+r14495 8d1aa644f8
+r14496 0cda1dec3f
+r14497 cb9f5470c9
+r14498 b250e5e012
+r14499 0e580e1207
+r14500 9c8e5d206d
+r14502 768d107385
+r14503 f9a68fb01d
+r14504 0b9cefa7e9
+r14505 671bae7caf
+r14506 fa942d0a19
+r14507 bd75cef9c1
+r14508 e5b446654f
+r14509 02975ed50d
+r14513 30610072ac
+r14514 f12696d5d7
+r14515 f64174df68
+r14516 4fa39e5246
+r14517 a0ce35f939
+r14518 c04fa9cd22
+r14519 fbd2b0caac
+r14520 e5fedb8059
+r14521 d235c4d7c1
+r14522 b6f12c0800
+r14523 a197e61bc8
+r14524 5b81033d33
+r14525 db6b85db24
+r14530 09d3a7bb5b
+r14531 48fdb8620a
+r14532 c05a0b7a49
+r14533 ed2dc480b1
+r14534 e657891d8e
+r14535 64dc793f3e
+r14536 5ac5c4c3ce
+r14537 427a2eaad6
+r14538 4951596902
+r14539 8f693de881
+r14540 70c841ac46
+r14541 97f01e6f8e
+r14542 67af71b370
+r14543 34fe33a612
+r14544 9d37cdde42
+r14547 f6c4b03cb2
+r14548 f9e8fbe0af
+r14549 3884f6e1ce
+r14550 b267019640
+r14551 7975b2be09
+r14552 46669b8f04
+r14553 e8f9c64093
+r14554 93ab0ec361
+r14555 89274fde0f
+r14556 9a15040953
+r14557 cb9c4a1c3a
+r14558 a5958d5bb5
+r14559 82b18210e3
+r14560 233e7106b1
+r14561 17d05259cd
+r14564 d85738f9e3
+r14566 ed01acd971
+r14569 07b35f671e
+r14571
+r14574 9346877092
+r14576 84d2a61972
+r14593 2d27f601d1
+r14596 4688cf9ac2
+r14621 7d36c43034
+r14622 3fd2c50ffd
+r14623 f8d488f098
+r14624 d94507e039
+r14625 5df2f76bb8
+r14627 0b89f667d2
+r14630 2fa3294cd9
+r14632 551db35802
+r14633 3f2bba7a05
+r14635 890a7c5095
+r14637 6fe5b44d31
+r14638 88a96b4ff3
+r14639 f4ab1e5dfa
+r14642 f5321be1aa
+r14643 4d215df276
+r14646 df1c1931cf
+r14650 c0090ac04b
+r14651 b0a07f7860
+r14652 887d021102
+r14653 1ce782ce2f
+r14658 9b98538679
+r14660 c89a410866
+r14666 68caee2e41
+r14668 374b34db53
+r14669 92ec9e276b
+r14671 51721cc3a4
+r14674 1d2570c5d7
+r14675 e10538e201
+r14676 55bc1d9fe6
+r14678 17e7531c14
+r14679 d96ec43600
+r14682 ad36c66258
+r14684 e9c8a59b63
+r14685 52f711e282
+r14686 d6046cea4b
+r14687 414ab99028
+r14688 2df4b46fb7
+r14689 d927ac6be7
+r14690 7086e9a963
+r14691 07567a3ff9
+r14697 8ebd73e6d7
+r14701 7d3d498225
+r14702 258c55afa7
+r14704 1dbb53f9b6
+r14706 6b6afed012
+r14709 bc99ad9be7
+r14711 324bc18be0
+r14714 5fbb8b6f9a
+r14716 c82c0adf09
+r14722 baad2fbd4e
+r14727 e744a80417
+r14728 d17ec3325a
+r14729 d814e5047d
+r14731 9c55c50d4b
+r14733 f18b805841
+r14734 1e277487f5
+r14740 1e5d8760f6
+r14741 dbf80520e3
+r14754 8394637280
+r14756 00b3b4c307
+r14757 6af6ce1130
+r14758 c3b7c00d1e
+r14759 7bae49fccc
+r14760 3dffcc27a4
+r14761 153a393c5b
+r14762 01e872f8c8
+r14763 39f2357f9c
+r14765 eb6911e3aa
+r14768 b19f300a28
+r14770 fbe6aa9fc7
+r14772 9a78b52ba3
+r14773 d8342f44a7
+r14794 0724552655
+r14796 5ce0d309ab
+r14797 bb90aa425d
+r14799 ffe8d3717b
+r14800 90a862787e
+r14801 142bf7242e
+r14802 5b0e21738a
+r14803 8faf310341
+r14804 dcfbdbfd10
+r14806 c0b21797bd
+r14811 04387ad63b
+r14812 05b846b94d
+r14815 1e54e8baf5
+r14818 633ceeda07
+r14820 1ecbd65b8c
+r14824 8e359872e4
+r14826 3009b2c80f
+r14830 1a9186d389
+r14832 1ecd751ef7
+r14835 006394362f
+r14837 9af5aa94d3
+r14838 7bb24097c9
+r14839 f8085a2e65
+r14840 1a0b3a2afe
+r14841 e0015e4ede
+r14842 e26f530f57
+r14845 badd123221
+r14846 fdbf828bb3
+r14847 3e1e2078f7
+r14864 e5a1fb508d
+r14866 289869e273
+r14867 570bb834c3
+r14869 374bd7f7b0
+r14870 f862598220
+r14872 78ab4f9e7a
+r14873 5241150491
+r14876 8e2b888a71
+r14877
+r14878
+r14880 0f6f62e503
+r14881 1d4fbeece9
+r14882 8c35b8f863
+r14884 76c76b28f9
+r14886 6b515dd6af
+r14888 f759e27007
+r14891 4563bc53c6
+r14902 72615dc18e
+r14912 06efde1f28
+r14915 37b0a629b6
+r14916 a1c8394f06
+r14917 2d2821504b
+r14918 3e47505f7f
+r14919 733eeaa6ce
+r14925 d3aec2477d
+r14928
+r14934 712077fcbf
+r14939 688cb18a1c
+r14941 8a78b2af60
+r14943 0385e9835d
+r14945 a959e93dbe
+r14946 9a09b884ee
+r14947 b57e67b8a1
+r14949 525aef50a2
+r14950 b7589adec0
+r14952 0234c9d0f4
+r14953 a9a6eeac9c
+r14954 1c95be35ee
+r14956 0f09ba97e7
+r14959 76068fd352
+r14960 774b845a3a
+r14961 bc13181ea1
+r14962 914e09a4a3
+r14963 d864fda9a0
+r14964 53bce94d30
+r14965 faeeb4f264
+r14966 cec6829c1a
+r14972 075630213f
+r14973 4d07c3dac6
+r14975 f02cc551dc
+r14976 fa147c6ad9
+r14979 bfe8a1281e
+r14980 2cd76912cf
+r14982 1be78ed232
+r14985 1be24726a0
+r14990 a73188c76f
+r14997 26641ee26a
+r14998 ea732f0c01
+r14999 938d16abcf
+r15000 7a1fba63c2
+r15001 1f8b79f1b3
+r15002 ad903380ca
+r15004 7c319c48ea
+r15029 76b511c18b
+r15030 0702dce858
+r15031 9b29afd550
+r15042 be2557d32c
+r15043 ffa638e564
+r15045 154a80dda6
+r15053 3e58057fd1
+r15057 ddf531d934
+r15061 69bf02e9eb
+r15067 1b0ebaf617
+r15070 639ce2f29d
+r15071 7b33fcff43
+r15073 9b3c97d984
+r15074 f185beecca
+r15075 f2d0746c8a
+r15080 7340a8f64b
+r15081 da769bad03
+r15085 617eafd6e8
+r15086 f1954d9a35
+r15088 cef268814a
+r15089 99b5d1c647
+r15091 4983ebac4a
+r15092 3f4fe40cc5
+r15097 7a981f4262
+r15098 7466f2ee02
+r15099 880eb7c04b
+r15100 7e9f81fd53
+r15101 3d7e820e9b
+r15102 5f450da638
+r15103 44fd5e7272
+r15104 4686535142
+r15105 f95cde8984
+r15106 7d71e4cf09
+r15112 c1f07338ed
+r15114 3a0b0d40d7
+r15115 17ce6cb275
+r15116 a81ac63831
+r15117 2c7e43ae7a
+r15120 00e18ddfec
+r15132 e72ace00e6
+r15133 f9340a7c06
+r15134 eea19e9670
+r15135 0425a6b3f7
+r15136 7eea3c922d
+r15137 0d31ac9ab9
+r15139 47f35b5879
+r15140 b7efa99768
+r15141 96b8079173
+r15142 e327bbb7bf
+r15162 0bc0b0bbc6
+r15164 ef6460b6e4
+r15165 e2fd411f0a
+r15166 6ec528fcec
+r15167 04185de550
+r15168 2063b4c4fe
+r15169 3eae3a2679
+r15176 100b87f666
+r15178 b1cf78869f
+r15179
+r15180 4cba60178d
+r15181
+r15182 c033e72385
+r15183 dc2ea7ccd5
+r15185 9e7a08fba2
+r15186 ad451f4a55
+r15188 f6056a24c5
+r15190 15c03d4811
+r15191 d7efafa48f
+r15192 6209dbe66e
+r15193 ef715d5f10
+r15194 762476777a
+r15196 115538595e
+r15199 c8882cea3c
+r15200 1b1425c63b
+r15204 bb04dae00b
+r15213 a480d4381e
+r15214 859f7497e1
+r15215 6f638318d6
+r15216 0d82294aa6
+r15218 c03b61cb94
+r15219 da328a26bb
+r15224 b7e13c2338
+r15227 3fefc43327
+r15228 859d2bbba8
+r15229 8de595a5d4
+r15230 97e20b1ff0
+r15235 b6281cd5a7
+r15238 562647a37a
+r15239 5d1a536a04
+r15242 35bb651843
+r15243 e8eb3647f6
+r15244 7569442847
+r15245 ba33786e9b
+r15256 a7f12d2e14
+r15259 fb88e0421c
+r15266 a5ef3d597d
+r15267 8c06a1a545
+r15279 e4e5d2a93d
+r15284 5efe5b8017
+r15285 c5de85e432
+r15286 ba0e0cdbf8
+r15289 3e7f5eaa1f
+r15295 d6b6402e4c
+r15297 abe9ec9859
+r15298 df9ba15338
+r15302 acfc0bf01c
+r15304 02271ecb5e
+r15305 9ba40ca890
+r15307 18da40ae4b
+r15308 f918ad1886
+r15309 113c795595
+r15311 a4baf28d20
+r15313 dbfdf0ec6d
+r15315 943f6dda3b
+r15318 0dabdc7b17
+r15320 2070c4b1ed
+r15322 6178673ae8
+r15323 68f0566419
+r15324 6036fb15c6
+r15325 0cd5bb6de0
+r15327 fb80a94f67
+r15330 0c146c48b8
+r15331 fa99ddba14
+r15332 86d6fb22d0
+r15335 740c36ace1
+r15341 9b17332f11
+r15342 2d6cc7c416
+r15343
+r15345 98596ff0aa
+r15347 d89ea1c9a5
+r15349
+r15355 e6a3566bb7
+r15363 ae7d7d20bd
+r15371 aa2a5f89d0
+r15372 70ead2ee53
+r15374 a735240edd
+r15376 388342464e
+r15377 f8d38356f5
+r15384 d576a53cd2
+r15388 d34d51d220
+r15390 9077de63b9
+r15392 707e55c227
+r15395 72da305329
+r15399 e2b7b044c5
+r15401 85db410e24
+r15404 6ea801d868
+r15405 3a824805c4
+r15406 7f78d46347
+r15407 84f24cad14
+r15411 2ed788315c
+r15412 1324218fd5
+r15413 71d6e44fde
+r15416 57209b7bf0
+r15422 e18907e87f
+r15424 e77f128169
+r15425 a4d47adf0e
+r15426 d8b12acb93
+r15427 b0c36c7a7c
+r15428 24a4298b72
+r15431 a42ff88491
+r15437 57e2d8157c
+r15438 7770830756
+r15440 2e217be7e0
+r15441 a8edcacc4f
+r15446 5ca94175b3
+r15447 fffb8c1031
+r15448 73006bc163
+r15451 de69837219
+r15452 5110fdf070
+r15455 a8552fcc43
+r15457 acfecf5902
+r15458 a911ebb98b
+r15459 f5e1103a0d
+r15463 bb41ff09e1
+r15466 d4115d4898
+r15467 3b18a3f004
+r15473 3f256f905f
+r15478 1f9606f747
+r15486 957bd55c65
+r15490 d9f65f3eb3
+r15497 82fa132d6b
+r15500 bc5ef919c0
+r15502 4f27b3769c
+r15503 546aa324e0
+r15504 3db2a5539b
+r15505 0c98435e63
+r15507 1133f0f05f
+r15508 323fe887df
+r15509 6d07d7a3a9
+r15510 29a41bcff5
+r15511 7b90be1358
+r15512 01a20f46ef
+r15514 f3bfae5a98
+r15517 e85297fc2b
+r15518 64bbcbd82c
+r15519 913cd10193
+r15522 f12e0645ff
+r15523 d374411895
+r15526 79727b4ea3
+r15527 9dc05dc520
+r15532 39f5c5cb28
+r15533 21781be0c9
+r15537 76fd52f306
+r15538 574e9dd010
+r15539 78b4ab415c
+r15543 66a97fea14
+r15544 7ec37d609c
+r15546 45a3c3aeef
+r15549 239bd13d4b
+r15550 06f6a127b7
+r15553 aa3d38d9a0
+r15555 0a49cecf82
+r15558 84806c6a63
+r15566 343b1de18a
+r15568 368dcb0566
+r15569 0a62491a99
+r15570 abcd0ec5e7
+r15573 aeb29ddfbb
+r15579 b894f804ad
+r15580 1157b4042d
+r15581 872c9ba67d
+r15582 92da76f957
+r15583 45cf39b3ee
+r15585 5086f86937
+r15588 0f53a99225
+r15589 eea36e4a51
+r15592 cca42c1c3b
+r15593 fe07aac5bb
+r15594 80f341ff12
+r15596 34572d6e7a
+r15601 2e42f93bac
+r15602 3f9549bd6f
+r15603 c69e0a9b82
+r15604 9117995a53
+r15605 ca6811cfa5
+r15606 19d6af3745
+r15609 eb79ac2f9d
+r15610 d1fb907895
+r15611 c8b3af98b9
+r15612 d492b489b1
+r15613 f89b267340
+r15615 d3b56e4b39
+r15616 8bacd7cf46
+r15617 90200957ca
+r15618 f697441605
+r15619 c925964406
+r15620 bb2c7676f5
+r15621 71fd0c5ed0
+r15622 2513754bd5
+r15624 8b954c346e
+r15625 9638b5c79a
+r15626 f4efeb88f2
+r15627 0c33725df7
+r15628 3c782c8429
+r15629 753e15520a
+r15630 8af4a26ead
+r15631 3635ee89ea
+r15634 f667fb7193
+r15635 d0063db3ea
+r15636 66d53477ca
+r15638 3fbd4f0d78
+r15639 3c2c20740a
+r15640 bf86775038
+r15642 44f801b71b
+r15643 f816f0a6f8
+r15645 078d9446bb
+r15646 2eb46f56d2
+r15649 9cfe5e961e
+r15656 076db04123
+r15657 b4ad97ce2a
+r15658 520647cf0e
+r15659
+r15660 24426432a0
+r15661 2389f12ce6
+r15662 8954759d50
+r15663 9dbfdc9ae1
+r15664 4fbdc7ce71
+r15665 f39f93c473
+r15666 60963bf600
+r15676 bbe9c35375
+r15677 7d2f33a7d2
+r15678 a254fe545f
+r15680 6938beb1d4
+r15681 82543fe499
+r15682 8b6a34df2d
+r15683 8b06724757
+r15684 70f7bb2dc1
+r15685 42f60f37e1
+r15686 10582aff64
+r15687 699e811f1a
+r15689 8b7c4138c6
+r15690 89cdad5e4f
+r15691 9285759660
+r15693 4d721eabbd
+r15694 2e5ad27670
+r15695 6e159702e1
+r15696 6d5656f637
+r15697 74f476f303
+r15698 d850636479
+r15700 f65e13b82d
+r15701 e09055636d
+r15702 3d82fd2ff5
+r15703 2daab02552
+r15704 e50c7947b5
+r15705 e2be618472
+r15706 c0eb91b2d7
+r15707 13cb455fb5
+r15709 2bb161b407
+r15710 9c72f1a023
+r15712 8a8230837a
+r15713 f07ac82ac2
+r15714 51f09101bb
+r15716 64d0862222
+r15717 1c801f47af
+r15723 47fb4c71ef
+r15724 059f4e7611
+r15725 6de93c661f
+r15726 aa1f5a76e4
+r15727 2d445ad1af
+r15728 b7e61584f6
+r15729 a15a44cdd1
+r15730 66f063a37e
+r15737 c84ba7f665
+r15738 021fa2b31d
+r15743 d5c8ea4d00
+r15744 988804257f
+r15745 3cf1330cc9
+r15746 55c4cb59db
+r15748 4f81ca5702
+r15749 13fddf993c
+r15751 d789698f45
+r15755 fe4591ba0c
+r15756 d27e89c0bc
+r15757 5ce0e339c4
+r15760 28e36e9a74
+r15762 0d31778efe
+r15763 885e7dbad5
+r15765 afa84b3b9c
+r15766 6283944356
+r15767 a4ace3820b
+r15768 b9578ddc25
+r15774 5b39e1e56a
+r15786 63a716747e
+r15788 53bb3acd71
+r15789 ecff1202b1
+r15790 0737e96229
+r15792 53bcf783da
+r15793 cac07e08d8
+r15796 d820345540
+r15798 20a3e4ee45
+r15799 7261acdba4
+r15800 ebd8be8c72
+r15807 2de0e86f9b
+r15808 2ea6916297
+r15810 ef642a8a51
+r15812 5cc825c48d
+r15813 ce47426183
+r15815 c19ea510a3
+r15818 8d07df2b37
+r15819 796bed0987
+r15820 98ba45e4f6
+r15821 aa43994c96
+r15822 40de8cc60f
+r15824 4644b54328
+r15825 f8e30d654c
+r15826 616d3e4597
+r15827 6bddfbb6d3
+r15828 207afbb388
+r15829 e1bca64e99
+r15830 72cd46805c
+r15831 2f69f47e7b
+r15832 0a0eeacedf
+r15834 775c6ca39b
+r15835 642b0ca4fb
+r15836 d63963d580
+r15837 e85bedf5af
+r15838 5603633e39
+r15839 54065c579e
+r15841 56eb012d9f
+r15845 9577fff49c
+r15870 b54da55aa6
+r15872 8678b42928
+r15884 ad5afb0487
+r15886 73e60c55ba
+r15887 6988638b93
+r15889 157ce5639b
+r15890 48a0b62ad1
+r15893 9319bfeba6
+r15895 7e23740dcc
+r15896 9a04bac69b
+r15901 138499401d
+r15903 9a984e4e5a
+r15927 cd6ed4e12b
+r15929 73021214bc
+r15931 b1e5ba0eef
+r15935 6a7a1eeff9
+r15937 a3e8c0637f
+r15939 f09222f565
+r15940 40d7db8596
+r15947 65062d459f
+r15948 75dd516be1
+r15949 dc8989918d
+r15950 532013fd52
+r15954 d0299fb471
+r15955 f0ab2e175e
+r15956 44bd48af53
+r15958 b85a3d25fc
+r15964 af2e9f53fe
+r15965 7fec2a0876
+r15972 fc1e62681c
+r15973 ea2fa34a56
+r15974 b3ba623412
+r15975 3ee45986dc
+r15976 6b3f18dbdd
+r15979 ce88a14515
+r15980 f58162a784
+r15983 cd085f6143
+r15985 906248a4b2
+r15987 4b6277f851
+r15991 e1cb4b5d15
+r15992 cfe1ba4c34
+r15993 f765ef4c20
+r15994 3c6d775e92
+r15997 c49538d204
+r15999 fb882601b7
+r16001 386fe95009
+r16003 6fd613c192
+r16007 1513988c3b
+r16009 9ea23262bb
+r16010 1ed25d9dd0
+r16012 106ebd0ba3
+r16014 8a71b645f2
+r16016 329de99b63
+r16017 350f4abecd
+r16020 1ffe917f94
+r16021 148f56f1c6
+r16022 743edeefd4
+r16024 3e0cd7e748
+r16025 97db00dada
+r16026 12bceb22fd
+r16028 f7eccb851a
+r16030 45e264bfa6
+r16033 5d1339b121
+r16034 d0eb6ae1a2
+r16035 fa8d0d8d85
+r16036 5d0ff3c25e
+r16039 8eef9983c1
+r16040 efb19538b2
+r16043 03c12787c6
+r16044 16acc7aa51
+r16047 4334d8c991
+r16048 7369338a6e
+r16051 0de2fb2846
+r16055 62f0adf98b
+r16056 faeca93e87
+r16057 ab1c93a7bd
+r16059 2bd07f7264
+r16061 457e00ba9f
+r16079 74f3359eef
+r16080 118a288bee
+r16081 6be73f6e95
+r16083 0e76651704
+r16084 a9a27eaea6
+r16087 350ba559f1
+r16089 b9232781f4
+r16090 6402af9e5d
+r16096 f36d200b28
+r16098 c3d3f0d3dd
+r16103 ed1c45477f
+r16104 aef23d2821
+r16113 c409423aef
+r16114 7d5d4995bd
+r16116 6bdefe4aec
+r16117 fbfb44c7f4
+r16118 91efd55dcd
+r16120 e92d29fecc
+r16121 e4d18ccfbb
+r16122 c8b96646e5
+r16151 281e265384
+r16157 a18a545a84
+r16161 5521ec1e2e
+r16163 4678821611
+r16167 e20362771c
+r16168 184383a519
+r16171 ee9e91a107
+r16172 12935da7da
+r16178 b9c208a380
+r16180 692afd6ddd
+r16183 51f6183304
+r16185 b320b6cf52
+r16187 b9343703f5
+r16189 c46666b9f4
+r16190 dbe66d0672
+r16217 29a8a8f779
+r16218 bd46c931f0
+r16224 8f1a65cb97
+r16226 2e770c31b6
+r16227 7fc6432ea6
+r16229 3eacec9453
+r16244 546eb6e8a7
+r16245 f7c0dd850c
+r16246 8059712c40
+r16248 b98da683a9
+r16250 ea2ceda18b
+r16251 19f4c0652b
+r16252 143ecef34b
+r16253 4163ac5548
+r16254 364360e180
+r16255 1615902e57
+r16263 86b39a89cd
+r16265 7e3aecae9e
+r16266 8b63b6aacb
+r16267 ddda42af0a
+r16269 d180b26e6a
+r16270 acd4c9471d
+r16272 8a3bbb52a7
+r16273 6ec1e72460
+r16274 a44eeedd3c
+r16275 6372a8b619
+r16278 1a3a362db7
+r16279 cc441db380
+r16282 bba64758bb
+r16286 973ac73362
+r16289 b2e8634221
+r16292 08a8c00be6
+r16293 baf7e773f3
+r16296 9b7039e946
+r16297 e5868320d4
+r16298 95dd7d914a
+r16299 33b03fdc1f
+r16300 54a4542917
+r16304 b3057cb638
+r16306 4c9ef158c6
+r16307 baa6f58f76
+r16308 f353a1d4fe
+r16309 8484a8b26c
+r16312 f9924c9efd
+r16313 c06b1d3f61
+r16314 f88f17f6ee
+r16315 980a99cfa4
+r16321 e64aa79347
+r16322 597f971fcd
+r16328 0469d412cd
+r16329 cb2364e9c8
+r16332 17d9b4a800
+r16335 1f029a28d6
+r16336 79a47b92e0
+r16337 98abb80c3c
+r16338 b846a6a741
+r16339 96c581e441
+r16340 758092d16b
+r16341 f902a988a0
+r16342 d357ce72f5
+r16343 bd61de26a3
+r16344 ced4ddfef6
+r16345 833c65eb09
+r16347 88f7f3fa69
+r16348 6f503f39b0
+r16349 a12fde6a5a
+r16350 22ef50488a
+r16353 2f3d17b186
+r16355 068cd37e08
+r16356 167a627457
+r16357 8840b3a207
+r16358 c336690252
+r16359 fdab95c6ae
+r16360 2d6d18662d
+r16361 0964a593ec
+r16364 ea3a4fe4c8
+r16376 cc97afe49f
+r16377 d1bf566ad6
+r16378 b95390021d
+r16379 9dde9718b9
+r16380 6fce7f1410
+r16381 c0674859e2
+r16383 d0b40ba526
+r16384 35daeb8603
+r16385 829e4ea485
+r16386 852d3d0a66
+r16387 09d8adf207
+r16389 cc84bb54bb
+r16390 7d42d4b2a9
+r16391 d5763d58d9
+r16392 1db99a4309
+r16393 9cbedbdaca
+r16394 f0d060eee5
+r16403 c59f026df1
+r16404 7e8f7199a1
+r16405 8e4e97ad78
+r16406 325e2ba1b1
+r16407 0bc8157005
+r16408 4e308c8f62
+r16410 b219392bfd
+r16414 3d8880746e
+r16416 391fea8da0
+r16417 3128d1e0e5
+r16418 e6a1539441
+r16419 32cebff4ba
+r16420 8c770d3a7a
+r16422 2156f3e306
+r16423 418e7e5f9e
+r16424 583a2fda9f
+r16425 9da19f07f1
+r16438 6ae2c86f2f
+r16439 6eba78c751
+r16442 219412ebb7
+r16443 eae38f8340
+r16444 683e15f02b
+r16447 99529c51c0
+r16448 bcbf5a1267
+r16449 2bed53ea79
+r16452 81985e49cf
+r16454 ffe546326a
+r16456 8b014ee7d3
+r16460 c7780ded0b
+r16461 448110ac11
+r16462 fa88dfe5cd
+r16463 7efd2d6eb0
+r16469 cadd7aca7d
+r16471 3a49d0ae1d
+r16472 6599832787
+r16473 c50dd4e212
+r16483 57e8dfd55a
+r16486 90394b899f
+r16487 7999744683
+r16488 e6f0eb6e1b
+r16489 4f84b00b86
+r16490 26877991ed
+r16520 cdbd7d9a01
+r16521 23fdf0b4e2
+r16533 fff82dd828
+r16534 5d6c2cb4c6
+r16540 8a69a88c9a
+r16541 535d514b23
+r16543 7848f0ffaf
+r16548 a38b62f23a
+r16551 4f7749dd30
+r16552 08b9fdc210
+r16553 f20f480fca
+r16554 6866d592b9
+r16558 a7db64605e
+r16562 2834d1838c
+r16564 bc452c0ef2
+r16569
+r16570 7f72290295
+r16575 65ba7e2bec
+r16576 f618e45807
+r16577 01a338c1ac
+r16578 b32a065e53
+r16579 6243483556
+r16580 1f84f1d776
+r16581 a2db9e3f7f
+r16582 e7f006fe9a
+r16587 283bc03d95
+r16590 3c327c5d4d
+r16591 c63b3a7e7a
+r16595 be91cd08be
+r16598 21749978ee
+r16606 c92b30307c
+r16609 db642a40da
+r16621 8aee69cc9d
+r16622 6700e99884
+r16625 2d61f09332
+r16629 af47e5b433
+r16633 0b574c7842
+r16635 909efc305d
+r16642 23d69bfab5
+r16653 ed4693400b
+r16654 b31dcbdcf5
+r16661 f3bf480dc3
+r16664 f7638a5cbb
+r16683 91b2f60d31
+r16689
+r16690
+r16692 c3c87411ce
+r16694
+r16695
+r16696
+r16700 c8107b0d95
+r16728 0dde1442dc
+r16731 aae227ba01
+r16733 4d32e17513
+r16738 f83d897754
+r16740 1566ee0c36
+r16745 61b353255e
+r16747 806edf6f84
+r16748 c8c1ecc5ea
+r16749 eba7932b13
+r16751 491ebd0c2c
+r16754 af6be2087f
+r16755 c962a00e03
+r16760 8836f6c0f0
+r16761 14bb605d95
+r16765 c70776c006
+r16767 ee740145d8
+r16775 c379973e4c
+r16776 f6b2ab9b5b
+r16783 af7c128293
+r16794 fef6bc4f30
+r16795 eedce544f0
+r16812 50884412ab
+r16815 a405c1e0f2
+r16831 1805207276
+r16832 b1c9db8bfc
+r16833 ba0935e8ac
+r16842 70347b7896
+r16844 abeb6e6435
+r16852 b0de8aa196
+r16855 166563559b
+r16859 0313e1c018
+r16875 86397c940a
+r16884 18aff4c4b5
+r16887 d215c74375
+r16888 cc5695df41
+r16889 91d92ec83b
+r16890 ee79ccdc9b
+r16893 fd47d1ef24
+r16896 6fa0f854c7
+r16897 e53cf49b7f
+r16902 feec9de760
+r16903 55795630fd
+r16913 323e895672
+r16918 774176c7a6
+r16920 5e9bf6564f
+r16922 e877601ffb
+r16923 bc7db60a25
+r16928 8047e3e109
+r16930 a492467f1f
+r16939 c60a882fee
+r16940 de4d32b2e4
+r16943 e3d105a0cb
+r16945 51615fcd58
+r16948 737dd284b6
+r16952 72cffa149f
+r16955 77852ce568
+r16962 ca805b9f21
+r16964 45aed61ae5
+r16968 d7839e8a6d
+r16969 59d2220360
+r16970 1f83b1f07c
+r16971 9ad89d940f
+r16976 d265e01353
+r16993 1898ae1307
+r16994 0606aa4755
+r16995 a0c64cf5a8
+r16996 e52898338e
+r16997 f13e298f14
+r16998 91f5c1e98c
+r16999 7b1258829d
+r17000 9bf8be6db8
+r17001 45a49c276c
+r17002 8c52d2ef0a
+r17004 c9365b9f14
+r17005 b5e97c54fd
+r17007 35607daf40
+r17008 dcb611298e
+r17010 6838910311
+r17012 011d39a3b3
+r17017 3f70dea914
+r17021 b2e6ac7747
+r17036 ec3ee84bd2
+r17039 f9d6f834b6
+r17040 b85f33beb7
+r17041 f86527ce55
+r17042 a81199163d
+r17047 48355ee28a
+r17048 0ecacced03
+r17049 dd42e06b03
+r17050 bb6969c638
+r17051 c1e179743e
+r17053 6011d38a03
+r17054 8765cfe472
+r17055 3c43622fb2
+r17056 3eb1eb58f1
+r17057 a4c522e822
+r17058 18b36de92b
+r17059 6fde5968a3
+r17060 16e159d17e
+r17062 a6340e3280
+r17063 3811981e42
+r17064 21a839bbf3
+r17066 9191eb8dd8
+r17067 76009173e0
+r17071 b0bcd0a40d
+r17072 ebb6a2a06a
+r17078 3e45f134aa
+r17079 7681434a92
+r17082 8d017c0f1e
+r17083 f4720669d6
+r17085 64af689e66
+r17086 347e682ba2
+r17087 4fdfc29d7e
+r17089
+r17090 719dce0a89
+r17092 ced3433418
+r17094 bcb3384b79
+r17095 c6127f4070
+r17097 bee24f7b52
+r17098 40f7264305
+r17099 903933d7fd
+r17100 fb80d00274
+r17101 98933b910f
+r17103 7acf450800
+r17104 708baf9476
+r17106 04840e2ed4
+r17113 f2032c9588
+r17114 266df9f05e
+r17115 dd36893757
+r17117 c25ec632d3
+r17118 bb15b2d1d7
+r17119 10b8c781c2
+r17120 c193d5918c
+r17121 311a391dd1
+r17124 c248f50471
+r17129 f43868888e
+r17132 855ec6101a
+r17133 0ee11c3876
+r17136 0171fdede1
+r17139 882022241d
+r17143 0e04072c89
+r17144 36b0e8178f
+r17146 a626f62538
+r17147 5da9192e4a
+r17149 f4411a5ab0
+r17152 972e5c52af
+r17154 feb773f602
+r17158 6ed49d8b85
+r17159 275e9c7375
+r17161 7e908c84ff
+r17169 502a422b3f
+r17170 dad1f88d8e
+r17171 9c0ac8b712
+r17172 a187f432f7
+r17177 ef13a9d40b
+r17178 68e4cac5ae
+r17179 c4c651969c
+r17180 ae4e5376d5
+r17181 a4baf48a5f
+r17182 bf35b888e4
+r17188 57e95eb403
+r17190 0f81e1686b
+r17196 5c2635fb90
+r17200 14725a8ca3
+r17201 020add45b8
+r17202 166afcab41
+r17203 4e52d412b1
+r17209 5d802d95ce
+r17210 0e495b0aba
+r17211 c02c236c70
+r17212 7fe49aba49
+r17213 228225c538
+r17214 07ee2ba75f
+r17215 174a9a7059
+r17216 b4cd4a89db
+r17217 b6e70976e8
+r17218 04949bcfb5
+r17220 305fe3a352
+r17221 9fc30e17b2
+r17228 3d96a4aa32
+r17229 ddecab441f
+r17230 77be5533c6
+r17231 51c487b126
+r17235 3489c4fdd1
+r17238 56b0eb1d8a
+r17241 276ed22211
+r17248 9bedaaa817
+r17250 0bd2114450
+r17252 2ef54cbddb
+r17253 7e95eacafc
+r17254 f22cdb775f
+r17255 9bfd5a0249
+r17256 6ac42fecec
+r17257 c5e4288aff
+r17260 f3b5aed2b9
+r17272 717e797c25
+r17273 5e2dd3850d
+r17274 40f8fa9402
+r17275 a1c3d51a90
+r17276 807daab252
+r17277 ec04bfb454
+r17278 f085ff3942
+r17279 4ccece5f6e
+r17284 f2dfc4a54a
+r17286 5af0e1461b
+r17287 8e28858bd1
+r17288 8bafc41b19
+r17289 b4e3d06662
+r17290 ca9431e11c
+r17296 cd105bb1f4
+r17297 5f0edd35f0
+r17299 a7ea097502
+r17301 1a1c5f5503
+r17303 30a27a479e
+r17304 3bbffde303
+r17305 a14b437421
+r17306 ff9887891f
+r17313 00d196adee
+r17315 67c3c68da5
+r17316 36bf7cb302
+r17323 9a4199709d
+r17340 65b7d05759
+r17344 7bf8a0c175
+r17349 00c9c7e85c
+r17367 5a820a9708
+r17370 9257aeb98b
+r17371 89ddf2d6e7
+r17372 66f28b5aa8
+r17373 a2bfe6eef5
+r17374 ba2bb4c1a1
+r17376 1d439e0bd0
+r17377 e33a70721e
+r17378 4145de88b4
+r17379 30306fec3b
+r17380 bf96e45cd1
+r17383 06e3400b2c
+r17389 370f060c6d
+r17390 1c72ffaee5
+r17393 532147c333
+r17394 dea08d71fc
+r17395 b62a73e023
+r17396 8087f9b529
+r17397 651294e140
+r17398 8ffa7ff6be
+r17399 55d14ccdd6
+r17400 faa34dab7d
+r17401 845c4fcd31
+r17402 070c60f747
+r17404 20f986ecf4
+r17406 c1be9a8a7f
+r17409 3b25ed4bb5
+r17415 a464ed4c3a
+r17416 16d4b1d76a
+r17417 79c1f9882a
+r17418 68bcc9e7c6
+r17421 2abcdde283
+r17422 ccfea35d7a
+r17423 2a491aaa0e
+r17438 f2a72ec46b
+r17447 7cc03e888b
+r17448 b17f6f68da
+r17452 84bb943a9d
+r17453 becf900b40
+r17455 150d137d20
+r17457 339cbf16da
+r17460 4e2f93073a
+r17461 7a458d7131
+r17462 e42d7e8399
+r17463 b06edbc46d
+r17470 c3e29c28b0
+r17471 e1ccc2e829
+r17481 d237da1fff
+r17482 0d513223bd
+r17483 8c997bd38c
+r17484 2fd6666690
+r17485 4ac90d308d
+r17486 f5bed34066
+r17487 21376b3811
+r17489 a51564d278
+r17494 6ea08aefa3
+r17496 b30ca9c570
+r17497 fb93555a44
+r17498 6556ff6af3
+r17501 4153ff1282
+r17502 c9bb938eb0
+r17503 c8639e6f9c
+r17519 cc3c2f72df
+r17521 c516c42d42
+r17528 1e1231c150
+r17538 92f91f0e06
+r17541 2ffeb5af81
+r17545 cd2843fa26
+r17546 19c09dd687
+r17549 da904a34ee
+r17550 0adcf1fd86
+r17553 d1d54c6f8d
+r17554 c52b5c7df7
+r17556 4ae08113a6
+r17557 aaf919859f
+r17558 d1cd9999f2
+r17580 458c4128c8
+r17581 7a03d2498b
+r17582 718c06c2f9
+r17583 2806d83317
+r17584 cbb366f129
+r17585 d5985686e0
+r17586 03429aee94
+r17589 bdc8c11581
+r17590 ae897e4d28
+r17591 912da5d2ea
+r17592 6875e2fde5
+r17593 6029fa7931
+r17594 cee28d7cc7
+r17595 8137c1492f
+r17596 0a80c26324
+r17597 a62eceab93
+r17598 a79e84b239
+r17599 7acc55b2dc
+r17601 b5b769354d
+r17602 4d3c8ef4be
+r17603 9f907e5813
+r17604 90fa917f34
+r17605 8906512f68
+r17606 c045524ead
+r17607 e4b32dab97
+r17608 8a9a104f79
+r17609 8be38d4395
+r17610 255c136db6
+r17612 9b2908a5ed
+r17613 b17eed3047
+r17614 7fd2740b27
+r17616 a020e82b2e
+r17617 8cc51cc0dc
+r17619 6befaa0f9d
+r17620 1165c27985
+r17621 4603e36f93
+r17623 2bb5db8e23
+r17629 e8cdd793c5
+r17631 f461ac7401
+r17632 003571d528
+r17633 5d2441dd3c
+r17634 c3989c5ba7
+r17635 558808f135
+r17636 e2dc065960
+r17637 43e5b5c135
+r17638 7831970b25
+r17639 2a31d6fd2c
+r17640 036b3851c1
+r17641 f5508bac2c
+r17644 330ad12bbf
+r17649 6f4ba5480f
+r17650 9ce36827e3
+r17651 ba42c086e1
+r17652 4304b15730
+r17653 29c746ca68
+r17654 1bbf9f89f3
+r17655 6d66470bbd
+r17656 5b1da4217f
+r17657 98be321315
+r17658 c7a419a711
+r17659 3e43cc0490
+r17660 1b2c72aeed
+r17661 5103735f4b
+r17664 e9bcc87c81
+r17665 af8a754328
+r17666 ee2d15b908
+r17667 8155f5e712
+r17673 5671456e84
+r17677 2379eb4ebb
+r17680 14a631a5fe
+r17681 75d487d831
+r17682 f3c0640e3d
+r17684 1e8d204851
+r17685 eead648222
+r17687 a9b446fadb
+r17688 8100cc9f6d
+r17689 8b030ca484
+r17690 974735b01f
+r17691 68bb95dc35
+r17695 f7ab13b08e
+r17696 2ea3b94ee2
+r17697
+r17701 931d2d43cd
+r17703 a79ee73df1
+r17705 a8acd9ecbe
+r17706 e4a8be83c1
+r17707 ca3d31e7b2
+r17708 11f5744d1f
+r17709 99e44f21fe
+r17710 93ce8b0c6c
+r17712 e326df2c22
+r17713 c8ad9ef2d1
+r17714 7cfc53fb4b
+r17715 39fdbddb88
+r17716 e2690f9e0c
+r17717 764e5d6db8
+r17718 304a455e65
+r17719 1e3c53fc74
+r17720 0df17b5003
+r17721 62d0a71057
+r17722 1b9f19f085
+r17723 40c11466e6
+r17724 9b3b1847ce
+r17725 1d744e7e93
+r17726 e9a2726b58
+r17727 302427358e
+r17728 8fa8118e34
+r17729 f665c2749c
+r17730 cafc8d6e57
+r17731 14dbc65b92
+r17733 1b97e9821d
+r17734 a4b9b4366e
+r17735 4168caa00c
+r17736 083f2fe49e
+r17737 5b4ff1bb32
+r17738 78d6eadeaa
+r17739 2670b004c7
+r17740 78265a6b80
+r17741 fbf991833d
+r17742 10830eaae2
+r17743 2a3015a883
+r17744 5dcd3008db
+r17745 7e3e93ed98
+r17746 6402ff311c
+r17747 2068560890
+r17751 e76fd544aa
+r17752 cce6308e78
+r17753 b2e928c6d1
+r17754 8fb4f2c37d
+r17755 b80d1e378e
+r17757 e789f9ac8f
+r17761 3de51d6b76
+r17762 3b5f98fd1c
+r17767 e7d6bfb2ae
+r17769 924b4a982c
+r17770 54384172fe
+r17771 af9090a32a
+r17772 14fb2dfadd
+r17773 b3ce4c4f7d
+r17774 6d20b470c5
+r17778 92be0221ea
+r17780 eb96cbb7bc
+r17781 3f1d10d105
+r17783 457f6dfc11
+r17784 9325f2a582
+r17785 14a4920c0c
+r17790 f151228bbd
+r17791 4c3d87a501
+r17792 5326d9a204
+r17793 a4a89f7a2a
+r17794 12a88b5900
+r17795 eb4eac963d
+r17796 36a2c0d43b
+r17798 6b26cdf4fc
+r17799 182a5cbf02
+r17800 22b60f2f2b
+r17801 e3a13688df
+r17803 618fadfcfd
+r17804 54a706f3f6
+r17805 a1f0987959
+r17806 67ab4b8ece
+r17807 fa3010ed33
+r17808 36f07c72a4
+r17809 4065255346
+r17810 213285991d
+r17811 c5aa57c2d5
+r17812 607cb4250d
+r17813 c3afb3feaa
+r17814 0490a0ef52
+r17815 c3247d415f
+r17816 46bb8d600c
+r17817 0a4089a8ba
+r17818 0b8ece795b
+r17820 d73a296574
+r17823 e484f312b5
+r17825 5e12bab477
+r17828 103c97f7de
+r17829 5b2dec1e9e
+r17830 bd119a13d6
+r17831 7702b79895
+r17832 9e6db19540
+r17834 d03ffa8466
+r17835 9ed3fc1dbd
+r17836 21733eb9fd
+r17837 e01b0f41ef
+r17841 ea7734643b
+r17844 3781c27ce2
+r17845 e39e2b05b2
+r17847 76612dc8ec
+r17848 07eef10799
+r17849 76e6f41e6d
+r17850 29f58824a4
+r17851 b22342e78a
+r17852 2039b7fec7
+r17854 b036f6fe74
+r17855 4b8be5d8be
+r17856 cc5e79c9ec
+r17857 c7cd961ad1
+r17858 5abe77233b
+r17860 359d460949
+r17861 e8e1e61177
+r17862 93a27b6d75
+r17863 d94cac09a0
+r17865 ea519396af
+r17867 ce0d59af04
+r17868 503d8c26b9
+r17870 c8ef95caee
+r17871 09e9e88d00
+r17874 13f7432497
+r17878 b7eac378da
+r17879 578d4c6716
+r17880 08da52d903
+r17881 92b8ae1388
+r17882 f34e908054
+r17883 8434c271e5
+r17884 cf59c41582
+r17885 0df28504f8
+r17886 7fc525184b
+r17887 9b2430c776
+r17888 e1424d97d5
+r17889 dbb58b1170
+r17890 67fa653a48
+r17894 450425c964
+r17895 08c63fc9a1
+r17896 09dc46783d
+r17897 036f260201
+r17898 9636749e63
+r17899 3f04dd4462
+r17900 02827fb081
+r17901 b35a79a93c
+r17902 660b4beeda
+r17903 5ef904034f
+r17904 da332a0e42
+r17905 f98d917d42
+r17907 f057f5f3fa
+r17909 da10214991
+r17910 488f986078
+r17911 fcc62d3da6
+r17912 c36e3cc0a6
+r17913 661f1ba10e
+r17916 390ccacfe0
+r17917 12d57cd2b4
+r17918 1dd1702022
+r17920 ab9381b453
+r17925 c6cf4fc022
+r17926 761d162a7a
+r17927 d3a5b5b97b
+r17933 63031aa7f0
+r17934 8c23908ebb
+r17937 fb57f8cec1
+r17939 7aab2a8d9e
+r17940 e0a4e468b7
+r17941 3f8de98f0b
+r17942 cdda313b40
+r17943 289970ec7f
+r17944 c7aa8f5778
+r17946 26e953fc6b
+r17947 d161b8bcf2
+r17948 640daad3f4
+r17950 5906c86214
+r17952 045e04db5a
+r17958 954377bb52
+r17959 a7aeed67da
+r17960 f5f18249a1
+r17962 da8b3a4b9d
+r17964 115dcf1b3d
+r17979 520483071d
+r17981 c9bc955f52
+r17982 a431dc606a
+r17983 02ec6b9c10
+r17984 cf4c6c334a
+r17986 7d7b037bd0
+r17988 e46e603f65
+r17990 56b22f27d0
+r17991 f09e35944a
+r17992 c3bddc74e4
+r17995 a55567971e
+r17997 a0c0c86846
+r17998 d14114d3ad
+r17999 9f6fe27b21
+r18000 c260301efe
+r18001 a2166dec9d
+r18002 8cc477f8b6
+r18003 9bfc974222
+r18004 bd7bd8fb27
+r18005 8e8beb0cdc
+r18006 139d4300d8
+r18007 df426a0c13
+r18008 01dcf00b68
+r18011 238ad426ba
+r18012 f205501be8
+r18013 5fa3710faa
+r18014 f85a6749de
+r18015 1164ab879a
+r18017 771451984a
+r18018 66036d3d4f
+r18019 b9e451ce6e
+r18020 6d09e964f7
+r18021 a46b8b1501
+r18022 9e8835d617
+r18023 c762ae353b
+r18024 e638fb8662
+r18025 b72cc0bda5
+r18026 8d8d1c3147
+r18027 d3ff8d400f
+r18028 5982a5347b
+r18029 dc426d5fa7
+r18030 5fe886ed64
+r18031 9b046d0952
+r18033 a907772ff5
+r18034 7337db9c59
+r18035 54093685b8
+r18036 a4bdfdcccb
+r18038 53ed9b920e
+r18039 73746f649a
+r18042 e41d30ba4a
+r18043 4788fee88e
+r18048 cd7e1a1728
+r18049 e58673295a
+r18050 d05270c938
+r18052 78eeb59f0f
+r18053 493d03653e
+r18055 5d11bc4733
+r18056 e6c140fecd
+r18059 9e52f5beda
+r18060 57ac948b1b
+r18061 be8e3c6911
+r18062 3ee6b3653f
+r18063 a657e6b766
+r18064 4d5d6fbe94
+r18065 2b3218c788
+r18066 614ba1f785
+r18067 83ec9c329c
+r18068 60810d5c03
+r18069 0e170e4b69
+r18070 533764a718
+r18071 8cf7228f8c
+r18072 85a7be90da
+r18076 c50f73ddb1
+r18077 e1b88d7758
+r18078 2ebff1417c
+r18079 c22ebf74e0
+r18080 76294e00c5
+r18085 9ca38d23a0
+r18087 11d2fc79cf
+r18088 3f9bbdbc78
+r18089 d09ec90432
+r18090 4bac7312b3
+r18091 ef06def0f0
+r18093 6060a29843
+r18094 ecb80ebcc5
+r18095 d83917a2ee
+r18096 ec70057db5
+r18097 6ab1f0b771
+r18098 1c9870541f
+r18099 410efa8317
+r18102 f537546d8b
+r18103 2478159125
+r18104 6c0ba3ee65
+r18105 ae85676cb4
+r18106 7e3f53ed7d
+r18107 c83d5573ce
+r18108 ac7180cf63
+r18109 ff1eb3aa12
+r18115 d2c69112e0
+r18116 7518d6700f
+r18117 94ade481b2
+r18118 d0452d00c9
+r18119 26adfa0610
+r18121 2f085cf0d2
+r18122 288a684174
+r18124 1e2217eccb
+r18125 9a8c1984be
+r18126 7abf1386ee
+r18127 7d92d6c60f
+r18128 2c31c03c62
+r18129 cfe07c80c3
+r18130 4fccc851b8
+r18131 b3924e660b
+r18132 979e774ef8
+r18133 505ea7c3e0
+r18134 e32113307c
+r18135 e3bb9bfa5c
+r18136 31baa0e552
+r18137 a868cd7589
+r18138 73a4bffc83
+r18140 f5c93803e4
+r18148 91643c355b
+r18149 e659affbea
+r18150 8fbdb547f1
+r18151 1ecef3bcd3
+r18152 a91ef25608
+r18153 fe1d043034
+r18155 96f6c893f1
+r18157 978e36705a
+r18158 0464a24e40
+r18159 211fcd601e
+r18160 bb085c4f75
+r18162 19c3aa9b31
+r18163 d14b4a117e
+r18165 b640b4c70f
+r18166 a784a5846b
+r18168 d6519af64c
+r18169 ab099645c9
+r18170 91c683e22d
+r18171 d17c979ce0
+r18176 7ac2fc34f7
+r18177 6cee8d5837
+r18184 f535672a90
+r18188 e308e10616
+r18189 def1f684c0
+r18190 568cba14a3
+r18192 8e2090600c
+r18193 08a4234ce0
+r18195 3b72f6de82
+r18196 ffb3ff17c1
+r18197 57e0d0250d
+r18198 c044b2e8c9
+r18199 76228e8448
+r18200 865ec030f3
+r18202 70b9c762e8
+r18205 5f06ad4179
+r18206 3be21076e0
+r18208 3ba0e87fed
+r18209 e373d268a5
+r18210 67881bbca0
+r18212 c93f64f7ea
+r18213 64e41b43cc
+r18214 129cdce825
+r18215 26bca73b09
+r18218 5c33f943d4
+r18220 dba0f7f3bd
+r18226 5754e85ed0
+r18230 dbe0e2bc38
+r18231 1eda989ae9
+r18235 99ede604a0
+r18236 ac4542b356
+r18237 f50cd49608
+r18238 b0706ef600
+r18239 2bbaf246cf
+r18240 e59b2669a7
+r18241 92b3940688
+r18243 1901250eef
+r18244 ccfb3b9c16
+r18245 79dc3b49f0
+r18246 69fb6eaa7d
+r18247 8ee2c8685d
+r18248 2bc40d593a
+r18251 a25a8c309a
+r18254 fdd7b82c5a
+r18256 5a0c92b079
+r18257 67d80e7a75
+r18264 7ff290c43f
+r18271 97e4a6162a
+r18272 d0731b1edd
+r18273 0c29413d8a
+r18278 ddf20e4d09
+r18285 ac779096c1
+r18287 0be42af7a2
+r18291 d9418567e6
+r18293 4ec0c0ee2c
+r18295 d7dbdd75fd
+r18298 93ba5d9293
+r18301 370817ac97
+r18308 69e1ddb55a
+r18310 8dee86d734
+r18315 b9be89ebda
+r18322 818a8f4c08
+r18323 467cfb2fc6
+r18324 58bc0b3a53
+r18326 097993aea4
+r18327 1514085298
+r18328 8bbfb90b49
+r18329 dc498fd655
+r18330 b66b9de0ee
+r18331 3eadba0ddd
+r18332 35a638ed93
+r18333 9dd3236b92
+r18334 3355ead4eb
+r18335 6581c02a2e
+r18336 f1f6d7c6a6
+r18337 21e5e4c173
+r18338 ea45f483bd
+r18339 9f84c9512a
+r18340 f6350575f3
+r18341 d6798ac2ab
+r18342 1f6c8f2be9
+r18343 1c56489b3e
+r18344 b70cf1f40b
+r18345 fd1c68f004
+r18346 4fa2b5ac18
+r18347 670edfe22a
+r18350 9fcf6dc3c6
+r18352 04ed00053e
+r18353 a91a8b2ac2
+r18357 294000998f
+r18358 2b51d5c477
+r18359 3e95510910
+r18360 30ab8b6924
+r18361 ff4552038d
+r18362 0cb9b256f8
+r18363 2c3208955c
+r18364
+r18366 64342a3d92
+r18369 9e89645170
+r18371 d063a9fa51
+r18372 202d2562ec
+r18376 3b0c2ba269
+r18377 fa70f51234
+r18378 9eed5b8929
+r18379 9dfe628e0f
+r18380 128c23c788
+r18381 437e8ef4bd
+r18383 50b5242ee3
+r18384 f4301266d3
+r18385 8a78d37483
+r18387 40707e0f49
+r18388 22edfb2881
+r18389 68c289a95f
+r18391 c4a59834b9
+r18394 cbadb522f1
+r18395 cc711eef35
+r18396 27700284fa
+r18397 01ed33304a
+r18399 5775f1b887
+r18404 74a6eeaf09
+r18406 db045cb8dd
+r18407 46e40830b1
+r18408 947abebda1
+r18409 46f563457f
+r18410 c5af4c0388
+r18413 6148dff45a
+r18415 b9bec1c708
+r18416 8f1cf06e01
+r18417 14c5910337
+r18420 47bb1e153b
+r18421 5319bf04da
+r18422 8444d6e22b
+r18423 bd1e6e0934
+r18424 be31fef41a
+r18425 24471facbd
+r18426 1a4566278c
+r18427 11ee847d38
+r18429 d339959ff1
+r18431 f9c2bc54ff
+r18432 9780704595
+r18434 cf7a2f64f1
+r18437 ac89702827
+r18438 ec5e34144e
+r18439 744049bb71
+r18440 00f35b8424
+r18443 f046863f53
+r18444 edb1bf023b
+r18445 4226a1ffb1
+r18447 d32130e1f4
+r18448 f22d1313c2
+r18449 381209889a
+r18450 acdf9452c9
+r18451 5f8b4d2595
+r18455 dd8009b190
+r18458 1e15c075c1
+r18460 fe52cb070d
+r18461 f335258f61
+r18462 62104980be
+r18463 60533e82c8
+r18464 fdf7441ed1
+r18467 dad6fe7901
+r18468 e5187676e6
+r18469 1c872d63b8
+r18470 72f099bb9c
+r18471 a7d94bbd21
+r18472 db202748fe
+r18473 1ceff6729a
+r18474 2416d5724e
+r18475 abc5b5f47f
+r18477 ab9cf60fc7
+r18478 de8ca77c2e
+r18479 23f878f89c
+r18480 5e1deae361
+r18481 d601240fe6
+r18482 7838ff734a
+r18483 43b445579f
+r18484 fe72ad6351
+r18486 110b737f99
+r18487 f4d0095bf5
+r18488 cdfb6bf18d
+r18490 d73053a0c6
+r18491 ba8648d13e
+r18492 9cea5f6198
+r18493 309e7e0b58
+r18494 e484200310
+r18495 e6dd85961e
+r18496 4c4040c931
+r18497 32463342dc
+r18498 d0ca666b75
+r18499 22fcda0341
+r18500 8df11b38aa
+r18501 0eee4ea689
+r18502 420311df8d
+r18503 ad8d6f6753
+r18505 6b5b635f09
+r18506 ec18f148e8
+r18507 917101fd0d
+r18508 1d28a77bf3
+r18509 90bdc95f5a
+r18510 1af45f6a6f
+r18511 f90e6a94a6
+r18512 2b18a8b27e
+r18513 0ffc4725ce
+r18514 d249bcf71f
+r18516 c55580014c
+r18517 169a6a323f
+r18518 1cea0ea34a
+r18519 ff6271982d
+r18520 e8a46e6459
+r18521 fcb6a3772b
+r18522 0ae54e25fb
+r18523 522bf3a7d8
+r18524 397c2027d9
+r18525 6a9d9f379a
+r18526 c54bca38b0
+r18527 f56aac6b0f
+r18528 94e8503e18
+r18529 9e3295514c
+r18530 832114b933
+r18531 69d4d8c0a3
+r18532 0c7b99fbc8
+r18533 35c590828c
+r18534 8d4c53543c
+r18535 70d9557ab4
+r18536 f73e819a41
+r18537 78b61c43da
+r18538 163e4275ce
+r18539 4a1b8bcc72
+r18540 7039772a3a
+r18541 d0024b6e99
+r18542 d4c53a90db
+r18543 3be639c503
+r18544 0c424e878c
+r18545 72a7124873
+r18546 22608da738
+r18547 27fc24b0a2
+r18548 a8edce124f
+r18549 cd36447b0a
+r18550 94e71c26a4
+r18551 5251059ef6
+r18552 8c106309b0
+r18553 50c1a4c2ad
+r18554 affff809b0
+r18555 0f7296a008
+r18557 db8c41b535
+r18558 9c8da21394
+r18559 a97d573f6d
+r18560 99705d852d
+r18561 c1df5090b9
+r18562 42568ac7c9
+r18563 7f757333f9
+r18564 241dc55e6c
+r18565 0a921760e9
+r18566 7a2002522d
+r18567 37b2160aa3
+r18568 275ed574a8
+r18569 a75d39a04d
+r18570 d7f5a8824a
+r18572 7aa4764ed2
+r18573 8aed300faa
+r18574 f53ec2dc9f
+r18575 2d8878f516
+r18576 ac29052535
+r18577 7224d1c26d
+r18578 48cc8408cf
+r18579 904713e980
+r18580 fd58ffc924
+r18581 a4e8b0a502
+r18582 cd2bb7f026
+r18583 7c20966e50
+r18584 8949b0f255
+r18585 36529fe0ff
+r18586 b611f2e978
+r18587 de8a10cdd1
+r18588 2c39b8b083
+r18589 a04195e637
+r18590 d0a82fb9db
+r18591 d19685e7a5
+r18592 e7bd2c9fe5
+r18593 8814de2aa0
+r18594 ce362ef76f
+r18595 d582588b6d
+r18597 36b00f5234
+r18598 de60f2481f
+r18599 0c910180fb
+r18600 1e5ffa0cc8
+r18601 7e67e62dca
+r18602 a1efb93af4
+r18603 463be6731f
+r18604 1d19903447
+r18605 e6efa38619
+r18606 f44eb67634
+r18607 81440d55ee
+r18608 61635f0f58
+r18610 fe334907b3
+r18611 dd22c570ab
+r18612 8d9cab992a
+r18613 bc872302db
+r18614 88dc46dd31
+r18615 158e5db28b
+r18616 09ba9ab65e
+r18617 d227d486fd
+r18618 6758ca1bfe
+r18619 c918b70784
+r18620 d9a7d026ce
+r18621 8637f14a9e
+r18623 0600724c0a
+r18624 6da528df44
+r18625 0ef9dbcef0
+r18626 cfed2479dc
+r18627 5f89d82719
+r18628 96e5cca150
+r18629 2598cf0507
+r18630 54b405337f
+r18631 337ec4560f
+r18632 8ed736aab8
+r18633 3eb22b8eb1
+r18634 729ae785e9
+r18635 b5618b224a
+r18636 68c9e7c924
+r18637 6ac283c5e4
+r18640 8e498fed37
+r18641 7f8a733c0d
+r18642 fa3ea36c05
+r18643 17e464314d
+r18644 f8f0e5d25a
+r18645 17a441d93a
+r18646 d6db8f8213
+r18647 0ae9ca5d24
+r18648 fd1eba7145
+r18649 4d209eab31
+r18650 822b93ac9b
+r18651 c980b574ba
+r18653 3335e037a8
+r18655 aef123719d
+r18656 ba6cdaf1f3
+r18657 6b01bf9c30
+r18658 97fd4b036c
+r18659 2619f09ad0
+r18660 b06d4eb4ec
+r18662 39023c4346
+r18664 d471679126
+r18665 bc489c725e
+r18677 c71af41d6a
+r18678 c3a56da40a
+r18679 bbbfe4e748
+r18680 3c224284fd
+r18682 069ebc1801
+r18683 5f5b82e792
+r18685 e72f0c7f2f
+r18686 fe2068ef0d
+r18687 e934ffb347
+r18688 0250956d2d
+r18691 10cf73815b
+r18692 57ed4ca114
+r18693 8871528f60
+r18694 61ff261346
+r18695 514ff83e39
+r18696 f9394a4d47
+r18697 e604abb25c
+r18698 38dd94c87a
+r18701 9a22b72231
+r18702 c45e93e798
+r18703 2788c1ad5b
+r18704
+r18705 4ccb0bf2b7
+r18706 a5f4411f8a
+r18707 719b38b4bc
+r18708 1b1a9ba1f3
+r18709 d46bbd29ee
+r18710 7c589dcde6
+r18711 5dbf500ff8
+r18712 ef05daf100
+r18713 63089db7fb
+r18714
+r18715 27f573afb4
+r18716 b4c4f22b78
+r18717 03570027fe
+r18718 acf1e47be8
+r18719 32f93874ac
+r18720 6255db9edc
+r18721 ced5ee337f
+r18722 d5b02c8652
+r18723 d117803f2a
+r18725 4c29e778f1
+r18727 0f10ffedc8
+r18730 4b116e95da
+r18731 16eced4644
+r18732 d094b4ac4d
+r18733 efc9abd507
+r18734 6f18d00708
+r18735 44e60b3ae6
+r18736
+r18737 4466e36c4d
+r18738 35f61f4fa2
+r18739 eaa7f5738d
+r18741 66b6059b4b
+r18743 3a98614bd1
+r18744 4d8093722a
+r18745 30109202ee
+r18746 b03c1699a9
+r18747 a7697326cf
+r18749 e5464bcb42
+r18750 2fe29c477a
+r18751 48fe27d8fb
+r18752 9e54361343
+r18753 dc65ebea9e
+r18754 0d86d977a3
+r18755 4edbecfe9b
+r18756 9992fe264c
+r18757 2c5bd20a7e
+r18758 c2d33c6585
+r18759 caff582d5d
+r18762 875c84b359
+r18764 6bc633a4f4
+r18765 21035aa141
+r18766 87a113f132
+r18767 cabb954584
+r18768 6cfd03986f
+r18770 babad68e86
+r18771 ad9103538d
+r18772 593d685e4f
+r18773 c1f5cbd4a0
+r18774 f19fd024e0
+r18776 e1b326195e
+r18779 fb38e47af1
+r18780 6fea2488af
+r18781 92fc7b37b0
+r18782 8f8096f0ab
+r18783 67a8cdb404
+r18784 d17b40768c
+r18785 026b824ecc
+r18786
+r18787 a43a29e643
+r18788 d7796af940
+r18789 22c91bc256
+r18790 e31f18094d
+r18791 4a727f3b01
+r18792 0c50ba8677
+r18793 15eb9333fa
+r18794 9f5eff8768
+r18795 726ca37676
+r18797 3fb279ed38
+r18798 2a5664146d
+r18799 cecae47090
+r18800 490218b354
+r18801 f7ba972de1
+r18802 09b71d8bea
+r18803 5ae38f0f2a
+r18804 0bd474625f
+r18805 f0dc32f686
+r18806 32cac0e3fd
+r18811 53d98e7d42
+r18812 4231751ecf
+r18813 449f2a7473
+r18816 f934201e2f
+r18817 198f9932b9
+r18820 72789e9bb8
+r18821
+r18825 1575d9b94e
+r18826 f981d15e96
+r18827 393ce4d2cc
+r18828 2a91d630e7
+r18829 0d724fbb3e
+r18831 8f17ff94fa
+r18832 c590eb86c6
+r18834 49bfcbe509
+r18835 a109a92d35
+r18836 3a4aa69fbe
+r18839 5816ef2f97
+r18840 701cb3195d
+r18841 5aa7e892bb
+r18842 4f62a386bb
+r18843 efa181e577
+r18850 d364022236
+r18853 e000ae4a5a
+r18855 082a427ff9
+r18857 fe264943ef
+r18858 a21a60e5b0
+r18859 13ec830291
+r18860 dbf87324a0
+r18861 f30c0b0dba
+r18862 353c843392
+r18863 ed09a7a83d
+r18864 d0442a8636
+r18865 7209116540
+r18866 a316250dca
+r18867 caa2d287d6
+r18869 1bc50a7c84
+r18880 321338da04
+r18887 154cad734b
+r18888 284788dbe1
+r18889 84146e2f53
+r18895 83b67aa805
+r18900 6a6a0ce235
+r18902 4ad7f5bf9b
+r18904 845d054b6c
+r18905 6ac3bdaf7f
+r18906 3bcfc86548
+r18907 f931f89c5e
+r18908 5d0b9775ab
+r18909 aad82c0521
+r18910 eb4d0290ac
+r18911 43dcd522f1
+r18912 7fd3db89c8
+r18913 0144df5f04
+r18914 d9a67d0f1e
+r18916 2672f972eb
+r18917 fad438ec01
+r18920 3b4a8067ae
+r18924 7804031bb3
+r18925 f52458dfff
+r18926 403bf69a0b
+r18927 aaa3689ffc
+r18931 5da791d8c4
+r18932 7f2eaea3e7
+r18937 2d5390fd99
+r18939 f4dbe6bdc7
+r18940 3e41797985
+r18941 fe8658350b
+r18942 43ce7fbc82
+r18943 c107643d20
+r18944 ac5c2b3c67
+r18945 e3d9ce3e09
+r18946 8828cd9984
+r18948 7c04bac160
+r18949 8befdb8b05
+r18950 3826ab4938
+r18951 94b8abdd93
+r18952 9b33c1c5ef
+r18954 4a6c3da399
+r18955 a6f19f5d97
+r18957 ad62d9f8b0
+r18958 9f121f57e0
+r18959 6b31849b85
+r18960 99a2cd8de7
+r18961 a8272bce60
+r18962 611e5bd1f9
+r18964 eb572091cd
+r18965 16a0192b99
+r18966 383b4ca492
+r18967 176401d453
+r18970 8cc29335a8
+r18975 25d9040661
+r18976 91f82d5821
+r18984 6ec4b09952
+r18985 adb677e4bc
+r18987 9cf9ab263b
+r18988 5be7c2213b
+r18992 0c57ba75d0
+r18993 25a6ed98b2
+r18997 5f1bf635db
+r18998 054c404c03
+r19003 6fb95453d1
+r19006 0e26f93326
+r19018 6c3a2d29f6
+r19019 e7763d39da
+r19020 cce8ae3c86
+r19024 1c67c5b849
+r19025 422ad71e10
+r19026 4e71524062
+r19027 50184e5847
+r19028 59e6507315
+r19029 2ec828e02c
+r19033 8b383a4a15
+r19034 2555d008fa
+r19035 1c4ad55d6f
+r19039 8a45a5570e
+r19040 2de36ff140
+r19041 71f8dd24a0
+r19045 2482bddf7e
+r19047 901ce7a85b
+r19048 112a1dbef0
+r19049 31c726aa43
+r19053 89a03016ab
+r19054 bf9ca9a2b7
+r19057 f75ee36c6f
+r19058 bf02e46f2a
+r19059 5d61522281
+r19060 a0cf7a48c8
+r19072 b45a1eeb33
+r19073 04d037f2e1
+r19074 820e0bd940
+r19075 e76f8f00cd
+r19076 5bfb4e7a56
+r19077 bb817a67b9
+r19080 447c7aed67
+r19084 75e791bf7a
+r19085 b880c5f288
+r19089 dff48d1ca5
+r19090 c3137e6293
+r19091 7e05907065
+r19092 1363244de1
+r19094 1747692434
+r19095 9d9889a7d6
+r19096 b57abb7bfe
+r19104 6255d6f2a8
+r19107 8ce658f665
+r19110 136c1cce62
+r19111 3a5e4c9e8b
+r19112 221f2a8d72
+r19113 a4aa2b4b63
+r19114 1b91faa830
+r19115 3bf4f69c1d
+r19116 3949726f1f
+r19121 4cb4ad76b2
+r19122 aaae8db368
+r19128 a1a8e9261e
+r19129 d828ace341
+r19142 6dae27f35a
+r19144 2bdd20d023
+r19145 5eeb2a3b43
+r19152 1e452efbc1
+r19153 cb754b1a56
+r19160 feb088b2bc
+r19162 5a817fdbf7
+r19165 cd98a5a186
+r19167 081e2fb747
+r19168 2d1242bd5e
+r19169 9dc0426d05
+r19170 a021e16b5f
+r19183 58651079b7
+r19189 70bc8f93c5
+r19190 f818b44b1c
+r19191 03bea84fa1
+r19192 6bb3d2ceca
+r19201 07a9de6b12
+r19203 2ae67db555
+r19204 247895b5e0
+r19205 322b823276
+r19206 7349476e5c
+r19207 49dde393b4
+r19208 4c84b05477
+r19209 c570e1e7af
+r19210 2816f2e6ce
+r19211 991c819cb5
+r19212 dc64c68660
+r19215 3bd3ae75da
+r19219 907fd78c9b
+r19223 5f43e70e1c
+r19229 1f1cce4587
+r19230 d7504cba9b
+r19237 1b7e1feee1
+r19243 c23174011d
+r19244 a2eab2215a
+r19245 bf584e5320
+r19246 a074b27312
+r19247 99dae57ebb
+r19248 dab03ce579
+r19249 92cfcd4399
+r19251 42a111ba41
+r19253 3926c98936
+r19256 3803528e26
+r19257 d913225042
+r19261 460a434698
+r19265 2cef1c58a5
+r19266 728775440c
+r19267 a129d09bae
+r19273 b2fbae789e
+r19274 93967d3563
+r19275 765acb4789
+r19278 2270544a9c
+r19285 ee02ad59ce
+r19288 926ca95a9c
+r19289 180c140953
+r19290 0b16c12662
+r19291 35a8ab3cdd
+r19292 63b1fd9be6
+r19293 f3068614fb
+r19295 af66ddc350
+r19296 e5ccae21e0
+r19299 4b8fe44351
+r19301 f9551d0c2f
+r19306 42a42ac0c3
+r19307 38c3ca6756
+r19309 d4c63b2af1
+r19310 727490ab53
+r19311 3a08cbbb97
+r19315 c3b27d3b4d
+r19316 dbdac60079
+r19319 cf53536f9e
+r19320 0ce248ef65
+r19321 03e717bdc7
+r19331 cc934ee7bb
+r19332 b7772a6535
+r19333 b4084fc9c0
+r19334 9a9fece5c4
+r19337 41b0aefbf5
+r19348 223bcfc6ab
+r19350 c5157c830c
+r19353 6ae7f2cbc1
+r19354 6f7723bea4
+r19355 acaad2bcfe
+r19356 95b6ced60a
+r19357 a6d876fbdd
+r19361 52f14327c2
+r19364 b42e1f1902
+r19368 852f027607
+r19369 4f373f6da9
+r19370 e159530bfe
+r19374 c9c04a5907
+r19375 3d115bd2a4
+r19383 094ed77bd9
+r19384 621da8e1ff
+r19385 04fb01d131
+r19386 d7f7a3e001
+r19387 13d642151f
+r19391 b02b388ffa
+r19392 f5ede0923c
+r19394 021dd25395
+r19395 7cbc06ed20
+r19396 1f075b56f8
+r19397 dbf0e12c15
+r19398 a4895b8592
+r19399 85cac0f0e0
+r19401 a110b8f8e4
+r19404 74ffca5b10
+r19406 679d4590d9
+r19407 72ede3ed81
+r19413 36716c6027
+r19416 a690e4691c
+r19417 1e93e17843
+r19421 1b807250a3
+r19422 d42f62bbd7
+r19424 5d25e9334d
+r19425 f540f03503
+r19426 decbd55f61
+r19428 abd87fb19d
+r19432 5084c4d8a1
+r19433 6fbb226617
+r19434 86a6ad44fd
+r19435 c6dfb1e316
+r19436 c7c9684ae4
+r19437 2ac62aa9e9
+r19441 b2bf6d3d09
+r19442 507cd9ef50
+r19443 af1b2ef059
+r19444 f2f2c41311
+r19445 f8187cb519
+r19446 3ec24991df
+r19447 7ae5e07a4b
+r19448 199de7cd8e
+r19452 6f4fba9c67
+r19453 c490722ae1
+r19454 6167e273e0
+r19455 6c6d9a0423
+r19456 47ff605523
+r19457 fe8ed5a8f9
+r19458 1754e3f490
+r19459 e7749823a7
+r19461 6debb6aa08
+r19463 43ad0dea06
+r19464 e9ce2c085b
+r19465 df502f4ffa
+r19466 e981bccdb7
+r19467 2aeae98776
+r19469 7da30bf2d5
+r19471 cedd41ba4a
+r19472 29d431ce89
+r19473 26a13165f4
+r19474 a0159da70d
+r19481 eea79567f1
+r19482 acd28e3fd1
+r19483 572adfa2f5
+r19484 dcc8d01366
+r19487 928c9eba3b
+r19490 aaa4da9f37
+r19491 277e28956c
+r19492 f3a375b0e8
+r19493 e597ad04c0
+r19494 46af17c33c
+r19498 98c7307de8
+r19499 2a5669288a
+r19501 ecee4b18ad
+r19502 6aaab9a6df
+r19507 0c17a1a7d6
+r19508 f0664e9035
+r19509 1e9a86e701
+r19510 fc07ece2e7
+r19513 446edd3328
+r19515 074281bafe
+r19516 df13e31bbb
+r19543 33e1dac4e4
+r19545 f5a525aace
+r19546 0e4ff57c1c
+r19547 6720ae4cbc
+r19557 5995692ffd
+r19561 39fb348121
+r19567 9ed068ec00
+r19569 fe1d0c7052
+r19570 e7bc7737c7
+r19578 6599b4dc60
+r19582 b302b5afad
+r19583 8f53cc93ec
+r19598 d24de699d8
+r19599 fe3b78b864
+r19600 523a9a2658
+r19601 07c295560c
+r19604 b88e47ced9
+r19618 d47dbcf17b
+r19624 261a807655
+r19627 f86ead7ca3
+r19629 4cc65f6e0d
+r19630 92c280f6d1
+r19645 6c4064a770
+r19651 1cd31e2edd
+r19655 c43f01c39d
+r19656 0c373e4985
+r19657 046bbed8b7
+r19658 31c1983e72
+r19659 50f42ab8c1
+r19660 540aa394f3
+r19666 ed4caf3fe8
+r19667 041361ae42
+r19668 17d6cc4445
+r19670 6063bf3d78
+r19673 0b236faf92
+r19674 ff7183ddeb
+r19675 0da0208af4
+r19676 773b7a287b
+r19677 c14b30a39e
+r19678 a3926747d3
+r19679 60e6a45de9
+r19683 db99de350f
+r19684 f34abbc000
+r19685 9aafbff378
+r19688 79cbdefa47
+r19692 32b04c2801
+r19695 ac3931a11d
+r19696 2edbf55c11
+r19697 08cba2fd9f
+r19698 6a23aa029b
+r19699 7bad13f179
+r19700 39a1e1fcea
+r19706 06713afedf
+r19707 536955e1af
+r19717 ae024cebd4
+r19718 d92679d81c
+r19719 2a6a02e9a7
+r19723 6f4e82da32
+r19724 055190a38b
+r19726 1e1c87c234
+r19730 04a99160c2
+r19735 7356f7782a
+r19736 56ce6c65a5
+r19737 3cf0e5a010
+r19738 c317201d1f
+r19739 99d8d53c36
+r19740 f7b8e8f346
+r19742 781eb073f3
+r19743 1a104aefd6
+r19744 88b60e35e6
+r19746 346aff23bf
+r19747 a8759a4ec3
+r19748 5b5af9e255
+r19749 682a01c83b
+r19750 d354fa17e7
+r19751 4c9372f665
+r19752 e78864041f
+r19753 cc4cd00e58
+r19754 b59bb7d36c
+r19755 e10d77e1ab
+r19756 3a75338448
+r19757 06947d66ea
+r19758 937872a489
+r19759 b408d0e98f
+r19762 2ea21b6ca0
+r19763 40dabcbb6a
+r19764 442766475e
+r19767 19dc226f24
+r19768 aa2c129e41
+r19769 58a86b6e67
+r19773 42123a6366
+r19776 9aae43ad9f
+r19781 e8e504c0f2
+r19787 27bc36b7a9
+r19789 1e890eacbf
+r19792 85befd6927
+r19793 3045b84c8c
+r19798 269486307a
+r19799 4daa662dea
+r19800 8eaef9c60f
+r19803 1c4e51471e
+r19804 ef3fb07b53
+r19806 c46145f040
+r19807 cc44d56c42
+r19808 b93068347e
+r19813 d6b43c4b48
+r19814 4a1b22e19f
+r19815 91a0ce7ba7
+r19818 f3fa2e86d4
+r19819 d26b2f2b94
+r19820 4ad672b0b2
+r19824 2e0c9a6ba4
+r19842 583e431b07
+r19844 d9e3dde6d6
+r19846 326e257371
+r19848 ee2415395e
+r19849 6f4a561df2
+r19854 b059cbd155
+r19855 ec6a2ce91c
+r19858 a350c4c1a5
+r19859 f1b417f10c
+r19861 a3aa801a51
+r19863 1f162e940c
+r19864 7f3922f39a
+r19865 7463bf9292
+r19867 84b523c520
+r19869 13b3d06f82
+r19871 0a1d1a6167
+r19872 dc683cb316
+r19873 ec664b1cd0
+r19874 aabd642596
+r19888 8648e1c8fa
+r19891 c882a2d675
+r19892 83d96af554
+r19893 797b2aeda3
+r19894 333f70873b
+r19895 370ab197f9
+r19896 7aa5ecea0b
+r19897 6f70a9f61c
+r19899 8284808cf6
+r19900 207b303157
+r19901 100112a580
+r19903 3f03586ba4
+r19904 0635b1a3d8
+r19905 cabf107814
+r19908 3d10835062
+r19909 b06fc095fc
+r19910 5be23003fd
+r19911 252ebb3281
+r19912 bc5eb3e511
+r19913 3bf4c1afc0
+r19914 b94c73656e
+r19916 c6fb331ae3
+r19917 d56190908f
+r19918 cf92cfb928
+r19925 b22086d0eb
+r19926 61cbe9441d
+r19935 15ba4abc82
+r19938 c6bc2a97a6
+r19939 e73ce61377
+r19941 41253da6fb
+r19945 706c86380e
+r19948 4559f45c7e
+r19949 9fe1f1503f
+r19950 43c1314333
+r19952 0f17201b10
+r19959 a55310838b
+r19963 c2359ccec5
+r19964 a3bf3f136c
+r19970 f54e15370e
+r19971 75d02a1a52
+r19972 87fa83d3f9
+r19973 a030f0d8b3
+r19974 ea22ed166a
+r19975 ef98846b86
+r19982 a9a967bc82
+r19983 e4af2ce209
+r19984 5697e1115b
+r19986 6995333a27
+r19988 7bee4c499d
+r19989 f2056ddf45
+r19992 38625cc96c
+r19993 62601656c3
+r19994 43d9fc1248
+r19995 7feaefb229
+r20003 0e9c5b7f85
+r20004 e7d2120bee
+r20006 a41307b3ea
+r20007 15add6cd50
+r20008 36b1d9cf1c
+r20010 8be82e1499
+r20011 ff2a9b4c58
+r20014 70ff72a16a
+r20015 3aea5dc898
+r20016 91d6fa1a8b
+r20021 4532a5d7f1
+r20022 e1afd5b323
+r20028 ba33e9ba99
+r20036 147ecff4e5
+r20041 de1d172a15
+r20042 1e88594f35
+r20044 873a28d90c
+r20045 e1c9a81e5d
+r20048 a4011632f7
+r20050 64f63ab396
+r20051 b42abff4c0
+r20052 721c6935fd
+r20056 24ad61eb2d
+r20063 d6cca14c48
+r20064 25d82e13f1
+r20068 a17785f8be
+r20070 8bd78809c4
+r20071 a4f1bfec2c
+r20072 2411320fda
+r20073 cf3c8e3e1c
+r20074 65db7124a7
+r20075 6bce02b838
+r20076 127147fb06
+r20079 4ee93c52c7
+r20080 eb8538483c
+r20082 e4fded7210
+r20085 f8d6169dd3
+r20086 63f5dbb0a6
+r20087 cd14cb81c2
+r20088 670bbca782
+r20092 1ba4b35866
+r20093 441f16c01b
+r20095 71e3f77d35
+r20096 505a7bc4e0
+r20097 b9d997e1d9
+r20098 db3d2518f5
+r20104 e378965dc2
+r20107 fffe6449d1
+r20109 8388f49560
+r20110 5472e3afc9
+r20114 1db89021e5
+r20124 461c798dbf
+r20129 cb1c0cf0a9
+r20133 8a89b68903
+r20137 e59e58b003
+r20138 4681d842dc
+r20139 6c7497dff4
+r20140 b0745039e2
+r20142 759ad530ee
+r20143 1c5db35b3a
+r20149 5330b36a5b
+r20160 a8dc5cbdac
+r20165 cc8e4136b6
+r20172 eb46c9ab39
+r20173 1a7200a1d2
+r20175 65bd378795
+r20178 f607fe4f95
+r20186 63333f9e62
+r20199 d8ef68e6a1
+r20203 88683ede7d
+r20208 248a992059
+r20209 d5f0ed310e
+r20210 3b620e31d3
+r20211 a25195fc1f
+r20212 05363648a6
+r20216 bbc126660f
+r20217 74f5d6fa90
+r20224 e8f34924dc
+r20229 32bfcc4194
+r20230 ce4572ca49
+r20231 a41d9351d5
+r20232 70ed6680a5
+r20233 7ddabed25a
+r20248 4faa918259
+r20250 691bc54190
+r20252 e7e0d49dea
+r20253 482cf0e2ef
+r20254 beb7392745
+r20255 b70347940e
+r20256 27f2d87d88
+r20262 348fd6e69a
+r20263 f9a751e444
+r20266 21e3410dd1
+r20267 a326f40dbf
+r20269 169b05aa40
+r20270 c163877ba8
+r20284 192c943c33
+r20287 ff1ecb5316
+r20288 3a0713b4e0
+r20289 ef2cb0f658
+r20292 2d12c10366
+r20294 14fcdff9c7
+r20295 d32b5bc758
+r20296 361a7a40d3
+r20297 cb4fd65825
+r20300 e197e3a1f5
+r20307 0cc326c767
+r20309 154326ab0c
+r20310 b41e97987f
+r20311 17f712ec18
+r20312 b858cef587
+r20329 e132d06e6b
+r20341 210a9552b5
+r20344 e5d37b199d
+r20349 6af8cbb361
+r20350 c10a035e1d
+r20351 053b6a686a
+r20357 8989a1bac5
+r20358 eebda61186
+r20359 e02fb0df97
+r20363 9e5fd5403a
+r20364 5d6a3f6382
+r20365 bdf13aaa1d
+r20366 df1139ee18
+r20376 2bf84d21a6
+r20377 d66a76c121
+r20385 9245c6a701
+r20386 f96931f98f
+r20387 e97ae22dd7
+r20388 64b0678d33
+r20390 7315339782
+r20398 57f14277da
+r20399 b5c141b4ed
+r20401 e525797f19
+r20404 677352f871
+r20405 4c879e3088
+r20406 6f3aa39042
+r20416 c63a271034
+r20429 dab6222b27
+r20437 9772ebe8ec
+r20438 60d5bbdf4a
+r20444 457fd68556
+r20445 d163f6971b
+r20446 466920e895
+r20447 250b45a124
+r20449 998a7b758f
+r20450 aa6811dae6
+r20451 91e88b3f7d
+r20453 c6c3b44b0c
+r20456 2f0d5beb47
+r20457 7ba3ff508e
+r20459 d1ac90fb48
+r20463 38cfa95dd7
+r20464 a6a9f23ec1
+r20465 65c180a5dd
+r20466 335f62ba63
+r20468 d75264a14a
+r20469 2664de4710
+r20476 895280684f
+r20477 6b9fe986af
+r20478 1b97738fcd
+r20480 4f2bcd1af4
+r20481 28c75a82ea
+r20482 f181a9be2a
+r20484 d64620b254
+r20486 fa0cdc7b3f
+r20487 020b930ec9
+r20488 25e7a7c350
+r20489 541dd58f4d
+r20490 1e828fdbf0
+r20491 34fe81a8a9
+r20495 763be33fea
+r20496 19bf31545f
+r20500 814683dd50
+r20501 23f89dd9e4
+r20502 9693cd5c2b
+r20504 eaa949005f
+r20515 df4d259938
+r20519 2d324f4506
+r20522 135ec13927
+r20523 a40276ad9a
+r20524 b0e6451e78
+r20525 3e1241caec
+r20538 9bd9b9fcc1
+r20539 74c615c835
+r20543 36ef60e68c
+r20544 d9b01e2c58
+r20549 3b00d9d7e5
+r20555 4bb4b8a08e
+r20556 3d47813cda
+r20559 518ac3d5fd
+r20560 d73a32db9c
+r20561 853b1817be
+r20562 0d5d440a68
+r20564 1184fd68b0
+r20565 0b77c407e7
+r20566 fdae184659
+r20573 e83ad1e005
+r20582 135d4f06b1
+r20586 41e80159b3
+r20597 efd68171b5
+r20598 6e0b81844b
+r20599 c4cacc0edf
+r20600 e077a9d6ae
+r20601 4ed1910b1d
+r20602 c19a721704
+r20603 556813ccdf
+r20607 08013877ac
+r20608 10ee5fd9ce
+r20609 8a1eab26ad
+r20610 7ea84d3542
+r20611 6dcfae7e8d
+r20612 1c1b6ef8f9
+r20613 a3d41894e7
+r20614 2d487cd460
+r20615 5fc0c8d78c
+r20619 61316fdc90
+r20623 a259a744bb
+r20624 164fa5151c
+r20625 0ad899b34e
+r20629 80ad0e7b37
+r20630 7eea9f2823
+r20631 1ab0d9ea48
+r20634 ac9fb6ad28
+r20635 daf9227e73
+r20639 bb6e5958e6
+r20640 a0c0f09497
+r20644 895c271ead
+r20645 21fbde04b4
+r20646 7d4cea0a99
+r20649 7140e9c3ad
+r20650 e4e513079f
+r20651 743e8782a1
+r20654 2a1f11991f
+r20655 361051b4d3
+r20656 ea7ac7b389
+r20657 4591dabb1f
+r20658 f8bcd67d50
+r20659 34bc787b08
+r20660 02c6aa766b
+r20661 0516cd02f1
+r20662 89fee4efe3
+r20663 6c88e2e298
+r20664 c3d125891f
+r20672 70cc762d3a
+r20673 589adb9563
+r20675 d90d03d55a
+r20676 6975d16800
+r20677 6441087c31
+r20678 8856f21f59
+r20681 f6183b63f2
+r20682 06c7657555
+r20683 daa6f82dd1
+r20687 311622a6d1
+r20688 94d2758147
+r20689 96270a3450
+r20690 e12005a107
+r20692 c01d264766
+r20693 f375f8ac3e
+r20704 71a0d2773e
+r20705 a7ad163b51
+r20707 953fecc029
+r20710 f6c69106d3
+r20711 6a79e29cd8
+r20712 b08a2a652f
+r20713 88a93f2bd3
+r20714 5b64d91b20
+r20716 6964699e92
+r20718 690542dbe4
+r20720 f5dc89196d
+r20723 7d08bfed78
+r20724 449c680774
+r20727 36707c33be
+r20728 a3da2dca9f
+r20729 ad0fd8bca3
+r20730 bb149d1b96
+r20734 c73ab4525e
+r20735 3078e17093
+r20738 0bc49d7c61
+r20739 1c8ab3a6ed
+r20740 e73348dc9d
+r20744 fe9126e5a3
+r20745 bdf37de86a
+r20748 e75346d68d
+r20750 b6cdaaa3db
+r20751 131b264b25
+r20752 490ed74ff8
+r20753 3282ac260c
+r20756 b80125cb3f
+r20757 07629c3c12
+r20761 3502dadad1
+r20763 2b20a98b3f
+r20767 5df06dc8da
+r20768 a469bd9637
+r20769 c8203f123f
+r20771 4aeae5f9c7
+r20772 9f55ad82d1
+r20776 0ae8343fd4
+r20777 909924acba
+r20778 a6eecfb045
+r20779 96a42a2eda
+r20780 6cb01719eb
+r20781 e6a0063d29
+r20783 19e78a93e6
+r20785 2b82a20d75
+r20787 93277ea020
+r20788 9ee1f2f3b8
+r20789 a1a6ab90ac
+r20790 bf696d016a
+r20791 429da0c3c7
+r20793 67b215e974
+r20794 7c19904e48
+r20795 a572d2d56d
+r20796 bd3afbf36e
+r20797 e979241c0e
+r20798 28837470cb
+r20802 96dc0e44e8
+r20803 f203f3adfd
+r20805 1e29061536
+r20806 b4d8becafa
+r20807 9691e49efe
+r20812 982baae076
+r20816 8d4f65fb24
+r20818 7577ec4388
+r20826 ac7dc3c102
+r20828 3033d4c30d
+r20829 150e1d69c5
+r20830 53545e7af8
+r20831 171d21f11a
+r20832 b627de8553
+r20834 68bcaee6c1
+r20835 1b99b4b148
+r20840 71e03e4aca
+r20842 ebceb2fa8d
+r20843 d983dc8c26
+r20844 5087792dda
+r20849 d4486b9e2e
+r20850 1c8210ec7e
+r20851 96a7efb1fd
+r20852 a165920200
+r20854 4de81a05b3
+r20855 06ae221de9
+r20856 6e76af56f7
+r20857 a8ee0a1a93
+r20858 821e11d056
+r20862 6a416d51f4
+r20863 c37cb5ad1d
+r20864 a78bf650be
+r20866 e9a60f236b
+r20867 1e166a7a82
+r20869 bbeecf2b78
+r20872 7a8973d40a
+r20873 2040ada34b
+r20874 30e65502ff
+r20878 d04911d894
+r20879 730720552b
+r20880 d7ad3f3487
+r20881 1ec5bf5c82
+r20884 15dfc92cdd
+r20885 d14841d095
+r20886 13da5ccad3
+r20887 369d3ca26f
+r20888 821229741d
+r20889 9132454143
+r20894 5e993b77ec
+r20895 cc698e70af
+r20896 f059062578
+r20897 a6b2e34c55
+r20898 80b0d24134
+r20899 1f8b43be3b
+r20900 2e6f4e7246
+r20901 ab33bb1b34
+r20905 e8ffe2674a
+r20906 b2e9e1b26b
+r20907 29ce74418d
+r20908 8a85f07da3
+r20909 84da1b2033
+r20911 09816ef0d3
+r20912 0e439d6d30
+r20913 f83314aa82
+r20917 cf2f9d7fbe
+r20918 23e5428008
+r20920 388a0c0d1d
+r20921 f592fb0520
+r20922 a2da1ebe61
+r20928 dd89e9c089
+r20929 cabe517050
+r20932 d6fb9d7809
+r20933 ff32248e9a
+r20934 71e84137b6
+r20935 7a339e84c2
+r20936 099f42f725
+r20937 d8a75fda44
+r20938 3bc73c1e1a
+r20941 18aa7f0c80
+r20942 f07bdbab91
+r20944 91cdb15316
+r20945 6e061d6f25
+r20949 57d38b321e
+r20950 669ce2013e
+r20951 acb161272f
+r20952 8d74992310
+r20953 df94b3c5b8
+r20954 db511fee56
+r20955 1558069de5
+r20956 7cfbc47200
+r20957 68cbfeac52
+r20958 84ecd8c45a
+r20959 6022f4b5d2
+r20960 3ceebd6ba6
+r20961 1c75ee54a6
+r20962 ea09870b1c
+r20963 152d22dbd0
+r20964 39c117a822
+r20965 de56fa0693
+r20966 303a4b33f8
+r20967 3f9364fc49
+r20968 145b61f50b
+r20969 6b834672a1
+r20970 865a9137db
+r20972 0284428a9a
+r20973 415fced48d
+r20974 f270f7ecfb
+r20976 f84684ee02
+r20977 cd5525a989
+r20978 43b68ece97
+r20979 4aa7ec183e
+r20980 2bf3a560d6
+r20981 8a36e97b10
+r20982 ebe8a875e5
+r20983 46e78e4589
+r20984
+r20985 53f4fbaa79
+r20986 c6facf49bb
+r20987 f479aff274
+r20988 7312300d33
+r20989 6ca74641f0
+r20990 10d7b668b9
+r20991 e81eeb3679
+r20992 ae71711ffd
+r20993 6e768fe8c5
+r20994 52f85091e1
+r20995 1911d4e96a
+r20996 cc9e8eda33
+r20997 93f8dd3a4e
+r20998 0dd2f30edb
+r20999 d5ae4c69b0
+r21000 00814d33ca
+r21001 cda9718a21
+r21003 2b1513b35e
+r21004 462e27a358
+r21005 64fd0c1346
+r21006 b19089db0d
+r21007 ddecf60083
+r21008 646c478b3a
+r21009 7476ed45af
+r21010 432e16ce90
+r21011 ba5dbbd44d
+r21012 9bfc0f0ac6
+r21013 b94c6e0da6
+r21014 07f1f6dd14
+r21015 42e67f1420
+r21016 7214dc0e23
+r21017 2356f6751e
+r21018 a73bbdfed1
+r21019 d18435dcd2
+r21020 6fa82c014c
+r21021 3aa1da6596
+r21022 fc03eabf5d
+r21023 c8e224eaec
+r21024 60ae43e753
+r21027 d3bf2e7801
+r21028 9690b45b3b
+r21029 dae85e321a
+r21031 dc9bb26306
+r21043 2a04d57787
+r21044 1b5c4b93ec
+r21045 649c18aeae
+r21053 0200da2d12
+r21054 65520ac86f
+r21058 34b8e8fcbb
+r21059 66509d1f68
+r21060 acf89aafe5
+r21062 38babc2678
+r21063 006eee0388
+r21064 1e84701e1b
+r21065 5679285ec4
+r21066 f9c2792695
+r21067 cb39da4caf
+r21068 98c87462f7
+r21071 4e7fd5ce08
+r21073 34b2093601
+r21074 87b2ffd8db
+r21075 833b9e671a
+r21076 55b69cb447
+r21077 dcca0ea0d7
+r21078 603f715f52
+r21079 0433d88432
+r21080 a4558a403a
+r21081 3447b38abc
+r21083 8d59708911
+r21084 68c2fff4c1
+r21085 121164ef71
+r21086 5f9c20c232
+r21087 60e50904a3
+r21088 69d8830083
+r21091 fee21b7e70
+r21092 217415af22
+r21093 2f5e867066
+r21094 b13d8fe24e
+r21098 b6c6e8f353
+r21099 aff35a066a
+r21100 7144b4990f
+r21101 2b0dcfe636
+r21102 b10b283498
+r21103 b7c17993c6
+r21105 13f24056a4
+r21106 57261cf375
+r21107 b9691e331e
+r21108 5f7ddb20ab
+r21109 fa34ce4700
+r21110 1c795cdd5d
+r21111 5e6367cca2
+r21113 bde2b7880d
+r21115 0708b61d19
+r21121 c3d86bfed3
+r21123 bf032aea51
+r21124 0f5c2696c8
+r21125 10bcc73bad
+r21126 ff2ef2fd44
+r21127 193df0b93d
+r21128 6ee849b6ee
+r21129 23d5dfc76b
+r21130 6aa285809c
+r21131 d12ea6d31f
+r21135 6aaf4a3d5e
+r21136 8d2876cc7d
+r21137 baaff96be8
+r21138 dd7dbea581
+r21139 356540e284
+r21140 f584d24348
+r21141 8352022054
+r21142 32e1da60a1
+r21148 1c4651b9b1
+r21149 98a5d29539
+r21150 51850896c5
+r21151 ce67a15560
+r21156 56dc3ded65
+r21157 3ff77430de
+r21158 4eade93cfe
+r21159 1b14f49ff2
+r21160 2f3988dd7c
+r21162 860f2bbe85
+r21163 605b7c5aeb
+r21164 08437bb245
+r21165 70d4eb9654
+r21167 f972729b04
+r21168 746f8ddcc7
+r21171 cc1a2efec3
+r21174 2ccf6d3b00
+r21175 2f0a415e1f
+r21176 fc6b3b0c62
+r21177 2b05807142
+r21178 f1e0c8f025
+r21179 505bbf0b34
+r21180 1dbc0d0fc1
+r21181 324eeff963
+r21184 166c496d57
+r21186 b61957e6f0
+r21187 3bcd23488e
+r21188 4a2e3d4175
+r21189 533c7397ed
+r21190 e21283e8a7
+r21193 2515edd33b
+r21195 70de5c3890
+r21196 115ca80a0b
+r21199 5ea6fc6807
+r21200 704aa0362f
+r21201 c2a9a308cc
+r21205 7fb02f53de
+r21206 9f4d2a906f
+r21207 fb399bce3a
+r21210 46ddf14b45
+r21214 bf2da77cef
+r21215
+r21216 05c22ec2ee
+r21217 c059e09cc7
+r21218 d2726ea605
+r21219 6915c987ac
+r21220 f2be3e6836
+r21222 6613b1cdae
+r21223 44fddf7540
+r21224 a4f00eaf4d
+r21225 6353b3711f
+r21226 3d7e9c11ad
+r21227 1935b66102
+r21228 a263215e09
+r21229 4eff9e1cd5
+r21230 88aab1cf8e
+r21231 ae8c065594
+r21232 a4aeb2d0a9
+r21233 fb8c14ea43
+r21234 ef1577a9c5
+r21235 2e1aefd118
+r21236 5b394541a2
+r21237 011377a2c7
+r21238 26a2abff27
+r21239 c452268c13
+r21240 10be8dc785
+r21241 f52d79f1fb
+r21242 058b878c02
+r21243 c44c00ce76
+r21244 787e286505
+r21245 172b58c99f
+r21246 98cb7ad7c4
+r21247 c21980c483
+r21248 408f351c13
+r21249 916d6fbc82
+r21250 64d2ab49ca
+r21252 cb9f3c3d0f
+r21253 c7c8981b43
+r21254 d43ccc679d
+r21256 a09cf5dbf7
+r21257 3617996351
+r21258 c80d4c8b3d
+r21259 040e4480b5
+r21260 c968d3179f
+r21261 824e71d603
+r21262 36ca453919
+r21263 ab492f44e0
+r21264 3931ab281f
+r21265 56003e8535
+r21266 0edfb35371
+r21269 63103a5e1a
+r21271 1cedf8047b
+r21273 c0b615fe80
+r21274 6ee24a3c5d
+r21275 aa406f4b82
+r21276 f427b1e67d
+r21278 2bf117c3b2
+r21279 edcf568e61
+r21280 84a2f65e77
+r21281 22a037557c
+r21282 73dfbd2fb0
+r21283 323057ba4e
+r21284 ec127ce60d
+r21285 0c8e219596
+r21286 f349e24ea0
+r21287 25d87efb94
+r21288 a7dc91be7a
+r21289 40fdbddc05
+r21290 ee81323908
+r21291 59da69b707
+r21292 f500aeb1fd
+r21294 83c817f84c
+r21295 9751508956
+r21296 c72f823f16
+r21297 2d8b1c7ffc
+r21299 f0624e1937
+r21303 0e7403eea2
+r21304 e7e15da74c
+r21305 ad036896d8
+r21307 469dc5ebf0
+r21309 f32f872269
+r21313 7b43c30aa1
+r21322 cd51ac694d
+r21323 d5c7049d4f
+r21324 d1372c1541
+r21325 86af709d76
+r21326 081df6755b
+r21327 1ce6d9abad
+r21328 28ed5c6b21
+r21329 e8a121e9e1
+r21330 edc621d245
+r21331 d59bde5a11
+r21332 b454bbc5a4
+r21333 b6f8761f03
+r21341
+r21342 3b8ee6d4a9
+r21343 f578ff88d2
+r21344 4aa006cecd
+r21345 4ca7a22d9e
+r21346 1cc838b634
+r21347 a292a87fc5
+r21348 e0cf98dd42
+r21349 50ed222b48
+r21350 bb1482ef2c
+r21351 288c4aaa29
+r21353 2a8667d1cd
+r21354 d5b8082ce9
+r21356 9a8ba0c877
+r21372 82eb13cc08
+r21374 1b098c643a
+r21375 6dd51419b8
+r21378 af6da0b41e
+r21379 a2f3507a56
+r21380 67959c5913
+r21381 24bc8b350a
+r21382 0e437ba309
+r21383 ad0cb2873f
+r21390 82deaa1e79
+r21396 3cc8af6179
+r21401 2ff464685f
+r21402 9bed3788ba
+r21403 27ace8351a
+r21404 a5105c67d2
+r21405 9378ba126c
+r21406 68504813ef
+r21407 73648228ff
+r21408 d76943f9ae
+r21409 710e1cb6c4
+r21410 f218c00988
+r21411 0528b12ed4
+r21412 04e60a56e9
+r21413 2209c911ce
+r21414 53256b43ff
+r21415 9fa486fb6e
+r21416 1a77a3b4ce
+r21417 457a672d6f
+r21418 c46a200d8c
+r21419
+r21420 2dba26ed12
+r21421 f1044e136b
+r21422 0dbc3ea559
+r21423 2b59cbaafa
+r21424 0d80fa2d50
+r21425 261e399ba3
+r21426 8fc50d2aa7
+r21427 33aa7965dd
+r21428 1915363914
+r21429 eec07a4284
+r21430 56584c300f
+r21431 83d8f0b8f8
+r21432 b1307080fc
+r21433 b535c77592
+r21434 519214dcc6
+r21435 e2decb09ed
+r21436 1e6de3dcbe
+r21437 71b6aca681
+r21438 e93c1a93a2
+r21439 973c00923d
+r21441 18700fab3b
+r21442 beebad1bc4
+r21443 22c16774aa
+r21444 38c1f9741f
+r21445 9c4905dce1
+r21446 9722186804
+r21447 3750235190
+r21448 8ee1f32478
+r21450 e7718496ee
+r21451 ad596fcfc7
+r21452 67b1041a85
+r21453 ebe772d693
+r21455 bf3e7d4900
+r21456 8ced5e85f8
+r21459 dd9a1245ed
+r21467 bed1ffb1c3
+r21471 cfe47e4b74
+r21472 81c7ff7ef7
+r21473 800d9d8fe9
+r21474 9cf7f2c71f
+r21475 08496424f2
+r21476 a5051ddadc
+r21477 484134e4f5
+r21478 e96091a44f
+r21479 248c72814a
+r21480 03e6cd1400
+r21481 ec5a4e8f47
+r21482 b53884e8ad
+r21486 7693ab0dec
+r21487 6dd3250020
+r21492 9361f2d069
+r21493 c315a6fe9c
+r21494 b3f909df2e
+r21495 f7340c3abc
+r21496 d0475494b2
+r21497 303d9f812b
+r21498 0beec15420
+r21499 18f75625a8
+r21500 010889645c
+r21501 8ec16299c8
+r21502 70322ab6ba
+r21503 814f097feb
+r21504 b6f7f79384
+r21505 734f709290
+r21506 c1f1a2cfdf
+r21507 0721367ab2
+r21508 b8b6507a3e
+r21509 beee01e9ec
+r21510 7015c96b21
+r21511 9e155f4956
+r21512 406e54b7e5
+r21516 4f12f2af97
+r21517 00581b645b
+r21518 e8c80f152f
+r21520 628b2edf73
+r21521 5055ee1d62
+r21522 ea91456310
+r21523 aad801fc89
+r21524 11663541b4
+r21525 d98e426541
+r21527 bb1a2d20cd
+r21529 35f9176e79
+r21531 c54b7a99e8
+r21535 bc791369f7
+r21536 1973a1f7d3
+r21537 bf0921a072
+r21539 174c1721ff
+r21540 e20c986ba1
+r21541 9024ffbfbf
+r21542 765864526d
+r21543 ab257556c9
+r21545 a0cd7f2fca
+r21546 41d9ea1452
+r21547 27288e3ffe
+r21548 382dd00508
+r21550 3b2c0466a6
+r21552 6d0d855d49
+r21554 248ae6753e
+r21555 6c213d1c81
+r21556 7d6f1e7e4e
+r21557 c272bbfb64
+r21558 d95eb2a8f9
+r21559 ee10da727b
+r21560 c89c953796
+r21575 4afe5f122e
+r21577 c0d1bc031e
+r21596 348271c8b2
+r21597 4fb3473182
+r21598 41860ffcf7
+r21599 11398dd393
+r21603 2c8f5c5a82
+r21604 91b6426788
+r21606 9b54f56bde
+r21607 ff714a4621
+r21611 0ffb0708fa
+r21616 0acdb6a68c
+r21620 41c280194d
+r21621 199f6f6cb8
+r21622 9933cbe4e4
+r21627 c5441dcc98
+r21628 22b66d601b
+r21629 b2deee49ce
+r21634 4214e738c0
+r21635 0b0513fd6c
+r21638 0c6fe023cd
+r21639 326065c5ec
+r21640 cf26f62070
+r21643 a17a4dc157
+r21644 db0d40b73c
+r21645 c8266ce2b5
+r21649 3861a3a42e
+r21650 dcbffd4dc5
+r21652 d16e517303
+r21655 e4716c234d
+r21660 618b55fa8e
+r21661 42ebea46c7
+r21662 3400802903
+r21663 17ce401dbb
+r21664 947ed04398
+r21665 db8bd90da4
+r21666 eb1ee924dd
+r21667 6736ca07f2
+r21671 a0e5e165c9
+r21672 ee1042f8c6
+r21673 810deda16a
+r21675 a29eafaf4b
+r21676 1148683005
+r21677 bd66ed93af
+r21679 ce27db8021
+r21680 9af947ea3a
+r21681 796d24e102
+r21684 8b58d4360a
+r21685 aed5acd725
+r21686 2fd048855d
+r21687 3b24fde836
+r21688 4ab780e8be
+r21690 c2f6ae9755
+r21691 e73312494c
+r21696 bc17cc6c03
+r21697 cf552d7f27
+r21700 4f24cb62ce
+r21701 fa715fdd66
+r21702 15fecdc78e
+r21703 f99b3ceac6
+r21704 622c15815f
+r21705 0675d244e4
+r21706 9b16201d2c
+r21707 99cbff74b7
+r21708 4a785c8727
+r21709 1f7165c5d4
+r21710 af4338c2b2
+r21711 677ca58efb
+r21712 fe0a2ac4c3
+r21714 4f5a598284
+r21720 3db6fcb7bf
+r21721 32cff2050f
+r21722 231cfbe1c0
+r21723 9b066f5a1e
+r21724 b86d72b35e
+r21725 45e3ff972c
+r21729 922938dc87
+r21730 54e1e31679
+r21735 8f2d31cbcd
+r21736 151d1ec579
+r21737 ee5daee5d8
+r21738 d6178b3a10
+r21747 8a6e20ce4c
+r21748 78ca916a09
+r21749 35e8818609
+r21750 a2c3cdf668
+r21751 4bd4c7f4d4
+r21752 37893fe867
+r21753 8a3ff479f2
+r21754 8eb1d0c6ac
+r21755 5b937bacd3
+r21756 18cdaae4b6
+r21757 d43999e5d0
+r21765 a514ab4fe1
+r21766 4758f2a87c
+r21767 f662b62e2b
+r21771 6c86ba45ef
+r21777 3c2edb472a
+r21778 a46601aa3e
+r21779 5f75746b66
+r21783 3ec6dba7ba
+r21784 b8e90e8aef
+r21787 37a5c47ac5
+r21788 df78ff25e3
+r21789 6bc86b8248
+r21790 7abeacab53
+r21791 02ad6bb966
+r21792 c473291597
+r21793 20192c84a9
+r21794 185b1c828a
+r21795 2c0731e106
+r21796 115d774e47
+r21797 7868f336ec
+r21798 a01b81352f
+r21799 2c45d41b37
+r21800 19ec1c5b7e
+r21801 09bbc6ea28
+r21802 60cd12f770
+r21810 dabf2c23ef
+r21811 c2002c8361
+r21816 acc5c7e650
+r21817 0f4b2306ec
+r21818 7cb9037e17
+r21826 cb35c38f14
+r21829 c55b106f50
+r21834
+r21840 aa09cc3505
+r21845 b8e0795548
+r21847 536fa4d9c8
+r21853 d1185713fa
+r21866 8fe7b53164
+r21881 f8b4ca8cf0
+r21882 0319fec702
+r21884 601729ad84
+r21885 db50a62b62
+r21886 bfb49242b5
+r21888 d484df935d
+r21891 e6ff7d3557
+r21897 57a0b3d165
+r21898 180c6d047d
+r21901 582c53207b
+r21908 a99710111e
+r21914
+r21915 f9ab50b25e
+r21917 c7c69ea245
+r21919
+r21920 ba1c91710f
+r21922 0ed53d4d68
+r21923 016d815104
+r21928 fd5d20d2cf
+r21929 7c7c267d4e
+r21930 5f5660dd6e
+r21931 e7ce9b9723
+r21932 fa75d20c42
+r21933 a239e85e65
+r21934 33ff703da2
+r21939 f6ee85bed7
+r21940 a193d9f42d
+r21941
+r21942 7b822f2866
+r21943 d97b3a8066
+r21944 f4420e7b13
+r21945 bf82ecbcbe
+r21946 54523bc2fc
+r21947 b7888a61f8
+r21948 b7f77112a5
+r21951 0577b21098
+r21952 dd500f0f57
+r21953 092ef8f8f7
+r21954 516a00c88c
+r21962 b081940e5a
+r21963 a3bbcdbfc6
+r21964 1b06a599ca
+r21965 da8253c2e0
+r21966 e0c2758ed3
+r21967 b7781f0d87
+r21968 ebfcab7b96
+r21973 4d11985231
+r21974 d6191fcdbf
+r21975 da86fbe4a8
+r21979 7df797939b
+r21980 f139afb941
+r21981 50bf167d08
+r21987 b96804031a
+r21988 4debc5bf1e
+r21989 293b70525e
+r21990 dba07aa5a4
+r21991 136f08e7db
+r21992 6c1a68c847
+r21993 20919ccb1a
+r21994 9dae73d4cd
+r21995 448c34d11b
+r21996 bb141f2c7d
+r22001 1fa7a9373a
+r22002 1a66cb2193
+r22003 90c59eb70a
+r22004 4382c7dd6e
+r22005 712ebe2943
+r22007
+r22008 2ae12a5c6d
+r22009 354e05b8db
+r22010 0df04f17e0
+r22011 43cc66eefd
+r22012 6043ad6f8f
+r22013 5b391ab536
+r22014 9a3f9c0e79
+r22015 c8b3ae91ad
+r22017 3bad6d54b1
+r22018 41d361a9d2
+r22019 418b041eb4
+r22020 a33ef273d0
+r22022 67a650205b
+r22024 a3c413084c
+r22025 6fc37a1324
+r22028 5628970b43
+r22029 4b10a4ca64
+r22030 56313be050
+r22031 885f76fd05
+r22032 bb83cb8ba7
+r22033 6ecd2f3ef0
+r22034 d38342768a
+r22035 ddea6d37d4
+r22037 e3c5bb68a1
+r22038 97abbae86a
+r22039 910adc615a
+r22040 4e3c1a99e8
+r22041 83630c3ce6
+r22042 5e9d2809eb
+r22043 0301bcfa43
+r22046 bf7eee0889
+r22047 f80f8033a7
+r22048
+r22066 5da8a164cd
+r22100 0b006e7762
+r22108 6e3814fe9e
+r22114 8acca208ae
+r22115 f3d87c08f6
+r22121 2eab8f3134
+r22130 8e2b780c61
+r22131 30d9767343
+r22137 3bff39ce76
+r22140 a708aa88f4
+r22141 de67e153ee
+r22142 3281d0627b
+r22147 60354bdda2
+r22148 4e1907afb6
+r22149 cb6db4169a
+r22151 043889d581
+r22152 43e5eff2c8
+r22154 e9d3987da7
+r22155 67d0f1050f
+r22157 bf17437453
+r22159 09f490bd56
+r22160 ebb6c4a2d9
+r22161 245ec93fb1
+r22167 da5910c7c6
+r22168 84b86a977e
+r22170 d3a747882c
+r22172 5440040432
+r22174
+r22175 407ba61ff6
+r22176 eebb8695e2
+r22177 0e413bc755
+r22178 dd396886d0
+r22182 e67f560766
+r22184 1c243de3c6
+r22186 d6896c490a
+r22188 caa6bf0e7a
+r22189 a1e29d20aa
+r22190 d112ec1f88
+r22194
+r22195 905c3126ac
+r22196 22ea4e87f7
+r22197
+r22198 e045a3ff33
+r22199 7aae8c7cbc
+r22204 0f5d5c58ec
+r22206 f8429e2fcd
+r22211 5ad8adecf8
+r22215 8512b81f4e
+r22219 a2875b700b
+r22227 afe4edad3c
+r22229 3c85de708d
+r22234 a2a14fa803
+r22248
+r22249
+r22253 d300a5817f
+r22260 436a7d8636
+r22261 d3a7702162
+r22275 f492b00323
+r22276 a8d02cd6b6
+r22278 2b458481ed
+r22285 c52aa972a3
+r22291 ef9fea4f2e
+r22295 ee23aefccc
+r22296
+r22297 1e08467076
+r22298 bf1b8d136d
+r22299 de7fbb051b
+r22300
+r22303 0c6cbdac43
+r22310 85d5a0cfcd
+r22311 b23b36e655
+r22314 8af697d20f
+r22315 9cc51c6d4b
+r22316
+r22317 2db73a027a
+r22318 806f2f67c3
+r22319 e3fd6b82e0
+r22321 97bd54ecf3
+r22322 4e9d57fd26
+r22323 59dc9f40bd
+r22324 fd9ddea91f
+r22325 b9034f4cd5
+r22326 5f25a7cf9a
+r22331 9e0618ba29
+r22334 f750b08d9e
+r22335 b9fb76d09d
+r22347 18ad78dd73
+r22355 ceec792d1a
+r22356
+r22357 9923b97157
+r22358 cb367e28ee
+r22359
+r22361 109924d63e
+r22362 c084ad2bcd
+r22371
+r22372
+r22373
+r22374 b040ad441b
+r22379 c65032c3f6
+r22380 104193705e
+r22393 e938bb961f
+r22396
+r22399 5b8cba103c
+r22400 dee314b7bc
+r22409
+r22410
+r22411 9f6b596c7e
+r22414 bf63903073
+r22416 1067f5f55c
+r22417
+r22418 b2abe22c97
+r22419 52b863dd86
+r22420 24a694fe23
+r22421
+r22423
+r22426 9d5bc93142
+r22435 846040bdd1
+r22445 31dcef9d4c
+r22446 12c8a6113e
+r22448 574f77446b
+r22449 b4528e7730
+r22450 66de11cf7f
+r22451 6a949bb61c
+r22452 49344ed1da
+r22453 3501f38288
+r22454 6abc0a3ebf
+r22455 5a84bffb2c
+r22456 02f73a54ee
+r22457 7bee6a5400
+r22458 f0e000d759
+r22459 deaf94e5f2
+r22460 a0bacadc80
+r22461 c2a3d50262
+r22462 74eb6b70d5
+r22463 60a7e53a5f
+r22464 9421f2ecaf
+r22466 57b7e442af
+r22467 f911b5da55
+r22468 63dff5e57a
+r22469 38912509af
+r22470 58adc8d999
+r22471 fbc4533975
+r22472 328651c39a
+r22473 8eee437289
+r22474 f5f71f2d02
+r22475 d9dc68cd2b
+r22476 4dd14ec6f6
+r22477 78b419c329
+r22478 322e856f13
+r22479
+r22481 39e4641ec9
+r22482 7a8a37e5f1
+r22484 302b1df81f
+r22486 4db2941031
+r22487 4d69f2d6eb
+r22488 b053d329d3
+r22489 536cdd87be
+r22490 8a2c52b105
+r22493
+r22498 c66d3b0d44
+r22499 02ac95f076
+r22500 44d1000e70
+r22501 aff3ddde53
+r22508 356abe3a5b
+r22509 d7814a2359
+r22510 3c85f13569
+r22511 0cbeaf17d8
+r22512 bc5ac3dc9a
+r22513 68aeeae422
+r22514 27cdc8ab7f
+r22515 3a1d34eebf
+r22516 c9827c4a98
+r22517 b54e416219
+r22518 45528c7e3b
+r22519 fcb0419a27
+r22520 06f0f80ed9
+r22523 2182f4d283
+r22524 ba975223e8
+r22525 c66898e5be
+r22526 0394b8426f
+r22527 029482c86e
+r22532
+r22534
+r22536 a02ff1ac0e
+r22537 e036e2da98
+r22538 87b48f8686
+r22539 b05c0fa47d
+r22540 a012c4c920
+r22542 fe378b7d81
+r22544 6af63c5203
+r22545 ada6cccb35
+r22549 78d96afa56
+r22550
+r22556 0661398ceb
+r22573 d93ab70b47
+r22574 bdbaba4cf0
+r22584 289e6a43d4
+r22587 d36dcfbf9d
+r22588 5c9400467b
+r22589 a6bb10a310
+r22590 9c365348fd
+r22594 7ca4628b2a
+r22595 30896b2f45
+r22599
+r22604 60d56501a0
+r22605 7634d75934
+r22606 c386234edf
+r22607 9972040b0f
+r22608 f7d2a3fa4e
+r22609 272a147c77
+r22614 644a80be87
+r22618 fdc1be772b
+r22619 1e3a43e74f
+r22620 f5bc26b45f
+r22621 97b7cc4ddb
+r22624 da234921b7
+r22625 315e1e79e2
+r22626 74868d53db
+r22627
+r22628 280cc3fe3e
+r22630 0ce0ad5128
+r22631
+r22632 c6cc8c7282
+r22633 3630e9ba45
+r22634 9d3eef33c5
+r22636 bc0ed202b6
+r22639 5aeca8a716
+r22641 db5f08d5bb
+r22642 04e2cccd0d
+r22643 f0a7202589
+r22644 26bbdbe3a2
+r22646 e3ca222e48
+r22647 69ff5578c0
+r22648 c479dcdd98
+r22649 8992596004
+r22650 f9fe76375d
+r22652
+r22657 ed3c7e54fc
+r22658 3d6fe98b65
+r22667 a14012bd56
+r22668 12a41f6dcf
+r22669 958fb1c6f4
+r22670 db99926628
+r22672 bf44cd12b1
+r22674 8a8172da3c
+r22682 23bd1501fc
+r22683 e51d5de4cb
+r22684 c690bf16b9
+r22685 0a787b6477
+r22687 20efb133c5
+r22690 50a178f73e
+r22693 d4e2058a3a
+r22694 95d7ef40eb
+r22695 0d7f67df70
+r22698 f36ea69f64
+r22702 ed3dddae4e
+r22703 40aafbdf1a
+r22710 3ac03c3d3f
+r22711 5a50d83a33
+r22712 e5efbddf19
+r22713 024c0220d1
+r22721 ca0bb2c419
+r22722 1809c97bb3
+r22723 1e68079614
+r22724 9d7586adab
+r22725 001cf628f1
+r22726 04c38829b6
+r22727 41bfef8087
+r22732 3b8fee9184
+r22737 e3743b812a
+r22738 b781e25afe
+r22739 596ef0e94b
+r22740 4b9de7deb2
+r22751 29f9d75674
+r22754 9550c2077c
+r22755 d0f2062493
+r22762 72c11c60b1
+r22763 c3cfb6cfc9
+r22764 fc2749bfa7
+r22765
+r22766 11ae7ea080
+r22767 7155c1e129
+r22775 d91edb59ee
+r22776 a8ec5198cb
+r22777 1427045ab6
+r22778 daaede456d
+r22779 3ca4c6ef6c
+r22780 ed98119165
+r22785 385775c0c5
+r22786 e1232ab57a
+r22791 4fb0d53f1c
+r22792 86d07ffe72
+r22796 9d202a7a8d
+r22797 1ededc1ab0
+r22798 16adcd1fa8
+r22799 11f2760b59
+r22800 8bef04a234
+r22801 d8fed0f583
+r22802 40f8f77339
+r22803 d4645f9372
+r22804 e11cac6ecc
+r22805 fc735859ff
+r22806 b3982fcf27
+r22807 3c001a598d
+r22808 a43eac7510
+r22809 bd6914a7c2
+r22810 7adc188a07
+r22811 0cab741d08
+r22812 b64d195601
+r22813 e176011b88
+r22814 f6843150fb
+r22815 6c2c125d1b
+r22816 c5650b9f7d
+r22817 32de7fe412
+r22818 95e096797a
+r22819 cde87ec0a7
+r22820 d4e44a6565
+r22821 6892195b1f
+r22822 7b387e898c
+r22823 081b838897
+r22824 38e707849c
+r22825 0fc61a72e4
+r22826 74da0c7851
+r22827 38ba1149cb
+r22828 2c14b262e9
+r22829 3db5daf609
+r22830 79a7191e60
+r22831 e987f72df3
+r22832 5056993477
+r22833 bb7b9fe850
+r22834 3657dd345f
+r22835 de1f665939
+r22841 cbb97ea113
+r22842 b3e8963c30
+r22843 e73fa382cc
+r22844 b54b36af8f
+r22845 559000b732
+r22846 d20380ea9a
+r22851 799a2b0e28
+r22855 501a7c1bb6
+r22856 c0b806f709
+r22857 f61d2d2f4d
+r22858 af8f7ed60b
+r22859 41e2c237df
+r22860 8964f6f1bc
+r22865 faed687d92
+r22866 185d04643d
+r22867 4af85c28c4
+r22868 9db3f49ff4
+r22869 b0c8e27156
+r22870 64fab04e4b
+r22871 8b0de323fd
+r22872 2a6a1f370f
+r22873 de664fbc0d
+r22880 fb950eef15
+r22892 5827534754
+r22893 d367ae7b26
+r22896 8f1a52438a
+r22897 707baf25a2
+r22899 801280e6f9
+r22900 926f64007c
+r22913 a420fd587c
+r22917
+r22920 f1a211eff6
+r22922 bd52cc368e
+r22928 e594fe58ef
+r22930 0d8ba6ca38
+r22931 b3256eda66
+r22932 3bbfd70e39
+r22933 9813e37ca6
+r22934 ad22d88f56
+r22935 ec0f4422e0
+r22937 b7db974606
+r22938 441956b523
+r22939 4dcc114183
+r22942 02783a4743
+r22945 ea710916c3
+r22946 ee5a5d6294
+r22947 aebeaad6e4
+r22948 b5c2052735
+r22949 6dfcae30bf
+r22957 ec7cc94358
+r22958 56d5033a4d
+r22959 f7751134d1
+r22960 ac499bec25
+r22961 4d0f311f8f
+r22962 5a150395e7
+r22963 aab959bbe2
+r22968 3b4343886d
+r22969 672c1356ef
+r22970 f7a6c8823b
+r22972 cfb6168dc5
+r22973 561a8077e6
+r22974 6a21106690
+r22975 964cceed6d
+r22976 c40a798bf0
+r22977 4c47e9435d
+r22978 c0f03d837c
+r22979 ce755fb08d
+r22981 ad55804547
+r22982 45b659cd41
+r22983 3b8129c77b
+r22986 5824594015
+r22988 7bd08662d1
+r22989 6c4d41fbcc
+r22990 e595d0a130
+r22995 8562015759
+r22996 726a336651
+r22997 d5701f0c97
+r22998 edf94d0baf
+r22999 f78d8f648e
+r23000 b094defe61
+r23001 81226c6223
+r23002 18a4de80a9
+r23003 e57245492c
+r23006 e998a5e747
+r23007 d505a106f8
+r23009 44784f3e41
+r23010 ce223fe7ab
+r23011 e557acb9a7
+r23012 084ccb1e0c
+r23016 2976ede075
+r23017 003bd3adee
+r23018 4fe2d213ce
+r23019 99fb2b420f
+r23020 a4e163d262
+r23021 94e9b95f9b
+r23022 ab8f20c1f7
+r23024 513fd181bc
+r23026 49bdf3cda2
+r23027 bc3e3c54fb
+r23028 e251279035
+r23029 bece2590ef
+r23030 76ce74d7ae
+r23031 df7119adc0
+r23033 28c1aa3c20
+r23034 fd2bfa28b0
+r23036 df90c36a13
+r23037 9563f21b20
+r23038 54b5eacb56
+r23039 e4a596e91d
+r23041 0dacb8195a
+r23042 8b16236ebd
+r23050 feb435cc0a
+r23051 6b957d0455
+r23053 567968ab8e
+r23057 03cd602835
+r23058 39a8b1042e
+r23059 a5d47fb693
+r23060 285d2182f1
+r23062 a992ec2d57
+r23063 c8dec98981
+r23064 3e70e56427
+r23065 2e7bd469cd
+r23066 ffd6cff38f
+r23067 0894660255
+r23068 d5baff39ed
+r23069 a7ea942cfe
+r23070 04159cb985
+r23071 1b1d48353b
+r23072 0a0cdb03d8
+r23077 b82c431991
+r23078 6b033dfc5e
+r23079 0100aacc35
+r23080 c37a59c683
+r23081 d742020345
+r23082 a3aa8993d2
+r23083 43babf744b
+r23084 d7739fc014
+r23085 6e710c26ea
+r23090 ba5d0ec898
+r23091 7fa6c08f53
+r23092 cdd4cf44dd
+r23093 e4afb12949
+r23094 1389f0421a
+r23096 ec4b635150
+r23101 82b9e235bb
+r23105 24a9ae5a42
+r23106 dace259b47
+r23107 2399a69b90
+r23108 5579374fc1
+r23109 9522f08f41
+r23111 b40f4ba322
+r23112 a56c33b6a4
+r23117 9c0e58c48d
+r23118 7032d7dbdc
+r23119 0b70eebcab
+r23122 7673099e47
+r23123 19b42dea45
+r23124 fda537c771
+r23125 c18c3e1081
+r23126 cb91343d2b
+r23127 9058008d34
+r23128 4dc846980e
+r23129 0534bcaf69
+r23130 eac72bbee3
+r23131 54f6615104
+r23132 20f39c1d4b
+r23137 c0cc1dda85
+r23138 e1eb91714d
+r23139 521267d23e
+r23140 44ba99aacf
+r23141 57f2b3d5e0
+r23144 4697416af3
+r23157 0f2808227b
+r23158 d3c453d15c
+r23159 1148daec9c
+r23164 256aca6122
+r23169 06aa1c9eff
+r23171 943fbb1363
+r23172 2fefb37220
+r23173 2c59afc2c1
+r23174 a031311991
+r23179 afea859ef6
+r23180 a7fd7d6dc2
+r23181 c901a06757
+r23182 9e21fe6c69
+r23183 e0372eddc1
+r23184 ff1e0647c1
+r23185 6472e115d5
+r23190 74a0c96db0
+r23191 4afd17d6d3
+r23192 c1f8dbca52
+r23193 b090accba1
+r23194 4f741668a8
+r23195 5f00dcd852
+r23196 33aa342005
+r23197 5deb8d8440
+r23198 a4cf7b1ec5
+r23199 7553e6901d
+r23200 23c6d4f985
+r23202 bf84cd2f44
+r23203
+r23204 f22b627730
+r23205 1a9a264f8b
+r23206 f647966e8e
+r23207 b8c07db737
+r23208 cd92aad821
+r23210 34c872d1a7
+r23211 eccc23e2e5
+r23212 68aafb29c1
+r23213 001e910f97
+r23215 41d7f547c0
+r23216 4af97e33e7
+r23217 908ed2f29f
+r23218 e027dd4fd3
+r23220 40cd42b7f5
+r23222 487e5bf895
+r23223 a350673750
+r23224 72cf31c7ac
+r23225 6abce56ad4
+r23226 5c83be3b2b
+r23228 e5c22d9e0a
+r23229 4215f6bd7d
+r23230 7f5f17303e
+r23231 46069e0725
+r23232 b33c2c37a4
+r23233 b7efe90890
+r23234 44d0bb2426
+r23235 cf11854cf0
+r23236 38d4500430
+r23238 46d5e73c11
+r23240 08c460450a
+r23241 d64cbe4366
+r23242 0891a46d96
+r23243 68516d31fe
+r23244 0e7b7a50c6
+r23245 15f4e9fc9b
+r23246 d9e7e347c7
+r23250 77c31e39ec
+r23251 492f5f5214
+r23252 111deeb1a4
+r23253 af200c9594
+r23255 a4865203eb
+r23256 771b4f7c23
+r23257 6893c72ee1
+r23260 920449d6ee
+r23262 185700607d
+r23271 c5c38fc642
+r23272 6e18fbbd38
+r23273 3332d20526
+r23274 264e7c95f1
+r23281 1e73d82e13
+r23282 3087233967
+r23283 de2fb8466e
+r23284 9adc6d22c9
+r23285 e5cfe47a19
+r23286 b525978a52
+r23287 80dc8f4e27
+r23288 0642bdf044
+r23290 87134363a2
+r23291 5cdb213d7d
+r23292 080d357a3e
+r23297 491ecd7b8b
+r23298 c39f26382d
+r23301 8dd7839ac8
+r23303 4b97811b4e
+r23308 ed65254c4f
+r23309 79389bc80d
+r23310 26ac638650
+r23311 8b17d54737
+r23313 9bd74024a1
+r23314 9066ffa93e
+r23319 842ec522a2
+r23320 7a4b4c7a97
+r23321 de3e8492e6
+r23322 add9be644f
+r23323 2014160121
+r23324 eeb70cd5f4
+r23325 d33724e24b
+r23326 2f7197c50b
+r23327 898bd4b57c
+r23328 d13a2529aa
+r23329 d3d218e5ea
+r23330 e7ca142b45
+r23331 a4a65f9c42
+r23332 b1d9354a08
+r23333 b689b912ca
+r23339 2b417333e3
+r23340 81443d309e
+r23341 cfb50cbcce
+r23342 006fbc37ca
+r23345 246b590a4a
+r23349 baf9c6f380
+r23350 5c322510b1
+r23352 7f365342d9
+r23355 22da3636fd
+r23357 6de5505cd9
+r23358 cab41b6858
+r23359 6d22805793
+r23370 0895da3b10
+r23371 dc11fa1ca6
+r23372 2212fd6f4e
+r23373 6b6d21444f
+r23374 46d1cfc7f0
+r23379 a15e48df88
+r23380 0e3e701870
+r23381 d96113b2bf
+r23382 ba6fbcef84
+r23383 683af5895e
+r23384 6e6435156a
+r23385 e077dbb8b9
+r23391 e734600e0a
+r23392 4ddb4ce1e2
+r23393 f388aaaf52
+r23394 e9b61ff9fc
+r23395 962a348ab2
+r23396 8d311558f3
+r23397 6801b5e490
+r23398 b7a344e93f
+r23399 750b5244ee
+r23400 9f3d7b709e
+r23401 460edf36cb
+r23406 b4afd4c86b
+r23407 a2ce51bcb7
+r23408 e73e777e21
+r23412 adbad7ba56
+r23413 b4d47496cb
+r23414 09ec5aa3f0
+r23417 6beaf28e6d
+r23418 00b42b18ed
+r23419 1df37f4769
+r23420 9b54520a8c
+r23421 d6b71cecda
+r23422 3953904fd0
+r23423 ff86078200
+r23424 89f3533a2f
+r23425 2f851bd1f7
+r23426 c0b74d9bcd
+r23427 ae49104855
+r23429 3f26904e68
+r23430 278ec47fb1
+r23431 f4e000f7f0
+r23432 62614a6f9f
+r23433 b9982a3d3d
+r23434 b80f277804
+r23435 bcfe76ee68
+r23436 6fddcaa5f9
+r23437
+r23438 543d70e30c
+r23439 8e32048762
+r23440 3b0b4d7480
+r23441 c891ba15f2
+r23443 db163e25eb
+r23445 de012b3a6d
+r23446 379af580e2
+r23447 29be721e25
+r23448 78c1e2f94e
+r23449 1320e921ad
+r23450 70d07a2394
+r23452 af202942f1
+r23453 4a19146481
+r23454 e3b2ebcbcf
+r23455 4659d81554
+r23459 1016d68bef
+r23461 056663c3f2
+r23462 09ed9d12c3
+r23463 d76d7778b6
+r23464 8607dd6b78
+r23465 b10ba655d5
+r23466 7f8ccd778d
+r23467 948f4228c1
+r23468 8009f723b9
+r23469 942bf86c7b
+r23470 71f765bc4f
+r23471 b2559b3cf4
+r23472 107cf1ae8e
+r23474 6cb5c25802
+r23475 e46a397977
+r23476 903478337c
+r23486 37d9130f9f
+r23487 43409ebb6f
+r23488 29bd7715f7
+r23489 a1b86a7e51
+r23490 bd86b89077
+r23492 82770a97b8
+r23493 19b12e8e0f
+r23494 b95246f152
+r23495 19064bad63
+r23496 2d4a8afdc3
+r23497 a1fd391c10
+r23498 46a921df81
+r23501 91eff8e6d9
+r23502 505a858ea1
+r23503 a061def4dd
+r23505 6bf1e7a268
+r23506 8c5af3304f
+r23507 d205bf404f
+r23508 5d1052f36a
+r23510 e1780e9686
+r23511 298738e959
+r23512 ff5acd0dbb
+r23513 872f147d84
+r23515 6900ffe884
+r23516 bf939d9757
+r23517 d0d20f5b63
+r23518 8006c99792
+r23519 a3c0cdc9db
+r23520 6292877281
+r23521 e3c3cc9759
+r23523 81d659141a
+r23524 764dc81ede
+r23525 70ecc1ea56
+r23526 03b3f7d4a1
+r23528 b7fcc7c73e
+r23530 363a1456f6
+r23531 c09f6173e9
+r23533 048abea829
+r23534 9266922e1b
+r23535 eb2d8e3985
+r23536 4c1cae0ef2
+r23537 d41da608a3
+r23538 cfa6808a9e
+r23539 1fbd342a80
+r23540 48451f980e
+r23542 a86453a5ee
+r23544 13a20ba71a
+r23546 c5c02cf4ff
+r23548 1ab5e1578c
+r23549 fcbf371518
+r23550 349c8baeab
+r23551 a01f074d3e
+r23552 78ae055e52
+r23553 c9f0770b44
+r23554 72969dec9d
+r23555 4886b55fa4
+r23557 685f675ea0
+r23558 0e70623ab8
+r23561 e3cfb4216f
+r23563 c6f4dac7be
+r23565 7c0ee3acb4
+r23568 c555cedd67
+r23576 30b26d84b3
+r23577 46d1d8e55a
+r23578 597acf7b0c
+r23579 b766d4bc9a
+r23585 e83bcb3fc5
+r23587 fcc1747548
+r23588 a16bba97a0
+r23590 9382d7ca14
+r23592 575f7c33e0
+r23593 cf8c15946e
+r23594 088c19a13c
+r23595 794324a73f
+r23596 8f5b0ef428
+r23597 5ded3c7a61
+r23598 f1fa3ce757
+r23599 79ef52f9e3
+r23600 1fcb865070
+r23601 66f0296fda
+r23602 5be89bb3bf
+r23603 72d12aabf3
+r23604 bb3235a2b6
+r23606 a3d56cb47e
+r23607 59c95e3e92
+r23609 14e47d131b
+r23610 49d47cb372
+r23611 25757de1db
+r23612 3e3e3564ca
+r23613 a5553b8384
+r23615 16b3e8c1d7
+r23616 28ff653bc5
+r23617 98569e2464
+r23618 b810d8c401
+r23619 fa822e3ef6
+r23622 cbcf3f5051
+r23623 4ec7f11a79
+r23624 66a92814a6
+r23626 402d96dd3f
+r23627 4be5e11ccc
+r23628 81f38907b8
+r23629 51e4a6a351
+r23630 6b274687b3
+r23632 1c0d571f6d
+r23633 46fba575f7
+r23634 4ff54d0448
+r23642 8a959d80f1
+r23643 a37284fdf7
+r23644 1660cfc41e
+r23645 b9a25c8acf
+r23650 d7de71e9d3
+r23651 7e94841fb7
+r23652 e1aa9c8e00
+r23653 b2bade0259
+r23654 2b689f169e
+r23655 a69c1afd4b
+r23656
+r23657 765f9aa2bf
+r23658 79821ad8b6
+r23659 31533385b7
+r23664 715d95479e
+r23665 811c7f9ba6
+r23666 979c57cd87
+r23667 cc1f6bca81
+r23668 2e136c6924
+r23669 13182292f2
+r23670 ff8932a429
+r23671 f476b96f44
+r23672 843efeab1b
+r23673 3a783937bf
+r23674 627adab5db
+r23675 e1a0866ce7
+r23676 9e9914e109
+r23678 1a45bc7f19
+r23679 72b2715324
+r23680 4e3a930c04
+r23681 3d97123034
+r23682 b1e969a11b
+r23683 32ca2f2be6
+r23684 626e38940b
+r23686 77eb8fefec
+r23687 ed5459550e
+r23688 b6db478a96
+r23690 8922c4ed09
+r23693 1113f7ddca
+r23694 7806112e43
+r23696 d46e72721f
+r23697 a8db7a2da7
+r23698 fbe897d165
+r23699 43b59488c1
+r23700 b8d567feef
+r23701 0f2a7867cf
+r23702 ef89729e20
+r23703 0f188e1b47
+r23704 2087a249ac
+r23705 32454d61e7
+r23707 60a88e05b6
+r23708 8c325affb4
+r23709 c4daaeae6c
+r23710 cbc8495920
+r23712 8aed49aba9
+r23713 9a7e511b3e
+r23714 6e15632fcb
+r23715
+r23716 4dbe72f83f
+r23720 a730fb5cc6
+r23721 492b22576f
+r23722 f2ecbd0469
+r23723 11dfc5a64d
+r23724 ff7589681d
+r23725 3bbe3c70a3
+r23726 ec233d3dbf
+r23732 4cfcc156f4
+r23733 262ee3a852
+r23734 933148f71e
+r23736 58b7100731
+r23742 6c59d99c5e
+r23743 e61fb59b9d
+r23744 9c238c6acc
+r23745 5d6b870ea8
+r23746 1e6c122c44
+r23750 f033bc401a
+r23754 beed1ea811
+r23755 7f814ff6be
+r23760 bda52e41b2
+r23762 45b0c875e7
+r23763 2bb5d585de
+r23765 e671d76012
+r23766 c514c35b2e
+r23767 799bd96931
+r23768 69aa78bd1b
+r23773 9f08c98a6e
+r23779 30e72647ed
+r23780 5c6c2c243c
+r23781 9ada1110c5
+r23782 e2edb26440
+r23783 4850e825a7
+r23785 46a978e022
+r23788 4a442e98e3
+r23789 06487c5afb
+r23790 7ef1dd1b61
+r23791 4885cc5e08
+r23792 a6163bcd8f
+r23793 c123fe5e02
+r23794 9cbadc4d7c
+r23796 e911fdab94
+r23797 c72713c16f
+r23799 e49af12110
+r23800 ab276e195a
+r23801 b0623ae481
+r23803 580b030d41
+r23804 0e306e1f90
+r23806 f40a20b0f4
+r23807 3cfee5b145
+r23808 3bfd81869c
+r23810 a887c83972
+r23812 ed9fb72104
+r23813 f79c93cd22
+r23814 ae67d3e8b3
+r23815 cc1f960036
+r23816 003fc68783
+r23817 8aff48b504
+r23818 c2c54e12d4
+r23819 c9ae821b77
+r23820 5bc2fc5769
+r23822 1050387558
+r23823 f826618f7b
+r23825 610fdb6b5a
+r23826 d5533fbf70
+r23827 db4bf36110
+r23828 d519e34eb5
+r23830 7418d531f0
+r23831 8b567935cf
+r23832 54f75dc98f
+r23833 932694494d
+r23834 9e261754f2
+r23837 09d502f286
+r23838 5f32d54695
+r23840 d04cfc06f0
+r23841 969fd08a04
+r23843 6ae3eb1ad9
+r23844 cf49fb3326
+r23848 3ec0583fb6
+r23849 3e61c9a5ae
+r23850 e33bb82c2d
+r23851 89de9c3f9f
+r23853 c0bfbce726
+r23854 096bc81a90
+r23855 bf375f7d63
+r23857 f82a8ce058
+r23858 2b61c308c3
+r23859 6c04413edb
+r23860 740fcf90bd
+r23861 1259651a7d
+r23862 4db73388f2
+r23863 86834347c3
+r23864 c7262dd1a2
+r23865 31d2746757
+r23866 0cdd234b1a
+r23867 2af07fb589
+r23868 bfcffea8cf
+r23869
+r23871 79ca8d4cd2
+r23872 15cb1c7535
+r23873 8d993af724
+r23874 03f90c1734
+r23875 533ffe9482
+r23877 635bc9c17b
+r23880 4e0d481418
+r23881 cb10f8a9ff
+r23882 7b14f38ec2
+r23883 4f9b1cf852
+r23884 d891167c88
+r23885 e8b450d51d
+r23887 7d0e5ac4bb
+r23888 266a2ca1c4
+r23889 234ee6d56b
+r23890 c0a4e5acdc
+r23891 7c34a1af96
+r23892 1f4d528702
+r23893 a87d132bb7
+r23894 55d1ee6d8b
+r23895 5c5657c299
+r23896 f0f0dfd9a3
+r23897 8ae754399d
+r23898 b2fbd5a79f
+r23900 66c9b6a949
+r23901 86044e0e54
+r23902 6915e7e999
+r23903 fdb1e69991
+r23905 c875dc635b
+r23906 0b5c9ca653
+r23907 715262fcfc
+r23908 04f59ea9e8
+r23909 5d022058c4
+r23911 57ea3841d2
+r23912 07edcee629
+r23913 733a3d7569
+r23914 6ae3072cd4
+r23915 8fea694f69
+r23916 9917b4aed9
+r23917 377972b095
+r23918 33b35dfbfd
+r23919 5cefd81ee9
+r23920 8e9f3c219d
+r23921 4265833e12
+r23922 ced363bf5a
+r23923 148736c3df
+r23924 32e7c24327
+r23926 d45b5ceed9
+r23927 701b17de26
+r23928 8752d58884
+r23929 18b563879c
+r23931 0dea879a76
+r23932 d4748121aa
+r23933 7d9fb75275
+r23934 c8ddf01621
+r23935 d94210996b
+r23936 785621901a
+r23937 34d82221cc
+r23939 d06ccf64f0
+r23940 58b5c24df8
+r23941 e05dfaeabf
+r23942 c35d829d18
+r23943 67042fd53e
+r23944 92132d6efd
+r23945 bc55c7854c
+r23946 5b481bbff7
+r23947 b0fecaea9b
+r23948 b05c8ebc8f
+r23949 9026bd6e02
+r23950 09052a6a1a
+r23951 0b78a0196a
+r23953 158e748e44
+r23954 fe65bb177f
+r23955 75371b41db
+r23956 2230bc9f7b
+r23957 059e8be4c7
+r23958 9558f60e7a
+r23959 4af620886b
+r23960 c44bf4a004
+r23962 f321aef4fd
+r23964 5f40fe0456
+r23965 566fefb05a
+r23967 5bada810b4
+r23968 2e7d7d4555
+r23969 2263afdf11
+r23970 7ecee9ad1a
+r23972 236f61c04c
+r23974 b4ba25da7e
+r23975 8f444e6626
+r23977 ecc9384838
+r23978
+r23979 c936b0f217
+r23980 c865d35d85
+r23981 93b4e617db
+r23983 8348f2e278
+r23986 604797b645
+r23987 866801385f
+r23988 e89b53d7e1
+r23990 bce484e237
+r23991 5e6f7952d7
+r23992 3414335ced
+r23993 cf820b8907
+r23997 be2778d50f
+r23998 16e7ad360d
+r23999 ac0fc0fecb
+r24000 169a5233f8
+r24001 db35ccb623
+r24004 10f637f1fc
+r24005 111425f14b
+r24006 b500a5c78d
+r24007 bdd7487b06
+r24008 cbfb5d387b
+r24009 60f1b4b1c4
+r24010 fc68a188f2
+r24011 b9f20bf6d5
+r24012 cace663c95
+r24013 9722b4a420
+r24014 8fbe377d4e
+r24015 98de3e5bad
+r24016 8c713da3d0
+r24017 c1db69d909
+r24019 c90ff3d95d
+r24020 5d8c6c898a
+r24021 d6816e0143
+r24022 9d29de3084
+r24024 8e59e56216
+r24025 f3711ed324
+r24026 c28a86006b
+r24027 f4f1738fe7
+r24029 f0bff86d31
+r24032 2d11a5bd46
+r24033
+r24034 7a9f1437ab
+r24035 161a4fda39
+r24036 919d4e1f31
+r24038 a8a7481ab7
+r24039 d8994ad4d1
+r24040 cb693f9f3a
+r24041 5c7ff3ea5f
+r24042 fee124a419
+r24043 cd52c9797d
+r24044 e206930303
+r24046 d8dfb6ec63
+r24047 3715aa127c
+r24048 e6167d9350
+r24050 7cb70a411a
+r24051 b09bc25012
+r24052 017e96230a
+r24053 b89c6e7bb2
+r24054 3ca75587df
+r24055 45580f1562
+r24058 2432afcc61
+r24059 647d23d801
+r24060 da0d80743a
+r24062 3ca434dfd9
+r24063 a99604e60b
+r24064 168a3ffdd9
+r24065 de9a8b9194
+r24066 1cbe06c2dc
+r24068 4253124eec
+r24069 d2dfdc4e6f
+r24070 492be26527
+r24071 3301506556
+r24072 19b45e9643
+r24073 6300d5e277
+r24074 e07ca49a24
+r24075 f253b67d4a
+r24076 82a6aaab86
+r24078 3235722859
+r24080 be85330d5b
+r24082 dea65103bf
+r24083 5f905da8b6
+r24084 85e79881a0
+r24087 3cf67d788a
+r24088 85fbd6f100
+r24089 e372dc0767
+r24090 fe1f2b8096
+r24091 ec9b00e195
+r24092 f9b1917e8b
+r24093 78007ac467
+r24094 78a48c46cf
+r24095 ccc81fa54c
+r24096 ebafcc4e7c
+r24097 da6b846e70
+r24098 dc39ab60d5
+r24099 5be3517c4f
+r24100 d3d4a95ce7
+r24101 6d43731ecf
+r24102 6d0718b5ec
+r24103 a1d4d39c40
+r24104 b961c9bdfb
+r24105 e97169c1c3
+r24106 c888bb422d
+r24109 da33ea2189
+r24112 07a2981402
+r24113 b6fb314419
+r24114 4a194bf538
+r24115 fcdc2267fe
+r24116 e40485618c
+r24117 d884d63800
+r24118 64da770afe
+r24119 942d844aeb
+r24120 db25b914f5
+r24121 0d29472c77
+r24122
+r24123 330febc72b
+r24124 ba82b29b92
+r24125 1c537ba1b3
+r24126 4bc1fae32f
+r24129 7048ac2d66
+r24130 fb718ccd5c
+r24131 834c065736
+r24132 ad3910e7fe
+r24133 b345da5ef4
+r24134 43d3c02185
+r24135 0967826371
+r24136 b06bfabfa4
+r24138 cf492f472a
+r24139 80488e4218
+r24140 f89016a873
+r24141 4b9e197b96
+r24142 9808117e92
+r24143 c6e21a52fe
+r24144 42eee5f325
+r24146 3ef8ef6606
+r24147 45c751c04e
+r24148 c5f20ad02b
+r24151 174a25e1b3
+r24152 2f1759cebc
+r24153 6de1404fd3
+r24154 ce173be810
+r24155 581e82f87f
+r24157 94bb0a9013
+r24158 d59d7f928d
+r24159 ee4e09235a
+r24160 ed9469c06d
+r24161 cd4486aa72
+r24162 589b8a5d53
+r24163 caf436d96f
+r24164 2ebde52602
+r24166 ad7fd95c8f
+r24167 7aca20d8d3
+r24168 235a7ea171
+r24169 5caf65d340
+r24170 76dfe52fff
+r24171 380ce38936
+r24172 fa7838568e
+r24174 961b881659
+r24175 8fb1b1aaff
+r24176 8d9ecb70eb
+r24177 c332e580a3
+r24178 1038b708f2
+r24180 985c587364
+r24181 f61020bb96
+r24182 4d862deb3a
+r24183 9dc772f163
+r24184 25a2d72189
+r24185 566857e894
+r24186 ebf0aa14d0
+r24187 d8f00482ff
+r24188 abb43ce593
+r24189 d20e2b0e17
+r24190 232f4627d4
+r24191 10ef7a4d4b
+r24192 5905acc722
+r24194 2d0e42041a
+r24196 78914b6f23
+r24197 c6bfc6ed94
+r24199 2316be766b
+r24201 20fc7a364a
+r24202 639d471f4d
+r24205 1f189a0d91
+r24206 a4bbb15aa2
+r24207 d3701a5818
+r24208 7b19ec8b1b
+r24210 fcc962b197
+r24211 1065c911a1
+r24212 5c18620fa4
+r24213 2060b631ab
+r24214 a589cb084b
+r24215 cd579b9866
+r24216 2bfaf998ad
+r24217 23aee8758a
+r24218 c89ea6e3ae
+r24221 3467ad57e4
+r24222 c8e8d79870
+r24223 75fe0c8bd6
+r24224 496dc76118
+r24225 fa84b33190
+r24226 87809b72a3
+r24227 ac17c71b23
+r24228 5b9b417ae0
+r24229 9300aaf6a7
+r24230 07a44adf6f
+r24232 6d19219483
+r24233 27a658c86e
+r24234 756a086802
+r24235 c3130988e8
+r24236 13497cbd39
+r24237 c727015def
+r24238 5151d7865e
+r24239 dff00da93d
+r24240 75667b88b3
+r24241 d5fbd26715
+r24242 d34d0d5108
+r24243 48b2da0169
+r24244 96e4c57ac9
+r24245 7ac66ec3b4
+r24246
+r24247 47bea31877
+r24248 160b82a7dd
+r24249 82ffae1693
+r24250 854de25ee6
+r24252 5749084921
+r24254 1789df3815
+r24255 58be2cb1e7
+r24256 804a161227
+r24257 a681a6a2d0
+r24258 bd1efca55a
+r24259 8915ac8e0b
+r24260 d8da9f5d38
+r24261 c8f326e5f6
+r24262 2b0f0a57c7
+r24263 d54ad45ded
+r24264 8e380b6736
+r24266 e9f1ccb030
+r24267 7b7d177571
+r24268 02435237ac
+r24269 593256a6ec
+r24270 02fd6b6139
+r24272 1c5d8d2e68
+r24274 953e3767a0
+r24275 1584f3f018
+r24276 ce73a10d3c
+r24277 5c99d89642
+r24279 4ddfe877b2
+r24280 c7f0ca2897
+r24281 00384916e0
+r24282 6201a2c638
+r24283 ba5118b24c
+r24284 274be93704
+r24285 1887da0617
+r24286 aca0be3dc5
+r24287 f05000629d
+r24288 8e76ce6368
+r24289 2d6575b79b
+r24291 1e6f5d5bf2
+r24292 35d1cb18c7
+r24293 a1309ca93b
+r24294 b8a23b072f
+r24296 82d3f68819
+r24297 066861f6f8
+r24298 9f4c747c6d
+r24300 5ba01cd7c8
+r24302 38c668fcc7
+r24303 e91c0e25f1
+r24305 68d13416b5
+r24307 3f96a415e1
+r24308 801c5cd82e
+r24309 1b6f1d4d30
+r24310 c3ebada7e6
+r24311 6a570deed1
+r24312 fd1ca1e63c
+r24313 d221cef8aa
+r24314 a765a6ff94
+r24316 ebec416529
+r24317 9779036af8
+r24318 7a9aba47d5
+r24319 3594304e82
+r24320 3621100820
+r24321 d610e36fa5
+r24322 0848855e2e
+r24323 a7c77669bd
+r24325 be9a1788b5
+r24326 93498931b5
+r24327 1236b5d14b
+r24328 c9f6d65536
+r24329 8aaca8c135
+r24330 6961f66371
+r24332 6ae7873658
+r24333 82909349e3
+r24334 ed971ecaba
+r24336 633025cabd
+r24337 879c7f610d
+r24338 4449c5af35
+r24339 30b6187f15
+r24340 10ec23352c
+r24341 c9a2180b1b
+r24342 11b936a03a
+r24344 dd45d81acf
+r24345 b0b63f1901
+r24346 49e8a4eef6
+r24348 34d3f1cb95
+r24351 e0aeabba88
+r24352 ba236bdcdc
+r24353 bee568cb56
+r24354 4073555ee5
+r24355 fce8415e57
+r24356 34719ee9cb
+r24357 fdaa0a7a01
+r24360 a07df6427f
+r24361 2021f39362
+r24363 e42733e9fe
+r24364 e465571a4e
+r24365 8f0878683a
+r24366 ba1312d195
+r24367 4e0d7b8e22
+r24369 ebeb8c51e4
+r24370 a296cefe0c
+r24371 290c7cd008
+r24372 db62da7582
+r24374 6055b57403
+r24375 305e7aa380
+r24376
+r24377 e586206e08
+r24378 38adb1426f
+r24379 1f6814a8f1
+r24382 74bee3204d
+r24383 8e5144d8a9
+r24384 6ad9d0085e
+r24385 2cc16420f3
+r24386 ff0dd07133
+r24388 bcb42e12dc
+r24389 a3d2d3b1ce
+r24390 bc9a3475f3
+r24391 64660068dd
+r24393 603c3dae0f
+r24395 1ff7cd53e3
+r24396 2edab8991b
+r24397 ca392540e3
+r24398 5f491e5d03
+r24399 02e043c776
+r24400 b8c1203121
+r24401 fe94d62563
+r24403 7e2259fc94
+r24404 cb0d585411
+r24405 3689a29fca
+r24406 3b467cdfe1
+r24408 a6c075bc62
+r24409 c29b455562
+r24411 6dfc61ab72
+r24412 fff2721945
+r24413 8328a880b6
+r24414 783721e98a
+r24415 cabd899188
+r24416 2333e9af28
+r24417 8fb2df90cf
+r24418 0475b23ebd
+r24419 4e787be632
+r24420 6c5b98812b
+r24421 daf30ee2eb
+r24422 41c6dc0087
+r24424 9f964bcfd0
+r24425 cfeea7a25b
+r24427 f9d286cd66
+r24428 f8f8d378a4
+r24429 50cff4d634
+r24430 67c461b2d9
+r24432 be49752855
+r24433 8f245d95f6
+r24434 0254234328
+r24436 e86934018b
+r24437 ee4cc17eb7
+r24439 1f3c58a818
+r24440 13c59adf9f
+r24441 e64b94fcc9
+r24442 764072ffcb
+r24443 546588a134
+r24444 5602ec602a
+r24457 fbf7125dd8
+r24458 048fe68a1f
+r24459 7a29fc7de3
+r24460 e96dba0c9a
+r24461 4383277103
+r24462 06a98d22ce
+r24463 c450953875
+r24464 e6a60a05a1
+r24465 e23435247b
+r24466 9c5dfa18ed
+r24467 4bae7e8a92
+r24468 fe9a10c9a0
+r24469 f80801c675
+r24470 eb0b73b116
+r24472 c982243064
+r24473 32b05da169
+r24476 d6f3184fc8
+r24480 cc672b023e
+r24483 5647d73009
+r24484 ebcec5f4d6
+r24485 b3c85819bf
+r24486 90e5aea537
+r24490 821816a315
+r24492 d5d7953ab4
+r24494 f3b970b28c
+r24495 0554c37865
+r24496 86e8f5ae1d
+r24497 0e064a7a56
+r24498 a7d2d13732
+r24504 e7c2ab469c
+r24505 c565784711
+r24506 ffa29b1f31
+r24507 8f0ff8bc2a
+r24508 5bb967a3de
+r24509 01203c2844
+r24510 4380911a32
+r24511 4b0531b55a
+r24512 aa0cc8e415
+r24513 b503ea139a
+r24514 9b68c3c213
+r24515 fef8e61cb3
+r24516 36ac83da7f
+r24518 a30ae005c5
+r24519 db7431d209
+r24520 50eb40bcd6
+r24521 6eb6e8ca22
+r24523 72a0e8be61
+r24525 4d0cd60b0e
+r24526 b5d314af8e
+r24527 0b0a927a60
+r24528 9acb3f5609
+r24529 8230585c3a
+r24530 991b359976
+r24531 449fc76cf5
+r24532 7946facede
+r24533 455ee619fb
+r24534 8bf258ca83
+r24535 971653e40d
+r24536 063e8a9dfe
+r24537 fed7729dbb
+r24538 ad8efdf707
+r24539 8cbc17e8f1
+r24541 87eb32c61a
+r24542 fda6c9517e
+r24543 60d9a5127c
+r24544 e579152f73
+r24545 142405e1dd
+r24546 413feab04c
+r24547 8ca5a8fbbc
+r24548 39bbd26bc4
+r24551 0444c81889
+r24552 1323a61e68
+r24553 84671e1076
+r24554 3491672e86
+r24555 a45be8b285
+r24556 a5a18e80ec
+r24557 4a6c40fe6b
+r24558 5670f1f834
+r24559
+r24560 ae8e258bf4
+r24561 0dd018e2cb
+r24562 84b0acd214
+r24563 af011572ee
+r24564 d0e519a309
+r24567 469a08c1ed
+r24570 6b337cb02c
+r24573 3c34549d7d
+r24576 420df2a9a2
+r24578 5e829a82bc
+r24579 88fd5b9279
+r24583 70e6dc980f
+r24584 af3b3d3945
+r24591 15e491b4d2
+r24592 5083559781
+r24593 22d1ae7fa4
+r24594 c402bdde2e
+r24595 809bf414be
+r24596 2f5c6da837
+r24597 408fe0dc4b
+r24598 caba14ff4b
+r24599 628060af0f
+r24600 f84a12bfbb
+r24601 3e5cd92cbb
+r24602 9e0b5eb6c4
+r24603 0d324c4e10
+r24604 3387d04757
+r24605 e6d026304f
+r24607 40195b89b3
+r24608 fbdda78887
+r24609 c17e46682a
+r24610 4d25cc33ee
+r24611 54f560fe37
+r24612 1b4fc3f26e
+r24614 a1fe9d33bf
+r24615 f1af3e0766
+r24616 b6b0359b8a
+r24617 eb32c46d69
+r24618 d4392e047b
+r24619 214a04461b
+r24620 bd319586ed
+r24621 c1efef726c
+r24622 b3889b68af
+r24623 ebedbef6d1
+r24624 5ebbba7a71
+r24625 92693774c1
+r24626 ff5aec180e
+r24627 9e2b204400
+r24628 71d2aba042
+r24629 1caac54694
+r24630 88fbb71848
+r24631 21432085e1
+r24632 b34ef21d71
+r24633 1b14bfcb7f
+r24634 adc57219ae
+r24635 f21113d28a
+r24636 5691a3900d
+r24637 bbd5efa596
+r24638 386d506847
+r24639 96965c4459
+r24640 518cc3af73
+r24641 e74515bbd3
+r24642 b2ca0efb2d
+r24643 ab488babc6
+r24644 56b7e67051
+r24645 c81e94b5dd
+r24646 f88c979f85
+r24647 e94a62622d
+r24648 daa3b19439
+r24649 e5c6241bca
+r24651 c9b4254f94
+r24652 ac87dd2e0c
+r24653 06218608dc
+r24654 93732bf103
+r24655 b1cb4e114f
+r24656 661ce2922d
+r24657 40263b7fa6
+r24658 fe0e4b473f
+r24659 0444357cd5
+r24660 305f49ce8f
+r24661 9b3852f262
+r24662 9781aba3e5
+r24663 1fd0b31aec
+r24664 4df2e9335b
+r24665 a6ba30b8eb
+r24666 223428d1eb
+r24667 c345782c06
+r24672 9f70316820
+r24673 a689456253
+r24674 869e5e9793
+r24675 00b0be49a8
+r24676 557a0ebd03
+r24677 98b50d2f52
+r24678 7eccd78350
+r24679 edb78ae9db
+r24680 876760c6db
+r24681 749739d146
+r24682 23c937f345
+r24683 e06244cb55
+r24684 e50fbcc3b3
+r24685 fd27ca6263
+r24686
+r24687 1c1c65c8df
+r24688 804c401ffd
+r24689 f0a2dd936e
+r24690 329fd609f3
+r24691 7d15e93f56
+r24692 4415640dc4
+r24693 9c776fda54
+r24694 e830a7ce9e
+r24695 7ec0249519
+r24696 bbede17631
+r24697 4040d8511e
+r24698 f040879c81
+r24699 1cf60d304d
+r24700 f36e7acd02
+r24701 6a204df670
+r24702 f8f09796e8
+r24703 8088ca13c4
+r24704 da67f3b71e
+r24705 21f3cf0e80
+r24706 42dbce3295
+r24708 caee04079f
+r24709 4cf60d65bc
+r24710 8cd754f358
+r24711 b13ef720c0
+r24712 26c3f65241
+r24713 6eae720732
+r24714 8e093b517f
+r24715 8a2df2dc70
+r24716 8a64f16fe1
+r24717 35f82e66d1
+r24719 6b7ff287fc
+r24720 112dc4f2a8
+r24721 659f8ba673
+r24722 886e0a6a1c
+r24723 adb112fec4
+r24724 66956b745f
+r24727 a8f2ea50ac
+r24728 3eaae89020
+r24730 95ecae3469
+r24731 50f6c7c275
+r24732 7fba64d2d0
+r24733 7872efc614
+r24734 f229addbcb
+r24736 7d9d9d453a
+r24737 c6040a7bc6
+r24738 b6ab8af4f2
+r24739 ca05143ea7
+r24740 c28aed1ab1
+r24741 f31a20a99c
+r24742 afd1e7d293
+r24743 a3b106bf60
+r24744 4e9a38be50
+r24745 fe268d9778
+r24746 703bbdae73
+r24749 514d01c1ce
+r24750 185d5b50fd
+r24751 9b5cb18dbd
+r24752 3de96153e5
+r24753 af358131de
+r24754 9334ad0db2
+r24755 97b9978b85
+r24756 44ddee59a4
+r24757 b38f7fe290
+r24758 20d0a7dd22
+r24759 8198c1193c
+r24760 fa0ee266cd
+r24761 0b18e29225
+r24762 8707c9ecb3
+r24763 09028a4fa5
+r24764 09e192caea
+r24765 a0909c0573
+r24766 3de9030dca
+r24767 5ff4875db3
+r24768 bffb951f84
+r24769 50c93f63b8
+r24770 1765c49192
+r24771 3c8bc3ab73
+r24773 ed52bec270
+r24774 54fa0d6c3e
+r24776 493da996d8
+r24777 e0653db305
+r24778 b7bdf048b1
+r24779 52fbbcc824
+r24783 acffc823df
+r24784 7c47203ee2
+r24785 bf53d9f48e
+r24786 e6efa7b11f
+r24787 fa8f997a2d
+r24788 3fce9dfd7f
+r24790 5485932c5a
+r24795 b477d7389b
+r24796 9be2e54633
+r24798 21ea5ad627
+r24799 fe15d7eed7
+r24800 9388a634f5
+r24803 7c456cde62
+r24804 efd6b46e74
+r24805 f5b2972d2b
+r24806 9b8f5acf89
+r24807 97b620ae63
+r24808 6af1d5c526
+r24809 ffe789dd78
+r24810 50a4b393f7
+r24811 21121ff62e
+r24812 63c7c9d857
+r24813 9db7dbe440
+r24814 3e65235845
+r24815 bca5660e38
+r24816 add75447f4
+r24817 870679585a
+r24818 60463a8721
+r24819 290f3711d5
+r24820 de27ba74b9
+r24830 c79f8876aa
+r24831 6d653c3d07
+r24834 1a443ebb20
+r24835 6a988aeff0
+r24836 45f20c26c9
+r24837 b18773a988
+r24838 6a5a5ed217
+r24839 ff5cd2f6e8
+r24840 2700617052
+r24841 9a9f73b802
+r24842 199ec3c10f
+r24843 c5d9b7e6a9
+r24844 cc60527405
+r24845 6c1feb586b
+r24846 96ab92d67c
+r24847 8792dda476
+r24848 7f6ebc9762
+r24849 f335e44725
+r24850 80bb9cfb7b
+r24851 e439b24609
+r24852 95ae7765e8
+r24853 acc5311c15
+r24854 793796eee0
+r24855 b4749d3b1a
+r24856 8182349189
+r24857 a02b2daa2a
+r24858 269ea9ab57
+r24859 445ade0bbd
+r24860 f82acf5d37
+r24861 70f18a67e5
+r24862 cb74fc1c8a
+r24867 0bfaa0baf4
+r24868 3f1f0a4947
+r24873 4e96111f35
+r24881 7858ae7be5
+r24882 28723395ed
+r24883 e573f9b206
+r24884 00f6d557ed
+r24885 b38cddd20e
+r24886 93b4217797
+r24887 1c0df8f97e
+r24888 c937fd9570
+r24889 facc1b33fa
+r24890 5e499c5e43
+r24891 d70e69e8a8
+r24892 a0ea242f75
+r24893 4eb00a0a72
+r24894 57a00a46c8
+r24895 14cd653295
+r24896 311b7de861
+r24897 a6d0d9dd0d
+r24899 9654d51491
+r24900 a4c920acf1
+r24901 7a29a1ca3b
+r24902 4cd3e354ce
+r24903 6b58c8522d
+r24904 b72a9b1455
+r24909 41ac77599c
+r24919 1a92fb60e6
+r24920 a4d3c77616
+r24922 124cf3f9cb
+r24923 28149691da
+r24925 106a3ac9a7
+r24927 1e1c4d05db
+r24929 dacd4cab7e
+r24933 b6d24633e3
+r24934 4869a2b284
+r24941 2bd6b4ae40
+r24942 692f32f66b
+r24943 bf1da638cc
+r24944 48e9663489
+r24956 c989273edb
+r24957 11ebee0991
+r24958 ce5170fe02
+r24959 7720716567
+r24960 b7e7cf14bb
+r24961 feb1ba8ab3
+r24962 d5c7021dd7
+r24963 0e3282d99f
+r24964 15ed8925c9
+r24965 27edca2ca7
+r24966 a6032e86af
+r24967 782c73313e
+r24968 7127d82937
+r24973 a3d53243c6
+r24974 806a524f9a
+r24975 9bab5cc04e
+r24976 e75142424c
+r24977 6d2b5e14f8
+r24978 1e5194b41c
+r24979 fff93cd049
+r24980 1a9b0c9926
+r24981 5efdab9621
+r24982
+r24983 b4fd2ab8e8
+r24984 b389940697
+r24985 a22be1267a
+r24986 4074f0e1c2
+r24987 dbd1bbc81e
+r24988 9050263192
+r24989 fea604df16
+r24990 12fa84a6ed
+r24991 683adbd63e
+r24992 63735b31ef
+r24993 ccceeeb179
+r24994 7595671ec3
+r24995 4afa092314
+r24996 d1c806b2d3
+r24997 be35545354
+r24998 2beeb23cc7
+r24999 83703d1e44
+r25000 2a32395ff2
+r25001 e22d7f9915
+r25002 9cc4c5f9a3
+r25003 b4b884e0f8
+r25004 390f2d52ae
+r25005 3e75e7e462
+r25006 9d2c066436
+r25007 86e7c9b205
+r25008 850a689e75
+r25009 00569a3b47
+r25010 e6b0beaa4c
+r25015 d3ff7ee9fc
+r25028 3f19215781
+r25029 4f54ab68fe
+r25030 4b04c9c044
+r25031 d800ebd073
+r25032 d76dc724e3
+r25033 3adaa37cd2
+r25034 4689792757
+r25035 ccb438ff74
+r25036 94e1965b64
+r25037 c5bd18d46e
+r25038 75ec2ba72f
+r25039 1125a9cfab
+r25040 4c7d23b470
+r25041 a8926ae0b2
+r25042 6daacd386b
+r25043 82eaeed3b1
+r25044 35f7c2bde5
+r25045 edad717cc1
+r25046 ad328ff2c0
+r25047 1c2d44dda0
+r25048 fb061f22d4
+r25049 ed87ab5299
+r25050 46c8150743
+r25051 d838e10f7b
+r25052 92a2fd5397
+r25053 33d45626bd
+r25054 6b67a342ab
+r25055 6ebd6c4c07
+r25056 1ebbe029dd
+r25057 b9731954fb
+r25058 29cdb5837c
+r25059 b8575e9636
+r25060 fec42c1f3a
+r25061 5fa1978fac
+r25062 68808e80c4
+r25063 28e6744e23
+r25064 07fab88cee
+r25065 4e85b6fb33
+r25066 21e90dfb59
+r25067 c8f4316b37
+r25068 d73d4950c9
+r25069 8bba6eb9d3
+r25070 581a8c6ffe
+r25071 f0ca26ab84
+r25072 25d692b76f
+r25073 83c0929417
+r25074 b960944463
+r25075 58a147ae51
+r25076 a4772525b2
+r25077 1a11aef9c3
+r25078 f0cea787c7
+r25079 5b09130d85
+r25080 e0155ce582
+r25081 f44c01eab2
+r25082 21584ed38e
+r25083 32d2b15d5d
+r25084 b6d1953b85
+r25085 f02512706f
+r25086 4ba275137e
+r25087 7fa4ca91ff
+r25088 e4f800b205
+r25089 ebfbe58d36
+r25090 30f0befbfc
+r25091 0cebb74f67
+r25092 8b66af0cfe
+r25093 5de317f769
+r25094 3cbf6bf54e
+r25095 2a2d5d6af9
+r25096 413a076381
+r25097 5d20f0650e
+r25098 270c0cb80d
+r25099 916d5f2de0
+r25100 d8f3a17f5d
+r25101 08546513f4
+r25102 8e10b0579b
+r25103 60c8697f0c
+r25104 3a63a796c8
+r25105 1db8243e72
+r25106 814f7ef9f2
+r25107 e102fee1b9
+r25108 e572b6b687
+r25109 3299ee0046
+r25110 87b1b72769
+r25111 2e29f1475a
+r25112 d2fd3d61d1
+r25113 2627ab313f
+r25114 f0125bc591
+r25115 2b41d07155
+r25116 6f895f4cbd
+r25117 f57ac28712
+r25118 b054289bd7
+r25119 26ad0c9e8c
+r25120 c412771635
+r25121 dd511e1a1a
+r25122 b3b9dbaee2
+r25123 bb0e6e9102
+r25124 cf85a61beb
+r25125 7d5b6fa1ee
+r25126 d8a4b0e8fc
+r25127 e0757f1726
+r25128 3f97335832
+r25129 d4f8dc660a
+r25130 5c416166c2
+r25131 4b8810d3a3
+r25132 a546fc8f49
+r25133 a3b1d1130c
+r25134 b567bdc1b2
+r25135 79c5790d05
+r25136 e49ec10e93
+r25137 9853b5b829
+r25138 83db5e6600
+r25139 066ab070e6
+r25140 781726bf75
+r25141 31c213d164
+r25142 444ab55481
+r25143 dbf4bf263a
+r25144 a14da40419
+r25145 21115422de
+r25146 8ba9b511c2
+r25147 b924c4142d
+r25148 5dc127e69c
+r25149 034489b501
+r25150 438c7a4540
+r25151 cb9c2f8335
+r25152 d8a40e730f
+r25153 2a9781ee4c
+r25154 d8912db143
+r25155 7b7b242299
+r25156 8196473768
+r25157 924b5852fa
+r25158 6c87275af7
+r25160 94a00c3168
+r25161 77c01a9bac
+r25162 c23c21853a
+r25164 42fb66a2cb
+r25165 e0a4bbdb39
+r25166 7a1dc55abe
+r25167 84442a01ce
+r25168 1f38dbf299
+r25169 e365b51c04
+r25170 d7cc162132
+r25171 72a095dcdc
+r25172 fdfdd09d51
+r25202 349a1aade0
+r25204 30ccdc9da6
+r25206 a1375bf437
+r25207 d782ab3246
+r25208 fa2a197462
+r25209 bf65e48526
+r25210 9d02b4adea
+r25212 300cb9e1ee
+r25213 60085c5cf8
+r25214 3c5f893b78
+r25215 ab3e6f21ae
+r25216 60d0585371
+r25217 dcc07bd9f0
+r25219 4df206e640
+r25220 ba81847fd9
+r25224 6d3159db05
+r25225 835be39b53
+r25226 d858fc14ad
+r25227 552d7aa113
+r25228 f34c836cb6
+r25229 69b9d9858e
+r25230 54b26beb2c
+r25231 9b3c49a171
+r25232 f90c462b42
+r25233 9e7d7e021c
+r25234 257a7e65de
+r25235 0bfef30696
+r25236 c48953cbe1
+r25237 f7bca9a7bf
+r25238 124e2f95ae
+r25239 2c28fc4afa
+r25240 321439e32f
+r25241 302f9fb68a
+r25242 acd25f5732
+r25243 26829db804
+r25244 dbd2a2a626
+r25245 d0d8b498b8
+r25246 1bc91a26b2
+r25247 a21cb1b375
+r25248 262114974b
+r25249
+r25250 ce89d436b8
+r25251 2ef447e266
+r25252 9f4e1b050f
+r25253 49ebb3ec42
+r25254 4a862eac9d
+r25255 f0169872c9
+r25256 7d4cff1dc6
+r25257 9e1d24d642
+r25258 74db0a59ad
+r25259 8110e02ec2
+r25260 4b616e2ff3
+r25261 3f2a92765e
+r25262 9f39fc0124
+r25263 7ed18f3300
+r25264 80d5122f2c
+r25265 6cb88f36ff
+r25266 4977341da7
+r25267 e3085dadb3
+r25268 a10f699d7c
+r25269 66862fe9d8
+r25270 5eefefb73b
+r25271 6163cdcc23
+r25272 70da5a627f
+r25273 0fac26971e
+r25274 360f747c67
+r25275 cda484779f
+r25276 e032852d12
+r25277 d8e882ad5c
+r25278 3a2529f9df
+r25279 124103be21
+r25280 60974b90da
+r25281 038fef39ad
+r25282 8a0d130537
+r25283 c849eb7c7d
+r25284 c614e932d0
+r25285 5e49b41819
+r25286 733669230a
+r25287 d79493bb72
+r25292 a0476af6bc
+r25293 a4fb15861b
+r25294 2621ee6328
+r25295 9eaf24abe6
+r25296 3010da2247
+r25297 21c0730f7f
+r25298 31108f7518
+r25299 4c71fabc01
+r25300 207b5ef725
+r25301 12162603c4
+r25302 ad775b3239
+r25303 aa674f304d
+r25304 29e501db0b
+r25305 90725a50c4
+r25306 5ed007aab7
+r25307 15df85b047
+r25308 42a2169161
+r25309 e56c8c561f
+r25310 1fc6f7eb4e
+r25311 9a7744dcaf
+r25312 dbeab9b86f
+r25313 873b4b8b55
+r25314 a94747dc47
+r25315 18617f77d2
+r25316 87d050bf09
+r25317 a8e5a7be9f
+r25318 e8f46334b4
+r25319 88710b419a
+r25320 a0f1c4c4f7
+r25321 b2a1ced1a7
+r25322 658ba1b4e6
+r25323 44b9cf0ca9
+r25324 970d4132b6
+r25325 b2f1b87468
+r25326 d34bd62d07
+r25327 03f3cb5fcd
+r25328 3e9041b031
+r25329 00da8a8f07
+r25330 628c0265aa
+r25331 c0ddb8f941
+r25332 48d2c78144
+r25333 dde17e953f
+r25334 04a39e7981
+r25335 ce895bbb40
+r25336 aafc0fe172
+r25337 654c9ff6e6
+r25338 fb2e30e472
+r25341 f9f164d3c7
+r25351 5c61410fe5
+r25352 c3c1c65d5f
+r25353 b204a9360f
+r25366 8c8e8788fd
+r25367 ac2ecfb3af
+r25370 460f57d5d3
+r25372 9f9af2ad48
+r25376 1ad15b1f50
+r25382 68031b3af1
+r25383 401baad565
+r25387 6b09630977
+r25388 ac0bfa6220
+r25389 321ecd84d8
+r25390 209167a1b4
+r25391 5dbb616610
+r25392 892ecd2db7
+r25393 ac96200c92
+r25394 e0890be9a7
+r25402 900f7a8f5c
+r25403 1942bb6cd4
+r25406 cee5d977cb
+r25407 5bbb198b24
+r25408 cda84e7f21
+r25410 4e488a6059
+r25411 c8385cbf67
+r25412 2b15e8ce93
+r25414 eb3ee130ad
+r25415 4231a0bc06
+r25416 902c61f397
+r25417 9bdc1a0b6d
+r25418 b5865cd83f
+r25419 af412cd72e
+r25420 67a63278a6
+r25421 613f30f1cd
+r25422 9c7e267082
+r25423 d0c5e4be55
+r25424 c0db3f2d06
+r25425 4f5419eecb
+r25426 8c0fa605fb
+r25427 daa26379ce
+r25428 257b6c91a5
+r25429 60ee9924b7
+r25430 2b748e9ce7
+r25431 987c30ddfb
+r25432 74062e3529
+r25433 6f1552568c
+r25434 39e50a12d2
+r25435 cf4037a46c
+r25436 254ad276ca
+r25437 39ebbf6743
+r25438 a1a870a72c
+r25439 5aa8100a48
+r25440 0dda8885a9
+r25441 9a86215c18
+r25442 e02eecbbad
+r25445 c18878ab71
+r25446 209f7e7657
+r25447 234336d7b1
+r25448 f7f5b50848
+r25449 b39a7044d6
+r25450 92f32deabb
+r25451 8709b52eef
+r25452 6d45fddd4c
+r25453 4f4a80ad5b
+r25454 ead69ed245
+r25455 990fa046e6
+r25456 05382e2351
+r25457 2b31bc81ad
+r25458 6fe5754cec
+r25459 be31934db3
+r25460 8b28292b53
+r25461 5b11f250ce
+r25462 9e4bdd411c
+r25463 cda4650d4d
+r25464 2e8cad2cc2
+r25465 b2aba00207
+r25466 554fb11b0c
+r25467 c1aaf1fc7a
+r25468 97da3af7a4
+r25469 335a6bd99b
+r25470 84189c6b15
+r25471 c773c47fe9
+r25472 a584c40018
+r25473 31827a6881
+r25474 e90ef48c1b
+r25475 87aca40676
+r25482 333f540595
+r25483 e3e64e4365
+r25484 879e5af47d
+r25485 ff7416d88b
+r25486 386dddde53
+r25487 e4288e5143
+r25488 febd8857dd
+r25490 48fcd8a794
+r25491 03b1fb29c6
+r25492 7f45f9c67e
+r25493 69867e949d
+r25494 9185598c8b
+r25495 8b4d5de0b6
+r25496 acb91674c8
+r25497 0440f885e9
+r25498 3fff0d0caf
+r25499 5522aeafa7
+r25500 3d740f4f79
+r25505 03ac255fa7
+r25507 abc851a1de
+r25509 f309513c9f
+r25510 e43daf434b
+r25511 20859263f2
+r25518 d8359a20a0
+r25519 719549799e
+r25520 044099d4f1
+r25521 6ba1b9f3c9
+r25522
+r25523 7a5ea2758e
+r25524 bfb20c64a9
+r25525 64a2e3074e
+r25526 63f072fe9b
+r25527 7a49a9aea9
+r25528 96066dec30
+r25529 1bbf88a1fd
+r25530 e4559e4387
+r25531 6a3b465ba9
+r25533 19592c45ed
+r25534 7e99a7d380
+r25535 cecee085f3
+r25537 553bea21fb
+r25538 a707ec6fef
+r25539 cae9d2306e
+r25540 4b29535009
+r25541 80952759fb
+r25544 a93134b483
+r25545 e69822117c
+r25546 3a1463cd83
+r25549 0e74720c49
+r25559 48e8133cb0
+r25560 77175ede13
+r25561 e1a9fd9a7a
+r25562 ce0df1e1bf
+r25563 84fcf633d9
+r25564 b9785280a7
+r25565 e97be9ce13
+r25566 006cd77979
+r25567 fbb5b57d65
+r25568 febf1a0cd9
+r25569 2fdbabe0a2
+r25570 0a9d3e00a4
+r25571 b5bedbce22
+r25572 c4db95fdb8
+r25573 3efce112b5
+r25574 649b4262c4
+r25575 2c548eac23
+r25576 f0b042b335
+r25577 caaf429668
+r25578 6f881202be
+r25583 65bf9178c4
+r25584 6d717dc986
+r25585 d52e53ce4f
+r25586 8f3c3f5387
+r25587 dd050a6a63
+r25588 476e4816f8
+r25589 d8add367dd
+r25596 aade88f8a7
+r25598 0e0e2055f3
+r25599 0377cad8c6
+r25600 9954de923e
+r25601 6d10bd53c5
+r25602 9183117cb4
+r25603 13f30c385b
+r25604 6817244d64
+r25608 fa2deeb430
+r25609 4235635142
+r25610 0d379b728a
+r25611 0d99f59eba
+r25612 c4bb14e760
+r25613 2f4349e8dd
+r25614 7cb2054eb6
+r25615 f3114ec2a4
+r25616 ac9243fb9e
+r25617 8e489f66ec
+r25618 596be479f1
+r25619 620f339bba
+r25620 45d3adac9d
+r25621 68806429fb
+r25622 8cd3eae681
+r25625 c37e8f45cf
+r25626 52c1d019d6
+r25635 f32a32b1b3
+r25636 2c5f1e8b02
+r25637 65a785e177
+r25638 ca15d245fd
+r25639 bcdd1882f1
+r25640 9a40a521b2
+r25641 b2b068133a
+r25642 cbf8534ff7
+r25643 8e8518864f
+r25644 7b173d5bad
+r25645 aaaa019588
+r25646 e8aee14bbd
+r25647 2e7026c0b6
+r25648 d5c30508ca
+r25649 3949410af7
+r25650 acc4c04b0c
+r25651 ac7152b8bb
+r25652 0815b27995
+r25655 b2f3fb2713
+r25656 7cddbc6564
+r25657 17c0462861
+r25658 09b1a31309
+r25659 3b357972e9
+r25660 36bdc192b2
+r25661 be57a47dcf
+r25664 9ffe29d61a
+r25668 c69b0aecc6
+r25669 bd2381d654
+r25670 3b48cc7fe0
+r25671 a3ce6c471a
+r25672 fa0f48a5df
+r25673 fef6649b31
+r25674 7343e04415
+r25675 670f62de1d
+r25676 3defd7a0a0
+r25677 a26fc299ca
+r25678 127dd7654b
+r25679 bbd8480584
+r25680 be9e2991d9
+r25681 3f58f66c8b
+r25682 bfeef8a9d3
+r25683 0c25af0ec8
+r25684 2553cc1fdc
+r25685 f7e038361a
+r25686 5637b22d21
+r25687 e21d9b0a39
+r25688 c22bc18ab6
+r25696 f6d4d84dd7
+r25697 088094b1c8
+r25698 47a131ac36
+r25699 158e6e6106
+r25700 ffcb1847b4
+r25701 4e3a9a64a8
+r25702 dfd19afc50
+r25703 3491b3d79d
+r25704 6c56d71a17
+r25705 c0aebb1220
+r25706 b38f2a1df3
+r25707 5e501977f8
+r25708 afe1d6fa62
+r25709 7e47107efa
+r25710 7dc4723db3
+r25711 1111b27d0e
+r25712 7bfdac0b73
+r25713 2b699c3fdb
+r25714 3e24f4c48d
+r25715 5d5826812a
+r25716 274ce61990
+r25717 c62f666664
+r25719 87972677b8
+r25720 567e9f2980
+r25722 aeda72b2ea
+r25723 0d5660cbcf
+r25724 660d80f682
+r25725 e412524fee
+r25726 a90fbf59ae
+r25727 e3efea04c2
+r25728 b1f7de6ef4
+r25737 e4879d785d
+r25738 287b935ea3
+r25739 7dfb214aaa
+r25742 148f4ef194
+r25743 8c9d01fffa
+r25744 1765432085
+r25745 288faf969a
+r25746 eeaec410f0
+r25747 888444b175
+r25748 9ef01e6885
+r25749 444914a881
+r25750 f4e4a8a588
+r25751 c567ad0922
+r25752 f7a4cdd56f
+r25753 08845f2ce3
+r25754 26ddf17b21
+r25755 82eb1aa430
+r25756 3a1332c451
+r25757 8987550566
+r25758 34387c7184
+r25759 02ac8de5c0
+r25761 4529141cc1
+r25762 f9aa83a6e5
+r25765 1c4765a416
+r25766 6116b8db81
+r25767 6663d12daa
+r25768 5355c120ef
+r25769 2891464fba
+r25770 a2e9a1b465
+r25771 b939e8fbab
+r25772 ff5619e1f0
+r25773 55109d0d25
+r25778 beadafa2d8
+r25779 3503dac971
+r25780 2b4b8bbe9d
+r25782 0d730957dd
+r25783 77d90e3aea
+r25784 e3bbd95afa
+r25785 7ab032f25a
+r25786 5d283f3f68
+r25787 d1a7af8e27
+r25788 10938bfc06
+r25789 ea562b4177
+r25790 97b41d36b6
+r25791 c7f14dbbcc
+r25792 b1c420e48b
+r25793 daffb123fd
+r25796 1e0f7dcb4f
+r25797 0afd6d1b19
+r25798 77aae5843a
+r25799 bcd155beb9
+r25800 e8451c2a8b
+r25801 e98c864cbb
+r25802 497e6321a0
+r25806 4646937ff8
+r25807 2adf5a0613
+r25808 2c1a1192ce
+r25809 bc4468cdd2
+r25810 1706358bdc
+r25811 4e86106b5b
+r25812 d08296f063
+r25813 8821b0f220
+r25814 ca47241bf8
+r25817 063f2c4984
+r25820 0ef5e8a645
+r25821 4b4acbd819
+r25822 168f8065ea
+r25823 d3f0fa824b
+r25824 4f5159f0ed
+r25826 e3b58d0c99
+r25827 1bd14badd7
+r25828 bca8959a1a
+r25829 fcd0998f1e
+r25830 9ea2cefb20
+r25831 e52053f10b
+r25832 58bc507ee1
+r25833 5690452698
+r25834 5575b8c368
+r25835 4d2499a835
+r25836 f434a6d49e
+r25837 7d772368d5
+r25838 581fad662c
+r25839 3778505276
+r25840 240fb16547
+r25841 6974cca537
+r25843 2d2a3e92de
+r25844 a98d0903a8
+r25845 23ab7e3c9a
+r25846 d0a36c66cb
+r25847 ee365acb17
+r25848 d6eb989388
+r25849 75890493a0
+r25850 fb2353db6c
+r25852 8fc7a72a2b
+r25853 8337964e31
+r25854 5fb68614da
+r25855 ac7b8020eb
+r25856 0816035d76
+r25857 612f87b3d3
+r25858 24eb4c5bb5
+r25859 3921e5be74
+r25860 dd8706fc11
+r25861 98b904db87
+r25862 8704ed2fc9
+r25863 d5b81b6cb1
+r25864 8394676c1e
+r25865 891a6e466b
+r25866 8a9fd64129
+r25867 dabe26bb1e
+r25868 421605022d
+r25869 f262ab507e
+r25870 ad3dada12c
+r25871 0172051d24
+r25872 acb1c39dbd
+r25873 4afae5be74
+r25874 3a195c71ba
+r25875 c7ec0385c7
+r25877 0c97d8c73f
+r25879 290f687fb6
+r25880 81fda510a7
+r25881 fa3c892017
+r25882 dbcc393e57
+r25884 1df8d23b47
+r25885 36adada0d5
+r25886 78db538e1d
+r25887 70996f8583
+r25888 6b70b7d23a
+r25889 9bdbc5bb34
+r25890 170089943b
+r25891 ffb65f0061
+r25893 5f0ef121a1
+r25894 893e8b6391
+r25899 daf6f0d5dd
+r25900 09188cd820
+r25901 4505c2b05c
+r25902 eb2d18b945
+r25903 49f352d890
+r25904 6111702474
+r25905 b005cd5798
+r25906 456aee6cad
+r25907 1b68611e04
+r25908 bcf53cbe91
+r25909 6c22499c40
+r25910 d1f89f473a
+r25911 48a26b9c2b
+r25912 2d3fe5733c
+r25913 1f3fe09a78
+r25914 62b0182834
+r25916 8de176f454
+r25917 bf0b9fcf84
+r25918 c0407608be
+r25919 0ba09556cd
+r25920 07c3e9c8c6
+r25921 1754813beb
+r25922 684d1901d9
+r25923 934f8015a2
+r25924 69b3cd5092
+r25928 b7b81ca286
+r25929
+r25930 b6778be919
+r25931 938eab16f8
+r25932 5852fd01b7
+r25935 22d125f1e3
+r25936 53427f86cd
+r25937 5df51cc5a6
+r25938 8006cc6760
+r25941 f4991fcffc
+r25942 508101158c
+r25943 1d4f2d4aa3
+r25944 54435d633e
+r25945 8901935da8
+r25946 4474d9ba20
+r25947 761faecd9f
+r25948 152be020c4
+r25949 affa7911f7
+r25950 d56a8a5d1c
+r25952 d6f9361e4b
+r25953 c8683ff5bf
+r25954 1c0105dec7
+r25957 5816db58e1
+r25958 15b9785d30
+r25959 838a09f2a9
+r25962 a0a045f5c0
+r25963 481096f2c5
+r25964 106180d020
+r25965 0362b6af90
+r25966 5cc3dad991
+r25968 27c8266eb6
+r25969 4eda3043c3
+r25970 bcc5eebedb
+r25971 f9fb5ca997
+r25972 173d9473a1
+r25973 f0bd9a805f
+r25974 7876a574d5
+r25976 7121c6a8db
+r25977 5d6844e9b6
+r25978 a38f03ba96
+r25979 9f9932bd20
+r25980 88e2cfae3d
+r25981 10f7a8c465
+r25982 d01ab1ba46
+r25983 7f4fa0ec6f
+r25984 042fdbc42a
+r25985 f194a29a53
+r25986 7918510f4d
+r25987 78315845b1
+r25988 f308e5d703
+r25989 1016522ec9
+r25990 bac7d17ab1
+r25992 d917d7c8a1
+r25993 ea5aac152d
+r25994 b6a300f3ac
+r25995 bc2bd6e67a
+r25996 0c4ad65950
+r25997 e864f48338
+r25998 89ceefb747
+r26000 01141595e9
+r26001 38a646ce5c
+r26002 46050d6ec4
+r26003 167309afd1
+r26004 b80ad1f452
+r26005 e6497919b3
+r26006 76e35fa141
+r26007 dc3fdb0d49
+r26008 e65ba2a5c2
+r26009 7e643d3e4a
+r26010 85e7755ef6
+r26011 3ba3b39b93
+r26012 ce5d909de9
+r26013 7abc466d64
+r26014 8a64ed85b9
+r26015 0a31808f5f
+r26016 b7395e9f50
+r26017 5f2be94ca4
+r26018 e7fc002d33
+r26019 5270d614f0
+r26020 3b0fd925a8
+r26023 44741eee53
+r26024 89d2dd52ef
+r26025 955b852dfd
+r26026 7c2c8c8adf
+r26027 e386ebdff8
+r26030 47c9911a12
+r26031 7eb6f102e8
+r26032 334872e33b
+r26033 214c145943
+r26034 6d5a16b382
+r26035 943d2cfb07
+r26036 eeb111c41d
+r26037 053e224677
+r26038 c6cc1bbafc
+r26039 e3fcce9c0b
+r26040 f9278123eb
+r26041 eb0643210f
+r26042 e86f07fdd4
+r26043 3b8db0dd75
+r26044 b34615a1e1
+r26045 cd69603589
+r26046 ac03178903
+r26047 a17be60676
+r26048 03112a3a3d
+r26049 370841db4b
+r26050 1189476b0e
+r26051 ae054a1663
+r26052 aa1219dcdb
+r26053 4fca89bfd0
+r26054 817579904b
+r26055 b93c4a9f97
+r26056 25ecde037f
+r26057 f191dca582
+r26058 579e999fbf
+r26059 bbde90f3dc
+r26060 23d7024e71
+r26061 667227b796
+r26062 4213eb4d56
+r26063 8e965f00e4
+r26064 4cfca8a7f6
+r26065 60fb9ec19b
+r26066 93717598b7
+r26067 2b069593c8
+r26068 32a753546e
+r26069 5fb26c6a88
+r26070 1b98d1fa2a
+r26072 afc755916f
+r26073 37201dd3cd
+r26074 172563dfbb
+r26075 b194689ada
+r26077 e4c5e04b06
+r26078 0bea2ab5f6
+r26079 311d813910
+r26080 66bf8db3f1
+r26081 4e987a3cf0
+r26082 f69d3e34dd
+r26083 88ab644173
+r26084 3c24983f42
+r26085 ee5644056a
+r26086 3e04761ce2
+r26087 ca37db37e9
+r26088 6dbd2dac27
+r26089 9c4f14411f
+r26090
+r26091 8eba9acbc4
+r26092 91dbfb2a8f
+r26093 fe38e54ca1
diff --git a/docs/svn-to-sha1-missing.txt b/docs/svn-to-sha1-missing.txt
new file mode 100644
index 0000000..6971257
--- /dev/null
+++ b/docs/svn-to-sha1-missing.txt
@@ -0,0 +1,140 @@
+# Shas are from https://github.com/paulp/legacy-svn-scala-full
+r309 | 45ffe9aa78
+r449 | 4bed839e59
+r1683 | 7bd4d88483
+r2051 | b23c8e0ecc
+r2197 | c0d1934836
+r3834 | 14d772c56b
+r4479 | 6520d1237f
+r4681 | d1884e972a
+r4683 | 1bc760309d
+r5529 | 8fa51577d6
+r5535 | a316dfdb36
+r5558 | c5a0f08b5e
+r5587 | acfdcee6d7
+r5643 | 0a61670c04
+r5715 | 3eb67c07e1
+r5830 | 86d29d352f
+r5878 | dc991d50da
+r6664 | eb9e4a73f4
+r6948 | 0cb34d506c
+r6952 | 19c934a4de
+r7733 | cf4d26c3d5
+r7936 | c91a40fd4a
+r8191 | 07b14e5e78
+r8532 | cb3a221dc9
+r9120 | 0358410b8c
+r9127 | 4a99565c4d
+r9374 | 81944e8c6f
+r9981 | c8a3383d6e
+r10088 | b0c5bd3c71
+r10521 | df7c409574
+r10522 | 2f7e5a7a45
+r10523 | 676dccd266
+r10661 | 2543f36ad6
+r10708 | d24c570712
+r10767 | 8f9e7589d1
+r10814 | fa8e526415
+r10818 | bdafefa11f
+r12022 | 1842903cd6
+r12333 | ac3b782c26
+r13582 | 66e547e5d7
+r13616 | 4323db0fe6
+r13706 | 0170a864c0
+r13713 | 746a6c03d0
+r13744 | 3485f71caf
+r13988 | f4508f3f91
+r14316 | 787260e7a7
+r14571 | d0fa3c1d43
+r14877 | 37db26c6d7
+r14878 | 66e9bab99b
+r14928 | 3e741d62de
+r15179 | dc53a9887a
+r15181 | e2b387e7a5
+r15343 | e3b0ad33d7
+r15349 | 4f280665c2
+r15659 | 306e59ef39
+r16569 | 126b7403f8
+r16689 | 6a6ab0cbcd
+r16690 | 8ea9a17905
+r16694 | 70e81644e2
+r16695 | fee7bc4772
+r16696 | 0537dbe80a
+r17089 | 25ca913ffb
+r17697 | 47612b688f
+r18364 | ec4670e120
+r18704 | 973010f034
+r18714 | cc69b10717
+r18736 | ee4e13af03
+r18786 | 60feb7dba9
+r18821 | a3ae86b245
+r19523 | 59829c478b
+r19534 | 8206ded007
+r20984 | ec5360d68d
+r21215 | 87a8a7b3ed
+r21341 | afd1ce73e0
+r21419 | 1aedfd0433
+r21834 | 0964721434
+r21837 | 3e180cbb8a
+r21914 | 2b17044a88
+r21919 | 0cdc3778f6
+r21941 | cfee7f5b4a
+r22007 | 97fd29a709
+r22048 | 6a22c267d5
+r22174 | 48e967ea18
+r22180 | b6cdb65735
+r22194 | 8d839e950d
+r22197 | f288be3a1f
+r22248 | bfc7b37042
+r22249 | 64363b019a
+r22279 | 914b8eb08b
+r22281 | d495f6f3cd
+r22296 | 164ffdcce3
+r22300 | 8b4bb765db
+r22316 | 6c59c8c68f
+r22356 | f1912c197d
+r22359 | 51b5c2a504
+r22371 | 767a1147c9
+r22372 | f85daa6911
+r22373 | 5908717a04
+r22375 | 5b73be9a15
+r22396 | b5a49161ce
+r22409 | f0f5ce5102
+r22410 | 46976a50ca
+r22417 | 07cb720be3
+r22421 | 734023d64f
+r22423 | c7f1dbe2d1
+r22479 | 4f73f40c49
+r22493 | 12f498d4a1
+r22532 | 080efc62da
+r22534 | 2e62d6991c
+r22550 | a03e9494fc
+r22580 | a3eb24ff8b
+r22599 | c5082d61d8
+r22627 | 14e121bc33
+r22631 | 5988b2a472
+r22652 | 92438a01f5
+r22765 | 46a68d025c
+r22917 | c0c3a20428
+r22952 | 611211e5f8
+r23203 | c8ad56f269
+r23437 | 63b3d5cee1
+r23656 | 2c6625e236
+r23715 | dda53a171e
+r23869 | 26507816f5
+r23978 | b2345752fb
+r24033 | 09041c59aa
+r24122 | 2bf6b6d6dd
+r24246 | a150ac383b
+r24376 | 861fda78b5
+r24450 | fe95545d68
+r24456 | d3456d776b
+r24482 | d8311274d1
+r24559 | 75c9b12581
+r24686 | a7841e490c
+r24982 | d4ce3b2c21
+r25203 | 029167f940
+r25249 | 288a6b856d
+r25522 | cacd228c5b
+r25929 | 710aba4df0
+r26090 | 93e5faca79
diff --git a/gitconfig.SAMPLE b/gitconfig.SAMPLE
new file mode 100644
index 0000000..d90c3bf
--- /dev/null
+++ b/gitconfig.SAMPLE
@@ -0,0 +1,8 @@
+# With something like this in .git/config or ~/.gitconfig
+# you can diff class files and jar files.
+[diff "class"]
+ textconv = tools/class-dump
+ cachetextconv = true
+[diff "jar"]
+ textconv = tools/jar-dump
+ cachetextconv = true
diff --git a/gitignore.SAMPLE b/gitignore.SAMPLE
deleted file mode 100644
index 3c6d873..0000000
--- a/gitignore.SAMPLE
+++ /dev/null
@@ -1,25 +0,0 @@
-# see also test/files/.gitignore
-/.gitignore
-/test/files/.gitignore
-
-/.scala_dependencies
-
-# "a" and "a/" to get both file (i.e. symlink) and folder
-/build
-/build/
-/target
-/target/
-/dists/
-/out/
-/bin/
-
-/sandbox/
-
-/.classpath
-
-/src/intellij/*.iml
-/src/intellij/*.ipr
-/src/intellij/*.iws
-
-/project/boot/
-/project/build/target/
diff --git a/lib/fjbg.jar b/lib/fjbg.jar
deleted file mode 100644
index 4ffd443..0000000
Binary files a/lib/fjbg.jar and /dev/null differ
diff --git a/lib/fjbg.jar.desired.sha1 b/lib/fjbg.jar.desired.sha1
index fd3def2..6f3ccc7 100644
--- a/lib/fjbg.jar.desired.sha1
+++ b/lib/fjbg.jar.desired.sha1
@@ -1 +1 @@
-bd8e22a955eeb82671c5fdb8a7a14bc7f25e9eb1 ?fjbg.jar
+8acc87f222210b4a5eb2675477602fc1759e7684 *fjbg.jar
diff --git a/lib/forkjoin.jar b/lib/forkjoin.jar
deleted file mode 100644
index 3830868..0000000
Binary files a/lib/forkjoin.jar and /dev/null differ
diff --git a/lib/forkjoin.jar.desired.sha1 b/lib/forkjoin.jar.desired.sha1
index 758ecb4..8bb86f3 100644
--- a/lib/forkjoin.jar.desired.sha1
+++ b/lib/forkjoin.jar.desired.sha1
@@ -1 +1 @@
-12c479a33ee283599fdb7aa91d6a1df0197a52cf ?forkjoin.jar
+ddd7d5398733c4fbbb8355c049e258d47af636cf ?forkjoin.jar
diff --git a/lib/jline.jar b/lib/jline.jar
deleted file mode 100644
index 0a018cc..0000000
Binary files a/lib/jline.jar and /dev/null differ
diff --git a/lib/jline.jar.desired.sha1 b/lib/jline.jar.desired.sha1
index ea3729c..b042613 100644
--- a/lib/jline.jar.desired.sha1
+++ b/lib/jline.jar.desired.sha1
@@ -1 +1 @@
-545b37930819a1196705e582a232abfeb252cc8d ?jline.jar
+a5261e70728c1847639e2b47d953441d0b217bcb *jline.jar
diff --git a/lib/midpapi10.jar.desired.sha1 b/lib/midpapi10.jar.desired.sha1
deleted file mode 100644
index 6c73bd7..0000000
--- a/lib/midpapi10.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6597e6f74113e952a4233c451c973f5ac7f2b705 ?midpapi10.jar
diff --git a/lib/msil.jar b/lib/msil.jar
deleted file mode 100644
index 4c65bf8..0000000
Binary files a/lib/msil.jar and /dev/null differ
diff --git a/lib/msil.jar.desired.sha1 b/lib/msil.jar.desired.sha1
index 7dd6b5d..9396b27 100644
--- a/lib/msil.jar.desired.sha1
+++ b/lib/msil.jar.desired.sha1
@@ -1 +1 @@
-58f64cd00399c724e7d526e5bdcbce3e2b79f78b ?msil.jar
+d48cb950ceded82a5e0ffae8ef2c68d0923ed00c *msil.jar
diff --git a/lib/scala-compiler-src.jar.desired.sha1 b/lib/scala-compiler-src.jar.desired.sha1
new file mode 100644
index 0000000..082d86f
--- /dev/null
+++ b/lib/scala-compiler-src.jar.desired.sha1
@@ -0,0 +1 @@
+cfa3ee21f76cd5c115bd3bc070a3b401587bafb5 ?scala-compiler-src.jar
diff --git a/lib/scala-compiler.jar b/lib/scala-compiler.jar
deleted file mode 100644
index ae810a7..0000000
Binary files a/lib/scala-compiler.jar and /dev/null differ
diff --git a/lib/scala-compiler.jar.desired.sha1 b/lib/scala-compiler.jar.desired.sha1
index 4180969..bb39b4d 100644
--- a/lib/scala-compiler.jar.desired.sha1
+++ b/lib/scala-compiler.jar.desired.sha1
@@ -1 +1 @@
-5f31fab985a3efc21229297810c625b0a2593757 ?scala-compiler.jar
+d54b99f215d4d42b3f0b3489fbb1081270700992 ?scala-compiler.jar
diff --git a/lib/scala-library-src.jar b/lib/scala-library-src.jar
deleted file mode 100644
index 39e9887..0000000
Binary files a/lib/scala-library-src.jar and /dev/null differ
diff --git a/lib/scala-library-src.jar.desired.sha1 b/lib/scala-library-src.jar.desired.sha1
index 3370299..cd42c23 100644
--- a/lib/scala-library-src.jar.desired.sha1
+++ b/lib/scala-library-src.jar.desired.sha1
@@ -1 +1 @@
-364c3b992bdebeac9fafb187e1acbece45644de7 ?scala-library-src.jar
+8bdac1cdd60b73ff7e12fd2b556355fa10343e2d ?scala-library-src.jar
diff --git a/lib/scala-library.jar b/lib/scala-library.jar
deleted file mode 100644
index 5d8fac9..0000000
Binary files a/lib/scala-library.jar and /dev/null differ
diff --git a/lib/scala-library.jar.desired.sha1 b/lib/scala-library.jar.desired.sha1
index 4993fe9..6bdeaa9 100644
--- a/lib/scala-library.jar.desired.sha1
+++ b/lib/scala-library.jar.desired.sha1
@@ -1 +1 @@
-c52dbed261e4870a504cef24518484b335a38067 ?scala-library.jar
+1e0e39fae15b42e85998740511ec5a3830e26243 ?scala-library.jar
diff --git a/lib/scala-reflect-src.jar.desired.sha1 b/lib/scala-reflect-src.jar.desired.sha1
new file mode 100644
index 0000000..d630c93
--- /dev/null
+++ b/lib/scala-reflect-src.jar.desired.sha1
@@ -0,0 +1 @@
+d229f4c91ea8ab1a81559b5803efd9b0b1632f0b ?scala-reflect-src.jar
diff --git a/lib/scala-reflect.jar.desired.sha1 b/lib/scala-reflect.jar.desired.sha1
new file mode 100644
index 0000000..a5d6701
--- /dev/null
+++ b/lib/scala-reflect.jar.desired.sha1
@@ -0,0 +1 @@
+288f47dbe1002653e030fd25ca500b9ffe1ebd64 ?scala-reflect.jar
diff --git a/project/Build.scala b/project/Build.scala
new file mode 100644
index 0000000..a50a572
--- /dev/null
+++ b/project/Build.scala
@@ -0,0 +1,336 @@
+import sbt._
+import Keys._
+import partest._
+import ScalaBuildKeys._
+import Release._
+
+
+object ScalaBuild extends Build with Layers with Packaging with Testing {
+
+ // Build wide settings:
+ override lazy val settings = super.settings ++ Versions.settings ++ Seq(
+ autoScalaLibrary := false,
+ resolvers += Resolver.url(
+ "Typesafe nightlies",
+ url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/")
+ )(Resolver.ivyStylePatterns),
+ resolvers ++= Seq(
+ "junit interface repo" at "https://repository.jboss.org/nexus/content/repositories/scala-tools-releases",
+ ScalaToolsSnapshots
+ ),
+ organization := "org.scala-lang",
+ version <<= Versions.mavenVersion,
+ pomExtra := epflPomExtra
+ )
+
+ // Collections of projects to run 'compile' on.
+ lazy val compiledProjects = Seq(quickLib, quickComp, continuationsLibrary, actors, swing, forkjoin, fjbg)
+ // Collection of projects to 'package' and 'publish' together.
+ lazy val packagedBinaryProjects = Seq(scalaLibrary, scalaCompiler, swing, actors, continuationsPlugin, jline, scalap)
+ lazy val partestRunProjects = Seq(testsuite, continuationsTestsuite)
+
+ private def epflPomExtra = (
+ <xml:group>
+ <inceptionYear>2002</inceptionYear>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html</url>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ </scm>
+ <issueManagement>
+ <system>jira</system>
+ <url>http://issues.scala-lang.org</url>
+ </issueManagement>
+ </xml:group>
+ )
+
+ // Settings used to make sure publishing goes smoothly.
+ def publishSettings: Seq[Setting[_]] = Seq(
+ ivyScala ~= ((is: Option[IvyScala]) => is.map(_.copy(checkExplicit = false))),
+ pomIncludeRepository := (_ => false),
+ publishMavenStyle := true,
+ makePomConfiguration <<= makePomConfiguration apply (_.copy(configurations = Some(Seq(Compile, Default)))),
+ pomExtra := epflPomExtra
+ )
+
+ // Settings for root project. These are aggregate tasks against the rest of the build.
+ def projectSettings: Seq[Setting[_]] = publishSettings ++ Seq(
+ doc in Compile <<= (doc in documentation in Compile).identity,
+ // These next two aggregate commands on several projects and return results that are to be ignored by remaining tasks.
+ compile in Compile <<= compiledProjects.map(p => compile in p in Compile).join.map(_.head),
+ // TODO - just clean target? i.e. target map IO.deleteRecursively
+ clean <<= (compiledProjects ++ partestRunProjects).map(p => clean in p).dependOn,
+ packageBin in Compile <<= packagedBinaryProjects.map(p => packageBin in p in Compile).join.map(_.head),
+ // TODO - Make sure scalaLibrary has packageDoc + packageSrc from documentation attached...
+ publish <<= packagedBinaryProjects.map(p => publish in p).join.map(_.head),
+ publishLocal <<= packagedBinaryProjects.map(p => publishLocal in p).join.map(_.head),
+ packageDoc in Compile <<= (packageDoc in documentation in Compile).identity,
+ packageSrc in Compile <<= (packageSrc in documentation in Compile).identity,
+ test in Test <<= (runPartest in testsuite, runPartest in continuationsTestsuite, checkSame in testsuite) map { (a,b,c) => () },
+ lockerLock <<= (lockFile in lockerLib, lockFile in lockerComp, compile in Compile in lockerLib, compile in Compile in lockerComp) map { (lib, comp, _, _) =>
+ Seq(lib,comp).foreach(f => IO.touch(f))
+ },
+ lockerUnlock <<= (lockFile in lockerLib, lockFile in lockerComp) map { (lib, comp) =>
+ Seq(lib,comp).foreach(IO.delete)
+ },
+ genBinQuick <<= (genBinQuick in scaladist).identity,
+ makeDist <<= (makeDist in scaladist).identity,
+ makeExplodedDist <<= (makeExplodedDist in scaladist).identity,
+ // Note: We override unmanagedSources so that ~ compile will look at all these sources, then run our aggregated compile...
+ unmanagedSourceDirectories in Compile <<= baseDirectory apply (_ / "src") apply { dir =>
+ Seq("library/scala","actors","compiler","fjbg","swing","continuations/library","forkjoin") map (dir / _)
+ },
+ // TODO - Make exported products == makeDist so we can use this when creating a *real* distribution.
+ commands += Release.pushStarr
+ )
+ // Note: Root project is determined by lowest-alphabetical project that has baseDirectory as file("."). we use aaa_ to 'win'.
+ lazy val aaa_root = Project("scala", file(".")) settings(projectSettings: _*) settings(ShaResolve.settings: _*)
+
+ // External dependencies used for various projects
+ lazy val externalDeps: Setting[_] = libraryDependencies <<= (sbtVersion)(v =>
+ Seq(
+ "org.apache.ant" % "ant" % "1.8.2",
+ "org.scala-sbt" % "compiler-interface" % v % "provided"
+ )
+ )
+
+ def fixArtifactSrc(dir: File, name: String) = name match {
+ case x if x startsWith "scala-" => dir / "src" / (name drop 6)
+ case x => dir / "src" / name
+ }
+
+ // These are setting overrides for most artifacts in the Scala build file.
+ def settingOverrides: Seq[Setting[_]] = publishSettings ++ Seq(
+ crossPaths := false,
+ autoScalaLibrary := false,
+ // Work around a bug where scala-library (and forkjoin) is put on classpath for analysis.
+ classpathOptions := ClasspathOptions.manual,
+ publishArtifact in packageDoc := false,
+ publishArtifact in packageSrc := false,
+ target <<= (baseDirectory, name) apply (_ / "target" / _),
+ (classDirectory in Compile) <<= target(_ / "classes"),
+ javacOptions ++= Seq("-target", "1.5", "-source", "1.5"),
+ scalaSource in Compile <<= (baseDirectory, name) apply fixArtifactSrc,
+ javaSource in Compile <<= (baseDirectory, name) apply fixArtifactSrc,
+ unmanagedJars in Compile := Seq(),
+ // Most libs in the compiler use this order to build.
+ compileOrder in Compile := CompileOrder.JavaThenScala,
+ lockFile <<= target(_ / "compile.lock"),
+ skip in Compile <<= lockFile map (_.exists),
+ lock <<= lockFile map (f => IO.touch(f)),
+ unlock <<= lockFile map IO.delete
+ )
+
+ // --------------------------------------------------------------
+ // Libraries used by Scalac that change infrequently
+ // (or hopefully so).
+ // --------------------------------------------------------------
+
+ // Jline nested project. Compile this sucker once and be done.
+ lazy val jline = Project("jline", file("src/jline"))
+ // Fast Java Bytecode Generator (nested in every scala-compiler.jar)
+ lazy val fjbg = Project("fjbg", file(".")) settings(settingOverrides : _*)
+ // Our wrapped version of msil.
+ lazy val asm = Project("asm", file(".")) settings(settingOverrides : _*)
+ // Forkjoin backport
+ lazy val forkjoin = Project("forkjoin", file(".")) settings(settingOverrides : _*)
+
+ // --------------------------------------------------------------
+ // The magic kingdom.
+ // Layered compilation of Scala.
+ // Stable Reference -> Locker ('Lockable' dev version) -> Quick -> Strap (Binary compatibility testing)
+ // --------------------------------------------------------------
+
+ // Need a report on this...
+ // TODO - Resolve STARR from a repo..
+ lazy val STARR = scalaInstance <<= (appConfiguration, ShaResolve.pullBinaryLibs in ThisBuild) map { (app, _) =>
+ val launcher = app.provider.scalaProvider.launcher
+ val library = file("lib/scala-library.jar")
+ val compiler = file("lib/scala-compiler.jar")
+ val libJars = (file("lib") * "*.jar").get filterNot Set(library, compiler)
+ ScalaInstance("starr", library, compiler, launcher, libJars: _*)
+ }
+
+ // Locker is a lockable Scala compiler that can be built of 'current' source to perform rapid development.
+ lazy val (lockerLib, lockerReflect, lockerComp) = makeLayer("locker", STARR, autoLock = true)
+ lazy val locker = Project("locker", file(".")) aggregate(lockerLib, lockerReflect, lockerComp)
+
+ // Quick is the general purpose project layer for the Scala compiler.
+ lazy val (quickLib, quickReflect, quickComp) = makeLayer("quick", makeScalaReference("locker", lockerLib, lockerReflect, lockerComp))
+ lazy val quick = Project("quick", file(".")) aggregate(quickLib, quickReflect, quickComp)
+
+ // Reference to quick scala instance.
+ lazy val quickScalaInstance = makeScalaReference("quick", quickLib, quickReflect, quickComp)
+ def quickScalaLibraryDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickLib in Compile).identity
+ def quickScalaReflectDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickReflect in Compile).identity
+ def quickScalaCompilerDependency = unmanagedClasspath in Compile <++= (exportedProducts in quickComp in Compile).identity
+
+ // Strapp is used to test binary 'sameness' between things built with locker and things built with quick.
+ lazy val (strappLib, strappReflect, strappComp) = makeLayer("strapp", quickScalaInstance)
+
+ // --------------------------------------------------------------
+ // Projects dependent on layered compilation (quick)
+ // --------------------------------------------------------------
+ def addCheaterDependency(projectName: String): Setting[_] =
+ pomPostProcess <<= (version, organization, pomPostProcess) apply { (v,o,k) =>
+ val dependency: scala.xml.Node =
+ <dependency>
+ <groupId>{o}</groupId>
+ <artifactid>{projectName}</artifactid>
+ <version>{v}</version>
+ </dependency>
+ def fixDependencies(node: scala.xml.Node): scala.xml.Node = node match {
+ case <dependencies>{nested at _*}</dependencies> => <dependencies>{dependency}{nested}</dependencies>
+ case x => x
+ }
+ // This is a hack to get around issues where \ and \\ don't work if any of the children are `scala.xml.Group`.
+ def hasDependencies(root: scala.xml.Node): Boolean =
+ (root.child collectFirst {
+ case n: scala.xml.Elem if n.label == "dependencies" => n
+ } isEmpty)
+ // TODO - Keep namespace on project...
+ k andThen {
+ case n @ <project>{ nested at _*}</project> if hasDependencies(n) =>
+ <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{nested}<dependencies>{dependency}</dependencies></project>
+ case <project>{ nested at _*}</project> =>
+ <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0">{ nested map fixDependencies }</project>
+ }
+ }
+
+ // TODO - in sabbus, these all use locker to build... I think tihs way is better, but let's farm this idea around.
+ lazy val dependentProjectSettings = settingOverrides ++ Seq(quickScalaInstance, quickScalaLibraryDependency, addCheaterDependency("scala-library"))
+ lazy val actors = Project("scala-actors", file(".")) settings(dependentProjectSettings:_*) dependsOn(forkjoin % "provided")
+ lazy val swing = Project("scala-swing", file(".")) settings(dependentProjectSettings:_*) dependsOn(actors % "provided")
+ // This project will generate man pages (in man1 and html) for scala.
+ lazy val manmakerSettings: Seq[Setting[_]] = dependentProjectSettings :+ externalDeps
+ lazy val manmaker = Project("manual", file(".")) settings(manmakerSettings:_*)
+
+ // Things that compile against the compiler.
+ lazy val compilerDependentProjectSettings = dependentProjectSettings ++ Seq(quickScalaReflectDependency, quickScalaCompilerDependency, addCheaterDependency("scala-compiler"))
+
+ lazy val scalacheck = Project("scalacheck", file(".")) settings(compilerDependentProjectSettings:_*) dependsOn(actors % "provided")
+ lazy val partestSettings = compilerDependentProjectSettings :+ externalDeps
+ lazy val partest = Project("partest", file(".")) settings(partestSettings:_*) dependsOn(actors,forkjoin,scalap,asm)
+ lazy val scalapSettings = compilerDependentProjectSettings ++ Seq(
+ name := "scalap",
+ exportJars := true
+ )
+ lazy val scalap = Project("scalap", file(".")) settings(scalapSettings:_*)
+
+ // --------------------------------------------------------------
+ // Continuations plugin + library
+ // --------------------------------------------------------------
+ lazy val continuationsPluginSettings = compilerDependentProjectSettings ++ Seq(
+ scalaSource in Compile <<= baseDirectory(_ / "src/continuations/plugin/"),
+ resourceDirectory in Compile <<= baseDirectory(_ / "src/continuations/plugin/"),
+ exportJars := true,
+ name := "continuations" // Note: This artifact is directly exported.
+
+ )
+ lazy val continuationsPlugin = Project("continuations-plugin", file(".")) settings(continuationsPluginSettings:_*)
+ lazy val continuationsLibrarySettings = dependentProjectSettings ++ Seq(
+ scalaSource in Compile <<= baseDirectory(_ / "src/continuations/library/"),
+ scalacOptions in Compile <++= (exportedProducts in Compile in continuationsPlugin) map {
+ case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
+ }
+ )
+ lazy val continuationsLibrary = Project("continuations-library", file(".")) settings(continuationsLibrarySettings:_*)
+
+ // TODO - OSGi Manifest
+
+ // --------------------------------------------------------------
+ // Real Library Artifact
+ // --------------------------------------------------------------
+ val allSubpathsCopy = (dir: File) => (dir.*** --- dir) x (relativeTo(dir)|flat)
+ def productTaskToMapping(products : Seq[File]) = products flatMap { p => allSubpathsCopy(p) }
+ lazy val packageScalaLibBinTask = Seq(quickLib, continuationsLibrary, forkjoin).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val scalaLibArtifactSettings: Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaLibBinTask)) ++ Seq(
+ name := "scala-library",
+ crossPaths := false,
+ exportJars := true,
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ packageDoc in Compile <<= (packageDoc in documentation in Compile).identity,
+ packageSrc in Compile <<= (packageSrc in documentation in Compile).identity,
+ fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
+ quickScalaInstance,
+ target <<= (baseDirectory, name) apply (_ / "target" / _)
+ )
+ lazy val scalaLibrary = Project("scala-library", file(".")) settings(publishSettings:_*) settings(scalaLibArtifactSettings:_*)
+
+ // --------------------------------------------------------------
+ // Real Reflect Artifact
+ // --------------------------------------------------------------
+
+ lazy val packageScalaReflect = Seq(quickReflect).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val scalaReflectArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaReflect)) ++ Seq(
+ name := "scala-reflect",
+ crossPaths := false,
+ exportJars := true,
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
+ quickScalaInstance,
+ target <<= (baseDirectory, name) apply (_ / "target" / _)
+ )
+ lazy val scalaReflect = Project("scala-reflect", file(".")) settings(publishSettings:_*) settings(scalaReflectArtifactSettings:_*) dependsOn(scalaLibrary)
+
+
+ // --------------------------------------------------------------
+ // Real Compiler Artifact
+ // --------------------------------------------------------------
+ lazy val packageScalaBinTask = Seq(quickComp, fjbg, asm).map(p => products in p in Compile).join.map(_.flatten).map(productTaskToMapping)
+ lazy val scalaBinArtifactSettings : Seq[Setting[_]] = inConfig(Compile)(Defaults.packageTasks(packageBin, packageScalaBinTask)) ++ Seq(
+ name := "scala-compiler",
+ crossPaths := false,
+ exportJars := true,
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ fullClasspath in Runtime <<= (exportedProducts in Compile).identity,
+ quickScalaInstance,
+ target <<= (baseDirectory, name) apply (_ / "target" / _)
+ )
+ lazy val scalaCompiler = Project("scala-compiler", file(".")) settings(publishSettings:_*) settings(scalaBinArtifactSettings:_*) dependsOn(scalaReflect)
+ lazy val fullQuickScalaReference = makeScalaReference("pack", scalaLibrary, scalaReflect, scalaCompiler)
+
+
+ // --------------------------------------------------------------
+ // Generating Documentation.
+ // --------------------------------------------------------------
+
+ // TODO - Migrate this into the dist project.
+ // Scaladocs
+ lazy val documentationSettings: Seq[Setting[_]] = dependentProjectSettings ++ Seq(
+ // TODO - Make these work for realz.
+ defaultExcludes in unmanagedSources in Compile := ((".*" - ".") || HiddenFileFilter ||
+ "reflect/Print.scala" ||
+ "reflect/Symbol.scala" ||
+ "reflect/Tree.scala" ||
+ "reflect/Type.scala" ||
+ "runtime/*$.scala" ||
+ "runtime/ScalaRuntime.scala" ||
+ "runtime/StringAdd.scala" ||
+ "scala/swing/test/*"),
+ sourceFilter in Compile := ("*.scala"),
+ unmanagedSourceDirectories in Compile <<= baseDirectory apply { dir =>
+ Seq(dir / "src" / "library" / "scala", dir / "src" / "actors", dir / "src" / "swing", dir / "src" / "continuations" / "library")
+ },
+ compile := inc.Analysis.Empty,
+ // scaladocOptions in Compile <++= (baseDirectory) map (bd =>
+ // Seq("-sourcepath", (bd / "src" / "library").getAbsolutePath,
+ // "-doc-no-compile", (bd / "src" / "library-aux").getAbsolutePath,
+ // "-doc-source-url", """https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/€{FILE_PATH}.scala#L1""",
+ // "-doc-root-content", (bd / "compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt").getAbsolutePath
+ // )),
+ classpathOptions in Compile := ClasspathOptions.manual
+ )
+ lazy val documentation = (
+ Project("documentation", file("."))
+ settings (documentationSettings: _*)
+ dependsOn(quickLib, quickComp, actors, fjbg, forkjoin, swing, continuationsLibrary)
+ )
+}
diff --git a/project/Layers.scala b/project/Layers.scala
new file mode 100644
index 0000000..35cc79c
--- /dev/null
+++ b/project/Layers.scala
@@ -0,0 +1,120 @@
+import sbt._
+import Keys._
+import com.jsuereth.git.GitKeys.gitRunner
+import ScalaBuildKeys.lock
+
+/** This trait stores all the helper methods to generate layers in Scala's layered build. */
+trait Layers extends Build {
+ // TODO - Clean this up or use a self-type.
+
+ /** Default SBT overrides needed for layered compilation. */
+ def settingOverrides: Seq[Setting[_]]
+ /** Reference to the jline project */
+ def jline: Project
+ /** Reference to forkjoin library */
+ def forkjoin: Project
+ /** Reference to Fast-Java-Bytecode-Generator library */
+ def fjbg: Project
+ /** Reference to the ASM wrapped project. */
+ def asm: Project
+ /** A setting that adds some external dependencies. */
+ def externalDeps: Setting[_]
+ /** The root project. */
+ def aaa_root: Project
+
+ /** Creates a reference Scala version that can be used to build other projects. This takes in the raw
+ * library, compiler and fjbg libraries as well as a string representing the layer name (used for compiling the compile-interface).
+ */
+ def makeScalaReference(layer: String, library: Project, reflect: Project, compiler: Project) =
+ scalaInstance <<= (appConfiguration in library,
+ version in library,
+ (exportedProducts in library in Compile),
+ (exportedProducts in reflect in Compile),
+ (exportedProducts in compiler in Compile),
+ (exportedProducts in fjbg in Compile),
+ (fullClasspath in jline in Runtime),
+ (exportedProducts in asm in Runtime)) map {
+ (app, version: String, lib: Classpath, reflect: Classpath, comp: Classpath, fjbg: Classpath, jline: Classpath, asm: Classpath) =>
+ val launcher = app.provider.scalaProvider.launcher
+ (lib,comp) match {
+ case (Seq(libraryJar), Seq(compilerJar)) =>
+ ScalaInstance(
+ version + "-" + layer + "-",
+ libraryJar.data,
+ compilerJar.data,
+ launcher,
+ ((fjbg.files ++ jline.files ++ asm.files ++ reflect.files):_*))
+ case _ => error("Cannot build a ScalaReference with more than one classpath element")
+ }
+ }
+
+ /** Creates a "layer" of Scala compilation. That is, this will build the next version of Scala from a previous version.
+ * Returns the library project and compiler project from the next layer.
+ * Note: The library and compiler are not *complete* in the sense that they are missing things like "actors" and "fjbg".
+ */
+ def makeLayer(layer: String, referenceScala: Setting[Task[ScalaInstance]], autoLock: Boolean = false) : (Project, Project, Project) = {
+ val autoLockSettings: Seq[Setting[_]] =
+ if(autoLock) Seq(compile in Compile <<= (compile in Compile, lock) apply { (c, l) =>
+ c flatMapR { cResult =>
+ val result = Result.tryValue(cResult)
+ l mapR { tx => result }
+ }
+ })
+ else Seq.empty
+
+
+ val library = Project(layer + "-library", file(".")) settings(settingOverrides: _*) settings(autoLockSettings:_*) settings(
+ version := layer,
+ // TODO - use depends on.
+ unmanagedClasspath in Compile <<= (exportedProducts in forkjoin in Compile).identity,
+ managedClasspath in Compile := Seq(),
+ scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "library"),
+ resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "library"),
+ defaultExcludes in unmanagedResources := ("*.scala" | "*.java" | "*.disabled"),
+ // TODO - Allow other scalac option settings.
+ scalacOptions in Compile <++= (scalaSource in Compile) map (src => Seq("-sourcepath", src.getAbsolutePath)),
+ resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("library.properties"),
+ referenceScala
+ )
+
+ // Define the reflection
+ val reflect = Project(layer + "-reflect", file(".")) settings(settingOverrides:_*) settings(autoLockSettings:_*) settings(
+ version := layer,
+ scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "reflect"),
+ resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "reflect"),
+ defaultExcludes := ("tests"),
+ defaultExcludes in unmanagedResources := "*.scala",
+ resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("reflect.properties"),
+ // TODO - Use depends on *and* SBT's magic dependency mechanisms...
+ unmanagedClasspath in Compile <<= Seq(forkjoin, library).map(exportedProducts in Compile in _).join.map(_.flatten),
+ externalDeps,
+ referenceScala
+ )
+
+ // Define the compiler
+ val compiler = Project(layer + "-compiler", file(".")) settings(settingOverrides:_*) settings(autoLockSettings:_*) settings(
+ version := layer,
+ scalaSource in Compile <<= (baseDirectory) apply (_ / "src" / "compiler"),
+ resourceDirectory in Compile <<= baseDirectory apply (_ / "src" / "compiler"),
+ unmanagedSourceDirectories in Compile <+= (baseDirectory) apply (_ / "src" / "msil"),
+ defaultExcludes := ("tests"),
+ defaultExcludes in unmanagedResources := "*.scala",
+ resourceGenerators in Compile <+= (resourceManaged, Versions.scalaVersions, skip in Compile, streams) map Versions.generateVersionPropertiesFile("compiler.properties"),
+ // Note, we might be able to use the default task, but for some reason ant was filtering files out. Not sure what's up, but we'll
+ // stick with that for now.
+ unmanagedResources in Compile <<= (baseDirectory) map {
+ (bd) =>
+ val dirs = Seq(bd / "src" / "compiler")
+ dirs.descendentsExcept( ("*.xml" | "*.html" | "*.gif" | "*.png" | "*.js" | "*.css" | "*.tmpl" | "*.swf" | "*.properties" | "*.txt"),"*.scala").get
+ },
+ // TODO - Use depends on *and* SBT's magic dependency mechanisms...
+ unmanagedClasspath in Compile <<= Seq(forkjoin, library, reflect, fjbg, jline, asm).map(exportedProducts in Compile in _).join.map(_.flatten),
+ externalDeps,
+ referenceScala
+ )
+
+ // Return the generated projects.
+ (library, reflect, compiler)
+ }
+
+}
diff --git a/project/Packaging.scala b/project/Packaging.scala
new file mode 100644
index 0000000..eb4e69f
--- /dev/null
+++ b/project/Packaging.scala
@@ -0,0 +1,129 @@
+import sbt._
+import Keys._
+import ScalaBuildKeys._
+
+/** All the settings related to *packaging* the built scala software. */
+trait Packaging { self: ScalaBuild.type =>
+
+ // --------------------------------------------------------------
+ // Packaging a distro
+ // --------------------------------------------------------------
+ lazy val scalaDistSettings: Seq[Setting[_]] = Seq(
+ crossPaths := false,
+ target <<= (baseDirectory, name) apply (_ / "target" / _),
+ scalaSource in Compile <<= (baseDirectory, name) apply (_ / "src" / _),
+ autoScalaLibrary := false,
+ unmanagedJars in Compile := Seq(),
+ genBinRunner <<= (fullClasspath in quickComp in Runtime) map (new ScalaToolRunner(_)),
+ binDir <<= target(_/"bin"),
+ genBin <<= genBinTask(genBinRunner, binDir, fullClasspath in Runtime, false),
+ binDir in genBinQuick <<= baseDirectory apply (_ / "target" / "bin"),
+ // Configure the classpath this way to avoid having .jar files and previous layers on the classpath.
+ fullClasspath in Runtime in genBinQuick <<= Seq(quickComp,quickLib,scalap,actors,swing,fjbg,jline,forkjoin).map(classDirectory in Compile in _).join.map(Attributed.blankSeq),
+ fullClasspath in Runtime in genBinQuick <++= (fullClasspath in Compile in jline),
+ genBinQuick <<= genBinTask(genBinRunner, binDir in genBinQuick, fullClasspath in Runtime in genBinQuick, true),
+ runManmakerMan <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitManPage", "man1", ".1"),
+ runManmakerHtml <<= runManmakerTask(fullClasspath in Runtime in manmaker, runner in manmaker, "scala.tools.docutil.EmitHtml", "doc", ".html"),
+ // TODO - We could *really* clean this up in many ways. Let's look into making a a Seq of "direct jars" (scalaLibrary, scalaCompiler, jline, scalap)
+ // a seq of "plugin jars" (continuationsPlugin) and "binaries" (genBin) and "documentation" mappings (genBin) that this can aggregate.
+ // really need to figure out a better way to pull jline + jansi.
+ makeDistMappings <<= (genBin,
+ runManmakerMan,
+ runManmakerHtml,
+ packageBin in scalaLibrary in Compile,
+ packageBin in scalaCompiler in Compile,
+ packageBin in jline in Compile,
+ packageBin in continuationsPlugin in Compile,
+ managedClasspath in jline in Compile,
+ packageBin in scalap in Compile) map {
+ (binaries, man, html, lib, comp, jline, continuations, jlineDeps, scalap) =>
+ val jlineDepMap: Seq[(File, String)] = jlineDeps.map(_.data).flatMap(_ x Path.flat) map { case(a,b) => a -> ("lib/"+b) }
+ binaries ++ man ++ html ++ jlineDepMap ++ Seq(
+ lib -> "lib/scala-library.jar",
+ comp -> "lib/scala-compiler.jar",
+ jline -> "lib/jline.jar",
+ continuations -> "misc/scala-devel/plugins/continuations.jar",
+ scalap -> "lib/scalap.jar"
+ )
+ },
+ // Add in some more dependencies
+ makeDistMappings <+= (packageBin in swing in Compile) map (s => s -> "lib/scala-swing.jar"),
+ makeDistMappings <+= (packageBin in scalaReflect in Compile) map (s => s -> "lib/scala-reflect.jar"),
+ makeDist <<= (makeDistMappings, baseDirectory, streams) map { (maps, dir, s) =>
+ s.log.debug("Map = " + maps.mkString("\n"))
+ val file = dir / "target" / "scala-dist.zip"
+ IO.zip(maps, file)
+ s.log.info("Created " + file.getAbsolutePath)
+ file
+ },
+ makeExplodedDist <<= (makeDistMappings, target, streams) map { (maps, dir, s) =>
+ def sameFile(f: File, f2: File) = f.getCanonicalPath == f2.getCanonicalPath
+ IO.createDirectory(dir)
+ IO.copy(for {
+ (file, name) <- maps
+ val file2 = dir / name
+ if !sameFile(file,file2)
+ } yield (file, file2))
+ // Hack to make binaries be executable. TODO - Fix for JDK 5 and below...
+ maps map (_._2) filter (_ startsWith "bin/") foreach (dir / _ setExecutable true)
+ dir
+ }
+ )
+ lazy val scaladist = (
+ Project("dist", file("."))
+ settings (scalaDistSettings: _*)
+ )
+
+
+// Helpers to make a distribution
+
+ /** Generates runner scripts for distribution. */
+ def genBinTask(
+ runner: ScopedTask[ScalaToolRunner],
+ outputDir: ScopedSetting[File],
+ classpath: ScopedTask[Classpath],
+ useClasspath: Boolean
+ ): Project.Initialize[sbt.Task[Seq[(File,String)]]] = {
+ (runner, outputDir, classpath, streams) map { (runner, outDir, cp, s) =>
+ IO.createDirectory(outDir)
+ val classToFilename = Seq(
+ "scala.tools.nsc.MainGenericRunner" -> "scala",
+ "scala.tools.nsc.Main" -> "scalac",
+ "scala.tools.nsc.ScalaDoc" -> "scaladoc",
+ "scala.tools.nsc.CompileClient" -> "fsc",
+ "scala.tools.scalap.Main" -> "scalap"
+ )
+ if (useClasspath) {
+ val classpath = Build.data(cp).map(_.getCanonicalPath).distinct.mkString(",")
+ s.log.debug("Setting classpath = " + classpath)
+ runner setClasspath classpath
+ }
+ def genBinFiles(cls: String, dest: File) = {
+ runner.setClass(cls)
+ runner.setFile(dest)
+ runner.execute()
+ // TODO - Mark generated files as executable (755 or a+x) that is *not* JDK6 specific...
+ dest.setExecutable(true)
+ }
+ def makeBinMappings(cls: String, binName: String): Seq[(File,String)] = {
+ val file = outDir / binName
+ val winBinName = binName + ".bat"
+ genBinFiles(cls, file)
+ Seq( file -> ("bin/"+binName), outDir / winBinName -> ("bin/"+winBinName) )
+ }
+ classToFilename.flatMap((makeBinMappings _).tupled)
+ }
+ }
+ /** Creates man pages for distribution. */
+ def runManmakerTask(classpath: ScopedTask[Classpath], scalaRun: ScopedTask[ScalaRun], mainClass: String, dir: String, ext: String): Project.Initialize[Task[Seq[(File,String)]]] =
+ (classpath, scalaRun, streams, target) map { (cp, runner, s, target) =>
+ val binaries = Seq("fsc", "scala", "scalac", "scaladoc", "scalap")
+ binaries map { bin =>
+ val file = target / "man" / dir / (bin + ext)
+ val classname = "scala.man1." + bin
+ IO.createDirectory(file.getParentFile)
+ toError(runner.run(mainClass, Build.data(cp), Seq(classname, file.getAbsolutePath), s.log))
+ file -> ("man/" + dir + "/" + bin + ext)
+ }
+ }
+}
diff --git a/project/Partest.scala b/project/Partest.scala
new file mode 100644
index 0000000..fbb0a2a
--- /dev/null
+++ b/project/Partest.scala
@@ -0,0 +1,141 @@
+import sbt._
+
+import Build._
+import Keys._
+import Project.Initialize
+import complete._
+import scala.collection.{ mutable, immutable }
+
+/** Task keys and settings for running the partest test suite from sbt. */
+object partest {
+
+ /** The key for the run-partest task that exists in Scala's test suite. */
+ lazy val runPartest = TaskKey[Unit]("run-partest", "Runs the partest test suite against the quick.")
+ lazy val runPartestSingle = InputKey[Unit]("run-partest-single", "Runs a single partest test against quick.")
+ lazy val runPartestFailed = TaskKey[Unit]("run-partest-failed", "Runs failed partest tests.")
+ lazy val runPartestGrep = InputKey[Unit]("run-partest-grep", "Runs a single partest test against quick.")
+ lazy val partestRunner = TaskKey[PartestRunner]("partest-runner", "Creates a runner that can run partest suites")
+ lazy val partestTests = TaskKey[Map[String, Seq[File]]]("partest-tests", "Creates a map of test-type to a sequence of the test files/directoryies to test.")
+ lazy val partestDirs = SettingKey[Map[String,File]]("partest-dirs", "The map of partest test type to directory associated with that test type")
+
+ lazy val partestTaskSettings: Seq[Setting[_]] = Seq(
+ javaOptions in partestRunner := Seq("-Xmx512M -Xms256M"),
+ partestDirs <<= baseDirectory apply { bd =>
+ partestTestTypes map (kind => kind -> (bd / "test" / "files" / kind)) toMap
+ },
+ partestRunner <<= partestRunnerTask(fullClasspath in Runtime, javaOptions in partestRunner),
+ partestTests <<= partestTestsTask(partestDirs),
+ runPartest <<= runPartestTask(partestRunner, partestTests, scalacOptions in Test),
+ runPartestSingle <<= runSingleTestTask(partestRunner, partestDirs, scalacOptions in Test),
+ runPartestFailed <<= runPartestTask(partestRunner, partestTests, scalacOptions in Test, Seq("--failed"))
+ )
+
+ // What's fun here is that we want "*.scala" files *and* directories in the base directory...
+ def partestResources(base: File, testType: String): PathFinder = testType match {
+ case "res" => base ** "*.res"
+ case "buildmanager" => base * "*"
+ // TODO - Only allow directories that have "*.scala" children...
+ case _ => base * "*" filter { f => !f.getName.endsWith(".obj") && (f.isDirectory || f.getName.endsWith(".scala")) }
+ }
+ lazy val partestTestTypes = Seq("run", "jvm", "pos", "neg", "buildmanager", "res", "shootout", "scalap", "specialized", "presentation", "scalacheck")
+
+ // TODO - Figure out how to specify only a subset of resources...
+ def partestTestsTask(testDirs: ScopedSetting[Map[String,File]]): Project.Initialize[Task[Map[String, Seq[File]]]] =
+ testDirs map (m => m map { case (kind, dir) => kind -> partestResources(dir, kind).get })
+
+ // TODO - Split partest task into Configurations and build a Task for each Configuration.
+ // *then* mix all of them together for run-testsuite or something clever like this.
+ def runPartestTask(runner: ScopedTask[PartestRunner], testRuns: ScopedTask[Map[String,Seq[File]]], scalacOptions: ScopedTask[Seq[String]], extraArgs: Seq[String] = Seq()): Initialize[Task[Unit]] = {
+ (runner, testRuns, scalacOptions, streams) map {
+ (runner, runs, scalaOpts, s) => runPartestImpl(runner, runs, scalaOpts, s, extraArgs)
+ }
+ }
+ private def runPartestImpl(runner: PartestRunner, runs: Map[String, Seq[File]], scalacOptions: Seq[String], s: TaskStreams, extras: Seq[String] = Seq()): Unit = {
+ val testArgs = runs.toSeq collect { case (kind, files) if files.nonEmpty => Seq("-" + kind, files mkString ",") } flatten
+ val extraArgs = scalacOptions flatMap (opt => Seq("-scalacoption", opt))
+
+ import collection.JavaConverters._
+ val results = runner run Array(testArgs ++ extraArgs ++ extras: _*) asScala
+ // TODO - save results
+ val failures = results collect {
+ case (path, "FAIL") => path + " [FAILED]"
+ case (path, "TIMEOUT") => path + " [TIMEOUT]"
+ }
+
+ if (failures.isEmpty)
+ s.log.info(""+results.size+" tests passed.")
+ else {
+ failures foreach (s.log error _)
+ error("Test Failures! ("+failures.size+" of "+results.size+")")
+ }
+ }
+
+ def convertTestsForAutoComplete(tests: Map[String, Seq[File]]): (Set[String], Set[String]) =
+ (tests.keys.toSet, tests.values flatMap (_ map cleanFileName) toSet)
+
+  /** Takes a test file, as sent to Partest, and cleans it up for auto-complete */
+ def cleanFileName(file: File): String = {
+ // TODO - Something intelligent here
+ val TestPattern = ".*/test/(.*)".r
+ file.getCanonicalPath match {
+ case TestPattern(n) => n
+ case _ => file.getName
+ }
+ }
+
+ // TODO - Allow a filter for the second part of this...
+ def runSingleTestParser(testDirs: Map[String, File]): State => Parser[(String, String)] = {
+ import DefaultParsers._
+ state => {
+ Space ~> token(NotSpace examples testDirs.keys.toSet) flatMap { kind =>
+ val files: Set[String] = testDirs get kind match {
+ case Some(dir) =>
+ partestResources(dir, kind).get flatMap (_ relativeTo dir) map (_ getName) toSet
+ case _ =>
+ Set()
+ }
+ Space ~> token(NotSpace examples files) map (kind -> _)
+ }
+ }
+ }
+
+ def runSingleTestTask(runner: ScopedTask[PartestRunner], testDirs: ScopedSetting[Map[String, File]], scalacOptions: ScopedTask[Seq[String]]) : Initialize[InputTask[Unit]] = {
+ import sbinary.DefaultProtocol._
+
+ InputTask(testDirs apply runSingleTestParser) { result =>
+ (runner, result, testDirs, scalacOptions, streams) map {
+ case (r, (kind, filter), dirs, o, s) =>
+ // TODO - Use partest resources somehow to filter the filter correctly....
+ val files: Seq[File] =
+ if (filter == "*") partestResources(dirs(kind), kind).get
+ else (dirs(kind) * filter).get
+
+ runPartestImpl(r, Map(kind -> files), o, s)
+ }
+ }
+ }
+
+ def partestRunnerTask(classpath: ScopedTask[Classpath], javacOptions: TaskKey[Seq[String]]): Project.Initialize[Task[PartestRunner]] =
+ (classpath, javacOptions) map ((cp, opts) => new PartestRunner(Build.data(cp), opts mkString " "))
+}
+
+class PartestRunner(classpath: Seq[File], javaOpts: String) {
+ // Classloader that does *not* have this as parent, for differing Scala version.
+ lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.toURI.toURL).toArray, null)
+ lazy val (mainClass, mainMethod) = try {
+ val c = classLoader.loadClass("scala.tools.partest.nest.SBTRunner")
+ val m = c.getMethod("mainReflect", classOf[Array[String]])
+ (c,m)
+ }
+ lazy val classPathArgs = Seq("-cp", classpath.map(_.getAbsoluteFile).mkString(java.io.File.pathSeparator))
+ def run(args: Array[String]): java.util.Map[String,String] = try {
+ // TODO - undo this settings after running. Also globals are bad.
+ System.setProperty("partest.java_opts", javaOpts)
+ val allArgs = (classPathArgs ++ args).toArray
+ mainMethod.invoke(null, allArgs).asInstanceOf[java.util.Map[String,String]]
+ } catch {
+ case e =>
+ //error("Could not run Partest: " + e)
+ throw e
+ }
+}
diff --git a/project/Release.scala b/project/Release.scala
new file mode 100644
index 0000000..feab8bd
--- /dev/null
+++ b/project/Release.scala
@@ -0,0 +1,30 @@
+import sbt._
+import Keys._
+
+object Release {
+
+ // TODO - Just make the STARR artifacts and dump the sha1 files.
+
+ val starrLibs = Seq("scala-library.jar", "scala-reflect.jar", "scala-compiler.jar", "jline.jar")
+
+ val pushStarr = Command.command("new-starr") { (state: State) =>
+ /*val extracted = Project.extract(state)
+ import extracted._
+ // First run tests
+ val (s1, result) = runTask(test in Test, state)
+ // If successful, package artifacts
+ val (s2, distDir) = runTask(makeExplodedDist, s1)
+ // Then copy new libs in place
+ val bd = extracted get baseDirectory
+ for {
+ jarName <- starrLibs
+ jar = distDir / "lib" / jarName
+ if jar.exists
+ } IO.copyFile(jar, bd / "lib" / jarName)
+ // Invalidate SHA1 files.
+ ShaResolve.removeInvalidShaFiles(bd)
+ // Now run tests *again*?
+ s2*/
+ state
+ }
+}
diff --git a/project/RemoteDependencies.scala b/project/RemoteDependencies.scala
new file mode 100644
index 0000000..705b9dc
--- /dev/null
+++ b/project/RemoteDependencies.scala
@@ -0,0 +1,53 @@
+import sbt._
+import Keys._
+import ScalaBuildKeys._
+
+
+object RemoteDependencies {
+ def buildSettings(externalProjects: Set[URI], localScala: Setting[_]): Seq[Setting[_]] = Seq(
+ commands += Command.command("fix-uri-projects") { (state: State) =>
+ if(state.get(buildFixed) getOrElse false) state
+ else {
+ // TODO -fix up scalacheck's dependencies!
+ val extracted = Project.extract(state)
+ import extracted._
+ val scalaVersionString = extracted get version
+
+ def fix(s: Setting[_]): Setting[_] = s match {
+ case ScopedExternalSetting(p, scalaInstance.key, setting) if externalProjects(p) => localScala mapKey Project.mapScope(_ => s.key.scope)
+ // TODO - Fix Actors dependency...
+ //case ScopedExternalSetting(p, libraryDependencies.key, setting) if externalProjects(p) => fixProjectDeps(s)
+ case s => s
+ }
+ val transformed = session.mergeSettings map ( s => fix(s) )
+ val scopes = transformed collect { case ScopedExternalSetting(p, _, s) if externalProjects(p) => s.key.scope } toSet
+ // Create some fixers so we don't download scala or rely on it.
+ // Also add dependencies that disappear in 2.10 for now...
+ val fixers = for { scope <- scopes
+ setting <- Seq(autoScalaLibrary := false,
+ crossPaths := false,
+ scalaVersion := scalaVersionString)
+ } yield setting mapKey Project.mapScope(_ => scope)
+ val newStructure = Load.reapply(transformed ++ fixers, structure)
+ Project.setProject(session, newStructure, state).put(buildFixed, true)
+ }
+ },
+ onLoad in Global <<= (onLoad in Global) apply (_ andThen { (state: State) =>
+ "fix-uri-projects" :: state
+ })
+ )
+}
+
+
+
+/** Matcher to make updated remote project references easier. */
+object ScopedExternalSetting {
+ def unapply[T](s: Setting[_]): Option[(URI, AttributeKey[_], Setting[_])] =
+ s.key.scope.project match {
+ case Select(p @ ProjectRef(uri, _)) => Some((uri, s.key.key, s))
+ case _ => None
+ }
+}
+
+
+
diff --git a/project/Sametest.scala b/project/Sametest.scala
new file mode 100644
index 0000000..6f12eb2
--- /dev/null
+++ b/project/Sametest.scala
@@ -0,0 +1,63 @@
+import sbt._
+
+import Build._
+import Keys._
+
+// This code is adapted from scala.tools.ant.Same by Gilles Dubochet.
+object SameTest {
+
+ def checkSameBinaryProjects(lhs: Project, rhs: Project): Project.Initialize[Task[Unit]] =
+ (classDirectory in Compile in lhs, classDirectory in Compile in rhs,
+ compile in Compile in lhs, compile in Compile in rhs, streams) map { (lhs,rhs, _, _, s) =>
+ // Now we generate a complete set of relative files and then
+ def relativeClasses(dir: File) = (dir ** "*.class").get.flatMap(IO.relativize(dir,_).toList)
+ // This code adapted from SameTask in the compiler.
+ def hasDifferentFiles(filePairs: Seq[(File,File)]): Boolean = {
+ filePairs exists { case (a,b) =>
+ if (!a.canRead || !b.canRead) {
+ s.log.error("Either ["+a+"] or ["+b+"] is missing.")
+ true
+ } else {
+ s.log.debug("Checking for binary differences in ["+a+"] against ["+b+"].")
+ val diff = !checkSingleFilePair(a,b)
+ if(diff) s.log.error("["+a+"] differs from ["+b+"]")
+ diff
+ }
+ }
+ }
+ val allClassMappings = (relativeClasses(lhs) ++ relativeClasses(rhs)).distinct
+ val comparisons = allClassMappings.map(f => new File(lhs, f) -> new File(rhs, f))
+ val result = hasDifferentFiles(comparisons)
+ if (result) error("Binary artifacts differ.")
+ }
+
+ val bufferSize = 1024
+
+ // Tests whether two files are binary equivalents of each other.
+ def checkSingleFilePair(originFile: File, destFile: File): Boolean = {
+ Using.fileInputStream(originFile) { originStream =>
+ Using.fileInputStream(destFile) { destStream =>
+ val originBuffer = new Array[Byte](bufferSize)
+ val destBuffer = new Array[Byte](bufferSize)
+ var equalNow = true
+ var originRemaining = originStream.read(originBuffer)
+ var destRemaining = destStream.read(destBuffer)
+ while (originRemaining > 0 && equalNow) {
+ if (originRemaining == destRemaining) {
+ for (idx <- 0 until originRemaining) {
+ equalNow = equalNow && (originBuffer(idx) == destBuffer(idx))
+ }
+ } else {
+ equalNow = false
+ }
+ originRemaining = originStream.read(originBuffer)
+ destRemaining = destStream.read(destBuffer)
+ }
+ if (destRemaining > 0) equalNow = false
+ equalNow
+ }
+ }
+ }
+
+
+}
diff --git a/project/ScalaBuildKeys.scala b/project/ScalaBuildKeys.scala
new file mode 100644
index 0000000..9e495de
--- /dev/null
+++ b/project/ScalaBuildKeys.scala
@@ -0,0 +1,23 @@
+import sbt._
+import Keys._
+
+object ScalaBuildKeys {
+ val lockerLock = TaskKey[Unit]("locker-lock", "Locks the locker layer of the compiler build such that it won't rebuild on changed source files.")
+ val lockerUnlock = TaskKey[Unit]("locker-unlock", "Unlocks the locker layer of the compiler so that it will be recompiled on changed source files.")
+ val lockFile = SettingKey[File]("lock-file", "Location of the lock file compiling this project.")
+ val lock = TaskKey[Unit]("lock", "Locks this project so it won't be recompiled.")
+ val unlock = TaskKey[Unit]("unlock", "Unlocks this project so it will be recompiled.")
+ val makeDist = TaskKey[File]("make-dist", "Creates a mini-distribution (scala home directory) for this build in a zip file.")
+ val makeExplodedDist = TaskKey[File]("make-exploded-dist", "Creates a mini-distribution (scala home directory) for this build in a directory.")
+ val makeDistMappings = TaskKey[Seq[(File, String)]]("make-dist-mappings", "Creates distribution mappings for creating zips,jars,directorys,etc.")
+ val buildFixed = AttributeKey[Boolean]("build-uri-fixed")
+ val genBinRunner = TaskKey[ScalaToolRunner]("gen-bin-runner", "Creates a utility to generate script files for Scala.")
+ val genBin = TaskKey[Seq[(File,String)]]("gen-bin", "Creates script files for Scala distribution.")
+ val binDir = SettingKey[File]("binaries-directory", "Directory where binary scripts will be located.")
+ val genBinQuick = TaskKey[Seq[(File,String)]]("gen-quick-bin", "Creates script files for testing against current Scala build classfiles (not local dist).")
+ val runManmakerMan = TaskKey[Seq[(File,String)]]("make-man", "Runs the man maker project to generate man pages")
+ val runManmakerHtml = TaskKey[Seq[(File,String)]]("make-html", "Runs the man maker project to generate html pages")
+ val checkSame = TaskKey[Unit]("check-same-binaries", "checks whether or not the class files generated by scala are the same.")
+ val checkSameLibrary = TaskKey[Unit]("check-same-lib-binaries", "checks whether or not the librayr class files generated by scala are the same.")
+ val checkSameCompiler = TaskKey[Unit]("check-same-comp-binaries", "checks whether or not the compiler class files generated by scala are the same.")
+}
diff --git a/project/ScalaToolRunner.scala b/project/ScalaToolRunner.scala
new file mode 100644
index 0000000..d7338a5
--- /dev/null
+++ b/project/ScalaToolRunner.scala
@@ -0,0 +1,21 @@
+import sbt._
+import Keys._
+
+/** Reflection helper that runs ScalaTool.
+ * TODO - When SBT is on 2.10.x try to use Dynamic + Reflection. COULD BE FUN.
+ */
+class ScalaToolRunner(classpath: Classpath) {
+ // TODO - Don't use the ant task directly...
+ lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.data.toURI.toURL).toArray, null)
+ lazy val mainClass = classLoader.loadClass("scala.tools.ant.ScalaTool")
+ lazy val executeMethod = mainClass.getMethod("execute")
+ lazy val setFileMethod = mainClass.getMethod("setFile", classOf[java.io.File])
+ lazy val setClassMethod = mainClass.getMethod("setClass", classOf[String])
+ lazy val setClasspathMethod = mainClass.getMethod("setClassPath", classOf[String])
+ lazy val instance = mainClass.newInstance()
+
+ def setClass(cls: String): Unit = setClassMethod.invoke(instance, cls)
+ def setFile(file: File): Unit = setFileMethod.invoke(instance, file)
+ def setClasspath(cp: String): Unit = setClasspathMethod.invoke(instance, cp)
+ def execute(): Unit = executeMethod.invoke(instance)
+}
diff --git a/project/ShaResolve.scala b/project/ShaResolve.scala
new file mode 100644
index 0000000..cea2b2d
--- /dev/null
+++ b/project/ShaResolve.scala
@@ -0,0 +1,147 @@
+import sbt._
+
+import Build._
+import Keys._
+import Project.Initialize
+import scala.collection.{ mutable, immutable }
+import scala.collection.parallel.CompositeThrowable
+import java.security.MessageDigest
+
+case class Credentials(user: String, pw: String)
+
+/** Helpers to resolve SHA artifacts from typesafe repo. */
+object ShaResolve {
+ import dispatch.{Http,url}
+ val remote_urlbase="http://typesafe.artifactoryonline.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
+
+ val pullBinaryLibs = TaskKey[Unit]("pull-binary-libs", "Pulls binary libs by the SHA key.")
+ val pushBinaryLibs = TaskKey[Unit]("push-binary-libs", "Pushes binary libs whose SHA has changed.")
+ val binaryLibCache = SettingKey[File]("binary-lib-cache", "Location of the cache of binary libs for this scala build.")
+
+ def settings: Seq[Setting[_]] = Seq(
+ binaryLibCache in ThisBuild := file(System.getProperty("user.home")) / ".sbt" / "cache" / "scala",
+ pullBinaryLibs in ThisBuild <<= (baseDirectory, binaryLibCache, streams) map resolveLibs,
+ pushBinaryLibs in ThisBuild <<= (baseDirectory, streams) map getCredentialsAndPushFiles
+ )
+
+ def resolveLibs(dir: File, cacheDir: File, s: TaskStreams): Unit = loggingParallelExceptions(s) {
+ val files = (dir / "test" / "files" ** "*.desired.sha1") +++ (dir / "lib" ** "*.desired.sha1")
+ for {
+ (file, name) <- (files x relativeTo(dir)).par
+ uri = name.dropRight(13).replace('\\', '/')
+ jar = dir / uri
+ if !jar.exists || !isValidSha(file)
+ sha = getShaFromShafile(file)
+ } pullFile(jar, sha + "/" + uri, cacheDir, sha, s)
+ }
+
+ /** This method removes all SHA1 files that don't match their corresponding JAR. */
+ def removeInvalidShaFiles(dir: File): Unit = {
+ val files = (dir / "test" / "files" ** "*.desired.sha1") +++ (dir / "lib" ** "*.desired.sha1")
+ for {
+ (file, name) <- (files x relativeTo(dir)).par
+ uri = name.dropRight(13).replace('\\', '/')
+ jar = dir / uri
+ if !jar.exists || !isValidSha(file)
+ } IO.delete(jar)
+ }
+ def getCredentials: Credentials = System.out.synchronized {
+ val user = (SimpleReader.readLine("Please enter your STARR username> ") getOrElse error("No username provided."))
+ val password = (SimpleReader.readLine("Please enter your STARR password> ", Some('*')) getOrElse error("No password provided."))
+ Credentials(user, password)
+ }
+
+ def getCredentialsAndPushFiles(dir: File, s: TaskStreams): Unit =
+ pushFiles(dir, getCredentials, s)
+
+ def pushFiles(dir: File, cred: Credentials, s: TaskStreams): Unit = loggingParallelExceptions(s) {
+ val files = (dir / "test" / "files" ** "*.jar") +++ (dir / "lib" ** "*.jar")
+ for {
+ (jar, name) <- (files x relativeTo(dir)).par
+ shafile = dir / (name + ".desired.sha1")
+ if !shafile.exists || !isValidSha(shafile)
+ } pushFile(jar, name, cred, s)
+ }
+
+ @inline final def loggingParallelExceptions[U](s: TaskStreams)(f: => U): U = try f catch {
+ case t: CompositeThrowable =>
+ s.log.error("Error during parallel execution, GET READY FOR STACK TRACES!!")
+ t.throwables foreach (t2 => s.log.trace(t2))
+ throw t
+ }
+
+ // TODO - Finish this publishing aspect.
+
+ def getShaFromShafile(file: File): String = parseShaFile(file)._2
+
+ // This should calculate the SHA sum of a file the same as the linux process.
+ def calculateSha(file: File): String = {
+ val digest = MessageDigest.getInstance("SHA1")
+ val in = new java.io.FileInputStream(file);
+ val buffer = new Array[Byte](8192)
+ try {
+ def read(): Unit = in.read(buffer) match {
+ case x if x <= 0 => ()
+ case size => digest.update(buffer, 0, size); read()
+ }
+ read()
+ } finally in.close()
+ val sha = convertToHex(digest.digest())
+ sha
+ }
+
+ def convertToHex(data: Array[Byte]): String = {
+ def byteToHex(b: Int) =
+ if ((0 <= b) && (b <= 9)) ('0' + b).toChar
+ else ('a' + (b-10)).toChar
+ val buf = new StringBuffer
+ for (i <- 0 until data.length) {
+ buf append byteToHex((data(i) >>> 4) & 0x0F)
+ buf append byteToHex(data(i) & 0x0F)
+ }
+ buf.toString
+ }
+ // Parses a sha file into a file and a sha.
+ def parseShaFile(file: File): (File, String) =
+ IO.read(file).split("\\s") match {
+ case Array(sha, filename) if filename.startsWith("?") => (new File(file.getParentFile, filename.drop(1)), sha)
+ case Array(sha, filename) => (new File(file.getParentFile, filename), sha)
+ case _ => error(file.getAbsolutePath + " is an invalid sha file")
+ }
+
+
+ def isValidSha(file: File): Boolean =
+ try {
+ val (jar, sha) = parseShaFile(file)
+ jar.exists && calculateSha(jar) == sha
+ } catch {
+ case t: Exception => false
+ }
+
+
+ def pullFile(file: File, uri: String, cacheDir: File, sha: String, s: TaskStreams): Unit = {
+ val cachedFile = cacheDir / uri
+ if (!cachedFile.exists || calculateSha(cachedFile) != sha) {
+ // Ensure the directory for the cache exists.
+ cachedFile.getParentFile.mkdirs()
+ val url = remote_urlbase + "/" + uri
+ val fous = new java.io.FileOutputStream(cachedFile)
+ s.log.info("Pulling [" + cachedFile + "] to cache")
+ try Http(dispatch.url(url) >>> fous) finally fous.close()
+ }
+ s.log.info("Pulling [" + file + "] from local cache")
+ IO.copyFile(cachedFile, file)
+ }
+
+ // Pushes a file and writes the new .desired.sha1 for git.
+ def pushFile(file: File, uri: String, cred: Credentials, s: TaskStreams): Unit = {
+ val sha = calculateSha(file)
+ val url = remote_urlbase + "/" + sha + "/" + uri
+ val sender = dispatch.url(url).PUT.as(cred.user,cred.pw) <<< (file, "application/java-archive")
+ // TODO - output to logger.
+ Http(sender >>> System.out)
+ val shafile = file.getParentFile / (file.getName + ".desired.sha1")
+ IO.touch(shafile)
+ IO.write(shafile, sha + " ?" + file.getName)
+ }
+}
diff --git a/project/Testing.scala b/project/Testing.scala
new file mode 100644
index 0000000..5de7211
--- /dev/null
+++ b/project/Testing.scala
@@ -0,0 +1,41 @@
+import sbt._
+import Keys._
+import partest._
+import SameTest._
+import ScalaBuildKeys._
+
+/** All settings/projects relating to testing. */
+trait Testing { self: ScalaBuild.type =>
+
+ lazy val testsuiteSettings: Seq[Setting[_]] = compilerDependentProjectSettings ++ partestTaskSettings ++ VerifyClassLoad.settings ++ Seq(
+ unmanagedBase <<= baseDirectory / "test/files/lib",
+ fullClasspath in VerifyClassLoad.checkClassLoad <<= (fullClasspath in scalaLibrary in Runtime).identity,
+ autoScalaLibrary := false,
+ checkSameLibrary <<= checkSameBinaryProjects(quickLib, strappLib),
+ checkSameCompiler <<= checkSameBinaryProjects(quickComp, strappComp),
+ checkSame <<= (checkSameLibrary, checkSameCompiler) map ((a,b) => ()),
+ autoScalaLibrary := false
+ )
+ lazy val continuationsTestsuiteSettings: Seq[Setting[_]] = testsuiteSettings ++ Seq(
+ scalacOptions in Test <++= (exportedProducts in Compile in continuationsPlugin) map {
+ case Seq(cpDir) => Seq("-Xplugin-require:continuations", "-P:continuations:enable", "-Xplugin:"+cpDir.data.getAbsolutePath)
+ },
+ partestDirs <<= baseDirectory apply { bd =>
+ def mkFile(name: String) = bd / "test" / "files" / name
+ def mkTestType(name: String) = name.drop("continuations-".length).toString
+ Seq("continuations-neg", "continuations-run") map (t => mkTestType(t) -> mkFile(t)) toMap
+ }
+ )
+ val testsuite = (
+ Project("testsuite", file("."))
+ settings (testsuiteSettings:_*)
+ dependsOn (scalaLibrary, scalaCompiler, fjbg, partest, scalacheck)
+ )
+ val continuationsTestsuite = (
+ Project("continuations-testsuite", file("."))
+ settings (continuationsTestsuiteSettings:_*)
+ dependsOn (partest, scalaLibrary, scalaCompiler, fjbg)
+ )
+
+}
+
diff --git a/project/VerifyClassLoad.scala b/project/VerifyClassLoad.scala
new file mode 100644
index 0000000..c8eebb1
--- /dev/null
+++ b/project/VerifyClassLoad.scala
@@ -0,0 +1,46 @@
+import sbt._
+
+import Build._
+import Keys._
+
+// This is helper code to validate that generated class files will succeed in bytecode verification at class-load time.
+object VerifyClassLoad {
+ lazy val checkClassLoad: TaskKey[Unit] = TaskKey("check-class-load", "checks whether or not the class files generated by scala are deemed acceptable by classloaders.")
+ lazy val checkClassRunner: TaskKey[ClassVerifyRunner] = TaskKey("check-class-runner", "A wrapper around reflective calls to the VerifyClass class.")
+
+
+ def settings: Seq[Setting[_]] = Seq(
+ checkClassRunner <<= (fullClasspath in Runtime) map (cp => new ClassVerifyRunner(data(cp))),
+ fullClasspath in checkClassLoad := Seq(),
+ checkClassLoad <<= (checkClassRunner, fullClasspath in checkClassLoad, streams) map { (runner, dirs, s) =>
+ import collection.JavaConverters._
+ val results = runner.run(data(dirs).map(_.getAbsolutePath).toArray).asScala
+
+ s.log.info("Processed " + results.size + " classes.")
+ val errors = results.filter(_._2 != null)
+ for( (name, result) <- results; if result != null) {
+ s.log.error(name + " had error: " + result)
+ }
+ if(errors.size > 0) error("Classload validation errors encountered")
+ ()
+ }
+ )
+
+ // TODO - Use
+ class ClassVerifyRunner(classpath: Seq[File]) {
+ // Classloader that does *not* have this as parent, for differing Scala version.
+ lazy val classLoader = new java.net.URLClassLoader(classpath.map(_.toURI.toURL).toArray, null)
+ lazy val (mainClass, mainMethod) = try {
+ val c = classLoader.loadClass("scala.tools.util.VerifyClass")
+ val m = c.getMethod("run", classOf[Array[String]])
+ (c,m)
+ }
+ def run(args: Array[String]): java.util.Map[String,String] = try {
+ mainMethod.invoke(null, args).asInstanceOf[java.util.Map[String,String]]
+ } catch {
+ case e =>
+ //error("Could not run Partest: " + e)
+ throw e
+ }
+ }
+}
diff --git a/project/Versions.scala b/project/Versions.scala
new file mode 100644
index 0000000..57e274c
--- /dev/null
+++ b/project/Versions.scala
@@ -0,0 +1,142 @@
+import sbt._
+import Keys._
+import java.util.Properties
+import scala.util.control.Exception.catching
+import java.lang.{NumberFormatException => NFE}
+import java.io.FileInputStream
+import com.jsuereth.git.GitRunner
+import com.jsuereth.git.GitKeys.gitRunner
+
+case class VersionInfo(canonical: String,
+ maven: String,
+ osgi: String)
+
+/** this file is responsible for setting up Scala versioning schemes and updating all the necessary bits. */
+object Versions {
+ val buildNumberFile = SettingKey[File]("scala-build-number-file")
+ // TODO - Make this a setting?
+ val buildNumberProps = SettingKey[BaseBuildNumber]("scala-build-number-props")
+ val buildRelease = SettingKey[Boolean]("scala-build-release", "This is set to true if we're building a release.")
+ val mavenSuffix = SettingKey[String]("scala-maven-suffix", "This is set to whatever maven suffix is required.")
+
+ val gitSha = TaskKey[String]("scala-git-sha", "The sha of the current git commit.")
+ val gitDate = TaskKey[String]("scala-git-date", "The date of the current git commit.")
+
+ val mavenVersion = SettingKey[String]("scala-maven-version", "The maven version number.")
+ val osgiVersion = TaskKey[String]("scala-osgi-version", "The OSGi version number.")
+ val canonicalVersion = TaskKey[String]("scala-canonical-version", "The canonical version number.")
+
+ val scalaVersions = TaskKey[VersionInfo]("scala-version-info", "The scala versions used for this build.")
+
+
+
+ def settings: Seq[Setting[_]] = Seq(
+ buildNumberFile <<= baseDirectory apply (_ / "build.number"),
+ buildNumberProps <<= buildNumberFile apply loadBuildNumberProps,
+ buildRelease := Option(System.getProperty("build.release")) map (!_.isEmpty) getOrElse false,
+ mavenSuffix <<= buildRelease apply pickMavenSuffix,
+ mavenVersion <<= (buildNumberProps, mavenSuffix) apply makeMavenVersion,
+ gitSha <<= (gitRunner, baseDirectory, streams) map getGitSha,
+ gitDate <<= (gitRunner, baseDirectory, streams) map getGitDate,
+ osgiVersion <<= (buildNumberProps, gitDate, gitSha) map makeOsgiVersion,
+ canonicalVersion <<= (buildRelease, mavenVersion, buildNumberProps, gitDate, gitSha) map makeCanonicalVersion,
+ scalaVersions <<= (canonicalVersion, mavenVersion, osgiVersion) map VersionInfo.apply
+ )
+
+
+  /** Generates the version properties file `name` under `dir`, unless it already
+   * exists with the same canonical version number (or `skip` is set). */
+ def generateVersionPropertiesFile(name: String)(dir: File, versions: VersionInfo, skip: Boolean, s: TaskStreams): Seq[File] = {
+ // TODO - We can probably clean this up by moving caching bits elsewhere perhaps....
+ val target = dir / name
+ // TODO - Regenerate on triggers, like recompilation or something...
+ def hasSameVersion: Boolean = {
+ val props = new java.util.Properties
+ val in = new java.io.FileInputStream(target)
+ try props.load(in) finally in.close()
+ versions.canonical == (props getProperty "version.number")
+ }
+ if (!target.exists || !(skip || hasSameVersion)) {
+ makeVersionPropertiesFile(target, versions)
+ }
+ target :: Nil
+ }
+
+ // This creates the *.properties file used to determine the current version of scala at runtime. TODO - move these somewhere utility like.
+ def makeVersionPropertiesFile(f: File, versions: VersionInfo): Unit =
+ IO.write(f, "version.number = "+versions.canonical+"\n"+
+ "osgi.number = "+versions.osgi+"\n"+
+ "maven.number = "+versions.maven+"\n"+
+ "copyright.string = Copyright 2002-2013, LAMP/EPFL")
+
+ def makeCanonicalVersion(isRelease: Boolean, mvnVersion: String, base: BaseBuildNumber, gitDate: String, gitSha: String): String =
+ if(isRelease) mvnVersion
+ else {
+ val suffix = if(base.bnum > 0) "-%d".format(base.bnum) else ""
+ "%s.%s.%s%s-%s-%s" format (base.major, base.minor, base.patch, suffix, gitDate, gitSha)
+ }
+
+ def makeMavenVersion(base: BaseBuildNumber, suffix: String): String = {
+ val firstSuffix = if(base.bnum > 0) "-%d".format(base.bnum) else ""
+ "%d.%d.%d%s%s" format (base.major, base.minor, base.patch, firstSuffix, suffix)
+ }
+
+ def makeOsgiVersion(base: BaseBuildNumber, gitDate: String, gitSha: String): String = {
+ val suffix = if(base.bnum > 0) "-%d".format(base.bnum) else ""
+ "%s.%s.%s.v%s%s-%s" format (base.major, base.minor, base.patch, gitDate, suffix, gitSha)
+ }
+
+  /** Determines what the maven suffix should be for this build. */
+ def pickMavenSuffix(isRelease: Boolean): String = {
+ def default = if(isRelease) "" else "-SNAPSHOT"
+ Option(System.getProperty("maven.version.suffix")) getOrElse default
+ }
+
+ /** Loads the build.number properties file into SBT. */
+ def loadBuildNumberProps(file: File): BaseBuildNumber = {
+ val fin = new FileInputStream(file)
+ try {
+ val props = new Properties()
+ props.load(fin)
+ def getProp(name: String): Int =
+ (for {
+ v <- Option(props.getProperty(name))
+ v2 <- catching(classOf[NFE]) opt v.toInt
+ } yield v2) getOrElse sys.error("Could not convert %s to integer!" format (name))
+
+ BaseBuildNumber(
+ major=getProp("version.major"),
+ minor=getProp("version.minor"),
+ patch=getProp("version.patch"),
+ bnum =getProp("version.bnum")
+ )
+ } finally fin.close()
+ }
+
+
+ def getGitDate(git: GitRunner, baseDirectory: File, s: TaskStreams): String = {
+ val lines = getGitLines("log","-1","--format=\"%ci\"")(git,baseDirectory, s)
+ val line = if(lines.isEmpty) sys.error("Could not retreive git commit sha!") else lines.head
+ // Lines *always* start with " for some reason...
+ line drop 1 split "\\s+" match {
+ case Array(date, time, _*) => "%s-%s" format (date.replaceAll("\\-", ""), time.replaceAll(":",""))
+ case _ => sys.error("Could not parse git date: " + line)
+ }
+ }
+
+ def getGitSha(git: GitRunner, baseDirectory: File, s: TaskStreams): String = {
+ val lines = getGitLines("log","-1","--format=\"%H\"", "HEAD")(git,baseDirectory, s)
+ val line = if(lines.isEmpty) sys.error("Could not retreive git commit sha!") else lines.head
+ val noquote = if(line startsWith "\"") line drop 1 else line
+ val nog = if(noquote startsWith "g") noquote drop 1 else noquote
+ nog take 10
+ }
+
+ def getGitLines(args: String*)(git: GitRunner, baseDirectory: File, s: TaskStreams): Seq[String] =
+ git(args: _*)(baseDirectory, s.log) split "[\r\n]+"
+}
+
+
+case class BaseBuildNumber(major: Int, minor: Int, patch: Int, bnum: Int) {
+ override def toString = "BaseBuildNumber(%d.%d.%d-%d)" format (major, minor, patch, bnum)
+}
diff --git a/project/build.properties b/project/build.properties
deleted file mode 100644
index 4775404..0000000
--- a/project/build.properties
+++ /dev/null
@@ -1,11 +0,0 @@
-#Project properties
-#Sun Apr 11 14:24:47 CEST 2010
-project.name=scala
-def.scala.version=2.7.7
-sbt.version=0.7.7
-copyright=Copyright 2002-2011, LAMP/EPFL
-build.scala.versions=2.7.7
-project.initialize=false
-project.organization=ch.epfl.lamp
-partest.version.number=0.9.2
-project.version=2.8.1
diff --git a/project/build/AdditionalResources.scala b/project/build/AdditionalResources.scala
deleted file mode 100644
index d83d45b..0000000
--- a/project/build/AdditionalResources.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-import sbt._
-import java.util.jar.{Manifest}
-import java.io.{FileInputStream}
-import AdditionalResources._
-/**
- * Additional tasks that are required to obtain a complete compiler and library pair, but that are not part of the
- * compilation task. It copies additional files and generates the properties files
- * @author Grégory Moix
- */
-trait AdditionalResources {
- self : BasicLayer =>
-
- def writeProperties: Option[String] = {
- def write0(steps: List[Step]): Option[String] = steps match {
- case x :: xs => x match {
- case c: PropertiesToWrite => {
- c.writeProperties orElse write0(xs)
- }
- case _ => write0(xs)
- }
- case Nil => None
- }
- write0(allSteps.topologicalSort)
- }
-}
-
-object AdditionalResources {
- /**
- * A FileFilter that defines what are the files that will be copied
- */
- lazy val basicFilter = "*.tmpl" | "*.xml" | "*.js" | "*.css" | "*.properties" | "*.swf" | "*.png"
- implicit def stringToGlob(s: String): NameFilter = GlobFilter(s)
-}
-
-trait ResourcesToCopy {
- self : CompilationStep =>
-
- def getResources(from: Path, filter: FileFilter): PathFinder = (from ##)** filter
- def getResources(from: Path): PathFinder = getResources(from, AdditionalResources.basicFilter)
-
- def copyDestination: Path
- def filesToCopy: PathFinder
-
- def copy = {
- log.info("Copying files for "+name)
- try { FileUtilities.copy(filesToCopy.get, copyDestination, log) }
- catch { case e => Some(e.toString) }
-
- None
- }
-}
-
-trait PropertiesToWrite {
- self : CompilationStep =>
-
- def propertyList: List[(String, String)]
- def propertyDestination: Path
-
- def writeProperties: Option[String] ={
- import java.io._
- import java.util.Properties
-
- val properties = new Properties
-
- def insert(list: List[(String, String)]): Unit =
- list foreach { case (k, v) => properties.setProperty(k, v) }
-
- try {
- insert(propertyList)
- val destFile = propertyDestination.asFile
- val stream = new FileOutputStream(destFile)
- properties.store(stream, null)
- }
- catch {
- case e: Exception => Some(e.toString)
- }
- None
- }
-
-}
-
diff --git a/project/build/BasicLayer.scala b/project/build/BasicLayer.scala
deleted file mode 100644
index b333131..0000000
--- a/project/build/BasicLayer.scala
+++ /dev/null
@@ -1,296 +0,0 @@
-import sbt._
-import xsbt.ScalaInstance
-import ScalaBuildProject._
-
-/**
- * Basic tasks and configuration shared by all layers. This class regroups the configuration and behaviour
- * shared by all layers.
- * @author Grégory Moix
- */
-abstract class BasicLayer(val info: ProjectInfo, val versionNumber: String, previousLayer: Option[BasicLayer])
- extends ScalaBuildProject
- with ReflectiveProject
- with AdditionalResources
- with LayerCompilation
- with BuildInfoEnvironment
- with ForkSBT {
- layer =>
-
- // All path values must be lazy in order to avoid initialization issues (sbt way of doing things)
-
- def buildInfoEnvironmentLocation: Path = outputRootPath / ("build-"+name+".properties")
-
- val forkProperty = "scala.sbt.forked"
- def isDebug = info.logger atLevel Level.Debug
- def isForked = System.getProperty(forkProperty) != null
-
- // Support of triggered execution at project level
- override def watchPaths = info.projectPath / "src" ** ("*.scala" || "*.java" || AdditionalResources.basicFilter)
- override def dependencies = info.dependencies
-
- lazy val copyright = property[String]
- lazy val partestVersionNumber = property[Version]
-
- lazy val nextLayer: Option[BasicLayer] = None
- def packingDestination : Path = layerOutput / "pack"
- lazy val libsDestination = packingDestination/ "lib"
- lazy val packedStarrOutput = outputRootPath / "pasta"
- lazy val requiredPluginsDirForCompilation = layerOutput / "misc" / "scala-devel" / "plugins"
-
- def compilerAdditionalJars: List[Path] = Nil
- def libraryAdditionalJars: List[Path] = Nil
-
- // TASKS
-
- /**
- * Before compiling the layer, we need to check that the previous layer
- * was created correctly and compile it if necessary
- */
- lazy val startLayer = previousLayer match {
- case Some(previous) => task(None) dependsOn previous.finishLayer
- case _ => task(None)
- }
-
- def buildLayer = externalCompilation orElse writeProperties
-
- lazy val build = compile
-
- lazy val compile = task(buildLayer) dependsOn startLayer
-
- /**
- * Finish the compilation and ressources copy and generation
- * It does nothing in itself. As sbt doesn't support conditional dependencies,
- * it permit locker to override it in order to lock the layer when the compilation
- * is finished.
- */
- lazy val finishLayer: ManagedTask = task(None) dependsOn compile
-
- def cleaningList = List(
- layerOutput,
- layerEnvironment.envBackingPath,
- packingDestination
- )
-
- def cleanFiles = FileUtilities.clean(cleaningList, true, log)
-
- // We use super.task, so cleaning is done in every case, even when locked
- lazy val clean: Task = nextLayer match {
- case Some(next) => super.task(cleanFiles) dependsOn next.clean
- case _ => super.task(cleanFiles)
- }
- lazy val cleanBuild = task(cleanFiles orElse buildLayer) dependsOn startLayer
-
- // Utility methods (for quick access)
- def actorsOutput = actorsConfig.outputDirectory
- def actorsSrcDir = actorsConfig.srcDir
- def compilerOutput = compilerConfig.outputDirectory
- def compilerSrcDir = compilerConfig.srcDir
- def dbcOutput = dbcConfig.outputDirectory
- def libraryOutput = libraryConfig.outputDirectory
- def librarySrcDir = libraryConfig.srcDir
- def outputCompilerJar = compilerConfig.packagingConfig.jarDestination
- def outputLibraryJar = libraryWS.packagingConfig.jarDestination
- def outputPartestJar = partestConfig.packagingConfig.jarDestination
- def outputScalapJar = scalapConfig.packagingConfig.jarDestination
- def scalapOutput = scalapConfig.outputDirectory
- def swingOutput = swingConfig.outputDirectory
- def swingSrcDir = swingConfig.srcDir
-
- // CONFIGURATION OF THE COMPILATION STEPS
-
- /**
- * Configuration of the core library compilation
- */
- lazy val libraryConfig = new CompilationStep("library", pathLayout , log) with ResourcesToCopy with PropertiesToWrite {
- def label = "["+layer.name+"] library"
- def options: Seq[String] = Seq("-sourcepath", pathConfig.sources.absolutePath.toString)
- def dependencies = Nil
- override def classpath = super.classpath +++ forkJoinJar
-
- def copyDestination = outputDirectory
- def filesToCopy = getResources(srcDir)
-
- def propertyDestination = outputDirectory / "library.properties"
- def propertyList = ("version.number",versionNumber) :: ("copyright.string", copyright.value) :: Nil
- }
-
- /**
- * Configuration of the compiler
- */
- lazy val compilerConfig = new CompilationStep("compiler", pathLayout, log) with ResourcesToCopy with PropertiesToWrite with Packaging {
- def label = "["+layer.name+"] compiler"
- private def bootClassPath : String = {
- System.getProperty("sun.boot.class.path")
- }
- override def classpath: PathFinder = super.classpath +++ fjbgJar +++ msilJar +++ jlineJar +++ antJar +++ forkJoinJar
- def options = Seq("-bootclasspath", bootClassPath)
- def dependencies = if (minimalCompilation) libraryConfig :: Nil else libraryConfig :: actorsConfig :: dbcConfig :: swingConfig :: Nil
-
- def copyDestination = outputDirectory
- def filesToCopy = getResources(srcDir)
-
- def propertyDestination = outputDirectory / "compiler.properties"
- def propertyList = ("version.number",versionNumber) :: ("copyright.string", copyright.value) :: Nil
-
- lazy val packagingConfig = {
- import java.util.jar.Manifest
- import java.io.FileInputStream
- val manifest = new Manifest(new FileInputStream(manifestPath.asFile))
- new PackagingConfiguration(libsDestination / compilerJarName, List(outputDirectory ##), manifest , compilerAdditionalJars)
- }
- lazy val starrPackagingConfig = new PackagingConfiguration(packedStarrOutput/compilerJarName, List(outputDirectory ##))
-
- }
-
- //// ADDTIONNAL LIBRARIES ////
-
- /**
- * Config of the actors library
- */
- lazy val actorsConfig = new CompilationStep ("actors", pathLayout, log){
- def label = "["+layer.name+"] actors library"
- override def classpath: PathFinder = super.classpath +++ forkJoinJar
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: Nil
- }
-
- /**
- * Config of the dbc library
- */
- lazy val dbcConfig = new CompilationStep("dbc", pathLayout, log) with Packaging {
- def label = "["+layer.name+"] dbc library"
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: Nil
-
- lazy val packagingConfig = new PackagingConfiguration(
- libsDestination / dbcJarName,
- List(outputDirectory ##)
- )
- }
-
- /**
- * Config of the swing library
- */
- lazy val swingConfig = new CompilationStep("swing", pathLayout, log) with Packaging {
- def label = "["+layer.name+"] swing library"
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: actorsConfig :: Nil
-
- lazy val packagingConfig = new PackagingConfiguration(
- libsDestination / swingJarName,
- List(outputDirectory ##)
- )
- }
-
- ///// TOOLS CONFIGURATION ////////
-
- /**
- * Configuration of scalacheck
- */
- lazy val scalacheckConfig = new CompilationStep("scalacheck", pathLayout, log) with Packaging {
- def label = "["+layer.name+"] scalacheck"
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: compilerConfig :: actorsConfig :: Nil
-
- lazy val packagingConfig = new PackagingConfiguration(
- libsDestination / scalacheckJarName,
- List(outputDirectory ##)
- )
- }
-
- /**
- * Configuration of scalap tool
- */
- lazy val scalapConfig = new CompilationStep("scalap", pathLayout, log) with Packaging {
- def label = "["+layer.name+"] scalap"
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: compilerConfig :: Nil
-
- val decoderProperties = (srcDir ## ) / "decoder.properties"
-
- lazy val packagingConfig = new PackagingConfiguration(
- libsDestination / scalapJarName,
- List(outputDirectory ##, decoderProperties)
- )
- }
-
- /**
- * Configuration of the partest tool
- */
- lazy val partestConfig = new CompilationStep("partest", pathLayout, log) with ResourcesToCopy with PropertiesToWrite with Packaging {
- def label = "["+layer.name+"] partest"
- override def classpath: PathFinder = super.classpath +++ antJar +++ forkJoinJar
- def options: Seq[String] = Seq()
- def dependencies = libraryConfig :: compilerConfig :: scalapConfig :: actorsConfig :: Nil
-
- def copyDestination = outputDirectory
- def filesToCopy = getResources(srcDir)
-
- def propertyDestination = outputDirectory / "partest.properties"
- def propertyList = List(
- ("version.number", partestVersionNumber.value.toString),
- ("copyright.string", copyright.value)
- )
-
- lazy val packagingConfig = new PackagingConfiguration(libsDestination / partestJarName, List(outputDirectory ##))
-
- }
-
- ///// PLUGINS CONFIGURATION ////////
-
- lazy val continuationPluginConfig = {
- val config = new PathConfig {
- def projectRoot: Path = pathLayout.projectRoot
- def sources: Path = pathLayout.srcDir / "continuations" / "plugin"
- def analysis: Path = pathLayout.analysisOutput / "continuations" / "plugin"
- def output: Path = pathLayout.classesOutput / "continuations" / "plugin"
- }
-
- new CompilationStep("continuation-plugin", config, log) with ResourcesToCopy with EarlyPackaging {
- def label = "["+layer.name+"] continuation plugin"
- def dependencies = libraryConfig :: compilerConfig :: Nil
- def options = Seq()
-
- def filesToCopy = (sourceRoots ##) / "scalac-plugin.xml"
- def copyDestination = outputDirectory
- def jarContent = List(outputDirectory ##)
- lazy val packagingConfig = new PackagingConfiguration(
- requiredPluginsDirForCompilation/"continuations.jar",
- List(outputDirectory ##)
- )
- lazy val earlyPackagingConfig = new PackagingConfiguration(
- pathLayout.outputDir / "misc" / "scala-devel" / "plugins" / "continuations.jar",
- List(outputDirectory ##)
- )
- }
- }
-
- lazy val continuationLibraryConfig = {
- val config = new PathConfig {
- def projectRoot: Path = pathLayout.projectRoot
- def sources: Path = pathLayout.srcDir / "continuations" / "library"
- def analysis: Path = pathLayout.analysisOutput / "continuations" / "library"
- def output: Path = pathLayout.classesOutput / "continuations" / "library"
- }
-
- new CompilationStep("continuation-library", config, log) {
- def label = "["+layer.name+"] continuation library"
- def dependencies = libraryConfig :: compilerConfig :: continuationPluginConfig :: Nil
- def options = Seq(
- "-Xpluginsdir",
- requiredPluginsDirForCompilation.absolutePath,
- "-Xplugin-require:continuations",
- "-P:continuations:enable"
- )
- }
- }
-
- // Grouping compilation steps
- def minimalCompilation = false // It must be true for locker because we do not need to compile everything
-
- def libraryWS: WrapperStep with Packaging
- def toolsWS: WrapperStep
-
- lazy val pluginsWS = new WrapperStep(continuationPluginConfig :: continuationLibraryConfig :: Nil)
- lazy val allSteps = new WrapperStep(libraryWS :: compilerConfig :: pluginsWS :: toolsWS :: Nil)
-}
diff --git a/project/build/BuildInfoEnvironment.scala b/project/build/BuildInfoEnvironment.scala
deleted file mode 100644
index fc1c436..0000000
--- a/project/build/BuildInfoEnvironment.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-import sbt._
-trait BuildInfoEnvironment {
- self : Project =>
- def buildInfoEnvironmentLocation: Path
- /**
- * Environment for storing properties that
- * 1) need to be saved across sbt session
- * 2) Are local to a layer
- * Used to save the last version of the compiler used to build the layer (for discarding it's product if necessary)
- */
- lazy val layerEnvironment = new BasicEnvironment {
- // use the project's Logger for any properties-related logging
- def log = self.log
-
- // the properties file will be read/stored
- def envBackingPath = buildInfoEnvironmentLocation
- // define some properties
- lazy val lastCompilerVersion: Property[String] = propertyOptional[String]("")
- }
-
-}
diff --git a/project/build/Comparator.scala b/project/build/Comparator.scala
deleted file mode 100644
index 7400788..0000000
--- a/project/build/Comparator.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-import sbt._
-import java.io.{File, FileInputStream}
-
-// Based on scala.tools.ant.Same
-object Comparator {
-
- private def getMappedPath(path: Path, baseDirectory: Path): Path = {
- Path.fromString(baseDirectory, path.relativePath)
- }
-
-
- def compare(origin: Path, dest: Path, filter: Path => PathFinder, log: Logger): Option[String] = {
- log.info("Comparing the contents of "+origin.absolutePath+ " with "+dest.absolutePath)
- var allEqualNow = true
-
- def reportDiff(f1: File, f2: File) = {
- allEqualNow = false
- log.error("File '" + f1 + "' is different from correspondant.")
- }
-
- def reportMissing(f1: File) = {
- allEqualNow = false
- log.error("File '" + f1 + "' has no correspondant.")
- }
-
-
-
- val originPaths = filter(origin).get
-
- val bufferSize = 1024
- val originBuffer = new Array[Byte](bufferSize)
- val destBuffer = new Array[Byte](bufferSize)
-
- for (originPath <- originPaths.filter(! _.isDirectory)){
- log.debug("origin :" + originPath.absolutePath)
- val destPath = getMappedPath(originPath, dest)
- log.debug("dest :" + destPath.absolutePath)
- var equalNow = true
- val originFile = originPath.asFile
- val destFile = destPath.asFile
-
- if (originFile.canRead && destFile.canRead) {
-
- val originStream = new FileInputStream(originFile)
- val destStream = new FileInputStream(destFile)
- var originRemaining = originStream.read(originBuffer)
- var destRemaining = destStream.read(destBuffer)
- while (originRemaining > 0 && equalNow) {
- if (originRemaining == destRemaining)
- for (idx <- 0 until originRemaining) {
- equalNow = equalNow && (originBuffer(idx) == destBuffer(idx))}
- else
- equalNow = false
- originRemaining = originStream.read(originBuffer)
- destRemaining = destStream.read(destBuffer)
- }
- if (destRemaining > 0) equalNow = false
-
- if (!equalNow) reportDiff(originFile, destFile)
-
- originStream.close
- destStream.close
-
- }
- else reportMissing(originFile)
-
- }
- if(allEqualNow) None else Some("There were differences between "+origin.absolutePath+ " and "+ dest.absolutePath)
- }
-
-
-}
diff --git a/project/build/Compilation.scala b/project/build/Compilation.scala
deleted file mode 100644
index d581b2b..0000000
--- a/project/build/Compilation.scala
+++ /dev/null
@@ -1,104 +0,0 @@
-import sbt._
-import xsbt.{AnalyzingCompiler, ScalaInstance}
-import FileUtilities._
-
-/**
- * This trait define the compilation task.
-* @author Grégory Moix
- */
-trait Compilation {
- self : ScalaBuildProject with BuildInfoEnvironment =>
-
- def lastUsedCompilerVersion = layerEnvironment.lastCompilerVersion
-
- def instantiationCompilerJar: Path
- def instantiationLibraryJar: Path
-
- def instanceScope[A](action: ScalaInstance => A): A = {
- val instance = ScalaInstance(instantiationLibraryJar.asFile, instantiationCompilerJar.asFile, info.launcher, msilJar.asFile, fjbgJar.asFile)
- log.debug("Compiler will be instantiated by :" +instance.compilerJar +" and :" +instance.libraryJar )
- action(instance)
- }
-
- def compile(stepList: Step, clean:() => Option[String]): Option[String] = compile(stepList, Some(clean))
- def compile(stepList: Step): Option[String] = compile(stepList, None)
- /**
- * Execute the different compilation parts one after the others.
- */
- def compile(stepsList: Step, clean: Option[() => Option[String]]): Option[String] ={
-
- instanceScope[Option[String]]{ scala =>
- lazy val analyzing = new AnalyzingCompiler(scala, componentManager, xsbt.ClasspathOptions.manual, log)
-
- def compilerVersionHasChanged = lastUsedCompilerVersion.value != scala.actualVersion
-
- def checkAndClean(cleanFunction:() => Option[String]): Option[String] ={
- if (compilerVersionHasChanged) {
- log.info("The compiler version used to build this layer has changed since last time or this is a clean build.")
- lastUsedCompilerVersion.update(scala.actualVersion)
- layerEnvironment.saveEnvironment
- cleanFunction()
- } else {
- log.debug("The compiler version is unchanged. No need for cleaning.")
- None
- }
- }
-
- def compile0(steps: List[Step]): Option[String] = {
- steps foreach {
- case c: CompilationStep =>
- val conditional = new CompileConditional(c, analyzing)
- log.info("")
- val res = conditional.run orElse copy(c) orElse earlyPackaging(c)
- if (res.isDefined)
- return res
- case _ => ()
- }
- None
- }
-
- /**
- * When we finishe to compile a step we want to jar if necessary in order to
- * be able to load plugins for the associated library
- */
- def earlyPackaging(step: CompilationStep): Option[String] = step match {
- case s: EarlyPackaging => {
- val c = s.earlyPackagingConfig
- log.debug("Creating jar for plugin")
- jar(c.content.flatMap(Packer.jarPattern(_)), c.jarDestination, c.manifest, false, log)
- }
- case _ => None
- }
-
- def copy(step: CompilationStep): Option[String] = step match {
- case s: ResourcesToCopy => s.copy
- case _ => None
- }
-
- def cleanIfNecessary: Option[String] = clean match {
- case None => None
- case Some(f) => checkAndClean(f)
- }
- cleanIfNecessary orElse compile0(stepsList.topologicalSort)
- }
- }
-
-
-}
-
-trait LayerCompilation extends Compilation {
- self : BasicLayer =>
-
- protected def cleanCompilation: Option[String] = {
- log.info("Cleaning the products of the compilation.")
- FileUtilities.clean(layerOutput :: Nil, true, log)
- }
-
- /**
- * Run the actual compilation. Should not be called directly because it is executed on the same jvm and that
- * it could lead to memory issues. It is used only when launching a new sbt process to do the compilation.
- */
- lazy val compilation = task {compile(allSteps, cleanCompilation _)}
-
- def externalCompilation: Option[String] = maybeFork(compilation)
-}
diff --git a/project/build/CompilationStep.scala b/project/build/CompilationStep.scala
deleted file mode 100644
index 000dca0..0000000
--- a/project/build/CompilationStep.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-import sbt._
-import AdditionalResources._
-
-trait Step extends Dag[Step] {
- def dependencies: Iterable[Step]
-}
-
-class WrapperStep(contents: List[Step]) extends Step {
- def dependencies = contents
-}
-
-abstract class CompilationStep(val name: String, val pathConfig: PathConfig, logger: Logger) extends CompileConfiguration with Step {
- def this(name: String, layout: PathLayout, logger: Logger) = this(name, layout / name, logger)
-
- // Utility methods (for quick access, ...)
- final def srcDir = pathConfig.sources
-
- // Methods required for the compilation
- def log: Logger = logger
- final def sourceRoots : PathFinder = pathConfig.sources
- def sources: PathFinder = sourceRoots.descendentsExcept("*.java" | "*.scala", ".svn")
- final def projectPath: Path = pathConfig.projectRoot
- final def analysisPath: Path = pathConfig.analysis
- final def outputDirectory: Path = pathConfig.output
- def classpath = {
- def addDependenciesOutputTo(list: List[Step], acc: PathFinder): PathFinder = list match {
- case Nil => acc
- case x :: xs => x match {
- case c: CompilationStep => addDependenciesOutputTo(xs, acc +++ c.outputDirectory)
- case w: WrapperStep => addDependenciesOutputTo(xs, addDependenciesOutputTo(dependencies.toList, acc))
- }
- }
- addDependenciesOutputTo(dependencies.toList, outputDirectory)
- }
- def javaOptions: Seq[String] = "-target 1.5 -source 1.5 -g:none" split ' '
- def maxErrors: Int = 100
- def compileOrder = CompileOrder.JavaThenScala
- def fingerprints = Fingerprints(Nil, Nil)
-}
diff --git a/project/build/ForkSBT.scala b/project/build/ForkSBT.scala
deleted file mode 100644
index b30e35e..0000000
--- a/project/build/ForkSBT.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/** Scala SBT build
- * Copyright 2005-2010 LAMP/EPFL
- * @author Paul Phillips
- */
-
-import sbt._
-
-/** Worked out a way to fork sbt tasks, preserving all sbt command line
- * options and without hardcoding anything.
- */
-trait ForkSBT {
- self: BasicLayer =>
-
- def jvmArguments: List[String] = {
- import scala.collection.jcl.Conversions._
- import java.lang.management.ManagementFactory
- ManagementFactory.getRuntimeMXBean().getInputArguments().toList
- }
-
- private var extraJVMArgs: List[String] = Nil
- def withJVMArgs[T](args: String*)(body: => T): T = {
- val saved = extraJVMArgs
- extraJVMArgs = args.toList
- try { body }
- finally extraJVMArgs = saved
- }
-
- // Set a property in forked sbts to inhibit possible forking cycles.
- def markForked = "-D" + forkProperty + "=true"
-
- /** Forks a new process to run "sbt task task ...":
- */
- def forkTasks(tasks: String*): Boolean = {
- require (!isForked, "Tried to fork but sbt is already forked: " + tasks.mkString(" "))
-
- val sbtJar = System.getProperty("java.class.path")
- val sbtMain = "xsbt.boot.Boot" // ok, much of anything.
- val args = jvmArguments ++ Seq(markForked, "-classpath", sbtJar, sbtMain) ++ tasks
-
- log.info("Forking: " + args.mkString("java ", " ", ""))
- Fork.java(None, args, StdoutOutput) == 0
- }
- def maybeFork(task: TaskManager#Task): Option[String] = maybeFork(task, "Error during external compilation.")
- def maybeFork(task: TaskManager#Task, errorMsg: String): Option[String] = {
- if (isForked) task.run
- else if (forkTasks("project " + this.name, task.name)) None
- else Some(errorMsg)
- }
-}
diff --git a/project/build/Packer.scala b/project/build/Packer.scala
deleted file mode 100644
index 73db556..0000000
--- a/project/build/Packer.scala
+++ /dev/null
@@ -1,122 +0,0 @@
-import sbt._
-import java.io.{File, FileInputStream}
-import java.util.jar.Manifest
-import AdditionalResources._
-import FileUtilities._
-
-
-
-object Packer {
-
- /**
- * A filter that exclude files that musn't be in a jar file.
- */
- // We must exclude the manifest because we generate it automatically, and when we add multiples other jars, they could have
- // also a manifest files each, resulting in conflicts for the FileUtilities.jar(..) method
- def jarPattern(path: PathFinder) = path.descendentsExcept(AllPassFilter, (".*" - ".") || HiddenFileFilter || new ExactFilter("MANIFEST.MF")).get
-
- def createJar(j: Packaging, log: Logger): Option[String] = createJar(j.packagingConfig, log, jarPattern _, true)
- def createJar(j: PackagingConfiguration, log: Logger): Option[String] = createJar(j, log, jarPattern _, true)
-
-
- /**
- * Create a jar from the packaging trait. Is able to add directly others jars to it
- */
- def createJar(j: PackagingConfiguration, log: Logger, filter:(PathFinder) => Iterable[Path], addIncludedLibs: Boolean): Option[String] = {
- def pack0(content: Iterable[Path])= jar(content.flatMap(filter(_)), j.jarDestination, j.manifest, false, log)
-
- j.jarsToInclude match {
- case Nil => pack0(j.content)
- case list if addIncludedLibs => {
- withTemporaryDirectory(log) { tmp: File =>
- val tmpPath = Path.fromFile(tmp)
- log.debug("List of jars to be added : " +list)
- def unzip0(l: List[Path]): Option[String] = l match {
- case x :: xs => {unzip(x, tmpPath, log);unzip0(xs)} //TODO properly handle failing of unzip
- case Nil => None
- }
- unzip0(list)
- log.debug("Content of temp folder"+ tmpPath.##.**( GlobFilter("*")))
- pack0(j.content ++ Set(tmpPath ##))
- }
- }
- case _ => pack0(j.content)
-
- }
- }
-
-}
-
-/**
- * Create the jars of pack
- * @author Grégory Moix
- */
-trait Packer {
- self: BasicLayer =>
-
- def libraryToCopy: List[Path] = Nil
-
- /**
- * The actual pack task.
- */
- def packF = {
- import Packer._
- def iterate(steps: List[Step]): Option[String] = steps match {
- case x :: xs => x match {
- case c: Packaging => {
- createJar(c, log) orElse iterate(xs)
- }
- case _ => iterate(xs)
- }
- case Nil => None
- }
-
- def copy0 ={
- copyFile(manifestPath,packingDestination/"META-INF"/"MANIFEST.MF", log) orElse {
- copy(libraryToCopy, packingDestination , true, true, log) match {
- case Right(_) => None
- case Left(e) => Some(e)
- }
- }
- }
- iterate(allSteps.topologicalSort) orElse copy0
- }
- lazy val pack = task {packF}.dependsOn(finishLayer)
-}
-
-
-class PackagingConfiguration(val jarDestination: Path, val content: Iterable[Path], val manifest: Manifest, val jarsToInclude: List[Path]){
- def this(jarDestination: Path, content: Iterable[Path])= this(jarDestination, content, new Manifest, Nil)
- def this(jarDestination: Path, content: Iterable[Path], jarsToInclude: List[Path])= this(jarDestination, content, new Manifest, jarsToInclude)
-}
-
-trait Packaging extends Step {
- def packagingConfig: PackagingConfiguration
-}
-
-trait WrapperPackaging extends Packaging {
- self : WrapperStep =>
-
- def jarContent = {
- def getContent(list: List[Step], acc: List[Path]): List[Path] = list match {
- case Nil => acc
- case x :: xs => x match {
- case w: WrapperStep => getContent(xs, getContent(w.dependencies.toList, acc))
- case c: CompilationStep => getContent(xs, (c.outputDirectory ##) :: acc)
- }
- }
- getContent(dependencies.toList, Nil)
- }
-}
-
-/**
- * This trait is here to add the possiblity to have a different packing destination that is used right after the
- * compilation of the step has finished. It permits to have use libraries that are build using a plugin. (The plugin must
- * be a jar in order to be recognised by the compiler.
- */
-trait EarlyPackaging extends Packaging {
- self: CompilationStep =>
- //def earlyPackagingDestination: Path
- //def earlyJarDestination = earlyPackagingDestination / jarName
- def earlyPackagingConfig: PackagingConfiguration
-}
diff --git a/project/build/Partest.scala b/project/build/Partest.scala
deleted file mode 100644
index 7771c6f..0000000
--- a/project/build/Partest.scala
+++ /dev/null
@@ -1,370 +0,0 @@
-import sbt._
-import java.io.File
-import java.net.URLClassLoader
-import TestSet.{filter}
-
-class TestSet(val SType: TestSetType.Value, val kind: String, val description: String, val files: Array[File]){
- /**
- * @param a list of file that we want to know wheter they are members of the test set or not
- * @return two lists : the first contains files that are member of the test set, the second contains the files that aren't
- */
- def splitContent(f: List[File]):(List[File], List[File]) = {
- f.partition((f: File) => files.elements.exists((e: File) => f == e))
- }
-}
-
-object TestSet {
- def apply(sType: TestSetType.Value, kind: String, description: String, files: PathFinder)= new TestSet(sType, kind, description, filter(files))
- def filter(p: PathFinder): Array[File] =( p --- p **(HiddenFileFilter || GlobFilter("*.obj")||GlobFilter("*.log"))).getFiles.toArray
-}
-
-object TestSetType extends Enumeration {
- val Std, Continuations = Value
-}
-
-class TestConfiguration(val library: Path, val classpath: Iterable[Path], val testRoot: Path,
- val tests: List[TestSet], val junitReportDir: Option[Path]){
-}
-
-trait PartestRunner {
- self: BasicLayer with Packer =>
-
- import Partest.runTest
- import TestSetType._
-
- lazy val testRoot = projectRoot / "test"
- lazy val testFiles = testRoot / "files" ##
- lazy val testLibs = testFiles / "lib"
-
- lazy val posFilesTest = TestSet(Std,"pos", "Compiling files that are expected to build", testFiles / "pos" * ("*.scala" || DirectoryFilter))
- lazy val negFilesTest = TestSet(Std,"neg", "Compiling files that are expected to fail", testFiles / "neg" * ("*.scala" || DirectoryFilter))
- lazy val runFilesTest = TestSet(Std,"run", "Compiling and running files", testFiles / "run" * ("*.scala" || DirectoryFilter))
- lazy val jvmFilesTest = TestSet(Std,"jvm", "Compiling and running files", testFiles / "jvm" *("*.scala" || DirectoryFilter))
- lazy val resFilesTest = TestSet(Std,"res", "Running resident compiler scenarii", testFiles / "res" * ("*.res"))
- lazy val buildmanagerFilesTest = TestSet(Std,"buildmanager", "Running Build Manager scenarii", testFiles / "buildmanager" * DirectoryFilter)
- // lazy val scalacheckFilesTest = TestSet(Std,"scalacheck", "Running scalacheck tests", testFiles / "scalacheck" * ("*.scala" || DirectoryFilter))
- lazy val scriptFilesTest = TestSet(Std,"script", "Running script files", testFiles / "script" * ("*.scala"))
- lazy val shootoutFilesTest = TestSet(Std,"shootout", "Running shootout tests", testFiles / "shootout" * ("*.scala"))
- lazy val scalapFilesTest = TestSet(Std,"scalap", "Running scalap tests", testFiles / "scalap" * ("*.scala"))
- lazy val specializedFilesTest = TestSet(Std,"specialized", "Running specialized tests", testFiles / "specialized" * ("*.scala"))
-
- // lazy val negContinuationTest = TestSet(Continuations,"neg", "Compiling continuations files that are expected to fail", testFiles / "continuations-neg" * ("*.scala" || DirectoryFilter))
- // lazy val runContinuationTest = TestSet(Continuations,"run", "Compiling and running continuations files", testFiles / "continuations-run" ** ("*.scala" ))
- //
- // lazy val continuationScalaOpts = (
- // "-Xpluginsdir " +
- // continuationPluginConfig.packagingConfig.jarDestination.asFile.getParent +
- // " -Xplugin-require:continuations -P:continuations:enable"
- // )
-
- lazy val testSuiteFiles: List[TestSet] = List(
- posFilesTest, negFilesTest, runFilesTest, jvmFilesTest, resFilesTest,
- buildmanagerFilesTest,
- //scalacheckFilesTest,
- shootoutFilesTest, scalapFilesTest,
- specializedFilesTest
- )
- lazy val testSuiteContinuation: List[TestSet] = Nil // List(negContinuationTest, runContinuationTest)
-
- private lazy val filesTestMap: Map[String, TestSet] =
- Map(testSuiteFiles.map(s => (s.kind,s) ):_*)
- // + (("continuations-neg",negContinuationTest),("continuations-run", runContinuationTest))
-
- private lazy val partestOptions = List("-failed")
-
- private lazy val partestCompletionList: Seq[String] = {
- val len = testFiles.asFile.toString.length + 1
-
- filesTestMap.keys.toList ++ partestOptions ++
- (filesTestMap.values.toList flatMap (_.files) map (_.toString take len))
- }
-
- private def runPartest(tests: List[TestSet], scalacOpts: Option[String], failedOnly: Boolean) = {
-
- val config = new TestConfiguration(
- outputLibraryJar,
- (outputLibraryJar +++ outputCompilerJar +++ outputPartestJar +++ outputScalapJar +++ antJar +++ jlineJar +++ (testLibs * "*.jar")).get,
- testRoot,
- tests,
- None
- )
-
- val javaHome = Path.fromFile(new File(System.getProperty("java.home")))
- val java = Some(javaHome / "bin" / "java" asFile)
- val javac = Some(javaHome / "bin" / "javac" asFile)
- val timeout = Some("2400000")
- val loader = info.launcher.topLoader
-
- log.debug("Ready to run tests")
-
- if (tests.isEmpty) {
- log.debug("Empty test list")
- None
- }
- else runTest(
- loader, config, java, javac,
- scalacOpts, timeout, true, true,
- failedOnly, true, isDebug, log
- )
- }
-
- def partestDebugProp =
- if (isDebug) List("-Dpartest.debug=true")
- else Nil
-
- lazy val externalPartest = task { args =>
- task {
- if (isForked) partest(args).run
- else withJVMArgs(partestDebugProp ++ args: _*) {
- if (forkTasks("partest")) None
- else Some("Some tests failed.")
- }
- } dependsOn pack
- } completeWith partestCompletionList
-
- lazy val partest = task { args =>
- var failedOnly = false
-
- def setOptions(options: List[String], acc: List[String]): List[String] = options match {
- case "-failed" :: xs =>
- failedOnly = true
- log.info("Only tests that failed previously will be run")
- setOptions(xs, acc)
- case x :: xs =>
- setOptions(xs, x :: acc)
- case _ => acc
- }
-
- def resolveSets(l: List[String], rem: List[String], acc: List[TestSet]): (List[String], List[TestSet]) = {
- def searchSet(arg: String): Option[TestSet] = filesTestMap get arg
-
- l match {
- case x :: xs => searchSet(x) match {
- case Some(s) => resolveSets(xs, rem, s :: acc)
- case None => resolveSets(xs, x :: rem, acc)
- }
- case Nil => (rem, acc)
- }
- }
-
- def resolveFiles(l: List[String], sets: List[TestSet]):(List[String], List[TestSet]) = {
- def resolve0(filesToResolve: List[File], setsToSearchIn: List[TestSet], setAcc: List[TestSet]):(List[String], List[TestSet])= {
- filesToResolve match {
- case Nil => (Nil, setAcc) // If we have no files left to resolve, we can return the list of the set we have
- case list => {
- setsToSearchIn match {
- case Nil => (list.map(_.toString), setAcc)// If we already had search all sets to find a match, we return the list of the files that where problematic and the set we have
- case x :: xs => {
- val (found, notFound)= x.splitContent(list)
- if(!found.isEmpty){
- val newSet = new TestSet(x.SType, x.kind, x.description, found.toArray)
- resolve0(notFound, xs, newSet :: setAcc)
- } else {
- resolve0(notFound, xs, setAcc)
- }
- }
- }
- }
- }
-
- }
-
- resolve0(l.map(Path.fromString(testFiles, _).asFile), filesTestMap.values.toList, sets)
- }
-
- val keys = setOptions(args.toList, Nil)
-
- if (keys.isEmpty) {
- task { runPartest(testSuiteFiles, None, failedOnly) }
- }
- else {
- val (fileNames, sets) = resolveSets(keys, Nil, Nil)
- val (notFound, allSets) = resolveFiles(fileNames, sets)
- if (!notFound.isEmpty)
- log.info("Don't know what to do with : \n"+notFound.mkString("\n"))
-
- task { runPartest(allSets, None, failedOnly) }
- }
- // if (keys.length == 0) task {
- // runPartest(testSuiteFiles, None, failedOnly) orElse {
- // runPartest(testSuiteContinuation, None, failedOnly)
- // } // this is the case where there were only config options, we will run the standard test suite
- // }
- // else {
- // val (fileNames, sets) = resolveSets(keys, Nil, Nil)
- // val (notFound, allSets) = resolveFiles(fileNames, sets)
- // if (!notFound.isEmpty)
- // log.info("Don't know what to do with : \n"+notFound.mkString("\n"))
- //
- // val (std, continuations) = allSets partition (_.SType == TestSetType.Std)
- // task {
- // runPartest(std, None, failedOnly) orElse {
- // runPartest(continuations, Some(continuationScalaOpts), failedOnly)
- // }
- // }
- // }
- }.completeWith(partestCompletionList)
-
-}
-
-object Partest {
- def runTest(
- parentLoader: ClassLoader,
- config: TestConfiguration,
- javacmd: Option[File],
- javaccmd: Option[File],
- scalacOpts: Option[String],
- timeout: Option[String],
- showDiff: Boolean,
- showLog: Boolean,
- runFailed: Boolean,
- errorOnFailed: Boolean,
- debug: Boolean,
- log: Logger
- ): Option[String] = {
-
- if (debug)
- log.setLevel(Level.Debug)
-
- if (config.classpath.isEmpty)
- return Some("The classpath is empty")
-
- log.debug("Classpath is "+ config.classpath)
-
- val classloader = new URLClassLoader(
- Array(config.classpath.toSeq.map(_.asURL):_*),
- ClassLoader.getSystemClassLoader.getParent
- )
- val runner: AnyRef =
- classloader.loadClass("scala.tools.partest.nest.SBTRunner").newInstance().asInstanceOf[AnyRef]
- val fileManager: AnyRef =
- runner.getClass.getMethod("fileManager", Array[Class[_]](): _*).invoke(runner, Array[Object](): _*)
-
- val runMethod =
- runner.getClass.getMethod("reflectiveRunTestsForFiles", Array(classOf[Array[File]], classOf[String]): _*)
-
- def runTestsForFiles(kindFiles: Array[File], kind: String) = {
- val result = runMethod.invoke(runner, Array(kindFiles, kind): _*).asInstanceOf[java.util.HashMap[String, Int]]
- scala.collection.jcl.Conversions.convertMap(result)
- }
-
- def setFileManagerBooleanProperty(name: String, value: Boolean) {
- log.debug("Setting partest property :"+name+" to :"+value)
- val setMethod =
- fileManager.getClass.getMethod(name+"_$eq", Array(classOf[Boolean]): _*)
- setMethod.invoke(fileManager, Array(java.lang.Boolean.valueOf(value)).asInstanceOf[Array[Object]]: _*)
- }
-
- def setFileManagerStringProperty(name: String, value: String) {
- log.debug("Setting partest property :"+name+" to :"+value)
- val setMethod =
- fileManager.getClass.getMethod(name+"_$eq", Array(classOf[String]): _*)
- setMethod.invoke(fileManager, Array(value).asInstanceOf[Array[Object]]: _*)
- }
-
- // System.setProperty("partest.srcdir", "files")
-
- setFileManagerBooleanProperty("showDiff", showDiff)
- setFileManagerBooleanProperty("showLog", showLog)
- setFileManagerBooleanProperty("failed", runFailed)
- if (!javacmd.isEmpty)
- setFileManagerStringProperty("JAVACMD", javacmd.get.getAbsolutePath)
- if (!javaccmd.isEmpty)
- setFileManagerStringProperty("JAVAC_CMD", "javac")
- setFileManagerStringProperty("CLASSPATH", (config.classpath.map(_.absolutePath).mkString(File.pathSeparator)))
- setFileManagerStringProperty("LATEST_LIB", config.library.absolutePath)
- setFileManagerStringProperty("SCALAC_OPTS", scalacOpts getOrElse "")
-
- if (!timeout.isEmpty)
- setFileManagerStringProperty("timeout", timeout.get)
-
- type TFSet = (Array[File], String, String)
-
- val testFileSets = config.tests
-
- def resultsToStatistics(results: Iterable[(_, Int)]): (Int, Int) = {
- val (files, failures) = results map (_._2 == 0) partition (_ == true)
- def count(i: Iterable[_]): Int ={
- var c = 0
- for (elem <-i) yield {
- c = c+1
- }
- c
- }
- (count(files), count(failures))
- }
-
-
- def runSet(set: TestSet): (Int, Int, Iterable[String]) = {
- val (files, name, msg) = (set.files, set.kind, set.description)
- log.debug("["+name+"] "+ msg+files.mkString(", files :\n","\n",""))
- if (files.isEmpty) {
- log.debug("No files !")
- (0, 0, List())
- }
- else {
- log.info(name +" : "+ msg)
- val results: Iterable[(String, Int)] = runTestsForFiles(files, name)
- val (succs, fails) = resultsToStatistics(results)
-
- val failed: Iterable[String] = results.filter( _._2!=0) map(_ match {
- case (path, 1) => path + " [FAILED]"
- case (path, 2) => path + " [TIMOUT]"
- })
-
- val r =(succs, fails, failed)
-
- config.junitReportDir match {
- case Some(d) => {
- val report = testReport(name, results, succs, fails)
- scala.xml.XML.save(d/name+".xml", report)
- }
- case None =>
- }
-
- r
- }
- }
-
- val _results = testFileSets map runSet
- val allSuccesses = _results.map (_._1).foldLeft(0)( _ + _ )
- val allFailures = _results.map (_._2).foldLeft(0)( _ + _ )
- val allFailedPaths = _results flatMap (_._3)
-
- def f(msg: String): Option[String] =
- if (errorOnFailed && allFailures > 0) {
- Some(msg)
- }
- else {
- log.info(msg)
- None
- }
- def s = if (allFailures > 1) "s" else ""
- val msg =
- if (allFailures > 0) "Test suite finished with %d case%s failing.\n".format(allFailures, s)+ allFailedPaths.mkString("\n")
- else if (allSuccesses == 0) "There were no tests to run."
- else "Test suite finished with no failures."
-
- f(msg)
-
- }
-
- private def oneResult(res: (String, Int)) =
- <testcase name ={res._1}>{
- res._2 match {
- case 0 => scala.xml.NodeSeq.Empty
- case 1 => <failure message="Test failed"/>
- case 2 => <failure message="Test timed out"/>
- }
- }</testcase>
-
- private def testReport(kind: String, results: Iterable[(String, Int)], succs: Int, fails: Int) =
- <testsuite name ={kind} tests ={(succs + fails).toString} failures ={fails.toString}>
- <properties/>
- {
- results.map(oneResult(_))
- }
- </testsuite>
-
-
-}
diff --git a/project/build/PathConfig.scala b/project/build/PathConfig.scala
deleted file mode 100644
index 3ed5684..0000000
--- a/project/build/PathConfig.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-import sbt._
-
-/**
- * An abstract class for grouping all different paths that are needed to
- * compile the a CompilationStep
- * @author Grégory Moix
- */
-abstract class PathConfig {
- def projectRoot: Path
- def sources: Path
- def analysis: Path
- def output: Path
-}
-
-object PathConfig {
- val classes = "classes"
- val analysis = "analysis"
-}
-
-trait SimpleOutputLayout {
- def outputDir: Path
- lazy val classesOutput = outputDir / PathConfig.classes
- lazy val analysisOutput = outputDir / PathConfig.analysis
-
-}
-
-class PathLayout(val projectRoot: Path, val outputDir: Path) extends SimpleOutputLayout {
- lazy val srcDir = projectRoot / "src"
- /**
- * An utility method to easily create StandardPathConfig from a given path layout
- */
- def /(name: String)= new StandardPathConfig(this, name)
-}
-
-/**
- *
- */
-class StandardPathConfig(layout: PathLayout, name: String) extends PathConfig {
- lazy val projectRoot = layout.projectRoot
- lazy val sources = layout.srcDir / name
- lazy val analysis = layout.analysisOutput / name
- lazy val output = layout.classesOutput / name
-}
diff --git a/project/build/SVN.scala b/project/build/SVN.scala
deleted file mode 100644
index 427469e..0000000
--- a/project/build/SVN.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-import sbt._
-
-/**
- * @param root the root of an svn repository
- * @author Moix Grégory
- */
-class SVN(root: Path) {
- /** Location of tool which parses svn revision in git-svn repository. */
- val GitSvnRevTool = root / "tools" / "get-scala-revision"
- val GitSvnRegex = """^Revision:\s*(\d+).*""".r
-
- /**
- * Gets the revision number of the repository given through the constructor of the class
- * It assumes that svn or git is installed on the running computer. Return 0 if it was not
- * able to found the revision number
- */
- def getRevisionNumber: Int = getSvn orElse getGit getOrElse 0
- def getSvn: Option[Int] = {
- /** Doing this the hard way trying to suppress the svn error message
- * on stderr. Could not figure out how to do it simply in sbt.
- */
- val pb = new java.lang.ProcessBuilder("svn", "info")
- pb directory root.asFile
- pb redirectErrorStream true
-
- Process(pb).lines_! foreach {
- case GitSvnRegex(rev) => return Some(rev.toInt)
- case _ => ()
- }
- None
- }
-
- def getGit: Option[Int] =
- try { Some(Process(GitSvnRevTool.toString, root).!!.trim.toInt) }
- catch { case _: Exception => None }
-}
diff --git a/project/build/ScalaBuildProject.scala b/project/build/ScalaBuildProject.scala
deleted file mode 100644
index 250ad7a..0000000
--- a/project/build/ScalaBuildProject.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-import sbt._
-import ScalaBuildProject._
-
-
-abstract class ScalaBuildProject extends Project {
- lazy val projectRoot = info.projectPath
- lazy val layerOutput = outputRootPath / name
- lazy val pathLayout = new PathLayout(projectRoot, layerOutput)
-
- lazy val manifestPath = projectRoot/"META-INF"/"MANIFEST.MF"
-
- lazy val lib = projectRoot / "lib"
- lazy val forkJoinJar = lib / forkjoinJarName
- lazy val jlineJar = lib / jlineJarName
- lazy val antJar = lib / "ant" / "ant.jar"
- lazy val fjbgJar = lib / fjbgJarName
- lazy val msilJar = lib / msilJarName
-
-}
-
-object ScalaBuildProject {
- // Some path definitions related strings
- val compilerJarName = "scala-compiler.jar"
- val libraryJarName = "scala-library.jar"
- val scalacheckJarName = "scalacheck.jar"
- val scalapJarName = "scalap.jar"
- val dbcJarName = "scala-dbc.jar"
- val swingJarName = "scala-swing.jar"
- val partestJarName = "scala-partest.jar"
- val fjbgJarName = "fjbg.jar"
- val msilJarName = "msil.jar"
- val jlineJarName = "jline.jar"
- val forkjoinJarName = "forkjoin.jar"
-
-
-}
diff --git a/project/build/ScalaSBTBuilder.scala b/project/build/ScalaSBTBuilder.scala
deleted file mode 100644
index 81c7860..0000000
--- a/project/build/ScalaSBTBuilder.scala
+++ /dev/null
@@ -1,362 +0,0 @@
-import sbt._
-import ScalaBuildProject._
-import ScalaSBTBuilder._
-
-/**
- * This class is the entry point for building scala with SBT.
- * @author Grégory Moix
- */
-class ScalaSBTBuilder(val info: ProjectInfo)
- extends Project
- with ReflectiveProject
- with BasicDependencyProject
- // with IdeaProject
- with MavenStyleScalaPaths {
- /** This secret system property turns off transitive dependencies during change
- * detection. It's a short term measure. BE AWARE! That means you can no longer
- * trust sbt to recompile everything: it's only recompiling changed files.
- * (The alternative is that adding a space to TraversableLike incurs a 10+ minute
- * incremental build, which means sbt doesn't get used at all, so this is better.)
- */
- System.setProperty("sbt.intransitive", "true")
-
- // Required by BasicDependencyProject
- def fullUnmanagedClasspath(config: Configuration) = unmanagedClasspath
-
- override def dependencies: Iterable[Project] = (
- info.dependencies ++
- locker.dependencies ++
- quick.dependencies ++
- strap.dependencies ++
- libs.dependencies
- )
- override def shouldCheckOutputDirectories = false
-
- // Support of triggered execution at top level
- override def watchPaths = info.projectPath / "src" ** ("*.scala" || "*.java" || AdditionalResources.basicFilter)
-
- // Top Level Tasks
- lazy val buildFjbg = libs.buildFjbg.describedAs(buildFjbgTaskDescription)
- lazy val buildForkjoin = libs.buildForkjoin.describedAs(buildForkjoinTaskDescription)
- lazy val buildMsil = libs.buildMsil.describedAs(buildMislTaskDescription)
- lazy val clean = quick.clean.dependsOn(libs.clean).describedAs(cleanTaskDescription)
- lazy val cleanAll = locker.clean.dependsOn(libs.clean).describedAs(cleanAllTaskDescription)
- lazy val compile = task {None}.dependsOn(quick.binPack, quick.binQuick).describedAs(buildTaskDescription)
- lazy val docs = quick.scaladoc.describedAs(docsTaskDescription)
- lazy val newFjbg = libs.newFjbg.describedAs(newFjbgTaskDescription)
- lazy val newForkjoin = libs.newForkjoin.describedAs(newForkjoinTaskDescription)
- lazy val newLocker = locker.newLocker.describedAs(newLockerTaskDescription)
- lazy val newMsil = libs.newMsil.describedAs(newMsilTaskDescription)
- lazy val newStarr = quick.newStarr.describedAs(newStarrTaskDescription)
- lazy val palo = locker.pack.describedAs(paloTaskDescription)
- lazy val pasta = quick.pasta.describedAs(pastaTaskDescription)
- lazy val stabilityTest = strap.stabilityTest.describedAs(stabilityTestTaskDescription)
- lazy val test = quick.externalPartest.describedAs(partestTaskDescription)
-
- // Non-standard names for tasks chosen earlier which I point at the standard ones.
- lazy val build = compile
- lazy val partest = test
-
- // Top level variables
-
- /**
- * The version number of the compiler that will be created by the run of sbt. It is initialised once
- * the first time it is needed, meaning that this number will be kept
- * until sbt quit.
- */
- lazy val versionNumber: String ={
- def getTimeString: String = {
- import java.util.Calendar;
- import java.text.SimpleDateFormat;
- val formatString = "yyyyMMddHHmmss"
- new SimpleDateFormat(formatString) format Calendar.getInstance.getTime
- }
- def getVersion: String = projectVersion.value.toString takeWhile (_ != '-') mkString
- def getRevision: Int = new SVN(info.projectPath) getRevisionNumber
-
- getVersion+".r"+getRevision+"-b"+getTimeString
- }
-
- /* LAYER DEFINITIONS
- * We define here what's specific to each layer are they differ.
- * The common behavior is defined in the BasicLayer class
- * It is important that the class that extends BasicLayer are inner classes of ScalaSBTBuilder. If not, SBT will
- * not know what the main project definition is, as it will find many classes that extends Project
- */
-
- lazy val locker = project(info.projectPath,"locker", new LockerLayer(_))
- lazy val quick = project(info.projectPath,"quick", new QuickLayer(_, locker))
- lazy val strap = project(info.projectPath,"strap", new StrapLayer(_, quick))
- lazy val libs = project(info.projectPath,"libs", new LibsBuilder(_))
-
-
- /**
- * Definition of what is specific to the locker layer. It implements SimplePacker in order to
- * be able to create palo (packed locker)
- */
- class LockerLayer(info: ProjectInfo) extends BasicLayer(info, versionNumber, None) with Packer {
-
-
- override lazy val nextLayer = Some(quick)
- lazy val instantiationCompilerJar = lib / compilerJarName
- lazy val instantiationLibraryJar = lib / libraryJarName
- lazy val lockFile = layerOutput / "locker.lock"
-
- /**
- * We override the definition of the task method in order to make the tasks of this layer
- * be executed only if the layer is not locked. Task of this layer that should be executed
- * whether the layer is locked or not should call super.task instead
- */
- override def task(action : => Option[String])=
- super.task {
- if (lockFile.exists) {
- log.info(name +" is locked")
- None
- }
- else action
- }
-
- def deleteLock = FileUtilities.clean(lockFile, log)
- def createLock = {
- log.info("locking "+name)
- FileUtilities.touch(lockFile, log)
- }
-
- /**
- * Task for locking locker
- */
- lazy val lock = super.task {
- createLock
- }
-
- /**
- * Task for unlocking locker
- */
- lazy val unlock = super.task {
- deleteLock
- }
-
- lazy val newLocker = super.task {
- createNewLocker
- }
- def createNewLocker = {
- deleteLock orElse
- buildLayer orElse
- createLock
- }
-
-
- /**
- * Making locker being locked when it has finished building
- */
- override lazy val finishLayer = lock.dependsOn(build)
-
- override lazy val pack = super.task {packF}.dependsOn(finishLayer)
-
-
- override lazy val packingDestination: Path = outputRootPath /"palo"
-
- override lazy val libraryWS = {
- new WrapperStep(libraryConfig :: Nil) with WrapperPackaging {
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/libraryJarName, jarContent)
- }
- }
- override val minimalCompilation = true
- override lazy val pluginsWS: WrapperStep = new WrapperStep(Nil)
- override lazy val toolsWS = new WrapperStep(Nil)
- }
-
-
- /**
- * Definition of what is specific to the quick layer. It implements Packer in order to create pack, ScalaTools
- * for creating the binaries and Scaladoc to generate the documentation
- */
- class QuickLayer(info: ProjectInfo, previous: BasicLayer) extends BasicLayer(info, versionNumber, Some(previous)) with PartestRunner
- with Packer with ScalaTools with Scaladoc {
-
- override lazy val nextLayer = Some(strap)
-
-
- lazy val instantiationCompilerJar = previous.compilerOutput
- lazy val instantiationLibraryJar = previous.libraryOutput
-
-
- override lazy val packingDestination: Path = outputRootPath/ "pack"
-
- override def libraryToCopy = jlineJar :: Nil
- override def compilerAdditionalJars = msilJar :: fjbgJar :: Nil
- override def libraryAdditionalJars = forkJoinJar :: Nil
-
- override def cleaningList = packedStarrOutput :: super.cleaningList
-
-
- override lazy val libraryWS = new WrapperStep(libraryConfig :: actorsConfig :: dbcConfig :: swingConfig :: Nil) with Packaging {
- def jarContent = List(libraryConfig , actorsConfig, continuationLibraryConfig).map(_.outputDirectory ##)
- lazy val starrJarContent = List(libraryConfig , actorsConfig, dbcConfig, swingConfig, continuationLibraryConfig).map(_.outputDirectory ##)
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/libraryJarName, jarContent, libraryAdditionalJars)
- lazy val starrPackagingConfig = new PackagingConfiguration(packedStarrOutput/libraryJarName, starrJarContent)
-
- }
-
- override lazy val toolsWS = new WrapperStep(scalacheckConfig :: scalapConfig :: partestConfig :: Nil)
-
- // An additional task for building only the library of quick
- // Used for compiling msil
- lazy val compileLibraryOnly = task {
- compile(libraryConfig, cleanCompilation _)
- }
- lazy val externalCompileLibraryOnly = task(maybeFork(compileLibraryOnly)) dependsOn startLayer
-
- def createNewStarrJar: Option[String] ={
- import Packer._
- createJar(libraryWS.starrPackagingConfig, log) orElse
- createJar(compilerConfig.starrPackagingConfig, log)
- }
- lazy val pasta = task {
- createNewStarrJar
- }.dependsOn(build)
-
- lazy val newStarr = task {
- val files = (packedStarrOutput ##) * "*.jar"
- FileUtilities.copy(files.get, lib, true, log) match {
- case Right(_) => None
- case Left(_) => Some("Error occured when copying the new starr to its destination")
- }
-
- }.dependsOn(pasta)
-
- /*
- * Defining here the creation of the binaries for quick and pack
- */
- private lazy val quickBinClasspath = libraryOutput :: actorsOutput :: dbcOutput :: swingOutput :: compilerOutput :: scalapOutput :: forkJoinJar :: fjbgJar :: msilJar :: jlineJar :: Nil
- private lazy val packBinClasspath = Nil
- lazy val binQuick = tools(layerOutput / "bin", quickBinClasspath).dependsOn(finishLayer)
- lazy val binPack = tools(packingDestination / "bin", packBinClasspath).dependsOn(pack)
- }
-
-
- /**
- * Definition of what is specific to the strap layer
- */
- class StrapLayer(info: ProjectInfo, previous: BasicLayer) extends BasicLayer(info, versionNumber, Some(previous)) {
-
- lazy val instantiationCompilerJar = previous.compilerOutput
- lazy val instantiationLibraryJar = previous.libraryOutput
- private val quick = previous
-
- override lazy val libraryWS = new WrapperStep(libraryConfig :: actorsConfig :: dbcConfig :: swingConfig :: Nil) with WrapperPackaging {
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/libraryJarName, Set())
-
- }
-
- override lazy val toolsWS = new WrapperStep(scalacheckConfig :: scalapConfig :: partestConfig :: Nil)
-
-
- def compare = {
- import PathConfig.classes
- def filter(path: Path)= path.descendentsExcept(AllPassFilter, HiddenFileFilter || "*.properties")
- Comparator.compare(quick.pathLayout.outputDir/classes ##, this.pathLayout.outputDir/classes ##, filter _ , log)
- }
-
- lazy val stabilityTest = task {
- log.warn("Stability test must be run on a clean build in order to yield correct results.")
- compare
- }.dependsOn(finishLayer)
- }
-
- /**
- * An additional subproject used to build new version of forkjoin, fjbg and msil
- */
- class LibsBuilder(val info: ProjectInfo) extends ScalaBuildProject with ReflectiveProject with Compilation with BuildInfoEnvironment {
- override def dependencies = info.dependencies
- override def watchPaths = info.projectPath / "src" ** ("*.scala" || "*.java" ||AdditionalResources.basicFilter) // Support of triggered execution at project level
-
-
- def buildInfoEnvironmentLocation: Path = outputRootPath / ("build-"+name+".properties")
-
- def instantiationCompilerJar: Path = locker.compilerOutput
- def instantiationLibraryJar: Path = locker.libraryOutput
-
- def libsDestination = layerOutput
-
- lazy val checkJavaVersion = task {
- val version = System.getProperty("java.version")
- log.debug("java.version ="+version)
- val required = "1.6"
- if (version.startsWith(required)) None else Some("Incompatible java version : required "+required)
- }
-
-
- private def simpleBuild(step: CompilationStep with Packaging)= task {
- import Packer._
- compile(step) orElse createJar(step, log)
- }.dependsOn(locker.finishLayer)
-
- private def copyJar(step: CompilationStep with Packaging, name: String) = task {
- FileUtilities.copyFile(step.packagingConfig.jarDestination, lib/name, log)
- }
-
- lazy val newForkjoin = copyJar(forkJoinConfig, forkjoinJarName).dependsOn(buildForkjoin)
- lazy val buildForkjoin = simpleBuild(forkJoinConfig).dependsOn(checkJavaVersion)
- lazy val newFjbg = copyJar(fjbgConfig, fjbgJarName).dependsOn(buildFjbg)
- lazy val buildFjbg = simpleBuild(fjbgConfig)
- lazy val newMsil = copyJar(msilConfig, msilJarName).dependsOn(buildMsil)
- // TODO As msil contains scala files, maybe needed compile it with an ExternalSBTRunner
- lazy val buildMsil = simpleBuild(msilConfig).dependsOn(quick.externalCompileLibraryOnly)
-
- lazy val forkJoinConfig = new CompilationStep("forkjoin", pathLayout, log) with Packaging {
- def label = "new forkjoin library"
- override def sources: PathFinder = sourceRoots.descendentsExcept("*.java", ".svn")
- def dependencies = Seq()
- def options = Seq()
- override def javaOptions = Seq("-target","1.5","-source","1.5","-g")
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/forkjoinJarName, List(outputDirectory ##))
- }
-
- lazy val fjbgConfig = new CompilationStep("fjbg", pathLayout, log) with Packaging {
- def label = "new fjbg library"
- override def sources: PathFinder = sourceRoots.descendentsExcept("*.java", ".svn")
- def dependencies = Seq()
- def options = Seq()
- override def javaOptions = Seq("-target","1.5","-source","1.4","-g")
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/fjbgJarName, List(outputDirectory ##))
-
- }
-
- lazy val msilConfig = new CompilationStep("msil", pathLayout, log) with Packaging {
- def label = "new msil library"
- override def sources: PathFinder = sourceRoots.descendentsExcept("*.java" |"*.scala", ".svn" |"tests")
- def dependencies = Seq()
- override def classpath = super.classpath +++ quick.libraryOutput
- def options = Seq()
- override def javaOptions = Seq("-target","1.5","-source","1.4","-g")
- lazy val packagingConfig = new PackagingConfiguration(libsDestination/msilJarName, List(outputDirectory ##))
-
- }
-
- def cleaningList = layerOutput :: layerEnvironment.envBackingPath :: Nil
-
- def cleanFiles = FileUtilities.clean(cleaningList, true, log)
-
- lazy val clean: Task = task {cleanFiles}// We use super.task, so cleaning is done in every case, even when locked
-
- }
-}
-object ScalaSBTBuilder {
- val buildTaskDescription = "build locker, lock it, build quick and create pack. It is the equivalent command to 'ant build'."
- val cleanTaskDescription = "clean the outputs of quick and strap. locker remains untouched."
- val cleanAllTaskDescription = "same as clean, but in addition clean locker too."
- val docsTaskDescription = "generate the scaladoc"
- val partestTaskDescription = "run partest"
- val stabilityTestTaskDescription = "run stability testing. It is required to use a clean build (for example, execute the clean-all action) in order to ensure correctness of the result."
- val paloTaskDescription = "create palo"
- val pastaTaskDescription = "create all the jar needed to make a new starr from quick (pasta = packed starr). It does not replace the current library and compiler jars in the libs folder, but the products of the task are instead located in target/pasta"
- val newStarrTaskDescription = "create a new starr and replace the library and compiler jars in the libs folder. It will keep locker locker locked, meaning that if you want to update locker after updating starr, you must run the 'new-locker' command. It will not automatically run partest and stability testing before replacing."
- val newLockerTaskDescription = "replace locker. It will build a new locker. It does not automatically rebuild quick."
- val buildForkjoinTaskDescription = "create all the jar needed to make a new forkjoin. It does not replace the current library and compiler jars in the libs folder, but the products of the task are instead located in target/libs."
- val newForkjoinTaskDescription = "create a new forkjoin and replace the corresponding jar in the libs folder."
- val buildFjbgTaskDescription = "create all the jar needed to make a new fjbg. It does not replace the current library and compiler jars in the libs folder, but the products of the task are instead located in target/libs."
- val newFjbgTaskDescription = "create a new fjbg and replace the corresponding jar in the libs folder."
- val buildMislTaskDescription = "create all the jar needed to make a new msil. It does not replace the current library and compiler jars in the libs folder, but the products of the task are instead located in target/libs."
- val newMsilTaskDescription = "create a msil and replace the corresponding jar in the libs folder."
-}
diff --git a/project/build/ScalaTools.scala b/project/build/ScalaTools.scala
deleted file mode 100644
index d74639d..0000000
--- a/project/build/ScalaTools.scala
+++ /dev/null
@@ -1,179 +0,0 @@
-import java.io.{FileInputStream, File, InputStream, FileWriter}
-import sbt._
-import scala.io._
-
-/**
- * Create the scala binaries
- * Based on scala.tools.ant.ScalaTool
- * @author Grégory Moix (for the sbt adaptation)
- */
-trait ScalaTools {
- self: BasicLayer =>
-
- lazy val templatesLocation = compilerConfig.srcDir/ "scala" / "tools" / "ant" / "templates"
- lazy val unixTemplate = templatesLocation / "tool-unix.tmpl"
- lazy val winTemplate = templatesLocation / "tool-windows.tmpl"
-
-
- // XXX encoding and generalize
- private def getResourceAsCharStream(resource: Path): Stream[Char] = {
- val stream = new FileInputStream(resource.asFile)
- def streamReader(): Stream[Char] = stream.read match {
- case -1 => Stream.empty
- case value => Stream.cons(value.asInstanceOf[Char], streamReader())
-
- }
- if (stream == null) {
- log.debug("Stream was null")
- Stream.empty
- }
-
- //else Stream continually stream.read() takeWhile (_ != -1) map (_.asInstanceOf[Char]) // Does not work in scala 2.7.7
- else streamReader
- }
-
-
- // Converts a variable like @SCALA_HOME@ to ${SCALA_HOME} when pre = "${" and post = "}"
- private def transposeVariableMarkup(text: String, pre: String, post: String) : String = {
- val chars = Source.fromString(text)
- val builder = new StringBuilder()
-
- while (chars.hasNext) {
- val char = chars.next
- if (char == '@') {
- var char = chars.next
- val token = new StringBuilder()
- while (chars.hasNext && char != '@') {
- token.append(char)
- char = chars.next
- }
- if (token.toString == "")
- builder.append('@')
- else
- builder.append(pre + token.toString + post)
- } else builder.append(char)
- }
- builder.toString
- }
-
- private def readAndPatchResource(resource: Path, tokens: Map[String, String]): String = {
- val chars = getResourceAsCharStream(resource).elements
- val builder = new StringBuilder()
-
- while (chars.hasNext) {
- val char = chars.next
- if (char == '@') {
- var char = chars.next
- val token = new StringBuilder()
- while (chars.hasNext && char != '@') {
- token.append(char)
- char = chars.next
- }
- if (tokens.contains(token.toString))
- builder.append(tokens(token.toString))
- else if (token.toString == "")
- builder.append('@')
- else
- builder.append("@" + token.toString + "@")
- } else builder.append(char)
- }
- builder.toString
- }
-
- private def writeFile(file: File, content: String, makeExecutable: Boolean): Option[String] =
- if (file.exists() && !file.canWrite())
- Some("File " + file + " is not writable")
- else {
- val writer = new FileWriter(file, false)
- writer.write(content)
- writer.close()
- file.setExecutable(makeExecutable)
- None
- }
-
- /** Gets the value of the classpath attribute in a Scala-friendly form.
- * @return The class path as a list of files. */
- private def getUnixclasspath(classpath: List[String]): String =
- transposeVariableMarkup(classpath.mkString("", ":", "").replace('\\', '/'), "${", "}")
-
- /** Gets the value of the classpath attribute in a Scala-friendly form.
- * @return The class path as a list of files. */
- private def getWinclasspath(classpath: List[String]): String =
- transposeVariableMarkup(classpath.mkString("", ";", "").replace('/', '\\'), "%", "%")
-
- /** Performs the tool creation of a tool with for a given os
- * @param file
- * @param mainClas
- * @param properties
- * @param javaFlags
- * @param toolFlags
- * @param classPath
- * @param template
- * @param classpathFormater
- */
- private def tool(template: Path, classpathFormater: List[String] => String, file: Path, mainClass: String,
- properties: String, javaFlags: String, toolFlags: String, classPath: List[Path], makeExecutable: Boolean): Option[String] = {
- val patches = Map (
- ("class", mainClass),
- ("properties", properties),
- ("javaflags", javaFlags),
- ("toolflags", toolFlags),
- ("classpath", classpathFormater(classPath.map(_.absolutePath)))
- )
-
- val result = readAndPatchResource(template, patches)
- writeFile(file.asFile, result, makeExecutable)
-
- }
- private def generateTool(config: ToolConfiguration): Option[String] =
- generateTool(config.toolName, config.destination, config.mainClass, config.properties, config.javaFlags, config.toolFlags, config.classPath)
-
- private def generateTool(toolName: String, destination: Path, mainClass: String, properties: String, javaFlags: String, toolFlags: String, classPath: List[Path]): Option[String] ={
- val unixFile = destination / toolName
- val winFile = destination /(toolName + ".bat")
- tool(unixTemplate, getUnixclasspath, unixFile, mainClass, properties, javaFlags, toolFlags, classPath, true) orElse
- tool(winTemplate, getWinclasspath, winFile, mainClass, properties, javaFlags, toolFlags, classPath, false)
- }
-
-
- /*============================================================================*\
- ** Definition of the different tools **
- \*============================================================================*/
- private val defaultJavaFlags = "-Xmx256M -Xms32M"
-
- /**
- * A class that holds the different parameters of a tool
- */
- class ToolConfiguration(val toolName: String, val destination: Path, val mainClass: String, val properties: String, val javaFlags: String, val toolFlags: String, val classPath: List[Path])
-
- /**
- * Generate all tools
- * @param destination Root folder where all the binaries will be written
- * @param classpath Should be specified when you want to use a specific classpath, could be Nil if you want
- * to make the bin use what is in the lib folder of the distribution.
- */
- def tools(destination: Path, classpath: List[Path]) = task {
- val scala = new ToolConfiguration("scala", destination, "scala.tools.nsc.MainGenericRunner", "",defaultJavaFlags, "", classpath)
- val scalac = new ToolConfiguration("scalac", destination, "scala.tools.nsc.Main", "",defaultJavaFlags, "", classpath)
- val scaladoc = new ToolConfiguration("scaladoc",destination,"scala.tools.nsc.ScalaDoc", "",defaultJavaFlags,"", classpath)
- val fsc = new ToolConfiguration("fsc", destination,"scala.tools.nsc.CompileClient", "",defaultJavaFlags, "", classpath)
- val scalap = new ToolConfiguration("scalap",destination, "scala.tools.scalap.Main", "",defaultJavaFlags, "", classpath)
-
-
- val toolList = scala :: scalac :: scaladoc :: fsc :: scalap :: Nil
-
- def process(list: List[ToolConfiguration]): Option[String] = list match {
- case x :: xs => {
- log.debug("Generating "+x.toolName+" bin")
- generateTool(x) orElse process(xs)
- }
- case Nil => None
-
- }
- FileUtilities.createDirectory(destination, log)
- process(toolList)
-
- }
-}
-
-
diff --git a/project/build/Scaladoc.scala b/project/build/Scaladoc.scala
deleted file mode 100644
index 39bcb52..0000000
--- a/project/build/Scaladoc.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-import sbt._
-import xsbt.AnalyzingCompiler
-
-trait Scaladoc {
- self: BasicLayer with Packer =>
-
- lazy val documentationDestination = outputRootPath / "scaladoc"
- lazy val libraryDocumentationDestination = documentationDestination / "library"
- lazy val compilerDocumentationDestination = documentationDestination / "compiler"
- lazy val libraryDoc = {
- val reflect = librarySrcDir / "scala" / "reflect"
- val runtime = librarySrcDir / "scala" / "runtime"
-
- ((librarySrcDir +++ actorsSrcDir +++ swingSrcDir)**("*.scala")---
- reflect / "Code.scala" ---
- reflect / "Manifest.scala" ---
- reflect / "Print.scala" ---
- reflect / "Symbol.scala" ---
- reflect / "Tree.scala" ---
- reflect / "Type.scala" ---
- reflect / "TypedCode.scala" ---
- runtime /"ScalaRunTime.scala" ---
- runtime / "StreamCons.scala" ---
- runtime / "StringAdd.scala" ---
- runtime * ("*$.scala") ---
- runtime *("*Array.scala")
- )
-
- }
- lazy val compilerDoc = {
- compilerSrcDir **("*.scala")
- }
- lazy val classpath ={
- (antJar +++ jlineJar +++ msilJar +++ fjbgJar +++ forkJoinJar +++ outputLibraryJar +++ outputCompilerJar +++ outputPartestJar +++ outputScalapJar ).get
-
- }
- lazy val scaladoc = task(maybeFork(generateScaladoc, "Error generating scaladoc")) dependsOn pack
-
- lazy val generateScaladoc = task {
- instanceScope[Option[String]]{ scala =>
- lazy val compiler = new AnalyzingCompiler(scala, componentManager, xsbt.ClasspathOptions.manual, log)
- val docGenerator = new sbt.Scaladoc(50, compiler)
- docGenerator("Scala "+ versionNumber+" API", libraryDoc.get, classpath, libraryDocumentationDestination, Seq(), log) orElse
- docGenerator("Scala Compiler"+ versionNumber+" API", compilerDoc.get, classpath, compilerDocumentationDestination, Seq(), log)
- }
- }
-
-}
diff --git a/project/plugins.sbt b/project/plugins.sbt
new file mode 100644
index 0000000..fdf37e3
--- /dev/null
+++ b/project/plugins.sbt
@@ -0,0 +1,9 @@
+resolvers += Resolver.url("Typesafe nightlies", url("https://typesafe.artifactoryonline.com/typesafe/ivy-snapshots/"))(Resolver.ivyStylePatterns)
+
+resolvers += Resolver.url("scalasbt", new URL("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns)
+
+resolvers += "jgit-repo" at "http://download.eclipse.org/jgit/maven"
+
+libraryDependencies += "net.databinder" % "dispatch-http_2.9.1" % "0.8.6"
+
+
diff --git a/project/plugins/Plugins.scala b/project/plugins/Plugins.scala
deleted file mode 100644
index 15ee162..0000000
--- a/project/plugins/Plugins.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import sbt._
-
-class Plugins(info: ProjectInfo) extends PluginDefinition(info) {
- val sbtIdeaRepo = "sbt-idea-repo" at "http://mpeltonen.github.com/maven/"
- val sbtIdea = "com.github.mpeltonen" % "sbt-idea-plugin" % "0.2.0"
-}
\ No newline at end of file
diff --git a/project/project/Build.scala b/project/project/Build.scala
new file mode 100644
index 0000000..902e8b0
--- /dev/null
+++ b/project/project/Build.scala
@@ -0,0 +1,7 @@
+import sbt._
+object PluginDef extends Build {
+ override def projects = Seq(root)
+ lazy val root = Project("plugins", file(".")) dependsOn(proguard, git)
+ lazy val proguard = uri("git://github.com/jsuereth/xsbt-proguard-plugin.git#sbt-0.12")
+ lazy val git = uri("git://github.com/sbt/sbt-git-plugin.git#scala-build")
+}
diff --git a/pull-binary-libs.sh b/pull-binary-libs.sh
old mode 100644
new mode 100755
index 65d9590..6c94e39
--- a/pull-binary-libs.sh
+++ b/pull-binary-libs.sh
@@ -2,6 +2,18 @@
#
# Script to pull binary artifacts for scala from the remote repository.
+# Avoid corrupting the jar cache in ~/.sbt and the ugly crash when curl is not installed
+# This affects Linux systems mostly, because wget is the default download tool and curl
+# is not installed at all.
+curl --version &> /dev/null
+if [ $? -ne 0 ]
+then
+ echo ""
+ echo "Please install curl to download the jar files necessary for building Scala."
+ echo ""
+ exit 1
+fi
+
. $(dirname $0)/tools/binary-repo-lib.sh
# TODO - argument parsing...
diff --git a/push-binary-libs.sh b/push-binary-libs.sh
old mode 100644
new mode 100755
diff --git a/src/actors/scala/actors/AbstractActor.scala b/src/actors/scala/actors/AbstractActor.scala
index 3817f9c..5a4e0d9 100644
--- a/src/actors/scala/actors/AbstractActor.scala
+++ b/src/actors/scala/actors/AbstractActor.scala
@@ -1,14 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.actors
+import scala.language.higherKinds
+
/**
* @author Philipp Haller
*
diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala
index 57e1075..61124b3 100644
--- a/src/actors/scala/actors/Actor.scala
+++ b/src/actors/scala/actors/Actor.scala
@@ -1,44 +1,41 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.actors
import scala.util.control.ControlThrowable
import java.util.{Timer, TimerTask}
+import scala.language.implicitConversions
/**
- * Provides functions for the definition of
- * actors, as well as actor operations, such as
- * <code>receive</code>, <code>react</code>, <code>reply</code>,
- * etc.
+ * Provides functions for the definition of actors, as well as actor
+ * operations, such as `receive`, `react`, `reply`, etc.
*
* @author Philipp Haller
*/
object Actor extends Combinators {
/** State of an actor.
- * <ul>
- * <li><b>New</b> -
- * Not yet started</li>
- * <li><b>Runnable</b> -
- * Executing</li>
- * <li><b>Suspended</b> -
- * Suspended, waiting in a `react`</li>
- * <li><b>TimedSuspended</b> -
- * Suspended, waiting in a `reactWithin` </li>
- * <li><b>Blocked</b> -
- * Blocked waiting in a `receive` </li>
- * <li><b>TimedBlocked</b> -
- * Blocked waiting in a `receiveWithin` </li>
- * <li><b>Terminated</b> -
- * Actor has terminated </li>
- * </ul>
+ *
+ * - '''New''' -
+ * Not yet started
+ * - '''Runnable''' -
+ * Executing
+ * - '''Suspended''' -
+ * Suspended, waiting in a `react`
+ * - '''TimedSuspended''' -
+ * Suspended, waiting in a `reactWithin`
+ * - '''Blocked''' -
+ * Blocked waiting in a `receive`
+ * - '''TimedBlocked''' -
+ * Blocked waiting in a `receiveWithin`
+ * - '''Terminated''' -
+ * Actor has terminated
*/
object State extends Enumeration {
val New,
@@ -50,7 +47,7 @@ object Actor extends Combinators {
Terminated = Value
}
- private[actors] val tl = new ThreadLocal[ReplyReactor]
+ private[actors] val tl = new ThreadLocal[InternalReplyReactor]
// timer thread runs as daemon
private[actors] val timer = new Timer(true)
@@ -59,20 +56,19 @@ object Actor extends Combinators {
/**
* Returns the currently executing actor. Should be used instead
- * of <code>this</code> in all blocks of code executed by
- * actors.
+ * of `'''this'''` in all blocks of code executed by actors.
*
* @return returns the currently executing actor.
*/
- def self: Actor = self(Scheduler)
+ def self: Actor = self(Scheduler).asInstanceOf[Actor]
- private[actors] def self(sched: IScheduler): Actor =
- rawSelf(sched).asInstanceOf[Actor]
+ private[actors] def self(sched: IScheduler): InternalActor =
+ rawSelf(sched).asInstanceOf[InternalActor]
- private[actors] def rawSelf: ReplyReactor =
+ private[actors] def rawSelf: InternalReplyReactor =
rawSelf(Scheduler)
- private[actors] def rawSelf(sched: IScheduler): ReplyReactor = {
+ private[actors] def rawSelf(sched: IScheduler): InternalReplyReactor = {
val s = tl.get
if (s eq null) {
val r = new ActorProxy(Thread.currentThread, sched)
@@ -89,11 +85,11 @@ object Actor extends Combinators {
/**
* Resets an actor proxy associated with the current thread.
- * It replaces the implicit <code>ActorProxy</code> instance
+ * It replaces the implicit `ActorProxy` instance
* of the current thread (if any) with a new instance.
*
* This permits to re-use the current thread as an actor
- * even if its <code>ActorProxy</code> has died for some reason.
+ * even if its `ActorProxy` has died for some reason.
*/
def resetProxy() {
val a = tl.get
@@ -102,16 +98,15 @@ object Actor extends Combinators {
}
/**
- * Removes any reference to an <code>Actor</code> instance
+ * Removes any reference to an `Actor` instance
* currently stored in thread-local storage.
*
- * This allows to release references from threads that are
- * potentially long-running or being re-used (e.g. inside
- * a thread pool). Permanent references in thread-local storage
- * are a potential memory leak.
+ * This allows to release references from threads that are potentially
+ * long-running or being re-used (e.g. inside a thread pool). Permanent
+ * references in thread-local storage are a potential memory leak.
*/
def clearSelf() {
- tl.set(null)
+ tl set null
}
/**
@@ -168,15 +163,13 @@ object Actor extends Combinators {
}
/**
- * Receives the next message from the mailbox of the current actor
- * <code>self</code>.
+ * Receives the next message from the mailbox of the current actor `self`.
*/
def ? : Any = self.?
/**
- * Receives a message from the mailbox of
- * <code>self</code>. Blocks if no message matching any of the
- * cases of <code>f</code> can be received.
+ * Receives a message from the mailbox of `self`. Blocks if no message
+ * matching any of the cases of `f` can be received.
*
* @example {{{
* receive {
@@ -193,12 +186,10 @@ object Actor extends Combinators {
self.receive(f)
/**
- * Receives a message from the mailbox of
- * <code>self</code>. Blocks at most <code>msec</code>
- * milliseconds if no message matching any of the cases of
- * <code>f</code> can be received. If no message could be
- * received the <code>TIMEOUT</code> action is executed if
- * specified.
+ * Receives a message from the mailbox of `self`. Blocks at most `msec`
+ * milliseconds if no message matching any of the cases of `f` can be
+ * received. If no message could be received the `TIMEOUT` action is
+ * executed if specified.
*
* @param msec the time span before timeout
* @param f a partial function specifying patterns and actions
@@ -208,11 +199,10 @@ object Actor extends Combinators {
self.receiveWithin(msec)(f)
/**
- * Lightweight variant of <code>receive</code>.
+ * Lightweight variant of `receive`.
*
- * Actions in <code>f</code> have to contain the rest of the
- * computation of <code>self</code>, as this method will never
- * return.
+ * Actions in `f` have to contain the rest of the computation of `self`,
+ * as this method will never return.
*
* A common method of continuting the computation is to send a message
* to another actor:
@@ -241,11 +231,10 @@ object Actor extends Combinators {
rawSelf.react(f)
/**
- * Lightweight variant of <code>receiveWithin</code>.
+ * Lightweight variant of `receiveWithin`.
*
- * Actions in <code>f</code> have to contain the rest of the
- * computation of <code>self</code>, as this method will never
- * return.
+ * Actions in `f` have to contain the rest of the computation of `self`,
+ * as this method will never return.
*
* @param msec the time span before timeout
* @param f a partial function specifying patterns and actions
@@ -257,7 +246,7 @@ object Actor extends Combinators {
def eventloop(f: PartialFunction[Any, Unit]): Nothing =
rawSelf.react(new RecursiveProxyHandler(rawSelf, f))
- private class RecursiveProxyHandler(a: ReplyReactor, f: PartialFunction[Any, Unit])
+ private class RecursiveProxyHandler(a: InternalReplyReactor, f: PartialFunction[Any, Unit])
extends PartialFunction[Any, Unit] {
def isDefinedAt(m: Any): Boolean =
true // events are immediately removed from the mailbox
@@ -271,26 +260,24 @@ object Actor extends Combinators {
* Returns the actor which sent the last received message.
*/
def sender: OutputChannel[Any] =
- rawSelf.sender
+ rawSelf.internalSender
/**
- * Sends <code>msg</code> to the actor waiting in a call to
- * <code>!?</code>.
+ * Sends `msg` to the actor waiting in a call to `!?`.
*/
def reply(msg: Any): Unit =
rawSelf.reply(msg)
/**
- * Sends <code>()</code> to the actor waiting in a call to
- * <code>!?</code>.
+ * Sends `()` to the actor waiting in a call to `!?`.
*/
def reply(): Unit =
rawSelf.reply(())
/**
- * Returns the number of messages in <code>self</code>'s mailbox
+ * Returns the number of messages in `self`'s mailbox
*
- * @return the number of messages in <code>self</code>'s mailbox
+ * @return the number of messages in `self`'s mailbox
*/
def mailboxSize: Int = rawSelf.mailboxSize
@@ -316,12 +303,12 @@ object Actor extends Combinators {
def andThen[b](other: => b): Unit
}
- implicit def mkBody[a](body: => a) = new Body[a] {
+ implicit def mkBody[a](body: => a) = new InternalActor.Body[a] {
def andThen[b](other: => b): Unit = rawSelf.seq(body, other)
}
/**
- * Links <code>self</code> to actor <code>to</code>.
+ * Links `self` to actor `to`.
*
* @param to the actor to link to
* @return the parameter actor
@@ -329,7 +316,7 @@ object Actor extends Combinators {
def link(to: AbstractActor): AbstractActor = self.link(to)
/**
- * Links <code>self</code> to the actor defined by <code>body</code>.
+ * Links `self` to the actor defined by `body`.
*
* @param body the body of the actor to link to
* @return the parameter actor
@@ -337,587 +324,86 @@ object Actor extends Combinators {
def link(body: => Unit): Actor = self.link(body)
/**
- * Unlinks <code>self</code> from actor <code>from</code>.
+ * Unlinks `self` from actor `from`.
*
* @param from the actor to unlink from
*/
def unlink(from: AbstractActor): Unit = self.unlink(from)
/**
- * <p>
- * Terminates execution of <code>self</code> with the following
- * effect on linked actors:
- * </p>
- * <p>
- * For each linked actor <code>a</code> with
- * <code>trapExit</code> set to <code>true</code>, send message
- * <code>Exit(self, reason)</code> to <code>a</code>.
- * </p>
- * <p>
- * For each linked actor <code>a</code> with
- * <code>trapExit</code> set to <code>false</code> (default),
- * call <code>a.exit(reason)</code> if
- * <code>reason != 'normal</code>.
- * </p>
+ * Terminates execution of `self` with the following effect on
+ * linked actors:
+ *
+ * For each linked actor `a` with `trapExit` set to `'''true'''`,
+ * send message `Exit(self, reason)` to `a`.
+ *
+ * For each linked actor `a` with `trapExit` set to `'''false'''`
+ * (default), call `a.exit(reason)` if `reason != 'normal`.
*/
def exit(reason: AnyRef): Nothing = self.exit(reason)
/**
- * <p>
- * Terminates execution of <code>self</code> with the following
- * effect on linked actors:
- * </p>
- * <p>
- * For each linked actor <code>a</code> with
- * <code>trapExit</code> set to <code>true</code>, send message
- * <code>Exit(self, 'normal)</code> to <code>a</code>.
- * </p>
+ * Terminates execution of `self` with the following effect on
+ * linked actors:
+ *
+ * For each linked actor `a` with `trapExit` set to `'''true'''`,
+ * send message `Exit(self, 'normal)` to `a`.
*/
def exit(): Nothing = rawSelf.exit()
}
-/**
- * <p>
- * Provides lightweight, concurrent actors. Actors are
- * created by extending the `Actor` trait (alternatively, one of the
- * factory methods in its companion object can be used). The
- * behavior of an `Actor` subclass is defined by implementing its
- * `act` method:
- *
- * {{{
- * class MyActor extends Actor {
- * def act() {
- * // actor behavior goes here
- * }
- * }
- * }}}
+/** Provides lightweight, concurrent actors. Actors are created by extending
+ * the `Actor` trait (alternatively, one of the factory methods in its
+ * companion object can be used). The behavior of an `Actor` subclass is
+ * defined by implementing its `act` method:
+ * {{{
+ * class MyActor extends Actor {
+ * def act() {
+ * // actor behavior goes here
+ * }
+ * }
+ * }}}
+ * A new `Actor` instance is started by invoking its `start` method.
*
- * A new `Actor` instance is started by invoking its `start` method.
+ * '''Note:''' care must be taken when invoking thread-blocking methods other
+ * than those provided by the `Actor` trait or its companion object (such as
+ * `receive`). Blocking the underlying thread inside an actor may lead to
+ * starvation of other actors. This also applies to actors hogging their
+ * thread for a long time between invoking `receive`/`react`.
*
- * '''Note:''' care must be taken when invoking thread-blocking methods
- * other than those provided by the `Actor` trait or its companion
- * object (such as `receive`). Blocking the underlying thread inside
- * an actor may lead to starvation of other actors. This also
- * applies to actors hogging their thread for a long time between
- * invoking `receive`/`react`.
+ * If actors use blocking operations (for example, methods for blocking I/O),
+ * there are several options:
*
- * If actors use blocking operations (for example, methods for
- * blocking I/O), there are several options:
- * <ul>
- * <li>The run-time system can be configured to use a larger thread pool size
- * (for example, by setting the `actors.corePoolSize` JVM property).</li>
+ * - The run-time system can be configured to use a larger thread pool size
+ * (for example, by setting the `actors.corePoolSize` JVM property).
+ * - The `scheduler` method of the `Actor` trait can be overridden to return a
+ * `ResizableThreadPoolScheduler`, which resizes its thread pool to
+ * avoid starvation caused by actors that invoke arbitrary blocking methods.
+ * - The `actors.enableForkJoin` JVM property can be set to `false`, in which
+ * case a `ResizableThreadPoolScheduler` is used by default to execute actors.
*
- * <li>The `scheduler` method of the `Actor` trait can be overridden to return a
- * `ResizableThreadPoolScheduler`, which resizes its thread pool to
- * avoid starvation caused by actors that invoke arbitrary blocking methods.</li>
+ * The main ideas of the implementation are explained in the two papers
*
- * <li>The `actors.enableForkJoin` JVM property can be set to `false`, in which
- * case a `ResizableThreadPoolScheduler` is used by default to execute actors.</li>
- * </ul>
- * </p>
- * <p>
- * The main ideas of the implementation are explained in the two papers
- * <ul>
- * <li>
- * <a href="http://lampwww.epfl.ch/~odersky/papers/jmlc06.pdf">
- * <span style="font-weight:bold; white-space:nowrap;">Event-Based
- * Programming without Inversion of Control</span></a>,
- * Philipp Haller and Martin Odersky, <i>Proc. JMLC 2006</i>, and
- * </li>
- * <li>
- * <a href="http://lamp.epfl.ch/~phaller/doc/haller07coord.pdf">
- * <span style="font-weight:bold; white-space:nowrap;">Actors that
- * Unify Threads and Events</span></a>,
- * Philipp Haller and Martin Odersky, <i>Proc. COORDINATION 2007</i>.
- * </li>
- * </ul>
- * </p>
+ * - [[http://lampwww.epfl.ch/~odersky/papers/jmlc06.pdf Event-Based
+ * Programming without Inversion of Control]],
+ * Philipp Haller and Martin Odersky, ''Proc. JMLC 2006'', and
+ * - [[http://lamp.epfl.ch/~phaller/doc/haller07coord.pdf Actors that
+ * Unify Threads and Events]],
+ * Philipp Haller and Martin Odersky, ''Proc. COORDINATION 2007''.
*
- * @author Philipp Haller
+ * @author Philipp Haller
*
- * @define actor actor
- * @define channel actor's mailbox
+ * @define actor actor
+ * @define channel actor's mailbox
*/
@SerialVersionUID(-781154067877019505L)
-trait Actor extends AbstractActor with ReplyReactor with ActorCanReply with InputChannel[Any] with Serializable {
-
- /* The following two fields are only used when the actor
- * suspends by blocking its underlying thread, for example,
- * when waiting in a receive or synchronous send.
- */
- @volatile
- private var isSuspended = false
-
- /* This field is used to communicate the received message from
- * the invocation of send to the place where the thread of
- * the receiving actor resumes inside receive/receiveWithin.
- */
- @volatile
- private var received: Option[Any] = None
-
- protected[actors] override def scheduler: IScheduler = Scheduler
-
- private[actors] override def startSearch(msg: Any, replyTo: OutputChannel[Any], handler: PartialFunction[Any, Any]) =
- if (isSuspended) {
- () => synchronized {
- mailbox.append(msg, replyTo)
- resumeActor()
- }
- } else super.startSearch(msg, replyTo, handler)
-
- // we override this method to check `shouldExit` before suspending
- private[actors] override def searchMailbox(startMbox: MQueue[Any],
- handler: PartialFunction[Any, Any],
- resumeOnSameThread: Boolean) {
- var tmpMbox = startMbox
- var done = false
- while (!done) {
- val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
- senders = List(replyTo)
- handler.isDefinedAt(msg)
- })
- if (tmpMbox ne mailbox)
- tmpMbox.foreach((m, s) => mailbox.append(m, s))
- if (null eq qel) {
- synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- tmpMbox = new MQueue[Any]("Temp")
- drainSendBuffer(tmpMbox)
- // keep going
- } else {
- // very important to check for `shouldExit` at this point
- // since linked actors might have set it after we checked
- // last time (e.g., at the beginning of `react`)
- if (shouldExit) exit()
- waitingFor = handler
- // see Reactor.searchMailbox
- throw Actor.suspendException
- }
- }
- } else {
- resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
- done = true
- }
- }
- }
-
- private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
- new ActorTask(this, fun, handler, msg)
-
- /** See the companion object's `receive` method. */
- def receive[R](f: PartialFunction[Any, R]): R = {
- assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
-
- synchronized {
- if (shouldExit) exit() // links
- drainSendBuffer(mailbox)
- }
-
- var done = false
- while (!done) {
- val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
- senders = replyTo :: senders
- val matches = f.isDefinedAt(m)
- senders = senders.tail
- matches
- })
- if (null eq qel) {
- synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- drainSendBuffer(mailbox)
- // keep going
- } else {
- waitingFor = f
- isSuspended = true
- scheduler.managedBlock(blocker)
- drainSendBuffer(mailbox)
- // keep going
- }
- }
- } else {
- received = Some(qel.msg)
- senders = qel.session :: senders
- done = true
- }
- }
-
- val result = f(received.get)
- received = None
- senders = senders.tail
- result
- }
-
- /** See the companion object's `receiveWithin` method. */
- def receiveWithin[R](msec: Long)(f: PartialFunction[Any, R]): R = {
- assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
-
- synchronized {
- if (shouldExit) exit() // links
- drainSendBuffer(mailbox)
- }
-
- // first, remove spurious TIMEOUT message from mailbox if any
- mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
-
- val receiveTimeout = () => {
- if (f.isDefinedAt(TIMEOUT)) {
- received = Some(TIMEOUT)
- senders = this :: senders
- } else
- sys.error("unhandled timeout")
- }
-
- var done = false
- while (!done) {
- val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
- senders = replyTo :: senders
- val matches = f.isDefinedAt(m)
- senders = senders.tail
- matches
- })
- if (null eq qel) {
- val todo = synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- drainSendBuffer(mailbox)
- // keep going
- () => {}
- } else if (msec == 0L) {
- done = true
- receiveTimeout
- } else {
- if (onTimeout.isEmpty) {
- if (!f.isDefinedAt(TIMEOUT))
- sys.error("unhandled timeout")
-
- val thisActor = this
- onTimeout = Some(new TimerTask {
- def run() {
- thisActor.send(TIMEOUT, thisActor)
- }
- })
- Actor.timer.schedule(onTimeout.get, msec)
- }
-
- // It is possible that !onTimeout.isEmpty, but TIMEOUT is not yet in mailbox
- // See SI-4759
- waitingFor = f
- received = None
- isSuspended = true
- scheduler.managedBlock(blocker)
- drainSendBuffer(mailbox)
- // keep going
- () => {}
- }
- }
- todo()
- } else {
- synchronized {
- if (!onTimeout.isEmpty) {
- onTimeout.get.cancel()
- onTimeout = None
- }
- }
- received = Some(qel.msg)
- senders = qel.session :: senders
- done = true
- }
- }
-
- val result = f(received.get)
- received = None
- senders = senders.tail
- result
- }
-
- /** See the companion object's `react` method. */
- override def react(handler: PartialFunction[Any, Unit]): Nothing = {
- synchronized {
- if (shouldExit) exit()
- }
- super.react(handler)
- }
-
- /** See the companion object's `reactWithin` method. */
- override def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
- synchronized {
- if (shouldExit) exit()
- }
- super.reactWithin(msec)(handler)
- }
-
- /** Receives the next message from the mailbox */
- def ? : Any = receive {
- case x => x
- }
-
- // guarded by lock of this
- // never throws SuspendActorControl
- private[actors] override def scheduleActor(f: PartialFunction[Any, Any], msg: Any) =
- if (f eq null) {
- // do nothing (timeout is handled instead)
- }
- else {
- val task = new ActorTask(this, null, f, msg)
- scheduler executeFromActor task
- }
-
- /* Used for notifying scheduler when blocking inside receive/receiveWithin. */
- private object blocker extends scala.concurrent.ManagedBlocker {
- def block() = {
- Actor.this.suspendActor()
- true
- }
- def isReleasable =
- !Actor.this.isSuspended
- }
-
- private def suspendActor() = synchronized {
- while (isSuspended) {
- try {
- wait()
- } catch {
- case _: InterruptedException =>
- }
- }
- // links: check if we should exit
- if (shouldExit) exit()
- }
-
- private def resumeActor() {
- isSuspended = false
- notify()
- }
-
- private[actors] override def exiting = synchronized {
- _state == Actor.State.Terminated
- }
-
- // guarded by this
- private[actors] override def dostart() {
- // Reset various flags.
- //
- // Note that we do *not* reset `trapExit`. The reason is that
- // users should be able to set the field in the constructor
- // and before `act` is called.
- exitReason = 'normal
- shouldExit = false
-
- super.dostart()
- }
+trait Actor extends InternalActor with ReplyReactor {
override def start(): Actor = synchronized {
super.start()
this
}
- /** State of this actor */
- override def getState: Actor.State.Value = synchronized {
- if (isSuspended) {
- if (onTimeout.isEmpty)
- Actor.State.Blocked
- else
- Actor.State.TimedBlocked
- } else
- super.getState
- }
-
- // guarded by this
- private[actors] var links: List[AbstractActor] = Nil
-
- /**
- * Links <code>self</code> to actor <code>to</code>.
- *
- * @param to the actor to link to
- * @return the parameter actor
- */
- def link(to: AbstractActor): AbstractActor = {
- assert(Actor.self(scheduler) == this, "link called on actor different from self")
- this linkTo to
- to linkTo this
- to
}
- /**
- * Links <code>self</code> to the actor defined by <code>body</code>.
- *
- * @param body the body of the actor to link to
- * @return the parameter actor
- */
- def link(body: => Unit): Actor = {
- assert(Actor.self(scheduler) == this, "link called on actor different from self")
- val a = new Actor {
- def act() = body
- override final val scheduler: IScheduler = Actor.this.scheduler
- }
- link(a)
- a.start()
- a
- }
-
- private[actors] def linkTo(to: AbstractActor) = synchronized {
- links = to :: links
- }
-
- /**
- * Unlinks <code>self</code> from actor <code>from</code>.
- */
- def unlink(from: AbstractActor) {
- assert(Actor.self(scheduler) == this, "unlink called on actor different from self")
- this unlinkFrom from
- from unlinkFrom this
- }
-
- private[actors] def unlinkFrom(from: AbstractActor) = synchronized {
- links = links.filterNot(from.==)
- }
-
- @volatile
- var trapExit = false
- // guarded by this
- private var exitReason: AnyRef = 'normal
- // guarded by this
- private[actors] var shouldExit = false
-
- /**
- * <p>
- * Terminates execution of <code>self</code> with the following
- * effect on linked actors:
- * </p>
- * <p>
- * For each linked actor <code>a</code> with
- * <code>trapExit</code> set to <code>true</code>, send message
- * <code>Exit(self, reason)</code> to <code>a</code>.
- * </p>
- * <p>
- * For each linked actor <code>a</code> with
- * <code>trapExit</code> set to <code>false</code> (default),
- * call <code>a.exit(reason)</code> if
- * <code>reason != 'normal</code>.
- * </p>
- */
- protected[actors] def exit(reason: AnyRef): Nothing = {
- synchronized {
- exitReason = reason
- }
- exit()
- }
-
- /**
- * Terminates with exit reason <code>'normal</code>.
- */
- protected[actors] override def exit(): Nothing = {
- val todo = synchronized {
- if (!links.isEmpty)
- exitLinked()
- else
- () => {}
- }
- todo()
- super.exit()
- }
-
- // Assume !links.isEmpty
- // guarded by this
- private[actors] def exitLinked(): () => Unit = {
- _state = Actor.State.Terminated
- // reset waitingFor, otherwise getState returns Suspended
- waitingFor = Reactor.waitingForNone
- // remove this from links
- val mylinks = links.filterNot(this.==)
- // unlink actors
- mylinks.foreach(unlinkFrom(_))
- // return closure that locks linked actors
- () => {
- mylinks.foreach((linked: AbstractActor) => {
- linked.synchronized {
- if (!linked.exiting) {
- linked.unlinkFrom(this)
- linked.exit(this, exitReason)
- }
- }
- })
- }
- }
-
- // Assume !links.isEmpty
- // guarded by this
- private[actors] def exitLinked(reason: AnyRef): () => Unit = {
- exitReason = reason
- exitLinked()
- }
-
- // Assume !this.exiting
- private[actors] def exit(from: AbstractActor, reason: AnyRef) {
- if (trapExit) {
- this ! Exit(from, reason)
- }
- else if (reason != 'normal)
- synchronized {
- shouldExit = true
- exitReason = reason
- // resume this Actor in a way that
- // causes it to exit
- // (because shouldExit == true)
- if (isSuspended)
- resumeActor()
- else if (waitingFor ne Reactor.waitingForNone) {
- waitingFor = Reactor.waitingForNone
- // it doesn't matter what partial function we are passing here
- scheduleActor(waitingFor, null)
- /* Here we should not throw a SuspendActorControl,
- since the current method is called from an actor that
- is in the process of exiting.
-
- Therefore, the contract for scheduleActor is that
- it never throws a SuspendActorControl.
- */
- }
- }
- }
-
- /* Requires qualified private, because <code>RemoteActor</code> must
- * register a termination handler.
- */
- private[actors] def onTerminate(f: => Unit) {
- scheduler.onTerminate(this) { f }
- }
-}
-
-
-/**
- * Used as the timeout pattern in
- * <a href="Actor.html#receiveWithin(Long)" target="contentFrame">
- * <code>receiveWithin</code></a> and
- * <a href="Actor.html#reactWithin(Long)" target="contentFrame">
- * <code>reactWithin</code></a>.
- *
- * @example {{{
- * receiveWithin(500) {
- * case (x, y) => ...
- * case TIMEOUT => ...
- * }
- * }}}
- *
- * @author Philipp Haller
- */
-case object TIMEOUT
-
-
-/** Sent to an actor
- * with `trapExit` set to `true` whenever one of its linked actors
- * terminates.
- *
- * @param from the actor that terminated
- * @param reason the reason that caused the actor to terminate
- */
-case class Exit(from: AbstractActor, reason: AnyRef)
-
-/** Manages control flow of actor executions.
- *
- * @author Philipp Haller
- */
-private[actors] class SuspendActorControl extends ControlThrowable
diff --git a/src/actors/scala/actors/ActorCanReply.scala b/src/actors/scala/actors/ActorCanReply.scala
index b307aaf..07191ec 100644
--- a/src/actors/scala/actors/ActorCanReply.scala
+++ b/src/actors/scala/actors/ActorCanReply.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -18,7 +18,7 @@ import scala.concurrent.SyncVar
* @author Philipp Haller
*/
private[actors] trait ActorCanReply extends ReactorCanReply {
- this: AbstractActor with ReplyReactor =>
+ this: AbstractActor with InternalReplyReactor =>
override def !?(msg: Any): Any = {
val replyCh = new Channel[Any](Actor.self(scheduler))
diff --git a/src/actors/scala/actors/ActorProxy.scala b/src/actors/scala/actors/ActorProxy.scala
index d4381ab..5e1d3e6 100644
--- a/src/actors/scala/actors/ActorProxy.scala
+++ b/src/actors/scala/actors/ActorProxy.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,8 +12,7 @@ package scala.actors
import java.lang.Thread
/**
- * Provides a dynamic actor proxy for normal
- * Java threads.
+ * Provides a dynamic actor proxy for normal Java threads.
*
* @author Philipp Haller
*/
@@ -22,7 +21,7 @@ private[actors] class ActorProxy(t: Thread, override final val scheduler: ISched
def act() {}
/**
- * Terminates with exit reason <code>'normal</code>.
+ * Terminates with exit reason `'normal`.
*/
override def exit(): Nothing = {
shouldExit = false
diff --git a/src/actors/scala/actors/ActorRef.scala b/src/actors/scala/actors/ActorRef.scala
new file mode 100644
index 0000000..5c17906
--- /dev/null
+++ b/src/actors/scala/actors/ActorRef.scala
@@ -0,0 +1,52 @@
+package scala.actors
+
+import java.util.concurrent.TimeoutException
+import scala.concurrent.duration.Duration
+
+/**
+ * Trait used for migration of Scala actors to Akka.
+ */
+ at deprecated("ActorRef ought to be used only with the Actor Migration Kit.", "2.10.0")
+trait ActorRef {
+
+ /**
+ * Sends a one-way asynchronous message. E.g. fire-and-forget semantics.
+ * <p/>
+ *
+ * If invoked from within an actor then the actor reference is implicitly passed on as the implicit 'sender' argument.
+ * <p/>
+ *
+ * This actor 'sender' reference is then available in the receiving actor in the 'sender' member variable,
+ * if invoked from within an Actor. If not then no sender is available.
+ * <pre>
+ * actor ! message
+ * </pre>
+ * <p/>
+ */
+ def !(message: Any)(implicit sender: ActorRef = null): Unit
+
+ /**
+ * Sends a message asynchronously, returning a future which may eventually hold the reply.
+ */
+ private[actors] def ?(message: Any, timeout: Duration): scala.concurrent.Future[Any]
+
+ /**
+ * Forwards the message and passes the original sender actor as the sender.
+ * <p/>
+ * Works with '!' and '?'.
+ */
+ def forward(message: Any)
+
+ private[actors] def localActor: AbstractActor
+
+}
+
+/**
+ * This is what is used to complete a Future that is returned from an ask/? call,
+ * when it times out.
+ */
+class AskTimeoutException(message: String, cause: Throwable) extends TimeoutException {
+ def this(message: String) = this(message, null: Throwable)
+}
+
+object PoisonPill
diff --git a/src/actors/scala/actors/ActorTask.scala b/src/actors/scala/actors/ActorTask.scala
index 8d0379c..21d7a0a 100644
--- a/src/actors/scala/actors/ActorTask.scala
+++ b/src/actors/scala/actors/ActorTask.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,12 +12,16 @@ package scala.actors
/**
* @author Philipp Haller
+ * @note This class inherits a public var called 'msg' from ReactorTask,
+ * and also defines a constructor parameter which shadows it (which makes any
+ * changes to the underlying var invisible.) I can't figure out what's supposed
+ * to happen, so I renamed the constructor parameter to at least be less confusing.
*/
-private[actors] class ActorTask(actor: Actor,
+private[actors] class ActorTask(actor: InternalActor,
fun: () => Unit,
handler: PartialFunction[Any, Any],
- msg: Any)
- extends ReplyReactorTask(actor, fun, handler, msg) {
+ initialMsg: Any)
+ extends ReplyReactorTask(actor, fun, handler, initialMsg) {
protected override def beginExecution() {
super.beginExecution()
@@ -28,23 +32,28 @@ private[actors] class ActorTask(actor: Actor,
}
protected override def terminateExecution(e: Throwable) {
- val senderInfo = try { Some(actor.sender) } catch {
+ val senderInfo = try { Some(actor.internalSender) } catch {
case _: Exception => None
}
+ // !!! If this is supposed to be setting the current contents of the
+ // inherited mutable var rather than always the value given in the constructor,
+ // then it should be changed from initialMsg to msg.
val uncaught = UncaughtException(actor,
- if (msg != null) Some(msg) else None,
+ if (initialMsg != null) Some(initialMsg) else None,
senderInfo,
Thread.currentThread,
e)
val todo = actor.synchronized {
- if (!actor.links.isEmpty)
+ val res = if (!actor.links.isEmpty)
actor.exitLinked(uncaught)
else {
super.terminateExecution(e)
() => {}
}
+ res
}
+
todo()
}
diff --git a/src/actors/scala/actors/CanReply.scala b/src/actors/scala/actors/CanReply.scala
index de2c91d..3d26477 100644
--- a/src/actors/scala/actors/CanReply.scala
+++ b/src/actors/scala/actors/CanReply.scala
@@ -1,14 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.actors
+import scala.language.higherKinds
+
/**
* Defines result-bearing message send operations.
*
@@ -21,8 +22,7 @@ trait CanReply[-T, +R] {
type Future[+P] <: () => P
/**
- * Sends <code>msg</code> to this $actor and
- * awaits reply (synchronous).
+ * Sends `msg` to this $actor and awaits reply (synchronous).
*
* @param msg the message to be sent
* @return the reply
@@ -30,20 +30,19 @@ trait CanReply[-T, +R] {
def !?(msg: T): R
/**
- * Sends <code>msg</code> to this $actor and
- * awaits reply (synchronous) within <code>msec</code>
- * milliseconds.
+ * Sends `msg` to this $actor and awaits reply (synchronous) within
+ * `msec` milliseconds.
*
* @param msec the time span before timeout
* @param msg the message to be sent
- * @return <code>None</code> in case of timeout, otherwise
- * <code>Some(x)</code> where <code>x</code> is the reply
+ * @return `None` in case of timeout, otherwise
+ * `Some(x)` where `x` is the reply
*/
def !?(msec: Long, msg: T): Option[R]
/**
- * Sends <code>msg</code> to this $actor and
- * immediately returns a future representing the reply value.
+ * Sends `msg` to this $actor and immediately returns a future representing
+ * the reply value.
*
* @param msg the message to be sent
* @return the future
@@ -51,11 +50,10 @@ trait CanReply[-T, +R] {
def !!(msg: T): Future[R]
/**
- * Sends <code>msg</code> to this $actor and
- * immediately returns a future representing the reply value.
- * The reply is post-processed using the partial function
- * <code>handler</code>. This also allows to recover a more
- * precise type for the reply value.
+ * Sends `msg` to this $actor and immediately returns a future representing
+ * the reply value. The reply is post-processed using the partial function
+ * `handler`. This also allows to recover a more precise type for the reply
+ * value.
*
* @param msg the message to be sent
* @param handler the function to be applied to the response
diff --git a/src/actors/scala/actors/Channel.scala b/src/actors/scala/actors/Channel.scala
index f489877..9669ffb 100644
--- a/src/actors/scala/actors/Channel.scala
+++ b/src/actors/scala/actors/Channel.scala
@@ -1,20 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.actors
import scala.concurrent.SyncVar
/**
- * Used to pattern match on values that were sent
- * to some channel <code>Chan<sub>n</sub></code> by the current
- * actor <code>self</code>.
+ * Used to pattern match on values that were sent to some channel `Chan,,n,,`
+ * by the current actor `self`.
*
* @example {{{
* receive {
@@ -28,16 +26,15 @@ import scala.concurrent.SyncVar
case class ! [a](ch: Channel[a], msg: a)
/**
- * Provides a means for typed communication among
- * actors. Only the actor creating an instance of a
- * <code>Channel</code> may receive from it.
+ * Provides a means for typed communication among actors. Only the
+ * actor creating an instance of a `Channel` may receive from it.
*
* @author Philipp Haller
*
* @define actor channel
* @define channel channel
*/
-class Channel[Msg](val receiver: Actor) extends InputChannel[Msg] with OutputChannel[Msg] with CanReply[Msg, Any] {
+class Channel[Msg](val receiver: InternalActor) extends InputChannel[Msg] with OutputChannel[Msg] with CanReply[Msg, Any] {
type Future[+P] = scala.actors.Future[P]
diff --git a/src/actors/scala/actors/Combinators.scala b/src/actors/scala/actors/Combinators.scala
index 5276c78..64dbaf0 100644
--- a/src/actors/scala/actors/Combinators.scala
+++ b/src/actors/scala/actors/Combinators.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,13 +10,15 @@
package scala.actors
+import scala.language.implicitConversions
+
private[actors] trait Combinators {
/**
* Enables the composition of suspendable closures using `andThen`,
* `loop`, `loopWhile`, etc.
*/
- implicit def mkBody[a](body: => a): Actor.Body[a]
+ implicit def mkBody[a](body: => a): InternalActor.Body[a]
/**
* Repeatedly executes `body`.
diff --git a/src/actors/scala/actors/DaemonActor.scala b/src/actors/scala/actors/DaemonActor.scala
index ad26bab..ffe8b75 100644
--- a/src/actors/scala/actors/DaemonActor.scala
+++ b/src/actors/scala/actors/DaemonActor.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,8 +12,9 @@ import scheduler.DaemonScheduler
/**
* Base trait for actors with daemon semantics.
- * Unlike a regular <code>Actor</code>, an active <code>DaemonActor</code> will
- * not prevent an application terminating, much like a daemon thread.
+ *
+ * Unlike a regular `Actor`, an active `DaemonActor` will not
+ * prevent an application terminating, much like a daemon thread.
*
* @author Erik Engbrecht
*/
diff --git a/src/actors/scala/actors/Debug.scala b/src/actors/scala/actors/Debug.scala
index 30d5ae5..cc51dfd 100644
--- a/src/actors/scala/actors/Debug.scala
+++ b/src/actors/scala/actors/Debug.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -42,6 +42,3 @@ private[actors] class Logger(tag: String) {
def doError(b: => Unit) =
if (lev > 0) b
}
-
- at deprecated("this class is going to be removed in a future release", "2.7.7")
-class Debug(tag: String) extends Logger(tag) {}
diff --git a/src/actors/scala/actors/Future.scala b/src/actors/scala/actors/Future.scala
index c6b575d..3037f82 100644
--- a/src/actors/scala/actors/Future.scala
+++ b/src/actors/scala/actors/Future.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -27,14 +27,6 @@ abstract class Future[+T] extends Responder[T] with Function0[T] {
private[actors] var fvalue: Option[Any] = None
private[actors] def fvalueTyped = fvalue.get.asInstanceOf[T]
- @deprecated("this member is going to be removed in a future release", "2.8.0")
- def ch: InputChannel[Any] = inputChannel
-
- @deprecated("this member is going to be removed in a future release", "2.8.0")
- protected def value: Option[Any] = fvalue
- @deprecated("this member is going to be removed in a future release", "2.8.0")
- protected def value_=(x: Option[Any]) { fvalue = x }
-
/** Tests whether the future's result is available.
*
* @return `true` if the future's result is available,
@@ -102,7 +94,9 @@ private class FutureActor[T](fun: SyncVar[T] => Unit, channel: Channel[T]) exten
loop {
react {
- case Eval => reply()
+ // This is calling ReplyReactor#reply(msg: Any).
+ // Was: reply(). Now: reply(()).
+ case Eval => reply(())
}
}
}
@@ -180,7 +174,7 @@ object Futures {
* or timeout + `System.currentTimeMillis()` is negative.
*/
def awaitAll(timeout: Long, fts: Future[Any]*): List[Option[Any]] = {
- var resultsMap: collection.mutable.Map[Int, Option[Any]] = new collection.mutable.HashMap[Int, Option[Any]]
+ var resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new scala.collection.mutable.HashMap[Int, Option[Any]]
var cnt = 0
val mappedFts = fts.map(ft =>
diff --git a/src/actors/scala/actors/IScheduler.scala b/src/actors/scala/actors/IScheduler.scala
index 2bb90a5..35c2d32 100644
--- a/src/actors/scala/actors/IScheduler.scala
+++ b/src/actors/scala/actors/IScheduler.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,12 +10,10 @@
package scala.actors
/**
- * A common interface
- * for all schedulers used to execute actor tasks.
+ * A common interface for all schedulers used to execute actor tasks.
*
- * Subclasses of <code>Actor</code> that override its
- * <code>scheduler</code> member must provide
- * an <code>IScheduler</code> implementation.
+ * Subclasses of `Actor` that override its `scheduler` member must provide
+ * an `IScheduler` implementation.
*
* @author Philipp Haller
*/
@@ -27,7 +25,7 @@ trait IScheduler {
*/
def execute(fun: => Unit): Unit
- /** Submits a <code>Runnable</code> for execution.
+ /** Submits a `Runnable` for execution.
*
* @param task the task to be executed
*/
@@ -36,11 +34,12 @@ trait IScheduler {
def executeFromActor(task: Runnable): Unit =
execute(task)
- /** Shuts down the scheduler.
- */
+ /** Shuts down the scheduler. */
def shutdown(): Unit
/** When the scheduler is active, it can execute tasks.
+ *
+ * @return `'''true'''`, if the scheduler is active, otherwise false.
*/
def isActive: Boolean
@@ -67,16 +66,4 @@ trait IScheduler {
def managedBlock(blocker: scala.concurrent.ManagedBlocker): Unit
- @deprecated("this member is going to be removed in a future release", "2.7.7")
- def tick(a: Actor) {}
-
- @deprecated("this member is going to be removed in a future release", "2.7.7")
- def onLockup(handler: () => Unit) {}
-
- @deprecated("this member is going to be removed in a future release", "2.7.7")
- def onLockup(millis: Int)(handler: () => Unit) {}
-
- @deprecated("this member is going to be removed in a future release", "2.7.7")
- def printActorDump() {}
-
}
diff --git a/src/actors/scala/actors/InputChannel.scala b/src/actors/scala/actors/InputChannel.scala
index 51e739b..3d7dd7d 100644
--- a/src/actors/scala/actors/InputChannel.scala
+++ b/src/actors/scala/actors/InputChannel.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/actors/scala/actors/InternalActor.scala b/src/actors/scala/actors/InternalActor.scala
new file mode 100644
index 0000000..ed9e25c
--- /dev/null
+++ b/src/actors/scala/actors/InternalActor.scala
@@ -0,0 +1,544 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.actors
+import java.util.TimerTask
+import scala.util.control.ControlThrowable
+
+private[actors] object InternalActor {
+ private[actors] trait Body[a] {
+ def andThen[b](other: => b): Unit
+ }
+}
+
+private[actors] trait InternalActor extends AbstractActor with InternalReplyReactor with ActorCanReply with InputChannel[Any] with Serializable {
+
+ /* The following two fields are only used when the actor
+ * suspends by blocking its underlying thread, for example,
+ * when waiting in a receive or synchronous send.
+ */
+ @volatile
+ private[actors] var isSuspended = false
+
+ /* This field is used to communicate the received message from
+ * the invocation of send to the place where the thread of
+ * the receiving actor resumes inside receive/receiveWithin.
+ */
+ @volatile
+ private var received: Option[Any] = None
+
+ protected[actors] override def scheduler: IScheduler = Scheduler
+
+ private[actors] override def startSearch(msg: Any, replyTo: OutputChannel[Any], handler: PartialFunction[Any, Any]) =
+ if (isSuspended) {
+ () =>
+ synchronized {
+ mailbox.append(msg, replyTo)
+ resumeActor()
+ }
+ } else super.startSearch(msg, replyTo, handler)
+
+ // we override this method to check `shouldExit` before suspending
+ private[actors] override def searchMailbox(startMbox: MQueue[Any],
+ handler: PartialFunction[Any, Any],
+ resumeOnSameThread: Boolean) {
+ var tmpMbox = startMbox
+ var done = false
+ while (!done) {
+ val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
+ senders = List(replyTo)
+ handler.isDefinedAt(msg)
+ })
+ if (tmpMbox ne mailbox)
+ tmpMbox.foreach((m, s) => mailbox.append(m, s))
+ if (null eq qel) {
+ synchronized {
+ // in mean time new stuff might have arrived
+ if (!sendBuffer.isEmpty) {
+ tmpMbox = new MQueue[Any]("Temp")
+ drainSendBuffer(tmpMbox)
+ // keep going
+ } else {
+ // very important to check for `shouldExit` at this point
+ // since linked actors might have set it after we checked
+ // last time (e.g., at the beginning of `react`)
+ if (shouldExit) exit()
+ waitingFor = handler
+ // see Reactor.searchMailbox
+ throw Actor.suspendException
+ }
+ }
+ } else {
+ resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
+ done = true
+ }
+ }
+ }
+
+ private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
+ new ActorTask(this, fun, handler, msg)
+
+ /** See the companion object's `receive` method. */
+ def receive[R](f: PartialFunction[Any, R]): R = {
+ assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
+
+ synchronized {
+ if (shouldExit) exit() // links
+ drainSendBuffer(mailbox)
+ }
+
+ var done = false
+ while (!done) {
+ val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
+ senders = replyTo :: senders
+ val matches = f.isDefinedAt(m)
+ senders = senders.tail
+ matches
+ })
+ if (null eq qel) {
+ synchronized {
+ // in mean time new stuff might have arrived
+ if (!sendBuffer.isEmpty) {
+ drainSendBuffer(mailbox)
+ // keep going
+ } else {
+ waitingFor = f
+ isSuspended = true
+ scheduler.managedBlock(blocker)
+ drainSendBuffer(mailbox)
+ // keep going
+ }
+ }
+ } else {
+ received = Some(qel.msg)
+ senders = qel.session :: senders
+ done = true
+ }
+ }
+
+ val result = f(received.get)
+ received = None
+ senders = senders.tail
+ result
+ }
+
+ /** See the companion object's `receiveWithin` method. */
+ def receiveWithin[R](msec: Long)(f: PartialFunction[Any, R]): R = {
+ assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
+
+ synchronized {
+ if (shouldExit) exit() // links
+ drainSendBuffer(mailbox)
+ }
+
+ // first, remove spurious TIMEOUT message from mailbox if any
+ mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
+
+ val receiveTimeout = () => {
+ if (f.isDefinedAt(TIMEOUT)) {
+ received = Some(TIMEOUT)
+ senders = this :: senders
+ } else
+ sys.error("unhandled timeout")
+ }
+
+ var done = false
+ while (!done) {
+ val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
+ senders = replyTo :: senders
+ val matches = f.isDefinedAt(m)
+ senders = senders.tail
+ matches
+ })
+ if (null eq qel) {
+ val todo = synchronized {
+ // in mean time new stuff might have arrived
+ if (!sendBuffer.isEmpty) {
+ drainSendBuffer(mailbox)
+ // keep going
+ () => {}
+ } else if (msec == 0L) {
+ done = true
+ receiveTimeout
+ } else {
+ if (onTimeout.isEmpty) {
+ if (!f.isDefinedAt(TIMEOUT))
+ sys.error("unhandled timeout")
+
+ val thisActor = this
+ onTimeout = Some(new TimerTask {
+ def run() {
+ thisActor.send(TIMEOUT, thisActor)
+ }
+ })
+ Actor.timer.schedule(onTimeout.get, msec)
+ }
+
+ // It is possible that !onTimeout.isEmpty, but TIMEOUT is not yet in mailbox
+ // See SI-4759
+ waitingFor = f
+ received = None
+ isSuspended = true
+ scheduler.managedBlock(blocker)
+ drainSendBuffer(mailbox)
+ // keep going
+ () => {}
+ }
+ }
+ todo()
+ } else {
+ synchronized {
+ if (!onTimeout.isEmpty) {
+ onTimeout.get.cancel()
+ onTimeout = None
+ }
+ }
+ received = Some(qel.msg)
+ senders = qel.session :: senders
+ done = true
+ }
+ }
+
+ val result = f(received.get)
+ received = None
+ senders = senders.tail
+ result
+ }
+
+ /** See the companion object's `react` method. */
+ override def react(handler: PartialFunction[Any, Unit]): Nothing = {
+ synchronized {
+ if (shouldExit) exit()
+ }
+ super.react(handler)
+ }
+
+ /** See the companion object's `reactWithin` method. */
+ override def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
+ synchronized {
+ if (shouldExit) exit()
+ }
+ super.reactWithin(msec)(handler)
+ }
+
+ /** Receives the next message from the mailbox */
+ def ? : Any = receive {
+ case x => x
+ }
+
+ // guarded by lock of this
+ // never throws SuspendActorControl
+ private[actors] override def scheduleActor(f: PartialFunction[Any, Any], msg: Any) =
+ if (f eq null) {
+ // do nothing (timeout is handled instead)
+ } else {
+ val task = new ActorTask(this, null, f, msg)
+ scheduler executeFromActor task
+ }
+
+ /* Used for notifying scheduler when blocking inside receive/receiveWithin. */
+ private object blocker extends scala.concurrent.ManagedBlocker {
+ def block() = {
+ InternalActor.this.suspendActor()
+ true
+ }
+ def isReleasable =
+ !InternalActor.this.isSuspended
+ }
+
+ private def suspendActor() = synchronized {
+ while (isSuspended) {
+ try {
+ wait()
+ } catch {
+ case _: InterruptedException =>
+ }
+ }
+ // links: check if we should exit
+ if (shouldExit) exit()
+ }
+
+ private def resumeActor() {
+ isSuspended = false
+ notify()
+ }
+
+ private[actors] override def exiting = synchronized {
+ _state == Actor.State.Terminated
+ }
+
+ // guarded by this
+ private[actors] override def dostart() {
+ // Reset various flags.
+ //
+ // Note that we do *not* reset `trapExit`. The reason is that
+ // users should be able to set the field in the constructor
+ // and before `act` is called.
+ exitReason = 'normal
+ shouldExit = false
+
+ super.dostart()
+ }
+
+ override def start(): InternalActor = synchronized {
+ super.start()
+ this
+ }
+
+ /** State of this actor */
+ override def getState: Actor.State.Value = synchronized {
+ if (isSuspended) {
+ if (onTimeout.isEmpty)
+ Actor.State.Blocked
+ else
+ Actor.State.TimedBlocked
+ } else
+ super.getState
+ }
+
+ // guarded by this
+ private[actors] var links: List[AbstractActor] = Nil
+
+ /**
+ * Links <code>self</code> to actor <code>to</code>.
+ *
+ * @param to the actor to link to
+ * @return the parameter actor
+ */
+ def link(to: AbstractActor): AbstractActor = {
+ assert(Actor.self(scheduler) == this, "link called on actor different from self")
+ this linkTo to
+ to linkTo this
+ to
+ }
+
+ /**
+ * Links <code>self</code> to actor <code>to</code>.
+ *
+ * @param to the actor to link to
+ * @return the parameter actor
+ */
+ def link(to: ActorRef): ActorRef = {
+ this.link(to.localActor)
+ to
+ }
+
+ /**
+ * Unidirectional linking. For migration purposes only
+ */
+ private[actors] def watch(subject: ActorRef): ActorRef = {
+ assert(Actor.self(scheduler) == this, "link called on actor different from self")
+ subject.localActor linkTo this
+ subject
+ }
+
+ /**
+ * Unidirectional linking. For migration purposes only
+ */
+ private[actors] def unwatch(subject: ActorRef): ActorRef = {
+ assert(Actor.self(scheduler) == this, "link called on actor different from self")
+ subject.localActor unlinkFrom this
+ subject
+ }
+
+ /**
+ * Links <code>self</code> to the actor defined by <code>body</code>.
+ *
+ * @param body the body of the actor to link to
+ * @return the parameter actor
+ */
+ def link(body: => Unit): Actor = {
+ assert(Actor.self(scheduler) == this, "link called on actor different from self")
+ val a = new Actor {
+ def act() = body
+ override final val scheduler: IScheduler = InternalActor.this.scheduler
+ }
+ link(a)
+ a.start()
+ a
+ }
+
+ private[actors] def linkTo(to: AbstractActor) = synchronized {
+ links = to :: links
+ }
+
+ /**
+ * Unlinks <code>self</code> from actor <code>from</code>.
+ */
+ def unlink(from: AbstractActor) {
+ assert(Actor.self(scheduler) == this, "unlink called on actor different from self")
+ this unlinkFrom from
+ from unlinkFrom this
+ }
+
+ /**
+ * Unlinks <code>self</code> from actor <code>from</code>.
+ */
+ def unlink(from: ActorRef) {
+ unlink(from.localActor)
+ }
+
+ private[actors] def unlinkFrom(from: AbstractActor) = synchronized {
+ links = links.filterNot(from.==)
+ }
+
+ @volatile
+ private[actors] var _trapExit = false
+
+ def trapExit = _trapExit
+
+ def trapExit_=(value: Boolean) = _trapExit = value
+
+ // guarded by this
+ private var exitReason: AnyRef = 'normal
+ // guarded by this
+ private[actors] var shouldExit = false
+
+ /**
+ * <p>
+ * Terminates execution of <code>self</code> with the following
+ * effect on linked actors:
+ * </p>
+ * <p>
+ * For each linked actor <code>a</code> with
+ * <code>trapExit</code> set to <code>true</code>, send message
+ * <code>Exit(self, reason)</code> to <code>a</code>.
+ * </p>
+ * <p>
+ * For each linked actor <code>a</code> with
+ * <code>trapExit</code> set to <code>false</code> (default),
+ * call <code>a.exit(reason)</code> if
+ * <code>reason != 'normal</code>.
+ * </p>
+ */
+ protected[actors] def exit(reason: AnyRef): Nothing = {
+ synchronized {
+ exitReason = reason
+ }
+ exit()
+ }
+
+ /**
+ * Terminates with exit reason <code>'normal</code>.
+ */
+ protected[actors] override def exit(): Nothing = {
+ val todo = synchronized {
+ if (!links.isEmpty)
+ exitLinked()
+ else
+ () => {}
+ }
+ todo()
+ super.exit()
+ }
+
+ // Assume !links.isEmpty
+ // guarded by this
+ private[actors] def exitLinked(): () => Unit = {
+ _state = Actor.State.Terminated
+ // reset waitingFor, otherwise getState returns Suspended
+ waitingFor = Reactor.waitingForNone
+ // remove this from links
+ val mylinks = links.filterNot(this.==)
+ // unlink actors
+ mylinks.foreach(unlinkFrom(_))
+ // return closure that locks linked actors
+ () => {
+ mylinks.foreach((linked: AbstractActor) => {
+ linked.synchronized {
+ if (!linked.exiting) {
+ linked.unlinkFrom(this)
+ linked.exit(this, exitReason)
+ }
+ }
+ })
+ }
+ }
+
+ // Assume !links.isEmpty
+ // guarded by this
+ private[actors] def exitLinked(reason: AnyRef): () => Unit = {
+ exitReason = reason
+ exitLinked()
+ }
+
+ // Assume !this.exiting
+ private[actors] def exit(from: AbstractActor, reason: AnyRef) {
+ if (trapExit) {
+ this ! Exit(from, reason)
+ } else if (reason != 'normal)
+ stop(reason)
+ }
+
+ /* Requires qualified private, because <code>RemoteActor</code> must
+ * register a termination handler.
+ */
+ private[actors] def onTerminate(f: => Unit) {
+ scheduler.onTerminate(this) { f }
+ }
+
+
+ private[actors] def stop(reason: AnyRef): Unit = {
+ synchronized {
+ shouldExit = true
+ exitReason = reason
+ // resume this Actor in a way that
+ // causes it to exit
+ // (because shouldExit == true)
+ if (isSuspended)
+ resumeActor()
+ else if (waitingFor ne Reactor.waitingForNone) {
+ waitingFor = Reactor.waitingForNone
+ // it doesn't matter what partial function we are passing here
+ val task = new ActorTask(this, null, waitingFor, null)
+ scheduler execute task
+ /* Here we should not throw a SuspendActorControl,
+ since the current method is called from an actor that
+ is in the process of exiting.
+
+ Therefore, the contract for scheduleActor is that
+ it never throws a SuspendActorControl.
+ */
+ }
+ }
+ }
+}
+
+/**
+ * Used as the timeout pattern in
+ * <a href="Actor.html#receiveWithin(Long)" target="contentFrame">
+ * <code>receiveWithin</code></a> and
+ * <a href="Actor.html#reactWithin(Long)" target="contentFrame">
+ * <code>reactWithin</code></a>.
+ *
+ * @example {{{
+ * receiveWithin(500) {
+ * case (x, y) => ...
+ * case TIMEOUT => ...
+ * }
+ * }}}
+ *
+ * @author Philipp Haller
+ */
+case object TIMEOUT
+
+/**
+ * Sent to an actor
+ * with `trapExit` set to `true` whenever one of its linked actors
+ * terminates.
+ *
+ * @param from the actor that terminated
+ * @param reason the reason that caused the actor to terminate
+ */
+case class Exit(from: AbstractActor, reason: AnyRef)
+
+/**
+ * Manages control flow of actor executions.
+ *
+ * @author Philipp Haller
+ */
+private[actors] class SuspendActorControl extends ControlThrowable
diff --git a/src/actors/scala/actors/InternalReplyReactor.scala b/src/actors/scala/actors/InternalReplyReactor.scala
new file mode 100644
index 0000000..3829513
--- /dev/null
+++ b/src/actors/scala/actors/InternalReplyReactor.scala
@@ -0,0 +1,161 @@
+package scala.actors
+
+import java.util.{TimerTask}
+
+/**
+ * Extends the [[scala.actors.Reactor]]
+ * trait with methods to reply to the sender of a message.
+ * Sending a message to a <code>ReplyReactor</code> implicitly
+ * passes a reference to the sender together with the message.
+ *
+ * @author Philipp Haller
+ *
+ * @define actor `ReplyReactor`
+ */
+trait InternalReplyReactor extends Reactor[Any] with ReactorCanReply {
+
+ /* A list of the current senders. The head of the list is
+ * the sender of the message that was received last.
+ */
+ @volatile
+ private[actors] var senders: List[OutputChannel[Any]] = List()
+
+ /* This option holds a TimerTask when the actor waits in a
+ * reactWithin. The TimerTask is cancelled when the actor
+ * resumes.
+ *
+ * guarded by this
+ */
+ private[actors] var onTimeout: Option[TimerTask] = None
+
+ /**
+ * Returns the $actor which sent the last received message.
+ */
+ protected[actors] def internalSender: OutputChannel[Any] = senders.head
+
+ /**
+ * Replies with <code>msg</code> to the sender.
+ */
+ protected[actors] def reply(msg: Any) {
+ internalSender ! msg
+ }
+
+ override def !(msg: Any) {
+ send(msg, Actor.rawSelf(scheduler))
+ }
+
+ override def forward(msg: Any) {
+ send(msg, Actor.sender)
+ }
+
+ private[actors] override def resumeReceiver(item: (Any, OutputChannel[Any]), handler: PartialFunction[Any, Any], onSameThread: Boolean) {
+ synchronized {
+ if (!onTimeout.isEmpty) {
+ onTimeout.get.cancel()
+ onTimeout = None
+ }
+ }
+ senders = List(item._2)
+ super.resumeReceiver(item, handler, onSameThread)
+ }
+
+ private[actors] override def searchMailbox(startMbox: MQueue[Any],
+ handler: PartialFunction[Any, Any],
+ resumeOnSameThread: Boolean) {
+ var tmpMbox = startMbox
+ var done = false
+ while (!done) {
+ val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
+ senders = List(replyTo)
+ handler.isDefinedAt(msg)
+ })
+ if (tmpMbox ne mailbox)
+ tmpMbox.foreach((m, s) => mailbox.append(m, s))
+ if (null eq qel) {
+ synchronized {
+ // in mean time new stuff might have arrived
+ if (!sendBuffer.isEmpty) {
+ tmpMbox = new MQueue[Any]("Temp")
+ drainSendBuffer(tmpMbox)
+ // keep going
+ } else {
+ waitingFor = handler
+ // see Reactor.searchMailbox
+ throw Actor.suspendException
+ }
+ }
+ } else {
+ resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
+ done = true
+ }
+ }
+ }
+
+ private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
+ new ReplyReactorTask(this, fun, handler, msg)
+
+ protected[actors] override def react(handler: PartialFunction[Any, Unit]): Nothing = {
+ assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
+ super.react(handler)
+ }
+
+
+ /**
+ * Receives a message from this $actor's mailbox within a certain
+ * time span.
+ *
+ * This method never returns. Therefore, the rest of the computation
+ * has to be contained in the actions of the partial function.
+ *
+ * @param msec the time span before timeout
+ * @param handler a partial function with message patterns and actions
+ */
+ protected[actors] def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
+ assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
+
+ synchronized { drainSendBuffer(mailbox) }
+
+ // first, remove spurious TIMEOUT message from mailbox if any
+ mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
+
+ while (true) {
+ val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
+ senders = List(replyTo)
+ handler isDefinedAt m
+ })
+ if (null eq qel) {
+ synchronized {
+ // in mean time new messages might have arrived
+ if (!sendBuffer.isEmpty) {
+ drainSendBuffer(mailbox)
+ // keep going
+ } else if (msec == 0L) {
+ // throws Actor.suspendException
+ resumeReceiver((TIMEOUT, this), handler, false)
+ } else {
+ waitingFor = handler
+ val thisActor = this
+ onTimeout = Some(new TimerTask {
+ def run() { thisActor.send(TIMEOUT, thisActor) }
+ })
+ Actor.timer.schedule(onTimeout.get, msec)
+ throw Actor.suspendException
+ }
+ }
+ } else
+ resumeReceiver((qel.msg, qel.session), handler, false)
+ }
+ throw Actor.suspendException
+ }
+
+ override def getState: Actor.State.Value = synchronized {
+ if (waitingFor ne Reactor.waitingForNone) {
+ if (onTimeout.isEmpty)
+ Actor.State.Suspended
+ else
+ Actor.State.TimedSuspended
+ } else
+ _state
+ }
+
+}
diff --git a/src/actors/scala/actors/KillActorControl.scala b/src/actors/scala/actors/KillActorControl.scala
new file mode 100644
index 0000000..2f1f08e
--- /dev/null
+++ b/src/actors/scala/actors/KillActorControl.scala
@@ -0,0 +1,16 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.actors
+
+import scala.util.control.ControlThrowable
+import java.lang.{InterruptedException, Runnable}
+
+private[actors] class KillActorControl extends ControlThrowable
diff --git a/src/actors/scala/actors/MQueue.scala b/src/actors/scala/actors/MQueue.scala
new file mode 100644
index 0000000..d766ecc
--- /dev/null
+++ b/src/actors/scala/actors/MQueue.scala
@@ -0,0 +1,250 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.actors
+
+private[actors] class MQueueElement[Msg >: Null](val msg: Msg, val session: OutputChannel[Any], var next: MQueueElement[Msg]) {
+ def this() = this(null, null, null)
+ def this(msg: Msg, session: OutputChannel[Any]) = this(msg, session, null)
+}
+
+private[actors] class MQueue[Msg >: Null](protected val label: String) {
+ protected var first: MQueueElement[Msg] = null
+ protected var last: MQueueElement[Msg] = null // last eq null iff list is empty
+ private var _size = 0
+
+ def size = _size
+ final def isEmpty = last eq null
+
+ protected def changeSize(diff: Int) {
+ _size += diff
+ }
+
+ def prepend(other: MQueue[Msg]) {
+ if (!other.isEmpty) {
+ other.last.next = first
+ first = other.first
+ }
+ }
+
+ def clear() {
+ first = null
+ last = null
+ _size = 0
+ }
+
+
+ def append(msg: Msg, session: OutputChannel[Any]) {
+ changeSize(1) // size always increases by 1
+ val el = new MQueueElement(msg, session)
+
+ if (isEmpty) first = el
+ else last.next = el
+
+ last = el
+ }
+
+ def append(el: MQueueElement[Msg]) {
+ changeSize(1) // size always increases by 1
+
+ if (isEmpty) first = el
+ else last.next = el
+
+ last = el
+ }
+
+ def foreach(f: (Msg, OutputChannel[Any]) => Unit) {
+ var curr = first
+ while (curr != null) {
+ f(curr.msg, curr.session)
+ curr = curr.next
+ }
+ }
+
+ def foreachAppend(target: MQueue[Msg]) {
+ var curr = first
+ while (curr != null) {
+ target.append(curr)
+ curr = curr.next
+ }
+ }
+
+ def foreachDequeue(target: MQueue[Msg]) {
+ var curr = first
+ while (curr != null) {
+ target.append(curr)
+ curr = curr.next
+ }
+ first = null
+ last = null
+ _size = 0
+ }
+
+ def foldLeft[B](z: B)(f: (B, Msg) => B): B = {
+ var acc = z
+ var curr = first
+ while (curr != null) {
+ acc = f(acc, curr.msg)
+ curr = curr.next
+ }
+ acc
+ }
+
+ /** Returns the n-th message that satisfies the predicate `p`
+ * without removing it.
+ */
+ def get(n: Int)(p: Msg => Boolean): Option[Msg] = {
+ var pos = 0
+
+ def test(msg: Msg): Boolean =
+ p(msg) && (pos == n || { pos += 1; false })
+
+ var curr = first
+ while (curr != null)
+ if (test(curr.msg)) return Some(curr.msg) // early return
+ else curr = curr.next
+
+ None
+ }
+
+ /** Removes the n-th message that satisfies the predicate <code>p</code>.
+ */
+ def remove(n: Int)(p: (Msg, OutputChannel[Any]) => Boolean): Option[(Msg, OutputChannel[Any])] =
+ removeInternal(n)(p) map (x => (x.msg, x.session))
+
+ /** Extracts the first message that satisfies the predicate `p`
+ * or `'''null'''` if `p` fails for all of them.
+ */
+ def extractFirst(p: (Msg, OutputChannel[Any]) => Boolean): MQueueElement[Msg] =
+ removeInternal(0)(p).orNull
+
+ def extractFirst(pf: PartialFunction[Msg, Any]): MQueueElement[Msg] = {
+ if (isEmpty) // early return
+ return null
+
+ // special handling if returning the head
+ if (pf.isDefinedAt(first.msg)) {
+ val res = first
+ first = first.next
+ if (res eq last)
+ last = null
+
+ changeSize(-1)
+ res
+ }
+ else {
+ var curr = first.next // init to element #2
+ var prev = first
+
+ while (curr != null) {
+ if (pf.isDefinedAt(curr.msg)) {
+ prev.next = curr.next
+ if (curr eq last)
+ last = prev
+
+ changeSize(-1)
+ return curr // early return
+ }
+ else {
+ prev = curr
+ curr = curr.next
+ }
+ }
+ // not found
+ null
+ }
+ }
+
+ private def removeInternal(n: Int)(p: (Msg, OutputChannel[Any]) => Boolean): Option[MQueueElement[Msg]] = {
+ var pos = 0
+
+ def foundMsg(x: MQueueElement[Msg]) = {
+ changeSize(-1)
+ Some(x)
+ }
+ def test(msg: Msg, session: OutputChannel[Any]): Boolean =
+ p(msg, session) && (pos == n || { pos += 1 ; false })
+
+ if (isEmpty) // early return
+ return None
+
+ // special handling if returning the head
+ if (test(first.msg, first.session)) {
+ val res = first
+ first = first.next
+ if (res eq last)
+ last = null
+
+ foundMsg(res)
+ }
+ else {
+ var curr = first.next // init to element #2
+ var prev = first
+
+ while (curr != null) {
+ if (test(curr.msg, curr.session)) {
+ prev.next = curr.next
+ if (curr eq last)
+ last = prev
+
+ return foundMsg(curr) // early return
+ }
+ else {
+ prev = curr
+ curr = curr.next
+ }
+ }
+ // not found
+ None
+ }
+ }
+}
+
+/** Debugging trait.
+ */
+private[actors] trait MessageQueueTracer extends MQueue[Any]
+{
+ private val queueNumber = MessageQueueTracer.getQueueNumber
+
+ override def append(msg: Any, session: OutputChannel[Any]) {
+ super.append(msg, session)
+ printQueue("APPEND %s" format msg)
+ }
+ override def get(n: Int)(p: Any => Boolean): Option[Any] = {
+ val res = super.get(n)(p)
+ printQueue("GET %s" format res)
+ res
+ }
+ override def remove(n: Int)(p: (Any, OutputChannel[Any]) => Boolean): Option[(Any, OutputChannel[Any])] = {
+ val res = super.remove(n)(p)
+ printQueue("REMOVE %s" format res)
+ res
+ }
+ override def extractFirst(p: (Any, OutputChannel[Any]) => Boolean): MQueueElement[Any] = {
+ val res = super.extractFirst(p)
+ printQueue("EXTRACT_FIRST %s" format res)
+ res
+ }
+
+ private def printQueue(msg: String) = {
+ def firstMsg = if (first eq null) "null" else first.msg
+ def lastMsg = if (last eq null) "null" else last.msg
+
+ println("[%s size=%d] [%s] first = %s, last = %s".format(this, size, msg, firstMsg, lastMsg))
+ }
+ override def toString() = "%s:%d".format(label, queueNumber)
+}
+
+private[actors] object MessageQueueTracer {
+ // for tracing purposes
+ private var queueNumberAssigner = 0
+ private def getQueueNumber = synchronized {
+ queueNumberAssigner += 1
+ queueNumberAssigner
+ }
+}
diff --git a/src/actors/scala/actors/MessageQueue.scala b/src/actors/scala/actors/MessageQueue.scala
deleted file mode 100644
index 777735d..0000000
--- a/src/actors/scala/actors/MessageQueue.scala
+++ /dev/null
@@ -1,262 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.actors
-
-/**
- * This class is used by our efficient message queue
- * implementation.
- *
- * @author Philipp Haller
- */
- at SerialVersionUID(7124278808020037465L)
- at deprecated("this class is going to be removed in a future release", "2.7.7")
-class MessageQueueElement(msg: Any, session: OutputChannel[Any], next: MessageQueueElement) extends MQueueElement[Any](msg, session, next) with Serializable {
- def this() = this(null, null, null)
- def this(msg: Any, session: OutputChannel[Any]) = this(msg, session, null)
-}
-
-private[actors] class MQueueElement[Msg >: Null](val msg: Msg, val session: OutputChannel[Any], var next: MQueueElement[Msg]) {
- def this() = this(null, null, null)
- def this(msg: Msg, session: OutputChannel[Any]) = this(msg, session, null)
-}
-
-/**
- * The class <code>MessageQueue</code> provides an efficient
- * implementation of a message queue specialized for this actor
- * library. Classes in this package are supposed to be the only
- * clients of this class.
- *
- * @author Philipp Haller
- */
- at SerialVersionUID(2168935872884095767L)
- at deprecated("this class is going to be removed in a future release", "2.7.7")
-class MessageQueue(label: String) extends MQueue[Any](label) with Serializable
-
-private[actors] class MQueue[Msg >: Null](protected val label: String) {
- protected var first: MQueueElement[Msg] = null
- protected var last: MQueueElement[Msg] = null // last eq null iff list is empty
- private var _size = 0
-
- def size = _size
- final def isEmpty = last eq null
-
- protected def changeSize(diff: Int) {
- _size += diff
- }
-
- def append(msg: Msg, session: OutputChannel[Any]) {
- changeSize(1) // size always increases by 1
- val el = new MQueueElement(msg, session)
-
- if (isEmpty) first = el
- else last.next = el
-
- last = el
- }
-
- def append(el: MQueueElement[Msg]) {
- changeSize(1) // size always increases by 1
-
- if (isEmpty) first = el
- else last.next = el
-
- last = el
- }
-
- def foreach(f: (Msg, OutputChannel[Any]) => Unit) {
- var curr = first
- while (curr != null) {
- f(curr.msg, curr.session)
- curr = curr.next
- }
- }
-
- def foreachAppend(target: MQueue[Msg]) {
- var curr = first
- while (curr != null) {
- target.append(curr)
- curr = curr.next
- }
- }
-
- def foreachDequeue(target: MQueue[Msg]) {
- var curr = first
- while (curr != null) {
- target.append(curr)
- curr = curr.next
- }
- first = null
- last = null
- _size = 0
- }
-
- def foldLeft[B](z: B)(f: (B, Msg) => B): B = {
- var acc = z
- var curr = first
- while (curr != null) {
- acc = f(acc, curr.msg)
- curr = curr.next
- }
- acc
- }
-
- /** Returns the n-th message that satisfies the predicate <code>p</code>
- * without removing it.
- */
- def get(n: Int)(p: Msg => Boolean): Option[Msg] = {
- var pos = 0
-
- def test(msg: Msg): Boolean =
- p(msg) && (pos == n || { pos += 1; false })
-
- var curr = first
- while (curr != null)
- if (test(curr.msg)) return Some(curr.msg) // early return
- else curr = curr.next
-
- None
- }
-
- /** Removes the n-th message that satisfies the predicate <code>p</code>.
- */
- def remove(n: Int)(p: (Msg, OutputChannel[Any]) => Boolean): Option[(Msg, OutputChannel[Any])] =
- removeInternal(n)(p) map (x => (x.msg, x.session))
-
- /** Extracts the first message that satisfies the predicate <code>p</code>
- * or <code>null</code> if <code>p</code> fails for all of them.
- */
- def extractFirst(p: (Msg, OutputChannel[Any]) => Boolean): MQueueElement[Msg] =
- removeInternal(0)(p) orNull
-
- def extractFirst(pf: PartialFunction[Msg, Any]): MQueueElement[Msg] = {
- if (isEmpty) // early return
- return null
-
- // special handling if returning the head
- if (pf.isDefinedAt(first.msg)) {
- val res = first
- first = first.next
- if (res eq last)
- last = null
-
- changeSize(-1)
- res
- }
- else {
- var curr = first.next // init to element #2
- var prev = first
-
- while (curr != null) {
- if (pf.isDefinedAt(curr.msg)) {
- prev.next = curr.next
- if (curr eq last)
- last = prev
-
- changeSize(-1)
- return curr // early return
- }
- else {
- prev = curr
- curr = curr.next
- }
- }
- // not found
- null
- }
- }
-
- private def removeInternal(n: Int)(p: (Msg, OutputChannel[Any]) => Boolean): Option[MQueueElement[Msg]] = {
- var pos = 0
-
- def foundMsg(x: MQueueElement[Msg]) = {
- changeSize(-1)
- Some(x)
- }
- def test(msg: Msg, session: OutputChannel[Any]): Boolean =
- p(msg, session) && (pos == n || { pos += 1 ; false })
-
- if (isEmpty) // early return
- return None
-
- // special handling if returning the head
- if (test(first.msg, first.session)) {
- val res = first
- first = first.next
- if (res eq last)
- last = null
-
- foundMsg(res)
- }
- else {
- var curr = first.next // init to element #2
- var prev = first
-
- while (curr != null) {
- if (test(curr.msg, curr.session)) {
- prev.next = curr.next
- if (curr eq last)
- last = prev
-
- return foundMsg(curr) // early return
- }
- else {
- prev = curr
- curr = curr.next
- }
- }
- // not found
- None
- }
- }
-}
-
-/** Debugging trait.
- */
-private[actors] trait MessageQueueTracer extends MQueue[Any]
-{
- private val queueNumber = MessageQueueTracer.getQueueNumber
-
- override def append(msg: Any, session: OutputChannel[Any]) {
- super.append(msg, session)
- printQueue("APPEND %s" format msg)
- }
- override def get(n: Int)(p: Any => Boolean): Option[Any] = {
- val res = super.get(n)(p)
- printQueue("GET %s" format res)
- res
- }
- override def remove(n: Int)(p: (Any, OutputChannel[Any]) => Boolean): Option[(Any, OutputChannel[Any])] = {
- val res = super.remove(n)(p)
- printQueue("REMOVE %s" format res)
- res
- }
- override def extractFirst(p: (Any, OutputChannel[Any]) => Boolean): MQueueElement[Any] = {
- val res = super.extractFirst(p)
- printQueue("EXTRACT_FIRST %s" format res)
- res
- }
-
- private def printQueue(msg: String) = {
- def firstMsg = if (first eq null) "null" else first.msg
- def lastMsg = if (last eq null) "null" else last.msg
-
- println("[%s size=%d] [%s] first = %s, last = %s".format(this, size, msg, firstMsg, lastMsg))
- }
- override def toString() = "%s:%d".format(label, queueNumber)
-}
-
-private[actors] object MessageQueueTracer {
- // for tracing purposes
- private var queueNumberAssigner = 0
- private def getQueueNumber = synchronized {
- queueNumberAssigner += 1
- queueNumberAssigner
- }
-}
diff --git a/src/actors/scala/actors/OutputChannel.scala b/src/actors/scala/actors/OutputChannel.scala
index 35a95c3..fd87f81 100644
--- a/src/actors/scala/actors/OutputChannel.scala
+++ b/src/actors/scala/actors/OutputChannel.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.actors
/**
@@ -19,14 +18,14 @@ package scala.actors
trait OutputChannel[-Msg] {
/**
- * Sends <code>msg</code> to this $actor (asynchronous).
+ * Sends `msg` to this $actor (asynchronous).
*
* @param msg the message to send
*/
def !(msg: Msg): Unit
/**
- * Sends <code>msg</code> to this $actor (asynchronous) supplying
+ * Sends `msg` to this $actor (asynchronous) supplying
* explicit reply destination.
*
* @param msg the message to send
@@ -35,14 +34,14 @@ trait OutputChannel[-Msg] {
def send(msg: Msg, replyTo: OutputChannel[Any]): Unit
/**
- * Forwards <code>msg</code> to this $actor (asynchronous).
+ * Forwards `msg` to this $actor (asynchronous).
*
* @param msg the message to forward
*/
def forward(msg: Msg): Unit
/**
- * Returns the <code>Actor</code> that is receiving from this $actor.
+ * Returns the `Actor` that is receiving from this $actor.
*/
- def receiver: Actor
+ def receiver: InternalActor
}
diff --git a/src/actors/scala/actors/ReactChannel.scala b/src/actors/scala/actors/ReactChannel.scala
index 9b6c1c5..7e34681 100644
--- a/src/actors/scala/actors/ReactChannel.scala
+++ b/src/actors/scala/actors/ReactChannel.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,7 +12,7 @@ package scala.actors
/**
* @author Philipp Haller
*/
-private[actors] class ReactChannel[Msg](receiver: ReplyReactor) extends InputChannel[Msg] {
+private[actors] class ReactChannel[Msg](receiver: InternalReplyReactor) extends InputChannel[Msg] {
private case class SendToReactor(channel: ReactChannel[Msg], msg: Msg)
@@ -26,8 +26,8 @@ private[actors] class ReactChannel[Msg](receiver: ReplyReactor) extends InputCha
}
/**
- * Sends a message to this <code>ReactChannel</code>
- * (asynchronous) supplying explicit reply destination.
+ * Sends a message to this `ReactChannel` (asynchronous) supplying
+ * explicit reply destination.
*
* @param msg the message to send
* @param replyTo the reply destination
@@ -37,17 +37,17 @@ private[actors] class ReactChannel[Msg](receiver: ReplyReactor) extends InputCha
}
/**
- * Forwards <code>msg</code> to <code>this</code> keeping the
- * last sender as sender instead of <code>self</code>.
+ * Forwards `msg` to `'''this'''` keeping the last sender as sender
+ * instead of `self`.
*/
def forward(msg: Msg) {
receiver forward SendToReactor(this, msg)
}
/**
- * Receives a message from this <code>ReactChannel</code>.
- * <p>
- * This method never returns. Therefore, the rest of the computation
+ * Receives a message from this `ReactChannel`.
+ *
+ * This method ''never'' returns. Therefore, the rest of the computation
* has to be contained in the actions of the partial function.
*
* @param f a partial function with message patterns and actions
@@ -61,10 +61,9 @@ private[actors] class ReactChannel[Msg](receiver: ReplyReactor) extends InputCha
}
/**
- * Receives a message from this <code>ReactChannel</code> within
- * a certain time span.
- * <p>
- * This method never returns. Therefore, the rest of the computation
+ * Receives a message from this `ReactChannel` within a certain time span.
+ *
+ * This method ''never'' returns. Therefore, the rest of the computation
* has to be contained in the actions of the partial function.
*
* @param msec the time span before timeout
@@ -81,7 +80,7 @@ private[actors] class ReactChannel[Msg](receiver: ReplyReactor) extends InputCha
}
/**
- * Receives a message from this <code>ReactChannel</code>.
+ * Receives a message from this `ReactChannel`.
*
* @param f a partial function with message patterns and actions
* @return result of processing the received value
@@ -96,8 +95,7 @@ private[actors] class ReactChannel[Msg](receiver: ReplyReactor) extends InputCha
}
/**
- * Receives a message from this <code>ReactChannel</code> within a certain
- * time span.
+ * Receives a message from this `ReactChannel` within a certain time span.
*
* @param msec the time span before timeout
* @param f a partial function with message patterns and actions
@@ -114,7 +112,7 @@ private[actors] class ReactChannel[Msg](receiver: ReplyReactor) extends InputCha
}
/**
- * Receives the next message from this <code>ReactChannel</code>.
+ * Receives the next message from this `ReactChannel`.
*/
def ? : Msg = receive {
case x => x
diff --git a/src/actors/scala/actors/Reaction.scala b/src/actors/scala/actors/Reaction.scala
deleted file mode 100644
index e94d420..0000000
--- a/src/actors/scala/actors/Reaction.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.actors
-
-import scala.util.control.ControlThrowable
-import java.lang.{InterruptedException, Runnable}
-
-private[actors] class KillActorControl extends ControlThrowable
-
-/** <p>
- * The abstract class <code>Reaction</code> associates
- * an instance of an <code>Actor</code> with a
- * <a class="java/lang/Runnable" href="" target="contentFrame">
- * <code>java.lang.Runnable</code></a>.
- * </p>
- *
- * @author Philipp Haller
- */
- at deprecated("This class will be removed in a future release", "2.7.7")
-class Reaction(a: Actor, f: PartialFunction[Any, Any], msg: Any)
-extends ActorTask(a, if (f == null) (() => a.act()) else null, f, msg) {
-
- def this(a: Actor) = this(a, null, null)
-
-}
diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala
index 507c18d..f025f6b 100644
--- a/src/actors/scala/actors/Reactor.scala
+++ b/src/actors/scala/actors/Reactor.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,6 +12,7 @@ package scala.actors
import scala.actors.scheduler.{DelegatingScheduler, ExecutorScheduler,
ForkJoinScheduler, ThreadPoolConfig}
import java.util.concurrent.{ThreadPoolExecutor, TimeUnit, LinkedBlockingQueue}
+import scala.language.implicitConversions
private[actors] object Reactor {
@@ -38,11 +39,10 @@ private[actors] object Reactor {
}
}
- val waitingForNone = new PartialFunction[Any, Unit] {
+ val waitingForNone: PartialFunction[Any, Unit] = new PartialFunction[Any, Unit] {
def isDefinedAt(x: Any) = false
def apply(x: Any) {}
}
-
}
/**
@@ -215,11 +215,16 @@ trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators {
scheduler executeFromActor makeReaction(null, handler, msg)
}
+ private[actors] def preAct() = {}
+
// guarded by this
private[actors] def dostart() {
_state = Actor.State.Runnable
scheduler newActor this
- scheduler execute makeReaction(() => act(), null, null)
+ scheduler execute makeReaction(() => {
+ preAct()
+ act()
+ }, null, null)
}
/**
@@ -254,7 +259,7 @@ trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators {
_state
}
- implicit def mkBody[A](body: => A) = new Actor.Body[A] {
+ implicit def mkBody[A](body: => A) = new InternalActor.Body[A] {
def andThen[B](other: => B): Unit = Reactor.this.seq(body, other)
}
@@ -286,12 +291,15 @@ trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators {
throw Actor.suspendException
}
+ private[actors] def internalPostStop() = {}
+
private[actors] def terminated() {
synchronized {
_state = Actor.State.Terminated
// reset waitingFor, otherwise getState returns Suspended
waitingFor = Reactor.waitingForNone
}
+ internalPostStop()
scheduler.terminated(this)
}
diff --git a/src/actors/scala/actors/ReactorCanReply.scala b/src/actors/scala/actors/ReactorCanReply.scala
index 68f9999..e30efcb 100644
--- a/src/actors/scala/actors/ReactorCanReply.scala
+++ b/src/actors/scala/actors/ReactorCanReply.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -16,7 +16,7 @@ package scala.actors
* @author Philipp Haller
*/
private[actors] trait ReactorCanReply extends CanReply[Any, Any] {
- _: ReplyReactor =>
+ _: InternalReplyReactor =>
type Future[+P] = scala.actors.Future[P]
diff --git a/src/actors/scala/actors/ReactorTask.scala b/src/actors/scala/actors/ReactorTask.scala
index 98099c4..1ca061b 100644
--- a/src/actors/scala/actors/ReactorTask.scala
+++ b/src/actors/scala/actors/ReactorTask.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/actors/scala/actors/ReplyReactor.scala b/src/actors/scala/actors/ReplyReactor.scala
index 85ef0d3..a2051d4 100644
--- a/src/actors/scala/actors/ReplyReactor.scala
+++ b/src/actors/scala/actors/ReplyReactor.scala
@@ -1,169 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.actors
-import java.util.{Timer, TimerTask}
-
-/**
- * Extends the [[scala.actors.Reactor]]
- * trait with methods to reply to the sender of a message.
- * Sending a message to a <code>ReplyReactor</code> implicitly
- * passes a reference to the sender together with the message.
- *
- * @author Philipp Haller
- *
- * @define actor `ReplyReactor`
- */
-trait ReplyReactor extends Reactor[Any] with ReactorCanReply {
-
- /* A list of the current senders. The head of the list is
- * the sender of the message that was received last.
- */
- @volatile
- private[actors] var senders: List[OutputChannel[Any]] = List()
-
- /* This option holds a TimerTask when the actor waits in a
- * reactWithin. The TimerTask is cancelled when the actor
- * resumes.
- *
- * guarded by this
- */
- private[actors] var onTimeout: Option[TimerTask] = None
-
- /**
- * Returns the $actor which sent the last received message.
- */
- protected[actors] def sender: OutputChannel[Any] = senders.head
-
- /**
- * Replies with <code>msg</code> to the sender.
- */
- protected[actors] def reply(msg: Any) {
- sender ! msg
- }
-
- override def !(msg: Any) {
- send(msg, Actor.rawSelf(scheduler))
- }
-
- override def forward(msg: Any) {
- send(msg, Actor.sender)
- }
-
- private[actors] override def resumeReceiver(item: (Any, OutputChannel[Any]), handler: PartialFunction[Any, Any], onSameThread: Boolean) {
- synchronized {
- if (!onTimeout.isEmpty) {
- onTimeout.get.cancel()
- onTimeout = None
- }
- }
- senders = List(item._2)
- super.resumeReceiver(item, handler, onSameThread)
- }
-
- private[actors] override def searchMailbox(startMbox: MQueue[Any],
- handler: PartialFunction[Any, Any],
- resumeOnSameThread: Boolean) {
- var tmpMbox = startMbox
- var done = false
- while (!done) {
- val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
- senders = List(replyTo)
- handler.isDefinedAt(msg)
- })
- if (tmpMbox ne mailbox)
- tmpMbox.foreach((m, s) => mailbox.append(m, s))
- if (null eq qel) {
- synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- tmpMbox = new MQueue[Any]("Temp")
- drainSendBuffer(tmpMbox)
- // keep going
- } else {
- waitingFor = handler
- // see Reactor.searchMailbox
- throw Actor.suspendException
- }
- }
- } else {
- resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
- done = true
- }
- }
- }
-
- private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
- new ReplyReactorTask(this, fun, handler, msg)
-
- protected[actors] override def react(handler: PartialFunction[Any, Unit]): Nothing = {
- assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
- super.react(handler)
- }
-
- /**
- * Receives a message from this $actor's mailbox within a certain
- * time span.
- *
- * This method never returns. Therefore, the rest of the computation
- * has to be contained in the actions of the partial function.
- *
- * @param msec the time span before timeout
- * @param handler a partial function with message patterns and actions
- */
- protected[actors] def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
- assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
-
- synchronized { drainSendBuffer(mailbox) }
-
- // first, remove spurious TIMEOUT message from mailbox if any
- mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
-
- while (true) {
- val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
- senders = List(replyTo)
- handler isDefinedAt m
- })
- if (null eq qel) {
- synchronized {
- // in mean time new messages might have arrived
- if (!sendBuffer.isEmpty) {
- drainSendBuffer(mailbox)
- // keep going
- } else if (msec == 0L) {
- // throws Actor.suspendException
- resumeReceiver((TIMEOUT, this), handler, false)
- } else {
- waitingFor = handler
- val thisActor = this
- onTimeout = Some(new TimerTask {
- def run() { thisActor.send(TIMEOUT, thisActor) }
- })
- Actor.timer.schedule(onTimeout.get, msec)
- throw Actor.suspendException
- }
- }
- } else
- resumeReceiver((qel.msg, qel.session), handler, false)
- }
- throw Actor.suspendException
- }
-
- override def getState: Actor.State.Value = synchronized {
- if (waitingFor ne Reactor.waitingForNone) {
- if (onTimeout.isEmpty)
- Actor.State.Suspended
- else
- Actor.State.TimedSuspended
- } else
- _state
- }
-
+ at deprecated("Scala Actors are being removed from the standard library. Please refer to the migration guide.", "2.10")
+trait ReplyReactor extends InternalReplyReactor {
+ protected[actors] def sender: OutputChannel[Any] = super.internalSender
}
diff --git a/src/actors/scala/actors/ReplyReactorTask.scala b/src/actors/scala/actors/ReplyReactorTask.scala
index 1db722f..ea9070f 100644
--- a/src/actors/scala/actors/ReplyReactorTask.scala
+++ b/src/actors/scala/actors/ReplyReactorTask.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,18 +12,25 @@ package scala.actors
/**
* @author Philipp Haller
+ * @note This class inherits a public var called 'reactor' from ReactorTask,
+ * and also defines a constructor parameter which shadows it (which makes any
+ * changes to the underlying var invisible.) I can't figure out what's supposed
+ * to happen, so I renamed the constructor parameter to at least be less confusing.
*/
-private[actors] class ReplyReactorTask(reactor: ReplyReactor,
+private[actors] class ReplyReactorTask(replyReactor: InternalReplyReactor,
fun: () => Unit,
handler: PartialFunction[Any, Any],
msg: Any)
- extends ReactorTask(reactor, fun, handler, msg) {
+ extends ReactorTask(replyReactor, fun, handler, msg) {
- var saved: ReplyReactor = _
+ var saved: InternalReplyReactor = _
protected override def beginExecution() {
saved = Actor.tl.get
- Actor.tl set reactor
+ // !!! If this is supposed to be setting the current contents of the
+ // inherited mutable var rather than always the value given in the constructor,
+ // then it should be changed to "set reactor".
+ Actor.tl set replyReactor
}
protected override def suspendExecution() {
diff --git a/src/actors/scala/actors/Scheduler.scala b/src/actors/scala/actors/Scheduler.scala
index 7fe492c..dd6c110 100644
--- a/src/actors/scala/actors/Scheduler.scala
+++ b/src/actors/scala/actors/Scheduler.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -37,25 +37,4 @@ object Scheduler extends DelegatingScheduler {
Debug.info(this+": starting new "+sched+" ["+sched.getClass+"]")
sched
}
-
- /* Only <code>ForkJoinScheduler</code> implements this method.
- */
- @deprecated("snapshot will be removed", "2.8.0")
- def snapshot() {
- if (sched.isInstanceOf[ForkJoinScheduler]) {
- sched.asInstanceOf[ForkJoinScheduler].snapshot()
- } else
- sys.error("scheduler does not implement snapshot")
- }
-
- /* Only <code>ForkJoinScheduler</code> implements this method.
- */
- @deprecated("restart will be removed", "2.8.0")
- def restart() {
- if (sched.isInstanceOf[ForkJoinScheduler]) {
- sched.asInstanceOf[ForkJoinScheduler].restart()
- } else
- sys.error("scheduler does not implement restart")
- }
-
}
diff --git a/src/actors/scala/actors/SchedulerAdapter.scala b/src/actors/scala/actors/SchedulerAdapter.scala
index 110f358..fb28b3f 100644
--- a/src/actors/scala/actors/SchedulerAdapter.scala
+++ b/src/actors/scala/actors/SchedulerAdapter.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/actors/scala/actors/UncaughtException.scala b/src/actors/scala/actors/UncaughtException.scala
index 3e6efe3..f225987 100644
--- a/src/actors/scala/actors/UncaughtException.scala
+++ b/src/actors/scala/actors/UncaughtException.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -20,7 +20,7 @@ package scala.actors
* @author Philipp Haller
* @author Erik Engbrecht
*/
-case class UncaughtException(actor: Actor,
+case class UncaughtException(actor: InternalActor,
message: Option[Any],
sender: Option[OutputChannel[Any]],
thread: Thread,
diff --git a/src/actors/scala/actors/package.scala b/src/actors/scala/actors/package.scala
index 1fd9fd7..d176487 100644
--- a/src/actors/scala/actors/package.scala
+++ b/src/actors/scala/actors/package.scala
@@ -7,7 +7,7 @@ package scala
* == Guide ==
*
* A detailed guide for the actors library is available
- * [[http://www.scala-lang.org/docu/files/actors-api/actors_api_guide.html#]].
+ * [[http://docs.scala-lang.org/overviews/core/actors.html]].
*
* == Getting Started ==
*
@@ -19,29 +19,4 @@ package object actors {
// type of Reactors tracked by termination detector
private[actors] type TrackedReactor = Reactor[A] forSome { type A >: Null }
-
- @deprecated("use scheduler.ForkJoinScheduler instead", "2.8.0")
- type FJTaskScheduler2 = scala.actors.scheduler.ForkJoinScheduler
-
- @deprecated("use scheduler.ForkJoinScheduler instead", "2.8.0")
- type TickedScheduler = scala.actors.scheduler.ForkJoinScheduler
-
- @deprecated("use scheduler.ForkJoinScheduler instead", "2.8.0")
- type WorkerThreadScheduler = scala.actors.scheduler.ForkJoinScheduler
-
- @deprecated("this class is going to be removed in a future release", "2.8.0")
- type WorkerThread = java.lang.Thread
-
- @deprecated("use scheduler.SingleThreadedScheduler instead", "2.8.0")
- type SingleThreadedScheduler = scala.actors.scheduler.SingleThreadedScheduler
-
- // This used to do a blind cast and throw a CCE after the package
- // object was loaded. I have replaced with a variation that should work
- // in whatever cases that was working but fail less exceptionally for
- // those not intentionally using it.
- @deprecated("this value is going to be removed in a future release", "2.8.0")
- val ActorGC = scala.actors.Scheduler.impl match {
- case x: scala.actors.scheduler.ActorGC => x
- case _ => null
- }
}
diff --git a/src/actors/scala/actors/remote/FreshNameCreator.scala b/src/actors/scala/actors/remote/FreshNameCreator.scala
index ba006ec..f7cf293 100644
--- a/src/actors/scala/actors/remote/FreshNameCreator.scala
+++ b/src/actors/scala/actors/remote/FreshNameCreator.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/actors/scala/actors/remote/JavaSerializer.scala b/src/actors/scala/actors/remote/JavaSerializer.scala
index 52ed9af..6e9f4a7 100644
--- a/src/actors/scala/actors/remote/JavaSerializer.scala
+++ b/src/actors/scala/actors/remote/JavaSerializer.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/actors/scala/actors/remote/NetKernel.scala b/src/actors/scala/actors/remote/NetKernel.scala
index c335f5c..4795ff3 100644
--- a/src/actors/scala/actors/remote/NetKernel.scala
+++ b/src/actors/scala/actors/remote/NetKernel.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,7 @@
package scala.actors
package remote
-import scala.collection.mutable.{HashMap, HashSet}
+import scala.collection.mutable
case class NamedSend(senderLoc: Locator, receiverLoc: Locator, data: Array[Byte], session: Symbol)
@@ -39,8 +39,8 @@ private[remote] class NetKernel(service: Service) {
sendToNode(receiverLoc.node, NamedSend(senderLoc, receiverLoc, bytes, session))
}
- private val actors = new HashMap[Symbol, OutputChannel[Any]]
- private val names = new HashMap[OutputChannel[Any], Symbol]
+ private val actors = new mutable.HashMap[Symbol, OutputChannel[Any]]
+ private val names = new mutable.HashMap[OutputChannel[Any], Symbol]
def register(name: Symbol, a: OutputChannel[Any]): Unit = synchronized {
actors += Pair(name, a)
@@ -60,7 +60,7 @@ private[remote] class NetKernel(service: Service) {
send(node, name, msg, 'nosession)
def send(node: Node, name: Symbol, msg: AnyRef, session: Symbol) {
- val senderLoc = Locator(service.node, getOrCreateName(Actor.self))
+ val senderLoc = Locator(service.node, getOrCreateName(Actor.self(Scheduler)))
val receiverLoc = Locator(node, name)
namedSend(senderLoc, receiverLoc, msg, session)
}
@@ -83,7 +83,7 @@ private[remote] class NetKernel(service: Service) {
p
}
- val proxies = new HashMap[(Node, Symbol), Proxy]
+ val proxies = new mutable.HashMap[(Node, Symbol), Proxy]
def getOrCreateProxy(senderNode: Node, senderName: Symbol): Proxy =
proxies.synchronized {
diff --git a/src/actors/scala/actors/remote/Proxy.scala b/src/actors/scala/actors/remote/Proxy.scala
index bf066ae..73af1ed 100644
--- a/src/actors/scala/actors/remote/Proxy.scala
+++ b/src/actors/scala/actors/remote/Proxy.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,7 @@
package scala.actors
package remote
-import scala.collection.mutable.HashMap
+import scala.collection.mutable
/**
* @author Philipp Haller
@@ -83,7 +83,11 @@ private[remote] class Proxy(node: Node, name: Symbol, @transient var kernel: Net
name+"@"+node
}
-class LinkToFun extends Function2[AbstractActor, Proxy, Unit] with Serializable {
+// Proxy is private[remote], but these classes are public and use it in a public
+// method signature. That makes the only method they have non-overriddable.
+// So I made them final, which seems appropriate anyway.
+
+final class LinkToFun extends Function2[AbstractActor, Proxy, Unit] with Serializable {
def apply(target: AbstractActor, creator: Proxy) {
target.linkTo(creator)
}
@@ -91,7 +95,7 @@ class LinkToFun extends Function2[AbstractActor, Proxy, Unit] with Serializable
"<LinkToFun>"
}
-class UnlinkFromFun extends Function2[AbstractActor, Proxy, Unit] with Serializable {
+final class UnlinkFromFun extends Function2[AbstractActor, Proxy, Unit] with Serializable {
def apply(target: AbstractActor, creator: Proxy) {
target.unlinkFrom(creator)
}
@@ -99,7 +103,7 @@ class UnlinkFromFun extends Function2[AbstractActor, Proxy, Unit] with Serializa
"<UnlinkFromFun>"
}
-class ExitFun(reason: AnyRef) extends Function2[AbstractActor, Proxy, Unit] with Serializable {
+final class ExitFun(reason: AnyRef) extends Function2[AbstractActor, Proxy, Unit] with Serializable {
def apply(target: AbstractActor, creator: Proxy) {
target.exit(creator, reason)
}
@@ -113,8 +117,8 @@ private[remote] case class Apply0(rfun: Function2[AbstractActor, Proxy, Unit])
* @author Philipp Haller
*/
private[remote] class DelegateActor(creator: Proxy, node: Node, name: Symbol, kernel: NetKernel) extends Actor {
- var channelMap = new HashMap[Symbol, OutputChannel[Any]]
- var sessionMap = new HashMap[OutputChannel[Any], Symbol]
+ var channelMap = new mutable.HashMap[Symbol, OutputChannel[Any]]
+ var sessionMap = new mutable.HashMap[OutputChannel[Any], Symbol]
def act() {
Actor.loop {
diff --git a/src/actors/scala/actors/remote/RemoteActor.scala b/src/actors/scala/actors/remote/RemoteActor.scala
index f6ef62b..f1644c2 100644
--- a/src/actors/scala/actors/remote/RemoteActor.scala
+++ b/src/actors/scala/actors/remote/RemoteActor.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -40,7 +40,7 @@ package remote
*/
object RemoteActor {
- private val kernels = new scala.collection.mutable.HashMap[Actor, NetKernel]
+ private val kernels = new scala.collection.mutable.HashMap[InternalActor, NetKernel]
/* If set to <code>null</code> (default), the default class loader
* of <code>java.io.ObjectInputStream</code> is used for deserializing
@@ -62,7 +62,7 @@ object RemoteActor {
private def createNetKernelOnPort(port: Int): NetKernel = {
val serv = TcpService(port, cl)
val kern = serv.kernel
- val s = Actor.self
+ val s = Actor.self(Scheduler)
kernels += Pair(s, kern)
s.onTerminate {
@@ -81,19 +81,15 @@ object RemoteActor {
kern
}
- @deprecated("this member is going to be removed in a future release", "2.8.0")
- def createKernelOnPort(port: Int): NetKernel =
- createNetKernelOnPort(port)
-
/**
* Registers <code>a</code> under <code>name</code> on this
* node.
*/
def register(name: Symbol, a: Actor): Unit = synchronized {
- val kernel = kernels.get(Actor.self) match {
+ val kernel = kernels.get(Actor.self(Scheduler)) match {
case None =>
val serv = TcpService(TcpService.generatePort, cl)
- kernels += Pair(Actor.self, serv.kernel)
+ kernels += Pair(Actor.self(Scheduler), serv.kernel)
serv.kernel
case Some(k) =>
k
@@ -101,7 +97,7 @@ object RemoteActor {
kernel.register(name, a)
}
- private def selfKernel = kernels.get(Actor.self) match {
+ private def selfKernel = kernels.get(Actor.self(Scheduler)) match {
case None =>
// establish remotely accessible
// return path (sender)
@@ -120,10 +116,6 @@ object RemoteActor {
private[remote] def someNetKernel: NetKernel =
kernels.valuesIterator.next
-
- @deprecated("this member is going to be removed in a future release", "2.8.0")
- def someKernel: NetKernel =
- someNetKernel
}
diff --git a/src/actors/scala/actors/remote/Serializer.scala b/src/actors/scala/actors/remote/Serializer.scala
index 9478b14..e39b01f 100644
--- a/src/actors/scala/actors/remote/Serializer.scala
+++ b/src/actors/scala/actors/remote/Serializer.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/actors/scala/actors/remote/Service.scala b/src/actors/scala/actors/remote/Service.scala
index ed5b383..4584cc3 100644
--- a/src/actors/scala/actors/remote/Service.scala
+++ b/src/actors/scala/actors/remote/Service.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/actors/scala/actors/remote/TcpService.scala b/src/actors/scala/actors/remote/TcpService.scala
index d156443..028dd3a 100644
--- a/src/actors/scala/actors/remote/TcpService.scala
+++ b/src/actors/scala/actors/remote/TcpService.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -14,9 +14,9 @@ package remote
import java.io.{DataInputStream, DataOutputStream, IOException}
import java.lang.{Thread, SecurityException}
-import java.net.{InetAddress, ServerSocket, Socket, UnknownHostException}
+import java.net.{InetAddress, InetSocketAddress, ServerSocket, Socket, SocketTimeoutException, UnknownHostException}
-import scala.collection.mutable.HashMap
+import scala.collection.mutable
import scala.util.Random
/* Object TcpService.
@@ -26,7 +26,7 @@ import scala.util.Random
*/
object TcpService {
private val random = new Random
- private val ports = new HashMap[Int, TcpService]
+ private val ports = new mutable.HashMap[Int, TcpService]
def apply(port: Int, cl: ClassLoader): TcpService =
ports.get(port) match {
@@ -59,6 +59,23 @@ object TcpService {
portnum
}
+ private val connectTimeoutMillis = {
+ val propName = "scala.actors.tcpSocket.connectTimeoutMillis"
+ val defaultTimeoutMillis = 0
+ sys.props get propName flatMap {
+ timeout =>
+ try {
+ val to = timeout.toInt
+ Debug.info("Using socket timeout $to")
+ Some(to)
+ } catch {
+ case e: NumberFormatException =>
+ Debug.warning(s"""Could not parse $propName = "$timeout" as an Int""")
+ None
+ }
+ } getOrElse defaultTimeoutMillis
+ }
+
var BufSize: Int = 65536
}
@@ -73,11 +90,11 @@ class TcpService(port: Int, cl: ClassLoader) extends Thread with Service {
private val internalNode = new Node(InetAddress.getLocalHost().getHostAddress(), port)
def node: Node = internalNode
- private val pendingSends = new HashMap[Node, List[Array[Byte]]]
+ private val pendingSends = new mutable.HashMap[Node, List[Array[Byte]]]
/**
* Sends a byte array to another node on the network.
- * If the node is not yet up, up to <code>TcpService.BufSize</code>
+ * If the node is not yet up, up to `TcpService.BufSize`
* messages are buffered.
*/
def send(node: Node, data: Array[Byte]): Unit = synchronized {
@@ -161,7 +178,7 @@ class TcpService(port: Int, cl: ClassLoader) extends Thread with Service {
// connection management
private val connections =
- new scala.collection.mutable.HashMap[Node, TcpServiceWorker]
+ new mutable.HashMap[Node, TcpServiceWorker]
private[actors] def addConnection(node: Node, worker: TcpServiceWorker) = synchronized {
connections += Pair(node, worker)
@@ -176,7 +193,15 @@ class TcpService(port: Int, cl: ClassLoader) extends Thread with Service {
}
def connect(n: Node): TcpServiceWorker = synchronized {
- val socket = new Socket(n.address, n.port)
+ val socket = new Socket()
+ val start = System.nanoTime
+ try {
+ socket.connect(new InetSocketAddress(n.address, n.port), TcpService.connectTimeoutMillis)
+ } catch {
+ case e: SocketTimeoutException =>
+ Debug.warning(f"Timed out connecting to $n after ${(System.nanoTime - start) / math.pow(10, 9)}%.3f seconds")
+ throw e
+ }
val worker = new TcpServiceWorker(this, socket)
worker.sendNode(n)
worker.start()
diff --git a/src/actors/scala/actors/scheduler/ActorGC.scala b/src/actors/scala/actors/scheduler/ActorGC.scala
index 473fba1..6d9a945 100644
--- a/src/actors/scala/actors/scheduler/ActorGC.scala
+++ b/src/actors/scala/actors/scheduler/ActorGC.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,8 +11,7 @@ package scala.actors
package scheduler
import java.lang.ref.{Reference, WeakReference, ReferenceQueue}
-
-import scala.collection.mutable.HashSet
+import scala.collection.mutable
/**
* ActorGC keeps track of the number of live actors being managed by a
@@ -20,9 +19,9 @@ import scala.collection.mutable.HashSet
* either been explicitly terminated or garbage collected.
*
* When an actor is started, it is registered with the ActorGC via the
- * <code>newActor</code> method, and when an actor is knowingly terminated
+ * `newActor` method, and when an actor is knowingly terminated
* (e.g. act method finishes, exit explicitly called, an exception is thrown),
- * the ActorGC is informed via the <code>terminated</code> method.
+ * the ActorGC is informed via the `terminated` method.
*/
trait ActorGC extends TerminationMonitor {
self: IScheduler =>
@@ -32,10 +31,10 @@ trait ActorGC extends TerminationMonitor {
/**
* This is a set of references to all the actors registered with
- * this ActorGC. It is maintained so that the WeakReferences will not be GC'd
- * before the actors to which they point.
+ * this ActorGC. It is maintained so that the WeakReferences will
+ * not be GC'd before the actors to which they point.
*/
- private val refSet = new HashSet[Reference[t] forSome { type t <: TrackedReactor }]
+ private val refSet = new mutable.HashSet[Reference[t] forSome { type t <: TrackedReactor }]
/** newActor is invoked whenever a new actor is started. */
override def newActor(a: TrackedReactor) = synchronized {
diff --git a/src/actors/scala/actors/scheduler/DaemonScheduler.scala b/src/actors/scala/actors/scheduler/DaemonScheduler.scala
index da472ba..a2d6941 100644
--- a/src/actors/scala/actors/scheduler/DaemonScheduler.scala
+++ b/src/actors/scala/actors/scheduler/DaemonScheduler.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/actors/scala/actors/scheduler/DelegatingScheduler.scala b/src/actors/scala/actors/scheduler/DelegatingScheduler.scala
index 49f8cee..b8a81d1 100644
--- a/src/actors/scala/actors/scheduler/DelegatingScheduler.scala
+++ b/src/actors/scala/actors/scheduler/DelegatingScheduler.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala b/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala
index 257fe92..15ce605 100644
--- a/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala
+++ b/src/actors/scala/actors/scheduler/DrainableForkJoinPool.scala
@@ -4,9 +4,9 @@ package scheduler
import java.util.Collection
import scala.concurrent.forkjoin.{ForkJoinPool, ForkJoinTask}
-private class DrainableForkJoinPool extends ForkJoinPool {
+private class DrainableForkJoinPool(parallelism: Int, maxPoolSize: Int) extends ForkJoinPool(parallelism, ForkJoinPool.defaultForkJoinWorkerThreadFactory, null, true) {
- override def drainTasksTo(c: Collection[ForkJoinTask[_]]): Int =
+ override def drainTasksTo(c: Collection[ _ >: ForkJoinTask[_]]): Int =
super.drainTasksTo(c)
}
diff --git a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala b/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
index f473a16..a1d5666 100644
--- a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
index ba0f88c..ce67ffd 100644
--- a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
@@ -38,13 +38,8 @@ class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean
}
private def makeNewPool(): DrainableForkJoinPool = {
- val p = new DrainableForkJoinPool()
- // enable locally FIFO scheduling mode
- p.setAsyncMode(true)
- p.setParallelism(initCoreSize)
- p.setMaximumPoolSize(maxSize)
+ val p = new DrainableForkJoinPool(initCoreSize, maxSize)
Debug.info(this+": parallelism "+p.getParallelism())
- Debug.info(this+": max pool size "+p.getMaximumPoolSize())
p
}
@@ -144,7 +139,7 @@ class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean
ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker {
def block = blocker.block()
def isReleasable() = blocker.isReleasable
- }, true)
+ })
}
/** Suspends the scheduler. All threads that were in use by the
diff --git a/src/actors/scala/actors/scheduler/QuitControl.scala b/src/actors/scala/actors/scheduler/QuitControl.scala
index 9560a0e..b3e288a 100644
--- a/src/actors/scala/actors/scheduler/QuitControl.scala
+++ b/src/actors/scala/actors/scheduler/QuitControl.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,8 +11,8 @@ package scala.actors.scheduler
import scala.util.control.ControlThrowable
/**
- * The <code>QuitControl</code> class is used to manage control flow
- * of certain schedulers.
+ * The `QuitControl` class is used to manage control flow of certain
+ * schedulers.
*
* @author Philipp Haller
*/
diff --git a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
index f6c14e3..f370d45 100644
--- a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.actors.scheduler
import scala.actors.threadpool.{ThreadPoolExecutor, TimeUnit, LinkedBlockingQueue,
@@ -15,13 +14,11 @@ import scala.actors.{Debug, IScheduler}
import scala.concurrent.ManagedBlocker
/**
- * This scheduler class uses a <code>ThreadPoolExecutor</code>
- * to execute <code>Actor</code>s.
+ * This scheduler class uses a `ThreadPoolExecutor` to execute `Actor`s.
*
* The scheduler attempts to shut down itself and the underlying
- * <code>ThreadPoolExecutor</code> only if <code>terminate</code>
- * is set to true. Otherwise, the scheduler must be shut down
- * explicitly.
+ * `ThreadPoolExecutor` only if `terminate` is set to true. Otherwise,
+ * the scheduler must be shut down explicitly.
*
* @author Philipp Haller
*/
@@ -176,7 +173,7 @@ class ResizableThreadPoolScheduler(protected val terminate: Boolean,
}
/** Resumes the execution of the scheduler if it was previously
- * suspended using <code>snapshot</code>.
+ * suspended using `snapshot`.
*/
def restart() {
synchronized {
diff --git a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala b/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
index 021c706..04d1d2c 100644
--- a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
+++ b/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,7 @@
package scala.actors
package scheduler
-import scala.collection.mutable.Queue
+import scala.collection.mutable
/**
* This scheduler executes actor tasks on the current thread.
@@ -19,7 +19,7 @@ import scala.collection.mutable.Queue
*/
class SingleThreadedScheduler extends IScheduler {
- private val tasks = new Queue[Runnable]
+ private val tasks = new mutable.Queue[Runnable]
/** The maximum number of nested tasks that are run
* without unwinding the call stack.
diff --git a/src/actors/scala/actors/scheduler/TerminationMonitor.scala b/src/actors/scala/actors/scheduler/TerminationMonitor.scala
index e4b9743..9f26ca8 100644
--- a/src/actors/scala/actors/scheduler/TerminationMonitor.scala
+++ b/src/actors/scala/actors/scheduler/TerminationMonitor.scala
@@ -1,22 +1,21 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.actors
package scheduler
-import scala.collection.mutable.HashMap
+import scala.collection.mutable
private[scheduler] trait TerminationMonitor {
_: IScheduler =>
protected var activeActors = 0
- protected val terminationHandlers = new HashMap[TrackedReactor, () => Unit]
+ protected val terminationHandlers = new mutable.HashMap[TrackedReactor, () => Unit]
private var started = false
/** newActor is invoked whenever a new actor is started. */
@@ -65,10 +64,6 @@ private[scheduler] trait TerminationMonitor {
started && activeActors <= 0
}
- /** Deprecated non-actor-private version */
- @deprecated("this method is going to be removed in a future release", "2.7.7")
- def allTerminated: Boolean = allActorsTerminated
-
/** Checks for actors that have become garbage. */
protected def gc() {}
}
diff --git a/src/actors/scala/actors/scheduler/TerminationService.scala b/src/actors/scala/actors/scheduler/TerminationService.scala
index 6e56e70..280c8f4 100644
--- a/src/actors/scala/actors/scheduler/TerminationService.scala
+++ b/src/actors/scala/actors/scheduler/TerminationService.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
index a7bf8ec..bfd4e7a 100644
--- a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
+++ b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,7 @@
package scala.actors
package scheduler
-import util.Properties.{ javaVersion, javaVmVendor, isJavaAtLeast, propIsSetTo, propOrNone }
+import scala.util.Properties.{ javaVersion, javaVmVendor, isJavaAtLeast, propIsSetTo, propOrNone }
/**
* @author Erik Engbrecht
@@ -42,10 +42,7 @@ private[actors] object ThreadPoolConfig {
(propIsSetTo("actors.enableForkJoin", "true") || {
Debug.info(this+": java.version = "+javaVersion)
Debug.info(this+": java.vm.vendor = "+javaVmVendor)
-
- // on IBM J9 1.6 do not use ForkJoinPool
- // XXX this all needs to go into Properties.
- isJavaAtLeast("1.6") && ((javaVmVendor contains "Oracle") || (javaVmVendor contains "Sun") || (javaVmVendor contains "Apple"))
+ isJavaAtLeast("1.6")
})
catch {
case _: SecurityException => false
diff --git a/src/android-library/scala/ScalaObject.scala b/src/android-library/scala/ScalaObject.scala
deleted file mode 100644
index f44116d..0000000
--- a/src/android-library/scala/ScalaObject.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-
-trait ScalaObject extends AnyRef
diff --git a/src/asm/scala/tools/asm/AnnotationVisitor.java b/src/asm/scala/tools/asm/AnnotationVisitor.java
new file mode 100644
index 0000000..b96e730
--- /dev/null
+++ b/src/asm/scala/tools/asm/AnnotationVisitor.java
@@ -0,0 +1,157 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A visitor to visit a Java annotation. The methods of this class must be
+ * called in the following order: ( <tt>visit</tt> | <tt>visitEnum</tt> |
+ * <tt>visitAnnotation</tt> | <tt>visitArray</tt> )* <tt>visitEnd</tt>.
+ *
+ * @author Eric Bruneton
+ * @author Eugene Kuleshov
+ */
+public abstract class AnnotationVisitor {
+
+ /**
+ * The ASM API version implemented by this visitor. The value of this field
+ * must be one of {@link Opcodes#ASM4}.
+ */
+ protected final int api;
+
+ /**
+ * The annotation visitor to which this visitor must delegate method calls.
+ * May be null.
+ */
+ protected AnnotationVisitor av;
+
+ /**
+ * Constructs a new {@link AnnotationVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ */
+ public AnnotationVisitor(final int api) {
+ this(api, null);
+ }
+
+ /**
+ * Constructs a new {@link AnnotationVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param av the annotation visitor to which this visitor must delegate
+ * method calls. May be null.
+ */
+ public AnnotationVisitor(final int api, final AnnotationVisitor av) {
+ /*if (api != Opcodes.ASM4) {
+ throw new IllegalArgumentException();
+ }*/
+ this.api = api;
+ this.av = av;
+ }
+
+ /**
+ * Visits a primitive value of the annotation.
+ *
+ * @param name the value name.
+ * @param value the actual value, whose type must be {@link Byte},
+ * {@link Boolean}, {@link Character}, {@link Short}, {@link Integer}
+ * , {@link Long}, {@link Float}, {@link Double}, {@link String} or
+ * {@link Type} or OBJECT or ARRAY sort. This value can also be an
+ * array of byte, boolean, short, char, int, long, float or double
+ * values (this is equivalent to using {@link #visitArray visitArray}
+ * and visiting each array element in turn, but is more convenient).
+ */
+ public void visit(String name, Object value) {
+ if (av != null) {
+ av.visit(name, value);
+ }
+ }
+
+ /**
+ * Visits an enumeration value of the annotation.
+ *
+ * @param name the value name.
+ * @param desc the class descriptor of the enumeration class.
+ * @param value the actual enumeration value.
+ */
+ public void visitEnum(String name, String desc, String value) {
+ if (av != null) {
+ av.visitEnum(name, desc, value);
+ }
+ }
+
+ /**
+ * Visits a nested annotation value of the annotation.
+ *
+ * @param name the value name.
+ * @param desc the class descriptor of the nested annotation class.
+ * @return a visitor to visit the actual nested annotation value, or
+ * <tt>null</tt> if this visitor is not interested in visiting
+ * this nested annotation. <i>The nested annotation value must be
+ * fully visited before calling other methods on this annotation
+ * visitor</i>.
+ */
+ public AnnotationVisitor visitAnnotation(String name, String desc) {
+ if (av != null) {
+ return av.visitAnnotation(name, desc);
+ }
+ return null;
+ }
+
+ /**
+ * Visits an array value of the annotation. Note that arrays of primitive
+ * types (such as byte, boolean, short, char, int, long, float or double)
+ * can be passed as value to {@link #visit visit}. This is what
+ * {@link ClassReader} does.
+ *
+ * @param name the value name.
+ * @return a visitor to visit the actual array value elements, or
+ * <tt>null</tt> if this visitor is not interested in visiting
+ * these values. The 'name' parameters passed to the methods of this
+ * visitor are ignored. <i>All the array values must be visited
+ * before calling other methods on this annotation visitor</i>.
+ */
+ public AnnotationVisitor visitArray(String name) {
+ if (av != null) {
+ return av.visitArray(name);
+ }
+ return null;
+ }
+
+ /**
+ * Visits the end of the annotation.
+ */
+ public void visitEnd() {
+ if (av != null) {
+ av.visitEnd();
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/AnnotationWriter.java b/src/asm/scala/tools/asm/AnnotationWriter.java
new file mode 100644
index 0000000..e530780
--- /dev/null
+++ b/src/asm/scala/tools/asm/AnnotationWriter.java
@@ -0,0 +1,322 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * An {@link AnnotationVisitor} that generates annotations in bytecode form.
+ *
+ * @author Eric Bruneton
+ * @author Eugene Kuleshov
+ */
+final class AnnotationWriter extends AnnotationVisitor {
+
+ /**
+ * The class writer to which this annotation must be added.
+ */
+ private final ClassWriter cw;
+
+ /**
+ * The number of values in this annotation.
+ */
+ private int size;
+
+ /**
+ * <tt>true<tt> if values are named, <tt>false</tt> otherwise. Annotation
+ * writers used for annotation default and annotation arrays use unnamed
+ * values.
+ */
+ private final boolean named;
+
+ /**
+ * The annotation values in bytecode form. This byte vector only contains
+ * the values themselves, i.e. the number of values must be stored as a
+ * unsigned short just before these bytes.
+ */
+ private final ByteVector bv;
+
+ /**
+ * The byte vector to be used to store the number of values of this
+ * annotation. See {@link #bv}.
+ */
+ private final ByteVector parent;
+
+ /**
+ * Where the number of values of this annotation must be stored in
+ * {@link #parent}.
+ */
+ private final int offset;
+
+ /**
+ * Next annotation writer. This field is used to store annotation lists.
+ */
+ AnnotationWriter next;
+
+ /**
+ * Previous annotation writer. This field is used to store annotation lists.
+ */
+ AnnotationWriter prev;
+
+ // ------------------------------------------------------------------------
+ // Constructor
+ // ------------------------------------------------------------------------
+
+ /**
+ * Constructs a new {@link AnnotationWriter}.
+ *
+ * @param cw the class writer to which this annotation must be added.
+ * @param named <tt>true<tt> if values are named, <tt>false</tt> otherwise.
+ * @param bv where the annotation values must be stored.
+ * @param parent where the number of annotation values must be stored.
+ * @param offset where in <tt>parent</tt> the number of annotation values must
+ * be stored.
+ */
+ AnnotationWriter(
+ final ClassWriter cw,
+ final boolean named,
+ final ByteVector bv,
+ final ByteVector parent,
+ final int offset)
+ {
+ super(Opcodes.ASM4);
+ this.cw = cw;
+ this.named = named;
+ this.bv = bv;
+ this.parent = parent;
+ this.offset = offset;
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the AnnotationVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(final String name, final Object value) {
+ ++size;
+ if (named) {
+ bv.putShort(cw.newUTF8(name));
+ }
+ if (value instanceof String) {
+ bv.put12('s', cw.newUTF8((String) value));
+ } else if (value instanceof Byte) {
+ bv.put12('B', cw.newInteger(((Byte) value).byteValue()).index);
+ } else if (value instanceof Boolean) {
+ int v = ((Boolean) value).booleanValue() ? 1 : 0;
+ bv.put12('Z', cw.newInteger(v).index);
+ } else if (value instanceof Character) {
+ bv.put12('C', cw.newInteger(((Character) value).charValue()).index);
+ } else if (value instanceof Short) {
+ bv.put12('S', cw.newInteger(((Short) value).shortValue()).index);
+ } else if (value instanceof Type) {
+ bv.put12('c', cw.newUTF8(((Type) value).getDescriptor()));
+ } else if (value instanceof byte[]) {
+ byte[] v = (byte[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('B', cw.newInteger(v[i]).index);
+ }
+ } else if (value instanceof boolean[]) {
+ boolean[] v = (boolean[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('Z', cw.newInteger(v[i] ? 1 : 0).index);
+ }
+ } else if (value instanceof short[]) {
+ short[] v = (short[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('S', cw.newInteger(v[i]).index);
+ }
+ } else if (value instanceof char[]) {
+ char[] v = (char[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('C', cw.newInteger(v[i]).index);
+ }
+ } else if (value instanceof int[]) {
+ int[] v = (int[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('I', cw.newInteger(v[i]).index);
+ }
+ } else if (value instanceof long[]) {
+ long[] v = (long[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('J', cw.newLong(v[i]).index);
+ }
+ } else if (value instanceof float[]) {
+ float[] v = (float[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('F', cw.newFloat(v[i]).index);
+ }
+ } else if (value instanceof double[]) {
+ double[] v = (double[]) value;
+ bv.put12('[', v.length);
+ for (int i = 0; i < v.length; i++) {
+ bv.put12('D', cw.newDouble(v[i]).index);
+ }
+ } else {
+ Item i = cw.newConstItem(value);
+ bv.put12(".s.IFJDCS".charAt(i.type), i.index);
+ }
+ }
+
+ @Override
+ public void visitEnum(
+ final String name,
+ final String desc,
+ final String value)
+ {
+ ++size;
+ if (named) {
+ bv.putShort(cw.newUTF8(name));
+ }
+ bv.put12('e', cw.newUTF8(desc)).putShort(cw.newUTF8(value));
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String name,
+ final String desc)
+ {
+ ++size;
+ if (named) {
+ bv.putShort(cw.newUTF8(name));
+ }
+ // write tag and type, and reserve space for values count
+ bv.put12('@', cw.newUTF8(desc)).putShort(0);
+ return new AnnotationWriter(cw, true, bv, bv, bv.length - 2);
+ }
+
+ @Override
+ public AnnotationVisitor visitArray(final String name) {
+ ++size;
+ if (named) {
+ bv.putShort(cw.newUTF8(name));
+ }
+ // write tag, and reserve space for array size
+ bv.put12('[', 0);
+ return new AnnotationWriter(cw, false, bv, bv, bv.length - 2);
+ }
+
+ @Override
+ public void visitEnd() {
+ if (parent != null) {
+ byte[] data = parent.data;
+ data[offset] = (byte) (size >>> 8);
+ data[offset + 1] = (byte) size;
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the size of this annotation writer list.
+ *
+ * @return the size of this annotation writer list.
+ */
+ int getSize() {
+ int size = 0;
+ AnnotationWriter aw = this;
+ while (aw != null) {
+ size += aw.bv.length;
+ aw = aw.next;
+ }
+ return size;
+ }
+
+ /**
+ * Puts the annotations of this annotation writer list into the given byte
+ * vector.
+ *
+ * @param out where the annotations must be put.
+ */
+ void put(final ByteVector out) {
+ int n = 0;
+ int size = 2;
+ AnnotationWriter aw = this;
+ AnnotationWriter last = null;
+ while (aw != null) {
+ ++n;
+ size += aw.bv.length;
+ aw.visitEnd(); // in case user forgot to call visitEnd
+ aw.prev = last;
+ last = aw;
+ aw = aw.next;
+ }
+ out.putInt(size);
+ out.putShort(n);
+ aw = last;
+ while (aw != null) {
+ out.putByteArray(aw.bv.data, 0, aw.bv.length);
+ aw = aw.prev;
+ }
+ }
+
+ /**
+ * Puts the given annotation lists into the given byte vector.
+ *
+ * @param panns an array of annotation writer lists.
+ * @param off index of the first annotation to be written.
+ * @param out where the annotations must be put.
+ */
+ static void put(
+ final AnnotationWriter[] panns,
+ final int off,
+ final ByteVector out)
+ {
+ int size = 1 + 2 * (panns.length - off);
+ for (int i = off; i < panns.length; ++i) {
+ size += panns[i] == null ? 0 : panns[i].getSize();
+ }
+ out.putInt(size).putByte(panns.length - off);
+ for (int i = off; i < panns.length; ++i) {
+ AnnotationWriter aw = panns[i];
+ AnnotationWriter last = null;
+ int n = 0;
+ while (aw != null) {
+ ++n;
+ aw.visitEnd(); // in case user forgot to call visitEnd
+ aw.prev = last;
+ last = aw;
+ aw = aw.next;
+ }
+ out.putShort(n);
+ aw = last;
+ while (aw != null) {
+ out.putByteArray(aw.bv.data, 0, aw.bv.length);
+ aw = aw.prev;
+ }
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/Attribute.java b/src/asm/scala/tools/asm/Attribute.java
new file mode 100644
index 0000000..408f21c
--- /dev/null
+++ b/src/asm/scala/tools/asm/Attribute.java
@@ -0,0 +1,254 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A non standard class, field, method or code attribute.
+ *
+ * @author Eric Bruneton
+ * @author Eugene Kuleshov
+ */
+public class Attribute {
+
+ /**
+ * The type of this attribute.
+ */
+ public final String type;
+
+ /**
+ * The raw value of this attribute, used only for unknown attributes.
+ */
+ byte[] value;
+
+ /**
+ * The next attribute in this attribute list. May be <tt>null</tt>.
+ */
+ Attribute next;
+
+ /**
+ * Constructs a new empty attribute.
+ *
+ * @param type the type of the attribute.
+ */
+ protected Attribute(final String type) {
+ this.type = type;
+ }
+
+ /**
+ * Returns <tt>true</tt> if this type of attribute is unknown. The default
+ * implementation of this method always returns <tt>true</tt>.
+ *
+ * @return <tt>true</tt> if this type of attribute is unknown.
+ */
+ public boolean isUnknown() {
+ return true;
+ }
+
+ /**
+ * Returns <tt>true</tt> if this type of attribute is a code attribute.
+ *
+ * @return <tt>true</tt> if this type of attribute is a code attribute.
+ */
+ public boolean isCodeAttribute() {
+ return false;
+ }
+
+ /**
+ * Returns the labels corresponding to this attribute.
+ *
+ * @return the labels corresponding to this attribute, or <tt>null</tt> if
+ * this attribute is not a code attribute that contains labels.
+ */
+ protected Label[] getLabels() {
+ return null;
+ }
+
+ /**
+ * Reads a {@link #type type} attribute. This method must return a <i>new</i>
+ * {@link Attribute} object, of type {@link #type type}, corresponding to
+ * the <tt>len</tt> bytes starting at the given offset, in the given class
+ * reader.
+ *
+ * @param cr the class that contains the attribute to be read.
+ * @param off index of the first byte of the attribute's content in {@link
+ * ClassReader#b cr.b}. The 6 attribute header bytes, containing the
+ * type and the length of the attribute, are not taken into account
+ * here.
+ * @param len the length of the attribute's content.
+ * @param buf buffer to be used to call
+ * {@link ClassReader#readUTF8 readUTF8},
+ * {@link ClassReader#readClass(int,char[]) readClass} or
+ * {@link ClassReader#readConst readConst}.
+ * @param codeOff index of the first byte of code's attribute content in
+ * {@link ClassReader#b cr.b}, or -1 if the attribute to be read is
+ * not a code attribute. The 6 attribute header bytes, containing the
+ * type and the length of the attribute, are not taken into account
+ * here.
+ * @param labels the labels of the method's code, or <tt>null</tt> if the
+ * attribute to be read is not a code attribute.
+ * @return a <i>new</i> {@link Attribute} object corresponding to the given
+ * bytes.
+ */
+ protected Attribute read(
+ final ClassReader cr,
+ final int off,
+ final int len,
+ final char[] buf,
+ final int codeOff,
+ final Label[] labels)
+ {
+ Attribute attr = new Attribute(type);
+ attr.value = new byte[len];
+ System.arraycopy(cr.b, off, attr.value, 0, len);
+ return attr;
+ }
+
+ /**
+ * Returns the byte array form of this attribute.
+ *
+ * @param cw the class to which this attribute must be added. This parameter
+ * can be used to add to the constant pool of this class the items
+ * that corresponds to this attribute.
+ * @param code the bytecode of the method corresponding to this code
+ * attribute, or <tt>null</tt> if this attribute is not a code
+ * attributes.
+ * @param len the length of the bytecode of the method corresponding to this
+ * code attribute, or <tt>null</tt> if this attribute is not a code
+ * attribute.
+ * @param maxStack the maximum stack size of the method corresponding to
+ * this code attribute, or -1 if this attribute is not a code
+ * attribute.
+ * @param maxLocals the maximum number of local variables of the method
+ * corresponding to this code attribute, or -1 if this attribute is
+ * not a code attribute.
+ * @return the byte array form of this attribute.
+ */
+ protected ByteVector write(
+ final ClassWriter cw,
+ final byte[] code,
+ final int len,
+ final int maxStack,
+ final int maxLocals)
+ {
+ ByteVector v = new ByteVector();
+ v.data = value;
+ v.length = value.length;
+ return v;
+ }
+
+ /**
+ * Returns the length of the attribute list that begins with this attribute.
+ *
+ * @return the length of the attribute list that begins with this attribute.
+ */
+ final int getCount() {
+ int count = 0;
+ Attribute attr = this;
+ while (attr != null) {
+ count += 1;
+ attr = attr.next;
+ }
+ return count;
+ }
+
+ /**
+ * Returns the size of all the attributes in this attribute list.
+ *
+ * @param cw the class writer to be used to convert the attributes into byte
+ * arrays, with the {@link #write write} method.
+ * @param code the bytecode of the method corresponding to these code
+ * attributes, or <tt>null</tt> if these attributes are not code
+ * attributes.
+ * @param len the length of the bytecode of the method corresponding to
+ * these code attributes, or <tt>null</tt> if these attributes are
+ * not code attributes.
+ * @param maxStack the maximum stack size of the method corresponding to
+ * these code attributes, or -1 if these attributes are not code
+ * attributes.
+ * @param maxLocals the maximum number of local variables of the method
+ * corresponding to these code attributes, or -1 if these attributes
+ * are not code attributes.
+ * @return the size of all the attributes in this attribute list. This size
+ * includes the size of the attribute headers.
+ */
+ final int getSize(
+ final ClassWriter cw,
+ final byte[] code,
+ final int len,
+ final int maxStack,
+ final int maxLocals)
+ {
+ Attribute attr = this;
+ int size = 0;
+ while (attr != null) {
+ cw.newUTF8(attr.type);
+ size += attr.write(cw, code, len, maxStack, maxLocals).length + 6;
+ attr = attr.next;
+ }
+ return size;
+ }
+
+ /**
+ * Writes all the attributes of this attribute list in the given byte
+ * vector.
+ *
+ * @param cw the class writer to be used to convert the attributes into byte
+ * arrays, with the {@link #write write} method.
+ * @param code the bytecode of the method corresponding to these code
+ * attributes, or <tt>null</tt> if these attributes are not code
+ * attributes.
+ * @param len the length of the bytecode of the method corresponding to
+ * these code attributes, or <tt>null</tt> if these attributes are
+ * not code attributes.
+ * @param maxStack the maximum stack size of the method corresponding to
+ * these code attributes, or -1 if these attributes are not code
+ * attributes.
+ * @param maxLocals the maximum number of local variables of the method
+ * corresponding to these code attributes, or -1 if these attributes
+ * are not code attributes.
+ * @param out where the attributes must be written.
+ */
+ final void put(
+ final ClassWriter cw,
+ final byte[] code,
+ final int len,
+ final int maxStack,
+ final int maxLocals,
+ final ByteVector out)
+ {
+ Attribute attr = this;
+ while (attr != null) {
+ ByteVector b = attr.write(cw, code, len, maxStack, maxLocals);
+ out.putShort(cw.newUTF8(attr.type)).putInt(b.length);
+ out.putByteArray(b.data, 0, b.length);
+ attr = attr.next;
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/ByteVector.java b/src/asm/scala/tools/asm/ByteVector.java
new file mode 100644
index 0000000..5081f01
--- /dev/null
+++ b/src/asm/scala/tools/asm/ByteVector.java
@@ -0,0 +1,293 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A dynamically extensible vector of bytes. This class is roughly equivalent to
+ * a DataOutputStream on top of a ByteArrayOutputStream, but is more efficient.
+ *
+ * @author Eric Bruneton
+ */
+public class ByteVector {
+
+ /**
+ * The content of this vector.
+ */
+ byte[] data;
+
+ /**
+ * Actual number of bytes in this vector.
+ */
+ int length;
+
+ /**
+ * Constructs a new {@link ByteVector ByteVector} with a default initial
+ * size.
+ */
+ public ByteVector() {
+ data = new byte[64];
+ }
+
+ /**
+ * Constructs a new {@link ByteVector ByteVector} with the given initial
+ * size.
+ *
+ * @param initialSize the initial size of the byte vector to be constructed.
+ */
+ public ByteVector(final int initialSize) {
+ data = new byte[initialSize];
+ }
+
+ /**
+ * Puts a byte into this byte vector. The byte vector is automatically
+ * enlarged if necessary.
+ *
+ * @param b a byte.
+ * @return this byte vector.
+ */
+ public ByteVector putByte(final int b) {
+ int length = this.length;
+ if (length + 1 > data.length) {
+ enlarge(1);
+ }
+ data[length++] = (byte) b;
+ this.length = length;
+ return this;
+ }
+
+ /**
+ * Puts two bytes into this byte vector. The byte vector is automatically
+ * enlarged if necessary.
+ *
+ * @param b1 a byte.
+ * @param b2 another byte.
+ * @return this byte vector.
+ */
+ ByteVector put11(final int b1, final int b2) {
+ int length = this.length;
+ if (length + 2 > data.length) {
+ enlarge(2);
+ }
+ byte[] data = this.data;
+ data[length++] = (byte) b1;
+ data[length++] = (byte) b2;
+ this.length = length;
+ return this;
+ }
+
+ /**
+ * Puts a short into this byte vector. The byte vector is automatically
+ * enlarged if necessary.
+ *
+ * @param s a short.
+ * @return this byte vector.
+ */
+ public ByteVector putShort(final int s) {
+ int length = this.length;
+ if (length + 2 > data.length) {
+ enlarge(2);
+ }
+ byte[] data = this.data;
+ data[length++] = (byte) (s >>> 8);
+ data[length++] = (byte) s;
+ this.length = length;
+ return this;
+ }
+
+ /**
+ * Puts a byte and a short into this byte vector. The byte vector is
+ * automatically enlarged if necessary.
+ *
+ * @param b a byte.
+ * @param s a short.
+ * @return this byte vector.
+ */
+ ByteVector put12(final int b, final int s) {
+ int length = this.length;
+ if (length + 3 > data.length) {
+ enlarge(3);
+ }
+ byte[] data = this.data;
+ data[length++] = (byte) b;
+ data[length++] = (byte) (s >>> 8);
+ data[length++] = (byte) s;
+ this.length = length;
+ return this;
+ }
+
+ /**
+ * Puts an int into this byte vector. The byte vector is automatically
+ * enlarged if necessary.
+ *
+ * @param i an int.
+ * @return this byte vector.
+ */
+ public ByteVector putInt(final int i) {
+ int length = this.length;
+ if (length + 4 > data.length) {
+ enlarge(4);
+ }
+ byte[] data = this.data;
+ data[length++] = (byte) (i >>> 24);
+ data[length++] = (byte) (i >>> 16);
+ data[length++] = (byte) (i >>> 8);
+ data[length++] = (byte) i;
+ this.length = length;
+ return this;
+ }
+
+ /**
+ * Puts a long into this byte vector. The byte vector is automatically
+ * enlarged if necessary.
+ *
+ * @param l a long.
+ * @return this byte vector.
+ */
+ public ByteVector putLong(final long l) {
+ int length = this.length;
+ if (length + 8 > data.length) {
+ enlarge(8);
+ }
+ byte[] data = this.data;
+ int i = (int) (l >>> 32);
+ data[length++] = (byte) (i >>> 24);
+ data[length++] = (byte) (i >>> 16);
+ data[length++] = (byte) (i >>> 8);
+ data[length++] = (byte) i;
+ i = (int) l;
+ data[length++] = (byte) (i >>> 24);
+ data[length++] = (byte) (i >>> 16);
+ data[length++] = (byte) (i >>> 8);
+ data[length++] = (byte) i;
+ this.length = length;
+ return this;
+ }
+
+ /**
+ * Puts an UTF8 string into this byte vector. The byte vector is
+ * automatically enlarged if necessary.
+ *
+ * @param s a String.
+ * @return this byte vector.
+ */
+ public ByteVector putUTF8(final String s) {
+ int charLength = s.length();
+ int len = length;
+ if (len + 2 + charLength > data.length) {
+ enlarge(2 + charLength);
+ }
+ byte[] data = this.data;
+ // optimistic algorithm: instead of computing the byte length and then
+ // serializing the string (which requires two loops), we assume the byte
+ // length is equal to char length (which is the most frequent case), and
+ // we start serializing the string right away. During the serialization,
+ // if we find that this assumption is wrong, we continue with the
+ // general method.
+ data[len++] = (byte) (charLength >>> 8);
+ data[len++] = (byte) charLength;
+ for (int i = 0; i < charLength; ++i) {
+ char c = s.charAt(i);
+ if (c >= '\001' && c <= '\177') {
+ data[len++] = (byte) c;
+ } else {
+ int byteLength = i;
+ for (int j = i; j < charLength; ++j) {
+ c = s.charAt(j);
+ if (c >= '\001' && c <= '\177') {
+ byteLength++;
+ } else if (c > '\u07FF') {
+ byteLength += 3;
+ } else {
+ byteLength += 2;
+ }
+ }
+ data[length] = (byte) (byteLength >>> 8);
+ data[length + 1] = (byte) byteLength;
+ if (length + 2 + byteLength > data.length) {
+ length = len;
+ enlarge(2 + byteLength);
+ data = this.data;
+ }
+ for (int j = i; j < charLength; ++j) {
+ c = s.charAt(j);
+ if (c >= '\001' && c <= '\177') {
+ data[len++] = (byte) c;
+ } else if (c > '\u07FF') {
+ data[len++] = (byte) (0xE0 | c >> 12 & 0xF);
+ data[len++] = (byte) (0x80 | c >> 6 & 0x3F);
+ data[len++] = (byte) (0x80 | c & 0x3F);
+ } else {
+ data[len++] = (byte) (0xC0 | c >> 6 & 0x1F);
+ data[len++] = (byte) (0x80 | c & 0x3F);
+ }
+ }
+ break;
+ }
+ }
+ length = len;
+ return this;
+ }
+
+ /**
+ * Puts an array of bytes into this byte vector. The byte vector is
+ * automatically enlarged if necessary.
+ *
+ * @param b an array of bytes. May be <tt>null</tt> to put <tt>len</tt>
+ * null bytes into this byte vector.
+ * @param off index of the fist byte of b that must be copied.
+ * @param len number of bytes of b that must be copied.
+ * @return this byte vector.
+ */
+ public ByteVector putByteArray(final byte[] b, final int off, final int len)
+ {
+ if (length + len > data.length) {
+ enlarge(len);
+ }
+ if (b != null) {
+ System.arraycopy(b, off, data, length, len);
+ }
+ length += len;
+ return this;
+ }
+
+ /**
+ * Enlarge this byte vector so that it can receive n more bytes.
+ *
+ * @param size number of additional bytes that this byte vector should be
+ * able to receive.
+ */
+ private void enlarge(final int size) {
+ int length1 = 2 * data.length;
+ int length2 = length + size;
+ byte[] newData = new byte[length1 > length2 ? length1 : length2];
+ System.arraycopy(data, 0, newData, 0, length);
+ data = newData;
+ }
+}
diff --git a/src/asm/scala/tools/asm/ClassReader.java b/src/asm/scala/tools/asm/ClassReader.java
new file mode 100644
index 0000000..f3287d4
--- /dev/null
+++ b/src/asm/scala/tools/asm/ClassReader.java
@@ -0,0 +1,2216 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * A Java class parser to make a {@link ClassVisitor} visit an existing class.
+ * This class parses a byte array conforming to the Java class file format and
+ * calls the appropriate visit methods of a given class visitor for each field,
+ * method and bytecode instruction encountered.
+ *
+ * @author Eric Bruneton
+ * @author Eugene Kuleshov
+ */
+public class ClassReader {
+
+ /**
+ * True to enable signatures support.
+ */
+ static final boolean SIGNATURES = true;
+
+ /**
+ * True to enable annotations support.
+ */
+ static final boolean ANNOTATIONS = true;
+
+ /**
+ * True to enable stack map frames support.
+ */
+ static final boolean FRAMES = true;
+
+ /**
+ * True to enable bytecode writing support.
+ */
+ static final boolean WRITER = true;
+
+ /**
+ * True to enable JSR_W and GOTO_W support.
+ */
+ static final boolean RESIZE = true;
+
+ /**
+ * Flag to skip method code. If this class is set <code>CODE</code>
+ * attribute won't be visited. This can be used, for example, to retrieve
+ * annotations for methods and method parameters.
+ */
+ public static final int SKIP_CODE = 1;
+
+ /**
+ * Flag to skip the debug information in the class. If this flag is set the
+ * debug information of the class is not visited, i.e. the
+ * {@link MethodVisitor#visitLocalVariable visitLocalVariable} and
+ * {@link MethodVisitor#visitLineNumber visitLineNumber} methods will not be
+ * called.
+ */
+ public static final int SKIP_DEBUG = 2;
+
+ /**
+ * Flag to skip the stack map frames in the class. If this flag is set the
+ * stack map frames of the class is not visited, i.e. the
+ * {@link MethodVisitor#visitFrame visitFrame} method will not be called.
+ * This flag is useful when the {@link ClassWriter#COMPUTE_FRAMES} option is
+ * used: it avoids visiting frames that will be ignored and recomputed from
+ * scratch in the class writer.
+ */
+ public static final int SKIP_FRAMES = 4;
+
+ /**
+ * Flag to expand the stack map frames. By default stack map frames are
+ * visited in their original format (i.e. "expanded" for classes whose
+ * version is less than V1_6, and "compressed" for the other classes). If
+ * this flag is set, stack map frames are always visited in expanded format
+ * (this option adds a decompression/recompression step in ClassReader and
+ * ClassWriter which degrades performances quite a lot).
+ */
+ public static final int EXPAND_FRAMES = 8;
+
+ /**
+ * The class to be parsed. <i>The content of this array must not be
+ * modified. This field is intended for {@link Attribute} sub classes, and
+ * is normally not needed by class generators or adapters.</i>
+ */
+ public final byte[] b;
+
+ /**
+ * The start index of each constant pool item in {@link #b b}, plus one.
+ * The one byte offset skips the constant pool item tag that indicates its
+ * type.
+ */
+ private final int[] items;
+
+ /**
+ * The String objects corresponding to the CONSTANT_Utf8 items. This cache
+ * avoids multiple parsing of a given CONSTANT_Utf8 constant pool item,
+ * which GREATLY improves performances (by a factor 2 to 3). This caching
+ * strategy could be extended to all constant pool items, but its benefit
+ * would not be so great for these items (because they are much less
+ * expensive to parse than CONSTANT_Utf8 items).
+ */
+ private final String[] strings;
+
+ /**
+ * Maximum length of the strings contained in the constant pool of the
+ * class.
+ */
+ private final int maxStringLength;
+
+ /**
+ * Start index of the class header information (access, name...) in
+ * {@link #b b}.
+ */
+ public final int header;
+
+ // ------------------------------------------------------------------------
+ // Constructors
+ // ------------------------------------------------------------------------
+
+ /**
+ * Constructs a new {@link ClassReader} object.
+ *
+ * @param b the bytecode of the class to be read.
+ */
+ public ClassReader(final byte[] b) {
+ this(b, 0, b.length);
+ }
+
+ /**
+ * Constructs a new {@link ClassReader} object.
+ *
+ * @param b the bytecode of the class to be read.
+ * @param off the start offset of the class data.
+ * @param len the length of the class data.
+ */
+ public ClassReader(final byte[] b, final int off, final int len) {
+ this.b = b;
+ // checks the class version
+ if (readShort(6) > Opcodes.V1_7) {
+ throw new IllegalArgumentException();
+ }
+ // parses the constant pool
+ items = new int[readUnsignedShort(off + 8)];
+ int n = items.length;
+ strings = new String[n];
+ int max = 0;
+ int index = off + 10;
+ for (int i = 1; i < n; ++i) {
+ items[i] = index + 1;
+ int size;
+ switch (b[index]) {
+ case ClassWriter.FIELD:
+ case ClassWriter.METH:
+ case ClassWriter.IMETH:
+ case ClassWriter.INT:
+ case ClassWriter.FLOAT:
+ case ClassWriter.NAME_TYPE:
+ case ClassWriter.INDY:
+ size = 5;
+ break;
+ case ClassWriter.LONG:
+ case ClassWriter.DOUBLE:
+ size = 9;
+ ++i;
+ break;
+ case ClassWriter.UTF8:
+ size = 3 + readUnsignedShort(index + 1);
+ if (size > max) {
+ max = size;
+ }
+ break;
+ case ClassWriter.HANDLE:
+ size = 4;
+ break;
+ // case ClassWriter.CLASS:
+ // case ClassWriter.STR:
+ // case ClassWriter.MTYPE
+ default:
+ size = 3;
+ break;
+ }
+ index += size;
+ }
+ maxStringLength = max;
+ // the class header information starts just after the constant pool
+ header = index;
+ }
+
+ /**
+ * Returns the class's access flags (see {@link Opcodes}). This value may
+ * not reflect Deprecated and Synthetic flags when bytecode is before 1.5
+ * and those flags are represented by attributes.
+ *
+ * @return the class access flags
+ *
+ * @see ClassVisitor#visit(int, int, String, String, String, String[])
+ */
+ public int getAccess() {
+ return readUnsignedShort(header);
+ }
+
+ /**
+ * Returns the internal name of the class (see
+ * {@link Type#getInternalName() getInternalName}).
+ *
+ * @return the internal class name
+ *
+ * @see ClassVisitor#visit(int, int, String, String, String, String[])
+ */
+ public String getClassName() {
+ return readClass(header + 2, new char[maxStringLength]);
+ }
+
+ /**
+ * Returns the internal of name of the super class (see
+ * {@link Type#getInternalName() getInternalName}). For interfaces, the
+ * super class is {@link Object}.
+ *
+ * @return the internal name of super class, or <tt>null</tt> for
+ * {@link Object} class.
+ *
+ * @see ClassVisitor#visit(int, int, String, String, String, String[])
+ */
+ public String getSuperName() {
+ int n = items[readUnsignedShort(header + 4)];
+ return n == 0 ? null : readUTF8(n, new char[maxStringLength]);
+ }
+
+ /**
+ * Returns the internal names of the class's interfaces (see
+ * {@link Type#getInternalName() getInternalName}).
+ *
+ * @return the array of internal names for all implemented interfaces or
+ * <tt>null</tt>.
+ *
+ * @see ClassVisitor#visit(int, int, String, String, String, String[])
+ */
+ public String[] getInterfaces() {
+ int index = header + 6;
+ int n = readUnsignedShort(index);
+ String[] interfaces = new String[n];
+ if (n > 0) {
+ char[] buf = new char[maxStringLength];
+ for (int i = 0; i < n; ++i) {
+ index += 2;
+ interfaces[i] = readClass(index, buf);
+ }
+ }
+ return interfaces;
+ }
+
+ /**
+ * Copies the constant pool data into the given {@link ClassWriter}. Should
+ * be called before the {@link #accept(ClassVisitor,int)} method.
+ *
+ * @param classWriter the {@link ClassWriter} to copy constant pool into.
+ */
+ void copyPool(final ClassWriter classWriter) {
+ char[] buf = new char[maxStringLength];
+ int ll = items.length;
+ Item[] items2 = new Item[ll];
+ for (int i = 1; i < ll; i++) {
+ int index = items[i];
+ int tag = b[index - 1];
+ Item item = new Item(i);
+ int nameType;
+ switch (tag) {
+ case ClassWriter.FIELD:
+ case ClassWriter.METH:
+ case ClassWriter.IMETH:
+ nameType = items[readUnsignedShort(index + 2)];
+ item.set(tag,
+ readClass(index, buf),
+ readUTF8(nameType, buf),
+ readUTF8(nameType + 2, buf));
+ break;
+
+ case ClassWriter.INT:
+ item.set(readInt(index));
+ break;
+
+ case ClassWriter.FLOAT:
+ item.set(Float.intBitsToFloat(readInt(index)));
+ break;
+
+ case ClassWriter.NAME_TYPE:
+ item.set(tag,
+ readUTF8(index, buf),
+ readUTF8(index + 2, buf),
+ null);
+ break;
+
+ case ClassWriter.LONG:
+ item.set(readLong(index));
+ ++i;
+ break;
+
+ case ClassWriter.DOUBLE:
+ item.set(Double.longBitsToDouble(readLong(index)));
+ ++i;
+ break;
+
+ case ClassWriter.UTF8: {
+ String s = strings[i];
+ if (s == null) {
+ index = items[i];
+ s = strings[i] = readUTF(index + 2,
+ readUnsignedShort(index),
+ buf);
+ }
+ item.set(tag, s, null, null);
+ }
+ break;
+
+ case ClassWriter.HANDLE: {
+ int fieldOrMethodRef = items[readUnsignedShort(index + 1)];
+ nameType = items[readUnsignedShort(fieldOrMethodRef + 2)];
+ item.set(ClassWriter.HANDLE_BASE + readByte(index),
+ readClass(fieldOrMethodRef, buf),
+ readUTF8(nameType, buf),
+ readUTF8(nameType + 2, buf));
+
+ }
+ break;
+
+
+ case ClassWriter.INDY:
+ if (classWriter.bootstrapMethods == null) {
+ copyBootstrapMethods(classWriter, items2, buf);
+ }
+ nameType = items[readUnsignedShort(index + 2)];
+ item.set(readUTF8(nameType, buf),
+ readUTF8(nameType + 2, buf),
+ readUnsignedShort(index));
+ break;
+
+
+ // case ClassWriter.STR:
+ // case ClassWriter.CLASS:
+ // case ClassWriter.MTYPE
+ default:
+ item.set(tag, readUTF8(index, buf), null, null);
+ break;
+ }
+
+ int index2 = item.hashCode % items2.length;
+ item.next = items2[index2];
+ items2[index2] = item;
+ }
+
+ int off = items[1] - 1;
+ classWriter.pool.putByteArray(b, off, header - off);
+ classWriter.items = items2;
+ classWriter.threshold = (int) (0.75d * ll);
+ classWriter.index = ll;
+ }
+
+ private void copyBootstrapMethods(ClassWriter classWriter, Item[] items2, char[] buf) {
+ int i, j, k, u, v;
+
+ // skip class header
+ v = header;
+ v += 8 + (readUnsignedShort(v + 6) << 1);
+
+ // skips fields and methods
+ i = readUnsignedShort(v);
+ v += 2;
+ for (; i > 0; --i) {
+ j = readUnsignedShort(v + 6);
+ v += 8;
+ for (; j > 0; --j) {
+ v += 6 + readInt(v + 2);
+ }
+ }
+ i = readUnsignedShort(v);
+ v += 2;
+ for (; i > 0; --i) {
+ j = readUnsignedShort(v + 6);
+ v += 8;
+ for (; j > 0; --j) {
+ v += 6 + readInt(v + 2);
+ }
+ }
+
+ // read class attributes
+ i = readUnsignedShort(v);
+ v += 2;
+ for (; i > 0; --i) {
+ String attrName = readUTF8(v, buf);
+ int size = readInt(v + 2);
+ if ("BootstrapMethods".equals(attrName)) {
+ int boostrapMethodCount = readUnsignedShort(v + 6);
+ int x = v + 8;
+ for (j = 0; j < boostrapMethodCount; j++) {
+ int hashCode = readConst(readUnsignedShort(x), buf).hashCode();
+ k = readUnsignedShort(x + 2);
+ u = x + 4;
+ for(; k > 0; --k) {
+ hashCode ^= readConst(readUnsignedShort(u), buf).hashCode();
+ u += 2;
+ }
+ Item item = new Item(j);
+ item.set(x - v - 8, hashCode & 0x7FFFFFFF);
+
+ int index2 = item.hashCode % items2.length;
+ item.next = items2[index2];
+ items2[index2] = item;
+
+ x = u;
+ }
+
+ classWriter.bootstrapMethodsCount = boostrapMethodCount;
+ ByteVector bootstrapMethods = new ByteVector(size + 62);
+ bootstrapMethods.putByteArray(b, v + 8, size - 2);
+ classWriter.bootstrapMethods = bootstrapMethods;
+ return;
+ }
+ v += 6 + size;
+ }
+
+ // we are in trouble !!!
+ }
+
+ /**
+ * Constructs a new {@link ClassReader} object.
+ *
+ * @param is an input stream from which to read the class.
+ * @throws IOException if a problem occurs during reading.
+ */
+ public ClassReader(final InputStream is) throws IOException {
+ this(readClass(is, false));
+ }
+
+ /**
+ * Constructs a new {@link ClassReader} object.
+ *
+ * @param name the binary qualified name of the class to be read.
+ * @throws IOException if an exception occurs during reading.
+ */
+ public ClassReader(final String name) throws IOException {
+ this(readClass(ClassLoader.getSystemResourceAsStream(name.replace('.', '/')
+ + ".class"), true));
+ }
+
+ /**
+ * Reads the bytecode of a class.
+ *
+ * @param is an input stream from which to read the class.
+ * @param close true to close the input stream after reading.
+ * @return the bytecode read from the given input stream.
+ * @throws IOException if a problem occurs during reading.
+ */
+ private static byte[] readClass(final InputStream is, boolean close)
+ throws IOException
+ {
+ if (is == null) {
+ throw new IOException("Class not found");
+ }
+ try {
+ byte[] b = new byte[is.available()];
+ int len = 0;
+ while (true) {
+ int n = is.read(b, len, b.length - len);
+ if (n == -1) {
+ if (len < b.length) {
+ byte[] c = new byte[len];
+ System.arraycopy(b, 0, c, 0, len);
+ b = c;
+ }
+ return b;
+ }
+ len += n;
+ if (len == b.length) {
+ int last = is.read();
+ if (last < 0) {
+ return b;
+ }
+ byte[] c = new byte[b.length + 1000];
+ System.arraycopy(b, 0, c, 0, len);
+ c[len++] = (byte) last;
+ b = c;
+ }
+ }
+ } finally {
+ if (close) {
+ is.close();
+ }
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Public methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Makes the given visitor visit the Java class of this {@link ClassReader}.
+ * This class is the one specified in the constructor (see
+ * {@link #ClassReader(byte[]) ClassReader}).
+ *
+ * @param classVisitor the visitor that must visit this class.
+ * @param flags option flags that can be used to modify the default behavior
+ * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES},
+ * {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
+ */
+ public void accept(final ClassVisitor classVisitor, final int flags) {
+ accept(classVisitor, new Attribute[0], flags);
+ }
+
+ /**
+ * Makes the given visitor visit the Java class of this {@link ClassReader}.
+ * This class is the one specified in the constructor (see
+ * {@link #ClassReader(byte[]) ClassReader}).
+ *
+ * @param classVisitor the visitor that must visit this class.
+ * @param attrs prototypes of the attributes that must be parsed during the
+ * visit of the class. Any attribute whose type is not equal to the
+ * type of one the prototypes will not be parsed: its byte array
+ * value will be passed unchanged to the ClassWriter. <i>This may
+ * corrupt it if this value contains references to the constant pool,
+ * or has syntactic or semantic links with a class element that has
+ * been transformed by a class adapter between the reader and the
+ * writer</i>.
+ * @param flags option flags that can be used to modify the default behavior
+ * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES},
+ * {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
+ */
+ public void accept(
+ final ClassVisitor classVisitor,
+ final Attribute[] attrs,
+ final int flags)
+ {
+ byte[] b = this.b; // the bytecode array
+ char[] c = new char[maxStringLength]; // buffer used to read strings
+ int i, j, k; // loop variables
+ int u, v, w; // indexes in b
+ Attribute attr;
+
+ int access;
+ String name;
+ String desc;
+ String attrName;
+ String signature;
+ int anns = 0;
+ int ianns = 0;
+ Attribute cattrs = null;
+
+ // visits the header
+ u = header;
+ access = readUnsignedShort(u);
+ name = readClass(u + 2, c);
+ v = items[readUnsignedShort(u + 4)];
+ String superClassName = v == 0 ? null : readUTF8(v, c);
+ String[] implementedItfs = new String[readUnsignedShort(u + 6)];
+ w = 0;
+ u += 8;
+ for (i = 0; i < implementedItfs.length; ++i) {
+ implementedItfs[i] = readClass(u, c);
+ u += 2;
+ }
+
+ boolean skipCode = (flags & SKIP_CODE) != 0;
+ boolean skipDebug = (flags & SKIP_DEBUG) != 0;
+ boolean unzip = (flags & EXPAND_FRAMES) != 0;
+
+ // skips fields and methods
+ v = u;
+ i = readUnsignedShort(v);
+ v += 2;
+ for (; i > 0; --i) {
+ j = readUnsignedShort(v + 6);
+ v += 8;
+ for (; j > 0; --j) {
+ v += 6 + readInt(v + 2);
+ }
+ }
+ i = readUnsignedShort(v);
+ v += 2;
+ for (; i > 0; --i) {
+ j = readUnsignedShort(v + 6);
+ v += 8;
+ for (; j > 0; --j) {
+ v += 6 + readInt(v + 2);
+ }
+ }
+ // reads the class's attributes
+ signature = null;
+ String sourceFile = null;
+ String sourceDebug = null;
+ String enclosingOwner = null;
+ String enclosingName = null;
+ String enclosingDesc = null;
+ int[] bootstrapMethods = null; // start indexed of the bsms
+
+ i = readUnsignedShort(v);
+ v += 2;
+ for (; i > 0; --i) {
+ attrName = readUTF8(v, c);
+ // tests are sorted in decreasing frequency order
+ // (based on frequencies observed on typical classes)
+ if ("SourceFile".equals(attrName)) {
+ sourceFile = readUTF8(v + 6, c);
+ } else if ("InnerClasses".equals(attrName)) {
+ w = v + 6;
+ } else if ("EnclosingMethod".equals(attrName)) {
+ enclosingOwner = readClass(v + 6, c);
+ int item = readUnsignedShort(v + 8);
+ if (item != 0) {
+ enclosingName = readUTF8(items[item], c);
+ enclosingDesc = readUTF8(items[item] + 2, c);
+ }
+ } else if (SIGNATURES && "Signature".equals(attrName)) {
+ signature = readUTF8(v + 6, c);
+ } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) {
+ anns = v + 6;
+ } else if ("Deprecated".equals(attrName)) {
+ access |= Opcodes.ACC_DEPRECATED;
+ } else if ("Synthetic".equals(attrName)) {
+ access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
+ } else if ("SourceDebugExtension".equals(attrName)) {
+ int len = readInt(v + 2);
+ sourceDebug = readUTF(v + 6, len, new char[len]);
+ } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) {
+ ianns = v + 6;
+ } else if ("BootstrapMethods".equals(attrName)) {
+ int boostrapMethodCount = readUnsignedShort(v + 6);
+ bootstrapMethods = new int[boostrapMethodCount];
+ int x = v + 8;
+ for (j = 0; j < boostrapMethodCount; j++) {
+ bootstrapMethods[j] = x;
+ x += 2 + readUnsignedShort(x + 2) << 1;
+ }
+ } else {
+ attr = readAttribute(attrs,
+ attrName,
+ v + 6,
+ readInt(v + 2),
+ c,
+ -1,
+ null);
+ if (attr != null) {
+ attr.next = cattrs;
+ cattrs = attr;
+ }
+ }
+ v += 6 + readInt(v + 2);
+ }
+ // calls the visit method
+ classVisitor.visit(readInt(4),
+ access,
+ name,
+ signature,
+ superClassName,
+ implementedItfs);
+
+ // calls the visitSource method
+ if (!skipDebug && (sourceFile != null || sourceDebug != null)) {
+ classVisitor.visitSource(sourceFile, sourceDebug);
+ }
+
+ // calls the visitOuterClass method
+ if (enclosingOwner != null) {
+ classVisitor.visitOuterClass(enclosingOwner,
+ enclosingName,
+ enclosingDesc);
+ }
+
+ // visits the class annotations
+ if (ANNOTATIONS) {
+ for (i = 1; i >= 0; --i) {
+ v = i == 0 ? ianns : anns;
+ if (v != 0) {
+ j = readUnsignedShort(v);
+ v += 2;
+ for (; j > 0; --j) {
+ v = readAnnotationValues(v + 2,
+ c,
+ true,
+ classVisitor.visitAnnotation(readUTF8(v, c), i != 0));
+ }
+ }
+ }
+ }
+
+ // visits the class attributes
+ while (cattrs != null) {
+ attr = cattrs.next;
+ cattrs.next = null;
+ classVisitor.visitAttribute(cattrs);
+ cattrs = attr;
+ }
+
+ // calls the visitInnerClass method
+ if (w != 0) {
+ i = readUnsignedShort(w);
+ w += 2;
+ for (; i > 0; --i) {
+ classVisitor.visitInnerClass(readUnsignedShort(w) == 0
+ ? null
+ : readClass(w, c), readUnsignedShort(w + 2) == 0
+ ? null
+ : readClass(w + 2, c), readUnsignedShort(w + 4) == 0
+ ? null
+ : readUTF8(w + 4, c), readUnsignedShort(w + 6));
+ w += 8;
+ }
+ }
+
+ // visits the fields
+ i = readUnsignedShort(u);
+ u += 2;
+ for (; i > 0; --i) {
+ access = readUnsignedShort(u);
+ name = readUTF8(u + 2, c);
+ desc = readUTF8(u + 4, c);
+ // visits the field's attributes and looks for a ConstantValue
+ // attribute
+ int fieldValueItem = 0;
+ signature = null;
+ anns = 0;
+ ianns = 0;
+ cattrs = null;
+
+ j = readUnsignedShort(u + 6);
+ u += 8;
+ for (; j > 0; --j) {
+ attrName = readUTF8(u, c);
+ // tests are sorted in decreasing frequency order
+ // (based on frequencies observed on typical classes)
+ if ("ConstantValue".equals(attrName)) {
+ fieldValueItem = readUnsignedShort(u + 6);
+ } else if (SIGNATURES && "Signature".equals(attrName)) {
+ signature = readUTF8(u + 6, c);
+ } else if ("Deprecated".equals(attrName)) {
+ access |= Opcodes.ACC_DEPRECATED;
+ } else if ("Synthetic".equals(attrName)) {
+ access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
+ } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) {
+ anns = u + 6;
+ } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) {
+ ianns = u + 6;
+ } else {
+ attr = readAttribute(attrs,
+ attrName,
+ u + 6,
+ readInt(u + 2),
+ c,
+ -1,
+ null);
+ if (attr != null) {
+ attr.next = cattrs;
+ cattrs = attr;
+ }
+ }
+ u += 6 + readInt(u + 2);
+ }
+ // visits the field
+ FieldVisitor fv = classVisitor.visitField(access,
+ name,
+ desc,
+ signature,
+ fieldValueItem == 0 ? null : readConst(fieldValueItem, c));
+ // visits the field annotations and attributes
+ if (fv != null) {
+ if (ANNOTATIONS) {
+ for (j = 1; j >= 0; --j) {
+ v = j == 0 ? ianns : anns;
+ if (v != 0) {
+ k = readUnsignedShort(v);
+ v += 2;
+ for (; k > 0; --k) {
+ v = readAnnotationValues(v + 2,
+ c,
+ true,
+ fv.visitAnnotation(readUTF8(v, c), j != 0));
+ }
+ }
+ }
+ }
+ while (cattrs != null) {
+ attr = cattrs.next;
+ cattrs.next = null;
+ fv.visitAttribute(cattrs);
+ cattrs = attr;
+ }
+ fv.visitEnd();
+ }
+ }
+
+ // visits the methods
+ i = readUnsignedShort(u);
+ u += 2;
+ for (; i > 0; --i) {
+ int u0 = u + 6;
+ access = readUnsignedShort(u);
+ name = readUTF8(u + 2, c);
+ desc = readUTF8(u + 4, c);
+ signature = null;
+ anns = 0;
+ ianns = 0;
+ int dann = 0;
+ int mpanns = 0;
+ int impanns = 0;
+ cattrs = null;
+ v = 0;
+ w = 0;
+
+ // looks for Code and Exceptions attributes
+ j = readUnsignedShort(u + 6);
+ u += 8;
+ for (; j > 0; --j) {
+ attrName = readUTF8(u, c);
+ int attrSize = readInt(u + 2);
+ u += 6;
+ // tests are sorted in decreasing frequency order
+ // (based on frequencies observed on typical classes)
+ if ("Code".equals(attrName)) {
+ if (!skipCode) {
+ v = u;
+ }
+ } else if ("Exceptions".equals(attrName)) {
+ w = u;
+ } else if (SIGNATURES && "Signature".equals(attrName)) {
+ signature = readUTF8(u, c);
+ } else if ("Deprecated".equals(attrName)) {
+ access |= Opcodes.ACC_DEPRECATED;
+ } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) {
+ anns = u;
+ } else if (ANNOTATIONS && "AnnotationDefault".equals(attrName)) {
+ dann = u;
+ } else if ("Synthetic".equals(attrName)) {
+ access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
+ } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) {
+ ianns = u;
+ } else if (ANNOTATIONS && "RuntimeVisibleParameterAnnotations".equals(attrName))
+ {
+ mpanns = u;
+ } else if (ANNOTATIONS && "RuntimeInvisibleParameterAnnotations".equals(attrName))
+ {
+ impanns = u;
+ } else {
+ attr = readAttribute(attrs,
+ attrName,
+ u,
+ attrSize,
+ c,
+ -1,
+ null);
+ if (attr != null) {
+ attr.next = cattrs;
+ cattrs = attr;
+ }
+ }
+ u += attrSize;
+ }
+ // reads declared exceptions
+ String[] exceptions;
+ if (w == 0) {
+ exceptions = null;
+ } else {
+ exceptions = new String[readUnsignedShort(w)];
+ w += 2;
+ for (j = 0; j < exceptions.length; ++j) {
+ exceptions[j] = readClass(w, c);
+ w += 2;
+ }
+ }
+
+ // visits the method's code, if any
+ MethodVisitor mv = classVisitor.visitMethod(access,
+ name,
+ desc,
+ signature,
+ exceptions);
+
+ if (mv != null) {
+ /*
+ * if the returned MethodVisitor is in fact a MethodWriter, it
+ * means there is no method adapter between the reader and the
+ * writer. If, in addition, the writer's constant pool was
+ * copied from this reader (mw.cw.cr == this), and the signature
+ * and exceptions of the method have not been changed, then it
+ * is possible to skip all visit events and just copy the
+ * original code of the method to the writer (the access, name
+ * and descriptor can have been changed, this is not important
+ * since they are not copied as is from the reader).
+ */
+ if (WRITER && mv instanceof MethodWriter) {
+ MethodWriter mw = (MethodWriter) mv;
+ if (mw.cw.cr == this) {
+ if (signature == mw.signature) {
+ boolean sameExceptions = false;
+ if (exceptions == null) {
+ sameExceptions = mw.exceptionCount == 0;
+ } else {
+ if (exceptions.length == mw.exceptionCount) {
+ sameExceptions = true;
+ for (j = exceptions.length - 1; j >= 0; --j)
+ {
+ w -= 2;
+ if (mw.exceptions[j] != readUnsignedShort(w))
+ {
+ sameExceptions = false;
+ break;
+ }
+ }
+ }
+ }
+ if (sameExceptions) {
+ /*
+ * we do not copy directly the code into
+ * MethodWriter to save a byte array copy
+ * operation. The real copy will be done in
+ * ClassWriter.toByteArray().
+ */
+ mw.classReaderOffset = u0;
+ mw.classReaderLength = u - u0;
+ continue;
+ }
+ }
+ }
+ }
+
+ if (ANNOTATIONS && dann != 0) {
+ AnnotationVisitor dv = mv.visitAnnotationDefault();
+ readAnnotationValue(dann, c, null, dv);
+ if (dv != null) {
+ dv.visitEnd();
+ }
+ }
+ if (ANNOTATIONS) {
+ for (j = 1; j >= 0; --j) {
+ w = j == 0 ? ianns : anns;
+ if (w != 0) {
+ k = readUnsignedShort(w);
+ w += 2;
+ for (; k > 0; --k) {
+ w = readAnnotationValues(w + 2,
+ c,
+ true,
+ mv.visitAnnotation(readUTF8(w, c), j != 0));
+ }
+ }
+ }
+ }
+ if (ANNOTATIONS && mpanns != 0) {
+ readParameterAnnotations(mpanns, desc, c, true, mv);
+ }
+ if (ANNOTATIONS && impanns != 0) {
+ readParameterAnnotations(impanns, desc, c, false, mv);
+ }
+ while (cattrs != null) {
+ attr = cattrs.next;
+ cattrs.next = null;
+ mv.visitAttribute(cattrs);
+ cattrs = attr;
+ }
+ }
+
+ if (mv != null && v != 0) {
+ int maxStack = readUnsignedShort(v);
+ int maxLocals = readUnsignedShort(v + 2);
+ int codeLength = readInt(v + 4);
+ v += 8;
+
+ int codeStart = v;
+ int codeEnd = v + codeLength;
+
+ mv.visitCode();
+
+ // 1st phase: finds the labels
+ int label;
+ Label[] labels = new Label[codeLength + 2];
+ readLabel(codeLength + 1, labels);
+ while (v < codeEnd) {
+ w = v - codeStart;
+ int opcode = b[v] & 0xFF;
+ switch (ClassWriter.TYPE[opcode]) {
+ case ClassWriter.NOARG_INSN:
+ case ClassWriter.IMPLVAR_INSN:
+ v += 1;
+ break;
+ case ClassWriter.LABEL_INSN:
+ readLabel(w + readShort(v + 1), labels);
+ v += 3;
+ break;
+ case ClassWriter.LABELW_INSN:
+ readLabel(w + readInt(v + 1), labels);
+ v += 5;
+ break;
+ case ClassWriter.WIDE_INSN:
+ opcode = b[v + 1] & 0xFF;
+ if (opcode == Opcodes.IINC) {
+ v += 6;
+ } else {
+ v += 4;
+ }
+ break;
+ case ClassWriter.TABL_INSN:
+ // skips 0 to 3 padding bytes*
+ v = v + 4 - (w & 3);
+ // reads instruction
+ readLabel(w + readInt(v), labels);
+ j = readInt(v + 8) - readInt(v + 4) + 1;
+ v += 12;
+ for (; j > 0; --j) {
+ readLabel(w + readInt(v), labels);
+ v += 4;
+ }
+ break;
+ case ClassWriter.LOOK_INSN:
+ // skips 0 to 3 padding bytes*
+ v = v + 4 - (w & 3);
+ // reads instruction
+ readLabel(w + readInt(v), labels);
+ j = readInt(v + 4);
+ v += 8;
+ for (; j > 0; --j) {
+ readLabel(w + readInt(v + 4), labels);
+ v += 8;
+ }
+ break;
+ case ClassWriter.VAR_INSN:
+ case ClassWriter.SBYTE_INSN:
+ case ClassWriter.LDC_INSN:
+ v += 2;
+ break;
+ case ClassWriter.SHORT_INSN:
+ case ClassWriter.LDCW_INSN:
+ case ClassWriter.FIELDORMETH_INSN:
+ case ClassWriter.TYPE_INSN:
+ case ClassWriter.IINC_INSN:
+ v += 3;
+ break;
+ case ClassWriter.ITFMETH_INSN:
+ case ClassWriter.INDYMETH_INSN:
+ v += 5;
+ break;
+ // case MANA_INSN:
+ default:
+ v += 4;
+ break;
+ }
+ }
+ // parses the try catch entries
+ j = readUnsignedShort(v);
+ v += 2;
+ for (; j > 0; --j) {
+ Label start = readLabel(readUnsignedShort(v), labels);
+ Label end = readLabel(readUnsignedShort(v + 2), labels);
+ Label handler = readLabel(readUnsignedShort(v + 4), labels);
+ int type = readUnsignedShort(v + 6);
+ if (type == 0) {
+ mv.visitTryCatchBlock(start, end, handler, null);
+ } else {
+ mv.visitTryCatchBlock(start,
+ end,
+ handler,
+ readUTF8(items[type], c));
+ }
+ v += 8;
+ }
+ // parses the local variable, line number tables, and code
+ // attributes
+ int varTable = 0;
+ int varTypeTable = 0;
+ int stackMap = 0;
+ int stackMapSize = 0;
+ int frameCount = 0;
+ int frameMode = 0;
+ int frameOffset = 0;
+ int frameLocalCount = 0;
+ int frameLocalDiff = 0;
+ int frameStackCount = 0;
+ Object[] frameLocal = null;
+ Object[] frameStack = null;
+ boolean zip = true;
+ cattrs = null;
+ j = readUnsignedShort(v);
+ v += 2;
+ for (; j > 0; --j) {
+ attrName = readUTF8(v, c);
+ if ("LocalVariableTable".equals(attrName)) {
+ if (!skipDebug) {
+ varTable = v + 6;
+ k = readUnsignedShort(v + 6);
+ w = v + 8;
+ for (; k > 0; --k) {
+ label = readUnsignedShort(w);
+ if (labels[label] == null) {
+ readLabel(label, labels).status |= Label.DEBUG;
+ }
+ label += readUnsignedShort(w + 2);
+ if (labels[label] == null) {
+ readLabel(label, labels).status |= Label.DEBUG;
+ }
+ w += 10;
+ }
+ }
+ } else if ("LocalVariableTypeTable".equals(attrName)) {
+ varTypeTable = v + 6;
+ } else if ("LineNumberTable".equals(attrName)) {
+ if (!skipDebug) {
+ k = readUnsignedShort(v + 6);
+ w = v + 8;
+ for (; k > 0; --k) {
+ label = readUnsignedShort(w);
+ if (labels[label] == null) {
+ readLabel(label, labels).status |= Label.DEBUG;
+ }
+ labels[label].line = readUnsignedShort(w + 2);
+ w += 4;
+ }
+ }
+ } else if (FRAMES && "StackMapTable".equals(attrName)) {
+ if ((flags & SKIP_FRAMES) == 0) {
+ stackMap = v + 8;
+ stackMapSize = readInt(v + 2);
+ frameCount = readUnsignedShort(v + 6);
+ }
+ /*
+ * here we do not extract the labels corresponding to
+ * the attribute content. This would require a full
+ * parsing of the attribute, which would need to be
+ * repeated in the second phase (see below). Instead the
+ * content of the attribute is read one frame at a time
+ * (i.e. after a frame has been visited, the next frame
+ * is read), and the labels it contains are also
+ * extracted one frame at a time. Thanks to the ordering
+ * of frames, having only a "one frame lookahead" is not
+ * a problem, i.e. it is not possible to see an offset
+ * smaller than the offset of the current insn and for
+ * which no Label exist.
+ */
+ /*
+ * This is not true for UNINITIALIZED type offsets. We
+ * solve this by parsing the stack map table without a
+ * full decoding (see below).
+ */
+ } else if (FRAMES && "StackMap".equals(attrName)) {
+ if ((flags & SKIP_FRAMES) == 0) {
+ stackMap = v + 8;
+ stackMapSize = readInt(v + 2);
+ frameCount = readUnsignedShort(v + 6);
+ zip = false;
+ }
+ /*
+ * IMPORTANT! here we assume that the frames are
+ * ordered, as in the StackMapTable attribute, although
+ * this is not guaranteed by the attribute format.
+ */
+ } else {
+ for (k = 0; k < attrs.length; ++k) {
+ if (attrs[k].type.equals(attrName)) {
+ attr = attrs[k].read(this,
+ v + 6,
+ readInt(v + 2),
+ c,
+ codeStart - 8,
+ labels);
+ if (attr != null) {
+ attr.next = cattrs;
+ cattrs = attr;
+ }
+ }
+ }
+ }
+ v += 6 + readInt(v + 2);
+ }
+
+ // 2nd phase: visits each instruction
+ if (FRAMES && stackMap != 0) {
+ // creates the very first (implicit) frame from the method
+ // descriptor
+ frameLocal = new Object[maxLocals];
+ frameStack = new Object[maxStack];
+ if (unzip) {
+ int local = 0;
+ if ((access & Opcodes.ACC_STATIC) == 0) {
+ if ("<init>".equals(name)) {
+ frameLocal[local++] = Opcodes.UNINITIALIZED_THIS;
+ } else {
+ frameLocal[local++] = readClass(header + 2, c);
+ }
+ }
+ j = 1;
+ loop: while (true) {
+ k = j;
+ switch (desc.charAt(j++)) {
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ frameLocal[local++] = Opcodes.INTEGER;
+ break;
+ case 'F':
+ frameLocal[local++] = Opcodes.FLOAT;
+ break;
+ case 'J':
+ frameLocal[local++] = Opcodes.LONG;
+ break;
+ case 'D':
+ frameLocal[local++] = Opcodes.DOUBLE;
+ break;
+ case '[':
+ while (desc.charAt(j) == '[') {
+ ++j;
+ }
+ if (desc.charAt(j) == 'L') {
+ ++j;
+ while (desc.charAt(j) != ';') {
+ ++j;
+ }
+ }
+ frameLocal[local++] = desc.substring(k, ++j);
+ break;
+ case 'L':
+ while (desc.charAt(j) != ';') {
+ ++j;
+ }
+ frameLocal[local++] = desc.substring(k + 1,
+ j++);
+ break;
+ default:
+ break loop;
+ }
+ }
+ frameLocalCount = local;
+ }
+ /*
+ * for the first explicit frame the offset is not
+ * offset_delta + 1 but only offset_delta; setting the
+ * implicit frame offset to -1 allow the use of the
+ * "offset_delta + 1" rule in all cases
+ */
+ frameOffset = -1;
+ /*
+ * Finds labels for UNINITIALIZED frame types. Instead of
+ * decoding each element of the stack map table, we look
+ * for 3 consecutive bytes that "look like" an UNINITIALIZED
+ * type (tag 8, offset within code bounds, NEW instruction
+ * at this offset). We may find false positives (i.e. not
+ * real UNINITIALIZED types), but this should be rare, and
+ * the only consequence will be the creation of an unneeded
+ * label. This is better than creating a label for each NEW
+ * instruction, and faster than fully decoding the whole
+ * stack map table.
+ */
+ for (j = stackMap; j < stackMap + stackMapSize - 2; ++j) {
+ if (b[j] == 8) { // UNINITIALIZED FRAME TYPE
+ k = readUnsignedShort(j + 1);
+ if (k >= 0 && k < codeLength) { // potential offset
+ if ((b[codeStart + k] & 0xFF) == Opcodes.NEW) { // NEW at this offset
+ readLabel(k, labels);
+ }
+ }
+ }
+ }
+ }
+ v = codeStart;
+ Label l;
+ while (v < codeEnd) {
+ w = v - codeStart;
+
+ l = labels[w];
+ if (l != null) {
+ mv.visitLabel(l);
+ if (!skipDebug && l.line > 0) {
+ mv.visitLineNumber(l.line, l);
+ }
+ }
+
+ while (FRAMES && frameLocal != null
+ && (frameOffset == w || frameOffset == -1))
+ {
+ // if there is a frame for this offset,
+ // makes the visitor visit it,
+ // and reads the next frame if there is one.
+ if (!zip || unzip) {
+ mv.visitFrame(Opcodes.F_NEW,
+ frameLocalCount,
+ frameLocal,
+ frameStackCount,
+ frameStack);
+ } else if (frameOffset != -1) {
+ mv.visitFrame(frameMode,
+ frameLocalDiff,
+ frameLocal,
+ frameStackCount,
+ frameStack);
+ }
+
+ if (frameCount > 0) {
+ int tag, delta, n;
+ if (zip) {
+ tag = b[stackMap++] & 0xFF;
+ } else {
+ tag = MethodWriter.FULL_FRAME;
+ frameOffset = -1;
+ }
+ frameLocalDiff = 0;
+ if (tag < MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME)
+ {
+ delta = tag;
+ frameMode = Opcodes.F_SAME;
+ frameStackCount = 0;
+ } else if (tag < MethodWriter.RESERVED) {
+ delta = tag
+ - MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME;
+ stackMap = readFrameType(frameStack,
+ 0,
+ stackMap,
+ c,
+ labels);
+ frameMode = Opcodes.F_SAME1;
+ frameStackCount = 1;
+ } else {
+ delta = readUnsignedShort(stackMap);
+ stackMap += 2;
+ if (tag == MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
+ {
+ stackMap = readFrameType(frameStack,
+ 0,
+ stackMap,
+ c,
+ labels);
+ frameMode = Opcodes.F_SAME1;
+ frameStackCount = 1;
+ } else if (tag >= MethodWriter.CHOP_FRAME
+ && tag < MethodWriter.SAME_FRAME_EXTENDED)
+ {
+ frameMode = Opcodes.F_CHOP;
+ frameLocalDiff = MethodWriter.SAME_FRAME_EXTENDED
+ - tag;
+ frameLocalCount -= frameLocalDiff;
+ frameStackCount = 0;
+ } else if (tag == MethodWriter.SAME_FRAME_EXTENDED)
+ {
+ frameMode = Opcodes.F_SAME;
+ frameStackCount = 0;
+ } else if (tag < MethodWriter.FULL_FRAME) {
+ j = unzip ? frameLocalCount : 0;
+ for (k = tag
+ - MethodWriter.SAME_FRAME_EXTENDED; k > 0; k--)
+ {
+ stackMap = readFrameType(frameLocal,
+ j++,
+ stackMap,
+ c,
+ labels);
+ }
+ frameMode = Opcodes.F_APPEND;
+ frameLocalDiff = tag
+ - MethodWriter.SAME_FRAME_EXTENDED;
+ frameLocalCount += frameLocalDiff;
+ frameStackCount = 0;
+ } else { // if (tag == FULL_FRAME) {
+ frameMode = Opcodes.F_FULL;
+ n = frameLocalDiff = frameLocalCount = readUnsignedShort(stackMap);
+ stackMap += 2;
+ for (j = 0; n > 0; n--) {
+ stackMap = readFrameType(frameLocal,
+ j++,
+ stackMap,
+ c,
+ labels);
+ }
+ n = frameStackCount = readUnsignedShort(stackMap);
+ stackMap += 2;
+ for (j = 0; n > 0; n--) {
+ stackMap = readFrameType(frameStack,
+ j++,
+ stackMap,
+ c,
+ labels);
+ }
+ }
+ }
+ frameOffset += delta + 1;
+ readLabel(frameOffset, labels);
+
+ --frameCount;
+ } else {
+ frameLocal = null;
+ }
+ }
+
+ int opcode = b[v] & 0xFF;
+ switch (ClassWriter.TYPE[opcode]) {
+ case ClassWriter.NOARG_INSN:
+ mv.visitInsn(opcode);
+ v += 1;
+ break;
+ case ClassWriter.IMPLVAR_INSN:
+ if (opcode > Opcodes.ISTORE) {
+ opcode -= 59; // ISTORE_0
+ mv.visitVarInsn(Opcodes.ISTORE + (opcode >> 2),
+ opcode & 0x3);
+ } else {
+ opcode -= 26; // ILOAD_0
+ mv.visitVarInsn(Opcodes.ILOAD + (opcode >> 2),
+ opcode & 0x3);
+ }
+ v += 1;
+ break;
+ case ClassWriter.LABEL_INSN:
+ mv.visitJumpInsn(opcode, labels[w
+ + readShort(v + 1)]);
+ v += 3;
+ break;
+ case ClassWriter.LABELW_INSN:
+ mv.visitJumpInsn(opcode - 33, labels[w
+ + readInt(v + 1)]);
+ v += 5;
+ break;
+ case ClassWriter.WIDE_INSN:
+ opcode = b[v + 1] & 0xFF;
+ if (opcode == Opcodes.IINC) {
+ mv.visitIincInsn(readUnsignedShort(v + 2),
+ readShort(v + 4));
+ v += 6;
+ } else {
+ mv.visitVarInsn(opcode,
+ readUnsignedShort(v + 2));
+ v += 4;
+ }
+ break;
+ case ClassWriter.TABL_INSN:
+ // skips 0 to 3 padding bytes
+ v = v + 4 - (w & 3);
+ // reads instruction
+ label = w + readInt(v);
+ int min = readInt(v + 4);
+ int max = readInt(v + 8);
+ v += 12;
+ Label[] table = new Label[max - min + 1];
+ for (j = 0; j < table.length; ++j) {
+ table[j] = labels[w + readInt(v)];
+ v += 4;
+ }
+ mv.visitTableSwitchInsn(min,
+ max,
+ labels[label],
+ table);
+ break;
+ case ClassWriter.LOOK_INSN:
+ // skips 0 to 3 padding bytes
+ v = v + 4 - (w & 3);
+ // reads instruction
+ label = w + readInt(v);
+ j = readInt(v + 4);
+ v += 8;
+ int[] keys = new int[j];
+ Label[] values = new Label[j];
+ for (j = 0; j < keys.length; ++j) {
+ keys[j] = readInt(v);
+ values[j] = labels[w + readInt(v + 4)];
+ v += 8;
+ }
+ mv.visitLookupSwitchInsn(labels[label],
+ keys,
+ values);
+ break;
+ case ClassWriter.VAR_INSN:
+ mv.visitVarInsn(opcode, b[v + 1] & 0xFF);
+ v += 2;
+ break;
+ case ClassWriter.SBYTE_INSN:
+ mv.visitIntInsn(opcode, b[v + 1]);
+ v += 2;
+ break;
+ case ClassWriter.SHORT_INSN:
+ mv.visitIntInsn(opcode, readShort(v + 1));
+ v += 3;
+ break;
+ case ClassWriter.LDC_INSN:
+ mv.visitLdcInsn(readConst(b[v + 1] & 0xFF, c));
+ v += 2;
+ break;
+ case ClassWriter.LDCW_INSN:
+ mv.visitLdcInsn(readConst(readUnsignedShort(v + 1),
+ c));
+ v += 3;
+ break;
+ case ClassWriter.FIELDORMETH_INSN:
+ case ClassWriter.ITFMETH_INSN: {
+ int cpIndex = items[readUnsignedShort(v + 1)];
+ String iowner = readClass(cpIndex, c);
+ cpIndex = items[readUnsignedShort(cpIndex + 2)];
+ String iname = readUTF8(cpIndex, c);
+ String idesc = readUTF8(cpIndex + 2, c);
+ if (opcode < Opcodes.INVOKEVIRTUAL) {
+ mv.visitFieldInsn(opcode, iowner, iname, idesc);
+ } else {
+ mv.visitMethodInsn(opcode, iowner, iname, idesc);
+ }
+ if (opcode == Opcodes.INVOKEINTERFACE) {
+ v += 5;
+ } else {
+ v += 3;
+ }
+ break;
+ }
+ case ClassWriter.INDYMETH_INSN: {
+ int cpIndex = items[readUnsignedShort(v + 1)];
+ int bsmIndex = bootstrapMethods[readUnsignedShort(cpIndex)];
+ cpIndex = items[readUnsignedShort(cpIndex + 2)];
+ String iname = readUTF8(cpIndex, c);
+ String idesc = readUTF8(cpIndex + 2, c);
+
+ int mhIndex = readUnsignedShort(bsmIndex);
+ Handle bsm = (Handle) readConst(mhIndex, c);
+ int bsmArgCount = readUnsignedShort(bsmIndex + 2);
+ Object[] bsmArgs = new Object[bsmArgCount];
+ bsmIndex += 4;
+ for(int a = 0; a < bsmArgCount; a++) {
+ int argIndex = readUnsignedShort(bsmIndex);
+ bsmArgs[a] = readConst(argIndex, c);
+ bsmIndex += 2;
+ }
+ mv.visitInvokeDynamicInsn(iname, idesc, bsm, bsmArgs);
+
+ v += 5;
+ break;
+ }
+ case ClassWriter.TYPE_INSN:
+ mv.visitTypeInsn(opcode, readClass(v + 1, c));
+ v += 3;
+ break;
+ case ClassWriter.IINC_INSN:
+ mv.visitIincInsn(b[v + 1] & 0xFF, b[v + 2]);
+ v += 3;
+ break;
+ // case MANA_INSN:
+ default:
+ mv.visitMultiANewArrayInsn(readClass(v + 1, c),
+ b[v + 3] & 0xFF);
+ v += 4;
+ break;
+ }
+ }
+ l = labels[codeEnd - codeStart];
+ if (l != null) {
+ mv.visitLabel(l);
+ }
+ // visits the local variable tables
+ if (!skipDebug && varTable != 0) {
+ int[] typeTable = null;
+ if (varTypeTable != 0) {
+ k = readUnsignedShort(varTypeTable) * 3;
+ w = varTypeTable + 2;
+ typeTable = new int[k];
+ while (k > 0) {
+ typeTable[--k] = w + 6; // signature
+ typeTable[--k] = readUnsignedShort(w + 8); // index
+ typeTable[--k] = readUnsignedShort(w); // start
+ w += 10;
+ }
+ }
+ k = readUnsignedShort(varTable);
+ w = varTable + 2;
+ for (; k > 0; --k) {
+ int start = readUnsignedShort(w);
+ int length = readUnsignedShort(w + 2);
+ int index = readUnsignedShort(w + 8);
+ String vsignature = null;
+ if (typeTable != null) {
+ for (int a = 0; a < typeTable.length; a += 3) {
+ if (typeTable[a] == start
+ && typeTable[a + 1] == index)
+ {
+ vsignature = readUTF8(typeTable[a + 2], c);
+ break;
+ }
+ }
+ }
+ mv.visitLocalVariable(readUTF8(w + 4, c),
+ readUTF8(w + 6, c),
+ vsignature,
+ labels[start],
+ labels[start + length],
+ index);
+ w += 10;
+ }
+ }
+ // visits the other attributes
+ while (cattrs != null) {
+ attr = cattrs.next;
+ cattrs.next = null;
+ mv.visitAttribute(cattrs);
+ cattrs = attr;
+ }
+ // visits the max stack and max locals values
+ mv.visitMaxs(maxStack, maxLocals);
+ }
+
+ if (mv != null) {
+ mv.visitEnd();
+ }
+ }
+
+ // visits the end of the class
+ classVisitor.visitEnd();
+ }
+
+ /**
+ * Reads parameter annotations and makes the given visitor visit them.
+ *
+ * @param v start offset in {@link #b b} of the annotations to be read.
+ * @param desc the method descriptor.
+ * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or
+ * {@link #readConst readConst}.
+ * @param visible <tt>true</tt> if the annotations to be read are visible
+ * at runtime.
+ * @param mv the visitor that must visit the annotations.
+ */
+ private void readParameterAnnotations(
+ int v,
+ final String desc,
+ final char[] buf,
+ final boolean visible,
+ final MethodVisitor mv)
+ {
+ int i;
+ // number of parameter entries actually stored in the class file
+ int n = b[v++] & 0xFF;
+ // workaround for a bug in javac (javac compiler generates a parameter
+ // annotation array whose size is equal to the number of parameters in
+ // the Java source file, while it should generate an array whose size is
+ // equal to the number of parameters in the method descriptor - which
+ // includes the synthetic parameters added by the compiler). This work-
+ // around supposes that the synthetic parameters are the first ones.
+ int synthetics = Type.getArgumentTypes(desc).length - n;
+ AnnotationVisitor av;
+ for (i = 0; i < synthetics; ++i) {
+ // virtual annotation to detect synthetic parameters in MethodWriter
+ av = mv.visitParameterAnnotation(i, "Ljava/lang/Synthetic;", false);
+ if (av != null) {
+ av.visitEnd();
+ }
+ }
+ // annotations actually present in the class file: for each remaining
+ // parameter, an annotation count followed by that many annotations
+ for (; i < n + synthetics; ++i) {
+ int j = readUnsignedShort(v);
+ v += 2;
+ for (; j > 0; --j) {
+ av = mv.visitParameterAnnotation(i, readUTF8(v, buf), visible);
+ // readAnnotationValues also calls av.visitEnd() when av is non null
+ v = readAnnotationValues(v + 2, buf, true, av);
+ }
+ }
+ }
+
+ /**
+ * Reads the values of an annotation and makes the given visitor visit them.
+ *
+ * @param v the start offset in {@link #b b} of the values to be read
+ * (including the unsigned short that gives the number of values).
+ * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or
+ * {@link #readConst readConst}.
+ * @param named if the annotation values are named or not.
+ * @param av the visitor that must visit the values.
+ * @return the end offset of the annotation values.
+ */
+ private int readAnnotationValues(
+     int v,
+     final char[] buf,
+     final boolean named,
+     final AnnotationVisitor av)
+ {
+     int count = readUnsignedShort(v);
+     v += 2;
+     while (count-- > 0) {
+         if (named) {
+             // element_value_pairs: a name index followed by the value itself
+             v = readAnnotationValue(v + 2, buf, readUTF8(v, buf), av);
+         } else {
+             // array elements: bare values, no names
+             v = readAnnotationValue(v, buf, null, av);
+         }
+     }
+     if (av != null) {
+         av.visitEnd();
+     }
+     return v;
+ }
+
+ /**
+ * Reads a value of an annotation and makes the given visitor visit it.
+ *
+ * @param v the start offset in {@link #b b} of the value to be read (<i>not
+ * including the value name constant pool index</i>).
+ * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or
+ * {@link #readConst readConst}.
+ * @param name the name of the value to be read.
+ * @param av the visitor that must visit the value.
+ * @return the end offset of the annotation value.
+ */
+ private int readAnnotationValue(
+ int v,
+ final char[] buf,
+ final String name,
+ final AnnotationVisitor av)
+ {
+ int i;
+ if (av == null) {
+ // no visitor: only compute the end offset, without decoding the value
+ switch (b[v] & 0xFF) {
+ case 'e': // enum_const_value
+ return v + 5;
+ case '@': // annotation_value
+ return readAnnotationValues(v + 3, buf, true, null);
+ case '[': // array_value
+ return readAnnotationValues(v + 1, buf, false, null);
+ default:
+ return v + 3;
+ }
+ }
+ // decode the value according to its tag and feed it to the visitor
+ switch (b[v++] & 0xFF) {
+ case 'I': // pointer to CONSTANT_Integer
+ case 'J': // pointer to CONSTANT_Long
+ case 'F': // pointer to CONSTANT_Float
+ case 'D': // pointer to CONSTANT_Double
+ av.visit(name, readConst(readUnsignedShort(v), buf));
+ v += 2;
+ break;
+ case 'B': // pointer to CONSTANT_Byte
+ av.visit(name,
+ new Byte((byte) readInt(items[readUnsignedShort(v)])));
+ v += 2;
+ break;
+ case 'Z': // pointer to CONSTANT_Boolean
+ av.visit(name, readInt(items[readUnsignedShort(v)]) == 0
+ ? Boolean.FALSE
+ : Boolean.TRUE);
+ v += 2;
+ break;
+ case 'S': // pointer to CONSTANT_Short
+ av.visit(name,
+ new Short((short) readInt(items[readUnsignedShort(v)])));
+ v += 2;
+ break;
+ case 'C': // pointer to CONSTANT_Char
+ av.visit(name,
+ new Character((char) readInt(items[readUnsignedShort(v)])));
+ v += 2;
+ break;
+ case 's': // pointer to CONSTANT_Utf8
+ av.visit(name, readUTF8(v, buf));
+ v += 2;
+ break;
+ case 'e': // enum_const_value
+ av.visitEnum(name, readUTF8(v, buf), readUTF8(v + 2, buf));
+ v += 4;
+ break;
+ case 'c': // class_info
+ av.visit(name, Type.getType(readUTF8(v, buf)));
+ v += 2;
+ break;
+ case '@': // annotation_value
+ v = readAnnotationValues(v + 2,
+ buf,
+ true,
+ av.visitAnnotation(name, readUTF8(v, buf)));
+ break;
+ case '[': // array_value
+ int size = readUnsignedShort(v);
+ v += 2;
+ // empty array: delegate so the nested visitor is still created and ended
+ if (size == 0) {
+ return readAnnotationValues(v - 2,
+ buf,
+ false,
+ av.visitArray(name));
+ }
+ // fast path for primitive arrays: each element is 1 tag byte plus a
+ // 2 byte constant pool index, hence the v += 3 stride below
+ switch (this.b[v++] & 0xFF) {
+ case 'B':
+ byte[] bv = new byte[size];
+ for (i = 0; i < size; i++) {
+ bv[i] = (byte) readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, bv);
+ // the stride stepped over the next (non existent) tag; back up one byte
+ --v;
+ break;
+ case 'Z':
+ boolean[] zv = new boolean[size];
+ for (i = 0; i < size; i++) {
+ zv[i] = readInt(items[readUnsignedShort(v)]) != 0;
+ v += 3;
+ }
+ av.visit(name, zv);
+ --v;
+ break;
+ case 'S':
+ short[] sv = new short[size];
+ for (i = 0; i < size; i++) {
+ sv[i] = (short) readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, sv);
+ --v;
+ break;
+ case 'C':
+ char[] cv = new char[size];
+ for (i = 0; i < size; i++) {
+ cv[i] = (char) readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, cv);
+ --v;
+ break;
+ case 'I':
+ int[] iv = new int[size];
+ for (i = 0; i < size; i++) {
+ iv[i] = readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, iv);
+ --v;
+ break;
+ case 'J':
+ long[] lv = new long[size];
+ for (i = 0; i < size; i++) {
+ lv[i] = readLong(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, lv);
+ --v;
+ break;
+ case 'F':
+ float[] fv = new float[size];
+ for (i = 0; i < size; i++) {
+ fv[i] = Float.intBitsToFloat(readInt(items[readUnsignedShort(v)]));
+ v += 3;
+ }
+ av.visit(name, fv);
+ --v;
+ break;
+ case 'D':
+ double[] dv = new double[size];
+ for (i = 0; i < size; i++) {
+ dv[i] = Double.longBitsToDouble(readLong(items[readUnsignedShort(v)]));
+ v += 3;
+ }
+ av.visit(name, dv);
+ --v;
+ break;
+ default:
+ // not a primitive element type: re-parse from the size (v - 3) as a
+ // generic array, visiting each element through visitArray
+ v = readAnnotationValues(v - 3,
+ buf,
+ false,
+ av.visitArray(name));
+ }
+ }
+ return v;
+ }
+
+ /**
+ * Reads a single verification type entry of a stack map frame and stores
+ * the decoded type in the given frame array.
+ *
+ * @param frame the array in which the decoded type must be stored.
+ * @param index the index in 'frame' at which the type must be stored.
+ * @param v the start offset of the entry in {@link #b b}.
+ * @param buf buffer to be used to read class names.
+ * @param labels the labels of the method's code.
+ * @return the end offset of the entry.
+ */
+ private int readFrameType(
+     final Object[] frame,
+     final int index,
+     int v,
+     final char[] buf,
+     final Label[] labels)
+ {
+     int tag = b[v++] & 0xFF;
+     Object decoded;
+     switch (tag) {
+         case 0:
+             decoded = Opcodes.TOP;
+             break;
+         case 1:
+             decoded = Opcodes.INTEGER;
+             break;
+         case 2:
+             decoded = Opcodes.FLOAT;
+             break;
+         case 3:
+             decoded = Opcodes.DOUBLE;
+             break;
+         case 4:
+             decoded = Opcodes.LONG;
+             break;
+         case 5:
+             decoded = Opcodes.NULL;
+             break;
+         case 6:
+             decoded = Opcodes.UNINITIALIZED_THIS;
+             break;
+         case 7:
+             // Object: the tag is followed by a constant pool class index
+             decoded = readClass(v, buf);
+             v += 2;
+             break;
+         default:
+             // Uninitialized: the tag is followed by a bytecode offset
+             decoded = readLabel(readUnsignedShort(v), labels);
+             v += 2;
+     }
+     frame[index] = decoded;
+     return v;
+ }
+
+ /**
+ * Returns the label corresponding to the given offset. The default
+ * implementation of this method creates a label for the given offset if it
+ * has not been already created.
+ *
+ * @param offset a bytecode offset in a method.
+ * @param labels the already created labels, indexed by their offset. If a
+ * label already exists for offset this method must not create a new
+ * one. Otherwise it must store the new label in this array.
+ * @return a non null Label, which must be equal to labels[offset].
+ */
+ protected Label readLabel(int offset, Label[] labels) {
+     Label existing = labels[offset];
+     if (existing == null) {
+         existing = new Label();
+         labels[offset] = existing;
+     }
+     return existing;
+ }
+
+ /**
+ * Reads an attribute in {@link #b b}.
+ *
+ * @param attrs prototypes of the attributes that must be parsed during the
+ * visit of the class. Any attribute whose type is not equal to the
+ * type of one the prototypes is ignored (i.e. an empty
+ * {@link Attribute} instance is returned).
+ * @param type the type of the attribute.
+ * @param off index of the first byte of the attribute's content in
+ * {@link #b b}. The 6 attribute header bytes, containing the type
+ * and the length of the attribute, are not taken into account here
+ * (they have already been read).
+ * @param len the length of the attribute's content.
+ * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or
+ * {@link #readConst readConst}.
+ * @param codeOff index of the first byte of code's attribute content in
+ * {@link #b b}, or -1 if the attribute to be read is not a code
+ * attribute. The 6 attribute header bytes are not taken into account
+ * here.
+ * @param labels the labels of the method's code, or <tt>null</tt> if the
+ * attribute to be read is not a code attribute.
+ * @return the attribute that has been read, or <tt>null</tt> to skip this
+ * attribute.
+ */
+ private Attribute readAttribute(
+     final Attribute[] attrs,
+     final String type,
+     final int off,
+     final int len,
+     final char[] buf,
+     final int codeOff,
+     final Label[] labels)
+ {
+     // a matching prototype parses the attribute itself
+     int k = 0;
+     while (k < attrs.length) {
+         if (attrs[k].type.equals(type)) {
+             return attrs[k].read(this, off, len, buf, codeOff, labels);
+         }
+         ++k;
+     }
+     // unknown attribute: fall back to an opaque Attribute instance
+     return new Attribute(type).read(this, off, len, null, -1, null);
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods: low level parsing
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the number of constant pool items in {@link #b b}.
+ *
+ * @return the number of constant pool items in {@link #b b}.
+ */
+ public int getItemCount() {
+ // NOTE(review): raw length of the per-item offset table — presumably one
+ // entry per constant pool slot, including the implicit slot 0; confirm
+ // against the constructor (not visible here)
+ return items.length;
+ }
+
+ /**
+ * Returns the start index of the constant pool item in {@link #b b}, plus
+ * one. <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param item the index a constant pool item.
+ * @return the start index of the constant pool item in {@link #b b}, plus
+ * one.
+ */
+ public int getItem(final int item) {
+ // direct lookup in the precomputed per-item offset table
+ return items[item];
+ }
+
+ /**
+ * Returns the maximum length of the strings contained in the constant pool
+ * of the class.
+ *
+ * @return the maximum length of the strings contained in the constant pool
+ * of the class.
+ */
+ public int getMaxStringLength() {
+ // NOTE(review): field precomputed elsewhere (not visible here); callers
+ // appear to use it to size the char[] buffers passed to readUTF8 — confirm
+ return maxStringLength;
+ }
+
+ /**
+ * Reads a byte value in {@link #b b}. <i>This method is intended for
+ * {@link Attribute} sub classes, and is normally not needed by class
+ * generators or adapters.</i>
+ *
+ * @param index the start index of the value to be read in {@link #b b}.
+ * @return the read value.
+ */
+ public int readByte(final int index) {
+ // & 0xFF cancels sign extension so the byte is read as unsigned
+ return b[index] & 0xFF;
+ }
+
+ /**
+ * Reads an unsigned short value in {@link #b b}. <i>This method is
+ * intended for {@link Attribute} sub classes, and is normally not needed by
+ * class generators or adapters.</i>
+ *
+ * @param index the start index of the value to be read in {@link #b b}.
+ * @return the read value.
+ */
+ public int readUnsignedShort(final int index) {
+     byte[] data = this.b;
+     int hi = (data[index] & 0xFF) << 8;
+     int lo = data[index + 1] & 0xFF;
+     return hi | lo;
+ }
+
+ /**
+ * Reads a signed short value in {@link #b b}. <i>This method is intended
+ * for {@link Attribute} sub classes, and is normally not needed by class
+ * generators or adapters.</i>
+ *
+ * @param index the start index of the value to be read in {@link #b b}.
+ * @return the read value.
+ */
+ public short readShort(final int index) {
+     // same two big-endian bytes as readUnsignedShort, narrowed to signed
+     return (short) readUnsignedShort(index);
+ }
+
+ /**
+ * Reads a signed int value in {@link #b b}. <i>This method is intended for
+ * {@link Attribute} sub classes, and is normally not needed by class
+ * generators or adapters.</i>
+ *
+ * @param index the start index of the value to be read in {@link #b b}.
+ * @return the read value.
+ */
+ public int readInt(final int index) {
+     byte[] data = this.b;
+     int result = 0;
+     // accumulate the four bytes in big-endian order
+     for (int k = 0; k < 4; ++k) {
+         result = (result << 8) | (data[index + k] & 0xFF);
+     }
+     return result;
+ }
+
+ /**
+ * Reads a signed long value in {@link #b b}. <i>This method is intended
+ * for {@link Attribute} sub classes, and is normally not needed by class
+ * generators or adapters.</i>
+ *
+ * @param index the start index of the value to be read in {@link #b b}.
+ * @return the read value.
+ */
+ public long readLong(final int index) {
+     long high = ((long) readInt(index)) << 32;
+     // mask keeps the low word from sign-extending into the high word
+     long low = readInt(index + 4) & 0xFFFFFFFFL;
+     return high | low;
+ }
+
+    /**
+     * Reads an UTF8 string constant pool item in {@link #b b}. <i>This
+     * method is intended for {@link Attribute} sub classes, and is normally
+     * not needed by class generators or adapters.</i>
+     *
+     * @param index the start index of an unsigned short value in
+     *        {@link #b b}, whose value is the index of an UTF8 constant pool
+     *        item.
+     * @param buf buffer to be used to read the item. This buffer must be
+     *        sufficiently large. It is not automatically resized.
+     * @return the String corresponding to the specified UTF8 item.
+     */
+    public String readUTF8(int index, final char[] buf) {
+        int item = readUnsignedShort(index);
+        String cached = strings[item];
+        if (cached != null) {
+            // Each UTF8 item is decoded at most once and then memoized.
+            return cached;
+        }
+        int offset = items[item];
+        // The item starts with a two byte length followed by the bytes.
+        String decoded = readUTF(offset + 2, readUnsignedShort(offset), buf);
+        strings[item] = decoded;
+        return decoded;
+    }
+
+    /**
+     * Reads UTF8 string in {@link #b b}. Decodes the class file's
+     * "modified UTF-8" encoding (1 to 3 bytes per char) with a small
+     * state machine.
+     *
+     * @param index start offset of the UTF8 string to be read.
+     * @param utfLen length of the UTF8 string to be read.
+     * @param buf buffer to be used to read the string. This buffer must be
+     *        sufficiently large. It is not automatically resized.
+     * @return the String corresponding to the specified UTF8 string.
+     */
+    private String readUTF(int index, final int utfLen, final char[] buf) {
+        int endIndex = index + utfLen;
+        byte[] b = this.b;
+        int strLen = 0; // number of chars decoded into buf so far
+        int c;
+        // Decoder state: 0 = expecting a leading byte; 1 = expecting the
+        // final continuation byte of a 2- or 3-byte sequence; 2 = expecting
+        // the middle continuation byte of a 3-byte sequence.
+        int st = 0;
+        char cc = 0; // partially decoded character
+        while (index < endIndex) {
+            c = b[index++];
+            switch (st) {
+                case 0:
+                    c = c & 0xFF;
+                    if (c < 0x80) { // 0xxxxxxx
+                        buf[strLen++] = (char) c;
+                    } else if (c < 0xE0 && c > 0xBF) { // 110x xxxx 10xx xxxx
+                        cc = (char) (c & 0x1F);
+                        st = 1;
+                    } else { // 1110 xxxx 10xx xxxx 10xx xxxx
+                        // NOTE(review): stray continuation bytes (0x80..0xBF)
+                        // also fall here and are treated as 3-byte starts;
+                        // the constant pool is presumably assumed well-formed.
+                        cc = (char) (c & 0x0F);
+                        st = 2;
+                    }
+                    break;
+
+                case 1: // byte 2 of 2-byte char or byte 3 of 3-byte char
+                    buf[strLen++] = (char) ((cc << 6) | (c & 0x3F));
+                    st = 0;
+                    break;
+
+                case 2: // byte 2 of 3-byte char
+                    cc = (char) ((cc << 6) | (c & 0x3F));
+                    st = 1;
+                    break;
+            }
+        }
+        return new String(buf, 0, strLen);
+    }
+
+ /**
+ * Reads a class constant pool item in {@link #b b}. <i>This method is
+ * intended for {@link Attribute} sub classes, and is normally not needed by
+ * class generators or adapters.</i>
+ *
+ * @param index the start index of an unsigned short value in {@link #b b},
+ * whose value is the index of a class constant pool item.
+ * @param buf buffer to be used to read the item. This buffer must be
+ * sufficiently large. It is not automatically resized.
+ * @return the String corresponding to the specified class item.
+ */
+ public String readClass(final int index, final char[] buf) {
+ // computes the start index of the CONSTANT_Class item in b
+ // and reads the CONSTANT_Utf8 item designated by
+ // the first two bytes of this CONSTANT_Class item
+ return readUTF8(items[readUnsignedShort(index)], buf);
+ }
+
+    /**
+     * Reads a numeric or string constant pool item in {@link #b b}. <i>This
+     * method is intended for {@link Attribute} sub classes, and is normally
+     * not needed by class generators or adapters.</i>
+     *
+     * @param item the index of a constant pool item.
+     * @param buf buffer to be used to read the item. This buffer must be
+     *        sufficiently large. It is not automatically resized.
+     * @return the {@link Integer}, {@link Float}, {@link Long},
+     *         {@link Double}, {@link String}, {@link Type} or {@link Handle}
+     *         corresponding to the given constant pool item.
+     */
+    public Object readConst(final int item, final char[] buf) {
+        int index = items[item];
+        // items[item] points just past the tag byte, so b[index - 1] is the
+        // constant pool tag of the item.
+        switch (b[index - 1]) {
+            case ClassWriter.INT:
+                return new Integer(readInt(index));
+            case ClassWriter.FLOAT:
+                return new Float(Float.intBitsToFloat(readInt(index)));
+            case ClassWriter.LONG:
+                return new Long(readLong(index));
+            case ClassWriter.DOUBLE:
+                return new Double(Double.longBitsToDouble(readLong(index)));
+            case ClassWriter.CLASS:
+                return Type.getObjectType(readUTF8(index, buf));
+            case ClassWriter.STR:
+                return readUTF8(index, buf);
+            case ClassWriter.MTYPE:
+                return Type.getMethodType(readUTF8(index, buf));
+
+            // case ClassWriter.HANDLE_BASE + [1..9]:
+            default: {
+                // Remaining tags denote a method handle item: one byte
+                // reference kind followed by a two byte index of a field or
+                // method reference item.
+                int tag = readByte(index);
+                int[] items = this.items;
+                int cpIndex = items[readUnsignedShort(index + 1)];
+                // First two bytes of the reference item: the owner class.
+                String owner = readClass(cpIndex, buf);
+                // Next two bytes: a NameAndType item giving name and
+                // descriptor.
+                cpIndex = items[readUnsignedShort(cpIndex + 2)];
+                String name = readUTF8(cpIndex, buf);
+                String desc = readUTF8(cpIndex + 2, buf);
+                return new Handle(tag, owner, name, desc);
+            }
+        }
+    }
+}
diff --git a/src/asm/scala/tools/asm/ClassVisitor.java b/src/asm/scala/tools/asm/ClassVisitor.java
new file mode 100644
index 0000000..ae38ae0
--- /dev/null
+++ b/src/asm/scala/tools/asm/ClassVisitor.java
@@ -0,0 +1,277 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A visitor to visit a Java class. The methods of this class must be called
+ * in the following order: <tt>visit</tt> [ <tt>visitSource</tt> ] [
+ * <tt>visitOuterClass</tt> ] ( <tt>visitAnnotation</tt> |
+ * <tt>visitAttribute</tt> )* ( <tt>visitInnerClass</tt> |
+ * <tt>visitField</tt> | <tt>visitMethod</tt> )* <tt>visitEnd</tt>.
+ *
+ * @author Eric Bruneton
+ */
+public abstract class ClassVisitor {
+
+    /**
+     * The ASM API version implemented by this visitor. The value of this
+     * field must be one of {@link Opcodes#ASM4}.
+     */
+    protected final int api;
+
+    /**
+     * The class visitor to which this visitor must delegate method calls.
+     * May be null, in which case every visit method is a no-op.
+     */
+    protected ClassVisitor cv;
+
+    /**
+     * Constructs a new {@link ClassVisitor} with no delegate.
+     *
+     * @param api the ASM API version implemented by this visitor. Must be
+     *        one of {@link Opcodes#ASM4}.
+     */
+    public ClassVisitor(final int api) {
+        this(api, null);
+    }
+
+    /**
+     * Constructs a new {@link ClassVisitor}.
+     *
+     * @param api the ASM API version implemented by this visitor. Must be
+     *        one of {@link Opcodes#ASM4}.
+     * @param cv the class visitor to which this visitor must delegate
+     *        method calls. May be null.
+     */
+    public ClassVisitor(final int api, final ClassVisitor cv) {
+        // NOTE(review): the upstream ASM check rejecting api values other
+        // than Opcodes.ASM4 is deliberately disabled in this copy; any api
+        // value is accepted here.
+        this.api = api;
+        this.cv = cv;
+    }
+
+    /**
+     * Visits the header of the class.
+     *
+     * @param version the class version.
+     * @param access the class's access flags (see {@link Opcodes}). This
+     *        parameter also indicates if the class is deprecated.
+     * @param name the internal name of the class (see
+     *        {@link Type#getInternalName() getInternalName}).
+     * @param signature the signature of this class. May be <tt>null</tt>
+     *        if the class is not generic.
+     * @param superName the internal name of the super class. May be
+     *        <tt>null</tt>, but only for the {@link Object} class.
+     * @param interfaces the internal names of the class's interfaces. May
+     *        be <tt>null</tt>.
+     */
+    public void visit(
+        int version,
+        int access,
+        String name,
+        String signature,
+        String superName,
+        String[] interfaces)
+    {
+        if (cv == null) {
+            return;
+        }
+        cv.visit(version, access, name, signature, superName, interfaces);
+    }
+
+    /**
+     * Visits the source of the class.
+     *
+     * @param source the name of the source file from which the class was
+     *        compiled. May be <tt>null</tt>.
+     * @param debug additional debug information to compute the
+     *        correspondance between source and compiled elements of the
+     *        class. May be <tt>null</tt>.
+     */
+    public void visitSource(String source, String debug) {
+        if (cv == null) {
+            return;
+        }
+        cv.visitSource(source, debug);
+    }
+
+    /**
+     * Visits the enclosing class of the class. This method must be called
+     * only if the class has an enclosing class.
+     *
+     * @param owner internal name of the enclosing class of the class.
+     * @param name the name of the method that contains the class, or
+     *        <tt>null</tt> if the class is not enclosed in a method.
+     * @param desc the descriptor of the method that contains the class, or
+     *        <tt>null</tt> if the class is not enclosed in a method.
+     */
+    public void visitOuterClass(String owner, String name, String desc) {
+        if (cv == null) {
+            return;
+        }
+        cv.visitOuterClass(owner, name, desc);
+    }
+
+    /**
+     * Visits an annotation of the class.
+     *
+     * @param desc the class descriptor of the annotation class.
+     * @param visible <tt>true</tt> if the annotation is visible at runtime.
+     * @return a visitor to visit the annotation values, or <tt>null</tt> if
+     *         this visitor is not interested in visiting this annotation.
+     */
+    public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
+        return cv == null ? null : cv.visitAnnotation(desc, visible);
+    }
+
+    /**
+     * Visits a non standard attribute of the class.
+     *
+     * @param attr an attribute.
+     */
+    public void visitAttribute(Attribute attr) {
+        if (cv == null) {
+            return;
+        }
+        cv.visitAttribute(attr);
+    }
+
+    /**
+     * Visits information about an inner class. This inner class is not
+     * necessarily a member of the class being visited.
+     *
+     * @param name the internal name of an inner class.
+     * @param outerName the internal name of the class to which the inner
+     *        class belongs. May be <tt>null</tt> for not member classes.
+     * @param innerName the (simple) name of the inner class inside its
+     *        enclosing class. May be <tt>null</tt> for anonymous inner
+     *        classes.
+     * @param access the access flags of the inner class as originally
+     *        declared in the enclosing class.
+     */
+    public void visitInnerClass(
+        String name,
+        String outerName,
+        String innerName,
+        int access)
+    {
+        if (cv == null) {
+            return;
+        }
+        cv.visitInnerClass(name, outerName, innerName, access);
+    }
+
+    /**
+     * Visits a field of the class.
+     *
+     * @param access the field's access flags (see {@link Opcodes}). This
+     *        parameter also indicates if the field is synthetic and/or
+     *        deprecated.
+     * @param name the field's name.
+     * @param desc the field's descriptor (see {@link Type Type}).
+     * @param signature the field's signature. May be <tt>null</tt> if the
+     *        field's type does not use generic types.
+     * @param value the field's initial value. This parameter, which may be
+     *        <tt>null</tt>, must be an {@link Integer}, a {@link Float}, a
+     *        {@link Long}, a {@link Double} or a {@link String}. <i>It is
+     *        only used for static fields</i>; its value is ignored for non
+     *        static fields.
+     * @return a visitor to visit field annotations and attributes, or
+     *         <tt>null</tt> if this class visitor is not interested in
+     *         visiting these annotations and attributes.
+     */
+    public FieldVisitor visitField(
+        int access,
+        String name,
+        String desc,
+        String signature,
+        Object value)
+    {
+        return cv == null
+                ? null
+                : cv.visitField(access, name, desc, signature, value);
+    }
+
+    /**
+     * Visits a method of the class. This method <i>must</i> return a new
+     * {@link MethodVisitor} instance (or <tt>null</tt>) each time it is
+     * called, i.e., it should not return a previously returned visitor.
+     *
+     * @param access the method's access flags (see {@link Opcodes}). This
+     *        parameter also indicates if the method is synthetic and/or
+     *        deprecated.
+     * @param name the method's name.
+     * @param desc the method's descriptor (see {@link Type Type}).
+     * @param signature the method's signature. May be <tt>null</tt> if the
+     *        method parameters, return type and exceptions do not use
+     *        generic types.
+     * @param exceptions the internal names of the method's exception
+     *        classes. May be <tt>null</tt>.
+     * @return an object to visit the byte code of the method, or
+     *         <tt>null</tt> if this class visitor is not interested in
+     *         visiting the code of this method.
+     */
+    public MethodVisitor visitMethod(
+        int access,
+        String name,
+        String desc,
+        String signature,
+        String[] exceptions)
+    {
+        return cv == null
+                ? null
+                : cv.visitMethod(access, name, desc, signature, exceptions);
+    }
+
+    /**
+     * Visits the end of the class. This method, which is the last one to be
+     * called, is used to inform the visitor that all the fields and methods
+     * of the class have been visited.
+     */
+    public void visitEnd() {
+        if (cv == null) {
+            return;
+        }
+        cv.visitEnd();
+    }
+}
diff --git a/src/asm/scala/tools/asm/ClassWriter.java b/src/asm/scala/tools/asm/ClassWriter.java
new file mode 100644
index 0000000..c7a0736
--- /dev/null
+++ b/src/asm/scala/tools/asm/ClassWriter.java
@@ -0,0 +1,1672 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A {@link ClassVisitor} that generates classes in bytecode form. More
+ * precisely this visitor generates a byte array conforming to the Java class
+ * file format. It can be used alone, to generate a Java class "from scratch",
+ * or with one or more {@link ClassReader ClassReader} and adapter class visitor
+ * to generate a modified class from one or more existing Java classes.
+ *
+ * @author Eric Bruneton
+ */
+public class ClassWriter extends ClassVisitor {
+
+ /**
+ * Flag to automatically compute the maximum stack size and the maximum
+ * number of local variables of methods. If this flag is set, then the
+ * arguments of the {@link MethodVisitor#visitMaxs visitMaxs} method of the
+ * {@link MethodVisitor} returned by the {@link #visitMethod visitMethod}
+ * method will be ignored, and computed automatically from the signature and
+ * the bytecode of each method.
+ *
+ * @see #ClassWriter(int)
+ */
+ public static final int COMPUTE_MAXS = 1;
+
+ /**
+ * Flag to automatically compute the stack map frames of methods from
+ * scratch. If this flag is set, then the calls to the
+ * {@link MethodVisitor#visitFrame} method are ignored, and the stack map
+ * frames are recomputed from the methods bytecode. The arguments of the
+ * {@link MethodVisitor#visitMaxs visitMaxs} method are also ignored and
+ * recomputed from the bytecode. In other words, computeFrames implies
+ * computeMaxs.
+ *
+ * @see #ClassWriter(int)
+ */
+ public static final int COMPUTE_FRAMES = 2;
+
+ /**
+ * Pseudo access flag to distinguish between the synthetic attribute and
+ * the synthetic access flag.
+ */
+ static final int ACC_SYNTHETIC_ATTRIBUTE = 0x40000;
+
+ /**
+ * The type of instructions without any argument.
+ */
+ static final int NOARG_INSN = 0;
+
+ /**
+ * The type of instructions with an signed byte argument.
+ */
+ static final int SBYTE_INSN = 1;
+
+ /**
+ * The type of instructions with an signed short argument.
+ */
+ static final int SHORT_INSN = 2;
+
+ /**
+ * The type of instructions with a local variable index argument.
+ */
+ static final int VAR_INSN = 3;
+
+ /**
+ * The type of instructions with an implicit local variable index argument.
+ */
+ static final int IMPLVAR_INSN = 4;
+
+ /**
+ * The type of instructions with a type descriptor argument.
+ */
+ static final int TYPE_INSN = 5;
+
+ /**
+ * The type of field and method invocations instructions.
+ */
+ static final int FIELDORMETH_INSN = 6;
+
+ /**
+ * The type of the INVOKEINTERFACE/INVOKEDYNAMIC instruction.
+ */
+ static final int ITFMETH_INSN = 7;
+
+ /**
+ * The type of the INVOKEDYNAMIC instruction.
+ */
+ static final int INDYMETH_INSN = 8;
+
+ /**
+ * The type of instructions with a 2 bytes bytecode offset label.
+ */
+ static final int LABEL_INSN = 9;
+
+ /**
+ * The type of instructions with a 4 bytes bytecode offset label.
+ */
+ static final int LABELW_INSN = 10;
+
+ /**
+ * The type of the LDC instruction.
+ */
+ static final int LDC_INSN = 11;
+
+ /**
+ * The type of the LDC_W and LDC2_W instructions.
+ */
+ static final int LDCW_INSN = 12;
+
+ /**
+ * The type of the IINC instruction.
+ */
+ static final int IINC_INSN = 13;
+
+ /**
+ * The type of the TABLESWITCH instruction.
+ */
+ static final int TABL_INSN = 14;
+
+ /**
+ * The type of the LOOKUPSWITCH instruction.
+ */
+ static final int LOOK_INSN = 15;
+
+ /**
+ * The type of the MULTIANEWARRAY instruction.
+ */
+ static final int MANA_INSN = 16;
+
+ /**
+ * The type of the WIDE instruction.
+ */
+ static final int WIDE_INSN = 17;
+
+ /**
+ * The instruction types of all JVM opcodes.
+ */
+ static final byte[] TYPE;
+
+ /**
+ * The type of CONSTANT_Class constant pool items.
+ */
+ static final int CLASS = 7;
+
+ /**
+ * The type of CONSTANT_Fieldref constant pool items.
+ */
+ static final int FIELD = 9;
+
+ /**
+ * The type of CONSTANT_Methodref constant pool items.
+ */
+ static final int METH = 10;
+
+ /**
+ * The type of CONSTANT_InterfaceMethodref constant pool items.
+ */
+ static final int IMETH = 11;
+
+ /**
+ * The type of CONSTANT_String constant pool items.
+ */
+ static final int STR = 8;
+
+ /**
+ * The type of CONSTANT_Integer constant pool items.
+ */
+ static final int INT = 3;
+
+ /**
+ * The type of CONSTANT_Float constant pool items.
+ */
+ static final int FLOAT = 4;
+
+ /**
+ * The type of CONSTANT_Long constant pool items.
+ */
+ static final int LONG = 5;
+
+ /**
+ * The type of CONSTANT_Double constant pool items.
+ */
+ static final int DOUBLE = 6;
+
+ /**
+ * The type of CONSTANT_NameAndType constant pool items.
+ */
+ static final int NAME_TYPE = 12;
+
+ /**
+ * The type of CONSTANT_Utf8 constant pool items.
+ */
+ static final int UTF8 = 1;
+
+ /**
+ * The type of CONSTANT_MethodType constant pool items.
+ */
+ static final int MTYPE = 16;
+
+ /**
+ * The type of CONSTANT_MethodHandle constant pool items.
+ */
+ static final int HANDLE = 15;
+
+ /**
+ * The type of CONSTANT_InvokeDynamic constant pool items.
+ */
+ static final int INDY = 18;
+
+ /**
+ * The base value for all CONSTANT_MethodHandle constant pool items.
+ * Internally, ASM store the 9 variations of CONSTANT_MethodHandle into
+ * 9 different items.
+ */
+ static final int HANDLE_BASE = 20;
+
+ /**
+ * Normal type Item stored in the ClassWriter {@link ClassWriter#typeTable},
+ * instead of the constant pool, in order to avoid clashes with normal
+ * constant pool items in the ClassWriter constant pool's hash table.
+ */
+ static final int TYPE_NORMAL = 30;
+
+ /**
+ * Uninitialized type Item stored in the ClassWriter
+ * {@link ClassWriter#typeTable}, instead of the constant pool, in order to
+ * avoid clashes with normal constant pool items in the ClassWriter constant
+ * pool's hash table.
+ */
+ static final int TYPE_UNINIT = 31;
+
+ /**
+ * Merged type Item stored in the ClassWriter {@link ClassWriter#typeTable},
+ * instead of the constant pool, in order to avoid clashes with normal
+ * constant pool items in the ClassWriter constant pool's hash table.
+ */
+ static final int TYPE_MERGED = 32;
+
+ /**
+ * The type of BootstrapMethods items. These items are stored in a
+ * special class attribute named BootstrapMethods and
+ * not in the constant pool.
+ */
+ static final int BSM = 33;
+
+ /**
+ * The class reader from which this class writer was constructed, if any.
+ */
+ ClassReader cr;
+
+ /**
+ * Minor and major version numbers of the class to be generated.
+ */
+ int version;
+
+ /**
+ * Index of the next item to be added in the constant pool.
+ */
+ int index;
+
+ /**
+ * The constant pool of this class.
+ */
+ final ByteVector pool;
+
+ /**
+ * The constant pool's hash table data.
+ */
+ Item[] items;
+
+ /**
+ * The threshold of the constant pool's hash table.
+ */
+ int threshold;
+
+ /**
+ * A reusable key used to look for items in the {@link #items} hash table.
+ */
+ final Item key;
+
+ /**
+ * A reusable key used to look for items in the {@link #items} hash table.
+ */
+ final Item key2;
+
+ /**
+ * A reusable key used to look for items in the {@link #items} hash table.
+ */
+ final Item key3;
+
+ /**
+ * A reusable key used to look for items in the {@link #items} hash table.
+ */
+ final Item key4;
+
+ /**
+ * A type table used to temporarily store internal names that will not
+ * necessarily be stored in the constant pool. This type table is used by
+ * the control flow and data flow analysis algorithm used to compute stack
+ * map frames from scratch. This array associates to each index <tt>i</tt>
+ * the Item whose index is <tt>i</tt>. All Item objects stored in this
+ * array are also stored in the {@link #items} hash table. These two arrays
+ * allow to retrieve an Item from its index or, conversely, to get the index
+ * of an Item from its value. Each Item stores an internal name in its
+ * {@link Item#strVal1} field.
+ */
+ Item[] typeTable;
+
+ /**
+ * Number of elements in the {@link #typeTable} array.
+ */
+ private short typeCount;
+
+ /**
+ * The access flags of this class.
+ */
+ private int access;
+
+ /**
+ * The constant pool item that contains the internal name of this class.
+ */
+ private int name;
+
+ /**
+ * The internal name of this class.
+ */
+ String thisName;
+
+ /**
+ * The constant pool item that contains the signature of this class.
+ */
+ private int signature;
+
+ /**
+ * The constant pool item that contains the internal name of the super class
+ * of this class.
+ */
+ private int superName;
+
+ /**
+ * Number of interfaces implemented or extended by this class or interface.
+ */
+ private int interfaceCount;
+
+ /**
+ * The interfaces implemented or extended by this class or interface. More
+ * precisely, this array contains the indexes of the constant pool items
+ * that contain the internal names of these interfaces.
+ */
+ private int[] interfaces;
+
+ /**
+ * The index of the constant pool item that contains the name of the source
+ * file from which this class was compiled.
+ */
+ private int sourceFile;
+
+ /**
+ * The SourceDebug attribute of this class.
+ */
+ private ByteVector sourceDebug;
+
+ /**
+ * The constant pool item that contains the name of the enclosing class of
+ * this class.
+ */
+ private int enclosingMethodOwner;
+
+ /**
+ * The constant pool item that contains the name and descriptor of the
+ * enclosing method of this class.
+ */
+ private int enclosingMethod;
+
+ /**
+ * The runtime visible annotations of this class.
+ */
+ private AnnotationWriter anns;
+
+ /**
+ * The runtime invisible annotations of this class.
+ */
+ private AnnotationWriter ianns;
+
+ /**
+ * The non standard attributes of this class.
+ */
+ private Attribute attrs;
+
+ /**
+ * The number of entries in the InnerClasses attribute.
+ */
+ private int innerClassesCount;
+
+ /**
+ * The InnerClasses attribute.
+ */
+ private ByteVector innerClasses;
+
+ /**
+ * The number of entries in the BootstrapMethods attribute.
+ */
+ int bootstrapMethodsCount;
+
+ /**
+ * The BootstrapMethods attribute.
+ */
+ ByteVector bootstrapMethods;
+
+ /**
+ * The fields of this class. These fields are stored in a linked list of
+ * {@link FieldWriter} objects, linked to each other by their
+ * {@link FieldWriter#fv} field. This field stores the first element of
+ * this list.
+ */
+ FieldWriter firstField;
+
+ /**
+ * The fields of this class. These fields are stored in a linked list of
+ * {@link FieldWriter} objects, linked to each other by their
+ * {@link FieldWriter#fv} field. This field stores the last element of
+ * this list.
+ */
+ FieldWriter lastField;
+
+ /**
+ * The methods of this class. These methods are stored in a linked list of
+ * {@link MethodWriter} objects, linked to each other by their
+ * {@link MethodWriter#mv} field. This field stores the first element of
+ * this list.
+ */
+ MethodWriter firstMethod;
+
+ /**
+ * The methods of this class. These methods are stored in a linked list of
+ * {@link MethodWriter} objects, linked to each other by their
+ * {@link MethodWriter#mv} field. This field stores the last element of
+ * this list.
+ */
+ MethodWriter lastMethod;
+
+ /**
+ * <tt>true</tt> if the maximum stack size and number of local variables
+ * must be automatically computed.
+ */
+ private final boolean computeMaxs;
+
+ /**
+ * <tt>true</tt> if the stack map frames must be recomputed from scratch.
+ */
+ private final boolean computeFrames;
+
+ /**
+ * <tt>true</tt> if the stack map tables of this class are invalid. The
+ * {@link MethodWriter#resizeInstructions} method cannot transform existing
+ * stack map tables, and so produces potentially invalid classes when it is
+ * executed. In this case the class is reread and rewritten with the
+ * {@link #COMPUTE_FRAMES} option (the resizeInstructions method can resize
+ * stack map tables when this option is used).
+ */
+ boolean invalidFrames;
+
+ // ------------------------------------------------------------------------
+ // Static initializer
+ // ------------------------------------------------------------------------
+
+    /**
+     * Computes the instruction types of JVM opcodes. The table is decoded
+     * from a compact string in which the character at position i encodes
+     * the instruction type of opcode i as (char) ('A' + type).
+     */
+    static {
+        byte[] types = new byte[220];
+        String encoded = "AAAAAAAAAAAAAAAABCLMMDDDDDEEEEEEEEEEEEEEEEEEEEAAAAAAAADD"
+                + "DDDEEEEEEEEEEEEEEEEEEEEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
+                + "AAAAAAAAAAAAAAAAANAAAAAAAAAAAAAAAAAAAAJJJJJJJJJJJJJJJJDOPAA"
+                + "AAAAGGGGGGGHIFBFAAFFAARQJJKKJJJJJJJJJJJJJJJJJJ";
+        for (int opcode = 0; opcode < types.length; ++opcode) {
+            types[opcode] = (byte) (encoded.charAt(opcode) - 'A');
+        }
+        TYPE = types;
+        // The string above was generated by filling the table entry by
+        // entry (NEWARRAY/BIPUSH -> SBYTE_INSN, SIPUSH -> SHORT_INSN,
+        // loads/stores -> (IMPL)VAR_INSN, NEW/ANEWARRAY/CHECKCAST/
+        // INSTANCEOF -> TYPE_INSN, field/method accesses ->
+        // FIELDORMETH_INSN, jumps -> LABEL(W)_INSN, LDC(_W/2_W) ->
+        // LDC(W)_INSN, IINC/TABLESWITCH/LOOKUPSWITCH/MULTIANEWARRAY/WIDE ->
+        // their dedicated types, opcodes 202..219 being temporary internal
+        // ASM opcodes) and then printing each entry as
+        // (char) ('A' + b[i]); see the upstream ASM sources for the full
+        // generator listing.
+    }
+
+ // ------------------------------------------------------------------------
+ // Constructor
+ // ------------------------------------------------------------------------
+
+    /**
+     * Constructs a new {@link ClassWriter} object.
+     *
+     * @param flags option flags that can be used to modify the default
+     *        behavior of this class. See {@link #COMPUTE_MAXS},
+     *        {@link #COMPUTE_FRAMES}.
+     */
+    public ClassWriter(final int flags) {
+        super(Opcodes.ASM4);
+        this.index = 1;
+        this.pool = new ByteVector();
+        this.items = new Item[256];
+        // Resize the hash table when it is three quarters full.
+        this.threshold = (int) (0.75d * items.length);
+        this.key = new Item();
+        this.key2 = new Item();
+        this.key3 = new Item();
+        this.key4 = new Item();
+        this.computeMaxs = (flags & COMPUTE_MAXS) != 0;
+        this.computeFrames = (flags & COMPUTE_FRAMES) != 0;
+    }
+
+    /**
+     * Constructs a new {@link ClassWriter} object and enables optimizations
+     * for "mostly add" bytecode transformations. These optimizations are the
+     * following:
+     *
+     * <ul> <li>The constant pool from the original class is copied as is in
+     * the new class, which saves time. New constant pool entries will be
+     * added at the end if necessary, but unused constant pool entries
+     * <i>won't be removed</i>.</li> <li>Methods that are not transformed are
+     * copied as is in the new class, directly from the original class
+     * bytecode (i.e. without emitting visit events for all the method
+     * instructions), which saves a <i>lot</i> of time. Untransformed methods
+     * are detected by the fact that the {@link ClassReader} receives
+     * {@link MethodVisitor} objects that come from a {@link ClassWriter}
+     * (and not from any other {@link ClassVisitor} instance).</li> </ul>
+     *
+     * @param classReader the {@link ClassReader} used to read the original
+     *        class. It will be used to copy the entire constant pool from
+     *        the original class and also to copy other fragments of original
+     *        bytecode where applicable.
+     * @param flags option flags that can be used to modify the default
+     *        behavior of this class. <i>These option flags do not affect
+     *        methods that are copied as is in the new class. This means that
+     *        the maximum stack size nor the stack frames will be computed
+     *        for these methods</i>. See {@link #COMPUTE_MAXS},
+     *        {@link #COMPUTE_FRAMES}.
+     */
+    public ClassWriter(final ClassReader classReader, final int flags) {
+        this(flags);
+        // Copy the original constant pool wholesale up front, enabling the
+        // "mostly add" optimizations described above.
+        classReader.copyPool(this);
+        // Remember the reader so unchanged methods can later be copied as
+        // raw bytecode.
+        this.cr = classReader;
+    }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the ClassVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ @Override
+ public final void visit(
+ final int version,
+ final int access,
+ final String name,
+ final String signature,
+ final String superName,
+ final String[] interfaces)
+ {
+ this.version = version;
+ this.access = access;
+ this.name = newClass(name);
+ thisName = name;
+ if (ClassReader.SIGNATURES && signature != null) {
+ this.signature = newUTF8(signature);
+ }
+ this.superName = superName == null ? 0 : newClass(superName);
+ if (interfaces != null && interfaces.length > 0) {
+ interfaceCount = interfaces.length;
+ this.interfaces = new int[interfaceCount];
+ for (int i = 0; i < interfaceCount; ++i) {
+ this.interfaces[i] = newClass(interfaces[i]);
+ }
+ }
+ }
+
+ @Override
+ public final void visitSource(final String file, final String debug) {
+ if (file != null) {
+ sourceFile = newUTF8(file);
+ }
+ if (debug != null) {
+ sourceDebug = new ByteVector().putUTF8(debug);
+ }
+ }
+
+ @Override
+ public final void visitOuterClass(
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ enclosingMethodOwner = newClass(owner);
+ if (name != null && desc != null) {
+ enclosingMethod = newNameType(name, desc);
+ }
+ }
+
+ @Override
+ public final AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ ByteVector bv = new ByteVector();
+ // write type, and reserve space for values count
+ bv.putShort(newUTF8(desc)).putShort(0);
+ AnnotationWriter aw = new AnnotationWriter(this, true, bv, bv, 2);
+ if (visible) {
+ aw.next = anns;
+ anns = aw;
+ } else {
+ aw.next = ianns;
+ ianns = aw;
+ }
+ return aw;
+ }
+
+ @Override
+ public final void visitAttribute(final Attribute attr) {
+ attr.next = attrs;
+ attrs = attr;
+ }
+
+ @Override
+ public final void visitInnerClass(
+ final String name,
+ final String outerName,
+ final String innerName,
+ final int access)
+ {
+ if (innerClasses == null) {
+ innerClasses = new ByteVector();
+ }
+ ++innerClassesCount;
+ innerClasses.putShort(name == null ? 0 : newClass(name));
+ innerClasses.putShort(outerName == null ? 0 : newClass(outerName));
+ innerClasses.putShort(innerName == null ? 0 : newUTF8(innerName));
+ innerClasses.putShort(access);
+ }
+
+ @Override
+ public final FieldVisitor visitField(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ return new FieldWriter(this, access, name, desc, signature, value);
+ }
+
+ @Override
+ public final MethodVisitor visitMethod(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ return new MethodWriter(this,
+ access,
+ name,
+ desc,
+ signature,
+ exceptions,
+ computeMaxs,
+ computeFrames);
+ }
+
+ @Override
+ public final void visitEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Other public methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the bytecode of the class that was build with this class writer.
+ *
+ * @return the bytecode of the class that was build with this class writer.
+ */
+ public byte[] toByteArray() {
+ if (index > Short.MAX_VALUE) {
+ throw new RuntimeException("Class file too large!");
+ }
+ // computes the real size of the bytecode of this class
+ int size = 24 + 2 * interfaceCount;
+ int nbFields = 0;
+ FieldWriter fb = firstField;
+ while (fb != null) {
+ ++nbFields;
+ size += fb.getSize();
+ fb = (FieldWriter) fb.fv;
+ }
+ int nbMethods = 0;
+ MethodWriter mb = firstMethod;
+ while (mb != null) {
+ ++nbMethods;
+ size += mb.getSize();
+ mb = (MethodWriter) mb.mv;
+ }
+ int attributeCount = 0;
+ if (bootstrapMethods != null) { // we put it as first argument in order
+ // to improve a bit ClassReader.copyBootstrapMethods
+ ++attributeCount;
+ size += 8 + bootstrapMethods.length;
+ newUTF8("BootstrapMethods");
+ }
+ if (ClassReader.SIGNATURES && signature != 0) {
+ ++attributeCount;
+ size += 8;
+ newUTF8("Signature");
+ }
+ if (sourceFile != 0) {
+ ++attributeCount;
+ size += 8;
+ newUTF8("SourceFile");
+ }
+ if (sourceDebug != null) {
+ ++attributeCount;
+ size += sourceDebug.length + 4;
+ newUTF8("SourceDebugExtension");
+ }
+ if (enclosingMethodOwner != 0) {
+ ++attributeCount;
+ size += 10;
+ newUTF8("EnclosingMethod");
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ ++attributeCount;
+ size += 6;
+ newUTF8("Deprecated");
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0
+ && ((version & 0xFFFF) < Opcodes.V1_5 || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0))
+ {
+ ++attributeCount;
+ size += 6;
+ newUTF8("Synthetic");
+ }
+ if (innerClasses != null) {
+ ++attributeCount;
+ size += 8 + innerClasses.length;
+ newUTF8("InnerClasses");
+ }
+ if (ClassReader.ANNOTATIONS && anns != null) {
+ ++attributeCount;
+ size += 8 + anns.getSize();
+ newUTF8("RuntimeVisibleAnnotations");
+ }
+ if (ClassReader.ANNOTATIONS && ianns != null) {
+ ++attributeCount;
+ size += 8 + ianns.getSize();
+ newUTF8("RuntimeInvisibleAnnotations");
+ }
+ if (attrs != null) {
+ attributeCount += attrs.getCount();
+ size += attrs.getSize(this, null, 0, -1, -1);
+ }
+ size += pool.length;
+ // allocates a byte vector of this size, in order to avoid unnecessary
+ // arraycopy operations in the ByteVector.enlarge() method
+ ByteVector out = new ByteVector(size);
+ out.putInt(0xCAFEBABE).putInt(version);
+ out.putShort(index).putByteArray(pool.data, 0, pool.length);
+ int mask = Opcodes.ACC_DEPRECATED
+ | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
+ out.putShort(access & ~mask).putShort(name).putShort(superName);
+ out.putShort(interfaceCount);
+ for (int i = 0; i < interfaceCount; ++i) {
+ out.putShort(interfaces[i]);
+ }
+ out.putShort(nbFields);
+ fb = firstField;
+ while (fb != null) {
+ fb.put(out);
+ fb = (FieldWriter) fb.fv;
+ }
+ out.putShort(nbMethods);
+ mb = firstMethod;
+ while (mb != null) {
+ mb.put(out);
+ mb = (MethodWriter) mb.mv;
+ }
+ out.putShort(attributeCount);
+ if (bootstrapMethods != null) { // should be the first class attribute ?
+ out.putShort(newUTF8("BootstrapMethods"));
+ out.putInt(bootstrapMethods.length + 2).putShort(bootstrapMethodsCount);
+ out.putByteArray(bootstrapMethods.data, 0, bootstrapMethods.length);
+ }
+ if (ClassReader.SIGNATURES && signature != 0) {
+ out.putShort(newUTF8("Signature")).putInt(2).putShort(signature);
+ }
+ if (sourceFile != 0) {
+ out.putShort(newUTF8("SourceFile")).putInt(2).putShort(sourceFile);
+ }
+ if (sourceDebug != null) {
+ int len = sourceDebug.length - 2;
+ out.putShort(newUTF8("SourceDebugExtension")).putInt(len);
+ out.putByteArray(sourceDebug.data, 2, len);
+ }
+ if (enclosingMethodOwner != 0) {
+ out.putShort(newUTF8("EnclosingMethod")).putInt(4);
+ out.putShort(enclosingMethodOwner).putShort(enclosingMethod);
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ out.putShort(newUTF8("Deprecated")).putInt(0);
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0
+ && ((version & 0xFFFF) < Opcodes.V1_5 || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0))
+ {
+ out.putShort(newUTF8("Synthetic")).putInt(0);
+ }
+ if (innerClasses != null) {
+ out.putShort(newUTF8("InnerClasses"));
+ out.putInt(innerClasses.length + 2).putShort(innerClassesCount);
+ out.putByteArray(innerClasses.data, 0, innerClasses.length);
+ }
+ if (ClassReader.ANNOTATIONS && anns != null) {
+ out.putShort(newUTF8("RuntimeVisibleAnnotations"));
+ anns.put(out);
+ }
+ if (ClassReader.ANNOTATIONS && ianns != null) {
+ out.putShort(newUTF8("RuntimeInvisibleAnnotations"));
+ ianns.put(out);
+ }
+ if (attrs != null) {
+ attrs.put(this, null, 0, -1, -1, out);
+ }
+ if (invalidFrames) {
+ ClassWriter cw = new ClassWriter(COMPUTE_FRAMES);
+ new ClassReader(out.data).accept(cw, ClassReader.SKIP_FRAMES);
+ return cw.toByteArray();
+ }
+ return out.data;
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods: constant pool management
+ // ------------------------------------------------------------------------
+
+ /**
+ * Adds a number or string constant to the constant pool of the class being
+ * build. Does nothing if the constant pool already contains a similar item.
+ *
+ * @param cst the value of the constant to be added to the constant pool.
+ * This parameter must be an {@link Integer}, a {@link Float}, a
+ * {@link Long}, a {@link Double}, a {@link String} or a
+ * {@link Type}.
+ * @return a new or already existing constant item with the given value.
+ */
+ Item newConstItem(final Object cst) {
+ if (cst instanceof Integer) {
+ int val = ((Integer) cst).intValue();
+ return newInteger(val);
+ } else if (cst instanceof Byte) {
+ int val = ((Byte) cst).intValue();
+ return newInteger(val);
+ } else if (cst instanceof Character) {
+ int val = ((Character) cst).charValue();
+ return newInteger(val);
+ } else if (cst instanceof Short) {
+ int val = ((Short) cst).intValue();
+ return newInteger(val);
+ } else if (cst instanceof Boolean) {
+ int val = ((Boolean) cst).booleanValue() ? 1 : 0;
+ return newInteger(val);
+ } else if (cst instanceof Float) {
+ float val = ((Float) cst).floatValue();
+ return newFloat(val);
+ } else if (cst instanceof Long) {
+ long val = ((Long) cst).longValue();
+ return newLong(val);
+ } else if (cst instanceof Double) {
+ double val = ((Double) cst).doubleValue();
+ return newDouble(val);
+ } else if (cst instanceof String) {
+ return newString((String) cst);
+ } else if (cst instanceof Type) {
+ Type t = (Type) cst;
+ int s = t.getSort();
+ if (s == Type.ARRAY) {
+ return newClassItem(t.getDescriptor());
+ } else if (s == Type.OBJECT) {
+ return newClassItem(t.getInternalName());
+ } else { // s == Type.METHOD
+ return newMethodTypeItem(t.getDescriptor());
+ }
+ } else if (cst instanceof Handle) {
+ Handle h = (Handle) cst;
+ return newHandleItem(h.tag, h.owner, h.name, h.desc);
+ } else {
+ throw new IllegalArgumentException("value " + cst);
+ }
+ }
+
+ /**
+ * Adds a number or string constant to the constant pool of the class being
+ * build. Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param cst the value of the constant to be added to the constant pool.
+ * This parameter must be an {@link Integer}, a {@link Float}, a
+ * {@link Long}, a {@link Double} or a {@link String}.
+ * @return the index of a new or already existing constant item with the
+ * given value.
+ */
+ public int newConst(final Object cst) {
+ return newConstItem(cst).index;
+ }
+
+ /**
+ * Adds an UTF8 string to the constant pool of the class being build. Does
+ * nothing if the constant pool already contains a similar item. <i>This
+ * method is intended for {@link Attribute} sub classes, and is normally not
+ * needed by class generators or adapters.</i>
+ *
+ * @param value the String value.
+ * @return the index of a new or already existing UTF8 item.
+ */
+ public int newUTF8(final String value) {
+ key.set(UTF8, value, null, null);
+ Item result = get(key);
+ if (result == null) {
+ pool.putByte(UTF8).putUTF8(value);
+ result = new Item(index++, key);
+ put(result);
+ }
+ return result.index;
+ }
+
+ /**
+ * Adds a class reference to the constant pool of the class being build.
+ * Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param value the internal name of the class.
+ * @return a new or already existing class reference item.
+ */
+ Item newClassItem(final String value) {
+ key2.set(CLASS, value, null, null);
+ Item result = get(key2);
+ if (result == null) {
+ pool.put12(CLASS, newUTF8(value));
+ result = new Item(index++, key2);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a class reference to the constant pool of the class being build.
+ * Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param value the internal name of the class.
+ * @return the index of a new or already existing class reference item.
+ */
+ public int newClass(final String value) {
+ return newClassItem(value).index;
+ }
+
+ /**
+ * Adds a method type reference to the constant pool of the class being
+ * build. Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param methodDesc method descriptor of the method type.
+ * @return a new or already existing method type reference item.
+ */
+ Item newMethodTypeItem(final String methodDesc) {
+ key2.set(MTYPE, methodDesc, null, null);
+ Item result = get(key2);
+ if (result == null) {
+ pool.put12(MTYPE, newUTF8(methodDesc));
+ result = new Item(index++, key2);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a method type reference to the constant pool of the class being
+ * build. Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param methodDesc method descriptor of the method type.
+ * @return the index of a new or already existing method type reference
+ * item.
+ */
+ public int newMethodType(final String methodDesc) {
+ return newMethodTypeItem(methodDesc).index;
+ }
+
+ /**
+ * Adds a handle to the constant pool of the class being build. Does nothing
+ * if the constant pool already contains a similar item. <i>This method is
+ * intended for {@link Attribute} sub classes, and is normally not needed by
+ * class generators or adapters.</i>
+ *
+ * @param tag the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
+ * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
+ * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
+ * {@link Opcodes#H_INVOKESTATIC}, {@link Opcodes#H_INVOKESPECIAL},
+ * {@link Opcodes#H_NEWINVOKESPECIAL} or
+ * {@link Opcodes#H_INVOKEINTERFACE}.
+ * @param owner the internal name of the field or method owner class.
+ * @param name the name of the field or method.
+ * @param desc the descriptor of the field or method.
+ * @return a new or an already existing method type reference item.
+ */
+ Item newHandleItem(
+ final int tag,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ key4.set(HANDLE_BASE + tag, owner, name, desc);
+ Item result = get(key4);
+ if (result == null) {
+ if (tag <= Opcodes.H_PUTSTATIC) {
+ put112(HANDLE, tag, newField(owner, name, desc));
+ } else {
+ put112(HANDLE, tag, newMethod(owner,
+ name,
+ desc,
+ tag == Opcodes.H_INVOKEINTERFACE));
+ }
+ result = new Item(index++, key4);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a handle to the constant pool of the class being
+ * build. Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param tag the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
+ * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
+ * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
+ * {@link Opcodes#H_INVOKESTATIC}, {@link Opcodes#H_INVOKESPECIAL},
+ * {@link Opcodes#H_NEWINVOKESPECIAL} or
+ * {@link Opcodes#H_INVOKEINTERFACE}.
+ * @param owner the internal name of the field or method owner class.
+ * @param name the name of the field or method.
+ * @param desc the descriptor of the field or method.
+ * @return the index of a new or already existing method type reference
+ * item.
+ */
+ public int newHandle(
+ final int tag,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ return newHandleItem(tag, owner, name, desc).index;
+ }
+
+ /**
+ * Adds an invokedynamic reference to the constant pool of the class being
+ * build. Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param name name of the invoked method.
+ * @param desc descriptor of the invoke method.
+ * @param bsm the bootstrap method.
+ * @param bsmArgs the bootstrap method constant arguments.
+ *
+ * @return a new or an already existing invokedynamic type reference item.
+ */
+ Item newInvokeDynamicItem(
+ final String name,
+ final String desc,
+ final Handle bsm,
+ final Object... bsmArgs)
+ {
+ // cache for performance
+ ByteVector bootstrapMethods = this.bootstrapMethods;
+ if (bootstrapMethods == null) {
+ bootstrapMethods = this.bootstrapMethods = new ByteVector();
+ }
+
+ int position = bootstrapMethods.length; // record current position
+
+ int hashCode = bsm.hashCode();
+ bootstrapMethods.putShort(newHandle(bsm.tag,
+ bsm.owner,
+ bsm.name,
+ bsm.desc));
+
+ int argsLength = bsmArgs.length;
+ bootstrapMethods.putShort(argsLength);
+
+ for (int i = 0; i < argsLength; i++) {
+ Object bsmArg = bsmArgs[i];
+ hashCode ^= bsmArg.hashCode();
+ bootstrapMethods.putShort(newConst(bsmArg));
+ }
+
+ byte[] data = bootstrapMethods.data;
+ int length = (1 + 1 + argsLength) << 1; // (bsm + argCount + arguments)
+ hashCode &= 0x7FFFFFFF;
+ Item result = items[hashCode % items.length];
+ loop: while (result != null) {
+ if (result.type != BSM || result.hashCode != hashCode) {
+ result = result.next;
+ continue;
+ }
+
+ // because the data encode the size of the argument
+ // we don't need to test if these size are equals
+ int resultPosition = result.intVal;
+ for (int p = 0; p < length; p++) {
+ if (data[position + p] != data[resultPosition + p]) {
+ result = result.next;
+ continue loop;
+ }
+ }
+ break;
+ }
+
+ int bootstrapMethodIndex;
+ if (result != null) {
+ bootstrapMethodIndex = result.index;
+ bootstrapMethods.length = position; // revert to old position
+ } else {
+ bootstrapMethodIndex = bootstrapMethodsCount++;
+ result = new Item(bootstrapMethodIndex);
+ result.set(position, hashCode);
+ put(result);
+ }
+
+ // now, create the InvokeDynamic constant
+ key3.set(name, desc, bootstrapMethodIndex);
+ result = get(key3);
+ if (result == null) {
+ put122(INDY, bootstrapMethodIndex, newNameType(name, desc));
+ result = new Item(index++, key3);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds an invokedynamic reference to the constant pool of the class being
+ * build. Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param name name of the invoked method.
+ * @param desc descriptor of the invoke method.
+ * @param bsm the bootstrap method.
+ * @param bsmArgs the bootstrap method constant arguments.
+ *
+ * @return the index of a new or already existing invokedynamic
+ * reference item.
+ */
+ public int newInvokeDynamic(
+ final String name,
+ final String desc,
+ final Handle bsm,
+ final Object... bsmArgs)
+ {
+ return newInvokeDynamicItem(name, desc, bsm, bsmArgs).index;
+ }
+
+ /**
+ * Adds a field reference to the constant pool of the class being build.
+ * Does nothing if the constant pool already contains a similar item.
+ *
+ * @param owner the internal name of the field's owner class.
+ * @param name the field's name.
+ * @param desc the field's descriptor.
+ * @return a new or already existing field reference item.
+ */
+ Item newFieldItem(final String owner, final String name, final String desc)
+ {
+ key3.set(FIELD, owner, name, desc);
+ Item result = get(key3);
+ if (result == null) {
+ put122(FIELD, newClass(owner), newNameType(name, desc));
+ result = new Item(index++, key3);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a field reference to the constant pool of the class being build.
+ * Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param owner the internal name of the field's owner class.
+ * @param name the field's name.
+ * @param desc the field's descriptor.
+ * @return the index of a new or already existing field reference item.
+ */
+ public int newField(final String owner, final String name, final String desc)
+ {
+ return newFieldItem(owner, name, desc).index;
+ }
+
+ /**
+ * Adds a method reference to the constant pool of the class being build.
+ * Does nothing if the constant pool already contains a similar item.
+ *
+ * @param owner the internal name of the method's owner class.
+ * @param name the method's name.
+ * @param desc the method's descriptor.
+ * @param itf <tt>true</tt> if <tt>owner</tt> is an interface.
+ * @return a new or already existing method reference item.
+ */
+ Item newMethodItem(
+ final String owner,
+ final String name,
+ final String desc,
+ final boolean itf)
+ {
+ int type = itf ? IMETH : METH;
+ key3.set(type, owner, name, desc);
+ Item result = get(key3);
+ if (result == null) {
+ put122(type, newClass(owner), newNameType(name, desc));
+ result = new Item(index++, key3);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a method reference to the constant pool of the class being build.
+ * Does nothing if the constant pool already contains a similar item.
+ * <i>This method is intended for {@link Attribute} sub classes, and is
+ * normally not needed by class generators or adapters.</i>
+ *
+ * @param owner the internal name of the method's owner class.
+ * @param name the method's name.
+ * @param desc the method's descriptor.
+ * @param itf <tt>true</tt> if <tt>owner</tt> is an interface.
+ * @return the index of a new or already existing method reference item.
+ */
+ public int newMethod(
+ final String owner,
+ final String name,
+ final String desc,
+ final boolean itf)
+ {
+ return newMethodItem(owner, name, desc, itf).index;
+ }
+
+ /**
+ * Adds an integer to the constant pool of the class being build. Does
+ * nothing if the constant pool already contains a similar item.
+ *
+ * @param value the int value.
+ * @return a new or already existing int item.
+ */
+ Item newInteger(final int value) {
+ key.set(value);
+ Item result = get(key);
+ if (result == null) {
+ pool.putByte(INT).putInt(value);
+ result = new Item(index++, key);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a float to the constant pool of the class being build. Does nothing
+ * if the constant pool already contains a similar item.
+ *
+ * @param value the float value.
+ * @return a new or already existing float item.
+ */
+ Item newFloat(final float value) {
+ key.set(value);
+ Item result = get(key);
+ if (result == null) {
+ pool.putByte(FLOAT).putInt(key.intVal);
+ result = new Item(index++, key);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a long to the constant pool of the class being build. Does nothing
+ * if the constant pool already contains a similar item.
+ *
+ * @param value the long value.
+ * @return a new or already existing long item.
+ */
+ Item newLong(final long value) {
+ key.set(value);
+ Item result = get(key);
+ if (result == null) {
+ pool.putByte(LONG).putLong(value);
+ result = new Item(index, key);
+ index += 2;
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a double to the constant pool of the class being build. Does nothing
+ * if the constant pool already contains a similar item.
+ *
+ * @param value the double value.
+ * @return a new or already existing double item.
+ */
+ Item newDouble(final double value) {
+ key.set(value);
+ Item result = get(key);
+ if (result == null) {
+ pool.putByte(DOUBLE).putLong(key.longVal);
+ result = new Item(index, key);
+ index += 2;
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a string to the constant pool of the class being build. Does nothing
+ * if the constant pool already contains a similar item.
+ *
+ * @param value the String value.
+ * @return a new or already existing string item.
+ */
+ private Item newString(final String value) {
+ key2.set(STR, value, null, null);
+ Item result = get(key2);
+ if (result == null) {
+ pool.put12(STR, newUTF8(value));
+ result = new Item(index++, key2);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds a name and type to the constant pool of the class being build. Does
+ * nothing if the constant pool already contains a similar item. <i>This
+ * method is intended for {@link Attribute} sub classes, and is normally not
+ * needed by class generators or adapters.</i>
+ *
+ * @param name a name.
+ * @param desc a type descriptor.
+ * @return the index of a new or already existing name and type item.
+ */
+ public int newNameType(final String name, final String desc) {
+ return newNameTypeItem(name, desc).index;
+ }
+
+ /**
+ * Adds a name and type to the constant pool of the class being build. Does
+ * nothing if the constant pool already contains a similar item.
+ *
+ * @param name a name.
+ * @param desc a type descriptor.
+ * @return a new or already existing name and type item.
+ */
+ Item newNameTypeItem(final String name, final String desc) {
+ key2.set(NAME_TYPE, name, desc, null);
+ Item result = get(key2);
+ if (result == null) {
+ put122(NAME_TYPE, newUTF8(name), newUTF8(desc));
+ result = new Item(index++, key2);
+ put(result);
+ }
+ return result;
+ }
+
+ /**
+ * Adds the given internal name to {@link #typeTable} and returns its index.
+ * Does nothing if the type table already contains this internal name.
+ *
+ * @param type the internal name to be added to the type table.
+ * @return the index of this internal name in the type table.
+ */
+ int addType(final String type) {
+ key.set(TYPE_NORMAL, type, null, null);
+ Item result = get(key);
+ if (result == null) {
+ result = addType(key);
+ }
+ return result.index;
+ }
+
+ /**
+ * Adds the given "uninitialized" type to {@link #typeTable} and returns its
+ * index. This method is used for UNINITIALIZED types, made of an internal
+ * name and a bytecode offset.
+ *
+ * @param type the internal name to be added to the type table.
+ * @param offset the bytecode offset of the NEW instruction that created
+ * this UNINITIALIZED type value.
+ * @return the index of this internal name in the type table.
+ */
+ int addUninitializedType(final String type, final int offset) {
+ key.type = TYPE_UNINIT;
+ key.intVal = offset;
+ key.strVal1 = type;
+ key.hashCode = 0x7FFFFFFF & (TYPE_UNINIT + type.hashCode() + offset);
+ Item result = get(key);
+ if (result == null) {
+ result = addType(key);
+ }
+ return result.index;
+ }
+
+ /**
+ * Adds the given Item to {@link #typeTable}.
+ *
+ * @param item the value to be added to the type table.
+ * @return the added Item, which a new Item instance with the same value as
+ * the given Item.
+ */
+ private Item addType(final Item item) {
+ ++typeCount;
+ Item result = new Item(typeCount, key);
+ put(result);
+ if (typeTable == null) {
+ typeTable = new Item[16];
+ }
+ if (typeCount == typeTable.length) {
+ Item[] newTable = new Item[2 * typeTable.length];
+ System.arraycopy(typeTable, 0, newTable, 0, typeTable.length);
+ typeTable = newTable;
+ }
+ typeTable[typeCount] = result;
+ return result;
+ }
+
+ /**
+ * Returns the index of the common super type of the two given types. This
+ * method calls {@link #getCommonSuperClass} and caches the result in the
+ * {@link #items} hash table to speedup future calls with the same
+ * parameters.
+ *
+ * @param type1 index of an internal name in {@link #typeTable}.
+ * @param type2 index of an internal name in {@link #typeTable}.
+ * @return the index of the common super type of the two given types.
+ */
+ int getMergedType(final int type1, final int type2) {
+ key2.type = TYPE_MERGED;
+ key2.longVal = type1 | (((long) type2) << 32);
+ key2.hashCode = 0x7FFFFFFF & (TYPE_MERGED + type1 + type2);
+ Item result = get(key2);
+ if (result == null) {
+ String t = typeTable[type1].strVal1;
+ String u = typeTable[type2].strVal1;
+ key2.intVal = addType(getCommonSuperClass(t, u));
+ result = new Item((short) 0, key2);
+ put(result);
+ }
+ return result.intVal;
+ }
+
+ /**
+ * Returns the common super type of the two given types. The default
+ * implementation of this method <i>loads<i> the two given classes and uses
+ * the java.lang.Class methods to find the common super class. It can be
+ * overridden to compute this common super type in other ways, in particular
+ * without actually loading any class, or to take into account the class
+ * that is currently being generated by this ClassWriter, which can of
+ * course not be loaded since it is under construction.
+ *
+ * @param type1 the internal name of a class.
+ * @param type2 the internal name of another class.
+ * @return the internal name of the common super class of the two given
+ * classes.
+ */
+ protected String getCommonSuperClass(final String type1, final String type2)
+ {
+ Class<?> c, d;
+ ClassLoader classLoader = getClass().getClassLoader();
+ try {
+ c = Class.forName(type1.replace('/', '.'), false, classLoader);
+ d = Class.forName(type2.replace('/', '.'), false, classLoader);
+ } catch (Exception e) {
+ throw new RuntimeException(e.toString());
+ }
+ if (c.isAssignableFrom(d)) {
+ return type1;
+ }
+ if (d.isAssignableFrom(c)) {
+ return type2;
+ }
+ if (c.isInterface() || d.isInterface()) {
+ return "java/lang/Object";
+ } else {
+ do {
+ c = c.getSuperclass();
+ } while (!c.isAssignableFrom(d));
+ return c.getName().replace('.', '/');
+ }
+ }
+
+ /**
+ * Returns the constant pool's hash table item which is equal to the given
+ * item.
+ *
+ * @param key a constant pool item.
+ * @return the constant pool's hash table item which is equal to the given
+ * item, or <tt>null</tt> if there is no such item.
+ */
+ private Item get(final Item key) {
+ Item i = items[key.hashCode % items.length];
+ while (i != null && (i.type != key.type || !key.isEqualTo(i))) {
+ i = i.next;
+ }
+ return i;
+ }
+
+ /**
+ * Puts the given item in the constant pool's hash table. The hash table
+ * <i>must</i> not already contains this item.
+ *
+ * @param i the item to be added to the constant pool's hash table.
+ */
+ private void put(final Item i) {
+ if (index + typeCount > threshold) {
+ int ll = items.length;
+ int nl = ll * 2 + 1;
+ Item[] newItems = new Item[nl];
+ for (int l = ll - 1; l >= 0; --l) {
+ Item j = items[l];
+ while (j != null) {
+ int index = j.hashCode % newItems.length;
+ Item k = j.next;
+ j.next = newItems[index];
+ newItems[index] = j;
+ j = k;
+ }
+ }
+ items = newItems;
+ threshold = (int) (nl * 0.75);
+ }
+ int index = i.hashCode % items.length;
+ i.next = items[index];
+ items[index] = i;
+ }
+
+ /**
+ * Puts one byte and two shorts into the constant pool.
+ *
+ * @param b a byte.
+ * @param s1 a short.
+ * @param s2 another short.
+ */
+ private void put122(final int b, final int s1, final int s2) {
+ pool.put12(b, s1).putShort(s2);
+ }
+
+ /**
+ * Puts two bytes and one short into the constant pool.
+ *
+ * @param b1 a byte.
+ * @param b2 another byte.
+ * @param s a short.
+ */
+ private void put112(final int b1, final int b2, final int s) {
+ pool.put11(b1, b2).putShort(s);
+ }
+}
diff --git a/src/asm/scala/tools/asm/CustomAttr.java b/src/asm/scala/tools/asm/CustomAttr.java
new file mode 100644
index 0000000..22b5d28
--- /dev/null
+++ b/src/asm/scala/tools/asm/CustomAttr.java
@@ -0,0 +1,20 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ */
+
+package scala.tools.asm;
+
+import scala.tools.asm.Attribute;
+
+/**
+ * A subclass of ASM's Attribute for the sole purpose of accessing a protected field there.
+ *
+ */
+public class CustomAttr extends Attribute {
+
+ public CustomAttr(final String type, final byte[] value) {
+ super(type);
+ super.value = value;
+ }
+
+}
diff --git a/src/asm/scala/tools/asm/Edge.java b/src/asm/scala/tools/asm/Edge.java
new file mode 100644
index 0000000..daac1f7
--- /dev/null
+++ b/src/asm/scala/tools/asm/Edge.java
@@ -0,0 +1,75 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * An edge in the control flow graph of a method body. See {@link Label Label}.
+ *
+ * @author Eric Bruneton
+ */
+class Edge {
+
+ /**
+ * Denotes a normal control flow graph edge.
+ */
+ static final int NORMAL = 0;
+
+ /**
+ * Denotes a control flow graph edge corresponding to an exception handler.
+ * More precisely any {@link Edge} whose {@link #info} is strictly positive
+ * corresponds to an exception handler. The actual value of {@link #info} is
+ * the index, in the {@link ClassWriter} type table, of the exception that
+ * is catched.
+ */
+ static final int EXCEPTION = 0x7FFFFFFF;
+
+ /**
+ * Information about this control flow graph edge. If
+ * {@link ClassWriter#COMPUTE_MAXS} is used this field is the (relative)
+ * stack size in the basic block from which this edge originates. This size
+ * is equal to the stack size at the "jump" instruction to which this edge
+ * corresponds, relatively to the stack size at the beginning of the
+ * originating basic block. If {@link ClassWriter#COMPUTE_FRAMES} is used,
+ * this field is the kind of this control flow graph edge (i.e. NORMAL or
+ * EXCEPTION).
+ */
+ int info;
+
+ /**
+ * The successor block of the basic block from which this edge originates.
+ */
+ Label successor;
+
+ /**
+ * The next edge in the list of successors of the originating basic block.
+ * See {@link Label#successors successors}.
+ */
+ Edge next;
+}
diff --git a/src/asm/scala/tools/asm/FieldVisitor.java b/src/asm/scala/tools/asm/FieldVisitor.java
new file mode 100644
index 0000000..9ac0f62
--- /dev/null
+++ b/src/asm/scala/tools/asm/FieldVisitor.java
@@ -0,0 +1,115 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A visitor to visit a Java field. The methods of this class must be called
+ * in the following order: ( <tt>visitAnnotation</tt> |
+ * <tt>visitAttribute</tt> )* <tt>visitEnd</tt>.
+ *
+ * @author Eric Bruneton
+ */
+public abstract class FieldVisitor {
+
+ /**
+ * The ASM API version implemented by this visitor. The value of this field
+ * must be one of {@link Opcodes#ASM4}.
+ */
+ protected final int api;
+
+ /**
+ * The field visitor to which this visitor must delegate method calls. May
+ * be null.
+ */
+ protected FieldVisitor fv;
+
+ /**
+ * Constructs a new {@link FieldVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ */
+ public FieldVisitor(final int api) {
+ this(api, null);
+ }
+
+ /**
+ * Constructs a new {@link FieldVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param fv the field visitor to which this visitor must delegate method
+ * calls. May be null.
+ */
+ public FieldVisitor(final int api, final FieldVisitor fv) {
+ /*if (api != Opcodes.ASM4) {
+ throw new IllegalArgumentException();
+ }*/
+ this.api = api;
+ this.fv = fv;
+ }
+
+ /**
+ * Visits an annotation of the field.
+ *
+ * @param desc the class descriptor of the annotation class.
+ * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values, or <tt>null</tt> if
+ * this visitor is not interested in visiting this annotation.
+ */
+ public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
+ if (fv != null) {
+ return fv.visitAnnotation(desc, visible);
+ }
+ return null;
+ }
+
+ /**
+ * Visits a non standard attribute of the field.
+ *
+ * @param attr an attribute.
+ */
+ public void visitAttribute(Attribute attr) {
+ if (fv != null) {
+ fv.visitAttribute(attr);
+ }
+ }
+
+ /**
+ * Visits the end of the field. This method, which is the last one to be
+ * called, is used to inform the visitor that all the annotations and
+ * attributes of the field have been visited.
+ */
+ public void visitEnd() {
+ if (fv != null) {
+ fv.visitEnd();
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/FieldWriter.java b/src/asm/scala/tools/asm/FieldWriter.java
new file mode 100644
index 0000000..45ef6d0
--- /dev/null
+++ b/src/asm/scala/tools/asm/FieldWriter.java
@@ -0,0 +1,271 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * An {@link FieldVisitor} that generates Java fields in bytecode form.
+ *
+ * @author Eric Bruneton
+ */
+final class FieldWriter extends FieldVisitor {
+
+ /**
+ * The class writer to which this field must be added.
+ */
+ private final ClassWriter cw;
+
+ /**
+ * Access flags of this field.
+ */
+ private final int access;
+
+ /**
+ * The index of the constant pool item that contains the name of this
+ * method.
+ */
+ private final int name;
+
+ /**
+ * The index of the constant pool item that contains the descriptor of this
+ * field.
+ */
+ private final int desc;
+
+ /**
+ * The index of the constant pool item that contains the signature of this
+ * field.
+ */
+ private int signature;
+
+ /**
+ * The index of the constant pool item that contains the constant value of
+ * this field.
+ */
+ private int value;
+
+ /**
+ * The runtime visible annotations of this field. May be <tt>null</tt>.
+ */
+ private AnnotationWriter anns;
+
+ /**
+ * The runtime invisible annotations of this field. May be <tt>null</tt>.
+ */
+ private AnnotationWriter ianns;
+
+ /**
+ * The non standard attributes of this field. May be <tt>null</tt>.
+ */
+ private Attribute attrs;
+
+ // ------------------------------------------------------------------------
+ // Constructor
+ // ------------------------------------------------------------------------
+
+ /**
+ * Constructs a new {@link FieldWriter}.
+ *
+ * @param cw the class writer to which this field must be added.
+ * @param access the field's access flags (see {@link Opcodes}).
+ * @param name the field's name.
+ * @param desc the field's descriptor (see {@link Type}).
+ * @param signature the field's signature. May be <tt>null</tt>.
+ * @param value the field's constant value. May be <tt>null</tt>.
+ */
+ FieldWriter(
+ final ClassWriter cw,
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ super(Opcodes.ASM4);
+ if (cw.firstField == null) {
+ cw.firstField = this;
+ } else {
+ cw.lastField.fv = this;
+ }
+ cw.lastField = this;
+ this.cw = cw;
+ this.access = access;
+ this.name = cw.newUTF8(name);
+ this.desc = cw.newUTF8(desc);
+ if (ClassReader.SIGNATURES && signature != null) {
+ this.signature = cw.newUTF8(signature);
+ }
+ if (value != null) {
+ this.value = cw.newConstItem(value).index;
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the FieldVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ ByteVector bv = new ByteVector();
+ // write type, and reserve space for values count
+ bv.putShort(cw.newUTF8(desc)).putShort(0);
+ AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, 2);
+ if (visible) {
+ aw.next = anns;
+ anns = aw;
+ } else {
+ aw.next = ianns;
+ ianns = aw;
+ }
+ return aw;
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ attr.next = attrs;
+ attrs = attr;
+ }
+
+ @Override
+ public void visitEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the size of this field.
+ *
+ * @return the size of this field.
+ */
+ int getSize() {
+ int size = 8;
+ if (value != 0) {
+ cw.newUTF8("ConstantValue");
+ size += 8;
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0
+ && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
+ {
+ cw.newUTF8("Synthetic");
+ size += 6;
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ cw.newUTF8("Deprecated");
+ size += 6;
+ }
+ if (ClassReader.SIGNATURES && signature != 0) {
+ cw.newUTF8("Signature");
+ size += 8;
+ }
+ if (ClassReader.ANNOTATIONS && anns != null) {
+ cw.newUTF8("RuntimeVisibleAnnotations");
+ size += 8 + anns.getSize();
+ }
+ if (ClassReader.ANNOTATIONS && ianns != null) {
+ cw.newUTF8("RuntimeInvisibleAnnotations");
+ size += 8 + ianns.getSize();
+ }
+ if (attrs != null) {
+ size += attrs.getSize(cw, null, 0, -1, -1);
+ }
+ return size;
+ }
+
+ /**
+ * Puts the content of this field into the given byte vector.
+ *
+ * @param out where the content of this field must be put.
+ */
+ void put(final ByteVector out) {
+ int mask = Opcodes.ACC_DEPRECATED
+ | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
+ out.putShort(access & ~mask).putShort(name).putShort(desc);
+ int attributeCount = 0;
+ if (value != 0) {
+ ++attributeCount;
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0
+ && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
+ {
+ ++attributeCount;
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ ++attributeCount;
+ }
+ if (ClassReader.SIGNATURES && signature != 0) {
+ ++attributeCount;
+ }
+ if (ClassReader.ANNOTATIONS && anns != null) {
+ ++attributeCount;
+ }
+ if (ClassReader.ANNOTATIONS && ianns != null) {
+ ++attributeCount;
+ }
+ if (attrs != null) {
+ attributeCount += attrs.getCount();
+ }
+ out.putShort(attributeCount);
+ if (value != 0) {
+ out.putShort(cw.newUTF8("ConstantValue"));
+ out.putInt(2).putShort(value);
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0
+ && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
+ {
+ out.putShort(cw.newUTF8("Synthetic")).putInt(0);
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ out.putShort(cw.newUTF8("Deprecated")).putInt(0);
+ }
+ if (ClassReader.SIGNATURES && signature != 0) {
+ out.putShort(cw.newUTF8("Signature"));
+ out.putInt(2).putShort(signature);
+ }
+ if (ClassReader.ANNOTATIONS && anns != null) {
+ out.putShort(cw.newUTF8("RuntimeVisibleAnnotations"));
+ anns.put(out);
+ }
+ if (ClassReader.ANNOTATIONS && ianns != null) {
+ out.putShort(cw.newUTF8("RuntimeInvisibleAnnotations"));
+ ianns.put(out);
+ }
+ if (attrs != null) {
+ attrs.put(cw, null, 0, -1, -1, out);
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/Frame.java b/src/asm/scala/tools/asm/Frame.java
new file mode 100644
index 0000000..387b567
--- /dev/null
+++ b/src/asm/scala/tools/asm/Frame.java
@@ -0,0 +1,1435 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * Information about the input and output stack map frames of a basic block.
+ *
+ * @author Eric Bruneton
+ */
+final class Frame {
+
+ /*
+ * Frames are computed in a two steps process: during the visit of each
+ * instruction, the state of the frame at the end of current basic block is
+ * updated by simulating the action of the instruction on the previous state
+ * of this so called "output frame". In visitMaxs, a fix point algorithm is
+ * used to compute the "input frame" of each basic block, i.e. the stack map
+ * frame at the beginning of the basic block, starting from the input frame
+ * of the first basic block (which is computed from the method descriptor),
+ * and by using the previously computed output frames to compute the input
+ * state of the other blocks.
+ *
+ * All output and input frames are stored as arrays of integers. Reference
+ * and array types are represented by an index into a type table (which is
+ * not the same as the constant pool of the class, in order to avoid adding
+ * unnecessary constants in the pool - not all computed frames will end up
+ * being stored in the stack map table). This allows very fast type
+ * comparisons.
+ *
+ * Output stack map frames are computed relatively to the input frame of the
+ * basic block, which is not yet known when output frames are computed. It
+ * is therefore necessary to be able to represent abstract types such as
+ * "the type at position x in the input frame locals" or "the type at
+ * position x from the top of the input frame stack" or even "the type at
+ * position x in the input frame, with y more (or less) array dimensions".
+ * This explains the rather complicated type format used in output frames.
+ *
+ * This format is the following: DIM KIND VALUE (4, 4 and 24 bits). DIM is a
+ * signed number of array dimensions (from -8 to 7). KIND is either BASE,
+ * LOCAL or STACK. BASE is used for types that are not relative to the input
+ * frame. LOCAL is used for types that are relative to the input local
+ * variable types. STACK is used for types that are relative to the input
+ * stack types. VALUE depends on KIND. For LOCAL types, it is an index in
+ * the input local variable types. For STACK types, it is a position
+ * relatively to the top of input frame stack. For BASE types, it is either
+ * one of the constants defined in FrameVisitor, or for OBJECT and
+ * UNINITIALIZED types, a tag and an index in the type table.
+ *
+ * Output frames can contain types of any kind and with a positive or
+ * negative dimension (and even unassigned types, represented by 0 - which
+ * does not correspond to any valid type value). Input frames can only
+ * contain BASE types of positive or null dimension. In all cases the type
+ * table contains only internal type names (array type descriptors are
+ * forbidden - dimensions must be represented through the DIM field).
+ *
+ * The LONG and DOUBLE types are always represented by using two slots (LONG +
+ * TOP or DOUBLE + TOP), for local variable types as well as in the operand
+ * stack. This is necessary to be able to simulate DUPx_y instructions,
+ * whose effect would be dependent on the actual type values if types were
+ * always represented by a single slot in the stack (and this is not
+ * possible, since actual type values are not always known - cf LOCAL and
+ * STACK type kinds).
+ */
+
+ /**
+ * Mask to get the dimension of a frame type. This dimension is a signed
+ * integer between -8 and 7.
+ */
+ static final int DIM = 0xF0000000;
+
+ /**
+ * Constant to be added to a type to get a type with one more dimension.
+ */
+ static final int ARRAY_OF = 0x10000000;
+
+ /**
+ * Constant to be added to a type to get a type with one less dimension.
+ */
+ static final int ELEMENT_OF = 0xF0000000;
+
+ /**
+ * Mask to get the kind of a frame type.
+ *
+ * @see #BASE
+ * @see #LOCAL
+ * @see #STACK
+ */
+ static final int KIND = 0xF000000;
+
+ /**
+ * Flag used for LOCAL and STACK types. Indicates that if this type happens
+ * to be a long or double type (during the computations of input frames),
+ * then it must be set to TOP because the second word of this value has
+ * been reused to store other data in the basic block. Hence the first word
+ * no longer stores a valid long or double value.
+ */
+ static final int TOP_IF_LONG_OR_DOUBLE = 0x800000;
+
+ /**
+ * Mask to get the value of a frame type.
+ */
+ static final int VALUE = 0x7FFFFF;
+
+ /**
+ * Mask to get the kind of base types.
+ */
+ static final int BASE_KIND = 0xFF00000;
+
+ /**
+ * Mask to get the value of base types.
+ */
+ static final int BASE_VALUE = 0xFFFFF;
+
+ /**
+ * Kind of the types that are not relative to an input stack map frame.
+ */
+ static final int BASE = 0x1000000;
+
+ /**
+ * Base kind of the base reference types. The BASE_VALUE of such types is an
+ * index into the type table.
+ */
+ static final int OBJECT = BASE | 0x700000;
+
+ /**
+ * Base kind of the uninitialized base types. The BASE_VALUE of such types
+ * in an index into the type table (the Item at that index contains both an
+ * instruction offset and an internal class name).
+ */
+ static final int UNINITIALIZED = BASE | 0x800000;
+
+ /**
+ * Kind of the types that are relative to the local variable types of an
+ * input stack map frame. The value of such types is a local variable index.
+ */
+ private static final int LOCAL = 0x2000000;
+
+ /**
+ * Kind of the the types that are relative to the stack of an input stack
+ * map frame. The value of such types is a position relatively to the top of
+ * this stack.
+ */
+ private static final int STACK = 0x3000000;
+
+ /**
+ * The TOP type. This is a BASE type.
+ */
+ static final int TOP = BASE | 0;
+
+ /**
+ * The BOOLEAN type. This is a BASE type mainly used for array types.
+ */
+ static final int BOOLEAN = BASE | 9;
+
+ /**
+ * The BYTE type. This is a BASE type mainly used for array types.
+ */
+ static final int BYTE = BASE | 10;
+
+ /**
+ * The CHAR type. This is a BASE type mainly used for array types.
+ */
+ static final int CHAR = BASE | 11;
+
+ /**
+ * The SHORT type. This is a BASE type mainly used for array types.
+ */
+ static final int SHORT = BASE | 12;
+
+ /**
+ * The INTEGER type. This is a BASE type.
+ */
+ static final int INTEGER = BASE | 1;
+
+ /**
+ * The FLOAT type. This is a BASE type.
+ */
+ static final int FLOAT = BASE | 2;
+
+ /**
+ * The DOUBLE type. This is a BASE type.
+ */
+ static final int DOUBLE = BASE | 3;
+
+ /**
+ * The LONG type. This is a BASE type.
+ */
+ static final int LONG = BASE | 4;
+
+ /**
+ * The NULL type. This is a BASE type.
+ */
+ static final int NULL = BASE | 5;
+
+ /**
+ * The UNINITIALIZED_THIS type. This is a BASE type.
+ */
+ static final int UNINITIALIZED_THIS = BASE | 6;
+
+ /**
+ * The stack size variation corresponding to each JVM instruction. This
+ * stack variation is equal to the size of the values produced by an
+ * instruction, minus the size of the values consumed by this instruction.
+ */
+ static final int[] SIZE;
+
+ /**
+ * Computes the stack size variation corresponding to each JVM instruction.
+ */
+ static {
+ int i;
+ int[] b = new int[202];
+ String s = "EFFFFFFFFGGFFFGGFFFEEFGFGFEEEEEEEEEEEEEEEEEEEEDEDEDDDDD"
+ + "CDCDEEEEEEEEEEEEEEEEEEEEBABABBBBDCFFFGGGEDCDCDCDCDCDCDCDCD"
+ + "CDCEEEEDDDDDDDCDCDCEFEFDDEEFFDEDEEEBDDBBDDDDDDCCCCCCCCEFED"
+ + "DDCDCDEEEEEEEEEEFEEEEEEDDEEDDEE";
+ for (i = 0; i < b.length; ++i) {
+ b[i] = s.charAt(i) - 'E';
+ }
+ SIZE = b;
+
+ // code to generate the above string
+ //
+ // int NA = 0; // not applicable (unused opcode or variable size opcode)
+ //
+ // b = new int[] {
+ // 0, //NOP, // visitInsn
+ // 1, //ACONST_NULL, // -
+ // 1, //ICONST_M1, // -
+ // 1, //ICONST_0, // -
+ // 1, //ICONST_1, // -
+ // 1, //ICONST_2, // -
+ // 1, //ICONST_3, // -
+ // 1, //ICONST_4, // -
+ // 1, //ICONST_5, // -
+ // 2, //LCONST_0, // -
+ // 2, //LCONST_1, // -
+ // 1, //FCONST_0, // -
+ // 1, //FCONST_1, // -
+ // 1, //FCONST_2, // -
+ // 2, //DCONST_0, // -
+ // 2, //DCONST_1, // -
+ // 1, //BIPUSH, // visitIntInsn
+ // 1, //SIPUSH, // -
+ // 1, //LDC, // visitLdcInsn
+ // NA, //LDC_W, // -
+ // NA, //LDC2_W, // -
+ // 1, //ILOAD, // visitVarInsn
+ // 2, //LLOAD, // -
+ // 1, //FLOAD, // -
+ // 2, //DLOAD, // -
+ // 1, //ALOAD, // -
+ // NA, //ILOAD_0, // -
+ // NA, //ILOAD_1, // -
+ // NA, //ILOAD_2, // -
+ // NA, //ILOAD_3, // -
+ // NA, //LLOAD_0, // -
+ // NA, //LLOAD_1, // -
+ // NA, //LLOAD_2, // -
+ // NA, //LLOAD_3, // -
+ // NA, //FLOAD_0, // -
+ // NA, //FLOAD_1, // -
+ // NA, //FLOAD_2, // -
+ // NA, //FLOAD_3, // -
+ // NA, //DLOAD_0, // -
+ // NA, //DLOAD_1, // -
+ // NA, //DLOAD_2, // -
+ // NA, //DLOAD_3, // -
+ // NA, //ALOAD_0, // -
+ // NA, //ALOAD_1, // -
+ // NA, //ALOAD_2, // -
+ // NA, //ALOAD_3, // -
+ // -1, //IALOAD, // visitInsn
+ // 0, //LALOAD, // -
+ // -1, //FALOAD, // -
+ // 0, //DALOAD, // -
+ // -1, //AALOAD, // -
+ // -1, //BALOAD, // -
+ // -1, //CALOAD, // -
+ // -1, //SALOAD, // -
+ // -1, //ISTORE, // visitVarInsn
+ // -2, //LSTORE, // -
+ // -1, //FSTORE, // -
+ // -2, //DSTORE, // -
+ // -1, //ASTORE, // -
+ // NA, //ISTORE_0, // -
+ // NA, //ISTORE_1, // -
+ // NA, //ISTORE_2, // -
+ // NA, //ISTORE_3, // -
+ // NA, //LSTORE_0, // -
+ // NA, //LSTORE_1, // -
+ // NA, //LSTORE_2, // -
+ // NA, //LSTORE_3, // -
+ // NA, //FSTORE_0, // -
+ // NA, //FSTORE_1, // -
+ // NA, //FSTORE_2, // -
+ // NA, //FSTORE_3, // -
+ // NA, //DSTORE_0, // -
+ // NA, //DSTORE_1, // -
+ // NA, //DSTORE_2, // -
+ // NA, //DSTORE_3, // -
+ // NA, //ASTORE_0, // -
+ // NA, //ASTORE_1, // -
+ // NA, //ASTORE_2, // -
+ // NA, //ASTORE_3, // -
+ // -3, //IASTORE, // visitInsn
+ // -4, //LASTORE, // -
+ // -3, //FASTORE, // -
+ // -4, //DASTORE, // -
+ // -3, //AASTORE, // -
+ // -3, //BASTORE, // -
+ // -3, //CASTORE, // -
+ // -3, //SASTORE, // -
+ // -1, //POP, // -
+ // -2, //POP2, // -
+ // 1, //DUP, // -
+ // 1, //DUP_X1, // -
+ // 1, //DUP_X2, // -
+ // 2, //DUP2, // -
+ // 2, //DUP2_X1, // -
+ // 2, //DUP2_X2, // -
+ // 0, //SWAP, // -
+ // -1, //IADD, // -
+ // -2, //LADD, // -
+ // -1, //FADD, // -
+ // -2, //DADD, // -
+ // -1, //ISUB, // -
+ // -2, //LSUB, // -
+ // -1, //FSUB, // -
+ // -2, //DSUB, // -
+ // -1, //IMUL, // -
+ // -2, //LMUL, // -
+ // -1, //FMUL, // -
+ // -2, //DMUL, // -
+ // -1, //IDIV, // -
+ // -2, //LDIV, // -
+ // -1, //FDIV, // -
+ // -2, //DDIV, // -
+ // -1, //IREM, // -
+ // -2, //LREM, // -
+ // -1, //FREM, // -
+ // -2, //DREM, // -
+ // 0, //INEG, // -
+ // 0, //LNEG, // -
+ // 0, //FNEG, // -
+ // 0, //DNEG, // -
+ // -1, //ISHL, // -
+ // -1, //LSHL, // -
+ // -1, //ISHR, // -
+ // -1, //LSHR, // -
+ // -1, //IUSHR, // -
+ // -1, //LUSHR, // -
+ // -1, //IAND, // -
+ // -2, //LAND, // -
+ // -1, //IOR, // -
+ // -2, //LOR, // -
+ // -1, //IXOR, // -
+ // -2, //LXOR, // -
+ // 0, //IINC, // visitIincInsn
+ // 1, //I2L, // visitInsn
+ // 0, //I2F, // -
+ // 1, //I2D, // -
+ // -1, //L2I, // -
+ // -1, //L2F, // -
+ // 0, //L2D, // -
+ // 0, //F2I, // -
+ // 1, //F2L, // -
+ // 1, //F2D, // -
+ // -1, //D2I, // -
+ // 0, //D2L, // -
+ // -1, //D2F, // -
+ // 0, //I2B, // -
+ // 0, //I2C, // -
+ // 0, //I2S, // -
+ // -3, //LCMP, // -
+ // -1, //FCMPL, // -
+ // -1, //FCMPG, // -
+ // -3, //DCMPL, // -
+ // -3, //DCMPG, // -
+ // -1, //IFEQ, // visitJumpInsn
+ // -1, //IFNE, // -
+ // -1, //IFLT, // -
+ // -1, //IFGE, // -
+ // -1, //IFGT, // -
+ // -1, //IFLE, // -
+ // -2, //IF_ICMPEQ, // -
+ // -2, //IF_ICMPNE, // -
+ // -2, //IF_ICMPLT, // -
+ // -2, //IF_ICMPGE, // -
+ // -2, //IF_ICMPGT, // -
+ // -2, //IF_ICMPLE, // -
+ // -2, //IF_ACMPEQ, // -
+ // -2, //IF_ACMPNE, // -
+ // 0, //GOTO, // -
+ // 1, //JSR, // -
+ // 0, //RET, // visitVarInsn
+ // -1, //TABLESWITCH, // visiTableSwitchInsn
+ // -1, //LOOKUPSWITCH, // visitLookupSwitch
+ // -1, //IRETURN, // visitInsn
+ // -2, //LRETURN, // -
+ // -1, //FRETURN, // -
+ // -2, //DRETURN, // -
+ // -1, //ARETURN, // -
+ // 0, //RETURN, // -
+ // NA, //GETSTATIC, // visitFieldInsn
+ // NA, //PUTSTATIC, // -
+ // NA, //GETFIELD, // -
+ // NA, //PUTFIELD, // -
+ // NA, //INVOKEVIRTUAL, // visitMethodInsn
+ // NA, //INVOKESPECIAL, // -
+ // NA, //INVOKESTATIC, // -
+ // NA, //INVOKEINTERFACE, // -
+ // NA, //INVOKEDYNAMIC, // visitInvokeDynamicInsn
+ // 1, //NEW, // visitTypeInsn
+ // 0, //NEWARRAY, // visitIntInsn
+ // 0, //ANEWARRAY, // visitTypeInsn
+ // 0, //ARRAYLENGTH, // visitInsn
+ // NA, //ATHROW, // -
+ // 0, //CHECKCAST, // visitTypeInsn
+ // 0, //INSTANCEOF, // -
+ // -1, //MONITORENTER, // visitInsn
+ // -1, //MONITOREXIT, // -
+ // NA, //WIDE, // NOT VISITED
+ // NA, //MULTIANEWARRAY, // visitMultiANewArrayInsn
+ // -1, //IFNULL, // visitJumpInsn
+ // -1, //IFNONNULL, // -
+ // NA, //GOTO_W, // -
+ // NA, //JSR_W, // -
+ // };
+ // for (i = 0; i < b.length; ++i) {
+ // System.err.print((char)('E' + b[i]));
+ // }
+ // System.err.println();
+ }
+
    /**
     * The label (i.e. basic block) to which these input and output stack map
     * frames correspond.
     */
    Label owner;

    /**
     * The input stack map frame locals.
     */
    int[] inputLocals;

    /**
     * The input stack map frame stack.
     */
    int[] inputStack;

    /**
     * The output stack map frame locals.
     */
    private int[] outputLocals;

    /**
     * The output stack map frame stack.
     */
    private int[] outputStack;

    /**
     * Relative size of the output stack. The exact semantics of this field
     * depends on the algorithm that is used.
     *
     * When only the maximum stack size is computed, this field is the size of
     * the output stack relatively to the top of the input stack.
     *
     * When the stack map frames are completely computed, this field is the
     * actual number of types in {@link #outputStack}.
     */
    private int outputStackTop;

    /**
     * Number of types that are initialized in the basic block.
     *
     * @see #initializations
     */
    private int initializationCount;

    /**
     * The types that are initialized in the basic block. A constructor
     * invocation on an UNINITIALIZED or UNINITIALIZED_THIS type must replace
     * <i>every occurrence</i> of this type in the local variables and in the
     * operand stack. This cannot be done during the first phase of the
     * algorithm since, during this phase, the local variables and the operand
     * stack are not completely computed. It is therefore necessary to store the
     * types on which constructors are invoked in the basic block, in order to
     * do this replacement during the second phase of the algorithm, where the
     * frames are fully computed. Note that this array can contain types that
     * are relative to input locals or to the input stack (see below for the
     * description of the algorithm).
     */
    private int[] initializations;
+
+ /**
+ * Returns the output frame local variable type at the given index.
+ *
+ * @param local the index of the local that must be returned.
+ * @return the output frame local variable type at the given index.
+ */
+ private int get(final int local) {
+ if (outputLocals == null || local >= outputLocals.length) {
+ // this local has never been assigned in this basic block,
+ // so it is still equal to its value in the input frame
+ return LOCAL | local;
+ } else {
+ int type = outputLocals[local];
+ if (type == 0) {
+ // this local has never been assigned in this basic block,
+ // so it is still equal to its value in the input frame
+ type = outputLocals[local] = LOCAL | local;
+ }
+ return type;
+ }
+ }
+
+ /**
+ * Sets the output frame local variable type at the given index.
+ *
+ * @param local the index of the local that must be set.
+ * @param type the value of the local that must be set.
+ */
+ private void set(final int local, final int type) {
+ // creates and/or resizes the output local variables array if necessary
+ if (outputLocals == null) {
+ outputLocals = new int[10];
+ }
+ int n = outputLocals.length;
+ if (local >= n) {
+ int[] t = new int[Math.max(local + 1, 2 * n)];
+ System.arraycopy(outputLocals, 0, t, 0, n);
+ outputLocals = t;
+ }
+ // sets the local variable
+ outputLocals[local] = type;
+ }
+
+ /**
+ * Pushes a new type onto the output frame stack.
+ *
+ * @param type the type that must be pushed.
+ */
+ private void push(final int type) {
+ // creates and/or resizes the output stack array if necessary
+ if (outputStack == null) {
+ outputStack = new int[10];
+ }
+ int n = outputStack.length;
+ if (outputStackTop >= n) {
+ int[] t = new int[Math.max(outputStackTop + 1, 2 * n)];
+ System.arraycopy(outputStack, 0, t, 0, n);
+ outputStack = t;
+ }
+ // pushes the type on the output stack
+ outputStack[outputStackTop++] = type;
+ // updates the maximun height reached by the output stack, if needed
+ int top = owner.inputStackTop + outputStackTop;
+ if (top > owner.outputStackMax) {
+ owner.outputStackMax = top;
+ }
+ }
+
+ /**
+ * Pushes a new type onto the output frame stack.
+ *
+ * @param cw the ClassWriter to which this label belongs.
+ * @param desc the descriptor of the type to be pushed. Can also be a method
+ * descriptor (in this case this method pushes its return type onto
+ * the output frame stack).
+ */
+ private void push(final ClassWriter cw, final String desc) {
+ int type = type(cw, desc);
+ if (type != 0) {
+ push(type);
+ if (type == LONG || type == DOUBLE) {
+ push(TOP);
+ }
+ }
+ }
+
    /**
     * Returns the int encoding of the given type. Array dimensions are
     * packed into the 4 most significant bits of the result.
     *
     * @param cw the ClassWriter to which this label belongs.
     * @param desc a type descriptor. Can also be a method descriptor, in
     *        which case the encoding of its return type is computed.
     * @return the int encoding of the given type (0 for 'V', i.e. void).
     */
    private static int type(final ClassWriter cw, final String desc) {
        String t;
        // For a method descriptor, start at the return type (after ')').
        int index = desc.charAt(0) == '(' ? desc.indexOf(')') + 1 : 0;
        switch (desc.charAt(index)) {
            case 'V':
                return 0;
            case 'Z':
            case 'C':
            case 'B':
            case 'S':
            case 'I':
                // boolean, char, byte and short are all INTEGER on the stack
                return INTEGER;
            case 'F':
                return FLOAT;
            case 'J':
                return LONG;
            case 'D':
                return DOUBLE;
            case 'L':
                // stores the internal name, not the descriptor!
                t = desc.substring(index + 1, desc.length() - 1);
                return OBJECT | cw.addType(t);
            // case '[':
            default:
                // extracts the dimensions and the element type
                int data;
                int dims = index + 1;
                while (desc.charAt(dims) == '[') {
                    ++dims;
                }
                switch (desc.charAt(dims)) {
                    case 'Z':
                        data = BOOLEAN;
                        break;
                    case 'C':
                        data = CHAR;
                        break;
                    case 'B':
                        data = BYTE;
                        break;
                    case 'S':
                        data = SHORT;
                        break;
                    case 'I':
                        data = INTEGER;
                        break;
                    case 'F':
                        data = FLOAT;
                        break;
                    case 'J':
                        data = LONG;
                        break;
                    case 'D':
                        data = DOUBLE;
                        break;
                    // case 'L':
                    default:
                        // stores the internal name, not the descriptor
                        t = desc.substring(dims + 1, desc.length() - 1);
                        data = OBJECT | cw.addType(t);
                }
                // dimension count goes into the top 4 bits of the encoding
                return (dims - index) << 28 | data;
        }
    }
+
+ /**
+ * Pops a type from the output frame stack and returns its value.
+ *
+ * @return the type that has been popped from the output frame stack.
+ */
+ private int pop() {
+ if (outputStackTop > 0) {
+ return outputStack[--outputStackTop];
+ } else {
+ // if the output frame stack is empty, pops from the input stack
+ return STACK | -(--owner.inputStackTop);
+ }
+ }
+
+ /**
+ * Pops the given number of types from the output frame stack.
+ *
+ * @param elements the number of types that must be popped.
+ */
+ private void pop(final int elements) {
+ if (outputStackTop >= elements) {
+ outputStackTop -= elements;
+ } else {
+ // if the number of elements to be popped is greater than the number
+ // of elements in the output stack, clear it, and pops the remaining
+ // elements from the input stack.
+ owner.inputStackTop -= elements - outputStackTop;
+ outputStackTop = 0;
+ }
+ }
+
+ /**
+ * Pops a type from the output frame stack.
+ *
+ * @param desc the descriptor of the type to be popped. Can also be a method
+ * descriptor (in this case this method pops the types corresponding
+ * to the method arguments).
+ */
+ private void pop(final String desc) {
+ char c = desc.charAt(0);
+ if (c == '(') {
+ pop((Type.getArgumentsAndReturnSizes(desc) >> 2) - 1);
+ } else if (c == 'J' || c == 'D') {
+ pop(2);
+ } else {
+ pop(1);
+ }
+ }
+
+ /**
+ * Adds a new type to the list of types on which a constructor is invoked in
+ * the basic block.
+ *
+ * @param var a type on a which a constructor is invoked.
+ */
+ private void init(final int var) {
+ // creates and/or resizes the initializations array if necessary
+ if (initializations == null) {
+ initializations = new int[2];
+ }
+ int n = initializations.length;
+ if (initializationCount >= n) {
+ int[] t = new int[Math.max(initializationCount + 1, 2 * n)];
+ System.arraycopy(initializations, 0, t, 0, n);
+ initializations = t;
+ }
+ // stores the type to be initialized
+ initializations[initializationCount++] = var;
+ }
+
    /**
     * Replaces the given type with the appropriate type if it is one of the
     * types on which a constructor is invoked in the basic block.
     *
     * @param cw the ClassWriter to which this label belongs.
     * @param t a type
     * @return t or, if t is one of the types on which a constructor is invoked
     *         in the basic block, the type corresponding to this constructor.
     */
    private int init(final ClassWriter cw, final int t) {
        int s;
        if (t == UNINITIALIZED_THIS) {
            // an initialized 'this' becomes an instance of the current class
            s = OBJECT | cw.addType(cw.thisName);
        } else if ((t & (DIM | BASE_KIND)) == UNINITIALIZED) {
            // an UNINITIALIZED type becomes an instance of the class whose
            // internal name is stored in the type table
            String type = cw.typeTable[t & BASE_VALUE].strVal1;
            s = OBJECT | cw.addType(type);
        } else {
            // any other type is left unchanged
            return t;
        }
        for (int j = 0; j < initializationCount; ++j) {
            int u = initializations[j];
            int dim = u & DIM;
            int kind = u & KIND;
            // resolve entries that are relative to the input locals or to
            // the input stack before comparing them to t
            if (kind == LOCAL) {
                u = dim + inputLocals[u & VALUE];
            } else if (kind == STACK) {
                u = dim + inputStack[inputStack.length - (u & VALUE)];
            }
            if (t == u) {
                return s;
            }
        }
        return t;
    }
+
    /**
     * Initializes the input frame of the first basic block from the method
     * descriptor.
     *
     * @param cw the ClassWriter to which this label belongs.
     * @param access the access flags of the method to which this label belongs.
     * @param args the formal parameter types of this method.
     * @param maxLocals the maximum number of local variables of this method.
     */
    void initInputFrame(
        final ClassWriter cw,
        final int access,
        final Type[] args,
        final int maxLocals)
    {
        inputLocals = new int[maxLocals];
        inputStack = new int[0];
        int i = 0;
        if ((access & Opcodes.ACC_STATIC) == 0) {
            // local 0 is the implicit 'this', uninitialized in a constructor
            if ((access & MethodWriter.ACC_CONSTRUCTOR) == 0) {
                inputLocals[i++] = OBJECT | cw.addType(cw.thisName);
            } else {
                inputLocals[i++] = UNINITIALIZED_THIS;
            }
        }
        for (int j = 0; j < args.length; ++j) {
            int t = type(cw, args[j].getDescriptor());
            inputLocals[i++] = t;
            if (t == LONG || t == DOUBLE) {
                // category-2 values occupy a second, TOP-marked slot
                inputLocals[i++] = TOP;
            }
        }
        // remaining locals have no defined value at method entry
        while (i < maxLocals) {
            inputLocals[i++] = TOP;
        }
    }
+
    /**
     * Simulates the action of the given instruction on the output stack frame:
     * pops the consumed types, pushes the produced ones, and records
     * constructor invocations for the second phase of the algorithm.
     *
     * @param opcode the opcode of the instruction.
     * @param arg the operand of the instruction, if any.
     * @param cw the class writer to which this label belongs.
     * @param item the constant pool operand of the instruction, if any.
     */
    void execute(
        final int opcode,
        final int arg,
        final ClassWriter cw,
        final Item item)
    {
        int t1, t2, t3, t4;
        switch (opcode) {
        case Opcodes.NOP:
        case Opcodes.INEG:
        case Opcodes.LNEG:
        case Opcodes.FNEG:
        case Opcodes.DNEG:
        case Opcodes.I2B:
        case Opcodes.I2C:
        case Opcodes.I2S:
        case Opcodes.GOTO:
        case Opcodes.RETURN:
            // no net effect on the stack shape
            break;
        case Opcodes.ACONST_NULL:
            push(NULL);
            break;
        case Opcodes.ICONST_M1:
        case Opcodes.ICONST_0:
        case Opcodes.ICONST_1:
        case Opcodes.ICONST_2:
        case Opcodes.ICONST_3:
        case Opcodes.ICONST_4:
        case Opcodes.ICONST_5:
        case Opcodes.BIPUSH:
        case Opcodes.SIPUSH:
        case Opcodes.ILOAD:
            push(INTEGER);
            break;
        case Opcodes.LCONST_0:
        case Opcodes.LCONST_1:
        case Opcodes.LLOAD:
            push(LONG);
            push(TOP);
            break;
        case Opcodes.FCONST_0:
        case Opcodes.FCONST_1:
        case Opcodes.FCONST_2:
        case Opcodes.FLOAD:
            push(FLOAT);
            break;
        case Opcodes.DCONST_0:
        case Opcodes.DCONST_1:
        case Opcodes.DLOAD:
            push(DOUBLE);
            push(TOP);
            break;
        case Opcodes.LDC:
            // the pushed type depends on the kind of the constant pool item
            switch (item.type) {
            case ClassWriter.INT:
                push(INTEGER);
                break;
            case ClassWriter.LONG:
                push(LONG);
                push(TOP);
                break;
            case ClassWriter.FLOAT:
                push(FLOAT);
                break;
            case ClassWriter.DOUBLE:
                push(DOUBLE);
                push(TOP);
                break;
            case ClassWriter.CLASS:
                push(OBJECT | cw.addType("java/lang/Class"));
                break;
            case ClassWriter.STR:
                push(OBJECT | cw.addType("java/lang/String"));
                break;
            case ClassWriter.MTYPE:
                push(OBJECT | cw.addType("java/lang/invoke/MethodType"));
                break;
            // case ClassWriter.HANDLE_BASE + [1..9]:
            default:
                push(OBJECT | cw.addType("java/lang/invoke/MethodHandle"));
            }
            break;
        case Opcodes.ALOAD:
            push(get(arg));
            break;
        case Opcodes.IALOAD:
        case Opcodes.BALOAD:
        case Opcodes.CALOAD:
        case Opcodes.SALOAD:
            pop(2);
            push(INTEGER);
            break;
        case Opcodes.LALOAD:
        case Opcodes.D2L:
            pop(2);
            push(LONG);
            push(TOP);
            break;
        case Opcodes.FALOAD:
            pop(2);
            push(FLOAT);
            break;
        case Opcodes.DALOAD:
        case Opcodes.L2D:
            pop(2);
            push(DOUBLE);
            push(TOP);
            break;
        case Opcodes.AALOAD:
            pop(1);
            t1 = pop();
            // the loaded value has the element type of the popped array type
            push(ELEMENT_OF + t1);
            break;
        case Opcodes.ISTORE:
        case Opcodes.FSTORE:
        case Opcodes.ASTORE:
            t1 = pop();
            set(arg, t1);
            if (arg > 0) {
                // storing into the second slot of a category-2 value
                // invalidates that value
                t2 = get(arg - 1);
                // if t2 is of kind STACK or LOCAL we cannot know its size!
                if (t2 == LONG || t2 == DOUBLE) {
                    set(arg - 1, TOP);
                } else if ((t2 & KIND) != BASE) {
                    set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
                }
            }
            break;
        case Opcodes.LSTORE:
        case Opcodes.DSTORE:
            pop(1);
            t1 = pop();
            set(arg, t1);
            set(arg + 1, TOP);
            if (arg > 0) {
                t2 = get(arg - 1);
                // if t2 is of kind STACK or LOCAL we cannot know its size!
                if (t2 == LONG || t2 == DOUBLE) {
                    set(arg - 1, TOP);
                } else if ((t2 & KIND) != BASE) {
                    set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
                }
            }
            break;
        case Opcodes.IASTORE:
        case Opcodes.BASTORE:
        case Opcodes.CASTORE:
        case Opcodes.SASTORE:
        case Opcodes.FASTORE:
        case Opcodes.AASTORE:
            pop(3);
            break;
        case Opcodes.LASTORE:
        case Opcodes.DASTORE:
            pop(4);
            break;
        case Opcodes.POP:
        case Opcodes.IFEQ:
        case Opcodes.IFNE:
        case Opcodes.IFLT:
        case Opcodes.IFGE:
        case Opcodes.IFGT:
        case Opcodes.IFLE:
        case Opcodes.IRETURN:
        case Opcodes.FRETURN:
        case Opcodes.ARETURN:
        case Opcodes.TABLESWITCH:
        case Opcodes.LOOKUPSWITCH:
        case Opcodes.ATHROW:
        case Opcodes.MONITORENTER:
        case Opcodes.MONITOREXIT:
        case Opcodes.IFNULL:
        case Opcodes.IFNONNULL:
            pop(1);
            break;
        case Opcodes.POP2:
        case Opcodes.IF_ICMPEQ:
        case Opcodes.IF_ICMPNE:
        case Opcodes.IF_ICMPLT:
        case Opcodes.IF_ICMPGE:
        case Opcodes.IF_ICMPGT:
        case Opcodes.IF_ICMPLE:
        case Opcodes.IF_ACMPEQ:
        case Opcodes.IF_ACMPNE:
        case Opcodes.LRETURN:
        case Opcodes.DRETURN:
            pop(2);
            break;
        case Opcodes.DUP:
            t1 = pop();
            push(t1);
            push(t1);
            break;
        case Opcodes.DUP_X1:
            t1 = pop();
            t2 = pop();
            push(t1);
            push(t2);
            push(t1);
            break;
        case Opcodes.DUP_X2:
            t1 = pop();
            t2 = pop();
            t3 = pop();
            push(t1);
            push(t3);
            push(t2);
            push(t1);
            break;
        case Opcodes.DUP2:
            t1 = pop();
            t2 = pop();
            push(t2);
            push(t1);
            push(t2);
            push(t1);
            break;
        case Opcodes.DUP2_X1:
            t1 = pop();
            t2 = pop();
            t3 = pop();
            push(t2);
            push(t1);
            push(t3);
            push(t2);
            push(t1);
            break;
        case Opcodes.DUP2_X2:
            t1 = pop();
            t2 = pop();
            t3 = pop();
            t4 = pop();
            push(t2);
            push(t1);
            push(t4);
            push(t3);
            push(t2);
            push(t1);
            break;
        case Opcodes.SWAP:
            t1 = pop();
            t2 = pop();
            push(t1);
            push(t2);
            break;
        case Opcodes.IADD:
        case Opcodes.ISUB:
        case Opcodes.IMUL:
        case Opcodes.IDIV:
        case Opcodes.IREM:
        case Opcodes.IAND:
        case Opcodes.IOR:
        case Opcodes.IXOR:
        case Opcodes.ISHL:
        case Opcodes.ISHR:
        case Opcodes.IUSHR:
        case Opcodes.L2I:
        case Opcodes.D2I:
        case Opcodes.FCMPL:
        case Opcodes.FCMPG:
            pop(2);
            push(INTEGER);
            break;
        case Opcodes.LADD:
        case Opcodes.LSUB:
        case Opcodes.LMUL:
        case Opcodes.LDIV:
        case Opcodes.LREM:
        case Opcodes.LAND:
        case Opcodes.LOR:
        case Opcodes.LXOR:
            pop(4);
            push(LONG);
            push(TOP);
            break;
        case Opcodes.FADD:
        case Opcodes.FSUB:
        case Opcodes.FMUL:
        case Opcodes.FDIV:
        case Opcodes.FREM:
        case Opcodes.L2F:
        case Opcodes.D2F:
            pop(2);
            push(FLOAT);
            break;
        case Opcodes.DADD:
        case Opcodes.DSUB:
        case Opcodes.DMUL:
        case Opcodes.DDIV:
        case Opcodes.DREM:
            pop(4);
            push(DOUBLE);
            push(TOP);
            break;
        case Opcodes.LSHL:
        case Opcodes.LSHR:
        case Opcodes.LUSHR:
            // shift distance is an int: 3 slots consumed, not 4
            pop(3);
            push(LONG);
            push(TOP);
            break;
        case Opcodes.IINC:
            set(arg, INTEGER);
            break;
        case Opcodes.I2L:
        case Opcodes.F2L:
            pop(1);
            push(LONG);
            push(TOP);
            break;
        case Opcodes.I2F:
            pop(1);
            push(FLOAT);
            break;
        case Opcodes.I2D:
        case Opcodes.F2D:
            pop(1);
            push(DOUBLE);
            push(TOP);
            break;
        case Opcodes.F2I:
        case Opcodes.ARRAYLENGTH:
        case Opcodes.INSTANCEOF:
            pop(1);
            push(INTEGER);
            break;
        case Opcodes.LCMP:
        case Opcodes.DCMPL:
        case Opcodes.DCMPG:
            pop(4);
            push(INTEGER);
            break;
        case Opcodes.JSR:
        case Opcodes.RET:
            throw new RuntimeException("JSR/RET are not supported with computeFrames option");
        case Opcodes.GETSTATIC:
            push(cw, item.strVal3);
            break;
        case Opcodes.PUTSTATIC:
            pop(item.strVal3);
            break;
        case Opcodes.GETFIELD:
            pop(1);
            push(cw, item.strVal3);
            break;
        case Opcodes.PUTFIELD:
            pop(item.strVal3);
            pop();
            break;
        case Opcodes.INVOKEVIRTUAL:
        case Opcodes.INVOKESPECIAL:
        case Opcodes.INVOKESTATIC:
        case Opcodes.INVOKEINTERFACE:
            pop(item.strVal3);
            if (opcode != Opcodes.INVOKESTATIC) {
                t1 = pop();
                // record '<init>' invocations for the second phase
                if (opcode == Opcodes.INVOKESPECIAL
                        && item.strVal2.charAt(0) == '<')
                {
                    init(t1);
                }
            }
            push(cw, item.strVal3);
            break;
        case Opcodes.INVOKEDYNAMIC:
            pop(item.strVal2);
            push(cw, item.strVal2);
            break;
        case Opcodes.NEW:
            push(UNINITIALIZED | cw.addUninitializedType(item.strVal1, arg));
            break;
        case Opcodes.NEWARRAY:
            pop();
            switch (arg) {
            case Opcodes.T_BOOLEAN:
                push(ARRAY_OF | BOOLEAN);
                break;
            case Opcodes.T_CHAR:
                push(ARRAY_OF | CHAR);
                break;
            case Opcodes.T_BYTE:
                push(ARRAY_OF | BYTE);
                break;
            case Opcodes.T_SHORT:
                push(ARRAY_OF | SHORT);
                break;
            case Opcodes.T_INT:
                push(ARRAY_OF | INTEGER);
                break;
            case Opcodes.T_FLOAT:
                push(ARRAY_OF | FLOAT);
                break;
            case Opcodes.T_DOUBLE:
                push(ARRAY_OF | DOUBLE);
                break;
            // case Opcodes.T_LONG:
            default:
                push(ARRAY_OF | LONG);
                break;
            }
            break;
        case Opcodes.ANEWARRAY:
            String s = item.strVal1;
            pop();
            if (s.charAt(0) == '[') {
                push(cw, '[' + s);
            } else {
                push(ARRAY_OF | OBJECT | cw.addType(s));
            }
            break;
        case Opcodes.CHECKCAST:
            s = item.strVal1;
            pop();
            if (s.charAt(0) == '[') {
                push(cw, s);
            } else {
                push(OBJECT | cw.addType(s));
            }
            break;
        // case Opcodes.MULTIANEWARRAY:
        default:
            // arg is the number of dimensions to pop
            pop(arg);
            push(cw, item.strVal1);
            break;
        }
    }
+
    /**
     * Merges the input frame of the given basic block with the input and output
     * frames of this basic block. Returns <tt>true</tt> if the input frame of
     * the given label has been changed by this operation.
     *
     * @param cw the ClassWriter to which this label belongs.
     * @param frame the basic block whose input frame must be updated.
     * @param edge the kind of the {@link Edge} between this label and 'label'.
     *        See {@link Edge#info}.
     * @return <tt>true</tt> if the input frame of the given label has been
     *         changed by this operation.
     */
    boolean merge(final ClassWriter cw, final Frame frame, final int edge) {
        boolean changed = false;
        int i, s, dim, kind, t;

        int nLocal = inputLocals.length;
        int nStack = inputStack.length;
        if (frame.inputLocals == null) {
            frame.inputLocals = new int[nLocal];
            changed = true;
        }

        // compute each local of this block's output frame, resolving
        // symbolic LOCAL/STACK references against the input frame, and
        // merge it into the successor's input frame
        for (i = 0; i < nLocal; ++i) {
            if (outputLocals != null && i < outputLocals.length) {
                s = outputLocals[i];
                if (s == 0) {
                    t = inputLocals[i];
                } else {
                    dim = s & DIM;
                    kind = s & KIND;
                    if (kind == BASE) {
                        t = s;
                    } else {
                        if (kind == LOCAL) {
                            t = dim + inputLocals[s & VALUE];
                        } else {
                            t = dim + inputStack[nStack - (s & VALUE)];
                        }
                        if ((s & TOP_IF_LONG_OR_DOUBLE) != 0 && (t == LONG || t == DOUBLE)) {
                            t = TOP;
                        }
                    }
                }
            } else {
                t = inputLocals[i];
            }
            if (initializations != null) {
                t = init(cw, t);
            }
            changed |= merge(cw, t, frame.inputLocals, i);
        }

        if (edge > 0) {
            // NOTE(review): edge > 0 is presumably an exception handler edge
            // whose value encodes the caught type — the successor's input
            // stack is then a single element. TODO confirm against Edge.
            for (i = 0; i < nLocal; ++i) {
                t = inputLocals[i];
                changed |= merge(cw, t, frame.inputLocals, i);
            }
            if (frame.inputStack == null) {
                frame.inputStack = new int[1];
                changed = true;
            }
            changed |= merge(cw, edge, frame.inputStack, 0);
            return changed;
        }

        // merge the input stack followed by this block's output stack into
        // the successor's input stack
        int nInputStack = inputStack.length + owner.inputStackTop;
        if (frame.inputStack == null) {
            frame.inputStack = new int[nInputStack + outputStackTop];
            changed = true;
        }

        for (i = 0; i < nInputStack; ++i) {
            t = inputStack[i];
            if (initializations != null) {
                t = init(cw, t);
            }
            changed |= merge(cw, t, frame.inputStack, i);
        }
        for (i = 0; i < outputStackTop; ++i) {
            s = outputStack[i];
            dim = s & DIM;
            kind = s & KIND;
            if (kind == BASE) {
                t = s;
            } else {
                if (kind == LOCAL) {
                    t = dim + inputLocals[s & VALUE];
                } else {
                    t = dim + inputStack[nStack - (s & VALUE)];
                }
                if ((s & TOP_IF_LONG_OR_DOUBLE) != 0 && (t == LONG || t == DOUBLE)) {
                    t = TOP;
                }
            }
            if (initializations != null) {
                t = init(cw, t);
            }
            changed |= merge(cw, t, frame.inputStack, nInputStack + i);
        }
        return changed;
    }
+
    /**
     * Merges the type at the given index in the given type array with the given
     * type. Returns <tt>true</tt> if the type array has been modified by this
     * operation.
     *
     * @param cw the ClassWriter to which this label belongs.
     * @param t the type with which the type array element must be merged.
     * @param types an array of types.
     * @param index the index of the type that must be merged in 'types'.
     * @return <tt>true</tt> if the type array has been modified by this
     *         operation.
     */
    private static boolean merge(
        final ClassWriter cw,
        int t,
        final int[] types,
        final int index)
    {
        int u = types[index];
        if (u == t) {
            // if the types are equal, merge(u,t)=u, so there is no change
            return false;
        }
        if ((t & ~DIM) == NULL) {
            // an 'array of NULL' is treated as plain NULL for merging
            if (u == NULL) {
                return false;
            }
            t = NULL;
        }
        if (u == 0) {
            // if types[index] has never been assigned, merge(u,t)=t
            types[index] = t;
            return true;
        }
        int v;
        if ((u & BASE_KIND) == OBJECT || (u & DIM) != 0) {
            // if u is a reference type of any dimension
            if (t == NULL) {
                // if t is the NULL type, merge(u,t)=u, so there is no change
                return false;
            } else if ((t & (DIM | BASE_KIND)) == (u & (DIM | BASE_KIND))) {
                if ((u & BASE_KIND) == OBJECT) {
                    // if t is also a reference type, and if u and t have the
                    // same dimension merge(u,t) = dim(t) | common parent of the
                    // element types of u and t
                    v = (t & DIM) | OBJECT
                            | cw.getMergedType(t & BASE_VALUE, u & BASE_VALUE);
                } else {
                    // if u and t are array types, but not with the same element
                    // type, merge(u,t)=java/lang/Object
                    v = OBJECT | cw.addType("java/lang/Object");
                }
            } else if ((t & BASE_KIND) == OBJECT || (t & DIM) != 0) {
                // if t is any other reference or array type,
                // merge(u,t)=java/lang/Object
                v = OBJECT | cw.addType("java/lang/Object");
            } else {
                // if t is any other type, merge(u,t)=TOP
                v = TOP;
            }
        } else if (u == NULL) {
            // if u is the NULL type, merge(u,t)=t,
            // or TOP if t is not a reference type
            v = (t & BASE_KIND) == OBJECT || (t & DIM) != 0 ? t : TOP;
        } else {
            // if u is any other type, merge(u,t)=TOP whatever t
            v = TOP;
        }
        if (u != v) {
            types[index] = v;
            return true;
        }
        return false;
    }
+}
diff --git a/src/asm/scala/tools/asm/Handle.java b/src/asm/scala/tools/asm/Handle.java
new file mode 100644
index 0000000..be8f334
--- /dev/null
+++ b/src/asm/scala/tools/asm/Handle.java
@@ -0,0 +1,159 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package scala.tools.asm;
+
/**
 * A reference to a field or a method.
 *
 * @author Remi Forax
 * @author Eric Bruneton
 */
public final class Handle {

    /**
     * The kind of field or method designated by this Handle. Should be one
     * of the Opcodes.H_* tag constants: {@link Opcodes#H_GETFIELD},
     * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
     * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
     * {@link Opcodes#H_INVOKESTATIC}, {@link Opcodes#H_INVOKESPECIAL},
     * {@link Opcodes#H_NEWINVOKESPECIAL} or
     * {@link Opcodes#H_INVOKEINTERFACE}.
     */
    final int tag;

    /**
     * The internal name of the class owning the field or method designated
     * by this handle.
     */
    final String owner;

    /**
     * The name of the field or method designated by this handle.
     */
    final String name;

    /**
     * The descriptor of the field or method designated by this handle.
     */
    final String desc;

    /**
     * Constructs a new field or method handle.
     *
     * @param tag the kind of field or method designated by this Handle. Must
     *        be one of the Opcodes.H_* tag constants (see {@link #tag}).
     * @param owner the internal name of the class owning the field or method
     *        designated by this handle.
     * @param name the name of the field or method designated by this handle.
     * @param desc the descriptor of the field or method designated by this
     *        handle.
     */
    public Handle(int tag, String owner, String name, String desc) {
        this.tag = tag;
        this.owner = owner;
        this.name = name;
        this.desc = desc;
    }

    /**
     * Returns the kind of field or method designated by this handle.
     *
     * @return one of the Opcodes.H_* tag constants (see {@link #tag}).
     */
    public int getTag() {
        return tag;
    }

    /**
     * Returns the internal name of the class owning the field or method
     * designated by this handle.
     *
     * @return the internal name of the owner class.
     */
    public String getOwner() {
        return owner;
    }

    /**
     * Returns the name of the field or method designated by this handle.
     *
     * @return the name of the field or method designated by this handle.
     */
    public String getName() {
        return name;
    }

    /**
     * Returns the descriptor of the field or method designated by this handle.
     *
     * @return the descriptor of the field or method designated by this handle.
     */
    public String getDesc() {
        return desc;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof Handle)) {
            return false;
        }
        Handle other = (Handle) obj;
        return tag == other.tag
                && owner.equals(other.owner)
                && name.equals(other.name)
                && desc.equals(other.desc);
    }

    @Override
    public int hashCode() {
        // Same formula as upstream ASM: the tag plus the product of the
        // three string hash codes. Kept identical for compatibility.
        return tag + owner.hashCode() * name.hashCode() * desc.hashCode();
    }

    /**
     * Returns the textual representation of this handle. The textual
     * representation is: <pre>owner '.' name desc ' ' '(' tag ')'</pre>. As
     * this format is unambiguous, it can be parsed if necessary.
     */
    @Override
    public String toString() {
        return owner + '.' + name + desc + " (" + tag + ')';
    }
}
diff --git a/src/asm/scala/tools/asm/Handler.java b/src/asm/scala/tools/asm/Handler.java
new file mode 100644
index 0000000..9e92bb9
--- /dev/null
+++ b/src/asm/scala/tools/asm/Handler.java
@@ -0,0 +1,118 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
/**
 * Information about an exception handler block.
 *
 * @author Eric Bruneton
 */
class Handler {

    /**
     * Beginning of the exception handler's scope (inclusive).
     */
    Label start;

    /**
     * End of the exception handler's scope (exclusive).
     */
    Label end;

    /**
     * Beginning of the exception handler's code.
     */
    Label handler;

    /**
     * Internal name of the type of exceptions handled by this handler, or
     * <tt>null</tt> to catch any exceptions.
     */
    String desc;

    /**
     * Constant pool index of the internal name of the type of exceptions
     * handled by this handler, or 0 to catch any exceptions.
     */
    int type;

    /**
     * Next exception handler block info.
     */
    Handler next;

    /**
     * Removes the range between start and end from the given exception
     * handlers. A handler may be dropped, truncated, or split in two if the
     * removed range falls strictly inside it.
     *
     * @param h an exception handler list.
     * @param start the start of the range to be removed.
     * @param end the end of the range to be removed. May be null, meaning
     *        "remove up to the end of the method".
     * @return the exception handler list with the start-end range removed.
     */
    static Handler remove(Handler h, Label start, Label end) {
        if (h == null) {
            return null;
        } else {
            // process the tail of the list first, then this node
            h.next = remove(h.next, start, end);
        }
        int hstart = h.start.position;
        int hend = h.end.position;
        int s = start.position;
        // a null end means the range extends to the end of the method
        int e = end == null ? Integer.MAX_VALUE : end.position;
        // if [hstart,hend[ and [s,e[ intervals intersect...
        if (s < hend && e > hstart) {
            if (s <= hstart) {
                if (e >= hend) {
                    // [hstart,hend[ fully included in [s,e[, h removed
                    h = h.next;
                } else {
                    // [hstart,hend[ minus [s,e[ = [e,hend[
                    h.start = end;
                }
            } else if (e >= hend) {
                // [hstart,hend[ minus [s,e[ = [hstart,s[
                h.end = start;
            } else {
                // [hstart,hend[ minus [s,e[ = [hstart,s[ + [e,hend[
                // the removed range is strictly inside h: split h in two
                Handler g = new Handler();
                g.start = end;
                g.end = h.end;
                g.handler = h.handler;
                g.desc = h.desc;
                g.type = h.type;
                g.next = h.next;
                h.end = start;
                h.next = g;
            }
        }
        return h;
    }
}
diff --git a/src/asm/scala/tools/asm/Item.java b/src/asm/scala/tools/asm/Item.java
new file mode 100644
index 0000000..021a0b1
--- /dev/null
+++ b/src/asm/scala/tools/asm/Item.java
@@ -0,0 +1,297 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A constant pool item. Constant pool items can be created with the 'newXXX'
+ * methods in the {@link ClassWriter} class.
+ *
+ * @author Eric Bruneton
+ */
+final class Item {
+
+ /**
+ * Index of this item in the constant pool.
+ */
+ int index;
+
+ /**
+ * Type of this constant pool item. A single class is used to represent all
+ * constant pool item types, in order to minimize the bytecode size of this
+ * package. The value of this field is one of {@link ClassWriter#INT},
+ * {@link ClassWriter#LONG}, {@link ClassWriter#FLOAT},
+ * {@link ClassWriter#DOUBLE}, {@link ClassWriter#UTF8},
+ * {@link ClassWriter#STR}, {@link ClassWriter#CLASS},
+ * {@link ClassWriter#NAME_TYPE}, {@link ClassWriter#FIELD},
+ * {@link ClassWriter#METH}, {@link ClassWriter#IMETH},
+ * {@link ClassWriter#MTYPE}, {@link ClassWriter#INDY}.
+ *
+ * MethodHandle constant 9 variations are stored using a range
+ * of 9 values from {@link ClassWriter#HANDLE_BASE} + 1 to
+ * {@link ClassWriter#HANDLE_BASE} + 9.
+ *
+ * Special Item types are used for Items that are stored in the ClassWriter
+ * {@link ClassWriter#typeTable}, instead of the constant pool, in order to
+ * avoid clashes with normal constant pool items in the ClassWriter constant
+ * pool's hash table. These special item types are
+ * {@link ClassWriter#TYPE_NORMAL}, {@link ClassWriter#TYPE_UNINIT} and
+ * {@link ClassWriter#TYPE_MERGED}.
+ */
+ int type;
+
+ /**
+ * Value of this item, for an integer item.
+ */
+ int intVal;
+
+ /**
+ * Value of this item, for a long item.
+ */
+ long longVal;
+
+ /**
+ * First part of the value of this item, for items that do not hold a
+ * primitive value.
+ */
+ String strVal1;
+
+ /**
+ * Second part of the value of this item, for items that do not hold a
+ * primitive value.
+ */
+ String strVal2;
+
+ /**
+ * Third part of the value of this item, for items that do not hold a
+ * primitive value.
+ */
+ String strVal3;
+
+ /**
+ * The hash code value of this constant pool item.
+ */
+ int hashCode;
+
+ /**
+ * Link to another constant pool item, used for collision lists in the
+ * constant pool's hash table.
+ */
+ Item next;
+
+ /**
+ * Constructs an uninitialized {@link Item}.
+ */
+ Item() {
+ }
+
+ /**
+ * Constructs an uninitialized {@link Item} for constant pool element at
+ * given position.
+ *
+ * @param index index of the item to be constructed.
+ */
+ Item(final int index) {
+ this.index = index;
+ }
+
+ /**
+ * Constructs a copy of the given item.
+ *
+ * @param index index of the item to be constructed.
+ * @param i the item that must be copied into the item to be constructed.
+ */
+ Item(final int index, final Item i) {
+ this.index = index;
+ type = i.type;
+ intVal = i.intVal;
+ longVal = i.longVal;
+ strVal1 = i.strVal1;
+ strVal2 = i.strVal2;
+ strVal3 = i.strVal3;
+ hashCode = i.hashCode;
+ }
+
+ /**
+ * Sets this item to an integer item.
+ *
+ * @param intVal the value of this item.
+ */
+ void set(final int intVal) {
+ this.type = ClassWriter.INT;
+ this.intVal = intVal;
+ this.hashCode = 0x7FFFFFFF & (type + intVal);
+ }
+
+ /**
+ * Sets this item to a long item.
+ *
+ * @param longVal the value of this item.
+ */
+ void set(final long longVal) {
+ this.type = ClassWriter.LONG;
+ this.longVal = longVal;
+ this.hashCode = 0x7FFFFFFF & (type + (int) longVal);
+ }
+
+ /**
+ * Sets this item to a float item.
+ *
+ * @param floatVal the value of this item.
+ */
+ void set(final float floatVal) {
+ this.type = ClassWriter.FLOAT;
+ this.intVal = Float.floatToRawIntBits(floatVal);
+ this.hashCode = 0x7FFFFFFF & (type + (int) floatVal);
+ }
+
+ /**
+ * Sets this item to a double item.
+ *
+ * @param doubleVal the value of this item.
+ */
+ void set(final double doubleVal) {
+ this.type = ClassWriter.DOUBLE;
+ this.longVal = Double.doubleToRawLongBits(doubleVal);
+ this.hashCode = 0x7FFFFFFF & (type + (int) doubleVal);
+ }
+
+ /**
+ * Sets this item to an item that does not hold a primitive value.
+ *
+ * @param type the type of this item.
+ * @param strVal1 first part of the value of this item.
+ * @param strVal2 second part of the value of this item.
+ * @param strVal3 third part of the value of this item.
+ */
+ void set(
+ final int type,
+ final String strVal1,
+ final String strVal2,
+ final String strVal3)
+ {
+ this.type = type;
+ this.strVal1 = strVal1;
+ this.strVal2 = strVal2;
+ this.strVal3 = strVal3;
+ switch (type) {
+ case ClassWriter.UTF8:
+ case ClassWriter.STR:
+ case ClassWriter.CLASS:
+ case ClassWriter.MTYPE:
+ case ClassWriter.TYPE_NORMAL:
+ hashCode = 0x7FFFFFFF & (type + strVal1.hashCode());
+ return;
+ case ClassWriter.NAME_TYPE:
+ hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
+ * strVal2.hashCode());
+ return;
+ // ClassWriter.FIELD:
+ // ClassWriter.METH:
+ // ClassWriter.IMETH:
+ // ClassWriter.HANDLE_BASE + 1..9
+ default:
+ hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
+ * strVal2.hashCode() * strVal3.hashCode());
+ }
+ }
+
+ /**
+ * Sets the item to an InvokeDynamic item.
+ *
+ * @param name invokedynamic's name.
+ * @param desc invokedynamic's desc.
+ * @param bsmIndex zero based index into the class attribute BootstrapMethods.
+ */
+ void set(String name, String desc, int bsmIndex) {
+ this.type = ClassWriter.INDY;
+ this.longVal = bsmIndex;
+ this.strVal1 = name;
+ this.strVal2 = desc;
+ this.hashCode = 0x7FFFFFFF & (ClassWriter.INDY + bsmIndex
+ * strVal1.hashCode() * strVal2.hashCode());
+ }
+
+ /**
+ * Sets the item to a BootstrapMethod item.
+ *
+ * @param position position in bytes in the class attribute BootstrapMethods.
+ * @param hashCode hashcode of the item. This hashcode is processed from
+ * the hashcode of the bootstrap method and the hashcode of
+ * all bootstrap arguments.
+ */
+ void set(int position, int hashCode) {
+ this.type = ClassWriter.BSM;
+ this.intVal = position;
+ this.hashCode = hashCode;
+ }
+
+ /**
+ * Indicates if the given item is equal to this one. <i>This method assumes
+ * that the two items have the same {@link #type}</i>.
+ *
+ * @param i the item to be compared to this one. Both items must have the
+ * same {@link #type}.
+ * @return <tt>true</tt> if the given item is equal to this one,
+ * <tt>false</tt> otherwise.
+ */
+ boolean isEqualTo(final Item i) {
+ switch (type) {
+ case ClassWriter.UTF8:
+ case ClassWriter.STR:
+ case ClassWriter.CLASS:
+ case ClassWriter.MTYPE:
+ case ClassWriter.TYPE_NORMAL:
+ return i.strVal1.equals(strVal1);
+ case ClassWriter.TYPE_MERGED:
+ case ClassWriter.LONG:
+ case ClassWriter.DOUBLE:
+ return i.longVal == longVal;
+ case ClassWriter.INT:
+ case ClassWriter.FLOAT:
+ return i.intVal == intVal;
+ case ClassWriter.TYPE_UNINIT:
+ return i.intVal == intVal && i.strVal1.equals(strVal1);
+ case ClassWriter.NAME_TYPE:
+ return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2);
+ case ClassWriter.INDY:
+ return i.longVal == longVal && i.strVal1.equals(strVal1)
+ && i.strVal2.equals(strVal2);
+
+ // case ClassWriter.FIELD:
+ // case ClassWriter.METH:
+ // case ClassWriter.IMETH:
+ // case ClassWriter.HANDLE_BASE + 1..9
+ default:
+ return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2)
+ && i.strVal3.equals(strVal3);
+ }
+ }
+
+}
diff --git a/src/asm/scala/tools/asm/Label.java b/src/asm/scala/tools/asm/Label.java
new file mode 100644
index 0000000..712c7f2
--- /dev/null
+++ b/src/asm/scala/tools/asm/Label.java
@@ -0,0 +1,555 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A label represents a position in the bytecode of a method. Labels are used
+ * for jump, goto, and switch instructions, and for try catch blocks. A label
+ * designates the <i>instruction</i> that is just after. Note however that
+ * there can be other elements between a label and the instruction it
+ * designates (such as other labels, stack map frames, line numbers, etc.).
+ *
+ * @author Eric Bruneton
+ */
+public class Label {
+
+ /**
+ * Indicates if this label is only used for debug attributes. Such a label
+ * is not the start of a basic block, the target of a jump instruction, or
+ * an exception handler. It can be safely ignored in control flow graph
+ * analysis algorithms (for optimization purposes).
+ */
+ static final int DEBUG = 1;
+
+ /**
+ * Indicates if the position of this label is known.
+ */
+ static final int RESOLVED = 2;
+
+ /**
+ * Indicates if this label has been updated, after instruction resizing.
+ */
+ static final int RESIZED = 4;
+
+ /**
+ * Indicates if this basic block has been pushed in the basic block stack.
+ * See {@link MethodWriter#visitMaxs visitMaxs}.
+ */
+ static final int PUSHED = 8;
+
+ /**
+ * Indicates if this label is the target of a jump instruction, or the start
+ * of an exception handler.
+ */
+ static final int TARGET = 16;
+
+ /**
+ * Indicates if a stack map frame must be stored for this label.
+ */
+ static final int STORE = 32;
+
+ /**
+ * Indicates if this label corresponds to a reachable basic block.
+ */
+ static final int REACHABLE = 64;
+
+ /**
+ * Indicates if this basic block ends with a JSR instruction.
+ */
+ static final int JSR = 128;
+
+ /**
+ * Indicates if this basic block ends with a RET instruction.
+ */
+ static final int RET = 256;
+
+ /**
+ * Indicates if this basic block is the start of a subroutine.
+ */
+ static final int SUBROUTINE = 512;
+
+ /**
+ * Indicates if this subroutine basic block has been visited by a
+ * visitSubroutine(null, ...) call.
+ */
+ static final int VISITED = 1024;
+
+ /**
+ * Indicates if this subroutine basic block has been visited by a
+ * visitSubroutine(!null, ...) call.
+ */
+ static final int VISITED2 = 2048;
+
+ /**
+ * Field used to associate user information to a label. Warning: this field
+ * is used by the ASM tree package. In order to use it with the ASM tree
+ * package you must override the {@link
+ * org.objectweb.asm.tree.MethodNode#getLabelNode} method.
+ */
+ public Object info;
+
+ /**
+ * Flags that indicate the status of this label.
+ *
+ * @see #DEBUG
+ * @see #RESOLVED
+ * @see #RESIZED
+ * @see #PUSHED
+ * @see #TARGET
+ * @see #STORE
+ * @see #REACHABLE
+ * @see #JSR
+ * @see #RET
+ */
+ int status;
+
+ /**
+ * The line number corresponding to this label, if known.
+ */
+ int line;
+
+ /**
+ * The position of this label in the code, if known.
+ */
+ int position;
+
+ /**
+ * Number of forward references to this label, times two.
+ */
+ private int referenceCount;
+
+ /**
+ * Information about forward references. Each forward reference is
+ * described by two consecutive integers in this array: the first one is the
+ * position of the first byte of the bytecode instruction that contains the
+ * forward reference, while the second is the position of the first byte of
+ * the forward reference itself. In fact the sign of the first integer
+ * indicates if this reference uses 2 or 4 bytes, and its absolute value
+ * gives the position of the bytecode instruction. This array is also used
+ * as a bitset to store the subroutines to which a basic block belongs. This
+ * information is needed in {@link MethodWriter#visitMaxs}, after all
+ * forward references have been resolved. Hence the same array can be used
+ * for both purposes without problems.
+ */
+ private int[] srcAndRefPositions;
+
+ // ------------------------------------------------------------------------
+
+ /*
+ * Fields for the control flow and data flow graph analysis algorithms (used
+ * to compute the maximum stack size or the stack map frames). A control
+ * flow graph contains one node per "basic block", and one edge per "jump"
+ * from one basic block to another. Each node (i.e., each basic block) is
+ * represented by the Label object that corresponds to the first instruction
+ * of this basic block. Each node also stores the list of its successors in
+ * the graph, as a linked list of Edge objects.
+ *
+ * The control flow analysis algorithms used to compute the maximum stack
+ * size or the stack map frames are similar and use two steps. The first
+ * step, during the visit of each instruction, builds information about the
+ * state of the local variables and the operand stack at the end of each
+ * basic block, called the "output frame", <i>relatively</i> to the frame
+ * state at the beginning of the basic block, which is called the "input
+ * frame", and which is <i>unknown</i> during this step. The second step,
+ * in {@link MethodWriter#visitMaxs}, is a fix point algorithm that
+ * computes information about the input frame of each basic block, from the
+ * input state of the first basic block (known from the method signature),
+ * and by using the previously computed relative output frames.
+ *
+ * The algorithm used to compute the maximum stack size only computes the
+ * relative output and absolute input stack heights, while the algorithm
+ * used to compute stack map frames computes relative output frames and
+ * absolute input frames.
+ */
+
+ /**
+ * Start of the output stack relatively to the input stack. The exact
+ * semantics of this field depends on the algorithm that is used.
+ *
+ * When only the maximum stack size is computed, this field is the number of
+ * elements in the input stack.
+ *
+ * When the stack map frames are completely computed, this field is the
+ * offset of the first output stack element relatively to the top of the
+ * input stack. This offset is always negative or zero. A zero offset means
+ * that the output stack must be appended to the input stack. A -n offset
+ * means that the first n output stack elements must replace the top n input
+ * stack elements, and that the other elements must be appended to the input
+ * stack.
+ */
+ int inputStackTop;
+
+ /**
+ * Maximum height reached by the output stack, relatively to the top of the
+ * input stack. This maximum is always positive or zero.
+ */
+ int outputStackMax;
+
+ /**
+ * Information about the input and output stack map frames of this basic
+ * block. This field is only used when {@link ClassWriter#COMPUTE_FRAMES}
+ * option is used.
+ */
+ Frame frame;
+
+ /**
+ * The successor of this label, in the order they are visited. This linked
+ * list does not include labels used for debug info only. If
+ * {@link ClassWriter#COMPUTE_FRAMES} option is used then, in addition, it
+ * does not contain successive labels that denote the same bytecode position
+ * (in this case only the first label appears in this list).
+ */
+ Label successor;
+
+ /**
+ * The successors of this node in the control flow graph. These successors
+ * are stored in a linked list of {@link Edge Edge} objects, linked to each
+ * other by their {@link Edge#next} field.
+ */
+ Edge successors;
+
+ /**
+ * The next basic block in the basic block stack. This stack is used in the
+ * main loop of the fix point algorithm used in the second step of the
+ * control flow analysis algorithms. It is also used in
+ * {@link #visitSubroutine} to avoid using a recursive method.
+ *
+ * @see MethodWriter#visitMaxs
+ */
+ Label next;
+
+ // ------------------------------------------------------------------------
+ // Constructor
+ // ------------------------------------------------------------------------
+
+ /**
+ * Constructs a new label.
+ */
+ public Label() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Methods to compute offsets and to manage forward references
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the offset corresponding to this label. This offset is computed
+ * from the start of the method's bytecode. <i>This method is intended for
+ * {@link Attribute} sub classes, and is normally not needed by class
+ * generators or adapters.</i>
+ *
+ * @return the offset corresponding to this label.
+ * @throws IllegalStateException if this label is not resolved yet.
+ */
+ public int getOffset() {
+ if ((status & RESOLVED) == 0) {
+ throw new IllegalStateException("Label offset position has not been resolved yet");
+ }
+ return position;
+ }
+
+ /**
+ * Puts a reference to this label in the bytecode of a method. If the
+ * position of the label is known, the offset is computed and written
+ * directly. Otherwise, a null offset is written and a new forward reference
+ * is declared for this label.
+ *
+ * @param owner the code writer that calls this method.
+ * @param out the bytecode of the method.
+ * @param source the position of first byte of the bytecode instruction that
+ * contains this label.
+ * @param wideOffset <tt>true</tt> if the reference must be stored in 4
+ * bytes, or <tt>false</tt> if it must be stored with 2 bytes.
+ * @throws IllegalArgumentException if this label has not been created by
+ * the given code writer.
+ */
+ void put(
+ final MethodWriter owner,
+ final ByteVector out,
+ final int source,
+ final boolean wideOffset)
+ {
+ if ((status & RESOLVED) == 0) {
+ if (wideOffset) {
+ addReference(-1 - source, out.length);
+ out.putInt(-1);
+ } else {
+ addReference(source, out.length);
+ out.putShort(-1);
+ }
+ } else {
+ if (wideOffset) {
+ out.putInt(position - source);
+ } else {
+ out.putShort(position - source);
+ }
+ }
+ }
+
+ /**
+ * Adds a forward reference to this label. This method must be called only
+ * for a true forward reference, i.e. only if this label is not resolved
+ * yet. For backward references, the offset of the reference can be, and
+ * must be, computed and stored directly.
+ *
+ * @param sourcePosition the position of the referencing instruction. This
+ * position will be used to compute the offset of this forward
+ * reference.
+ * @param referencePosition the position where the offset for this forward
+ * reference must be stored.
+ */
+ private void addReference(
+ final int sourcePosition,
+ final int referencePosition)
+ {
+ if (srcAndRefPositions == null) {
+ srcAndRefPositions = new int[6];
+ }
+ if (referenceCount >= srcAndRefPositions.length) {
+ int[] a = new int[srcAndRefPositions.length + 6];
+ System.arraycopy(srcAndRefPositions,
+ 0,
+ a,
+ 0,
+ srcAndRefPositions.length);
+ srcAndRefPositions = a;
+ }
+ srcAndRefPositions[referenceCount++] = sourcePosition;
+ srcAndRefPositions[referenceCount++] = referencePosition;
+ }
+
+ /**
+ * Resolves all forward references to this label. This method must be called
+ * when this label is added to the bytecode of the method, i.e. when its
+ * position becomes known. This method fills in the blanks that were left
+ * in the bytecode by each forward reference previously added to this label.
+ *
+ * @param owner the code writer that calls this method.
+ * @param position the position of this label in the bytecode.
+ * @param data the bytecode of the method.
+ * @return <tt>true</tt> if a blank that was left for this label was too
+ * small to store the offset. In such a case the corresponding jump
+ * instruction is replaced with a pseudo instruction (using unused
+ * opcodes) using an unsigned two bytes offset. These pseudo
+ * instructions will need to be replaced with true instructions with
+ * wider offsets (4 bytes instead of 2). This is done in
+ * {@link MethodWriter#resizeInstructions}.
+ * @throws IllegalArgumentException if this label has already been resolved,
+ * or if it has not been created by the given code writer.
+ */
+ boolean resolve(
+ final MethodWriter owner,
+ final int position,
+ final byte[] data)
+ {
+ boolean needUpdate = false;
+ this.status |= RESOLVED;
+ this.position = position;
+ int i = 0;
+ while (i < referenceCount) {
+ int source = srcAndRefPositions[i++];
+ int reference = srcAndRefPositions[i++];
+ int offset;
+ if (source >= 0) {
+ offset = position - source;
+ if (offset < Short.MIN_VALUE || offset > Short.MAX_VALUE) {
+ /*
+ * changes the opcode of the jump instruction, in order to
+ * be able to find it later (see resizeInstructions in
+ * MethodWriter). These temporary opcodes are similar to
+ * jump instruction opcodes, except that the 2 bytes offset
+ * is unsigned (and can therefore represent values from 0 to
+ * 65535, which is sufficient since the size of a method is
+ * limited to 65535 bytes).
+ */
+ int opcode = data[reference - 1] & 0xFF;
+ if (opcode <= Opcodes.JSR) {
+ // changes IFEQ ... JSR to opcodes 202 to 217
+ data[reference - 1] = (byte) (opcode + 49);
+ } else {
+ // changes IFNULL and IFNONNULL to opcodes 218 and 219
+ data[reference - 1] = (byte) (opcode + 20);
+ }
+ needUpdate = true;
+ }
+ data[reference++] = (byte) (offset >>> 8);
+ data[reference] = (byte) offset;
+ } else {
+ offset = position + source + 1;
+ data[reference++] = (byte) (offset >>> 24);
+ data[reference++] = (byte) (offset >>> 16);
+ data[reference++] = (byte) (offset >>> 8);
+ data[reference] = (byte) offset;
+ }
+ }
+ return needUpdate;
+ }
+
+ /**
+ * Returns the first label of the series to which this label belongs. For an
+ * isolated label or for the first label in a series of successive labels,
+ * this method returns the label itself. For other labels it returns the
+ * first label of the series.
+ *
+ * @return the first label of the series to which this label belongs.
+ */
+ Label getFirst() {
+ return !ClassReader.FRAMES || frame == null ? this : frame.owner;
+ }
+
+ // ------------------------------------------------------------------------
+ // Methods related to subroutines
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns true if this basic block belongs to the given subroutine.
+ *
+ * @param id a subroutine id.
+ * @return true if this basic block belongs to the given subroutine.
+ */
+ boolean inSubroutine(final long id) {
+ if ((status & Label.VISITED) != 0) {
+ return (srcAndRefPositions[(int) (id >>> 32)] & (int) id) != 0;
+ }
+ return false;
+ }
+
+ /**
+ * Returns true if this basic block and the given one belong to a common
+ * subroutine.
+ *
+ * @param block another basic block.
+ * @return true if this basic block and the given one belong to a common
+ * subroutine.
+ */
+ boolean inSameSubroutine(final Label block) {
+ if ((status & VISITED) == 0 || (block.status & VISITED) == 0) {
+ return false;
+ }
+ for (int i = 0; i < srcAndRefPositions.length; ++i) {
+ if ((srcAndRefPositions[i] & block.srcAndRefPositions[i]) != 0) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Marks this basic block as belonging to the given subroutine.
+ *
+ * @param id a subroutine id.
+ * @param nbSubroutines the total number of subroutines in the method.
+ */
+ void addToSubroutine(final long id, final int nbSubroutines) {
+ if ((status & VISITED) == 0) {
+ status |= VISITED;
+ srcAndRefPositions = new int[(nbSubroutines - 1) / 32 + 1];
+ }
+ srcAndRefPositions[(int) (id >>> 32)] |= (int) id;
+ }
+
+ /**
+ * Finds the basic blocks that belong to a given subroutine, and marks these
+ * blocks as belonging to this subroutine. This method follows the control
+ * flow graph to find all the blocks that are reachable from the current
+ * block WITHOUT following any JSR target.
+ *
+ * @param JSR a JSR block that jumps to this subroutine. If this JSR is not
+ * null it is added to the successor of the RET blocks found in the
+ * subroutine.
+ * @param id the id of this subroutine.
+ * @param nbSubroutines the total number of subroutines in the method.
+ */
+ void visitSubroutine(final Label JSR, final long id, final int nbSubroutines)
+ {
+ // user managed stack of labels, to avoid using a recursive method
+ // (recursion can lead to stack overflow with very large methods)
+ Label stack = this;
+ while (stack != null) {
+ // removes a label l from the stack
+ Label l = stack;
+ stack = l.next;
+ l.next = null;
+
+ if (JSR != null) {
+ if ((l.status & VISITED2) != 0) {
+ continue;
+ }
+ l.status |= VISITED2;
+ // adds JSR to the successors of l, if it is a RET block
+ if ((l.status & RET) != 0) {
+ if (!l.inSameSubroutine(JSR)) {
+ Edge e = new Edge();
+ e.info = l.inputStackTop;
+ e.successor = JSR.successors.successor;
+ e.next = l.successors;
+ l.successors = e;
+ }
+ }
+ } else {
+ // if the l block already belongs to subroutine 'id', continue
+ if (l.inSubroutine(id)) {
+ continue;
+ }
+ // marks the l block as belonging to subroutine 'id'
+ l.addToSubroutine(id, nbSubroutines);
+ }
+ // pushes each successor of l on the stack, except JSR targets
+ Edge e = l.successors;
+ while (e != null) {
+ // if the l block is a JSR block, then 'l.successors.next' leads
+ // to the JSR target (see {@link #visitJumpInsn}) and must
+ // therefore not be followed
+ if ((l.status & Label.JSR) == 0 || e != l.successors.next) {
+ // pushes e.successor on the stack if it is not already added
+ if (e.successor.next == null) {
+ e.successor.next = stack;
+ stack = e.successor;
+ }
+ }
+ e = e.next;
+ }
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Overridden Object methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns a string representation of this label.
+ *
+ * @return a string representation of this label.
+ */
+ @Override
+ public String toString() {
+ return "L" + System.identityHashCode(this);
+ }
+}
diff --git a/src/asm/scala/tools/asm/MethodVisitor.java b/src/asm/scala/tools/asm/MethodVisitor.java
new file mode 100644
index 0000000..a8a859a
--- /dev/null
+++ b/src/asm/scala/tools/asm/MethodVisitor.java
@@ -0,0 +1,588 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A visitor to visit a Java method. The methods of this class must be
+ * called in the following order: [ <tt>visitAnnotationDefault</tt> ] (
+ * <tt>visitAnnotation</tt> | <tt>visitParameterAnnotation</tt> |
+ * <tt>visitAttribute</tt> )* [ <tt>visitCode</tt> ( <tt>visitFrame</tt> |
+ * <tt>visit<i>X</i>Insn</tt> | <tt>visitLabel</tt> | <tt>visitTryCatchBlock</tt> |
+ * <tt>visitLocalVariable</tt> | <tt>visitLineNumber</tt> )* <tt>visitMaxs</tt> ]
+ * <tt>visitEnd</tt>. In addition, the <tt>visit<i>X</i>Insn</tt>
+ * and <tt>visitLabel</tt> methods must be called in the sequential order of
+ * the bytecode instructions of the visited code, <tt>visitTryCatchBlock</tt>
+ * must be called <i>before</i> the labels passed as arguments have been
+ * visited, and the <tt>visitLocalVariable</tt> and <tt>visitLineNumber</tt>
+ * methods must be called <i>after</i> the labels passed as arguments have been
+ * visited.
+ *
+ * @author Eric Bruneton
+ */
+public abstract class MethodVisitor {
+
+ /**
+ * The ASM API version implemented by this visitor. The value of this field
+ * must be one of {@link Opcodes#ASM4}.
+ */
+ protected final int api;
+
+ /**
+ * The method visitor to which this visitor must delegate method calls. May
+ * be null.
+ */
+ protected MethodVisitor mv;
+
+ /**
+ * Constructs a new {@link MethodVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ */
+ public MethodVisitor(final int api) {
+ this(api, null);
+ }
+
+ /**
+ * Constructs a new {@link MethodVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param mv the method visitor to which this visitor must delegate method
+ * calls. May be null.
+ */
+ public MethodVisitor(final int api, final MethodVisitor mv) {
+ /*if (api != Opcodes.ASM4) {
+ throw new IllegalArgumentException();
+ }*/
+ this.api = api;
+ this.mv = mv;
+ }
+
+ // -------------------------------------------------------------------------
+ // Annotations and non standard attributes
+ // -------------------------------------------------------------------------
+
+ /**
+ * Visits the default value of this annotation interface method.
+ *
+ * @return a visitor to visit the actual default value of this
+ * annotation interface method, or <tt>null</tt> if this visitor
+ * is not interested in visiting this default value. The 'name'
+ * parameters passed to the methods of this annotation visitor are
+ * ignored. Moreover, exactly one visit method must be called on this
+ * annotation visitor, followed by visitEnd.
+ */
+ public AnnotationVisitor visitAnnotationDefault() {
+ if (mv != null) {
+ return mv.visitAnnotationDefault();
+ }
+ return null;
+ }
+
+ /**
+ * Visits an annotation of this method.
+ *
+ * @param desc the class descriptor of the annotation class.
+ * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values, or <tt>null</tt> if
+ * this visitor is not interested in visiting this annotation.
+ */
+ public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
+ if (mv != null) {
+ return mv.visitAnnotation(desc, visible);
+ }
+ return null;
+ }
+
+ /**
+ * Visits an annotation of a parameter of this method.
+ *
+ * @param parameter the parameter index.
+ * @param desc the class descriptor of the annotation class.
+ * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values, or <tt>null</tt> if
+ * this visitor is not interested in visiting this annotation.
+ */
+ public AnnotationVisitor visitParameterAnnotation(
+ int parameter,
+ String desc,
+ boolean visible)
+ {
+ if (mv != null) {
+ return mv.visitParameterAnnotation(parameter, desc, visible);
+ }
+ return null;
+ }
+
+ /**
+ * Visits a non standard attribute of this method.
+ *
+ * @param attr an attribute.
+ */
+ public void visitAttribute(Attribute attr) {
+ if (mv != null) {
+ mv.visitAttribute(attr);
+ }
+ }
+
+ /**
+ * Starts the visit of the method's code, if any (i.e. non abstract method).
+ */
+ public void visitCode() {
+ if (mv != null) {
+ mv.visitCode();
+ }
+ }
+
+ /**
+ * Visits the current state of the local variables and operand stack
+ * elements. This method must(*) be called <i>just before</i> any
+ * instruction <b>i</b> that follows an unconditional branch instruction
+ * such as GOTO or THROW, that is the target of a jump instruction, or that
+ * starts an exception handler block. The visited types must describe the
+ * values of the local variables and of the operand stack elements <i>just
+ * before</i> <b>i</b> is executed. <br> <br> (*) this is mandatory only
+ * for classes whose version is greater than or equal to
+ * {@link Opcodes#V1_6 V1_6}. <br> <br> Packed frames are basically
+ * "deltas" from the state of the previous frame (very first frame is
+ * implicitly defined by the method's parameters and access flags): <ul>
+ * <li>{@link Opcodes#F_SAME} representing frame with exactly the same
+ * locals as the previous frame and with the empty stack.</li> <li>{@link Opcodes#F_SAME1}
+ * representing frame with exactly the same locals as the previous frame and
+ * with single value on the stack (<code>nStack</code> is 1 and
+ * <code>stack[0]</code> contains value for the type of the stack item).</li>
+ * <li>{@link Opcodes#F_APPEND} representing frame with current locals are
+ * the same as the locals in the previous frame, except that additional
+ * locals are defined (<code>nLocal</code> is 1, 2 or 3 and
+ * <code>local</code> elements contains values representing added types).</li>
+ * <li>{@link Opcodes#F_CHOP} representing frame with current locals are
+ * the same as the locals in the previous frame, except that the last 1-3
+ * locals are absent and with the empty stack (<code>nLocals</code> is 1,
+ * 2 or 3). </li> <li>{@link Opcodes#F_FULL} representing complete frame
+ * data.</li> </ul>
+ *
+ * @param type the type of this stack map frame. Must be
+ * {@link Opcodes#F_NEW} for expanded frames, or
+ * {@link Opcodes#F_FULL}, {@link Opcodes#F_APPEND},
+ * {@link Opcodes#F_CHOP}, {@link Opcodes#F_SAME} or
+ * {@link Opcodes#F_SAME1} for compressed
+ * frames.
+ * @param nLocal the number of local variables in the visited frame.
+ * @param local the local variable types in this frame. This array must not
+ * be modified. Primitive types are represented by
+ * {@link Opcodes#TOP}, {@link Opcodes#INTEGER},
+ * {@link Opcodes#FLOAT}, {@link Opcodes#LONG},
+ * {@link Opcodes#DOUBLE},{@link Opcodes#NULL} or
+ * {@link Opcodes#UNINITIALIZED_THIS} (long and double are
+ * represented by a single element). Reference types are represented
+ * by String objects (representing internal names), and uninitialized
+ * types by Label objects (this label designates the NEW instruction
+ * that created this uninitialized value).
+ * @param nStack the number of operand stack elements in the visited frame.
+ * @param stack the operand stack types in this frame. This array must not
+ * be modified. Its content has the same format as the "local" array.
+ * @throws IllegalStateException if a frame is visited just after another
+ * one, without any instruction between the two (unless this frame
+ * is a Opcodes#F_SAME frame, in which case it is silently ignored).
+ */
+ public void visitFrame(
+ int type,
+ int nLocal,
+ Object[] local,
+ int nStack,
+ Object[] stack)
+ {
+ if (mv != null) {
+ mv.visitFrame(type, nLocal, local, nStack, stack);
+ }
+ }
+
+ // -------------------------------------------------------------------------
+ // Normal instructions
+ // -------------------------------------------------------------------------
+
+ /**
+ * Visits a zero operand instruction.
+ *
+ * @param opcode the opcode of the instruction to be visited. This opcode is
+ * either NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1, ICONST_2,
+ * ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1, FCONST_0,
+ * FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD, LALOAD, FALOAD,
+ * DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, IASTORE, LASTORE, FASTORE,
+ * DASTORE, AASTORE, BASTORE, CASTORE, SASTORE, POP, POP2, DUP,
+ * DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP, IADD, LADD, FADD,
+ * DADD, ISUB, LSUB, FSUB, DSUB, IMUL, LMUL, FMUL, DMUL, IDIV, LDIV,
+ * FDIV, DDIV, IREM, LREM, FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL,
+ * LSHL, ISHR, LSHR, IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR,
+ * I2L, I2F, I2D, L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B,
+ * I2C, I2S, LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN,
+ * FRETURN, DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW,
+ * MONITORENTER, or MONITOREXIT.
+ */
+ public void visitInsn(int opcode) {
+ if (mv != null) {
+ mv.visitInsn(opcode);
+ }
+ }
+
+ /**
+ * Visits an instruction with a single int operand.
+ *
+ * @param opcode the opcode of the instruction to be visited. This opcode is
+ * either BIPUSH, SIPUSH or NEWARRAY.
+ * @param operand the operand of the instruction to be visited.<br> When
+ * opcode is BIPUSH, operand value should be between Byte.MIN_VALUE
+ * and Byte.MAX_VALUE.<br> When opcode is SIPUSH, operand value
+ * should be between Short.MIN_VALUE and Short.MAX_VALUE.<br> When
+ * opcode is NEWARRAY, operand value should be one of
+ * {@link Opcodes#T_BOOLEAN}, {@link Opcodes#T_CHAR},
+ * {@link Opcodes#T_FLOAT}, {@link Opcodes#T_DOUBLE},
+ * {@link Opcodes#T_BYTE}, {@link Opcodes#T_SHORT},
+ * {@link Opcodes#T_INT} or {@link Opcodes#T_LONG}.
+ */
+ public void visitIntInsn(int opcode, int operand) {
+ if (mv != null) {
+ mv.visitIntInsn(opcode, operand);
+ }
+ }
+
+ /**
+ * Visits a local variable instruction. A local variable instruction is an
+ * instruction that loads or stores the value of a local variable.
+ *
+ * @param opcode the opcode of the local variable instruction to be visited.
+ * This opcode is either ILOAD, LLOAD, FLOAD, DLOAD, ALOAD, ISTORE,
+ * LSTORE, FSTORE, DSTORE, ASTORE or RET.
+ * @param var the operand of the instruction to be visited. This operand is
+ * the index of a local variable.
+ */
+ public void visitVarInsn(int opcode, int var) {
+ if (mv != null) {
+ mv.visitVarInsn(opcode, var);
+ }
+ }
+
+ /**
+ * Visits a type instruction. A type instruction is an instruction that
+ * takes the internal name of a class as parameter.
+ *
+ * @param opcode the opcode of the type instruction to be visited. This
+ * opcode is either NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
+ * @param type the operand of the instruction to be visited. This operand
+ * must be the internal name of an object or array class (see {@link
+ * Type#getInternalName() getInternalName}).
+ */
+ public void visitTypeInsn(int opcode, String type) {
+ if (mv != null) {
+ mv.visitTypeInsn(opcode, type);
+ }
+ }
+
+ /**
+ * Visits a field instruction. A field instruction is an instruction that
+ * loads or stores the value of a field of an object.
+ *
+ * @param opcode the opcode of the type instruction to be visited. This
+ * opcode is either GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
+ * @param owner the internal name of the field's owner class (see {@link
+ * Type#getInternalName() getInternalName}).
+ * @param name the field's name.
+ * @param desc the field's descriptor (see {@link Type Type}).
+ */
+ public void visitFieldInsn(int opcode, String owner, String name, String desc) {
+ if (mv != null) {
+ mv.visitFieldInsn(opcode, owner, name, desc);
+ }
+ }
+
+ /**
+ * Visits a method instruction. A method instruction is an instruction that
+ * invokes a method.
+ *
+ * @param opcode the opcode of the type instruction to be visited. This
+ * opcode is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC
+ * or INVOKEINTERFACE.
+ * @param owner the internal name of the method's owner class (see {@link
+ * Type#getInternalName() getInternalName}).
+ * @param name the method's name.
+ * @param desc the method's descriptor (see {@link Type Type}).
+ */
+ public void visitMethodInsn(int opcode, String owner, String name, String desc) {
+ if (mv != null) {
+ mv.visitMethodInsn(opcode, owner, name, desc);
+ }
+ }
+
+ /**
+ * Visits an invokedynamic instruction.
+ *
+ * @param name the method's name.
+ * @param desc the method's descriptor (see {@link Type Type}).
+ * @param bsm the bootstrap method.
+ * @param bsmArgs the bootstrap method constant arguments. Each argument
+ * must be an {@link Integer}, {@link Float}, {@link Long},
+ * {@link Double}, {@link String}, {@link Type} or {@link Handle}
+ * value. This method is allowed to modify the content of the array
+ * so a caller should expect that this array may change.
+ */
+ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... bsmArgs) {
+ if (mv != null) {
+ mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
+ }
+ }
+
+ /**
+ * Visits a jump instruction. A jump instruction is an instruction that may
+ * jump to another instruction.
+ *
+ * @param opcode the opcode of the type instruction to be visited. This
+ * opcode is either IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
+ * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ,
+ * IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
+ * @param label the operand of the instruction to be visited. This operand
+ * is a label that designates the instruction to which the jump
+ * instruction may jump.
+ */
+ public void visitJumpInsn(int opcode, Label label) {
+ if (mv != null) {
+ mv.visitJumpInsn(opcode, label);
+ }
+ }
+
+ /**
+ * Visits a label. A label designates the instruction that will be visited
+ * just after it.
+ *
+ * @param label a {@link Label Label} object.
+ */
+ public void visitLabel(Label label) {
+ if (mv != null) {
+ mv.visitLabel(label);
+ }
+ }
+
+ // -------------------------------------------------------------------------
+ // Special instructions
+ // -------------------------------------------------------------------------
+
+ /**
+ * Visits a LDC instruction. Note that new constant types may be added in
+ * future versions of the Java Virtual Machine. To easily detect new
+ * constant types, implementations of this method should check for
+ * unexpected constant types, like this:
+ * <pre>
+ * if (cst instanceof Integer) {
+ * // ...
+ * } else if (cst instanceof Float) {
+ * // ...
+ * } else if (cst instanceof Long) {
+ * // ...
+ * } else if (cst instanceof Double) {
+ * // ...
+ * } else if (cst instanceof String) {
+ * // ...
+ * } else if (cst instanceof Type) {
+ * int sort = ((Type) cst).getSort();
+ * if (sort == Type.OBJECT) {
+ * // ...
+ * } else if (sort == Type.ARRAY) {
+ * // ...
+ * } else if (sort == Type.METHOD) {
+ * // ...
+ * } else {
+ * // throw an exception
+ * }
+ * } else if (cst instanceof Handle) {
+ * // ...
+ * } else {
+ * // throw an exception
+ * }</pre>
+ *
+ * @param cst the constant to be loaded on the stack. This parameter must be
+ * a non null {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double}, a {@link String}, a {@link Type} of OBJECT or ARRAY
+ * sort for <tt>.class</tt> constants, for classes whose version is
+ * 49.0, a {@link Type} of METHOD sort or a {@link Handle} for
+ * MethodType and MethodHandle constants, for classes whose version
+ * is 51.0.
+ */
+ public void visitLdcInsn(Object cst) {
+ if (mv != null) {
+ mv.visitLdcInsn(cst);
+ }
+ }
+
+ /**
+ * Visits an IINC instruction.
+ *
+ * @param var index of the local variable to be incremented.
+ * @param increment amount to increment the local variable by.
+ */
+ public void visitIincInsn(int var, int increment) {
+ if (mv != null) {
+ mv.visitIincInsn(var, increment);
+ }
+ }
+
+ /**
+ * Visits a TABLESWITCH instruction.
+ *
+ * @param min the minimum key value.
+ * @param max the maximum key value.
+ * @param dflt beginning of the default handler block.
+ * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
+ * the beginning of the handler block for the <tt>min + i</tt> key.
+ */
+ public void visitTableSwitchInsn(int min, int max, Label dflt, Label... labels) {
+ if (mv != null) {
+ mv.visitTableSwitchInsn(min, max, dflt, labels);
+ }
+ }
+
+ /**
+ * Visits a LOOKUPSWITCH instruction.
+ *
+ * @param dflt beginning of the default handler block.
+ * @param keys the values of the keys.
+ * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
+ * the beginning of the handler block for the <tt>keys[i]</tt> key.
+ */
+ public void visitLookupSwitchInsn(Label dflt, int[] keys, Label[] labels) {
+ if (mv != null) {
+ mv.visitLookupSwitchInsn(dflt, keys, labels);
+ }
+ }
+
+ /**
+ * Visits a MULTIANEWARRAY instruction.
+ *
+ * @param desc an array type descriptor (see {@link Type Type}).
+ * @param dims number of dimensions of the array to allocate.
+ */
+ public void visitMultiANewArrayInsn(String desc, int dims) {
+ if (mv != null) {
+ mv.visitMultiANewArrayInsn(desc, dims);
+ }
+ }
+
+ // -------------------------------------------------------------------------
+ // Exceptions table entries, debug information, max stack and max locals
+ // -------------------------------------------------------------------------
+
+ /**
+ * Visits a try catch block.
+ *
+ * @param start beginning of the exception handler's scope (inclusive).
+ * @param end end of the exception handler's scope (exclusive).
+ * @param handler beginning of the exception handler's code.
+ * @param type internal name of the type of exceptions handled by the
+ * handler, or <tt>null</tt> to catch any exceptions (for "finally"
+ * blocks).
+ * @throws IllegalArgumentException if one of the labels has already been
+ * visited by this visitor (by the {@link #visitLabel visitLabel}
+ * method).
+ */
+ public void visitTryCatchBlock(Label start, Label end, Label handler, String type) {
+ if (mv != null) {
+ mv.visitTryCatchBlock(start, end, handler, type);
+ }
+ }
+
+ /**
+ * Visits a local variable declaration.
+ *
+ * @param name the name of a local variable.
+ * @param desc the type descriptor of this local variable.
+ * @param signature the type signature of this local variable. May be
+ * <tt>null</tt> if the local variable type does not use generic
+ * types.
+ * @param start the first instruction corresponding to the scope of this
+ * local variable (inclusive).
+ * @param end the last instruction corresponding to the scope of this local
+ * variable (exclusive).
+ * @param index the local variable's index.
+ * @throws IllegalArgumentException if one of the labels has not already
+ * been visited by this visitor (by the
+ * {@link #visitLabel visitLabel} method).
+ */
+ public void visitLocalVariable(
+ String name,
+ String desc,
+ String signature,
+ Label start,
+ Label end,
+ int index)
+ {
+ if (mv != null) {
+ mv.visitLocalVariable(name, desc, signature, start, end, index);
+ }
+ }
+
+ /**
+ * Visits a line number declaration.
+ *
+ * @param line a line number. This number refers to the source file from
+ * which the class was compiled.
+ * @param start the first instruction corresponding to this line number.
+ * @throws IllegalArgumentException if <tt>start</tt> has not already been
+ * visited by this visitor (by the {@link #visitLabel visitLabel}
+ * method).
+ */
+ public void visitLineNumber(int line, Label start) {
+ if (mv != null) {
+ mv.visitLineNumber(line, start);
+ }
+ }
+
+ /**
+ * Visits the maximum stack size and the maximum number of local variables
+ * of the method.
+ *
+ * @param maxStack maximum stack size of the method.
+ * @param maxLocals maximum number of local variables for the method.
+ */
+ public void visitMaxs(int maxStack, int maxLocals) {
+ if (mv != null) {
+ mv.visitMaxs(maxStack, maxLocals);
+ }
+ }
+
+ /**
+ * Visits the end of the method. This method, which is the last one to be
+ * called, is used to inform the visitor that all the annotations and
+ * attributes of the method have been visited.
+ */
+ public void visitEnd() {
+ if (mv != null) {
+ mv.visitEnd();
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/MethodWriter.java b/src/asm/scala/tools/asm/MethodWriter.java
new file mode 100644
index 0000000..887cb28
--- /dev/null
+++ b/src/asm/scala/tools/asm/MethodWriter.java
@@ -0,0 +1,2671 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * A {@link MethodVisitor} that generates methods in bytecode form. Each visit
+ * method of this class appends the bytecode corresponding to the visited
+ * instruction to a byte vector, in the order these methods are called.
+ *
+ * @author Eric Bruneton
+ * @author Eugene Kuleshov
+ */
+class MethodWriter extends MethodVisitor {
+
+ /**
+ * Pseudo access flag used to denote constructors.
+ */
+ static final int ACC_CONSTRUCTOR = 262144;
+
+ /**
+ * Frame has exactly the same locals as the previous stack map frame and
+ * number of stack items is zero.
+ */
+ static final int SAME_FRAME = 0; // to 63 (0-3f)
+
+ /**
+ * Frame has exactly the same locals as the previous stack map frame and
+ * number of stack items is 1
+ */
+ static final int SAME_LOCALS_1_STACK_ITEM_FRAME = 64; // to 127 (40-7f)
+
+ /**
+ * Reserved for future use
+ */
+ static final int RESERVED = 128;
+
+ /**
+ * Frame has exactly the same locals as the previous stack map frame and
+ * number of stack items is 1. Offset is bigger than 63.
+ */
+ static final int SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED = 247; // f7
+
+ /**
+ * Frame where current locals are the same as the locals in the previous
+ * frame, except that the k last locals are absent. The value of k is given
+ * by the formula 251-frame_type.
+ */
+ static final int CHOP_FRAME = 248; // to 250 (f8-fA)
+
+ /**
+ * Frame has exactly the same locals as the previous stack map frame and
+ * number of stack items is zero. Offset is bigger than 63.
+ */
+ static final int SAME_FRAME_EXTENDED = 251; // fb
+
+ /**
+ * Frame where current locals are the same as the locals in the previous
+ * frame, except that k additional locals are defined. The value of k is
+ * given by the formula frame_type-251.
+ */
+ static final int APPEND_FRAME = 252; // to 254 // fc-fe
+
+ /**
+ * Full frame
+ */
+ static final int FULL_FRAME = 255; // ff
+
+ /**
+ * Indicates that the stack map frames must be recomputed from scratch. In
+ * this case the maximum stack size and number of local variables is also
+ * recomputed from scratch.
+ *
+ * @see #compute
+ */
+ private static final int FRAMES = 0;
+
+ /**
+ * Indicates that the maximum stack size and number of local variables must
+ * be automatically computed.
+ *
+ * @see #compute
+ */
+ private static final int MAXS = 1;
+
+ /**
+ * Indicates that nothing must be automatically computed.
+ *
+ * @see #compute
+ */
+ private static final int NOTHING = 2;
+
+ /**
+ * The class writer to which this method must be added.
+ */
+ final ClassWriter cw;
+
+ /**
+ * Access flags of this method.
+ */
+ private int access;
+
+ /**
+ * The index of the constant pool item that contains the name of this
+ * method.
+ */
+ private final int name;
+
+ /**
+ * The index of the constant pool item that contains the descriptor of this
+ * method.
+ */
+ private final int desc;
+
+ /**
+ * The descriptor of this method.
+ */
+ private final String descriptor;
+
+ /**
+ * The signature of this method.
+ */
+ String signature;
+
+ /**
+ * If not zero, indicates that the code of this method must be copied from
+ * the ClassReader associated to this writer in <code>cw.cr</code>. More
+ * precisely, this field gives the index of the first byte to be copied from
+ * <code>cw.cr.b</code>.
+ */
+ int classReaderOffset;
+
+ /**
+ * If not zero, indicates that the code of this method must be copied from
+ * the ClassReader associated to this writer in <code>cw.cr</code>. More
+ * precisely, this field gives the number of bytes to be copied from
+ * <code>cw.cr.b</code>.
+ */
+ int classReaderLength;
+
+ /**
+ * Number of exceptions that can be thrown by this method.
+ */
+ int exceptionCount;
+
+ /**
+ * The exceptions that can be thrown by this method. More precisely, this
+ * array contains the indexes of the constant pool items that contain the
+ * internal names of these exception classes.
+ */
+ int[] exceptions;
+
+ /**
+ * The annotation default attribute of this method. May be <tt>null</tt>.
+ */
+ private ByteVector annd;
+
+ /**
+ * The runtime visible annotations of this method. May be <tt>null</tt>.
+ */
+ private AnnotationWriter anns;
+
+ /**
+ * The runtime invisible annotations of this method. May be <tt>null</tt>.
+ */
+ private AnnotationWriter ianns;
+
+ /**
+ * The runtime visible parameter annotations of this method. May be
+ * <tt>null</tt>.
+ */
+ private AnnotationWriter[] panns;
+
+ /**
+ * The runtime invisible parameter annotations of this method. May be
+ * <tt>null</tt>.
+ */
+ private AnnotationWriter[] ipanns;
+
+ /**
+ * The number of synthetic parameters of this method.
+ */
+ private int synthetics;
+
+ /**
+ * The non standard attributes of the method.
+ */
+ private Attribute attrs;
+
+ /**
+ * The bytecode of this method.
+ */
+ private ByteVector code = new ByteVector();
+
+ /**
+ * Maximum stack size of this method.
+ */
+ private int maxStack;
+
+ /**
+ * Maximum number of local variables for this method.
+ */
+ private int maxLocals;
+
+ /**
+ * Number of local variables in the current stack map frame.
+ */
+ private int currentLocals;
+
+ /**
+ * Number of stack map frames in the StackMapTable attribute.
+ */
+ private int frameCount;
+
+ /**
+ * The StackMapTable attribute.
+ */
+ private ByteVector stackMap;
+
+ /**
+ * The offset of the last frame that was written in the StackMapTable
+ * attribute.
+ */
+ private int previousFrameOffset;
+
+ /**
+ * The last frame that was written in the StackMapTable attribute.
+ *
+ * @see #frame
+ */
+ private int[] previousFrame;
+
+ /**
+ * Index of the next element to be added in {@link #frame}.
+ */
+ private int frameIndex;
+
+ /**
+ * The current stack map frame. The first element contains the offset of the
+ * instruction to which the frame corresponds, the second element is the
+ * number of locals and the third one is the number of stack elements. The
+ * local variables start at index 3 and are followed by the operand stack
+ * values. In summary frame[0] = offset, frame[1] = nLocal, frame[2] =
+ * nStack, frame[3] = nLocal. All types are encoded as integers, with the
+ * same format as the one used in {@link Label}, but limited to BASE types.
+ */
+ private int[] frame;
+
+ /**
+ * Number of elements in the exception handler list.
+ */
+ private int handlerCount;
+
+ /**
+ * The first element in the exception handler list.
+ */
+ private Handler firstHandler;
+
+ /**
+ * The last element in the exception handler list.
+ */
+ private Handler lastHandler;
+
+ /**
+ * Number of entries in the LocalVariableTable attribute.
+ */
+ private int localVarCount;
+
+ /**
+ * The LocalVariableTable attribute.
+ */
+ private ByteVector localVar;
+
+ /**
+ * Number of entries in the LocalVariableTypeTable attribute.
+ */
+ private int localVarTypeCount;
+
+ /**
+ * The LocalVariableTypeTable attribute.
+ */
+ private ByteVector localVarType;
+
+ /**
+ * Number of entries in the LineNumberTable attribute.
+ */
+ private int lineNumberCount;
+
+ /**
+ * The LineNumberTable attribute.
+ */
+ private ByteVector lineNumber;
+
+ /**
+ * The non standard attributes of the method's code.
+ */
+ private Attribute cattrs;
+
+ /**
+ * Indicates if some jump instructions are too small and need to be resized.
+ */
+ private boolean resize;
+
+ /**
+ * The number of subroutines in this method.
+ */
+ private int subroutines;
+
+ // ------------------------------------------------------------------------
+
+ /*
+ * Fields for the control flow graph analysis algorithm (used to compute the
+ * maximum stack size). A control flow graph contains one node per "basic
+ * block", and one edge per "jump" from one basic block to another. Each
+ * node (i.e., each basic block) is represented by the Label object that
+ * corresponds to the first instruction of this basic block. Each node also
+ * stores the list of its successors in the graph, as a linked list of Edge
+ * objects.
+ */
+
+ /**
+ * Indicates what must be automatically computed.
+ *
+ * @see #FRAMES
+ * @see #MAXS
+ * @see #NOTHING
+ */
+ private final int compute;
+
+ /**
+ * A list of labels. This list is the list of basic blocks in the method,
+ * i.e. a list of Label objects linked to each other by their
+ * {@link Label#successor} field, in the order they are visited by
+ * {@link MethodVisitor#visitLabel}, and starting with the first basic block.
+ */
+ private Label labels;
+
+ /**
+ * The previous basic block.
+ */
+ private Label previousBlock;
+
+ /**
+ * The current basic block.
+ */
+ private Label currentBlock;
+
+ /**
+ * The (relative) stack size after the last visited instruction. This size
+ * is relative to the beginning of the current basic block, i.e., the true
+ * stack size after the last visited instruction is equal to the
+ * {@link Label#inputStackTop beginStackSize} of the current basic block
+ * plus <tt>stackSize</tt>.
+ */
+ private int stackSize;
+
+ /**
+ * The (relative) maximum stack size after the last visited instruction.
+ * This size is relative to the beginning of the current basic block, i.e.,
+ * the true maximum stack size after the last visited instruction is equal
+ * to the {@link Label#inputStackTop beginStackSize} of the current basic
+ * block plus <tt>stackSize</tt>.
+ */
+ private int maxStackSize;
+
+ // ------------------------------------------------------------------------
+ // Constructor
+ // ------------------------------------------------------------------------
+
+ /**
+ * Constructs a new {@link MethodWriter}.
+ *
+ * @param cw the class writer in which the method must be added.
+ * @param access the method's access flags (see {@link Opcodes}).
+ * @param name the method's name.
+ * @param desc the method's descriptor (see {@link Type}).
+ * @param signature the method's signature. May be <tt>null</tt>.
+ * @param exceptions the internal names of the method's exceptions. May be
+ * <tt>null</tt>.
+ * @param computeMaxs <tt>true</tt> if the maximum stack size and number
+ * of local variables must be automatically computed.
+ * @param computeFrames <tt>true</tt> if the stack map tables must be
+ * recomputed from scratch.
+ */
+ MethodWriter(
+ final ClassWriter cw,
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions,
+ final boolean computeMaxs,
+ final boolean computeFrames)
+ {
+ super(Opcodes.ASM4);
+ if (cw.firstMethod == null) {
+ cw.firstMethod = this;
+ } else {
+ cw.lastMethod.mv = this;
+ }
+ cw.lastMethod = this;
+ this.cw = cw;
+ this.access = access;
+ this.name = cw.newUTF8(name);
+ this.desc = cw.newUTF8(desc);
+ this.descriptor = desc;
+ if (ClassReader.SIGNATURES) {
+ this.signature = signature;
+ }
+ if (exceptions != null && exceptions.length > 0) {
+ exceptionCount = exceptions.length;
+ this.exceptions = new int[exceptionCount];
+ for (int i = 0; i < exceptionCount; ++i) {
+ this.exceptions[i] = cw.newClass(exceptions[i]);
+ }
+ }
+ this.compute = computeFrames ? FRAMES : (computeMaxs ? MAXS : NOTHING);
+ if (computeMaxs || computeFrames) {
+ if (computeFrames && "<init>".equals(name)) {
+ this.access |= ACC_CONSTRUCTOR;
+ }
+ // updates maxLocals
+ int size = Type.getArgumentsAndReturnSizes(descriptor) >> 2;
+ if ((access & Opcodes.ACC_STATIC) != 0) {
+ --size;
+ }
+ maxLocals = size;
+ currentLocals = size;
+ // creates and visits the label for the first basic block
+ labels = new Label();
+ labels.status |= Label.PUSHED;
+ visitLabel(labels);
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the MethodVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ /**
+ * Starts the visit of the AnnotationDefault attribute. Returns
+ * <tt>null</tt> when annotation support is compiled out
+ * (ClassReader.ANNOTATIONS is false).
+ */
+ @Override
+ public AnnotationVisitor visitAnnotationDefault() {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ annd = new ByteVector();
+ return new AnnotationWriter(cw, false, annd, null, 0);
+ }
+
+ /**
+ * Visits an annotation of this method. The new AnnotationWriter is
+ * prepended to the 'anns' list when visible, to 'ianns' otherwise.
+ */
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ ByteVector bv = new ByteVector();
+ // write type, and reserve space for values count
+ bv.putShort(cw.newUTF8(desc)).putShort(0);
+ AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, 2);
+ if (visible) {
+ aw.next = anns;
+ anns = aw;
+ } else {
+ aw.next = ianns;
+ ianns = aw;
+ }
+ return aw;
+ }
+
+ /**
+ * Visits an annotation on a parameter of this method. Annotations are
+ * stored per parameter in the 'panns' (visible) or 'ipanns' (invisible)
+ * arrays, each sized to the number of argument types in the descriptor.
+ */
+ @Override
+ public AnnotationVisitor visitParameterAnnotation(
+ final int parameter,
+ final String desc,
+ final boolean visible)
+ {
+ if (!ClassReader.ANNOTATIONS) {
+ return null;
+ }
+ ByteVector bv = new ByteVector();
+ if ("Ljava/lang/Synthetic;".equals(desc)) {
+ // workaround for a bug in javac with synthetic parameters
+ // see ClassReader.readParameterAnnotations
+ synthetics = Math.max(synthetics, parameter + 1);
+ return new AnnotationWriter(cw, false, bv, null, 0);
+ }
+ // write type, and reserve space for values count
+ bv.putShort(cw.newUTF8(desc)).putShort(0);
+ AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, 2);
+ if (visible) {
+ if (panns == null) {
+ panns = new AnnotationWriter[Type.getArgumentTypes(descriptor).length];
+ }
+ aw.next = panns[parameter];
+ panns[parameter] = aw;
+ } else {
+ if (ipanns == null) {
+ ipanns = new AnnotationWriter[Type.getArgumentTypes(descriptor).length];
+ }
+ aw.next = ipanns[parameter];
+ ipanns[parameter] = aw;
+ }
+ return aw;
+ }
+
+ /**
+ * Visits a non standard attribute of this method. Code attributes are
+ * prepended to the 'cattrs' list, others to the 'attrs' list.
+ */
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ if (attr.isCodeAttribute()) {
+ attr.next = cattrs;
+ cattrs = attr;
+ } else {
+ attr.next = attrs;
+ attrs = attr;
+ }
+ }
+
+ /**
+ * Starts the visit of the method's code. Intentionally empty: all code
+ * emission state is initialized in the constructor.
+ */
+ @Override
+ public void visitCode() {
+ }
+
+ /**
+ * Visits a stack map frame. Ignored when frame support is compiled out or
+ * when frames are recomputed from scratch (compute == FRAMES). F_NEW
+ * frames are stored in expanded form via startFrame/endFrame; the other
+ * frame types are written directly to the stackMap ByteVector in the
+ * compressed StackMapTable encoding (FULL_FRAME, APPEND, CHOP, SAME,
+ * SAME_LOCALS_1_STACK_ITEM), using the offset delta from the previous
+ * frame.
+ */
+ @Override
+ public void visitFrame(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack)
+ {
+ if (!ClassReader.FRAMES || compute == FRAMES) {
+ return;
+ }
+
+ if (type == Opcodes.F_NEW) {
+ currentLocals = nLocal;
+ startFrame(code.length, nLocal, nStack);
+ // each local is either an internal class name (String), a primitive
+ // type tag (Integer), or a Label marking an uninitialized NEW
+ for (int i = 0; i < nLocal; ++i) {
+ if (local[i] instanceof String) {
+ frame[frameIndex++] = Frame.OBJECT
+ | cw.addType((String) local[i]);
+ } else if (local[i] instanceof Integer) {
+ frame[frameIndex++] = ((Integer) local[i]).intValue();
+ } else {
+ frame[frameIndex++] = Frame.UNINITIALIZED
+ | cw.addUninitializedType("",
+ ((Label) local[i]).position);
+ }
+ }
+ for (int i = 0; i < nStack; ++i) {
+ if (stack[i] instanceof String) {
+ frame[frameIndex++] = Frame.OBJECT
+ | cw.addType((String) stack[i]);
+ } else if (stack[i] instanceof Integer) {
+ frame[frameIndex++] = ((Integer) stack[i]).intValue();
+ } else {
+ frame[frameIndex++] = Frame.UNINITIALIZED
+ | cw.addUninitializedType("",
+ ((Label) stack[i]).position);
+ }
+ }
+ endFrame();
+ } else {
+ int delta;
+ if (stackMap == null) {
+ stackMap = new ByteVector();
+ delta = code.length;
+ } else {
+ delta = code.length - previousFrameOffset - 1;
+ if (delta < 0) {
+ // two frames at the same offset: only legal for F_SAME,
+ // which is simply dropped
+ if (type == Opcodes.F_SAME) {
+ return;
+ } else {
+ throw new IllegalStateException();
+ }
+ }
+ }
+
+ switch (type) {
+ case Opcodes.F_FULL:
+ currentLocals = nLocal;
+ stackMap.putByte(FULL_FRAME)
+ .putShort(delta)
+ .putShort(nLocal);
+ for (int i = 0; i < nLocal; ++i) {
+ writeFrameType(local[i]);
+ }
+ stackMap.putShort(nStack);
+ for (int i = 0; i < nStack; ++i) {
+ writeFrameType(stack[i]);
+ }
+ break;
+ case Opcodes.F_APPEND:
+ currentLocals += nLocal;
+ stackMap.putByte(SAME_FRAME_EXTENDED + nLocal)
+ .putShort(delta);
+ for (int i = 0; i < nLocal; ++i) {
+ writeFrameType(local[i]);
+ }
+ break;
+ case Opcodes.F_CHOP:
+ currentLocals -= nLocal;
+ stackMap.putByte(SAME_FRAME_EXTENDED - nLocal)
+ .putShort(delta);
+ break;
+ case Opcodes.F_SAME:
+ // short form for delta < 64, extended form otherwise
+ if (delta < 64) {
+ stackMap.putByte(delta);
+ } else {
+ stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
+ }
+ break;
+ case Opcodes.F_SAME1:
+ if (delta < 64) {
+ stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
+ } else {
+ stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
+ .putShort(delta);
+ }
+ writeFrameType(stack[0]);
+ break;
+ }
+
+ previousFrameOffset = code.length;
+ ++frameCount;
+ }
+
+ maxStack = Math.max(maxStack, nStack);
+ maxLocals = Math.max(maxLocals, currentLocals);
+ }
+
+ /**
+ * Visits a zero-operand instruction: emits the opcode byte and updates
+ * either the symbolic frame (FRAMES) or the relative stack size (MAXS).
+ */
+ @Override
+ public void visitInsn(final int opcode) {
+ // adds the instruction to the bytecode of the method
+ code.putByte(opcode);
+ // update currentBlock
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(opcode, 0, null, null);
+ } else {
+ // updates current and max stack sizes
+ int size = stackSize + Frame.SIZE[opcode];
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ // if opcode == ATHROW or xRETURN, ends current block (no successor)
+ if ((opcode >= Opcodes.IRETURN && opcode <= Opcodes.RETURN)
+ || opcode == Opcodes.ATHROW)
+ {
+ noSuccessor();
+ }
+ }
+ }
+
+ /**
+ * Visits an instruction with a single int operand (BIPUSH, SIPUSH,
+ * NEWARRAY). SIPUSH takes a 2-byte operand, the others 1 byte.
+ */
+ @Override
+ public void visitIntInsn(final int opcode, final int operand) {
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(opcode, operand, null, null);
+ } else if (opcode != Opcodes.NEWARRAY) {
+ // updates current and max stack sizes only for NEWARRAY
+ // (stack size variation = 0 for BIPUSH or SIPUSH)
+ int size = stackSize + 1;
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ if (opcode == Opcodes.SIPUSH) {
+ code.put12(opcode, operand);
+ } else { // BIPUSH or NEWARRAY
+ code.put11(opcode, operand);
+ }
+ }
+
+ /**
+ * Visits a local variable instruction (xLOAD, xSTORE, RET). Updates
+ * maxLocals, then emits the most compact encoding: the one-byte
+ * ILOAD_0..ALOAD_3 / ISTORE_0..ASTORE_3 forms for var &lt; 4, the WIDE
+ * form for var &gt;= 256, and the two-byte form otherwise.
+ */
+ @Override
+ public void visitVarInsn(final int opcode, final int var) {
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(opcode, var, null, null);
+ } else {
+ // updates current and max stack sizes
+ if (opcode == Opcodes.RET) {
+ // no stack change, but end of current block (no successor)
+ currentBlock.status |= Label.RET;
+ // save 'stackSize' here for future use
+ // (see {@link #findSubroutineSuccessors})
+ currentBlock.inputStackTop = stackSize;
+ noSuccessor();
+ } else { // xLOAD or xSTORE
+ int size = stackSize + Frame.SIZE[opcode];
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ }
+ }
+ if (compute != NOTHING) {
+ // updates max locals
+ int n;
+ // long and double occupy two local variable slots
+ if (opcode == Opcodes.LLOAD || opcode == Opcodes.DLOAD
+ || opcode == Opcodes.LSTORE || opcode == Opcodes.DSTORE)
+ {
+ n = var + 2;
+ } else {
+ n = var + 1;
+ }
+ if (n > maxLocals) {
+ maxLocals = n;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ if (var < 4 && opcode != Opcodes.RET) {
+ int opt;
+ if (opcode < Opcodes.ISTORE) {
+ /* ILOAD_0 */
+ opt = 26 + ((opcode - Opcodes.ILOAD) << 2) + var;
+ } else {
+ /* ISTORE_0 */
+ opt = 59 + ((opcode - Opcodes.ISTORE) << 2) + var;
+ }
+ code.putByte(opt);
+ } else if (var >= 256) {
+ code.putByte(196 /* WIDE */).put12(opcode, var);
+ } else {
+ code.put11(opcode, var);
+ }
+ // a store in a method with handlers may change a frame seen by a
+ // handler, so a new basic block is started after it
+ if (opcode >= Opcodes.ISTORE && compute == FRAMES && handlerCount > 0) {
+ visitLabel(new Label());
+ }
+ }
+
+ /**
+ * Visits a type instruction (NEW, ANEWARRAY, CHECKCAST, INSTANCEOF): adds
+ * the class to the constant pool and emits the opcode with its index.
+ */
+ @Override
+ public void visitTypeInsn(final int opcode, final String type) {
+ Item i = cw.newClassItem(type);
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(opcode, code.length, cw, i);
+ } else if (opcode == Opcodes.NEW) {
+ // updates current and max stack sizes only if opcode == NEW
+ // (no stack change for ANEWARRAY, CHECKCAST, INSTANCEOF)
+ int size = stackSize + 1;
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ code.put12(opcode, i.index);
+ }
+
+ /**
+ * Visits a field instruction (GETSTATIC, PUTSTATIC, GETFIELD, PUTFIELD).
+ * The stack size variation depends on the opcode and on whether the field
+ * type is a category-2 type ('D' or 'J' descriptor).
+ */
+ @Override
+ public void visitFieldInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ Item i = cw.newFieldItem(owner, name, desc);
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(opcode, 0, cw, i);
+ } else {
+ int size;
+ // computes the stack size variation
+ char c = desc.charAt(0);
+ switch (opcode) {
+ case Opcodes.GETSTATIC:
+ size = stackSize + (c == 'D' || c == 'J' ? 2 : 1);
+ break;
+ case Opcodes.PUTSTATIC:
+ size = stackSize + (c == 'D' || c == 'J' ? -2 : -1);
+ break;
+ case Opcodes.GETFIELD:
+ size = stackSize + (c == 'D' || c == 'J' ? 1 : 0);
+ break;
+ // case Constants.PUTFIELD:
+ default:
+ size = stackSize + (c == 'D' || c == 'J' ? -3 : -2);
+ break;
+ }
+ // updates current and max stack sizes
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ code.put12(opcode, i.index);
+ }
+
+ /**
+ * Visits a method instruction. The argument/return sizes packed by
+ * Type.getArgumentsAndReturnSizes are cached in the Item's intVal field
+ * so they are computed at most once per method descriptor. For
+ * INVOKEINTERFACE the 'count' byte (argSize &gt;&gt; 2) and a zero byte
+ * are emitted after the constant pool index.
+ */
+ @Override
+ public void visitMethodInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ boolean itf = opcode == Opcodes.INVOKEINTERFACE;
+ Item i = cw.newMethodItem(owner, name, desc, itf);
+ int argSize = i.intVal;
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(opcode, 0, cw, i);
+ } else {
+ /*
+ * computes the stack size variation. In order not to recompute
+ * several times this variation for the same Item, we use the
+ * intVal field of this item to store this variation, once it
+ * has been computed. More precisely this intVal field stores
+ * the sizes of the arguments and of the return value
+ * corresponding to desc.
+ */
+ if (argSize == 0) {
+ // the above sizes have not been computed yet,
+ // so we compute them...
+ argSize = Type.getArgumentsAndReturnSizes(desc);
+ // ... and we save them in order
+ // not to recompute them in the future
+ i.intVal = argSize;
+ }
+ int size;
+ // INVOKESTATIC pops no receiver, hence the extra +1
+ if (opcode == Opcodes.INVOKESTATIC) {
+ size = stackSize - (argSize >> 2) + (argSize & 0x03) + 1;
+ } else {
+ size = stackSize - (argSize >> 2) + (argSize & 0x03);
+ }
+ // updates current and max stack sizes
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ if (itf) {
+ if (argSize == 0) {
+ argSize = Type.getArgumentsAndReturnSizes(desc);
+ i.intVal = argSize;
+ }
+ code.put12(Opcodes.INVOKEINTERFACE, i.index).put11(argSize >> 2, 0);
+ } else {
+ code.put12(opcode, i.index);
+ }
+ }
+
+ /**
+ * Visits an invokedynamic instruction: emits the INVOKEDYNAMIC opcode,
+ * the constant pool index of the call site item, and the mandatory two
+ * zero bytes. Argument/return sizes are cached in the Item's intVal
+ * field, as in visitMethodInsn.
+ */
+ @Override
+ public void visitInvokeDynamicInsn(
+ final String name,
+ final String desc,
+ final Handle bsm,
+ final Object... bsmArgs)
+ {
+ Item i = cw.newInvokeDynamicItem(name, desc, bsm, bsmArgs);
+ int argSize = i.intVal;
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(Opcodes.INVOKEDYNAMIC, 0, cw, i);
+ } else {
+ /*
+ * computes the stack size variation. In order not to recompute
+ * several times this variation for the same Item, we use the
+ * intVal field of this item to store this variation, once it
+ * has been computed. More precisely this intVal field stores
+ * the sizes of the arguments and of the return value
+ * corresponding to desc.
+ */
+ if (argSize == 0) {
+ // the above sizes have not been computed yet,
+ // so we compute them...
+ argSize = Type.getArgumentsAndReturnSizes(desc);
+ // ... and we save them in order
+ // not to recompute them in the future
+ i.intVal = argSize;
+ }
+ int size = stackSize - (argSize >> 2) + (argSize & 0x03) + 1;
+
+ // updates current and max stack sizes
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ code.put12(Opcodes.INVOKEDYNAMIC, i.index);
+ code.putShort(0);
+ }
+
+ /**
+ * Visits a jump instruction. Updates the control flow graph (target and
+ * fall-through successors, JSR bookkeeping) and emits the instruction.
+ * Resolved backward jumps whose offset does not fit in a signed short are
+ * rewritten on the fly: GOTO becomes GOTO_W, JSR becomes JSR_W, and a
+ * conditional IFxxx becomes IFNOTxxx over a GOTO_W.
+ */
+ @Override
+ public void visitJumpInsn(final int opcode, final Label label) {
+ Label nextInsn = null;
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(opcode, 0, null, null);
+ // 'label' is the target of a jump instruction
+ label.getFirst().status |= Label.TARGET;
+ // adds 'label' as a successor of this basic block
+ addSuccessor(Edge.NORMAL, label);
+ if (opcode != Opcodes.GOTO) {
+ // creates a Label for the next basic block
+ nextInsn = new Label();
+ }
+ } else {
+ if (opcode == Opcodes.JSR) {
+ if ((label.status & Label.SUBROUTINE) == 0) {
+ label.status |= Label.SUBROUTINE;
+ ++subroutines;
+ }
+ currentBlock.status |= Label.JSR;
+ addSuccessor(stackSize + 1, label);
+ // creates a Label for the next basic block
+ nextInsn = new Label();
+ /*
+ * note that, by construction in this method, a JSR block
+ * has at least two successors in the control flow graph:
+ * the first one leads the next instruction after the JSR,
+ * while the second one leads to the JSR target.
+ */
+ } else {
+ // updates current stack size (max stack size unchanged
+ // because stack size variation always negative in this
+ // case)
+ stackSize += Frame.SIZE[opcode];
+ addSuccessor(stackSize, label);
+ }
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ if ((label.status & Label.RESOLVED) != 0
+ && label.position - code.length < Short.MIN_VALUE)
+ {
+ /*
+ * case of a backward jump with an offset < -32768. In this case we
+ * automatically replace GOTO with GOTO_W, JSR with JSR_W and IFxxx
+ * <l> with IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx is the
+ * "opposite" opcode of IFxxx (i.e., IFNE for IFEQ) and where <l'>
+ * designates the instruction just after the GOTO_W.
+ */
+ if (opcode == Opcodes.GOTO) {
+ code.putByte(200); // GOTO_W
+ } else if (opcode == Opcodes.JSR) {
+ code.putByte(201); // JSR_W
+ } else {
+ // if the IF instruction is transformed into IFNOT GOTO_W the
+ // next instruction becomes the target of the IFNOT instruction
+ if (nextInsn != null) {
+ nextInsn.status |= Label.TARGET;
+ }
+ // computes the "opposite" conditional opcode
+ code.putByte(opcode <= 166
+ ? ((opcode + 1) ^ 1) - 1
+ : opcode ^ 1);
+ code.putShort(8); // jump offset
+ code.putByte(200); // GOTO_W
+ }
+ label.put(this, code, code.length - 1, true);
+ } else {
+ /*
+ * case of a backward jump with an offset >= -32768, or of a forward
+ * jump with, of course, an unknown offset. In these cases we store
+ * the offset in 2 bytes (which will be increased in
+ * resizeInstructions, if needed).
+ */
+ code.putByte(opcode);
+ label.put(this, code, code.length - 1, false);
+ }
+ if (currentBlock != null) {
+ if (nextInsn != null) {
+ // if the jump instruction is not a GOTO, the next instruction
+ // is also a successor of this instruction. Calling visitLabel
+ // adds the label of this next instruction as a successor of the
+ // current block, and starts a new basic block
+ visitLabel(nextInsn);
+ }
+ if (opcode == Opcodes.GOTO) {
+ noSuccessor();
+ }
+ }
+ }
+
+ /**
+ * Visits a label: resolves forward references to it and, unless it is a
+ * debug-only label, ends the current basic block and starts a new one.
+ * Successive labels at the same bytecode offset share a single basic
+ * block (and a single Frame in FRAMES mode).
+ */
+ @Override
+ public void visitLabel(final Label label) {
+ // resolves previous forward references to label, if any
+ resize |= label.resolve(this, code.length, code.data);
+ // updates currentBlock
+ if ((label.status & Label.DEBUG) != 0) {
+ return;
+ }
+ if (compute == FRAMES) {
+ if (currentBlock != null) {
+ if (label.position == currentBlock.position) {
+ // successive labels, do not start a new basic block
+ currentBlock.status |= (label.status & Label.TARGET);
+ label.frame = currentBlock.frame;
+ return;
+ }
+ // ends current block (with one new successor)
+ addSuccessor(Edge.NORMAL, label);
+ }
+ // begins a new current block
+ currentBlock = label;
+ if (label.frame == null) {
+ label.frame = new Frame();
+ label.frame.owner = label;
+ }
+ // updates the basic block list
+ if (previousBlock != null) {
+ if (label.position == previousBlock.position) {
+ previousBlock.status |= (label.status & Label.TARGET);
+ label.frame = previousBlock.frame;
+ currentBlock = previousBlock;
+ return;
+ }
+ previousBlock.successor = label;
+ }
+ previousBlock = label;
+ } else if (compute == MAXS) {
+ if (currentBlock != null) {
+ // ends current block (with one new successor)
+ currentBlock.outputStackMax = maxStackSize;
+ addSuccessor(stackSize, label);
+ }
+ // begins a new current block
+ currentBlock = label;
+ // resets the relative current and max stack sizes
+ stackSize = 0;
+ maxStackSize = 0;
+ // updates the basic block list
+ if (previousBlock != null) {
+ previousBlock.successor = label;
+ }
+ previousBlock = label;
+ }
+ }
+
+ /**
+ * Visits an LDC instruction: category-2 constants (long/double) use
+ * LDC2_W and push two slots; otherwise LDC_W is used when the constant
+ * pool index does not fit in one byte.
+ */
+ @Override
+ public void visitLdcInsn(final Object cst) {
+ Item i = cw.newConstItem(cst);
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(Opcodes.LDC, 0, cw, i);
+ } else {
+ int size;
+ // computes the stack size variation
+ if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE)
+ {
+ size = stackSize + 2;
+ } else {
+ size = stackSize + 1;
+ }
+ // updates current and max stack sizes
+ if (size > maxStackSize) {
+ maxStackSize = size;
+ }
+ stackSize = size;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ int index = i.index;
+ if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE) {
+ code.put12(20 /* LDC2_W */, index);
+ } else if (index >= 256) {
+ code.put12(19 /* LDC_W */, index);
+ } else {
+ code.put11(Opcodes.LDC, index);
+ }
+ }
+
+ /**
+ * Visits an IINC instruction; the WIDE form is used when the variable
+ * index exceeds 255 or the increment does not fit in a signed byte.
+ */
+ @Override
+ public void visitIincInsn(final int var, final int increment) {
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(Opcodes.IINC, var, null, null);
+ }
+ }
+ if (compute != NOTHING) {
+ // updates max locals
+ int n = var + 1;
+ if (n > maxLocals) {
+ maxLocals = n;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ if ((var > 255) || (increment > 127) || (increment < -128)) {
+ code.putByte(196 /* WIDE */)
+ .put12(Opcodes.IINC, var)
+ .putShort(increment);
+ } else {
+ code.putByte(Opcodes.IINC).put11(var, increment);
+ }
+ }
+
+ /**
+ * Visits a TABLESWITCH instruction: emits the opcode, the 0-3 padding
+ * bytes required to align the operands on a 4-byte boundary, the default
+ * and case offsets, then delegates control-flow updates to
+ * visitSwitchInsn.
+ */
+ @Override
+ public void visitTableSwitchInsn(
+ final int min,
+ final int max,
+ final Label dflt,
+ final Label... labels)
+ {
+ // adds the instruction to the bytecode of the method
+ int source = code.length;
+ code.putByte(Opcodes.TABLESWITCH);
+ code.putByteArray(null, 0, (4 - code.length % 4) % 4);
+ dflt.put(this, code, source, true);
+ code.putInt(min).putInt(max);
+ for (int i = 0; i < labels.length; ++i) {
+ labels[i].put(this, code, source, true);
+ }
+ // updates currentBlock
+ visitSwitchInsn(dflt, labels);
+ }
+
+ /**
+ * Visits a LOOKUPSWITCH instruction: emits the opcode, the 0-3 padding
+ * bytes for 4-byte alignment, the default offset and the key/offset
+ * pairs, then delegates control-flow updates to visitSwitchInsn.
+ */
+ @Override
+ public void visitLookupSwitchInsn(
+ final Label dflt,
+ final int[] keys,
+ final Label[] labels)
+ {
+ // adds the instruction to the bytecode of the method
+ int source = code.length;
+ code.putByte(Opcodes.LOOKUPSWITCH);
+ code.putByteArray(null, 0, (4 - code.length % 4) % 4);
+ dflt.put(this, code, source, true);
+ code.putInt(labels.length);
+ for (int i = 0; i < labels.length; ++i) {
+ code.putInt(keys[i]);
+ labels[i].put(this, code, source, true);
+ }
+ // updates currentBlock
+ visitSwitchInsn(dflt, labels);
+ }
+
+ /**
+ * Common control-flow bookkeeping for TABLESWITCH and LOOKUPSWITCH: adds
+ * the default label and every case label as successors of the current
+ * basic block, then ends that block (a switch has no fall-through).
+ */
+ private void visitSwitchInsn(final Label dflt, final Label[] labels) {
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(Opcodes.LOOKUPSWITCH, 0, null, null);
+ // adds current block successors
+ addSuccessor(Edge.NORMAL, dflt);
+ dflt.getFirst().status |= Label.TARGET;
+ for (int i = 0; i < labels.length; ++i) {
+ addSuccessor(Edge.NORMAL, labels[i]);
+ labels[i].getFirst().status |= Label.TARGET;
+ }
+ } else {
+ // updates current stack size (max stack size unchanged)
+ --stackSize;
+ // adds current block successors
+ addSuccessor(stackSize, dflt);
+ for (int i = 0; i < labels.length; ++i) {
+ addSuccessor(stackSize, labels[i]);
+ }
+ }
+ // ends current block
+ noSuccessor();
+ }
+ }
+
+ /**
+ * Visits a MULTIANEWARRAY instruction: pops 'dims' dimension sizes and
+ * pushes one array reference (net stack change 1 - dims).
+ */
+ @Override
+ public void visitMultiANewArrayInsn(final String desc, final int dims) {
+ Item i = cw.newClassItem(desc);
+ // Label currentBlock = this.currentBlock;
+ if (currentBlock != null) {
+ if (compute == FRAMES) {
+ currentBlock.frame.execute(Opcodes.MULTIANEWARRAY, dims, cw, i);
+ } else {
+ // updates current stack size (max stack size unchanged because
+ // stack size variation always negative or null)
+ stackSize += 1 - dims;
+ }
+ }
+ // adds the instruction to the bytecode of the method
+ code.put12(Opcodes.MULTIANEWARRAY, i.index).putByte(dims);
+ }
+
+ /**
+ * Visits a try catch block: appends a new Handler to the singly linked
+ * firstHandler/lastHandler list. A null type means a finally block
+ * (catch-all); its constant pool index is stored as 0.
+ */
+ @Override
+ public void visitTryCatchBlock(
+ final Label start,
+ final Label end,
+ final Label handler,
+ final String type)
+ {
+ ++handlerCount;
+ Handler h = new Handler();
+ h.start = start;
+ h.end = end;
+ h.handler = handler;
+ h.desc = type;
+ h.type = type != null ? cw.newClass(type) : 0;
+ if (lastHandler == null) {
+ firstHandler = h;
+ } else {
+ lastHandler.next = h;
+ }
+ lastHandler = h;
+ }
+
+ /**
+ * Visits a local variable declaration: appends an entry to the
+ * LocalVariableTable, plus a LocalVariableTypeTable entry when a generic
+ * signature is present, and enlarges maxLocals to cover the slot(s).
+ */
+ @Override
+ public void visitLocalVariable(
+ final String name,
+ final String desc,
+ final String signature,
+ final Label start,
+ final Label end,
+ final int index)
+ {
+ if (signature != null) {
+ if (localVarType == null) {
+ localVarType = new ByteVector();
+ }
+ ++localVarTypeCount;
+ localVarType.putShort(start.position)
+ .putShort(end.position - start.position)
+ .putShort(cw.newUTF8(name))
+ .putShort(cw.newUTF8(signature))
+ .putShort(index);
+ }
+ if (localVar == null) {
+ localVar = new ByteVector();
+ }
+ ++localVarCount;
+ localVar.putShort(start.position)
+ .putShort(end.position - start.position)
+ .putShort(cw.newUTF8(name))
+ .putShort(cw.newUTF8(desc))
+ .putShort(index);
+ if (compute != NOTHING) {
+ // updates max locals
+ // long and double ('J'/'D' descriptors) occupy two slots
+ char c = desc.charAt(0);
+ int n = index + (c == 'J' || c == 'D' ? 2 : 1);
+ if (n > maxLocals) {
+ maxLocals = n;
+ }
+ }
+ }
+
+ /**
+ * Visits a line number declaration: appends a (start_pc, line_number)
+ * pair to the LineNumberTable attribute.
+ */
+ @Override
+ public void visitLineNumber(final int line, final Label start) {
+ if (lineNumber == null) {
+ lineNumber = new ByteVector();
+ }
+ ++lineNumberCount;
+ lineNumber.putShort(start.position);
+ lineNumber.putShort(line);
+ }
+
+ /**
+ * Visits the maximum stack size and maximum number of local variables of
+ * the method. In FRAMES mode the stack map frames and max stack size are
+ * computed from scratch by a data-flow fixpoint over the basic block
+ * graph; in MAXS mode only the max stack size is computed, by a
+ * depth-first traversal with subroutine (JSR/RET) resolution; otherwise
+ * the caller-supplied values are stored as-is.
+ */
+ @Override
+ public void visitMaxs(final int maxStack, final int maxLocals) {
+ if (ClassReader.FRAMES && compute == FRAMES) {
+ // completes the control flow graph with exception handler blocks
+ Handler handler = firstHandler;
+ while (handler != null) {
+ Label l = handler.start.getFirst();
+ Label h = handler.handler.getFirst();
+ Label e = handler.end.getFirst();
+ // computes the kind of the edges to 'h'
+ String t = handler.desc == null
+ ? "java/lang/Throwable"
+ : handler.desc;
+ int kind = Frame.OBJECT | cw.addType(t);
+ // h is an exception handler
+ h.status |= Label.TARGET;
+ // adds 'h' as a successor of labels between 'start' and 'end'
+ while (l != e) {
+ // creates an edge to 'h'
+ Edge b = new Edge();
+ b.info = kind;
+ b.successor = h;
+ // adds it to the successors of 'l'
+ b.next = l.successors;
+ l.successors = b;
+ // goes to the next label
+ l = l.successor;
+ }
+ handler = handler.next;
+ }
+
+ // creates and visits the first (implicit) frame
+ Frame f = labels.frame;
+ Type[] args = Type.getArgumentTypes(descriptor);
+ f.initInputFrame(cw, access, args, this.maxLocals);
+ visitFrame(f);
+
+ /*
+ * fix point algorithm: mark the first basic block as 'changed'
+ * (i.e. put it in the 'changed' list) and, while there are changed
+ * basic blocks, choose one, mark it as unchanged, and update its
+ * successors (which can be changed in the process).
+ */
+ int max = 0;
+ Label changed = labels;
+ while (changed != null) {
+ // removes a basic block from the list of changed basic blocks
+ Label l = changed;
+ changed = changed.next;
+ l.next = null;
+ f = l.frame;
+ // a reachable jump target must be stored in the stack map
+ if ((l.status & Label.TARGET) != 0) {
+ l.status |= Label.STORE;
+ }
+ // all visited labels are reachable, by definition
+ l.status |= Label.REACHABLE;
+ // updates the (absolute) maximum stack size
+ int blockMax = f.inputStack.length + l.outputStackMax;
+ if (blockMax > max) {
+ max = blockMax;
+ }
+ // updates the successors of the current basic block
+ Edge e = l.successors;
+ while (e != null) {
+ Label n = e.successor.getFirst();
+ boolean change = f.merge(cw, n.frame, e.info);
+ if (change && n.next == null) {
+ // if n has changed and is not already in the 'changed'
+ // list, adds it to this list
+ n.next = changed;
+ changed = n;
+ }
+ e = e.next;
+ }
+ }
+
+ // visits all the frames that must be stored in the stack map
+ Label l = labels;
+ while (l != null) {
+ f = l.frame;
+ if ((l.status & Label.STORE) != 0) {
+ visitFrame(f);
+ }
+ if ((l.status & Label.REACHABLE) == 0) {
+ // finds start and end of dead basic block
+ Label k = l.successor;
+ int start = l.position;
+ int end = (k == null ? code.length : k.position) - 1;
+ // if non empty basic block
+ if (end >= start) {
+ max = Math.max(max, 1);
+ // replaces instructions with NOP ... NOP ATHROW
+ for (int i = start; i < end; ++i) {
+ code.data[i] = Opcodes.NOP;
+ }
+ code.data[end] = (byte) Opcodes.ATHROW;
+ // emits a frame for this unreachable block
+ startFrame(start, 0, 1);
+ frame[frameIndex++] = Frame.OBJECT
+ | cw.addType("java/lang/Throwable");
+ endFrame();
+ // removes the start-end range from the exception handlers
+ firstHandler = Handler.remove(firstHandler, l, k);
+ }
+ }
+ l = l.successor;
+ }
+
+ // recounts the handlers, since Handler.remove may have dropped some
+ handler = firstHandler;
+ handlerCount = 0;
+ while (handler != null) {
+ handlerCount += 1;
+ handler = handler.next;
+ }
+
+ this.maxStack = max;
+ } else if (compute == MAXS) {
+ // completes the control flow graph with exception handler blocks
+ Handler handler = firstHandler;
+ while (handler != null) {
+ Label l = handler.start;
+ Label h = handler.handler;
+ Label e = handler.end;
+ // adds 'h' as a successor of labels between 'start' and 'end'
+ while (l != e) {
+ // creates an edge to 'h'
+ Edge b = new Edge();
+ b.info = Edge.EXCEPTION;
+ b.successor = h;
+ // adds it to the successors of 'l'
+ if ((l.status & Label.JSR) == 0) {
+ b.next = l.successors;
+ l.successors = b;
+ } else {
+ // if l is a JSR block, adds b after the first two edges
+ // to preserve the hypothesis about JSR block successors
+ // order (see {@link #visitJumpInsn})
+ b.next = l.successors.next.next;
+ l.successors.next.next = b;
+ }
+ // goes to the next label
+ l = l.successor;
+ }
+ handler = handler.next;
+ }
+
+ if (subroutines > 0) {
+ // completes the control flow graph with the RET successors
+ /*
+ * first step: finds the subroutines. This step determines, for
+ * each basic block, to which subroutine(s) it belongs.
+ */
+ // finds the basic blocks that belong to the "main" subroutine
+ int id = 0;
+ labels.visitSubroutine(null, 1, subroutines);
+ // finds the basic blocks that belong to the real subroutines
+ Label l = labels;
+ while (l != null) {
+ if ((l.status & Label.JSR) != 0) {
+ // the subroutine is defined by l's TARGET, not by l
+ Label subroutine = l.successors.next.successor;
+ // if this subroutine has not been visited yet...
+ if ((subroutine.status & Label.VISITED) == 0) {
+ // ...assigns it a new id and finds its basic blocks
+ id += 1;
+ subroutine.visitSubroutine(null, (id / 32L) << 32
+ | (1L << (id % 32)), subroutines);
+ }
+ }
+ l = l.successor;
+ }
+ // second step: finds the successors of RET blocks
+ l = labels;
+ while (l != null) {
+ if ((l.status & Label.JSR) != 0) {
+ Label L = labels;
+ while (L != null) {
+ L.status &= ~Label.VISITED2;
+ L = L.successor;
+ }
+ // the subroutine is defined by l's TARGET, not by l
+ Label subroutine = l.successors.next.successor;
+ subroutine.visitSubroutine(l, 0, subroutines);
+ }
+ l = l.successor;
+ }
+ }
+
+ /*
+ * control flow analysis algorithm: while the block stack is not
+ * empty, pop a block from this stack, update the max stack size,
+ * compute the true (non relative) begin stack size of the
+ * successors of this block, and push these successors onto the
+ * stack (unless they have already been pushed onto the stack).
+ * Note: by hypothesis, the {@link Label#inputStackTop} of the
+ * blocks in the block stack are the true (non relative) beginning
+ * stack sizes of these blocks.
+ */
+ int max = 0;
+ Label stack = labels;
+ while (stack != null) {
+ // pops a block from the stack
+ Label l = stack;
+ stack = stack.next;
+ // computes the true (non relative) max stack size of this block
+ int start = l.inputStackTop;
+ int blockMax = start + l.outputStackMax;
+ // updates the global max stack size
+ if (blockMax > max) {
+ max = blockMax;
+ }
+ // analyzes the successors of the block
+ Edge b = l.successors;
+ if ((l.status & Label.JSR) != 0) {
+ // ignores the first edge of JSR blocks (virtual successor)
+ b = b.next;
+ }
+ while (b != null) {
+ l = b.successor;
+ // if this successor has not already been pushed...
+ if ((l.status & Label.PUSHED) == 0) {
+ // computes its true beginning stack size...
+ // (an exception handler always starts with exactly
+ // one value on the stack: the thrown exception)
+ l.inputStackTop = b.info == Edge.EXCEPTION ? 1 : start
+ + b.info;
+ // ...and pushes it onto the stack
+ l.status |= Label.PUSHED;
+ l.next = stack;
+ stack = l;
+ }
+ b = b.next;
+ }
+ }
+ this.maxStack = Math.max(maxStack, max);
+ } else {
+ this.maxStack = maxStack;
+ this.maxLocals = maxLocals;
+ }
+ }
+
    /**
     * Visits the end of the method. This writer has nothing to do at this
     * point: all bookkeeping happens incrementally in the visit methods, and
     * the final serialization is performed later in {@link #getSize} /
     * {@link #put}.
     */
    @Override
    public void visitEnd() {
    }
+
+ // ------------------------------------------------------------------------
+ // Utility methods: control flow analysis algorithm
+ // ------------------------------------------------------------------------
+
+ /**
+ * Adds a successor to the {@link #currentBlock currentBlock} block.
+ *
+ * @param info information about the control flow edge to be added.
+ * @param successor the successor block to be added to the current block.
+ */
+ private void addSuccessor(final int info, final Label successor) {
+ // creates and initializes an Edge object...
+ Edge b = new Edge();
+ b.info = info;
+ b.successor = successor;
+ // ...and adds it to the successor list of the currentBlock block
+ b.next = currentBlock.successors;
+ currentBlock.successors = b;
+ }
+
    /**
     * Ends the current basic block. This method must be used in the case where
     * the current basic block does not have any successor.
     */
    private void noSuccessor() {
        if (compute == FRAMES) {
            // in FRAMES mode, materialize a fresh label at the current end of
            // the code and chain it after previousBlock, so that the frame
            // computation sees a block boundary here
            Label l = new Label();
            l.frame = new Frame();
            l.frame.owner = l;
            l.resolve(this, code.length, code.data);
            previousBlock.successor = l;
            previousBlock = l;
        } else {
            // otherwise just record the max output stack size of the block
            // that is being closed
            currentBlock.outputStackMax = maxStackSize;
        }
        // no current block anymore until the next label/instruction starts one
        currentBlock = null;
    }
+
+ // ------------------------------------------------------------------------
+ // Utility methods: stack map frames
+ // ------------------------------------------------------------------------
+
    /**
     * Visits a frame that has been computed from scratch.
     *
     * @param f the frame that must be visited.
     */
    private void visitFrame(final Frame f) {
        int i, t;
        int nTop = 0;
        int nLocal = 0;
        int nStack = 0;
        int[] locals = f.inputLocals;
        int[] stacks = f.inputStack;
        // computes the number of locals (ignores TOP types that are just after
        // a LONG or a DOUBLE, and all trailing TOP types)
        for (i = 0; i < locals.length; ++i) {
            t = locals[i];
            if (t == Frame.TOP) {
                // count but do not commit yet: trailing TOPs are dropped
                ++nTop;
            } else {
                // a real type flushes any pending interior TOPs plus itself
                nLocal += nTop + 1;
                nTop = 0;
            }
            if (t == Frame.LONG || t == Frame.DOUBLE) {
                // LONG/DOUBLE occupy two slots; skip the implicit second slot
                ++i;
            }
        }
        // computes the stack size (ignores TOP types that are just after
        // a LONG or a DOUBLE)
        for (i = 0; i < stacks.length; ++i) {
            t = stacks[i];
            ++nStack;
            if (t == Frame.LONG || t == Frame.DOUBLE) {
                ++i;
            }
        }
        // visits the frame and its content
        startFrame(f.owner.position, nLocal, nStack);
        // copy exactly nLocal significant local types (two-slot types still
        // advance i by 2 but count as a single emitted type)
        for (i = 0; nLocal > 0; ++i, --nLocal) {
            t = locals[i];
            frame[frameIndex++] = t;
            if (t == Frame.LONG || t == Frame.DOUBLE) {
                ++i;
            }
        }
        for (i = 0; i < stacks.length; ++i) {
            t = stacks[i];
            frame[frameIndex++] = t;
            if (t == Frame.LONG || t == Frame.DOUBLE) {
                ++i;
            }
        }
        endFrame();
    }
+
+ /**
+ * Starts the visit of a stack map frame.
+ *
+ * @param offset the offset of the instruction to which the frame
+ * corresponds.
+ * @param nLocal the number of local variables in the frame.
+ * @param nStack the number of stack elements in the frame.
+ */
+ private void startFrame(final int offset, final int nLocal, final int nStack)
+ {
+ int n = 3 + nLocal + nStack;
+ if (frame == null || frame.length < n) {
+ frame = new int[n];
+ }
+ frame[0] = offset;
+ frame[1] = nLocal;
+ frame[2] = nStack;
+ frameIndex = 3;
+ }
+
    /**
     * Checks if the visit of the current frame {@link #frame} is finished, and
     * if yes, write it in the StackMapTable attribute.
     */
    private void endFrame() {
        if (previousFrame != null) { // do not write the first frame
            if (stackMap == null) {
                // lazily create the attribute byte vector on first real frame
                stackMap = new ByteVector();
            }
            writeFrame();
            ++frameCount;
        }
        // the just-visited frame becomes the reference for delta compression
        // of the next one
        previousFrame = frame;
        frame = null;
    }
+
    /**
     * Compress and writes the current frame {@link #frame} in the StackMapTable
     * attribute.
     */
    private void writeFrame() {
        int clocalsSize = frame[1];
        int cstackSize = frame[2];
        // pre Java 6 class files use the uncompressed "StackMap" format:
        // every frame is written in full (offset, locals, stack)
        if ((cw.version & 0xFFFF) < Opcodes.V1_6) {
            stackMap.putShort(frame[0]).putShort(clocalsSize);
            writeFrameTypes(3, 3 + clocalsSize);
            stackMap.putShort(cstackSize);
            writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize);
            return;
        }
        int localsSize = previousFrame[1];
        int type = FULL_FRAME;
        int k = 0;
        int delta;
        // offsets are delta-encoded relative to the previous frame (the first
        // frame uses the absolute offset)
        if (frameCount == 0) {
            delta = frame[0];
        } else {
            delta = frame[0] - previousFrame[0] - 1;
        }
        // pick the most compact frame type allowed by the locals/stack deltas
        if (cstackSize == 0) {
            k = clocalsSize - localsSize;
            switch (k) {
                case -3:
                case -2:
                case -1:
                    type = CHOP_FRAME;
                    localsSize = clocalsSize;
                    break;
                case 0:
                    type = delta < 64 ? SAME_FRAME : SAME_FRAME_EXTENDED;
                    break;
                case 1:
                case 2:
                case 3:
                    type = APPEND_FRAME;
                    break;
            }
        } else if (clocalsSize == localsSize && cstackSize == 1) {
            type = delta < 63
                    ? SAME_LOCALS_1_STACK_ITEM_FRAME
                    : SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED;
        }
        if (type != FULL_FRAME) {
            // verify if locals are the same; any difference forces FULL_FRAME
            int l = 3;
            for (int j = 0; j < localsSize; j++) {
                if (frame[l] != previousFrame[l]) {
                    type = FULL_FRAME;
                    break;
                }
                l++;
            }
        }
        switch (type) {
            case SAME_FRAME:
                stackMap.putByte(delta);
                break;
            case SAME_LOCALS_1_STACK_ITEM_FRAME:
                stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
                writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
                break;
            case SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED:
                stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
                        .putShort(delta);
                writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
                break;
            case SAME_FRAME_EXTENDED:
                stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
                break;
            case CHOP_FRAME:
                // k is negative here, encoding how many locals were chopped
                stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
                break;
            case APPEND_FRAME:
                stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
                writeFrameTypes(3 + localsSize, 3 + clocalsSize);
                break;
            // case FULL_FRAME:
            default:
                stackMap.putByte(FULL_FRAME)
                        .putShort(delta)
                        .putShort(clocalsSize);
                writeFrameTypes(3, 3 + clocalsSize);
                stackMap.putShort(cstackSize);
                writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize);
        }
    }
+
    /**
     * Writes some types of the current frame {@link #frame} into the
     * StackMapTableAttribute. This method converts types from the format used
     * in {@link Label} to the format used in StackMapTable attributes. In
     * particular, it converts type table indexes to constant pool indexes.
     *
     * @param start index of the first type in {@link #frame} to write.
     * @param end index of last type in {@link #frame} to write (exclusive).
     */
    private void writeFrameTypes(final int start, final int end) {
        for (int i = start; i < end; ++i) {
            int t = frame[i];
            // d holds the array-dimension bits of the internal type encoding
            int d = t & Frame.DIM;
            if (d == 0) {
                // non-array type
                int v = t & Frame.BASE_VALUE;
                switch (t & Frame.BASE_KIND) {
                    case Frame.OBJECT:
                        // tag 7 = ITEM_Object, followed by a class CP index
                        stackMap.putByte(7)
                                .putShort(cw.newClass(cw.typeTable[v].strVal1));
                        break;
                    case Frame.UNINITIALIZED:
                        // tag 8 = ITEM_Uninitialized, followed by an offset
                        stackMap.putByte(8).putShort(cw.typeTable[v].intVal);
                        break;
                    default:
                        // primitive verification types map directly to tags
                        stackMap.putByte(v);
                }
            } else {
                // array type: rebuild its descriptor ("[[I", "[Ljava/...;")
                StringBuffer buf = new StringBuffer();
                d >>= 28;
                while (d-- > 0) {
                    buf.append('[');
                }
                if ((t & Frame.BASE_KIND) == Frame.OBJECT) {
                    buf.append('L');
                    buf.append(cw.typeTable[t & Frame.BASE_VALUE].strVal1);
                    buf.append(';');
                } else {
                    // element type code -> JVM descriptor character
                    switch (t & 0xF) {
                        case 1:
                            buf.append('I');
                            break;
                        case 2:
                            buf.append('F');
                            break;
                        case 3:
                            buf.append('D');
                            break;
                        case 9:
                            buf.append('Z');
                            break;
                        case 10:
                            buf.append('B');
                            break;
                        case 11:
                            buf.append('C');
                            break;
                        case 12:
                            buf.append('S');
                            break;
                        default:
                            buf.append('J');
                    }
                }
                // arrays are always written as ITEM_Object (tag 7)
                stackMap.putByte(7).putShort(cw.newClass(buf.toString()));
            }
        }
    }
+
+ private void writeFrameType(final Object type) {
+ if (type instanceof String) {
+ stackMap.putByte(7).putShort(cw.newClass((String) type));
+ } else if (type instanceof Integer) {
+ stackMap.putByte(((Integer) type).intValue());
+ } else {
+ stackMap.putByte(8).putShort(((Label) type).position);
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods: dump bytecode array
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the size of the bytecode of this method.
+ *
+ * @return the size of the bytecode of this method.
+ */
+ final int getSize() {
+ if (classReaderOffset != 0) {
+ return 6 + classReaderLength;
+ }
+ if (resize) {
+ // replaces the temporary jump opcodes introduced by Label.resolve.
+ if (ClassReader.RESIZE) {
+ resizeInstructions();
+ } else {
+ throw new RuntimeException("Method code too large!");
+ }
+ }
+ int size = 8;
+ if (code.length > 0) {
+ if (code.length > 65536) {
+ String nameString = "";
+ int i = 0;
+ // find item that corresponds to the index of our name
+ while (i < cw.items.length && (cw.items[i] == null || cw.items[i].index != name)) i++;
+ if (cw.items[i] != null) nameString = cw.items[i].strVal1 +"'s ";
+ throw new RuntimeException("Method "+ nameString +"code too large!");
+ }
+ cw.newUTF8("Code");
+ size += 18 + code.length + 8 * handlerCount;
+ if (localVar != null) {
+ cw.newUTF8("LocalVariableTable");
+ size += 8 + localVar.length;
+ }
+ if (localVarType != null) {
+ cw.newUTF8("LocalVariableTypeTable");
+ size += 8 + localVarType.length;
+ }
+ if (lineNumber != null) {
+ cw.newUTF8("LineNumberTable");
+ size += 8 + lineNumber.length;
+ }
+ if (stackMap != null) {
+ boolean zip = (cw.version & 0xFFFF) >= Opcodes.V1_6;
+ cw.newUTF8(zip ? "StackMapTable" : "StackMap");
+ size += 8 + stackMap.length;
+ }
+ if (cattrs != null) {
+ size += cattrs.getSize(cw,
+ code.data,
+ code.length,
+ maxStack,
+ maxLocals);
+ }
+ }
+ if (exceptionCount > 0) {
+ cw.newUTF8("Exceptions");
+ size += 8 + 2 * exceptionCount;
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0
+ && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
+ {
+ cw.newUTF8("Synthetic");
+ size += 6;
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ cw.newUTF8("Deprecated");
+ size += 6;
+ }
+ if (ClassReader.SIGNATURES && signature != null) {
+ cw.newUTF8("Signature");
+ cw.newUTF8(signature);
+ size += 8;
+ }
+ if (ClassReader.ANNOTATIONS && annd != null) {
+ cw.newUTF8("AnnotationDefault");
+ size += 6 + annd.length;
+ }
+ if (ClassReader.ANNOTATIONS && anns != null) {
+ cw.newUTF8("RuntimeVisibleAnnotations");
+ size += 8 + anns.getSize();
+ }
+ if (ClassReader.ANNOTATIONS && ianns != null) {
+ cw.newUTF8("RuntimeInvisibleAnnotations");
+ size += 8 + ianns.getSize();
+ }
+ if (ClassReader.ANNOTATIONS && panns != null) {
+ cw.newUTF8("RuntimeVisibleParameterAnnotations");
+ size += 7 + 2 * (panns.length - synthetics);
+ for (int i = panns.length - 1; i >= synthetics; --i) {
+ size += panns[i] == null ? 0 : panns[i].getSize();
+ }
+ }
+ if (ClassReader.ANNOTATIONS && ipanns != null) {
+ cw.newUTF8("RuntimeInvisibleParameterAnnotations");
+ size += 7 + 2 * (ipanns.length - synthetics);
+ for (int i = ipanns.length - 1; i >= synthetics; --i) {
+ size += ipanns[i] == null ? 0 : ipanns[i].getSize();
+ }
+ }
+ if (attrs != null) {
+ size += attrs.getSize(cw, null, 0, -1, -1);
+ }
+ return size;
+ }
+
    /**
     * Puts the bytecode of this method in the given byte vector.
     *
     * @param out the byte vector into which the bytecode of this method must be
     *        copied.
     */
    final void put(final ByteVector out) {
        // strip the pseudo access flags that must not appear in the class
        // file (Deprecated / the internal synthetic-attribute marker)
        int mask = Opcodes.ACC_DEPRECATED
                | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
                | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
        out.putShort(access & ~mask).putShort(name).putShort(desc);
        if (classReaderOffset != 0) {
            // method copied verbatim from the original class file
            out.putByteArray(cw.cr.b, classReaderOffset, classReaderLength);
            return;
        }
        // first count the method attributes, mirroring the checks in getSize
        int attributeCount = 0;
        if (code.length > 0) {
            ++attributeCount;
        }
        if (exceptionCount > 0) {
            ++attributeCount;
        }
        if ((access & Opcodes.ACC_SYNTHETIC) != 0
                && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
        {
            ++attributeCount;
        }
        if ((access & Opcodes.ACC_DEPRECATED) != 0) {
            ++attributeCount;
        }
        if (ClassReader.SIGNATURES && signature != null) {
            ++attributeCount;
        }
        if (ClassReader.ANNOTATIONS && annd != null) {
            ++attributeCount;
        }
        if (ClassReader.ANNOTATIONS && anns != null) {
            ++attributeCount;
        }
        if (ClassReader.ANNOTATIONS && ianns != null) {
            ++attributeCount;
        }
        if (ClassReader.ANNOTATIONS && panns != null) {
            ++attributeCount;
        }
        if (ClassReader.ANNOTATIONS && ipanns != null) {
            ++attributeCount;
        }
        if (attrs != null) {
            attributeCount += attrs.getCount();
        }
        out.putShort(attributeCount);
        if (code.length > 0) {
            // compute the Code attribute content length (excluding its own
            // 6 byte attribute header)
            int size = 12 + code.length + 8 * handlerCount;
            if (localVar != null) {
                size += 8 + localVar.length;
            }
            if (localVarType != null) {
                size += 8 + localVarType.length;
            }
            if (lineNumber != null) {
                size += 8 + lineNumber.length;
            }
            if (stackMap != null) {
                size += 8 + stackMap.length;
            }
            if (cattrs != null) {
                size += cattrs.getSize(cw,
                        code.data,
                        code.length,
                        maxStack,
                        maxLocals);
            }
            out.putShort(cw.newUTF8("Code")).putInt(size);
            out.putShort(maxStack).putShort(maxLocals);
            out.putInt(code.length).putByteArray(code.data, 0, code.length);
            // exception handler table
            out.putShort(handlerCount);
            if (handlerCount > 0) {
                Handler h = firstHandler;
                while (h != null) {
                    out.putShort(h.start.position)
                            .putShort(h.end.position)
                            .putShort(h.handler.position)
                            .putShort(h.type);
                    h = h.next;
                }
            }
            // attributes nested inside the Code attribute
            attributeCount = 0;
            if (localVar != null) {
                ++attributeCount;
            }
            if (localVarType != null) {
                ++attributeCount;
            }
            if (lineNumber != null) {
                ++attributeCount;
            }
            if (stackMap != null) {
                ++attributeCount;
            }
            if (cattrs != null) {
                attributeCount += cattrs.getCount();
            }
            out.putShort(attributeCount);
            if (localVar != null) {
                out.putShort(cw.newUTF8("LocalVariableTable"));
                out.putInt(localVar.length + 2).putShort(localVarCount);
                out.putByteArray(localVar.data, 0, localVar.length);
            }
            if (localVarType != null) {
                out.putShort(cw.newUTF8("LocalVariableTypeTable"));
                out.putInt(localVarType.length + 2).putShort(localVarTypeCount);
                out.putByteArray(localVarType.data, 0, localVarType.length);
            }
            if (lineNumber != null) {
                out.putShort(cw.newUTF8("LineNumberTable"));
                out.putInt(lineNumber.length + 2).putShort(lineNumberCount);
                out.putByteArray(lineNumber.data, 0, lineNumber.length);
            }
            if (stackMap != null) {
                boolean zip = (cw.version & 0xFFFF) >= Opcodes.V1_6;
                out.putShort(cw.newUTF8(zip ? "StackMapTable" : "StackMap"));
                out.putInt(stackMap.length + 2).putShort(frameCount);
                out.putByteArray(stackMap.data, 0, stackMap.length);
            }
            if (cattrs != null) {
                cattrs.put(cw, code.data, code.length, maxLocals, maxStack, out);
            }
        }
        if (exceptionCount > 0) {
            out.putShort(cw.newUTF8("Exceptions"))
                    .putInt(2 * exceptionCount + 2);
            out.putShort(exceptionCount);
            for (int i = 0; i < exceptionCount; ++i) {
                out.putShort(exceptions[i]);
            }
        }
        if ((access & Opcodes.ACC_SYNTHETIC) != 0
                && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
        {
            out.putShort(cw.newUTF8("Synthetic")).putInt(0);
        }
        if ((access & Opcodes.ACC_DEPRECATED) != 0) {
            out.putShort(cw.newUTF8("Deprecated")).putInt(0);
        }
        if (ClassReader.SIGNATURES && signature != null) {
            out.putShort(cw.newUTF8("Signature"))
                    .putInt(2)
                    .putShort(cw.newUTF8(signature));
        }
        if (ClassReader.ANNOTATIONS && annd != null) {
            out.putShort(cw.newUTF8("AnnotationDefault"));
            out.putInt(annd.length);
            out.putByteArray(annd.data, 0, annd.length);
        }
        if (ClassReader.ANNOTATIONS && anns != null) {
            out.putShort(cw.newUTF8("RuntimeVisibleAnnotations"));
            anns.put(out);
        }
        if (ClassReader.ANNOTATIONS && ianns != null) {
            out.putShort(cw.newUTF8("RuntimeInvisibleAnnotations"));
            ianns.put(out);
        }
        if (ClassReader.ANNOTATIONS && panns != null) {
            out.putShort(cw.newUTF8("RuntimeVisibleParameterAnnotations"));
            AnnotationWriter.put(panns, synthetics, out);
        }
        if (ClassReader.ANNOTATIONS && ipanns != null) {
            out.putShort(cw.newUTF8("RuntimeInvisibleParameterAnnotations"));
            AnnotationWriter.put(ipanns, synthetics, out);
        }
        if (attrs != null) {
            attrs.put(cw, null, 0, -1, -1, out);
        }
    }
+
+ // ------------------------------------------------------------------------
+ // Utility methods: instruction resizing (used to handle GOTO_W and JSR_W)
+ // ------------------------------------------------------------------------
+
    /**
     * Resizes and replaces the temporary instructions inserted by
     * {@link Label#resolve} for wide forward jumps, while keeping jump offsets
     * and instruction addresses consistent. This may require to resize other
     * existing instructions, or even to introduce new instructions: for
     * example, increasing the size of an instruction by 2 at the middle of a
     * method can increases the offset of an IFEQ instruction from 32766 to
     * 32768, in which case IFEQ 32766 must be replaced with IFNEQ 8 GOTO_W
     * 32765. This, in turn, may require to increase the size of another jump
     * instruction, and so on... All these operations are handled automatically
     * by this method. <p> <i>This method must be called after all the method
     * that is being built has been visited</i>. In particular, the
     * {@link Label Label} objects used to construct the method are no longer
     * valid after this method has been called.
     */
    private void resizeInstructions() {
        byte[] b = code.data; // bytecode of the method
        int u, v, label; // indexes in b
        int i, j; // loop indexes
        /*
         * 1st step: As explained above, resizing an instruction may require to
         * resize another one, which may require to resize yet another one, and
         * so on. The first step of the algorithm consists in finding all the
         * instructions that need to be resized, without modifying the code.
         * This is done by the following "fix point" algorithm:
         *
         * Parse the code to find the jump instructions whose offset will need
         * more than 2 bytes to be stored (the future offset is computed from
         * the current offset and from the number of bytes that will be inserted
         * or removed between the source and target instructions). For each such
         * instruction, adds an entry in (a copy of) the indexes and sizes
         * arrays (if this has not already been done in a previous iteration!).
         *
         * If at least one entry has been added during the previous step, go
         * back to the beginning, otherwise stop.
         *
         * In fact the real algorithm is complicated by the fact that the size
         * of TABLESWITCH and LOOKUPSWITCH instructions depends on their
         * position in the bytecode (because of padding). In order to ensure the
         * convergence of the algorithm, the number of bytes to be added or
         * removed from these instructions is over estimated during the previous
         * loop, and computed exactly only after the loop is finished (this
         * requires another pass to parse the bytecode of the method).
         */
        int[] allIndexes = new int[0]; // copy of indexes
        int[] allSizes = new int[0]; // copy of sizes
        boolean[] resize; // instructions to be resized
        int newOffset; // future offset of a jump instruction

        resize = new boolean[code.length];

        // 3 = loop again, 2 = loop ended, 1 = last pass, 0 = done
        int state = 3;
        do {
            if (state == 3) {
                state = 2;
            }
            u = 0;
            while (u < b.length) {
                int opcode = b[u] & 0xFF; // opcode of current instruction
                int insert = 0; // bytes to be added after this instruction

                switch (ClassWriter.TYPE[opcode]) {
                    case ClassWriter.NOARG_INSN:
                    case ClassWriter.IMPLVAR_INSN:
                        u += 1;
                        break;
                    case ClassWriter.LABEL_INSN:
                        if (opcode > 201) {
                            // converts temporary opcodes 202 to 217, 218 and
                            // 219 to IFEQ ... JSR (inclusive), IFNULL and
                            // IFNONNULL
                            opcode = opcode < 218 ? opcode - 49 : opcode - 20;
                            label = u + readUnsignedShort(b, u + 1);
                        } else {
                            label = u + readShort(b, u + 1);
                        }
                        newOffset = getNewOffset(allIndexes, allSizes, u, label);
                        // a jump that no longer fits in a signed short must
                        // be widened
                        if (newOffset < Short.MIN_VALUE
                                || newOffset > Short.MAX_VALUE)
                        {
                            if (!resize[u]) {
                                if (opcode == Opcodes.GOTO
                                        || opcode == Opcodes.JSR)
                                {
                                    // two additional bytes will be required to
                                    // replace this GOTO or JSR instruction with
                                    // a GOTO_W or a JSR_W
                                    insert = 2;
                                } else {
                                    // five additional bytes will be required to
                                    // replace this IFxxx <l> instruction with
                                    // IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx
                                    // is the "opposite" opcode of IFxxx (i.e.,
                                    // IFNE for IFEQ) and where <l'> designates
                                    // the instruction just after the GOTO_W.
                                    insert = 5;
                                }
                                resize[u] = true;
                            }
                        }
                        u += 3;
                        break;
                    case ClassWriter.LABELW_INSN:
                        u += 5;
                        break;
                    case ClassWriter.TABL_INSN:
                        if (state == 1) {
                            // true number of bytes to be added (or removed)
                            // from this instruction = (future number of padding
                            // bytes - current number of padding byte) -
                            // previously over estimated variation =
                            // = ((3 - newOffset%4) - (3 - u%4)) - u%4
                            // = (-newOffset%4 + u%4) - u%4
                            // = -(newOffset & 3)
                            newOffset = getNewOffset(allIndexes, allSizes, 0, u);
                            insert = -(newOffset & 3);
                        } else if (!resize[u]) {
                            // over estimation of the number of bytes to be
                            // added to this instruction = 3 - current number
                            // of padding bytes = 3 - (3 - u%4) = u%4 = u & 3
                            insert = u & 3;
                            resize[u] = true;
                        }
                        // skips instruction
                        u = u + 4 - (u & 3);
                        u += 4 * (readInt(b, u + 8) - readInt(b, u + 4) + 1) + 12;
                        break;
                    case ClassWriter.LOOK_INSN:
                        if (state == 1) {
                            // like TABL_INSN
                            newOffset = getNewOffset(allIndexes, allSizes, 0, u);
                            insert = -(newOffset & 3);
                        } else if (!resize[u]) {
                            // like TABL_INSN
                            insert = u & 3;
                            resize[u] = true;
                        }
                        // skips instruction
                        u = u + 4 - (u & 3);
                        u += 8 * readInt(b, u + 4) + 8;
                        break;
                    case ClassWriter.WIDE_INSN:
                        opcode = b[u + 1] & 0xFF;
                        if (opcode == Opcodes.IINC) {
                            u += 6;
                        } else {
                            u += 4;
                        }
                        break;
                    case ClassWriter.VAR_INSN:
                    case ClassWriter.SBYTE_INSN:
                    case ClassWriter.LDC_INSN:
                        u += 2;
                        break;
                    case ClassWriter.SHORT_INSN:
                    case ClassWriter.LDCW_INSN:
                    case ClassWriter.FIELDORMETH_INSN:
                    case ClassWriter.TYPE_INSN:
                    case ClassWriter.IINC_INSN:
                        u += 3;
                        break;
                    case ClassWriter.ITFMETH_INSN:
                    case ClassWriter.INDYMETH_INSN:
                        u += 5;
                        break;
                    // case ClassWriter.MANA_INSN:
                    default:
                        u += 4;
                        break;
                }
                if (insert != 0) {
                    // adds a new (u, insert) entry in the allIndexes and
                    // allSizes arrays
                    int[] newIndexes = new int[allIndexes.length + 1];
                    int[] newSizes = new int[allSizes.length + 1];
                    System.arraycopy(allIndexes,
                            0,
                            newIndexes,
                            0,
                            allIndexes.length);
                    System.arraycopy(allSizes, 0, newSizes, 0, allSizes.length);
                    newIndexes[allIndexes.length] = u;
                    newSizes[allSizes.length] = insert;
                    allIndexes = newIndexes;
                    allSizes = newSizes;
                    if (insert > 0) {
                        // a growth may push other jumps out of range: iterate
                        state = 3;
                    }
                }
            }
            if (state < 3) {
                --state;
            }
        } while (state != 0);

        // 2nd step:
        // copies the bytecode of the method into a new bytevector, updates the
        // offsets, and inserts (or removes) bytes as requested.

        ByteVector newCode = new ByteVector(code.length);

        u = 0;
        while (u < code.length) {
            int opcode = b[u] & 0xFF;
            switch (ClassWriter.TYPE[opcode]) {
                case ClassWriter.NOARG_INSN:
                case ClassWriter.IMPLVAR_INSN:
                    newCode.putByte(opcode);
                    u += 1;
                    break;
                case ClassWriter.LABEL_INSN:
                    if (opcode > 201) {
                        // changes temporary opcodes 202 to 217 (inclusive), 218
                        // and 219 to IFEQ ... JSR (inclusive), IFNULL and
                        // IFNONNULL
                        opcode = opcode < 218 ? opcode - 49 : opcode - 20;
                        label = u + readUnsignedShort(b, u + 1);
                    } else {
                        label = u + readShort(b, u + 1);
                    }
                    newOffset = getNewOffset(allIndexes, allSizes, u, label);
                    if (resize[u]) {
                        // replaces GOTO with GOTO_W, JSR with JSR_W and IFxxx
                        // <l> with IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx is
                        // the "opposite" opcode of IFxxx (i.e., IFNE for IFEQ)
                        // and where <l'> designates the instruction just after
                        // the GOTO_W.
                        if (opcode == Opcodes.GOTO) {
                            newCode.putByte(200); // GOTO_W
                        } else if (opcode == Opcodes.JSR) {
                            newCode.putByte(201); // JSR_W
                        } else {
                            newCode.putByte(opcode <= 166
                                    ? ((opcode + 1) ^ 1) - 1
                                    : opcode ^ 1);
                            newCode.putShort(8); // jump offset
                            newCode.putByte(200); // GOTO_W
                            // newOffset now computed from start of GOTO_W
                            newOffset -= 3;
                        }
                        newCode.putInt(newOffset);
                    } else {
                        newCode.putByte(opcode);
                        newCode.putShort(newOffset);
                    }
                    u += 3;
                    break;
                case ClassWriter.LABELW_INSN:
                    label = u + readInt(b, u + 1);
                    newOffset = getNewOffset(allIndexes, allSizes, u, label);
                    newCode.putByte(opcode);
                    newCode.putInt(newOffset);
                    u += 5;
                    break;
                case ClassWriter.TABL_INSN:
                    // skips 0 to 3 padding bytes
                    v = u;
                    u = u + 4 - (v & 3);
                    // reads and copies instruction
                    newCode.putByte(Opcodes.TABLESWITCH);
                    newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
                    label = v + readInt(b, u);
                    u += 4;
                    newOffset = getNewOffset(allIndexes, allSizes, v, label);
                    newCode.putInt(newOffset);
                    j = readInt(b, u);
                    u += 4;
                    newCode.putInt(j);
                    j = readInt(b, u) - j + 1;
                    u += 4;
                    newCode.putInt(readInt(b, u - 4));
                    for (; j > 0; --j) {
                        label = v + readInt(b, u);
                        u += 4;
                        newOffset = getNewOffset(allIndexes, allSizes, v, label);
                        newCode.putInt(newOffset);
                    }
                    break;
                case ClassWriter.LOOK_INSN:
                    // skips 0 to 3 padding bytes
                    v = u;
                    u = u + 4 - (v & 3);
                    // reads and copies instruction
                    newCode.putByte(Opcodes.LOOKUPSWITCH);
                    newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
                    label = v + readInt(b, u);
                    u += 4;
                    newOffset = getNewOffset(allIndexes, allSizes, v, label);
                    newCode.putInt(newOffset);
                    j = readInt(b, u);
                    u += 4;
                    newCode.putInt(j);
                    for (; j > 0; --j) {
                        newCode.putInt(readInt(b, u));
                        u += 4;
                        label = v + readInt(b, u);
                        u += 4;
                        newOffset = getNewOffset(allIndexes, allSizes, v, label);
                        newCode.putInt(newOffset);
                    }
                    break;
                case ClassWriter.WIDE_INSN:
                    opcode = b[u + 1] & 0xFF;
                    if (opcode == Opcodes.IINC) {
                        newCode.putByteArray(b, u, 6);
                        u += 6;
                    } else {
                        newCode.putByteArray(b, u, 4);
                        u += 4;
                    }
                    break;
                case ClassWriter.VAR_INSN:
                case ClassWriter.SBYTE_INSN:
                case ClassWriter.LDC_INSN:
                    newCode.putByteArray(b, u, 2);
                    u += 2;
                    break;
                case ClassWriter.SHORT_INSN:
                case ClassWriter.LDCW_INSN:
                case ClassWriter.FIELDORMETH_INSN:
                case ClassWriter.TYPE_INSN:
                case ClassWriter.IINC_INSN:
                    newCode.putByteArray(b, u, 3);
                    u += 3;
                    break;
                case ClassWriter.ITFMETH_INSN:
                case ClassWriter.INDYMETH_INSN:
                    newCode.putByteArray(b, u, 5);
                    u += 5;
                    break;
                // case MANA_INSN:
                default:
                    newCode.putByteArray(b, u, 4);
                    u += 4;
                    break;
            }
        }

        // recomputes the stack map frames
        if (frameCount > 0) {
            if (compute == FRAMES) {
                frameCount = 0;
                stackMap = null;
                previousFrame = null;
                frame = null;
                Frame f = new Frame();
                f.owner = labels;
                Type[] args = Type.getArgumentTypes(descriptor);
                f.initInputFrame(cw, access, args, maxLocals);
                visitFrame(f);
                Label l = labels;
                while (l != null) {
                    /*
                     * here we need the original label position. getNewOffset
                     * must therefore never have been called for this label.
                     */
                    u = l.position - 3;
                    if ((l.status & Label.STORE) != 0 || (u >= 0 && resize[u]))
                    {
                        getNewOffset(allIndexes, allSizes, l);
                        // TODO update offsets in UNINITIALIZED values
                        visitFrame(l.frame);
                    }
                    l = l.successor;
                }
            } else {
                /*
                 * Resizing an existing stack map frame table is really hard.
                 * Not only the table must be parsed to update the offets, but
                 * new frames may be needed for jump instructions that were
                 * inserted by this method. And updating the offsets or
                 * inserting frames can change the format of the following
                 * frames, in case of packed frames. In practice the whole table
                 * must be recomputed. For this the frames are marked as
                 * potentially invalid. This will cause the whole class to be
                 * reread and rewritten with the COMPUTE_FRAMES option (see the
                 * ClassWriter.toByteArray method). This is not very efficient
                 * but is much easier and requires much less code than any other
                 * method I can think of.
                 */
                cw.invalidFrames = true;
            }
        }
        // updates the exception handler block labels
        Handler h = firstHandler;
        while (h != null) {
            getNewOffset(allIndexes, allSizes, h.start);
            getNewOffset(allIndexes, allSizes, h.end);
            getNewOffset(allIndexes, allSizes, h.handler);
            h = h.next;
        }
        // updates the instructions addresses in the
        // local var and line number tables
        for (i = 0; i < 2; ++i) {
            ByteVector bv = i == 0 ? localVar : localVarType;
            if (bv != null) {
                b = bv.data;
                u = 0;
                while (u < bv.length) {
                    // each entry is 10 bytes: start_pc, length, name, desc, idx
                    label = readUnsignedShort(b, u);
                    newOffset = getNewOffset(allIndexes, allSizes, 0, label);
                    writeShort(b, u, newOffset);
                    // length is rewritten as (new end - new start)
                    label += readUnsignedShort(b, u + 2);
                    newOffset = getNewOffset(allIndexes, allSizes, 0, label)
                            - newOffset;
                    writeShort(b, u + 2, newOffset);
                    u += 10;
                }
            }
        }
        if (lineNumber != null) {
            b = lineNumber.data;
            u = 0;
            while (u < lineNumber.length) {
                writeShort(b, u, getNewOffset(allIndexes,
                        allSizes,
                        0,
                        readUnsignedShort(b, u)));
                u += 4;
            }
        }
        // updates the labels of the other attributes
        Attribute attr = cattrs;
        while (attr != null) {
            Label[] labels = attr.getLabels();
            if (labels != null) {
                for (i = labels.length - 1; i >= 0; --i) {
                    getNewOffset(allIndexes, allSizes, labels[i]);
                }
            }
        }
            attr = attr.next;
        }

        // replaces old bytecodes with new ones
        code = newCode;
    }
+
+ /**
+ * Reads an unsigned short value in the given byte array.
+ *
+ * @param b a byte array.
+ * @param index the start index of the value to be read.
+ * @return the read value.
+ */
+ static int readUnsignedShort(final byte[] b, final int index) {
+ return ((b[index] & 0xFF) << 8) | (b[index + 1] & 0xFF);
+ }
+
+ /**
+ * Reads a signed short value in the given byte array.
+ *
+ * @param b a byte array.
+ * @param index the start index of the value to be read.
+ * @return the read value.
+ */
+ static short readShort(final byte[] b, final int index) {
+ return (short) (((b[index] & 0xFF) << 8) | (b[index + 1] & 0xFF));
+ }
+
+ /**
+ * Reads a signed int value in the given byte array.
+ *
+ * @param b a byte array.
+ * @param index the start index of the value to be read.
+ * @return the read value.
+ */
+ static int readInt(final byte[] b, final int index) {
+ return ((b[index] & 0xFF) << 24) | ((b[index + 1] & 0xFF) << 16)
+ | ((b[index + 2] & 0xFF) << 8) | (b[index + 3] & 0xFF);
+ }
+
+ /**
+ * Writes a short value in the given byte array.
+ *
+ * @param b a byte array.
+ * @param index where the first byte of the short value must be written.
+ * @param s the value to be written in the given byte array.
+ */
+ static void writeShort(final byte[] b, final int index, final int s) {
+ b[index] = (byte) (s >>> 8);
+ b[index + 1] = (byte) s;
+ }
+
+ /**
+ * Computes the future value of a bytecode offset. <p> Note: it is possible
+ * to have several entries for the same instruction in the <tt>indexes</tt>
+ * and <tt>sizes</tt>: two entries (index=a,size=b) and (index=a,size=b')
+ * are equivalent to a single entry (index=a,size=b+b').
+ *
+ * @param indexes current positions of the instructions to be resized. Each
+ * instruction must be designated by the index of its <i>last</i>
+ * byte, plus one (or, in other words, by the index of the <i>first</i>
+ * byte of the <i>next</i> instruction).
+ * @param sizes the number of bytes to be <i>added</i> to the above
+ * instructions. More precisely, for each i < <tt>len</tt>,
+ * <tt>sizes</tt>[i] bytes will be added at the end of the
+ * instruction designated by <tt>indexes</tt>[i] or, if
+ * <tt>sizes</tt>[i] is negative, the <i>last</i> |<tt>sizes[i]</tt>|
+ * bytes of the instruction will be removed (the instruction size
+ * <i>must not</i> become negative or null).
+ * @param begin index of the first byte of the source instruction.
+ * @param end index of the first byte of the target instruction.
+ * @return the future value of the given bytecode offset.
+ */
+ static int getNewOffset(
+ final int[] indexes,
+ final int[] sizes,
+ final int begin,
+ final int end)
+ {
+ int offset = end - begin;
+ for (int i = 0; i < indexes.length; ++i) {
+ if (begin < indexes[i] && indexes[i] <= end) {
+ // forward jump
+ offset += sizes[i];
+ } else if (end < indexes[i] && indexes[i] <= begin) {
+ // backward jump
+ offset -= sizes[i];
+ }
+ }
+ return offset;
+ }
+
    /**
     * Updates the offset of the given label.
     *
     * @param indexes current positions of the instructions to be resized. Each
     *        instruction must be designated by the index of its <i>last</i>
     *        byte, plus one (or, in other words, by the index of the <i>first</i>
     *        byte of the <i>next</i> instruction).
     * @param sizes the number of bytes to be <i>added</i> to the above
     *        instructions. More precisely, for each i < <tt>len</tt>,
     *        <tt>sizes</tt>[i] bytes will be added at the end of the
     *        instruction designated by <tt>indexes</tt>[i] or, if
     *        <tt>sizes</tt>[i] is negative, the <i>last</i> |<tt>sizes[i]</tt>|
     *        bytes of the instruction will be removed (the instruction size
     *        <i>must not</i> become negative or null).
     * @param label the label whose offset must be updated.
     */
    static void getNewOffset(
        final int[] indexes,
        final int[] sizes,
        final Label label)
    {
        // the RESIZED flag guards against shifting the same label twice
        // when it is reachable through several paths
        if ((label.status & Label.RESIZED) == 0) {
            label.position = getNewOffset(indexes, sizes, 0, label.position);
            label.status |= Label.RESIZED;
        }
    }
+}
diff --git a/src/asm/scala/tools/asm/Opcodes.java b/src/asm/scala/tools/asm/Opcodes.java
new file mode 100644
index 0000000..809e5ae
--- /dev/null
+++ b/src/asm/scala/tools/asm/Opcodes.java
@@ -0,0 +1,358 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+/**
+ * Defines the JVM opcodes, access flags and array type codes. This interface
+ * does not define all the JVM opcodes because some opcodes are automatically
+ * handled. For example, the xLOAD and xSTORE opcodes are automatically replaced
+ * by xLOAD_n and xSTORE_n opcodes when possible. The xLOAD_n and xSTORE_n
+ * opcodes are therefore not defined in this interface. Likewise for LDC,
+ * automatically replaced by LDC_W or LDC2_W when necessary, WIDE, GOTO_W and
+ * JSR_W.
+ *
+ * @author Eric Bruneton
+ * @author Eugene Kuleshov
+ */
+public interface Opcodes {
+
+ // ASM API versions
+
+ int ASM4 = 4 << 16 | 0 << 8 | 0;
+
+ // versions
+
+ int V1_1 = 3 << 16 | 45;
+ int V1_2 = 0 << 16 | 46;
+ int V1_3 = 0 << 16 | 47;
+ int V1_4 = 0 << 16 | 48;
+ int V1_5 = 0 << 16 | 49;
+ int V1_6 = 0 << 16 | 50;
+ int V1_7 = 0 << 16 | 51;
+
+ // access flags
+
+ int ACC_PUBLIC = 0x0001; // class, field, method
+ int ACC_PRIVATE = 0x0002; // class, field, method
+ int ACC_PROTECTED = 0x0004; // class, field, method
+ int ACC_STATIC = 0x0008; // field, method
+ int ACC_FINAL = 0x0010; // class, field, method
+ int ACC_SUPER = 0x0020; // class
+ int ACC_SYNCHRONIZED = 0x0020; // method
+ int ACC_VOLATILE = 0x0040; // field
+ int ACC_BRIDGE = 0x0040; // method
+ int ACC_VARARGS = 0x0080; // method
+ int ACC_TRANSIENT = 0x0080; // field
+ int ACC_NATIVE = 0x0100; // method
+ int ACC_INTERFACE = 0x0200; // class
+ int ACC_ABSTRACT = 0x0400; // class, method
+ int ACC_STRICT = 0x0800; // method
+ int ACC_SYNTHETIC = 0x1000; // class, field, method
+ int ACC_ANNOTATION = 0x2000; // class
+ int ACC_ENUM = 0x4000; // class(?) field inner
+
+ // ASM specific pseudo access flags
+
+ int ACC_DEPRECATED = 0x20000; // class, field, method
+
+ // types for NEWARRAY
+
+ int T_BOOLEAN = 4;
+ int T_CHAR = 5;
+ int T_FLOAT = 6;
+ int T_DOUBLE = 7;
+ int T_BYTE = 8;
+ int T_SHORT = 9;
+ int T_INT = 10;
+ int T_LONG = 11;
+
+ // tags for Handle
+
+ int H_GETFIELD = 1;
+ int H_GETSTATIC = 2;
+ int H_PUTFIELD = 3;
+ int H_PUTSTATIC = 4;
+ int H_INVOKEVIRTUAL = 5;
+ int H_INVOKESTATIC = 6;
+ int H_INVOKESPECIAL = 7;
+ int H_NEWINVOKESPECIAL = 8;
+ int H_INVOKEINTERFACE = 9;
+
+ // stack map frame types
+
+ /**
+ * Represents an expanded frame. See {@link ClassReader#EXPAND_FRAMES}.
+ */
+ int F_NEW = -1;
+
+ /**
+ * Represents a compressed frame with complete frame data.
+ */
+ int F_FULL = 0;
+
+ /**
+ * Represents a compressed frame where locals are the same as the locals in
+ * the previous frame, except that additional 1-3 locals are defined, and
+ * with an empty stack.
+ */
+ int F_APPEND = 1;
+
+ /**
+ * Represents a compressed frame where locals are the same as the locals in
+ * the previous frame, except that the last 1-3 locals are absent and with
+ * an empty stack.
+ */
+ int F_CHOP = 2;
+
+ /**
+ * Represents a compressed frame with exactly the same locals as the
+ * previous frame and with an empty stack.
+ */
+ int F_SAME = 3;
+
+ /**
+ * Represents a compressed frame with exactly the same locals as the
+ * previous frame and with a single value on the stack.
+ */
+ int F_SAME1 = 4;
+
+ Integer TOP = new Integer(0);
+ Integer INTEGER = new Integer(1);
+ Integer FLOAT = new Integer(2);
+ Integer DOUBLE = new Integer(3);
+ Integer LONG = new Integer(4);
+ Integer NULL = new Integer(5);
+ Integer UNINITIALIZED_THIS = new Integer(6);
+
+ // opcodes // visit method (- = idem)
+
+ int NOP = 0; // visitInsn
+ int ACONST_NULL = 1; // -
+ int ICONST_M1 = 2; // -
+ int ICONST_0 = 3; // -
+ int ICONST_1 = 4; // -
+ int ICONST_2 = 5; // -
+ int ICONST_3 = 6; // -
+ int ICONST_4 = 7; // -
+ int ICONST_5 = 8; // -
+ int LCONST_0 = 9; // -
+ int LCONST_1 = 10; // -
+ int FCONST_0 = 11; // -
+ int FCONST_1 = 12; // -
+ int FCONST_2 = 13; // -
+ int DCONST_0 = 14; // -
+ int DCONST_1 = 15; // -
+ int BIPUSH = 16; // visitIntInsn
+ int SIPUSH = 17; // -
+ int LDC = 18; // visitLdcInsn
+ // int LDC_W = 19; // -
+ // int LDC2_W = 20; // -
+ int ILOAD = 21; // visitVarInsn
+ int LLOAD = 22; // -
+ int FLOAD = 23; // -
+ int DLOAD = 24; // -
+ int ALOAD = 25; // -
+ // int ILOAD_0 = 26; // -
+ // int ILOAD_1 = 27; // -
+ // int ILOAD_2 = 28; // -
+ // int ILOAD_3 = 29; // -
+ // int LLOAD_0 = 30; // -
+ // int LLOAD_1 = 31; // -
+ // int LLOAD_2 = 32; // -
+ // int LLOAD_3 = 33; // -
+ // int FLOAD_0 = 34; // -
+ // int FLOAD_1 = 35; // -
+ // int FLOAD_2 = 36; // -
+ // int FLOAD_3 = 37; // -
+ // int DLOAD_0 = 38; // -
+ // int DLOAD_1 = 39; // -
+ // int DLOAD_2 = 40; // -
+ // int DLOAD_3 = 41; // -
+ // int ALOAD_0 = 42; // -
+ // int ALOAD_1 = 43; // -
+ // int ALOAD_2 = 44; // -
+ // int ALOAD_3 = 45; // -
+ int IALOAD = 46; // visitInsn
+ int LALOAD = 47; // -
+ int FALOAD = 48; // -
+ int DALOAD = 49; // -
+ int AALOAD = 50; // -
+ int BALOAD = 51; // -
+ int CALOAD = 52; // -
+ int SALOAD = 53; // -
+ int ISTORE = 54; // visitVarInsn
+ int LSTORE = 55; // -
+ int FSTORE = 56; // -
+ int DSTORE = 57; // -
+ int ASTORE = 58; // -
+ // int ISTORE_0 = 59; // -
+ // int ISTORE_1 = 60; // -
+ // int ISTORE_2 = 61; // -
+ // int ISTORE_3 = 62; // -
+ // int LSTORE_0 = 63; // -
+ // int LSTORE_1 = 64; // -
+ // int LSTORE_2 = 65; // -
+ // int LSTORE_3 = 66; // -
+ // int FSTORE_0 = 67; // -
+ // int FSTORE_1 = 68; // -
+ // int FSTORE_2 = 69; // -
+ // int FSTORE_3 = 70; // -
+ // int DSTORE_0 = 71; // -
+ // int DSTORE_1 = 72; // -
+ // int DSTORE_2 = 73; // -
+ // int DSTORE_3 = 74; // -
+ // int ASTORE_0 = 75; // -
+ // int ASTORE_1 = 76; // -
+ // int ASTORE_2 = 77; // -
+ // int ASTORE_3 = 78; // -
+ int IASTORE = 79; // visitInsn
+ int LASTORE = 80; // -
+ int FASTORE = 81; // -
+ int DASTORE = 82; // -
+ int AASTORE = 83; // -
+ int BASTORE = 84; // -
+ int CASTORE = 85; // -
+ int SASTORE = 86; // -
+ int POP = 87; // -
+ int POP2 = 88; // -
+ int DUP = 89; // -
+ int DUP_X1 = 90; // -
+ int DUP_X2 = 91; // -
+ int DUP2 = 92; // -
+ int DUP2_X1 = 93; // -
+ int DUP2_X2 = 94; // -
+ int SWAP = 95; // -
+ int IADD = 96; // -
+ int LADD = 97; // -
+ int FADD = 98; // -
+ int DADD = 99; // -
+ int ISUB = 100; // -
+ int LSUB = 101; // -
+ int FSUB = 102; // -
+ int DSUB = 103; // -
+ int IMUL = 104; // -
+ int LMUL = 105; // -
+ int FMUL = 106; // -
+ int DMUL = 107; // -
+ int IDIV = 108; // -
+ int LDIV = 109; // -
+ int FDIV = 110; // -
+ int DDIV = 111; // -
+ int IREM = 112; // -
+ int LREM = 113; // -
+ int FREM = 114; // -
+ int DREM = 115; // -
+ int INEG = 116; // -
+ int LNEG = 117; // -
+ int FNEG = 118; // -
+ int DNEG = 119; // -
+ int ISHL = 120; // -
+ int LSHL = 121; // -
+ int ISHR = 122; // -
+ int LSHR = 123; // -
+ int IUSHR = 124; // -
+ int LUSHR = 125; // -
+ int IAND = 126; // -
+ int LAND = 127; // -
+ int IOR = 128; // -
+ int LOR = 129; // -
+ int IXOR = 130; // -
+ int LXOR = 131; // -
+ int IINC = 132; // visitIincInsn
+ int I2L = 133; // visitInsn
+ int I2F = 134; // -
+ int I2D = 135; // -
+ int L2I = 136; // -
+ int L2F = 137; // -
+ int L2D = 138; // -
+ int F2I = 139; // -
+ int F2L = 140; // -
+ int F2D = 141; // -
+ int D2I = 142; // -
+ int D2L = 143; // -
+ int D2F = 144; // -
+ int I2B = 145; // -
+ int I2C = 146; // -
+ int I2S = 147; // -
+ int LCMP = 148; // -
+ int FCMPL = 149; // -
+ int FCMPG = 150; // -
+ int DCMPL = 151; // -
+ int DCMPG = 152; // -
+ int IFEQ = 153; // visitJumpInsn
+ int IFNE = 154; // -
+ int IFLT = 155; // -
+ int IFGE = 156; // -
+ int IFGT = 157; // -
+ int IFLE = 158; // -
+ int IF_ICMPEQ = 159; // -
+ int IF_ICMPNE = 160; // -
+ int IF_ICMPLT = 161; // -
+ int IF_ICMPGE = 162; // -
+ int IF_ICMPGT = 163; // -
+ int IF_ICMPLE = 164; // -
+ int IF_ACMPEQ = 165; // -
+ int IF_ACMPNE = 166; // -
+ int GOTO = 167; // -
+ int JSR = 168; // -
+ int RET = 169; // visitVarInsn
+ int TABLESWITCH = 170; // visitTableSwitchInsn
+ int LOOKUPSWITCH = 171; // visitLookupSwitchInsn
+ int IRETURN = 172; // visitInsn
+ int LRETURN = 173; // -
+ int FRETURN = 174; // -
+ int DRETURN = 175; // -
+ int ARETURN = 176; // -
+ int RETURN = 177; // -
+ int GETSTATIC = 178; // visitFieldInsn
+ int PUTSTATIC = 179; // -
+ int GETFIELD = 180; // -
+ int PUTFIELD = 181; // -
+ int INVOKEVIRTUAL = 182; // visitMethodInsn
+ int INVOKESPECIAL = 183; // -
+ int INVOKESTATIC = 184; // -
+ int INVOKEINTERFACE = 185; // -
+ int INVOKEDYNAMIC = 186; // visitInvokeDynamicInsn
+ int NEW = 187; // visitTypeInsn
+ int NEWARRAY = 188; // visitIntInsn
+ int ANEWARRAY = 189; // visitTypeInsn
+ int ARRAYLENGTH = 190; // visitInsn
+ int ATHROW = 191; // -
+ int CHECKCAST = 192; // visitTypeInsn
+ int INSTANCEOF = 193; // -
+ int MONITORENTER = 194; // visitInsn
+ int MONITOREXIT = 195; // -
+ // int WIDE = 196; // NOT VISITED
+ int MULTIANEWARRAY = 197; // visitMultiANewArrayInsn
+ int IFNULL = 198; // visitJumpInsn
+ int IFNONNULL = 199; // -
+ // int GOTO_W = 200; // -
+ // int JSR_W = 201; // -
+}
diff --git a/src/asm/scala/tools/asm/Type.java b/src/asm/scala/tools/asm/Type.java
new file mode 100644
index 0000000..bf11071
--- /dev/null
+++ b/src/asm/scala/tools/asm/Type.java
@@ -0,0 +1,865 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Method;
+
+/**
+ * A Java field or method type. This class can be used to make it easier to
+ * manipulate type and method descriptors.
+ *
+ * @author Eric Bruneton
+ * @author Chris Nokleberg
+ */
+public class Type {
+
+ /**
+ * The sort of the <tt>void</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int VOID = 0;
+
+ /**
+ * The sort of the <tt>boolean</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int BOOLEAN = 1;
+
+ /**
+ * The sort of the <tt>char</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int CHAR = 2;
+
+ /**
+ * The sort of the <tt>byte</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int BYTE = 3;
+
+ /**
+ * The sort of the <tt>short</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int SHORT = 4;
+
+ /**
+ * The sort of the <tt>int</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int INT = 5;
+
+ /**
+ * The sort of the <tt>float</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int FLOAT = 6;
+
+ /**
+ * The sort of the <tt>long</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int LONG = 7;
+
+ /**
+ * The sort of the <tt>double</tt> type. See {@link #getSort getSort}.
+ */
+ public static final int DOUBLE = 8;
+
+ /**
+ * The sort of array reference types. See {@link #getSort getSort}.
+ */
+ public static final int ARRAY = 9;
+
+ /**
+ * The sort of object reference types. See {@link #getSort getSort}.
+ */
+ public static final int OBJECT = 10;
+
+ /**
+ * The sort of method types. See {@link #getSort getSort}.
+ */
+ public static final int METHOD = 11;
+
+ /**
+ * The <tt>void</tt> type.
+ */
+ public static final Type VOID_TYPE = new Type(VOID, null, ('V' << 24)
+ | (5 << 16) | (0 << 8) | 0, 1);
+
+ /**
+ * The <tt>boolean</tt> type.
+ */
+ public static final Type BOOLEAN_TYPE = new Type(BOOLEAN, null, ('Z' << 24)
+ | (0 << 16) | (5 << 8) | 1, 1);
+
+ /**
+ * The <tt>char</tt> type.
+ */
+ public static final Type CHAR_TYPE = new Type(CHAR, null, ('C' << 24)
+ | (0 << 16) | (6 << 8) | 1, 1);
+
+ /**
+ * The <tt>byte</tt> type.
+ */
+ public static final Type BYTE_TYPE = new Type(BYTE, null, ('B' << 24)
+ | (0 << 16) | (5 << 8) | 1, 1);
+
+ /**
+ * The <tt>short</tt> type.
+ */
+ public static final Type SHORT_TYPE = new Type(SHORT, null, ('S' << 24)
+ | (0 << 16) | (7 << 8) | 1, 1);
+
+ /**
+ * The <tt>int</tt> type.
+ */
+ public static final Type INT_TYPE = new Type(INT, null, ('I' << 24)
+ | (0 << 16) | (0 << 8) | 1, 1);
+
+ /**
+ * The <tt>float</tt> type.
+ */
+ public static final Type FLOAT_TYPE = new Type(FLOAT, null, ('F' << 24)
+ | (2 << 16) | (2 << 8) | 1, 1);
+
+ /**
+ * The <tt>long</tt> type.
+ */
+ public static final Type LONG_TYPE = new Type(LONG, null, ('J' << 24)
+ | (1 << 16) | (1 << 8) | 2, 1);
+
+ /**
+ * The <tt>double</tt> type.
+ */
+ public static final Type DOUBLE_TYPE = new Type(DOUBLE, null, ('D' << 24)
+ | (3 << 16) | (3 << 8) | 2, 1);
+
+ // ------------------------------------------------------------------------
+ // Fields
+ // ------------------------------------------------------------------------
+
+ /**
+ * The sort of this Java type.
+ */
+ private final int sort;
+
+ /**
+ * A buffer containing the internal name of this Java type. This field is
+ * only used for reference types.
+ */
+ private final char[] buf;
+
+ /**
+ * The offset of the internal name of this Java type in {@link #buf buf} or,
+ * for primitive types, the size, descriptor and getOpcode offsets for this
+ * type (byte 0 contains the size, byte 1 the offset for IALOAD or IASTORE,
+ * byte 2 the offset for all other instructions, byte 3 the descriptor).
+ */
+ private final int off;
+
+ /**
+ * The length of the internal name of this Java type.
+ */
+ private final int len;
+
+ // ------------------------------------------------------------------------
+ // Constructors
+ // ------------------------------------------------------------------------
+
+ /**
+ * Constructs a reference type.
+ *
+ * @param sort the sort of the reference type to be constructed.
+ * @param buf a buffer containing the descriptor of the previous type.
+ * @param off the offset of this descriptor in the previous buffer.
+ * @param len the length of this descriptor.
+ */
+ private Type(final int sort, final char[] buf, final int off, final int len)
+ {
+ this.sort = sort;
+ this.buf = buf;
+ this.off = off;
+ this.len = len;
+ }
+
+ /**
+ * Returns the Java type corresponding to the given type descriptor.
+ *
+ * @param typeDescriptor a field or method type descriptor.
+ * @return the Java type corresponding to the given type descriptor.
+ */
+ public static Type getType(final String typeDescriptor) {
+ return getType(typeDescriptor.toCharArray(), 0);
+ }
+
+ /**
+ * Returns the Java type corresponding to the given internal name.
+ *
+ * @param internalName an internal name.
+ * @return the Java type corresponding to the given internal name.
+ */
+ public static Type getObjectType(final String internalName) {
+ char[] buf = internalName.toCharArray();
+ return new Type(buf[0] == '[' ? ARRAY : OBJECT, buf, 0, buf.length);
+ }
+
+ /**
+ * Returns the Java type corresponding to the given method descriptor.
+ * Equivalent to <code>Type.getType(methodDescriptor)</code>.
+ *
+ * @param methodDescriptor a method descriptor.
+ * @return the Java type corresponding to the given method descriptor.
+ */
+ public static Type getMethodType(final String methodDescriptor) {
+ return getType(methodDescriptor.toCharArray(), 0);
+ }
+
+ /**
+ * Returns the Java method type corresponding to the given argument and
+ * return types.
+ *
+ * @param returnType the return type of the method.
+ * @param argumentTypes the argument types of the method.
+ * @return the Java type corresponding to the given argument and return types.
+ */
+ public static Type getMethodType(final Type returnType, final Type... argumentTypes) {
+ return getType(getMethodDescriptor(returnType, argumentTypes));
+ }
+
+ /**
+ * Returns the Java type corresponding to the given class.
+ *
+ * @param c a class.
+ * @return the Java type corresponding to the given class.
+ */
+ public static Type getType(final Class<?> c) {
+ if (c.isPrimitive()) {
+ if (c == Integer.TYPE) {
+ return INT_TYPE;
+ } else if (c == Void.TYPE) {
+ return VOID_TYPE;
+ } else if (c == Boolean.TYPE) {
+ return BOOLEAN_TYPE;
+ } else if (c == Byte.TYPE) {
+ return BYTE_TYPE;
+ } else if (c == Character.TYPE) {
+ return CHAR_TYPE;
+ } else if (c == Short.TYPE) {
+ return SHORT_TYPE;
+ } else if (c == Double.TYPE) {
+ return DOUBLE_TYPE;
+ } else if (c == Float.TYPE) {
+ return FLOAT_TYPE;
+ } else /* if (c == Long.TYPE) */{
+ return LONG_TYPE;
+ }
+ } else {
+ return getType(getDescriptor(c));
+ }
+ }
+
+ /**
+ * Returns the Java method type corresponding to the given constructor.
+ *
+ * @param c a {@link Constructor Constructor} object.
+ * @return the Java method type corresponding to the given constructor.
+ */
+ public static Type getType(final Constructor<?> c) {
+ return getType(getConstructorDescriptor(c));
+ }
+
+ /**
+ * Returns the Java method type corresponding to the given method.
+ *
+ * @param m a {@link Method Method} object.
+ * @return the Java method type corresponding to the given method.
+ */
+ public static Type getType(final Method m) {
+ return getType(getMethodDescriptor(m));
+ }
+
+ /**
+ * Returns the Java types corresponding to the argument types of the given
+ * method descriptor.
+ *
+ * @param methodDescriptor a method descriptor.
+ * @return the Java types corresponding to the argument types of the given
+ * method descriptor.
+ */
+ public static Type[] getArgumentTypes(final String methodDescriptor) {
+ char[] buf = methodDescriptor.toCharArray();
+ int off = 1;
+ int size = 0;
+ while (true) {
+ char car = buf[off++];
+ if (car == ')') {
+ break;
+ } else if (car == 'L') {
+ while (buf[off++] != ';') {
+ }
+ ++size;
+ } else if (car != '[') {
+ ++size;
+ }
+ }
+ Type[] args = new Type[size];
+ off = 1;
+ size = 0;
+ while (buf[off] != ')') {
+ args[size] = getType(buf, off);
+ off += args[size].len + (args[size].sort == OBJECT ? 2 : 0);
+ size += 1;
+ }
+ return args;
+ }
+
+ /**
+ * Returns the Java types corresponding to the argument types of the given
+ * method.
+ *
+ * @param method a method.
+ * @return the Java types corresponding to the argument types of the given
+ * method.
+ */
+ public static Type[] getArgumentTypes(final Method method) {
+ Class<?>[] classes = method.getParameterTypes();
+ Type[] types = new Type[classes.length];
+ for (int i = classes.length - 1; i >= 0; --i) {
+ types[i] = getType(classes[i]);
+ }
+ return types;
+ }
+
+ /**
+ * Returns the Java type corresponding to the return type of the given
+ * method descriptor.
+ *
+ * @param methodDescriptor a method descriptor.
+ * @return the Java type corresponding to the return type of the given
+ * method descriptor.
+ */
+ public static Type getReturnType(final String methodDescriptor) {
+ char[] buf = methodDescriptor.toCharArray();
+ return getType(buf, methodDescriptor.indexOf(')') + 1);
+ }
+
+ /**
+ * Returns the Java type corresponding to the return type of the given
+ * method.
+ *
+ * @param method a method.
+ * @return the Java type corresponding to the return type of the given
+ * method.
+ */
+ public static Type getReturnType(final Method method) {
+ return getType(method.getReturnType());
+ }
+
+ /**
+ * Computes the size of the arguments and of the return value of a method.
+ *
+ * @param desc the descriptor of a method.
+ * @return the size of the arguments of the method (plus one for the
+ * implicit this argument), argSize, and the size of its return
+ * value, retSize, packed into a single int i =
+ * <tt>(argSize << 2) | retSize</tt> (argSize is therefore equal
+ * to <tt>i >> 2</tt>, and retSize to <tt>i & 0x03</tt>).
+ */
+ public static int getArgumentsAndReturnSizes(final String desc) {
+ int n = 1;
+ int c = 1;
+ while (true) {
+ char car = desc.charAt(c++);
+ if (car == ')') {
+ car = desc.charAt(c);
+ return n << 2
+ | (car == 'V' ? 0 : (car == 'D' || car == 'J' ? 2 : 1));
+ } else if (car == 'L') {
+ while (desc.charAt(c++) != ';') {
+ }
+ n += 1;
+ } else if (car == '[') {
+ while ((car = desc.charAt(c)) == '[') {
+ ++c;
+ }
+ if (car == 'D' || car == 'J') {
+ n -= 1;
+ }
+ } else if (car == 'D' || car == 'J') {
+ n += 2;
+ } else {
+ n += 1;
+ }
+ }
+ }
+
+ /**
+ * Returns the Java type corresponding to the given type descriptor. For
+ * method descriptors, buf is supposed to contain nothing more than the
+ * descriptor itself.
+ *
+ * @param buf a buffer containing a type descriptor.
+ * @param off the offset of this descriptor in the previous buffer.
+ * @return the Java type corresponding to the given type descriptor.
+ */
+ private static Type getType(final char[] buf, final int off) {
+ int len;
+ switch (buf[off]) {
+ case 'V':
+ return VOID_TYPE;
+ case 'Z':
+ return BOOLEAN_TYPE;
+ case 'C':
+ return CHAR_TYPE;
+ case 'B':
+ return BYTE_TYPE;
+ case 'S':
+ return SHORT_TYPE;
+ case 'I':
+ return INT_TYPE;
+ case 'F':
+ return FLOAT_TYPE;
+ case 'J':
+ return LONG_TYPE;
+ case 'D':
+ return DOUBLE_TYPE;
+ case '[':
+ len = 1;
+ while (buf[off + len] == '[') {
+ ++len;
+ }
+ if (buf[off + len] == 'L') {
+ ++len;
+ while (buf[off + len] != ';') {
+ ++len;
+ }
+ }
+ return new Type(ARRAY, buf, off, len + 1);
+ case 'L':
+ len = 1;
+ while (buf[off + len] != ';') {
+ ++len;
+ }
+ return new Type(OBJECT, buf, off + 1, len - 1);
+ // case '(':
+ default:
+ return new Type(METHOD, buf, 0, buf.length);
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Accessors
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the sort of this Java type.
+ *
+ * @return {@link #VOID VOID}, {@link #BOOLEAN BOOLEAN},
+ * {@link #CHAR CHAR}, {@link #BYTE BYTE}, {@link #SHORT SHORT},
+ * {@link #INT INT}, {@link #FLOAT FLOAT}, {@link #LONG LONG},
+ * {@link #DOUBLE DOUBLE}, {@link #ARRAY ARRAY},
+ * {@link #OBJECT OBJECT} or {@link #METHOD METHOD}.
+ */
+ public int getSort() {
+ return sort;
+ }
+
+ /**
+ * Returns the number of dimensions of this array type. This method should
+ * only be used for an array type.
+ *
+ * @return the number of dimensions of this array type.
+ */
+ public int getDimensions() {
+ int i = 1;
+ while (buf[off + i] == '[') {
+ ++i;
+ }
+ return i;
+ }
+
+ /**
+ * Returns the type of the elements of this array type. This method should
+ * only be used for an array type.
+ *
+ * @return the type of the elements of this array type.
+ */
+ public Type getElementType() {
+ return getType(buf, off + getDimensions());
+ }
+
+ /**
+ * Returns the binary name of the class corresponding to this type. This
+ * method must not be used on method types.
+ *
+ * @return the binary name of the class corresponding to this type.
+ */
+ public String getClassName() {
+ switch (sort) {
+ case VOID:
+ return "void";
+ case BOOLEAN:
+ return "boolean";
+ case CHAR:
+ return "char";
+ case BYTE:
+ return "byte";
+ case SHORT:
+ return "short";
+ case INT:
+ return "int";
+ case FLOAT:
+ return "float";
+ case LONG:
+ return "long";
+ case DOUBLE:
+ return "double";
+ case ARRAY:
+ StringBuffer b = new StringBuffer(getElementType().getClassName());
+ for (int i = getDimensions(); i > 0; --i) {
+ b.append("[]");
+ }
+ return b.toString();
+ case OBJECT:
+ return new String(buf, off, len).replace('/', '.');
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Returns the internal name of the class corresponding to this object or
+ * array type. The internal name of a class is its fully qualified name (as
+ * returned by Class.getName()), where '.' are replaced by '/'. This method
+ * should only be used for an object or array type.
+ *
+ * @return the internal name of the class corresponding to this object type.
+ */
+ public String getInternalName() {
+ return new String(buf, off, len);
+ }
+
+ /**
+ * Returns the argument types of methods of this type. This method should
+ * only be used for method types.
+ *
+ * @return the argument types of methods of this type.
+ */
+ public Type[] getArgumentTypes() {
+ return getArgumentTypes(getDescriptor());
+ }
+
+ /**
+ * Returns the return type of methods of this type. This method should only
+ * be used for method types.
+ *
+ * @return the return type of methods of this type.
+ */
+ public Type getReturnType() {
+ return getReturnType(getDescriptor());
+ }
+
+ /**
+ * Returns the size of the arguments and of the return value of methods of
+ * this type. This method should only be used for method types.
+ *
+ * @return the size of the arguments (plus one for the implicit this
+ * argument), argSize, and the size of the return value, retSize,
+ * packed into a single int i = <tt>(argSize << 2) | retSize</tt>
+ * (argSize is therefore equal to <tt>i >> 2</tt>, and retSize to
+ * <tt>i & 0x03</tt>).
+ */
+ public int getArgumentsAndReturnSizes() {
+ return getArgumentsAndReturnSizes(getDescriptor());
+ }
+
+ // ------------------------------------------------------------------------
+ // Conversion to type descriptors
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the descriptor corresponding to this Java type.
+ *
+ * @return the descriptor corresponding to this Java type.
+ */
+ public String getDescriptor() {
+ StringBuffer buf = new StringBuffer();
+ getDescriptor(buf);
+ return buf.toString();
+ }
+
+ /**
+ * Returns the descriptor corresponding to the given argument and return
+ * types.
+ *
+ * @param returnType the return type of the method.
+ * @param argumentTypes the argument types of the method.
+ * @return the descriptor corresponding to the given argument and return
+ * types.
+ */
+ public static String getMethodDescriptor(
+ final Type returnType,
+ final Type... argumentTypes)
+ {
+ StringBuffer buf = new StringBuffer();
+ buf.append('(');
+ for (int i = 0; i < argumentTypes.length; ++i) {
+ argumentTypes[i].getDescriptor(buf);
+ }
+ buf.append(')');
+ returnType.getDescriptor(buf);
+ return buf.toString();
+ }
+
+ /**
+ * Appends the descriptor corresponding to this Java type to the given
+ * string buffer.
+ *
+ * @param buf the string buffer to which the descriptor must be appended.
+ */
+ private void getDescriptor(final StringBuffer buf) {
+ if (this.buf == null) {
+ // descriptor is in byte 3 of 'off' for primitive types (buf == null)
+ buf.append((char) ((off & 0xFF000000) >>> 24));
+ } else if (sort == OBJECT) {
+ buf.append('L');
+ buf.append(this.buf, off, len);
+ buf.append(';');
+ } else { // sort == ARRAY || sort == METHOD
+ buf.append(this.buf, off, len);
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Direct conversion from classes to type descriptors,
+ // without intermediate Type objects
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the internal name of the given class. The internal name of a
+ * class is its fully qualified name, as returned by Class.getName(), where
+ * '.' are replaced by '/'.
+ *
+ * @param c an object or array class.
+ * @return the internal name of the given class.
+ */
+ public static String getInternalName(final Class<?> c) {
+ return c.getName().replace('.', '/');
+ }
+
+ /**
+ * Returns the descriptor corresponding to the given Java type.
+ *
+ * @param c an object class, a primitive class or an array class.
+ * @return the descriptor corresponding to the given class.
+ */
+ public static String getDescriptor(final Class<?> c) {
+ StringBuffer buf = new StringBuffer();
+ getDescriptor(buf, c);
+ return buf.toString();
+ }
+
+ /**
+ * Returns the descriptor corresponding to the given constructor.
+ *
+ * @param c a {@link Constructor Constructor} object.
+ * @return the descriptor of the given constructor.
+ */
+ public static String getConstructorDescriptor(final Constructor<?> c) {
+ Class<?>[] parameters = c.getParameterTypes();
+ StringBuffer buf = new StringBuffer();
+ buf.append('(');
+ for (int i = 0; i < parameters.length; ++i) {
+ getDescriptor(buf, parameters[i]);
+ }
+ return buf.append(")V").toString();
+ }
+
+ /**
+ * Returns the descriptor corresponding to the given method.
+ *
+ * @param m a {@link Method Method} object.
+ * @return the descriptor of the given method.
+ */
+ public static String getMethodDescriptor(final Method m) {
+ Class<?>[] parameters = m.getParameterTypes();
+ StringBuffer buf = new StringBuffer();
+ buf.append('(');
+ for (int i = 0; i < parameters.length; ++i) {
+ getDescriptor(buf, parameters[i]);
+ }
+ buf.append(')');
+ getDescriptor(buf, m.getReturnType());
+ return buf.toString();
+ }
+
+ /**
+ * Appends the descriptor of the given class to the given string buffer.
+ *
+ * @param buf the string buffer to which the descriptor must be appended.
+ * @param c the class whose descriptor must be computed.
+ */
+ private static void getDescriptor(final StringBuffer buf, final Class<?> c) {
+ Class<?> d = c;
+ while (true) {
+ if (d.isPrimitive()) {
+ char car;
+ if (d == Integer.TYPE) {
+ car = 'I';
+ } else if (d == Void.TYPE) {
+ car = 'V';
+ } else if (d == Boolean.TYPE) {
+ car = 'Z';
+ } else if (d == Byte.TYPE) {
+ car = 'B';
+ } else if (d == Character.TYPE) {
+ car = 'C';
+ } else if (d == Short.TYPE) {
+ car = 'S';
+ } else if (d == Double.TYPE) {
+ car = 'D';
+ } else if (d == Float.TYPE) {
+ car = 'F';
+ } else /* if (d == Long.TYPE) */{
+ car = 'J';
+ }
+ buf.append(car);
+ return;
+ } else if (d.isArray()) {
+ buf.append('[');
+ d = d.getComponentType();
+ } else {
+ buf.append('L');
+ String name = d.getName();
+ int len = name.length();
+ for (int i = 0; i < len; ++i) {
+ char car = name.charAt(i);
+ buf.append(car == '.' ? '/' : car);
+ }
+ buf.append(';');
+ return;
+ }
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Corresponding size and opcodes
+ // ------------------------------------------------------------------------
+
+ /**
+ * Returns the size of values of this type. This method must not be used for
+ * method types.
+ *
+ * @return the size of values of this type, i.e., 2 for <tt>long</tt> and
+ * <tt>double</tt>, 0 for <tt>void</tt> and 1 otherwise.
+ */
+ public int getSize() {
+ // the size is in byte 0 of 'off' for primitive types (buf == null)
+ return buf == null ? (off & 0xFF) : 1;
+ }
+
+ /**
+ * Returns a JVM instruction opcode adapted to this Java type. This method
+ * must not be used for method types.
+ *
+ * @param opcode a JVM instruction opcode. This opcode must be one of ILOAD,
+ * ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL,
+ * ISHR, IUSHR, IAND, IOR, IXOR and IRETURN.
+ * @return an opcode that is similar to the given opcode, but adapted to
+ * this Java type. For example, if this type is <tt>float</tt> and
+ * <tt>opcode</tt> is IRETURN, this method returns FRETURN.
+ */
+ public int getOpcode(final int opcode) {
+ if (opcode == Opcodes.IALOAD || opcode == Opcodes.IASTORE) {
+ // the offset for IALOAD or IASTORE is in byte 1 of 'off' for
+ // primitive types (buf == null)
+ return opcode + (buf == null ? (off & 0xFF00) >> 8 : 4);
+ } else {
+ // the offset for other instructions is in byte 2 of 'off' for
+ // primitive types (buf == null)
+ return opcode + (buf == null ? (off & 0xFF0000) >> 16 : 4);
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Equals, hashCode and toString
+ // ------------------------------------------------------------------------
+
+ /**
+ * Tests if the given object is equal to this type.
+ *
+ * @param o the object to be compared to this type.
+ * @return <tt>true</tt> if the given object is equal to this type.
+ */
+ @Override
+ public boolean equals(final Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (!(o instanceof Type)) {
+ return false;
+ }
+ Type t = (Type) o;
+ if (sort != t.sort) {
+ return false;
+ }
+ if (sort >= ARRAY) {
+ if (len != t.len) {
+ return false;
+ }
+ for (int i = off, j = t.off, end = i + len; i < end; i++, j++) {
+ if (buf[i] != t.buf[j]) {
+ return false;
+ }
+ }
+ }
+ return true;
+ }
+
+ /**
+ * Returns a hash code value for this type.
+ *
+ * @return a hash code value for this type.
+ */
+ @Override
+ public int hashCode() {
+ int hc = 13 * sort;
+ if (sort >= ARRAY) {
+ for (int i = off, end = i + len; i < end; i++) {
+ hc = 17 * (hc + buf[i]);
+ }
+ }
+ return hc;
+ }
+
+ /**
+ * Returns a string representation of this type.
+ *
+ * @return the descriptor of this type.
+ */
+ @Override
+ public String toString() {
+ return getDescriptor();
+ }
+}
diff --git a/src/asm/scala/tools/asm/signature/SignatureReader.java b/src/asm/scala/tools/asm/signature/SignatureReader.java
new file mode 100644
index 0000000..22e6427
--- /dev/null
+++ b/src/asm/scala/tools/asm/signature/SignatureReader.java
@@ -0,0 +1,229 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.signature;
+
+/**
+ * A type signature parser to make a signature visitor visit an existing
+ * signature.
+ *
+ * @author Thomas Hallgren
+ * @author Eric Bruneton
+ */
+public class SignatureReader {
+
+ /**
+ * The signature to be read.
+ */
+ private final String signature;
+
+ /**
+ * Constructs a {@link SignatureReader} for the given signature.
+ *
+ * @param signature A <i>ClassSignature</i>, <i>MethodTypeSignature</i>,
+ * or <i>FieldTypeSignature</i>.
+ */
+ public SignatureReader(final String signature) {
+ this.signature = signature;
+ }
+
+ /**
+ * Makes the given visitor visit the signature of this
+ * {@link SignatureReader}. This signature is the one specified in the
+ * constructor (see {@link #SignatureReader(String) SignatureReader}). This
+ * method is intended to be called on a {@link SignatureReader} that was
+ * created using a <i>ClassSignature</i> (such as the
+ * <code>signature</code> parameter of the
+ * {@link scala.tools.asm.ClassVisitor#visit ClassVisitor.visit} method)
+ * or a <i>MethodTypeSignature</i> (such as the <code>signature</code>
+ * parameter of the
+ * {@link scala.tools.asm.ClassVisitor#visitMethod ClassVisitor.visitMethod}
+ * method).
+ *
+ * @param v the visitor that must visit this signature.
+ */
+ public void accept(final SignatureVisitor v) {
+ String signature = this.signature;
+ int len = signature.length();
+ int pos;
+ char c;
+
+ if (signature.charAt(0) == '<') {
+ pos = 2;
+ do {
+ int end = signature.indexOf(':', pos);
+ v.visitFormalTypeParameter(signature.substring(pos - 1, end));
+ pos = end + 1;
+
+ c = signature.charAt(pos);
+ if (c == 'L' || c == '[' || c == 'T') {
+ pos = parseType(signature, pos, v.visitClassBound());
+ }
+
+ while ((c = signature.charAt(pos++)) == ':') {
+ pos = parseType(signature, pos, v.visitInterfaceBound());
+ }
+ } while (c != '>');
+ } else {
+ pos = 0;
+ }
+
+ if (signature.charAt(pos) == '(') {
+ pos++;
+ while (signature.charAt(pos) != ')') {
+ pos = parseType(signature, pos, v.visitParameterType());
+ }
+ pos = parseType(signature, pos + 1, v.visitReturnType());
+ while (pos < len) {
+ pos = parseType(signature, pos + 1, v.visitExceptionType());
+ }
+ } else {
+ pos = parseType(signature, pos, v.visitSuperclass());
+ while (pos < len) {
+ pos = parseType(signature, pos, v.visitInterface());
+ }
+ }
+ }
+
+ /**
+ * Makes the given visitor visit the signature of this
+ * {@link SignatureReader}. This signature is the one specified in the
+ * constructor (see {@link #SignatureReader(String) SignatureReader}). This
+ * method is intended to be called on a {@link SignatureReader} that was
+ * created using a <i>FieldTypeSignature</i>, such as the
+ * <code>signature</code> parameter of the
+ * {@link scala.tools.asm.ClassVisitor#visitField
+ * ClassVisitor.visitField} or {@link
+ * scala.tools.asm.MethodVisitor#visitLocalVariable
+ * MethodVisitor.visitLocalVariable} methods.
+ *
+ * @param v the visitor that must visit this signature.
+ */
+ public void acceptType(final SignatureVisitor v) {
+ parseType(this.signature, 0, v);
+ }
+
+ /**
+ * Parses a field type signature and makes the given visitor visit it.
+ *
+ * @param signature a string containing the signature that must be parsed.
+ * @param pos index of the first character of the signature to be parsed.
+ * @param v the visitor that must visit this signature.
+ * @return the index of the first character after the parsed signature.
+ */
+ private static int parseType(
+ final String signature,
+ int pos,
+ final SignatureVisitor v)
+ {
+ char c;
+ int start, end;
+ boolean visited, inner;
+ String name;
+
+ switch (c = signature.charAt(pos++)) {
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ case 'F':
+ case 'J':
+ case 'D':
+ case 'V':
+ v.visitBaseType(c);
+ return pos;
+
+ case '[':
+ return parseType(signature, pos, v.visitArrayType());
+
+ case 'T':
+ end = signature.indexOf(';', pos);
+ v.visitTypeVariable(signature.substring(pos, end));
+ return end + 1;
+
+ default: // case 'L':
+ start = pos;
+ visited = false;
+ inner = false;
+ for (;;) {
+ switch (c = signature.charAt(pos++)) {
+ case '.':
+ case ';':
+ if (!visited) {
+ name = signature.substring(start, pos - 1);
+ if (inner) {
+ v.visitInnerClassType(name);
+ } else {
+ v.visitClassType(name);
+ }
+ }
+ if (c == ';') {
+ v.visitEnd();
+ return pos;
+ }
+ start = pos;
+ visited = false;
+ inner = true;
+ break;
+
+ case '<':
+ name = signature.substring(start, pos - 1);
+ if (inner) {
+ v.visitInnerClassType(name);
+ } else {
+ v.visitClassType(name);
+ }
+ visited = true;
+ top: for (;;) {
+ switch (c = signature.charAt(pos)) {
+ case '>':
+ break top;
+ case '*':
+ ++pos;
+ v.visitTypeArgument();
+ break;
+ case '+':
+ case '-':
+ pos = parseType(signature,
+ pos + 1,
+ v.visitTypeArgument(c));
+ break;
+ default:
+ pos = parseType(signature,
+ pos,
+ v.visitTypeArgument('='));
+ break;
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/signature/SignatureVisitor.java b/src/asm/scala/tools/asm/signature/SignatureVisitor.java
new file mode 100644
index 0000000..2fc364e
--- /dev/null
+++ b/src/asm/scala/tools/asm/signature/SignatureVisitor.java
@@ -0,0 +1,228 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.signature;
+
+import scala.tools.asm.Opcodes;
+
+/**
+ * A visitor to visit a generic signature. The methods of this interface must be
+ * called in one of the three following orders (the last one is the only valid
+ * order for a {@link SignatureVisitor} that is returned by a method of this
+ * interface): <ul> <li><i>ClassSignature</i> = (
+ * <tt>visitFormalTypeParameter</tt>
+ * <tt>visitClassBound</tt>?
+ * <tt>visitInterfaceBound</tt>* )* ( <tt>visitSuperclass</tt>
+ * <tt>visitInterface</tt>* )</li>
+ * <li><i>MethodSignature</i> = ( <tt>visitFormalTypeParameter</tt>
+ * <tt>visitClassBound</tt>?
+ * <tt>visitInterfaceBound</tt>* )* ( <tt>visitParameterType</tt>*
+ * <tt>visitReturnType</tt>
+ * <tt>visitExceptionType</tt>* )</li> <li><i>TypeSignature</i> =
+ * <tt>visitBaseType</tt> | <tt>visitTypeVariable</tt> |
+ * <tt>visitArrayType</tt> | (
+ * <tt>visitClassType</tt> <tt>visitTypeArgument</tt>* (
+ * <tt>visitInnerClassType</tt> <tt>visitTypeArgument</tt>* )*
+ * <tt>visitEnd</tt> )</li> </ul>
+ *
+ * @author Thomas Hallgren
+ * @author Eric Bruneton
+ */
+public abstract class SignatureVisitor {
+
+ /**
+ * Wildcard for an "extends" type argument.
+ */
+ public final static char EXTENDS = '+';
+
+ /**
+ * Wildcard for a "super" type argument.
+ */
+ public final static char SUPER = '-';
+
+ /**
+ * Wildcard for a normal type argument.
+ */
+ public final static char INSTANCEOF = '=';
+
+ /**
+ * The ASM API version implemented by this visitor. The value of this field
+ * must be one of {@link Opcodes#ASM4}.
+ */
+ protected final int api;
+
+ /**
+ * Constructs a new {@link SignatureVisitor}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ */
+ public SignatureVisitor(final int api) {
+ this.api = api;
+ }
+
+ /**
+ * Visits a formal type parameter.
+ *
+ * @param name the name of the formal parameter.
+ */
+ public void visitFormalTypeParameter(String name) {
+ }
+
+ /**
+ * Visits the class bound of the last visited formal type parameter.
+ *
+ * @return a non null visitor to visit the signature of the class bound.
+ */
+ public SignatureVisitor visitClassBound() {
+ return this;
+ }
+
+ /**
+ * Visits an interface bound of the last visited formal type parameter.
+ *
+ * @return a non null visitor to visit the signature of the interface bound.
+ */
+ public SignatureVisitor visitInterfaceBound() {
+ return this;
+ }
+
+ /**
+ * Visits the type of the super class.
+ *
+ * @return a non null visitor to visit the signature of the super class
+ * type.
+ */
+ public SignatureVisitor visitSuperclass() {
+ return this;
+ }
+
+ /**
+ * Visits the type of an interface implemented by the class.
+ *
+ * @return a non null visitor to visit the signature of the interface type.
+ */
+ public SignatureVisitor visitInterface() {
+ return this;
+ }
+
+ /**
+ * Visits the type of a method parameter.
+ *
+ * @return a non null visitor to visit the signature of the parameter type.
+ */
+ public SignatureVisitor visitParameterType() {
+ return this;
+ }
+
+ /**
+ * Visits the return type of the method.
+ *
+ * @return a non null visitor to visit the signature of the return type.
+ */
+ public SignatureVisitor visitReturnType() {
+ return this;
+ }
+
+ /**
+ * Visits the type of a method exception.
+ *
+ * @return a non null visitor to visit the signature of the exception type.
+ */
+ public SignatureVisitor visitExceptionType() {
+ return this;
+ }
+
+ /**
+ * Visits a signature corresponding to a primitive type.
+ *
+ * @param descriptor the descriptor of the primitive type, or 'V' for
+ * <tt>void</tt>.
+ */
+ public void visitBaseType(char descriptor) {
+ }
+
+ /**
+ * Visits a signature corresponding to a type variable.
+ *
+ * @param name the name of the type variable.
+ */
+ public void visitTypeVariable(String name) {
+ }
+
+ /**
+ * Visits a signature corresponding to an array type.
+ *
+ * @return a non null visitor to visit the signature of the array element
+ * type.
+ */
+ public SignatureVisitor visitArrayType() {
+ return this;
+ }
+
+ /**
+ * Starts the visit of a signature corresponding to a class or interface
+ * type.
+ *
+ * @param name the internal name of the class or interface.
+ */
+ public void visitClassType(String name) {
+ }
+
+ /**
+ * Visits an inner class.
+ *
+ * @param name the local name of the inner class in its enclosing class.
+ */
+ public void visitInnerClassType(String name) {
+ }
+
+ /**
+ * Visits an unbounded type argument of the last visited class or inner
+ * class type.
+ */
+ public void visitTypeArgument() {
+ }
+
+ /**
+ * Visits a type argument of the last visited class or inner class type.
+ *
+ * @param wildcard '+', '-' or '='.
+ * @return a non null visitor to visit the signature of the type argument.
+ */
+ public SignatureVisitor visitTypeArgument(char wildcard) {
+ return this;
+ }
+
+ /**
+ * Ends the visit of a signature corresponding to a class or interface type.
+ */
+ public void visitEnd() {
+ }
+}
diff --git a/src/asm/scala/tools/asm/signature/SignatureWriter.java b/src/asm/scala/tools/asm/signature/SignatureWriter.java
new file mode 100644
index 0000000..a59fdfd
--- /dev/null
+++ b/src/asm/scala/tools/asm/signature/SignatureWriter.java
@@ -0,0 +1,227 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.signature;
+
+import scala.tools.asm.Opcodes;
+
+/**
+ * A signature visitor that generates signatures in string format.
+ *
+ * @author Thomas Hallgren
+ * @author Eric Bruneton
+ */
+public class SignatureWriter extends SignatureVisitor {
+
+ /**
+ * Buffer used to construct the signature.
+ */
+ private final StringBuffer buf = new StringBuffer();
+
+ /**
+ * Indicates if the signature contains formal type parameters.
+ */
+ private boolean hasFormals;
+
+ /**
+ * Indicates if the signature contains method parameter types.
+ */
+ private boolean hasParameters;
+
+ /**
+ * Stack used to keep track of class types that have arguments. Each element
+ * of this stack is a boolean encoded in one bit. The top of the stack is
+ * the lowest order bit. Pushing false = *2, pushing true = *2+1, popping =
+ * /2.
+ */
+ private int argumentStack;
+
+ /**
+ * Constructs a new {@link SignatureWriter} object.
+ */
+ public SignatureWriter() {
+ super(Opcodes.ASM4);
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the SignatureVisitor interface
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visitFormalTypeParameter(final String name) {
+ if (!hasFormals) {
+ hasFormals = true;
+ buf.append('<');
+ }
+ buf.append(name);
+ buf.append(':');
+ }
+
+ @Override
+ public SignatureVisitor visitClassBound() {
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitInterfaceBound() {
+ buf.append(':');
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitSuperclass() {
+ endFormals();
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitInterface() {
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitParameterType() {
+ endFormals();
+ if (!hasParameters) {
+ hasParameters = true;
+ buf.append('(');
+ }
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitReturnType() {
+ endFormals();
+ if (!hasParameters) {
+ buf.append('(');
+ }
+ buf.append(')');
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitExceptionType() {
+ buf.append('^');
+ return this;
+ }
+
+ @Override
+ public void visitBaseType(final char descriptor) {
+ buf.append(descriptor);
+ }
+
+ @Override
+ public void visitTypeVariable(final String name) {
+ buf.append('T');
+ buf.append(name);
+ buf.append(';');
+ }
+
+ @Override
+ public SignatureVisitor visitArrayType() {
+ buf.append('[');
+ return this;
+ }
+
+ @Override
+ public void visitClassType(final String name) {
+ buf.append('L');
+ buf.append(name);
+ argumentStack *= 2;
+ }
+
+ @Override
+ public void visitInnerClassType(final String name) {
+ endArguments();
+ buf.append('.');
+ buf.append(name);
+ argumentStack *= 2;
+ }
+
+ @Override
+ public void visitTypeArgument() {
+ if (argumentStack % 2 == 0) {
+ ++argumentStack;
+ buf.append('<');
+ }
+ buf.append('*');
+ }
+
+ @Override
+ public SignatureVisitor visitTypeArgument(final char wildcard) {
+ if (argumentStack % 2 == 0) {
+ ++argumentStack;
+ buf.append('<');
+ }
+ if (wildcard != '=') {
+ buf.append(wildcard);
+ }
+ return this;
+ }
+
+ @Override
+ public void visitEnd() {
+ endArguments();
+ buf.append(';');
+ }
+
+ /**
+ * Returns the signature that was built by this signature writer.
+ *
+ * @return the signature that was built by this signature writer.
+ */
+ @Override
+ public String toString() {
+ return buf.toString();
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Ends the formal type parameters section of the signature.
+ */
+ private void endFormals() {
+ if (hasFormals) {
+ hasFormals = false;
+ buf.append('>');
+ }
+ }
+
+ /**
+ * Ends the type arguments of a class or inner class type.
+ */
+ private void endArguments() {
+ if (argumentStack % 2 != 0) {
+ buf.append('>');
+ }
+ argumentStack /= 2;
+ }
+}
\ No newline at end of file
diff --git a/src/asm/scala/tools/asm/tree/AbstractInsnNode.java b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
new file mode 100644
index 0000000..471f842
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
@@ -0,0 +1,238 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.List;
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a bytecode instruction. <i>An instruction can appear
+ * at most once in at most one {@link InsnList} at a time</i>.
+ *
+ * @author Eric Bruneton
+ */
+public abstract class AbstractInsnNode {
+
+ /**
+ * The type of {@link InsnNode} instructions.
+ */
+ public static final int INSN = 0;
+
+ /**
+ * The type of {@link IntInsnNode} instructions.
+ */
+ public static final int INT_INSN = 1;
+
+ /**
+ * The type of {@link VarInsnNode} instructions.
+ */
+ public static final int VAR_INSN = 2;
+
+ /**
+ * The type of {@link TypeInsnNode} instructions.
+ */
+ public static final int TYPE_INSN = 3;
+
+ /**
+ * The type of {@link FieldInsnNode} instructions.
+ */
+ public static final int FIELD_INSN = 4;
+
+ /**
+ * The type of {@link MethodInsnNode} instructions.
+ */
+ public static final int METHOD_INSN = 5;
+
+ /**
+ * The type of {@link InvokeDynamicInsnNode} instructions.
+ */
+ public static final int INVOKE_DYNAMIC_INSN = 6;
+
+ /**
+ * The type of {@link JumpInsnNode} instructions.
+ */
+ public static final int JUMP_INSN = 7;
+
+ /**
+ * The type of {@link LabelNode} "instructions".
+ */
+ public static final int LABEL = 8;
+
+ /**
+ * The type of {@link LdcInsnNode} instructions.
+ */
+ public static final int LDC_INSN = 9;
+
+ /**
+ * The type of {@link IincInsnNode} instructions.
+ */
+ public static final int IINC_INSN = 10;
+
+ /**
+ * The type of {@link TableSwitchInsnNode} instructions.
+ */
+ public static final int TABLESWITCH_INSN = 11;
+
+ /**
+ * The type of {@link LookupSwitchInsnNode} instructions.
+ */
+ public static final int LOOKUPSWITCH_INSN = 12;
+
+ /**
+ * The type of {@link MultiANewArrayInsnNode} instructions.
+ */
+ public static final int MULTIANEWARRAY_INSN = 13;
+
+ /**
+ * The type of {@link FrameNode} "instructions".
+ */
+ public static final int FRAME = 14;
+
+ /**
+ * The type of {@link LineNumberNode} "instructions".
+ */
+ public static final int LINE = 15;
+
+ /**
+ * The opcode of this instruction.
+ */
+ protected int opcode;
+
+ /**
+ * Previous instruction in the list to which this instruction belongs.
+ */
+ AbstractInsnNode prev;
+
+ /**
+ * Next instruction in the list to which this instruction belongs.
+ */
+ AbstractInsnNode next;
+
+ /**
+ * Index of this instruction in the list to which it belongs. The value of
+ * this field is correct only when {@link InsnList#cache} is not null. A
+ * value of -1 indicates that this instruction does not belong to any
+ * {@link InsnList}.
+ */
+ int index;
+
+ /**
+ * Constructs a new {@link AbstractInsnNode}.
+ *
+ * @param opcode the opcode of the instruction to be constructed.
+ */
+ protected AbstractInsnNode(final int opcode) {
+ this.opcode = opcode;
+ this.index = -1;
+ }
+
+ /**
+ * Returns the opcode of this instruction.
+ *
+ * @return the opcode of this instruction.
+ */
+ public int getOpcode() {
+ return opcode;
+ }
+
+ /**
+ * Returns the type of this instruction.
+ *
+ * @return the type of this instruction, i.e. one the constants defined in
+ * this class.
+ */
+ public abstract int getType();
+
+ /**
+ * Returns the previous instruction in the list to which this instruction
+ * belongs, if any.
+ *
+ * @return the previous instruction in the list to which this instruction
+ * belongs, if any. May be <tt>null</tt>.
+ */
+ public AbstractInsnNode getPrevious() {
+ return prev;
+ }
+
+ /**
+ * Returns the next instruction in the list to which this instruction
+ * belongs, if any.
+ *
+ * @return the next instruction in the list to which this instruction
+ * belongs, if any. May be <tt>null</tt>.
+ */
+ public AbstractInsnNode getNext() {
+ return next;
+ }
+
+ /**
+ * Makes the given code visitor visit this instruction.
+ *
+ * @param cv a code visitor.
+ */
+ public abstract void accept(final MethodVisitor cv);
+
+ /**
+ * Returns a copy of this instruction.
+ *
+ * @param labels a map from LabelNodes to cloned LabelNodes.
+ * @return a copy of this instruction. The returned instruction does not
+ * belong to any {@link InsnList}.
+ */
+ public abstract AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels);
+
+ /**
+ * Returns the clone of the given label.
+ *
+ * @param label a label.
+ * @param map a map from LabelNodes to cloned LabelNodes.
+ * @return the clone of the given label.
+ */
+ static LabelNode clone(final LabelNode label, final Map<LabelNode, LabelNode> map) {
+ return map.get(label);
+ }
+
+ /**
+ * Returns the clones of the given labels.
+ *
+ * @param labels a list of labels.
+ * @param map a map from LabelNodes to cloned LabelNodes.
+ * @return the clones of the given labels.
+ */
+ static LabelNode[] clone(final List<LabelNode> labels, final Map<LabelNode, LabelNode> map) {
+ LabelNode[] clones = new LabelNode[labels.size()];
+ for (int i = 0; i < clones.length; ++i) {
+ clones[i] = map.get(labels.get(i));
+ }
+ return clones;
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/AnnotationNode.java b/src/asm/scala/tools/asm/tree/AnnotationNode.java
new file mode 100644
index 0000000..9f13255
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/AnnotationNode.java
@@ -0,0 +1,224 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents an annotation.
+ *
+ * @author Eric Bruneton
+ */
+public class AnnotationNode extends AnnotationVisitor {
+
+ /**
+ * The class descriptor of the annotation class.
+ */
+ public String desc;
+
+ /**
+ * The name value pairs of this annotation. Each name value pair is stored
+ * as two consecutive elements in the list. The name is a {@link String},
+ * and the value may be a {@link Byte}, {@link Boolean}, {@link Character},
+ * {@link Short}, {@link Integer}, {@link Long}, {@link Float},
+ * {@link Double}, {@link String} or {@link scala.tools.asm.Type}, or a
+ * two element String array (for enumeration values), an
+ * {@link AnnotationNode}, or a {@link List} of values of one of the
+ * preceding types. The list may be <tt>null</tt> if there is no name
+ * value pair.
+ */
+ public List<Object> values;
+
+ /**
+ * Constructs a new {@link AnnotationNode}. <i>Subclasses must not use this
+ * constructor</i>. Instead, they must use the
+ * {@link #AnnotationNode(int, String)} version.
+ *
+ * @param desc the class descriptor of the annotation class.
+ */
+ public AnnotationNode(final String desc) {
+ this(Opcodes.ASM4, desc);
+ }
+
+ /**
+ * Constructs a new {@link AnnotationNode}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param desc the class descriptor of the annotation class.
+ */
+ public AnnotationNode(final int api, final String desc) {
+ super(api);
+ this.desc = desc;
+ }
+
+ /**
+ * Constructs a new {@link AnnotationNode} to visit an array value.
+ *
+ * @param values where the visited values must be stored.
+ */
+ AnnotationNode(final List<Object> values) {
+ super(Opcodes.ASM4);
+ this.values = values;
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the AnnotationVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(final String name, final Object value) {
+ if (values == null) {
+ values = new ArrayList<Object>(this.desc != null ? 2 : 1);
+ }
+ if (this.desc != null) {
+ values.add(name);
+ }
+ values.add(value);
+ }
+
+ @Override
+ public void visitEnum(
+ final String name,
+ final String desc,
+ final String value)
+ {
+ if (values == null) {
+ values = new ArrayList<Object>(this.desc != null ? 2 : 1);
+ }
+ if (this.desc != null) {
+ values.add(name);
+ }
+ values.add(new String[] { desc, value });
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String name,
+ final String desc)
+ {
+ if (values == null) {
+ values = new ArrayList<Object>(this.desc != null ? 2 : 1);
+ }
+ if (this.desc != null) {
+ values.add(name);
+ }
+ AnnotationNode annotation = new AnnotationNode(desc);
+ values.add(annotation);
+ return annotation;
+ }
+
+ @Override
+ public AnnotationVisitor visitArray(final String name) {
+ if (values == null) {
+ values = new ArrayList<Object>(this.desc != null ? 2 : 1);
+ }
+ if (this.desc != null) {
+ values.add(name);
+ }
+ List<Object> array = new ArrayList<Object>();
+ values.add(array);
+ return new AnnotationNode(array);
+ }
+
+ @Override
+ public void visitEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Accept methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Checks that this annotation node is compatible with the given ASM API
+ * version. This methods checks that this node, and all its nodes
+ * recursively, do not contain elements that were introduced in more recent
+ * versions of the ASM API than the given version.
+ *
+ * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ */
+ public void check(final int api) {
+ // nothing to do
+ }
+
+ /**
+ * Makes the given visitor visit this annotation.
+ *
+ * @param av an annotation visitor. May be <tt>null</tt>.
+ */
+ public void accept(final AnnotationVisitor av) {
+ if (av != null) {
+ if (values != null) {
+ for (int i = 0; i < values.size(); i += 2) {
+ String name = (String) values.get(i);
+ Object value = values.get(i + 1);
+ accept(av, name, value);
+ }
+ }
+ av.visitEnd();
+ }
+ }
+
+ /**
+ * Makes the given visitor visit a given annotation value.
+ *
+ * @param av an annotation visitor. May be <tt>null</tt>.
+ * @param name the value name.
+ * @param value the actual value.
+ */
+ static void accept(
+ final AnnotationVisitor av,
+ final String name,
+ final Object value)
+ {
+ if (av != null) {
+ if (value instanceof String[]) {
+ String[] typeconst = (String[]) value;
+ av.visitEnum(name, typeconst[0], typeconst[1]);
+ } else if (value instanceof AnnotationNode) {
+ AnnotationNode an = (AnnotationNode) value;
+ an.accept(av.visitAnnotation(name, an.desc));
+ } else if (value instanceof List) {
+ AnnotationVisitor v = av.visitArray(name);
+ List<?> array = (List<?>) value;
+ for (int j = 0; j < array.size(); ++j) {
+ accept(v, null, array.get(j));
+ }
+ v.visitEnd();
+ } else {
+ av.visit(name, value);
+ }
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/ClassNode.java b/src/asm/scala/tools/asm/tree/ClassNode.java
new file mode 100644
index 0000000..64effae
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/ClassNode.java
@@ -0,0 +1,371 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.ClassVisitor;
+import scala.tools.asm.FieldVisitor;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents a class.
+ *
+ * @author Eric Bruneton
+ */
+public class ClassNode extends ClassVisitor {
+
+ /**
+ * The class version.
+ */
+ public int version;
+
+ /**
+ * The class's access flags (see {@link org.objectweb.asm.Opcodes}). This
+ * field also indicates if the class is deprecated.
+ */
+ public int access;
+
+ /**
+ * The internal name of the class (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ */
+ public String name;
+
+ /**
+ * The signature of the class. Mayt be <tt>null</tt>.
+ */
+ public String signature;
+
+ /**
+ * The internal of name of the super class (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). For
+ * interfaces, the super class is {@link Object}. May be <tt>null</tt>,
+ * but only for the {@link Object} class.
+ */
+ public String superName;
+
+ /**
+ * The internal names of the class's interfaces (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). This
+ * list is a list of {@link String} objects.
+ */
+ public List<String> interfaces;
+
+ /**
+ * The name of the source file from which this class was compiled. May be
+ * <tt>null</tt>.
+ */
+ public String sourceFile;
+
+ /**
+ * Debug information to compute the correspondance between source and
+ * compiled elements of the class. May be <tt>null</tt>.
+ */
+ public String sourceDebug;
+
+ /**
+ * The internal name of the enclosing class of the class. May be
+ * <tt>null</tt>.
+ */
+ public String outerClass;
+
+ /**
+ * The name of the method that contains the class, or <tt>null</tt> if the
+ * class is not enclosed in a method.
+ */
+ public String outerMethod;
+
+ /**
+ * The descriptor of the method that contains the class, or <tt>null</tt>
+ * if the class is not enclosed in a method.
+ */
+ public String outerMethodDesc;
+
+ /**
+ * The runtime visible annotations of this class. This list is a list of
+ * {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.tree.AnnotationNode
+ * @label visible
+ */
+ public List<AnnotationNode> visibleAnnotations;
+
+ /**
+ * The runtime invisible annotations of this class. This list is a list of
+ * {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.tree.AnnotationNode
+ * @label invisible
+ */
+ public List<AnnotationNode> invisibleAnnotations;
+
+ /**
+ * The non standard attributes of this class. This list is a list of
+ * {@link Attribute} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.Attribute
+ */
+ public List<Attribute> attrs;
+
+ /**
+ * Informations about the inner classes of this class. This list is a list
+ * of {@link InnerClassNode} objects.
+ *
+ * @associates org.objectweb.asm.tree.InnerClassNode
+ */
+ public List<InnerClassNode> innerClasses;
+
+ /**
+ * The fields of this class. This list is a list of {@link FieldNode}
+ * objects.
+ *
+ * @associates org.objectweb.asm.tree.FieldNode
+ */
+ public List<FieldNode> fields;
+
+ /**
+ * The methods of this class. This list is a list of {@link MethodNode}
+ * objects.
+ *
+ * @associates org.objectweb.asm.tree.MethodNode
+ */
+ public List<MethodNode> methods;
+
+ /**
+ * Constructs a new {@link ClassNode}. <i>Subclasses must not use this
+ * constructor</i>. Instead, they must use the {@link #ClassNode(int)}
+ * version.
+ */
+ public ClassNode() {
+ this(Opcodes.ASM4);
+ }
+
+ /**
+ * Constructs a new {@link ClassNode}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ */
+ public ClassNode(final int api) {
+ super(api);
+ this.interfaces = new ArrayList<String>();
+ this.innerClasses = new ArrayList<InnerClassNode>();
+ this.fields = new ArrayList<FieldNode>();
+ this.methods = new ArrayList<MethodNode>();
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the ClassVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(
+ final int version,
+ final int access,
+ final String name,
+ final String signature,
+ final String superName,
+ final String[] interfaces)
+ {
+ this.version = version;
+ this.access = access;
+ this.name = name;
+ this.signature = signature;
+ this.superName = superName;
+ if (interfaces != null) {
+ this.interfaces.addAll(Arrays.asList(interfaces));
+ }
+ }
+
+ @Override
+ public void visitSource(final String file, final String debug) {
+ sourceFile = file;
+ sourceDebug = debug;
+ }
+
+ @Override
+ public void visitOuterClass(
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ outerClass = owner;
+ outerMethod = name;
+ outerMethodDesc = desc;
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ AnnotationNode an = new AnnotationNode(desc);
+ if (visible) {
+ if (visibleAnnotations == null) {
+ visibleAnnotations = new ArrayList<AnnotationNode>(1);
+ }
+ visibleAnnotations.add(an);
+ } else {
+ if (invisibleAnnotations == null) {
+ invisibleAnnotations = new ArrayList<AnnotationNode>(1);
+ }
+ invisibleAnnotations.add(an);
+ }
+ return an;
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ if (attrs == null) {
+ attrs = new ArrayList<Attribute>(1);
+ }
+ attrs.add(attr);
+ }
+
+ @Override
+ public void visitInnerClass(
+ final String name,
+ final String outerName,
+ final String innerName,
+ final int access)
+ {
+ InnerClassNode icn = new InnerClassNode(name,
+ outerName,
+ innerName,
+ access);
+ innerClasses.add(icn);
+ }
+
+ @Override
+ public FieldVisitor visitField(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ FieldNode fn = new FieldNode(access, name, desc, signature, value);
+ fields.add(fn);
+ return fn;
+ }
+
+ @Override
+ public MethodVisitor visitMethod(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ MethodNode mn = new MethodNode(access,
+ name,
+ desc,
+ signature,
+ exceptions);
+ methods.add(mn);
+ return mn;
+ }
+
+ @Override
+ public void visitEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Accept method
+ // ------------------------------------------------------------------------
+
+ /**
+ * Checks that this class node is compatible with the given ASM API version.
+ * This methods checks that this node, and all its nodes recursively, do not
+ * contain elements that were introduced in more recent versions of the ASM
+ * API than the given version.
+ *
+ * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ */
+ public void check(final int api) {
+ // nothing to do
+ }
+
+ /**
+ * Makes the given class visitor visit this class.
+ *
+ * @param cv a class visitor.
+ */
+ public void accept(final ClassVisitor cv) {
+ // visits header
+ String[] interfaces = new String[this.interfaces.size()];
+ this.interfaces.toArray(interfaces);
+ cv.visit(version, access, name, signature, superName, interfaces);
+ // visits source
+ if (sourceFile != null || sourceDebug != null) {
+ cv.visitSource(sourceFile, sourceDebug);
+ }
+ // visits outer class
+ if (outerClass != null) {
+ cv.visitOuterClass(outerClass, outerMethod, outerMethodDesc);
+ }
+ // visits attributes
+ int i, n;
+ n = visibleAnnotations == null ? 0 : visibleAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ AnnotationNode an = visibleAnnotations.get(i);
+ an.accept(cv.visitAnnotation(an.desc, true));
+ }
+ n = invisibleAnnotations == null ? 0 : invisibleAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ AnnotationNode an = invisibleAnnotations.get(i);
+ an.accept(cv.visitAnnotation(an.desc, false));
+ }
+ n = attrs == null ? 0 : attrs.size();
+ for (i = 0; i < n; ++i) {
+ cv.visitAttribute(attrs.get(i));
+ }
+ // visits inner classes
+ for (i = 0; i < innerClasses.size(); ++i) {
+ innerClasses.get(i).accept(cv);
+ }
+ // visits fields
+ for (i = 0; i < fields.size(); ++i) {
+ fields.get(i).accept(cv);
+ }
+ // visits methods
+ for (i = 0; i < methods.size(); ++i) {
+ methods.get(i).accept(cv);
+ }
+ // visits end
+ cv.visitEnd();
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/FieldInsnNode.java b/src/asm/scala/tools/asm/tree/FieldInsnNode.java
new file mode 100644
index 0000000..6b7a6a1
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/FieldInsnNode.java
@@ -0,0 +1,106 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a field instruction. A field instruction is an
+ * instruction that loads or stores the value of a field of an object.
+ *
+ * @author Eric Bruneton
+ */
+public class FieldInsnNode extends AbstractInsnNode {
+
+ /**
+ * The internal name of the field's owner class (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ */
+ public String owner;
+
+ /**
+ * The field's name.
+ */
+ public String name;
+
+ /**
+ * The field's descriptor (see {@link org.objectweb.asm.Type}).
+ */
+ public String desc;
+
+ /**
+ * Constructs a new {@link FieldInsnNode}.
+ *
+ * @param opcode the opcode of the type instruction to be constructed. This
+ * opcode must be GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
+ * @param owner the internal name of the field's owner class (see
+ * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ * @param name the field's name.
+ * @param desc the field's descriptor (see {@link org.objectweb.asm.Type}).
+ */
+ public FieldInsnNode(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ super(opcode);
+ this.owner = owner;
+ this.name = name;
+ this.desc = desc;
+ }
+
+ /**
+ * Sets the opcode of this instruction.
+ *
+ * @param opcode the new instruction opcode. This opcode must be GETSTATIC,
+ * PUTSTATIC, GETFIELD or PUTFIELD.
+ */
+ public void setOpcode(final int opcode) {
+ this.opcode = opcode;
+ }
+
+ @Override
+ public int getType() {
+ return FIELD_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor cv) {
+ cv.visitFieldInsn(opcode, owner, name, desc);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new FieldInsnNode(opcode, owner, name, desc);
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/FieldNode.java b/src/asm/scala/tools/asm/tree/FieldNode.java
new file mode 100644
index 0000000..9a1e170
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/FieldNode.java
@@ -0,0 +1,243 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.ClassVisitor;
+import scala.tools.asm.FieldVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents a field.
+ *
+ * @author Eric Bruneton
+ */
+public class FieldNode extends FieldVisitor {
+
+ /**
+ * The field's access flags (see {@link org.objectweb.asm.Opcodes}). This
+ * field also indicates if the field is synthetic and/or deprecated.
+ */
+ public int access;
+
+ /**
+ * The field's name.
+ */
+ public String name;
+
+ /**
+ * The field's descriptor (see {@link org.objectweb.asm.Type}).
+ */
+ public String desc;
+
+ /**
+ * The field's signature. May be <tt>null</tt>.
+ */
+ public String signature;
+
+ /**
+ * The field's initial value. This field, which may be <tt>null</tt> if
+ * the field does not have an initial value, must be an {@link Integer}, a
+ * {@link Float}, a {@link Long}, a {@link Double} or a {@link String}.
+ */
+ public Object value;
+
+ /**
+ * The runtime visible annotations of this field. This list is a list of
+ * {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.tree.AnnotationNode
+ * @label visible
+ */
+ public List<AnnotationNode> visibleAnnotations;
+
+ /**
+ * The runtime invisible annotations of this field. This list is a list of
+ * {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.tree.AnnotationNode
+ * @label invisible
+ */
+ public List<AnnotationNode> invisibleAnnotations;
+
+ /**
+ * The non standard attributes of this field. This list is a list of
+ * {@link Attribute} objects. May be <tt>null</tt>.
+ *
+ * @associates org.objectweb.asm.Attribute
+ */
+ public List<Attribute> attrs;
+
+ /**
+ * Constructs a new {@link FieldNode}. <i>Subclasses must not use this
+ * constructor</i>. Instead, they must use the
+ * {@link #FieldNode(int, int, String, String, String, Object)} version.
+ *
+ * @param access the field's access flags (see
+ * {@link org.objectweb.asm.Opcodes}). This parameter also indicates
+ * if the field is synthetic and/or deprecated.
+ * @param name the field's name.
+ * @param desc the field's descriptor (see {@link org.objectweb.asm.Type
+ * Type}).
+ * @param signature the field's signature.
+ * @param value the field's initial value. This parameter, which may be
+ * <tt>null</tt> if the field does not have an initial value, must be
+ * an {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double} or a {@link String}.
+ */
+ public FieldNode(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ this(Opcodes.ASM4, access, name, desc, signature, value);
+ }
+
+ /**
+ * Constructs a new {@link FieldNode}. <i>Subclasses must not use this
+ * constructor</i>. Instead, they must use the
+ * {@link #FieldNode(int, int, String, String, String, Object)} version.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param access the field's access flags (see
+ * {@link org.objectweb.asm.Opcodes}). This parameter also indicates
+ * if the field is synthetic and/or deprecated.
+ * @param name the field's name.
+ * @param desc the field's descriptor (see {@link org.objectweb.asm.Type
+ * Type}).
+ * @param signature the field's signature.
+ * @param value the field's initial value. This parameter, which may be
+ * <tt>null</tt> if the field does not have an initial value, must be
+ * an {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double} or a {@link String}.
+ */
+ public FieldNode(
+ final int api,
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ super(api);
+ this.access = access;
+ this.name = name;
+ this.desc = desc;
+ this.signature = signature;
+ this.value = value;
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the FieldVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ AnnotationNode an = new AnnotationNode(desc);
+ if (visible) {
+ if (visibleAnnotations == null) {
+ visibleAnnotations = new ArrayList<AnnotationNode>(1);
+ }
+ visibleAnnotations.add(an);
+ } else {
+ if (invisibleAnnotations == null) {
+ invisibleAnnotations = new ArrayList<AnnotationNode>(1);
+ }
+ invisibleAnnotations.add(an);
+ }
+ return an;
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ if (attrs == null) {
+ attrs = new ArrayList<Attribute>(1);
+ }
+ attrs.add(attr);
+ }
+
+ @Override
+ public void visitEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Accept methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Checks that this field node is compatible with the given ASM API version.
+ * This methods checks that this node, and all its nodes recursively, do not
+ * contain elements that were introduced in more recent versions of the ASM
+ * API than the given version.
+ *
+ * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ */
+ public void check(final int api) {
+ // nothing to do
+ }
+
+ /**
+ * Makes the given class visitor visit this field.
+ *
+ * @param cv a class visitor.
+ */
+ public void accept(final ClassVisitor cv) {
+ FieldVisitor fv = cv.visitField(access, name, desc, signature, value);
+ if (fv == null) {
+ return;
+ }
+ int i, n;
+ n = visibleAnnotations == null ? 0 : visibleAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ AnnotationNode an = visibleAnnotations.get(i);
+ an.accept(fv.visitAnnotation(an.desc, true));
+ }
+ n = invisibleAnnotations == null ? 0 : invisibleAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ AnnotationNode an = invisibleAnnotations.get(i);
+ an.accept(fv.visitAnnotation(an.desc, false));
+ }
+ n = attrs == null ? 0 : attrs.size();
+ for (i = 0; i < n; ++i) {
+ fv.visitAttribute(attrs.get(i));
+ }
+ fv.visitEnd();
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/FrameNode.java b/src/asm/scala/tools/asm/tree/FrameNode.java
new file mode 100644
index 0000000..66825de
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/FrameNode.java
@@ -0,0 +1,211 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents a stack map frame. These nodes are pseudo instruction
+ * nodes in order to be inserted in an instruction list. In fact these nodes
+ * must(*) be inserted <i>just before</i> any instruction node <b>i</b> that
+ * follows an unconditionnal branch instruction such as GOTO or THROW, that is
+ * the target of a jump instruction, or that starts an exception handler block.
+ * The stack map frame types must describe the values of the local variables and
+ * of the operand stack elements <i>just before</i> <b>i</b> is executed. <br>
+ * <br> (*) this is mandatory only for classes whose version is greater than or
+ * equal to {@link Opcodes#V1_6 V1_6}.
+ *
+ * @author Eric Bruneton
+ */
+public class FrameNode extends AbstractInsnNode {
+
+ /**
+ * The type of this frame. Must be {@link Opcodes#F_NEW} for expanded
+ * frames, or {@link Opcodes#F_FULL}, {@link Opcodes#F_APPEND},
+ * {@link Opcodes#F_CHOP}, {@link Opcodes#F_SAME} or
+ * {@link Opcodes#F_APPEND}, {@link Opcodes#F_SAME1} for compressed frames.
+ */
+ public int type;
+
+ /**
+ * The types of the local variables of this stack map frame. Elements of
+ * this list can be Integer, String or LabelNode objects (for primitive,
+ * reference and uninitialized types respectively - see
+ * {@link MethodVisitor}).
+ */
+ public List<Object> local;
+
+ /**
+ * The types of the operand stack elements of this stack map frame. Elements
+ * of this list can be Integer, String or LabelNode objects (for primitive,
+ * reference and uninitialized types respectively - see
+ * {@link MethodVisitor}).
+ */
+ public List<Object> stack;
+
+ private FrameNode() {
+ super(-1);
+ }
+
+ /**
+ * Constructs a new {@link FrameNode}.
+ *
+ * @param type the type of this frame. Must be {@link Opcodes#F_NEW} for
+ * expanded frames, or {@link Opcodes#F_FULL},
+ * {@link Opcodes#F_APPEND}, {@link Opcodes#F_CHOP},
+ * {@link Opcodes#F_SAME} or {@link Opcodes#F_APPEND},
+ * {@link Opcodes#F_SAME1} for compressed frames.
+ * @param nLocal number of local variables of this stack map frame.
+ * @param local the types of the local variables of this stack map frame.
+ * Elements of this list can be Integer, String or LabelNode objects
+ * (for primitive, reference and uninitialized types respectively -
+ * see {@link MethodVisitor}).
+ * @param nStack number of operand stack elements of this stack map frame.
+ * @param stack the types of the operand stack elements of this stack map
+ * frame. Elements of this list can be Integer, String or LabelNode
+ * objects (for primitive, reference and uninitialized types
+ * respectively - see {@link MethodVisitor}).
+ */
+ public FrameNode(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack)
+ {
+ super(-1);
+ this.type = type;
+ switch (type) {
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ this.local = asList(nLocal, local);
+ this.stack = asList(nStack, stack);
+ break;
+ case Opcodes.F_APPEND:
+ this.local = asList(nLocal, local);
+ break;
+ case Opcodes.F_CHOP:
+ this.local = Arrays.asList(new Object[nLocal]);
+ break;
+ case Opcodes.F_SAME:
+ break;
+ case Opcodes.F_SAME1:
+ this.stack = asList(1, stack);
+ break;
+ }
+ }
+
+ @Override
+ public int getType() {
+ return FRAME;
+ }
+
+ /**
+ * Makes the given visitor visit this stack map frame.
+ *
+ * @param mv a method visitor.
+ */
+ @Override
+ public void accept(final MethodVisitor mv) {
+ switch (type) {
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ mv.visitFrame(type,
+ local.size(),
+ asArray(local),
+ stack.size(),
+ asArray(stack));
+ break;
+ case Opcodes.F_APPEND:
+ mv.visitFrame(type, local.size(), asArray(local), 0, null);
+ break;
+ case Opcodes.F_CHOP:
+ mv.visitFrame(type, local.size(), null, 0, null);
+ break;
+ case Opcodes.F_SAME:
+ mv.visitFrame(type, 0, null, 0, null);
+ break;
+ case Opcodes.F_SAME1:
+ mv.visitFrame(type, 0, null, 1, asArray(stack));
+ break;
+ }
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ FrameNode clone = new FrameNode();
+ clone.type = type;
+ if (local != null) {
+ clone.local = new ArrayList<Object>();
+ for (int i = 0; i < local.size(); ++i) {
+ Object l = local.get(i);
+ if (l instanceof LabelNode) {
+ l = labels.get(l);
+ }
+ clone.local.add(l);
+ }
+ }
+ if (stack != null) {
+ clone.stack = new ArrayList<Object>();
+ for (int i = 0; i < stack.size(); ++i) {
+ Object s = stack.get(i);
+ if (s instanceof LabelNode) {
+ s = labels.get(s);
+ }
+ clone.stack.add(s);
+ }
+ }
+ return clone;
+ }
+
+ // ------------------------------------------------------------------------
+
+ private static List<Object> asList(final int n, final Object[] o) {
+ return Arrays.asList(o).subList(0, n);
+ }
+
+ private static Object[] asArray(final List<Object> l) {
+ Object[] objs = new Object[l.size()];
+ for (int i = 0; i < objs.length; ++i) {
+ Object o = l.get(i);
+ if (o instanceof LabelNode) {
+ o = ((LabelNode) o).getLabel();
+ }
+ objs[i] = o;
+ }
+ return objs;
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/IincInsnNode.java b/src/asm/scala/tools/asm/tree/IincInsnNode.java
new file mode 100644
index 0000000..75ac408
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/IincInsnNode.java
@@ -0,0 +1,80 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents an IINC instruction.
+ *
+ * @author Eric Bruneton
+ */
+public class IincInsnNode extends AbstractInsnNode {
+
+ /**
+ * Index of the local variable to be incremented.
+ */
+ public int var;
+
+ /**
+ * Amount to increment the local variable by.
+ */
+ public int incr;
+
+ /**
+ * Constructs a new {@link IincInsnNode}.
+ *
+ * @param var index of the local variable to be incremented.
+ * @param incr increment amount to increment the local variable by.
+ */
+ public IincInsnNode(final int var, final int incr) {
+ super(Opcodes.IINC);
+ this.var = var;
+ this.incr = incr;
+ }
+
+ @Override
+ public int getType() {
+ return IINC_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitIincInsn(var, incr);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new IincInsnNode(var, incr);
+ }
+}
\ No newline at end of file
diff --git a/src/asm/scala/tools/asm/tree/InnerClassNode.java b/src/asm/scala/tools/asm/tree/InnerClassNode.java
new file mode 100644
index 0000000..4579488
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/InnerClassNode.java
@@ -0,0 +1,101 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import scala.tools.asm.ClassVisitor;
+
+/**
+ * A node that represents an inner class.
+ *
+ * @author Eric Bruneton
+ */
/**
 * A node that represents an inner class.
 *
 * @author Eric Bruneton
 */
public class InnerClassNode {

    /**
     * The internal name of an inner class (see
     * {@link scala.tools.asm.Type#getInternalName() getInternalName}).
     */
    public String name;

    /**
     * The internal name of the class to which the inner class belongs (see
     * {@link scala.tools.asm.Type#getInternalName() getInternalName}). May
     * be <tt>null</tt>.
     */
    public String outerName;

    /**
     * The (simple) name of the inner class inside its enclosing class. May be
     * <tt>null</tt> for anonymous inner classes.
     */
    public String innerName;

    /**
     * The access flags of the inner class as originally declared in the
     * enclosing class.
     */
    public int access;

    /**
     * Constructs a new {@link InnerClassNode}.
     *
     * @param name the internal name of an inner class (see
     *        {@link scala.tools.asm.Type#getInternalName() getInternalName}).
     * @param outerName the internal name of the class to which the inner class
     *        belongs (see
     *        {@link scala.tools.asm.Type#getInternalName() getInternalName}).
     *        May be <tt>null</tt>.
     * @param innerName the (simple) name of the inner class inside its
     *        enclosing class. May be <tt>null</tt> for anonymous inner
     *        classes.
     * @param access the access flags of the inner class as originally declared
     *        in the enclosing class.
     */
    public InnerClassNode(
        final String name,
        final String outerName,
        final String innerName,
        final int access)
    {
        this.name = name;
        this.outerName = outerName;
        this.innerName = innerName;
        this.access = access;
    }

    /**
     * Makes the given class visitor visit this inner class.
     *
     * @param cv a class visitor.
     */
    public void accept(final ClassVisitor cv) {
        cv.visitInnerClass(name, outerName, innerName, access);
    }
}
diff --git a/src/asm/scala/tools/asm/tree/InsnList.java b/src/asm/scala/tools/asm/tree/InsnList.java
new file mode 100644
index 0000000..dedd3bb
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/InsnList.java
@@ -0,0 +1,578 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ListIterator;
+import java.util.NoSuchElementException;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A doubly linked list of {@link AbstractInsnNode} objects. <i>This
+ * implementation is not thread safe</i>.
+ */
/**
 * A doubly linked list of {@link AbstractInsnNode} objects. <i>This
 * implementation is not thread safe</i>.
 */
public class InsnList {

    /**
     * The number of instructions in this list.
     */
    private int size;

    /**
     * The first instruction in this list. May be <tt>null</tt>.
     */
    private AbstractInsnNode first;

    /**
     * The last instruction in this list. May be <tt>null</tt>.
     */
    private AbstractInsnNode last;

    /**
     * A cache of the instructions of this list. This cache is used to improve
     * the performance of the {@link #get} method. Built lazily by
     * {@link #toArray}, which also assigns each node's {@code index} field as
     * a side effect; every method that modifies the list resets this cache to
     * <tt>null</tt>.
     */
    AbstractInsnNode[] cache;

    /**
     * Returns the number of instructions in this list.
     *
     * @return the number of instructions in this list.
     */
    public int size() {
        return size;
    }

    /**
     * Returns the first instruction in this list.
     *
     * @return the first instruction in this list, or <tt>null</tt> if the
     *         list is empty.
     */
    public AbstractInsnNode getFirst() {
        return first;
    }

    /**
     * Returns the last instruction in this list.
     *
     * @return the last instruction in this list, or <tt>null</tt> if the list
     *         is empty.
     */
    public AbstractInsnNode getLast() {
        return last;
    }

    /**
     * Returns the instruction whose index is given. This method builds a cache
     * of the instructions in this list to avoid scanning the whole list each
     * time it is called. Once the cache is built, this method run in constant
     * time. This cache is invalidated by all the methods that modify the list.
     *
     * @param index the index of the instruction that must be returned.
     * @return the instruction whose index is given.
     * @throws IndexOutOfBoundsException if (index < 0 || index >= size()).
     */
    public AbstractInsnNode get(final int index) {
        if (index < 0 || index >= size) {
            throw new IndexOutOfBoundsException();
        }
        if (cache == null) {
            cache = toArray();
        }
        return cache[index];
    }

    /**
     * Returns <tt>true</tt> if the given instruction belongs to this list.
     * This method always scans the instructions of this list until it finds the
     * given instruction or reaches the end of the list.
     *
     * @param insn an instruction.
     * @return <tt>true</tt> if the given instruction belongs to this list.
     */
    public boolean contains(final AbstractInsnNode insn) {
        // Identity scan: linear in the list size, unlike indexOf below.
        AbstractInsnNode i = first;
        while (i != null && i != insn) {
            i = i.next;
        }
        return i != null;
    }

    /**
     * Returns the index of the given instruction in this list. This method
     * builds a cache of the instruction indexes to avoid scanning the whole
     * list each time it is called. Once the cache is built, this method run in
     * constant time. The cache is invalidated by all the methods that modify
     * the list.
     *
     * @param insn an instruction <i>of this list</i>.
     * @return the index of the given instruction in this list. <i>The result of
     *         this method is undefined if the given instruction does not belong
     *         to this list</i>. Use {@link #contains contains} to test if an
     *         instruction belongs to an instruction list or not.
     */
    public int indexOf(final AbstractInsnNode insn) {
        if (cache == null) {
            // Building the cache assigns the 'index' field of every node in
            // this list; the lookup below relies on that side effect.
            cache = toArray();
        }
        return insn.index;
    }

    /**
     * Makes the given visitor visit all of the instructions in this list.
     *
     * @param mv the method visitor that must visit the instructions.
     */
    public void accept(final MethodVisitor mv) {
        AbstractInsnNode insn = first;
        while (insn != null) {
            insn.accept(mv);
            insn = insn.next;
        }
    }

    /**
     * Returns an iterator over the instructions in this list.
     *
     * @return an iterator over the instructions in this list.
     */
    public ListIterator<AbstractInsnNode> iterator() {
        return iterator(0);
    }

    /**
     * Returns an iterator over the instructions in this list.
     *
     * @param index index of the first instruction that the iterator will
     *        return.
     * @return an iterator over the instructions in this list.
     */
    @SuppressWarnings("unchecked")
    public ListIterator<AbstractInsnNode> iterator(int index) {
        return new InsnListIterator(index);
    }

    /**
     * Returns an array containing all of the instructions in this list.
     *
     * @return an array containing all of the instructions in this list.
     */
    public AbstractInsnNode[] toArray() {
        int i = 0;
        AbstractInsnNode elem = first;
        AbstractInsnNode[] insns = new AbstractInsnNode[size];
        while (elem != null) {
            insns[i] = elem;
            elem.index = i++; // side effect: records each node's position
            elem = elem.next;
        }
        return insns;
    }

    /**
     * Replaces an instruction of this list with another instruction.
     *
     * @param location an instruction <i>of this list</i>.
     * @param insn another instruction, <i>which must not belong to any
     *        {@link InsnList}</i>.
     */
    public void set(final AbstractInsnNode location, final AbstractInsnNode insn) {
        // Splice 'insn' into location's place by relinking both neighbours,
        // updating first/last when location was at either end.
        AbstractInsnNode next = location.next;
        insn.next = next;
        if (next != null) {
            next.prev = insn;
        } else {
            last = insn;
        }
        AbstractInsnNode prev = location.prev;
        insn.prev = prev;
        if (prev != null) {
            prev.next = insn;
        } else {
            first = insn;
        }
        if (cache != null) {
            // The cache stays valid for a 1-for-1 replacement: only one slot
            // changes, so it is patched in place instead of being discarded.
            int index = location.index;
            cache[index] = insn;
            insn.index = index;
        } else {
            insn.index = 0; // insn now belongs to an InsnList
        }
        location.index = -1; // location no longer belongs to an InsnList
        location.prev = null;
        location.next = null;
    }

    /**
     * Adds the given instruction to the end of this list.
     *
     * @param insn an instruction, <i>which must not belong to any
     *        {@link InsnList}</i>.
     */
    public void add(final AbstractInsnNode insn) {
        ++size;
        if (last == null) {
            first = insn;
            last = insn;
        } else {
            last.next = insn;
            insn.prev = last;
        }
        // Unconditional; redundant in the empty-list branch above, but
        // harmless.
        last = insn;
        cache = null;
        insn.index = 0; // insn now belongs to an InsnList
    }

    /**
     * Adds the given instructions to the end of this list.
     *
     * @param insns an instruction list, which is cleared during the process.
     *        This list must be different from 'this'.
     */
    public void add(final InsnList insns) {
        if (insns.size == 0) {
            return;
        }
        size += insns.size;
        if (last == null) {
            first = insns.first;
            last = insns.last;
        } else {
            AbstractInsnNode elem = insns.first;
            last.next = elem;
            elem.prev = last;
            last = insns.last;
        }
        cache = null;
        // mark == false: the moved nodes keep their prev/next links (they are
        // now owned by this list); only the source list's own fields are
        // cleared.
        insns.removeAll(false);
    }

    /**
     * Inserts the given instruction at the beginning of this list.
     *
     * @param insn an instruction, <i>which must not belong to any
     *        {@link InsnList}</i>.
     */
    public void insert(final AbstractInsnNode insn) {
        ++size;
        if (first == null) {
            first = insn;
            last = insn;
        } else {
            first.prev = insn;
            insn.next = first;
        }
        first = insn;
        cache = null;
        insn.index = 0; // insn now belongs to an InsnList
    }

    /**
     * Inserts the given instructions at the beginning of this list.
     *
     * @param insns an instruction list, which is cleared during the process.
     *        This list must be different from 'this'.
     */
    public void insert(final InsnList insns) {
        if (insns.size == 0) {
            return;
        }
        size += insns.size;
        if (first == null) {
            first = insns.first;
            last = insns.last;
        } else {
            AbstractInsnNode elem = insns.last;
            first.prev = elem;
            elem.next = first;
            first = insns.first;
        }
        cache = null;
        insns.removeAll(false);
    }

    /**
     * Inserts the given instruction after the specified instruction.
     *
     * @param location an instruction <i>of this list</i> after which insn must be
     *        inserted.
     * @param insn the instruction to be inserted, <i>which must not belong to
     *        any {@link InsnList}</i>.
     */
    public void insert(final AbstractInsnNode location, final AbstractInsnNode insn) {
        ++size;
        AbstractInsnNode next = location.next;
        if (next == null) {
            last = insn;
        } else {
            next.prev = insn;
        }
        location.next = insn;
        insn.next = next;
        insn.prev = location;
        cache = null;
        insn.index = 0; // insn now belongs to an InsnList
    }

    /**
     * Inserts the given instructions after the specified instruction.
     *
     * @param location an instruction <i>of this list</i> after which the
     *        instructions must be inserted.
     * @param insns the instruction list to be inserted, which is cleared during
     *        the process. This list must be different from 'this'.
     */
    public void insert(final AbstractInsnNode location, final InsnList insns) {
        if (insns.size == 0) {
            return;
        }
        size += insns.size;
        AbstractInsnNode ifirst = insns.first;
        AbstractInsnNode ilast = insns.last;
        AbstractInsnNode next = location.next;
        if (next == null) {
            last = ilast;
        } else {
            next.prev = ilast;
        }
        location.next = ifirst;
        ilast.next = next;
        ifirst.prev = location;
        cache = null;
        insns.removeAll(false);
    }

    /**
     * Inserts the given instruction before the specified instruction.
     *
     * @param location an instruction <i>of this list</i> before which insn must be
     *        inserted.
     * @param insn the instruction to be inserted, <i>which must not belong to
     *        any {@link InsnList}</i>.
     */
    public void insertBefore(final AbstractInsnNode location, final AbstractInsnNode insn) {
        ++size;
        AbstractInsnNode prev = location.prev;
        if (prev == null) {
            first = insn;
        } else {
            prev.next = insn;
        }
        location.prev = insn;
        insn.next = location;
        insn.prev = prev;
        cache = null;
        insn.index = 0; // insn now belongs to an InsnList
    }

    /**
     * Inserts the given instructions before the specified instruction.
     *
     * @param location an instruction <i>of this list</i> before which the instructions
     *        must be inserted.
     * @param insns the instruction list to be inserted, which is cleared during
     *        the process. This list must be different from 'this'.
     */
    public void insertBefore(final AbstractInsnNode location, final InsnList insns) {
        if (insns.size == 0) {
            return;
        }
        size += insns.size;
        AbstractInsnNode ifirst = insns.first;
        AbstractInsnNode ilast = insns.last;
        AbstractInsnNode prev = location.prev;
        if (prev == null) {
            first = ifirst;
        } else {
            prev.next = ifirst;
        }
        location.prev = ilast;
        ilast.next = location;
        ifirst.prev = prev;
        cache = null;
        insns.removeAll(false);
    }

    /**
     * Removes the given instruction from this list.
     *
     * @param insn the instruction <i>of this list</i> that must be removed.
     */
    public void remove(final AbstractInsnNode insn) {
        --size;
        AbstractInsnNode next = insn.next;
        AbstractInsnNode prev = insn.prev;
        if (next == null) {
            if (prev == null) {
                // insn was the only element.
                first = null;
                last = null;
            } else {
                // insn was the last element.
                prev.next = null;
                last = prev;
            }
        } else {
            if (prev == null) {
                // insn was the first element.
                first = next;
                next.prev = null;
            } else {
                // insn was in the middle.
                prev.next = next;
                next.prev = prev;
            }
        }
        cache = null;
        insn.index = -1; // insn no longer belongs to an InsnList
        insn.prev = null;
        insn.next = null;
    }

    /**
     * Removes all of the instructions of this list.
     *
     * @param mark if the instructions must be marked as no longer belonging to
     *        any {@link InsnList}.
     */
    void removeAll(final boolean mark) {
        if (mark) {
            // Walk the chain and detach every node so each can be reused in
            // another list; skipped by bulk-move callers (add/insert of an
            // InsnList), which transfer the nodes with their links intact.
            AbstractInsnNode insn = first;
            while (insn != null) {
                AbstractInsnNode next = insn.next;
                insn.index = -1; // insn no longer belongs to an InsnList
                insn.prev = null;
                insn.next = null;
                insn = next;
            }
        }
        size = 0;
        first = null;
        last = null;
        cache = null;
    }

    /**
     * Removes all of the instructions of this list.
     */
    public void clear() {
        removeAll(false);
    }

    /**
     * Reset all labels in the instruction list. This method should be called
     * before reusing same instructions list between several
     * <code>ClassWriter</code>s.
     */
    public void resetLabels() {
        AbstractInsnNode insn = first;
        while (insn != null) {
            if (insn instanceof LabelNode) {
                ((LabelNode) insn).resetLabel();
            }
            insn = insn.next;
        }
    }

    // this class is not generified because it will create bridges
    private final class InsnListIterator implements ListIterator/*<AbstractInsnNode>*/ {

        // The node the next call to next() will return; null at end of list.
        AbstractInsnNode next;

        // The node the next call to previous() will return; null at start.
        AbstractInsnNode prev;

        InsnListIterator(int index) {
            if (index == size()) {
                next = null;
                prev = getLast();
            } else {
                next = get(index);
                prev = next.prev;
            }
        }

        public boolean hasNext() {
            return next != null;
        }

        public Object next() {
            if (next == null) {
                throw new NoSuchElementException();
            }
            AbstractInsnNode result = next;
            prev = result;
            next = result.next;
            return result;
        }

        public void remove() {
            // NOTE(review): this assumes the last cursor move was next(), in
            // which case 'prev' is the element just returned. After a call to
            // previous() it removes the element *before* the one returned,
            // and it never throws IllegalStateException as the ListIterator
            // contract requires — confirm against later upstream ASM fixes.
            InsnList.this.remove(prev);
            prev = prev.prev;
        }

        public boolean hasPrevious() {
            return prev != null;
        }

        public Object previous() {
            // NOTE(review): unlike next(), there is no guard here; when
            // prev == null this fails with a NullPointerException on
            // result.prev instead of NoSuchElementException.
            AbstractInsnNode result = prev;
            next = result;
            prev = result.prev;
            return result;
        }

        public int nextIndex() {
            if (next == null) {
                return size();
            }
            if (cache == null) {
                cache = toArray(); // also (re)assigns every node's index
            }
            return next.index;
        }

        public int previousIndex() {
            if (prev == null) {
                return -1;
            }
            if (cache == null) {
                cache = toArray();
            }
            return prev.index;
        }

        public void add(Object o) {
            // NOTE(review): when the iterator is at the end of the list
            // (next == null), insertBefore dereferences location.prev and
            // throws a NullPointerException; later ASM versions fall back to
            // InsnList.add in that case.
            InsnList.this.insertBefore(next, (AbstractInsnNode) o);
            prev = (AbstractInsnNode) o;
        }

        public void set(Object o) {
            // NOTE(review): same end-of-list issue as add(): when next is
            // null, next.prev throws a NullPointerException here.
            InsnList.this.set(next.prev, (AbstractInsnNode) o);
            prev = (AbstractInsnNode) o;
        }
    }
}
diff --git a/src/asm/scala/tools/asm/tree/InsnNode.java b/src/asm/scala/tools/asm/tree/InsnNode.java
new file mode 100644
index 0000000..d4664d2
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/InsnNode.java
@@ -0,0 +1,84 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a zero operand instruction.
+ *
+ * @author Eric Bruneton
+ */
+public class InsnNode extends AbstractInsnNode {
+
+ /**
+ * Constructs a new {@link InsnNode}.
+ *
+ * @param opcode the opcode of the instruction to be constructed. This
+ * opcode must be NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1,
+ * ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1,
+ * FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD, LALOAD,
+ * FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, IASTORE, LASTORE,
+ * FASTORE, DASTORE, AASTORE, BASTORE, CASTORE, SASTORE, POP, POP2,
+ * DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP, IADD, LADD,
+ * FADD, DADD, ISUB, LSUB, FSUB, DSUB, IMUL, LMUL, FMUL, DMUL, IDIV,
+ * LDIV, FDIV, DDIV, IREM, LREM, FREM, DREM, INEG, LNEG, FNEG, DNEG,
+ * ISHL, LSHL, ISHR, LSHR, IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR,
+ * LXOR, I2L, I2F, I2D, L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F,
+ * I2B, I2C, I2S, LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN,
+ * FRETURN, DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW,
+ * MONITORENTER, or MONITOREXIT.
+ */
+ public InsnNode(final int opcode) {
+ super(opcode);
+ }
+
+ @Override
+ public int getType() {
+ return INSN;
+ }
+
+ /**
+ * Makes the given visitor visit this instruction.
+ *
+ * @param mv a method visitor.
+ */
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitInsn(opcode);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new InsnNode(opcode);
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/IntInsnNode.java b/src/asm/scala/tools/asm/tree/IntInsnNode.java
new file mode 100644
index 0000000..b61270c
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/IntInsnNode.java
@@ -0,0 +1,84 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents an instruction with a single int operand.
+ *
+ * @author Eric Bruneton
+ */
+public class IntInsnNode extends AbstractInsnNode {
+
+ /**
+ * The operand of this instruction.
+ */
+ public int operand;
+
+ /**
+ * Constructs a new {@link IntInsnNode}.
+ *
+ * @param opcode the opcode of the instruction to be constructed. This
+ * opcode must be BIPUSH, SIPUSH or NEWARRAY.
+ * @param operand the operand of the instruction to be constructed.
+ */
+ public IntInsnNode(final int opcode, final int operand) {
+ super(opcode);
+ this.operand = operand;
+ }
+
+ /**
+ * Sets the opcode of this instruction.
+ *
+ * @param opcode the new instruction opcode. This opcode must be BIPUSH,
+ * SIPUSH or NEWARRAY.
+ */
+ public void setOpcode(final int opcode) {
+ this.opcode = opcode;
+ }
+
+ @Override
+ public int getType() {
+ return INT_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitIntInsn(opcode, operand);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new IntInsnNode(opcode, operand);
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
new file mode 100644
index 0000000..d993b5a
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
@@ -0,0 +1,100 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.Handle;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents an invokedynamic instruction.
+ *
+ * @author Remi Forax
+ */
/**
 * A node that represents an invokedynamic instruction.
 *
 * @author Remi Forax
 */
public class InvokeDynamicInsnNode extends AbstractInsnNode {

    /**
     * Invokedynamic name.
     */
    public String name;

    /**
     * Invokedynamic descriptor.
     */
    public String desc;

    /**
     * Bootstrap method.
     */
    public Handle bsm;

    /**
     * Bootstrap constant arguments.
     */
    public Object[] bsmArgs;

    /**
     * Constructs a new {@link InvokeDynamicInsnNode}.
     *
     * @param name invokedynamic name.
     * @param desc invokedynamic descriptor (see {@link scala.tools.asm.Type}).
     * @param bsm the bootstrap method.
     * @param bsmArgs the bootstrap constant arguments.
     */
    public InvokeDynamicInsnNode(
        final String name,
        final String desc,
        final Handle bsm,
        final Object... bsmArgs)
    {
        super(Opcodes.INVOKEDYNAMIC);
        this.name = name;
        this.desc = desc;
        this.bsm = bsm;
        this.bsmArgs = bsmArgs;
    }

    @Override
    public int getType() {
        return INVOKE_DYNAMIC_INSN;
    }

    @Override
    public void accept(final MethodVisitor mv) {
        mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
    }

    @Override
    public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
        // NOTE(review): the copy shares the bsmArgs array with this node;
        // mutating one node's arguments is visible through the other.
        return new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs);
    }
}
\ No newline at end of file
diff --git a/src/asm/scala/tools/asm/tree/JumpInsnNode.java b/src/asm/scala/tools/asm/tree/JumpInsnNode.java
new file mode 100644
index 0000000..339ebbd
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/JumpInsnNode.java
@@ -0,0 +1,92 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a jump instruction. A jump instruction is an
+ * instruction that may jump to another instruction.
+ *
+ * @author Eric Bruneton
+ */
+public class JumpInsnNode extends AbstractInsnNode {
+
+ /**
+ * The operand of this instruction. This operand is a label that designates
+ * the instruction to which this instruction may jump.
+ */
+ public LabelNode label;
+
+ /**
+ * Constructs a new {@link JumpInsnNode}.
+ *
+ * @param opcode the opcode of the type instruction to be constructed. This
+ * opcode must be IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
+ * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ,
+ * IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
+ * @param label the operand of the instruction to be constructed. This
+ * operand is a label that designates the instruction to which the
+ * jump instruction may jump.
+ */
+ public JumpInsnNode(final int opcode, final LabelNode label) {
+ super(opcode);
+ this.label = label;
+ }
+
+ /**
+ * Sets the opcode of this instruction.
+ *
+ * @param opcode the new instruction opcode. This opcode must be IFEQ, IFNE,
+ * IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT,
+ * IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ, IF_ACMPNE, GOTO, JSR,
+ * IFNULL or IFNONNULL.
+ */
+ public void setOpcode(final int opcode) {
+ this.opcode = opcode;
+ }
+
+ @Override
+ public int getType() {
+ return JUMP_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitJumpInsn(opcode, label.getLabel());
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new JumpInsnNode(opcode, clone(label, labels));
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/LabelNode.java b/src/asm/scala/tools/asm/tree/LabelNode.java
new file mode 100644
index 0000000..523a8d6
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/LabelNode.java
@@ -0,0 +1,78 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * An {@link AbstractInsnNode} that encapsulates a {@link Label}.
+ */
+public class LabelNode extends AbstractInsnNode {
+
+ // The encapsulated label; created lazily by getLabel() when not supplied
+ // via the constructor.
+ private Label label;
+
+ /**
+ * Constructs an uninitialized {@link LabelNode}. The backing {@link Label}
+ * is created on the first call to {@link #getLabel()}.
+ */
+ public LabelNode() {
+ super(-1); // -1: this node is a pseudo instruction, not a real opcode
+ }
+
+ /**
+ * Constructs a {@link LabelNode} that encapsulates the given label.
+ *
+ * @param label the label to encapsulate.
+ */
+ public LabelNode(final Label label) {
+ super(-1);
+ this.label = label;
+ }
+
+ /**
+ * Returns the node type of this pseudo instruction, i.e. LABEL.
+ */
+ @Override
+ public int getType() {
+ return LABEL;
+ }
+
+ /**
+ * Returns the encapsulated label, creating it first if it does not exist
+ * yet.
+ */
+ public Label getLabel() {
+ if (label == null) {
+ label = new Label();
+ }
+ return label;
+ }
+
+ @Override
+ public void accept(final MethodVisitor cv) {
+ cv.visitLabel(getLabel());
+ }
+
+ /**
+ * Returns the clone of this node as previously recorded in the given map
+ * (label nodes are cloned once and shared, so the map is the authority).
+ *
+ * @param labels a map from original label nodes to their clones.
+ */
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return labels.get(this);
+ }
+
+ /**
+ * Discards the encapsulated label so that a fresh one is created on the
+ * next call to {@link #getLabel()}.
+ */
+ public void resetLabel() {
+ label = null;
+ }
+}
\ No newline at end of file
diff --git a/src/asm/scala/tools/asm/tree/LdcInsnNode.java b/src/asm/scala/tools/asm/tree/LdcInsnNode.java
new file mode 100644
index 0000000..f8d115a
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/LdcInsnNode.java
@@ -0,0 +1,77 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents an LDC instruction.
+ *
+ * @author Eric Bruneton
+ */
+public class LdcInsnNode extends AbstractInsnNode {
+
+ /**
+ * The constant to be loaded on the stack. This parameter must be a non null
+ * {@link Integer}, a {@link Float}, a {@link Long}, a {@link Double}, a
+ * {@link String} or a {@link scala.tools.asm.Type}.
+ */
+ public Object cst;
+
+ /**
+ * Constructs a new {@link LdcInsnNode}.
+ *
+ * @param cst the constant to be loaded on the stack. This parameter must be
+ * a non null {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double} or a {@link String}.
+ */
+ public LdcInsnNode(final Object cst) {
+ super(Opcodes.LDC);
+ this.cst = cst;
+ }
+
+ /**
+ * Returns the node type of this instruction, i.e. LDC_INSN.
+ */
+ @Override
+ public int getType() {
+ return LDC_INSN;
+ }
+
+ /**
+ * Makes the given method visitor visit this LDC instruction.
+ *
+ * @param mv a method visitor.
+ */
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitLdcInsn(cst);
+ }
+
+ /**
+ * Returns a copy of this instruction. The constant is shared, not copied;
+ * no label remapping is needed since LDC has no label operand.
+ *
+ * @param labels a map from original label nodes to their clones (unused).
+ */
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new LdcInsnNode(cst);
+ }
+}
\ No newline at end of file
diff --git a/src/asm/scala/tools/asm/tree/LineNumberNode.java b/src/asm/scala/tools/asm/tree/LineNumberNode.java
new file mode 100644
index 0000000..acc83c8
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/LineNumberNode.java
@@ -0,0 +1,82 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a line number declaration. These nodes are pseudo
+ * instruction nodes in order to be inserted in an instruction list.
+ *
+ * @author Eric Bruneton
+ */
+public class LineNumberNode extends AbstractInsnNode {
+
+ /**
+ * The declared line number, relative to the source file from which the
+ * class was compiled.
+ */
+ public int line;
+
+ /**
+ * The first instruction to which this line number applies.
+ */
+ public LabelNode start;
+
+ /**
+ * Constructs a new {@link LineNumberNode} pseudo instruction.
+ *
+ * @param line a line number, relative to the source file from which the
+ * class was compiled.
+ * @param start the first instruction corresponding to this line number.
+ */
+ public LineNumberNode(final int line, final LabelNode start) {
+ super(-1); // -1: pseudo instruction, not a real opcode
+ this.line = line;
+ this.start = start;
+ }
+
+ /**
+ * Makes the given method visitor visit this line number declaration.
+ *
+ * @param mv a method visitor.
+ */
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitLineNumber(line, start.getLabel());
+ }
+
+ /**
+ * Returns a copy of this node whose start label has been remapped through
+ * the given map.
+ *
+ * @param labels a map from original label nodes to their clones.
+ */
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new LineNumberNode(line, clone(start, labels));
+ }
+
+ /**
+ * Returns the node type of this pseudo instruction, i.e. LINE.
+ */
+ @Override
+ public int getType() {
+ return LINE;
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/LocalVariableNode.java b/src/asm/scala/tools/asm/tree/LocalVariableNode.java
new file mode 100644
index 0000000..51cbd3c
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/LocalVariableNode.java
@@ -0,0 +1,115 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a local variable declaration.
+ *
+ * @author Eric Bruneton
+ */
+public class LocalVariableNode {
+
+ /** The name of the local variable. */
+ public String name;
+
+ /** The type descriptor of the local variable. */
+ public String desc;
+
+ /** The signature of the local variable. May be <tt>null</tt>. */
+ public String signature;
+
+ /** First instruction of the variable's scope (inclusive). */
+ public LabelNode start;
+
+ /** Last instruction of the variable's scope (exclusive). */
+ public LabelNode end;
+
+ /** The local variable's index. */
+ public int index;
+
+ /**
+ * Constructs a new {@link LocalVariableNode}.
+ *
+ * @param name the name of the local variable.
+ * @param desc the type descriptor of the local variable.
+ * @param signature the signature of the local variable. May be
+ * <tt>null</tt>.
+ * @param start the first instruction of the variable's scope (inclusive).
+ * @param end the last instruction of the variable's scope (exclusive).
+ * @param index the local variable's index.
+ */
+ public LocalVariableNode(
+ final String name,
+ final String desc,
+ final String signature,
+ final LabelNode start,
+ final LabelNode end,
+ final int index)
+ {
+ this.index = index;
+ this.end = end;
+ this.start = start;
+ this.signature = signature;
+ this.desc = desc;
+ this.name = name;
+ }
+
+ /**
+ * Makes the given visitor visit this local variable declaration.
+ *
+ * @param mv a method visitor.
+ */
+ public void accept(final MethodVisitor mv) {
+ mv.visitLocalVariable(
+ name, desc, signature, start.getLabel(), end.getLabel(), index);
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
new file mode 100644
index 0000000..6d0f971
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
@@ -0,0 +1,116 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents a LOOKUPSWITCH instruction.
+ *
+ * @author Eric Bruneton
+ */
+public class LookupSwitchInsnNode extends AbstractInsnNode {
+
+ /**
+ * Beginning of the default handler block.
+ */
+ public LabelNode dflt;
+
+ /**
+ * The values of the keys. This list is a list of {@link Integer} objects.
+ */
+ public List<Integer> keys;
+
+ /**
+ * Beginnings of the handler blocks. This list is a list of
+ * {@link LabelNode} objects.
+ */
+ public List<LabelNode> labels;
+
+ /**
+ * Constructs a new {@link LookupSwitchInsnNode}.
+ *
+ * @param dflt beginning of the default handler block.
+ * @param keys the values of the keys. May be <tt>null</tt>.
+ * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
+ * the beginning of the handler block for the <tt>keys[i]</tt> key.
+ * May be <tt>null</tt>.
+ */
+ public LookupSwitchInsnNode(
+ final LabelNode dflt,
+ final int[] keys,
+ final LabelNode[] labels)
+ {
+ super(Opcodes.LOOKUPSWITCH);
+ this.dflt = dflt;
+ this.keys = new ArrayList<Integer>(keys == null ? 0 : keys.length);
+ this.labels = new ArrayList<LabelNode>(labels == null ? 0 : labels.length);
+ if (keys != null) {
+ for (int i = 0; i < keys.length; ++i) {
+ // Integer.valueOf uses the JDK's boxed-value cache instead of
+ // always allocating, unlike the new Integer(int) constructor.
+ this.keys.add(Integer.valueOf(keys[i]));
+ }
+ }
+ if (labels != null) {
+ this.labels.addAll(Arrays.asList(labels));
+ }
+ }
+
+ /**
+ * Returns the node type of this instruction, i.e. LOOKUPSWITCH_INSN.
+ */
+ @Override
+ public int getType() {
+ return LOOKUPSWITCH_INSN;
+ }
+
+ /**
+ * Makes the given method visitor visit this LOOKUPSWITCH instruction,
+ * unboxing the key list and resolving the handler labels first.
+ *
+ * @param mv a method visitor.
+ */
+ @Override
+ public void accept(final MethodVisitor mv) {
+ int[] keys = new int[this.keys.size()];
+ for (int i = 0; i < keys.length; ++i) {
+ keys[i] = this.keys.get(i).intValue();
+ }
+ Label[] labels = new Label[this.labels.size()];
+ for (int i = 0; i < labels.length; ++i) {
+ labels[i] = this.labels.get(i).getLabel();
+ }
+ mv.visitLookupSwitchInsn(dflt.getLabel(), keys, labels);
+ }
+
+ /**
+ * Returns a copy of this instruction with all labels remapped through the
+ * given map. The keys are copied separately because the constructor is
+ * called with a null key array.
+ *
+ * @param labels a map from original label nodes to their clones.
+ */
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ LookupSwitchInsnNode clone = new LookupSwitchInsnNode(clone(dflt,
+ labels), null, clone(this.labels, labels));
+ clone.keys.addAll(keys);
+ return clone;
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/MethodInsnNode.java b/src/asm/scala/tools/asm/tree/MethodInsnNode.java
new file mode 100644
index 0000000..c3036bc
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/MethodInsnNode.java
@@ -0,0 +1,107 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a method instruction. A method instruction is an
+ * instruction that invokes a method.
+ *
+ * @author Eric Bruneton
+ */
+public class MethodInsnNode extends AbstractInsnNode {
+
+ /**
+ * The internal name of the method's owner class (see
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}).
+ */
+ public String owner;
+
+ /**
+ * The method's name.
+ */
+ public String name;
+
+ /**
+ * The method's descriptor (see {@link scala.tools.asm.Type}).
+ */
+ public String desc;
+
+ /**
+ * Constructs a new {@link MethodInsnNode}.
+ *
+ * @param opcode the opcode of the type instruction to be constructed. This
+ * opcode must be INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or
+ * INVOKEINTERFACE.
+ * @param owner the internal name of the method's owner class (see
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}).
+ * @param name the method's name.
+ * @param desc the method's descriptor (see {@link scala.tools.asm.Type}).
+ */
+ public MethodInsnNode(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ super(opcode);
+ this.owner = owner;
+ this.name = name;
+ this.desc = desc;
+ }
+
+ /**
+ * Sets the opcode of this instruction.
+ *
+ * @param opcode the new instruction opcode. This opcode must be
+ * INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or INVOKEINTERFACE.
+ */
+ public void setOpcode(final int opcode) {
+ this.opcode = opcode;
+ }
+
+ /**
+ * Returns the node type of this instruction, i.e. METHOD_INSN.
+ */
+ @Override
+ public int getType() {
+ return METHOD_INSN;
+ }
+
+ /**
+ * Makes the given method visitor visit this method invocation instruction.
+ *
+ * @param mv a method visitor.
+ */
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitMethodInsn(opcode, owner, name, desc);
+ }
+
+ /**
+ * Returns a copy of this instruction; no label remapping is needed since
+ * method instructions have no label operands.
+ *
+ * @param labels a map from original label nodes to their clones (unused).
+ */
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new MethodInsnNode(opcode, owner, name, desc);
+ }
+}
\ No newline at end of file
diff --git a/src/asm/scala/tools/asm/tree/MethodNode.java b/src/asm/scala/tools/asm/tree/MethodNode.java
new file mode 100644
index 0000000..70ec39e
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/MethodNode.java
@@ -0,0 +1,645 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.ClassVisitor;
+import scala.tools.asm.Handle;
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+
+/**
+ * A node that represents a method.
+ *
+ * @author Eric Bruneton
+ */
+public class MethodNode extends MethodVisitor {
+
+ /**
+ * The method's access flags (see {@link Opcodes}). This field also
+ * indicates if the method is synthetic and/or deprecated.
+ */
+ public int access;
+
+ /**
+ * The method's name.
+ */
+ public String name;
+
+ /**
+ * The method's descriptor (see {@link Type}).
+ */
+ public String desc;
+
+ /**
+ * The method's signature. May be <tt>null</tt>.
+ */
+ public String signature;
+
+ /**
+ * The internal names of the method's exception classes (see
+ * {@link Type#getInternalName() getInternalName}). This list is a list of
+ * {@link String} objects.
+ */
+ public List<String> exceptions;
+
+ /**
+ * The runtime visible annotations of this method. This list is a list of
+ * {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates scala.tools.asm.tree.AnnotationNode
+ * @label visible
+ */
+ public List<AnnotationNode> visibleAnnotations;
+
+ /**
+ * The runtime invisible annotations of this method. This list is a list of
+ * {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates scala.tools.asm.tree.AnnotationNode
+ * @label invisible
+ */
+ public List<AnnotationNode> invisibleAnnotations;
+
+ /**
+ * The non standard attributes of this method. This list is a list of
+ * {@link Attribute} objects. May be <tt>null</tt>.
+ *
+ * @associates scala.tools.asm.Attribute
+ */
+ public List<Attribute> attrs;
+
+ /**
+ * The default value of this annotation interface method. This field must be
+ * a {@link Byte}, {@link Boolean}, {@link Character}, {@link Short},
+ * {@link Integer}, {@link Long}, {@link Float}, {@link Double},
+ * {@link String} or {@link Type}, or a two elements String array (for
+ * enumeration values), a {@link AnnotationNode}, or a {@link List} of
+ * values of one of the preceding types. May be <tt>null</tt>.
+ */
+ public Object annotationDefault;
+
+ /**
+ * The runtime visible parameter annotations of this method. These lists are
+ * lists of {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates scala.tools.asm.tree.AnnotationNode
+ * @label visible parameters
+ */
+ public List<AnnotationNode>[] visibleParameterAnnotations;
+
+ /**
+ * The runtime invisible parameter annotations of this method. These lists
+ * are lists of {@link AnnotationNode} objects. May be <tt>null</tt>.
+ *
+ * @associates scala.tools.asm.tree.AnnotationNode
+ * @label invisible parameters
+ */
+ public List<AnnotationNode>[] invisibleParameterAnnotations;
+
+ /**
+ * The instructions of this method. This list is a list of
+ * {@link AbstractInsnNode} objects.
+ *
+ * @associates scala.tools.asm.tree.AbstractInsnNode
+ * @label instructions
+ */
+ public InsnList instructions;
+
+ /**
+ * The try catch blocks of this method. This list is a list of
+ * {@link TryCatchBlockNode} objects.
+ *
+ * @associates scala.tools.asm.tree.TryCatchBlockNode
+ */
+ public List<TryCatchBlockNode> tryCatchBlocks;
+
+ /**
+ * The maximum stack size of this method.
+ */
+ public int maxStack;
+
+ /**
+ * The maximum number of local variables of this method.
+ */
+ public int maxLocals;
+
+ /**
+ * The local variables of this method. This list is a list of
+ * {@link LocalVariableNode} objects. May be <tt>null</tt>
+ *
+ * @associates scala.tools.asm.tree.LocalVariableNode
+ */
+ public List<LocalVariableNode> localVariables;
+
+ /**
+ * If the accept method has been called on this object.
+ */
+ private boolean visited;
+
+ /**
+ * Constructs an uninitialized {@link MethodNode}. <i>Subclasses must not
+ * use this constructor</i>. Instead, they must use the
+ * {@link #MethodNode(int)} version.
+ */
+ public MethodNode() {
+ this(Opcodes.ASM4);
+ }
+
+ /**
+ * Constructs an uninitialized {@link MethodNode}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ */
+ public MethodNode(final int api) {
+ super(api);
+ this.instructions = new InsnList();
+ }
+
+ /**
+ * Constructs a new {@link MethodNode}. <i>Subclasses must not use this
+ * constructor</i>. Instead, they must use the
+ * {@link #MethodNode(int, int, String, String, String, String[])} version.
+ *
+ * @param access the method's access flags (see {@link Opcodes}). This
+ * parameter also indicates if the method is synthetic and/or
+ * deprecated.
+ * @param name the method's name.
+ * @param desc the method's descriptor (see {@link Type}).
+ * @param signature the method's signature. May be <tt>null</tt>.
+ * @param exceptions the internal names of the method's exception classes
+ * (see {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
+ */
+ public MethodNode(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ this(Opcodes.ASM4, access, name, desc, signature, exceptions);
+ }
+
+ /**
+ * Constructs a new {@link MethodNode}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param access the method's access flags (see {@link Opcodes}). This
+ * parameter also indicates if the method is synthetic and/or
+ * deprecated.
+ * @param name the method's name.
+ * @param desc the method's descriptor (see {@link Type}).
+ * @param signature the method's signature. May be <tt>null</tt>.
+ * @param exceptions the internal names of the method's exception classes
+ * (see {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
+ */
+ public MethodNode(
+ final int api,
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ super(api);
+ this.access = access;
+ this.name = name;
+ this.desc = desc;
+ this.signature = signature;
+ this.exceptions = new ArrayList<String>(exceptions == null
+ ? 0
+ : exceptions.length);
+ boolean isAbstract = (access & Opcodes.ACC_ABSTRACT) != 0;
+ // Abstract methods have no code, hence no local variables to record.
+ if (!isAbstract) {
+ this.localVariables = new ArrayList<LocalVariableNode>(5);
+ }
+ this.tryCatchBlocks = new ArrayList<TryCatchBlockNode>();
+ if (exceptions != null) {
+ this.exceptions.addAll(Arrays.asList(exceptions));
+ }
+ this.instructions = new InsnList();
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the MethodVisitor abstract class
+ // ------------------------------------------------------------------------
+
+ @Override
+ public AnnotationVisitor visitAnnotationDefault() {
+ // The returned AnnotationNode stores the single default value into
+ // annotationDefault via this ArrayList subclass's add() override.
+ return new AnnotationNode(new ArrayList<Object>(0) {
+ @Override
+ public boolean add(final Object o) {
+ annotationDefault = o;
+ return super.add(o);
+ }
+ });
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ AnnotationNode an = new AnnotationNode(desc);
+ // Annotation lists are created lazily: most methods have none.
+ if (visible) {
+ if (visibleAnnotations == null) {
+ visibleAnnotations = new ArrayList<AnnotationNode>(1);
+ }
+ visibleAnnotations.add(an);
+ } else {
+ if (invisibleAnnotations == null) {
+ invisibleAnnotations = new ArrayList<AnnotationNode>(1);
+ }
+ invisibleAnnotations.add(an);
+ }
+ return an;
+ }
+
+ @Override
+ public AnnotationVisitor visitParameterAnnotation(
+ final int parameter,
+ final String desc,
+ final boolean visible)
+ {
+ AnnotationNode an = new AnnotationNode(desc);
+ if (visible) {
+ if (visibleParameterAnnotations == null) {
+ // One slot per declared parameter, derived from the descriptor.
+ // The cast is unchecked because generic arrays cannot be created.
+ int params = Type.getArgumentTypes(this.desc).length;
+ visibleParameterAnnotations = (List<AnnotationNode>[])new List<?>[params];
+ }
+ if (visibleParameterAnnotations[parameter] == null) {
+ visibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(1);
+ }
+ visibleParameterAnnotations[parameter].add(an);
+ } else {
+ if (invisibleParameterAnnotations == null) {
+ int params = Type.getArgumentTypes(this.desc).length;
+ invisibleParameterAnnotations = (List<AnnotationNode>[])new List<?>[params];
+ }
+ if (invisibleParameterAnnotations[parameter] == null) {
+ invisibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(1);
+ }
+ invisibleParameterAnnotations[parameter].add(an);
+ }
+ return an;
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ // Non standard attributes are collected lazily.
+ if (attrs == null) {
+ attrs = new ArrayList<Attribute>(1);
+ }
+ attrs.add(attr);
+ }
+
+ @Override
+ public void visitCode() {
+ // Nothing to do: instructions are collected as the visit methods below
+ // are called.
+ }
+
+ @Override
+ public void visitFrame(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack)
+ {
+ // Labels occurring in the frame's local/stack type arrays are converted
+ // to LabelNodes; getLabelNodes is defined later in this class.
+ instructions.add(new FrameNode(type, nLocal, local == null
+ ? null
+ : getLabelNodes(local), nStack, stack == null
+ ? null
+ : getLabelNodes(stack)));
+ }
+
+ // Each visitXxxInsn below appends the corresponding *InsnNode to
+ // 'instructions', converting any Label operand to its LabelNode via
+ // getLabelNode (presumably a one-to-one mapping; it is defined outside
+ // this excerpt).
+
+ @Override
+ public void visitInsn(final int opcode) {
+ instructions.add(new InsnNode(opcode));
+ }
+
+ @Override
+ public void visitIntInsn(final int opcode, final int operand) {
+ instructions.add(new IntInsnNode(opcode, operand));
+ }
+
+ @Override
+ public void visitVarInsn(final int opcode, final int var) {
+ instructions.add(new VarInsnNode(opcode, var));
+ }
+
+ @Override
+ public void visitTypeInsn(final int opcode, final String type) {
+ instructions.add(new TypeInsnNode(opcode, type));
+ }
+
+ @Override
+ public void visitFieldInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ instructions.add(new FieldInsnNode(opcode, owner, name, desc));
+ }
+
+ @Override
+ public void visitMethodInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ instructions.add(new MethodInsnNode(opcode, owner, name, desc));
+ }
+
+ @Override
+ public void visitInvokeDynamicInsn(
+ String name,
+ String desc,
+ Handle bsm,
+ Object... bsmArgs)
+ {
+ instructions.add(new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs));
+ }
+
+ @Override
+ public void visitJumpInsn(final int opcode, final Label label) {
+ instructions.add(new JumpInsnNode(opcode, getLabelNode(label)));
+ }
+
+ @Override
+ public void visitLabel(final Label label) {
+ instructions.add(getLabelNode(label));
+ }
+
+ @Override
+ public void visitLdcInsn(final Object cst) {
+ instructions.add(new LdcInsnNode(cst));
+ }
+
+ @Override
+ public void visitIincInsn(final int var, final int increment) {
+ instructions.add(new IincInsnNode(var, increment));
+ }
+
+ @Override
+ public void visitTableSwitchInsn(
+ final int min,
+ final int max,
+ final Label dflt,
+ final Label... labels)
+ {
+ instructions.add(new TableSwitchInsnNode(min,
+ max,
+ getLabelNode(dflt),
+ getLabelNodes(labels)));
+ }
+
+ @Override
+ public void visitLookupSwitchInsn(
+ final Label dflt,
+ final int[] keys,
+ final Label[] labels)
+ {
+ instructions.add(new LookupSwitchInsnNode(getLabelNode(dflt),
+ keys,
+ getLabelNodes(labels)));
+ }
+
+ @Override
+ public void visitMultiANewArrayInsn(final String desc, final int dims) {
+ instructions.add(new MultiANewArrayInsnNode(desc, dims));
+ }
+
+ @Override
+ public void visitTryCatchBlock(
+ final Label start,
+ final Label end,
+ final Label handler,
+ final String type)
+ {
+ tryCatchBlocks.add(new TryCatchBlockNode(getLabelNode(start),
+ getLabelNode(end),
+ getLabelNode(handler),
+ type));
+ }
+
+ @Override
+ public void visitLocalVariable(
+ final String name,
+ final String desc,
+ final String signature,
+ final Label start,
+ final Label end,
+ final int index)
+ {
+ localVariables.add(new LocalVariableNode(name,
+ desc,
+ signature,
+ getLabelNode(start),
+ getLabelNode(end),
+ index));
+ }
+
+ @Override
+ public void visitLineNumber(final int line, final Label start) {
+ instructions.add(new LineNumberNode(line, getLabelNode(start)));
+ }
+
+ @Override
+ public void visitMaxs(final int maxStack, final int maxLocals) {
+ this.maxStack = maxStack;
+ this.maxLocals = maxLocals;
+ }
+
+ @Override
+ public void visitEnd() {
+ }
+
+ /**
+ * Returns the LabelNode corresponding to the given Label. Creates a new
+ * LabelNode if necessary. The default implementation of this method uses
+ * the {@link Label#info} field to store associations between labels and
+ * label nodes.
+ *
+ * @param l a Label.
+ * @return the LabelNode corresponding to l.
+ */
+ protected LabelNode getLabelNode(final Label l) {
+ if (!(l.info instanceof LabelNode)) {
+ l.info = new LabelNode(l);
+ }
+ return (LabelNode) l.info;
+ }
+
+ private LabelNode[] getLabelNodes(final Label[] l) {
+ LabelNode[] nodes = new LabelNode[l.length];
+ for (int i = 0; i < l.length; ++i) {
+ nodes[i] = getLabelNode(l[i]);
+ }
+ return nodes;
+ }
+
+ private Object[] getLabelNodes(final Object[] objs) {
+ Object[] nodes = new Object[objs.length];
+ for (int i = 0; i < objs.length; ++i) {
+ Object o = objs[i];
+ if (o instanceof Label) {
+ o = getLabelNode((Label) o);
+ }
+ nodes[i] = o;
+ }
+ return nodes;
+ }
+
+ // ------------------------------------------------------------------------
+ // Accept method
+ // ------------------------------------------------------------------------
+
+ /**
+ * Checks that this method node is compatible with the given ASM API
+ * version. This methods checks that this node, and all its nodes
+ * recursively, do not contain elements that were introduced in more recent
+ * versions of the ASM API than the given version.
+ *
+ * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ */
+ public void check(final int api) {
+ // nothing to do
+ }
+
+ /**
+ * Makes the given class visitor visit this method.
+ *
+ * @param cv a class visitor.
+ */
+ public void accept(final ClassVisitor cv) {
+ String[] exceptions = new String[this.exceptions.size()];
+ this.exceptions.toArray(exceptions);
+ MethodVisitor mv = cv.visitMethod(access,
+ name,
+ desc,
+ signature,
+ exceptions);
+ if (mv != null) {
+ accept(mv);
+ }
+ }
+
+ /**
+ * Makes the given method visitor visit this method.
+ *
+ * @param mv a method visitor.
+ */
+ public void accept(final MethodVisitor mv) {
+ // visits the method attributes
+ int i, j, n;
+ if (annotationDefault != null) {
+ AnnotationVisitor av = mv.visitAnnotationDefault();
+ AnnotationNode.accept(av, null, annotationDefault);
+ if (av != null) {
+ av.visitEnd();
+ }
+ }
+ n = visibleAnnotations == null ? 0 : visibleAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ AnnotationNode an = visibleAnnotations.get(i);
+ an.accept(mv.visitAnnotation(an.desc, true));
+ }
+ n = invisibleAnnotations == null ? 0 : invisibleAnnotations.size();
+ for (i = 0; i < n; ++i) {
+ AnnotationNode an = invisibleAnnotations.get(i);
+ an.accept(mv.visitAnnotation(an.desc, false));
+ }
+ n = visibleParameterAnnotations == null
+ ? 0
+ : visibleParameterAnnotations.length;
+ for (i = 0; i < n; ++i) {
+ List<?> l = visibleParameterAnnotations[i];
+ if (l == null) {
+ continue;
+ }
+ for (j = 0; j < l.size(); ++j) {
+ AnnotationNode an = (AnnotationNode) l.get(j);
+ an.accept(mv.visitParameterAnnotation(i, an.desc, true));
+ }
+ }
+ n = invisibleParameterAnnotations == null
+ ? 0
+ : invisibleParameterAnnotations.length;
+ for (i = 0; i < n; ++i) {
+ List<?> l = invisibleParameterAnnotations[i];
+ if (l == null) {
+ continue;
+ }
+ for (j = 0; j < l.size(); ++j) {
+ AnnotationNode an = (AnnotationNode) l.get(j);
+ an.accept(mv.visitParameterAnnotation(i, an.desc, false));
+ }
+ }
+ if (visited) {
+ instructions.resetLabels();
+ }
+ n = attrs == null ? 0 : attrs.size();
+ for (i = 0; i < n; ++i) {
+ mv.visitAttribute(attrs.get(i));
+ }
+ // visits the method's code
+ if (instructions.size() > 0) {
+ mv.visitCode();
+ // visits try catch blocks
+ n = tryCatchBlocks == null ? 0 : tryCatchBlocks.size();
+ for (i = 0; i < n; ++i) {
+ tryCatchBlocks.get(i).accept(mv);
+ }
+ // visits instructions
+ instructions.accept(mv);
+ // visits local variables
+ n = localVariables == null ? 0 : localVariables.size();
+ for (i = 0; i < n; ++i) {
+ localVariables.get(i).accept(mv);
+ }
+ // visits maxs
+ mv.visitMaxs(maxStack, maxLocals);
+ visited = true;
+ }
+ mv.visitEnd();
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
new file mode 100644
index 0000000..9dfba77
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
@@ -0,0 +1,81 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents a MULTIANEWARRAY instruction.
+ *
+ * @author Eric Bruneton
+ */
+public class MultiANewArrayInsnNode extends AbstractInsnNode {
+
+ /**
+ * An array type descriptor (see {@link org.objectweb.asm.Type}).
+ */
+ public String desc;
+
+ /**
+ * Number of dimensions of the array to allocate.
+ */
+ public int dims;
+
+ /**
+ * Constructs a new {@link MultiANewArrayInsnNode}.
+ *
+ * @param desc an array type descriptor (see {@link org.objectweb.asm.Type}).
+ * @param dims number of dimensions of the array to allocate.
+ */
+ public MultiANewArrayInsnNode(final String desc, final int dims) {
+ super(Opcodes.MULTIANEWARRAY);
+ this.desc = desc;
+ this.dims = dims;
+ }
+
+ @Override
+ public int getType() {
+ return MULTIANEWARRAY_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitMultiANewArrayInsn(desc, dims);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new MultiANewArrayInsnNode(desc, dims);
+ }
+
+}
\ No newline at end of file
diff --git a/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
new file mode 100644
index 0000000..929ad9b
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
@@ -0,0 +1,115 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A node that represents a TABLESWITCH instruction.
+ *
+ * @author Eric Bruneton
+ */
+public class TableSwitchInsnNode extends AbstractInsnNode {
+
+ /**
+ * The minimum key value.
+ */
+ public int min;
+
+ /**
+ * The maximum key value.
+ */
+ public int max;
+
+ /**
+ * Beginning of the default handler block.
+ */
+ public LabelNode dflt;
+
+ /**
+ * Beginnings of the handler blocks. This list is a list of
+ * {@link LabelNode} objects.
+ */
+ public List<LabelNode> labels;
+
+ /**
+ * Constructs a new {@link TableSwitchInsnNode}.
+ *
+ * @param min the minimum key value.
+ * @param max the maximum key value.
+ * @param dflt beginning of the default handler block.
+ * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
+ * the beginning of the handler block for the <tt>min + i</tt> key.
+ */
+ public TableSwitchInsnNode(
+ final int min,
+ final int max,
+ final LabelNode dflt,
+ final LabelNode... labels)
+ {
+ super(Opcodes.TABLESWITCH);
+ this.min = min;
+ this.max = max;
+ this.dflt = dflt;
+ this.labels = new ArrayList<LabelNode>();
+ if (labels != null) {
+ this.labels.addAll(Arrays.asList(labels));
+ }
+ }
+
+ @Override
+ public int getType() {
+ return TABLESWITCH_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ Label[] labels = new Label[this.labels.size()];
+ for (int i = 0; i < labels.length; ++i) {
+ labels[i] = this.labels.get(i).getLabel();
+ }
+ mv.visitTableSwitchInsn(min, max, dflt.getLabel(), labels);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new TableSwitchInsnNode(min,
+ max,
+ clone(dflt, labels),
+ clone(this.labels, labels));
+ }
+}
\ No newline at end of file
diff --git a/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
new file mode 100644
index 0000000..375b4cf
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
@@ -0,0 +1,94 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a try catch block.
+ *
+ * @author Eric Bruneton
+ */
+public class TryCatchBlockNode {
+
+ /**
+ * Beginning of the exception handler's scope (inclusive).
+ */
+ public LabelNode start;
+
+ /**
+ * End of the exception handler's scope (exclusive).
+ */
+ public LabelNode end;
+
+ /**
+ * Beginning of the exception handler's code.
+ */
+ public LabelNode handler;
+
+ /**
+ * Internal name of the type of exceptions handled by the handler. May be
+ * <tt>null</tt> to catch any exceptions (for "finally" blocks).
+ */
+ public String type;
+
+ /**
+ * Constructs a new {@link TryCatchBlockNode}.
+ *
+ * @param start beginning of the exception handler's scope (inclusive).
+ * @param end end of the exception handler's scope (exclusive).
+ * @param handler beginning of the exception handler's code.
+ * @param type internal name of the type of exceptions handled by the
+ * handler, or <tt>null</tt> to catch any exceptions (for "finally"
+ * blocks).
+ */
+ public TryCatchBlockNode(
+ final LabelNode start,
+ final LabelNode end,
+ final LabelNode handler,
+ final String type)
+ {
+ this.start = start;
+ this.end = end;
+ this.handler = handler;
+ this.type = type;
+ }
+
+ /**
+ * Makes the given visitor visit this try catch block.
+ *
+ * @param mv a method visitor.
+ */
+ public void accept(final MethodVisitor mv) {
+ mv.visitTryCatchBlock(start.getLabel(), end.getLabel(), handler == null
+ ? null
+ : handler.getLabel(), type);
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/TypeInsnNode.java b/src/asm/scala/tools/asm/tree/TypeInsnNode.java
new file mode 100644
index 0000000..0b2666c
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/TypeInsnNode.java
@@ -0,0 +1,87 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a type instruction. A type instruction is an
+ * instruction that takes a type descriptor as parameter.
+ *
+ * @author Eric Bruneton
+ */
+public class TypeInsnNode extends AbstractInsnNode {
+
+ /**
+ * The operand of this instruction. This operand is an internal name (see
+ * {@link org.objectweb.asm.Type}).
+ */
+ public String desc;
+
+ /**
+ * Constructs a new {@link TypeInsnNode}.
+ *
+ * @param opcode the opcode of the type instruction to be constructed. This
+ * opcode must be NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
+ * @param desc the operand of the instruction to be constructed. This
+ * operand is an internal name (see {@link org.objectweb.asm.Type}).
+ */
+ public TypeInsnNode(final int opcode, final String desc) {
+ super(opcode);
+ this.desc = desc;
+ }
+
+ /**
+ * Sets the opcode of this instruction.
+ *
+ * @param opcode the new instruction opcode. This opcode must be NEW,
+ * ANEWARRAY, CHECKCAST or INSTANCEOF.
+ */
+ public void setOpcode(final int opcode) {
+ this.opcode = opcode;
+ }
+
+ @Override
+ public int getType() {
+ return TYPE_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitTypeInsn(opcode, desc);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new TypeInsnNode(opcode, desc);
+ }
+}
\ No newline at end of file
diff --git a/src/asm/scala/tools/asm/tree/VarInsnNode.java b/src/asm/scala/tools/asm/tree/VarInsnNode.java
new file mode 100644
index 0000000..89f572d
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/VarInsnNode.java
@@ -0,0 +1,90 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree;
+
+import java.util.Map;
+
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A node that represents a local variable instruction. A local variable
+ * instruction is an instruction that loads or stores the value of a local
+ * variable.
+ *
+ * @author Eric Bruneton
+ */
+public class VarInsnNode extends AbstractInsnNode {
+
+ /**
+ * The operand of this instruction. This operand is the index of a local
+ * variable.
+ */
+ public int var;
+
+ /**
+ * Constructs a new {@link VarInsnNode}.
+ *
+ * @param opcode the opcode of the local variable instruction to be
+ * constructed. This opcode must be ILOAD, LLOAD, FLOAD, DLOAD,
+ * ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET.
+ * @param var the operand of the instruction to be constructed. This operand
+ * is the index of a local variable.
+ */
+ public VarInsnNode(final int opcode, final int var) {
+ super(opcode);
+ this.var = var;
+ }
+
+ /**
+ * Sets the opcode of this instruction.
+ *
+ * @param opcode the new instruction opcode. This opcode must be ILOAD,
+ * LLOAD, FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE
+ * or RET.
+ */
+ public void setOpcode(final int opcode) {
+ this.opcode = opcode;
+ }
+
+ @Override
+ public int getType() {
+ return VAR_INSN;
+ }
+
+ @Override
+ public void accept(final MethodVisitor mv) {
+ mv.visitVarInsn(opcode, var);
+ }
+
+ @Override
+ public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
+ return new VarInsnNode(opcode, var);
+ }
+}
\ No newline at end of file
diff --git a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
new file mode 100644
index 0000000..df387b0
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
@@ -0,0 +1,549 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.AbstractInsnNode;
+import scala.tools.asm.tree.IincInsnNode;
+import scala.tools.asm.tree.InsnList;
+import scala.tools.asm.tree.JumpInsnNode;
+import scala.tools.asm.tree.LabelNode;
+import scala.tools.asm.tree.LookupSwitchInsnNode;
+import scala.tools.asm.tree.MethodNode;
+import scala.tools.asm.tree.TableSwitchInsnNode;
+import scala.tools.asm.tree.TryCatchBlockNode;
+import scala.tools.asm.tree.VarInsnNode;
+
+/**
+ * A semantic bytecode analyzer. <i>This class does not fully check that JSR and
+ * RET instructions are valid.</i>
+ *
+ * @param <V> type of the Value used for the analysis.
+ *
+ * @author Eric Bruneton
+ */
+public class Analyzer<V extends Value> implements Opcodes {
+
+ private final Interpreter<V> interpreter;
+
+ private int n;
+
+ private InsnList insns;
+
+ private List<TryCatchBlockNode>[] handlers;
+
+ private Frame<V>[] frames;
+
+ private Subroutine[] subroutines;
+
+ private boolean[] queued;
+
+ private int[] queue;
+
+ private int top;
+
+ /**
+ * Constructs a new {@link Analyzer}.
+ *
+ * @param interpreter the interpreter to be used to symbolically interpret
+ * the bytecode instructions.
+ */
+ public Analyzer(final Interpreter<V> interpreter) {
+ this.interpreter = interpreter;
+ }
+
+ /**
+ * Analyzes the given method.
+ *
+ * @param owner the internal name of the class to which the method belongs.
+ * @param m the method to be analyzed.
+ * @return the symbolic state of the execution stack frame at each bytecode
+ * instruction of the method. The size of the returned array is
+ * equal to the number of instructions (and labels) of the method. A
+ * given frame is <tt>null</tt> if and only if the corresponding
+ * instruction cannot be reached (dead code).
+ * @throws AnalyzerException if a problem occurs during the analysis.
+ */
+ public Frame<V>[] analyze(final String owner, final MethodNode m)
+ throws AnalyzerException
+ {
+ if ((m.access & (ACC_ABSTRACT | ACC_NATIVE)) != 0) {
+ frames = (Frame<V>[])new Frame<?>[0];
+ return frames;
+ }
+ n = m.instructions.size();
+ insns = m.instructions;
+ handlers = (List<TryCatchBlockNode>[])new List<?>[n];
+ frames = (Frame<V>[])new Frame<?>[n];
+ subroutines = new Subroutine[n];
+ queued = new boolean[n];
+ queue = new int[n];
+ top = 0;
+
+ // computes exception handlers for each instruction
+ for (int i = 0; i < m.tryCatchBlocks.size(); ++i) {
+ TryCatchBlockNode tcb = m.tryCatchBlocks.get(i);
+ int begin = insns.indexOf(tcb.start);
+ int end = insns.indexOf(tcb.end);
+ for (int j = begin; j < end; ++j) {
+ List<TryCatchBlockNode> insnHandlers = handlers[j];
+ if (insnHandlers == null) {
+ insnHandlers = new ArrayList<TryCatchBlockNode>();
+ handlers[j] = insnHandlers;
+ }
+ insnHandlers.add(tcb);
+ }
+ }
+
+ // computes the subroutine for each instruction:
+ Subroutine main = new Subroutine(null, m.maxLocals, null);
+ List<AbstractInsnNode> subroutineCalls = new ArrayList<AbstractInsnNode>();
+ Map<LabelNode, Subroutine> subroutineHeads = new HashMap<LabelNode, Subroutine>();
+ findSubroutine(0, main, subroutineCalls);
+ while (!subroutineCalls.isEmpty()) {
+ JumpInsnNode jsr = (JumpInsnNode) subroutineCalls.remove(0);
+ Subroutine sub = subroutineHeads.get(jsr.label);
+ if (sub == null) {
+ sub = new Subroutine(jsr.label, m.maxLocals, jsr);
+ subroutineHeads.put(jsr.label, sub);
+ findSubroutine(insns.indexOf(jsr.label), sub, subroutineCalls);
+ } else {
+ sub.callers.add(jsr);
+ }
+ }
+ for (int i = 0; i < n; ++i) {
+ if (subroutines[i] != null && subroutines[i].start == null) {
+ subroutines[i] = null;
+ }
+ }
+
+ // initializes the data structures for the control flow analysis
+ Frame<V> current = newFrame(m.maxLocals, m.maxStack);
+ Frame<V> handler = newFrame(m.maxLocals, m.maxStack);
+ current.setReturn(interpreter.newValue(Type.getReturnType(m.desc)));
+ Type[] args = Type.getArgumentTypes(m.desc);
+ int local = 0;
+ if ((m.access & ACC_STATIC) == 0) {
+ Type ctype = Type.getObjectType(owner);
+ current.setLocal(local++, interpreter.newValue(ctype));
+ }
+ for (int i = 0; i < args.length; ++i) {
+ current.setLocal(local++, interpreter.newValue(args[i]));
+ if (args[i].getSize() == 2) {
+ current.setLocal(local++, interpreter.newValue(null));
+ }
+ }
+ while (local < m.maxLocals) {
+ current.setLocal(local++, interpreter.newValue(null));
+ }
+ merge(0, current, null);
+
+ init(owner, m);
+
+ // control flow analysis
+ while (top > 0) {
+ int insn = queue[--top];
+ Frame<V> f = frames[insn];
+ Subroutine subroutine = subroutines[insn];
+ queued[insn] = false;
+
+ AbstractInsnNode insnNode = null;
+ try {
+ insnNode = m.instructions.get(insn);
+ int insnOpcode = insnNode.getOpcode();
+ int insnType = insnNode.getType();
+
+ if (insnType == AbstractInsnNode.LABEL
+ || insnType == AbstractInsnNode.LINE
+ || insnType == AbstractInsnNode.FRAME)
+ {
+ merge(insn + 1, f, subroutine);
+ newControlFlowEdge(insn, insn + 1);
+ } else {
+ current.init(f).execute(insnNode, interpreter);
+ subroutine = subroutine == null ? null : subroutine.copy();
+
+ if (insnNode instanceof JumpInsnNode) {
+ JumpInsnNode j = (JumpInsnNode) insnNode;
+ if (insnOpcode != GOTO && insnOpcode != JSR) {
+ merge(insn + 1, current, subroutine);
+ newControlFlowEdge(insn, insn + 1);
+ }
+ int jump = insns.indexOf(j.label);
+ if (insnOpcode == JSR) {
+ merge(jump, current, new Subroutine(j.label,
+ m.maxLocals,
+ j));
+ } else {
+ merge(jump, current, subroutine);
+ }
+ newControlFlowEdge(insn, jump);
+ } else if (insnNode instanceof LookupSwitchInsnNode) {
+ LookupSwitchInsnNode lsi = (LookupSwitchInsnNode) insnNode;
+ int jump = insns.indexOf(lsi.dflt);
+ merge(jump, current, subroutine);
+ newControlFlowEdge(insn, jump);
+ for (int j = 0; j < lsi.labels.size(); ++j) {
+ LabelNode label = lsi.labels.get(j);
+ jump = insns.indexOf(label);
+ merge(jump, current, subroutine);
+ newControlFlowEdge(insn, jump);
+ }
+ } else if (insnNode instanceof TableSwitchInsnNode) {
+ TableSwitchInsnNode tsi = (TableSwitchInsnNode) insnNode;
+ int jump = insns.indexOf(tsi.dflt);
+ merge(jump, current, subroutine);
+ newControlFlowEdge(insn, jump);
+ for (int j = 0; j < tsi.labels.size(); ++j) {
+ LabelNode label = tsi.labels.get(j);
+ jump = insns.indexOf(label);
+ merge(jump, current, subroutine);
+ newControlFlowEdge(insn, jump);
+ }
+ } else if (insnOpcode == RET) {
+ if (subroutine == null) {
+ throw new AnalyzerException(insnNode, "RET instruction outside of a sub routine");
+ }
+ for (int i = 0; i < subroutine.callers.size(); ++i) {
+ JumpInsnNode caller = subroutine.callers.get(i);
+ int call = insns.indexOf(caller);
+ if (frames[call] != null) {
+ merge(call + 1,
+ frames[call],
+ current,
+ subroutines[call],
+ subroutine.access);
+ newControlFlowEdge(insn, call + 1);
+ }
+ }
+ } else if (insnOpcode != ATHROW
+ && (insnOpcode < IRETURN || insnOpcode > RETURN))
+ {
+ if (subroutine != null) {
+ if (insnNode instanceof VarInsnNode) {
+ int var = ((VarInsnNode) insnNode).var;
+ subroutine.access[var] = true;
+ if (insnOpcode == LLOAD || insnOpcode == DLOAD
+ || insnOpcode == LSTORE
+ || insnOpcode == DSTORE)
+ {
+ subroutine.access[var + 1] = true;
+ }
+ } else if (insnNode instanceof IincInsnNode) {
+ int var = ((IincInsnNode) insnNode).var;
+ subroutine.access[var] = true;
+ }
+ }
+ merge(insn + 1, current, subroutine);
+ newControlFlowEdge(insn, insn + 1);
+ }
+ }
+
+ List<TryCatchBlockNode> insnHandlers = handlers[insn];
+ if (insnHandlers != null) {
+ for (int i = 0; i < insnHandlers.size(); ++i) {
+ TryCatchBlockNode tcb = insnHandlers.get(i);
+ Type type;
+ if (tcb.type == null) {
+ type = Type.getObjectType("java/lang/Throwable");
+ } else {
+ type = Type.getObjectType(tcb.type);
+ }
+ int jump = insns.indexOf(tcb.handler);
+ if (newControlFlowExceptionEdge(insn, tcb)) {
+ handler.init(f);
+ handler.clearStack();
+ handler.push(interpreter.newValue(type));
+ merge(jump, handler, subroutine);
+ }
+ }
+ }
+ } catch (AnalyzerException e) {
+ throw new AnalyzerException(e.node, "Error at instruction " + insn
+ + ": " + e.getMessage(), e);
+ } catch (Exception e) {
+ throw new AnalyzerException(insnNode, "Error at instruction " + insn
+ + ": " + e.getMessage(), e);
+ }
+ }
+
+ return frames;
+ }
+
+ private void findSubroutine(int insn, final Subroutine sub, final List<AbstractInsnNode> calls)
+ throws AnalyzerException
+ {
+ while (true) {
+ if (insn < 0 || insn >= n) {
+ throw new AnalyzerException(null, "Execution can fall off end of the code");
+ }
+ if (subroutines[insn] != null) {
+ return;
+ }
+ subroutines[insn] = sub.copy();
+ AbstractInsnNode node = insns.get(insn);
+
+ // calls findSubroutine recursively on normal successors
+ if (node instanceof JumpInsnNode) {
+ if (node.getOpcode() == JSR) {
+ // do not follow a JSR, it leads to another subroutine!
+ calls.add(node);
+ } else {
+ JumpInsnNode jnode = (JumpInsnNode) node;
+ findSubroutine(insns.indexOf(jnode.label), sub, calls);
+ }
+ } else if (node instanceof TableSwitchInsnNode) {
+ TableSwitchInsnNode tsnode = (TableSwitchInsnNode) node;
+ findSubroutine(insns.indexOf(tsnode.dflt), sub, calls);
+ for (int i = tsnode.labels.size() - 1; i >= 0; --i) {
+ LabelNode l = tsnode.labels.get(i);
+ findSubroutine(insns.indexOf(l), sub, calls);
+ }
+ } else if (node instanceof LookupSwitchInsnNode) {
+ LookupSwitchInsnNode lsnode = (LookupSwitchInsnNode) node;
+ findSubroutine(insns.indexOf(lsnode.dflt), sub, calls);
+ for (int i = lsnode.labels.size() - 1; i >= 0; --i) {
+ LabelNode l = lsnode.labels.get(i);
+ findSubroutine(insns.indexOf(l), sub, calls);
+ }
+ }
+
+ // calls findSubroutine recursively on exception handler successors
+ List<TryCatchBlockNode> insnHandlers = handlers[insn];
+ if (insnHandlers != null) {
+ for (int i = 0; i < insnHandlers.size(); ++i) {
+ TryCatchBlockNode tcb = insnHandlers.get(i);
+ findSubroutine(insns.indexOf(tcb.handler), sub, calls);
+ }
+ }
+
+ // if insn does not fall through to the next instruction, return.
+ switch (node.getOpcode()) {
+ case GOTO:
+ case RET:
+ case TABLESWITCH:
+ case LOOKUPSWITCH:
+ case IRETURN:
+ case LRETURN:
+ case FRETURN:
+ case DRETURN:
+ case ARETURN:
+ case RETURN:
+ case ATHROW:
+ return;
+ }
+ insn++;
+ }
+ }
+
+ /**
+ * Returns the symbolic stack frame for each instruction of the last
+ * recently analyzed method.
+ *
+ * @return the symbolic state of the execution stack frame at each bytecode
+ * instruction of the method. The size of the returned array is
+ * equal to the number of instructions (and labels) of the method. A
+ * given frame is <tt>null</tt> if the corresponding instruction
+ * cannot be reached, or if an error occurred during the analysis of
+ * the method.
+ */
+ public Frame<V>[] getFrames() {
+ return frames;
+ }
+
+ /**
+ * Returns the exception handlers for the given instruction.
+ *
+ * @param insn the index of an instruction of the last recently analyzed
+ * method.
+ * @return a list of {@link TryCatchBlockNode} objects.
+ */
+ public List<TryCatchBlockNode> getHandlers(final int insn) {
+ return handlers[insn];
+ }
+
+ /**
+ * Initializes this analyzer. This method is called just before the
+ * execution of control flow analysis loop in #analyze. The default
+ * implementation of this method does nothing.
+ *
+ * @param owner the internal name of the class to which the method belongs.
+ * @param m the method to be analyzed.
+ * @throws AnalyzerException if a problem occurs.
+ */
+ protected void init(String owner, MethodNode m) throws AnalyzerException {
+ }
+
+ /**
+ * Constructs a new frame with the given size.
+ *
+ * @param nLocals the maximum number of local variables of the frame.
+ * @param nStack the maximum stack size of the frame.
+ * @return the created frame.
+ */
+ protected Frame<V> newFrame(final int nLocals, final int nStack) {
+ return new Frame<V>(nLocals, nStack);
+ }
+
+ /**
+ * Constructs a new frame that is identical to the given frame.
+ *
+ * @param src a frame.
+ * @return the created frame.
+ */
+ protected Frame<V> newFrame(final Frame<? extends V> src) {
+ return new Frame<V>(src);
+ }
+
+ /**
+ * Creates a control flow graph edge. The default implementation of this
+ * method does nothing. It can be overridden in order to construct the
+ * control flow graph of a method (this method is called by the
+ * {@link #analyze analyze} method during its visit of the method's code).
+ *
+ * @param insn an instruction index.
+ * @param successor index of a successor instruction.
+ */
+ protected void newControlFlowEdge(final int insn, final int successor) {
+ }
+
+ /**
+ * Creates a control flow graph edge corresponding to an exception handler.
+ * The default implementation of this method does nothing. It can be
+ * overridden in order to construct the control flow graph of a method (this
+ * method is called by the {@link #analyze analyze} method during its visit
+ * of the method's code).
+ *
+ * @param insn an instruction index.
+ * @param successor index of a successor instruction.
+ * @return true if this edge must be considered in the data flow analysis
+ * performed by this analyzer, or false otherwise. The default
+ * implementation of this method always returns true.
+ */
+ protected boolean newControlFlowExceptionEdge(
+ final int insn,
+ final int successor)
+ {
+ return true;
+ }
+
+ /**
+ * Creates a control flow graph edge corresponding to an exception handler.
+ * The default implementation of this method delegates to
+ * {@link #newControlFlowExceptionEdge(int, int)
+ * newControlFlowExceptionEdge(int, int)}. It can be overridden in order to
+ * construct the control flow graph of a method (this method is called by
+ * the {@link #analyze analyze} method during its visit of the method's
+ * code).
+ *
+ * @param insn an instruction index.
+ * @param tcb TryCatchBlockNode corresponding to this edge.
+ * @return true if this edge must be considered in the data flow analysis
+ * performed by this analyzer, or false otherwise. The default
+ * implementation of this method delegates to
+ * {@link #newControlFlowExceptionEdge(int, int)
+ * newControlFlowExceptionEdge(int, int)}.
+ */
+ protected boolean newControlFlowExceptionEdge(
+ final int insn,
+ final TryCatchBlockNode tcb)
+ {
+ return newControlFlowExceptionEdge(insn, insns.indexOf(tcb.handler));
+ }
+
+ // -------------------------------------------------------------------------
+
+ private void merge(
+ final int insn,
+ final Frame<V> frame,
+ final Subroutine subroutine) throws AnalyzerException
+ {
+ Frame<V> oldFrame = frames[insn];
+ Subroutine oldSubroutine = subroutines[insn];
+ boolean changes;
+
+ if (oldFrame == null) {
+ frames[insn] = newFrame(frame);
+ changes = true;
+ } else {
+ changes = oldFrame.merge(frame, interpreter);
+ }
+
+ if (oldSubroutine == null) {
+ if (subroutine != null) {
+ subroutines[insn] = subroutine.copy();
+ changes = true;
+ }
+ } else {
+ if (subroutine != null) {
+ changes |= oldSubroutine.merge(subroutine);
+ }
+ }
+ if (changes && !queued[insn]) {
+ queued[insn] = true;
+ queue[top++] = insn;
+ }
+ }
+
+ private void merge(
+ final int insn,
+ final Frame<V> beforeJSR,
+ final Frame<V> afterRET,
+ final Subroutine subroutineBeforeJSR,
+ final boolean[] access) throws AnalyzerException
+ {
+ Frame<V> oldFrame = frames[insn];
+ Subroutine oldSubroutine = subroutines[insn];
+ boolean changes;
+
+ afterRET.merge(beforeJSR, access);
+
+ if (oldFrame == null) {
+ frames[insn] = newFrame(afterRET);
+ changes = true;
+ } else {
+ changes = oldFrame.merge(afterRET, interpreter);
+ }
+
+ if (oldSubroutine != null && subroutineBeforeJSR != null) {
+ changes |= oldSubroutine.merge(subroutineBeforeJSR);
+ }
+ if (changes && !queued[insn]) {
+ queued[insn] = true;
+ queue[top++] = insn;
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
new file mode 100644
index 0000000..a89bb35
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
@@ -0,0 +1,64 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import scala.tools.asm.tree.AbstractInsnNode;
+
+/**
+ * Thrown if a problem occurs during the analysis of a method.
+ *
+ * @author Bing Ran
+ * @author Eric Bruneton
+ */
+public class AnalyzerException extends Exception {
+
+ public final AbstractInsnNode node;
+
+ public AnalyzerException(final AbstractInsnNode node, final String msg) {
+ super(msg);
+ this.node = node;
+ }
+
+ public AnalyzerException(final AbstractInsnNode node, final String msg, final Throwable exception) {
+ super(msg, exception);
+ this.node = node;
+ }
+
+ public AnalyzerException(
+ final AbstractInsnNode node,
+ final String msg,
+ final Object expected,
+ final Value encountered)
+ {
+ super((msg == null ? "Expected " : msg + ": expected ") + expected
+ + ", but found " + encountered);
+ this.node = node;
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
new file mode 100644
index 0000000..64ddcc1
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
@@ -0,0 +1,365 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.List;
+
+import scala.tools.asm.Handle;
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.AbstractInsnNode;
+import scala.tools.asm.tree.FieldInsnNode;
+import scala.tools.asm.tree.IntInsnNode;
+import scala.tools.asm.tree.InvokeDynamicInsnNode;
+import scala.tools.asm.tree.LdcInsnNode;
+import scala.tools.asm.tree.MethodInsnNode;
+import scala.tools.asm.tree.MultiANewArrayInsnNode;
+import scala.tools.asm.tree.TypeInsnNode;
+
+/**
+ * An {@link Interpreter} for {@link BasicValue} values.
+ *
+ * @author Eric Bruneton
+ * @author Bing Ran
+ */
+public class BasicInterpreter extends Interpreter<BasicValue> implements
+ Opcodes
+{
+
+ public BasicInterpreter() {
+ super(ASM4);
+ }
+
+ protected BasicInterpreter(final int api) {
+ super(api);
+ }
+
+ @Override
+ public BasicValue newValue(final Type type) {
+ if (type == null) {
+ return BasicValue.UNINITIALIZED_VALUE;
+ }
+ switch (type.getSort()) {
+ case Type.VOID:
+ return null;
+ case Type.BOOLEAN:
+ case Type.CHAR:
+ case Type.BYTE:
+ case Type.SHORT:
+ case Type.INT:
+ return BasicValue.INT_VALUE;
+ case Type.FLOAT:
+ return BasicValue.FLOAT_VALUE;
+ case Type.LONG:
+ return BasicValue.LONG_VALUE;
+ case Type.DOUBLE:
+ return BasicValue.DOUBLE_VALUE;
+ case Type.ARRAY:
+ case Type.OBJECT:
+ return BasicValue.REFERENCE_VALUE;
+ default:
+ throw new Error("Internal error");
+ }
+ }
+
+ @Override
+ public BasicValue newOperation(final AbstractInsnNode insn)
+ throws AnalyzerException
+ {
+ switch (insn.getOpcode()) {
+ case ACONST_NULL:
+ return newValue(Type.getObjectType("null"));
+ case ICONST_M1:
+ case ICONST_0:
+ case ICONST_1:
+ case ICONST_2:
+ case ICONST_3:
+ case ICONST_4:
+ case ICONST_5:
+ return BasicValue.INT_VALUE;
+ case LCONST_0:
+ case LCONST_1:
+ return BasicValue.LONG_VALUE;
+ case FCONST_0:
+ case FCONST_1:
+ case FCONST_2:
+ return BasicValue.FLOAT_VALUE;
+ case DCONST_0:
+ case DCONST_1:
+ return BasicValue.DOUBLE_VALUE;
+ case BIPUSH:
+ case SIPUSH:
+ return BasicValue.INT_VALUE;
+ case LDC:
+ Object cst = ((LdcInsnNode) insn).cst;
+ if (cst instanceof Integer) {
+ return BasicValue.INT_VALUE;
+ } else if (cst instanceof Float) {
+ return BasicValue.FLOAT_VALUE;
+ } else if (cst instanceof Long) {
+ return BasicValue.LONG_VALUE;
+ } else if (cst instanceof Double) {
+ return BasicValue.DOUBLE_VALUE;
+ } else if (cst instanceof String) {
+ return newValue(Type.getObjectType("java/lang/String"));
+ } else if (cst instanceof Type) {
+ int sort = ((Type) cst).getSort();
+ if (sort == Type.OBJECT || sort == Type.ARRAY) {
+ return newValue(Type.getObjectType("java/lang/Class"));
+ } else if (sort == Type.METHOD) {
+ return newValue(Type.getObjectType("java/lang/invoke/MethodType"));
+ } else {
+ throw new IllegalArgumentException("Illegal LDC constant " + cst);
+ }
+ } else if (cst instanceof Handle) {
+ return newValue(Type.getObjectType("java/lang/invoke/MethodHandle"));
+ } else {
+ throw new IllegalArgumentException("Illegal LDC constant " + cst);
+ }
+ case JSR:
+ return BasicValue.RETURNADDRESS_VALUE;
+ case GETSTATIC:
+ return newValue(Type.getType(((FieldInsnNode) insn).desc));
+ case NEW:
+ return newValue(Type.getObjectType(((TypeInsnNode) insn).desc));
+ default:
+ throw new Error("Internal error.");
+ }
+ }
+
+ @Override
+ public BasicValue copyOperation(final AbstractInsnNode insn, final BasicValue value)
+ throws AnalyzerException
+ {
+ return value;
+ }
+
+ @Override
+ public BasicValue unaryOperation(final AbstractInsnNode insn, final BasicValue value)
+ throws AnalyzerException
+ {
+ switch (insn.getOpcode()) {
+ case INEG:
+ case IINC:
+ case L2I:
+ case F2I:
+ case D2I:
+ case I2B:
+ case I2C:
+ case I2S:
+ return BasicValue.INT_VALUE;
+ case FNEG:
+ case I2F:
+ case L2F:
+ case D2F:
+ return BasicValue.FLOAT_VALUE;
+ case LNEG:
+ case I2L:
+ case F2L:
+ case D2L:
+ return BasicValue.LONG_VALUE;
+ case DNEG:
+ case I2D:
+ case L2D:
+ case F2D:
+ return BasicValue.DOUBLE_VALUE;
+ case IFEQ:
+ case IFNE:
+ case IFLT:
+ case IFGE:
+ case IFGT:
+ case IFLE:
+ case TABLESWITCH:
+ case LOOKUPSWITCH:
+ case IRETURN:
+ case LRETURN:
+ case FRETURN:
+ case DRETURN:
+ case ARETURN:
+ case PUTSTATIC:
+ return null;
+ case GETFIELD:
+ return newValue(Type.getType(((FieldInsnNode) insn).desc));
+ case NEWARRAY:
+ switch (((IntInsnNode) insn).operand) {
+ case T_BOOLEAN:
+ return newValue(Type.getType("[Z"));
+ case T_CHAR:
+ return newValue(Type.getType("[C"));
+ case T_BYTE:
+ return newValue(Type.getType("[B"));
+ case T_SHORT:
+ return newValue(Type.getType("[S"));
+ case T_INT:
+ return newValue(Type.getType("[I"));
+ case T_FLOAT:
+ return newValue(Type.getType("[F"));
+ case T_DOUBLE:
+ return newValue(Type.getType("[D"));
+ case T_LONG:
+ return newValue(Type.getType("[J"));
+ default:
+ throw new AnalyzerException(insn, "Invalid array type");
+ }
+ case ANEWARRAY:
+ String desc = ((TypeInsnNode) insn).desc;
+ return newValue(Type.getType("[" + Type.getObjectType(desc)));
+ case ARRAYLENGTH:
+ return BasicValue.INT_VALUE;
+ case ATHROW:
+ return null;
+ case CHECKCAST:
+ desc = ((TypeInsnNode) insn).desc;
+ return newValue(Type.getObjectType(desc));
+ case INSTANCEOF:
+ return BasicValue.INT_VALUE;
+ case MONITORENTER:
+ case MONITOREXIT:
+ case IFNULL:
+ case IFNONNULL:
+ return null;
+ default:
+ throw new Error("Internal error.");
+ }
+ }
+
+ @Override
+ public BasicValue binaryOperation(
+ final AbstractInsnNode insn,
+ final BasicValue value1,
+ final BasicValue value2) throws AnalyzerException
+ {
+ switch (insn.getOpcode()) {
+ case IALOAD:
+ case BALOAD:
+ case CALOAD:
+ case SALOAD:
+ case IADD:
+ case ISUB:
+ case IMUL:
+ case IDIV:
+ case IREM:
+ case ISHL:
+ case ISHR:
+ case IUSHR:
+ case IAND:
+ case IOR:
+ case IXOR:
+ return BasicValue.INT_VALUE;
+ case FALOAD:
+ case FADD:
+ case FSUB:
+ case FMUL:
+ case FDIV:
+ case FREM:
+ return BasicValue.FLOAT_VALUE;
+ case LALOAD:
+ case LADD:
+ case LSUB:
+ case LMUL:
+ case LDIV:
+ case LREM:
+ case LSHL:
+ case LSHR:
+ case LUSHR:
+ case LAND:
+ case LOR:
+ case LXOR:
+ return BasicValue.LONG_VALUE;
+ case DALOAD:
+ case DADD:
+ case DSUB:
+ case DMUL:
+ case DDIV:
+ case DREM:
+ return BasicValue.DOUBLE_VALUE;
+ case AALOAD:
+ return BasicValue.REFERENCE_VALUE;
+ case LCMP:
+ case FCMPL:
+ case FCMPG:
+ case DCMPL:
+ case DCMPG:
+ return BasicValue.INT_VALUE;
+ case IF_ICMPEQ:
+ case IF_ICMPNE:
+ case IF_ICMPLT:
+ case IF_ICMPGE:
+ case IF_ICMPGT:
+ case IF_ICMPLE:
+ case IF_ACMPEQ:
+ case IF_ACMPNE:
+ case PUTFIELD:
+ return null;
+ default:
+ throw new Error("Internal error.");
+ }
+ }
+
+ @Override
+ public BasicValue ternaryOperation(
+ final AbstractInsnNode insn,
+ final BasicValue value1,
+ final BasicValue value2,
+ final BasicValue value3) throws AnalyzerException
+ {
+ return null;
+ }
+
+ @Override
+ public BasicValue naryOperation(final AbstractInsnNode insn, final List<? extends BasicValue> values)
+ throws AnalyzerException
+ {
+ int opcode = insn.getOpcode();
+ if (opcode == MULTIANEWARRAY) {
+ return newValue(Type.getType(((MultiANewArrayInsnNode) insn).desc));
+ } else if (opcode == INVOKEDYNAMIC){
+ return newValue(Type.getReturnType(((InvokeDynamicInsnNode) insn).desc));
+ } else {
+ return newValue(Type.getReturnType(((MethodInsnNode) insn).desc));
+ }
+ }
+
+ @Override
+ public void returnOperation(
+ final AbstractInsnNode insn,
+ final BasicValue value,
+ final BasicValue expected) throws AnalyzerException
+ {
+ }
+
+ @Override
+ public BasicValue merge(final BasicValue v, final BasicValue w) {
+ if (!v.equals(w)) {
+ return BasicValue.UNINITIALIZED_VALUE;
+ }
+ return v;
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicValue.java b/src/asm/scala/tools/asm/tree/analysis/BasicValue.java
new file mode 100644
index 0000000..6c449db
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicValue.java
@@ -0,0 +1,108 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import scala.tools.asm.Type;
+
+/**
+ * A {@link Value} that is represented by its type in a seven types type system.
+ * This type system distinguishes the UNINITIALIZED, INT, FLOAT, LONG, DOUBLE,
+ * REFERENCE and RETURNADDRESS types.
+ *
+ * @author Eric Bruneton
+ */
+public class BasicValue implements Value {
+
+ public static final BasicValue UNINITIALIZED_VALUE = new BasicValue(null);
+
+ public static final BasicValue INT_VALUE = new BasicValue(Type.INT_TYPE);
+
+ public static final BasicValue FLOAT_VALUE = new BasicValue(Type.FLOAT_TYPE);
+
+ public static final BasicValue LONG_VALUE = new BasicValue(Type.LONG_TYPE);
+
+ public static final BasicValue DOUBLE_VALUE = new BasicValue(Type.DOUBLE_TYPE);
+
+ public static final BasicValue REFERENCE_VALUE = new BasicValue(Type.getObjectType("java/lang/Object"));
+
+ public static final BasicValue RETURNADDRESS_VALUE = new BasicValue(Type.VOID_TYPE);
+
+ private final Type type;
+
+ public BasicValue(final Type type) {
+ this.type = type;
+ }
+
+ public Type getType() {
+ return type;
+ }
+
+ public int getSize() {
+ return type == Type.LONG_TYPE || type == Type.DOUBLE_TYPE ? 2 : 1;
+ }
+
+ public boolean isReference() {
+ return type != null
+ && (type.getSort() == Type.OBJECT || type.getSort() == Type.ARRAY);
+ }
+
+ @Override
+ public boolean equals(final Object value) {
+ if (value == this) {
+ return true;
+ } else if (value instanceof BasicValue) {
+ if (type == null) {
+ return ((BasicValue) value).type == null;
+ } else {
+ return type.equals(((BasicValue) value).type);
+ }
+ } else {
+ return false;
+ }
+ }
+
+ @Override
+ public int hashCode() {
+ return type == null ? 0 : type.hashCode();
+ }
+
+ @Override
+ public String toString() {
+ if (this == UNINITIALIZED_VALUE) {
+ return ".";
+ } else if (this == RETURNADDRESS_VALUE) {
+ return "A";
+ } else if (this == REFERENCE_VALUE) {
+ return "R";
+ } else {
+ return type.getDescriptor();
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
new file mode 100644
index 0000000..9297dd9
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
@@ -0,0 +1,459 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.List;
+
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.AbstractInsnNode;
+import scala.tools.asm.tree.FieldInsnNode;
+import scala.tools.asm.tree.InvokeDynamicInsnNode;
+import scala.tools.asm.tree.MethodInsnNode;
+
+/**
+ * An extended {@link BasicInterpreter} that checks that bytecode instructions
+ * are correctly used.
+ *
+ * @author Eric Bruneton
+ * @author Bing Ran
+ */
+public class BasicVerifier extends BasicInterpreter {
+
+ public BasicVerifier() {
+ super(ASM4);
+ }
+
+ protected BasicVerifier(final int api) {
+ super(api);
+ }
+
+ @Override
+ public BasicValue copyOperation(final AbstractInsnNode insn, final BasicValue value)
+ throws AnalyzerException
+ {
+ Value expected;
+ switch (insn.getOpcode()) {
+ case ILOAD:
+ case ISTORE:
+ expected = BasicValue.INT_VALUE;
+ break;
+ case FLOAD:
+ case FSTORE:
+ expected = BasicValue.FLOAT_VALUE;
+ break;
+ case LLOAD:
+ case LSTORE:
+ expected = BasicValue.LONG_VALUE;
+ break;
+ case DLOAD:
+ case DSTORE:
+ expected = BasicValue.DOUBLE_VALUE;
+ break;
+ case ALOAD:
+ if (!value.isReference()) {
+ throw new AnalyzerException(insn,
+ null,
+ "an object reference",
+ value);
+ }
+ return value;
+ case ASTORE:
+ if (!value.isReference()
+ && !BasicValue.RETURNADDRESS_VALUE.equals(value))
+ {
+ throw new AnalyzerException(insn,
+ null,
+ "an object reference or a return address",
+ value);
+ }
+ return value;
+ default:
+ return value;
+ }
+ if (!expected.equals(value)) {
+ throw new AnalyzerException(insn, null, expected, value);
+ }
+ return value;
+ }
+
+ @Override
+ public BasicValue unaryOperation(final AbstractInsnNode insn, final BasicValue value)
+ throws AnalyzerException
+ {
+ BasicValue expected;
+ switch (insn.getOpcode()) {
+ case INEG:
+ case IINC:
+ case I2F:
+ case I2L:
+ case I2D:
+ case I2B:
+ case I2C:
+ case I2S:
+ case IFEQ:
+ case IFNE:
+ case IFLT:
+ case IFGE:
+ case IFGT:
+ case IFLE:
+ case TABLESWITCH:
+ case LOOKUPSWITCH:
+ case IRETURN:
+ case NEWARRAY:
+ case ANEWARRAY:
+ expected = BasicValue.INT_VALUE;
+ break;
+ case FNEG:
+ case F2I:
+ case F2L:
+ case F2D:
+ case FRETURN:
+ expected = BasicValue.FLOAT_VALUE;
+ break;
+ case LNEG:
+ case L2I:
+ case L2F:
+ case L2D:
+ case LRETURN:
+ expected = BasicValue.LONG_VALUE;
+ break;
+ case DNEG:
+ case D2I:
+ case D2F:
+ case D2L:
+ case DRETURN:
+ expected = BasicValue.DOUBLE_VALUE;
+ break;
+ case GETFIELD:
+ expected = newValue(Type.getObjectType(((FieldInsnNode) insn).owner));
+ break;
+ case CHECKCAST:
+ if (!value.isReference()) {
+ throw new AnalyzerException(insn,
+ null,
+ "an object reference",
+ value);
+ }
+ return super.unaryOperation(insn, value);
+ case ARRAYLENGTH:
+ if (!isArrayValue(value)) {
+ throw new AnalyzerException(insn,
+ null,
+ "an array reference",
+ value);
+ }
+ return super.unaryOperation(insn, value);
+ case ARETURN:
+ case ATHROW:
+ case INSTANCEOF:
+ case MONITORENTER:
+ case MONITOREXIT:
+ case IFNULL:
+ case IFNONNULL:
+ if (!value.isReference()) {
+ throw new AnalyzerException(insn,
+ null,
+ "an object reference",
+ value);
+ }
+ return super.unaryOperation(insn, value);
+ case PUTSTATIC:
+ expected = newValue(Type.getType(((FieldInsnNode) insn).desc));
+ break;
+ default:
+ throw new Error("Internal error.");
+ }
+ if (!isSubTypeOf(value, expected)) {
+ throw new AnalyzerException(insn, null, expected, value);
+ }
+ return super.unaryOperation(insn, value);
+ }
+
+ @Override
+ public BasicValue binaryOperation(
+ final AbstractInsnNode insn,
+ final BasicValue value1,
+ final BasicValue value2) throws AnalyzerException
+ {
+ BasicValue expected1;
+ BasicValue expected2;
+ switch (insn.getOpcode()) {
+ case IALOAD:
+ expected1 = newValue(Type.getType("[I"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case BALOAD:
+ if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
+ expected1 = newValue(Type.getType("[Z"));
+ } else {
+ expected1 = newValue(Type.getType("[B"));
+ }
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case CALOAD:
+ expected1 = newValue(Type.getType("[C"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case SALOAD:
+ expected1 = newValue(Type.getType("[S"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case LALOAD:
+ expected1 = newValue(Type.getType("[J"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case FALOAD:
+ expected1 = newValue(Type.getType("[F"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case DALOAD:
+ expected1 = newValue(Type.getType("[D"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case AALOAD:
+ expected1 = newValue(Type.getType("[Ljava/lang/Object;"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case IADD:
+ case ISUB:
+ case IMUL:
+ case IDIV:
+ case IREM:
+ case ISHL:
+ case ISHR:
+ case IUSHR:
+ case IAND:
+ case IOR:
+ case IXOR:
+ case IF_ICMPEQ:
+ case IF_ICMPNE:
+ case IF_ICMPLT:
+ case IF_ICMPGE:
+ case IF_ICMPGT:
+ case IF_ICMPLE:
+ expected1 = BasicValue.INT_VALUE;
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case FADD:
+ case FSUB:
+ case FMUL:
+ case FDIV:
+ case FREM:
+ case FCMPL:
+ case FCMPG:
+ expected1 = BasicValue.FLOAT_VALUE;
+ expected2 = BasicValue.FLOAT_VALUE;
+ break;
+ case LADD:
+ case LSUB:
+ case LMUL:
+ case LDIV:
+ case LREM:
+ case LAND:
+ case LOR:
+ case LXOR:
+ case LCMP:
+ expected1 = BasicValue.LONG_VALUE;
+ expected2 = BasicValue.LONG_VALUE;
+ break;
+ case LSHL:
+ case LSHR:
+ case LUSHR:
+ expected1 = BasicValue.LONG_VALUE;
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case DADD:
+ case DSUB:
+ case DMUL:
+ case DDIV:
+ case DREM:
+ case DCMPL:
+ case DCMPG:
+ expected1 = BasicValue.DOUBLE_VALUE;
+ expected2 = BasicValue.DOUBLE_VALUE;
+ break;
+ case IF_ACMPEQ:
+ case IF_ACMPNE:
+ expected1 = BasicValue.REFERENCE_VALUE;
+ expected2 = BasicValue.REFERENCE_VALUE;
+ break;
+ case PUTFIELD:
+ FieldInsnNode fin = (FieldInsnNode) insn;
+ expected1 = newValue(Type.getObjectType(fin.owner));
+ expected2 = newValue(Type.getType(fin.desc));
+ break;
+ default:
+ throw new Error("Internal error.");
+ }
+ if (!isSubTypeOf(value1, expected1)) {
+ throw new AnalyzerException(insn, "First argument", expected1, value1);
+ } else if (!isSubTypeOf(value2, expected2)) {
+ throw new AnalyzerException(insn, "Second argument", expected2, value2);
+ }
+ if (insn.getOpcode() == AALOAD) {
+ return getElementValue(value1);
+ } else {
+ return super.binaryOperation(insn, value1, value2);
+ }
+ }
+
+ @Override
+ public BasicValue ternaryOperation(
+ final AbstractInsnNode insn,
+ final BasicValue value1,
+ final BasicValue value2,
+ final BasicValue value3) throws AnalyzerException
+ {
+ BasicValue expected1;
+ BasicValue expected3;
+ switch (insn.getOpcode()) {
+ case IASTORE:
+ expected1 = newValue(Type.getType("[I"));
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case BASTORE:
+ if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
+ expected1 = newValue(Type.getType("[Z"));
+ } else {
+ expected1 = newValue(Type.getType("[B"));
+ }
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case CASTORE:
+ expected1 = newValue(Type.getType("[C"));
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case SASTORE:
+ expected1 = newValue(Type.getType("[S"));
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case LASTORE:
+ expected1 = newValue(Type.getType("[J"));
+ expected3 = BasicValue.LONG_VALUE;
+ break;
+ case FASTORE:
+ expected1 = newValue(Type.getType("[F"));
+ expected3 = BasicValue.FLOAT_VALUE;
+ break;
+ case DASTORE:
+ expected1 = newValue(Type.getType("[D"));
+ expected3 = BasicValue.DOUBLE_VALUE;
+ break;
+ case AASTORE:
+ expected1 = value1;
+ expected3 = BasicValue.REFERENCE_VALUE;
+ break;
+ default:
+ throw new Error("Internal error.");
+ }
+ if (!isSubTypeOf(value1, expected1)) {
+ throw new AnalyzerException(insn, "First argument", "a " + expected1
+ + " array reference", value1);
+ } else if (!BasicValue.INT_VALUE.equals(value2)) {
+ throw new AnalyzerException(insn, "Second argument",
+ BasicValue.INT_VALUE,
+ value2);
+ } else if (!isSubTypeOf(value3, expected3)) {
+ throw new AnalyzerException(insn, "Third argument", expected3, value3);
+ }
+ return null;
+ }
+
+ @Override
+ public BasicValue naryOperation(final AbstractInsnNode insn, final List<? extends BasicValue> values)
+ throws AnalyzerException
+ {
+ int opcode = insn.getOpcode();
+ if (opcode == MULTIANEWARRAY) {
+ for (int i = 0; i < values.size(); ++i) {
+ if (!BasicValue.INT_VALUE.equals(values.get(i))) {
+ throw new AnalyzerException(insn,
+ null,
+ BasicValue.INT_VALUE,
+ values.get(i));
+ }
+ }
+ } else {
+ int i = 0;
+ int j = 0;
+ if (opcode != INVOKESTATIC && opcode != INVOKEDYNAMIC) {
+ Type owner = Type.getObjectType(((MethodInsnNode) insn).owner);
+ if (!isSubTypeOf(values.get(i++), newValue(owner))) {
+ throw new AnalyzerException(insn, "Method owner",
+ newValue(owner),
+ values.get(0));
+ }
+ }
+ String desc = (opcode == INVOKEDYNAMIC)?
+ ((InvokeDynamicInsnNode) insn).desc:
+ ((MethodInsnNode) insn).desc;
+ Type[] args = Type.getArgumentTypes(desc);
+ while (i < values.size()) {
+ BasicValue expected = newValue(args[j++]);
+ BasicValue encountered = values.get(i++);
+ if (!isSubTypeOf(encountered, expected)) {
+ throw new AnalyzerException(insn,
+ "Argument " + j,
+ expected,
+ encountered);
+ }
+ }
+ }
+ return super.naryOperation(insn, values);
+ }
+
+ @Override
+ public void returnOperation(
+ final AbstractInsnNode insn,
+ final BasicValue value,
+ final BasicValue expected) throws AnalyzerException
+ {
+ if (!isSubTypeOf(value, expected)) {
+ throw new AnalyzerException(insn,
+ "Incompatible return type",
+ expected,
+ value);
+ }
+ }
+
+ protected boolean isArrayValue(final BasicValue value) {
+ return value.isReference();
+ }
+
+ protected BasicValue getElementValue(final BasicValue objectArrayValue)
+ throws AnalyzerException
+ {
+ return BasicValue.REFERENCE_VALUE;
+ }
+
+ protected boolean isSubTypeOf(final BasicValue value, final BasicValue expected) {
+ return value.equals(expected);
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/analysis/Frame.java b/src/asm/scala/tools/asm/tree/analysis/Frame.java
new file mode 100644
index 0000000..fe19c2c
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/Frame.java
@@ -0,0 +1,709 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.AbstractInsnNode;
+import scala.tools.asm.tree.IincInsnNode;
+import scala.tools.asm.tree.InvokeDynamicInsnNode;
+import scala.tools.asm.tree.MethodInsnNode;
+import scala.tools.asm.tree.MultiANewArrayInsnNode;
+import scala.tools.asm.tree.VarInsnNode;
+
+/**
+ * A symbolic execution stack frame. A stack frame contains a set of local
+ * variable slots, and an operand stack. Warning: long and double values are
+ * represented by <i>two</i> slots in local variables, and by <i>one</i> slot
+ * in the operand stack.
+ *
+ * @param <V> type of the Value used for the analysis.
+ *
+ * @author Eric Bruneton
+ */
+public class Frame<V extends Value> {
+
+ /**
+ * The expected return type of the analyzed method, or <tt>null</tt> if the
+ * method returns void.
+ */
+ private V returnValue;
+
+ /**
+ * The local variables and operand stack of this frame.
+ */
+ private V[] values;
+
+ /**
+ * The number of local variables of this frame.
+ */
+ private int locals;
+
+ /**
+ * The number of elements in the operand stack.
+ */
+ private int top;
+
+ /**
+ * Constructs a new frame with the given size.
+ *
+ * @param nLocals the maximum number of local variables of the frame.
+ * @param nStack the maximum stack size of the frame.
+ */
+ public Frame(final int nLocals, final int nStack) {
+ this.values = (V[]) new Value[nLocals + nStack];
+ this.locals = nLocals;
+ }
+
+ /**
+ * Constructs a new frame that is identical to the given frame.
+ *
+ * @param src a frame.
+ */
+ public Frame(final Frame<? extends V> src) {
+ this(src.locals, src.values.length - src.locals);
+ init(src);
+ }
+
+ /**
+ * Copies the state of the given frame into this frame.
+ *
+ * @param src a frame.
+ * @return this frame.
+ */
+ public Frame<V> init(final Frame<? extends V> src) {
+ returnValue = src.returnValue;
+ System.arraycopy(src.values, 0, values, 0, values.length);
+ top = src.top;
+ return this;
+ }
+
+ /**
+ * Sets the expected return type of the analyzed method.
+ *
+ * @param v the expected return type of the analyzed method, or
+ * <tt>null</tt> if the method returns void.
+ */
+ public void setReturn(final V v) {
+ returnValue = v;
+ }
+
+ /**
+ * Returns the maximum number of local variables of this frame.
+ *
+ * @return the maximum number of local variables of this frame.
+ */
+ public int getLocals() {
+ return locals;
+ }
+
+ /**
+ * Returns the value of the given local variable.
+ *
+ * @param i a local variable index.
+ * @return the value of the given local variable.
+ * @throws IndexOutOfBoundsException if the variable does not exist.
+ */
+ public V getLocal(final int i) throws IndexOutOfBoundsException {
+ if (i >= locals) {
+ throw new IndexOutOfBoundsException("Trying to access an inexistant local variable");
+ }
+ return values[i];
+ }
+
+ /**
+ * Sets the value of the given local variable.
+ *
+ * @param i a local variable index.
+ * @param value the new value of this local variable.
+ * @throws IndexOutOfBoundsException if the variable does not exist.
+ */
+ public void setLocal(final int i, final V value)
+ throws IndexOutOfBoundsException
+ {
+ if (i >= locals) {
+ throw new IndexOutOfBoundsException("Trying to access an inexistant local variable "+i);
+ }
+ values[i] = value;
+ }
+
+ /**
+ * Returns the number of values in the operand stack of this frame. Long and
+ * double values are treated as single values.
+ *
+ * @return the number of values in the operand stack of this frame.
+ */
+ public int getStackSize() {
+ return top;
+ }
+
+ /**
+ * Returns the value of the given operand stack slot.
+ *
+ * @param i the index of an operand stack slot.
+ * @return the value of the given operand stack slot.
+ * @throws IndexOutOfBoundsException if the operand stack slot does not
+ * exist.
+ */
+ public V getStack(final int i) throws IndexOutOfBoundsException {
+ return values[i + locals];
+ }
+
+ /**
+ * Clears the operand stack of this frame.
+ */
+ public void clearStack() {
+ top = 0;
+ }
+
+ /**
+ * Pops a value from the operand stack of this frame.
+ *
+ * @return the value that has been popped from the stack.
+ * @throws IndexOutOfBoundsException if the operand stack is empty.
+ */
+ public V pop() throws IndexOutOfBoundsException {
+ if (top == 0) {
+ throw new IndexOutOfBoundsException("Cannot pop operand off an empty stack.");
+ }
+ return values[--top + locals];
+ }
+
+ /**
+ * Pushes a value into the operand stack of this frame.
+ *
+ * @param value the value that must be pushed into the stack.
+ * @throws IndexOutOfBoundsException if the operand stack is full.
+ */
+ public void push(final V value) throws IndexOutOfBoundsException {
+ if (top + locals >= values.length) {
+ throw new IndexOutOfBoundsException("Insufficient maximum stack size.");
+ }
+ values[top++ + locals] = value;
+ }
+
+ public void execute(
+ final AbstractInsnNode insn,
+ final Interpreter<V> interpreter) throws AnalyzerException
+ {
+ V value1, value2, value3, value4;
+ List<V> values;
+ int var;
+
+ switch (insn.getOpcode()) {
+ case Opcodes.NOP:
+ break;
+ case Opcodes.ACONST_NULL:
+ case Opcodes.ICONST_M1:
+ case Opcodes.ICONST_0:
+ case Opcodes.ICONST_1:
+ case Opcodes.ICONST_2:
+ case Opcodes.ICONST_3:
+ case Opcodes.ICONST_4:
+ case Opcodes.ICONST_5:
+ case Opcodes.LCONST_0:
+ case Opcodes.LCONST_1:
+ case Opcodes.FCONST_0:
+ case Opcodes.FCONST_1:
+ case Opcodes.FCONST_2:
+ case Opcodes.DCONST_0:
+ case Opcodes.DCONST_1:
+ case Opcodes.BIPUSH:
+ case Opcodes.SIPUSH:
+ case Opcodes.LDC:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.ILOAD:
+ case Opcodes.LLOAD:
+ case Opcodes.FLOAD:
+ case Opcodes.DLOAD:
+ case Opcodes.ALOAD:
+ push(interpreter.copyOperation(insn,
+ getLocal(((VarInsnNode) insn).var)));
+ break;
+ case Opcodes.IALOAD:
+ case Opcodes.LALOAD:
+ case Opcodes.FALOAD:
+ case Opcodes.DALOAD:
+ case Opcodes.AALOAD:
+ case Opcodes.BALOAD:
+ case Opcodes.CALOAD:
+ case Opcodes.SALOAD:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.ISTORE:
+ case Opcodes.LSTORE:
+ case Opcodes.FSTORE:
+ case Opcodes.DSTORE:
+ case Opcodes.ASTORE:
+ value1 = interpreter.copyOperation(insn, pop());
+ var = ((VarInsnNode) insn).var;
+ setLocal(var, value1);
+ if (value1.getSize() == 2) {
+ setLocal(var + 1, interpreter.newValue(null));
+ }
+ if (var > 0) {
+ Value local = getLocal(var - 1);
+ if (local != null && local.getSize() == 2) {
+ setLocal(var - 1, interpreter.newValue(null));
+ }
+ }
+ break;
+ case Opcodes.IASTORE:
+ case Opcodes.LASTORE:
+ case Opcodes.FASTORE:
+ case Opcodes.DASTORE:
+ case Opcodes.AASTORE:
+ case Opcodes.BASTORE:
+ case Opcodes.CASTORE:
+ case Opcodes.SASTORE:
+ value3 = pop();
+ value2 = pop();
+ value1 = pop();
+ interpreter.ternaryOperation(insn, value1, value2, value3);
+ break;
+ case Opcodes.POP:
+ if (pop().getSize() == 2) {
+ throw new AnalyzerException(insn, "Illegal use of POP");
+ }
+ break;
+ case Opcodes.POP2:
+ if (pop().getSize() == 1) {
+ if (pop().getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of POP2");
+ }
+ }
+ break;
+ case Opcodes.DUP:
+ value1 = pop();
+ if (value1.getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of DUP");
+ }
+ push(value1);
+ push(interpreter.copyOperation(insn, value1));
+ break;
+ case Opcodes.DUP_X1:
+ value1 = pop();
+ value2 = pop();
+ if (value1.getSize() != 1 || value2.getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of DUP_X1");
+ }
+ push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
+ break;
+ case Opcodes.DUP_X2:
+ value1 = pop();
+ if (value1.getSize() == 1) {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
+ push(interpreter.copyOperation(insn, value1));
+ push(value3);
+ push(value2);
+ push(value1);
+ break;
+ }
+ } else {
+ push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
+ break;
+ }
+ }
+ throw new AnalyzerException(insn, "Illegal use of DUP_X2");
+ case Opcodes.DUP2:
+ value1 = pop();
+ if (value1.getSize() == 1) {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ push(value2);
+ push(value1);
+ push(interpreter.copyOperation(insn, value2));
+ push(interpreter.copyOperation(insn, value1));
+ break;
+ }
+ } else {
+ push(value1);
+ push(interpreter.copyOperation(insn, value1));
+ break;
+ }
+ throw new AnalyzerException(insn, "Illegal use of DUP2");
+ case Opcodes.DUP2_X1:
+ value1 = pop();
+ if (value1.getSize() == 1) {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
+ push(interpreter.copyOperation(insn, value2));
+ push(interpreter.copyOperation(insn, value1));
+ push(value3);
+ push(value2);
+ push(value1);
+ break;
+ }
+ }
+ } else {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
+ break;
+ }
+ }
+ throw new AnalyzerException(insn, "Illegal use of DUP2_X1");
+ case Opcodes.DUP2_X2:
+ value1 = pop();
+ if (value1.getSize() == 1) {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
+ value4 = pop();
+ if (value4.getSize() == 1) {
+ push(interpreter.copyOperation(insn, value2));
+ push(interpreter.copyOperation(insn, value1));
+ push(value4);
+ push(value3);
+ push(value2);
+ push(value1);
+ break;
+ }
+ } else {
+ push(interpreter.copyOperation(insn, value2));
+ push(interpreter.copyOperation(insn, value1));
+ push(value3);
+ push(value2);
+ push(value1);
+ break;
+ }
+ }
+ } else {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
+ push(interpreter.copyOperation(insn, value1));
+ push(value3);
+ push(value2);
+ push(value1);
+ break;
+ }
+ } else {
+ push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
+ break;
+ }
+ }
+ throw new AnalyzerException(insn, "Illegal use of DUP2_X2");
+ case Opcodes.SWAP:
+ value2 = pop();
+ value1 = pop();
+ if (value1.getSize() != 1 || value2.getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of SWAP");
+ }
+ push(interpreter.copyOperation(insn, value2));
+ push(interpreter.copyOperation(insn, value1));
+ break;
+ case Opcodes.IADD:
+ case Opcodes.LADD:
+ case Opcodes.FADD:
+ case Opcodes.DADD:
+ case Opcodes.ISUB:
+ case Opcodes.LSUB:
+ case Opcodes.FSUB:
+ case Opcodes.DSUB:
+ case Opcodes.IMUL:
+ case Opcodes.LMUL:
+ case Opcodes.FMUL:
+ case Opcodes.DMUL:
+ case Opcodes.IDIV:
+ case Opcodes.LDIV:
+ case Opcodes.FDIV:
+ case Opcodes.DDIV:
+ case Opcodes.IREM:
+ case Opcodes.LREM:
+ case Opcodes.FREM:
+ case Opcodes.DREM:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.INEG:
+ case Opcodes.LNEG:
+ case Opcodes.FNEG:
+ case Opcodes.DNEG:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.ISHL:
+ case Opcodes.LSHL:
+ case Opcodes.ISHR:
+ case Opcodes.LSHR:
+ case Opcodes.IUSHR:
+ case Opcodes.LUSHR:
+ case Opcodes.IAND:
+ case Opcodes.LAND:
+ case Opcodes.IOR:
+ case Opcodes.LOR:
+ case Opcodes.IXOR:
+ case Opcodes.LXOR:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.IINC:
+ var = ((IincInsnNode) insn).var;
+ setLocal(var, interpreter.unaryOperation(insn, getLocal(var)));
+ break;
+ case Opcodes.I2L:
+ case Opcodes.I2F:
+ case Opcodes.I2D:
+ case Opcodes.L2I:
+ case Opcodes.L2F:
+ case Opcodes.L2D:
+ case Opcodes.F2I:
+ case Opcodes.F2L:
+ case Opcodes.F2D:
+ case Opcodes.D2I:
+ case Opcodes.D2L:
+ case Opcodes.D2F:
+ case Opcodes.I2B:
+ case Opcodes.I2C:
+ case Opcodes.I2S:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.LCMP:
+ case Opcodes.FCMPL:
+ case Opcodes.FCMPG:
+ case Opcodes.DCMPL:
+ case Opcodes.DCMPG:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.IFEQ:
+ case Opcodes.IFNE:
+ case Opcodes.IFLT:
+ case Opcodes.IFGE:
+ case Opcodes.IFGT:
+ case Opcodes.IFLE:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.IF_ICMPEQ:
+ case Opcodes.IF_ICMPNE:
+ case Opcodes.IF_ICMPLT:
+ case Opcodes.IF_ICMPGE:
+ case Opcodes.IF_ICMPGT:
+ case Opcodes.IF_ICMPLE:
+ case Opcodes.IF_ACMPEQ:
+ case Opcodes.IF_ACMPNE:
+ value2 = pop();
+ value1 = pop();
+ interpreter.binaryOperation(insn, value1, value2);
+ break;
+ case Opcodes.GOTO:
+ break;
+ case Opcodes.JSR:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.RET:
+ break;
+ case Opcodes.TABLESWITCH:
+ case Opcodes.LOOKUPSWITCH:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.IRETURN:
+ case Opcodes.LRETURN:
+ case Opcodes.FRETURN:
+ case Opcodes.DRETURN:
+ case Opcodes.ARETURN:
+ value1 = pop();
+ interpreter.unaryOperation(insn, value1);
+ interpreter.returnOperation(insn, value1, returnValue);
+ break;
+ case Opcodes.RETURN:
+ if (returnValue != null) {
+ throw new AnalyzerException(insn, "Incompatible return type");
+ }
+ break;
+ case Opcodes.GETSTATIC:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.PUTSTATIC:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.GETFIELD:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.PUTFIELD:
+ value2 = pop();
+ value1 = pop();
+ interpreter.binaryOperation(insn, value1, value2);
+ break;
+ case Opcodes.INVOKEVIRTUAL:
+ case Opcodes.INVOKESPECIAL:
+ case Opcodes.INVOKESTATIC:
+ case Opcodes.INVOKEINTERFACE: {
+ values = new ArrayList<V>();
+ String desc = ((MethodInsnNode) insn).desc;
+ for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
+ values.add(0, pop());
+ }
+ if (insn.getOpcode() != Opcodes.INVOKESTATIC) {
+ values.add(0, pop());
+ }
+ if (Type.getReturnType(desc) == Type.VOID_TYPE) {
+ interpreter.naryOperation(insn, values);
+ } else {
+ push(interpreter.naryOperation(insn, values));
+ }
+ break;
+ }
+ case Opcodes.INVOKEDYNAMIC: {
+ values = new ArrayList<V>();
+ String desc = ((InvokeDynamicInsnNode) insn).desc;
+ for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
+ values.add(0, pop());
+ }
+ if (Type.getReturnType(desc) == Type.VOID_TYPE) {
+ interpreter.naryOperation(insn, values);
+ } else {
+ push(interpreter.naryOperation(insn, values));
+ }
+ break;
+ }
+ case Opcodes.NEW:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.NEWARRAY:
+ case Opcodes.ANEWARRAY:
+ case Opcodes.ARRAYLENGTH:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.ATHROW:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.CHECKCAST:
+ case Opcodes.INSTANCEOF:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.MONITORENTER:
+ case Opcodes.MONITOREXIT:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.MULTIANEWARRAY:
+ values = new ArrayList<V>();
+ for (int i = ((MultiANewArrayInsnNode) insn).dims; i > 0; --i) {
+ values.add(0, pop());
+ }
+ push(interpreter.naryOperation(insn, values));
+ break;
+ case Opcodes.IFNULL:
+ case Opcodes.IFNONNULL:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ default:
+ throw new RuntimeException("Illegal opcode "+insn.getOpcode());
+ }
+ }
+
+ /**
+ * Merges this frame with the given frame.
+ *
+ * @param frame a frame.
+ * @param interpreter the interpreter used to merge values.
+ * @return <tt>true</tt> if this frame has been changed as a result of the
+ * merge operation, or <tt>false</tt> otherwise.
+ * @throws AnalyzerException if the frames have incompatible sizes.
+ */
+ public boolean merge(final Frame<? extends V> frame, final Interpreter<V> interpreter)
+ throws AnalyzerException
+ {
+ if (top != frame.top) {
+ throw new AnalyzerException(null, "Incompatible stack heights");
+ }
+ boolean changes = false;
+ for (int i = 0; i < locals + top; ++i) {
+ V v = interpreter.merge(values[i], frame.values[i]);
+ if (v != values[i]) {
+ values[i] = v;
+ changes = true;
+ }
+ }
+ return changes;
+ }
+
+ /**
+ * Merges this frame with the given frame (case of a RET instruction).
+ *
+ * @param frame a frame
+ * @param access the local variables that have been accessed by the
+ * subroutine to which the RET instruction corresponds.
+ * @return <tt>true</tt> if this frame has been changed as a result of the
+ * merge operation, or <tt>false</tt> otherwise.
+ */
+ public boolean merge(final Frame<? extends V> frame, final boolean[] access) {
+ boolean changes = false;
+ for (int i = 0; i < locals; ++i) {
+ if (!access[i] && !values[i].equals(frame.values[i])) {
+ values[i] = frame.values[i];
+ changes = true;
+ }
+ }
+ return changes;
+ }
+
+ /**
+ * Returns a string representation of this frame.
+ *
+ * @return a string representation of this frame.
+ */
+ @Override
+ public String toString() {
+ StringBuffer b = new StringBuffer();
+ for (int i = 0; i < getLocals(); ++i) {
+ b.append(getLocal(i));
+ }
+ b.append(' ');
+ for (int i = 0; i < getStackSize(); ++i) {
+ b.append(getStack(i).toString());
+ }
+ return b.toString();
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
new file mode 100644
index 0000000..930c8f4
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
@@ -0,0 +1,204 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.List;
+
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.AbstractInsnNode;
+
+/**
+ * A semantic bytecode interpreter. More precisely, this interpreter only
+ * manages the computation of values from other values: it does not manage the
+ * transfer of values to or from the stack, and to or from the local variables.
+ * This separation allows a generic bytecode {@link Analyzer} to work with
+ * various semantic interpreters, without needing to duplicate the code to
+ * simulate the transfer of values.
+ *
+ * @param <V> type of the Value used for the analysis.
+ *
+ * @author Eric Bruneton
+ */
+public abstract class Interpreter<V extends Value> {
+
+ protected final int api;
+
+ protected Interpreter(final int api) {
+ this.api = api;
+ }
+
+ /**
+ * Creates a new value that represents the given type.
+ *
+ * Called for method parameters (including <code>this</code>),
+ * exception handler variable and with <code>null</code> type
+ * for variables reserved by long and double types.
+ *
+ * @param type a primitive or reference type, or <tt>null</tt> to
+ * represent an uninitialized value.
+ * @return a value that represents the given type. The size of the returned
+ * value must be equal to the size of the given type.
+ */
+ public abstract V newValue(Type type);
+
+ /**
+ * Interprets a bytecode instruction without arguments. This method is
+ * called for the following opcodes:
+ *
+ * ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1, ICONST_2, ICONST_3, ICONST_4,
+ * ICONST_5, LCONST_0, LCONST_1, FCONST_0, FCONST_1, FCONST_2, DCONST_0,
+ * DCONST_1, BIPUSH, SIPUSH, LDC, JSR, GETSTATIC, NEW
+ *
+ * @param insn the bytecode instruction to be interpreted.
+ * @return the result of the interpretation of the given instruction.
+ * @throws AnalyzerException if an error occured during the interpretation.
+ */
+ public abstract V newOperation(AbstractInsnNode insn)
+ throws AnalyzerException;
+
+ /**
+ * Interprets a bytecode instruction that moves a value on the stack or to
+ * or from local variables. This method is called for the following opcodes:
+ *
+ * ILOAD, LLOAD, FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE,
+ * ASTORE, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP
+ *
+ * @param insn the bytecode instruction to be interpreted.
+ * @param value the value that must be moved by the instruction.
+ * @return the result of the interpretation of the given instruction. The
+ * returned value must be <tt>equal</tt> to the given value.
+ * @throws AnalyzerException if an error occured during the interpretation.
+ */
+ public abstract V copyOperation(AbstractInsnNode insn, V value)
+ throws AnalyzerException;
+
+ /**
+ * Interprets a bytecode instruction with a single argument. This method is
+ * called for the following opcodes:
+ *
+ * INEG, LNEG, FNEG, DNEG, IINC, I2L, I2F, I2D, L2I, L2F, L2D, F2I, F2L,
+ * F2D, D2I, D2L, D2F, I2B, I2C, I2S, IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE,
+ * TABLESWITCH, LOOKUPSWITCH, IRETURN, LRETURN, FRETURN, DRETURN, ARETURN,
+ * PUTSTATIC, GETFIELD, NEWARRAY, ANEWARRAY, ARRAYLENGTH, ATHROW, CHECKCAST,
+ * INSTANCEOF, MONITORENTER, MONITOREXIT, IFNULL, IFNONNULL
+ *
+ * @param insn the bytecode instruction to be interpreted.
+ * @param value the argument of the instruction to be interpreted.
+ * @return the result of the interpretation of the given instruction.
+ * @throws AnalyzerException if an error occured during the interpretation.
+ */
+ public abstract V unaryOperation(AbstractInsnNode insn, V value)
+ throws AnalyzerException;
+
+ /**
+ * Interprets a bytecode instruction with two arguments. This method is
+ * called for the following opcodes:
+ *
+ * IALOAD, LALOAD, FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, IADD,
+ * LADD, FADD, DADD, ISUB, LSUB, FSUB, DSUB, IMUL, LMUL, FMUL, DMUL, IDIV,
+ * LDIV, FDIV, DDIV, IREM, LREM, FREM, DREM, ISHL, LSHL, ISHR, LSHR, IUSHR,
+ * LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR, LCMP, FCMPL, FCMPG, DCMPL,
+ * DCMPG, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
+ * IF_ACMPEQ, IF_ACMPNE, PUTFIELD
+ *
+ * @param insn the bytecode instruction to be interpreted.
+ * @param value1 the first argument of the instruction to be interpreted.
+ * @param value2 the second argument of the instruction to be interpreted.
+ * @return the result of the interpretation of the given instruction.
+ * @throws AnalyzerException if an error occured during the interpretation.
+ */
+ public abstract V binaryOperation(AbstractInsnNode insn, V value1, V value2)
+ throws AnalyzerException;
+
+ /**
+ * Interprets a bytecode instruction with three arguments. This method is
+ * called for the following opcodes:
+ *
+ * IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE, SASTORE
+ *
+ * @param insn the bytecode instruction to be interpreted.
+ * @param value1 the first argument of the instruction to be interpreted.
+ * @param value2 the second argument of the instruction to be interpreted.
+ * @param value3 the third argument of the instruction to be interpreted.
+ * @return the result of the interpretation of the given instruction.
+ * @throws AnalyzerException if an error occured during the interpretation.
+ */
+ public abstract V ternaryOperation(
+ AbstractInsnNode insn,
+ V value1,
+ V value2,
+ V value3) throws AnalyzerException;
+
+ /**
+ * Interprets a bytecode instruction with a variable number of arguments.
+ * This method is called for the following opcodes:
+ *
+ * INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC, INVOKEINTERFACE,
+ * MULTIANEWARRAY and INVOKEDYNAMIC
+ *
+ * @param insn the bytecode instruction to be interpreted.
+ * @param values the arguments of the instruction to be interpreted.
+ * @return the result of the interpretation of the given instruction.
+ * @throws AnalyzerException if an error occured during the interpretation.
+ */
+ public abstract V naryOperation(
+ AbstractInsnNode insn,
+ List< ? extends V> values) throws AnalyzerException;
+
+ /**
+ * Interprets a bytecode return instruction. This method is called for the
+ * following opcodes:
+ *
+ * IRETURN, LRETURN, FRETURN, DRETURN, ARETURN
+ *
+ * @param insn the bytecode instruction to be interpreted.
+ * @param value the argument of the instruction to be interpreted.
+ * @param expected the expected return type of the analyzed method.
+ * @throws AnalyzerException if an error occured during the interpretation.
+ */
+ public abstract void returnOperation(
+ AbstractInsnNode insn,
+ V value,
+ V expected) throws AnalyzerException;
+
+ /**
+ * Merges two values. The merge operation must return a value that
+ * represents both values (for instance, if the two values are two types,
+ * the merged value must be a common super type of the two types. If the two
+ * values are integer intervals, the merged value must be an interval that
+ * contains the previous ones. Likewise for other types of values).
+ *
+ * @param v a value.
+ * @param w another value.
+ * @return the merged value. If the merged value is equal to <tt>v</tt>,
+ * this method <i>must</i> return <tt>v</tt>.
+ */
+ public abstract V merge(V v, V w);
+}
diff --git a/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
new file mode 100644
index 0000000..c4f515d
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
@@ -0,0 +1,329 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.List;
+
+import scala.tools.asm.Type;
+
+/**
+ * An extended {@link BasicVerifier} that performs more precise verifications.
+ * This verifier computes exact class types, instead of using a single "object
+ * reference" type (as done in the {@link BasicVerifier}).
+ *
+ * @author Eric Bruneton
+ * @author Bing Ran
+ */
+public class SimpleVerifier extends BasicVerifier {
+
+ /**
+ * The class that is verified.
+ */
+ private final Type currentClass;
+
+ /**
+ * The super class of the class that is verified.
+ */
+ private final Type currentSuperClass;
+
+ /**
+ * The interfaces implemented by the class that is verified.
+ */
+ private final List<Type> currentClassInterfaces;
+
+ /**
+ * If the class that is verified is an interface.
+ */
+ private final boolean isInterface;
+
+ /**
+ * The loader to use for referenced classes.
+ */
+ private ClassLoader loader = getClass().getClassLoader();
+
+ /**
+ * Constructs a new {@link SimpleVerifier}.
+ */
+ public SimpleVerifier() {
+ this(null, null, false);
+ }
+
+ /**
+ * Constructs a new {@link SimpleVerifier} to verify a specific class. This
+ * class will not be loaded into the JVM since it may be incorrect.
+ *
+ * @param currentClass the class that is verified.
+ * @param currentSuperClass the super class of the class that is verified.
+ * @param isInterface if the class that is verified is an interface.
+ */
+ public SimpleVerifier(
+ final Type currentClass,
+ final Type currentSuperClass,
+ final boolean isInterface)
+ {
+ this(currentClass, currentSuperClass, null, isInterface);
+ }
+
+ /**
+ * Constructs a new {@link SimpleVerifier} to verify a specific class. This
+ * class will not be loaded into the JVM since it may be incorrect.
+ *
+ * @param currentClass the class that is verified.
+ * @param currentSuperClass the super class of the class that is verified.
+ * @param currentClassInterfaces the interfaces implemented by the class
+ * that is verified.
+ * @param isInterface if the class that is verified is an interface.
+ */
+ public SimpleVerifier(
+ final Type currentClass,
+ final Type currentSuperClass,
+ final List<Type> currentClassInterfaces,
+ final boolean isInterface)
+ {
+ this(ASM4,
+ currentClass,
+ currentSuperClass,
+ currentClassInterfaces,
+ isInterface);
+ }
+
+ protected SimpleVerifier(
+ final int api,
+ final Type currentClass,
+ final Type currentSuperClass,
+ final List<Type> currentClassInterfaces,
+ final boolean isInterface)
+ {
+ super(api);
+ this.currentClass = currentClass;
+ this.currentSuperClass = currentSuperClass;
+ this.currentClassInterfaces = currentClassInterfaces;
+ this.isInterface = isInterface;
+ }
+
+ /**
+ * Set the <code>ClassLoader</code> which will be used to load referenced
+ * classes. This is useful if you are verifying multiple interdependent
+ * classes.
+ *
+ * @param loader a <code>ClassLoader</code> to use
+ */
+ public void setClassLoader(final ClassLoader loader) {
+ this.loader = loader;
+ }
+
+ @Override
+ public BasicValue newValue(final Type type) {
+ if (type == null) {
+ return BasicValue.UNINITIALIZED_VALUE;
+ }
+
+ boolean isArray = type.getSort() == Type.ARRAY;
+ if (isArray) {
+ switch (type.getElementType().getSort()) {
+ case Type.BOOLEAN:
+ case Type.CHAR:
+ case Type.BYTE:
+ case Type.SHORT:
+ return new BasicValue(type);
+ }
+ }
+
+ BasicValue v = super.newValue(type);
+ if (BasicValue.REFERENCE_VALUE.equals(v)) {
+ if (isArray) {
+ v = newValue(type.getElementType());
+ String desc = v.getType().getDescriptor();
+ for (int i = 0; i < type.getDimensions(); ++i) {
+ desc = '[' + desc;
+ }
+ v = new BasicValue(Type.getType(desc));
+ } else {
+ v = new BasicValue(type);
+ }
+ }
+ return v;
+ }
+
+ @Override
+ protected boolean isArrayValue(final BasicValue value) {
+ Type t = value.getType();
+ return t != null
+ && ("Lnull;".equals(t.getDescriptor()) || t.getSort() == Type.ARRAY);
+ }
+
+ @Override
+ protected BasicValue getElementValue(final BasicValue objectArrayValue)
+ throws AnalyzerException
+ {
+ Type arrayType = objectArrayValue.getType();
+ if (arrayType != null) {
+ if (arrayType.getSort() == Type.ARRAY) {
+ return newValue(Type.getType(arrayType.getDescriptor()
+ .substring(1)));
+ } else if ("Lnull;".equals(arrayType.getDescriptor())) {
+ return objectArrayValue;
+ }
+ }
+ throw new Error("Internal error");
+ }
+
+ @Override
+ protected boolean isSubTypeOf(final BasicValue value, final BasicValue expected) {
+ Type expectedType = expected.getType();
+ Type type = value.getType();
+ switch (expectedType.getSort()) {
+ case Type.INT:
+ case Type.FLOAT:
+ case Type.LONG:
+ case Type.DOUBLE:
+ return type.equals(expectedType);
+ case Type.ARRAY:
+ case Type.OBJECT:
+ if ("Lnull;".equals(type.getDescriptor())) {
+ return true;
+ } else if (type.getSort() == Type.OBJECT
+ || type.getSort() == Type.ARRAY)
+ {
+ return isAssignableFrom(expectedType, type);
+ } else {
+ return false;
+ }
+ default:
+ throw new Error("Internal error");
+ }
+ }
+
+ @Override
+ public BasicValue merge(final BasicValue v, final BasicValue w) {
+ if (!v.equals(w)) {
+ Type t = v.getType();
+ Type u = w.getType();
+ if (t != null
+ && (t.getSort() == Type.OBJECT || t.getSort() == Type.ARRAY))
+ {
+ if (u != null
+ && (u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY))
+ {
+ if ("Lnull;".equals(t.getDescriptor())) {
+ return w;
+ }
+ if ("Lnull;".equals(u.getDescriptor())) {
+ return v;
+ }
+ if (isAssignableFrom(t, u)) {
+ return v;
+ }
+ if (isAssignableFrom(u, t)) {
+ return w;
+ }
+ // TODO case of array classes of the same dimension
+ // TODO should we look also for a common super interface?
+ // problem: there may be several possible common super
+ // interfaces
+ do {
+ if (t == null || isInterface(t)) {
+ return BasicValue.REFERENCE_VALUE;
+ }
+ t = getSuperClass(t);
+ if (isAssignableFrom(t, u)) {
+ return newValue(t);
+ }
+ } while (true);
+ }
+ }
+ return BasicValue.UNINITIALIZED_VALUE;
+ }
+ return v;
+ }
+
+ protected boolean isInterface(final Type t) {
+ if (currentClass != null && t.equals(currentClass)) {
+ return isInterface;
+ }
+ return getClass(t).isInterface();
+ }
+
+ protected Type getSuperClass(final Type t) {
+ if (currentClass != null && t.equals(currentClass)) {
+ return currentSuperClass;
+ }
+ Class<?> c = getClass(t).getSuperclass();
+ return c == null ? null : Type.getType(c);
+ }
+
+ protected boolean isAssignableFrom(final Type t, final Type u) {
+ if (t.equals(u)) {
+ return true;
+ }
+ if (currentClass != null && t.equals(currentClass)) {
+ if (getSuperClass(u) == null) {
+ return false;
+ } else {
+ if (isInterface) {
+ return u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY;
+ }
+ return isAssignableFrom(t, getSuperClass(u));
+ }
+ }
+ if (currentClass != null && u.equals(currentClass)) {
+ if (isAssignableFrom(t, currentSuperClass)) {
+ return true;
+ }
+ if (currentClassInterfaces != null) {
+ for (int i = 0; i < currentClassInterfaces.size(); ++i) {
+ Type v = currentClassInterfaces.get(i);
+ if (isAssignableFrom(t, v)) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+ Class<?> tc = getClass(t);
+ if (tc.isInterface()) {
+ tc = Object.class;
+ }
+ return tc.isAssignableFrom(getClass(u));
+ }
+
+ protected Class<?> getClass(final Type t) {
+ try {
+ if (t.getSort() == Type.ARRAY) {
+ return Class.forName(t.getDescriptor().replace('/', '.'),
+ false,
+ loader);
+ }
+ return Class.forName(t.getClassName(), false, loader);
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeException(e.toString());
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/analysis/SmallSet.java b/src/asm/scala/tools/asm/tree/analysis/SmallSet.java
new file mode 100644
index 0000000..205878d
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/SmallSet.java
@@ -0,0 +1,134 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.AbstractSet;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+import java.util.Set;
+
+/**
+ * A set of at most two elements.
+ *
+ * @author Eric Bruneton
+ */
+class SmallSet<E> extends AbstractSet<E> implements Iterator<E> {
+
+ // if e1 is null, e2 must be null; otherwise e2 must be different from e1
+
+ E e1, e2;
+
+ static final <T> Set<T> emptySet() {
+ return new SmallSet<T>(null, null);
+ }
+
+ SmallSet(final E e1, final E e2) {
+ this.e1 = e1;
+ this.e2 = e2;
+ }
+
+ // -------------------------------------------------------------------------
+ // Implementation of inherited abstract methods
+ // -------------------------------------------------------------------------
+
+ @Override
+ public Iterator<E> iterator() {
+ return new SmallSet<E>(e1, e2);
+ }
+
+ @Override
+ public int size() {
+ return e1 == null ? 0 : (e2 == null ? 1 : 2);
+ }
+
+ // -------------------------------------------------------------------------
+ // Implementation of the Iterator interface
+ // -------------------------------------------------------------------------
+
+ public boolean hasNext() {
+ return e1 != null;
+ }
+
+ public E next() {
+ if (e1 == null) {
+ throw new NoSuchElementException();
+ }
+ E e = e1;
+ e1 = e2;
+ e2 = null;
+ return e;
+ }
+
+ public void remove() {
+ }
+
+ // -------------------------------------------------------------------------
+ // Utility methods
+ // -------------------------------------------------------------------------
+
+ Set<E> union(final SmallSet<E> s) {
+ if ((s.e1 == e1 && s.e2 == e2) || (s.e1 == e2 && s.e2 == e1)) {
+ return this; // if the two sets are equal, return this
+ }
+ if (s.e1 == null) {
+ return this; // if s is empty, return this
+ }
+ if (e1 == null) {
+ return s; // if this is empty, return s
+ }
+ if (s.e2 == null) { // s contains exactly one element
+ if (e2 == null) {
+ return new SmallSet<E>(e1, s.e1); // necessarily e1 != s.e1
+ } else if (s.e1 == e1 || s.e1 == e2) { // s is included in this
+ return this;
+ }
+ }
+ if (e2 == null) { // this contains exactly one element
+ // if (s.e2 == null) { // cannot happen
+ // return new SmallSet(e1, s.e1); // necessarily e1 != s.e1
+ // } else
+ if (e1 == s.e1 || e1 == s.e2) { // this is included in s
+ return s;
+ }
+ }
+ // here we know that there are at least 3 distinct elements
+ HashSet<E> r = new HashSet<E>(4);
+ r.add(e1);
+ if (e2 != null) {
+ r.add(e2);
+ }
+ r.add(s.e1);
+ if (s.e2 != null) {
+ r.add(s.e2);
+ }
+ return r;
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
new file mode 100644
index 0000000..067200b
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
@@ -0,0 +1,206 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.AbstractInsnNode;
+import scala.tools.asm.tree.FieldInsnNode;
+import scala.tools.asm.tree.InvokeDynamicInsnNode;
+import scala.tools.asm.tree.LdcInsnNode;
+import scala.tools.asm.tree.MethodInsnNode;
+
+/**
+ * An {@link Interpreter} for {@link SourceValue} values.
+ *
+ * @author Eric Bruneton
+ */
+public class SourceInterpreter extends Interpreter<SourceValue> implements
+ Opcodes
+{
+
+ public SourceInterpreter() {
+ super(ASM4);
+ }
+
+ protected SourceInterpreter(final int api) {
+ super(api);
+ }
+
+ @Override
+ public SourceValue newValue(final Type type) {
+ if (type == Type.VOID_TYPE) {
+ return null;
+ }
+ return new SourceValue(type == null ? 1 : type.getSize());
+ }
+
+ @Override
+ public SourceValue newOperation(final AbstractInsnNode insn) {
+ int size;
+ switch (insn.getOpcode()) {
+ case LCONST_0:
+ case LCONST_1:
+ case DCONST_0:
+ case DCONST_1:
+ size = 2;
+ break;
+ case LDC:
+ Object cst = ((LdcInsnNode) insn).cst;
+ size = cst instanceof Long || cst instanceof Double ? 2 : 1;
+ break;
+ case GETSTATIC:
+ size = Type.getType(((FieldInsnNode) insn).desc).getSize();
+ break;
+ default:
+ size = 1;
+ }
+ return new SourceValue(size, insn);
+ }
+
+ @Override
+ public SourceValue copyOperation(final AbstractInsnNode insn, final SourceValue value) {
+ return new SourceValue(value.getSize(), insn);
+ }
+
+ @Override
+ public SourceValue unaryOperation(final AbstractInsnNode insn, final SourceValue value)
+ {
+ int size;
+ switch (insn.getOpcode()) {
+ case LNEG:
+ case DNEG:
+ case I2L:
+ case I2D:
+ case L2D:
+ case F2L:
+ case F2D:
+ case D2L:
+ size = 2;
+ break;
+ case GETFIELD:
+ size = Type.getType(((FieldInsnNode) insn).desc).getSize();
+ break;
+ default:
+ size = 1;
+ }
+ return new SourceValue(size, insn);
+ }
+
+ @Override
+ public SourceValue binaryOperation(
+ final AbstractInsnNode insn,
+ final SourceValue value1,
+ final SourceValue value2)
+ {
+ int size;
+ switch (insn.getOpcode()) {
+ case LALOAD:
+ case DALOAD:
+ case LADD:
+ case DADD:
+ case LSUB:
+ case DSUB:
+ case LMUL:
+ case DMUL:
+ case LDIV:
+ case DDIV:
+ case LREM:
+ case DREM:
+ case LSHL:
+ case LSHR:
+ case LUSHR:
+ case LAND:
+ case LOR:
+ case LXOR:
+ size = 2;
+ break;
+ default:
+ size = 1;
+ }
+ return new SourceValue(size, insn);
+ }
+
+ @Override
+ public SourceValue ternaryOperation(
+ final AbstractInsnNode insn,
+ final SourceValue value1,
+ final SourceValue value2,
+ final SourceValue value3)
+ {
+ return new SourceValue(1, insn);
+ }
+
+ @Override
+ public SourceValue naryOperation(final AbstractInsnNode insn, final List<? extends SourceValue> values) {
+ int size;
+ int opcode = insn.getOpcode();
+ if (opcode == MULTIANEWARRAY) {
+ size = 1;
+ } else {
+ String desc = (opcode == INVOKEDYNAMIC)?
+ ((InvokeDynamicInsnNode) insn).desc:
+ ((MethodInsnNode) insn).desc;
+ size = Type.getReturnType(desc).getSize();
+ }
+ return new SourceValue(size, insn);
+ }
+
+ @Override
+ public void returnOperation(
+ final AbstractInsnNode insn,
+ final SourceValue value,
+ final SourceValue expected)
+ {
+ }
+
+ @Override
+ public SourceValue merge(final SourceValue d, final SourceValue w) {
+ if (d.insns instanceof SmallSet && w.insns instanceof SmallSet) {
+ Set<AbstractInsnNode> s = ((SmallSet<AbstractInsnNode>) d.insns).union((SmallSet<AbstractInsnNode>) w.insns);
+ if (s == d.insns && d.size == w.size) {
+ return d;
+ } else {
+ return new SourceValue(Math.min(d.size, w.size), s);
+ }
+ }
+ if (d.size != w.size || !d.insns.containsAll(w.insns)) {
+ HashSet<AbstractInsnNode> s = new HashSet<AbstractInsnNode>();
+ s.addAll(d.insns);
+ s.addAll(w.insns);
+ return new SourceValue(Math.min(d.size, w.size), s);
+ }
+ return d;
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/analysis/SourceValue.java b/src/asm/scala/tools/asm/tree/analysis/SourceValue.java
new file mode 100644
index 0000000..57ff212
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/SourceValue.java
@@ -0,0 +1,97 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.Set;
+
+import scala.tools.asm.tree.AbstractInsnNode;
+
+/**
+ * A {@link Value} which keeps track of the bytecode instructions that can
+ * produce it.
+ *
+ * @author Eric Bruneton
+ */
+public class SourceValue implements Value {
+
+ /**
+ * The size of this value.
+ */
+ public final int size;
+
+ /**
+ * The instructions that can produce this value. For example, for the Java
+ * code below, the instructions that can produce the value of <tt>i</tt>
+ * at line 5 are the two ISTORE instructions at line 1 and 3:
+ *
+ * <pre>
+ * 1: i = 0;
+ * 2: if (...) {
+ * 3: i = 1;
+ * 4: }
+ * 5: return i;
+ * </pre>
+ *
+ * This field is a set of {@link AbstractInsnNode} objects.
+ */
+ public final Set<AbstractInsnNode> insns;
+
+ public SourceValue(final int size) {
+ this(size, SmallSet.<AbstractInsnNode>emptySet());
+ }
+
+ public SourceValue(final int size, final AbstractInsnNode insn) {
+ this.size = size;
+ this.insns = new SmallSet<AbstractInsnNode>(insn, null);
+ }
+
+ public SourceValue(final int size, final Set<AbstractInsnNode> insns) {
+ this.size = size;
+ this.insns = insns;
+ }
+
+ public int getSize() {
+ return size;
+ }
+
+ @Override
+ public boolean equals(final Object value) {
+ if (!(value instanceof SourceValue)) {
+ return false;
+ }
+ SourceValue v = (SourceValue) value;
+ return size == v.size && insns.equals(v.insns);
+ }
+
+ @Override
+ public int hashCode() {
+ return insns.hashCode();
+ }
+}
diff --git a/src/asm/scala/tools/asm/tree/analysis/Subroutine.java b/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
new file mode 100644
index 0000000..038880d
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
@@ -0,0 +1,93 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import scala.tools.asm.tree.JumpInsnNode;
+import scala.tools.asm.tree.LabelNode;
+
+/**
+ * A method subroutine (corresponds to a JSR instruction).
+ *
+ * @author Eric Bruneton
+ */
+class Subroutine {
+
+ LabelNode start;
+
+ boolean[] access;
+
+ List<JumpInsnNode> callers;
+
+ private Subroutine() {
+ }
+
+ Subroutine(
+ final LabelNode start,
+ final int maxLocals,
+ final JumpInsnNode caller)
+ {
+ this.start = start;
+ this.access = new boolean[maxLocals];
+ this.callers = new ArrayList<JumpInsnNode>();
+ callers.add(caller);
+ }
+
+ public Subroutine copy() {
+ Subroutine result = new Subroutine();
+ result.start = start;
+ result.access = new boolean[access.length];
+ System.arraycopy(access, 0, result.access, 0, access.length);
+ result.callers = new ArrayList<JumpInsnNode>(callers);
+ return result;
+ }
+
+ public boolean merge(final Subroutine subroutine) throws AnalyzerException {
+ boolean changes = false;
+ for (int i = 0; i < access.length; ++i) {
+ if (subroutine.access[i] && !access[i]) {
+ access[i] = true;
+ changes = true;
+ }
+ }
+ if (subroutine.start == start) {
+ for (int i = 0; i < subroutine.callers.size(); ++i) {
+ JumpInsnNode caller = subroutine.callers.get(i);
+ if (!callers.contains(caller)) {
+ callers.add(caller);
+ changes = true;
+ }
+ }
+ }
+ return changes;
+ }
+}
\ No newline at end of file
diff --git a/src/asm/scala/tools/asm/tree/analysis/Value.java b/src/asm/scala/tools/asm/tree/analysis/Value.java
new file mode 100644
index 0000000..1edf475
--- /dev/null
+++ b/src/asm/scala/tools/asm/tree/analysis/Value.java
@@ -0,0 +1,45 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.tree.analysis;
+
+/**
+ * An immutable symbolic value for semantic interpretation of bytecode.
+ *
+ * @author Eric Bruneton
+ */
+public interface Value {
+
+ /**
+ * Returns the size of this value in words.
+ *
+ * @return either 1 or 2.
+ */
+ int getSize();
+}
diff --git a/src/asm/scala/tools/asm/util/ASMifiable.java b/src/asm/scala/tools/asm/util/ASMifiable.java
new file mode 100644
index 0000000..6a31dd5
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/ASMifiable.java
@@ -0,0 +1,53 @@
+/**
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.util.Map;
+
+import scala.tools.asm.Label;
+
+/**
+ * An {@link scala.tools.asm.Attribute Attribute} that can print the ASM code
+ * to create an equivalent attribute.
+ *
+ * @author Eugene Kuleshov
+ */
+public interface ASMifiable {
+
+ /**
+ * Prints the ASM code to create an attribute equal to this attribute.
+ *
+ * @param buf a buffer used for printing Java code.
+ * @param varName name of the variable in a printed code used to store
+ * attribute instance.
+ * @param labelNames map of label instances to their names.
+ */
+ void asmify(StringBuffer buf, String varName, Map<Label, String> labelNames);
+}
diff --git a/src/asm/scala/tools/asm/util/ASMifier.java b/src/asm/scala/tools/asm/util/ASMifier.java
new file mode 100644
index 0000000..5967c87
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/ASMifier.java
@@ -0,0 +1,1238 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.io.FileInputStream;
+import java.io.PrintWriter;
+import java.util.HashMap;
+import java.util.Map;
+
+import scala.tools.asm.Attribute;
+import scala.tools.asm.ClassReader;
+import scala.tools.asm.Handle;
+import scala.tools.asm.Label;
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+
+/**
+ * A {@link Printer} that prints the ASM code to generate the classes if visits.
+ *
+ * @author Eric Bruneton
+ */
+public class ASMifier extends Printer {
+
+ /**
+ * The name of the visitor variable in the produced code.
+ */
+ protected final String name;
+
+ /**
+ * Identifier of the annotation visitor variable in the produced code.
+ */
+ protected final int id;
+
+ /**
+ * The label names. This map associates String values to Label keys. It is
+ * used only in ASMifierMethodVisitor.
+ */
+ protected Map<Label, String> labelNames;
+
+ /**
+ * Pseudo access flag used to distinguish class access flags.
+ */
+ private static final int ACCESS_CLASS = 262144;
+
+ /**
+ * Pseudo access flag used to distinguish field access flags.
+ */
+ private static final int ACCESS_FIELD = 524288;
+
+ /**
+ * Pseudo access flag used to distinguish inner class flags.
+ */
+ private static final int ACCESS_INNER = 1048576;
+
+ /**
+ * Constructs a new {@link ASMifier}. <i>Subclasses must not use this
+ * constructor</i>. Instead, they must use the
+ * {@link #ASMifier(int, String, int)} version.
+ */
+ public ASMifier() {
+ this(Opcodes.ASM4, "cw", 0);
+ }
+
+ /**
+ * Constructs a new {@link ASMifier}.
+ *
+ * @param api the ASM API version implemented by this class. Must be one of
+ * {@link Opcodes#ASM4}.
+ * @param name the name of the visitor variable in the produced code.
+ * @param id identifier of the annotation visitor variable in the produced
+ * code.
+ */
+ protected ASMifier(final int api, final String name, final int id) {
+ super(api);
+ this.name = name;
+ this.id = id;
+ }
+
+ /**
+ * Prints the ASM source code to generate the given class to the standard
+ * output. <p> Usage: ASMifier [-debug] <binary
+ * class name or class file name>
+ *
+ * @param args the command line arguments.
+ *
+ * @throws Exception if the class cannot be found, or if an IO exception
+ * occurs.
+ */
+ public static void main(final String[] args) throws Exception {
+ int i = 0;
+ int flags = ClassReader.SKIP_DEBUG;
+
+ boolean ok = true;
+ if (args.length < 1 || args.length > 2) {
+ ok = false;
+ }
+ if (ok && "-debug".equals(args[0])) {
+ i = 1;
+ flags = 0;
+ if (args.length != 2) {
+ ok = false;
+ }
+ }
+ if (!ok) {
+ System.err.println("Prints the ASM code to generate the given class.");
+ System.err.println("Usage: ASMifier [-debug] "
+ + "<fully qualified class name or class file name>");
+ return;
+ }
+ ClassReader cr;
+ if (args[i].endsWith(".class") || args[i].indexOf('\\') > -1
+ || args[i].indexOf('/') > -1)
+ {
+ cr = new ClassReader(new FileInputStream(args[i]));
+ } else {
+ cr = new ClassReader(args[i]);
+ }
+ cr.accept(new TraceClassVisitor(null,
+ new ASMifier(),
+ new PrintWriter(System.out)), flags);
+ }
+
+ // ------------------------------------------------------------------------
+ // Classes
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(
+ final int version,
+ final int access,
+ final String name,
+ final String signature,
+ final String superName,
+ final String[] interfaces)
+ {
+ String simpleName;
+ int n = name.lastIndexOf('/');
+ if (n == -1) {
+ simpleName = name;
+ } else {
+ text.add("package asm." + name.substring(0, n).replace('/', '.')
+ + ";\n");
+ simpleName = name.substring(n + 1);
+ }
+ text.add("import java.util.*;\n");
+ text.add("import org.objectweb.asm.*;\n");
+ text.add("import org.objectweb.asm.attrs.*;\n");
+ text.add("public class " + simpleName + "Dump implements Opcodes {\n\n");
+ text.add("public static byte[] dump () throws Exception {\n\n");
+ text.add("ClassWriter cw = new ClassWriter(0);\n");
+ text.add("FieldVisitor fv;\n");
+ text.add("MethodVisitor mv;\n");
+ text.add("AnnotationVisitor av0;\n\n");
+
+ buf.setLength(0);
+ buf.append("cw.visit(");
+ switch (version) {
+ case Opcodes.V1_1:
+ buf.append("V1_1");
+ break;
+ case Opcodes.V1_2:
+ buf.append("V1_2");
+ break;
+ case Opcodes.V1_3:
+ buf.append("V1_3");
+ break;
+ case Opcodes.V1_4:
+ buf.append("V1_4");
+ break;
+ case Opcodes.V1_5:
+ buf.append("V1_5");
+ break;
+ case Opcodes.V1_6:
+ buf.append("V1_6");
+ break;
+ case Opcodes.V1_7:
+ buf.append("V1_7");
+ break;
+ default:
+ buf.append(version);
+ break;
+ }
+ buf.append(", ");
+ appendAccess(access | ACCESS_CLASS);
+ buf.append(", ");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(signature);
+ buf.append(", ");
+ appendConstant(superName);
+ buf.append(", ");
+ if (interfaces != null && interfaces.length > 0) {
+ buf.append("new String[] {");
+ for (int i = 0; i < interfaces.length; ++i) {
+ buf.append(i == 0 ? " " : ", ");
+ appendConstant(interfaces[i]);
+ }
+ buf.append(" }");
+ } else {
+ buf.append("null");
+ }
+ buf.append(");\n\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitSource(final String file, final String debug) {
+ buf.setLength(0);
+ buf.append("cw.visitSource(");
+ appendConstant(file);
+ buf.append(", ");
+ appendConstant(debug);
+ buf.append(");\n\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitOuterClass(
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ buf.append("cw.visitOuterClass(");
+ appendConstant(owner);
+ buf.append(", ");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(desc);
+ buf.append(");\n\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public ASMifier visitClassAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ return visitAnnotation(desc, visible);
+ }
+
+ @Override
+ public void visitClassAttribute(final Attribute attr) {
+ visitAttribute(attr);
+ }
+
+ @Override
+ public void visitInnerClass(
+ final String name,
+ final String outerName,
+ final String innerName,
+ final int access)
+ {
+ buf.setLength(0);
+ buf.append("cw.visitInnerClass(");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(outerName);
+ buf.append(", ");
+ appendConstant(innerName);
+ buf.append(", ");
+ appendAccess(access | ACCESS_INNER);
+ buf.append(");\n\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public ASMifier visitField(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ buf.setLength(0);
+ buf.append("{\n");
+ buf.append("fv = cw.visitField(");
+ appendAccess(access | ACCESS_FIELD);
+ buf.append(", ");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(desc);
+ buf.append(", ");
+ appendConstant(signature);
+ buf.append(", ");
+ appendConstant(value);
+ buf.append(");\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("fv", 0);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
+ @Override
+ public ASMifier visitMethod(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ buf.setLength(0);
+ buf.append("{\n");
+ buf.append("mv = cw.visitMethod(");
+ appendAccess(access);
+ buf.append(", ");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(desc);
+ buf.append(", ");
+ appendConstant(signature);
+ buf.append(", ");
+ if (exceptions != null && exceptions.length > 0) {
+ buf.append("new String[] {");
+ for (int i = 0; i < exceptions.length; ++i) {
+ buf.append(i == 0 ? " " : ", ");
+ appendConstant(exceptions[i]);
+ }
+ buf.append(" }");
+ } else {
+ buf.append("null");
+ }
+ buf.append(");\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("mv", 0);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
+ @Override
+ public void visitClassEnd() {
+ text.add("cw.visitEnd();\n\n");
+ text.add("return cw.toByteArray();\n");
+ text.add("}\n");
+ text.add("}\n");
+ }
+
+ // ------------------------------------------------------------------------
+ // Annotations
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(final String name, final Object value) {
+ buf.setLength(0);
+ buf.append("av").append(id).append(".visit(");
+ appendConstant(buf, name);
+ buf.append(", ");
+ appendConstant(buf, value);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitEnum(
+ final String name,
+ final String desc,
+ final String value)
+ {
+ buf.setLength(0);
+ buf.append("av").append(id).append(".visitEnum(");
+ appendConstant(buf, name);
+ buf.append(", ");
+ appendConstant(buf, desc);
+ buf.append(", ");
+ appendConstant(buf, value);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public ASMifier visitAnnotation(
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ buf.append("{\n");
+ buf.append("AnnotationVisitor av").append(id + 1).append(" = av");
+ buf.append(id).append(".visitAnnotation(");
+ appendConstant(buf, name);
+ buf.append(", ");
+ appendConstant(buf, desc);
+ buf.append(");\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("av", id + 1);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
+ @Override
+ public ASMifier visitArray(final String name) {
+ buf.setLength(0);
+ buf.append("{\n");
+ buf.append("AnnotationVisitor av").append(id + 1).append(" = av");
+ buf.append(id).append(".visitArray(");
+ appendConstant(buf, name);
+ buf.append(");\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("av", id + 1);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
+ @Override
+ public void visitAnnotationEnd() {
+ buf.setLength(0);
+ buf.append("av").append(id).append(".visitEnd();\n");
+ text.add(buf.toString());
+ }
+
+ // ------------------------------------------------------------------------
+ // Fields
+ // ------------------------------------------------------------------------
+
+ @Override
+ public ASMifier visitFieldAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ return visitAnnotation(desc, visible);
+ }
+
+ @Override
+ public void visitFieldAttribute(final Attribute attr) {
+ visitAttribute(attr);
+ }
+
+ @Override
+ public void visitFieldEnd() {
+ buf.setLength(0);
+ buf.append(name).append(".visitEnd();\n");
+ text.add(buf.toString());
+ }
+
+ // ------------------------------------------------------------------------
+ // Methods
+ // ------------------------------------------------------------------------
+
+ @Override
+ public ASMifier visitAnnotationDefault() {
+ buf.setLength(0);
+ buf.append("{\n")
+ .append("av0 = ")
+ .append(name)
+ .append(".visitAnnotationDefault();\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("av", 0);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
+ @Override
+ public ASMifier visitMethodAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ return visitAnnotation(desc, visible);
+ }
+
+ @Override
+ public ASMifier visitParameterAnnotation(
+ final int parameter,
+ final String desc,
+ final boolean visible)
+ {
+ buf.setLength(0);
+ buf.append("{\n")
+ .append("av0 = ").append(name).append(".visitParameterAnnotation(")
+ .append(parameter)
+ .append(", ");
+ appendConstant(desc);
+ buf.append(", ").append(visible).append(");\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("av", 0);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
+ @Override
+ public void visitMethodAttribute(final Attribute attr) {
+ visitAttribute(attr);
+ }
+
+ @Override
+ public void visitCode() {
+ text.add(name + ".visitCode();\n");
+ }
+
+ @Override
+ public void visitFrame(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack)
+ {
+ buf.setLength(0);
+ switch (type) {
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ declareFrameTypes(nLocal, local);
+ declareFrameTypes(nStack, stack);
+ if (type == Opcodes.F_NEW) {
+ buf.append(name).append(".visitFrame(Opcodes.F_NEW, ");
+ } else {
+ buf.append(name).append(".visitFrame(Opcodes.F_FULL, ");
+ }
+ buf.append(nLocal).append(", new Object[] {");
+ appendFrameTypes(nLocal, local);
+ buf.append("}, ").append(nStack).append(", new Object[] {");
+ appendFrameTypes(nStack, stack);
+ buf.append('}');
+ break;
+ case Opcodes.F_APPEND:
+ declareFrameTypes(nLocal, local);
+ buf.append(name).append(".visitFrame(Opcodes.F_APPEND,")
+ .append(nLocal)
+ .append(", new Object[] {");
+ appendFrameTypes(nLocal, local);
+ buf.append("}, 0, null");
+ break;
+ case Opcodes.F_CHOP:
+ buf.append(name).append(".visitFrame(Opcodes.F_CHOP,")
+ .append(nLocal)
+ .append(", null, 0, null");
+ break;
+ case Opcodes.F_SAME:
+ buf.append(name).append(".visitFrame(Opcodes.F_SAME, 0, null, 0, null");
+ break;
+ case Opcodes.F_SAME1:
+ declareFrameTypes(1, stack);
+ buf.append(name).append(".visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[] {");
+ appendFrameTypes(1, stack);
+ buf.append('}');
+ break;
+ }
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitInsn(final int opcode) {
+ buf.setLength(0);
+ buf.append(name).append(".visitInsn(").append(OPCODES[opcode]).append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitIntInsn(final int opcode, final int operand) {
+ buf.setLength(0);
+ buf.append(name)
+ .append(".visitIntInsn(")
+ .append(OPCODES[opcode])
+ .append(", ")
+ .append(opcode == Opcodes.NEWARRAY
+ ? TYPES[operand]
+ : Integer.toString(operand))
+ .append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitVarInsn(final int opcode, final int var) {
+ buf.setLength(0);
+ buf.append(name)
+ .append(".visitVarInsn(")
+ .append(OPCODES[opcode])
+ .append(", ")
+ .append(var)
+ .append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitTypeInsn(final int opcode, final String type) {
+ buf.setLength(0);
+ buf.append(name).append(".visitTypeInsn(").append(OPCODES[opcode]).append(", ");
+ appendConstant(type);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitFieldInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ buf.append(this.name).append(".visitFieldInsn(").append(OPCODES[opcode]).append(", ");
+ appendConstant(owner);
+ buf.append(", ");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(desc);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMethodInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ buf.append(this.name).append(".visitMethodInsn(").append(OPCODES[opcode]).append(", ");
+ appendConstant(owner);
+ buf.append(", ");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(desc);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitInvokeDynamicInsn(
+ String name,
+ String desc,
+ Handle bsm,
+ Object... bsmArgs)
+ {
+ buf.setLength(0);
+ buf.append(this.name).append(".visitInvokeDynamicInsn(");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(desc);
+ buf.append(", ");
+ appendConstant(bsm);
+ buf.append(", new Object[]{");
+ for (int i = 0; i < bsmArgs.length; ++i) {
+ appendConstant(bsmArgs[i]);
+ if (i != bsmArgs.length - 1) {
+ buf.append(", ");
+ }
+ }
+ buf.append("});\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitJumpInsn(final int opcode, final Label label) {
+ buf.setLength(0);
+ declareLabel(label);
+ buf.append(name).append(".visitJumpInsn(").append(OPCODES[opcode]).append(", ");
+ appendLabel(label);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLabel(final Label label) {
+ buf.setLength(0);
+ declareLabel(label);
+ buf.append(name).append(".visitLabel(");
+ appendLabel(label);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLdcInsn(final Object cst) {
+ buf.setLength(0);
+ buf.append(name).append(".visitLdcInsn(");
+ appendConstant(cst);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitIincInsn(final int var, final int increment) {
+ buf.setLength(0);
+ buf.append(name)
+ .append(".visitIincInsn(")
+ .append(var)
+ .append(", ")
+ .append(increment)
+ .append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitTableSwitchInsn(
+ final int min,
+ final int max,
+ final Label dflt,
+ final Label... labels)
+ {
+ buf.setLength(0);
+ for (int i = 0; i < labels.length; ++i) {
+ declareLabel(labels[i]);
+ }
+ declareLabel(dflt);
+
+ buf.append(name)
+ .append(".visitTableSwitchInsn(")
+ .append(min)
+ .append(", ")
+ .append(max)
+ .append(", ");
+ appendLabel(dflt);
+ buf.append(", new Label[] {");
+ for (int i = 0; i < labels.length; ++i) {
+ buf.append(i == 0 ? " " : ", ");
+ appendLabel(labels[i]);
+ }
+ buf.append(" });\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLookupSwitchInsn(
+ final Label dflt,
+ final int[] keys,
+ final Label[] labels)
+ {
+ buf.setLength(0);
+ for (int i = 0; i < labels.length; ++i) {
+ declareLabel(labels[i]);
+ }
+ declareLabel(dflt);
+
+ buf.append(name).append(".visitLookupSwitchInsn(");
+ appendLabel(dflt);
+ buf.append(", new int[] {");
+ for (int i = 0; i < keys.length; ++i) {
+ buf.append(i == 0 ? " " : ", ").append(keys[i]);
+ }
+ buf.append(" }, new Label[] {");
+ for (int i = 0; i < labels.length; ++i) {
+ buf.append(i == 0 ? " " : ", ");
+ appendLabel(labels[i]);
+ }
+ buf.append(" });\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMultiANewArrayInsn(final String desc, final int dims) {
+ buf.setLength(0);
+ buf.append(name).append(".visitMultiANewArrayInsn(");
+ appendConstant(desc);
+ buf.append(", ").append(dims).append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitTryCatchBlock(
+ final Label start,
+ final Label end,
+ final Label handler,
+ final String type)
+ {
+ buf.setLength(0);
+ declareLabel(start);
+ declareLabel(end);
+ declareLabel(handler);
+ buf.append(name).append(".visitTryCatchBlock(");
+ appendLabel(start);
+ buf.append(", ");
+ appendLabel(end);
+ buf.append(", ");
+ appendLabel(handler);
+ buf.append(", ");
+ appendConstant(type);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLocalVariable(
+ final String name,
+ final String desc,
+ final String signature,
+ final Label start,
+ final Label end,
+ final int index)
+ {
+ buf.setLength(0);
+ buf.append(this.name).append(".visitLocalVariable(");
+ appendConstant(name);
+ buf.append(", ");
+ appendConstant(desc);
+ buf.append(", ");
+ appendConstant(signature);
+ buf.append(", ");
+ appendLabel(start);
+ buf.append(", ");
+ appendLabel(end);
+ buf.append(", ").append(index).append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLineNumber(final int line, final Label start) {
+ buf.setLength(0);
+ buf.append(name).append(".visitLineNumber(").append(line).append(", ");
+ appendLabel(start);
+ buf.append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMaxs(final int maxStack, final int maxLocals) {
+ buf.setLength(0);
+ buf.append(name)
+ .append(".visitMaxs(")
+ .append(maxStack)
+ .append(", ")
+ .append(maxLocals)
+ .append(");\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMethodEnd() {
+ buf.setLength(0);
+ buf.append(name).append(".visitEnd();\n");
+ text.add(buf.toString());
+ }
+
+ // ------------------------------------------------------------------------
+ // Common methods
+ // ------------------------------------------------------------------------
+
+ public ASMifier visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ buf.setLength(0);
+ buf.append("{\n")
+ .append("av0 = ")
+ .append(name)
+ .append(".visitAnnotation(");
+ appendConstant(desc);
+ buf.append(", ").append(visible).append(");\n");
+ text.add(buf.toString());
+ ASMifier a = createASMifier("av", 0);
+ text.add(a.getText());
+ text.add("}\n");
+ return a;
+ }
+
+ public void visitAttribute(final Attribute attr) {
+ buf.setLength(0);
+ buf.append("// ATTRIBUTE ").append(attr.type).append('\n');
+ if (attr instanceof ASMifiable) {
+ if (labelNames == null) {
+ labelNames = new HashMap<Label, String>();
+ }
+ buf.append("{\n");
+ ((ASMifiable) attr).asmify(buf, "attr", labelNames);
+ buf.append(name).append(".visitAttribute(attr);\n");
+ buf.append("}\n");
+ }
+ text.add(buf.toString());
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods
+ // ------------------------------------------------------------------------
+
+ protected ASMifier createASMifier(final String name, final int id) {
+ return new ASMifier(Opcodes.ASM4, name, id);
+ }
+
+ /**
+ * Appends a string representation of the given access modifiers to {@link
+ * #buf buf}.
+ *
+ * @param access some access modifiers.
+ */
+ void appendAccess(final int access) {
+ boolean first = true;
+ if ((access & Opcodes.ACC_PUBLIC) != 0) {
+ buf.append("ACC_PUBLIC");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_PRIVATE) != 0) {
+ buf.append("ACC_PRIVATE");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_PROTECTED) != 0) {
+ buf.append("ACC_PROTECTED");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_FINAL) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_FINAL");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_STATIC) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_STATIC");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_SYNCHRONIZED) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ if ((access & ACCESS_CLASS) == 0) {
+ buf.append("ACC_SYNCHRONIZED");
+ } else {
+ buf.append("ACC_SUPER");
+ }
+ first = false;
+ }
+ if ((access & Opcodes.ACC_VOLATILE) != 0
+ && (access & ACCESS_FIELD) != 0)
+ {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_VOLATILE");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_BRIDGE) != 0 && (access & ACCESS_CLASS) == 0
+ && (access & ACCESS_FIELD) == 0)
+ {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_BRIDGE");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_VARARGS) != 0 && (access & ACCESS_CLASS) == 0
+ && (access & ACCESS_FIELD) == 0)
+ {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_VARARGS");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_TRANSIENT) != 0
+ && (access & ACCESS_FIELD) != 0)
+ {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_TRANSIENT");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_NATIVE) != 0 && (access & ACCESS_CLASS) == 0
+ && (access & ACCESS_FIELD) == 0)
+ {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_NATIVE");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_ENUM) != 0
+ && ((access & ACCESS_CLASS) != 0
+ || (access & ACCESS_FIELD) != 0 || (access & ACCESS_INNER) != 0))
+ {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_ENUM");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_ANNOTATION) != 0
+ && ((access & ACCESS_CLASS) != 0 || (access & ACCESS_INNER) != 0))
+ {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_ANNOTATION");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_ABSTRACT) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_ABSTRACT");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_INTERFACE) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_INTERFACE");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_STRICT) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_STRICT");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_SYNTHETIC");
+ first = false;
+ }
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ if (!first) {
+ buf.append(" + ");
+ }
+ buf.append("ACC_DEPRECATED");
+ first = false;
+ }
+ if (first) {
+ buf.append('0');
+ }
+ }
+
+ /**
+ * Appends a string representation of the given constant to the given
+ * buffer.
+ *
+ * @param cst an {@link Integer}, {@link Float}, {@link Long},
+ * {@link Double} or {@link String} object. May be <tt>null</tt>.
+ */
+ protected void appendConstant(final Object cst) {
+ appendConstant(buf, cst);
+ }
+
+ /**
+ * Appends a string representation of the given constant to the given
+ * buffer.
+ *
+ * @param buf a string buffer.
+ * @param cst an {@link Integer}, {@link Float}, {@link Long},
+ * {@link Double} or {@link String} object. May be <tt>null</tt>.
+ */
+ static void appendConstant(final StringBuffer buf, final Object cst) {
+ if (cst == null) {
+ buf.append("null");
+ } else if (cst instanceof String) {
+ appendString(buf, (String) cst);
+ } else if (cst instanceof Type) {
+ buf.append("Type.getType(\"");
+ buf.append(((Type) cst).getDescriptor());
+ buf.append("\")");
+ } else if (cst instanceof Handle) {
+ buf.append("new Handle(");
+ Handle h = (Handle) cst;
+ buf.append("Opcodes.").append(HANDLE_TAG[h.getTag()]).append(", \"");
+ buf.append(h.getOwner()).append("\", \"");
+ buf.append(h.getName()).append("\", \"");
+ buf.append(h.getDesc()).append("\")");
+ } else if (cst instanceof Byte) {
+ buf.append("new Byte((byte)").append(cst).append(')');
+ } else if (cst instanceof Boolean) {
+ buf.append(((Boolean) cst).booleanValue() ? "Boolean.TRUE" : "Boolean.FALSE");
+ } else if (cst instanceof Short) {
+ buf.append("new Short((short)").append(cst).append(')');
+ } else if (cst instanceof Character) {
+ int c = ((Character) cst).charValue();
+ buf.append("new Character((char)").append(c).append(')');
+ } else if (cst instanceof Integer) {
+ buf.append("new Integer(").append(cst).append(')');
+ } else if (cst instanceof Float) {
+ buf.append("new Float(\"").append(cst).append("\")");
+ } else if (cst instanceof Long) {
+ buf.append("new Long(").append(cst).append("L)");
+ } else if (cst instanceof Double) {
+ buf.append("new Double(\"").append(cst).append("\")");
+ } else if (cst instanceof byte[]) {
+ byte[] v = (byte[]) cst;
+ buf.append("new byte[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",").append(v[i]);
+ }
+ buf.append('}');
+ } else if (cst instanceof boolean[]) {
+ boolean[] v = (boolean[]) cst;
+ buf.append("new boolean[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",").append(v[i]);
+ }
+ buf.append('}');
+ } else if (cst instanceof short[]) {
+ short[] v = (short[]) cst;
+ buf.append("new short[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",").append("(short)").append(v[i]);
+ }
+ buf.append('}');
+ } else if (cst instanceof char[]) {
+ char[] v = (char[]) cst;
+ buf.append("new char[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",")
+ .append("(char)")
+ .append((int) v[i]);
+ }
+ buf.append('}');
+ } else if (cst instanceof int[]) {
+ int[] v = (int[]) cst;
+ buf.append("new int[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",").append(v[i]);
+ }
+ buf.append('}');
+ } else if (cst instanceof long[]) {
+ long[] v = (long[]) cst;
+ buf.append("new long[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",").append(v[i]).append('L');
+ }
+ buf.append('}');
+ } else if (cst instanceof float[]) {
+ float[] v = (float[]) cst;
+ buf.append("new float[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",").append(v[i]).append('f');
+ }
+ buf.append('}');
+ } else if (cst instanceof double[]) {
+ double[] v = (double[]) cst;
+ buf.append("new double[] {");
+ for (int i = 0; i < v.length; i++) {
+ buf.append(i == 0 ? "" : ",").append(v[i]).append('d');
+ }
+ buf.append('}');
+ }
+ }
+
+ private void declareFrameTypes(final int n, final Object[] o) {
+ for (int i = 0; i < n; ++i) {
+ if (o[i] instanceof Label) {
+ declareLabel((Label) o[i]);
+ }
+ }
+ }
+
+ private void appendFrameTypes(final int n, final Object[] o) {
+ for (int i = 0; i < n; ++i) {
+ if (i > 0) {
+ buf.append(", ");
+ }
+ if (o[i] instanceof String) {
+ appendConstant(o[i]);
+ } else if (o[i] instanceof Integer) {
+ switch (((Integer) o[i]).intValue()) {
+ case 0:
+ buf.append("Opcodes.TOP");
+ break;
+ case 1:
+ buf.append("Opcodes.INTEGER");
+ break;
+ case 2:
+ buf.append("Opcodes.FLOAT");
+ break;
+ case 3:
+ buf.append("Opcodes.DOUBLE");
+ break;
+ case 4:
+ buf.append("Opcodes.LONG");
+ break;
+ case 5:
+ buf.append("Opcodes.NULL");
+ break;
+ case 6:
+ buf.append("Opcodes.UNINITIALIZED_THIS");
+ break;
+ }
+ } else {
+ appendLabel((Label) o[i]);
+ }
+ }
+ }
+
+ /**
+ * Appends a declaration of the given label to {@link #buf buf}. This
+ * declaration is of the form "Label lXXX = new Label();". Does nothing if
+ * the given label has already been declared.
+ *
+ * @param l a label.
+ */
+ protected void declareLabel(final Label l) {
+ if (labelNames == null) {
+ labelNames = new HashMap<Label, String>();
+ }
+ String name = labelNames.get(l);
+ if (name == null) {
+ name = "l" + labelNames.size();
+ labelNames.put(l, name);
+ buf.append("Label ").append(name).append(" = new Label();\n");
+ }
+ }
+
+ /**
+ * Appends the name of the given label to {@link #buf buf}. The given label
+ * <i>must</i> already have a name. One way to ensure this is to always
+ * call {@link #declareLabel declared} before calling this method.
+ *
+ * @param l a label.
+ */
+ protected void appendLabel(final Label l) {
+ buf.append(labelNames.get(l));
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
new file mode 100644
index 0000000..8030c14
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
@@ -0,0 +1,142 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+
+/**
+ * An {@link AnnotationVisitor} that checks that its methods are properly used.
+ *
+ * @author Eric Bruneton
+ */
+public class CheckAnnotationAdapter extends AnnotationVisitor {
+
+ private final boolean named;
+
+ private boolean end;
+
+ public CheckAnnotationAdapter(final AnnotationVisitor av) {
+ this(av, true);
+ }
+
+ CheckAnnotationAdapter(final AnnotationVisitor av, final boolean named) {
+ super(Opcodes.ASM4, av);
+ this.named = named;
+ }
+
+ @Override
+ public void visit(final String name, final Object value) {
+ checkEnd();
+ checkName(name);
+ if (!(value instanceof Byte || value instanceof Boolean
+ || value instanceof Character || value instanceof Short
+ || value instanceof Integer || value instanceof Long
+ || value instanceof Float || value instanceof Double
+ || value instanceof String || value instanceof Type
+ || value instanceof byte[] || value instanceof boolean[]
+ || value instanceof char[] || value instanceof short[]
+ || value instanceof int[] || value instanceof long[]
+ || value instanceof float[] || value instanceof double[]))
+ {
+ throw new IllegalArgumentException("Invalid annotation value");
+ }
+ if (value instanceof Type) {
+ int sort = ((Type) value).getSort();
+ if (sort != Type.OBJECT && sort != Type.ARRAY) {
+ throw new IllegalArgumentException("Invalid annotation value");
+ }
+ }
+ if (av != null) {
+ av.visit(name, value);
+ }
+ }
+
+ @Override
+ public void visitEnum(
+ final String name,
+ final String desc,
+ final String value)
+ {
+ checkEnd();
+ checkName(name);
+ CheckMethodAdapter.checkDesc(desc, false);
+ if (value == null) {
+ throw new IllegalArgumentException("Invalid enum value");
+ }
+ if (av != null) {
+ av.visitEnum(name, desc, value);
+ }
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String name,
+ final String desc)
+ {
+ checkEnd();
+ checkName(name);
+ CheckMethodAdapter.checkDesc(desc, false);
+ return new CheckAnnotationAdapter(av == null
+ ? null
+ : av.visitAnnotation(name, desc));
+ }
+
+ @Override
+ public AnnotationVisitor visitArray(final String name) {
+ checkEnd();
+ checkName(name);
+ return new CheckAnnotationAdapter(av == null
+ ? null
+ : av.visitArray(name), false);
+ }
+
+ @Override
+ public void visitEnd() {
+ checkEnd();
+ end = true;
+ if (av != null) {
+ av.visitEnd();
+ }
+ }
+
+ private void checkEnd() {
+ if (end) {
+ throw new IllegalStateException("Cannot call a visit method after visitEnd has been called");
+ }
+ }
+
+ private void checkName(final String name) {
+ if (named && name == null) {
+ throw new IllegalArgumentException("Annotation value name must not be null");
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/CheckClassAdapter.java b/src/asm/scala/tools/asm/util/CheckClassAdapter.java
new file mode 100644
index 0000000..a455322
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/CheckClassAdapter.java
@@ -0,0 +1,603 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.io.FileInputStream;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.ClassReader;
+import scala.tools.asm.ClassVisitor;
+import scala.tools.asm.FieldVisitor;
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.ClassNode;
+import scala.tools.asm.tree.MethodNode;
+import scala.tools.asm.tree.analysis.Analyzer;
+import scala.tools.asm.tree.analysis.BasicValue;
+import scala.tools.asm.tree.analysis.Frame;
+import scala.tools.asm.tree.analysis.SimpleVerifier;
+
+/**
+ * A {@link ClassVisitor} that checks that its methods are properly used. More
+ * precisely this class adapter checks each method call individually, based
+ * <i>only</i> on its arguments, but does <i>not</i> check the <i>sequence</i>
+ * of method calls. For example, the invalid sequence
+ * <tt>visitField(ACC_PUBLIC, "i", "I", null)</tt> <tt>visitField(ACC_PUBLIC,
+ * "i", "D", null)</tt>
+ * will <i>not</i> be detected by this class adapter.
+ *
+ * <p><code>CheckClassAdapter</code> can be also used to verify bytecode
+ * transformations in order to make sure transformed bytecode is sane. For
+ * example:
+ *
+ * <pre>
+ * InputStream is = ...; // get bytes for the source class
+ * ClassReader cr = new ClassReader(is);
+ * ClassWriter cw = new ClassWriter(cr, ClassWriter.COMPUTE_MAXS);
+ * ClassVisitor cv = new <b>MyClassAdapter</b>(new CheckClassAdapter(cw));
+ * cr.accept(cv, 0);
+ *
+ * StringWriter sw = new StringWriter();
+ * PrintWriter pw = new PrintWriter(sw);
+ * CheckClassAdapter.verify(new ClassReader(cw.toByteArray()), false, pw);
+ * assertTrue(sw.toString(), sw.toString().length()==0);
+ * </pre>
+ *
+ * Above code runs transformed bytecode trough the
+ * <code>CheckClassAdapter</code>. It won't be exactly the same verification
+ * as JVM does, but it run data flow analysis for the code of each method and
+ * checks that expectations are met for each method instruction.
+ *
+ * <p>If method bytecode has errors, assertion text will show the erroneous
+ * instruction number and dump of the failed method with information about
+ * locals and stack slot for each instruction. For example (format is -
+ * insnNumber locals : stack):
+ *
+ * <pre>
+ * org.objectweb.asm.tree.analysis.AnalyzerException: Error at instruction 71: Expected I, but found .
+ * at org.objectweb.asm.tree.analysis.Analyzer.analyze(Analyzer.java:289)
+ * at org.objectweb.asm.util.CheckClassAdapter.verify(CheckClassAdapter.java:135)
+ * ...
+ * remove()V
+ * 00000 LinkedBlockingQueue$Itr . . . . . . . . :
+ * ICONST_0
+ * 00001 LinkedBlockingQueue$Itr . . . . . . . . : I
+ * ISTORE 2
+ * 00001 LinkedBlockingQueue$Itr <b>.</b> I . . . . . . :
+ * ...
+ *
+ * 00071 LinkedBlockingQueue$Itr <b>.</b> I . . . . . . :
+ * ILOAD 1
+ * 00072 <b>?</b>
+ * INVOKESPECIAL java/lang/Integer.<init> (I)V
+ * ...
+ * </pre>
+ *
+ * In the above output you can see that variable 1 loaded by
+ * <code>ILOAD 1</code> instruction at position <code>00071</code> is not
+ * initialized. You can also see that at the beginning of the method (code
+ * inserted by the transformation) variable 2 is initialized.
+ *
+ * <p>Note that when used like that, <code>CheckClassAdapter.verify()</code>
+ * can trigger additional class loading, because it is using
+ * <code>SimpleVerifier</code>.
+ *
+ * @author Eric Bruneton
+ */
+public class CheckClassAdapter extends ClassVisitor {
+
+ /**
+ * The class version number.
+ */
+ private int version;
+
+ /**
+ * <tt>true</tt> if the visit method has been called.
+ */
+ private boolean start;
+
+ /**
+ * <tt>true</tt> if the visitSource method has been called.
+ */
+ private boolean source;
+
+ /**
+ * <tt>true</tt> if the visitOuterClass method has been called.
+ */
+ private boolean outer;
+
+ /**
+ * <tt>true</tt> if the visitEnd method has been called.
+ */
+ private boolean end;
+
+ /**
+ * The already visited labels. This map associate Integer values to Label
+ * keys.
+ */
+ private Map<Label, Integer> labels;
+
+ /**
+ * <tt>true</tt> if the method code must be checked with a BasicVerifier.
+ */
+ private boolean checkDataFlow;
+
+ /**
+ * Checks a given class. <p> Usage: CheckClassAdapter <binary
+ * class name or class file name>
+ *
+ * @param args the command line arguments.
+ *
+ * @throws Exception if the class cannot be found, or if an IO exception
+ * occurs.
+ */
+ public static void main(final String[] args) throws Exception {
+ if (args.length != 1) {
+ System.err.println("Verifies the given class.");
+ System.err.println("Usage: CheckClassAdapter "
+ + "<fully qualified class name or class file name>");
+ return;
+ }
+ ClassReader cr;
+ if (args[0].endsWith(".class")) {
+ cr = new ClassReader(new FileInputStream(args[0]));
+ } else {
+ cr = new ClassReader(args[0]);
+ }
+
+ verify(cr, false, new PrintWriter(System.err));
+ }
+
+ /**
+ * Checks a given class.
+ *
+ * @param cr a <code>ClassReader</code> that contains bytecode for the
+ * analysis.
+ * @param loader a <code>ClassLoader</code> which will be used to load
+ * referenced classes. This is useful if you are verifiying multiple
+ * interdependent classes.
+ * @param dump true if bytecode should be printed out not only when errors
+ * are found.
+ * @param pw write where results going to be printed
+ */
+ public static void verify(
+ final ClassReader cr,
+ final ClassLoader loader,
+ final boolean dump,
+ final PrintWriter pw)
+ {
+ ClassNode cn = new ClassNode();
+ cr.accept(new CheckClassAdapter(cn, false), ClassReader.SKIP_DEBUG);
+
+ Type syperType = cn.superName == null
+ ? null
+ : Type.getObjectType(cn.superName);
+ List<MethodNode> methods = cn.methods;
+
+ List<Type> interfaces = new ArrayList<Type>();
+ for (Iterator<String> i = cn.interfaces.iterator(); i.hasNext();) {
+ interfaces.add(Type.getObjectType(i.next().toString()));
+ }
+
+ for (int i = 0; i < methods.size(); ++i) {
+ MethodNode method = methods.get(i);
+ SimpleVerifier verifier = new SimpleVerifier(Type.getObjectType(cn.name),
+ syperType,
+ interfaces,
+ (cn.access & Opcodes.ACC_INTERFACE) != 0);
+ Analyzer<BasicValue> a = new Analyzer<BasicValue>(verifier);
+ if (loader != null) {
+ verifier.setClassLoader(loader);
+ }
+ try {
+ a.analyze(cn.name, method);
+ if (!dump) {
+ continue;
+ }
+ } catch (Exception e) {
+ e.printStackTrace(pw);
+ }
+ printAnalyzerResult(method, a, pw);
+ }
+ pw.flush();
+ }
+
+ /**
+ * Checks a given class
+ *
+ * @param cr a <code>ClassReader</code> that contains bytecode for the
+ * analysis.
+ * @param dump true if bytecode should be printed out not only when errors
+ * are found.
+ * @param pw write where results going to be printed
+ */
+ public static void verify(
+ final ClassReader cr,
+ final boolean dump,
+ final PrintWriter pw)
+ {
+ verify(cr, null, dump, pw);
+ }
+
+ static void printAnalyzerResult(
+ MethodNode method,
+ Analyzer<BasicValue> a,
+ final PrintWriter pw)
+ {
+ Frame<BasicValue>[] frames = a.getFrames();
+ Textifier t = new Textifier();
+ TraceMethodVisitor mv = new TraceMethodVisitor(t);
+
+ pw.println(method.name + method.desc);
+ for (int j = 0; j < method.instructions.size(); ++j) {
+ method.instructions.get(j).accept(mv);
+
+ StringBuffer s = new StringBuffer();
+ Frame<BasicValue> f = frames[j];
+ if (f == null) {
+ s.append('?');
+ } else {
+ for (int k = 0; k < f.getLocals(); ++k) {
+ s.append(getShortName(f.getLocal(k).toString()))
+ .append(' ');
+ }
+ s.append(" : ");
+ for (int k = 0; k < f.getStackSize(); ++k) {
+ s.append(getShortName(f.getStack(k).toString()))
+ .append(' ');
+ }
+ }
+ while (s.length() < method.maxStack + method.maxLocals + 1) {
+ s.append(' ');
+ }
+ pw.print(Integer.toString(j + 100000).substring(1));
+ pw.print(" " + s + " : " + t.text.get(t.text.size() - 1));
+ }
+ for (int j = 0; j < method.tryCatchBlocks.size(); ++j) {
+ method.tryCatchBlocks.get(j).accept(mv);
+ pw.print(" " + t.text.get(t.text.size() - 1));
+ }
+ pw.println();
+ }
+
+ private static String getShortName(final String name) {
+ int n = name.lastIndexOf('/');
+ int k = name.length();
+ if (name.charAt(k - 1) == ';') {
+ k--;
+ }
+ return n == -1 ? name : name.substring(n + 1, k);
+ }
+
+ /**
+ * Constructs a new {@link CheckClassAdapter}. <i>Subclasses must not use
+ * this constructor</i>. Instead, they must use the
+ * {@link #CheckClassAdapter(int, ClassVisitor, boolean)} version.
+ *
+ * @param cv the class visitor to which this adapter must delegate calls.
+ */
+ public CheckClassAdapter(final ClassVisitor cv) {
+ this(cv, true);
+ }
+
+ /**
+ * Constructs a new {@link CheckClassAdapter}. <i>Subclasses must not use
+ * this constructor</i>. Instead, they must use the
+ * {@link #CheckClassAdapter(int, ClassVisitor, boolean)} version.
+ *
+ * @param cv the class visitor to which this adapter must delegate calls.
+ * @param checkDataFlow <tt>true</tt> to perform basic data flow checks, or
+ * <tt>false</tt> to not perform any data flow check (see
+ * {@link CheckMethodAdapter}). This option requires valid maxLocals
+ * and maxStack values.
+ */
+ public CheckClassAdapter(final ClassVisitor cv, final boolean checkDataFlow)
+ {
+ this(Opcodes.ASM4, cv, checkDataFlow);
+ }
+
+ /**
+ * Constructs a new {@link CheckClassAdapter}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param cv the class visitor to which this adapter must delegate calls.
+ * @param checkDataFlow <tt>true</tt> to perform basic data flow checks, or
+ * <tt>false</tt> to not perform any data flow check (see
+ * {@link CheckMethodAdapter}). This option requires valid maxLocals
+ * and maxStack values.
+ */
+ protected CheckClassAdapter(
+ final int api,
+ final ClassVisitor cv,
+ final boolean checkDataFlow)
+ {
+ super(api, cv);
+ this.labels = new HashMap<Label, Integer>();
+ this.checkDataFlow = checkDataFlow;
+ }
+
+ // ------------------------------------------------------------------------
+ // Implementation of the ClassVisitor interface
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(
+ final int version,
+ final int access,
+ final String name,
+ final String signature,
+ final String superName,
+ final String[] interfaces)
+ {
+ if (start) {
+ throw new IllegalStateException("visit must be called only once");
+ }
+ start = true;
+ checkState();
+ checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_FINAL
+ + Opcodes.ACC_SUPER + Opcodes.ACC_INTERFACE
+ + Opcodes.ACC_ABSTRACT + Opcodes.ACC_SYNTHETIC
+ + Opcodes.ACC_ANNOTATION + Opcodes.ACC_ENUM
+ + Opcodes.ACC_DEPRECATED
+ + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ if (name == null || !name.endsWith("package-info")) {
+ CheckMethodAdapter.checkInternalName(name, "class name");
+ }
+ if ("java/lang/Object".equals(name)) {
+ if (superName != null) {
+ throw new IllegalArgumentException("The super class name of the Object class must be 'null'");
+ }
+ } else {
+ CheckMethodAdapter.checkInternalName(superName, "super class name");
+ }
+ if (signature != null) {
+ CheckMethodAdapter.checkClassSignature(signature);
+ }
+ if ((access & Opcodes.ACC_INTERFACE) != 0) {
+ if (!"java/lang/Object".equals(superName)) {
+ throw new IllegalArgumentException("The super class name of interfaces must be 'java/lang/Object'");
+ }
+ }
+ if (interfaces != null) {
+ for (int i = 0; i < interfaces.length; ++i) {
+ CheckMethodAdapter.checkInternalName(interfaces[i],
+ "interface name at index " + i);
+ }
+ }
+ this.version = version;
+ super.visit(version, access, name, signature, superName, interfaces);
+ }
+
+ @Override
+ public void visitSource(final String file, final String debug) {
+ checkState();
+ if (source) {
+ throw new IllegalStateException("visitSource can be called only once.");
+ }
+ source = true;
+ super.visitSource(file, debug);
+ }
+
+ @Override
+ public void visitOuterClass(
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ checkState();
+ if (outer) {
+ throw new IllegalStateException("visitOuterClass can be called only once.");
+ }
+ outer = true;
+ if (owner == null) {
+ throw new IllegalArgumentException("Illegal outer class owner");
+ }
+ if (desc != null) {
+ CheckMethodAdapter.checkMethodDesc(desc);
+ }
+ super.visitOuterClass(owner, name, desc);
+ }
+
+ @Override
+ public void visitInnerClass(
+ final String name,
+ final String outerName,
+ final String innerName,
+ final int access)
+ {
+ checkState();
+ CheckMethodAdapter.checkInternalName(name, "class name");
+ if (outerName != null) {
+ CheckMethodAdapter.checkInternalName(outerName, "outer class name");
+ }
+ if (innerName != null) {
+ CheckMethodAdapter.checkIdentifier(innerName, "inner class name");
+ }
+ checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE
+ + Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC
+ + Opcodes.ACC_FINAL + Opcodes.ACC_INTERFACE
+ + Opcodes.ACC_ABSTRACT + Opcodes.ACC_SYNTHETIC
+ + Opcodes.ACC_ANNOTATION + Opcodes.ACC_ENUM);
+ super.visitInnerClass(name, outerName, innerName, access);
+ }
+
+ @Override
+ public FieldVisitor visitField(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ checkState();
+ checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE
+ + Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC
+ + Opcodes.ACC_FINAL + Opcodes.ACC_VOLATILE
+ + Opcodes.ACC_TRANSIENT + Opcodes.ACC_SYNTHETIC
+ + Opcodes.ACC_ENUM + Opcodes.ACC_DEPRECATED
+ + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ CheckMethodAdapter.checkUnqualifiedName(version, name, "field name");
+ CheckMethodAdapter.checkDesc(desc, false);
+ if (signature != null) {
+ CheckMethodAdapter.checkFieldSignature(signature);
+ }
+ if (value != null) {
+ CheckMethodAdapter.checkConstant(value);
+ }
+ FieldVisitor av = super.visitField(access, name, desc, signature, value);
+ return new CheckFieldAdapter(av);
+ }
+
+ @Override
+ public MethodVisitor visitMethod(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ checkState();
+ checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE
+ + Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC
+ + Opcodes.ACC_FINAL + Opcodes.ACC_SYNCHRONIZED
+ + Opcodes.ACC_BRIDGE + Opcodes.ACC_VARARGS + Opcodes.ACC_NATIVE
+ + Opcodes.ACC_ABSTRACT + Opcodes.ACC_STRICT
+ + Opcodes.ACC_SYNTHETIC + Opcodes.ACC_DEPRECATED
+ + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ CheckMethodAdapter.checkMethodIdentifier(version, name, "method name");
+ CheckMethodAdapter.checkMethodDesc(desc);
+ if (signature != null) {
+ CheckMethodAdapter.checkMethodSignature(signature);
+ }
+ if (exceptions != null) {
+ for (int i = 0; i < exceptions.length; ++i) {
+ CheckMethodAdapter.checkInternalName(exceptions[i],
+ "exception name at index " + i);
+ }
+ }
+ CheckMethodAdapter cma;
+ if (checkDataFlow) {
+ cma = new CheckMethodAdapter(access,
+ name,
+ desc,
+ super.visitMethod(access, name, desc, signature, exceptions),
+ labels);
+ } else {
+ cma = new CheckMethodAdapter(super.visitMethod(access,
+ name,
+ desc,
+ signature,
+ exceptions), labels);
+ }
+ cma.version = version;
+ return cma;
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ checkState();
+ CheckMethodAdapter.checkDesc(desc, false);
+ return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible));
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ checkState();
+ if (attr == null) {
+ throw new IllegalArgumentException("Invalid attribute (must not be null)");
+ }
+ super.visitAttribute(attr);
+ }
+
+ @Override
+ public void visitEnd() {
+ checkState();
+ end = true;
+ super.visitEnd();
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Checks that the visit method has been called and that visitEnd has not
+ * been called.
+ */
+ private void checkState() {
+ if (!start) {
+ throw new IllegalStateException("Cannot visit member before visit has been called.");
+ }
+ if (end) {
+ throw new IllegalStateException("Cannot visit member after visitEnd has been called.");
+ }
+ }
+
+ /**
+ * Checks that the given access flags do not contain invalid flags. This
+ * method also checks that mutually incompatible flags are not set
+ * simultaneously.
+ *
+ * @param access the access flags to be checked
+ * @param possibleAccess the valid access flags.
+ */
+ static void checkAccess(final int access, final int possibleAccess) {
+ if ((access & ~possibleAccess) != 0) {
+ throw new IllegalArgumentException("Invalid access flags: "
+ + access);
+ }
+ int pub = (access & Opcodes.ACC_PUBLIC) == 0 ? 0 : 1;
+ int pri = (access & Opcodes.ACC_PRIVATE) == 0 ? 0 : 1;
+ int pro = (access & Opcodes.ACC_PROTECTED) == 0 ? 0 : 1;
+ if (pub + pri + pro > 1) {
+ throw new IllegalArgumentException("public private and protected are mutually exclusive: "
+ + access);
+ }
+ int fin = (access & Opcodes.ACC_FINAL) == 0 ? 0 : 1;
+ int abs = (access & Opcodes.ACC_ABSTRACT) == 0 ? 0 : 1;
+ if (fin + abs > 1) {
+ throw new IllegalArgumentException("final and abstract are mutually exclusive: "
+ + access);
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/CheckFieldAdapter.java b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
new file mode 100644
index 0000000..bdcbe14
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
@@ -0,0 +1,97 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.FieldVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A {@link FieldVisitor} that checks that its methods are properly used.
+ */
+public class CheckFieldAdapter extends FieldVisitor {
+
+ private boolean end;
+
+ /**
+ * Constructs a new {@link CheckFieldAdapter}. <i>Subclasses must not use
+ * this constructor</i>. Instead, they must use the
+ * {@link #CheckFieldAdapter(int, FieldVisitor)} version.
+ *
+ * @param fv the field visitor to which this adapter must delegate calls.
+ */
+ public CheckFieldAdapter(final FieldVisitor fv) {
+ this(Opcodes.ASM4, fv);
+ }
+
+ /**
+ * Constructs a new {@link CheckFieldAdapter}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param fv the field visitor to which this adapter must delegate calls.
+ */
+ protected CheckFieldAdapter(final int api, final FieldVisitor fv) {
+ super(api, fv);
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ checkEnd();
+ CheckMethodAdapter.checkDesc(desc, false);
+ return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible));
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ checkEnd();
+ if (attr == null) {
+ throw new IllegalArgumentException("Invalid attribute (must not be null)");
+ }
+ super.visitAttribute(attr);
+ }
+
+ @Override
+ public void visitEnd() {
+ checkEnd();
+ end = true;
+ super.visitEnd();
+ }
+
+ private void checkEnd() {
+ if (end) {
+ throw new IllegalStateException("Cannot call a visit method after visitEnd has been called");
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/CheckMethodAdapter.java b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
new file mode 100644
index 0000000..7549765
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
@@ -0,0 +1,1668 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.Handle;
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+import scala.tools.asm.tree.MethodNode;
+import scala.tools.asm.tree.analysis.Analyzer;
+import scala.tools.asm.tree.analysis.BasicValue;
+import scala.tools.asm.tree.analysis.BasicVerifier;
+
+/**
+ * A {@link MethodVisitor} that checks that its methods are properly used. More
+ * precisely this method adapter checks each instruction individually, i.e.,
+ * each visit method checks some preconditions based <i>only</i> on its
+ * arguments - such as the fact that the given opcode is correct for a given
+ * visit method. This adapter can also perform some basic data flow checks (more
+ * precisely those that can be performed without the full class hierarchy - see
+ * {@link scala.tools.asm.tree.analysis.BasicVerifier}). For instance in a
+ * method whose signature is <tt>void m ()</tt>, the invalid instruction
+ * IRETURN, or the invalid sequence IADD L2I will be detected if the data flow
+ * checks are enabled. These checks are enabled by using the
+ * {@link #CheckMethodAdapter(int,String,String,MethodVisitor,Map)} constructor.
+ * They are not performed if any other constructor is used.
+ *
+ * @author Eric Bruneton
+ */
+public class CheckMethodAdapter extends MethodVisitor {
+
+ /**
+ * The class version number. NOTE(review): not assigned in this chunk;
+ * presumably set by the enclosing class checker (CheckClassAdapter)
+ * before instructions are visited — confirm.
+ */
+ public int version;
+
+ /**
+ * <tt>true</tt> if the visitCode method has been called.
+ */
+ private boolean startCode;
+
+ /**
+ * <tt>true</tt> if the visitMaxs method has been called.
+ */
+ private boolean endCode;
+
+ /**
+ * <tt>true</tt> if the visitEnd method has been called.
+ */
+ private boolean endMethod;
+
+ /**
+ * Number of visited instructions.
+ */
+ private int insnCount;
+
+ /**
+ * The already visited labels. This map associate Integer values to pseudo
+ * code offsets.
+ */
+ private final Map<Label, Integer> labels;
+
+ /**
+ * The labels used in this method. Every used label must be visited with
+ * visitLabel before the end of the method (i.e. should be in #labels).
+ */
+ private Set<Label> usedLabels;
+
+ /**
+ * The exception handler ranges. Each pair of list element contains the
+ * start and end labels of an exception handler block.
+ */
+ private List<Label> handlers;
+
+ /**
+ * Code of the visit method to be used for each opcode.
+ */
+ private static final int[] TYPE;
+
+ /**
+ * The Label.status field. NOTE(review): not initialized here; presumably
+ * resolved lazily via reflection elsewhere in this class — confirm.
+ */
+ private static Field labelStatusField;
+
+ static {
+ // Each character encodes, for one opcode, the category of visit method
+ // that must accept it: TYPE[opcode] = s.charAt(opcode) - 'A' - 1, so
+ // 'A' encodes -1 (invalid/unreachable opcode), 'B' encodes 0
+ // (visitInsn), 'C' encodes 1 (visitIntInsn), and so on. The generator
+ // for this string is kept in the commented-out main() below.
+ String s = "BBBBBBBBBBBBBBBBCCIAADDDDDAAAAAAAAAAAAAAAAAAAABBBBBBBBDD"
+ + "DDDAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB"
+ + "BBBBBBBBBBBBBBBBBBBJBBBBBBBBBBBBBBBBBBBBHHHHHHHHHHHHHHHHD"
+ + "KLBBBBBBFFFFGGGGAECEBBEEBBAMHHAA";
+ TYPE = new int[s.length()];
+ for (int i = 0; i < TYPE.length; ++i) {
+ TYPE[i] = s.charAt(i) - 'A' - 1;
+ }
+ }
+
+ // code to generate the above string
+ // public static void main (String[] args) {
+ // int[] TYPE = new int[] {
+ // 0, //NOP
+ // 0, //ACONST_NULL
+ // 0, //ICONST_M1
+ // 0, //ICONST_0
+ // 0, //ICONST_1
+ // 0, //ICONST_2
+ // 0, //ICONST_3
+ // 0, //ICONST_4
+ // 0, //ICONST_5
+ // 0, //LCONST_0
+ // 0, //LCONST_1
+ // 0, //FCONST_0
+ // 0, //FCONST_1
+ // 0, //FCONST_2
+ // 0, //DCONST_0
+ // 0, //DCONST_1
+ // 1, //BIPUSH
+ // 1, //SIPUSH
+ // 7, //LDC
+ // -1, //LDC_W
+ // -1, //LDC2_W
+ // 2, //ILOAD
+ // 2, //LLOAD
+ // 2, //FLOAD
+ // 2, //DLOAD
+ // 2, //ALOAD
+ // -1, //ILOAD_0
+ // -1, //ILOAD_1
+ // -1, //ILOAD_2
+ // -1, //ILOAD_3
+ // -1, //LLOAD_0
+ // -1, //LLOAD_1
+ // -1, //LLOAD_2
+ // -1, //LLOAD_3
+ // -1, //FLOAD_0
+ // -1, //FLOAD_1
+ // -1, //FLOAD_2
+ // -1, //FLOAD_3
+ // -1, //DLOAD_0
+ // -1, //DLOAD_1
+ // -1, //DLOAD_2
+ // -1, //DLOAD_3
+ // -1, //ALOAD_0
+ // -1, //ALOAD_1
+ // -1, //ALOAD_2
+ // -1, //ALOAD_3
+ // 0, //IALOAD
+ // 0, //LALOAD
+ // 0, //FALOAD
+ // 0, //DALOAD
+ // 0, //AALOAD
+ // 0, //BALOAD
+ // 0, //CALOAD
+ // 0, //SALOAD
+ // 2, //ISTORE
+ // 2, //LSTORE
+ // 2, //FSTORE
+ // 2, //DSTORE
+ // 2, //ASTORE
+ // -1, //ISTORE_0
+ // -1, //ISTORE_1
+ // -1, //ISTORE_2
+ // -1, //ISTORE_3
+ // -1, //LSTORE_0
+ // -1, //LSTORE_1
+ // -1, //LSTORE_2
+ // -1, //LSTORE_3
+ // -1, //FSTORE_0
+ // -1, //FSTORE_1
+ // -1, //FSTORE_2
+ // -1, //FSTORE_3
+ // -1, //DSTORE_0
+ // -1, //DSTORE_1
+ // -1, //DSTORE_2
+ // -1, //DSTORE_3
+ // -1, //ASTORE_0
+ // -1, //ASTORE_1
+ // -1, //ASTORE_2
+ // -1, //ASTORE_3
+ // 0, //IASTORE
+ // 0, //LASTORE
+ // 0, //FASTORE
+ // 0, //DASTORE
+ // 0, //AASTORE
+ // 0, //BASTORE
+ // 0, //CASTORE
+ // 0, //SASTORE
+ // 0, //POP
+ // 0, //POP2
+ // 0, //DUP
+ // 0, //DUP_X1
+ // 0, //DUP_X2
+ // 0, //DUP2
+ // 0, //DUP2_X1
+ // 0, //DUP2_X2
+ // 0, //SWAP
+ // 0, //IADD
+ // 0, //LADD
+ // 0, //FADD
+ // 0, //DADD
+ // 0, //ISUB
+ // 0, //LSUB
+ // 0, //FSUB
+ // 0, //DSUB
+ // 0, //IMUL
+ // 0, //LMUL
+ // 0, //FMUL
+ // 0, //DMUL
+ // 0, //IDIV
+ // 0, //LDIV
+ // 0, //FDIV
+ // 0, //DDIV
+ // 0, //IREM
+ // 0, //LREM
+ // 0, //FREM
+ // 0, //DREM
+ // 0, //INEG
+ // 0, //LNEG
+ // 0, //FNEG
+ // 0, //DNEG
+ // 0, //ISHL
+ // 0, //LSHL
+ // 0, //ISHR
+ // 0, //LSHR
+ // 0, //IUSHR
+ // 0, //LUSHR
+ // 0, //IAND
+ // 0, //LAND
+ // 0, //IOR
+ // 0, //LOR
+ // 0, //IXOR
+ // 0, //LXOR
+ // 8, //IINC
+ // 0, //I2L
+ // 0, //I2F
+ // 0, //I2D
+ // 0, //L2I
+ // 0, //L2F
+ // 0, //L2D
+ // 0, //F2I
+ // 0, //F2L
+ // 0, //F2D
+ // 0, //D2I
+ // 0, //D2L
+ // 0, //D2F
+ // 0, //I2B
+ // 0, //I2C
+ // 0, //I2S
+ // 0, //LCMP
+ // 0, //FCMPL
+ // 0, //FCMPG
+ // 0, //DCMPL
+ // 0, //DCMPG
+ // 6, //IFEQ
+ // 6, //IFNE
+ // 6, //IFLT
+ // 6, //IFGE
+ // 6, //IFGT
+ // 6, //IFLE
+ // 6, //IF_ICMPEQ
+ // 6, //IF_ICMPNE
+ // 6, //IF_ICMPLT
+ // 6, //IF_ICMPGE
+ // 6, //IF_ICMPGT
+ // 6, //IF_ICMPLE
+ // 6, //IF_ACMPEQ
+ // 6, //IF_ACMPNE
+ // 6, //GOTO
+ // 6, //JSR
+ // 2, //RET
+ // 9, //TABLESWITCH
+ // 10, //LOOKUPSWITCH
+ // 0, //IRETURN
+ // 0, //LRETURN
+ // 0, //FRETURN
+ // 0, //DRETURN
+ // 0, //ARETURN
+ // 0, //RETURN
+ // 4, //GETSTATIC
+ // 4, //PUTSTATIC
+ // 4, //GETFIELD
+ // 4, //PUTFIELD
+ // 5, //INVOKEVIRTUAL
+ // 5, //INVOKESPECIAL
+ // 5, //INVOKESTATIC
+ // 5, //INVOKEINTERFACE
+ // -1, //INVOKEDYNAMIC
+ // 3, //NEW
+ // 1, //NEWARRAY
+ // 3, //ANEWARRAY
+ // 0, //ARRAYLENGTH
+ // 0, //ATHROW
+ // 3, //CHECKCAST
+ // 3, //INSTANCEOF
+ // 0, //MONITORENTER
+ // 0, //MONITOREXIT
+ // -1, //WIDE
+ // 11, //MULTIANEWARRAY
+ // 6, //IFNULL
+ // 6, //IFNONNULL
+ // -1, //GOTO_W
+ // -1 //JSR_W
+ // };
+ // for (int i = 0; i < TYPE.length; ++i) {
+ // System.out.print((char)(TYPE[i] + 1 + 'A'));
+ // }
+ // System.out.println();
+ // }
+
+ /**
+ * Constructs a new {@link CheckMethodAdapter} object. This method adapter
+ * will not perform any data flow check (see
+ * {@link #CheckMethodAdapter(int,String,String,MethodVisitor,Map)}).
+ * <i>Subclasses must not use this constructor</i>. Instead, they must use
+ * the {@link #CheckMethodAdapter(int, MethodVisitor, Map)} version.
+ *
+ * @param mv the method visitor to which this adapter must delegate calls.
+ */
+ public CheckMethodAdapter(final MethodVisitor mv) {
+ this(mv, new HashMap<Label, Integer>());
+ }
+
+ /**
+ * Constructs a new {@link CheckMethodAdapter} object. This method adapter
+ * will not perform any data flow check (see
+ * {@link #CheckMethodAdapter(int,String,String,MethodVisitor,Map)}).
+ * <i>Subclasses must not use this constructor</i>. Instead, they must use
+ * the {@link #CheckMethodAdapter(int, MethodVisitor, Map)} version.
+ *
+ * @param mv the method visitor to which this adapter must delegate calls.
+ * @param labels a map of already visited labels (in other methods).
+ */
+ public CheckMethodAdapter(
+ final MethodVisitor mv,
+ final Map<Label, Integer> labels)
+ {
+ this(Opcodes.ASM4, mv, labels);
+ }
+
+ /**
+ * Constructs a new {@link CheckMethodAdapter} object. This method adapter
+ * will not perform any data flow check (see
+ * {@link #CheckMethodAdapter(int,String,String,MethodVisitor,Map)}).
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param mv the method visitor to which this adapter must delegate calls.
+ * @param labels a map of already visited labels (in other methods).
+ */
+ protected CheckMethodAdapter(
+ final int api,
+ final MethodVisitor mv,
+ final Map<Label, Integer> labels)
+ {
+ super(api, mv);
+ this.labels = labels;
+ this.usedLabels = new HashSet<Label>();
+ this.handlers = new ArrayList<Label>();
+ }
+
+ /**
+ * Constructs a new {@link CheckMethodAdapter} object. This method adapter
+ * will perform basic data flow checks. For instance in a method whose
+ * signature is <tt>void m ()</tt>, the invalid instruction IRETURN, or the
+ * invalid sequence IADD L2I will be detected.
+ *
+ * @param access the method's access flags.
+ * @param name the method's name.
+ * @param desc the method's descriptor (see {@link Type Type}).
+ * @param cmv the method visitor to which this adapter must delegate calls.
+ * @param labels a map of already visited labels (in other methods).
+ */
+ public CheckMethodAdapter(
+ final int access,
+ final String name,
+ final String desc,
+ final MethodVisitor cmv,
+ final Map<Label, Integer> labels)
+ {
+ // The whole method is first buffered into a MethodNode; when its
+ // visitEnd is reached the method is analyzed with a BasicVerifier and,
+ // if it passes, replayed into the real visitor cmv via accept().
+ this(new MethodNode(access, name, desc, null, null) {
+ @Override
+ public void visitEnd() {
+ Analyzer<BasicValue> a = new Analyzer<BasicValue>(new BasicVerifier());
+ try {
+ a.analyze("dummy", this);
+ } catch (Exception e) {
+ // An IndexOutOfBoundsException with zero maxs almost certainly
+ // means the caller forgot to compute/provide maxLocals/maxStack.
+ if (e instanceof IndexOutOfBoundsException
+ && maxLocals == 0 && maxStack == 0)
+ {
+ throw new RuntimeException("Data flow checking option requires valid, non zero maxLocals and maxStack values.");
+ }
+ // NOTE(review): the stack trace is printed unconditionally before
+ // rethrowing — looks like a deliberate debugging aid; confirm.
+ e.printStackTrace();
+ StringWriter sw = new StringWriter();
+ PrintWriter pw = new PrintWriter(sw, true);
+ CheckClassAdapter.printAnalyzerResult(this, a, pw);
+ pw.close();
+ throw new RuntimeException(e.getMessage() + ' '
+ + sw.toString());
+ }
+ accept(cmv);
+ }
+ },
+ labels);
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ checkEndMethod();
+ // An annotation descriptor can never be the void type.
+ checkDesc(desc, false);
+ return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible));
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotationDefault() {
+ checkEndMethod();
+ // 'false' = the nested adapter does not require named values.
+ return new CheckAnnotationAdapter(super.visitAnnotationDefault(), false);
+ }
+
+ @Override
+ public AnnotationVisitor visitParameterAnnotation(
+ final int parameter,
+ final String desc,
+ final boolean visible)
+ {
+ checkEndMethod();
+ checkDesc(desc, false);
+ return new CheckAnnotationAdapter(super.visitParameterAnnotation(parameter,
+ desc,
+ visible));
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ checkEndMethod();
+ if (attr == null) {
+ throw new IllegalArgumentException("Invalid attribute (must not be null)");
+ }
+ super.visitAttribute(attr);
+ }
+
+ @Override
+ public void visitCode() {
+ // Marks the start of the instruction sequence; required before any
+ // visitXxxInsn call (see checkStartCode).
+ startCode = true;
+ super.visitCode();
+ }
+
+ @Override
+ public void visitFrame(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack)
+ {
+ // Maximum number of locals/stack entries allowed for this frame type.
+ int mLocal;
+ int mStack;
+ switch (type) {
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ // Full frames: no limit on either count.
+ mLocal = Integer.MAX_VALUE;
+ mStack = Integer.MAX_VALUE;
+ break;
+
+ case Opcodes.F_SAME:
+ // Same frame: no locals appended, empty stack.
+ mLocal = 0;
+ mStack = 0;
+ break;
+
+ case Opcodes.F_SAME1:
+ // Same frame with exactly one stack item.
+ mLocal = 0;
+ mStack = 1;
+ break;
+
+ case Opcodes.F_APPEND:
+ case Opcodes.F_CHOP:
+ // At most 3 locals appended (or chopped), empty stack.
+ mLocal = 3;
+ mStack = 0;
+ break;
+
+ default:
+ throw new IllegalArgumentException("Invalid frame type " + type);
+ }
+
+ if (nLocal > mLocal) {
+ throw new IllegalArgumentException("Invalid nLocal=" + nLocal
+ + " for frame type " + type);
+ }
+ if (nStack > mStack) {
+ throw new IllegalArgumentException("Invalid nStack=" + nStack
+ + " for frame type " + type);
+ }
+
+ // F_CHOP only removes locals, so there are no local values to validate.
+ if (type != Opcodes.F_CHOP) {
+ if (nLocal > 0 && (local == null || local.length < nLocal)) {
+ throw new IllegalArgumentException("Array local[] is shorter than nLocal");
+ }
+ for (int i = 0; i < nLocal; ++i) {
+ checkFrameValue(local[i]);
+ }
+ }
+ if (nStack > 0 && (stack == null || stack.length < nStack)) {
+ throw new IllegalArgumentException("Array stack[] is shorter than nStack");
+ }
+ for (int i = 0; i < nStack; ++i) {
+ checkFrameValue(stack[i]);
+ }
+
+ super.visitFrame(type, nLocal, local, nStack, stack);
+ }
+
+ @Override
+ public void visitInsn(final int opcode) {
+ checkStartCode();
+ checkEndCode();
+ // Category 0 = zero-operand instructions.
+ checkOpcode(opcode, 0);
+ super.visitInsn(opcode);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitIntInsn(final int opcode, final int operand) {
+ checkStartCode();
+ checkEndCode();
+ // Category 1 = single int operand (BIPUSH, SIPUSH, NEWARRAY).
+ checkOpcode(opcode, 1);
+ switch (opcode) {
+ case Opcodes.BIPUSH:
+ checkSignedByte(operand, "Invalid operand");
+ break;
+ case Opcodes.SIPUSH:
+ checkSignedShort(operand, "Invalid operand");
+ break;
+ // case Constants.NEWARRAY:
+ default:
+ // NEWARRAY: operand must be one of the T_* primitive type codes.
+ if (operand < Opcodes.T_BOOLEAN || operand > Opcodes.T_LONG) {
+ throw new IllegalArgumentException("Invalid operand (must be an array type code T_...): "
+ + operand);
+ }
+ }
+ super.visitIntInsn(opcode, operand);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitVarInsn(final int opcode, final int var) {
+ checkStartCode();
+ checkEndCode();
+ // Category 2 = local variable instructions (xLOAD/xSTORE/RET).
+ checkOpcode(opcode, 2);
+ checkUnsignedShort(var, "Invalid variable index");
+ super.visitVarInsn(opcode, var);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitTypeInsn(final int opcode, final String type) {
+ checkStartCode();
+ checkEndCode();
+ // Category 3 = type instructions (NEW, ANEWARRAY, CHECKCAST, INSTANCEOF).
+ checkOpcode(opcode, 3);
+ checkInternalName(type, "type");
+ if (opcode == Opcodes.NEW && type.charAt(0) == '[') {
+ throw new IllegalArgumentException("NEW cannot be used to create arrays: "
+ + type);
+ }
+ super.visitTypeInsn(opcode, type);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitFieldInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ checkStartCode();
+ checkEndCode();
+ // Category 4 = field access instructions (GET/PUT STATIC/FIELD).
+ checkOpcode(opcode, 4);
+ checkInternalName(owner, "owner");
+ checkUnqualifiedName(version, name, "name");
+ checkDesc(desc, false);
+ super.visitFieldInsn(opcode, owner, name, desc);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitMethodInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ checkStartCode();
+ checkEndCode();
+ // Category 5 = method invocation instructions (INVOKEVIRTUAL etc.).
+ checkOpcode(opcode, 5);
+ checkMethodIdentifier(version, name, "name");
+ checkInternalName(owner, "owner");
+ checkMethodDesc(desc);
+ super.visitMethodInsn(opcode, owner, name, desc);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitInvokeDynamicInsn(
+ String name,
+ String desc,
+ Handle bsm,
+ Object... bsmArgs)
+ {
+ checkStartCode();
+ checkEndCode();
+ checkMethodIdentifier(version, name, "name");
+ checkMethodDesc(desc);
+ // A bootstrap method handle must be a static method or a constructor.
+ if (bsm.getTag() != Opcodes.H_INVOKESTATIC
+ && bsm.getTag() != Opcodes.H_NEWINVOKESPECIAL)
+ {
+ throw new IllegalArgumentException("invalid handle tag "
+ + bsm.getTag());
+ }
+ // Bootstrap arguments obey the same rules as LDC constants.
+ for (int i = 0; i < bsmArgs.length; i++) {
+ checkLDCConstant(bsmArgs[i]);
+ }
+ super.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitJumpInsn(final int opcode, final Label label) {
+ checkStartCode();
+ checkEndCode();
+ // Category 6 = jump instructions.
+ checkOpcode(opcode, 6);
+ checkLabel(label, false, "label");
+ checkNonDebugLabel(label);
+ super.visitJumpInsn(opcode, label);
+ // Remember the target so visitMaxs can verify it was defined.
+ usedLabels.add(label);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitLabel(final Label label) {
+ checkStartCode();
+ checkEndCode();
+ checkLabel(label, false, "label");
+ if (labels.get(label) != null) {
+ throw new IllegalArgumentException("Already visited label");
+ }
+ // Record the pseudo code offset (instruction index) of this label.
+ labels.put(label, new Integer(insnCount));
+ super.visitLabel(label);
+ }
+
+ @Override
+ public void visitLdcInsn(final Object cst) {
+ checkStartCode();
+ checkEndCode();
+ checkLDCConstant(cst);
+ super.visitLdcInsn(cst);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitIincInsn(final int var, final int increment) {
+ checkStartCode();
+ checkEndCode();
+ checkUnsignedShort(var, "Invalid variable index");
+ checkSignedShort(increment, "Invalid increment");
+ super.visitIincInsn(var, increment);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitTableSwitchInsn(
+ final int min,
+ final int max,
+ final Label dflt,
+ final Label... labels)
+ {
+ checkStartCode();
+ checkEndCode();
+ if (max < min) {
+ throw new IllegalArgumentException("Max = " + max
+ + " must be greater than or equal to min = " + min);
+ }
+ checkLabel(dflt, false, "default label");
+ checkNonDebugLabel(dflt);
+ if (labels == null || labels.length != max - min + 1) {
+ throw new IllegalArgumentException("There must be max - min + 1 labels");
+ }
+ for (int i = 0; i < labels.length; ++i) {
+ checkLabel(labels[i], false, "label at index " + i);
+ checkNonDebugLabel(labels[i]);
+ }
+ super.visitTableSwitchInsn(min, max, dflt, labels);
+ // Fix: record the default label as used, mirroring visitLookupSwitchInsn.
+ // Previously dflt was never added to usedLabels, so an undefined default
+ // label escaped the "Undefined label used" check in visitMaxs.
+ usedLabels.add(dflt);
+ for (int i = 0; i < labels.length; ++i) {
+ usedLabels.add(labels[i]);
+ }
+ ++insnCount;
+ }
+
+ @Override
+ public void visitLookupSwitchInsn(
+ final Label dflt,
+ final int[] keys,
+ final Label[] labels)
+ {
+ // Consistency: perform the start-code check before the end-code check,
+ // in the same order as every other visit method (the reversed order was
+ // harmless — endCode implies startCode — but inconsistent).
+ checkStartCode();
+ checkEndCode();
+ checkLabel(dflt, false, "default label");
+ checkNonDebugLabel(dflt);
+ if (keys == null || labels == null || keys.length != labels.length) {
+ throw new IllegalArgumentException("There must be the same number of keys and labels");
+ }
+ for (int i = 0; i < labels.length; ++i) {
+ checkLabel(labels[i], false, "label at index " + i);
+ checkNonDebugLabel(labels[i]);
+ }
+ super.visitLookupSwitchInsn(dflt, keys, labels);
+ // Record every jump target so visitMaxs can verify they are defined.
+ usedLabels.add(dflt);
+ for (int i = 0; i < labels.length; ++i) {
+ usedLabels.add(labels[i]);
+ }
+ ++insnCount;
+ }
+
+ @Override
+ public void visitMultiANewArrayInsn(final String desc, final int dims) {
+ checkStartCode();
+ checkEndCode();
+ checkDesc(desc, false);
+ if (desc.charAt(0) != '[') {
+ throw new IllegalArgumentException("Invalid descriptor (must be an array type descriptor): "
+ + desc);
+ }
+ if (dims < 1) {
+ throw new IllegalArgumentException("Invalid dimensions (must be greater than 0): "
+ + dims);
+ }
+ // dims may not exceed the number of array dimensions in the descriptor
+ // (lastIndexOf('[') + 1 counts the leading '[' characters).
+ if (dims > desc.lastIndexOf('[') + 1) {
+ throw new IllegalArgumentException("Invalid dimensions (must not be greater than dims(desc)): "
+ + dims);
+ }
+ super.visitMultiANewArrayInsn(desc, dims);
+ ++insnCount;
+ }
+
+ @Override
+ public void visitTryCatchBlock(
+ final Label start,
+ final Label end,
+ final Label handler,
+ final String type)
+ {
+ checkStartCode();
+ checkEndCode();
+ checkLabel(start, false, "start label");
+ checkLabel(end, false, "end label");
+ checkLabel(handler, false, "handler label");
+ checkNonDebugLabel(start);
+ checkNonDebugLabel(end);
+ checkNonDebugLabel(handler);
+ // Per the MethodVisitor contract, try/catch blocks must be declared
+ // before any of their labels is visited.
+ if (labels.get(start) != null || labels.get(end) != null
+ || labels.get(handler) != null)
+ {
+ throw new IllegalStateException("Try catch blocks must be visited before their labels");
+ }
+ if (type != null) {
+ checkInternalName(type, "type");
+ }
+ super.visitTryCatchBlock(start, end, handler, type);
+ // Record the (start, end) pair so visitMaxs can validate the range.
+ handlers.add(start);
+ handlers.add(end);
+ }
+
+ @Override
+ public void visitLocalVariable(
+ final String name,
+ final String desc,
+ final String signature,
+ final Label start,
+ final Label end,
+ final int index)
+ {
+ checkStartCode();
+ checkEndCode();
+ checkUnqualifiedName(version, name, "name");
+ checkDesc(desc, false);
+ // checked=true: both labels must already have been visited, so the
+ // labels.get lookups below cannot return null.
+ checkLabel(start, true, "start label");
+ checkLabel(end, true, "end label");
+ checkUnsignedShort(index, "Invalid variable index");
+ int s = labels.get(start).intValue();
+ int e = labels.get(end).intValue();
+ if (e < s) {
+ throw new IllegalArgumentException("Invalid start and end labels (end must be greater than start)");
+ }
+ super.visitLocalVariable(name, desc, signature, start, end, index);
+ }
+
+ @Override
+ public void visitLineNumber(final int line, final Label start) {
+ checkStartCode();
+ checkEndCode();
+ checkUnsignedShort(line, "Invalid line number");
+ checkLabel(start, true, "start label");
+ super.visitLineNumber(line, start);
+ }
+
+ @Override
+ public void visitMaxs(final int maxStack, final int maxLocals) {
+ checkStartCode();
+ checkEndCode();
+ endCode = true;
+ // Every label referenced by an instruction must have been defined via
+ // visitLabel by the time the code section ends.
+ for (Label l : usedLabels) {
+ if (labels.get(l) == null) {
+ throw new IllegalStateException("Undefined label used");
+ }
+ }
+ // Handler ranges were recorded pairwise (start, end) in visitTryCatchBlock.
+ for (int i = 0; i < handlers.size(); ) {
+ Integer start = labels.get(handlers.get(i++));
+ Integer end = labels.get(handlers.get(i++));
+ if (start == null || end == null) {
+ throw new IllegalStateException("Undefined try catch block labels");
+ }
+ if (end.intValue() <= start.intValue()) {
+ // Fix: corrected message typo ("Emty" -> "Empty").
+ throw new IllegalStateException("Empty try catch block handler range");
+ }
+ }
+ checkUnsignedShort(maxStack, "Invalid max stack");
+ checkUnsignedShort(maxLocals, "Invalid max locals");
+ super.visitMaxs(maxStack, maxLocals);
+ }
+
+ @Override
+ public void visitEnd() {
+ checkEndMethod();
+ endMethod = true;
+ super.visitEnd();
+ }
+
+ // -------------------------------------------------------------------------
+
+ /**
+ * Checks that the visitCode method has been called.
+ */
+ void checkStartCode() {
+ if (!startCode) {
+ throw new IllegalStateException("Cannot visit instructions before visitCode has been called.");
+ }
+ }
+
+ /**
+ * Checks that the visitMaxs method has not been called.
+ */
+ void checkEndCode() {
+ if (endCode) {
+ throw new IllegalStateException("Cannot visit instructions after visitMaxs has been called.");
+ }
+ }
+
+ /**
+ * Checks that the visitEnd method has not been called.
+ */
+ void checkEndMethod() {
+ if (endMethod) {
+ throw new IllegalStateException("Cannot visit elements after visitEnd has been called.");
+ }
+ }
+
+ /**
+ * Checks a stack frame value. Valid values are the Opcodes.* frame type
+ * constants, an internal class name (String), or a Label designating a
+ * NEW instruction (for uninitialized values).
+ *
+ * @param value the value to be checked.
+ */
+ void checkFrameValue(final Object value) {
+ if (value == Opcodes.TOP || value == Opcodes.INTEGER
+ || value == Opcodes.FLOAT || value == Opcodes.LONG
+ || value == Opcodes.DOUBLE || value == Opcodes.NULL
+ || value == Opcodes.UNINITIALIZED_THIS)
+ {
+ return;
+ }
+ if (value instanceof String) {
+ checkInternalName((String) value, "Invalid stack frame value");
+ return;
+ }
+ if (!(value instanceof Label)) {
+ throw new IllegalArgumentException("Invalid stack frame value: "
+ + value);
+ } else {
+ usedLabels.add((Label) value);
+ }
+ }
+
+ /**
+ * Checks that the type of the given opcode is equal to the given type.
+ *
+ * @param opcode the opcode to be checked.
+ * @param type the expected opcode type (index into the TYPE table built in
+ * the static initializer).
+ */
+ static void checkOpcode(final int opcode, final int type) {
+ if (opcode < 0 || opcode > 199 || TYPE[opcode] != type) {
+ throw new IllegalArgumentException("Invalid opcode: " + opcode);
+ }
+ }
+
+ /**
+ * Checks that the given value is a signed byte.
+ *
+ * @param value the value to be checked.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkSignedByte(final int value, final String msg) {
+ if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) {
+ throw new IllegalArgumentException(msg
+ + " (must be a signed byte): " + value);
+ }
+ }
+
+ /**
+ * Checks that the given value is a signed short.
+ *
+ * @param value the value to be checked.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkSignedShort(final int value, final String msg) {
+ if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) {
+ throw new IllegalArgumentException(msg
+ + " (must be a signed short): " + value);
+ }
+ }
+
+ /**
+ * Checks that the given value is an unsigned short.
+ *
+ * @param value the value to be checked.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkUnsignedShort(final int value, final String msg) {
+ if (value < 0 || value > 65535) {
+ throw new IllegalArgumentException(msg
+ + " (must be an unsigned short): " + value);
+ }
+ }
+
+ /**
+ * Checks that the given value is an {@link Integer}, a {@link Float}, a
+ * {@link Long}, a {@link Double} or a {@link String}.
+ *
+ * @param cst the value to be checked.
+ */
+ static void checkConstant(final Object cst) {
+ if (!(cst instanceof Integer) && !(cst instanceof Float)
+ && !(cst instanceof Long) && !(cst instanceof Double)
+ && !(cst instanceof String))
+ {
+ throw new IllegalArgumentException("Invalid constant: " + cst);
+ }
+ }
+
+ /**
+ * Checks that the given value is a valid operand for the LDC instruction.
+ * Type and Handle constants additionally require a minimum class file
+ * version (1.5 for class constants, 1.7 for method types and handles).
+ *
+ * @param cst the value to be checked.
+ */
+ void checkLDCConstant(final Object cst) {
+ if (cst instanceof Type) {
+ int s = ((Type) cst).getSort();
+ if (s != Type.OBJECT && s != Type.ARRAY && s != Type.METHOD) {
+ throw new IllegalArgumentException("Illegal LDC constant value");
+ }
+ if (s != Type.METHOD && (version & 0xFFFF) < Opcodes.V1_5) {
+ throw new IllegalArgumentException("ldc of a constant class requires at least version 1.5");
+ }
+ if (s == Type.METHOD && (version & 0xFFFF) < Opcodes.V1_7) {
+ throw new IllegalArgumentException("ldc of a method type requires at least version 1.7");
+ }
+ } else if (cst instanceof Handle) {
+ if ((version & 0xFFFF) < Opcodes.V1_7) {
+ throw new IllegalArgumentException("ldc of a handle requires at least version 1.7");
+ }
+ int tag = ((Handle) cst).getTag();
+ if (tag < Opcodes.H_GETFIELD || tag > Opcodes.H_INVOKEINTERFACE) {
+ throw new IllegalArgumentException("invalid handle tag "
+ + tag);
+ }
+ } else {
+ checkConstant(cst);
+ }
+ }
+
+ /**
+ * Checks that the given string is a valid unqualified name. For class
+ * files before 1.5 the stricter Java-identifier rule is applied; from 1.5
+ * on, only '.', ';', '[' and '/' are forbidden (per JVMS 4.2.2).
+ *
+ * @param version the class version.
+ * @param name the string to be checked.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkUnqualifiedName(
+ int version,
+ final String name,
+ final String msg)
+ {
+ if ((version & 0xFFFF) < Opcodes.V1_5) {
+ checkIdentifier(name, msg);
+ } else {
+ for (int i = 0; i < name.length(); ++i) {
+ if (".;[/".indexOf(name.charAt(i)) != -1) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must be a valid unqualified name): " + name);
+ }
+ }
+ }
+ }
+
+ /**
+ * Checks that the given string is a valid Java identifier.
+ *
+ * @param name the string to be checked.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkIdentifier(final String name, final String msg) {
+ checkIdentifier(name, 0, -1, msg);
+ }
+
+ /**
+ * Checks that the given substring is a valid Java identifier.
+ *
+ * @param name the string to be checked.
+ * @param start index of the first character of the identifier (inclusive).
+ * @param end index of the last character of the identifier (exclusive). -1
+ * is equivalent to <tt>name.length()</tt> if name is not
+ * <tt>null</tt>.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkIdentifier(
+ final String name,
+ final int start,
+ final int end,
+ final String msg)
+ {
+ if (name == null || (end == -1 ? name.length() <= start : end <= start))
+ {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must not be null or empty)");
+ }
+ if (!Character.isJavaIdentifierStart(name.charAt(start))) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must be a valid Java identifier): " + name);
+ }
+ int max = end == -1 ? name.length() : end;
+ for (int i = start + 1; i < max; ++i) {
+ if (!Character.isJavaIdentifierPart(name.charAt(i))) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must be a valid Java identifier): " + name);
+ }
+ }
+ }
+
+ /**
+ * Checks that the given string is a valid Java identifier or is equal to
+ * <tt>&lt;init&gt;</tt> or <tt>&lt;clinit&gt;</tt>.
+ *
+ * @param version the class version.
+ * @param name the string to be checked.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkMethodIdentifier(
+ int version,
+ final String name,
+ final String msg)
+ {
+ if (name == null || name.length() == 0) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must not be null or empty)");
+ }
+ if ("<init>".equals(name) || "<clinit>".equals(name)) {
+ return;
+ }
+ // From 1.5 on, only the characters forbidden by JVMS 4.2.2 are rejected.
+ if ((version & 0xFFFF) >= Opcodes.V1_5) {
+ for (int i = 0; i < name.length(); ++i) {
+ if (".;[/<>".indexOf(name.charAt(i)) != -1) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must be a valid unqualified name): " + name);
+ }
+ }
+ return;
+ }
+ // NOTE(review): the two messages below word the same rule slightly
+ // differently ("a '<init>', '<clinit>'" vs "'<init>' or '<clinit>'").
+ if (!Character.isJavaIdentifierStart(name.charAt(0))) {
+ throw new IllegalArgumentException("Invalid "
+ + msg
+ + " (must be a '<init>', '<clinit>' or a valid Java identifier): "
+ + name);
+ }
+ for (int i = 1; i < name.length(); ++i) {
+ if (!Character.isJavaIdentifierPart(name.charAt(i))) {
+ throw new IllegalArgumentException("Invalid "
+ + msg
+ + " (must be '<init>' or '<clinit>' or a valid Java identifier): "
+ + name);
+ }
+ }
+ }
+
+ /**
+ * Checks that the given string is a valid internal class name.
+ *
+ * @param name the string to be checked.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkInternalName(final String name, final String msg) {
+ if (name == null || name.length() == 0) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must not be null or empty)");
+ }
+ if (name.charAt(0) == '[') {
+ checkDesc(name, false);
+ } else {
+ checkInternalName(name, 0, -1, msg);
+ }
+ }
+
+ /**
+ * Checks that the given substring is a valid internal class name.
+ *
+ * @param name the string to be checked.
+ * @param start index of the first character of the identifier (inclusive).
+ * @param end index of the last character of the identifier (exclusive). -1
+ * is equivalent to <tt>name.length()</tt> if name is not
+ * <tt>null</tt>.
+ * @param msg a message to be used in case of error.
+ */
+ static void checkInternalName(
+ final String name,
+ final int start,
+ final int end,
+ final String msg)
+ {
+ int max = end == -1 ? name.length() : end;
+ try {
+ int begin = start;
+ int slash;
+ do {
+ slash = name.indexOf('/', begin + 1);
+ if (slash == -1 || slash > max) {
+ slash = max;
+ }
+ checkIdentifier(name, begin, slash, null);
+ begin = slash + 1;
+ } while (slash != max);
+ } catch (IllegalArgumentException _) {
+ throw new IllegalArgumentException("Invalid "
+ + msg
+ + " (must be a fully qualified class name in internal form): "
+ + name);
+ }
+ }
+
+ /**
+ * Checks that the given string is a valid type descriptor.
+ *
+ * @param desc the string to be checked.
+ * @param canBeVoid <tt>true</tt> if <tt>V</tt> can be considered valid.
+ */
+ static void checkDesc(final String desc, final boolean canBeVoid) {
+ int end = checkDesc(desc, 0, canBeVoid);
+ if (end != desc.length()) {
+ throw new IllegalArgumentException("Invalid descriptor: " + desc);
+ }
+ }
+
+ /**
+ * Checks that the given substring is a valid type descriptor.
+ *
+ * @param desc the string to be checked.
+ * @param start index of the first character of the identifier (inclusive).
+ * @param canBeVoid <tt>true</tt> if <tt>V</tt> can be considered valid.
+ * @return the index of the last character of the type descriptor, plus one.
+ */
+ static int checkDesc(
+ final String desc,
+ final int start,
+ final boolean canBeVoid)
+ {
+ if (desc == null || start >= desc.length()) {
+ throw new IllegalArgumentException("Invalid type descriptor (must not be null or empty)");
+ }
+ int index;
+ switch (desc.charAt(start)) {
+ case 'V':
+ // 'V' (void) is only legal where the caller allows it, i.e.
+ // method return types.
+ if (canBeVoid) {
+ return start + 1;
+ } else {
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ case 'F':
+ case 'J':
+ case 'D':
+ // Primitive type descriptors are single characters.
+ return start + 1;
+ case '[':
+ // Skip all array dimensions, then validate the element type
+ // (an element type may never be void).
+ index = start + 1;
+ while (index < desc.length() && desc.charAt(index) == '[') {
+ ++index;
+ }
+ if (index < desc.length()) {
+ return checkDesc(desc, index, false);
+ } else {
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ case 'L':
+ // Object type: 'L' InternalName ';', with a non-empty name.
+ index = desc.indexOf(';', start);
+ if (index == -1 || index - start < 2) {
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ try {
+ checkInternalName(desc, start + 1, index, null);
+ } catch (IllegalArgumentException ignored) {
+ // Renamed from '_': a lone underscore is a reserved
+ // identifier (compile error) since Java 9.
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ return index + 1;
+ default:
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ }
+
+ /**
+ * Checks that the given string is a valid method descriptor.
+ *
+ * @param desc the string to be checked.
+ */
+ static void checkMethodDesc(final String desc) {
+ if (desc == null || desc.length() == 0) {
+ throw new IllegalArgumentException("Invalid method descriptor (must not be null or empty)");
+ }
+ // Shortest legal method descriptor is "()V" (length 3).
+ if (desc.charAt(0) != '(' || desc.length() < 3) {
+ throw new IllegalArgumentException("Invalid descriptor: " + desc);
+ }
+ int start = 1;
+ if (desc.charAt(start) != ')') {
+ do {
+ // void is not a legal parameter type.
+ if (desc.charAt(start) == 'V') {
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ start = checkDesc(desc, start, false);
+ } while (start < desc.length() && desc.charAt(start) != ')');
+ }
+ // Skip the ')' and validate the return type, where void is allowed;
+ // nothing may follow the return type.
+ start = checkDesc(desc, start + 1, true);
+ if (start != desc.length()) {
+ throw new IllegalArgumentException("Invalid descriptor: " + desc);
+ }
+ }
+
+ /**
+ * Checks a class signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ */
+ static void checkClassSignature(final String signature) {
+ // ClassSignature:
+ // FormalTypeParameters? ClassTypeSignature ClassTypeSignature*
+
+ int pos = 0;
+ if (getChar(signature, 0) == '<') {
+ pos = checkFormalTypeParameters(signature, pos);
+ }
+ // First ClassTypeSignature is the superclass; each following one
+ // (introduced by 'L') is an implemented interface.
+ pos = checkClassTypeSignature(signature, pos);
+ while (getChar(signature, pos) == 'L') {
+ pos = checkClassTypeSignature(signature, pos);
+ }
+ // The whole string must have been consumed.
+ if (pos != signature.length()) {
+ throw new IllegalArgumentException(signature + ": error at index "
+ + pos);
+ }
+ }
+
+ /**
+ * Checks a method signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ */
+ static void checkMethodSignature(final String signature) {
+ // MethodTypeSignature:
+ // FormalTypeParameters? ( TypeSignature* ) ( TypeSignature | V ) (
+ // ^ClassTypeSignature | ^TypeVariableSignature )*
+
+ int pos = 0;
+ if (getChar(signature, 0) == '<') {
+ pos = checkFormalTypeParameters(signature, pos);
+ }
+ // Parameter types: every type signature starts with one of these
+ // characters (primitives, 'L' object, '[' array, 'T' type variable).
+ pos = checkChar('(', signature, pos);
+ while ("ZCBSIFJDL[T".indexOf(getChar(signature, pos)) != -1) {
+ pos = checkTypeSignature(signature, pos);
+ }
+ pos = checkChar(')', signature, pos);
+ // Return type: 'V' (void) is allowed only here.
+ if (getChar(signature, pos) == 'V') {
+ ++pos;
+ } else {
+ pos = checkTypeSignature(signature, pos);
+ }
+ // Each '^' introduces a thrown exception type (the throws clause).
+ while (getChar(signature, pos) == '^') {
+ ++pos;
+ if (getChar(signature, pos) == 'L') {
+ pos = checkClassTypeSignature(signature, pos);
+ } else {
+ pos = checkTypeVariableSignature(signature, pos);
+ }
+ }
+ if (pos != signature.length()) {
+ throw new IllegalArgumentException(signature + ": error at index "
+ + pos);
+ }
+ }
+
+ /**
+ * Checks a field signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ */
+ static void checkFieldSignature(final String signature) {
+ int pos = checkFieldTypeSignature(signature, 0);
+ if (pos != signature.length()) {
+ throw new IllegalArgumentException(signature + ": error at index "
+ + pos);
+ }
+ }
+
+ /**
+ * Checks the formal type parameters of a class or method signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkFormalTypeParameters(final String signature, int pos)
+ {
+ // FormalTypeParameters:
+ // < FormalTypeParameter+ >
+
+ int cursor = checkChar('<', signature, pos);
+ // At least one formal type parameter is required before the '>'.
+ do {
+ cursor = checkFormalTypeParameter(signature, cursor);
+ } while (getChar(signature, cursor) != '>');
+ // Skip the closing '>'.
+ return cursor + 1;
+ }
+
+ /**
+ * Checks a formal type parameter of a class or method signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkFormalTypeParameter(final String signature, int pos)
+ {
+ // FormalTypeParameter:
+ // Identifier : FieldTypeSignature? (: FieldTypeSignature)*
+
+ pos = checkIdentifier(signature, pos);
+ pos = checkChar(':', signature, pos);
+ // The class bound after the first ':' is optional; it is present only
+ // if the next character can start a field type signature.
+ if ("L[T".indexOf(getChar(signature, pos)) != -1) {
+ pos = checkFieldTypeSignature(signature, pos);
+ }
+ // Each further ':' introduces an interface bound.
+ while (getChar(signature, pos) == ':') {
+ pos = checkFieldTypeSignature(signature, pos + 1);
+ }
+ return pos;
+ }
+
+ /**
+ * Checks a field type signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkFieldTypeSignature(final String signature, int pos)
+ {
+ // FieldTypeSignature:
+ // ClassTypeSignature | ArrayTypeSignature | TypeVariableSignature
+ //
+ // ArrayTypeSignature:
+ // [ TypeSignature
+
+ switch (getChar(signature, pos)) {
+ case 'L':
+ return checkClassTypeSignature(signature, pos);
+ case '[':
+ return checkTypeSignature(signature, pos + 1);
+ default:
+ return checkTypeVariableSignature(signature, pos);
+ }
+ }
+
+ /**
+ * Checks a class type signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkClassTypeSignature(final String signature, int pos)
+ {
+ // ClassTypeSignature:
+ // L Identifier ( / Identifier )* TypeArguments? ( . Identifier
+ // TypeArguments? )* ;
+
+ pos = checkChar('L', signature, pos);
+ pos = checkIdentifier(signature, pos);
+ // '/'-separated package/class name segments.
+ while (getChar(signature, pos) == '/') {
+ pos = checkIdentifier(signature, pos + 1);
+ }
+ if (getChar(signature, pos) == '<') {
+ pos = checkTypeArguments(signature, pos);
+ }
+ // Each '.' introduces an inner-class segment, optionally with its own
+ // type arguments.
+ while (getChar(signature, pos) == '.') {
+ pos = checkIdentifier(signature, pos + 1);
+ if (getChar(signature, pos) == '<') {
+ pos = checkTypeArguments(signature, pos);
+ }
+ }
+ return checkChar(';', signature, pos);
+ }
+
+ /**
+ * Checks the type arguments in a class type signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeArguments(final String signature, int pos) {
+ // TypeArguments:
+ // < TypeArgument+ >
+
+ int cursor = checkChar('<', signature, pos);
+ // At least one type argument is required before the '>'.
+ do {
+ cursor = checkTypeArgument(signature, cursor);
+ } while (getChar(signature, cursor) != '>');
+ // Skip the closing '>'.
+ return cursor + 1;
+ }
+
+ /**
+ * Checks a type argument in a class type signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeArgument(final String signature, int pos) {
+ // TypeArgument:
+ // * | ( ( + | - )? FieldTypeSignature )
+
+ switch (getChar(signature, pos)) {
+ case '*':
+ // Unbounded wildcard stands alone.
+ return pos + 1;
+ case '+':
+ case '-':
+ // Bounded wildcard: skip the variance marker, then parse the bound.
+ return checkFieldTypeSignature(signature, pos + 1);
+ default:
+ return checkFieldTypeSignature(signature, pos);
+ }
+ }
+
+ /**
+ * Checks a type variable signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeVariableSignature(
+ final String signature,
+ int pos)
+ {
+ // TypeVariableSignature:
+ // T Identifier ;
+
+ pos = checkChar('T', signature, pos);
+ pos = checkIdentifier(signature, pos);
+ return checkChar(';', signature, pos);
+ }
+
+ /**
+ * Checks a type signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeSignature(final String signature, int pos) {
+ // TypeSignature:
+ // Z | C | B | S | I | F | J | D | FieldTypeSignature
+
+ // A primitive descriptor character is a complete type signature by
+ // itself; getChar returns 0 past the end, which never matches.
+ if ("ZCBSIFJD".indexOf(getChar(signature, pos)) != -1) {
+ return pos + 1;
+ }
+ return checkFieldTypeSignature(signature, pos);
+ }
+
+ /**
+ * Checks an identifier.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkIdentifier(final String signature, int pos) {
+ if (!Character.isJavaIdentifierStart(getChar(signature, pos))) {
+ throw new IllegalArgumentException(signature
+ + ": identifier expected at index " + pos);
+ }
+ ++pos;
+ // getChar returns 0 past the end of the string, and 0 is not an
+ // identifier part, so this loop terminates at the end of the input.
+ while (Character.isJavaIdentifierPart(getChar(signature, pos))) {
+ ++pos;
+ }
+ return pos;
+ }
+
+ /**
+ * Checks a single character.
+ *
+ * @param signature a string containing the signature that must be checked.
+ * @param pos index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkChar(final char c, final String signature, int pos)
+ {
+ // Guard clause: fail fast when the expected character is absent.
+ if (getChar(signature, pos) != c) {
+ throw new IllegalArgumentException(signature + ": '" + c
+ + "' expected at index " + pos);
+ }
+ return pos + 1;
+ }
+
+ /**
+ * Returns the signature character at the given index.
+ *
+ * @param signature a signature.
+ * @param pos an index in signature.
+ * @return the character at the given index, or 0 if there is no such
+ * character.
+ */
+ private static char getChar(final String signature, int pos) {
+ // Returning 0 (never a valid signature character) lets callers probe
+ // past the end of the string without an exception.
+ return pos < signature.length() ? signature.charAt(pos) : (char) 0;
+ }
+
+ /**
+ * Checks that the given label is not null. This method can also check that
+ * the label has been visited.
+ *
+ * @param label the label to be checked.
+ * @param checkVisited <tt>true</tt> to check that the label has been
+ * visited.
+ * @param msg a message to be used in case of error.
+ */
+ void checkLabel(
+ final Label label,
+ final boolean checkVisited,
+ final String msg)
+ {
+ if (label == null) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must not be null)");
+ }
+ // 'labels' is declared outside this view — presumably the map of
+ // labels already visited by this adapter; TODO confirm.
+ if (checkVisited && labels.get(label) == null) {
+ throw new IllegalArgumentException("Invalid " + msg
+ + " (must be visited first)");
+ }
+ }
+
+ /**
+ * Checks that the given label is not a label used only for debug purposes.
+ *
+ * @param label the label to be checked.
+ */
+ private static void checkNonDebugLabel(final Label label) {
+ // Read Label's status field reflectively (field resolution and
+ // setAccessible are handled by getLabelStatusField/getLabelField).
+ Field f = getLabelStatusField();
+ int status = 0;
+ try {
+ status = f == null ? 0 : ((Integer) f.get(label)).intValue();
+ } catch (IllegalAccessException e) {
+ // Should be unreachable: the field was made accessible on lookup.
+ throw new Error("Internal error");
+ }
+ // Bit 0x01 presumably marks a debug-only label (cf. the message
+ // below) — TODO confirm against Label's flag constants.
+ if ((status & 0x01) != 0) {
+ throw new IllegalArgumentException("Labels used for debug info cannot be reused for control flow");
+ }
+ }
+
+ /**
+ * Returns the Field object corresponding to the Label.status field.
+ *
+ * @return the Field object corresponding to the Label.status field.
+ */
+ private static Field getLabelStatusField() {
+ // Lazily resolved and cached in 'labelStatusField' (declared outside
+ // this view). Tries "a" first — presumably the obfuscated name of the
+ // status field in shrunk ASM builds — then the plain "status" name;
+ // TODO confirm.
+ if (labelStatusField == null) {
+ labelStatusField = getLabelField("a");
+ if (labelStatusField == null) {
+ labelStatusField = getLabelField("status");
+ }
+ }
+ return labelStatusField;
+ }
+
+ /**
+ * Returns the field of the Label class whose name is given.
+ *
+ * @param name a field name.
+ * @return the field of the Label class whose name is given, or null.
+ */
+ private static Field getLabelField(final String name) {
+ try {
+ Field f = Label.class.getDeclaredField(name);
+ // The field is private in Label; make it readable for the
+ // reflective access in checkNonDebugLabel.
+ f.setAccessible(true);
+ return f;
+ } catch (NoSuchFieldException e) {
+ // Absent field (e.g. renamed by obfuscation) is not an error here;
+ // the caller tries alternative names.
+ return null;
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
new file mode 100644
index 0000000..3a6c3e7
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
@@ -0,0 +1,329 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.signature.SignatureVisitor;
+
+/**
+ * A {@link SignatureVisitor} that checks that its methods are properly used.
+ *
+ * @author Eric Bruneton
+ */
+public class CheckSignatureAdapter extends SignatureVisitor {
+
+ /**
+ * Type to be used to check class signatures. See
+ * {@link #CheckSignatureAdapter(int, SignatureVisitor) CheckSignatureAdapter}.
+ */
+ public static final int CLASS_SIGNATURE = 0;
+
+ /**
+ * Type to be used to check method signatures. See
+ * {@link #CheckSignatureAdapter(int, SignatureVisitor) CheckSignatureAdapter}.
+ */
+ public static final int METHOD_SIGNATURE = 1;
+
+ /**
+ * Type to be used to check type signatures. See
+ * {@link #CheckSignatureAdapter(int, SignatureVisitor) CheckSignatureAdapter}.
+ */
+ public static final int TYPE_SIGNATURE = 2;
+
+ // States of the call-order automaton. They are bit flags so that several
+ // acceptable states can be tested at once with a mask (see the visit
+ // methods below).
+ private static final int EMPTY = 1;
+
+ private static final int FORMAL = 2;
+
+ private static final int BOUND = 4;
+
+ private static final int SUPER = 8;
+
+ private static final int PARAM = 16;
+
+ private static final int RETURN = 32;
+
+ private static final int SIMPLE_TYPE = 64;
+
+ private static final int CLASS_TYPE = 128;
+
+ private static final int END = 256;
+
+ /**
+ * Type of the signature to be checked.
+ */
+ private final int type;
+
+ /**
+ * State of the automaton used to check the order of method calls.
+ */
+ private int state;
+
+ /**
+ * <tt>true</tt> if the checked type signature can be 'V'.
+ */
+ private boolean canBeVoid;
+
+ /**
+ * The visitor to which this adapter must delegate calls. May be
+ * <tt>null</tt>.
+ */
+ private final SignatureVisitor sv;
+
+ /**
+ * Creates a new {@link CheckSignatureAdapter} object. <i>Subclasses must
+ * not use this constructor</i>. Instead, they must use the
+ * {@link #CheckSignatureAdapter(int, int, SignatureVisitor)} version.
+ *
+ * @param type the type of signature to be checked. See
+ * {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
+ * {@link #TYPE_SIGNATURE}.
+ * @param sv the visitor to which this adapter must delegate calls. May be
+ * <tt>null</tt>.
+ */
+ public CheckSignatureAdapter(final int type, final SignatureVisitor sv) {
+ this(Opcodes.ASM4, type, sv);
+ }
+
+ /**
+ * Creates a new {@link CheckSignatureAdapter} object.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param type the type of signature to be checked. See
+ * {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
+ * {@link #TYPE_SIGNATURE}.
+ * @param sv the visitor to which this adapter must delegate calls. May be
+ * <tt>null</tt>.
+ */
+ protected CheckSignatureAdapter(
+ final int api,
+ final int type,
+ final SignatureVisitor sv)
+ {
+ super(api);
+ this.type = type;
+ this.state = EMPTY;
+ this.sv = sv;
+ }
+
+ // class and method signatures
+
+ @Override
+ public void visitFormalTypeParameter(final String name) {
+ if (type == TYPE_SIGNATURE
+ || (state != EMPTY && state != FORMAL && state != BOUND))
+ {
+ throw new IllegalStateException();
+ }
+ CheckMethodAdapter.checkIdentifier(name, "formal type parameter");
+ state = FORMAL;
+ if (sv != null) {
+ sv.visitFormalTypeParameter(name);
+ }
+ }
+
+ @Override
+ public SignatureVisitor visitClassBound() {
+ if (state != FORMAL) {
+ throw new IllegalStateException();
+ }
+ state = BOUND;
+ SignatureVisitor v = sv == null ? null : sv.visitClassBound();
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ @Override
+ public SignatureVisitor visitInterfaceBound() {
+ // NOTE(review): throws IllegalArgumentException for a state error,
+ // while sibling methods throw IllegalStateException — inconsistent,
+ // but preserved since callers may rely on the exact type.
+ if (state != FORMAL && state != BOUND) {
+ throw new IllegalArgumentException();
+ }
+ SignatureVisitor v = sv == null ? null : sv.visitInterfaceBound();
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ // class signatures
+
+ @Override
+ public SignatureVisitor visitSuperclass() {
+ // NOTE(review): IllegalArgumentException here (cf. the
+ // IllegalStateException used elsewhere) — preserved as-is.
+ if (type != CLASS_SIGNATURE || (state & (EMPTY | FORMAL | BOUND)) == 0)
+ {
+ throw new IllegalArgumentException();
+ }
+ state = SUPER;
+ SignatureVisitor v = sv == null ? null : sv.visitSuperclass();
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ @Override
+ public SignatureVisitor visitInterface() {
+ if (state != SUPER) {
+ throw new IllegalStateException();
+ }
+ SignatureVisitor v = sv == null ? null : sv.visitInterface();
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ // method signatures
+
+ @Override
+ public SignatureVisitor visitParameterType() {
+ // NOTE(review): IllegalArgumentException here (cf. the
+ // IllegalStateException used elsewhere) — preserved as-is.
+ if (type != METHOD_SIGNATURE
+ || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0)
+ {
+ throw new IllegalArgumentException();
+ }
+ state = PARAM;
+ SignatureVisitor v = sv == null ? null : sv.visitParameterType();
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ @Override
+ public SignatureVisitor visitReturnType() {
+ // NOTE(review): IllegalArgumentException here (cf. the
+ // IllegalStateException used elsewhere) — preserved as-is.
+ if (type != METHOD_SIGNATURE
+ || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0)
+ {
+ throw new IllegalArgumentException();
+ }
+ state = RETURN;
+ SignatureVisitor v = sv == null ? null : sv.visitReturnType();
+ // Only the return type may be 'V' (void), so the nested checker for
+ // it is the single place where canBeVoid is enabled.
+ CheckSignatureAdapter cv = new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ cv.canBeVoid = true;
+ return cv;
+ }
+
+ @Override
+ public SignatureVisitor visitExceptionType() {
+ if (state != RETURN) {
+ throw new IllegalStateException();
+ }
+ SignatureVisitor v = sv == null ? null : sv.visitExceptionType();
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ // type signatures
+
+ @Override
+ public void visitBaseType(final char descriptor) {
+ if (type != TYPE_SIGNATURE || state != EMPTY) {
+ throw new IllegalStateException();
+ }
+ if (descriptor == 'V') {
+ if (!canBeVoid) {
+ throw new IllegalArgumentException();
+ }
+ } else {
+ // Only the eight primitive descriptor characters are accepted.
+ if ("ZCBSIFJD".indexOf(descriptor) == -1) {
+ throw new IllegalArgumentException();
+ }
+ }
+ state = SIMPLE_TYPE;
+ if (sv != null) {
+ sv.visitBaseType(descriptor);
+ }
+ }
+
+ @Override
+ public void visitTypeVariable(final String name) {
+ if (type != TYPE_SIGNATURE || state != EMPTY) {
+ throw new IllegalStateException();
+ }
+ CheckMethodAdapter.checkIdentifier(name, "type variable");
+ state = SIMPLE_TYPE;
+ if (sv != null) {
+ sv.visitTypeVariable(name);
+ }
+ }
+
+ @Override
+ public SignatureVisitor visitArrayType() {
+ if (type != TYPE_SIGNATURE || state != EMPTY) {
+ throw new IllegalStateException();
+ }
+ state = SIMPLE_TYPE;
+ SignatureVisitor v = sv == null ? null : sv.visitArrayType();
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ @Override
+ public void visitClassType(final String name) {
+ if (type != TYPE_SIGNATURE || state != EMPTY) {
+ throw new IllegalStateException();
+ }
+ CheckMethodAdapter.checkInternalName(name, "class name");
+ state = CLASS_TYPE;
+ if (sv != null) {
+ sv.visitClassType(name);
+ }
+ }
+
+ @Override
+ public void visitInnerClassType(final String name) {
+ if (state != CLASS_TYPE) {
+ throw new IllegalStateException();
+ }
+ CheckMethodAdapter.checkIdentifier(name, "inner class name");
+ if (sv != null) {
+ sv.visitInnerClassType(name);
+ }
+ }
+
+ @Override
+ public void visitTypeArgument() {
+ if (state != CLASS_TYPE) {
+ throw new IllegalStateException();
+ }
+ if (sv != null) {
+ sv.visitTypeArgument();
+ }
+ }
+
+ @Override
+ public SignatureVisitor visitTypeArgument(final char wildcard) {
+ if (state != CLASS_TYPE) {
+ throw new IllegalStateException();
+ }
+ // '+', '-' and '=' presumably map to SignatureVisitor's EXTENDS,
+ // SUPER and INSTANCEOF wildcard constants — TODO confirm.
+ if ("+-=".indexOf(wildcard) == -1) {
+ throw new IllegalArgumentException();
+ }
+ SignatureVisitor v = sv == null ? null : sv.visitTypeArgument(wildcard);
+ return new CheckSignatureAdapter(TYPE_SIGNATURE, v);
+ }
+
+ @Override
+ public void visitEnd() {
+ if (state != CLASS_TYPE) {
+ throw new IllegalStateException();
+ }
+ // Terminal state: the class type signature is complete; no further
+ // visit calls are legal on this adapter.
+ state = END;
+ if (sv != null) {
+ sv.visitEnd();
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/Printer.java b/src/asm/scala/tools/asm/util/Printer.java
new file mode 100644
index 0000000..c39fd54
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/Printer.java
@@ -0,0 +1,558 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+
+import scala.tools.asm.Attribute;
+import scala.tools.asm.Handle;
+import scala.tools.asm.Label;
+import scala.tools.asm.Opcodes;
+
+/**
+ * An abstract converter from visit events to text.
+ *
+ * @author Eric Bruneton
+ */
+public abstract class Printer {
+
+ /**
+ * The names of the Java Virtual Machine opcodes.
+ */
+ public static final String[] OPCODES;
+
+ /**
+ * The names of the for <code>operand</code> parameter values of the
+ * {@link org.objectweb.asm.MethodVisitor#visitIntInsn} method when
+ * <code>opcode</code> is <code>NEWARRAY</code>.
+ */
+ public static final String[] TYPES;
+
+ /**
+ * The names of the <code>tag</code> field values for
+ * {@link org.objectweb.asm.Handle}.
+ */
+ public static final String[] HANDLE_TAG;
+
+ static {
+ // Opcode mnemonics, indexed by opcode value, packed into one
+ // comma-separated string to keep the class file small. Consecutive
+ // commas stand for unassigned opcode values.
+ String s = "NOP,ACONST_NULL,ICONST_M1,ICONST_0,ICONST_1,ICONST_2,"
+ + "ICONST_3,ICONST_4,ICONST_5,LCONST_0,LCONST_1,FCONST_0,"
+ + "FCONST_1,FCONST_2,DCONST_0,DCONST_1,BIPUSH,SIPUSH,LDC,,,"
+ + "ILOAD,LLOAD,FLOAD,DLOAD,ALOAD,,,,,,,,,,,,,,,,,,,,,IALOAD,"
+ + "LALOAD,FALOAD,DALOAD,AALOAD,BALOAD,CALOAD,SALOAD,ISTORE,"
+ + "LSTORE,FSTORE,DSTORE,ASTORE,,,,,,,,,,,,,,,,,,,,,IASTORE,"
+ + "LASTORE,FASTORE,DASTORE,AASTORE,BASTORE,CASTORE,SASTORE,POP,"
+ + "POP2,DUP,DUP_X1,DUP_X2,DUP2,DUP2_X1,DUP2_X2,SWAP,IADD,LADD,"
+ + "FADD,DADD,ISUB,LSUB,FSUB,DSUB,IMUL,LMUL,FMUL,DMUL,IDIV,LDIV,"
+ + "FDIV,DDIV,IREM,LREM,FREM,DREM,INEG,LNEG,FNEG,DNEG,ISHL,LSHL,"
+ + "ISHR,LSHR,IUSHR,LUSHR,IAND,LAND,IOR,LOR,IXOR,LXOR,IINC,I2L,"
+ + "I2F,I2D,L2I,L2F,L2D,F2I,F2L,F2D,D2I,D2L,D2F,I2B,I2C,I2S,LCMP,"
+ + "FCMPL,FCMPG,DCMPL,DCMPG,IFEQ,IFNE,IFLT,IFGE,IFGT,IFLE,"
+ + "IF_ICMPEQ,IF_ICMPNE,IF_ICMPLT,IF_ICMPGE,IF_ICMPGT,IF_ICMPLE,"
+ + "IF_ACMPEQ,IF_ACMPNE,GOTO,JSR,RET,TABLESWITCH,LOOKUPSWITCH,"
+ + "IRETURN,LRETURN,FRETURN,DRETURN,ARETURN,RETURN,GETSTATIC,"
+ + "PUTSTATIC,GETFIELD,PUTFIELD,INVOKEVIRTUAL,INVOKESPECIAL,"
+ + "INVOKESTATIC,INVOKEINTERFACE,INVOKEDYNAMIC,NEW,NEWARRAY,"
+ + "ANEWARRAY,ARRAYLENGTH,ATHROW,CHECKCAST,INSTANCEOF,"
+ + "MONITORENTER,MONITOREXIT,,MULTIANEWARRAY,IFNULL,IFNONNULL,";
+ OPCODES = new String[200];
+ int i = 0;
+ int j = 0;
+ int l;
+ // NOTE(review): with this condition an empty entry (",,") stores ""
+ // rather than null in OPCODES — verify against upstream ASM whether
+ // null markers were intended for unassigned opcodes.
+ while ((l = s.indexOf(',', j)) > 0) {
+ OPCODES[i++] = j + 1 == l ? null : s.substring(j, l);
+ j = l + 1;
+ }
+
+ // NEWARRAY operand names; filling starts at index 4, presumably
+ // because Opcodes.T_BOOLEAN == 4 — TODO confirm.
+ s = "T_BOOLEAN,T_CHAR,T_FLOAT,T_DOUBLE,T_BYTE,T_SHORT,T_INT,T_LONG,";
+ TYPES = new String[12];
+ j = 0;
+ i = 4;
+ while ((l = s.indexOf(',', j)) > 0) {
+ TYPES[i++] = s.substring(j, l);
+ j = l + 1;
+ }
+
+ // Method-handle tag names; filling starts at index 1, presumably
+ // because Opcodes.H_GETFIELD == 1 — TODO confirm.
+ s = "H_GETFIELD,H_GETSTATIC,H_PUTFIELD,H_PUTSTATIC,"
+ + "H_INVOKEVIRTUAL,H_INVOKESTATIC,H_INVOKESPECIAL,"
+ + "H_NEWINVOKESPECIAL,H_INVOKEINTERFACE,";
+ HANDLE_TAG = new String[10];
+ j = 0;
+ i = 1;
+ while ((l = s.indexOf(',', j)) > 0) {
+ HANDLE_TAG[i++] = s.substring(j, l);
+ j = l + 1;
+ }
+ }
+
+ /**
+ * The ASM API version implemented by this class. The value of this field
+ * must be one of {@link Opcodes#ASM4}.
+ */
+ protected final int api;
+
+ /**
+ * A buffer that can be used to create strings.
+ */
+ protected final StringBuffer buf;
+
+ /**
+ * The text to be printed. Since the code of methods is not necessarily
+ * visited in sequential order, one method after the other, but can be
+ * interlaced (some instructions from method one, then some instructions
+ * from method two, then some instructions from method one again...), it is
+ * not possible to print the visited instructions directly to a sequential
+ * stream. A class is therefore printed in a two steps process: a string
+ * tree is constructed during the visit, and printed to a sequential stream
+ * at the end of the visit. This string tree is stored in this field, as a
+ * string list that can contain other string lists, which can themselves
+ * contain other string lists, and so on.
+ */
+ public final List<Object> text;
+
+ /**
+ * Constructs a new {@link Printer}.
+ *
+ * @param api the ASM API version implemented by this visitor (stored in
+ * {@link #api}).
+ */
+ protected Printer(final int api) {
+ this.api = api;
+ this.buf = new StringBuffer();
+ this.text = new ArrayList<Object>();
+ }
+
+ /**
+ * Class header.
+ * See {@link org.objectweb.asm.ClassVisitor#visit}.
+ */
+ public abstract void visit(
+ final int version,
+ final int access,
+ final String name,
+ final String signature,
+ final String superName,
+ final String[] interfaces);
+
+ /**
+ * Class source.
+ * See {@link org.objectweb.asm.ClassVisitor#visitSource}.
+ */
+ public abstract void visitSource(final String file, final String debug);
+
+ /**
+ * Class outer class.
+ * See {@link org.objectweb.asm.ClassVisitor#visitOuterClass}.
+ */
+ public abstract void visitOuterClass(
+ final String owner,
+ final String name,
+ final String desc);
+
+ /**
+ * Class annotation.
+ * See {@link org.objectweb.asm.ClassVisitor#visitAnnotation}.
+ */
+ public abstract Printer visitClassAnnotation(
+ final String desc,
+ final boolean visible);
+
+ /**
+ * Class attribute.
+ * See {@link org.objectweb.asm.ClassVisitor#visitAttribute}.
+ */
+ public abstract void visitClassAttribute(final Attribute attr);
+
+ /**
+ * Class inner name.
+ * See {@link org.objectweb.asm.ClassVisitor#visitInnerClass}.
+ */
+ public abstract void visitInnerClass(
+ final String name,
+ final String outerName,
+ final String innerName,
+ final int access);
+
+ /**
+ * Class field.
+ * See {@link org.objectweb.asm.ClassVisitor#visitField}.
+ */
+ public abstract Printer visitField(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value);
+
+ /**
+ * Class method.
+ * See {@link org.objectweb.asm.ClassVisitor#visitMethod}.
+ */
+ public abstract Printer visitMethod(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions);
+
+ /**
+ * Class end.
+ * See {@link org.objectweb.asm.ClassVisitor#visitEnd}.
+ */
+ public abstract void visitClassEnd();
+
+ // ------------------------------------------------------------------------
+ // Annotations
+ // ------------------------------------------------------------------------
+
+ /**
+ * Annotation value.
+ * See {@link org.objectweb.asm.AnnotationVisitor#visit}.
+ */
+ public abstract void visit(final String name, final Object value);
+
+ /**
+ * Annotation enum value.
+ * See {@link org.objectweb.asm.AnnotationVisitor#visitEnum}.
+ */
+ public abstract void visitEnum(
+ final String name,
+ final String desc,
+ final String value);
+
+ /**
+ * Nested annotation value.
+ * See {@link org.objectweb.asm.AnnotationVisitor#visitAnnotation}.
+ */
+ public abstract Printer visitAnnotation(
+ final String name,
+ final String desc);
+
+ /**
+ * Annotation array value.
+ * See {@link org.objectweb.asm.AnnotationVisitor#visitArray}.
+ */
+ public abstract Printer visitArray(final String name);
+
+ /**
+ * Annotation end.
+ * See {@link org.objectweb.asm.AnnotationVisitor#visitEnd}.
+ */
+ public abstract void visitAnnotationEnd();
+
+ // ------------------------------------------------------------------------
+ // Fields
+ // ------------------------------------------------------------------------
+
+ /**
+ * Field annotation.
+ * See {@link org.objectweb.asm.FieldVisitor#visitAnnotation}.
+ */
+ public abstract Printer visitFieldAnnotation(
+ final String desc,
+ final boolean visible);
+
+ /**
+ * Field attribute.
+ * See {@link org.objectweb.asm.FieldVisitor#visitAttribute}.
+ */
+ public abstract void visitFieldAttribute(final Attribute attr);
+
+ /**
+ * Field end.
+ * See {@link org.objectweb.asm.FieldVisitor#visitEnd}.
+ */
+ public abstract void visitFieldEnd();
+
+ // ------------------------------------------------------------------------
+ // Methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Method default annotation.
+ * See {@link org.objectweb.asm.MethodVisitor#visitAnnotationDefault}.
+ */
+ public abstract Printer visitAnnotationDefault();
+
+ /**
+ * Method annotation.
+ * See {@link org.objectweb.asm.MethodVisitor#visitAnnotation}.
+ */
+ public abstract Printer visitMethodAnnotation(
+ final String desc,
+ final boolean visible);
+
+ /**
+ * Method parameter annotation.
+ * See {@link org.objectweb.asm.MethodVisitor#visitParameterAnnotation}.
+ */
+ public abstract Printer visitParameterAnnotation(
+ final int parameter,
+ final String desc,
+ final boolean visible);
+
+ /**
+ * Method attribute.
+ * See {@link org.objectweb.asm.MethodVisitor#visitAttribute}.
+ */
+ public abstract void visitMethodAttribute(final Attribute attr);
+
+ /**
+ * Method start.
+ * See {@link org.objectweb.asm.MethodVisitor#visitCode}.
+ */
+ public abstract void visitCode();
+
+ /**
+ * Method stack frame.
+ * See {@link org.objectweb.asm.MethodVisitor#visitFrame}.
+ */
+ public abstract void visitFrame(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitInsn}.
+ */
+ public abstract void visitInsn(final int opcode);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitIntInsn}.
+ */
+ public abstract void visitIntInsn(final int opcode, final int operand);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitVarInsn}.
+ */
+ public abstract void visitVarInsn(final int opcode, final int var);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitTypeInsn}.
+ */
+ public abstract void visitTypeInsn(final int opcode, final String type);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitFieldInsn}.
+ */
+ public abstract void visitFieldInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitMethodInsn}.
+ */
+ public abstract void visitMethodInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitInvokeDynamicInsn}.
+ */
+ public abstract void visitInvokeDynamicInsn(
+ String name,
+ String desc,
+ Handle bsm,
+ Object... bsmArgs);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitJumpInsn}.
+ */
+ public abstract void visitJumpInsn(final int opcode, final Label label);
+
+ /**
+ * Method label.
+ * See {@link org.objectweb.asm.MethodVisitor#visitLabel}.
+ */
+ public abstract void visitLabel(final Label label);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitLdcInsn}.
+ */
+ public abstract void visitLdcInsn(final Object cst);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitIincInsn}.
+ */
+ public abstract void visitIincInsn(final int var, final int increment);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitTableSwitchInsn}.
+ */
+ public abstract void visitTableSwitchInsn(
+ final int min,
+ final int max,
+ final Label dflt,
+ final Label... labels);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitLookupSwitchInsn}.
+ */
+ public abstract void visitLookupSwitchInsn(
+ final Label dflt,
+ final int[] keys,
+ final Label[] labels);
+
+ /**
+ * Method instruction.
+ * See {@link org.objectweb.asm.MethodVisitor#visitMultiANewArrayInsn}.
+ */
+ public abstract void visitMultiANewArrayInsn(
+ final String desc,
+ final int dims);
+
+ /**
+ * Method exception handler.
+ * See {@link org.objectweb.asm.MethodVisitor#visitTryCatchBlock}.
+ */
+ public abstract void visitTryCatchBlock(
+ final Label start,
+ final Label end,
+ final Label handler,
+ final String type);
+
+ /**
+ * Method debug info.
+ * See {@link org.objectweb.asm.MethodVisitor#visitLocalVariable}.
+ */
+ public abstract void visitLocalVariable(
+ final String name,
+ final String desc,
+ final String signature,
+ final Label start,
+ final Label end,
+ final int index);
+
+ /**
+ * Method debug info.
+ * See {@link org.objectweb.asm.MethodVisitor#visitLineNumber}.
+ */
+ public abstract void visitLineNumber(final int line, final Label start);
+
+ /**
+ * Method max stack and max locals.
+ * See {@link org.objectweb.asm.MethodVisitor#visitMaxs}.
+ */
+ public abstract void visitMaxs(final int maxStack, final int maxLocals);
+
+ /**
+ * Method end.
+ * See {@link org.objectweb.asm.MethodVisitor#visitEnd}.
+ */
+ public abstract void visitMethodEnd();
+
+ /**
+ * Returns the text constructed by this visitor.
+ *
+ * @return the text constructed by this visitor.
+ */
+ public List<Object> getText() {
+ return text;
+ }
+
+ /**
+ * Prints the text constructed by this visitor.
+ *
+ * @param pw the print writer to be used.
+ */
+ public void print(final PrintWriter pw) {
+ printList(pw, text);
+ }
+
+ /**
+ * Appends a quoted string to a given buffer.
+ *
+ * @param buf the buffer where the string must be added.
+ * @param s the string to be added.
+ */
+ public static void appendString(final StringBuffer buf, final String s) {
+ buf.append('\"');
+ for (int i = 0; i < s.length(); ++i) {
+ char c = s.charAt(i);
+ if (c == '\n') {
+ buf.append("\\n");
+ } else if (c == '\r') {
+ buf.append("\\r");
+ } else if (c == '\\') {
+ buf.append("\\\\");
+ } else if (c == '"') {
+ buf.append("\\\"");
+ } else if (c < 0x20 || c > 0x7f) {
+ buf.append("\\u");
+ if (c < 0x10) {
+ buf.append("000");
+ } else if (c < 0x100) {
+ buf.append("00");
+ } else if (c < 0x1000) {
+ buf.append('0');
+ }
+ buf.append(Integer.toString(c, 16));
+ } else {
+ buf.append(c);
+ }
+ }
+ buf.append('\"');
+ }
+
+ /**
+ * Prints the given string tree.
+ *
+ * @param pw the writer to be used to print the tree.
+ * @param l a string tree, i.e., a string list that can contain other string
+ * lists, and so on recursively.
+ */
+ static void printList(final PrintWriter pw, final List<?> l) {
+ for (int i = 0; i < l.size(); ++i) {
+ Object o = l.get(i);
+ if (o instanceof List) {
+ printList(pw, (List<?>) o);
+ } else {
+ pw.print(o.toString());
+ }
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/SignatureChecker.java b/src/asm/scala/tools/asm/util/SignatureChecker.java
new file mode 100644
index 0000000..71f0d80
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/SignatureChecker.java
@@ -0,0 +1,47 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ */
+
+package scala.tools.asm.util;
+
+import scala.tools.asm.util.CheckMethodAdapter;
+import scala.tools.asm.MethodVisitor;
+
+/**
+ * A subclass of ASM's CheckMethodAdapter for the sole purpose of accessing some protected methods there.
+ *
+ */
+public class SignatureChecker extends CheckMethodAdapter {
+
+ public SignatureChecker(final MethodVisitor mv) {
+ super(mv);
+ }
+
+ /**
+ * Checks a class signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ */
+ public static void checkClassSignature(final String signature) {
+ CheckMethodAdapter.checkClassSignature(signature);
+ }
+
+ /**
+ * Checks a method signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ */
+ public static void checkMethodSignature(final String signature) {
+ CheckMethodAdapter.checkMethodSignature(signature);
+ }
+
+ /**
+ * Checks a field signature.
+ *
+ * @param signature a string containing the signature that must be checked.
+ */
+ public static void checkFieldSignature(final String signature) {
+ CheckMethodAdapter.checkFieldSignature(signature);
+ }
+
+}
diff --git a/src/asm/scala/tools/asm/util/Textifiable.java b/src/asm/scala/tools/asm/util/Textifiable.java
new file mode 100644
index 0000000..b80d013
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/Textifiable.java
@@ -0,0 +1,54 @@
+/**
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.util.Map;
+
+import scala.tools.asm.Label;
+
+/**
+ * An {@link org.objectweb.asm.Attribute Attribute} that can print a readable
+ * representation of itself.
+ *
+ * Implementations should construct readable output from an attribute data
+ * structure. Such representation could be used in unit test assertions.
+ *
+ * @author Eugene Kuleshov
+ */
+public interface Textifiable {
+
+ /**
+ * Build a human readable representation of this attribute.
+ *
+ * @param buf a buffer used for printing Java code.
+ * @param labelNames map of label instances to their names.
+ */
+ void textify(StringBuffer buf, Map<Label, String> labelNames);
+}
diff --git a/src/asm/scala/tools/asm/util/Textifier.java b/src/asm/scala/tools/asm/util/Textifier.java
new file mode 100644
index 0000000..8d40ebd
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/Textifier.java
@@ -0,0 +1,1286 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.io.FileInputStream;
+import java.io.PrintWriter;
+import java.util.HashMap;
+import java.util.Map;
+
+import scala.tools.asm.Attribute;
+import scala.tools.asm.ClassReader;
+import scala.tools.asm.Handle;
+import scala.tools.asm.Label;
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.Type;
+import scala.tools.asm.signature.SignatureReader;
+
+/**
+ * A {@link Printer} that prints a disassembled view of the classes it visits.
+ *
+ * @author Eric Bruneton
+ */
+public class Textifier extends Printer {
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for internal
+ * type names in bytecode notation.
+ */
+ public static final int INTERNAL_NAME = 0;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for field
+ * descriptors, formatted in bytecode notation
+ */
+ public static final int FIELD_DESCRIPTOR = 1;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for field
+ * signatures, formatted in bytecode notation
+ */
+ public static final int FIELD_SIGNATURE = 2;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for method
+ * descriptors, formatted in bytecode notation
+ */
+ public static final int METHOD_DESCRIPTOR = 3;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for method
+ * signatures, formatted in bytecode notation
+ */
+ public static final int METHOD_SIGNATURE = 4;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for class
+ * signatures, formatted in bytecode notation
+ */
+ public static final int CLASS_SIGNATURE = 5;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for field or
+ * method return value signatures, formatted in default Java notation
+ * (non-bytecode)
+ */
+ public static final int TYPE_DECLARATION = 6;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for class
+ * signatures, formatted in default Java notation (non-bytecode)
+ */
+ public static final int CLASS_DECLARATION = 7;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for method
+ * parameter signatures, formatted in default Java notation (non-bytecode)
+ */
+ public static final int PARAMETERS_DECLARATION = 8;
+
+ /**
+ * Constant used in {@link #appendDescriptor appendDescriptor} for handle
+ * descriptors, formatted in bytecode notation
+ */
+ public static final int HANDLE_DESCRIPTOR = 9;
+
+ /**
+ * Tab for class members.
+ */
+ protected String tab = " ";
+
+ /**
+ * Tab for bytecode instructions.
+ */
+ protected String tab2 = " ";
+
+ /**
+ * Tab for table and lookup switch instructions.
+ */
+ protected String tab3 = " ";
+
+ /**
+ * Tab for labels.
+ */
+ protected String ltab = " ";
+
+ /**
+ * The label names. This map associate String values to Label keys.
+ */
+ protected Map<Label, String> labelNames;
+
+ private int valueNumber = 0;
+
+ /**
+ * Constructs a new {@link Textifier}. <i>Subclasses must not use this
+ * constructor</i>. Instead, they must use the {@link #Textifier(int)}
+ * version.
+ */
+ public Textifier() {
+ this(Opcodes.ASM4);
+ }
+
+ /**
+ * Constructs a new {@link Textifier}.
+ *
+ * @param api the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ */
+ protected Textifier(final int api) {
+ super(api);
+ }
+
+ /**
+ * Prints a disassembled view of the given class to the standard output. <p>
+ * Usage: Textifier [-debug] <binary class name or class
+ * file name >
+ *
+ * @param args the command line arguments.
+ *
+ * @throws Exception if the class cannot be found, or if an IO exception
+ * occurs.
+ */
+ public static void main(final String[] args) throws Exception {
+ int i = 0;
+ int flags = ClassReader.SKIP_DEBUG;
+
+ boolean ok = true;
+ if (args.length < 1 || args.length > 2) {
+ ok = false;
+ }
+ if (ok && "-debug".equals(args[0])) {
+ i = 1;
+ flags = 0;
+ if (args.length != 2) {
+ ok = false;
+ }
+ }
+ if (!ok) {
+ System.err.println("Prints a disassembled view of the given class.");
+ System.err.println("Usage: Textifier [-debug] "
+ + "<fully qualified class name or class file name>");
+ return;
+ }
+ ClassReader cr;
+ if (args[i].endsWith(".class") || args[i].indexOf('\\') > -1
+ || args[i].indexOf('/') > -1)
+ {
+ cr = new ClassReader(new FileInputStream(args[i]));
+ } else {
+ cr = new ClassReader(args[i]);
+ }
+ cr.accept(new TraceClassVisitor(new PrintWriter(System.out)),
+ flags);
+ }
+
+ // ------------------------------------------------------------------------
+ // Classes
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(
+ final int version,
+ final int access,
+ final String name,
+ final String signature,
+ final String superName,
+ final String[] interfaces)
+ {
+ int major = version & 0xFFFF;
+ int minor = version >>> 16;
+ buf.setLength(0);
+ buf.append("// class version ")
+ .append(major)
+ .append('.')
+ .append(minor)
+ .append(" (")
+ .append(version)
+ .append(")\n");
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ buf.append("// DEPRECATED\n");
+ }
+ buf.append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n');
+
+ appendDescriptor(CLASS_SIGNATURE, signature);
+ if (signature != null) {
+ TraceSignatureVisitor sv = new TraceSignatureVisitor(access);
+ SignatureReader r = new SignatureReader(signature);
+ r.accept(sv);
+ buf.append("// declaration: ")
+ .append(name)
+ .append(sv.getDeclaration())
+ .append('\n');
+ }
+
+ appendAccess(access & ~Opcodes.ACC_SUPER);
+ if ((access & Opcodes.ACC_ANNOTATION) != 0) {
+ buf.append("@interface ");
+ } else if ((access & Opcodes.ACC_INTERFACE) != 0) {
+ buf.append("interface ");
+ } else if ((access & Opcodes.ACC_ENUM) == 0) {
+ buf.append("class ");
+ }
+ appendDescriptor(INTERNAL_NAME, name);
+
+ if (superName != null && !"java/lang/Object".equals(superName)) {
+ buf.append(" extends ");
+ appendDescriptor(INTERNAL_NAME, superName);
+ buf.append(' ');
+ }
+ if (interfaces != null && interfaces.length > 0) {
+ buf.append(" implements ");
+ for (int i = 0; i < interfaces.length; ++i) {
+ appendDescriptor(INTERNAL_NAME, interfaces[i]);
+ buf.append(' ');
+ }
+ }
+ buf.append(" {\n\n");
+
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitSource(final String file, final String debug) {
+ buf.setLength(0);
+ if (file != null) {
+ buf.append(tab)
+ .append("// compiled from: ")
+ .append(file)
+ .append('\n');
+ }
+ if (debug != null) {
+ buf.append(tab)
+ .append("// debug info: ")
+ .append(debug)
+ .append('\n');
+ }
+ if (buf.length() > 0) {
+ text.add(buf.toString());
+ }
+ }
+
+ @Override
+ public void visitOuterClass(
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ buf.append(tab).append("OUTERCLASS ");
+ appendDescriptor(INTERNAL_NAME, owner);
+ buf.append(' ');
+ if (name != null) {
+ buf.append(name).append(' ');
+ }
+ appendDescriptor(METHOD_DESCRIPTOR, desc);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public Textifier visitClassAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ text.add("\n");
+ return visitAnnotation(desc, visible);
+ }
+
+ @Override
+ public void visitClassAttribute(final Attribute attr) {
+ text.add("\n");
+ visitAttribute(attr);
+ }
+
+ @Override
+ public void visitInnerClass(
+ final String name,
+ final String outerName,
+ final String innerName,
+ final int access)
+ {
+ buf.setLength(0);
+ buf.append(tab).append("// access flags 0x");
+ buf.append(Integer.toHexString(access & ~Opcodes.ACC_SUPER).toUpperCase()).append('\n');
+ buf.append(tab);
+ appendAccess(access);
+ buf.append("INNERCLASS ");
+ appendDescriptor(INTERNAL_NAME, name);
+ buf.append(' ');
+ appendDescriptor(INTERNAL_NAME, outerName);
+ buf.append(' ');
+ appendDescriptor(INTERNAL_NAME, innerName);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public Textifier visitField(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ buf.setLength(0);
+ buf.append('\n');
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ buf.append(tab).append("// DEPRECATED\n");
+ }
+ buf.append(tab).append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n');
+ if (signature != null) {
+ buf.append(tab);
+ appendDescriptor(FIELD_SIGNATURE, signature);
+
+ TraceSignatureVisitor sv = new TraceSignatureVisitor(0);
+ SignatureReader r = new SignatureReader(signature);
+ r.acceptType(sv);
+ buf.append(tab)
+ .append("// declaration: ")
+ .append(sv.getDeclaration())
+ .append('\n');
+ }
+
+ buf.append(tab);
+ appendAccess(access);
+
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append(' ').append(name);
+ if (value != null) {
+ buf.append(" = ");
+ if (value instanceof String) {
+ buf.append('\"').append(value).append('\"');
+ } else {
+ buf.append(value);
+ }
+ }
+
+ buf.append('\n');
+ text.add(buf.toString());
+
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ return t;
+ }
+
+ @Override
+ public Textifier visitMethod(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ buf.setLength(0);
+ buf.append('\n');
+ if ((access & Opcodes.ACC_DEPRECATED) != 0) {
+ buf.append(tab).append("// DEPRECATED\n");
+ }
+ buf.append(tab).append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n');
+
+ if (signature != null) {
+ buf.append(tab);
+ appendDescriptor(METHOD_SIGNATURE, signature);
+
+ TraceSignatureVisitor v = new TraceSignatureVisitor(0);
+ SignatureReader r = new SignatureReader(signature);
+ r.accept(v);
+ String genericDecl = v.getDeclaration();
+ String genericReturn = v.getReturnType();
+ String genericExceptions = v.getExceptions();
+
+ buf.append(tab)
+ .append("// declaration: ")
+ .append(genericReturn)
+ .append(' ')
+ .append(name)
+ .append(genericDecl);
+ if (genericExceptions != null) {
+ buf.append(" throws ").append(genericExceptions);
+ }
+ buf.append('\n');
+ }
+
+ buf.append(tab);
+ appendAccess(access);
+ if ((access & Opcodes.ACC_NATIVE) != 0) {
+ buf.append("native ");
+ }
+ if ((access & Opcodes.ACC_VARARGS) != 0) {
+ buf.append("varargs ");
+ }
+ if ((access & Opcodes.ACC_BRIDGE) != 0) {
+ buf.append("bridge ");
+ }
+
+ buf.append(name);
+ appendDescriptor(METHOD_DESCRIPTOR, desc);
+ if (exceptions != null && exceptions.length > 0) {
+ buf.append(" throws ");
+ for (int i = 0; i < exceptions.length; ++i) {
+ appendDescriptor(INTERNAL_NAME, exceptions[i]);
+ buf.append(' ');
+ }
+ }
+
+ buf.append('\n');
+ text.add(buf.toString());
+
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ return t;
+ }
+
+ @Override
+ public void visitClassEnd() {
+ text.add("}\n");
+ }
+
+ // ------------------------------------------------------------------------
+ // Annotations
+ // ------------------------------------------------------------------------
+
+ @Override
+ public void visit(final String name, final Object value) {
+ buf.setLength(0);
+ appendComa(valueNumber++);
+
+ if (name != null) {
+ buf.append(name).append('=');
+ }
+
+ if (value instanceof String) {
+ visitString((String) value);
+ } else if (value instanceof Type) {
+ visitType((Type) value);
+ } else if (value instanceof Byte) {
+ visitByte(((Byte) value).byteValue());
+ } else if (value instanceof Boolean) {
+ visitBoolean(((Boolean) value).booleanValue());
+ } else if (value instanceof Short) {
+ visitShort(((Short) value).shortValue());
+ } else if (value instanceof Character) {
+ visitChar(((Character) value).charValue());
+ } else if (value instanceof Integer) {
+ visitInt(((Integer) value).intValue());
+ } else if (value instanceof Float) {
+ visitFloat(((Float) value).floatValue());
+ } else if (value instanceof Long) {
+ visitLong(((Long) value).longValue());
+ } else if (value instanceof Double) {
+ visitDouble(((Double) value).doubleValue());
+ } else if (value.getClass().isArray()) {
+ buf.append('{');
+ if (value instanceof byte[]) {
+ byte[] v = (byte[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitByte(v[i]);
+ }
+ } else if (value instanceof boolean[]) {
+ boolean[] v = (boolean[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitBoolean(v[i]);
+ }
+ } else if (value instanceof short[]) {
+ short[] v = (short[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitShort(v[i]);
+ }
+ } else if (value instanceof char[]) {
+ char[] v = (char[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitChar(v[i]);
+ }
+ } else if (value instanceof int[]) {
+ int[] v = (int[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitInt(v[i]);
+ }
+ } else if (value instanceof long[]) {
+ long[] v = (long[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitLong(v[i]);
+ }
+ } else if (value instanceof float[]) {
+ float[] v = (float[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitFloat(v[i]);
+ }
+ } else if (value instanceof double[]) {
+ double[] v = (double[]) value;
+ for (int i = 0; i < v.length; i++) {
+ appendComa(i);
+ visitDouble(v[i]);
+ }
+ }
+ buf.append('}');
+ }
+
+ text.add(buf.toString());
+ }
+
+ private void visitInt(final int value) {
+ buf.append(value);
+ }
+
+ private void visitLong(final long value) {
+ buf.append(value).append('L');
+ }
+
+ private void visitFloat(final float value) {
+ buf.append(value).append('F');
+ }
+
+ private void visitDouble(final double value) {
+ buf.append(value).append('D');
+ }
+
+ private void visitChar(final char value) {
+ buf.append("(char)").append((int) value);
+ }
+
+ private void visitShort(final short value) {
+ buf.append("(short)").append(value);
+ }
+
+ private void visitByte(final byte value) {
+ buf.append("(byte)").append(value);
+ }
+
+ private void visitBoolean(final boolean value) {
+ buf.append(value);
+ }
+
+ private void visitString(final String value) {
+ appendString(buf, value);
+ }
+
+ private void visitType(final Type value) {
+ buf.append(value.getClassName()).append(".class");
+ }
+
+ @Override
+ public void visitEnum(
+ final String name,
+ final String desc,
+ final String value)
+ {
+ buf.setLength(0);
+ appendComa(valueNumber++);
+ if (name != null) {
+ buf.append(name).append('=');
+ }
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append('.').append(value);
+ text.add(buf.toString());
+ }
+
+ @Override
+ public Textifier visitAnnotation(
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ appendComa(valueNumber++);
+ if (name != null) {
+ buf.append(name).append('=');
+ }
+ buf.append('@');
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append('(');
+ text.add(buf.toString());
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ text.add(")");
+ return t;
+ }
+
+ @Override
+ public Textifier visitArray(
+ final String name)
+ {
+ buf.setLength(0);
+ appendComa(valueNumber++);
+ if (name != null) {
+ buf.append(name).append('=');
+ }
+ buf.append('{');
+ text.add(buf.toString());
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ text.add("}");
+ return t;
+ }
+
+ @Override
+ public void visitAnnotationEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Fields
+ // ------------------------------------------------------------------------
+
+ @Override
+ public Textifier visitFieldAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ return visitAnnotation(desc, visible);
+ }
+
+ @Override
+ public void visitFieldAttribute(final Attribute attr) {
+ visitAttribute(attr);
+ }
+
+ @Override
+ public void visitFieldEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Methods
+ // ------------------------------------------------------------------------
+
+ @Override
+ public Textifier visitAnnotationDefault() {
+ text.add(tab2 + "default=");
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ text.add("\n");
+ return t;
+ }
+
+ @Override
+ public Textifier visitMethodAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ return visitAnnotation(desc, visible);
+ }
+
+ @Override
+ public Textifier visitParameterAnnotation(
+ final int parameter,
+ final String desc,
+ final boolean visible)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append('@');
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append('(');
+ text.add(buf.toString());
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ text.add(visible ? ") // parameter " : ") // invisible, parameter ");
+ text.add(new Integer(parameter));
+ text.add("\n");
+ return t;
+ }
+
+ @Override
+ public void visitMethodAttribute(final Attribute attr) {
+ buf.setLength(0);
+ buf.append(tab).append("ATTRIBUTE ");
+ appendDescriptor(-1, attr.type);
+
+ if (attr instanceof Textifiable) {
+ ((Textifiable) attr).textify(buf, labelNames);
+ } else {
+ buf.append(" : unknown\n");
+ }
+
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitCode() {
+ }
+
+ @Override
+ public void visitFrame(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack)
+ {
+ buf.setLength(0);
+ buf.append(ltab);
+ buf.append("FRAME ");
+ switch (type) {
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ buf.append("FULL [");
+ appendFrameTypes(nLocal, local);
+ buf.append("] [");
+ appendFrameTypes(nStack, stack);
+ buf.append(']');
+ break;
+ case Opcodes.F_APPEND:
+ buf.append("APPEND [");
+ appendFrameTypes(nLocal, local);
+ buf.append(']');
+ break;
+ case Opcodes.F_CHOP:
+ buf.append("CHOP ").append(nLocal);
+ break;
+ case Opcodes.F_SAME:
+ buf.append("SAME");
+ break;
+ case Opcodes.F_SAME1:
+ buf.append("SAME1 ");
+ appendFrameTypes(1, stack);
+ break;
+ }
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitInsn(final int opcode) {
+ buf.setLength(0);
+ buf.append(tab2).append(OPCODES[opcode]).append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitIntInsn(final int opcode, final int operand) {
+ buf.setLength(0);
+ buf.append(tab2)
+ .append(OPCODES[opcode])
+ .append(' ')
+ .append(opcode == Opcodes.NEWARRAY
+ ? TYPES[operand]
+ : Integer.toString(operand))
+ .append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitVarInsn(final int opcode, final int var) {
+ buf.setLength(0);
+ buf.append(tab2)
+ .append(OPCODES[opcode])
+ .append(' ')
+ .append(var)
+ .append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitTypeInsn(final int opcode, final String type) {
+ buf.setLength(0);
+ buf.append(tab2).append(OPCODES[opcode]).append(' ');
+ appendDescriptor(INTERNAL_NAME, type);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitFieldInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append(OPCODES[opcode]).append(' ');
+ appendDescriptor(INTERNAL_NAME, owner);
+ buf.append('.').append(name).append(" : ");
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMethodInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append(OPCODES[opcode]).append(' ');
+ appendDescriptor(INTERNAL_NAME, owner);
+ buf.append('.').append(name).append(' ');
+ appendDescriptor(METHOD_DESCRIPTOR, desc);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitInvokeDynamicInsn(
+ String name,
+ String desc,
+ Handle bsm,
+ Object... bsmArgs)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append("INVOKEDYNAMIC").append(' ');
+ buf.append(name);
+ appendDescriptor(METHOD_DESCRIPTOR, desc);
+ buf.append(" [");
+ appendHandle(bsm);
+ buf.append(tab3).append("// arguments:");
+ if(bsmArgs.length == 0) {
+ buf.append(" none");
+ } else {
+ buf.append('\n').append(tab3);
+ for(int i = 0; i < bsmArgs.length; i++) {
+ Object cst = bsmArgs[i];
+ if (cst instanceof String) {
+ Printer.appendString(buf, (String) cst);
+ } else if (cst instanceof Type) {
+ buf.append(((Type) cst).getDescriptor()).append(".class");
+ } else if (cst instanceof Handle) {
+ appendHandle((Handle) cst);
+ } else {
+ buf.append(cst);
+ }
+ buf.append(", ");
+ }
+ buf.setLength(buf.length() - 2);
+ }
+ buf.append('\n');
+ buf.append(tab2).append("]\n");
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitJumpInsn(final int opcode, final Label label) {
+ buf.setLength(0);
+ buf.append(tab2).append(OPCODES[opcode]).append(' ');
+ appendLabel(label);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLabel(final Label label) {
+ buf.setLength(0);
+ buf.append(ltab);
+ appendLabel(label);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLdcInsn(final Object cst) {
+ buf.setLength(0);
+ buf.append(tab2).append("LDC ");
+ if (cst instanceof String) {
+ Printer.appendString(buf, (String) cst);
+ } else if (cst instanceof Type) {
+ buf.append(((Type) cst).getDescriptor()).append(".class");
+ } else {
+ buf.append(cst);
+ }
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitIincInsn(final int var, final int increment) {
+ buf.setLength(0);
+ buf.append(tab2)
+ .append("IINC ")
+ .append(var)
+ .append(' ')
+ .append(increment)
+ .append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitTableSwitchInsn(
+ final int min,
+ final int max,
+ final Label dflt,
+ final Label... labels)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append("TABLESWITCH\n");
+ for (int i = 0; i < labels.length; ++i) {
+ buf.append(tab3).append(min + i).append(": ");
+ appendLabel(labels[i]);
+ buf.append('\n');
+ }
+ buf.append(tab3).append("default: ");
+ appendLabel(dflt);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLookupSwitchInsn(
+ final Label dflt,
+ final int[] keys,
+ final Label[] labels)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append("LOOKUPSWITCH\n");
+ for (int i = 0; i < labels.length; ++i) {
+ buf.append(tab3).append(keys[i]).append(": ");
+ appendLabel(labels[i]);
+ buf.append('\n');
+ }
+ buf.append(tab3).append("default: ");
+ appendLabel(dflt);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMultiANewArrayInsn(final String desc, final int dims) {
+ buf.setLength(0);
+ buf.append(tab2).append("MULTIANEWARRAY ");
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append(' ').append(dims).append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitTryCatchBlock(
+ final Label start,
+ final Label end,
+ final Label handler,
+ final String type)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append("TRYCATCHBLOCK ");
+ appendLabel(start);
+ buf.append(' ');
+ appendLabel(end);
+ buf.append(' ');
+ appendLabel(handler);
+ buf.append(' ');
+ appendDescriptor(INTERNAL_NAME, type);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLocalVariable(
+ final String name,
+ final String desc,
+ final String signature,
+ final Label start,
+ final Label end,
+ final int index)
+ {
+ buf.setLength(0);
+ buf.append(tab2).append("LOCALVARIABLE ").append(name).append(' ');
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append(' ');
+ appendLabel(start);
+ buf.append(' ');
+ appendLabel(end);
+ buf.append(' ').append(index).append('\n');
+
+ if (signature != null) {
+ buf.append(tab2);
+ appendDescriptor(FIELD_SIGNATURE, signature);
+
+ TraceSignatureVisitor sv = new TraceSignatureVisitor(0);
+ SignatureReader r = new SignatureReader(signature);
+ r.acceptType(sv);
+ buf.append(tab2)
+ .append("// declaration: ")
+ .append(sv.getDeclaration())
+ .append('\n');
+ }
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitLineNumber(final int line, final Label start) {
+ buf.setLength(0);
+ buf.append(tab2).append("LINENUMBER ").append(line).append(' ');
+ appendLabel(start);
+ buf.append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMaxs(final int maxStack, final int maxLocals) {
+ buf.setLength(0);
+ buf.append(tab2).append("MAXSTACK = ").append(maxStack).append('\n');
+ text.add(buf.toString());
+
+ buf.setLength(0);
+ buf.append(tab2).append("MAXLOCALS = ").append(maxLocals).append('\n');
+ text.add(buf.toString());
+ }
+
+ @Override
+ public void visitMethodEnd() {
+ }
+
+ // ------------------------------------------------------------------------
+ // Common methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Prints a disassembled view of the given annotation.
+ *
+ * @param desc the class descriptor of the annotation class.
+ * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @return a visitor to visit the annotation values.
+ */
+ public Textifier visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ buf.setLength(0);
+ buf.append(tab).append('@');
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ buf.append('(');
+ text.add(buf.toString());
+ Textifier t = createTextifier();
+ text.add(t.getText());
+ text.add(visible ? ")\n" : ") // invisible\n");
+ return t;
+ }
+
+ /**
+ * Prints a disassembled view of the given attribute.
+ *
+ * @param attr an attribute.
+ */
+ public void visitAttribute(final Attribute attr) {
+ buf.setLength(0);
+ buf.append(tab).append("ATTRIBUTE ");
+ appendDescriptor(-1, attr.type);
+
+ if (attr instanceof Textifiable) {
+ ((Textifiable) attr).textify(buf, null);
+ } else {
+ buf.append(" : unknown\n");
+ }
+
+ text.add(buf.toString());
+ }
+
+ // ------------------------------------------------------------------------
+ // Utility methods
+ // ------------------------------------------------------------------------
+
+ /**
+ * Creates a new TraceVisitor instance.
+ *
+ * @return a new TraceVisitor.
+ */
+ protected Textifier createTextifier() {
+ return new Textifier();
+ }
+
+ /**
+ * Appends an internal name, a type descriptor or a type signature to
+ * {@link #buf buf}.
+ *
+ * @param type indicates if desc is an internal name, a field descriptor, a
+ * method descriptor, a class signature, ...
+ * @param desc an internal name, type descriptor, or type signature. May be
+ * <tt>null</tt>.
+ */
+ protected void appendDescriptor(final int type, final String desc) {
+ if (type == CLASS_SIGNATURE || type == FIELD_SIGNATURE
+ || type == METHOD_SIGNATURE)
+ {
+ if (desc != null) {
+ buf.append("// signature ").append(desc).append('\n');
+ }
+ } else {
+ buf.append(desc);
+ }
+ }
+
+ /**
+ * Appends the name of the given label to {@link #buf buf}. Creates a new
+ * label name if the given label does not yet have one.
+ *
+ * @param l a label.
+ */
+ protected void appendLabel(final Label l) {
+ if (labelNames == null) {
+ labelNames = new HashMap<Label, String>();
+ }
+ String name = labelNames.get(l);
+ if (name == null) {
+ name = "L" + labelNames.size();
+ labelNames.put(l, name);
+ }
+ buf.append(name);
+ }
+
+ /**
+ * Appends the information about the given handle to {@link #buf buf}.
+ *
+ * @param h a handle, non null.
+ */
+ protected void appendHandle(final Handle h) {
+ buf.append('\n').append(tab3);
+ int tag = h.getTag();
+ buf.append("// handle kind 0x").append(Integer.toHexString(tag)).append(" : ");
+ switch (tag) {
+ case Opcodes.H_GETFIELD:
+ buf.append("GETFIELD");
+ break;
+ case Opcodes.H_GETSTATIC:
+ buf.append("GETSTATIC");
+ break;
+ case Opcodes.H_PUTFIELD:
+ buf.append("PUTFIELD");
+ break;
+ case Opcodes.H_PUTSTATIC:
+ buf.append("PUTSTATIC");
+ break;
+ case Opcodes.H_INVOKEINTERFACE:
+ buf.append("INVOKEINTERFACE");
+ break;
+ case Opcodes.H_INVOKESPECIAL:
+ buf.append("INVOKESPECIAL");
+ break;
+ case Opcodes.H_INVOKESTATIC:
+ buf.append("INVOKESTATIC");
+ break;
+ case Opcodes.H_INVOKEVIRTUAL:
+ buf.append("INVOKEVIRTUAL");
+ break;
+ case Opcodes.H_NEWINVOKESPECIAL:
+ buf.append("NEWINVOKESPECIAL");
+ break;
+ }
+ buf.append('\n');
+ buf.append(tab3);
+ appendDescriptor(INTERNAL_NAME, h.getOwner());
+ buf.append('.');
+ buf.append(h.getName());
+ buf.append('(');
+ appendDescriptor(HANDLE_DESCRIPTOR, h.getDesc());
+ buf.append(')').append('\n');
+ }
+
+ /**
+ * Appends a string representation of the given access modifiers to {@link
+ * #buf buf}.
+ *
+ * @param access some access modifiers.
+ */
+ private void appendAccess(final int access) {
+ if ((access & Opcodes.ACC_PUBLIC) != 0) {
+ buf.append("public ");
+ }
+ if ((access & Opcodes.ACC_PRIVATE) != 0) {
+ buf.append("private ");
+ }
+ if ((access & Opcodes.ACC_PROTECTED) != 0) {
+ buf.append("protected ");
+ }
+ if ((access & Opcodes.ACC_FINAL) != 0) {
+ buf.append("final ");
+ }
+ if ((access & Opcodes.ACC_STATIC) != 0) {
+ buf.append("static ");
+ }
+ if ((access & Opcodes.ACC_SYNCHRONIZED) != 0) {
+ buf.append("synchronized ");
+ }
+ if ((access & Opcodes.ACC_VOLATILE) != 0) {
+ buf.append("volatile ");
+ }
+ if ((access & Opcodes.ACC_TRANSIENT) != 0) {
+ buf.append("transient ");
+ }
+ if ((access & Opcodes.ACC_ABSTRACT) != 0) {
+ buf.append("abstract ");
+ }
+ if ((access & Opcodes.ACC_STRICT) != 0) {
+ buf.append("strictfp ");
+ }
+ if ((access & Opcodes.ACC_ENUM) != 0) {
+ buf.append("enum ");
+ }
+ }
+
+ private void appendComa(final int i) {
+ if (i != 0) {
+ buf.append(", ");
+ }
+ }
+
+ private void appendFrameTypes(final int n, final Object[] o) {
+ for (int i = 0; i < n; ++i) {
+ if (i > 0) {
+ buf.append(' ');
+ }
+ if (o[i] instanceof String) {
+ String desc = (String) o[i];
+ if (desc.startsWith("[")) {
+ appendDescriptor(FIELD_DESCRIPTOR, desc);
+ } else {
+ appendDescriptor(INTERNAL_NAME, desc);
+ }
+ } else if (o[i] instanceof Integer) {
+ switch (((Integer) o[i]).intValue()) {
+ case 0:
+ appendDescriptor(FIELD_DESCRIPTOR, "T");
+ break;
+ case 1:
+ appendDescriptor(FIELD_DESCRIPTOR, "I");
+ break;
+ case 2:
+ appendDescriptor(FIELD_DESCRIPTOR, "F");
+ break;
+ case 3:
+ appendDescriptor(FIELD_DESCRIPTOR, "D");
+ break;
+ case 4:
+ appendDescriptor(FIELD_DESCRIPTOR, "J");
+ break;
+ case 5:
+ appendDescriptor(FIELD_DESCRIPTOR, "N");
+ break;
+ case 6:
+ appendDescriptor(FIELD_DESCRIPTOR, "U");
+ break;
+ }
+ } else {
+ appendLabel((Label) o[i]);
+ }
+ }
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
new file mode 100644
index 0000000..f112609
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
@@ -0,0 +1,96 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * An {@link AnnotationVisitor} that prints the annotations it visits with a
+ * {@link Printer}.
+ *
+ * @author Eric Bruneton
+ */
+public final class TraceAnnotationVisitor extends AnnotationVisitor {
+
+ private final Printer p;
+
+ public TraceAnnotationVisitor(final Printer p) {
+ this(null, p);
+ }
+
+ public TraceAnnotationVisitor(final AnnotationVisitor av, final Printer p) {
+ super(Opcodes.ASM4, av);
+ this.p = p;
+ }
+
+ @Override
+ public void visit(final String name, final Object value) {
+ p.visit(name, value);
+ super.visit(name, value);
+ }
+
+ @Override
+ public void visitEnum(
+ final String name,
+ final String desc,
+ final String value)
+ {
+ p.visitEnum(name, desc, value);
+ super.visitEnum(name, desc, value);
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String name,
+ final String desc)
+ {
+ Printer p = this.p.visitAnnotation(name, desc);
+ AnnotationVisitor av = this.av == null
+ ? null
+ : this.av.visitAnnotation(name, desc);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
+ public AnnotationVisitor visitArray(final String name) {
+ Printer p = this.p.visitArray(name);
+ AnnotationVisitor av = this.av == null
+ ? null
+ : this.av.visitArray(name);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
+ public void visitEnd() {
+ p.visitAnnotationEnd();
+ super.visitEnd();
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/TraceClassVisitor.java b/src/asm/scala/tools/asm/util/TraceClassVisitor.java
new file mode 100644
index 0000000..bb830b7
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/TraceClassVisitor.java
@@ -0,0 +1,232 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import java.io.PrintWriter;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.ClassVisitor;
+import scala.tools.asm.FieldVisitor;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A {@link ClassVisitor} that prints the classes it visits with a
+ * {@link Printer}. This class visitor can be used in the middle of a class
+ * visitor chain to trace the class that is visited at a given point in this
+ * chain. This may be useful for debugging purposes. <p> The trace printed when
+ * visiting the <tt>Hello</tt> class is the following: <p> <blockquote>
+ *
+ * <pre> // class version 49.0 (49) // access flags 0x21 public class Hello {
+ *
+ * // compiled from: Hello.java
+ *
+ * // access flags 0x1 public <init> ()V ALOAD 0 INVOKESPECIAL
+ * java/lang/Object <init> ()V RETURN MAXSTACK = 1 MAXLOCALS = 1
+ *
+ * // access flags 0x9 public static main ([Ljava/lang/String;)V GETSTATIC
+ * java/lang/System out Ljava/io/PrintStream; LDC "hello"
+ * INVOKEVIRTUAL java/io/PrintStream println (Ljava/lang/String;)V RETURN
+ * MAXSTACK = 2 MAXLOCALS = 1 } </pre>
+ *
+ * </blockquote> where <tt>Hello</tt> is defined by: <p> <blockquote>
+ *
+ * <pre> public class Hello {
+ *
+ * public static void main(String[] args) {
+ * System.out.println("hello"); } } </pre>
+ *
+ * </blockquote>
+ *
+ * @author Eric Bruneton
+ * @author Eugene Kuleshov
+ */
+public final class TraceClassVisitor extends ClassVisitor {
+
+ /**
+ * The print writer to be used to print the class. May be null.
+ */
+ private final PrintWriter pw;
+
+ /**
+ * The object that actually converts visit events into text.
+ */
+ public final Printer p;
+
+ /**
+ * Constructs a new {@link TraceClassVisitor}.
+ *
+ * @param pw the print writer to be used to print the class.
+ */
+ public TraceClassVisitor(final PrintWriter pw) {
+ this(null, pw);
+ }
+
+ /**
+ * Constructs a new {@link TraceClassVisitor}.
+ *
+ * @param cv the {@link ClassVisitor} to which this visitor delegates calls.
+ * May be <tt>null</tt>.
+ * @param pw the print writer to be used to print the class.
+ */
+ public TraceClassVisitor(final ClassVisitor cv, final PrintWriter pw) {
+ this(cv, new Textifier(), pw);
+ }
+
+ /**
+ * Constructs a new {@link TraceClassVisitor}.
+ *
+ * @param cv the {@link ClassVisitor} to which this visitor delegates calls.
+ * May be <tt>null</tt>.
+ * @param p the object that actually converts visit events into text.
+ * @param pw the print writer to be used to print the class. May be null if
+ * you simply want to use the result via
+ * {@link Printer#getText()}, instead of printing it.
+ */
+ public TraceClassVisitor(
+ final ClassVisitor cv,
+ final Printer p,
+ final PrintWriter pw)
+ {
+ super(Opcodes.ASM4, cv);
+ this.pw = pw;
+ this.p = p;
+ }
+
+ @Override
+ public void visit(
+ final int version,
+ final int access,
+ final String name,
+ final String signature,
+ final String superName,
+ final String[] interfaces)
+ {
+ p.visit(version, access, name, signature, superName, interfaces);
+ super.visit(version, access, name, signature, superName, interfaces);
+ }
+
+ @Override
+ public void visitSource(final String file, final String debug) {
+ p.visitSource(file, debug);
+ super.visitSource(file, debug);
+ }
+
+ @Override
+ public void visitOuterClass(
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ p.visitOuterClass(owner, name, desc);
+ super.visitOuterClass(owner, name, desc);
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ Printer p = this.p.visitClassAnnotation(desc, visible);
+ AnnotationVisitor av = cv == null ? null : cv.visitAnnotation(desc,
+ visible);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ p.visitClassAttribute(attr);
+ super.visitAttribute(attr);
+ }
+
+ @Override
+ public void visitInnerClass(
+ final String name,
+ final String outerName,
+ final String innerName,
+ final int access)
+ {
+ p.visitInnerClass(name, outerName, innerName, access);
+ super.visitInnerClass(name, outerName, innerName, access);
+ }
+
+ @Override
+ public FieldVisitor visitField(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final Object value)
+ {
+ Printer p = this.p.visitField(access,
+ name,
+ desc,
+ signature,
+ value);
+ FieldVisitor fv = cv == null ? null : cv.visitField(access,
+ name,
+ desc,
+ signature,
+ value);
+ return new TraceFieldVisitor(fv, p);
+ }
+
+ @Override
+ public MethodVisitor visitMethod(
+ final int access,
+ final String name,
+ final String desc,
+ final String signature,
+ final String[] exceptions)
+ {
+ Printer p = this.p.visitMethod(access,
+ name,
+ desc,
+ signature,
+ exceptions);
+ MethodVisitor mv = cv == null ? null : cv.visitMethod(access,
+ name,
+ desc,
+ signature,
+ exceptions);
+ return new TraceMethodVisitor(mv, p);
+ }
+
+ @Override
+ public void visitEnd() {
+ p.visitClassEnd();
+ if (pw != null) {
+ p.print(pw);
+ pw.flush();
+ }
+ super.visitEnd();
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/TraceFieldVisitor.java b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
new file mode 100644
index 0000000..f537e83
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
@@ -0,0 +1,78 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.FieldVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A {@link FieldVisitor} that prints the fields it visits with a
+ * {@link Printer}.
+ *
+ * @author Eric Bruneton
+ */
+public final class TraceFieldVisitor extends FieldVisitor {
+
+ public final Printer p;
+
+ public TraceFieldVisitor(final Printer p) {
+ this(null, p);
+ }
+
+ public TraceFieldVisitor(final FieldVisitor fv, final Printer p) {
+ super(Opcodes.ASM4, fv);
+ this.p = p;
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ Printer p = this.p.visitFieldAnnotation(desc, visible);
+ AnnotationVisitor av = fv == null ? null : fv.visitAnnotation(desc,
+ visible);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ p.visitFieldAttribute(attr);
+ super.visitAttribute(attr);
+ }
+
+ @Override
+ public void visitEnd() {
+ p.visitFieldEnd();
+ super.visitEnd();
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/TraceMethodVisitor.java b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
new file mode 100644
index 0000000..9aabf20
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
@@ -0,0 +1,264 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import scala.tools.asm.AnnotationVisitor;
+import scala.tools.asm.Attribute;
+import scala.tools.asm.Handle;
+import scala.tools.asm.Label;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+/**
+ * A {@link MethodVisitor} that prints the methods it visits with a
+ * {@link Printer}.
+ *
+ * @author Eric Bruneton
+ */
+public final class TraceMethodVisitor extends MethodVisitor {
+
+ public final Printer p;
+
+ public TraceMethodVisitor(final Printer p) {
+ this(null, p);
+ }
+
+ public TraceMethodVisitor(final MethodVisitor mv, final Printer p) {
+ super(Opcodes.ASM4, mv);
+ this.p = p;
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotation(
+ final String desc,
+ final boolean visible)
+ {
+ Printer p = this.p.visitMethodAnnotation(desc, visible);
+ AnnotationVisitor av = mv == null ? null : mv.visitAnnotation(desc,
+ visible);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
+ public void visitAttribute(final Attribute attr) {
+ p.visitMethodAttribute(attr);
+ super.visitAttribute(attr);
+ }
+
+ @Override
+ public AnnotationVisitor visitAnnotationDefault() {
+ Printer p = this.p.visitAnnotationDefault();
+ AnnotationVisitor av = mv == null ? null : mv.visitAnnotationDefault();
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
+ public AnnotationVisitor visitParameterAnnotation(
+ final int parameter,
+ final String desc,
+ final boolean visible)
+ {
+ Printer p = this.p.visitParameterAnnotation(parameter,
+ desc,
+ visible);
+ AnnotationVisitor av = mv == null
+ ? null
+ : mv.visitParameterAnnotation(parameter, desc, visible);
+ return new TraceAnnotationVisitor(av, p);
+ }
+
+ @Override
+ public void visitCode() {
+ p.visitCode();
+ super.visitCode();
+ }
+
+ @Override
+ public void visitFrame(
+ final int type,
+ final int nLocal,
+ final Object[] local,
+ final int nStack,
+ final Object[] stack)
+ {
+ p.visitFrame(type, nLocal, local, nStack, stack);
+ super.visitFrame(type, nLocal, local, nStack, stack);
+ }
+
+ @Override
+ public void visitInsn(final int opcode) {
+ p.visitInsn(opcode);
+ super.visitInsn(opcode);
+ }
+
+ @Override
+ public void visitIntInsn(final int opcode, final int operand) {
+ p.visitIntInsn(opcode, operand);
+ super.visitIntInsn(opcode, operand);
+ }
+
+ @Override
+ public void visitVarInsn(final int opcode, final int var) {
+ p.visitVarInsn(opcode, var);
+ super.visitVarInsn(opcode, var);
+ }
+
+ @Override
+ public void visitTypeInsn(final int opcode, final String type) {
+ p.visitTypeInsn(opcode, type);
+ super.visitTypeInsn(opcode, type);
+ }
+
+ @Override
+ public void visitFieldInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ p.visitFieldInsn(opcode, owner, name, desc);
+ super.visitFieldInsn(opcode, owner, name, desc);
+ }
+
+ @Override
+ public void visitMethodInsn(
+ final int opcode,
+ final String owner,
+ final String name,
+ final String desc)
+ {
+ p.visitMethodInsn(opcode, owner, name, desc);
+ super.visitMethodInsn(opcode, owner, name, desc);
+ }
+
+ @Override
+ public void visitInvokeDynamicInsn(
+ String name,
+ String desc,
+ Handle bsm,
+ Object... bsmArgs)
+ {
+ p.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
+ super.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
+ }
+
+ @Override
+ public void visitJumpInsn(final int opcode, final Label label) {
+ p.visitJumpInsn(opcode, label);
+ super.visitJumpInsn(opcode, label);
+ }
+
+ @Override
+ public void visitLabel(final Label label) {
+ p.visitLabel(label);
+ super.visitLabel(label);
+ }
+
+ @Override
+ public void visitLdcInsn(final Object cst) {
+ p.visitLdcInsn(cst);
+ super.visitLdcInsn(cst);
+ }
+
+ @Override
+ public void visitIincInsn(final int var, final int increment) {
+ p.visitIincInsn(var, increment);
+ super.visitIincInsn(var, increment);
+ }
+
+ @Override
+ public void visitTableSwitchInsn(
+ final int min,
+ final int max,
+ final Label dflt,
+ final Label... labels)
+ {
+ p.visitTableSwitchInsn(min, max, dflt, labels);
+ super.visitTableSwitchInsn(min, max, dflt, labels);
+ }
+
+ @Override
+ public void visitLookupSwitchInsn(
+ final Label dflt,
+ final int[] keys,
+ final Label[] labels)
+ {
+ p.visitLookupSwitchInsn(dflt, keys, labels);
+ super.visitLookupSwitchInsn(dflt, keys, labels);
+ }
+
+ @Override
+ public void visitMultiANewArrayInsn(final String desc, final int dims) {
+ p.visitMultiANewArrayInsn(desc, dims);
+ super.visitMultiANewArrayInsn(desc, dims);
+ }
+
+ @Override
+ public void visitTryCatchBlock(
+ final Label start,
+ final Label end,
+ final Label handler,
+ final String type)
+ {
+ p.visitTryCatchBlock(start, end, handler, type);
+ super.visitTryCatchBlock(start, end, handler, type);
+ }
+
+ @Override
+ public void visitLocalVariable(
+ final String name,
+ final String desc,
+ final String signature,
+ final Label start,
+ final Label end,
+ final int index)
+ {
+ p.visitLocalVariable(name, desc, signature, start, end, index);
+ super.visitLocalVariable(name, desc, signature, start, end, index);
+ }
+
+ @Override
+ public void visitLineNumber(final int line, final Label start) {
+ p.visitLineNumber(line, start);
+ super.visitLineNumber(line, start);
+ }
+
+ @Override
+ public void visitMaxs(final int maxStack, final int maxLocals) {
+ p.visitMaxs(maxStack, maxLocals);
+ super.visitMaxs(maxStack, maxLocals);
+ }
+
+ @Override
+ public void visitEnd() {
+ p.visitMethodEnd();
+ super.visitEnd();
+ }
+}
diff --git a/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
new file mode 100644
index 0000000..a37b759
--- /dev/null
+++ b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
@@ -0,0 +1,318 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package scala.tools.asm.util;
+
+import scala.tools.asm.Opcodes;
+import scala.tools.asm.signature.SignatureVisitor;
+
+/**
+ * A {@link SignatureVisitor} that prints a disassembled view of the signature
+ * it visits.
+ *
+ * @author Eugene Kuleshov
+ * @author Eric Bruneton
+ */
+public final class TraceSignatureVisitor extends SignatureVisitor {
+
+ private final StringBuffer declaration;
+
+ private boolean isInterface;
+
+ private boolean seenFormalParameter;
+
+ private boolean seenInterfaceBound;
+
+ private boolean seenParameter;
+
+ private boolean seenInterface;
+
+ private StringBuffer returnType;
+
+ private StringBuffer exceptions;
+
+ /**
+ * Stack used to keep track of class types that have arguments. Each element
+ * of this stack is a boolean encoded in one bit. The top of the stack is
+ * the lowest order bit. Pushing false = *2, pushing true = *2+1, popping =
+ * /2.
+ */
+ private int argumentStack;
+
+ /**
+ * Stack used to keep track of array class types. Each element of this stack
+ * is a boolean encoded in one bit. The top of the stack is the lowest order
+ * bit. Pushing false = *2, pushing true = *2+1, popping = /2.
+ */
+ private int arrayStack;
+
+ private String separator = "";
+
+ public TraceSignatureVisitor(final int access) {
+ super(Opcodes.ASM4);
+ isInterface = (access & Opcodes.ACC_INTERFACE) != 0;
+ this.declaration = new StringBuffer();
+ }
+
+ private TraceSignatureVisitor(final StringBuffer buf) {
+ super(Opcodes.ASM4);
+ this.declaration = buf;
+ }
+
+ @Override
+ public void visitFormalTypeParameter(final String name) {
+ declaration.append(seenFormalParameter ? ", " : "<").append(name);
+ seenFormalParameter = true;
+ seenInterfaceBound = false;
+ }
+
+ @Override
+ public SignatureVisitor visitClassBound() {
+ separator = " extends ";
+ startType();
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitInterfaceBound() {
+ separator = seenInterfaceBound ? ", " : " extends ";
+ seenInterfaceBound = true;
+ startType();
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitSuperclass() {
+ endFormals();
+ separator = " extends ";
+ startType();
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitInterface() {
+ separator = seenInterface ? ", " : isInterface
+ ? " extends "
+ : " implements ";
+ seenInterface = true;
+ startType();
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitParameterType() {
+ endFormals();
+ if (seenParameter) {
+ declaration.append(", ");
+ } else {
+ seenParameter = true;
+ declaration.append('(');
+ }
+ startType();
+ return this;
+ }
+
+ @Override
+ public SignatureVisitor visitReturnType() {
+ endFormals();
+ if (seenParameter) {
+ seenParameter = false;
+ } else {
+ declaration.append('(');
+ }
+ declaration.append(')');
+ returnType = new StringBuffer();
+ return new TraceSignatureVisitor(returnType);
+ }
+
+ @Override
+ public SignatureVisitor visitExceptionType() {
+ if (exceptions == null) {
+ exceptions = new StringBuffer();
+ } else {
+ exceptions.append(", ");
+ }
+ // startType();
+ return new TraceSignatureVisitor(exceptions);
+ }
+
+ @Override
+ public void visitBaseType(final char descriptor) {
+ switch (descriptor) {
+ case 'V':
+ declaration.append("void");
+ break;
+ case 'B':
+ declaration.append("byte");
+ break;
+ case 'J':
+ declaration.append("long");
+ break;
+ case 'Z':
+ declaration.append("boolean");
+ break;
+ case 'I':
+ declaration.append("int");
+ break;
+ case 'S':
+ declaration.append("short");
+ break;
+ case 'C':
+ declaration.append("char");
+ break;
+ case 'F':
+ declaration.append("float");
+ break;
+ // case 'D':
+ default:
+ declaration.append("double");
+ break;
+ }
+ endType();
+ }
+
+ @Override
+ public void visitTypeVariable(final String name) {
+ declaration.append(name);
+ endType();
+ }
+
+ @Override
+ public SignatureVisitor visitArrayType() {
+ startType();
+ arrayStack |= 1;
+ return this;
+ }
+
+ @Override
+ public void visitClassType(final String name) {
+ if ("java/lang/Object".equals(name)) {
+ // Map<java.lang.Object,java.util.List>
+ // or
+ // abstract public V get(Object key); (seen in Dictionary.class)
+ // should have Object
+ // but java.lang.String extends java.lang.Object is unnecessary
+ boolean needObjectClass = argumentStack % 2 != 0 || seenParameter;
+ if (needObjectClass) {
+ declaration.append(separator).append(name.replace('/', '.'));
+ }
+ } else {
+ declaration.append(separator).append(name.replace('/', '.'));
+ }
+ separator = "";
+ argumentStack *= 2;
+ }
+
+ @Override
+ public void visitInnerClassType(final String name) {
+ if (argumentStack % 2 != 0) {
+ declaration.append('>');
+ }
+ argumentStack /= 2;
+ declaration.append('.');
+ declaration.append(separator).append(name.replace('/', '.'));
+ separator = "";
+ argumentStack *= 2;
+ }
+
+ @Override
+ public void visitTypeArgument() {
+ if (argumentStack % 2 == 0) {
+ ++argumentStack;
+ declaration.append('<');
+ } else {
+ declaration.append(", ");
+ }
+ declaration.append('?');
+ }
+
+ @Override
+ public SignatureVisitor visitTypeArgument(final char tag) {
+ if (argumentStack % 2 == 0) {
+ ++argumentStack;
+ declaration.append('<');
+ } else {
+ declaration.append(", ");
+ }
+
+ if (tag == EXTENDS) {
+ declaration.append("? extends ");
+ } else if (tag == SUPER) {
+ declaration.append("? super ");
+ }
+
+ startType();
+ return this;
+ }
+
+ @Override
+ public void visitEnd() {
+ if (argumentStack % 2 != 0) {
+ declaration.append('>');
+ }
+ argumentStack /= 2;
+ endType();
+ }
+
+ public String getDeclaration() {
+ return declaration.toString();
+ }
+
+ public String getReturnType() {
+ return returnType == null ? null : returnType.toString();
+ }
+
+ public String getExceptions() {
+ return exceptions == null ? null : exceptions.toString();
+ }
+
+ // -----------------------------------------------
+
+ private void endFormals() {
+ if (seenFormalParameter) {
+ declaration.append('>');
+ seenFormalParameter = false;
+ }
+ }
+
+ private void startType() {
+ arrayStack *= 2;
+ }
+
+ private void endType() {
+ if (arrayStack % 2 == 0) {
+ arrayStack /= 2;
+ } else {
+ while (arrayStack % 2 != 0) {
+ arrayStack /= 2;
+ declaration.append("[]");
+ }
+ }
+ }
+}
diff --git a/src/attic/README b/src/attic/README
deleted file mode 100644
index 9fb600a..0000000
--- a/src/attic/README
+++ /dev/null
@@ -1,2 +0,0 @@
-This is a holding area for source files which aren't used in
-trunk anymore but which we're keeping available for a time.
\ No newline at end of file
diff --git a/src/attic/scala/tools/nsc/models/Models.scala b/src/attic/scala/tools/nsc/models/Models.scala
deleted file mode 100644
index a1344d8..0000000
--- a/src/attic/scala/tools/nsc/models/Models.scala
+++ /dev/null
@@ -1,419 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package models
-
-import scala.tools.nsc.Global
-
-/** This abstract class ...
- *
- * @author Sean McDirmid
- * @version 1.0
- */
-abstract class Models {
- val global: Global
- import global._
-
- def acceptPrivate = true
-
- object Kinds extends Enumeration {
- type Kind = Value
- val CONSTRUCTOR = Value("Constructor")
- val OBJECT = Value("Object")
- val CLASS = Value("Class")
- val TRAIT = Value("Trait")
- val DEF = Value("Def")
- val VAL = Value("Val")
- val VAR = Value("Var")
- val ARG = Value("Arg")
- val TPARAM = Value("Type")
- }
- import Kinds._
-
- def KINDS = List(CLASS, TRAIT, OBJECT, CONSTRUCTOR, TPARAM, VAL, VAR, DEF)
-
- def labelFor(kind: Kind): String = kind.toString
-
- def stringsFor(mods: Modifiers) = {
- var modString: List[String] = Nil
- if (mods.isPrivate ) modString = "private" :: modString
- if (mods.isProtected) modString = "protected" :: modString
- if (mods.isOverride ) modString = "override" :: modString
- if (mods.isAbstract ) modString = "abstract" :: modString
- if (mods.isDeferred ) modString = "abstract" :: modString
- if (mods.isCase ) modString = "case" :: modString
- if (mods.isSealed ) modString = "sealed" :: modString
- if (mods.isFinal ) modString = "final" :: modString
- if (mods.isImplicit ) modString = "implicit" :: modString
- modString
- }
-
- def codeFor(kind: Kind): String = kind match {
- case CONSTRUCTOR => codeFor(DEF)
- case _ => labelFor(kind).toLowerCase()
- }
-
- def pluralFor(kind: Kind): String = kind match {
- case CLASS => "Classes"
- case _ => labelFor(kind) + "s"
- }
-
- def kindOf(tree: Tree) = {
- val term0 = tree.symbol;
- if (term0 != NoSymbol) {
- if (term0.isVariable) VAR
- else if (term0.isValueParameter) ARG
- else if (term0.isMethod) {
- if (term0.nameString.equals("this")) CONSTRUCTOR
- else DEF
- }
- else if (term0.isClass) {
- if (tree.asInstanceOf[MemberDef].mods.isTrait) TRAIT
- else CLASS
- }
- else if (term0.isModule) OBJECT
- else if (term0.isValue) VAL
- else if (term0.isTypeParameter) TPARAM
- else if (term0.isType) TPARAM
- else {
- // Console.err.println("UNRECOGNIZED SYMBOL: " + term0 + " " + name);
- null
- }
- } else {
- val ddef = tree.asInstanceOf[ValOrDefDef];
- if (ddef.mods.isMutable) VAR;
- else VAL;
- }
- }
-
- abstract class Model
-
- // def textFor(tp : AbsTypeDef) : String = tp.toString()
-
- /**
- * @param tree ...
- * @return ...
- */
- def textFor(tree: Tree): String = {
- var ret = ""
- if (tree.symbol != NoSymbol) tree.symbol.name.toString()
- if (ret.equals("<init>")) ret = "this"
- tree match {
- case cdef: ClassDef =>
- ret = ret + "[" +
- (for (tparam <- cdef.tparams) yield textFor(tparam)) + "]";
- cdef.mods
- case vdef: ValOrDefDef =>
- vdef match {
- case ddef: DefDef =>
- ret = ret + "[" +
- (for (tparam <- ddef.tparams) yield textFor(tparam)) + "]";
- for (vparams <- ddef.vparamss) {
- ret = ret + "(" +
- (for (vparam <- vparams) yield textFor(vparam)) + ")";
- }
- case _ =>
- }
- ret = ret + " : " + textFor(vdef.tpt)
-/* Martin to Sean: Please check whether this can be dropped or does it need to be adapted?
- case atd: AbsTypeDef =>
- ret = ret + "[" + (for (tparam <- atd.tparams) yield textFor(tparam)) + "]" +
- ((if(atd.hi ne null) " <: " + textFor(atd.hi) else "") +
- (if(atd.lo ne null) " >: " + textFor(atd.lo) else ""));
-*/
- case _ =>
- ret = ret + tree.toString()
- }
- ret
- }
-
- def mods1(tree: Tree) = tree match {
- case mdef: MemberDef => mdef.mods
- case _ => NoMods
- }
-
- abstract class HasTree(val parent: Composite) extends Model with Ordered[HasTree] {
- var tree : Tree = _
- def update(tree0: Tree): Boolean = {
- tree = tree0
- false
- }
- def replacedBy(tree0: Tree): Boolean = true
- def text: String = textFor(tree)
- var mods0 = NoMods
-
- def mods = if (mods0 != NoMods) mods0 else mods1(tree)
-
- override def toString(): String = tree.toString()
-
- def compare(that: HasTree): Int = {
- val idx = KINDS.indexOf(kind)
- val jdx = KINDS.indexOf(that.kind)
- if (idx != jdx) return idx - jdx
- val result = tree.symbol.nameString.compare(that.tree.symbol.nameString)
- if (result != 0) result
- else toString().compare(that.toString())
- }
- def compare [b >: HasTree <% Ordered[b]](that: b): Int = {
- if (that.isInstanceOf[HasTree])
- compare(that.asInstanceOf[HasTree])
- else -1
- }
-
- def kind = kindOf(tree)
-
- //override def add(from: Composite, model: HasTree): Unit = { parent.add(from, model) }
- //override def remove(from: Composite, model: HasTree): Unit = { parent.remove(from, model) }
- }
-
- class ImportMod(parent0: Composite) extends HasTree(parent0) {
- def treex = tree.asInstanceOf[Import]
-
- override def replacedBy(tree0: Tree): Boolean =
- if (super.replacedBy(tree0) && tree0.isInstanceOf[Import]) {
- val tree1 = tree0.asInstanceOf[Import]
- tree1.tpe == treex.tpe
- } else
- false
- }
-
- class PackageMod(parent0: Composite) extends HasTree(parent0) {
- def treex = tree.asInstanceOf[PackageDef]
- }
-
- trait Composite extends Model {
- import scala.collection.mutable._
-
- class Members extends HashSet[HasTree]
- // val members = new Members
- object members extends Members
-
- def isMember(tree: Tree): Boolean = tree.isInstanceOf[Import] // imports welcome anywhere.
-
- def member(tree: Tree, members: List[Tree]): Tree = tree
-
- def update0(members1: List[Tree]): Boolean = {
- // Console.err.println("update0 " + this + " " + members1)
- // Martin: This is rather ugly code. We should use pattern matching here!
- if (members1.length == 1 && members1.head.isInstanceOf[PackageDef])
- return update0(members1.head.asInstanceOf[PackageDef].stats)
- val marked = new HashSet[HasTree]
- var updated = false
- for (mmbr1 <- members1) if (mmbr1.isInstanceOf[PackageDef]) {
- Console.err.println("PACKAGE: " + mmbr1.symbol + " " + members1.length)
- } else if (isMember(mmbr1)) {
- val mmbr2 = member(mmbr1, members1)
- if (mmbr2 ne null) {
- var found = false
- for (mmbr <- members) if (!found && mmbr.replacedBy(mmbr2)) {
- //Console.err.println("REPLACE: " + mmbr + " with " + mmbr2)
- mmbr.mods0 = mods1(mmbr1)
- found = true
- updated = mmbr.update(mmbr2) || updated
- marked += mmbr
- }
- if (!found) {
- updated = true
- val add = modelFor(mmbr2, this)
- add.update(mmbr2)
- add.mods0 = mods1(mmbr1) &
- ~symtab.Flags.ACCESSOR & ~symtab.Flags.SYNTHETIC
- val sz = members.size
- members += (add)
- assert(members.size == sz + 1)
- marked += add
- }
- }
- // Console.err.println("update1 " + this + " " + members + " " + marked)
- }
- val sz = members.size
- members.intersect(marked)
- updated = updated || sz < members.size
- // check if anything was removed!
- updated
- }
- }
- abstract class MemberMod(parent0: Composite) extends HasTree(parent0) {
- def treex = tree.asInstanceOf[MemberDef]
-
- def name: Name = treex.name
-
- override def replacedBy(tree0: Tree): Boolean =
- if (super.replacedBy(tree0) && tree0.isInstanceOf[MemberDef]) {
- val tree1 = tree0.asInstanceOf[MemberDef]
- treex.toString().equals(tree1.toString())
- } else false
-
- override def update(tree0: Tree): Boolean = {
- val updated = (tree eq null) || (treex.mods != tree0.asInstanceOf[MemberDef].mods)
- super.update(tree0) || updated;
- }
- }
-
- abstract class MemberComposite(parent0: Composite) extends MemberMod(parent0) with Composite
-
- trait HasClassObjects extends Composite {
- override def isMember(tree: Tree): Boolean =
- super.isMember(tree) || tree.isInstanceOf[ImplDef]
- }
-
- abstract class ValOrDefMod(parent0: Composite) extends MemberComposite(parent0) with HasClassObjects {
- override def replacedBy(tree0: Tree): Boolean =
- super.replacedBy(tree0) && tree0.isInstanceOf[ValOrDefDef]
-
- override def update(tree0: Tree): Boolean = {
- val tree1 = tree0.asInstanceOf[ValOrDefDef]
- val updated = (tree eq null) || treex.tpe != tree1.tpe
- update0(flatten(tree1.rhs, (tree2: Tree) => isMember(tree2)))
- super.update(tree0) || updated
- }
- }
-
- class ValMod(parent0: Composite) extends ValOrDefMod(parent0) {
- def treez = tree.asInstanceOf[ValDef]
- override def replacedBy(tree0: Tree): Boolean =
- super.replacedBy(tree0) && tree0.isInstanceOf[ValDef]
- }
-
- class DefMod(parent0: Composite) extends ValOrDefMod(parent0) {
- def treez = tree.asInstanceOf[DefDef]
-
- override def replacedBy(tree0: Tree) : Boolean =
- if (super.replacedBy(tree0) && tree0.isInstanceOf[DefDef]) {
- val tree1 = tree0.asInstanceOf[DefDef]
- if (tree1.vparamss.length == treez.vparamss.length) {
- val tpz = for (vd <- treez.vparamss) yield for (xd <- vd) yield xd.tpe;
- val tp1 = for (vd <- tree1.vparamss) yield for (xd <- vd) yield xd.tpe;
- tpz == tp1
- } else false
- } else false
- }
-
- abstract class ImplMod(parent0: Composite)
- extends MemberComposite(parent0) with HasClassObjects {
- override def replacedBy(tree0: Tree): Boolean =
- super.replacedBy(tree0) && tree0.isInstanceOf[ImplDef]
- override def isMember(tree: Tree): Boolean = (super.isMember(tree) ||
- (tree.isInstanceOf[ValOrDefDef] &&
- (acceptPrivate || !tree.asInstanceOf[ValOrDefDef].mods.isPrivate)
- /* && !tree.asInstanceOf[ValOrDefDef].mods.isPrivate */
- /* && !tree.asInstanceOf[ValOrDefDef].mods.isAccessor */) ||
- treeInfo.isAliasTypeDef(tree))
-
- override def member(tree: Tree, members: List[Tree]): Tree = {
- val tree0 = if (tree.isInstanceOf[DefDef]) {
- val ddef = tree.asInstanceOf[DefDef]
- ddef.mods
- if (ddef.mods.isAccessor && (ddef.symbol ne null)) {
- val sym0 = ddef.symbol;
- if (sym0.isSetter) return null;
- assert(sym0.isGetter);
- val sym = sym0.accessed
- val ret = if (sym == NoSymbol) {
- val sym = analyzer.underlying(sym0)
- //val name = nme.getterToSetter(sym0.name)
- //val setter = sym0.owner.info.decl(name);
- val isVar = sym.isVariable;
- val mods = (ddef.mods |
- (if (isVar) symtab.Flags.MUTABLE else 0) | symtab.Flags.DEFERRED) &
- ~symtab.Flags.ACCESSOR & ~symtab.Flags.SYNTHETIC
- val tree =
- ValDef(mods, ddef.name, ddef.tpt, ddef.rhs).setPos(ddef.pos).setSymbol(sym);
- tree :: Nil;
- } else for (member <- members if member.symbol == sym) yield member
- if (ret.isEmpty) null
- else ret.head
- } else tree
- } else super.member(tree, members)
-
- def sym = tree0.symbol
- if ((tree0 eq null) || tree0.pos == NoPosition) null
- else if (!acceptPrivate &&
- tree0.isInstanceOf[ValOrDefDef] &&
- tree.asInstanceOf[ValOrDefDef].mods.isPrivate) null
- else tree0
- }
-
- def children(tree0: Tree): List[Tree] =
- tree0.asInstanceOf[ImplDef].impl.body
-
- override def update(tree0: Tree): Boolean = {
- var updated = update0(children(tree0))
- super.update(tree0) || updated
- }
- }
-
- class ClassMod(parent0: Composite) extends ImplMod(parent0) {
- def treez = tree.asInstanceOf[ClassDef]
- override def replacedBy(tree0: Tree): Boolean =
- super.replacedBy(tree0) && tree0.isInstanceOf[ClassDef]
- }
-
- class ObjectMod(parent0: Composite) extends ImplMod(parent0) {
- def treez = tree.asInstanceOf[ModuleDef]
- override def replacedBy(tree0: Tree): Boolean =
- super.replacedBy(tree0) && tree0.isInstanceOf[ModuleDef]
- }
- class TypeMod(parent0: Composite) extends MemberMod(parent0) {
- override def replacedBy(tree0 : Tree) : Boolean = (super.replacedBy(tree0) && tree0.isInstanceOf[TypeDef]);
- }
- def SourceMod(original: CompilationUnit) = new SourceMod(original)
-
- class SourceMod(val original: CompilationUnit) extends Composite with HasClassObjects {
- update(original)
- //var listener : Listener = null;
- def update(unit: CompilationUnit) = unit.body match {
- case pdef: PackageDef => try {
- update0(pdef.stats)
- } catch {
- case e: Error => members.clear; update0(pdef.stats)
- }
- case _ =>
- }
-
- override def isMember(tree: Tree): Boolean =
- super.isMember(tree) || tree.isInstanceOf[Import]
- }
-
- def flatten0(exprs: List[Tree], filter: (Tree) => Boolean): List[Tree] =
- for (expr <- exprs; t: Tree <- flatten(expr,filter)) yield t
-
- def flatten(expr: Tree, filter: (Tree) => Boolean): List[Tree] =
- if (filter(expr)) expr :: Nil; else expr match {
- case Block(stats, last) =>
- flatten0(stats, filter) ::: flatten(last, filter)
- case If(cond, thenp, elsep) =>
- flatten(cond,filter) ::: flatten(thenp,filter) ::: flatten(elsep,filter);
- case Assign(lhs, rhs) =>
- flatten(rhs, filter)
- case CaseDef(pat, guard, body) =>
- flatten(body, filter)
- case Return(expr0) =>
- flatten(expr0, filter)
- case Throw(expr0) =>
- flatten(expr0,filter)
- case Try(block, catches, finalizer) =>
- flatten(block, filter) ::: flatten(finalizer, filter) ::: flatten0(catches, filter)
- case Match(selector, cases) =>
- flatten(selector, filter) ::: flatten0(cases, filter)
- case Apply(fun, args) =>
- flatten(fun, filter) ::: flatten0(args, filter)
- case TypeApply(fun, args) =>
- flatten(fun, filter) ::: flatten0(args, filter)
- case _ =>
- Nil
- }
-
- def modelFor(tree: Tree, parent: Composite): HasTree = tree match {
- case _: ValDef => new ValMod(parent)
- case _: DefDef => new DefMod(parent)
- case _: ClassDef => new ClassMod(parent)
- case _: ModuleDef => new ObjectMod(parent)
- case _: TypeDef => new TypeMod(parent)
- case _: Import => new ImportMod(parent)
- }
-
-}
diff --git a/src/attic/scala/tools/nsc/models/SemanticTokens.scala b/src/attic/scala/tools/nsc/models/SemanticTokens.scala
deleted file mode 100644
index 6c2fe79..0000000
--- a/src/attic/scala/tools/nsc/models/SemanticTokens.scala
+++ /dev/null
@@ -1,702 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package models
-
-import java.lang.Character.isJavaIdentifierPart
-import java.lang.Thread
-
-import scala.collection.mutable.{HashMap, HashSet}
-import scala.tools.nsc.Global
-import scala.tools.nsc.symtab.{Flags, Names}
-import scala.tools.nsc.symtab.Flags.DEFERRED
-import scala.tools.nsc.util.{BatchSourceFile, SourceFile}
-import scala.reflect.NameTransformer
-
-class SemanticTokens(val compiler: Global) {
- import compiler._
- object walker extends symtab.SymbolWalker {
- lazy val global : compiler.type = compiler
- }
-
- abstract class Kind {}
- object OBJECT extends Kind
- object CLASS extends Kind
- object TRAIT extends Kind
- object DEF extends Kind
- object VAL extends Kind
- object VAR extends Kind
- object ARG extends Kind
- object TPARAM extends Kind
-
- type AnyClass = Class[_]
-
- // static constants here
-
- abstract class Token {
- def length: Int
- def prev: HasNext
- def next: HasPrev
- }
-
- def eatKeyword(source: BatchSourceFile, pos: Int, keywords: List[String]) : Int = {
- if (keywords.isEmpty)
- pos
- else if (pos == source.length)
- -1
- else if (source.beginsWith(pos, " "))
- eatKeywords(source, pos + 1)
- else if (source.beginsWith(pos, keywords.head + " "))
- eatKeywords(source, pos + keywords.head.length + 1)
- else
- eatKeyword(source, pos, keywords.tail)
- }
-
- def eatKeywords(source: BatchSourceFile, pos: Int): Int = {
- val keywords =
- "package" :: "val" :: "var" :: "def" :: "class" :: "trait" :: "override" :: "case" ::
- "object" :: "sealed" :: "private" :: "protected" :: Nil
- if (pos != -1) eatKeyword(source, pos, keywords)
- else pos
- }
-
- trait HasNext extends Token {
- var next0: HasPrev = _
- def next = next0
- }
-
- trait HasPrev extends Token {
- var prev0: HasNext = _
- def prev = prev0
- }
-
- abstract class Actual extends HasNext with HasPrev {
- def convertToGap: (Int, Actual) = {
- val nextGap = next.isInstanceOf[Gap]
- val prevGap = prev.isInstanceOf[Gap]
-
- if (prevGap) {
- val ret = prev.length
- val gap = prev.asInstanceOf[Gap]
- gap.setLength(gap.length + length)
- if (nextGap) {
- gap.setLength(gap.length + next.length)
- gap.next0 = next.next
- next.next.prev0 = gap
- } else {
- gap.next0 = next
- next.prev0 = gap
- }
- (ret, gap)
- }
- else if (nextGap) {
- val gap = next.asInstanceOf[Gap]
- gap.setLength(gap.length + length)
- gap.prev0 = prev
- prev.next0 = gap
- (0, gap)
- }
- else {
- prev.next0 = next
- next.prev0 = prev
- val gap = new Gap(prev)
- gap.setLength(length)
- (0, gap)
- }
-
- }
- def insert(prev1: HasNext) {
- next0 = prev1.next
- prev0 = prev1
- prev0.next0 = this
- next0.prev0 = this
- }
-
- } // Actual
-
- final class Gap extends Actual {
- def this(prev1: HasNext) = {
- this()
- insert(prev1)
- }
- override def toString() = "gap-" + length
-
- var length0: Int = -1
- def length: Int = length0
- def setLength(length1: Int) = length0 = length1
-
- // already gap
- override def convertToGap: (Int, Actual) = (0, this)
- }
-
- def Process(unit: CompilationUnit) = new Process(unit)
- class Process(val unit: CompilationUnit) {
- private var doLog = true
- def source = unit.source
-
- def dbg(tree: Tree) = {
- def treePos: Position = if (tree ne null) tree.pos else NoPosition;
- (
- "TREE=" + tree +
- (if (tree ne null) (" CLASS=" + tree.getClass()) else "") +
- " SYM=" + tree.symbol +
- " POS=" +
- treePos.dbgString
- )}
-
- val symbols = new HashMap[Symbol, Info]
-
- class Info(val symbol: Symbol) {
- var defined : Def = _
- val uses = new HashSet[Use]
- symbols.update(symbol, this)
- }
-
- def info(symbol: Symbol): Info =
- if (symbols.contains(symbol)) symbols(symbol)
- else new Info(symbol)
-
- abstract class Semantic(val symbol: Symbol) extends Actual {
- val name = NameTransformer.decode(symbol.name.toString).trim()
- assert(symbol != NoSymbol)
- def myOuter = Process.this
-
- def tpe: Type = symbol.tpe
-
- def length = name.length()
- def info: Info = if (symbols.contains(symbol)) symbols(symbol) else new Info(symbol)
-
- def kind = {
- val term0 = symbol
- if (false) null
- else if (term0.isVariable) VAR
- else if (term0.isValueParameter) ARG
- else if (term0.isMethod) DEF
- else if (term0.isClass) CLASS
- else if (term0.isModule) OBJECT
- else if (term0.isValue) VAL
- else if (term0.isTypeParameter) TPARAM
- else if (term0.isType ) TPARAM
- else {
- // Console.err.println("UNRECOGNIZED SYMBOL: " + term0 + " " + name);
- null
- }
- }
- }
-
- class Def(symbol0: Symbol) extends Semantic(symbol0) {
- info.defined = this
- override def toString() = "def-" + name + "-" + symbol.getClass()
- }
- class Use(symbol0: Symbol, tpe0: Type) extends Semantic(symbol0) {
- info.uses += this
-
- override def tpe : Type = if (tpe0 ne null) tpe0 else super.tpe;
- override def toString() = "use-" + name + "-" + symbol.getClass();
- }
- val list = new TokenList
-
- //build(unit.body)
- val map = new scala.collection.mutable.LinkedHashMap[Int,Symbol]
- map.clear // populate the map.
- class visitor extends walker.Visitor {
- def contains(pos : Position) = map.contains(pos.point)
- def apply(pos : Position) = map(pos.point)
- def update(pos : Position, sym : Symbol) : Unit = if (pos.isDefined) {
- val offset = pos.point
- map(offset) = sym
- val isDef = pos.point == sym.pos.point
- list.put(offset, (if (isDef) new Def(sym) else new Use(sym, NoType)));
- }
- }
- walker.walk(unit.body, new visitor)(offset => unit.source.identifier(offset, compiler))
-
-
- // ok start building....
- def build[T <: Tree](trees: List[T]) {
- for (tree <- trees) build(tree)
- }
-
- def build(tree0: Tree): Unit = try {
- /* if (tree0.pos != NoPosition) */ tree0 match {
- case tree: ImplDef =>
- val pos = eatKeywords(unit.source.asInstanceOf[BatchSourceFile], tree.pos.point)
- if (pos == -1) {
-
- } else buildDef(tree.symbol, eatKeywords(unit.source.asInstanceOf[BatchSourceFile], tree.pos.point));
- tree match {
- case cdef: ClassDef => build(cdef.tparams)
- case _ => ;
- }
- build(tree.impl.parents)
- build(tree.impl.body)
- case tree: ValOrDefDef =>
- if (!tree.symbol.hasAccessorFlag || tree.symbol.isDeferred) {
- // MO: I added !tree.symbol.hasFlag(DEFERRED) in a refactoring where
- // getters now can be abstract whereas before they could not.
- // Adding the condition thus keeps the old behavior.
- // todo: review whether this is correct, or whether abstract getters should be included.
- {
- val pos : Int = if (tree.name.toString().equals("<init>")) -1 else
- eatKeywords(unit.source.asInstanceOf[BatchSourceFile], tree.pos.point);
- if (false) Console.err.println("VALDEF: tree=" + tree + " sym=" + tree.symbol + " pos0=" +
- tree.symbol.pos + " alias=" + tree.symbol.alias + " pos1=" +
- pos + " pos2=" + tree.pos.dbgString + " " + tree.symbol.isSynthetic);
-
- if (pos != -1 && !tree.isSynthetic)
- buildDef(tree.symbol, pos);
- }
-
- if (tree.isInstanceOf[DefDef]) {
- val ddef = tree.asInstanceOf[DefDef];
- build(ddef.tparams);
-
- for (l0 <- ddef.vparamss; arg <- l0) {
- val pos0 : Int = if (!unit.source.beginsWith(arg.pos.point, "val ")) arg.pos.point;
- else unit.source.skipWhitespace(arg.pos.point + ("val ").length());
- buildDef(arg.symbol, pos0);
- build(arg.tpt);
- }
- }
- //TPT=scala.Iterator[DocGenerator.this.compiler0.CompilationUnit] 260 class scala.tools.nsc.ast.Trees$TypeTree scala.Iterator[DocGenerator.this.compiler0.CompilationUnit] class scala.tools.nsc.symtab.Types$$anon$5
- if ((tree.tpt eq null) || (tree.tpt.tpe eq null)) {
- //Console.err.println("BAD: " + tree.tpt + " in " + tree);
- } else {
- //Console.err.println("TPT=" + tree.tpt + " " + tree.tpt.pos + " " + tree.tpt.getClass() + " " + tree.tpt.tpe + " " + tree.tpt.tpe.getClass() + " " + tree.tpt.tpe.getClass().getSuperclass());
- build(tree.tpt);
- }
- //Console.err.println("RHS: " + tree.rhs + " " + tree.rhs.getClass() + " " + tree.rhs.getClass().getSuperclass());
- build(tree.rhs);
- }
- case tree: PackageDef =>
- //Console.err.println("PACKAGE: " + tree.name);
- if (false) {
- val pos = eatKeywords(unit.source.asInstanceOf[BatchSourceFile], tree.pos.pointOrElse(-1))
- if (pos != -1)
- buildDef(tree.symbol, pos)
- }
- build(tree.stats)
- case tree: Function =>
- for (arg <- tree.vparams if arg.pos != NoPosition) {
- val name = arg.name.toString().trim()
- val pos: Int =
- if (unit.source.beginsWith(arg.pos.pointOrElse(-1), "val "))
- unit.source.skipWhitespace(arg.pos.pointOrElse(-1) + ("val ").length())
- else if (unit.source.asInstanceOf[BatchSourceFile].content(arg.pos.point) == ':') {
- var posx : Int = arg.pos.point
- while (unit.source.asInstanceOf[BatchSourceFile].content(posx - 1).isWhitespace) posx = posx - 1
- posx - name.length()
- } else arg.pos.point
- buildDef(arg.symbol, pos)
- build(arg.tpt)
- }
- build(tree.body)
- case tree : TypeTree =>
- val treex = tree
- val tree1 = if (tree.original ne null) tree.original else tree
- def classes(clazz: AnyClass): List[AnyClass] =
- if (clazz eq null) Nil
- else clazz :: classes(clazz.getSuperclass())
- if (tree.original eq null) {
- if (false) Console.err.println("NO_ORIGINAL: " + tree + " " + tree.tpe + " " + classes(tree.tpe.getClass()));
- }
- if (tree.tpe ne null) buildT(tree1, tree.tpe);
- def buildT( tree : Tree, tpe : Type) : Unit = if (tree.pos != NoPosition) tpe match {
- case tpe0 : TypeRef => tree match {
- case apt : AppliedTypeTree =>
- buildUse(tpe.typeSymbol, apt.tpt.pos.pointOrElse(-1), tpe0);
- //Console.err.println("APT: " + treex + " vs. " + treex.original);
- //Console.err.println("APT: " + treex.pos + " vs. " + treex.original.pos + " " + unit.source.dbg(treex.original.pos));
- //Console.err.println("APT: " + apt.tpt + " sym0=" + apt.tpt.symbol + " sym1=" + tpe0.sym + " apt.args=" + apt.args + " tpe0.args=" + tpe0.args);
-
- buildTs (apt.args, tpe0.args);
- case ident : Ident => buildUse(tpe0.sym, ident.pos.pointOrElse(-1), tpe0);
- case select : Select =>
- if (select.symbol == NoSymbol)
- try {
- // build(select);
- buildUse(tpe0.typeSymbol, selectPos(select), tpe0);
- //Console.err.println("QUALIFIER: " + select.qualifier + " " + unit.source.dbg(select.qualifier.pos) + " " + tpe0.prefix + " " + tpe0.prefix.getClass() + " " + tpe0.prefix.getClass().getSuperclass() +" " + tpe0.prefix.widen + " " + tpe0.prefix.toLongString);
- buildT(select.qualifier, tpe0.prefix);
- } catch {
- case e : Error =>
- Console.err.println("BUILD_SELECT: " + select + " @ " + tpe0 + " " + (select.pos).dbgString);
- throw e;
- }
- case tpt : TypeTree =>
- if (tpt.symbol ne null) {
- Console.err.println("SYM0 " + tpt.symbol + " " + (tpt.pos).dbgString);
- buildUse(tpt.symbol, tpt.pos.pointOrElse(-1), tpe0);
- } else if (tpe0.typeSymbol ne null) {
- //Console.err.println("TYPE_SYM1 " + tpe0.symbol + " " + unit.source.dbg(tpt.pos));
- buildUse(tpe0.typeSymbol, tpt.pos.pointOrElse(-1), tpe0);
- } else {
- Console.err.println("UNKNOWN TPT0: " + (tpt.pos).dbgString + " tpt=" + tpt + " " + tpt.symbol + " tpe0="+ tpe0 + " " + tpe0.typeSymbol + " tpe0.args=" + tpe0.args);
- }
- case sft : SelectFromTypeTree =>
- build(sft.qualifier); // XXX: broken
- if (false) Console.err.println("SFTT: " + sft + " sym=" + sft.symbol + " name=" + sft.name + " qual=" + sft.qualifier + " qual.sym=" +
- sft.qualifier.symbol +
- " qual.pos=" + (sft.qualifier.pos).dbgString + " symbol=" + sft.symbol + " type=" + tpe0 +
- " type.sym=" + tpe0.typeSymbol);
- case _ => Console.err.println("UNKNOWN TPT2: " + tree + " vs. " + tpe0 + " " + tree.getClass() + " " + (tree.pos).dbgString);
- }
- case tpe0 : MethodType => tree match {
- case tpt: TypeTree =>
- if (tpt.original ne null) buildT(tpt.original, tpe);
- else {
- Console.err.println("UNKNOWN TPT3: " + tree + " vs. " + tpe0 + " " + (tree.pos).dbgString);
- }
- case ident : Ident => buildT(ident, tpe0.resultType);
- case select : Select => buildT(select, tpe0.resultType);
- case _ => Console.err.println("UNKNOWN TPE: " + tree + " vs. " + tpe0 + " " + tree.getClass());
- }
- case tpe0 : RefinedType => tree match {
- case cpt : CompoundTypeTree =>
- buildTs(cpt.templ.parents, tpe0.parents);
-
- case _ : TypeTree =>
- // Console.err.println("UNKNOWN TPE13: " + dbg(tree) + " tpe0=" + tpe0 + " " + tpe0.parents);
- case _ =>
- if (false) Console.err.println("UNKNOWN TPE5: " + dbg(tree) + " tpe0=" + tpe0 + " " + tpe0.parents);
- }
- case tpe0 : ThisType => tree match {
- case stt : SingletonTypeTree => stt.ref match {
- case ths : This => build(ths);
-
- case _ => Console.err.println("UNKNOWN TPE11: " + tpe0 + " " + stt + " " + stt.ref + " " + stt.ref.getClass() + " " + (tree.pos).dbgString);
- }
- case tt : This =>
- case _ : Ident =>
- case _ : Select =>
- case tt : TypeTree =>
- if (false) Console.err.println("UNKNOWN TPE12: " + tpe0 + " " + tree + " " + tree.getClass() + " " + (tree.pos).dbgString);
- case _ =>
- if (false) Console.err.println("UNKNOWN TPE10: " + tpe0 + " " + tree + " " + tree.getClass() + " " + (tree.pos).dbgString);
- }
- case tpe0 : SingleType => tree match {
- case ident : Ident => buildUse(tpe0.sym, ident.pos.pointOrElse(-1), tpe0);
- case select : Select =>
- buildUse(tpe0.termSymbol, selectPos(select), tpe0);
- //Console.err.println("QUALIFIER-0: " + select.qualifier + " " + unit.source.dbg(select.qualifier.pos) + " " + tpe0.prefix + " " + tpe0.prefix.getClass() + " " + tpe0.prefix.getClass().getSuperclass() +" " + tpe0.prefix.widen + " " + tpe0.prefix.toLongString);
- buildT(select.qualifier, tpe0.prefix);
-
- case _ =>
- if (false) Console.err.println("UNKNOWN TPE8: " + tree + " " + (tree.pos).dbgString + " TPE=" + tpe0 + " PRE=" + tpe0.pre + " SYM=" + tpe0.sym);
-
- }
- case ctype : ConstantType =>
- case ErrorType =>
- case _ => {
- if (false) Console.err.println("UNKNOWN TPE4: " + tree + " " + tpe + " " + tpe.getClass() + " " + (tree.pos).dbgString);
- }
- };
- def buildTs(trees : List[Tree], types : List[Type]): Unit = if (!trees.isEmpty && !types.isEmpty) {
- buildT (trees.head, types.head);
- buildTs(trees.tail, types.tail);
- } else if (trees.isEmpty != types.isEmpty) {
- if (false && doLog) {
- Console.println("" + treex + " vs. " + treex.original);
- if (treex.original ne null)
- Console.println("" + treex.tpe + " vs. " + treex.original.tpe);
- logError("Tree vs. Type mismatch: " + trees + " " + types + " " + (tree.pos).dbgString, null);
- doLog = false;
- }
- };
- case tree: Bind =>
- buildDef(tree.symbol, tree.pos.pointOrElse(-1))
- build(tree.body)
- case tree: Ident =>
- buildUse(tree.symbol, tree.pos.pointOrElse(-1), tree.tpe)
- case tree: Select =>
- try {
- build(tree.qualifier)
- } catch {
- case e : Error => Console.err.println("SELECTQ: " + tree + " " + tree.qualifier + " " + (tree.qualifier.pos).dbgString); throw e;
- }
- try {
- if (tree.pos.isDefined && tree.pos.point >= unit.source.length) {
- if (false) Console.err.println("BAD_SELECT_QUALIFIER " + tree + " @ " + (tree.pos).dbgString);
-
- } else {
- //Console.err.println("SELECT-0: " + tree.symbol + " " + tree.pos.dbgString + " " + (tree.pos - selectPos(tree)));
- buildUse(tree.symbol, selectPos(tree), tree.tpe);
- }
- } catch {
- case e : Error => Console.err.println("SELECTU: " + tree + " " + tree.symbol + " " + tree.pos.dbgString); throw e;
- }
- case tree: TypeApply =>
- //Console.err.println("TYPE_APPLY: " + tree + " " + tree.pos.dbgString);
- if (!tree.args.isEmpty) {
- //Console.err.println("ARGS: " + unit.source.dbg(tree.args0.head.pos));
- }
- build(tree.fun)
- build(tree.args)
- case tree: Apply =>
-
- build(tree.fun)
- build(tree.args)
- case tree: GenericApply =>
-
- build(tree.fun)
- build(tree.args)
- case tree: Typed =>
- build(tree.expr)
- build(tree.tpt)
- case tree: Block =>
- if (false) {
- if (!tree.stats.isEmpty)
- Console.err.println("BLOCKS: " + tree.stats.head + " " + tree.stats.head.getClass());
- Console.err.println("BLOCKE: " + tree.expr + " " + tree.expr.getClass())
- }
- build(tree.stats)
- build(tree.expr)
- case tree: CaseDef =>
- build(tree.pat)
- build(tree.guard)
- build(tree.body)
- case tree : Assign => build(tree.lhs); build(tree.rhs);
- case tree : If => build(tree.cond); build(tree.thenp); build(tree.elsep);
- case tree : New =>
- //Console.err.println("NEW: " + tree.tpt + " " + tree.tpt.getClass());
- build(tree.tpt);
- case tree : Match => build(tree.selector); build(tree.cases);
- case tree : Return => build(tree.expr);
- case tree : LabelDef => build(tree.rhs);
- case tree : Throw => build(tree.expr);
- case tree : Try => build(tree.block); build(tree.catches); build(tree.finalizer);
- case tree : Alternative => build(tree.trees);
- case tree : This =>
-
- if (tree.symbol ne null) buildUse(tree.symbol, tree.pos.pointOrElse(-1), tree.tpe);
- //Thread.dumpStack();
- case tree : TypeDef =>
- //Console.err.println("ALIAS: " + tree);
- build(tree.rhs); build(tree.tparams); buildDef(tree.symbol, tree.pos.pointOrElse(-1));
- case tree : DocDef => build(tree.definition);
- case tree: Import => build(tree.expr)
- case tree: AppliedTypeTree => ;
- case tree: Annotated => ;
- case tree: SingletonTypeTree => ;
- case tree: Super => ;
- case tree: Literal => ;
- case EmptyTree => ;
- case _ => ;
- Console.err.println("BAIL: " + (tree0.pos) + " " + tree0 + " " + tree0.getClass());
- }
- } catch {
- case t: Throwable =>
- logError("Error occured at " + (tree0.pos), t)
- }
-
- def buildUse(term: Symbol, pos: Int, tpe: Type) = buildSym(term, pos, false, tpe)
- def buildDef(term: Symbol, pos: Int) = buildSym(term, pos, true, null)
-
- def buildSym(term: Symbol, pos: Int, isDef: Boolean, tpe: Type): Unit =
- if (term.hasAccessorFlag)
- buildSym(analyzer.underlying(term), pos, isDef, tpe)
- else if (pos == -1) {
- //Console.err.println("NOPOS: " + term)
- //Thread.dumpStack()
- }
- else if (term != NoSymbol) {
- val name = NameTransformer.decode(term.name.toString).trim()
- val buf = unit.source.asInstanceOf[BatchSourceFile].content
- val cs = name.toChars
- var idx = 0
- if (cs.length + pos > buf.length) return
- else while (idx < cs.length) {
- if (buf(pos + idx) != cs(idx)) {
- //Console.err.println("MISMATCH: " + name + "[" + idx + "] " + unit.source.dbg(pos));
- //Thread.dumpStack();
- return;
- }
- else idx = idx + 1;
- }
- if (cs.length + pos + 1 < buf.length) {
- if (isJavaIdentifierPart(buf(pos + cs.length))) {
- //Console.err.println("MISMATCH: " + name + "[last] " + unit.source.dbg(pos));
- return;
- }
- }
- try {
- list.put(pos, (if (isDef) new Def(term) else new Use(term, tpe)));
- } catch {
- case e : Error => e.printStackTrace();
- }
- }
-
- def selectPos(tree: Select): Int = if (tree.pos == NoPosition) -1 else {
- val buf = unit.source.asInstanceOf[BatchSourceFile].content
- if (tree.pos.point >= buf.length) {
- if (false) {
- Console.err.println("" + tree + "@" + tree.pos + " not in " +
- unit.source.file.name + "[" + buf.length + "]");
- Thread.dumpStack()
- abort()
- }
- return 0
- }
-
- val pos : Int =
- if (buf(tree.pos.point) != '.') tree.pos.point
- else {
- def f(x : Int) : Int = {
- if (buf(x).isWhitespace) f(x + 1)
- else x
- }
- f(tree.pos.point + 1)
- }
- pos
- };
-
- class TokenList {
- object begin extends HasNext {
- def prev = this
- def length = 0
- }
- object end extends HasPrev {
- def next = this
- def length = 0
- }
- // initialize
- begin.next0 = end
- end.prev0 = begin
-
- def tokenAt(offset: Int) = {
- cursor.seek(offset)
- if (cursor.token.isInstanceOf[Semantic]) cursor.token.asInstanceOf[Semantic]
- else null
- }
-
- def put(offset: Int, tok: Actual): Unit = tok match {
- case tok0: Semantic => put(offset, tok0)
- case gap: Gap =>
- }
-
- def put(offset: Int, tok: Semantic) {
- cursor.seek(offset);
- if (cursor.token == end) {
- assert(offset >= cursor.offset);
- if (offset > cursor.offset) {
- // add a gap.
- val gap = new Gap(end.prev);
- gap.setLength(offset - cursor.offset);
- cursor.offset = offset;
- }
- // append.
- tok.insert(end.prev);
- cursor.offset = cursor.offset + tok.length;
- } else if (!cursor.token.isInstanceOf[Gap]) {
- val sem = cursor.token.asInstanceOf[Semantic];
- if (sem.symbol == tok.symbol) return;
- if (sem.symbol != tok.symbol &&
- sem.symbol.getClass() == tok.symbol.getClass() &&
- sem.symbol.pos == tok.symbol.pos) return;
- } else {
- val gap = cursor.token.asInstanceOf[Gap];
- if (!(offset - cursor.offset + tok.length <= gap.length)) {
- Console.err.println("LIST =" + this);
- Console.err.println("OFFSET=" + offset + " " + tok + " " + tok.length);
- Console.err.println(" " + cursor.offset + " " + gap.length);
- gap.length0 = offset - cursor.offset + tok.length
- //abort();
- }
- if (offset == cursor.offset) {
- // replace or prepend
- tok.prev0 = gap.prev0;
- if (tok.length == gap.length) { // replace gap
- tok.next0 = gap.next0;
- } else {
- gap.setLength(gap.length - tok.length);
- tok.next0 = gap;
- }
- tok.next0.prev0 = tok;
- tok.prev0.next0 = tok;
- cursor.token = tok;
- } else {
- // append
- val diff = (cursor.offset + gap.length) - (offset + tok.length);
-
- gap.setLength(gap.length - tok.length - diff);
- tok.prev0 = gap;
- tok.next0 = gap.next;
- tok.next0.prev0 = tok;
- tok.prev0.next0 = tok;
- if (diff != 0) {
- val gap0 = new Gap(tok);
- gap0.setLength(diff);
- }
- }
- }
- }
-
- override def toString(): String = {
- var node = begin.next
- var str = ""
- while (node != end) {
- str = str + " " + node
- node = node.next
- }
- str
- }
-
- object cursor {
- var token: Token = end
- var offset: Int = 0
-
- def next(): Unit = if (token == end) end else {
- offset = offset + token.length
- token = token.next
- }
- def prev(): Unit = if (token.prev == begin) token else {
- offset = offset - token.prev.length
- token = token.prev
- }
- def seek(soffset: Int): Unit = if (soffset == 0) {
- token = begin.next
- offset = 0
- } else {
- assert(soffset > 0)
- while (offset > soffset) prev;
- while (offset + token.length <= soffset && token != end) {
- val len0 = offset;
- next;
- }
- }
- def convertToGap = if (token.isInstanceOf[Actual]) {
- val ret = token.asInstanceOf[Actual].convertToGap;
- offset = offset - ret._1;
- token = ret._2;
- }
- }
-
- // add or delete characters
- def adjust(offset: Int, /* where */
- length: Int, /* how many characters are modified */
- to : Int /* length of new string */) = {
- cursor.seek(offset)
- if (cursor.token != end) {
- cursor.convertToGap
- while (cursor.offset + cursor.token.length < offset + length && cursor.token.next != end) {
- val save = cursor.offset
- cursor.next
- cursor.convertToGap
- assert(cursor.offset == save)
- }
- if (length != to && cursor.token != end) {
- val diff = to - length;
- val gap = cursor.token.asInstanceOf[Gap];
- gap.setLength(gap.length + diff);
- };
- }
- }
-
- } // TokenList
-
- }
-}
-
diff --git a/src/attic/scala/tools/nsc/models/Signatures.scala b/src/attic/scala/tools/nsc/models/Signatures.scala
deleted file mode 100644
index 1d414b6..0000000
--- a/src/attic/scala/tools/nsc/models/Signatures.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package models
-
-import scala.collection.mutable.{HashMap, HashSet}
-import scala.tools.nsc.{Global => Compiler}
-import scala.tools.nsc.symtab.{Flags, Names}
-import scala.tools.nsc.util.{ Position, SourceFile }
-import scala.reflect.NameTransformer
-
-/** This class ...
- *
- * @author Sean McDirmid
- * @version 1.0
- */
-class Signatures(val compiler: Compiler) {
- import compiler._
-
- class Signature(val name: String, val children: List[Signature]) {
- def asString: String = name + "[" + asString0(children) + "]"
- }
-
- def sort(sigs: List[Signature]) = sigs sortBy (_.name) reverse
-
- def asString0(sigs: List[Signature]): String =
- sort(sigs) map (_.asString) mkString
-
- def signature(unit: CompilationUnit): String =
- asString0(signature(unit.body, Nil))
-
- def signature(trees: List[Tree]): List[Signature] = {
- var ret : List[Signature] = Nil
- for (tree <- trees) ret = signature(tree, ret)
- ret
- }
-
- /**
- * @param tree0 ...
- * @param rest ...
- * @return ...
- */
- def signature(tree0: Tree, rest: List[Signature]): List[Signature] = tree0 match {
- case tree: MemberDef => if (!tree.mods.isPrivate) {
- val name = "" + tree.name + "::" +
- (tree.mods &~ Flags.SYNTHETIC)
-
- val children: List[Signature] = tree match {
- case impl: ImplDef
- //if (!impl.name.toString.contains("$anonfun$")) =>
- if (impl.name.pos("$anonfun$") == name.length) =>
- val supers = new Signature("$$supers", signature(impl.impl.parents))
- val body = new Signature("$$body", signature(impl.impl.body))
- val ret = supers :: body :: Nil
- impl match {
- case cdef: ClassDef =>
- new Signature("$$tparams", signature(cdef.tparams)) :: ret
- case _ =>
- ret
- }
- case vdef: ValOrDefDef =>
- val ret = signature(vdef.tpt, Nil)
- vdef match {
- case ddef : DefDef =>
- val tparams = new Signature("$$tparams", signature(ddef.tparams))
- var vparamss : List[Signature] = Nil
- for (list <- ddef.vparamss)
- vparamss = signature(list) ::: vparamss
- new Signature("$$ret", ret) :: tparams :: vparamss
- case _ =>
- ret
- }
- case pdef: PackageDef => signature(pdef.stats)
- case _ => Nil
- }
- new Signature(name, children) :: rest
-
- } else rest
- case tree: TypeTree => new Signature("" + tree.tpe, Nil) :: rest
- case _ => rest
- }
-}
diff --git a/src/attic/scala/tools/nsc/symtab/SymbolWalker.scala b/src/attic/scala/tools/nsc/symtab/SymbolWalker.scala
deleted file mode 100644
index 8c11187..0000000
--- a/src/attic/scala/tools/nsc/symtab/SymbolWalker.scala
+++ /dev/null
@@ -1,253 +0,0 @@
-package scala.tools.nsc
-package symtab
-
-trait SymbolWalker {
- val global : Global
- import global._
- import scala.collection.mutable.LinkedHashSet
- trait Visitor {
- def update(pos : Position, sym : Symbol) : Unit
- def contains(pos : Position) : Boolean
- def apply(pos : Position) : Symbol
- def putDef(sym : Symbol, pos : Position) : Unit = ()
- }
- import scala.collection.mutable.Map
- /*
- implicit def map2use(map : Map[Position,Symbol]) = new Visitor {
- def update(pos : Position, sym : Symbol) : Unit = map.update(pos, sym)
- def contains(pos : Position) : Boolean = map.contains(pos)
- def apply(pos : Position) : Symbol = map.apply(pos)
- }
- */
- private def validSym(t: Tree) = t.symbol != NoSymbol && t.symbol != null
- private def validSym(tp: Type) = tp != null && tp.typeSymbol != NoSymbol && tp.typeSymbol != null
- private def notNull(tp: Type) = tp.typeSymbol != null
- private def isNoSymbol(t: Tree) = t.symbol eq NoSymbol
-
- def walk(tree: Tree, visitor : Visitor)(fid : (util.Position) => Option[String]) : Unit = {
- val visited = new LinkedHashSet[Tree]
- def f(t : Tree) : Unit = {
- if (visited.add(t)) return
-
- def fs(l: List[Tree]) = l foreach f
- def fss(l: List[List[Tree]]) = l foreach fs
-
- val sym = (t, t.tpe) match {
- case (Super(_,_),SuperType(_,supertp)) if validSym(supertp) => supertp.typeSymbol
- case _ if validSym(t) => t.symbol
- case (t: TypeTree, tp) if validSym(tp) => tp.typeSymbol
- case (t: TypeTree, tp) if validSym(tp.resultType) => tp.resultType.typeSymbol
- case (t, tpe: Type) if isNoSymbol(t) && tpe.termSymbol != null =>
- if (t.isTerm) tpe.termSymbol
- else t.tpe match {
- case x: TypeRef => x.sym // XXX: looks like a bug
- case _ => tpe.typeSymbol
- }
- case _ => NoSymbol
- }
-
- if (sym != null && sym != NoSymbol /* && !sym.hasFlag(SYNTHETIC) */) {
- var id = fid(t.pos)
- val doAdd = if (id.isDefined) {
- if (id.get.charAt(0) == '`') id = Some(id.get.substring(1, id.get.length - 1))
- val name = sym.name.decode.trim
- if ((name startsWith id.get) || (id.get startsWith name)) true
- else {
- false
- }
- } else false
- if (doAdd) {
-
- if (!visitor.contains(t.pos)) {
- visitor(t.pos) = sym
- } else {
- val existing = visitor(t.pos)
- if (sym.sourceFile != existing.sourceFile || sym.pos != existing.pos) {
- (sym,existing) match {
- case (sym,existing) if sym.pos == existing.pos =>
- case (sym : TypeSymbol ,_ : ClassSymbol) => visitor(t.pos) = sym
- case (_ : ClassSymbol,_ : TypeSymbol) => // nothing
- case _ if sym.isModule && existing.isValue => // nothing
- case _ if sym.isClass && existing.isMethod => // nothing
- case _ =>
- assert(true)
- }
- }
- }}
- }
- t match {
- case t : DefTree if t.symbol != NoSymbol =>
- if (t.pos != NoPosition)
- visitor.putDef(t.symbol, t.pos)
- if (t.symbol.isClass) {
- val factory = NoSymbol // XXX: t.symbol.caseFactory
- if (factory != NoSymbol) {
- visitor.putDef(factory, t.pos)
- }
- }
- case t : TypeBoundsTree => f(t.lo); f(t.hi)
- case t : TypeTree if t.original != null =>
- def h(original : Tree, tpe : Type): Unit = try {
- if (original.tpe == null)
- original.tpe = tpe
- (original) match {
- case (AppliedTypeTree(_,trees)) if tpe.isInstanceOf[TypeRef] =>
- val types = tpe.asInstanceOf[TypeRef].args
- trees.zip(types).foreach{
- case (tree,tpe) => assert(tree != null && tpe != null); h(tree, tpe)
- }
- case _ =>
- }
- }
- if (t.original.tpe == null) {
- val dup = t.original.duplicate
- h(dup,t.tpe)
- f(dup)
- } else f(t.original)
- ()
- case _ =>
- }
- (t) match {
- case (t : MemberDef) if t.symbol != null && t.symbol != NoSymbol =>
- val annotated = if (sym.isModule) sym.moduleClass else sym
- val i = t.mods.annotations.iterator
- val j = annotated.annotations.iterator
- while (i.hasNext && j.hasNext) {
- val tree = i.next
- val ainfo = j.next
- val sym = ainfo.atp.typeSymbol
- tree.setType(ainfo.atp)
- tree.setSymbol(sym)
- f(tree)
- }
-
- case _ =>
- }
- t match {
- case tree: ImplDef =>
- fs(tree.impl.parents); f(tree.impl.self); fs(tree.impl.body)
- tree match {
- case tree : ClassDef => fs(tree.tparams)
- case _ =>
- }
- case tree: PackageDef => fs(tree.stats)
- case tree: ValOrDefDef =>
- f(tree.rhs);
- if (tree.tpt != null) {
- f(tree.tpt)
- }
- tree match {
- case tree : DefDef => fs(tree.tparams); fss(tree.vparamss)
- case _ =>
- }
- case tree: Function => fs(tree.vparams); f(tree.body)
- case tree : Bind => f(tree.body)
- case tree : Select =>
- val qualifier = if (tree.tpe != null && tree.qualifier.tpe == null) {
- val pre = tree.tpe.prefix
- val qualifier = tree.qualifier.duplicate
- qualifier.tpe = pre
- qualifier
- } else tree.qualifier
-
- f(qualifier)
- case tree : Annotated => f(tree.annot); f(tree.arg)
- case tree : GenericApply => f(tree.fun); fs(tree.args)
- case tree : UnApply => f(tree.fun); fs(tree.args)
- case tree : AppliedTypeTree =>
- if (tree.tpe != null) {
- val i = tree.tpe.typeArgs.iterator
- val j = tree.args.iterator
- while (i.hasNext && j.hasNext) {
- val tpe = i.next
- val arg = j.next
- if (arg.tpe == null) {
- arg.tpe = tpe
- }
- }
- if (tree.tpt.tpe == null) {
- tree.tpt.tpe = tree.tpe
- }
-
- }
- f(tree.tpt); fs(tree.args)
-
- case tree : ExistentialTypeTree=>
- if (tree.tpt.tpe == null) {
- tree.tpt.tpe = tree.tpe
- }
-
- f(tree.tpt)
- fs(tree.whereClauses)
- case tree : SingletonTypeTree =>
- if (tree.ref.tpe == null) {
- val dup = tree.ref.duplicate
- dup.tpe = tree.tpe
- f(dup)
- } else f(tree.ref)
- case tree : CompoundTypeTree =>
- if (tree.tpe != null && tree.tpe.typeSymbol != null && tree.tpe.typeSymbol.isRefinementClass) tree.tpe.typeSymbol.info match {
- case tpe : RefinedType =>
- tpe.parents.zip(tree.templ.parents).foreach{
- case (tpe,tree) =>
- if (tree.hasSymbol && (tree.symbol == NoSymbol || tree.symbol == null)) {
- tree.symbol = tpe.typeSymbol
- }
- }
-
- case _ =>
- }
-
- f(tree.templ)
- case tree : Template => fs(tree.parents); f(tree.self); fs(tree.body)
- case tree : SelectFromTypeTree => {
- if (tree.qualifier.tpe == null) tree.tpe match {
- case tpe : TypeRef =>
- // give it a type!
- tree.qualifier.tpe = tpe.prefix
- case _ =>
- // tree.tpe.pre
- }
- f(tree.qualifier)
- }
- case tree : Literal =>
- /*
- if (tree.tpe != null && tree.tpe.typeSymbol == definitions.ClassClass) {
- // nothing we can do without original tree.
- }
- */
-
- case tree : Typed => f(tree.expr); f(tree.tpt)
- case tree : Block => fs(tree.stats); f(tree.expr)
- case tree: CaseDef => f(tree.pat);f(tree.guard);f(tree.body)
- case tree : Assign => f(tree.lhs); f(tree.rhs);
- case tree : If => f(tree.cond); f(tree.thenp); f(tree.elsep);
- case tree : New => f(tree.tpt);
- case tree : Match => f(tree.selector); fs(tree.cases);
- case tree : Return => f(tree.expr);
- case tree : LabelDef => f(tree.rhs);
- case tree : Throw => f(tree.expr);
- case tree : Try => f(tree.block); fs(tree.catches); f(tree.finalizer);
- case tree : Alternative => fs(tree.trees);
- case tree : TypeDef =>
- (tree.tpe,sym) match {
- case (null,sym : TypeSymbol) if (sym.rawInfo.isComplete) =>
- if (tree.tparams.isEmpty) {
- if (tree.rhs.tpe == null) tree.rhs.tpe = sym.info
- f(tree.rhs)
- } else {
- val tree0 = AppliedTypeTree(tree.rhs, tree.tparams)
- tree0.tpe = sym.info
- f(tree0)
- }
- case _ => f(tree.rhs); fs(tree.tparams)
- }
- case tree : DocDef => f(tree.definition);
- case tree: Import => f(tree.expr)
- case _ =>
- }
- }
- f(tree)
- }
-
-}
diff --git a/src/build/InnerObjectTestGen.scala b/src/build/InnerObjectTestGen.scala
new file mode 100644
index 0000000..b661126
--- /dev/null
+++ b/src/build/InnerObjectTestGen.scala
@@ -0,0 +1,308 @@
+import scala.collection.mutable
+
+/** All contexts where objects can be embedded. */
+object Contexts extends Enumeration {
+ val Class, Object, Trait, Method, PrivateMethod, Anonfun, ClassConstructor, TraitConstructor, LazyVal, Val = Value
+
+ val topLevel = List(Class, Object, Trait)
+}
+
+
+/** Test generation of inner objects, trying to cover as many cases as possible. It proceeds
+ * by progressively adding nesting layers around a 'payload body'.
+ *
+ * There are three scenarios (each generating a full combinatorial search):
+ * - plain object with single-threaded access
+ * - private object with single-threaded access
+ * - plain object with multi-threaded access.
+ *
+ * Special care is taken to skip problematic cases (or known bugs). For instance,
+ * it won't generate objects inside lazy vals (leads to deadlock), or objects that
+ * are initialized in the static constructors (meaning inside 'val' inside a top-level
+ * object, or equivalent).
+ *
+ * Usage: TestGen <nr of levels>
+ * - by default it's 2 leves. Currently, 3-level deep uncovers bugs in the type checker.
+ *
+ * @author Iulian Dragos
+ */
+object TestGen {
+ val testFile = "object-testers-automated.scala"
+
+ val payload =
+""" var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+"""
+
+ val payloadPrivate =
+""" var ObjCounter = 0
+
+ private object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+"""
+
+ val payloadMT =
+""" @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+"""
+
+
+ import Contexts._
+
+ val template =
+"""
+%s
+
+%s
+
+object Test {
+ def main(args: Array[String]) {
+ %s
+ }
+}
+"""
+
+ var counter = 0
+ def freshName(name: String) = {
+ counter += 1
+ name + counter
+ }
+
+ val bodies = new mutable.ListBuffer[String]
+ val triggers = new mutable.ListBuffer[String]
+
+ /** Generate the nesting code. */
+ def generate(depth: Int, // how many levels we still need to 'add' around the current body
+ body: String, // the body of one test, so far
+ trigger: String, // the code that needs to be invoked to run the test so far
+ nested: List[Contexts.Value], // the path from the innermost to the outermost context
+ p: List[Contexts.Value] => Boolean, // a predicate for filtering problematic cases
+ privateObj: Boolean = false) { // are we using a private object?
+
+ def shouldBeTopLevel =
+ ((depth == 1)
+ || (nested.headOption == Some(PrivateMethod))
+ || (nested.isEmpty && privateObj))
+
+ val enums =
+ if (shouldBeTopLevel) Contexts.topLevel else Contexts.values.toList
+
+ if (depth == 0) {
+ if (p(nested)) {bodies += body; triggers += trigger }
+ } else {
+ for (ctx <- enums) {
+ val (body1, trigger1) = ctx match {
+ case Class =>
+ val name = freshName("Class") + "_" + depth
+ ("""
+ class %s {
+ %s
+ def run { %s }
+ }
+ """.format(name, body, trigger), "(new %s).run".format(name))
+
+ case Trait =>
+ val name = freshName("Trait") + "_" + depth
+ ("""
+ trait %s {
+ %s
+ def run { %s }
+ }
+ """.format(name, body, trigger), "(new %s {}).run".format(name))
+
+ case Object =>
+ val name = freshName("Object") + "_" + depth
+ ("""
+ object %s {
+ %s
+ def run { %s } // trigger
+ }
+ """.format(name, body, trigger), "%s.run".format(name))
+
+ case Method =>
+ val name = freshName("method") + "_" + depth
+ ("""
+ def %s {
+ %s
+ %s // trigger
+ }
+ """.format(name, body, trigger), name)
+
+ case PrivateMethod =>
+ val name = freshName("method") + "_" + depth
+ ("""
+ private def %s {
+ %s
+ %s // trigger
+ }
+ """.format(name, body, trigger), name)
+
+ case Val =>
+ val name = freshName("value") + "_" + depth
+ ("""
+ val %s = {
+ %s
+ %s // trigger
+ }
+ """.format(name, body, trigger), name)
+
+ case LazyVal =>
+ val name = freshName("lzvalue") + "_" + depth
+ ("""
+ lazy val %s = {
+ %s
+ %s // trigger
+ }
+ """.format(name, body, trigger), name)
+
+ case Anonfun =>
+ val name = freshName("fun") + "_" + depth
+ ("""
+ val %s = () => {
+ %s
+ %s // trigger
+ }
+ """.format(name, body, trigger), name + "()")
+
+ case ClassConstructor =>
+ val name = freshName("Class") + "_" + depth
+ ("""
+ class %s {
+ { // in primary constructor
+ %s
+ %s // trigger
+ }
+ }
+ """.format(name, body, trigger), "(new %s)".format(name))
+
+ case TraitConstructor =>
+ val name = freshName("Trait") + "_" + depth
+ ("""
+ trait %s {
+ { // in primary constructor
+ %s
+ %s // trigger
+ }
+ }
+ """.format(name, body, trigger), "(new %s {})".format(name))
+
+ }
+ generate(depth - 1, body1, trigger1, ctx :: nested, p)
+ }
+ }
+ }
+
+ /** Only allow multithreaded tests if not inside a static initializer. */
+ private def allowMT(structure: List[Contexts.Value]): Boolean = {
+ var nesting = structure
+ while ((nesting ne Nil) && nesting.head == Object) {
+ nesting = nesting.tail
+ }
+ if (nesting ne Nil)
+ !(nesting.head == Val)
+ else
+ true
+ } && !objectInsideLazyVal(structure)
+
+ /** Known bug: object inside lazyval leads to deadlock. */
+ private def objectInsideLazyVal(structure: List[Contexts.Value]): Boolean =
+ structure.contains(LazyVal)
+
+
+ def usage() {
+ val help =
+"""
+ Usage: TestGen <nr of levels>
+
+ <nr of levels> - how deeply nested should the objects be? default is 2.
+ (Currently, 3-level deep uncovers bugs in the type checker).
+
+ Test generation of inner objects, trying to cover as many cases as possible. It proceeds
+ by progressively adding nesting layers around a 'payload body'.
+
+ There are three scenarios (each generating a full combinatorial search):
+ - plain object with single-threaded access
+ - private object with single-threaded access
+ - plain object with multi-threaded access.
+
+ Special care is taken to skip problematic cases (or known bugs). For instance,
+ it won't generate objects inside lazy vals (leads to deadlock), or objects that
+ are initialized in the static constructors (meaning inside 'val' inside a top-level
+ object, or equivalent).
+"""
+
+ println(help)
+ System.exit(1)
+ }
+
+ def main(args: Array[String]) {
+ if (args.isEmpty || args.contains("-help")) usage()
+
+ val depth = if (args.length < 1) 2 else args(0).toInt
+
+ val header =
+"""
+/* ================================================================================
+ Automatically generated on %tF. Do Not Edit (unless you have to).
+ (%d-level nesting)
+ ================================================================================ */
+""".format(new java.util.Date, depth)
+
+ generate(depth, payload, "runTest", List(), x => true)
+ // private
+ generate(depth, payloadPrivate, "runTest", List(), x => true, true)
+ generate(depth, payloadMT, "runTest", List(), allowMT)
+
+ println(template.format(header, bodies.mkString("", "\n", ""), triggers.mkString("", "\n", "")))
+ }
+}
diff --git a/src/build/bnd/continuations.bnd b/src/build/bnd/continuations.bnd
new file mode 100644
index 0000000..748502f
--- /dev/null
+++ b/src/build/bnd/continuations.bnd
@@ -0,0 +1,5 @@
+Bundle-Name: Scala Continuations Plugin
+Bundle-SymbolicName: org.scala-lang.plugins.continuations
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
diff --git a/src/build/bnd/scala-actors.bnd b/src/build/bnd/scala-actors.bnd
new file mode 100644
index 0000000..8d05557
--- /dev/null
+++ b/src/build/bnd/scala-actors.bnd
@@ -0,0 +1,5 @@
+Bundle-Name: Scala Actors
+Bundle-SymbolicName: org.scala-lang.scala-actors
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
diff --git a/src/build/bnd/scala-compiler.bnd b/src/build/bnd/scala-compiler.bnd
new file mode 100644
index 0000000..c289843
--- /dev/null
+++ b/src/build/bnd/scala-compiler.bnd
@@ -0,0 +1,8 @@
+Bundle-Name: Scala Compiler
+Bundle-SymbolicName: org.scala-lang.scala-compiler
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
+Import-Package: scala.tools.jline.*;resolution:=optional, \
+ org.apache.tools.ant.*;resolution:=optional, \
+ *
diff --git a/src/build/bnd/scala-library.bnd b/src/build/bnd/scala-library.bnd
new file mode 100644
index 0000000..03aff45
--- /dev/null
+++ b/src/build/bnd/scala-library.bnd
@@ -0,0 +1,6 @@
+Bundle-Name: Scala Standard Library
+Bundle-SymbolicName: org.scala-lang.scala-library
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
+Import-Package: sun.misc;resolution:=optional, *
diff --git a/src/build/bnd/scala-reflect.bnd b/src/build/bnd/scala-reflect.bnd
new file mode 100644
index 0000000..6cda346
--- /dev/null
+++ b/src/build/bnd/scala-reflect.bnd
@@ -0,0 +1,6 @@
+Bundle-Name: Scala Reflect
+Bundle-SymbolicName: org.scala-lang.scala-reflect
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
+Import-Package: scala.tools.nsc;resolution:=optional, *
diff --git a/src/build/bnd/scala-swing.bnd b/src/build/bnd/scala-swing.bnd
new file mode 100644
index 0000000..eeacb9b
--- /dev/null
+++ b/src/build/bnd/scala-swing.bnd
@@ -0,0 +1,5 @@
+Bundle-Name: Scala Swing
+Bundle-SymbolicName: org.scala-lang.scala-swing
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index 6d9e041..b9511c1 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -6,6 +6,8 @@
** |/ **
\* */
+import scala.language.postfixOps
+
/** This program generates the ProductN, TupleN, FunctionN,
* and AbstractFunctionN, where 0 <= N <= MAX_ARITY.
*
@@ -75,7 +77,7 @@ package %s
if (args.length != 1) {
println("please give path of output directory")
- exit(-1)
+ sys.exit(-1)
}
val out = args(0)
def writeFile(node: scala.xml.Node) {
@@ -96,8 +98,8 @@ zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz */
object FunctionZero extends Function(0) {
override def genprodString = "\n// genprod generated these sources at: " + new java.util.Date()
- override def covariantSpecs = "@specialized "
- override def descriptiveComment = functionNTemplate.format("javaVersion", "anonfun0",
+ override def covariantSpecs = "@specialized(Specializable.Primitives) "
+ override def descriptiveComment = " " + functionNTemplate.format("javaVersion", "anonfun0",
"""
* val javaVersion = () => sys.props("java.version")
*
@@ -111,10 +113,10 @@ object FunctionZero extends Function(0) {
object FunctionOne extends Function(1) {
override def classAnnotation = "@annotation.implicitNotFound(msg = \"No implicit view available from ${T1} => ${R}.\")\n"
- override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Float, scala.Double) "
- override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) "
+ override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Float, scala.Double/*, scala.AnyRef*/) "
+ override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double/*, scala.AnyRef*/) "
- override def descriptiveComment = functionNTemplate.format("succ", "anonfun1",
+ override def descriptiveComment = " " + functionNTemplate.format("succ", "anonfun1",
"""
* val succ = (x: Int) => x + 1
* val anonfun1 = new Function1[Int, Int] {
@@ -130,7 +132,7 @@ object FunctionOne extends Function(1) {
* @param g a function A => T1
* @return a new function `f` such that `f(x) == apply(g(x))`
*/
- def compose[A](g: A => T1): A => R = { x => apply(g(x)) }
+ @annotation.unspecialized def compose[A](g: A => T1): A => R = { x => apply(g(x)) }
/** Composes two instances of Function1 in a new Function1, with this function applied first.
*
@@ -138,7 +140,7 @@ object FunctionOne extends Function(1) {
* @param g a function R => A
* @return a new function `f` such that `f(x) == g(apply(x))`
*/
- def andThen[A](g: R => A): T1 => A = { x => g(apply(x)) }
+ @annotation.unspecialized def andThen[A](g: R => A): T1 => A = { x => g(apply(x)) }
"""
}
@@ -146,7 +148,7 @@ object FunctionTwo extends Function(2) {
override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Double) "
override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) "
- override def descriptiveComment = functionNTemplate.format("max", "anonfun2",
+ override def descriptiveComment = " " + functionNTemplate.format("max", "anonfun2",
"""
* val max = (x: Int, y: Int) => if (x < y) y else x
*
@@ -169,27 +171,34 @@ object Function {
class Function(val i: Int) extends Group("Function") with Arity {
def descriptiveComment = ""
- def functionNTemplate = """
+ def functionNTemplate =
+"""
* In the following example, the definition of %s is a
* shorthand for the anonymous class definition %s:
*
* {{{
- * object Main extends App { %s }
- * }}}"""
+ * object Main extends App {%s}
+ * }}}
+ *
+ * Note that `Function1` does not define a total function, as might
+ * be suggested by the existence of [[scala.PartialFunction]]. The only
+ * distinction between `Function1` and `PartialFunction` is that the
+ * latter can specify inputs which it will not handle.
+"""
def toStr() = "\"" + ("<function%d>" format i) + "\""
def apply() = {
<file name={fileName}>{header}
/** A function of {i} parameter{s}.
- * {descriptiveComment}
+ *{descriptiveComment}
*/
{classAnnotation}trait {className}{contraCoArgs} extends AnyRef {{ self =>
/** Apply the body of this function to the argument{s}.
* @return the result of function application.
*/
def apply({funArgs}): R
- {moreMethods}
+{moreMethods}
override def toString() = {toStr}
}}
</file>
@@ -211,17 +220,16 @@ class Function(val i: Int) extends Group("Function") with Arity {
)
// f(x1,x2,x3,x4,x5,x6) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6)
- def curryComment = { """
- /** Creates a curried version of this function.
+ def curryComment = {
+""" /** Creates a curried version of this function.
*
* @return a function `f` such that `f%s == apply%s`
- */
-""".format(xdefs map ("(" + _ + ")") mkString, commaXs)
+ */""".format(xdefs map ("(" + _ + ")") mkString, commaXs)
}
def tupleMethod = {
- def comment = """
- /** Creates a tupled version of this function: instead of %d arguments,
+ def comment =
+""" /** Creates a tupled version of this function: instead of %d arguments,
* it accepts a single [[scala.Tuple%d]] argument.
*
* @return a function `f` such that `f(%s) == f(Tuple%d%s) == apply%s`
@@ -229,16 +237,16 @@ class Function(val i: Int) extends Group("Function") with Arity {
""".format(i, i, commaXs, i, commaXs, commaXs)
def body = "case Tuple%d%s => apply%s".format(i, commaXs, commaXs)
- comment + " def tupled: Tuple%d%s => R = {\n %s\n }".format(i, invariantArgs, body)
+ comment + "\n @annotation.unspecialized def tupled: Tuple%d%s => R = {\n %s\n }".format(i, invariantArgs, body)
}
def curryMethod = {
val body = if (i < 5) shortCurry else longCurry
curryComment +
- " def curried: %s => R = {\n %s\n }\n".format(
+ "\n @annotation.unspecialized def curried: %s => R = {\n %s\n }\n".format(
targs mkString " => ", body
- ) + """ @deprecated("Use 'curried' instead", "2.8.0")""" + "\n def curry = curried\n"
+ )
}
override def moreMethods = curryMethod + tupleMethod
@@ -250,10 +258,7 @@ class Function(val i: Int) extends Group("Function") with Arity {
zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz */
object Tuple {
- val zipImports = """
-import scala.collection.{ TraversableLike => TLike, IterableLike => ILike }
-import scala.collection.generic.{ CanBuildFrom => CBF }
-"""
+ val zipImports = ""
def make(i: Int) = apply(i)()
def apply(i: Int) = i match {
@@ -272,237 +277,18 @@ object TupleOne extends Tuple(1)
object TupleTwo extends Tuple(2)
{
override def imports = Tuple.zipImports
- override def covariantSpecs = "@specialized(Int, Long, Double) "
+ override def covariantSpecs = "@specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) "
override def moreMethods = """
/** Swaps the elements of this `Tuple`.
* @return a new Tuple where the first element is the second element of this Tuple and the
* second element is the first element of this Tuple.
*/
def swap: Tuple2[T2,T1] = Tuple2(_2, _1)
-
- @deprecated("Use `zipped` instead.", "2.9.0")
- def zip[Repr1, El1, El2, To](implicit w1: T1 => TLike[El1, Repr1],
- w2: T2 => Iterable[El2],
- cbf1: CBF[Repr1, (El1, El2), To]): To = {
- zipped map ((x, y) => ((x, y)))
- }
-
- /** Wraps a tuple in a `Zipped`, which supports 2-ary generalisations of `map`, `flatMap`, `filter`, etc.
- * Note that there must be an implicit value to convert this tuple's types into a [[scala.collection.TraversableLike]]
- * or [[scala.collection.IterableLike]].
- * {{{
- * scala> val tuple = (List(1,2,3),List('a','b','c'))
- * tuple: (List[Int], List[Char]) = (List(1, 2, 3),List(a, b, c))
- *
- * scala> tuple.zipped map { (x,y) => x + ":" + y }
- * res6: List[java.lang.String] = List(1:a, 2:b, 3:c)
- * }}}
- *
- * @see Zipped
- * Note: will not terminate for infinite-sized collections.
- */
- def zipped[Repr1, El1, Repr2, El2](implicit w1: T1 => TLike[El1, Repr1], w2: T2 => ILike[El2, Repr2]): Zipped[Repr1, El1, Repr2, El2]
- = new Zipped[Repr1, El1, Repr2, El2](_1, _2)
-
- class Zipped[+Repr1, +El1, +Repr2, +El2](coll1: TLike[El1, Repr1], coll2: ILike[El2, Repr2]) { // coll2: ILike for filter
- def map[B, To](f: (El1, El2) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- b.sizeHint(coll1)
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext)
- b += f(el1, elems2.next)
- else
- return b.result
- }
-
- b.result
- }
-
- def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext)
- b ++= f(el1, elems2.next)
- else
- return b.result
- }
-
- b.result
- }
-
- def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = {
- val b1 = cbf1(coll1.repr)
- val b2 = cbf2(coll2.repr)
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext) {
- val el2 = elems2.next
- if (f(el1, el2)) {
- b1 += el1
- b2 += el2
- }
- }
- else return (b1.result, b2.result)
- }
-
- (b1.result, b2.result)
- }
-
- def exists(f: (El1, El2) => Boolean): Boolean = {
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext) {
- if (f(el1, elems2.next))
- return true
- }
- else return false
- }
- false
- }
-
- def forall(f: (El1, El2) => Boolean): Boolean =
- !exists((x, y) => !f(x, y))
-
- def foreach[U](f: (El1, El2) => U): Unit = {
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext)
- f(el1, elems2.next)
- else
- return
- }
- }
- }
"""
}
object TupleThree extends Tuple(3) {
override def imports = Tuple.zipImports
- override def moreMethods = """
-
- @deprecated("Use `zipped` instead.", "2.9.0")
- def zip[Repr1, El1, El2, El3, To](implicit w1: T1 => TLike[El1, Repr1],
- w2: T2 => Iterable[El2],
- w3: T3 => Iterable[El3],
- cbf1: CBF[Repr1, (El1, El2, El3), To]): To = {
- zipped map ((x, y, z) => ((x, y, z)))
- }
-
- /** Wraps a tuple in a `Zipped`, which supports 3-ary generalisations of `map`, `flatMap`, `filter`, etc.
- * Note that there must be an implicit value to convert this tuple's types into a [[scala.collection.TraversableLike]]
- * or [[scala.collection.IterableLike]].
- * {{{
- * scala> val tuple = (List(1,2,3),List('a','b','c'),List("x","y","z"))
- * tuple: (List[Int], List[Char], List[java.lang.String]) = (List(1, 2, 3),List(a, b, c),List(x, y, z))
- *
- * scala> tuple.zipped map { (x,y,z) => x + ":" + y + ":" + z}
- * res8: List[java.lang.String] = List(1:a:x, 2:b:y, 3:c:z)
- * }}}
- *
- * @see Zipped
- * Note: will not terminate for infinite-sized collections.
- */
- def zipped[Repr1, El1, Repr2, El2, Repr3, El3](implicit w1: T1 => TLike[El1, Repr1],
- w2: T2 => ILike[El2, Repr2],
- w3: T3 => ILike[El3, Repr3]): Zipped[Repr1, El1, Repr2, El2, Repr3, El3]
- = new Zipped[Repr1, El1, Repr2, El2, Repr3, El3](_1, _2, _3)
-
- class Zipped[+Repr1, +El1, +Repr2, +El2, +Repr3, +El3](coll1: TLike[El1, Repr1],
- coll2: ILike[El2, Repr2],
- coll3: ILike[El3, Repr3]) {
- def map[B, To](f: (El1, El2, El3) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext)
- b += f(el1, elems2.next, elems3.next)
- else
- return b.result
- }
- b.result
- }
-
- def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext)
- b ++= f(el1, elems2.next, elems3.next)
- else
- return b.result
- }
- b.result
- }
-
- def filter[To1, To2, To3](f: (El1, El2, El3) => Boolean)(
- implicit cbf1: CBF[Repr1, El1, To1],
- cbf2: CBF[Repr2, El2, To2],
- cbf3: CBF[Repr3, El3, To3]): (To1, To2, To3) = {
- val b1 = cbf1(coll1.repr)
- val b2 = cbf2(coll2.repr)
- val b3 = cbf3(coll3.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
- def result = (b1.result, b2.result, b3.result)
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext) {
- val el2 = elems2.next
- val el3 = elems3.next
-
- if (f(el1, el2, el3)) {
- b1 += el1
- b2 += el2
- b3 += el3
- }
- }
- else return result
- }
-
- result
- }
-
- def exists(f: (El1, El2, El3) => Boolean): Boolean = {
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext) {
- if (f(el1, elems2.next, elems3.next))
- return true
- }
- else return false
- }
- false
- }
-
- def forall(f: (El1, El2, El3) => Boolean): Boolean =
- !exists((x, y, z) => !f(x, y, z))
-
- def foreach[U](f: (El1, El2, El3) => U): Unit = {
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext)
- f(el1, elems2.next, elems3.next)
- else
- return
- }
- }
- }
-"""
}
class Tuple(val i: Int) extends Group("Tuple") with Arity {
@@ -573,7 +359,7 @@ class Product(val i: Int) extends Group("Product") with Arity {
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
"""
@@ -603,7 +389,7 @@ object {className} {{
/** {className} is a cartesian product of {i} component{s}.
* @since 2.3
*/
-trait {className}{covariantArgs} extends Product {{
+trait {className}{covariantArgs} extends Any with Product {{
/** The arity of this product.
* @return {i}
*/
diff --git a/src/build/maven/continuations-plugin-pom.xml b/src/build/maven/continuations-plugin-pom.xml
index aca519b..9abb0a3 100644
--- a/src/build/maven/continuations-plugin-pom.xml
+++ b/src/build/maven/continuations-plugin-pom.xml
@@ -23,13 +23,12 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<dependencies>
diff --git a/src/build/maven/jline-pom.xml b/src/build/maven/jline-pom.xml
index 4752deb..0d6e801 100644
--- a/src/build/maven/jline-pom.xml
+++ b/src/build/maven/jline-pom.xml
@@ -28,13 +28,12 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<dependencies>
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index e0f31a5..8da1d76 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -111,11 +111,12 @@
<deploy-local name="scala-library" version="@{version}" repository="@{repository}" />
<deploy-local name="scala-compiler" version="@{version}" repository="@{repository}" />
<deploy-local-plugin name="continuations" version="@{version}" repository="@{repository}"/>
- <deploy-local name="scala-dbc" version="@{version}" repository="@{repository}" />
+ <deploy-local name="scala-reflect" version="@{version}" repository="@{repository}" />
+ <deploy-local name="scala-actors" version="@{version}" repository="@{repository}" />
<deploy-local name="scala-swing" version="@{version}" repository="@{repository}"/>
- <deploy-local name="scalap" version="@{version}" repository="@{repository}"/>
- <deploy-local name="scala-partest" version="@{version}" repository="@{repository}"/>
- <deploy-local name="jline" version="@{version}" repository="@{repository}"/>
+ <deploy-local name="scalap" version="@{version}" repository="@{repository}"/>
+ <deploy-local name="scala-partest" version="@{version}" repository="@{repository}"/>
+ <deploy-local name="jline" version="@{version}" repository="@{repository}"/>
</sequential>
</macrodef>
</target>
@@ -163,21 +164,33 @@
<attribute name="repository" />
<attribute name="version" />
<sequential>
- <deploy-remote name="scala-library" version="@{version}" repository="@{repository}">
- <extra-attachments>
- <artifact:attach type="jar" file="scala-library/scala-library-docs.jar" classifier="javadoc" />
- </extra-attachments>
- </deploy-remote>
- <deploy-remote name="jline" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scala-library" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="jline" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scala-reflect" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scala-compiler" version="@{version}" repository="@{repository}" />
- <deploy-remote name="scala-dbc" version="@{version}" repository="@{repository}" />
<deploy-remote name="scala-swing" version="@{version}" repository="@{repository}"/>
- <deploy-remote name="scalap" version="@{version}" repository="@{repository}"/>
- <deploy-remote name="scala-partest" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scala-actors" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scalap" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scala-partest" version="@{version}" repository="@{repository}"/>
<deploy-remote-plugin name="continuations" version="@{version}" repository="@{repository}"/>
</sequential>
</macrodef>
+ <!-- IDE needs swing/actors/continuations -->
+ <macrodef name="deploy-remote-core">
+ <attribute name="repository" />
+ <attribute name="version" />
+ <sequential>
+ <deploy-remote name="scala-library" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scala-reflect" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scala-compiler" version="@{version}" repository="@{repository}" />
+ <deploy-remote name="jline" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scala-swing" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scala-actors" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-plugin name="continuations" version="@{version}" repository="@{repository}"/>
+ </sequential>
+ </macrodef>
+
<!-- PGP Signed deployment -->
<macrodef name="deploy-remote-signed-single">
<attribute name="pom" />
@@ -234,13 +247,14 @@
<attribute name="version" />
<sequential>
<deploy-remote-plugin-signed name="continuations" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="scala-library" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="jline" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scala-library" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="jline" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scala-reflect" version="@{version}" repository="@{repository}"/>
<deploy-remote-signed name="scala-compiler" version="@{version}" repository="@{repository}" />
- <deploy-remote-signed name="scala-dbc" version="@{version}" repository="@{repository}" />
<deploy-remote-signed name="scala-swing" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="scalap" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="scala-partest" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scala-actors" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scalap" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scala-partest" version="@{version}" repository="@{repository}"/>
</sequential>
</macrodef>
</target>
@@ -269,6 +283,11 @@
<deploy-remote-all version="${maven.version.number}" repository="${remote.snapshot.repository}" />
</target>
+ <!-- for PR validation -->
+ <target name="deploy-core.snapshot" depends="deploy.remote.init">
+ <deploy-remote-core version="${maven.version.number}" repository="${remote.snapshot.repository}" />
+ </target>
+
<target name="deploy.release" depends="deploy.remote.init" unless="version.is.snapshot" description="Deploys the bundled files as a release into the desired remote Maven repository">
<deploy-remote-all version="${maven.version.number}" repository="${remote.release.repository}" />
</target>
diff --git a/src/build/maven/scala-actors-pom.xml b/src/build/maven/scala-actors-pom.xml
new file mode 100644
index 0000000..3d37ef8
--- /dev/null
+++ b/src/build/maven/scala-actors-pom.xml
@@ -0,0 +1,64 @@
+<project
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-actors</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
+ <name>Scala Actors library</name>
+ <description>Deprecated Actors Library for Scala</description>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2006</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html
+ </url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <properties>
+ <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+ </properties>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
+</project>
diff --git a/src/build/maven/scala-compiler-pom.xml b/src/build/maven/scala-compiler-pom.xml
index f9bcb67..fedc34a 100644
--- a/src/build/maven/scala-compiler-pom.xml
+++ b/src/build/maven/scala-compiler-pom.xml
@@ -23,13 +23,12 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<dependencies>
@@ -38,6 +37,11 @@
<artifactId>scala-library</artifactId>
<version>@VERSION@</version>
</dependency>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-reflect</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>jline</artifactId>
diff --git a/src/build/maven/scala-dbc-pom.xml b/src/build/maven/scala-dbc-pom.xml
deleted file mode 100644
index 23092d1..0000000
--- a/src/build/maven/scala-dbc-pom.xml
+++ /dev/null
@@ -1,62 +0,0 @@
-<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-dbc</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
- <name>Scala Database Connectivity</name>
- <description>Connectivity for your DBs</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html
- </url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
- </scm>
- <issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
- </issueManagement>
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>@VERSION@</version>
- </dependency>
- </dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Typesafe</id>
- <name>Typesafe, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/maven/scala-dotnet-library-pom.xml b/src/build/maven/scala-dotnet-library-pom.xml
index 3394609..007e8be 100644
--- a/src/build/maven/scala-dotnet-library-pom.xml
+++ b/src/build/maven/scala-dotnet-library-pom.xml
@@ -24,13 +24,12 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<distributionManagement>
<repository>
diff --git a/src/build/maven/scala-library-pom.xml b/src/build/maven/scala-library-pom.xml
index 8e0abd4..fc9964a 100644
--- a/src/build/maven/scala-library-pom.xml
+++ b/src/build/maven/scala-library-pom.xml
@@ -23,25 +23,34 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
+ <properties>
+ <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+ </properties>
+ <dependencies>
+ <!--<dependency>
+ <groupId>com.typesafe</groupId>
+ <artifactId>config</artifactId>
+ <version>0.4.0</version>
+ </dependency>-->
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-partest-pom.xml b/src/build/maven/scala-partest-pom.xml
index f18ca46..ac05f24 100644
--- a/src/build/maven/scala-partest-pom.xml
+++ b/src/build/maven/scala-partest-pom.xml
@@ -23,13 +23,12 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<dependencies>
diff --git a/src/build/maven/scala-reflect-pom.xml b/src/build/maven/scala-reflect-pom.xml
new file mode 100644
index 0000000..56d2ffc
--- /dev/null
+++ b/src/build/maven/scala-reflect-pom.xml
@@ -0,0 +1,64 @@
+<project
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-reflect</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
+ <name>Scala Compiler</name>
+ <description>Compiler for the Scala Programming Language</description>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2002</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html
+ </url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <properties>
+ <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+ </properties>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
+</project>
diff --git a/src/build/maven/scala-swing-pom.xml b/src/build/maven/scala-swing-pom.xml
index a03bc07..5099fe1 100644
--- a/src/build/maven/scala-swing-pom.xml
+++ b/src/build/maven/scala-swing-pom.xml
@@ -23,14 +23,16 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
+ <properties>
+ <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+ </properties>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
diff --git a/src/build/maven/scalap-pom.xml b/src/build/maven/scalap-pom.xml
index d7f867d..50c08e8 100644
--- a/src/build/maven/scalap-pom.xml
+++ b/src/build/maven/scalap-pom.xml
@@ -23,13 +23,12 @@
</license>
</licenses>
<scm>
- <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
- <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>trac</system>
- <url>http://lampsvn.epfl.ch/trac/scala
- </url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<dependencies>
diff --git a/src/build/pack.xml b/src/build/pack.xml
index d8ee311..8aedd3f 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -3,14 +3,16 @@
<project name="sabbus-pack">
<description>
- SuperSabbus extension for packaging a distribution to Sbaz or other distribution methods. THIS FILE IS NOT STAND-ALONE AND SHOULD ONLY BE USED THROUGH ENTRY POINTS IN SUPERSABBUS.
+ SuperSabbus extension for packaging a distribution. THIS FILE IS NOT STAND-ALONE AND SHOULD ONLY BE USED THROUGH ENTRY POINTS IN SUPERSABBUS.
</description>
<!-- ===========================================================================
PROPERTIES
============================================================================ -->
- <property name="sbaz.universe" value="http://www.scala-lang.org/downloads/packages"/>
+ <property file="${basedir}/build.number.maven"/>
+ <!-- the maven stuff requires version.major, version.minor and version.patch properties.
+ the "get-scala-revision" script only returns "version.number" -->
<!-- ===========================================================================
MAIN DISTRIBUTION PACKAGING
@@ -21,51 +23,63 @@ MAIN DISTRIBUTION PACKAGING
</target>
<target name="pack-archives.tar" depends="pack-archives.start">
- <tar destfile="${dists.dir}/archives/scala-${version.number}"
+ <tar destfile="${dists.dir}/archives/${dist.name}.tar"
compression="none" longfile="gnu">
- <tarfileset dir="${dist.dir}" prefix="scala-${version.number}" includes="bin/**" mode="755"/>
- <tarfileset dir="${dist.dir}" prefix="scala-${version.number}" excludes="bin/**"/>
+ <tarfileset dir="${dist.dir}" prefix="${dist.name}" includes="bin/**" mode="755"/>
+ <tarfileset dir="${dist.dir}" prefix="${dist.name}" excludes="bin/**"/>
</tar>
- <gzip src="${dists.dir}/archives/scala-${version.number}" destfile="${dists.dir}/archives/scala-${version.number}.tgz"/>
- <exec executable="xz" failifexecutionfails="false">
- <arg line="-k -9e -S .txz ${dists.dir}/archives/scala-${version.number}"/>
- </exec>
- <delete file="${dists.dir}/archives/scala-${version.number}" />
+ <gzip src="${dists.dir}/archives/${dist.name}.tar" destfile="${dists.dir}/archives/${dist.name}.tgz"/>
+ <if>
+ <not><equals arg1="${archives.skipxz}" arg2="true" /></not>
+ <then>
+ <exec executable="xz" failifexecutionfails="false">
+ <arg line="-k -9e -S .xz ${dists.dir}/archives/${dist.name}.tar"/>
+ </exec>
+ <move file="${dists.dir}/archives/${dist.name}.tar.xz" tofile="${dists.dir}/archives/${dist.name}.txz" failonerror="false"/>
+ </then>
+ </if>
+ <delete file="${dists.dir}/archives/${dist.name}.tar" />
<checksum fileext=".md5">
<fileset dir="${dists.dir}/archives">
- <include name="scala-${version.number}.t?z"/>
+ <include name="${dist.name}.t?z"/>
</fileset>
</checksum>
</target>
<target name="pack-archives.zip" depends="pack-archives.tar">
- <zip destfile="${dists.dir}/archives/scala-${version.number}.zip">
- <zipfileset prefix="scala-${version.number}" dir="${dist.dir}"/>
+ <zip destfile="${dists.dir}/archives/${dist.name}.zip">
+ <zipfileset prefix="${dist.name}" dir="${dist.dir}"/>
</zip>
- <checksum file="${dists.dir}/archives/scala-${version.number}.zip" fileext=".md5"/>
+ <checksum file="${dists.dir}/archives/${dist.name}.zip" fileext=".md5"/>
</target>
<target name="pack-devel-docs.tar" depends="pack-archives.zip">
- <tar destfile="${dists.dir}/archives/scala-${version.number}-devel-docs"
+ <tar destfile="${dists.dir}/archives/${dist.name}-devel-docs.tar"
compression="none" longfile="gnu">
- <tarfileset dir="${dist.dir}/doc/scala-devel-docs" prefix="scala-${version.number}-devel-docs"/>
+ <tarfileset dir="${dist.dir}/api" prefix="${dist.name}-devel-docs"/>
</tar>
- <gzip src="${dists.dir}/archives/scala-${version.number}-devel-docs" destfile="${dists.dir}/archives/scala-${version.number}-devel-docs.tgz"/>
- <exec executable="xz" failifexecutionfails="false">
- <arg line="-k -9e -S .txz ${dists.dir}/archives/scala-${version.number}-devel-docs"/>
- </exec>
- <delete file="${dists.dir}/archives/scala-${version.number}-devel-docs" />
+ <gzip src="${dists.dir}/archives/${dist.name}-devel-docs.tar" destfile="${dists.dir}/archives/${dist.name}-devel-docs.tgz"/>
+ <if>
+ <not><equals arg1="${archives.skipxz}" arg2="true" /></not>
+ <then>
+ <exec executable="xz" failifexecutionfails="false">
+ <arg line="-k -9e -S .xz ${dists.dir}/archives/${dist.name}-devel-docs.tar"/>
+ </exec>
+ <move file="${dists.dir}/archives/${dist.name}-devel-docs.tar.xz" tofile="${dists.dir}/archives/${dist.name}-devel-docs.txz" failonerror="false"/>
+ </then>
+ </if>
+ <delete file="${dists.dir}/archives/${dist.name}-devel-docs.tar" />
<checksum fileext=".md5">
<fileset dir="${dists.dir}/archives">
- <include name="scala-${version.number}-devel-docs.t?z"/>
+ <include name="${dist.name}-devel-docs.t?z"/>
</fileset>
</checksum>
</target>
<target name="pack-archives.src" depends="pack-devel-docs.tar">
- <tar destfile="${dists.dir}/archives/scala-${version.number}-sources"
+ <tar destfile="${dists.dir}/archives/${dist.name}-sources.tar"
compression="none" longfile="gnu">
- <tarfileset dir="${basedir}" prefix="scala-${version.number}-sources">
+ <tarfileset dir="${basedir}" prefix="${dist.name}-sources">
<exclude name="bin/**"/>
<exclude name="build/**"/>
<exclude name="debian/**"/>
@@ -75,25 +89,33 @@ MAIN DISTRIBUTION PACKAGING
<exclude name="test/partest"/>
<exclude name=".git"/>
</tarfileset>
- <tarfileset dir="${basedir}" prefix="scala-${version.number}-sources" filemode="755">
+ <tarfileset dir="${basedir}" prefix="${dist.name}-sources" filemode="755">
<include name="test/partest"/>
</tarfileset>
</tar>
- <gzip src="${dists.dir}/archives/scala-${version.number}-sources" destfile="${dists.dir}/archives/scala-${version.number}-sources.tgz"/>
- <exec executable="xz" failifexecutionfails="false">
- <arg line="-k -9e -S .txz ${dists.dir}/archives/scala-${version.number}-sources"/>
- </exec>
- <delete file="${dists.dir}/archives/scala-${version.number}-sources" />
+ <gzip src="${dists.dir}/archives/${dist.name}-sources.tar" destfile="${dists.dir}/archives/${dist.name}-sources.tgz"/>
+ <if>
+ <not><equals arg1="${archives.skipxz}" arg2="true" /></not>
+ <then>
+ <exec executable="xz" failifexecutionfails="false">
+ <arg line="-k -9e -S .xz ${dists.dir}/archives/${dist.name}-sources.tar"/>
+ </exec>
+ <move file="${dists.dir}/archives/${dist.name}-sources.tar.xz" tofile="${dists.dir}/archives/${dist.name}-sources.txz" failonerror="false"/>
+ </then>
+ </if>
+ <delete file="${dists.dir}/archives/${dist.name}-sources.tar" />
<checksum fileext=".md5">
<fileset dir="${dists.dir}/archives">
- <include name="scala-${version.number}-sources.t?z"/>
+ <include name="${dist.name}-sources.t?z"/>
</fileset>
</checksum>
</target>
<target name="pack-archives.latest.unix" depends="pack-archives.src" unless="os.win">
+ <!-- be sure to use a relative symlink to make the distribution portable,
+ `resource` is relative to directory of `link` -->
<symlink link="${dists.dir}/archives/scala-latest-sources.tgz"
- resource="${dists.dir}/archives/scala-${version.number}-sources.tgz"
+ resource="scala-${version.number}-sources.tgz"
overwrite="yes"/>
</target>
@@ -106,109 +128,6 @@ MAIN DISTRIBUTION PACKAGING
</target>
<target name="pack-archives.done" depends="pack-archives.src, pack-archives.latest.win, pack-archives.latest.unix"/>
-
-<!-- ===========================================================================
-MAIN DISTRIBUTION SBAZ
-============================================================================ -->
-
- <target name="pack-sbaz.start">
- <mkdir dir="${dists.dir}/sbaz"/>
- </target>
-
- <target name="pack-sbaz.lib" depends="pack-sbaz.start">
- <sbaz
- file="${dists.dir}/sbaz/scala-library-${version.number}.sbp"
- adfile="${dists.dir}/sbaz/scala-library-${version.number}.advert"
- name="scala-library"
- version="${version.number}"
- desc="The Scala library. This is the minimal requirement to run any Scala program."
- link="${sbaz.universe}/scala-library-${version.number}.sbp">
- <libset dir="${dist.dir}/lib" includes="scala-library.jar,scala-dbc.jar,scala-swing.jar"/>
- <srcset dir="${dist.dir}/src" includes="scala-library-src.jar,scala-dbc-src.jar,scala-swing-src.jar"/>
- <looseset destination="doc">
- <fileset dir="${dist.dir}/doc" includes="LICENSE,README"/>
- </looseset>
- </sbaz>
- </target>
-
- <target name="pack-sbaz.comp" depends="pack-sbaz.lib">
- <sbaz
- file="${dists.dir}/sbaz/scala-devel-${version.number}.sbp"
- adfile="${dists.dir}/sbaz/scala-devel-${version.number}.advert"
- name="scala-devel"
- version="${version.number}"
- desc="The Scala developer tools. This contains everything that is required to write, test and document new Scala programs."
- depends="scala-library"
- link="${sbaz.universe}/scala-devel-${version.number}.sbp">
- <binset
- dir="${dist.dir}/bin"
- includes="scala,scala.bat,scalac,scalac.bat,scaladoc,scaladoc.bat,fsc,fsc.bat"/>
- <libset dir="${dist.dir}/lib" includes="scala-compiler.jar,jline.jar"/>
- <miscset dir="${dist.dir}/misc/scala-devel"
- includes="plugins/continuations.jar"/>
- <manset dir="${dist.dir}/man" includes="**"/>
- <srcset dir="${dist.dir}/src" includes="scala-compiler-src.jar"/>
- </sbaz>
- </target>
-
- <target name="pack-sbaz.test" depends="pack-sbaz.comp">
- <sbaz
- file="${dists.dir}/sbaz/scala-test-${version.number}.sbp"
- adfile="${dists.dir}/sbaz/scala-test-${version.number}.advert"
- name="scala-test"
- version="${version.number}"
- desc="The Scala test package contains everything needed to test Scala."
- link="${sbaz.universe}/scala-test-${version.number}.sbp">
- <binset dir="${basedir}/test"
- includes="clitest,diff/diff.*,diff/lib*.dll,partest,partest.bat"/>
- <miscset dir="${basedir}/test"
- includes="files/**/*.args,files/**/*.check,files/**/*.dll,files/**/*.jar,files/**/*.java,files/**/*.scala,files/**/*.flags,files/cli/**/*.check.*,files/jvm/*.so,files/shootout/*.javaopts,files/shootout/*.runner,files/shootout/*.txt,files/specialized/*.txt,files/**/*.test"
- excludes="files/presentation"/>
- <!-- <srcset dir="${dist.dir}/src" includes="scala-partest-src.jar"/> -->
- <libset dir="${dist.dir}/lib" includes="scala-partest.jar"/>
- <libset dir="${lib.dir}" includes="scalacheck.jar"/>
- </sbaz>
- </target>
-
- <target name="pack-sbaz.scalap" depends="pack-sbaz.test">
- <sbaz
- file="${dists.dir}/sbaz/scalap-${version.number}.sbp"
- adfile="${dists.dir}/sbaz/scalap-${version.number}.advert"
- name="scalap"
- version="${version.number}"
- desc="The scalap package contains the Scala classfile decoder."
- link="${sbaz.universe}/scalap-${version.number}.sbp">
- <binset dir="${dist.dir}/bin"
- includes="scalap,scalap.bat"/>
- <libset dir="${dist.dir}/lib" includes="scalap.jar"/>
- </sbaz>
- </target>
-
- <target name="pack-sbaz.doc" depends="pack-sbaz.scalap">
- <sbaz
- file="${dists.dir}/sbaz/scala-devel-docs-${version.number}.sbp"
- adfile="${dists.dir}/sbaz/scala-devel-docs-${version.number}.advert"
- name="scala-devel-docs"
- version="${version.number}"
- desc="The Scala developer documentation. This contains all developer documentation."
- link="${sbaz.universe}/scala-devel-docs-${version.number}.sbp">
- <docset dir="${dist.dir}/doc/scala-devel-docs"/>
- </sbaz>
- </target>
-
- <target name="pack-sbaz.all" depends="pack-sbaz.doc">
- <sbaz
- file="${dists.dir}/sbaz/scala-${version.number}.sbp"
- adfile="${dists.dir}/sbaz/scala-${version.number}.advert"
- name="scala"
- version="${version.number}"
- desc="The base Scala package that contains everything needed to start using Scala."
- depends="scala-library,scala-devel"
- link="${sbaz.universe}/scala-${version.number}.sbp"/>
- </target>
-
-
- <target name="pack-sbaz.done" depends="pack-sbaz.all"/>
<target name="pack-maven.start">
<mkdir dir="${dists.dir}/maven/${version.number}"/>
@@ -234,9 +153,10 @@ MAIN DISTRIBUTION SBAZ
</macrodef>
<mvn-copy-lib mvn.artifact.name="jline"/>
<mvn-copy-lib mvn.artifact.name="scala-library"/>
+ <mvn-copy-lib mvn.artifact.name="scala-reflect"/>
<mvn-copy-lib mvn.artifact.name="scala-compiler"/>
- <mvn-copy-lib mvn.artifact.name="scala-dbc"/>
<mvn-copy-lib mvn.artifact.name="scala-swing"/>
+ <mvn-copy-lib mvn.artifact.name="scala-actors"/>
<mvn-copy-lib mvn.artifact.name="scala-partest"/>
<mvn-copy-lib mvn.artifact.name="scalap"/>
</target>
@@ -261,54 +181,57 @@ MAIN DISTRIBUTION SBAZ
<target name="pack-maven.srcs" depends="pack-maven.libs">
<!-- Add missing src jars. -->
- <jar destfile="${dists.dir}/maven/${version.number}/jline/jline-src.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/jline/jline-src.jar"
basedir="${src.dir}/jline/src/main/java">
<include name="**/*"/>
</jar>
<!-- Continuations plugin -->
- <jar destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-src.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-src.jar"
basedir="${src.dir}/continuations/plugin">
<include name="**/*"/>
</jar>
</target>
<target name="pack-maven.docs" depends="pack-maven.libs, pack-maven.plugins">
- <jar destfile="${dists.dir}/maven/${version.number}/jline/jline-docs.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/jline/jline-docs.jar"
basedir="${build-docs.dir}/jline">
<include name="**/*"/>
</jar>
- <jar destfile="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"
basedir="${build-docs.dir}/library">
<include name="**/*"/>
</jar>
- <jar destfile="${dists.dir}/maven/${version.number}/scala-compiler/scala-compiler-docs.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-compiler/scala-compiler-docs.jar"
basedir="${build-docs.dir}/compiler">
<include name="**/*"/>
</jar>
- <jar destfile="${dists.dir}/maven/${version.number}/scalap/scalap-docs.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scalap/scalap-docs.jar"
basedir="${build-docs.dir}/scalap">
<include name="**/*"/>
</jar>
- <jar destfile="${dists.dir}/maven/${version.number}/scala-partest/scala-partest-docs.jar"
- basedir="${build-docs.dir}/scala-partest">
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-partest/scala-partest-docs.jar"
+ basedir="${build-docs.dir}/partest">
<include name="**/*"/>
</jar>
- <jar destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-docs.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-docs.jar"
basedir="${build-docs.dir}/continuations-plugin">
<include name="**/*"/>
</jar>
- <!-- TODO - Scala swing, dbc should maybe have thier own jar, but creating it is SLOW. -->
+
+ <!-- TODO - Scala swing and actors should maybe have their own jar, but creating it is SLOW. -->
<copy tofile="${dists.dir}/maven/${version.number}/scala-swing/scala-swing-docs.jar"
file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
- <copy tofile="${dists.dir}/maven/${version.number}/scala-dbc/scala-dbc-docs.jar"
+ <copy tofile="${dists.dir}/maven/${version.number}/scala-actors/scala-actors-docs.jar"
+ file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
+ <copy tofile="${dists.dir}/maven/${version.number}/scala-reflect/scala-reflect-docs.jar"
file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
</target>
<target name="pack-maven.latest.unix" depends="pack-maven.docs" unless="os.win">
<symlink link="${dists.dir}/maven/latest"
- resource="${dists.dir}/maven/${version.number}"
+ resource="${version.number}"
overwrite="yes"/>
</target>
@@ -333,7 +256,7 @@ MAIN DISTRIBUTION SBAZ
MISCELLANEOUS
============================================================================ -->
- <target name="pack-all.done" depends="pack-archives.done, pack-sbaz.done, pack-maven.done"/>
+ <target name="pack-all.done" depends="pack-archives.done, pack-maven.done"/>
<!-- ===========================================================================
MISCELLANEOUS
diff --git a/src/compiler/rootdoc.txt b/src/compiler/rootdoc.txt
new file mode 100644
index 0000000..173f604
--- /dev/null
+++ b/src/compiler/rootdoc.txt
@@ -0,0 +1,6 @@
+The Scala compiler API.
+
+The following resources are useful for Scala plugin/compiler development:
+ - [[http://www.scala-lang.org/node/215 Scala development tutorials]] on [[http://www.scala-lang.org www.scala-lang.org]]
+ - [[https://wiki.scala-lang.org/display/SIW/ Scala Internals wiki]]
+ - [[http://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ Scala compiler corner]], maintained by Miguel
diff --git a/src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala b/src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala
new file mode 100644
index 0000000..4e4d88c
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala
@@ -0,0 +1,7 @@
+package scala.reflect.macros
+package runtime
+
+import scala.reflect.internal.util.Position
+import scala.util.control.ControlThrowable
+
+class AbortMacroException(val pos: Position, val msg: String) extends Throwable(msg) with ControlThrowable
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/Aliases.scala b/src/compiler/scala/reflect/macros/runtime/Aliases.scala
new file mode 100644
index 0000000..96cf50e
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/Aliases.scala
@@ -0,0 +1,36 @@
+package scala.reflect.macros
+package runtime
+
+trait Aliases {
+ self: Context =>
+
+ override type Symbol = universe.Symbol
+ override type Type = universe.Type
+ override type Name = universe.Name
+ override type TermName = universe.TermName
+ override type TypeName = universe.TypeName
+ override type Tree = universe.Tree
+ override type Position = universe.Position
+ override type Scope = universe.Scope
+ override type Modifiers = universe.Modifiers
+
+ override type Expr[+T] = universe.Expr[T]
+ override val Expr = universe.Expr
+ def Expr[T: WeakTypeTag](tree: Tree): Expr[T] = universe.Expr[T](mirror, universe.FixedMirrorTreeCreator(mirror, tree))
+
+ override type WeakTypeTag[T] = universe.WeakTypeTag[T]
+ override type TypeTag[T] = universe.TypeTag[T]
+ override val WeakTypeTag = universe.WeakTypeTag
+ override val TypeTag = universe.TypeTag
+ def WeakTypeTag[T](tpe: Type): WeakTypeTag[T] = universe.WeakTypeTag[T](mirror, universe.FixedMirrorTypeCreator(mirror, tpe))
+ def TypeTag[T](tpe: Type): TypeTag[T] = universe.TypeTag[T](mirror, universe.FixedMirrorTypeCreator(mirror, tpe))
+ override def weakTypeTag[T](implicit attag: WeakTypeTag[T]) = attag
+ override def typeTag[T](implicit ttag: TypeTag[T]) = ttag
+ override def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
+ override def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
+
+ type ImplicitCandidate = (Type, Tree)
+ implicit class RichOpenImplicit(oi: universe.analyzer.OpenImplicit) {
+ def toImplicitCandidate = (oi.pt, oi.tree)
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/Context.scala b/src/compiler/scala/reflect/macros/runtime/Context.scala
new file mode 100644
index 0000000..8e8b0fc
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/Context.scala
@@ -0,0 +1,28 @@
+package scala.reflect.macros
+package runtime
+
+import scala.tools.nsc.Global
+
+abstract class Context extends scala.reflect.macros.Context
+ with Aliases
+ with Enclosures
+ with Names
+ with Reifiers
+ with FrontEnds
+ with Infrastructure
+ with Typers
+ with Parsers
+ with Evals
+ with ExprUtils
+ with Traces {
+
+ val universe: Global
+
+ val mirror: universe.Mirror = universe.rootMirror
+
+ val callsiteTyper: universe.analyzer.Typer
+
+ val prefix: Expr[PrefixType]
+
+ val expandee: Tree
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala b/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
new file mode 100644
index 0000000..2a4a22f
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
@@ -0,0 +1,24 @@
+package scala.reflect.macros
+package runtime
+
+trait Enclosures {
+ self: Context =>
+
+ import universe._
+ import mirror._
+
+ private def site = callsiteTyper.context
+ private def enclTrees = site.enclosingContextChain map (_.tree)
+ private def enclPoses = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition)
+
+ // vals are eager to simplify debugging
+ // after all we wouldn't save that much time by making them lazy
+ val macroApplication: Tree = expandee
+ val enclosingClass: Tree = enclTrees collectFirst { case x: ImplDef => x } getOrElse EmptyTree
+ val enclosingImplicits: List[ImplicitCandidate] = site.openImplicits.map(_.toImplicitCandidate)
+ val enclosingMacros: List[Context] = this :: universe.analyzer.openMacros // include self
+ val enclosingMethod: Tree = site.enclMethod.tree
+ val enclosingPosition: Position = if (enclPoses.isEmpty) NoPosition else enclPoses.head.pos
+ val enclosingUnit: CompilationUnit = universe.currentRun.currentUnit
+ val enclosingRun: Run = universe.currentRun
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/Evals.scala b/src/compiler/scala/reflect/macros/runtime/Evals.scala
new file mode 100644
index 0000000..1f7b5f2
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/Evals.scala
@@ -0,0 +1,18 @@
+package scala.reflect.macros
+package runtime
+
+import scala.reflect.runtime.{universe => ru}
+import scala.tools.reflect.ToolBox
+
+trait Evals {
+ self: Context =>
+
+ private lazy val evalMirror = ru.runtimeMirror(universe.analyzer.macroClassloader)
+ private lazy val evalToolBox = evalMirror.mkToolBox()
+ private lazy val evalImporter = ru.mkImporter(universe).asInstanceOf[ru.Importer { val from: universe.type }]
+
+ def eval[T](expr: Expr[T]): T = {
+ val imported = evalImporter.importTree(expr.tree)
+ evalToolBox.eval(imported).asInstanceOf[T]
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala b/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala
new file mode 100644
index 0000000..672699f
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala
@@ -0,0 +1,35 @@
+package scala.reflect.macros
+package runtime
+
+trait ExprUtils {
+ self: Context =>
+
+ import universe._
+ import mirror._
+
+ def literalNull = Expr[Null](Literal(Constant(null)))(TypeTag.Null)
+
+ def literalUnit = Expr[Unit](Literal(Constant(())))(TypeTag.Unit)
+
+ def literalTrue = Expr[Boolean](Literal(Constant(true)))(TypeTag.Boolean)
+
+ def literalFalse = Expr[Boolean](Literal(Constant(false)))(TypeTag.Boolean)
+
+ def literal(x: Boolean) = Expr[Boolean](Literal(Constant(x)))(TypeTag.Boolean)
+
+ def literal(x: Byte) = Expr[Byte](Literal(Constant(x)))(TypeTag.Byte)
+
+ def literal(x: Short) = Expr[Short](Literal(Constant(x)))(TypeTag.Short)
+
+ def literal(x: Int) = Expr[Int](Literal(Constant(x)))(TypeTag.Int)
+
+ def literal(x: Long) = Expr[Long](Literal(Constant(x)))(TypeTag.Long)
+
+ def literal(x: Float) = Expr[Float](Literal(Constant(x)))(TypeTag.Float)
+
+ def literal(x: Double) = Expr[Double](Literal(Constant(x)))(TypeTag.Double)
+
+ def literal(x: String) = Expr[String](Literal(Constant(x)))(TypeTag[String](definitions.StringClass.toTypeConstructor))
+
+ def literal(x: Char) = Expr[Char](Literal(Constant(x)))(TypeTag.Char)
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala b/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala
new file mode 100644
index 0000000..a6a198e
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala
@@ -0,0 +1,20 @@
+package scala.reflect.macros
+package runtime
+
+trait FrontEnds {
+ self: Context =>
+
+ def echo(pos: Position, msg: String): Unit = universe.reporter.echo(pos, msg)
+
+ def info(pos: Position, msg: String, force: Boolean): Unit = universe.reporter.info(pos, msg, force)
+
+ def hasWarnings: Boolean = universe.reporter.hasWarnings // fixed: was reporter.hasErrors (copy-paste from hasErrors below)
+
+ def hasErrors: Boolean = universe.reporter.hasErrors
+
+ def warning(pos: Position, msg: String): Unit = callsiteTyper.context.warning(pos, msg)
+
+ def error(pos: Position, msg: String): Unit = callsiteTyper.context.error(pos, msg)
+
+ def abort(pos: Position, msg: String): Nothing = throw new AbortMacroException(pos, msg)
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala b/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
new file mode 100644
index 0000000..7781693
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
@@ -0,0 +1,16 @@
+package scala.reflect.macros
+package runtime
+
+trait Infrastructure {
+ self: Context =>
+
+ def settings: List[String] = {
+ val us = universe.settings
+ import us._
+ userSetSettings collectFirst { case x: MultiStringSetting if x.name == XmacroSettings.name => x.value } getOrElse Nil
+ }
+
+ def compilerSettings: List[String] = universe.settings.recreateArgs
+
+ def classPath: List[java.net.URL] = global.classPath.asURLs
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/Names.scala b/src/compiler/scala/reflect/macros/runtime/Names.scala
new file mode 100644
index 0000000..ee9f3a5
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/Names.scala
@@ -0,0 +1,17 @@
+package scala.reflect.macros
+package runtime
+
+trait Names {
+ self: Context =>
+
+ lazy val freshNameCreator = callsiteTyper.context.unit.fresh
+
+ def fresh(): String =
+ freshNameCreator.newName()
+
+ def fresh(name: String): String =
+ freshNameCreator.newName(name)
+
+ def fresh[NameType <: Name](name: NameType): NameType =
+ name.mapName(freshNameCreator.newName(_)).asInstanceOf[NameType]
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/Parsers.scala b/src/compiler/scala/reflect/macros/runtime/Parsers.scala
new file mode 100644
index 0000000..566bcde
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/Parsers.scala
@@ -0,0 +1,24 @@
+package scala.reflect.macros
+package runtime
+
+import scala.language.existentials
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.ToolBoxError
+
+trait Parsers {
+ self: Context =>
+
+ def parse(code: String): Tree =
+ // todo. provide decent implementation
+ // see `Typers.typedUseCase` for details
+ try {
+ import scala.reflect.runtime.{universe => ru}
+ val parsed = ru.rootMirror.mkToolBox().parse(code)
+ val importer = universe.mkImporter(ru)
+ importer.importTree(parsed)
+ } catch {
+ case ToolBoxError(msg, cause) =>
+ // todo. provide a position
+ throw new ParseException(universe.NoPosition, msg)
+ }
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/Reifiers.scala b/src/compiler/scala/reflect/macros/runtime/Reifiers.scala
new file mode 100644
index 0000000..8bb388b
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/Reifiers.scala
@@ -0,0 +1,77 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Gilles Dubochet
+ */
+
+package scala.reflect.macros
+package runtime
+
+trait Reifiers {
+ self: Context =>
+
+ val global: universe.type = universe
+ import universe._
+ import definitions._
+
+ def reifyTree(universe: Tree, mirror: Tree, tree: Tree): Tree = {
+ assert(ExprClass != NoSymbol)
+ val result = scala.reflect.reify.`package`.reifyTree(self.universe)(callsiteTyper, universe, mirror, tree)
+ logFreeVars(enclosingPosition, result)
+ result
+ }
+
+ def reifyType(universe: Tree, mirror: Tree, tpe: Type, concrete: Boolean = false): Tree = {
+ assert(TypeTagsClass != NoSymbol)
+ val result = scala.reflect.reify.`package`.reifyType(self.universe)(callsiteTyper, universe, mirror, tpe, concrete)
+ logFreeVars(enclosingPosition, result)
+ result
+ }
+
+ def reifyRuntimeClass(tpe: Type, concrete: Boolean = true): Tree =
+ scala.reflect.reify.`package`.reifyRuntimeClass(universe)(callsiteTyper, tpe, concrete = concrete)
+
+ def reifyEnclosingRuntimeClass: Tree =
+ scala.reflect.reify.`package`.reifyEnclosingRuntimeClass(universe)(callsiteTyper)
+
+ def unreifyTree(tree: Tree): Tree = {
+ assert(ExprSplice != NoSymbol)
+ Select(tree, ExprSplice)
+ }
+
+ // fixme: if I put utils here, then "global" from utils' early initialization syntax
+ // and "global" that comes from here conflict with each other when incrementally compiling
+ // the problem is that both are pickled with the same owner - trait Reifiers
+ // and this upsets the compiler, so that oftentimes it throws assertion failures
+ // Martin knows the details
+ //
+ // object utils extends {
+ // val global: self.global.type = self.global
+ // val typer: global.analyzer.Typer = self.callsiteTyper
+ // } with scala.reflect.reify.utils.Utils
+ // import utils._
+
+ private def logFreeVars(position: Position, reification: Tree): Unit = {
+ object utils extends {
+ val global: self.global.type = self.global
+ val typer: global.analyzer.Typer = self.callsiteTyper
+ } with scala.reflect.reify.utils.Utils
+ import utils._
+
+ def logFreeVars(symtab: SymbolTable): Unit =
+ // logging free vars only when they are untyped prevents avalanches of duplicate messages
+ symtab.syms map (sym => symtab.symDef(sym)) foreach {
+ case FreeTermDef(_, _, binding, _, origin) if universe.settings.logFreeTerms.value && binding.tpe == null =>
+ reporter.echo(position, "free term: %s %s".format(showRaw(binding), origin))
+ case FreeTypeDef(_, _, binding, _, origin) if universe.settings.logFreeTypes.value && binding.tpe == null =>
+ reporter.echo(position, "free type: %s %s".format(showRaw(binding), origin))
+ case _ =>
+ // do nothing
+ }
+
+ if (universe.settings.logFreeTerms.value || universe.settings.logFreeTypes.value)
+ reification match {
+ case ReifiedTree(_, _, symtab, _, _, _, _) => logFreeVars(symtab)
+ case ReifiedType(_, _, symtab, _, _, _) => logFreeVars(symtab)
+ }
+ }
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/Traces.scala b/src/compiler/scala/reflect/macros/runtime/Traces.scala
new file mode 100644
index 0000000..0238e9f
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/Traces.scala
@@ -0,0 +1,8 @@
+package scala.reflect.macros
+package runtime
+
+trait Traces extends util.Traces {
+ self: Context =>
+
+ def globalSettings = universe.settings
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/Typers.scala b/src/compiler/scala/reflect/macros/runtime/Typers.scala
new file mode 100644
index 0000000..a51bee0
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/Typers.scala
@@ -0,0 +1,50 @@
+package scala.reflect.macros
+package runtime
+
+trait Typers {
+ self: Context =>
+
+ def openMacros: List[Context] = this :: universe.analyzer.openMacros
+
+ def openImplicits: List[ImplicitCandidate] = callsiteTyper.context.openImplicits.map(_.toImplicitCandidate)
+
+ /**
+ * @see [[scala.tools.reflect.Toolbox.typeCheck]]
+ */
+ def typeCheck(tree: Tree, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree = {
+ macroLogVerbose("typechecking %s with expected type %s, implicit views = %s, macros = %s".format(tree, pt, !withImplicitViewsDisabled, !withMacrosDisabled))
+ val context = callsiteTyper.context
+ val wrapper1 = if (!withImplicitViewsDisabled) (context.withImplicitsEnabled[Tree] _) else (context.withImplicitsDisabled[Tree] _)
+ val wrapper2 = if (!withMacrosDisabled) (context.withMacrosEnabled[Tree] _) else (context.withMacrosDisabled[Tree] _)
+ def wrapper (tree: => Tree) = wrapper1(wrapper2(tree))
+ // if you get a "silent mode is not available past typer" here
+ // don't rush to change the typecheck not to use the silent method when the silent parameter is false
+ // typechecking uses silent anyways (e.g. in typedSelect), so you'll only waste your time
+ // I'd advise fixing the root cause: finding why the context is not set to report errors
+ // (also see reflect.runtime.ToolBoxes.typeCheckExpr for a workaround that might work for you)
+ wrapper(callsiteTyper.silent(_.typed(tree, universe.analyzer.EXPRmode, pt), reportAmbiguousErrors = false) match {
+ case universe.analyzer.SilentResultValue(result) =>
+ macroLogVerbose(result)
+ result
+ case error @ universe.analyzer.SilentTypeError(_) =>
+ macroLogVerbose(error.err.errMsg)
+ if (!silent) throw new TypecheckException(error.err.errPos, error.err.errMsg)
+ universe.EmptyTree
+ })
+ }
+
+ def inferImplicitValue(pt: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree = {
+ macroLogVerbose("inferring implicit value of type %s, macros = %s".format(pt, !withMacrosDisabled))
+ universe.analyzer.inferImplicit(universe.EmptyTree, pt, false, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg))
+ }
+
+ def inferImplicitView(tree: Tree, from: Type, to: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree = {
+ macroLogVerbose("inferring implicit view from %s to %s for %s, macros = %s".format(from, to, tree, !withMacrosDisabled))
+ val viewTpe = universe.appliedType(universe.definitions.FunctionClass(1).toTypeConstructor, List(from, to))
+ universe.analyzer.inferImplicit(tree, viewTpe, true, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg))
+ }
+
+ def resetAllAttrs(tree: Tree): Tree = universe.resetAllAttrs(tree)
+
+ def resetLocalAttrs(tree: Tree): Tree = universe.resetLocalAttrs(tree)
+}
diff --git a/src/compiler/scala/reflect/macros/util/Traces.scala b/src/compiler/scala/reflect/macros/util/Traces.scala
new file mode 100644
index 0000000..2dffc68
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/util/Traces.scala
@@ -0,0 +1,11 @@
+package scala.reflect.macros
+package util
+
+trait Traces {
+ def globalSettings: scala.tools.nsc.Settings
+
+ val macroDebugLite = globalSettings.YmacrodebugLite.value
+ val macroDebugVerbose = globalSettings.YmacrodebugVerbose.value
+ @inline final def macroLogLite(msg: => Any) { if (macroDebugLite || macroDebugVerbose) println(msg) }
+ @inline final def macroLogVerbose(msg: => Any) { if (macroDebugVerbose) println(msg) }
+}
diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala
new file mode 100644
index 0000000..2e57bc5
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/Errors.scala
@@ -0,0 +1,84 @@
+package scala.reflect.reify
+
+import scala.reflect.macros.ReificationException
+import scala.reflect.macros.UnexpectedReificationException
+
+trait Errors {
+ self: Reifier =>
+
+ import global._
+ import definitions._
+
+ def defaultErrorPosition = {
+ val stack = currents collect { case t: Tree if t.pos != NoPosition => t.pos }
+ stack.headOption getOrElse analyzer.enclosingMacroPosition
+ }
+
+ // expected errors: these can happen if the user casually writes whatever.reify(...)
+ // hence we don't crash here, but nicely report a typechecking error and bail out asap
+
+ def CannotReifyType(tpe: Type) = {
+ val msg = "implementation restriction: cannot reify type %s (%s)".format(tpe, tpe.kind)
+ throw new ReificationException(defaultErrorPosition, msg)
+ }
+
+ def CannotReifySymbol(sym: Symbol) = {
+ val msg = "implementation restriction: cannot reify symbol %s (%s)".format(sym, sym.accurateKindString)
+ throw new ReificationException(defaultErrorPosition, msg)
+ }
+
+ def CannotReifyCompoundTypeTreeWithNonEmptyBody(ctt: CompoundTypeTree) = {
+ val msg = "implementation restriction: cannot reify refinement type trees with non-empty bodies"
+ throw new ReificationException(ctt.pos, msg)
+ }
+
+ def CannotReifyWeakType(details: Any) = {
+ val msg = "cannot create a TypeTag" + details + ": use WeakTypeTag instead"
+ throw new ReificationException(defaultErrorPosition, msg)
+ }
+
+ def CannotConvertManifestToTagWithoutScalaReflect(tpe: Type, manifestInScope: Tree) = {
+ val msg =
+ sm"""to create a type tag here, it is necessary to interoperate with the manifest `$manifestInScope` in scope.
+ |however manifest -> typetag conversion requires Scala reflection, which is not present on the classpath.
+ |to proceed put scala-reflect.jar on your compilation classpath and recompile."""
+ throw new ReificationException(defaultErrorPosition, msg)
+ }
+
+ def CannotReifyRuntimeSplice(tree: Tree) = {
+ val msg = """
+ |the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
+ |cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
+ |if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
+ |import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.""".trim.stripMargin
+ throw new ReificationException(tree.pos, msg)
+ }
+
+ // unexpected errors: these can never happen under normal conditions unless there's a bug in the compiler (or in a compiler plugin or in a macro)
+ // hence, we fail fast and loudly and don't care about being nice - in this situation noone will appreciate our quiet nicety
+
+ def CannotReifyUntypedPrefix(prefix: Tree) = {
+ val msg = "internal error: untyped prefixes are not supported, consider typechecking the prefix before passing it to the reifier"
+ throw new UnexpectedReificationException(defaultErrorPosition, msg)
+ }
+
+ def CannotReifyUntypedReifee(reifee: Any) = {
+ val msg = "internal error: untyped trees are not supported, consider typechecking the reifee before passing it to the reifier"
+ throw new UnexpectedReificationException(defaultErrorPosition, msg)
+ }
+
+ def CannotReifyErroneousPrefix(prefix: Tree) = {
+ val msg = "internal error: erroneous prefixes are not supported, make sure that your prefix has typechecked successfully before passing it to the reifier"
+ throw new UnexpectedReificationException(defaultErrorPosition, msg)
+ }
+
+ def CannotReifyErroneousReifee(reifee: Any) = {
+ val msg = "internal error: erroneous reifees are not supported, make sure that your reifee has typechecked successfully before passing it to the reifier"
+ throw new UnexpectedReificationException(defaultErrorPosition, msg)
+ }
+
+ def CannotReifyInvalidLazyVal(tree: ValDef) = {
+ val msg = "internal error: could not reconstruct original lazy val due to missing accessor"
+ throw new UnexpectedReificationException(tree.pos, msg)
+ }
+}
diff --git a/src/compiler/scala/reflect/reify/Phases.scala b/src/compiler/scala/reflect/reify/Phases.scala
new file mode 100644
index 0000000..1710cae
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/Phases.scala
@@ -0,0 +1,44 @@
+package scala.reflect.reify
+
+import phases._
+
+trait Phases extends Reshape
+ with Calculate
+ with Metalevels
+ with Reify {
+
+ self: Reifier =>
+
+ import global._
+ import definitions._
+
+ private var alreadyRun = false
+
+ lazy val mkReificationPipeline: Tree => Tree = tree0 => {
+ assert(!alreadyRun, "reifier instance cannot be used more than once")
+ alreadyRun = true
+
+ var tree = tree0
+
+ if (reifyDebug) println("[calculate phase]")
+ calculate.traverse(tree)
+
+ if (reifyDebug) println("[reshape phase]")
+ tree = reshape.transform(tree)
+ if (reifyDebug) println("[interlude]")
+ if (reifyDebug) println("reifee = " + (if (opt.showTrees) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString))
+
+ if (reifyDebug) println("[calculate phase]")
+ calculate.traverse(tree)
+
+ if (reifyDebug) println("[metalevels phase]")
+ tree = metalevels.transform(tree)
+ if (reifyDebug) println("[interlude]")
+ if (reifyDebug) println(symtab.debugString)
+
+ if (reifyDebug) println("[reify phase]")
+ val result = reify(tree)
+
+ result
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
new file mode 100644
index 0000000..47669f5
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -0,0 +1,143 @@
+package scala.reflect.reify
+
+import scala.tools.nsc.Global
+import scala.reflect.macros.ReificationException
+import scala.reflect.macros.UnexpectedReificationException
+import scala.reflect.reify.utils.Utils
+
+/** Given a tree or a type, generate a tree that when executed at runtime produces the original tree or type.
+ * See more info in the comments to ``reify'' in scala.reflect.api.Universe.
+ *
+ * @author Martin Odersky
+ * @version 2.10
+ */
+abstract class Reifier extends States
+ with Phases
+ with Errors
+ with Utils {
+
+ val global: Global
+ import global._
+ import definitions._
+
+ val typer: global.analyzer.Typer
+ val universe: Tree
+ val mirror: Tree
+ val reifee: Any
+ val concrete: Boolean
+
+ // needed to seamlessly integrate with standalone utils
+ override def getReifier: Reifier { val global: Reifier.this.global.type } =
+ this.asInstanceOf[Reifier { val global: Reifier.this.global.type }]
+ override def hasReifier = true
+
+ /**
+ * For ``reifee'' and other reification parameters, generate a tree of the form
+ *
+ * {
+ * val $u: universe.type = <[ universe ]>
+ * val $m: $u.Mirror = <[ mirror ]>
+ * $u.Expr[T](rtree) // if data is a Tree
+ * $u.TypeTag[T](rtree) // if data is a Type
+ * }
+ *
+ * where
+ *
+ * - `universe` is the tree that represents the universe the result will be bound to
+ * - `mirror` is the tree that represents the mirror the result will be initially bound to
+ * - `rtree` is code that generates `reifee` at runtime.
+ * - `T` is the type that corresponds to `data`.
+ *
+ * This is not a method, but a value to indicate the fact that Reifier instances are a one-off.
+ */
+ lazy val reification: Tree = {
+ try {
+ if (universe exists (_.isErroneous)) CannotReifyErroneousPrefix(universe)
+ if (universe.tpe == null) CannotReifyUntypedPrefix(universe)
+
+ val result = reifee match {
+ case tree: Tree =>
+ reifyTrace("reifying = ")(if (opt.showTrees) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)
+ reifyTrace("reifee is located at: ")(tree.pos)
+ reifyTrace("universe = ")(universe)
+ reifyTrace("mirror = ")(mirror)
+ if (tree exists (_.isErroneous)) CannotReifyErroneousReifee(tree)
+ if (tree.tpe == null) CannotReifyUntypedReifee(tree)
+ val pipeline = mkReificationPipeline
+ val rtree = pipeline(tree)
+
+ val tpe = typer.packedType(tree, NoSymbol)
+ val ReifiedType(_, _, tpeSymtab, _, rtpe, tpeReificationIsConcrete) = `package`.reifyType(global)(typer, universe, mirror, tpe, concrete = false)
+ state.reificationIsConcrete &= tpeReificationIsConcrete
+ state.symtab ++= tpeSymtab
+ ReifiedTree(universe, mirror, symtab, rtree, tpe, rtpe, reificationIsConcrete)
+
+ case tpe: Type =>
+ reifyTrace("reifying = ")(tpe.toString)
+ reifyTrace("universe = ")(universe)
+ reifyTrace("mirror = ")(mirror)
+ val rtree = reify(tpe)
+ ReifiedType(universe, mirror, symtab, tpe, rtree, reificationIsConcrete)
+
+ case _ =>
+ throw new Error("reifee %s of type %s is not supported".format(reifee, if (reifee == null) "null" else reifee.getClass.toString))
+ }
+
+ // todo. why do we resetAllAttrs?
+ //
+ // typically we do some preprocessing before reification and
+ // the code emitted/moved around during preprocessing is very hard to typecheck, so we leave it as it is
+ // however this "as it is" sometimes doesn't make any sense
+ //
+ // ===example 1===
+ // we move a freevar from a nested symbol table to a top-level symbol table,
+ // and then the reference to $u becomes screwed up, because nested symbol tables are already typechecked,
+ // so we have an $u symbol that points to the nested $u rather than to the top-level one.
+ //
+ // ===example 2===
+ // we inline a freevar by replacing a reference to it, e.g. $u.Apply($u.Select($u.Ident($u.newTermName("$u")), $u.newTermName("Ident")), List($u.Ident($u.newTermName("free$x"))))
+ // with its original binding (e.g. $u.Ident("x"))
+ // we'd love to typecheck the result, but we cannot do this easily, because $u is external to this tree
+ // what's even worse, sometimes $u can point to the top-level symbol table's $u, which doesn't have any symbol/type yet -
+ // it's just a ValDef that will be emitted only after the reification is completed
+ //
+ // hence, the simplest solution is to erase all attrs so that invalid (as well as non-existent) bindings get rebound correctly
+ // this is ugly, but it's the best we can do
+ //
+ // todo. this is a common problem with non-trivial macros in our current macro system
+ // needs to be solved some day
+ // maybe try `resetLocalAttrs` once the dust settles
+ var importantSymbols = Set[Symbol](
+ NothingClass, AnyClass, SingletonClass, PredefModule, ScalaRunTimeModule, TypeCreatorClass, TreeCreatorClass, MirrorClass,
+ ApiUniverseClass, JavaUniverseClass, ReflectRuntimePackage, ReflectRuntimeCurrentMirror)
+ importantSymbols ++= importantSymbols map (_.companionSymbol)
+ importantSymbols ++= importantSymbols map (_.moduleClass)
+ importantSymbols ++= importantSymbols map (_.linkedClassOfClass)
+ def isImportantSymbol(sym: Symbol): Boolean = sym != null && sym != NoSymbol && importantSymbols(sym)
+ val untyped = resetAllAttrs(result, leaveAlone = {
+ case ValDef(_, u, _, _) if u == nme.UNIVERSE_SHORT => true
+ case ValDef(_, m, _, _) if m == nme.MIRROR_SHORT => true
+ case tree if symtab.syms contains tree.symbol => true
+ case tree if isImportantSymbol(tree.symbol) => true
+ case _ => false
+ })
+
+ if (reifyCopypaste) {
+ if (reifyDebug) println("=============================")
+ println(reifiedNodeToString(untyped))
+ if (reifyDebug) println("=============================")
+ } else {
+ reifyTrace("reification = ")(untyped)
+ }
+
+ untyped
+ } catch {
+ case ex: ReificationException =>
+ throw ex
+ case ex: UnexpectedReificationException =>
+ throw ex
+ case ex: Throwable =>
+ throw new UnexpectedReificationException(defaultErrorPosition, "reification crashed", ex)
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/reify/States.scala b/src/compiler/scala/reflect/reify/States.scala
new file mode 100644
index 0000000..58455c9
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/States.scala
@@ -0,0 +1,67 @@
+package scala.reflect.reify
+
+trait States {
+ self: Reifier =>
+
+ import global._
+ import definitions._
+
+ /** Encapsulates reifier state
+ *
+ * When untangling reifier symbol tables from the reifier itself,
+ * I discovered that encoding of a symbol table (e.g. producing corresponding reificode)
+ * might cause subsequent reification (e.g. when filling in signatures and annotations for syms).
+ *
+ * This is a mess in the face of nested reifications, splices and inlining of thereof,
+ * so I made `SymbolTable` immutable, which brought a significant amount of sanity.
+ *
+ * However that wasn't enough. Sure, symbol table became immutable, but the reifier still needed
+ * to mutate its `symtab` field during reification. This caused nasty desyncs between the table being encoded
+ * and the table of the underlying reifier, so I decided to encapsulate the entire state here,
+ * so that encoding can backup the state before it starts and restore it after it completes.
+ */
+ val state = new State
+
+ // todo. rewrite the reifier so that we don't need mutable state anymore
+ // to aid you with that I've already removed all the setters from the reifier
+ // so all the places that involve mutations are forced to do that by explicitly mentioning `state`
+ class State {
+ var symtab = SymbolTable()
+ var reifyTreeSymbols = false
+ var reifyTreeTypes = false
+ private var _reificationIsConcrete = true
+ def reificationIsConcrete: Boolean = _reificationIsConcrete
+ def reificationIsConcrete_=(value: Boolean): Unit = {
+ _reificationIsConcrete = value
+ if (!value && concrete) {
+ current match {
+ case tpe: Type => CannotReifyWeakType(s" having unresolved type parameter $tpe")
+ case sym: Symbol => CannotReifyWeakType(s" referring to local ${sym.kindString} ${sym.fullName}")
+ case _ => CannotReifyWeakType("")
+ }
+ }
+ }
+ var reifyStack = reifee :: Nil
+ var localSymbols = Map[Symbol, Int]()
+
+ def backup: State = {
+ val backup = new State
+ backup.symtab = this.symtab
+ backup.reifyTreeSymbols = this.reifyTreeSymbols
+ backup.reifyTreeTypes = this.reifyTreeTypes
+ backup._reificationIsConcrete = this._reificationIsConcrete
+ backup.reifyStack = this.reifyStack
+ backup.localSymbols = this.localSymbols
+ backup
+ }
+
+ def restore(backup: State): Unit = {
+ this.symtab = backup.symtab
+ this.reifyTreeSymbols = backup.reifyTreeSymbols
+ this.reifyTreeTypes = backup.reifyTreeTypes
+ this._reificationIsConcrete = backup._reificationIsConcrete
+ this.reifyStack = backup.reifyStack
+ this.localSymbols = backup.localSymbols
+ }
+ }
+}
diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala
new file mode 100644
index 0000000..cbaee41
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/Taggers.scala
@@ -0,0 +1,102 @@
+package scala.reflect.reify
+
+import scala.reflect.macros.{ReificationException, UnexpectedReificationException, TypecheckException}
+import scala.reflect.macros.runtime.Context
+
+abstract class Taggers {
+ val c: Context
+
+ import c.universe._
+ import definitions._
+ import treeBuild._
+
+ val coreTags = Map(
+ ByteTpe -> nme.Byte,
+ ShortTpe -> nme.Short,
+ CharTpe -> nme.Char,
+ IntTpe -> nme.Int,
+ LongTpe -> nme.Long,
+ FloatTpe -> nme.Float,
+ DoubleTpe -> nme.Double,
+ BooleanTpe -> nme.Boolean,
+ UnitTpe -> nme.Unit,
+ AnyTpe -> nme.Any,
+ AnyValTpe -> nme.AnyVal,
+ AnyRefTpe -> nme.AnyRef,
+ ObjectTpe -> nme.Object,
+ NothingTpe -> nme.Nothing,
+ NullTpe -> nme.Null)
+
+ def materializeClassTag(tpe: Type): Tree = {
+ val tagModule = ClassTagModule
+ materializeTag(EmptyTree, tpe, tagModule, {
+ val erasure = c.reifyRuntimeClass(tpe, concrete = true)
+ val factory = TypeApply(Select(Ident(tagModule), nme.apply), List(TypeTree(tpe)))
+ Apply(factory, List(erasure))
+ })
+ }
+
+ def materializeTypeTag(universe: Tree, mirror: Tree, tpe: Type, concrete: Boolean): Tree = {
+ val tagType = if (concrete) TypeTagClass else WeakTypeTagClass
+ // what we need here is to compose a type Universe # TypeTag[$tpe]
+ // to look for an implicit that conforms to this type
+ // that's why neither appliedType(tagType, List(tpe)) aka TypeRef(TypeTagsClass.thisType, tagType, List(tpe))
+ // nor TypeRef(ApiUniverseClass.thisType, tagType, List(tpe)) won't fit here
+ // scala> :type -v def foo: scala.reflect.api.Universe#TypeTag[Int] = ???
+ // NullaryMethodType(TypeRef(pre = TypeRef(TypeSymbol(Universe)), TypeSymbol(TypeTag), args = List($tpe))))
+ val unaffiliatedTagTpe = TypeRef(ApiUniverseClass.typeConstructor, tagType, List(tpe))
+ val unaffiliatedTag = c.inferImplicitValue(unaffiliatedTagTpe, silent = true, withMacrosDisabled = true)
+ unaffiliatedTag match {
+ case success if !success.isEmpty =>
+ Apply(Select(success, nme.in), List(mirror orElse mkDefaultMirrorRef(c.universe)(universe, c.callsiteTyper)))
+ case _ =>
+ val tagModule = if (concrete) TypeTagModule else WeakTypeTagModule
+ materializeTag(universe, tpe, tagModule, c.reifyType(universe, mirror, tpe, concrete = concrete))
+ }
+ }
+
+ private def materializeTag(prefix: Tree, tpe: Type, tagModule: Symbol, materializer: => Tree): Tree = {
+ val result =
+ tpe match {
+ case coreTpe if coreTags contains coreTpe =>
+ val ref = if (tagModule.owner.isPackageClass) Ident(tagModule) else Select(prefix, tagModule.name)
+ Select(ref, coreTags(coreTpe))
+ case _ =>
+ translatingReificationErrors(materializer)
+ }
+ try c.typeCheck(result)
+ catch { case terr @ TypecheckException(pos, msg) => failTag(result, terr) }
+ }
+
+ def materializeExpr(universe: Tree, mirror: Tree, expr: Tree): Tree = {
+ val result = translatingReificationErrors(c.reifyTree(universe, mirror, expr))
+ try c.typeCheck(result)
+ catch { case terr @ TypecheckException(pos, msg) => failExpr(result, terr) }
+ }
+
+ private def translatingReificationErrors(materializer: => Tree): Tree = {
+ try materializer
+ catch {
+ case ReificationException(pos, msg) =>
+ c.error(pos.asInstanceOf[c.Position], msg) // this cast is a very small price for the sanity of exception handling
+ EmptyTree
+ case UnexpectedReificationException(pos, err, cause) if cause != null =>
+ throw cause
+ }
+ }
+
+ private def failTag(result: Tree, reason: Any): Nothing = {
+ val Apply(TypeApply(fun, List(tpeTree)), _) = c.macroApplication
+ val tpe = tpeTree.tpe
+ val PolyType(_, MethodType(_, tagTpe)) = fun.tpe
+ val tagModule = tagTpe.typeSymbol.companionSymbol
+ if (c.compilerSettings.contains("-Xlog-implicits"))
+ c.echo(c.enclosingPosition, s"cannot materialize ${tagModule.name}[$tpe] as $result because:\n$reason")
+ c.abort(c.enclosingPosition, "No %s available for %s".format(tagModule.name, tpe))
+ }
+
+ private def failExpr(result: Tree, reason: Any): Nothing = {
+ val Apply(_, expr :: Nil) = c.macroApplication
+ c.abort(c.enclosingPosition, s"Cannot materialize $expr as $result because:\n$reason")
+ }
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
new file mode 100644
index 0000000..dec491a
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
@@ -0,0 +1,55 @@
+package scala.reflect.reify
+package codegen
+
+trait GenAnnotationInfos {
+ self: Reifier =>
+
+ import global._
+ import definitions._
+
+ // usually annotations are reified as their originals from Modifiers
+ // however, when reifying free and tough types, we're forced to reify annotation infos as is
+ // why is that bad? take a look inside
+ def reifyAnnotationInfo(ann: AnnotationInfo): Tree = {
+ val reifiedArgs = ann.args map { arg =>
+ val saved1 = reifyTreeSymbols
+ val saved2 = reifyTreeTypes
+
+ try {
+ // one more quirk of reifying annotations
+ //
+ // when reifying AnnotatedTypes we need to reify all the types and symbols of inner ASTs
+ // that's because a lot of logic expects post-typer trees to have non-null tpes
+ //
+ // Q: reified trees are pre-typer, so there's shouldn't be a problem.
+ // reflective typechecker will fill in missing symbols and types, right?
+ // A: actually, no. annotation ASTs live inside AnnotatedTypes,
+ // and insides of the types is the place where typechecker doesn't look.
+ state.reifyTreeSymbols = true
+ state.reifyTreeTypes = true
+
+ // todo. every AnnotationInfo is an island, entire of itself
+ // no regular Traverser or Transformer can reach it
+ // hence we need to run its contents through the entire reification pipeline
+ // e.g. to apply reshaping or to check metalevels
+ reify(arg)
+ } finally {
+ state.reifyTreeSymbols = saved1
+ state.reifyTreeTypes = saved2
+ }
+ }
+
+ def reifyClassfileAnnotArg(arg: ClassfileAnnotArg): Tree = arg match {
+ case LiteralAnnotArg(const) =>
+ mirrorFactoryCall(nme.LiteralAnnotArg, reifyProduct(const))
+ case ArrayAnnotArg(args) =>
+ mirrorFactoryCall(nme.ArrayAnnotArg, scalaFactoryCall(nme.Array, args map reifyClassfileAnnotArg: _*))
+ case NestedAnnotArg(ann) =>
+ mirrorFactoryCall(nme.NestedAnnotArg, reifyAnnotationInfo(ann))
+ }
+
+ // if you reify originals of anns, you get SO when trying to reify AnnotatedTypes, so screw it - after all, it's not that important
+ val reifiedAssocs = ann.assocs map (assoc => scalaFactoryCall(nme.Tuple2, reify(assoc._1), reifyClassfileAnnotArg(assoc._2)))
+ mirrorFactoryCall(nme.Annotation, reify(ann.atp), mkList(reifiedArgs), mkListMap(reifiedAssocs))
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/reify/codegen/GenNames.scala b/src/compiler/scala/reflect/reify/codegen/GenNames.scala
new file mode 100644
index 0000000..4abf88f
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/codegen/GenNames.scala
@@ -0,0 +1,14 @@
+package scala.reflect.reify
+package codegen
+
+trait GenNames {
+ self: Reifier =>
+
+ import global._
+ import definitions._
+
+ def reifyName(name: Name) = {
+ val factory = if (name.isTypeName) nme.nmeNewTypeName else nme.nmeNewTermName
+ mirrorCall(factory, Literal(Constant(name.toString)))
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/reify/codegen/GenPositions.scala b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala
new file mode 100644
index 0000000..8c5db04
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala
@@ -0,0 +1,17 @@
+package scala.reflect.reify
+package codegen
+
+trait GenPositions {
+ self: Reifier =>
+
+ import global._
+ import definitions._
+
+ // we do not reify positions because this inflates resulting trees, but doesn't buy as anything
+ // where would one use positions? right, in error messages
+ // but I can hardly imagine when one would need a position that points to the reified code
+ // usually reified trees are used to compose macro expansions or to be fed to the runtime compiler
+ // however both macros and toolboxes have their own means to report errors in synthetic trees
+ def reifyPosition(pos: Position): Tree =
+ reifyMirrorObject(NoPosition)
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
new file mode 100644
index 0000000..47c966e
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
@@ -0,0 +1,180 @@
+package scala.reflect.reify
+package codegen
+
+import scala.reflect.internal.Flags._
+
+trait GenSymbols {
+ self: Reifier =>
+
+ import global._
+ import definitions._
+
+ /** Symbol table of the reifee.
+ *
+ * Keeps track of auxiliary symbols that are necessary for this reification session.
+ * These include:
+ * 1) Free vars (terms, types and existentials),
+ * 2) Non-locatable symbols (sometimes, e.g. for RefinedTypes, we need to reify these; to do that we create their local copies in the reificode)
+ * 3) Non-locatable symbols that are referred by #1, #2 and #3
+ *
+ * Exposes three main methods:
+ * 1) `syms` that lists symbols belonging to the table,
+ * 2) `symXXX` family of methods that provide information about the symbols in the table,
+ * 3) `encode` that renders the table into a list of trees (recursively populating #3 and setting up initialization code for #1, #2 and #3)
+ */
+ def symtab: SymbolTable = state.symtab
+
+ /** Reify a reference to a symbol */
+ def reifySymRef(sym: Symbol): Tree = {
+ assert(sym != null, "sym is null")
+ if (sym == NoSymbol)
+ mirrorSelect(nme.NoSymbol)
+ else if (sym.isRootPackage)
+ mirrorMirrorSelect(nme.RootPackage)
+ else if (sym.isRoot)
+ mirrorMirrorSelect(nme.RootClass)
+ else if (sym.isEmptyPackage)
+ mirrorMirrorSelect(nme.EmptyPackage)
+ else if (sym.isEmptyPackageClass)
+ mirrorMirrorSelect(nme.EmptyPackageClass)
+ else if (sym.isModuleClass)
+ if (sym.sourceModule.isLocatable) Select(Select(reify(sym.sourceModule), nme.asModule), nme.moduleClass)
+ else reifySymDef(sym)
+ else if (sym.isPackage)
+ mirrorMirrorCall(nme.staticPackage, reify(sym.fullName))
+ else if (sym.isLocatable) {
+ /** This is a fancy conundrum that stems from the fact that Scala allows
+ * packageless packages and packageless objects with the same names in the same program.
+ *
+ * For more details read the docs to staticModule and staticPackage.
+ * Here I'll just provide the examples of how reify works for different kinds of symbols.
+ *
+ * // 1) packageless
+ * // packageless classes are non-ambiguous, but modules vs packages might be
+ * // that's why we have separate methods to reify those
+ * // note that staticModule will never resolve to a package if an object is missing and an homonymous package is present and vice versa
+ * // otherwise reification would be unsound
+ * class C => staticClass("C")
+ * object B => staticModule("B")
+ * package B => staticPackage("B")
+ *
+ * // 2) classes and modules enclosed in a package
+ * // staticXXX methods always look into parent packages and ignores parent modules, so for fully qualified names they are non-ambiguous
+ * // namely even if there's an object B { class C } next to package B { class C }, then staticClass("B.C") will resolve to a packageful class
+ * // this closely mirrors Scala's behavior, read up the docs to staticModule/staticPackage for more information
+ * package B { class C } => staticClass("B.C")
+ * package B { object B } => staticModule("B.B")
+ * package B { package B } => staticPackage("B.B")
+ *
+ * // 3) classes and modules enclosed in a packageless module
+ * // staticClass/staticModule won't look into EmptyPackageClass, so we reify such symbols in a roundabout way
+ * object B { class C } => selectType(staticModule("B"), "C")
+ * object B { object B } => selectType(staticModule("B"), "B")
+ * object B { package B } => impossible
+ */
+ val hasPackagelessParent = sym.ownerChain.tail.tail exists (_.isEmptyPackageClass)
+ if (sym.isStatic && (sym.isClass || sym.isModule) && !hasPackagelessParent) {
+ // SI-6238: if applicable, emit references to StandardDefinitions instead of staticClass/staticModule calls
+ val resolver = if (sym.isType) nme.staticClass else nme.staticModule
+ mirrorMirrorCall(resolver, reify(sym.fullName))
+ } else {
+ if (reifyDebug) println("Locatable: %s (%s) owned by %s (%s) at %s".format(sym, sym.accurateKindString, sym.owner, sym.owner.accurateKindString, sym.owner.fullNameString))
+ val rowner = reify(sym.owner)
+ val rname = reify(sym.name.toString)
+ if (sym.isType)
+ mirrorBuildCall(nme.selectType, rowner, rname)
+ else if (sym.isMethod && sym.owner.isClass && sym.owner.info.decl(sym.name).isOverloaded) {
+ val index = sym.owner.info.decl(sym.name).alternatives indexOf sym
+ assert(index >= 0, sym)
+ mirrorBuildCall(nme.selectOverloadedMethod, rowner, rname, reify(index))
+ } else
+ mirrorBuildCall(nme.selectTerm, rowner, rname)
+ }
+ } else {
+ // todo. make sure that free methods and free local defs work correctly
+ if (sym.isExistential) reifySymDef(sym)
+ else if (sym.isTerm) reifyFreeTerm(Ident(sym))
+ else reifyFreeType(Ident(sym))
+ }
+ }
+
+ def reifyFreeTerm(binding: Tree): Tree =
+ reifyIntoSymtab(binding.symbol) { sym =>
+ if (reifyDebug) println("Free term" + (if (sym.isCapturedVariable) " (captured)" else "") + ": " + sym + "(" + sym.accurateKindString + ")")
+ val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name + (if (sym.isType) nme.REIFY_FREE_THIS_SUFFIX else ""))
+ // We need to note whether the free value being reified is stable or not to guide subsequent reflective compilation.
+ // Here's why reflection compilation needs our help.
+ //
+ // When dealing with a tree which contains free values, toolboxes extract those and wrap the entire tree in a Function
+ // having parameters defined for every free value in the tree. For example, evaluating
+ //
+ // Ident(setTypeSignature(newFreeTerm("x", 2), <Int>))
+ //
+ // Will generate something like
+ //
+ // object wrapper {
+ // def wrapper(x: () => Int) = {
+ // x()
+ // }
+ // }
+ //
+ // Note that free values get transformed into, effectively, by-name parameters. This is done to make sure
+ // that evaluation order is kept intact. And indeed, we cannot just evaluate all free values at once in order
+ // to obtain arguments for wrapper.wrapper, because if some of the free values end up being unused during evaluation,
+ // we might end up doing unnecessary calculations.
+ //
+ // So far, so good - we didn't need any flags at all. However, if the code being reified contains path-dependent types,
+ // we're in trouble, because valid code like `free.T` ends up being transformed into `free.apply().T`, which won't compile.
+ //
+ // To overcome this glitch, we note whether a given free term is stable or not (because vars can also end up being free terms).
+ // Then, if a free term is stable, we tell the compiler to treat `free.apply()` specially and assume that it's stable.
+ if (!sym.isMutable) sym setFlag STABLE
+ if (sym.isCapturedVariable) {
+ assert(binding.isInstanceOf[Ident], showRaw(binding))
+ val capturedBinding = referenceCapturedVariable(sym)
+ Reification(name, capturedBinding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), capturedBinding, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+ } else {
+ Reification(name, binding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), binding, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+ }
+ }
+
+ def reifyFreeType(binding: Tree): Tree =
+ reifyIntoSymtab(binding.symbol) { sym =>
+ if (reifyDebug) println("Free type: %s (%s)".format(sym, sym.accurateKindString))
+ state.reificationIsConcrete = false
+ val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name)
+ Reification(name, binding, mirrorBuildCall(nme.newFreeType, reify(sym.name.toString), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+ }
+
+ def reifySymDef(sym: Symbol): Tree =
+ reifyIntoSymtab(sym) { sym =>
+ if (reifyDebug) println("Sym def: %s (%s)".format(sym, sym.accurateKindString))
+ val name = newTermName(nme.REIFY_SYMDEF_PREFIX + sym.name)
+ def reifiedOwner = if (sym.owner.isLocatable) reify(sym.owner) else reifySymDef(sym.owner)
+ Reification(name, Ident(sym), mirrorBuildCall(nme.newNestedSymbol, reifiedOwner, reify(sym.name), reify(sym.pos), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(sym.isClass)))
+ }
+
+ case class Reification(name: Name, binding: Tree, tree: Tree)
+
+ private def reifyIntoSymtab(sym: Symbol)(reificode: Symbol => Reification): Tree = {
+ def fromSymtab = symtab symRef sym
+ if (fromSymtab == EmptyTree) {
+ // reification is lazy, so that we can carefully choose where to evaluate it
+ // and we choose this place to be exactly here:
+ //
+ // reasons:
+ // 1) reification happens at maximum once per symbol to prevent repeated reifications
+ // 2) reification happens before putting the symbol itself into the symbol table to ensure correct initialization order:
+ // for example, if reification of symbol A refers to reification of symbol B
+ // (this might happen when we're doing `reifySymDef`, which expands into `newNestedSymbol`, which needs `sym.owner`)
+ // then we have to put reification-B into the symbol table before reification-A
+ // so that subsequent code generation that traverses the symbol table in the first-added first-codegenned order
+ // produces valid Scala code (with vals in a block depending only on lexically preceding vals)
+ val reification = reificode(sym)
+ import reification.{name, binding}
+ val tree = reification.tree updateAttachment ReifyBindingAttachment(binding)
+ state.symtab += (sym, name, tree)
+ }
+ fromSymtab
+ }
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
new file mode 100644
index 0000000..9894e35
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -0,0 +1,239 @@
+package scala.reflect.reify
+package codegen
+
+trait GenTrees {
+ self: Reifier =>
+
+ import global._
+ import definitions._
+
+ // unfortunately, these are necessary to reify AnnotatedTypes
+ // I'd gladly get rid of them, but I don't fancy making a metaprogramming API that doesn't work with annotated types
+ // luckily for our sanity, these vars are mutated only within a very restricted code execution path
+ def reifyTreeSymbols: Boolean = state.reifyTreeSymbols
+ def reifyTreeTypes: Boolean = state.reifyTreeTypes
+
+ /**
+ * Reify a tree.
+ * For internal use only, use ``reified'' instead.
+ */
+ def reifyTree(tree: Tree): Tree = {
+ assert(tree != null, "tree is null")
+
+ if (tree.isErroneous)
+ CannotReifyErroneousReifee(tree)
+
+ val splicedTree = spliceTree(tree)
+ if (splicedTree != EmptyTree)
+ return splicedTree
+
+ // the idea behind the new reincarnation of reifier is a simple maxim:
+ //
+ // never call ``reifyType'' to reify a tree
+ //
+ // this works because the stuff we are reifying was once represented with trees only
+ // and lexical scope information can be fully captured by reifying symbols
+ //
+ // to enable this idyll, we work hard in the ``Reshape'' phase
+ // which replaces all types with equivalent trees and works around non-idempotencies of the typechecker
+ //
+ // why bother? because this brings method to the madness
+ // the first prototype of reification reified all types and symbols for all trees => this quickly became unwieldy
+ // the second prototype reified external types, but avoided reifying local ones => this created an ugly irregularity
+ // current approach is uniform and compact
+ var rtree = tree match {
+ case global.EmptyTree =>
+ reifyMirrorObject(EmptyTree)
+ case global.emptyValDef =>
+ mirrorBuildSelect(nme.emptyValDef)
+ case FreeDef(_, _, _, _, _) =>
+ reifyNestedFreeDef(tree)
+ case FreeRef(_, _) =>
+ reifyNestedFreeRef(tree)
+ case BoundTerm(tree) =>
+ reifyBoundTerm(tree)
+ case BoundType(tree) =>
+ reifyBoundType(tree)
+ case Literal(const @ Constant(_)) =>
+ mirrorCall(nme.Literal, reifyProduct(const))
+ case Import(expr, selectors) =>
+ mirrorCall(nme.Import, reify(expr), mkList(selectors map reifyProduct))
+ case _ =>
+ reifyProduct(tree)
+ }
+
+ // usually we don't reify symbols/types, because they can be re-inferred during subsequent reflective compilation
+ // however, reification of AnnotatedTypes is special. see ``reifyType'' to find out why.
+ if (reifyTreeSymbols && tree.hasSymbol) {
+ if (reifyDebug) println("reifying symbol %s for tree %s".format(tree.symbol, tree))
+ rtree = mirrorBuildCall(nme.setSymbol, rtree, reify(tree.symbol))
+ }
+ if (reifyTreeTypes && tree.tpe != null) {
+ if (reifyDebug) println("reifying type %s for tree %s".format(tree.tpe, tree))
+ rtree = mirrorBuildCall(nme.setType, rtree, reify(tree.tpe))
+ }
+
+ rtree
+ }
+
+ def reifyModifiers(m: global.Modifiers) =
+ mirrorFactoryCall(nme.Modifiers, mirrorBuildCall(nme.flagsFromBits, reify(m.flags)), reify(m.privateWithin), reify(m.annotations))
+
+ private def spliceTree(tree: Tree): Tree = {
+ tree match {
+ case TreeSplice(splicee) =>
+ if (reifyDebug) println("splicing " + tree)
+
+ // see ``Metalevels'' for more info about metalevel breaches
+ // and about how we deal with splices that contain them
+ val isMetalevelBreach = splicee exists (sub => sub.hasSymbol && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
+ val isRuntimeEval = splicee exists (sub => sub.hasSymbol && sub.symbol == ExprSplice)
+ if (isMetalevelBreach || isRuntimeEval) {
+ // we used to convert dynamic splices into runtime evals transparently, but we no longer do that
+ // why? see comments in ``Metalevels''
+ // if (reifyDebug) println("splicing has failed: cannot splice when facing a metalevel breach")
+ // EmptyTree
+ CannotReifyRuntimeSplice(tree)
+ } else {
+ if (reifyDebug) println("splicing has succeeded")
+ splicee match {
+ // we intentionally don't care about the prefix (the first underscore in the `ReifiedTree` pattern match)
+ case ReifiedTree(_, _, inlinedSymtab, rtree, _, _, _) =>
+ if (reifyDebug) println("inlining the splicee")
+ // all free vars local to the enclosing reifee should've already been inlined by ``Metalevels''
+ for (sym <- inlinedSymtab.syms if sym.isLocalToReifee)
+ abort("local free var, should have already been inlined by Metalevels: " + inlinedSymtab.symDef(sym))
+ state.symtab ++= inlinedSymtab
+ rtree
+ case tree =>
+ val migrated = Apply(Select(splicee, nme.in), List(Ident(nme.MIRROR_SHORT)))
+ Select(migrated, nme.tree)
+ }
+ }
+ case _ =>
+ EmptyTree
+ }
+ }
+
+ // unlike in `reifyBoundType` we can skip checking for `tpe` being local or not local w.r.t the reifee
+ // a single check for a symbol of the bound term should be enough
+ // that's because only Idents and Thises can be bound terms, and they cannot host complex types
+ private def reifyBoundTerm(tree: Tree): Tree = {
+ val sym = tree.symbol
+
+ tree match {
+ case This(qual) =>
+ assert(sym != NoSymbol, "unexpected: bound term that doesn't have a symbol: " + showRaw(tree))
+ if (sym.isLocalToReifee)
+ mirrorCall(nme.This, reify(qual))
+ else if (sym.isClass && !sym.isModuleClass) {
+ if (reifyDebug) println("This for %s, reified as freeVar".format(sym))
+ if (reifyDebug) println("Free: " + sym)
+ mirrorBuildCall(nme.Ident, reifyFreeTerm(This(sym)))
+ }
+ else {
+ if (reifyDebug) println("This for %s, reified as This".format(sym))
+ mirrorBuildCall(nme.This, reify(sym))
+ }
+
+ case Ident(name) =>
+ if (sym == NoSymbol) {
+ // this sometimes happens, e.g. for binds that don't have a body
+ // or for untyped code generated during previous phases
+ // (see a comment in Reifiers about the latter, starting with "why do we resetAllAttrs?")
+ mirrorCall(nme.Ident, reify(name))
+ }
+ else if (!sym.isLocalToReifee) {
+ if (sym.isVariable && sym.owner.isTerm) {
+ captureVariable(sym) // Note order dependency: captureVariable needs to come before reification here.
+ mirrorCall(nme.Select, mirrorBuildCall(nme.Ident, reify(sym)), reify(nme.elem))
+ }
+ else mirrorBuildCall(nme.Ident, reify(sym))
+ }
+ else mirrorCall(nme.Ident, reify(name))
+
+ case Select(qual, name) =>
+ if (qual.symbol != null && qual.symbol.isPackage) {
+ mirrorBuildCall(nme.Ident, reify(sym))
+ } else {
+ val effectiveName = if (sym != null && sym != NoSymbol) sym.name else name
+ reifyProduct(Select(qual, effectiveName))
+ }
+
+ case _ =>
+ throw new Error("internal error: %s (%s, %s) is not supported".format(tree, tree.productPrefix, tree.getClass))
+ }
+ }
+
+ private def reifyBoundType(tree: RefTree): Tree = {
+ val sym = tree.symbol
+ val tpe = tree.tpe
+
+ def reifyBoundType(tree: RefTree): Tree = {
+ assert(tpe != null, "unexpected: bound type that doesn't have a tpe: " + showRaw(tree))
+
+ // if a symbol or a type of the scrutinee are local to reifee
+ // (e.g. point to a locally declared class or to a path-dependent thingie that depends on a local variable)
+ // then we can reify the scrutinee as a symless AST and that will definitely be hygienic
+ // why? because then typechecking of a scrutinee doesn't depend on the environment external to the quasiquote
+ // otherwise we need to reify the corresponding type
+ if (sym.isLocalToReifee || tpe.isLocalToReifee || treeInfo.isWildcardStarType(tree))
+ reifyProduct(tree)
+ else {
+ if (reifyDebug) println("reifying bound type %s (underlying type is %s)".format(sym, tpe))
+
+ if (tpe.isSpliceable) {
+ val spliced = spliceType(tpe)
+
+ if (spliced == EmptyTree) {
+ if (reifyDebug) println("splicing failed: reify as is")
+ mirrorBuildCall(nme.TypeTree, reify(tpe))
+ }
+ else spliced match {
+ case TypeRefToFreeType(freeType) =>
+ if (reifyDebug) println("splicing returned a free type: " + freeType)
+ Ident(freeType)
+ case _ =>
+ if (reifyDebug) println("splicing succeeded: " + spliced)
+ mirrorBuildCall(nme.TypeTree, spliced)
+ }
+ }
+ else tree match {
+ case Select(qual, name) if !qual.symbol.isPackage =>
+ if (reifyDebug) println(s"reifying Select($qual, $name)")
+ mirrorCall(nme.Select, reify(qual), reify(name))
+ case SelectFromTypeTree(qual, name) =>
+ if (reifyDebug) println(s"reifying SelectFromTypeTree($qual, $name)")
+ mirrorCall(nme.SelectFromTypeTree, reify(qual), reify(name))
+ case _ if sym.isLocatable =>
+ if (reifyDebug) println(s"tpe is locatable: reify as Ident($sym)")
+ mirrorBuildCall(nme.Ident, reify(sym))
+ case _ =>
+ if (reifyDebug) println(s"tpe is not locatable: reify as TypeTree($tpe)")
+ mirrorBuildCall(nme.TypeTree, reify(tpe))
+ }
+ }
+ }
+
+ tree match {
+ case Select(qual, name) if name != sym.name =>
+ reifyBoundType(Select(qual, sym.name))
+
+ case Select(_, _) | SelectFromTypeTree(_, _) | Ident(_) =>
+ reifyBoundType(tree)
+
+ case _ =>
+ throw new Error("internal error: %s (%s, %s) is not supported".format(tree, tree.productPrefix, tree.getClass))
+ }
+ }
+
+ private def reifyNestedFreeDef(tree: Tree): Tree = {
+ if (reifyDebug) println("nested free def: %s".format(showRaw(tree)))
+ reifyProduct(tree)
+ }
+
+ private def reifyNestedFreeRef(tree: Tree): Tree = {
+ if (reifyDebug) println("nested free ref: %s".format(showRaw(tree)))
+ reifyProduct(tree)
+ }
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
new file mode 100644
index 0000000..bb7e1f9
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
@@ -0,0 +1,197 @@
+package scala.reflect.reify
+package codegen
+
+trait GenTypes {
+ self: Reifier =>
+
+ import global._
+ import definitions._
+
+ /**
+ * Reify a type.
+ * For internal use only, use ``reified'' instead.
+ */
+ def reifyType(tpe: Type): Tree = {
+ assert(tpe != null, "tpe is null")
+
+ if (tpe.isErroneous)
+ CannotReifyErroneousReifee(tpe)
+ if (tpe.isLocalToReifee)
+ CannotReifyType(tpe)
+
+ // this is a very special case. see the comments below for more info.
+ if (isSemiConcreteTypeMember(tpe))
+ return reifySemiConcreteTypeMember(tpe)
+
+ // SI-6242: splicing might violate type bounds
+ val spliced = spliceType(tpe)
+ if (spliced != EmptyTree)
+ return spliced
+
+ val tsym = tpe.typeSymbolDirect
+ if (tsym.isClass && tpe == tsym.typeConstructor && tsym.isStatic)
+ Select(Select(reify(tsym), nme.asType), nme.toTypeConstructor)
+ else tpe match {
+ case tpe @ NoType =>
+ reifyMirrorObject(tpe)
+ case tpe @ NoPrefix =>
+ reifyMirrorObject(tpe)
+ case tpe @ ThisType(root) if root.isRoot =>
+ mirrorBuildCall(nme.thisPrefix, mirrorMirrorSelect(nme.RootClass))
+ case tpe @ ThisType(empty) if empty.isEmptyPackageClass =>
+ mirrorBuildCall(nme.thisPrefix, mirrorMirrorSelect(nme.EmptyPackageClass))
+ case tpe @ ThisType(clazz) if clazz.isModuleClass && clazz.isStatic =>
+ val module = reify(clazz.sourceModule)
+ val moduleClass = Select(Select(module, nme.asModule), nme.moduleClass)
+ mirrorFactoryCall(nme.ThisType, moduleClass)
+ case tpe @ ThisType(_) =>
+ reifyProduct(tpe)
+ case tpe @ SuperType(thistpe, supertpe) =>
+ reifyProduct(tpe)
+ case tpe @ SingleType(pre, sym) =>
+ reifyProduct(tpe)
+ case tpe @ ConstantType(value) =>
+ mirrorFactoryCall(nme.ConstantType, reifyProduct(value))
+ case tpe @ TypeRef(pre, sym, args) =>
+ reifyProduct(tpe)
+ case tpe @ TypeBounds(lo, hi) =>
+ reifyProduct(tpe)
+ case tpe @ NullaryMethodType(restpe) =>
+ reifyProduct(tpe)
+ case tpe @ AnnotatedType(anns, underlying, selfsym) =>
+ reifyAnnotatedType(tpe)
+ case _ =>
+ reifyToughType(tpe)
+ }
+ }
+
+ /** Keeps track of whether this reification contains abstract type parameters */
+ def reificationIsConcrete: Boolean = state.reificationIsConcrete
+
+ def spliceType(tpe: Type): Tree = {
+ if (tpe.isSpliceable && !(boundSymbolsInCallstack contains tpe.typeSymbol)) {
+ if (reifyDebug) println("splicing " + tpe)
+
+ val tagFlavor = if (concrete) tpnme.TypeTag.toString else tpnme.WeakTypeTag.toString
+ val key = (tagFlavor, tpe.typeSymbol)
+ // if this fails, it might produce the dreaded "erroneous or inaccessible type" error
+ // to find out the whereabouts of the error run scalac with -Ydebug
+ if (reifyDebug) println("launching implicit search for %s.%s[%s]".format(universe, tagFlavor, tpe))
+ val result =
+ typer.resolveTypeTag(defaultErrorPosition, universe.tpe, tpe, concrete = concrete, allowMaterialization = false) match {
+ case failure if failure.isEmpty =>
+ if (reifyDebug) println("implicit search was fruitless")
+ if (reifyDebug) println("trying to splice as manifest")
+ val splicedAsManifest = spliceAsManifest(tpe)
+ if (splicedAsManifest.isEmpty) {
+ if (reifyDebug) println("no manifest in scope")
+ EmptyTree
+ } else {
+ if (reifyDebug) println("successfully spliced as manifest: " + splicedAsManifest)
+ splicedAsManifest
+ }
+ case success =>
+ if (reifyDebug) println("implicit search has produced a result: " + success)
+ state.reificationIsConcrete &= concrete || success.tpe <:< TypeTagClass.toTypeConstructor
+ Select(Apply(Select(success, nme.in), List(Ident(nme.MIRROR_SHORT))), nme.tpe)
+ }
+ if (result != EmptyTree) return result
+ state.reificationIsConcrete = false
+ }
+
+ EmptyTree
+ }
+
+ private def spliceAsManifest(tpe: Type): Tree = {
+ def isSynthetic(manifest: Tree) = manifest exists (sub => sub.symbol != null && (sub.symbol == FullManifestModule || sub.symbol.owner == FullManifestModule))
+ def searchForManifest(typer: analyzer.Typer): Tree =
+ analyzer.inferImplicit(
+ EmptyTree,
+ appliedType(FullManifestClass.toTypeConstructor, List(tpe)),
+ reportAmbiguous = false,
+ isView = false,
+ context = typer.context,
+ saveAmbiguousDivergent = false,
+ pos = defaultErrorPosition) match {
+ case success if !success.tree.isEmpty && !isSynthetic(success.tree) =>
+ val manifestInScope = success.tree
+ // todo. write a test for this
+ if (ReflectRuntimeUniverse == NoSymbol) CannotConvertManifestToTagWithoutScalaReflect(tpe, manifestInScope)
+ val cm = typer.typed(Ident(ReflectRuntimeCurrentMirror))
+ val tagTree = gen.mkMethodCall(ReflectRuntimeUniverse, nme.manifestToTypeTag, List(tpe), List(cm, manifestInScope))
+ Select(Apply(Select(tagTree, nme.in), List(Ident(nme.MIRROR_SHORT))), nme.tpe)
+ case _ =>
+ EmptyTree
+ }
+ val result = typer.silent(silentTyper => silentTyper.context.withMacrosDisabled(searchForManifest(silentTyper)))
+ result match {
+ case analyzer.SilentResultValue(result) => result
+ case analyzer.SilentTypeError(_) => EmptyTree
+ }
+ }
+
+ /** Reify a semi-concrete type member.
+ *
+ * This is a VERY special case to deal with stuff like `typeOf[ru.Type]`.
+ * In that case `Type`, which is an abstract type member of scala.reflect.api.Universe, is not a free type.
+ * Why? Because we know its prefix, and it unambiguously determines the type.
+ *
+ * Here is a different view on this question that supports this suggestion.
+ * Say, you reify a tree. Iff it doesn't contain free types, it can be successfully compiled and run.
+ * For example, if you reify `tpe.asInstanceOf[T]` taken from `def foo[T]`, then you won't be able to compile the result.
+ * Fair enough, you don't know the `T`, so the compiler will choke.
+ * This fact is captured by reification result having a free type T (this can be inspected by calling `tree.freeTypes`).
+ * Now imagine you reify the following tree: `tpe.asInstanceOf[ru.Type]`.
+ * To the contrast with the previous example, that's totally not a problem.
+ *
+ * Okay, so we figured out that `ru.Type` is not a free type.
+ * However, in our reification framework, this type would be treated as a free type.
+ * Why? Because `tpe.isSpliceable` will return true.
+ * Hence we intervene and handle this situation in a special way.
+ *
+ * By the way, we cannot change the definition of `isSpliceable`, because class tags also depend on it.
+ * And, you know, class tags don't care whether we select a type member from a concrete instance or get it from scope (as with type parameters).
+ * The type itself still remains not concrete, in the sense that we don't know its erasure.
+ * I.e. we can compile the code that involves `ru.Type`, but we cannot serialize an instance of `ru.Type`.
+ */
+ private def reifySemiConcreteTypeMember(tpe: Type): Tree = tpe match {
+ case tpe @ TypeRef(pre @ SingleType(prepre, presym), sym, args) if sym.isAbstractType && !sym.isExistential =>
+ return mirrorFactoryCall(nme.TypeRef, reify(pre), mirrorBuildCall(nme.selectType, reify(sym.owner), reify(sym.name.toString)), reify(args))
+ }
+
+ /** Reify an annotated type, i.e. the one that makes us deal with AnnotationInfos */
+ private def reifyAnnotatedType(tpe: AnnotatedType): Tree = {
+ val AnnotatedType(anns, underlying, selfsym) = tpe
+ mirrorFactoryCall(nme.AnnotatedType, mkList(anns map reifyAnnotationInfo), reify(underlying), reify(selfsym))
+ }
+
+ /** Reify a tough type, i.e. the one that leads to creation of auxiliary symbols */
+ private def reifyToughType(tpe: Type): Tree = {
+ if (reifyDebug) println("tough type: %s (%s)".format(tpe, tpe.kind))
+
+ def reifyScope(scope: Scope): Tree = {
+ scope foreach reifySymDef
+ mirrorCall(nme.newScopeWith, scope.toList map reify: _*)
+ }
+
+ tpe match {
+ case tpe @ RefinedType(parents, decls) =>
+ reifySymDef(tpe.typeSymbol)
+ mirrorFactoryCall(tpe, reify(parents), reifyScope(decls), reify(tpe.typeSymbol))
+ case tpe @ ExistentialType(tparams, underlying) =>
+ tparams foreach reifySymDef
+ mirrorFactoryCall(tpe, reify(tparams), reify(underlying))
+ case tpe @ ClassInfoType(parents, decls, clazz) =>
+ reifySymDef(clazz)
+ mirrorFactoryCall(tpe, reify(parents), reifyScope(decls), reify(tpe.typeSymbol))
+ case tpe @ MethodType(params, restpe) =>
+ params foreach reifySymDef
+ mirrorFactoryCall(tpe, reify(params), reify(restpe))
+ case tpe @ PolyType(tparams, underlying) =>
+ tparams foreach reifySymDef
+ mirrorFactoryCall(tpe, reify(tparams), reify(underlying))
+ case _ =>
+ throw new Error("internal error: %s (%s) is not supported".format(tpe, tpe.kind))
+ }
+ }
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
new file mode 100644
index 0000000..49877b4
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -0,0 +1,148 @@
+package scala.reflect.reify
+package codegen
+
+trait GenUtils {
+ self: Reifier =>
+
+ import global._
+ import definitions._
+
+ def reifyList(xs: List[Any]): Tree =
+ mkList(xs map reify)
+
+ def reifyProduct(x: Product): Tree =
+ reifyProduct(x.productPrefix, x.productIterator.toList)
+
+ def reifyProduct(prefix: String, elements: List[Any]): Tree = {
+ // reflection would be more robust, but, hey, this is a hot path
+ if (prefix.startsWith("Tuple")) scalaFactoryCall(prefix, (elements map reify).toList: _*)
+ else mirrorCall(prefix, (elements map reify): _*)
+ }
+
+ // helper functions
+
+ /** Reify a case object defined in Mirror */
+ def reifyMirrorObject(name: String): Tree =
+ mirrorSelect(name)
+
+ def reifyMirrorObject(x: Product): Tree =
+ reifyMirrorObject(x.productPrefix)
+
+ def call(fname: String, args: Tree*): Tree =
+ Apply(termPath(fname), args.toList)
+
+ def mirrorSelect(name: String): Tree =
+ termPath(nme.UNIVERSE_PREFIX + name)
+
+ def mirrorBuildSelect(name: String): Tree =
+ termPath(nme.UNIVERSE_BUILD_PREFIX + name)
+
+ def mirrorMirrorSelect(name: String): Tree =
+ termPath(nme.MIRROR_PREFIX + name)
+
+ def mirrorCall(name: TermName, args: Tree*): Tree =
+ call("" + (nme.UNIVERSE_PREFIX append name), args: _*)
+
+ def mirrorCall(name: String, args: Tree*): Tree =
+ call(nme.UNIVERSE_PREFIX + name, args: _*)
+
+ def mirrorBuildCall(name: TermName, args: Tree*): Tree =
+ call("" + (nme.UNIVERSE_BUILD_PREFIX append name), args: _*)
+
+ def mirrorBuildCall(name: String, args: Tree*): Tree =
+ call(nme.UNIVERSE_BUILD_PREFIX + name, args: _*)
+
+ def mirrorMirrorCall(name: TermName, args: Tree*): Tree =
+ call("" + (nme.MIRROR_PREFIX append name), args: _*)
+
+ def mirrorMirrorCall(name: String, args: Tree*): Tree =
+ call(nme.MIRROR_PREFIX + name, args: _*)
+
+ def mirrorFactoryCall(value: Product, args: Tree*): Tree =
+ mirrorFactoryCall(value.productPrefix, args: _*)
+
+ def mirrorFactoryCall(prefix: String, args: Tree*): Tree =
+ mirrorCall(prefix, args: _*)
+
+ def scalaFactoryCall(name: String, args: Tree*): Tree =
+ call("scala." + name + ".apply", args: _*)
+
+ def mkList(args: List[Tree]): Tree =
+ scalaFactoryCall("collection.immutable.List", args: _*)
+
+ def mkListMap(args: List[Tree]): Tree =
+ scalaFactoryCall("collection.immutable.ListMap", args: _*)
+
+ /**
+ * An (unreified) path that refers to definition with given fully qualified name
+ * @param mkName Creator for last portion of name (either TermName or TypeName)
+ */
+ def path(fullname: String, mkName: String => Name): Tree = {
+ val parts = fullname split "\\."
+ val prefixParts = parts.init
+ val lastName = mkName(parts.last)
+ if (prefixParts.isEmpty) Ident(lastName)
+ else {
+ val prefixTree = ((Ident(prefixParts.head): Tree) /: prefixParts.tail)(Select(_, _))
+ Select(prefixTree, lastName)
+ }
+ }
+
+ /** An (unreified) path that refers to term definition with given fully qualified name */
+ def termPath(fullname: String): Tree = path(fullname, newTermName)
+
+ /** An (unreified) path that refers to type definition with given fully qualified name */
+ def typePath(fullname: String): Tree = path(fullname, newTypeName)
+
+ def isTough(tpe: Type) = {
+ def isTough(tpe: Type) = tpe match {
+ case _: RefinedType => true
+ case _: ExistentialType => true
+ case _: ClassInfoType => true
+ case _: MethodType => true
+ case _: PolyType => true
+ case _ => false
+ }
+
+ tpe != null && (tpe exists isTough)
+ }
+
+ object TypedOrAnnotated {
+ def unapply(tree: Tree): Option[Tree] = tree match {
+ case ty @ Typed(_, _) =>
+ Some(ty)
+ case at @ Annotated(_, _) =>
+ Some(at)
+ case _ =>
+ None
+ }
+ }
+
+ def isAnnotated(tpe: Type) = {
+ def isAnnotated(tpe: Type) = tpe match {
+ case _: AnnotatedType => true
+ case _ => false
+ }
+
+ tpe != null && (tpe exists isAnnotated)
+ }
+
+ def isSemiConcreteTypeMember(tpe: Type) = tpe match {
+ case TypeRef(SingleType(_, _), sym, _) if sym.isAbstractType && !sym.isExistential => true
+ case _ => false
+ }
+
+ def isCrossStageTypeBearer(tree: Tree): Boolean = tree match {
+ case TypeApply(hk, _) => isCrossStageTypeBearer(hk)
+ case Select(sym @ Select(_, ctor), nme.apply) if ctor == nme.WeakTypeTag || ctor == nme.TypeTag || ctor == nme.Expr => true
+ case _ => false
+ }
+
+ def origin(sym: Symbol) = {
+ var origin = ""
+ if (sym.owner != NoSymbol) origin += "defined by %s".format(sym.owner.name)
+ if (sym.pos != NoPosition) origin += " in %s:%s:%s".format(sym.pos.source.file.name, sym.pos.line, sym.pos.column)
+ if (origin == "") origin = "of unknown origin"
+ origin
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala
new file mode 100644
index 0000000..6777bb0
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/package.scala
@@ -0,0 +1,93 @@
+package scala.reflect
+
+import scala.language.implicitConversions
+import scala.reflect.macros.{Context, ReificationException, UnexpectedReificationException}
+import scala.tools.nsc.Global
+
+package object reify {
+ private def mkReifier(global1: Global)(typer: global1.analyzer.Typer, universe: global1.Tree, mirror: global1.Tree, reifee: Any, concrete: Boolean = false): Reifier { val global: global1.type } = {
+ val typer1: typer.type = typer
+ val universe1: universe.type = universe
+ val mirror1: mirror.type = mirror
+ val reifee1 = reifee
+ val concrete1 = concrete
+
+ new {
+ val global: global1.type = global1
+ val typer = typer1
+ val universe = universe1
+ val mirror = mirror1
+ val reifee = reifee1
+ val concrete = concrete1
+ } with Reifier
+ }
+
+ private[reify] def mkDefaultMirrorRef(global: Global)(universe: global.Tree, typer0: global.analyzer.Typer): global.Tree = {
+ import global._
+ import definitions._
+ val enclosingErasure = {
+ val rClassTree = reifyEnclosingRuntimeClass(global)(typer0)
+ // HACK around SI-6259
+ // If we're in the constructor of an object or otherwise don't have easy access to `this`, we have no good way to grab
+ // the class of that object. Instead, we construct an anonymous class and grab its class file, assuming
+ // this is enough to get the correct class loader for the class we *want* a mirror for, the object itself.
+ rClassTree orElse Apply(Select(treeBuilder.makeAnonymousNew(Nil), sn.GetClass), Nil)
+ }
+ // JavaUniverse is defined in scala-reflect.jar, so we must be very careful in case someone reifies stuff having only scala-library.jar on the classpath
+ val isJavaUniverse = JavaUniverseClass != NoSymbol && universe.tpe <:< JavaUniverseClass.toTypeConstructor
+ if (isJavaUniverse && !enclosingErasure.isEmpty) Apply(Select(universe, nme.runtimeMirror), List(Select(enclosingErasure, sn.GetClassLoader)))
+ else Select(universe, nme.rootMirror)
+ }
+
+ def reifyTree(global: Global)(typer: global.analyzer.Typer, universe: global.Tree, mirror: global.Tree, tree: global.Tree): global.Tree =
+ mkReifier(global)(typer, universe, mirror, tree, concrete = false).reification.asInstanceOf[global.Tree]
+
+ def reifyType(global: Global)(typer: global.analyzer.Typer,universe: global.Tree, mirror: global.Tree, tpe: global.Type, concrete: Boolean = false): global.Tree =
+ mkReifier(global)(typer, universe, mirror, tpe, concrete = concrete).reification.asInstanceOf[global.Tree]
+
+ def reifyRuntimeClass(global: Global)(typer0: global.analyzer.Typer, tpe0: global.Type, concrete: Boolean = true): global.Tree = {
+ import global._
+ import definitions._
+ import analyzer.enclosingMacroPosition
+
+ // SI-7375
+ val tpe = tpe0.dealiasWiden
+
+ if (tpe.isSpliceable) {
+ val classTagInScope = typer0.resolveClassTag(enclosingMacroPosition, tpe, allowMaterialization = false)
+ if (!classTagInScope.isEmpty) return Select(classTagInScope, nme.runtimeClass)
+ if (concrete) throw new ReificationException(enclosingMacroPosition, "tpe %s is an unresolved spliceable type".format(tpe))
+ }
+
+ tpe match {
+ case TypeRef(_, ArrayClass, componentTpe :: Nil) =>
+ val componentErasure = reifyRuntimeClass(global)(typer0, componentTpe, concrete)
+ gen.mkMethodCall(arrayClassMethod, List(componentErasure))
+ case _ =>
+ var erasure = tpe.erasure
+ if (tpe.typeSymbol.isDerivedValueClass && global.phase.id < global.currentRun.erasurePhase.id) erasure = tpe
+ gen.mkNullaryCall(Predef_classOf, List(erasure))
+ }
+ }
+
+ // Note: If current context is inside the constructor of an object or otherwise not inside
+ // a class/object body, this will return an EmptyTree.
+ def reifyEnclosingRuntimeClass(global: Global)(typer0: global.analyzer.Typer): global.Tree = {
+ import global._
+ import definitions._
+ def isThisInScope = typer0.context.enclosingContextChain exists (_.tree.isInstanceOf[ImplDef])
+ if (isThisInScope) {
+ val enclosingClasses = typer0.context.enclosingContextChain map (_.tree) collect { case classDef: ClassDef => classDef }
+ val classInScope = enclosingClasses.headOption getOrElse EmptyTree
+ def isUnsafeToUseThis = {
+ val isInsideConstructorSuper = typer0.context.enclosingContextChain exists (_.inSelfSuperCall)
+ // Note: It's ok to check for any object here, because if we were in an enclosing class, we'd already have returned its classOf
+ val isInsideObject = typer0.context.enclosingContextChain map (_.tree) exists { case _: ModuleDef => true; case _ => false }
+ isInsideConstructorSuper && isInsideObject
+ }
+ if (!classInScope.isEmpty) reifyRuntimeClass(global)(typer0, classInScope.symbol.toTypeConstructor, concrete = true)
+ else if(!isUnsafeToUseThis) Select(This(tpnme.EMPTY), sn.GetClass)
+ else EmptyTree
+ } else EmptyTree
+ }
+}
diff --git a/src/compiler/scala/reflect/reify/phases/Calculate.scala b/src/compiler/scala/reflect/reify/phases/Calculate.scala
new file mode 100644
index 0000000..4d1e22a
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/phases/Calculate.scala
@@ -0,0 +1,61 @@
+package scala.reflect.reify
+package phases
+
+trait Calculate {
+ self: Reifier =>
+
+ import global._
+ import definitions._
+
+ implicit class RichCalculateSymbol(sym: Symbol) {
+ def metalevel: Int = { assert(sym != null && sym != NoSymbol); localSymbols.getOrElse(sym, 0) }
+ def isLocalToReifee = (localSymbols contains sym) // todo. how do I account for local skolems?
+ }
+
+ implicit class RichCalculateType(tpe: Type) {
+ def isLocalToReifee = tpe != null && (tpe exists (tp => (localSymbols contains tp.typeSymbol) || (localSymbols contains tp.termSymbol)))
+ }
+
+ private def localSymbols: Map[Symbol, Int] = state.localSymbols // set of all symbols that are local to the tree to be reified
+ private def localSymbols_=(value: Map[Symbol, Int]): Unit = state.localSymbols = value
+ private def registerLocalSymbol(sym: Symbol, metalevel: Int): Unit =
+ if (sym != null && sym != NoSymbol) {
+ if (localSymbols contains sym)
+ assert(localSymbols(sym) == metalevel, "metalevel mismatch: expected %s, actual %s".format(localSymbols(sym), metalevel))
+ else
+ localSymbols += (sym -> metalevel)
+ }
+
+ /**
+ * Merely traverses the reifiee and records local symbols along with their metalevels.
+ */
+ val calculate = new Traverser {
+ // see the explanation of metalevels in ``Metalevels''
+ var currMetalevel = 1
+
+ override def traverse(tree: Tree): Unit = tree match {
+ case TreeSplice(_) =>
+ currMetalevel -= 1
+ try super.traverse(tree)
+ finally currMetalevel += 1
+ case tree if tree.isDef =>
+ if (reifyDebug) println("boundSym: %s of type %s".format(tree.symbol, (tree.productIterator.toList collect { case tt: TypeTree => tt }).headOption.getOrElse(TypeTree(tree.tpe))))
+ registerLocalSymbol(tree.symbol, currMetalevel)
+
+ bindRelatedSymbol(tree.symbol.sourceModule, "sourceModule")
+ bindRelatedSymbol(tree.symbol.moduleClass, "moduleClass")
+ bindRelatedSymbol(tree.symbol.companionClass, "companionClass")
+ bindRelatedSymbol(tree.symbol.companionModule, "companionModule")
+ Some(tree.symbol) collect { case termSymbol: TermSymbol => bindRelatedSymbol(termSymbol.referenced, "referenced") }
+ Some(tree) collect { case labelDef: LabelDef => labelDef.params foreach (param => bindRelatedSymbol(param.symbol, "labelParam")) }
+ def bindRelatedSymbol(related: Symbol, name: String): Unit =
+ if (related != null && related != NoSymbol) {
+ if (reifyDebug) println("boundSym (" + name + "): " + related)
+ registerLocalSymbol(related, currMetalevel)
+ }
+ super.traverse(tree)
+ case _ =>
+ super.traverse(tree)
+ }
+ }
+}
diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
new file mode 100644
index 0000000..fbbd12a
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
@@ -0,0 +1,150 @@
+package scala.reflect.reify
+package phases
+
+trait Metalevels {
+ self: Reifier =>
+
+ import global._
+ import definitions._
+
+ /**
+ * Makes sense of cross-stage bindings.
+ *
+ * ================
+ *
+ * Analysis of cross-stage bindings becomes convenient if we introduce the notion of metalevels.
+ * Metalevel of a tree is a number that gets incremented every time you reify something and gets decremented when you splice something.
+ * Metalevel of a symbol is equal to the metalevel of its definition.
+ *
+ * Example 1. Consider the following snippet:
+ *
+ * reify {
+ * val x = 2 // metalevel of symbol x is 1, because it's declared inside reify
+ * val y = reify{x} // metalevel of symbol y is 1, because it's declared inside reify
+ * // metalevel of Ident(x) is 2, because it's inside two reifies
+ * y.splice // metalevel of Ident(y) is 0, because it's inside a designator of a splice
+ * }
+ *
+ * Cross-stage bindings are introduced when symbol.metalevel != curr_metalevel.
+ * Both bindings introduced in Example 1 are cross-stage.
+ *
+ * Depending on what side of the inequality is greater, the following situations might occur:
+ *
+ * 1) symbol.metalevel < curr_metalevel. In this case reifier will generate a free variable
+ * that captures both the name of the symbol (to be compiled successfully) and its value (to be run successfully).
+ * For example, x in Example 1 will be reified as follows: Ident(newFreeVar("x", IntClass.tpe, x))
+ *
+ * 2) symbol.metalevel > curr_metalevel. This leads to a metalevel breach that violates intuitive perception of splicing.
+ * As defined in macro spec, splicing takes a tree and inserts it into another tree - as simple as that.
+ * However, how exactly do we do that in the case of y.splice? In this very scenario we can use dataflow analysis and inline it,
+ * but what if y were a var, and what if it were calculated randomly at runtime?
+ *
+ * This question has a genuinely simple answer. Sure, we cannot resolve such splices statically (i.e. during macro expansion of ``reify''),
+ * but now we have runtime toolboxes, so noone stops us from picking up that reified tree and evaluating it at runtime
+ * (in fact, this is something that ``Expr.splice'' does transparently).
+ *
+ * This is akin to early vs late binding dilemma.
+ * The prior is faster, plus, the latter (implemented with reflection) might not work because of visibility issues or might be not available on all platforms.
+ * But the latter still has its uses, so I'm allowing metalevel breaches, but introducing the -Xlog-runtime-evals to log them.
+ *
+ * upd. We no longer do that. In case of a runaway ``splice'' inside a `reify`, one will get a static error.
+ * Why? Unfortunately, the cute idea of transparently converting between static and dynamic splices has failed.
+ * 1) Runtime eval that services dynamic splices requires scala-compiler.jar, which might not be on library classpath
+ * 2) Runtime eval incurs a severe performance penalty, so it'd better to be explicit about it
+ *
+ * ================
+ *
+ * As we can see, the only problem is the fact that lhs'es of `splice` can be code blocks that can capture variables from the outside.
+ * Code inside the lhs of an `splice` is not reified, while the code from the enclosing reify is.
+ *
+ * Hence some bindings become cross-stage, which is not bad per se (in fact, some cross-stage bindings have sane semantics, as in the example above).
+ * However this affects freevars, since they are delicate inter-dimensional beings that refer to both current and next planes of existence.
+ * When splicing tears the fabric of the reality apart, some freevars have to go single-dimensional to retain their sanity.
+ *
+ * Example 2. Consider the following snippet:
+ *
+ * reify {
+ * val x = 2
+ * reify{x}.splice
+ * }
+ *
+ * Since the result of the inner reify is wrapped in a splice, it won't be reified
+ * together with the other parts of the outer reify, but will be inserted into that result verbatim.
+ *
+ * The inner reify produces an Expr[Int] that wraps Ident(freeVar("x", IntClass.tpe, x)).
+ * However the freevar the reification points to will vanish when the compiler processes the outer reify.
+ * That's why we need to replace that freevar with a regular symbol that will point to reified x.
+ *
+ * Example 3. Consider the following fragment:
+ *
+ * reify {
+ * val x = 2
+ * val y = reify{x}
+ * y.splice
+ * }
+ *
+ * In this case the inner reify doesn't appear next to splice, so it will be reified together with x.
+ * This means that no special processing is needed here.
+ *
+ * Example 4. Consider the following fragment:
+ *
+ * reify {
+ * val x = 2
+ * {
+ * val y = 2
+ * val z = reify{reify{x + y}}
+ * z.splice
+ * }.splice
+ * }
+ *
+ * The reasoning from Example 2 still holds here - we do need to inline the freevar that refers to x.
+ * However, we must not touch anything inside the splice'd block, because it's not getting reified.
+ */
+ val metalevels = new Transformer {
+ var insideSplice = false
+ var inlineableBindings = scala.collection.mutable.Map[TermName, Tree]()
+
+ def withinSplice[T](op: => T) = {
+ val old = insideSplice
+ insideSplice = true
+ try op
+ finally insideSplice = old
+ }
+
+ // Q: here we deal with all sorts of reified trees. what about ReifiedType(_, _, _, _, _, _)?
+ // A: nothing. reified trees give us problems because they sometimes create dimensional rifts as described above
+ // to the contrast, reified types (i.e. synthetic typetags materialized by Implicits.scala) always stay on the same metalevel as their enclosing code
+ override def transform(tree: Tree): Tree = tree match {
+ case TreeSplice(ReifiedTree(universe, mirror, symtab, rtree, tpe, rtpe, concrete)) =>
+ if (reifyDebug) println("entering inlineable splice: " + tree)
+ val inlinees = symtab.syms filter (_.isLocalToReifee)
+ inlinees foreach (inlinee => symtab.symAliases(inlinee) foreach (alias => inlineableBindings(alias) = symtab.symBinding(inlinee)))
+ val symtab1 = symtab -- inlinees
+ if (reifyDebug) println("trimmed %s inlineable free defs from its symbol table: %s".format(inlinees.length, inlinees map (inlinee => symtab.symName(inlinee)) mkString(", ")))
+ withinSplice { super.transform(TreeSplice(ReifiedTree(universe, mirror, symtab1, rtree, tpe, rtpe, concrete))) }
+ case TreeSplice(splicee) =>
+ if (reifyDebug) println("entering splice: " + splicee)
+ val breaches = splicee filter (sub => sub.hasSymbol && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
+ if (!insideSplice && breaches.nonEmpty) {
+ // we used to convert dynamic splices into runtime evals transparently, but we no longer do that
+ // why? see comments above
+ // if (settings.logRuntimeSplices.value) reporter.echo(tree.pos, "this splice cannot be resolved statically")
+ // withinSplice { super.transform(tree) }
+ if (reifyDebug) println("metalevel breach in %s: %s".format(tree, (breaches map (_.symbol)).distinct mkString ", "))
+ CannotReifyRuntimeSplice(tree)
+ } else {
+ withinSplice { super.transform(tree) }
+ }
+ // todo. also inline usages of ``inlineableBindings'' in the symtab itself
+ // e.g. a free$Foo can well use free$x, if Foo is path-dependent w.r.t x
+ // FreeRef(_, _) check won't work, because metalevels of symbol table and body are different, hence, freerefs in symbol table look different from freerefs in body
+ case FreeRef(_, name) if inlineableBindings contains name =>
+ if (reifyDebug) println("inlineable free ref: %s in %s".format(name, showRaw(tree)))
+ val inlined = reify(inlineableBindings(name))
+ if (reifyDebug) println("verdict: inlined as %s".format(showRaw(inlined)))
+ inlined
+ case _ =>
+ super.transform(tree)
+ }
+ }
+}
diff --git a/src/compiler/scala/reflect/reify/phases/Reify.scala b/src/compiler/scala/reflect/reify/phases/Reify.scala
new file mode 100644
index 0000000..8e13a45
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/phases/Reify.scala
@@ -0,0 +1,62 @@
+package scala.reflect.reify
+package phases
+
+import scala.runtime.ScalaRunTime.isAnyVal
+import scala.runtime.ScalaRunTime.isTuple
+import scala.reflect.reify.codegen._
+
+trait Reify extends GenSymbols
+ with GenTypes
+ with GenNames
+ with GenTrees
+ with GenAnnotationInfos
+ with GenPositions
+ with GenUtils {
+
+ self: Reifier =>
+
+ import global._
+ import definitions._
+
+ private object reifyStack {
+ def currents: List[Any] = state.reifyStack
+ def currents_=(value: List[Any]): Unit = state.reifyStack = value
+
+ @inline final def push[T](reifee: Any)(body: => T): T = {
+ currents ::= reifee
+ try body
+ finally currents = currents.tail
+ }
+ }
+ def boundSymbolsInCallstack = flatCollect(reifyStack.currents) {
+ case ExistentialType(quantified, _) => quantified
+ case PolyType(typeParams, _) => typeParams
+ }
+ def current = reifyStack.currents.head
+ def currents = reifyStack.currents
+
+ /**
+ * Reifies any supported value.
+ * For internal use only, use ``reified'' instead.
+ */
+ def reify(reifee: Any): Tree = reifyStack.push(reifee)(reifee match {
+ // before adding some case here, in global scope, please, consider
+ // whether it can be localized like reifyAnnotationInfo or reifyScope
+ // this will help reification stay as sane as possible
+ case sym: Symbol => reifySymRef(sym)
+ case tpe: Type => reifyType(tpe)
+ case name: Name => reifyName(name)
+ case tree: Tree => reifyTree(tree)
+ // disabled because this is a very special case that I plan to remove later
+ // why do I dislike annotations? see comments to `reifyAnnotationInfo`
+ // case ann: AnnotationInfo => reifyAnnotationInfo(ann)
+ case pos: Position => reifyPosition(pos)
+ case mods: global.Modifiers => reifyModifiers(mods)
+ case xs: List[_] => reifyList(xs)
+ case s: String => Literal(Constant(s))
+ case v if isAnyVal(v) => Literal(Constant(v))
+ case null => Literal(Constant(null))
+ case _ =>
+ throw new Error("reifee %s of type %s is not supported".format(reifee, reifee.getClass))
+ })
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
new file mode 100644
index 0000000..535a933
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -0,0 +1,371 @@
+package scala.reflect.reify
+package phases
+
+import scala.tools.nsc.symtab.Flags._
+
+trait Reshape {
+ self: Reifier =>
+
+ import global._
+ import definitions._
+
+ /**
+ * Rolls back certain changes that were introduced during typechecking of the reifee.
+ *
+ * These include:
+ * * Undoing macro expansions
+ * * Replacing type trees with TypeTree(tpe)
+ * * Reassembling CompoundTypeTrees into reifiable form
+ * * Transforming Modifiers.annotations into Symbol.annotations
+ * * Transforming Annotated annotations into AnnotatedType annotations
+ * * Transforming Annotated(annot, expr) into Typed(expr, TypeTree(Annotated(annot, _))
+ * * Non-idempotencies of the typechecker: https://issues.scala-lang.org/browse/SI-5464
+ */
+ val reshape = new Transformer {
+ var currentSymbol: Symbol = NoSymbol
+
+ override def transform(tree0: Tree) = {
+ val tree = undoMacroExpansion(tree0)
+ currentSymbol = tree.symbol
+
+ val preTyper = tree match {
+ case tree if tree.isErroneous =>
+ tree
+ case tt @ TypeTree() =>
+ toPreTyperTypeTree(tt)
+ case ctt @ CompoundTypeTree(_) =>
+ toPreTyperCompoundTypeTree(ctt)
+ case toa @ TypedOrAnnotated(_) =>
+ toPreTyperTypedOrAnnotated(toa)
+ case ta @ TypeApply(_, _) if isCrossStageTypeBearer(ta) =>
+ if (reifyDebug) println("cross-stage type bearer, retaining: " + tree)
+ ta
+ case ta @ TypeApply(hk, ts) =>
+ val discard = ts collect { case tt: TypeTree => tt } exists isDiscarded
+ if (reifyDebug && discard) println("discarding TypeApply: " + tree)
+ if (discard) hk else ta
+ case classDef @ ClassDef(mods, name, params, impl) =>
+ val Template(parents, self, body) = impl
+ var body1 = trimAccessors(classDef, reshapeLazyVals(body))
+ body1 = trimSyntheticCaseClassMembers(classDef, body1)
+ var impl1 = Template(parents, self, body1).copyAttrs(impl)
+ ClassDef(mods, name, params, impl1).copyAttrs(classDef)
+ case moduledef @ ModuleDef(mods, name, impl) =>
+ val Template(parents, self, body) = impl
+ var body1 = trimAccessors(moduledef, reshapeLazyVals(body))
+ body1 = trimSyntheticCaseClassMembers(moduledef, body1)
+ var impl1 = Template(parents, self, body1).copyAttrs(impl)
+ ModuleDef(mods, name, impl1).copyAttrs(moduledef)
+ case template @ Template(parents, self, body) =>
+ val discardedParents = parents collect { case tt: TypeTree => tt } filter isDiscarded
+ if (reifyDebug && discardedParents.length > 0) println("discarding parents in Template: " + discardedParents.mkString(", "))
+ val parents1 = parents diff discardedParents
+ val body1 = reshapeLazyVals(trimSyntheticCaseClassCompanions(body))
+ Template(parents1, self, body1).copyAttrs(template)
+ case block @ Block(stats, expr) =>
+ val stats1 = reshapeLazyVals(trimSyntheticCaseClassCompanions(stats))
+ Block(stats1, expr).copyAttrs(block)
+ case unapply @ UnApply(fun, args) =>
+ def extractExtractor(tree: Tree): Tree = {
+ val Apply(fun, args) = tree
+ args match {
+ case List(Ident(special)) if special == nme.SELECTOR_DUMMY =>
+ val Select(extractor, flavor) = fun
+ assert(flavor == nme.unapply || flavor == nme.unapplySeq)
+ extractor
+ case _ =>
+ extractExtractor(fun)
+ }
+ }
+
+ if (reifyDebug) println("unapplying unapply: " + tree)
+ val fun1 = extractExtractor(fun)
+ Apply(fun1, args).copyAttrs(unapply)
+ case _ =>
+ tree
+ }
+
+ super.transform(preTyper)
+ }
+
+ private def undoMacroExpansion(tree: Tree): Tree =
+ tree.attachments.get[MacroExpansionAttachment] match {
+ case Some(MacroExpansionAttachment(original)) =>
+ def mkImplicitly(tp: Type) = atPos(tree.pos)(
+ gen.mkNullaryCall(Predef_implicitly, List(tp))
+ )
+ val sym = original.symbol
+ original match {
+ // this hack is necessary until I fix implicit macros
+ // so far tag materialization is implemented by sneaky macros hidden in scala-compiler.jar
+ // hence we cannot reify references to them, because noone will be able to see them later
+ // when implicit macros are fixed, these sneaky macros will move to corresponding companion objects
+ // of, say, ClassTag or TypeTag
+ case Apply(TypeApply(_, List(tt)), _) if sym == materializeClassTag => mkImplicitly(appliedType(ClassTagClass, tt.tpe))
+ case Apply(TypeApply(_, List(tt)), List(pre)) if sym == materializeWeakTypeTag => mkImplicitly(typeRef(pre.tpe, WeakTypeTagClass, List(tt.tpe)))
+ case Apply(TypeApply(_, List(tt)), List(pre)) if sym == materializeTypeTag => mkImplicitly(typeRef(pre.tpe, TypeTagClass, List(tt.tpe)))
+ case _ => original
+ }
+ case _ => tree
+ }
+
+ override def transformModifiers(mods: Modifiers) = {
+ val mods1 = toPreTyperModifiers(mods, currentSymbol)
+ super.transformModifiers(mods1)
+ }
+
+ private def toPreTyperModifiers(mods: Modifiers, sym: Symbol) = {
+ if (!sym.annotations.isEmpty) {
+ val Modifiers(flags, privateWithin, annotations) = mods
+ val postTyper = sym.annotations filter (_.original != EmptyTree)
+ if (reifyDebug && !postTyper.isEmpty) println("reify symbol annotations for: " + sym)
+ if (reifyDebug && !postTyper.isEmpty) println("originals are: " + sym.annotations)
+ val preTyper = postTyper map toPreTyperAnnotation
+ mods.withAnnotations(preTyper)
+ } else {
+ mods
+ }
+ }
+
+ /** Restore pre-typer representation of a type.
+ *
+ * NB: This is the trickiest part of reification!
+ *
+ * In most cases, we're perfectly fine to reify a Type itself (see ``reifyType'').
+ * However if the type involves a symbol declared inside the quasiquote (i.e. registered in ``boundSyms''),
+ * then we cannot reify it, or otherwise subsequent reflective compilation will fail.
+ *
+ * Why will it fail? Because reified deftrees (e.g. ClassDef(...)) will generate fresh symbols during that compilation,
+ * so naively reified symbols will become out of sync, which brings really funny compilation errors and/or crashes, e.g.:
+ * https://issues.scala-lang.org/browse/SI-5230
+ *
+ * To deal with this unpleasant fact, we need to fall back from types to equivalent trees (after all, parser trees don't contain any types, just trees, so it should be possible).
+ * Luckily, these original trees get preserved for us in the ``original'' field when Trees get transformed into TypeTrees.
+ * And if an original of a type tree is empty, we can safely assume that this type is non-essential (e.g. was inferred/generated by the compiler).
+ * In that case the type can be omitted (e.g. reified as an empty TypeTree), since it will be inferred again later on.
+ *
+ * An important property of the original is that it isn't just a pre-typer tree.
+ * It's actually kind of a post-typer tree with symbols assigned to its Idents (e.g. Ident("List") will contain a symbol that points to immutable.this.List).
+ * This is very important, since subsequent reflective compilation won't have to resolve these symbols.
+ * In general case, such resolution cannot be performed, since reification doesn't preserve lexical context,
+ * which means that reflective compilation won't be aware of, say, imports that were provided when the reifee has been compiled.
+ *
+ * This workaround worked surprisingly well and allowed me to fix several important reification bugs, until the abstraction has leaked.
+ * Suddenly I found out that in certain contexts original trees do not contain symbols, but are just parser trees.
+ * To the moment I know only one such situation: typedAnnotations does not typecheck the annotation in-place, but rather creates new trees and typechecks them, so the original remains symless.
+ * Thus we apply a workaround for that in typedAnnotated. I hope this will be the only workaround in this department.
+ * upd. There are also problems with CompoundTypeTrees. I had to use attachments to retain necessary information.
+ *
+ * upd. Recently I went ahead and started using original for all TypeTrees, regardless of whether they refer to local symbols or not.
+ * As a result, ``reifyType'' is never called directly by tree reification (and, wow, it seems to work great!).
+ * The only usage of ``reifyType'' now is for servicing typetags, however, I have some ideas how to get rid of that as well.
+ */
+ private def isDiscarded(tt: TypeTree) = tt.original == null
+ private def toPreTyperTypeTree(tt: TypeTree): Tree = {
+ if (!isDiscarded(tt)) {
+ // here we rely on the fact that the originals that reach this point
+ // have all necessary symbols attached to them (i.e. that they can be recompiled in any lexical context)
+ // if this assumption fails, please, don't be quick to add postprocessing here (like I did before)
+ // but rather try to fix this in Typer, so that it produces quality originals (like it's done for typedAnnotated)
+ if (reifyDebug) println("TypeTree, essential: %s (%s)".format(tt.tpe, tt.tpe.kind))
+ if (reifyDebug) println("verdict: rolled back to original %s".format(tt.original))
+ transform(tt.original)
+ } else {
+ // type is deemed to be non-essential
+ // erase it and hope that subsequent reflective compilation will be able to recreate it again
+ if (reifyDebug) println("TypeTree, non-essential: %s (%s)".format(tt.tpe, tt.tpe.kind))
+ if (reifyDebug) println("verdict: discarded")
+ TypeTree()
+ }
+ }
+
+ private def toPreTyperCompoundTypeTree(ctt: CompoundTypeTree): Tree = {
+ val CompoundTypeTree(tmpl @ Template(parents, self, stats)) = ctt
+ if (stats.nonEmpty) CannotReifyCompoundTypeTreeWithNonEmptyBody(ctt)
+ assert(self eq emptyValDef, self)
+ val att = tmpl.attachments.get[CompoundTypeTreeOriginalAttachment]
+ val CompoundTypeTreeOriginalAttachment(parents1, stats1) = att.getOrElse(CompoundTypeTreeOriginalAttachment(parents, stats))
+ CompoundTypeTree(Template(parents1, self, stats1))
+ }
+
+ private def toPreTyperTypedOrAnnotated(tree: Tree): Tree = tree match {
+ case ty @ Typed(expr1, tpt) =>
+ if (reifyDebug) println("reify typed: " + tree)
+ val original = tpt match {
+ case tt @ TypeTree() => tt.original
+ case tpt => tpt
+ }
+ val annotatedArg = {
+ def loop(tree: Tree): Tree = tree match {
+ case annotated1 @ Annotated(ann, annotated2 @ Annotated(_, _)) => loop(annotated2)
+ case annotated1 @ Annotated(ann, arg) => arg
+ case _ => EmptyTree
+ }
+
+ loop(original)
+ }
+ if (annotatedArg != EmptyTree) {
+ if (annotatedArg.isType) {
+ if (reifyDebug) println("verdict: was an annotated type, reify as usual")
+ ty
+ } else {
+ if (reifyDebug) println("verdict: was an annotated value, equivalent is " + original)
+ toPreTyperTypedOrAnnotated(original)
+ }
+ } else {
+ if (reifyDebug) println("verdict: wasn't annotated, reify as usual")
+ ty
+ }
+ case at @ Annotated(annot, arg) =>
+ if (reifyDebug) println("reify type annotations for: " + tree)
+ assert(at.tpe.isInstanceOf[AnnotatedType], "%s (%s)".format(at.tpe, at.tpe.kind))
+ val annot1 = toPreTyperAnnotation(at.tpe.asInstanceOf[AnnotatedType].annotations(0))
+ if (reifyDebug) println("originals are: " + annot1)
+ Annotated(annot1, arg).copyAttrs(at)
+ }
+
+ /** Restore pre-typer representation of an annotation.
+ * The trick here is to retain the symbols that have been populated during typechecking of the annotation.
+ * If we do not do that, subsequent reflective compilation will fail.
+ */
+ private def toPreTyperAnnotation(ann: AnnotationInfo): Tree = {
+ val args = if (ann.assocs.isEmpty) {
+ ann.args
+ } else {
+ def toScalaAnnotation(jann: ClassfileAnnotArg): Tree = jann match {
+ case LiteralAnnotArg(const) =>
+ Literal(const)
+ case ArrayAnnotArg(arr) =>
+ Apply(Ident(definitions.ArrayModule), arr.toList map toScalaAnnotation)
+ case NestedAnnotArg(ann) =>
+ toPreTyperAnnotation(ann)
+ }
+
+ ann.assocs map { case (nme, arg) => AssignOrNamedArg(Ident(nme), toScalaAnnotation(arg)) }
+ }
+
+ def extractOriginal: PartialFunction[Tree, Tree] = { case Apply(Select(New(tpt), _), _) => tpt }
+ assert(extractOriginal.isDefinedAt(ann.original), showRaw(ann.original))
+ New(TypeTree(ann.atp) setOriginal extractOriginal(ann.original), List(args))
+ }
+
+ private def toPreTyperLazyVal(ddef: DefDef): ValDef = {
+ def extractRhs(rhs: Tree) = rhs match {
+ case Block(Assign(lhs, rhs)::Nil, _) if lhs.symbol.isLazy => rhs
+ case _ => rhs // unit or trait case
+ }
+ val DefDef(mods0, name0, _, _, tpt0, rhs0) = ddef
+ val name1 = nme.dropLocalSuffix(name0)
+ val Modifiers(flags0, privateWithin0, annotations0) = mods0
+ var flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD)
+ val mods1 = Modifiers(flags1, privateWithin0, annotations0) setPositions mods0.positions
+ val mods2 = toPreTyperModifiers(mods1, ddef.symbol)
+ ValDef(mods2, name1, tpt0, extractRhs(rhs0))
+ }
+
+ private def trimAccessors(deff: Tree, stats: List[Tree]): List[Tree] = {
+ val symdefs = (stats collect { case vodef: ValOrDefDef => vodef } map (vodeff => vodeff.symbol -> vodeff)).toMap
+ val accessors = scala.collection.mutable.Map[ValDef, List[DefDef]]()
+ stats collect { case ddef: DefDef => ddef } foreach (defdef => {
+ val valdef = symdefs get defdef.symbol.accessedOrSelf collect { case vdef: ValDef => vdef } getOrElse null
+ if (valdef != null) accessors(valdef) = accessors.getOrElse(valdef, Nil) :+ defdef
+
+ def detectBeanAccessors(prefix: String): Unit = {
+ if (defdef.name.startsWith(prefix)) {
+ var name = defdef.name.toString.substring(prefix.length)
+ def uncapitalize(s: String) = if (s.length == 0) "" else { val chars = s.toCharArray; chars(0) = chars(0).toLower; new String(chars) }
+ def findValDef(name: String) = (symdefs.values collect { case vdef: ValDef if nme.dropLocalSuffix(vdef.name).toString == name => vdef }).headOption
+ val valdef = findValDef(name).orElse(findValDef(uncapitalize(name))).orNull
+ if (valdef != null) accessors(valdef) = accessors.getOrElse(valdef, Nil) :+ defdef
+ }
+ }
+ detectBeanAccessors("get")
+ detectBeanAccessors("set")
+ detectBeanAccessors("is")
+ });
+
+ var stats1 = stats flatMap {
+ case vdef @ ValDef(mods, name, tpt, rhs) if !mods.isLazy =>
+ val mods1 = if (accessors.contains(vdef)) {
+ val ddef = accessors(vdef)(0) // any accessor will do
+ val Modifiers(flags, privateWithin, annotations) = mods
+ var flags1 = flags & ~LOCAL
+ if (!ddef.symbol.isPrivate) flags1 = flags1 & ~PRIVATE
+ val privateWithin1 = ddef.mods.privateWithin
+ val annotations1 = accessors(vdef).foldLeft(annotations)((curr, acc) => curr ++ (acc.symbol.annotations map toPreTyperAnnotation))
+ Modifiers(flags1, privateWithin1, annotations1) setPositions mods.positions
+ } else {
+ mods
+ }
+ val mods2 = toPreTyperModifiers(mods1, vdef.symbol)
+ val name1 = nme.dropLocalSuffix(name)
+ val vdef1 = ValDef(mods2, name1, tpt, rhs)
+ if (reifyDebug) println("resetting visibility of field: %s => %s".format(vdef, vdef1))
+ Some(vdef1) // no copyAttrs here, because new ValDef and old symbols are now out of sync
+ case ddef: DefDef if !ddef.mods.isLazy =>
+ // lazy val accessors are removed in reshapeLazyVals
+ // as they are needed to recreate lazy vals
+ if (accessors.values.exists(_.contains(ddef))) {
+ if (reifyDebug) println("discarding accessor method: " + ddef)
+ None
+ } else {
+ Some(ddef)
+ }
+ case tree =>
+ Some(tree)
+ }
+
+ stats1
+ }
+
+ private def reshapeLazyVals(stats: List[Tree]): List[Tree] = {
+ val lazyvaldefs:Map[Symbol, DefDef] = stats.collect({ case ddef: DefDef if ddef.mods.isLazy => ddef }).
+ map((ddef: DefDef) => ddef.symbol -> ddef).toMap
+ // lazy valdef and defdef are in the same block.
+ // only that valdef needs to have its rhs rebuilt from defdef
+ stats flatMap (stat => stat match {
+ case vdef: ValDef if vdef.symbol.isLazy =>
+ if (reifyDebug) println(s"reconstructing original lazy value for $vdef")
+ val ddefSym = vdef.symbol.lazyAccessor
+ val vdef1 = lazyvaldefs.get(ddefSym) match {
+ case Some(ddef) =>
+ toPreTyperLazyVal(ddef)
+ case None =>
+ CannotReifyInvalidLazyVal(vdef)
+ }
+ if (reifyDebug) println(s"reconstructed lazy val is $vdef1")
+ vdef1::Nil
+ case ddef: DefDef if ddef.symbol.isLazy =>
+ def hasUnitType(sym: Symbol) = (sym.tpe.typeSymbol == UnitClass) && sym.tpe.annotations.isEmpty
+ if (hasUnitType(ddef.symbol)) {
+ // since lazy values of type Unit don't have val's
+ // we need to create them from scratch
+ toPreTyperLazyVal(ddef) :: Nil
+ } else Nil
+ case _ => stat::Nil
+ })
+ }
+
+ private def trimSyntheticCaseClassMembers(deff: Tree, stats: List[Tree]): List[Tree] =
+ stats filterNot (memberDef => memberDef.isDef && {
+ val isSynthetic = memberDef.symbol.isSynthetic
+ // this doesn't work for local classes, e.g. for ones that are top-level to a quasiquote (see comments to companionClass)
+ // that's why I replace the check with an assumption that all synthetic members are, in fact, generated of case classes
+ // val isCaseMember = deff.symbol.isCaseClass || deff.symbol.companionClass.isCaseClass
+ val isCaseMember = true
+ if (isSynthetic && isCaseMember && reifyDebug) println("discarding case class synthetic def: " + memberDef)
+ isSynthetic && isCaseMember
+ })
+
+ private def trimSyntheticCaseClassCompanions(stats: List[Tree]): List[Tree] =
+ stats diff (stats collect { case moddef: ModuleDef => moddef } filter (moddef => {
+ val isSynthetic = moddef.symbol.isSynthetic
+ // this doesn't work for local classes, e.g. for ones that are top-level to a quasiquote (see comments to companionClass)
+ // that's why I replace the check with an assumption that all synthetic modules are, in fact, companions of case classes
+ // val isCaseCompanion = moddef.symbol.companionClass.isCaseClass
+ val isCaseCompanion = true
+ if (isSynthetic && isCaseCompanion && reifyDebug) println("discarding synthetic case class companion: " + moddef)
+ isSynthetic && isCaseCompanion
+ }))
+ }
+}
diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala
new file mode 100644
index 0000000..59cd4e5
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala
@@ -0,0 +1,256 @@
+package scala.reflect.reify
+package utils
+
+trait Extractors {
+ self: Utils =>
+
+ import global._
+ import definitions._
+ import Flag._
+
+ // Example of a reified tree for `reify(List(1, 2))`:
+ // (also contains an example of a reified type as a third argument to the constructor of Expr)
+ // {
+ // val $u: reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
+ // val $m: $u.Mirror = $u.runtimeMirror(Test.this.getClass().getClassLoader());
+ // $u.Expr[List[Int]]($m, {
+ // final class $treecreator1 extends scala.reflect.api.TreeCreator {
+ // def <init>(): $treecreator1 = {
+ // $treecreator1.super.<init>();
+ // ()
+ // };
+ // def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
+ // val $u: U = $m$untyped.universe;
+ // val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ // $u.Apply($u.Select($u.Select($u.build.This($m.staticPackage("scala.collection.immutable").moduleClass), $u.newTermName("List")), $u.newTermName("apply")), List($u.Literal($u.Constant(1)), $u.Literal($u.Constant(2))))
+ // }
+ // };
+ // new $treecreator1()
+ // })($u.TypeTag[List[Int]]($m, {
+ // final class $typecreator1 extends scala.reflect.api.TypeCreator {
+ // def <init>(): $typecreator1 = {
+ // $typecreator1.super.<init>();
+ // ()
+ // };
+ // def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
+ // val $u: U = $m$untyped.universe;
+ // val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ // $u.TypeRef($u.ThisType($m.staticPackage("scala.collection.immutable").moduleClass), $m.staticClass("scala.collection.immutable.List"), List($m.staticClass("scala.Int").toTypeConstructor))
+ // }
+ // };
+ // new $typecreator1()
+ // }))
+ // }
+
+ private def mkCreator(flavor: TypeName, symtab: SymbolTable, rtree: Tree): Tree = {
+ val tparamu = newTypeName("U")
+ val (reifierBase, reifierName, reifierTpt, reifierUniverse) = flavor match {
+ case tpnme.REIFY_TYPECREATOR_PREFIX => (TypeCreatorClass, nme.apply, SelectFromTypeTree(Ident(tparamu), tpnme.Type), ApiUniverseClass)
+ case tpnme.REIFY_TREECREATOR_PREFIX => (TreeCreatorClass, nme.apply, SelectFromTypeTree(Ident(tparamu), tpnme.Tree), ApiUniverseClass)
+ case _ => throw new Error(s"unexpected flavor $flavor")
+ }
+ val reifierBody = {
+ def gc(symtab: SymbolTable): SymbolTable = {
+ def loop(symtab: SymbolTable): SymbolTable = {
+ def extractNames(tree: Tree) = tree.collect{ case ref: RefTree => ref.name }.toSet
+ val usedNames = extractNames(rtree) ++ symtab.syms.flatMap(sym => extractNames(symtab.symDef(sym)))
+ symtab filterAliases { case (_, name) => usedNames(name) }
+ }
+ var prev = symtab
+ var next = loop(symtab)
+ while (next.syms.length < prev.syms.length) {
+ prev = next
+ next = loop(prev)
+ }
+ next
+ }
+
+ val universeAlias = ValDef(NoMods, nme.UNIVERSE_SHORT, Ident(tparamu), Select(Ident(nme.MIRROR_UNTYPED), nme.universe))
+ val mirrorAlias = ValDef(NoMods, nme.MIRROR_SHORT, Select(Ident(nme.UNIVERSE_SHORT), tpnme.Mirror), TypeApply(Select(Ident(nme.MIRROR_UNTYPED), nme.asInstanceOf_), List(Select(Ident(nme.UNIVERSE_SHORT), tpnme.Mirror))))
+ val trimmedSymtab = if (hasReifier) gc(symtab) else symtab
+ Block(universeAlias :: mirrorAlias :: trimmedSymtab.encode, rtree)
+ }
+ val tpec = ClassDef(
+ Modifiers(FINAL),
+ newTypeName(global.currentUnit.fresh.newName(flavor.toString)),
+ List(),
+ Template(List(Ident(reifierBase)),
+ emptyValDef,
+ List(
+ DefDef(NoMods, nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(())))),
+ DefDef(NoMods,
+ reifierName,
+ List(TypeDef(Modifiers(PARAM), tparamu, List(), TypeBoundsTree(Ident(NothingClass), CompoundTypeTree(Template(List(Ident(reifierUniverse), Ident(SingletonClass)), emptyValDef, List()))))),
+ List(List(ValDef(Modifiers(PARAM), nme.MIRROR_UNTYPED, AppliedTypeTree(Ident(MirrorClass), List(Ident(tparamu))), EmptyTree))),
+ reifierTpt, reifierBody))))
+ Block(tpec, ApplyConstructor(Ident(tpec.name), List()))
+ }
+
+ private def mkWrapper(universe: Tree, mirror: Tree, wrappee: Tree): Tree = {
+ val universeAlias = ValDef(NoMods, nme.UNIVERSE_SHORT, SingletonTypeTree(universe), universe)
+ val mirrorAlias = ValDef(NoMods, nme.MIRROR_SHORT, Select(Ident(nme.UNIVERSE_SHORT), tpnme.Mirror), mirror orElse mkDefaultMirrorRef(global)(universe, typer))
+ Block(List(universeAlias, mirrorAlias), wrappee)
+ }
+
+ // if we're reifying a MethodType, we can't use it as a type argument for TypeTag ctor
+ // http://groups.google.com/group/scala-internals/browse_thread/thread/2d7bb85bfcdb2e2
+ private def mkTarg(tpe: Type): Tree = (
+ if ((tpe eq null) || !isUseableAsTypeArg(tpe)) TypeTree(AnyTpe)
+ else TypeTree(tpe)
+ )
+
+ object ReifiedTree {
+ def apply(universe: Tree, mirror: Tree, symtab: SymbolTable, rtree: Tree, tpe: Type, rtpe: Tree, concrete: Boolean): Tree = {
+ val tagFactory = if (concrete) nme.TypeTag else nme.WeakTypeTag
+ val tagCtor = TypeApply(Select(Select(Ident(nme.UNIVERSE_SHORT), tagFactory), nme.apply), List(mkTarg(tpe)))
+ val exprCtor = TypeApply(Select(Select(Ident(nme.UNIVERSE_SHORT), nme.Expr), nme.apply), List(mkTarg(tpe)))
+ val tagArgs = List(Ident(nme.MIRROR_SHORT), mkCreator(tpnme.REIFY_TYPECREATOR_PREFIX, symtab, rtpe))
+ val unwrapped = Apply(Apply(exprCtor, List(Ident(nme.MIRROR_SHORT), mkCreator(tpnme.REIFY_TREECREATOR_PREFIX, symtab, rtree))), List(Apply(tagCtor, tagArgs)))
+ mkWrapper(universe, mirror, unwrapped)
+ }
+
+ def unapply(tree: Tree): Option[(Tree, Tree, SymbolTable, Tree, Type, Tree, Boolean)] = tree match {
+ case Block(
+ List(udef @ ValDef(_, _, _, universe), mdef @ ValDef(_, _, _, mirror)),
+ Apply(
+ Apply(TypeApply(_, List(ttpe @ TypeTree())), List(_, Block(List(ClassDef(_, _, _, Template(_, _, List(_, DefDef(_, _, _, _, _, Block(_ :: _ :: symbolTable1, rtree)))))), _))),
+ // todo. doesn't take into account optimizations such as $u.TypeTag.Int or the upcoming closure optimization
+ List(Apply(TypeApply(tagFactory @ Select(_, _), _), List(_, Block(List(ClassDef(_, _, _, Template(_, _, List(_, DefDef(_, _, _, _, _, Block(_ :: _ :: symbolTable2, rtpe)))))), _))))))
+ if udef.name == nme.UNIVERSE_SHORT && mdef.name == nme.MIRROR_SHORT =>
+ val tagFlavor = tagFactory match {
+ case Select(Select(_, tagFlavor), _) => tagFlavor
+ case Select(_, tagFlavor) => tagFlavor
+ }
+ Some((universe, mirror, SymbolTable(symbolTable1 ++ symbolTable2), rtree, ttpe.tpe, rtpe, tagFlavor == nme.TypeTag))
+ case _ =>
+ None
+ }
+ }
+
+ object ReifiedType {
+ def apply(universe: Tree, mirror: Tree, symtab: SymbolTable, tpe: Type, rtpe: Tree, concrete: Boolean) = {
+ val tagFactory = if (concrete) nme.TypeTag else nme.WeakTypeTag
+ val ctor = TypeApply(Select(Select(Ident(nme.UNIVERSE_SHORT), tagFactory), nme.apply), List(mkTarg(tpe)))
+ val args = List(Ident(nme.MIRROR_SHORT), mkCreator(tpnme.REIFY_TYPECREATOR_PREFIX, symtab, rtpe))
+ val unwrapped = Apply(ctor, args)
+ mkWrapper(universe, mirror, unwrapped)
+ }
+
+ def unapply(tree: Tree): Option[(Tree, Tree, SymbolTable, Type, Tree, Boolean)] = tree match {
+ case Block(
+ List(udef @ ValDef(_, _, _, universe), mdef @ ValDef(_, _, _, mirror)),
+ // todo. doesn't take into account optimizations such as $u.TypeTag.Int or the upcoming closure optimization
+ Apply(TypeApply(tagFactory @ Select(_, _), List(ttpe @ TypeTree())), List(_, Block(List(ClassDef(_, _, _, Template(_, _, List(_, DefDef(_, _, _, _, _, Block(_ :: _ :: symtab, rtpe)))))), _))))
+ if udef.name == nme.UNIVERSE_SHORT && mdef.name == nme.MIRROR_SHORT =>
+ val tagFlavor = tagFactory match {
+ case Select(Select(_, tagFlavor), _) => tagFlavor
+ case Select(_, tagFlavor) => tagFlavor
+ }
+ Some((universe, mirror, SymbolTable(symtab), ttpe.tpe, rtpe, tagFlavor == nme.TypeTag))
+ case _ =>
+ None
+ }
+ }
+
+ object TreeSplice {
+ def apply(splicee: Tree): Tree =
+ Select(splicee, ExprSplice)
+
+ def unapply(tree: Tree): Option[Tree] = tree match {
+ case Select(splicee, _) if tree.symbol != NoSymbol && tree.symbol == ExprSplice =>
+ Some(splicee)
+ case _ =>
+ None
+ }
+ }
+
+ sealed abstract class FreeDefExtractor(acceptTerms: Boolean, acceptTypes: Boolean) {
+ def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = {
+ def acceptFreeTermFactory(name: Name) = {
+ (acceptTerms && name == nme.newFreeTerm) ||
+ (acceptTypes && name == nme.newFreeType)
+ }
+ tree match {
+ case
+ ValDef(_, name, _, Apply(
+ Select(Select(uref1 @ Ident(_), build1), freeTermFactory),
+ _ :+
+ Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))) :+
+ Literal(Constant(origin: String))))
+ if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && acceptFreeTermFactory(freeTermFactory) &&
+ uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
+ Some(uref1, name, reifyBinding(tree), flags, origin)
+ case _ =>
+ None
+ }
+ }
+ }
+ object FreeDef extends FreeDefExtractor(acceptTerms = true, acceptTypes = true)
+ object FreeTermDef extends FreeDefExtractor(acceptTerms = true, acceptTypes = false)
+ object FreeTypeDef extends FreeDefExtractor(acceptTerms = false, acceptTypes = true)
+
+ object FreeRef {
+ def unapply(tree: Tree): Option[(Tree, TermName)] = tree match {
+ case Apply(Select(Select(uref @ Ident(_), build), ident), List(Ident(name: TermName)))
+ if build == nme.build && ident == nme.Ident && name.startsWith(nme.REIFY_FREE_PREFIX) =>
+ Some((uref, name))
+ case _ =>
+ None
+ }
+ }
+
+ object SymDef {
+ def unapply(tree: Tree): Option[(Tree, TermName, Long, Boolean)] = tree match {
+ case
+ ValDef(_, name, _, Apply(
+ Select(Select(uref1 @ Ident(_), build1), newNestedSymbol),
+ List(
+ _,
+ _,
+ _,
+ Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))),
+ Literal(Constant(isClass: Boolean)))))
+ if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newNestedSymbol == nme.newNestedSymbol &&
+ uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
+ Some((uref1, name, flags, isClass))
+ case _ =>
+ None
+ }
+ }
+
+ object TypeRefToFreeType {
+ def unapply(tree: Tree): Option[TermName] = tree match {
+ case Apply(Select(Select(uref @ Ident(_), typeRef), apply), List(Select(_, noSymbol), Ident(freeType: TermName), nil))
+ if (uref.name == nme.UNIVERSE_SHORT && typeRef == nme.TypeRef && noSymbol == nme.NoSymbol && freeType.startsWith(nme.REIFY_FREE_PREFIX)) =>
+ Some(freeType)
+ case _ =>
+ None
+ }
+ }
+
+ object BoundTerm {
+ def unapply(tree: Tree): Option[Tree] = tree match {
+ case Select(_, name) if name.isTermName =>
+ Some(tree)
+ case Ident(name) if name.isTermName =>
+ Some(tree)
+ case This(_) =>
+ Some(tree)
+ case _ =>
+ None
+ }
+ }
+
+ object BoundType {
+ def unapply(tree: Tree): Option[RefTree] = tree match {
+ case tree @ Select(_, name) if name.isTypeName =>
+ Some(tree)
+ case tree @ SelectFromTypeTree(_, _) =>
+ Some(tree)
+ case tree @ Ident(name) if name.isTypeName =>
+ Some(tree)
+ case _ =>
+ None
+ }
+ }
+}
diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
new file mode 100644
index 0000000..aca18c7
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
@@ -0,0 +1,110 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.reflect.reify
+package utils
+
+import scala.compat.Platform.EOL
+
+trait NodePrinters {
+ self: Utils =>
+
+ import global._
+ import definitions._
+ import Flag._
+
+ object reifiedNodeToString extends (Tree => String) {
+ def apply(tree: Tree): String = {
+ var mirrorIsUsed = false
+ var flagsAreUsed = false
+
+ // @PP: I fervently hope this is a test case or something, not anything being
+ // depended upon. Of more fragile code I cannot conceive.
+ // @Eugene: This stuff is only needed to debug-print out reifications in human-readable format
+ // Rolling a full-fledged, robust TreePrinter would be several times more code.
+ // Also as of late we have tests that ensure that UX won't be broken by random changes to the reifier.
+ val lines = (tree.toString.split(EOL) drop 1 dropRight 1).toList splitAt 2
+ var (List(universe, mirror), reification) = lines
+ reification = (for (line <- reification) yield {
+ var s = line substring 2
+ s = s.replace(nme.UNIVERSE_PREFIX.toString, "")
+ s = s.replace(".apply", "")
+ s = "([^\"])scala\\.collection\\.immutable\\.".r.replaceAllIn(s, "$1")
+ s = "List\\[List\\[.*?\\].*?\\]".r.replaceAllIn(s, "List")
+ s = "List\\[.*?\\]".r.replaceAllIn(s, "List")
+ s = s.replace("immutable.this.Nil", "List()")
+ s = """build\.flagsFromBits\((\d+)[lL]\)""".r.replaceAllIn(s, m => {
+ flagsAreUsed = true
+ show(m.group(1).toLong)
+ })
+ s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()")
+ s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
+ val buf = new scala.collection.mutable.ListBuffer[String]
+
+ val annotations = m.group(3)
+ if (buf.nonEmpty || annotations != "")
+ buf.append("List(" + annotations + ")")
+
+ val privateWithin = "" + m.group(2)
+ if (buf.nonEmpty || privateWithin != "")
+ buf.append("newTypeName(\"" + privateWithin + "\")")
+
+ val bits = m.group(1)
+ if (buf.nonEmpty || bits != "0L") {
+ flagsAreUsed = true
+ buf.append(show(bits.toLong))
+ }
+
+ val replacement = "Modifiers(" + buf.reverse.mkString(", ") + ")"
+ java.util.regex.Matcher.quoteReplacement(replacement)
+ })
+ s
+ })
+
+ val isExpr = reification.length > 0 && reification(0).trim.startsWith("Expr[")
+ var rtree = reification dropWhile (!_.trim.startsWith(s"val ${nme.UNIVERSE_SHORT}: U = ${nme.MIRROR_UNTYPED}.universe;"))
+ rtree = rtree drop 2
+ rtree = rtree takeWhile (_ != " }")
+ rtree = rtree map (s0 => {
+ var s = s0
+ mirrorIsUsed |= s contains nme.MIRROR_PREFIX.toString
+ s = s.replace(nme.MIRROR_PREFIX.toString, "")
+ s.trim
+ })
+
+ val printout = scala.collection.mutable.ListBuffer[String]();
+ printout += universe.trim
+ if (mirrorIsUsed) printout += mirror.replace("Mirror[", "scala.reflect.api.Mirror[").trim
+ val imports = scala.collection.mutable.ListBuffer[String]();
+ imports += nme.UNIVERSE_SHORT
+ // if (buildIsUsed) imports += nme.build
+ if (mirrorIsUsed) imports += nme.MIRROR_SHORT
+ if (flagsAreUsed) imports += nme.Flag
+ printout += s"""import ${imports map (_ + "._") mkString ", "}"""
+
+ val name = if (isExpr) "tree" else "tpe"
+ if (rtree(0) startsWith "val") {
+ printout += s"val $name = {"
+ printout ++= (rtree map (" " + _))
+ printout += "}"
+ } else {
+ printout += s"val $name = " + rtree(0)
+ }
+ if (isExpr) {
+ if (mirror contains ".getClassLoader") {
+ printout += "import scala.tools.reflect.ToolBox"
+ printout += s"println(${nme.MIRROR_SHORT}.mkToolBox().eval(tree))"
+ } else {
+ printout += "println(tree)"
+ }
+ } else {
+ printout += "println(tpe)"
+ }
+
+ // printout mkString EOL
+ val prefix = "// produced from " + reifier.defaultErrorPosition
+ (prefix +: "object Test extends App {" +: (printout map (" " + _)) :+ "}") mkString EOL
+ }
+ }
+}
diff --git a/src/compiler/scala/reflect/reify/utils/StdAttachments.scala b/src/compiler/scala/reflect/reify/utils/StdAttachments.scala
new file mode 100644
index 0000000..0b9cf58
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/utils/StdAttachments.scala
@@ -0,0 +1,18 @@
+package scala.reflect.reify
+package utils
+
+trait StdAttachments {
+ self: Utils =>
+
+ import global._
+
+ case class ReifyBindingAttachment(binding: Tree)
+
+ def reifyBinding(tree: Tree): Tree =
+ tree.attachments.get[ReifyBindingAttachment] match {
+ case Some(ReifyBindingAttachment(binding)) => binding
+ case other => Ident(NoSymbol)
+ }
+
+ case class ReifyAliasAttachment(sym: Symbol, alias: TermName)
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
new file mode 100644
index 0000000..dbb0836
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
@@ -0,0 +1,217 @@
+package scala.reflect.reify
+package utils
+
+import scala.collection._
+import scala.compat.Platform.EOL
+
+trait SymbolTables {
+ self: Utils =>
+
+ import global._
+ import definitions._
+ import Flag._
+
+ class SymbolTable private[SymbolTable] (
+ private[SymbolTable] val symtab: immutable.ListMap[Symbol, Tree] = immutable.ListMap[Symbol, Tree](),
+ private[SymbolTable] val aliases: List[(Symbol, TermName)] = List[(Symbol, TermName)](),
+ private[SymbolTable] val original: Option[List[Tree]] = None) {
+
+ def syms: List[Symbol] = symtab.keys.toList
+ def isConcrete: Boolean = symtab.values forall (sym => !FreeTypeDef.unapply(sym).isDefined)
+
+// def aliases: Map[Symbol, List[TermName]] = aliases.distinct groupBy (_._1) mapValues (_ map (_._2))
+
+ def symDef(sym: Symbol): Tree =
+ symtab.getOrElse(sym, EmptyTree)
+
+ def symName(sym: Symbol): TermName =
+ symtab.get(sym) match {
+ case Some(FreeDef(_, name, _, _, _)) => name
+ case Some(SymDef(_, name, _, _)) => name
+ case None => nme.EMPTY
+ }
+
+ def symAliases(sym: Symbol): List[TermName] =
+ symName(sym) match {
+ case name if name.isEmpty => Nil
+ case _ => (aliases.distinct groupBy (_._1) mapValues (_ map (_._2)))(sym)
+ }
+
+ def symBinding(sym: Symbol): Tree =
+ symtab.get(sym) match {
+ case Some(FreeDef(_, _, binding, _, _)) => binding
+ case Some(SymDef(_, _, _, _)) => throw new UnsupportedOperationException(s"${symtab(sym)} is a symdef, hence it doesn't have a binding")
+ case None => EmptyTree
+ }
+
+ def symRef(sym: Symbol): Tree =
+ symtab.get(sym) match {
+ case Some(FreeDef(_, name, binding, _, _)) => Ident(name) updateAttachment binding
+ case Some(SymDef(_, name, _, _)) => Ident(name) updateAttachment ReifyBindingAttachment(Ident(sym))
+ case None => EmptyTree
+ }
+
+ def +(sym: Symbol, name: TermName, reification: Tree): SymbolTable = add(sym, name, reification)
+ def +(symDef: Tree): SymbolTable = add(symDef)
+ def ++(symDefs: TraversableOnce[Tree]): SymbolTable = (this /: symDefs)((symtab, symDef) => symtab.add(symDef))
+ def ++(symtab: SymbolTable): SymbolTable = { val updated = this ++ symtab.symtab.values; new SymbolTable(updated.symtab, updated.aliases ++ symtab.aliases) }
+ def -(sym: Symbol): SymbolTable = remove(sym)
+ def -(name: TermName): SymbolTable = remove(name)
+ def -(symDef: Tree): SymbolTable = remove(reifyBinding(symDef).symbol)
+ def --(syms: GenTraversableOnce[Symbol]): SymbolTable = (this /: syms)((symtab, sym) => symtab.remove(sym))
+ def --(names: Iterable[TermName]): SymbolTable = (this /: names)((symtab, name) => symtab.remove(name))
+ def --(symDefs: TraversableOnce[Tree]): SymbolTable = this -- (symDefs map (reifyBinding(_)))
+ def --(symtab: SymbolTable): SymbolTable = { val updated = this -- symtab.symtab.values; new SymbolTable(updated.symtab, updated.aliases diff symtab.aliases) }
+ def filterSyms(p: Symbol => Boolean): SymbolTable = this -- (syms filterNot p)
+ def filterAliases(p: (Symbol, TermName) => Boolean): SymbolTable = this -- (aliases filterNot (tuple => p(tuple._1, tuple._2)) map (_._2))
+
+ private def add(symDef: Tree): SymbolTable = {
+ val sym = reifyBinding(symDef).symbol
+ assert(sym != NoSymbol, showRaw(symDef))
+ val name = symDef match {
+ case FreeDef(_, name, _, _, _) => name
+ case SymDef(_, name, _, _) => name
+ }
+ val newSymtab = if (!(symtab contains sym)) symtab + (sym -> symDef) else symtab
+ val newAliases = aliases :+ (sym -> name)
+ new SymbolTable(newSymtab, newAliases)
+ }
+
+ private def add(sym: Symbol, name0: TermName, reification: Tree): SymbolTable = {
+ def freshName(name0: TermName): TermName = {
+ var name = name0.toString
+ name = name.replace(".type", "$type")
+ name = name.replace(" ", "$")
+ val fresh = typer.context.unit.fresh
+ newTermName(fresh.newName(name))
+ }
+ val bindingAttachment = reification.attachments.get[ReifyBindingAttachment].get
+ add(ValDef(NoMods, freshName(name0), TypeTree(), reification) updateAttachment bindingAttachment)
+ }
+
+ private def add(sym: Symbol, name: TermName): SymbolTable = {
+ if (!(syms contains sym)) error("cannot add an alias to a symbol not in the symbol table")
+ add(sym, name, EmptyTree)
+ }
+
+ private def remove(sym: Symbol): SymbolTable = {
+ val newSymtab = symtab - sym
+ val newAliases = aliases filter (_._1 != sym)
+ new SymbolTable(newSymtab, newAliases)
+ }
+
+ private def remove(name: TermName): SymbolTable = {
+ var newSymtab = symtab
+ val newAliases = aliases filter (_._2 != name)
+ newSymtab = newSymtab filter { case ((sym, _)) => newAliases exists (_._1 == sym) }
+ newSymtab = newSymtab map { case ((sym, tree)) =>
+ val ValDef(mods, primaryName, tpt, rhs) = tree
+ val tree1 =
+ if (!(newAliases contains (sym, primaryName))) {
+ val primaryName1 = newAliases.find(_._1 == sym).get._2
+ ValDef(mods, primaryName1, tpt, rhs).copyAttrs(tree)
+ } else tree
+ (sym, tree1)
+ }
+ new SymbolTable(newSymtab, newAliases)
+ }
+
+ private val cache = mutable.Map[SymbolTable, List[Tree]]()
+ def encode: List[Tree] = cache.getOrElseUpdate(this, SymbolTable.encode(this)) map (_.duplicate)
+
+ override def toString = {
+ val symtabString = symtab.keys.map(symName(_)).mkString(", ")
+ val trueAliases = aliases.distinct.filter(entry => symName(entry._1) != entry._2)
+ val aliasesString = trueAliases.map(entry => s"${symName(entry._1)} -> ${entry._2}").mkString(", ")
+ s"""symtab = [$symtabString], aliases = [$aliasesString]${if (original.isDefined) ", has original" else ""}"""
+ }
+
+ def debugString: String = {
+ val buf = new StringBuilder
+ buf.append("symbol table = " + (if (syms.length == 0) "<empty>" else "")).append(EOL)
+ syms foreach (sym => buf.append(symDef(sym)).append(EOL))
+ buf.delete(buf.length - EOL.length, buf.length)
+ buf.toString
+ }
+ }
+
+ object SymbolTable {
+ def apply(): SymbolTable =
+ new SymbolTable()
+
+ def apply(encoded: List[Tree]): SymbolTable = {
+ var result = new SymbolTable(original = Some(encoded))
+ encoded foreach (entry => (entry.attachments.get[ReifyBindingAttachment], entry.attachments.get[ReifyAliasAttachment]) match {
+ case (Some(ReifyBindingAttachment(_)), _) => result += entry
+ case (_, Some(ReifyAliasAttachment(sym, alias))) => result = new SymbolTable(result.symtab, result.aliases :+ (sym, alias))
+ case _ => // do nothing, this is boilerplate that can easily be recreated by subsequent `result.encode`
+ })
+ result
+ }
+
+ private[SymbolTable] def encode(symtab0: SymbolTable): List[Tree] = {
+ if (symtab0.original.isDefined) return symtab0.original.get.map(_.duplicate)
+ else assert(hasReifier, "encoding a symbol table requires a reifier")
+ // during `encode` we might need to do some reifications
+ // these reifications might lead to changes in `reifier.symtab`
+ // reifier is mutable, symtab is immutable. this is a tough friendship
+ val backup = reifier.state.backup
+ reifier.state.symtab = symtab0.asInstanceOf[reifier.SymbolTable]
+ def currtab = reifier.symtab.asInstanceOf[SymbolTable]
+ try {
+ val cumulativeSymtab = mutable.ArrayBuffer[Tree](symtab0.symtab.values.toList: _*)
+ val cumulativeAliases = mutable.ArrayBuffer[(Symbol, TermName)](symtab0.aliases: _*)
+
+ def fillInSymbol(sym: Symbol): Tree = {
+ if (reifyDebug) println("Filling in: %s (%s)".format(sym, sym.accurateKindString))
+ val isFreeTerm = FreeTermDef.unapply(currtab.symDef(sym)).isDefined
+ // SI-6204 don't reify signatures for incomplete symbols, because this might lead to cyclic reference errors
+ val signature =
+ if (sym.isInitialized) {
+ if (sym.isCapturedVariable) capturedVariableType(sym)
+ else if (isFreeTerm) sym.tpe
+ else sym.info
+ } else NoType
+ val rset = reifier.mirrorBuildCall(nme.setTypeSignature, currtab.symRef(sym), reifier.reify(signature))
+ // `Symbol.annotations` doesn't initialize the symbol, so we don't need to do anything special here
+ // also since we call `sym.info` a few lines above, by now the symbol will be initialized (if possible)
+ // so the annotations will be filled in and will be waiting to be reified (unless symbol initialization is prohibited as described above)
+ if (sym.annotations.isEmpty) rset
+ else reifier.mirrorBuildCall(nme.setAnnotations, rset, reifier.mkList(sym.annotations map reifier.reifyAnnotationInfo))
+ }
+
+ // `fillInSymbol` might add symbols to `symtab`, that's why this is done iteratively
+ var progress = 0
+ while (progress < cumulativeSymtab.length) {
+ val sym = reifyBinding(cumulativeSymtab(progress)).symbol
+ if (sym != NoSymbol) {
+ val symtabProgress = currtab.symtab.size
+ val aliasesProgress = currtab.aliases.length
+ val fillIn = fillInSymbol(sym)
+ cumulativeSymtab ++= currtab.symtab.values drop symtabProgress
+ cumulativeAliases ++= currtab.aliases drop aliasesProgress
+ cumulativeSymtab += fillIn
+ }
+ progress += 1
+ }
+
+ val withAliases = cumulativeSymtab flatMap (entry => {
+ val result = mutable.ListBuffer[Tree]()
+ result += entry
+ val sym = reifyBinding(entry).symbol
+ if (sym != NoSymbol)
+ result ++= cumulativeAliases.distinct filter (alias => alias._1 == sym && alias._2 != currtab.symName(sym)) map (alias => {
+ val canonicalName = currtab.symName(sym)
+ val aliasName = alias._2
+ ValDef(NoMods, aliasName, TypeTree(), Ident(canonicalName)) updateAttachment ReifyAliasAttachment(sym, aliasName)
+ })
+ result.toList
+ })
+
+ withAliases.toList
+ } finally {
+ reifier.state.restore(backup)
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/reflect/reify/utils/Utils.scala b/src/compiler/scala/reflect/reify/utils/Utils.scala
new file mode 100644
index 0000000..e1213f9
--- /dev/null
+++ b/src/compiler/scala/reflect/reify/utils/Utils.scala
@@ -0,0 +1,21 @@
+package scala.reflect.reify
+package utils
+
+import scala.tools.nsc.Global
+
+trait Utils extends NodePrinters
+ with Extractors
+ with SymbolTables
+ with StdAttachments {
+
+ val global: Global
+ val typer: global.analyzer.Typer
+
+ lazy val reifier: Reifier { val global: Utils.this.global.type } = getReifier
+ def getReifier: Reifier { val global: Utils.this.global.type } = ???
+ def hasReifier = false
+
+ val reifyDebug = global.settings.Yreifydebug.value
+ val reifyCopypaste = global.settings.Yreifycopypaste.value
+ val reifyTrace = scala.tools.nsc.util.trace when reifyDebug
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/ant/ClassloadVerify.scala b/src/compiler/scala/tools/ant/ClassloadVerify.scala
new file mode 100644
index 0000000..73555b8
--- /dev/null
+++ b/src/compiler/scala/tools/ant/ClassloadVerify.scala
@@ -0,0 +1,53 @@
+/* __ *\
+** ________ ___ / / ___ Scala Ant Tasks **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools.ant
+
+import org.apache.tools.ant.Project
+import org.apache.tools.ant.types.{Path, Reference}
+import scala.collection.JavaConverters._
+import scala.tools.util.VerifyClass
+
+class ClassloadVerify extends ScalaMatchingTask {
+
+ /** The class path to use for this compilation. */
+ protected var classpath: Option[Path] = None
+
+ /** Sets the `classpath` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `classpath`. */
+ def setClasspath(input: Path) {
+ classpath = Some(input)
+ }
+
+ def setClasspathref(input: Reference) {
+ val p = new Path(getProject())
+ p.setRefid(input)
+ classpath = Some(p)
+ }
+
+ private def getClasspath: Array[String] = classpath match {
+ case None => buildError("Member 'classpath' is empty.")
+ case Some(x) => x.list.toArray
+ }
+
+ override def execute(): Unit = {
+ val results = VerifyClass.run(getClasspath).asScala
+ results foreach (r => log("Checking: " + r, Project.MSG_DEBUG))
+ val errors = for((name, error) <- results; if error != null) yield (name,error)
+ if(errors.isEmpty) {
+ // TODO - Log success
+ log("Classload verification succeeded with " + results.size + " classes.", Project.MSG_INFO)
+ } else {
+ for((name, error) <- errors) {
+ log(name + " failed verification with: " + error, Project.MSG_ERR)
+ }
+ buildError(errors.size + " classload verification errors on " + results.size + " classes.")
+ }
+ }
+
+}
diff --git a/src/compiler/scala/tools/ant/FastScalac.scala b/src/compiler/scala/tools/ant/FastScalac.scala
index a2a7496..c3eb9ee 100644
--- a/src/compiler/scala/tools/ant/FastScalac.scala
+++ b/src/compiler/scala/tools/ant/FastScalac.scala
@@ -1,26 +1,31 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.tools.ant
-/** <p>
- * An Ant task to compile with the fast Scala compiler (<code>fsc</code>).
- * </p>
- * <p>
- * In addition to the attributes shared with the <code>Scalac</code>
- * task, this task also accepts the following attributes:
- * </p>
- * <ul style="font-family:Courier;">
- * <li>reset</li>
- * <li>server</li>
- * <li>shutdown</li>
- * </ul>
+import org.apache.tools.ant.{AntClassLoader, Project}
+import org.apache.tools.ant.taskdefs.Java
+import org.apache.tools.ant.types.Path
+
+import scala.tools.nsc.Settings
+import scala.tools.nsc.io.File
+import scala.tools.nsc.settings.FscSettings
+import scala.tools.nsc.util.ScalaClassLoader
+
+/** An Ant task to compile with the fast Scala compiler (`fsc`).
+ *
+ * In addition to the attributes shared with the `Scalac` task, this task
+ * also accepts the following attributes:
+ * - `reset`
+ * - `server`
+ * - `shutdown`
+ * - `ipv4`
+ * - `maxIdle`
*
* @author Stephane Micheloud
*/
@@ -32,90 +37,156 @@ class FastScalac extends Scalac {
private var shutdownServer: Boolean = false
+ private var useIPv4: Boolean = false
+
+ private var idleMinutes: Option[Int] = None
+
/*============================================================================*\
** Properties setters **
\*============================================================================*/
- /** Sets the <code>reset</code> attribute. Used by Ant.
+ /** Sets the `reset` attribute. Used by [[http://ant.apache.org Ant]].
*
- * @param input The value for <code>reset</code>.
+ * @param input The value for `reset`.
*/
- def setReset(input: Boolean): Unit =
- resetCaches = input
+ def setReset(input: Boolean) { resetCaches = input }
- /** Sets the <code>server</code> attribute. Used by Ant.
+ /** Sets the `server` attribute. Used by [[http://ant.apache.org Ant]].
*
- * @param input The value for <code>server</code>.
+ * @param input The value for `server`.
*/
- def setServer(input: String): Unit = {
- serverAddr = Some(input)
- }
+ def setServer(input: String) { serverAddr = Some(input) }
+
+ /** Sets the `shutdown` attribute. Used by [[http://ant.apache.org Ant]].
+ *
+ * @param input The value for `shutdown`.
+ */
+ def setShutdown(input: Boolean) { shutdownServer = input }
+
+ /** Sets the `ipv4` attribute. Used by [[http://ant.apache.org Ant]].
+ *
+ * @param input The value for `ipv4`.
+ */
+ def setIPv4(input: Boolean) { useIPv4 = input }
- /** Sets the <code>shutdown</code> attribute. Used by Ant.
+ /** Sets the `maxIdle` attribute. Used by [[http://ant.apache.org Ant]].
*
- * @param input The value for <code>shutdown</code>.
+ * @param input The value for `maxIdle`.
*/
- def setShutdown(input: Boolean): Unit =
- shutdownServer = input
+ def setMaxIdle(input: Int) { if (0 <= input) idleMinutes = Some(input) }
/*============================================================================*\
** The execute method **
\*============================================================================*/
+ override protected def newSettings(error: String=>Unit): Settings =
+ new FscSettings(error)
+
/** Performs the compilation. */
- override def execute() = {
+ override def execute() {
val (settings, sourceFiles, javaOnly) = initialize
- val s = settings
-
- if (!sourceFiles.isEmpty && !javaOnly) {
- def trim(xs: List[String]) = xs filter (x => x.length > 0)
- val reset = settings.BooleanSetting("-reset", "Reset compile server caches")
- val shutdown = settings.BooleanSetting("-shutdown", "Shutdown compile server")
-
- reset.value = resetCaches
- shutdown.value = shutdownServer
-
- /** XXX Since fsc is largely unmaintained, the set of options being individually
- * assessed here is likely to bear little relationship to the current set of options.
- * Most likely this manifests in confusing and very difficult to debug behavior in fsc.
- * We should warn or fix.
- */
- val stringSettings =
- List(s.outdir, s.classpath, s.bootclasspath, s.extdirs, s.encoding) flatMap (x => List(x.name, x.value))
-
- val serverOption =
- serverAddr.toList flatMap (x => List("-server", x)) // '-server' option
-
- val choiceSettings =
- List(s.debuginfo, s.target) map (x => "%s:%s".format(x.name, x.value))
-
- val booleanSettings =
- List(s.debug, s.deprecation, s.verbose, reset, shutdown) map (x => if (x.value) List(x.name) else Nil) flatten
-
- val phaseSetting = {
- val s = settings.log
- if (s.value.isEmpty) Nil
- else List("%s:%s".format(s.name, s.value.mkString(",")))
- }
-
- val cmdOptions =
- stringSettings ::: serverOption ::: choiceSettings ::: booleanSettings ::: phaseSetting
+ if (sourceFiles.isEmpty || javaOnly)
+ return
+
+ // initialize fsc specific settings
+ val s = settings.asInstanceOf[FscSettings] // safe (newSettings)
+ s.reset.value = resetCaches
+ if (!serverAddr.isEmpty) s.server.value = serverAddr.get
+ s.shutdown.value = shutdownServer
+ s.preferIPv4.value = useIPv4
+ if (!idleMinutes.isEmpty) s.idleMins.value = idleMinutes.get
+
+ val stringSettings =
+ List(
+ /*scalac*/
+ s.bootclasspath, s.classpath, s.extdirs, s.dependencyfile, s.encoding,
+ s.outdir, s.sourcepath,
+ /*fsc*/
+ s.server
+ ) filter (_.value != "") flatMap (x => List(x.name, x.value))
+
+ val choiceSettings =
+ List(
+ /*scalac*/
+ s.debuginfo, s.target
+ ) filter (x => x.value != x.default) map (x => "%s:%s".format(x.name, x.value))
+
+ val booleanSettings =
+ List(
+ /*scalac*/
+ s.debug, s.deprecation, s.explaintypes, s.nospecialization, s.nowarn,
+ s.optimise, s.unchecked, s.usejavacp, s.verbose,
+ /*fsc*/
+ s.preferIPv4, s.reset, s.shutdown
+ ) filter (_.value) map (_.name)
+
+ val intSettings =
+ List(
+ /*fsc*/
+ s.idleMins
+ ) filter (x => x.value != x.default) flatMap (x => List(x.name, x.value.toString))
+
+ val phaseSetting = {
+ val s = settings.log
+ if (s.value.isEmpty) Nil
+ else List("%s:%s".format(s.name, s.value.mkString(",")))
+ }
- val args = (cmdOptions ::: (sourceFiles map (_.toString))).toArray
- try {
- if (scala.tools.nsc.CompileClient.process(args) && failonerror)
- buildError("Compile failed; see the compiler error output for details.")
- }
- catch {
- case exception: Throwable if (exception.getMessage ne null) =>
- exception.printStackTrace()
- buildError("Compile failed because of an internal compiler error (" +
- exception.getMessage + "); see the error output for details.")
- case exception =>
- exception.printStackTrace()
- buildError("Compile failed because of an internal compiler error " +
- "(no error message provided); see the error output for details.")
+ val fscOptions =
+ stringSettings ::: choiceSettings ::: booleanSettings ::: intSettings ::: phaseSetting
+
+ val java = new Java(this)
+ java setFork true
+ // use same default memory options as in fsc script
+ java.createJvmarg() setValue "-Xmx256M"
+ java.createJvmarg() setValue "-Xms32M"
+ val scalacPath: Path = {
+ val path = new Path(getProject)
+ if (compilerPath.isDefined) path add compilerPath.get
+ else getClass.getClassLoader match {
+ case cl: AntClassLoader =>
+ path add new Path(getProject, cl.getClasspath)
+ case _ =>
+ buildError("Compilation failed because of an internal compiler error;"+
+ " see the error output for details.")
}
+ path
+ }
+ java.createJvmarg() setValue ("-Xbootclasspath/a:"+scalacPath)
+ s.jvmargs.value foreach (java.createJvmarg() setValue _)
+
+ val scalaHome: String = try {
+ val url = ScalaClassLoader.originOfClass(classOf[FastScalac]).get
+ File(url.getFile).jfile.getParentFile.getParentFile.getAbsolutePath
+ } catch {
+ case _: Throwable =>
+ buildError("Compilation failed because of an internal compiler error;"+
+ " couldn't determine value for -Dscala.home=<value>")
}
+ java.createJvmarg() setValue "-Dscala.usejavacp=true"
+ java.createJvmarg() setValue ("-Dscala.home="+scalaHome)
+ s.defines.value foreach (java.createJvmarg() setValue _)
+
+ java setClassname "scala.tools.nsc.MainGenericRunner"
+ java.createArg() setValue "scala.tools.nsc.CompileClient"
+
+ // Encode scalac/javac args for use in a file to be read back via "@file.txt"
+ def encodeScalacArgsFile(t: Traversable[String]) = t map { s =>
+ if(s.find(c => c <= ' ' || "\"'\\".contains(c)).isDefined)
+ "\"" + s.flatMap(c => (if(c == '"' || c == '\\') "\\" else "") + c ) + "\""
+ else s
+ } mkString "\n"
+
+ // dump the arguments to a file and do "java @file"
+ val tempArgFile = File.makeTemp("fastscalac")
+ val tokens = fscOptions ++ (sourceFiles map (_.getPath))
+ tempArgFile writeAll encodeScalacArgsFile(tokens)
+
+ val paths = List(Some(tempArgFile.toAbsolute.path), argfile).flatten map (_.toString)
+ val res = execWithArgFiles(java, paths)
+
+ if (failonerror && res != 0)
+ buildError("Compilation failed because of an internal compiler error;"+
+ " see the error output for details.")
}
}
diff --git a/src/compiler/scala/tools/ant/Pack200Task.scala b/src/compiler/scala/tools/ant/Pack200Task.scala
index 5d74cff..255efe5 100644
--- a/src/compiler/scala/tools/ant/Pack200Task.scala
+++ b/src/compiler/scala/tools/ant/Pack200Task.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.tools.ant
import java.io.{BufferedOutputStream, File, FileInputStream,
@@ -18,18 +16,17 @@ import java.util.jar.Pack200.Packer._
import org.apache.tools.ant.{BuildException, DirectoryScanner}
import org.apache.tools.ant.types.FileSet
-/** <p>
- * An Ant task that applies the pack200 encoding to a JAR file.
- * </p><ul>
- * <li>destdir (mandatory),</li>
- * <li>dir (defaults to project's basedir),</li>
- * <li>effort (default 9),</li>
- * <li>keepFileOrder (default false),</li>
- * <li>keepModificationTime (default false),</li>
- * <li>repack (default false),</li>
- * <li>segmentLimit (default -1 for no limit) </li>
- * <li>suffix (default ".pack")</li>
- * </ul>
+/** An [[http://ant.apache.org Ant]] task that applies the pack200 encoding
+ * to a JAR file.
+ *
+ * - `destdir` (mandatory),
+ * - `dir` (defaults to project's basedir),
+ * - `effort` (default 9),
+ * - `keepFileOrder` (default `'''false'''`),
+ * - `keepModificationTime` (default `'''false'''`),
+ * - `repack` (default false),
+ * - `segmentLimit` (default `-1` for no limit),
+ * - `suffix` (default ".pack")
*
* @author James Matlik
*/
@@ -69,8 +66,8 @@ class Pack200Task extends ScalaMatchingTask {
/** Set the flag to specify if file reordering should be performed. Reordering
* is used to remove empty packages and improve pack200 optimization.
* @param keep
- * true to retain file ordering.
- * false to optimize directory structure (DEFAULT). */
+ * `'''true'''` to retain file ordering.
+ * `'''false'''` to optimize directory structure (DEFAULT). */
def setKeepFileOrder(x: Boolean) { keepFileOrder = x }
/** If false, a single modification time is used for all contained files */
diff --git a/src/compiler/scala/tools/ant/Same.scala b/src/compiler/scala/tools/ant/Same.scala
index 52bc440..e53679f 100644
--- a/src/compiler/scala/tools/ant/Same.scala
+++ b/src/compiler/scala/tools/ant/Same.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.tools.ant
import java.io.{File, FileInputStream}
@@ -16,18 +15,20 @@ import org.apache.tools.ant.util.{FileNameMapper, IdentityMapper}
import org.apache.tools.ant.types.Mapper
-/** <p>
- * An Ant task that, for a set of files, tests them for byte-to-byte
- * equality with one or more other files.
- * This task supports the following parameters as attributes:
- * </p><ul>
- * <li>dir</li>
- * <li>todir</li>
- * <li>resultproperty (a property to be set when all tested files pairs are equal, if not set, the task will fail instead),</li>
- * <li>failing (whether to stop if all files are not equal).</li></ul>
- * <p>It also support the following nested elements:</p><ul>
- * <li>mapper (a mapper from original files to test files).</li></ul>
- * <p>This task itself defines a fileset that represents the set of original files.</p>
+/** An Ant task that, for a set of files, tests them for byte-to-byte
+ * equality with one or more other files.
+ *
+ * This task supports the following parameters as attributes:
+ * - `dir`
+ * - `todir`
+ * - `resultproperty` (a property to be set when all tested files pairs are
+ * equal, if not set, the task will fail instead),
+ * - `failing` (whether to stop if all files are not equal).
+ *
+ * It also support the following nested elements:
+ * - `mapper` (a mapper from original files to test files).
+ *
+ * This task itself defines a fileset that represents the set of original files.
*
* @author Gilles Dubochet
* @version 1.0 */
diff --git a/src/compiler/scala/tools/ant/ScalaBazaar.scala b/src/compiler/scala/tools/ant/ScalaBazaar.scala
deleted file mode 100644
index f25e4d6..0000000
--- a/src/compiler/scala/tools/ant/ScalaBazaar.scala
+++ /dev/null
@@ -1,318 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.tools.ant {
-
- import scala.collection.DefaultMap
- import scala.collection.mutable.HashMap
- import java.io.{File, FileInputStream, FileOutputStream,
- FileWriter, StringReader}
- import java.net.URL
- import java.util.ArrayList
- import java.util.zip.{ZipOutputStream, ZipEntry}
-
- import org.apache.tools.ant.{AntClassLoader, BuildException,
- DirectoryScanner, Project}
- import org.apache.tools.ant.Task
- import org.apache.tools.ant.types.Path
- import org.apache.tools.ant.util.{FileUtils, MergingMapper,
- SourceFileScanner}
- import org.apache.tools.ant.types.{EnumeratedAttribute, Reference, FileSet}
-
- /** A set of files that can be installed at any relative location */
- class LooseFileSet {
- var destination: Option[String] = None
- def setDestination(dest: String) = {
- destination = Some(dest)
- }
-
- var fileset: Option[FileSet] = None
- def addConfiguredFileSet(fs: FileSet) = {
- fileset = Some(fs)
- }
- }
- /** An Ant task that generates a Scala Bazaars package (sbp file) along
- * with an advertisement of that package.
- *
- * This task can take the following parameters as attributes:<ul>
- * <li>file (mandatory),</li>
- * <li>adfile,</li>
- * <li>name (mandatory),</li>
- * <li>version (mandatory),</li>
- * <li>depends,</li>
- * <li>description,</li>
- * <li>link.</li>
- * </ul>
- *
- * @author Gilles Dubochet */
- class ScalaBazaar extends Task with ScalaTask {
-
- /** The unique Ant file utilities instance to use in this task. */
- private val fileUtils = FileUtils.getFileUtils()
-
-/******************************************************************************\
-** Ant user-properties **
-\******************************************************************************/
-
- /** The path to the archive file. */
- private var file: Option[File] = None
- /** The optional path to the advertisement file. */
- private var adfile: Option[File] = None
- /** The name of the package. */
- private var name: Option[String] = None
- /** The version number of the package. */
- private var version: Option[String] = None
- /** An (optional) list of names of the packages it depends of. */
- private var depends: List[String] = Nil
- /** An (optional) description of this package. */
- private var desc: Option[String] = None
- /** An (optional) URL link pointing to the location of the package */
- private var link: Option[String] = None
-
- /** The sets of files to include in the package */
- private object fileSetsMap extends DefaultMap[String, List[FileSet]] {
- private var content = new HashMap[String, List[FileSet]]()
- def get(key: String): Option[List[FileSet]] = content.get(key)
- override def size: Int = content.size
- def update(key: String, value: FileSet) {
- if (content.contains(key) && content(key) != Nil)
- content.update(key, value :: content(key))
- else content.update(key, List(value))
- }
- def fileSets = content.toList
- def iterator = content.iterator
- }
-
-
-
-/******************************************************************************\
-** Internal properties **
-\******************************************************************************/
-
-
-/******************************************************************************\
-** Properties setters **
-\******************************************************************************/
-
- /** Sets the file attribute. Used by Ant.
- * @param input The value of <code>file</code>. */
- def setFile(input: File) =
- file = Some(input)
-
- /** Sets the advertisement file attribute. Used by Ant.
- * @param input The value of <code>adfile</code>. */
- def setAdfile(input: File) =
- adfile = Some(input)
-
- /** Sets the name attribute of this package. Used by Ant.
- * @param input The value of <code>name</code>. */
- def setName(input: String) =
- name = Some(input)
-
- /** Sets the version attribute of this package. Used by Ant.
- * @param input The value of <code>version</code>. */
- def setVersion(input: String) =
- version = Some(input)
-
- /** Sets the depends attribute. Used by Ant.
- * @param input The value for <code>depends</code>. */
- def setDepends(input: String) = {
- depends = input.split(",").toList.flatMap { s: String =>
- val st = s.trim()
- (if (st != "") List(st) else Nil)
- }
- }
-
- /** Sets the description attribute of this package. Used by Ant.
- * @param input The value of <code>description</code>. */
- def setDesc(input: String) =
- desc = Some(input)
-
- /** Sets the link attribute of this package. Used by Ant.
- * @param input The value of <code>link</code>. */
- def setLink(input: String) =
- link = Some(input)
-
- def addConfiguredLibset(input: FileSet) =
- fileSetsMap.update("lib", input)
-
- def addConfiguredBinset(input: FileSet) =
- fileSetsMap.update("bin", input)
-
- def addConfiguredSrcset(input: FileSet) =
- fileSetsMap.update("src", input)
-
- def addConfiguredManset(input: FileSet) =
- fileSetsMap.update("man", input)
-
- def addConfiguredDocset(input: FileSet) =
- fileSetsMap.update("doc/" + getName, input)
-
- def addConfiguredMiscset(input: FileSet) =
- fileSetsMap.update("misc/" + getName, input)
-
- def addConfiguredLooseset(set: LooseFileSet) = {
- Pair(set.destination, set.fileset) match {
- case Pair(None, _) =>
- buildError("destination not specified for a loose file set")
-
- case Pair(_, None) =>
- buildError("no files specified for a loose file set")
-
- case Pair(Some(dest), Some(fileset)) =>
- fileSetsMap.update(dest, fileset)
- }
- }
-
-/******************************************************************************\
-** Properties getters **
-\******************************************************************************/
-
- /** Gets the value of the file attribute in a Scala-friendly form.
- * @return The file as a file. */
- private def getName: String =
- if (name.isEmpty) buildError("Name attribute must be defined first.")
- else name.get
-
- /** Gets the value of the file attribute in a Scala-friendly form.
- * @return The file as a file. */
- private def getFile: File =
- if (file.isEmpty) buildError("Member 'file' is empty.")
- else getProject().resolveFile(file.get.toString())
-
- /** Gets the value of the adfile attribute in a Scala-friendly form.
- * @return The adfile as a file. */
- private def getAdfile: File =
- if (adfile.isEmpty) buildError("Member 'adfile' is empty.")
- else getProject().resolveFile(adfile.get.toString())
-
-/******************************************************************************\
-** Compilation and support methods **
-\******************************************************************************/
- /** Transforms a string name into a file relative to the provided base
- * directory.
- * @param base A file pointing to the location relative to which the name
- * will be resolved.
- * @param name A relative or absolute path to the file as a string.
- * @return A file created from the name and the base file. */
- private def nameToFile(base: File)(name: String): File =
- existing(fileUtils.resolveFile(base, name))
-
- /** Transforms a string name into a file relative to the build root
- * directory.
- * @param name A relative or absolute path to the file as a string.
- * @return A file created from the name. */
- private def nameToFile(name: String): File =
- existing(getProject().resolveFile(name))
-
- /** Tests if a file exists and prints a warning in case it doesn't. Always
- * returns the file, even if it doesn't exist.
- * @param file A file to test for existance.
- * @return The same file. */
- private def existing(file: File): File = {
- if (!file.exists())
- log("Element '" + file.toString() + "' does not exist.",
- Project.MSG_WARN)
- file
- }
-
- private def writeFile(file: File, content: String) =
- if (file.exists() && !file.canWrite())
- buildError("File " + file + " is not writable")
- else {
- val writer = new FileWriter(file, false)
- writer.write(content)
- writer.close()
- }
-
-/******************************************************************************\
-** The big execute method **
-\******************************************************************************/
-
- /** Performs the compilation. */
- override def execute() = {
- // Tests if all mandatory attributes are set and valid.
- if (file.isEmpty) buildError("Attribute 'file' is not set.")
- if (name.isEmpty) buildError("Attribute 'name' is not set.")
- if (version.isEmpty) buildError("Attribute 'version' is not set.")
-
- val pack = {
- <package>
- <name>{name.get}</name>
- <version>{version.get}</version>{
- if (!depends.isEmpty)
- <depends>{
- for (depend <- depends) yield
- <name>{depend}</name>
- }</depends>
- else Nil
- }{
- if (!desc.isEmpty)
- <description>{desc.get}</description>
- else Nil
- }
- </package>
- }
-
- log("Creating package '" + name.get + "'")
-
- // Creates the advert file
- val advert = {
- <availablePackage>
- {pack}
- {link match {
- case None => <link>INSERT LINK HERE</link>
- case Some(str) => <link>{str}</link>
- }}
- </availablePackage>
- };
-
- if (!adfile.isEmpty)
- writeFile(getAdfile, advert.toString())
-
- // Checks for new files and creates the ZIP
-
- val zipContent =
- for {
- Pair(folder, fileSets) <- fileSetsMap.fileSets
- fileSet <- fileSets
- file <- fileSet.getDirectoryScanner(getProject).getIncludedFiles.toList
- } yield Triple(folder, fileSet.getDir(getProject), file)
- val zip = new ZipOutputStream(new FileOutputStream(file.get, false))
- if (!zipContent.isEmpty) {
- for (Triple(destFolder, srcFolder, file) <- zipContent) {
- log(file, Project.MSG_DEBUG)
- zip.putNextEntry(new ZipEntry(destFolder + "/" + file))
- val input = new FileInputStream(nameToFile(srcFolder)(file))
- val buf = new Array[Byte](10240)
- var n = input.read(buf, 0, buf.length)
- while (n >= 0) {
- zip.write (buf, 0, n)
- n = input.read(buf, 0, buf.length)
- }
- zip.closeEntry()
- input.close()
- }
- } else log("Archive contains no files.", Project.MSG_VERBOSE)
- zip.putNextEntry(new ZipEntry("meta/description"))
- val packInput = new StringReader(pack.toString())
- var byte = packInput.read()
- while (byte != -1) {
- zip.write (byte)
- byte = packInput.read()
- }
- zip.closeEntry()
- packInput.close()
- zip.close
- }
-
- }
-
-}
diff --git a/src/compiler/scala/tools/ant/ScalaMatchingTask.scala b/src/compiler/scala/tools/ant/ScalaMatchingTask.scala
index 2931f48..68a84be 100644
--- a/src/compiler/scala/tools/ant/ScalaMatchingTask.scala
+++ b/src/compiler/scala/tools/ant/ScalaMatchingTask.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/compiler/scala/tools/ant/ScalaTool.scala b/src/compiler/scala/tools/ant/ScalaTool.scala
index 95dc2eb..57d24f6 100644
--- a/src/compiler/scala/tools/ant/ScalaTool.scala
+++ b/src/compiler/scala/tools/ant/ScalaTool.scala
@@ -1,30 +1,28 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.tools.ant
import java.io.{File, InputStream, FileWriter}
import org.apache.tools.ant.BuildException
import org.apache.tools.ant.types.{Path, Reference}
-/** <p>
- * An Ant task that generates a shell or batch script to execute a
- * Scala program.
- * This task can take the following parameters as attributes:
- * </p><ul>
- * <li>file (mandatory),</li>
- * <li>class (mandatory),</li>
- * <li>platforms,</li>
- * <li>classpath,</li>
- * <li>properties,</li>
- * <li>javaflags,</li>
- * <li>toolflags.</li></ul>
+/** An Ant task that generates a shell or batch script to execute a
+ * Scala program.
+ *
+ * This task can take the following parameters as attributes:
+ * - `file` (mandatory),
+ * - `class` (mandatory),
+ * - `platforms`,
+ * - `classpath`,
+ * - `properties`,
+ * - `javaflags`,
+ * - `toolflags`.
*
* @author Gilles Dubochet
* @version 1.1
@@ -48,36 +46,36 @@ class ScalaTool extends ScalaMatchingTask {
val values = List("unix", "windows")
}
- /** The path to the exec script file. ".bat" will be appended for the
+ /** The path to the exec script file. `".bat"` will be appended for the
* Windows BAT file, if generated. */
private var file: Option[File] = None
/** The main class to run. */
private var mainClass: Option[String] = None
- /** Supported platforms for the script. Either "unix" or "windows".
+ /** Supported platforms for the script. Either `"unix"` or `"windows"`.
* Defaults to both. */
private var platforms: List[String] = List("unix", "windows")
/** An (optional) path to all JARs that this script depend on. Paths must be
* relative to the scala home directory. If not set, all JAR archives and
- * folders in "lib/" are automatically added. */
+ * folders in `"lib/"` are automatically added. */
private var classpath: List[String] = Nil
/** An (optional) path to JARs that this script depends on relative to the
- * ant project's basedir. */
+ * ant project's `basedir`. */
private var classpathPath: Path = emptyPath
/** Comma-separated Java system properties to pass to the JRE. Properties
- * are formatted as name=value. Properties scala.home, scala.tool.name and
- * scala.tool.version are always set. */
+ * are formatted as `name=value`. Properties `scala.home`, `scala.tool.name`
+ * and `scala.tool.version` are always set. */
private var properties: List[(String, String)] = Nil
- /** Additional flags passed to the JRE ("java [javaFlags] class"). */
+ /** Additional flags passed to the JRE (`"java [javaFlags] class"`). */
private var javaFlags: String = ""
- /** Additional flags passed to the tool ("java class [toolFlags]"). Can only
- * be set when a main class is defined. */
+ /** Additional flags passed to the tool (`"java class [toolFlags]"`).
+ * Can only be set when a main class is defined. */
private var toolFlags: String = ""
/*============================================================================*\
@@ -104,16 +102,16 @@ class ScalaTool extends ScalaMatchingTask {
}
}
- /**
- * Sets the classpath with which to run the tool.
- * Note that this mechanism of setting the classpath is generally preferred
- * for general purpose scripts, as this does not assume all elements are
- * relative to the ant basedir. Additionally, the platform specific demarcation
- * of any script variables (e.g. ${SCALA_HOME} or %SCALA_HOME%) can be specified
- * in a platform independant way (e.g. @SCALA_HOME@) and automatically translated
- * for you.
+ /** Sets the classpath with which to run the tool.
+ *
+ * Note that this mechanism of setting the classpath is generally preferred
+ * for general purpose scripts, as this does not assume all elements are
+ * relative to the Ant `basedir`. Additionally, the platform specific
+ * demarcation of any script variables (e.g. `${SCALA_HOME}` or
+ * `%SCALA_HOME%`) can be specified in a platform independant way (e.g.
+ * `@SCALA_HOME@`) and automatically translated for you.
*/
- def setClassPath(input: String): Unit = {
+ def setClassPath(input: String) {
classpath = classpath ::: input.split(",").toList
}
@@ -127,10 +125,9 @@ class ScalaTool extends ScalaMatchingTask {
* Adds an Ant Path reference to the tool's classpath.
* Note that all entries in the path must exist either relative to the project
* basedir or with an absolute path to a file in the filesystem. As a result,
- * this is not a mechanism for setting the classpath for more general use scripts,
- * such as those distributed within sbaz distribution packages.
+ * this is not a mechanism for setting the classpath for more general use scripts.
*/
- def setClassPathRef(input: Reference): Unit = {
+ def setClassPathRef(input: Reference) {
val tmpPath = emptyPath
tmpPath.setRefid(input)
classpath = classpath ::: tmpPath.list.toList
@@ -239,7 +236,7 @@ class ScalaTool extends ScalaMatchingTask {
buildError("File " + file + " is not writable")
else {
val writer = new FileWriter(file, false)
- writer.write(content)
+ writer write content
writer.close()
}
@@ -262,13 +259,13 @@ class ScalaTool extends ScalaMatchingTask {
// Consolidate Paths into classpath
classpath = classpath ::: classpathPath.list.toList
// Generate the scripts
- if (platforms.contains("unix")) {
+ if (platforms contains "unix") {
val unixPatches = patches + (("classpath", getUnixclasspath))
val unixTemplateResource = resourceRoot + "tool-unix.tmpl"
val unixTemplate = readAndPatchResource(unixTemplateResource, unixPatches)
writeFile(file.get, unixTemplate)
}
- if (platforms.contains("windows")) {
+ if (platforms contains "windows") {
val winPatches = patches + (("classpath", getWinclasspath))
val winTemplateResource = resourceRoot + "tool-windows.tmpl"
val winTemplate = readAndPatchResource(winTemplateResource, winPatches)
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index f8d5f74..73d09e8 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -1,72 +1,72 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.tools.ant
-import java.io.{File,PrintWriter,BufferedWriter,FileWriter}
+import java.io.{File, PrintWriter, BufferedWriter, FileWriter}
import org.apache.tools.ant.{ BuildException, Project, AntClassLoader }
import org.apache.tools.ant.taskdefs.Java
import org.apache.tools.ant.types.{Path, Reference}
import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper,
- SourceFileScanner}
+ SourceFileScanner, facade}
+import org.apache.tools.ant.util.facade.{FacadeTaskHelper,
+ ImplementationSpecificArgument}
import scala.tools.nsc.{Global, Settings, CompilerCommand}
+import scala.tools.nsc.io.{Path => SPath}
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-/** <p>
- * An Ant task to compile with the new Scala compiler (NSC).
- * </p>
- * <p>
- * This task can take the following parameters as attributes:
- * </p>
- * <ul style="font-family:Courier;">
- * <li>srcdir (mandatory),</li>
- * <li>srcref,</li>
- * <li>destdir,</li>
- * <li>classpath,</li>
- * <li>classpathref,</li>
- * <li>sourcepath,</li>
- * <li>sourcepathref,</li>
- * <li>bootclasspath,</li>
- * <li>bootclasspathref,</li>
- * <li>extdirs,</li>
- * <li>extdirsref,</li>
- * <li>encoding,</li>
- * <li>target,</li>
- * <li>force,</li>
- * <li>fork,</li>
- * <li>logging,</li>
- * <li>logphase,</li>
- * <li>debuginfo,</li>
- * <li>addparams,</li>
- * <li>scalacdebugging,</li>
- * <li>deprecation,</li>
- * <li>optimise,</li>
- * <li>unchecked,</li>
- * <li>failonerror,</li>
- * <li>scalacdebugging,</li>
- * <li>assemname,</li>
- * <li>assemrefs.</li>
- * </ul>
- * <p>
- * It also takes the following parameters as nested elements:
- * </p>
- * <ul>
- * <li>src (for srcdir),</li>
- * <li>classpath,</li>
- * <li>sourcepath,</li>
- * <li>bootclasspath,</li>
- * <li>extdirs.</li>
- * </ul>
+/** An Ant task to compile with the new Scala compiler (NSC).
+ *
+ * This task can take the following parameters as attributes:
+ * - `srcdir` (mandatory),
+ * - `srcref`,
+ * - `destdir`,
+ * - `classpath`,
+ * - `classpathref`,
+ * - `sourcepath`,
+ * - `sourcepathref`,
+ * - `bootclasspath`,
+ * - `bootclasspathref`,
+ * - `extdirs`,
+ * - `extdirsref`,
+ * - `argfile`,
+ * - `dependencyfile`,
+ * - `encoding`,
+ * - `target`,
+ * - `force`,
+ * - `fork`,
+ * - `logging`,
+ * - `logphase`,
+ * - `debuginfo`,
+ * - `addparams`,
+ * - `explaintypes`,
+ * - `deprecation`,
+ * - `nobootcp`,
+ * - `nowarn`,
+ * - `optimise`,
+ * - `unchecked`,
+ * - `usejavacp`,
+ * - `failonerror`,
+ * - `scalacdebugging`,
+ * - `assemname`,
+ * - `assemrefs`.
+ *
+ * It also takes the following parameters as nested elements:
+ * - `src` (for `srcdir`),
+ * - `classpath`,
+ * - `sourcepath`,
+ * - `bootclasspath`,
+ * - `extdirs`,
+ * - `compilerarg`.
*
- * @author Gilles Dubochet, Stephane Micheloud
+ * @author Gilles Dubochet, Stephane Micheloud
*/
class Scalac extends ScalaMatchingTask with ScalacShared {
@@ -90,19 +90,19 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Defines valid values for properties that refer to compiler phases. */
object CompilerPhase extends PermissibleValue {
- val values = List("namer", "typer", "pickler", "uncurry", "tailcalls",
- "explicitouter", "erasure", "lambdalift",
- "flatten", "constructors", "mixin", "icode", "jvm",
- "terminal")
+ val values = List("namer", "typer", "pickler", "refchecks",
+ "uncurry", "tailcalls", "specialize", "explicitouter",
+ "erasure", "lazyvals", "lambdalift", "constructors",
+ "flatten", "mixin", "cleanup", "icode", "inliner",
+ "closelim", "dce", "jvm", "terminal")
}
- /** Defines valid values for the <code>target</code> property. */
+ /** Defines valid values for the `target` property. */
object Target extends PermissibleValue {
- val values = List("jvm-1.5", "msil")
+ val values = List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil")
}
- /** Defines valid values for the <code>deprecation</code> and
- * <code>unchecked</code> properties. */
+ /** Defines valid values for the `deprecation` and `unchecked` properties. */
object Flag extends PermissibleValue {
val values = List("yes", "no", "on", "off", "true", "false")
def toBoolean(flag: String) =
@@ -127,6 +127,9 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** The external extensions path to use for this compilation. */
protected var extdirs: Option[Path] = None
+ protected var argfile: Option[File] = None
+ /** The dependency tracking file. */
+ protected var dependencyfile: Option[File] = None
/** The character encoding of the files to compile. */
protected var encoding: Option[String] = None
@@ -149,12 +152,20 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
protected var debugInfo: Option[String] = None
/** Instruct the compiler to use additional parameters */
protected var addParams: String = ""
+ /** Instruct the compiler to explain type errors in more detail. */
+ protected var explaintypes: Option[Boolean] = None
/** Instruct the compiler to generate deprecation information. */
protected var deprecation: Option[Boolean] = None
+ /** Instruct the compiler to not use the boot classpath for the scala jars. */
+ protected var nobootcp: Option[Boolean] = None
+ /** Instruct the compiler to generate no warnings. */
+ protected var nowarn: Option[Boolean] = None
/** Instruct the compiler to run optimizations. */
protected var optimise: Option[Boolean] = None
/** Instruct the compiler to generate unchecked information. */
protected var unchecked: Option[Boolean] = None
+ /** Instruct the compiler to use `java.class.path` in classpath resolution. */
+ protected var usejavacp: Option[Boolean] = None
/** Indicates whether compilation errors will fail the build; defaults to true. */
protected var failonerror: Boolean = true
@@ -167,6 +178,9 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
* (not only the number of files). */
protected var scalacDebugging: Boolean = false
+ /** Encapsulates implementation of specific command line arguments. */
+ protected var scalacCompilerArgs = new FacadeTaskHelper("compilerarg")
+
/** Helpers */
private def setOrAppend(old: Option[Path], arg: Path): Option[Path] = old match {
case Some(x) => x append arg ; Some(x)
@@ -178,7 +192,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
}
private def createNewPath(getter: () => Option[Path], setter: (Option[Path]) => Unit) = {
if (getter().isEmpty)
- setter(Some(new Path(getProject())))
+ setter(Some(new Path(getProject)))
getter().get.createPath()
}
@@ -191,137 +205,150 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
\*============================================================================*/
- /** Sets the srcdir attribute. Used by Ant.
- * @param input The value of <code>origin</code>. */
+ /** Sets the `srcdir` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `origin`. */
def setSrcdir(input: Path) {
origin = setOrAppend(origin, input)
}
- /** Sets the <code>origin</code> as a nested src Ant parameter.
+ /** Sets the `origin` as a nested src Ant parameter.
* @return An origin path to be configured. */
def createSrc(): Path = createNewPath(origin _, p => origin = p)
- /** Sets the <code>origin</code> as an external reference Ant parameter.
+ /** Sets the `origin` as an external reference Ant parameter.
* @param input A reference to an origin path. */
def setSrcref(input: Reference) =
createSrc().setRefid(input)
- /** Sets the <code>destdir</code> attribute. Used by Ant.
- * @param input The value of <code>destination</code>. */
+ /** Sets the `destdir` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `destination`. */
def setDestdir(input: File) { destination = Some(input) }
- /** Sets the <code>classpath</code> attribute. Used by Ant.
- * @param input The value of <code>classpath</code>. */
+ /** Sets the `classpath` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `classpath`. */
def setClasspath(input: Path) {
classpath = setOrAppend(classpath, input)
}
- /** Sets the <code>compilerPath</code> attribute. Used by Ant.
- * @param input The value of <code>compilerPath</code>. */
- def setCompilerPath(input : Path) {
+ /** Sets the `compilerPath` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `compilerPath`. */
+ def setCompilerPath(input: Path) {
compilerPath = setOrAppend(compilerPath, input)
}
def createCompilerPath: Path = createNewPath(compilerPath _, p => compilerPath = p)
- /** Sets the <code>compilerpathref</code> attribute. Used by Ant.
- * @param input The value of <code>compilerpathref</code>. */
+ /** Sets the `compilerpathref` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `compilerpathref`. */
def setCompilerPathRef(input: Reference) {
createCompilerPath.setRefid(input)
}
- /** Sets the <code>classpath</code> as a nested classpath Ant parameter.
+ /** Sets the `classpath` as a nested classpath Ant parameter.
* @return A class path to be configured. */
def createClasspath(): Path = createNewPath(classpath _, p => classpath = p)
- /** Sets the <code>classpath</code> as an external reference Ant parameter.
+ /** Sets the `classpath` as an external reference Ant parameter.
* @param input A reference to a class path. */
def setClasspathref(input: Reference) {
createClasspath().setRefid(input)
}
- /** Sets the <code>sourcepath</code> attribute. Used by Ant.
- * @param input The value of <code>sourcepath</code>. */
+ /** Sets the `sourcepath` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `sourcepath`. */
def setSourcepath(input: Path) {
sourcepath = setOrAppend(sourcepath, input)
}
- /** Sets the <code>sourcepath</code> as a nested sourcepath Ant parameter.
+ /** Sets the `sourcepath` as a nested sourcepath Ant parameter.
* @return A source path to be configured. */
def createSourcepath(): Path = createNewPath(sourcepath _, p => sourcepath = p)
- /** Sets the <code>sourcepath</code> as an external reference Ant parameter.
+ /** Sets the `sourcepath` as an external reference Ant parameter.
* @param input A reference to a source path. */
def setSourcepathref(input: Reference) {
createSourcepath().setRefid(input)
}
- /** Sets the boot classpath attribute. Used by Ant.
+ /** Sets the boot classpath attribute. Used by [[http://ant.apache.org Ant]].
*
- * @param input The value of <code>bootclasspath</code>. */
+ * @param input The value of `bootclasspath`. */
def setBootclasspath(input: Path) {
bootclasspath = setOrAppend(bootclasspath, input)
}
- /** Sets the <code>bootclasspath</code> as a nested sourcepath Ant
- * parameter.
+ /** Sets the `bootclasspath` as a nested bootclasspath Ant parameter.
* @return A source path to be configured. */
def createBootclasspath(): Path = createNewPath(bootclasspath _, p => bootclasspath = p)
- /** Sets the <code>bootclasspath</code> as an external reference Ant
+ /** Sets the `bootclasspath` as an external reference Ant
* parameter.
* @param input A reference to a source path. */
def setBootclasspathref(input: Reference) =
createBootclasspath().setRefid(input)
- /** Sets the external extensions path attribute. Used by Ant.
- * @param input The value of <code>extdirs</code>. */
- def setExtdirs(input: Path) =
+ /** Sets the external extensions path attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `extdirs`. */
+ def setExtdirs(input: Path) {
extdirs = setOrAppend(extdirs, input)
+ }
- /** Sets the <code>extdirs</code> as a nested sourcepath Ant parameter.
+ /** Sets the `extdirs` as a nested extdirs Ant parameter.
* @return An extensions path to be configured. */
def createExtdirs(): Path = createNewPath(extdirs _, p => extdirs = p)
- /** Sets the <code>extdirs</code> as an external reference Ant parameter.
+ /** Sets the `extdirs` as an external reference Ant parameter.
* @param input A reference to an extensions path. */
def setExtdirsref(input: Reference) =
createExtdirs().setRefid(input)
- /** Sets the <code>encoding</code> attribute. Used by Ant.
- * @param input The value of <code>encoding</code>. */
- def setEncoding(input: String): Unit =
+ /** Sets the `argfile` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `argfile`. */
+ def setArgfile(input: File) {
+ argfile = Some(input)
+ }
+
+ /** Sets the `dependencyfile` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `dependencyfile`. */
+ def setDependencyfile(input: File) {
+ dependencyfile = Some(input)
+ }
+
+ /** Sets the `encoding` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `encoding`. */
+ def setEncoding(input: String) {
encoding = Some(input)
+ }
- /** Sets the <code>target</code> attribute. Used by Ant.
- * @param input The value for <code>target</code>. */
+ /** Sets the `target` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value for `target`. */
def setTarget(input: String): Unit =
if (Target.isPermissible(input)) backend = Some(input)
else buildError("Unknown target '" + input + "'")
- /** Sets the <code>force</code> attribute. Used by Ant.
- * @param input The value for <code>force</code>. */
+ /** Sets the `force` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value for `force`. */
def setForce(input: Boolean) { force = input }
- /** Sets the <code>fork</code> attribute. Used by Ant.
- * @param input The value for <code>fork</code>. */
+ /** Sets the `fork` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value for `fork`. */
def setFork(input : Boolean) { fork = input }
/**
- * Sets the <code>jvmargs</code> attribute. Used by Ant.
- * @param input The value for <code>jvmargs</code>
+ * Sets the `jvmargs` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value for `jvmargs`
*/
def setJvmargs(input : String) {
jvmArgs = Some(input)
}
- /** Sets the logging level attribute. Used by Ant.
- * @param input The value for <code>logging</code>. */
+ /** Sets the logging level attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value for `logging`. */
def setLogging(input: String) {
if (LoggingLevel.isPermissible(input)) logging = Some(input)
else buildError("Logging level '" + input + "' does not exist.")
}
- /** Sets the <code>logphase</code> attribute. Used by Ant.
- * @param input The value for <code>logPhase</code>. */
+ /** Sets the `logphase` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value for `logPhase`. */
def setLogPhase(input: String) {
logPhase = input.split(",").toList.flatMap { s: String =>
val st = s.trim()
@@ -333,38 +360,62 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
}
}
- /** Set the <code>debug</code> info attribute.
- * @param input The value for <code>debug</code>. */
+ /** Set the `debug` info attribute.
+ * @param input The value for `debug`. */
def setDebuginfo(input: String) { debugInfo = Some(input) }
- /** Set the <code>addparams</code> info attribute.
- * @param input The value for <code>addparams</code>. */
+ /** Set the `addparams` info attribute.
+ * @param input The value for `addparams`. */
def setAddparams(input: String) { addParams = input }
- /** Set the <code>deprecation</code> info attribute.
- * @param input One of the flags <code>yes/no</code> or <code>on/off</code>. */
+ /** Set the `explaintypes` info attribute.
+ * @param input One of the flags `yes/no` or `on/off`. */
+ def setExplaintypes(input: String) {
+ explaintypes = Flag toBoolean input orElse buildError("Unknown explaintypes flag '" + input + "'")
+ }
+
+ /** Set the `deprecation` info attribute.
+ * @param input One of the flags `yes/no` or `on/off`. */
def setDeprecation(input: String) {
deprecation = Flag toBoolean input orElse buildError("Unknown deprecation flag '" + input + "'")
}
- /** Set the <code>optimise</code> info attribute.
- * @param input One of the flags <code>yes/no</code> or <code>on/off</code>. */
+ /** Set the `nobootcp` info attribute.
+ * @param input One of the flags `yes/no` or `on/off`. */
+ def setNobootcp(input: String) {
+ nobootcp = Flag toBoolean input orElse buildError("Unknown nobootcp flag '" + input + "'")
+ }
+
+ /** Set the `nowarn` info attribute.
+ * @param input One of the flags `yes/no` or `on/off`. */
+ def setNowarn(input: String) {
+ nowarn = Flag toBoolean input orElse buildError("Unknown nowarn flag '" + input + "'")
+ }
+
+ /** Set the `optimise` info attribute.
+ * @param input One of the flags `yes/no` or `on/off`. */
def setOptimise(input: String) {
optimise = Flag toBoolean input orElse buildError("Unknown optimisation flag '" + input + "'")
}
- /** Set the <code>unchecked</code> info attribute.
- * @param input One of the flags <code>yes/no</code> or <code>on/off</code>. */
+ /** Set the `unchecked` info attribute.
+ * @param input One of the flags `yes/no` or `on/off`. */
def setUnchecked(input: String) {
unchecked = Flag toBoolean input orElse buildError("Unknown unchecked flag '" + input + "'")
}
- /** Sets the <code>force</code> attribute. Used by Ant.
- * @param input The value for <code>force</code>. */
+ /** Set the `usejavacp` info attribute.
+ * @param input One of the flags `yes/no` or `on/off`. */
+ def setUsejavacp(input: String) {
+ usejavacp = Flag toBoolean input orElse buildError("Unknown usejavacp flag '" + input + "'")
+ }
+
+ /** Sets the `failonerror` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value for `failonerror`. */
def setFailonerror(input: Boolean) { failonerror = input }
- /** Set the <code>scalacdebugging</code> info attribute. If set to
- * <code>true</code>, the scalac ant task will print out the filenames
+ /** Set the `scalacdebugging` info attribute. If set to
+ * `'''true'''`, the scalac ant task will print out the filenames
* being compiled.
* @param input The specified flag */
def setScalacdebugging(input: Boolean) { scalacDebugging = input }
@@ -372,38 +423,46 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
def setAssemname(input: String) { assemname = Some(input) }
def setAssemrefs(input: String) { assemrefs = Some(input) }
+ /** Sets the `compilerarg` as a nested compilerarg Ant parameter.
+ * @return A compiler argument to be configured. */
+ def createCompilerArg(): ImplementationSpecificArgument = {
+ val arg = new ImplementationSpecificArgument()
+ scalacCompilerArgs addImplementationArgument arg
+ arg
+ }
+
/*============================================================================*\
** Properties getters **
\*============================================================================*/
- /** Gets the value of the <code>classpath</code> attribute in a
+ /** Gets the value of the `classpath` attribute in a
* Scala-friendly form.
* @return The class path as a list of files. */
protected def getClasspath: List[File] = pathAsList(classpath, "classpath")
- /** Gets the value of the <code>origin</code> attribute in a
+ /** Gets the value of the `origin` attribute in a
* Scala-friendly form.
* @return The origin path as a list of files. */
protected def getOrigin: List[File] = pathAsList(origin, "origin")
- /** Gets the value of the <code>destination</code> attribute in a
+ /** Gets the value of the `destination` attribute in a
* Scala-friendly form.
* @return The destination as a file. */
protected def getDestination: File =
if (destination.isEmpty) buildError("Member 'destination' is empty.")
- else existing(getProject().resolveFile(destination.get.toString))
+ else existing(getProject resolveFile destination.get.toString)
- /** Gets the value of the <code>sourcepath</code> attribute in a
+ /** Gets the value of the `sourcepath` attribute in a
* Scala-friendly form.
* @return The source path as a list of files. */
protected def getSourcepath: List[File] = pathAsList(sourcepath, "sourcepath")
- /** Gets the value of the <code>bootclasspath</code> attribute in a
+ /** Gets the value of the `bootclasspath` attribute in a
* Scala-friendly form.
* @return The boot class path as a list of files. */
protected def getBootclasspath: List[File] = pathAsList(bootclasspath, "bootclasspath")
- /** Gets the value of the <code>extdirs</code> attribute in a
+ /** Gets the value of the `extdirs` attribute in a
* Scala-friendly form.
* @return The extensions path as a list of files. */
protected def getExtdirs: List[File] = pathAsList(extdirs, "extdirs")
@@ -426,14 +485,14 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
* @param name A relative or absolute path to the file as a string.
* @return A file created from the name. */
protected def nameToFile(name: String): File =
- existing(getProject().resolveFile(name))
+ existing(getProject resolveFile name)
/** Tests if a file exists and prints a warning in case it doesn't. Always
* returns the file, even if it doesn't exist.
* @param file A file to test for existance.
* @return The same file. */
protected def existing(file: File): File = {
- if (!file.exists())
+ if (!file.exists)
log("Element '" + file.toString + "' does not exist.",
Project.MSG_WARN)
file
@@ -441,13 +500,13 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Transforms a path into a Scalac-readable string.
* @param path A path to convert.
- * @return A string-representation of the path like <code>a.jar:b.jar</code>. */
+ * @return A string-representation of the path like `a.jar:b.jar`. */
protected def asString(path: List[File]): String =
- path.map(asString).mkString(File.pathSeparator)
+ path.map(asString) mkString File.pathSeparator
/** Transforms a file into a Scalac-readable string.
* @param path A file to convert.
- * @return A string-representation of the file like <code>/x/k/a.scala</code>. */
+ * @return A string-representation of the file like `/x/k/a.scala`. */
protected def asString(file: File): String =
file.getAbsolutePath()
@@ -457,10 +516,10 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
protected def newSettings(error: String=>Unit): Settings =
new Settings(error)
+
protected def newGlobal(settings: Settings, reporter: Reporter) =
new Global(settings, reporter)
-
/*============================================================================*\
** The big execute method **
\*============================================================================*/
@@ -468,7 +527,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Initializes settings and source files */
protected def initialize: (Settings, List[File], Boolean) = {
if (scalacDebugging)
- log("Base directory is `%s`".format(scala.tools.nsc.io.Path("").normalize))
+ log("Base directory is `%s`".format(SPath("").normalize))
// Tests if all mandatory attributes are set and valid.
if (origin.isEmpty) buildError("Attribute 'srcdir' is not set.")
@@ -483,7 +542,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
var javaOnly = true
def getOriginFiles(originDir: File) = {
- val includedFiles = getDirectoryScanner(originDir).getIncludedFiles()
+ val includedFiles = getDirectoryScanner(originDir).getIncludedFiles
val javaFiles = includedFiles filter (_ endsWith ".java")
val scalaFiles = {
val xs = includedFiles filter (_ endsWith ".scala")
@@ -533,6 +592,8 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
if (!bootclasspath.isEmpty)
settings.bootclasspath.value = asString(getBootclasspath)
if (!extdirs.isEmpty) settings.extdirs.value = asString(getExtdirs)
+ if (!dependencyfile.isEmpty)
+ settings.dependencyfile.value = asString(dependencyfile.get)
if (!encoding.isEmpty) settings.encoding.value = encoding.get
if (!backend.isEmpty) settings.target.value = backend.get
if (!logging.isEmpty && logging.get == "verbose")
@@ -543,13 +604,22 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
}
if (!logPhase.isEmpty) settings.log.value = logPhase
if (!debugInfo.isEmpty) settings.debuginfo.value = debugInfo.get
+ if (!explaintypes.isEmpty) settings.explaintypes.value = explaintypes.get
if (!deprecation.isEmpty) settings.deprecation.value = deprecation.get
- if (!optimise.isEmpty) settings.XO.value = optimise.get
+ if (!nobootcp.isEmpty) settings.nobootcp.value = nobootcp.get
+ if (!nowarn.isEmpty) settings.nowarn.value = nowarn.get
+ if (!optimise.isEmpty) settings.optimise.value = optimise.get
if (!unchecked.isEmpty) settings.unchecked.value = unchecked.get
+ if (!usejavacp.isEmpty) settings.usejavacp.value = usejavacp.get
if (!assemname.isEmpty) settings.assemname.value = assemname.get
if (!assemrefs.isEmpty) settings.assemrefs.value = assemrefs.get
+ val jvmargs = scalacCompilerArgs.getArgs filter (_ startsWith "-J")
+ if (!jvmargs.isEmpty) settings.jvmargs.value = jvmargs.toList
+ val defines = scalacCompilerArgs.getArgs filter (_ startsWith "-D")
+ if (!defines.isEmpty) settings.defines.value = defines.toList
+
log("Scalac params = '" + addParams + "'", Project.MSG_DEBUG)
// let CompilerCommand processes all params
@@ -561,8 +631,8 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
command.settings.dependenciesFile.value match {
case "none" =>
case x =>
- val depFilePath = scala.tools.nsc.io.Path(x)
- command.settings.dependenciesFile.value = scala.tools.nsc.io.Path(getProject.getBaseDir).normalize resolve depFilePath path
+ val depFilePath = SPath(x)
+ command.settings.dependenciesFile.value = SPath(getProject.getBaseDir).normalize.resolve(depFilePath).path
}
(command.settings, sourceFiles, javaOnly)
@@ -599,8 +669,8 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
java setClassname MainClass
// Write all settings to a temporary file
- def writeSettings() : File = {
- def escapeArgument(arg : String) = if(arg.matches(".*\\s.*")) ('"' + arg + '"') else arg
+ def writeSettings(): File = {
+ def escapeArgument(arg : String) = if (arg matches ".*\\s.*") '"' + arg + '"' else arg
val file = File.createTempFile("scalac-ant-",".args")
file.deleteOnExit()
val out = new PrintWriter(new BufferedWriter(new FileWriter(file)))
diff --git a/src/compiler/scala/tools/ant/ScalacShared.scala b/src/compiler/scala/tools/ant/ScalacShared.scala
index b29d947..2c88d87 100644
--- a/src/compiler/scala/tools/ant/ScalacShared.scala
+++ b/src/compiler/scala/tools/ant/ScalacShared.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/compiler/scala/tools/ant/Scaladoc.scala
index 3d19a62..7fc8117 100644
--- a/src/compiler/scala/tools/ant/Scaladoc.scala
+++ b/src/compiler/scala/tools/ant/Scaladoc.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,7 +11,7 @@ package scala.tools.ant
import java.io.File
-import org.apache.tools.ant.{BuildException, Project}
+import org.apache.tools.ant.Project
import org.apache.tools.ant.types.{Path, Reference}
import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper}
@@ -43,7 +43,9 @@ import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
* - `deprecation`,
* - `docgenerator`,
* - `docrootcontent`,
- * - `unchecked`.
+ * - `unchecked`,
+ * - `nofail`,
+ * - `skipPackages`.
*
* It also takes the following parameters as nested elements:
* - `src` (for srcdir),
@@ -69,11 +71,16 @@ class Scaladoc extends ScalaMatchingTask {
(value == "") || values.exists(_.startsWith(value))
}
- /** Defines valid values for the <code>deprecation</code> and
- * <code>unchecked</code> properties.
+ /** Defines valid values for the `deprecation` and
+ * `unchecked` properties.
*/
object Flag extends PermissibleValue {
val values = List("yes", "no", "on", "off")
+ def getBooleanValue(value: String, flagName: String): Boolean =
+ if (Flag.isPermissible(value))
+ return ("yes".equals(value) || "on".equals(value))
+ else
+ buildError("Unknown " + flagName + " flag '" + value + "'")
}
/** The directories that contain source files to compile. */
@@ -123,29 +130,62 @@ class Scaladoc extends ScalaMatchingTask {
/** Instruct the compiler to generate unchecked information. */
private var unchecked: Boolean = false
+ /** Instruct the ant task not to fail in the event of errors */
+ private var nofail: Boolean = false
+
+ /** Instruct the scaladoc tool to document implicit conversions */
+ private var docImplicits: Boolean = false
+
+ /** Instruct the scaladoc tool to document all (including impossible) implicit conversions */
+ private var docImplicitsShowAll: Boolean = false
+
+ /** Instruct the scaladoc tool to output implicits debugging information */
+ private var docImplicitsDebug: Boolean = false
+
+ /** Instruct the scaladoc tool to create diagrams */
+ private var docDiagrams: Boolean = false
+
+ /** Instruct the scaladoc tool to output diagram creation debugging information */
+ private var docDiagramsDebug: Boolean = false
+
+ /** Instruct the scaladoc tool to use the binary given to create diagrams */
+ private var docDiagramsDotPath: Option[String] = None
+
+ /** Instruct the scaladoc to produce textual ouput from html pages, for easy diff-ing */
+ private var docRawOutput: Boolean = false
+
+ /** Instruct the scaladoc not to generate prefixes */
+ private var docNoPrefixes: Boolean = false
+
+ /** Instruct the scaladoc tool to group similar functions together */
+ private var docGroups: Boolean = false
+
+ /** Instruct the scaladoc tool to skip certain packages */
+ private var docSkipPackages: String = ""
+
/*============================================================================*\
** Properties setters **
\*============================================================================*/
- /** Sets the <code>srcdir</code> attribute. Used by Ant.
+ /** Sets the `srcdir` attribute. Used by [[http://ant.apache.org Ant]].
*
- * @param input The value of <code>origin</code>.
+ * @param input The value of `origin`.
*/
def setSrcdir(input: Path) {
if (origin.isEmpty) origin = Some(input)
else origin.get.append(input)
}
- /** Sets the <code>origin</code> as a nested src Ant parameter.
+ /** Sets the `origin` as a nested src Ant parameter.
*
* @return An origin path to be configured.
*/
def createSrc(): Path = {
- if (origin.isEmpty) origin = Some(new Path(getProject()))
+ if (origin.isEmpty) origin = Some(new Path(getProject))
origin.get.createPath()
}
- /** Sets the <code>origin</code> as an external reference Ant parameter.
+ /** Sets the `origin` as an external reference Ant parameter.
*
* @param input A reference to an origin path.
*/
@@ -153,83 +193,81 @@ class Scaladoc extends ScalaMatchingTask {
createSrc().setRefid(input)
}
- /** Sets the <code>destdir</code> attribute. Used by Ant.
+ /** Sets the `destdir` attribute. Used by [[http://ant.apache.org Ant]].
*
- * @param input The value of <code>destination</code>.
+ * @param input The value of `destination`.
*/
def setDestdir(input: File) {
destination = Some(input)
}
- /** Sets the <code>classpath</code> attribute. Used by Ant.
+ /** Sets the `classpath` attribute. Used by [[http://ant.apache.org Ant]].
*
- * @param input The value of <code>classpath</code>.
+ * @param input The value of `classpath`.
*/
def setClasspath(input: Path) {
if (classpath.isEmpty) classpath = Some(input)
else classpath.get.append(input)
}
- /** Sets the <code>classpath</code> as a nested classpath Ant parameter.
+ /** Sets the `classpath` as a nested classpath Ant parameter.
*
* @return A class path to be configured.
*/
def createClasspath(): Path = {
- if (classpath.isEmpty) classpath = Some(new Path(getProject()))
+ if (classpath.isEmpty) classpath = Some(new Path(getProject))
classpath.get.createPath()
}
- /** Sets the <code>classpath</code> as an external reference Ant parameter.
+ /** Sets the `classpath` as an external reference Ant parameter.
*
* @param input A reference to a class path.
*/
def setClasspathref(input: Reference) =
createClasspath().setRefid(input)
- /** Sets the <code>sourcepath</code> attribute. Used by Ant.
+ /** Sets the `sourcepath` attribute. Used by [[http://ant.apache.org Ant]].
*
- * @param input The value of <code>sourcepath</code>.
+ * @param input The value of `sourcepath`.
*/
def setSourcepath(input: Path) =
if (sourcepath.isEmpty) sourcepath = Some(input)
else sourcepath.get.append(input)
- /** Sets the <code>sourcepath</code> as a nested sourcepath Ant parameter.
+ /** Sets the `sourcepath` as a nested sourcepath Ant parameter.
*
* @return A source path to be configured.
*/
def createSourcepath(): Path = {
- if (sourcepath.isEmpty) sourcepath = Some(new Path(getProject()))
+ if (sourcepath.isEmpty) sourcepath = Some(new Path(getProject))
sourcepath.get.createPath()
}
- /** Sets the <code>sourcepath</code> as an external reference Ant parameter.
+ /** Sets the `sourcepath` as an external reference Ant parameter.
*
* @param input A reference to a source path.
*/
def setSourcepathref(input: Reference) =
createSourcepath().setRefid(input)
- /** Sets the <code>bootclasspath</code> attribute. Used by Ant.
+ /** Sets the `bootclasspath` attribute. Used by [[http://ant.apache.org Ant]].
*
- * @param input The value of <code>bootclasspath</code>.
+ * @param input The value of `bootclasspath`.
*/
def setBootclasspath(input: Path) =
if (bootclasspath.isEmpty) bootclasspath = Some(input)
else bootclasspath.get.append(input)
- /** Sets the <code>bootclasspath</code> as a nested sourcepath Ant
- * parameter.
+ /** Sets the `bootclasspath` as a nested `sourcepath` Ant parameter.
*
* @return A source path to be configured.
*/
def createBootclasspath(): Path = {
- if (bootclasspath.isEmpty) bootclasspath = Some(new Path(getProject()))
+ if (bootclasspath.isEmpty) bootclasspath = Some(new Path(getProject))
bootclasspath.get.createPath()
}
- /** Sets the <code>bootclasspath</code> as an external reference Ant
- * parameter.
+ /** Sets the `bootclasspath` as an external reference Ant parameter.
*
* @param input A reference to a source path.
*/
@@ -237,25 +275,25 @@ class Scaladoc extends ScalaMatchingTask {
createBootclasspath().setRefid(input)
}
- /** Sets the external extensions path attribute. Used by Ant.
+ /** Sets the external extensions path attribute. Used by [[http://ant.apache.org Ant]].
*
- * @param input The value of <code>extdirs</code>.
+ * @param input The value of `extdirs`.
*/
def setExtdirs(input: Path) {
if (extdirs.isEmpty) extdirs = Some(input)
else extdirs.get.append(input)
}
- /** Sets the <code>extdirs</code> as a nested sourcepath Ant parameter.
+ /** Sets the `extdirs` as a nested sourcepath Ant parameter.
*
* @return An extensions path to be configured.
*/
def createExtdirs(): Path = {
- if (extdirs.isEmpty) extdirs = Some(new Path(getProject()))
+ if (extdirs.isEmpty) extdirs = Some(new Path(getProject))
extdirs.get.createPath()
}
- /** Sets the <code>extdirs</code> as an external reference Ant parameter.
+ /** Sets the `extdirs` as an external reference Ant parameter.
*
* @param input A reference to an extensions path.
*/
@@ -263,15 +301,15 @@ class Scaladoc extends ScalaMatchingTask {
createExtdirs().setRefid(input)
}
- /** Sets the <code>encoding</code> attribute. Used by Ant.
+ /** Sets the `encoding` attribute. Used by Ant.
*
- * @param input The value of <code>encoding</code>.
+ * @param input The value of `encoding`.
*/
def setEncoding(input: String) {
encoding = Some(input)
}
- /** Sets the <code>docgenerator</code> attribute.
+ /** Sets the `docgenerator` attribute.
*
* @param input A fully qualified class name of a doclet.
*/
@@ -291,47 +329,47 @@ class Scaladoc extends ScalaMatchingTask {
/** Sets the `docversion` attribute.
*
- * @param input The value of <code>docversion</code>.
+ * @param input The value of `docversion`.
*/
def setDocversion(input: String) {
docversion = Some(input)
}
- /** Sets the <code>docsourceurl</code> attribute.
+ /** Sets the `docsourceurl` attribute.
*
- * @param input The value of <code>docsourceurl</code>.
+ * @param input The value of `docsourceurl`.
*/
def setDocsourceurl(input: String) {
docsourceurl = Some(input)
}
- /** Sets the <code>doctitle</code> attribute.
+ /** Sets the `doctitle` attribute.
*
- * @param input The value of <code>doctitle</code>.
+ * @param input The value of `doctitle`.
*/
def setDoctitle(input: String) {
doctitle = Some(input)
}
- /** Sets the <code>docfooter</code> attribute.
+ /** Sets the `docfooter` attribute.
*
- * @param input The value of <code>docfooter</code>.
+ * @param input The value of `docfooter`.
*/
def setDocfooter(input: String) {
docfooter = Some(input)
}
- /** Set the <code>addparams</code> info attribute.
+ /** Set the `addparams` info attribute.
*
- * @param input The value for <code>addparams</code>.
+ * @param input The value for `addparams`.
*/
def setAddparams(input: String) {
addParams = input
}
- /** Set the <code>deprecation</code> info attribute.
+ /** Set the `deprecation` info attribute.
*
- * @param input One of the flags <code>yes/no</code> or <code>on/off</code>.
+ * @param input One of the flags `yes/no` or `on/off`.
*/
def setDeprecation(input: String) {
if (Flag.isPermissible(input))
@@ -340,9 +378,9 @@ class Scaladoc extends ScalaMatchingTask {
buildError("Unknown deprecation flag '" + input + "'")
}
- /** Set the <code>unchecked</code> info attribute.
+ /** Set the `unchecked` info attribute.
*
- * @param input One of the flags <code>yes/no</code> or <code>on/off</code>.
+ * @param input One of the flags `yes/no` or `on/off`.
*/
def setUnchecked(input: String) {
if (Flag.isPermissible(input))
@@ -355,63 +393,122 @@ class Scaladoc extends ScalaMatchingTask {
docUncompilable = Some(input)
}
+ /** Set the `nofail` info attribute.
+ *
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off.
+ */
+ def setNoFail(input: String) =
+ nofail = Flag.getBooleanValue(input, "nofail")
+
+ /** Set the `implicits` info attribute.
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setImplicits(input: String) =
+ docImplicits = Flag.getBooleanValue(input, "implicits")
+
+ /** Set the `implicitsShowAll` info attribute to enable scaladoc to show all implicits, including those impossible to
+ * convert to from the default scope
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setImplicitsShowAll(input: String) =
+ docImplicitsShowAll = Flag.getBooleanValue(input, "implicitsShowAll")
+
+ /** Set the `implicitsDebug` info attribute so scaladoc outputs implicit conversion debug information
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setImplicitsDebug(input: String) =
+ docImplicitsDebug = Flag.getBooleanValue(input, "implicitsDebug")
+
+ /** Set the `diagrams` bit so Scaladoc adds diagrams to the documentation
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setDiagrams(input: String) =
+ docDiagrams = Flag.getBooleanValue(input, "diagrams")
+
+ /** Set the `diagramsDebug` bit so Scaladoc outputs diagram building debug information
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setDiagramsDebug(input: String) =
+ docDiagramsDebug = Flag.getBooleanValue(input, "diagramsDebug")
+
+ /** Set the `diagramsDotPath` attribute to the path where graphviz dot can be found (including the binary file name,
+ * eg: /usr/bin/dot) */
+ def setDiagramsDotPath(input: String) =
+ docDiagramsDotPath = Some(input)
+
+ /** Set the `rawOutput` bit so Scaladoc also outputs text from each html file
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setRawOutput(input: String) =
+ docRawOutput = Flag.getBooleanValue(input, "rawOutput")
+
+ /** Set the `noPrefixes` bit to prevent Scaladoc from generating prefixes in
+ * front of types -- may lead to confusion, but significantly speeds up the generation.
+ * @param input One of the flags `yes/no` or `on/off`. Default if no/off. */
+ def setNoPrefixes(input: String) =
+ docNoPrefixes = Flag.getBooleanValue(input, "noPrefixes")
+
+ /** Instruct the scaladoc tool to group similar functions together */
+ def setGroups(input: String) =
+ docGroups = Flag.getBooleanValue(input, "groups")
+
+ /** Instruct the scaladoc tool to skip certain packages.
+ * @param input A colon-delimited list of fully qualified package names that will be skipped from scaladoc.
+ */
+ def setSkipPackages(input: String) =
+ docSkipPackages = input
+
/*============================================================================*\
** Properties getters **
\*============================================================================*/
- /** Gets the value of the <code>classpath</code> attribute in a
+ /** Gets the value of the `classpath` attribute in a
* Scala-friendly form.
*
* @return The class path as a list of files.
*/
private def getClasspath: List[File] =
if (classpath.isEmpty) buildError("Member 'classpath' is empty.")
- else classpath.get.list().toList.map(nameToFile)
+ else classpath.get.list().toList map nameToFile
- /** Gets the value of the <code>origin</code> attribute in a Scala-friendly
+ /** Gets the value of the `origin` attribute in a Scala-friendly
* form.
*
* @return The origin path as a list of files.
*/
private def getOrigin: List[File] =
if (origin.isEmpty) buildError("Member 'origin' is empty.")
- else origin.get.list().toList.map(nameToFile)
+ else origin.get.list().toList map nameToFile
- /** Gets the value of the <code>destination</code> attribute in a
+ /** Gets the value of the `destination` attribute in a
* Scala-friendly form.
*
* @return The destination as a file.
*/
private def getDestination: File =
if (destination.isEmpty) buildError("Member 'destination' is empty.")
- else existing(getProject().resolveFile(destination.get.toString))
+ else existing(getProject resolveFile destination.get.toString)
- /** Gets the value of the <code>sourcepath</code> attribute in a
+ /** Gets the value of the `sourcepath` attribute in a
* Scala-friendly form.
*
* @return The source path as a list of files.
*/
private def getSourcepath: List[File] =
if (sourcepath.isEmpty) buildError("Member 'sourcepath' is empty.")
- else sourcepath.get.list().toList.map(nameToFile)
+ else sourcepath.get.list().toList map nameToFile
- /** Gets the value of the <code>bootclasspath</code> attribute in a
+ /** Gets the value of the `bootclasspath` attribute in a
* Scala-friendly form.
*
* @return The boot class path as a list of files.
*/
private def getBootclasspath: List[File] =
if (bootclasspath.isEmpty) buildError("Member 'bootclasspath' is empty.")
- else bootclasspath.get.list().toList.map(nameToFile)
+ else bootclasspath.get.list().toList map nameToFile
- /** Gets the value of the <code>extdirs</code> attribute in a
+ /** Gets the value of the `extdirs` attribute in a
* Scala-friendly form.
*
* @return The extensions path as a list of files.
*/
private def getExtdirs: List[File] =
if (extdirs.isEmpty) buildError("Member 'extdirs' is empty.")
- else extdirs.get.list().toList.map(nameToFile)
+ else extdirs.get.list().toList map nameToFile
/*============================================================================*\
** Compilation and support methods **
@@ -441,7 +538,7 @@ class Scaladoc extends ScalaMatchingTask {
* @return A file created from the name.
*/
private def nameToFile(name: String): File =
- existing(getProject().resolveFile(name))
+ existing(getProject resolveFile name)
/** Tests if a file exists and prints a warning in case it doesn't. Always
* returns the file, even if it doesn't exist.
@@ -459,7 +556,7 @@ class Scaladoc extends ScalaMatchingTask {
/** Transforms a path into a Scalac-readable string.
*
* @param path A path to convert.
- * @return A string-representation of the path like <code>a.jar:b.jar</code>.
+ * @return A string-representation of the path like `a.jar:b.jar`.
*/
private def asString(path: List[File]): String =
path.map(asString).mkString("", File.pathSeparator, "")
@@ -467,7 +564,7 @@ class Scaladoc extends ScalaMatchingTask {
/** Transforms a file into a Scalac-readable string.
*
* @param path A file to convert.
- * @return A string-representation of the file like <code>/x/k/a.scala</code>.
+ * @return A string-representation of the file like `/x/k/a.scala`.
*/
private def asString(file: File): String =
file.getAbsolutePath()
@@ -486,8 +583,8 @@ class Scaladoc extends ScalaMatchingTask {
if (destination.isEmpty) destination = Some(getOrigin.head)
val mapper = new GlobPatternMapper()
- mapper.setTo("*.html")
- mapper.setFrom("*.scala")
+ mapper setTo "*.html"
+ mapper setFrom "*.scala"
// Scans source directories to build up a compile lists.
// If force is false, only files were the .class file in destination is
@@ -542,11 +639,22 @@ class Scaladoc extends ScalaMatchingTask {
if (!doctitle.isEmpty) docSettings.doctitle.value = decodeEscapes(doctitle.get)
if (!docfooter.isEmpty) docSettings.docfooter.value = decodeEscapes(docfooter.get)
if (!docversion.isEmpty) docSettings.docversion.value = decodeEscapes(docversion.get)
- if (!docsourceurl.isEmpty) docSettings.docsourceurl.value =decodeEscapes(docsourceurl.get)
+ if (!docsourceurl.isEmpty) docSettings.docsourceurl.value = decodeEscapes(docsourceurl.get)
if (!docUncompilable.isEmpty) docSettings.docUncompilable.value = decodeEscapes(docUncompilable.get)
docSettings.deprecation.value = deprecation
docSettings.unchecked.value = unchecked
+ docSettings.docImplicits.value = docImplicits
+ docSettings.docImplicitsDebug.value = docImplicitsDebug
+ docSettings.docImplicitsShowAll.value = docImplicitsShowAll
+ docSettings.docDiagrams.value = docDiagrams
+ docSettings.docDiagramsDebug.value = docDiagramsDebug
+ docSettings.docRawOutput.value = docRawOutput
+ docSettings.docNoPrefixes.value = docNoPrefixes
+ docSettings.docGroups.value = docGroups
+ docSettings.docSkipPackages.value = docSkipPackages
+ if(!docDiagramsDotPath.isEmpty) docSettings.docDiagramsDotPath.value = docDiagramsDotPath.get
+
if (!docgenerator.isEmpty) docSettings.docgenerator.value = docgenerator.get
if (!docrootcontent.isEmpty) docSettings.docRootContent.value = docrootcontent.get.getAbsolutePath()
log("Scaladoc params = '" + addParams + "'", Project.MSG_DEBUG)
@@ -555,6 +663,8 @@ class Scaladoc extends ScalaMatchingTask {
Pair(docSettings, sourceFiles)
}
+ def safeBuildError(message: String): Unit = if (nofail) log(message) else buildError(message)
+
/** Performs the compilation. */
override def execute() = {
val Pair(docSettings, sourceFiles) = initialize
@@ -563,7 +673,7 @@ class Scaladoc extends ScalaMatchingTask {
val docProcessor = new scala.tools.nsc.doc.DocFactory(reporter, docSettings)
docProcessor.document(sourceFiles.map (_.toString))
if (reporter.ERROR.count > 0)
- buildError(
+ safeBuildError(
"Document failed with " +
reporter.ERROR.count + " error" +
(if (reporter.ERROR.count > 1) "s" else "") +
@@ -576,15 +686,10 @@ class Scaladoc extends ScalaMatchingTask {
"; see the documenter output for details.")
reporter.printSummary()
} catch {
- case exception: Throwable if exception.getMessage ne null =>
- exception.printStackTrace()
- buildError("Document failed because of an internal documenter error (" +
- exception.getMessage + "); see the error output for details.")
- case exception =>
+ case exception: Throwable =>
exception.printStackTrace()
- buildError("Document failed because of an internal documenter error " +
- "(no error message provided); see the error output for details.")
+ val msg = Option(exception.getMessage) getOrElse "no error message provided"
+ safeBuildError(s"Document failed because of an internal documenter error ($msg); see the error output for details.")
}
}
-
}
diff --git a/src/compiler/scala/tools/ant/antlib.xml b/src/compiler/scala/tools/ant/antlib.xml
index 06a355d..78159e6 100644
--- a/src/compiler/scala/tools/ant/antlib.xml
+++ b/src/compiler/scala/tools/ant/antlib.xml
@@ -1,22 +1,18 @@
<antlib>
- <!--<taskdef name="scala"
- classname="scala.tools.ant.Scala"/>-->
+ <taskdef name="classloadVerify"
+ classname="scala.tools.ant.ClassloadVerify"/>
<taskdef name="fsc"
classname="scala.tools.ant.FastScalac"/>
<taskdef name="scalac"
classname="scala.tools.ant.Scalac"/>
<taskdef name="scalascript"
classname="scala.tools.ant.ScalaTool"/>
- <taskdef name="sbaz"
- classname="scala.tools.ant.ScalaBazaar"/>
<taskdef name="scaladoc"
classname="scala.tools.ant.Scaladoc"/>
<taskdef name="scalatool"
classname="scala.tools.ant.ScalaTool"/>
<taskdef name="same"
classname="scala.tools.ant.Same"/>
- <!--<taskdef name="scalatest"
- classname="scala.tools.ant.ScalaDoc"/>-->
<taskdef name="pack200"
classname="scala.tools.ant.Pack200Task"/>
</antlib>
diff --git a/src/compiler/scala/tools/ant/sabbus/Break.scala b/src/compiler/scala/tools/ant/sabbus/Break.scala
index 91dab5b..0b6701b 100644
--- a/src/compiler/scala/tools/ant/sabbus/Break.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Break.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala b/src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala
index 9c470f6..8032d5e 100644
--- a/src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala
+++ b/src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/compiler/scala/tools/ant/sabbus/Compiler.scala b/src/compiler/scala/tools/ant/sabbus/Compiler.scala
index e80deae..65cd9f4 100644
--- a/src/compiler/scala/tools/ant/sabbus/Compiler.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Compiler.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/compiler/scala/tools/ant/sabbus/Compilers.scala b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
index 4b21428..b199423 100644
--- a/src/compiler/scala/tools/ant/sabbus/Compilers.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Compilers.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,11 +11,11 @@ package scala.tools.ant.sabbus
import java.net.URL
-object Compilers extends collection.DefaultMap[String, Compiler] {
+object Compilers extends scala.collection.DefaultMap[String, Compiler] {
val debug = false
- private val container = new collection.mutable.HashMap[String, Compiler]
+ private val container = new scala.collection.mutable.HashMap[String, Compiler]
def iterator = container.iterator
@@ -24,22 +24,23 @@ object Compilers extends collection.DefaultMap[String, Compiler] {
override def size = container.size
def make(id: String, classpath: Array[URL], settings: Settings): Compiler = {
- val runtime = Runtime.getRuntime
if (debug) println("Making compiler " + id)
- if (debug) println(" memory before: " + (runtime.freeMemory/1048576.).formatted("%10.2f") + " MB")
+ if (debug) println(" memory before: " + freeMemoryString)
val comp = new Compiler(classpath, settings)
container += Pair(id, comp)
- if (debug) println(" memory after: " + (runtime.freeMemory/1048576.).formatted("%10.2f") + " MB")
+ if (debug) println(" memory after: " + freeMemoryString)
comp
}
def break(id: String): Null = {
- val runtime = Runtime.getRuntime
if (debug) println("Breaking compiler " + id)
- if (debug) println(" memory before: " + (runtime.freeMemory/1048576.).formatted("%10.2f") + " MB")
+ if (debug) println(" memory before: " + freeMemoryString)
container -= id
- System.gc
- if (debug) println(" memory after: " + (runtime.freeMemory/1048576.).formatted("%10.2f") + " MB")
+ System.gc()
+ if (debug) println(" memory after: " + freeMemoryString)
null
}
+
+ private def freeMemoryString: String =
+ (Runtime.getRuntime.freeMemory/1048576.0).formatted("%10.2f") + " MB"
}
diff --git a/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala b/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala
index 475dd79..13b6f10 100644
--- a/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala
+++ b/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/compiler/scala/tools/ant/sabbus/Make.scala b/src/compiler/scala/tools/ant/sabbus/Make.scala
index 75cd010..5274594 100644
--- a/src/compiler/scala/tools/ant/sabbus/Make.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Make.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -21,7 +21,7 @@ class Make extends Task with TaskArgs {
if (!compTarget.isEmpty) settings.target = compTarget.get
if (!compilationPath.isEmpty) settings.classpath = compilationPath.get
if (!sourcePath.isEmpty) settings.sourcepath = sourcePath.get
- if (!params.isEmpty) settings.more = params.get
+ settings.extraParams = extraArgsFlat
Compilers.make(id.get, (compilerPath.get.list.map{ path => new File(path).toURI.toURL }), settings)
}
}
diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
index 1b66d50..9cdf484 100644
--- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
+++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
@@ -1,43 +1,73 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.tools.ant
package sabbus
-import java.io.File
-import java.io.FileWriter
+import java.io.{ File, FileWriter }
import org.apache.tools.ant.Project
import org.apache.tools.ant.taskdefs.Java
import org.apache.tools.ant.util.{ GlobPatternMapper, SourceFileScanner }
+import org.apache.tools.ant.BuildException
import scala.tools.nsc.io
import scala.tools.nsc.util.ScalaClassLoader
+/** An Ant task to compile with the new Scala compiler (NSC).
+ *
+ * This task can take the following parameters as attributes:
+ * - `srcdir` (mandatory),
+ * - `failonerror`,
+ * - `timeout`,
+ * - `jvmargs`,
+ * - `argfile`,
+ * - `params`.
+ *
+ * It also takes the following parameters as nested elements:
+ * - `src` (for `srcdir`),
+ * - `classpath`,
+ * - `sourcepath`,
+ * - `bootclasspath`,
+ * - `extdirs`,
+ * - `compilerarg`.
+ *
+ * @author Gilles Dubochet
+ */
class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
+
private def originOfThis: String =
ScalaClassLoader.originOfClass(classOf[ScalacFork]) map (_.toString) getOrElse "<unknown>"
+ /** Sets the `srcdir` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `sourceDir`. */
def setSrcdir(input: File) {
sourceDir = Some(input)
}
+ /** Sets the `failonerror` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `failOnError`. */
def setFailOnError(input: Boolean) {
failOnError = input
}
+ /** Sets the `timeout` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `timeout`. */
def setTimeout(input: Long) {
timeout = Some(input)
}
+ /** Sets the `jvmargs` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `jvmArgs`. */
def setJvmArgs(input: String) {
jvmArgs = Some(input)
}
+ /** Sets the `argfile` attribute. Used by [[http://ant.apache.org Ant]].
+ * @param input The value of `argfile`. */
def setArgfile(input: File) {
argfile = Some(input)
}
@@ -72,7 +102,7 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
compTarget foreach (settings.target = _)
compilationPath foreach (settings.classpath = _)
sourcePath foreach (settings.sourcepath = _)
- params foreach (settings.more = _)
+ settings.extraParams = extraArgsFlat
if (isMSIL)
settings.sourcedir = sourceDir
@@ -105,16 +135,23 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
java setClasspath compilerPath
java setClassname MainClass
+ // Encode scalac/javac args for use in a file to be read back via "@file.txt"
+ def encodeScalacArgsFile(t: Traversable[String]) = t map { s =>
+ if(s.find(c => c <= ' ' || "\"'\\".contains(c)).isDefined)
+ "\"" + s.flatMap(c => (if(c == '"' || c == '\\') "\\" else "") + c ) + "\""
+ else s
+ } mkString "\n"
+
// dump the arguments to a file and do "java @file"
val tempArgFile = io.File.makeTemp("scalacfork")
val tokens = settings.toArgs ++ (includedFiles map (_.getPath))
- tempArgFile writeAll (tokens mkString " ")
+ tempArgFile writeAll encodeScalacArgsFile(tokens)
val paths = List(Some(tempArgFile.toAbsolute.path), argfile).flatten map (_.toString)
val res = execWithArgFiles(java, paths)
if (failOnError && res != 0)
- sys.error("Compilation failed because of an internal compiler error;"+
+ throw new BuildException("Compilation failed because of an internal compiler error;"+
" see the error output for details.")
}
}
diff --git a/src/compiler/scala/tools/ant/sabbus/Settings.scala b/src/compiler/scala/tools/ant/sabbus/Settings.scala
index 543dcef..fde61e9 100644
--- a/src/compiler/scala/tools/ant/sabbus/Settings.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Settings.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.tools.ant.sabbus
import java.io.File
@@ -57,11 +56,11 @@ class Settings {
private var optimiseBf: Boolean = false
def optimise = optimiseBf
- def optimise_=(b: Boolean): Unit = { optimiseBf = b }
+ def optimise_=(b: Boolean) { optimiseBf = b }
- private var moreBf: Option[String] = None
- def more = moreBf.get
- def more_=(s: String): this.type = { moreBf = Some(s); this }
+ private var extraParamsBf: Seq[String] = Seq()
+ def extraParams = extraParamsBf
+ def extraParams_=(s: Seq[String]): this.type = { extraParamsBf = s; this }
def toArgs: List[String] =
(if (!gBf.isEmpty) "-g:"+g :: Nil else Nil) :::
@@ -75,7 +74,7 @@ class Settings {
(if (!encodingBf.isEmpty) "-encoding" :: encoding :: Nil else Nil) :::
(if (!targetBf.isEmpty) "-target:"+target :: Nil else Nil) :::
(if (optimiseBf) "-optimise" :: Nil else Nil) :::
- (if (!moreBf.isEmpty) (more split ' ').toList else Nil)
+ extraParamsBf.toList
override def equals(that: Any): Boolean = that match {
case cs: Settings =>
@@ -90,7 +89,7 @@ class Settings {
this.encodingBf == cs.encodingBf &&
this.targetBf == cs.targetBf &&
this.optimiseBf == cs.optimiseBf &&
- this.moreBf == cs.moreBf
+ this.extraParamsBf == cs.extraParamsBf
case _ => false
}
diff --git a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
index b248735..6bb1aaa 100644
--- a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
+++ b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,6 +12,7 @@ package scala.tools.ant.sabbus
import java.io.File
import org.apache.tools.ant.Task
import org.apache.tools.ant.types.{Path, Reference}
+import org.apache.tools.ant.types.Commandline.Argument
trait CompilationPathProperty {
this: Task =>
@@ -41,10 +42,13 @@ trait TaskArgs extends CompilationPathProperty {
}
def setParams(input: String) {
- params = params match {
- case None => Some(input)
- case Some(ps) => Some(ps + " " + input)
- }
+ extraArgs ++= input.split(' ').map { s => val a = new Argument; a.setValue(s); a }
+ }
+
+ def createCompilerArg(): Argument = {
+ val a = new Argument
+ extraArgs :+= a
+ a
}
def setTarget(input: String) {
@@ -84,11 +88,16 @@ trait TaskArgs extends CompilationPathProperty {
}
protected var id: Option[String] = None
- protected var params: Option[String] = None
+ protected var extraArgs: Seq[Argument] = Seq()
protected var compTarget: Option[String] = None
protected var sourcePath: Option[Path] = None
protected var compilerPath: Option[Path] = None
protected var destinationDir: Option[File] = None
+ def extraArgsFlat: Seq[String] = extraArgs flatMap { a =>
+ val parts = a.getParts
+ if(parts eq null) Seq[String]() else parts.toSeq
+ }
+
def isMSIL = compTarget exists (_ == "msil")
}
diff --git a/src/compiler/scala/tools/ant/sabbus/Use.scala b/src/compiler/scala/tools/ant/sabbus/Use.scala
index ade2e3e..2c97232 100644
--- a/src/compiler/scala/tools/ant/sabbus/Use.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Use.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Ant Tasks **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
index 09ec8f0..f1c6c52 100644
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -1,13 +1,28 @@
-#!/bin/bash --posix
+#!/usr/bin/env bash
#
##############################################################################
-# Copyright 2002-2011, LAMP/EPFL
+# Copyright 2002-2013 LAMP/EPFL
#
# This is free software; see the distribution for copying conditions.
# There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
##############################################################################
+findScalaHome () {
+ # see SI-2092 and SI-5792
+ local source="${BASH_SOURCE[0]}"
+ while [ -h "$source" ] ; do
+ local linked="$(readlink "$source")"
+ local dir="$( cd -P $(dirname "$source") && cd -P $(dirname "$linked") && pwd )"
+ source="$dir/$(basename "$linked")"
+ done
+ ( cd -P "$(dirname "$source")/.." && pwd )
+}
+execCommand () {
+ [[ -n $SCALA_RUNNER_DEBUG ]] && echo "" && for arg in "$@@"; do echo "$arg"; done && echo "";
+ "$@@"
+}
+
# Not sure what the right default is here: trying nonzero.
scala_exit_status=127
saved_stty=""
@@ -15,7 +30,8 @@ saved_stty=""
# restore stty settings (echo in particular)
function restoreSttySettings() {
if [[ -n $SCALA_RUNNER_DEBUG ]]; then
- echo "restoring stty: $saved_stty"
+ echo "restoring stty:"
+ echo "$saved_stty"
fi
stty $saved_stty
@@ -23,9 +39,7 @@ function restoreSttySettings() {
}
function onExit() {
- if [[ "$saved_stty" != "" ]]; then
- restoreSttySettings
- fi
+ [[ "$saved_stty" != "" ]] && restoreSttySettings
exit $scala_exit_status
}
@@ -39,69 +53,71 @@ if [[ ! $? ]]; then
saved_stty=""
fi
if [[ -n $SCALA_RUNNER_DEBUG ]]; then
- echo "saved stty: $saved_stty"
+ echo "saved stty:"
+ echo "$saved_stty"
+fi
+
+unset cygwin
+if uname | grep -q ^CYGWIN; then
+ cygwin="$(uname)"
fi
-cygwin=false;
-case "`uname`" in
- CYGWIN*) cygwin=true ;;
-esac
+unset mingw
+if uname | grep -q ^MINGW; then
+ mingw="$(uname)"
+fi
# Finding the root folder for this Scala distribution
-SOURCE=$0;
-SCRIPT=`basename "$SOURCE"`;
-while [ -h "$SOURCE" ]; do
- SCRIPT=`basename "$SOURCE"`;
- LOOKUP=`ls -ld "$SOURCE"`;
- TARGET=`expr "$LOOKUP" : '.*-> \(.*\)$'`;
- if expr "${TARGET:-.}/" : '/.*/$' > /dev/null; then
- SOURCE=${TARGET:-.};
- else
- SOURCE=`dirname "$SOURCE"`/${TARGET:-.};
- fi;
-done;
+SCALA_HOME="$(findScalaHome)"
+SEP=":"
-# see #2092
-SCALA_HOME=`dirname "$SOURCE"`
-SCALA_HOME=`cd "$SCALA_HOME"; pwd -P`
-SCALA_HOME=`cd "$SCALA_HOME"/..; pwd`
+# Possible additional command line options
+WINDOWS_OPT=""
+EMACS_OPT=""
+[[ -n "$EMACS" ]] && EMACS_OPT="-Denv.emacs=$EMACS"
# Remove spaces from SCALA_HOME on windows
-if $cygwin; then
- SCALA_HOME=`cygpath --windows --short-name "$SCALA_HOME"`
- SCALA_HOME=`cygpath --unix "$SCALA_HOME"`
+if [[ -n "$cygwin" ]]; then
+ SCALA_HOME="$(shome="$(cygpath --windows --short-name "$SCALA_HOME")" ; cygpath --unix "$shome")"
+# elif uname |grep -q ^MINGW; then
+# SEP=";"
fi
# Constructing the extension classpath
TOOL_CLASSPATH="@classpath@"
-if [ -z "$TOOL_CLASSPATH" ] ; then
+if [[ -z "$TOOL_CLASSPATH" ]]; then
for ext in "$SCALA_HOME"/lib/* ; do
- if [ -z "$TOOL_CLASSPATH" ] ; then
+ if [[ -z "$TOOL_CLASSPATH" ]]; then
TOOL_CLASSPATH="$ext"
else
- TOOL_CLASSPATH="$TOOL_CLASSPATH:$ext"
+ TOOL_CLASSPATH="${TOOL_CLASSPATH}${SEP}${ext}"
fi
done
fi
-CYGWIN_JLINE_TERMINAL=
-if $cygwin; then
- if [ "$OS" = "Windows_NT" ] && cygpath -m .>/dev/null 2>/dev/null ; then
+if [[ -n "$cygwin" ]]; then
+ if [[ "$OS" = "Windows_NT" ]] && cygpath -m .>/dev/null 2>/dev/null ; then
format=mixed
else
format=windows
fi
- SCALA_HOME=`cygpath --$format "$SCALA_HOME"`
- TOOL_CLASSPATH=`cygpath --path --$format "$TOOL_CLASSPATH"`
+ SCALA_HOME="$(cygpath --$format "$SCALA_HOME")"
+ TOOL_CLASSPATH="$(cygpath --path --$format "$TOOL_CLASSPATH")"
+elif [[ -n "$mingw" ]]; then
+ SCALA_HOME="$(cmd //c echo "$SCALA_HOME")"
+ TOOL_CLASSPATH="$(cmd //c echo "$TOOL_CLASSPATH")"
+fi
+
+if [[ -n "$cygwin$mingw" ]]; then
case "$TERM" in
rxvt* | xterm*)
stty -icanon min 1 -echo
- CYGWIN_JLINE_TERMINAL="-Djline.terminal=scala.tools.jline.UnixTerminal"
+ WINDOWS_OPT="-Djline.terminal=scala.tools.jline.UnixTerminal"
;;
esac
fi
-[ -n "$JAVA_OPTS" ] || JAVA_OPTS="@javaflags@"
+[[ -n "$JAVA_OPTS" ]] || JAVA_OPTS="@javaflags@"
# break out -D and -J options and add them to JAVA_OPTS as well
# so they reach the underlying JVM in time to do some good. The
@@ -109,10 +125,24 @@ fi
declare -a java_args
declare -a scala_args
-# default to the boot classpath for speed.
-CPSELECT="-Xbootclasspath/a:"
+# default to the boot classpath for speed, except on cygwin/mingw because
+# JLine on Windows requires a custom DLL to be loaded.
+unset usebootcp
+if [[ -z "$cygwin$mingw" ]]; then
+ usebootcp="true"
+fi
-while [ $# -gt 0 ]; do
+# If using the boot classpath, also pass an empty classpath
+# to java to suppress "." from materializing.
+classpathArgs () {
+ if [[ -n $usebootcp ]]; then
+ echo "-Xbootclasspath/a:$TOOL_CLASSPATH -classpath \"\""
+ else
+ echo "-classpath $TOOL_CLASSPATH"
+ fi
+}
+
+while [[ $# -gt 0 ]]; do
case "$1" in
-D*)
# pass to scala as well: otherwise we lose it sometimes when we
@@ -129,11 +159,19 @@ while [ $# -gt 0 ]; do
shift
;;
-toolcp)
- TOOL_CLASSPATH="$TOOL_CLASSPATH:$2"
+ TOOL_CLASSPATH="${TOOL_CLASSPATH}${SEP}${2}"
shift 2
;;
-nobootcp)
- CPSELECT="-classpath "
+ unset usebootcp
+ shift
+ ;;
+ -usebootcp)
+ usebootcp="true"
+ shift
+ ;;
+ -debug)
+ SCALA_RUNNER_DEBUG=1
shift
;;
*)
@@ -146,18 +184,22 @@ done
# reset "$@@" to the remaining args
set -- "${scala_args[@@]}"
-if [ -z "$JAVACMD" -a -n "$JAVA_HOME" -a -x "$JAVA_HOME/bin/java" ]; then
+if [[ -z "$JAVACMD" && -n "$JAVA_HOME" && -x "$JAVA_HOME/bin/java" ]]; then
JAVACMD="$JAVA_HOME/bin/java"
fi
-"${JAVACMD:=java}" \
+# note that variables which may intentionally be empty must not
+# be quoted: otherwise an empty string will appear as a command line
+# argument, and java will think that is the program to run.
+execCommand \
+ "${JAVACMD:=java}" \
$JAVA_OPTS \
"${java_args[@@]}" \
- ${CPSELECT}${TOOL_CLASSPATH} \
- -Dscala.usejavacp=true \
+ $(classpathArgs) \
-Dscala.home="$SCALA_HOME" \
- -Denv.emacs="$EMACS" \
- $CYGWIN_JLINE_TERMINAL \
+ -Dscala.usejavacp=true \
+ $EMACS_OPT \
+ $WINDOWS_OPT \
@properties@ @class@ @toolflags@ "$@@"
# record the exit status lest it be overwritten:
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index e88a973..8441f3a 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -1,67 +1,159 @@
-@@echo off
-
-rem ##########################################################################
-rem # Copyright 2002-2011, LAMP/EPFL
-rem #
-rem # This is free software; see the distribution for copying conditions.
-rem # There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
-rem # PARTICULAR PURPOSE.
-rem ##########################################################################
-
-if "%OS%" NEQ "Windows_NT" (
- echo "Sorry, your version of Windows is too old to run Scala."
- goto :eof
-)
-
-@@setlocal
-call :set_home
-
-rem We use the value of the JAVACMD environment variable if defined
-set _JAVACMD=%JAVACMD%
-
-if "%_JAVACMD%"=="" (
- if not "%JAVA_HOME%"=="" (
- if exist "%JAVA_HOME%\bin\java.exe" set "_JAVACMD=%JAVA_HOME%\bin\java.exe"
- )
-)
-
-if "%_JAVACMD%"=="" set _JAVACMD=java
-
-rem We use the value of the JAVA_OPTS environment variable if defined
-set _JAVA_OPTS=%JAVA_OPTS%
-if "%_JAVA_OPTS%"=="" set _JAVA_OPTS=@javaflags@
-
-set _TOOL_CLASSPATH=@classpath@
-if "%_TOOL_CLASSPATH%"=="" (
- for %%f in ("%_SCALA_HOME%\lib\*") do call :add_cpath "%%f"
- for /d %%f in ("%_SCALA_HOME%\lib\*") do call :add_cpath "%%f"
-)
-
-set _PROPS=-Dscala.home="%_SCALA_HOME%" -Denv.emacs="%EMACS%" -Dscala.usejavacp=true @properties@
-
-rem echo "%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %*
-"%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %*
-goto end
-
-rem ##########################################################################
-rem # subroutines
-
-:add_cpath
- if "%_TOOL_CLASSPATH%"=="" (
- set _TOOL_CLASSPATH=%~1
- ) else (
- set _TOOL_CLASSPATH=%_TOOL_CLASSPATH%;%~1
- )
-goto :eof
-
-rem Variable "%~dps0" works on WinXP SP2 or newer
-rem (see http://support.microsoft.com/?kbid=833431)
-rem set _SCALA_HOME=%~dps0..
-:set_home
- set _BIN_DIR=
- for %%i in (%~sf0) do set _BIN_DIR=%_BIN_DIR%%%~dpsi
- set _SCALA_HOME=%_BIN_DIR%..
-goto :eof
-
-:end
-@@endlocal
+@@echo off
+
+rem ##########################################################################
+rem # Copyright 2002-2013 LAMP/EPFL
+rem #
+rem # This is free software; see the distribution for copying conditions.
+rem # There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
+rem # PARTICULAR PURPOSE.
+rem ##########################################################################
+
+setlocal enableextensions enabledelayedexpansion
+
+set _LINE_TOOLCP=
+
+rem Use "%~1" to handle spaces in paths. See http://ss64.com/nt/syntax-args.html
+rem SI-7295 The goto here is needed to avoid problems with `scala Script.cmd "arg(with)paren"`,
+rem we must not evaluate %~2 eagerly, but delayed expansion doesn't seem to allow
+rem removal of quotation marks.
+if not [%~1]==[-toolcp] (
+ goto :notoolcp
+)
+shift
+set _LINE_TOOLCP=%~1
+shift
+
+:notoolcp
+
+rem We keep in _JAVA_PARAMS all -J-prefixed and -D-prefixed arguments
+set _JAVA_PARAMS=
+
+if [%1]==[] goto param_afterloop
+set _TEST_PARAM=%~1
+if not "%_TEST_PARAM:~0,1%"=="-" goto param_afterloop
+
+rem ignore -e "scala code"
+if "%_TEST_PARAM:~0,2%"=="-e" (
+ shift
+ shift
+ if [%1]==[] goto param_afterloop
+)
+
+set _TEST_PARAM=%~1
+if "%_TEST_PARAM:~0,2%"=="-J" (
+ set _JAVA_PARAMS=%_TEST_PARAM:~2%
+)
+
+if "%_TEST_PARAM:~0,2%"=="-D" (
+ rem test if this was double-quoted property "-Dprop=42"
+ for /F "delims== tokens=1-2" %%G in ("%_TEST_PARAM%") DO (
+ if not "%%G" == "%_TEST_PARAM%" (
+ rem double quoted: "-Dprop=42" -> -Dprop="42"
+ set _JAVA_PARAMS=%%G="%%H"
+ ) else if [%2] neq [] (
+ rem it was a normal property: -Dprop=42 or -Drop="42"
+ set _JAVA_PARAMS=%_TEST_PARAM%=%2
+ shift
+ )
+ )
+)
+
+:param_loop
+shift
+
+if [%1]==[] goto param_afterloop
+set _TEST_PARAM=%~1
+if not "%_TEST_PARAM:~0,1%"=="-" goto param_afterloop
+
+rem ignore -e "scala code"
+if "%_TEST_PARAM:~0,2%"=="-e" (
+ shift
+ shift
+ if [%1]==[] goto param_afterloop
+)
+
+set _TEST_PARAM=%~1
+if "%_TEST_PARAM:~0,2%"=="-J" (
+ set _JAVA_PARAMS=%_JAVA_PARAMS% %_TEST_PARAM:~2%
+)
+
+if "%_TEST_PARAM:~0,2%"=="-D" (
+ rem test if this was double-quoted property "-Dprop=42"
+ for /F "delims== tokens=1-2" %%G in ("%_TEST_PARAM%") DO (
+ if not "%%G" == "%_TEST_PARAM%" (
+ rem double quoted: "-Dprop=42" -> -Dprop="42"
+ set _JAVA_PARAMS=%_JAVA_PARAMS% %%G="%%H"
+ ) else if [%2] neq [] (
+ rem it was a normal property: -Dprop=42 or -Drop="42"
+ set _JAVA_PARAMS=%_JAVA_PARAMS% %_TEST_PARAM%=%2
+ shift
+ )
+ )
+)
+goto param_loop
+:param_afterloop
+
+if "%OS%" NEQ "Windows_NT" (
+ echo "Warning, your version of Windows is not supported. Attempting to start scala anyway."
+)
+
+@@setlocal
+call :set_home
+
+rem We use the value of the JAVACMD environment variable if defined
+set _JAVACMD=%JAVACMD%
+
+if not defined _JAVACMD (
+ if not "%JAVA_HOME%"=="" (
+ if exist "%JAVA_HOME%\bin\java.exe" set "_JAVACMD=%JAVA_HOME%\bin\java.exe"
+ )
+)
+
+if "%_JAVACMD%"=="" set _JAVACMD=java
+
+rem We use the value of the JAVA_OPTS environment variable if defined
+set _JAVA_OPTS=%JAVA_OPTS%
+if not defined _JAVA_OPTS set _JAVA_OPTS=@javaflags@
+
+rem We append _JAVA_PARAMS java arguments to JAVA_OPTS if necessary
+if defined _JAVA_PARAMS set _JAVA_OPTS=%_JAVA_OPTS% %_JAVA_PARAMS%
+
+set _TOOL_CLASSPATH=@classpath@
+if "%_TOOL_CLASSPATH%"=="" (
+ for %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
+ for /d %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
+)
+
+if not "%_LINE_TOOLCP%"=="" call :add_cpath "%_LINE_TOOLCP%"
+
+set _PROPS=-Dscala.home="!_SCALA_HOME!" -Denv.emacs="%EMACS%" -Dscala.usejavacp=true @properties@
+
+rem echo "%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %*
+"%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %*
+goto end
+
+rem ##########################################################################
+rem # subroutines
+
+:add_cpath
+ if "%_TOOL_CLASSPATH%"=="" (
+ set _TOOL_CLASSPATH=%~1
+ ) else (
+ set _TOOL_CLASSPATH=%_TOOL_CLASSPATH%;%~1
+ )
+goto :eof
+
+rem Variable "%~dps0" works on WinXP SP2 or newer
+rem (see http://support.microsoft.com/?kbid=833431)
+rem set _SCALA_HOME=%~dps0..
+:set_home
+ set _BIN_DIR=
+ for %%i in (%~sf0) do set _BIN_DIR=%_BIN_DIR%%%~dpsi
+ set _SCALA_HOME=%_BIN_DIR%..
+goto :eof
+
+:end
+@@endlocal
+
+REM exit code fix, see http://stackoverflow.com/questions/4632891/exiting-batch-with-exit-b-x-where-x-1-acts-as-if-command-completed-successfu
+@@"%COMSPEC%" /C exit %errorlevel% >nul
diff --git a/src/compiler/scala/tools/cmd/CommandLine.scala b/src/compiler/scala/tools/cmd/CommandLine.scala
index ced3a97..75f96d3 100644
--- a/src/compiler/scala/tools/cmd/CommandLine.scala
+++ b/src/compiler/scala/tools/cmd/CommandLine.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/cmd/Demo.scala b/src/compiler/scala/tools/cmd/Demo.scala
index 61d63eb..af81884 100644
--- a/src/compiler/scala/tools/cmd/Demo.scala
+++ b/src/compiler/scala/tools/cmd/Demo.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/compiler/scala/tools/cmd/FromString.scala
index 3792c26..cba2e99 100644
--- a/src/compiler/scala/tools/cmd/FromString.scala
+++ b/src/compiler/scala/tools/cmd/FromString.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -7,19 +7,20 @@ package scala.tools
package cmd
import nsc.io.{ Path, File, Directory }
-import scala.reflect.OptManifest
+import scala.reflect.runtime.{universe => ru}
+import scala.tools.reflect.StdRuntimeTags._
/** A general mechanism for defining how a command line argument
* (always a String) is transformed into an arbitrary type. A few
* example instances are in the companion object, but in general
* either IntFromString will suffice or you'll want custom transformers.
*/
-abstract class FromString[+T](implicit m: OptManifest[T]) extends PartialFunction[String, T] {
+abstract class FromString[+T](implicit t: ru.TypeTag[T]) extends PartialFunction[String, T] {
def apply(s: String): T
def isDefinedAt(s: String): Boolean = true
def zero: T = apply("")
- def targetString: String = m.toString
+ def targetString: String = t.toString
}
object FromString {
@@ -29,20 +30,20 @@ object FromString {
/** Path related stringifiers.
*/
- val ExistingFile: FromString[File] = new FromString[File] {
+ val ExistingFile: FromString[File] = new FromString[File]()(tagOfFile) {
override def isDefinedAt(s: String) = toFile(s).isFile
def apply(s: String): File =
if (isDefinedAt(s)) toFile(s)
else cmd.runAndExit(println("'%s' is not an existing file." format s))
}
- val ExistingDir: FromString[Directory] = new FromString[Directory] {
+ val ExistingDir: FromString[Directory] = new FromString[Directory]()(tagOfDirectory) {
override def isDefinedAt(s: String) = toDir(s).isDirectory
def apply(s: String): Directory =
if (isDefinedAt(s)) toDir(s)
else cmd.runAndExit(println("'%s' is not an existing directory." format s))
}
- def ExistingDirRelativeTo(root: Directory) = new FromString[Directory] {
- private def resolve(s: String) = toDir(s) toAbsoluteWithRoot root toDirectory
+ def ExistingDirRelativeTo(root: Directory) = new FromString[Directory]()(tagOfDirectory) {
+ private def resolve(s: String) = (toDir(s) toAbsoluteWithRoot root).toDirectory
override def isDefinedAt(s: String) = resolve(s).isDirectory
def apply(s: String): Directory =
if (isDefinedAt(s)) resolve(s)
@@ -52,19 +53,19 @@ object FromString {
/** Argument expander, i.e. turns single argument "foo bar baz" into argument
* list "foo", "bar", "baz".
*/
- val ArgumentsFromString: FromString[List[String]] = new FromString[List[String]] {
+ val ArgumentsFromString: FromString[List[String]] = new FromString[List[String]]()(tagOfListOfString) {
def apply(s: String) = toArgs(s)
}
/** Identity.
*/
- implicit val StringFromString: FromString[String] = new FromString[String] {
+ implicit val StringFromString: FromString[String] = new FromString[String]()(tagOfString) {
def apply(s: String): String = s
}
/** Implicit as the most likely to be useful as-is.
*/
- implicit val IntFromString: FromString[Int] = new FromString[Int] {
+ implicit val IntFromString: FromString[Int] = new FromString[Int]()(tagOfInt) {
override def isDefinedAt(s: String) = safeToInt(s).isDefined
def apply(s: String) = safeToInt(s).get
def safeToInt(s: String): Option[Int] = try Some(java.lang.Integer.parseInt(s)) catch { case _: NumberFormatException => None }
diff --git a/src/compiler/scala/tools/cmd/Instance.scala b/src/compiler/scala/tools/cmd/Instance.scala
index c22294a..0e64e1e 100644
--- a/src/compiler/scala/tools/cmd/Instance.scala
+++ b/src/compiler/scala/tools/cmd/Instance.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/cmd/Interpolation.scala b/src/compiler/scala/tools/cmd/Interpolation.scala
index f08ebe0..abffd6b 100644
--- a/src/compiler/scala/tools/cmd/Interpolation.scala
+++ b/src/compiler/scala/tools/cmd/Interpolation.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/cmd/Meta.scala b/src/compiler/scala/tools/cmd/Meta.scala
index d06586e..d019ebd 100644
--- a/src/compiler/scala/tools/cmd/Meta.scala
+++ b/src/compiler/scala/tools/cmd/Meta.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/cmd/Opt.scala b/src/compiler/scala/tools/cmd/Opt.scala
index 3c49413..2c19312 100644
--- a/src/compiler/scala/tools/cmd/Opt.scala
+++ b/src/compiler/scala/tools/cmd/Opt.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/cmd/Parser.scala b/src/compiler/scala/tools/cmd/Parser.scala
index 46204c1..6e2afa4 100644
--- a/src/compiler/scala/tools/cmd/Parser.scala
+++ b/src/compiler/scala/tools/cmd/Parser.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/cmd/Property.scala b/src/compiler/scala/tools/cmd/Property.scala
index 8df8c66..b1d951a 100644
--- a/src/compiler/scala/tools/cmd/Property.scala
+++ b/src/compiler/scala/tools/cmd/Property.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -64,7 +64,7 @@ trait Property extends Reference {
propertiesToOptions(loadProperties(file))
def propertiesToOptions(props: java.util.Properties): List[String] = {
- import collection.JavaConversions._
+ import scala.collection.JavaConversions._
propertiesToOptions(props.toList)
}
def propertiesToOptions(props: List[(String, String)]) = props flatMap propMapper
diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala
index 204ca22..bcbb454 100644
--- a/src/compiler/scala/tools/cmd/Reference.scala
+++ b/src/compiler/scala/tools/cmd/Reference.scala
@@ -1,12 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools
package cmd
-import collection.mutable.ListBuffer
+import scala.collection.mutable.ListBuffer
import nsc.Properties.envOrNone
/** Mixes in the specification trait and uses the vals therein to
diff --git a/src/compiler/scala/tools/cmd/Spec.scala b/src/compiler/scala/tools/cmd/Spec.scala
index 929f526..b761601 100644
--- a/src/compiler/scala/tools/cmd/Spec.scala
+++ b/src/compiler/scala/tools/cmd/Spec.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index 31b4474..dbd2195 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -11,13 +11,136 @@ package gen
trait AnyValReps {
self: AnyVals =>
- sealed abstract class AnyValNum(name: String) extends AnyValRep(name) {
+ sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String) extends AnyValRep(name,repr,javaEquiv) {
+
+ case class Op(val op : String, val doc : String)
+
+ private def companionCoercions(tos: AnyValRep*) = {
+ tos.toList map (to =>
+ """implicit def @javaequiv at 2%s(x: @name@): %s = x.to%s""".format(to.javaEquiv, to.name, to.name)
+ )
+ }
+ def coercionCommentExtra = ""
+ def coercionComment = """
+ /** Language mandated coercions from @name@ to "wider" types.%s
+ */""".format(coercionCommentExtra)
+
+ def implicitCoercions: List[String] = {
+ val coercions = this match {
+ case B => companionCoercions(S, I, L, F, D)
+ case S | C => companionCoercions(I, L, F, D)
+ case I => companionCoercions(L, F, D)
+ case L => companionCoercions(F, D)
+ case F => companionCoercions(D)
+ case _ => Nil
+ }
+ if (coercions.isEmpty) Nil
+ else coercionComment :: coercions
+ }
+
def isCardinal: Boolean = isIntegerType(this)
- def unaryOps = if (isCardinal) List("+", "-", "~") else List("+", "-")
- def bitwiseOps = if (isCardinal) List("|", "&", "^") else Nil
- def shiftOps = if (isCardinal) List("<<", ">>>", ">>") else Nil
- def comparisonOps = List("==", "!=", "<", "<=", ">", ">=")
- def otherOps = List("+", "-" ,"*", "/", "%")
+ def unaryOps = {
+ val ops = List(
+ Op("+", "/**\n" +
+ " * Returns this value, unmodified.\n" +
+ " */"),
+ Op("-", "/**\n" +
+ " * Returns the negation of this value.\n" +
+ " */"))
+
+ if(isCardinal)
+ Op("~", "/**\n" +
+ " * Returns the bitwise negation of this value.\n" +
+ " * @example {{{\n" +
+ " * ~5 == -6\n" +
+ " * // in binary: ~00000101 ==\n" +
+ " * // 11111010\n" +
+ " * }}}\n" +
+ " */") :: ops
+ else ops
+ }
+
+ def bitwiseOps =
+ if (isCardinal)
+ List(
+ Op("|", "/**\n" +
+ " * Returns the bitwise OR of this value and `x`.\n" +
+ " * @example {{{\n" +
+ " * (0xf0 | 0xaa) == 0xfa\n" +
+ " * // in binary: 11110000\n" +
+ " * // | 10101010\n" +
+ " * // --------\n" +
+ " * // 11111010\n" +
+ " * }}}\n" +
+ " */"),
+ Op("&", "/**\n" +
+ " * Returns the bitwise AND of this value and `x`.\n" +
+ " * @example {{{\n" +
+ " * (0xf0 & 0xaa) == 0xa0\n" +
+ " * // in binary: 11110000\n" +
+ " * // & 10101010\n" +
+ " * // --------\n" +
+ " * // 10100000\n" +
+ " * }}}\n" +
+ " */"),
+ Op("^", "/**\n" +
+ " * Returns the bitwise XOR of this value and `x`.\n" +
+ " * @example {{{\n" +
+ " * (0xf0 ^ 0xaa) == 0x5a\n" +
+ " * // in binary: 11110000\n" +
+ " * // ^ 10101010\n" +
+ " * // --------\n" +
+ " * // 01011010\n" +
+ " * }}}\n" +
+ " */"))
+ else Nil
+
+ def shiftOps =
+ if (isCardinal)
+ List(
+ Op("<<", "/**\n" +
+ " * Returns this value bit-shifted left by the specified number of bits,\n" +
+ " * filling in the new right bits with zeroes.\n" +
+ " * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}\n" +
+ " */"),
+
+ Op(">>>", "/**\n" +
+ " * Returns this value bit-shifted right by the specified number of bits,\n" +
+ " * filling the new left bits with zeroes.\n" +
+ " * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}\n" +
+ " * @example {{{\n" +
+ " * -21 >>> 3 == 536870909\n" +
+ " * // in binary: 11111111 11111111 11111111 11101011 >>> 3 ==\n" +
+ " * // 00011111 11111111 11111111 11111101\n" +
+ " * }}}\n" +
+ " */"),
+
+ Op(">>", "/**\n" +
+ " * Returns this value bit-shifted left by the specified number of bits,\n" +
+ " * filling in the right bits with the same value as the left-most bit of this.\n" +
+ " * The effect of this is to retain the sign of the value.\n" +
+ " * @example {{{\n" +
+ " * -21 >> 3 == -3\n" +
+ " * // in binary: 11111111 11111111 11111111 11101011 >> 3 ==\n" +
+ " * // 11111111 11111111 11111111 11111101\n" +
+ " * }}}\n" +
+ " */"))
+ else Nil
+
+ def comparisonOps = List(
+ Op("==", "/**\n * Returns `true` if this value is equal to x, `false` otherwise.\n */"),
+ Op("!=", "/**\n * Returns `true` if this value is not equal to x, `false` otherwise.\n */"),
+ Op("<", "/**\n * Returns `true` if this value is less than x, `false` otherwise.\n */"),
+ Op("<=", "/**\n * Returns `true` if this value is less than or equal to x, `false` otherwise.\n */"),
+ Op(">", "/**\n * Returns `true` if this value is greater than x, `false` otherwise.\n */"),
+ Op(">=", "/**\n * Returns `true` if this value is greater than or equal to x, `false` otherwise.\n */"))
+
+ def otherOps = List(
+ Op("+", "/**\n * Returns the sum of this value and `x`.\n */"),
+ Op("-", "/**\n * Returns the difference of this value and `x`.\n */"),
+ Op("*", "/**\n * Returns the product of this value and `x`.\n */"),
+ Op("/", "/**\n * Returns the quotient of this value and `x`.\n */"),
+ Op("%", "/**\n * Returns the remainder of the division of this value by `x`.\n */"))
// Given two numeric value types S and T , the operation type of S and T is defined as follows:
// If both S and T are subrange types then the operation type of S and T is Int.
@@ -33,11 +156,11 @@ trait AnyValReps {
}
def mkCoercions = numeric map (x => "def to%s: %s".format(x, x))
- def mkUnaryOps = unaryOps map (x => "def unary_%s : %s".format(x, this opType I))
+ def mkUnaryOps = unaryOps map (x => "%s\n def unary_%s : %s".format(x.doc, x.op, this opType I))
def mkStringOps = List("def +(x: String): String")
def mkShiftOps = (
for (op <- shiftOps ; arg <- List(I, L)) yield
- "def %s(x: %s): %s".format(op, arg, this opType I)
+ "%s\n def %s(x: %s): %s".format(op.doc, op.op, arg, this opType I)
)
def clumps: List[List[String]] = {
@@ -54,13 +177,13 @@ trait AnyValReps {
case (res, lines) =>
val xs = lines map {
case "" => ""
- case s => interpolate(s) + " = " + stub
+ case s => interpolate(s)
}
res ++ xs
}
def objectLines = {
val comp = if (isCardinal) cardinalCompanion else floatingCompanion
- (comp + allCompanions).trim.lines map interpolate toList
+ (comp + allCompanions + "\n" + nonUnitCompanions).trim.lines.toList ++ implicitCoercions map interpolate
}
/** Makes a set of binary operations based on the given set of ops, args, and resultFn.
@@ -70,18 +193,19 @@ trait AnyValReps {
* @param resultFn function which calculates return type based on arg type
* @return list of function definitions
*/
- def mkBinOpsGroup(ops: List[String], args: List[AnyValNum], resultFn: AnyValNum => AnyValRep): List[String] = (
+ def mkBinOpsGroup(ops: List[Op], args: List[AnyValNum], resultFn: AnyValNum => AnyValRep): List[String] = (
ops flatMap (op =>
- args.map(arg => "def %s(x: %s): %s".format(op, arg, resultFn(arg))) :+ ""
+ args.map(arg =>
+ "%s\n def %s(x: %s): %s".format(op.doc, op.op, arg, resultFn(arg))) :+ ""
)
).toList
}
- sealed abstract class AnyValRep(val name: String) {
+ sealed abstract class AnyValRep(val name: String, val repr: Option[String], val javaEquiv: String) {
def classLines: List[String]
def objectLines: List[String]
def commonClassLines = List(
- "def getClass(): Class[@name@]"
+ "override def getClass(): Class[@name@] = null"
)
def lcname = name.toLowerCase
@@ -98,6 +222,8 @@ trait AnyValReps {
case _ => "0"
}
+ def representation = repr.map(", a " + _).getOrElse("")
+
def indent(s: String) = if (s == "") "" else " " + s
def indentN(s: String) = s.lines map indent mkString "\n"
@@ -108,6 +234,8 @@ trait AnyValReps {
)
def interpolations = Map(
"@name@" -> name,
+ "@representation@" -> representation,
+ "@javaequiv@" -> javaEquiv,
"@boxed@" -> boxedName,
"@lcname@" -> lcname,
"@zero@" -> zeroRep
@@ -119,8 +247,9 @@ trait AnyValReps {
def classDoc = interpolate(classDocTemplate)
def objectDoc = ""
def mkImports = ""
- def mkClass = assemble("final class", "AnyVal", classLines) + "\n"
- def mkObject = assemble("object", "AnyValCompanion", objectLines) + "\n"
+
+ def mkClass = assemble("final abstract class " + name + " private extends AnyVal", classLines)
+ def mkObject = assemble("object " + name + " extends AnyValCompanion", objectLines)
def make() = List[String](
headerTemplate,
mkImports,
@@ -130,11 +259,10 @@ trait AnyValReps {
mkObject
) mkString ""
- def assemble(what: String, parent: String, lines: List[String]): String = {
- val decl = "%s %s extends %s ".format(what, name, parent)
- val body = if (lines.isEmpty) "{ }\n\n" else lines map indent mkString ("{\n", "\n", "\n}\n")
+ def assemble(decl: String, lines: List[String]): String = {
+ val body = if (lines.isEmpty) " { }\n\n" else lines map indent mkString (" {\n", "\n", "\n}\n")
- decl + body
+ decl + body + "\n"
}
override def toString = name
}
@@ -144,7 +272,7 @@ trait AnyValTemplates {
def headerTemplate = ("""
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -153,11 +281,14 @@ trait AnyValTemplates {
%s
package scala
+import scala.language.implicitConversions
+
""".trim.format(timestampString) + "\n\n")
def classDocTemplate = ("""
-/** `@name@` is a member of the value classes, those whose instances are
- * not represented as objects by the underlying host system.
+/** `@name@`@representation@ (equivalent to Java's `@javaequiv@` primitive type) is a
+ * subtype of [[scala.AnyVal]]. Instances of `@name@` are not
+ * represented by an object in the underlying runtime system.
*
* There is an implicit conversion from [[scala. at name@]] => [[scala.runtime.Rich at name@]]
* which provides useful non-primitive operations.
@@ -165,7 +296,6 @@ package scala
""".trim + "\n")
def timestampString = "// DO NOT EDIT, CHANGES WILL BE LOST.\n"
- def stub = """sys.error("stub")"""
def allCompanions = """
/** Transform a value type into a boxed reference type.
@@ -190,6 +320,8 @@ def unbox(x: java.lang.Object): @name@ = @unboxImpl@
override def toString = "object scala. at name@"
"""
+ def nonUnitCompanions = "" // todo
+
def cardinalCompanion = """
/** The smallest value representable as a @name at .
*/
@@ -209,9 +341,6 @@ final val NaN = @boxed at .NaN
final val PositiveInfinity = @boxed at .POSITIVE_INFINITY
final val NegativeInfinity = @boxed at .NEGATIVE_INFINITY
- at deprecated("use @name at .MinPositiveValue instead", "2.9.0")
-final val Epsilon = MinPositiveValue
-
/** The negative number with the greatest (finite) absolute value which is representable
* by a @name at . Note that it differs from [[java.lang. at name@.MIN_VALUE]], which
* is the smallest positive value representable by a @name at . In Scala that number
@@ -225,42 +354,119 @@ final val MaxValue = @boxed at .MAX_VALUE
}
class AnyVals extends AnyValReps with AnyValTemplates {
- object B extends AnyValNum("Byte")
- object S extends AnyValNum("Short")
- object C extends AnyValNum("Char")
- object I extends AnyValNum("Int")
- object L extends AnyValNum("Long")
- object F extends AnyValNum("Float")
- object D extends AnyValNum("Double")
- object Z extends AnyValRep("Boolean") {
+ object B extends AnyValNum("Byte", Some("8-bit signed integer"), "byte")
+ object S extends AnyValNum("Short", Some("16-bit signed integer"), "short")
+ object C extends AnyValNum("Char", Some("16-bit unsigned integer"), "char")
+ object I extends AnyValNum("Int", Some("32-bit signed integer"), "int")
+ object L extends AnyValNum("Long", Some("64-bit signed integer"), "long")
+ object F extends AnyValNum("Float", Some("32-bit IEEE-754 floating point number"), "float")
+ object D extends AnyValNum("Double", Some("64-bit IEEE-754 floating point number"), "double")
+ object Z extends AnyValRep("Boolean", None, "boolean") {
def classLines = """
-def unary_! : Boolean = sys.error("stub")
+/**
+ * Negates a Boolean expression.
+ *
+ * - `!a` results in `false` if and only if `a` evaluates to `true` and
+ * - `!a` results in `true` if and only if `a` evaluates to `false`.
+ *
+ * @return the negated expression
+ */
+def unary_! : Boolean
+
+/**
+ * Compares two Boolean expressions and returns `true` if they evaluate to the same value.
+ *
+ * `a == b` returns `true` if and only if
+ * - `a` and `b` are `true` or
+ * - `a` and `b` are `false`.
+ */
+def ==(x: Boolean): Boolean
+
+/**
+ * Compares two Boolean expressions and returns `true` if they evaluate to a different value.
+ *
+ * `a != b` returns `true` if and only if
+ * - `a` is `true` and `b` is `false` or
+ * - `a` is `false` and `b` is `true`.
+ */
+def !=(x: Boolean): Boolean
+
+/**
+ * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+ *
+ * `a || b` returns `true` if and only if
+ * - `a` is `true` or
+ * - `b` is `true` or
+ * - `a` and `b` are `true`.
+ *
+ * @note This method uses 'short-circuit' evaluation and
+ * behaves as if it was declared as `def ||(x: => Boolean): Boolean`.
+ * If `a` evaluates to `true`, `true` is returned without evaluating `b`.
+ */
+def ||(x: Boolean): Boolean
+
+/**
+ * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+ *
+ * `a && b` returns `true` if and only if
+ * - `a` and `b` are `true`.
+ *
+ * @note This method uses 'short-circuit' evaluation and
+ * behaves as if it was declared as `def &&(x: => Boolean): Boolean`.
+ * If `a` evaluates to `false`, `false` is returned without evaluating `b`.
+ */
+def &&(x: Boolean): Boolean
-def ==(x: Boolean): Boolean = sys.error("stub")
-def !=(x: Boolean): Boolean = sys.error("stub")
-def ||(x: Boolean): Boolean = sys.error("stub")
-def &&(x: Boolean): Boolean = sys.error("stub")
// Compiler won't build with these seemingly more accurate signatures
-// def ||(x: => Boolean): Boolean = sys.error("stub")
-// def &&(x: => Boolean): Boolean = sys.error("stub")
-def |(x: Boolean): Boolean = sys.error("stub")
-def &(x: Boolean): Boolean = sys.error("stub")
-def ^(x: Boolean): Boolean = sys.error("stub")
-
-def getClass(): Class[Boolean] = sys.error("stub")
+// def ||(x: => Boolean): Boolean
+// def &&(x: => Boolean): Boolean
+
+/**
+ * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+ *
+ * `a | b` returns `true` if and only if
+ * - `a` is `true` or
+ * - `b` is `true` or
+ * - `a` and `b` are `true`.
+ *
+ * @note This method evaluates both `a` and `b`, even if the result is already determined after evaluating `a`.
+ */
+def |(x: Boolean): Boolean
+
+/**
+ * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+ *
+ * `a & b` returns `true` if and only if
+ * - `a` and `b` are `true`.
+ *
+ * @note This method evaluates both `a` and `b`, even if the result is already determined after evaluating `a`.
+ */
+def &(x: Boolean): Boolean
+
+/**
+ * Compares two Boolean expressions and returns `true` if they evaluate to a different value.
+ *
+ * `a ^ b` returns `true` if and only if
+ * - `a` is `true` and `b` is `false` or
+ * - `a` is `false` and `b` is `true`.
+ */
+def ^(x: Boolean): Boolean
+
+override def getClass(): Class[Boolean] = null
""".trim.lines.toList
- def objectLines = interpolate(allCompanions).lines.toList
+ def objectLines = interpolate(allCompanions + "\n" + nonUnitCompanions).lines.toList
}
- object U extends AnyValRep("Unit") {
+ object U extends AnyValRep("Unit", None, "void") {
override def classDoc = """
-/** Unit is a member of the value classes, those whose instances are
- * not represented as objects by the underlying host system. There is
- * only one value of type Unit: `()`.
+/** `Unit` is a subtype of [[scala.AnyVal]]. There is only one value of type
+ * `Unit`, `()`, and it is not represented by any object in the underlying
+ * runtime system. A method with return type `Unit` is analogous to a Java
+ * method which is declared `void`.
*/
"""
def classLines = List(
- """def getClass(): Class[Unit] = sys.error("stub")"""
+ """override def getClass(): Class[Unit] = null"""
)
def objectLines = interpolate(allCompanions).lines.toList
@@ -284,4 +490,3 @@ def getClass(): Class[Boolean] = sys.error("stub")
}
object AnyVals extends AnyVals { }
-
diff --git a/src/compiler/scala/tools/cmd/gen/Codegen.scala b/src/compiler/scala/tools/cmd/gen/Codegen.scala
index 1bb8204..4ca9b6c 100644
--- a/src/compiler/scala/tools/cmd/gen/Codegen.scala
+++ b/src/compiler/scala/tools/cmd/gen/Codegen.scala
@@ -1,11 +1,13 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.cmd
package gen
+import scala.language.postfixOps
+
class Codegen(args: List[String]) extends {
val parsed = CodegenSpec(args: _*)
} with CodegenSpec with Instance { }
diff --git a/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala b/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
index 7f4b9c1..903517c 100644
--- a/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
+++ b/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/cmd/package.scala b/src/compiler/scala/tools/cmd/package.scala
index 5786b00..d605eca 100644
--- a/src/compiler/scala/tools/cmd/package.scala
+++ b/src/compiler/scala/tools/cmd/package.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -8,6 +8,10 @@ package scala.tools
package object cmd {
def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
+ // make some language features in this package compile without warning
+ implicit def implicitConversions = scala.language.implicitConversions
+ implicit def postfixOps = scala.language.postfixOps
+
private[cmd] def debug(msg: String) = println(msg)
def runAndExit(body: => Unit): Nothing = {
diff --git a/src/compiler/scala/tools/cmd/program/Scmp.scala b/src/compiler/scala/tools/cmd/program/Scmp.scala
deleted file mode 100644
index 6f39c29..0000000
--- a/src/compiler/scala/tools/cmd/program/Scmp.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package cmd
-package program
-
-import nsc.io._
-import scala.sys.process._
-
-object Scmp {
- private val scmpUsage = """
- |Usage: scmp [options] <cmd line>
- |Example: scmp --p1 '-no-specialization -Ydebug' scalac src/library/scala/Function1.scala
- |
- |Note: the command line must start with a path to scalac.
- |""".stripMargin
- private val scmpOptions = List(
- "p1" -> "options for the first run only",
- "p2" -> "options for the second run only"
- )
- private val scmpInfo = Simple.scalaProgramInfo("scmp", scmpUsage)
- lazy val ScmpSpec = Simple(scmpInfo, Nil, scmpOptions, x => returning(x)(_.onlyKnownOptions = false))
-
- def main(args0: Array[String]): Unit = {
- if (args0.isEmpty)
- return println(scmpUsage)
-
- val runner = ScmpSpec instance args0
- import runner._
-
- val p1args = parsed.getOrElse("--p1", "")
- val p2args = parsed.getOrElse("--p2", "")
-
- if (p1args.isEmpty && p2args.isEmpty)
- return println("At least one of --p1 and --p2 must be given.")
- if (residualArgs.isEmpty)
- return println("There is no command to run.")
-
- def createCmd(extras: String) =
- fromArgs(residualArgs.patch(1, toArgs(extras), 0))
-
- def runCmd(cmd: String) = {
- val tmpfile = File.makeTemp()
- (cmd #> tmpfile.jfile !)
- tmpfile
- }
-
- val cmds = List(p1args, p2args) map createCmd
- println(cmds.mkString("Running command lines:\n ", "\n ", ""))
-
- val files = cmds map runCmd map (_.path)
- val diff = "diff %s %s".format(files: _*).!!
-
- if (diff.isEmpty) println("No differences.")
- else println(diff)
- }
-}
diff --git a/src/compiler/scala/tools/cmd/program/Simple.scala b/src/compiler/scala/tools/cmd/program/Simple.scala
deleted file mode 100644
index f2095d6..0000000
--- a/src/compiler/scala/tools/cmd/program/Simple.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package cmd
-package program
-
-import Spec.Info
-
-/** A boilerplate reducer for commands with simple requirements. For examples,
- * see Scmp and Tokens in this package.
- */
-object Simple {
- type CommandLineTransform = SimpleCommandLine => SimpleCommandLine
-
- abstract class SimpleSpec(val programInfo: Info) extends Spec with Meta.StdOpts with Interpolation
-
- trait SimpleInstance extends SimpleSpec with Instance {
- val parsed: CommandLine
- }
-
- class SimpleReference(
- programInfo: Info,
- unary: List[(String, String)] = Nil,
- binary: List[(String, String)] = Nil,
- postCreation: CommandLineTransform = null
- ) extends SimpleSpec(programInfo) with Reference {
-
- spec =>
-
- if (programInfo.usage != "") help(programInfo.usage)
- unary foreach { case (option, help) => option / help --? }
- binary foreach { case (option, help) => option / help --| }
-
- type ThisCommandLine = SimpleCommandLine
-
- def creator(args: List[String]) = new SimpleCommandLine(spec, args)
- def instance(args: Array[String]): SimpleInstance = instance(args.toList)
- def instance(args: List[String]): SimpleInstance =
- new {
- val parsed = spec(args: _*)
- } with SimpleSpec(programInfo) with SimpleInstance {
- lazy val referenceSpec = spec
- }
-
- lazy val referenceSpec = spec
- }
-
- def apply(info: Info, unary: List[(String, String)], binary: List[(String, String)], postCreation: CommandLineTransform): SimpleReference = {
- new SimpleReference(info, unary, binary, postCreation) {
- override def creator(args: List[String]) = {
- val obj = super.creator(args)
- if (postCreation == null) obj
- else postCreation(obj)
- }
- }
- }
-
- def scalaProgramInfo(name: String, help: String) =
- Spec.Info(name, help, "scala.tools.cmd.program." + name.capitalize)
-
- /** You can't override a def with a var unless a setter exists. We cleverly
- * sidestep this by mixing in a trait with dummy setters which will be
- * inaccessible due to the overriding var.
- */
- trait Ticket2338WontFixWorkaround {
- def enforceArity_=(x: Boolean): Unit = sys.error("unreachable")
- def onlyKnownOptions_=(x: Boolean): Unit = sys.error("unreachable")
- }
-
- /** Configurability simplicity achieved by turning defs into vars and letting
- * the spec creator apply a transformation. This way there's no need to create
- * custom subclasses of CommandLine.
- */
- class SimpleCommandLine(spec: Reference, args: List[String]) extends CommandLine(spec, args) with Ticket2338WontFixWorkaround {
- override var enforceArity: Boolean = true
- override var onlyKnownOptions: Boolean = true
- }
-}
diff --git a/src/compiler/scala/tools/cmd/program/Tokens.scala b/src/compiler/scala/tools/cmd/program/Tokens.scala
deleted file mode 100644
index be5e9e4..0000000
--- a/src/compiler/scala/tools/cmd/program/Tokens.scala
+++ /dev/null
@@ -1,106 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package cmd
-package program
-
-import nsc._
-import util.Chars.char2uescape
-import io._
-import ast.parser.Tokens._
-
-/** Given paths on the command line, tokenizes any scala files found
- * and prints one token per line.
- */
-object Tokens {
- private val tokensUsage = "Usage: tokens [options] <path1 path2 ...>\n\nOptions:"
- private val tokensUnary = List(
- "verbose" -> "be more verbose",
- "freq" -> "combine token lists and sort by frequency",
- "stats" -> "output some stats"
- )
- private val tokensBinary = List(
- "sliding" -> "print tokens in groups of given size"
- )
- private val tokensInfo = Simple.scalaProgramInfo("tokens", tokensUsage)
- private lazy val TokensSpec = Simple(tokensInfo, tokensUnary, tokensBinary, null)
-
- def sanitize(x: Any): String = sanitize(x.toString)
- def sanitize(str: String): String = str flatMap (x => if (x.isControl) char2uescape(x) else x.toString)
-
- def main(args0: Array[String]): Unit = {
- if (args0.isEmpty)
- return println(TokensSpec.helpMsg)
-
- val runner = TokensSpec instance args0
- import runner._
-
- val files = (residualArgs flatMap walk).distinct
- if (parsed isSet "--verbose")
- println("Tokenizing: " + (files map (_.name) mkString " "))
-
- if (parsed isSet "--stats")
- println("Stats not yet implemented.")
-
- def raw = files flatMap fromScalaSource
- def tokens: List[Any] =
- if (parsed isSet "--sliding") raw sliding parsed("--sliding").toInt map (_ map sanitize mkString " ") toList
- else raw
-
- def output =
- if (parsed isSet "--freq")
- (tokens groupBy (x => x) mapValues (_.length)).toList sortBy (-_._2) map (x => x._2 + " " + x._1)
- else
- tokens
-
- output foreach println
- }
-
- def fromPaths(paths: String*): List[Any] =
- (paths.toList flatMap walk).distinct flatMap fromScalaSource
-
- /** Given a path, returns all .scala files underneath it.
- */
- private def walk(arg: String): List[File] = {
- def traverse = Path(arg) ifDirectory (_.deepList()) getOrElse Iterator(File(arg))
-
- Path onlyFiles traverse filter (_ hasExtension "scala") toList
- }
-
- def fromScalaString(code: String): List[Any] = {
- val f = File.makeTemp("tokens")
- f writeAll code
- fromScalaSource(f)
- }
-
- /** Tokenizes a single scala file.
- */
- def fromScalaSource(file: Path): List[Any] = fromScalaSource(file.path)
- def fromScalaSource(file: String): List[Any] = {
- val global = new Global(new Settings())
- import global._
- import syntaxAnalyzer.{ UnitScanner, token2string }
-
- val in = new UnitScanner(new CompilationUnit(getSourceFile(file)))
- in.init()
-
- Iterator continually {
- val token = in.token match {
- case IDENTIFIER | BACKQUOTED_IDENT => in.name
- case CHARLIT | INTLIT | LONGLIT => in.intVal
- case DOUBLELIT | FLOATLIT => in.floatVal
- case STRINGLIT => "\"" + in.strVal + "\""
- case SEMI | NEWLINE => ";"
- case NEWLINES => ";;"
- case COMMA => ","
- case EOF => null
- case x => token2string(x)
- }
- in.nextToken()
- token
- } takeWhile (_ != null) toList
- }
-}
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 3338d14..355a1fd 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -1,19 +1,29 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
-import util.{ FreshNameCreator,Position,NoPosition,SourceFile }
-import scala.collection.mutable.{ LinkedHashSet, HashSet, HashMap, ListBuffer }
+import util.FreshNameCreator
+import scala.reflect.internal.util.{ Position, NoPosition, BatchSourceFile, SourceFile, NoSourceFile }
+import scala.collection.mutable
+import scala.collection.mutable.{ LinkedHashSet, ListBuffer }
trait CompilationUnits { self: Global =>
+ /** An object representing a missing compilation unit.
+ */
+ object NoCompilationUnit extends CompilationUnit(NoSourceFile) {
+ override lazy val isJava = false
+ override def exists = false
+ override def toString() = "NoCompilationUnit"
+ }
+
/** One unit of compilation that has been submitted to the compiler.
* It typically corresponds to a single file of source code. It includes
* error-reporting hooks. */
- class CompilationUnit(val source: SourceFile) extends CompilationUnitTrait {
+ class CompilationUnit(val source: SourceFile) extends CompilationUnitContextApi { self =>
/** the fresh name creator */
var fresh: FreshNameCreator = new FreshNameCreator.Default
@@ -24,13 +34,7 @@ trait CompilationUnits { self: Global =>
/** the content of the compilation unit in tree form */
var body: Tree = EmptyTree
- /** representation for a source code comment, includes
- * '//' or '/*' '*/' in the value and the position
- */
- case class Comment(text: String, pos: Position)
-
- /** all comments found in this compilation unit */
- val comments = new ListBuffer[Comment]
+ def exists = source != NoSourceFile && source != null
// def parseSettings() = {
// val argsmarker = "SCALAC_ARGS"
@@ -45,19 +49,38 @@ trait CompilationUnits { self: Global =>
/** Note: depends now contains toplevel classes.
* To get their sourcefiles, you need to dereference with .sourcefile
*/
- val depends = new HashSet[Symbol]
+ val depends = mutable.HashSet[Symbol]()
/** so we can relink
*/
- val defined = new HashSet[Symbol]
+ val defined = mutable.HashSet[Symbol]()
/** Synthetic definitions generated by namer, eliminated by typer.
*/
- val synthetics = new HashMap[Symbol, Tree]
+ object synthetics {
+ private val map = mutable.HashMap[Symbol, Tree]()
+ def update(sym: Symbol, tree: Tree) {
+ debuglog(s"adding synthetic ($sym, $tree) to $self")
+ map.update(sym, tree)
+ }
+ def -=(sym: Symbol) {
+ debuglog(s"removing synthetic $sym from $self")
+ map -= sym
+ }
+ def get(sym: Symbol): Option[Tree] = logResultIf[Option[Tree]](s"found synthetic for $sym in $self", _.isDefined) {
+ map get sym
+ }
+ def keys: Iterable[Symbol] = map.keys
+ def clear(): Unit = map.clear()
+ override def toString = map.toString
+ }
/** things to check at end of compilation unit */
val toCheck = new ListBuffer[() => Unit]
+ /** The features that were already checked for this unit */
+ var checkedFeatures = Set[Symbol]()
+
def position(pos: Int) = source.position(pos)
/** The position of a targeted type check
@@ -72,6 +95,9 @@ trait CompilationUnits { self: Global =>
*/
val icode: LinkedHashSet[icodes.IClass] = new LinkedHashSet
+ def echo(pos: Position, msg: String) =
+ reporter.echo(pos, msg)
+
def error(pos: Position, msg: String) =
reporter.error(pos, msg)
@@ -79,12 +105,13 @@ trait CompilationUnits { self: Global =>
reporter.warning(pos, msg)
def deprecationWarning(pos: Position, msg: String) =
- if (opt.deprecation) warning(pos, msg)
- else currentRun.deprecationWarnings += 1
+ currentRun.deprecationWarnings0.warn(pos, msg)
def uncheckedWarning(pos: Position, msg: String) =
- if (opt.unchecked) warning(pos, msg)
- else currentRun.uncheckedWarnings += 1
+ currentRun.uncheckedWarnings0.warn(pos, msg)
+
+ def inlinerWarning(pos: Position, msg: String) =
+ currentRun.inlinerWarnings.warn(pos, msg)
def incompleteInputError(pos: Position, msg:String) =
reporter.incompleteInputError(pos, msg)
@@ -98,10 +125,14 @@ trait CompilationUnits { self: Global =>
override def toString() = source.toString()
def clear() {
- fresh = null
- body = null
- depends.clear
- defined.clear
+ fresh = new FreshNameCreator.Default
+ body = EmptyTree
+ depends.clear()
+ defined.clear()
+ synthetics.clear()
+ toCheck.clear()
+ checkedFeatures = Set()
+ icode.clear()
}
}
}
diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala
index dc65bf7..731f692 100644
--- a/src/compiler/scala/tools/nsc/CompileClient.scala
+++ b/src/compiler/scala/tools/nsc/CompileClient.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index 866975b..7a0a072 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -7,7 +7,7 @@ package scala.tools.nsc
import java.io.{ BufferedOutputStream, FileOutputStream, PrintStream }
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.tools.nsc.util.FakePos //Position
+import scala.reflect.internal.util.FakePos //Position
import scala.tools.util.SocketServer
import settings.FscSettings
@@ -29,9 +29,7 @@ class StandardCompileServer extends SocketServer {
var shutdown = false
var verbose = false
- val versionMsg = "Fast Scala compiler " +
- Properties.versionString + " -- " +
- Properties.copyrightString
+ val versionMsg = "Fast " + Properties.versionMsg
val MaxCharge = 0.8
@@ -87,18 +85,18 @@ class StandardCompileServer extends SocketServer {
val input = in.readLine()
def fscError(msg: String): Unit = out println (
- FakePos("fsc"),
- msg + "\n fsc -help gives more information"
+ FakePos("fsc") + msg + "\n fsc -help gives more information"
)
if (input == null || password != guessedPassword)
return
val args = input.split("\0", -1).toList
val newSettings = new FscSettings(fscError)
- this.verbose = newSettings.verbose.value
val command = newOfflineCompilerCommand(args, newSettings)
+ this.verbose = newSettings.verbose.value
info("Settings after normalizing paths: " + newSettings)
+ if (!command.files.isEmpty) info("Input files after normalizing paths: " + (command.files mkString ","))
printMemoryStats()
// Update the idle timeout if given
@@ -139,13 +137,13 @@ class StandardCompileServer extends SocketServer {
}
if (command.shouldStopWithInfo)
- reporter.info(null, command.getInfoMessage(newGlobal(newSettings, reporter)), true)
+ reporter.echo(command.getInfoMessage(newGlobal(newSettings, reporter)))
else if (command.files.isEmpty)
- reporter.info(null, command.usageMsg, true)
+ reporter.echo(command.usageMsg)
else {
if (isCompilerReusable) {
info("[Reusing existing Global instance.]")
- compiler.settings = newSettings
+ compiler.currentSettings = newSettings
compiler.reporter = reporter
}
else {
@@ -157,7 +155,7 @@ class StandardCompileServer extends SocketServer {
case ex @ FatalError(msg) =>
reporter.error(null, "fatal error: " + msg)
clearCompiler()
- case ex =>
+ case ex: Throwable =>
warn("Compile server encountered fatal condition: " + ex)
shutdown = true
throw ex
@@ -176,10 +174,22 @@ object CompileServer extends StandardCompileServer {
/** A directory holding redirected output */
private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory()
- private def redirect(setter: PrintStream => Unit, filename: String): Unit =
- setter(new PrintStream((redirectDir / filename).createFile().bufferedOutput()))
-
- def main(args: Array[String]) {
+ private def createRedirect(filename: String) =
+ new PrintStream((redirectDir / filename).createFile().bufferedOutput())
+
+ def main(args: Array[String]) =
+ execute(() => (), args)
+
+ /**
+ * Used for internal testing. The callback is called upon
+ * server start, notifying the caller that the server is
+ * ready to run. WARNING: the callback runs in the
+ * server's thread, blocking the server from doing any work
+ * until the callback is finished. Callbacks should be kept
+ * simple and clients should not try to interact with the
+ * server while the callback is processing.
+ */
+ def execute(startupCallback : () => Unit, args: Array[String]) {
val debug = args contains "-v"
if (debug) {
@@ -187,14 +197,16 @@ object CompileServer extends StandardCompileServer {
echo("Redirect dir is " + redirectDir)
}
- redirect(System.setOut, "scala-compile-server-out.log")
- redirect(System.setErr, "scala-compile-server-err.log")
- System.err.println("...starting server on socket "+port+"...")
- System.err.flush()
- compileSocket.setPort(port)
- run()
-
- compileSocket.deletePort(port)
- sys.exit(0)
+ Console.withErr(createRedirect("scala-compile-server-err.log")) {
+ Console.withOut(createRedirect("scala-compile-server-out.log")) {
+ Console.err.println("...starting server on socket "+port+"...")
+ Console.err.flush()
+ compileSocket setPort port
+ startupCallback()
+ run()
+
+ compileSocket deletePort port
+ }
+ }
}
}
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index a0c39f7..4051bda 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -13,7 +13,7 @@ import java.security.SecureRandom
import io.{ File, Path, Directory, Socket }
import scala.util.control.Exception.catching
import scala.tools.util.CompileOutputCommon
-import scala.tools.util.StringOps.splitWhere
+import scala.reflect.internal.util.StringOps.splitWhere
import scala.sys.process._
trait HasCompileSocket {
@@ -72,7 +72,7 @@ class CompileSocket extends CompileOutputCommon {
/** A temporary directory to use */
val tmpDir = {
val udir = Option(Properties.userName) getOrElse "shared"
- val f = (Path(Properties.tmpDir) / "scala-devel" / udir).createDirectory()
+ val f = (Path(Properties.tmpDir) / ("scala-devel" + udir)).createDirectory()
if (f.isDirectory && f.canWrite) {
info("[Temp directory: " + f + "]")
@@ -93,7 +93,7 @@ class CompileSocket extends CompileOutputCommon {
/** Start a new server. */
private def startNewServer(vmArgs: String) = {
- val cmd = serverCommand(vmArgs split " " toSeq)
+ val cmd = serverCommand((vmArgs split " ").toSeq)
info("[Executing command: %s]" format cmd.mkString(" "))
// Hiding inadequate daemonized implementation from public API for now
@@ -206,7 +206,7 @@ class CompileSocket extends CompileOutputCommon {
Thread sleep 100
ff.length
}
- if (Iterator continually check take 50 find (_ > 0) isEmpty) {
+ if ((Iterator continually check take 50 find (_ > 0)).isEmpty) {
ff.delete()
fatal("Unable to establish connection to server.")
}
diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala
index c9791b9..e994150 100644
--- a/src/compiler/scala/tools/nsc/CompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala
@@ -1,17 +1,18 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
-import java.io.IOException
import scala.collection.mutable.ListBuffer
import io.File
/** A class representing command line info for scalac */
class CompilerCommand(arguments: List[String], val settings: Settings) {
def this(arguments: List[String], error: String => Unit) = this(arguments, new Settings(error))
+ def this(arguments: List[String], settings: Settings, error: String => Unit) = this(arguments, settings withErrorFn error)
+
type Setting = Settings#Setting
/** file extensions of files that the compiler can process */
@@ -31,7 +32,7 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
|Boolean settings are always false unless set.
|Where multiple values are accepted, they should be comma-separated.
| example: -Xplugin:plugin1,plugin2
- |<phase> means one or a list of:
+ |<phases> means one or a comma-separated list of:
| (partial) phase names, phase ids, phase id ranges, or the string "all".
| example: -Xprint:all prints all phases.
| example: -Xprint:expl,24-26 prints phases explicitouter, closelim, dce, jvm.
@@ -46,7 +47,7 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
/** Creates a help message for a subset of options based on cond */
def createUsageMsg(cond: Setting => Boolean): String = {
val baseList = (settings.visibleSettings filter cond).toList sortBy (_.name)
- val width = baseList map (_.helpSyntax.length) max
+ val width = (baseList map (_.helpSyntax.length)).max
def format(s: String) = ("%-" + width + "s") format s
def helpStr(s: Setting) = {
val str = format(s.helpSyntax) + " " + s.helpDescription
@@ -98,7 +99,9 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
else if (Xhelp.value) xusageMsg
else if (Yhelp.value) yusageMsg
else if (showPlugins.value) global.pluginDescriptions
- else if (showPhases.value) global.phaseDescriptions
+ else if (showPhases.value) global.phaseDescriptions + (
+ if (debug.value) "\n" + global.phaseFlagDescriptions else ""
+ )
else ""
}
diff --git a/src/compiler/scala/tools/nsc/CompilerRun.scala b/src/compiler/scala/tools/nsc/CompilerRun.scala
index e642823..6746b08 100644
--- a/src/compiler/scala/tools/nsc/CompilerRun.scala
+++ b/src/compiler/scala/tools/nsc/CompilerRun.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
diff --git a/src/compiler/scala/tools/nsc/ConsoleWriter.scala b/src/compiler/scala/tools/nsc/ConsoleWriter.scala
index c06db8f..5c5606e 100644
--- a/src/compiler/scala/tools/nsc/ConsoleWriter.scala
+++ b/src/compiler/scala/tools/nsc/ConsoleWriter.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2006-2011 LAMP/EPFL
+ * Copyright 2006-2013 LAMP/EPFL
* @author Martin Odersky
*/
diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala
new file mode 100644
index 0000000..814bd58
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/Driver.scala
@@ -0,0 +1,71 @@
+package scala.tools.nsc
+
+import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
+import Properties.{ versionString, copyrightString, residentPromptString }
+import scala.reflect.internal.util.{ BatchSourceFile, FakePos }
+
+abstract class Driver {
+
+ val prompt = residentPromptString
+
+ val versionMsg = "Scala compiler " +
+ versionString + " -- " +
+ copyrightString
+
+ var reporter: ConsoleReporter = _
+ protected var command: CompilerCommand = _
+ protected var settings: Settings = _
+
+ protected def scalacError(msg: String) {
+ reporter.error(FakePos("scalac"), msg + "\n scalac -help gives more information")
+ }
+
+ protected def processSettingsHook(): Boolean = true
+
+ protected def newCompiler(): Global
+
+ protected def doCompile(compiler: Global) {
+ if (command.files.isEmpty) {
+ reporter.echo(command.usageMsg)
+ reporter.echo(compiler.pluginOptionsHelp)
+ } else {
+ val run = new compiler.Run()
+ run compile command.files
+ reporter.printSummary()
+ }
+ }
+
+ def process(args: Array[String]) {
+ val ss = new Settings(scalacError)
+ reporter = new ConsoleReporter(ss)
+ command = new CompilerCommand(args.toList, ss)
+ settings = command.settings
+
+ if (settings.version.value) {
+ reporter.echo(versionMsg)
+ } else if (processSettingsHook()) {
+ val compiler = newCompiler()
+ try {
+ if (reporter.hasErrors)
+ reporter.flush()
+ else if (command.shouldStopWithInfo)
+ reporter.echo(command.getInfoMessage(compiler))
+ else
+ doCompile(compiler)
+ } catch {
+ case ex: Throwable =>
+ compiler.reportThrowable(ex)
+ ex match {
+ case FatalError(msg) => // signals that we should fail compilation.
+ case _ => throw ex // unexpected error, tell the outside world.
+ }
+ }
+ }
+ }
+
+ def main(args: Array[String]) {
+ process(args)
+ sys.exit(if (reporter.hasErrors) 1 else 0)
+ }
+
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/EvalLoop.scala b/src/compiler/scala/tools/nsc/EvalLoop.scala
index 5cbd798..c4147fa 100644
--- a/src/compiler/scala/tools/nsc/EvalLoop.scala
+++ b/src/compiler/scala/tools/nsc/EvalLoop.scala
@@ -1,11 +1,11 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
-import annotation.tailrec
+import scala.annotation.tailrec
import java.io.EOFException
trait EvalLoop {
diff --git a/src/compiler/scala/tools/nsc/FatalError.scala b/src/compiler/scala/tools/nsc/FatalError.scala
deleted file mode 100644
index a18c98f..0000000
--- a/src/compiler/scala/tools/nsc/FatalError.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-
-import scala.util.control.ControlThrowable
-
-case class FatalError(msg: String) extends Throwable(msg)
-
-class MissingRequirementError(val req: String) extends FatalError(req + " not found.")
-
-object MissingRequirementError {
- def unapply(x: Throwable) = x match {
- case x: MissingRequirementError => Some(x.req)
- case _ => None
- }
-}
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
index 27596dc..c8fd598 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Lex Spoon
*/
@@ -35,7 +35,10 @@ extends CompilerCommand(args, settings) {
else {
val f = io.File(target)
if (!f.hasExtension("class", "jar", "zip") && f.canRead) AsScript
- else sys.error("Cannot figure out how to run target: " + target)
+ else {
+ Console.err.println("No such file or class on classpath: " + target)
+ Error
+ }
}
}
/** String with either the jar file, class name, or script file name. */
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
index b9e9a14..9c2db11 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2006-2011 LAMP/EPFL
+ * Copyright 2006-2013 LAMP/EPFL
* @author Lex Spoon
*/
@@ -8,7 +8,7 @@ package scala.tools.nsc
import scala.tools.util.PathResolver
class GenericRunnerSettings(error: String => Unit) extends Settings(error) {
- def classpathURLs = new PathResolver(this) asURLs
+ def classpathURLs = new PathResolver(this).asURLs
val howtorun =
ChoiceSetting(
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 4f56a95..aea3e0d 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -7,16 +7,15 @@ package scala.tools.nsc
import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
-import compat.Platform.currentTime
-
-import scala.tools.util.Profiling
+import scala.compat.Platform.currentTime
+import scala.tools.util.PathResolver
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
-import util.{ Exceptional, ClassPath, SourceFile, Statistics, BatchSourceFile, ScriptSourceFile, ShowPickled, returning }
-import reflect.generic.{ PickleBuffer, PickleFormat }
+import util.{ Exceptional, ClassPath, MergedClassPath, StatisticsInfo, ScalaClassLoader, returning }
+import scala.reflect.internal.util.{ NoPosition, OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
+import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
import settings.{ AestheticSettings }
-
import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers }
import symtab.classfile.Pickler
import dependencies.DependencyAnalysis
@@ -24,48 +23,99 @@ import plugins.Plugins
import ast._
import ast.parser._
import typechecker._
+import transform.patmat.PatternMatching
import transform._
-
import backend.icode.{ ICodes, GenICode, ICodeCheckers }
import backend.{ ScalaPrimitives, Platform, MSILPlatform, JavaPlatform }
-import backend.jvm.GenJVM
-import backend.opt.{ Inliners, ClosureElimination, DeadCodeElimination }
+import backend.jvm.{GenJVM, GenASM}
+import backend.opt.{ Inliners, InlineExceptionHandlers, ClosureElimination, DeadCodeElimination }
import backend.icode.analysis._
+import scala.language.postfixOps
+import scala.reflect.internal.StdAttachments
+import scala.reflect.ClassTag
+
+class Global(var currentSettings: Settings, var reporter: Reporter)
+ extends SymbolTable
+ with CompilationUnits
+ with Plugins
+ with PhaseAssembly
+ with Trees
+ with Printers
+ with DocComments
+ with Positions { self =>
+
+ // the mirror --------------------------------------------------
+
+ override def isCompilerUniverse = true
+
+ class GlobalMirror extends Roots(NoSymbol) {
+ val universe: self.type = self
+ def rootLoader: LazyType = platform.rootLoader
+ override def toString = "compiler mirror"
+ }
+
+ lazy val rootMirror: Mirror = {
+ val rm = new GlobalMirror
+ rm.init()
+ rm.asInstanceOf[Mirror]
+ }
+ def RootClass: ClassSymbol = rootMirror.RootClass
+ def EmptyPackageClass: ClassSymbol = rootMirror.EmptyPackageClass
+
+ import definitions.findNamedMember
+ def findMemberFromRoot(fullName: Name): Symbol = rootMirror.findMemberFromRoot(fullName)
-class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
- with CompilationUnits
- with Plugins
- with PhaseAssembly
-{
// alternate constructors ------------------------------------------
+ override def settings = currentSettings
+
def this(reporter: Reporter) =
this(new Settings(err => reporter.error(null, err)), reporter)
def this(settings: Settings) =
this(settings, new ConsoleReporter(settings))
+ def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = gen.mkAttributedQualifier(tpe, termSym)
+
+ def picklerPhase: Phase = if (currentRun.isDefined) currentRun.picklerPhase else NoPhase
+
// platform specific elements
- type ThisPlatform = Platform[_] { val global: Global.this.type }
+ type ThisPlatform = Platform { val global: Global.this.type }
lazy val platform: ThisPlatform =
if (forMSIL) new { val global: Global.this.type = Global.this } with MSILPlatform
else new { val global: Global.this.type = Global.this } with JavaPlatform
- def classPath: ClassPath[_] = platform.classPath
- def rootLoader: LazyType = platform.rootLoader
+ type PlatformClassPath = ClassPath[platform.BinaryRepr]
+ type OptClassPath = Option[PlatformClassPath]
+
+ def classPath: PlatformClassPath = platform.classPath
// sub-components --------------------------------------------------
/** Generate ASTs */
- object gen extends {
+ type TreeGen = scala.tools.nsc.ast.TreeGen
+
+ /** Tree generation, usually based on existing symbols. */
+ override object gen extends {
val global: Global.this.type = Global.this
} with TreeGen {
def mkAttributedCast(tree: Tree, pt: Type): Tree =
typer.typed(mkCast(tree, pt))
}
+ /** Trees fresh from the oven, mostly for use by the parser. */
+ object treeBuilder extends {
+ val global: Global.this.type = Global.this
+ } with TreeBuilder {
+ def freshName(prefix: String): Name = freshTermName(prefix)
+ def freshTermName(prefix: String): TermName = currentUnit.freshTermName(prefix)
+ def freshTypeName(prefix: String): TypeName = currentUnit.freshTypeName(prefix)
+ def o2p(offset: Int): Position = new OffsetPosition(currentUnit.source, offset)
+ def r2p(start: Int, mid: Int, end: Int): Position = rangePos(currentUnit.source, start, mid, end)
+ }
+
/** Fold constants */
object constfold extends {
val global: Global.this.type = Global.this
@@ -103,13 +153,44 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
/** Some statistics (normally disabled) set with -Ystatistics */
object statistics extends {
val global: Global.this.type = Global.this
- } with Statistics
+ } with StatisticsInfo
/** Print tree in detailed form */
object nodePrinters extends {
val global: Global.this.type = Global.this
} with NodePrinters {
+ var lastPrintedPhase: Phase = NoPhase
+ var lastPrintedSource: String = ""
infolevel = InfoLevel.Verbose
+
+ def showUnit(unit: CompilationUnit) {
+ print(" // " + unit.source)
+ if (unit.body == null) println(": tree is null")
+ else {
+ val source = util.stringFromWriter(w => newTreePrinter(w) print unit.body)
+
+ // treePrinter show unit.body
+ if (lastPrintedSource == source)
+ println(": tree is unchanged since " + lastPrintedPhase)
+ else {
+ lastPrintedPhase = phase.prev // since we're running inside "afterPhase"
+ lastPrintedSource = source
+ println("")
+ println(source)
+ println("")
+ }
+ }
+ }
+ }
+
+ def withInfoLevel[T](infolevel: nodePrinters.InfoLevel.Value)(op: => T) = {
+ val saved = nodePrinters.infolevel
+ try {
+ nodePrinters.infolevel = infolevel
+ op
+ } finally {
+ nodePrinters.infolevel = saved
+ }
}
/** Representing ASTs as graphs */
@@ -131,7 +212,9 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
/** Register new context; called for every created context
*/
- def registerContext(c: analyzer.Context) {}
+ def registerContext(c: analyzer.Context) {
+ lastSeenContext = c
+ }
/** Register top level class (called on entering the class)
*/
@@ -141,13 +224,47 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
// not deprecated yet, but a method called "error" imported into
// nearly every trait really must go. For now using globalError.
- def error(msg: String) = globalError(msg)
- def globalError(msg: String) = reporter.error(NoPosition, msg)
- def inform(msg: String) = reporter.info(NoPosition, msg, true)
- def warning(msg: String) =
- if (opt.fatalWarnings) globalError(msg)
+ def error(msg: String) = globalError(msg)
+ def inform(msg: String) = reporter.echo(msg)
+ override def globalError(msg: String) = reporter.error(NoPosition, msg)
+ override def warning(msg: String) =
+ if (settings.fatalWarnings.value) globalError(msg)
else reporter.warning(NoPosition, msg)
+ // Getting in front of Predef's asserts to supplement with more info.
+ // This has the happy side effect of masking the one argument forms
+ // of assert and require (but for now I've reproduced them here,
+ // because there are a million to fix.)
+ @inline final def assert(assertion: Boolean, message: => Any) {
+ Predef.assert(assertion, supplementErrorMessage("" + message))
+ }
+ @inline final def assert(assertion: Boolean) {
+ assert(assertion, "")
+ }
+ @inline final def require(requirement: Boolean, message: => Any) {
+ Predef.require(requirement, supplementErrorMessage("" + message))
+ }
+ @inline final def require(requirement: Boolean) {
+ require(requirement, "")
+ }
+
+ // Needs to call error to make sure the compile fails.
+ override def abort(msg: String): Nothing = {
+ error(msg)
+ super.abort(msg)
+ }
+
+ @inline final def ifDebug(body: => Unit) {
+ if (settings.debug.value)
+ body
+ }
+ // Warnings issued only under -Ydebug. For messages which should reach
+ // developer ears, but are not adequately actionable by users.
+ @inline final override def debugwarn(msg: => String) {
+ if (settings.debug.value)
+ warning(msg)
+ }
+
private def elapsedMessage(msg: String, start: Long) =
msg + " in " + (currentTime - start) + "ms"
@@ -157,15 +274,29 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start))
def logError(msg: String, t: Throwable): Unit = ()
+
+ def logAfterEveryPhase[T](msg: String)(op: => T) {
+ log("Running operation '%s' after every phase.\n".format(msg) + describeAfterEveryPhase(op))
+ }
+
+ override def shouldLogAtThisPhase = settings.log.isSetByUser && (
+ (settings.log containsPhase globalPhase) || (settings.log containsPhase phase)
+ )
// Over 200 closure objects are eliminated by inlining this.
- @inline final def log(msg: => AnyRef): Unit =
- if (settings.log containsPhase globalPhase)
- inform("[log " + phase + "] " + msg)
+ @inline final def log(msg: => AnyRef) {
+ if (shouldLogAtThisPhase)
+ inform("[log %s%s] %s".format(globalPhase, atPhaseStackMessage, msg))
+ }
- def logThrowable(t: Throwable): Unit = globalError(throwableAsString(t))
- def throwableAsString(t: Throwable): String =
- if (opt.richExes) Exceptional(t).force().context()
- else util.stackTraceString(t)
+ @inline final override def debuglog(msg: => String) {
+ if (settings.debug.value)
+ log(msg)
+ }
+
+ @deprecated("Renamed to reportThrowable", "2.10.1")
+ def logThrowable(t: Throwable): Unit = reportThrowable(t)
+ def reportThrowable(t: Throwable): Unit = globalError(throwableAsString(t))
+ override def throwableAsString(t: Throwable) = util.stackTraceString(t)
// ------------ File interface -----------------------------------------
@@ -193,7 +324,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def ccon = Class.forName(name).getConstructor(classOf[CharsetDecoder], classOf[Reporter])
try Some(ccon.newInstance(charset.newDecoder(), reporter).asInstanceOf[SourceReader])
- catch { case x =>
+ catch { case ex: Throwable =>
globalError("exception while trying to instantiate source reader '" + name + "'")
None
}
@@ -232,10 +363,14 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
}
}
+ // behavior
+
// debugging
def checkPhase = wasActive(settings.check)
def logPhase = isActive(settings.log)
- def writeICode = settings.writeICode.value
+
+ // Write *.icode files right after GenICode when -Xprint-icode was given.
+ def writeICodeAtICode = settings.writeICode.isSetByUser && isActive(settings.writeICode)
// showing/printing things
def browsePhase = isActive(settings.browse)
@@ -246,23 +381,19 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def showNames = List(showClass, showObject).flatten
def showPhase = isActive(settings.Yshow)
def showSymbols = settings.Yshowsyms.value
- def showTrees = settings.Xshowtrees.value
+ def showTrees = settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value
val showClass = optSetting[String](settings.Xshowcls) map (x => splitClassAndPhase(x, false))
val showObject = optSetting[String](settings.Xshowobj) map (x => splitClassAndPhase(x, true))
-
- // profiling
- def profCPUPhase = isActive(settings.Yprofile) && !profileAll
- def profileAll = settings.Yprofile.doAllPhases
- def profileAny = !settings.Yprofile.isDefault || !settings.YprofileMem.isDefault
- def profileClass = settings.YprofileClass.value
- def profileMem = settings.YprofileMem.value
-
- // shortish-term property based options
- def timings = sys.props contains "scala.timings"
- def inferDebug = (sys.props contains "scalac.debug.infer") || settings.Yinferdebug.value
- def typerDebug = (sys.props contains "scalac.debug.typer") || settings.Ytyperdebug.value
}
+ // The current division between scala.reflect.* and scala.tools.nsc.* is pretty
+ // clunky. It is often difficult to have a setting influence something without having
+ // to create it on that side. For this one my strategy is a constant def at the file
+ // where I need it, and then an override in Global with the setting.
+ override protected val etaExpandKeepsStar = settings.etaExpandKeepsStar.value
+ // Here comes another one...
+ override protected val enableTypeVarExperimentals = settings.Xexperimental.value
+
// True if -Xscript has been set, indicating a script run.
def isScriptRun = opt.script.isDefined
@@ -281,6 +412,9 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
val global: Global.this.type = Global.this
}
+ /** Returns the mirror that loaded given symbol */
+ def mirrorThatLoaded(sym: Symbol): Mirror = rootMirror
+
// ------------ Phases -------------------------------------------}
var globalPhase: Phase = NoPhase
@@ -303,8 +437,6 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
override def erasedTypes: Boolean = isErased
private val isFlat = prev.name == "flatten" || prev.flatClasses
override def flatClasses: Boolean = isFlat
- // private val isDevirtualized = prev.name == "devirtualize" || prev.devirtualized
- // override def devirtualized: Boolean = isDevirtualized // (part of DEVIRTUALIZE)
private val isSpecialized = prev.name == "specialize" || prev.specialized
override def specialized: Boolean = isSpecialized
private val isRefChecked = prev.name == "refchecks" || prev.refChecked
@@ -318,10 +450,13 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
}
final def applyPhase(unit: CompilationUnit) {
+ if ((unit ne null) && unit.exists)
+ lastSeenSourceFile = unit.source
+
if (opt.echoFilenames)
inform("[running phase " + name + " on " + unit + "]")
- val unit0 = currentRun.currentUnit
+ val unit0 = currentUnit
try {
currentRun.currentUnit = unit
if (!cancelled(unit)) {
@@ -330,15 +465,15 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
}
currentRun.advanceUnit
} finally {
- //assert(currentRun.currentUnit == unit)
+ //assert(currentUnit == unit)
currentRun.currentUnit = unit0
}
}
}
/** Switch to turn on detailed type logs */
- var printTypings = opt.typerDebug
- var printInfers = opt.inferDebug
+ var printTypings = settings.Ytyperdebug.value
+ var printInfers = settings.Yinferdebug.value
// phaseName = "parser"
object syntaxAnalyzer extends {
@@ -347,152 +482,188 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
val runsRightAfter = None
} with SyntaxAnalyzer
+ // !!! I think we're overdue for all these phase objects being lazy vals.
+ // There's no way for a Global subclass to provide a custom typer
+ // despite the existence of a "def newTyper(context: Context): Typer"
+ // which is clearly designed for that, because it's defined in
+ // Analyzer and Global's "object analyzer" allows no override. For now
+ // I only changed analyzer.
+ //
// factory for phases: namer, packageobjects, typer
- object analyzer extends {
+ lazy val analyzer = new {
val global: Global.this.type = Global.this
} with Analyzer
+ // phaseName = "patmat"
+ object patmat extends {
+ val global: Global.this.type = Global.this
+ val runsAfter = List("typer")
+ // patmat doesn't need to be right after typer, as long as we run before supperaccesors
+ // (sbt does need to run right after typer, so don't conflict)
+ val runsRightAfter = None
+ } with PatternMatching
+
// phaseName = "superaccessors"
object superAccessors extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("typer")
+ val runsAfter = List("patmat")
val runsRightAfter = None
} with SuperAccessors
+ // phaseName = "extmethods"
+ object extensionMethods extends {
+ val global: Global.this.type = Global.this
+ val runsAfter = List("superaccessors")
+ val runsRightAfter = None
+ } with ExtensionMethods
+
// phaseName = "pickler"
object pickler extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("superaccessors")
+ val runsAfter = List("extmethods")
val runsRightAfter = None
} with Pickler
// phaseName = "refchecks"
- object refchecks extends {
+ override object refChecks extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("pickler")
+ val runsAfter = List("pickler")
val runsRightAfter = None
} with RefChecks
- // phaseName = "liftcode"
- object liftcode extends {
- val global: Global.this.type = Global.this
- val runsAfter = List[String]("refchecks")
- val runsRightAfter = None
- } with LiftCode
-
// phaseName = "uncurry"
- object uncurry extends {
+ override object uncurry extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("refchecks", "liftcode")
+ val runsAfter = List("refchecks")
val runsRightAfter = None
} with UnCurry
// phaseName = "tailcalls"
object tailCalls extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("uncurry")
+ val runsAfter = List("uncurry")
val runsRightAfter = None
} with TailCalls
// phaseName = "explicitouter"
object explicitOuter extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("tailcalls")
+ val runsAfter = List("tailcalls")
val runsRightAfter = None
} with ExplicitOuter
// phaseName = "specialize"
object specializeTypes extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("")
+ val runsAfter = List("")
val runsRightAfter = Some("tailcalls")
} with SpecializeTypes
// phaseName = "erasure"
- object erasure extends {
+ override object erasure extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("explicitouter")
+ val runsAfter = List("explicitouter")
val runsRightAfter = Some("explicitouter")
} with Erasure
+ // phaseName = "posterasure"
+ object postErasure extends {
+ val global: Global.this.type = Global.this
+ val runsAfter = List("erasure")
+ val runsRightAfter = Some("erasure")
+ } with PostErasure
+
// phaseName = "lazyvals"
object lazyVals extends {
- final val FLAGS_PER_WORD = 32
val global: Global.this.type = Global.this
- val runsAfter = List[String]("erasure")
+ val runsAfter = List("erasure")
val runsRightAfter = None
} with LazyVals
// phaseName = "lambdalift"
object lambdaLift extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("lazyvals")
+ val runsAfter = List("lazyvals")
val runsRightAfter = None
} with LambdaLift
// phaseName = "constructors"
object constructors extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("lambdalift")
+ val runsAfter = List("lambdalift")
val runsRightAfter = None
} with Constructors
// phaseName = "flatten"
object flatten extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("constructors")
+ val runsAfter = List("constructors")
val runsRightAfter = None
} with Flatten
// phaseName = "mixin"
object mixer extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("flatten", "constructors")
+ val runsAfter = List("flatten", "constructors")
val runsRightAfter = None
} with Mixin
// phaseName = "cleanup"
object cleanup extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("mixin")
+ val runsAfter = List("mixin")
val runsRightAfter = None
} with CleanUp
// phaseName = "icode"
object genicode extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("cleanup")
+ val runsAfter = List("cleanup")
val runsRightAfter = None
} with GenICode
// phaseName = "inliner"
object inliner extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("icode")
+ val runsAfter = List("icode")
val runsRightAfter = None
} with Inliners
+ // phaseName = "inlineExceptionHandlers"
+ object inlineExceptionHandlers extends {
+ val global: Global.this.type = Global.this
+ val runsAfter = List("inliner")
+ val runsRightAfter = None
+ } with InlineExceptionHandlers
+
// phaseName = "closelim"
object closureElimination extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("inliner")
+ val runsAfter = List("inlineExceptionHandlers")
val runsRightAfter = None
} with ClosureElimination
// phaseName = "dce"
object deadCode extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("closelim")
+ val runsAfter = List("closelim")
val runsRightAfter = None
} with DeadCodeElimination
- // phaseName = "jvm"
+ // phaseName = "jvm", FJBG-based version
object genJVM extends {
val global: Global.this.type = Global.this
- val runsAfter = List[String]("dce")
+ val runsAfter = List("dce")
val runsRightAfter = None
} with GenJVM
+ // phaseName = "jvm", ASM-based version
+ object genASM extends {
+ val global: Global.this.type = Global.this
+ val runsAfter = List("dce")
+ val runsRightAfter = None
+ } with GenASM
+
// This phase is optional: only added if settings.make option is given.
// phaseName = "dependencyAnalysis"
object dependencyAnalysis extends {
@@ -505,7 +676,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
object terminal extends {
val global: Global.this.type = Global.this
val phaseName = "terminal"
- val runsAfter = List[String]("jvm", "msil")
+ val runsAfter = List("jvm", "msil")
val runsRightAfter = None
} with SubComponent {
private var cache: Option[GlobalPhase] = None
@@ -544,7 +715,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
object icodeChecker extends icodeCheckers.ICodeChecker()
object typer extends analyzer.Typer(
- analyzer.NoContext.make(EmptyTree, Global.this.definitions.RootClass, new Scope)
+ analyzer.NoContext.make(EmptyTree, RootClass, newScope)
)
/** Add the internal compiler phases to the phases set.
@@ -558,14 +729,17 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
analyzer.namerFactory -> "resolve names, attach symbols to named trees",
analyzer.packageObjects -> "load package objects",
analyzer.typerFactory -> "the meat and potatoes: type the trees",
+ patmat -> "translate match expressions",
superAccessors -> "add super accessors in traits and nested classes",
+ extensionMethods -> "add extension methods for inline classes",
pickler -> "serialize symbol tables",
- refchecks -> "reference/override checking, translate nested objects",
+ refChecks -> "reference/override checking, translate nested objects",
uncurry -> "uncurry, translate function values to anonymous classes",
tailCalls -> "replace tail calls by jumps",
specializeTypes -> "@specialized-driven class and method specialization",
explicitOuter -> "this refs to outer pointers, translate patterns",
erasure -> "erase types, add interfaces for traits",
+ postErasure -> "clean up erased inline classes",
lazyVals -> "allocate bitmaps, translate lazy vals into lazified defs",
lambdaLift -> "move nested functions to top level",
constructors -> "move field definitions into constructors",
@@ -573,6 +747,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
cleanup -> "platform-specific cleanups, generate reflective calls",
genicode -> "generate portable intermediate code",
inliner -> "optimization: do inlining",
+ inlineExceptionHandlers -> "optimization: inline exception handlers",
closureElimination -> "optimization: eliminate uncalled closures",
deadCode -> "optimization: eliminate dead code",
terminal -> "The last phase in the compiler chain"
@@ -584,7 +759,6 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
// and attractive -Xshow-phases output is unlikely if the descs span 20 files anyway.
private val otherPhaseDescriptions = Map(
"flatten" -> "eliminate inner classes",
- "liftcode" -> "reify trees",
"jvm" -> "generate JVM bytecode"
) withDefaultValue ""
@@ -606,7 +780,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
/* The set of phase objects that is the basis for the compiler phase chain */
protected lazy val phasesSet = new mutable.HashSet[SubComponent]
protected lazy val phasesDescMap = new mutable.HashMap[SubComponent, String] withDefaultValue ""
- private lazy val phaseTimings = new Phase.TimingModel // tracking phase stats
+
protected def addToPhasesSet(sub: SubComponent, descr: String) {
phasesSet += sub
phasesDescMap(sub) = descr
@@ -630,12 +804,266 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
}
line1 :: line2 :: descs mkString
}
+ /** Summary of the per-phase values of nextFlags and newFlags, shown
+ * with -Xshow-phases if -Ydebug also given.
+ */
+ def phaseFlagDescriptions: String = {
+ val width = phaseNames map (_.length) max
+ val fmt = "%" + width + "s %2s %s\n"
+
+ val line1 = fmt.format("phase name", "id", "new flags")
+ val line2 = fmt.format("----------", "--", "---------")
+ val descs = phaseDescriptors.zipWithIndex map {
+ case (ph, idx) =>
+ def fstr1 = if (ph.phaseNewFlags == 0L) "" else "[START] " + Flags.flagsToString(ph.phaseNewFlags)
+ def fstr2 = if (ph.phaseNextFlags == 0L) "" else "[END] " + Flags.flagsToString(ph.phaseNextFlags)
+ val fstr = (
+ if (ph.ownPhase.id == 1) Flags.flagsToString(Flags.InitialFlags)
+ else if (ph.phaseNewFlags != 0L && ph.phaseNextFlags != 0L) fstr1 + " " + fstr2
+ else fstr1 + fstr2
+ )
+ fmt.format(ph.phaseName, idx + 1, fstr)
+ }
+ line1 :: line2 :: descs mkString
+ }
+
+ /** Returns List of (phase, value) pairs, including only those
+ * where the value compares unequal to the previous phase's value.
+ */
+ def afterEachPhase[T](op: => T): List[(Phase, T)] = {
+ phaseDescriptors.map(_.ownPhase).filterNot(_ eq NoPhase).foldLeft(List[(Phase, T)]()) { (res, ph) =>
+ val value = afterPhase(ph)(op)
+ if (res.nonEmpty && res.head._2 == value) res
+ else ((ph, value)) :: res
+ } reverse
+ }
+
+ /** Returns List of ChangeAfterPhase objects, encapsulating those
+ * phase transitions where the result of the operation gave a different
+ * list than it had when run during the previous phase.
+ */
+ def changesAfterEachPhase[T](op: => List[T]): List[ChangeAfterPhase[T]] = {
+ val ops = ((NoPhase, Nil)) :: afterEachPhase(op)
+
+ ops sliding 2 map {
+ case (_, before) :: (ph, after) :: Nil =>
+ val lost = before filterNot (after contains _)
+ val gained = after filterNot (before contains _)
+ ChangeAfterPhase(ph, lost, gained)
+ case _ => ???
+ } toList
+ }
+ private def numberedPhase(ph: Phase) = "%2d/%s".format(ph.id, ph.name)
+
+ case class ChangeAfterPhase[+T](ph: Phase, lost: List[T], gained: List[T]) {
+ private def mkStr(what: String, xs: List[_]) = (
+ if (xs.isEmpty) ""
+ else xs.mkString(what + " after " + numberedPhase(ph) + " {\n ", "\n ", "\n}\n")
+ )
+ override def toString = mkStr("Lost", lost) + mkStr("Gained", gained)
+ }
+
+ def describeAfterEachPhase[T](op: => T): List[String] =
+ afterEachPhase(op) map { case (ph, t) => "[after %-15s] %s".format(numberedPhase(ph), t) }
+
+ def describeAfterEveryPhase[T](op: => T): String =
+ describeAfterEachPhase(op) map (" " + _ + "\n") mkString
+
+ def printAfterEachPhase[T](op: => T): Unit =
+ describeAfterEachPhase(op) foreach (m => println(" " + m))
+
+ // ------------ Invalidations ---------------------------------
+
+ /** Is given package class a system package class that cannot be invalidated?
+ */
+ private def isSystemPackageClass(pkg: Symbol) =
+ pkg == RootClass ||
+ pkg == definitions.ScalaPackageClass || {
+ val pkgname = pkg.fullName
+ (pkgname startsWith "scala.") && !(pkgname startsWith "scala.tools")
+ }
+
+ /** Invalidates packages that contain classes defined in a classpath entry, and
+ * rescans that entry.
+ * @param path A fully qualified name that refers to a directory or jar file that's
+ * an entry on the classpath.
+ * First, causes the classpath entry referred to by `path` to be rescanned, so that
+ * any new files or deleted files or changes in subpackages are picked up.
+ * Second, invalidates any packages for which one of the following considitions is met:
+
+ * - the classpath entry contained during the last compilation run classfiles
+ * that represent a member in the package
+ * - the classpath entry now contains classfiles
+ * that represent a member in the package
+ * - the set of subpackages has changed.
+ *
+ * The invalidated packages are reset in their entirety; all member classes and member packages
+ * are re-accessed using the new classpath.
+ * Not invalidated are system packages that the compiler needs to access as parts
+ * of standard definitions. The criterion what is a system package is currently:
+ * any package rooted in "scala", with the exception of packages rooted in "scala.tools".
+ * This can be refined later.
+ * @return A pair consisting of
+ * - a list of invalidated packages
+ * - a list of of packages that should have been invalidated but were not because
+ * they are system packages.
+ */
+ def invalidateClassPathEntries(paths: String*): (List[ClassSymbol], List[ClassSymbol]) = {
+ val invalidated, failed = new mutable.ListBuffer[ClassSymbol]
+ classPath match {
+ case cp: MergedClassPath[_] =>
+ def assoc(path: String): List[(PlatformClassPath, PlatformClassPath)] = {
+ val dir = AbstractFile getDirectory path
+ val canonical = dir.canonicalPath
+ def matchesCanonical(e: ClassPath[_]) = e.origin match {
+ case Some(opath) =>
+ (AbstractFile getDirectory opath).canonicalPath == canonical
+ case None =>
+ false
+ }
+ cp.entries find matchesCanonical match {
+ case Some(oldEntry) =>
+ List(oldEntry -> cp.context.newClassPath(dir))
+ case None =>
+ println(s"canonical = $canonical, origins = ${cp.entries map (_.origin)}")
+ error(s"cannot invalidate: no entry named $path in classpath $classPath")
+ List()
+ }
+ }
+ val subst = Map(paths flatMap assoc: _*)
+ if (subst.nonEmpty) {
+ platform updateClassPath subst
+ informProgress(s"classpath updated on entries [${subst.keys mkString ","}]")
+ def mkClassPath(elems: Iterable[PlatformClassPath]): PlatformClassPath =
+ if (elems.size == 1) elems.head
+ else new MergedClassPath(elems, classPath.context)
+ val oldEntries = mkClassPath(subst.keys)
+ val newEntries = mkClassPath(subst.values)
+ reSync(RootClass, Some(classPath), Some(oldEntries), Some(newEntries), invalidated, failed)
+ }
+ }
+ def show(msg: String, syms: scala.collection.Traversable[Symbol]) =
+ if (syms.nonEmpty)
+ informProgress(s"$msg: ${syms map (_.fullName) mkString ","}")
+ show("invalidated packages", invalidated)
+ show("could not invalidate system packages", failed)
+ (invalidated.toList, failed.toList)
+ }
+
+ /** Re-syncs symbol table with classpath
+ * @param root The root symbol to be resynced (a package class)
+ * @param allEntries Optionally, the corresponding package in the complete current classPath
+ * @param oldEntries Optionally, the corresponding package in the old classPath entries
+ * @param newEntries Optionally, the corresponding package in the new classPath entries
+ * @param invalidated A listbuffer collecting the invalidated package classes
+ * @param failed A listbuffer collecting system package classes which could not be invalidated
+ * The resyncing strategy is determined by the absence or presence of classes and packages.
+ * If either oldEntries or newEntries contains classes, root is invalidated, provided a corresponding package
+ * exists in allEntries, or otherwise is removed.
+ * Otherwise, the action is determined by the following matrix, with columns:
+ *
+ * old new all sym action
+ * + + + + recurse into all child packages of old ++ new
+ * + - + + invalidate root
+ * + - - + remove root from its scope
+ * - + + + invalidate root
+ * - + + - create and enter root
+ * - - * * no action
+ *
+ * Here, old, new, all mean classpaths and sym means symboltable. + is presence of an
+ * entry in its column, - is absence, * is don't care.
+ *
+ * Note that new <= all and old <= sym, so the matrix above covers all possibilities.
+ */
+ private def reSync(root: ClassSymbol,
+ allEntries: OptClassPath, oldEntries: OptClassPath, newEntries: OptClassPath,
+ invalidated: mutable.ListBuffer[ClassSymbol], failed: mutable.ListBuffer[ClassSymbol]) {
+ ifDebug(informProgress(s"syncing $root, $oldEntries -> $newEntries"))
+
+ val getName: ClassPath[platform.BinaryRepr] => String = (_.name)
+ def hasClasses(cp: OptClassPath) = cp.isDefined && cp.get.classes.nonEmpty
+ def invalidateOrRemove(root: ClassSymbol) = {
+ allEntries match {
+ case Some(cp) => root setInfo new loaders.PackageLoader(cp)
+ case None => root.owner.info.decls unlink root.sourceModule
+ }
+ invalidated += root
+ }
+ def packageNames(cp: PlatformClassPath): Set[String] = cp.packages.toSet map getName
+ def subPackage(cp: PlatformClassPath, name: String): OptClassPath =
+ cp.packages find (cp1 => getName(cp1) == name)
+
+ val classesFound = hasClasses(oldEntries) || hasClasses(newEntries)
+ if (classesFound && !isSystemPackageClass(root)) {
+ invalidateOrRemove(root)
+ } else {
+ if (classesFound) {
+ if (root.isRoot) invalidateOrRemove(EmptyPackageClass)
+ else failed += root
+ }
+ (oldEntries, newEntries) match {
+ case (Some(oldcp) , Some(newcp)) =>
+ for (pstr <- packageNames(oldcp) ++ packageNames(newcp)) {
+ val pname = newTermName(pstr)
+ val pkg = (root.info decl pname) orElse {
+ // package was created by external agent, create symbol to track it
+ assert(!subPackage(oldcp, pstr).isDefined)
+ loaders.enterPackage(root, pstr, new loaders.PackageLoader(allEntries.get))
+ }
+ reSync(
+ pkg.moduleClass.asInstanceOf[ClassSymbol],
+ subPackage(allEntries.get, pstr), subPackage(oldcp, pstr), subPackage(newcp, pstr),
+ invalidated, failed)
+ }
+ case (Some(oldcp), None) =>
+ invalidateOrRemove(root)
+ case (None, Some(newcp)) =>
+ invalidateOrRemove(root)
+ case (None, None) =>
+ }
+ }
+ }
+
+ /** Invalidate contents of setting -Yinvalidate */
+ def doInvalidation() = settings.Yinvalidate.value match {
+ case "" =>
+ case entry => invalidateClassPathEntries(entry)
+ }
// ----------- Runs ---------------------------------------
private var curRun: Run = null
private var curRunId = 0
+ /** A hook that lets subclasses of `Global` define whether a package or class should be kept loaded for the
+ * next compiler run. If the parameter `sym` is a class or object, and `clearOnNextRun(sym)` returns `true`,
+ * then the symbol is unloaded and reset to its state before the last compiler run. If the parameter `sym` is
+ * a package, and clearOnNextRun(sym)` returns `true`, the package is recursively searched for
+ * classes to drop.
+ *
+ * Example: Let's say I want a compiler that drops all classes corresponding to the current project
+ * between runs. Then `keepForNextRun` of a toplevel class or object should return `true` if the
+ * class or object does not form part of the current project, `false` otherwise. For a package,
+ * clearOnNextRun should return `true` if no class in that package forms part of the current project,
+ * `false` otherwise.
+ *
+ * @param sym A class symbol, object symbol, package, or package class.
+ */
+ @deprecated("use invalidateClassPathEntries instead", "2.10.0")
+ def clearOnNextRun(sym: Symbol) = false
+ /* To try out clearOnNext run on the scala.tools.nsc project itself
+ * replace `false` above with the following code
+
+ settings.Xexperimental.value && { sym.isRoot || {
+ sym.fullName match {
+ case "scala" | "scala.tools" | "scala.tools.nsc" => true
+ case _ => sym.owner.fullName.startsWith("scala.tools.nsc")
+ }
+ }}
+
+ * Then, fsc -Xexperimental clears the nsc project between successive runs of `fsc`.
+ */
+
/** Remove the current run when not needed anymore. Used by the build
* manager to save on the memory foot print. The current run holds on
* to all compilation units, which in turn hold on to trees.
@@ -644,9 +1072,115 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
curRun = null
}
+ object typeDeconstruct extends {
+ val global: Global.this.type = Global.this
+ } with interpreter.StructuredTypeStrings
+
+ /** There are common error conditions where when the exception hits
+ * here, currentRun.currentUnit is null. This robs us of the knowledge
+ * of what file was being compiled when it broke. Since I really
+ * really want to know, this hack.
+ */
+ protected var lastSeenSourceFile: SourceFile = NoSourceFile
+
+ /** Let's share a lot more about why we crash all over the place.
+ * People will be very grateful.
+ */
+ protected var lastSeenContext: analyzer.Context = null
+
/** The currently active run
*/
- def currentRun: Run = curRun
+ def currentRun: Run = curRun
+ def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit
+ def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile
+
+ // TODO - trim these to the absolute minimum.
+ @inline final def afterErasure[T](op: => T): T = afterPhase(currentRun.erasurePhase)(op)
+ @inline final def afterPostErasure[T](op: => T): T = afterPhase(currentRun.posterasurePhase)(op)
+ @inline final def afterExplicitOuter[T](op: => T): T = afterPhase(currentRun.explicitouterPhase)(op)
+ @inline final def afterFlatten[T](op: => T): T = afterPhase(currentRun.flattenPhase)(op)
+ @inline final def afterIcode[T](op: => T): T = afterPhase(currentRun.icodePhase)(op)
+ @inline final def afterMixin[T](op: => T): T = afterPhase(currentRun.mixinPhase)(op)
+ @inline final def afterPickler[T](op: => T): T = afterPhase(currentRun.picklerPhase)(op)
+ @inline final def afterRefchecks[T](op: => T): T = afterPhase(currentRun.refchecksPhase)(op)
+ @inline final def afterSpecialize[T](op: => T): T = afterPhase(currentRun.specializePhase)(op)
+ @inline final def afterTyper[T](op: => T): T = afterPhase(currentRun.typerPhase)(op)
+ @inline final def afterUncurry[T](op: => T): T = afterPhase(currentRun.uncurryPhase)(op)
+ @inline final def beforeErasure[T](op: => T): T = beforePhase(currentRun.erasurePhase)(op)
+ @inline final def beforeExplicitOuter[T](op: => T): T = beforePhase(currentRun.explicitouterPhase)(op)
+ @inline final def beforeFlatten[T](op: => T): T = beforePhase(currentRun.flattenPhase)(op)
+ @inline final def beforeIcode[T](op: => T): T = beforePhase(currentRun.icodePhase)(op)
+ @inline final def beforeMixin[T](op: => T): T = beforePhase(currentRun.mixinPhase)(op)
+ @inline final def beforePickler[T](op: => T): T = beforePhase(currentRun.picklerPhase)(op)
+ @inline final def beforeRefchecks[T](op: => T): T = beforePhase(currentRun.refchecksPhase)(op)
+ @inline final def beforeSpecialize[T](op: => T): T = beforePhase(currentRun.specializePhase)(op)
+ @inline final def beforeTyper[T](op: => T): T = beforePhase(currentRun.typerPhase)(op)
+ @inline final def beforeUncurry[T](op: => T): T = beforePhase(currentRun.uncurryPhase)(op)
+
+ def explainContext(c: analyzer.Context): String = (
+ if (c == null) "" else (
+ """| context owners: %s
+ |
+ |Enclosing block or template:
+ |%s""".format(
+ c.owner.ownerChain.takeWhile(!_.isPackageClass).mkString(" -> "),
+ nodePrinters.nodeToString(c.enclClassOrMethod.tree)
+ )
+ )
+ )
+ // Owners up to and including the first package class.
+ private def ownerChainString(sym: Symbol): String = (
+ if (sym == null) ""
+ else sym.ownerChain.span(!_.isPackageClass) match {
+ case (xs, pkg :: _) => (xs :+ pkg) mkString " -> "
+ case _ => sym.ownerChain mkString " -> " // unlikely
+ }
+ )
+ private def formatExplain(pairs: (String, Any)*): String = (
+ pairs.toList collect { case (k, v) if v != null => "%20s: %s".format(k, v) } mkString "\n"
+ )
+
+ def explainTree(t: Tree): String = formatExplain(
+ )
+
+ /** Don't want to introduce new errors trying to report errors,
+ * so swallow exceptions.
+ */
+ override def supplementErrorMessage(errorMessage: String): String =
+ if (currentRun.supplementedError) errorMessage
+ else try {
+ val tree = analyzer.lastTreeToTyper
+ val sym = tree.symbol
+ val tpe = tree.tpe
+ val enclosing = lastSeenContext.enclClassOrMethod.tree
+
+ val info1 = formatExplain(
+ "while compiling" -> currentSource.path,
+ "during phase" -> ( if (globalPhase eq phase) phase else "global=%s, atPhase=%s".format(globalPhase, phase) ),
+ "library version" -> scala.util.Properties.versionString,
+ "compiler version" -> Properties.versionString,
+ "reconstructed args" -> settings.recreateArgs.mkString(" ")
+ )
+ val info2 = formatExplain(
+ "last tree to typer" -> tree.summaryString,
+ "symbol" -> Option(sym).fold("null")(_.debugLocationString),
+ "symbol definition" -> Option(sym).fold("null")(_.defString),
+ "tpe" -> tpe,
+ "symbol owners" -> ownerChainString(sym),
+ "context owners" -> ownerChainString(lastSeenContext.owner)
+ )
+ val info3: List[String] = (
+ ( List("== Enclosing template or block ==", nodePrinters.nodeToString(enclosing).trim) )
+ ++ ( if (tpe eq null) Nil else List("== Expanded type of tree ==", typeDeconstruct.show(tpe)) )
+ ++ ( if (!opt.debug) Nil else List("== Current unit body ==", nodePrinters.nodeToString(currentUnit.body)) )
+ ++ ( List(errorMessage) )
+ )
+
+ currentRun.supplementedError = true
+
+ ("\n" + info1) :: info2 :: info3 mkString "\n\n"
+ }
+ catch { case _: Exception | _: TypeError => errorMessage }
/** The id of the currently active run
*/
@@ -658,13 +1192,72 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
inform("[running phase " + ph.name + " on " + currentRun.size + " compilation units]")
}
+ /** Collects for certain classes of warnings during this run. */
+ class ConditionalWarning(what: String, option: Settings#BooleanSetting) {
+ val warnings = mutable.LinkedHashMap[Position, String]()
+ def warn(pos: Position, msg: String) =
+ if (option.value) reporter.warning(pos, msg)
+ else if (!(warnings contains pos)) warnings += ((pos, msg))
+ def summarize() =
+ if (option.isDefault && warnings.nonEmpty)
+ reporter.warning(NoPosition, "there were %d %s warning(s); re-run with %s for details".format(warnings.size, what, option.name))
+ }
+
+ def newUnitParser(code: String) = new syntaxAnalyzer.UnitParser(newCompilationUnit(code))
+ def newUnitScanner(code: String) = new syntaxAnalyzer.UnitScanner(newCompilationUnit(code))
+ def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code))
+ def newSourceFile(code: String) = new BatchSourceFile("<console>", code)
+
/** A Run is a single execution of the compiler on a sets of units
*/
- class Run {
+ class Run extends RunContextApi {
+ /** Have been running into too many init order issues with Run
+ * during erroneous conditions. Moved all these vals up to the
+ * top of the file so at least they're not trivially null.
+ */
var isDefined = false
+ /** The currently compiled unit; set from GlobalPhase */
+ var currentUnit: CompilationUnit = NoCompilationUnit
+
+ // This change broke sbt; I gave it the thrilling name of uncheckedWarnings0 so
+ // as to recover uncheckedWarnings for its ever-fragile compiler interface.
+ val deprecationWarnings0 = new ConditionalWarning("deprecation", settings.deprecation)
+ val uncheckedWarnings0 = new ConditionalWarning("unchecked", settings.unchecked)
+ val featureWarnings = new ConditionalWarning("feature", settings.feature)
+ val inlinerWarnings = new ConditionalWarning("inliner", settings.YinlinerWarnings)
+ val allConditionalWarnings = List(deprecationWarnings0, uncheckedWarnings0, featureWarnings, inlinerWarnings)
+
+ // for sbt's benefit
+ def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList
+ def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList
+
+ var reportedFeature = Set[Symbol]()
+
+ /** Has any macro expansion used a fallback during this run? */
+ var seenMacroExpansionsFallingBack = false
+
+ /** Have we already supplemented the error message of a compiler crash? */
+ private[nsc] final var supplementedError = false
+
/** To be initialized from firstPhase. */
private var terminalPhase: Phase = NoPhase
+ private val unitbuf = new mutable.ListBuffer[CompilationUnit]
+ val compiledFiles = new mutable.HashSet[String]
+
+ /** A map from compiled top-level symbols to their source files */
+ val symSource = new mutable.HashMap[Symbol, AbstractFile]
+
+ /** A map from compiled top-level symbols to their picklers */
+ val symData = new mutable.HashMap[Symbol, PickleBuffer]
+
+ private var phasec: Int = 0 // phases completed
+ private var unitc: Int = 0 // units completed this phase
+ private var _unitbufSize = 0
+
+ def size = _unitbufSize
+ override def toString = "scalac Run for:\n " + compiledFiles.toList.sorted.mkString("\n ")
+
// Calculate where to stop based on settings -Ystop-before or -Ystop-after.
// Slightly complicated logic due to wanting -Ystop-before:parser to fail rather
// than mysteriously running to completion.
@@ -694,7 +1287,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
/** Set phase to a newly created syntaxAnalyzer and call definitions.init. */
val parserPhase: Phase = syntaxAnalyzer.newPhase(NoPhase)
phase = parserPhase
- definitions.init
+ definitions.init()
// Flush the cache in the terminal phase: the chain could have been built
// before without being used. (This happens in the interpreter.)
@@ -703,27 +1296,63 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
// Each subcomponent supplies a phase, which are chained together.
// If -Ystop:phase is given, neither that phase nor any beyond it is added.
// If -Yskip:phase is given, that phase will be skipped.
- val lastPhase = phaseDescriptors.tail .
- takeWhile (pd => !stopPhase(pd.phaseName)) .
- filterNot (pd => skipPhase(pd.phaseName)) .
- foldLeft (parserPhase) ((chain, ph) => ph newPhase chain)
-
- // Ensure there is a terminal phase at the end, since -Ystop may have limited the phases.
- terminalPhase =
- if (lastPhase.name == "terminal") lastPhase
- else terminal newPhase lastPhase
-
+ val phaseLinks = {
+ val phs = (
+ phaseDescriptors.tail
+ takeWhile (pd => !stopPhase(pd.phaseName))
+ filterNot (pd => skipPhase(pd.phaseName))
+ )
+ // Ensure there is a terminal phase at the end, since -Ystop may have limited the phases.
+ if (phs.isEmpty || (phs.last ne terminal)) phs :+ terminal
+ else phs
+ }
+ // Link them together.
+ phaseLinks.foldLeft(parserPhase)((chain, ph) => ph newPhase chain)
parserPhase
}
- // --------------- Miscellania -------------------------------
-
- /** The currently compiled unit; set from GlobalPhase */
- var currentUnit: CompilationUnit = _
+ /** Reset all classes contained in current project, as determined by
+ * the clearOnNextRun hook
+ */
+ @deprecated("use invalidateClassPathEntries instead", "2.10.0")
+ def resetProjectClasses(root: Symbol): Unit = try {
+ def unlink(sym: Symbol) =
+ if (sym != NoSymbol) root.info.decls.unlink(sym)
+ if (settings.verbose.value) inform("[reset] recursing in "+root)
+ val toReload = mutable.Set[String]()
+ for (sym <- root.info.decls) {
+ if (sym.isInitialized && clearOnNextRun(sym))
+ if (sym.isPackage) {
+ resetProjectClasses(sym.moduleClass)
+ openPackageModule(sym.moduleClass)
+ } else {
+ unlink(sym)
+ unlink(root.info.decls.lookup(
+ if (sym.isTerm) sym.name.toTypeName else sym.name.toTermName))
+ toReload += sym.fullName
+ // note: toReload could be set twice with the same name
+ // but reinit must happen only once per name. That's why
+ // the following classPath.findClass { ... } code cannot be moved here.
+ }
+ }
+ for (fullname <- toReload)
+ classPath.findClass(fullname) match {
+ case Some(classRep) =>
+ if (settings.verbose.value) inform("[reset] reinit "+fullname)
+ loaders.initializeFromClassPath(root, classRep)
+ case _ =>
+ }
+ } catch {
+ case ex: Throwable =>
+ // this handler should not be nessasary, but it seems that `fsc`
+ // eats exceptions if they appear here. Need to find out the cause for
+ // this and fix it.
+ inform("[reset] exception happened: "+ex);
+ ex.printStackTrace();
+ throw ex
+ }
- /** Counts for certain classes of warnings during this run. */
- var deprecationWarnings: Int = 0
- var uncheckedWarnings: Int = 0
+ // --------------- Miscellania -------------------------------
/** Progress tracking. Measured in "progress units" which are 1 per
* compilation unit per phase completed.
@@ -757,51 +1386,62 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def cancel() { reporter.cancelled = true }
- private var phasec: Int = 0 // phases completed
- private var unitc: Int = 0 // units completed this phase
private def currentProgress = (phasec * size) + unitc
private def totalProgress = (phaseDescriptors.size - 1) * size // -1: drops terminal phase
private def refreshProgress() = if (size > 0) progress(currentProgress, totalProgress)
// ----- finding phases --------------------------------------------
- def phaseNamed(name: String): Phase = {
- var p: Phase = firstPhase
- while (p.next != p && p.name != name) p = p.next
- if (p.name != name) NoPhase else p
- }
+ def phaseNamed(name: String): Phase =
+ findOrElse(firstPhase.iterator)(_.name == name)(NoPhase)
- val parserPhase = phaseNamed("parser")
- val namerPhase = phaseNamed("namer")
- // packageobjects
- val typerPhase = phaseNamed("typer")
- // superaccessors
- val picklerPhase = phaseNamed("pickler")
- val refchecksPhase = phaseNamed("refchecks")
- val uncurryPhase = phaseNamed("uncurry")
- // tailcalls, specialize
- val explicitouterPhase = phaseNamed("explicitouter")
- val erasurePhase = phaseNamed("erasure")
- // lazyvals, lambdalift, constructors
- val flattenPhase = phaseNamed("flatten")
- val mixinPhase = phaseNamed("mixin")
- val cleanupPhase = phaseNamed("cleanup")
- val icodePhase = phaseNamed("icode")
- // inliner, closelim, dce
- val jvmPhase = phaseNamed("jvm")
+ /** All phases as of 3/2012 here for handiness; the ones in
+ * active use uncommented.
+ */
+ val parserPhase = phaseNamed("parser")
+ val namerPhase = phaseNamed("namer")
+ // val packageobjectsPhase = phaseNamed("packageobjects")
+ val typerPhase = phaseNamed("typer")
+ val inlineclassesPhase = phaseNamed("inlineclasses")
+ // val superaccessorsPhase = phaseNamed("superaccessors")
+ val picklerPhase = phaseNamed("pickler")
+ val refchecksPhase = phaseNamed("refchecks")
+ // val selectiveanfPhase = phaseNamed("selectiveanf")
+ // val selectivecpsPhase = phaseNamed("selectivecps")
+ val uncurryPhase = phaseNamed("uncurry")
+ // val tailcallsPhase = phaseNamed("tailcalls")
+ val specializePhase = phaseNamed("specialize")
+ val explicitouterPhase = phaseNamed("explicitouter")
+ val erasurePhase = phaseNamed("erasure")
+ val posterasurePhase = phaseNamed("posterasure")
+ // val lazyvalsPhase = phaseNamed("lazyvals")
+ val lambdaliftPhase = phaseNamed("lambdalift")
+ // val constructorsPhase = phaseNamed("constructors")
+ val flattenPhase = phaseNamed("flatten")
+ val mixinPhase = phaseNamed("mixin")
+ val cleanupPhase = phaseNamed("cleanup")
+ val icodePhase = phaseNamed("icode")
+ val inlinerPhase = phaseNamed("inliner")
+ val inlineExceptionHandlersPhase = phaseNamed("inlineExceptionHandlers")
+ val closelimPhase = phaseNamed("closelim")
+ val dcePhase = phaseNamed("dce")
+ val jvmPhase = phaseNamed("jvm")
+ // val msilPhase = phaseNamed("msil")
def runIsAt(ph: Phase) = globalPhase.id == ph.id
def runIsPast(ph: Phase) = globalPhase.id > ph.id
+ // def runIsAtBytecodeGen = (runIsAt(jvmPhase) || runIsAt(msilPhase))
+ def runIsAtOptimiz = {
+ runIsAt(inlinerPhase) || // listing phases in full for robustness when -Ystop-after has been given.
+ runIsAt(inlineExceptionHandlersPhase) ||
+ runIsAt(closelimPhase) ||
+ runIsAt(dcePhase)
+ }
isDefined = true
// ----------- Units and top-level classes and objects --------
- private val unitbuf = new mutable.ListBuffer[CompilationUnit]
- val compiledFiles = new mutable.HashSet[String]
-
- private var _unitbufSize = 0
- def size = _unitbufSize
/** add unit to be compiled in this run */
private def addUnit(unit: CompilationUnit) {
@@ -814,6 +1454,8 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
settings.userSetSettings filter (_.isDeprecated) foreach { s =>
unit.deprecationWarning(NoPosition, s.name + " is deprecated: " + s.deprecationMessage.get)
}
+ if (settings.target.value.contains("jvm-1.5"))
+ unit.deprecationWarning(NoPosition, settings.target.name + ":" + settings.target.value + " is deprecated: use target for Java 1.6 or above.")
}
/* An iterator returning all the units being compiled in this run */
@@ -825,26 +1467,13 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
*/
def units: Iterator[CompilationUnit] = unitbuf.iterator
- /** A map from compiled top-level symbols to their source files */
- val symSource = new mutable.HashMap[Symbol, AbstractFile]
-
- /** A map from compiled top-level symbols to their picklers */
- val symData = new mutable.HashMap[Symbol, PickleBuffer]
-
- def registerPickle(sym: Symbol): Unit = {
- // Convert all names to the type name: objects don't store pickled data
- if (opt.showPhase && (opt.showNames exists (x => findNamedMember(x.toTypeName, sym) != NoSymbol))) {
- symData get sym foreach { pickle =>
- ShowPickled.show("\n<<-- " + sym.fullName + " -->>\n", pickle, false)
- }
- }
- }
+ def registerPickle(sym: Symbol): Unit = ()
/** does this run compile given class, module, or case factory? */
def compiles(sym: Symbol): Boolean =
if (sym == NoSymbol) false
else if (symSource.isDefinedAt(sym)) true
- else if (!sym.owner.isPackageClass) compiles(sym.toplevelClass)
+ else if (!sym.owner.isPackageClass) compiles(sym.enclosingTopLevelClass)
else if (sym.isModuleClass) compiles(sym.sourceModule)
else false
@@ -873,16 +1502,13 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
private def showMembers() =
opt.showNames foreach (x => showDef(x, opt.declsOnly, globalPhase))
- // If -Yprofile isn't given this will never be triggered.
- lazy val profiler = Class.forName(opt.profileClass).newInstance().asInstanceOf[Profiling]
-
// Similarly, this will only be created under -Yshow-syms.
object trackerFactory extends SymbolTrackers {
val global: Global.this.type = Global.this
lazy val trackers = currentRun.units.toList map (x => SymbolTracker(x))
def snapshot() = {
inform("\n[[symbol layout at end of " + phase + "]]")
- atPhase(phase.next) {
+ afterPhase(phase) {
trackers foreach { t =>
t.snapshot()
inform(t.show("Heading from " + phase.prev.name + " to " + phase.name))
@@ -900,12 +1526,11 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
}
}
else {
- def warn(count: Int, what: String, option: Settings#BooleanSetting) = (
- if (option.isDefault && count > 0)
- warning("there were %d %s warnings; re-run with %s for details".format(count, what, option.name))
- )
- warn(deprecationWarnings, "deprecation", settings.deprecation)
- warn(uncheckedWarnings, "unchecked", settings.unchecked)
+ allConditionalWarnings foreach (_.summarize)
+
+ if (seenMacroExpansionsFallingBack)
+ warning("some macros could not be expanded and code fell back to overridden methods;"+
+ "\nrecompiling with generated classfiles on the classpath might help.")
// todo: migrationWarnings
}
}
@@ -920,55 +1545,55 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
// nothing to compile, but we should still report use of deprecated options
if (sources.isEmpty) {
- checkDeprecatedSettings(new CompilationUnit(new BatchSourceFile("<no file>", "")))
+ checkDeprecatedSettings(newCompilationUnit(""))
reportCompileErrors()
return
}
- val startTime = currentTime
- reporter.reset();
- {
- val first :: rest = sources
- val unit = new CompilationUnit(first)
- addUnit(unit)
- checkDeprecatedSettings(unit)
-
- for (source <- rest)
- addUnit(new CompilationUnit(source))
- }
- globalPhase = firstPhase
+ compileUnits(sources map (new CompilationUnit(_)), firstPhase)
+ }
- if (opt.profileAll) {
- inform("starting CPU profiling on compilation run")
- profiler.startProfiling()
+ def compileUnits(units: List[CompilationUnit], fromPhase: Phase) {
+ try compileUnitsInternal(units, fromPhase)
+ catch { case ex: Throwable =>
+ val shown = if (settings.verbose.value) {
+ val pw = new java.io.PrintWriter(new java.io.StringWriter)
+ ex.printStackTrace(pw)
+ pw.toString
+ } else ex.getClass.getName
+ // ex.printStackTrace(Console.out) // DEBUG for fsc, note that error stacktraces do not print in fsc
+ globalError(supplementErrorMessage("uncaught exception during compilation: " + shown))
+ throw ex
}
- while (globalPhase != terminalPhase && !reporter.hasErrors) {
- val startTime = currentTime
- phase = globalPhase
+ }
- if (opt.profCPUPhase) {
- inform("starting CPU profiling on phase " + globalPhase.name)
- profiler profile globalPhase.run
- }
- else globalPhase.run
+ private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) {
+ doInvalidation()
+
+ units foreach addUnit
+ val startTime = currentTime
- // Create a profiling generation for each phase's allocations
- if (opt.profileAny)
- profiler.advanceGeneration(globalPhase.name)
+ reporter.reset()
+ checkDeprecatedSettings(unitbuf.head)
+ globalPhase = fromPhase
+
+ while (globalPhase.hasNext && !reporter.hasErrors) {
+ val startTime = currentTime
+ phase = globalPhase
+ globalPhase.run
// progress update
informTime(globalPhase.description, startTime)
- phaseTimings(globalPhase) = currentTime - startTime
- // write icode to *.icode files
- if (opt.writeICode && (runIsAt(icodePhase) || opt.printPhase && runIsPast(icodePhase)))
+ if (opt.writeICodeAtICode || (opt.printPhase && runIsAtOptimiz)) {
+ // Write *.icode files when -Xprint-icode or -Xprint:<some-optimiz-phase> was given.
writeICode()
-
- // print trees
- if (opt.printPhase || opt.printLate && runIsAt(cleanupPhase)) {
+ } else if (opt.printPhase || opt.printLate && runIsAt(cleanupPhase)) {
+ // print trees
if (opt.showTrees) nodePrinters.printAll()
else printAllUnits()
}
+
// print the symbols presently attached to AST nodes
if (opt.showSymbols)
trackerFactory.snapshot()
@@ -994,11 +1619,9 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
advancePhase
}
- if (opt.profileAll)
- profiler.stopProfiling()
- if (opt.timings)
- inform(phaseTimings.formatted)
+ if (traceSymbolActivity)
+ units map (_.body) foreach (traceSymbols recordSymbolsInTree _)
// In case no phase was specified for -Xshow-class/object, show it now for sure.
if (opt.noShow)
@@ -1008,21 +1631,19 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
symSource.keys foreach (x => resetPackageClass(x.owner))
informTime("total", startTime)
- // save heap snapshot if requested
- if (opt.profileMem) {
- inform("Saving heap snapshot, this could take a while...")
- System.gc()
- profiler.captureSnapshot()
- inform("...done saving heap snapshot.")
- specializeTypes.printSpecStats()
- }
-
// record dependency data
if (!dependencyAnalysis.off)
dependencyAnalysis.saveDependencyAnalysis()
// Clear any sets or maps created via perRunCaches.
perRunCaches.clearAll()
+
+ // Reset project
+ if (!stopPhase("namer")) {
+ atPhase(namerPhase) {
+ resetProjectClasses(RootClass)
+ }
+ }
}
/** Compile list of abstract files. */
@@ -1054,34 +1675,15 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
/** Compile abstract file until `globalPhase`, but at least to phase "namer".
*/
def compileLate(unit: CompilationUnit) {
- def stop(ph: Phase) = ph == null || ph.id >= (globalPhase.id max typerPhase.id)
- def loop(ph: Phase) {
- if (stop(ph)) refreshProgress
- else {
- atPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit)
- loop(ph.next match {
- case `ph` => null // ph == ph.next implies terminal, and null ends processing
- case x => x
- })
- }
- }
addUnit(unit)
- loop(firstPhase)
- }
- /**
- * Attempt to locate a source file providing the given name as a top-level
- * definition in the given context, and add it to the run via compileLate
- * if found.
- */
- def compileSourceFor(context : analyzer.Context, name : Name) = false
-
- /**
- * Attempt to locate a source file providing the given name as a top-level
- * definition with the given prefix, and add it to the run via compileLate
- * if found.
- */
- def compileSourceFor(qual : Tree, name : Name) = false
+ if (firstPhase ne null) { // we might get here during initialization, is a source is newer than the binary
+ val maxId = math.max(globalPhase.id, typerPhase.id)
+ firstPhase.iterator takeWhile (_.id < maxId) foreach (ph =>
+ atPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit))
+ refreshProgress
+ }
+ }
/** Reset package class to state at typer (not sure what this
* is needed for?)
@@ -1095,7 +1697,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
/**
* Re-orders the source files to
- * 1. ScalaObject
+ * 1. This Space Intentionally Left Blank
* 2. LowPriorityImplicits / EmbeddedControls (i.e. parents of Predef)
* 3. the rest
*
@@ -1123,7 +1725,6 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def rank(f: SourceFile) = {
if (f.file.container.name != "scala") goLast
else f.file.name match {
- case "ScalaObject.scala" => 1
case "LowPriorityImplicits.scala" => 2
case "StandardEmbeddings.scala" => 2
case "EmbeddedControls.scala" => 2
@@ -1136,29 +1737,16 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
} // class Run
def printAllUnits() {
- print("[[syntax trees at end of " + phase + "]]")
- atPhase(phase.next) { currentRun.units foreach treePrinter.print }
- }
-
- private def findMemberFromRoot(fullName: Name): Symbol = {
- val segs = nme.segments(fullName.toString, fullName.isTermName)
- if (segs.isEmpty) NoSymbol
- else findNamedMember(segs.tail, definitions.RootClass.info member segs.head)
- }
-
- private def findNamedMember(fullName: Name, root: Symbol): Symbol = {
- val segs = nme.segments(fullName.toString, fullName.isTermName)
- if (segs.isEmpty || segs.head != root.simpleName) NoSymbol
- else findNamedMember(segs.tail, root)
+ print("[[syntax trees at end of %25s]]".format(phase))
+ afterPhase(phase)(currentRun.units foreach { unit =>
+ nodePrinters showUnit unit
+ })
}
- private def findNamedMember(segs: List[Name], root: Symbol): Symbol =
- if (segs.isEmpty) root
- else findNamedMember(segs.tail, root.info member segs.head)
/** We resolve the class/object ambiguity by passing a type/term name.
*/
def showDef(fullName: Name, declsOnly: Boolean, ph: Phase) = {
- val boringOwners = Set(definitions.AnyClass, definitions.AnyRefClass, definitions.ObjectClass)
+ val boringOwners = Set[Symbol](definitions.AnyClass, definitions.AnyRefClass, definitions.ObjectClass)
def phased[T](body: => T): T = afterPhase(ph)(body)
def boringMember(sym: Symbol) = boringOwners(sym.owner)
def symString(sym: Symbol) = if (sym.isTerm) sym.defString else sym.toString
@@ -1187,20 +1775,20 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
}
}
- /** Returns the file with the given suffix for the given class. Used for icode writing. */
- def getFile(clazz: Symbol, suffix: String): File = {
+ def getFile(source: AbstractFile, segments: Array[String], suffix: String): File = {
val outDir = Path(
- settings.outputDirs.outputDirFor(clazz.sourceFile).path match {
+ settings.outputDirs.outputDirFor(source).path match {
case "" => "."
case path => path
}
)
- val segments = clazz.fullName split '.'
val dir = segments.init.foldLeft(outDir)(_ / _).createDirectory()
-
new File(dir.path, segments.last + suffix)
}
+ /** Returns the file with the given suffix for the given class. Used for icode writing. */
+ def getFile(clazz: Symbol, suffix: String): File = getFile(clazz.sourceFile, clazz.fullName split '.', suffix)
+
private def writeICode() {
val printer = new icodes.TextPrinter(null, icodes.linearizer)
icodes.classes.values.foreach((cls) => {
@@ -1227,7 +1815,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
// to false except in old code. The downside is that this leaves us calling a
// deprecated method: but I see no simple way out, so I leave it for now.
def forJVM = opt.jvm
- def forMSIL = opt.msil
+ override def forMSIL = opt.msil
def forInteractive = onlyPresentation
def forScaladoc = onlyPresentation
def createJavadoc = false
@@ -1235,3 +1823,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
@deprecated("Use forInteractive or forScaladoc, depending on what you're after", "2.9.0")
def onlyPresentation = false
}
+
+object Global {
+ def apply(settings: Settings, reporter: Reporter): Global = new Global(settings, reporter)
+}
diff --git a/src/compiler/scala/tools/nsc/InterpreterCommand.scala b/src/compiler/scala/tools/nsc/InterpreterCommand.scala
deleted file mode 100644
index e25a83e..0000000
--- a/src/compiler/scala/tools/nsc/InterpreterCommand.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package scala.tools.nsc
-
-import interpreter._
-
-/** A compatibility stub.
- */
- at deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0")
-class InterpreterCommand(arguments: List[String], error: String => Unit) extends CommandLine(arguments, error) { }
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala
index de1d148..7d112df 100644
--- a/src/compiler/scala/tools/nsc/Main.scala
+++ b/src/compiler/scala/tools/nsc/Main.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -11,24 +11,13 @@ import File.pathSeparator
import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager }
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.tools.nsc.util.{ BatchSourceFile, FakePos } //{Position}
-import Properties.{ versionString, copyrightString, residentPromptString, msilLibPath }
+import scala.reflect.internal.util.{ BatchSourceFile, FakePos } //{Position}
+import Properties.msilLibPath
/** The main class for NSC, a compiler for the programming
- * language Scala.
+ * language Scala.
*/
-object Main extends AnyRef with EvalLoop {
- val versionMsg = "Scala compiler " +
- versionString + " -- " +
- copyrightString
-
- val prompt = residentPromptString
-
- var reporter: ConsoleReporter = _
-
- private def scalacError(msg: String) {
- reporter.error(FakePos("scalac"), msg + "\n scalac -help gives more information")
- }
+object Main extends Driver with EvalLoop {
def resident(compiler: Global) {
loop { line =>
@@ -39,15 +28,8 @@ object Main extends AnyRef with EvalLoop {
}
}
- def process(args: Array[String]) {
- val ss = new Settings(scalacError)
- reporter = new ConsoleReporter(ss)
- val command = new CompilerCommand(args.toList, ss)
- val settings = command.settings
-
- if (settings.version.value)
- reporter.info(null, versionMsg, true)
- else if (settings.Yidedebug.value) {
+ override def processSettingsHook(): Boolean =
+ if (settings.Yidedebug.value) {
settings.Xprintpos.value = true
settings.Yrangepos.value = true
val compiler = new interactive.Global(settings, reporter)
@@ -62,6 +44,7 @@ object Main extends AnyRef with EvalLoop {
case None => reporter.reset() // Causes other compiler errors to be ignored
}
askShutdown
+ false
}
else if (settings.Ybuilderdebug.value != "none") {
def fileSet(files : List[String]) = Set.empty ++ (files map AbstractFile.getFile)
@@ -78,50 +61,21 @@ object Main extends AnyRef with EvalLoop {
val command = new CompilerCommand(args.toList, settings)
buildManager.update(fileSet(command.files), Set.empty)
}
+ false
}
else {
if (settings.target.value == "msil")
msilLibPath foreach (x => settings.assemrefs.value += (pathSeparator + x))
-
- val compiler =
- if (settings.Yrangepos.value) new interactive.Global(settings, reporter)
- else new Global(settings, reporter)
-
- try {
- if (reporter.hasErrors)
- return reporter.flush()
-
- if (command.shouldStopWithInfo) {
- reporter.info(null, command.getInfoMessage(compiler), true)
- }
- else {
- if (settings.resident.value)
- resident(compiler)
- else if (command.files.isEmpty) {
- reporter.info(null, command.usageMsg, true)
- reporter.info(null, compiler.pluginOptionsHelp, true)
- }
- else {
- val run = new compiler.Run()
- run compile command.files
- reporter.printSummary()
- }
- }
- }
- catch {
- case ex =>
- compiler.logThrowable(ex)
- ex match {
- case FatalError(msg) => reporter.error(null, "fatal error: " + msg)
- case _ => throw ex
- }
- }
+ true
}
- }
- def main(args: Array[String]) {
- process(args)
- sys.exit(if (reporter.hasErrors) 1 else 0)
- }
+ override def newCompiler(): Global =
+ if (settings.Yrangepos.value) new Global(settings, reporter) with interactive.RangePositions
+ else Global(settings, reporter)
+ override def doCompile(compiler: Global) {
+ if (settings.resident.value)
+ resident(compiler)
+ else super.doCompile(compiler)
+ }
}
diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala
new file mode 100644
index 0000000..f18ff19
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/MainBench.scala
@@ -0,0 +1,48 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+
+import java.io.File
+import File.pathSeparator
+
+import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager }
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
+import scala.reflect.internal.util.{ BatchSourceFile, FakePos } //{Position}
+import Properties.{ versionString, copyrightString, residentPromptString, msilLibPath }
+import scala.reflect.internal.util.Statistics
+
+/** The main class for NSC, a compiler for the programming
+ * language Scala.
+ */
+object MainBench extends Driver with EvalLoop {
+
+ lazy val theCompiler = Global(settings, reporter)
+
+ override def newCompiler() = theCompiler
+
+ val NIter = 50
+ val NBest = 10
+
+ override def main(args: Array[String]) = {
+ val times = new Array[Long](NIter)
+ var start = System.nanoTime()
+ for (i <- 0 until NIter) {
+ if (i == NIter-1) {
+ theCompiler.settings.Ystatistics.value = true
+ Statistics.enabled = true
+ }
+ process(args)
+ val end = System.nanoTime()
+ val duration = (end-start)/1000000
+ println(s"${duration}ms")
+ times(i) = duration
+ start = end
+ }
+ val avg = times.sorted.take(NBest).sum / NBest
+ println(s"avg shortest $NBest times ${avg}ms")
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/MainGenericRunner.scala b/src/compiler/scala/tools/nsc/MainGenericRunner.scala
index f2d4bac..e4a20b4 100644
--- a/src/compiler/scala/tools/nsc/MainGenericRunner.scala
+++ b/src/compiler/scala/tools/nsc/MainGenericRunner.scala
@@ -1,20 +1,34 @@
/* NSC -- new Scala compiler
- * Copyright 2006-2011 LAMP/EPFL
+ * Copyright 2006-2013 LAMP/EPFL
* @author Lex Spoon
*/
package scala.tools.nsc
-import java.io.IOException
import java.net.URL
import scala.tools.util.PathResolver
-
import io.{ File }
import util.{ ClassPath, ScalaClassLoader }
import Properties.{ versionString, copyrightString }
import interpreter.{ ILoop }
import GenericRunnerCommand._
+object JarRunner extends CommonRunner {
+ def runJar(settings: GenericRunnerSettings, jarPath: String, arguments: Seq[String]): Either[Throwable, Boolean] = {
+ val jar = new io.Jar(jarPath)
+ val mainClass = jar.mainClass getOrElse sys.error("Cannot find main class for jar: " + jarPath)
+ val jarURLs = ClassPath expandManifestPath jarPath
+ val urls = if (jarURLs.isEmpty) File(jarPath).toURL +: settings.classpathURLs else jarURLs
+
+ if (settings.Ylogcp.value) {
+ Console.err.println("Running jar with these URLs as the classpath:")
+ urls foreach println
+ }
+
+ runAndCatch(urls, mainClass, arguments)
+ }
+}
+
/** An object that runs Scala code. It has three possible
* sources for the code to run: pre-compiled code, a script file,
* or interactive entry.
@@ -25,7 +39,7 @@ class MainGenericRunner {
false
}
def errorFn(str: String): Boolean = {
- Console println str
+ Console.err println str
false
}
@@ -44,6 +58,10 @@ class MainGenericRunner {
def isI = !settings.loadfiles.isDefault
def dashi = settings.loadfiles.value
+ // Deadlocks on startup under -i unless we disable async.
+ if (isI)
+ settings.Yreplsync.value = true
+
def combinedCode = {
val files = if (isI) dashi map (file => File(file).slurp()) else Nil
val str = if (isE) List(dashe) else Nil
@@ -57,11 +75,9 @@ class MainGenericRunner {
case AsScript =>
ScriptRunner.runScriptAndCatch(settings, thingToRun, command.arguments)
case AsJar =>
- ObjectRunner.runAndCatch(
- File(thingToRun).toURL +: settings.classpathURLs,
- new io.Jar(thingToRun).mainClass getOrElse sys.error("Cannot find main class for jar: " + thingToRun),
- command.arguments
- )
+ JarRunner.runJar(settings, thingToRun, command.arguments)
+ case Error =>
+ Right(false)
case _ =>
// We start the repl when no arguments are given.
Right(new ILoop process settings)
diff --git a/src/compiler/scala/tools/nsc/MainInterpreter.scala b/src/compiler/scala/tools/nsc/MainInterpreter.scala
deleted file mode 100644
index 5d190bb..0000000
--- a/src/compiler/scala/tools/nsc/MainInterpreter.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Lex Spoon
- */
-
-package scala.tools.nsc
-
-import interpreter._
-
- at deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0")
-object MainInterpreter {
- def main(args: Array[String]): Unit = new ILoop main args
-}
diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
index 5373638..50cd51d 100644
--- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala
+++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
diff --git a/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala b/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala
index 94df502..2b4cd80 100644
--- a/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala
+++ b/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
diff --git a/src/compiler/scala/tools/nsc/NoPhase.scala b/src/compiler/scala/tools/nsc/NoPhase.scala
deleted file mode 100644
index a74d14a..0000000
--- a/src/compiler/scala/tools/nsc/NoPhase.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-
-object NoPhase extends Phase(null) {
- def name = "<no phase>"
- def run() { throw new Error("NoPhase.run") }
-}
diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala
index 6ff0718..f512351 100644
--- a/src/compiler/scala/tools/nsc/ObjectRunner.scala
+++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Lex Spoon
*/
@@ -11,12 +11,7 @@ import util.ScalaClassLoader
import java.lang.reflect.InvocationTargetException
import util.Exceptional.unwrap
-/** An object that runs another object specified by name.
- *
- * @author Lex Spoon
- * @version 1.1, 2007/7/13
- */
-object ObjectRunner {
+trait CommonRunner {
/** Check whether a class with the specified name
* exists on the specified class path. */
def classExists(urls: List[URL], objectName: String): Boolean =
@@ -38,6 +33,13 @@ object ObjectRunner {
*/
def runAndCatch(urls: List[URL], objectName: String, arguments: Seq[String]): Either[Throwable, Boolean] = {
try { run(urls, objectName, arguments) ; Right(true) }
- catch { case e => Left(unwrap(e)) }
+ catch { case e: Throwable => Left(unwrap(e)) }
}
}
+
+/** An object that runs another object specified by name.
+ *
+ * @author Lex Spoon
+ * @version 1.1, 2007/7/13
+ */
+object ObjectRunner extends CommonRunner { }
diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
index 6d6b99e..caf6ad1 100644
--- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -33,7 +33,7 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext
}
else {
// Otherwise we're on the server and will use it to absolutize the paths.
- settings.absolutize(currentDir.value)
+ settings.absolutize()
}
}
diff --git a/src/compiler/scala/tools/nsc/Phase.scala b/src/compiler/scala/tools/nsc/Phase.scala
deleted file mode 100644
index f79b7c4..0000000
--- a/src/compiler/scala/tools/nsc/Phase.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-
-import symtab.Flags
-import util.TableDef
-
-abstract class Phase(val prev: Phase) {
-
- type Id = Int
-
- val id: Id = if (prev eq null) 0 else prev.id + 1
-
- /** New flags visible after this phase has completed */
- def nextFlags: Long = 0l
-
- /** New flags visible once this phase has started */
- def newFlags: Long = 0l
-
- private var fmask: Long =
- if (prev eq null) Flags.InitialFlags else prev.flagMask | prev.nextFlags | newFlags
- def flagMask: Long = fmask
-
- private var nx: Phase = this
- if ((prev ne null) && (prev ne NoPhase)) prev.nx = this
-
- def next: Phase = nx
-
- def name: String
- def description: String = name
- // Will running with -Ycheck:name work?
- def checkable: Boolean = true
- // def devirtualized: Boolean = false
- def specialized: Boolean = false
- def erasedTypes: Boolean = false
- def flatClasses: Boolean = false
- def refChecked: Boolean = false
- def keepsTypeParams = true
- def run(): Unit
-
- override def toString() = name
- override def hashCode = id.## + name.##
- override def equals(other: Any) = other match {
- case x: Phase => id == x.id && name == x.name
- case _ => false
- }
-}
-
-object Phase {
- val MaxPhases = 64
-
- /** A class for tracking something about each phase.
- */
- class Model[T: Manifest] {
- case class Cell(ph: Phase, value: T) {
- def name = ph.name
- def id = ph.id
- }
- val values = new Array[Cell](MaxPhases + 1)
- def results = values filterNot (_ == null)
- def apply(ph: Phase): T = values(ph.id).value
- def update(ph: Phase, value: T): Unit = values(ph.id) = Cell(ph, value)
- }
- /** A class for recording the elapsed time of each phase in the
- * interests of generating a classy and informative table.
- */
- class TimingModel extends Model[Long] {
- var total: Long = 0
- def table() = {
- total = results map (_.value) sum;
- new Format.Table(results sortBy (-_.value))
- }
- object Format extends TableDef[Cell] {
- >> ("phase" -> (_.name)) >+ " "
- << ("id" -> (_.id)) >+ " "
- >> ("ms" -> (_.value)) >+ " "
- << ("share" -> (_.value.toDouble * 100 / total formatted "%.2f"))
- }
- def formatted = "" + table()
- }
-}
-
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index f25ea6f..cff3590 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Anders Bach Nielsen
* @version 1.0
*/
@@ -8,6 +8,7 @@ package scala.tools.nsc
import java.io.{ BufferedWriter, FileWriter }
import scala.collection.mutable
+import scala.language.postfixOps
/**
* PhaseAssembly
@@ -185,7 +186,7 @@ trait PhaseAssembly {
* dependency on something that is dropped.
*/
def removeDanglingNodes() {
- for (node <- nodes.valuesIterator filter (_.phaseobj.isEmpty)) {
+ for (node <- nodes.values filter (_.phaseobj.isEmpty)) {
val msg = "dropping dependency on node with no phase object: "+node.phasename
informProgress(msg)
nodes -= node.phasename
diff --git a/src/compiler/scala/tools/nsc/Phases.scala b/src/compiler/scala/tools/nsc/Phases.scala
new file mode 100644
index 0000000..0901ade
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/Phases.scala
@@ -0,0 +1,46 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+
+import symtab.Flags
+import scala.reflect.internal.util.TableDef
+import scala.language.postfixOps
+
+ at deprecated("Scheduled for removal as being a dead-code in the compiler.", "2.10.1")
+object Phases {
+ val MaxPhases = 64
+
+ /** A class for tracking something about each phase.
+ */
+ class Model[T] {
+ case class Cell(ph: Phase, value: T) {
+ def name = ph.name
+ def id = ph.id
+ }
+ val values = new Array[Cell](MaxPhases + 1)
+ def results = values filterNot (_ == null)
+ def apply(ph: Phase): T = values(ph.id).value
+ def update(ph: Phase, value: T): Unit = values(ph.id) = Cell(ph, value)
+ }
+ /** A class for recording the elapsed time of each phase in the
+ * interests of generating a classy and informative table.
+ */
+ class TimingModel extends Model[Long] {
+ var total: Long = 0
+ def table() = {
+ total = results map (_.value) sum;
+ new Format.Table(results sortBy (-_.value))
+ }
+ object Format extends TableDef[Cell] {
+ >> ("phase" -> (_.name)) >+ " "
+ << ("id" -> (_.id)) >+ " "
+ >> ("ms" -> (_.value)) >+ " "
+ << ("share" -> (_.value.toDouble * 100 / total formatted "%.2f"))
+ }
+ def formatted = "" + table()
+ }
+}
+
diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala
index f88b7e8..55fd196 100644
--- a/src/compiler/scala/tools/nsc/Properties.scala
+++ b/src/compiler/scala/tools/nsc/Properties.scala
@@ -1,25 +1,25 @@
/* NSC -- new Scala compiler
- * Copyright 2006-2011 LAMP/EPFL
+ * Copyright 2006-2013 LAMP/EPFL
* @author Stephane Micheloud
*/
-
package scala.tools.nsc
-/** Loads compiler.properties from the jar. */
+/** Loads `compiler.properties` from the jar archive file.
+ */
object Properties extends scala.util.PropertiesTrait {
- protected def propCategory = "compiler"
- protected def pickJarBasedOn = classOf[Global]
+ protected def propCategory = "compiler"
+ protected def pickJarBasedOn = classOf[Global]
// settings based on jar properties
- def fileEndingString = scalaPropOrElse("file.ending", ".scala|.java")
- def residentPromptString = scalaPropOrElse("resident.prompt", "\nnsc> ")
- def shellPromptString = scalaPropOrElse("shell.prompt", "\nscala> ")
+ def fileEndingString = scalaPropOrElse("file.ending", ".scala|.java")
+ def residentPromptString = scalaPropOrElse("resident.prompt", "\nnsc> ")
+ def shellPromptString = scalaPropOrElse("shell.prompt", "\nscala> ")
// settings based on system properties
- def msilLibPath = propOrNone("msil.libpath")
+ def msilLibPath = propOrNone("msil.libpath")
// derived values
- def isEmacsShell = propOrEmpty("env.emacs") != ""
- def fileEndings = fileEndingString.split("""\|""").toList
+ def isEmacsShell = propOrEmpty("env.emacs") != ""
+ def fileEndings = fileEndingString.split("""\|""").toList
}
diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/compiler/scala/tools/nsc/ScalaDoc.scala
index a9330b0..ba434bc 100644
--- a/src/compiler/scala/tools/nsc/ScalaDoc.scala
+++ b/src/compiler/scala/tools/nsc/ScalaDoc.scala
@@ -1,5 +1,5 @@
/* scaladoc, a documentation generator for Scala
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
* @author Geoffrey Washburn
*/
@@ -9,7 +9,7 @@ package scala.tools.nsc
import java.io.File.pathSeparator
import scala.tools.nsc.doc.DocFactory
import scala.tools.nsc.reporters.ConsoleReporter
-import scala.tools.nsc.util.FakePos
+import scala.reflect.internal.util.FakePos
import Properties.msilLibPath
/** The main class for scaladoc, a front-end for the Scala compiler
@@ -20,7 +20,8 @@ class ScalaDoc {
def process(args: Array[String]): Boolean = {
var reporter: ConsoleReporter = null
- val docSettings = new doc.Settings(msg => reporter.error(FakePos("scaladoc"), msg + "\n scaladoc -help gives more information"))
+ val docSettings = new doc.Settings(msg => reporter.error(FakePos("scaladoc"), msg + "\n scaladoc -help gives more information"),
+ msg => reporter.printMessage(msg))
reporter = new ConsoleReporter(docSettings) {
// need to do this so that the Global instance doesn't trash all the
// symbols just because there was an error
@@ -30,17 +31,17 @@ class ScalaDoc {
def hasFiles = command.files.nonEmpty || docSettings.uncompilableFiles.nonEmpty
if (docSettings.version.value)
- reporter.info(null, versionMsg, true)
+ reporter.echo(versionMsg)
else if (docSettings.Xhelp.value)
- reporter.info(null, command.xusageMsg, true)
+ reporter.echo(command.xusageMsg)
else if (docSettings.Yhelp.value)
- reporter.info(null, command.yusageMsg, true)
+ reporter.echo(command.yusageMsg)
else if (docSettings.showPlugins.value)
reporter.warning(null, "Plugins are not available when using Scaladoc")
else if (docSettings.showPhases.value)
reporter.warning(null, "Phases are restricted when using Scaladoc")
else if (docSettings.help.value || !hasFiles)
- reporter.info(null, command.usageMsg, true)
+ reporter.echo(command.usageMsg)
else try {
if (docSettings.target.value == "msil")
msilLibPath foreach (x => docSettings.assemrefs.value += (pathSeparator + x))
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index 3ec5b2f..107c4b3 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -84,7 +84,7 @@ class ScriptRunner extends HasCompileSocket {
}
protected def newGlobal(settings: Settings, reporter: Reporter) =
- new Global(settings, reporter)
+ Global(settings, reporter)
/** Compile a script and then run the specified closure with
* a classpath for the compiled script.
@@ -199,7 +199,7 @@ class ScriptRunner extends HasCompileSocket {
scriptArgs: List[String]): Either[Throwable, Boolean] =
{
try Right(runScript(settings, scriptFile, scriptArgs))
- catch { case e => Left(unwrap(e)) }
+ catch { case e: Throwable => Left(unwrap(e)) }
}
/** Run a command
diff --git a/src/compiler/scala/tools/nsc/Settings.scala b/src/compiler/scala/tools/nsc/Settings.scala
index ca99ddf..b64f278 100644
--- a/src/compiler/scala/tools/nsc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/Settings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -11,4 +11,10 @@ import settings.MutableSettings
*/
class Settings(errorFn: String => Unit) extends MutableSettings(errorFn) {
def this() = this(Console.println)
+
+ override def withErrorFn(errorFn: String => Unit): Settings = {
+ val settings = new Settings(errorFn)
+ copyInto(settings)
+ settings
+ }
}
diff --git a/src/compiler/scala/tools/nsc/SubComponent.scala b/src/compiler/scala/tools/nsc/SubComponent.scala
index cd9fef1..a0468a2 100644
--- a/src/compiler/scala/tools/nsc/SubComponent.scala
+++ b/src/compiler/scala/tools/nsc/SubComponent.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -47,6 +47,9 @@ abstract class SubComponent {
private var ownPhaseCache: WeakReference[Phase] = new WeakReference(null)
private var ownPhaseRunId = global.NoRunId
+ @inline final def beforeOwnPhase[T](op: => T) = global.beforePhase(ownPhase)(op)
+ @inline final def afterOwnPhase[T](op: => T) = global.afterPhase(ownPhase)(op)
+
/** The phase corresponding to this subcomponent in the current compiler run */
def ownPhase: Phase = {
ownPhaseCache.get match {
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
old mode 100644
new mode 100755
index 5d276e2..6e39fc9
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -7,22 +7,28 @@ package scala.tools.nsc
package ast
import symtab._
-import reporters.Reporter
-import util.{Position, NoPosition}
+import reporters._
+import scala.reflect.internal.util.{Position, NoPosition}
import util.DocStrings._
-import util.Chars._
-import scala.collection.mutable.{HashMap, ListBuffer, StringBuilder}
+import scala.reflect.internal.Chars._
+import scala.collection.mutable
/*
* @author Martin Odersky
* @version 1.0
*/
-trait DocComments { self: SymbolTable =>
+trait DocComments { self: Global =>
- def reporter: Reporter
+ val cookedDocComments = mutable.HashMap[Symbol, String]()
/** The raw doc comment map */
- val docComments = new HashMap[Symbol, DocComment]
+ val docComments = mutable.HashMap[Symbol, DocComment]()
+
+ def clearDocComments() {
+ cookedDocComments.clear()
+ docComments.clear()
+ defs.clear()
+ }
/** Associate comment with symbol `sym` at position `pos`. */
def docComment(sym: Symbol, docStr: String, pos: Position = NoPosition) =
@@ -50,22 +56,30 @@ trait DocComments { self: SymbolTable =>
else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol)
}
- /** The raw doc comment of symbol `sym`, minus @usecase and @define sections, augmented by
+ def fillDocComment(sym: Symbol, comment: DocComment) {
+ docComments(sym) = comment
+ comment.defineVariables(sym)
+ }
+
+ /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by
* missing sections of an inherited doc comment.
* If a symbol does not have a doc comment but some overridden version of it does,
* the doc comment of the overridden version is copied instead.
*/
- def cookedDocComment(sym: Symbol, docStr: String = ""): String = {
+ def cookedDocComment(sym: Symbol, docStr: String = ""): String = cookedDocComments.getOrElseUpdate(sym, {
val ownComment = if (docStr.length == 0) docComments get sym map (_.template) getOrElse ""
- else DocComment(docStr).template
+ else DocComment(docStr).template
superComment(sym) match {
case None =>
- ownComment
+ if (ownComment.indexOf("@inheritdoc") != -1)
+ reporter.warning(sym.pos, "The comment for " + sym +
+ " contains @inheritdoc, but no parent comment is available to inherit from.")
+ ownComment.replaceAllLiterally("@inheritdoc", "<invalid inheritdoc annotation>")
case Some(sc) =>
if (ownComment == "") sc
- else merge(sc, ownComment, sym)
+ else expandInheritdoc(sc, merge(sc, ownComment, sym), sym)
}
- }
+ })
/** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing.
*
@@ -99,10 +113,18 @@ trait DocComments { self: SymbolTable =>
*/
def useCases(sym: Symbol, site: Symbol): List[(Symbol, String, Position)] = {
def getUseCases(dc: DocComment) = {
- for (uc <- dc.useCases; defn <- uc.expandedDefs(site)) yield
- (defn,
- expandVariables(merge(cookedDocComment(sym), uc.comment.raw, defn, copyFirstPara = true), sym, site),
- uc.pos)
+ val fullSigComment = cookedDocComment(sym)
+ for (uc <- dc.useCases; defn <- uc.expandedDefs(sym, site)) yield {
+ // use cases comments go through a series of transformations:
+ // 1 - filling in missing sections from the full signature
+ // 2 - expanding explicit inheritance @inheritdoc tags
+ // 3 - expanding variables like $COLL
+ val useCaseCommentRaw = uc.comment.raw
+ val useCaseCommentMerged = merge(fullSigComment, useCaseCommentRaw, defn)
+ val useCaseCommentInheritdoc = expandInheritdoc(fullSigComment, useCaseCommentMerged, sym)
+ val useCaseCommentVariables = expandVariables(useCaseCommentInheritdoc, sym, site)
+ (defn, useCaseCommentVariables, uc.pos)
+ }
}
getDocComment(sym) map getUseCases getOrElse List()
}
@@ -155,15 +177,15 @@ trait DocComments { self: SymbolTable =>
* 3. If there is no @return section in `dst` but there is one in `src`, copy it.
*/
def merge(src: String, dst: String, sym: Symbol, copyFirstPara: Boolean = false): String = {
- val srcSections = tagIndex(src)
- val dstSections = tagIndex(dst)
- val srcParams = paramDocs(src, "@param", srcSections)
- val dstParams = paramDocs(dst, "@param", dstSections)
- val srcTParams = paramDocs(src, "@tparam", srcSections)
- val dstTParams = paramDocs(dst, "@tparam", dstSections)
- val out = new StringBuilder
- var copied = 0
- var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _)))
+ val srcSections = tagIndex(src)
+ val dstSections = tagIndex(dst)
+ val srcParams = paramDocs(src, "@param", srcSections)
+ val dstParams = paramDocs(dst, "@param", dstSections)
+ val srcTParams = paramDocs(src, "@tparam", srcSections)
+ val dstTParams = paramDocs(dst, "@tparam", dstSections)
+ val out = new StringBuilder
+ var copied = 0
+ var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _)))
if (copyFirstPara) {
val eop = // end of comment body (first para), which is delimited by blank line, or tag, or end of comment
@@ -193,6 +215,7 @@ trait DocComments { self: SymbolTable =>
for (tparam <- sym.typeParams)
mergeSection(srcTParams get tparam.name.toString, dstTParams get tparam.name.toString)
mergeSection(returnDoc(src, srcSections), returnDoc(dst, dstSections))
+ mergeSection(groupDoc(src, srcSections), groupDoc(dst, dstSections))
if (out.length == 0) dst
else {
@@ -201,12 +224,91 @@ trait DocComments { self: SymbolTable =>
}
}
+ /**
+ * Expand inheritdoc tags
+ * - for the main comment we transform the inheritdoc into the super variable,
+ * and the variable expansion can expand it further
+ * - for the param, tparam and throws sections we must replace comments on the spot
+ *
+ * This is done separately, for two reasons:
+ * 1. It takes longer to run compared to merge
+ * 2. The inheritdoc annotation should not be used very often, as building the comment from pieces severely
+ * impacts performance
+ *
+ * @param parent The source (or parent) comment
+ * @param child The child (overriding member or usecase) comment
+ * @param sym The child symbol
+ * @return The child comment with the inheritdoc sections expanded
+ */
+ def expandInheritdoc(parent: String, child: String, sym: Symbol): String =
+ if (child.indexOf("@inheritdoc") == -1)
+ child
+ else {
+ val parentSections = tagIndex(parent)
+ val childSections = tagIndex(child)
+ val parentTagMap = sectionTagMap(parent, parentSections)
+ val parentNamedParams = Map() +
+ ("@param" -> paramDocs(parent, "@param", parentSections)) +
+ ("@tparam" -> paramDocs(parent, "@tparam", parentSections)) +
+ ("@throws" -> paramDocs(parent, "@throws", parentSections))
+
+ val out = new StringBuilder
+
+ def replaceInheritdoc(childSection: String, parentSection: => String) =
+ if (childSection.indexOf("@inheritdoc") == -1)
+ childSection
+ else
+ childSection.replaceAllLiterally("@inheritdoc", parentSection)
+
+ def getParentSection(section: (Int, Int)): String = {
+
+ def getSectionHeader = extractSectionTag(child, section) match {
+ case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(child, section)
+ case other => other
+ }
+
+ def sectionString(param: String, paramMap: Map[String, (Int, Int)]): String =
+ paramMap.get(param) match {
+ case Some(section) =>
+ // Cleanup the section tag and parameter
+ val sectionTextBounds = extractSectionText(parent, section)
+ cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2))
+ case None =>
+ reporter.info(sym.pos, "The \"" + getSectionHeader + "\" annotation of the " + sym +
+ " comment contains @inheritdoc, but the corresponding section in the parent is not defined.", true)
+ "<invalid inheritdoc annotation>"
+ }
+
+ child.substring(section._1, section._1 + 7) match {
+ case param@("@param "|"@tparam"|"@throws") =>
+ sectionString(extractSectionParam(child, section), parentNamedParams(param.trim))
+ case _ =>
+ sectionString(extractSectionTag(child, section), parentTagMap)
+ }
+ }
+
+ def mainComment(str: String, sections: List[(Int, Int)]): String =
+ if (str.trim.length > 3)
+ str.trim.substring(3, startTag(str, sections))
+ else
+ ""
+
+ // Append main comment
+ out.append("/**")
+ out.append(replaceInheritdoc(mainComment(child, childSections), mainComment(parent, parentSections)))
+
+ // Append sections
+ for (section <- childSections)
+ out.append(replaceInheritdoc(child.substring(section._1, section._2), getParentSection(section)))
+
+ out.append("*/")
+ out.toString
+ }
+
/** Maps symbols to the variable -> replacement maps that are defined
* in their doc comments
*/
- private val defs = new HashMap[Symbol, Map[String, String]] {
- override def default(key: Symbol) = Map()
- }
+ private val defs = mutable.HashMap[Symbol, Map[String, String]]() withDefaultValue Map()
/** Lookup definition of variable.
*
@@ -222,12 +324,12 @@ trait DocComments { self: SymbolTable =>
else site.info.baseClasses
searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match {
- case Some(str) if str startsWith '$' => lookupVariable(str.tail, site)
- case res => res orElse lookupVariable(vble, site.owner)
+ case Some(str) if str startsWith "$" => lookupVariable(str.tail, site)
+ case res => res orElse lookupVariable(vble, site.owner)
}
}
- /** Expand variable occurrences in string `str', until a fix point is reached or
+ /** Expand variable occurrences in string `str`, until a fix point is reached or
* a expandLimit is exceeded.
*
* @param str The string to be expanded
@@ -270,7 +372,7 @@ trait DocComments { self: SymbolTable =>
case vname =>
lookupVariable(vname, site) match {
case Some(replacement) => replaceWith(replacement)
- case None => reporter.warning(sym.pos, "Variable " + vname + " undefined in comment for " + sym)
+ case None => reporter.warning(sym.pos, "Variable " + vname + " undefined in comment for " + sym + " in " + site)
}
}
}
@@ -288,7 +390,7 @@ trait DocComments { self: SymbolTable =>
}
// !!! todo: inherit from Comment?
- case class DocComment(raw: String, pos: Position = NoPosition) {
+ case class DocComment(raw: String, pos: Position = NoPosition, codePos: Position = NoPosition) {
/** Returns:
* template: the doc comment minus all @define and @usecase sections
@@ -317,7 +419,7 @@ trait DocComments { self: SymbolTable =>
val comment = "/** " + raw.substring(commentStart, end) + "*/"
val commentPos = subPos(commentStart, end)
- UseCase(DocComment(comment, commentPos), code, codePos)
+ UseCase(DocComment(comment, commentPos, codePos), code, codePos)
}
private def subPos(start: Int, end: Int) =
@@ -348,7 +450,7 @@ trait DocComments { self: SymbolTable =>
var defined: List[Symbol] = List() // initialized by Typer
var aliases: List[Symbol] = List() // initialized by Typer
- def expandedDefs(site: Symbol): List[Symbol] = {
+ def expandedDefs(sym: Symbol, site: Symbol): List[Symbol] = {
def select(site: Type, name: Name, orElse: => Type): Type = {
val member = site.nonPrivateMember(name)
@@ -362,11 +464,16 @@ trait DocComments { self: SymbolTable =>
case List() => NoType
case site :: sites1 => select(site.thisType, name, findIn(sites1))
}
- val (classes, pkgs) = site.ownerChain.span(!_.isPackageClass)
- findIn(classes ::: List(pkgs.head, definitions.RootClass))
+ // Previously, searching was taking place *only* in the current package and in the root package
+ // now we're looking for it everywhere in the hierarchy, so we'll be able to link variable expansions like
+ // immutable.Seq in package immutable
+ //val (classes, pkgs) = site.ownerChain.span(!_.isPackageClass)
+ //val sites = (classes ::: List(pkgs.head, rootMirror.RootClass)))
+ //findIn(sites)
+ findIn(site.ownerChain ::: List(definitions.EmptyPackage))
}
- def getType(str: String): Type = {
+ def getType(str: String, variable: String): Type = {
def getParts(start: Int): List[String] = {
val end = skipIdent(str, start)
if (end == start) List()
@@ -376,7 +483,11 @@ trait DocComments { self: SymbolTable =>
}
}
val parts = getParts(0)
- assert(parts.nonEmpty, "parts is empty '" + str + "' in site " + site)
+ if (parts.isEmpty) {
+ reporter.error(comment.codePos, "Incorrect variable expansion for " + variable + " in use case. Does the " +
+ "variable expand to wiki syntax when documenting " + site + "?")
+ return ErrorType
+ }
val partnames = (parts.init map newTermName) :+ newTypeName(parts.last)
val (start, rest) = parts match {
case "this" :: _ => (site.thisType, partnames.tail)
@@ -388,35 +499,56 @@ trait DocComments { self: SymbolTable =>
case _ =>
(getSite(partnames.head), partnames.tail)
}
- (start /: rest)(select(_, _, NoType))
+ val result = (start /: rest)(select(_, _, NoType))
+ if (result == NoType)
+ reporter.warning(comment.codePos, "Could not find the type " + variable + " points to while expanding it " +
+ "for the usecase signature of " + sym + " in " + site + "." +
+ "In this context, " + variable + " = \"" + str + "\".")
+ result
+ }
+
+ /**
+ * work around the backticks issue suggested by Simon in
+ * https://groups.google.com/forum/?hl=en&fromgroups#!topic/scala-internals/z7s1CCRCz74
+ * ideally, we'd have a removeWikiSyntax method in the CommentFactory to completely eliminate the wiki markup
+ */
+ def cleanupVariable(str: String) = {
+ val tstr = str.trim
+ if (tstr.length >= 2 && tstr.startsWith("`") && tstr.endsWith("`"))
+ tstr.substring(1, tstr.length - 1)
+ else
+ tstr
}
- val aliasExpansions: List[Type] =
+ // the Boolean tells us whether we can normalize: if we found an actual type, then yes, we can normalize, else no,
+ // use the synthetic alias created for the variable
+ val aliasExpansions: List[(Type, Boolean)] =
for (alias <- aliases) yield
lookupVariable(alias.name.toString.substring(1), site) match {
case Some(repl) =>
- val tpe = getType(repl.trim)
- if (tpe != NoType) tpe
+ val repl2 = cleanupVariable(repl)
+ val tpe = getType(repl2, alias.name.toString)
+ if (tpe != NoType) (tpe, true)
else {
- val alias1 = alias.cloneSymbol(definitions.RootClass)
- alias1.name = repl.toTypeName
- typeRef(NoPrefix, alias1, Nil)
+ val alias1 = alias.cloneSymbol(rootMirror.RootClass, alias.rawflags, newTypeName(repl2))
+ (typeRef(NoPrefix, alias1, Nil), false)
}
case None =>
- typeRef(NoPrefix, alias, Nil)
+ (typeRef(NoPrefix, alias, Nil), false)
}
- def subst(sym: Symbol, from: List[Symbol], to: List[Type]): Type =
- if (from.isEmpty) sym.tpe
+ def subst(sym: Symbol, from: List[Symbol], to: List[(Type, Boolean)]): (Type, Boolean) =
+ if (from.isEmpty) (sym.tpe, false)
else if (from.head == sym) to.head
else subst(sym, from.tail, to.tail)
val substAliases = new TypeMap {
def apply(tp: Type) = mapOver(tp) match {
- case tp1 @ TypeRef(pre, sym, args) if (sym.name.length > 1 && sym.name(0) == '$') =>
+ case tp1 @ TypeRef(pre, sym, args) if (sym.name.length > 1 && sym.name.startChar == '$') =>
subst(sym, aliases, aliasExpansions) match {
- case TypeRef(pre1, sym1, _) =>
- typeRef(pre1, sym1, args)
+ case (TypeRef(pre1, sym1, _), canNormalize) =>
+ val tpe = typeRef(pre1, sym1, args)
+ if (canNormalize) tpe.normalize else tpe
case _ =>
tp1
}
@@ -426,8 +558,9 @@ trait DocComments { self: SymbolTable =>
}
for (defn <- defined) yield {
- defn.cloneSymbol.setFlag(Flags.SYNTHETIC).setInfo(
- substAliases(defn.info).asSeenFrom(site.thisType, defn.owner))
+ defn.cloneSymbol(sym.owner, sym.flags | Flags.SYNTHETIC) modifyInfo (info =>
+ substAliases(info).asSeenFrom(site.thisType, sym.owner)
+ )
}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index ca4ccd4..deea4de 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -1,22 +1,22 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package ast
-import compat.Platform.EOL
+import scala.compat.Platform.EOL
import symtab.Flags._
+import scala.language.postfixOps
-/** The object <code>nodePrinter</code> converts the internal tree
- * representation to a string formatted as a Scala expression.
+/** The object `nodePrinter` converts the internal tree
+ * representation to a string.
*
* @author Stephane Micheloud
- * @version 1.0
+ * @author Paul Phillips
*/
abstract class NodePrinters {
-
val global: Global
import global._
@@ -25,257 +25,331 @@ abstract class NodePrinters {
}
var infolevel = InfoLevel.Quiet
- object nodeToString extends Function1[Tree, String] {
+ def nodeToString: Tree => String = nodeToRegularString
+
+ object nodeToRegularString extends DefaultPrintAST with (Tree => String) {
+ def apply(tree: Tree) = stringify(tree)
+ }
+
+ trait DefaultPrintAST extends PrintAST {
+ val printPos = settings.Xprintpos.value || settings.Yposdebug.value
+
+ def showNameAndPos(tree: NameTree) = showPosition(tree) + showName(tree.name)
+ def showDefTreeName(tree: DefTree) = showName(tree.name)
+ def showPosition(tree: Tree) = if (printPos) tree.pos.show else ""
+ def showFlags(tree: MemberDef) = flagsToString(tree.symbol.flags | tree.mods.flags)
+ def showLiteral(lit: Literal) = showPosition(lit) + lit.value.escapedStringValue
+ def showTypeTree(tt: TypeTree) = showPosition(tt) + "<tpt>" + emptyOrComment(showType(tt))
+ def showName(name: Name) = name match {
+ case nme.EMPTY | tpnme.EMPTY => "<empty>"
+ case name => "\"" + name + "\""
+ }
+
+ def showSymbol(tree: Tree): String = {
+ val sym = tree.symbol
+ if (sym == null || sym == NoSymbol) ""
+ else sym.defString + sym.locationString
+ }
+ def showType(tree: Tree): String = {
+ val tpe = tree.tpe
+ if (tpe == null || tpe == NoType) ""
+ else "tree.tpe=" + tpe
+ }
+
+ def showAttributes(tree: Tree): String = {
+ if (infolevel == InfoLevel.Quiet) ""
+ else {
+ try { List(showSymbol(tree), showType(tree)) filterNot (_ == "") mkString ", " trim }
+ catch { case ex: Throwable => "sym= <error> " + ex.getMessage }
+ }
+ }
+ }
+
+ trait PrintAST {
private val buf = new StringBuilder
+ private var level = 0
- def apply(tree: Tree): String = {
- def traverse(tree: Tree, level: Int, comma: Boolean) {
- def println(s: String) {
- for (i <- 0 until level) buf.append(" ")
- buf.append(s)
- buf.append(EOL)
- }
- def printcln(s: String) {
- for (i <- 0 until level) buf.append(" ")
- buf.append(s)
- if (comma) buf.append(",")
- buf.append(EOL)
- }
- def annotationInfoToString(annot: AnnotationInfo): String = {
- val str = new StringBuilder
- str.append(annot.atp.toString())
- if (!annot.args.isEmpty)
- str.append(annot.args.mkString("(", ",", ")"))
- if (!annot.assocs.isEmpty)
- for (((name, value), index) <- annot.assocs.zipWithIndex) {
- if (index > 0)
- str.append(", ")
- str.append(name).append(" = ").append(value)
- }
- str.toString
- }
- def symflags(tree: Tree): String = {
- val buf = new StringBuffer
- val sym = tree.symbol
- buf append flagsToString(sym.flags)
-
- val annots = ", annots=" + (
- if (!sym.annotations.isEmpty)
- sym.annotations.map(annotationInfoToString).mkString("[", ",", "]")
- else
- tree.asInstanceOf[MemberDef].mods.annotations)
- (if (buf.length() > 2) buf.substring(3)
- else "0") + ", // flags=" + flagsToString(sym.flags) + annots
+ def showName(name: Name): String
+ def showPosition(tree: Tree): String
+ def showNameAndPos(tree: NameTree): String
+ def showDefTreeName(defTree: DefTree): String
+ def showFlags(tree: MemberDef): String
+ def showLiteral(lit: Literal): String
+ def showTypeTree(tt: TypeTree): String
+ def showAttributes(tree: Tree): String // symbol and type
+
+ def showRefTreeName(tree: Tree): String = {
+ tree match {
+ case SelectFromTypeTree(qual, name) => showRefTreeName(qual) + "#" + showName(name)
+ case Select(qual, name) => showRefTreeName(qual) + "." + showName(name)
+ case id @ Ident(name) => showNameAndPos(id)
+ case _ => "" + tree
+ }
+ }
+ def showRefTree(tree: RefTree): String = {
+ def prefix0 = showRefTreeName(tree.qualifier)
+ def prefix = if (prefix0 == "") "" else (tree match {
+ case SelectFromTypeTree(_, _) => prefix0 + "#"
+ case Select(_, _) => prefix0 + "."
+ case _ => ""
+ })
+ prefix + showNameAndPos(tree) + emptyOrComment(showAttributes(tree))
+ }
+
+ def emptyOrComment(s: String) = if (s == "") "" else " // " + s
+
+ def stringify(tree: Tree): String = {
+ buf.clear()
+ if (settings.XshowtreesStringified.value) buf.append(tree.toString + EOL)
+ if (settings.XshowtreesCompact.value) {
+ buf.append(showRaw(tree, printIds = settings.uniqid.value, printTypes = settings.printtypes.value))
+ } else {
+ level = 0
+ traverse(tree)
+ }
+ buf.toString
+ }
+ def traverseAny(x: Any) {
+ x match {
+ case t: Tree => traverse(t)
+ case xs: List[_] => printMultiline("List", "")(xs foreach traverseAny)
+ case _ => println("" + x)
+ }
+ }
+ def println(s: String) = printLine(s, "")
+
+ def printLine(value: String, comment: String) {
+ buf append " " * level
+ buf append value
+ if (comment != "") {
+ if (value != "")
+ buf append " "
+
+ buf append "// "
+ buf append comment
+ }
+ buf append EOL
+ }
+
+ def annotationInfoToString(annot: AnnotationInfo): String = {
+ val str = new StringBuilder
+ str.append(annot.atp.toString())
+ if (!annot.args.isEmpty)
+ str.append(annot.args.mkString("(", ",", ")"))
+ if (!annot.assocs.isEmpty)
+ for (((name, value), index) <- annot.assocs.zipWithIndex) {
+ if (index > 0)
+ str.append(", ")
+ str.append(name).append(" = ").append(value)
}
+ str.toString
+ }
+ def printModifiers(tree: MemberDef) {
+ // SI-5885: by default this won't print annotations of not yet initialized symbols
+ val annots0 = tree.symbol.annotations match {
+ case Nil => tree.mods.annotations
+ case xs => xs map annotationInfoToString
+ }
+ val annots = annots0 match {
+ case Nil => ""
+ case xs => " " + xs.mkString("@{ ", ", ", " }")
+ }
+ val flagString = showFlags(tree) match {
+ case "" => "0"
+ case s => s
+ }
+ println(flagString + annots)
+ }
- def nodeinfo(tree: Tree): String =
- if (infolevel == InfoLevel.Quiet) ""
- else {
- val buf = new StringBuilder(" // sym=" + tree.symbol)
- if (tree.hasSymbol) {
- if (tree.symbol.isPrimaryConstructor)
- buf.append(", isPrimaryConstructor")
- else if (tree.symbol.isConstructor)
- buf.append(", isConstructor")
- if (tree.symbol != NoSymbol)
- buf.append(", sym.owner=" + tree.symbol.owner)
- buf.append(", sym.tpe=" + tree.symbol.tpe)
- }
- buf.append(", tpe=" + tree.tpe)
- if (tree.tpe != null) {
- var sym = tree.tpe.termSymbol
- if (sym == NoSymbol) sym = tree.tpe.typeSymbol
- buf.append(", tpe.sym=" + sym)
- if (sym != NoSymbol) {
- buf.append(", tpe.sym.owner=" + sym.owner)
- if ((infolevel > InfoLevel.Normal) &&
- !(sym.owner eq definitions.ScalaPackageClass) &&
- !sym.isModuleClass && !sym.isPackageClass &&
- !sym.isJavaDefined) {
- val members = for (m <- tree.tpe.decls.toList)
- yield m.toString() + ": " + m.tpe + ", "
- buf.append(", tpe.decls=" + members)
- }
- }
- }
- buf.toString
+ def applyCommon(tree: Tree, fun: Tree, args: List[Tree]) {
+ printMultiline(tree) {
+ traverse(fun)
+ traverseList("Nil", "argument")(args)
+ }
+ }
+
+ def treePrefix(tree: Tree) = showPosition(tree) + tree.productPrefix
+ def printMultiline(tree: Tree)(body: => Unit) {
+ printMultiline(treePrefix(tree), showAttributes(tree))(body)
+ }
+ def printMultiline(prefix: String, comment: String)(body: => Unit) {
+ printLine(prefix + "(", comment)
+ indent(body)
+ println(")")
+ }
+
+ @inline private def indent[T](body: => T): T = {
+ level += 1
+ try body
+ finally level -= 1
+ }
+
+ def traverseList(ifEmpty: String, what: String)(trees: List[Tree]) {
+ if (trees.isEmpty)
+ println(ifEmpty)
+ else if (trees.tail.isEmpty)
+ traverse(trees.head)
+ else {
+ printLine("", trees.length + " " + what + "s")
+ trees foreach traverse
+ }
+ }
+
+ def printSingle(tree: Tree, name: Name) {
+ println(treePrefix(tree) + "(" + showName(name) + ")" + showAttributes(tree))
+ }
+
+ def traverse(tree: Tree) {
+ showPosition(tree)
+
+ tree match {
+ case AppliedTypeTree(tpt, args) => applyCommon(tree, tpt, args)
+ case ApplyDynamic(fun, args) => applyCommon(tree, fun, args)
+ case Apply(fun, args) => applyCommon(tree, fun, args)
+
+ case Throw(Ident(name)) =>
+ printSingle(tree, name)
+
+ case b @ Bind(name, body) =>
+ printMultiline(tree) {
+ println(showDefTreeName(b))
+ traverse(body)
}
- def nodeinfo2(tree: Tree): String =
- (if (comma) "," else "") + nodeinfo(tree)
-
- def applyCommon(name: String, tree: Tree, fun: Tree, args: List[Tree]) {
- println(name + "(" + nodeinfo(tree))
- traverse(fun, level + 1, true)
- if (args.isEmpty)
- println(" Nil // no argument")
- else {
- val n = args.length
- println(" List( // " + n + " arguments(s)")
- for (i <- 0 until n)
- traverse(args(i), level + 2, i < n-1)
- println(" )")
+
+ case ld @ LabelDef(name, params, rhs) =>
+ printMultiline(tree) {
+ showNameAndPos(ld)
+ traverseList("()", "params")(params)
+ traverse(rhs)
}
- printcln(")")
- }
- tree match {
- case AppliedTypeTree(tpt, args) => applyCommon("AppliedTypeTree", tree, tpt, args)
- case Apply(fun, args) => applyCommon("Apply", tree, fun, args)
- case ApplyDynamic(fun, args) => applyCommon("ApplyDynamic", tree, fun, args)
-
- case Block(stats, expr) =>
- println("Block(" + nodeinfo(tree))
- if (stats.isEmpty)
- println(" List(), // no statement")
- else {
- val n = stats.length
- println(" List( // " + n + " statement(s)")
- for (i <- 0 until n)
- traverse(stats(i), level + 2, i < n-1)
- println(" ),")
- }
- traverse(expr, level + 1, false)
- printcln(")")
- case ClassDef(mods, name, tparams, impl) =>
- println("ClassDef(" + nodeinfo(tree))
- println(" " + symflags(tree))
- println(" \"" + name + "\",")
- if (tparams.isEmpty)
- println(" List(), // no type parameter")
- else {
- val n = tparams.length
- println(" List( // " + n + " type parameter(s)")
- for (i <- 0 until n)
- traverse(tparams(i), level + 2, i < n-1)
- println(" ),")
- }
- traverse(impl, level + 1, false)
- printcln(")")
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- println("DefDef(" + nodeinfo(tree))
- println(" " + symflags(tree))
- println(" \"" + name + "\",")
- if (tparams.isEmpty)
- println(" List(), // no type parameter")
- else {
- val n = tparams.length
- println(" List( // " + n + " type parameter(s)")
- for (i <- 0 until n)
- traverse(tparams(i), level + 2, i < n-1)
- println(" ),")
+ case Function(vparams, body) =>
+ printMultiline(tree) {
+ traverseList("()", "parameter")(vparams)
+ traverse(body)
+ }
+ case Try(block, catches, finalizer) =>
+ printMultiline(tree) {
+ traverse(block)
+ traverseList("{}", "case")(catches)
+ if (finalizer ne EmptyTree)
+ traverse(finalizer)
+ }
+
+ case Match(selector, cases) =>
+ printMultiline(tree) {
+ traverse(selector)
+ traverseList("", "case")(cases)
+ }
+ case CaseDef(pat, guard, body) =>
+ printMultiline(tree) {
+ traverse(pat)
+ if (guard ne EmptyTree)
+ traverse(guard)
+ traverse(body)
+ }
+ case Block(stats, expr) =>
+ printMultiline(tree) {
+ traverseList("{}", "statement")(stats)
+ traverse(expr)
+ }
+ case cd @ ClassDef(mods, name, tparams, impl) =>
+ printMultiline(tree) {
+ printModifiers(cd)
+ println(showDefTreeName(cd))
+ traverseList("[]", "type parameter")(tparams)
+ traverse(impl)
+ }
+ case md @ ModuleDef(mods, name, impl) =>
+ printMultiline(tree) {
+ printModifiers(md)
+ println(showDefTreeName(md))
+ traverse(impl)
+ }
+ case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ printMultiline(tree) {
+ printModifiers(dd)
+ println(showDefTreeName(dd))
+ traverseList("[]", "type parameter")(tparams)
+ vparamss match {
+ case Nil => println("Nil")
+ case Nil :: Nil => println("List(Nil)")
+ case ps :: Nil =>
+ printLine("", "1 parameter list")
+ ps foreach traverse
+ case pss =>
+ printLine("", pss.length + " parameter lists")
+ pss foreach (ps => traverseList("()", "parameter")(ps))
}
- val n = vparamss.length
- if (n == 1 && vparamss(0).isEmpty)
- println(" List(List()), // no parameter")
- else {
- println(" List(")
- for (i <- 0 until n) {
- val m = vparamss(i).length
- println(" List( // " + m + " parameter(s)")
- for (j <- 0 until m)
- traverse(vparamss(i)(j), level + 3, j < m-1)
- println(" )")
+ traverse(tpt)
+ traverse(rhs)
+ }
+ case EmptyTree =>
+ println(showName(nme.EMPTY))
+ case lit @ Literal(value) =>
+ println(showLiteral(lit))
+ case New(tpt) =>
+ printMultiline(tree)(traverse(tpt))
+ case Super(This(qual), mix) =>
+ println("Super(This(" + showName(qual) + "), " + showName(mix) + ")")
+ case Super(qual, mix) =>
+ printMultiline(tree) {
+ traverse(qual)
+ showName(mix)
+ }
+ case Template(parents, self, body) =>
+ printMultiline(tree) {
+ val ps0 = parents map { p =>
+ if (p.tpe eq null) p match {
+ case x: RefTree => showRefTree(x)
+ case x => showPosition(x) + x
}
- println(" ),")
- }
- println(" " + tpt + ",")
- traverse(rhs, level + 1, false)
- printcln(")")
- case EmptyTree =>
- printcln("EmptyTree")
- case Ident(name) =>
- printcln("Ident(\"" + name + "\")" + nodeinfo2(tree))
- case Literal(value) =>
- printcln("Literal(" + value + ")")
- case New(tpt) =>
- println("New(" + nodeinfo(tree))
- traverse(tpt, level + 1, false)
- printcln(")")
- case Select(qualifier, selector) =>
- println("Select(" + nodeinfo(tree))
- traverse(qualifier, level + 1, true)
- printcln(" \"" + selector + "\")")
- case Super(qual, mix) =>
- println("Super(\"" + mix + "\")" + nodeinfo(tree))
- traverse(qual, level + 1, true)
- case Template(parents, self, body) =>
- println("Template(" + nodeinfo(tree))
- println(" " + parents.map(p =>
- if (p.tpe ne null) p.tpe.typeSymbol else "null-" + p
- ) + ", // parents")
- traverse(self, level + 1, true)
- if (body.isEmpty)
- println(" List() // no body")
- else {
- val n = body.length
- println(" List( // body")
- for (i <- 0 until n)
- traverse(body(i), level + 2, i < n-1)
- println(" )")
- }
- printcln(")")
- case This(qual) =>
- println("This(\"" + qual + "\")" + nodeinfo2(tree))
- case TypeApply(fun, args) =>
- println("TypeApply(" + nodeinfo(tree))
- traverse(fun, level + 1, true)
- if (args.isEmpty)
- println(" List() // no argument")
- else {
- val n = args.length
- println(" List(")
- for (i <- 0 until n)
- traverse(args(i), level + 1, i < n-1)
- println(" )")
+ else showName(newTypeName(p.tpe.typeSymbol.fullName))
}
- printcln(")")
- case TypeTree() =>
- printcln("TypeTree()" + nodeinfo2(tree))
- case Typed(expr, tpt) =>
- println("Typed(" + nodeinfo(tree))
- traverse(expr, level + 1, true)
- traverse(tpt, level + 1, false)
- printcln(")")
- case ValDef(mods, name, tpt, rhs) =>
- println("ValDef(" + nodeinfo(tree))
- println(" " + symflags(tree))
- println(" \"" + name + "\",")
- traverse(tpt, level + 1, true)
- traverse(rhs, level + 1, false)
- printcln(")")
- case PackageDef(pid, stats) =>
- println("PackageDef(")
- traverse(pid, level + 1, false)
- println(",\n")
- for (stat <- stats)
- traverse(stat, level + 1, false)
- printcln(")")
- case _ =>
- tree match {
- case p: Product =>
- if (p.productArity != 0) {
- println(p.productPrefix+"(")
- for (elem <- (0 until p.productArity) map p.productElement) {
- def printElem(elem: Any, level: Int): Unit = elem match {
- case t: Tree =>
- traverse(t, level, false)
- case xs: List[_] =>
- print("List(")
- for (x <- xs) printElem(x, level+1)
- printcln(")")
- case _ =>
- println(elem.toString)
- }
- printElem(elem, level+1)
- }
- printcln(")")
- } else printcln(p.productPrefix)
- }
- }
+ printLine(ps0 mkString ", ", "parents")
+ traverse(self)
+ traverseList("{}", "statement")(body)
+ }
+ case This(qual) =>
+ printSingle(tree, qual)
+ case TypeApply(fun, args) =>
+ printMultiline(tree) {
+ traverse(fun)
+ traverseList("[]", "type argument")(args)
+ }
+ case tt @ TypeTree() =>
+ println(showTypeTree(tt))
+
+ case Typed(expr, tpt) =>
+ printMultiline(tree) {
+ traverse(expr)
+ traverse(tpt)
+ }
+ case vd @ ValDef(mods, name, tpt, rhs) =>
+ printMultiline(tree) {
+ printModifiers(vd)
+ println(showDefTreeName(vd))
+ traverse(tpt)
+ traverse(rhs)
+ }
+ case td @ TypeDef(mods, name, tparams, rhs) =>
+ printMultiline(tree) {
+ printModifiers(td)
+ println(showDefTreeName(td))
+ traverseList("[]", "type parameter")(tparams)
+ traverse(rhs)
+ }
+
+ case PackageDef(pid, stats) =>
+ printMultiline("PackageDef", "")(pid :: stats foreach traverse)
+
+ case _ =>
+ tree match {
+ case t: RefTree => println(showRefTree(t))
+ case t if t.productArity == 0 => println(treePrefix(t))
+ case t => printMultiline(tree)(tree.productIterator foreach traverseAny)
+ }
}
- buf setLength 0
- traverse(tree, 0, false)
- buf.toString
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala
new file mode 100644
index 0000000..d8fb632
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/Positions.scala
@@ -0,0 +1,37 @@
+package scala.tools.nsc
+package ast
+
+import scala.reflect.internal.util.{ SourceFile, Position, OffsetPosition, NoPosition }
+
+trait Positions extends scala.reflect.internal.Positions {
+ self: Global =>
+
+ def rangePos(source: SourceFile, start: Int, point: Int, end: Int) =
+ new OffsetPosition(source, point)
+
+ def validatePositions(tree: Tree) {}
+
+ class ValidatingPosAssigner extends PosAssigner {
+ var pos: Position = _
+ override def traverse(t: Tree) {
+ if (t eq EmptyTree) ()
+ else if (t.pos == NoPosition) super.traverse(t setPos pos)
+ else if (globalPhase.id <= currentRun.picklerPhase.id) {
+ // When we prune due to encountering a position, traverse the
+ // pruned children so we can warn about those lacking positions.
+ t.children foreach { c =>
+ if ((c eq EmptyTree) || (c eq emptyValDef)) ()
+ else if (c.pos == NoPosition) {
+ reporter.warning(t.pos, " Positioned tree has unpositioned child in phase " + globalPhase)
+ inform("parent: " + treeSymStatus(t))
+ inform(" child: " + treeSymStatus(c) + "\n")
+ }
+ }
+ }
+ }
+ }
+
+ override protected[this] lazy val posAssigner: PosAssigner =
+ if (settings.Yrangepos.value && settings.debug.value || settings.Yposdebug.value) new ValidatingPosAssigner
+ else new DefaultPosAssigner
+}
diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala
new file mode 100644
index 0000000..83222a2
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/ast/Printers.scala
@@ -0,0 +1,296 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package ast
+
+import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
+import symtab.Flags._
+import symtab.SymbolTable
+
+trait Printers extends scala.reflect.internal.Printers { this: Global =>
+
+ import treeInfo.{ IsTrue, IsFalse }
+
+ class TreePrinter(out: PrintWriter) extends super.TreePrinter(out) {
+
+ override def print(args: Any*): Unit = args foreach {
+ case tree: Tree =>
+ printPosition(tree)
+ printTree(
+ if (tree.isDef && tree.symbol != NoSymbol && tree.symbol.isInitialized) {
+ tree match {
+ case ClassDef(_, _, _, impl @ Template(ps, emptyValDef, body))
+ if (tree.symbol.thisSym != tree.symbol) =>
+ ClassDef(tree.symbol, Template(ps, ValDef(tree.symbol.thisSym), body))
+ case ClassDef(_, _, _, impl) => ClassDef(tree.symbol, impl)
+ case ModuleDef(_, _, impl) => ModuleDef(tree.symbol, impl)
+ case ValDef(_, _, _, rhs) => ValDef(tree.symbol, rhs)
+ case DefDef(_, _, _, vparamss, _, rhs) => DefDef(tree.symbol, vparamss, rhs)
+ case TypeDef(_, _, _, rhs) => TypeDef(tree.symbol, rhs)
+ case _ => tree
+ }
+ } else tree)
+ case unit: CompilationUnit =>
+ print("// Scala source: " + unit.source + "\n")
+ if (unit.body == null) print("<null>")
+ else { print(unit.body); println() }
+ println()
+ out.flush()
+ case arg =>
+ super.print(arg)
+ }
+ }
+
+ // overflow cases missing from TreePrinter in reflect.api
+ override def xprintTree(treePrinter: super.TreePrinter, tree: Tree) = tree match {
+ case DocDef(comment, definition) =>
+ treePrinter.print(comment.raw)
+ treePrinter.println()
+ treePrinter.print(definition)
+
+ case TypeTreeWithDeferredRefCheck() =>
+ treePrinter.print("<tree with deferred refcheck>")
+
+ case SelectFromArray(qualifier, name, _) =>
+ treePrinter.print(qualifier, ".<arr>", symName(tree, name))
+
+ case _ =>
+ super.xprintTree(treePrinter, tree)
+ }
+
+ /** A tree printer which is stingier about vertical whitespace and unnecessary
+ * punctuation than the standard one.
+ */
+ class CompactTreePrinter(out: PrintWriter) extends TreePrinter(out) {
+ override def printRow(ts: List[Tree], start: String, sep: String, end: String) {
+ print(start)
+ printSeq(ts)(print(_))(print(sep))
+ print(end)
+ }
+
+ // drill down through Blocks and pull out the real statements.
+ def allStatements(t: Tree): List[Tree] = t match {
+ case Block(stmts, expr) => (stmts flatMap allStatements) ::: List(expr)
+ case _ => List(t)
+ }
+
+ def printLogicalOr(t1: (Tree, Boolean), t2: (Tree, Boolean)) =
+ printLogicalOp(t1, t2, "||")
+
+ def printLogicalAnd(t1: (Tree, Boolean), t2: (Tree, Boolean)) =
+ printLogicalOp(t1, t2, "&&")
+
+ def printLogicalOp(t1: (Tree, Boolean), t2: (Tree, Boolean), op: String) = {
+ def maybenot(tvalue: Boolean) = if (tvalue) "" else "!"
+
+ print("%s(" format maybenot(t1._2))
+ printTree(t1._1)
+ print(") %s %s(".format(op, maybenot(t2._2)))
+ printTree(t2._1)
+ print(")")
+ }
+
+ override def printTree(tree: Tree): Unit = {
+ // routing supercalls through this for debugging ease
+ def s() = super.printTree(tree)
+
+ tree match {
+ // labels used for jumps - does not map to valid scala code
+ case LabelDef(name, params, rhs) =>
+ print("labeldef %s(%s) = ".format(name, params mkString ","))
+ printTree(rhs)
+
+ case Ident(name) =>
+ print(decodedSymName(tree, name))
+
+ // target.method(arg) ==> target method arg
+ case Apply(Select(target, method), List(arg)) =>
+ if (method.decode.toString == "||")
+ printLogicalOr(target -> true, arg -> true)
+ else if (method.decode.toString == "&&")
+ printLogicalAnd(target -> true, arg -> true)
+ else (target, arg) match {
+ case (_: Ident, _: Literal | _: Ident) =>
+ printTree(target)
+ print(" ")
+ printTree(Ident(method))
+ print(" ")
+ printTree(arg)
+ case _ => s()
+ }
+
+ // target.unary_! ==> !target
+ case Select(qualifier, name) if (name.decode startsWith "unary_") =>
+ print(name.decode drop 6)
+ printTree(qualifier)
+
+ case Select(qualifier, name) =>
+ printTree(qualifier)
+ print(".")
+ print(quotedName(name, true))
+
+ // target.toString() ==> target.toString
+ case Apply(fn, Nil) => printTree(fn)
+
+ // if a Block only continues one actual statement, just print it.
+ case Block(stats, expr) =>
+ allStatements(tree) match {
+ case List(x) => printTree(x)
+ case xs => s()
+ }
+
+ // We get a lot of this stuff
+ case If( IsTrue(), x, _) => printTree(x)
+ case If(IsFalse(), _, x) => printTree(x)
+
+ case If(cond, IsTrue(), elsep) => printLogicalOr(cond -> true, elsep -> true)
+ case If(cond, IsFalse(), elsep) => printLogicalAnd(cond -> false, elsep -> true)
+ case If(cond, thenp, IsTrue()) => printLogicalOr(cond -> false, thenp -> true)
+ case If(cond, thenp, IsFalse()) => printLogicalAnd(cond -> true, thenp -> true)
+
+ // If thenp or elsep has only one statement, it doesn't need more than one line.
+ case If(cond, thenp, elsep) =>
+ def ifIndented(x: Tree) = {
+ indent ; println() ; printTree(x) ; undent
+ }
+
+ val List(thenStmts, elseStmts) = List(thenp, elsep) map allStatements
+ print("if ("); print(cond); print(") ")
+
+ thenStmts match {
+ case List(x: If) => ifIndented(x)
+ case List(x) => printTree(x)
+ case _ => printTree(thenp)
+ }
+
+ if (elseStmts.nonEmpty) {
+ print(" else")
+ indent ; println()
+ elseStmts match {
+ case List(x) => printTree(x)
+ case _ => printTree(elsep)
+ }
+ undent ; println()
+ }
+ case _ => s()
+ }
+ }
+ }
+
+ /** This must guarantee not to force any evaluation, so we can learn
+ * a little bit about trees in the midst of compilation without altering
+ * the natural course of events.
+ */
+ class SafeTreePrinter(out: PrintWriter) extends TreePrinter(out) {
+
+ private def default(t: Tree) = t.getClass.getName.reverse.takeWhile(_ != '.').reverse
+ private def params(trees: List[Tree]): String = trees map safe mkString ", "
+
+ private def safe(name: Name): String = name.decode
+ private def safe(tree: Tree): String = tree match {
+ case Apply(fn, args) => "%s(%s)".format(safe(fn), params(args))
+ case Select(qual, name) => safe(qual) + "." + safe(name)
+ case This(qual) => safe(qual) + ".this"
+ case Ident(name) => safe(name)
+ case Literal(value) => value.stringValue
+ case _ => "(?: %s)".format(default(tree))
+ }
+
+ override def printTree(tree: Tree) { print(safe(tree)) }
+ }
+
+ class TreeMatchTemplate {
+ // non-trees defined in Trees
+ //
+ // case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
+ // case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position])
+ //
+ def apply(t: Tree): Unit = t match {
+ // eliminated by typer
+ case Annotated(annot, arg) =>
+ case AssignOrNamedArg(lhs, rhs) =>
+ case DocDef(comment, definition) =>
+ case Import(expr, selectors) =>
+
+ // eliminated by refchecks
+ case ModuleDef(mods, name, impl) =>
+ case TypeTreeWithDeferredRefCheck() =>
+
+ // eliminated by erasure
+ case TypeDef(mods, name, tparams, rhs) =>
+ case Typed(expr, tpt) =>
+
+ // eliminated by cleanup
+ case ApplyDynamic(qual, args) =>
+
+ // eliminated by explicitouter
+ case Alternative(trees) =>
+ case Bind(name, body) =>
+ case CaseDef(pat, guard, body) =>
+ case Star(elem) =>
+ case UnApply(fun, args) =>
+
+ // eliminated by lambdalift
+ case Function(vparams, body) =>
+
+ // eliminated by uncurry
+ case AppliedTypeTree(tpt, args) =>
+ case CompoundTypeTree(templ) =>
+ case ExistentialTypeTree(tpt, whereClauses) =>
+ case SelectFromTypeTree(qual, selector) =>
+ case SingletonTypeTree(ref) =>
+ case TypeBoundsTree(lo, hi) =>
+
+ // survivors
+ case Apply(fun, args) =>
+ case ArrayValue(elemtpt, trees) =>
+ case Assign(lhs, rhs) =>
+ case Block(stats, expr) =>
+ case ClassDef(mods, name, tparams, impl) =>
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ case EmptyTree =>
+ case Ident(name) =>
+ case If(cond, thenp, elsep) =>
+ case LabelDef(name, params, rhs) =>
+ case Literal(value) =>
+ case Match(selector, cases) =>
+ case New(tpt) =>
+ case PackageDef(pid, stats) =>
+ case Return(expr) =>
+ case Select(qualifier, selector) =>
+ case Super(qual, mix) =>
+ case Template(parents, self, body) =>
+ case This(qual) =>
+ case Throw(expr) =>
+ case Try(block, catches, finalizer) =>
+ case TypeApply(fun, args) =>
+ case TypeTree() =>
+ case ValDef(mods, name, tpt, rhs) =>
+
+ // missing from the Trees comment
+ case Parens(args) => // only used during parsing
+ case SelectFromArray(qual, name, erasure) => // only used during erasure
+ }
+ }
+
+ def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
+ def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
+ def asCompactDebugString(t: Tree): String = render(t, newCompactTreePrinter, true, true, true)
+
+ def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
+ def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream))
+ def newStandardTreePrinter(): TreePrinter = newStandardTreePrinter(new PrintWriter(ConsoleWriter))
+
+ def newCompactTreePrinter(writer: PrintWriter): CompactTreePrinter = new CompactTreePrinter(writer)
+ def newCompactTreePrinter(stream: OutputStream): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(stream))
+ def newCompactTreePrinter(): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(ConsoleWriter))
+
+ override def newTreePrinter(writer: PrintWriter): TreePrinter =
+ if (settings.Ycompacttrees.value) newCompactTreePrinter(writer)
+ else newStandardTreePrinter(writer)
+ override def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream))
+ override def newTreePrinter(): TreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter))
+}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index 6139288..5c95409 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -18,6 +18,7 @@ import scala.concurrent.Lock
import scala.text._
import symtab.Flags._
import symtab.SymbolTable
+import scala.language.implicitConversions
/**
* Tree browsers can show the AST in a graphical and interactive
@@ -33,17 +34,16 @@ abstract class TreeBrowsers {
val borderSize = 10
-
def create(): SwingBrowser = new SwingBrowser();
/** Pseudo tree class, so that all JTree nodes are treated uniformly */
case class ProgramTree(units: List[UnitTree]) extends Tree {
- override def toString(): String = "Program"
+ override def toString: String = "Program"
}
/** Pseudo tree class, so that all JTree nodes are treated uniformly */
case class UnitTree(unit: CompilationUnit) extends Tree {
- override def toString(): String = unit.toString()
+ override def toString: String = unit.toString
}
/**
@@ -140,7 +140,7 @@ abstract class TreeBrowsers {
UIManager.setLookAndFeel("com.sun.java.swing.plaf.nimbus.NimbusLookAndFeel")
}
catch {
- case _ => UIManager.setLookAndFeel(UIManager.getCrossPlatformLookAndFeelClassName())
+ case _: Throwable => UIManager.setLookAndFeel(UIManager.getCrossPlatformLookAndFeelClassName())
}
val frame = new JFrame("Scala AST after " + phaseName + " phase")
@@ -200,7 +200,7 @@ abstract class TreeBrowsers {
row: Int, hasFocus: Boolean) = {
val (cls, name) = TreeInfo.treeName(value.asInstanceOf[Tree])
if (name != EMPTY)
- cls + "[" + name.toString() + "]"
+ cls + "[" + name + "]"
else
cls
}
@@ -208,7 +208,7 @@ abstract class TreeBrowsers {
jTree.addTreeSelectionListener(new javax.swing.event.TreeSelectionListener() {
def valueChanged(e: javax.swing.event.TreeSelectionEvent): Unit = {
- textArea.setText(e.getPath().getLastPathComponent().toString())
+ textArea.setText(e.getPath().getLastPathComponent().toString)
infoPanel.update(e.getPath().getLastPathComponent())
}
})
@@ -330,21 +330,21 @@ abstract class TreeBrowsers {
str.append(t.symbol.tpe).append("\n")
buf = new StringWriter()
TypePrinter.toDocument(t.symbol.tpe).format(getWidth() / getColumnWidth(), buf)
- str.append(buf.toString())
+ str.append(buf.toString)
}
str.append("\n\nSymbol info: \n")
TreeInfo.symbolTypeDoc(t).format(getWidth() / getColumnWidth(), buf)
- str.append(buf.toString())
+ str.append(buf.toString)
str.append("\n\nSymbol Attributes: \n").append(TreeInfo.symbolAttributes(t))
str.append("\ntree.tpe: ")
if (t.tpe ne null) {
- str.append(t.tpe.toString()).append("\n")
+ str.append(t.tpe.toString).append("\n")
buf = new StringWriter()
TypePrinter.toDocument(t.tpe).format(getWidth() / getColumnWidth(), buf)
- str.append(buf.toString())
+ str.append(buf.toString)
}
}
- setText(str.toString())
+ setText(str.toString)
}
}
@@ -353,144 +353,17 @@ abstract class TreeBrowsers {
* Tree.
*/
object TreeInfo {
-
/** Return the case class name and the Name, if the node defines one */
- def treeName(t: Tree): (String, Name) = t match {
- case ProgramTree(units) =>
- ("Program", EMPTY)
-
- case UnitTree(unit) =>
- ("CompilationUnit", unit.toString())
-
- case DocDef(comment, definition) =>
- ("DocDef", EMPTY)
-
- case ClassDef(mods, name, tparams, impl) =>
- ("ClassDef", name)
-
- case PackageDef(packaged, impl) =>
- ("PackageDef", EMPTY)
-
- case ModuleDef(mods, name, impl) =>
- ("ModuleDef", name)
-
- case ValDef(mods, name, tpe, rhs) =>
- ("ValDef", name)
-
- case DefDef(mods, name, tparams, vparams, tpe, rhs) =>
- ("DefDef", name)
-
- case TypeDef(mods, name, tparams, rhs) =>
- ("TypeDef", name)
-
- case Import(expr, selectors) =>
- ("Import", EMPTY)
-
- case CaseDef(pat, guard, body) =>
- ("CaseDef", EMPTY)
-
- case Template(parents, self, body) =>
- ("Template", EMPTY)
-
- case LabelDef(name, params, rhs) =>
- ("LabelDef", name)
-
- case Block(stats, expr) =>
- ("Block", EMPTY)
-
- case Alternative(trees) =>
- ("Alternative", EMPTY)
-
- case Bind(name, rhs) =>
- ("Bind", name)
-
- case UnApply(fun, args) =>
- ("UnApply", EMPTY)
-
- case Match(selector, cases) =>
- ("Visitor", EMPTY)
-
- case Function(vparams, body) =>
- ("Function", EMPTY)
-
- case Assign(lhs, rhs) =>
- ("Assign", EMPTY)
-
- case If(cond, thenp, elsep) =>
- ("If", EMPTY)
-
- case Return(expr) =>
- ("Return", EMPTY)
-
- case Throw(expr) =>
- ("Throw", EMPTY)
-
- case New(init) =>
- ("New", EMPTY)
-
- case Typed(expr, tpe) =>
- ("Typed", EMPTY)
-
- case TypeApply(fun, args) =>
- ("TypeApply", EMPTY)
-
- case Apply(fun, args) =>
- ("Apply", EMPTY)
-
- case ApplyDynamic(qual, args) =>
- ("Apply", EMPTY)
-
- case Super(qualif, mix) =>
- ("Super", "mix: " + mix.toString())
-
- case This(qualifier) =>
- ("This", qualifier)
-
- case Select(qualifier, selector) =>
- ("Select", selector)
-
- case Ident(name) =>
- ("Ident", name)
-
- case Literal(value) =>
- ("Literal", EMPTY)
-
- case TypeTree() =>
- ("TypeTree", EMPTY)
-
- case Annotated(annot, arg) =>
- ("Annotated", EMPTY)
-
- case SingletonTypeTree(ref) =>
- ("SingletonType", EMPTY)
-
- case SelectFromTypeTree(qualifier, selector) =>
- ("SelectFromType", selector)
-
- case CompoundTypeTree(template) =>
- ("CompoundType", EMPTY)
-
- case AppliedTypeTree(tpe, args) =>
- ("AppliedType", EMPTY)
-
- case TypeBoundsTree(lo, hi) =>
- ("TypeBoundsTree", EMPTY)
-
- case ExistentialTypeTree(tpt, whereClauses) =>
- ("ExistentialTypeTree", EMPTY)
-
- case Try(block, catcher, finalizer) =>
- ("Try", EMPTY)
-
- case EmptyTree =>
- ("Empty", EMPTY)
-
- case ArrayValue(elemtpt, trees) =>
- ("ArrayValue", EMPTY)
-
- case Star(t) =>
- ("Star", EMPTY)
- }
+ def treeName(t: Tree): (String, Name) = ((t.productPrefix, t match {
+ case UnitTree(unit) => newTermName("" + unit)
+ case Super(_, mix) => newTermName("mix: " + mix)
+ case This(qual) => qual
+ case Select(_, selector) => selector
+ case Ident(name) => name
+ case SelectFromTypeTree(_, selector) => selector
+ case x: DefTree => x.name
+ case _ => EMPTY
+ }))
/** Return a list of children for the given tree node */
def children(t: Tree): List[Tree] = t match {
@@ -705,7 +578,7 @@ abstract class TreeBrowsers {
case SingleType(pre, sym) =>
Document.group(
Document.nest(4, "SingleType(" :/:
- toDocument(pre) :: ", " :/: sym.name.toString() :: ")")
+ toDocument(pre) :: ", " :/: sym.name.toString :: ")")
)
case ConstantType(value) =>
@@ -715,7 +588,7 @@ abstract class TreeBrowsers {
Document.group(
Document.nest(4, "TypeRef(" :/:
toDocument(pre) :: ", " :/:
- sym.name.toString() + sym.idString :: ", " :/:
+ sym.name.toString + sym.idString :: ", " :/:
"[ " :: toDocument(args) ::"]" :: ")")
)
@@ -736,7 +609,7 @@ abstract class TreeBrowsers {
Document.group(
Document.nest(4,"ClassInfoType(" :/:
toDocument(parents) :: ", " :/:
- clazz.name.toString() + clazz.idString :: ")")
+ clazz.name.toString + clazz.idString :: ")")
)
case MethodType(params, result) =>
@@ -786,7 +659,7 @@ abstract class TreeBrowsers {
toDocument(thistpe) :/: ", " :/:
toDocument(supertpe) ::")"))
case _ =>
- sys.error("Unknown case: " + t.toString() +", "+ t.getClass)
+ sys.error("Unknown case: " + t.toString +", "+ t.getClass)
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 6c35514..9a5b92e 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
*
* @author Paul Phillips
*/
@@ -9,6 +9,7 @@ package ast
import PartialFunction._
import symtab.Flags
+import scala.language.implicitConversions
/** A DSL for generating scala code. The goal is that the
* code generating code should look a lot like the code it
@@ -27,12 +28,6 @@ trait TreeDSL {
def nullSafe[T](f: Tree => Tree, ifNull: Tree): Tree => Tree =
tree => IF (tree MEMBER_== NULL) THEN ifNull ELSE f(tree)
- // strip bindings to find what lies beneath
- final def unbind(x: Tree): Tree = x match {
- case Bind(_, y) => unbind(y)
- case y => y
- }
-
def returning[T](x: T)(f: T => Unit): T = util.returning(x)(f)
object LIT extends (Any => Literal) {
@@ -50,13 +45,15 @@ trait TreeDSL {
def NULL = LIT(null)
def UNIT = LIT(())
- object WILD {
- def apply(tpe: Type = null) =
- if (tpe == null) Ident(nme.WILDCARD)
- else Ident(nme.WILDCARD) setType tpe
+ // for those preferring boring, predictable lives, without the thrills of tree-sharing
+ // (but with the perk of typed trees)
+ def TRUE_typed = LIT(true) setType ConstantType(Constant(true))
+ def FALSE_typed = LIT(false) setType ConstantType(Constant(false))
- def unapply(other: Any) =
- cond(other) { case Ident(nme.WILDCARD) => true }
+ object WILD {
+ def empty = Ident(nme.WILDCARD)
+ def apply(tpe: Type) = Ident(nme.WILDCARD) setType tpe
+ def unapply(other: Any) = cond(other) { case Ident(nme.WILDCARD) => true }
}
def fn(lhs: Tree, op: Name, args: Tree*) = Apply(Select(lhs, op), args.toList)
@@ -77,7 +74,7 @@ trait TreeDSL {
/** Note - calling ANY_== in the matcher caused primitives to get boxed
* for the comparison, whereas looking up nme.EQ does not. See #3570 for
* an example of how target.tpe can be non-null, yet it claims not to have
- * a mmeber called nme.EQ. Not sure if that should happen, but we can be
+ * a member called nme.EQ. Not sure if that should happen, but we can be
* robust by dragging in Any regardless.
*/
def MEMBER_== (other: Tree) = {
@@ -99,8 +96,14 @@ trait TreeDSL {
def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other)
def INT_!= (other: Tree) = fn(target, getMember(IntClass, nme.NE), other)
- def BOOL_&& (other: Tree) = fn(target, getMember(BooleanClass, nme.ZAND), other)
- def BOOL_|| (other: Tree) = fn(target, getMember(BooleanClass, nme.ZOR), other)
+ // generic operations on ByteClass, IntClass, LongClass
+ def GEN_| (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.OR), other)
+ def GEN_& (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.AND), other)
+ def GEN_== (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.EQ), other)
+ def GEN_!= (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.NE), other)
+
+ def BOOL_&& (other: Tree) = fn(target, Boolean_and, other)
+ def BOOL_|| (other: Tree) = fn(target, Boolean_or, other)
/** Apply, Select, Match **/
def APPLY(params: Tree*) = Apply(target, params.toList)
@@ -123,10 +126,7 @@ trait TreeDSL {
*
* See ticket #2168 for one illustration of AS vs. AS_ANY.
*/
- def AS(tpe: Type) = TypeApply(Select(target, Any_asInstanceOf), List(TypeTree(tpe)))
- def AS_ANY(tpe: Type) = gen.mkAsInstanceOf(target, tpe)
- def AS_ATTR(tpe: Type) = gen.mkAttributedCast(target, tpe)
-
+ def AS(tpe: Type) = gen.mkAsInstanceOf(target, tpe, any = true, wrapInApply = false)
def IS(tpe: Type) = gen.mkIsInstanceOf(target, tpe, true)
def IS_OBJ(tpe: Type) = gen.mkIsInstanceOf(target, tpe, false)
@@ -214,7 +214,7 @@ trait TreeDSL {
class DefSymStart(val sym: Symbol) extends SymVODDStart with DefCreator {
def symType = sym.tpe.finalResultType
def tparams = sym.typeParams map TypeDef
- def vparamss = sym.paramss map (xs => xs map ValDef)
+ def vparamss = mapParamss(sym)(ValDef)
}
class ValSymStart(val sym: Symbol) extends SymVODDStart with ValCreator {
def symType = sym.tpe
@@ -234,7 +234,7 @@ trait TreeDSL {
}
class DefTreeStart(val name: Name) extends TreeVODDStart with DefCreator {
def tparams: List[TypeDef] = Nil
- def vparamss: List[List[ValDef]] = List(Nil)
+ def vparamss: List[List[ValDef]] = ListOfNil
}
class IfStart(cond: Tree, thenp: Tree) {
@@ -249,7 +249,7 @@ trait TreeDSL {
}
def CASE(pat: Tree): CaseStart = new CaseStart(pat, EmptyTree)
- def DEFAULT: CaseStart = new CaseStart(WILD(), EmptyTree)
+ def DEFAULT: CaseStart = new CaseStart(WILD.empty, EmptyTree)
class SymbolMethods(target: Symbol) {
def BIND(body: Tree) = Bind(target, body)
@@ -265,15 +265,11 @@ trait TreeDSL {
}
/** Top level accessible. */
- def MATCHERROR(arg: Tree) = Throw(New(TypeTree(MatchErrorClass.tpe), List(List(arg))))
- /** !!! should generalize null guard from match error here. */
- def THROW(sym: Symbol): Throw = Throw(New(TypeTree(sym.tpe), List(Nil)))
- def THROW(sym: Symbol, msg: Tree): Throw = Throw(New(TypeTree(sym.tpe), List(List(msg.TOSTRING()))))
+ def MATCHERROR(arg: Tree) = Throw(MatchErrorClass.tpe, arg)
+ def THROW(sym: Symbol, msg: Tree): Throw = Throw(sym.tpe, msg.TOSTRING())
- def NEW(tpe: Tree, args: Tree*) = New(tpe, List(args.toList))
- def NEW(sym: Symbol, args: Tree*) =
- if (args.isEmpty) New(TypeTree(sym.tpe))
- else New(TypeTree(sym.tpe), List(args.toList))
+ def NEW(tpt: Tree, args: Tree*): Tree = New(tpt, List(args.toList))
+ def NEW(sym: Symbol, args: Tree*): Tree = New(sym.tpe, args: _*)
def DEF(name: Name, tp: Type): DefTreeStart = DEF(name) withType tp
def DEF(name: Name): DefTreeStart = new DefTreeStart(name)
@@ -302,8 +298,8 @@ trait TreeDSL {
def IF(tree: Tree) = new IfStart(tree, EmptyTree)
def TRY(tree: Tree) = new TryStart(tree, Nil, EmptyTree)
def BLOCK(xs: Tree*) = Block(xs.init.toList, xs.last)
- def NOT(tree: Tree) = Select(tree, getMember(BooleanClass, nme.UNARY_!))
- def SOME(xs: Tree*) = Apply(scalaDot(nme.Some), List(makeTupleTerm(xs.toList, true)))
+ def NOT(tree: Tree) = Select(tree, Boolean_not)
+ def SOME(xs: Tree*) = Apply(SomeClass.companionSymbol, makeTupleTerm(xs.toList, true))
/** Typed trees from symbols. */
def THIS(sym: Symbol) = gen.mkAttributedThis(sym)
@@ -311,21 +307,15 @@ trait TreeDSL {
def REF(sym: Symbol) = gen.mkAttributedRef(sym)
def REF(pre: Type, sym: Symbol) = gen.mkAttributedRef(pre, sym)
- /** Some of this is basically verbatim from TreeBuilder, but we do not want
- * to get involved with him because he's an untyped only sort.
- */
- private def tupleName(count: Int, f: (String) => Name = newTermName(_: String)) =
- scalaDot(f("Tuple" + count))
-
def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
case Nil => UNIT
case List(tree) if flattenUnary => tree
- case _ => Apply(tupleName(trees.length), trees)
+ case _ => Apply(TupleClass(trees.length).companionModule, trees: _*)
}
def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
case Nil => gen.scalaUnitConstr
case List(tree) if flattenUnary => tree
- case _ => AppliedTypeTree(tupleName(trees.length, newTypeName), trees)
+ case _ => AppliedTypeTree(REF(TupleClass(trees.length)), trees)
}
/** Implicits - some of these should probably disappear **/
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 268e104..99b82d9 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -9,203 +9,17 @@ package ast
import scala.collection.mutable.ListBuffer
import symtab.Flags._
import symtab.SymbolTable
+import scala.language.postfixOps
/** XXX to resolve: TreeGen only assumes global is a SymbolTable, but
* TreeDSL at the moment expects a Global. Can we get by with SymbolTable?
*/
-abstract class TreeGen {
- val global: SymbolTable
+abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
+ val global: Global
import global._
import definitions._
- def rootId(name: Name) = Select(Ident(nme.ROOTPKG), name)
- def rootScalaDot(name: Name) = Select(rootId(nme.scala_) setSymbol ScalaPackage, name)
- def scalaDot(name: Name) = Select(Ident(nme.scala_) setSymbol ScalaPackage, name)
- def scalaAnyRefConstr = scalaDot(tpnme.AnyRef)
- def scalaUnitConstr = scalaDot(tpnme.Unit)
- def scalaScalaObjectConstr = scalaDot(tpnme.ScalaObject)
- def productConstr = scalaDot(tpnme.Product)
- def serializableConstr = scalaDot(tpnme.Serializable)
-
- def scalaFunctionConstr(argtpes: List[Tree], restpe: Tree, abstractFun: Boolean = false): Tree = {
- val cls = if (abstractFun)
- mkAttributedRef(AbstractFunctionClass(argtpes.length))
- else
- mkAttributedRef(FunctionClass(argtpes.length))
- AppliedTypeTree(cls, argtpes :+ restpe)
- }
-
- /** Builds a reference to value whose type is given stable prefix.
- * The type must be suitable for this. For example, it
- * must not be a TypeRef pointing to an abstract type variable.
- */
- def mkAttributedQualifier(tpe: Type): Tree =
- mkAttributedQualifier(tpe, NoSymbol)
-
- /** Builds a reference to value whose type is given stable prefix.
- * If the type is unsuitable, e.g. it is a TypeRef for an
- * abstract type variable, then an Ident will be made using
- * termSym as the Ident's symbol. In that case, termSym must
- * not be NoSymbol.
- */
- def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = tpe match {
- case NoPrefix =>
- EmptyTree
- case ThisType(clazz) =>
- if (clazz.isEffectiveRoot) EmptyTree
- else mkAttributedThis(clazz)
- case SingleType(pre, sym) =>
- applyIfNoArgs(mkAttributedStableRef(pre, sym))
- case TypeRef(pre, sym, args) =>
- if (sym.isRoot) {
- mkAttributedThis(sym)
- } else if (sym.isModuleClass) {
- applyIfNoArgs(mkAttributedRef(pre, sym.sourceModule))
- } else if (sym.isModule || sym.isClass) {
- assert(phase.erasedTypes, tpe)
- mkAttributedThis(sym)
- } else if (sym.isType) {
- assert(termSym != NoSymbol, tpe)
- mkAttributedIdent(termSym) setType tpe
- } else {
- mkAttributedRef(pre, sym)
- }
-
- case ConstantType(value) =>
- Literal(value) setType tpe
-
- case AnnotatedType(_, atp, _) =>
- mkAttributedQualifier(atp)
-
- case RefinedType(parents, _) =>
- // I am unclear whether this is reachable, but
- // the following implementation looks logical -Lex
- val firstStable = parents.find(_.isStable)
- assert(!firstStable.isEmpty, tpe)
- mkAttributedQualifier(firstStable.get)
-
- case _ =>
- abort("bad qualifier: " + tpe)
- }
- /** If this is a reference to a method with an empty
- * parameter list, wrap it in an apply.
- */
- private def applyIfNoArgs(qual: Tree) = qual.tpe match {
- case MethodType(Nil, restpe) => Apply(qual, Nil) setType restpe
- case _ => qual
- }
-
- /** Builds a reference to given symbol with given stable prefix. */
- def mkAttributedRef(pre: Type, sym: Symbol): Tree = {
- val qual = mkAttributedQualifier(pre)
- qual match {
- case EmptyTree => mkAttributedIdent(sym)
- case This(clazz) if qual.symbol.isEffectiveRoot => mkAttributedIdent(sym)
- case _ => mkAttributedSelect(qual, sym)
- }
- }
-
- /** Builds a reference to given symbol. */
- def mkAttributedRef(sym: Symbol): Tree =
- if (sym.owner.isClass) mkAttributedRef(sym.owner.thisType, sym)
- else mkAttributedIdent(sym)
-
- /** Builds an untyped reference to given symbol. */
- def mkUnattributedRef(sym: Symbol): Tree =
- if (sym.owner.isClass) Select(This(sym.owner), sym)
- else Ident(sym)
-
- /** Replaces tree type with a stable type if possible */
- def stabilize(tree: Tree): Tree = {
- for(tp <- stableTypeFor(tree)) tree.tpe = tp
- tree
- }
-
- /** Computes stable type for a tree if possible */
- def stableTypeFor(tree: Tree): Option[Type] = tree match {
- case Ident(_) if tree.symbol.isStable =>
- Some(singleType(tree.symbol.owner.thisType, tree.symbol))
- case Select(qual, _) if ((tree.symbol ne null) && (qual.tpe ne null)) && // turned assert into guard for #4064
- tree.symbol.isStable && qual.tpe.isStable =>
- Some(singleType(qual.tpe, tree.symbol))
- case _ =>
- None
- }
-
- /** Cast `tree' to type `pt' */
- def mkCast(tree: Tree, pt: Type): Tree = {
- if (settings.debug.value) log("casting " + tree + ":" + tree.tpe + " to " + pt)
- assert(!tree.tpe.isInstanceOf[MethodType], tree)
- assert(!pt.typeSymbol.isPackageClass)
- assert(!pt.typeSymbol.isPackageObjectClass)
- assert(pt eq pt.normalize, tree +" : "+ debugString(pt) +" ~>"+ debugString(pt.normalize)) //@MAT only called during erasure, which already takes care of that
- atPos(tree.pos)(mkAsInstanceOf(tree, pt, false))
- }
-
- /** Builds a reference with stable type to given symbol */
- def mkAttributedStableRef(pre: Type, sym: Symbol): Tree =
- stabilize(mkAttributedRef(pre, sym))
-
- def mkAttributedStableRef(sym: Symbol): Tree =
- stabilize(mkAttributedRef(sym))
-
- def mkAttributedThis(sym: Symbol): Tree =
- This(sym.name.toTypeName) setSymbol sym setType sym.thisType
-
- def mkAttributedIdent(sym: Symbol): Tree =
- Ident(sym.name) setSymbol sym setType sym.tpe
-
- def mkAttributedSelect(qual: Tree, sym: Symbol): Tree = {
- // Tests involving the repl fail without the .isEmptyPackage condition.
- if (qual.symbol != null && (qual.symbol.isEffectiveRoot || qual.symbol.isEmptyPackage))
- mkAttributedIdent(sym)
- else {
- val pkgQualifier =
- if (sym != null && sym.owner.isPackageObjectClass && sym.owner.owner == qual.tpe.typeSymbol) {
- val obj = sym.owner.sourceModule
- Select(qual, nme.PACKAGEkw) setSymbol obj setType singleType(qual.tpe, obj)
- }
- else qual
-
- val tree = Select(pkgQualifier, sym)
- if (pkgQualifier.tpe == null) tree
- else tree setType (qual.tpe memberType sym)
- }
- }
-
- private def mkTypeApply(value: Tree, tpe: Type, what: Symbol) =
- Apply(
- TypeApply(
- mkAttributedSelect(value, what),
- List(TypeTree(tpe.normalize))
- ),
- Nil
- )
- /** Builds an instance test with given value and type. */
- def mkIsInstanceOf(value: Tree, tpe: Type, any: Boolean = true): Tree =
- mkTypeApply(value, tpe, (if (any) Any_isInstanceOf else Object_isInstanceOf))
-
- /** Builds a cast with given value and type. */
- def mkAsInstanceOf(value: Tree, tpe: Type, any: Boolean = true): Tree =
- mkTypeApply(value, tpe, (if (any) Any_asInstanceOf else Object_asInstanceOf))
-
- /** Cast `tree' to 'pt', unless tpe is a subtype of pt, or pt is Unit. */
- def maybeMkAsInstanceOf(tree: Tree, pt: Type, tpe: Type, beforeRefChecks: Boolean = false): Tree =
- if ((pt == UnitClass.tpe) || (tpe <:< pt)) {
- log("no need to cast from " + tpe + " to " + pt)
- tree
- } else
- atPos(tree.pos) {
- if (beforeRefChecks)
- TypeApply(mkAttributedSelect(tree, Any_asInstanceOf), List(TypeTree(pt)))
- else
- mkAsInstanceOf(tree, pt)
- }
-
- def mkClassOf(tp: Type): Tree =
- Literal(Constant(tp)) setType ConstantType(Constant(tp))// ClassType(tp)
-
def mkCheckInit(tree: Tree): Tree = {
val tpe =
if (tree.tpe != null || !tree.hasSymbol) tree.tpe
@@ -218,52 +32,89 @@ abstract class TreeGen {
tree
}
- /** Builds a list with given head and tail. */
- def mkNewCons(head: Tree, tail: Tree): Tree =
- New(Apply(mkAttributedRef(ConsClass), List(head, tail)))
+ /** Builds a fully attributed wildcard import node.
+ */
+ def mkWildcardImport(pkg: Symbol): Import = {
+ assert(pkg ne null, this)
+ val qual = gen.mkAttributedStableRef(pkg)
+ val importSym = (
+ NoSymbol
+ newImport NoPosition
+ setFlag SYNTHETIC
+ setInfo analyzer.ImportType(qual)
+ )
+ val importTree = (
+ Import(qual, ImportSelector.wildList)
+ setSymbol importSym
+ setType NoType
+ )
+ importTree
+ }
- /** Builds a list with given head and tail. */
- def mkNil: Tree = mkAttributedRef(NilModule)
+ // wrap the given expression in a SoftReference so it can be gc-ed
+ def mkSoftRef(expr: Tree): Tree = atPos(expr.pos)(New(SoftReferenceClass.tpe, expr))
- /** Builds a tree representing an undefined local, as in
- * var x: T = _
- * which is appropriate to the given Type.
- */
- def mkZero(tp: Type): Tree = {
- val tree = tp.typeSymbol match {
- case UnitClass => Literal(())
- case BooleanClass => Literal(false)
- case FloatClass => Literal(0.0f)
- case DoubleClass => Literal(0.0d)
- case ByteClass => Literal(0.toByte)
- case ShortClass => Literal(0.toShort)
- case IntClass => Literal(0)
- case LongClass => Literal(0L)
- case CharClass => Literal(0.toChar)
- case _ =>
- if (NullClass.tpe <:< tp) Literal(null: Any)
- else abort("Cannot determine zero for " + tp)
- }
- tree setType tp
+ // annotate the expression with @unchecked
+ def mkUnchecked(expr: Tree): Tree = atPos(expr.pos) {
+ // This can't be "Annotated(New(UncheckedClass), expr)" because annotations
+ // are very picky about things and it crashes the compiler with "unexpected new".
+ Annotated(New(scalaDot(UncheckedClass.name), ListOfNil), expr)
+ }
+ // if it's a Match, mark the selector unchecked; otherwise nothing.
+ def mkUncheckedMatch(tree: Tree) = tree match {
+ case Match(selector, cases) => atPos(tree.pos)(Match(mkUnchecked(selector), cases))
+ case _ => tree
}
- /** Builds a tuple */
- def mkTuple(elems: List[Tree]): Tree =
- if (elems.isEmpty) Literal(())
- else Apply(
- Select(mkAttributedRef(TupleClass(elems.length).caseModule), nme.apply),
- elems)
+ def mkSynthSwitchSelector(expr: Tree): Tree = atPos(expr.pos) {
+ // This can't be "Annotated(New(SwitchClass), expr)" because annotations
+ // are very picky about things and it crashes the compiler with "unexpected new".
+ Annotated(Ident(nme.synthSwitch), expr)
+ }
- // tree1 AND tree2
- def mkAnd(tree1: Tree, tree2: Tree): Tree =
- Apply(Select(tree1, Boolean_and), List(tree2))
+ // TODO: would be so much nicer if we would know during match-translation (i.e., type checking)
+ // whether we should emit missingCase-style apply (and isDefinedAt), instead of transforming trees post-factum
+ class MatchMatcher {
+ def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = unknownTree(orig)
+ def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = unknownTree(orig)
+ def caseVirtualizedMatchOpt(orig: Tree, prologue: List[Tree], cases: List[Tree], matchEndDef: Tree, wrap: Tree => Tree): Tree = unknownTree(orig)
+
+ def genVirtualizedMatch(prologue: List[Tree], cases: List[Tree], matchEndDef: Tree): Tree = Block(prologue ++ cases, matchEndDef)
+
+ def apply(matchExpr: Tree): Tree = matchExpr match {
+ // old-style match or virtpatmat switch
+ case Match(selector, cases) => // println("simple match: "+ (selector, cases) + "for:\n"+ matchExpr )
+ caseMatch(matchExpr, selector, cases, identity)
+ // old-style match or virtpatmat switch
+ case Block((vd: ValDef) :: Nil, orig at Match(selector, cases)) => // println("block match: "+ (selector, cases, vd) + "for:\n"+ matchExpr )
+ caseMatch(matchExpr, selector, cases, m => copyBlock(matchExpr, List(vd), m))
+ // virtpatmat
+ case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if opt.virtPatmat => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr )
+ caseVirtualizedMatch(matchExpr, tgt, targs, scrut, matcher)
+ // optimized version of virtpatmat
+ case Block(stats, matchEndDef) if opt.virtPatmat && (stats forall treeInfo.hasSynthCaseSymbol) =>
+ // the assumption is once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, identity)
+ // optimized version of virtpatmat
+ case Block(outerStats, orig at Block(stats, matchEndDef)) if opt.virtPatmat && (stats forall treeInfo.hasSynthCaseSymbol) =>
+ val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, m => copyBlock(matchExpr, outerStats, m))
+ case other =>
+ unknownTree(other)
+ }
- // tree1 OR tree2
- def mkOr(tree1: Tree, tree2: Tree): Tree =
- Apply(Select(tree1, Boolean_or), List(tree2))
+ def unknownTree(t: Tree): Tree = throw new MatchError(t)
+ def copyBlock(orig: Tree, stats: List[Tree], expr: Tree): Block = Block(stats, expr)
- // wrap the given expression in a SoftReference so it can be gc-ed
- def mkSoftRef(expr: Tree): Tree = New(TypeTree(SoftReferenceClass.tpe), List(List(expr)))
+ def dropSyntheticCatchAll(cases: List[CaseDef]): List[CaseDef] =
+ if (!opt.virtPatmat) cases
+ else cases filter {
+ case CaseDef(pat, EmptyTree, Throw(Apply(Select(New(exTpt), nme.CONSTRUCTOR), _))) if (treeInfo.isWildcardArg(pat) && (exTpt.tpe.typeSymbol eq MatchErrorClass)) => false
+ case CaseDef(pat, guard, body) => true
+ }
+ }
def mkCached(cvar: Symbol, expr: Tree): Tree = {
val cvarRef = mkUnattributedRef(cvar)
@@ -283,17 +134,17 @@ abstract class TreeGen {
}
def mkModuleVarDef(accessor: Symbol) = {
+ val inClass = accessor.owner.isClass
+ val extraFlags = if (inClass) PrivateLocal | SYNTHETIC else 0
+
val mval = (
- accessor.owner.newVariable(accessor.pos.focus, nme.moduleVarName(accessor.name))
- setInfo accessor.tpe.finalResultType
- setFlag (MODULEVAR)
+ accessor.owner.newVariable(nme.moduleVarName(accessor.name), accessor.pos.focus, MODULEVAR | extraFlags)
+ setInfo accessor.tpe.finalResultType
+ addAnnotation VolatileAttr
)
+ if (inClass)
+ mval.owner.info.decls enter mval
- mval.addAnnotation(AnnotationInfo(VolatileAttr.tpe, Nil, Nil))
- if (mval.owner.isClass) {
- mval setFlag (PRIVATE | LOCAL | SYNTHETIC)
- mval.owner.info.decls.enter(mval)
- }
ValDef(mval)
}
@@ -305,41 +156,71 @@ abstract class TreeGen {
def mkModuleAccessDef(accessor: Symbol, msym: Symbol) =
DefDef(accessor, Select(This(msym.owner), msym))
- def newModule(accessor: Symbol, tpe: Type) =
- New(TypeTree(tpe),
- List(for (pt <- tpe.typeSymbol.primaryConstructor.info.paramTypes)
- yield This(accessor.owner.enclClass)))
+ def newModule(accessor: Symbol, tpe: Type) = {
+ val ps = tpe.typeSymbol.primaryConstructor.info.paramTypes
+ if (ps.isEmpty) New(tpe)
+ else New(tpe, This(accessor.owner.enclClass))
+ }
// def m: T;
def mkModuleAccessDcl(accessor: Symbol) =
DefDef(accessor setFlag lateDEFERRED, EmptyTree)
def mkRuntimeCall(meth: Name, args: List[Tree]): Tree =
- Apply(Select(mkAttributedRef(ScalaRunTimeModule), meth), args)
+ mkRuntimeCall(meth, Nil, args)
def mkRuntimeCall(meth: Name, targs: List[Type], args: List[Tree]): Tree =
- Apply(TypeApply(Select(mkAttributedRef(ScalaRunTimeModule), meth), targs map TypeTree), args)
+ mkMethodCall(ScalaRunTimeModule, meth, targs, args)
+
+ def mkSysErrorCall(message: String): Tree =
+ mkMethodCall(Sys_error, List(Literal(Constant(message))))
+
+ /** A creator for a call to a scala.reflect.Manifest or ClassManifest factory method.
+ *
+ * @param full full or partial manifest (target will be Manifest or ClassManifest)
+ * @param constructor name of the factory method (e.g. "classType")
+ * @param tparg the type argument
+ * @param args value arguments
+ * @return the tree
+ */
+ def mkManifestFactoryCall(full: Boolean, constructor: String, tparg: Type, args: List[Tree]): Tree =
+ mkMethodCall(
+ if (full) FullManifestModule else PartialManifestModule,
+ newTermName(constructor),
+ List(tparg),
+ args
+ )
/** Make a synchronized block on 'monitor'. */
def mkSynchronized(monitor: Tree, body: Tree): Tree =
Apply(Select(monitor, Object_synchronized), List(body))
+ def mkAppliedTypeForCase(clazz: Symbol): Tree = {
+ val numParams = clazz.typeParams.size
+ if (clazz.typeParams.isEmpty) Ident(clazz)
+ else AppliedTypeTree(Ident(clazz), 1 to numParams map (_ => Bind(tpnme.WILDCARD, EmptyTree)) toList)
+ }
+ def mkBindForCase(patVar: Symbol, clazz: Symbol, targs: List[Type]): Tree = {
+ Bind(patVar, Typed(Ident(nme.WILDCARD),
+ if (targs.isEmpty) mkAppliedTypeForCase(clazz)
+ else AppliedTypeTree(Ident(clazz), targs map TypeTree)
+ ))
+ }
+ def mkSuperSelect = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
+
def wildcardStar(tree: Tree) =
atPos(tree.pos) { Typed(tree, Ident(tpnme.WILDCARD_STAR)) }
- def paramToArg(vparam: Symbol) = {
- val arg = Ident(vparam)
- if (isRepeatedParamType(vparam.tpe)) wildcardStar(arg)
- else arg
- }
+ def paramToArg(vparam: Symbol): Tree =
+ paramToArg(Ident(vparam), isRepeatedParamType(vparam.tpe))
- def paramToArg(vparam: ValDef) = {
- val arg = Ident(vparam.name)
- if (treeInfo.isRepeatedParamType(vparam.tpt)) wildcardStar(arg)
- else arg
- }
+ def paramToArg(vparam: ValDef): Tree =
+ paramToArg(Ident(vparam.name), treeInfo.isRepeatedParamType(vparam.tpt))
+
+ def paramToArg(arg: Ident, isRepeatedParam: Boolean): Tree =
+ if (isRepeatedParam) wildcardStar(arg) else arg
- /** Make forwarder to method `target', passing all parameters in `params' */
+ /** Make forwarder to method `target`, passing all parameters in `params` */
def mkForwarder(target: Tree, vparamss: List[List[Symbol]]) =
(target /: vparamss)((fn, vparams) => Apply(fn, vparams map paramToArg))
@@ -348,27 +229,63 @@ abstract class TreeGen {
* apply the element type directly.
*/
def mkWrapArray(tree: Tree, elemtp: Type) = {
- val sym = elemtp.typeSymbol
- val meth: Name =
- if (isValueClass(sym)) "wrap"+sym.name+"Array"
- else if ((elemtp <:< AnyRefClass.tpe) && !isPhantomClass(sym)) "wrapRefArray"
- else "genericWrapArray"
-
- if (isValueClass(sym))
- Apply(Select(mkAttributedRef(PredefModule), meth), List(tree))
- else
- Apply(TypeApply(Select(mkAttributedRef(PredefModule), meth), List(TypeTree(elemtp))), List(tree))
+ mkMethodCall(
+ PredefModule,
+ wrapArrayMethodName(elemtp),
+ if (isPrimitiveValueType(elemtp)) Nil else List(elemtp),
+ List(tree)
+ )
}
+ /** Cast `tree` to type `pt` by creating
+ * one of the calls of the form
+ *
+ * x.asInstanceOf[`pt`] up to phase uncurry
+ * x.asInstanceOf[`pt`]() if after uncurry but before erasure
+ * x.$asInstanceOf[`pt`]() if at or after erasure
+ */
+ def mkCast(tree: Tree, pt: Type): Tree = {
+ debuglog("casting " + tree + ":" + tree.tpe + " to " + pt + " at phase: " + phase)
+ assert(!tree.tpe.isInstanceOf[MethodType], tree)
+ assert(pt eq pt.normalize, tree +" : "+ debugString(pt) +" ~>"+ debugString(pt.normalize))
+ atPos(tree.pos) {
+ mkAsInstanceOf(tree, pt, any = !phase.next.erasedTypes, wrapInApply = isAtPhaseAfter(currentRun.uncurryPhase))
+ }
+ }
+
+ // drop annotations generated by CPS plugin etc, since its annotationchecker rejects T @cps[U] <: Any
+ // let's assume for now annotations don't affect casts, drop them there, and bring them back using the outer Typed tree
+ def mkCastPreservingAnnotations(tree: Tree, pt: Type) =
+ Typed(mkCast(tree, pt.withoutAnnotations.dealias), TypeTree(pt))
+
/** Generate a cast for tree Tree representing Array with
* elem type elemtp to expected type pt.
*/
def mkCastArray(tree: Tree, elemtp: Type, pt: Type) =
- if (elemtp.typeSymbol == AnyClass && isValueClass(tree.tpe.typeArgs.head.typeSymbol))
- mkCast(mkRuntimeCall("toObjectArray", List(tree)), pt)
+ if (elemtp.typeSymbol == AnyClass && isPrimitiveValueType(tree.tpe.typeArgs.head))
+ mkCast(mkRuntimeCall(nme.toObjectArray, List(tree)), pt)
else
mkCast(tree, pt)
+ def mkZeroContravariantAfterTyper(tp: Type): Tree = {
+ // contravariant -- for replacing an argument in a method call
+ // must use subtyping, as otherwise we miss types like `Any with Int`
+ val tree =
+ if (NullClass.tpe <:< tp) Literal(Constant(null))
+ else if (UnitClass.tpe <:< tp) Literal(Constant())
+ else if (BooleanClass.tpe <:< tp) Literal(Constant(false))
+ else if (FloatClass.tpe <:< tp) Literal(Constant(0.0f))
+ else if (DoubleClass.tpe <:< tp) Literal(Constant(0.0d))
+ else if (ByteClass.tpe <:< tp) Literal(Constant(0.toByte))
+ else if (ShortClass.tpe <:< tp) Literal(Constant(0.toShort))
+ else if (IntClass.tpe <:< tp) Literal(Constant(0))
+ else if (LongClass.tpe <:< tp) Literal(Constant(0L))
+ else if (CharClass.tpe <:< tp) Literal(Constant(0.toChar))
+ else mkCast(Literal(Constant(null)), tp)
+
+ tree
+ }
+
/** Translate names in Select/Ident nodes to type names.
*/
def convertToTypeName(tree: Tree): Option[RefTree] = tree match {
@@ -384,18 +301,29 @@ abstract class TreeGen {
case _ => EmptyTree
}
+ /** Create a ValDef initialized to the given expression, setting the
+ * symbol to its packed type, and an function for creating Idents
+ * which refer to it.
+ */
+ private def mkPackedValDef(expr: Tree, owner: Symbol, name: Name): (ValDef, () => Ident) = {
+ val packedType = typer.packedType(expr, owner)
+ val sym = owner.newValue(name, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType
+
+ (ValDef(sym, expr), () => Ident(sym) setPos sym.pos.focus setType expr.tpe)
+ }
+
/** Used in situations where you need to access value of an expression several times
*/
def evalOnce(expr: Tree, owner: Symbol, unit: CompilationUnit)(within: (() => Tree) => Tree): Tree = {
var used = false
- if (treeInfo.isPureExpr(expr)) {
+ if (treeInfo.isExprSafeToInline(expr)) {
within(() => if (used) expr.duplicate else { used = true; expr })
- } else {
- val temp = owner.newValue(expr.pos.makeTransparent, unit.freshTermName("ev$"))
- .setFlag(SYNTHETIC).setInfo(expr.tpe)
- val containing = within(() => Ident(temp) setPos temp.pos.focus setType expr.tpe)
+ }
+ else {
+ val (valDef, identFn) = mkPackedValDef(expr, owner, unit.freshTermName("ev$"))
+ val containing = within(identFn)
ensureNonOverlapping(containing, List(expr))
- Block(List(ValDef(temp, expr)), containing) setPos (containing.pos union expr.pos)
+ Block(List(valDef), containing) setPos (containing.pos union expr.pos)
}
}
@@ -405,16 +333,16 @@ abstract class TreeGen {
val used = new Array[Boolean](exprs.length)
var i = 0
for (expr <- exprs) {
- if (treeInfo.isPureExpr(expr)) {
+ if (treeInfo.isExprSafeToInline(expr)) {
exprs1 += {
val idx = i
() => if (used(idx)) expr.duplicate else { used(idx) = true; expr }
}
- } else {
- val temp = owner.newValue(expr.pos.makeTransparent, unit.freshTermName("ev$"))
- .setFlag(SYNTHETIC).setInfo(expr.tpe)
- vdefs += ValDef(temp, expr)
- exprs1 += (() => Ident(temp) setPos temp.pos.focus setType expr.tpe)
+ }
+ else {
+ val (valDef, identFn) = mkPackedValDef(expr, owner, unit.freshTermName("ev$"))
+ vdefs += valDef
+ exprs1 += identFn
}
i += 1
}
@@ -425,23 +353,19 @@ abstract class TreeGen {
else Block(prefix, containing) setPos (prefix.head.pos union containing.pos)
}
- /** Return a double-checked locking idiom around the syncBody tree. It guards with 'cond' and
- * synchronizez on 'clazz.this'. Additional statements can be included after initialization,
+ /** Return the synchronized part of the double-checked locking idiom around the syncBody tree. It guards with `cond` and
+ * synchronizez on `clazz.this`. Additional statements can be included after initialization,
* (outside the synchronized block).
*
* The idiom works only if the condition is using a volatile field.
* @see http://www.cs.umd.edu/~pugh/java/memoryModel/DoubleCheckedLocking.html
*/
- def mkDoubleCheckedLocking(clazz: Symbol, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree =
- mkDoubleCheckedLocking(mkAttributedThis(clazz), cond, syncBody, stats)
-
- def mkDoubleCheckedLocking(attrThis: Tree, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree = {
- If(cond,
- Block(
- mkSynchronized(
- attrThis,
- If(cond, Block(syncBody: _*), EmptyTree)) ::
- stats: _*),
- EmptyTree)
- }
+ def mkSynchronizedCheck(clazz: Symbol, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree =
+ mkSynchronizedCheck(mkAttributedThis(clazz), cond, syncBody, stats)
+
+ def mkSynchronizedCheck(attrThis: Tree, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree =
+ Block(mkSynchronized(
+ attrThis,
+ If(cond, Block(syncBody: _*), EmptyTree)) ::
+ stats: _*)
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index 730d005..cbbb4c8 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -1,394 +1,48 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package ast
-import symtab.Flags._
-import symtab.SymbolTable
-import util.HashSet
+import scala.reflect.internal.HasFlags
+import scala.reflect.internal.Flags._
+import symtab._
/** This class ...
*
* @author Martin Odersky
* @version 1.0
*/
-abstract class TreeInfo {
- val trees: SymbolTable
- import trees._
- import definitions.ThrowableClass
-
- def isOwnerDefinition(tree: Tree): Boolean = tree match {
- case PackageDef(_, _)
- | ClassDef(_, _, _, _)
- | ModuleDef(_, _, _)
- | DefDef(_, _, _, _, _, _)
- | Import(_, _) => true
- case _ => false
- }
+abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
+ val global: Global
+ import global._
- def isDefinition(tree: Tree): Boolean = tree.isDef
-
- def isDeclaration(tree: Tree): Boolean = tree match {
- case DefDef(_, _, _, _, _, EmptyTree)
- | ValDef(_, _, _, EmptyTree)
- | TypeDef(_, _, _, _) => true
- case _ => false
- }
+ import definitions.ThrowableClass
/** Is tree legal as a member definition of an interface?
*/
- def isInterfaceMember(tree: Tree): Boolean = tree match {
- case EmptyTree => true
- case Import(_, _) => true
- case TypeDef(_, _, _, _) => true
- case DefDef(mods, _, _, _, _, __) => mods.isDeferred
- case ValDef(mods, _, _, _) => mods.isDeferred
+ override def isInterfaceMember(tree: Tree): Boolean = tree match {
case DocDef(_, definition) => isInterfaceMember(definition)
- case _ => false
+ case _ => super.isInterfaceMember(tree)
}
/** Is tree a pure (i.e. non-side-effecting) definition?
*/
- def isPureDef(tree: Tree): Boolean = tree match {
- case EmptyTree
- | ClassDef(_, _, _, _)
- | TypeDef(_, _, _, _)
- | Import(_, _)
- | DefDef(_, _, _, _, _, _) =>
- true
- case ValDef(mods, _, _, rhs) =>
- !mods.isMutable && isPureExpr(rhs)
- case DocDef(_, definition) =>
- isPureDef(definition)
- case _ =>
- false
- }
-
- /** Is tree a stable and pure expression?
- */
- def isPureExpr(tree: Tree): Boolean = tree match {
- case EmptyTree
- | This(_)
- | Super(_, _)
- | Literal(_) =>
- true
- case Ident(_) =>
- tree.symbol.isStable
- case Select(qual, _) =>
- tree.symbol.isStable && isPureExpr(qual)
- case TypeApply(fn, _) =>
- isPureExpr(fn)
- case Apply(fn, List()) =>
- /* Note: After uncurry, field accesses are represented as Apply(getter, Nil),
- * so an Apply can also be pure.
- * However, before typing, applications of nullary functional values are also
- * Apply(function, Nil) trees. To prevent them from being treated as pure,
- * we check that the callee is a method. */
- fn.symbol.isMethod && !fn.symbol.isLazy && isPureExpr(fn)
- case Typed(expr, _) =>
- isPureExpr(expr)
- case Block(stats, expr) =>
- (stats forall isPureDef) && isPureExpr(expr)
- case _ =>
- false
- }
-
- def mayBeVarGetter(sym: Symbol): Boolean = sym.info match {
- case NullaryMethodType(_) => sym.owner.isClass && !sym.isStable
- case PolyType(_, NullaryMethodType(_)) => sym.owner.isClass && !sym.isStable
- case mt @ MethodType(_, _) => mt.isImplicit && sym.owner.isClass && !sym.isStable
- case _ => false
- }
-
- def isVariableOrGetter(tree: Tree) = {
- def sym = tree.symbol
- def isVar = sym.isVariable
- def isGetter = mayBeVarGetter(sym) && sym.owner.info.member(nme.getterToSetter(sym.name)) != NoSymbol
-
- tree match {
- case Ident(_) => isVar
- case Select(_, _) => isVar || isGetter
- case _ =>
- methPart(tree) match {
- case Select(qual, nme.apply) => qual.tpe.member(nme.update) != NoSymbol
- case _ => false
- }
- }
- }
-
- /** Is tree a self constructor call?
- */
- def isSelfConstrCall(tree: Tree): Boolean = methPart(tree) match {
- case Ident(nme.CONSTRUCTOR)
- | Select(This(_), nme.CONSTRUCTOR) => true
- case _ => false
- }
-
- def isSuperConstrCall(tree: Tree): Boolean = methPart(tree) match {
- case Select(Super(_, _), nme.CONSTRUCTOR) => true
- case _ => false
- }
-
- def isSelfOrSuperConstrCall(tree: Tree) =
- isSelfConstrCall(tree) || isSuperConstrCall(tree)
-
- /** Is tree a variable pattern */
- def isVarPattern(pat: Tree): Boolean = pat match {
- case _: BackQuotedIdent => false
- case x: Ident => isVariableName(x.name)
- case _ => false
- }
-
- /** The first constructor definitions in `stats' */
- def firstConstructor(stats: List[Tree]): Tree = stats find {
- case x: DefDef => nme.isConstructorName(x.name)
- case _ => false
- } getOrElse EmptyTree
-
- /** The arguments to the first constructor in `stats'. */
- def firstConstructorArgs(stats: List[Tree]): List[Tree] = firstConstructor(stats) match {
- case DefDef(_, _, _, args :: _, _, _) => args
- case _ => Nil
- }
-
- /** The value definitions marked PRESUPER in this statement sequence */
- def preSuperFields(stats: List[Tree]): List[ValDef] =
- stats collect { case vd: ValDef if isEarlyValDef(vd) => vd }
-
- def isEarlyDef(tree: Tree) = tree match {
- case TypeDef(mods, _, _, _) => mods hasFlag PRESUPER
- case ValDef(mods, _, _, _) => mods hasFlag PRESUPER
- case _ => false
- }
-
- def isEarlyValDef(tree: Tree) = tree match {
- case ValDef(mods, _, _, _) => mods hasFlag PRESUPER
- case _ => false
- }
-
- def isEarlyTypeDef(tree: Tree) = tree match {
- case TypeDef(mods, _, _, _) => mods hasFlag PRESUPER
- case _ => false
- }
-
- /** Is tpt of the form T* ? */
- def isRepeatedParamType(tpt: Tree) = tpt match {
- case TypeTree() => definitions.isRepeatedParamType(tpt.tpe)
- case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS_NAME), _) => true
- case AppliedTypeTree(Select(_, tpnme.JAVA_REPEATED_PARAM_CLASS_NAME), _) => true
- case _ => false
- }
- /** The parameter ValDefs from a def of the form T*. */
- def repeatedParams(tree: Tree): List[ValDef] = tree match {
- case DefDef(_, _, _, vparamss, _, _) => vparamss.flatten filter (vd => isRepeatedParamType(vd.tpt))
- case _ => Nil
- }
-
- /** Is tpt a by-name parameter type? */
- def isByNameParamType(tpt: Tree) = tpt match {
- case TypeTree() => definitions.isByNameParamType(tpt.tpe)
- case AppliedTypeTree(Select(_, tpnme.BYNAME_PARAM_CLASS_NAME), _) => true
- case _ => false
- }
-
- /** Is name a left-associative operator? */
- def isLeftAssoc(operator: Name) = operator.nonEmpty && (operator.endChar != ':')
-
- private val reserved = Set[Name](nme.false_, nme.true_, nme.null_)
-
- /** Is name a variable name? */
- def isVariableName(name: Name): Boolean = {
- val first = name(0)
- ((first.isLower && first.isLetter) || first == '_') && !reserved(name)
- }
-
- /** Is tree a this node which belongs to `enclClass'? */
- def isSelf(tree: Tree, enclClass: Symbol): Boolean = tree match {
- case This(_) => tree.symbol == enclClass
- case _ => false
- }
-
- /** can this type be a type pattern */
- def mayBeTypePat(tree: Tree): Boolean = tree match {
- case CompoundTypeTree(Template(tps, _, Nil)) => tps exists mayBeTypePat
- case Annotated(_, tp) => mayBeTypePat(tp)
- case AppliedTypeTree(constr, args) => mayBeTypePat(constr) || args.exists(_.isInstanceOf[Bind])
- case SelectFromTypeTree(tp, _) => mayBeTypePat(tp)
- case _ => false
- }
-
- /** Is this argument node of the form <expr> : _* ?
- */
- def isWildcardStarArg(tree: Tree): Boolean = tree match {
- case Typed(_, Ident(tpnme.WILDCARD_STAR)) => true
- case _ => false
+ override def isPureDef(tree: Tree): Boolean = tree match {
+ case DocDef(_, definition) => isPureDef(definition)
+ case _ => super.isPureDef(tree)
}
- def isWildcardStarArgList(trees: List[Tree]) =
- trees.nonEmpty && isWildcardStarArg(trees.last)
- /** Is the argument a (possibly bound) _ arg?
+ /** Does list of trees start with a definition of
+ * a class of module with given name (ignoring imports)
*/
- def isWildcardArg(tree: Tree): Boolean = unbind(tree) match {
- case Ident(nme.WILDCARD) => true
- case _ => false
- }
-
- /** Is this pattern node a catch-all (wildcard or variable) pattern? */
- def isDefaultCase(cdef: CaseDef) = cdef match {
- case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat)
- case _ => false
- }
-
- /** Does this CaseDef catch Throwable? */
- def catchesThrowable(cdef: CaseDef) = catchesAllOf(cdef, ThrowableClass.tpe)
-
- /** Does this CaseDef catch everything of a certain Type? */
- def catchesAllOf(cdef: CaseDef, threshold: Type) =
- isDefaultCase(cdef) || (cdef.guard.isEmpty && (unbind(cdef.pat) match {
- case Typed(Ident(nme.WILDCARD), tpt) => (tpt.tpe != null) && (threshold <:< tpt.tpe)
- case _ => false
- }))
-
- /** Is this pattern node a catch-all or type-test pattern? */
- def isCatchCase(cdef: CaseDef) = cdef match {
- case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) =>
- isSimpleThrowable(tpt.tpe)
- case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) =>
- isSimpleThrowable(tpt.tpe)
- case _ =>
- isDefaultCase(cdef)
- }
-
- private def isSimpleThrowable(tp: Type): Boolean = tp match {
- case TypeRef(pre, sym, args) =>
- (pre == NoPrefix || pre.widen.typeSymbol.isStatic) &&
- (sym isNonBottomSubClass ThrowableClass) && /* bq */ !sym.isTrait
- case _ =>
- false
- }
-
- /* If we have run-time types, and these are used for pattern matching,
- we should replace this by something like:
-
- tp match {
- case TypeRef(pre, sym, args) =>
- args.isEmpty && (sym.owner.isPackageClass || isSimple(pre))
- case NoPrefix =>
- true
- case _ =>
- false
- }
-*/
-
- /** Is this pattern node a sequence-valued pattern? */
- def isSequenceValued(tree: Tree): Boolean = unbind(tree) match {
- case Alternative(ts) => ts exists isSequenceValued
- case ArrayValue(_, _) | Star(_) => true
- case _ => false
- }
-
- /** The underlying pattern ignoring any bindings */
- def unbind(x: Tree): Tree = x match {
- case Bind(_, y) => unbind(y)
- case y => y
- }
-
- /** Is this tree a Star(_) after removing bindings? */
- def isStar(x: Tree) = unbind(x) match {
- case Star(_) => true
- case _ => false
- }
-
- /** The method part of an application node
- */
- def methPart(tree: Tree): Tree = tree match {
- case Apply(fn, _) => methPart(fn)
- case TypeApply(fn, _) => methPart(fn)
- case AppliedTypeTree(fn, _) => methPart(fn)
- case _ => tree
- }
-
- def firstArgument(tree: Tree): Tree = tree match {
- case Apply(fn, args) =>
- val f = firstArgument(fn)
- if (f == EmptyTree && !args.isEmpty) args.head else f
- case _ =>
- EmptyTree
- }
-
- /** Top-level definition sequence contains a leading import of
- * <code>Predef</code> or <code>scala.Predef</code>.
- */
- def containsLeadingPredefImport(defs: List[Tree]): Boolean = defs match {
- case List(PackageDef(_, defs1)) =>
- containsLeadingPredefImport(defs1)
- case Import(Ident(nme.Predef), _) :: _ =>
- true
- case Import(Select(Ident(nme.scala_), nme.Predef), _) :: _ =>
- true
- case Import(_, _) :: defs1 =>
- containsLeadingPredefImport(defs1)
- case _ =>
- false
- }
-
- /** Compilation unit is class or object 'name' in package 'scala'
- */
- def isUnitInScala(tree: Tree, name: Name) = tree match {
- case PackageDef(Ident(nme.scala_), defs) => isImplDef(defs, name)
- case _ => false
- }
-
- private def isImplDef(trees: List[Tree], name: Name): Boolean = trees match {
- case Import(_, _) :: xs => isImplDef(xs, name)
- case DocDef(_, tree1) :: Nil => isImplDef(List(tree1), name)
- case Annotated(_, tree1) :: Nil => isImplDef(List(tree1), name)
- case ModuleDef(_, `name`, _) :: Nil => true
+ override def firstDefinesClassOrObject(trees: List[Tree], name: Name): Boolean = trees match {
case ClassDef(_, `name`, _, _) :: Nil => true
- case _ => false
- }
-
- def isAbsTypeDef(tree: Tree) = tree match {
- case TypeDef(_, _, _, TypeBoundsTree(_, _)) => true
- case TypeDef(_, _, _, rhs) => rhs.tpe.isInstanceOf[TypeBounds]
- case _ => false
- }
-
- def isAliasTypeDef(tree: Tree) = tree match {
- case TypeDef(_, _, _, _) => !isAbsTypeDef(tree)
- case _ => false
+ case _ => super.firstDefinesClassOrObject(trees, name)
}
- /** Some handy extractors for spotting trees through the
- * the haze of irrelevant braces: i.e. Block(Nil, SomeTree)
- * should not keep us from seeing SomeTree.
- */
- abstract class SeeThroughBlocks[T] {
- protected def unapplyImpl(x: Tree): T
- def unapply(x: Tree): T = x match {
- case Block(Nil, expr) => unapply(expr)
- case _ => unapplyImpl(x)
- }
- }
- object IsTrue extends SeeThroughBlocks[Boolean] {
- protected def unapplyImpl(x: Tree): Boolean = x match {
- case Literal(Constant(true)) => true
- case _ => false
- }
- }
- object IsFalse extends SeeThroughBlocks[Boolean] {
- protected def unapplyImpl(x: Tree): Boolean = x match {
- case Literal(Constant(false)) => true
- case _ => false
- }
- }
- object IsIf extends SeeThroughBlocks[Option[(Tree, Tree, Tree)]] {
- protected def unapplyImpl(x: Tree) = x match {
- case If(cond, thenp, elsep) => Some(cond, thenp, elsep)
- case _ => None
- }
- }
+ def isInterface(mods: HasFlags, body: List[Tree]) =
+ mods.isTrait && (body forall isInterfaceMember)
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala b/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
deleted file mode 100644
index 96f4355..0000000
--- a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
+++ /dev/null
@@ -1,708 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package ast
-
-import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
-import symtab.Flags._
-import symtab.SymbolTable
-
-trait TreePrinters { trees: SymbolTable =>
-
- import treeInfo.{ IsTrue, IsFalse }
-
- final val showOuterTests = false
-
- /** Adds backticks if the name is a scala keyword. */
- def quotedName(name: Name, decode: Boolean): String = {
- val s = if (decode) name.decode else name.toString
- val term = name.toTermName
- if (nme.keywords(term) && term != nme.USCOREkw) "`%s`" format s
- else s
- }
- def quotedName(name: Name): String = quotedName(name, false)
-
- /** Turns a path into a String, introducing backquotes
- * as necessary.
- */
- def backquotedPath(t: Tree): String = t match {
- case Select(qual, name) => "%s.%s".format(backquotedPath(qual), quotedName(name))
- case Ident(name) => quotedName(name)
- case _ => t.toString
- }
-
- class TreePrinter(out: PrintWriter) extends trees.AbsTreePrinter(out) {
- protected var indentMargin = 0
- protected val indentStep = 2
- protected var indentString = " " // 40
-
- def flush() = out.flush()
-
- def indent() = indentMargin += indentStep
- def undent() = indentMargin -= indentStep
-
- protected def doPrintPositions = settings.Xprintpos.value
- def printPosition(tree: Tree) = if (doPrintPositions) print(tree.pos.show)
-
- def println() {
- out.println()
- while (indentMargin > indentString.length())
- indentString += indentString
- if (indentMargin > 0)
- out.write(indentString, 0, indentMargin)
- }
-
- def printSeq[a](ls: List[a])(printelem: a => Unit)(printsep: => Unit) {
- ls match {
- case List() =>
- case List(x) => printelem(x)
- case x :: rest => printelem(x); printsep; printSeq(rest)(printelem)(printsep)
- }
- }
-
- def printColumn(ts: List[Tree], start: String, sep: String, end: String) {
- print(start); indent; println()
- printSeq(ts){print}{print(sep); println()}; undent; println(); print(end)
- }
-
- def printRow(ts: List[Tree], start: String, sep: String, end: String) {
- print(start); printSeq(ts){print}{print(sep)}; print(end)
- }
-
- def printRow(ts: List[Tree], sep: String) { printRow(ts, "", sep, "") }
-
- def printTypeParams(ts: List[TypeDef]) {
- if (!ts.isEmpty) {
- print("["); printSeq(ts){ t =>
- printAnnotations(t)
- printParam(t)
- }{print(", ")}; print("]")
- }
- }
-
- def printValueParams(ts: List[ValDef]) {
- print("(")
- if (!ts.isEmpty) printFlags(ts.head.mods.flags & IMPLICIT, "")
- printSeq(ts){printParam}{print(", ")}
- print(")")
- }
-
- def printParam(tree: Tree) {
- tree match {
- case ValDef(mods, name, tp, rhs) =>
- printPosition(tree)
- printAnnotations(tree)
- print(symName(tree, name)); printOpt(": ", tp); printOpt(" = ", rhs)
- case TypeDef(mods, name, tparams, rhs) =>
- printPosition(tree)
- print(symName(tree, name))
- printTypeParams(tparams); print(rhs)
- }
- }
-
- def printBlock(tree: Tree) {
- tree match {
- case Block(_, _) =>
- print(tree)
- case _ =>
- printColumn(List(tree), "{", ";", "}")
- }
- }
-
- private def symFn[T](tree: Tree, f: Symbol => T, orElse: => T): T = tree.symbol match {
- case null | NoSymbol => orElse
- case sym => f(sym)
- }
- private def ifSym(tree: Tree, p: Symbol => Boolean) = symFn(tree, p, false)
-
- private def symNameInternal(tree: Tree, name: Name, decoded: Boolean): String = {
- def nameFn(sym: Symbol) = {
- val prefix = if (sym.isMixinConstructor) "/*%s*/".format(quotedName(sym.owner.name, decoded)) else ""
- prefix + tree.symbol.nameString
- }
- symFn(tree, nameFn, quotedName(name, decoded))
- }
-
- def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, true)
- def symName(tree: Tree, name: Name) = symNameInternal(tree, name, false)
-
- def printOpt(prefix: String, tree: Tree) {
- if (!tree.isEmpty) { print(prefix); print(tree) }
- }
-
- def printModifiers(tree: Tree, mods: Modifiers): Unit = printFlags(
- if (tree.symbol == NoSymbol) mods.flags else tree.symbol.flags, "" + (
- if (tree.symbol == NoSymbol) mods.privateWithin
- else if (tree.symbol.hasAccessBoundary) tree.symbol.privateWithin.name
- else ""
- )
- )
-
- def printFlags(flags: Long, privateWithin: String) {
- var mask: Long = if (settings.debug.value) -1L else PrintableFlags
- val s = flagsToString(flags & mask, privateWithin)
- if (s != "") print(s + " ")
- }
-
- def printAnnotations(tree: Tree) {
- val annots =
- if (tree.symbol.hasAssignedAnnotations) tree.symbol.annotations
- else tree.asInstanceOf[MemberDef].mods.annotations
-
- annots foreach (annot => print("@"+annot+" "))
- }
-
- def print(str: String) { out.print(str) }
- def print(name: Name) { print(quotedName(name)) }
-
- private var currentOwner: Symbol = NoSymbol
- private var selectorType: Type = NoType
-
- def printRaw(tree: Tree) {
- tree match {
- case EmptyTree =>
- print("<empty>")
-
- case ClassDef(mods, name, tparams, impl) =>
- printAnnotations(tree)
- printModifiers(tree, mods)
- val word =
- if (mods.hasTraitFlag) "trait"
- else if (ifSym(tree, _.isModuleClass)) "object"
- else "class"
-
- print(word + " " + symName(tree, name))
- printTypeParams(tparams)
- print(if (mods.isDeferred) " <: " else " extends "); print(impl)
-
- case PackageDef(packaged, stats) =>
- printAnnotations(tree)
- print("package "); print(packaged); printColumn(stats, " {", ";", "}")
-
- case ModuleDef(mods, name, impl) =>
- printAnnotations(tree)
- printModifiers(tree, mods); print("object " + symName(tree, name))
- print(" extends "); print(impl)
-
- case ValDef(mods, name, tp, rhs) =>
- printAnnotations(tree)
- printModifiers(tree, mods)
- print(if (mods.isMutable) "var " else "val ")
- print(symName(tree, name))
- printOpt(": ", tp)
- if (!mods.isDeferred) {
- print(" = ")
- if (rhs.isEmpty) print("_") else print(rhs)
- }
-
- case DefDef(mods, name, tparams, vparamss, tp, rhs) =>
- printAnnotations(tree)
- printModifiers(tree, mods)
- print("def " + symName(tree, name))
- printTypeParams(tparams); vparamss foreach printValueParams
- printOpt(": ", tp); printOpt(" = ", rhs)
-
- case TypeDef(mods, name, tparams, rhs) =>
- if (mods hasFlag (PARAM | DEFERRED)) {
- printAnnotations(tree)
- printModifiers(tree, mods); print("type "); printParam(tree)
- } else {
- printAnnotations(tree)
- printModifiers(tree, mods); print("type " + symName(tree, name))
- printTypeParams(tparams); printOpt(" = ", rhs)
- }
-
- case LabelDef(name, params, rhs) =>
- print(symName(tree, name)); printRow(params, "(", ",", ")"); printBlock(rhs)
-
- case Import(expr, selectors) =>
- // Is this selector remapping a name (i.e, {name1 => name2})
- def isNotRemap(s: ImportSelector) : Boolean = (s.name == nme.WILDCARD || s.name == s.rename)
- def selectorToString(s: ImportSelector): String = {
- val from = quotedName(s.name)
- if (isNotRemap(s)) from
- else from + "=>" + quotedName(s.rename)
- }
- print("import "); print(backquotedPath(expr))
- print(".")
- selectors match {
- case List(s) =>
- // If there is just one selector and it is not remapping a name, no braces are needed
- if (isNotRemap(s)) {
- print(selectorToString(s))
- } else {
- print("{"); print(selectorToString(s)); print("}")
- }
- // If there is more than one selector braces are always needed
- case many =>
- print(many.map(selectorToString).mkString("{", ", ", "}"))
- }
-
- case DocDef(comment, definition) =>
- print(comment.raw); println(); print(definition)
-
- case Template(parents, self, body) =>
- val currentOwner1 = currentOwner
- if (tree.symbol != NoSymbol) currentOwner = tree.symbol.owner
- printRow(parents, " with ")
- if (!body.isEmpty) {
- if (self.name != nme.WILDCARD) {
- print(" { "); print(self.name); printOpt(": ", self.tpt); print(" => ")
- } else if (!self.tpt.isEmpty) {
- print(" { _ : "); print(self.tpt); print(" => ")
- } else {
- print(" {")
- }
- printColumn(body, "", ";", "}")
- }
- currentOwner = currentOwner1
-
- case Block(stats, expr) =>
- printColumn(stats ::: List(expr), "{", ";", "}")
-
- case Match(selector, cases) =>
- val selectorType1 = selectorType
- selectorType = selector.tpe
- print(selector); printColumn(cases, " match {", "", "}")
- selectorType = selectorType1
-
- case CaseDef(pat, guard, body) =>
- print("case ")
- def patConstr(pat: Tree): Tree = pat match {
- case Apply(fn, args) => patConstr(fn)
- case _ => pat
- }
- if (showOuterTests &&
- needsOuterTest(
- patConstr(pat).tpe.finalResultType, selectorType, currentOwner))
- print("???")
- print(pat); printOpt(" if ", guard)
- print(" => "); print(body)
-
- case Alternative(trees) =>
- printRow(trees, "(", "| ", ")")
-
- case Star(elem) =>
- print("("); print(elem); print(")*")
-
- case Bind(name, t) =>
- print("("); print(symName(tree, name)); print(" @ "); print(t); print(")")
-
- case UnApply(fun, args) =>
- print(fun); print(" <unapply> "); printRow(args, "(", ", ", ")")
-
- case ArrayValue(elemtpt, trees) =>
- print("Array["); print(elemtpt); printRow(trees, "]{", ", ", "}")
-
- case Function(vparams, body) =>
- print("("); printValueParams(vparams); print(" => "); print(body); print(")")
- if (settings.uniqid.value && tree.symbol != null) print("#"+tree.symbol.id)
-
- case Assign(lhs, rhs) =>
- print(lhs); print(" = "); print(rhs)
-
- case AssignOrNamedArg(lhs, rhs) =>
- print(lhs); print(" = "); print(rhs)
-
- case If(cond, thenp, elsep) =>
- print("if ("); print(cond); print(")"); indent; println()
- print(thenp); undent
- if (!elsep.isEmpty) {
- println(); print("else"); indent; println(); print(elsep); undent
- }
-
- case Return(expr) =>
- print("return "); print(expr)
-
- case Try(block, catches, finalizer) =>
- print("try "); printBlock(block)
- if (!catches.isEmpty) printColumn(catches, " catch {", "", "}")
- printOpt(" finally ", finalizer)
-
- case Throw(expr) =>
- print("throw "); print(expr)
-
- case New(tpe) =>
- print("new "); print(tpe)
-
- case Typed(expr, tp) =>
- print("("); print(expr); print(": "); print(tp); print(")")
-
- case TypeApply(fun, targs) =>
- print(fun); printRow(targs, "[", ", ", "]")
-
- case Apply(fun, vargs) =>
- print(fun); printRow(vargs, "(", ", ", ")")
-
- case ApplyDynamic(qual, vargs) =>
- print("<apply-dynamic>("); print(qual); print("#"); print(tree.symbol.nameString)
- printRow(vargs, ", (", ", ", "))")
-
- case Super(This(qual), mix) =>
- if (!qual.isEmpty || tree.symbol != NoSymbol) print(symName(tree, qual) + ".")
- print("super")
- if (!mix.isEmpty)
- print("[" + mix + "]")
-
- case Super(qual, mix) =>
- print(qual)
- print(".super")
- if (!mix.isEmpty)
- print("[" + mix + "]")
-
- case This(qual) =>
- if (!qual.isEmpty) print(symName(tree, qual) + ".")
- print("this")
-
- case Select(qual @ New(tpe), name) if (!settings.debug.value) =>
- print(qual)
-
- case Select(qualifier, name) =>
- print(backquotedPath(qualifier)); print("."); print(symName(tree, name))
-
- case Ident(name) =>
- print(symName(tree, name))
-
- case Literal(x) =>
- print(x.escapedStringValue)
-
- case tt: TypeTree =>
- if ((tree.tpe eq null) || (settings.Xprintpos.value && tt.original != null)) {
- if (tt.original != null) { print("<type: "); print(tt.original); print(">") }
- else print("<type ?>")
- } else if ((tree.tpe.typeSymbol ne null) && tree.tpe.typeSymbol.isAnonymousClass) {
- print(tree.tpe.typeSymbol.toString())
- } else {
- print(tree.tpe.toString())
- }
-
- case Annotated(Apply(Select(New(tpt), nme.CONSTRUCTOR), args), tree) =>
- def printAnnot() {
- print("@"); print(tpt)
- if (!args.isEmpty)
- printRow(args, "(", ",", ")")
- }
- if (tree.isType) { print(tree); print(" "); printAnnot() }
- else { print(tree); print(": "); printAnnot() }
-
- case SingletonTypeTree(ref) =>
- print(ref); print(".type")
-
- case SelectFromTypeTree(qualifier, selector) =>
- print(qualifier); print("#"); print(symName(tree, selector))
-
- case CompoundTypeTree(templ) =>
- print(templ)
-
- case AppliedTypeTree(tp, args) =>
- print(tp); printRow(args, "[", ", ", "]")
-
- case TypeBoundsTree(lo, hi) =>
- printOpt(" >: ", lo); printOpt(" <: ", hi)
-
- case ExistentialTypeTree(tpt, whereClauses) =>
- print(tpt);
- printColumn(whereClauses, " forSome { ", ";", "}")
-
- case SelectFromArray(qualifier, name, _) =>
- print(qualifier); print(".<arr>"); print(symName(tree, name))
-
- case TypeTreeWithDeferredRefCheck() =>
- print("<tree with deferred refcheck>")
-
- case tree =>
- print("<unknown tree of class "+tree.getClass+">")
- }
- if (settings.printtypes.value && tree.isTerm && !tree.isEmpty) {
- print("{"); print(if (tree.tpe eq null) "<null>" else tree.tpe.toString()); print("}")
- }
- }
-
- def print(tree: Tree) {
- printPosition(tree)
- printRaw(
- if (tree.isDef && tree.symbol != NoSymbol && tree.symbol.isInitialized) {
- tree match {
- case ClassDef(_, _, _, impl @ Template(ps, emptyValDef, body))
- if (tree.symbol.thisSym != tree.symbol) =>
- ClassDef(tree.symbol, Template(ps, ValDef(tree.symbol.thisSym), body))
- case ClassDef(_, _, _, impl) => ClassDef(tree.symbol, impl)
- case ModuleDef(_, _, impl) => ModuleDef(tree.symbol, impl)
- case ValDef(_, _, _, rhs) => ValDef(tree.symbol, rhs)
- case DefDef(_, _, _, vparamss, _, rhs) => DefDef(tree.symbol, vparamss, rhs)
- case TypeDef(_, _, _, rhs) => TypeDef(tree.symbol, rhs)
- case _ => tree
- }
- } else tree)
- }
-
- def print(unit: CompilationUnit) {
- print("// Scala source: " + unit.source + "\n")
- if (unit.body == null) print("<null>")
- else { print(unit.body); println() }
-
- println()
- flush()
- }
- }
-
- /** A tree printer which is stingier about vertical whitespace and unnecessary
- * punctuation than the standard one.
- */
- class CompactTreePrinter(out: PrintWriter) extends TreePrinter(out) {
- override def printRow(ts: List[Tree], start: String, sep: String, end: String) {
- print(start)
- printSeq(ts)(print)(print(sep))
- print(end)
- }
-
- // drill down through Blocks and pull out the real statements.
- def allStatements(t: Tree): List[Tree] = t match {
- case Block(stmts, expr) => (stmts flatMap allStatements) ::: List(expr)
- case _ => List(t)
- }
-
- def printLogicalOr(t1: (Tree, Boolean), t2: (Tree, Boolean)) =
- printLogicalOp(t1, t2, "||")
-
- def printLogicalAnd(t1: (Tree, Boolean), t2: (Tree, Boolean)) =
- printLogicalOp(t1, t2, "&&")
-
- def printLogicalOp(t1: (Tree, Boolean), t2: (Tree, Boolean), op: String) = {
- def maybenot(tvalue: Boolean) = if (tvalue) "" else "!"
-
- print("%s(" format maybenot(t1._2))
- printRaw(t1._1)
- print(") %s %s(".format(op, maybenot(t2._2)))
- printRaw(t2._1)
- print(")")
- }
-
- override def printRaw(tree: Tree): Unit = {
- // routing supercalls through this for debugging ease
- def s() = super.printRaw(tree)
-
- tree match {
- // labels used for jumps - does not map to valid scala code
- case LabelDef(name, params, rhs) =>
- print("labeldef %s(%s) = ".format(name, params mkString ","))
- printRaw(rhs)
-
- case Ident(name) =>
- print(decodedSymName(tree, name))
-
- // target.method(arg) ==> target method arg
- case Apply(Select(target, method), List(arg)) =>
- if (method.decode.toString == "||")
- printLogicalOr(target -> true, arg -> true)
- else if (method.decode.toString == "&&")
- printLogicalAnd(target -> true, arg -> true)
- else (target, arg) match {
- case (_: Ident, _: Literal | _: Ident) =>
- printRaw(target)
- print(" ")
- printRaw(Ident(method))
- print(" ")
- printRaw(arg)
- case _ => s()
- }
-
- // target.unary_! ==> !target
- case Select(qualifier, name) if (name.decode startsWith "unary_") =>
- print(name.decode drop 6)
- printRaw(qualifier)
-
- case Select(qualifier, name) =>
- printRaw(qualifier)
- print(".")
- print(quotedName(name, true))
-
- // target.toString() ==> target.toString
- case Apply(fn, Nil) => printRaw(fn)
-
- // if a Block only continues one actual statement, just print it.
- case Block(stats, expr) =>
- allStatements(tree) match {
- case List(x) => printRaw(x)
- case xs => s()
- }
-
- // We get a lot of this stuff
- case If( IsTrue(), x, _) => printRaw(x)
- case If(IsFalse(), _, x) => printRaw(x)
-
- case If(cond, IsTrue(), elsep) => printLogicalOr(cond -> true, elsep -> true)
- case If(cond, IsFalse(), elsep) => printLogicalAnd(cond -> false, elsep -> true)
- case If(cond, thenp, IsTrue()) => printLogicalOr(cond -> false, thenp -> true)
- case If(cond, thenp, IsFalse()) => printLogicalAnd(cond -> true, thenp -> true)
-
- // If thenp or elsep has only one statement, it doesn't need more than one line.
- case If(cond, thenp, elsep) =>
- def ifIndented(x: Tree) = {
- indent ; println() ; printRaw(x) ; undent
- }
-
- val List(thenStmts, elseStmts) = List(thenp, elsep) map allStatements
- print("if ("); print(cond); print(") ")
-
- thenStmts match {
- case List(x: If) => ifIndented(x)
- case List(x) => printRaw(x)
- case _ => printRaw(thenp)
- }
-
- if (elseStmts.nonEmpty) {
- print(" else")
- indent ; println()
- elseStmts match {
- case List(x) => printRaw(x)
- case _ => printRaw(elsep)
- }
- undent ; println()
- }
- case _ => s()
- }
- }
- }
-
- /** This must guarantee not to force any evaluation, so we can learn
- * a little bit about trees in the midst of compilation without altering
- * the natural course of events.
- */
- class SafeTreePrinter(out: PrintWriter) extends TreePrinter(out) {
- override def print(tree: Tree) {
- printPosition(tree)
- printRaw(tree)
- }
- private def default(t: Tree) = t.getClass.getName.reverse.takeWhile(_ != '.').reverse
- private def params(trees: List[Tree]): String = trees map safe mkString ", "
-
- private def safe(name: Name): String = name.decode
- private def safe(tree: Tree): String = tree match {
- case Apply(fn, args) => "%s(%s)".format(safe(fn), params(args))
- case Select(qual, name) => safe(qual) + "." + safe(name)
- case This(qual) => safe(qual) + ".this"
- case Ident(name) => safe(name)
- case Literal(value) => value.stringValue
- case _ => "(?: %s)".format(default(tree))
- }
-
- override def printRaw(tree: Tree) { print(safe(tree)) }
- }
-
- class TreeMatchTemplate {
- // non-trees defined in Trees
- //
- // case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
- // case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position])
- //
- def apply(t: Tree): Unit = t match {
- // eliminated by typer
- case Annotated(annot, arg) =>
- case AssignOrNamedArg(lhs, rhs) =>
- case DocDef(comment, definition) =>
- case Import(expr, selectors) =>
-
- // eliminated by refchecks
- case ModuleDef(mods, name, impl) =>
- case TypeTreeWithDeferredRefCheck() =>
-
- // eliminated by erasure
- case TypeDef(mods, name, tparams, rhs) =>
- case Typed(expr, tpt) =>
-
- // eliminated by cleanup
- case ApplyDynamic(qual, args) =>
-
- // eliminated by explicitouter
- case Alternative(trees) =>
- case Bind(name, body) =>
- case CaseDef(pat, guard, body) =>
- case Star(elem) =>
- case UnApply(fun, args) =>
-
- // eliminated by lambdalift
- case Function(vparams, body) =>
-
- // eliminated by uncurry
- case AppliedTypeTree(tpt, args) =>
- case CompoundTypeTree(templ) =>
- case ExistentialTypeTree(tpt, whereClauses) =>
- case SelectFromTypeTree(qual, selector) =>
- case SingletonTypeTree(ref) =>
- case TypeBoundsTree(lo, hi) =>
-
- // survivors
- case Apply(fun, args) =>
- case ArrayValue(elemtpt, trees) =>
- case Assign(lhs, rhs) =>
- case Block(stats, expr) =>
- case ClassDef(mods, name, tparams, impl) =>
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- case EmptyTree =>
- case Ident(name) =>
- case If(cond, thenp, elsep) =>
- case LabelDef(name, params, rhs) =>
- case Literal(value) =>
- case Match(selector, cases) =>
- case New(tpt) =>
- case PackageDef(pid, stats) =>
- case Return(expr) =>
- case Select(qualifier, selector) =>
- case Super(qual, mix) =>
- case Template(parents, self, body) =>
- case This(qual) =>
- case Throw(expr) =>
- case Try(block, catches, finalizer) =>
- case TypeApply(fun, args) =>
- case TypeTree() =>
- case ValDef(mods, name, tpt, rhs) =>
-
- // missing from the Trees comment
- case Parens(args) => // only used during parsing
- case SelectFromArray(qual, name, erasure) => // only used during erasure
- }
- }
-
- private def asStringInternal(t: Tree, f: PrintWriter => TreePrinter): String = {
- val buffer = new StringWriter()
- val printer = f(new PrintWriter(buffer))
- printer.print(t)
- printer.flush()
- buffer.toString
- }
- def asString(t: Tree): String = asStringInternal(t, newStandardTreePrinter)
- def asCompactString(t: Tree): String = asStringInternal(t, newCompactTreePrinter)
-
- def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
- def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream))
- def newStandardTreePrinter(): TreePrinter = newStandardTreePrinter(new PrintWriter(ConsoleWriter))
-
- def newCompactTreePrinter(writer: PrintWriter): CompactTreePrinter = new CompactTreePrinter(writer)
- def newCompactTreePrinter(stream: OutputStream): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(stream))
- def newCompactTreePrinter(): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(ConsoleWriter))
-
- def newTreePrinter(writer: PrintWriter): TreePrinter =
- if (settings.Ycompacttrees.value) newCompactTreePrinter(writer)
- else newStandardTreePrinter(writer)
- def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream))
- def newTreePrinter(): TreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter))
-
- /** A writer that writes to the current Console and
- * is sensitive to replacement of the Console's
- * output stream.
- */
- object ConsoleWriter extends Writer {
- override def write(str: String) { Console.print(str) }
-
- def write(cbuf: Array[Char], off: Int, len: Int) {
- write(new String(cbuf, off, len))
- }
-
- def close = { /* do nothing */ }
- def flush = { /* do nothing */ }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 6d7b430..0a12737 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -1,217 +1,71 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package ast
-import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.symtab.SymbolTable
-import scala.tools.nsc.symtab.Flags._
-import scala.tools.nsc.util.{ FreshNameCreator, HashSet, SourceFile }
+import scala.reflect.internal.Flags.BYNAMEPARAM
+import scala.reflect.internal.Flags.DEFAULTPARAM
+import scala.reflect.internal.Flags.IMPLICIT
+import scala.reflect.internal.Flags.PARAM
+import scala.reflect.internal.Flags.PARAMACCESSOR
+import scala.reflect.internal.Flags.PRESUPER
+import scala.reflect.internal.Flags.TRAIT
+import scala.compat.Platform.EOL
-trait Trees extends reflect.generic.Trees { self: SymbolTable =>
+trait Trees extends scala.reflect.internal.Trees { self: Global =>
- trait CompilationUnitTrait {
- var body: Tree
- val source: SourceFile
- def fresh : FreshNameCreator
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- }
-
- type CompilationUnit <: CompilationUnitTrait
-
- protected def flagsIntoString(flags: Long, privateWithin: String): String = flagsToString(flags, privateWithin)
-
- // sub-components --------------------------------------------------
-
- lazy val treePrinter = newTreePrinter()
-
- object treeInfo extends {
- val trees: Trees.this.type = Trees.this
- } with TreeInfo
-
- val treeCopy = new LazyTreeCopier()
-
- implicit def treeWrapper(tree: Tree): TreeOps = new TreeOps(tree)
-
- class TreeOps(tree: Tree) {
- def isTerm: Boolean = tree match {
- case _: TermTree => true
- case Bind(name, _) => name.isTermName
- case Select(_, name) => name.isTermName
- case Ident(name) => name.isTermName
- case Annotated(_, arg) => arg.isTerm
- case DocDef(_, defn) => defn.isTerm
- case _ => false
- }
-
- def isType: Boolean = tree match {
- case _: TypTree => true
- case Bind(name, _) => name.isTypeName
- case Select(_, name) => name.isTypeName
- case Ident(name) => name.isTypeName
- case Annotated(_, arg) => arg.isType
- case DocDef(_, defn) => defn.isType
- case _ => false
- }
-
- def isErroneous = (tree.tpe ne null) && tree.tpe.isErroneous
- def isTyped = (tree.tpe ne null) && !tree.tpe.isErroneous
-
- /** Apply `f' to each subtree */
- def foreach(f: Tree => Unit) { new ForeachTreeTraverser(f).traverse(tree) }
+ def treeLine(t: Tree): String =
+ if (t.pos.isDefined && t.pos.isRange) t.pos.lineContent.drop(t.pos.column - 1).take(t.pos.end - t.pos.start + 1)
+ else t.summaryString
- /** If 'pf' is defined for a given subtree, call super.traverse(pf(tree)),
- * otherwise super.traverse(tree).
- */
- def foreachPartial(pf: PartialFunction[Tree, Tree]) { new ForeachPartialTreeTraverser(pf).traverse(tree) }
+ def treeStatus(t: Tree, enclosingTree: Tree = null) = {
+ val parent = if (enclosingTree eq null) " " else " P#%5s".format(enclosingTree.id)
- /** Find all subtrees matching predicate `p' */
- def filter(f: Tree => Boolean): List[Tree] = {
- val ft = new FilterTreeTraverser(f)
- ft.traverse(tree)
- ft.hits.toList
- }
-
- /** Returns optionally first tree (in a preorder traversal) which satisfies predicate `p',
- * or None if none exists.
- */
- def find(p: Tree => Boolean): Option[Tree] = {
- val ft = new FindTreeTraverser(p)
- ft.traverse(tree)
- ft.result
- }
-
- def changeOwner(pairs: (Symbol, Symbol)*): Tree = {
- pairs.foldLeft(tree) { case (t, (oldOwner, newOwner)) =>
- new ChangeOwnerTraverser(oldOwner, newOwner) apply t
- }
- }
-
- /** Is there part of this tree which satisfies predicate `p'? */
- def exists(p: Tree => Boolean): Boolean = !find(p).isEmpty
-
- def equalsStructure(that : Tree) = equalsStructure0(that)(_ eq _)
- def equalsStructure0(that: Tree)(f: (Tree,Tree) => Boolean): Boolean =
- f(tree, that) || ((tree.productArity == that.productArity) && {
- def equals0(this0: Any, that0: Any): Boolean = (this0, that0) match {
- case (x: Tree, y: Tree) => f(x, y) || (x equalsStructure0 y)(f)
- case (xs: List[_], ys: List[_]) => (xs corresponds ys)(equals0)
- case _ => this0 == that0
- }
- def compareOriginals() = (tree, that) match {
- case (x: TypeTree, y: TypeTree) if x.original != null && y.original != null =>
- (x.original equalsStructure0 y.original)(f)
- case _ =>
- true
- }
-
- (tree.productIterator zip that.productIterator forall { case (x, y) => equals0(x, y) }) && compareOriginals()
- })
-
- def shallowDuplicate: Tree = new ShallowDuplicator(tree) transform tree
- def shortClass: String = tree.getClass.getName split "[.$]" last
+ "[L%4s%8s] #%-6s %-15s %-10s // %s".format(t.pos.safeLine, parent, t.id, t.pos.show, t.shortClass, treeLine(t))
}
-
- private[scala] override def duplicateTree(tree: Tree): Tree = duplicator transform tree
-
-// ---- values and creators ---------------------------------------
-
- /** @param sym the class symbol
- * @return the implementation template
- */
- def ClassDef(sym: Symbol, impl: Template): ClassDef =
- atPos(sym.pos) {
- ClassDef(Modifiers(sym.flags),
- sym.name.toTypeName,
- sym.typeParams map TypeDef,
- impl) setSymbol sym
- }
-
- /** Construct class definition with given class symbol, value parameters,
- * supercall arguments and template body.
- *
- * @param sym the class symbol
- * @param constrMods the modifiers for the class constructor, i.e. as in `class C private (...)'
- * @param vparamss the value parameters -- if they have symbols they
- * should be owned by `sym'
- * @param argss the supercall arguments
- * @param body the template statements without primary constructor
- * and value parameter fields.
- */
- def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): ClassDef =
- ClassDef(sym,
- Template(sym.info.parents map TypeTree,
- if (sym.thisSym == sym || phase.erasedTypes) emptyValDef else ValDef(sym.thisSym),
- constrMods, vparamss, argss, body, superPos))
-
- /**
- * @param sym the class symbol
- * @param impl the implementation template
- */
- def ModuleDef(sym: Symbol, impl: Template): ModuleDef =
- atPos(sym.pos) {
- ModuleDef(Modifiers(sym.flags), sym.name, impl) setSymbol sym
- }
-
- def ValDef(sym: Symbol, rhs: Tree): ValDef =
- atPos(sym.pos) {
- ValDef(Modifiers(sym.flags), sym.name,
- TypeTree(sym.tpe) setPos sym.pos.focus,
- rhs) setSymbol sym
- }
-
- def ValDef(sym: Symbol): ValDef = ValDef(sym, EmptyTree)
-
- object emptyValDef extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) {
- override def isEmpty = true
- super.setPos(NoPosition)
- override def setPos(pos: Position) = { assert(false); this }
+ def treeSymStatus(t: Tree) = {
+ val line = if (t.pos.isDefined) "line %-4s".format(t.pos.safeLine) else " "
+ "#%-5s %s %-10s // %s".format(t.id, line, t.shortClass,
+ if (t.symbol ne NoSymbol) "(" + t.symbol.fullLocationString + ")"
+ else treeLine(t)
+ )
}
- def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
- atPos(sym.pos) {
- assert(sym != NoSymbol)
- DefDef(Modifiers(sym.flags),
- sym.name,
- sym.typeParams map TypeDef,
- vparamss,
- TypeTree(sym.tpe.finalResultType) setPos sym.pos.focus,
- rhs) setSymbol sym
- }
-
- def DefDef(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
- DefDef(sym, Modifiers(sym.flags), vparamss, rhs)
+ // --- additional cases --------------------------------------------------------
+ /** Only used during parsing */
+ case class Parens(args: List[Tree]) extends Tree
- def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef =
- DefDef(sym, mods, sym.paramss map (_.map(ValDef)), rhs)
+ /** Documented definition, eliminated by analyzer */
+ case class DocDef(comment: DocComment, definition: Tree)
+ extends Tree {
+ override def symbol: Symbol = definition.symbol
+ override def symbol_=(sym: Symbol) { definition.symbol = sym }
+ override def isDef = definition.isDef
+ override def isTerm = definition.isTerm
+ override def isType = definition.isType
+ }
- def DefDef(sym: Symbol, rhs: Tree): DefDef =
- DefDef(sym, Modifiers(sym.flags), rhs)
+ /** Array selection `<qualifier> . <name>` only used during erasure */
+ case class SelectFromArray(qualifier: Tree, name: Name, erasure: Type)
+ extends RefTree with TermTree
- def DefDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef = {
- DefDef(sym, rhs(sym.info.paramss))
- }
+ /** Derived value class injection (equivalent to: `new C(arg)` after erasure); only used during erasure.
+ * The class `C` is stored as a tree attachment.
+ */
+ case class InjectDerivedValue(arg: Tree)
+ extends SymTree with TermTree
- /** A TypeDef node which defines given `sym' with given tight hand side `rhs'. */
- def TypeDef(sym: Symbol, rhs: Tree): TypeDef =
- atPos(sym.pos) {
- TypeDef(Modifiers(sym.flags), sym.name.toTypeName, sym.typeParams map TypeDef, rhs) setSymbol sym
- }
+ class PostfixSelect(qual: Tree, name: Name) extends Select(qual, name)
- /** A TypeDef node which defines abstract type or type parameter for given `sym' */
- def TypeDef(sym: Symbol): TypeDef =
- TypeDef(sym, TypeBoundsTree(TypeTree(sym.info.bounds.lo), TypeTree(sym.info.bounds.hi)))
+ /** emitted by typer, eliminated by refchecks */
+ case class TypeTreeWithDeferredRefCheck()(val check: () => TypeTree) extends TypTree
- def LabelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef =
- atPos(sym.pos) {
- LabelDef(sym.name, params map Ident, rhs) setSymbol sym
- }
+ // --- factory methods ----------------------------------------------------------
- /** Generates a template with constructor corresponding to
+ /** Generates a template with constructor corresponding to
*
* constrmods (vparams1_) ... (vparams_n) preSuper { presupers }
* extends superclass(args_1) ... (args_n) with mixins { self => body }
@@ -232,932 +86,331 @@ trait Trees extends reflect.generic.Trees { self: SymbolTable =>
/* Add constructor to template */
// create parameters for <init> as synthetic trees.
- var vparamss1 =
- vparamss map (vps => vps.map { vd =>
- atPos(vd.pos.focus) {
- ValDef(
- Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM | BYNAMEPARAM) | PARAM | PARAMACCESSOR) withAnnotations vd.mods.annotations,
- vd.name, vd.tpt.duplicate, vd.rhs.duplicate)
- }})
+ var vparamss1 = mmap(vparamss) { vd =>
+ atPos(vd.pos.focus) {
+ val mods = Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM | BYNAMEPARAM) | PARAM | PARAMACCESSOR)
+ ValDef(mods withAnnotations vd.mods.annotations, vd.name, vd.tpt.duplicate, vd.rhs.duplicate)
+ }
+ }
val (edefs, rest) = body span treeInfo.isEarlyDef
val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
- val (lvdefs, gvdefs) = evdefs map {
- case vdef @ ValDef(mods, name, tpt, rhs) =>
- val fld = treeCopy.ValDef(
- vdef.duplicate, mods, name,
- atPos(vdef.pos.focus) { TypeTree() setOriginal tpt setPos tpt.pos.focus }, // atPos in case
- EmptyTree)
- val local = treeCopy.ValDef(vdef, Modifiers(PRESUPER), name, tpt, rhs)
- (local, fld)
- } unzip
+ val gvdefs = evdefs map {
+ case vdef @ ValDef(_, _, tpt, _) =>
+ copyValDef(vdef)(
+ // atPos for the new tpt is necessary, since the original tpt might have no position
+ // (when missing type annotation for ValDef for example), so even though setOriginal modifies the
+ // position of TypeTree, it would still be NoPosition. That's what the author meant.
+ tpt = atPos(vdef.pos.focus)(TypeTree() setOriginal tpt setPos tpt.pos.focus),
+ rhs = EmptyTree
+ )
+ }
+ val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = vdef.mods | PRESUPER) }
val constrs = {
if (constrMods hasFlag TRAIT) {
if (body forall treeInfo.isInterfaceMember) List()
else List(
atPos(wrappingPos(superPos, lvdefs)) (
- DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, List(), List(List()), TypeTree(), Block(lvdefs, Literal(())))))
+ DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, List(), ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant())))))
} else {
// convert (implicit ... ) to ()(implicit ... ) if its the only parameter section
if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
vparamss1 = List() :: vparamss1;
- val superRef: Tree = atPos(superPos) {
- Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
- }
- val superCall = (superRef /: argss) (Apply)
+ val superRef: Tree = atPos(superPos)(gen.mkSuperSelect)
+ val superCall = (superRef /: argss) (Apply.apply)
List(
atPos(wrappingPos(superPos, lvdefs ::: argss.flatten)) (
- DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(())))))
+ DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant())))))
}
}
- // println("typed template, gvdefs = "+gvdefs+", parents = "+parents+", constrs = "+constrs)
- constrs foreach (ensureNonOverlapping(_, parents ::: gvdefs))
- // vparamss2 are used as field definitions for the class. remove defaults
- val vparamss2 = vparamss map (vps => vps map { vd =>
- treeCopy.ValDef(vd, vd.mods &~ DEFAULTPARAM, vd.name, vd.tpt, EmptyTree)
- })
- Template(parents, self, gvdefs ::: vparamss2.flatten ::: constrs ::: etdefs ::: rest)
- }
-
- /** casedef shorthand */
- def CaseDef(pat: Tree, body: Tree): CaseDef = CaseDef(pat, EmptyTree, body)
-
- def Bind(sym: Symbol, body: Tree): Bind =
- Bind(sym.name, body) setSymbol sym
-
+ constrs foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus=false))
+ // Field definitions for the class - remove defaults.
+ val fieldDefs = vparamss.flatten map (vd => copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree))
- /** Factory method for object creation `new tpt(args_1)...(args_n)`
- * A `New(t, as)` is expanded to: `(new t).<init>(as)`
- */
- def New(tpt: Tree, argss: List[List[Tree]]): Tree = {
- assert(!argss.isEmpty)
- val superRef: Tree = Select(New(tpt), nme.CONSTRUCTOR)
- (superRef /: argss) (Apply)
+ Template(parents, self, gvdefs ::: fieldDefs ::: constrs ::: etdefs ::: rest)
}
- def Super(sym: Symbol, mix: TypeName): Tree = Super(This(sym), mix)
-
- def This(sym: Symbol): Tree = This(sym.name.toTypeName) setSymbol sym
-
- def Select(qualifier: Tree, sym: Symbol): Select =
- Select(qualifier, sym.name) setSymbol sym
-
- def Ident(sym: Symbol): Ident =
- Ident(sym.name) setSymbol sym
-
- /** Block factory that flattens directly nested blocks.
+ /** Construct class definition with given class symbol, value parameters,
+ * supercall arguments and template body.
+ *
+ * @param sym the class symbol
+ * @param constrMods the modifiers for the class constructor, i.e. as in `class C private (...)`
+ * @param vparamss the value parameters -- if they have symbols they
+ * should be owned by `sym`
+ * @param argss the supercall arguments
+ * @param body the template statements without primary constructor
+ * and value parameter fields.
*/
- def Block(stats: Tree*): Block = stats match {
- case Seq(b @ Block(_, _)) => b
- case Seq(stat) => Block(stats.toList, Literal(Constant(())))
- case Seq(_, rest @ _*) => Block(stats.init.toList, stats.last)
- }
-
- /** A synthetic term holding an arbitrary type. Not to be confused with
- * with TypTree, the trait for trees that are only used for type trees.
- * TypeTree's are inserted in several places, but most notably in
- * <code>RefCheck</code>, where the arbitrary type trees are all replaced by
- * TypeTree's. */
- case class TypeTree() extends AbsTypeTree {
- private var orig: Tree = null
- private[Trees] var wasEmpty: Boolean = false
-
- def original: Tree = orig
- def setOriginal(tree: Tree): this.type = {
- def followOriginal(t: Tree): Tree = t match {
- case tt: TypeTree => followOriginal(tt.original)
- case t => t
- }
-
- orig = followOriginal(tree); setPos(tree.pos);
- this
- }
+ def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): ClassDef = {
+ // "if they have symbols they should be owned by `sym`"
+ assert(
+ mforall(vparamss)(p => (p.symbol eq NoSymbol) || (p.symbol.owner == sym)),
+ ((mmap(vparamss)(_.symbol), sym))
+ )
- override def defineType(tp: Type): this.type = {
- wasEmpty = isEmpty
- setType(tp)
- }
- }
-
- object TypeTree extends TypeTreeExtractor
-
- def TypeTree(tp: Type): TypeTree = TypeTree() setType tp
-
- /** Documented definition, eliminated by analyzer */
- case class DocDef(comment: DocComment, definition: Tree)
- extends Tree {
- override def symbol: Symbol = definition.symbol
- override def symbol_=(sym: Symbol) { definition.symbol = sym }
- // sean: seems to be important to the IDE
- override def isDef = definition.isDef
+ ClassDef(sym,
+ Template(sym.info.parents map TypeTree,
+ if (sym.thisSym == sym || phase.erasedTypes) emptyValDef else ValDef(sym.thisSym),
+ constrMods, vparamss, argss, body, superPos))
}
- /** Either an assignment or a named argument. Only appears in argument lists,
- * eliminated by typecheck (doTypedApply)
- */
- case class AssignOrNamedArg(lhs: Tree, rhs: Tree)
- extends TermTree
-
- case class Parens(args: List[Tree]) extends Tree // only used during parsing
+ // --- subcomponents --------------------------------------------------
- /** emitted by typer, eliminated by refchecks */
- case class TypeTreeWithDeferredRefCheck()(val check: () => TypeTree) extends AbsTypeTree
-
-// ----- subconstructors --------------------------------------------
-
- class ApplyToImplicitArgs(fun: Tree, args: List[Tree]) extends Apply(fun, args)
+ object treeInfo extends {
+ val global: Trees.this.type = self
+ } with TreeInfo
- class ApplyImplicitView(fun: Tree, args: List[Tree]) extends Apply(fun, args)
+ lazy val treePrinter = newTreePrinter()
-// ----- auxiliary objects and methods ------------------------------
+ // --- additional cases in operations ----------------------------------
+
+ override protected def xtraverse(traverser: Traverser, tree: Tree): Unit = tree match {
+ case Parens(ts) =>
+ traverser.traverseTrees(ts)
+ case DocDef(comment, definition) =>
+ traverser.traverse(definition)
+ case SelectFromArray(qualifier, selector, erasure) =>
+ traverser.traverse(qualifier)
+ case InjectDerivedValue(arg) =>
+ traverser.traverse(arg)
+ case TypeTreeWithDeferredRefCheck() =>
+ // (and rewrap the result? how to update the deferred check? would need to store wrapped tree instead of returning it from check)
+ case _ => super.xtraverse(traverser, tree)
+ }
- abstract class TreeCopier {
- def ClassDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template): ClassDef
- def PackageDef(tree: Tree, pid: RefTree, stats: List[Tree]): PackageDef
- def ModuleDef(tree: Tree, mods: Modifiers, name: Name, impl: Template): ModuleDef
- def ValDef(tree: Tree, mods: Modifiers, name: Name, tpt: Tree, rhs: Tree): ValDef
- def DefDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef
- def TypeDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], rhs: Tree): TypeDef
- def LabelDef(tree: Tree, name: Name, params: List[Ident], rhs: Tree): LabelDef
- def Import(tree: Tree, expr: Tree, selectors: List[ImportSelector]): Import
+ trait TreeCopier extends super.InternalTreeCopierOps {
def DocDef(tree: Tree, comment: DocComment, definition: Tree): DocDef
- def Template(tree: Tree, parents: List[Tree], self: ValDef, body: List[Tree]): Template
- def Block(tree: Tree, stats: List[Tree], expr: Tree): Block
- def CaseDef(tree: Tree, pat: Tree, guard: Tree, body: Tree): CaseDef
- def Alternative(tree: Tree, trees: List[Tree]): Alternative
- def Star(tree: Tree, elem: Tree): Star
- def Bind(tree: Tree, name: Name, body: Tree): Bind
- def UnApply(tree: Tree, fun: Tree, args: List[Tree]): UnApply
- def ArrayValue(tree: Tree, elemtpt: Tree, trees: List[Tree]): ArrayValue
- def Function(tree: Tree, vparams: List[ValDef], body: Tree): Function
- def Assign(tree: Tree, lhs: Tree, rhs: Tree): Assign
- def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree): AssignOrNamedArg
- def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree): If
- def Match(tree: Tree, selector: Tree, cases: List[CaseDef]): Match
- def Return(tree: Tree, expr: Tree): Return
- def Try(tree: Tree, block: Tree, catches: List[CaseDef], finalizer: Tree): Try
- def Throw(tree: Tree, expr: Tree): Throw
- def New(tree: Tree, tpt: Tree): New
- def Typed(tree: Tree, expr: Tree, tpt: Tree): Typed
- def TypeApply(tree: Tree, fun: Tree, args: List[Tree]): TypeApply
- def Apply(tree: Tree, fun: Tree, args: List[Tree]): Apply
- def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]): ApplyDynamic
- def Super(tree: Tree, qual: Tree, mix: TypeName): Super
- def This(tree: Tree, qual: Name): This
- def Select(tree: Tree, qualifier: Tree, selector: Name): Select
- def Ident(tree: Tree, name: Name): Ident
- def Literal(tree: Tree, value: Constant): Literal
- def TypeTree(tree: Tree): TypeTree
- def TypeTreeWithDeferredRefCheck(tree: Tree): TypeTreeWithDeferredRefCheck
- def Annotated(tree: Tree, annot: Tree, arg: Tree): Annotated
- def SingletonTypeTree(tree: Tree, ref: Tree): SingletonTypeTree
- def SelectFromTypeTree(tree: Tree, qualifier: Tree, selector: Name): SelectFromTypeTree
- def CompoundTypeTree(tree: Tree, templ: Template): CompoundTypeTree
- def AppliedTypeTree(tree: Tree, tpt: Tree, args: List[Tree]): AppliedTypeTree
- def TypeBoundsTree(tree: Tree, lo: Tree, hi: Tree): TypeBoundsTree
- def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]): ExistentialTypeTree
def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type): SelectFromArray
+ def InjectDerivedValue(tree: Tree, arg: Tree): InjectDerivedValue
+ def TypeTreeWithDeferredRefCheck(tree: Tree): TypeTreeWithDeferredRefCheck
}
- class StrictTreeCopier extends TreeCopier {
- def ClassDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template) =
- new ClassDef(mods, name.toTypeName, tparams, impl).copyAttrs(tree)
- def PackageDef(tree: Tree, pid: RefTree, stats: List[Tree]) =
- new PackageDef(pid, stats).copyAttrs(tree)
- def ModuleDef(tree: Tree, mods: Modifiers, name: Name, impl: Template) =
- new ModuleDef(mods, name, impl).copyAttrs(tree)
- def ValDef(tree: Tree, mods: Modifiers, name: Name, tpt: Tree, rhs: Tree) =
- new ValDef(mods, name, tpt, rhs).copyAttrs(tree)
- def DefDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree) =
- new DefDef(mods, name, tparams, vparamss, tpt, rhs).copyAttrs(tree)
- def TypeDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], rhs: Tree) =
- new TypeDef(mods, name.toTypeName, tparams, rhs).copyAttrs(tree)
- def LabelDef(tree: Tree, name: Name, params: List[Ident], rhs: Tree) =
- new LabelDef(name, params, rhs).copyAttrs(tree)
- def Import(tree: Tree, expr: Tree, selectors: List[ImportSelector]) =
- new Import(expr, selectors).copyAttrs(tree)
+ def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
+ def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
+
+ class StrictTreeCopier extends super.StrictTreeCopier with TreeCopier {
def DocDef(tree: Tree, comment: DocComment, definition: Tree) =
new DocDef(comment, definition).copyAttrs(tree)
- def Template(tree: Tree, parents: List[Tree], self: ValDef, body: List[Tree]) =
- new Template(parents, self, body).copyAttrs(tree)
- def Block(tree: Tree, stats: List[Tree], expr: Tree) =
- new Block(stats, expr).copyAttrs(tree)
- def CaseDef(tree: Tree, pat: Tree, guard: Tree, body: Tree) =
- new CaseDef(pat, guard, body).copyAttrs(tree)
- def Alternative(tree: Tree, trees: List[Tree]) =
- new Alternative(trees).copyAttrs(tree)
- def Star(tree: Tree, elem: Tree) =
- new Star(elem).copyAttrs(tree)
- def Bind(tree: Tree, name: Name, body: Tree) =
- new Bind(name, body).copyAttrs(tree)
- def UnApply(tree: Tree, fun: Tree, args: List[Tree]) =
- new UnApply(fun, args).copyAttrs(tree)
- def ArrayValue(tree: Tree, elemtpt: Tree, trees: List[Tree]) =
- new ArrayValue(elemtpt, trees).copyAttrs(tree)
- def Function(tree: Tree, vparams: List[ValDef], body: Tree) =
- new Function(vparams, body).copyAttrs(tree)
- def Assign(tree: Tree, lhs: Tree, rhs: Tree) =
- new Assign(lhs, rhs).copyAttrs(tree)
- def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree) =
- new AssignOrNamedArg(lhs, rhs).copyAttrs(tree)
- def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree) =
- new If(cond, thenp, elsep).copyAttrs(tree)
- def Match(tree: Tree, selector: Tree, cases: List[CaseDef]) =
- new Match(selector, cases).copyAttrs(tree)
- def Return(tree: Tree, expr: Tree) =
- new Return(expr).copyAttrs(tree)
- def Try(tree: Tree, block: Tree, catches: List[CaseDef], finalizer: Tree) =
- new Try(block, catches, finalizer).copyAttrs(tree)
- def Throw(tree: Tree, expr: Tree) =
- new Throw(expr).copyAttrs(tree)
- def New(tree: Tree, tpt: Tree) =
- new New(tpt).copyAttrs(tree)
- def Typed(tree: Tree, expr: Tree, tpt: Tree) =
- new Typed(expr, tpt).copyAttrs(tree)
- def TypeApply(tree: Tree, fun: Tree, args: List[Tree]) =
- new TypeApply(fun, args).copyAttrs(tree)
- def Apply(tree: Tree, fun: Tree, args: List[Tree]) =
- (tree match {
- case _: ApplyToImplicitArgs => new ApplyToImplicitArgs(fun, args)
- case _: ApplyImplicitView => new ApplyImplicitView(fun, args)
- case _ => new Apply(fun, args)
- }).copyAttrs(tree)
- def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]) =
- new ApplyDynamic(qual, args).copyAttrs(tree)
- def Super(tree: Tree, qual: Tree, mix: TypeName) =
- new Super(qual, mix).copyAttrs(tree)
- def This(tree: Tree, qual: Name) =
- new This(qual.toTypeName).copyAttrs(tree)
- def Select(tree: Tree, qualifier: Tree, selector: Name) =
- new Select(qualifier, selector).copyAttrs(tree)
- def Ident(tree: Tree, name: Name) =
- new Ident(name).copyAttrs(tree)
- def Literal(tree: Tree, value: Constant) =
- new Literal(value).copyAttrs(tree)
- def TypeTree(tree: Tree) =
- new TypeTree().copyAttrs(tree)
+ def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type) =
+ new SelectFromArray(qualifier, selector, erasure).copyAttrs(tree)
+ def InjectDerivedValue(tree: Tree, arg: Tree) =
+ new InjectDerivedValue(arg).copyAttrs(tree)
def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match {
case dc at TypeTreeWithDeferredRefCheck() => new TypeTreeWithDeferredRefCheck()(dc.check).copyAttrs(tree)
}
- def Annotated(tree: Tree, annot: Tree, arg: Tree) =
- new Annotated(annot, arg).copyAttrs(tree)
- def SingletonTypeTree(tree: Tree, ref: Tree) =
- new SingletonTypeTree(ref).copyAttrs(tree)
- def SelectFromTypeTree(tree: Tree, qualifier: Tree, selector: Name) =
- new SelectFromTypeTree(qualifier, selector.toTypeName).copyAttrs(tree)
- def CompoundTypeTree(tree: Tree, templ: Template) =
- new CompoundTypeTree(templ).copyAttrs(tree)
- def AppliedTypeTree(tree: Tree, tpt: Tree, args: List[Tree]) =
- new AppliedTypeTree(tpt, args).copyAttrs(tree)
- def TypeBoundsTree(tree: Tree, lo: Tree, hi: Tree) =
- new TypeBoundsTree(lo, hi).copyAttrs(tree)
- def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]) =
- new ExistentialTypeTree(tpt, whereClauses).copyAttrs(tree)
- def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type) =
- new SelectFromArray(qualifier, selector, erasure).copyAttrs(tree)
}
- class LazyTreeCopier(treeCopy: TreeCopier) extends TreeCopier {
- def this() = this(new StrictTreeCopier)
- def ClassDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template) = tree match {
- case t @ ClassDef(mods0, name0, tparams0, impl0)
- if (mods0 == mods) && (name0 == name) && (tparams0 == tparams) && (impl0 == impl) => t
- case _ => treeCopy.ClassDef(tree, mods, name, tparams, impl)
- }
- def PackageDef(tree: Tree, pid: RefTree, stats: List[Tree]) = tree match {
- case t @ PackageDef(pid0, stats0)
- if (pid0 == pid) && (stats0 == stats) => t
- case _ => treeCopy.PackageDef(tree, pid, stats)
- }
- def ModuleDef(tree: Tree, mods: Modifiers, name: Name, impl: Template) = tree match {
- case t @ ModuleDef(mods0, name0, impl0)
- if (mods0 == mods) && (name0 == name) && (impl0 == impl) => t
- case _ => treeCopy.ModuleDef(tree, mods, name, impl)
- }
- def ValDef(tree: Tree, mods: Modifiers, name: Name, tpt: Tree, rhs: Tree) = tree match {
- case t @ ValDef(mods0, name0, tpt0, rhs0)
- if (mods0 == mods) && (name0 == name) && (tpt0 == tpt) && (rhs0 == rhs) => t
- case _ => treeCopy.ValDef(tree, mods, name, tpt, rhs)
- }
- def DefDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree) = tree match {
- case t @ DefDef(mods0, name0, tparams0, vparamss0, tpt0, rhs0)
- if (mods0 == mods) && (name0 == name) && (tparams0 == tparams) &&
- (vparamss0 == vparamss) && (tpt0 == tpt) && (rhs == rhs0) => t
- case _ => treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, rhs)
- }
- def TypeDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], rhs: Tree) = tree match {
- case t @ TypeDef(mods0, name0, tparams0, rhs0)
- if (mods0 == mods) && (name0 == name) && (tparams0 == tparams) && (rhs0 == rhs) => t
- case _ => treeCopy.TypeDef(tree, mods, name, tparams, rhs)
- }
- def LabelDef(tree: Tree, name: Name, params: List[Ident], rhs: Tree) = tree match {
- case t @ LabelDef(name0, params0, rhs0)
- if (name0 == name) && (params0 == params) && (rhs0 == rhs) => t
- case _ => treeCopy.LabelDef(tree, name, params, rhs)
- }
- def Import(tree: Tree, expr: Tree, selectors: List[ImportSelector]) = tree match {
- case t @ Import(expr0, selectors0)
- if (expr0 == expr) && (selectors0 == selectors) => t
- case _ => treeCopy.Import(tree, expr, selectors)
- }
+ class LazyTreeCopier extends super.LazyTreeCopier with TreeCopier {
def DocDef(tree: Tree, comment: DocComment, definition: Tree) = tree match {
case t @ DocDef(comment0, definition0)
if (comment0 == comment) && (definition0 == definition) => t
- case _ => treeCopy.DocDef(tree, comment, definition)
- }
- def Template(tree: Tree, parents: List[Tree], self: ValDef, body: List[Tree]) = tree match {
- case t @ Template(parents0, self0, body0)
- if (parents0 == parents) && (self0 == self) && (body0 == body) => t
- case _ => treeCopy.Template(tree, parents, self, body)
- }
- def Block(tree: Tree, stats: List[Tree], expr: Tree) = tree match {
- case t @ Block(stats0, expr0)
- if ((stats0 == stats) && (expr0 == expr)) => t
- case _ => treeCopy.Block(tree, stats, expr)
- }
- def CaseDef(tree: Tree, pat: Tree, guard: Tree, body: Tree) = tree match {
- case t @ CaseDef(pat0, guard0, body0)
- if (pat0 == pat) && (guard0 == guard) && (body0 == body) => t
- case _ => treeCopy.CaseDef(tree, pat, guard, body)
- }
- def Alternative(tree: Tree, trees: List[Tree]) = tree match {
- case t @ Alternative(trees0)
- if trees0 == trees => t
- case _ => treeCopy.Alternative(tree, trees)
- }
- def Star(tree: Tree, elem: Tree) = tree match {
- case t @ Star(elem0)
- if elem0 == elem => t
- case _ => treeCopy.Star(tree, elem)
- }
- def Bind(tree: Tree, name: Name, body: Tree) = tree match {
- case t @ Bind(name0, body0)
- if (name0 == name) && (body0 == body) => t
- case _ => treeCopy.Bind(tree, name, body)
- }
- def UnApply(tree: Tree, fun: Tree, args: List[Tree]) = tree match {
- case t @ UnApply(fun0, args0)
- if (fun0 == fun) && (args0 == args) => t
- case _ => treeCopy.UnApply(tree, fun, args)
- }
- def ArrayValue(tree: Tree, elemtpt: Tree, trees: List[Tree]) = tree match {
- case t @ ArrayValue(elemtpt0, trees0)
- if (elemtpt0 == elemtpt) && (trees0 == trees) => t
- case _ => treeCopy.ArrayValue(tree, elemtpt, trees)
- }
- def Function(tree: Tree, vparams: List[ValDef], body: Tree) = tree match {
- case t @ Function(vparams0, body0)
- if (vparams0 == vparams) && (body0 == body) => t
- case _ => treeCopy.Function(tree, vparams, body)
- }
- def Assign(tree: Tree, lhs: Tree, rhs: Tree) = tree match {
- case t @ Assign(lhs0, rhs0)
- if (lhs0 == lhs) && (rhs0 == rhs) => t
- case _ => treeCopy.Assign(tree, lhs, rhs)
- }
- def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree) = tree match {
- case t @ AssignOrNamedArg(lhs0, rhs0)
- if (lhs0 == lhs) && (rhs0 == rhs) => t
- case _ => treeCopy.AssignOrNamedArg(tree, lhs, rhs)
- }
- def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree) = tree match {
- case t @ If(cond0, thenp0, elsep0)
- if (cond0 == cond) && (thenp0 == thenp) && (elsep0 == elsep) => t
- case _ => treeCopy.If(tree, cond, thenp, elsep)
- }
- def Match(tree: Tree, selector: Tree, cases: List[CaseDef]) = tree match {
- case t @ Match(selector0, cases0)
- if (selector0 == selector) && (cases0 == cases) => t
- case _ => treeCopy.Match(tree, selector, cases)
- }
- def Return(tree: Tree, expr: Tree) = tree match {
- case t @ Return(expr0)
- if expr0 == expr => t
- case _ => treeCopy.Return(tree, expr)
- }
- def Try(tree: Tree, block: Tree, catches: List[CaseDef], finalizer: Tree) = tree match {
- case t @ Try(block0, catches0, finalizer0)
- if (block0 == block) && (catches0 == catches) && (finalizer0 == finalizer) => t
- case _ => treeCopy.Try(tree, block, catches, finalizer)
- }
- def Throw(tree: Tree, expr: Tree) = tree match {
- case t @ Throw(expr0)
- if expr0 == expr => t
- case _ => treeCopy.Throw(tree, expr)
- }
- def New(tree: Tree, tpt: Tree) = tree match {
- case t @ New(tpt0)
- if tpt0 == tpt => t
- case _ => treeCopy.New(tree, tpt)
- }
- def Typed(tree: Tree, expr: Tree, tpt: Tree) = tree match {
- case t @ Typed(expr0, tpt0)
- if (expr0 == expr) && (tpt0 == tpt) => t
- case _ => treeCopy.Typed(tree, expr, tpt)
- }
- def TypeApply(tree: Tree, fun: Tree, args: List[Tree]) = tree match {
- case t @ TypeApply(fun0, args0)
- if (fun0 == fun) && (args0 == args) => t
- case _ => treeCopy.TypeApply(tree, fun, args)
- }
- def Apply(tree: Tree, fun: Tree, args: List[Tree]) = tree match {
- case t @ Apply(fun0, args0)
- if (fun0 == fun) && (args0 == args) => t
- case _ => treeCopy.Apply(tree, fun, args)
- }
- def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]) = tree match {
- case t @ ApplyDynamic(qual0, args0)
- if (qual0 == qual) && (args0 == args) => t
- case _ => treeCopy.ApplyDynamic(tree, qual, args)
- }
- def Super(tree: Tree, qual: Tree, mix: TypeName) = tree match {
- case t @ Super(qual0, mix0)
- if (qual0 == qual) && (mix0 == mix) => t
- case _ => treeCopy.Super(tree, qual, mix)
- }
- def This(tree: Tree, qual: Name) = tree match {
- case t @ This(qual0)
- if qual0 == qual => t
- case _ => treeCopy.This(tree, qual)
- }
- def Select(tree: Tree, qualifier: Tree, selector: Name) = tree match {
- case t @ Select(qualifier0, selector0)
- if (qualifier0 == qualifier) && (selector0 == selector) => t
- case _ => treeCopy.Select(tree, qualifier, selector)
- }
- def Ident(tree: Tree, name: Name) = tree match {
- case t @ Ident(name0)
- if name0 == name => t
- case _ => treeCopy.Ident(tree, name)
- }
- def Literal(tree: Tree, value: Constant) = tree match {
- case t @ Literal(value0)
- if value0 == value => t
- case _ => treeCopy.Literal(tree, value)
- }
- def TypeTree(tree: Tree) = tree match {
- case t @ TypeTree() => t
- case _ => treeCopy.TypeTree(tree)
- }
- def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match {
- case t @ TypeTreeWithDeferredRefCheck() => t
- case _ => treeCopy.TypeTreeWithDeferredRefCheck(tree)
- }
- def Annotated(tree: Tree, annot: Tree, arg: Tree) = tree match {
- case t @ Annotated(annot0, arg0)
- if (annot0==annot) => t
- case _ => treeCopy.Annotated(tree, annot, arg)
- }
- def SingletonTypeTree(tree: Tree, ref: Tree) = tree match {
- case t @ SingletonTypeTree(ref0)
- if ref0 == ref => t
- case _ => treeCopy.SingletonTypeTree(tree, ref)
- }
- def SelectFromTypeTree(tree: Tree, qualifier: Tree, selector: Name) = tree match {
- case t @ SelectFromTypeTree(qualifier0, selector0)
- if (qualifier0 == qualifier) && (selector0 == selector) => t
- case _ => treeCopy.SelectFromTypeTree(tree, qualifier, selector)
- }
- def CompoundTypeTree(tree: Tree, templ: Template) = tree match {
- case t @ CompoundTypeTree(templ0)
- if templ0 == templ => t
- case _ => treeCopy.CompoundTypeTree(tree, templ)
- }
- def AppliedTypeTree(tree: Tree, tpt: Tree, args: List[Tree]) = tree match {
- case t @ AppliedTypeTree(tpt0, args0)
- if (tpt0 == tpt) && (args0 == args) => t
- case _ => treeCopy.AppliedTypeTree(tree, tpt, args)
- }
- def TypeBoundsTree(tree: Tree, lo: Tree, hi: Tree) = tree match {
- case t @ TypeBoundsTree(lo0, hi0)
- if (lo0 == lo) && (hi0 == hi) => t
- case _ => treeCopy.TypeBoundsTree(tree, lo, hi)
- }
- def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]) = tree match {
- case t @ ExistentialTypeTree(tpt0, whereClauses0)
- if (tpt0 == tpt) && (whereClauses0 == whereClauses) => t
- case _ => treeCopy.ExistentialTypeTree(tree, tpt, whereClauses)
+ case _ => this.treeCopy.DocDef(tree, comment, definition)
}
def SelectFromArray(tree: Tree, qualifier: Tree, selector: Name, erasure: Type) = tree match {
case t @ SelectFromArray(qualifier0, selector0, _)
if (qualifier0 == qualifier) && (selector0 == selector) => t
- case _ => treeCopy.SelectFromArray(tree, qualifier, selector, erasure)
- }
- }
-
- abstract class Transformer {
- val treeCopy: TreeCopier = new LazyTreeCopier
- protected var currentOwner: Symbol = definitions.RootClass
- protected def currentMethod = currentOwner.enclMethod
- protected def currentClass = currentOwner.enclClass
- protected def currentPackage = currentOwner.toplevelClass.owner
- def transform(tree: Tree): Tree = tree match {
- case EmptyTree =>
- tree
- case PackageDef(pid, stats) =>
- treeCopy.PackageDef(
- tree, transform(pid).asInstanceOf[RefTree],
- atOwner(tree.symbol.moduleClass) {
- transformStats(stats, currentOwner)
- }
- )
- case ClassDef(mods, name, tparams, impl) =>
- atOwner(tree.symbol) {
- treeCopy.ClassDef(tree, transformModifiers(mods), name,
- transformTypeDefs(tparams), transformTemplate(impl))
- }
- case ModuleDef(mods, name, impl) =>
- atOwner(tree.symbol.moduleClass) {
- treeCopy.ModuleDef(tree, transformModifiers(mods),
- name, transformTemplate(impl))
- }
- case ValDef(mods, name, tpt, rhs) =>
- atOwner(tree.symbol) {
- treeCopy.ValDef(tree, transformModifiers(mods),
- name, transform(tpt), transform(rhs))
- }
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- atOwner(tree.symbol) {
- treeCopy.DefDef(tree, transformModifiers(mods), name,
- transformTypeDefs(tparams), transformValDefss(vparamss),
- transform(tpt), transform(rhs))
- }
- case TypeDef(mods, name, tparams, rhs) =>
- atOwner(tree.symbol) {
- treeCopy.TypeDef(tree, transformModifiers(mods), name,
- transformTypeDefs(tparams), transform(rhs))
- }
- case LabelDef(name, params, rhs) =>
- treeCopy.LabelDef(tree, name, transformIdents(params), transform(rhs)) //bq: Martin, once, atOwner(...) works, also change `LamdaLifter.proxy'
- case Import(expr, selectors) =>
- treeCopy.Import(tree, transform(expr), selectors)
- case DocDef(comment, definition) =>
- treeCopy.DocDef(tree, comment, transform(definition))
- case Template(parents, self, body) =>
- treeCopy.Template(tree, transformTrees(parents), transformValDef(self), transformStats(body, tree.symbol))
- case Block(stats, expr) =>
- treeCopy.Block(tree, transformStats(stats, currentOwner), transform(expr))
- case CaseDef(pat, guard, body) =>
- treeCopy.CaseDef(tree, transform(pat), transform(guard), transform(body))
- case Alternative(trees) =>
- treeCopy.Alternative(tree, transformTrees(trees))
- case Star(elem) =>
- treeCopy.Star(tree, transform(elem))
- case Bind(name, body) =>
- treeCopy.Bind(tree, name, transform(body))
- case UnApply(fun, args) =>
- treeCopy.UnApply(tree, fun, transformTrees(args)) // bq: see test/.../unapplyContexts2.scala
- case ArrayValue(elemtpt, trees) =>
- treeCopy.ArrayValue(tree, transform(elemtpt), transformTrees(trees))
- case Function(vparams, body) =>
- atOwner(tree.symbol) {
- treeCopy.Function(tree, transformValDefs(vparams), transform(body))
- }
- case Assign(lhs, rhs) =>
- treeCopy.Assign(tree, transform(lhs), transform(rhs))
- case AssignOrNamedArg(lhs, rhs) =>
- treeCopy.AssignOrNamedArg(tree, transform(lhs), transform(rhs))
- case If(cond, thenp, elsep) =>
- treeCopy.If(tree, transform(cond), transform(thenp), transform(elsep))
- case Match(selector, cases) =>
- treeCopy.Match(tree, transform(selector), transformCaseDefs(cases))
- case Return(expr) =>
- treeCopy.Return(tree, transform(expr))
- case Try(block, catches, finalizer) =>
- treeCopy.Try(tree, transform(block), transformCaseDefs(catches), transform(finalizer))
- case Throw(expr) =>
- treeCopy.Throw(tree, transform(expr))
- case New(tpt) =>
- treeCopy.New(tree, transform(tpt))
- case Typed(expr, tpt) =>
- treeCopy.Typed(tree, transform(expr), transform(tpt))
- case TypeApply(fun, args) =>
- treeCopy.TypeApply(tree, transform(fun), transformTrees(args))
- case Apply(fun, args) =>
- treeCopy.Apply(tree, transform(fun), transformTrees(args))
- case ApplyDynamic(qual, args) =>
- treeCopy.ApplyDynamic(tree, transform(qual), transformTrees(args))
- case Super(qual, mix) =>
- treeCopy.Super(tree, transform(qual), mix)
- case This(qual) =>
- treeCopy.This(tree, qual)
- case Select(qualifier, selector) =>
- treeCopy.Select(tree, transform(qualifier), selector)
- case Ident(name) =>
- treeCopy.Ident(tree, name)
- case Literal(value) =>
- treeCopy.Literal(tree, value)
- case TypeTree() =>
- treeCopy.TypeTree(tree)
- case TypeTreeWithDeferredRefCheck() =>
- treeCopy.TypeTreeWithDeferredRefCheck(tree)
- case Annotated(annot, arg) =>
- treeCopy.Annotated(tree, transform(annot), transform(arg))
- case SingletonTypeTree(ref) =>
- treeCopy.SingletonTypeTree(tree, transform(ref))
- case SelectFromTypeTree(qualifier, selector) =>
- treeCopy.SelectFromTypeTree(tree, transform(qualifier), selector)
- case CompoundTypeTree(templ) =>
- treeCopy.CompoundTypeTree(tree, transformTemplate(templ))
- case AppliedTypeTree(tpt, args) =>
- treeCopy.AppliedTypeTree(tree, transform(tpt), transformTrees(args))
- case TypeBoundsTree(lo, hi) =>
- treeCopy.TypeBoundsTree(tree, transform(lo), transform(hi))
- case ExistentialTypeTree(tpt, whereClauses) =>
- treeCopy.ExistentialTypeTree(tree, transform(tpt), transformTrees(whereClauses))
- case SelectFromArray(qualifier, selector, erasure) =>
- treeCopy.SelectFromArray(tree, transform(qualifier), selector, erasure)
- }
-
- def transformTrees(trees: List[Tree]): List[Tree] =
- trees mapConserve (transform(_))
- def transformTemplate(tree: Template): Template =
- transform(tree: Tree).asInstanceOf[Template]
- def transformTypeDefs(trees: List[TypeDef]): List[TypeDef] =
- trees mapConserve (tree => transform(tree).asInstanceOf[TypeDef])
- def transformValDef(tree: ValDef): ValDef =
- if (tree.isEmpty) tree else transform(tree).asInstanceOf[ValDef]
- def transformValDefs(trees: List[ValDef]): List[ValDef] =
- trees mapConserve (transformValDef(_))
- def transformValDefss(treess: List[List[ValDef]]): List[List[ValDef]] =
- treess mapConserve (transformValDefs(_))
- def transformCaseDefs(trees: List[CaseDef]): List[CaseDef] =
- trees mapConserve (tree => transform(tree).asInstanceOf[CaseDef])
- def transformIdents(trees: List[Ident]): List[Ident] =
- trees mapConserve (tree => transform(tree).asInstanceOf[Ident])
- def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] =
- stats mapConserve (stat =>
- if (exprOwner != currentOwner && stat.isTerm) atOwner(exprOwner)(transform(stat))
- else transform(stat)) filter (EmptyTree !=)
- def transformUnit(unit: CompilationUnit) { unit.body = transform(unit.body) }
- def transformModifiers(mods: Modifiers): Modifiers =
- Modifiers(mods.flags, mods.privateWithin, transformTrees(mods.annotations), mods.positions)
-
- def atOwner[A](owner: Symbol)(trans: => A): A = {
- val prevOwner = currentOwner
- currentOwner = owner
- val result = trans
- currentOwner = prevOwner
- result
+ case _ => this.treeCopy.SelectFromArray(tree, qualifier, selector, erasure)
}
- }
-
- class Traverser extends super.Traverser {
- /** Compiler specific tree types are handled here: the remainder are in
- * the library's abstract tree traverser.
- */
- override def traverse(tree: Tree): Unit = tree match {
- case AssignOrNamedArg(lhs, rhs) =>
- traverse(lhs); traverse(rhs)
- case DocDef(comment, definition) =>
- traverse(definition)
- case Parens(ts) =>
- traverseTrees(ts)
- case TypeTreeWithDeferredRefCheck() => // TODO: should we traverse the wrapped tree?
- // (and rewrap the result? how to update the deferred check? would need to store wrapped tree instead of returning it from check)
- case Super(qual, _) =>
- traverse(qual) // !!! remove when Super is done
- case _ => super.traverse(tree)
+ def InjectDerivedValue(tree: Tree, arg: Tree) = tree match {
+ case t @ InjectDerivedValue(arg0)
+ if (arg0 == arg) => t
+ case _ => this.treeCopy.InjectDerivedValue(tree, arg)
}
-
- /** The abstract traverser is not aware of Tree.isTerm, so we override this one.
- */
- override def traverseStats(stats: List[Tree], exprOwner: Symbol) {
- stats foreach (stat =>
- if (exprOwner != currentOwner && stat.isTerm) atOwner(exprOwner)(traverse(stat))
- else traverse(stat)
- )
+ def TypeTreeWithDeferredRefCheck(tree: Tree) = tree match {
+ case t @ TypeTreeWithDeferredRefCheck() => t
+ case _ => this.treeCopy.TypeTreeWithDeferredRefCheck(tree)
}
-
- /** Leave apply available in the generic traverser to do something else.
- */
- def apply[T <: Tree](tree: T): T = { traverse(tree); tree }
}
- private lazy val duplicator = new Transformer {
- override val treeCopy = new StrictTreeCopier
- override def transform(t: Tree) = {
- val t1 = super.transform(t)
- if ((t1 ne t) && t1.pos.isRange) t1 setPos t.pos.focus
- t1
+ class Transformer extends super.Transformer {
+ def transformUnit(unit: CompilationUnit) {
+ try unit.body = transform(unit.body)
+ catch {
+ case ex: Exception =>
+ println(supplementErrorMessage("unhandled exception while transforming "+unit))
+ throw ex
+ }
}
}
- private class ShallowDuplicator(orig: Tree) extends Transformer {
- override val treeCopy = new StrictTreeCopier
- override def transform(tree: Tree) =
- if (tree eq orig) super.transform(tree)
- else tree
- }
- // Create a readable string describing a substitution.
- private def substituterString(fromStr: String, toStr: String, from: List[Any], to: List[Any]): String = {
- "subst[%s, %s](%s)".format(fromStr, toStr, (from, to).zipped map (_ + " -> " + _) mkString ", ")
+ // used when a phase is disabled
+ object noopTransformer extends Transformer {
+ override def transformUnit(unit: CompilationUnit): Unit = {}
}
- class TreeSubstituter(from: List[Symbol], to: List[Tree]) extends Transformer {
- override def transform(tree: Tree): Tree = tree match {
- case Ident(_) =>
- def subst(from: List[Symbol], to: List[Tree]): Tree =
- if (from.isEmpty) tree
- else if (tree.symbol == from.head) to.head
- else subst(from.tail, to.tail);
- subst(from, to)
- case _ =>
- super.transform(tree)
- }
- override def toString = substituterString("Symbol", "Tree", from, to)
+ override protected def xtransform(transformer: super.Transformer, tree: Tree): Tree = tree match {
+ case DocDef(comment, definition) =>
+ transformer.treeCopy.DocDef(tree, comment, transformer.transform(definition))
+ case SelectFromArray(qualifier, selector, erasure) =>
+ transformer.treeCopy.SelectFromArray(
+ tree, transformer.transform(qualifier), selector, erasure)
+ case InjectDerivedValue(arg) =>
+ transformer.treeCopy.InjectDerivedValue(
+ tree, transformer.transform(arg))
+ case TypeTreeWithDeferredRefCheck() =>
+ transformer.treeCopy.TypeTreeWithDeferredRefCheck(tree)
}
- class TreeTypeSubstituter(val from: List[Symbol], val to: List[Type]) extends Traverser {
- val typeSubst = new SubstTypeMap(from, to)
- def fromContains = typeSubst.fromContains
- def isEmpty = from.isEmpty && to.isEmpty
-
- override def traverse(tree: Tree) {
- if (tree.tpe ne null) tree.tpe = typeSubst(tree.tpe)
- if (tree.isDef) {
- val sym = tree.symbol
- val info1 = typeSubst(sym.info)
- if (info1 ne sym.info) sym.setInfo(info1)
- }
- super.traverse(tree)
+ object resetPos extends Traverser {
+ override def traverse(t: Tree) {
+ if (t != EmptyTree) t.setPos(NoPosition)
+ super.traverse(t)
}
- override def apply[T <: Tree](tree: T): T = super.apply(tree.duplicate)
- override def toString() = "TreeTypeSubstituter("+from+","+to+")"
}
- lazy val EmptyTreeTypeSubstituter = new TreeTypeSubstituter(List(), List())
-
- class TreeSymSubstTraverser(val from: List[Symbol], val to: List[Symbol]) extends Traverser {
- val subst = new SubstSymMap(from, to)
- override def traverse(tree: Tree) {
- if (tree.tpe ne null) tree.tpe = subst(tree.tpe)
- if (tree.isDef) {
- val sym = tree.symbol
- val info1 = subst(sym.info)
- if (info1 ne sym.info) sym.setInfo(info1)
+ /** resets symbol and tpe fields in a tree, @see ResetAttrs
+ */
+// def resetAllAttrs[A<:Tree](x:A): A = { new ResetAttrsTraverser().traverse(x); x }
+// def resetLocalAttrs[A<:Tree](x:A): A = { new ResetLocalAttrsTraverser().traverse(x); x }
+
+ def resetAllAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(false, leaveAlone).transform(x)
+ def resetLocalAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(true, leaveAlone).transform(x)
+ def resetLocalAttrsKeepLabels(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(true, leaveAlone, true).transform(x)
+
+ /** A transformer which resets symbol and tpe fields of all nodes in a given tree,
+ * with special treatment of:
+ * TypeTree nodes: are replaced by their original if it exists, otherwise tpe field is reset
+ * to empty if it started out empty or refers to local symbols (which are erased).
+ * TypeApply nodes: are deleted if type arguments end up reverted to empty
+ * This(pkg) nodes where pkg is a package: these are kept.
+ *
+ * (bq:) This transformer has mutable state and should be discarded after use
+ */
+ private class ResetAttrs(localOnly: Boolean, leaveAlone: Tree => Boolean = null, keepLabels: Boolean = false) {
+ val debug = settings.debug.value
+ val trace = scala.tools.nsc.util.trace when debug
+
+ val locals = util.HashSet[Symbol](8)
+ val orderedLocals = scala.collection.mutable.ListBuffer[Symbol]()
+ def registerLocal(sym: Symbol) {
+ if (sym != null && sym != NoSymbol) {
+ if (debug && !(locals contains sym)) orderedLocals append sym
+ locals addEntry sym
}
- super.traverse(tree)
}
- override def apply[T <: Tree](tree: T): T = super.apply(tree.duplicate)
- override def toString() = "TreeSymSubstTraverser/" + substituterString("Symbol", "Symbol", from, to)
- }
- /** Substitute symbols in 'from' with symbols in 'to'. Returns a new
- * tree using the new symbols and whose Ident and Select nodes are
- * name-consistent with the new symbols.
- */
- class TreeSymSubstituter(from: List[Symbol], to: List[Symbol]) extends Transformer {
- val symSubst = new SubstSymMap(from, to)
- override def transform(tree: Tree): Tree = {
- def subst(from: List[Symbol], to: List[Symbol]) {
- if (!from.isEmpty)
- if (tree.symbol == from.head) tree setSymbol to.head
- else subst(from.tail, to.tail)
+ class MarkLocals extends self.Traverser {
+ def markLocal(tree: Tree) {
+ if (tree.symbol != null && tree.symbol != NoSymbol) {
+ val sym = tree.symbol
+ registerLocal(sym)
+ registerLocal(sym.sourceModule)
+ registerLocal(sym.moduleClass)
+ registerLocal(sym.companionClass)
+ registerLocal(sym.companionModule)
+ sym match {
+ case sym: TermSymbol => registerLocal(sym.referenced)
+ case _ => ;
+ }
+ }
}
- if (tree.tpe ne null) tree.tpe = symSubst(tree.tpe)
- if (tree.hasSymbol) {
- subst(from, to)
+ override def traverse(tree: Tree) = {
tree match {
- case Ident(name0) if tree.symbol != NoSymbol =>
- treeCopy.Ident(tree, tree.symbol.name)
- case Select(qual, name0) =>
- treeCopy.Select(tree, transform(qual), tree.symbol.name)
- case _ =>
- super.transform(tree)
+ case _: DefTree | Function(_, _) | Template(_, _, _) =>
+ markLocal(tree)
+ case _ =>
+ tree
}
- } else
- super.transform(tree)
- }
- def apply[T <: Tree](tree: T): T = transform(tree).asInstanceOf[T]
- override def toString() = "TreeSymSubstituter/" + substituterString("Symbol", "Symbol", from, to)
- }
-
- class ChangeOwnerTraverser(val oldowner: Symbol, val newowner: Symbol) extends Traverser {
- def changeOwner(tree: Tree) = {
- if ((tree.isDef || tree.isInstanceOf[Function]) &&
- tree.symbol != NoSymbol && tree.symbol.owner == oldowner)
- tree.symbol.owner = newowner
- }
- override def traverse(tree: Tree) {
- changeOwner(tree)
- super.traverse(tree)
- }
- }
- object posAssigner extends Traverser {
- var pos: Position = _
- override def traverse(t: Tree) {
- if (t != EmptyTree && t.pos == NoPosition) {
- t.setPos(pos)
- super.traverse(t)
+ super.traverse(tree)
}
}
- }
-
- def atPos[T <: Tree](pos: Position)(tree: T): T = {
- posAssigner.pos = pos
- posAssigner.traverse(tree)
- tree
- }
-
- class ForeachPartialTreeTraverser(pf: PartialFunction[Tree, Tree]) extends Traverser {
- override def traverse(tree: Tree) {
- val t = if (pf isDefinedAt tree) pf(tree) else tree
- super.traverse(t)
- }
- }
- class ForeachTreeTraverser(f: Tree => Unit) extends Traverser {
- override def traverse(t: Tree) {
- f(t)
- super.traverse(t)
+ class Transformer extends self.Transformer {
+ override def transform(tree: Tree): Tree = {
+ if (leaveAlone != null && leaveAlone(tree))
+ tree
+ else
+ super.transform {
+ tree match {
+ case tpt: TypeTree =>
+ if (tpt.original != null)
+ transform(tpt.original)
+ else {
+ val refersToLocalSymbols = tpt.tpe != null && (tpt.tpe exists (tp => locals contains tp.typeSymbol))
+ val isInferred = tpt.wasEmpty
+ if (refersToLocalSymbols || isInferred) {
+ val dupl = tpt.duplicate
+ dupl.tpe = null
+ dupl
+ } else {
+ tpt
+ }
+ }
+ // If one of the type arguments of a TypeApply gets reset to an empty TypeTree, then this means that:
+ // 1) It isn't empty now (tpt.tpe != null), but it was empty before (tpt.wasEmpty).
+ // 2) Thus, its argument got inferred during a preceding typecheck.
+ // 3) Thus, all its arguments were inferred (because scalac can only infer all or nothing).
+ // Therefore, we can safely erase the TypeApply altogether and have it inferred once again in a subsequent typecheck.
+ // UPD: Actually there's another reason for erasing a type behind the TypeTree
+ // is when this type refers to symbols defined in the tree being processed.
+ // These symbols will be erased, because we can't leave alive a type referring to them.
+ // Here we can only hope that everything will work fine afterwards.
+ case TypeApply(fn, args) if args map transform exists (_.isEmpty) =>
+ transform(fn)
+ case EmptyTree =>
+ tree
+ case _ =>
+ val dupl = tree.duplicate
+ // Typically the resetAttrs transformer cleans both symbols and types.
+ // However there are exceptions when we cannot erase symbols due to idiosyncrasies of the typer.
+ // vetoXXX local variables declared below describe the conditions under which we cannot erase symbols.
+ //
+ // The first reason to not erase symbols is the threat of non-idempotency (SI-5464).
+ // Here we take care of labels (SI-5562) and references to package classes (SI-5705).
+ // There are other non-idempotencies, but they are not worked around yet.
+ //
+ // The second reason has to do with the fact that resetAttrs itself has limited usefulness.
+ //
+ // First of all, why do we need resetAttrs? Gor one, it's absolutely required to move trees around.
+ // One cannot just take a typed tree from one lexical context and transplant it somewhere else.
+ // Most likely symbols defined by those trees will become borked and the compiler will blow up (SI-5797).
+ // To work around we just erase all symbols and types and then hope that we'll be able to correctly retypecheck.
+ // For ones who're not affected by scalac Stockholm syndrome, this might seem to be an extremely naive fix, but well...
+ //
+ // Of course, sometimes erasing everything won't work, because if a given identifier got resolved to something
+ // in one lexical scope, it can get resolved to something else.
+ //
+ // What do we do in these cases? Enter the workaround for the workaround: resetLocalAttrs, which only destroys
+ // locally defined symbols, but doesn't touch references to stuff declared outside of a given tree.
+ // That's what localOnly and vetoScope are for.
+ if (dupl.hasSymbol) {
+ val sym = dupl.symbol
+ val vetoScope = localOnly && !(locals contains sym)
+ val vetoLabel = keepLabels && sym.isLabel
+ val vetoThis = dupl.isInstanceOf[This] && sym.isPackageClass
+ if (!(vetoScope || vetoLabel || vetoThis)) dupl.symbol = NoSymbol
+ }
+ dupl.tpe = null
+ dupl
+ }
+ }
+ }
}
- }
- class FilterTreeTraverser(p: Tree => Boolean) extends Traverser {
- val hits = new ListBuffer[Tree]
- override def traverse(t: Tree) {
- if (p(t)) hits += t
- super.traverse(t)
- }
- }
+ def transform(x: Tree): Tree = {
+ if (localOnly)
+ new MarkLocals().traverse(x)
- class FindTreeTraverser(p: Tree => Boolean) extends Traverser {
- var result: Option[Tree] = None
- override def traverse(t: Tree) {
- if (result.isEmpty) {
- if (p(t)) result = Some(t)
- super.traverse(t)
+ if (localOnly && debug) {
+ assert(locals.size == orderedLocals.size)
+ val msg = orderedLocals.toList filter {_ != NoSymbol} map {" " + _} mkString EOL
+ trace("locals (%d total): %n".format(orderedLocals.size))(msg)
}
- }
- }
- object resetPos extends Traverser {
- override def traverse(t: Tree) {
- if (t != EmptyTree) t.setPos(NoPosition)
- super.traverse(t)
+ new Transformer().transform(x)
}
}
+ /* New pattern matching cases:
- /** resets symbol and tpe fields in a tree, @see ResetAttrsTraverse
- */
- def resetAllAttrs[A<:Tree](x:A): A = { new ResetAttrsTraverser().traverse(x); x }
- def resetLocalAttrs[A<:Tree](x:A): A = { new ResetLocalAttrsTraverser().traverse(x); x }
-
- /** A traverser which resets symbol and tpe fields of all nodes in a given tree
- * except for (1) TypeTree nodes, whose <code>.tpe</code> field is kept, and
- * (2) This(pkg) nodes, where pkg refers to a package symbol -- their attributes are kept, and
- * (3) if a <code>.symbol</code> field refers to a symbol which is defined
- * outside the tree, it is also kept.
- *
- * (2) is necessary because some This(pkg) are generated where pkg is not
- * an enclosing package.n In that case, resetting the symbol would cause the
- * next type checking run to fail. See #3152.
- *
- * (bq:) This traverser has mutable state and should be discarded after use
- */
- private class ResetAttrsTraverser extends Traverser {
- protected def isLocal(sym: Symbol): Boolean = true
- protected def resetDef(tree: Tree) {
- tree.symbol = NoSymbol
- }
- override def traverse(tree: Tree): Unit = {
- tree match {
- case _: DefTree | Function(_, _) | Template(_, _, _) =>
- resetDef(tree)
- tree.tpe = null
- case tpt: TypeTree =>
- if (tpt.wasEmpty) tree.tpe = null
- case This(_) if tree.symbol != null && tree.symbol.isPackageClass =>
- ;
- case EmptyTree =>
- ;
- case _ =>
- if (tree.hasSymbol && isLocal(tree.symbol)) tree.symbol = NoSymbol
- tree.tpe = null
- }
- super.traverse(tree)
- }
- }
+ case Parens(expr) (only used during parsing)
+ case DocDef(comment, defn) => (eliminated by typer)
+ case TypeTreeWithDeferredRefCheck() => (created and eliminated by typer)
+ case SelectFromArray(_, _, _) => (created and eliminated by erasure)
+ case InjectDerivedValue(_) => (created and eliminated by erasure)
- private class ResetLocalAttrsTraverser extends ResetAttrsTraverser {
- private val erasedSyms = HashSet[Symbol](8)
- override protected def isLocal(sym: Symbol) = erasedSyms(sym)
- override protected def resetDef(tree: Tree) {
- erasedSyms addEntry tree.symbol
- super.resetDef(tree)
- }
- override def traverse(tree: Tree): Unit = tree match {
- case Template(parents, self, body) =>
- for (stat <- body)
- if (stat.isDef) erasedSyms.addEntry(stat.symbol)
- super.traverse(tree)
- case _ =>
- super.traverse(tree)
- }
- }
-}
+ */
+ }
diff --git a/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala b/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala
index 9ec51c3..d5fae97 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
diff --git a/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala b/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala
index 73c8a94..a573ddf 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Change.scala b/src/compiler/scala/tools/nsc/ast/parser/Change.scala
index 9b8ed0d..57dc48a 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Change.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Change.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc.ast.parser
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
old mode 100644
new mode 100755
index a3fd44f..553a208
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Burak Emir
*/
@@ -7,12 +7,14 @@ package scala.tools.nsc
package ast.parser
import scala.collection.mutable
-import mutable.{ Buffer, ArrayBuffer, ListBuffer, HashMap }
+import mutable.{ Buffer, ArrayBuffer, ListBuffer }
import scala.util.control.ControlThrowable
-import scala.tools.nsc.util.{SourceFile,CharArrayReader}
+import scala.tools.nsc.util.CharArrayReader
+import scala.reflect.internal.util.SourceFile
import scala.xml.{ Text, TextBuffer }
+import scala.xml.parsing.MarkupParserCommon
import scala.xml.Utility.{ isNameStart, isNameChar, isSpace }
-import util.Chars.{ SU, LF }
+import scala.reflect.internal.Chars.{ SU, LF }
// XXX/Note: many/most of the functions in here are almost direct cut and pastes
// from another file - scala.xml.parsing.MarkupParser, it looks like.
@@ -47,7 +49,7 @@ trait MarkupParsers {
import global._
- class MarkupParser(parser: SourceFileParser, final val preserveWS: Boolean) extends scala.xml.parsing.MarkupParserCommon {
+ class MarkupParser(parser: SourceFileParser, final val preserveWS: Boolean) extends MarkupParserCommon {
import Tokens.{ EMPTY, LBRACE, RBRACE }
@@ -76,10 +78,13 @@ trait MarkupParsers {
var tmppos : Position = NoPosition
def ch = input.ch
/** this method assign the next character to ch and advances in input */
- def nextch = { val result = input.ch; input.nextChar(); result }
- def ch_returning_nextch = nextch
+ def nextch() { input.nextChar() }
- def mkProcInstr(position: Position, name: String, text: String): Tree =
+ protected def ch_returning_nextch: Char = {
+ val result = ch; input.nextChar(); result
+ }
+
+ def mkProcInstr(position: Position, name: String, text: String): ElementType =
parser.symbXMLBuilder.procInstr(position, name, text)
var xEmbeddedBlock = false
@@ -113,7 +118,7 @@ trait MarkupParsers {
* | `{` scalablock `}`
*/
def xAttributes = {
- val aMap = new HashMap[String, Tree]()
+ val aMap = mutable.LinkedHashMap[String, Tree]()
while (isNameStart(ch)) {
val start = curOffset
@@ -263,7 +268,7 @@ trait MarkupParsers {
val (qname, attrMap) = xTag(())
if (ch == '/') { // empty element
xToken("/>")
- handle.element(r2p(start, start, curOffset), qname, attrMap, new ListBuffer[Tree])
+ handle.element(r2p(start, start, curOffset), qname, attrMap, true, new ListBuffer[Tree])
}
else { // handle content
xToken('>')
@@ -277,7 +282,7 @@ trait MarkupParsers {
val pos = r2p(start, start, curOffset)
qname match {
case "xml:group" => handle.group(pos, ts)
- case _ => handle.element(pos, qname, attrMap, ts)
+ case _ => handle.element(pos, qname, attrMap, false, ts)
}
}
}
@@ -285,7 +290,7 @@ trait MarkupParsers {
/** parse character data.
* precondition: xEmbeddedBlock == false (we are not in a scala block)
*/
- def xText: String = {
+ private def xText: String = {
assert(!xEmbeddedBlock, "internal error: encountered embedded block")
val buf = new StringBuilder
def done = buf.toString
@@ -393,12 +398,12 @@ trait MarkupParsers {
/** xScalaPatterns ::= patterns
*/
- def xScalaPatterns: List[Tree] = escapeToScala(parser.seqPatterns(), "pattern")
+ def xScalaPatterns: List[Tree] = escapeToScala(parser.xmlSeqPatterns(), "pattern")
def reportSyntaxError(pos: Int, str: String) = parser.syntaxError(pos, str)
- def reportSyntaxError(str: String) = {
+ def reportSyntaxError(str: String) {
reportSyntaxError(curOffset, "in XML literal: " + str)
- nextch
+ nextch()
}
/** '<' xPattern ::= Name [S] { xmlPattern | '{' pattern3 '}' } ETag
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 84959d0..b9e4109 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -9,11 +9,12 @@
package scala.tools.nsc
package ast.parser
-import scala.collection.mutable.ListBuffer
-import util.{ SourceFile, OffsetPosition, FreshNameCreator }
-import scala.reflect.generic.{ ModifierFlags => Flags }
+import scala.collection.mutable.{ListBuffer, StringBuilder}
+import scala.reflect.internal.{ ModifierFlags => Flags }
+import scala.reflect.internal.Chars.{ isScalaLetter }
+import scala.reflect.internal.util.{ SourceFile, OffsetPosition }
import Tokens._
-import util.Chars.{ isScalaLetter }
+import util.FreshNameCreator
/** Historical note: JavaParsers started life as a direct copy of Parsers
* but at a time when that Parsers had been replaced by a different one.
@@ -28,7 +29,10 @@ trait ParsersCommon extends ScannersCommon {
val global : Global
import global._
- trait ParserCommon {
+ /** This is now an abstract class, only to work around the optimizer:
+ * methods in traits are never inlined.
+ */
+ abstract class ParserCommon {
val in: ScannerCommon
def freshName(prefix: String): Name
def freshTermName(prefix: String): TermName
@@ -42,31 +46,36 @@ trait ParsersCommon extends ScannersCommon {
* will be called, so a parse error will still result. If the grouping is
* optional, in.token should be tested before calling these methods.
*/
- def inParens[T](body: => T): T = {
+ @inline final def inParens[T](body: => T): T = {
accept(LPAREN)
val ret = body
accept(RPAREN)
ret
}
- def inParensOrError[T](body: => T, alt: T): T =
+ @inline final def inParensOrError[T](body: => T, alt: T): T =
if (in.token == LPAREN) inParens(body)
else { accept(LPAREN) ; alt }
- def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, Literal(()))
- def inParensOrNil[T](body: => List[T]): List[T] = inParensOrError(body, Nil)
- def inBraces[T](body: => T): T = {
+ @inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, Literal(Constant()))
+ @inline final def inParensOrNil[T](body: => List[T]): List[T] = inParensOrError(body, Nil)
+
+ @inline final def inBraces[T](body: => T): T = {
accept(LBRACE)
val ret = body
accept(RBRACE)
ret
}
- def inBracesOrError[T](body: => T, alt: T): T =
+ @inline final def inBracesOrError[T](body: => T, alt: T): T =
if (in.token == LBRACE) inBraces(body)
else { accept(LBRACE) ; alt }
- def inBracesOrNil[T](body: => List[T]): List[T] = inBracesOrError(body, Nil)
- def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, Literal(()))
- def inBrackets[T](body: => T): T = {
+ @inline final def inBracesOrNil[T](body: => List[T]): List[T] = inBracesOrError(body, Nil)
+ @inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, Literal(Constant()))
+ @inline final def dropAnyBraces[T](body: => T): T =
+ if (in.token == LBRACE) inBraces(body)
+ else body
+
+ @inline final def inBrackets[T](body: => T): T = {
accept(LBRACKET)
val ret = body
accept(RBRACKET)
@@ -75,12 +84,13 @@ trait ParsersCommon extends ScannersCommon {
/** Creates an actual Parens node (only used during parsing.)
*/
- def makeParens(body: => List[Tree]): Parens =
+ @inline final def makeParens(body: => List[Tree]): Parens =
Parens(inParens(if (in.token == RPAREN) Nil else body))
}
}
-/** <p>Performs the following context-free rewritings:</p>
+/** Performs the following context-free rewritings:
+ *
* <ol>
* <li>
* Places all pattern variables in Bind nodes. In a pattern, for
@@ -120,8 +130,6 @@ self =>
val global: Global
import global._
- private val glob: global.type = global
-
case class OpInfo(operand: Tree, operator: Name, offset: Offset)
class SourceFileParser(val source: SourceFile) extends Parser {
@@ -155,9 +163,9 @@ self =>
def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg)
/** the markup parser */
- lazy val xmlp = new MarkupParser(this, true)
+ lazy val xmlp = new MarkupParser(this, preserveWS = true)
- object symbXMLBuilder extends SymbolicXMLBuilder(this, true) { // DEBUG choices
+ object symbXMLBuilder extends SymbolicXMLBuilder(this, preserveWS = true) { // DEBUG choices
val global: self.global.type = self.global
def freshName(prefix: String): Name = SourceFileParser.this.freshName(prefix)
}
@@ -184,7 +192,7 @@ self =>
override def blockExpr(): Tree = skipBraces(EmptyTree)
- override def templateBody(isPre: Boolean) = skipBraces(emptyValDef, List(EmptyTree))
+ override def templateBody(isPre: Boolean) = skipBraces((emptyValDef, EmptyTree.asList))
}
class UnitParser(val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) {
@@ -205,13 +213,11 @@ self =>
}
private var smartParsing = false
- private def withSmartParsing[T](body: => T): T = {
+ @inline private def withSmartParsing[T](body: => T): T = {
val saved = smartParsing
- try {
- smartParsing = true
- body
- }
- finally smartParsing = saved // false
+ smartParsing = true
+ try body
+ finally smartParsing = saved
}
val syntaxErrors = new ListBuffer[(Int, String)]
@@ -247,6 +253,19 @@ self =>
final val InBlock = 1
final val InTemplate = 2
+ // These symbols may not yet be loaded (e.g. in the ide) so don't go
+ // through definitions to obtain the names.
+ lazy val ScalaValueClassNames = Seq(tpnme.AnyVal,
+ tpnme.Unit,
+ tpnme.Boolean,
+ tpnme.Byte,
+ tpnme.Short,
+ tpnme.Char,
+ tpnme.Int,
+ tpnme.Long,
+ tpnme.Float,
+ tpnme.Double)
+
import nme.raw
abstract class Parser extends ParserCommon {
@@ -261,20 +280,12 @@ self =>
/** whether a non-continuable syntax error has been seen */
private var lastErrorOffset : Int = -1
- object treeBuilder extends TreeBuilder {
- val global: self.global.type = self.global
- def freshName(prefix: String): Name = freshTermName(prefix)
- def freshTermName(prefix: String): TermName = Parser.this.freshTermName(prefix)
- def freshTypeName(prefix: String): TypeName = Parser.this.freshTypeName(prefix)
- def o2p(offset: Int) = Parser.this.o2p(offset)
- def r2p(start: Int, point: Int, end: Int) = Parser.this.r2p(start, point, end)
- }
import treeBuilder.{global => _, _}
/** The types of the context bounds of type parameters of the surrounding class
*/
private var classContextBounds: List[Tree] = Nil
- private def savingClassContextBounds[T](op: => T): T = {
+ @inline private def savingClassContextBounds[T](op: => T): T = {
val saved = classContextBounds
try op
finally classContextBounds = saved
@@ -288,11 +299,11 @@ self =>
inScalaPackage = false
currentPackage = ""
}
- private lazy val anyValNames: Set[Name] = tpnme.ScalaValueNames.toSet + tpnme.AnyVal
+ private lazy val primitiveNames: Set[Name] = tpnme.ScalaValueNames.toSet
private def inScalaRootPackage = inScalaPackage && currentPackage == "scala"
private def isScalaArray(name: Name) = inScalaRootPackage && name == tpnme.Array
- private def isAnyValType(name: Name) = inScalaRootPackage && anyValNames(name)
+ private def isPrimitiveType(name: Name) = inScalaRootPackage && primitiveNames(name)
def parseStartRule: () => Tree
@@ -310,10 +321,10 @@ self =>
* by compilationUnit().
*/
def scriptBody(): Tree = {
- val stmts = templateStatSeq(false)._2
+ val stmts = templateStats()
accept(EOF)
- def mainModuleName = settings.script.value
+ def mainModuleName = newTermName(settings.script.value)
/** If there is only a single object template in the file and it has a
* suitable main method, we will use it rather than building another object
* around it. Since objects are loaded lazily the whole script would have
@@ -342,7 +353,7 @@ self =>
* whole additional parse. So instead, if the actual object's name differs from
* what the script is expecting, we transform it to match.
*/
- if (name.toString == mainModuleName) md
+ if (name == mainModuleName) md
else treeCopy.ModuleDef(md, mods, mainModuleName, template)
case _ =>
/** If we see anything but the above, fail. */
@@ -351,21 +362,23 @@ self =>
Some(makePackaging(0, emptyPkg, newStmts))
}
- if (mainModuleName == ScriptRunner.defaultScriptMain)
+ if (mainModuleName == newTermName(ScriptRunner.defaultScriptMain))
searchForMain() foreach { return _ }
/** Here we are building an AST representing the following source fiction,
- * where <moduleName> is from -Xscript (defaults to "Main") and <stmts> are
+ * where `moduleName` is from -Xscript (defaults to "Main") and <stmts> are
* the result of parsing the script file.
*
- * object <moduleName> {
+ * {{{
+ * object moduleName {
* def main(argv: Array[String]): Unit = {
* val args = argv
* new AnyRef {
- * <stmts>
+ * stmts
* }
* }
* }
+ * }}}
*/
import definitions._
@@ -374,21 +387,20 @@ self =>
NoMods,
nme.CONSTRUCTOR,
Nil,
- List(Nil),
+ ListOfNil,
TypeTree(),
- Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), Nil)), Literal(Constant(())))
+ Block(List(Apply(gen.mkSuperSelect, Nil)), Literal(Constant(())))
)
// def main
def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String)))
- def mainParameter = List(ValDef(Modifiers(Flags.PARAM), "argv", mainParamType, EmptyTree))
- def mainSetArgv = List(ValDef(NoMods, "args", TypeTree(), Ident("argv")))
- def mainNew = makeNew(Nil, emptyValDef, stmts, List(Nil), NoPosition, NoPosition)
- def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, mainNew))
+ def mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.argv, mainParamType, EmptyTree))
+ def mainSetArgv = List(ValDef(NoMods, nme.args, TypeTree(), Ident(nme.argv)))
+ def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, makeAnonymousNew(stmts)))
// object Main
- def moduleName = ScriptRunner scriptMain settings
- def moduleBody = Template(List(scalaScalaObjectConstr), emptyValDef, List(emptyInit, mainDef))
+ def moduleName = newTermName(ScriptRunner scriptMain settings)
+ def moduleBody = Template(List(atPos(o2p(in.offset))(scalaAnyRefConstr)), emptyValDef, List(emptyInit, mainDef))
def moduleDef = ModuleDef(NoMods, moduleName, moduleBody)
// package <empty> { ... }
@@ -397,13 +409,13 @@ self =>
/* --------------- PLACEHOLDERS ------------------------------------------- */
- /** The implicit parameters introduced by `_' in the current expression.
- * Parameters appear in reverse order
+ /** The implicit parameters introduced by `_` in the current expression.
+ * Parameters appear in reverse order.
*/
var placeholderParams: List[ValDef] = Nil
- /** The placeholderTypes introduced by `_' in the current type.
- * Parameters appear in reverse order
+ /** The placeholderTypes introduced by `_` in the current type.
+ * Parameters appear in reverse order.
*/
var placeholderTypes: List[TypeDef] = Nil
@@ -456,15 +468,13 @@ self =>
/* ------------- ERROR HANDLING ------------------------------------------- */
- var assumedClosingParens = collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ var assumedClosingParens = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
private var inFunReturnType = false
- private def fromWithinReturnType[T](body: => T): T = {
+ @inline private def fromWithinReturnType[T](body: => T): T = {
val saved = inFunReturnType
- try {
- inFunReturnType = true
- body
- }
+ inFunReturnType = true
+ try body
finally inFunReturnType = saved
}
@@ -528,9 +538,7 @@ self =>
def expectedMsg(token: Int): String =
token2string(token) + " expected but " +token2string(in.token) + " found."
- /** Consume one token of the specified type, or
- * signal an error if it is not there.
- */
+ /** Consume one token of the specified type, or signal an error if it is not there. */
def accept(token: Int): Int = {
val offset = in.offset
if (in.token != token) {
@@ -547,8 +555,10 @@ self =>
offset
}
- /** semi = nl {nl} | `;'
+ /** {{{
+ * semi = nl {nl} | `;`
* nl = `\n' // where allowed
+ * }}}
*/
def acceptStatSep(): Unit = in.token match {
case NEWLINE | NEWLINES => in.nextToken()
@@ -562,7 +572,7 @@ self =>
def errorTermTree = Literal(Constant(null)) setPos o2p(in.offset)
def errorPatternTree = Ident(nme.WILDCARD) setPos o2p(in.offset)
- /** Check that type parameter is not by name or repeated */
+ /** Check that type parameter is not by name or repeated. */
def checkNotByNameOrVarargs(tpt: Tree) = {
if (treeInfo isByNameParamType tpt)
syntaxError(tpt.pos, "no by-name parameter type allowed here", false)
@@ -570,7 +580,7 @@ self =>
syntaxError(tpt.pos, "no * parameter type allowed here", false)
}
- /** Check that tree is a legal clause of a forSome */
+ /** Check that tree is a legal clause of a forSome. */
def checkLegalExistential(t: Tree) = t match {
case TypeDef(_, _, _, TypeBoundsTree(_, _)) |
ValDef(_, _, _, EmptyTree) | EmptyTree =>
@@ -615,8 +625,8 @@ self =>
def isLiteralToken(token: Int) = token match {
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
- STRINGLIT | SYMBOLLIT | TRUE | FALSE | NULL => true
- case _ => false
+ STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL => true
+ case _ => false
}
def isLiteral = isLiteralToken(in.token)
@@ -647,8 +657,7 @@ self =>
/* --------- COMMENT AND ATTRIBUTE COLLECTION ----------------------------- */
- /** Join the comment associated with a definition
- */
+ /** Join the comment associated with a definition. */
def joinComment(trees: => List[Tree]): List[Tree] = {
val doc = in.flushDoc
if ((doc ne null) && doc.raw.length > 0) {
@@ -657,7 +666,8 @@ self =>
DocDef(doc, t) setPos {
if (t.pos.isDefined) {
val pos = doc.pos.withEnd(t.pos.endOrPoint)
- if (t.pos.isOpaqueRange) pos else pos.makeTransparent
+ // always make the position transparent
+ pos.makeTransparent
} else {
t.pos
}
@@ -684,15 +694,13 @@ self =>
def atPos[T <: Tree](pos: Position)(t: T): T =
global.atPos(pos)(t)
- /** Convert tree to formal parameter list
- */
+ /** Convert tree to formal parameter list. */
def convertToParams(tree: Tree): List[ValDef] = tree match {
case Parens(ts) => ts map convertToParam
case _ => List(convertToParam(tree))
}
- /** Convert tree to formal parameter
- */
+ /** Convert tree to formal parameter. */
def convertToParam(tree: Tree): ValDef = atPos(tree.pos) {
def removeAsPlaceholder(name: Name) {
placeholderParams = placeholderParams filter (_.name != name)
@@ -710,8 +718,7 @@ self =>
}
}
- /** Convert (qual)ident to type identifier
- */
+ /** Convert (qual)ident to type identifier. */
def convertToTypeId(tree: Tree): Tree = atPos(tree.pos) {
convertToTypeName(tree) getOrElse {
syntaxError(tree.pos, "identifier expected", false)
@@ -719,10 +726,8 @@ self =>
}
}
- /** part { `sep` part }
- * Or if sepFirst is true, { `sep` part }
- */
- def tokenSeparated[T](separator: Int, sepFirst: Boolean, part: => T): List[T] = {
+ /** {{{ part { `sep` part } }}},or if sepFirst is true, {{{ { `sep` part } }}}. */
+ final def tokenSeparated[T](separator: Int, sepFirst: Boolean, part: => T): List[T] = {
val ts = new ListBuffer[T]
if (!sepFirst)
ts += part
@@ -733,13 +738,13 @@ self =>
}
ts.toList
}
- def commaSeparated[T](part: => T): List[T] = tokenSeparated(COMMA, false, part)
- def caseSeparated[T](part: => T): List[T] = tokenSeparated(CASE, true, part)
- def readAnnots[T](part: => T): List[T] = tokenSeparated(AT, true, part)
+ @inline final def commaSeparated[T](part: => T): List[T] = tokenSeparated(COMMA, sepFirst = false, part)
+ @inline final def caseSeparated[T](part: => T): List[T] = tokenSeparated(CASE, sepFirst = true, part)
+ @inline final def readAnnots[T](part: => T): List[T] = tokenSeparated(AT, sepFirst = true, part)
/* --------- OPERAND/OPERATOR STACK --------------------------------------- */
- /** modes for infix types */
+ /** Modes for infix types. */
object InfixMode extends Enumeration {
val FirstOp, LeftOp, RightOp = Value
}
@@ -749,7 +754,7 @@ self =>
def precedence(operator: Name): Int =
if (operator eq nme.ERROR) -1
else {
- val firstCh = operator(0)
+ val firstCh = operator.startChar
if (isScalaLetter(firstCh)) 1
else if (nme.isOpAssignmentName(operator)) 0
else firstCh match {
@@ -774,8 +779,7 @@ self =>
syntaxError(
offset, "left- and right-associative operators with same precedence may not be mixed", false)
- def reduceStack(isExpr: Boolean, base: List[OpInfo], top0: Tree,
- prec: Int, leftAssoc: Boolean): Tree = {
+ def reduceStack(isExpr: Boolean, base: List[OpInfo], top0: Tree, prec: Int, leftAssoc: Boolean): Tree = {
var top = top0
if (opstack != base && precedence(opstack.head.operator) == prec)
checkAssoc(opstack.head.offset, opstack.head.operator, leftAssoc)
@@ -803,7 +807,9 @@ self =>
* threaded through numerous methods as boolean isPattern.
*/
trait PatternContextSensitive {
- /** ArgType ::= Type
+ /** {{{
+ * ArgType ::= Type
+ * }}}
*/
def argType(): Tree
def functionArgType(): Tree
@@ -821,7 +827,7 @@ self =>
atPos(start, in.skipToken()) { makeFunctionTypeTree(ts, typ()) }
else {
ts foreach checkNotByNameOrVarargs
- val tuple = atPos(start) { makeTupleType(ts, true) }
+ val tuple = atPos(start) { makeTupleType(ts, flattenUnary = true) }
infixTypeRest(
compoundTypeRest(
annotTypeRest(
@@ -838,11 +844,13 @@ self =>
ExistentialTypeTree(t, whereClauses)
}
- /** Type ::= InfixType `=>' Type
+ /** {{{
+ * Type ::= InfixType `=>' Type
* | `(' [`=>' Type] `)' `=>' Type
* | InfixType [ExistentialClause]
* ExistentialClause ::= forSome `{' ExistentialDcl {semi ExistentialDcl}} `}'
* ExistentialDcl ::= type TypeDcl | val ValDcl
+ * }}}
*/
def typ(): Tree = placeholderTypeBoundary {
val start = in.offset
@@ -857,28 +865,34 @@ self =>
}
}
- /** TypeArgs ::= `[' ArgType {`,' ArgType} `]'
+ /** {{{
+ * TypeArgs ::= `[' ArgType {`,' ArgType} `]'
+ * }}}
*/
def typeArgs(): List[Tree] = inBrackets(types())
- /** AnnotType ::= SimpleType {Annotation}
+ /** {{{
+ * AnnotType ::= SimpleType {Annotation}
+ * }}}
*/
def annotType(): Tree = placeholderTypeBoundary { annotTypeRest(simpleType()) }
- /** SimpleType ::= SimpleType TypeArgs
+ /** {{{
+ * SimpleType ::= SimpleType TypeArgs
* | SimpleType `#' Id
* | StableId
* | Path `.' type
* | `(' Types `)'
* | WildcardType
+ * }}}
*/
def simpleType(): Tree = {
val start = in.offset
simpleTypeRest(in.token match {
- case LPAREN => atPos(start)(makeTupleType(inParens(types()), true))
+ case LPAREN => atPos(start)(makeTupleType(inParens(types()), flattenUnary = true))
case USCORE => wildcardType(in.skipToken())
case _ =>
- path(false, true) match {
+ path(thisOK = false, typeOK = true) match {
case r @ SingletonTypeTree(_) => r
case r => convertToTypeId(r)
}
@@ -888,18 +902,20 @@ self =>
private def typeProjection(t: Tree): Tree = {
val hashOffset = in.skipToken()
val nameOffset = in.offset
- val name = identForType(false)
+ val name = identForType(skipIt = false)
val point = if (name == tpnme.ERROR) hashOffset else nameOffset
atPos(t.pos.startOrPoint, point)(SelectFromTypeTree(t, name))
}
def simpleTypeRest(t: Tree): Tree = in.token match {
case HASH => simpleTypeRest(typeProjection(t))
- case LBRACKET => simpleTypeRest(atPos(t.pos.startOrPoint)(AppliedTypeTree(t, typeArgs())))
+ case LBRACKET => simpleTypeRest(atPos(t.pos.startOrPoint, t.pos.point)(AppliedTypeTree(t, typeArgs())))
case _ => t
}
- /** CompoundType ::= AnnotType {with AnnotType} [Refinement]
+ /** {{{
+ * CompoundType ::= AnnotType {with AnnotType} [Refinement]
* | Refinement
+ * }}}
*/
def compoundType(): Tree = compoundTypeRest(
if (in.token == LBRACE) atPos(o2p(in.offset))(scalaAnyRefConstr)
@@ -935,7 +951,7 @@ self =>
if (isIdent && in.name != nme.STAR) {
val opOffset = in.offset
val leftAssoc = treeInfo.isLeftAssoc(in.name)
- if (mode != InfixMode.FirstOp) checkAssoc(opOffset, in.name, mode == InfixMode.LeftOp)
+ if (mode != InfixMode.FirstOp) checkAssoc(opOffset, in.name, leftAssoc = mode == InfixMode.LeftOp)
val op = identForType()
val tycon = atPos(opOffset) { Ident(op) }
newLineOptWhenFollowing(isTypeIntroToken)
@@ -947,12 +963,16 @@ self =>
} else t
}
- /** InfixType ::= CompoundType {id [nl] CompoundType}
+ /** {{{
+ * InfixType ::= CompoundType {id [nl] CompoundType}
+ * }}}
*/
def infixType(mode: InfixMode.Value): Tree =
placeholderTypeBoundary { infixTypeRest(compoundType(), mode) }
- /** Types ::= Type {`,' Type}
+ /** {{{
+ * Types ::= Type {`,' Type}
+ * }}}
*/
def types(): List[Tree] = commaSeparated(argType())
def functionTypes(): List[Tree] = commaSeparated(functionArgType())
@@ -968,7 +988,8 @@ self =>
syntaxErrorOrIncomplete(expectedMsg(IDENTIFIER), skipIt)
nme.ERROR
}
- def ident(): Name = ident(true)
+ def ident(): Name = ident(skipIt = true)
+ def rawIdent(): Name = try in.name finally in.nextToken()
/** For when it's known already to be a type name. */
def identForType(): TypeName = ident().toTypeName
@@ -978,14 +999,16 @@ self =>
val point = in.offset
//assert(t.pos.isDefined, t)
if (t != EmptyTree)
- Select(t, ident(false)) setPos r2p(t.pos.startOrPoint, point, in.lastOffset)
+ Select(t, ident(skipIt = false)) setPos r2p(t.pos.startOrPoint, point, in.lastOffset)
else
errorTermTree // has already been reported
}
- /** Path ::= StableId
+ /** {{{
+ * Path ::= StableId
* | [Ident `.'] this
* AnnotType ::= Path [`.' type]
+ * }}}
*/
def path(thisOK: Boolean, typeOK: Boolean): Tree = {
val start = in.offset
@@ -1006,7 +1029,7 @@ self =>
val tok = in.token
val name = ident()
t = atPos(start) {
- if (tok == BACKQUOTED_IDENT) new BackQuotedIdent(name)
+ if (tok == BACKQUOTED_IDENT) Ident(name) updateAttachment BackquotedIdentifierAttachment
else Ident(name)
}
if (in.token == DOT) {
@@ -1034,35 +1057,41 @@ self =>
if (typeOK && in.token == TYPE) {
in.nextToken()
atPos(t.pos.startOrPoint, dotOffset) { SingletonTypeTree(t) }
- } else {
+ }
+ else {
val t1 = selector(t)
if (in.token == DOT) { selectors(t1, typeOK, in.skipToken()) }
else t1
}
- /** MixinQualifier ::= `[' Id `]'
+ /** {{{
+ * MixinQualifier ::= `[' Id `]'
+ * }}}
*/
def mixinQualifierOpt(): TypeName =
if (in.token == LBRACKET) inBrackets(identForType())
else tpnme.EMPTY
- /** StableId ::= Id
+ /** {{{
+ * StableId ::= Id
* | Path `.' Id
- * | [id '.'] super [`[' id `]']`.' id
+ * | [id `.'] super [`[' id `]']`.' id
+ * }}}
*/
def stableId(): Tree =
- path(false, false)
+ path(thisOK = false, typeOK = false)
- /** QualId ::= Id {`.' Id}
+ /** {{{
+ * QualId ::= Id {`.' Id}
+ * }}}
*/
def qualId(): Tree = {
val start = in.offset
val id = atPos(start) { Ident(ident()) }
- if (in.token == DOT) { selectors(id, false, in.skipToken()) }
+ if (in.token == DOT) { selectors(id, typeOK = false, in.skipToken()) }
else id
}
- /** Calls qualId() and manages some package state.
- */
+ /** Calls `qualId()` and manages some package state. */
private def pkgQualId() = {
if (in.token == IDENTIFIER && in.name.encode == nme.scala_)
inScalaPackage = true
@@ -1076,33 +1105,78 @@ self =>
pkg
}
- /** SimpleExpr ::= literal
+ /** {{{
+ * SimpleExpr ::= literal
* | symbol
* | null
- * @note The returned tree does not yet have a position
+ * }}}
*/
- def literal(isNegated: Boolean): Tree = {
- def finish(value: Any): Tree = {
- val t = Literal(Constant(value))
- in.nextToken()
- t
- }
- if (in.token == SYMBOLLIT)
- Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
- else finish(in.token match {
- case CHARLIT => in.charVal
- case INTLIT => in.intVal(isNegated).toInt
- case LONGLIT => in.intVal(isNegated)
- case FLOATLIT => in.floatVal(isNegated).toFloat
- case DOUBLELIT => in.floatVal(isNegated)
- case STRINGLIT => in.strVal
- case TRUE => true
- case FALSE => false
- case NULL => null
- case _ =>
- syntaxErrorOrIncomplete("illegal literal", true)
- null
- })
+ def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Int = in.offset): Tree = {
+ atPos(start) {
+ def finish(value: Any): Tree = {
+ val t = Literal(Constant(value))
+ in.nextToken()
+ t
+ }
+ if (in.token == SYMBOLLIT)
+ Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
+ else if (in.token == INTERPOLATIONID)
+ interpolatedString(inPattern = inPattern)
+ else finish(in.token match {
+ case CHARLIT => in.charVal
+ case INTLIT => in.intVal(isNegated).toInt
+ case LONGLIT => in.intVal(isNegated)
+ case FLOATLIT => in.floatVal(isNegated).toFloat
+ case DOUBLELIT => in.floatVal(isNegated)
+ case STRINGLIT | STRINGPART => in.strVal.intern()
+ case TRUE => true
+ case FALSE => false
+ case NULL => null
+ case _ =>
+ syntaxErrorOrIncomplete("illegal literal", true)
+ null
+ })
+ }
+ }
+
+ private def stringOp(t: Tree, op: TermName) = {
+ val str = in.strVal
+ in.nextToken()
+ if (str.length == 0) t
+ else atPos(t.pos.startOrPoint) {
+ Apply(Select(t, op), List(Literal(Constant(str))))
+ }
+ }
+
+ private def interpolatedString(inPattern: Boolean = false): Tree = atPos(in.offset) {
+ val start = in.offset
+ val interpolator = in.name
+
+ val partsBuf = new ListBuffer[Tree]
+ val exprBuf = new ListBuffer[Tree]
+ in.nextToken()
+ while (in.token == STRINGPART) {
+ partsBuf += literal()
+ exprBuf += {
+ if (inPattern) dropAnyBraces(pattern())
+ else {
+ if (in.token == IDENTIFIER) atPos(in.offset)(Ident(ident()))
+ else if(in.token == LBRACE) expr()
+ else if(in.token == THIS) { in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY)) }
+ else {
+ syntaxErrorOrIncomplete("error in interpolated string: identifier or block expected", true)
+ EmptyTree
+ }
+ }
+ }
+ }
+ if (in.token == STRINGLIT) partsBuf += literal()
+
+ val t1 = atPos(o2p(start)) { Ident(nme.StringContext) }
+ val t2 = atPos(start) { Apply(t1, partsBuf.toList) }
+ t2 setPos t2.pos.makeTransparent
+ val t3 = Select(t2, interpolator) setPos t2.pos
+ atPos(start) { Apply(t3, exprBuf.toList) }
}
/* ------------- NEW LINES ------------------------------------------------- */
@@ -1128,7 +1202,9 @@ self =>
/* ------------- TYPES ---------------------------------------------------- */
- /** TypedOpt ::= [`:' Type]
+ /** {{{
+ * TypedOpt ::= [`:' Type]
+ * }}}
*/
def typedOpt(): Tree =
if (in.token == COLON) { in.nextToken(); typ() }
@@ -1139,13 +1215,15 @@ self =>
else startInfixType()
def annotTypeRest(t: Tree): Tree =
- (t /: annotations(false)) (makeAnnotated)
+ (t /: annotations(skipNewLines = false)) (makeAnnotated)
- /** WildcardType ::= `_' TypeBounds
+ /** {{{
+ * WildcardType ::= `_' TypeBounds
+ * }}}
*/
def wildcardType(start: Int) = {
val pname = freshTypeName("_$")
- val t = atPos(start) { Ident(pname) }
+ val t = atPos(start)(Ident(pname))
val bounds = typeBounds()
val param = atPos(t.pos union bounds.pos) { makeSyntheticTypeParam(pname, bounds) }
placeholderTypes = param :: placeholderTypes
@@ -1154,7 +1232,9 @@ self =>
/* ----------- EXPRESSIONS ------------------------------------------------ */
- /** EqualsExpr ::= `=' Expr
+ /** {{{
+ * EqualsExpr ::= `=' Expr
+ * }}}
*/
def equalsExpr(): Tree = {
accept(EQUALS)
@@ -1169,7 +1249,7 @@ self =>
r
} else {
accept(LPAREN)
- Literal(true)
+ Literal(Constant(true))
}
}
@@ -1178,7 +1258,8 @@ self =>
*/
def statement(location: Int): Tree = expr(location) // !!! still needed?
- /** Expr ::= (Bindings | [`implicit'] Id | `_') `=>' Expr
+ /** {{{
+ * Expr ::= (Bindings | [`implicit'] Id | `_') `=>' Expr
* | Expr1
* ResultExpr ::= (Bindings | Id `:' CompoundType) `=>' Block
* | Expr1
@@ -1186,7 +1267,7 @@ self =>
* | try (`{' Block `}' | Expr) [catch `{' CaseClauses `}'] [finally Expr]
* | while `(' Expr `)' {nl} Expr
* | do Expr [semi] while `(' Expr `)'
- * | for (`(' Enumerators `)' | '{' Enumerators '}') {nl} [yield] Expr
+ * | for (`(' Enumerators `)' | `{' Enumerators `}') {nl} [yield] Expr
* | throw Expr
* | return [Expr]
* | [SimpleExpr `.'] Id `=' Expr
@@ -1198,6 +1279,7 @@ self =>
* Ascription ::= `:' CompoundType
* | `:' Annotation {Annotation}
* | `:' `_' `*'
+ * }}}
*/
def expr(): Tree = expr(Local)
@@ -1213,22 +1295,24 @@ self =>
res
}
- def expr0(location: Int): Tree = in.token match {
+
+ def expr0(location: Int): Tree = (in.token: @scala.annotation.switch) match {
case IF =>
- atPos(in.skipToken()) {
+ def parseIf = atPos(in.skipToken()) {
val cond = condExpr()
newLinesOpt()
val thenp = expr()
val elsep = if (in.token == ELSE) { in.nextToken(); expr() }
- else Literal(())
+ else Literal(Constant())
If(cond, thenp, elsep)
}
+ parseIf
case TRY =>
- atPos(in.skipToken()) {
+ def parseTry = atPos(in.skipToken()) {
val body = in.token match {
- case LBRACE => inBracesOrUnit(block())
- case LPAREN => inParensOrUnit(expr())
- case _ => expr()
+ case LBRACE => inBracesOrUnit(block())
+ case LPAREN => inParensOrUnit(expr())
+ case _ => expr()
}
def catchFromExpr() = List(makeCatchFromExpr(expr()))
val catches: List[CaseDef] =
@@ -1242,32 +1326,39 @@ self =>
}
}
val finalizer = in.token match {
- case FINALLY => in.nextToken() ; expr()
- case _ => EmptyTree
+ case FINALLY => in.nextToken(); expr()
+ case _ => EmptyTree
}
Try(body, catches, finalizer)
}
+ parseTry
case WHILE =>
- val start = in.offset
- atPos(in.skipToken()) {
- val lname: Name = freshTermName(nme.WHILE_PREFIX)
- val cond = condExpr()
- newLinesOpt()
- val body = expr()
- makeWhile(lname, cond, body)
+ def parseWhile = {
+ val start = in.offset
+ atPos(in.skipToken()) {
+ val cond = condExpr()
+ newLinesOpt()
+ val body = expr()
+ makeWhile(start, cond, body)
+ }
}
+ parseWhile
case DO =>
- val start = in.offset
- atPos(in.skipToken()) {
- val lname: Name = freshTermName(nme.DO_WHILE_PREFIX)
- val body = expr()
- if (isStatSep) in.nextToken()
- accept(WHILE)
- val cond = condExpr()
- makeDoWhile(lname, body, cond)
+ def parseDo = {
+ val start = in.offset
+ atPos(in.skipToken()) {
+ val lname: Name = freshTermName(nme.DO_WHILE_PREFIX)
+ val body = expr()
+ if (isStatSep) in.nextToken()
+ accept(WHILE)
+ val cond = condExpr()
+ makeDoWhile(lname, body, cond)
+ }
}
+ parseDo
case FOR =>
- atPos(in.skipToken()) {
+ val start = in.skipToken()
+ def parseFor = atPos(start) {
val enums =
if (in.token == LBRACE) inBracesOrNil(enumerators())
else inParensOrNil(enumerators())
@@ -1279,108 +1370,119 @@ self =>
makeFor(enums, expr())
}
}
+ def adjustStart(tree: Tree) =
+ if (tree.pos.isRange && start < tree.pos.start)
+ tree setPos tree.pos.withStart(start)
+ else tree
+ adjustStart(parseFor)
case RETURN =>
- atPos(in.skipToken()) {
- Return(if (isExprIntro) expr() else Literal(()))
- }
+ def parseReturn =
+ atPos(in.skipToken()) {
+ Return(if (isExprIntro) expr() else Literal(Constant()))
+ }
+ parseReturn
case THROW =>
- atPos(in.skipToken()) {
- Throw(expr())
- }
+ def parseThrow =
+ atPos(in.skipToken()) {
+ Throw(expr())
+ }
+ parseThrow
case IMPLICIT =>
implicitClosure(in.skipToken(), location)
case _ =>
- var t = postfixExpr()
- if (in.token == EQUALS) {
- t match {
- case Ident(_) | Select(_, _) | Apply(_, _) =>
- t = atPos(t.pos.startOrPoint, in.skipToken()) { makeAssign(t, expr()) }
- case _ =>
- }
- } else if (in.token == COLON) {
- t = stripParens(t)
- val colonPos = in.skipToken()
- if (in.token == USCORE) {
- //todo: need to handle case where USCORE is a wildcard in a type
- val uscorePos = in.skipToken()
- if (isIdent && in.name == nme.STAR) {
- in.nextToken()
- t = atPos(t.pos.startOrPoint, colonPos) {
- Typed(t, atPos(uscorePos) { Ident(tpnme.WILDCARD_STAR) })
- }
- } else {
- syntaxErrorOrIncomplete("`*' expected", true)
+ def parseOther = {
+ var t = postfixExpr()
+ if (in.token == EQUALS) {
+ t match {
+ case Ident(_) | Select(_, _) | Apply(_, _) =>
+ t = atPos(t.pos.startOrPoint, in.skipToken()) { makeAssign(t, expr()) }
+ case _ =>
}
- } else if (in.token == AT) {
- t = (t /: annotations(false)) (makeAnnotated)
- } else {
- t = atPos(t.pos.startOrPoint, colonPos) {
- val tpt = typeOrInfixType(location)
- if (isWildcard(t))
- (placeholderParams: @unchecked) match {
- case (vd @ ValDef(mods, name, _, _)) :: rest =>
- placeholderParams = treeCopy.ValDef(vd, mods, name, tpt.duplicate, EmptyTree) :: rest
+ } else if (in.token == COLON) {
+ t = stripParens(t)
+ val colonPos = in.skipToken()
+ if (in.token == USCORE) {
+ //todo: need to handle case where USCORE is a wildcard in a type
+ val uscorePos = in.skipToken()
+ if (isIdent && in.name == nme.STAR) {
+ in.nextToken()
+ t = atPos(t.pos.startOrPoint, colonPos) {
+ Typed(t, atPos(uscorePos) { Ident(tpnme.WILDCARD_STAR) })
}
- // this does not correspond to syntax, but is necessary to
- // accept closures. We might restrict closures to be between {...} only.
- Typed(t, tpt)
+ } else {
+ syntaxErrorOrIncomplete("`*' expected", true)
+ }
+ } else if (in.token == AT) {
+ t = (t /: annotations(skipNewLines = false))(makeAnnotated)
+ } else {
+ t = atPos(t.pos.startOrPoint, colonPos) {
+ val tpt = typeOrInfixType(location)
+ if (isWildcard(t))
+ (placeholderParams: @unchecked) match {
+ case (vd @ ValDef(mods, name, _, _)) :: rest =>
+ placeholderParams = treeCopy.ValDef(vd, mods, name, tpt.duplicate, EmptyTree) :: rest
+ }
+ // this does not correspond to syntax, but is necessary to
+ // accept closures. We might restrict closures to be between {...} only.
+ Typed(t, tpt)
+ }
}
+ } else if (in.token == MATCH) {
+ t = atPos(t.pos.startOrPoint, in.skipToken())(Match(stripParens(t), inBracesOrNil(caseClauses())))
}
- } else if (in.token == MATCH) {
- t = atPos(t.pos.startOrPoint, in.skipToken()) {
- /** For debugging pattern matcher transition issues */
- if (settings.Ypmatnaive.value)
- makeSequencedMatch(stripParens(t), inBracesOrNil(caseClauses()))
- else
- Match(stripParens(t), inBracesOrNil(caseClauses()))
+ // in order to allow anonymous functions as statements (as opposed to expressions) inside
+ // templates, we have to disambiguate them from self type declarations - bug #1565
+ // The case still missed is unparenthesized single argument, like "x: Int => x + 1", which
+ // may be impossible to distinguish from a self-type and so remains an error. (See #1564)
+ def lhsIsTypedParamList() = t match {
+ case Parens(xs) if xs forall (_.isInstanceOf[Typed]) => true
+ case _ => false
}
- }
- // in order to allow anonymous functions as statements (as opposed to expressions) inside
- // templates, we have to disambiguate them from self type declarations - bug #1565
- // The case still missed is unparenthesized single argument, like "x: Int => x + 1", which
- // may be impossible to distinguish from a self-type and so remains an error. (See #1564)
- def lhsIsTypedParamList() = t match {
- case Parens(xs) if xs forall (_.isInstanceOf[Typed]) => true
- case _ => false
- }
- if (in.token == ARROW && (location != InTemplate || lhsIsTypedParamList)) {
- t = atPos(t.pos.startOrPoint, in.skipToken()) {
- Function(convertToParams(t), if (location != InBlock) expr() else block())
+ if (in.token == ARROW && (location != InTemplate || lhsIsTypedParamList)) {
+ t = atPos(t.pos.startOrPoint, in.skipToken()) {
+ Function(convertToParams(t), if (location != InBlock) expr() else block())
+ }
}
+ stripParens(t)
}
- stripParens(t)
+ parseOther
}
- /** Expr ::= implicit Id => Expr
+ /** {{{
+ * Expr ::= implicit Id => Expr
+ * }}}
*/
+
def implicitClosure(start: Int, location: Int): Tree = {
val param0 = convertToParam {
atPos(in.offset) {
- var paramexpr: Tree = Ident(ident())
- if (in.token == COLON) {
- in.nextToken()
- paramexpr = Typed(paramexpr, typeOrInfixType(location))
+ Ident(ident()) match {
+ case expr if in.token == COLON =>
+ in.nextToken() ; Typed(expr, typeOrInfixType(location))
+ case expr => expr
}
- paramexpr
}
}
- val param = treeCopy.ValDef(param0, param0.mods | Flags.IMPLICIT, param0.name, param0.tpt, param0.rhs)
+ val param = copyValDef(param0)(mods = param0.mods | Flags.IMPLICIT)
atPos(start, in.offset) {
accept(ARROW)
Function(List(param), if (location != InBlock) expr() else block())
}
}
- /** PostfixExpr ::= InfixExpr [Id [nl]]
+ /** {{{
+ * PostfixExpr ::= InfixExpr [Id [nl]]
* InfixExpr ::= PrefixExpr
* | InfixExpr Id [nl] InfixExpr
+ * }}}
*/
def postfixExpr(): Tree = {
- val base = opstack
- var top = prefixExpr()
+ val start = in.offset
+ val base = opstack
+ var top = prefixExpr()
while (isIdent) {
- top = reduceStack(true, base, top, precedence(in.name), treeInfo.isLeftAssoc(in.name))
+ top = reduceStack(isExpr = true, base, top, precedence(in.name), leftAssoc = treeInfo.isLeftAssoc(in.name))
val op = in.name
opstack = OpInfo(top, op, in.offset) :: opstack
ident()
@@ -1388,54 +1490,59 @@ self =>
if (isExprIntro) {
val next = prefixExpr()
if (next == EmptyTree)
- return reduceStack(true, base, top, 0, true)
+ return reduceStack(isExpr = true, base, top, 0, leftAssoc = true)
top = next
} else {
+ // postfix expression
val topinfo = opstack.head
opstack = opstack.tail
- val od = stripParens(reduceStack(true, base, topinfo.operand, 0, true))
- return atPos(od.pos.startOrPoint, topinfo.offset) {
- Select(od, topinfo.operator.encode)
- }
+ val od = stripParens(reduceStack(isExpr = true, base, topinfo.operand, 0, leftAssoc = true))
+ return makePostfixSelect(start, topinfo.offset, od, topinfo.operator)
}
}
- reduceStack(true, base, top, 0, true)
+ reduceStack(isExpr = true, base, top, 0, leftAssoc = true)
}
- /** PrefixExpr ::= [`-' | `+' | `~' | `!' | `&'] SimpleExpr
- */
+ /** {{{
+ * PrefixExpr ::= [`-' | `+' | `~' | `!' | `&'] SimpleExpr
+ * }}}
+ */
def prefixExpr(): Tree = {
if (isUnaryOp) {
atPos(in.offset) {
- val name: Name = "unary_" + ident()
- if (in.name == raw.MINUS && isNumericLit) simpleExprRest(atPos(in.offset)(literal(true)), true)
- else Select(stripParens(simpleExpr()), name)
+ val name = nme.toUnaryName(rawIdent())
+ if (name == nme.UNARY_- && isNumericLit)
+ simpleExprRest(literal(isNegated = true), canApply = true)
+ else
+ Select(stripParens(simpleExpr()), name)
}
}
else simpleExpr()
}
def xmlLiteral(): Tree
- /* SimpleExpr ::= new (ClassTemplate | TemplateBody)
- * | BlockExpr
- * | SimpleExpr1 [`_']
- * SimpleExpr1 ::= literal
- * | xLiteral
- * | Path
- * | `(' [Exprs] `)'
- * | SimpleExpr `.' Id
- * | SimpleExpr TypeArgs
- * | SimpleExpr1 ArgumentExprs
+ /** {{{
+ * SimpleExpr ::= new (ClassTemplate | TemplateBody)
+ * | BlockExpr
+ * | SimpleExpr1 [`_']
+ * SimpleExpr1 ::= literal
+ * | xLiteral
+ * | Path
+ * | `(' [Exprs] `)'
+ * | SimpleExpr `.' Id
+ * | SimpleExpr TypeArgs
+ * | SimpleExpr1 ArgumentExprs
+ * }}}
*/
def simpleExpr(): Tree = {
var canApply = true
val t =
- if (isLiteral) atPos(in.offset)(literal(false))
+ if (isLiteral) literal()
else in.token match {
case XMLSTART =>
xmlLiteral()
case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER =>
- path(true, false)
+ path(thisOK = true, typeOK = false)
case USCORE =>
val start = in.offset
val pname = freshName("x$")
@@ -1454,14 +1561,14 @@ self =>
val nstart = in.skipToken()
val npos = r2p(nstart, nstart, in.lastOffset)
val tstart = in.offset
- val (parents, argss, self, stats) = template(false)
+ val (parents, argss, self, stats) = template(isTrait = false)
val cpos = r2p(tstart, tstart, in.lastOffset max tstart)
makeNew(parents, self, stats, argss, npos, cpos)
case _ =>
syntaxErrorOrIncomplete("illegal start of simple expression", true)
errorTermTree
}
- simpleExprRest(t, canApply)
+ simpleExprRest(t, canApply = canApply)
}
def simpleExprRest(t: Tree, canApply: Boolean): Tree = {
@@ -1469,15 +1576,16 @@ self =>
in.token match {
case DOT =>
in.nextToken()
- simpleExprRest(selector(stripParens(t)), true)
+ simpleExprRest(selector(stripParens(t)), canApply = true)
case LBRACKET =>
val t1 = stripParens(t)
t1 match {
- case Ident(_) | Select(_, _) =>
- val tapp = atPos(t1.pos.startOrPoint, in.offset) {
- TypeApply(t1, exprTypeArgs())
- }
- simpleExprRest(tapp, true)
+ case Ident(_) | Select(_, _) | Apply(_, _) =>
+ var app: Tree = t1
+ while (in.token == LBRACKET)
+ app = atPos(app.pos.startOrPoint, in.offset)(TypeApply(app, exprTypeArgs()))
+
+ simpleExprRest(app, canApply = true)
case _ =>
t1
}
@@ -1493,7 +1601,7 @@ self =>
}
Apply(sel, argumentExprs())
}
- simpleExprRest(app, true)
+ simpleExprRest(app, canApply = true)
case USCORE =>
atPos(t.pos.startOrPoint, in.skipToken()) {
Typed(stripParens(t), Function(Nil, EmptyTree))
@@ -1503,8 +1611,10 @@ self =>
}
}
- /** ArgumentExprs ::= `(' [Exprs] `)'
- * | [nl] BlockExpr
+ /** {{{
+ * ArgumentExprs ::= `(' [Exprs] `)'
+ * | [nl] BlockExpr
+ * }}}
*/
def argumentExprs(): List[Tree] = {
def args(): List[Tree] = commaSeparated {
@@ -1521,14 +1631,15 @@ self =>
case _ => Nil
}
}
- /** A succession of argument lists.
- */
+ /** A succession of argument lists. */
def multipleArgumentExprs(): List[List[Tree]] = {
if (in.token != LPAREN) Nil
else argumentExprs() :: multipleArgumentExprs()
}
- /** BlockExpr ::= `{' (CaseClauses | Block) `}'
+ /** {{{
+ * BlockExpr ::= `{' (CaseClauses | Block) `}'
+ * }}}
*/
def blockExpr(): Tree = atPos(in.offset) {
inBraces {
@@ -1537,14 +1648,18 @@ self =>
}
}
- /** Block ::= BlockStatSeq
+ /** {{{
+ * Block ::= BlockStatSeq
+ * }}}
* @note Return tree does not carry position.
*/
def block(): Tree = makeBlock(blockStatSeq())
- /** CaseClauses ::= CaseClause {CaseClause}
- * CaseClause ::= case Pattern [Guard] `=>' Block
- */
+ /** {{{
+ * CaseClauses ::= CaseClause {CaseClause}
+ * CaseClause ::= case Pattern [Guard] `=>' Block
+ * }}}
+ */
def caseClauses(): List[CaseDef] = {
val cases = caseSeparated { atPos(in.offset)(makeCaseDef(pattern(), guard(), caseBlock())) }
if (cases.isEmpty) // trigger error if there are no cases
@@ -1557,48 +1672,55 @@ self =>
def caseBlock(): Tree =
atPos(accept(ARROW))(block())
- /** Guard ::= if PostfixExpr
+ /** {{{
+ * Guard ::= if PostfixExpr
+ * }}}
*/
def guard(): Tree =
if (in.token == IF) { in.nextToken(); stripParens(postfixExpr()) }
else EmptyTree
- /** Enumerators ::= Generator {semi Enumerator}
+ /** {{{
+ * Enumerators ::= Generator {semi Enumerator}
* Enumerator ::= Generator
* | Guard
* | val Pattern1 `=' Expr
+ * }}}
*/
def enumerators(): List[Enumerator] = {
- val newStyle = in.token != VAL
- if (!newStyle)
- deprecationWarning(in.offset, "for (val x <- ... ) has been deprecated; use for (x <- ... ) instead")
val enums = new ListBuffer[Enumerator]
- generator(enums, false)
+ generator(enums, eqOK = false)
while (isStatSep) {
in.nextToken()
- if (newStyle) {
- if (in.token == IF) enums += makeFilter(in.offset, guard())
- else generator(enums, true)
- } else {
- if (in.token == VAL) generator(enums, true)
- else enums += makeFilter(in.offset, expr())
- }
+ if (in.token == IF) enums += makeFilter(in.offset, guard())
+ else generator(enums, eqOK = true)
}
enums.toList
}
- /** Generator ::= Pattern1 (`<-' | '=') Expr [Guard]
+ /** {{{
+ * Generator ::= Pattern1 (`<-' | `=') Expr [Guard]
+ * }}}
*/
def generator(enums: ListBuffer[Enumerator], eqOK: Boolean) {
- val start = in.offset
- if (in.token == VAL) in.nextToken()
- val pat = noSeq.pattern1()
+ val start = in.offset
+ val hasVal = in.token == VAL
+ if (hasVal)
+ in.nextToken()
+
+ val pat = noSeq.pattern1()
val point = in.offset
- val tok = in.token
- if (tok == EQUALS && eqOK) in.nextToken()
+ val hasEq = in.token == EQUALS
+
+ if (hasVal) {
+ if (hasEq) deprecationWarning(in.offset, "val keyword in for comprehension is deprecated")
+ else syntaxError(in.offset, "val in for comprehension must be followed by assignment")
+ }
+
+ if (hasEq && eqOK) in.nextToken()
else accept(LARROW)
val rhs = expr()
- enums += makeGenerator(r2p(start, point, in.lastOffset max start), pat, tok == EQUALS, rhs)
+ enums += makeGenerator(r2p(start, point, in.lastOffset max start), pat, hasEq, rhs)
// why max above? IDE stress tests have shown that lastOffset could be less than start,
// I guess this happens if instead if a for-expression we sit on a closing paren.
while (in.token == IF) enums += makeFilter(in.offset, guard())
@@ -1613,11 +1735,11 @@ self =>
* was threaded through methods as boolean seqOK.
*/
trait SeqContextSensitive extends PatternContextSensitive {
- /** Returns Some(tree) if it finds a star and prematurely ends parsing.
- * This is an artifact of old implementation which has proven difficult
- * to cleanly extract.
- */
- def interceptStarPattern(top: Tree): Option[Tree]
+ // is a sequence pattern _* allowed?
+ def isSequenceOK: Boolean
+
+ // are we in an XML pattern?
+ def isXML: Boolean = false
def functionArgType(): Tree = argType()
def argType(): Tree = {
@@ -1627,20 +1749,24 @@ self =>
in.nextToken()
if (in.token == SUBTYPE || in.token == SUPERTYPE) wildcardType(start)
else atPos(start) { Bind(tpnme.WILDCARD, EmptyTree) }
- case IDENTIFIER if treeInfo.isVariableName(in.name) =>
+ case IDENTIFIER if nme.isVariableName(in.name) =>
atPos(start) { Bind(identForType(), EmptyTree) }
case _ =>
typ()
}
}
- /** Patterns ::= Pattern { `,' Pattern }
- * SeqPatterns ::= SeqPattern { `,' SeqPattern }
+ /** {{{
+ * Patterns ::= Pattern { `,' Pattern }
+ * SeqPatterns ::= SeqPattern { `,' SeqPattern }
+ * }}}
*/
def patterns(): List[Tree] = commaSeparated(pattern())
- /** Pattern ::= Pattern1 { `|' Pattern1 }
- * SeqPattern ::= SeqPattern1 { `|' SeqPattern1 }
+ /** {{{
+ * Pattern ::= Pattern1 { `|' Pattern1 }
+ * SeqPattern ::= SeqPattern1 { `|' SeqPattern1 }
+ * }}}
*/
def pattern(): Tree = {
val start = in.offset
@@ -1654,26 +1780,37 @@ self =>
}
}
- /** Pattern1 ::= varid `:' TypePat
- * | `_' `:' TypePat
- * | Pattern2
- * SeqPattern1 ::= varid `:' TypePat
- * | `_' `:' TypePat
- * | [SeqPattern2]
+ /** {{{
+ * Pattern1 ::= varid `:' TypePat
+ * | `_' `:' TypePat
+ * | Pattern2
+ * SeqPattern1 ::= varid `:' TypePat
+ * | `_' `:' TypePat
+ * | [SeqPattern2]
+ * }}}
*/
def pattern1(): Tree = pattern2() match {
- case p @ Ident(name) if treeInfo.isVarPattern(p) && in.token == COLON =>
- atPos(p.pos.startOrPoint, in.skipToken()) { Typed(p, compoundType()) }
- case p =>
- p
+ case p @ Ident(name) if in.token == COLON =>
+ if (treeInfo.isVarPattern(p))
+ atPos(p.pos.startOrPoint, in.skipToken())(Typed(p, compoundType()))
+ else {
+ syntaxError(in.offset, "Pattern variables must start with a lower-case letter. (SLS 8.1.1.)")
+ p
+ }
+ case p => p
}
- /* Pattern2 ::= varid [ @ Pattern3 ]
+
+ /** {{{
+ * Pattern2 ::= varid [ @ Pattern3 ]
* | Pattern3
- * SeqPattern2 ::= varid [ @ SeqPattern3 ]
+ * SeqPattern2 ::= varid [ @ SeqPattern3 ]
* | SeqPattern3
+ * }}}
*/
def pattern2(): Tree = {
+ val nameOffset = in.offset
val p = pattern3()
+
if (in.token != AT) p
else p match {
case Ident(nme.WILDCARD) =>
@@ -1686,44 +1823,101 @@ self =>
}
}
- /* Pattern3 ::= SimplePattern
+ /** {{{
+ * Pattern3 ::= SimplePattern
* | SimplePattern {Id [nl] SimplePattern}
- * SeqPattern3 ::= SeqSimplePattern [ '*' | '?' | '+' ]
- * | SeqSimplePattern {Id [nl] SeqSimplePattern}
+ * }}}
*/
def pattern3(): Tree = {
+ var top = simplePattern(badPattern3)
+ // after peekahead
+ def acceptWildStar() = atPos(top.pos.startOrPoint, in.prev.offset)(Star(stripParens(top)))
+ def peekahead() = {
+ in.prev copyFrom in
+ in.nextToken()
+ }
+ def pushback() = {
+ in.next copyFrom in
+ in copyFrom in.prev
+ }
+ // See SI-3189, SI-4832 for motivation. Cf SI-3480 for counter-motivation.
+ // TODO: dredge out the remnants of regexp patterns.
+ // /{/ peek for _*) or _*} (for xml escape)
+ if (isSequenceOK) {
+ top match {
+ case Ident(nme.WILDCARD) if (isRawStar) =>
+ peekahead()
+ in.token match {
+ case RBRACE if (isXML) => return acceptWildStar()
+ case RPAREN if (!isXML) => return acceptWildStar()
+ case _ => pushback()
+ }
+ case _ =>
+ }
+ }
val base = opstack
- var top = simplePattern()
- interceptStarPattern(top) foreach { x => return x }
-
while (isIdent && in.name != raw.BAR) {
- top = reduceStack(
- false, base, top, precedence(in.name), treeInfo.isLeftAssoc(in.name))
+ top = reduceStack(isExpr = false, base, top, precedence(in.name), leftAssoc = treeInfo.isLeftAssoc(in.name))
val op = in.name
opstack = OpInfo(top, op, in.offset) :: opstack
ident()
- top = simplePattern()
- }
- stripParens(reduceStack(false, base, top, 0, true))
+ top = simplePattern(badPattern3)
+ }
+ stripParens(reduceStack(isExpr = false, base, top, 0, leftAssoc = true))
+ }
+ def badPattern3(): Tree = {
+ def isComma = in.token == COMMA
+ def isAnyBrace = in.token == RPAREN || in.token == RBRACE
+ val badStart = "illegal start of simple pattern"
+ // better recovery if don't skip delims of patterns
+ var skip = !(isComma || isAnyBrace)
+ val msg = if (!opstack.isEmpty && opstack.head.operator == nme.STAR) {
+ opstack.head.operand match {
+ case Ident(nme.WILDCARD) =>
+ if (isSequenceOK && isComma)
+ "bad use of _* (a sequence pattern must be the last pattern)"
+ else if (isSequenceOK && isAnyBrace) {
+ skip = true // do skip bad paren; scanner may skip bad brace already
+ "bad brace or paren after _*"
+ } else if (!isSequenceOK && isAnyBrace)
+ "bad use of _* (sequence pattern not allowed)"
+ else badStart
+ case _ =>
+ if (isSequenceOK && isAnyBrace)
+ "use _* to match a sequence"
+ else if (isComma || isAnyBrace)
+ "trailing * is not a valid pattern"
+ else badStart
+ }
+ } else {
+ badStart
+ }
+ syntaxErrorOrIncomplete(msg, skip)
+ errorPatternTree
}
- /** SimplePattern ::= varid
+ /** {{{
+ * SimplePattern ::= varid
* | `_'
* | literal
* | XmlPattern
- * | StableId [TypeArgs] [`(' [SeqPatterns] `)']
+ * | StableId /[TypeArgs]/ [`(' [Patterns] `)']
+ * | StableId [`(' [Patterns] `)']
+ * | StableId [`(' [Patterns] `,' [varid `@'] `_' `*' `)']
* | `(' [Patterns] `)'
- * SimpleSeqPattern ::= varid
- * | `_'
- * | literal
- * | XmlPattern
- * | `<' xLiteralPattern
- * | StableId [TypeArgs] [`(' [SeqPatterns] `)']
- * | `(' [SeqPatterns] `)'
+ * }}}
*
* XXX: Hook for IDE
*/
def simplePattern(): Tree = {
+ // simple diagnostics for this entry point
+ def badStart(): Tree = {
+ syntaxErrorOrIncomplete("illegal start of simple pattern", true)
+ errorPatternTree
+ }
+ simplePattern(badStart)
+ }
+ def simplePattern(onError: () => Tree): Tree = {
val start = in.offset
in.token match {
case IDENTIFIER | BACKQUOTED_IDENT | THIS =>
@@ -1732,13 +1926,13 @@ self =>
case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT =>
t match {
case Ident(nme.MINUS) =>
- return atPos(start) { literal(true) }
+ return literal(isNegated = true, inPattern = true, start = start)
case _ =>
}
case _ =>
}
val typeAppliedTree = in.token match {
- case LBRACKET => atPos(start, in.offset)(TypeApply(convertToTypeId(t), typeArgs()))
+ case LBRACKET => atPos(start, in.offset)(AppliedTypeTree(convertToTypeId(t), typeArgs()))
case _ => t
}
in.token match {
@@ -1749,41 +1943,34 @@ self =>
in.nextToken()
atPos(start, start) { Ident(nme.WILDCARD) }
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
- STRINGLIT | SYMBOLLIT | TRUE | FALSE | NULL =>
- atPos(start) { literal(false) }
+ STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL =>
+ literal(inPattern = true)
case LPAREN =>
atPos(start)(makeParens(noSeq.patterns()))
case XMLSTART =>
xmlLiteralPattern()
case _ =>
- syntaxErrorOrIncomplete("illegal start of simple pattern", true)
- errorPatternTree
+ onError()
}
}
}
- /** The implementation of the context sensitive methods for parsing
- * outside of patterns.
- */
+ /** The implementation of the context sensitive methods for parsing outside of patterns. */
object outPattern extends PatternContextSensitive {
def argType(): Tree = typ()
def functionArgType(): Tree = paramType(useStartAsPosition = true)
}
- /** The implementation for parsing inside of patterns at points where
- * sequences are allowed.
- */
+ /** The implementation for parsing inside of patterns at points where sequences are allowed. */
object seqOK extends SeqContextSensitive {
- // See ticket #3189 for the motivation for the null check.
- // TODO: dredge out the remnants of regexp patterns.
- // ... and now this is back the way it was because it caused #3480.
- def interceptStarPattern(top: Tree): Option[Tree] =
- if (isRawStar) Some(atPos(top.pos.startOrPoint, in.skipToken())(Star(stripParens(top))))
- else None
+ val isSequenceOK = true
}
- /** The implementation for parsing inside of patterns at points where
- * sequences are disallowed.
- */
+ /** The implementation for parsing inside of patterns at points where sequences are disallowed. */
object noSeq extends SeqContextSensitive {
- def interceptStarPattern(top: Tree) = None
+ val isSequenceOK = false
+ }
+ /** For use from xml pattern, where sequence is allowed and encouraged. */
+ object xmlSeqOK extends SeqContextSensitive {
+ val isSequenceOK = true
+ override val isXML = true
}
/** These are default entry points into the pattern context sensitive methods:
* they are all initiated from non-pattern context.
@@ -1794,11 +1981,11 @@ self =>
def exprTypeArgs() = outPattern.typeArgs()
def exprSimpleType() = outPattern.simpleType()
- /** Default entry points into some pattern contexts.
- */
+ /** Default entry points into some pattern contexts. */
def pattern(): Tree = noSeq.pattern()
def patterns(): List[Tree] = noSeq.patterns()
- def seqPatterns(): List[Tree] = seqOK.patterns() // Also called from xml parser
+ def seqPatterns(): List[Tree] = seqOK.patterns()
+ def xmlSeqPatterns(): List[Tree] = xmlSeqOK.patterns() // Called from xml parser
def argumentPatterns(): List[Tree] = inParens {
if (in.token == RPAREN) Nil
else seqPatterns()
@@ -1807,8 +1994,8 @@ self =>
/* -------- MODIFIERS and ANNOTATIONS ------------------------------------------- */
- /** Drop `private' modifier when followed by a qualifier.
- * Contract `abstract' and `override' to ABSOVERRIDE
+ /** Drop `private` modifier when followed by a qualifier.
+ * Contract `abstract` and `override` to ABSOVERRIDE
*/
private def normalize(mods: Modifiers): Modifiers =
if (mods.isPrivate && mods.hasAccessBoundary)
@@ -1824,9 +2011,12 @@ self =>
(mods | mod) withPosition (mod, pos)
}
- private def tokenRange(token: TokenData) = r2p(token.offset, token.offset, token.offset + token.name.length - 1)
+ private def tokenRange(token: TokenData) =
+ r2p(token.offset, token.offset, token.offset + token.name.length - 1)
- /** AccessQualifier ::= "[" (Id | this) "]"
+ /** {{{
+ * AccessQualifier ::= `[' (Id | this) `]'
+ * }}}
*/
def accessQualifierOpt(mods: Modifiers): Modifiers = {
var result = mods
@@ -1852,7 +2042,9 @@ self =>
SEALED -> Flags.SEALED
)
- /** AccessModifier ::= (private | protected) [AccessQualifier]
+ /** {{{
+ * AccessModifier ::= (private | protected) [AccessQualifier]
+ * }}}
*/
def accessModifierOpt(): Modifiers = normalize {
in.token match {
@@ -1861,10 +2053,12 @@ self =>
}
}
- /** Modifiers ::= {Modifier}
+ /** {{{
+ * Modifiers ::= {Modifier}
* Modifier ::= LocalModifier
- * | AccessModifier
- * | override
+ * | AccessModifier
+ * | override
+ * }}}
*/
def modifiers(): Modifiers = normalize {
def loop(mods: Modifiers): Modifiers = in.token match {
@@ -1881,8 +2075,10 @@ self =>
loop(NoMods)
}
- /** LocalModifiers ::= {LocalModifier}
+ /** {{{
+ * LocalModifiers ::= {LocalModifier}
* LocalModifier ::= abstract | final | sealed | implicit | lazy
+ * }}}
*/
def localModifiers(): Modifiers = {
def loop(mods: Modifiers): Modifiers =
@@ -1892,8 +2088,10 @@ self =>
loop(NoMods)
}
- /** Annotations ::= {`@' SimpleType {ArgumentExprs}}
+ /** {{{
+ * Annotations ::= {`@' SimpleType {ArgumentExprs}}
* ConsrAnnotations ::= {`@' SimpleType ArgumentExprs}
+ * }}}
*/
def annotations(skipNewLines: Boolean): List[Tree] = readAnnots {
val t = annotationExpr()
@@ -1907,26 +2105,28 @@ self =>
def annotationExpr(): Tree = atPos(in.offset) {
val t = exprSimpleType()
if (in.token == LPAREN) New(t, multipleArgumentExprs())
- else New(t, List(Nil))
+ else New(t, ListOfNil)
}
/* -------- PARAMETERS ------------------------------------------- */
- /** ParamClauses ::= {ParamClause} [[nl] `(' implicit Params `)']
- * ParamClause ::= [nl] `(' [Params] ')'
+ /** {{{
+ * ParamClauses ::= {ParamClause} [[nl] `(' implicit Params `)']
+ * ParamClause ::= [nl] `(' [Params] `)'
* Params ::= Param {`,' Param}
* Param ::= {Annotation} Id [`:' ParamType] [`=' Expr]
* ClassParamClauses ::= {ClassParamClause} [[nl] `(' implicit ClassParams `)']
- * ClassParamClause ::= [nl] `(' [ClassParams] ')'
+ * ClassParamClause ::= [nl] `(' [ClassParams] `)'
* ClassParams ::= ClassParam {`,' ClassParam}
* ClassParam ::= {Annotation} [{Modifier} (`val' | `var')] Id [`:' ParamType] [`=' Expr]
+ * }}}
*/
def paramClauses(owner: Name, contextBounds: List[Tree], ofCaseClass: Boolean): List[List[ValDef]] = {
var implicitmod = 0
var caseParam = ofCaseClass
def param(): ValDef = {
val start = in.offset
- val annots = annotations(false)
+ val annots = annotations(skipNewLines = false)
var mods = Modifiers(Flags.PARAM)
if (owner.isTypeName) {
mods = modifiers() | Flags.PARAMACCESSOR
@@ -1938,7 +2138,7 @@ self =>
in.nextToken()
case _ =>
if (mods.flags != Flags.PARAMACCESSOR) accept(VAL)
- if (!caseParam) mods |= Flags.PRIVATE | Flags.LOCAL
+ if (!caseParam) mods |= Flags.PrivateLocal
}
if (caseParam) mods |= Flags.CASEACCESSOR
}
@@ -1979,8 +2179,6 @@ self =>
return Nil
if (in.token == IMPLICIT) {
- if (contextBounds.nonEmpty)
- syntaxError("cannot have both implicit parameters and context bounds `: ...' or view bounds `<% ...' on type parameters", false)
in.nextToken()
implicitmod = Flags.IMPLICIT
}
@@ -2010,7 +2208,9 @@ self =>
addEvidenceParams(owner, result, contextBounds)
}
- /** ParamType ::= Type | `=>' Type | Type `*'
+ /** {{{
+ * ParamType ::= Type | `=>' Type | Type `*'
+ * }}}
*/
def paramType(): Tree = paramType(useStartAsPosition = false)
def paramType(useStartAsPosition: Boolean): Tree = {
@@ -2030,12 +2230,14 @@ self =>
}
}
- /** TypeParamClauseOpt ::= [TypeParamClause]
+ /** {{{
+ * TypeParamClauseOpt ::= [TypeParamClause]
* TypeParamClause ::= `[' VariantTypeParam {`,' VariantTypeParam} `]']
* VariantTypeParam ::= {Annotation} [`+' | `-'] TypeParam
* FunTypeParamClauseOpt ::= [FunTypeParamClause]
* FunTypeParamClause ::= `[' TypeParam {`,' TypeParam} `]']
* TypeParam ::= Id TypeParamClauseOpt TypeBounds {<% Type} {":" Type}
+ * }}}
*/
def typeParamClauseOpt(owner: Name, contextBoundBuf: ListBuffer[Tree]): List[TypeDef] = {
def typeParam(ms: Modifiers): TypeDef = {
@@ -2072,11 +2274,13 @@ self =>
param
}
newLineOptWhenFollowedBy(LBRACKET)
- if (in.token == LBRACKET) inBrackets(commaSeparated(typeParam(NoMods withAnnotations annotations(true))))
+ if (in.token == LBRACKET) inBrackets(commaSeparated(typeParam(NoMods withAnnotations annotations(skipNewLines = true))))
else Nil
}
- /** TypeBounds ::= [`>:' Type] [`<:' Type]
+ /** {{{
+ * TypeBounds ::= [`>:' Type] [`<:' Type]
+ * }}}
*/
def typeBounds(): TypeBoundsTree = {
val t = TypeBoundsTree(
@@ -2093,7 +2297,9 @@ self =>
/* -------- DEFS ------------------------------------------- */
- /** Import ::= import ImportExpr {`,' ImportExpr}
+ /** {{{
+ * Import ::= import ImportExpr {`,' ImportExpr}
+ * }}}
*/
def importClause(): List[Tree] = {
val offset = accept(IMPORT)
@@ -2106,7 +2312,9 @@ self =>
}
}
- /** ImportExpr ::= StableId `.' (Id | `_' | ImportSelectors)
+ /** {{{
+ * ImportExpr ::= StableId `.' (Id | `_' | ImportSelectors)
+ * }}}
*/
def importExpr(): Tree = {
val start = in.offset
@@ -2118,7 +2326,7 @@ self =>
accept(DOT)
result
}
- /** Walks down import foo.bar.baz.{ ... } until it ends at a
+ /** Walks down import `foo.bar.baz.{ ... }` until it ends at a
* an underscore, a left brace, or an undotted identifier.
*/
def loop(expr: Tree): Tree = {
@@ -2152,7 +2360,9 @@ self =>
})
}
- /** ImportSelectors ::= `{' {ImportSelector `,'} (ImportSelector | `_') `}'
+ /** {{{
+ * ImportSelectors ::= `{' {ImportSelector `,'} (ImportSelector | `_') `}'
+ * }}}
*/
def importSelectors(): List[ImportSelector] = {
val selectors = inBracesOrNil(commaSeparated(importSelector()))
@@ -2168,7 +2378,9 @@ self =>
else ident()
}
- /** ImportSelector ::= Id [`=>' Id | `=>' `_']
+ /** {{{
+ * ImportSelector ::= Id [`=>' Id | `=>' `_']
+ * }}}
*/
def importSelector(): ImportSelector = {
val start = in.offset
@@ -2187,7 +2399,8 @@ self =>
ImportSelector(name, start, rename, renameOffset)
}
- /** Def ::= val PatDef
+ /** {{{
+ * Def ::= val PatDef
* | var PatDef
* | def FunDef
* | type [nl] TypeDef
@@ -2196,6 +2409,7 @@ self =>
* | var PatDcl
* | def FunDcl
* | type [nl] TypeDcl
+ * }}}
*/
def defOrDcl(pos: Int, mods: Modifiers): List[Tree] = {
if (mods.isLazy && in.token != VAL)
@@ -2217,13 +2431,15 @@ self =>
private def caseAwareTokenOffset = if (in.token == CASECLASS || in.token == CASEOBJECT) in.prev.offset else in.offset
def nonLocalDefOrDcl : List[Tree] = {
- val annots = annotations(true)
+ val annots = annotations(skipNewLines = true)
defOrDcl(caseAwareTokenOffset, modifiers() withAnnotations annots)
}
- /** PatDef ::= Pattern2 {`,' Pattern2} [`:' Type] `=' Expr
+ /** {{{
+ * PatDef ::= Pattern2 {`,' Pattern2} [`:' Type] `=' Expr
* ValDcl ::= Id {`,' Id} `:' Type
* VarDef ::= PatDef | Id {`,' Id} `:' Type `=' `_'
+ * }}}
*/
def patDefOrDcl(pos : Int, mods: Modifiers): List[Tree] = {
var newmods = mods
@@ -2268,9 +2484,11 @@ self =>
trees
}
- /** VarDef ::= PatDef
+ /** {{{
+ * VarDef ::= PatDef
* | Id {`,' Id} `:' Type `=' `_'
* VarDcl ::= Id {`,' Id} `:' Type
+ * }}}
def varDefOrDcl(mods: Modifiers): List[Tree] = {
var newmods = mods | Flags.MUTABLE
val lhs = new ListBuffer[(Int, Name)]
@@ -2294,17 +2512,20 @@ self =>
}
*/
- /** FunDef ::= FunSig `:' Type `=' Expr
- * | FunSig [nl] `{' Block `}'
- * | this ParamClause ParamClauses (`=' ConstrExpr | [nl] ConstrBlock)
+ /** {{{
+ * FunDef ::= FunSig [`:' Type] `=' [`macro'] Expr
+ * | FunSig [nl] `{' Block `}'
+ * | `this' ParamClause ParamClauses
+ * (`=' ConstrExpr | [nl] ConstrBlock)
* FunDcl ::= FunSig [`:' Type]
* FunSig ::= id [FunTypeParamClause] ParamClauses
+ * }}}
*/
def funDefOrDcl(start : Int, mods: Modifiers): Tree = {
in.nextToken
if (in.token == THIS) {
atPos(start, in.skipToken()) {
- val vparamss = paramClauses(nme.CONSTRUCTOR, classContextBounds map (_.duplicate), false)
+ val vparamss = paramClauses(nme.CONSTRUCTOR, classContextBounds map (_.duplicate), ofCaseClass = false)
newLineOptWhenFollowedBy(LBRACE)
val rhs = in.token match {
case LBRACE => atPos(in.offset) { constrBlock(vparamss) }
@@ -2314,44 +2535,61 @@ self =>
}
}
else {
- var newmods = mods
val nameOffset = in.offset
val name = ident()
- val result = atPos(start, if (name == nme.ERROR) start else nameOffset) {
- // contextBoundBuf is for context bounded type parameters of the form
- // [T : B] or [T : => B]; it contains the equivalent implicit parameter type,
- // i.e. (B[T] or T => B)
- val contextBoundBuf = new ListBuffer[Tree]
- val tparams = typeParamClauseOpt(name, contextBoundBuf)
- val vparamss = paramClauses(name, contextBoundBuf.toList, false)
- newLineOptWhenFollowedBy(LBRACE)
- var restype = fromWithinReturnType(typedOpt())
- val rhs =
- if (isStatSep || in.token == RBRACE) {
- if (restype.isEmpty) restype = scalaUnitConstr
- newmods |= Flags.DEFERRED
- EmptyTree
- } else if (restype.isEmpty && in.token == LBRACE) {
- restype = scalaUnitConstr
- blockExpr()
+ funDefRest(start, nameOffset, mods, name)
+ }
+ }
+
+ def funDefRest(start: Int, nameOffset: Int, mods: Modifiers, name: Name): Tree = {
+ val result = atPos(start, if (name.toTermName == nme.ERROR) start else nameOffset) {
+ var newmods = mods
+ // contextBoundBuf is for context bounded type parameters of the form
+ // [T : B] or [T : => B]; it contains the equivalent implicit parameter type,
+ // i.e. (B[T] or T => B)
+ val contextBoundBuf = new ListBuffer[Tree]
+ val tparams = typeParamClauseOpt(name, contextBoundBuf)
+ val vparamss = paramClauses(name, contextBoundBuf.toList, ofCaseClass = false)
+ newLineOptWhenFollowedBy(LBRACE)
+ var restype = fromWithinReturnType(typedOpt())
+ val rhs =
+ if (isStatSep || in.token == RBRACE) {
+ if (restype.isEmpty) restype = scalaUnitConstr
+ newmods |= Flags.DEFERRED
+ EmptyTree
+ } else if (restype.isEmpty && in.token == LBRACE) {
+ restype = scalaUnitConstr
+ blockExpr()
+ } else {
+ if (in.token == EQUALS) {
+ in.nextTokenAllow(nme.MACROkw)
+ if (in.token == IDENTIFIER && in.name == nme.MACROkw) {
+ in.nextToken()
+ newmods |= Flags.MACRO
+ }
} else {
- equalsExpr()
+ accept(EQUALS)
}
- DefDef(newmods, name, tparams, vparamss, restype, rhs)
- }
- signalParseProgress(result.pos)
- result
+ expr()
+ }
+ DefDef(newmods, name, tparams, vparamss, restype, rhs)
}
+ signalParseProgress(result.pos)
+ result
}
- /** ConstrExpr ::= SelfInvocation
+ /** {{{
+ * ConstrExpr ::= SelfInvocation
* | ConstrBlock
+ * }}}
*/
def constrExpr(vparamss: List[List[ValDef]]): Tree =
if (in.token == LBRACE) constrBlock(vparamss)
- else Block(List(selfInvocation(vparamss)), Literal(()))
+ else Block(List(selfInvocation(vparamss)), Literal(Constant()))
- /** SelfInvocation ::= this ArgumentExprs {ArgumentExprs}
+ /** {{{
+ * SelfInvocation ::= this ArgumentExprs {ArgumentExprs}
+ * }}}
*/
def selfInvocation(vparamss: List[List[ValDef]]): Tree =
atPos(accept(THIS)) {
@@ -2366,7 +2604,9 @@ self =>
else Apply(t, vparamss.last.map(vp => Ident(vp.name)))
}
- /** ConstrBlock ::= `{' SelfInvocation {semi BlockStat} `}'
+ /** {{{
+ * ConstrBlock ::= `{' SelfInvocation {semi BlockStat} `}'
+ * }}}
*/
def constrBlock(vparamss: List[List[ValDef]]): Tree =
atPos(in.skipToken()) {
@@ -2375,16 +2615,20 @@ self =>
else Nil
}
accept(RBRACE)
- Block(stats, Literal(()))
+ Block(stats, Literal(Constant()))
}
- /** TypeDef ::= type Id [TypeParamClause] `=' Type
+ /** {{{
+ * TypeDef ::= type Id [TypeParamClause] `=' Type
+ * | FunSig `=' Expr
* TypeDcl ::= type Id [TypeParamClause] TypeBounds
+ * }}}
*/
def typeDefOrDcl(start: Int, mods: Modifiers): Tree = {
in.nextToken()
newLinesOpt()
atPos(start, in.offset) {
+ val nameOffset = in.offset
val name = identForType()
// @M! a type alias as well as an abstract type may declare type parameters
val tparams = typeParamClauseOpt(name, null)
@@ -2401,17 +2645,19 @@ self =>
}
}
- /** Hook for IDE, for top-level classes/objects */
+ /** Hook for IDE, for top-level classes/objects. */
def topLevelTmplDef: Tree = {
- val annots = annotations(true)
+ val annots = annotations(skipNewLines = true)
val pos = caseAwareTokenOffset
val mods = modifiers() withAnnotations annots
tmplDef(pos, mods)
}
- /** TmplDef ::= [case] class ClassDef
+ /** {{{
+ * TmplDef ::= [case] class ClassDef
* | [case] object ObjectDef
* | [override] trait TraitDef
+ * }}}
*/
def tmplDef(pos: Int, mods: Modifiers): Tree = {
if (mods.isLazy) syntaxError("classes cannot be lazy", false)
@@ -2432,32 +2678,32 @@ self =>
}
}
- /** ClassDef ::= Id [TypeParamClause] {Annotation}
- [AccessModifier] ClassParamClauses RequiresTypeOpt ClassTemplateOpt
+ /** {{{
+ * ClassDef ::= Id [TypeParamClause] {Annotation}
+ * [AccessModifier] ClassParamClauses RequiresTypeOpt ClassTemplateOpt
* TraitDef ::= Id [TypeParamClause] RequiresTypeOpt TraitTemplateOpt
+ * }}}
*/
def classDef(start: Int, mods: Modifiers): ClassDef = {
in.nextToken
val nameOffset = in.offset
val name = identForType()
- def isTrait = mods.hasTraitFlag
-
atPos(start, if (name == tpnme.ERROR) start else nameOffset) {
savingClassContextBounds {
val contextBoundBuf = new ListBuffer[Tree]
val tparams = typeParamClauseOpt(name, contextBoundBuf)
classContextBounds = contextBoundBuf.toList
- val tstart = in.offset :: classContextBounds.map(_.pos.startOrPoint) min;
- if (!classContextBounds.isEmpty && isTrait) {
+ val tstart = (in.offset :: classContextBounds.map(_.pos.startOrPoint)).min
+ if (!classContextBounds.isEmpty && mods.isTrait) {
syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", false)
classContextBounds = List()
}
val constrAnnots = constructorAnnotations()
val (constrMods, vparamss) =
- if (isTrait) (Modifiers(Flags.TRAIT), List())
- else (accessModifierOpt(), paramClauses(name, classContextBounds, mods.isCase))
+ if (mods.isTrait) (Modifiers(Flags.TRAIT), List())
+ else (accessModifierOpt(), paramClauses(name, classContextBounds, ofCaseClass = mods.isCase))
var mods1 = mods
- if (isTrait) {
+ if (mods.isTrait) {
if (settings.YvirtClasses && in.token == SUBTYPE) mods1 |= Flags.DEFERRED
} else if (in.token == SUBTYPE) {
syntaxError("classes are not allowed to be virtual", false)
@@ -2474,7 +2720,9 @@ self =>
}
}
- /** ObjectDef ::= Id ClassTemplateOpt
+ /** {{{
+ * ObjectDef ::= Id ClassTemplateOpt
+ * }}}
*/
def objectDef(start: Int, mods: Modifiers): ModuleDef = {
in.nextToken
@@ -2488,14 +2736,19 @@ self =>
}
}
- /** ClassParents ::= AnnotType {`(' [Exprs] `)'} {with AnnotType}
+ /** {{{
+ * ClassParents ::= AnnotType {`(' [Exprs] `)'} {with AnnotType}
* TraitParents ::= AnnotType {with AnnotType}
+ * }}}
*/
def templateParents(isTrait: Boolean): (List[Tree], List[List[Tree]]) = {
val parents = new ListBuffer[Tree] += startAnnotType()
- val argss =
+ val argss = (
+ // TODO: the insertion of ListOfNil here is where "new Foo" becomes
+ // indistinguishable from "new Foo()".
if (in.token == LPAREN && !isTrait) multipleArgumentExprs()
- else List(Nil)
+ else ListOfNil
+ )
while (in.token == WITH) {
in.nextToken()
@@ -2504,95 +2757,104 @@ self =>
(parents.toList, argss)
}
- /** ClassTemplate ::= [EarlyDefs with] ClassParents [TemplateBody]
+ /** {{{
+ * ClassTemplate ::= [EarlyDefs with] ClassParents [TemplateBody]
* TraitTemplate ::= [EarlyDefs with] TraitParents [TemplateBody]
* EarlyDefs ::= `{' [EarlyDef {semi EarlyDef}] `}'
* EarlyDef ::= Annotations Modifiers PatDef
+ * }}}
*/
def template(isTrait: Boolean): (List[Tree], List[List[Tree]], ValDef, List[Tree]) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
// @S: pre template body cannot stub like post body can!
- val (self, body) = templateBody(true)
+ val (self, body) = templateBody(isPre = true)
if (in.token == WITH && self.isEmpty) {
val earlyDefs: List[Tree] = body flatMap {
- case vdef @ ValDef(mods, name, tpt, rhs) if !mods.isDeferred =>
- List(treeCopy.ValDef(vdef, mods | Flags.PRESUPER, name, tpt, rhs))
+ case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
+ List(copyValDef(vdef)(mods = mods | Flags.PRESUPER))
case tdef @ TypeDef(mods, name, tparams, rhs) =>
List(treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs))
+ case docdef @ DocDef(comm, rhs) =>
+ List(treeCopy.DocDef(docdef, comm, rhs))
case stat if !stat.isEmpty =>
syntaxError(stat.pos, "only type definitions and concrete field definitions allowed in early object initialization section", false)
List()
case _ => List()
}
in.nextToken()
- val (parents, argss) = templateParents(isTrait)
- val (self1, body1) = templateBodyOpt(isTrait)
+ val (parents, argss) = templateParents(isTrait = isTrait)
+ val (self1, body1) = templateBodyOpt(traitParentSeen = isTrait)
(parents, argss, self1, earlyDefs ::: body1)
} else {
- (List(), List(List()), self, body)
+ (List(), ListOfNil, self, body)
}
} else {
- val (parents, argss) = templateParents(isTrait)
- val (self, body) = templateBodyOpt(isTrait)
+ val (parents, argss) = templateParents(isTrait = isTrait)
+ val (self, body) = templateBodyOpt(traitParentSeen = isTrait)
(parents, argss, self, body)
}
}
def isInterface(mods: Modifiers, body: List[Tree]): Boolean =
- mods.hasTraitFlag && (body forall treeInfo.isInterfaceMember)
+ mods.isTrait && (body forall treeInfo.isInterfaceMember)
- /** ClassTemplateOpt ::= 'extends' ClassTemplate | [['extends'] TemplateBody]
- * TraitTemplateOpt ::= TraitExtends TraitTemplate | [['extends'] TemplateBody] | '<:' TemplateBody
- * TraitExtends ::= 'extends' | `<:'
+ /** {{{
+ * ClassTemplateOpt ::= `extends' ClassTemplate | [[`extends'] TemplateBody]
+ * TraitTemplateOpt ::= TraitExtends TraitTemplate | [[`extends'] TemplateBody] | `<:' TemplateBody
+ * TraitExtends ::= `extends' | `<:'
+ * }}}
*/
def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Int): Template = {
val (parents0, argss, self, body) = (
- if (in.token == EXTENDS || in.token == SUBTYPE && mods.hasTraitFlag) {
+ if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait) {
in.nextToken()
- template(mods.hasTraitFlag)
+ template(isTrait = mods.isTrait)
}
else {
newLineOptWhenFollowedBy(LBRACE)
- val (self, body) = templateBodyOpt(false)
- (List(), List(List()), self, body)
+ val (self, body) = templateBodyOpt(traitParentSeen = false)
+ (List(), ListOfNil, self, body)
}
)
-
- val tstart0 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart
- atPos(tstart0) {
- if (isAnyValType(name)) {
- val parent = if (name == tpnme.AnyVal) tpnme.Any else tpnme.AnyVal
- Template(List(scalaDot(parent)), self, body)
+ def anyrefParents() = {
+ val caseParents = if (mods.isCase) List(productConstr, serializableConstr) else Nil
+ parents0 ::: caseParents match {
+ case Nil => List(atPos(o2p(in.offset))(scalaAnyRefConstr))
+ case ps => ps
}
- else {
- val parents = (
- if (!isInterface(mods, body) && !isScalaArray(name)) parents0 :+ scalaScalaObjectConstr
- else if (parents0.isEmpty) List(scalaAnyRefConstr)
- else parents0
- ) ++ (
- if (mods.isCase) List(productConstr, serializableConstr)
- else Nil
- )
+ }
+ def anyvalConstructor() = (
+ // Not a well-formed constructor, has to be finished later - see note
+ // regarding AnyVal constructor in AddInterfaces.
+ DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, Literal(Constant())))
+ )
+ val tstart0 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart
- Template(parents, self, constrMods, vparamss, argss, body, o2p(tstart))
- }
+ atPos(tstart0) {
+ // Exclude only the 9 primitives plus AnyVal.
+ if (inScalaRootPackage && ScalaValueClassNames.contains(name))
+ Template(parents0, self, anyvalConstructor :: body)
+ else
+ Template(anyrefParents, self, constrMods, vparamss, argss, body, o2p(tstart))
}
}
/* -------- TEMPLATES ------------------------------------------- */
- /** TemplateBody ::= [nl] `{' TemplateStatSeq `}'
+ /** {{{
+ * TemplateBody ::= [nl] `{' TemplateStatSeq `}'
+ * }}}
* @param isPre specifies whether in early initializer (true) or not (false)
*/
- def templateBody(isPre: Boolean) = inBraces(templateStatSeq(isPre)) match {
- case (self, Nil) => (self, List(EmptyTree))
+ def templateBody(isPre: Boolean) = inBraces(templateStatSeq(isPre = isPre)) match {
+ case (self, Nil) => (self, EmptyTree.asList)
case result => result
}
def templateBodyOpt(traitParentSeen: Boolean): (ValDef, List[Tree]) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
- templateBody(false)
+ templateBody(isPre = false)
} else {
if (in.token == LPAREN)
syntaxError((if (traitParentSeen) "parents of traits" else "traits or objects")+
@@ -2601,13 +2863,15 @@ self =>
}
}
- /** Refinement ::= [nl] `{' RefineStat {semi RefineStat} `}'
+ /** {{{
+ * Refinement ::= [nl] `{' RefineStat {semi RefineStat} `}'
+ * }}}
*/
def refinement(): List[Tree] = inBraces(refineStatSeq())
/* -------- STATSEQS ------------------------------------------- */
- /** Create a tree representing a packaging */
+ /** Create a tree representing a packaging. */
def makePackaging(start: Int, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
case x: RefTree => atPos(start, pkg.pos.point)(PackageDef(x, stats))
}
@@ -2622,11 +2886,15 @@ self =>
*/
/** Create a tree representing a package object, converting
+ * {{{
* package object foo { ... }
+ * }}}
* to
+ * {{{
* package foo {
* object `package` { ... }
* }
+ * }}}
*/
def makePackageObject(start: Int, objDef: ModuleDef): PackageDef = objDef match {
case ModuleDef(mods, name, impl) =>
@@ -2634,20 +2902,25 @@ self =>
start, atPos(o2p(objDef.pos.startOrPoint)){ Ident(name) }, List(ModuleDef(mods, nme.PACKAGEkw, impl)))
}
- /** Packaging ::= package QualId [nl] `{' TopStatSeq `}'
+ /** {{{
+ * Packaging ::= package QualId [nl] `{' TopStatSeq `}'
+ * }}}
*/
def packaging(start: Int): Tree = {
+ val nameOffset = in.offset
val pkg = pkgQualId()
val stats = inBracesOrNil(topStatSeq())
makePackaging(start, pkg, stats)
}
- /** TopStatSeq ::= TopStat {semi TopStat}
+ /** {{{
+ * TopStatSeq ::= TopStat {semi TopStat}
* TopStat ::= Annotations Modifiers TmplDef
* | Packaging
* | package object objectDef
* | Import
* |
+ * }}}
*/
def topStatSeq(): List[Tree] = {
val stats = new ListBuffer[Tree]
@@ -2676,13 +2949,22 @@ self =>
stats.toList
}
- /** TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStat {semi TemplateStat}
+ /** Informal - for the repl and other direct parser accessors.
+ */
+ def templateStats(): List[Tree] = templateStatSeq(isPre = false)._2 match {
+ case Nil => EmptyTree.asList
+ case stats => stats
+ }
+
+ /** {{{
+ * TemplateStatSeq ::= [id [`:' Type] `=>'] TemplateStat {semi TemplateStat}
* TemplateStat ::= Import
* | Annotations Modifiers Def
* | Annotations Modifiers Dcl
* | Expr1
* | super ArgumentExprs {ArgumentExprs}
* |
+ * }}}
* @param isPre specifies whether in early initializer (true) or not (false)
*/
def templateStatSeq(isPre : Boolean): (ValDef, List[Tree]) = checkNoEscapingPlaceholders {
@@ -2725,10 +3007,12 @@ self =>
(self, stats.toList)
}
- /** RefineStatSeq ::= RefineStat {semi RefineStat}
+ /** {{{
+ * RefineStatSeq ::= RefineStat {semi RefineStat}
* RefineStat ::= Dcl
* | type TypeDef
* |
+ * }}}
*/
def refineStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
val stats = new ListBuffer[Tree]
@@ -2751,7 +3035,7 @@ self =>
def localDef : List[Tree] = {
atEndPos {
atStartPos(in.offset) {
- val annots = annotations(true)
+ val annots = annotations(skipNewLines = true)
val mods = localModifiers() withAnnotations annots
if (!(mods hasFlag ~(Flags.IMPLICIT | Flags.LAZY))) defOrDcl(mods)
else List(tmplDef(mods))
@@ -2761,7 +3045,7 @@ self =>
*/
def localDef(implicitMod: Int): List[Tree] = {
- val annots = annotations(true)
+ val annots = annotations(skipNewLines = true)
val pos = in.offset
val mods = (localModifiers() | implicitMod) withAnnotations annots
val defs =
@@ -2769,24 +3053,26 @@ self =>
else List(tmplDef(pos, mods))
in.token match {
- case RBRACE | CASE => defs :+ (Literal(()) setPos o2p(in.offset))
+ case RBRACE | CASE => defs :+ (Literal(Constant()) setPos o2p(in.offset))
case _ => defs
}
}
- /** BlockStatSeq ::= { BlockStat semi } [ResultExpr]
+ /** {{{
+ * BlockStatSeq ::= { BlockStat semi } [ResultExpr]
* BlockStat ::= Import
* | Annotations [implicit] [lazy] Def
* | Annotations LocalModifiers TmplDef
* | Expr1
* |
+ * }}}
*/
def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
val stats = new ListBuffer[Tree]
while (!isStatSeqEnd && in.token != CASE) {
if (in.token == IMPORT) {
stats ++= importClause()
- acceptStatSep()
+ acceptStatSepOpt()
}
else if (isExprIntro) {
stats += statement(InBlock)
@@ -2813,7 +3099,9 @@ self =>
stats.toList
}
- /** CompilationUnit ::= {package QualId semi} TopStatSeq
+ /** {{{
+ * CompilationUnit ::= {package QualId semi} TopStatSeq
+ * }}}
*/
def compilationUnit(): Tree = checkNoEscapingPlaceholders {
def topstats(): List[Tree] = {
@@ -2829,8 +3117,10 @@ self =>
ts ++= topStatSeq()
}
} else {
+ val nameOffset = in.offset
in.flushDoc
val pkg = pkgQualId()
+
if (in.token == EOF) {
ts += makePackaging(start, pkg, List())
} else if (isStatSep) {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Patch.scala b/src/compiler/scala/tools/nsc/ast/parser/Patch.scala
index cee8be2..0829b1a 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Patch.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Patch.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc.ast.parser
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index b90b55a..8d295a2 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -1,12 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package ast.parser
-import scala.tools.nsc.util._
-import Chars._
+import scala.tools.nsc.util.CharArrayReader
+import scala.reflect.internal.util._
+import scala.reflect.internal.Chars._
import Tokens._
import scala.annotation.switch
import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
@@ -28,6 +29,7 @@ trait ScannersCommon {
def warning(off: Int, msg: String): Unit
def error (off: Int, msg: String): Unit
def incompleteInputError(off: Int, msg: String): Unit
+ def deprecationWarning(off: Int, msg: String): Unit
}
def createKeywordArray(keywords: Seq[(Name, Int)], defaultToken: Int): (Int, Array[Int]) = {
@@ -82,6 +84,9 @@ trait Scanners extends ScannersCommon {
}
abstract class Scanner extends CharArrayReader with TokenData with ScannerCommon {
+ private def isDigit(c: Char) = java.lang.Character isDigit c
+
+ def isAtEnd = charOffset >= buf.length
def flush = { charOffset = offset; nextChar(); this }
@@ -108,11 +113,24 @@ trait Scanners extends ScannersCommon {
cbuf.append(c)
}
+ /** Determines whether this scanner should emit identifier deprecation warnings,
+ * e.g. when seeing `macro` or `then`, which are planned to become keywords in future versions of Scala.
+ */
+ protected def emitIdentifierDeprecationWarnings = true
+
/** Clear buffer and set name and token */
- private def finishNamed() {
+ private def finishNamed(idtoken: Int = IDENTIFIER) {
name = newTermName(cbuf.toString)
- token = name2token(name)
cbuf.clear()
+ token = idtoken
+ if (idtoken == IDENTIFIER) {
+ val idx = name.start - kwOffset
+ if (idx >= 0 && idx < kwArray.length) {
+ token = kwArray(idx)
+ if (token == IDENTIFIER && allowIdent != name && emitIdentifierDeprecationWarnings)
+ deprecationWarning(name+" is now a reserved word; usage as an identifier is deprecated")
+ }
+ }
}
/** Clear buffer and set string */
@@ -124,26 +142,17 @@ trait Scanners extends ScannersCommon {
/** Should doc comments be built? */
def buildDocs: Boolean = forScaladoc
- /** buffer for the documentation comment
+ /** holder for the documentation comment
*/
- var docBuffer: StringBuilder = null
- var docPos: Position = null
+ var docComment: DocComment = null
- /** Return current docBuffer and set docBuffer to null */
def flushDoc: DocComment = {
- val ret = if (docBuffer != null) DocComment(docBuffer.toString, docPos) else null
- docBuffer = null
+ val ret = docComment
+ docComment = null
ret
}
- /** add the given character to the documentation buffer
- */
- protected def putDocChar(c: Char) {
- if (docBuffer ne null) docBuffer.append(c)
- }
-
protected def foundComment(value: String, start: Int, end: Int) = ()
-
protected def foundDocComment(value: String, start: Int, end: Int) = ()
private class TokenData0 extends TokenData
@@ -154,11 +163,31 @@ trait Scanners extends ScannersCommon {
val prev : TokenData = new TokenData0
/** a stack of tokens which indicates whether line-ends can be statement separators
+ * also used for keeping track of nesting levels.
+ * We keep track of the closing symbol of a region. This can be
+ * RPAREN if region starts with '('
+ * RBRACKET if region starts with '['
+ * RBRACE if region starts with '{'
+ * ARROW if region starts with `case'
+ * STRINGLIT if region is a string interpolation expression starting with '${'
+ * (the STRINGLIT appears twice in succession on the stack iff the
+ * expression is a multiline string literal).
*/
var sepRegions: List[Int] = List()
// Get next token ------------------------------------------------------------
+ /** Are we directly in a string interpolation expression?
+ */
+ private def inStringInterpolation =
+ sepRegions.nonEmpty && sepRegions.head == STRINGLIT
+
+ /** Are we directly in a multiline string interpolation expression?
+ * @pre inStringInterpolation
+ */
+ private def inMultiLineInterpolation =
+ inStringInterpolation && sepRegions.tail.nonEmpty && sepRegions.tail.head == STRINGPART
+
/** read next token and return last offset
*/
def skipToken(): Offset = {
@@ -167,6 +196,20 @@ trait Scanners extends ScannersCommon {
off
}
+ /** Allow an otherwise deprecated ident here */
+ private var allowIdent: Name = nme.EMPTY
+
+ /** Get next token, and allow the otherwise deprecated ident `name` */
+ def nextTokenAllow(name: Name) = {
+ val prev = allowIdent
+ allowIdent = name
+ try {
+ nextToken()
+ } finally {
+ allowIdent = prev
+ }
+ }
+
/** Produce next token, filling TokenData fields of Scanner.
*/
def nextToken() {
@@ -182,26 +225,38 @@ trait Scanners extends ScannersCommon {
case CASE =>
sepRegions = ARROW :: sepRegions
case RBRACE =>
- sepRegions = sepRegions dropWhile (_ != RBRACE)
+ while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
+ sepRegions = sepRegions.tail
if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
- case RBRACKET | RPAREN | ARROW =>
+ docComment = null
+ case RBRACKET | RPAREN =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
- case _ =>
- }
- (lastToken: @switch) match {
- case RBRACE | RBRACKET | RPAREN =>
- docBuffer = null
+ docComment = null
+ case ARROW =>
+ if (!sepRegions.isEmpty && sepRegions.head == lastToken)
+ sepRegions = sepRegions.tail
+ case STRINGLIT =>
+ if (inMultiLineInterpolation)
+ sepRegions = sepRegions.tail.tail
+ else if (inStringInterpolation)
+ sepRegions = sepRegions.tail
case _ =>
}
// Read a token or copy it from `next` tokenData
if (next.token == EMPTY) {
lastOffset = charOffset - 1
- if(lastOffset > 0 && buf(lastOffset) == '\n' && buf(lastOffset - 1) == '\r') {
+ if (lastOffset > 0 && buf(lastOffset) == '\n' && buf(lastOffset - 1) == '\r') {
lastOffset -= 1
}
- fetchToken()
+ if (inStringInterpolation) fetchStringPart() else fetchToken()
+ if(token == ERROR) {
+ if (inMultiLineInterpolation)
+ sepRegions = sepRegions.tail.tail
+ else if (inStringInterpolation)
+ sepRegions = sepRegions.tail
+ }
} else {
this copyFrom next
next.token = EMPTY
@@ -225,10 +280,16 @@ trait Scanners extends ScannersCommon {
prev copyFrom this
val nextLastOffset = charOffset - 1
fetchToken()
+ def resetOffset() {
+ offset = prev.offset
+ lastOffset = prev.lastOffset
+ }
if (token == CLASS) {
token = CASECLASS
+ resetOffset()
} else if (token == OBJECT) {
token = CASEOBJECT
+ resetOffset()
} else {
lastOffset = nextLastOffset
next copyFrom this
@@ -298,18 +359,23 @@ trait Scanners extends ScannersCommon {
'z' =>
putChar(ch)
nextChar()
- getIdentRest() // scala-mode: wrong indent for multi-line case blocks
+ getIdentRest()
+ if (ch == '"' && token == IDENTIFIER)
+ token = INTERPOLATIONID
case '<' => // is XMLSTART?
- val last = if (charOffset >= 2) buf(charOffset - 2) else ' '
- nextChar()
- last match {
- case ' '|'\t'|'\n'|'{'|'('|'>' if isNameStart(ch) || ch == '!' || ch == '?' =>
- token = XMLSTART
- case _ =>
- // Console.println("found '<', but last is '"+in.last+"'"); // DEBUG
- putChar('<')
- getOperatorRest()
+ def fetchLT() = {
+ val last = if (charOffset >= 2) buf(charOffset - 2) else ' '
+ nextChar()
+ last match {
+ case ' ' | '\t' | '\n' | '{' | '(' | '>' if isNameStart(ch) || ch == '!' || ch == '?' =>
+ token = XMLSTART
+ case _ =>
+ // Console.println("found '<', but last is '"+in.last+"'"); // DEBUG
+ putChar('<')
+ getOperatorRest()
+ }
}
+ fetchLT
case '~' | '!' | '@' | '#' | '%' |
'^' | '*' | '+' | '-' | /*'<' | */
'>' | '?' | ':' | '=' | '&' |
@@ -326,53 +392,89 @@ trait Scanners extends ScannersCommon {
getOperatorRest()
}
case '0' =>
- putChar(ch)
- nextChar()
- if (ch == 'x' || ch == 'X') {
+ def fetchZero() = {
+ putChar(ch)
nextChar()
- base = 16
- } else {
- base = 8
+ if (ch == 'x' || ch == 'X') {
+ nextChar()
+ base = 16
+ } else {
+ /**
+ * What should leading 0 be in the future? It is potentially dangerous
+ * to let it be base-10 because of history. Should it be an error? Is
+ * there a realistic situation where one would need it?
+ */
+ if (isDigit(ch)) {
+ if (opt.future) syntaxError("Non-zero numbers may not have a leading zero.")
+ else deprecationWarning("Treating numbers with a leading zero as octal is deprecated.")
+ }
+ base = 8
+ }
+ getNumber()
}
- getNumber()
+ fetchZero
case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
base = 10
getNumber()
case '`' =>
getBackquotedIdent()
case '\"' =>
- nextChar()
- if (ch == '\"') {
- nextChar()
- if (ch == '\"') {
+ def fetchDoubleQuote() = {
+ if (token == INTERPOLATIONID) {
nextRawChar()
- getMultiLineStringLit()
+ if (ch == '\"') {
+ nextRawChar()
+ if (ch == '\"') {
+ offset += 3
+ nextRawChar()
+ getStringPart(multiLine = true)
+ sepRegions = STRINGPART :: sepRegions // indicate string part
+ sepRegions = STRINGLIT :: sepRegions // once more to indicate multi line string part
+ } else {
+ token = STRINGLIT
+ strVal = ""
+ }
+ } else {
+ offset += 1
+ getStringPart(multiLine = false)
+ sepRegions = STRINGLIT :: sepRegions // indicate single line string part
+ }
} else {
- token = STRINGLIT
- strVal = ""
+ nextChar()
+ if (ch == '\"') {
+ nextChar()
+ if (ch == '\"') {
+ nextRawChar()
+ getRawStringLit()
+ } else {
+ token = STRINGLIT
+ strVal = ""
+ }
+ } else {
+ getStringLit()
+ }
}
- } else if (getStringLit('\"')) {
- setStrVal()
- token = STRINGLIT
- } else {
- syntaxError("unclosed string literal")
}
+ fetchDoubleQuote
case '\'' =>
- nextChar()
- if (isIdentifierStart(ch))
- charLitOr(getIdentRest)
- else if (isOperatorPart(ch) && (ch != '\\'))
- charLitOr(getOperatorRest)
- else {
- getLitChar()
- if (ch == '\'') {
- nextChar()
- token = CHARLIT
- setStrVal()
- } else {
- syntaxError("unclosed character literal")
+ def fetchSingleQuote() = {
+ nextChar()
+ if (isIdentifierStart(ch))
+ charLitOr(getIdentRest)
+ else if (isOperatorPart(ch) && (ch != '\\'))
+ charLitOr(getOperatorRest)
+ else {
+ getLitChar()
+ if (ch == '\'') {
+ nextChar()
+ token = CHARLIT
+ setStrVal()
+ } else {
+ syntaxError("unclosed character literal")
+ }
}
}
+ fetchSingleQuote
case '.' =>
nextChar()
if ('0' <= ch && ch <= '9') {
@@ -397,28 +499,31 @@ trait Scanners extends ScannersCommon {
case ']' =>
nextChar(); token = RBRACKET
case SU =>
- if (charOffset >= buf.length) token = EOF
+ if (isAtEnd) token = EOF
else {
syntaxError("illegal character")
nextChar()
}
case _ =>
- if (ch == '\u21D2') {
- nextChar(); token = ARROW
- } else if (ch == '\u2190') {
- nextChar(); token = LARROW
- } else if (Character.isUnicodeIdentifierStart(ch)) {
- putChar(ch)
- nextChar()
- getIdentRest()
- } else if (isSpecial(ch)) {
- putChar(ch)
- nextChar()
- getOperatorRest()
- } else {
- syntaxError("illegal character")
- nextChar()
+ def fetchOther() = {
+ if (ch == '\u21D2') {
+ nextChar(); token = ARROW
+ } else if (ch == '\u2190') {
+ nextChar(); token = LARROW
+ } else if (Character.isUnicodeIdentifierStart(ch)) {
+ putChar(ch)
+ nextChar()
+ getIdentRest()
+ } else if (isSpecial(ch)) {
+ putChar(ch)
+ nextChar()
+ getOperatorRest()
+ } else {
+ syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch: Int)) + "'")
+ nextChar()
+ }
}
+ fetchOther
}
}
@@ -435,7 +540,7 @@ trait Scanners extends ScannersCommon {
nextChar()
} while ((ch != CR) && (ch != LF) && (ch != SU))
} else {
- docBuffer = null
+ docComment = null
var openComments = 1
appendToComment()
nextChar()
@@ -443,24 +548,23 @@ trait Scanners extends ScannersCommon {
var buildingDocComment = false
if (ch == '*' && buildDocs) {
buildingDocComment = true
- docBuffer = new StringBuilder("/**")
}
while (openComments > 0) {
do {
do {
if (ch == '/') {
- nextChar(); putDocChar(ch); appendToComment()
+ nextChar(); appendToComment()
if (ch == '*') {
- nextChar(); putDocChar(ch); appendToComment()
+ nextChar(); appendToComment()
openComments += 1
}
}
if (ch != '*' && ch != SU) {
- nextChar(); putDocChar(ch); appendToComment()
+ nextChar(); appendToComment()
}
} while (ch != '*' && ch != SU)
while (ch == '*') {
- nextChar(); putDocChar(ch); appendToComment()
+ nextChar(); appendToComment()
}
} while (ch != '/' && ch != SU)
if (ch == '/') nextChar()
@@ -501,12 +605,13 @@ trait Scanners extends ScannersCommon {
// Identifiers ---------------------------------------------------------------
- private def getBackquotedIdent(): Unit = {
+ private def getBackquotedIdent() {
nextChar()
- if (getStringLit('`')) {
- finishNamed();
+ getLitChars('`')
+ if (ch == '`') {
+ nextChar()
+ finishNamed(BACKQUOTED_IDENT)
if (name.length == 0) syntaxError("empty quoted identifier")
- token = BACKQUOTED_IDENT
}
else syntaxError("unclosed quoted identifier")
}
@@ -575,50 +680,126 @@ trait Scanners extends ScannersCommon {
}
}
- private def getStringLit(delimiter: Char): Boolean = {
- while (ch != delimiter && (isUnicodeEscape || ch != CR && ch != LF && ch != SU)) {
- getLitChar()
- }
- if (ch == delimiter) { nextChar(); true }
- else false
+
+// Literals -----------------------------------------------------------------
+
+ private def getStringLit() = {
+ getLitChars('"')
+ if (ch == '"') {
+ setStrVal()
+ nextChar()
+ token = STRINGLIT
+ } else syntaxError("unclosed string literal")
}
- private def getMultiLineStringLit() {
+ private def getRawStringLit(): Unit = {
if (ch == '\"') {
nextRawChar()
- if (ch == '\"') {
+ if (isTripleQuote()) {
+ setStrVal()
+ token = STRINGLIT
+ } else
+ getRawStringLit()
+ } else if (ch == SU) {
+ incompleteInputError("unclosed multi-line string literal")
+ } else {
+ putChar(ch)
+ nextRawChar()
+ getRawStringLit()
+ }
+ }
+
+ @annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
+ def finishStringPart() = {
+ setStrVal()
+ token = STRINGPART
+ next.lastOffset = charOffset - 1
+ next.offset = charOffset - 1
+ }
+ if (ch == '"') {
+ if (multiLine) {
nextRawChar()
- if (ch == '\"') {
- nextChar()
- while (ch == '\"') {
- putChar('\"')
- nextChar()
- }
- token = STRINGLIT
+ if (isTripleQuote()) {
setStrVal()
- } else {
- putChar('\"')
- putChar('\"')
- getMultiLineStringLit()
+ token = STRINGLIT
+ } else
+ getStringPart(multiLine)
+ } else {
+ nextChar()
+ setStrVal()
+ token = STRINGLIT
+ }
+ } else if (ch == '$') {
+ nextRawChar()
+ if (ch == '$') {
+ putChar(ch)
+ nextRawChar()
+ getStringPart(multiLine)
+ } else if (ch == '{') {
+ finishStringPart()
+ nextRawChar()
+ next.token = LBRACE
+ } else if (Character.isUnicodeIdentifierStart(ch)) {
+ finishStringPart()
+ do {
+ putChar(ch)
+ nextRawChar()
+ } while (ch != SU && Character.isUnicodeIdentifierPart(ch))
+ next.token = IDENTIFIER
+ next.name = newTermName(cbuf.toString)
+ cbuf.clear()
+ val idx = next.name.start - kwOffset
+ if (idx >= 0 && idx < kwArray.length) {
+ next.token = kwArray(idx)
}
} else {
- putChar('\"')
- getMultiLineStringLit()
+ syntaxError("invalid string interpolation: `$$', `$'ident or `$'BlockExpr expected")
}
- } else if (ch == SU) {
- incompleteInputError("unclosed multi-line string literal")
} else {
- putChar(ch)
- nextRawChar()
- getMultiLineStringLit()
+ val isUnclosedLiteral = !isUnicodeEscape && (ch == SU || (!multiLine && (ch == CR || ch == LF)))
+ if (isUnclosedLiteral) {
+ if (multiLine)
+ incompleteInputError("unclosed multi-line string literal")
+ else
+ syntaxError("unclosed string literal")
+ }
+ else {
+ putChar(ch)
+ nextRawChar()
+ getStringPart(multiLine)
+ }
}
}
-// Literals -----------------------------------------------------------------
+ private def fetchStringPart() = {
+ offset = charOffset - 1
+ getStringPart(multiLine = inMultiLineInterpolation)
+ }
- /** read next character in character or string literal:
- */
- protected def getLitChar() =
+ private def isTripleQuote(): Boolean =
+ if (ch == '"') {
+ nextRawChar()
+ if (ch == '"') {
+ nextChar()
+ while (ch == '"') {
+ putChar('"')
+ nextChar()
+ }
+ true
+ } else {
+ putChar('"')
+ putChar('"')
+ false
+ }
+ } else {
+ putChar('"')
+ false
+ }
+
+ /** copy current character into cbuf, interpreting any escape sequences,
+ * and advance to next character.
+ */
+ protected def getLitChar(): Unit =
if (ch == '\\') {
nextChar()
if ('0' <= ch && ch <= '7') {
@@ -644,9 +825,7 @@ trait Scanners extends ScannersCommon {
case '\"' => putChar('\"')
case '\'' => putChar('\'')
case '\\' => putChar('\\')
- case _ =>
- syntaxError(charOffset - 1, "invalid escape character")
- putChar(ch)
+ case _ => invalidEscape()
}
nextChar()
}
@@ -655,6 +834,16 @@ trait Scanners extends ScannersCommon {
nextChar()
}
+ protected def invalidEscape(): Unit = {
+ syntaxError(charOffset - 1, "invalid escape character")
+ putChar(ch)
+ }
+
+ private def getLitChars(delimiter: Char) = {
+ while (ch != delimiter && !isAtEnd && (ch != SU && ch != CR && ch != LF || isUnicodeEscape))
+ getLitChar()
+ }
+
/** read fractional part and exponent of floating point number
* if one is present.
*/
@@ -739,12 +928,25 @@ trait Scanners extends ScannersCommon {
/** Convert current strVal, base to double value
*/
def floatVal(negated: Boolean): Double = {
+
val limit: Double =
if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue
try {
val value: Double = java.lang.Double.valueOf(strVal).doubleValue()
+ def isDeprecatedForm = {
+ val idx = strVal indexOf '.'
+ (idx == strVal.length - 1) || (
+ (idx >= 0)
+ && (idx + 1 < strVal.length)
+ && (!Character.isDigit(strVal charAt (idx + 1)))
+ )
+ }
if (value > limit)
syntaxError("floating point number too large")
+ if (isDeprecatedForm) {
+ deprecationWarning("This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.")
+ }
+
if (negated) -value else value
} catch {
case _: NumberFormatException =>
@@ -763,9 +965,8 @@ trait Scanners extends ScannersCommon {
/** Read a number into strVal and set base
*/
protected def getNumber() {
- def isDigit(c: Char) = java.lang.Character isDigit c
val base1 = if (base < 10) 10 else base
- // read 8,9's even if format is octal, produce a malformed number error afterwards.
+ // read 8,9's even if format is octal, produce a malformed number error afterwards.
while (digit2int(ch, base1) >= 0) {
putChar(ch)
nextChar()
@@ -798,29 +999,36 @@ trait Scanners extends ScannersCommon {
restOfUncertainToken()
else {
val lookahead = lookaheadReader
- val isDefinitelyNumber =
- (lookahead.getc(): @switch) match {
- /** Another digit is a giveaway. */
- case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
- true
-
- /** Backquoted idents like 22.`foo`. */
- case '`' =>
- return setStrVal() /** Note the early return */
-
- /** These letters may be part of a literal, or a method invocation on an Int */
- case 'd' | 'D' | 'f' | 'F' =>
- !isIdentifierPart(lookahead.getc())
-
- /** A little more special handling for e.g. 5e7 */
- case 'e' | 'E' =>
- val ch = lookahead.getc()
- !isIdentifierPart(ch) || (isDigit(ch) || ch == '+' || ch == '-')
-
- case x =>
- !isIdentifierStart(x)
- }
-
+ val c = lookahead.getc()
+
+ /** As of scala 2.11, it isn't a number unless c here is a digit, so
+ * opt.future excludes the rest of the logic.
+ */
+ if (opt.future && !isDigit(c))
+ return setStrVal()
+
+ val isDefinitelyNumber = (c: @switch) match {
+ /** Another digit is a giveaway. */
+ case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
+ true
+
+ /** Backquoted idents like 22.`foo`. */
+ case '`' =>
+ return setStrVal() /** Note the early return */
+
+ /** These letters may be part of a literal, or a method invocation on an Int.
+ */
+ case 'd' | 'D' | 'f' | 'F' =>
+ !isIdentifierPart(lookahead.getc())
+
+ /** A little more special handling for e.g. 5e7 */
+ case 'e' | 'E' =>
+ val ch = lookahead.getc()
+ !isIdentifierPart(ch) || (isDigit(ch) || ch == '+' || ch == '-')
+
+ case x =>
+ !isIdentifierStart(x)
+ }
if (isDefinitelyNumber) restOfNumber()
else restOfUncertainToken()
}
@@ -857,6 +1065,8 @@ trait Scanners extends ScannersCommon {
*/
def syntaxError(msg: String): Unit = syntaxError(offset, msg)
+ def deprecationWarning(msg: String): Unit = deprecationWarning(offset, msg)
+
/** signal an error where the input ended in the middle of a token */
def incompleteInputError(msg: String) {
incompleteInputError(offset, msg)
@@ -879,6 +1089,10 @@ trait Scanners extends ScannersCommon {
"double(" + floatVal + ")"
case STRINGLIT =>
"string(" + strVal + ")"
+ case STRINGPART =>
+ "stringpart(" + strVal + ")"
+ case INTERPOLATIONID =>
+ "interpolationid(" + name + ")"
case SEMI =>
";"
case NEWLINE =>
@@ -965,8 +1179,9 @@ trait Scanners extends ScannersCommon {
nme.VIEWBOUNDkw -> VIEWBOUND,
nme.SUPERTYPEkw -> SUPERTYPE,
nme.HASHkw -> HASH,
- nme.ATkw -> AT
- )
+ nme.ATkw -> AT,
+ nme.MACROkw -> IDENTIFIER,
+ nme.THENkw -> IDENTIFIER)
private var kwOffset: Int = -1
private val kwArray: Array[Int] = {
@@ -975,14 +1190,7 @@ trait Scanners extends ScannersCommon {
arr
}
- final val token2name = allKeywords map (_.swap) toMap
-
- /** Convert name to token */
- final def name2token(name: Name) = {
- val idx = name.start - kwOffset
- if (idx >= 0 && idx < kwArray.length) kwArray(idx)
- else IDENTIFIER
- }
+ final val token2name = (allKeywords map (_.swap)).toMap
// Token representation ----------------------------------------------------
@@ -994,7 +1202,7 @@ trait Scanners extends ScannersCommon {
case LONGLIT => "long literal"
case FLOATLIT => "float literal"
case DOUBLELIT => "double literal"
- case STRINGLIT => "string literal"
+ case STRINGLIT | STRINGPART | INTERPOLATIONID => "string literal"
case SYMBOLLIT => "symbol literal"
case LPAREN => "'('"
case RPAREN => "')'"
@@ -1028,7 +1236,8 @@ trait Scanners extends ScannersCommon {
override val decodeUni: Boolean = !settings.nouescape.value
// suppress warnings, throw exception on errors
- def warning(off: Offset, msg: String): Unit = {}
+ def warning(off: Offset, msg: String): Unit = ()
+ def deprecationWarning(off: Offset, msg: String): Unit = ()
def error (off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
def incompleteInputError(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
}
@@ -1038,8 +1247,9 @@ trait Scanners extends ScannersCommon {
class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
def this(unit: CompilationUnit) = this(unit, List())
- override def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg)
- override def error (off: Offset, msg: String) = unit.error(unit.position(off), msg)
+ override def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg)
+ override def deprecationWarning(off: Offset, msg: String) = unit.deprecationWarning(unit.position(off), msg)
+ override def error (off: Offset, msg: String) = unit.error(unit.position(off), msg)
override def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg)
private var bracePatches: List[BracePatch] = patches
@@ -1086,18 +1296,18 @@ trait Scanners extends ScannersCommon {
override def foundComment(value: String, start: Int, end: Int) {
val pos = new RangePosition(unit.source, start, start, end)
- unit.comments += unit.Comment(value, pos)
- unit.comment(pos, value)
+ unit.comment(pos, value)
}
override def foundDocComment(value: String, start: Int, end: Int) {
- docPos = new RangePosition(unit.source, start, start, end)
+ val docPos = new RangePosition(unit.source, start, start, end)
+ docComment = new DocComment(value, docPos)
unit.comment(docPos, value)
}
}
class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) {
- var balance = collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ var balance = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
init()
@@ -1112,7 +1322,7 @@ trait Scanners extends ScannersCommon {
var lineCount = 1
var lastOffset = 0
var indent = 0
- val oldBalance = collection.mutable.Map[Int, Int]()
+ val oldBalance = scala.collection.mutable.Map[Int, Int]()
def markBalance() = for ((k, v) <- balance) oldBalance(k) = v
markBalance()
@@ -1283,6 +1493,10 @@ trait Scanners extends ScannersCommon {
def improves(patches1: List[BracePatch]): Boolean =
imbalanceMeasure > new ParensAnalyzer(unit, patches1).imbalanceMeasure
+ // don't emit deprecation warnings about identifiers like `macro` or `then`
+ // when skimming through the source file trying to heal braces
+ override def emitIdentifierDeprecationWarnings = false
+
override def error(offset: Int, msg: String) {}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
old mode 100644
new mode 100755
index 528705b..e8ef670
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Burak Emir
*/
@@ -7,17 +7,19 @@ package scala.tools.nsc
package ast.parser
import scala.collection.{ mutable, immutable }
-import xml.{ EntityRef, Text }
-import xml.XML.{ xmlns }
+import scala.xml.{ EntityRef, Text }
+import scala.xml.XML.{ xmlns }
import symtab.Flags.MUTABLE
-import scala.tools.util.StringOps.splitWhere
+import scala.reflect.internal.util.StringOps.splitWhere
+import scala.language.implicitConversions
-/** This class builds instance of <code>Tree</code> that represent XML.
+/** This class builds instance of `Tree` that represent XML.
*
- * Note from martin: This needs to have its position info reworked. I don't understand exactly
- * what's done here. To make validation pass, I set many positions to be transparent. Not sure this
- * is a good idea for navigating XML trees in the IDE< but it's the best I can do right now. If someone
- * who understands this part better wants to give it a shot, please do!
+ * Note from martin: This needs to have its position info reworked. I don't
+ * understand exactly what's done here. To make validation pass, I set many
+ * positions to be transparent. Not sure this is a good idea for navigating
+ * XML trees in the IDE but it's the best I can do right now. If someone
+ * who understands this part better wants to give it a shot, please do!
*
* @author Burak Emir
* @version 1.0
@@ -26,9 +28,9 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
val global: Global
import global._
- var isPattern: Boolean = _
+ private[parser] var isPattern: Boolean = _
- trait XMLTypeNames extends LibraryTypeNames {
+ private object xmltypes extends TypeNames {
val _Comment: NameType = "Comment"
val _Elem: NameType = "Elem"
val _EntityRef: NameType = "EntityRef"
@@ -43,7 +45,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
val _UnprefixedAttribute: NameType = "UnprefixedAttribute"
}
- trait XMLTermNames extends LibraryTermNames {
+ private object xmlterms extends TermNames {
val _Null: NameType = "Null"
val __Elem: NameType = "Elem"
val __Text: NameType = "Text"
@@ -55,16 +57,10 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
val _xml: NameType = "xml"
}
- private object xmltypes extends XMLTypeNames {
- type NameType = TypeName
- implicit def createNameType(name: String): TypeName = newTypeName(name)
- }
- private object xmlterms extends XMLTermNames {
- type NameType = TermName
- implicit def createNameType(name: String): TermName = newTermName(name)
- }
- import xmltypes._
- import xmlterms._
+ import xmltypes.{_Comment, _Elem, _EntityRef, _Group, _MetaData, _NamespaceBinding, _NodeBuffer,
+ _PrefixedAttribute, _ProcInstr, _Text, _Unparsed, _UnprefixedAttribute}
+
+ import xmlterms.{_Null, __Elem, __Text, _buf, _md, _plus, _scope, _tmpscope, _xml}
// convenience methods
private def LL[A](x: A*): List[List[A]] = List(List(x:_*))
@@ -97,7 +93,8 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
pre: Tree,
label: Tree,
attrs: Tree,
- scope:Tree,
+ scope: Tree,
+ empty: Boolean,
children: Seq[Tree]): Tree =
{
def starArgs =
@@ -105,7 +102,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
else List(Typed(makeXMLseq(pos, children), wildStar))
def pat = Apply(_scala_xml__Elem, List(pre, label, wild, wild) ::: convertToTextPat(children))
- def nonpat = New(_scala_xml_Elem, List(List(pre, label, attrs, scope) ::: starArgs))
+ def nonpat = New(_scala_xml_Elem, List(List(pre, label, attrs, scope, if (empty) Literal(Constant(true)) else Literal(Constant(false))) ::: starArgs))
atPos(pos) { if (isPattern) pat else nonpat }
}
@@ -136,7 +133,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
case (Some(pre), rest) => (const(pre), const(rest))
case _ => (wild, const(n))
}
- mkXML(pos, true, prepat, labpat, null, null, args)
+ mkXML(pos, true, prepat, labpat, null, null, false, args)
}
protected def convertToTextPat(t: Tree): Tree = t match {
@@ -147,7 +144,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
(buf map convertToTextPat).toList
def parseAttribute(pos: Position, s: String): Tree = {
- val ts = xml.Utility.parseAttributeValue(s) map {
+ val ts = scala.xml.Utility.parseAttributeValue(s) map {
case Text(s) => text(pos, s)
case EntityRef(s) => entityRef(pos, s)
}
@@ -165,7 +162,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
/** could optimize if args.length == 0, args.length == 1 AND args(0) is <: Node. */
def makeXMLseq(pos: Position, args: Seq[Tree]) = {
- val buffer = ValDef(NoMods, _buf, TypeTree(), New(_scala_xml_NodeBuffer, List(Nil)))
+ val buffer = ValDef(NoMods, _buf, TypeTree(), New(_scala_xml_NodeBuffer, ListOfNil))
val applies = args filterNot isEmptyText map (t => Apply(Select(Ident(_buf), _plus), List(t)))
atPos(pos)( Block(buffer :: applies.toList, Ident(_buf)) )
@@ -184,7 +181,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
def unparsed(pos: Position, str: String): Tree =
atPos(pos)( New(_scala_xml_Unparsed, LL(const(str))) )
- def element(pos: Position, qname: String, attrMap: mutable.Map[String, Tree], args: Seq[Tree]): Tree = {
+ def element(pos: Position, qname: String, attrMap: mutable.Map[String, Tree], empty: Boolean, args: Seq[Tree]): Tree = {
def handleNamespaceBinding(pre: String, z: String): Tree = {
def mkAssign(t: Tree): Tree = Assign(
Ident(_tmpscope),
@@ -255,6 +252,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
const(newlabel),
makeSymbolicAttrs,
Ident(_scope),
+ empty,
args
)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
index b5ec0ce..8a9ce89 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -24,7 +24,7 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse
import global._
informProgress("parsing " + unit)
unit.body =
- if (unit.source.file.name.endsWith(".java")) new JavaUnitParser(unit).parse()
+ if (unit.isJava) new JavaUnitParser(unit).parse()
else if (reporter.incompleteHandled) new UnitParser(unit).parse()
else new UnitParser(unit).smartParse()
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
index e530b72..c3fd414 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
@@ -1,19 +1,19 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package ast.parser
-import annotation.switch
+import scala.annotation.switch
/** Common code between JavaTokens and Tokens. Not as much (and not as concrete)
* as one might like because JavaTokens for no clear reason chose new numbers for
* identical token sets.
*/
abstract class Tokens {
- import util.Chars._
+ import scala.reflect.internal.Chars._
/** special tokens */
final val EMPTY = -3
@@ -37,27 +37,31 @@ abstract class Tokens {
def isKeyword(code: Int): Boolean
def isSymbol(code: Int): Boolean
- final def isSpace(at: Char) = at == ' ' || at == '\t'
- final def isNewLine(at: Char) = at == CR || at == LF || at == FF
- final def isBrace(code : Int) = code >= LPAREN && code <= RBRACE
- final def isOpenBrace(code : Int) = isBrace(code) && (code % 2 == 0)
- final def isCloseBrace(code : Int) = isBrace(code) && (code % 2 == 1)
+ final def isSpace(at: Char) = at == ' ' || at == '\t'
+ final def isNewLine(at: Char) = at == CR || at == LF || at == FF
+ final def isBrace(code: Int) = code >= LPAREN && code <= RBRACE
+ final def isOpenBrace(code: Int) = isBrace(code) && (code % 2 == 0)
+ final def isCloseBrace(code: Int) = isBrace(code) && (code % 2 == 1)
}
object Tokens extends Tokens {
- final val SYMBOLLIT = 7
- def isLiteral(code : Int) =
- code >= CHARLIT && code <= SYMBOLLIT
+ final val STRINGPART = 7 // a part of an interpolated string
+ final val SYMBOLLIT = 8
+ final val INTERPOLATIONID = 9 // the lead identifier of an interpolated string
+
+ def isLiteral(code: Int) =
+ code >= CHARLIT && code <= INTERPOLATIONID
+
/** identifiers */
final val IDENTIFIER = 10
final val BACKQUOTED_IDENT = 11
- def isIdentifier(code : Int) =
+ def isIdentifier(code: Int) =
code >= IDENTIFIER && code <= BACKQUOTED_IDENT
- @switch def canBeginExpression(code : Int) = code match {
+ @switch def canBeginExpression(code: Int) = code match {
case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true
- case LBRACE|LPAREN|LBRACKET|COMMENT|STRINGLIT => true
+ case LBRACE|LPAREN|LBRACKET|COMMENT => true
case IF|DO|WHILE|FOR|NEW|TRY|THROW => true
case NULL|THIS|TRUE|FALSE => true
case code => isLiteral(code)
@@ -106,11 +110,13 @@ object Tokens extends Tokens {
final val MATCH = 58
final val FORSOME = 59
final val LAZY = 61
+ final val MACRO = 62 // not yet used in 2.10
+ final val THEN = 63 // not yet used in 2.10
- def isKeyword(code : Int) =
+ def isKeyword(code: Int) =
code >= IF && code <= LAZY
- @switch def isDefinition(code : Int) = code match {
+ @switch def isDefinition(code: Int) = code match {
case CLASS|TRAIT|OBJECT => true
case CASECLASS|CASEOBJECT => true
case DEF|VAL|VAR => true
@@ -135,7 +141,7 @@ object Tokens extends Tokens {
final val AT = 83
final val VIEWBOUND = 84
- def isSymbol(code : Int) =
+ def isSymbol(code: Int) =
code >= COMMA && code <= VIEWBOUND
/** parenthesis */
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 810d3e7..1412bff 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -29,11 +29,13 @@ abstract class TreeBuilder {
def rootId(name: Name) = gen.rootId(name)
def rootScalaDot(name: Name) = gen.rootScalaDot(name)
def scalaDot(name: Name) = gen.scalaDot(name)
- def scalaAnyRefConstr = gen.scalaAnyRefConstr
- def scalaUnitConstr = gen.scalaUnitConstr
- def scalaScalaObjectConstr = gen.scalaScalaObjectConstr
- def productConstr = gen.productConstr
- def serializableConstr = gen.serializableConstr
+ def scalaAnyRefConstr = scalaDot(tpnme.AnyRef)
+ def scalaAnyValConstr = scalaDot(tpnme.AnyVal)
+ def scalaAnyConstr = scalaDot(tpnme.Any)
+ def scalaUnitConstr = scalaDot(tpnme.Unit)
+ def productConstr = scalaDot(tpnme.Product)
+ def productConstrN(n: Int) = scalaDot(newTypeName("Product" + n))
+ def serializableConstr = scalaDot(tpnme.Serializable)
def convertToTypeName(t: Tree) = gen.convertToTypeName(t)
@@ -119,11 +121,6 @@ abstract class TreeBuilder {
private def getVariables(tree: Tree): List[(Name, Tree, Position)] =
new GetVarTraverser apply tree
- private def makeTuple(trees: List[Tree], isType: Boolean): Tree = {
- val tupString = "Tuple" + trees.length
- Apply(scalaDot(if (isType) newTypeName(tupString) else newTermName(tupString)), trees)
- }
-
def byNameApplication(tpe: Tree): Tree =
AppliedTypeTree(rootScalaDot(tpnme.BYNAME_PARAM_CLASS_NAME), List(tpe))
def repeatedApplication(tpe: Tree): Tree =
@@ -132,8 +129,13 @@ abstract class TreeBuilder {
def makeImportSelector(name: Name, nameOffset: Int): ImportSelector =
ImportSelector(name, nameOffset, name, nameOffset)
+ private def makeTuple(trees: List[Tree], isType: Boolean): Tree = {
+ val tupString = "Tuple" + trees.length
+ Apply(scalaDot(if (isType) newTypeName(tupString) else newTermName(tupString)), trees)
+ }
+
def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
- case Nil => Literal(())
+ case Nil => Literal(Constant())
case List(tree) if flattenUnary => tree
case _ => makeTuple(trees, false)
}
@@ -197,6 +199,15 @@ abstract class TreeBuilder {
}
}
+ /** Creates a tree representing new Object { stats }.
+ * To make sure an anonymous subclass of Object is created,
+ * if there are no stats, a () is added.
+ */
+ def makeAnonymousNew(stats: List[Tree]): Tree = {
+ val stats1 = if (stats.isEmpty) List(Literal(Constant(()))) else stats
+ makeNew(Nil, emptyValDef, stats1, ListOfNil, NoPosition, NoPosition)
+ }
+
/** Create positioned tree representing an object creation <new parents { stats }
* @param npos the position of the new
* @param cpos the position of the anonymous class starting with parents
@@ -215,12 +226,12 @@ abstract class TreeBuilder {
atPos(cpos) {
ClassDef(
Modifiers(FINAL), x, Nil,
- Template(parents, self, NoMods, List(Nil), argss, stats, cpos.focus))
+ Template(parents, self, NoMods, ListOfNil, argss, stats, cpos.focus))
}),
atPos(npos) {
New(
Ident(x) setPos npos.focus,
- List(Nil))
+ ListOfNil)
}
)
}
@@ -234,55 +245,62 @@ abstract class TreeBuilder {
Assign(lhs, rhs)
}
+ /** Tree for `od op`, start is start0 if od.pos is borked. */
+ def makePostfixSelect(start0: Int, end: Int, od: Tree, op: Name): Tree = {
+ val start = if (od.pos.isDefined) od.pos.startOrPoint else start0
+ atPos(r2p(start, end, end + op.length)) { new PostfixSelect(od, op.encode) }
+ }
+
/** A type tree corresponding to (possibly unary) intersection type */
def makeIntersectionTypeTree(tps: List[Tree]): Tree =
if (tps.tail.isEmpty) tps.head
else CompoundTypeTree(Template(tps, emptyValDef, Nil))
/** Create tree representing a while loop */
- def makeWhile(lname: TermName, cond: Tree, body: Tree): Tree = {
- val continu = atPos(o2p(body.pos.endOrPoint)) { Apply(Ident(lname), Nil) }
- val rhs = If(cond, Block(List(body), continu), Literal(()))
+ def makeWhile(startPos: Int, cond: Tree, body: Tree): Tree = {
+ val lname = freshTermName(nme.WHILE_PREFIX)
+ def default = wrappingPos(List(cond, body)) match {
+ case p if p.isDefined => p.endOrPoint
+ case _ => startPos
+ }
+ val continu = atPos(o2p(body.pos pointOrElse default)) { Apply(Ident(lname), Nil) }
+ val rhs = If(cond, Block(List(body), continu), Literal(Constant()))
LabelDef(lname, Nil, rhs)
}
/** Create tree representing a do-while loop */
def makeDoWhile(lname: TermName, body: Tree, cond: Tree): Tree = {
val continu = Apply(Ident(lname), Nil)
- val rhs = Block(List(body), If(cond, continu, Literal(())))
+ val rhs = Block(List(body), If(cond, continu, Literal(Constant())))
LabelDef(lname, Nil, rhs)
}
- /** Create block of statements `stats' */
+ /** Create block of statements `stats` */
def makeBlock(stats: List[Tree]): Tree =
- if (stats.isEmpty) Literal(())
- else if (!stats.last.isTerm) Block(stats, Literal(()))
+ if (stats.isEmpty) Literal(Constant())
+ else if (!stats.last.isTerm) Block(stats, Literal(Constant()))
else if (stats.length == 1) stats.head
else Block(stats.init, stats.last)
+ def makeFilter(tree: Tree, condition: Tree, scrutineeName: String): Tree = {
+ val cases = List(
+ CaseDef(condition, EmptyTree, Literal(Constant(true))),
+ CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
+ )
+ val matchTree = makeVisitor(cases, false, scrutineeName)
+
+ atPos(tree.pos)(Apply(Select(tree, nme.withFilter), matchTree :: Nil))
+ }
+
/** Create tree for for-comprehension generator <val pat0 <- rhs0> */
def makeGenerator(pos: Position, pat: Tree, valeq: Boolean, rhs: Tree): Enumerator = {
val pat1 = patvarTransformer.transform(pat)
val rhs1 =
- if (valeq) rhs
- else matchVarPattern(pat1) match {
- case Some(_) =>
- rhs
- case None =>
- atPos(rhs.pos) {
- Apply(
- Select(rhs, nme.filter),
- List(
- makeVisitor(
- List(
- CaseDef(pat1.duplicate, EmptyTree, Literal(true)),
- CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(false))),
- false,
- nme.CHECK_IF_REFUTABLE_STRING
- )))
- }
- }
- if (valeq) ValEq(pos, pat1, rhs1) else ValFrom(pos, pat1, rhs1)
+ if (valeq || treeInfo.isVarPatternDeep(pat)) rhs
+ else makeFilter(rhs, pat1.duplicate, nme.CHECK_IF_REFUTABLE_STRING)
+
+ if (valeq) ValEq(pos, pat1, rhs1)
+ else ValFrom(pos, pat1, rhs1)
}
def makeParam(pname: TermName, tpe: Tree) =
@@ -309,7 +327,7 @@ abstract class TreeBuilder {
* for (P <- G) E ==> G.foreach (P => E)
*
* Here and in the following (P => E) is interpreted as the function (P => E)
- * if P is a a variable pattern and as the partial function { case P => E } otherwise.
+ * if P is a variable pattern and as the partial function { case P => E } otherwise.
*
* 2.
*
@@ -317,7 +335,7 @@ abstract class TreeBuilder {
*
* 3.
*
- * for (P_1 <- G_1; val P_2 <- G_2; ...) ...
+ * for (P_1 <- G_1; P_2 <- G_2; ...) ...
* ==>
* G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...)
*
@@ -329,7 +347,7 @@ abstract class TreeBuilder {
*
* 5. For N < MaxTupleArity:
*
- * for (P_1 <- G; val P_2 = E_2; val P_N = E_N; ...)
+ * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...)
* ==>
* for (TupleN(P_1, P_2, ... P_N) <-
* for (x_1 @ P_1 <- G) yield {
@@ -469,15 +487,11 @@ abstract class TreeBuilder {
def makeVisitor(cases: List[CaseDef], checkExhaustive: Boolean): Tree =
makeVisitor(cases, checkExhaustive, "x$")
- private def makeUnchecked(expr: Tree): Tree = atPos(expr.pos) {
- Annotated(New(scalaDot(definitions.UncheckedClass.name), List(Nil)), expr)
- }
-
/** Create visitor <x => x match cases> */
def makeVisitor(cases: List[CaseDef], checkExhaustive: Boolean, prefix: String): Tree = {
- val x = freshTermName(prefix)
- val id = Ident(x)
- val sel = if (checkExhaustive) id else makeUnchecked(id)
+ val x = freshTermName(prefix)
+ val id = Ident(x)
+ val sel = if (checkExhaustive) id else gen.mkUnchecked(id)
Function(List(makeSyntheticParam(x)), Match(sel, cases))
}
@@ -511,37 +525,6 @@ abstract class TreeBuilder {
def makePatDef(pat: Tree, rhs: Tree): List[Tree] =
makePatDef(Modifiers(0), pat, rhs)
- /** For debugging only. Desugar a match statement like so:
- * val x = scrutinee
- * x match {
- * case case1 => ...
- * case _ => x match {
- * case case2 => ...
- * case _ => x match ...
- * }
- * }
- *
- * This way there are never transitions between nontrivial casedefs.
- * Of course many things break: exhaustiveness and unreachable checking
- * do not work, no switches will be generated, etc.
- */
- def makeSequencedMatch(selector: Tree, cases: List[CaseDef]): Tree = {
- require(cases.nonEmpty)
-
- val selectorName = freshTermName()
- val valdef = atPos(selector.pos)(ValDef(Modifiers(PRIVATE | LOCAL | SYNTHETIC), selectorName, TypeTree(), selector))
- val nselector = Ident(selectorName)
-
- def loop(cds: List[CaseDef]): Match = {
- def mkNext = CaseDef(Ident(nme.WILDCARD), EmptyTree, loop(cds.tail))
-
- if (cds.size == 1) Match(nselector, cds)
- else Match(selector, List(cds.head, mkNext))
- }
-
- Block(List(valdef), loop(cases))
- }
-
/** Create tree for pattern definition <mods val pat0 = rhs> */
def makePatDef(mods: Modifiers, pat: Tree, rhs: Tree): List[Tree] = matchVarPattern(pat) match {
case Some((name, tpt)) =>
@@ -558,14 +541,32 @@ abstract class TreeBuilder {
// val/var x_1 = t$._1
// ...
// val/var x_N = t$._N
- val pat1 = patvarTransformer.transform(pat)
+
+ val rhsUnchecked = gen.mkUnchecked(rhs)
+
+ // TODO: clean this up -- there is too much information packked into makePatDef's `pat` argument
+ // when it's a simple identifier (case Some((name, tpt)) -- above),
+ // pat should have the type ascription that was specified by the user
+ // however, in `case None` (here), we must be careful not to generate illegal pattern trees (such as `(a, b): Tuple2[Int, String]`)
+ // i.e., this must hold: pat1 match { case Typed(expr, tp) => assert(expr.isInstanceOf[Ident]) case _ => }
+ // if we encounter such an erroneous pattern, we strip off the type ascription from pat and propagate the type information to rhs
+ val (pat1, rhs1) = patvarTransformer.transform(pat) match {
+ // move the Typed ascription to the rhs
+ case Typed(expr, tpt) if !expr.isInstanceOf[Ident] =>
+ val rhsTypedUnchecked =
+ if (tpt.isEmpty) rhsUnchecked
+ else Typed(rhsUnchecked, tpt) setPos (rhs.pos union tpt.pos)
+ (expr, rhsTypedUnchecked)
+ case ok =>
+ (ok, rhsUnchecked)
+ }
val vars = getVariables(pat1)
val matchExpr = atPos((pat1.pos union rhs.pos).makeTransparent) {
Match(
- makeUnchecked(rhs),
+ rhs1,
List(
atPos(pat1.pos) {
- CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident, true))
+ CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident.apply, true))
}
))
}
@@ -578,7 +579,7 @@ abstract class TreeBuilder {
val tmp = freshTermName()
val firstDef =
atPos(matchExpr.pos) {
- ValDef(Modifiers(PRIVATE | LOCAL | SYNTHETIC | (mods.flags & LAZY)),
+ ValDef(Modifiers(PrivateLocal | SYNTHETIC | (mods.flags & LAZY)),
tmp, TypeTree(), matchExpr)
}
var cnt = 0
@@ -594,13 +595,19 @@ abstract class TreeBuilder {
def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
AppliedTypeTree(rootScalaDot(newTypeName("Function" + argtpes.length)), argtpes ::: List(restpe))
- /** Append implicit parameter section if `contextBounds' nonempty */
- def addEvidenceParams(owner: Name, vparamss: List[List[ValDef]], contextBounds: List[Tree]): List[List[ValDef]] =
+ /** Append implicit parameter section if `contextBounds` nonempty */
+ def addEvidenceParams(owner: Name, vparamss: List[List[ValDef]], contextBounds: List[Tree]): List[List[ValDef]] = {
if (contextBounds.isEmpty) vparamss
else {
val mods = Modifiers(if (owner.isTypeName) PARAMACCESSOR | LOCAL | PRIVATE else PARAM)
- def makeEvidenceParam(tpt: Tree) = ValDef(mods | IMPLICIT, freshTermName(nme.EVIDENCE_PARAM_PREFIX), tpt, EmptyTree)
- vparamss ::: List(contextBounds map makeEvidenceParam)
+ def makeEvidenceParam(tpt: Tree) = ValDef(mods | IMPLICIT | SYNTHETIC, freshTermName(nme.EVIDENCE_PARAM_PREFIX), tpt, EmptyTree)
+ val evidenceParams = contextBounds map makeEvidenceParam
+
+ val vparamssLast = if(vparamss.nonEmpty) vparamss.last else Nil
+ if(vparamssLast.nonEmpty && vparamssLast.head.mods.hasFlag(IMPLICIT))
+ vparamss.init ::: List(evidenceParams ::: vparamssLast)
+ else
+ vparamss ::: List(evidenceParams)
+ }
}
-
}
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index 522b1dd..fc5d437 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -7,31 +7,55 @@ package scala.tools.nsc
package backend
import io.AbstractFile
-import util.JavaClassPath
+import util.{ClassPath,JavaClassPath,MergedClassPath,DeltaClassPath}
import util.ClassPath.{ JavaContext, DefaultJavaContext }
import scala.tools.util.PathResolver
-trait JavaPlatform extends Platform[AbstractFile] {
+trait JavaPlatform extends Platform {
import global._
import definitions._
- lazy val classPath = new PathResolver(settings).result
- def rootLoader = new loaders.JavaPackageLoader(classPath)
+ type BinaryRepr = AbstractFile
+
+ private var currentClassPath: Option[MergedClassPath[BinaryRepr]] = None
+
+ def classPath: ClassPath[BinaryRepr] = {
+ if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result)
+ currentClassPath.get
+ }
+
+ /** Update classpath with a substituted subentry */
+ def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]]) =
+ currentClassPath = Some(new DeltaClassPath(currentClassPath.get, subst))
+
+ def rootLoader = new loaders.PackageLoader(classPath.asInstanceOf[ClassPath[platform.BinaryRepr]])
+ // [Martin] Why do we need a cast here?
+ // The problem is that we cannot specify at this point that global.platform should be of type JavaPlatform.
+ // So we cannot infer that global.platform.BinaryRepr is AbstractFile.
+ // Ideally, we should be able to write at the top of the JavaPlatform trait:
+ // val global: Global { val platform: JavaPlatform }
+ // import global._
+ // Right now, this does nothing because the concrete definition of platform in Global
+ // replaces the tighter abstract definition here. If we had DOT typing rules, the two
+ // types would be conjoined and everything would work out. Yet another reason to push for DOT.
private def depAnalysisPhase =
if (settings.make.isDefault) Nil
else List(dependencyAnalysis)
+ private def classEmitPhase =
+ if (settings.target.value == "jvm-1.5-fjbg") genJVM
+ else genASM
+
def platformPhases = List(
- flatten, // get rid of inner classes
- liftcode, // generate reified trees
- genJVM // generate .class files
+ flatten, // get rid of inner classes
+ classEmitPhase // generate .class files
) ++ depAnalysisPhase
- lazy val externalEquals = getMember(BoxesRunTimeClass, nme.equals_)
- lazy val externalEqualsNumNum = getMember(BoxesRunTimeClass, "equalsNumNum")
- lazy val externalEqualsNumChar = getMember(BoxesRunTimeClass, "equalsNumChar")
- lazy val externalEqualsNumObject = getMember(BoxesRunTimeClass, "equalsNumObject")
+ lazy val externalEquals = getDecl(BoxesRunTimeClass, nme.equals_)
+ lazy val externalEqualsNumNum = getDecl(BoxesRunTimeClass, nme.equalsNumNum)
+ lazy val externalEqualsNumChar = getDecl(BoxesRunTimeClass, nme.equalsNumChar)
+ lazy val externalEqualsNumObject = getDecl(BoxesRunTimeClass, nme.equalsNumObject)
/** We could get away with excluding BoxedBooleanClass for the
* purpose of equality testing since it need not compare equal
@@ -46,4 +70,12 @@ trait JavaPlatform extends Platform[AbstractFile] {
(sym isNonBottomSubClass BoxedCharacterClass) ||
(sym isNonBottomSubClass BoxedBooleanClass)
}
+
+ def newClassLoader(bin: AbstractFile): loaders.SymbolLoader =
+ new loaders.ClassfileLoader(bin)
+
+ def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean = true
+
+ def needCompile(bin: AbstractFile, src: AbstractFile) =
+ src.lastModified >= bin.lastModified
}
diff --git a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala b/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
index 1606e6b..4493685 100644
--- a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
@@ -1,18 +1,21 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
package backend
-import ch.epfl.lamp.compiler.msil.{ Type => MSILType }
-import util.MsilClassPath
+import ch.epfl.lamp.compiler.{ msil => msillib }
+import util.{ ClassPath, MsilClassPath }
import msil.GenMSIL
+import io.{ AbstractFile, MsilFile }
-trait MSILPlatform extends Platform[MSILType] {
+trait MSILPlatform extends Platform {
import global._
- import definitions.{ ComparatorClass, BoxedNumberClass, getMember, getClass }
+ import definitions.{ ComparatorClass, BoxedNumberClass, getMember }
+
+ type BinaryRepr = MsilFile
if (settings.verbose.value)
inform("[AssemRefs = " + settings.assemrefs.value + "]")
@@ -25,7 +28,12 @@ trait MSILPlatform extends Platform[MSILType] {
} with GenMSIL
lazy val classPath = MsilClassPath.fromSettings(settings)
- def rootLoader = new loaders.NamespaceLoader(classPath)
+ def rootLoader = new loaders.PackageLoader(classPath.asInstanceOf[ClassPath[platform.BinaryRepr]])
+ // See discussion in JavaPlatForm for why we need a cast here.
+
+ /** Update classpath with a substituted subentry */
+ def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]]) =
+ throw new UnsupportedOperationException("classpath invalidations not supported on MSIL")
def platformPhases = List(
genMSIL // generate .msil files
@@ -33,4 +41,29 @@ trait MSILPlatform extends Platform[MSILType] {
lazy val externalEquals = getMember(ComparatorClass.companionModule, nme.equals_)
def isMaybeBoxed(sym: Symbol) = sym isNonBottomSubClass BoxedNumberClass
+
+ def newClassLoader(bin: MsilFile): loaders.SymbolLoader = new loaders.MsilFileLoader(bin)
+
+ /**
+ * Tells whether a class should be loaded and entered into the package
+ * scope. On .NET, this method returns `false` for all synthetic classes
+ * (anonymous classes, implementation classes, module classes), their
+ * symtab is encoded in the pickle of another class.
+ */
+ def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean = {
+ if (cls.binary.isDefined) {
+ val typ = cls.binary.get.msilType
+ if (typ.IsDefined(loaders.clrTypes.SCALA_SYMTAB_ATTR, false)) {
+ val attrs = typ.GetCustomAttributes(loaders.clrTypes.SCALA_SYMTAB_ATTR, false)
+ assert(attrs.length == 1, attrs.length)
+ val a = attrs(0).asInstanceOf[msillib.Attribute]
+ // symtab_constr takes a byte array argument (the pickle), i.e. typ has a pickle.
+ // otherwise, symtab_default_constr was used, which marks typ as scala-synthetic.
+ a.getConstructor() == loaders.clrTypes.SYMTAB_CONSTR
+ } else true // always load non-scala types
+ } else true // always load source
+ }
+
+ def needCompile(bin: MsilFile, src: AbstractFile) =
+ false // always use compiled file on .net
}
diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala
index de27185..e2b22c0 100644
--- a/src/compiler/scala/tools/nsc/backend/Platform.scala
+++ b/src/compiler/scala/tools/nsc/backend/Platform.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -7,19 +7,26 @@ package scala.tools.nsc
package backend
import util.ClassPath
+import io.AbstractFile
/** The platform dependent pieces of Global.
*/
-trait Platform[T] {
+trait Platform {
val global: Global
import global._
+ /** The binary classfile representation type */
+ type BinaryRepr
+
/** The compiler classpath. */
- def classPath: ClassPath[T]
+ def classPath: ClassPath[BinaryRepr]
/** The root symbol loader. */
def rootLoader: LazyType
+ /** Update classpath with a substitution that maps entries to entries */
+ def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]])
+
/** Any platform-specific phases. */
def platformPhases: List[SubComponent]
@@ -28,5 +35,24 @@ trait Platform[T] {
/** The various ways a boxed primitive might materialize at runtime. */
def isMaybeBoxed(sym: Symbol): Boolean
+
+ /** Create a new class loader to load class file `bin` */
+ def newClassLoader(bin: BinaryRepr): loaders.SymbolLoader
+
+ /**
+ * Tells whether a class should be loaded and entered into the package
+ * scope. On .NET, this method returns `false` for all synthetic classes
+ * (anonymous classes, implementation classes, module classes), their
+ * symtab is encoded in the pickle of another class.
+ */
+ def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean
+
+ /**
+ * Tells whether a class with both a binary and a source representation
+ * (found in classpath and in sourcepath) should be re-compiled. Behaves
+ * on the JVM similar to javac, i.e. if the source file is newer than the classfile,
+ * a re-compile is triggered. On .NET by contrast classfiles always take precedence.
+ */
+ def needCompile(bin: BinaryRepr, src: AbstractFile): Boolean
}
diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
index 0948e07..8cbb5bc 100644
--- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
@@ -1,40 +1,30 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend
import scala.tools.nsc.backend.icode._
import scala.collection.{ mutable, immutable }
-/**
- * Scala primitive operations are represented as methods in Any and
- * AnyVal subclasses. Here we demultiplex them by providing a mapping
- * from their symbols to integers. Different methods exist for
- * different value types, but with the same meaning (like plus, minus,
- * etc.). They will all be mapped to the same int.
+/** Scala primitive operations are represented as methods in `Any` and
+ * `AnyVal` subclasses. Here we demultiplex them by providing a mapping
+ * from their symbols to integers. Different methods exist for
+ * different value types, but with the same meaning (like plus, minus,
+ * etc.). They will all be mapped to the same int.
*
- * <p>Note: The three equal methods have the following semantics:</p>
- * <ul>
- * <li>
- * <code>"=="</code> checks for null, and if non-null, calls
- * <code>java.lang.Object.equals</code><br/>
- * <code>(class: Any; modifier: final)</code>. Primitive: EQ
- * </li>
- * <li>
- * <code>"eq"</code> usual reference comparison<br/>
- * <code>(class: AnyRef; modifier: final)</code>. Primitive: ID
- * </li>
- * <li>
- * <code>"equals"</code> user-defined equality (Java semantics)<br/>
- * <code>(class: Object; modifier: none)</code>. Primitive: EQUALS
- * </li>
- * </ul>
+ * Note: The three equal methods have the following semantics:
+ * - `"=="` checks for `null`, and if non-null, calls
+ * `java.lang.Object.equals`
+ * `(class: Any; modifier: final)`. Primitive: `EQ`
+ * - `"eq"` usual reference comparison
+ * `(class: AnyRef; modifier: final)`. Primitive: `ID`
+ * - `"equals"` user-defined equality (Java semantics)
+ * `(class: Object; modifier: none)`. Primitive: `EQUALS`
*
- * Inspired from the scalac compiler.
+ * Inspired from the `scalac` compiler.
*/
abstract class ScalaPrimitives {
val global: Global
@@ -575,7 +565,7 @@ abstract class ScalaPrimitives {
import definitions._
val code = getPrimitive(fun)
- def elementType = atPhase(currentRun.typerPhase) {
+ def elementType = beforeTyper {
val arrayParent = tpe :: tpe.parents collectFirst {
case TypeRef(_, ArrayClass, elem :: Nil) => elem
}
diff --git a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
index d6fab2e..798a80e 100644
--- a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
+++ b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index 70bbd85..d50d4cd 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -1,41 +1,113 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend
package icode
import scala.collection.{ mutable, immutable }
-import mutable.{ ArrayBuffer }
-import util.{ Position, NoPosition }
+import mutable.{ ListBuffer, ArrayBuffer }
+import scala.reflect.internal.util.{ Position, NoPosition }
import backend.icode.analysis.ProgramPoint
+import scala.language.postfixOps
trait BasicBlocks {
self: ICodes =>
import opcodes._
- import global.{ settings, log, nme }
+ import global.{ ifDebug, settings, log, nme }
import nme.isExceptionResultName
+ /** Override Array creation for efficiency (to not go through reflection). */
+ private implicit val instructionTag: scala.reflect.ClassTag[Instruction] = new scala.reflect.ClassTag[Instruction] {
+ def runtimeClass: java.lang.Class[Instruction] = classOf[Instruction]
+ final override def newArray(len: Int): Array[Instruction] = new Array[Instruction](len)
+ }
+
+ object NoBasicBlock extends BasicBlock(-1, null)
+
/** This class represents a basic block. Each
* basic block contains a list of instructions that are
* either executed all, or none. No jumps
* to/from the "middle" of the basic block are allowed (modulo exceptions).
*/
- class BasicBlock(val label: Int, val method: IMethod)
- extends AnyRef
- with ProgramPoint[BasicBlock]
- with Seq[Instruction] {
+ class BasicBlock(val label: Int, val method: IMethod) extends ProgramPoint[BasicBlock] {
+ outer =>
import BBFlags._
def code = method.code
+ private final class SuccessorList() {
+ private var successors: List[BasicBlock] = Nil
+ /** This method is very hot! Handle with care. */
+ private def updateConserve() {
+ var lb: ListBuffer[BasicBlock] = null
+ var matches = 0
+ var remaining = successors
+ val direct = directSuccessors
+ var scratchHandlers: List[ExceptionHandler] = method.exh
+ var scratchBlocks: List[BasicBlock] = direct
+
+ def addBlock(bb: BasicBlock) {
+ if (matches < 0)
+ lb += bb
+ else if (remaining.isEmpty || bb != remaining.head) {
+ lb = ListBuffer[BasicBlock]() ++= (successors take matches) += bb
+ matches = -1
+ }
+ else {
+ matches += 1
+ remaining = remaining.tail
+ }
+ }
+
+ while (scratchBlocks ne Nil) {
+ addBlock(scratchBlocks.head)
+ scratchBlocks = scratchBlocks.tail
+ }
+ /** Return a list of successors for 'b' that come from exception handlers
+ * covering b's (non-exceptional) successors. These exception handlers
+ * might not cover 'b' itself. This situation corresponds to an
+ * exception being thrown as the first thing of one of b's successors.
+ */
+ while (scratchHandlers ne Nil) {
+ val handler = scratchHandlers.head
+ if (handler covers outer)
+ addBlock(handler.startBlock)
+
+ scratchBlocks = direct
+ while (scratchBlocks ne Nil) {
+ if (handler covers scratchBlocks.head)
+ addBlock(handler.startBlock)
+ scratchBlocks = scratchBlocks.tail
+ }
+ scratchHandlers = scratchHandlers.tail
+ }
+ // Blocks did not align: create a new list.
+ if (matches < 0)
+ successors = lb.toList
+ // Blocks aligned, but more blocks remain. Take a prefix of the list.
+ else if (remaining.nonEmpty)
+ successors = successors take matches
+ // Otherwise the list is unchanged, leave it alone.
+ }
+
+ /** This is called millions of times: it is performance sensitive. */
+ def updateSuccs() {
+ if (isEmpty) {
+ if (successors.nonEmpty)
+ successors = Nil
+ }
+ else updateConserve()
+ }
+ def toList = successors
+ }
+
/** Flags of this basic block. */
- private var flags: Int = 0
+ private[this] var flags: Int = 0
/** Does this block have the given flag? */
def hasFlag(flag: Int): Boolean = (flags & flag) != 0
@@ -76,30 +148,32 @@ trait BasicBlocks {
setFlag(DIRTYSUCCS | DIRTYPREDS)
/** Cached predecessors. */
- var preds: List[BasicBlock] = null
+ var preds: List[BasicBlock] = Nil
/** Local variables that are in scope at entry of this basic block. Used
* for debugging information.
*/
- var varsInScope: mutable.Set[Local] = new mutable.LinkedHashSet()
+ val varsInScope: mutable.Set[Local] = new mutable.LinkedHashSet()
/** ICode instructions, used as temporary storage while emitting code.
- * Once closed is called, only the `instrs' array should be used.
+ * Once closed is called, only the `instrs` array should be used.
*/
private var instructionList: List[Instruction] = Nil
-
private var instrs: Array[Instruction] = _
- override def toList: List[Instruction] =
+ def take(n: Int): Seq[Instruction] =
+ if (closed) instrs take n else instructionList takeRight n reverse
+
+ def toList: List[Instruction] =
if (closed) instrs.toList else instructionList.reverse
/** Return an iterator over the instructions in this basic block. */
def iterator: Iterator[Instruction] =
- if (closed) instrs.iterator else instructionList.reverse.iterator
+ if (closed) instrs.iterator else instructionList.reverseIterator
/** return the underlying array of instructions */
def getArray: Array[Instruction] = {
- assert(closed)
+ assert(closed, this)
instrs
}
@@ -113,21 +187,42 @@ trait BasicBlocks {
* Returns -1 if not found.
*/
def indexOf(inst: Instruction): Int = {
- assert(closed)
+ assert(closed, this)
instrs indexWhere (_ eq inst)
}
/** Apply a function to all the instructions of the block. */
- override def foreach[U](f: Instruction => U) = {
- if (!closed) {
- method.dump
- global.abort("Traversing an open block!: " + label + " in " + method)
- }
- instrs foreach f
+ final def foreach[U](f: Instruction => U) = {
+ if (!closed) dumpMethodAndAbort(method, this)
+ else instrs foreach f
+
+ // !!! If I replace "instrs foreach f" with the following:
+ // var i = 0
+ // val len = instrs.length
+ // while (i < len) {
+ // f(instrs(i))
+ // i += 1
+ // }
+ //
+ // Then when compiling under -optimise, quick.plugins fails as follows:
+ //
+ // quick.plugins:
+ // [mkdir] Created dir: /scratch/trunk6/build/quick/classes/continuations-plugin
+ // [scalacfork] Compiling 5 files to /scratch/trunk6/build/quick/classes/continuations-plugin
+ // [scalacfork] error: java.lang.VerifyError: (class: scala/tools/nsc/typechecker/Implicits$ImplicitSearch, method: typedImplicit0 signature: (Lscala/tools/nsc/typechecker/Implicits$ImplicitInfo;Z)Lscala/tools/nsc/typechecker/Implicits$SearchResult;) Incompatible object argument for function call
+ // [scalacfork] at scala.tools.nsc.typechecker.Implicits$class.inferImplicit(Implicits.scala:67)
+ // [scalacfork] at scala.tools.nsc.Global$$anon$1.inferImplicit(Global.scala:419)
+ // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.wrapImplicit$1(Typers.scala:170)
+ // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.inferView(Typers.scala:174)
+ // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.adapt(Typers.scala:963)
+ // [scalacfork] at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4378)
+ //
+ // This is bad and should be understood/eliminated.
}
/** The number of instructions in this basic block so far. */
def length = if (closed) instrs.length else instructionList.length
+ def size = length
/** Return the n-th instruction. */
def apply(n: Int): Instruction =
@@ -136,8 +231,8 @@ trait BasicBlocks {
///////////////////// Substitutions ///////////////////////
/**
- * Replace the instruction at the given position. Used by labels when
- * they are anchored. It retains the position of the previous instruction.
+ * Replace the instruction at the given position. Used by labels when they are anchored.
+ * The replacing instruction is given the nsc.util.Position of the instruction it replaces.
*/
def replaceInstruction(pos: Int, instr: Instruction): Boolean = {
assert(closed, "Instructions can be replaced only after the basic block is closed")
@@ -149,8 +244,8 @@ trait BasicBlocks {
/**
* Replace the given instruction with the new one.
- * Returns `true' if it actually changed something.
- * It retains the position of the previous instruction.
+ * Returns `true` if it actually changed something.
+ * The replacing instruction is given the nsc.util.Position of the instruction it replaces.
*/
def replaceInstruction(oldInstr: Instruction, newInstr: Instruction): Boolean = {
assert(closed, "Instructions can be replaced only after the basic block is closed")
@@ -198,7 +293,7 @@ trait BasicBlocks {
* @param positions ...
*/
def removeInstructionsAt(positions: Int*) {
- assert(closed)
+ assert(closed, this)
instrs = instrs.indices.toArray filterNot positions.toSet map instrs
code.touched = true
}
@@ -208,7 +303,7 @@ trait BasicBlocks {
*/
def removeLastInstruction() {
if (closed)
- removeInstructionsAt(size)
+ removeInstructionsAt(length)
else {
instructionList = instructionList.tail
code.touched = true
@@ -225,7 +320,12 @@ trait BasicBlocks {
else
instrs.zipWithIndex collect {
case (oldInstr, i) if map contains oldInstr =>
- code.touched |= replaceInstruction(i, map(oldInstr))
+ // SI-6288 clone important here because `replaceInstruction` assigns
+ // a position to `newInstr`. Without this, a single instruction can
+ // be added twice, and the position last position assigned clobbers
+ // all previous positions in other usages.
+ val newInstr = map(oldInstr).clone()
+ code.touched |= replaceInstruction(i, newInstr)
}
////////////////////// Emit //////////////////////
@@ -248,7 +348,7 @@ trait BasicBlocks {
print()
Console.println("trying to emit: " + instr)
} */
- assert(!closed || ignore, "BasicBlock closed")
+ assert(!closed || ignore, this)
if (ignore) {
if (settings.debug.value) {
@@ -268,8 +368,8 @@ trait BasicBlocks {
}
}
- def emit(instrs: Seq[Instruction]) {
- instrs foreach (i => emit(i, i.pos))
+ def emit(is: Seq[Instruction]) {
+ is foreach (i => emit(i, i.pos))
}
/** The semantics of this are a little odd but it's designed to work
@@ -280,38 +380,44 @@ trait BasicBlocks {
* calling setPos on any instruction using the two arg version which
* I wanted to include in a call to emitOnly.
*/
- def emitOnly(instrs: Instruction*) {
- instrs foreach (i => if (i.pos == NoPosition) emit(i) else emit(i, i.pos))
- this.close
+ def emitOnly(is: Instruction*) {
+ is foreach (i => if (i.pos == NoPosition) emit(i) else emit(i, i.pos))
+ this.close()
}
/** do nothing if block is already closed */
def closeWith(instr: Instruction) {
- if (closed) () else {
+ if (!closed) {
emit(instr)
- close
+ close()
}
}
def closeWith(instr: Instruction, pos: Position) {
- if (closed) () else {
+ if (!closed) {
emit(instr, pos)
- close
+ close()
}
}
/** Close the block */
def close() {
- assert(!closed || ignore)
- assert(instructionList.nonEmpty, "Empty block.")
- closed = true
- setFlag(DIRTYSUCCS)
- instructionList = instructionList.reverse
- instrs = instructionList.toArray
+ assert(!closed || ignore, this)
+ assert(instructionList.nonEmpty, "Empty block: " + this)
+ if (ignore && closed) { // redundant `ignore &&` for clarity -- we should never be in state `!ignore && closed`
+ // not doing anything to this block is important...
+ // because the else branch reverses innocent blocks, which is wrong when they're in ignore mode (and closed)
+ // reversing the instructions when (closed && ignore) wreaks havoc for nested label jumps (see comments in genLoad)
+ } else {
+ closed = true
+ setFlag(DIRTYSUCCS)
+ instructionList = instructionList.reverse
+ instrs = instructionList.toArray
+ }
}
def open() {
- assert(closed)
+ assert(closed, this)
closed = false
ignore = false
touched = true
@@ -321,10 +427,11 @@ trait BasicBlocks {
def clear() {
instructionList = Nil
instrs = null
- preds = null
+ preds = Nil
}
- override def isEmpty: Boolean = instructionList.isEmpty
+ final def isEmpty = instructionList.isEmpty
+ final def nonEmpty = !isEmpty
/** Enter ignore mode: new 'emit'ted instructions will not be
* added to this basic block. It makes the generation of THROW
@@ -335,63 +442,50 @@ trait BasicBlocks {
}
def exitIgnoreMode() {
- assert(ignore, "Exit ignore mode when not in ignore mode.")
+ assert(ignore, "Exit ignore mode when not in ignore mode: " + this)
ignore = false
}
/** Return the last instruction of this basic block. */
def lastInstruction =
- if (closed) instrs.last
+ if (closed) instrs(instrs.length - 1)
else instructionList.head
def firstInstruction =
if (closed) instrs(0)
else instructionList.last
+ def exceptionSuccessors: List[BasicBlock] =
+ exceptionSuccessorsForBlock(this)
+
def exceptionSuccessorsForBlock(block: BasicBlock): List[BasicBlock] =
method.exh collect { case x if x covers block => x.startBlock }
/** Cached value of successors. Must be recomputed whenever a block in the current method is changed. */
- private var succs: List[BasicBlock] = Nil
- private def updateSuccs() {
- resetFlag(DIRTYSUCCS)
- succs =
- if (isEmpty) Nil
- else exceptionSuccessors ++ directSuccessors ++ indirectExceptionSuccessors
- }
+ private val succs = new SuccessorList
- def successors : List[BasicBlock] = {
- if (touched) updateSuccs()
- succs
+ def successors: List[BasicBlock] = {
+ if (touched) {
+ succs.updateSuccs()
+ resetFlag(DIRTYSUCCS)
+ }
+ succs.toList
}
def directSuccessors: List[BasicBlock] =
if (isEmpty) Nil else lastInstruction match {
- case JUMP(whereto) => List(whereto)
+ case JUMP(whereto) => whereto :: Nil
case CJUMP(succ, fail, _, _) => fail :: succ :: Nil
case CZJUMP(succ, fail, _, _) => fail :: succ :: Nil
case SWITCH(_, labels) => labels
case RETURN(_) => Nil
case THROW(_) => Nil
case _ =>
- if (closed) {
- dump
- global.abort("The last instruction is not a control flow instruction: " + lastInstruction)
- }
+ if (closed)
+ dumpClassesAndAbort("The last instruction is not a control flow instruction: " + lastInstruction)
else Nil
}
- def exceptionSuccessors: List[BasicBlock] =
- exceptionSuccessorsForBlock(this)
-
- /** Return a list of successors for 'b' that come from exception handlers
- * covering b's (non-exceptional) successors. These exception handlers
- * might not cover 'b' itself. This situation corresponds to an
- * exception being thrown as the first thing of one of b's successors.
- */
- def indirectExceptionSuccessors: List[BasicBlock] =
- directSuccessors flatMap exceptionSuccessorsForBlock distinct
-
/** Returns the predecessors of this block. */
def predecessors: List[BasicBlock] = {
if (hasFlag(DIRTYPREDS)) {
@@ -440,11 +534,11 @@ trait BasicBlocks {
object BBFlags {
val flagMap = Map[Int, String](
LOOP_HEADER -> "loopheader",
- IGNORING -> "ignore",
- EX_HEADER -> "exheader",
- CLOSED -> "closed",
- DIRTYSUCCS -> "dirtysuccs",
- DIRTYPREDS -> "dirtypreds"
+ IGNORING -> "ignore",
+ EX_HEADER -> "exheader",
+ CLOSED -> "closed",
+ DIRTYSUCCS -> "dirtysuccs",
+ DIRTYPREDS -> "dirtypreds"
)
def flagsToString(flags: Int) = {
flagMap collect { case (bit, name) if (bit & flags) != 0 => "<" + name + ">" } mkString " "
diff --git a/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala b/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
index 0a7a155..0856f2f 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
index 993002d..2cebf7a 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
@@ -1,30 +1,28 @@
/* NSC -- new scala compiler
- * Copyright 2005 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend
package icode
-import scala.collection.{ mutable, immutable, generic }
-import util.{ Position, NoPosition }
+import scala.collection.{ mutable, immutable }
/**
- * Exception handlers are pieces of code that `handle' exceptions on
+ * Exception handlers are pieces of code that `handle` exceptions on
* the covered basic blocks. Since Scala's exception handling uses
* pattern matching instead of just class names to identify handlers,
- * all our handlers will catch `Throwable' and rely on proper ordering
+ * all our handlers will catch `Throwable` and rely on proper ordering
* in the generated code to preserve nesting.
*/
trait ExceptionHandlers {
self: ICodes =>
- import global.{ definitions, Symbol, NoSymbol }
+ import global._
import definitions.{ ThrowableClass }
- class ExceptionHandler(val method: IMethod, val label: String, val cls: Symbol, val pos: Position) {
+ class ExceptionHandler(val method: IMethod, val label: TermName, val cls: Symbol, val pos: Position) {
def loadExceptionClass = if (cls == NoSymbol) ThrowableClass else cls
private var _startBlock: BasicBlock = _;
var finalizer: Finalizer = _;
@@ -46,7 +44,7 @@ trait ExceptionHandlers {
this
}
- /** Is `b' covered by this exception handler? */
+ /** Is `b` covered by this exception handler? */
def covers(b: BasicBlock): Boolean = covered(b)
/** The body of this exception handler. May contain 'dead' blocks (which will not
@@ -69,12 +67,12 @@ trait ExceptionHandlers {
def dup: ExceptionHandler = new ExceptionHandler(this)
}
- class Finalizer(method: IMethod, label: String, pos: Position) extends ExceptionHandler(method, label, NoSymbol, pos) {
+ class Finalizer(method: IMethod, label: TermName, pos: Position) extends ExceptionHandler(method, label, NoSymbol, pos) {
override def toString() = "finalizer_" + label
override def dup: Finalizer = new Finalizer(method, label, pos)
}
- object NoFinalizer extends Finalizer(null, "<no finalizer>", NoPosition) {
+ object NoFinalizer extends Finalizer(null, newTermNameCached("<no finalizer>"), NoPosition) {
override def startBlock: BasicBlock = sys.error("NoFinalizer cannot have a start block.");
override def setStartBlock(b: BasicBlock): Unit = sys.error("NoFinalizer cannot have a start block.");
override def dup = this
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index d9e8a79..71a5b85 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -13,6 +13,7 @@ import scala.collection.mutable.{ ListBuffer, Buffer }
import scala.tools.nsc.symtab._
import scala.annotation.switch
import PartialFunction._
+import scala.language.postfixOps
/** This class ...
*
@@ -24,7 +25,7 @@ abstract class GenICode extends SubComponent {
import icodes._
import icodes.opcodes._
import definitions.{
- ArrayClass, ObjectClass, ThrowableClass, StringClass, StringModule, NothingClass, NullClass, AnyRefClass,
+ ArrayClass, ObjectClass, ThrowableClass, StringClass, StringModule, AnyRefClass,
Object_equals, Object_isInstanceOf, Object_asInstanceOf, ScalaRunTimeModule,
BoxedNumberClass, BoxedCharacterClass,
getMember
@@ -39,14 +40,16 @@ abstract class GenICode extends SubComponent {
override def newPhase(prev: Phase) = new ICodePhase(prev)
- private def debugLog(msg: => String): Unit =
- if (settings.debug.value) log(msg)
+ @inline private def debugassert(cond: => Boolean, msg: => Any) {
+ if (settings.debug.value)
+ assert(cond, msg)
+ }
class ICodePhase(prev: Phase) extends StdPhase(prev) {
override def description = "Generate ICode from the AST"
- var unit: CompilationUnit = _
+ var unit: CompilationUnit = NoCompilationUnit
override def run() {
scalaPrimitives.init
@@ -59,7 +62,7 @@ abstract class GenICode extends SubComponent {
unit.icode.clear
informProgress("Generating icode for " + unit)
gen(unit.body)
- this.unit = null
+ this.unit = NoCompilationUnit
}
def gen(tree: Tree): Context = gen(tree, new Context())
@@ -70,6 +73,14 @@ abstract class GenICode extends SubComponent {
ctx1
}
+ /** If the selector type has a member with the right name,
+ * it is the host class; otherwise the symbol's owner.
+ */
+ def findHostClass(selector: Type, sym: Symbol) = selector member sym.name match {
+ case NoSymbol => log(s"Rejecting $selector as host class for $sym") ; sym.owner
+ case _ => selector.typeSymbol
+ }
+
/////////////////// Code generation ///////////////////////
def gen(tree: Tree, ctx: Context): Context = tree match {
@@ -79,7 +90,7 @@ abstract class GenICode extends SubComponent {
gen(stats, ctx setPackage pid.name)
case ClassDef(mods, name, _, impl) =>
- log("Generating class: " + tree.symbol.fullName)
+ debuglog("Generating class: " + tree.symbol.fullName)
val outerClass = ctx.clazz
ctx setClass (new IClass(tree.symbol) setCompilationUnit unit)
addClassFields(ctx, tree.symbol);
@@ -92,16 +103,15 @@ abstract class GenICode extends SubComponent {
// !! modules should be eliminated by refcheck... or not?
case ModuleDef(mods, name, impl) =>
- abort("Modules should not reach backend!")
+ abort("Modules should not reach backend! " + tree)
case ValDef(mods, name, tpt, rhs) =>
ctx // we use the symbol to add fields
case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- if (settings.debug.value)
- log("Entering method " + name)
+ debuglog("Entering method " + name)
val m = new IMethod(tree.symbol)
- m.sourceFile = unit.source.toString()
+ m.sourceFile = unit.source
m.returnType = if (tree.symbol.isConstructor) UNIT
else toTypeKind(tree.symbol.info.resultType)
ctx.clazz.addMethod(m)
@@ -120,7 +130,10 @@ abstract class GenICode extends SubComponent {
case Block(_, Return(_)) => ()
case Return(_) => ()
case EmptyTree =>
- globalError("Concrete method has no definition: " + tree)
+ globalError("Concrete method has no definition: " + tree + (
+ if (settings.debug.value) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")"
+ else "")
+ )
case _ => if (ctx1.bb.isEmpty)
ctx1.bb.closeWith(RETURN(m.returnType), rhs.pos)
else
@@ -129,7 +142,7 @@ abstract class GenICode extends SubComponent {
if (!ctx1.bb.closed) ctx1.bb.close
prune(ctx1.method)
} else
- ctx1.method.setCode(null)
+ ctx1.method.setCode(NoCode)
ctx1
case Template(_, _, body) =>
@@ -175,7 +188,7 @@ abstract class GenICode extends SubComponent {
}
private def genThrow(expr: Tree, ctx: Context): (Context, TypeKind) = {
- require(expr.tpe <:< ThrowableClass.tpe)
+ require(expr.tpe <:< ThrowableClass.tpe, expr.tpe)
val thrownKind = toTypeKind(expr.tpe)
val ctx1 = genLoad(expr, ctx, thrownKind)
@@ -194,13 +207,11 @@ abstract class GenICode extends SubComponent {
var ctx1 = ctx
var resKind = toTypeKind(larg.tpe)
- if (settings.debug.value) {
- assert(args.length <= 1,
+ debugassert(args.length <= 1,
"Too many arguments for primitive function: " + fun.symbol)
- assert(resKind.isNumericType | resKind == BOOL,
+ debugassert(resKind.isNumericType | resKind == BOOL,
resKind.toString() + " is not a numeric or boolean type " +
"[operation: " + fun.symbol + "]")
- }
args match {
// unary operation
@@ -271,16 +282,14 @@ abstract class GenICode extends SubComponent {
if (scalaPrimitives.isArrayGet(code)) {
// load argument on stack
- if (settings.debug.value)
- assert(args.length == 1,
+ debugassert(args.length == 1,
"Too many arguments for array get operation: " + tree);
ctx1 = genLoad(args.head, ctx1, INT)
generatedType = elem
ctx1.bb.emit(LOAD_ARRAY_ITEM(elementType), tree.pos)
}
else if (scalaPrimitives.isArraySet(code)) {
- if (settings.debug.value)
- assert(args.length == 2,
+ debugassert(args.length == 2,
"Too many arguments for array set operation: " + tree);
ctx1 = genLoad(args.head, ctx1, INT)
ctx1 = genLoad(args.tail.head, ctx1, toTypeKind(args.tail.head.tpe))
@@ -316,7 +325,7 @@ abstract class GenICode extends SubComponent {
MONITOR_ENTER() setPos tree.pos
))
ctx1.enterSynchronized(monitor)
- debugLog("synchronized block start")
+ debuglog("synchronized block start")
ctx1 = ctx1.Try(
bodyCtx => {
@@ -340,7 +349,7 @@ abstract class GenICode extends SubComponent {
exhCtx
})), EmptyTree, tree)
- debugLog("synchronized block end with block %s closed=%s".format(ctx1.bb, ctx1.bb.closed))
+ debuglog("synchronized block end with block %s closed=%s".format(ctx1.bb, ctx1.bb.closed))
ctx1.exitSynchronized(monitor)
if (hasResult)
ctx1.bb.emit(LOAD_LOCAL(monitorResult))
@@ -368,12 +377,12 @@ abstract class GenICode extends SubComponent {
val resKind = if (hasUnitBranch) UNIT else ifKind
if (hasUnitBranch)
- debugLog("Will drop result from an if branch")
+ debuglog("Will drop result from an if branch")
thenCtx = genLoad(thenp, thenCtx, resKind)
elseCtx = genLoad(elsep, elseCtx, resKind)
- assert(!settings.debug.value || !(hasUnitBranch && expectedType != UNIT),
+ debugassert(!hasUnitBranch || expectedType == UNIT,
"I produce UNIT in a context where " + expectedType + " is expected!")
// alternatives may be already closed by a tail-recursive jump
@@ -393,15 +402,15 @@ abstract class GenICode extends SubComponent {
for (CaseDef(pat, _, body) <- catches.reverse) yield {
def genWildcardHandler(sym: Symbol): (Symbol, TypeKind, Context => Context) =
(sym, kind, ctx => {
- ctx.bb.emit(DROP(REFERENCE(sym)))
+ ctx.bb.emit(DROP(REFERENCE(sym))) // drop the loaded exception
genLoad(body, ctx, kind)
})
pat match {
case Typed(Ident(nme.WILDCARD), tpt) => genWildcardHandler(tpt.tpe.typeSymbol)
case Ident(nme.WILDCARD) => genWildcardHandler(ThrowableClass)
- case Bind(name, _) =>
- val exception = ctx.method addLocal new Local(pat.symbol, toTypeKind(pat.symbol.tpe), false)
+ case Bind(_, _) =>
+ val exception = ctx.method addLocal new Local(pat.symbol, toTypeKind(pat.symbol.tpe), false) // the exception will be loaded and stored into this local
(pat.symbol.tpe.typeSymbol, kind, {
ctx: Context =>
@@ -435,9 +444,8 @@ abstract class GenICode extends SubComponent {
else if (isArrayOp(code))
genArrayOp(tree, ctx, code, expectedType)
else if (isLogicalOp(code) || isComparisonOp(code)) {
- val trueCtx = ctx.newBlock
- val falseCtx = ctx.newBlock
- val afterCtx = ctx.newBlock
+ val trueCtx, falseCtx, afterCtx = ctx.newBlock
+
genCond(tree, ctx, trueCtx, falseCtx)
trueCtx.bb.emitOnly(
CONSTANT(Constant(true)) setPos tree.pos,
@@ -456,42 +464,32 @@ abstract class GenICode extends SubComponent {
genCoercion(tree, ctx1, code)
(ctx1, scalaPrimitives.generatedKind(code))
}
- else abort("Primitive operation not handled yet: " + sym.fullName + "(" +
- fun.symbol.simpleName + ") " + " at: " + (tree.pos))
+ else abort(
+ "Primitive operation not handled yet: " + sym.fullName + "(" +
+ fun.symbol.simpleName + ") " + " at: " + (tree.pos)
+ )
}
/**
* forMSIL
*/
- private def msil_IsValuetypeInstMethod(msym: Symbol) = {
- val mMSILOpt = loaders.clrTypes.methods.get(msym)
- if (mMSILOpt.isEmpty) false
- else {
- val mMSIL = mMSILOpt.get
- val res = mMSIL.IsInstance && mMSIL.DeclaringType.IsValueType
- res
- }
- }
-
- /**
- * forMSIL
- */
- private def msil_IsValuetypeInstField(fsym: Symbol) = {
- val fMSILOpt = loaders.clrTypes.fields.get(fsym)
- if (fMSILOpt.isEmpty) false
- else {
- val fMSIL = fMSILOpt.get
- val res = !fMSIL.IsStatic && fMSIL.DeclaringType.IsValueType
- res
- }
- }
+ private def msil_IsValuetypeInstMethod(msym: Symbol) = (
+ loaders.clrTypes.methods get msym exists (mMSIL =>
+ mMSIL.IsInstance && mMSIL.DeclaringType.IsValueType
+ )
+ )
+ private def msil_IsValuetypeInstField(fsym: Symbol) = (
+ loaders.clrTypes.fields get fsym exists (fMSIL =>
+ !fMSIL.IsStatic && fMSIL.DeclaringType.IsValueType
+ )
+ )
/**
* forMSIL: Adds a local var, the emitted code requires one more slot on the stack as on entry
*/
private def msil_genLoadZeroOfNonEnumValuetype(ctx: Context, kind: TypeKind, pos: Position, leaveAddressOnStackInstead: Boolean) {
val REFERENCE(clssym) = kind
- assert(loaders.clrTypes.isNonEnumValuetype(clssym))
+ assert(loaders.clrTypes.isNonEnumValuetype(clssym), clssym)
val local = ctx.makeLocal(pos, clssym.tpe, "tmp")
ctx.method.addLocal(local)
ctx.bb.emit(CIL_LOAD_LOCAL_ADDRESS(local), pos)
@@ -509,8 +507,7 @@ abstract class GenICode extends SubComponent {
private def msil_genLoadAddressOf(tree: Tree, ctx: Context, expectedType: TypeKind, butRawValueIsAlsoGoodEnough: Boolean): Context = {
var generatedType = expectedType
var addressTaken = false
- if (settings.debug.value)
- log("at line: " + (if (tree.pos.isDefined) tree.pos.line else tree.pos))
+ debuglog("at line: " + (if (tree.pos.isDefined) tree.pos.line else tree.pos))
var resCtx: Context = tree match {
@@ -558,8 +555,7 @@ abstract class GenICode extends SubComponent {
if (scalaPrimitives.isArrayGet(code)) {
var ctx1 = genLoad(arrayObj, ctx, k)
// load argument on stack
- if (settings.debug.value)
- assert(args.length == 1, "Too many arguments for array get operation: " + tree);
+ debugassert(args.length == 1, "Too many arguments for array get operation: " + tree)
ctx1 = genLoad(args.head, ctx1, INT)
generatedType = elementType // actually "managed pointer to element type" but the callsite is aware of this
ctx1.bb.emit(CIL_LOAD_ARRAY_ITEM_ADDRESS(elementType), tree.pos)
@@ -581,7 +577,7 @@ abstract class GenICode extends SubComponent {
Even if it's not, the code below to handler !addressTaken below. */
}
- if(!addressTaken) {
+ if (!addressTaken) {
resCtx = genLoad(tree, ctx, expectedType)
if (!butRawValueIsAlsoGoodEnough) {
// raw value on stack (must be an intermediate result, e.g. returned by method call), take address
@@ -612,54 +608,58 @@ abstract class GenICode extends SubComponent {
*/
private def genLoad(tree: Tree, ctx: Context, expectedType: TypeKind): Context = {
var generatedType = expectedType
- if (settings.debug.value)
- log("at line: " + (if (tree.pos.isDefined) tree.pos.line else tree.pos))
+ debuglog("at line: " + (if (tree.pos.isDefined) tree.pos.line else tree.pos))
val resCtx: Context = tree match {
case LabelDef(name, params, rhs) =>
- val ctx1 = ctx.newBlock
- if (nme.isLoopHeaderLabel(name))
- ctx1.bb.loopHeader = true
-
- ctx1.labels.get(tree.symbol) match {
- case Some(label) =>
- log("Found existing label for " + tree.symbol)
- label.anchor(ctx1.bb)
- label.patch(ctx.method.code)
-
- case None =>
- val pair = (tree.symbol -> (new Label(tree.symbol) anchor ctx1.bb setParams (params map (_.symbol))))
- log("Adding label " + tree.symbol + " in genLoad.")
- ctx1.labels += pair
- ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)));
- }
-
- ctx.bb.closeWith(JUMP(ctx1.bb), tree.pos)
- genLoad(rhs, ctx1, expectedType /*toTypeKind(tree.symbol.info.resultType)*/)
+ def genLoadLabelDef = {
+ val ctx1 = ctx.newBlock
+ if (nme.isLoopHeaderLabel(name))
+ ctx1.bb.loopHeader = true
+
+ ctx1.labels.get(tree.symbol) match {
+ case Some(label) =>
+ debuglog("Found existing label for " + tree.symbol.fullLocationString)
+ label.anchor(ctx1.bb)
+ label.patch(ctx.method.code)
+
+ case None =>
+ val pair = (tree.symbol -> (new Label(tree.symbol) anchor ctx1.bb setParams (params map (_.symbol))))
+ debuglog("Adding label " + tree.symbol.fullLocationString + " in genLoad.")
+ ctx1.labels += pair
+ ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)));
+ }
- case ValDef(_, nme.THIS, _, _) =>
- if (settings.debug.value) log("skipping trivial assign to _$this: " + tree)
- ctx
+ ctx.bb.closeWith(JUMP(ctx1.bb), tree.pos)
+ genLoad(rhs, ctx1, expectedType /*toTypeKind(tree.symbol.info.resultType)*/)
+ }
+ genLoadLabelDef
- case ValDef(_, _, _, rhs) =>
- val sym = tree.symbol
- val local = ctx.method.addLocal(new Local(sym, toTypeKind(sym.info), false))
+ case ValDef(_, name, _, rhs) =>
+ def genLoadValDef =
+ if (name == nme.THIS) {
+ debuglog("skipping trivial assign to _$this: " + tree)
+ ctx
+ } else {
+ val sym = tree.symbol
+ val local = ctx.method.addLocal(new Local(sym, toTypeKind(sym.info), false))
- if (rhs == EmptyTree) {
- if (settings.debug.value)
- log("Uninitialized variable " + tree + " at: " + (tree.pos));
- ctx.bb.emit(getZeroOf(local.kind))
- }
+ if (rhs == EmptyTree) {
+ debuglog("Uninitialized variable " + tree + " at: " + (tree.pos));
+ ctx.bb.emit(getZeroOf(local.kind))
+ }
- var ctx1 = ctx
- if (rhs != EmptyTree)
- ctx1 = genLoad(rhs, ctx, local.kind);
+ var ctx1 = ctx
+ if (rhs != EmptyTree)
+ ctx1 = genLoad(rhs, ctx, local.kind);
- ctx1.bb.emit(STORE_LOCAL(local), tree.pos)
- ctx1.scope.add(local)
- ctx1.bb.emit(SCOPE_ENTER(local))
- generatedType = UNIT
- ctx1
+ ctx1.bb.emit(STORE_LOCAL(local), tree.pos)
+ ctx1.scope.add(local)
+ ctx1.bb.emit(SCOPE_ENTER(local))
+ generatedType = UNIT
+ ctx1
+ }
+ genLoadValDef
case t @ If(cond, thenp, elsep) =>
val (newCtx, resKind) = genLoadIf(t, ctx, expectedType)
@@ -667,54 +667,55 @@ abstract class GenICode extends SubComponent {
newCtx
case Return(expr) =>
- val returnedKind = toTypeKind(expr.tpe)
- log("Return(" + expr + ") with returnedKind = " + returnedKind)
-
- var ctx1 = genLoad(expr, ctx, returnedKind)
- lazy val tmp = ctx1.makeLocal(tree.pos, expr.tpe, "tmp")
- val saved = savingCleanups(ctx1) {
- var saved = false
- ctx1.cleanups foreach {
- case MonitorRelease(m) =>
- if (settings.debug.value)
- log("removing " + m + " from cleanups: " + ctx1.cleanups)
- ctx1.bb.emit(Seq(LOAD_LOCAL(m), MONITOR_EXIT()))
- ctx1.exitSynchronized(m)
-
- case Finalizer(f, finalizerCtx) =>
- if (settings.debug.value)
- log("removing " + f + " from cleanups: " + ctx1.cleanups)
-
- if (returnedKind != UNIT && mayCleanStack(f)) {
- log("Emitting STORE_LOCAL for " + tmp + " to save finalizer.")
- ctx1.bb.emit(STORE_LOCAL(tmp))
- saved = true
- }
+ def genLoadReturn = {
+ val returnedKind = toTypeKind(expr.tpe)
+ debuglog("Return(" + expr + ") with returnedKind = " + returnedKind)
+
+ var ctx1 = genLoad(expr, ctx, returnedKind)
+ lazy val tmp = ctx1.makeLocal(tree.pos, expr.tpe, "tmp")
+ val saved = savingCleanups(ctx1) {
+ var savedFinalizer = false
+ ctx1.cleanups foreach {
+ case MonitorRelease(m) =>
+ debuglog("removing " + m + " from cleanups: " + ctx1.cleanups)
+ ctx1.bb.emit(Seq(LOAD_LOCAL(m), MONITOR_EXIT()))
+ ctx1.exitSynchronized(m)
+
+ case Finalizer(f, finalizerCtx) =>
+ debuglog("removing " + f + " from cleanups: " + ctx1.cleanups)
+ if (returnedKind != UNIT && mayCleanStack(f)) {
+ log("Emitting STORE_LOCAL for " + tmp + " to save finalizer.")
+ ctx1.bb.emit(STORE_LOCAL(tmp))
+ savedFinalizer = true
+ }
- // duplicate finalizer (takes care of anchored labels)
- val f1 = duplicateFinalizer(Set.empty ++ ctx1.labels.keySet, ctx1, f)
+ // duplicate finalizer (takes care of anchored labels)
+ val f1 = duplicateFinalizer(Set.empty ++ ctx1.labels.keySet, ctx1, f)
- // we have to run this without the same finalizer in
- // the list, otherwise infinite recursion happens for
- // finalizers that contain 'return'
- val fctx = finalizerCtx.newBlock
- ctx1.bb.closeWith(JUMP(fctx.bb))
- ctx1 = genLoad(f1, fctx, UNIT)
+ // we have to run this without the same finalizer in
+ // the list, otherwise infinite recursion happens for
+ // finalizers that contain 'return'
+ val fctx = finalizerCtx.newBlock
+ ctx1.bb.closeWith(JUMP(fctx.bb))
+ ctx1 = genLoad(f1, fctx, UNIT)
+ }
+ savedFinalizer
}
- saved
- }
- if (saved) {
- log("Emitting LOAD_LOCAL for " + tmp + " after saving finalizer.")
- ctx1.bb.emit(LOAD_LOCAL(tmp))
+ if (saved) {
+ log("Emitting LOAD_LOCAL for " + tmp + " after saving finalizer.")
+ ctx1.bb.emit(LOAD_LOCAL(tmp))
+ }
+ adapt(returnedKind, ctx1.method.returnType, ctx1, tree.pos)
+ ctx1.bb.emit(RETURN(ctx.method.returnType), tree.pos)
+ ctx1.bb.enterIgnoreMode
+ generatedType = expectedType
+ ctx1
}
- adapt(returnedKind, ctx1.method.returnType, ctx1, tree.pos)
- ctx1.bb.emit(RETURN(ctx.method.returnType), tree.pos)
- ctx1.bb.enterIgnoreMode
- generatedType = expectedType
- ctx1
+ genLoadReturn
- case t @ Try(_, _, _) => genLoadTry(t, ctx, (x: TypeKind) => generatedType = x)
+ case t @ Try(_, _, _) =>
+ genLoadTry(t, ctx, generatedType = _)
case Throw(expr) =>
val (ctx1, expectedType) = genThrow(expr, ctx)
@@ -722,44 +723,47 @@ abstract class GenICode extends SubComponent {
ctx1
case New(tpt) =>
- abort("Unexpected New")
+ abort("Unexpected New(" + tpt.summaryString + "/" + tpt + ") received in icode.\n" +
+ " Call was genLoad" + ((tree, ctx, expectedType)))
case Apply(TypeApply(fun, targs), _) =>
- val sym = fun.symbol
- val cast = sym match {
- case Object_isInstanceOf => false
- case Object_asInstanceOf => true
- case _ => abort("Unexpected type application " + fun + "[sym: " + sym.fullName + "]" + " in: " + tree)
- }
+ def genLoadApply1 = {
+ val sym = fun.symbol
+ val cast = sym match {
+ case Object_isInstanceOf => false
+ case Object_asInstanceOf => true
+ case _ => abort("Unexpected type application " + fun + "[sym: " + sym.fullName + "]" + " in: " + tree)
+ }
- val Select(obj, _) = fun
- val l = toTypeKind(obj.tpe)
- val r = toTypeKind(targs.head.tpe)
- val ctx1 = genLoadQualifier(fun, ctx)
-
- if (l.isValueType && r.isValueType)
- genConversion(l, r, ctx1, cast)
- else if (l.isValueType) {
- ctx1.bb.emit(DROP(l), fun.pos)
- if (cast) {
- ctx1.bb.emit(Seq(
- NEW(REFERENCE(definitions.ClassCastExceptionClass)),
- DUP(ObjectReference),
- THROW(definitions.ClassCastExceptionClass)
- ))
- } else
- ctx1.bb.emit(CONSTANT(Constant(false)))
- }
- else if (r.isValueType && cast) {
- assert(false, tree) /* Erasure should have added an unboxing operation to prevent that. */
+ val Select(obj, _) = fun
+ val l = toTypeKind(obj.tpe)
+ val r = toTypeKind(targs.head.tpe)
+ val ctx1 = genLoadQualifier(fun, ctx)
+
+ if (l.isValueType && r.isValueType)
+ genConversion(l, r, ctx1, cast)
+ else if (l.isValueType) {
+ ctx1.bb.emit(DROP(l), fun.pos)
+ if (cast) {
+ ctx1.bb.emit(Seq(
+ NEW(REFERENCE(definitions.ClassCastExceptionClass)),
+ DUP(ObjectReference),
+ THROW(definitions.ClassCastExceptionClass)
+ ))
+ } else
+ ctx1.bb.emit(CONSTANT(Constant(false)))
+ } else if (r.isValueType && cast) {
+ /* Erasure should have added an unboxing operation to prevent that. */
+ abort("should have been unboxed by erasure: " + tree)
+ } else if (r.isValueType) {
+ ctx.bb.emit(IS_INSTANCE(REFERENCE(definitions.boxedClass(r.toType.typeSymbol))))
+ } else {
+ genCast(l, r, ctx1, cast)
+ }
+ generatedType = if (cast) r else BOOL;
+ ctx1
}
- else if (r.isValueType)
- ctx.bb.emit(IS_INSTANCE(REFERENCE(definitions.boxedClass(r.toType.typeSymbol))))
- else
- genCast(l, r, ctx1, cast);
-
- generatedType = if (cast) r else BOOL;
- ctx1
+ genLoadApply1
// 'super' call: Note: since constructors are supposed to
// return an instance of what they construct, we have to take
@@ -768,102 +772,105 @@ abstract class GenICode extends SubComponent {
// therefore, we can ignore this fact, and generate code that leaves nothing
// on the stack (contrary to what the type in the AST says).
case Apply(fun @ Select(Super(_, mix), _), args) =>
- if (settings.debug.value)
- log("Call to super: " + tree);
- val invokeStyle = SuperCall(mix)
-// if (fun.symbol.isConstructor) Static(true) else SuperCall(mix);
-
- ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos)
- val ctx1 = genLoadArguments(args, fun.symbol.info.paramTypes, ctx)
-
- ctx1.bb.emit(CALL_METHOD(fun.symbol, invokeStyle), tree.pos)
- generatedType =
- if (fun.symbol.isConstructor) UNIT
- else toTypeKind(fun.symbol.info.resultType)
- ctx1
+ def genLoadApply2 = {
+ debuglog("Call to super: " + tree);
+ val invokeStyle = SuperCall(mix)
+ // if (fun.symbol.isConstructor) Static(true) else SuperCall(mix);
+
+ ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos)
+ val ctx1 = genLoadArguments(args, fun.symbol.info.paramTypes, ctx)
+
+ ctx1.bb.emit(CALL_METHOD(fun.symbol, invokeStyle), tree.pos)
+ generatedType =
+ if (fun.symbol.isConstructor) UNIT
+ else toTypeKind(fun.symbol.info.resultType)
+ ctx1
+ }
+ genLoadApply2
// 'new' constructor call: Note: since constructors are
// thought to return an instance of what they construct,
// we have to 'simulate' it by DUPlicating the freshly created
// instance (on JVM, <init> methods return VOID).
case Apply(fun @ Select(New(tpt), nme.CONSTRUCTOR), args) =>
- val ctor = fun.symbol
- if (settings.debug.value)
- assert(ctor.isClassConstructor,
- "'new' call to non-constructor: " + ctor.name)
-
- generatedType = toTypeKind(tpt.tpe)
- if (settings.debug.value)
- assert(generatedType.isReferenceType || generatedType.isArrayType,
- "Non reference type cannot be instantiated: " + generatedType)
-
- generatedType match {
- case arr @ ARRAY(elem) =>
- val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
- val dims = arr.dimensions
- var elemKind = arr.elementKind
- if (args.length > dims)
- unit.error(tree.pos, "too many arguments for array constructor: found " + args.length +
- " but array has only " + dims + " dimension(s)")
- if (args.length != dims)
- for (i <- args.length until dims) elemKind = ARRAY(elemKind)
- ctx1.bb.emit(CREATE_ARRAY(elemKind, args.length), tree.pos)
- ctx1
+ def genLoadApply3 = {
+ val ctor = fun.symbol
+ debugassert(ctor.isClassConstructor,
+ "'new' call to non-constructor: " + ctor.name)
+
+ generatedType = toTypeKind(tpt.tpe)
+ debugassert(generatedType.isReferenceType || generatedType.isArrayType,
+ "Non reference type cannot be instantiated: " + generatedType)
- case rt @ REFERENCE(cls) =>
- if (settings.debug.value)
- assert(ctor.owner == cls,
- "Symbol " + ctor.owner.fullName + " is different than " + tpt)
-
- val ctx2 = if (forMSIL && loaders.clrTypes.isNonEnumValuetype(cls)) {
- /* parameterful constructors are the only possible custom constructors,
- a default constructor can't be defined for valuetypes, CLR dixit */
- val isDefaultConstructor = args.isEmpty
- if (isDefaultConstructor) {
- msil_genLoadZeroOfNonEnumValuetype(ctx, rt, tree.pos, leaveAddressOnStackInstead = false)
- ctx
+ generatedType match {
+ case arr @ ARRAY(elem) =>
+ val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
+ val dims = arr.dimensions
+ var elemKind = arr.elementKind
+ if (args.length > dims)
+ unit.error(tree.pos, "too many arguments for array constructor: found " + args.length +
+ " but array has only " + dims + " dimension(s)")
+ if (args.length != dims)
+ for (i <- args.length until dims) elemKind = ARRAY(elemKind)
+ ctx1.bb.emit(CREATE_ARRAY(elemKind, args.length), tree.pos)
+ ctx1
+
+ case rt @ REFERENCE(cls) =>
+ debugassert(ctor.owner == cls,
+ "Symbol " + ctor.owner.fullName + " is different than " + tpt)
+
+ val ctx2 = if (forMSIL && loaders.clrTypes.isNonEnumValuetype(cls)) {
+ /* parameterful constructors are the only possible custom constructors,
+ a default constructor can't be defined for valuetypes, CLR dixit */
+ val isDefaultConstructor = args.isEmpty
+ if (isDefaultConstructor) {
+ msil_genLoadZeroOfNonEnumValuetype(ctx, rt, tree.pos, leaveAddressOnStackInstead = false)
+ ctx
+ } else {
+ val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
+ ctx1.bb.emit(CIL_NEWOBJ(ctor), tree.pos)
+ ctx1
+ }
} else {
+ val nw = NEW(rt)
+ ctx.bb.emit(nw, tree.pos)
+ ctx.bb.emit(DUP(generatedType))
val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
- ctx1.bb.emit(CIL_NEWOBJ(ctor), tree.pos)
+
+ val init = CALL_METHOD(ctor, Static(true))
+ nw.init = init
+ ctx1.bb.emit(init, tree.pos)
ctx1
}
- } else {
- val nw = NEW(rt)
- ctx.bb.emit(nw, tree.pos)
- ctx.bb.emit(DUP(generatedType))
- val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
+ ctx2
- val init = CALL_METHOD(ctor, Static(true))
- nw.init = init
- ctx1.bb.emit(init, tree.pos)
- ctx1
- }
- ctx2
-
- case _ =>
- abort("Cannot instantiate " + tpt + "of kind: " + generatedType)
+ case _ =>
+ abort("Cannot instantiate " + tpt + " of kind: " + generatedType)
+ }
}
+ genLoadApply3
case Apply(fun @ _, List(expr)) if (definitions.isBox(fun.symbol)) =>
- if (settings.debug.value)
- log("BOX : " + fun.symbol.fullName);
- val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
- val nativeKind = toTypeKind(expr.tpe)
- if (settings.Xdce.value) {
- // we store this boxed value to a local, even if not really needed.
- // boxing optimization might use it, and dead code elimination will
- // take care of unnecessary stores
- var loc1 = ctx.makeLocal(tree.pos, expr.tpe, "boxed")
- ctx1.bb.emit(STORE_LOCAL(loc1))
- ctx1.bb.emit(LOAD_LOCAL(loc1))
+ def genLoadApply4 = {
+ debuglog("BOX : " + fun.symbol.fullName);
+ val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
+ val nativeKind = toTypeKind(expr.tpe)
+ if (settings.Xdce.value) {
+ // we store this boxed value to a local, even if not really needed.
+ // boxing optimization might use it, and dead code elimination will
+ // take care of unnecessary stores
+ var loc1 = ctx.makeLocal(tree.pos, expr.tpe, "boxed")
+ ctx1.bb.emit(STORE_LOCAL(loc1))
+ ctx1.bb.emit(LOAD_LOCAL(loc1))
+ }
+ ctx1.bb.emit(BOX(nativeKind), expr.pos)
+ generatedType = toTypeKind(fun.symbol.tpe.resultType)
+ ctx1
}
- ctx1.bb.emit(BOX(nativeKind), expr.pos)
- generatedType = toTypeKind(fun.symbol.tpe.resultType)
- ctx1
+ genLoadApply4
case Apply(fun @ _, List(expr)) if (definitions.isUnbox(fun.symbol)) =>
- if (settings.debug.value)
- log("UNBOX : " + fun.symbol.fullName)
+ debuglog("UNBOX : " + fun.symbol.fullName)
val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
val boxType = toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe)
generatedType = boxType
@@ -871,181 +878,187 @@ abstract class GenICode extends SubComponent {
ctx1
case Apply(fun @ _, List(expr)) if (forMSIL && loaders.clrTypes.isAddressOf(fun.symbol)) =>
- if (settings.debug.value)
- log("ADDRESSOF : " + fun.symbol.fullName);
+ debuglog("ADDRESSOF : " + fun.symbol.fullName);
val ctx1 = msil_genLoadAddressOf(expr, ctx, toTypeKind(expr.tpe), butRawValueIsAlsoGoodEnough = false)
generatedType = toTypeKind(fun.symbol.tpe.resultType)
ctx1
case app @ Apply(fun, args) =>
- val sym = fun.symbol
-
- if (sym.isLabel) { // jump to a label
- val label = ctx.labels.getOrElse(sym, {
- // it is a forward jump, scan for labels
- log("Performing scan for label because of forward jump.")
- scanForLabels(ctx.defdef, ctx)
- ctx.labels.get(sym) match {
- case Some(l) =>
- log("Found label: " + l)
- l
- case _ =>
- abort("Unknown label target: " + sym +
- " at: " + (fun.pos) + ": ctx: " + ctx)
- }
- })
- val ctx1 = genLoadLabelArguments(args, label, ctx)
- ctx1.bb.emitOnly(if (label.anchored) JUMP(label.block) else PJUMP(label))
- ctx1.bb.enterIgnoreMode
- ctx1
- } else if (isPrimitive(sym)) { // primitive method call
- val (newCtx, resKind) = genPrimitiveOp(app, ctx, expectedType)
- generatedType = resKind
- newCtx
- } else { // normal method call
- if (settings.debug.value)
- log("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember);
- val invokeStyle =
- if (sym.isStaticMember)
- Static(false)
- else if (sym.isPrivate || sym.isClassConstructor)
- Static(true)
- else
- Dynamic
-
- var ctx1 =
- if (invokeStyle.hasInstance) {
- if (forMSIL && !(invokeStyle.isInstanceOf[SuperCall]) && msil_IsValuetypeInstMethod(sym))
- msil_genLoadQualifierAddress(fun, ctx)
+ def genLoadApply6 = {
+ val sym = fun.symbol
+
+ if (sym.isLabel) { // jump to a label
+ val label = ctx.labels.getOrElse(sym, {
+ // it is a forward jump, scan for labels
+ resolveForwardLabel(ctx.defdef, ctx, sym)
+ ctx.labels.get(sym) match {
+ case Some(l) =>
+ log("Forward jump for " + sym.fullLocationString + ": scan found label " + l)
+ l
+ case _ =>
+ abort("Unknown label target: " + sym + " at: " + (fun.pos) + ": ctx: " + ctx)
+ }
+ })
+ // note: when one of the args to genLoadLabelArguments is a jump to a label,
+ // it will call back into genLoad and arrive at this case, which will then set ctx1.bb.ignore to true,
+ // this is okay, since we're jumping unconditionally, so the loads and jumps emitted by the outer
+ // call to genLoad (by calling genLoadLabelArguments and emitOnly) can safely be ignored,
+ // however, as emitOnly will close the block, which reverses its instructions (when it's still open),
+ // we better not reverse when the block has already been closed but is in ignore mode
+ // (if it's not in ignore mode, double-closing is an error)
+ val ctx1 = genLoadLabelArguments(args, label, ctx)
+ ctx1.bb.emitOnly(if (label.anchored) JUMP(label.block) else PJUMP(label))
+ ctx1.bb.enterIgnoreMode
+ ctx1
+ } else if (isPrimitive(sym)) { // primitive method call
+ val (newCtx, resKind) = genPrimitiveOp(app, ctx, expectedType)
+ generatedType = resKind
+ newCtx
+ } else { // normal method call
+ debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember);
+ val invokeStyle =
+ if (sym.isStaticMember)
+ Static(false)
+ else if (sym.isPrivate || sym.isClassConstructor)
+ Static(true)
else
- genLoadQualifier(fun, ctx)
- } else ctx
-
- ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx1)
- val cm = CALL_METHOD(sym, invokeStyle)
-
- /** In a couple cases, squirrel away a little extra information in the
- * CALL_METHOD for use by GenJVM.
- */
- fun match {
- case Select(qual, _) =>
- val qualSym = qual.tpe.typeSymbol
- if (qualSym == ArrayClass) cm setTargetTypeKind toTypeKind(qual.tpe)
- else cm setHostClass qualSym
-
- if (settings.debug.value) log(
- if (qualSym == ArrayClass) "Stored target type kind " + toTypeKind(qual.tpe) + " for " + sym.fullName
- else "Set more precise host class for " + sym.fullName + " host: " + qualSym
- )
- case _ =>
- }
- ctx1.bb.emit(cm, tree.pos)
+ Dynamic
- if (sym == ctx1.method.symbol) {
- ctx1.method.recursive = true
+ var ctx1 =
+ if (invokeStyle.hasInstance) {
+ if (forMSIL && !(invokeStyle.isInstanceOf[SuperCall]) && msil_IsValuetypeInstMethod(sym))
+ msil_genLoadQualifierAddress(fun, ctx)
+ else
+ genLoadQualifier(fun, ctx)
+ } else ctx
+
+ ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx1)
+ val cm = CALL_METHOD(sym, invokeStyle)
+
+ /** In a couple cases, squirrel away a little extra information in the
+ * CALL_METHOD for use by GenJVM.
+ */
+ fun match {
+ case Select(qual, _) =>
+ val qualSym = findHostClass(qual.tpe, sym)
+ if (qualSym == ArrayClass) {
+ val kind = toTypeKind(qual.tpe)
+ cm setTargetTypeKind kind
+ log(s"Stored target type kind for {$sym.fullName} as $kind")
+ }
+ else {
+ cm setHostClass qualSym
+ if (qual.tpe.typeSymbol != qualSym)
+ log(s"Precisified host class for $sym from ${qual.tpe.typeSymbol.fullName} to ${qualSym.fullName}")
+ }
+ case _ =>
+ }
+ ctx1.bb.emit(cm, tree.pos)
+ ctx1.method.updateRecursive(sym)
+ generatedType =
+ if (sym.isClassConstructor) UNIT
+ else toTypeKind(sym.info.resultType);
+ ctx1
}
- generatedType =
- if (sym.isClassConstructor) UNIT
- else toTypeKind(sym.info.resultType);
- ctx1
}
+ genLoadApply6
case ApplyDynamic(qual, args) =>
- assert(!forMSIL)
- ctx.clazz.bootstrapClass = Some("scala.runtime.DynamicDispatch")
- val ctx1 = genLoad(qual, ctx, ObjectReference)
- genLoadArguments(args, tree.symbol.info.paramTypes, ctx1)
- ctx1.bb.emit(CALL_METHOD(tree.symbol, InvokeDynamic), tree.pos)
- ctx1
+ assert(!forMSIL, tree)
+ // TODO - this is where we'd catch dynamic applies for invokedynamic.
+ sys.error("No invokedynamic support yet.")
+ // val ctx1 = genLoad(qual, ctx, ObjectReference)
+ // genLoadArguments(args, tree.symbol.info.paramTypes, ctx1)
+ // ctx1.bb.emit(CALL_METHOD(tree.symbol, InvokeDynamic), tree.pos)
+ // ctx1
case This(qual) =>
- assert(tree.symbol == ctx.clazz.symbol || tree.symbol.isModuleClass,
- "Trying to access the this of another class: " +
- "tree.symbol = " + tree.symbol + ", ctx.clazz.symbol = " + ctx.clazz.symbol + " compilation unit:"+unit)
- if (tree.symbol.isModuleClass && tree.symbol != ctx.clazz.symbol) {
- if (settings.debug.value)
- log("LOAD_MODULE from 'This': " + tree.symbol);
- assert(!tree.symbol.isPackageClass, "Cannot use package as value: " + tree)
- genLoadModule(ctx, tree.symbol, tree.pos)
- generatedType = REFERENCE(tree.symbol)
- } else {
- ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos)
- generatedType = REFERENCE(
- if (tree.symbol == ArrayClass) ObjectClass else ctx.clazz.symbol
- )
+ def genLoadThis = {
+ assert(tree.symbol == ctx.clazz.symbol || tree.symbol.isModuleClass,
+ "Trying to access the this of another class: " +
+ "tree.symbol = " + tree.symbol + ", ctx.clazz.symbol = " + ctx.clazz.symbol + " compilation unit:"+unit)
+ if (tree.symbol.isModuleClass && tree.symbol != ctx.clazz.symbol) {
+ genLoadModule(ctx, tree)
+ generatedType = REFERENCE(tree.symbol)
+ } else {
+ ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos)
+ generatedType = REFERENCE(
+ if (tree.symbol == ArrayClass) ObjectClass else ctx.clazz.symbol
+ )
+ }
+ ctx
}
- ctx
+ genLoadThis
case Select(Ident(nme.EMPTY_PACKAGE_NAME), module) =>
- if (settings.debug.value) {
- assert(tree.symbol.isModule,
- "Selection of non-module from empty package: " + tree.toString() +
- " sym: " + tree.symbol +
- " at: " + (tree.pos))
- log("LOAD_MODULE from Select(<emptypackage>): " + tree.symbol);
- }
- assert(!tree.symbol.isPackageClass, "Cannot use package as value: " + tree)
- genLoadModule(ctx, tree.symbol, tree.pos)
- ctx
+ debugassert(tree.symbol.isModule,
+ "Selection of non-module from empty package: " + tree +
+ " sym: " + tree.symbol + " at: " + (tree.pos)
+ )
+ genLoadModule(ctx, tree)
case Select(qualifier, selector) =>
- val sym = tree.symbol
- generatedType = toTypeKind(sym.info)
- val hostClass = qualifier.tpe.typeSymbol.orElse(sym.owner)
+ def genLoadSelect = {
+ val sym = tree.symbol
+ generatedType = toTypeKind(sym.info)
+ val hostClass = findHostClass(qualifier.tpe, sym)
+ log(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
- if (sym.isModule) {
- if (settings.debug.value)
- log("LOAD_MODULE from Select(qualifier, selector): " + sym)
- assert(!tree.symbol.isPackageClass, "Cannot use package as value: " + tree)
- genLoadModule(ctx, sym, tree.pos)
- ctx
- } else if (sym.isStaticMember) {
- ctx.bb.emit(LOAD_FIELD(sym, true) setHostClass hostClass, tree.pos)
- ctx
- } else {
- val ctx1 = genLoadQualifier(tree, ctx)
- ctx1.bb.emit(LOAD_FIELD(sym, false) setHostClass hostClass, tree.pos)
- ctx1
+ if (sym.isModule) {
+ genLoadModule(ctx, tree)
+ }
+ else if (sym.isStaticMember) {
+ ctx.bb.emit(LOAD_FIELD(sym, true) setHostClass hostClass, tree.pos)
+ ctx
+ } else {
+ val ctx1 = genLoadQualifier(tree, ctx)
+ ctx1.bb.emit(LOAD_FIELD(sym, false) setHostClass hostClass, tree.pos)
+ ctx1
+ }
}
+ genLoadSelect
case Ident(name) =>
- val sym = tree.symbol
- if (!sym.isPackage) {
- if (sym.isModule) {
- if (settings.debug.value)
- log("LOAD_MODULE from Ident(name): " + sym)
- assert(!sym.isPackageClass, "Cannot use package as value: " + tree)
- genLoadModule(ctx, sym, tree.pos)
- generatedType = toTypeKind(sym.info)
- } else {
- try {
- val Some(l) = ctx.method.lookupLocal(sym)
- ctx.bb.emit(LOAD_LOCAL(l), tree.pos)
- generatedType = l.kind
- } catch {
- case ex: MatchError =>
- abort("symbol " + sym + " does not exist in " + ctx.method)
+ def genLoadIdent = {
+ val sym = tree.symbol
+ if (!sym.isPackage) {
+ if (sym.isModule) {
+ genLoadModule(ctx, tree)
+ generatedType = toTypeKind(sym.info)
+ } else {
+ try {
+ val Some(l) = ctx.method.lookupLocal(sym)
+ ctx.bb.emit(LOAD_LOCAL(l), tree.pos)
+ generatedType = l.kind
+ } catch {
+ case ex: MatchError =>
+ abort("symbol " + sym + " does not exist in " + ctx.method)
+ }
}
}
+ ctx
}
- ctx
+ genLoadIdent
case Literal(value) =>
- if (value.tag != UnitTag) (value.tag, expectedType) match {
- case (IntTag, LONG) =>
- ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos);
- generatedType = LONG
- case (FloatTag, DOUBLE) =>
- ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos);
- generatedType = DOUBLE
- case (NullTag, _) =>
- ctx.bb.emit(CONSTANT(value), tree.pos);
- generatedType = NullReference
- case _ =>
- ctx.bb.emit(CONSTANT(value), tree.pos);
- generatedType = toTypeKind(tree.tpe)
+ def genLoadLiteral = {
+ if (value.tag != UnitTag) (value.tag, expectedType) match {
+ case (IntTag, LONG) =>
+ ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos);
+ generatedType = LONG
+ case (FloatTag, DOUBLE) =>
+ ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos);
+ generatedType = DOUBLE
+ case (NullTag, _) =>
+ ctx.bb.emit(CONSTANT(value), tree.pos);
+ generatedType = NullReference
+ case _ =>
+ ctx.bb.emit(CONSTANT(value), tree.pos);
+ generatedType = toTypeKind(tree.tpe)
+ }
+ ctx
}
- ctx
+ genLoadLiteral
case Block(stats, expr) =>
ctx.enterScope
@@ -1065,57 +1078,72 @@ abstract class GenICode extends SubComponent {
genStat(tree, ctx)
case ArrayValue(tpt @ TypeTree(), _elems) =>
- var ctx1 = ctx
- val elmKind = toTypeKind(tpt.tpe)
- generatedType = ARRAY(elmKind)
- val elems = _elems.toIndexedSeq
-
- ctx1.bb.emit(CONSTANT(new Constant(elems.length)), tree.pos)
- ctx1.bb.emit(CREATE_ARRAY(elmKind, 1))
- // inline array literals
- var i = 0
- while (i < elems.length) {
- ctx1.bb.emit(DUP(generatedType), tree.pos)
- ctx1.bb.emit(CONSTANT(new Constant(i)))
- ctx1 = genLoad(elems(i), ctx1, elmKind)
- ctx1.bb.emit(STORE_ARRAY_ITEM(elmKind))
- i = i + 1
+ def genLoadArrayValue = {
+ var ctx1 = ctx
+ val elmKind = toTypeKind(tpt.tpe)
+ generatedType = ARRAY(elmKind)
+ val elems = _elems.toIndexedSeq
+
+ ctx1.bb.emit(CONSTANT(new Constant(elems.length)), tree.pos)
+ ctx1.bb.emit(CREATE_ARRAY(elmKind, 1))
+ // inline array literals
+ var i = 0
+ while (i < elems.length) {
+ ctx1.bb.emit(DUP(generatedType), tree.pos)
+ ctx1.bb.emit(CONSTANT(new Constant(i)))
+ ctx1 = genLoad(elems(i), ctx1, elmKind)
+ ctx1.bb.emit(STORE_ARRAY_ITEM(elmKind))
+ i = i + 1
+ }
+ ctx1
}
- ctx1
+ genLoadArrayValue
case Match(selector, cases) =>
- if (settings.debug.value)
- log("Generating SWITCH statement.");
- var ctx1 = genLoad(selector, ctx, INT)
- val afterCtx = ctx1.newBlock
- var caseCtx: Context = null
- generatedType = toTypeKind(tree.tpe)
-
- var targets: List[BasicBlock] = Nil
- var tags: List[Int] = Nil
- var default: BasicBlock = afterCtx.bb
-
- for (caze @ CaseDef(pat, guard, body) <- cases) {
- assert(guard == EmptyTree)
- val tmpCtx = ctx1.newBlock
- pat match {
- case Literal(value) =>
- tags = value.intValue :: tags
- targets = tmpCtx.bb :: targets
- case Ident(nme.WILDCARD) =>
- default = tmpCtx.bb
- case _ =>
- abort("Invalid case statement in switch-like pattern match: " +
- tree + " at: " + (tree.pos))
- }
+ def genLoadMatch = {
+ debuglog("Generating SWITCH statement.");
+ var ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue)
+ val afterCtx = ctx1.newBlock
+ var caseCtx: Context = null
+ generatedType = toTypeKind(tree.tpe)
+
+ var targets: List[BasicBlock] = Nil
+ var tags: List[Int] = Nil
+ var default: BasicBlock = afterCtx.bb
+
+ for (caze @ CaseDef(pat, guard, body) <- cases) {
+ assert(guard == EmptyTree, guard)
+ val tmpCtx = ctx1.newBlock
+ pat match {
+ case Literal(value) =>
+ tags = value.intValue :: tags
+ targets = tmpCtx.bb :: targets
+ case Ident(nme.WILDCARD) =>
+ default = tmpCtx.bb
+ case Alternative(alts) =>
+ alts foreach {
+ case Literal(value) =>
+ tags = value.intValue :: tags
+ targets = tmpCtx.bb :: targets
+ case _ =>
+ abort("Invalid case in alternative in switch-like pattern match: " +
+ tree + " at: " + tree.pos)
+ }
+ case _ =>
+ abort("Invalid case statement in switch-like pattern match: " +
+ tree + " at: " + (tree.pos))
+ }
- caseCtx = genLoad(body, tmpCtx, generatedType)
- caseCtx.bb.closeWith(JUMP(afterCtx.bb) setPos caze.pos)
+ caseCtx = genLoad(body, tmpCtx, generatedType)
+ // close the block unless it's already been closed by the body, which closes the block if it ends in a jump (which is emitted to have alternatives share their body)
+ caseCtx.bb.closeWith(JUMP(afterCtx.bb) setPos caze.pos)
+ }
+ ctx1.bb.emitOnly(
+ SWITCH(tags.reverse map (x => List(x)), (default :: targets).reverse) setPos tree.pos
+ )
+ afterCtx
}
- ctx1.bb.emitOnly(
- SWITCH(tags.reverse map (x => List(x)), (default :: targets).reverse) setPos tree.pos
- )
- afterCtx
+ genLoadMatch
case EmptyTree =>
if (expectedType != UNIT)
@@ -1133,42 +1161,32 @@ abstract class GenICode extends SubComponent {
resCtx
}
- private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position): Unit = {
- if (!(from <:< to) && !(from == NullReference && to == NothingReference)) {
- to match {
- case UNIT =>
- ctx.bb.emit(DROP(from), pos)
- if (settings.debug.value)
- log("Dropped an " + from);
-
- case _ =>
- if (settings.debug.value) {
- assert(from != UNIT,
- "Can't convert from UNIT to " + to + " at: " + pos)
- }
- assert(!from.isReferenceType && !to.isReferenceType,
- "type error: can't convert from " + from + " to " + to +" in unit " + unit.source + " at " + pos)
-
- ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
- }
- } else if (from == NothingReference) {
- ctx.bb.emit(THROW(ThrowableClass))
- ctx.bb.enterIgnoreMode
- } else if (from == NullReference) {
- ctx.bb.emit(DROP(from))
- ctx.bb.emit(CONSTANT(Constant(null)))
+ private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
+ // An awful lot of bugs explode here - let's leave ourselves more clues.
+ // A typical example is an overloaded type assigned after typer.
+ log(s"GenICode#adapt($from, $to, $ctx, $pos)")
+
+ val conforms = (from <:< to) || (from == NullReference && to == NothingReference)
+ def coerce(from: TypeKind, to: TypeKind) = ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
+ def checkAssertions() {
+ def msg = s"Can't convert from $from to $to in unit ${unit.source} at $pos"
+ debugassert(from != UNIT, msg)
+ assert(!from.isReferenceType && !to.isReferenceType, msg)
}
- else if (from == ThrowableReference && !(ThrowableClass.tpe <:< to.toType)) {
- log("Inserted check-cast on throwable to " + to + " at " + pos)
- ctx.bb.emit(CHECK_CAST(to))
+ if (conforms) from match {
+ case NothingReference => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode
+ case NullReference => ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null))))
+ case ThrowableReference if !(ThrowableClass.tpe <:< to.toType) => ctx.bb.emit(CHECK_CAST(to)) // downcast throwables
+ case BYTE | SHORT | CHAR | INT if to == LONG => coerce(INT, LONG) // widen subrange types
+ case _ => ()
}
- else (from, to) match {
- case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => ctx.bb.emit(CALL_PRIMITIVE(Conversion(INT, LONG)))
- case _ => ()
+ else to match {
+ case UNIT => ctx.bb.emit(DROP(from), pos) // value discarding
+ case _ => checkAssertions() ; coerce(from, to) // other primitive coercions
}
}
- /** Load the qualifier of `tree' on top of the stack. */
+ /** Load the qualifier of `tree` on top of the stack. */
private def genLoadQualifier(tree: Tree, ctx: Context): Context =
tree match {
case Select(qualifier, _) =>
@@ -1190,11 +1208,10 @@ abstract class GenICode extends SubComponent {
* Generate code that loads args into label parameters.
*/
private def genLoadLabelArguments(args: List[Tree], label: Label, ctx: Context): Context = {
- if (settings.debug.value) {
- assert(args.length == label.params.length,
- "Wrong number of arguments in call to label " + label.symbol)
- }
-
+ debugassert(
+ args.length == label.params.length,
+ "Wrong number of arguments in call to label " + label.symbol
+ )
var ctx1 = ctx
def isTrivial(kv: (Tree, Symbol)) = kv match {
@@ -1226,8 +1243,19 @@ abstract class GenICode extends SubComponent {
genLoad(arg, res, toTypeKind(tpe))
}
- private def genLoadModule(ctx: Context, sym: Symbol, pos: Position) {
- ctx.bb.emit(LOAD_MODULE(sym), pos)
+ private def genLoadModule(ctx: Context, tree: Tree): Context = {
+ // Working around SI-5604. Rather than failing the compile when we see
+ // a package here, check if there's a package object.
+ val sym = (
+ if (!tree.symbol.isPackageClass) tree.symbol
+ else tree.symbol.info.member(nme.PACKAGE) match {
+ case NoSymbol => abort("Cannot use package as value: " + tree)
+ case s => debugwarn("Bug: found package class where package object expected. Converting.") ; s.moduleClass
+ }
+ )
+ debuglog("LOAD_MODULE from %s: %s".format(tree.shortClass, sym))
+ ctx.bb.emit(LOAD_MODULE(sym), tree.pos)
+ ctx
}
def genConversion(from: TypeKind, to: TypeKind, ctx: Context, cast: Boolean) = {
@@ -1329,7 +1357,7 @@ abstract class GenICode extends SubComponent {
/** The Object => String overload.
*/
- private lazy val String_valueOf: Symbol = getMember(StringModule, "valueOf") filter (sym =>
+ private lazy val String_valueOf: Symbol = getMember(StringModule, nme.valueOf) filter (sym =>
sym.info.paramTypes match {
case List(pt) => pt.typeSymbol == ObjectClass
case _ => false
@@ -1341,7 +1369,7 @@ abstract class GenICode extends SubComponent {
// case we want to get more precise.
//
// private def valueOfForType(tp: Type): Symbol = {
- // val xs = getMember(StringModule, "valueOf") filter (sym =>
+ // val xs = getMember(StringModule, nme.valueOf) filter (sym =>
// // We always exclude the Array[Char] overload because java throws an NPE if
// // you pass it a null. It will instead find the Object one, which doesn't.
// sym.info.paramTypes match {
@@ -1365,12 +1393,12 @@ abstract class GenICode extends SubComponent {
liftStringConcat(tree) match {
// Optimization for expressions of the form "" + x. We can avoid the StringBuilder.
case List(Literal(Constant("")), arg) if !forMSIL =>
- if (settings.debug.value) log("Rewriting \"\" + x as String.valueOf(x) for: " + arg)
+ debuglog("Rewriting \"\" + x as String.valueOf(x) for: " + arg)
val ctx1 = genLoad(arg, ctx, ObjectReference)
ctx1.bb.emit(CALL_METHOD(String_valueOf, Static(false)), arg.pos)
ctx1
case concatenations =>
- if (settings.debug.value) log("Lifted string concatenations for " + tree + "\n to: " + concatenations)
+ debuglog("Lifted string concatenations for " + tree + "\n to: " + concatenations)
var ctx1 = ctx
ctx1.bb.emit(CALL_PRIMITIVE(StartConcat), tree.pos)
for (elem <- concatenations) {
@@ -1388,7 +1416,7 @@ abstract class GenICode extends SubComponent {
def genScalaHash(tree: Tree, ctx: Context): Context = {
val hashMethod = {
ctx.bb.emit(LOAD_MODULE(ScalaRunTimeModule))
- getMember(ScalaRunTimeModule, "hash")
+ getMember(ScalaRunTimeModule, nme.hash_)
}
val ctx1 = genLoad(tree, ctx, ObjectReference)
@@ -1420,21 +1448,17 @@ abstract class GenICode extends SubComponent {
def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null
/**
- * Traverse the tree and store label stubs in the context. This is
- * necessary to handle forward jumps, because at a label application
- * with arguments, the symbols of the corresponding LabelDef parameters
- * are not yet known.
+ * Find the label denoted by `lsym` and enter it in context `ctx`.
*
- * Since it is expensive to traverse each method twice, this method is called
- * only when forward jumps really happen, and then it re-traverses the whole
- * method, scanning for LabelDefs.
+ * We only enter one symbol at a time, even though we might traverse the same
+ * tree more than once per method. That's because we cannot enter labels that
+ * might be duplicated (for instance, inside finally blocks).
*
* TODO: restrict the scanning to smaller subtrees than the whole method.
* It is sufficient to scan the trees of the innermost enclosing block.
*/
- //
- private def scanForLabels(tree: Tree, ctx: Context): Unit = tree foreachPartial {
- case t @ LabelDef(_, params, rhs) =>
+ private def resolveForwardLabel(tree: Tree, ctx: Context, lsym: Symbol): Unit = tree foreachPartial {
+ case t @ LabelDef(_, params, rhs) if t.symbol == lsym =>
ctx.labels.getOrElseUpdate(t.symbol, {
val locals = params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false))
ctx.method addLocals locals
@@ -1485,8 +1509,7 @@ abstract class GenICode extends SubComponent {
}
}
- if (settings.debug.value)
- log("Entering genCond with tree: " + tree);
+ debuglog("Entering genCond with tree: " + tree);
// the default emission
def default() = {
@@ -1548,20 +1571,8 @@ abstract class GenICode extends SubComponent {
* @param elseCtx target context if the comparison yields false
*/
def genEqEqPrimitive(l: Tree, r: Tree, ctx: Context)(thenCtx: Context, elseCtx: Context): Unit = {
- def getTempLocal: Local = ctx.method.lookupLocal(nme.EQEQ_LOCAL_VAR) match {
- case Some(local) => local
- case None =>
- val local = ctx.makeLocal(l.pos, AnyRefClass.typeConstructor, nme.EQEQ_LOCAL_VAR)
- //assert(!l.pos.source.isEmpty, "bad position, unit = "+unit+", tree = "+l+", pos = "+l.pos.source)
- // Note - I commented these out because they were crashing the test case in ticket #2426
- // (and I have also had to comment them out at various times while working on equality.)
- // I don't know what purpose they are serving but it would be nice if they didn't have to
- // crash the compiler.
- // assert(l.pos.source == unit.source)
- // assert(r.pos.source == unit.source)
- local.start = (l.pos).line
- local.end = (r.pos).line
- local
+ def getTempLocal = ctx.method.lookupLocal(nme.EQEQ_LOCAL_VAR) getOrElse {
+ ctx.makeLocal(l.pos, AnyRefClass.tpe, nme.EQEQ_LOCAL_VAR)
}
/** True if the equality comparison is between values that require the use of the rich equality
@@ -1577,7 +1588,7 @@ abstract class GenICode extends SubComponent {
if (mustUseAnyComparator) {
// when -optimise is on we call the @inline-version of equals, found in ScalaRunTime
val equalsMethod =
- if (!settings.XO.value) {
+ if (!settings.optimise.value) {
def default = platform.externalEquals
platform match {
case x: JavaPlatform =>
@@ -1599,9 +1610,10 @@ abstract class GenICode extends SubComponent {
val ctx1 = genLoad(l, ctx, ObjectReference)
val ctx2 = genLoad(r, ctx1, ObjectReference)
- ctx2.bb.emit(CALL_METHOD(equalsMethod, if (settings.XO.value) Dynamic else Static(false)))
- ctx2.bb.emit(CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL))
- ctx2.bb.close
+ ctx2.bb.emitOnly(
+ CALL_METHOD(equalsMethod, if (settings.optimise.value) Dynamic else Static(false)),
+ CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
+ )
}
else {
if (isNull(l))
@@ -1613,8 +1625,6 @@ abstract class GenICode extends SubComponent {
} else {
val eqEqTempLocal = getTempLocal
var ctx1 = genLoad(l, ctx, ObjectReference)
-
- // dicey refactor section
lazy val nonNullCtx = ctx1.newBlock
// l == r -> if (l eq null) r eq null else l.equals(r)
@@ -1645,8 +1655,7 @@ abstract class GenICode extends SubComponent {
* class.
*/
private def addClassFields(ctx: Context, cls: Symbol) {
- if (settings.debug.value)
- assert(ctx.clazz.symbol eq cls,
+ debugassert(ctx.clazz.symbol eq cls,
"Classes are not the same: " + ctx.clazz.symbol + ", " + cls)
/** Non-method term members are fields, except for module members. Module
@@ -1707,13 +1716,12 @@ abstract class GenICode extends SubComponent {
if (block.size == 1 && optCont.isDefined) {
val Some(cont) = optCont;
val pred = block.predecessors;
- log("Preds: " + pred + " of " + block + " (" + optCont + ")");
+ debuglog("Preds: " + pred + " of " + block + " (" + optCont + ")");
pred foreach { p =>
changed = true
p.lastInstruction match {
case CJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
- if (settings.debug.value)
- log("Pruning empty if branch.");
+ debuglog("Pruning empty if branch.");
p.replaceInstruction(p.lastInstruction,
if (block == succ)
if (block == fail)
@@ -1726,8 +1734,7 @@ abstract class GenICode extends SubComponent {
abort("Could not find block in preds: " + method + " " + block + " " + pred + " " + p))
case CZJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
- if (settings.debug.value)
- log("Pruning empty ifz branch.");
+ debuglog("Pruning empty ifz branch.");
p.replaceInstruction(p.lastInstruction,
if (block == succ)
if (block == fail)
@@ -1740,15 +1747,12 @@ abstract class GenICode extends SubComponent {
abort("Could not find block in preds"))
case JUMP(b) if (b == block) =>
- if (settings.debug.value)
- log("Pruning empty JMP branch.");
+ debuglog("Pruning empty JMP branch.");
val replaced = p.replaceInstruction(p.lastInstruction, JUMP(cont))
- if (settings.debug.value)
- assert(replaced, "Didn't find p.lastInstruction")
+ debugassert(replaced, "Didn't find p.lastInstruction")
case SWITCH(tags, labels) if (labels contains block) =>
- if (settings.debug.value)
- log("Pruning empty SWITCH branch.");
+ debuglog("Pruning empty SWITCH branch.");
p.replaceInstruction(p.lastInstruction,
SWITCH(tags, labels map (l => if (l == block) cont else l)))
@@ -1758,7 +1762,7 @@ abstract class GenICode extends SubComponent {
}
}
if (changed) {
- log("Removing block: " + block)
+ debuglog("Removing block: " + block)
method.code.removeBlock(block)
for (e <- method.exh) {
e.covered = e.covered filter (_ != block)
@@ -1773,11 +1777,10 @@ abstract class GenICode extends SubComponent {
do {
changed = false
n += 1
- method.code.blocks foreach prune0
+ method.blocks foreach prune0
} while (changed)
- if (settings.debug.value)
- log("Prune fixpoint reached in " + n + " iterations.");
+ debuglog("Prune fixpoint reached in " + n + " iterations.");
}
def getMaxType(ts: List[Type]): TypeKind =
@@ -1799,7 +1802,7 @@ abstract class GenICode extends SubComponent {
* to delay it any more: they will be used at some point.
*/
class DuplicateLabels(boundLabels: Set[Symbol]) extends Transformer {
- val labels: mutable.Map[Symbol, Symbol] = new mutable.HashMap
+ val labels = perRunCaches.newMap[Symbol, Symbol]()
var method: Symbol = _
var ctx: Context = _
@@ -1813,7 +1816,7 @@ abstract class GenICode extends SubComponent {
val sym = t.symbol
def getLabel(pos: Position, name: Name) =
labels.getOrElseUpdate(sym,
- method.newLabel(sym.pos, unit.freshTermName(name.toString)) setInfo sym.tpe
+ method.newLabel(unit.freshTermName(name.toString), sym.pos) setInfo sym.tpe
)
t match {
@@ -1876,7 +1879,7 @@ abstract class GenICode extends SubComponent {
var bb: BasicBlock = _
/** Map from label symbols to label objects. */
- var labels = mutable.HashMap[Symbol, Label]()
+ var labels = perRunCaches.newMap[Symbol, Label]()
/** Current method definition. */
var defdef: DefDef = _
@@ -1884,7 +1887,7 @@ abstract class GenICode extends SubComponent {
/** current exception handlers */
var handlers: List[ExceptionHandler] = Nil
- /** The current monitors or finalizers, to be cleaned up upon `return'. */
+ /** The current monitors or finalizers, to be cleaned up upon `return`. */
var cleanups: List[Cleanup] = Nil
/** The exception handlers we are currently generating code for */
@@ -1895,21 +1898,11 @@ abstract class GenICode extends SubComponent {
var handlerCount = 0
- override def toString(): String = {
- val buf = new StringBuilder()
- buf.append("\tpackage: ").append(packg).append('\n')
- buf.append("\tclazz: ").append(clazz).append('\n')
- buf.append("\tmethod: ").append(method).append('\n')
- buf.append("\tbb: ").append(bb).append('\n')
- buf.append("\tlabels: ").append(labels).append('\n')
- buf.append("\texception handlers: ").append(handlers).append('\n')
- buf.append("\tcleanups: ").append(cleanups).append('\n')
- buf.append("\tscope: ").append(scope).append('\n')
- buf.toString()
- }
+ override def toString =
+ s"package $packg { class $clazz { def $method { bb=$bb } } }"
def loadException(ctx: Context, exh: ExceptionHandler, pos: Position) = {
- log("Emitting LOAD_EXCEPTION for class: " + exh.loadExceptionClass)
+ debuglog("Emitting LOAD_EXCEPTION for class: " + exh.loadExceptionClass)
ctx.bb.emit(LOAD_EXCEPTION(exh.loadExceptionClass) setPos pos, pos)
}
@@ -1980,9 +1973,9 @@ abstract class GenICode extends SubComponent {
*/
def enterMethod(m: IMethod, d: DefDef): Context = {
val ctx1 = new Context(this) setMethod(m)
- ctx1.labels = new mutable.HashMap()
+ ctx1.labels = mutable.HashMap()
ctx1.method.code = new Code(m)
- ctx1.bb = ctx1.method.code.startBlock
+ ctx1.bb = ctx1.method.startBlock
ctx1.defdef = d
ctx1.scope = EmptyScope
ctx1.enterScope
@@ -1990,11 +1983,12 @@ abstract class GenICode extends SubComponent {
}
/** Return a new context for a new basic block. */
- def newBlock: Context = {
+ def newBlock(): Context = {
val block = method.code.newBlock
handlers foreach (_ addCoveredBlock block)
currentExceptionHandlers foreach (_ addBlock block)
- block.varsInScope = new mutable.HashSet() ++= scope.varsInScope
+ block.varsInScope.clear()
+ block.varsInScope ++= scope.varsInScope
new Context(this) setBasicBlock block
}
@@ -2016,12 +2010,11 @@ abstract class GenICode extends SubComponent {
*/
private def newExceptionHandler(cls: Symbol, resultKind: TypeKind, pos: Position): ExceptionHandler = {
handlerCount += 1
- val exh = new ExceptionHandler(method, "" + handlerCount, cls, pos)
+ val exh = new ExceptionHandler(method, newTermNameCached("" + handlerCount), cls, pos)
exh.resultKind = resultKind
method.addHandler(exh)
handlers = exh :: handlers
- if (settings.debug.value)
- log("added handler: " + exh);
+ debuglog("added handler: " + exh);
exh
}
@@ -2031,8 +2024,7 @@ abstract class GenICode extends SubComponent {
private def addActiveHandler(exh: ExceptionHandler) {
handlerCount += 1
handlers = exh :: handlers
- if (settings.debug.value)
- log("added handler: " + exh);
+ debuglog("added handler: " + exh);
}
/** Return a new context for generating code for the given
@@ -2055,8 +2047,7 @@ abstract class GenICode extends SubComponent {
"Wrong nesting of exception handlers." + this + " for " + exh)
handlerCount -= 1
handlers = handlers.tail
- if (settings.debug.value)
- log("removed handler: " + exh);
+ debuglog("removed handler: " + exh);
}
@@ -2065,9 +2056,7 @@ abstract class GenICode extends SubComponent {
/** Make a fresh local variable. It ensures the 'name' is unique. */
def makeLocal(pos: Position, tpe: Type, name: String): Local = {
- val sym = method.symbol.newVariable(pos, unit.freshTermName(name))
- .setInfo(tpe)
- .setFlag(Flags.SYNTHETIC)
+ val sym = method.symbol.newVariable(unit.freshTermName(name), pos, Flags.SYNTHETIC) setInfo tpe
this.method.addLocal(new Local(sym, toTypeKind(tpe), false))
}
@@ -2139,12 +2128,12 @@ abstract class GenICode extends SubComponent {
exh
}) else None
- val exhs = handlers.map { handler =>
- val exh = this.newExceptionHandler(handler._1, handler._2, tree.pos)
+ val exhs = handlers.map { case (sym, kind, handler) => // def genWildcardHandler(sym: Symbol): (Symbol, TypeKind, Context => Context) =
+ val exh = this.newExceptionHandler(sym, kind, tree.pos)
var ctx1 = outerCtx.enterExceptionHandler(exh)
ctx1.addFinalizer(finalizer, finalizerCtx)
loadException(ctx1, exh, tree.pos)
- ctx1 = handler._3(ctx1)
+ ctx1 = handler(ctx1)
// emit finalizer
val ctx2 = emitFinalizer(ctx1)
ctx2.bb.closeWith(JUMP(afterCtx.bb))
@@ -2178,8 +2167,8 @@ abstract class GenICode extends SubComponent {
* but with `NoSymbol` as the exception class. The covered blocks are all blocks of
* the `try { .. } catch { .. }`.
*
- * Also, TryMsil does not enter any Finalizers into the `cleanups', because the
- * CLI takes care of running the finalizer when seeing a `leave' statement inside
+ * Also, TryMsil does not enter any Finalizers into the `cleanups`, because the
+ * CLI takes care of running the finalizer when seeing a `leave` statement inside
* a try / catch.
*/
def TryMsil(body: Context => Context,
@@ -2232,7 +2221,7 @@ abstract class GenICode extends SubComponent {
* Represent a label in the current method code. In order
* to support forward jumps, labels can be created without
 * having a designated target block. They can later be attached
- * by calling `anchor'.
+ * by calling `anchor`.
*/
class Label(val symbol: Symbol) {
var anchored = false
@@ -2264,7 +2253,7 @@ abstract class GenICode extends SubComponent {
* jumps to the given basic block.
*/
def patch(code: Code) {
- val map = toPatch map (i => (i -> patch(i))) toMap;
+ val map = mapFrom(toPatch)(patch)
code.blocks foreach (_ subst map)
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
index 3d3097e..f05def3 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -51,6 +51,14 @@ abstract class ICodeCheckers {
*
* @todo Better checks for <code>MONITOR_ENTER/EXIT</code>
* Better checks for local var initializations
+ *
+ * @todo Iulian says: I think there's some outdated logic in the checker.
+ * The issue with exception handlers being special for least upper
+ * bounds pointed out some refactoring in the lattice class. Maybe
+ * a worthwhile refactoring would be to make the checker use the
+ * DataFlowAnalysis class, and use the lattice trait. In the
+ * implementation of LUB, there's a flag telling if one of the
+ * successors is 'exceptional'. The inliner is using this mechanism.
*/
class ICodeChecker {
import icodes._
@@ -60,8 +68,8 @@ abstract class ICodeCheckers {
var method: IMethod = _
var code: Code = _
- val in: mutable.Map[BasicBlock, TypeStack] = new mutable.HashMap()
- val out: mutable.Map[BasicBlock, TypeStack] = new mutable.HashMap()
+ val in: mutable.Map[BasicBlock, TypeStack] = perRunCaches.newMap()
+ val out: mutable.Map[BasicBlock, TypeStack] = perRunCaches.newMap()
val emptyStack = new TypeStack() {
override def toString = "<empty>"
}
@@ -209,7 +217,13 @@ abstract class ICodeCheckers {
throw new CheckerException(incompatibleString)
}
else {
- val newStack = new TypeStack((s1.types, s2.types).zipped map lub)
+ val newStack: TypeStack = try {
+ new TypeStack((s1.types, s2.types).zipped map lub)
+ } catch {
+ case t: Exception =>
+ checkerDebug(t.toString + ": " + s1.types.toString + " vs " + s2.types.toString)
+ new TypeStack(s1.types)
+ }
if (newStack.isEmpty || s1.types == s2.types) () // not interesting to report
else checkerDebug("Checker created new stack:\n (%s, %s) => %s".format(s1, s2, newStack))
@@ -697,7 +711,7 @@ abstract class ICodeCheckers {
//////////////// Error reporting /////////////////////////
def icodeError(msg: String) {
- ICodeCheckers.this.global.globalError(
+ ICodeCheckers.this.global.warning(
"!! ICode checker fatality in " + method +
"\n at: " + basicBlock.fullString +
"\n error message: " + msg
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
index 06c3ee2..9320108 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
@@ -1,13 +1,8 @@
/* NSC -- new scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-/* NSC -- new scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend
package icode
@@ -53,6 +48,9 @@ abstract class ICodes extends AnyRef
case x => global.abort("Unknown linearizer: " + x)
}
+ def newTextPrinter() =
+ new TextPrinter(new PrintWriter(Console.out, true), new DumpLinearizer)
+
/** Have to be careful because dump calls around, possibly
* re-entering methods which initiated the dump (like foreach
* in BasicBlocks) which leads to the icode output olympics.
@@ -61,31 +59,35 @@ abstract class ICodes extends AnyRef
/** Print all classes and basic blocks. Used for debugging. */
- def dump() {
- if (alreadyDumping) return
+ def dumpClassesAndAbort(msg: String): Nothing = {
+ if (alreadyDumping) global.abort(msg)
else alreadyDumping = true
- val printer = new TextPrinter(new PrintWriter(Console.out, true),
- new DumpLinearizer)
-
+ Console.println(msg)
+ val printer = newTextPrinter()
classes.values foreach printer.printClass
+ global.abort(msg)
+ }
+
+ def dumpMethodAndAbort(m: IMethod, msg: String): Nothing = {
+ Console.println("Fatal bug in inliner while traversing " + m + ": " + msg)
+ m.dump()
+ global.abort("" + m)
}
+ def dumpMethodAndAbort(m: IMethod, b: BasicBlock): Nothing =
+ dumpMethodAndAbort(m, "found open block " + b + " " + b.flagsString)
def checkValid(m: IMethod) {
- // always dicey to iterate over mutable structures
- val bs = m.code.blocks.toList
-
- for (b <- bs ; if !b.closed) {
- // Something is leaving open/empty blocks around (see SI-4840) so
- // let's not kill the deal unless it's nonempty.
- if (b.isEmpty) {
- log("!!! Found open but empty block while inlining " + m + ": removing from block list.")
- m.code removeBlock b
- }
- else {
- Console.println("Fatal bug in inliner: found open block when inlining " + m)
- m.dump
- global.abort("Open block was: " + b + " " + b.flagsString)
+ // always slightly dicey to iterate over mutable structures
+ m foreachBlock { b =>
+ if (!b.closed) {
+ // Something is leaving open/empty blocks around (see SI-4840) so
+ // let's not kill the deal unless it's nonempty.
+ if (b.isEmpty) {
+ log("!!! Found open but empty block while inlining " + m + ": removing from block list.")
+ m.code removeBlock b
+ }
+ else dumpMethodAndAbort(m, b)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
index 9f22a6d..a38eab4 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
@@ -1,5 +1,5 @@
/* NSC -- new scala compiler
- * Copyright 2005 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -14,6 +14,8 @@ import mutable.ListBuffer
trait Linearizers {
self: ICodes =>
+
+ import global.debuglog
import opcodes._
abstract class Linearizer {
@@ -34,7 +36,7 @@ trait Linearizers {
var blocks: List[BasicBlock] = Nil
def linearize(m: IMethod): List[BasicBlock] = {
- val b = m.code.startBlock;
+ val b = m.startBlock;
blocks = Nil;
run {
@@ -104,7 +106,7 @@ trait Linearizers {
def linearize(m: IMethod): List[BasicBlock] = {
blocks = Nil;
- dfs(m.code.startBlock);
+ dfs(m.startBlock);
m.exh foreach (b => dfs(b.startBlock));
blocks.reverse
@@ -148,14 +150,14 @@ trait Linearizers {
added.clear;
m.exh foreach (b => rpo(b.startBlock));
- rpo(m.code.startBlock);
+ rpo(m.startBlock);
// if the start block has predecessors, it won't be the first one
// in the linearization, so we need to enforce it here
- if (m.code.startBlock.predecessors eq Nil)
+ if (m.startBlock.predecessors eq Nil)
blocks
else
- m.code.startBlock :: (blocks.filterNot(_ == m.code.startBlock))
+ m.startBlock :: (blocks.filterNot(_ == m.startBlock))
}
def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
@@ -178,11 +180,14 @@ trait Linearizers {
* Prepend b to the list, if not already scheduled.
* @return Returns true if the block was added.
*/
- def add(b: BasicBlock) =
+ def add(b: BasicBlock) = {
+ debuglog("Linearizer adding block " + b.label)
+
if (!added(b.label)) {
added += b.label
blocks = b :: blocks;
}
+ }
}
/** A 'dump' of the blocks in this method, which does not
@@ -190,7 +195,7 @@ trait Linearizers {
* the last instruction being a jump).
*/
class DumpLinearizer extends Linearizer {
- def linearize(m: IMethod): List[BasicBlock] = m.code.blocks.toList
+ def linearize(m: IMethod): List[BasicBlock] = m.blocks
def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = sys.error("not implemented")
}
@@ -226,7 +231,7 @@ trait Linearizers {
val handlersByCovered = m.exh.groupBy(_.covered)
// number of basic blocks covered by the entire try-catch expression
- def size(covered: collection.immutable.Set[BasicBlock]) = {
+ def size(covered: scala.collection.immutable.Set[BasicBlock]) = {
val hs = handlersByCovered(covered)
covered.size + (hs :\ 0)((h, s) => h.blocks.length + s)
}
@@ -245,7 +250,7 @@ trait Linearizers {
 * @param frozen blocks can't be moved (first block of a method, blocks directly following a try-catch)
*/
def groupBlocks(method: IMethod, blocks: List[BasicBlock], handlers: List[ExceptionHandler], frozen: mutable.HashSet[BasicBlock]) = {
- assert(blocks.head == method.code.startBlock, method)
+ assert(blocks.head == method.startBlock, method)
// blocks before the try, and blocks for the try
val beforeAndTry = new ListBuffer[BasicBlock]()
@@ -279,7 +284,7 @@ trait Linearizers {
handler.startBlock +=: lb
}
- // The first block emitted after a try-catch must be the the one that the try / catch
+ // The first block emitted after a try-catch must be the one that the try / catch
// blocks jump to (because in msil, these jumps cannot be emitted manually)
var firstAfter: Option[BasicBlock] = None
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 630f109..00bcf60 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -1,16 +1,15 @@
/* NSC -- new scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend
package icode
import java.io.PrintWriter
import scala.collection.{ mutable, immutable }
-import mutable.{ HashMap, ListBuffer }
+import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
import symtab.Flags.{ DEFERRED }
trait ReferenceEquality {
@@ -18,28 +17,35 @@ trait ReferenceEquality {
override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
}
-trait Members { self: ICodes =>
+trait Members {
+ self: ICodes =>
+
import global._
+ object NoCode extends Code(null, "NoCode") {
+ override def blocksList: List[BasicBlock] = Nil
+ }
+
/**
* This class represents the intermediate code of a method or
* other multi-block piece of code, like exception handlers.
*/
- class Code(label: String, method: IMethod) {
- def this(method: IMethod) = this(method.symbol.simpleName.toString, method)
-
+ class Code(method: IMethod, name: String) {
+ def this(method: IMethod) = this(method, method.symbol.decodedName.toString.intern)
/** The set of all blocks */
- val blocks: ListBuffer[BasicBlock] = new ListBuffer
+ val blocks = mutable.ListBuffer[BasicBlock]()
/** The start block of the method */
- var startBlock: BasicBlock = null
-
- /** The stack produced by this method */
- var producedStack: TypeStack = null
+ var startBlock: BasicBlock = NoBasicBlock
private var currentLabel: Int = 0
private var _touched = false
+ def blocksList: List[BasicBlock] = blocks.toList
+ def instructions = blocksList flatMap (_.iterator)
+ def blockCount = blocks.size
+ def instructionCount = (blocks map (_.length)).sum
+
def touched = _touched
def touched_=(b: Boolean): Unit = {
if (b)
@@ -71,7 +77,7 @@ trait Members { self: ICodes =>
}
/** This methods returns a string representation of the ICode */
- override def toString() : String = "ICode '" + label + "'";
+ override def toString = "ICode '" + name + "'";
/* Compute a unique new label */
def nextLabel: Int = {
@@ -81,7 +87,7 @@ trait Members { self: ICodes =>
/* Create a new block and append it to the list
*/
- def newBlock: BasicBlock = {
+ def newBlock(): BasicBlock = {
touched = true
val block = new BasicBlock(nextLabel, method);
blocks += block;
@@ -104,7 +110,6 @@ trait Members { self: ICodes =>
var fields: List[IField] = Nil
var methods: List[IMethod] = Nil
var cunit: CompilationUnit = _
- var bootstrapClass: Option[String] = None
def addField(f: IField): this.type = {
fields = f :: fields;
@@ -134,6 +139,8 @@ trait Members { self: ICodes =>
/** Represent a field in ICode */
class IField(val symbol: Symbol) extends IMember { }
+ object NoIMethod extends IMethod(NoSymbol) { }
+
/**
* Represents a method in ICode. Local variables contain
* both locals and parameters, similar to the way the JVM
@@ -145,15 +152,26 @@ trait Members { self: ICodes =>
* finished (GenICode does that).
*/
class IMethod(val symbol: Symbol) extends IMember {
- var code: Code = null
+ var code: Code = NoCode
+
+ def newBlock() = code.newBlock
+ def startBlock = code.startBlock
+ def lastBlock = { assert(blocks.nonEmpty, symbol); blocks.last }
+ def blocks = code.blocksList
+ def linearizedBlocks(lin: Linearizer = self.linearizer): List[BasicBlock] = lin linearize this
+
+ def foreachBlock[U](f: BasicBlock => U): Unit = blocks foreach f
+ def foreachInstr[U](f: Instruction => U): Unit = foreachBlock(_.toList foreach f)
+
var native = false
/** The list of exception handlers, ordered from innermost to outermost. */
var exh: List[ExceptionHandler] = Nil
- var sourceFile: String = _
+ var sourceFile: SourceFile = NoSourceFile
var returnType: TypeKind = _
-
var recursive: Boolean = false
+ var bytecodeHasEHs = false // set by ICodeReader only, used by Inliner to prevent inlining (SI-6188)
+ var bytecodeHasInvokeDynamic = false // set by ICodeReader only, used by Inliner to prevent inlining until we have proper invoke dynamic support
/** local variables and method parameters */
var locals: List[Local] = Nil
@@ -161,18 +179,17 @@ trait Members { self: ICodes =>
/** method parameters */
var params: List[Local] = Nil
- def hasCode = code != null
+ def hasCode = code ne NoCode
def setCode(code: Code): IMethod = {
this.code = code;
this
}
- def addLocal(l: Local): Local =
- locals find (_ == l) getOrElse {
- locals ::= l
- l
- }
+ final def updateRecursive(called: Symbol): Unit = {
+ recursive ||= (called == symbol)
+ }
+ def addLocal(l: Local): Local = findOrElse(locals)(_ == l) { locals ::= l ; l }
def addParam(p: Local): Unit =
if (params contains p) ()
@@ -197,14 +214,22 @@ trait Members { self: ICodes =>
override def toString() = symbol.fullName
+ def matchesSignature(other: IMethod) = {
+ (symbol.name == other.symbol.name) &&
+ (params corresponds other.params)(_.kind == _.kind) &&
+ (returnType == other.returnType)
+ }
+
import opcodes._
def checkLocals(): Unit = {
- def localsSet = code.blocks.flatten collect {
- case LOAD_LOCAL(l) => l
- case STORE_LOCAL(l) => l
- } toSet
+ def localsSet = (code.blocks flatMap { bb =>
+ bb.iterator collect {
+ case LOAD_LOCAL(l) => l
+ case STORE_LOCAL(l) => l
+ }
+ }).toSet
- if (code != null) {
+ if (hasCode) {
log("[checking locals of " + this + "]")
locals filterNot localsSet foreach { l =>
log("Local " + l + " is not declared in " + this)
@@ -218,11 +243,11 @@ trait Members { self: ICodes =>
*
* This method should be most effective after heavy inlining.
*/
- def normalize(): Unit = if (this.code ne null) {
+ def normalize(): Unit = if (this.hasCode) {
val nextBlock: mutable.Map[BasicBlock, BasicBlock] = mutable.HashMap.empty
for (b <- code.blocks.toList
if b.successors.length == 1;
- val succ = b.successors.head;
+ succ = b.successors.head;
if succ ne b;
if succ.predecessors.length == 1;
if succ.predecessors.head eq b;
@@ -238,11 +263,23 @@ trait Members { self: ICodes =>
var succ = bb
do {
succ = nextBlock(succ);
- bb.removeLastInstruction
- succ.toList foreach { i => bb.emit(i, i.pos) }
- code.removeBlock(succ)
+ val lastInstr = bb.lastInstruction
+ /* Ticket SI-5672
+ * Besides removing the control-flow instruction at the end of `bb` (usually a JUMP), we have to pop any values it pushes.
+ * Examples:
+ * `SWITCH` consisting of just the default case, or
+ * `CJUMP(targetBlock, targetBlock, _, _)` ie where success and failure targets coincide (this one consumes two stack values).
+ */
+ val oldTKs = lastInstr.consumedTypes
+ assert(lastInstr.consumed == oldTKs.size, "Someone forgot to override consumedTypes() in " + lastInstr)
+
+ bb.removeLastInstruction
+ for(tk <- oldTKs.reverse) { bb.emit(DROP(tk), lastInstr.pos) }
+ succ.toList foreach { i => bb.emit(i, i.pos) }
+ code.removeBlock(succ)
+ exh foreach { e => e.covered = e.covered - succ }
+
nextBlock -= bb
- exh foreach { e => e.covered = e.covered - succ }
} while (nextBlock.isDefinedAt(succ))
bb.close
} else
@@ -252,9 +289,8 @@ trait Members { self: ICodes =>
}
def dump() {
- val printer = new TextPrinter(new PrintWriter(Console.out, true),
- new DumpLinearizer)
- printer.printMethod(this)
+ Console.println("dumping IMethod(" + symbol + ")")
+ newTextPrinter() printMethod this
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index ba33c42..a3a0edb 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -10,42 +10,53 @@ package backend
package icode
import scala.tools.nsc.ast._
-import scala.tools.nsc.util.{Position,NoPosition}
+import scala.reflect.internal.util.{Position,NoPosition}
/*
A pattern match
- case THIS(clasz) =>
- case STORE_THIS(kind) =>
- case CONSTANT(const) =>
- case LOAD_ARRAY_ITEM(kind) =>
- case LOAD_LOCAL(local) =>
- case LOAD_FIELD(field, isStatic) =>
- case LOAD_MODULE(module) =>
- case STORE_ARRAY_ITEM(kind) =>
- case STORE_LOCAL(local) =>
- case STORE_FIELD(field, isStatic) =>
- case CALL_PRIMITIVE(primitive) =>
- case CALL_METHOD(method, style) =>
- case NEW(kind) =>
- case CREATE_ARRAY(elem, dims) =>
- case IS_INSTANCE(tpe) =>
- case CHECK_CAST(tpe) =>
- case SWITCH(tags, labels) =>
- case JUMP(whereto) =>
- case CJUMP(success, failure, cond, kind) =>
- case CZJUMP(success, failure, cond, kind) =>
- case RETURN(kind) =>
- case THROW(clasz) =>
- case DROP(kind) =>
- case DUP(kind) =>
- case MONITOR_ENTER() =>
- case MONITOR_EXIT() =>
- case BOX(boxType) =>
- case UNBOX(tpe) =>
- case SCOPE_ENTER(lv) =>
- case SCOPE_EXIT(lv) =>
- case LOAD_EXCEPTION(clasz) =>
+ // locals
+ case THIS(clasz) =>
+ case STORE_THIS(kind) =>
+ case LOAD_LOCAL(local) =>
+ case STORE_LOCAL(local) =>
+ case SCOPE_ENTER(lv) =>
+ case SCOPE_EXIT(lv) =>
+ // stack
+ case LOAD_MODULE(module) =>
+ case LOAD_EXCEPTION(clasz) =>
+ case DROP(kind) =>
+ case DUP(kind) =>
+ // constants
+ case CONSTANT(const) =>
+ // arithlogic
+ case CALL_PRIMITIVE(primitive) =>
+ // casts
+ case IS_INSTANCE(tpe) =>
+ case CHECK_CAST(tpe) =>
+ // objs
+ case NEW(kind) =>
+ case MONITOR_ENTER() =>
+ case MONITOR_EXIT() =>
+ case BOX(boxType) =>
+ case UNBOX(tpe) =>
+ // flds
+ case LOAD_FIELD(field, isStatic) =>
+ case STORE_FIELD(field, isStatic) =>
+ // mthds
+ case CALL_METHOD(method, style) =>
+ // arrays
+ case LOAD_ARRAY_ITEM(kind) =>
+ case STORE_ARRAY_ITEM(kind) =>
+ case CREATE_ARRAY(elem, dims) =>
+ // jumps
+ case SWITCH(tags, labels) =>
+ case JUMP(whereto) =>
+ case CJUMP(success, failure, cond, kind) =>
+ case CZJUMP(success, failure, cond, kind) =>
+ // ret
+ case RETURN(kind) =>
+ case THROW(clasz) =>
*/
@@ -58,10 +69,32 @@ import scala.tools.nsc.util.{Position,NoPosition}
trait Opcodes { self: ICodes =>
import global.{Symbol, NoSymbol, Type, Name, Constant};
+ // categories of ICode instructions
+ final val localsCat = 1
+ final val stackCat = 2
+ final val constCat = 3
+ final val arilogCat = 4
+ final val castsCat = 5
+ final val objsCat = 6
+ final val fldsCat = 7
+ final val mthdsCat = 8
+ final val arraysCat = 9
+ final val jumpsCat = 10
+ final val retCat = 11
+
+ private lazy val ObjectReferenceList = ObjectReference :: Nil
+
/** This class represents an instruction of the intermediate code.
* Each case subclass will represent a specific operation.
*/
abstract class Instruction extends Cloneable {
+ // Vlad: I used these for checking the quality of the implementation, and we should regularly run a build with them
+ // enabled. But for production these should definitely be disabled, unless we enjoy getting angry emails from Greg :)
+ //if (!this.isInstanceOf[opcodes.LOAD_EXCEPTION])
+ // assert(consumed == consumedTypes.length)
+ //assert(produced == producedTypes.length)
+
+ def category: Int = 0 // undefined
/** This abstract method returns the number of used elements on the stack */
def consumed : Int = 0
@@ -75,6 +108,7 @@ trait Opcodes { self: ICodes =>
def consumedTypes: List[TypeKind] = Nil
/** This instruction produces these types on top of the stack. */
+ // Vlad: I wonder why we keep producedTypes around -- it looks like an useless thing to have
def producedTypes: List[TypeKind] = Nil
/** This method returns the difference of size of the stack when the instruction is used */
@@ -94,7 +128,7 @@ trait Opcodes { self: ICodes =>
}
/** Clone this instruction. */
- override def clone: Instruction =
+ override def clone(): Instruction =
super.clone.asInstanceOf[Instruction]
}
@@ -117,7 +151,14 @@ trait Opcodes { self: ICodes =>
override def consumed = 0
override def produced = 1
- override def producedTypes = List(REFERENCE(clasz))
+ override def producedTypes =
+ // we're not allowed to have REFERENCE(Array), but what about compiling the Array class? Well, we use object for it.
+ if (clasz != global.definitions.ArrayClass)
+ REFERENCE(clasz) :: Nil
+ else
+ ObjectReference :: Nil
+
+ override def category = localsCat
}
/** Loads a constant on the stack.
@@ -129,7 +170,9 @@ trait Opcodes { self: ICodes =>
override def consumed = 0
override def produced = 1
- override def producedTypes = List(toTypeKind(constant.tpe))
+ override def producedTypes = toTypeKind(constant.tpe) :: Nil
+
+ override def category = constCat
}
/** Loads an element of an array. The array and the index should
@@ -141,8 +184,10 @@ trait Opcodes { self: ICodes =>
override def consumed = 2
override def produced = 1
- override def consumedTypes = List(ARRAY(kind), INT)
- override def producedTypes = List(kind)
+ override def consumedTypes = ARRAY(kind) :: INT :: Nil
+ override def producedTypes = kind :: Nil
+
+ override def category = arraysCat
}
/** Load a local variable on the stack. It can be a method argument.
@@ -153,7 +198,9 @@ trait Opcodes { self: ICodes =>
override def consumed = 0
override def produced = 1
- override def producedTypes = List(local.kind)
+ override def producedTypes = local.kind :: Nil
+
+ override def category = localsCat
}
/** Load a field on the stack. The object to which it refers should be
@@ -169,13 +216,15 @@ trait Opcodes { self: ICodes =>
override def consumed = if (isStatic) 0 else 1
override def produced = 1
- override def consumedTypes = if (isStatic) Nil else List(REFERENCE(field.owner));
- override def producedTypes = List(toTypeKind(field.tpe));
+ override def consumedTypes = if (isStatic) Nil else REFERENCE(field.owner) :: Nil
+ override def producedTypes = toTypeKind(field.tpe) :: Nil
// more precise information about how to load this field
// see #4283
var hostClass: Symbol = field.owner
def setHostClass(cls: Symbol): this.type = { hostClass = cls; this }
+
+ override def category = fldsCat
}
case class LOAD_MODULE(module: Symbol) extends Instruction {
@@ -186,7 +235,9 @@ trait Opcodes { self: ICodes =>
override def consumed = 0
override def produced = 1
- override def producedTypes = List(REFERENCE(module))
+ override def producedTypes = REFERENCE(module) :: Nil
+
+ override def category = stackCat
}
/** Store a value into an array at a specified index.
@@ -197,7 +248,9 @@ trait Opcodes { self: ICodes =>
override def consumed = 3
override def produced = 0
- override def consumedTypes = List(ARRAY(kind), INT, kind)
+ override def consumedTypes = ARRAY(kind) :: INT :: kind :: Nil
+
+ override def category = arraysCat
}
/** Store a value into a local variable. It can be an argument.
@@ -208,7 +261,9 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
- override def consumedTypes = List(local.kind)
+ override def consumedTypes = local.kind :: Nil
+
+ override def category = localsCat
}
/** Store a value into a field.
@@ -225,9 +280,11 @@ trait Opcodes { self: ICodes =>
override def consumedTypes =
if (isStatic)
- List(toTypeKind(field.tpe))
+ toTypeKind(field.tpe) :: Nil
else
- List(REFERENCE(field.owner), toTypeKind(field.tpe));
+ REFERENCE(field.owner) :: toTypeKind(field.tpe) :: Nil;
+
+ override def category = fldsCat
}
/** Store a value into the 'this' pointer.
@@ -237,7 +294,8 @@ trait Opcodes { self: ICodes =>
case class STORE_THIS(kind: TypeKind) extends Instruction {
override def consumed = 1
override def produced = 0
- override def consumedTypes = List(kind)
+ override def consumedTypes = kind :: Nil
+ override def category = localsCat
}
/** Call a primitive function.
@@ -263,35 +321,37 @@ trait Opcodes { self: ICodes =>
override def produced = 1
override def consumedTypes = primitive match {
- case Negation(kind) => List(kind)
- case Test(_, kind, true) => List(kind)
- case Test(_, kind, false) => List(kind, kind)
- case Comparison(_, kind) => List(kind, kind)
- case Arithmetic(NOT, kind) => List(kind)
- case Arithmetic(_, kind) => List(kind, kind)
- case Logical(_, kind) => List(kind, kind)
- case Shift(_, kind) => List(kind, INT)
- case Conversion(from, _) => List(from)
- case ArrayLength(kind) => List(ARRAY(kind))
- case StringConcat(kind) => List(ConcatClass, kind)
+ case Negation(kind) => kind :: Nil
+ case Test(_, kind, true) => kind :: Nil
+ case Test(_, kind, false) => kind :: kind :: Nil
+ case Comparison(_, kind) => kind :: kind :: Nil
+ case Arithmetic(NOT, kind) => kind :: Nil
+ case Arithmetic(_, kind) => kind :: kind :: Nil
+ case Logical(_, kind) => kind :: kind :: Nil
+ case Shift(_, kind) => kind :: INT :: Nil
+ case Conversion(from, _) => from :: Nil
+ case ArrayLength(kind) => ARRAY(kind) :: Nil
+ case StringConcat(kind) => ConcatClass :: kind :: Nil
case StartConcat => Nil
- case EndConcat => List(ConcatClass)
+ case EndConcat => ConcatClass :: Nil
}
override def producedTypes = primitive match {
- case Negation(kind) => List(kind)
- case Test(_, _, true) => List(BOOL)
- case Test(_, _, false) => List(BOOL)
- case Comparison(_, _) => List(INT)
- case Arithmetic(_, kind) => List(kind)
- case Logical(_, kind) => List(kind)
- case Shift(_, kind) => List(kind)
- case Conversion(_, to) => List(to)
- case ArrayLength(_) => List(INT)
- case StringConcat(_) => List(ConcatClass)
- case StartConcat => List(ConcatClass)
- case EndConcat => List(REFERENCE(global.definitions.StringClass))
+ case Negation(kind) => kind :: Nil
+ case Test(_, _, true) => BOOL :: Nil
+ case Test(_, _, false) => BOOL :: Nil
+ case Comparison(_, _) => INT :: Nil
+ case Arithmetic(_, kind) => kind :: Nil
+ case Logical(_, kind) => kind :: Nil
+ case Shift(_, kind) => kind :: Nil
+ case Conversion(_, to) => to :: Nil
+ case ArrayLength(_) => INT :: Nil
+ case StringConcat(_) => ConcatClass :: Nil
+ case StartConcat => ConcatClass :: Nil
+ case EndConcat => REFERENCE(global.definitions.StringClass) :: Nil
}
+
+ override def category = arilogCat
}
/** This class represents a CALL_METHOD instruction
@@ -334,33 +394,59 @@ trait Opcodes { self: ICodes =>
else args
}
- override def produced =
- if (producedType == UNIT || method.isConstructor) 0
- else 1
-
- private def producedType: TypeKind = toTypeKind(method.info.resultType)
- override def producedTypes =
- if (produced == 0) Nil
- else List(producedType)
+ private val producedList = toTypeKind(method.info.resultType) match {
+ case UNIT => Nil
+ case _ if method.isConstructor => Nil
+ case kind => kind :: Nil
+ }
+ override def produced = producedList.size
+ override def producedTypes = producedList
/** object identity is equality for CALL_METHODs. Needed for
* being able to store such instructions into maps, when more
* than one CALL_METHOD to the same method might exist.
*/
+
+ override def category = mthdsCat
+ }
+
+ /**
+ * A place holder entry that allows us to parse class files with invoke dynamic
+ * instructions. Because the compiler doesn't yet really understand the
+ * behavior of invokeDynamic, this op acts as a poison pill. Any attempt to analyze
+ * this instruction will cause a failure. The only optimization that
+ * should ever look at non-Scala generated icode is the inliner, and it
+ * has been modified to not examine any method with invokeDynamic
+ * instructions. So if this poison pill ever causes problems then
+ * there's been a serious misunderstanding
+ */
+ // TODO do the real thing
+ case class INVOKE_DYNAMIC(poolEntry: Char) extends Instruction {
+ private def error = sys.error("INVOKE_DYNAMIC is not fully implemented and should not be analyzed")
+ override def consumed = error
+ override def produced = error
+ override def producedTypes = error
+ override def category = error
}
case class BOX(boxType: TypeKind) extends Instruction {
+ assert(boxType.isValueType && (boxType ne UNIT)) // documentation
override def toString(): String = "BOX " + boxType
override def consumed = 1
override def consumedTypes = boxType :: Nil
override def produced = 1
+ override def producedTypes = BOXED(boxType) :: Nil
+ override def category = objsCat
}
case class UNBOX(boxType: TypeKind) extends Instruction {
+ assert(boxType.isValueType && !boxType.isInstanceOf[BOXED] && (boxType ne UNIT)) // documentation
override def toString(): String = "UNBOX " + boxType
override def consumed = 1
- override def consumedTypes = ObjectReference :: Nil
+ override def consumedTypes = ObjectReferenceList
override def produced = 1
+ override def producedTypes = boxType :: Nil
+ override def category = objsCat
}
/** Create a new instance of a class through the specified constructor
@@ -373,9 +459,12 @@ trait Opcodes { self: ICodes =>
override def consumed = 0;
override def produced = 1;
+ override def producedTypes = kind :: Nil
/** The corresponding constructor call. */
var init: CALL_METHOD = _
+
+ override def category = objsCat
}
@@ -390,6 +479,9 @@ trait Opcodes { self: ICodes =>
override def consumed = dims;
override def consumedTypes = List.fill(dims)(INT)
override def produced = 1;
+ override def producedTypes = ARRAY(elem) :: Nil
+
+ override def category = arraysCat
}
/** This class represents a IS_INSTANCE instruction
@@ -401,8 +493,11 @@ trait Opcodes { self: ICodes =>
override def toString(): String ="IS_INSTANCE "+typ
override def consumed = 1
- override def consumedTypes = ObjectReference :: Nil
override def produced = 1
+ override def consumedTypes = ObjectReferenceList
+ override def producedTypes = BOOL :: Nil
+
+ override def category = castsCat
}
/** This class represents a CHECK_CAST instruction
@@ -415,8 +510,10 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 1
- override val consumedTypes = List(ObjectReference)
- override def producedTypes = List(typ)
+ override def consumedTypes = ObjectReferenceList
+ override def producedTypes = typ :: Nil
+
+ override def category = castsCat
}
/** This class represents a SWITCH instruction
@@ -433,6 +530,12 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+
+ override def consumedTypes = INT :: Nil
+
+ def flatTagsCount: Int = { var acc = 0; var rest = tags; while(rest.nonEmpty) { acc += rest.head.length; rest = rest.tail }; acc } // a one-liner
+
+ override def category = jumpsCat
}
/** This class represents a JUMP instruction
@@ -445,6 +548,8 @@ trait Opcodes { self: ICodes =>
override def consumed = 0
override def produced = 0
+
+ override def category = jumpsCat
}
/** This class represents a CJUMP instruction
@@ -466,6 +571,10 @@ trait Opcodes { self: ICodes =>
override def consumed = 2
override def produced = 0
+
+ override def consumedTypes = kind :: kind :: Nil
+
+ override def category = jumpsCat
}
/** This class represents a CZJUMP instruction
@@ -485,6 +594,9 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+
+ override def consumedTypes = kind :: Nil
+ override def category = jumpsCat
}
@@ -495,6 +607,10 @@ trait Opcodes { self: ICodes =>
case class RETURN(kind: TypeKind) extends Instruction {
override def consumed = if (kind == UNIT) 0 else 1
override def produced = 0
+
+ override def consumedTypes = if (kind == UNIT) Nil else kind :: Nil
+
+ override def category = retCat
}
/** This class represents a THROW instruction
@@ -510,6 +626,10 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+
+ override def consumedTypes = toTypeKind(clasz.tpe) :: Nil
+
+ override def category = retCat
}
/** This class represents a DROP instruction
@@ -522,6 +642,10 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+
+ override def consumedTypes = typ :: Nil
+
+ override def category = stackCat
}
/** This class represents a DUP instruction
@@ -531,6 +655,9 @@ trait Opcodes { self: ICodes =>
case class DUP (typ: TypeKind) extends Instruction {
override def consumed = 1
override def produced = 2
+ override def consumedTypes = typ :: Nil
+ override def producedTypes = typ :: typ :: Nil
+ override def category = stackCat
}
/** This class represents a MONITOR_ENTER instruction
@@ -543,6 +670,10 @@ trait Opcodes { self: ICodes =>
override def consumed = 1
override def produced = 0
+
+ override def consumedTypes = ObjectReference :: Nil
+
+ override def category = objsCat
}
/** This class represents a MONITOR_EXIT instruction
@@ -555,6 +686,10 @@ trait Opcodes { self: ICodes =>
override def consumed = 1;
override def produced = 0;
+
+ override def consumedTypes = ObjectReference :: Nil
+
+ override def category = objsCat
}
/** A local variable becomes visible at this point in code.
@@ -565,6 +700,7 @@ trait Opcodes { self: ICodes =>
override def toString(): String = "SCOPE_ENTER " + lv
override def consumed = 0
override def produced = 0
+ override def category = localsCat
}
/** A local variable leaves its scope at this point in code.
@@ -575,6 +711,7 @@ trait Opcodes { self: ICodes =>
override def toString(): String = "SCOPE_EXIT " + lv
override def consumed = 0
override def produced = 0
+ override def category = localsCat
}
/** Fake instruction. It designates the VM who pushes an exception
@@ -586,57 +723,55 @@ trait Opcodes { self: ICodes =>
override def consumed = sys.error("LOAD_EXCEPTION does clean the whole stack, no idea how many things it consumes!")
override def produced = 1
override def producedTypes = REFERENCE(clasz) :: Nil
+ override def category = stackCat
}
/** This class represents a method invocation style. */
sealed abstract class InvokeStyle {
/** Is this a dynamic method call? */
- def isDynamic: Boolean = this match {
- case Dynamic => true
- case _ => false
- }
+ def isDynamic: Boolean = false
/** Is this a static method call? */
- def isStatic: Boolean = this match {
- case Static(_) => true
- case _ => false
- }
+ def isStatic: Boolean = false
- def isSuper: Boolean = this match {
- case SuperCall(_) => true
- case _ => false
- }
+ def isSuper: Boolean = false
/** Is this an instance method call? */
- def hasInstance: Boolean = this match {
- case Static(false) => false
- case _ => true
- }
+ def hasInstance: Boolean = true
/** Returns a string representation of this style. */
- override def toString(): String = this match {
- case Dynamic => "dynamic"
- case InvokeDynamic => "invoke-dynamic"
- case Static(false) => "static-class"
- case Static(true) => "static-instance"
- case SuperCall(mix) => "super(" + mix + ")"
- }
+ override def toString(): String
}
- /** Virtual calls */
- case object Dynamic extends InvokeStyle
-
- /** InvokeDynamic a la JSR 292 (experimental). */
- case object InvokeDynamic extends InvokeStyle
+ /** Virtual calls.
+ * On JVM, translated to either `invokeinterface` or `invokevirtual`.
+ */
+ case object Dynamic extends InvokeStyle {
+ override def isDynamic = true
+ override def toString(): String = "dynamic"
+ }
/**
- * Special invoke. Static(true) is used for calls to private
- * members.
+ * Special invoke:
+ * Static(true) is used for calls to private members, ie `invokespecial` on JVM.
+ * Static(false) is used for calls to class-level instance-less static methods, ie `invokestatic` on JVM.
*/
- case class Static(onInstance: Boolean) extends InvokeStyle
+ case class Static(onInstance: Boolean) extends InvokeStyle {
+ override def isStatic = true
+ override def hasInstance = onInstance
+ override def toString(): String = {
+ if(onInstance) "static-instance"
+ else "static-class"
+ }
+ }
- /** Call through super[mix]. */
- case class SuperCall(mix: Name) extends InvokeStyle
+ /** Call through super[mix].
+ * On JVM, translated to `invokespecial`.
+ */
+ case class SuperCall(mix: Name) extends InvokeStyle {
+ override def isSuper = true
+ override def toString(): String = { "super(" + mix + ")" }
+ }
// CLR backend
@@ -648,8 +783,10 @@ trait Opcodes { self: ICodes =>
override def consumed = 0
override def produced = 1
- override def producedTypes = List(msil_mgdptr(local.kind))
- }
+ override def producedTypes = msil_mgdptr(local.kind) :: Nil
+
+ override def category = localsCat
+ }
case class CIL_LOAD_FIELD_ADDRESS(field: Symbol, isStatic: Boolean) extends Instruction {
/** Returns a string representation of this instruction */
@@ -659,9 +796,11 @@ trait Opcodes { self: ICodes =>
override def consumed = if (isStatic) 0 else 1
override def produced = 1
- override def consumedTypes = if (isStatic) Nil else List(REFERENCE(field.owner));
- override def producedTypes = List(msil_mgdptr(REFERENCE(field.owner)));
-}
+ override def consumedTypes = if (isStatic) Nil else REFERENCE(field.owner) :: Nil;
+ override def producedTypes = msil_mgdptr(REFERENCE(field.owner)) :: Nil;
+
+ override def category = fldsCat
+ }
case class CIL_LOAD_ARRAY_ITEM_ADDRESS(kind: TypeKind) extends Instruction {
/** Returns a string representation of this instruction */
@@ -670,23 +809,27 @@ trait Opcodes { self: ICodes =>
override def consumed = 2
override def produced = 1
- override def consumedTypes = List(ARRAY(kind), INT)
- override def producedTypes = List(msil_mgdptr(kind))
+ override def consumedTypes = ARRAY(kind) :: INT :: Nil
+ override def producedTypes = msil_mgdptr(kind) :: Nil
+
+ override def category = arraysCat
}
case class CIL_UNBOX(valueType: TypeKind) extends Instruction {
override def toString(): String = "CIL_UNBOX " + valueType
override def consumed = 1
- override def consumedTypes = ObjectReference :: Nil // actually consumes a 'boxed valueType'
+ override def consumedTypes = ObjectReferenceList // actually consumes a 'boxed valueType'
override def produced = 1
- override def producedTypes = List(msil_mgdptr(valueType))
+ override def producedTypes = msil_mgdptr(valueType) :: Nil
+ override def category = objsCat
}
case class CIL_INITOBJ(valueType: TypeKind) extends Instruction {
override def toString(): String = "CIL_INITOBJ " + valueType
override def consumed = 1
- override def consumedTypes = ObjectReference :: Nil // actually consumes a managed pointer
+ override def consumedTypes = ObjectReferenceList // actually consumes a managed pointer
override def produced = 0
+ override def category = objsCat
}
case class CIL_NEWOBJ(method: Symbol) extends Instruction {
@@ -695,8 +838,8 @@ trait Opcodes { self: ICodes =>
override def consumed = method.tpe.paramTypes.length
override def consumedTypes = method.tpe.paramTypes map toTypeKind
override def produced = 1
- override def producedTypes = List(toTypeKind(method.tpe.resultType))
+ override def producedTypes = toTypeKind(method.tpe.resultType) :: Nil
+ override def category = objsCat
}
-
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
index 37fff0e..c857904 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
@@ -1,5 +1,5 @@
/* NSC -- new scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -120,47 +120,69 @@ trait Primitives { self: ICodes =>
/** This class represents a test operation. */
- class TestOp {
+ sealed abstract class TestOp {
/** Returns the negation of this operation. */
- def negate(): TestOp = this match {
- case EQ => NE
- case NE => EQ
- case LT => GE
- case GE => LT
- case LE => GT
- case GT => LE
- case _ => throw new RuntimeException("TestOp unknown case")
- }
+ def negate(): TestOp
/** Returns a string representation of this operation. */
- override def toString(): String = this match {
- case EQ => "EQ"
- case NE => "NE"
- case LT => "LT"
- case GE => "GE"
- case LE => "LE"
- case GT => "GT"
- case _ => throw new RuntimeException("TestOp unknown case")
- }
+ override def toString(): String
+
+ /** used only from GenASM */
+ def opcodeIF(): Int
+
+ /** used only from GenASM */
+ def opcodeIFICMP(): Int
+
}
+
/** An equality test */
- case object EQ extends TestOp
+ case object EQ extends TestOp {
+ def negate() = NE
+ override def toString() = "EQ"
+ override def opcodeIF() = scala.tools.asm.Opcodes.IFEQ
+ override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPEQ
+ }
/** A non-equality test */
- case object NE extends TestOp
+ case object NE extends TestOp {
+ def negate() = EQ
+ override def toString() = "NE"
+ override def opcodeIF() = scala.tools.asm.Opcodes.IFNE
+ override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPNE
+ }
/** A less-than test */
- case object LT extends TestOp
+ case object LT extends TestOp {
+ def negate() = GE
+ override def toString() = "LT"
+ override def opcodeIF() = scala.tools.asm.Opcodes.IFLT
+ override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPLT
+ }
/** A greater-than-or-equal test */
- case object GE extends TestOp
+ case object GE extends TestOp {
+ def negate() = LT
+ override def toString() = "GE"
+ override def opcodeIF() = scala.tools.asm.Opcodes.IFGE
+ override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPGE
+ }
/** A less-than-or-equal test */
- case object LE extends TestOp
+ case object LE extends TestOp {
+ def negate() = GT
+ override def toString() = "LE"
+ override def opcodeIF() = scala.tools.asm.Opcodes.IFLE
+ override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPLE
+ }
/** A greater-than test */
- case object GT extends TestOp
+ case object GT extends TestOp {
+ def negate() = LE
+ override def toString() = "GT"
+ override def opcodeIF() = scala.tools.asm.Opcodes.IFGT
+ override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPGT
+ }
/** This class represents an arithmetic operation. */
class ArithmeticOp {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
index 958a708..6cac641 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
@@ -1,17 +1,15 @@
/* NSC -- new scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend
package icode
import java.io.PrintWriter
-
import scala.tools.nsc.symtab.Flags
-import scala.tools.nsc.util.Position
+import scala.reflect.internal.util.Position
trait Printers { self: ICodes =>
import global._
@@ -84,7 +82,7 @@ trait Printers { self: ICodes =>
if (!m.isAbstractMethod) {
println(" {")
println("locals: " + m.locals.mkString("", ", ", ""))
- println("startBlock: " + m.code.startBlock)
+ println("startBlock: " + m.startBlock)
println("blocks: " + m.code.blocks.mkString("[", ",", "]"))
println
lin.linearize(m) foreach printBlock
@@ -105,7 +103,7 @@ trait Printers { self: ICodes =>
def printExceptionHandler(e: ExceptionHandler) {
indent;
- println("catch (" + e.cls.simpleName + ") in " + e.covered + " starting at: " + e.startBlock);
+ println("catch (" + e.cls.simpleName + ") in " + e.covered.toSeq.sortBy(_.label) + " starting at: " + e.startBlock);
println("consisting of blocks: " + e.blocks);
undent;
println("with finalizer: " + e.finalizer);
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
index 02302be..e73015c 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -18,7 +18,7 @@ trait Repository {
import global._
import icodes._
- val loaded: mutable.Map[Symbol, IClass] = new mutable.HashMap
+ val loaded: mutable.Map[Symbol, IClass] = perRunCaches.newMap()
/** Is the given class available as icode? */
def available(sym: Symbol) = classes.contains(sym) || loaded.contains(sym)
@@ -38,19 +38,21 @@ trait Repository {
}
/** Load bytecode for given symbol. */
- def load(sym: Symbol) {
+ def load(sym: Symbol): Boolean = {
try {
val (c1, c2) = icodeReader.readClass(sym)
- assert(c1.symbol == sym || c2.symbol == sym,
- "c1.symbol = %s, c2.symbol = %s, sym = %s".format(c1.symbol, c2.symbol, sym))
+ assert(c1.symbol == sym || c2.symbol == sym, "c1.symbol = %s, c2.symbol = %s, sym = %s".format(c1.symbol, c2.symbol, sym))
loaded += (c1.symbol -> c1)
loaded += (c2.symbol -> c2)
+
+ true
} catch {
case e: Throwable => // possible exceptions are MissingRequirementError, IOException and TypeError -> no better common supertype
log("Failed to load %s. [%s]".format(sym.fullName, e.getMessage))
- if (settings.debug.value)
- e.printStackTrace
+ if (settings.debug.value) { e.printStackTrace }
+
+ false
}
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
index 952949f..4f8fda8 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -26,7 +26,6 @@ package icode
trait TypeKinds { self: ICodes =>
import global._
import definitions.{ ArrayClass, AnyRefClass, ObjectClass, NullClass, NothingClass, arrayType }
- import icodes.{ checkerDebug, NothingReference, NullReference }
/** A map from scala primitive Types to ICode TypeKinds */
lazy val primitiveTypeMap: Map[Symbol, TypeKind] = {
@@ -45,7 +44,7 @@ trait TypeKinds { self: ICodes =>
}
/** Reverse map for toType */
private lazy val reversePrimitiveMap: Map[TypeKind, Symbol] =
- primitiveTypeMap map (_.swap) toMap
+ (primitiveTypeMap map (_.swap)).toMap
/** This class represents a type kind. Type kinds
* represent the types that the VM know (or the ICode
@@ -75,22 +74,19 @@ trait TypeKinds { self: ICodes =>
case _ => false
}
- /** On the JVM, these types are like Ints for the
- * purposes of calculating the lub.
+ /** On the JVM,
+ * BOOL, BYTE, CHAR, SHORT, and INT
+ * are like Ints for the purposes of calculating the lub.
*/
- def isIntSizedType: Boolean = this match {
- case BOOL | CHAR | BYTE | SHORT | INT => true
- case _ => false
- }
- def isIntegralType: Boolean = this match {
- case BYTE | SHORT | INT | LONG | CHAR => true
- case _ => false
- }
- def isRealType: Boolean = this match {
- case FLOAT | DOUBLE => true
- case _ => false
- }
- def isNumericType: Boolean = isIntegralType | isRealType
+ def isIntSizedType: Boolean = false
+
+ /** On the JVM, similar to isIntSizedType except that BOOL isn't integral while LONG is. */
+ def isIntegralType: Boolean = false
+
+ /** On the JVM, FLOAT and DOUBLE. */
+ def isRealType: Boolean = false
+
+ final def isNumericType: Boolean = isIntegralType | isRealType
/** Simple subtyping check */
def <:<(other: TypeKind): Boolean = (this eq other) || (this match {
@@ -98,11 +94,8 @@ trait TypeKinds { self: ICodes =>
case _ => this eq other
})
- /** Is this type a category 2 type in JVM terms? */
- def isWideType: Boolean = this match {
- case DOUBLE | LONG => true
- case _ => false
- }
+ /** Is this type a category 2 type in JVM terms? (ie, is it LONG or DOUBLE?) */
+ def isWideType: Boolean = false
/** The number of dimensions for array types. */
def dimensions: Int = 0
@@ -146,17 +139,13 @@ trait TypeKinds { self: ICodes =>
* Here we make the adjustment by rewinding to a pre-erasure state and
* sifting through the parents for a class type.
*/
- def lub0(tk1: TypeKind, tk2: TypeKind): Type = atPhase(currentRun.uncurryPhase) {
+ def lub0(tk1: TypeKind, tk2: TypeKind): Type = beforeUncurry {
import definitions._
val tp = global.lub(List(tk1.toType, tk2.toType))
- val (front, rest) = tp.parents span (_.typeSymbol.hasTraitFlag)
+ val (front, rest) = tp.parents span (_.typeSymbol.isTrait)
- if (front.isEmpty) tp
- else if (rest.isEmpty) front.head // all parents are interfaces
- else rest.head match {
- case AnyRefClass | ObjectClass => tp
- case x => x
- }
+ if (front.isEmpty || rest.isEmpty || rest.head.typeSymbol == ObjectClass) tp
+ else rest.head
}
def isIntLub = (
@@ -187,6 +176,7 @@ trait TypeKinds { self: ICodes =>
/** A boolean value */
case object BOOL extends ValueTypeKind {
+ override def isIntSizedType = true
def maxType(other: TypeKind) = other match {
case BOOL | REFERENCE(NothingClass) => BOOL
case _ => uncomparable(other)
@@ -200,6 +190,8 @@ trait TypeKinds { self: ICodes =>
/** A 1-byte signed integer */
case object BYTE extends ValueTypeKind {
+ override def isIntSizedType = true
+ override def isIntegralType = true
def maxType(other: TypeKind) = {
if (other == BYTE || other.isNothingType) BYTE
else if (other == CHAR) INT
@@ -210,6 +202,8 @@ trait TypeKinds { self: ICodes =>
/** A 2-byte signed integer */
case object SHORT extends ValueTypeKind {
+ override def isIntSizedType = true
+ override def isIntegralType = true
override def maxType(other: TypeKind) = other match {
case BYTE | SHORT | REFERENCE(NothingClass) => SHORT
case CHAR => INT
@@ -220,6 +214,8 @@ trait TypeKinds { self: ICodes =>
/** A 2-byte UNSIGNED integer */
case object CHAR extends ValueTypeKind {
+ override def isIntSizedType = true
+ override def isIntegralType = true
override def maxType(other: TypeKind) = other match {
case CHAR | REFERENCE(NothingClass) => CHAR
case BYTE | SHORT => INT
@@ -230,6 +226,8 @@ trait TypeKinds { self: ICodes =>
/** A 4-byte signed integer */
case object INT extends ValueTypeKind {
+ override def isIntSizedType = true
+ override def isIntegralType = true
override def maxType(other: TypeKind) = other match {
case BYTE | SHORT | CHAR | INT | REFERENCE(NothingClass) => INT
case LONG | FLOAT | DOUBLE => other
@@ -239,6 +237,8 @@ trait TypeKinds { self: ICodes =>
/** An 8-byte signed integer */
case object LONG extends ValueTypeKind {
+ override def isIntegralType = true
+ override def isWideType = true
override def maxType(other: TypeKind): TypeKind =
if (other.isIntegralType || other.isNothingType) LONG
else if (other.isRealType) DOUBLE
@@ -247,6 +247,7 @@ trait TypeKinds { self: ICodes =>
/** A 4-byte floating point number */
case object FLOAT extends ValueTypeKind {
+ override def isRealType = true
override def maxType(other: TypeKind): TypeKind =
if (other == DOUBLE) DOUBLE
else if (other.isNumericType || other.isNothingType) FLOAT
@@ -255,6 +256,8 @@ trait TypeKinds { self: ICodes =>
/** An 8-byte floating point number */
case object DOUBLE extends ValueTypeKind {
+ override def isRealType = true
+ override def isWideType = true
override def maxType(other: TypeKind): TypeKind =
if (other.isNumericType || other.isNothingType) DOUBLE
else uncomparable(other)
@@ -271,7 +274,7 @@ trait TypeKinds { self: ICodes =>
"REFERENCE to NoSymbol not allowed!")
/**
- * Approximate `lub'. The common type of two references is
+ * Approximate `lub`. The common type of two references is
* always AnyRef. For 'real' least upper bound wrt to subclassing
* use method 'lub'.
*/
@@ -307,7 +310,7 @@ trait TypeKinds { self: ICodes =>
}
/**
- * Approximate `lub'. The common type of two references is
+ * Approximate `lub`. The common type of two references is
* always AnyRef. For 'real' least upper bound wrt to subclassing
* use method 'lub'.
*/
@@ -352,7 +355,7 @@ trait TypeKinds { self: ICodes =>
override def toString = "ConcatClass"
/**
- * Approximate `lub'. The common type of two references is
+ * Approximate `lub`. The common type of two references is
* always AnyRef. For 'real' least upper bound wrt to subclassing
* use method 'lub'.
*/
@@ -380,12 +383,16 @@ trait TypeKinds { self: ICodes =>
case TypeRef(_, sym, args) => primitiveOrClassType(sym, args)
case ClassInfoType(_, _, ArrayClass) => abort("ClassInfoType to ArrayClass!")
case ClassInfoType(_, _, sym) => primitiveOrRefType(sym)
+
+ // !!! Iulian says types which make no sense after erasure should not reach here,
+ // which includes the ExistentialType, AnnotatedType, RefinedType. I don't know
+ // if the first two cases exist because they do or as a defensive measure, but
+ // at the time I added it, RefinedTypes were indeed reaching here.
case ExistentialType(_, t) => toTypeKind(t)
case AnnotatedType(_, t, _) => toTypeKind(t)
- // PP to ID: I added RefinedType here, is this OK or should they never be
- // allowed to reach here?
case RefinedType(parents, _) => parents map toTypeKind reduceLeft lub
- // bq: useful hack when wildcard types come here
+ // For sure WildcardTypes shouldn't reach here either, but when
+ // debugging such situations this may come in handy.
// case WildcardType => REFERENCE(ObjectClass)
case norm => abort(
"Unknown type: %s, %s [%s, %s] TypeRef? %s".format(
@@ -413,7 +420,7 @@ trait TypeKinds { self: ICodes =>
// between "object PackratParsers$class" and "trait PackratParsers"
if (sym.isImplClass) {
// pos/spec-List.scala is the sole failure if we don't check for NoSymbol
- val traitSym = sym.owner.info.decl(nme.interfaceName(sym.name))
+ val traitSym = sym.owner.info.decl(tpnme.interfaceName(sym.name))
if (traitSym != NoSymbol)
return REFERENCE(traitSym)
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
index 45ab7ae..23d3d05 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -22,6 +22,8 @@ trait TypeStacks {
*/
type Rep = List[TypeKind]
+ object NoTypeStack extends TypeStack(Nil) { }
+
class TypeStack(var types: Rep) {
if (types.nonEmpty)
checkerDebug("Created " + this)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index e1e9360..53111d0 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -1,13 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend.icode.analysis
-import scala.collection.mutable.{ Map, HashMap }
+import scala.collection.{ mutable, immutable }
/** A modified copy-propagation like analysis. It
* is augmented with a record-like value which is used
@@ -30,7 +29,7 @@ abstract class CopyPropagation {
abstract class Value {
def isRecord = false
}
- case class Record(cls: Symbol, bindings: Map[Symbol, Value]) extends Value {
+ case class Record(cls: Symbol, bindings: mutable.Map[Symbol, Value]) extends Value {
override def isRecord = true
}
/** The value of some location in memory. */
@@ -46,13 +45,13 @@ abstract class CopyPropagation {
case object Unknown extends Value
/** The bottom record. */
- object AllRecords extends Record(NoSymbol, new HashMap[Symbol, Value])
+ object AllRecords extends Record(NoSymbol, mutable.HashMap[Symbol, Value]())
/** The lattice for this analysis. */
object copyLattice extends SemiLattice {
- type Bindings = Map[Location, Value]
+ type Bindings = mutable.Map[Location, Value]
- def emptyBinding = new HashMap[Location, Value]()
+ def emptyBinding = mutable.HashMap[Location, Value]()
class State(val bindings: Bindings, var stack: List[Value]) {
@@ -141,7 +140,7 @@ abstract class CopyPropagation {
"\nBindings: " + bindings + "\nStack: " + stack;
def dup: State = {
- val b: Bindings = new HashMap()
+ val b: Bindings = mutable.HashMap()
b ++= bindings
new State(b, stack)
}
@@ -176,7 +175,7 @@ abstract class CopyPropagation {
if (v1 == v2) v1 else Unknown
}
*/
- val resBindings = new HashMap[Location, Value]
+ val resBindings = mutable.HashMap[Location, Value]()
for ((k, v) <- a.bindings if b.bindings.isDefinedAt(k) && v == b.bindings(k))
resBindings += (k -> v);
@@ -195,38 +194,38 @@ abstract class CopyPropagation {
this.method = m
init {
- worklist += m.code.startBlock
+ worklist += m.startBlock
worklist ++= (m.exh map (_.startBlock))
- m.code.blocks.foreach { b =>
+ m foreachBlock { b =>
in(b) = lattice.bottom
out(b) = lattice.bottom
- assert(out.contains(b))
- log("Added point: " + b)
+ assert(out.contains(b), out)
+ debuglog("CopyAnalysis added point: " + b)
}
m.exh foreach { e =>
in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack);
}
// first block is special: it's not bottom, but a precisely defined state with no bindings
- in(m.code.startBlock) = new lattice.State(lattice.emptyBinding, Nil);
+ in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil);
}
}
override def run() {
forwardAnalysis(blockTransfer)
if (settings.debug.value) {
- linearizer.linearize(method).foreach(b => if (b != method.code.startBlock)
+ linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(in(b) != lattice.bottom,
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
}
}
def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem =
- b.foldLeft(in)(interpret)
+ b.iterator.foldLeft(in)(interpret)
import opcodes._
- private def retain[A, B](map: Map[A, B])(p: (A, B) => Boolean) = {
+ private def retain[A, B](map: mutable.Map[A, B])(p: (A, B) => Boolean) = {
for ((k, v) <- map ; if !p(k, v)) map -= k
map
}
@@ -234,12 +233,7 @@ abstract class CopyPropagation {
/** Abstract interpretation for one instruction. */
def interpret(in: copyLattice.Elem, i: Instruction): copyLattice.Elem = {
var out = in.dup
-
- if (settings.debug.value) {
- log("- " + i)
- log("in: " + in)
- log("\n")
- }
+ debuglog("- " + i + "\nin: " + in + "\n")
i match {
case THIS(_) =>
@@ -327,7 +321,7 @@ abstract class CopyPropagation {
out.stack = Unknown :: out.stack.drop(i.consumed)
case CALL_METHOD(method, style) => style match {
- case Dynamic | InvokeDynamic =>
+ case Dynamic =>
out = simulateCall(in, method, false)
case Static(onInstance) =>
@@ -370,7 +364,7 @@ abstract class CopyPropagation {
case NEW(kind) =>
val v1 = kind match {
- case REFERENCE(cls) => Record(cls, new HashMap[Symbol, Value])
+ case REFERENCE(cls) => Record(cls, mutable.HashMap[Symbol, Value]())
case _ => Unknown
}
out.stack = v1 :: out.stack
@@ -422,8 +416,7 @@ abstract class CopyPropagation {
out.stack = Unknown :: Nil
case _ =>
- dump
- abort("Unknown instruction: " + i)
+ dumpClassesAndAbort("Unknown instruction: " + i)
}
out
} /* def interpret */
@@ -525,12 +518,12 @@ abstract class CopyPropagation {
* method has to find the correct mapping from fields to the order in which
* they are passed on the stack. It works for primary constructors.
*/
- private def getBindingsForPrimaryCtor(in: copyLattice.State, ctor: Symbol): Map[Symbol, Value] = {
+ private def getBindingsForPrimaryCtor(in: copyLattice.State, ctor: Symbol): mutable.Map[Symbol, Value] = {
val paramAccessors = ctor.owner.constrParamAccessors;
- var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1);
- val bindings = new HashMap[Symbol, Value];
+ var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1);
+ val bindings = mutable.HashMap[Symbol, Value]()
- if (settings.debug.value) log("getBindings for: " + ctor + " acc: " + paramAccessors)
+ debuglog("getBindings for: " + ctor + " acc: " + paramAccessors)
var paramTypes = ctor.tpe.paramTypes
val diff = paramTypes.length - paramAccessors.length
@@ -538,11 +531,11 @@ abstract class CopyPropagation {
case 0 => ()
case 1 if ctor.tpe.paramTypes.head == ctor.owner.rawowner.tpe =>
// it's an unused outer
- log("considering unused outer at position 0 in " + ctor.tpe.paramTypes)
+ debuglog("considering unused outer at position 0 in " + ctor.tpe.paramTypes)
paramTypes = paramTypes.tail
values = values.tail
case _ =>
- log("giving up on " + ctor + "(diff: " + diff + ")")
+ debuglog("giving up on " + ctor + "(diff: " + diff + ")")
return bindings
}
@@ -557,7 +550,7 @@ abstract class CopyPropagation {
values = values.tail;
}
- if (settings.debug.value) log("\t" + bindings)
+ debuglog("\t" + bindings)
bindings
}
@@ -569,13 +562,12 @@ abstract class CopyPropagation {
final def isPureMethod(m: Symbol): Boolean =
m.isGetter // abstract getters are still pure, as we 'know'
- final override def toString(): String = {
- var res = ""
- for (b <- this.method.code.blocks.toList)
- res = (res + "\nIN(" + b.label + "):\t Bindings: " + in(b).bindings +
- "\nIN(" + b.label +"):\t Stack: " + in(b).stack) + "\n";
- res
- }
+ final override def toString() = (
+ method.blocks map { b =>
+ "\nIN(%s):\t Bindings: %s".format(b.label, in(b).bindings) +
+ "\nIN(%s):\t Stack: %s".format(b.label, in(b).stack)
+ }
+ ).mkString
} /* class CopyAnalysis */
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
index 60cb679..04c3eed 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -60,20 +60,17 @@ trait DataFlowAnalysis[L <: SemiLattice] {
val output = f(point, in(point))
if ((lattice.bottom == out(point)) || output != out(point)) {
-// Console.println("Output changed at " + point
-// + " from: " + out(point) + " to: " + output
-// + " for input: " + in(point) + " and they are different: " + (output != out(point)))
+ // Console.println("Output changed at " + point
+ // + " from: " + out(point) + " to: " + output
+ // + " for input: " + in(point) + " and they are different: " + (output != out(point)))
out(point) = output
val succs = point.successors
succs foreach { p =>
- if (!worklist(p))
- worklist += p;
- if (!in.isDefinedAt(p))
- assert(false, "Invalid successor for: " + point + " successor " + p + " does not exist")
-// if (!p.exceptionHandlerHeader) {
-// println("lubbing " + p.predecessors + " outs: " + p.predecessors.map(out.apply).mkString("\n", "\n", ""))
- in(p) = lattice.lub(in(p) :: (p.predecessors map out.apply), p.exceptionHandlerStart)
-// }
+ val updated = lattice.lub(in(p) :: (p.predecessors map out.apply), p.exceptionHandlerStart)
+ if(updated != in(p)) {
+ in(p) = updated
+ if (!worklist(p)) { worklist += p; }
+ }
}
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
index 7d96c72..abda639 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -33,27 +33,25 @@ abstract class Liveness {
final class LivenessAnalysis extends DataFlowAnalysis[livenessLattice.type] {
type P = BasicBlock
- val lattice = livenessLattice
-
- var method: IMethod = _
-
- val gen: mutable.Map[BasicBlock, Set[Local]] = new mutable.HashMap()
- val kill: mutable.Map[BasicBlock, Set[Local]] = new mutable.HashMap()
+ val lattice = livenessLattice
+ var method: IMethod = _
+ val gen: mutable.Map[BasicBlock, Set[Local]] = perRunCaches.newMap()
+ val kill: mutable.Map[BasicBlock, Set[Local]] = perRunCaches.newMap()
def init(m: IMethod) {
this.method = m
- gen.clear
- kill.clear
+ gen.clear()
+ kill.clear()
- for (b <- m.code.blocks;
- (g, k) = genAndKill(b)) {
+ m foreachBlock { b =>
+ val (g, k) = genAndKill(b)
gen += (b -> g)
kill += (b -> k)
}
init {
- worklist ++= m.code.blocks.toList
- m.code.blocks.foreach { b =>
+ m foreachBlock { b =>
+ worklist += b
in(b) = lattice.bottom
out(b) = lattice.bottom
}
@@ -77,7 +75,7 @@ abstract class Liveness {
override def run() {
backwardAnalysis(blockTransfer)
if (settings.debug.value) {
- linearizer.linearize(method).foreach(b => if (b != method.code.startBlock)
+ linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(lattice.bottom != in(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
}
@@ -88,32 +86,17 @@ abstract class Liveness {
/** Abstract interpretation for one instruction. Very important:
* liveness is a backward DFA, so this method should be used to compute
- * liveness *before* the given instruction `i'.
+ * liveness *before* the given instruction `i`.
*/
def interpret(out: lattice.Elem, i: Instruction): lattice.Elem = {
- var in = out
-
- if (settings.debug.value) {
- log("- " + i)
- log("out: " + out)
- log("\n")
- }
-
+ debuglog("- " + i + "\nout: " + out + "\n")
i match {
- case LOAD_LOCAL(l) => in += l
- case STORE_LOCAL(l) => in -= l
- case _ =>
- ()
- }
- in
- } /* def interpret */
-
- override def toString(): String = {
- val buf = new StringBuilder()
- for (b <- method.code.blocks.toList) {
- buf.append("\nlive-in(" + b + ")=" + in(b) + "\nlive-out(" + b + ")=" + out(b));
+ case LOAD_LOCAL(l) => out + l
+ case STORE_LOCAL(l) => out - l
+ case _ => out
}
- buf.toString()
}
+ override def toString() =
+ (method.blocks map (b => "\nlive-in(%s)=%s\nlive-out(%s)=%s".format(b, in(b), b, out(b)))).mkString
} /* Liveness analysis */
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/LubException.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/LubException.scala
index ec05532..e91bf7a 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/LubException.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/LubException.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ProgramPoint.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ProgramPoint.scala
index a8c69eb..4e4026f 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ProgramPoint.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ProgramPoint.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
index 31a2dbf..2717c43 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -26,61 +26,62 @@ abstract class ReachingDefinitions {
*/
object rdefLattice extends SemiLattice {
type Definition = (Local, BasicBlock, Int)
- type Elem = IState[Set[Definition], Stack]
- type StackPos = Set[(BasicBlock, Int)]
- type Stack = List[StackPos]
+ type Elem = IState[ListSet[Definition], Stack]
+ type StackPos = ListSet[(BasicBlock, Int)]
+ type Stack = List[StackPos]
private def referenceEqualSet(name: String) = new ListSet[Definition] with ReferenceEquality {
override def toString = "<" + name + ">"
}
- val top: Elem = IState(referenceEqualSet("top"), Nil)
+ val top: Elem = IState(referenceEqualSet("top"), Nil)
val bottom: Elem = IState(referenceEqualSet("bottom"), Nil)
/** The least upper bound is set inclusion for locals, and pairwise set inclusion for stacks. */
- def lub2(exceptional: Boolean)(a: Elem, b: Elem): Elem =
+ def lub2(exceptional: Boolean)(a: Elem, b: Elem): Elem = {
if (bottom == a) b
else if (bottom == b) a
- else {
- val locals = a.vars ++ b.vars
- val stack =
- if (a.stack.isEmpty) b.stack
- else if (b.stack.isEmpty) a.stack
- else (a.stack, b.stack).zipped map (_ ++ _)
-
- IState(locals, stack)
-
- // val res = IState(locals, stack)
- // Console.println("\tlub2: " + a + ", " + b)
- // Console.println("\tis: " + res)
- // if (res._1 eq bottom._1) (new ListSet[Definition], Nil)
- // else res
- // res
- }
+ else IState(a.vars ++ b.vars,
+ if (a.stack.isEmpty) b.stack
+ else if (b.stack.isEmpty) a.stack
+ else {
+ // !!! These stacks are with some frequency not of the same size.
+ // I can't reverse engineer the logic well enough to say whether this
+ // indicates a problem. Even if it doesn't indicate a problem,
+ // it'd be nice not to call zip with mismatched sequences because
+ // it makes it harder to spot the real problems.
+ val result = (a.stack, b.stack).zipped map (_ ++ _)
+ if (settings.debug.value && (a.stack.length != b.stack.length))
+ debugwarn("Mismatched stacks in ReachingDefinitions#lub2: " + a.stack + ", " + b.stack + ", returning " + result)
+ result
+ }
+ )
+ }
}
class ReachingDefinitionsAnalysis extends DataFlowAnalysis[rdefLattice.type] {
type P = BasicBlock
val lattice = rdefLattice
- import lattice.Definition
- import lattice.Stack
- import lattice.Elem
-
+ import lattice.{ Definition, Stack, Elem, StackPos }
var method: IMethod = _
- val gen: mutable.Map[BasicBlock, Set[Definition]] = new mutable.HashMap()
- val kill: mutable.Map[BasicBlock, Set[Local]] = new mutable.HashMap()
- val drops: mutable.Map[BasicBlock, Int] = new mutable.HashMap()
- val outStack: mutable.Map[BasicBlock, Stack] = new mutable.HashMap()
+ val gen = mutable.Map[BasicBlock, ListSet[Definition]]()
+ val kill = mutable.Map[BasicBlock, ListSet[Local]]()
+ val drops = mutable.Map[BasicBlock, Int]()
+ val outStack = mutable.Map[BasicBlock, Stack]()
def init(m: IMethod) {
this.method = m
- gen.clear; kill.clear
- drops.clear; outStack.clear
- for (b <- m.code.blocks.toList;
- (g, k) = genAndKill(b);
- (d, st) = dropsAndGen(b)) {
+ gen.clear()
+ kill.clear()
+ drops.clear()
+ outStack.clear()
+
+ m foreachBlock { b =>
+ val (g, k) = genAndKill(b)
+ val (d, st) = dropsAndGen(b)
+
gen += (b -> g)
kill += (b -> k)
drops += (b -> d)
@@ -88,36 +89,32 @@ abstract class ReachingDefinitions {
}
init {
- worklist ++= m.code.blocks.toList
- m.code.blocks.foreach { b =>
+ m foreachBlock { b =>
+ worklist += b
in(b) = lattice.bottom
out(b) = lattice.bottom
}
m.exh foreach { e =>
- in(e.startBlock) = lattice.IState(new ListSet[Definition], List(new ListSet[(BasicBlock, Int)]))
+ in(e.startBlock) = lattice.IState(new ListSet[Definition], List(new StackPos))
}
-
}
}
import opcodes._
- def genAndKill(b: BasicBlock): (Set[Definition], Set[Local]) = {
- var genSet: Set[Definition] = new immutable.HashSet
- var killSet: Set[Local] = new immutable.HashSet
- for ((i, idx) <- b.toList.zipWithIndex) i match {
- case STORE_LOCAL(local) =>
- killSet = killSet + local
- genSet = updateReachingDefinition(b, idx, genSet)
- case _ => ()
+ def genAndKill(b: BasicBlock): (ListSet[Definition], ListSet[Local]) = {
+ var genSet = ListSet[Definition]()
+ var killSet = ListSet[Local]()
+ for ((STORE_LOCAL(local), idx) <- b.toList.zipWithIndex) {
+ killSet = killSet + local
+ genSet = updateReachingDefinition(b, idx, genSet)
}
(genSet, killSet)
}
- private def dropsAndGen(b: BasicBlock): (Int, List[Set[(BasicBlock, Int)]]) = {
- var depth = 0
- var drops = 0
- var stackOut: List[Set[(BasicBlock, Int)]] = Nil
+ private def dropsAndGen(b: BasicBlock): (Int, Stack) = {
+ var depth, drops = 0
+ var stackOut: Stack = Nil
for ((instr, idx) <- b.toList.zipWithIndex) {
instr match {
@@ -131,10 +128,10 @@ abstract class ReachingDefinitions {
depth -= instr.consumed
}
var prod = instr.produced
- depth = depth + prod
+ depth += prod
while (prod > 0) {
- stackOut = collection.immutable.Set((b, idx)) :: stackOut
- prod = prod - 1
+ stackOut ::= ListSet((b, idx))
+ prod -= 1
}
}
// Console.println("drops(" + b + ") = " + drops)
@@ -145,7 +142,7 @@ abstract class ReachingDefinitions {
override def run() {
forwardAnalysis(blockTransfer)
if (settings.debug.value) {
- linearizer.linearize(method).foreach(b => if (b != method.code.startBlock)
+ linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(lattice.bottom != in(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? " + in(b)
+ ": bot: " + lattice.bottom
@@ -156,14 +153,14 @@ abstract class ReachingDefinitions {
import opcodes._
import lattice.IState
- def updateReachingDefinition(b: BasicBlock, idx: Int, rd: Set[Definition]): Set[Definition] = {
+ def updateReachingDefinition(b: BasicBlock, idx: Int, rd: ListSet[Definition]): ListSet[Definition] = {
val STORE_LOCAL(local) = b(idx)
var tmp = local
(rd filter { case (l, _, _) => l != tmp }) + ((tmp, b, idx))
}
private def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
- var locals: Set[Definition] = (in.vars filter { case (l, _, _) => !kill(b)(l) }) ++ gen(b)
+ var locals: ListSet[Definition] = (in.vars filter { case (l, _, _) => !kill(b)(l) }) ++ gen(b)
if (locals eq lattice.bottom.vars) locals = new ListSet[Definition]
IState(locals, outStack(b) ::: in.stack.drop(drops(b)))
}
@@ -172,7 +169,8 @@ abstract class ReachingDefinitions {
def interpret(b: BasicBlock, idx: Int, in: lattice.Elem): Elem = {
var locals = in.vars
var stack = in.stack
- val instr = b(idx)
+ val instr = b(idx)
+
instr match {
case STORE_LOCAL(l1) =>
locals = updateReachingDefinition(b, idx, locals)
@@ -185,7 +183,7 @@ abstract class ReachingDefinitions {
var prod = instr.produced
while (prod > 0) {
- stack = collection.immutable.Set((b, idx)) :: stack
+ stack ::= ListSet((b, idx))
prod -= 1
}
@@ -197,7 +195,8 @@ abstract class ReachingDefinitions {
* value found below the topmost element of the stack.
*/
def findDefs(bb: BasicBlock, idx: Int, m: Int, depth: Int): List[(BasicBlock, Int)] = if (idx > 0) {
- assert(bb.closed)
+ assert(bb.closed, bb)
+
var instrs = bb.getArray
var res: List[(BasicBlock, Int)] = Nil
var i = idx
@@ -241,12 +240,10 @@ abstract class ReachingDefinitions {
findDefs(bb, idx, m, 0)
override def toString: String = {
- val sb = new StringBuilder
- sb.append("rdef: \n")
- for (b <- method.code.blocks)
- sb.append("rdef_entry(" + b + ")= " + in(b)).append("\nrdef_exit(" + b + ")= " + out(b))
- sb.toString()
+ method.code.blocks map { b =>
+ " entry(%s) = %s\n".format(b, in(b)) +
+ " exit(%s) = %s\n".format(b, out(b))
+ } mkString ("ReachingDefinitions {\n", "\n", "\n}")
}
-
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/SemiLattice.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/SemiLattice.scala
index d458b25..f718c70 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/SemiLattice.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/SemiLattice.scala
@@ -1,9 +1,8 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-
package scala.tools.nsc
package backend.icode
package analysis
@@ -26,6 +25,11 @@ trait SemiLattice {
case _ =>
false
}
+ private def tstring(x: Any): String = x match {
+ case xs: TraversableOnce[_] => xs map tstring mkString " "
+ case _ => "" + x
+ }
+ override def toString = "IState(" + tstring(vars) + ", " + tstring(stack) + ")"
}
/** Return the least upper bound of a and b. */
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index e8a58ff..b2ecb43 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -56,7 +56,7 @@ abstract class TypeFlowAnalysis {
/** A map which returns the bottom type for unfound elements */
class VarBinding extends mutable.HashMap[icodes.Local, icodes.TypeKind] {
- override def get(l: icodes.Local) = super.get(l) orElse Some(typeLattice.bottom)
+ override def default(l: icodes.Local) = typeLattice.bottom
def this(o: VarBinding) = {
this()
@@ -110,16 +110,16 @@ abstract class TypeFlowAnalysis {
this.method = m
//typeFlowLattice.lubs = 0
init {
- worklist += m.code.startBlock
+ worklist += m.startBlock
worklist ++= (m.exh map (_.startBlock))
- m.code.blocks.foreach { b =>
+ m foreachBlock { b =>
in(b) = typeFlowLattice.bottom
out(b) = typeFlowLattice.bottom
}
// start block has var bindings for each of its parameters
val entryBindings = new VarBinding ++= (m.params map (p => ((p, p.kind))))
- in(m.code.startBlock) = lattice.IState(entryBindings, typeStackLattice.bottom)
+ in(m.startBlock) = lattice.IState(entryBindings, typeStackLattice.bottom)
m.exh foreach { e =>
in(e.startBlock) = lattice.IState(in(e.startBlock).vars, typeStackLattice.exceptionHandlerStack)
@@ -127,32 +127,6 @@ abstract class TypeFlowAnalysis {
}
}
- /** reinitialize the analysis, keeping around solutions from a previous run. */
- def reinit(m: icodes.IMethod) {
- if (this.method == null || this.method.symbol != m.symbol)
- init(m)
- else reinit {
- for (b <- m.code.blocks; if !in.isDefinedAt(b)) {
- for (p <- b.predecessors) {
- if (out.isDefinedAt(p)) {
- in(b) = out(p)
- worklist += p
- }
-/* else
- in(b) = typeFlowLattice.bottom
-*/ }
- out(b) = typeFlowLattice.bottom
- }
- for (handler <- m.exh) {
- val start = handler.startBlock
- if (!in.contains(start)) {
- worklist += start
- in(start) = lattice.IState(in(start).vars, typeStackLattice.exceptionHandlerStack)
- }
- }
- }
- }
-
def this(m: icodes.IMethod) {
this()
init(m)
@@ -160,234 +134,48 @@ abstract class TypeFlowAnalysis {
def run = {
timer.start
-// icodes.lubs0 = 0
+ // icodes.lubs0 = 0
forwardAnalysis(blockTransfer)
val t = timer.stop
if (settings.debug.value) {
- linearizer.linearize(method).foreach(b => if (b != method.code.startBlock)
+ linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(visited.contains(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited));
}
-// log("" + method.symbol.fullName + " [" + method.code.blocks.size + " blocks] "
-// + "\n\t" + iterations + " iterations: " + t + " ms."
-// + "\n\tlubs: " + typeFlowLattice.lubs + " out of which " + icodes.lubs0 + " typer lubs")
+ // log("" + method.symbol.fullName + " [" + method.code.blocks.size + " blocks] "
+ // + "\n\t" + iterations + " iterations: " + t + " ms."
+ // + "\n\tlubs: " + typeFlowLattice.lubs + " out of which " + icodes.lubs0 + " typer lubs")
}
def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
- b.foldLeft(in)(interpret)
- }
- /** The flow function of a given basic block. */
- /* var flowFun: immutable.Map[BasicBlock, TransferFunction] = new immutable.HashMap */
-
- /** Fill flowFun with a transfer function per basic block. */
-/*
- private def buildFlowFunctions(blocks: List[BasicBlock]) {
- def transfer(b: BasicBlock): TransferFunction = {
- var gens: List[Gen] = Nil
- var consumed: Int = 0
- val stack = new SimulatedStack
-
- for (instr <- b) instr match {
- case THIS(clasz) =>
- stack push toTypeKind(clasz.tpe)
-
- case CONSTANT(const) =>
- stack push toTypeKind(const.tpe)
-
- case LOAD_ARRAY_ITEM(kind) =>
- stack.pop2
- stack.push(kind)
-
- case LOAD_LOCAL(local) =>
- val t = bindings(local)
- stack push (if (t == typeLattice.bottom) local.kind else t)
-
- case LOAD_FIELD(field, isStatic) =>
- if (!isStatic)
- stack.pop
- stack push toTypeKind(field.tpe)
-
- case LOAD_MODULE(module) =>
- stack push toTypeKind(module.tpe)
-
- case STORE_ARRAY_ITEM(kind) =>
- stack.pop3
-
- case STORE_LOCAL(local) =>
- val t = stack.pop
- bindings += (local -> t)
-
- case STORE_THIS(_) =>
- stack.pop
-
- case STORE_FIELD(field, isStatic) =>
- if (isStatic)
- stack.pop
- else
- stack.pop2
-
- case CALL_PRIMITIVE(primitive) =>
- primitive match {
- case Negation(kind) =>
- stack.pop; stack.push(kind)
- case Test(_, kind, zero) =>
- stack.pop
- if (!zero) stack.pop
- stack push BOOL;
- case Comparison(_, _) =>
- stack.pop2
- stack push INT
-
- case Arithmetic(op, kind) =>
- stack.pop
- if (op != NOT)
- stack.pop
- val k = kind match {
- case BYTE | SHORT | CHAR => INT
- case _ => kind
- }
- stack push k
-
- case Logical(op, kind) =>
- stack.pop2
- stack push kind
-
- case Shift(op, kind) =>
- stack.pop2
- stack push kind
-
- case Conversion(src, dst) =>
- stack.pop
- stack push dst
-
- case ArrayLength(kind) =>
- stack.pop
- stack push INT
-
- case StartConcat =>
- stack.push(ConcatClass)
-
- case EndConcat =>
- stack.pop
- stack.push(STRING)
-
- case StringConcat(el) =>
- stack.pop2
- stack push ConcatClass
- }
-
- case CALL_METHOD(method, style) => style match {
- case Dynamic =>
- stack.pop(1 + method.info.paramTypes.length)
- stack.push(toTypeKind(method.info.resultType))
-
- case Static(onInstance) =>
- if (onInstance) {
- stack.pop(1 + method.info.paramTypes.length)
- if (!method.isConstructor)
- stack.push(toTypeKind(method.info.resultType));
- } else {
- stack.pop(method.info.paramTypes.length)
- stack.push(toTypeKind(method.info.resultType))
- }
-
- case SuperCall(mix) =>
- stack.pop(1 + method.info.paramTypes.length)
- stack.push(toTypeKind(method.info.resultType))
- }
-
- case BOX(kind) =>
- stack.pop
- stack.push(BOXED(kind))
-
- case UNBOX(kind) =>
- stack.pop
- stack.push(kind)
-
- case NEW(kind) =>
- stack.push(kind)
-
- case CREATE_ARRAY(elem, dims) =>
- stack.pop(dims)
- stack.push(ARRAY(elem))
-
- case IS_INSTANCE(tpe) =>
- stack.pop
- stack.push(BOOL)
-
- case CHECK_CAST(tpe) =>
- stack.pop
- stack.push(tpe)
-
- case SWITCH(tags, labels) =>
- stack.pop
-
- case JUMP(whereto) =>
- ()
-
- case CJUMP(success, failure, cond, kind) =>
- stack.pop2
-
- case CZJUMP(success, failure, cond, kind) =>
- stack.pop
-
- case RETURN(kind) =>
- if (kind != UNIT)
- stack.pop;
-
- case THROW() =>
- stack.pop
-
- case DROP(kind) =>
- stack.pop
-
- case DUP(kind) =>
- stack.push(stack.head)
-
- case MONITOR_ENTER() =>
- stack.pop
-
- case MONITOR_EXIT() =>
- stack.pop
-
- case SCOPE_ENTER(_) | SCOPE_EXIT(_) =>
- ()
-
- case LOAD_EXCEPTION(_) =>
- stack.pop(stack.length)
- stack.push(typeLattice.Object)
-
- case _ =>
- dump
- abort("Unknown instruction: " + i)
-
- }
-
- new TransferFunction(consumed, gens)
- }
-
- for (b <- blocks) {
- flowFun = flowFun + (b -> transfer(b))
+ var result = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
+ var instrs = b.toList
+ while(!instrs.isEmpty) {
+ val i = instrs.head
+ result = mutatingInterpret(result, i)
+ instrs = instrs.tail
}
+ result
}
-*/
+
/** Abstract interpretation for one instruction. */
def interpret(in: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = {
val out = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
+ mutatingInterpret(out, i)
+ }
+
+ def mutatingInterpret(out: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = {
val bindings = out.vars
val stack = out.stack
if (settings.debug.value) {
-// Console.println("[before] Stack: " + stack);
-// Console.println(i);
+ // Console.println("[before] Stack: " + stack);
+ // Console.println(i);
}
i match {
- case THIS(clasz) =>
- stack push toTypeKind(clasz.tpe)
-
- case CONSTANT(const) =>
- stack push toTypeKind(const.tpe)
+ case THIS(clasz) => stack push toTypeKind(clasz.tpe)
+ case CONSTANT(const) => stack push toTypeKind(const.tpe)
case LOAD_ARRAY_ITEM(kind) =>
stack.pop2 match {
@@ -403,148 +191,80 @@ abstract class TypeFlowAnalysis {
stack push (if (t == typeLattice.bottom) local.kind else t)
case LOAD_FIELD(field, isStatic) =>
- if (!isStatic)
- stack.pop
+ if (!isStatic) { stack.pop }
stack push toTypeKind(field.tpe)
- case LOAD_MODULE(module) =>
- stack push toTypeKind(module.tpe)
+ case LOAD_MODULE(module) => stack push toTypeKind(module.tpe)
+ case STORE_ARRAY_ITEM(kind) => stack.pop3
+ case STORE_LOCAL(local) => val t = stack.pop; bindings += (local -> t)
+ case STORE_THIS(_) => stack.pop
- case STORE_ARRAY_ITEM(kind) =>
- stack.pop3
-
- case STORE_LOCAL(local) =>
- val t = stack.pop
- bindings += (local -> t)
-
- case STORE_THIS(_) =>
- stack.pop
-
- case STORE_FIELD(field, isStatic) =>
- if (isStatic)
- stack.pop
- else
- stack.pop2
+ case STORE_FIELD(field, isStatic) => if (isStatic) stack.pop else stack.pop2
case CALL_PRIMITIVE(primitive) =>
primitive match {
- case Negation(kind) =>
- stack.pop; stack.push(kind)
+ case Negation(kind) => stack.pop; stack.push(kind)
+
case Test(_, kind, zero) =>
stack.pop
- if (!zero) stack.pop
+ if (!zero) { stack.pop }
stack push BOOL;
- case Comparison(_, _) =>
- stack.pop2
- stack push INT
+
+ case Comparison(_, _) => stack.pop2; stack push INT
case Arithmetic(op, kind) =>
stack.pop
- if (op != NOT)
- stack.pop
+ if (op != NOT) { stack.pop }
val k = kind match {
case BYTE | SHORT | CHAR => INT
case _ => kind
}
stack push k
- case Logical(op, kind) =>
- stack.pop2
- stack push kind
-
- case Shift(op, kind) =>
- stack.pop2
- stack push kind
-
- case Conversion(src, dst) =>
- stack.pop
- stack push dst
-
- case ArrayLength(kind) =>
- stack.pop
- stack push INT
-
- case StartConcat =>
- stack.push(ConcatClass)
-
- case EndConcat =>
- stack.pop
- stack.push(STRING)
-
- case StringConcat(el) =>
- stack.pop2
- stack push ConcatClass
+ case Logical(op, kind) => stack.pop2; stack push kind
+ case Shift(op, kind) => stack.pop2; stack push kind
+ case Conversion(src, dst) => stack.pop; stack push dst
+ case ArrayLength(kind) => stack.pop; stack push INT
+ case StartConcat => stack.push(ConcatClass)
+ case EndConcat => stack.pop; stack.push(STRING)
+ case StringConcat(el) => stack.pop2; stack push ConcatClass
}
case cm @ CALL_METHOD(_, _) =>
stack pop cm.consumed
cm.producedTypes foreach (stack push _)
- case BOX(kind) =>
- stack.pop
- stack.push(BOXED(kind))
+ case BOX(kind) => stack.pop; stack.push(BOXED(kind))
+ case UNBOX(kind) => stack.pop; stack.push(kind)
- case UNBOX(kind) =>
- stack.pop
- stack.push(kind)
+ case NEW(kind) => stack.push(kind)
- case NEW(kind) =>
- stack.push(kind)
+ case CREATE_ARRAY(elem, dims) => stack.pop(dims); stack.push(ARRAY(elem))
- case CREATE_ARRAY(elem, dims) =>
- stack.pop(dims)
- stack.push(ARRAY(elem))
+ case IS_INSTANCE(tpe) => stack.pop; stack.push(BOOL)
+ case CHECK_CAST(tpe) => stack.pop; stack.push(tpe)
- case IS_INSTANCE(tpe) =>
- stack.pop
- stack.push(BOOL)
+ case _: SWITCH => stack.pop
+ case _: JUMP => ()
+ case _: CJUMP => stack.pop2
+ case _: CZJUMP => stack.pop
- case CHECK_CAST(tpe) =>
- stack.pop
- stack.push(tpe)
+ case RETURN(kind) => if (kind != UNIT) { stack.pop }
+ case THROW(_) => stack.pop
- case SWITCH(tags, labels) =>
- stack.pop
+ case DROP(kind) => stack.pop
+ case DUP(kind) => stack.push(stack.head)
- case JUMP(whereto) =>
- ()
+ case MONITOR_ENTER() | MONITOR_EXIT() => stack.pop
- case CJUMP(success, failure, cond, kind) =>
- stack.pop2
+ case SCOPE_ENTER(_) | SCOPE_EXIT(_) => ()
- case CZJUMP(success, failure, cond, kind) =>
- stack.pop
-
- case RETURN(kind) =>
- if (kind != UNIT)
- stack.pop;
-
- case THROW(_) =>
- stack.pop
-
- case DROP(kind) =>
- stack.pop
-
- case DUP(kind) =>
- stack.push(stack.head)
-
- case MONITOR_ENTER() =>
- stack.pop
-
- case MONITOR_EXIT() =>
- stack.pop
-
- case SCOPE_ENTER(_) | SCOPE_EXIT(_) =>
- ()
-
- case LOAD_EXCEPTION(_) =>
+ case LOAD_EXCEPTION(clasz) =>
stack.pop(stack.length)
- stack.push(typeLattice.top)
+ stack.push(toTypeKind(clasz.tpe))
case _ =>
- dump
- abort("Unknown instruction: " + i)
-
+ dumpClassesAndAbort("Unknown instruction: " + i)
}
out
} // interpret
@@ -621,6 +341,421 @@ abstract class TypeFlowAnalysis {
}
}
+ case class CallsiteInfo(bb: icodes.BasicBlock, receiver: Symbol, stackLength: Int, concreteMethod: Symbol)
+
+ /**
+
+ A full type-flow analysis on a method computes in- and out-flows for each basic block (that's what MethodTFA does).
+
+ For the purposes of Inliner, doing so guarantees that an abstract typestack-slot is available by the time an inlining candidate (a CALL_METHOD instruction) is visited.
+ This subclass (MTFAGrowable) of MethodTFA also aims at performing such analysis on CALL_METHOD instructions, with some differences:
+
+ (a) early screening is performed while the type-flow is being computed (in an override of `blockTransfer`) by testing a subset of the conditions that Inliner checks later.
+ The reasoning here is: if the early check fails at some iteration, there's no chance a follow-up iteration (with a yet more lub-ed typestack-slot) will succeed.
+ Failure is sufficient to remove that particular CALL_METHOD from the typeflow's `remainingCALLs`.
+ A forward note: in case inlining occurs at some basic block B, all blocks reachable from B get their CALL_METHOD instructions considered again as candidates
+ (because of the more precise types that -- perhaps -- can be computed).
+
+ (b) in case the early check does not fail, no conclusive decision can be made, thus the CALL_METHOD stays `isOnwatchlist`.
+
+ In other words, `remainingCALLs` tracks those callsites that still remain as candidates for inlining, so that Inliner can focus on those.
+ `remainingCALLs` also caches info about the typestack just before the callsite, so as to spare computing them again at inlining time.
+
+ Besides caching, a further optimization involves skipping those basic blocks whose in-flow and out-flow isn't needed anyway (as explained next).
+ A basic block lacking a callsite in `remainingCALLs`, when visited by the standard algorithm, won't cause any inlining.
+ But as we know from the way type-flows are computed, computing the in- and out-flow for a basic block relies in general on those of other basic blocks.
+ In detail, we want to focus on that sub-graph of the CFG such that control flow may reach a remaining candidate callsite.
+ Those basic blocks not in that subgraph can be skipped altogether. That's why:
+ - `forwardAnalysis()` in `MTFAGrowable` now checks for inclusion of a basic block in `relevantBBs`
+ - same check is performed before adding a block to the worklist, and as part of choosing successors.
+ The bookkeeping supporting on-the-fly pruning of irrelevant blocks requires overriding most methods of the dataflow-analysis.
+
+ The rest of the story takes place in Inliner, which does not visit all of the method's basic blocks but only on those represented in `remainingCALLs`.
+
+ @author Miguel Garcia, http://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
+
+ */
+ class MTFAGrowable extends MethodTFA {
+
+ import icodes._
+
+ val remainingCALLs = mutable.Map.empty[opcodes.CALL_METHOD, CallsiteInfo]
+
+ val preCandidates = mutable.Set.empty[BasicBlock]
+
+ var callerLin: Traversable[BasicBlock] = null
+
+ override def run {
+
+ timer.start
+ forwardAnalysis(blockTransfer)
+ val t = timer.stop
+
+ /* Now that `forwardAnalysis(blockTransfer)` has finished, all inlining candidates can be found in `remainingCALLs`,
+ whose keys are callsites and whose values are pieces of information about the typestack just before the callsite in question.
+ In order to keep `analyzeMethod()` simple, we collect in `preCandidates` those basic blocks containing at least one candidate. */
+ preCandidates.clear()
+ for(rc <- remainingCALLs) {
+ preCandidates += rc._2.bb
+ }
+
+ if (settings.debug.value) {
+ for(b <- callerLin; if (b != method.startBlock) && preCandidates(b)) {
+ assert(visited.contains(b),
+ "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited)
+ }
+ }
+
+ }
+
+ var shrinkedWatchlist = false
+
+ /*
+ This is the method where information cached elsewhere is put to use. References are given those other places that populate those caches.
+
+ The goal is avoiding computing type-flows for blocks we don't need (ie blocks not tracked in `relevantBBs`). The method used to add to `relevantBBs` is `putOnRadar`.
+
+ Moreover, it's often the case that the last CALL_METHOD of interest ("of interest" equates to "being tracked in `isOnWatchlist`) isn't the last instruction on the block.
+ There are cases where the typeflows computed past this `lastInstruction` are needed, and cases when they aren't.
+ The reasoning behind this decision is described in `populatePerimeter()`. All `blockTransfer()` needs to do (in order to know at which instruction it can stop)
+ is querying `isOnPerimeter`.
+
+ Upon visiting a CALL_METHOD that's an inlining candidate, the relevant pieces of information about the pre-instruction typestack are collected for future use.
+ That is, unless the candidacy test fails. The reasoning here is: if such early check fails at some iteration, there's no chance a follow-up iteration
+ (with a yet more lub-ed typestack-slot) will succeed. In case of failure we can safely remove the CALL_METHOD from both `isOnWatchlist` and `remainingCALLs`.
+
+ */
+ override def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
+ var result = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
+
+ val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null;
+ var isPastLast = false
+
+ var instrs = b.toList
+ while(!isPastLast && !instrs.isEmpty) {
+ val i = instrs.head
+
+ if(isOnWatchlist(i)) {
+ val cm = i.asInstanceOf[opcodes.CALL_METHOD]
+ val msym = cm.method
+ val paramsLength = msym.info.paramTypes.size
+ val receiver = result.stack.types.drop(paramsLength).head match {
+ case REFERENCE(s) => s
+ case _ => NoSymbol // e.g. the scrutinee is BOX(s) or ARRAY
+ }
+ val concreteMethod = inliner.lookupImplFor(msym, receiver)
+ val isCandidate = {
+ ( inliner.isClosureClass(receiver) || concreteMethod.isEffectivelyFinal || receiver.isEffectivelyFinal ) &&
+ !blackballed(concreteMethod)
+ }
+ if(isCandidate) {
+ remainingCALLs += Pair(cm, CallsiteInfo(b, receiver, result.stack.length, concreteMethod))
+ } else {
+ remainingCALLs.remove(cm)
+ isOnWatchlist.remove(cm)
+ shrinkedWatchlist = true
+ }
+ }
+
+ isPastLast = (i eq stopAt)
+
+ if(!isPastLast) {
+ result = mutatingInterpret(result, i)
+ instrs = instrs.tail
+ }
+ }
+
+ result
+ } // end of method blockTransfer
+
+ val isOnWatchlist = mutable.Set.empty[Instruction]
+
+ val warnIfInlineFails = mutable.Set.empty[opcodes.CALL_METHOD] // cache for a given IMethod (ie cleared on Inliner.analyzeMethod).
+
+ /* Each time CallerCalleeInfo.isSafeToInline determines a concrete callee is unsafe to inline in the current caller,
+ the fact is recorded in this TFA instance for the purpose of avoiding devoting processing to that callsite next time.
+ The condition of "being unsafe to inline in the current caller" sticks across inlinings and TFA re-inits
+ because it depends on the instructions of the callee, which stay unchanged during the course of `analyzeInc(caller)`
+ (with the caveat of the side-effecting `makePublic` in `helperIsSafeToInline`).*/
+ val knownUnsafe = mutable.Set.empty[Symbol]
+ val knownSafe = mutable.Set.empty[Symbol]
+ val knownNever = mutable.Set.empty[Symbol] // `knownNever` needs be cleared only at the very end of the inlining phase (unlike `knownUnsafe` and `knownSafe`)
+ final def blackballed(msym: Symbol): Boolean = { knownUnsafe(msym) || knownNever(msym) }
+
+ val relevantBBs = mutable.Set.empty[BasicBlock]
+
+ /*
+ * Rationale to prevent some methods from ever being inlined:
+ *
+ * (1) inlining getters and setters results in exposing a private field,
+ * which may itself prevent inlining of the caller (at best) or
+ * lead to situations like SI-5442 ("IllegalAccessError when mixing optimized and unoptimized bytecode")
+ *
+ * (2) only invocations having a receiver object are considered (ie no static-methods are ever inlined).
+ * This is taken care of by checking `isDynamic` (ie virtual method dispatch) and `Static(true)` (ie calls to private members)
+ */
+ private def isPreCandidate(cm: opcodes.CALL_METHOD): Boolean = {
+ val msym = cm.method
+ val style = cm.style
+
+ !blackballed(msym) &&
+ !msym.isConstructor &&
+ (!msym.isAccessor || inliner.isClosureClass(msym.owner)) &&
+ (style.isDynamic || (style.hasInstance && style.isStatic))
+ }
+
+ override def init(m: icodes.IMethod) {
+ super.init(m)
+ remainingCALLs.clear()
+ knownUnsafe.clear()
+ knownSafe.clear()
+ // initially populate the watchlist with all callsites standing a chance of being inlined
+ isOnWatchlist.clear()
+ relevantBBs.clear()
+ warnIfInlineFails.clear()
+ /* TODO Do we want to perform inlining in non-finally exception handlers?
+ * Seems counterproductive (the larger the method the less likely it will be JITed).
+ * It's not that putting on radar only `linearizer linearizeAt (m, m.startBlock)` makes for much shorter inlining times (a minor speedup nonetheless)
+ * but the effect on method size could be explored. */
+ putOnRadar(m.linearizedBlocks(linearizer))
+ populatePerimeter()
+ // usually but not always true (counterexample in SI-6015) `(relevantBBs.isEmpty || relevantBBs.contains(m.startBlock))`
+ }
+
+ def conclusives(b: BasicBlock): List[opcodes.CALL_METHOD] = {
+ knownBeforehand(b) filter { cm => inliner.isMonadicMethod(cm.method) || inliner.hasInline(cm.method) }
+ }
+
+ def knownBeforehand(b: BasicBlock): List[opcodes.CALL_METHOD] = {
+ b.toList collect { case c : opcodes.CALL_METHOD => c } filter { cm => isPreCandidate(cm) && isReceiverKnown(cm) }
+ }
+
+ private def isReceiverKnown(cm: opcodes.CALL_METHOD): Boolean = {
+ cm.method.isEffectivelyFinal && cm.method.owner.isEffectivelyFinal
+ }
+
+ private def putOnRadar(blocks: Traversable[BasicBlock]) {
+ for(bb <- blocks) {
+ val calls = bb.toList collect { case cm : opcodes.CALL_METHOD => cm }
+ for(c <- calls; if(inliner.hasInline(c.method))) {
+ warnIfInlineFails += c
+ }
+ val preCands = calls filter isPreCandidate
+ isOnWatchlist ++= preCands
+ }
+ relevantBBs ++= blocks
+ }
+
+ /* the argument is also included in the result */
+ private def transitivePreds(b: BasicBlock): Set[BasicBlock] = { transitivePreds(List(b)) }
+
+ /* those BBs in the argument are also included in the result */
+ private def transitivePreds(starters: Traversable[BasicBlock]): Set[BasicBlock] = {
+ val result = mutable.Set.empty[BasicBlock]
+ var toVisit: List[BasicBlock] = starters.toList.distinct
+ while(toVisit.nonEmpty) {
+ val h = toVisit.head
+ toVisit = toVisit.tail
+ result += h
+ for(p <- h.predecessors; if !result(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
+ }
+ result.toSet
+ }
+
+ /* those BBs in the argument are also included in the result */
+ private def transitiveSuccs(starters: Traversable[BasicBlock]): Set[BasicBlock] = {
+ val result = mutable.Set.empty[BasicBlock]
+ var toVisit: List[BasicBlock] = starters.toList.distinct
+ while(toVisit.nonEmpty) {
+ val h = toVisit.head
+ toVisit = toVisit.tail
+ result += h
+ for(p <- h.successors; if !result(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
+ }
+ result.toSet
+ }
+
+ /* A basic block B is "on the perimeter" of the current control-flow subgraph if none of its successors belongs to that subgraph.
+ * In that case, for the purposes of inlining, we're interested in the typestack right before the last inline candidate in B, not in those afterwards.
+ * In particular we can do without computing the outflow at B. */
+ private def populatePerimeter() {
+ isOnPerimeter.clear()
+ var done = true
+ do {
+ val (frontier, toPrune) = (relevantBBs filter hasNoRelevantSuccs) partition isWatching
+ isOnPerimeter ++= frontier
+ relevantBBs --= toPrune
+ done = toPrune.isEmpty
+ } while(!done)
+
+ lastInstruction.clear()
+ for (b <- isOnPerimeter; lastIns = b.toList.reverse find isOnWatchlist) {
+ lastInstruction += (b -> lastIns.get.asInstanceOf[opcodes.CALL_METHOD])
+ }
+
+ // assertion: "no relevant block can have a predecessor that is on perimeter"
+ assert((for (b <- relevantBBs; if transitivePreds(b.predecessors) exists isOnPerimeter) yield b).isEmpty)
+ }
+
+ private val isOnPerimeter = mutable.Set.empty[BasicBlock]
+ private val lastInstruction = mutable.Map.empty[BasicBlock, opcodes.CALL_METHOD]
+
+ def hasNoRelevantSuccs(x: BasicBlock): Boolean = { !(x.successors exists relevantBBs) }
+
+ def isWatching(x: BasicBlock): Boolean = (x.toList exists isOnWatchlist)
+
+
+
+
+ /**
+
+ This method is invoked after one or more inlinings have been performed in basic blocks whose in-flow is non-bottom (this makes a difference later).
+ What we know about those inlinings is given by:
+
+ - `staleOut`: These are the blocks where a callsite was inlined.
+ For each callsite, all instructions in that block before the callsite were left in the block, and the rest moved to an `afterBlock`.
+ The out-flow of these basic blocks is thus in general stale, that's why we'll add them to the TFA worklist.
+
+ - `inlined` : These blocks were spliced into the method's CFG as part of inlining. Being new blocks, they haven't been visited yet by the typeflow analysis.
+
+ - `staleIn` : These blocks are what `doInline()` calls `afterBlock`s, ie the new home for instructions that previously appeared
+ after a callsite in a `staleOut` block.
+
+ Based on the above information, we have to bring up-to-date the caches that `forwardAnalysis` and `blockTransfer` use to skip blocks and instructions.
+ Those caches are `relevantBBs` and `isOnPerimeter` (for blocks) and `isOnWatchlist` and `lastInstruction` (for CALL_METHODs).
+ Please notice that all `inlined` and `staleIn` blocks are reachable from `staleOut` blocks.
+
+ The update takes place in two steps:
+
+ (1) `staleOut foreach { so => putOnRadar(linearizer linearizeAt (m, so)) }`
+ This results in initial populations for `relevantBBs` and `isOnWatchlist`.
+ Because of the way `isPreCandidate` reuses previous decision-outcomes that are still valid,
+ this already prunes some candidates standing no chance of being inlined.
+
+ (2) `populatePerimeter()`
+ Based on the CFG-subgraph determined in (1) as reflected in `relevantBBs`,
+ this method detects some blocks whose typeflows aren't needed past a certain CALL_METHOD
+ (not needed because none of its successors is relevant for the purposes of inlining, see `hasNoRelevantSuccs`).
+ The blocks thus chosen are said to be "on the perimeter" of the CFG-subgraph.
+ For each of them, its `lastInstruction` (after which no more typeflows are needed) is found.
+
+ */
+ def reinit(m: icodes.IMethod, staleOut: List[BasicBlock], inlined: scala.collection.Set[BasicBlock], staleIn: scala.collection.Set[BasicBlock]) {
+ if (this.method == null || this.method.symbol != m.symbol) {
+ init(m)
+ return
+ } else if(staleOut.isEmpty && inlined.isEmpty && staleIn.isEmpty) {
+ // this promotes invoking reinit if in doubt, no performance degradation will ensue!
+ return;
+ }
+
+ worklist.clear // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit.
+
+ // asserts conveying an idea what CFG shapes arrive here:
+ // staleIn foreach (p => assert( !in.isDefinedAt(p), p))
+ // staleIn foreach (p => assert(!out.isDefinedAt(p), p))
+ // inlined foreach (p => assert( !in.isDefinedAt(p), p))
+ // inlined foreach (p => assert(!out.isDefinedAt(p), p))
+ // inlined foreach (p => assert(!p.successors.isEmpty || p.lastInstruction.isInstanceOf[icodes.opcodes.THROW], p))
+ // staleOut foreach (p => assert( in.isDefinedAt(p), p))
+
+ // remainingCALLs.clear()
+ isOnWatchlist.clear()
+ relevantBBs.clear()
+
+ // never rewrite in(m.startBlock)
+ staleOut foreach { b =>
+ enqueue(b)
+ out(b) = typeFlowLattice.bottom
+ }
+ // nothing else is added to the worklist, bb's reachable via succs will be tfa'ed
+ blankOut(inlined)
+ blankOut(staleIn)
+ // no need to add startBlocks from m.exh
+
+ staleOut foreach { so => putOnRadar(linearizer linearizeAt (m, so)) }
+ populatePerimeter()
+
+ } // end of method reinit
+
+ /* this is not a general purpose method to add to the worklist,
+ * because the assert is expected to hold only when called from MTFAGrowable.reinit() */
+ private def enqueue(b: BasicBlock) {
+ assert(in(b) ne typeFlowLattice.bottom)
+ if(!worklist.contains(b)) { worklist += b }
+ }
+
+ /* this is not a general purpose method to add to the worklist,
+ * because the assert is expected to hold only when called from MTFAGrowable.reinit() */
+ private def enqueue(bs: Traversable[BasicBlock]) {
+ bs foreach enqueue
+ }
+
+ private def blankOut(blocks: scala.collection.Set[BasicBlock]) {
+ blocks foreach { b =>
+ in(b) = typeFlowLattice.bottom
+ out(b) = typeFlowLattice.bottom
+ }
+ }
+
+ /*
+ This is basically the plain-old forward-analysis part of a dataflow algorithm,
+ adapted to skip non-relevant blocks (as determined by `reinit()` via `populatePerimeter()`).
+
+ The adaptations are:
+
+ - only relevant blocks dequeued from the worklist move on to have the transfer function applied
+
+ - `visited` now means the transfer function was applied to the block,
+ but please notice that this does not imply anymore its out-flow to be different from bottom,
+ because a block on the perimeter will have per-instruction typeflows computed only up to its `lastInstruction`.
+ In case you need to know whether a visited block `v` has been "fully visited", evaluate `out(v) ne typeflowLattice.bottom`
+
+ - given that the transfer function may remove callsite-candidates from the watchlist (thus, they are not candidates anymore)
+ there's an opportunity to detect whether a previously relevant block has been left without candidates.
+ That's what `shrinkedWatchlist` detects. Provided the block was on the perimeter, we know we can skip it from now on,
+ and we can also constrain the CFG-subgraph by finding a new perimeter (thus the invocation to `populatePerimeter()`).
+ */
+ override def forwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit = {
+ while (!worklist.isEmpty && relevantBBs.nonEmpty) {
+ if (stat) iterations += 1
+ val point = worklist.iterator.next; worklist -= point;
+ if(relevantBBs(point)) {
+ shrinkedWatchlist = false
+ val output = f(point, in(point))
+ visited += point;
+ if(isOnPerimeter(point)) {
+ if(shrinkedWatchlist && !isWatching(point)) {
+ relevantBBs -= point;
+ populatePerimeter()
+ }
+ } else {
+ val propagate = ((lattice.bottom == out(point)) || output != out(point))
+ if (propagate) {
+ out(point) = output
+ val succs = point.successors filter relevantBBs
+ succs foreach { p =>
+ assert((p.predecessors filter isOnPerimeter).isEmpty)
+ val existing = in(p)
+ // TODO move the following assertion to typeFlowLattice.lub2 for wider applicability (ie MethodTFA in addition to MTFAGrowable).
+ assert(existing == lattice.bottom ||
+ p.exceptionHandlerStart ||
+ (output.stack.length == existing.stack.length),
+ "Trying to merge non-bottom type-stacks with different stack heights. For a possible cause see SI-6157.")
+ val updated = lattice.lub(List(output, existing), p.exceptionHandlerStart)
+ if(updated != in(p)) {
+ in(p) = updated
+ enqueue(p)
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ }
+
class Timer {
var millis = 0L
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index 2f0d86c..fb1f45f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -1,17 +1,18 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
package backend.jvm
-import ch.epfl.lamp.fjbg._
-import java.io.{ DataOutputStream, OutputStream }
-import scala.tools.nsc.io.{ AbstractFile, Path }
+import java.io.{ DataOutputStream, FileOutputStream, OutputStream, File => JFile }
+import scala.tools.nsc.io._
import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.util.Javap
-import java.util.jar.{ JarEntry, JarOutputStream }
+import scala.tools.util.JavapClass
+import java.util.jar.{ JarEntry, JarOutputStream, Attributes }
+import Attributes.Name
+import scala.language.postfixOps
/** For the last mile: turning generated bytecode in memory into
* something you can use. Has implementations for writing to class
@@ -22,57 +23,61 @@ trait BytecodeWriters {
import global._
private def outputDirectory(sym: Symbol): AbstractFile = (
- settings.outputDirs.outputDirFor {
- atPhase(currentRun.flattenPhase.prev)(sym.sourceFile)
- }
+ settings.outputDirs.outputDirFor(beforeFlatten(sym.sourceFile))
)
- private def getFile(base: AbstractFile, cls: JClass, suffix: String): AbstractFile = {
+ private def getFile(base: AbstractFile, /*cls.getName()*/ clsName: String, suffix: String): AbstractFile = {
var dir = base
- val pathParts = cls.getName().split("[./]").toList
+ val pathParts = clsName.split("[./]").toList
for (part <- pathParts.init) {
dir = dir.subdirectoryNamed(part)
}
dir.fileNamed(pathParts.last + suffix)
}
- private def getFile(sym: Symbol, cls: JClass, suffix: String): AbstractFile =
- getFile(outputDirectory(sym), cls, suffix)
+ private def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
+ getFile(outputDirectory(sym), clsName, suffix)
trait BytecodeWriter {
- def writeClass(label: String, jclass: JClass, sym: Symbol): Unit
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol): Unit
def close(): Unit = ()
}
- class DirectToJarfileWriter(val jarFile: AbstractFile) extends BytecodeWriter {
- private val out = new JarOutputStream(jarFile.bufferedOutput)
- def writeClass(label: String, jclass: JClass, sym: Symbol) {
- val path = jclass.getName + ".class"
- out putNextEntry new JarEntry(path)
- val dataStream = new DataOutputStream(out)
- try jclass writeTo dataStream
- finally dataStream.flush()
+ class DirectToJarfileWriter(jfile: JFile) extends BytecodeWriter {
+ val jarMainAttrs = (
+ if (settings.mainClass.isDefault) Nil
+ else List(Name.MAIN_CLASS -> settings.mainClass.value)
+ )
+ val writer = new Jar(jfile).jarWriter(jarMainAttrs: _*)
+
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
+ val path = jclassName + ".class"
+ val out = writer.newOutputStream(path)
+
+ try out.write(jclassBytes, 0, jclassBytes.length)
+ finally out.flush()
+
informProgress("added " + label + path + " to jar")
}
- override def close() = out.close()
+ override def close() = writer.close()
}
trait JavapBytecodeWriter extends BytecodeWriter {
- val baseDir = Path(settings.Ygenjavap.value)
+ val baseDir = Directory(settings.Ygenjavap.value).createDirectory()
def emitJavap(bytes: Array[Byte], javapFile: io.File) {
val pw = javapFile.printWriter()
- val javap = new Javap(ScalaClassLoader.getSystemLoader(), pw) {
+ val javap = new JavapClass(ScalaClassLoader.appLoader, pw) {
override def findBytes(path: String): Array[Byte] = bytes
}
try javap(Seq("-verbose", "dummy")) foreach (_.show())
finally pw.close()
}
- abstract override def writeClass(label: String, jclass: JClass, sym: Symbol) {
- super.writeClass(label, jclass, sym)
+ abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
+ super.writeClass(label, jclassName, jclassBytes, sym)
- val bytes = getFile(sym, jclass, ".class").toByteArray
- val segments = jclass.getName().split("[./]")
- val javapFile = segments.foldLeft(baseDir)(_ / _) changeExtension "javap" toFile
+ val bytes = getFile(sym, jclassName, ".class").toByteArray
+ val segments = jclassName.split("[./]")
+ val javapFile = segments.foldLeft(baseDir: Path)(_ / _) changeExtension "javap" toFile;
javapFile.parent.createDirectory()
emitJavap(bytes, javapFile)
@@ -80,13 +85,29 @@ trait BytecodeWriters {
}
trait ClassBytecodeWriter extends BytecodeWriter {
- def writeClass(label: String, jclass: JClass, sym: Symbol) {
- val outfile = getFile(sym, jclass, ".class")
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
+ val outfile = getFile(sym, jclassName, ".class")
val outstream = new DataOutputStream(outfile.bufferedOutput)
- try jclass writeTo outstream
+ try outstream.write(jclassBytes, 0, jclassBytes.length)
finally outstream.close()
informProgress("wrote '" + label + "' to " + outfile)
}
}
+
+ trait DumpBytecodeWriter extends BytecodeWriter {
+ val baseDir = Directory(settings.Ydumpclasses.value).createDirectory()
+
+ abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
+ super.writeClass(label, jclassName, jclassBytes, sym)
+
+ val pathName = jclassName
+ var dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile;
+ dumpFile.parent.createDirectory()
+ val outstream = new DataOutputStream(new FileOutputStream(dumpFile.path))
+
+ try outstream.write(jclassBytes, 0, jclassBytes.length)
+ finally outstream.close()
+ }
+ }
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
new file mode 100644
index 0000000..3712745
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -0,0 +1,3355 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package backend.jvm
+
+import java.nio.ByteBuffer
+import scala.collection.{ mutable, immutable }
+import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
+import scala.tools.nsc.symtab._
+import scala.tools.nsc.io.AbstractFile
+
+import scala.tools.asm
+import asm.Label
+
+/**
+ * @author Iulian Dragos (version 1.0, FJBG-based implementation)
+ * @author Miguel Garcia (version 2.0, ASM-based implementation)
+ *
+ * Documentation at http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/2012Q2/GenASM.pdf
+ */
+abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
+ import global._
+ import icodes._
+ import icodes.opcodes._
+ import definitions._
+
+ val phaseName = "jvm"
+
+ /** Create a new phase */
+ override def newPhase(p: Phase): Phase = new AsmPhase(p)
+
+ /** JVM code generation phase
+ */
+ class AsmPhase(prev: Phase) extends ICodePhase(prev) {
+ def name = phaseName
+ override def erasedTypes = true
+ def apply(cls: IClass) = sys.error("no implementation")
+
+ val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
+
+ private def initBytecodeWriter(entryPoints: List[IClass]): BytecodeWriter = {
+ settings.outputDirs.getSingleOutput match {
+ case Some(f) if f hasExtension "jar" =>
+ // If no main class was specified, see if there's only one
+ // entry point among the classes going into the jar.
+ if (settings.mainClass.isDefault) {
+ entryPoints map (_.symbol fullName '.') match {
+ case Nil =>
+ log("No Main-Class designated or discovered.")
+ case name :: Nil =>
+ log("Unique entry point: setting Main-Class to " + name)
+ settings.mainClass.value = name
+ case names =>
+ log("No Main-Class due to multiple entry points:\n " + names.mkString("\n "))
+ }
+ }
+ else log("Main-Class was specified: " + settings.mainClass.value)
+
+ new DirectToJarfileWriter(f.file)
+
+ case _ =>
+ if (settings.Ygenjavap.isDefault) {
+ if(settings.Ydumpclasses.isDefault)
+ new ClassBytecodeWriter { }
+ else
+ new ClassBytecodeWriter with DumpBytecodeWriter { }
+ }
+ else new ClassBytecodeWriter with JavapBytecodeWriter { }
+
+ // TODO A ScalapBytecodeWriter could take asm.util.Textifier as starting point.
+ // Three areas where javap ouput is less than ideal (e.g. when comparing versions of the same classfile) are:
+ // (a) unreadable pickle;
+ // (b) two constant pools, while having identical contents, are displayed differently due to physical layout.
+ // (c) stack maps (classfile version 50 and up) are displayed in encoded form by javap, their expansion makes more sense instead.
+ }
+ }
+
+ override def run() {
+
+ if (settings.debug.value)
+ inform("[running phase " + name + " on icode]")
+
+ if (settings.Xdce.value)
+ for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
+ log(s"Optimizer eliminated ${sym.fullNameString}")
+ deadCode.elidedClosures += sym
+ icodes.classes -= sym
+ }
+
+ // For predictably ordered error messages.
+ var sortedClasses = classes.values.toList sortBy (_.symbol.fullName)
+
+ // Warn when classes will overwrite one another on case-insensitive systems.
+ for ((_, v1 :: v2 :: _) <- sortedClasses groupBy (_.symbol.javaClassName.toString.toLowerCase)) {
+ v1.cunit.warning(v1.symbol.pos,
+ s"Class ${v1.symbol.javaClassName} differs only in case from ${v2.symbol.javaClassName}. " +
+ "Such classes will overwrite one another on case-insensitive filesystems.")
+ }
+
+ debuglog("Created new bytecode generator for " + classes.size + " classes.")
+ val bytecodeWriter = initBytecodeWriter(sortedClasses filter isJavaEntryPoint)
+ val plainCodeGen = new JPlainBuilder(bytecodeWriter)
+ val mirrorCodeGen = new JMirrorBuilder(bytecodeWriter)
+ val beanInfoCodeGen = new JBeanInfoBuilder(bytecodeWriter)
+
+ while(!sortedClasses.isEmpty) {
+ val c = sortedClasses.head
+
+ if (isStaticModule(c.symbol) && isTopLevelModule(c.symbol)) {
+ if (c.symbol.companionClass == NoSymbol) {
+ mirrorCodeGen.genMirrorClass(c.symbol, c.cunit)
+ } else {
+ log("No mirror class for module with linked class: " + c.symbol.fullName)
+ }
+ }
+
+ plainCodeGen.genClass(c)
+
+ if (c.symbol hasAnnotation BeanInfoAttr) {
+ beanInfoCodeGen.genBeanInfoClass(c)
+ }
+
+ sortedClasses = sortedClasses.tail
+ classes -= c.symbol // GC opportunity
+ }
+
+ bytecodeWriter.close()
+ classes.clear()
+ reverseJavaName.clear()
+
+ /* don't javaNameCache.clear() because that causes the following tests to fail:
+ * test/files/run/macro-repl-dontexpand.scala
+ * test/files/jvm/interpreter.scala
+ * TODO but why? what use could javaNameCache possibly see once GenJVM is over?
+ */
+
+ /* TODO After emitting all class files (e.g., in a separate compiler phase) ASM can perform bytecode verification:
+ *
+ * (1) call the asm.util.CheckAdapter.verify() overload:
+ * public static void verify(ClassReader cr, ClassLoader loader, boolean dump, PrintWriter pw)
+ *
+ * (2) passing a custom ClassLoader to verify inter-dependent classes.
+ *
+ * Alternatively, an offline-bytecode verifier could be used (e.g. Maxine brings one as separate tool).
+ */
+
+ } // end of AsmPhase.run()
+
+ } // end of class AsmPhase
+
+ var pickledBytes = 0 // statistics
+
+ // Don't put this in per run caches. Contains entries for classes as well as members.
+ val javaNameCache = new mutable.WeakHashMap[Symbol, Name]() ++= List(
+ NothingClass -> binarynme.RuntimeNothing,
+ RuntimeNothingClass -> binarynme.RuntimeNothing,
+ NullClass -> binarynme.RuntimeNull,
+ RuntimeNullClass -> binarynme.RuntimeNull
+ )
+
+ // unlike javaNameCache, reverseJavaName contains entries only for class symbols and their internal names.
+ val reverseJavaName = mutable.Map.empty[String, Symbol] ++= List(
+ binarynme.RuntimeNothing.toString() -> RuntimeNothingClass, // RuntimeNothingClass is the bytecode-level return type of Scala methods with Nothing return-type.
+ binarynme.RuntimeNull.toString() -> RuntimeNullClass
+ )
+
+ private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
+ private def hasPublicBitSet(flags: Int) = (flags & asm.Opcodes.ACC_PUBLIC) != 0
+ private def isRemote(s: Symbol) = s hasAnnotation RemoteAttr
+
+ /**
+ * Return the Java modifiers for the given symbol.
+ * Java modifiers for classes:
+ * - public, abstract, final, strictfp (not used)
+ * for interfaces:
+ * - the same as for classes, without 'final'
+ * for fields:
+ * - public, private (*)
+ * - static, final
+ * for methods:
+ * - the same as for fields, plus:
+ * - abstract, synchronized (not used), strictfp (not used), native (not used)
+ *
+ * (*) protected cannot be used, since inner classes 'see' protected members,
+ * and they would fail verification after lifted.
+ */
+ def javaFlags(sym: Symbol): Int = {
+ // constructors of module classes should be private
+ // PP: why are they only being marked private at this stage and not earlier?
+ val privateFlag =
+ sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner))
+
+ // Final: the only fields which can receive ACC_FINAL are eager vals.
+ // Neither vars nor lazy vals can, because:
+ //
+ // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3
+ // "Another problem is that the specification allows aggressive
+ // optimization of final fields. Within a thread, it is permissible to
+ // reorder reads of a final field with those modifications of a final
+ // field that do not take place in the constructor."
+ //
+ // A var or lazy val which is marked final still has meaning to the
+ // scala compiler. The word final is heavily overloaded unfortunately;
+ // for us it means "not overridable". At present you can't override
+ // vars regardless; this may change.
+ //
+ // The logic does not check .isFinal (which checks flags for the FINAL flag,
+ // and includes symbols marked lateFINAL) instead inspecting rawflags so
+ // we can exclude lateFINAL. Such symbols are eligible for inlining, but to
+ // avoid breaking proxy software which depends on subclassing, we do not
+ // emit ACC_FINAL.
+ // Nested objects won't receive ACC_FINAL in order to allow for their overriding.
+
+ val finalFlag = (
+ (((sym.rawflags & Flags.FINAL) != 0) || isTopLevelModule(sym))
+ && !sym.enclClass.isInterface
+ && !sym.isClassConstructor
+ && !sym.isMutable // lazy vals and vars both
+ )
+
+ // Primitives are "abstract final" to prohibit instantiation
+ // without having to provide any implementations, but that is an
+ // illegal combination of modifiers at the bytecode level so
+ // suppress final if abstract if present.
+ import asm.Opcodes._
+ mkFlags(
+ if (privateFlag) ACC_PRIVATE else ACC_PUBLIC,
+ if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0,
+ if (sym.isInterface) ACC_INTERFACE else 0,
+ if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
+ if (sym.isStaticMember) ACC_STATIC else 0,
+ if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
+ if (sym.isArtifact) ACC_SYNTHETIC else 0,
+ if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
+ if (sym.isVarargsMethod) ACC_VARARGS else 0,
+ if (sym.hasFlag(Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0
+ )
+ }
+
+ def javaFieldFlags(sym: Symbol) = {
+ javaFlags(sym) | mkFlags(
+ if (sym hasAnnotation TransientAttr) asm.Opcodes.ACC_TRANSIENT else 0,
+ if (sym hasAnnotation VolatileAttr) asm.Opcodes.ACC_VOLATILE else 0,
+ if (sym.isMutable) 0 else asm.Opcodes.ACC_FINAL
+ )
+ }
+
+ def isTopLevelModule(sym: Symbol): Boolean =
+ afterPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
+
+ def isStaticModule(sym: Symbol): Boolean = {
+ sym.isModuleClass && !sym.isImplClass && !sym.isLifted
+ }
+
+ // -----------------------------------------------------------------------------------------
+ // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM)
+ // Background:
+ // http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf
+ // http://comments.gmane.org/gmane.comp.java.vm.languages/2293
+ // https://issues.scala-lang.org/browse/SI-3872
+ // -----------------------------------------------------------------------------------------
+
+ /**
+ * Given an internal name (eg "java/lang/Integer") returns the class symbol for it.
+ *
+ * Better not to need this method (an example where control flow arrives here is welcome).
+ * This method is invoked only upon both (1) and (2) below happening:
+ * (1) providing an asm.ClassWriter with an internal name by other means than javaName()
+ * (2) forgetting to track the corresponding class-symbol in reverseJavaName.
+ *
+ * (The first item is already unlikely because we rely on javaName()
+ * to do the bookkeeping for entries that should go in innerClassBuffer.)
+ *
+ * (We could do completely without this method at the expense of computing stack-map-frames ourselves and
+ * invoking visitFrame(), but that would require another pass over all instructions.)
+ *
+ * Right now I can't think of any invocation of visitSomething() on MethodVisitor
+ * where we hand an internal name not backed by a reverseJavaName.
+ * However, I'm leaving this note just in case any such oversight is discovered.
+ */
+ def inameToSymbol(iname: String): Symbol = {
+ val name = global.newTypeName(iname)
+ val res0 =
+ if (nme.isModuleName(name)) rootMirror.getModule(nme.stripModuleSuffix(name))
+ else rootMirror.getClassByName(name.replace('/', '.')) // TODO fails for inner classes (but this hasn't been tested).
+ assert(res0 != NoSymbol)
+ val res = jsymbol(res0)
+ res
+ }
+
+ def jsymbol(sym: Symbol): Symbol = {
+ if(sym.isJavaDefined && sym.isModuleClass) sym.linkedClassOfClass
+ else if(sym.isModule) sym.moduleClass
+ else sym // we track only module-classes and plain-classes
+ }
+
+ private def superClasses(s: Symbol): List[Symbol] = {
+ assert(!s.isInterface)
+ s.superClass match {
+ case NoSymbol => List(s)
+ case sc => s :: superClasses(sc)
+ }
+ }
+
+ private def firstCommonSuffix(as: List[Symbol], bs: List[Symbol]): Symbol = {
+ assert(!(as contains NoSymbol))
+ assert(!(bs contains NoSymbol))
+ var chainA = as
+ var chainB = bs
+ var fcs: Symbol = NoSymbol
+ do {
+ if (chainB contains chainA.head) fcs = chainA.head
+ else if (chainA contains chainB.head) fcs = chainB.head
+ else {
+ chainA = chainA.tail
+ chainB = chainB.tail
+ }
+ } while(fcs == NoSymbol)
+ fcs
+ }
+
+ private def jvmWiseLUB(a: Symbol, b: Symbol): Symbol = {
+ assert(a.isClass)
+ assert(b.isClass)
+
+ val res = Pair(a.isInterface, b.isInterface) match {
+ case (true, true) =>
+ global.lub(List(a.tpe, b.tpe)).typeSymbol // TODO assert == firstCommonSuffix of resp. parents
+ case (true, false) =>
+ if(b isSubClass a) a else ObjectClass
+ case (false, true) =>
+ if(a isSubClass b) b else ObjectClass
+ case _ =>
+ firstCommonSuffix(superClasses(a), superClasses(b))
+ }
+ assert(res != NoSymbol)
+ res
+ }
+
+ /* The internal name of the least common ancestor of the types given by inameA and inameB.
+ It's what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow */
+ def getCommonSuperClass(inameA: String, inameB: String): String = {
+ val a = reverseJavaName.getOrElseUpdate(inameA, inameToSymbol(inameA))
+ val b = reverseJavaName.getOrElseUpdate(inameB, inameToSymbol(inameB))
+
+ // global.lub(List(a.tpe, b.tpe)).typeSymbol.javaBinaryName.toString()
+ // icodes.lub(icodes.toTypeKind(a.tpe), icodes.toTypeKind(b.tpe)).toType
+ val lcaSym = jvmWiseLUB(a, b)
+ val lcaName = lcaSym.javaBinaryName.toString // don't call javaName because that side-effects innerClassBuffer.
+ val oldsym = reverseJavaName.put(lcaName, lcaSym)
+ assert(oldsym.isEmpty || (oldsym.get == lcaSym), "somehow we're not managing to compute common-super-class for ASM consumption")
+ assert(lcaName != "scala/Any")
+
+ lcaName // TODO ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Do some caching.
+ }
+
+ class CClassWriter(flags: Int) extends asm.ClassWriter(flags) {
+ override def getCommonSuperClass(iname1: String, iname2: String): String = {
+ GenASM.this.getCommonSuperClass(iname1, iname2)
+ }
+ }
+
+ // -----------------------------------------------------------------------------------------
+ // constants
+ // -----------------------------------------------------------------------------------------
+
+ private val classfileVersion: Int = settings.target.value match {
+ case "jvm-1.5" => asm.Opcodes.V1_5
+ case "jvm-1.5-asm" => asm.Opcodes.V1_5
+ case "jvm-1.6" => asm.Opcodes.V1_6
+ case "jvm-1.7" => asm.Opcodes.V1_7
+ }
+
+ private val majorVersion: Int = (classfileVersion & 0xFF)
+ private val emitStackMapFrame = (majorVersion >= 50)
+
+ private val extraProc: Int = mkFlags(
+ asm.ClassWriter.COMPUTE_MAXS,
+ if(emitStackMapFrame) asm.ClassWriter.COMPUTE_FRAMES else 0
+ )
+
+ val JAVA_LANG_OBJECT = asm.Type.getObjectType("java/lang/Object")
+ val JAVA_LANG_STRING = asm.Type.getObjectType("java/lang/String")
+
+ /**
+ * We call many Java varargs methods from ASM library that expect Arra[asm.Type] as argument so
+ * we override default (compiler-generated) ClassTag so we can provide specialized newArray implementation.
+ *
+ * Examples of methods that should pick our definition are: JBuilder.javaType and JPlainBuilder.genMethod.
+ */
+ private implicit val asmTypeTag: scala.reflect.ClassTag[asm.Type] = new scala.reflect.ClassTag[asm.Type] {
+ def runtimeClass: java.lang.Class[asm.Type] = classOf[asm.Type]
+ final override def newArray(len: Int): Array[asm.Type] = new Array[asm.Type](len)
+ }
+
+ /** basic functionality for class file building */
+ abstract class JBuilder(bytecodeWriter: BytecodeWriter) {
+
+ val EMPTY_JTYPE_ARRAY = Array.empty[asm.Type]
+ val EMPTY_STRING_ARRAY = Array.empty[String]
+
+ val mdesc_arglessvoid = "()V"
+
+ val CLASS_CONSTRUCTOR_NAME = "<clinit>"
+ val INSTANCE_CONSTRUCTOR_NAME = "<init>"
+
+ val INNER_CLASSES_FLAGS =
+ (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT)
+
+ // -----------------------------------------------------------------------------------------
+ // factory methods
+ // -----------------------------------------------------------------------------------------
+
+ /**
+ * Returns a new ClassWriter for the class given by arguments.
+ *
+ * @param access the class's access flags. This parameter also indicates if the class is deprecated.
+ *
+ * @param name the internal name of the class.
+ *
+ * @param signature the signature of this class. May be <tt>null</tt> if
+ * the class is not a generic one, and does not extend or implement
+ * generic classes or interfaces.
+ *
+ * @param superName the internal of name of the super class. For interfaces,
+ * the super class is {@link Object}. May be <tt>null</tt>, but
+ * only for the {@link Object} class.
+ *
+ * @param interfaces the internal names of the class's interfaces (see
+ * {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
+ */
+ def createJClass(access: Int, name: String, signature: String, superName: String, interfaces: Array[String]): asm.ClassWriter = {
+ val cw = new CClassWriter(extraProc)
+ cw.visit(classfileVersion,
+ access, name, signature,
+ superName, interfaces)
+
+ cw
+ }
+
+ def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = {
+ val dest = new Array[Byte](len);
+ System.arraycopy(b, offset, dest, 0, len);
+ new asm.CustomAttr(name, dest)
+ }
+
+ // -----------------------------------------------------------------------------------------
+ // utilities useful when emitting plain, mirror, and beaninfo classes.
+ // -----------------------------------------------------------------------------------------
+
+ def writeIfNotTooBig(label: String, jclassName: String, jclass: asm.ClassWriter, sym: Symbol) {
+ try {
+ val arr = jclass.toByteArray()
+ bytecodeWriter.writeClass(label, jclassName, arr, sym)
+ } catch {
+ case e: java.lang.RuntimeException if e != null && (e.getMessage contains "too large!") =>
+ reporter.error(sym.pos,
+ s"Could not write class $jclassName because it exceeds JVM code size limits. ${e.getMessage}")
+ }
+ }
+
+ /** Specialized array conversion to prevent calling
+ * java.lang.reflect.Array.newInstance via TraversableOnce.toArray
+ */
+ def mkArray(xs: Traversable[asm.Type]): Array[asm.Type] = { val a = new Array[asm.Type](xs.size); xs.copyToArray(a); a }
+ def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a }
+
+ // -----------------------------------------------------------------------------------------
+ // Getters for (JVMS 4.2) internal and unqualified names (represented as JType instances).
+ // These getters track behind the scenes the inner classes referred to in the class being emitted,
+ // so as to build the InnerClasses attribute (JVMS 4.7.6) via `addInnerClasses()`
+ // (which also adds as member classes those inner classes that have been declared,
+ // thus also covering the case of inner classes declared but otherwise not referred).
+ // -----------------------------------------------------------------------------------------
+
+ val innerClassBuffer = mutable.LinkedHashSet[Symbol]()
+
+ /** For given symbol return a symbol corresponding to a class that should be declared as inner class.
+ *
+ * For example:
+ * class A {
+ * class B
+ * object C
+ * }
+ *
+ * then method will return:
+ * NoSymbol for A,
+ * the same symbol for A.B (corresponding to A$B class), and
+ * A$C$ symbol for A.C.
+ */
+ def innerClassSymbolFor(s: Symbol): Symbol =
+ if (s.isClass) s else if (s.isModule) s.moduleClass else NoSymbol
+
+ /** Return the name of this symbol that can be used on the Java platform. It removes spaces from names.
+ *
+ * Special handling:
+ * scala.Nothing erases to scala.runtime.Nothing$
+ * scala.Null erases to scala.runtime.Null$
+ *
+ * This is needed because they are not real classes, and they mean
+ * 'abrupt termination upon evaluation of that expression' or null respectively.
+ * This handling is done already in GenICode, but here we need to remove
+ * references from method signatures to these types, because such classes
+ * cannot exist in the classpath: the type checker will be very confused.
+ */
+ def javaName(sym: Symbol): String = {
+
+ /**
+ * Checks if given symbol corresponds to inner class/object and add it to innerClassBuffer
+ *
+ * Note: This method is called recursively thus making sure that we add complete chain
+ * of inner class all until root class.
+ */
+ def collectInnerClass(s: Symbol): Unit = {
+ // TODO: some beforeFlatten { ... } which accounts for
+ // being nested in parameterized classes (if we're going to selectively flatten.)
+ val x = innerClassSymbolFor(s)
+ if(x ne NoSymbol) {
+ assert(x.isClass, "not an inner-class symbol")
+ val isInner = !x.rawowner.isPackageClass
+ if (isInner) {
+ innerClassBuffer += x
+ collectInnerClass(x.rawowner)
+ }
+ }
+ }
+
+ collectInnerClass(sym)
+
+ var hasInternalName = (sym.isClass || (sym.isModule && !sym.isMethod))
+ val cachedJN = javaNameCache.getOrElseUpdate(sym, {
+ if (hasInternalName) { sym.javaBinaryName }
+ else { sym.javaSimpleName }
+ })
+
+ if(emitStackMapFrame && hasInternalName) {
+ val internalName = cachedJN.toString()
+ val trackedSym = jsymbol(sym)
+ reverseJavaName.get(internalName) match {
+ case None =>
+ reverseJavaName.put(internalName, trackedSym)
+ case Some(oldsym) =>
+ assert((oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass) ||
+ (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule)), // In contrast, neither NothingClass nor NullClass show up bytecode-level.
+ "how can getCommonSuperclass() do its job if different class symbols get the same bytecode-level internal name: " + internalName)
+ }
+ }
+
+ cachedJN.toString
+ }
+
+ def descriptor(t: Type): String = { javaType(t).getDescriptor }
+ def descriptor(k: TypeKind): String = { javaType(k).getDescriptor }
+ def descriptor(s: Symbol): String = { javaType(s).getDescriptor }
+
+ def javaType(tk: TypeKind): asm.Type = {
+ if(tk.isValueType) {
+ if(tk.isIntSizedType) {
+ (tk: @unchecked) match {
+ case BOOL => asm.Type.BOOLEAN_TYPE
+ case BYTE => asm.Type.BYTE_TYPE
+ case SHORT => asm.Type.SHORT_TYPE
+ case CHAR => asm.Type.CHAR_TYPE
+ case INT => asm.Type.INT_TYPE
+ }
+ } else {
+ (tk: @unchecked) match {
+ case UNIT => asm.Type.VOID_TYPE
+ case LONG => asm.Type.LONG_TYPE
+ case FLOAT => asm.Type.FLOAT_TYPE
+ case DOUBLE => asm.Type.DOUBLE_TYPE
+ }
+ }
+ } else {
+ assert(!tk.isBoxedType, tk) // documentation (BOXED matches none below anyway)
+ (tk: @unchecked) match {
+ case REFERENCE(cls) => asm.Type.getObjectType(javaName(cls))
+ case ARRAY(elem) => javaArrayType(javaType(elem))
+ }
+ }
+ }
+
+ def javaType(t: Type): asm.Type = javaType(toTypeKind(t))
+
+ def javaType(s: Symbol): asm.Type = {
+ if (s.isMethod) {
+ val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType);
+ asm.Type.getMethodType( resT, (s.tpe.paramTypes map javaType): _*)
+ } else { javaType(s.tpe) }
+ }
+
+ def javaArrayType(elem: asm.Type): asm.Type = { asm.Type.getObjectType("[" + elem.getDescriptor) }
+
+ def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
+
+ def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
+ /** The outer name for this inner class. Note that it returns null
+ * when the inner class should not get an index in the constant pool.
+ * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
+ */
+ def outerName(innerSym: Symbol): String = {
+ if (innerSym.originalEnclosingMethod != NoSymbol)
+ null
+ else {
+ val outerName = javaName(innerSym.rawowner)
+ if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
+ else outerName
+ }
+ }
+
+ def innerName(innerSym: Symbol): String =
+ if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
+ null
+ else
+ innerSym.rawname + innerSym.moduleSuffix
+
+ // add inner classes which might not have been referenced yet
+ afterErasure {
+ for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
+ innerClassBuffer += m
+ }
+
+ val allInners: List[Symbol] = innerClassBuffer.toList filterNot deadCode.elidedClosures
+
+ if (allInners.nonEmpty) {
+ debuglog(csym.fullName('.') + " contains " + allInners.size + " inner classes.")
+
+ // entries ready to be serialized into the classfile, used to detect duplicates.
+ val entries = mutable.Map.empty[String, String]
+
+ // sort them so inner classes succeed their enclosing class to satisfy the Eclipse Java compiler
+ for (innerSym <- allInners sortBy (_.name.length)) { // TODO why not sortBy (_.name.toString()) ??
+ val flags = mkFlags(
+ if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
+ javaFlags(innerSym),
+ if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
+ ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
+ val jname = javaName(innerSym) // never null
+ val oname = outerName(innerSym) // null when method-enclosed
+ val iname = innerName(innerSym) // null for anonymous inner class
+
+ // Mimicking javap inner class output
+ debuglog(
+ if (oname == null || iname == null) "//class " + jname
+ else "//%s=class %s of class %s".format(iname, jname, oname)
+ )
+
+ assert(jname != null, "javaName is broken.") // documentation
+ val doAdd = entries.get(jname) match {
+ // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
+ case Some(prevOName) =>
+ // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
+ // i.e. for them it must be the case that oname == java/lang/Thread
+ assert(prevOName == oname, "duplicate")
+ false
+ case None => true
+ }
+
+ if(doAdd) {
+ entries += (jname -> oname)
+ jclass.visitInnerClass(jname, oname, iname, flags)
+ }
+
+ /*
+ * TODO assert (JVMS 4.7.6 The InnerClasses attribute)
+ * If a class file has a version number that is greater than or equal to 51.0, and
+ * has an InnerClasses attribute in its attributes table, then for all entries in the
+ * classes array of the InnerClasses attribute, the value of the
+ * outer_class_info_index item must be zero if the value of the
+ * inner_name_index item is zero.
+ */
+
+ }
+ }
+ }
+
+ } // end of class JBuilder
+
+
+ /** functionality for building plain and mirror classes */
+ abstract class JCommonBuilder(bytecodeWriter: BytecodeWriter) extends JBuilder(bytecodeWriter) {
+
+ def debugLevel = settings.debuginfo.indexOfChoice
+
+ val emitSource = debugLevel >= 1
+ val emitLines = debugLevel >= 2
+ val emitVars = debugLevel >= 3
+
+ // -----------------------------------------------------------------------------------------
+ // more constants
+ // -----------------------------------------------------------------------------------------
+
+ val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC
+ val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL
+
+ val strMODULE_INSTANCE_FIELD = nme.MODULE_INSTANCE_FIELD.toString
+
+ // -----------------------------------------------------------------------------------------
+ // Custom attribute (JVMS 4.7.1) "ScalaSig" used as marker only
+ // i.e., the pickle is contained in a custom annotation, see:
+ // (1) `addAnnotations()`,
+ // (2) SID # 10 (draft) - Storage of pickled Scala signatures in class files, http://www.scala-lang.org/sid/10
+ // (3) SID # 5 - Internals of Scala Annotations, http://www.scala-lang.org/sid/5
+ // That annotation in turn is not related to the "java-generic-signature" (JVMS 4.7.9)
+ // other than both ending up encoded as attributes (JVMS 4.7)
+ // (with the caveat that the "ScalaSig" attribute is associated to some classes,
+ // while the "Signature" attribute can be associated to classes, methods, and fields.)
+ // -----------------------------------------------------------------------------------------
+
+ val versionPickle = {
+ val vp = new PickleBuffer(new Array[Byte](16), -1, 0)
+ assert(vp.writeIndex == 0, vp)
+ vp writeNat PickleFormat.MajorVersion
+ vp writeNat PickleFormat.MinorVersion
+ vp writeNat 0
+ vp
+ }
+
+ def pickleMarkerLocal = {
+ createJAttribute(tpnme.ScalaSignatureATTR.toString, versionPickle.bytes, 0, versionPickle.writeIndex)
+ }
+
+ def pickleMarkerForeign = {
+ createJAttribute(tpnme.ScalaATTR.toString, new Array[Byte](0), 0, 0)
+ }
+
+ /** Returns a ScalaSignature annotation if it must be added to this class, none otherwise.
+ * This annotation must be added to the class' annotations list when generating them.
+ *
+ * Depending on whether the returned option is defined, it adds to `jclass` one of:
+ * (a) the ScalaSig marker attribute
+ * (indicating that a scala-signature-annotation aka pickle is present in this class); or
+ * (b) the Scala marker attribute
+ * (indicating that a scala-signature-annotation aka pickle is to be found in another file).
+ *
+ *
+ * @param jclassName The class file that is being readied.
+ * @param sym The symbol for which the signature has been entered in the symData map.
+ * This is different than the symbol
+ * that is being generated in the case of a mirror class.
+ * @return An option that is:
+ * - defined and contains an AnnotationInfo of the ScalaSignature type,
+ * instantiated with the pickle signature for sym.
+ * - empty if the jclass/sym pair must not contain a pickle.
+ *
+ */
+ def getAnnotPickle(jclassName: String, sym: Symbol): Option[AnnotationInfo] = {
+ currentRun.symData get sym match {
+ case Some(pickle) if !nme.isModuleName(newTermName(jclassName)) =>
+ val scalaAnnot = {
+ val sigBytes = ScalaSigBytes(pickle.bytes.take(pickle.writeIndex))
+ AnnotationInfo(sigBytes.sigAnnot, Nil, List((nme.bytes, sigBytes)))
+ }
+ pickledBytes += pickle.writeIndex
+ currentRun.symData -= sym
+ currentRun.symData -= sym.companionSymbol
+ Some(scalaAnnot)
+ case _ =>
+ None
+ }
+ }
+
+ /**
+ * Quoting from JVMS 4.7.5 The Exceptions Attribute
+ * "The Exceptions attribute indicates which checked exceptions a method may throw.
+ * There may be at most one Exceptions attribute in each method_info structure."
+ *
+ * The contents of that attribute are determined by the `String[] exceptions` argument to ASM's ClassVisitor.visitMethod()
+ * This method returns such list of internal names.
+ */
+ def getExceptions(excs: List[AnnotationInfo]): List[String] =
+ for (ThrownException(exc) <- excs.distinct)
+ yield javaName(exc)
+
+ /** Whether an annotation should be emitted as a Java annotation
+ * .initialize: if 'annot' is read from pickle, atp might be un-initialized
+ */
+ private def shouldEmitAnnotation(annot: AnnotationInfo) =
+ annot.symbol.initialize.isJavaDefined &&
+ annot.matches(ClassfileAnnotationClass) &&
+ annot.args.isEmpty &&
+ !annot.matches(DeprecatedAttr)
+
+ // @M don't generate java generics sigs for (members of) implementation
+ // classes, as they are monomorphic (TODO: ok?)
+ private def needsGenericSignature(sym: Symbol) = !(
+ // PP: This condition used to include sym.hasExpandedName, but this leads
+ // to the total loss of generic information if a private member is
+ // accessed from a closure: both the field and the accessor were generated
+ // without it. This is particularly bad because the availability of
+ // generic information could disappear as a consequence of a seemingly
+ // unrelated change.
+ settings.Ynogenericsig.value
+ || sym.isArtifact
+ || sym.isLiftedMethod
+ || sym.isBridge
+ || (sym.ownerChain exists (_.isImplClass))
+ )
+
+ def getCurrentCUnit(): CompilationUnit
+
+ /** @return
+ * - `null` if no Java signature is to be added (`null` is what ASM expects in these cases).
+ * - otherwise the signature in question
+ */
+ def getGenericSignature(sym: Symbol, owner: Symbol): String = {
+
+ if (!needsGenericSignature(sym)) { return null }
+
+ val memberTpe = beforeErasure(owner.thisType.memberInfo(sym))
+
+ val jsOpt: Option[String] = erasure.javaSig(sym, memberTpe)
+ if (jsOpt.isEmpty) { return null }
+
+ val sig = jsOpt.get
+ log(sig) // This seems useful enough in the general case.
+
+ def wrap(op: => Unit) = {
+ try { op; true }
+ catch { case _: Throwable => false }
+ }
+
+ if (settings.Xverify.value) {
+ // Run the signature parser to catch bogus signatures.
+ val isValidSignature = wrap {
+ // Alternative: scala.tools.reflect.SigParser (frontend to sun.reflect.generics.parser.SignatureParser)
+ import scala.tools.asm.util.SignatureChecker
+ if (sym.isMethod) { SignatureChecker checkMethodSignature sig } // requires asm-util.jar
+ else if (sym.isTerm) { SignatureChecker checkFieldSignature sig }
+ else { SignatureChecker checkClassSignature sig }
+ }
+
+ if(!isValidSignature) {
+ getCurrentCUnit().warning(sym.pos,
+ """|compiler bug: created invalid generic signature for %s in %s
+ |signature: %s
+ |if this is reproducible, please report bug at https://issues.scala-lang.org/
+ """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig))
+ return null
+ }
+ }
+
+ if ((settings.check containsName phaseName)) {
+ val normalizedTpe = beforeErasure(erasure.prepareSigMap(memberTpe))
+ val bytecodeTpe = owner.thisType.memberInfo(sym)
+ if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
+ getCurrentCUnit().warning(sym.pos,
+ """|compiler bug: created generic signature for %s in %s that does not conform to its erasure
+ |signature: %s
+ |original type: %s
+ |normalized type: %s
+ |erasure type: %s
+ |if this is reproducible, please report bug at http://issues.scala-lang.org/
+ """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig, memberTpe, normalizedTpe, bytecodeTpe))
+ return null
+ }
+ }
+
+ sig
+ }
+
+ def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = {
+ val ca = new Array[Char](bytes.size)
+ var idx = 0
+ while(idx < bytes.size) {
+ val b: Byte = bytes(idx)
+ assert((b & ~0x7f) == 0)
+ ca(idx) = b.asInstanceOf[Char]
+ idx += 1
+ }
+
+ ca
+ }
+
+ private def arrEncode(sb: ScalaSigBytes): Array[String] = {
+ var strs: List[String] = Nil
+ val bSeven: Array[Byte] = sb.sevenBitsMayBeZero
+ // chop into slices of at most 65535 bytes, counting 0x00 as taking two bytes (as per JVMS 4.4.7 The CONSTANT_Utf8_info Structure)
+ var prevOffset = 0
+ var offset = 0
+ var encLength = 0
+ while(offset < bSeven.size) {
+ val deltaEncLength = (if(bSeven(offset) == 0) 2 else 1)
+ val newEncLength = encLength.toLong + deltaEncLength
+ if(newEncLength >= 65535) {
+ val ba = bSeven.slice(prevOffset, offset)
+ strs ::= new java.lang.String(ubytesToCharArray(ba))
+ encLength = 0
+ prevOffset = offset
+ } else {
+ encLength += deltaEncLength
+ offset += 1
+ }
+ }
+ if(prevOffset < offset) {
+ assert(offset == bSeven.length)
+ val ba = bSeven.slice(prevOffset, offset)
+ strs ::= new java.lang.String(ubytesToCharArray(ba))
+ }
+ assert(strs.size > 1, "encode instead as one String via strEncode()") // TODO too strict?
+ strs.reverse.toArray
+ }
+
+ private def strEncode(sb: ScalaSigBytes): String = {
+ val ca = ubytesToCharArray(sb.sevenBitsMayBeZero)
+ new java.lang.String(ca)
+ // debug val bvA = new asm.ByteVector; bvA.putUTF8(s)
+ // debug val enc: Array[Byte] = scala.reflect.internal.pickling.ByteCodecs.encode(bytes)
+ // debug assert(enc(idx) == bvA.getByte(idx + 2))
+ // debug assert(bvA.getLength == enc.size + 2)
+ }
+
+ def emitArgument(av: asm.AnnotationVisitor,
+ name: String,
+ arg: ClassfileAnnotArg) {
+ arg match {
+
+ case LiteralAnnotArg(const) =>
+ if(const.isNonUnitAnyVal) { av.visit(name, const.value) }
+ else {
+ const.tag match {
+ case StringTag =>
+ assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant`
+ av.visit(name, const.stringValue) // `stringValue` special-cases null, but that execution path isn't exercised for a const with StringTag
+ case ClazzTag => av.visit(name, javaType(const.typeValue))
+ case EnumTag =>
+ val edesc = descriptor(const.tpe) // the class descriptor of the enumeration class.
+ val evalue = const.symbolValue.name.toString // value the actual enumeration value.
+ av.visitEnum(name, edesc, evalue)
+ }
+ }
+
+ case sb at ScalaSigBytes(bytes) =>
+ // see http://www.scala-lang.org/sid/10 (Storage of pickled Scala signatures in class files)
+ // also JVMS Sec. 4.7.16.1 The element_value structure and JVMS Sec. 4.4.7 The CONSTANT_Utf8_info Structure.
+ if (sb.fitsInOneString)
+ av.visit(name, strEncode(sb))
+ else {
+ val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name)
+ for(arg <- arrEncode(sb)) { arrAnnotV.visit(name, arg) }
+ arrAnnotV.visitEnd()
+ }
+ // for the lazy val in ScalaSigBytes to be GC'ed, the invoker of emitAnnotations() should hold the ScalaSigBytes in a method-local var that doesn't escape.
+
+ case ArrayAnnotArg(args) =>
+ val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name)
+ for(arg <- args) { emitArgument(arrAnnotV, null, arg) }
+ arrAnnotV.visitEnd()
+
+ case NestedAnnotArg(annInfo) =>
+ val AnnotationInfo(typ, args, assocs) = annInfo
+ assert(args.isEmpty, args)
+ val desc = descriptor(typ) // the class descriptor of the nested annotation class
+ val nestedVisitor = av.visitAnnotation(name, desc)
+ emitAssocs(nestedVisitor, assocs)
+ }
+ }
+
+ def emitAssocs(av: asm.AnnotationVisitor, assocs: List[(Name, ClassfileAnnotArg)]) {
+ for ((name, value) <- assocs) {
+ emitArgument(av, name.toString(), value)
+ }
+ av.visitEnd()
+ }
+
+ def emitAnnotations(cw: asm.ClassVisitor, annotations: List[AnnotationInfo]) {
+ for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val av = cw.visitAnnotation(descriptor(typ), true)
+ emitAssocs(av, assocs)
+ }
+ }
+
+ def emitAnnotations(mw: asm.MethodVisitor, annotations: List[AnnotationInfo]) {
+ for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val av = mw.visitAnnotation(descriptor(typ), true)
+ emitAssocs(av, assocs)
+ }
+ }
+
+ def emitAnnotations(fw: asm.FieldVisitor, annotations: List[AnnotationInfo]) {
+ for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val av = fw.visitAnnotation(descriptor(typ), true)
+ emitAssocs(av, assocs)
+ }
+ }
+
+ def emitParamAnnotations(jmethod: asm.MethodVisitor, pannotss: List[List[AnnotationInfo]]) {
+ val annotationss = pannotss map (_ filter shouldEmitAnnotation)
+ if (annotationss forall (_.isEmpty)) return
+ for (Pair(annots, idx) <- annotationss.zipWithIndex;
+ annot <- annots) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), true)
+ emitAssocs(pannVisitor, assocs)
+ }
+ }
+
+ /** Adds a @remote annotation, actual use unknown.
+ *
+ * Invoked from genMethod() and addForwarder().
+ */
+ def addRemoteExceptionAnnot(isRemoteClass: Boolean, isJMethodPublic: Boolean, meth: Symbol) {
+ val needsAnnotation = (
+ ( isRemoteClass ||
+ isRemote(meth) && isJMethodPublic
+ ) && !(meth.throwsAnnotations contains RemoteExceptionClass)
+ )
+ if (needsAnnotation) {
+ val c = Constant(RemoteExceptionClass.tpe)
+ val arg = Literal(c) setType c.tpe
+ meth.addAnnotation(appliedType(ThrowsClass, c.tpe), arg)
+ }
+ }
+
+ // -----------------------------------------------------------------------------------------
+ // Static forwarders (related to mirror classes but also present in
+ // a plain class lacking companion module, for details see `isCandidateForForwarders`).
+ // -----------------------------------------------------------------------------------------
+
+ /** Add a forwarder for method m. Used only from addForwarders(). */
+ private def addForwarder(isRemoteClass: Boolean, jclass: asm.ClassVisitor, module: Symbol, m: Symbol) {
+ val moduleName = javaName(module)
+ val methodInfo = module.thisType.memberInfo(m)
+ val paramJavaTypes: List[asm.Type] = methodInfo.paramTypes map javaType
+ // val paramNames = 0 until paramJavaTypes.length map ("x_" + _)
+
+ /** Forwarders must not be marked final,
+ * as the JVM will not allow redefinition of a final static method,
+ * and we don't know what classes might be subclassing the companion class. See SI-4827.
+ */
+ // TODO: evaluate the other flags we might be dropping on the floor here.
+ // TODO: ACC_SYNTHETIC ?
+ val flags = PublicStatic | (
+ if (m.isVarargsMethod) asm.Opcodes.ACC_VARARGS else 0
+ )
+
+ // TODO needed? for(ann <- m.annotations) { ann.symbol.initialize }
+ val jgensig = if (m.isDeferred) null else getGenericSignature(m, module); // only add generic signature if method concrete; bug #1745
+ addRemoteExceptionAnnot(isRemoteClass, hasPublicBitSet(flags), m)
+ val (throws, others) = m.annotations partition (_.symbol == ThrowsClass)
+ val thrownExceptions: List[String] = getExceptions(throws)
+
+ val jReturnType = javaType(methodInfo.resultType)
+ val mdesc = asm.Type.getMethodDescriptor(jReturnType, paramJavaTypes: _*)
+ val mirrorMethodName = javaName(m)
+ val mirrorMethod: asm.MethodVisitor = jclass.visitMethod(
+ flags,
+ mirrorMethodName,
+ mdesc,
+ jgensig,
+ mkArray(thrownExceptions)
+ )
+
+ // typestate: entering mode with valid call sequences:
+ // [ visitAnnotationDefault ] ( visitAnnotation | visitParameterAnnotation | visitAttribute )*
+
+ emitAnnotations(mirrorMethod, others)
+ emitParamAnnotations(mirrorMethod, m.info.params.map(_.annotations))
+
+ // typestate: entering mode with valid call sequences:
+ // visitCode ( visitFrame | visitXInsn | visitLabel | visitTryCatchBlock | visitLocalVariable | visitLineNumber )* visitMaxs ] visitEnd
+
+ mirrorMethod.visitCode()
+
+ mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, strMODULE_INSTANCE_FIELD, descriptor(module))
+
+ var index = 0
+ for(jparamType <- paramJavaTypes) {
+ mirrorMethod.visitVarInsn(jparamType.getOpcode(asm.Opcodes.ILOAD), index)
+ assert(jparamType.getSort() != asm.Type.METHOD, jparamType)
+ index += jparamType.getSize()
+ }
+
+ mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, javaType(m).getDescriptor)
+ mirrorMethod.visitInsn(jReturnType.getOpcode(asm.Opcodes.IRETURN))
+
+ mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ mirrorMethod.visitEnd()
+
+ }
+
+ /** Add forwarders for all methods defined in `module` that don't conflict
+ * with methods in the companion class of `module`. A conflict arises when
+ * a method with the same name is defined both in a class and its companion object:
+ * method signature is not taken into account.
+ */
+ def addForwarders(isRemoteClass: Boolean, jclass: asm.ClassVisitor, jclassName: String, moduleClass: Symbol) {
+ assert(moduleClass.isModuleClass, moduleClass)
+ debuglog("Dumping mirror class for object: " + moduleClass)
+
+ val linkedClass = moduleClass.companionClass
+ val linkedModule = linkedClass.companionSymbol
+ lazy val conflictingNames: Set[Name] = {
+ (linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name }).toSet
+ }
+ debuglog("Potentially conflicting names for forwarders: " + conflictingNames)
+
+ for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flags.METHOD)) {
+ if (m.isType || m.isDeferred || (m.owner eq ObjectClass) || m.isConstructor)
+ debuglog("No forwarder for '%s' from %s to '%s'".format(m, jclassName, moduleClass))
+ else if (conflictingNames(m.name))
+ log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name))
+ else if (m.hasAccessBoundary)
+ log(s"No forwarder for non-public member $m")
+ else {
+ log("Adding static forwarder for '%s' from %s to '%s'".format(m, jclassName, moduleClass))
+ addForwarder(isRemoteClass, jclass, moduleClass, m)
+ }
+ }
+ }
+
+ } // end of class JCommonBuilder
+
+
+ trait JAndroidBuilder {
+ self: JPlainBuilder =>
+
+ /** From the reference documentation of the Android SDK:
+ * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`.
+ * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`,
+ * which is an object implementing the `Parcelable.Creator` interface.
+ */
+ private val androidFieldName = newTermName("CREATOR")
+
+ private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
+ private lazy val AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
+
+ def isAndroidParcelableClass(sym: Symbol) =
+ (AndroidParcelableInterface != NoSymbol) &&
+ (sym.parentSymbols contains AndroidParcelableInterface)
+
+ /* Typestate: should be called before emitting fields (because it adds an IField to the current IClass). */
+ def addCreatorCode(block: BasicBlock) {
+ val fieldSymbol = (
+ clasz.symbol.newValue(newTermName(androidFieldName), NoPosition, Flags.STATIC | Flags.FINAL)
+ setInfo AndroidCreatorClass.tpe
+ )
+ val methodSymbol = definitions.getMember(clasz.symbol.companionModule, androidFieldName)
+ clasz addField new IField(fieldSymbol)
+ block emit CALL_METHOD(methodSymbol, Static(false))
+ block emit STORE_FIELD(fieldSymbol, true)
+ }
+
+ def legacyAddCreatorCode(clinit: asm.MethodVisitor) {
+ val creatorType: asm.Type = javaType(AndroidCreatorClass)
+ val tdesc_creator = creatorType.getDescriptor
+
+ jclass.visitField(
+ PublicStaticFinal,
+ androidFieldName,
+ tdesc_creator,
+ null, // no java-generic-signature
+ null // no initial value
+ ).visitEnd()
+
+ val moduleName = javaName(clasz.symbol)+"$"
+
+ // GETSTATIC `moduleName`.MODULE$ : `moduleName`;
+ clinit.visitFieldInsn(
+ asm.Opcodes.GETSTATIC,
+ moduleName,
+ strMODULE_INSTANCE_FIELD,
+ asm.Type.getObjectType(moduleName).getDescriptor
+ )
+
+ // INVOKEVIRTUAL `moduleName`.CREATOR() : android.os.Parcelable$Creator;
+ clinit.visitMethodInsn(
+ asm.Opcodes.INVOKEVIRTUAL,
+ moduleName,
+ androidFieldName,
+ asm.Type.getMethodDescriptor(creatorType, Array.empty[asm.Type]: _*)
+ )
+
+ // PUTSTATIC `thisName`.CREATOR;
+ clinit.visitFieldInsn(
+ asm.Opcodes.PUTSTATIC,
+ thisName,
+ androidFieldName,
+ tdesc_creator
+ )
+ }
+
+ } // end of trait JAndroidBuilder
+
+ /** Map from type kinds to the Java reference types.
+ * It is used to push class literals onto the operand stack.
+ * @see Predef.classOf
+ * @see genConstant()
+ */
+ private val classLiteral = immutable.Map[TypeKind, asm.Type](
+ UNIT -> asm.Type.getObjectType("java/lang/Void"),
+ BOOL -> asm.Type.getObjectType("java/lang/Boolean"),
+ BYTE -> asm.Type.getObjectType("java/lang/Byte"),
+ SHORT -> asm.Type.getObjectType("java/lang/Short"),
+ CHAR -> asm.Type.getObjectType("java/lang/Character"),
+ INT -> asm.Type.getObjectType("java/lang/Integer"),
+ LONG -> asm.Type.getObjectType("java/lang/Long"),
+ FLOAT -> asm.Type.getObjectType("java/lang/Float"),
+ DOUBLE -> asm.Type.getObjectType("java/lang/Double")
+ )
+
+ def isNonUnitValueTK(tk: TypeKind): Boolean = { tk.isValueType && tk != UNIT }
+
+ case class MethodNameAndType(mname: String, mdesc: String)
+
+ private val jBoxTo: Map[TypeKind, MethodNameAndType] = {
+ Map(
+ BOOL -> MethodNameAndType("boxToBoolean", "(Z)Ljava/lang/Boolean;" ) ,
+ BYTE -> MethodNameAndType("boxToByte", "(B)Ljava/lang/Byte;" ) ,
+ CHAR -> MethodNameAndType("boxToCharacter", "(C)Ljava/lang/Character;") ,
+ SHORT -> MethodNameAndType("boxToShort", "(S)Ljava/lang/Short;" ) ,
+ INT -> MethodNameAndType("boxToInteger", "(I)Ljava/lang/Integer;" ) ,
+ LONG -> MethodNameAndType("boxToLong", "(J)Ljava/lang/Long;" ) ,
+ FLOAT -> MethodNameAndType("boxToFloat", "(F)Ljava/lang/Float;" ) ,
+ DOUBLE -> MethodNameAndType("boxToDouble", "(D)Ljava/lang/Double;" )
+ )
+ }
+
+ private val jUnboxTo: Map[TypeKind, MethodNameAndType] = {
+ Map(
+ BOOL -> MethodNameAndType("unboxToBoolean", "(Ljava/lang/Object;)Z") ,
+ BYTE -> MethodNameAndType("unboxToByte", "(Ljava/lang/Object;)B") ,
+ CHAR -> MethodNameAndType("unboxToChar", "(Ljava/lang/Object;)C") ,
+ SHORT -> MethodNameAndType("unboxToShort", "(Ljava/lang/Object;)S") ,
+ INT -> MethodNameAndType("unboxToInt", "(Ljava/lang/Object;)I") ,
+ LONG -> MethodNameAndType("unboxToLong", "(Ljava/lang/Object;)J") ,
+ FLOAT -> MethodNameAndType("unboxToFloat", "(Ljava/lang/Object;)F") ,
+ DOUBLE -> MethodNameAndType("unboxToDouble", "(Ljava/lang/Object;)D")
+ )
+ }
+
+ case class BlockInteval(start: BasicBlock, end: BasicBlock)
+
+ /** builder of plain classes */
+ class JPlainBuilder(bytecodeWriter: BytecodeWriter)
+ extends JCommonBuilder(bytecodeWriter)
+ with JAndroidBuilder {
+
+ val MIN_SWITCH_DENSITY = 0.7
+
+ val StringBuilderClassName = javaName(definitions.StringBuilderClass)
+ val BoxesRunTime = "scala/runtime/BoxesRunTime"
+
+ val StringBuilderType = asm.Type.getObjectType(StringBuilderClassName)
+ val mdesc_toString = "()Ljava/lang/String;"
+ val mdesc_arrayClone = "()Ljava/lang/Object;"
+
+ val tdesc_long = asm.Type.LONG_TYPE.getDescriptor // ie. "J"
+
+ def isParcelableClass = isAndroidParcelableClass(clasz.symbol)
+
+ def serialVUID: Option[Long] = clasz.symbol getAnnotation SerialVersionUIDAttr collect {
+ case AnnotationInfo(_, Literal(const) :: _, _) => const.longValue
+ }
+
+ private def getSuperInterfaces(c: IClass): Array[String] = {
+
+ // Additional interface parents based on annotations and other cues
+ def newParentForAttr(attr: Symbol): Option[Symbol] = attr match {
+ case SerializableAttr => Some(SerializableClass)
+ case CloneableAttr => Some(CloneableClass)
+ case RemoteAttr => Some(RemoteInterfaceClass)
+ case _ => None
+ }
+
+ /** Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
+ * This is important on Android because there is otherwise an interface explosion.
+ */
+ def minimizeInterfaces(lstIfaces: List[Symbol]): List[Symbol] = {
+ var rest = lstIfaces
+ var leaves = List.empty[Symbol]
+ while(!rest.isEmpty) {
+ val candidate = rest.head
+ val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
+ if(!nonLeaf) {
+ leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
+ }
+ rest = rest.tail
+ }
+
+ leaves
+ }
+
+ val ps = c.symbol.info.parents
+ val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses;
+ val superInterfaces = (superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol))).distinct
+
+ if(superInterfaces.isEmpty) EMPTY_STRING_ARRAY
+ else mkArray(minimizeInterfaces(superInterfaces) map javaName)
+ }
+
+ var clasz: IClass = _ // this var must be assigned only by genClass()
+ var jclass: asm.ClassWriter = _ // the classfile being emitted
+ var thisName: String = _ // the internal name of jclass
+
+ def thisDescr: String = {
+ assert(thisName != null, "thisDescr invoked too soon.")
+ asm.Type.getObjectType(thisName).getDescriptor
+ }
+
+ def getCurrentCUnit(): CompilationUnit = { clasz.cunit }
+
+ def genClass(c: IClass) {
+ clasz = c
+ innerClassBuffer.clear()
+
+ thisName = javaName(c.symbol) // the internal name of the class being emitted
+
+ val ps = c.symbol.info.parents
+ val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol);
+
+ val ifaces = getSuperInterfaces(c)
+
+ val thisSignature = getGenericSignature(c.symbol, c.symbol.owner)
+ val flags = mkFlags(
+ javaFlags(c.symbol),
+ if(isDeprecated(c.symbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+ jclass = createJClass(flags,
+ thisName, thisSignature,
+ superClass, ifaces)
+
+ // typestate: entering mode with valid call sequences:
+ // [ visitSource ] [ visitOuterClass ] ( visitAnnotation | visitAttribute )*
+
+ if(emitSource) {
+ jclass.visitSource(c.cunit.source.toString,
+ null /* SourceDebugExtension */)
+ }
+
+ val enclM = getEnclosingMethodAttribute()
+ if(enclM != null) {
+ val EnclMethodEntry(className, methodName, methodType) = enclM
+ jclass.visitOuterClass(className, methodName, methodType.getDescriptor)
+ }
+
+ // typestate: entering mode with valid call sequences:
+ // ( visitAnnotation | visitAttribute )*
+
+ val ssa = getAnnotPickle(thisName, c.symbol)
+ jclass.visitAttribute(if(ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
+ emitAnnotations(jclass, c.symbol.annotations ++ ssa)
+
+ // typestate: entering mode with valid call sequences:
+ // ( visitInnerClass | visitField | visitMethod )* visitEnd
+
+ if (isStaticModule(c.symbol) || isParcelableClass) {
+
+ if (isStaticModule(c.symbol)) { addModuleInstanceField() }
+ addStaticInit(c.lookupStaticCtor)
+
+ } else {
+
+ for (constructor <- c.lookupStaticCtor) {
+ addStaticInit(Some(constructor))
+ }
+ val skipStaticForwarders = (c.symbol.isInterface || settings.noForwarders.value)
+ if (!skipStaticForwarders) {
+ val lmoc = c.symbol.companionModule
+ // add static forwarders if there are no name conflicts; see bugs #363 and #1735
+ if (lmoc != NoSymbol) {
+ // it must be a top level class (name contains no $s)
+ val isCandidateForForwarders = {
+ afterPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass }
+ }
+ if (isCandidateForForwarders) {
+ log("Adding static forwarders from '%s' to implementations in '%s'".format(c.symbol, lmoc))
+ addForwarders(isRemote(clasz.symbol), jclass, thisName, lmoc.moduleClass)
+ }
+ }
+ }
+
+ }
+
+ // add static serialVersionUID field if `clasz` annotated with `@SerialVersionUID(uid: Long)`
+ serialVUID foreach { value =>
+ val fieldName = "serialVersionUID"
+ jclass.visitField(
+ PublicStaticFinal,
+ fieldName,
+ tdesc_long,
+ null, // no java-generic-signature
+ value
+ ).visitEnd()
+ }
+
+ clasz.fields foreach genField
+ clasz.methods foreach { im =>
+ if (im.symbol.isBridge && isRedundantBridge(im, clasz))
+ // We can't backport the erasure fix of SI-7120 to 2.10.x, but we can detect and delete
+ // bridge methods with identical signatures to their targets.
+ //
+ // NOTE: this backstop only implemented here in the ASM backend, and is not implemented in the FJBG backend.
+ debugwarn(s"Discarding redundant bridge method: ${im.symbol.debugLocationString}. See SI-8114.")
+ else
+ genMethod(im, c.symbol.isInterface)
+ }
+
+ addInnerClasses(clasz.symbol, jclass)
+ jclass.visitEnd()
+ writeIfNotTooBig("" + c.symbol.name, thisName, jclass, c.symbol)
+ }
+
+ private def isRedundantBridge(bridge: IMethod, owner: IClass): Boolean = {
+ def lastCalledMethod: Option[Symbol] = bridge.code.instructions.reverseIterator.collectFirst {
+ case CALL_METHOD(meth, _) => meth
+ }
+ def hasSameSignatureAsBridge(targetMethod: Symbol): Boolean = {
+ val targetIMethod = clasz.methods find (m => m.symbol == targetMethod)
+ // Important to compare the IMethod#paramss, rather then the erased MethodTypes, as
+ // due to the bug SI-7120, these are out of sync. For example, in the `applyOrElse`
+ // method in run/t8114.scala, the method symbol info has a parameter of type `Long`,
+ // but the IMethod parameter has type `Object`. The latter comes from the info of the
+ // symbol representing the parameter ValDef in the tree, which is incorrectly erased.
+ targetIMethod exists (m => bridge.matchesSignature(m))
+ }
+ lastCalledMethod exists hasSameSignatureAsBridge
+ }
+
+ /**
+ * @param owner internal name of the enclosing class of the class.
+ *
+ * @param name the name of the method that contains the class.
+
+ * @param methodType the method that contains the class.
+ */
+ case class EnclMethodEntry(owner: String, name: String, methodType: asm.Type)
+
+ /**
+ * @return null if the current class is not internal to a method
+ *
+ * Quoting from JVMS 4.7.7 The EnclosingMethod Attribute
+ * A class must have an EnclosingMethod attribute if and only if it is a local class or an anonymous class.
+ * A class may have no more than one EnclosingMethod attribute.
+ *
+ */
+ private def getEnclosingMethodAttribute(): EnclMethodEntry = { // JVMS 4.7.7
+ var res: EnclMethodEntry = null
+ val clazz = clasz.symbol
+ val sym = clazz.originalEnclosingMethod
+ if (sym.isMethod) {
+ debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, sym.enclClass))
+ res = EnclMethodEntry(javaName(sym.enclClass), javaName(sym), javaType(sym))
+ } else if (clazz.isAnonymousClass) {
+ val enclClass = clazz.rawowner
+ assert(enclClass.isClass, enclClass)
+ val sym = enclClass.primaryConstructor
+ if (sym == NoSymbol) {
+ log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(enclClass, clazz))
+ } else {
+ debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass))
+ res = EnclMethodEntry(javaName(enclClass), javaName(sym), javaType(sym))
+ }
+ }
+
+ res
+ }
+
+ def genField(f: IField) {
+ debuglog("Adding field: " + f.symbol.fullName)
+
+ val javagensig = getGenericSignature(f.symbol, clasz.symbol)
+
+ val flags = mkFlags(
+ javaFieldFlags(f.symbol),
+ if(isDeprecated(f.symbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ val jfield: asm.FieldVisitor = jclass.visitField(
+ flags,
+ javaName(f.symbol),
+ javaType(f.symbol.tpe).getDescriptor(),
+ javagensig,
+ null // no initial value
+ )
+
+ emitAnnotations(jfield, f.symbol.annotations)
+ jfield.visitEnd()
+ }
+
+ var method: IMethod = _
+ var jmethod: asm.MethodVisitor = _
+ var jMethodName: String = _
+
+ final def emit(opc: Int) { jmethod.visitInsn(opc) }
+
+ def genMethod(m: IMethod, isJInterface: Boolean) {
+
+ def isClosureApply(sym: Symbol): Boolean = {
+ (sym.name == nme.apply) &&
+ sym.owner.isSynthetic &&
+ sym.owner.tpe.parents.exists { t =>
+ val TypeRef(_, sym, _) = t
+ FunctionClass contains sym
+ }
+ }
+
+ if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return
+
+ debuglog("Generating method " + m.symbol.fullName)
+ method = m
+ computeLocalVarsIndex(m)
+
+ var resTpe: asm.Type = javaType(m.symbol.tpe.resultType)
+ if (m.symbol.isClassConstructor)
+ resTpe = asm.Type.VOID_TYPE
+
+ val flags = mkFlags(
+ javaFlags(m.symbol),
+ if (isJInterface) asm.Opcodes.ACC_ABSTRACT else 0,
+ if (m.symbol.isStrictFP) asm.Opcodes.ACC_STRICT else 0,
+ if (method.native) asm.Opcodes.ACC_NATIVE else 0, // native methods of objects are generated in mirror classes
+ if(isDeprecated(m.symbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ // TODO needed? for(ann <- m.symbol.annotations) { ann.symbol.initialize }
+ val jgensig = getGenericSignature(m.symbol, clasz.symbol)
+ addRemoteExceptionAnnot(isRemote(clasz.symbol), hasPublicBitSet(flags), m.symbol)
+ val (excs, others) = m.symbol.annotations partition (_.symbol == ThrowsClass)
+ val thrownExceptions: List[String] = getExceptions(excs)
+
+ jMethodName = javaName(m.symbol)
+ val mdesc = asm.Type.getMethodDescriptor(resTpe, (m.params map (p => javaType(p.kind))): _*)
+ jmethod = jclass.visitMethod(
+ flags,
+ jMethodName,
+ mdesc,
+ jgensig,
+ mkArray(thrownExceptions)
+ )
+
+ // TODO param names: (m.params map (p => javaName(p.sym)))
+
+ // typestate: entering mode with valid call sequences:
+ // [ visitAnnotationDefault ] ( visitAnnotation | visitParameterAnnotation | visitAttribute )*
+
+ emitAnnotations(jmethod, others)
+ emitParamAnnotations(jmethod, m.params.map(_.sym.annotations))
+
+ // typestate: entering mode with valid call sequences:
+ // [ visitCode ( visitFrame | visitXInsn | visitLabel | visitTryCatchBlock | visitLocalVariable | visitLineNumber )* visitMaxs ] visitEnd
+ // In addition, the visitXInsn and visitLabel methods must be called in the sequential order of the bytecode instructions of the visited code,
+ // visitTryCatchBlock must be called before the labels passed as arguments have been visited, and
+ // the visitLocalVariable and visitLineNumber methods must be called after the labels passed as arguments have been visited.
+
+ val hasAbstractBitSet = ((flags & asm.Opcodes.ACC_ABSTRACT) != 0)
+ val hasCodeAttribute = (!hasAbstractBitSet && !method.native)
+ if (hasCodeAttribute) {
+
+ jmethod.visitCode()
+
+ if (emitVars && isClosureApply(method.symbol)) {
+ // add a fake local for debugging purposes
+ val outerField = clasz.symbol.info.decl(nme.OUTER_LOCAL)
+ if (outerField != NoSymbol) {
+ log("Adding fake local to represent outer 'this' for closure " + clasz)
+ val _this =
+ new Local(method.symbol.newVariable(nme.FAKE_LOCAL_THIS),
+ toTypeKind(outerField.tpe),
+ false)
+ m.locals = m.locals ::: List(_this)
+ computeLocalVarsIndex(m) // since we added a new local, we need to recompute indexes
+ jmethod.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ jmethod.visitFieldInsn(asm.Opcodes.GETFIELD,
+ javaName(clasz.symbol), // field owner
+ javaName(outerField), // field name
+ descriptor(outerField) // field descriptor
+ )
+ assert(_this.kind.isReferenceType, _this.kind)
+ jmethod.visitVarInsn(asm.Opcodes.ASTORE, indexOf(_this))
+ }
+ }
+
+ assert( m.locals forall { local => (m.params contains local) == local.arg }, m.locals )
+
+ val hasStaticBitSet = ((flags & asm.Opcodes.ACC_STATIC) != 0)
+ genCode(m, emitVars, hasStaticBitSet)
+
+ jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ }
+
+ jmethod.visitEnd()
+
+ }
+
+ def addModuleInstanceField() {
+ val fv =
+ jclass.visitField(PublicStaticFinal, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
+ strMODULE_INSTANCE_FIELD,
+ thisDescr,
+ null, // no java-generic-signature
+ null // no initial value
+ )
+
+ // typestate: entering mode with valid call sequences:
+ // ( visitAnnotation | visitAttribute )* visitEnd.
+
+ fv.visitEnd()
+ }
+
+
+ /* Typestate: should be called before being done with emitting fields (because it invokes addCreatorCode() which adds an IField to the current IClass). */
+ def addStaticInit(mopt: Option[IMethod]) {
+
+ val clinitMethod: asm.MethodVisitor = jclass.visitMethod(
+ PublicStatic, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
+ CLASS_CONSTRUCTOR_NAME,
+ mdesc_arglessvoid,
+ null, // no java-generic-signature
+ null // no throwable exceptions
+ )
+
+ mopt match {
+
+ case Some(m) =>
+
+ val oldLastBlock = m.lastBlock
+ val lastBlock = m.newBlock()
+ oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
+
+ if (isStaticModule(clasz.symbol)) {
+ // call object's private ctor from static ctor
+ lastBlock emit NEW(REFERENCE(m.symbol.enclClass))
+ lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(true))
+ }
+
+ if (isParcelableClass) { addCreatorCode(lastBlock) }
+
+ lastBlock emit RETURN(UNIT)
+ lastBlock.close
+
+ method = m
+ jmethod = clinitMethod
+ jMethodName = CLASS_CONSTRUCTOR_NAME
+ jmethod.visitCode()
+ genCode(m, false, true)
+ jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ jmethod.visitEnd()
+
+ case None =>
+ clinitMethod.visitCode()
+ legacyStaticInitializer(clinitMethod)
+ clinitMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ clinitMethod.visitEnd()
+
+ }
+ }
+
+ /* used only from addStaticInit() */
+ private def legacyStaticInitializer(clinit: asm.MethodVisitor) {
+ if (isStaticModule(clasz.symbol)) {
+ clinit.visitTypeInsn(asm.Opcodes.NEW, thisName)
+ clinit.visitMethodInsn(asm.Opcodes.INVOKESPECIAL,
+ thisName, INSTANCE_CONSTRUCTOR_NAME, mdesc_arglessvoid)
+ }
+
+ if (isParcelableClass) { legacyAddCreatorCode(clinit) }
+
+ clinit.visitInsn(asm.Opcodes.RETURN)
+ }
+
+ // -----------------------------------------------------------------------------------------
+ // Emitting bytecode instructions.
+ // -----------------------------------------------------------------------------------------
+
+ // Pushes the given compile-time constant onto the operand stack, choosing the most
+ // compact instruction per tag (ICONST/BIPUSH/LDC etc. via jcode, or direct visits on mv).
+ // NOTE(review): the primitive tags route through `jcode`, which emits to the builder-level
+ // `jmethod` rather than this method's `mv` parameter — callers appear to always pass
+ // jmethod as mv, so the two coincide; confirm before reusing with a different visitor.
+ private def genConstant(mv: asm.MethodVisitor, const: Constant) {
+ const.tag match {
+
+ case BooleanTag => jcode.boolconst(const.booleanValue)
+
+ case ByteTag => jcode.iconst(const.byteValue)
+ case ShortTag => jcode.iconst(const.shortValue)
+ case CharTag => jcode.iconst(const.charValue)
+ case IntTag => jcode.iconst(const.intValue)
+
+ case LongTag => jcode.lconst(const.longValue)
+ case FloatTag => jcode.fconst(const.floatValue)
+ case DoubleTag => jcode.dconst(const.doubleValue)
+
+ case UnitTag => () // unit pushes nothing
+
+ case StringTag =>
+ assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant`
+ mv.visitLdcInsn(const.stringValue) // `stringValue` special-cases null, but not for a const with StringTag
+
+ case NullTag => mv.visitInsn(asm.Opcodes.ACONST_NULL)
+
+ case ClazzTag =>
+ // class literal: value types push the corresponding classLiteral, references their asm.Type
+ val kind = toTypeKind(const.typeValue)
+ val toPush: asm.Type =
+ if (kind.isValueType) classLiteral(kind)
+ else javaType(kind);
+ mv.visitLdcInsn(toPush)
+
+ case EnumTag =>
+ // a Java enum constant is a GETSTATIC of the named field on its owner class
+ val sym = const.symbolValue
+ mv.visitFieldInsn(
+ asm.Opcodes.GETSTATIC,
+ javaName(sym.owner),
+ javaName(sym),
+ javaType(sym.tpe.underlying).getDescriptor()
+ )
+
+ case _ => abort("Unknown constant value: " + const)
+ }
+ }
+
+ /** Just a namespace for utilities that encapsulate MethodVisitor idioms.
+ * In the ASM world, org.objectweb.asm.commons.InstructionAdapter plays a similar role,
+ * but the methods here allow choosing when to transition from ICode to ASM types
+ * (including not at all, e.g. for performance).
+ *
+ * All emitters below write to the builder-level `jmethod` visitor.
+ */
+ object jcode {
+
+ import asm.Opcodes;
+
+ // Pushes an arbitrary reference constant; null gets ACONST_NULL, everything else LDC.
+ def aconst(cst: AnyRef) {
+ if (cst == null) { jmethod.visitInsn(Opcodes.ACONST_NULL) }
+ else { jmethod.visitLdcInsn(cst) }
+ }
+
+ final def boolconst(b: Boolean) { iconst(if(b) 1 else 0) }
+
+ // Pushes an int using the smallest encoding: ICONST_M1..ICONST_5 (contiguous opcodes,
+ // hence the `ICONST_0 + cst` arithmetic), else BIPUSH, else SIPUSH, else LDC.
+ def iconst(cst: Int) {
+ if (cst >= -1 && cst <= 5) {
+ jmethod.visitInsn(Opcodes.ICONST_0 + cst)
+ } else if (cst >= java.lang.Byte.MIN_VALUE && cst <= java.lang.Byte.MAX_VALUE) {
+ jmethod.visitIntInsn(Opcodes.BIPUSH, cst)
+ } else if (cst >= java.lang.Short.MIN_VALUE && cst <= java.lang.Short.MAX_VALUE) {
+ jmethod.visitIntInsn(Opcodes.SIPUSH, cst)
+ } else {
+ jmethod.visitLdcInsn(new Integer(cst))
+ }
+ }
+
+ // LCONST_0/LCONST_1 for 0L and 1L, LDC otherwise.
+ def lconst(cst: Long) {
+ if (cst == 0L || cst == 1L) {
+ jmethod.visitInsn(Opcodes.LCONST_0 + cst.asInstanceOf[Int])
+ } else {
+ jmethod.visitLdcInsn(new java.lang.Long(cst))
+ }
+ }
+
+ // Exact bit-pattern compare (not ==) picks FCONST_0/1/2 only for +0.0f, 1.0f, 2.0f;
+ // -0.0f and everything else go through the constant pool.
+ def fconst(cst: Float) {
+ val bits: Int = java.lang.Float.floatToIntBits(cst)
+ if (bits == 0L || bits == 0x3f800000 || bits == 0x40000000) { // 0..2
+ jmethod.visitInsn(Opcodes.FCONST_0 + cst.asInstanceOf[Int])
+ } else {
+ jmethod.visitLdcInsn(new java.lang.Float(cst))
+ }
+ }
+
+ def dconst(cst: Double) {
+ val bits: Long = java.lang.Double.doubleToLongBits(cst)
+ if (bits == 0L || bits == 0x3ff0000000000000L) { // +0.0d and 1.0d
+ jmethod.visitInsn(Opcodes.DCONST_0 + cst.asInstanceOf[Int])
+ } else {
+ jmethod.visitLdcInsn(new java.lang.Double(cst))
+ }
+ }
+
+ // Emits a 1-dimensional array creation: ANEWARRAY for reference/array element types,
+ // NEWARRAY with the matching T_* operand for primitive element types.
+ def newarray(elem: TypeKind) {
+ if(elem.isRefOrArrayType) {
+ jmethod.visitTypeInsn(Opcodes.ANEWARRAY, javaType(elem).getInternalName)
+ } else {
+ val rand = {
+ if(elem.isIntSizedType) {
+ (elem: @unchecked) match {
+ case BOOL => Opcodes.T_BOOLEAN
+ case BYTE => Opcodes.T_BYTE
+ case SHORT => Opcodes.T_SHORT
+ case CHAR => Opcodes.T_CHAR
+ case INT => Opcodes.T_INT
+ }
+ } else {
+ (elem: @unchecked) match {
+ case LONG => Opcodes.T_LONG
+ case FLOAT => Opcodes.T_FLOAT
+ case DOUBLE => Opcodes.T_DOUBLE
+ }
+ }
+ }
+ jmethod.visitIntInsn(Opcodes.NEWARRAY, rand)
+ }
+ }
+
+
+ def load( idx: Int, tk: TypeKind) { emitVarInsn(Opcodes.ILOAD, idx, tk) }
+ def store(idx: Int, tk: TypeKind) { emitVarInsn(Opcodes.ISTORE, idx, tk) }
+
+ def aload( tk: TypeKind) { emitTypeBased(aloadOpcodes, tk) }
+ def astore(tk: TypeKind) { emitTypeBased(astoreOpcodes, tk) }
+
+ def neg(tk: TypeKind) { emitPrimitive(negOpcodes, tk) }
+ def add(tk: TypeKind) { emitPrimitive(addOpcodes, tk) }
+ def sub(tk: TypeKind) { emitPrimitive(subOpcodes, tk) }
+ def mul(tk: TypeKind) { emitPrimitive(mulOpcodes, tk) }
+ def div(tk: TypeKind) { emitPrimitive(divOpcodes, tk) }
+ def rem(tk: TypeKind) { emitPrimitive(remOpcodes, tk) }
+
+ def invokespecial(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, desc)
+ }
+ def invokestatic(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, desc)
+ }
+ def invokeinterface(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKEINTERFACE, owner, name, desc)
+ }
+ def invokevirtual(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, desc)
+ }
+
+ def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) }
+ def emitIF(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF, label) }
+ def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) }
+ def emitIF_ACMP(cond: TestOp, label: asm.Label) {
+ assert((cond == EQ) || (cond == NE), cond)
+ val opc = (if(cond == EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE)
+ jmethod.visitJumpInsn(opc, label)
+ }
+ def emitIFNONNULL(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNONNULL, label) }
+ def emitIFNULL (label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNULL, label) }
+
+ def emitRETURN(tk: TypeKind) {
+ if(tk == UNIT) { jmethod.visitInsn(Opcodes.RETURN) }
+ else { emitTypeBased(returnOpcodes, tk) }
+ }
+
+ /** Emits one of tableswitch or lookupswitch,
+ * picking TABLESWITCH when the sorted keys are dense enough (>= minDensity). */
+ def emitSWITCH(keys: Array[Int], branches: Array[asm.Label], defaultBranch: asm.Label, minDensity: Double) {
+ assert(keys.length == branches.length)
+
+ // For empty keys, it makes sense emitting LOOKUPSWITCH with defaultBranch only.
+ // Similar to what javac emits for a switch statement consisting only of a default case.
+ if (keys.length == 0) {
+ jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches)
+ return
+ }
+
+ // sort `keys` by increasing key, keeping `branches` in sync. TODO FIXME use quicksort
+ // (bubble sort, in-place on the caller's arrays; quadratic, presumably fine for small key sets)
+ var i = 1
+ while (i < keys.length) {
+ var j = 1
+ while (j <= keys.length - i) {
+ if (keys(j) < keys(j - 1)) {
+ val tmp = keys(j)
+ keys(j) = keys(j - 1)
+ keys(j - 1) = tmp
+ val tmpL = branches(j)
+ branches(j) = branches(j - 1)
+ branches(j - 1) = tmpL
+ }
+ j += 1
+ }
+ i += 1
+ }
+
+ // check for duplicate keys to avoid "VerifyError: unsorted lookupswitch" (SI-6011)
+ i = 1
+ while (i < keys.length) {
+ if(keys(i-1) == keys(i)) {
+ abort("duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.")
+ }
+ i += 1
+ }
+
+ val keyMin = keys(0)
+ val keyMax = keys(keys.length - 1)
+
+ val isDenseEnough: Boolean = {
+ /** Calculate in long to guard against overflow. TODO what overflow??? */
+ val keyRangeD: Double = (keyMax.asInstanceOf[Long] - keyMin + 1).asInstanceOf[Double]
+ val klenD: Double = keys.length
+ val kdensity: Double = (klenD / keyRangeD)
+
+ kdensity >= minDensity
+ }
+
+ if (isDenseEnough) {
+ // use a table in which holes are filled with defaultBranch.
+ val keyRange = (keyMax - keyMin + 1)
+ val newBranches = new Array[asm.Label](keyRange)
+ var oldPos = 0;
+ var i = 0
+ while(i < keyRange) {
+ val key = keyMin + i;
+ if (keys(oldPos) == key) {
+ newBranches(i) = branches(oldPos)
+ oldPos += 1
+ } else {
+ newBranches(i) = defaultBranch
+ }
+ i += 1
+ }
+ assert(oldPos == keys.length, "emitSWITCH")
+ jmethod.visitTableSwitchInsn(keyMin, keyMax, defaultBranch, newBranches: _*)
+ } else {
+ jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches)
+ }
+ }
+
+ // internal helpers -- not part of the public API of `jcode`
+ // don't make private otherwise inlining will suffer
+
+ def emitVarInsn(opc: Int, idx: Int, tk: TypeKind) {
+ assert((opc == Opcodes.ILOAD) || (opc == Opcodes.ISTORE), opc)
+ jmethod.visitVarInsn(javaType(tk).getOpcode(opc), idx)
+ }
+
+ // ---------------- array load and store ----------------
+
+ // index layout shared with emitTypeBased():
+ // 0=ref/array, 1=bool/byte, 2=short, 3=char, 4=int, 5=long, 6=float, 7=double
+ val aloadOpcodes = { import Opcodes._; Array(AALOAD, BALOAD, SALOAD, CALOAD, IALOAD, LALOAD, FALOAD, DALOAD) }
+ val astoreOpcodes = { import Opcodes._; Array(AASTORE, BASTORE, SASTORE, CASTORE, IASTORE, LASTORE, FASTORE, DASTORE) }
+
+ val returnOpcodes = { import Opcodes._; Array(ARETURN, IRETURN, IRETURN, IRETURN, IRETURN, LRETURN, FRETURN, DRETURN) }
+
+ def emitTypeBased(opcs: Array[Int], tk: TypeKind) {
+ assert(tk != UNIT, tk)
+ val opc = {
+ if(tk.isRefOrArrayType) { opcs(0) }
+ else if(tk.isIntSizedType) {
+ (tk: @unchecked) match {
+ case BOOL | BYTE => opcs(1)
+ case SHORT => opcs(2)
+ case CHAR => opcs(3)
+ case INT => opcs(4)
+ }
+ } else {
+ (tk: @unchecked) match {
+ case LONG => opcs(5)
+ case FLOAT => opcs(6)
+ case DOUBLE => opcs(7)
+ }
+ }
+ }
+ jmethod.visitInsn(opc)
+ }
+
+ // ---------------- primitive operations ----------------
+
+ // index layout shared with emitPrimitive(): 0=int-sized, 1=long, 2=float, 3=double
+ val negOpcodes: Array[Int] = { import Opcodes._; Array(INEG, LNEG, FNEG, DNEG) }
+ val addOpcodes: Array[Int] = { import Opcodes._; Array(IADD, LADD, FADD, DADD) }
+ val subOpcodes: Array[Int] = { import Opcodes._; Array(ISUB, LSUB, FSUB, DSUB) }
+ val mulOpcodes: Array[Int] = { import Opcodes._; Array(IMUL, LMUL, FMUL, DMUL) }
+ val divOpcodes: Array[Int] = { import Opcodes._; Array(IDIV, LDIV, FDIV, DDIV) }
+ val remOpcodes: Array[Int] = { import Opcodes._; Array(IREM, LREM, FREM, DREM) }
+
+ def emitPrimitive(opcs: Array[Int], tk: TypeKind) {
+ val opc = {
+ if(tk.isIntSizedType) { opcs(0) }
+ else {
+ (tk: @unchecked) match {
+ case LONG => opcs(1)
+ case FLOAT => opcs(2)
+ case DOUBLE => opcs(3)
+ }
+ }
+ }
+ jmethod.visitInsn(opc)
+ }
+
+ }
+
+ /** Invoked from genMethod() and addStaticInit() */
+ def genCode(m: IMethod,
+ emitVars: Boolean, // this param name hides the instance-level var
+ isStatic: Boolean) {
+
+
+ newNormal.normalize(m)
+
+ // ------------------------------------------------------------------------------------------------------------
+ // Part 1 of genCode(): setting up one-to-one correspondence between ASM Labels and BasicBlocks `linearization`
+ // ------------------------------------------------------------------------------------------------------------
+
+ val linearization: List[BasicBlock] = linearizer.linearize(m)
+ if(linearization.isEmpty) { return }
+
+ var isModuleInitialized = false
+
+ val labels: scala.collection.Map[BasicBlock, asm.Label] = mutable.HashMap(linearization map (_ -> new asm.Label()) : _*)
+
+ val onePastLast = new asm.Label // token for the mythical instruction past the last instruction in the method being emitted
+
+ // maps a BasicBlock b to the Label that corresponds to b's successor in the linearization. The last BasicBlock is mapped to the onePastLast label.
+ val linNext: scala.collection.Map[BasicBlock, asm.Label] = {
+ val result = mutable.HashMap.empty[BasicBlock, asm.Label]
+ var rest = linearization
+ var prev = rest.head
+ rest = rest.tail
+ while(!rest.isEmpty) {
+ result += (prev -> labels(rest.head))
+ prev = rest.head
+ rest = rest.tail
+ }
+ assert(!result.contains(prev))
+ result += (prev -> onePastLast)
+
+ result
+ }
+
+ // ------------------------------------------------------------------------------------------------------------
+ // Part 2 of genCode(): demarcating exception handler boundaries (visitTryCatchBlock() must be invoked before visitLabel() in genBlock())
+ // ------------------------------------------------------------------------------------------------------------
+
+ /**Generate exception handlers for the current method.
+ *
+ * Quoting from the JVMS 4.7.3 The Code Attribute
+ * The items of the Code_attribute structure are as follows:
+ * . . .
+ * exception_table[]
+ * Each entry in the exception_table array describes one
+ * exception handler in the code array. The order of the handlers in
+ * the exception_table array is significant.
+ * Each exception_table entry contains the following four items:
+ * start_pc, end_pc:
+ * ... The value of end_pc either must be a valid index into
+ * the code array of the opcode of an instruction or must be equal to code_length,
+ * the length of the code array.
+ * handler_pc:
+ * The value of the handler_pc item indicates the start of the exception handler
+ * catch_type:
+ * ... If the value of the catch_type item is zero,
+ * this exception handler is called for all exceptions.
+ * This is used to implement finally
+ */
+ def genExceptionHandlers() {
+
+ /** Return a list of pairs of intervals where the handler is active.
+ * Each interval is closed on both ends, ie. inclusive both in the left and right endpoints: [start, end].
+ * Preconditions:
+ * - e.covered non-empty
+ * Postconditions for the result:
+ * - always non-empty
+ * - intervals are sorted as per `linearization`
+ * - the argument's `covered` blocks have been grouped into maximally contiguous intervals,
+ * ie. between any two intervals in the result there is a non-empty gap.
+ * - each of the `covered` blocks in the argument is contained in some interval in the result
+ */
+ def intervals(e: ExceptionHandler): List[BlockInteval] = {
+ assert(e.covered.nonEmpty, e)
+ var result: List[BlockInteval] = Nil
+ var rest = linearization
+
+ // find intervals
+ while(!rest.isEmpty) {
+ // find interval start
+ var start: BasicBlock = null
+ while(!rest.isEmpty && (start eq null)) {
+ if(e.covered(rest.head)) { start = rest.head }
+ rest = rest.tail
+ }
+ if(start ne null) {
+ // find interval end
+ var end = start // for the time being
+ while(!rest.isEmpty && (e.covered(rest.head))) {
+ end = rest.head
+ rest = rest.tail
+ }
+ // intervals are accumulated in reverse order of discovery; callers don't rely on ordering
+ // of the returned list beyond what the postconditions above state.
+ result = BlockInteval(start, end) :: result
+ }
+ }
+
+ assert(result.nonEmpty, e)
+
+ result
+ }
+
+ /* TODO test/files/run/exceptions-2.scala displays an ExceptionHandler.covered that contains
+ * blocks not in the linearization (dead-code?). Is that well-formed or not?
+ * For now, we ignore those blocks (after all, that's what `genBlocks(linearization)` in effect does).
+ */
+ for (e <- this.method.exh) {
+ val ignore: Set[BasicBlock] = (e.covered filterNot { b => linearization contains b } )
+ // TODO someday assert(ignore.isEmpty, "an ExceptionHandler.covered contains blocks not in the linearization (dead-code?)")
+ if(ignore.nonEmpty) {
+ e.covered = e.covered filterNot ignore
+ }
+ }
+
+ // an ExceptionHandler lacking covered blocks doesn't get an entry in the Exceptions table.
+ // TODO in that case, ExceptionHandler.cls doesn't go through javaName(). What if cls is an inner class?
+ for (e <- this.method.exh ; if e.covered.nonEmpty ; p <- intervals(e)) {
+ debuglog("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
+ " from: " + p.start + " to: " + p.end + " catching: " + e.cls);
+ // a null catch-type means catch-all (catch_type == 0 per JVMS quote above); used for Throwable/finally.
+ val cls: String = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
+ else javaName(e.cls)
+ // end label is exclusive: linNext(p.end) is the label of the block after the interval.
+ jmethod.visitTryCatchBlock(labels(p.start), linNext(p.end), labels(e.startBlock), cls)
+ }
+ } // end of genCode()'s genExceptionHandlers()
+
+ if (m.exh.nonEmpty) { genExceptionHandlers() }
+
+ // ------------------------------------------------------------------------------------------------------------
+ // Part 3 of genCode(): "Infrastructure" to later emit debug info for local variables and method params (LocalVariablesTable bytecode attribute).
+ // ------------------------------------------------------------------------------------------------------------
+
+ case class LocVarEntry(local: Local, start: asm.Label, end: asm.Label) // start is inclusive while end exclusive.
+
+ // A range of bytecode positions denoted by two bound labels; start/end are their resolved offsets.
+ case class Interval(lstart: asm.Label, lend: asm.Label) {
+ final def start = lstart.getOffset
+ final def end = lend.getOffset
+
+ // true iff this interval ends strictly before `that` begins
+ def precedes(that: Interval): Boolean = { this.end < that.start }
+
+ def overlaps(that: Interval): Boolean = { !(this.precedes(that) || that.precedes(this)) }
+
+ // Covering interval of the two; callers (see scoping.fuse) only invoke this on overlapping intervals.
+ def mergeWith(that: Interval): Interval = {
+ val newStart = if(this.start <= that.start) this.lstart else that.lstart;
+ val newEnd = if(this.end <= that.end) that.lend else this.lend;
+ Interval(newStart, newEnd)
+ }
+
+ def repOK: Boolean = { start <= end }
+
+ }
+
+ /** Track those instruction ranges where certain locals are in scope. Used to later emit the LocalVariableTable attribute (JVMS 4.7.13) */
+ object scoping {
+
+ // per-local stack of open (not yet exited) scope-start labels
+ private val pending = mutable.Map.empty[Local, mutable.Stack[Label]]
+ // completed (entered and exited) scopes, in reverse order of exit
+ private var seen: List[LocVarEntry] = Nil
+
+ // Insert `added` into the sorted, pairwise-disjoint list `ranges`,
+ // merging it with the first interval it overlaps (if any).
+ private def fuse(ranges: List[Interval], added: Interval): List[Interval] = {
+ assert(added.repOK, added)
+ if(ranges.isEmpty) { return List(added) }
+ // precond: ranges is sorted by increasing start
+ var fused: List[Interval] = Nil
+ var done = false
+ var rest = ranges
+ while(!done && rest.nonEmpty) {
+ val current = rest.head
+ assert(current.repOK, current)
+ rest = rest.tail
+ if(added precedes current) {
+ fused = fused ::: ( added :: current :: rest )
+ done = true
+ } else if(current overlaps added) {
+ fused = fused ::: ( added.mergeWith(current) :: rest )
+ done = true
+ }
+ }
+ if(!done) { fused = fused ::: List(added) }
+ assert(repOK(fused), fused)
+
+ fused
+ }
+
+ // Record a SCOPE_ENTER for `lv` at label `start`.
+ def pushScope(lv: Local, start: Label) {
+ val st = pending.getOrElseUpdate(lv, mutable.Stack.empty[Label])
+ st.push(start)
+ }
+ // Record a SCOPE_EXIT for `lv` at label `end`, pairing it with the most recent unmatched start.
+ def popScope(lv: Local, end: Label, iPos: Position) {
+ pending.get(lv) match {
+ case Some(st) if st.nonEmpty =>
+ val start = st.pop()
+ seen ::= LocVarEntry(lv, start, end)
+ case _ =>
+ // TODO SI-6049 track down the cause for these.
+ debugwarn(s"$iPos: Visited SCOPE_EXIT before visiting corresponding SCOPE_ENTER. SI-6191")
+ }
+ }
+
+ // Returns, per local, its in-scope ranges fused into sorted, disjoint intervals;
+ // unbalanced scope starts are closed off heuristically (see below).
+ def getMerged(): scala.collection.Map[Local, List[Interval]] = {
+ // TODO should but isn't: unbalanced start(s) of scope(s)
+ val shouldBeEmpty = pending filter { p => val Pair(k, st) = p; st.nonEmpty };
+ val merged = mutable.Map[Local, List[Interval]]()
+ def addToMerged(lv: Local, start: Label, end: Label) {
+ val intv = Interval(start, end)
+ merged(lv) = if (merged contains lv) fuse(merged(lv), intv) else intv :: Nil
+ }
+ for(LocVarEntry(lv, start, end) <- seen) { addToMerged(lv, start, end) }
+
+ /* for each var with unbalanced start(s) of scope(s):
+ (a) take the earliest start (among unbalanced and balanced starts)
+ (b) take the latest end (onePastLast if none available)
+ (c) merge the thus made-up interval
+ */
+ for(Pair(k, st) <- shouldBeEmpty) {
+ var start = st.toList.sortBy(_.getOffset).head
+ if(merged.isDefinedAt(k)) {
+ val balancedStart = merged(k).head.lstart
+ if(balancedStart.getOffset < start.getOffset) {
+ start = balancedStart;
+ }
+ }
+ val endOpt: Option[Label] = for(ranges <- merged.get(k)) yield ranges.last.lend;
+ val end = endOpt.getOrElse(onePastLast)
+ addToMerged(k, start, end)
+ }
+
+ merged
+ }
+
+ // representation invariant for fused lists: sorted, disjoint, each interval well-formed
+ private def repOK(fused: List[Interval]): Boolean = {
+ fused match {
+ case Nil => true
+ case h :: Nil => h.repOK
+ case h :: n :: rest =>
+ h.repOK && h.precedes(n) && !h.overlaps(n) && repOK(n :: rest)
+ }
+ }
+
+ }
+
+ // Emits the LocalVariableTable (JVMS 4.7.13): `this` and the formal params are scoped over
+ // the whole method ([startBlock label, onePastLast]); other locals get the intervals
+ // recorded by `scoping`, sorted deterministically so output is stable across runs.
+ def genLocalVariableTable() {
+ // adding `this` and method params.
+ if (!isStatic) {
+ jmethod.visitLocalVariable("this", thisDescr, null, labels(m.startBlock), onePastLast, 0)
+ }
+ for(lv <- m.params) {
+ jmethod.visitLocalVariable(javaName(lv.sym), descriptor(lv.kind), null, labels(m.startBlock), onePastLast, indexOf(lv))
+ }
+ // adding non-param locals
+ var anonCounter = 0
+ var fltnd: List[Triple[String, Local, Interval]] = Nil
+ for(Pair(local, ranges) <- scoping.getMerged()) {
+ var name = javaName(local.sym)
+ if (name == null) {
+ // synthesize a name for locals whose symbol has no java name
+ anonCounter += 1;
+ name = "<anon" + anonCounter + ">"
+ }
+ for(intrvl <- ranges) {
+ fltnd ::= Triple(name, local, intrvl)
+ }
+ }
+ // quest for deterministic output that Map.toList doesn't provide (so that ant test.stability doesn't complain).
+ val srtd = fltnd.sortBy { kr =>
+ val Triple(name: String, local: Local, intrvl: Interval) = kr
+
+ Triple(intrvl.start, intrvl.end - intrvl.start, name) // ie sort by (start, length, name)
+ }
+
+ for(Triple(name, local, Interval(start, end)) <- srtd) {
+ jmethod.visitLocalVariable(name, descriptor(local.kind), null, start, end, indexOf(local))
+ }
+ // "There may be no more than one LocalVariableTable attribute per local variable in the Code attribute"
+ }
+
+ // ------------------------------------------------------------------------------------------------------------
+ // Part 4 of genCode(): Bookkeeping (to later emit debug info) of association between line-number and instruction position.
+ // ------------------------------------------------------------------------------------------------------------
+
+ case class LineNumberEntry(line: Int, start: asm.Label)
+ var lastLineNr: Int = -1
+ var lnEntries: List[LineNumberEntry] = Nil
+
+ // ------------------------------------------------------------------------------------------------------------
+ // Part 5 of genCode(): "Utilities" to emit code proper (most prominently: genBlock()).
+ // ------------------------------------------------------------------------------------------------------------
+
+ var nextBlock: BasicBlock = linearization.head
+
+ // Emits the given blocks in order, maintaining `nextBlock` as the fall-through
+ // successor of the block being emitted (null for the last one) so that
+ // genBlock() / genInstr() can elide redundant jumps.
+ def genBlocks(l: List[BasicBlock]): Unit = {
+ var remaining = l
+ while (remaining.nonEmpty) {
+ val current = remaining.head
+ remaining = remaining.tail
+ nextBlock = if (remaining.isEmpty) null else remaining.head
+ genBlock(current)
+ }
+ }
+
+ // A public member is accessible from anywhere; a protected one only from a
+ // subclass of its owner, or from within its `privateWithin` package.
+ def isAccessibleFrom(target: Symbol, site: Symbol): Boolean = {
+ if (target.isPublic) true
+ else if (!target.isProtected) false
+ else {
+ (site.enclClass isSubClass target.enclClass) ||
+ (site.enclosingPackage == target.privateWithin)
+ }
+ } // end of genCode()'s isAccessibleFrom()
+
+ /** Emits the invocation instruction for `call`, choosing among invokespecial /
+ * invokestatic / invokeinterface / invokevirtual based on the call style, and —
+ * for a module's super-constructor call — eagerly initializing MODULE$ (initModule()).
+ * Fix: the debug log for the interface case previously printed "invokinterface".
+ */
+ def genCallMethod(call: CALL_METHOD) {
+ val CALL_METHOD(method, style) = call
+ val siteSymbol = clasz.symbol
+ val hostSymbol = call.hostClass
+ val methodOwner = method.owner
+ // info calls so that types are up to date; erasure may add lateINTERFACE to traits
+ hostSymbol.info ; methodOwner.info
+
+ def needsInterfaceCall(sym: Symbol) = (
+ sym.isInterface
+ || sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
+ )
+ // whether to reference the type of the receiver or
+ // the type of the method owner
+ val useMethodOwner = (
+ style != Dynamic
+ || hostSymbol.isBottomClass
+ || methodOwner == ObjectClass
+ )
+ val receiver = if (useMethodOwner) methodOwner else hostSymbol
+ val jowner = javaName(receiver)
+ val jname = javaName(method)
+ val jtype = javaType(method).getDescriptor()
+
+ def dbg(invoke: String) {
+ debuglog("%s %s %s.%s:%s".format(invoke, receiver.accessString, jowner, jname, jtype))
+ }
+
+ def initModule() {
+ // we initialize the MODULE$ field immediately after the super ctor
+ if (isStaticModule(siteSymbol) && !isModuleInitialized &&
+ jMethodName == INSTANCE_CONSTRUCTOR_NAME &&
+ jname == INSTANCE_CONSTRUCTOR_NAME) {
+ isModuleInitialized = true
+ jmethod.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ jmethod.visitFieldInsn(asm.Opcodes.PUTSTATIC, thisName, strMODULE_INSTANCE_FIELD, thisDescr)
+ }
+ }
+
+ style match {
+ case Static(true) => dbg("invokespecial"); jcode.invokespecial (jowner, jname, jtype)
+ case Static(false) => dbg("invokestatic"); jcode.invokestatic (jowner, jname, jtype)
+ case Dynamic if needsInterfaceCall(receiver) => dbg("invokeinterface"); jcode.invokeinterface(jowner, jname, jtype)
+ case Dynamic => dbg("invokevirtual"); jcode.invokevirtual (jowner, jname, jtype)
+ case SuperCall(_) =>
+ dbg("invokespecial")
+ jcode.invokespecial(jowner, jname, jtype)
+ initModule()
+ }
+ } // end of genCode()'s genCallMethod()
+
+ // Emits the label for `b`, records a LineNumberEntry per change of source line
+ // (consecutive instructions on the same line share one entry), then emits each
+ // instruction through genInstr().
+ def genBlock(b: BasicBlock) {
+ jmethod.visitLabel(labels(b))
+
+ debuglog("Generating code for block: " + b)
+
+ // val lastInstr = b.lastInstruction
+
+ for (instr <- b) {
+
+ if(instr.pos.isDefined) {
+ val iPos = instr.pos
+ val currentLineNr = iPos.line
+ val skip = (currentLineNr == lastLineNr) // if(iPos.isRange) iPos.sameRange(lastPos) else
+ if(!skip) {
+ lastLineNr = currentLineNr
+ val lineLab = new asm.Label
+ jmethod.visitLabel(lineLab)
+ lnEntries ::= LineNumberEntry(currentLineNr, lineLab)
+ }
+ }
+
+ genInstr(instr, b)
+
+ }
+
+ }
+
+ // Emits the bytecode for a single ICode instruction. Dispatches on the instruction's
+ // category via a @switch over Int tags; `b` is the enclosing block, needed by the jump
+ // cases to decide fall-through against `nextBlock` and for the SI-6102 NOP padding.
+ def genInstr(instr: Instruction, b: BasicBlock) {
+ import asm.Opcodes
+ (instr.category: @scala.annotation.switch) match {
+
+ case icodes.localsCat =>
+ def genLocalInstr() = (instr: @unchecked) match {
+ case THIS(_) => jmethod.visitVarInsn(Opcodes.ALOAD, 0)
+ case LOAD_LOCAL(local) => jcode.load(indexOf(local), local.kind)
+ case STORE_LOCAL(local) => jcode.store(indexOf(local), local.kind)
+ case STORE_THIS(_) =>
+ // this only works for impl classes because the self parameter comes first
+ // in the method signature. If that changes, this code has to be revisited.
+ jmethod.visitVarInsn(Opcodes.ASTORE, 0)
+
+ case SCOPE_ENTER(lv) =>
+ // locals removed by closelim (via CopyPropagation) may have left behind SCOPE_ENTER, SCOPE_EXIT that are to be ignored
+ val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
+ if (relevant) { // TODO check: does GenICode emit SCOPE_ENTER, SCOPE_EXIT for synthetic vars?
+ // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
+ // similarly, these labels aren't tracked in the `labels` map.
+ val start = new asm.Label
+ jmethod.visitLabel(start)
+ scoping.pushScope(lv, start)
+ }
+
+ case SCOPE_EXIT(lv) =>
+ val relevant = (!lv.sym.isSynthetic && m.locals.contains(lv))
+ if (relevant) {
+ // this label will have DEBUG bit set in its flags (ie ASM ignores it for dataflow purposes)
+ // similarly, these labels aren't tracked in the `labels` map.
+ val end = new asm.Label
+ jmethod.visitLabel(end)
+ scoping.popScope(lv, end, instr.pos)
+ }
+ }
+ genLocalInstr
+
+ case icodes.stackCat =>
+ def genStackInstr() = (instr: @unchecked) match {
+
+ case LOAD_MODULE(module) =>
+ // assert(module.isModule, "Expected module: " + module)
+ debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
+ // inside the module class's own methods (except readResolve), `this` IS the module instance
+ if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) {
+ jmethod.visitVarInsn(Opcodes.ALOAD, 0)
+ } else {
+ jmethod.visitFieldInsn(
+ Opcodes.GETSTATIC,
+ javaName(module) /* + "$" */ ,
+ strMODULE_INSTANCE_FIELD,
+ descriptor(module))
+ }
+
+ case DROP(kind) => emit(if (kind.isWideType) Opcodes.POP2 else Opcodes.POP)
+
+ case DUP(kind) => emit(if (kind.isWideType) Opcodes.DUP2 else Opcodes.DUP)
+
+ case LOAD_EXCEPTION(_) => () // the exception reference is already on the stack at handler entry
+ }
+ genStackInstr
+
+ case icodes.constCat => genConstant(jmethod, instr.asInstanceOf[CONSTANT].constant)
+
+ case icodes.arilogCat => genPrimitive(instr.asInstanceOf[CALL_PRIMITIVE].primitive, instr.pos)
+
+ case icodes.castsCat =>
+ def genCastInstr() = (instr: @unchecked) match {
+
+ case IS_INSTANCE(tpe) =>
+ val jtyp: asm.Type =
+ tpe match {
+ case REFERENCE(cls) => asm.Type.getObjectType(javaName(cls))
+ case ARRAY(elem) => javaArrayType(javaType(elem))
+ case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
+ }
+ jmethod.visitTypeInsn(Opcodes.INSTANCEOF, jtyp.getInternalName)
+
+ case CHECK_CAST(tpe) =>
+ tpe match {
+
+ case REFERENCE(cls) =>
+ if (cls != ObjectClass) { // No need to checkcast for Objects
+ jmethod.visitTypeInsn(Opcodes.CHECKCAST, javaName(cls))
+ }
+
+ case ARRAY(elem) =>
+ val iname = javaArrayType(javaType(elem)).getInternalName
+ jmethod.visitTypeInsn(Opcodes.CHECKCAST, iname)
+
+ case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
+ }
+
+ }
+ genCastInstr
+
+ case icodes.objsCat =>
+ def genObjsInstr() = (instr: @unchecked) match {
+
+ case BOX(kind) =>
+ val MethodNameAndType(mname, mdesc) = jBoxTo(kind)
+ jcode.invokestatic(BoxesRunTime, mname, mdesc)
+
+ case UNBOX(kind) =>
+ val MethodNameAndType(mname, mdesc) = jUnboxTo(kind)
+ jcode.invokestatic(BoxesRunTime, mname, mdesc)
+
+ case NEW(REFERENCE(cls)) =>
+ val className = javaName(cls)
+ jmethod.visitTypeInsn(Opcodes.NEW, className)
+
+ case MONITOR_ENTER() => emit(Opcodes.MONITORENTER)
+ case MONITOR_EXIT() => emit(Opcodes.MONITOREXIT)
+ }
+ genObjsInstr
+
+ case icodes.fldsCat =>
+ def genFldsInstr() = (instr: @unchecked) match {
+
+ case lf @ LOAD_FIELD(field, isStatic) =>
+ var owner = javaName(lf.hostClass)
+ debuglog("LOAD_FIELD with owner: " + owner + " flags: " + Flags.flagsToString(field.owner.flags))
+ val fieldJName = javaName(field)
+ val fieldDescr = descriptor(field)
+ val opc = if (isStatic) Opcodes.GETSTATIC else Opcodes.GETFIELD
+ jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
+
+ case STORE_FIELD(field, isStatic) =>
+ val owner = javaName(field.owner)
+ val fieldJName = javaName(field)
+ val fieldDescr = descriptor(field)
+ val opc = if (isStatic) Opcodes.PUTSTATIC else Opcodes.PUTFIELD
+ jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
+
+ }
+ genFldsInstr
+
+ case icodes.mthdsCat =>
+ def genMethodsInstr() = (instr: @unchecked) match {
+
+ /** Special handling to access native Array.clone() */
+ case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
+ val target: String = javaType(call.targetTypeKind).getInternalName
+ jcode.invokevirtual(target, "clone", mdesc_arrayClone)
+
+ case call @ CALL_METHOD(method, style) => genCallMethod(call)
+
+ }
+ genMethodsInstr
+
+ case icodes.arraysCat =>
+ def genArraysInstr() = (instr: @unchecked) match {
+ case LOAD_ARRAY_ITEM(kind) => jcode.aload(kind)
+ case STORE_ARRAY_ITEM(kind) => jcode.astore(kind)
+ case CREATE_ARRAY(elem, 1) => jcode newarray elem
+ case CREATE_ARRAY(elem, dims) => jmethod.visitMultiANewArrayInsn(descriptor(ArrayN(elem, dims)), dims)
+ }
+ genArraysInstr
+
+ case icodes.jumpsCat =>
+ def genJumpInstr() = (instr: @unchecked) match {
+
+ case sw @ SWITCH(tagss, branches) =>
+ assert(branches.length == tagss.length + 1, sw)
+ // flatten the per-branch tag lists into parallel (key, label) arrays for emitSWITCH
+ val flatSize = sw.flatTagsCount
+ val flatKeys = new Array[Int](flatSize)
+ val flatBranches = new Array[asm.Label](flatSize)
+
+ var restTagss = tagss
+ var restBranches = branches
+ var k = 0 // ranges over flatKeys and flatBranches
+ while (restTagss.nonEmpty) {
+ val currLabel = labels(restBranches.head)
+ for (cTag <- restTagss.head) {
+ flatKeys(k) = cTag;
+ flatBranches(k) = currLabel
+ k += 1
+ }
+ restTagss = restTagss.tail
+ restBranches = restBranches.tail
+ }
+ val defaultLabel = labels(restBranches.head)
+ assert(restBranches.tail.isEmpty)
+ debuglog("Emitting SWITCH:\ntags: " + tagss + "\nbranches: " + branches)
+ jcode.emitSWITCH(flatKeys, flatBranches, defaultLabel, MIN_SWITCH_DENSITY)
+
+ case JUMP(whereto) =>
+ if (nextBlock != whereto) {
+ jcode goTo labels(whereto)
+ } else if (m.exh.exists(eh => eh.covers(b))) {
+ // SI-6102: Determine whether eliding this JUMP results in an empty range being covered by some EH.
+ // If so, emit a NOP in place of the elided JUMP, to avoid "java.lang.ClassFormatError: Illegal exception table range"
+ val isSthgLeft = b.toList.exists {
+ case _: LOAD_EXCEPTION => false
+ case _: SCOPE_ENTER => false
+ case _: SCOPE_EXIT => false
+ case _: JUMP => false
+ case _ => true
+ }
+ if (!isSthgLeft) {
+ emit(asm.Opcodes.NOP)
+ }
+ }
+
+ case CJUMP(success, failure, cond, kind) =>
+ if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ if (nextBlock == success) {
+ jcode.emitIF_ICMP(cond.negate, labels(failure))
+ // .. and fall through to success label
+ } else {
+ jcode.emitIF_ICMP(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ } else if (kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ if (nextBlock == success) {
+ jcode.emitIF_ACMP(cond.negate, labels(failure))
+ // .. and fall through to success label
+ } else {
+ jcode.emitIF_ACMP(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ } else {
+ // long/float/double: emit a comparison (NaN-aware FCMPG/FCMPL choice), then branch on the int result
+ (kind: @unchecked) match {
+ case LONG => emit(Opcodes.LCMP)
+ case FLOAT =>
+ if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
+ else emit(Opcodes.FCMPL)
+ case DOUBLE =>
+ if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
+ else emit(Opcodes.DCMPL)
+ }
+ if (nextBlock == success) {
+ jcode.emitIF(cond.negate, labels(failure))
+ // .. and fall through to success label
+ } else {
+ jcode.emitIF(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ }
+
+ case CZJUMP(success, failure, cond, kind) =>
+ if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ if (nextBlock == success) {
+ jcode.emitIF(cond.negate, labels(failure))
+ } else {
+ jcode.emitIF(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ } else if (kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ // capitalized vals act as stable identifiers in the pattern match below
+ val Success = success
+ val Failure = failure
+ // @unchecked because references aren't compared with GT, GE, LT, LE.
+ ((cond, nextBlock): @unchecked) match {
+ case (EQ, Success) => jcode emitIFNONNULL labels(failure)
+ case (NE, Failure) => jcode emitIFNONNULL labels(success)
+ case (EQ, Failure) => jcode emitIFNULL labels(success)
+ case (NE, Success) => jcode emitIFNULL labels(failure)
+ case (EQ, _) =>
+ jcode emitIFNULL labels(success)
+ jcode goTo labels(failure)
+ case (NE, _) =>
+ jcode emitIFNONNULL labels(success)
+ jcode goTo labels(failure)
+ }
+ } else {
+ // long/float/double compared against zero, then branch on the int result
+ (kind: @unchecked) match {
+ case LONG =>
+ emit(Opcodes.LCONST_0)
+ emit(Opcodes.LCMP)
+ case FLOAT =>
+ emit(Opcodes.FCONST_0)
+ if (cond == LT || cond == LE) emit(Opcodes.FCMPG)
+ else emit(Opcodes.FCMPL)
+ case DOUBLE =>
+ emit(Opcodes.DCONST_0)
+ if (cond == LT || cond == LE) emit(Opcodes.DCMPG)
+ else emit(Opcodes.DCMPL)
+ }
+ if (nextBlock == success) {
+ jcode.emitIF(cond.negate, labels(failure))
+ } else {
+ jcode.emitIF(cond, labels(success))
+ if (nextBlock != failure) { jcode goTo labels(failure) }
+ }
+ }
+
+ }
+ genJumpInstr
+
+ case icodes.retCat =>
+ def genRetInstr() = (instr: @unchecked) match {
+ case RETURN(kind) => jcode emitRETURN kind
+ case THROW(_) => emit(Opcodes.ATHROW)
+ }
+ genRetInstr
+ }
+ }
+
+ /**
+ * Emits one or more conversion instructions based on the types given as arguments.
+ *
+ * @param from The type of the value to be converted into another type.
+ * @param to The type the value will be converted into.
+ */
+ def emitT2T(from: TypeKind, to: TypeKind) {
+ assert(isNonUnitValueTK(from), from)
+ assert(isNonUnitValueTK(to), to)
+
+ def pickOne(opcs: Array[Int]) {
+ val chosen = (to: @unchecked) match {
+ case BYTE => opcs(0)
+ case SHORT => opcs(1)
+ case CHAR => opcs(2)
+ case INT => opcs(3)
+ case LONG => opcs(4)
+ case FLOAT => opcs(5)
+ case DOUBLE => opcs(6)
+ }
+ if(chosen != -1) { emit(chosen) }
+ }
+
+ if(from == to) { return }
+ if((from == BOOL) || (to == BOOL)) {
+ // the only conversion involving BOOL that is allowed is (BOOL -> BOOL)
+ throw new Error("inconvertible types : " + from.toString() + " -> " + to.toString())
+ }
+
+ if(from.isIntSizedType) { // BYTE, CHAR, SHORT, and INT. (we're done with BOOL already)
+
+ val fromByte = { import asm.Opcodes._; Array( -1, -1, I2C, -1, I2L, I2F, I2D) } // do nothing for (BYTE -> SHORT) and for (BYTE -> INT)
+ val fromChar = { import asm.Opcodes._; Array(I2B, I2S, -1, -1, I2L, I2F, I2D) } // for (CHAR -> INT) do nothing
+ val fromShort = { import asm.Opcodes._; Array(I2B, -1, I2C, -1, I2L, I2F, I2D) } // for (SHORT -> INT) do nothing
+ val fromInt = { import asm.Opcodes._; Array(I2B, I2S, I2C, -1, I2L, I2F, I2D) }
+
+ (from: @unchecked) match {
+ case BYTE => pickOne(fromByte)
+ case SHORT => pickOne(fromShort)
+ case CHAR => pickOne(fromChar)
+ case INT => pickOne(fromInt)
+ }
+
+ } else { // FLOAT, LONG, DOUBLE
+
+ (from: @unchecked) match {
+ case FLOAT =>
+ import asm.Opcodes.{ F2L, F2D, F2I }
+ (to: @unchecked) match {
+ case LONG => emit(F2L)
+ case DOUBLE => emit(F2D)
+ case _ => emit(F2I); emitT2T(INT, to)
+ }
+
+ case LONG =>
+ import asm.Opcodes.{ L2F, L2D, L2I }
+ (to: @unchecked) match {
+ case FLOAT => emit(L2F)
+ case DOUBLE => emit(L2D)
+ case _ => emit(L2I); emitT2T(INT, to)
+ }
+
+ case DOUBLE =>
+ import asm.Opcodes.{ D2L, D2F, D2I }
+ (to: @unchecked) match {
+ case FLOAT => emit(D2F)
+ case LONG => emit(D2L)
+ case _ => emit(D2I); emitT2T(INT, to)
+ }
+ }
+ }
+ } // end of genCode()'s emitT2T()
+
+ def genPrimitive(primitive: Primitive, pos: Position) {
+
+ import asm.Opcodes;
+
+ primitive match {
+
+ case Negation(kind) => jcode.neg(kind)
+
+ case Arithmetic(op, kind) =>
+ def genArith() = {
+ op match {
+
+ case ADD => jcode.add(kind)
+ case SUB => jcode.sub(kind)
+ case MUL => jcode.mul(kind)
+ case DIV => jcode.div(kind)
+ case REM => jcode.rem(kind)
+
+ case NOT =>
+ if(kind.isIntSizedType) {
+ emit(Opcodes.ICONST_M1)
+ emit(Opcodes.IXOR)
+ } else if(kind == LONG) {
+ jmethod.visitLdcInsn(new java.lang.Long(-1))
+ jmethod.visitInsn(Opcodes.LXOR)
+ } else {
+ abort("Impossible to negate an " + kind)
+ }
+
+ case _ =>
+ abort("Unknown arithmetic primitive " + primitive)
+ }
+ }
+ genArith
+
+ // TODO Logical's 2nd elem should be declared ValueTypeKind, to better approximate its allowed values (isIntSized, its comments appears to convey)
+ // TODO GenICode uses `toTypeKind` to define that elem, `toValueTypeKind` would be needed instead.
+ // TODO How about adding some asserts to Logical and similar ones to capture the remaining constraint (UNIT not allowed).
+ case Logical(op, kind) =>
+ def genLogical() = op match {
+ case AND =>
+ kind match {
+ case LONG => emit(Opcodes.LAND)
+ case INT => emit(Opcodes.IAND)
+ case _ =>
+ emit(Opcodes.IAND)
+ if (kind != BOOL) { emitT2T(INT, kind) }
+ }
+ case OR =>
+ kind match {
+ case LONG => emit(Opcodes.LOR)
+ case INT => emit(Opcodes.IOR)
+ case _ =>
+ emit(Opcodes.IOR)
+ if (kind != BOOL) { emitT2T(INT, kind) }
+ }
+ case XOR =>
+ kind match {
+ case LONG => emit(Opcodes.LXOR)
+ case INT => emit(Opcodes.IXOR)
+ case _ =>
+ emit(Opcodes.IXOR)
+ if (kind != BOOL) { emitT2T(INT, kind) }
+ }
+ }
+ genLogical
+
+ case Shift(op, kind) =>
+ def genShift() = op match {
+ case LSL =>
+ kind match {
+ case LONG => emit(Opcodes.LSHL)
+ case INT => emit(Opcodes.ISHL)
+ case _ =>
+ emit(Opcodes.ISHL)
+ emitT2T(INT, kind)
+ }
+ case ASR =>
+ kind match {
+ case LONG => emit(Opcodes.LSHR)
+ case INT => emit(Opcodes.ISHR)
+ case _ =>
+ emit(Opcodes.ISHR)
+ emitT2T(INT, kind)
+ }
+ case LSR =>
+ kind match {
+ case LONG => emit(Opcodes.LUSHR)
+ case INT => emit(Opcodes.IUSHR)
+ case _ =>
+ emit(Opcodes.IUSHR)
+ emitT2T(INT, kind)
+ }
+ }
+ genShift
+
+ case Comparison(op, kind) =>
+ def genCompare() = op match {
+ case CMP =>
+ (kind: @unchecked) match {
+ case LONG => emit(Opcodes.LCMP)
+ }
+ case CMPL =>
+ (kind: @unchecked) match {
+ case FLOAT => emit(Opcodes.FCMPL)
+ case DOUBLE => emit(Opcodes.DCMPL)
+ }
+ case CMPG =>
+ (kind: @unchecked) match {
+ case FLOAT => emit(Opcodes.FCMPG)
+ case DOUBLE => emit(Opcodes.DCMPL) // TODO bug? why not DCMPG? http://docs.oracle.com/javase/specs/jvms/se5.0/html/Instructions2.doc3.html
+
+ }
+ }
+ genCompare
+
+ case Conversion(src, dst) =>
+ debuglog("Converting from: " + src + " to: " + dst)
+ if (dst == BOOL) { println("Illegal conversion at: " + clasz + " at: " + pos.source + ":" + pos.line) }
+ else { emitT2T(src, dst) }
+
+ case ArrayLength(_) => emit(Opcodes.ARRAYLENGTH)
+
+ case StartConcat =>
+ jmethod.visitTypeInsn(Opcodes.NEW, StringBuilderClassName)
+ jmethod.visitInsn(Opcodes.DUP)
+ jcode.invokespecial(
+ StringBuilderClassName,
+ INSTANCE_CONSTRUCTOR_NAME,
+ mdesc_arglessvoid
+ )
+
+ case StringConcat(el) =>
+ val jtype = el match {
+ case REFERENCE(_) | ARRAY(_) => JAVA_LANG_OBJECT
+ case _ => javaType(el)
+ }
+ jcode.invokevirtual(
+ StringBuilderClassName,
+ "append",
+ asm.Type.getMethodDescriptor(StringBuilderType, Array(jtype): _*)
+ )
+
+ case EndConcat =>
+ jcode.invokevirtual(StringBuilderClassName, "toString", mdesc_toString)
+
+ case _ => abort("Unimplemented primitive " + primitive)
+ }
+ } // end of genCode()'s genPrimitive()
+
+ // ------------------------------------------------------------------------------------------------------------
+ // Part 6 of genCode(): the executable part of genCode() starts here.
+ // ------------------------------------------------------------------------------------------------------------
+
+ genBlocks(linearization)
+
+ jmethod.visitLabel(onePastLast)
+
+ if(emitLines) {
+ for(LineNumberEntry(line, start) <- lnEntries.sortBy(_.start.getOffset)) { jmethod.visitLineNumber(line, start) }
+ }
+ if(emitVars) { genLocalVariableTable() }
+
+ } // end of BytecodeGenerator.genCode()
+
+
+ ////////////////////// local vars ///////////////////////
+
+ // def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe))
+
+ def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1
+
+ // def indexOf(m: IMethod, sym: Symbol): Int = {
+ // val Some(local) = m lookupLocal sym
+ // indexOf(local)
+ // }
+
+ final def indexOf(local: Local): Int = {
+ assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ")
+ local.index
+ }
+
+ /**
+ * Compute the indexes of each local variable of the given method.
+ * *Does not assume the parameters come first!*
+ */
+ def computeLocalVarsIndex(m: IMethod) {
+ var idx = if (m.symbol.isStaticMember) 0 else 1;
+
+ for (l <- m.params) {
+ debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
+ l.index = idx
+ idx += sizeOf(l.kind)
+ }
+
+ for (l <- m.locals if !l.arg) {
+ debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
+ l.index = idx
+ idx += sizeOf(l.kind)
+ }
+ }
+
+ } // end of class JPlainBuilder
+
+
+ /** builder of mirror classes */
+ class JMirrorBuilder(bytecodeWriter: BytecodeWriter) extends JCommonBuilder(bytecodeWriter) {
+
+ private var cunit: CompilationUnit = _
+ def getCurrentCUnit(): CompilationUnit = cunit;
+
+ /** Generate a mirror class for a top-level module. A mirror class is a class
+ * containing only static methods that forward to the corresponding method
+ * on the MODULE instance of the given Scala object. It will only be
+ * generated if there is no companion class: if there is, an attempt will
+ * instead be made to add the forwarder methods to the companion class.
+ */
+ def genMirrorClass(modsym: Symbol, cunit: CompilationUnit) {
+ assert(modsym.companionClass == NoSymbol, modsym)
+ innerClassBuffer.clear()
+ this.cunit = cunit
+ val moduleName = javaName(modsym) // + "$"
+ val mirrorName = moduleName.substring(0, moduleName.length() - 1)
+
+ val flags = (asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL)
+ val mirrorClass = createJClass(flags,
+ mirrorName,
+ null /* no java-generic-signature */,
+ JAVA_LANG_OBJECT.getInternalName,
+ EMPTY_STRING_ARRAY)
+
+ log("Dumping mirror class for '%s'".format(mirrorName))
+
+ // typestate: entering mode with valid call sequences:
+ // [ visitSource ] [ visitOuterClass ] ( visitAnnotation | visitAttribute )*
+
+ if(emitSource) {
+ mirrorClass.visitSource("" + cunit.source,
+ null /* SourceDebugExtension */)
+ }
+
+ val ssa = getAnnotPickle(mirrorName, modsym.companionSymbol)
+ mirrorClass.visitAttribute(if(ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
+ emitAnnotations(mirrorClass, modsym.annotations ++ ssa)
+
+ // typestate: entering mode with valid call sequences:
+ // ( visitInnerClass | visitField | visitMethod )* visitEnd
+
+ addForwarders(isRemote(modsym), mirrorClass, mirrorName, modsym)
+
+ addInnerClasses(modsym, mirrorClass)
+ mirrorClass.visitEnd()
+ writeIfNotTooBig("" + modsym.name, mirrorName, mirrorClass, modsym)
+ }
+
+
+ } // end of class JMirrorBuilder
+
+
+ /** builder of bean info classes */
+ class JBeanInfoBuilder(bytecodeWriter: BytecodeWriter) extends JBuilder(bytecodeWriter) {
+
+ /**
+ * Generate a bean info class that describes the given class.
+ *
+ * @author Ross Judson (ross.judson at soletta.com)
+ */
+ def genBeanInfoClass(clasz: IClass) {
+
+ // val BeanInfoSkipAttr = definitions.getRequiredClass("scala.beans.BeanInfoSkip")
+ // val BeanDisplayNameAttr = definitions.getRequiredClass("scala.beans.BeanDisplayName")
+ // val BeanDescriptionAttr = definitions.getRequiredClass("scala.beans.BeanDescription")
+ // val description = c.symbol getAnnotation BeanDescriptionAttr
+ // informProgress(description.toString)
+ innerClassBuffer.clear()
+
+ val flags = mkFlags(
+ javaFlags(clasz.symbol),
+ if(isDeprecated(clasz.symbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ val beanInfoName = (javaName(clasz.symbol) + "BeanInfo")
+ val beanInfoClass = createJClass(
+ flags,
+ beanInfoName,
+ null, // no java-generic-signature
+ "scala/beans/ScalaBeanInfo",
+ EMPTY_STRING_ARRAY
+ )
+
+ // beanInfoClass typestate: entering mode with valid call sequences:
+ // [ visitSource ] [ visitOuterClass ] ( visitAnnotation | visitAttribute )*
+
+ beanInfoClass.visitSource(
+ clasz.cunit.source.toString,
+ null /* SourceDebugExtension */
+ )
+
+ var fieldList = List[String]()
+
+ for (f <- clasz.fields if f.symbol.hasGetter;
+ g = f.symbol.getter(clasz.symbol);
+ s = f.symbol.setter(clasz.symbol);
+ if g.isPublic && !(f.symbol.name startsWith "$")
+ ) {
+ // inserting $outer breaks the bean
+ fieldList = javaName(f.symbol) :: javaName(g) :: (if (s != NoSymbol) javaName(s) else null) :: fieldList
+ }
+
+ val methodList: List[String] =
+ for (m <- clasz.methods
+ if !m.symbol.isConstructor &&
+ m.symbol.isPublic &&
+ !(m.symbol.name startsWith "$") &&
+ !m.symbol.isGetter &&
+ !m.symbol.isSetter)
+ yield javaName(m.symbol)
+
+ // beanInfoClass typestate: entering mode with valid call sequences:
+ // ( visitInnerClass | visitField | visitMethod )* visitEnd
+
+ val constructor = beanInfoClass.visitMethod(
+ asm.Opcodes.ACC_PUBLIC,
+ INSTANCE_CONSTRUCTOR_NAME,
+ mdesc_arglessvoid,
+ null, // no java-generic-signature
+ EMPTY_STRING_ARRAY // no throwable exceptions
+ )
+
+ // constructor typestate: entering mode with valid call sequences:
+ // [ visitAnnotationDefault ] ( visitAnnotation | visitParameterAnnotation | visitAttribute )*
+
+ val stringArrayJType: asm.Type = javaArrayType(JAVA_LANG_STRING)
+ val conJType: asm.Type =
+ asm.Type.getMethodType(
+ asm.Type.VOID_TYPE,
+ Array(javaType(ClassClass), stringArrayJType, stringArrayJType): _*
+ )
+
+ def push(lst: List[String]) {
+ var fi = 0
+ for (f <- lst) {
+ constructor.visitInsn(asm.Opcodes.DUP)
+ constructor.visitLdcInsn(new java.lang.Integer(fi))
+ if (f == null) { constructor.visitInsn(asm.Opcodes.ACONST_NULL) }
+ else { constructor.visitLdcInsn(f) }
+ constructor.visitInsn(JAVA_LANG_STRING.getOpcode(asm.Opcodes.IASTORE))
+ fi += 1
+ }
+ }
+
+ // constructor typestate: entering mode with valid call sequences:
+ // [ visitCode ( visitFrame | visitXInsn | visitLabel | visitTryCatchBlock | visitLocalVariable | visitLineNumber )* visitMaxs ] visitEnd
+
+ constructor.visitCode()
+
+ constructor.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ // push the class
+ constructor.visitLdcInsn(javaType(clasz.symbol))
+
+ // push the string array of field information
+ constructor.visitLdcInsn(new java.lang.Integer(fieldList.length))
+ constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName)
+ push(fieldList)
+
+ // push the string array of method information
+ constructor.visitLdcInsn(new java.lang.Integer(methodList.length))
+ constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName)
+ push(methodList)
+
+ // invoke the superclass constructor, which will do the
+ // necessary java reflection and create Method objects.
+ constructor.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, "scala/beans/ScalaBeanInfo", INSTANCE_CONSTRUCTOR_NAME, conJType.getDescriptor)
+ constructor.visitInsn(asm.Opcodes.RETURN)
+
+ constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ constructor.visitEnd()
+
+ addInnerClasses(clasz.symbol, beanInfoClass)
+ beanInfoClass.visitEnd()
+
+ writeIfNotTooBig("BeanInfo ", beanInfoName, beanInfoClass, clasz.symbol)
+ }
+
+ } // end of class JBeanInfoBuilder
+
+ /** A namespace for utilities to normalize the code of an IMethod, over and beyond what IMethod.normalize() strives for.
+ * In particualr, IMethod.normalize() doesn't collapseJumpChains().
+ *
+ * TODO Eventually, these utilities should be moved to IMethod and reused from normalize() (there's nothing JVM-specific about them).
+ */
+ object newNormal {
+
+ def startsWithJump(b: BasicBlock): Boolean = { assert(b.nonEmpty, "empty block"); b.firstInstruction.isInstanceOf[JUMP] }
+
+ /** Prune from an exception handler those covered blocks which are jump-only. */
+ private def coverWhatCountsOnly(m: IMethod): Boolean = {
+ assert(m.hasCode, "code-less method")
+
+ var wasReduced = false
+ for(h <- m.exh) {
+ val shouldntCover = (h.covered filter startsWithJump)
+ if(shouldntCover.nonEmpty) {
+ wasReduced = true
+ h.covered --= shouldntCover // not removing any block on purpose.
+ }
+ }
+
+ wasReduced
+ }
+
+ /** An exception handler is pruned provided any of the following holds:
+ * (1) it covers nothing (for example, this may result after removing unreachable blocks)
+ * (2) each block it covers is of the form: JUMP(_)
+ * Return true iff one or more ExceptionHandlers were removed.
+ *
+ * A caveat: removing an exception handler, for whatever reason, means that its handler code (even if unreachable)
+ * won't be able to cause a class-loading-exception. As a result, behavior can be different.
+ */
+ private def elimNonCoveringExh(m: IMethod): Boolean = {
+ assert(m.hasCode, "code-less method")
+
+ def isRedundant(eh: ExceptionHandler): Boolean = {
+ (eh.cls != NoSymbol) && ( // TODO `eh.isFinallyBlock` more readable than `eh.cls != NoSymbol`
+ eh.covered.isEmpty
+ || (eh.covered forall startsWithJump)
+ )
+ }
+
+ var wasReduced = false
+ val toPrune = (m.exh.toSet filter isRedundant)
+ if(toPrune.nonEmpty) {
+ wasReduced = true
+ for(h <- toPrune; r <- h.blocks) { m.code.removeBlock(r) } // TODO m.code.removeExh(h)
+ m.exh = (m.exh filterNot toPrune)
+ }
+
+ wasReduced
+ }
+
+ private def isJumpOnly(b: BasicBlock): Option[BasicBlock] = {
+ b.toList match {
+ case JUMP(whereto) :: rest =>
+ assert(rest.isEmpty, "A block contains instructions after JUMP (looks like enterIgnoreMode() was itself ignored.)")
+ Some(whereto)
+ case _ => None
+ }
+ }
+
+ private def directSuccStar(b: BasicBlock): List[BasicBlock] = { directSuccStar(List(b)) }
+
+ /** Transitive closure of successors potentially reachable due to normal (non-exceptional) control flow.
+ Those BBs in the argument are also included in the result */
+ private def directSuccStar(starters: Traversable[BasicBlock]): List[BasicBlock] = {
+ val result = new mutable.ListBuffer[BasicBlock]
+ var toVisit: List[BasicBlock] = starters.toList.distinct
+ while(toVisit.nonEmpty) {
+ val h = toVisit.head
+ toVisit = toVisit.tail
+ result += h
+ for(p <- h.directSuccessors; if !result.contains(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
+ }
+ result.toList
+ }
+
+ /** Returns:
+ * for single-block self-loops, the pair (start, Nil)
+ * for other cycles, the pair (backedge-target, basic-blocks-in-the-cycle-except-backedge-target)
+ * otherwise a pair consisting of:
+ * (a) the endpoint of a (single or multi-hop) chain of JUMPs
+ * (such endpoint does not start with a JUMP and therefore is not part of the chain); and
+ * (b) the chain (ie blocks to be removed when collapsing the chain of jumps).
+ * Precondition: the BasicBlock given as argument starts with an unconditional JUMP.
+ */
+ private def finalDestination(start: BasicBlock): (BasicBlock, List[BasicBlock]) = {
+ assert(startsWithJump(start), "not the start of a (single or multi-hop) chain of JUMPs.")
+ var hops: List[BasicBlock] = Nil
+ var prev = start
+ var done = false
+ do {
+ done = isJumpOnly(prev) match {
+ case Some(dest) =>
+ if (dest == start) { return (start, hops) } // leave infinite-loops in place
+ hops ::= prev
+ if (hops.contains(dest)) {
+ // leave infinite-loops in place
+ return (dest, hops filterNot (dest eq _))
+ }
+ prev = dest;
+ false
+ case None => true
+ }
+ } while(!done)
+
+ (prev, hops)
+ }
+
+ /**
+ * Collapse a chain of "jump-only" blocks such as:
+ *
+ * JUMP b1;
+ * b1: JUMP b2;
+ * b2: JUMP ... etc.
+ *
+ * by re-wiring predecessors to target directly the "final destination".
+ * Even if covered by an exception handler, a "non-self-loop jump-only block" can always be removed.
+
+ * Returns true if any replacement was made, false otherwise.
+ *
+ * In more detail:
+ * Starting at each of the entry points (m.startBlock, the start block of each exception handler)
+ * rephrase those control-flow instructions targeting a jump-only block (which jumps to a final destination D) to target D.
+ * The blocks thus skipped are also removed from IMethod.blocks.
+ *
+ * Rationale for this normalization:
+ * test/files/run/private-inline.scala after -optimize is chock full of
+ * BasicBlocks containing just JUMP(whereTo), where no exception handler straddles them.
+ * They should be collapsed by IMethod.normalize() but aren't.
+ * That was fine in FJBG times when by the time the exception table was emitted,
+ * it already contained "anchored" labels (ie instruction offsets were known)
+ * and thus ranges with identical (start, end) (i.e, identical after GenJVM omitted the JUMPs in question)
+ * could be weeded out to avoid "java.lang.ClassFormatError: Illegal exception table range"
+ * Now that visitTryCatchBlock() must be called before Labels are resolved,
+ * this method gets rid of the BasicBlocks described above (to recap, consisting of just a JUMP).
+ */
+ private def collapseJumpOnlyBlocks(m: IMethod): Boolean = {
+ assert(m.hasCode, "code-less method")
+
+ /* "start" is relative in a cycle, but we call this helper with the "first" entry-point we found. */
+ def realTarget(jumpStart: BasicBlock): Map[BasicBlock, BasicBlock] = {
+ assert(startsWithJump(jumpStart), "not part of a jump-chain")
+ val Pair(dest, redundants) = finalDestination(jumpStart)
+ (for(skipOver <- redundants) yield Pair(skipOver, dest)).toMap
+ }
+
+ def rephraseGotos(detour: Map[BasicBlock, BasicBlock]) {
+ for(Pair(oldTarget, newTarget) <- detour.iterator) {
+ if(m.startBlock == oldTarget) {
+ m.code.startBlock = newTarget
+ }
+ for(eh <- m.exh; if eh.startBlock == oldTarget) {
+ eh.setStartBlock(newTarget)
+ }
+ for(b <- m.blocks; if !detour.isDefinedAt(b)) {
+ val idxLast = (b.size - 1)
+ b.lastInstruction match {
+ case JUMP(whereto) =>
+ if (whereto == oldTarget) {
+ b.replaceInstruction(idxLast, JUMP(newTarget))
+ }
+ case CJUMP(succ, fail, cond, kind) =>
+ if ((succ == oldTarget) || (fail == oldTarget)) {
+ b.replaceInstruction(idxLast, CJUMP(detour.getOrElse(succ, succ),
+ detour.getOrElse(fail, fail),
+ cond, kind))
+ }
+ case CZJUMP(succ, fail, cond, kind) =>
+ if ((succ == oldTarget) || (fail == oldTarget)) {
+ b.replaceInstruction(idxLast, CZJUMP(detour.getOrElse(succ, succ),
+ detour.getOrElse(fail, fail),
+ cond, kind))
+ }
+ case SWITCH(tags, labels) =>
+ if(labels exists (detour.isDefinedAt(_))) {
+ val newLabels = (labels map { lab => detour.getOrElse(lab, lab) })
+ b.replaceInstruction(idxLast, SWITCH(tags, newLabels))
+ }
+ case _ => ()
+ }
+ }
+ }
+ }
+
+ /* remove from all containers that may contain a reference to */
+ def elide(redu: BasicBlock) {
+ assert(m.startBlock != redu, "startBlock should have been re-wired by now")
+ m.code.removeBlock(redu);
+ }
+
+ var wasReduced = false
+ val entryPoints: List[BasicBlock] = m.startBlock :: (m.exh map (_.startBlock));
+
+ var elided = mutable.Set.empty[BasicBlock] // debug
+ var newTargets = mutable.Set.empty[BasicBlock] // debug
+
+ for (ep <- entryPoints) {
+ var reachable = directSuccStar(ep) // this list may contain blocks belonging to jump-chains that we'll skip over
+ while(reachable.nonEmpty) {
+ val h = reachable.head
+ reachable = reachable.tail
+ if(startsWithJump(h)) {
+ val detour = realTarget(h)
+ if(detour.nonEmpty) {
+ wasReduced = true
+ reachable = (reachable filterNot (detour.keySet.contains(_)))
+ rephraseGotos(detour)
+ detour.keySet foreach elide
+ elided ++= detour.keySet
+ newTargets ++= detour.values
+ }
+ }
+ }
+ }
+ assert(newTargets.intersect(elided).isEmpty, "contradiction: we just elided the final destionation of a jump-chain")
+
+ wasReduced
+ }
+
+ def normalize(m: IMethod) {
+ if(!m.hasCode) { return }
+ collapseJumpOnlyBlocks(m)
+ var wasReduced = false;
+ do {
+ wasReduced = false
+ // Prune from an exception handler those covered blocks which are jump-only.
+ wasReduced |= coverWhatCountsOnly(m); icodes.checkValid(m) // TODO should be unnecessary now that collapseJumpOnlyBlocks(m) is in place
+ // Prune exception handlers covering nothing.
+ wasReduced |= elimNonCoveringExh(m); icodes.checkValid(m)
+
+ // TODO see note in genExceptionHandlers about an ExceptionHandler.covered containing dead blocks (newNormal should remove them, but, where do those blocks come from?)
+ } while (wasReduced)
+
+ // TODO this would be a good time to remove synthetic local vars seeing no use, don't forget to call computeLocalVarsIndex() afterwards.
+ }
+
+ }
+
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
index 301dbd1..72b7e35 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Stephane Micheloud
*/
@@ -23,25 +23,21 @@ trait GenAndroid {
* `Parcelable` interface must also have a static field called `CREATOR`,
* which is an object implementing the `Parcelable.Creator` interface.
*/
- private val fieldName = "CREATOR"
+ private val fieldName = newTermName("CREATOR")
- private lazy val AndroidParcelableInterface =
- try definitions.getClass("android.os.Parcelable")
- catch { case _: FatalError => NoSymbol }
-
- private lazy val AndroidCreatorClass =
- if (AndroidParcelableInterface == NoSymbol) NoSymbol
- else definitions.getClass("android.os.Parcelable$Creator")
+ private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
+ private lazy val AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
def isAndroidParcelableClass(sym: Symbol) =
(AndroidParcelableInterface != NoSymbol) &&
- (sym.info.parents contains AndroidParcelableInterface.tpe)
+ (sym.parentSymbols contains AndroidParcelableInterface)
def addCreatorCode(codegen: BytecodeGenerator, block: BasicBlock) {
import codegen._
- val fieldSymbol = clasz.symbol.newValue(NoPosition, newTermName(fieldName))
- .setFlag(Flags.STATIC | Flags.FINAL)
- .setInfo(AndroidCreatorClass.tpe)
+ val fieldSymbol = (
+ clasz.symbol.newValue(newTermName(fieldName), NoPosition, Flags.STATIC | Flags.FINAL)
+ setInfo AndroidCreatorClass.tpe
+ )
val methodSymbol = definitions.getMember(clasz.symbol.companionModule, fieldName)
clasz addField new IField(fieldSymbol)
block emit CALL_METHOD(methodSymbol, Static(false))
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 4f4da18..36b294b 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -1,25 +1,24 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Iulian Dragos
*/
-
package scala.tools.nsc
package backend.jvm
-import java.io.{ DataOutputStream, OutputStream }
+import java.io.{ByteArrayOutputStream, DataOutputStream, OutputStream }
import java.nio.ByteBuffer
import scala.collection.{ mutable, immutable }
-import scala.reflect.generic.{ PickleFormat, PickleBuffer }
-import scala.tools.reflect.SigParser
-import scala.tools.nsc.io.{ AbstractFile, Path }
-import scala.tools.nsc.util.ScalaClassLoader
+import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
import scala.tools.nsc.symtab._
-import scala.tools.nsc.symtab.classfile.ClassfileConstants._
+import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
+import scala.reflect.internal.ClassfileConstants._
import ch.epfl.lamp.fjbg._
import JAccessFlags._
import JObjectType.{ JAVA_LANG_STRING, JAVA_LANG_OBJECT }
import java.util.jar.{ JarEntry, JarOutputStream }
+import scala.tools.nsc.io.AbstractFile
+import scala.language.postfixOps
/** This class ...
*
@@ -27,39 +26,17 @@ import java.util.jar.{ JarEntry, JarOutputStream }
* @version 1.0
*
*/
-abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with BytecodeWriters {
+abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with BytecodeWriters with GenJVMASM {
import global._
import icodes._
import icodes.opcodes._
- import definitions.{
- NullClass, RuntimeNullClass, NothingClass, RuntimeNothingClass,
- AnyClass, ObjectClass, ThrowsClass, ThrowableClass, ClassfileAnnotationClass,
- SerializableClass, StringClass, ClassClass, FunctionClass,
- DeprecatedAttr, SerializableAttr, SerialVersionUIDAttr, VolatileAttr,
- TransientAttr, CloneableAttr, RemoteAttr
- }
+ import definitions._
val phaseName = "jvm"
/** Create a new phase */
override def newPhase(p: Phase): Phase = new JvmPhase(p)
- private def outputDirectory(sym: Symbol): AbstractFile = (
- settings.outputDirs.outputDirFor {
- atPhase(currentRun.flattenPhase.prev)(sym.sourceFile)
- }
- )
- private def getFile(base: AbstractFile, cls: JClass, suffix: String): AbstractFile = {
- var dir = base
- val pathParts = cls.getName().split("[./]").toList
- for (part <- pathParts.init) {
- dir = dir.subdirectoryNamed(part)
- }
- dir.fileNamed(pathParts.last + suffix)
- }
- private def getFile(sym: Symbol, cls: JClass, suffix: String): AbstractFile =
- getFile(outputDirectory(sym), cls, suffix)
-
/** JVM code generation phase
*/
class JvmPhase(prev: Phase) extends ICodePhase(prev) {
@@ -70,31 +47,64 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
override def run() {
// we reinstantiate the bytecode generator at each run, to allow the GC
// to collect everything
- if (settings.debug.value) inform("[running phase " + name + " on icode]")
+ if (settings.debug.value)
+ inform("[running phase " + name + " on icode]")
+
if (settings.Xdce.value)
- for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym))
+ for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
+ log(s"Optimizer eliminated ${sym.fullNameString}")
icodes.classes -= sym
+ }
+
+ // For predictably ordered error messages.
+ val sortedClasses = classes.values.toList sortBy ("" + _.symbol.fullName)
+ val entryPoints = sortedClasses filter isJavaEntryPoint
val bytecodeWriter = settings.outputDirs.getSingleOutput match {
- case Some(f) if f hasExtension "jar" =>
- new DirectToJarfileWriter(f)
- case _ =>
- if (settings.Ygenjavap.isDefault) new ClassBytecodeWriter { }
+ case Some(f) if f hasExtension "jar" =>
+ // If no main class was specified, see if there's only one
+ // entry point among the classes going into the jar.
+ if (settings.mainClass.isDefault) {
+ entryPoints map (_.symbol fullName '.') match {
+ case Nil =>
+ log("No Main-Class designated or discovered.")
+ case name :: Nil =>
+ log("Unique entry point: setting Main-Class to " + name)
+ settings.mainClass.value = name
+ case names =>
+ log("No Main-Class due to multiple entry points:\n " + names.mkString("\n "))
+ }
+ }
+ else log("Main-Class was specified: " + settings.mainClass.value)
+
+ new DirectToJarfileWriter(f.file)
+
+ case _ =>
+ if (settings.Ygenjavap.isDefault) {
+ if(settings.Ydumpclasses.isDefault)
+ new ClassBytecodeWriter { }
+ else
+ new ClassBytecodeWriter with DumpBytecodeWriter { }
+ }
else new ClassBytecodeWriter with JavapBytecodeWriter { }
}
+
val codeGenerator = new BytecodeGenerator(bytecodeWriter)
- classes.values foreach (codeGenerator genClass _)
+ debuglog("Created new bytecode generator for " + classes.size + " classes.")
+
+ sortedClasses foreach { c =>
+ try codeGenerator.genClass(c)
+ catch {
+ case e: JCode.CodeSizeTooBigException =>
+ log("Skipped class %s because it has methods that are too long.".format(c))
+ }
+ }
+
bytecodeWriter.close()
classes.clear()
}
}
- /** Return the suffix of a class name */
- def moduleSuffix(sym: Symbol) =
- if (sym.hasModuleFlag && !sym.isMethod &&
- !sym.isImplClass && !sym.isJavaDefined) "$"
- else ""
-
var pickledBytes = 0 // statistics
/**
@@ -108,45 +118,91 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
val MIN_SWITCH_DENSITY = 0.7
val INNER_CLASSES_FLAGS =
- (ACC_PUBLIC | ACC_PRIVATE | ACC_PROTECTED | ACC_STATIC | ACC_FINAL | ACC_INTERFACE | ACC_ABSTRACT)
+ (ACC_PUBLIC | ACC_PRIVATE | ACC_PROTECTED | ACC_STATIC | ACC_INTERFACE | ACC_ABSTRACT)
val PublicStatic = ACC_PUBLIC | ACC_STATIC
val PublicStaticFinal = ACC_PUBLIC | ACC_STATIC | ACC_FINAL
- val StringBuilderClassName = definitions.StringBuilderClass.fullName
+ val StringBuilderClassName = javaName(definitions.StringBuilderClass)
val BoxesRunTime = "scala.runtime.BoxesRunTime"
- val StringBuilderType = new JObjectType(StringBuilderClassName)
- val toStringType = new JMethodType(JAVA_LANG_STRING, JType.EMPTY_ARRAY)
+ val StringBuilderType = new JObjectType(StringBuilderClassName) // TODO use ASMType.getObjectType
+ val toStringType = new JMethodType(JAVA_LANG_STRING, JType.EMPTY_ARRAY) // TODO use ASMType.getMethodType
val arrayCloneType = new JMethodType(JAVA_LANG_OBJECT, JType.EMPTY_ARRAY)
val MethodTypeType = new JObjectType("java.dyn.MethodType")
val JavaLangClassType = new JObjectType("java.lang.Class")
val MethodHandleType = new JObjectType("java.dyn.MethodHandle")
// Scala attributes
- val BeanInfoAttr = definitions.getClass("scala.reflect.BeanInfo")
- val BeanInfoSkipAttr = definitions.getClass("scala.reflect.BeanInfoSkip")
- val BeanDisplayNameAttr = definitions.getClass("scala.reflect.BeanDisplayName")
- val BeanDescriptionAttr = definitions.getClass("scala.reflect.BeanDescription")
-
- lazy val CloneableClass = definitions.getClass("java.lang.Cloneable")
- lazy val RemoteInterface = definitions.getClass("java.rmi.Remote")
- lazy val RemoteException = definitions.getClass("java.rmi.RemoteException").tpe
+ val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
+ val BeanInfoSkipAttr = rootMirror.getRequiredClass("scala.beans.BeanInfoSkip")
+ val BeanDisplayNameAttr = rootMirror.getRequiredClass("scala.beans.BeanDisplayName")
+ val BeanDescriptionAttr = rootMirror.getRequiredClass("scala.beans.BeanDescription")
+
+ // Additional interface parents based on annotations and other cues
+ def newParentForAttr(attr: Symbol): Option[Symbol] = attr match {
+ case SerializableAttr => Some(SerializableClass)
+ case CloneableAttr => Some(JavaCloneableClass)
+ case RemoteAttr => Some(RemoteInterfaceClass)
+ case _ => None
+ }
val versionPickle = {
val vp = new PickleBuffer(new Array[Byte](16), -1, 0)
- assert(vp.writeIndex == 0)
+ assert(vp.writeIndex == 0, vp)
vp writeNat PickleFormat.MajorVersion
vp writeNat PickleFormat.MinorVersion
vp writeNat 0
vp
}
+ private def helperBoxTo(kind: ValueTypeKind): Tuple2[String, JMethodType] = {
+ val boxedType = definitions.boxedClass(kind.toType.typeSymbol)
+ val mtype = new JMethodType(javaType(boxedType), Array(javaType(kind)))
+
+ Pair("boxTo" + boxedType.decodedName, mtype)
+ }
+
+ private val jBoxTo: Map[TypeKind, Tuple2[String, JMethodType]] = Map(
+ BOOL -> helperBoxTo(BOOL) ,
+ BYTE -> helperBoxTo(BYTE) ,
+ CHAR -> helperBoxTo(CHAR) ,
+ SHORT -> helperBoxTo(SHORT) ,
+ INT -> helperBoxTo(INT) ,
+ LONG -> helperBoxTo(LONG) ,
+ FLOAT -> helperBoxTo(FLOAT) ,
+ DOUBLE -> helperBoxTo(DOUBLE)
+ )
+
+ private def helperUnboxTo(kind: ValueTypeKind): Tuple2[String, JMethodType] = {
+ val mtype = new JMethodType(javaType(kind), Array(JAVA_LANG_OBJECT))
+ val mname = "unboxTo" + kind.toType.typeSymbol.decodedName
+
+ Pair(mname, mtype)
+ }
+
+ private val jUnboxTo: Map[TypeKind, Tuple2[String, JMethodType]] = Map(
+ BOOL -> helperUnboxTo(BOOL) ,
+ BYTE -> helperUnboxTo(BYTE) ,
+ CHAR -> helperUnboxTo(CHAR) ,
+ SHORT -> helperUnboxTo(SHORT) ,
+ INT -> helperUnboxTo(INT) ,
+ LONG -> helperUnboxTo(LONG) ,
+ FLOAT -> helperUnboxTo(FLOAT) ,
+ DOUBLE -> helperUnboxTo(DOUBLE)
+ )
+
var clasz: IClass = _
var method: IMethod = _
var jclass: JClass = _
var jmethod: JMethod = _
-// var jcode: JExtendedCode = _
+ // var jcode: JExtendedCode = _
+
+ def isParcelableClass = isAndroidParcelableClass(clasz.symbol)
+ def isRemoteClass = clasz.symbol hasAnnotation RemoteAttr
+ def serialVUID = clasz.symbol getAnnotation SerialVersionUIDAttr collect {
+ case AnnotationInfo(_, Literal(const) :: _, _) => const.longValue
+ }
val fjbgContext = new FJBGContext(49, 0)
@@ -154,6 +210,15 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
val emitLines = debugLevel >= 2
val emitVars = debugLevel >= 3
+ // bug had phase with wrong name; leaving enabled for brief pseudo deprecation
+ private val checkSignatures = (
+ (settings.check containsName phaseName)
+ || (settings.check.value contains "genjvm") && {
+ global.warning("This option will be removed: please use -Ycheck:%s, not -Ycheck:genjvm." format phaseName)
+ true
+ }
+ )
+
/** For given symbol return a symbol corresponding to a class that should be declared as inner class.
*
* For example:
@@ -168,7 +233,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
private def innerClassSymbolFor(s: Symbol): Symbol =
if (s.isClass) s else if (s.isModule) s.moduleClass else NoSymbol
- override def javaName(sym: Symbol): String = {
+ override def javaName(sym: Symbol): String = { // TODO Miguel says: check whether a single pass over `icodes.classes` can populate `innerClassBuffer` faster.
/**
* Checks if given symbol corresponds to inner class/object and add it to innerClassBuffer
*
@@ -176,13 +241,16 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
* of inner class all until root class.
*/
def collectInnerClass(s: Symbol): Unit = {
- // TODO: something atPhase(currentRun.flattenPhase.prev) which accounts for
+ // TODO: some beforeFlatten { ... } which accounts for
// being nested in parameterized classes (if we're going to selectively flatten.)
val x = innerClassSymbolFor(s)
- val isInner = x.isClass && !x.rawowner.isPackageClass
- if (isInner) {
- innerClassBuffer += x
- collectInnerClass(x.rawowner)
+ if(x ne NoSymbol) {
+ assert(x.isClass, "not an inner-class symbol")
+ val isInner = !x.rawowner.isPackageClass
+ if (isInner) {
+ innerClassBuffer += x
+ collectInnerClass(x.rawowner)
+ }
}
}
collectInnerClass(sym)
@@ -198,7 +266,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
*/
def emitClass(jclass: JClass, sym: Symbol) {
addInnerClasses(jclass)
- writeClass("" + sym.name, jclass, sym)
+ writeClass("" + sym.name, jclass.getName(), toByteArray(jclass), sym)
}
/** Returns the ScalaSignature annotation if it must be added to this class,
@@ -222,7 +290,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
*/
def scalaSignatureAddingMarker(jclass: JClass, sym: Symbol): Option[AnnotationInfo] =
currentRun.symData get sym match {
- case Some(pickle) if !jclass.getName().endsWith("$") =>
+ case Some(pickle) if !nme.isModuleName(newTermName(jclass.getName)) =>
val scalaAttr =
fjbgContext.JOtherAttribute(jclass, jclass, tpnme.ScalaSignatureATTR.toString,
versionPickle.bytes, versionPickle.writeIndex)
@@ -242,71 +310,57 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
None
}
- var serialVUID: Option[Long] = None
- var isRemoteClass: Boolean = false
- var isParcelableClass = false
+ private var innerClassBuffer = mutable.LinkedHashSet[Symbol]()
- private val innerClassBuffer = new mutable.ListBuffer[Symbol]
-
- /** Drop redundant interfaces (ones which are implemented by some
- * other parent) from the immediate parents. This is important on
- * android because there is otherwise an interface explosion.
+ /** Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
+ * This is important on Android because there is otherwise an interface explosion.
*/
- private def minimizeInterfaces(interfaces: List[Symbol]): List[Symbol] = (
- interfaces filterNot (int1 =>
- interfaces exists (int2 =>
- (int1 ne int2) && (int2 isSubClass int1)
- )
- )
- )
+ private def minimizeInterfaces(interfaces: List[Symbol]): List[Symbol] = {
+ var rest = interfaces
+ var leaves = List.empty[Symbol]
+ while(!rest.isEmpty) {
+ val candidate = rest.head
+ val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
+ if(!nonLeaf) {
+ leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
+ }
+ rest = rest.tail
+ }
+
+ leaves
+ }
def genClass(c: IClass) {
clasz = c
innerClassBuffer.clear()
- var parents = c.symbol.info.parents
- var ifaces = JClass.NO_INTERFACES
val name = javaName(c.symbol)
- serialVUID = None
- isRemoteClass = false
- isParcelableClass = isAndroidParcelableClass(c.symbol)
-
- if (parents.isEmpty)
- parents = List(ObjectClass.tpe)
-
- for (annot <- c.symbol.annotations) annot match {
- case AnnotationInfo(tp, _, _) if tp.typeSymbol == SerializableAttr =>
- parents :+= SerializableClass.tpe
- case AnnotationInfo(tp, _, _) if tp.typeSymbol == CloneableAttr =>
- parents :+= CloneableClass.tpe
- case AnnotationInfo(tp, Literal(const) :: _, _) if tp.typeSymbol == SerialVersionUIDAttr =>
- serialVUID = Some(const.longValue)
- case AnnotationInfo(tp, _, _) if tp.typeSymbol == RemoteAttr =>
- parents :+= RemoteInterface.tpe
- isRemoteClass = true
- case _ =>
- }
- parents = parents.distinct
+ val ps = c.symbol.info.parents
+
+ val superClass: Symbol = if(ps.isEmpty) ObjectClass else ps.head.typeSymbol;
- if (parents.tail.nonEmpty)
- ifaces = mkArray(minimizeInterfaces(parents drop 1 map (_.typeSymbol)) map javaName)
+ val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses;
+ val superInterfaces = superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol)) distinct
+
+ val ifaces =
+ if(superInterfaces.isEmpty) JClass.NO_INTERFACES
+ else mkArray(minimizeInterfaces(superInterfaces) map javaName)
jclass = fjbgContext.JClass(javaFlags(c.symbol),
name,
- javaName(parents(0).typeSymbol),
+ javaName(superClass),
ifaces,
c.cunit.source.toString)
- if (isStaticModule(c.symbol) || serialVUID != None || isParcelableClass ||
- clasz.bootstrapClass.isDefined) {
+ if (isStaticModule(c.symbol) || serialVUID != None || isParcelableClass) {
if (isStaticModule(c.symbol))
addModuleInstanceField
addStaticInit(jclass, c.lookupStaticCtor)
if (isTopLevelModule(c.symbol)) {
if (c.symbol.companionClass == NoSymbol)
- dumpMirrorClass(c.symbol, c.cunit.source.toString)
+ generateMirrorClass(c.symbol, c.cunit.source)
else
log("No mirror class for module with linked class: " +
c.symbol.fullName)
@@ -317,7 +371,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
// it must be a top level class (name contains no $s)
def isCandidateForForwarders(sym: Symbol): Boolean =
- atPhase(currentRun.picklerPhase.next) {
+ afterPickler {
!(sym.name.toString contains '$') && sym.hasModuleFlag && !sym.isImplClass && !sym.isNestedClass
}
@@ -341,16 +395,12 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
}
}
- if (clasz.bootstrapClass.isDefined)
- jclass setBootstrapClass clasz.bootstrapClass.get
-
clasz.fields foreach genField
clasz.methods foreach genMethod
val ssa = scalaSignatureAddingMarker(jclass, c.symbol)
addGenericSignature(jclass, c.symbol, c.symbol.owner)
addAnnotations(jclass, c.symbol.annotations ++ ssa)
-
addEnclosingMethodAttribute(jclass, c.symbol)
emitClass(jclass, c.symbol)
@@ -361,7 +411,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
private def addEnclosingMethodAttribute(jclass: JClass, clazz: Symbol) {
val sym = clazz.originalEnclosingMethod
if (sym.isMethod) {
- log("enclosing method for %s is %s (in %s)".format(clazz, sym, sym.enclClass))
+ debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, sym.enclClass))
jclass addAttribute fjbgContext.JEnclosingMethodAttribute(
jclass,
javaName(sym.enclClass),
@@ -370,14 +420,14 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
)
} else if (clazz.isAnonymousClass) {
val enclClass = clazz.rawowner
- assert(enclClass.isClass, "" + enclClass)
+ assert(enclClass.isClass, enclClass)
val sym = enclClass.primaryConstructor
if (sym == NoSymbol)
log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(
enclClass, clazz)
)
else {
- log("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass))
+ debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass))
jclass addAttribute fjbgContext.JEnclosingMethodAttribute(
jclass,
javaName(enclClass),
@@ -388,25 +438,33 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
}
}
+ private def toByteArray(jc: JClass): Array[Byte] = {
+ val bos = new java.io.ByteArrayOutputStream()
+ val dos = new java.io.DataOutputStream(bos)
+ jc.writeTo(dos)
+ dos.close()
+ bos.toByteArray
+ }
+
/**
* Generate a bean info class that describes the given class.
*
* @author Ross Judson (ross.judson at soletta.com)
*/
def genBeanInfoClass(c: IClass) {
- val description = c.symbol.annotations.find(_.atp.typeSymbol == BeanDescriptionAttr)
+ val description = c.symbol getAnnotation BeanDescriptionAttr
// informProgress(description.toString)
val beanInfoClass = fjbgContext.JClass(javaFlags(c.symbol),
javaName(c.symbol) + "BeanInfo",
- "scala/reflect/ScalaBeanInfo",
+ "scala/beans/ScalaBeanInfo",
JClass.NO_INTERFACES,
c.cunit.source.toString)
var fieldList = List[String]()
for (f <- clasz.fields if f.symbol.hasGetter;
- val g = f.symbol.getter(c.symbol);
- val s = f.symbol.setter(c.symbol);
+ g = f.symbol.getter(c.symbol);
+ s = f.symbol.setter(c.symbol);
if g.isPublic && !(f.symbol.name startsWith "$")) // inserting $outer breaks the bean
fieldList = javaName(f.symbol) :: javaName(g) :: (if (s != NoSymbol) javaName(s) else null) :: fieldList
val methodList =
@@ -441,7 +499,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
// push the class
jcode emitPUSH javaType(c.symbol).asInstanceOf[JReferenceType]
- // push the the string array of field information
+ // push the string array of field information
jcode emitPUSH fieldList.length
jcode emitANEWARRAY strKind
push(fieldList)
@@ -453,11 +511,11 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
// invoke the superclass constructor, which will do the
// necessary java reflection and create Method objects.
- jcode.emitINVOKESPECIAL("scala/reflect/ScalaBeanInfo", "<init>", conType)
+ jcode.emitINVOKESPECIAL("scala/beans/ScalaBeanInfo", "<init>", conType)
jcode.emitRETURN()
// write the bean information class file.
- writeClass("BeanInfo ", beanInfoClass, c.symbol)
+ writeClass("BeanInfo ", beanInfoClass.getName(), toByteArray(beanInfoClass), c.symbol)
}
/** Add the given 'throws' attributes to jmethod */
@@ -471,15 +529,14 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
// put some random value; the actual number is determined at the end
buf putShort 0xbaba.toShort
- for (AnnotationInfo(tp, List(exc), _) <- excs.distinct if tp.typeSymbol == ThrowsClass) {
- val Literal(const) = exc
+ for (ThrownException(exc) <- excs.distinct) {
buf.putShort(
cpool.addClass(
- javaName(const.typeValue.typeSymbol)).shortValue)
+ javaName(exc)).shortValue)
nattr += 1
}
- assert(nattr > 0)
+ assert(nattr > 0, nattr)
buf.putShort(0, nattr.toShort)
addAttribute(jmethod, tpnme.ExceptionsATTR, buf)
}
@@ -488,8 +545,8 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
* .initialize: if 'annot' is read from pickle, atp might be un-initialized
*/
private def shouldEmitAnnotation(annot: AnnotationInfo) =
- annot.atp.typeSymbol.initialize.isJavaDefined &&
- annot.atp.typeSymbol.isNonBottomSubClass(ClassfileAnnotationClass) &&
+ annot.symbol.initialize.isJavaDefined &&
+ annot.matches(ClassfileAnnotationClass) &&
annot.args.isEmpty
private def emitJavaAnnotations(cpool: JConstantPool, buf: ByteBuffer, annotations: List[AnnotationInfo]): Int = {
@@ -523,7 +580,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
case StringTag =>
buf put 's'.toByte
buf putShort cpool.addUtf8(const.stringValue).toShort
- case ClassTag =>
+ case ClazzTag =>
buf put 'c'.toByte
buf putShort cpool.addUtf8(javaType(const.typeValue).getSignature()).toShort
case EnumTag =>
@@ -561,7 +618,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
val AnnotationInfo(typ, args, assocs) = annotInfo
val jtype = javaType(typ)
buf putShort cpool.addUtf8(jtype.getSignature()).toShort
- assert(args.isEmpty, args.toString)
+ assert(args.isEmpty, args)
buf putShort assocs.length.toShort
for ((name, value) <- assocs) {
buf putShort cpool.addUtf8(name.toString).toShort
@@ -585,14 +642,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
nannots
}
- /** Run the signature parser to catch bogus signatures.
- */
- def isValidSignature(sym: Symbol, sig: String) = (
- if (sym.isMethod) SigParser verifyMethod sig
- else if (sym.isTerm) SigParser verifyType sig
- else SigParser verifyClass sig
- )
-
// @M don't generate java generics sigs for (members of) implementation
// classes, as they are monomorphic (TODO: ok?)
private def needsGenericSignature(sym: Symbol) = !(
@@ -602,50 +651,38 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
// without it. This is particularly bad because the availability of
// generic information could disappear as a consequence of a seemingly
// unrelated change.
- sym.isSynthetic
+ settings.Ynogenericsig.value
+ || sym.isArtifact
|| sym.isLiftedMethod
|| sym.isBridge
|| (sym.ownerChain exists (_.isImplClass))
)
def addGenericSignature(jmember: JMember, sym: Symbol, owner: Symbol) {
if (needsGenericSignature(sym)) {
- val memberTpe = atPhase(currentRun.erasurePhase)(owner.thisType.memberInfo(sym))
- // println("addGenericSignature sym: " + sym.fullName + " : " + memberTpe + " sym.info: " + sym.info)
- // println("addGenericSignature: "+ (sym.ownerChain map (x => (x.name, x.isImplClass))))
+ val memberTpe = beforeErasure(owner.thisType.memberInfo(sym))
+
erasure.javaSig(sym, memberTpe) foreach { sig =>
- /** Since we're using a sun internal class for signature validation,
- * we have to allow for it not existing or otherwise malfunctioning:
- * in which case we treat every signature as valid. Medium term we
- * should certainly write independent signature validation.
- */
- if (settings.Xverify.value && SigParser.isParserAvailable && !isValidSignature(sym, sig)) {
- clasz.cunit.warning(sym.pos,
- """|compiler bug: created invalid generic signature for %s in %s
- |signature: %s
- |if this is reproducible, please report bug at http://lampsvn.epfl.ch/trac/scala
- """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig))
- return
- }
- if ((settings.check.value contains "genjvm")) {
- val normalizedTpe = atPhase(currentRun.erasurePhase)(erasure.prepareSigMap(memberTpe))
+ // This seems useful enough in the general case.
+ log(sig)
+ if (checkSignatures) {
+ val normalizedTpe = beforeErasure(erasure.prepareSigMap(memberTpe))
val bytecodeTpe = owner.thisType.memberInfo(sym)
- if (!sym.isType && !sym.isConstructor && !(erasure.erasure(normalizedTpe) =:= bytecodeTpe)) {
+ if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
clasz.cunit.warning(sym.pos,
"""|compiler bug: created generic signature for %s in %s that does not conform to its erasure
|signature: %s
|original type: %s
|normalized type: %s
|erasure type: %s
- |if this is reproducible, please report bug at http://lampsvn.epfl.ch/trac/scala
+ |if this is reproducible, please report bug at https://issues.scala-lang.org/
""".trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig, memberTpe, normalizedTpe, bytecodeTpe))
return
}
}
val index = jmember.getConstantPool.addUtf8(sig).toShort
if (opt.verboseDebug)
- atPhase(currentRun.erasurePhase) {
- println("add generic sig "+sym+":"+sym.info+" ==> "+sig+" @ "+index)
- }
+ beforeErasure(println("add generic sig "+sym+":"+sym.info+" ==> "+sig+" @ "+index))
+
val buf = ByteBuffer.allocate(2)
buf putShort index
addAttribute(jmember, tpnme.SignatureATTR, buf)
@@ -654,7 +691,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
}
def addAnnotations(jmember: JMember, annotations: List[AnnotationInfo]) {
- if (annotations.exists(_.atp.typeSymbol == definitions.DeprecatedAttr)) {
+ if (annotations exists (_ matches definitions.DeprecatedAttr)) {
val attr = jmember.getContext().JOtherAttribute(
jmember.getJClass(), jmember, tpnme.DeprecatedATTR.toString,
new Array[Byte](0), 0)
@@ -708,72 +745,66 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
null
else {
val outerName = javaName(innerSym.rawowner)
- if (isTopLevelModule(innerSym.rawowner)) outerName stripSuffix "$"
+ if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
else outerName
}
}
+
def innerName(innerSym: Symbol): String =
if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
null
else
- innerSym.rawname + moduleSuffix(innerSym)
+ innerSym.rawname + innerSym.moduleSuffix
// add inner classes which might not have been referenced yet
- atPhase(currentRun.erasurePhase.next) {
+ afterErasure {
for (sym <- List(clasz.symbol, clasz.symbol.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
innerClassBuffer += m
}
val allInners = innerClassBuffer.toList
if (allInners.nonEmpty) {
+ debuglog(clasz.symbol.fullName('.') + " contains " + allInners.size + " inner classes.")
val innerClassesAttr = jclass.getInnerClasses()
// sort them so inner classes succeed their enclosing class
// to satisfy the Eclipse Java compiler
- //for (innerSym <- innerClasses.toList sortBy (_.name.length)) {
- for (innerSym <- allInners.distinct sortBy (_.name.length)) {
- var flags = javaFlags(innerSym)
- if (innerSym.rawowner.hasModuleFlag)
- flags |= ACC_STATIC
-
- innerClassesAttr.addEntry(
- javaName(innerSym),
- outerName(innerSym),
- innerName(innerSym),
- flags & INNER_CLASSES_FLAGS
+ for (innerSym <- allInners sortBy (_.name.length)) {
+ val flags = {
+ val staticFlag = if (innerSym.rawowner.hasModuleFlag) ACC_STATIC else 0
+ (javaFlags(innerSym) | staticFlag) & INNER_CLASSES_FLAGS
+ }
+ val jname = javaName(innerSym)
+ val oname = outerName(innerSym)
+ val iname = innerName(innerSym)
+
+ // Mimicking javap inner class output
+ debuglog(
+ if (oname == null || iname == null) "//class " + jname
+ else "//%s=class %s of class %s".format(iname, jname, oname)
)
+
+ innerClassesAttr.addEntry(jname, oname, iname, flags)
}
}
}
def genField(f: IField) {
- if (settings.debug.value)
- log("Adding field: " + f.symbol.fullName)
+ debuglog("Adding field: " + f.symbol.fullName)
- val attributes = f.symbol.annotations.map(_.atp.typeSymbol).foldLeft(0) {
- case (res, TransientAttr) => res | ACC_TRANSIENT
- case (res, VolatileAttr) => res | ACC_VOLATILE
- case (res, _) => res
- }
-
- var flags = javaFlags(f.symbol)
- // Make sure ACC_FINAL is only on eager vals.
- if (f.symbol.isMutable) flags &= ~ACC_FINAL
- else flags |= ACC_FINAL
-
- val jfield =
- jclass.addNewField(flags | attributes,
- javaName(f.symbol),
- javaType(f.symbol.tpe))
+ val jfield = jclass.addNewField(
+ javaFieldFlags(f.symbol),
+ javaName(f.symbol),
+ javaType(f.symbol.tpe)
+ )
addGenericSignature(jfield, f.symbol, clasz.symbol)
addAnnotations(jfield, f.symbol.annotations)
}
def genMethod(m: IMethod) {
- if (m.symbol.isStaticConstructor) return
- if ((m.symbol.name == nme.getClass_) && m.params.isEmpty) return
+ if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return
- log("Generating method " + m.symbol.fullName)
+ debuglog("Generating method " + m.symbol.fullName)
method = m
endPC.clear
computeLocalVarsIndex(m)
@@ -810,7 +841,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (outerField != NoSymbol) {
log("Adding fake local to represent outer 'this' for closure " + clasz)
val _this = new Local(
- method.symbol.newVariable(NoPosition, nme.FAKE_LOCAL_THIS), toTypeKind(outerField.tpe), false)
+ method.symbol.newVariable(nme.FAKE_LOCAL_THIS), toTypeKind(outerField.tpe), false)
m.locals = m.locals ::: List(_this)
computeLocalVarsIndex(m) // since we added a new local, we need to recompute indexes
@@ -823,8 +854,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
}
for (local <- m.locals if ! m.params.contains(local)) {
- if (settings.debug.value)
- log("add local var: " + local)
+ debuglog("add local var: " + local)
jmethod.addNewLocalVariable(javaType(local.kind), javaName(local.sym))
}
@@ -834,41 +864,32 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
}
addGenericSignature(jmethod, m.symbol, clasz.symbol)
- val (excs, others) = splitAnnotations(m.symbol.annotations, ThrowsClass)
+ val (excs, others) = m.symbol.annotations partition (_.symbol == ThrowsClass)
addExceptionsAttribute(jmethod, excs)
addAnnotations(jmethod, others)
addParamAnnotations(jmethod, m.params.map(_.sym.annotations))
- }
-
- private def addRemoteException(jmethod: JMethod, meth: Symbol) {
- def isRemoteThrows(ainfo: AnnotationInfo) = ainfo match {
- case AnnotationInfo(tp, List(arg), _) if tp.typeSymbol == ThrowsClass =>
- arg match {
- case Literal(Constant(tpe: Type)) if tpe.typeSymbol == RemoteException.typeSymbol => true
- case _ => false
- }
- case _ => false
- }
- if (isRemoteClass ||
- (meth.hasAnnotation(RemoteAttr) && jmethod.isPublic)) {
- val c = Constant(RemoteException)
- val ainfo = AnnotationInfo(ThrowsClass.tpe, List(Literal(c).setType(c.tpe)), List())
- if (!meth.annotations.exists(isRemoteThrows)) {
- meth addAnnotation ainfo
- }
+ // check for code size
+ try jmethod.freeze()
+ catch {
+ case e: JCode.CodeSizeTooBigException =>
+ clasz.cunit.error(m.symbol.pos, "Code size exceeds JVM limits: %d".format(e.codeSize))
+ throw e
}
}
-
- /** Return a pair of lists of annotations, first one containing all
- * annotations for the given symbol, and the rest.
+ /** Adds a @remote annotation, actual use unknown.
*/
- private def splitAnnotations(annotations: List[AnnotationInfo], annotSym: Symbol): (List[AnnotationInfo], List[AnnotationInfo]) = {
- annotations.partition { a => a match {
- case AnnotationInfo(tp, _, _) if tp.typeSymbol == annotSym => true
- case _ => false
- }}
+ private def addRemoteException(jmethod: JMethod, meth: Symbol) {
+ val needsAnnotation = (
+ (isRemoteClass || (meth hasAnnotation RemoteAttr) && jmethod.isPublic)
+ && !(meth.throwsAnnotations contains RemoteExceptionClass)
+ )
+ if (needsAnnotation) {
+ val c = Constant(RemoteExceptionClass.tpe)
+ val arg = Literal(c) setType c.tpe
+ meth.addAnnotation(appliedType(ThrowsClass, c.tpe), arg)
+ }
}
private def isClosureApply(sym: Symbol): Boolean = {
@@ -896,10 +917,8 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
mopt match {
case Some(m) =>
- if (clasz.bootstrapClass.isDefined) legacyEmitBootstrapMethodInstall(clinit)
-
- val oldLastBlock = m.code.blocks.last
- val lastBlock = m.code.newBlock
+ val oldLastBlock = m.lastBlock
+ val lastBlock = m.newBlock()
oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
if (isStaticModule(clasz.symbol)) {
@@ -912,9 +931,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
serialVUID foreach { value =>
import Flags._, definitions._
val fieldName = "serialVersionUID"
- val fieldSymbol = clasz.symbol.newValue(NoPosition, newTermName(fieldName))
- .setFlag(STATIC | FINAL)
- .setInfo(longType)
+ val fieldSymbol = clasz.symbol.newValue(newTermName(fieldName), NoPosition, STATIC | FINAL) setInfo LongClass.tpe
clasz addField new IField(fieldSymbol)
lastBlock emit CONSTANT(Constant(value))
lastBlock emit STORE_FIELD(fieldSymbol, true)
@@ -923,11 +940,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (isParcelableClass)
addCreatorCode(BytecodeGenerator.this, lastBlock)
- if (clasz.bootstrapClass.isDefined) {
- // emit bootstrap method install
- //emitBootstrapMethodInstall(block)
- }
-
lastBlock emit RETURN(UNIT)
lastBlock.close
@@ -958,37 +970,26 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (isParcelableClass)
legacyAddCreatorCode(BytecodeGenerator.this, clinit)
- if (clasz.bootstrapClass.isDefined)
- legacyEmitBootstrapMethodInstall(clinit)
-
clinit.emitRETURN()
}
- /** Emit code that installs a boostrap method for invoke dynamic. It
- * installs the default method, found in scala.runtime.DynamicDispatch.
- */
- def legacyEmitBootstrapMethodInstall(jcode: JExtendedCode) {
- jcode emitPUSH jclass.getType.asInstanceOf[JReferenceType]
- jcode emitPUSH new JObjectType("scala.runtime.DynamicDispatch")
- jcode emitPUSH "bootstrapInvokeDynamic"
- jcode.emitGETSTATIC("java.dyn.Linkage", "BOOTSTRAP_METHOD_TYPE", MethodTypeType)
- jcode.emitDUP
- jcode.emitINVOKESTATIC("scala.Console", "println", new JMethodType(JType.VOID, Array(JAVA_LANG_OBJECT)))
- jcode.emitINVOKESTATIC("java.dyn.MethodHandles", "findStatic",
- new JMethodType(MethodHandleType, Array(JavaLangClassType, JAVA_LANG_STRING, MethodTypeType)))
- jcode.emitINVOKESTATIC("java.dyn.Linkage", "registerBootstrapMethod",
- new JMethodType(JType.VOID, Array(JavaLangClassType, MethodHandleType)))
- }
-
/** Add a forwarder for method m */
- def addForwarder(jclass: JClass, module: Symbol, m: Symbol, accessFlags: Int) {
+ def addForwarder(jclass: JClass, module: Symbol, m: Symbol) {
val moduleName = javaName(module)
val methodInfo = module.thisType.memberInfo(m)
val paramJavaTypes = methodInfo.paramTypes map javaType
val paramNames = 0 until paramJavaTypes.length map ("x_" + _)
+ // TODO: evaluate the other flags we might be dropping on the floor here.
+ val flags = PublicStatic | (
+ if (m.isVarargsMethod) ACC_VARARGS else 0
+ )
+ /** Forwarders must not be marked final, as the JVM will not allow
+ * redefinition of a final static method, and we don't know what classes
+ * might be subclassing the companion class. See SI-4827.
+ */
val mirrorMethod = jclass.addNewMethod(
- accessFlags,
+ flags,
javaName(m),
javaType(methodInfo.resultType),
mkArray(paramJavaTypes),
@@ -1015,86 +1016,84 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (!m.isDeferred)
addGenericSignature(mirrorMethod, m, module)
- val (throws, others) = splitAnnotations(m.annotations, ThrowsClass)
+ val (throws, others) = m.annotations partition (_.symbol == ThrowsClass)
addExceptionsAttribute(mirrorMethod, throws)
addAnnotations(mirrorMethod, others)
addParamAnnotations(mirrorMethod, m.info.params.map(_.annotations))
}
- /** Add forwarders for all methods defined in `module' that don't conflict
- * with methods in the companion class of `module'. A conflict arises when
+ /** Add forwarders for all methods defined in `module` that don't conflict
+ * with methods in the companion class of `module`. A conflict arises when
* a method with the same name is defined both in a class and its companion
* object: method signature is not taken into account.
*/
def addForwarders(jclass: JClass, moduleClass: Symbol) {
- assert(moduleClass.isModuleClass)
- if (settings.debug.value)
- log("Dumping mirror class for object: " + moduleClass)
+ assert(moduleClass.isModuleClass, moduleClass)
+ debuglog("Dumping mirror class for object: " + moduleClass)
val className = jclass.getName
val linkedClass = moduleClass.companionClass
val linkedModule = linkedClass.companionSymbol
+ lazy val conflictingNames: Set[Name] = {
+ linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name } toSet
+ }
+ debuglog("Potentially conflicting names for forwarders: " + conflictingNames)
- /** If we use the usual algorithm for forwarders, we run into a problem if
- * an object extends its companion class. However, there is an out: since
- * all the forwarders are static, inheriting from the class is no problem
- * so long as the methods aren't final (the JVM will not allow redefinition
- * of a final static method.) Thus the following.
- */
- val isIncestuous = moduleClass.tpe <:< linkedClass.tpe
- val accessFlags = if (isIncestuous) PublicStatic else PublicStaticFinal
-
- /** There was a bit of a gordian logic knot here regarding forwarders.
- * All we really have to do is exclude certain categories of symbols and
- * then all matching names.
- */
- def memberNames(sym: Symbol) = sym.info.members map (_.name.toString) toSet
- lazy val membersInCommon =
- memberNames(linkedModule) intersect memberNames(linkedClass)
-
- /** Should method `m' get a forwarder in the mirror class? */
- def shouldForward(m: Symbol): Boolean = (
- m.owner != ObjectClass
- && m.isMethod
- && m.isPublic
- && !m.hasFlag(Flags.CASE | Flags.DEFERRED | Flags.SPECIALIZED | Flags.LIFTED)
- && !m.isConstructor
- && !m.isStaticMember
- && !membersInCommon(m.name.toString)
- )
-
- for (m <- moduleClass.info.nonPrivateMembers) {
- if (shouldForward(m)) {
+ for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flags.METHOD)) {
+ if (m.isType || m.isDeferred || (m.owner eq ObjectClass) || m.isConstructor)
+ debuglog("No forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
+ else if (conflictingNames(m.name))
+ log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name))
+ else {
log("Adding static forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
- addForwarder(jclass, moduleClass, m, accessFlags)
- }
- else if (settings.debug.value) {
- log("No forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
+ addForwarder(jclass, moduleClass, m)
}
}
}
- /** Dump a mirror class for a top-level module. A mirror class is a class
+ /** Generate a mirror class for a top-level module. A mirror class is a class
* containing only static methods that forward to the corresponding method
* on the MODULE instance of the given Scala object. It will only be
* generated if there is no companion class: if there is, an attempt will
* instead be made to add the forwarder methods to the companion class.
*/
- def dumpMirrorClass(clasz: Symbol, sourceFile: String) {
+ def generateMirrorClass(clasz: Symbol, sourceFile: SourceFile) {
import JAccessFlags._
+ /* We need to save inner classes buffer and create a new one to make sure
+ * that we do confuse inner classes of the class we mirror with inner
+ * classes of the class we are mirroring. These two sets can be different
+ * as seen in this case:
+ *
+ * class A {
+ * class B
+ * def b: B = new B
+ * }
+ * object C extends A
+ *
+ * Here mirror class of C has a static forwarder for (inherited) method `b`
+ * therefore it refers to class `B` and needs InnerClasses entry. However,
+ * the real class for `C` (named `C$`) is empty and does not refer to `B`
+ * thus does not need InnerClasses entry it.
+ *
+ * NOTE: This logic has been refactored in GenASM and everything is
+ * implemented in a much cleaner way by having two separate buffers.
+ */
+ val savedInnerClasses = innerClassBuffer
+ innerClassBuffer = mutable.LinkedHashSet[Symbol]()
val moduleName = javaName(clasz) // + "$"
val mirrorName = moduleName.substring(0, moduleName.length() - 1)
val mirrorClass = fjbgContext.JClass(ACC_SUPER | ACC_PUBLIC | ACC_FINAL,
mirrorName,
JAVA_LANG_OBJECT.getName,
JClass.NO_INTERFACES,
- sourceFile)
+ "" + sourceFile)
log("Dumping mirror class for '%s'".format(mirrorClass.getName))
addForwarders(mirrorClass, clasz)
val ssa = scalaSignatureAddingMarker(mirrorClass, clasz.companionSymbol)
addAnnotations(mirrorClass, clasz.annotations ++ ssa)
emitClass(mirrorClass, clasz)
+ innerClassBuffer = savedInnerClasses
}
var linearization: List[BasicBlock] = Nil
@@ -1107,8 +1106,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
val jcode = jmethod.getCode.asInstanceOf[JExtendedCode]
def makeLabels(bs: List[BasicBlock]) = {
- if (settings.debug.value)
- log("Making labels for: " + method)
+ debuglog("Making labels for: " + method)
mutable.HashMap(bs map (_ -> jcode.newLabel) : _*)
}
@@ -1117,8 +1115,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
linearization = linearizer.linearize(m)
val labels = makeLabels(linearization)
- /** local variables whose scope appears in this block. */
- val varsInBlock: mutable.Set[Local] = new mutable.HashSet
var nextBlock: BasicBlock = linearization.head
@@ -1128,294 +1124,297 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
}
- /** Generate exception handlers for the current method. */
- def genExceptionHandlers() {
+ /** Generate exception handlers for the current method. */
+ def genExceptionHandlers() {
- /** Return a list of pairs of intervals where the handler is active.
- * The intervals in the list have to be inclusive in the beginning and
- * exclusive in the end: [start, end).
- */
- def ranges(e: ExceptionHandler): List[(Int, Int)] = {
- var covered = e.covered
- var ranges: List[(Int, Int)] = Nil
- var start = -1
- var end = -1
-
- linearization foreach { b =>
- if (! (covered contains b) ) {
- if (start >= 0) { // we're inside a handler range
- end = labels(b).getAnchor()
- ranges ::= (start, end)
- start = -1
+ /** Return a list of pairs of intervals where the handler is active.
+ * The intervals in the list have to be inclusive in the beginning and
+ * exclusive in the end: [start, end).
+ */
+ def ranges(e: ExceptionHandler): List[(Int, Int)] = {
+ var covered = e.covered
+ var ranges: List[(Int, Int)] = Nil
+ var start = -1
+ var end = -1
+
+ linearization foreach { b =>
+ if (! (covered contains b) ) {
+ if (start >= 0) { // we're inside a handler range
+ end = labels(b).getAnchor()
+ ranges ::= ((start, end))
+ start = -1
+ }
+ } else {
+ if (start < 0) // we're not inside a handler range
+ start = labels(b).getAnchor()
+
+ end = endPC(b)
+ covered -= b
}
- } else {
- if (start < 0) // we're not inside a handler range
- start = labels(b).getAnchor()
+ }
- end = endPC(b)
- covered -= b
+ /* Add the last interval. Note that since the intervals are
+ * open-ended to the right, we have to give a number past the actual
+ * code!
+ */
+ if (start >= 0) {
+ ranges ::= ((start, jcode.getPC()))
}
- }
- /* Add the last interval. Note that since the intervals are
- * open-ended to the right, we have to give a number past the actual
- * code!
- */
- if (start >= 0) {
- ranges ::= (start, jcode.getPC())
+ if (!covered.isEmpty)
+ debuglog("Some covered blocks were not found in method: " + method +
+ " covered: " + covered + " not in " + linearization)
+ ranges
}
- if (!covered.isEmpty)
- if (settings.debug.value)
- log("Some covered blocks were not found in method: " + method +
- " covered: " + covered + " not in " + linearization)
- ranges
+ for (e <- this.method.exh ; p <- ranges(e).sortBy(_._1)) {
+ if (p._1 < p._2) {
+ debuglog("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
+ " from: " + p._1 + " to: " + p._2 + " catching: " + e.cls);
+ val cls = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
+ else javaName(e.cls)
+ jcode.addExceptionHandler(p._1, p._2,
+ labels(e.startBlock).getAnchor(),
+ cls)
+ } else
+ log("Empty exception range: " + p)
+ }
}
- for (e <- this.method.exh ; p <- ranges(e).sortBy(_._1)) {
- if (p._1 < p._2) {
- if (settings.debug.value)
- log("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
- " from: " + p._1 + " to: " + p._2 + " catching: " + e.cls);
- val cls = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
- else javaName(e.cls)
- jcode.addExceptionHandler(p._1, p._2,
- labels(e.startBlock).getAnchor(),
- cls)
- } else
- log("Empty exception range: " + p)
+ def isAccessibleFrom(target: Symbol, site: Symbol): Boolean = {
+ target.isPublic || target.isProtected && {
+ (site.enclClass isSubClass target.enclClass) ||
+ (site.enclosingPackage == target.privateWithin)
+ }
}
- }
- def genBlock(b: BasicBlock) {
- labels(b).anchorToNext()
+ def genCallMethod(call: CALL_METHOD) {
+ val CALL_METHOD(method, style) = call
+ val siteSymbol = clasz.symbol
+ val hostSymbol = call.hostClass
+ val methodOwner = method.owner
+ // info calls so that types are up to date; erasure may add lateINTERFACE to traits
+ hostSymbol.info ; methodOwner.info
+
+ def needsInterfaceCall(sym: Symbol) = (
+ sym.isInterface
+ || sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
+ )
+ // whether to reference the type of the receiver or
+ // the type of the method owner
+ val useMethodOwner = (
+ style != Dynamic
+ || hostSymbol.isBottomClass
+ || methodOwner == ObjectClass
+ )
+ val receiver = if (useMethodOwner) methodOwner else hostSymbol
+ val jowner = javaName(receiver)
+ val jname = javaName(method)
+ val jtype = javaType(method).asInstanceOf[JMethodType]
- if (settings.debug.value)
- log("Generating code for block: " + b + " at pc: " + labels(b).getAnchor())
- var lastMappedPC = 0
- var lastLineNr = 0
- var crtPC = 0
- varsInBlock.clear()
-
- for (instr <- b) {
- class CompilationException(msg: String) extends Exception(msg) {
- override def toString: String = {
- msg +
- "\nCurrent method: " + method +
- "\nCurrent block: " + b +
- "\nCurrent instruction: " + instr +
- "\n---------------------" +
- method.dump
- }
+ def dbg(invoke: String) {
+ debuglog("%s %s %s.%s:%s".format(invoke, receiver.accessString, jowner, jname, jtype))
}
- def assert(cond: Boolean, msg: String) =
- if (!cond) throw new CompilationException(msg)
- instr match {
- case THIS(clasz) =>
+ def initModule() {
+ // we initialize the MODULE$ field immediately after the super ctor
+ if (isStaticModule(siteSymbol) && !isModuleInitialized &&
+ jmethod.getName() == JMethod.INSTANCE_CONSTRUCTOR_NAME &&
+ jname == JMethod.INSTANCE_CONSTRUCTOR_NAME) {
+ isModuleInitialized = true
jcode.emitALOAD_0()
+ jcode.emitPUTSTATIC(jclass.getName(),
+ nme.MODULE_INSTANCE_FIELD.toString,
+ jclass.getType())
+ }
+ }
- case CONSTANT(const) =>
- genConstant(jcode, const)
+ style match {
+ case Static(true) => dbg("invokespecial"); jcode.emitINVOKESPECIAL(jowner, jname, jtype)
+ case Static(false) => dbg("invokestatic"); jcode.emitINVOKESTATIC(jowner, jname, jtype)
+ case Dynamic if needsInterfaceCall(receiver) => dbg("invokinterface"); jcode.emitINVOKEINTERFACE(jowner, jname, jtype)
+ case Dynamic => dbg("invokevirtual"); jcode.emitINVOKEVIRTUAL(jowner, jname, jtype)
+ case SuperCall(_) =>
+ dbg("invokespecial")
+ jcode.emitINVOKESPECIAL(jowner, jname, jtype)
+ initModule()
+ }
+ }
- case LOAD_ARRAY_ITEM(kind) =>
- jcode.emitALOAD(javaType(kind))
+ def genBlock(b: BasicBlock) {
+ labels(b).anchorToNext()
- case LOAD_LOCAL(local) =>
- jcode.emitLOAD(indexOf(local), javaType(local.kind))
+ debuglog("Generating code for block: " + b + " at pc: " + labels(b).getAnchor())
+ var lastMappedPC = 0
+ var lastLineNr = 0
+ var crtPC = 0
- case lf @ LOAD_FIELD(field, isStatic) =>
- var owner = javaName(lf.hostClass)
- if (settings.debug.value)
- log("LOAD_FIELD with owner: " + owner +
- " flags: " + Flags.flagsToString(field.owner.flags))
- if (isStatic)
- jcode.emitGETSTATIC(owner,
- javaName(field),
- javaType(field))
- else
- jcode.emitGETFIELD(owner,
- javaName(field),
- javaType(field))
-
- case LOAD_MODULE(module) =>
-// assert(module.isModule, "Expected module: " + module)
- if (settings.debug.value)
- log("generating LOAD_MODULE for: " + module + " flags: " +
- Flags.flagsToString(module.flags));
- if (clasz.symbol == module.moduleClass && jmethod.getName() != nme.readResolve.toString)
- jcode.emitALOAD_0()
- else
- jcode.emitGETSTATIC(javaName(module) /* + "$" */ ,
- nme.MODULE_INSTANCE_FIELD.toString,
- javaType(module))
-
- case STORE_ARRAY_ITEM(kind) =>
- jcode emitASTORE javaType(kind)
-
- case STORE_LOCAL(local) =>
- jcode.emitSTORE(indexOf(local), javaType(local.kind))
-
- case STORE_THIS(_) =>
- // this only works for impl classes because the self parameter comes first
- // in the method signature. If that changes, this code has to be revisited.
- jcode.emitASTORE_0()
-
- case STORE_FIELD(field, isStatic) =>
- val owner = javaName(field.owner)
- if (isStatic)
- jcode.emitPUTSTATIC(owner,
- javaName(field),
- javaType(field))
- else
- jcode.emitPUTFIELD(owner,
- javaName(field),
- javaType(field))
-
- case CALL_PRIMITIVE(primitive) =>
- genPrimitive(primitive, instr.pos)
-
- /** Special handling to access native Array.clone() */
- case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
- val target: String = javaType(call.targetTypeKind).getSignature()
- jcode.emitINVOKEVIRTUAL(target, "clone", arrayCloneType)
-
- case call @ CALL_METHOD(method, style) =>
- val owner: String = javaName(method.owner)
- // reference the type of the receiver instead of the method owner (if not an interface!)
- val dynamicOwner =
- if (needsInterfaceCall(call.hostClass)) owner
- else javaName(call.hostClass)
- val jname = javaName(method)
- val jtype = javaType(method).asInstanceOf[JMethodType]
-
- style match {
- case InvokeDynamic =>
- jcode.emitINVOKEINTERFACE("java.dyn.Dynamic", jname, jtype)
-
- case Dynamic =>
- if (needsInterfaceCall(method.owner) && (method.owner ne ObjectClass))
- jcode.emitINVOKEINTERFACE(owner, jname, jtype)
- else
- jcode.emitINVOKEVIRTUAL(dynamicOwner, jname, jtype)
-
- case Static(instance) =>
- if (instance)
- jcode.emitINVOKESPECIAL(owner, jname, jtype)
- else
- jcode.emitINVOKESTATIC(owner, jname, jtype)
-
- case SuperCall(_) =>
- jcode.emitINVOKESPECIAL(owner, jname, jtype)
- // we initialize the MODULE$ field immediately after the super ctor
- if (isStaticModule(clasz.symbol) && !isModuleInitialized &&
- jmethod.getName() == JMethod.INSTANCE_CONSTRUCTOR_NAME &&
- jname == JMethod.INSTANCE_CONSTRUCTOR_NAME) {
- isModuleInitialized = true
- jcode.emitALOAD_0()
- jcode.emitPUTSTATIC(jclass.getName(),
- nme.MODULE_INSTANCE_FIELD.toString,
- jclass.getType())
+ /** local variables whose scope appears in this block. */
+ val varsInBlock: mutable.Set[Local] = new mutable.HashSet
+ val lastInstr = b.lastInstruction
+
+ for (instr <- b) {
+ instr match {
+ case THIS(clasz) => jcode.emitALOAD_0()
+
+ case CONSTANT(const) => genConstant(jcode, const)
+
+ case LOAD_ARRAY_ITEM(kind) =>
+ if(kind.isRefOrArrayType) { jcode.emitAALOAD() }
+ else {
+ (kind: @unchecked) match {
+ case UNIT => throw new IllegalArgumentException("invalid type for aload " + kind)
+ case BOOL | BYTE => jcode.emitBALOAD()
+ case SHORT => jcode.emitSALOAD()
+ case CHAR => jcode.emitCALOAD()
+ case INT => jcode.emitIALOAD()
+ case LONG => jcode.emitLALOAD()
+ case FLOAT => jcode.emitFALOAD()
+ case DOUBLE => jcode.emitDALOAD()
}
- }
+ }
+
+ case LOAD_LOCAL(local) => jcode.emitLOAD(indexOf(local), javaType(local.kind))
+
+ case lf @ LOAD_FIELD(field, isStatic) =>
+ var owner = javaName(lf.hostClass)
+ debuglog("LOAD_FIELD with owner: " + owner +
+ " flags: " + Flags.flagsToString(field.owner.flags))
+ val fieldJName = javaName(field)
+ val fieldJType = javaType(field)
+ if (isStatic) jcode.emitGETSTATIC(owner, fieldJName, fieldJType)
+ else jcode.emitGETFIELD( owner, fieldJName, fieldJType)
+
+ case LOAD_MODULE(module) =>
+ // assert(module.isModule, "Expected module: " + module)
+ debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
+ if (clasz.symbol == module.moduleClass && jmethod.getName() != nme.readResolve.toString)
+ jcode.emitALOAD_0()
+ else
+ jcode.emitGETSTATIC(javaName(module) /* + "$" */ ,
+ nme.MODULE_INSTANCE_FIELD.toString,
+ javaType(module))
+
+ case STORE_ARRAY_ITEM(kind) =>
+ if(kind.isRefOrArrayType) { jcode.emitAASTORE() }
+ else {
+ (kind: @unchecked) match {
+ case UNIT => throw new IllegalArgumentException("invalid type for astore " + kind)
+ case BOOL | BYTE => jcode.emitBASTORE()
+ case SHORT => jcode.emitSASTORE()
+ case CHAR => jcode.emitCASTORE()
+ case INT => jcode.emitIASTORE()
+ case LONG => jcode.emitLASTORE()
+ case FLOAT => jcode.emitFASTORE()
+ case DOUBLE => jcode.emitDASTORE()
+ }
+ }
- case BOX(kind) =>
- val boxedType = definitions.boxedClass(kind.toType.typeSymbol)
- val mtype = new JMethodType(javaType(boxedType), Array(javaType(kind)))
- jcode.emitINVOKESTATIC(BoxesRunTime, "boxTo" + boxedType.decodedName, mtype)
+ case STORE_LOCAL(local) =>
+ jcode.emitSTORE(indexOf(local), javaType(local.kind))
- case UNBOX(kind) =>
- val mtype = new JMethodType(javaType(kind), Array(JAVA_LANG_OBJECT))
- jcode.emitINVOKESTATIC(BoxesRunTime, "unboxTo" + kind.toType.typeSymbol.decodedName, mtype)
+ case STORE_THIS(_) =>
+ // this only works for impl classes because the self parameter comes first
+ // in the method signature. If that changes, this code has to be revisited.
+ jcode.emitASTORE_0()
- case NEW(REFERENCE(cls)) =>
- val className = javaName(cls)
- jcode emitNEW className
+ case STORE_FIELD(field, isStatic) =>
+ val owner = javaName(field.owner)
+ val fieldJName = javaName(field)
+ val fieldJType = javaType(field)
+ if (isStatic) jcode.emitPUTSTATIC(owner, fieldJName, fieldJType)
+ else jcode.emitPUTFIELD( owner, fieldJName, fieldJType)
- case CREATE_ARRAY(elem, 1) => elem match {
- case REFERENCE(_) | ARRAY(_) =>
- jcode emitANEWARRAY javaType(elem).asInstanceOf[JReferenceType]
- case _ =>
- jcode emitNEWARRAY javaType(elem)
- }
+ case CALL_PRIMITIVE(primitive) => genPrimitive(primitive, instr.pos)
- case CREATE_ARRAY(elem, dims) =>
- jcode.emitMULTIANEWARRAY(javaType(ArrayN(elem, dims)).asInstanceOf[JReferenceType], dims)
+ /** Special handling to access native Array.clone() */
+ case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
+ val target: String = javaType(call.targetTypeKind).getSignature()
+ jcode.emitINVOKEVIRTUAL(target, "clone", arrayCloneType)
- case IS_INSTANCE(tpe) =>
- tpe match {
- case REFERENCE(cls) =>
- jcode emitINSTANCEOF new JObjectType(javaName(cls))
- case ARRAY(elem) =>
- jcode emitINSTANCEOF new JArrayType(javaType(elem))
- case _ =>
- abort("Unknown reference type in IS_INSTANCE: " + tpe)
- }
+ case call @ CALL_METHOD(method, style) => genCallMethod(call)
- case CHECK_CAST(tpe) =>
- tpe match {
- case REFERENCE(cls) =>
- // No need to checkcast for Objects
- if (cls != ObjectClass)
- jcode emitCHECKCAST new JObjectType(javaName(cls))
- case ARRAY(elem) =>
- jcode emitCHECKCAST new JArrayType(javaType(elem))
- case _ =>
- abort("Unknown reference type in IS_INSTANCE: " + tpe)
- }
+ case BOX(kind) =>
+ val Pair(mname, mtype) = jBoxTo(kind)
+ jcode.emitINVOKESTATIC(BoxesRunTime, mname, mtype)
- case SWITCH(tags, branches) =>
- val tagArray = new Array[Array[Int]](tags.length)
- var caze = tags
- var i = 0
+ case UNBOX(kind) =>
+ val Pair(mname, mtype) = jUnboxTo(kind)
+ jcode.emitINVOKESTATIC(BoxesRunTime, mname, mtype)
- while (i < tagArray.length) {
- tagArray(i) = new Array[Int](caze.head.length)
- caze.head.copyToArray(tagArray(i), 0)
- i += 1
- caze = caze.tail
- }
- val branchArray = jcode.newLabels(tagArray.length)
- i = 0
- while (i < branchArray.length) {
- branchArray(i) = labels(branches(i))
- i += 1
- }
- if (settings.debug.value)
- log("Emitting SWITCH:\ntags: " + tags + "\nbranches: " + branches)
- jcode.emitSWITCH(tagArray,
- branchArray,
- labels(branches.last),
- MIN_SWITCH_DENSITY)
- ()
-
- case JUMP(whereto) =>
- if (nextBlock != whereto)
- jcode.emitGOTO_maybe_W(labels(whereto), false) // default to short jumps
-
- case CJUMP(success, failure, cond, kind) =>
- kind match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
+ case NEW(REFERENCE(cls)) =>
+ val className = javaName(cls)
+ jcode emitNEW className
+
+ case CREATE_ARRAY(elem, 1) =>
+ if(elem.isRefOrArrayType) { jcode emitANEWARRAY javaType(elem).asInstanceOf[JReferenceType] }
+ else { jcode emitNEWARRAY javaType(elem) }
+
+ case CREATE_ARRAY(elem, dims) =>
+ jcode.emitMULTIANEWARRAY(javaType(ArrayN(elem, dims)).asInstanceOf[JReferenceType], dims)
+
+ case IS_INSTANCE(tpe) =>
+ tpe match {
+ case REFERENCE(cls) => jcode emitINSTANCEOF new JObjectType(javaName(cls))
+ case ARRAY(elem) => jcode emitINSTANCEOF new JArrayType(javaType(elem))
+ case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
+ }
+
+ case CHECK_CAST(tpe) =>
+ tpe match {
+ case REFERENCE(cls) => if (cls != ObjectClass) { jcode emitCHECKCAST new JObjectType(javaName(cls)) } // No need to checkcast for Objects
+ case ARRAY(elem) => jcode emitCHECKCAST new JArrayType(javaType(elem))
+ case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
+ }
+
+ case SWITCH(tags, branches) =>
+ val tagArray = new Array[Array[Int]](tags.length)
+ var caze = tags
+ var i = 0
+
+ while (i < tagArray.length) {
+ tagArray(i) = new Array[Int](caze.head.length)
+ caze.head.copyToArray(tagArray(i), 0)
+ i += 1
+ caze = caze.tail
+ }
+ val branchArray = jcode.newLabels(tagArray.length)
+ i = 0
+ while (i < branchArray.length) {
+ branchArray(i) = labels(branches(i))
+ i += 1
+ }
+ debuglog("Emitting SWITCH:\ntags: " + tags + "\nbranches: " + branches)
+ jcode.emitSWITCH(tagArray,
+ branchArray,
+ labels(branches.last),
+ MIN_SWITCH_DENSITY)
+ ()
+
+ case JUMP(whereto) =>
+ if (nextBlock != whereto)
+ jcode.emitGOTO_maybe_W(labels(whereto), false) // default to short jumps
+
+ case CJUMP(success, failure, cond, kind) =>
+ if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
if (nextBlock == success) {
- jcode.emitIF_ICMP(conds(negate(cond)), labels(failure))
+ jcode.emitIF_ICMP(conds(cond.negate()), labels(failure))
// .. and fall through to success label
} else {
jcode.emitIF_ICMP(conds(cond), labels(success))
if (nextBlock != failure)
jcode.emitGOTO_maybe_W(labels(failure), false)
}
-
- case REFERENCE(_) | ARRAY(_) =>
+ } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
if (nextBlock == success) {
- jcode.emitIF_ACMP(conds(negate(cond)), labels(failure))
+ jcode.emitIF_ACMP(conds(cond.negate()), labels(failure))
// .. and fall through to success label
} else {
jcode.emitIF_ACMP(conds(cond), labels(success))
if (nextBlock != failure)
jcode.emitGOTO_maybe_W(labels(failure), false)
}
-
- case _ =>
+ } else {
(kind: @unchecked) match {
case LONG => jcode.emitLCMP()
case FLOAT =>
@@ -1426,50 +1425,45 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
else jcode.emitDCMPL()
}
if (nextBlock == success) {
- jcode.emitIF(conds(negate(cond)), labels(failure))
+ jcode.emitIF(conds(cond.negate()), labels(failure))
// .. and fall through to success label
} else {
jcode.emitIF(conds(cond), labels(success));
if (nextBlock != failure)
jcode.emitGOTO_maybe_W(labels(failure), false)
}
- }
+ }
- case CZJUMP(success, failure, cond, kind) =>
- kind match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
+ case CZJUMP(success, failure, cond, kind) =>
+ if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
if (nextBlock == success) {
- jcode.emitIF(conds(negate(cond)), labels(failure))
+ jcode.emitIF(conds(cond.negate()), labels(failure))
} else {
jcode.emitIF(conds(cond), labels(success))
if (nextBlock != failure)
jcode.emitGOTO_maybe_W(labels(failure), false)
}
-
- case REFERENCE(_) | ARRAY(_) =>
+ } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
val Success = success
val Failure = failure
(cond, nextBlock) match {
- case (EQ, Success) =>
- jcode emitIFNONNULL labels(failure)
- case (NE, Failure) =>
- jcode emitIFNONNULL labels(success)
- case (EQ, Failure) =>
- jcode emitIFNULL labels(success)
- case (NE, Success) =>
- jcode emitIFNULL labels(failure)
+ case (EQ, Success) => jcode emitIFNONNULL labels(failure)
+ case (NE, Failure) => jcode emitIFNONNULL labels(success)
+ case (EQ, Failure) => jcode emitIFNULL labels(success)
+ case (NE, Success) => jcode emitIFNULL labels(failure)
case (EQ, _) =>
jcode emitIFNULL labels(success)
jcode.emitGOTO_maybe_W(labels(failure), false)
case (NE, _) =>
jcode emitIFNONNULL labels(success)
jcode.emitGOTO_maybe_W(labels(failure), false)
+ case _ =>
}
-
- case _ =>
+ } else {
(kind: @unchecked) match {
case LONG =>
- jcode.emitLCONST_0(); jcode.emitLCMP()
+ jcode.emitLCONST_0()
+ jcode.emitLCMP()
case FLOAT =>
jcode.emitFCONST_0()
if (cond == LT || cond == LE) jcode.emitFCMPG()
@@ -1480,264 +1474,254 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
else jcode.emitDCMPL()
}
if (nextBlock == success) {
- jcode.emitIF(conds(negate(cond)), labels(failure))
+ jcode.emitIF(conds(cond.negate()), labels(failure))
} else {
jcode.emitIF(conds(cond), labels(success))
if (nextBlock != failure)
jcode.emitGOTO_maybe_W(labels(failure), false)
}
- }
+ }
- case RETURN(kind) =>
- jcode emitRETURN javaType(kind)
+ case RETURN(kind) => jcode emitRETURN javaType(kind)
- case THROW(_) =>
- jcode.emitATHROW()
+ case THROW(_) => jcode.emitATHROW()
- case DROP(kind) =>
- kind match {
- case LONG | DOUBLE => jcode.emitPOP2()
- case _ => jcode.emitPOP()
- }
+ case DROP(kind) =>
+ if(kind.isWideType) jcode.emitPOP2()
+ else jcode.emitPOP()
- case DUP(kind) =>
- kind match {
- case LONG | DOUBLE => jcode.emitDUP2()
- case _ => jcode.emitDUP()
- }
+ case DUP(kind) =>
+ if(kind.isWideType) jcode.emitDUP2()
+ else jcode.emitDUP()
- case MONITOR_ENTER() =>
- jcode.emitMONITORENTER()
+ case MONITOR_ENTER() => jcode.emitMONITORENTER()
- case MONITOR_EXIT() =>
- jcode.emitMONITOREXIT()
+ case MONITOR_EXIT() => jcode.emitMONITOREXIT()
- case SCOPE_ENTER(lv) =>
- varsInBlock += lv
- lv.start = jcode.getPC()
+ case SCOPE_ENTER(lv) =>
+ varsInBlock += lv
+ lv.start = jcode.getPC()
- case SCOPE_EXIT(lv) =>
- if (varsInBlock(lv)) {
- lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
- varsInBlock -= lv
- }
- else if (b.varsInScope(lv)) {
- lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
- b.varsInScope -= lv
- } else
- assert(false, "Illegal local var nesting: " + method)
+ case SCOPE_EXIT(lv) =>
+ if (varsInBlock(lv)) {
+ lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
+ varsInBlock -= lv
+ }
+ else if (b.varsInScope(lv)) {
+ lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
+ b.varsInScope -= lv
+ }
+ else dumpMethodAndAbort(method, "Illegal local var nesting")
- case LOAD_EXCEPTION(_) =>
- ()
- }
+ case LOAD_EXCEPTION(_) =>
+ ()
+ }
- crtPC = jcode.getPC()
+ crtPC = jcode.getPC()
-// assert(instr.pos.source.isEmpty || instr.pos.source.get == (clasz.cunit.source), "sources don't match")
-// val crtLine = instr.pos.line.get(lastLineNr);
+ // assert(instr.pos.source.isEmpty || instr.pos.source.get == (clasz.cunit.source), "sources don't match")
+ // val crtLine = instr.pos.line.get(lastLineNr);
- val crtLine = try {
- if (instr.pos == NoPosition) lastLineNr else (instr.pos).line // check NoPosition to avoid costly exception
- } catch {
- case _: UnsupportedOperationException =>
- log("Warning: wrong position in: " + method)
- lastLineNr
- }
+ val crtLine = try {
+ if (instr.pos == NoPosition) lastLineNr else (instr.pos).line // check NoPosition to avoid costly exception
+ } catch {
+ case _: UnsupportedOperationException =>
+ log("Warning: wrong position in: " + method)
+ lastLineNr
+ }
- if (b.lastInstruction == instr)
- endPC(b) = jcode.getPC()
+ if (instr eq lastInstr) { endPC(b) = jcode.getPC() }
- //System.err.println("CRTLINE: " + instr.pos + " " +
- // /* (if (instr.pos < clasz.cunit.source.content.length) clasz.cunit.source.content(instr.pos) else '*') + */ " " + crtLine);
+ //System.err.println("CRTLINE: " + instr.pos + " " +
+ // /* (if (instr.pos < clasz.cunit.source.content.length) clasz.cunit.source.content(instr.pos) else '*') + */ " " + crtLine);
- if (crtPC > lastMappedPC) {
- jcode.completeLineNumber(lastMappedPC, crtPC, crtLine)
- lastMappedPC = crtPC
- lastLineNr = crtLine
+ if (crtPC > lastMappedPC) {
+ jcode.completeLineNumber(lastMappedPC, crtPC, crtLine)
+ lastMappedPC = crtPC
+ lastLineNr = crtLine
+ }
}
- }
- // local vars that survived this basic block
- for (lv <- varsInBlock) {
- lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
- }
- for (lv <- b.varsInScope) {
- lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
+ // local vars that survived this basic block
+ for (lv <- varsInBlock) {
+ lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
+ }
+ for (lv <- b.varsInScope) {
+ lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
+ }
}
- }
-
- /**
- * @param primitive ...
- * @param pos ...
- */
- def genPrimitive(primitive: Primitive, pos: Position) {
- primitive match {
- case Negation(kind) =>
- kind match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
- jcode.emitINEG()
- case LONG => jcode.emitLNEG()
- case FLOAT => jcode.emitFNEG()
- case DOUBLE => jcode.emitDNEG()
- case _ => abort("Impossible to negate a " + kind)
- }
- case Arithmetic(op, kind) =>
- op match {
- case ADD => jcode.emitADD(javaType(kind))
- case SUB =>
- (kind: @unchecked) match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
- jcode.emitISUB()
- case LONG => jcode.emitLSUB()
- case FLOAT => jcode.emitFSUB()
- case DOUBLE => jcode.emitDSUB()
+ /**
+ * @param primitive ...
+ * @param pos ...
+ */
+ def genPrimitive(primitive: Primitive, pos: Position) {
+ primitive match {
+ case Negation(kind) =>
+ if(kind.isIntSizedType) { jcode.emitINEG() }
+ else {
+ kind match {
+ case LONG => jcode.emitLNEG()
+ case FLOAT => jcode.emitFNEG()
+ case DOUBLE => jcode.emitDNEG()
+ case _ => abort("Impossible to negate a " + kind)
}
+ }
- case MUL =>
- (kind: @unchecked) match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
- jcode.emitIMUL()
- case LONG => jcode.emitLMUL()
- case FLOAT => jcode.emitFMUL()
- case DOUBLE => jcode.emitDMUL()
- }
+ case Arithmetic(op, kind) =>
+ op match {
+ case ADD =>
+ if(kind.isIntSizedType) { jcode.emitIADD() }
+ else {
+ (kind: @unchecked) match {
+ case LONG => jcode.emitLADD()
+ case FLOAT => jcode.emitFADD()
+ case DOUBLE => jcode.emitDADD()
+ }
+ }
- case DIV =>
- (kind: @unchecked) match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
- jcode.emitIDIV()
- case LONG => jcode.emitLDIV()
- case FLOAT => jcode.emitFDIV()
- case DOUBLE => jcode.emitDDIV()
- }
+ case SUB =>
+ if(kind.isIntSizedType) { jcode.emitISUB() }
+ else {
+ (kind: @unchecked) match {
+ case LONG => jcode.emitLSUB()
+ case FLOAT => jcode.emitFSUB()
+ case DOUBLE => jcode.emitDSUB()
+ }
+ }
- case REM =>
- (kind: @unchecked) match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
- jcode.emitIREM()
- case LONG => jcode.emitLREM()
- case FLOAT => jcode.emitFREM()
- case DOUBLE => jcode.emitDREM()
- }
+ case MUL =>
+ if(kind.isIntSizedType) { jcode.emitIMUL() }
+ else {
+ (kind: @unchecked) match {
+ case LONG => jcode.emitLMUL()
+ case FLOAT => jcode.emitFMUL()
+ case DOUBLE => jcode.emitDMUL()
+ }
+ }
- case NOT =>
- kind match {
- case BOOL | BYTE | CHAR | SHORT | INT =>
+ case DIV =>
+ if(kind.isIntSizedType) { jcode.emitIDIV() }
+ else {
+ (kind: @unchecked) match {
+ case LONG => jcode.emitLDIV()
+ case FLOAT => jcode.emitFDIV()
+ case DOUBLE => jcode.emitDDIV()
+ }
+ }
+
+ case REM =>
+ if(kind.isIntSizedType) { jcode.emitIREM() }
+ else {
+ (kind: @unchecked) match {
+ case LONG => jcode.emitLREM()
+ case FLOAT => jcode.emitFREM()
+ case DOUBLE => jcode.emitDREM()
+ }
+ }
+
+ case NOT =>
+ if(kind.isIntSizedType) {
jcode.emitPUSH(-1)
jcode.emitIXOR()
- case LONG =>
+ } else if(kind == LONG) {
jcode.emitPUSH(-1l)
jcode.emitLXOR()
- case _ =>
+ } else {
abort("Impossible to negate an " + kind)
- }
-
- case _ =>
- abort("Unknown arithmetic primitive " + primitive)
- }
-
- case Logical(op, kind) => (op, kind) match {
- case (AND, LONG) =>
- jcode.emitLAND()
- case (AND, INT) =>
- jcode.emitIAND()
- case (AND, _) =>
- jcode.emitIAND()
- if (kind != BOOL)
- jcode.emitT2T(javaType(INT), javaType(kind));
-
- case (OR, LONG) =>
- jcode.emitLOR()
- case (OR, INT) =>
- jcode.emitIOR()
- case (OR, _) =>
- jcode.emitIOR()
- if (kind != BOOL)
- jcode.emitT2T(javaType(INT), javaType(kind));
-
- case (XOR, LONG) =>
- jcode.emitLXOR()
- case (XOR, INT) =>
- jcode.emitIXOR()
- case (XOR, _) =>
- jcode.emitIXOR()
- if (kind != BOOL)
- jcode.emitT2T(javaType(INT), javaType(kind));
- }
-
- case Shift(op, kind) => (op, kind) match {
- case (LSL, LONG) =>
- jcode.emitLSHL()
- case (LSL, INT) =>
- jcode.emitISHL()
- case (LSL, _) =>
- jcode.emitISHL()
- jcode.emitT2T(javaType(INT), javaType(kind))
-
- case (ASR, LONG) =>
- jcode.emitLSHR()
- case (ASR, INT) =>
- jcode.emitISHR()
- case (ASR, _) =>
- jcode.emitISHR()
- jcode.emitT2T(javaType(INT), javaType(kind))
-
- case (LSR, LONG) =>
- jcode.emitLUSHR()
- case (LSR, INT) =>
- jcode.emitIUSHR()
- case (LSR, _) =>
- jcode.emitIUSHR()
- jcode.emitT2T(javaType(INT), javaType(kind))
- }
+ }
- case Comparison(op, kind) => ((op, kind): @unchecked) match {
- case (CMP, LONG) => jcode.emitLCMP()
- case (CMPL, FLOAT) => jcode.emitFCMPL()
- case (CMPG, FLOAT) => jcode.emitFCMPG()
- case (CMPL, DOUBLE) => jcode.emitDCMPL()
- case (CMPG, DOUBLE) => jcode.emitDCMPL()
- }
+ case _ =>
+ abort("Unknown arithmetic primitive " + primitive)
+ }
- case Conversion(src, dst) =>
- if (settings.debug.value)
- log("Converting from: " + src + " to: " + dst)
- if (dst == BOOL) {
- println("Illegal conversion at: " + clasz +
- " at: " + pos.source + ":" + pos.line)
- } else
- jcode.emitT2T(javaType(src), javaType(dst))
+ case Logical(op, kind) => ((op, kind): @unchecked) match {
+ case (AND, LONG) => jcode.emitLAND()
+ case (AND, INT) => jcode.emitIAND()
+ case (AND, _) =>
+ jcode.emitIAND()
+ if (kind != BOOL)
+ jcode.emitT2T(javaType(INT), javaType(kind));
+
+ case (OR, LONG) => jcode.emitLOR()
+ case (OR, INT) => jcode.emitIOR()
+ case (OR, _) =>
+ jcode.emitIOR()
+ if (kind != BOOL)
+ jcode.emitT2T(javaType(INT), javaType(kind));
+
+ case (XOR, LONG) => jcode.emitLXOR()
+ case (XOR, INT) => jcode.emitIXOR()
+ case (XOR, _) =>
+ jcode.emitIXOR()
+ if (kind != BOOL)
+ jcode.emitT2T(javaType(INT), javaType(kind));
+ }
- case ArrayLength(_) =>
- jcode.emitARRAYLENGTH()
+ case Shift(op, kind) => ((op, kind): @unchecked) match {
+ case (LSL, LONG) => jcode.emitLSHL()
+ case (LSL, INT) => jcode.emitISHL()
+ case (LSL, _) =>
+ jcode.emitISHL()
+ jcode.emitT2T(javaType(INT), javaType(kind))
+
+ case (ASR, LONG) => jcode.emitLSHR()
+ case (ASR, INT) => jcode.emitISHR()
+ case (ASR, _) =>
+ jcode.emitISHR()
+ jcode.emitT2T(javaType(INT), javaType(kind))
+
+ case (LSR, LONG) => jcode.emitLUSHR()
+ case (LSR, INT) => jcode.emitIUSHR()
+ case (LSR, _) =>
+ jcode.emitIUSHR()
+ jcode.emitT2T(javaType(INT), javaType(kind))
+ }
- case StartConcat =>
- jcode emitNEW StringBuilderClassName
- jcode.emitDUP()
- jcode.emitINVOKESPECIAL(StringBuilderClassName,
- JMethod.INSTANCE_CONSTRUCTOR_NAME,
- JMethodType.ARGLESS_VOID_FUNCTION)
-
- case StringConcat(el) =>
- val jtype = el match {
- case REFERENCE(_) | ARRAY(_) => JAVA_LANG_OBJECT
- case _ => javaType(el)
+ case Comparison(op, kind) => ((op, kind): @unchecked) match {
+ case (CMP, LONG) => jcode.emitLCMP()
+ case (CMPL, FLOAT) => jcode.emitFCMPL()
+ case (CMPG, FLOAT) => jcode.emitFCMPG()
+ case (CMPL, DOUBLE) => jcode.emitDCMPL()
+ case (CMPG, DOUBLE) => jcode.emitDCMPL()
}
- jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
- "append",
- new JMethodType(StringBuilderType,
- Array(jtype)))
- case EndConcat =>
- jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
- "toString",
- toStringType)
- case _ =>
- abort("Unimplemented primitive " + primitive)
+ case Conversion(src, dst) =>
+ debuglog("Converting from: " + src + " to: " + dst)
+ if (dst == BOOL) {
+ println("Illegal conversion at: " + clasz + " at: " + pos.source + ":" + pos.line)
+ } else
+ jcode.emitT2T(javaType(src), javaType(dst))
+
+ case ArrayLength(_) =>
+ jcode.emitARRAYLENGTH()
+
+ case StartConcat =>
+ jcode emitNEW StringBuilderClassName
+ jcode.emitDUP()
+ jcode.emitINVOKESPECIAL(StringBuilderClassName,
+ JMethod.INSTANCE_CONSTRUCTOR_NAME,
+ JMethodType.ARGLESS_VOID_FUNCTION)
+
+ case StringConcat(el) =>
+ val jtype = el match {
+ case REFERENCE(_) | ARRAY(_) => JAVA_LANG_OBJECT
+ case _ => javaType(el)
+ }
+ jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
+ "append",
+ new JMethodType(StringBuilderType,
+ Array(jtype)))
+ case EndConcat =>
+ jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
+ "toString",
+ toStringType)
+
+ case _ =>
+ abort("Unimplemented primitive " + primitive)
+ }
}
- }
// genCode starts here
genBlocks(linearization)
@@ -1807,10 +1791,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe))
- def sizeOf(k: TypeKind): Int = k match {
- case DOUBLE | LONG => 2
- case _ => 1
- }
+ def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1
def indexOf(m: IMethod, sym: Symbol): Int = {
val Some(local) = m lookupLocal sym
@@ -1818,23 +1799,25 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
}
def indexOf(local: Local): Int = {
- assert(local.index >= 0,
- "Invalid index for: " + local + "{" + local.## + "}: ")
+ assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ")
local.index
}
/**
* Compute the indexes of each local variable of the given
- * method. Assumes parameters come first in the list of locals.
+ * method. *Does not assume the parameters come first!*
*/
def computeLocalVarsIndex(m: IMethod) {
- var idx = 1
- if (m.symbol.isStaticMember)
- idx = 0;
+ var idx = if (m.symbol.isStaticMember) 0 else 1;
- for (l <- m.locals) {
- if (settings.debug.value)
- log("Index value for " + l + "{" + l.## + "}: " + idx)
+ for (l <- m.params) {
+ debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
+ l.index = idx
+ idx += sizeOf(l.kind)
+ }
+
+ for (l <- m.locals if !(m.params contains l)) {
+ debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
l.index = idx
idx += sizeOf(l.kind)
}
@@ -1842,20 +1825,6 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
////////////////////// Utilities ////////////////////////
- /** Calls to methods in 'sym' need invokeinterface? */
- def needsInterfaceCall(sym: Symbol): Boolean = {
- log("checking for interface call: " + sym.fullName)
- // the following call to 'info' may cause certain symbols to fail loading
- // because we're too late in the compilation chain (aliases to overloaded
- // symbols will not be properly resolved, see scala.Range, method
- // `super$++` that fails in UnPickler at LazyTypeRefAndAlias.complete
- if (sym.isTrait) sym.info // needed so that the type is up to date
- // (erasure may add lateINTERFACE to traits)
-
- sym.isInterface ||
- (sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass))
- }
-
/** Merge adjacent ranges. */
private def mergeEntries(ranges: List[(Int, Int)]): List[(Int, Int)] =
(ranges.foldLeft(Nil: List[(Int, Int)]) { (collapsed: List[(Int, Int)], p: (Int, Int)) => (collapsed, p) match {
@@ -1863,15 +1832,10 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
case ((s1, e1) :: rest, (s2, e2)) if (e1 == s2) => (s1, e2) :: rest
case _ => p :: collapsed
}}).reverse
-
- def assert(cond: Boolean, msg: => String) = if (!cond) {
- method.dump
- abort(msg + "\nMethod: " + method)
- }
-
- def assert(cond: Boolean) { assert(cond, "Assertion failed.") }
}
+ private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
+
/**
* Return the Java modifiers for the given symbol.
* Java modifiers for classes:
@@ -1889,28 +1853,66 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
* and they would fail verification after lifted.
*/
def javaFlags(sym: Symbol): Int = {
- def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
// constructors of module classes should be private
// PP: why are they only being marked private at this stage and not earlier?
- val isConsideredPrivate =
+ val privateFlag =
sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner))
+ // Final: the only fields which can receive ACC_FINAL are eager vals.
+ // Neither vars nor lazy vals can, because:
+ //
+ // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3
+ // "Another problem is that the specification allows aggressive
+ // optimization of final fields. Within a thread, it is permissible to
+ // reorder reads of a final field with those modifications of a final
+ // field that do not take place in the constructor."
+ //
+ // A var or lazy val which is marked final still has meaning to the
+ // scala compiler. The word final is heavily overloaded unfortunately;
+ // for us it means "not overridable". At present you can't override
+ // vars regardless; this may change.
+ //
+ // The logic does not check .isFinal (which checks flags for the FINAL flag,
+ // and includes symbols marked lateFINAL) instead inspecting rawflags so
+ // we can exclude lateFINAL. Such symbols are eligible for inlining, but to
+ // avoid breaking proxy software which depends on subclassing, we do not
+ // emit ACC_FINAL.
+ // Nested objects won't receive ACC_FINAL in order to allow for their overriding.
+
+ val finalFlag = (
+ (((sym.rawflags & Flags.FINAL) != 0) || isTopLevelModule(sym))
+ && !sym.enclClass.isInterface
+ && !sym.isClassConstructor
+ && !sym.isMutable // lazy vals and vars both
+ )
+
+ // Primitives are "abstract final" to prohibit instantiation
+ // without having to provide any implementations, but that is an
+ // illegal combination of modifiers at the bytecode level so
+ // suppress final if abstract if present.
mkFlags(
- if (isConsideredPrivate) ACC_PRIVATE else ACC_PUBLIC,
+ if (privateFlag) ACC_PRIVATE else ACC_PUBLIC,
if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0,
if (sym.isInterface) ACC_INTERFACE else 0,
- if (sym.isFinal && !sym.enclClass.isInterface && !sym.isClassConstructor) ACC_FINAL else 0,
+ if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
if (sym.isStaticMember) ACC_STATIC else 0,
if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
+ if (sym.isArtifact) ACC_SYNTHETIC else 0,
if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
- if (sym.isVarargsMethod) ACC_VARARGS else 0
+ if (sym.isVarargsMethod) ACC_VARARGS else 0,
+ if (sym.hasFlag(Flags.SYNCHRONIZED)) JAVA_ACC_SYNCHRONIZED else 0
)
}
+ def javaFieldFlags(sym: Symbol) = (
+ javaFlags(sym) | mkFlags(
+ if (sym hasAnnotation TransientAttr) ACC_TRANSIENT else 0,
+ if (sym hasAnnotation VolatileAttr) ACC_VOLATILE else 0,
+ if (sym.isMutable) 0 else ACC_FINAL
+ )
+ )
def isTopLevelModule(sym: Symbol): Boolean =
- atPhase (currentRun.picklerPhase.next) {
- sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass
- }
+ afterPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
def isStaticModule(sym: Symbol): Boolean = {
sym.isModuleClass && !sym.isImplClass && !sym.isLifted
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
new file mode 100644
index 0000000..540935f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
@@ -0,0 +1,99 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Jason Zaugg
+ */
+
+package scala.tools.nsc
+package backend.jvm
+import scala.tools.nsc.io.AbstractFile
+import scala.tools.nsc.symtab._
+
+/** Code shared between the legagy backend [[scala.tools.nsc.backend.jvm.GenJVM]]
+ * and the new backend [[scala.tools.nsc.backend.jvm.GenASM]]. There should be
+ * more here, but for now I'm starting with the refactorings that are either
+ * straightforward to review or necessary for maintenance.
+ */
+trait GenJVMASM {
+ val global: Global
+ import global._
+ import icodes._
+ import definitions._
+
+ protected def outputDirectory(sym: Symbol): AbstractFile =
+ settings.outputDirs outputDirFor beforeFlatten(sym.sourceFile)
+
+ protected def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
+ var dir = base
+ val pathParts = clsName.split("[./]").toList
+ for (part <- pathParts.init) {
+ dir = dir.subdirectoryNamed(part)
+ }
+ dir.fileNamed(pathParts.last + suffix)
+ }
+ protected def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
+ getFile(outputDirectory(sym), clsName, suffix)
+
+ protected val ExcludedForwarderFlags = {
+ import Flags._
+ // Should include DEFERRED but this breaks findMember.
+ ( CASE | SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO )
+ }
+
+ protected def isJavaEntryPoint(icls: IClass) = {
+ val sym = icls.symbol
+ def fail(msg: String, pos: Position = sym.pos) = {
+ icls.cunit.warning(sym.pos,
+ sym.name + " has a main method with parameter type Array[String], but " + sym.fullName('.') + " will not be a runnable program.\n" +
+ " Reason: " + msg
+ // TODO: make this next claim true, if possible
+ // by generating valid main methods as static in module classes
+ // not sure what the jvm allows here
+ // + " You can still run the program by calling it as " + sym.javaSimpleName + " instead."
+ )
+ false
+ }
+ def failNoForwarder(msg: String) = {
+ fail(msg + ", which means no static forwarder can be generated.\n")
+ }
+ val possibles = if (sym.hasModuleFlag) (sym.tpe nonPrivateMember nme.main).alternatives else Nil
+ val hasApproximate = possibles exists { m =>
+ m.info match {
+ case MethodType(p :: Nil, _) => p.tpe.typeSymbol == ArrayClass
+ case _ => false
+ }
+ }
+ // At this point it's a module with a main-looking method, so either succeed or warn that it isn't.
+ hasApproximate && {
+ // Before erasure so we can identify generic mains.
+ beforeErasure {
+ val companion = sym.linkedClassOfClass
+ val companionMain = companion.tpe.member(nme.main)
+
+ if (hasJavaMainMethod(companion))
+ failNoForwarder("companion contains its own main method")
+ else if (companion.tpe.member(nme.main) != NoSymbol)
+ // this is only because forwarders aren't smart enough yet
+ failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
+ else if (companion.isTrait)
+ failNoForwarder("companion is a trait")
+ // Now either succeeed, or issue some additional warnings for things which look like
+ // attempts to be java main methods.
+ else (possibles exists isJavaMainMethod) || {
+ possibles exists { m =>
+ m.info match {
+ case PolyType(_, _) =>
+ fail("main methods cannot be generic.")
+ case MethodType(params, res) =>
+ if (res.typeSymbol :: params exists (_.isAbstractType))
+ fail("main methods cannot refer to type parameters or abstract types.", m.pos)
+ else
+ isJavaMainMethod(m) || fail("main method must have exact signature (Array[String])Unit", m.pos)
+ case tp =>
+ fail("don't know what this is: " + tp, m.pos)
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
index 418dbea..e002a61 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
@@ -1,14 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Iulian Dragos
*/
-
package scala.tools.nsc
package backend.jvm
import scala.collection.{ mutable, immutable }
-
import ch.epfl.lamp.fjbg._
trait GenJVMUtil {
@@ -34,16 +32,13 @@ trait GenJVMUtil {
DOUBLE -> new JObjectType("java.lang.Double")
)
- private val javaNameCache = {
- val map = new mutable.WeakHashMap[Symbol, String]()
- map ++= List(
- NothingClass -> RuntimeNothingClass.fullName('/'),
- RuntimeNothingClass -> RuntimeNothingClass.fullName('/'),
- NullClass -> RuntimeNullClass.fullName('/'),
- RuntimeNullClass -> RuntimeNullClass.fullName('/')
- )
- map
- }
+ // Don't put this in per run caches.
+ private val javaNameCache = new mutable.WeakHashMap[Symbol, Name]() ++= List(
+ NothingClass -> binarynme.RuntimeNothing,
+ RuntimeNothingClass -> binarynme.RuntimeNothing,
+ NullClass -> binarynme.RuntimeNull,
+ RuntimeNullClass -> binarynme.RuntimeNull
+ )
/** This trait may be used by tools who need access to
* utility methods like javaName and javaType. (for instance,
@@ -59,14 +54,6 @@ trait GenJVMUtil {
LE -> JExtendedCode.COND_LE,
GE -> JExtendedCode.COND_GE
)
- val negate = immutable.Map[TestOp, TestOp](
- EQ -> NE,
- NE -> EQ,
- LT -> GE,
- GT -> LE,
- LE -> GT,
- GE -> LT
- )
/** Specialized array conversion to prevent calling
* java.lang.reflect.Array.newInstance via TraversableOnce.toArray
@@ -75,7 +62,6 @@ trait GenJVMUtil {
def mkArray(xs: Traversable[JType]): Array[JType] = { val a = new Array[JType](xs.size); xs.copyToArray(a); a }
def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a }
-
/** Return the a name of this symbol that can be used on the Java
* platform. It removes spaces from names.
*
@@ -92,10 +78,10 @@ trait GenJVMUtil {
def javaName(sym: Symbol): String =
javaNameCache.getOrElseUpdate(sym, {
if (sym.isClass || (sym.isModule && !sym.isMethod))
- sym.fullName('/') + moduleSuffix(sym)
+ sym.javaBinaryName
else
- sym.simpleName.toString.trim() + moduleSuffix(sym)
- })
+ sym.javaSimpleName
+ }).toString
def javaType(t: TypeKind): JType = (t: @unchecked) match {
case UNIT => JType.VOID
@@ -135,7 +121,7 @@ trait GenJVMUtil {
case DoubleTag => jcode emitPUSH const.doubleValue
case StringTag => jcode emitPUSH const.stringValue
case NullTag => jcode.emitACONST_NULL()
- case ClassTag =>
+ case ClazzTag =>
val kind = toTypeKind(const.typeValue)
val toPush =
if (kind.isValueType) classLiteral(kind)
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
index 1968ca5..aaffaa8 100644
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
@@ -1,5 +1,5 @@
/* NSC -- new scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Nikolay Mihaylov
*/
@@ -9,13 +9,13 @@ package backend.msil
import java.io.{File, IOException}
import java.nio.{ByteBuffer, ByteOrder}
-
-import scala.collection.mutable.{Map, HashMap, HashSet, Stack, ListBuffer}
+import scala.collection.{ mutable, immutable }
import scala.tools.nsc.symtab._
import ch.epfl.lamp.compiler.msil.{Type => MsilType, _}
import ch.epfl.lamp.compiler.msil.emit._
import ch.epfl.lamp.compiler.msil.util.PECustomMod
+import scala.language.postfixOps
abstract class GenMSIL extends SubComponent {
import global._
@@ -24,6 +24,8 @@ abstract class GenMSIL extends SubComponent {
import icodes._
import icodes.opcodes._
+ val x = loaders
+
/** Create a new phase */
override def newPhase(p: Phase) = new MsilPhase(p)
@@ -123,14 +125,14 @@ abstract class GenMSIL extends SubComponent {
// Scala attributes
// symtab.Definitions -> object (singleton..)
val SerializableAttr = definitions.SerializableAttr.tpe
- val CloneableAttr = definitions.getClass("scala.cloneable").tpe
- val TransientAtt = definitions.getClass("scala.transient").tpe
+ val CloneableAttr = definitions.CloneableAttr.tpe
+ val TransientAtt = definitions.TransientAttr.tpe
// remoting: the architectures are too different, no mapping (no portable code
// possible)
// java instance methods that are mapped to static methods in .net
// these will need to be called with OpCodes.Call (not Callvirt)
- val dynToStatMapped: HashSet[Symbol] = new HashSet()
+ val dynToStatMapped = mutable.HashSet[Symbol]()
initMappings()
@@ -278,8 +280,7 @@ abstract class GenMSIL extends SubComponent {
/*
- if (settings.debug.value)
- log("creating annotations: " + annotations + " for member : " + member)
+ debuglog("creating annotations: " + annotations + " for member : " + member)
for (annot@ AnnotationInfo(typ, annArgs, nvPairs) <- annotations ;
if annot.isConstant)
//!typ.typeSymbol.isJavaDefined
@@ -365,7 +366,7 @@ abstract class GenMSIL extends SubComponent {
arr.foreach(emitConst)
}
- // TODO: other Tags: NoTag, UnitTag, ClassTag, EnumTag, ArrayTag ???
+ // TODO: other Tags: NoTag, UnitTag, ClazzTag, EnumTag, ArrayTag ???
case _ => abort("could not handle attribute argument: " + const)
}
@@ -388,7 +389,7 @@ abstract class GenMSIL extends SubComponent {
case DoubleTag => buf.put(0x0d.toByte)
case StringTag => buf.put(0x0e.toByte)
- // TODO: other Tags: NoTag, UnitTag, ClassTag, EnumTag ???
+ // TODO: other Tags: NoTag, UnitTag, ClazzTag, EnumTag ???
// ArrayTag falls in here
case _ => abort("could not handle attribute argument: " + c)
@@ -451,8 +452,7 @@ abstract class GenMSIL extends SubComponent {
val iclass = classes(sym)
val tBuilder = types(sym).asInstanceOf[TypeBuilder]
- if (settings.debug.value)
- log("Calling CreatType for " + sym + ", " + tBuilder.toString)
+ debuglog("Calling CreatType for " + sym + ", " + tBuilder.toString)
tBuilder.CreateType()
tBuilder.setSourceFilepath(iclass.cunit.source.file.path)
@@ -466,8 +466,7 @@ abstract class GenMSIL extends SubComponent {
private[GenMSIL] def genClass(iclass: IClass) {
val sym = iclass.symbol
- if (settings.debug.value)
- log("Generating class " + sym + " flags: " + Flags.flagsToString(sym.flags))
+ debuglog("Generating class " + sym + " flags: " + Flags.flagsToString(sym.flags))
clasz = iclass
val tBuilder = getType(sym).asInstanceOf[TypeBuilder]
@@ -476,11 +475,10 @@ abstract class GenMSIL extends SubComponent {
// "Clone": if the code is non-portable, "Clone" is defined, not "clone"
// TODO: improve condition (should override AnyRef.clone)
if (iclass.methods.forall(m => {
- !((m.symbol.name.toString() != "clone" || m.symbol.name.toString() != "Clone") &&
+ !((m.symbol.name.toString != "clone" || m.symbol.name.toString != "Clone") &&
m.symbol.tpe.paramTypes.length != 0)
})) {
- if (settings.debug.value)
- log("auto-generating cloneable method for " + sym)
+ debuglog("auto-generating cloneable method for " + sym)
val attrs: Short = (MethodAttributes.Public | MethodAttributes.Virtual |
MethodAttributes.HideBySig).toShort
val cloneMethod = tBuilder.DefineMethod("Clone", attrs, MOBJECT,
@@ -497,7 +495,7 @@ abstract class GenMSIL extends SubComponent {
if (isTopLevelModule(sym)) {
if (sym.companionClass == NoSymbol)
- dumpMirrorClass(sym)
+ generateMirrorClass(sym)
else
log("No mirror class for module with linked class: " +
sym.fullName)
@@ -513,8 +511,7 @@ abstract class GenMSIL extends SubComponent {
private def genMethod(m: IMethod) {
- if (settings.debug.value)
- log("Generating method " + m.symbol + " flags: " + Flags.flagsToString(m.symbol.flags) +
+ debuglog("Generating method " + m.symbol + " flags: " + Flags.flagsToString(m.symbol.flags) +
" owner: " + m.symbol.owner)
method = m
localBuilders.clear
@@ -543,8 +540,7 @@ abstract class GenMSIL extends SubComponent {
if (mcode != null) {
for (local <- m.locals ; if !(m.params contains local)) {
- if (settings.debug.value)
- log("add local var: " + local + ", of kind " + local.kind)
+ debuglog("add local var: " + local + ", of kind " + local.kind)
val t: MsilType = msilType(local.kind)
val localBuilder = mcode.DeclareLocal(t)
localBuilder.SetLocalSymInfo(msilName(local.sym))
@@ -561,7 +557,7 @@ abstract class GenMSIL extends SubComponent {
*/
val msilLinearizer = new MSILLinearizer()
- val labels: HashMap[BasicBlock, Label] = new HashMap()
+ val labels = mutable.HashMap[BasicBlock, Label]()
/* when emitting .line, it's enough to include the full filename just once per method, thus reducing filesize.
* this scheme relies on the fact that the entry block is emitted first. */
@@ -570,8 +566,7 @@ abstract class GenMSIL extends SubComponent {
def genCode(m: IMethod) {
def makeLabels(blocks: List[BasicBlock]) = {
- if (settings.debug.value)
- log("Making labels for: " + method)
+ debuglog("Making labels for: " + method)
for (bb <- blocks) labels(bb) = mcode.DefineLabel()
}
@@ -617,19 +612,19 @@ abstract class GenMSIL extends SubComponent {
}
// the try blocks starting at a certain BasicBlock
- val beginExBlock = new HashMap[BasicBlock, List[ExceptionHandler]]()
+ val beginExBlock = mutable.HashMap[BasicBlock, List[ExceptionHandler]]()
// the catch blocks starting / endling at a certain BasicBlock
- val beginCatchBlock = new HashMap[BasicBlock, ExceptionHandler]()
- val endExBlock = new HashMap[BasicBlock, List[ExceptionHandler]]()
+ val beginCatchBlock = mutable.HashMap[BasicBlock, ExceptionHandler]()
+ val endExBlock = mutable.HashMap[BasicBlock, List[ExceptionHandler]]()
/** When emitting the code (genBlock), the number of currently active try / catch
- * blocks. When seeing a `RETURN' inside a try / catch, we need to
+ * blocks. When seeing a `RETURN` inside a try / catch, we need to
* - store the result in a local (if it's not UNIT)
* - emit `Leave handlerReturnLabel` instead of the Return
* - emit code at the end: load the local and return its value
*/
- var currentHandlers = new Stack[ExceptionHandler]
+ var currentHandlers = new mutable.Stack[ExceptionHandler]
// The IMethod the Local/Label/Kind below belong to
var handlerReturnMethod: IMethod = _
// Stores the result when returning inside an exception block
@@ -656,11 +651,11 @@ abstract class GenMSIL extends SubComponent {
* So for every finalizer, we have a label which marks the place of the `endfinally`,
* nested try/catch blocks will leave there.
*/
- val endFinallyLabels = new HashMap[ExceptionHandler, Label]()
+ val endFinallyLabels = mutable.HashMap[ExceptionHandler, Label]()
/** Computes which blocks are the beginning / end of a try or catch block */
private def computeExceptionMaps(blocks: List[BasicBlock], m: IMethod): List[BasicBlock] = {
- val visitedBlocks = new HashSet[BasicBlock]()
+ val visitedBlocks = new mutable.HashSet[BasicBlock]()
// handlers which have not been introduced so far
var openHandlers = m.exh
@@ -687,11 +682,11 @@ abstract class GenMSIL extends SubComponent {
// Stack of nested try blocks. Each bloc has a List of ExceptionHandler (multiple
// catch statements). Example *1*: Stack(List(h2, h3), List(h1))
- val currentTryHandlers = new Stack[List[ExceptionHandler]]()
+ val currentTryHandlers = new mutable.Stack[List[ExceptionHandler]]()
// Stack of nested catch blocks. The head of the list is the current catch block. The
// tail is all following catch blocks. Example *2*: Stack(List(h3), List(h4, h5))
- val currentCatchHandlers = new Stack[List[ExceptionHandler]]()
+ val currentCatchHandlers = new mutable.Stack[List[ExceptionHandler]]()
for (b <- blocks) {
@@ -750,7 +745,7 @@ abstract class GenMSIL extends SubComponent {
// (checked by the assertions below)
val sizes = newHandlersBySize.keys.toList.sortWith(_ > _)
- val beginHandlers = new ListBuffer[ExceptionHandler]
+ val beginHandlers = new mutable.ListBuffer[ExceptionHandler]
for (s <- sizes) {
val sHandlers = newHandlersBySize(s)
for (h <- sHandlers) {
@@ -817,8 +812,7 @@ abstract class GenMSIL extends SubComponent {
def genBlock(block: BasicBlock, prev: BasicBlock, next: BasicBlock) {
def loadLocalOrAddress(local: Local, msg : String , loadAddr : Boolean) {
- if (settings.debug.value)
- log(msg + " for " + local)
+ debuglog(msg + " for " + local)
val isArg = local.arg
val i = local.index
if (isArg)
@@ -828,8 +822,7 @@ abstract class GenMSIL extends SubComponent {
}
def loadFieldOrAddress(field: Symbol, isStatic: Boolean, msg: String, loadAddr : Boolean) {
- if (settings.debug.value)
- log(msg + " with owner: " + field.owner +
+ debuglog(msg + " with owner: " + field.owner +
" flags: " + Flags.flagsToString(field.owner.flags))
var fieldInfo = fields.get(field) match {
case Some(fInfo) => fInfo
@@ -915,8 +908,7 @@ abstract class GenMSIL extends SubComponent {
}
mcode.MarkLabel(labels(block))
- if (settings.debug.value)
- log("Generating code for block: " + block)
+ debuglog("Generating code for block: " + block)
for (handler <- beginCatchBlock.get(block)) {
if (!currentHandlers.isEmpty && currentHandlers.top.covered == handler.covered) {
@@ -977,7 +969,7 @@ abstract class GenMSIL extends SubComponent {
case DoubleTag => mcode.Emit(OpCodes.Ldc_R8, const.doubleValue)
case StringTag => mcode.Emit(OpCodes.Ldstr, const.stringValue)
case NullTag => mcode.Emit(OpCodes.Ldnull)
- case ClassTag =>
+ case ClazzTag =>
mcode.Emit(OpCodes.Ldtoken, msilType(const.typeValue))
mcode.Emit(OpCodes.Call, TYPE_FROM_HANDLE)
case _ => abort("Unknown constant value: " + const)
@@ -1016,8 +1008,7 @@ abstract class GenMSIL extends SubComponent {
mcode.Emit(OpCodes.Newobj, constructorInfo)
case LOAD_MODULE(module) =>
- if (settings.debug.value)
- log("Generating LOAD_MODULE for: " + showsym(module))
+ debuglog("Generating LOAD_MODULE for: " + showsym(module))
mcode.Emit(OpCodes.Ldsfld, getModuleInstanceField(module))
case STORE_ARRAY_ITEM(kind) =>
@@ -1039,8 +1030,7 @@ abstract class GenMSIL extends SubComponent {
case STORE_LOCAL(local) =>
val isArg = local.arg
val i = local.index
- if (settings.debug.value)
- log("store_local for " + local + ", index " + i)
+ debuglog("store_local for " + local + ", index " + i)
// there are some locals defined by the compiler that
// are isArg and are need to be stored.
@@ -1136,7 +1126,7 @@ abstract class GenMSIL extends SubComponent {
}
// method: implicit view(FunctionX[PType0, PType1, ...,PTypeN, ResType]):DelegateType
- val (isDelegateView, paramType, resType) = atPhase(currentRun.typerPhase) {
+ val (isDelegateView, paramType, resType) = beforeTyper {
msym.tpe match {
case MethodType(params, resultType)
if (params.length == 1 && msym.name == nme.view_) =>
@@ -1274,7 +1264,7 @@ abstract class GenMSIL extends SubComponent {
i += 1
}
val defaultTarget = labels(branches(i))
- if (next != defaultTarget)
+ if (next != branches(i))
mcode.Emit(OpCodes.Br, defaultTarget)
case JUMP(whereto) =>
@@ -1370,8 +1360,7 @@ abstract class GenMSIL extends SubComponent {
}
case Conversion(src, dst) =>
- if (settings.debug.value)
- log("Converting from: " + src + " to: " + dst)
+ debuglog("Converting from: " + src + " to: " + dst)
dst match {
case BYTE => mcode.Emit(OpCodes.Conv_I1) // I1 for System.SByte, i.e. a scala.Byte
@@ -1564,7 +1553,7 @@ abstract class GenMSIL extends SubComponent {
}
def emitBrBool(cond: TestOp, dest: Label) {
- cond match {
+ (cond: @unchecked) match {
// EQ -> Brfalse, NE -> Brtrue; this is because we come from
// a CZJUMP. If the value on the stack is 0 (e.g. a boolean
// method returned false), and we are in the case EQ, then
@@ -1585,8 +1574,7 @@ abstract class GenMSIL extends SubComponent {
val params = m.params
for (l <- params) {
- if (settings.debug.value)
- log("Index value for parameter " + l + ": " + idx)
+ debuglog("Index value for parameter " + l + ": " + idx)
l.index = idx
idx += 1 // sizeOf(l.kind)
}
@@ -1595,8 +1583,7 @@ abstract class GenMSIL extends SubComponent {
idx = 0
for (l <- locvars) {
- if (settings.debug.value)
- log("Index value for local variable " + l + ": " + idx)
+ debuglog("Index value for local variable " + l + ": " + idx)
l.index = idx
idx += 1 // sizeOf(l.kind)
}
@@ -1617,9 +1604,7 @@ abstract class GenMSIL extends SubComponent {
* not exist in the classpath: the type checker will be very confused.
*/
def msilName(sym: Symbol): String = {
- val suffix: String = if (sym.hasModuleFlag && !sym.isMethod &&
- !sym.isImplClass &&
- !sym.isJavaDefined) "$" else ""
+ val suffix = sym.moduleSuffix
// Flags.JAVA: "symbol was not defined by a scala-class" (java, or .net-class)
if (sym == definitions.NothingClass)
@@ -1631,7 +1616,7 @@ abstract class GenMSIL extends SubComponent {
if (sym.isNestedClass) sym.simpleName
else sym.fullName
} else
- sym.simpleName.toString().trim()) + suffix
+ sym.simpleName.toString.trim()) + suffix
}
@@ -1722,13 +1707,13 @@ abstract class GenMSIL extends SubComponent {
var entryPoint: Symbol = _
- val notInitializedModules: HashSet[Symbol] = new HashSet()
+ val notInitializedModules = mutable.HashSet[Symbol]()
// TODO: create fields also in def createType, and not in genClass,
// add a getField method (it only works as it is because fields never
// accessed from outside a class)
- val localBuilders: HashMap[Local, LocalBuilder] = new HashMap()
+ val localBuilders = mutable.HashMap[Local, LocalBuilder]()
private[GenMSIL] def findEntryPoint(cls: IClass) {
@@ -1857,8 +1842,7 @@ abstract class GenMSIL extends SubComponent {
else sym.info.parents.distinct
val superType : MsilType = if (isInterface(sym)) null else msilTypeFromSym(parents.head.typeSymbol)
- if (settings.debug.value)
- log("super type: " + parents(0).typeSymbol + ", msil type: " + superType)
+ debuglog("super type: " + parents(0).typeSymbol + ", msil type: " + superType)
val interfaces: Array[MsilType] =
parents.tail.map(p => msilTypeFromSym(p.typeSymbol)).toArray
@@ -1898,8 +1882,7 @@ abstract class GenMSIL extends SubComponent {
for (ifield <- iclass.fields) {
val sym = ifield.symbol
- if (settings.debug.value)
- log("Adding field: " + sym.fullName)
+ debuglog("Adding field: " + sym.fullName)
var attributes = msilFieldFlags(sym)
val fieldTypeWithCustomMods =
@@ -1916,8 +1899,8 @@ abstract class GenMSIL extends SubComponent {
val sc = iclass.lookupStaticCtor
if (sc.isDefined) {
val m = sc.get
- val oldLastBlock = m.code.blocks.last
- val lastBlock = m.code.newBlock
+ val oldLastBlock = m.lastBlock
+ val lastBlock = m.newBlock()
oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
// call object's private ctor from static ctor
lastBlock.emit(CIL_NEWOBJ(iclass.symbol.primaryConstructor))
@@ -1930,8 +1913,7 @@ abstract class GenMSIL extends SubComponent {
if (iclass.symbol != definitions.ArrayClass) {
for (m: IMethod <- iclass.methods) {
val sym = m.symbol
- if (settings.debug.value)
- log("Creating MethodBuilder for " + Flags.flagsToString(sym.flags) + " " +
+ debuglog("Creating MethodBuilder for " + Flags.flagsToString(sym.flags) + " " +
sym.owner.fullName + "::" + sym.name)
val ownerType = getType(sym.enclClass).asInstanceOf[TypeBuilder]
@@ -1957,8 +1939,7 @@ abstract class GenMSIL extends SubComponent {
if (!methods.contains(sym))
mapMethod(sym, method)
addAttributes(method, sym.annotations)
- if (settings.debug.value)
- log("\t created MethodBuilder " + method)
+ debuglog("\t created MethodBuilder " + method)
}
}
} // method builders created for non-array iclass
@@ -1974,7 +1955,7 @@ abstract class GenMSIL extends SubComponent {
} // createClassMembers0
private def isTopLevelModule(sym: Symbol): Boolean =
- atPhase (currentRun.refchecksPhase) {
+ beforeRefchecks {
sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass
}
@@ -1995,8 +1976,7 @@ abstract class GenMSIL extends SubComponent {
}
private def addModuleInstanceField(sym: Symbol) {
- if (settings.debug.value)
- log("Adding Module-Instance Field for " + showsym(sym))
+ debuglog("Adding Module-Instance Field for " + showsym(sym))
val tBuilder = getType(sym).asInstanceOf[TypeBuilder]
val fb = tBuilder.DefineField(MODULE_INSTANCE_NAME,
tBuilder,
@@ -2034,7 +2014,7 @@ abstract class GenMSIL extends SubComponent {
}
def nestingAwareFullClassname(csym: Symbol) : String = {
- val suffix = moduleSuffix(csym)
+ val suffix = csym.moduleSuffix
val res = if (csym.isNestedClass)
nestingAwareFullClassname(csym.owner) + "+" + csym.encodedName
else
@@ -2042,12 +2022,6 @@ abstract class GenMSIL extends SubComponent {
res + suffix
}
- /** cut&pasted from GenJVM */
- def moduleSuffix(sym: Symbol) =
- if (sym.hasFlag(Flags.MODULE) && !sym.isMethod &&
- !sym.isImplClass && !sym.hasFlag(Flags.JAVA)) "$"
- else "";
-
/** Adds a static initializer which creates an instance of the module
* class (calls the primary constructor). A special primary constructor
* will be generated (notInitializedModules) which stores the new instance
@@ -2075,11 +2049,10 @@ abstract class GenMSIL extends SubComponent {
sicode.Emit(OpCodes.Ret)
}
- private def dumpMirrorClass(sym: Symbol) {
+ private def generateMirrorClass(sym: Symbol) {
val tBuilder = getType(sym)
assert(sym.isModuleClass, "Can't generate Mirror-Class for the Non-Module class " + sym)
- if (settings.debug.value)
- log("Dumping mirror class for object: " + sym)
+ debuglog("Dumping mirror class for object: " + sym)
val moduleName = msilName(sym)
val mirrorName = moduleName.substring(0, moduleName.length() - 1)
val mirrorTypeBuilder = mmodule.DefineType(mirrorName,
@@ -2096,8 +2069,7 @@ abstract class GenMSIL extends SubComponent {
m.isMethod && !m.isClassConstructor && !m.isStaticMember && !m.isCase &&
!m.isDeferred)
{
- if (settings.debug.value)
- log(" Mirroring method: " + m)
+ debuglog(" Mirroring method: " + m)
val paramTypes = msilParamTypes(m)
val paramNames: Array[String] = new Array[String](paramTypes.length)
for (i <- 0 until paramTypes.length)
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index cd38a2a..23f932b 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -1,13 +1,11 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Iulian Dragos
*/
-
package scala.tools.nsc
package backend.opt
-import scala.collection.mutable.{Map, HashMap}
import scala.tools.nsc.backend.icode.analysis.LubException
import scala.tools.nsc.symtab._
@@ -37,7 +35,7 @@ abstract class ClosureElimination extends SubComponent {
case (STORE_LOCAL(x), LOAD_LOCAL(y)) if (x == y) =>
var liveOut = liveness.out(bb)
if (!liveOut(x)) {
- log("store/load to a dead local? " + x)
+ debuglog("store/load to a dead local? " + x)
val instrs = bb.getArray
var idx = instrs.length - 1
while (idx > 0 && (instrs(idx) ne i2)) {
@@ -45,7 +43,7 @@ abstract class ClosureElimination extends SubComponent {
idx -= 1
}
if (!liveOut(x)) {
- log("removing dead store/load " + x)
+ log("Removing dead store/load of " + x.sym.initialize.defString)
Some(Nil)
} else None
} else
@@ -86,6 +84,7 @@ abstract class ClosureElimination extends SubComponent {
*/
class ClosureElim {
def analyzeClass(cls: IClass): Unit = if (settings.Xcloselim.value) {
+ log(s"Analyzing ${cls.methods.size} methods in $cls.")
cls.methods foreach { m =>
analyzeMethod(m)
peephole(m)
@@ -96,39 +95,35 @@ abstract class ClosureElimination extends SubComponent {
import copyPropagation._
/* Some embryonic copy propagation. */
- def analyzeMethod(m: IMethod): Unit = try {if (m.code ne null) {
- log("Analyzing " + m)
+ def analyzeMethod(m: IMethod): Unit = try {if (m.hasCode) {
cpp.init(m)
cpp.run
- for (bb <- linearizer.linearize(m)) {
+ m.linearizedBlocks() foreach { bb =>
var info = cpp.in(bb)
- if (settings.debug.value) log("Cpp info at entry to block " + bb + ": " + info)
+ debuglog("Cpp info at entry to block " + bb + ": " + info)
for (i <- bb) {
i match {
case LOAD_LOCAL(l) if info.bindings isDefinedAt LocalVar(l) =>
val t = info.getBinding(l)
t match {
- case Deref(LocalVar(_)) | Deref(This) | Const(_) =>
+ case Deref(This) | Const(_) =>
bb.replaceInstruction(i, valueToInstruction(t));
- log("replaced " + i + " with " + t)
+ debuglog(s"replaced $i with $t")
case _ =>
- bb.replaceInstruction(i, LOAD_LOCAL(info.getAlias(l)))
- log("replaced " + i + " with " + info.getAlias(l))
-
+ val t = info.getAlias(l)
+ bb.replaceInstruction(i, LOAD_LOCAL(t))
+ debuglog(s"replaced $i with $t")
}
case LOAD_FIELD(f, false) /* if accessible(f, m.symbol) */ =>
def replaceFieldAccess(r: Record) {
val Record(cls, bindings) = r
- info.getFieldNonRecordValue(r, f) match {
- case Some(v) =>
- bb.replaceInstruction(i,
- DROP(REFERENCE(cls)) :: valueToInstruction(v) :: Nil);
- log("Replaced " + i + " with " + info.getFieldNonRecordValue(r, f));
- case None =>
+ info.getFieldNonRecordValue(r, f) foreach { v =>
+ bb.replaceInstruction(i, DROP(REFERENCE(cls)) :: valueToInstruction(v) :: Nil)
+ debuglog(s"replaced $i with $v")
}
}
@@ -152,21 +147,21 @@ abstract class ClosureElimination extends SubComponent {
case _ =>
}
- case UNBOX(_) =>
+ case UNBOX(boxType) =>
info.stack match {
case Deref(LocalVar(loc1)) :: _ if info.bindings isDefinedAt LocalVar(loc1) =>
val value = info.getBinding(loc1)
value match {
- case Boxed(LocalVar(loc2)) =>
+ case Boxed(LocalVar(loc2)) if loc2.kind == boxType =>
bb.replaceInstruction(i, DROP(icodes.ObjectReference) :: valueToInstruction(info.getBinding(loc2)) :: Nil)
- log("replaced " + i + " with " + info.getBinding(loc2))
+ debuglog("replaced " + i + " with " + info.getBinding(loc2))
case _ =>
()
}
- case Boxed(LocalVar(loc1)) :: _ =>
+ case Boxed(LocalVar(loc1)) :: _ if loc1.kind == boxType =>
val loc2 = info.getAlias(loc1)
bb.replaceInstruction(i, DROP(icodes.ObjectReference) :: valueToInstruction(Deref(LocalVar(loc2))) :: Nil)
- log("replaced " + i + " with " + LocalVar(loc2))
+ debuglog("replaced " + i + " with " + LocalVar(loc2))
case _ =>
}
@@ -203,28 +198,25 @@ abstract class ClosureElimination extends SubComponent {
/** Peephole optimization. */
abstract class PeepholeOpt {
- private var method: IMethod = null
+ private var method: IMethod = NoIMethod
/** Concrete implementations will perform their optimizations here */
def peep(bb: BasicBlock, i1: Instruction, i2: Instruction): Option[List[Instruction]]
var liveness: global.icodes.liveness.LivenessAnalysis = null
- def apply(m: IMethod): Unit = if (m.code ne null) {
+ def apply(m: IMethod): Unit = if (m.hasCode) {
method = m
liveness = new global.icodes.liveness.LivenessAnalysis
liveness.init(m)
liveness.run
- for (b <- m.code.blocks)
- transformBlock(b)
+ m foreachBlock transformBlock
}
def transformBlock(b: BasicBlock): Unit = if (b.size >= 2) {
- var newInstructions: List[Instruction] = Nil
-
- newInstructions = b.toList
-
+ var newInstructions: List[Instruction] = b.toList
var redo = false
+
do {
var h = newInstructions.head
var t = newInstructions.tail
@@ -234,7 +226,7 @@ abstract class ClosureElimination extends SubComponent {
while (t != Nil) {
peep(b, h, t.head) match {
case Some(newInstrs) =>
- newInstructions = seen.reverse ::: newInstrs ::: t.tail;
+ newInstructions = seen reverse_::: newInstrs ::: t.tail
redo = true
case None =>
()
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index d176ef4..db56f61 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -1,5 +1,5 @@
/* NSC -- new scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Iulian Dragos
*/
@@ -16,6 +16,10 @@ abstract class DeadCodeElimination extends SubComponent {
import global._
import icodes._
import icodes.opcodes._
+ import definitions.RuntimePackage
+
+ /** The block and index where an instruction is located */
+ type InstrLoc = (BasicBlock, Int)
val phaseName = "dce"
@@ -36,13 +40,20 @@ abstract class DeadCodeElimination extends SubComponent {
}
/** closures that are instantiated at least once, after dead code elimination */
- val liveClosures: mutable.Set[Symbol] = new mutable.HashSet()
+ val liveClosures = perRunCaches.newSet[Symbol]()
+
+ /** closures that are eliminated, populated by GenASM.AsmPhase.run()
+ * these class symbols won't have a .class physical file, thus shouldn't be included in InnerClasses JVM attribute,
+ * otherwise some tools get confused or slow (SI-6546)
+ * */
+ val elidedClosures = perRunCaches.newSet[Symbol]()
/** Remove dead code.
*/
class DeadCode {
def analyzeClass(cls: IClass) {
+ log(s"Analyzing ${cls.methods.size} methods in $cls.")
cls.methods.foreach { m =>
this.method = m
dieCodeDie(m)
@@ -53,61 +64,117 @@ abstract class DeadCodeElimination extends SubComponent {
val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis;
/** Use-def chain: give the reaching definitions at the beginning of given instruction. */
- var defs: immutable.Map[(BasicBlock, Int), immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
+ var defs: immutable.Map[InstrLoc, immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
/** Useful instructions which have not been scanned yet. */
- val worklist: mutable.Set[(BasicBlock, Int)] = new mutable.LinkedHashSet
+ val worklist: mutable.Set[InstrLoc] = new mutable.LinkedHashSet
/** what instructions have been marked as useful? */
- val useful: mutable.Map[BasicBlock, mutable.BitSet] = new mutable.HashMap
+ val useful: mutable.Map[BasicBlock, mutable.BitSet] = perRunCaches.newMap()
/** what local variables have been accessed at least once? */
var accessedLocals: List[Local] = Nil
+ /** Map from a local and a basic block to the instructions that store to that local in that basic block */
+ val localStores = mutable.Map[(Local, BasicBlock), mutable.BitSet]() withDefault {_ => mutable.BitSet()}
+
+ /** Stores that clobber previous stores to array or ref locals. See SI-5313 */
+ val clobbers = mutable.Set[InstrLoc]()
+
/** the current method. */
var method: IMethod = _
/** Map instructions who have a drop on some control path, to that DROP instruction. */
- val dropOf: mutable.Map[(BasicBlock, Int), (BasicBlock, Int)] = new mutable.HashMap()
+ val dropOf: mutable.Map[InstrLoc, List[InstrLoc]] = perRunCaches.newMap()
def dieCodeDie(m: IMethod) {
- if (m.code ne null) {
- log("dead code elimination on " + m);
- dropOf.clear
- m.code.blocks.clear
+ if (m.hasCode) {
+ debuglog("dead code elimination on " + m);
+ dropOf.clear()
+ localStores.clear()
+ clobbers.clear()
+ m.code.blocks.clear()
accessedLocals = m.params.reverse
m.code.blocks ++= linearizer.linearize(m)
collectRDef(m)
- mark
+ mark()
sweep(m)
accessedLocals = accessedLocals.distinct
- if (m.locals diff accessedLocals nonEmpty) {
- log("Removed dead locals: " + (m.locals diff accessedLocals))
+ val diff = m.locals diff accessedLocals
+ if (diff.nonEmpty) {
+ val msg = diff.map(_.sym.name)mkString(", ")
+ log(s"Removed ${diff.size} dead locals: $msg")
m.locals = accessedLocals.reverse
}
}
}
/** collect reaching definitions and initial useful instructions for this method. */
- def collectRDef(m: IMethod): Unit = if (m.code ne null) {
- defs = immutable.HashMap.empty; worklist.clear; useful.clear;
+ def collectRDef(m: IMethod): Unit = if (m.hasCode) {
+ defs = immutable.HashMap.empty; worklist.clear(); useful.clear();
rdef.init(m);
rdef.run;
- for (bb <- m.code.blocks.toList) {
+ m foreachBlock { bb =>
useful(bb) = new mutable.BitSet(bb.size)
var rd = rdef.in(bb);
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
+
+ // utility for adding to worklist
+ def moveToWorkList() = moveToWorkListIf(true)
+
+ // utility for (conditionally) adding to worklist
+ def moveToWorkListIf(cond: Boolean) =
+ if (cond) {
+ debuglog("in worklist: " + i)
+ worklist += ((bb, idx))
+ } else {
+ debuglog("not in worklist: " + i)
+ }
+
+ // instruction-specific logic
i match {
- case LOAD_LOCAL(l) =>
+
+ case LOAD_LOCAL(_) =>
defs = defs + Pair(((bb, idx)), rd.vars)
-// Console.println(i + ": " + (bb, idx) + " rd: " + rd + " and having: " + defs)
+ moveToWorkListIf(false)
+
+ case STORE_LOCAL(l) =>
+ /* SI-4935 Check whether a module is stack top, if so mark the instruction that loaded it
+ * (otherwise any side-effects of the module's constructor go lost).
+ * (a) The other two cases where a module's value is stored (STORE_FIELD and STORE_ARRAY_ITEM)
+ * are already marked (case clause below).
+ * (b) A CALL_METHOD targeting a method `m1` where the receiver is potentially a module (case clause below)
+ * will have the module's load marked provided `isSideEffecting(m1)`.
+ * TODO check for purity (the ICode?) of the module's constructor (besides m1's purity).
+ * See also https://github.com/paulp/scala/blob/topic/purity-analysis/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+ */
+ val necessary = rdef.findDefs(bb, idx, 1) exists { p =>
+ val (bb1, idx1) = p
+ bb1(idx1) match {
+ case LOAD_MODULE(module) => isLoadNeeded(module)
+ case _ => false
+ }
+ }
+ moveToWorkListIf(necessary)
+
+ // add it to the localStores map
+ val key = (l, bb)
+ val set = localStores(key)
+ set += idx
+ localStores(key) = set
+
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
- THROW(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
- LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() => worklist += ((bb, idx))
- case CALL_METHOD(m1, _) if isSideEffecting(m1) => worklist += ((bb, idx)); log("marking " + m1)
+ THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
+ LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() =>
+ moveToWorkList()
+
+ case CALL_METHOD(m1, _) if isSideEffecting(m1) =>
+ moveToWorkList()
+
case CALL_METHOD(m1, SuperCall(_)) =>
- worklist += ((bb, idx)) // super calls to constructor
+ moveToWorkList() // super calls to constructor
+
case DROP(_) =>
val necessary = rdef.findDefs(bb, idx, 1) exists { p =>
val (bb1, idx1) = p
@@ -115,44 +182,75 @@ abstract class DeadCodeElimination extends SubComponent {
case CALL_METHOD(m1, _) if isSideEffecting(m1) => true
case LOAD_EXCEPTION(_) | DUP(_) | LOAD_MODULE(_) => true
case _ =>
- dropOf((bb1, idx1)) = (bb, idx)
-// println("DROP is innessential: " + i + " because of: " + bb1(idx1) + " at " + bb1 + ":" + idx1)
+ dropOf((bb1, idx1)) = (bb,idx) :: dropOf.getOrElse((bb1, idx1), Nil)
+ debuglog("DROP is innessential: " + i + " because of: " + bb1(idx1) + " at " + bb1 + ":" + idx1)
false
}
}
- if (necessary) worklist += ((bb, idx))
+ moveToWorkListIf(necessary)
case _ => ()
+ moveToWorkListIf(false)
}
rd = rdef.interpret(bb, idx, rd)
}
}
}
+ private def isLoadNeeded(module: Symbol): Boolean = {
+ module.info.member(nme.CONSTRUCTOR).filter(isSideEffecting) != NoSymbol
+ }
+
/** Mark useful instructions. Instructions in the worklist are each inspected and their
* dependencies are marked useful too, and added to the worklist.
*/
def mark() {
// log("Starting with worklist: " + worklist)
while (!worklist.isEmpty) {
- val (bb, idx) = worklist.iterator.next
+ val (bb, idx) = worklist.head
worklist -= ((bb, idx))
- if (settings.debug.value)
- log("Marking instr: \tBB_" + bb + ": " + idx + " " + bb(idx))
+ debuglog("Marking instr: \tBB_" + bb + ": " + idx + " " + bb(idx))
val instr = bb(idx)
+ // adds the instrutions that define the stack values about to be consumed to the work list to
+ // be marked useful
+ def addDefs() = for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
+ debuglog(s"\t${bb1(idx1)} is consumed by $instr")
+ worklist += ((bb1, idx1))
+ }
+
+ // DROP logic -- if an instruction is useful, its drops are also useful
+ // and we don't mark the DROPs as useful directly but add them to the
+ // worklist so we also mark their reaching defs as useful - see SI-7060
if (!useful(bb)(idx)) {
useful(bb) += idx
- dropOf.get(bb, idx) match {
- case Some((bb1, idx1)) => useful(bb1) += idx1
- case None => ()
+ dropOf.get(bb, idx) foreach {
+ for ((bb1, idx1) <- _) {
+ /*
+ * SI-7060: A drop that we now mark as useful can be reached via several paths,
+ * so we should follow by marking all its reaching definition as useful too:
+ */
+ debuglog("\tAdding: " + bb1(idx1) + " to the worklist, as a useful DROP.")
+ worklist += ((bb1, idx1))
+ }
}
+
+ // per-instruction logic
instr match {
case LOAD_LOCAL(l1) =>
for ((l2, bb1, idx1) <- defs((bb, idx)) if l1 == l2; if !useful(bb1)(idx1)) {
- log("\tAdding " + bb1(idx1))
+ debuglog("\tAdding " + bb1(idx1))
worklist += ((bb1, idx1))
}
+ case STORE_LOCAL(l1) if l1.kind.isRefOrArrayType =>
+ addDefs()
+ // see SI-5313
+ // search for clobbers of this store if we aren't doing l1 = null
+ // this doesn't catch the second store in x=null;l1=x; but in practice this catches
+ // a lot of null stores very cheaply
+ if (idx == 0 || bb(idx - 1) != CONSTANT(Constant(null)))
+ findClobbers(l1, bb, idx + 1)
+
case nw @ NEW(REFERENCE(sym)) =>
assert(nw.init ne null, "null new.init at: " + bb + ": " + idx + "(" + instr + ")")
worklist += findInstruction(bb, nw.init)
@@ -172,26 +270,86 @@ abstract class DeadCodeElimination extends SubComponent {
()
case _ =>
- for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
- log("\tAdding " + bb1(idx1))
- worklist += ((bb1, idx1))
- }
+ addDefs()
}
}
}
}
+ /**
+ * Finds and marks all clobbers of the given local starting in the given
+ * basic block at the given index
+ *
+ * Storing to local variables of reference or array type may be indirectly
+ * observable because it may remove a reference to an object which may allow the object
+ * to be gc'd. See SI-5313. In this code I call the LOCAL_STORE(s) that immediately follow a
+ * LOCAL_STORE and that store to the same local "clobbers." If a LOCAL_STORE is marked
+ * useful then its clobbers must go into the set of clobbers, which will be
+ * compensated for later
+ */
+ def findClobbers(l: Local, bb: BasicBlock, idx: Int) {
+ // previously visited blocks tracked to prevent searching forever in a cycle
+ val inspected = mutable.Set[BasicBlock]()
+ // our worklist of blocks that still need to be checked
+ val blocksToBeInspected = mutable.Set[BasicBlock]()
+
+ // Tries to find the next clobber of l1 in bb1 starting at idx1.
+ // if it finds one it adds the clobber to clobbers set for later
+ // handling. If not it adds the direct successor blocks to
+ // the uninspectedBlocks to try to find clobbers there. Either way
+ // it adds the exception successor blocks for further search
+ def findClobberInBlock(idx1: Int, bb1: BasicBlock) {
+ val key = ((l, bb1))
+ val foundClobber = (localStores contains key) && {
+ def minIdx(s : mutable.BitSet) = if(s.isEmpty) -1 else s.min
+
+ // find the smallest index greater than or equal to idx1
+ val clobberIdx = minIdx(localStores(key) dropWhile (_ < idx1))
+ if (clobberIdx == -1)
+ false
+ else {
+ debuglog(s"\t${bb1(clobberIdx)} is a clobber of ${bb(idx)}")
+ clobbers += ((bb1, clobberIdx))
+ true
+ }
+ }
+
+ // always need to look into the exception successors for additional clobbers
+ // because we don't know when flow might enter an exception handler
+ blocksToBeInspected ++= (bb1.exceptionSuccessors filterNot inspected)
+ // If we didn't find a clobber here then we need to look at successor blocks.
+ // if we found a clobber then we don't need to search in the direct successors
+ if (!foundClobber) {
+ blocksToBeInspected ++= (bb1.directSuccessors filterNot inspected)
+ }
+ }
+
+ // first search starting at the current index
+ // note we don't put bb in the inspected list yet because a loop may later force
+ // us back around to search from the beginning of bb
+ findClobberInBlock(idx, bb)
+ // then loop until we've exhausted the set of uninspected blocks
+ while(!blocksToBeInspected.isEmpty) {
+ val bb1 = blocksToBeInspected.head
+ blocksToBeInspected -= bb1
+ inspected += bb1
+ findClobberInBlock(0, bb1)
+ }
+ }
+
def sweep(m: IMethod) {
val compensations = computeCompensations(m)
- for (bb <- m.code.blocks.toList) {
-// Console.println("** Sweeping block " + bb + " **")
+ debuglog("Sweeping: " + m)
+
+ m foreachBlock { bb =>
+ debuglog(bb + ":")
val oldInstr = bb.toList
bb.open
bb.clear
for (Pair(i, idx) <- oldInstr.zipWithIndex) {
if (useful(bb)(idx)) {
-// log(" " + i + " is useful")
+ debuglog(" * " + i + " is useful")
bb.emit(i, i.pos)
compensations.get(bb, idx) match {
case Some(is) => is foreach bb.emit
@@ -208,30 +366,37 @@ abstract class DeadCodeElimination extends SubComponent {
} else {
i match {
case NEW(REFERENCE(sym)) =>
- log("skipped object creation: " + sym + "inside " + m)
+ log(s"Eliminated instantation of $sym inside $m")
+ case STORE_LOCAL(l) if clobbers contains ((bb, idx)) =>
+ // if an unused instruction was a clobber of a used store to a reference or array type
+ // then we'll replace it with the store of a null to make sure the reference is
+ // eliminated. See SI-5313
+ bb emit CONSTANT(Constant(null))
+ bb emit STORE_LOCAL(l)
case _ => ()
}
- if (settings.debug.value) log("Skipped: bb_" + bb + ": " + idx + "( " + i + ")")
+ debuglog(" " + i + " [swept]")
}
}
if (bb.nonEmpty) bb.close
- else log("empty block encountered")
+ else log(s"empty block encountered in $m")
}
}
- private def computeCompensations(m: IMethod): mutable.Map[(BasicBlock, Int), List[Instruction]] = {
- val compensations: mutable.Map[(BasicBlock, Int), List[Instruction]] = new mutable.HashMap
+ private def computeCompensations(m: IMethod): mutable.Map[InstrLoc, List[Instruction]] = {
+ val compensations: mutable.Map[InstrLoc, List[Instruction]] = new mutable.HashMap
- for (bb <- m.code.blocks) {
+ m foreachBlock { bb =>
assert(bb.closed, "Open block in computeCompensations")
- for ((i, idx) <- bb.toList.zipWithIndex) {
+ foreachWithIndex(bb.toList) { (i, idx) =>
if (!useful(bb)(idx)) {
- for ((consumedType, depth) <- i.consumedTypes.reverse.zipWithIndex) {
- log("Finding definitions of: " + i + "\n\t" + consumedType + " at depth: " + depth)
+ foreachWithIndex(i.consumedTypes.reverse) { (consumedType, depth) =>
+ debuglog("Finding definitions of: " + i + "\n\t" + consumedType + " at depth: " + depth)
val defs = rdef.findDefs(bb, idx, 1, depth)
for (d <- defs) {
val (bb, idx) = d
+ debuglog("rdef: "+ bb(idx))
bb(idx) match {
case DUP(_) if idx > 0 =>
bb(idx - 1) match {
@@ -260,7 +425,7 @@ abstract class DeadCodeElimination extends SubComponent {
res
}
- private def findInstruction(bb: BasicBlock, i: Instruction): (BasicBlock, Int) = {
+ private def findInstruction(bb: BasicBlock, i: Instruction): InstrLoc = {
for (b <- linearizer.linearizeAt(method, bb)) {
val idx = b.toList indexWhere (_ eq i)
if (idx != -1)
@@ -269,15 +434,12 @@ abstract class DeadCodeElimination extends SubComponent {
abort("could not find init in: " + method)
}
+ private def isPure(sym: Symbol) = (
+ (sym.isGetter && sym.isEffectivelyFinal && !sym.isLazy)
+ || (sym.isPrimaryConstructor && (sym.enclosingPackage == RuntimePackage || inliner.isClosureClass(sym.owner)))
+ )
/** Is 'sym' a side-effecting method? TODO: proper analysis. */
- private def isSideEffecting(sym: Symbol): Boolean = {
- !((sym.isGetter && sym.isFinal && !sym.isLazy)
- || (sym.isConstructor
- && !(sym.owner == method.symbol.owner && method.symbol.isConstructor) // a call to another constructor
- && sym.owner.owner == definitions.RuntimePackage.moduleClass)
- || (sym.isConstructor && inliner.isClosureClass(sym.owner))
-/* || definitions.isBox(sym)
- || definitions.isUnbox(sym)*/)
- }
+ private def isSideEffecting(sym: Symbol) = !isPure(sym)
+
} /* DeadCode */
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
new file mode 100644
index 0000000..ab238af
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
@@ -0,0 +1,389 @@
+/* NSC -- new scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ */
+
+package scala.tools.nsc
+package backend.opt
+import scala.util.control.Breaks._
+
+/**
+ * This optimization phase inlines the exception handlers so that further phases can optimize the code better
+ *
+ * {{{
+ * try {
+ * ...
+ * if (condition)
+ * throw IllegalArgumentException("sth")
+ * } catch {
+ * case e: IllegalArgumentException => <handler code>
+ * case e: ... => ...
+ * }
+ * }}}
+ *
+ * will inline the exception handler code to:
+ *
+ * {{{
+ * try {
+ * ...
+ * if (condition)
+ * <handler code> // + jump to the end of the catch statement
+ * } catch {
+ * case e: IllegalArgumentException => <handler code>
+ * case e: ... => ...
+ * }
+ * }}}
+ *
+ * Q: How does the inlining work, ICode level?
+ * A: if a block contains a THROW(A) instruction AND there is a handler that takes A or a superclass of A we do:
+ * 1. We duplicate the handler code such that we can transform THROW into a JUMP
+ * 2. We analyze the handler to see what local it expects the exception to be placed in
+ * 3. We place the exception that is thrown in the correct "local variable" slot and clean up the stack
+ * 4. We finally JUMP to the duplicate handler
+ * All the above logic is implemented in InlineExceptionHandlersPhase.apply(bblock: BasicBlock)
+ *
+ * Q: Why do we need to duplicate the handler?
+ * A: An exception might be thrown in a method that we invoke in the function and we cannot see that THROW command
+ * directly. In order to catch such exceptions, we keep the exception handler in place and duplicate it in order
+ * to inline its code.
+ *
+ * @author Vlad Ureche
+ */
+abstract class InlineExceptionHandlers extends SubComponent {
+ import global._
+ import icodes._
+ import icodes.opcodes._
+
+ val phaseName = "inlineExceptionHandlers"
+
+ /** Create a new phase */
+ override def newPhase(p: Phase) = new InlineExceptionHandlersPhase(p)
+
+ /**
+ * Inlining Exception Handlers
+ */
+ class InlineExceptionHandlersPhase(prev: Phase) extends ICodePhase(prev) {
+ def name = phaseName
+
+ /* This map is used to keep track of duplicated exception handlers
+ * explanation: for each exception handler basic block, there is a copy of it
+ * -some exception handler basic blocks might not be duplicated because they have an unknown format => Option[(...)]
+ * -some exception handler duplicates expect the exception on the stack while others expect it in a local
+ * => Option[Local]
+ */
+ private val handlerCopies = perRunCaches.newMap[BasicBlock, Option[(Option[Local], BasicBlock)]]
+ /* This map is the inverse of handlerCopies, used to compute the stack of duplicate blocks */
+ private val handlerCopiesInverted = perRunCaches.newMap[BasicBlock, (BasicBlock, TypeKind)]
+ private def handlerLocal(bb: BasicBlock): Option[Local] =
+ for (v <- handlerCopies get bb ; (local, block) <- v ; l <- local) yield l
+
+ /* Type Flow Analysis */
+ private val tfa: analysis.MethodTFA = new analysis.MethodTFA()
+ private var tfaCache: Map[Int, tfa.lattice.Elem] = Map.empty
+ private var analyzedMethod: IMethod = NoIMethod
+
+ /* Blocks that need to be analyzed */
+ private var todoBlocks: List[BasicBlock] = Nil
+
+ /* Used only for warnings */
+ private var currentClass: IClass = null
+
+ /** Apply exception handler inlining to a class */
+ override def apply(c: IClass): Unit =
+ if (settings.inlineHandlers.value) {
+ val startTime = System.currentTimeMillis
+ currentClass = c
+
+ debuglog("Starting InlineExceptionHandlers on " + c)
+ c.methods foreach applyMethod
+ debuglog("Finished InlineExceptionHandlers on " + c + "... " + (System.currentTimeMillis - startTime) + "ms")
+ currentClass = null
+ }
+
+ /**
+ * Apply exception handler inlining to a method
+ *
+ * Note: for each exception handling block, we (might) create duplicates. Therefore we iterate until we get to a
+ * fixed point where all the possible handlers have been inlined.
+ *
+ * TODO: Should we have an inlining depth limit? A nested sequence of n try-catch blocks can lead to at most 2n
+ * inlined blocks, so worst case scenario we double the size of the code
+ */
+ private def applyMethod(method: IMethod): Unit = {
+ if (method.hasCode) {
+ // create the list of starting blocks
+ todoBlocks = global.icodes.linearizer.linearize(method)
+
+ while (todoBlocks.nonEmpty) {
+ val levelBlocks = todoBlocks
+ todoBlocks = Nil
+ levelBlocks foreach applyBasicBlock // new blocks will be added to todoBlocks
+ }
+ }
+
+ // Cleanup the references after we finished the file
+ handlerCopies.clear()
+ handlerCopiesInverted.clear()
+ todoBlocks = Nil
+
+ // Type flow analysis cleanup
+ analyzedMethod = NoIMethod
+ tfaCache = Map.empty
+ //TODO: Need a way to clear tfa structures
+ }
+
+ /** Apply exception handler inlining to a basic block */
+ private def applyBasicBlock(bblock: BasicBlock): Unit = {
+ /*
+ * The logic of this entire method:
+ * - for each basic block, we look at each instruction until we find a THROW instruction
+ * - once we found a THROW instruction, we decide if it is DECIDABLE which of handler will catch the exception
+ * (see method findExceptionHandler for more details)
+ * - if we decided there is a handler that will catch the exception, we need to replace the THROW instruction by
+ * a set of equivalent instructions:
+ * * we need to compute the static types of the stack slots
+ * * we need to clear the stack, everything but the exception instance on top (or in a local variable slot)
+ * * we need to JUMP to the duplicate exception handler
+ * - we compute the static types of the stack slots in function getTypesAtInstruction
+ * - we duplicate the exception handler (and we get back the information of whether the duplicate expects the
+ * exception instance on top of the stack or in a local variable slot)
+ * - we compute the necessary code to put the exception in its place, clear the stack and JUMP
+ * - we change the THROW exception to the new Clear stack + JUMP code
+ */
+ for {
+ (instr @ THROW(clazz), index) <- bblock.iterator.zipWithIndex
+ // Decide if any handler fits this exception
+ // If not, then nothing to do, we cannot determine statically which handler will catch the exception
+ (handler, caughtException) <- findExceptionHandler(toTypeKind(clazz.tpe), bblock.exceptionSuccessors)
+ } {
+ log(" Replacing " + instr + " in " + bblock + " to new handler")
+
+ // Solve the stack and drop the element that we already stored, which should be the exception
+ // needs to be done here to be the first thing before code becomes altered
+ val typeInfo = getTypesAtInstruction(bblock, index)
+
+ // Duplicate exception handler
+ duplicateExceptionHandlerCache(handler) match {
+ case None =>
+ log(" Could not duplicate handler for " + instr + " in " + bblock)
+
+ case Some((exceptionLocalOpt, newHandler)) =>
+ val onStackException = typeInfo.head
+ val thrownException = toTypeKind(clazz.tpe)
+
+ // A couple of sanity checks, to make sure we don't touch code we can't safely handle
+ val canReplaceHandler = (
+ typeInfo.nonEmpty
+ && (index == bblock.length - 1)
+ && (onStackException <:< thrownException)
+ )
+ // in other words: what's on the stack MUST conform to what's in the THROW(..)!
+
+ if (!canReplaceHandler) {
+ currentClass.cunit.warning(NoPosition, "Unable to inline the exception handler inside incorrect" +
+ " block:\n" + bblock.iterator.mkString("\n") + "\nwith stack: " + typeInfo + " just " +
+ "before instruction index " + index)
+ }
+ else {
+ // Prepare the new code to replace the THROW instruction
+ val newCode = exceptionLocalOpt match {
+ // the handler duplicate expects the exception in a local: easy one :)
+ case Some(local) =>
+ // in the first cycle we remove the exception Type
+ STORE_LOCAL(local) +: typeInfo.tail.map(x => DROP(x)) :+ JUMP(newHandler)
+
+ // we already have the exception on top of the stack, only need to JUMP
+ case None if typeInfo.length == 1 =>
+ JUMP(newHandler) :: Nil
+
+ // we have the exception on top of the stack but we have other stuff on the stack
+ // create a local, load exception, clear the stack and finally store the exception on the stack
+ case _ =>
+ val exceptionType = typeInfo.head
+ // Here we could create a single local for all exceptions of a certain type. TODO: try that.
+ val localName = currentClass.cunit.freshTermName("exception$")
+ val localType = exceptionType
+ val localSymbol = bblock.method.symbol.newValue(localName).setInfo(localType.toType)
+ val local = new Local(localSymbol, localType, false)
+
+ bblock.method.addLocal(local)
+
+ // Save the exception, drop the stack and place back the exception
+ STORE_LOCAL(local) :: typeInfo.tail.map(x => DROP(x)) ::: List(LOAD_LOCAL(local), JUMP(newHandler))
+ }
+ // replace THROW by the new code
+ bblock.replaceInstruction(instr, newCode)
+
+ // notify the successors changed for the current block
+ // notify the predecessors changed for the inlined handler block
+ bblock.touched = true
+ newHandler.touched = true
+
+ log(" Replaced " + instr + " in " + bblock + " to new handler")
+ log("OPTIMIZED class " + currentClass + " method " +
+ bblock.method + " block " + bblock + " newhandler " +
+ newHandler + ":\n\t\t" + onStackException + " <:< " +
+ thrownException + " <:< " + caughtException)
+
+ }
+ }
+ }
+ }
+
+ /**
+ * Gets the types on the stack at a certain point in the program. Note that we want to analyze the method lazily
+ * and therefore use the analyzedMethod variable
+ */
+ private def getTypesAtInstruction(bblock: BasicBlock, index: Int): List[TypeKind] = {
+ // get the stack at the block entry
+ var typeInfo = getTypesAtBlockEntry(bblock)
+
+ // perform tfa to the current instruction
+ log(" stack at the beginning of block " + bblock + " in function " +
+ bblock.method + ": " + typeInfo.stack)
+ for (i <- 0 to (index - 1)) {
+ typeInfo = tfa.interpret(typeInfo, bblock(i))
+ log(" stack after interpret: " + typeInfo.stack + " after instruction " +
+ bblock(i))
+ }
+ log(" stack before instruction " + index + " of block " + bblock + " in function " +
+ bblock.method + ": " + typeInfo.stack)
+
+ // return the result
+ typeInfo.stack.types
+ }
+
+ /**
+ * Gets the stack at the block entry. Normally the typeFlowAnalysis should be run again, but we know how to compute
+ * the stack for handler duplicates. For the locals, it's safe to assume the info from the original handler is
+ * still valid (a more precise analysis can be done, but it's not necessary)
+ */
+ private def getTypesAtBlockEntry(bblock: BasicBlock): tfa.lattice.Elem = {
+ // lazily perform tfa, because it's expensive
+ // cache results by block label, as rewriting the code messes up the block's hashCode
+ if (analyzedMethod eq NoIMethod) {
+ analyzedMethod = bblock.method
+ tfa.init(bblock.method)
+ tfa.run
+ log(" performed tfa on method: " + bblock.method)
+
+ for (block <- bblock.method.blocks.sortBy(_.label))
+ tfaCache += block.label -> tfa.in(block)
+ }
+
+ log(" getting typeinfo at the beginning of block " + bblock)
+
+ tfaCache.getOrElse(bblock.label, {
+ // this block was not analyzed, but it's a copy of some other block so its stack should be the same
+ log(" getting typeinfo at the beginning of block " + bblock + " as a copy of " +
+ handlerCopiesInverted(bblock))
+ val (origBlock, exception) = handlerCopiesInverted(bblock)
+ val typeInfo = getTypesAtBlockEntry(origBlock)
+ val stack =
+ if (handlerLocal(origBlock).nonEmpty) Nil // empty stack, the handler copy expects an empty stack
+ else List(exception) // one slot on the stack for the exception
+
+ // If we use the mutability property, it crashes the analysis
+ tfa.lattice.IState(new analysis.VarBinding(typeInfo.vars), new icodes.TypeStack(stack))
+ })
+ }
+
+ /**
+ * Finds the first exception handler that matches the current exception
+ *
+ * Note the following code:
+ * {{{
+ * try {
+ * throw new IllegalArgumentException("...")
+ * } catch {
+ * case e: RuntimeException => log("RuntimeException")
+ * case i: IllegalArgumentException => log("IllegalArgumentException")
+ * }
+ * }}}
+ *
+ * will print "RuntimeException" => we need the *first* valid handler
+ *
+ * There's a hidden catch here: say we have the following code:
+ * {{{
+ * try {
+ * val exception: Throwable =
+ * if (scala.util.Random.nextInt % 2 == 0)
+ * new IllegalArgumentException("even")
+ * else
+ * new StackOverflowError("odd")
+ * throw exception
+ * } catch {
+ * case e: IllegalArgumentException =>
+ * println("Correct, IllegalArgumentException")
+ * case e: StackOverflowError =>
+ * println("Correct, StackOverflowException")
+ * case t: Throwable =>
+ * println("WROOOONG, not Throwable!")
+ * }
+ * }}}
+ *
+ * We don't want to select a handler if there's at least one that's more specific!
+ */
+ def findExceptionHandler(thrownException: TypeKind, handlers: List[BasicBlock]): Option[(BasicBlock, TypeKind)] = {
+ for (handler <- handlers ; LOAD_EXCEPTION(clazz) <- handler take 1) {
+ val caughtException = toTypeKind(clazz.tpe)
+ // we'll do inlining here: createdException <:< thrownException <:< caughtException, good!
+ if (thrownException <:< caughtException)
+ return Some((handler, caughtException))
+ // we can't do inlining here, the handling mechanism is more precise than we can reason about
+ if (caughtException <:< thrownException)
+ return None
+ // no result yet, look deeper in the handler stack
+ }
+ None
+ }
+
+ /**
+ * This function takes care of duplicating the basic block code for inlining the handler
+ *
+ * Note: This function does not duplicate the same basic block twice. It wil contain a map of the duplicated
+ * basic blocks
+ */
+ private def duplicateExceptionHandlerCache(handler: BasicBlock) =
+ handlerCopies.getOrElseUpdate(handler, duplicateExceptionHandler(handler))
+
+ /** This function takes care of actual duplication */
+ private def duplicateExceptionHandler(handler: BasicBlock): Option[(Option[Local], BasicBlock)] = {
+ log(" duplicating handler block " + handler)
+
+ handler take 2 match {
+ case Seq(LOAD_EXCEPTION(caughtClass), next) =>
+ val (dropCount, exceptionLocal) = next match {
+ case STORE_LOCAL(local) => (2, Some(local)) // we drop both LOAD_EXCEPTION and STORE_LOCAL
+ case _ => (1, None) // we only drop the LOAD_EXCEPTION and expect the exception on the stack
+ }
+ val caughtException = toTypeKind(caughtClass.tpe)
+ // copy the exception handler code once again, dropping the LOAD_EXCEPTION
+ val copy = handler.code.newBlock
+ copy.emitOnly((handler.iterator drop dropCount).toSeq: _*)
+
+ // extend the handlers of the handler to the copy
+ for (parentHandler <- handler.method.exh ; if parentHandler covers handler) {
+ parentHandler.addCoveredBlock(copy)
+ // notify the parent handler that the successors changed
+ parentHandler.startBlock.touched = true
+ }
+
+ // notify the successors of the inlined handler might have changed
+ copy.touched = true
+ handler.touched = true
+ log(" duplicated handler block " + handler + " to " + copy)
+
+ // announce the duplicate handler
+ handlerCopiesInverted(copy) = ((handler, caughtException))
+ todoBlocks ::= copy
+
+ Some((exceptionLocal, copy))
+
+ case _ =>
+ currentClass.cunit.warning(NoPosition, "Unable to inline the exception handler due to incorrect format:\n" +
+ handler.iterator.mkString("\n"))
+ None
+ }
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 82ca2d0..498db78 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Iulian Dragos
*/
@@ -9,8 +9,32 @@ package backend.opt
import scala.collection.mutable
import scala.tools.nsc.symtab._
+import scala.reflect.internal.util.NoSourceFile
/**
+ * Inliner balances two competing goals:
+ * (a) aggressive inlining of:
+ * (a.1) the apply methods of anonymous closures, so that their anon-classes can be eliminated;
+ * (a.2) higher-order-methods defined in an external library, e.g. `Range.foreach()` among many others.
+ * (b) circumventing the barrier to inter-library inlining that private accesses in the callee impose.
+ *
+ * Summing up the discussion in SI-5442 and SI-5891,
+ * the current implementation achieves to a large degree both goals above, and
+ * overcomes a problem exhibited by previous versions:
+ *
+ * (1) Problem: Attempting to access a private member `p` at runtime resulting in an `IllegalAccessError`,
+ * where `p` is defined in a library L, and is accessed from a library C (for Client),
+ * where C was compiled against L', an optimized version of L where the inliner made `p` public at the bytecode level.
+ * The only such members are fields, either synthetic or isParamAccessor, and thus having a dollar sign in their name
+ * (the accesibility of methods and constructors isn't touched by the inliner).
+ *
+ * Thus we add one more goal to our list:
+ * (c) Compile C (either optimized or not) against any of L or L',
+ * so that it runs with either L or L' (in particular, compile against L' and run with L).
+ *
+ * The chosen strategy is described in some detail in the comments for `accessRequirements()` and `potentiallyPublicized()`.
+ * Documentation at http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/2011Q4/Inliner.pdf
+ *
* @author Iulian Dragos
*/
abstract class Inliners extends SubComponent {
@@ -20,7 +44,7 @@ abstract class Inliners extends SubComponent {
import definitions.{
NullClass, NothingClass, ObjectClass,
PredefModule, RuntimePackage, ScalaInlineClass, ScalaNoInlineClass,
- isFunctionType
+ isFunctionType, isByNameParamType
}
val phaseName = "inliner"
@@ -37,6 +61,35 @@ abstract class Inliners extends SubComponent {
res
}
+ /** Look up implementation of method 'sym in 'clazz'.
+ */
+ def lookupImplFor(sym: Symbol, clazz: Symbol): Symbol = {
+ // TODO: verify that clazz.superClass is equivalent here to clazz.tpe.parents(0).typeSymbol (.tpe vs .info)
+ def needsLookup = (
+ (clazz != NoSymbol)
+ && (clazz != sym.owner)
+ && !sym.isEffectivelyFinal
+ && clazz.isEffectivelyFinal
+ )
+ def lookup(clazz: Symbol): Symbol = {
+ // println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
+ assert(clazz != NoSymbol, "Walked up past Object.superClass looking for " + sym +
+ ", most likely this reveals the TFA at fault (receiver and callee don't match).")
+ if (sym.owner == clazz || isBottomType(clazz)) sym
+ else sym.overridingSymbol(clazz) match {
+ case NoSymbol => if (sym.owner.isTrait) sym else lookup(clazz.superClass)
+ case imp => imp
+ }
+ }
+ if (needsLookup) {
+ val concreteMethod = lookup(clazz)
+ debuglog("\tlooked up method: " + concreteMethod.fullName)
+
+ concreteMethod
+ }
+ else sym
+ }
+
/* A warning threshold */
private final val MAX_INLINE_MILLIS = 2000
@@ -58,194 +111,481 @@ abstract class Inliners extends SubComponent {
def name = phaseName
val inliner = new Inliner
- override def apply(c: IClass) {
- inliner analyzeClass c
+ object iclassOrdering extends Ordering[IClass] {
+ def compare(a: IClass, b: IClass) = {
+ val sourceNamesComparison = (a.cunit.toString() compare b.cunit.toString())
+ if(sourceNamesComparison != 0) sourceNamesComparison
+ else {
+ val namesComparison = (a.toString() compare b.toString())
+ if(namesComparison != 0) namesComparison
+ else {
+ a.symbol.id compare b.symbol.id
+ }
+ }
+ }
+ }
+ val queue = new mutable.PriorityQueue[IClass]()(iclassOrdering)
+
+ override def apply(c: IClass) { queue += c }
+
+ override def run() {
+ knownLacksInline.clear()
+ knownHasInline.clear()
+ try {
+ super.run()
+ for(c <- queue) { inliner analyzeClass c }
+ } finally {
+ inliner.clearCaches()
+ knownLacksInline.clear()
+ knownHasInline.clear()
+ }
}
}
def isBottomType(sym: Symbol) = sym == NullClass || sym == NothingClass
- def posToStr(pos: util.Position) = if (pos.isDefined) pos.point.toString else "<nopos>"
/** Is the given class a closure? */
def isClosureClass(cls: Symbol): Boolean =
cls.isFinal && cls.isSynthetic && !cls.isModuleClass && cls.isAnonymousFunction
+ /*
+ TODO now that Inliner runs faster we could consider additional "monadic methods" (in the limit, all those taking a closure as last arg)
+ Any "monadic method" occurring in a given caller C that is not `isMonadicMethod()` will prevent CloseElim from eliminating
+ any anonymous-closure-class any whose instances are given as argument to C invocations.
+ */
+ def isMonadicMethod(sym: Symbol) = {
+ nme.unspecializedName(sym.name) match {
+ case nme.foreach | nme.filter | nme.withFilter | nme.map | nme.flatMap => true
+ case _ => false
+ }
+ }
+
+ val knownLacksInline = mutable.Set.empty[Symbol] // cache to avoid multiple inliner.hasInline() calls.
+ val knownHasInline = mutable.Set.empty[Symbol] // as above. Motivated by the need to warn on "inliner failures".
+
+ def hasInline(sym: Symbol) = {
+ if (knownLacksInline(sym)) false
+ else if(knownHasInline(sym)) true
+ else {
+ val b = (sym hasAnnotation ScalaInlineClass)
+ if(b) { knownHasInline += sym }
+ else { knownLacksInline += sym }
+
+ b
+ }
+ }
+
+ def hasNoInline(sym: Symbol) = sym hasAnnotation ScalaNoInlineClass
+
/**
* Simple inliner.
*/
class Inliner {
object NonPublicRefs extends Enumeration {
- val Public, Protected, Private = Value
+ val Private, Protected, Public = Value
/** Cache whether a method calls private members. */
- val usesNonPublics: mutable.Map[IMethod, Value] = new mutable.HashMap
+ val usesNonPublics = mutable.Map.empty[IMethod, Value]
}
import NonPublicRefs._
- /* fresh name counter */
- val fresh = new mutable.HashMap[String, Int] withDefaultValue 0
- def freshName(s: String) = {
- fresh(s) += 1
- s + fresh(s)
+ /** The current iclass */
+ private var currentIClazz: IClass = _
+ private def warn(pos: Position, msg: String) = currentIClazz.cunit.inlinerWarning(pos, msg)
+
+ private def ownedName(sym: Symbol): String = afterUncurry {
+ val count = (
+ if (!sym.isMethod) 1
+ else if (sym.owner.isAnonymousFunction) 3
+ else 2
+ )
+ (sym.ownerChain take count filterNot (_.isPackageClass)).reverseMap(_.nameString).mkString(".")
+ }
+ private def inlineLog(what: String, main: => String, comment: => String) {
+ def cstr = comment match {
+ case "" => ""
+ case str => " // " + str
+ }
+ val width = if (currentIClazz eq null) 40 else currentIClazz.symbol.enclosingPackage.fullName.length + 25
+ val fmt = "%8s %-" + width + "s" + cstr
+ log(fmt.format(what, main))
+ }
+ private def inlineLog(what: String, main: Symbol, comment: => String) {
+ inlineLog(what, ownedName(main), comment)
}
- private def hasInline(sym: Symbol) = sym hasAnnotation ScalaInlineClass
- private def hasNoInline(sym: Symbol) = sym hasAnnotation ScalaNoInlineClass
+ val recentTFAs = mutable.Map.empty[Symbol, Tuple2[Boolean, analysis.MethodTFA]]
- /** The current iclass */
- private var currentIClazz: IClass = _
- private def warn(pos: Position, msg: String) = currentIClazz.cunit.warning(pos, msg)
+ private def getRecentTFA(incm: IMethod, forceable: Boolean): (Boolean, analysis.MethodTFA) = {
+
+ def containsRETURN(blocks: List[BasicBlock]) = blocks exists { bb => bb.lastInstruction.isInstanceOf[RETURN] }
+
+ val opt = recentTFAs.get(incm.symbol)
+ if(opt.isDefined) {
+ // FYI val cachedBBs = opt.get._2.in.keySet
+ // FYI assert(incm.blocks.toSet == cachedBBs)
+ // incm.code.touched plays no role here
+ return opt.get
+ }
+
+ val hasRETURN = containsRETURN(incm.code.blocksList) || (incm.exh exists { eh => containsRETURN(eh.blocks) })
+ var a: analysis.MethodTFA = null
+ if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run }
+
+ if(forceable) { recentTFAs.put(incm.symbol, (hasRETURN, a)) }
+
+ (hasRETURN, a)
+ }
+
+ def clearCaches() {
+ // methods
+ NonPublicRefs.usesNonPublics.clear()
+ recentTFAs.clear
+ tfa.knownUnsafe.clear()
+ tfa.knownSafe.clear()
+ tfa.knownNever.clear()
+ // basic blocks
+ tfa.preCandidates.clear()
+ tfa.relevantBBs.clear()
+ // callsites
+ tfa.remainingCALLs.clear()
+ tfa.isOnWatchlist.clear()
+ }
+
+ object imethodOrdering extends Ordering[IMethod] {
+ def compare(a: IMethod, b: IMethod) = {
+ val namesComparison = (a.toString() compare b.toString())
+ if(namesComparison != 0) namesComparison
+ else {
+ a.symbol.id compare b.symbol.id
+ }
+ }
+ }
def analyzeClass(cls: IClass): Unit =
if (settings.inline.value) {
- if (settings.debug.value)
- log("Analyzing " + cls)
+ inlineLog("class", s"${cls.symbol.decodedName}", s"analyzing ${cls.methods.size} methods in $cls")
this.currentIClazz = cls
- cls.methods filterNot (_.symbol.isConstructor) foreach analyzeMethod
+ val ms = cls.methods sorted imethodOrdering
+ ms foreach { im =>
+ if (hasInline(im.symbol)) {
+ inlineLog("skip", im.symbol, "no inlining into @inline methods")
+ }
+ else if(im.hasCode && !im.symbol.isBridge) {
+ analyzeMethod(im)
+ }
+ }
}
- val tfa = new analysis.MethodTFA()
+ val tfa = new analysis.MTFAGrowable()
tfa.stat = global.opt.printStats
+ val staleOut = new mutable.ListBuffer[BasicBlock]
+ val splicedBlocks = mutable.Set.empty[BasicBlock]
+ val staleIn = mutable.Set.empty[BasicBlock]
+
+ /**
+ * A transformation local to the body of the IMethod received as argument.
+ * An linining decision consists in replacing a callsite with the body of the callee.
+ * Please notice that, because `analyzeMethod()` itself may modify a method body,
+ * the particular callee bodies that end up being inlined depend on the particular order in which methods are visited
+ * (no topological sorting over the call-graph is attempted).
+ *
+ * Making an inlining decision requires type-flow information for both caller and callee.
+ * Regarding the caller, such information is needed only for basic blocks containing inlining candidates
+ * (and their transitive predecessors). This observation leads to using a custom type-flow analysis (MTFAGrowable)
+ * that can be re-inited, i.e. that reuses lattice elements (type-flow information computed in a previous iteration)
+ * as starting point for faster convergence in a new iteration.
+ *
+ * The mechanics of inlining are iterative for a given invocation of `analyzeMethod(m)`,
+ * and are affected by inlinings from previous iterations
+ * (ie, "heuristic" rules are based on statistics tracked for that purpose):
+ *
+ * (1) before the iterations proper start, so-called preinlining is performed.
+ * Those callsites whose (receiver, concreteMethod) are both known statically
+ * can be analyzed for inlining before computing a type-flow. Details in `preInline()`
+ *
+ * (2) the first iteration computes type-flow information for basic blocks containing inlining candidates
+ * (and their transitive predecessors), so called `relevantBBs` basic blocks.
+ * The ensuing analysis of each candidate (performed by `analyzeInc()`)
+ * may result in a CFG isomorphic to that of the callee being inserted in place of the callsite
+ * (i.e. a CALL_METHOD instruction is replaced with a single-entry single-exit CFG,
+ * a substitution we call "successful inlining").
+ *
+ * (3) following iterations have `relevantBBs` updated to focus on the inlined basic blocks and their successors only.
+ * Details in `MTFAGrowable.reinit()`
+ * */
+ def analyzeMethod(m: IMethod): Unit = {
+ // m.normalize
+ if (settings.debug.value)
+ inlineLog("caller", ownedName(m.symbol), "in " + m.symbol.owner.fullName)
- // how many times have we already inlined this method here?
- private val inlinedMethodCount: mutable.Map[Symbol, Int] = new mutable.HashMap[Symbol, Int] {
- override def default(k: Symbol) = 0
- }
-
- def analyzeMethod(m: IMethod) {
- var sizeBeforeInlining = if (m.code ne null) m.code.blocks.length else 0
- var instrBeforeInlining = if (m.code ne null) m.code.blocks.foldLeft(0)(_ + _.length) else 0
+ var sizeBeforeInlining = m.code.blockCount
+ var instrBeforeInlining = m.code.instructionCount
var retry = false
var count = 0
- fresh.clear()
- inlinedMethodCount.clear()
+
+ // fresh name counter
+ val fresh = mutable.HashMap.empty[String, Int] withDefaultValue 0
+ // how many times have we already inlined this method here?
+ val inlinedMethodCount = mutable.HashMap.empty[Symbol, Int] withDefaultValue 0
val caller = new IMethodInfo(m)
- var info: tfa.lattice.Elem = null
-
- def analyzeInc(msym: Symbol, i: Instruction, bb: BasicBlock): Boolean = {
- var inlined = false
- def paramTypes = msym.info.paramTypes
- val receiver = (info.stack.types drop paramTypes.length).head match {
- case REFERENCE(s) => s
- case _ => NoSymbol
+ def analyzeMessage = s"Analyzing ${caller.length} blocks of $m for inlining sites."
+
+ def preInline(isFirstRound: Boolean): Int = {
+ val inputBlocks = caller.m.linearizedBlocks()
+ val callsites: Function1[BasicBlock, List[opcodes.CALL_METHOD]] = {
+ if(isFirstRound) tfa.conclusives else tfa.knownBeforehand
+ }
+ inlineWithoutTFA(inputBlocks, callsites)
+ }
+
+ /**
+ * Inline straightforward callsites (those that can be inlined without a TFA).
+ *
+ * To perform inlining, all we need to know is listed as formal params in `analyzeInc()`:
+ * - callsite and block containing it
+ * - actual (ie runtime) class of the receiver
+ * - actual (ie runtime) method being invoked
+ * - stack length just before the callsite (to check whether enough arguments have been pushed).
+ * The assert below lists the conditions under which "no TFA is needed"
+ * (the statically known receiver and method are both final, thus, at runtime they can't be any others than those).
+ *
+ */
+ def inlineWithoutTFA(inputBlocks: Traversable[BasicBlock], callsites: Function1[BasicBlock, List[opcodes.CALL_METHOD]]): Int = {
+ var inlineCount = 0
+ import scala.util.control.Breaks._
+ for(x <- inputBlocks; easyCake = callsites(x); if easyCake.nonEmpty) {
+ breakable {
+ for(ocm <- easyCake) {
+ assert(ocm.method.isEffectivelyFinal && ocm.method.owner.isEffectivelyFinal)
+ if(analyzeInc(ocm, x, ocm.method.owner, -1, ocm.method)) {
+ inlineCount += 1
+ break
+ }
+ }
+ }
+ }
+
+ inlineCount
+ }
+
+ /**
+ * Decides whether it's feasible and desirable to inline the body of the method given by `concreteMethod`
+ * at the program point given by `i` (a callsite). The boolean result indicates whether inlining was performed.
+ *
+ */
+ def analyzeInc(i: CALL_METHOD, bb: BasicBlock, receiver: Symbol, stackLength: Int, concreteMethod: Symbol): Boolean = {
+ assert(bb.toList contains i, "Candidate callsite does not belong to BasicBlock.")
+ val shouldWarn = hasInline(i.method)
+
+ def warnNoInline(reason: String): Boolean = {
+ def msg = "Could not inline required method %s because %s.".format(i.method.originalName.decode, reason)
+ if (settings.debug.value)
+ inlineLog("fail", i.method.fullName, reason)
+ if (shouldWarn)
+ warn(i.pos, msg)
+
+ false
}
- val concreteMethod = lookupImplFor(msym, receiver)
- def warnNoInline(reason: String) = {
- if (hasInline(msym) && !caller.isBridge)
- warn(i.pos, "Could not inline required method %s because %s.".format(msym.originalName.decode, reason))
+ var isAvailable = icodes available concreteMethod.enclClass
+
+ if (!isAvailable && shouldLoadImplFor(concreteMethod, receiver)) {
+ // Until r22824 this line was:
+ // icodes.icode(concreteMethod.enclClass, true)
+ //
+ // Changing it to
+ // icodes.load(concreteMethod.enclClass)
+ // was the proximate cause for SI-3882:
+ // error: Illegal index: 0 overlaps List((variable par1,LONG))
+ // error: Illegal index: 0 overlaps List((variable par1,LONG))
+ isAvailable = icodes.load(concreteMethod.enclClass)
}
- if (shouldLoadImplFor(concreteMethod, receiver))
- icodes.load(concreteMethod.enclClass)
+ def isCandidate = (
+ isClosureClass(receiver)
+ || concreteMethod.isEffectivelyFinal
+ || receiver.isEffectivelyFinal
+ )
- def isAvailable = icodes available concreteMethod.enclClass
- def isCandidate = isClosureClass(receiver) || concreteMethod.isEffectivelyFinal || receiver.isFinal
def isApply = concreteMethod.name == nme.apply
- def isCountable = !(isClosureClass(receiver)
- || isApply
- || isMonadicMethod(concreteMethod)
- || receiver.enclosingPackage == definitions.RuntimePackage
- ) // only count non-closures
-
- if (settings.debug.value)
- log("Treating " + i
+
+ def isCountable = !(
+ isClosureClass(receiver)
+ || isApply
+ || isMonadicMethod(concreteMethod)
+ || receiver.enclosingPackage == definitions.RuntimePackage
+ ) // only count non-closures
+
+ debuglog("Treating " + i
+ "\n\treceiver: " + receiver
+ "\n\ticodes.available: " + isAvailable
+ "\n\tconcreteMethod.isEffectivelyFinal: " + concreteMethod.isEffectivelyFinal)
- if (isAvailable && isCandidate) {
- lookupIMethod(concreteMethod, receiver) match {
- case Some(callee) =>
- val inc = new IMethodInfo(callee)
- val pair = new CallerCalleeInfo(caller, inc)
-
- if (pair isStampedForInlining info.stack) {
- retry = true
- inlined = true
- if (isCountable)
- count += 1
-
- pair.doInline(bb, i)
- if (!inc.inline || inc.isMonadic)
- caller.inlinedCalls += 1
- inlinedMethodCount(inc.sym) += 1
-
- /* Remove this method from the cache, as the calls-private relation
- * might have changed after the inlining.
- */
- usesNonPublics -= m
- }
- else {
- if (settings.debug.value)
- pair logFailure info.stack
+ if (!isCandidate) warnNoInline("it can be overridden")
+ else if (!isAvailable) warnNoInline("bytecode unavailable")
+ else lookupIMethod(concreteMethod, receiver) filter (callee => callee.hasCode || warnNoInline("callee has no code")) exists { callee =>
+ val inc = new IMethodInfo(callee)
+ val pair = new CallerCalleeInfo(caller, inc, fresh, inlinedMethodCount)
- warnNoInline(pair failureReason info.stack)
- }
- case None =>
- warnNoInline("bytecode was not available")
- if (settings.debug.value)
- log("could not find icode\n\treceiver: " + receiver + "\n\tmethod: " + concreteMethod)
+ if (inc.hasHandlers && (stackLength == -1)) {
+ // no inlining is done, yet don't warn about it, stackLength == -1 indicates we're trying to inlineWithoutTFA.
+ // Shortly, a TFA will be computed and an error message reported if indeed inlining not possible.
+ false
+ }
+ else {
+ val isSafe = pair isStampedForInlining stackLength match {
+ case DontInlineHere(msg) => warnNoInline(msg)
+ case NeverSafeToInline => false
+ case InlineableAtThisCaller => true
+ case inl @ FeasibleInline(_, _) if !inl.isSafe => false
+ case FeasibleInline(required, toPublicize) =>
+ for (f <- toPublicize) {
+ inlineLog("access", f, "making public")
+ f setFlag Flags.notPRIVATE
+ f setFlag Flags.notPROTECTED
+ }
+ // only add to `knownSafe` after all `toPublicize` fields actually made public.
+ if (required == NonPublicRefs.Public)
+ tfa.knownSafe += inc.sym
+
+ true
+ }
+ isSafe && {
+ retry = true
+ if (isCountable) count += 1
+ pair.doInline(bb, i)
+ if (!pair.isInlineForced || inc.isMonadic) caller.inlinedCalls += 1
+ inlinedMethodCount(inc.sym) += 1
+
+ // Remove the caller from the cache (this inlining might have changed its calls-private relation).
+ usesNonPublics -= m
+ recentTFAs -= m.symbol
+ true
+ }
}
}
- else warnNoInline(
- if (!isAvailable) "bytecode was not available"
- else "it is not final"
- )
- inlined
}
- import scala.util.control.Breaks._
+ /* Pre-inlining consists in invoking the usual inlining subroutine with (receiver class, concrete method) pairs as input
+ * where both method and receiver are final, which implies that the receiver computed via TFA will always match `concreteMethod.owner`.
+ *
+ * As with any invocation of `analyzeInc()` the inlining outcome is based on heuristics which favor inlining an isMonadicMethod before other methods.
+ * That's why preInline() is invoked twice: any inlinings downplayed by the heuristics during the first round get an opportunity to rank higher during the second.
+ *
+ * As a whole, both `preInline()` invocations amount to priming the inlining process,
+ * so that the first TFA that is run afterwards is able to gain more information as compared to a cold-start.
+ */
+ val totalPreInlines = {
+ val firstRound = preInline(true)
+ if(firstRound == 0) 0 else (firstRound + preInline(false))
+ }
+ staleOut.clear()
+ splicedBlocks.clear()
+ staleIn.clear()
+
do {
retry = false
- if (caller.inline) {
- log("Not inlining into " + caller.sym.originalName.decode + " because it is marked @inline.")
- }
- else if (caller.hasCode) {
- log("Analyzing " + m + " count " + count + " with " + caller.length + " blocks")
- tfa init m
- tfa.run
- caller.linearized foreach { bb =>
- info = tfa in bb
-
- breakable {
- for (i <- bb) {
- i match {
- case CALL_METHOD(msym, Dynamic) =>
- if (analyzeInc(msym, i, bb)) break
- case _ => ()
- }
- info = tfa.interpret(info, i)
+ debuglog(analyzeMessage)
+
+ /* it's important not to inline in unreachable basic blocks. linearizedBlocks() returns only reachable ones. */
+ tfa.callerLin = caller.m.linearizedBlocks()
+ /* TODO Do we really want to inline inside exception handlers?
+ * Seems counterproductive (the larger the method the less likely it will be JITed).
+ * The alternative would be `linearizer.linearizeAt(caller.m, caller.m.startBlock)`.
+ * And, we would cut down on TFA iterations, too.
+ * See also comment on the same topic in TypeFlowAnalysis. */
+
+ tfa.reinit(m, staleOut.toList, splicedBlocks, staleIn)
+ tfa.run
+
+ staleOut.clear()
+ splicedBlocks.clear()
+ staleIn.clear()
+
+ import scala.util.control.Breaks._
+ for(bb <- tfa.callerLin; if tfa.preCandidates(bb)) {
+ val cms = bb.toList collect { case cm : CALL_METHOD => cm }
+ breakable {
+ for (cm <- cms; if tfa.remainingCALLs.isDefinedAt(cm)) {
+ val analysis.CallsiteInfo(_, receiver, stackLength, concreteMethod) = tfa.remainingCALLs(cm)
+ if (analyzeInc(cm, bb, receiver, stackLength, concreteMethod)) {
+ break
}
}
}
+ }
+
+ /* As part of inlining, some instructions are moved to a new block.
+ * In detail: the instructions moved to a new block originally appeared after a (by now inlined) callsite.
+ * Their new home is an `afterBlock` created by `doInline()` to that effect.
+ * Each block in staleIn is one such `afterBlock`.
+ *
+ * Some of those instructions may be CALL_METHOD possibly tracked in `remainingCALLs`
+ * (with an entry still noting the old containing block). However, that causes no problem:
+ *
+ * (1) such callsites won't be analyzed for inlining by `analyzeInc()` (*in this iteration*)
+ * because of the `break` that abandons the original basic block where it was contained.
+ *
+ * (2) Additionally, its new containing block won't be visited either (*in this iteration*)
+ * because the new blocks don't show up in the linearization computed before inlinings started:
+ * `for(bb <- tfa.callerLin; if tfa.preCandidates(bb)) {`
+ *
+ * For a next iteration, the new home of any instructions that have moved
+ * will be tracked properly in `remainingCALLs` after `MTFAGrowable.reinit()` puts on radar their new homes.
+ *
+ */
+ if(retry) {
+ for(afterBlock <- staleIn) {
+ val justCALLsAfter = afterBlock.toList collect { case c : opcodes.CALL_METHOD => c }
+ for(ia <- justCALLsAfter) { tfa.remainingCALLs.remove(ia) }
+ }
+ }
- if (tfa.stat)
- log(m.symbol.fullName + " iterations: " + tfa.iterations + " (size: " + caller.length + ")")
+ /*
+ if(splicedBlocks.nonEmpty) { // TODO explore (saves time but leads to slightly different inlining decisions)
+ // opportunistically perform straightforward inlinings before the next typeflow round
+ val savedRetry = retry
+ val savedStaleOut = staleOut.toSet; staleOut.clear()
+ val savedStaleIn = staleIn.toSet ; staleIn.clear()
+ val howmany = inlineWithoutTFA(splicedBlocks, tfa.knownBeforehand)
+ splicedBlocks ++= staleIn
+ staleOut.clear(); staleOut ++= savedStaleOut;
+ staleIn.clear(); staleIn ++= savedStaleIn;
+ retry = savedRetry
}
+ */
+
+ if (tfa.stat)
+ log(m.symbol.fullName + " iterations: " + tfa.iterations + " (size: " + caller.length + ")")
}
while (retry && count < MAX_INLINE_RETRY)
- m.normalize
- if (sizeBeforeInlining > 0) {
- val instrAfterInlining = m.code.blocks.foldLeft(0)(_ + _.length)
- val prefix = if ((instrAfterInlining > 2 * instrBeforeInlining) && (instrAfterInlining > 200)) " !! " else ""
- log(prefix + " %s blocks before inlining: %d (%d) after: %d (%d)".format(
- m.symbol.fullName, sizeBeforeInlining, instrBeforeInlining, m.code.blocks.length, instrAfterInlining))
+ for(inlFail <- tfa.warnIfInlineFails) {
+ warn(inlFail.pos, "At the end of the day, could not inline @inline-marked method " + inlFail.method.originalName.decode)
}
- }
- private def isMonadicMethod(sym: Symbol) = {
- val (origName, _, _) = nme.splitSpecializedName(sym.name)
- origName match {
- case nme.foreach | nme.filter | nme.withFilter | nme.map | nme.flatMap => true
- case _ => false
+ m.normalize
+ if (sizeBeforeInlining > 0) {
+ val instrAfterInlining = m.code.instructionCount
+ val prefix = if ((instrAfterInlining > 2 * instrBeforeInlining) && (instrAfterInlining > 200)) "!!" else ""
+ val inlinings = caller.inlinedCalls
+ if (inlinings > 0) {
+ val s1 = s"instructions $instrBeforeInlining -> $instrAfterInlining"
+ val s2 = if (sizeBeforeInlining == m.code.blockCount) "" else s", blocks $sizeBeforeInlining -> ${m.code.blockCount}"
+ val callees = inlinedMethodCount.toList map { case (k, v) => k.fullNameString + ( if (v == 1) "" else "/" + v ) }
+
+ inlineLog("inlined", m.symbol.fullName, callees.sorted.mkString(inlinings + " inlined: ", ", ", ""))
+ inlineLog("<<tldr>>", m.symbol.fullName, s"${m.symbol.nameString}: $s1$s2")
+ }
}
}
- private def isHigherOrderMethod(sym: Symbol) =
- sym.isMethod && atPhase(currentRun.erasurePhase.prev)(sym.info.paramTypes exists isFunctionType)
+ private def isHigherOrderMethod(sym: Symbol) = (
+ sym.isMethod
+ && beforeExplicitOuter(sym.info.paramTypes exists isFunctionType) // was "at erasurePhase.prev"
+ )
/** Should method 'sym' being called in 'receiver' be loaded from disk? */
def shouldLoadImplFor(sym: Symbol, receiver: Symbol): Boolean = {
@@ -253,89 +593,195 @@ abstract class Inliners extends SubComponent {
def loadCondition = sym.isEffectivelyFinal && isMonadicMethod(sym) && isHigherOrderMethod(sym)
val res = hasInline(sym) || alwaysLoad || loadCondition
- if (settings.debug.value)
- log("shouldLoadImplFor: " + receiver + "." + sym + ": " + res)
+ debuglog("shouldLoadImplFor: " + receiver + "." + sym + ": " + res)
res
}
- /** Look up implementation of method 'sym in 'clazz'.
- */
- def lookupImplFor(sym: Symbol, clazz: Symbol): Symbol = {
- // TODO: verify that clazz.superClass is equivalent here to clazz.tpe.parents(0).typeSymbol (.tpe vs .info)
- def needsLookup = (clazz != NoSymbol) && (clazz != sym.owner) && !sym.isEffectivelyFinal && clazz.isFinal
-
- def lookup(clazz: Symbol): Symbol = {
- // println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
- if (sym.owner == clazz || isBottomType(clazz)) sym
- else sym.overridingSymbol(clazz) match {
- case NoSymbol => if (sym.owner.isTrait) sym else lookup(clazz.superClass)
- case imp => imp
- }
- }
- if (needsLookup) {
- val concreteMethod = lookup(clazz)
- if (settings.debug.value)
- log("\tlooked up method: " + concreteMethod.fullName)
-
- concreteMethod
- }
- else sym
- }
-
class IMethodInfo(val m: IMethod) {
+ override def toString = m.toString
+
val sym = m.symbol
val name = sym.name
def owner = sym.owner
def paramTypes = sym.info.paramTypes
def minimumStack = paramTypes.length + 1
- def inline = hasInline(sym)
- def noinline = hasNoInline(sym)
- def numInlined = inlinedMethodCount(sym)
-
def isBridge = sym.isBridge
- def isInClosure = isClosureClass(owner)
- def isHigherOrder = isHigherOrderMethod(sym)
+ val isInClosure = isClosureClass(owner)
+ val isHigherOrder = isHigherOrderMethod(sym)
def isMonadic = isMonadicMethod(sym)
def handlers = m.exh
- def blocks = if (m.code eq null) sys.error("blocks = null + " + m) else m.code.blocks
+ def blocks = m.blocks
def locals = m.locals
def length = blocks.length
def openBlocks = blocks filterNot (_.closed)
- def instructions = blocks.flatten
- def linearized = linearizer linearize m
-
- def isSmall = (length <= SMALL_METHOD_SIZE) && blocks(0).length < 10
- def isLarge = length > MAX_INLINE_SIZE
- def isRecursive = m.recursive
- def hasCode = m.code != null
- def hasSourceFile = m.sourceFile != null
- def hasHandlers = handlers.nonEmpty
+ def instructions = m.code.instructions
+ // def linearized = linearizer linearize m
+
+ def isSmall = (length <= SMALL_METHOD_SIZE) && blocks(0).length < 10
+ def isLarge = length > MAX_INLINE_SIZE
+ def isRecursive = m.recursive
+ def hasHandlers = handlers.nonEmpty || m.bytecodeHasEHs
+ def hasClosureParam = paramTypes exists (tp => isByNameParamType(tp) || isFunctionType(tp))
+
+ def isSynchronized = sym.hasFlag(Flags.SYNCHRONIZED)
+ def hasNonFinalizerHandler = handlers exists {
+ case _: Finalizer => true
+ case _ => false
+ }
- // the number of inlined calls in 'm', used by 'shouldInline'
+ // the number of inlined calls in 'm', used by 'isScoreOK'
var inlinedCalls = 0
def addLocals(ls: List[Local]) = m.locals ++= ls
def addLocal(l: Local) = addLocals(List(l))
def addHandlers(exhs: List[ExceptionHandler]) = m.exh = exhs ::: m.exh
+
+ /**
+ * This method inspects the callee's instructions, finding out the most restrictive accessibility implied by them.
+ *
+ * Rather than giving up upon encountering an access to a private field `p`, it provisorily admits `p` as "can-be-made-public", provided:
+ * - `p` is being compiled as part of this compilation run, and
+ * - `p` is synthetic or param-accessor.
+ *
+ * This method is side-effect free, in particular it lets the invoker decide
+ * whether the accessibility of the `toBecomePublic` fields should be changed or not.
+ */
+ def accessRequirements: AccessReq = {
+
+ var toBecomePublic: List[Symbol] = Nil
+
+ def check(sym: Symbol, cond: Boolean) =
+ if (cond) Private
+ else if (sym.isProtected) Protected
+ else Public
+
+ def canMakePublic(f: Symbol): Boolean =
+ (m.sourceFile ne NoSourceFile) &&
+ (f.isSynthetic || f.isParamAccessor) &&
+ { toBecomePublic = f :: toBecomePublic; true }
+
+ /* A safety check to consider as private, for the purposes of inlining, a public field that:
+ * (1) is defined in an external library, and
+ * (2) can be presumed synthetic (due to a dollar sign in its name).
+ * Such field was made public by `doMakePublic()` and we don't want to rely on that,
+ * because under other compilation conditions (ie no -optimize) that won't be the case anymore.
+ *
+ * This allows aggressive intra-library inlining (making public if needed)
+ * that does not break inter-library scenarios (see comment for `Inliners`).
+ *
+ * TODO handle more robustly the case of a trait var changed at the source-level from public to private[this]
+ * (eg by having ICodeReader use unpickler, see SI-5442).
+
+ DISABLED
+
+ def potentiallyPublicized(f: Symbol): Boolean = {
+ (m.sourceFile eq NoSourceFile) && f.name.containsChar('$')
+ }
+ */
+
+ def checkField(f: Symbol) = check(f, f.isPrivate && !canMakePublic(f))
+ def checkSuper(n: Symbol) = check(n, n.isPrivate || !n.isClassConstructor)
+ def checkMethod(n: Symbol) = check(n, n.isPrivate)
+
+ def getAccess(i: Instruction) = i match {
+ case CALL_METHOD(n, SuperCall(_)) => checkSuper(n)
+ case CALL_METHOD(n, _) => checkMethod(n)
+ case LOAD_FIELD(f, _) => checkField(f)
+ case STORE_FIELD(f, _) => checkField(f)
+ case _ => Public
+ }
+
+ var seen = Public
+ val iter = instructions.iterator
+ while((seen ne Private) && iter.hasNext) {
+ val i = iter.next()
+ getAccess(i) match {
+ case Private =>
+ inlineLog("access", s"instruction $i requires private access", "pos=" + i.pos)
+ toBecomePublic = Nil
+ seen = Private
+ case Protected => seen = Protected
+ case _ => ()
+ }
+ }
+
+ AccessReq(seen, toBecomePublic)
+ }
+
}
- class CallerCalleeInfo(val caller: IMethodInfo, val inc: IMethodInfo) {
+ /**
+ * Classifies a pair (caller, callee) into one of four categories:
+ *
+ * (a) inlining should be performed, classified in turn into:
+ * (a.1) `InlineableAtThisCaller`: unconditionally at this caller
+ * (a.2) `FeasibleInline`: it only remains for certain access requirements to be met (see `IMethodInfo.accessRequirements()`)
+ *
+ * (b) inlining shouldn't be performed, classified in turn into:
+ * (b.1) `DontInlineHere`: indicates that this particular occurrence of the callee at the caller shouldn't be inlined.
+ * - Nothing is said about the outcome for other callers, or for other occurrences of the callee for the same caller.
+ * - In particular inlining might be possible, but heuristics gave a low score for it.
+ * (b.2) `NeverSafeToInline`: the callee can't be inlined anywhere, irrespective of caller.
+ *
+ * The classification above is computed by `isStampedForInlining()` based on which `analyzeInc()` goes on to:
+ * - either log the reason for failure --- case (b) ---,
+ * - or perform inlining --- case (a) ---.
+ */
+ sealed abstract class InlineSafetyInfo {
+ def isSafe = false
+ def isUnsafe = !isSafe
+ }
+ case object NeverSafeToInline extends InlineSafetyInfo
+ case object InlineableAtThisCaller extends InlineSafetyInfo { override def isSafe = true }
+ case class DontInlineHere(msg: String) extends InlineSafetyInfo
+ case class FeasibleInline(accessNeeded: NonPublicRefs.Value,
+ toBecomePublic: List[Symbol]) extends InlineSafetyInfo {
+ override def isSafe = true
+ }
+
+ case class AccessReq(
+ accessNeeded: NonPublicRefs.Value,
+ toBecomePublic: List[Symbol]
+ )
+
+ final class CallerCalleeInfo(val caller: IMethodInfo, val inc: IMethodInfo, fresh: mutable.Map[String, Int], inlinedMethodCount: scala.collection.Map[Symbol, Int]) {
+
+ assert(!caller.isBridge && inc.m.hasCode,
+ "A guard in Inliner.analyzeClass() should have prevented from getting here.")
+
def isLargeSum = caller.length + inc.length - 1 > SMALL_METHOD_SIZE
+ private def freshName(s: String): TermName = {
+ fresh(s) += 1
+ newTermName(s + fresh(s))
+ }
+
+ private def isKnownToInlineSafely: Boolean = { tfa.knownSafe(inc.sym) }
+
+ val isInlineForced = hasInline(inc.sym)
+ val isInlineForbidden = hasNoInline(inc.sym)
+ assert(!(isInlineForced && isInlineForbidden), "method ("+inc.m+") marked both @inline and @noinline.")
+
/** Inline 'inc' into 'caller' at the given block and instruction.
* The instruction must be a CALL_METHOD.
*/
- def doInline(block: BasicBlock, instr: Instruction) {
+ def doInline(block: BasicBlock, instr: CALL_METHOD) {
+
+ staleOut += block
+
+ tfa.remainingCALLs.remove(instr) // this bookkpeeping is done here and not in MTFAGrowable.reinit due to (1st) convenience and (2nd) necessity.
+ tfa.isOnWatchlist.remove(instr) // ditto
+ tfa.warnIfInlineFails.remove(instr)
+
val targetPos = instr.pos
- log("Inlining " + inc.m + " in " + caller.m + " at pos: " + posToStr(targetPos))
def blockEmit(i: Instruction) = block.emit(i, targetPos)
def newLocal(baseName: String, kind: TypeKind) =
- new Local(caller.sym.newVariable(targetPos, freshName(baseName)), kind, false)
+ new Local(caller.sym.newVariable(freshName(baseName), targetPos) setInfo kind.toType, kind, false)
- val a = new analysis.MethodTFA(inc.m)
+ val (hasRETURN, a) = getRecentTFA(inc.m, isInlineForced)
/* The exception handlers that are active at the current block. */
val activeHandlers = caller.handlers filter (_ covered block)
@@ -364,7 +810,7 @@ abstract class Inliners extends SubComponent {
case x => newLocal("$retVal", x)
}
- val inlinedLocals: mutable.Map[Local, Local] = new mutable.HashMap
+ val inlinedLocals = mutable.HashMap.empty[Local, Local]
/** Add a new block in the current context. */
def newBlock() = {
@@ -383,9 +829,9 @@ abstract class Inliners extends SubComponent {
handler
}
- /** alfa-rename `l' in caller's context. */
+ /** alfa-rename `l` in caller's context. */
def dupLocal(l: Local): Local = {
- val sym = caller.sym.newVariable(l.sym.pos, freshName(l.sym.name.toString))
+ val sym = caller.sym.newVariable(freshName(l.sym.name.toString), l.sym.pos)
// sym.setInfo(l.sym.tpe)
val dupped = new Local(sym, l.kind, false)
inlinedLocals(l) = dupped
@@ -441,14 +887,11 @@ abstract class Inliners extends SubComponent {
if (retVal ne null)
caller addLocal retVal
- inc.blocks foreach { b =>
+ inc.m foreachBlock { b =>
inlinedBlock += (b -> newBlock())
inlinedBlock(b).varsInScope ++= (b.varsInScope map inlinedLocals)
}
- // analyse callee
- a.run
-
// re-emit the instructions before the call
block.open
block.clear
@@ -459,12 +902,13 @@ abstract class Inliners extends SubComponent {
blockEmit(STORE_LOCAL(inlinedThis))
// jump to the start block of the callee
- blockEmit(JUMP(inlinedBlock(inc.m.code.startBlock)))
+ blockEmit(JUMP(inlinedBlock(inc.m.startBlock)))
block.close
// duplicate the other blocks in the callee
- linearizer linearize inc.m foreach { bb =>
- var info = a in bb
+ val calleeLin = inc.m.linearizedBlocks()
+ calleeLin foreach { bb =>
+ var info = if(hasRETURN) (a in bb) else null
def emitInlined(i: Instruction) = inlinedBlock(bb).emit(i, targetPos)
def emitDrops(toDrop: Int) = info.stack.types drop toDrop foreach (t => emitInlined(DROP(t)))
@@ -480,7 +924,7 @@ abstract class Inliners extends SubComponent {
case _ => ()
}
emitInlined(map(i))
- info = a.interpret(info, i)
+ info = if(hasRETURN) a.interpret(info, i) else null
}
inlinedBlock(bb).close
}
@@ -488,163 +932,143 @@ abstract class Inliners extends SubComponent {
afterBlock emit instrAfter
afterBlock.close
+ staleIn += afterBlock
+ splicedBlocks ++= (calleeLin map inlinedBlock)
+
// add exception handlers of the callee
caller addHandlers (inc.handlers map translateExh)
assert(pending.isEmpty, "Pending NEW elements: " + pending)
if (settings.debug.value) icodes.checkValid(caller.m)
}
- def isStampedForInlining(stack: TypeStack) =
- !sameSymbols && inc.hasCode && shouldInline && isSafeToInline(stack)
-
- def logFailure(stack: TypeStack) = log(
- """|inline failed for %s:
- | pair.sameSymbols: %s
- | inc.numInlined < 2: %s
- | inc.hasCode: %s
- | isSafeToInline: %s
- | shouldInline: %s
- """.stripMargin.format(
- inc.m, sameSymbols, inc.numInlined < 2,
- inc.hasCode, isSafeToInline(stack), shouldInline
- )
- )
+ def isStampedForInlining(stackLength: Int): InlineSafetyInfo = {
- def failureReason(stack: TypeStack) =
- if (!inc.hasCode) "bytecode was unavailable"
- else if (!isSafeToInline(stack)) "it is unsafe (target may reference private fields)"
- else "of a bug (run with -Ylog:inline -Ydebug for more information)"
+ if(tfa.blackballed(inc.sym)) { return NeverSafeToInline }
- def canAccess(level: NonPublicRefs.Value) = level match {
- case Private => caller.owner == inc.owner
- case Protected => caller.owner.tpe <:< inc.owner.tpe
- case Public => true
- }
- private def sameSymbols = caller.sym == inc.sym
+ if(!isKnownToInlineSafely) {
- /** A method is safe to inline when:
- * - it does not contain calls to private methods when
- * called from another class
- * - it is not inlined into a position with non-empty stack,
- * while having a top-level finalizer (see liftedTry problem)
- * - it is not recursive
- * Note:
- * - synthetic private members are made public in this pass.
- */
- def isSafeToInline(stack: TypeStack): Boolean = {
- def makePublic(f: Symbol): Boolean =
- inc.hasSourceFile && (f.isSynthetic || f.isParamAccessor) && {
- if (settings.debug.value)
- log("Making not-private symbol out of synthetic: " + f)
-
- if (f hasFlag Flags.PRIVATE) f setFlag Flags.notPRIVATE
- true
+ if(inc.openBlocks.nonEmpty) {
+ val msg = ("Encountered " + inc.openBlocks.size + " open block(s) in isSafeToInline: this indicates a bug in the optimizer!\n" +
+ " caller = " + caller.m + ", callee = " + inc.m)
+ warn(inc.sym.pos, msg)
+ tfa.knownNever += inc.sym
+ return DontInlineHere("Open blocks in " + inc.m)
}
- if (!inc.hasCode || inc.isRecursive)
- return false
-
- val accessNeeded = usesNonPublics.getOrElseUpdate(inc.m, {
- // Avoiding crashing the compiler if there are open blocks.
- inc.openBlocks foreach { b =>
- warn(inc.sym.pos,
- "Encountered open block in isSafeToInline: this indicates a bug in the optimizer!\n" +
- " caller = " + caller.m + ", callee = " + inc.m
- )
- return false
+ val reasonWhyNever: String = {
+ var rs: List[String] = Nil
+ if(inc.isRecursive) { rs ::= "is recursive" }
+ if(isInlineForbidden) { rs ::= "is annotated @noinline" }
+ if(inc.isSynchronized) { rs ::= "is synchronized method" }
+ if(inc.m.bytecodeHasEHs) { rs ::= "bytecode contains exception handlers / finally clause" } // SI-6188
+ if(inc.m.bytecodeHasInvokeDynamic) { rs ::= "bytecode contains invoke dynamic" }
+ if(rs.isEmpty) null else rs.mkString("", ", and ", "")
}
- def check(sym: Symbol, cond: Boolean) =
- if (cond) Private
- else if (sym.isProtected) Protected
- else Public
-
- def checkField(f: Symbol) = check(f, f.isPrivate && !makePublic(f))
- def checkSuper(m: Symbol) = check(m, m.isPrivate || !m.isClassConstructor)
- def checkMethod(m: Symbol) = check(m, m.isPrivate)
-
- def getAccess(i: Instruction) = i match {
- case CALL_METHOD(m, SuperCall(_)) => checkSuper(m)
- case CALL_METHOD(m, _) => checkMethod(m)
- case LOAD_FIELD(f, _) => checkField(f)
- case STORE_FIELD(f, _) => checkField(f)
- case _ => Public
+
+ if(reasonWhyNever != null) {
+ tfa.knownNever += inc.sym
+ inlineLog("never", inc.sym, reasonWhyNever)
+ // next time around NeverSafeToInline is returned, thus skipping (duplicate) msg, this is intended.
+ return DontInlineHere(inc.m + " " + reasonWhyNever)
}
- def iterate(): NonPublicRefs.Value = {
- var seenProtected = false
- inc.instructions foreach { i =>
- getAccess(i) match {
- case Private => return Private
- case Protected => seenProtected = true
- case _ => ()
- }
- }
- if (seenProtected) Protected else Public
+ if(sameSymbols) { // TODO but this also amounts to recursive, ie should lead to adding to tfa.knownNever, right?
+ tfa.knownUnsafe += inc.sym;
+ return DontInlineHere("sameSymbols (ie caller == callee)")
}
- iterate()
- })
- def isIllegalStack = (stack.length > inc.minimumStack && inc.hasHandlers) || {
- if (settings.debug.value)
- log("method " + inc.sym + " is used on a non-empty stack with finalizer.")
+ }
- false
+ /*
+ * From here on, two main categories of checks remain, (a) and (b) below:
+ * (a.1) either the scoring heuristics give green light; or
+ * (a.2) forced as candidate due to @inline.
+ * After that, safety proper is checked:
+ * (b.1) the callee does not contain calls to private methods when called from another class
+ * (b.2) the callee is not going to be inlined into a position with non-empty stack,
+ * while having a top-level finalizer (see liftedTry problem)
+ * As a result of (b), some synthetic private members can be chosen to become public.
+ */
+
+ val score = inlinerScore
+ val scoreStr = if (score > 0) "+" + score else "" + score
+ val what = if (score > 0) "ok to" else "don't"
+ inlineLog(scoreStr, inc.m.symbol, s"$what inline into ${ownedName(caller.m.symbol)}")
+
+ if (!isInlineForced && score <= 0) {
+ // During inlining retry, a previous caller-callee pair that scored low may pass.
+ // Thus, adding the callee to tfa.knownUnsafe isn't warranted.
+ return DontInlineHere(s"inliner heuristic")
+ }
+
+ if(inc.hasHandlers && (stackLength > inc.minimumStack)) {
+ return DontInlineHere("callee contains exception handlers / finally clause, and is invoked with non-empty operand stack") // SI-6157
+ }
+
+ if(isKnownToInlineSafely) { return InlineableAtThisCaller }
+
+ if(stackLength > inc.minimumStack && inc.hasNonFinalizerHandler) {
+ val msg = "method " + inc.sym + " is used on a non-empty stack with finalizer."
+ debuglog(msg)
+ // FYI: not reason enough to add inc.sym to tfa.knownUnsafe (because at other callsite in this caller, inlining might be ok)
+ return DontInlineHere(msg)
+ }
+
+ val accReq = inc.accessRequirements
+ if(!canAccess(accReq.accessNeeded)) {
+ tfa.knownUnsafe += inc.sym
+ val msg = "access level required by callee not matched by caller"
+ inlineLog("fail", inc.sym, msg)
+ return DontInlineHere(msg)
}
-// if (!canAccess(accessNeeded))
-// println("access needed and failed: " + accessNeeded)
- canAccess(accessNeeded) && !isIllegalStack
+
+ FeasibleInline(accReq.accessNeeded, accReq.toBecomePublic)
+
}
- /** Decide whether to inline or not. Heuristics:
+ def canAccess(level: NonPublicRefs.Value) = level match {
+ case Private => caller.owner == inc.owner
+ case Protected => caller.owner.tpe <:< inc.owner.tpe
+ case Public => true
+ }
+ private def sameSymbols = caller.sym == inc.sym
+ private def sameOwner = caller.owner == inc.owner
+
+ /** Gives green light for inlining (which may still be vetoed later). Heuristics:
* - it's bad to make the caller larger (> SMALL_METHOD_SIZE) if it was small
* - it's bad to inline large methods
* - it's good to inline higher order functions
* - it's good to inline closures functions.
* - it's bad (useless) to inline inside bridge methods
*/
- private def neverInline = caller.isBridge || !inc.hasCode || inc.noinline
- private def alwaysInline = inc.inline
-
- def shouldInline: Boolean = !neverInline && (alwaysInline || {
- if (settings.debug.value)
- log("shouldInline: " + caller.m + " with " + inc.m)
-
+ def inlinerScore: Int = {
var score = 0
- // better not inline inside closures, but hope that the closure itself
- // is repeatedly inlined
- if (caller.isInClosure) score -= 2
+ // better not inline inside closures, but hope that the closure itself is repeatedly inlined
+ if (caller.isInClosure) score -= 2
else if (caller.inlinedCalls < 1) score -= 1 // only monadic methods can trigger the first inline
- if (inc.isSmall)
- score += 1
+ if (inc.isSmall) score += 1;
+ // if (inc.hasClosureParam) score += 2
+ if (inc.isLarge) score -= 1;
if (caller.isSmall && isLargeSum) {
score -= 1
- if (settings.debug.value)
- log("shouldInline: score decreased to " + score + " because small " + caller + " would become large")
+ debuglog(s"inliner score decreased to $score because small caller $caller would become large")
}
- if (inc.isLarge)
- score -= 1
- if (inc.isMonadic)
- score += 3
- else if (inc.isHigherOrder)
- score += 1
- if (inc.isInClosure)
- score += 2
- if (inc.numInlined > 2)
- score -= 2
+ if (inc.isMonadic) score += 3
+ else if (inc.isHigherOrder) score += 1
- log("shouldInline(" + inc.m + ") score: " + score)
-
- score > 0
- })
+ if (inc.isInClosure) score += 2
+ if (inlinedMethodCount(inc.sym) > 2) score -= 2
+ score
+ }
}
def lookupIMethod(meth: Symbol, receiver: Symbol): Option[IMethod] = {
def tryParent(sym: Symbol) = icodes icode sym flatMap (_ lookupMethod meth)
- receiver.info.baseClasses.iterator map tryParent find (_.isDefined) getOrElse None
+ (receiver.info.baseClasses.iterator map tryParent find (_.isDefined)).flatten
}
} /* class Inliner */
} /* class Inliners */
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
index baab027..7f5f412 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
@@ -3,7 +3,7 @@ package dependencies
import symtab.Flags
-import collection._
+import scala.collection._
/** A component that describes the possible changes between successive
* compilations of a class.
@@ -19,7 +19,7 @@ abstract class Changes {
abstract class Change
private lazy val annotationsChecked =
- List(definitions.getClass("scala.specialized")) // Any others that should be checked?
+ List(definitions.SpecializedClass) // Any others that should be checked?
private val flagsToCheck = IMPLICIT | FINAL | PRIVATE | PROTECTED | SEALED |
OVERRIDE | CASE | ABSTRACT | DEFERRED | METHOD |
@@ -52,7 +52,7 @@ abstract class Changes {
private val changedTypeParams = new mutable.HashSet[String]
private def sameParameterSymbolNames(sym1: Symbol, sym2: Symbol): Boolean =
- sameSymbol(sym1, sym2, true) || sym2.encodedName.startsWith(sym1.encodedName + "$") // see #3140
+ sameSymbol(sym1, sym2, true) || sym2.encodedName.startsWith(sym1.encodedName + nme.NAME_JOIN_STRING) // see #3140
private def sameSymbol(sym1: Symbol, sym2: Symbol, simple: Boolean = false): Boolean =
if (simple) sym1.encodedName == sym2.encodedName else sym1.fullName == sym2.fullName
private def sameFlags(sym1: Symbol, sym2: Symbol): Boolean =
diff --git a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
index b2a9387..cdde768 100644
--- a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
@@ -1,10 +1,11 @@
package scala.tools.nsc
package dependencies
-import util.SourceFile
-import io.{ AbstractFile, Path }
-import collection._
+import io.Path
+import scala.collection._
import symtab.Flags
+import scala.tools.nsc.io.AbstractFile
+import scala.reflect.internal.util.SourceFile
trait DependencyAnalysis extends SubComponent with Files {
import global._
@@ -144,10 +145,8 @@ trait DependencyAnalysis extends SubComponent with Files {
val name = d.toString
d.symbol match {
case s : ModuleClassSymbol =>
- val isTopLevelModule =
- atPhase (currentRun.picklerPhase.next) {
- !s.isImplClass && !s.isNestedClass
- }
+ val isTopLevelModule = afterPickler { !s.isImplClass && !s.isNestedClass }
+
if (isTopLevelModule && (s.companionModule != NoSymbol)) {
dependencies.emits(source, nameToFile(unit.source.file, name))
}
@@ -181,16 +180,18 @@ trait DependencyAnalysis extends SubComponent with Files {
|| (tree.symbol.sourceFile.path != file.path))
&& (!tree.symbol.isClassConstructor)) {
updateReferences(tree.symbol.fullName)
- atPhase(currentRun.uncurryPhase.prev) {
- checkType(tree.symbol.tpe)
- }
+ // was "at uncurryPhase.prev", which is actually non-deterministic
+ // because the continuations plugin may or may not supply uncurry's
+ // immediately preceding phase.
+ beforeRefchecks(checkType(tree.symbol.tpe))
}
tree match {
case cdef: ClassDef if !cdef.symbol.hasPackageFlag &&
!cdef.symbol.isAnonymousFunction =>
if (cdef.symbol != NoSymbol) buf += cdef.symbol
- atPhase(currentRun.erasurePhase.prev) {
+ // was "at erasurePhase.prev"
+ beforeExplicitOuter {
for (s <- cdef.symbol.info.decls)
s match {
case ts: TypeSymbol if !ts.isClass =>
@@ -201,9 +202,8 @@ trait DependencyAnalysis extends SubComponent with Files {
super.traverse(tree)
case ddef: DefDef =>
- atPhase(currentRun.typerPhase.prev) {
- checkType(ddef.symbol.tpe)
- }
+ // was "at typer.prev"
+ beforeTyper { checkType(ddef.symbol.tpe) }
super.traverse(tree)
case a @ Select(q, n) if ((a.symbol != NoSymbol) && (q.symbol != null)) => // #2556
if (!a.symbol.isConstructor &&
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
index 43363f7..a091b04 100644
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author David Bernard, Manohar Jonnalagedda
*/
@@ -8,7 +8,7 @@ package doc
import scala.util.control.ControlThrowable
import reporters.Reporter
-import util.NoPosition
+import scala.reflect.internal.util.{ NoPosition, BatchSourceFile}
import io.{ File, Directory }
import DocParser.Parsed
@@ -39,20 +39,23 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
phasesSet += analyzer.namerFactory
phasesSet += analyzer.packageObjects
phasesSet += analyzer.typerFactory
- phasesSet += superAccessors
- phasesSet += pickler
- phasesSet += refchecks
}
override def forScaladoc = true
}
- /** Creates a scaladoc site for all symbols defined in this call's `files`,
- * as well as those defined in `files` of previous calls to the same processor.
- * @param files The list of paths (relative to the compiler's source path,
- * or absolute) of files to document. */
- def makeUniverse(files: List[String]): Option[Universe] = {
+ /** Creates a scaladoc site for all symbols defined in this call's `source`,
+ * as well as those defined in `sources` of previous calls to the same processor.
+ * @param source The list of paths (relative to the compiler's source path,
+ * or absolute) of files to document or the source code. */
+ def makeUniverse(source: Either[List[String], String]): Option[Universe] = {
assert(settings.docformat.value == "html")
- new compiler.Run() compile files
+ source match {
+ case Left(files) =>
+ new compiler.Run() compile files
+ case Right(sourceCode) =>
+ new compiler.Run() compileSources List(new BatchSourceFile("newSource", sourceCode))
+ }
+
if (reporter.hasErrors)
return None
@@ -74,29 +77,34 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
val modelFactory = (
new { override val global: compiler.type = compiler }
with model.ModelFactory(compiler, settings)
- with model.comment.CommentFactory
- with model.TreeFactory {
- override def templateShouldDocument(sym: compiler.Symbol) =
- extraTemplatesToDocument(sym) || super.templateShouldDocument(sym)
+ with model.ModelFactoryImplicitSupport
+ with model.ModelFactoryTypeSupport
+ with model.diagram.DiagramFactory
+ with model.CommentFactory
+ with model.TreeFactory
+ with model.MemberLookup {
+ override def templateShouldDocument(sym: compiler.Symbol, inTpl: DocTemplateImpl) =
+ extraTemplatesToDocument(sym) || super.templateShouldDocument(sym, inTpl)
}
)
modelFactory.makeModel match {
case Some(madeModel) =>
- println("model contains " + modelFactory.templatesCount + " documentable templates")
+ if (!settings.scaladocQuietRun)
+ println("model contains " + modelFactory.templatesCount + " documentable templates")
Some(madeModel)
case None =>
- println("no documentable class found in compilation units")
+ if (!settings.scaladocQuietRun)
+ println("no documentable class found in compilation units")
None
}
-
}
object NoCompilerRunException extends ControlThrowable { }
val documentError: PartialFunction[Throwable, Unit] = {
case NoCompilerRunException =>
- reporter.info(NoPosition, "No documentation generated with unsucessful compiler run", false)
+ reporter.info(null, "No documentation generated with unsucessful compiler run", false)
case _: ClassNotFoundException =>
()
}
@@ -111,7 +119,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
docletInstance match {
case universer: Universer =>
- val universe = makeUniverse(files) getOrElse { throw NoCompilerRunException }
+ val universe = makeUniverse(Left(files)) getOrElse { throw NoCompilerRunException }
universer setUniverse universe
docletInstance match {
diff --git a/src/compiler/scala/tools/nsc/doc/DocParser.scala b/src/compiler/scala/tools/nsc/doc/DocParser.scala
index 2bd80f3..27c995e 100644
--- a/src/compiler/scala/tools/nsc/doc/DocParser.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocParser.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -8,7 +8,7 @@ package nsc
package doc
import reporters._
-import util._
+import scala.reflect.internal.util._
import interactive.RangePositions
import DocParser.Parsed
diff --git a/src/compiler/scala/tools/nsc/doc/Index.scala b/src/compiler/scala/tools/nsc/doc/Index.scala
index b0ca23f..f9b9eec 100644
--- a/src/compiler/scala/tools/nsc/doc/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/Index.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala
index 6c74951..8c0628c 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/doc/Settings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -7,11 +7,12 @@ package scala.tools.nsc
package doc
import java.io.File
-import java.lang.System
+import scala.language.postfixOps
/** An extended version of compiler settings, with additional Scaladoc-specific options.
- * @param error A function that prints a string to the appropriate error stream. */
-class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
+ * @param error A function that prints a string to the appropriate error stream
+ * @param print A function that prints the string, without any extra boilerplate of error */
+class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) extends scala.tools.nsc.Settings(error) {
/** A setting that defines in which format the documentation is output. ''Note:'' this setting is currently always
* `html`. */
@@ -69,6 +70,12 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
""
)
+ val docExternalDoc = MultiStringSetting (
+ "-doc-external-doc",
+ "external-doc",
+ "comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies."
+ )
+
val useStupidTypes = BooleanSetting (
"-Yuse-stupid-types",
"Print the types of inherited members as seen from their original definition context. Hint: you don't want to do that!"
@@ -87,6 +94,122 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
""
)
+ val docImplicits = BooleanSetting (
+ "-implicits",
+ "Document members inherited by implicit conversions."
+ )
+
+ val docImplicitsDebug = BooleanSetting (
+ "-implicits-debug",
+ "Show debugging information for members inherited by implicit conversions."
+ )
+
+ val docImplicitsShowAll = BooleanSetting (
+ "-implicits-show-all",
+ "Show members inherited by implicit conversions that are impossible in the default scope. " +
+ "(for example conversions that require Numeric[String] to be in scope)"
+ )
+
+ val docImplicitsSoundShadowing = BooleanSetting (
+ "-implicits-sound-shadowing",
+ "Use a sound implicit shadowing calculation. Note: this interacts badly with usecases, so " +
+ "only use it if you haven't defined usecase for implicitly inherited members."
+ )
+
+ val docImplicitsHide = MultiStringSetting (
+ "-implicits-hide",
+ "implicit(s)",
+ "Hide the members inherited by the given comma separated, fully qualified implicit conversions. Add dot (.) to include default conversions."
+ )
+
+ val docDiagrams = BooleanSetting (
+ "-diagrams",
+ "Create inheritance diagrams for classes, traits and packages."
+ )
+
+ val docDiagramsDebug = BooleanSetting (
+ "-diagrams-debug",
+ "Show debugging information for the diagram creation process."
+ )
+
+ val docDiagramsDotPath = PathSetting (
+ "-diagrams-dot-path",
+ "The path to the dot executable used to generate the inheritance diagrams. Eg: /usr/bin/dot",
+ "dot" // by default, just pick up the system-wide dot
+ )
+
+ /** The maxium nuber of normal classes to show in the diagram */
+ val docDiagramsMaxNormalClasses = IntSetting(
+ "-diagrams-max-classes",
+ "The maximum number of superclasses or subclasses to show in a diagram",
+ 15,
+ None,
+ _ => None
+ )
+
+ /** The maxium nuber of implcit classes to show in the diagram */
+ val docDiagramsMaxImplicitClasses = IntSetting(
+ "-diagrams-max-implicits",
+ "The maximum number of implicitly converted classes to show in a diagram",
+ 10,
+ None,
+ _ => None
+ )
+
+ val docDiagramsDotTimeout = IntSetting(
+ "-diagrams-dot-timeout",
+ "The timeout before the graphviz dot util is forcefully closed, in seconds (default: 10)",
+ 10,
+ None,
+ _ => None
+ )
+
+ val docDiagramsDotRestart = IntSetting(
+ "-diagrams-dot-restart",
+ "The number of times to restart a malfunctioning dot process before disabling diagrams (default: 5)",
+ 5,
+ None,
+ _ => None
+ )
+
+ val docRawOutput = BooleanSetting (
+ "-raw-output",
+ "For each html file, create another .html.raw file containing only the text. (can be used for quickly diffing two scaladoc outputs)"
+ )
+
+ val docNoPrefixes = BooleanSetting (
+ "-no-prefixes",
+ "Prevents generating prefixes in types, possibly creating ambiguous references, but significantly speeding up scaladoc."
+ )
+
+ val docNoLinkWarnings = BooleanSetting (
+ "-no-link-warnings",
+ "Avoid warnings for ambiguous and incorrect links."
+ )
+
+ val docSkipPackages = StringSetting (
+ "-skip-packages",
+ "<package1>:...:<packageN>",
+ "A colon-delimited list of fully qualified package names that will be skipped from scaladoc.",
+ ""
+ )
+
+ val docExpandAllTypes = BooleanSetting (
+ "-expand-all-types",
+ "Expand all type aliases and abstract types into full template pages. (locally this can be done with the @template annotation)"
+ )
+
+ val docExternalUrls = MultiStringSetting (
+ "-external-urls",
+ "externalUrl(s)",
+ "(deprecated) comma-separated list of package_names=doc_URL for external dependencies, where package names are ':'-separated"
+ )
+
+ val docGroups = BooleanSetting (
+ "-groups",
+ "Group similar functions together (based on the @group annotation)"
+ )
+
// Somewhere slightly before r18708 scaladoc stopped building unless the
// self-type check was suppressed. I hijacked the slotted-for-removal-anyway
// suppress-vt-warnings option and renamed it for this purpose.
@@ -94,7 +217,149 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
// For improved help output.
def scaladocSpecific = Set[Settings#Setting](
- docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator
+ docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes,
+ docDiagrams, docDiagramsDebug, docDiagramsDotPath,
+ docDiagramsDotTimeout, docDiagramsDotRestart,
+ docImplicits, docImplicitsDebug, docImplicitsShowAll, docImplicitsHide,
+ docDiagramsMaxNormalClasses, docDiagramsMaxImplicitClasses,
+ docNoPrefixes, docNoLinkWarnings, docRawOutput, docSkipPackages,
+ docExpandAllTypes, docGroups
)
val isScaladocSpecific: String => Boolean = scaladocSpecific map (_.name)
+
+ override def isScaladoc = true
+
+ // set by the testsuite, when checking test output
+ var scaladocQuietRun = false
+
+ lazy val skipPackageNames =
+ if (docSkipPackages.value == "")
+ Set[String]()
+ else
+ docSkipPackages.value.toLowerCase.split(':').toSet
+
+ def skipPackage(qname: String) =
+ skipPackageNames(qname.toLowerCase)
+
+ lazy val hiddenImplicits: Set[String] = {
+ if (docImplicitsHide.value.isEmpty) hardcoded.commonConversionTargets
+ else docImplicitsHide.value.toSet flatMap { name: String =>
+ if(name == ".") hardcoded.commonConversionTargets
+ else Set(name)
+ }
+ }
+
+ def appendIndex(url: String): String = url.stripSuffix("index.html").stripSuffix("/") + "/index.html"
+
+ // Deprecated together with 'docExternalUrls' option.
+ lazy val extUrlPackageMapping: Map[String, String] = (Map.empty[String, String] /: docExternalUrls.value) {
+ case (map, binding) =>
+ val idx = binding indexOf "="
+ val pkgs = binding substring (0, idx) split ":"
+ val url = appendIndex(binding substring (idx + 1))
+ map ++ (pkgs map (_ -> url))
+ }
+
+ lazy val extUrlMapping: Map[String, String] = docExternalDoc.value flatMap { s =>
+ val idx = s.indexOf("#")
+ if (idx > 0) {
+ val (first, last) = s.splitAt(idx)
+ Some(new File(first).getAbsolutePath -> appendIndex(last.substring(1)))
+ } else {
+ error(s"Illegal -doc-external-doc option; expected a pair with '#' separator, found: '$s'")
+ None
+ }
+ } toMap
+
+ /**
+ * This is the hardcoded area of Scaladoc. This is where "undesirable" stuff gets eliminated. I know it's not pretty,
+ * but ultimately scaladoc has to be useful. :)
+ */
+ object hardcoded {
+
+ /** The common context bounds and some humanly explanations. Feel free to add more explanations
+ * `<root>.scala.package.Numeric` is the type class
+ * `tparam` is the name of the type parameter it gets (this only describes type classes with 1 type param)
+ * the function result should be a humanly-understandable description of the type class
+ */
+ val knownTypeClasses: Map[String, String => String] = Map() +
+ ("scala.math.Numeric" -> ((tparam: String) => tparam + " is a numeric class, such as Int, Long, Float or Double")) +
+ ("scala.math.Integral" -> ((tparam: String) => tparam + " is an integral numeric class, such as Int or Long")) +
+ ("scala.math.Fractional" -> ((tparam: String) => tparam + " is a fractional numeric class, such as Float or Double")) +
+ ("scala.reflect.Manifest" -> ((tparam: String) => tparam + " is accompanied by a Manifest, which is a runtime representation of its type that survives erasure")) +
+ ("scala.reflect.ClassManifest" -> ((tparam: String) => tparam + " is accompanied by a ClassManifest, which is a runtime representation of its type that survives erasure")) +
+ ("scala.reflect.OptManifest" -> ((tparam: String) => tparam + " is accompanied by an OptManifest, which can be either a runtime representation of its type or the NoManifest, which means the runtime type is not available")) +
+ ("scala.reflect.ClassTag" -> ((tparam: String) => tparam + " is accompanied by a ClassTag, which is a runtime representation of its type that survives erasure")) +
+ ("scala.reflect.api.TypeTags.WeakTypeTag" -> ((tparam: String) => tparam + " is accompanied by an WeakTypeTag, which is a runtime representation of its type that survives erasure")) +
+ ("scala.reflect.api.TypeTags.TypeTag" -> ((tparam: String) => tparam + " is accompanied by a TypeTag, which is a runtime representation of its type that survives erasure"))
+
+ /**
+ * Set of classes to exclude from index and diagrams
+ * TODO: Should be configurable
+ */
+ def isExcluded(qname: String) = {
+ ( ( qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") ||
+ qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction")
+ ) && !(
+ qname == "scala.Tuple1" || qname == "scala.Tuple2" ||
+ qname == "scala.Product" || qname == "scala.Product1" || qname == "scala.Product2" ||
+ qname == "scala.Function" || qname == "scala.Function1" || qname == "scala.Function2" ||
+ qname == "scala.runtime.AbstractFunction0" || qname == "scala.runtime.AbstractFunction1" ||
+ qname == "scala.runtime.AbstractFunction2"
+ )
+ )
+ }
+
+ /** Common conversion targets that affect any class in Scala */
+ val commonConversionTargets = Set(
+ "scala.Predef.any2stringfmt",
+ "scala.Predef.any2stringadd",
+ "scala.Predef.any2ArrowAssoc",
+ "scala.Predef.any2Ensuring",
+ "scala.collection.TraversableOnce.alternateImplicit")
+
+ /** There's a reason all these are specialized by hand but documenting each of them is beyond the point */
+ val arraySkipConversions = List(
+ "scala.Predef.refArrayOps",
+ "scala.Predef.intArrayOps",
+ "scala.Predef.doubleArrayOps",
+ "scala.Predef.longArrayOps",
+ "scala.Predef.floatArrayOps",
+ "scala.Predef.charArrayOps",
+ "scala.Predef.byteArrayOps",
+ "scala.Predef.shortArrayOps",
+ "scala.Predef.booleanArrayOps",
+ "scala.Predef.unitArrayOps",
+ "scala.LowPriorityImplicits.wrapRefArray",
+ "scala.LowPriorityImplicits.wrapIntArray",
+ "scala.LowPriorityImplicits.wrapDoubleArray",
+ "scala.LowPriorityImplicits.wrapLongArray",
+ "scala.LowPriorityImplicits.wrapFloatArray",
+ "scala.LowPriorityImplicits.wrapCharArray",
+ "scala.LowPriorityImplicits.wrapByteArray",
+ "scala.LowPriorityImplicits.wrapShortArray",
+ "scala.LowPriorityImplicits.wrapBooleanArray",
+ "scala.LowPriorityImplicits.wrapUnitArray",
+ "scala.LowPriorityImplicits.genericWrapArray")
+
+ // included as names as here we don't have access to a Global with Definitions :(
+ def valueClassList = List("unit", "boolean", "byte", "short", "char", "int", "long", "float", "double")
+ def valueClassFilterPrefixes = List("scala.LowPriorityImplicits", "scala.Predef")
+
+ /** Dirty, dirty, dirty hack: the value params conversions can all kick in -- and they are disambiguated by priority
+ * but showing priority in scaladoc would make no sense -- so we have to manually remove the conversions that we
+ * know will never get a chance to kick in. Anyway, DIRTY DIRTY DIRTY! */
+ def valueClassFilter(value: String, conversionName: String): Boolean = {
+ val valueName = value.toLowerCase
+ val otherValues = valueClassList.filterNot(_ == valueName)
+
+ for (prefix <- valueClassFilterPrefixes)
+ if (conversionName.startsWith(prefix))
+ for (otherValue <- otherValues)
+ if (conversionName.startsWith(prefix + "." + otherValue))
+ return false
+
+ true
+ }
+ }
}
diff --git a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
index 9b29ebd..d3e5c86 100644
--- a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
+++ b/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
@@ -1,10 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
package doc
+import scala.language.implicitConversions
+import scala.language.postfixOps
/** Some glue between DocParser (which reads source files which can't be compiled)
* and the scaladoc model.
@@ -14,7 +16,8 @@ trait Uncompilable {
val settings: Settings
import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, Name, DocComment, NoSymbol }
- import global.definitions.RootClass
+ import global.definitions.AnyRefClass
+ import global.rootMirror.RootClass
private implicit def translateName(name: Global#Name) =
if (name.isTypeName) newTypeName("" + name) else newTermName("" + name)
@@ -32,7 +35,7 @@ trait Uncompilable {
}
def files = settings.uncompilableFiles
def symbols = pairs map (_._1)
- def templates = symbols filter (x => x.isClass || x.isTrait) toSet
+ def templates = symbols filter (x => x.isClass || x.isTrait || x == AnyRefClass/* which is now a type alias */) toSet
def comments = {
if (settings.debug.value || settings.verbose.value)
inform("Found %d uncompilable files: %s".format(files.size, files mkString ", "))
diff --git a/src/compiler/scala/tools/nsc/doc/Universe.scala b/src/compiler/scala/tools/nsc/doc/Universe.scala
index fe0ef71..11520c8 100644
--- a/src/compiler/scala/tools/nsc/doc/Universe.scala
+++ b/src/compiler/scala/tools/nsc/doc/Universe.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
diff --git a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala
new file mode 100755
index 0000000..f509c63
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala
@@ -0,0 +1,955 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2012 LAMP/EPFL
+ * @author Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package base
+
+import base.comment._
+import reporters.Reporter
+import scala.collection._
+import scala.util.matching.Regex
+import scala.annotation.switch
+import scala.reflect.internal.util.{NoPosition, Position}
+import scala.language.postfixOps
+
+/** The comment parser transforms raw comment strings into `Comment` objects.
+ * Call `parse` to run the parser. Note that the parser is stateless and
+ * should only be built once for a given Scaladoc run.
+ *
+ * @param reporter The reporter on which user messages (error, warnings) should be printed.
+ *
+ * @author Manohar Jonnalagedda
+ * @author Gilles Dubochet */
+trait CommentFactoryBase { this: MemberLookupBase =>
+
+ val global: Global
+ import global.{ reporter, definitions, Symbol }
+
+ /* Creates comments with necessary arguments */
+ def createComment (
+ body0: Option[Body] = None,
+ authors0: List[Body] = List.empty,
+ see0: List[Body] = List.empty,
+ result0: Option[Body] = None,
+ throws0: Map[String,Body] = Map.empty,
+ valueParams0: Map[String,Body] = Map.empty,
+ typeParams0: Map[String,Body] = Map.empty,
+ version0: Option[Body] = None,
+ since0: Option[Body] = None,
+ todo0: List[Body] = List.empty,
+ deprecated0: Option[Body] = None,
+ note0: List[Body] = List.empty,
+ example0: List[Body] = List.empty,
+ constructor0: Option[Body] = None,
+ source0: Option[String] = None,
+ inheritDiagram0: List[String] = List.empty,
+ contentDiagram0: List[String] = List.empty,
+ group0: Option[Body] = None,
+ groupDesc0: Map[String,Body] = Map.empty,
+ groupNames0: Map[String,Body] = Map.empty,
+ groupPrio0: Map[String,Body] = Map.empty
+ ) : Comment = new Comment{
+ val body = if(body0 isDefined) body0.get else Body(Seq.empty)
+ val authors = authors0
+ val see = see0
+ val result = result0
+ val throws = throws0
+ val valueParams = valueParams0
+ val typeParams = typeParams0
+ val version = version0
+ val since = since0
+ val todo = todo0
+ val deprecated = deprecated0
+ val note = note0
+ val example = example0
+ val constructor = constructor0
+ val source = source0
+ val inheritDiagram = inheritDiagram0
+ val contentDiagram = contentDiagram0
+ val groupDesc = groupDesc0
+ val group =
+ group0 match {
+ case Some(Body(List(Paragraph(Chain(List(Summary(Text(groupId)))))))) => Some(groupId.toString.trim)
+ case _ => None
+ }
+ val groupPrio = groupPrio0 flatMap {
+ case (group, body) =>
+ try {
+ body match {
+ case Body(List(Paragraph(Chain(List(Summary(Text(prio))))))) => List(group -> prio.trim.toInt)
+ case _ => List()
+ }
+ } catch {
+ case _: java.lang.NumberFormatException => List()
+ }
+ }
+ val groupNames = groupNames0 flatMap {
+ case (group, body) =>
+ try {
+ body match {
+ case Body(List(Paragraph(Chain(List(Summary(Text(name))))))) if (!name.trim.contains("\n")) => List(group -> (name.trim))
+ case _ => List()
+ }
+ } catch {
+ case _: java.lang.NumberFormatException => List()
+ }
+ }
+
+ }
+
+ private val endOfText = '\u0003'
+ private val endOfLine = '\u000A'
+
+ /** Something that should not have happened, happened, and Scaladoc should exit. */
+ private def oops(msg: String): Nothing =
+ throw FatalError("program logic: " + msg)
+
+ /** The body of a line, dropping the (optional) start star-marker,
+ * one leading whitespace and all trailing whitespace. */
+ private val CleanCommentLine =
+ new Regex("""(?:\s*\*\s?)?(.*)""")
+
+ /** Dangerous HTML tags that should be replaced by something safer,
+ * such as wiki syntax, or that should be dropped. */
+ private val DangerousTags =
+ new Regex("""<(/?(div|ol|ul|li|h[1-6]|p))( [^>]*)?/?>|<!--.*-->""")
+
+ /** Maps a dangerous HTML tag to a safe wiki replacement, or an empty string
+ * if it cannot be salvaged. */
+ private def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match {
+ case "p" | "div" => "\n\n"
+ case "h1" => "\n= "
+ case "/h1" => " =\n"
+ case "h2" => "\n== "
+ case "/h2" => " ==\n"
+ case "h3" => "\n=== "
+ case "/h3" => " ===\n"
+ case "h4" | "h5" | "h6" => "\n==== "
+ case "/h4" | "/h5" | "/h6" => " ====\n"
+ case "li" => "\n * - "
+ case _ => ""
+ }
+
+ /** Javadoc tags that should be replaced by something useful, such as wiki
+ * syntax, or that should be dropped. */
+ private val JavadocTags =
+ new Regex("""\{\@(code|docRoot|inheritDoc|link|linkplain|literal|value)([^}]*)\}""")
+
+ /** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. */
+ private def javadocReplacement(mtch: Regex.Match): String = mtch.group(1) match {
+ case "code" => "`" + mtch.group(2) + "`"
+ case "docRoot" => ""
+ case "inheritDoc" => ""
+ case "link" => "`" + mtch.group(2) + "`"
+ case "linkplain" => "`" + mtch.group(2) + "`"
+ case "literal" => mtch.group(2)
+ case "value" => "`" + mtch.group(2) + "`"
+ case _ => ""
+ }
+
+ /** Safe HTML tags that can be kept. */
+ private val SafeTags =
+ new Regex("""((&\w+;)|(&#\d+;)|(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|cite|code|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>))""")
+
+ private val safeTagMarker = '\u000E'
+
+ /** A Scaladoc tag not linked to a symbol and not followed by text */
+ private val SingleTagRegex =
+ new Regex("""\s*@(\S+)\s*""")
+
+ /** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */
+ private val SimpleTagRegex =
+ new Regex("""\s*@(\S+)\s+(.*)""")
+
+ /** A Scaladoc tag linked to a symbol. Returns the name of the tag, the name
+ * of the symbol, and the rest of the line. */
+ private val SymbolTagRegex =
+ new Regex("""\s*@(param|tparam|throws|groupdesc|groupname|groupprio)\s+(\S*)\s*(.*)""")
+
+ /** The start of a scaladoc code block */
+ private val CodeBlockStartRegex =
+ new Regex("""(.*?)((?:\{\{\{)|(?:\u000E<pre(?: [^>]*)?>\u000E))(.*)""")
+
+ /** The end of a scaladoc code block */
+ private val CodeBlockEndRegex =
+ new Regex("""(.*?)((?:\}\}\})|(?:\u000E</pre>\u000E))(.*)""")
+
+ /** A key used for a tag map. The key is built from the name of the tag and
+ * from the linked symbol if the tag has one.
+ * Equality on tag keys is structural. */
+ private sealed abstract class TagKey {
+ def name: String
+ }
+
+ private final case class SimpleTagKey(name: String) extends TagKey
+ private final case class SymbolTagKey(name: String, symbol: String) extends TagKey
+
+ /** Parses a raw comment string into a `Comment` object.
+ * @param comment The expanded comment string (including start and end markers) to be parsed.
+ * @param src The raw comment source string.
+ * @param pos The position of the comment in source. */
+ protected def parseAtSymbol(comment: String, src: String, pos: Position, siteOpt: Option[Symbol] = None): Comment = {
+ /** The cleaned raw comment as a list of lines. Cleaning removes comment
+ * start and end markers, line start markers and unnecessary whitespace. */
+ def clean(comment: String): List[String] = {
+ def cleanLine(line: String): String = {
+ //replaceAll removes trailing whitespaces
+ line.replaceAll("""\s+$""", "") match {
+ case CleanCommentLine(ctl) => ctl
+ case tl => tl
+ }
+ }
+ val strippedComment = comment.trim.stripPrefix("/*").stripSuffix("*/")
+ val safeComment = DangerousTags.replaceAllIn(strippedComment, { htmlReplacement(_) })
+ val javadoclessComment = JavadocTags.replaceAllIn(safeComment, { javadocReplacement(_) })
+ val markedTagComment =
+ SafeTags.replaceAllIn(javadoclessComment, { mtch =>
+ java.util.regex.Matcher.quoteReplacement(safeTagMarker + mtch.matched + safeTagMarker)
+ })
+ markedTagComment.lines.toList map (cleanLine(_))
+ }
+
+ /** Parses a comment (in the form of a list of lines) to a `Comment`
+ * instance, recursively on lines. To do so, it splits the whole comment
+ * into main body and tag bodies, then runs the `WikiParser` on each body
+ * before creating the comment instance.
+ *
+ * @param docBody The body of the comment parsed until now.
+ * @param tags All tags parsed until now.
+ * @param lastTagKey The last parsed tag, or `None` if the tag section hasn't started. Lines that are not tagged
+ * are part of the previous tag or, if none exists, of the body.
+ * @param remaining The lines that must still recursively be parsed.
+ * @param inCodeBlock Whether the next line is part of a code block (in which no tags must be read). */
+ def parse0 (
+ docBody: StringBuilder,
+ tags: Map[TagKey, List[String]],
+ lastTagKey: Option[TagKey],
+ remaining: List[String],
+ inCodeBlock: Boolean
+ ): Comment = remaining match {
+
+ case CodeBlockStartRegex(before, marker, after) :: ls if (!inCodeBlock) =>
+ if (!before.trim.isEmpty && !after.trim.isEmpty)
+ parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, false)
+ else if (!before.trim.isEmpty)
+ parse0(docBody, tags, lastTagKey, before :: marker :: ls, false)
+ else if (!after.trim.isEmpty)
+ parse0(docBody, tags, lastTagKey, marker :: after :: ls, true)
+ else lastTagKey match {
+ case Some(key) =>
+ val value =
+ ((tags get key): @unchecked) match {
+ case Some(b :: bs) => (b + endOfLine + marker) :: bs
+ case None => oops("lastTagKey set when no tag exists for key")
+ }
+ parse0(docBody, tags + (key -> value), lastTagKey, ls, true)
+ case None =>
+ parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, true)
+ }
+
+ case CodeBlockEndRegex(before, marker, after) :: ls =>
+ if (!before.trim.isEmpty && !after.trim.isEmpty)
+ parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, true)
+ if (!before.trim.isEmpty)
+ parse0(docBody, tags, lastTagKey, before :: marker :: ls, true)
+ else if (!after.trim.isEmpty)
+ parse0(docBody, tags, lastTagKey, marker :: after :: ls, false)
+ else lastTagKey match {
+ case Some(key) =>
+ val value =
+ ((tags get key): @unchecked) match {
+ case Some(b :: bs) => (b + endOfLine + marker) :: bs
+ case None => oops("lastTagKey set when no tag exists for key")
+ }
+ parse0(docBody, tags + (key -> value), lastTagKey, ls, false)
+ case None =>
+ parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, false)
+ }
+
+ case SymbolTagRegex(name, sym, body) :: ls if (!inCodeBlock) =>
+ val key = SymbolTagKey(name, sym)
+ val value = body :: tags.getOrElse(key, Nil)
+ parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+
+ case SimpleTagRegex(name, body) :: ls if (!inCodeBlock) =>
+ val key = SimpleTagKey(name)
+ val value = body :: tags.getOrElse(key, Nil)
+ parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+
+ case SingleTagRegex(name) :: ls if (!inCodeBlock) =>
+ val key = SimpleTagKey(name)
+ val value = "" :: tags.getOrElse(key, Nil)
+ parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+
+ case line :: ls if (lastTagKey.isDefined) =>
+ val key = lastTagKey.get
+ val value =
+ ((tags get key): @unchecked) match {
+ case Some(b :: bs) => (b + endOfLine + line) :: bs
+ case None => oops("lastTagKey set when no tag exists for key")
+ }
+ parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock)
+
+ case line :: ls =>
+ if (docBody.length > 0) docBody append endOfLine
+ docBody append line
+ parse0(docBody, tags, lastTagKey, ls, inCodeBlock)
+
+ case Nil =>
+ // Take the {inheritance, content} diagram keys aside, as it doesn't need any parsing
+ val inheritDiagramTag = SimpleTagKey("inheritanceDiagram")
+ val contentDiagramTag = SimpleTagKey("contentDiagram")
+
+ val inheritDiagramText: List[String] = tags.get(inheritDiagramTag) match {
+ case Some(list) => list
+ case None => List.empty
+ }
+
+ val contentDiagramText: List[String] = tags.get(contentDiagramTag) match {
+ case Some(list) => list
+ case None => List.empty
+ }
+
+ val stripTags=List(inheritDiagramTag, contentDiagramTag, SimpleTagKey("template"), SimpleTagKey("documentable"))
+ val tagsWithoutDiagram = tags.filterNot(pair => stripTags.contains(pair._1))
+
+ val bodyTags: mutable.Map[TagKey, List[Body]] =
+ mutable.Map(tagsWithoutDiagram mapValues {tag => tag map (parseWikiAtSymbol(_, pos, siteOpt))} toSeq: _*)
+
+ def oneTag(key: SimpleTagKey): Option[Body] =
+ ((bodyTags remove key): @unchecked) match {
+ case Some(r :: rs) =>
+ if (!rs.isEmpty) reporter.warning(pos, "Only one '@" + key.name + "' tag is allowed")
+ Some(r)
+ case None => None
+ }
+
+ def allTags(key: SimpleTagKey): List[Body] =
+ (bodyTags remove key) getOrElse Nil
+
+ def allSymsOneTag(key: TagKey): Map[String, Body] = {
+ val keys: Seq[SymbolTagKey] =
+ bodyTags.keys.toSeq flatMap {
+ case stk: SymbolTagKey if (stk.name == key.name) => Some(stk)
+ case stk: SimpleTagKey if (stk.name == key.name) =>
+ reporter.warning(pos, "Tag '@" + stk.name + "' must be followed by a symbol name")
+ None
+ case _ => None
+ }
+ val pairs: Seq[(String, Body)] =
+ for (key <- keys) yield {
+ val bs = (bodyTags remove key).get
+ if (bs.length > 1)
+ reporter.warning(pos, "Only one '@" + key.name + "' tag for symbol " + key.symbol + " is allowed")
+ (key.symbol, bs.head)
+ }
+ Map.empty[String, Body] ++ pairs
+ }
+
+ val com = createComment (
+ body0 = Some(parseWikiAtSymbol(docBody.toString, pos, siteOpt)),
+ authors0 = allTags(SimpleTagKey("author")),
+ see0 = allTags(SimpleTagKey("see")),
+ result0 = oneTag(SimpleTagKey("return")),
+ throws0 = allSymsOneTag(SimpleTagKey("throws")),
+ valueParams0 = allSymsOneTag(SimpleTagKey("param")),
+ typeParams0 = allSymsOneTag(SimpleTagKey("tparam")),
+ version0 = oneTag(SimpleTagKey("version")),
+ since0 = oneTag(SimpleTagKey("since")),
+ todo0 = allTags(SimpleTagKey("todo")),
+ deprecated0 = oneTag(SimpleTagKey("deprecated")),
+ note0 = allTags(SimpleTagKey("note")),
+ example0 = allTags(SimpleTagKey("example")),
+ constructor0 = oneTag(SimpleTagKey("constructor")),
+ source0 = Some(clean(src).mkString("\n")),
+ inheritDiagram0 = inheritDiagramText,
+ contentDiagram0 = contentDiagramText,
+ group0 = oneTag(SimpleTagKey("group")),
+ groupDesc0 = allSymsOneTag(SimpleTagKey("groupdesc")),
+ groupNames0 = allSymsOneTag(SimpleTagKey("groupname")),
+ groupPrio0 = allSymsOneTag(SimpleTagKey("groupprio"))
+ )
+
+ for ((key, _) <- bodyTags)
+ reporter.warning(pos, "Tag '@" + key.name + "' is not recognised")
+
+ com
+
+ }
+
+ parse0(new StringBuilder(comment.size), Map.empty, None, clean(comment), false)
+
+ }
+
+ /** Parses a string containing wiki syntax into a `Comment` object.
+ * Note that the string is assumed to be clean:
+ * - Removed Scaladoc start and end markers.
+ * - Removed start-of-line star and one whitespace afterwards (if present).
+ * - Removed all end-of-line whitespace.
+ * - Only `endOfLine` is used to mark line endings. */
+ def parseWikiAtSymbol(string: String, pos: Position, siteOpt: Option[Symbol]): Body = new WikiParser(string, pos, siteOpt).document()
+
+ /** TODO
+ *
+ * @author Ingo Maier
+ * @author Manohar Jonnalagedda
+ * @author Gilles Dubochet */
+ protected final class WikiParser(val buffer: String, pos: Position, siteOpt: Option[Symbol]) extends CharReader(buffer) { wiki =>
+ var summaryParsed = false
+
+ def document(): Body = {
+ val blocks = new mutable.ListBuffer[Block]
+ while (char != endOfText)
+ blocks += block()
+ Body(blocks.toList)
+ }
+
+ /* BLOCKS */
+
+ /** {{{ block ::= code | title | hrule | para }}} */
+ def block(): Block = {
+ if (checkSkipInitWhitespace("{{{"))
+ code()
+ else if (checkSkipInitWhitespace('='))
+ title()
+ else if (checkSkipInitWhitespace("----"))
+ hrule()
+ else if (checkList)
+ listBlock
+ else {
+ para()
+ }
+ }
+
+ /** listStyle ::= '-' spc | '1.' spc | 'I.' spc | 'i.' spc | 'A.' spc | 'a.' spc
+ * Characters used to build lists and their constructors */
+ protected val listStyles = Map[String, (Seq[Block] => Block)]( // TODO Should this be defined at some list companion?
+ "- " -> ( UnorderedList(_) ),
+ "1. " -> ( OrderedList(_,"decimal") ),
+ "I. " -> ( OrderedList(_,"upperRoman") ),
+ "i. " -> ( OrderedList(_,"lowerRoman") ),
+ "A. " -> ( OrderedList(_,"upperAlpha") ),
+ "a. " -> ( OrderedList(_,"lowerAlpha") )
+ )
+
+ /** Checks if the current line is formed with more than one space and one the listStyles */
+ def checkList =
+ (countWhitespace > 0) && (listStyles.keys exists { checkSkipInitWhitespace(_) })
+
+ /** {{{
+ * nListBlock ::= nLine { mListBlock }
+ * nLine ::= nSpc listStyle para '\n'
+ * }}}
+ * Where n and m stand for the number of spaces. When `m > n`, a new list is nested. */
+ def listBlock: Block = {
+
+ /** Consumes one list item block and returns it, or None if the block is
+ * not a list or a different list. */
+ def listLine(indent: Int, style: String): Option[Block] =
+ if (countWhitespace > indent && checkList)
+ Some(listBlock)
+ else if (countWhitespace != indent || !checkSkipInitWhitespace(style))
+ None
+ else {
+ jumpWhitespace()
+ jump(style)
+ val p = Paragraph(inline(false))
+ blockEnded("end of list line ")
+ Some(p)
+ }
+
+ /** Consumes all list item blocks (possibly with nested lists) of the
+ * same list and returns the list block. */
+ def listLevel(indent: Int, style: String): Block = {
+ val lines = mutable.ListBuffer.empty[Block]
+ var line: Option[Block] = listLine(indent, style)
+ while (line.isDefined) {
+ lines += line.get
+ line = listLine(indent, style)
+ }
+ val constructor = listStyles(style)
+ constructor(lines)
+ }
+
+ val indent = countWhitespace
+ val style = (listStyles.keys find { checkSkipInitWhitespace(_) }).getOrElse(listStyles.keys.head)
+ listLevel(indent, style)
+ }
+
+ def code(): Block = {
+ jumpWhitespace()
+ jump("{{{")
+ val str = readUntil("}}}")
+ if (char == endOfText)
+ reportError(pos, "unclosed code block")
+ else
+ jump("}}}")
+ blockEnded("code block")
+ Code(normalizeIndentation(str))
+ }
+
+ /** {{{ title ::= ('=' inline '=' | "==" inline "==" | ...) '\n' }}} */
+ def title(): Block = {
+ jumpWhitespace()
+ val inLevel = repeatJump('=')
+ val text = inline(check("=" * inLevel))
+ val outLevel = repeatJump('=', inLevel)
+ if (inLevel != outLevel)
+ reportError(pos, "unbalanced or unclosed heading")
+ blockEnded("heading")
+ Title(text, inLevel)
+ }
+
+ /** {{{ hrule ::= "----" { '-' } '\n' }}} */
+ def hrule(): Block = {
+ jumpWhitespace()
+ repeatJump('-')
+ blockEnded("horizontal rule")
+ HorizontalRule()
+ }
+
+ /** {{{ para ::= inline '\n' }}} */
+ def para(): Block = {
+ val p =
+ if (summaryParsed)
+ Paragraph(inline(false))
+ else {
+ val s = summary()
+ val r =
+ if (checkParaEnded) List(s) else List(s, inline(false))
+ summaryParsed = true
+ Paragraph(Chain(r))
+ }
+ while (char == endOfLine && char != endOfText)
+ nextChar()
+ p
+ }
+
+ /* INLINES */
+
+ val OPEN_TAG = "^<([A-Za-z]+)( [^>]*)?(/?)>$".r
+ val CLOSE_TAG = "^</([A-Za-z]+)>$".r
+ private def readHTMLFrom(begin: HtmlTag): String = {
+ val list = mutable.ListBuffer.empty[String]
+ val stack = mutable.ListBuffer.empty[String]
+
+ begin.close match {
+ case Some(HtmlTag(CLOSE_TAG(s))) =>
+ stack += s
+ case _ =>
+ return ""
+ }
+
+ do {
+ val str = readUntil { char == safeTagMarker || char == endOfText }
+ nextChar()
+
+ list += str
+
+ str match {
+ case OPEN_TAG(s, _, standalone) => {
+ if (standalone != "/") {
+ stack += s
+ }
+ }
+ case CLOSE_TAG(s) => {
+ if (s == stack.last) {
+ stack.remove(stack.length-1)
+ }
+ }
+ case _ => ;
+ }
+ } while (stack.length > 0 && char != endOfText)
+
+ list mkString ""
+ }
+
+ def inline(isInlineEnd: => Boolean): Inline = {
+
+ def inline0(): Inline = {
+ if (char == safeTagMarker) {
+ val tag = htmlTag()
+ HtmlTag(tag.data + readHTMLFrom(tag))
+ }
+ else if (check("'''")) bold()
+ else if (check("''")) italic()
+ else if (check("`")) monospace()
+ else if (check("__")) underline()
+ else if (check("^")) superscript()
+ else if (check(",,")) subscript()
+ else if (check("[[")) link()
+ else {
+ val str = readUntil { char == safeTagMarker || check("''") || char == '`' || check("__") || char == '^' || check(",,") || check("[[") || isInlineEnd || checkParaEnded || char == endOfLine }
+ Text(str)
+ }
+ }
+
+ val inlines: List[Inline] = {
+ val iss = mutable.ListBuffer.empty[Inline]
+ iss += inline0()
+ while (!isInlineEnd && !checkParaEnded) {
+ val skipEndOfLine = if (char == endOfLine) {
+ nextChar()
+ true
+ } else {
+ false
+ }
+
+ val current = inline0()
+ (iss.last, current) match {
+ case (Text(t1), Text(t2)) if skipEndOfLine =>
+ iss.update(iss.length - 1, Text(t1 + endOfLine + t2))
+ case (i1, i2) if skipEndOfLine =>
+ iss ++= List(Text(endOfLine.toString), i2)
+ case _ => iss += current
+ }
+ }
+ iss.toList
+ }
+
+ inlines match {
+ case Nil => Text("")
+ case i :: Nil => i
+ case is => Chain(is)
+ }
+
+ }
+
+ def htmlTag(): HtmlTag = {
+ jump(safeTagMarker)
+ val read = readUntil(safeTagMarker)
+ if (char != endOfText) jump(safeTagMarker)
+ HtmlTag(read)
+ }
+
+ def bold(): Inline = {
+ jump("'''")
+ val i = inline(check("'''"))
+ jump("'''")
+ Bold(i)
+ }
+
+ def italic(): Inline = {
+ jump("''")
+ val i = inline(check("''"))
+ jump("''")
+ Italic(i)
+ }
+
+ def monospace(): Inline = {
+ jump("`")
+ val i = inline(check("`"))
+ jump("`")
+ Monospace(i)
+ }
+
+ def underline(): Inline = {
+ jump("__")
+ val i = inline(check("__"))
+ jump("__")
+ Underline(i)
+ }
+
+ def superscript(): Inline = {
+ jump("^")
+ val i = inline(check("^"))
+ if (jump("^")) {
+ Superscript(i)
+ } else {
+ Chain(Seq(Text("^"), i))
+ }
+ }
+
+ def subscript(): Inline = {
+ jump(",,")
+ val i = inline(check(",,"))
+ jump(",,")
+ Subscript(i)
+ }
+
+ def summary(): Inline = {
+ val i = inline(check("."))
+ Summary(
+ if (jump("."))
+ Chain(List(i, Text(".")))
+ else
+ i
+ )
+ }
+
+ def link(): Inline = {
+ val SchemeUri = """([a-z]+:.*)""".r
+ jump("[[")
+ var parens = 2 + repeatJump('[')
+ val start = "[" * parens
+ val stop = "]" * parens
+ //println("link with " + parens + " matching parens")
+ val target = readUntil { check(stop) || check(" ") }
+ val title =
+ if (!check(stop)) Some({
+ jump(" ")
+ inline(check(stop))
+ })
+ else None
+ jump(stop)
+
+ (target, title) match {
+ case (SchemeUri(uri), optTitle) =>
+ Link(uri, optTitle getOrElse Text(uri))
+ case (qualName, optTitle) =>
+ makeEntityLink(optTitle getOrElse Text(target), pos, target, siteOpt)
+ }
+ }
+
+ /* UTILITY */
+
+ /** {{{ eol ::= { whitespace } '\n' }}} */
+ def blockEnded(blockType: String): Unit = {
+ if (char != endOfLine && char != endOfText) {
+ reportError(pos, "no additional content on same line after " + blockType)
+ jumpUntil(endOfLine)
+ }
+ while (char == endOfLine)
+ nextChar()
+ }
+
+ /**
+ * Eliminates the (common) leading spaces in all lines, based on the first line
+ * For indented pieces of code, it reduces the indent to the least whitespace prefix:
+ * {{{
+ * indented example
+ * another indented line
+ * if (condition)
+ * then do something;
+ * ^ this is the least whitespace prefix
+ * }}}
+ */
+ def normalizeIndentation(_code: String): String = {
+
+ var code = _code.trim
+ var maxSkip = Integer.MAX_VALUE
+ var crtSkip = 0
+ var wsArea = true
+ var index = 0
+ var firstLine = true
+ var emptyLine = true
+
+ while (index < code.length) {
+ code(index) match {
+ case ' ' =>
+ if (wsArea)
+ crtSkip += 1
+ case c =>
+ wsArea = (c == '\n')
+ maxSkip = if (firstLine || emptyLine) maxSkip else if (maxSkip <= crtSkip) maxSkip else crtSkip
+ crtSkip = if (c == '\n') 0 else crtSkip
+ firstLine = if (c == '\n') false else firstLine
+ emptyLine = if (c == '\n') true else false
+ }
+ index += 1
+ }
+
+ if (maxSkip == 0)
+ code
+ else {
+ index = 0
+ val builder = new StringBuilder
+ while (index < code.length) {
+ builder.append(code(index))
+ if (code(index) == '\n') {
+ // we want to skip as many spaces are available, if there are less spaces (like on empty lines, do not
+ // over-consume them)
+ index += 1
+ val limit = index + maxSkip
+ while ((index < code.length) && (code(index) == ' ') && index < limit)
+ index += 1
+ }
+ else
+ index += 1
+ }
+ builder.toString
+ }
+ }
+
+ def checkParaEnded(): Boolean = {
+ (char == endOfText) ||
+ ((char == endOfLine) && {
+ val poff = offset
+ nextChar() // read EOL
+ val ok = {
+ checkSkipInitWhitespace(endOfLine) ||
+ checkSkipInitWhitespace('=') ||
+ checkSkipInitWhitespace("{{{") ||
+ checkList ||
+ checkSkipInitWhitespace('\u003D')
+ }
+ offset = poff
+ ok
+ })
+ }
+
+ def reportError(pos: Position, message: String) {
+ reporter.warning(pos, message)
+ }
+ }
+
+ protected sealed class CharReader(buffer: String) { reader =>
+
+ var offset: Int = 0
+ def char: Char =
+ if (offset >= buffer.length) endOfText else buffer charAt offset
+
+ final def nextChar() {
+ offset += 1
+ }
+
+ final def check(chars: String): Boolean = {
+ val poff = offset
+ val ok = jump(chars)
+ offset = poff
+ ok
+ }
+
+ def checkSkipInitWhitespace(c: Char): Boolean = {
+ val poff = offset
+ jumpWhitespace()
+ val ok = jump(c)
+ offset = poff
+ ok
+ }
+
+ def checkSkipInitWhitespace(chars: String): Boolean = {
+ val poff = offset
+ jumpWhitespace()
+ val (ok0, chars0) =
+ if (chars.charAt(0) == ' ')
+ (offset > poff, chars substring 1)
+ else
+ (true, chars)
+ val ok = ok0 && jump(chars0)
+ offset = poff
+ ok
+ }
+
+ def countWhitespace: Int = {
+ var count = 0
+ val poff = offset
+ while (isWhitespace(char) && char != endOfText) {
+ nextChar()
+ count += 1
+ }
+ offset = poff
+ count
+ }
+
+ /* JUMPERS */
+
+ /** jumps a character and consumes it
+ * @return true only if the correct character has been jumped */
+ final def jump(ch: Char): Boolean = {
+ if (char == ch) {
+ nextChar()
+ true
+ }
+ else false
+ }
+
+ /** jumps all the characters in chars, consuming them in the process.
+ * @return true only if the correct characters have been jumped */
+ final def jump(chars: String): Boolean = {
+ var index = 0
+ while (index < chars.length && char == chars.charAt(index) && char != endOfText) {
+ nextChar()
+ index += 1
+ }
+ index == chars.length
+ }
+
+ final def repeatJump(c: Char, max: Int = Int.MaxValue): Int = {
+ var count = 0
+ while (jump(c) && count < max)
+ count += 1
+ count
+ }
+
+ final def jumpUntil(ch: Char): Int = {
+ var count = 0
+ while (char != ch && char != endOfText) {
+ nextChar()
+ count += 1
+ }
+ count
+ }
+
+ final def jumpUntil(chars: String): Int = {
+ assert(chars.length > 0)
+ var count = 0
+ val c = chars.charAt(0)
+ while (!check(chars) && char != endOfText) {
+ nextChar()
+ while (char != c && char != endOfText) {
+ nextChar()
+ count += 1
+ }
+ }
+ count
+ }
+
+ final def jumpUntil(pred: => Boolean): Int = {
+ var count = 0
+ while (!pred && char != endOfText) {
+ nextChar()
+ count += 1
+ }
+ count
+ }
+
+ def jumpWhitespace() = jumpUntil(!isWhitespace(char))
+
+ /* READERS */
+
+ final def readUntil(c: Char): String = {
+ withRead {
+ while (char != c && char != endOfText) {
+ nextChar()
+ }
+ }
+ }
+
+ final def readUntil(chars: String): String = {
+ assert(chars.length > 0)
+ withRead {
+ val c = chars.charAt(0)
+ while (!check(chars) && char != endOfText) {
+ nextChar()
+ while (char != c && char != endOfText)
+ nextChar()
+ }
+ }
+ }
+
+ final def readUntil(pred: => Boolean): String = {
+ withRead {
+ while (char != endOfText && !pred) {
+ nextChar()
+ }
+ }
+ }
+
+ private def withRead(read: => Unit): String = {
+ val start = offset
+ read
+ buffer.substring(start, offset)
+ }
+
+
+ /* CHARS CLASSES */
+
+ def isWhitespace(c: Char) = c == ' ' || c == '\t'
+
+ }
+
+}
diff --git a/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala b/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala
new file mode 100755
index 0000000..c111798
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala
@@ -0,0 +1,15 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ */
+
+package scala.tools.nsc
+package doc
+package base
+
+import scala.collection._
+
+sealed trait LinkTo
+final case class LinkToMember[Mbr, Tpl](mbr: Mbr, tpl: Tpl) extends LinkTo
+final case class LinkToTpl[Tpl](tpl: Tpl) extends LinkTo
+final case class LinkToExternal(name: String, url: String) extends LinkTo
+final case class Tooltip(name: String) extends LinkTo
diff --git a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala
new file mode 100755
index 0000000..cdcfeaa
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala
@@ -0,0 +1,206 @@
+package scala.tools.nsc
+package doc
+package base
+
+import comment._
+
+/** This trait extracts all required information for documentation from compilation units.
+ * The base trait has been extracted to allow getting light-weight documentation
+ * for a particular symbol in the IDE.*/
+trait MemberLookupBase {
+
+ val global: Global
+ import global._
+
+ def internalLink(sym: Symbol, site: Symbol): Option[LinkTo]
+ def chooseLink(links: List[LinkTo]): LinkTo
+ def toString(link: LinkTo): String
+ def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal]
+ def warnNoLink: Boolean
+
+ import global._
+ import rootMirror.{RootPackage, EmptyPackage}
+
+ private def isRoot(s: Symbol) = s.isRootSymbol || s.isEmptyPackage || s.isEmptyPackageClass
+
+ def makeEntityLink(title: Inline, pos: Position, query: String, siteOpt: Option[Symbol]) =
+ new EntityLink(title) { lazy val link = memberLookup(pos, query, siteOpt) }
+
+ private var showExplanation = true
+ private def explanation: String =
+ if (showExplanation) {
+ showExplanation = false
+ """
+ |Quick crash course on using Scaladoc links
+ |==========================================
+ |Disambiguating terms and types: Prefix terms with '$' and types with '!' in case both names are in use:
+ | - [[scala.collection.immutable.List!.apply class List's apply method]] and
+ | - [[scala.collection.immutable.List$.apply object List's apply method]]
+ |Disambiguating overloaded members: If a term is overloaded, you can indicate the first part of its signature followed by *:
+ | - [[[scala.collection.immutable.List$.fill[A](Int)(⇒A):List[A]* Fill with a single parameter]]]
+ | - [[[scala.collection.immutable.List$.fill[A](Int,Int)(⇒A):List[List[A]]* Fill with a two parameters]]]
+ |Notes:
+ | - you can use any number of matching square brackets to avoid interference with the signature
+ | - you can use \\. to escape dots in prefixes (don't forget to use * at the end to match the signature!)
+ | - you can use \\# to escape hashes, otherwise they will be considered as delimiters, like dots.""".stripMargin
+ } else ""
+
+ def memberLookup(pos: Position, query: String, siteOpt: Option[Symbol]): LinkTo = {
+ var members = breakMembers(query)
+
+ // (1) First look in the root package, as most of the links are qualified
+ val fromRoot = lookupInRootPackage(pos, members)
+
+ // (2) Or recursively go into each containing template.
+ val fromParents = siteOpt.fold(Stream.empty[Symbol]) { s =>
+ Stream.iterate(s)(_.owner)
+ }.takeWhile (!isRoot(_)).map {
+ lookupInTemplate(pos, members, _)
+ }
+
+ val syms = (fromRoot +: fromParents) find (!_.isEmpty) getOrElse Nil
+
+ val links = syms flatMap { case (sym, site) => internalLink(sym, site) } match {
+ case Nil =>
+ // (3) Look at external links
+ syms.flatMap { case (sym, owner) =>
+ // reconstruct the original link
+ def linkName(sym: Symbol) = {
+ def nameString(s: Symbol) = s.nameString + (if ((s.isModule || s.isModuleClass) && !s.isPackage) "$" else "")
+ val packageSuffix = if (sym.isPackage) ".package" else ""
+
+ sym.ownerChain.reverse.filterNot(isRoot(_)).map(nameString(_)).mkString(".") + packageSuffix
+ }
+
+ if (sym.isClass || sym.isModule || sym.isTrait || sym.isPackage)
+ findExternalLink(sym, linkName(sym))
+ else if (owner.isClass || owner.isModule || owner.isTrait || owner.isPackage)
+ findExternalLink(sym, linkName(owner) + "@" + externalSignature(sym))
+ else
+ None
+ }
+ case links => links
+ }
+ links match {
+ case Nil =>
+ if (warnNoLink)
+ reporter.warning(pos, "Could not find any member to link for \"" + query + "\".")
+ // (4) if we still haven't found anything, create a tooltip
+ Tooltip(query)
+ case List(l) => l
+ case links =>
+ val chosen = chooseLink(links)
+ def linkToString(link: LinkTo) = {
+ val chosenInfo =
+ if (link == chosen) " [chosen]" else ""
+ toString(link) + chosenInfo + "\n"
+ }
+ if (warnNoLink) {
+ val allLinks = links.map(linkToString).mkString
+ reporter.warning(pos,
+ s"""The link target \"$query\" is ambiguous. Several members fit the target:
+ |$allLinks
+ |$explanation""".stripMargin)
+ }
+ chosen
+ }
+ }
+
+ private sealed trait SearchStrategy
+ private case object BothTypeAndTerm extends SearchStrategy
+ private case object OnlyType extends SearchStrategy
+ private case object OnlyTerm extends SearchStrategy
+
+ private def lookupInRootPackage(pos: Position, members: List[String]) =
+ lookupInTemplate(pos, members, EmptyPackage) ::: lookupInTemplate(pos, members, RootPackage)
+
+ private def lookupInTemplate(pos: Position, members: List[String], container: Symbol): List[(Symbol, Symbol)] = {
+ // Maintaining compatibility with previous links is a bit tricky here:
+ // we have a preference for term names for all terms except for the last, where we prefer a class:
+ // How to do this:
+ // - at each step we do a DFS search with the prefered strategy
+ // - if the search doesn't return any members, we backtrack on the last decision
+ // * we look for terms with the last member's name
+ // * we look for types with the same name, all the way up
+ val result = members match {
+ case Nil => Nil
+ case mbrName::Nil =>
+ var syms = lookupInTemplate(pos, mbrName, container, OnlyType) map ((_, container))
+ if (syms.isEmpty)
+ syms = lookupInTemplate(pos, mbrName, container, OnlyTerm) map ((_, container))
+ syms
+
+ case tplName::rest =>
+ def completeSearch(syms: List[Symbol]) =
+ syms flatMap (lookupInTemplate(pos, rest, _))
+
+ completeSearch(lookupInTemplate(pos, tplName, container, OnlyTerm)) match {
+ case Nil => completeSearch(lookupInTemplate(pos, tplName, container, OnlyType))
+ case syms => syms
+ }
+ }
+ //println("lookupInTemplate(" + members + ", " + container + ") => " + result)
+ result
+ }
+
+ private def lookupInTemplate(pos: Position, member: String, container: Symbol, strategy: SearchStrategy): List[Symbol] = {
+ val name = member.stripSuffix("$").stripSuffix("!").stripSuffix("*")
+ def signatureMatch(sym: Symbol): Boolean = externalSignature(sym).startsWith(name)
+
+ // We need to cleanup the bogus classes created by the .class file parser. For example, [[scala.Predef]] resolves
+ // to (bogus) class scala.Predef loaded by the class loader -- which we need to eliminate by looking at the info
+ // and removing NoType classes
+ def cleanupBogusClasses(syms: List[Symbol]) = { syms.filter(_.info != NoType) }
+
+ def syms(name: Name) = container.info.nonPrivateMember(name.encodedName).alternatives
+ def termSyms = cleanupBogusClasses(syms(newTermName(name)))
+ def typeSyms = cleanupBogusClasses(syms(newTypeName(name)))
+
+ val result = if (member.endsWith("$"))
+ termSyms
+ else if (member.endsWith("!"))
+ typeSyms
+ else if (member.endsWith("*"))
+ cleanupBogusClasses(container.info.nonPrivateDecls) filter signatureMatch
+ else
+ strategy match {
+ case BothTypeAndTerm => termSyms ::: typeSyms
+ case OnlyType => typeSyms
+ case OnlyTerm => termSyms
+ }
+
+ //println("lookupInTemplate(" + member + ", " + container + ") => " + result)
+ result
+ }
+
+ private def breakMembers(query: String): List[String] = {
+ // Okay, how does this work? Well: you split on . but you don't want to split on \. => thus the ugly regex
+ // query.split((?<=[^\\\\])\\.).map(_.replaceAll("\\."))
+ // The same code, just faster:
+ var members = List[String]()
+ var index = 0
+ var last_index = 0
+ val length = query.length
+ while (index < length) {
+ if ((query.charAt(index) == '.' || query.charAt(index) == '#') &&
+ ((index == 0) || (query.charAt(index-1) != '\\'))) {
+
+ val member = query.substring(last_index, index).replaceAll("\\\\([#\\.])", "$1")
+ // we want to allow javadoc-style links [[#member]] -- which requires us to remove empty members from the first
+ // elemnt in the list
+ if ((member != "") || (!members.isEmpty))
+ members ::= member
+ last_index = index + 1
+ }
+ index += 1
+ }
+ if (last_index < length)
+ members ::= query.substring(last_index, length).replaceAll("\\\\\\.", ".")
+ members.reverse
+ }
+
+ def externalSignature(sym: Symbol) = {
+ sym.info // force it, otherwise we see lazy types
+ (sym.nameString + sym.signatureString).replaceAll("\\s", "")
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala b/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala
new file mode 100755
index 0000000..eb0d751
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala
@@ -0,0 +1,95 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package base
+package comment
+
+import scala.collection._
+
+import java.net.URL
+
+/** A body of text. A comment has a single body, which is composed of
+ * at least one block. Inside every body is exactly one summary (see
+ * [[scala.tools.nsc.doc.model.comment.Summary]]). */
+final case class Body(blocks: Seq[Block]) {
+
+ /** The summary text of the comment body. */
+ lazy val summary: Option[Inline] = {
+ def summaryInBlock(block: Block): Seq[Inline] = block match {
+ case Title(text, _) => summaryInInline(text)
+ case Paragraph(text) => summaryInInline(text)
+ case UnorderedList(items) => items flatMap summaryInBlock
+ case OrderedList(items, _) => items flatMap summaryInBlock
+ case DefinitionList(items) => items.values.toSeq flatMap summaryInBlock
+ case _ => Nil
+ }
+ def summaryInInline(text: Inline): Seq[Inline] = text match {
+ case Summary(text) => List(text)
+ case Chain(items) => items flatMap summaryInInline
+ case Italic(text) => summaryInInline(text)
+ case Bold(text) => summaryInInline(text)
+ case Underline(text) => summaryInInline(text)
+ case Superscript(text) => summaryInInline(text)
+ case Subscript(text) => summaryInInline(text)
+ case Link(_, title) => summaryInInline(title)
+ case _ => Nil
+ }
+ (blocks flatMap { summaryInBlock(_) }).toList match {
+ case Nil => None
+ case inline :: Nil => Some(inline)
+ case inlines => Some(Chain(inlines))
+ }
+ }
+}
+
+/** A block-level element of text, such as a paragraph or code block. */
+sealed abstract class Block
+
+final case class Title(text: Inline, level: Int) extends Block
+final case class Paragraph(text: Inline) extends Block
+final case class Code(data: String) extends Block
+final case class UnorderedList(items: Seq[Block]) extends Block
+final case class OrderedList(items: Seq[Block], style: String) extends Block
+final case class DefinitionList(items: SortedMap[Inline, Block]) extends Block
+final case class HorizontalRule() extends Block
+
+/** An section of text inside a block, possibly with formatting. */
+sealed abstract class Inline
+
+final case class Chain(items: Seq[Inline]) extends Inline
+final case class Italic(text: Inline) extends Inline
+final case class Bold(text: Inline) extends Inline
+final case class Underline(text: Inline) extends Inline
+final case class Superscript(text: Inline) extends Inline
+final case class Subscript(text: Inline) extends Inline
+final case class Link(target: String, title: Inline) extends Inline
+final case class Monospace(text: Inline) extends Inline
+final case class Text(text: String) extends Inline
+abstract class EntityLink(val title: Inline) extends Inline { def link: LinkTo }
+object EntityLink {
+ def apply(title: Inline, linkTo: LinkTo) = new EntityLink(title) { def link: LinkTo = linkTo }
+ def unapply(el: EntityLink): Option[(Inline, LinkTo)] = Some((el.title, el.link))
+}
+final case class HtmlTag(data: String) extends Inline {
+ private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r
+ private val (isEnd, tagName) = data match {
+ case Pattern(s1, s2) =>
+ (! s1.isEmpty, Some(s2.toLowerCase))
+ case _ =>
+ (false, None)
+ }
+
+ def canClose(open: HtmlTag) = {
+ isEnd && tagName == open.tagName
+ }
+
+ private val TagsNotToClose = Set("br", "img")
+ def close = tagName collect { case name if !TagsNotToClose(name) => HtmlTag(s"</$name>") }
+}
+
+/** The summary of a comment, usually its first sentence. There must be exactly one summary per body. */
+final case class Summary(text: Inline) extends Inline
diff --git a/src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala b/src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala
new file mode 100644
index 0000000..2b28164
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala
@@ -0,0 +1,134 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package base
+package comment
+
+import scala.collection._
+
+/** A Scaladoc comment and all its tags.
+ *
+ * '''Note:''' the only instantiation site of this class is in [[CommentFactory]].
+ *
+ * @author Manohar Jonnalagedda
+ * @author Gilles Dubochet */
+abstract class Comment {
+
+ /** The main body of the comment that describes what the entity does and is. */
+ def body: Body
+
+ private def closeHtmlTags(inline: Inline) = {
+ val stack = mutable.ListBuffer.empty[HtmlTag]
+ def scan(i: Inline) {
+ i match {
+ case Chain(list) =>
+ list foreach scan
+ case tag: HtmlTag => {
+ if (stack.length > 0 && tag.canClose(stack.last)) {
+ stack.remove(stack.length-1)
+ } else {
+ tag.close match {
+ case Some(t) =>
+ stack += t
+ case None =>
+ ;
+ }
+ }
+ }
+ case _ =>
+ ;
+ }
+ }
+ scan(inline)
+ Chain(List(inline) ++ stack.reverse)
+ }
+
+ /** A shorter version of the body. Usually, this is the first sentence of the body. */
+ def short: Inline = {
+ body.summary match {
+ case Some(s) =>
+ closeHtmlTags(s)
+ case _ =>
+ Text("")
+ }
+ }
+
+ /** A list of authors. The empty list is used when no author is defined. */
+ def authors: List[Body]
+
+ /** A list of other resources to see, including links to other entities or
+ * to external documentation. The empty list is used when no other resource
+ * is mentionned. */
+ def see: List[Body]
+
+ /** A description of the result of the entity. Typically, this provides additional
+ * information on the domain of the result, contractual post-conditions, etc. */
+ def result: Option[Body]
+
+ /** A map of exceptions that the entity can throw when accessed, and a
+ * description of what they mean. */
+ def throws: Map[String, Body]
+
+ /** A map of value parameters, and a description of what they are. Typically,
+ * this provides additional information on the domain of the parameters,
+ * contractual pre-conditions, etc. */
+ def valueParams: Map[String, Body]
+
+ /** A map of type parameters, and a description of what they are. Typically,
+ * this provides additional information on the domain of the parameters. */
+ def typeParams: Map[String, Body]
+
+ /** The version number of the entity. There is no formatting or further
+ * meaning attached to this value. */
+ def version: Option[Body]
+
+ /** A version number of a containing entity where this member-entity was introduced. */
+ def since: Option[Body]
+
+ /** An annotation as to expected changes on this entity. */
+ def todo: List[Body]
+
+ /** Whether the entity is deprecated. Using the `@deprecated` Scala attribute
+ * is prefereable to using this Scaladoc tag. */
+ def deprecated: Option[Body]
+
+ /** An additional note concerning the contract of the entity. */
+ def note: List[Body]
+
+ /** A usage example related to the entity. */
+ def example: List[Body]
+
+ /** The comment as it appears in the source text. */
+ def source: Option[String]
+
+ /** A description for the primary constructor */
+ def constructor: Option[Body]
+
+ /** A set of diagram directives for the inheritance diagram */
+ def inheritDiagram: List[String]
+
+ /** A set of diagram directives for the content diagram */
+ def contentDiagram: List[String]
+
+ /** The group this member is part of */
+ def group: Option[String]
+
+ /** Member group descriptions */
+ def groupDesc: Map[String,Body]
+
+ /** Member group names (overriding the short tag) */
+ def groupNames: Map[String,String]
+
+ /** Member group priorities */
+ def groupPrio: Map[String,Int]
+
+ override def toString =
+ body.toString + "\n" +
+ (authors map ("@author " + _.toString)).mkString("\n") +
+ (result map ("@return " + _.toString)).mkString("\n") +
+ (version map ("@version " + _.toString)).mkString
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala b/src/compiler/scala/tools/nsc/doc/html/Doclet.scala
index 5b722b5..3aa3e87 100644
--- a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/Doclet.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author David Bernard, Manohar Jonnalagedda
*/
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
index c21507e..4630c3d 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author David Bernard, Manohar Jonnalagedda
*/
@@ -11,6 +11,9 @@ import model._
import java.io.{ File => JFile }
import io.{ Streamable, Directory }
import scala.collection._
+import page.diagram._
+
+import html.page.diagram.DiagramGenerator
/** A class that can generate Scaladoc sites to some fixed root folder.
* @author David Bernard
@@ -29,26 +32,37 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
"jquery.js",
"jquery.layout.js",
"scheduler.js",
+ "diagrams.js",
"template.js",
"tools.tooltip.js",
+ "modernizr.custom.js",
"index.css",
"ref-index.css",
"template.css",
+ "diagrams.css",
"class.png",
"class_big.png",
+ "class_diagram.png",
"object.png",
"object_big.png",
+ "object_diagram.png",
"package.png",
"package_big.png",
"trait.png",
"trait_big.png",
+ "trait_diagram.png",
+ "type.png",
+ "type_big.png",
+ "type_diagram.png",
"class_to_object_big.png",
"object_to_class_big.png",
- "object_to_trait_big.png",
"trait_to_object_big.png",
+ "object_to_trait_big.png",
+ "type_to_object_big.png",
+ "object_to_type_big.png",
"arrow-down.png",
"arrow-right.png",
@@ -71,6 +85,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
"signaturebg.gif",
"signaturebg2.gif",
"typebg.gif",
+ "conversionbg.gif",
"valuemembersbg.gif",
"navigation-li-a.png",
@@ -80,9 +95,9 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
"selected.png",
"selected2-right.png",
"selected2.png",
- "unselected.png",
-
- "rootdoc.txt"
+ "selected-right-implicits.png",
+ "selected-implicits.png",
+ "unselected.png"
)
/** Generates the Scaladoc site for a model into the site root.
@@ -104,6 +119,8 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
finally out.close()
}
+ DiagramGenerator.initialize(universe.settings)
+
libResources foreach (s => copyResource("lib/" + s))
new page.Index(universe, index) writeFor this
@@ -114,16 +131,19 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
for (letter <- index.firstLetterIndex) {
new html.page.ReferenceIndex(letter._1, index, universe) writeFor this
}
+
+ DiagramGenerator.cleanup()
}
def writeTemplates(writeForThis: HtmlPage => Unit) {
val written = mutable.HashSet.empty[DocTemplateEntity]
+ val diagramGenerator: DiagramGenerator = new DotDiagramGenerator(universe.settings)
def writeTemplate(tpl: DocTemplateEntity) {
if (!(written contains tpl)) {
- writeForThis(new page.Template(tpl))
+ writeForThis(new page.Template(universe, diagramGenerator, tpl))
written += tpl
- tpl.templates map writeTemplate
+ tpl.templates collect { case d: DocTemplateEntity => d } map writeTemplate
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
index 44a1728..69da322 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author David Bernard, Manohar Jonnalagedda
*/
@@ -7,14 +7,14 @@ package scala.tools.nsc
package doc
package html
+import base._
+import base.comment._
import model._
-import comment._
-import xml.{XML, NodeSeq}
-import xml.dtd.{DocType, PublicID}
+import scala.xml.{XML, NodeSeq}
+import scala.xml.dtd.{DocType, PublicID}
import scala.collection._
-import scala.reflect.NameTransformer
-import java.nio.channels.Channels
+import java.io.Writer
/** An html page that is part of a Scaladoc site.
* @author David Bernard
@@ -23,6 +23,16 @@ abstract class HtmlPage extends Page { thisPage =>
/** The title of this page. */
protected def title: String
+ /** The page description */
+ protected def description: String =
+ // unless overwritten, will display the title in a spaced format, keeping - and .
+ title.replaceAll("[^a-zA-Z0-9\\.\\-]+", " ").replaceAll("\\-+", " - ").replaceAll(" +", " ")
+
+ /** The page keywords */
+ protected def keywords: String =
+ // unless overwritten, same as description, minus the " - "
+ description.replaceAll(" - ", " ")
+
/** Additional header elements (links, scripts, meta tags, etc.) required for this page. */
protected def headers: NodeSeq
@@ -36,22 +46,26 @@ abstract class HtmlPage extends Page { thisPage =>
<html>
<head>
<title>{ title }</title>
+ <meta name="description" content={ description }/>
+ <meta name="keywords" content={ keywords }/>
<meta http-equiv="content-type" content={ "text/html; charset=" + site.encoding }/>
{ headers }
</head>
{ body }
</html>
- val fos = createFileOutputStream(site)
- val w = Channels.newWriter(fos.getChannel, site.encoding)
- try {
+
+ writeFile(site) { (w: Writer) =>
w.write("<?xml version='1.0' encoding='" + site.encoding + "'?>\n")
w.write(doctype.toString + "\n")
w.write(xml.Xhtml.toXhtml(html))
}
- finally {
- w.close()
- fos.close()
- }
+
+ if (site.universe.settings.docRawOutput.value)
+ writeFile(site, ".raw") {
+ // we're only interested in the body, as this will go into the diff
+ _.write(body.text)
+ }
+
//XML.save(pageFile.getPath, html, site.encoding, xmlDecl = false, doctype = doctype)
}
@@ -74,7 +88,7 @@ abstract class HtmlPage extends Page { thisPage =>
case Title(in, _) => <h6>{ inlineToHtml(in) }</h6>
case Paragraph(in) => <p>{ inlineToHtml(in) }</p>
case Code(data) =>
- <pre>{ SyntaxHigh(data) }</pre> //<pre>{ xml.Text(data) }</pre>
+ <pre>{ SyntaxHigh(data) }</pre> //<pre>{ scala.xml.Text(data) }</pre>
case UnorderedList(items) =>
<ul>{ listItemsToHtml(items) }</ul>
case OrderedList(items, listStyle) =>
@@ -104,12 +118,40 @@ abstract class HtmlPage extends Page { thisPage =>
case Underline(in) => <u>{ inlineToHtml(in) }</u>
case Superscript(in) => <sup>{ inlineToHtml(in) }</sup>
case Subscript(in) => <sub>{ inlineToHtml(in) }</sub>
- case Link(raw, title) => <a href={ raw }>{ inlineToHtml(title) }</a>
- case EntityLink(entity) => templateToHtml(entity)
+ case Link(raw, title) => <a href={ raw } target="_blank">{ inlineToHtml(title) }</a>
case Monospace(in) => <code>{ inlineToHtml(in) }</code>
- case Text(text) => xml.Text(text)
+ case Text(text) => scala.xml.Text(text)
case Summary(in) => inlineToHtml(in)
- case HtmlTag(tag) => xml.Unparsed(tag)
+ case HtmlTag(tag) => scala.xml.Unparsed(tag)
+ case EntityLink(target, link) => linkToHtml(target, link, true)
+ }
+
+ def linkToHtml(text: Inline, link: LinkTo, hasLinks: Boolean) = link match {
+ case LinkToTpl(dtpl: TemplateEntity) =>
+ if (hasLinks)
+ <a href={ relativeLinkTo(dtpl) } class="extype" name={ dtpl.qualifiedName }>{ inlineToHtml(text) }</a>
+ else
+ <span class="extype" name={ dtpl.qualifiedName }>{ inlineToHtml(text) }</span>
+ case LinkToMember(mbr: MemberEntity, inTpl: TemplateEntity) =>
+ if (hasLinks)
+ <a href={ relativeLinkTo(inTpl) + "#" + mbr.signature } class="extmbr" name={ mbr.qualifiedName }>{ inlineToHtml(text) }</a>
+ else
+ <span class="extmbr" name={ mbr.qualifiedName }>{ inlineToHtml(text) }</span>
+ case Tooltip(tooltip) =>
+ <span class="extype" name={ tooltip }>{ inlineToHtml(text) }</span>
+ case LinkToExternal(name, url) =>
+ <a href={ url } class="extype" target="_top">{ inlineToHtml(text) }</a>
+ case _ =>
+ inlineToHtml(text)
+ }
+
+ def typeToHtml(tpes: List[model.TypeEntity], hasLinks: Boolean): NodeSeq = tpes match {
+ case Nil =>
+ NodeSeq.Empty
+ case List(tpe) =>
+ typeToHtml(tpe, hasLinks)
+ case tpe :: rest =>
+ typeToHtml(tpe, hasLinks) ++ scala.xml.Text(" with ") ++ typeToHtml(rest, hasLinks)
}
def typeToHtml(tpe: model.TypeEntity, hasLinks: Boolean): NodeSeq = {
@@ -118,28 +160,22 @@ abstract class HtmlPage extends Page { thisPage =>
if (starts.isEmpty && (inPos == string.length))
NodeSeq.Empty
else if (starts.isEmpty)
- xml.Text(string.slice(inPos, string.length))
+ scala.xml.Text(string.slice(inPos, string.length))
else if (inPos == starts.head)
toLinksIn(inPos, starts)
else {
- xml.Text(string.slice(inPos, starts.head)) ++ toLinksIn(starts.head, starts)
+ scala.xml.Text(string.slice(inPos, starts.head)) ++ toLinksIn(starts.head, starts)
}
}
def toLinksIn(inPos: Int, starts: List[Int]): NodeSeq = {
- val (tpl, width) = tpe.refEntity(inPos)
- (tpl match {
- case dtpl:DocTemplateEntity if hasLinks =>
- <a href={ relativeLinkTo(dtpl) } class="extype" name={ dtpl.qualifiedName }>{
- string.slice(inPos, inPos + width)
- }</a>
- case tpl =>
- <span class="extype" name={ tpl.qualifiedName }>{ string.slice(inPos, inPos + width) }</span>
- }) ++ toLinksOut(inPos + width, starts.tail)
+ val (link, width) = tpe.refEntity(inPos)
+ val text = comment.Text(string.slice(inPos, inPos + width))
+ linkToHtml(text, link, hasLinks) ++ toLinksOut(inPos + width, starts.tail)
}
if (hasLinks)
toLinksOut(0, tpe.refEntity.keySet.toList)
else
- xml.Text(string)
+ scala.xml.Text(string)
}
def typesToHtml(tpess: List[model.TypeEntity], hasLinks: Boolean, sep: NodeSeq): NodeSeq = tpess match {
@@ -153,15 +189,15 @@ abstract class HtmlPage extends Page { thisPage =>
}
/** Returns the HTML code that represents the template in `tpl` as a hyperlinked name. */
- def templateToHtml(tpl: TemplateEntity) = tpl match {
+ def templateToHtml(tpl: TemplateEntity, name: String = null) = tpl match {
case dTpl: DocTemplateEntity =>
if (hasPage(dTpl)) {
- <a href={ relativeLinkTo(dTpl) } class="extype" name={ dTpl.qualifiedName }>{ dTpl.name }</a>
+ <a href={ relativeLinkTo(dTpl) } class="extype" name={ dTpl.qualifiedName }>{ if (name eq null) dTpl.name else name }</a>
} else {
- xml.Text(dTpl.name)
+ scala.xml.Text(if (name eq null) dTpl.name else name)
}
case ndTpl: NoDocTemplate =>
- xml.Text(ndTpl.name)
+ scala.xml.Text(if (name eq null) ndTpl.name else name)
}
/** Returns the HTML code that represents the templates in `tpls` as a list of hyperlinked names. */
@@ -177,10 +213,12 @@ abstract class HtmlPage extends Page { thisPage =>
else if (ety.isTrait) "trait_big.png"
else if (ety.isClass && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "class_to_object_big.png"
else if (ety.isClass) "class_big.png"
+ else if ((ety.isAbstractType || ety.isAliasType) && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None) "type_to_object_big.png"
+ else if ((ety.isAbstractType || ety.isAliasType)) "type_big.png"
else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isClass) "object_to_class_big.png"
else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && ety.companion.get.isTrait) "object_to_trait_big.png"
+ else if (ety.isObject && !ety.companion.isEmpty && ety.companion.get.visibility.isPublic && ety.companion.get.inSource != None && (ety.companion.get.isAbstractType || ety.companion.get.isAliasType)) "object_to_trait_big.png"
else if (ety.isObject) "object_big.png"
else if (ety.isPackage) "package_big.png"
- else "class_big.png" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
-
+ else "class_big.png" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/Page.scala b/src/compiler/scala/tools/nsc/doc/html/Page.scala
index c5bf3e0..62166f7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/Page.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/Page.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author David Bernard, Manohar Jonnalagedda
*/
@@ -8,6 +8,8 @@ package scala.tools.nsc.doc.html
import scala.tools.nsc.doc.model._
import java.io.{FileOutputStream, File}
import scala.reflect.NameTransformer
+import java.nio.channels.Channels
+import java.io.Writer
abstract class Page {
thisPage =>
@@ -20,8 +22,8 @@ abstract class Page {
def absoluteLinkTo(path: List[String]) = path.reverse.mkString("/")
- def createFileOutputStream(site: HtmlFactory) = {
- val file = new File(site.siteRoot, absoluteLinkTo(thisPage.path))
+ def createFileOutputStream(site: HtmlFactory, suffix: String = "") = {
+ val file = new File(site.siteRoot, absoluteLinkTo(thisPage.path) + suffix)
val folder = file.getParentFile
if (! folder.exists) {
folder.mkdirs
@@ -29,22 +31,42 @@ abstract class Page {
new FileOutputStream(file.getPath)
}
+ def writeFile(site: HtmlFactory, suffix: String = "")(fn: Writer => Unit) = {
+ val fos = createFileOutputStream(site, suffix)
+ val w = Channels.newWriter(fos.getChannel, site.encoding)
+ try {
+ fn(w)
+ }
+ finally {
+ w.close()
+ fos.close()
+ }
+ }
+
/** Writes this page as a file. The file's location is relative to the
* generator's site root, and the encoding is also defined by the generator.
* @param generator The generator that is writing this page. */
def writeFor(site: HtmlFactory): Unit
- def docEntityKindToString(ety: DocTemplateEntity) =
- if (ety.isTrait) "trait"
- else if (ety.isCaseClass) "case class"
- else if (ety.isClass) "class"
- else if (ety.isObject) "object"
- else if (ety.isPackage) "package"
- else "class" // FIXME: an entity *should* fall into one of the above categories, but AnyRef is somehow not
+ def kindToString(mbr: MemberEntity) =
+ mbr match {
+ case c: Class => if (c.isCaseClass) "case class" else "class"
+ case _: Trait => "trait"
+ case _: Package => "package"
+ case _: Object => "object"
+ case _: AbstractType => "type"
+ case _: AliasType => "type"
+ case _: Constructor => "new"
+ case v: Def => "def"
+ case v: Val if (v.isLazyVal) => "lazy val"
+ case v: Val if (v.isVal) => "val"
+ case v: Val if (v.isVar) => "var"
+ case _ => sys.error("Cannot create kind for: " + mbr + " of class " + mbr.getClass)
+ }
def templateToPath(tpl: TemplateEntity): List[String] = {
def doName(tpl: TemplateEntity): String =
- NameTransformer.encode(tpl.name) + (if (tpl.isObject) "$" else "")
+ (if (tpl.inPackageObject) "package$$" else "") + NameTransformer.encode(tpl.name) + (if (tpl.isObject) "$" else "")
def downPacks(pack: Package): List[String] =
if (pack.isRootPackage) Nil else (doName(pack) :: downPacks(pack.inTemplate))
def downInner(nme: String, tpl: TemplateEntity): (String, Package) = {
@@ -83,18 +105,4 @@ abstract class Page {
}
relativize(thisPage.path.reverse, destPath.reverse).mkString("/")
}
-
- def isExcluded(dtpl: DocTemplateEntity) = {
- val qname = dtpl.qualifiedName
- ( ( qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") ||
- qname.startsWith("scala.Function") || qname.startsWith("scala.runtime.AbstractFunction")
- ) && !(
- qname == "scala.Tuple1" || qname == "scala.Tuple2" ||
- qname == "scala.Product" || qname == "scala.Product1" || qname == "scala.Product2" ||
- qname == "scala.Function" || qname == "scala.Function1" || qname == "scala.Function2" ||
- qname == "scala.runtime.AbstractFunction0" || qname == "scala.runtime.AbstractFunction1" ||
- qname == "scala.runtime.AbstractFunction2"
- )
- )
- }
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
index f19f449..6fdaaed 100644
--- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
@@ -1,11 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2010-2011 LAMP/EPFL
+ * Copyright 2010-2013 LAMP/EPFL
* @author Stephane Micheloud
*/
package scala.tools.nsc.doc.html
-import xml.NodeSeq
+import scala.xml.NodeSeq
+import scala.annotation.tailrec
/** Highlight the syntax of Scala code appearing in a `{{{` wiki block
* (see method `HtmlPage.blockToHtml`).
@@ -40,14 +41,14 @@ private[html] object SyntaxHigh {
/** Standard library classes/objects, sorted alphabetically */
val standards = Array (
- "Any", "AnyRef", "AnyVal", "App", "Application", "Array",
- "Boolean", "Byte", "Char", "Class", "Console", "Double",
- "Enumeration", "Float", "Function", "Int",
+ "WeakTypeTag", "Any", "AnyRef", "AnyVal", "App", "Application", "Array",
+ "Boolean", "Byte", "Char", "Class", "ClassTag", "ClassManifest",
+ "Console", "Double", "Enumeration", "Float", "Function", "Int",
"List", "Long", "Manifest", "Map",
- "None", "Nothing", "Null", "Object", "Option",
+ "NoManifest", "None", "Nothing", "Null", "Object", "Option", "OptManifest",
"Pair", "Predef",
"Seq", "Set", "Short", "Some", "String", "Symbol",
- "Triple", "Unit")
+ "Triple", "TypeTag", "Unit")
def apply(data: String): NodeSeq = {
val buf = data.getBytes
@@ -209,9 +210,9 @@ private[html] object SyntaxHigh {
out.toString
}
- def parse(pre: String, i: Int): Int = {
+ @tailrec def parse(pre: String, i: Int): Unit = {
out append pre
- if (i == buf.length) return i
+ if (i == buf.length) return
buf(i) match {
case '\n' =>
parse("\n", i+1)
@@ -219,24 +220,24 @@ private[html] object SyntaxHigh {
parse(" ", i+1)
case '&' =>
parse("&", i+1)
- case '<' =>
+ case '<' if i+1 < buf.length =>
val ch = buf(i+1).toChar
if (ch == '-' || ch == ':' || ch == '%')
parse("<span class=\"kw\"><"+ch+"</span>", i+2)
else
parse("<", i+1)
case '>' =>
- if (buf(i+1) == ':')
+ if (i+1 < buf.length && buf(i+1) == ':')
parse("<span class=\"kw\">>:</span>", i+2)
else
parse(">", i+1)
case '=' =>
- if (buf(i+1) == '>')
+ if (i+1 < buf.length && buf(i+1) == '>')
parse("<span class=\"kw\">=></span>", i+2)
else
parse(buf(i).toChar.toString, i+1)
case '/' =>
- if (buf(i+1) == '/' || buf(i+1) == '*') {
+ if (i+1 < buf.length && (buf(i+1) == '/' || buf(i+1) == '*')) {
val c = comment(i+1)
parse("<span class=\"cmt\">"+c+"</span>", i+c.length)
} else
@@ -257,9 +258,9 @@ private[html] object SyntaxHigh {
else
parse(buf(i).toChar.toString, i+1)
case _ =>
- if (i == 0 || !Character.isJavaIdentifierPart(buf(i-1).toChar)) {
+ if (i == 0 || (i >= 1 && !Character.isJavaIdentifierPart(buf(i-1).toChar))) {
if (Character.isDigit(buf(i)) ||
- (buf(i) == '.' && Character.isDigit(buf(i+1)))) {
+ (buf(i) == '.' && i + 1 < buf.length && Character.isDigit(buf(i+1)))) {
val s = numlit(i)
parse("<span class=\"num\">"+s+"</span>", i+s.length)
} else {
@@ -277,10 +278,9 @@ private[html] object SyntaxHigh {
} else
parse(buf(i).toChar.toString, i+1)
}
- i
}
parse("", 0)
- xml.Unparsed(out.toString)
+ scala.xml.Unparsed(out.toString)
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
index 0b5204e..8802d7c 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author David Bernard, Manohar Jonnalagedda
*/
@@ -14,7 +14,7 @@ import scala.collection._
import scala.xml._
import scala.util.parsing.json.{JSONObject, JSONArray}
-class Index(universe: doc.Universe, index: doc.Index) extends HtmlPage {
+class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
def path = List("index.html")
@@ -24,17 +24,23 @@ class Index(universe: doc.Universe, index: doc.Index) extends HtmlPage {
( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" )
}
- def headers =
+ val headers =
<xml:group>
<link href={ relativeLinkTo{List("index.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.layout.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("index.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("scheduler.js", "lib")} }></script>
</xml:group>
- def body =
+ private val scripts = {
+ val sources =
+ (List("jquery.js", "jquery-ui.js", "jquery.layout.js", "scheduler.js", "index.js").map {
+ x => relativeLinkTo(List(x, "lib"))
+ }) :+ "index.js"
+
+ sources map {
+ src => <script defer="defer" type="text/javascript" src={src}></script>
+ }
+ }
+
+ val body =
<body>
<div id="library">
<img class='class icon' src={ relativeLinkTo{List("class.png", "lib")} }/>
@@ -44,14 +50,33 @@ class Index(universe: doc.Universe, index: doc.Index) extends HtmlPage {
</div>
{ browser }
<div id="content" class="ui-layout-center">
- <iframe name="template" src={ relativeLinkTo{List("package.html")} }/>
+ <iframe id="template" name="template" src={ relativeLinkTo{List("package.html")} }/>
</div>
+ { scripts }
</body>
+ def letters: NodeSeq =
+ '_' +: ('a' to 'z') map {
+ char => {
+ val label = if (char == '_') '#' else char.toUpper
+
+ index.firstLetterIndex.get(char) match {
+ case Some(_) =>
+ <a target="template" href={ "index/index-" + char + ".html" }>{
+ label
+ }</a>
+ case None => <span>{ label }</span>
+ }
+ }
+ }
+
def browser =
<div id="browser" class="ui-layout-west">
<div class="ui-west-center">
- <div id="filter"></div>
+ <div id="filter">
+ <div id="textfilter"></div>
+ <div id="letters">{ letters }</div>
+ </div>
<div class="pack" id="tpl">{
def packageElem(pack: model.Package): NodeSeq = {
<xml:group>
@@ -61,12 +86,14 @@ class Index(universe: doc.Universe, index: doc.Index) extends HtmlPage {
}
<ol class="templates">{
val tpls: Map[String, Seq[DocTemplateEntity]] =
- (pack.templates filter (t => !t.isPackage && !isExcluded(t) )) groupBy (_.name)
+ (pack.templates collect {
+ case t: DocTemplateEntity if !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName) => t
+ }) groupBy (_.name)
val placeholderSeq: NodeSeq = <div class="placeholder"></div>
def createLink(entity: DocTemplateEntity, includePlaceholder: Boolean, includeText: Boolean) = {
- val entityType = docEntityKindToString(entity)
+ val entityType = kindToString(entity)
val linkContent = (
{ if (includePlaceholder) placeholderSeq else NodeSeq.Empty }
++
@@ -105,7 +132,7 @@ class Index(universe: doc.Universe, index: doc.Index) extends HtmlPage {
</xml:group>
}
packageElem(universe.rootPackage)
- }</div></div><script src="index.js"></script>
+ }</div></div>
</div>
def packageQualifiedName(ety: DocTemplateEntity): String =
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
index 7edd493..a205e02 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author David Bernard, Manohar Jonnalagedda
*/
@@ -15,14 +15,8 @@ class IndexScript(universe: doc.Universe, index: doc.Index) extends Page {
def path = List("index.js")
override def writeFor(site: HtmlFactory) {
- val stream = createFileOutputStream(site)
- val writer = Channels.newWriter(stream.getChannel, site.encoding)
- try {
- writer.write("Index.PACKAGES = " + packages.toString() + ";")
- }
- finally {
- writer.close
- stream.close
+ writeFile(site) {
+ _.write("Index.PACKAGES = " + packages.toString() + ";")
}
}
@@ -33,7 +27,7 @@ class IndexScript(universe: doc.Universe, index: doc.Index) extends Page {
val ary = merged.keys.toList.sortBy(_.toLowerCase).map(key => {
val pairs = merged(key).map(
- t => docEntityKindToString(t) -> relativeLinkTo(t)
+ t => kindToString(t) -> relativeLinkTo(t)
) :+ ("name" -> key)
JSONObject(scala.collection.immutable.Map(pairs : _*))
@@ -68,7 +62,9 @@ class IndexScript(universe: doc.Universe, index: doc.Index) extends Page {
def allPackagesWithTemplates = {
Map(allPackages.map((key) => {
- key -> key.templates.filter(t => !t.isPackage && !isExcluded(t))
+ key -> key.templates.collect {
+ case t: DocTemplateEntity if !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName) => t
+ }
}) : _*)
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala b/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
old mode 100644
new mode 100755
index a76cc23..a74c2ee
--- a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Pedro Furlanetto
*/
@@ -34,7 +34,7 @@ class ReferenceIndex(letter: Char, index: doc.Index, universe: Universe) extends
} else {
html
}
- })
+ }).toList.distinct
<div class="entry">
<div class="name">{
@@ -44,7 +44,7 @@ class ReferenceIndex(letter: Char, index: doc.Index, universe: Universe) extends
<strike>{ name }</strike>
}</div>
<div class="occurrences">{
- for (owner <- occurrences) yield owner ++ xml.Text(" ")
+ for (owner <- occurrences) yield owner ++ scala.xml.Text(" ")
}</div>
</div>
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
index 7914daa..68289b7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author David Bernard, Manohar Jonnalagedda
*/
@@ -9,8 +9,7 @@ package html
package page
import model._
-import comment._
-import xml.{NodeSeq, Unparsed}
+import scala.xml.{NodeSeq, Unparsed}
import java.io.File
class Source(sourceFile: File) extends HtmlPage {
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index 9a0c61e..63c77e7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author David Bernard, Manohar Jonnalagedda
*/
@@ -8,37 +8,81 @@ package doc
package html
package page
+import base._
+import base.comment._
+
+import model._
+import model.diagram._
+import diagram._
+
+import scala.xml.{ NodeSeq, Text, UnprefixedAttribute }
+import scala.language.postfixOps
+
import model._
-import scala.xml.{ NodeSeq, Text, XML, UnprefixedAttribute }
+import model.diagram._
+import diagram._
-class Template(tpl: DocTemplateEntity) extends HtmlPage {
+class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemplateEntity) extends HtmlPage {
- def path =
+ val path =
templateToPath(tpl)
- def title =
- tpl.qualifiedName
+ def title = {
+ val s = universe.settings
- def headers =
+ tpl.name +
+ ( if (!s.doctitle.isDefault) " - " + s.doctitle.value else "" ) +
+ ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" ) +
+ " - " + tpl.qualifiedName
+ }
+
+ val headers =
<xml:group>
<link href={ relativeLinkTo{List("template.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("template.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("tools.tooltip.js", "lib")} }></script>
+ <link href={ relativeLinkTo{List("diagrams.css", "lib")} } media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
+ <script type="text/javascript">
+ if(top === self) {{
+ var url = '{ val p = templateToPath(tpl); "../" * (p.size - 1) + "index.html" }';
+ var hash = '{ val p = templateToPath(tpl); (p.tail.reverse ::: List(p.head.replace(".html", ""))).mkString(".") }';
+ var anchor = window.location.hash;
+ var anchor_opt = '';
+ if (anchor.length { scala.xml.Unparsed(">=") /* unless we use Unparsed, it gets escaped and crashes the script */ } 1)
+ anchor_opt = '@' + anchor.substring(1);
+ window.location.href = url + '#' + hash + anchor_opt;
+ }}
+ </script>
</xml:group>
+ private val scripts = {
+ val sources = {
+ val default = List("jquery.js", "jquery-ui.js", "tools.tooltip.js", "template.js")
+ val forDiagrams = List("modernizr.custom.js", "diagrams.js")
+
+ (default ++ (if (universe.settings.docDiagrams.value) forDiagrams else Nil)) map {
+ x => x.replace('.', '-') -> relativeLinkTo(List(x, "lib"))
+ }
+ }
+
+ sources map {
+ case (id, src) =>
+ <script defer="defer" type="text/javascript" id={id} src={src}></script>
+ }
+ }
+
val valueMembers =
- tpl.methods.filterNot(_.isBridge) ++ tpl.values ++ tpl.templates.filter(x => x.isObject || x.isPackage) sorted
+ tpl.methods ++ tpl.values ++ tpl.templates.filter(x => x.isObject || x.isPackage) sorted
val (absValueMembers, nonAbsValueMembers) =
valueMembers partition (_.isAbstract)
- val (deprValueMembers, concValueMembers) =
+ val (deprValueMembers, nonDeprValueMembers) =
nonAbsValueMembers partition (_.deprecation.isDefined)
+ val (concValueMembers, shadowedImplicitMembers) =
+ nonDeprValueMembers partition (!_.isShadowedOrAmbiguousImplicit)
+
val typeMembers =
- tpl.abstractTypes ++ tpl.aliasTypes ++ tpl.templates.filter(x => x.isTrait || x.isClass) sorted
+ tpl.abstractTypes ++ tpl.aliasTypes ++ tpl.templates.filter(x => x.isTrait || x.isClass) sorted (implicitly[Ordering[MemberEntity]])
val constructors = (tpl match {
case cls: Class => (cls.constructors: List[MemberEntity]).sorted
@@ -48,7 +92,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
/* for body, there is a special case for AnyRef, otherwise AnyRef appears
* like a package/object this problem should be fixed, this implementation
* is just a patch. */
- def body = {
+ val body = {
val templateName = if (tpl.isRootPackage) "root package" else tpl.name
val displayName = tpl.companion match {
case Some(companion) if (companion.visibility.isPublic && companion.inSource != None) =>
@@ -60,11 +104,10 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
if (tpl.isRootPackage || tpl.inTemplate.isRootPackage)
NodeSeq.Empty
else
- <p id="owner">{ templatesToHtml(tpl.inTemplate.toRoot.reverse.tail, xml.Text(".")) }</p>
+ <p id="owner">{ templatesToHtml(tpl.inTemplate.toRoot.reverse.tail, scala.xml.Text(".")) }</p>
}
- <body class={ if (tpl.isTrait || tpl.isClass || tpl.qualifiedName == "scala.AnyRef") "type" else "value" }
- onload={ "sh_highlightDocument('../lib/', '.min.js');" }>
+ <body class={ if (tpl.isType) "type" else "value" }>
<div id="definition">
{
tpl.companion match {
@@ -78,25 +121,67 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
</div>
{ signature(tpl, true) }
- { memberToCommentHtml(tpl, true) }
+ { memberToCommentHtml(tpl, tpl.inTemplate, true) }
<div id="mbrsel">
- <div id='textfilter'><span class='pre'/><span class='input'><input type='text' accesskey='/'/></span><span class='post'/></div>
- { if (tpl.linearizationTemplates.isEmpty) NodeSeq.Empty else
+ <div id='textfilter'><span class='pre'/><span class='input'><input id='mbrsel-input' type='text' accesskey='/'/></span><span class='post'/></div>
+ { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty && (!universe.settings.docGroups.value || (tpl.members.map(_.group).distinct.length == 1)))
+ NodeSeq.Empty
+ else
<div id="order">
<span class="filtertype">Ordering</span>
- <ol><li class="alpha in"><span>Alphabetic</span></li><li class="inherit out"><span>By inheritance</span></li></ol>
+ <ol>
+ {
+ if (!universe.settings.docGroups.value || (tpl.members.map(_.group).distinct.length == 1))
+ NodeSeq.Empty
+ else
+ <li class="group out"><span>Grouped</span></li>
+ }
+ <li class="alpha in"><span>Alphabetic</span></li>
+ {
+ if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty)
+ NodeSeq.Empty
+ else
+ <li class="inherit out"><span>By inheritance</span></li>
+ }
+ </ol>
</div>
}
- { if (tpl.linearizationTemplates.isEmpty) NodeSeq.Empty else
- <div id="ancestors">
- <span class="filtertype">Inherited</span>
- <ol><li class="hideall out"><span>Hide All</span></li>
- <li class="showall in"><span>Show all</span></li></ol>
- <ol id="linearization">{
- (tpl :: tpl.linearizationTemplates) map { wte => <li class="in" name={ wte.qualifiedName }><span>{ wte.name }</span></li> }
- }</ol>
- </div>
+ { if (tpl.linearizationTemplates.isEmpty && tpl.conversions.isEmpty) NodeSeq.Empty else
+ {
+ if (!tpl.linearizationTemplates.isEmpty)
+ <div id="ancestors">
+ <span class="filtertype">Inherited<br/>
+ </span>
+ <ol id="linearization">
+ { (tpl :: tpl.linearizationTemplates).map(wte => <li class="in" name={ wte.qualifiedName }><span>{ wte.name }</span></li>) }
+ </ol>
+ </div>
+ else NodeSeq.Empty
+ } ++ {
+ if (!tpl.conversions.isEmpty)
+ <div id="ancestors">
+ <span class="filtertype">Implicitly<br/>
+ </span>
+ <ol id="implicits"> {
+ tpl.conversions.map { conv =>
+ val name = conv.conversionQualifiedName
+ val hide = universe.settings.hiddenImplicits(name)
+ <li class="in" name={ name } data-hidden={ hide.toString }><span>{ "by " + conv.conversionShortName }</span></li>
+ }
+ }
+ </ol>
+ </div>
+ else NodeSeq.Empty
+ } ++
+ <div id="ancestors">
+ <span class="filtertype"></span>
+ <ol>
+ <li class="hideall out"><span>Hide All</span></li>
+ <li class="showall in"><span>Show all</span></li>
+ </ol>
+ <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#members" target="_blank">Learn more about member selection</a>
+ </div>
}
{
<div id="visbl">
@@ -111,58 +196,86 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
{ if (constructors.isEmpty) NodeSeq.Empty else
<div id="constructors" class="members">
<h3>Instance Constructors</h3>
- <ol>{ constructors map (memberToHtml(_)) }</ol>
+ <ol>{ constructors map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (typeMembers.isEmpty) NodeSeq.Empty else
<div id="types" class="types members">
<h3>Type Members</h3>
- <ol>{ typeMembers map (memberToHtml(_)) }</ol>
+ <ol>{ typeMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (absValueMembers.isEmpty) NodeSeq.Empty else
<div id="values" class="values members">
<h3>Abstract Value Members</h3>
- <ol>{ absValueMembers map (memberToHtml(_)) }</ol>
+ <ol>{ absValueMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (concValueMembers.isEmpty) NodeSeq.Empty else
<div id="values" class="values members">
<h3>{ if (absValueMembers.isEmpty) "Value Members" else "Concrete Value Members" }</h3>
- <ol>{ concValueMembers map (memberToHtml(_)) }</ol>
+ <ol>{ concValueMembers map (memberToHtml(_, tpl)) }</ol>
+ </div>
+ }
+
+ { if (shadowedImplicitMembers.isEmpty) NodeSeq.Empty else
+ <div id="values" class="values members">
+ <h3>Shadowed Implicit Value Members</h3>
+ <ol>{ shadowedImplicitMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
{ if (deprValueMembers.isEmpty) NodeSeq.Empty else
<div id="values" class="values members">
<h3>Deprecated Value Members</h3>
- <ol>{ deprValueMembers map (memberToHtml(_)) }</ol>
+ <ol>{ deprValueMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
</div>
<div id="inheritedMembers">
{
+ // linearization
NodeSeq fromSeq (for ((superTpl, superType) <- (tpl.linearizationTemplates zip tpl.linearizationTypes)) yield
<div class="parent" name={ superTpl.qualifiedName }>
<h3>Inherited from {
- if (tpl.universe.settings.useStupidTypes.value)
- superTpl match {
- case dtpl: DocTemplateEntity =>
- val sig = signature(dtpl, false, true) \ "_"
- sig
- case tpl: TemplateEntity =>
- tpl.name
- }
- else
- typeToHtml(superType, true)
+ typeToHtmlWithStupidTypes(tpl, superTpl, superType)
}</h3>
</div>
)
}
+ {
+ // implicitly inherited
+ NodeSeq fromSeq (for (conversion <- (tpl.conversions)) yield
+ <div class="conversion" name={ conversion.conversionQualifiedName }>
+ <h3>Inherited by implicit conversion { conversion.conversionShortName } from
+ { typeToHtml(tpl.resultType, true) } to { typeToHtml(conversion.targetType, true) }
+ </h3>
+ </div>
+ )
+ }
+ </div>
+
+ <div id="groupedMembers">
+ {
+ val allGroups = tpl.members.map(_.group).distinct
+ val orderedGroups = allGroups.map(group => (tpl.groupPriority(group), group)).sorted.map(_._2)
+ // linearization
+ NodeSeq fromSeq (for (group <- orderedGroups) yield
+ <div class="group" name={ group }>
+ <h3>{ tpl.groupName(group) }</h3>
+ {
+ tpl.groupDescription(group) match {
+ case Some(body) => <div class="comment cmt">{ bodyToHtml(body) }</div>
+ case _ => NodeSeq.Empty
+ }
+ }
+ </div>
+ )
+ }
</div>
</div>
@@ -171,72 +284,45 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
{
if (Set("epfl", "EPFL").contains(tpl.universe.settings.docfooter.value))
- <div id="footer">Scala programming documentation. Copyright (c) 2003-2011 <a href="http://www.epfl.ch" target="_top">EPFL</a>, with contributions from <a href="http://typesafe.com" target="_top">Typesafe</a>.</div>
+ <div id="footer">Scala programming documentation. Copyright (c) 2003-2013 <a href="http://www.epfl.ch" target="_top">EPFL</a>, with contributions from <a href="http://typesafe.com" target="_top">Typesafe</a>.</div>
else
<div id="footer"> { tpl.universe.settings.docfooter.value } </div>
}
-
-
+ { scripts }
</body>
}
- def boundsToString(hi: Option[TypeEntity], lo: Option[TypeEntity]): String = {
- def bound0(bnd: Option[TypeEntity], pre: String): String = bnd match {
- case None => ""
- case Some(tpe) => pre ++ tpe.toString
- }
- bound0(hi, "<:") ++ bound0(lo, ">:")
- }
-
- def tparamsToString(tpss: List[TypeParam]): String = {
- if (tpss.isEmpty) "" else {
- def tparam0(tp: TypeParam): String =
- tp.variance + tp.name + boundsToString(tp.hi, tp.lo)
- def tparams0(tpss: List[TypeParam]): String = (tpss: @unchecked) match {
- case tp :: Nil => tparam0(tp)
- case tp :: tps => tparam0(tp) ++ ", " ++ tparams0(tps)
- }
- "[" + tparams0(tpss) + "]"
- }
- }
-
- def defParamsToString(d: MemberEntity with Def): String = {
- val paramLists: List[String] =
- if (d.valueParams.isEmpty) Nil
- else d.valueParams map (ps => ps map (_.resultType.name) mkString ("(",",",")"))
-
- tparamsToString(d.typeParams) + paramLists.mkString
- }
-
- def memberToHtml(mbr: MemberEntity): NodeSeq = {
- val defParamsString = mbr match {
- case d:MemberEntity with Def => defParamsToString(d)
- case _ => ""
- }
+ def memberToHtml(mbr: MemberEntity, inTpl: DocTemplateEntity): NodeSeq = {
+ val memberComment = memberToCommentHtml(mbr, inTpl, false)
<li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
- data-isabs={ mbr.isAbstract.toString }>
- <a id={ mbr.name +defParamsString +":"+ mbr.resultType.name}/>
+ data-isabs={ mbr.isAbstract.toString }
+ fullComment={ if(memberComment.filter(_.label=="div").isEmpty) "no" else "yes" }
+ group={ mbr.group }>
+ <a id={ mbr.signature }/>
+ <a id={ mbr.signatureCompat }/>
{ signature(mbr, false) }
- { memberToCommentHtml(mbr, false) }
+ { memberComment }
</li>
}
- def memberToCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq = {
+ def memberToCommentHtml(mbr: MemberEntity, inTpl: DocTemplateEntity, isSelf: Boolean): NodeSeq = {
mbr match {
case dte: DocTemplateEntity if isSelf =>
// comment of class itself
- memberToFullCommentHtml(mbr, isSelf = true)
+ <xml:group>
+ <div id="comment" class="fullcommenttop">{ memberToCommentBodyHtml(mbr, inTpl, isSelf = true) }</div>
+ </xml:group>
case dte: DocTemplateEntity if mbr.comment.isDefined =>
// comment of inner, documented class (only short comment, full comment is on the class' own page)
memberToInlineCommentHtml(mbr, isSelf)
case _ =>
// comment of non-class member or non-documentented inner class
- val commentBody = memberToCommentBodyHtml(mbr, isSelf = false)
+ val commentBody = memberToCommentBodyHtml(mbr, inTpl, isSelf = false)
if (commentBody.isEmpty)
NodeSeq.Empty
else {
val shortComment = memberToShortCommentHtml(mbr, isSelf)
- val longComment = memberToUseCaseCommentHtml(mbr, isSelf) ++ memberToCommentBodyHtml(mbr, isSelf)
+ val longComment = memberToUseCaseCommentHtml(mbr, isSelf) ++ memberToCommentBodyHtml(mbr, inTpl, isSelf)
val includedLongComment = if (shortComment.text.trim == longComment.text.trim)
NodeSeq.Empty
@@ -256,22 +342,17 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
}
- def memberToShortCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq = {
- if (mbr.comment.isEmpty)
- NodeSeq.Empty
- else
- <p class="shortcomment cmt">{ memberToUseCaseCommentHtml(mbr, isSelf) }{ inlineToHtml(mbr.comment.get.short) }</p>
- }
-
- def memberToFullCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq =
- <xml:group>
- <div id="comment" class="fullcommenttop">{ memberToCommentBodyHtml(mbr, isSelf = true) }</div>
- </xml:group>
+ def memberToShortCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq =
+ mbr.comment.fold(NodeSeq.Empty) { comment =>
+ <p class="shortcomment cmt">{ memberToUseCaseCommentHtml(mbr, isSelf) }{ inlineToHtml(comment.short) }</p>
+ }
def memberToInlineCommentHtml(mbr: MemberEntity, isSelf: Boolean): NodeSeq =
<p class="comment cmt">{ inlineToHtml(mbr.comment.get.short) }</p>
- def memberToCommentBodyHtml(mbr: MemberEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
+ def memberToCommentBodyHtml(mbr: MemberEntity, inTpl: DocTemplateEntity, isSelf: Boolean, isReduced: Boolean = false): NodeSeq = {
+ val s = universe.settings
+
val memberComment =
if (mbr.comment.isEmpty) NodeSeq.Empty
else <div class="comment cmt">{ commentToHtml(mbr.comment) }</div>
@@ -285,35 +366,34 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
case _ => Nil
}
- def mbrCmt = mbr.comment.get
-
- def paramCommentToHtml(prs: List[ParameterEntity]): NodeSeq = prs match {
- case Nil => NodeSeq.Empty
+ def paramCommentToHtml(prs: List[ParameterEntity], comment: Comment): NodeSeq = prs match {
case (tp: TypeParam) :: rest =>
val paramEntry: NodeSeq = {
- <dt class="tparam">{ tp.name }</dt><dd class="cmt">{ bodyToHtml(mbrCmt.typeParams(tp.name)) }</dd>
+ <dt class="tparam">{ tp.name }</dt><dd class="cmt">{ bodyToHtml(comment.typeParams(tp.name)) }</dd>
}
- paramEntry ++ paramCommentToHtml(rest)
+ paramEntry ++ paramCommentToHtml(rest, comment)
case (vp: ValueParam) :: rest =>
val paramEntry: NodeSeq = {
- <dt class="param">{ vp.name }</dt><dd class="cmt">{ bodyToHtml(mbrCmt.valueParams(vp.name)) }</dd>
+ <dt class="param">{ vp.name }</dt><dd class="cmt">{ bodyToHtml(comment.valueParams(vp.name)) }</dd>
}
- paramEntry ++ paramCommentToHtml(rest)
+ paramEntry ++ paramCommentToHtml(rest, comment)
+
+ case _ =>
+ NodeSeq.Empty
}
- if (mbr.comment.isEmpty) NodeSeq.Empty
- else {
+ mbr.comment.fold(NodeSeq.Empty) { comment =>
val cmtedPrs = prs filter {
- case tp: TypeParam => mbrCmt.typeParams isDefinedAt tp.name
- case vp: ValueParam => mbrCmt.valueParams isDefinedAt vp.name
+ case tp: TypeParam => comment.typeParams isDefinedAt tp.name
+ case vp: ValueParam => comment.valueParams isDefinedAt vp.name
}
- if (cmtedPrs.isEmpty && mbrCmt.result.isEmpty) NodeSeq.Empty
+ if (cmtedPrs.isEmpty && comment.result.isEmpty) NodeSeq.Empty
else {
<dl class="paramcmts block">{
- paramCommentToHtml(cmtedPrs) ++ (
- mbrCmt.result match {
+ paramCommentToHtml(cmtedPrs, comment) ++ (
+ comment.result match {
case None => NodeSeq.Empty
case Some(cmt) =>
<dt>returns</dt><dd class="cmt">{ bodyToHtml(cmt) }</dd>
@@ -323,33 +403,112 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
}
+ val implicitInformation = mbr.byConversion match {
+ case Some(conv) =>
+ <dt class="implicit">Implicit information</dt> ++
+ {
+ val targetType = typeToHtml(conv.targetType, true)
+ val conversionMethod = conv.convertorMethod match {
+ case Left(member) => Text(member.name)
+ case Right(name) => Text(name)
+ }
+
+ // strip off the package object endings, they make things harder to follow
+ val conversionOwnerQualifiedNane = conv.convertorOwner.qualifiedName.stripSuffix(".package")
+ val conversionOwner = templateToHtml(conv.convertorOwner, conversionOwnerQualifiedNane)
+
+ val constraintText = conv.constraints match {
+ case Nil =>
+ NodeSeq.Empty
+ case List(constraint) =>
+ scala.xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint) ++ scala.xml.Text(".")
+ case List(constraint1, constraint2) =>
+ scala.xml.Text("This conversion will take place only if ") ++ constraintToHtml(constraint1) ++
+ scala.xml.Text(" and at the same time ") ++ constraintToHtml(constraint2) ++ scala.xml.Text(".")
+ case constraints =>
+ <br/> ++ "This conversion will take place only if all of the following constraints are met:" ++ <br/> ++ {
+ var index = 0
+ constraints map { constraint => scala.xml.Text({ index += 1; index } + ". ") ++ constraintToHtml(constraint) ++ <br/> }
+ }
+ }
+
+ <dd>
+ This member is added by an implicit conversion from { typeToHtml(inTpl.resultType, true) } to
+ { targetType } performed by method { conversionMethod } in { conversionOwner }.
+ { constraintText }
+ </dd>
+ } ++ {
+ if (mbr.isShadowedOrAmbiguousImplicit) {
+ // These are the members that are shadowing or ambiguating the current implicit
+ // see ImplicitMemberShadowing trait for more information
+ val shadowingSuggestion = {
+ val params = mbr match {
+ case d: Def => d.valueParams map (_ map (_ name) mkString("(", ", ", ")")) mkString
+ case _ => "" // no parameters
+ }
+ <br/> ++ scala.xml.Text("To access this member you can use a ") ++
+ <a href="http://stackoverflow.com/questions/2087250/what-is-the-purpose-of-type-ascription-in-scala"
+ target="_blank">type ascription</a> ++ scala.xml.Text(":") ++
+ <br/> ++ <div class="cmt"><pre>{"(" + Template.lowerFirstLetter(tpl.name) + ": " + conv.targetType.name + ")." + mbr.name + params }</pre></div>
+ }
+
+ val shadowingWarning: NodeSeq =
+ if (mbr.isShadowedImplicit)
+ scala.xml.Text("This implicitly inherited member is shadowed by one or more members in this " +
+ "class.") ++ shadowingSuggestion
+ else if (mbr.isAmbiguousImplicit)
+ scala.xml.Text("This implicitly inherited member is ambiguous. One or more implicitly " +
+ "inherited members have similar signatures, so calling this member may produce an ambiguous " +
+ "implicit conversion compiler error.") ++ shadowingSuggestion
+ else NodeSeq.Empty
+
+ <dt class="implicit">Shadowing</dt> ++
+ <dd>{ shadowingWarning }</dd>
+
+ } else NodeSeq.Empty
+ }
+ case _ =>
+ NodeSeq.Empty
+ }
+
// --- start attributes block vals
- val attributes: Seq[scala.xml.Node] = {
+ val attributes: NodeSeq = {
val fvs: List[comment.Paragraph] = visibility(mbr).toList
if (fvs.isEmpty || isReduced) NodeSeq.Empty
else {
<dt>Attributes</dt>
- <dd>{ fvs map { fv => { inlineToHtml(fv.text) ++ xml.Text(" ") } } }</dd>
+ <dd>{ fvs map { fv => { inlineToHtml(fv.text) ++ scala.xml.Text(" ") } } }</dd>
}
}
- val definitionClasses: Seq[scala.xml.Node] = {
+ val definitionClasses: NodeSeq = {
val inDefTpls = mbr.inDefinitionTemplates
- if ((inDefTpls.tail.isEmpty && (inDefTpls.head == mbr.inTemplate)) || isReduced) NodeSeq.Empty
+ if ((inDefTpls.tail.isEmpty && (inDefTpls.head == inTpl)) || isReduced) NodeSeq.Empty
else {
<dt>Definition Classes</dt>
- <dd>{ templatesToHtml(inDefTpls, xml.Text(" → ")) }</dd>
+ <dd>{ templatesToHtml(inDefTpls, scala.xml.Text(" → ")) }</dd>
}
}
- val selfType: Seq[scala.xml.Node] = mbr match {
+ val fullSignature: NodeSeq = {
+ mbr match {
+ case nte: NonTemplateMemberEntity if nte.isUseCase =>
+ <div class="full-signature-block toggleContainer">
+ <span class="toggle">Full Signature</span>
+ <div class="hiddenContent full-signature-usecase">{ signature(nte.useCaseOf.get,true) }</div>
+ </div>
+ case _ => NodeSeq.Empty
+ }
+ }
+
+ val selfType: NodeSeq = mbr match {
case dtpl: DocTemplateEntity if (isSelf && !dtpl.selfType.isEmpty && !isReduced) =>
<dt>Self Type</dt>
<dd>{ typeToHtml(dtpl.selfType.get, hasLinks = true) }</dd>
case _ => NodeSeq.Empty
}
- val annotations: Seq[scala.xml.Node] = {
+ val annotations: NodeSeq = {
// A list of annotations which don't show their arguments, e. g. because they are shown separately.
val annotationsWithHiddenArguments = List("deprecated", "Deprecated", "migration")
@@ -371,7 +530,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
} else NodeSeq.Empty
}
- val sourceLink: Seq[scala.xml.Node] = mbr match {
+ val sourceLink: NodeSeq = mbr match {
case dtpl: DocTemplateEntity if (isSelf && dtpl.sourceUrl.isDefined && dtpl.inSource.isDefined && !isReduced) =>
val (absFile, line) = dtpl.inSource.get
<dt>Source</dt>
@@ -379,81 +538,87 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
case _ => NodeSeq.Empty
}
- val deprecation: Seq[scala.xml.Node] =
- if (mbr.deprecation.isEmpty || isReduced) NodeSeq.Empty
- else {
- <dt>Deprecated</dt>
- <dd class="cmt">{ bodyToHtml(mbr.deprecation.get) }</dd>
+ val deprecation: NodeSeq =
+ mbr.deprecation match {
+ case Some(deprecation) if !isReduced =>
+ <dt>Deprecated</dt>
+ <dd class="cmt">{ bodyToHtml(deprecation) }</dd>
+ case _ => NodeSeq.Empty
}
- val migration: Seq[scala.xml.Node] =
- if(mbr.migration.isEmpty || isReduced) NodeSeq.Empty
- else {
+ val migration: NodeSeq =
+ mbr.migration match {
+ case Some(migration) if !isReduced =>
<dt>Migration</dt>
- <dd class="cmt">{ bodyToHtml(mbr.migration.get) }</dd>
+ <dd class="cmt">{ bodyToHtml(migration) }</dd>
+ case _ => NodeSeq.Empty
}
- val mainComment: Seq[scala.xml.Node] = mbr.comment match {
+ val mainComment: NodeSeq = mbr.comment match {
case Some(comment) if (! isReduced) =>
+ def orEmpty[T](it: Iterable[T])(gen: =>NodeSeq): NodeSeq =
+ if (it.isEmpty) NodeSeq.Empty else gen
+
val example =
- if(!comment.example.isEmpty)
+ orEmpty(comment.example) {
<div class="block">Example{ if (comment.example.length > 1) "s" else ""}:
- <ol>{
- val exampleXml: List[scala.xml.NodeSeq] =
- for(example <- comment.example ) yield
- <li class="cmt">{ bodyToHtml(example) }</li>
- exampleXml.reduceLeft(_ ++ Text(", ") ++ _)
+ <ol>{
+ val exampleXml: List[NodeSeq] = for (ex <- comment.example) yield
+ <li class="cmt">{ bodyToHtml(ex) }</li>
+ exampleXml.reduceLeft(_ ++ Text(", ") ++ _)
}</ol>
- </div>
- else NodeSeq.Empty
+ </div>
+ }
- val version: Seq[scala.xml.Node] =
- if(!comment.version.isEmpty) {
+ val version: NodeSeq =
+ orEmpty(comment.version) {
<dt>Version</dt>
- <dd>{ for(body <- comment.version.toList) yield {bodyToHtml(body)} }</dd>
- } else NodeSeq.Empty
+ <dd>{ for(body <- comment.version.toList) yield bodyToHtml(body) }</dd>
+ }
- val sinceVersion: Seq[scala.xml.Node] =
- if(!comment.since.isEmpty) {
+ val sinceVersion: NodeSeq =
+ orEmpty(comment.since) {
<dt>Since</dt>
- <dd>{ for(body <- comment.since.toList) yield {bodyToHtml(body)} }</dd>
- } else NodeSeq.Empty
+ <dd>{ for(body <- comment.since.toList) yield bodyToHtml(body) }</dd>
+ }
- val note: Seq[scala.xml.Node] =
- if(!comment.note.isEmpty) {
+ val note: NodeSeq =
+ orEmpty(comment.note) {
<dt>Note</dt>
<dd>{
- val noteXml: List[scala.xml.NodeSeq] = (for(note <- comment.note ) yield <span class="cmt">{bodyToHtml(note)}</span> )
+ val noteXml: List[NodeSeq] = for(note <- comment.note ) yield <span class="cmt">{bodyToHtml(note)}</span>
noteXml.reduceLeft(_ ++ Text(", ") ++ _)
}</dd>
- } else NodeSeq.Empty
+ }
- val seeAlso: Seq[scala.xml.Node] =
- if(!comment.see.isEmpty) {
+ val seeAlso: NodeSeq =
+ orEmpty(comment.see) {
<dt>See also</dt>
<dd>{
- val seeXml:List[scala.xml.NodeSeq]=(for(see <- comment.see ) yield <span class="cmt">{bodyToHtml(see)}</span> )
- seeXml.reduceLeft(_ ++ Text(", ") ++ _)
+ val seeXml: List[NodeSeq] = for(see <- comment.see ) yield <span class="cmt">{bodyToHtml(see)}</span>
+ seeXml.reduceLeft(_ ++ _)
}</dd>
- } else NodeSeq.Empty
+ }
- val exceptions: Seq[scala.xml.Node] =
- if(!comment.throws.isEmpty) {
+ val exceptions: NodeSeq =
+ orEmpty(comment.throws) {
<dt>Exceptions thrown</dt>
<dd>{
- val exceptionsXml: Iterable[scala.xml.NodeSeq] = (for(exception <- comment.throws ) yield <span class="cmt">{Text(exception._1) ++ bodyToHtml(exception._2)}</span> )
+ val exceptionsXml: List[NodeSeq] =
+ for((name, body) <- comment.throws.toList.sortBy(_._1) ) yield
+ <span class="cmt">{Text(name) ++ bodyToHtml(body)}</span>
exceptionsXml.reduceLeft(_ ++ Text("") ++ _)
}</dd>
- } else NodeSeq.Empty
+ }
- val todo: Seq[scala.xml.Node] =
- if(!comment.todo.isEmpty) {
+ val todo: NodeSeq =
+ orEmpty(comment.todo) {
<dt>To do</dt>
<dd>{
- val todoXml: List[scala.xml.NodeSeq] = (for(todo <- comment.todo ) yield <span class="cmt">{bodyToHtml(todo)}</span> )
+ val todoXml: List[NodeSeq] = (for(todo <- comment.todo ) yield <span class="cmt">{bodyToHtml(todo)}</span> )
todoXml.reduceLeft(_ ++ Text(", ") ++ _)
}</dd>
- } else NodeSeq.Empty
+ }
example ++ version ++ sinceVersion ++ exceptions ++ todo ++ note ++ seeAlso
@@ -461,7 +626,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
// end attributes block vals ---
- val attributesInfo = attributes ++ definitionClasses ++ selfType ++ annotations ++ deprecation ++ migration ++ sourceLink ++ mainComment
+ val attributesInfo = implicitInformation ++ attributes ++ definitionClasses ++ fullSignature ++ selfType ++ annotations ++ deprecation ++ migration ++ sourceLink ++ mainComment
val attributesBlock =
if (attributesInfo.isEmpty)
NodeSeq.Empty
@@ -473,43 +638,53 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
<div class="toggleContainer block">
<span class="toggle">Linear Supertypes</span>
<div class="superTypes hiddenContent">{
- typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = xml.Text(", "))
+ typesToHtml(dtpl.linearizationTypes, hasLinks = true, sep = scala.xml.Text(", "))
}</div>
</div>
case _ => NodeSeq.Empty
}
val subclasses = mbr match {
- case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.subClasses.nonEmpty =>
+ case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.allSubClasses.nonEmpty =>
<div class="toggleContainer block">
<span class="toggle">Known Subclasses</span>
<div class="subClasses hiddenContent">{
- templatesToHtml(dtpl.subClasses.sortBy(_.name), xml.Text(", "))
+ templatesToHtml(dtpl.allSubClasses.sortBy(_.name), scala.xml.Text(", "))
}</div>
</div>
case _ => NodeSeq.Empty
}
- memberComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses
- }
+ def createDiagram(f: DocTemplateEntity => Option[Diagram], description: String, id: String): NodeSeq =
+ if (s.docDiagrams.value) mbr match {
+ case dtpl: DocTemplateEntity if isSelf && !isReduced =>
+ val diagram = f(dtpl)
+ if (diagram.isDefined) {
+ val s = universe.settings
+ val diagramSvg = generator.generate(diagram.get, tpl, this)
+ if (diagramSvg != NodeSeq.Empty) {
+ <div class="toggleContainer block diagram-container" id={ id + "-container"}>
+ <span class="toggle diagram-link">{ description }</span>
+ <a href="http://docs.scala-lang.org/overviews/scaladoc/usage.html#diagrams" target="_blank" class="diagram-help">Learn more about scaladoc diagrams</a>
+ <div class="diagram" id={ id }>{
+ diagramSvg
+ }</div>
+ </div>
+ } else NodeSeq.Empty
+ } else NodeSeq.Empty
+ case _ => NodeSeq.Empty
+ } else NodeSeq.Empty // diagrams not generated
- def kindToString(mbr: MemberEntity): String = {
- mbr match {
- case tpl: DocTemplateEntity => docEntityKindToString(tpl)
- case ctor: Constructor => "new"
- case tme: MemberEntity =>
- ( if (tme.isDef) "def"
- else if (tme.isVal) "val"
- else if (tme.isLazyVal) "lazy val"
- else if (tme.isVar) "var"
- else "type")
- }
+ val typeHierarchy = createDiagram(_.inheritanceDiagram, "Type Hierarchy", "inheritance-diagram")
+ val contentHierarchy = createDiagram(_.contentDiagram, "Content Hierarchy", "content-diagram")
+
+ memberComment ++ paramComments ++ attributesBlock ++ linearization ++ subclasses ++ typeHierarchy ++ contentHierarchy
}
def boundsToHtml(hi: Option[TypeEntity], lo: Option[TypeEntity], hasLinks: Boolean): NodeSeq = {
def bound0(bnd: Option[TypeEntity], pre: String): NodeSeq = bnd match {
case None => NodeSeq.Empty
- case Some(tpe) => xml.Text(pre) ++ typeToHtml(tpe, hasLinks)
+ case Some(tpe) => scala.xml.Text(pre) ++ typeToHtml(tpe, hasLinks)
}
bound0(lo, " >: ") ++ bound0(hi, " <: ")
}
@@ -523,13 +698,13 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
case PrivateInTemplate(owner) if (owner == mbr.inTemplate) =>
Some(Paragraph(CText("private")))
case PrivateInTemplate(owner) =>
- Some(Paragraph(Chain(List(CText("private["), EntityLink(owner), CText("]")))))
+ Some(Paragraph(Chain(List(CText("private["), EntityLink(comment.Text(owner.qualifiedName), LinkToTpl(owner)), CText("]")))))
case ProtectedInInstance() =>
Some(Paragraph(CText("protected[this]")))
case ProtectedInTemplate(owner) if (owner == mbr.inTemplate) =>
Some(Paragraph(CText("protected")))
case ProtectedInTemplate(owner) =>
- Some(Paragraph(Chain(List(CText("protected["), EntityLink(owner), CText("]")))))
+ Some(Paragraph(Chain(List(CText("protected["), EntityLink(comment.Text(owner.qualifiedName), LinkToTpl(owner)), CText("]")))))
case Public() =>
None
}
@@ -540,17 +715,26 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
def inside(hasLinks: Boolean, nameLink: String = ""): NodeSeq =
<xml:group>
<span class="modifier_kind">
- <span class="modifier">{ mbr.flags.map(flag => inlineToHtml(flag.text) ++ xml.Text(" ")) }</span>
+ <span class="modifier">{ mbr.flags.map(flag => inlineToHtml(flag.text) ++ scala.xml.Text(" ")) }</span>
<span class="kind">{ kindToString(mbr) }</span>
</span>
<span class="symbol">
{
+ val nameClass =
+ if (mbr.isImplicitlyInherited)
+ if (mbr.isShadowedOrAmbiguousImplicit)
+ "implicit shadowed"
+ else
+ "implicit"
+ else
+ "name"
+
val nameHtml = {
val value = if (mbr.isConstructor) tpl.name else mbr.name
val span = if (mbr.deprecation.isDefined)
- <span class={"name deprecated"} title={"Deprecated: "+bodyToStr(mbr.deprecation.get)}>{ value }</span>
+ <span class={ nameClass + " deprecated"} title={"Deprecated: "+bodyToStr(mbr.deprecation.get)}>{ value }</span>
else
- <span class={"name"}>{ value }</span>
+ <span class={ nameClass }>{ value }</span>
val encoded = scala.reflect.NameTransformer.encode(value)
if (encoded != value) {
span % new UnprefixedAttribute("title",
@@ -567,7 +751,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
<a href={nameLink}>{nameHtml}</a>
else nameHtml
}{
- def tparamsToHtml(mbr: Entity): NodeSeq = mbr match {
+ def tparamsToHtml(mbr: Any): NodeSeq = mbr match {
case hk: HigherKinded =>
val tpss = hk.typeParams
if (tpss.isEmpty) NodeSeq.Empty else {
@@ -579,7 +763,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
<span class="tparams">[{ tparams0(tpss) }]</span>
}
- case _ => NodeSeq.Empty
+ case _ => NodeSeq.Empty
}
tparamsToHtml(mbr)
}{
@@ -616,20 +800,21 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
}{ if (isReduced) NodeSeq.Empty else {
mbr match {
- case tpl: DocTemplateEntity if tpl.parentType.isDefined =>
- <span class="result"> extends { typeToHtml(tpl.parentType.get, hasLinks) }</span>
-
case tme: MemberEntity if (tme.isDef || tme.isVal || tme.isLazyVal || tme.isVar) =>
<span class="result">: { typeToHtml(tme.resultType, hasLinks) }</span>
- case abt: AbstractType =>
+ case abt: MemberEntity with AbstractType =>
val b2s = boundsToHtml(abt.hi, abt.lo, hasLinks)
if (b2s != NodeSeq.Empty)
<span class="result">{ b2s }</span>
else NodeSeq.Empty
- case alt: AliasType =>
+ case alt: MemberEntity with AliasType =>
<span class="result"> = { typeToHtml(alt.alias, hasLinks) }</span>
+
+ case tpl: MemberTemplateEntity if !tpl.parentTypes.isEmpty =>
+ <span class="result"> extends { typeToHtml(tpl.parentTypes.map(_._2), hasLinks) }</span>
+
case _ => NodeSeq.Empty
}
}}
@@ -637,7 +822,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
</xml:group>
mbr match {
case dte: DocTemplateEntity if !isSelf =>
- <h4 class="signature">{ inside(hasLinks = false, nameLink = relativeLinkTo(dte)) }</h4>
+ <h4 class="signature">{ inside(hasLinks = true, nameLink = relativeLinkTo(dte)) }</h4>
case _ if isSelf =>
<h4 id="signature" class="signature">{ inside(hasLinks = true) }</h4>
case _ =>
@@ -687,18 +872,13 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
val link = relativeLinkTo(mbr)
myXml ++= <span class="name"><a href={link}>{str.substring(from, to)}</a></span>
case mbr: MemberEntity =>
- val anchor = "#" + mbr.name + defParamsString(mbr) + ":" + mbr.resultType.name
+ val anchor = "#" + mbr.signature
val link = relativeLinkTo(mbr.inTemplate)
myXml ++= <span class="name"><a href={link ++ anchor}>{str.substring(from, to)}</a></span>
}
index = to
}
}
- // function used in the MemberEntity case above
- def defParamsString(mbr: Entity):String = mbr match {
- case d:MemberEntity with Def => defParamsToString(d)
- case _ => ""
- }
if (index <= length-1)
myXml ++= codeStringToXml(str.substring(index, length ))
@@ -713,7 +893,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
def argumentsToHtml0(argss: List[ValueArgument]): NodeSeq = argss match {
case Nil => NodeSeq.Empty
case arg :: Nil => argumentToHtml(arg)
- case arg :: args => argumentToHtml(arg) ++ xml.Text(", ") ++ argumentsToHtml0(args)
+ case arg :: args => argumentToHtml(arg) ++ scala.xml.Text(", ") ++ argumentsToHtml0(args)
}
<span class="args">({ argumentsToHtml0(argss) })</span>
}
@@ -749,4 +929,49 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
case _ => inl.toString
}
+ private def typeToHtmlWithStupidTypes(tpl: TemplateEntity, superTpl: TemplateEntity, superType: TypeEntity): NodeSeq =
+ if (tpl.universe.settings.useStupidTypes.value)
+ superTpl match {
+ case dtpl: DocTemplateEntity =>
+ val sig = signature(dtpl, false, true) \ "_"
+ sig
+ case tpl: TemplateEntity =>
+ Text(tpl.name)
+ }
+ else
+ typeToHtml(superType, true)
+
+ private def constraintToHtml(constraint: Constraint): NodeSeq = constraint match {
+ case ktcc: KnownTypeClassConstraint =>
+ scala.xml.Text(ktcc.typeExplanation(ktcc.typeParamName) + " (" + ktcc.typeParamName + ": ") ++
+ templateToHtml(ktcc.typeClassEntity) ++ scala.xml.Text(")")
+ case tcc: TypeClassConstraint =>
+ scala.xml.Text(tcc.typeParamName + " is ") ++
+ <a href="http://stackoverflow.com/questions/2982276/what-is-a-context-bound-in-scala" target="_blank">
+ context-bounded</a> ++ scala.xml.Text(" by " + tcc.typeClassEntity.qualifiedName + " (" + tcc.typeParamName + ": ") ++
+ templateToHtml(tcc.typeClassEntity) ++ scala.xml.Text(")")
+ case impl: ImplicitInScopeConstraint =>
+ scala.xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, true) ++ scala.xml.Text(" is in scope")
+ case eq: EqualTypeParamConstraint =>
+ scala.xml.Text(eq.typeParamName + " is " + eq.rhs.name + " (" + eq.typeParamName + " =:= ") ++
+ typeToHtml(eq.rhs, true) ++ scala.xml.Text(")")
+ case bt: BoundedTypeParamConstraint =>
+ scala.xml.Text(bt.typeParamName + " is a superclass of " + bt.lowerBound.name + " and a subclass of " +
+ bt.upperBound.name + " (" + bt.typeParamName + " >: ") ++
+ typeToHtml(bt.lowerBound, true) ++ scala.xml.Text(" <: ") ++
+ typeToHtml(bt.upperBound, true) ++ scala.xml.Text(")")
+ case lb: LowerBoundedTypeParamConstraint =>
+ scala.xml.Text(lb.typeParamName + " is a superclass of " + lb.lowerBound.name + " (" + lb.typeParamName + " >: ") ++
+ typeToHtml(lb.lowerBound, true) ++ scala.xml.Text(")")
+ case ub: UpperBoundedTypeParamConstraint =>
+ scala.xml.Text(ub.typeParamName + " is a subclass of " + ub.upperBound.name + " (" + ub.typeParamName + " <: ") ++
+ typeToHtml(ub.upperBound, true) ++ scala.xml.Text(")")
+ }
+}
+
+object Template {
+ /* Vlad: Lesson learned the hard way: don't put any stateful code that references the model here,
+ * it won't be garbage collected and you'll end up filling the heap with garbage */
+
+ def lowerFirstLetter(s: String) = if (s.length >= 1) s.substring(0,1).toLowerCase() + s.substring(1) else s
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
new file mode 100644
index 0000000..61c1819
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
@@ -0,0 +1,53 @@
+/**
+ * @author Damien Obrist
+ * @author Vlad Ureche
+ */
+package scala.tools.nsc
+package doc
+package html
+package page
+package diagram
+
+import scala.xml.NodeSeq
+import scala.tools.nsc.doc.html.HtmlPage
+import scala.tools.nsc.doc.model.diagram.Diagram
+import scala.tools.nsc.doc.model.DocTemplateEntity
+
+trait DiagramGenerator {
+
+ /**
+ * Generates a visualization of the internal representation
+ * of a diagram.
+ *
+ * @param d The model of the diagram
+ * @param p The page the diagram will be embedded in (needed for link generation)
+ * @return The HTML to be embedded in the Scaladoc page
+ */
+ def generate(d: Diagram, t: DocTemplateEntity, p: HtmlPage):NodeSeq
+}
+
+object DiagramGenerator {
+
+ // TODO: This is tailored towards the dot generator, since it's the only generator. In the future it should be more
+ // general.
+
+ private[this] var dotRunner: DotRunner = null
+ private[this] var settings: doc.Settings = null
+
+ def initialize(s: doc.Settings) =
+ settings = s
+
+ def getDotRunner() = {
+ if (dotRunner == null)
+ dotRunner = new DotRunner(settings)
+ dotRunner
+ }
+
+ def cleanup() = {
+ DiagramStats.printStats(settings)
+ if (dotRunner != null) {
+ dotRunner.cleanup()
+ dotRunner = null
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
new file mode 100644
index 0000000..ec00cac
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
@@ -0,0 +1,66 @@
+/**
+ * @author Vlad Ureche
+ */
+package scala.tools.nsc.doc
+package html.page.diagram
+
+object DiagramStats {
+
+ class TimeTracker(title: String) {
+ var totalTime: Long = 0l
+ var maxTime: Long = 0l
+ var instances: Int = 0
+
+ def addTime(ms: Long) = {
+ if (maxTime < ms)
+ maxTime = ms
+ totalTime += ms
+ instances += 1
+ }
+
+ def printStats(print: String => Unit) = {
+ if (instances == 0)
+ print(title + ": no stats gathered")
+ else {
+ print(" " + title)
+ print(" " + "=" * title.length)
+ print(" count: " + instances + " items")
+ print(" total time: " + totalTime + " ms")
+ print(" average time: " + (totalTime/instances) + " ms")
+ print(" maximum time: " + maxTime + " ms")
+ print("")
+ }
+ }
+ }
+
+ private[this] val filterTrack = new TimeTracker("diagrams model filtering")
+ private[this] val modelTrack = new TimeTracker("diagrams model generation")
+ private[this] val dotGenTrack = new TimeTracker("dot diagram generation")
+ private[this] val dotRunTrack = new TimeTracker("dot process runnning")
+ private[this] val svgTrack = new TimeTracker("svg processing")
+ private[this] var brokenImages = 0
+ private[this] var fixedImages = 0
+
+ def printStats(settings: Settings) = {
+ if (settings.docDiagramsDebug.value) {
+ settings.printMsg("\nDiagram generation running time breakdown:\n")
+ filterTrack.printStats(settings.printMsg)
+ modelTrack.printStats(settings.printMsg)
+ dotGenTrack.printStats(settings.printMsg)
+ dotRunTrack.printStats(settings.printMsg)
+ svgTrack.printStats(settings.printMsg)
+ println(" Broken images: " + brokenImages)
+ println(" Fixed images: " + fixedImages)
+ println("")
+ }
+ }
+
+ def addFilterTime(ms: Long) = filterTrack.addTime(ms)
+ def addModelTime(ms: Long) = modelTrack.addTime(ms)
+ def addDotGenerationTime(ms: Long) = dotGenTrack.addTime(ms)
+ def addDotRunningTime(ms: Long) = dotRunTrack.addTime(ms)
+ def addSvgTime(ms: Long) = svgTrack.addTime(ms)
+
+ def addBrokenImage(): Unit = brokenImages += 1
+ def addFixedImage(): Unit = fixedImages += 1
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
new file mode 100644
index 0000000..8473678
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
@@ -0,0 +1,511 @@
+/**
+ * @author Damien Obrist
+ * @author Vlad Ureche
+ */
+package scala.tools.nsc
+package doc
+package html
+package page
+package diagram
+
+import scala.xml.{NodeSeq, XML, PrefixedAttribute, Elem, MetaData, Null, UnprefixedAttribute}
+import scala.collection.immutable._
+import javax.xml.parsers.SAXParser
+import model._
+import model.diagram._
+
+class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
+
+ // the page where the diagram will be embedded
+ private var page: HtmlPage = null
+ // path to the "lib" folder relative to the page
+ private var pathToLib: String = null
+ // maps nodes to unique indices
+ private var node2Index: Map[Node, Int] = null
+ // maps an index to its corresponding node
+ private var index2Node: Map[Int, Node] = null
+ // true if the current diagram is a class diagram
+ private var isInheritanceDiagram = false
+ // incoming implicit nodes (needed for determining the CSS class of a node)
+ private var incomingImplicitNodes: List[Node] = List()
+ // the suffix used when there are two many classes to show
+ private final val MultiSuffix = " classes/traits"
+ // used to generate unique node and edge ids (i.e. avoid conflicts with multiple diagrams)
+ private var counter = 0
+
+ def generate(diagram: Diagram, template: DocTemplateEntity, page: HtmlPage):NodeSeq = {
+ counter = counter + 1;
+ this.page = page
+ pathToLib = "../" * (page.templateToPath(template).size - 1) + "lib/"
+ val dot = generateDot(diagram)
+ val result = generateSVG(dot, template)
+ // clean things up a bit, so we don't leave garbage on the heap
+ this.page = null
+ node2Index = null
+ index2Node = null
+ incomingImplicitNodes = List()
+ result
+ }
+
+ /**
+ * Generates a dot string for a given diagram.
+ */
+ private def generateDot(d: Diagram) = {
+ // inheritance nodes (all nodes except thisNode and implicit nodes)
+ var nodes: List[Node] = null
+ // inheritance edges (all edges except implicit edges)
+ var edges: List[(Node, List[Node])] = null
+
+ // timing
+ var tDot = -System.currentTimeMillis
+
+ // variables specific to class diagrams:
+ // current node of a class diagram
+ var thisNode:Node = null
+ var subClasses = List[Node]()
+ var superClasses = List[Node]()
+ var incomingImplicits = List[Node]()
+ var outgoingImplicits = List[Node]()
+ isInheritanceDiagram = false
+
+ d match {
+ case InheritanceDiagram(_thisNode, _superClasses, _subClasses, _incomingImplicits, _outgoingImplicits) =>
+
+ def textTypeEntity(text: String) =
+ new TypeEntity {
+ val name = text
+ def refEntity: SortedMap[Int, (base.LinkTo, Int)] = SortedMap()
+ }
+
+ // it seems dot chokes on node names over 8000 chars, so let's limit the size of the string
+ // conservatively, we'll limit at 4000, to be sure:
+ def limitSize(str: String) = if (str.length > 4000) str.substring(0, 3996) + " ..." else str
+
+ // avoid overcrowding the diagram:
+ // if there are too many super / sub / implicit nodes, represent
+ // them by on node with a corresponding tooltip
+ superClasses = if (_superClasses.length > settings.docDiagramsMaxNormalClasses.value) {
+ val superClassesTooltip = Some(limitSize(_superClasses.map(_.tpe.name).mkString(", ")))
+ List(NormalNode(textTypeEntity(_superClasses.length + MultiSuffix), None)(superClassesTooltip))
+ } else _superClasses
+
+ subClasses = if (_subClasses.length > settings.docDiagramsMaxNormalClasses.value) {
+ val subClassesTooltip = Some(limitSize(_subClasses.map(_.tpe.name).mkString(", ")))
+ List(NormalNode(textTypeEntity(_subClasses.length + MultiSuffix), None)(subClassesTooltip))
+ } else _subClasses
+
+ incomingImplicits = if (_incomingImplicits.length > settings.docDiagramsMaxImplicitClasses.value) {
+ val incomingImplicitsTooltip = Some(limitSize(_incomingImplicits.map(_.tpe.name).mkString(", ")))
+ List(ImplicitNode(textTypeEntity(_incomingImplicits.length + MultiSuffix), None)(incomingImplicitsTooltip))
+ } else _incomingImplicits
+
+ outgoingImplicits = if (_outgoingImplicits.length > settings.docDiagramsMaxImplicitClasses.value) {
+ val outgoingImplicitsTooltip = Some(limitSize(_outgoingImplicits.map(_.tpe.name).mkString(", ")))
+ List(ImplicitNode(textTypeEntity(_outgoingImplicits.length + MultiSuffix), None)(outgoingImplicitsTooltip))
+ } else _outgoingImplicits
+
+ thisNode = _thisNode
+ nodes = List()
+ edges = (thisNode -> superClasses) :: subClasses.map(_ -> List(thisNode))
+ node2Index = (thisNode::subClasses:::superClasses:::incomingImplicits:::outgoingImplicits).zipWithIndex.toMap
+ isInheritanceDiagram = true
+ incomingImplicitNodes = incomingImplicits
+ case _ =>
+ nodes = d.nodes
+ edges = d.edges
+ node2Index = d.nodes.zipWithIndex.toMap
+ incomingImplicitNodes = List()
+ }
+ index2Node = node2Index map {_.swap}
+
+ val implicitsDot = {
+ if (!isInheritanceDiagram) ""
+ else {
+ // dot cluster containing thisNode
+ val thisCluster = "subgraph clusterThis {\n" +
+ "style=\"invis\"\n" +
+ node2Dot(thisNode) +
+ "}"
+ // dot cluster containing incoming implicit nodes, if any
+ val incomingCluster = {
+ if(incomingImplicits.isEmpty) ""
+ else "subgraph clusterIncoming {\n" +
+ "style=\"invis\"\n" +
+ incomingImplicits.reverse.map(n => node2Dot(n)).mkString +
+ (if (incomingImplicits.size > 1)
+ incomingImplicits.map(n => "node" + node2Index(n)).mkString(" -> ") +
+ " [constraint=\"false\", style=\"invis\", minlen=\"0.0\"];\n"
+ else "") +
+ "}"
+ }
+ // dot cluster containing outgoing implicit nodes, if any
+ val outgoingCluster = {
+ if(outgoingImplicits.isEmpty) ""
+ else "subgraph clusterOutgoing {\n" +
+ "style=\"invis\"\n" +
+ outgoingImplicits.reverse.map(n => node2Dot(n)).mkString +
+ (if (outgoingImplicits.size > 1)
+ outgoingImplicits.map(n => "node" + node2Index(n)).mkString(" -> ") +
+ " [constraint=\"false\", style=\"invis\", minlen=\"0.0\"];\n"
+ else "") +
+ "}"
+ }
+
+ // assemble clusters into another cluster
+ val incomingTooltip = incomingImplicits.map(_.name).mkString(", ") + " can be implicitly converted to " + thisNode.name
+ val outgoingTooltip = thisNode.name + " can be implicitly converted to " + outgoingImplicits.map(_.name).mkString(", ")
+ "subgraph clusterAll {\n" +
+ "style=\"invis\"\n" +
+ outgoingCluster + "\n" +
+ thisCluster + "\n" +
+ incomingCluster + "\n" +
+ // incoming implicit edge
+ (if (!incomingImplicits.isEmpty) {
+ val n = incomingImplicits.last
+ "node" + node2Index(n) +" -> node" + node2Index(thisNode) +
+ " [id=\"" + cssClass(n, thisNode) + "|" + node2Index(n) + "_" + node2Index(thisNode) + "\", tooltip=\"" + incomingTooltip + "\"" +
+ ", constraint=\"false\", minlen=\"2\", ltail=\"clusterIncoming\", lhead=\"clusterThis\", label=\"implicitly\"];\n"
+ } else "") +
+ // outgoing implicit edge
+ (if (!outgoingImplicits.isEmpty) {
+ val n = outgoingImplicits.head
+ "node" + node2Index(thisNode) + " -> node" + node2Index(n) +
+ " [id=\"" + cssClass(thisNode, n) + "|" + node2Index(thisNode) + "_" + node2Index(n) + "\", tooltip=\"" + outgoingTooltip + "\"" +
+ ", constraint=\"false\", minlen=\"2\", ltail=\"clusterThis\", lhead=\"clusterOutgoing\", label=\"implicitly\"];\n"
+ } else "") +
+ "}"
+ }
+ }
+
+ // assemble graph
+ val graph = "digraph G {\n" +
+ // graph / node / edge attributes
+ graphAttributesStr +
+ "node [" + nodeAttributesStr + "];\n" +
+ "edge [" + edgeAttributesStr + "];\n" +
+ implicitsDot + "\n" +
+ // inheritance nodes
+ nodes.map(n => node2Dot(n)).mkString +
+ subClasses.map(n => node2Dot(n)).mkString +
+ superClasses.map(n => node2Dot(n)).mkString +
+ // inheritance edges
+ edges.map{ case (from, tos) => tos.map(to => {
+ val id = "graph" + counter + "_" + node2Index(to) + "_" + node2Index(from)
+ // the X -> Y edge is inverted twice to keep the diagram flowing the right way
+ // that is, an edge from node X to Y will result in a dot instruction nodeY -> nodeX [dir="back"]
+ "node" + node2Index(to) + " -> node" + node2Index(from) +
+ " [id=\"" + cssClass(to, from) + "|" + id + "\", " +
+ "tooltip=\"" + from.name + (if (from.name.endsWith(MultiSuffix)) " are subtypes of " else " is a subtype of ") +
+ to.name + "\", dir=\"back\", arrowtail=\"empty\"];\n"
+ }).mkString}.mkString +
+ "}"
+
+ tDot += System.currentTimeMillis
+ DiagramStats.addDotGenerationTime(tDot)
+
+ graph
+ }
+
+ /**
+ * Generates the dot string of a given node.
+ */
+ private def node2Dot(node: Node) = {
+
+ // escape HTML characters in node names
+ def escape(name: String) = name.replace("&", "&").replace("<", "<").replace(">", ">");
+
+ // assemble node attribues in a map
+ var attr = scala.collection.mutable.Map[String, String]()
+
+ // link
+ node.doctpl match {
+ case Some(tpl) => attr += "URL" -> (page.relativeLinkTo(tpl) + "#inheritance-diagram")
+ case _ =>
+ }
+
+ // tooltip
+ node.tooltip match {
+ case Some(text) => attr += "tooltip" -> text
+ // show full name where available (instead of TraversableOps[A] show scala.collection.parallel.TraversableOps[A])
+ case None if node.tpl.isDefined => attr += "tooltip" -> node.tpl.get.qualifiedName
+ case _ =>
+ }
+
+ // styles
+ if(node.isImplicitNode)
+ attr ++= implicitStyle
+ else if(node.isOutsideNode)
+ attr ++= outsideStyle
+ else if(node.isTraitNode)
+ attr ++= traitStyle
+ else if(node.isClassNode)
+ attr ++= classStyle
+ else if(node.isObjectNode)
+ attr ++= objectStyle
+ else if(node.isTypeNode)
+ attr ++= typeStyle
+ else
+ attr ++= defaultStyle
+
+ // HTML label
+ var name = escape(node.name)
+ var img = ""
+ if(node.isTraitNode)
+ img = "trait_diagram.png"
+ else if(node.isClassNode)
+ img = "class_diagram.png"
+ else if(node.isObjectNode)
+ img = "object_diagram.png"
+ else if(node.isTypeNode)
+ img = "type_diagram.png"
+
+ if(!img.equals("")) {
+ img = "<TD><IMG SCALE=\"TRUE\" SRC=\"" + settings.outdir.value + "/lib/" + img + "\" /></TD>"
+ name = name + " "
+ }
+ val label = "<<TABLE BORDER=\"0\" CELLBORDER=\"0\">" +
+ "<TR>" + img + "<TD VALIGN=\"MIDDLE\">" + name + "</TD></TR>" +
+ "</TABLE>>"
+
+ // dot does not allow to specify a CSS class, therefore
+ // set the id to "{class}|{id}", which will be used in
+ // the transform method
+ val id = "graph" + counter + "_" + node2Index(node)
+ attr += ("id" -> (cssClass(node) + "|" + id))
+
+ // return dot string
+ "node" + node2Index(node) + " [label=" + label + "," + flatten(attr.toMap) + "];\n"
+ }
+
+ /**
+ * Returns the CSS class for an edge connecting node1 and node2.
+ */
+ private def cssClass(node1: Node, node2: Node): String = {
+ if (node1.isImplicitNode && node2.isThisNode)
+ "implicit-incoming"
+ else if (node1.isThisNode && node2.isImplicitNode)
+ "implicit-outgoing"
+ else
+ "inheritance"
+ }
+
+ /**
+ * Returns the CSS class for a node.
+ */
+ private def cssClass(node: Node): String =
+ if (node.isImplicitNode && incomingImplicitNodes.contains(node))
+ "implicit-incoming" + cssBaseClass(node, "", " ")
+ else if (node.isImplicitNode)
+ "implicit-outgoing" + cssBaseClass(node, "", " ")
+ else if (node.isThisNode)
+ "this" + cssBaseClass(node, "", " ")
+ else if (node.isOutsideNode)
+ "outside" + cssBaseClass(node, "", " ")
+ else
+ cssBaseClass(node, "default", "")
+
+ private def cssBaseClass(node: Node, default: String, space: String) =
+ if (node.isClassNode)
+ space + "class"
+ else if (node.isTraitNode)
+ space + "trait"
+ else if (node.isObjectNode)
+ space + "object"
+ else if (node.isTypeNode)
+ space + "type"
+ else
+ default
+
+ /**
+ * Calls dot with a given dot string and returns the SVG output.
+ */
+ private def generateSVG(dotInput: String, template: DocTemplateEntity) = {
+ val dotOutput = DiagramGenerator.getDotRunner.feedToDot(dotInput, template)
+ var tSVG = -System.currentTimeMillis
+
+ val result = if (dotOutput != null) {
+ val src = scala.io.Source.fromString(dotOutput);
+ try {
+ val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false)
+ val doc = cpa.document()
+ if (doc != null)
+ transform(doc.docElem)
+ else
+ NodeSeq.Empty
+ } catch {
+ case exc: Exception =>
+ if (settings.docDiagramsDebug.value) {
+ settings.printMsg("\n\n**********************************************************************")
+ settings.printMsg("Encountered an error while generating page for " + template.qualifiedName)
+ settings.printMsg(dotInput.toString.split("\n").mkString("\nDot input:\n\t","\n\t",""))
+ settings.printMsg(dotOutput.toString.split("\n").mkString("\nDot output:\n\t","\n\t",""))
+ settings.printMsg(exc.getStackTrace.mkString("\nException: " + exc.toString + ":\n\tat ", "\n\tat ",""))
+ settings.printMsg("\n\n**********************************************************************")
+ } else {
+ settings.printMsg("\nThe diagram for " + template.qualifiedName + " could not be created due to an internal error.")
+ settings.printMsg("Use " + settings.docDiagramsDebug.name + " for more information and please file this as a bug.")
+ }
+ NodeSeq.Empty
+ }
+ } else
+ NodeSeq.Empty
+
+ tSVG += System.currentTimeMillis
+ DiagramStats.addSvgTime(tSVG)
+
+ result
+ }
+
+ /**
+ * Transforms the SVG generated by dot:
+ * - adds a class attribute to the SVG element
+ * - changes the path of the node images from absolute to relative
+ * - assigns id and class attributes to nodes and edges
+ * - removes title elements
+ */
+ private def transform(e:scala.xml.Node): scala.xml.Node = e match {
+ // add an id and class attribute to the SVG element
+ case Elem(prefix, "svg", attribs, scope, child @ _*) => {
+ val klass = if (isInheritanceDiagram) "class-diagram" else "package-diagram"
+ Elem(prefix, "svg", attribs, scope, child map(x => transform(x)) : _*) %
+ new UnprefixedAttribute("id", "graph" + counter, Null) %
+ new UnprefixedAttribute("class", klass, Null)
+ }
+ // change the path of the node images from absolute to relative
+ case img @ <image></image> => {
+ val href = (img \ "@{http://www.w3.org/1999/xlink}href").toString
+ val file = href.substring(href.lastIndexOf("/") + 1, href.size)
+ img.asInstanceOf[Elem] %
+ new PrefixedAttribute("xlink", "href", pathToLib + file, Null)
+ }
+ // assign id and class attributes to edges and nodes:
+ // the id attribute generated by dot has the format: "{class}|{id}"
+ case g @ Elem(prefix, "g", attribs, scope, children @ _*) if (List("edge", "node").contains((g \ "@class").toString)) => {
+ var res = new Elem(prefix, "g", attribs, scope, (children map(x => transform(x))): _*)
+ val dotId = (g \ "@id").toString
+ if (dotId.count(_ == '|') == 1) {
+ val Array(klass, id) = dotId.toString.split("\\|")
+ /* Sometimes dot "forgets" to add the image -- that's very annoying, but it seems pretty random, and simple
+ * tests like excute 20K times and diff the output don't trigger the bug -- so it's up to us to place the image
+ * back in the node */
+ val kind = getKind(klass)
+ if (kind != "")
+ if (((g \ "a" \ "image").isEmpty)) {
+ DiagramStats.addBrokenImage()
+ val xposition = getPosition(g, "x", -22)
+ val yposition = getPosition(g, "y", -11.3334)
+ if (xposition.isDefined && yposition.isDefined) {
+ val imageNode = <image xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href={ ("./lib/" + kind + "_diagram.png") } width="16px" height="16px" preserveAspectRatio="xMinYMin meet" x={ xposition.get.toString } y={ yposition.get.toString }/>
+ val anchorNode = (g \ "a") match {
+ case Seq(Elem(prefix, "a", attribs, scope, children @ _*)) =>
+ transform(new Elem(prefix, "a", attribs, scope, (children ++ imageNode): _*))
+ case _ =>
+ g \ "a"
+ }
+ res = new Elem(prefix, "g", attribs, scope, anchorNode: _*)
+ DiagramStats.addFixedImage()
+ }
+ }
+ res % new UnprefixedAttribute("id", id, Null) %
+ new UnprefixedAttribute("class", (g \ "@class").toString + " " + klass, Null)
+ }
+ else res
+ }
+ // remove titles
+ case <title>{ _* }</title> =>
+ scala.xml.Text("")
+ // apply recursively
+ case Elem(prefix, label, attribs, scope, child @ _*) =>
+ Elem(prefix, label, attribs, scope, child map(x => transform(x)) : _*)
+ case x => x
+ }
+
+ def getKind(klass: String): String =
+ if (klass.contains("class")) "class"
+ else if (klass.contains("trait")) "trait"
+ else if (klass.contains("object")) "object"
+ else ""
+
+ def getPosition(g: scala.xml.Node, axis: String, offset: Double): Option[Double] = {
+ val node = g \ "a" \ "text" \ ("@" + axis)
+ if (node.isEmpty)
+ None
+ else
+ Some(node.toString.toDouble + offset)
+ }
+
+ /* graph / node / edge attributes */
+
+ private val graphAttributes: Map[String, String] = Map(
+ "compound" -> "true",
+ "rankdir" -> "TB"
+ )
+
+ private val nodeAttributes = Map(
+ "shape" -> "rectangle",
+ "style" -> "filled",
+ "penwidth" -> "1",
+ "margin" -> "0.08,0.01",
+ "width" -> "0.0",
+ "height" -> "0.0",
+ "fontname" -> "Arial",
+ "fontsize" -> "10.00"
+ )
+
+ private val edgeAttributes = Map(
+ "color" -> "#d4d4d4",
+ "arrowsize" -> "0.5",
+ "fontcolor" -> "#aaaaaa",
+ "fontsize" -> "10.00",
+ "fontname" -> "Arial"
+ )
+
+ private val defaultStyle = Map(
+ "color" -> "#ababab",
+ "fillcolor" -> "#e1e1e1",
+ "fontcolor" -> "#7d7d7d",
+ "margin" -> "0.1,0.04"
+ )
+
+ private val implicitStyle = Map(
+ "color" -> "#ababab",
+ "fillcolor" -> "#e1e1e1",
+ "fontcolor" -> "#7d7d7d"
+ )
+
+ private val outsideStyle = Map(
+ "color" -> "#ababab",
+ "fillcolor" -> "#e1e1e1",
+ "fontcolor" -> "#7d7d7d"
+ )
+
+ private val traitStyle = Map(
+ "color" -> "#37657D",
+ "fillcolor" -> "#498AAD",
+ "fontcolor" -> "#ffffff"
+ )
+
+ private val classStyle = Map(
+ "color" -> "#115F3B",
+ "fillcolor" -> "#0A955B",
+ "fontcolor" -> "#ffffff"
+ )
+
+ private val objectStyle = Map(
+ "color" -> "#102966",
+ "fillcolor" -> "#3556a7",
+ "fontcolor" -> "#ffffff"
+ )
+
+ private val typeStyle = Map(
+ "color" -> "#115F3B",
+ "fillcolor" -> "#0A955B",
+ "fontcolor" -> "#ffffff"
+ )
+
+ private def flatten(attributes: Map[String, String]) = attributes.map{ case (key, value) => key + "=\"" + value + "\"" }.mkString(", ")
+
+ private val graphAttributesStr = graphAttributes.map{ case (key, value) => key + "=\"" + value + "\";\n" }.mkString
+ private val nodeAttributesStr = flatten(nodeAttributes)
+ private val edgeAttributesStr = flatten(edgeAttributes)
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
new file mode 100644
index 0000000..5cdd5c7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
@@ -0,0 +1,228 @@
+package scala.tools.nsc
+package doc
+package html
+package page
+package diagram
+
+import java.io.InputStream
+import java.io.OutputStream
+import java.io.InputStreamReader
+import java.io.OutputStreamWriter
+import java.io.BufferedWriter
+import java.io.BufferedReader
+import java.io.IOException
+import scala.sys.process._
+import scala.concurrent.SyncVar
+
+import model._
+import model.diagram._
+
+/** This class takes care of running the graphviz dot utility */
+class DotRunner(settings: doc.Settings) {
+
+ private[this] var dotRestarts = 0
+ private[this] var dotProcess: DotProcess = null
+
+ def feedToDot(dotInput: String, template: DocTemplateEntity): String = {
+
+ if (dotProcess == null) {
+ if (dotRestarts < settings.docDiagramsDotRestart.value) {
+ if (dotRestarts != 0)
+ settings.printMsg("Graphviz will be restarted...\n")
+ dotRestarts += 1
+ dotProcess = new DotProcess(settings)
+ } else
+ return null
+ }
+
+ val tStart = System.currentTimeMillis
+ val result = dotProcess.feedToDot(dotInput, template.qualifiedName)
+ val tFinish = System.currentTimeMillis
+ DiagramStats.addDotRunningTime(tFinish - tStart)
+
+ if (result == null) {
+ dotProcess.cleanup()
+ dotProcess = null
+ if (dotRestarts == settings.docDiagramsDotRestart.value) {
+ settings.printMsg("\n")
+ settings.printMsg("**********************************************************************")
+ settings.printMsg("Diagrams will be disabled for this run because the graphviz dot tool")
+ settings.printMsg("has malfunctioned too many times. These scaladoc flags may help:")
+ settings.printMsg("")
+ val baseList = List(settings.docDiagramsDebug,
+ settings.docDiagramsDotPath,
+ settings.docDiagramsDotRestart,
+ settings.docDiagramsDotTimeout)
+ val width = (baseList map (_.helpSyntax.length)).max
+ def helpStr(s: doc.Settings#Setting) = ("%-" + width + "s") format (s.helpSyntax) + " " + s.helpDescription
+ baseList.foreach((sett: doc.Settings#Setting) => settings.printMsg(helpStr(sett)))
+ settings.printMsg("\nPlease note that graphviz package version 2.26 or above is required.")
+ settings.printMsg("**********************************************************************\n\n")
+
+ }
+ }
+
+ result
+ }
+
+ def cleanup() =
+ if (dotProcess != null)
+ dotProcess.cleanup()
+}
+
+class DotProcess(settings: doc.Settings) {
+
+ @volatile var error: Boolean = false // signal an error
+ val inputString = new SyncVar[String] // used for the dot process input
+ val outputString = new SyncVar[String] // used for the dot process output
+ val errorBuffer: StringBuffer = new StringBuffer() // buffer used for both dot process error console AND logging
+
+ // set in only one place, in the main thread
+ var process: Process = null
+ var templateName: String = ""
+ var templateInput: String = ""
+
+ def feedToDot(input: String, template: String): String = {
+
+ templateName = template
+ templateInput = input
+
+ try {
+
+ // process creation
+ if (process == null) {
+ val procIO = new ProcessIO(inputFn(_), outputFn(_), errorFn(_))
+ val processBuilder: ProcessBuilder = Seq(settings.docDiagramsDotPath.value, "-Tsvg")
+ process = processBuilder.run(procIO)
+ }
+
+ // pass the input and wait for the output
+ assert(!inputString.isSet)
+ assert(!outputString.isSet)
+ inputString.put(input)
+ var result = outputString.take(settings.docDiagramsDotTimeout.value * 1000)
+ if (error) result = null
+
+ result
+
+ } catch {
+ case exc: Throwable =>
+ errorBuffer.append(" Main thread in " + templateName + ": " +
+ (if (exc.isInstanceOf[NoSuchElementException]) "Timeout" else "Exception: " + exc))
+ error = true
+ return null
+ }
+ }
+
+ def cleanup(): Unit = {
+
+ // we'll need to know if there was any error for reporting
+ val _error = error
+
+ if (process != null) {
+ // if there's no error, this should exit cleanly
+ if (!error) feedToDot("<finish>", "<finishing>")
+
+ // just in case there's any thread hanging, this will take it out of the loop
+ error = true
+ process.destroy()
+ // we'll need to unblock the input again
+ if (!inputString.isSet) inputString.put("")
+ if (outputString.isSet) outputString.take()
+ }
+
+ if (_error) {
+ if (settings.docDiagramsDebug.value) {
+ settings.printMsg("\n**********************************************************************")
+ settings.printMsg("The graphviz dot diagram tool has malfunctioned and will be restarted.")
+ settings.printMsg("\nThe following is the log of the failure:")
+ settings.printMsg(errorBuffer.toString)
+ settings.printMsg(" Cleanup: Last template: " + templateName)
+ settings.printMsg(" Cleanup: Last dot input: \n " + templateInput.replaceAll("\n","\n ") + "\n")
+ settings.printMsg(" Cleanup: Dot path: " + settings.docDiagramsDotPath.value)
+ if (process != null)
+ settings.printMsg(" Cleanup: Dot exit code: " + process.exitValue)
+ settings.printMsg("**********************************************************************")
+ } else {
+ // we shouldn't just sit there for 50s not reporting anything, no?
+ settings.printMsg("Graphviz dot encountered an error when generating the diagram for:")
+ settings.printMsg(templateName)
+ settings.printMsg("These are usually spurious errors, but if you notice a persistant error on")
+ settings.printMsg("a diagram, please use the " + settings.docDiagramsDebug.name + " flag and report a bug with the output.")
+ }
+ }
+ }
+
+ /* The standard input passing function */
+ private[this] def inputFn(stdin: OutputStream): Unit = {
+ val writer = new BufferedWriter(new OutputStreamWriter(stdin))
+ try {
+ var input = inputString.take()
+
+ while (!error) {
+ if (input == "<finish>") {
+ // empty => signal to finish
+ stdin.close()
+ return
+ } else {
+ // send output to dot
+ writer.write(input + "\n\n")
+ writer.flush()
+ }
+
+ if (!error) input = inputString.take()
+ }
+ stdin.close()
+ } catch {
+ case exc: Throwable =>
+ error = true
+ stdin.close()
+ errorBuffer.append(" Input thread in " + templateName + ": Exception: " + exc + "\n")
+ }
+ }
+
+ private[this] def outputFn(stdOut: InputStream): Unit = {
+ val reader = new BufferedReader(new InputStreamReader(stdOut))
+ var buffer: StringBuilder = new StringBuilder()
+ try {
+ var line = reader.readLine
+ while (!error && line != null) {
+ buffer.append(line + "\n")
+ // signal the last element in the svg (only for output)
+ if (line == "</svg>") {
+ outputString.put(buffer.toString)
+ buffer.setLength(0)
+ }
+ if (error) { stdOut.close(); return }
+ line = reader.readLine
+ }
+ assert(!outputString.isSet)
+ outputString.put(buffer.toString)
+ stdOut.close()
+ } catch {
+ case exc: Throwable =>
+ error = true
+ stdOut.close()
+ errorBuffer.append(" Output thread in " + templateName + ": Exception: " + exc + "\n")
+ }
+ }
+
+ private[this] def errorFn(stdErr: InputStream): Unit = {
+ val reader = new BufferedReader(new InputStreamReader(stdErr))
+ var buffer: StringBuilder = new StringBuilder()
+ try {
+ var line = reader.readLine
+ while (line != null) {
+ errorBuffer.append(" DOT <error console>: " + line + "\n")
+ error = true
+ line = reader.readLine
+ }
+ stdErr.close()
+ } catch {
+ case exc: Throwable =>
+ error = true
+ stdErr.close()
+ errorBuffer.append(" Error thread in " + templateName + ": Exception: " + exc + "\n")
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
new file mode 100644
index 0000000..9d7aec7
Binary files /dev/null and b/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif b/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
new file mode 100644
index 0000000..4be145d
Binary files /dev/null and b/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css
new file mode 100644
index 0000000..5fe33f7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css
@@ -0,0 +1,143 @@
+.diagram-container
+{
+ display: none;
+}
+
+.diagram
+{
+ overflow: hidden;
+ padding-top:15px;
+}
+
+.diagram svg
+{
+ display: block;
+ position: absolute;
+ visibility: hidden;
+ margin: auto;
+}
+
+.diagram-help
+{
+ float:right;
+ display:none;
+}
+
+.magnifying
+{
+ cursor: -webkit-zoom-in ! important;
+ cursor: -moz-zoom-in ! important;
+ cursor: pointer;
+}
+
+#close-link
+{
+ position: absolute;
+ z-index: 100;
+ font-family: Arial, sans-serif;
+ font-size: 10pt;
+ text-decoration: underline;
+ color: #315479;
+}
+
+#close:hover
+{
+ text-decoration: none;
+}
+
+svg a
+{
+ cursor:pointer;
+}
+
+svg text
+{
+ font-size: 10px;
+}
+
+/* try to move the node text 1px in order to be vertically
+ centered (does not work in all browsers) */
+svg .node text
+{
+ transform: translate(0px,1px);
+ -ms-transform: translate(0px,1px);
+ -webkit-transform: translate(0px,1px);
+ -o-transform: translate(0px,1px);
+ -moz-transform: translate(0px,1px);
+}
+
+/* hover effect for edges */
+
+svg .edge.over text,
+svg .edge.implicit-incoming.over polygon,
+svg .edge.implicit-outgoing.over polygon
+{
+ fill: #202020;
+}
+
+svg .edge.over path,
+svg .edge.over polygon
+{
+ stroke: #202020;
+}
+
+/* hover effect for nodes in class diagrams */
+
+svg.class-diagram .node
+{
+ opacity: 0.75;
+}
+
+svg.class-diagram .node.this
+{
+ opacity: 1.0;
+}
+
+svg.class-diagram .node.over
+{
+ opacity: 1.0;
+}
+
+svg .node.over polygon
+{
+ stroke: #202020;
+}
+
+/* hover effect for nodes in package diagrams */
+
+svg.package-diagram .node.class.over polygon,
+svg.class-diagram .node.this.class.over polygon
+{
+ fill: #098552;
+ fill: #04663e;
+}
+
+svg.package-diagram .node.trait.over polygon,
+svg.class-diagram .node.this.trait.over polygon
+{
+ fill: #3c7b9b;
+ fill: #235d7b;
+}
+
+svg.package-diagram .node.type.over polygon,
+svg.class-diagram .node.this.type.over polygon
+{
+ fill: #098552;
+ fill: #04663e;
+}
+
+
+svg.package-diagram .node.object.over polygon
+{
+ fill: #183377;
+}
+
+svg.package-diagram .node.outside.over polygon
+{
+ fill: #d4d4d4;
+}
+
+svg.package-diagram .node.default.over polygon
+{
+ fill: #d4d4d4;
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js
new file mode 100644
index 0000000..478f2e3
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js
@@ -0,0 +1,324 @@
+/**
+ * JavaScript functions enhancing the SVG diagrams.
+ *
+ * @author Damien Obrist
+ */
+
+var diagrams = {};
+
+/**
+ * Initializes the diagrams in the main window.
+ */
+$(document).ready(function()
+{
+ // hide diagrams in browsers not supporting SVG
+ if(Modernizr && !Modernizr.inlinesvg)
+ return;
+
+ // only execute this in the main window
+ if(diagrams.isPopup)
+ return;
+
+ if($("#content-diagram").length)
+ $("#inheritance-diagram").css("padding-bottom", "20px");
+
+ $(".diagram-container").css("display", "block");
+
+ $(".diagram").each(function() {
+ // store inital dimensions
+ $(this).data("width", $("svg", $(this)).width());
+ $(this).data("height", $("svg", $(this)).height());
+ // store unscaled clone of SVG element
+ $(this).data("svg", $(this).get(0).childNodes[0].cloneNode(true));
+ });
+
+ // make diagram visible, hide container
+ $(".diagram").css("display", "none");
+ $(".diagram svg").css({
+ "position": "static",
+ "visibility": "visible",
+ "z-index": "auto"
+ });
+
+ // enable linking to diagrams
+ if($(location).attr("hash") == "#inheritance-diagram") {
+ diagrams.toggle($("#inheritance-diagram-container"), true);
+ } else if($(location).attr("hash") == "#content-diagram") {
+ diagrams.toggle($("#content-diagram-container"), true);
+ }
+
+ $(".diagram-link").click(function() {
+ diagrams.toggle($(this).parent());
+ });
+
+ // register resize function
+ $(window).resize(diagrams.resize);
+
+ // don't bubble event to parent div
+ // when clicking on a node of a resized
+ // diagram
+ $("svg a").click(function(e) {
+ e.stopPropagation();
+ });
+
+ diagrams.initHighlighting();
+});
+
+/**
+ * Initializes the diagrams in the popup.
+ */
+diagrams.initPopup = function(id)
+{
+ // copy diagram from main window
+ if(!jQuery.browser.msie)
+ $("body").append(opener.$("#" + id).data("svg"));
+
+ // positioning
+ $("svg").css("position", "absolute");
+ $(window).resize(function()
+ {
+ var svg_w = $("svg").css("width").replace("px", "");
+ var svg_h = $("svg").css("height").replace("px", "");
+ var x = $(window).width() / 2 - svg_w / 2;
+ if(x < 0) x = 0;
+ var y = $(window).height() / 2 - svg_h / 2;
+ if(y < 0) y = 0;
+ $("svg").css("left", x + "px");
+ $("svg").css("top", y + "px");
+ });
+ $(window).resize();
+
+ diagrams.initHighlighting();
+ $("svg a").click(function(e) {
+ opener.diagrams.redirectFromPopup(this.href.baseVal);
+ window.close();
+ });
+ $(document).keyup(function(e) {
+ if (e.keyCode == 27) window.close();
+ });
+}
+
+/**
+ * Initializes highlighting for nodes and edges.
+ */
+diagrams.initHighlighting = function()
+{
+ // helper function since $.hover doesn't work in IE
+
+ function hover(elements, fn)
+ {
+ elements.mouseover(fn);
+ elements.mouseout(fn);
+ }
+
+ // inheritance edges
+
+ hover($("svg .edge.inheritance"), function(evt){
+ var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
+ var parts = $(this).attr("id").split("_");
+ toggleClass($("#" + parts[0] + "_" + parts[1]));
+ toggleClass($("#" + parts[0] + "_" + parts[2]));
+ toggleClass($(this));
+ });
+
+ // nodes
+
+ hover($("svg .node"), function(evt){
+ var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
+ toggleClass($(this));
+ var parts = $(this).attr("id").split("_");
+ var index = parts[1];
+ $("svg#" + parts[0] + " .edge.inheritance").each(function(){
+ var parts2 = $(this).attr("id").split("_");
+ if(parts2[1] == index)
+ {
+ toggleClass($("#" + parts2[0] + "_" + parts2[2]));
+ toggleClass($(this));
+ } else if(parts2[2] == index)
+ {
+ toggleClass($("#" + parts2[0] + "_" + parts2[1]));
+ toggleClass($(this));
+ }
+ });
+ });
+
+ // incoming implicits
+
+ hover($("svg .node.implicit-incoming"), function(evt){
+ var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
+ toggleClass($(this));
+ toggleClass($("svg .edge.implicit-incoming"));
+ toggleClass($("svg .node.this"));
+ });
+
+ hover($("svg .edge.implicit-incoming"), function(evt){
+ var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
+ toggleClass($(this));
+ toggleClass($("svg .node.this"));
+ $("svg .node.implicit-incoming").each(function(){
+ toggleClass($(this));
+ });
+ });
+
+ // implicit outgoing nodes
+
+ hover($("svg .node.implicit-outgoing"), function(evt){
+ var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
+ toggleClass($(this));
+ toggleClass($("svg .edge.implicit-outgoing"));
+ toggleClass($("svg .node.this"));
+ });
+
+ hover($("svg .edge.implicit-outgoing"), function(evt){
+ var toggleClass = evt.type == "mouseout" ? diagrams.removeClass : diagrams.addClass;
+ toggleClass($(this));
+ toggleClass($("svg .node.this"));
+ $("svg .node.implicit-outgoing").each(function(){
+ toggleClass($(this));
+ });
+ });
+};
+
+/**
+ * Resizes the diagrams according to the available width.
+ */
+diagrams.resize = function()
+{
+ // available width
+ var availableWidth = $("body").width() - 20;
+
+ $(".diagram-container").each(function() {
+ // unregister click event on whole div
+ $(".diagram", this).unbind("click");
+ var diagramWidth = $(".diagram", this).data("width");
+ var diagramHeight = $(".diagram", this).data("height");
+
+ if(diagramWidth > availableWidth)
+ {
+ // resize diagram
+ var height = diagramHeight / diagramWidth * availableWidth;
+ $(".diagram svg", this).width(availableWidth);
+ $(".diagram svg", this).height(height);
+
+ // register click event on whole div
+ $(".diagram", this).click(function() {
+ diagrams.popup($(this));
+ });
+ $(".diagram", this).addClass("magnifying");
+ }
+ else
+ {
+ // restore full size of diagram
+ $(".diagram svg", this).width(diagramWidth);
+ $(".diagram svg", this).height(diagramHeight);
+ // don't show custom cursor any more
+ $(".diagram", this).removeClass("magnifying");
+ }
+ });
+};
+
+/**
+ * Shows or hides a diagram depending on its current state.
+ */
+diagrams.toggle = function(container, dontAnimate)
+{
+ // change class of link
+ $(".diagram-link", container).toggleClass("open");
+ // get element to show / hide
+ var div = $(".diagram", container);
+ if (div.is(':visible'))
+ {
+ $(".diagram-help", container).hide();
+ div.unbind("click");
+ div.removeClass("magnifying");
+ div.slideUp(100);
+ }
+ else
+ {
+ diagrams.resize();
+ if(dontAnimate)
+ div.show();
+ else
+ div.slideDown(100);
+ $(".diagram-help", container).show();
+ }
+};
+
+/**
+ * Opens a popup containing a copy of a diagram.
+ */
+diagrams.windows = {};
+diagrams.popup = function(diagram)
+{
+ var id = diagram.attr("id");
+ if(!diagrams.windows[id] || diagrams.windows[id].closed) {
+ var title = $(".symbol .name", $("#signature")).text();
+ // cloning from parent window to popup somehow doesn't work in IE
+ // therefore include the SVG as a string into the HTML
+ var svgIE = jQuery.browser.msie ? $("<div />").append(diagram.data("svg")).html() : "";
+ var html = '' +
+ '<?xml version="1.0" encoding="UTF-8"?>\n' +
+ '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' +
+ '<html>\n' +
+ ' <head>\n' +
+ ' <title>' + title + '</title>\n' +
+ ' <link href="' + $("#diagrams-css").attr("href") + '" media="screen" type="text/css" rel="stylesheet" />\n' +
+ ' <script type="text/javascript" src="' + $("#jquery-js").attr("src") + '"></script>\n' +
+ ' <script type="text/javascript" src="' + $("#diagrams-js").attr("src") + '"></script>\n' +
+ ' <script type="text/javascript">\n' +
+ ' diagrams.isPopup = true;\n' +
+ ' </script>\n' +
+ ' </head>\n' +
+ ' <body onload="diagrams.initPopup(\'' + id + '\');">\n' +
+ ' <a href="#" onclick="window.close();" id="close-link">Close this window</a>\n' +
+ ' ' + svgIE + '\n' +
+ ' </body>\n' +
+ '</html>';
+
+ var padding = 30;
+ var screenHeight = screen.availHeight;
+ var screenWidth = screen.availWidth;
+ var w = Math.min(screenWidth, diagram.data("width") + 2 * padding);
+ var h = Math.min(screenHeight, diagram.data("height") + 2 * padding);
+ var left = (screenWidth - w) / 2;
+ var top = (screenHeight - h) / 2;
+ var parameters = "height=" + h + ", width=" + w + ", left=" + left + ", top=" + top + ", scrollbars=yes, location=no, resizable=yes";
+ var win = window.open("about:blank", "_blank", parameters);
+ win.document.open();
+ win.document.write(html);
+ win.document.close();
+ diagrams.windows[id] = win;
+ }
+ win.focus();
+};
+
+/**
+ * This method is called from within the popup when a node is clicked.
+ */
+diagrams.redirectFromPopup = function(url)
+{
+ window.location = url;
+};
+
+/**
+ * Helper method that adds a class to a SVG element.
+ */
+diagrams.addClass = function(svgElem, newClass) {
+ newClass = newClass || "over";
+ var classes = svgElem.attr("class");
+ if ($.inArray(newClass, classes.split(/\s+/)) == -1) {
+ classes += (classes ? ' ' : '') + newClass;
+ svgElem.attr("class", classes);
+ }
+};
+
+/**
+ * Helper method that removes a class from a SVG element.
+ */
+diagrams.removeClass = function(svgElem, oldClass) {
+ oldClass = oldClass || "over";
+ var classes = svgElem.attr("class");
+ classes = $.grep(classes.split(/\s+/), function(n, i) { return n != oldClass; }).join(' ');
+ svgElem.attr("class", classes);
+};
+
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
index c6136c5..55fb370 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
@@ -1,23 +1,32 @@
* {
- color: inherit;
- font-size: 10pt;
- text-decoration: none;
+ color: inherit;
+ font-size: 10pt;
+ text-decoration: none;
font-family: Arial, sans-serif;
- border-width: 0px;
- padding: 0px;
- margin: 0px;
+ border-width: 0px;
+ padding: 0px;
+ margin: 0px;
}
a {
- cursor: pointer;
+ cursor: pointer;
}
a:hover {
- text-decoration: underline;
+ text-decoration: underline;
}
h1 {
- display: none;
+ display: none;
+}
+
+.selected {
+ -moz-box-shadow: inset 0px 5px 10px rgba(58, 88, 97, .36);
+ -webkit-box-shadow: inset 0px 5px 10px rgba(58, 88, 97, .36);
+ border-top: solid 1px rgba(119, 138, 153, 0.8);
+ border-bottom: solid 1px rgba(151, 173, 191, 0.4);
+ background-color: #ced2d9;
+ margin: -1px 0px;
}
/*.letters {
@@ -33,81 +42,81 @@ h1 {
}
#browser {
- top: 0px;
- left: 0px;
- bottom: 0px;
- width: 100%;
- display: block;
- position: fixed;
+ top: 0px;
+ left: 0px;
+ bottom: 0px;
+ width: 100%;
+ display: block;
+ position: fixed;
}
#filter {
- position: absolute;
- display: block;
-/* padding: 5px;*/
- right: 0;
- left: 0;
- top: 0;
- background-image:url('filterbg.gif');
- background-repeat:repeat-x;
- background-color: #ededee; /* light gray */
- /*background-color: #DADADA;*/
- border:1px solid #bbbbbb;
- border-top:0;
- border-left:0;
- border-right:0;
+ position: absolute;
+ display: block;
+/* padding: 5px;*/
+ right: 0;
+ left: 0;
+ top: 0;
+ background-image:url('filterbg.gif');
+ background-repeat:repeat-x;
+ background-color: #ededee; /* light gray */
+ /*background-color: #DADADA;*/
+ border:1px solid #bbbbbb;
+ border-top:0;
+ border-left:0;
+ border-right:0;
}
#textfilter {
- position: relative;
- display: block;
- height: 20px;
- margin-top: 5px;
- margin-bottom: 5px;
+ position: relative;
+ display: block;
+ height: 20px;
+ margin-top: 5px;
+ margin-bottom: 5px;
}
#textfilter > .pre {
- display: block;
- position: absolute;
- top: 0;
- left: 0;
- height: 23px;
- width: 21px;
- background: url("filter_box_left.png");
+ display: block;
+ position: absolute;
+ top: 0;
+ left: 0;
+ height: 23px;
+ width: 21px;
+ background: url("filter_box_left.png");
}
#textfilter > .input {
- display: block;
- position: absolute;
- top: 0;
- right: 20px;
- left: 20px;
+ display: block;
+ position: absolute;
+ top: 0;
+ right: 20px;
+ left: 20px;
}
#textfilter > .input > input {
- height: 20px;
- padding: 1px;
- font-weight: bold;
- color: #000000;
- background: #ffffff url("filterboxbarbg.png") repeat-x bottom left;
- width: 100%;
+ height: 20px;
+ padding: 1px;
+ font-weight: bold;
+ color: #000000;
+ background: #ffffff url("filterboxbarbg.png") repeat-x bottom left;
+ width: 100%;
}
#textfilter > .post {
- display: block;
- position: absolute;
- top: 0;
- right: 0;
- height: 23px;
- width: 21px;
- background: url("filter_box_right.png");
+ display: block;
+ position: absolute;
+ top: 0;
+ right: 0;
+ height: 23px;
+ width: 21px;
+ background: url("filter_box_right.png");
}
/*#textfilter {
- position: relative;
- display: block;
+ position: relative;
+ display: block;
height: 20px;
- margin-bottom: 5px;
+ margin-bottom: 5px;
}
#textfilter > .pre {
@@ -121,7 +130,7 @@ h1 {
}
#textfilter > .input {
- display: block;
+ display: block;
position: absolute;
top: 0;
right: 20px;
@@ -129,11 +138,11 @@ h1 {
}
#textfilter > .input > input {
- height: 16px;
- padding: 2px;
- font-weight: bold;
- color: darkblue;
- background-color: white;
+ height: 16px;
+ padding: 2px;
+ font-weight: bold;
+ color: darkblue;
+ background-color: white;
width: 100%;
}
@@ -148,22 +157,22 @@ h1 {
}*/
#focusfilter {
- position: relative;
- text-align: center;
- display: block;
- padding: 5px;
- background-color: #fffebd; /* light yellow*/
- text-shadow: #ffffff 0 1px 0;
+ position: relative;
+ text-align: center;
+ display: block;
+ padding: 5px;
+ background-color: #fffebd; /* light yellow*/
+ text-shadow: #ffffff 0 1px 0;
}
#focusfilter .focuscoll {
- font-weight: bold;
- text-shadow: #ffffff 0 1px 0;
+ font-weight: bold;
+ text-shadow: #ffffff 0 1px 0;
}
#focusfilter img {
- bottom: -2px;
- position: relative;
+ bottom: -2px;
+ position: relative;
}
#kindfilter {
@@ -182,10 +191,9 @@ h1 {
}
#kindfilter > a:hover {
- color: #4C4C4C;
- text-decoration: none;
- text-shadow: #ffffff 0 1px 0;
-
+ color: #4C4C4C;
+ text-decoration: none;
+ text-shadow: #ffffff 0 1px 0;
}
#letters {
@@ -198,7 +206,7 @@ h1 {
border-right:0;
}
-#letters > a {
+#letters > a, #letters > span {
/* font-family: monospace;*/
color: #858484;
font-weight: bold;
@@ -206,119 +214,123 @@ h1 {
text-shadow: #ffffff 0 1px 0;
padding-right: 2px;
}
+
+#letters > span {
+ color: #bbb;
+}
#tpl {
- display: block;
- position: fixed;
- overflow: auto;
- right: 0;
- left: 0;
- bottom: 0;
- top: 5px;
- position: absolute;
- display: block;
+ display: block;
+ position: fixed;
+ overflow: auto;
+ right: 0;
+ left: 0;
+ bottom: 0;
+ top: 5px;
+ position: absolute;
+ display: block;
}
#tpl .packhide {
- display: block;
- float: right;
- font-weight: normal;
- color: white;
+ display: block;
+ float: right;
+ font-weight: normal;
+ color: white;
}
#tpl .packfocus {
- display: block;
- float: right;
- font-weight: normal;
- color: white;
+ display: block;
+ float: right;
+ font-weight: normal;
+ color: white;
}
#tpl .packages > ol {
- background-color: #dadfe6;
- /*margin-bottom: 5px;*/
+ background-color: #dadfe6;
+ /*margin-bottom: 5px;*/
}
/*#tpl .packages > ol > li {
- margin-bottom: 1px;
+ margin-bottom: 1px;
}*/
#tpl .packages > li > a {
- padding: 0px 5px;
+ padding: 0px 5px;
}
#tpl .packages > li > a.tplshow {
- display: block;
- color: white;
- font-weight: bold;
- display: block;
- text-shadow: #000000 0 1px 0;
+ display: block;
+ color: white;
+ font-weight: bold;
+ display: block;
+ text-shadow: #000000 0 1px 0;
}
#tpl ol > li.pack {
- padding: 3px 5px;
- background: url("packagesbg.gif");
- background-repeat:repeat-x;
- min-height: 14px;
- background-color: #6e808e;
+ padding: 3px 5px;
+ background: url("packagesbg.gif");
+ background-repeat:repeat-x;
+ min-height: 14px;
+ background-color: #6e808e;
}
#tpl ol > li {
- display: block;
+ display: block;
}
#tpl .templates > li {
- padding-left: 5px;
- min-height: 18px;
+ padding-left: 5px;
+ min-height: 18px;
}
#tpl ol > li .icon {
- padding-right: 5px;
- bottom: -2px;
- position: relative;
+ padding-right: 5px;
+ bottom: -2px;
+ position: relative;
}
#tpl .templates div.placeholder {
- padding-right: 5px;
- width: 13px;
- display: inline-block;
+ padding-right: 5px;
+ width: 13px;
+ display: inline-block;
}
#tpl .templates span.tplLink {
- padding-left: 5px;
+ padding-left: 5px;
}
#content {
- border-left-width: 1px;
- border-left-color: black;
- border-left-style: white;
- right: 0px;
- left: 0px;
- bottom: 0px;
- top: 0px;
- position: fixed;
- margin-left: 300px;
- display: block;
+ border-left-width: 1px;
+ border-left-color: black;
+ border-left-style: white;
+ right: 0px;
+ left: 0px;
+ bottom: 0px;
+ top: 0px;
+ position: fixed;
+ margin-left: 300px;
+ display: block;
}
#content > iframe {
- display: block;
- height: 100%;
- width: 100%;
+ display: block;
+ height: 100%;
+ width: 100%;
}
.ui-layout-pane {
- background: #FFF;
- overflow: auto;
+ background: #FFF;
+ overflow: auto;
}
.ui-layout-resizer {
- background-image:url('filterbg.gif');
- background-repeat:repeat-x;
- background-color: #ededee; /* light gray */
- border:1px solid #bbbbbb;
- border-top:0;
- border-bottom:0;
- border-left: 0;
+ background-image:url('filterbg.gif');
+ background-repeat:repeat-x;
+ background-color: #ededee; /* light gray */
+ border:1px solid #bbbbbb;
+ border-top:0;
+ border-bottom:0;
+ border-left: 0;
}
.ui-layout-toggler {
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
index e9ed718..96689ae 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
@@ -14,16 +14,19 @@ var title = $(document).attr('title');
var lastHash = "";
$(document).ready(function() {
- $('body').layout({ west__size: '20%' });
- $('#browser').layout({
- center__paneSelector: ".ui-west-center"
+ $('body').layout({
+ west__size: '20%',
+ center__maskContents: true
+ });
+ $('#browser').layout({
+ center__paneSelector: ".ui-west-center"
//,center__initClosed:true
- ,north__paneSelector: ".ui-west-north"
- });
+ ,north__paneSelector: ".ui-west-north"
+ });
$('iframe').bind("load", function(){
var subtitle = $(this).contents().find('title').text();
$(document).attr('title', (title ? title + " - " : "") + subtitle);
-
+
setUrlFragmentFromFrameSrc();
});
@@ -81,16 +84,16 @@ function setUrlFragmentFromFrameSrc() {
var commonLength = location.pathname.lastIndexOf("/");
var frameLocation = frames["template"].location;
var relativePath = frameLocation.pathname.slice(commonLength + 1);
-
+
if(!relativePath || frameLocation.pathname.indexOf("/") < 0)
return;
-
+
// Add #, remove ".html" and replace "/" with "."
fragment = "#" + relativePath.replace(/\.html$/, "").replace(/\//g, ".");
-
+
// Add the frame's hash after an @
if(frameLocation.hash) fragment += ("@" + frameLocation.hash.slice(1));
-
+
// Use replace to not add history items
lastFragment = fragment;
location.replace(fragment);
@@ -109,7 +112,7 @@ var Index = {};
if (type == 'object') {
href = t['object'];
} else {
- href = t['class'] || t['trait'] || t['case class'];
+ href = t['class'] || t['trait'] || t['case class'] || t['type'];
}
return [
'<a class="tplshow" target="template" href="',
@@ -142,10 +145,10 @@ var Index = {};
inner += openLink(template, 'object');
}
- if (template['class'] || template['trait'] || template['case class']) {
+ if (template['class'] || template['trait'] || template['case class'] || template['type']) {
inner += (inner == '') ?
'<div class="placeholder" />' : '</a>';
- inner += openLink(template, template['trait'] ? 'trait' : 'class');
+ inner += openLink(template, template['trait'] ? 'trait' : template['type'] ? 'type' : 'class');
} else {
inner += '<div class="placeholder"/>';
}
@@ -245,6 +248,7 @@ function configureEntityList() {
function prepareEntityList() {
var classIcon = $("#library > img.class");
var traitIcon = $("#library > img.trait");
+ var typeIcon = $("#library > img.type");
var objectIcon = $("#library > img.object");
var packageIcon = $("#library > img.package");
@@ -252,6 +256,7 @@ function prepareEntityList() {
$('#tpl li.pack').each(function () {
$("span.class", this).each(function() { $(this).replaceWith(classIcon.clone()); });
$("span.trait", this).each(function() { $(this).replaceWith(traitIcon.clone()); });
+ $("span.type", this).each(function() { $(this).replaceWith(typeIcon.clone()); });
$("span.object", this).each(function() { $(this).replaceWith(objectIcon.clone()); });
$("span.package", this).each(function() { $(this).replaceWith(packageIcon.clone()); });
});
@@ -260,17 +265,96 @@ function prepareEntityList() {
.prepend("<a class='packfocus'>focus</a>");
}
+/* Handles all key presses while scrolling around with keyboard shortcuts in left panel */
+function keyboardScrolldownLeftPane() {
+ scheduler.add("init", function() {
+ $("#textfilter input").blur();
+ var $items = $("#tpl li");
+ $items.first().addClass('selected');
+
+ $(window).bind("keydown", function(e) {
+ var $old = $items.filter('.selected'),
+ $new;
+
+ switch ( e.keyCode ) {
+
+ case 9: // tab
+ $old.removeClass('selected');
+ break;
+
+ case 13: // enter
+ $old.removeClass('selected');
+ var $url = $old.children().filter('a:last').attr('href');
+ $("#template").attr("src",$url);
+ break;
+
+ case 27: // escape
+ $old.removeClass('selected');
+ $(window).unbind(e);
+ $("#textfilter input").focus();
+
+ break;
+
+ case 38: // up
+ $new = $old.prev();
+
+ if (!$new.length) {
+ $new = $old.parent().prev();
+ }
+
+ if ($new.is('ol') && $new.children(':last').is('ol')) {
+ $new = $new.children().children(':last');
+ } else if ($new.is('ol')) {
+ $new = $new.children(':last');
+ }
+
+ break;
+
+ case 40: // down
+ $new = $old.next();
+ if (!$new.length) {
+ $new = $old.parent().parent().next();
+ }
+ if ($new.is('ol')) {
+ $new = $new.children(':first');
+ }
+ break;
+ }
+
+ if ($new.is('li')) {
+ $old.removeClass('selected');
+ $new.addClass('selected');
+ } else if (e.keyCode == 38) {
+ $(window).unbind(e);
+ $("#textfilter input").focus();
+ }
+ });
+ });
+}
+
/* Configures the text filter */
function configureTextFilter() {
scheduler.add("init", function() {
- $("#filter").append("<div id='textfilter'><span class='pre'/><span class='input'><input type='text' accesskey='/'/></span><span class='post'/></div>");
- printAlphabet();
+ $("#textfilter").append("<span class='pre'/><span class='input'><input id='index-input' type='text' accesskey='/'/></span><span class='post'/>");
var input = $("#textfilter input");
resizeFilterBlock();
- input.bind("keyup", function(event) {
+ input.bind('keyup', function(event) {
if (event.keyCode == 27) { // escape
input.attr("value", "");
}
+ if (event.keyCode == 40) { // down arrow
+ $(window).unbind("keydown");
+ keyboardScrolldownLeftPane();
+ return false;
+ }
+ textFilter();
+ });
+ input.bind('keydown', function(event) {
+ if (event.keyCode == 9) { // tab
+ $("#template").contents().find("#mbrsel-input").focus();
+ input.attr("value", "");
+ return false;
+ }
textFilter();
});
input.focus(function(event) { input.select(); });
@@ -342,7 +426,7 @@ function textFilter() {
});
configureHideFilter();
};
-
+
scheduler.add('filter', searchLoop);
}
@@ -450,19 +534,3 @@ function kindFilterSync() {
function resizeFilterBlock() {
$("#tpl").css("top", $("#filter").outerHeight(true));
}
-
-function printAlphabet() {
- var html = '<a target="template" href="index/index-_.html">#</a>';
- var c;
- for (c = 'a'; c < 'z'; c = String.fromCharCode(c.charCodeAt(0) + 1)) {
- html += [
- '<a target="template" href="index/index-',
- c,
- '.html">',
- c.toUpperCase(),
- '</a>'
- ].join('');
- }
- $("#filter").append('<div id="letters">' + html + '</div>');
-}
-
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
old mode 100644
new mode 100755
index d223b4a..faab0cf
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
@@ -1,401 +1,6 @@
-/*!
- * jQuery UI 1.9m2
- *
- * Copyright (c) 2010 AUTHORS.txt (http://jqueryui.com/about)
- * Dual licensed under the MIT (MIT-LICENSE.txt)
- * and GPL (GPL-LICENSE.txt) licenses.
- *
- * http://docs.jquery.com/UI
- */
-(function(c){c.ui=c.ui||{};if(!c.ui.version){c.extend(c.ui,{version:"1.9m2",plugin:{add:function(a,b,d){a=c.ui[a].prototype;for(var e in d){a.plugins[e]=a.plugins[e]||[];a.plugins[e].push([b,d[e]])}},call:function(a,b,d){if((b=a.plugins[b])&&a.element[0].parentNode)for(var e=0;e<b.length;e++)a.options[b[e][0]]&&b[e][1].apply(a.element,d)}},contains:function(a,b){return document.compareDocumentPosition?a.compareDocumentPosition(b)&16:a!==b&&a.contains(b)},hasScroll:function(a,b){if(c(a).c [...]
-"hidden")return false;b=b&&b=="left"?"scrollLeft":"scrollTop";var d=false;if(a[b]>0)return true;a[b]=1;d=a[b]>0;a[b]=0;return d},isOverAxis:function(a,b,d){return a>b&&a<b+d},isOver:function(a,b,d,e,f,g){return c.ui.isOverAxis(a,d,f)&&c.ui.isOverAxis(b,e,g)},keyCode:{ALT:18,BACKSPACE:8,CAPS_LOCK:20,COMMA:188,COMMAND:91,COMMAND_LEFT:91,COMMAND_RIGHT:93,CONTROL:17,DELETE:46,DOWN:40,END:35,ENTER:13,ESCAPE:27,HOME:36,INSERT:45,LEFT:37,MENU:93,NUMPAD_ADD:107,NUMPAD_DECIMAL:110,NUMPAD_DIVIDE:1 [...]
-NUMPAD_MULTIPLY:106,NUMPAD_SUBTRACT:109,PAGE_DOWN:34,PAGE_UP:33,PERIOD:190,RIGHT:39,SHIFT:16,SPACE:32,TAB:9,UP:38,WINDOWS:91}});c.fn.extend({_focus:c.fn.focus,focus:function(a,b){return typeof a==="number"?this.each(function(){var d=this;setTimeout(function(){c(d).focus();b&&b.call(d)},a)}):this._focus.apply(this,arguments)},enableSelection:function(){return this.attr("unselectable","off").css("MozUserSelect","")},disableSelection:function(){return this.attr("unselectable","on").css("Moz [...]
-"none")},scrollParent:function(){var a;a=c.browser.msie&&/(static|relative)/.test(this.css("position"))||/absolute/.test(this.css("position"))?this.parents().filter(function(){return/(relative|absolute|fixed)/.test(c.curCSS(this,"position",1))&&/(auto|scroll)/.test(c.curCSS(this,"overflow",1)+c.curCSS(this,"overflow-y",1)+c.curCSS(this,"overflow-x",1))}).eq(0):this.parents().filter(function(){return/(auto|scroll)/.test(c.curCSS(this,"overflow",1)+c.curCSS(this,"overflow-y",1)+c.curCSS(th [...]
-1))}).eq(0);return/fixed/.test(this.css("position"))||!a.length?c(document):a},zIndex:function(a){if(a!==undefined)return this.css("zIndex",a);if(this.length){a=c(this[0]);for(var b;a.length&&a[0]!==document;){b=a.css("position");if(b=="absolute"||b=="relative"||b=="fixed"){b=parseInt(a.css("zIndex"));if(!isNaN(b)&&b!=0)return b}a=a.parent()}}return 0}});c.extend(c.expr[":"],{data:function(a,b,d){return!!c.data(a,d[3])},focusable:function(a){var b=a.nodeName.toLowerCase(),d=c.attr(a,"tab [...]
-!a.disabled:"a"==b||"area"==b?a.href||!isNaN(d):!isNaN(d))&&!c(a)["area"==b?"parents":"closest"](":hidden").length},tabbable:function(a){var b=c.attr(a,"tabindex");return(isNaN(b)||b>=0)&&c(a).is(":focusable")}})}})(jQuery);
-(function(b){var j=b.fn.remove;b.fn.remove=function(a,c){return this.each(function(){if(!c)if(!a||b.filter(a,[this]).length)b("*",this).add(this).each(function(){b(this).triggerHandler("remove")});return j.call(b(this),a,c)})};b.widget=function(a,c,d){var e=a.split(".")[0],g;a=a.split(".")[1];g=e+"-"+a;if(!d){d=c;c=b.Widget}b.expr[":"][g]=function(f){return!!b.data(f,a)};b[e]=b[e]||{};b[e][a]=function(f,i){arguments.length&&this._createWidget(f,i)};var h=new c;h.options=b.extend({},h.opt [...]
-b.extend(true,h,{namespace:e,widgetName:a,widgetEventPrefix:b[e][a].prototype.widgetEventPrefix||a,widgetBaseClass:g,base:c.prototype},d);b.widget.bridge(a,b[e][a])};b.widget.bridge=function(a,c){b.fn[a]=function(d){var e=typeof d==="string",g=Array.prototype.slice.call(arguments,1),h=this;d=!e&&g.length?b.extend.apply(null,[true,d].concat(g)):d;if(e&&d.substring(0,1)==="_")return h;e?this.each(function(){var f=b.data(this,a),i=f&&b.isFunction(f[d])?f[d].apply(f,g):f;if(i!==f&&i!==undefi [...]
-return false}}):this.each(function(){var f=b.data(this,a);if(f){d&&f.option(d);f._init()}else b.data(this,a,new c(d,this))});return h}};b.Widget=function(a,c){arguments.length&&this._createWidget(a,c)};b.Widget.prototype={widgetName:"widget",widgetEventPrefix:"",options:{disabled:false},_createWidget:function(a,c){this.element=b(c).data(this.widgetName,this);this.options=b.extend(true,{},this.options,b.metadata&&b.metadata.get(c)[this.widgetName],a);var d=this;this.element.bind("remove." [...]
-function(){d.destroy()});this._create();this._init()},_create:function(){},_init:function(){},_super:function(a){return this.base[a].apply(this,Array.prototype.slice.call(arguments,1))},_superApply:function(a,c){return this.base[a].apply(this,c)},destroy:function(){this.element.unbind("."+this.widgetName).removeData(this.widgetName);this.widget().unbind("."+this.widgetName).removeAttr("aria-disabled").removeClass(this.widgetBaseClass+"-disabled ui-state-disabled")},widget:function(){retu [...]
-option:function(a,c){var d=a,e=this;if(arguments.length===0)return b.extend({},e.options);if(typeof a==="string"){if(c===undefined)return this.options[a];d={};d[a]=c}b.each(d,function(g,h){e._setOption(g,h)});return e},_setOption:function(a,c){this.options[a]=c;if(a==="disabled")this.widget()[c?"addClass":"removeClass"](this.widgetBaseClass+"-disabled ui-state-disabled").attr("aria-disabled",c);return this},enable:function(){return this._setOption("disabled",false)},disable:function(){re [...]
-true)},_trigger:function(a,c,d){var e=this.options[a];c=b.Event(c);c.type=(a===this.widgetEventPrefix?a:this.widgetEventPrefix+a).toLowerCase();d=d||{};if(c.originalEvent){a=b.event.props.length;for(var g;a;){g=b.event.props[--a];c[g]=c.originalEvent[g]}}this.element.trigger(c,d);return!(b.isFunction(e)&&e.call(this.element[0],c,d)===false||c.isDefaultPrevented())}}})(jQuery);
-(function(c){c.widget("ui.mouse",{options:{cancel:":input,option",distance:1,delay:0},_mouseInit:function(){var a=this;this.element.bind("mousedown."+this.widgetName,function(b){return a._mouseDown(b)}).bind("click."+this.widgetName,function(b){if(a._preventClickEvent){a._preventClickEvent=false;b.stopImmediatePropagation();return false}});this.started=false},_mouseDestroy:function(){this.element.unbind("."+this.widgetName)},_mouseDown:function(a){a.originalEvent=a.originalEvent||{};if(! [...]
-this._mouseUp(a);this._mouseDownEvent=a;var b=this,e=a.which==1,f=typeof this.options.cancel=="string"?c(a.target).parents().add(a.target).filter(this.options.cancel).length:false;if(!e||f||!this._mouseCapture(a))return true;this.mouseDelayMet=!this.options.delay;if(!this.mouseDelayMet)this._mouseDelayTimer=setTimeout(function(){b.mouseDelayMet=true},this.options.delay);if(this._mouseDistanceMet(a)&&this._mouseDelayMet(a)){this._mouseStarted=this._mouseStart(a)!==false;if(!this._mouseSta [...]
-return true}}this._mouseMoveDelegate=function(d){return b._mouseMove(d)};this._mouseUpDelegate=function(d){return b._mouseUp(d)};c(document).bind("mousemove."+this.widgetName,this._mouseMoveDelegate).bind("mouseup."+this.widgetName,this._mouseUpDelegate);c.browser.safari||a.preventDefault();return a.originalEvent.mouseHandled=true}},_mouseMove:function(a){if(c.browser.msie&&!a.button)return this._mouseUp(a);if(this._mouseStarted){this._mouseDrag(a);return a.preventDefault()}if(this._mous [...]
-this._mouseDelayMet(a))(this._mouseStarted=this._mouseStart(this._mouseDownEvent,a)!==false)?this._mouseDrag(a):this._mouseUp(a);return!this._mouseStarted},_mouseUp:function(a){c(document).unbind("mousemove."+this.widgetName,this._mouseMoveDelegate).unbind("mouseup."+this.widgetName,this._mouseUpDelegate);if(this._mouseStarted){this._mouseStarted=false;this._preventClickEvent=a.target==this._mouseDownEvent.target;this._mouseStop(a)}return false},_mouseDistanceMet:function(a){return Math. [...]
-a.pageX),Math.abs(this._mouseDownEvent.pageY-a.pageY))>=this.options.distance},_mouseDelayMet:function(){return this.mouseDelayMet},_mouseStart:function(){},_mouseDrag:function(){},_mouseStop:function(){},_mouseCapture:function(){return true}})})(jQuery);
-(function(d){d.widget("ui.draggable",d.ui.mouse,{widgetEventPrefix:"drag",options:{addClasses:true,appendTo:"parent",axis:false,connectToSortable:false,containment:false,cursor:"auto",cursorAt:false,grid:false,handle:false,helper:"original",iframeFix:false,opacity:false,refreshPositions:false,revert:false,revertDuration:500,scope:"default",scroll:true,scrollSensitivity:20,scrollSpeed:20,snap:false,snapMode:"both",snapTolerance:20,stack:false,zIndex:false},_create:function(){if(this.optio [...]
-"original"&&!/^(?:r|a|f)/.test(this.element.css("position")))this.element[0].style.position="relative";this.options.addClasses&&this.element.addClass("ui-draggable");this.options.disabled&&this.element.addClass("ui-draggable-disabled");this._mouseInit()},destroy:function(){if(this.element.data("draggable")){this.element.removeData("draggable").unbind(".draggable").removeClass("ui-draggable ui-draggable-dragging ui-draggable-disabled");this._mouseDestroy();return this}},_mouseCapture:func [...]
-this.options;if(this.helper||b.disabled||d(a.target).is(".ui-resizable-handle"))return false;this.handle=this._getHandle(a);if(!this.handle)return false;return true},_mouseStart:function(a){var b=this.options;this.helper=this._createHelper(a);this._cacheHelperProportions();if(d.ui.ddmanager)d.ui.ddmanager.current=this;this._cacheMargins();this.cssPosition=this.helper.css("position");this.scrollParent=this.helper.scrollParent();this.offset=this.positionAbs=this.element.offset();this.offse [...]
-this.margins.top,left:this.offset.left-this.margins.left};d.extend(this.offset,{click:{left:a.pageX-this.offset.left,top:a.pageY-this.offset.top},parent:this._getParentOffset(),relative:this._getRelativeOffset()});this.originalPosition=this.position=this._generatePosition(a);this.originalPageX=a.pageX;this.originalPageY=a.pageY;b.cursorAt&&this._adjustOffsetFromHelper(b.cursorAt);b.containment&&this._setContainment();if(this._trigger("start",a)===false){this._clear();return false}this._c [...]
-d.ui.ddmanager&&!b.dropBehaviour&&d.ui.ddmanager.prepareOffsets(this,a);this.helper.addClass("ui-draggable-dragging");this._mouseDrag(a,true);return true},_mouseDrag:function(a,b){this.position=this._generatePosition(a);this.positionAbs=this._convertPositionTo("absolute");if(!b){b=this._uiHash();if(this._trigger("drag",a,b)===false){this._mouseUp({});return false}this.position=b.position}if(!this.options.axis||this.options.axis!="y")this.helper[0].style.left=this.position.left+"px";if(!t [...]
-this.options.axis!="x")this.helper[0].style.top=this.position.top+"px";d.ui.ddmanager&&d.ui.ddmanager.drag(this,a);return false},_mouseStop:function(a){var b=false;if(d.ui.ddmanager&&!this.options.dropBehaviour)b=d.ui.ddmanager.drop(this,a);if(this.dropped){b=this.dropped;this.dropped=false}if(!this.element[0]||!this.element[0].parentNode)return false;if(this.options.revert=="invalid"&&!b||this.options.revert=="valid"&&b||this.options.revert===true||d.isFunction(this.options.revert)&&thi [...]
-b)){var c=this;d(this.helper).animate(this.originalPosition,parseInt(this.options.revertDuration,10),function(){c._trigger("stop",a)!==false&&c._clear()})}else this._trigger("stop",a)!==false&&this._clear();return false},cancel:function(){this.helper.is(".ui-draggable-dragging")?this._mouseUp({}):this._clear();return this},_getHandle:function(a){var b=!this.options.handle||!d(this.options.handle,this.element).length?true:false;d(this.options.handle,this.element).find("*").andSelf().each( [...]
-a.target)b=true});return b},_createHelper:function(a){var b=this.options;a=d.isFunction(b.helper)?d(b.helper.apply(this.element[0],[a])):b.helper=="clone"?this.element.clone():this.element;a.parents("body").length||a.appendTo(b.appendTo=="parent"?this.element[0].parentNode:b.appendTo);a[0]!=this.element[0]&&!/(fixed|absolute)/.test(a.css("position"))&&a.css("position","absolute");return a},_adjustOffsetFromHelper:function(a){if(typeof a=="string")a=a.split(" ");if(d.isArray(a))a={left:+a [...]
-0};if("left"in a)this.offset.click.left=a.left+this.margins.left;if("right"in a)this.offset.click.left=this.helperProportions.width-a.right+this.margins.left;if("top"in a)this.offset.click.top=a.top+this.margins.top;if("bottom"in a)this.offset.click.top=this.helperProportions.height-a.bottom+this.margins.top},_getParentOffset:function(){this.offsetParent=this.helper.offsetParent();var a=this.offsetParent.offset();if(this.cssPosition=="absolute"&&this.scrollParent[0]!=document&&d.ui.conta [...]
-this.offsetParent[0])){a.left+=this.scrollParent.scrollLeft();a.top+=this.scrollParent.scrollTop()}if(this.offsetParent[0]==document.body||this.offsetParent[0].tagName&&this.offsetParent[0].tagName.toLowerCase()=="html"&&d.browser.msie)a={top:0,left:0};return{top:a.top+(parseInt(this.offsetParent.css("borderTopWidth"),10)||0),left:a.left+(parseInt(this.offsetParent.css("borderLeftWidth"),10)||0)}},_getRelativeOffset:function(){if(this.cssPosition=="relative"){var a=this.element.position( [...]
-(parseInt(this.helper.css("top"),10)||0)+this.scrollParent.scrollTop(),left:a.left-(parseInt(this.helper.css("left"),10)||0)+this.scrollParent.scrollLeft()}}else return{top:0,left:0}},_cacheMargins:function(){this.margins={left:parseInt(this.element.css("marginLeft"),10)||0,top:parseInt(this.element.css("marginTop"),10)||0}},_cacheHelperProportions:function(){this.helperProportions={width:this.helper.outerWidth(),height:this.helper.outerHeight()}},_setContainment:function(){var a=this.op [...]
-"parent")a.containment=this.helper[0].parentNode;if(a.containment=="document"||a.containment=="window")this.containment=[0-this.offset.relative.left-this.offset.parent.left,0-this.offset.relative.top-this.offset.parent.top,d(a.containment=="document"?document:window).width()-this.helperProportions.width-this.margins.left,(d(a.containment=="document"?document:window).height()||document.body.parentNode.scrollHeight)-this.helperProportions.height-this.margins.top];if(!/^(document|window|par [...]
-a.containment.constructor!=Array){var b=d(a.containment)[0];if(b){a=d(a.containment).offset();var c=d(b).css("overflow")!="hidden";this.containment=[a.left+(parseInt(d(b).css("borderLeftWidth"),10)||0)+(parseInt(d(b).css("paddingLeft"),10)||0)-this.margins.left,a.top+(parseInt(d(b).css("borderTopWidth"),10)||0)+(parseInt(d(b).css("paddingTop"),10)||0)-this.margins.top,a.left+(c?Math.max(b.scrollWidth,b.offsetWidth):b.offsetWidth)-(parseInt(d(b).css("borderLeftWidth"),10)||0)-(parseInt(d( [...]
-10)||0)-this.helperProportions.width-this.margins.left,a.top+(c?Math.max(b.scrollHeight,b.offsetHeight):b.offsetHeight)-(parseInt(d(b).css("borderTopWidth"),10)||0)-(parseInt(d(b).css("paddingBottom"),10)||0)-this.helperProportions.height-this.margins.top]}}else if(a.containment.constructor==Array)this.containment=a.containment},_convertPositionTo:function(a,b){if(!b)b=this.position;a=a=="absolute"?1:-1;var c=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&d.ui.contains(t [...]
-this.offsetParent[0]))?this.offsetParent:this.scrollParent,f=/(html|body)/i.test(c[0].tagName);return{top:b.top+this.offset.relative.top*a+this.offset.parent.top*a-(d.browser.safari&&d.browser.version<526&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():f?0:c.scrollTop())*a),left:b.left+this.offset.relative.left*a+this.offset.parent.left*a-(d.browser.safari&&d.browser.version<526&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollPar [...]
-f?0:c.scrollLeft())*a)}},_generatePosition:function(a){var b=this.options,c=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&d.ui.contains(this.scrollParent[0],this.offsetParent[0]))?this.offsetParent:this.scrollParent,f=/(html|body)/i.test(c[0].tagName),e=a.pageX,g=a.pageY;if(this.originalPosition){if(this.containment){if(a.pageX-this.offset.click.left<this.containment[0])e=this.containment[0]+this.offset.click.left;if(a.pageY-this.offset.click.top<this.containment[1])g=t [...]
-this.offset.click.top;if(a.pageX-this.offset.click.left>this.containment[2])e=this.containment[2]+this.offset.click.left;if(a.pageY-this.offset.click.top>this.containment[3])g=this.containment[3]+this.offset.click.top}if(b.grid){g=this.originalPageY+Math.round((g-this.originalPageY)/b.grid[1])*b.grid[1];g=this.containment?!(g-this.offset.click.top<this.containment[1]||g-this.offset.click.top>this.containment[3])?g:!(g-this.offset.click.top<this.containment[1])?g-b.grid[1]:g+b.grid[1]:g;e [...]
-Math.round((e-this.originalPageX)/b.grid[0])*b.grid[0];e=this.containment?!(e-this.offset.click.left<this.containment[0]||e-this.offset.click.left>this.containment[2])?e:!(e-this.offset.click.left<this.containment[0])?e-b.grid[0]:e+b.grid[0]:e}}return{top:g-this.offset.click.top-this.offset.relative.top-this.offset.parent.top+(d.browser.safari&&d.browser.version<526&&this.cssPosition=="fixed"?0:this.cssPosition=="fixed"?-this.scrollParent.scrollTop():f?0:c.scrollTop()),left:e-this.offset [...]
-this.offset.relative.left-this.offset.parent.left+(d.browser.safari&&d.browser.version<526&&this.cssPosition=="fixed"?0:this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():f?0:c.scrollLeft())}},_clear:function(){this.helper.removeClass("ui-draggable-dragging");this.helper[0]!=this.element[0]&&!this.cancelHelperRemoval&&this.helper.remove();this.helper=null;this.cancelHelperRemoval=false},_trigger:function(a,b,c){c=c||this._uiHash();d.ui.plugin.call(this,a,[b,c]);if(a=="drag")this.p [...]
-this._convertPositionTo("absolute");return d.Widget.prototype._trigger.call(this,a,b,c)},plugins:{},_uiHash:function(){return{helper:this.helper,position:this.position,originalPosition:this.originalPosition,offset:this.positionAbs}}});d.extend(d.ui.draggable,{version:"1.9m2"});d.ui.plugin.add("draggable","connectToSortable",{start:function(a,b){var c=d(this).data("draggable"),f=c.options,e=d.extend({},b,{item:c.element});c.sortables=[];d(f.connectToSortable).each(function(){var g=d.data( [...]
-if(g&&!g.options.disabled){c.sortables.push({instance:g,shouldRevert:g.options.revert});g._refreshItems();g._trigger("activate",a,e)}})},stop:function(a,b){var c=d(this).data("draggable"),f=d.extend({},b,{item:c.element});d.each(c.sortables,function(){if(this.instance.isOver){this.instance.isOver=0;c.cancelHelperRemoval=true;this.instance.cancelHelperRemoval=false;if(this.shouldRevert)this.instance.options.revert=true;this.instance._mouseStop(a);this.instance.options.helper=this.instance [...]
-c.options.helper=="original"&&this.instance.currentItem.css({top:"auto",left:"auto"})}else{this.instance.cancelHelperRemoval=false;this.instance._trigger("deactivate",a,f)}})},drag:function(a,b){var c=d(this).data("draggable"),f=this;d.each(c.sortables,function(){this.instance.positionAbs=c.positionAbs;this.instance.helperProportions=c.helperProportions;this.instance.offset.click=c.offset.click;if(this.instance._intersectsWith(this.instance.containerCache)){if(!this.instance.isOver){this [...]
-1;this.instance.currentItem=d(f).clone().appendTo(this.instance.element).data("sortable-item",true);this.instance.options._helper=this.instance.options.helper;this.instance.options.helper=function(){return b.helper[0]};a.target=this.instance.currentItem[0];this.instance._mouseCapture(a,true);this.instance._mouseStart(a,true,true);this.instance.offset.click.top=c.offset.click.top;this.instance.offset.click.left=c.offset.click.left;this.instance.offset.parent.left-=c.offset.parent.left-thi [...]
-this.instance.offset.parent.top-=c.offset.parent.top-this.instance.offset.parent.top;c._trigger("toSortable",a);c.dropped=this.instance.element;c.currentItem=c.element;this.instance.fromOutside=c}this.instance.currentItem&&this.instance._mouseDrag(a)}else if(this.instance.isOver){this.instance.isOver=0;this.instance.cancelHelperRemoval=true;this.instance.options.revert=false;this.instance._trigger("out",a,this.instance._uiHash(this.instance));this.instance._mouseStop(a,true);this.instanc [...]
-this.instance.options._helper;this.instance.currentItem.remove();this.instance.placeholder&&this.instance.placeholder.remove();c._trigger("fromSortable",a);c.dropped=false}})}});d.ui.plugin.add("draggable","cursor",{start:function(){var a=d("body"),b=d(this).data("draggable").options;if(a.css("cursor"))b._cursor=a.css("cursor");a.css("cursor",b.cursor)},stop:function(){var a=d(this).data("draggable").options;a._cursor&&d("body").css("cursor",a._cursor)}});d.ui.plugin.add("draggable","ifr [...]
-d(this).data("draggable").options;d(a.iframeFix===true?"iframe":a.iframeFix).each(function(){d('<div class="ui-draggable-iframeFix" style="background: #fff;"></div>').css({width:this.offsetWidth+"px",height:this.offsetHeight+"px",position:"absolute",opacity:"0.001",zIndex:1E3}).css(d(this).offset()).appendTo("body")})},stop:function(){d("div.ui-draggable-iframeFix").each(function(){this.parentNode.removeChild(this)})}});d.ui.plugin.add("draggable","opacity",{start:function(a,b){a=d(b.hel [...]
-if(a.css("opacity"))b._opacity=a.css("opacity");a.css("opacity",b.opacity)},stop:function(a,b){a=d(this).data("draggable").options;a._opacity&&d(b.helper).css("opacity",a._opacity)}});d.ui.plugin.add("draggable","scroll",{start:function(){var a=d(this).data("draggable");if(a.scrollParent[0]!=document&&a.scrollParent[0].tagName!="HTML")a.overflowOffset=a.scrollParent.offset()},drag:function(a){var b=d(this).data("draggable"),c=b.options,f=false;if(b.scrollParent[0]!=document&&b.scrollPare [...]
-"HTML"){if(!c.axis||c.axis!="x")if(b.overflowOffset.top+b.scrollParent[0].offsetHeight-a.pageY<c.scrollSensitivity)b.scrollParent[0].scrollTop=f=b.scrollParent[0].scrollTop+c.scrollSpeed;else if(a.pageY-b.overflowOffset.top<c.scrollSensitivity)b.scrollParent[0].scrollTop=f=b.scrollParent[0].scrollTop-c.scrollSpeed;if(!c.axis||c.axis!="y")if(b.overflowOffset.left+b.scrollParent[0].offsetWidth-a.pageX<c.scrollSensitivity)b.scrollParent[0].scrollLeft=f=b.scrollParent[0].scrollLeft+c.scrollS [...]
-b.overflowOffset.left<c.scrollSensitivity)b.scrollParent[0].scrollLeft=f=b.scrollParent[0].scrollLeft-c.scrollSpeed}else{if(!c.axis||c.axis!="x")if(a.pageY-d(document).scrollTop()<c.scrollSensitivity)f=d(document).scrollTop(d(document).scrollTop()-c.scrollSpeed);else if(d(window).height()-(a.pageY-d(document).scrollTop())<c.scrollSensitivity)f=d(document).scrollTop(d(document).scrollTop()+c.scrollSpeed);if(!c.axis||c.axis!="y")if(a.pageX-d(document).scrollLeft()<c.scrollSensitivity)f=d(d [...]
-c.scrollSpeed);else if(d(window).width()-(a.pageX-d(document).scrollLeft())<c.scrollSensitivity)f=d(document).scrollLeft(d(document).scrollLeft()+c.scrollSpeed)}f!==false&&d.ui.ddmanager&&!c.dropBehaviour&&d.ui.ddmanager.prepareOffsets(b,a)}});d.ui.plugin.add("draggable","snap",{start:function(){var a=d(this).data("draggable"),b=a.options;a.snapElements=[];d(b.snap.constructor!=String?b.snap.items||":data(draggable)":b.snap).each(function(){var c=d(this),f=c.offset();this!=a.element[0]&& [...]
-width:c.outerWidth(),height:c.outerHeight(),top:f.top,left:f.left})})},drag:function(a,b){for(var c=d(this).data("draggable"),f=c.options,e=f.snapTolerance,g=b.offset.left,n=g+c.helperProportions.width,m=b.offset.top,o=m+c.helperProportions.height,h=c.snapElements.length-1;h>=0;h--){var i=c.snapElements[h].left,k=i+c.snapElements[h].width,j=c.snapElements[h].top,l=j+c.snapElements[h].height;if(i-e<g&&g<k+e&&j-e<m&&m<l+e||i-e<g&&g<k+e&&j-e<o&&o<l+e||i-e<n&&n<k+e&&j-e<m&&m<l+e||i-e<n&&n<k+ [...]
-o<l+e){if(f.snapMode!="inner"){var p=Math.abs(j-o)<=e,q=Math.abs(l-m)<=e,r=Math.abs(i-n)<=e,s=Math.abs(k-g)<=e;if(p)b.position.top=c._convertPositionTo("relative",{top:j-c.helperProportions.height,left:0}).top-c.margins.top;if(q)b.position.top=c._convertPositionTo("relative",{top:l,left:0}).top-c.margins.top;if(r)b.position.left=c._convertPositionTo("relative",{top:0,left:i-c.helperProportions.width}).left-c.margins.left;if(s)b.position.left=c._convertPositionTo("relative",{top:0,left:k} [...]
-p||q||r||s;if(f.snapMode!="outer"){p=Math.abs(j-m)<=e;q=Math.abs(l-o)<=e;r=Math.abs(i-g)<=e;s=Math.abs(k-n)<=e;if(p)b.position.top=c._convertPositionTo("relative",{top:j,left:0}).top-c.margins.top;if(q)b.position.top=c._convertPositionTo("relative",{top:l-c.helperProportions.height,left:0}).top-c.margins.top;if(r)b.position.left=c._convertPositionTo("relative",{top:0,left:i}).left-c.margins.left;if(s)b.position.left=c._convertPositionTo("relative",{top:0,left:k-c.helperProportions.width} [...]
-(p||q||r||s||t))c.options.snap.snap&&c.options.snap.snap.call(c.element,a,d.extend(c._uiHash(),{snapItem:c.snapElements[h].item}));c.snapElements[h].snapping=p||q||r||s||t}else{c.snapElements[h].snapping&&c.options.snap.release&&c.options.snap.release.call(c.element,a,d.extend(c._uiHash(),{snapItem:c.snapElements[h].item}));c.snapElements[h].snapping=false}}}});d.ui.plugin.add("draggable","stack",{start:function(){var a=d(this).data("draggable").options;a=d.makeArray(d(a.stack)).sort(fun [...]
-10)||0)-(parseInt(d(f).css("zIndex"),10)||0)});if(a.length){var b=parseInt(a[0].style.zIndex)||0;d(a).each(function(c){this.style.zIndex=b+c});this[0].style.zIndex=b+a.length}}});d.ui.plugin.add("draggable","zIndex",{start:function(a,b){a=d(b.helper);b=d(this).data("draggable").options;if(a.css("zIndex"))b._zIndex=a.css("zIndex");a.css("zIndex",b.zIndex)},stop:function(a,b){a=d(this).data("draggable").options;a._zIndex&&d(b.helper).css("zIndex",a._zIndex)}})})(jQuery);
-(function(d){d.widget("ui.droppable",{widgetEventPrefix:"drop",options:{accept:"*",activeClass:false,addClasses:true,greedy:false,hoverClass:false,scope:"default",tolerance:"intersect"},_create:function(){var a=this.options,b=a.accept;this.isover=0;this.isout=1;this.accept=d.isFunction(b)?b:function(c){return c.is(b)};this.proportions={width:this.element[0].offsetWidth,height:this.element[0].offsetHeight};d.ui.ddmanager.droppables[a.scope]=d.ui.ddmanager.droppables[a.scope]||[];d.ui.ddma [...]
-a.addClasses&&this.element.addClass("ui-droppable")},destroy:function(){for(var a=d.ui.ddmanager.droppables[this.options.scope],b=0;b<a.length;b++)a[b]==this&&a.splice(b,1);this.element.removeClass("ui-droppable ui-droppable-disabled").removeData("droppable").unbind(".droppable");return this},_setOption:function(a,b){if(a=="accept")this.accept=d.isFunction(b)?b:function(c){return c.is(b)};d.Widget.prototype._setOption.apply(this,arguments)},_activate:function(a){var b=d.ui.ddmanager.curr [...]
-this.element.addClass(this.options.activeClass);b&&this._trigger("activate",a,this.ui(b))},_deactivate:function(a){var b=d.ui.ddmanager.current;this.options.activeClass&&this.element.removeClass(this.options.activeClass);b&&this._trigger("deactivate",a,this.ui(b))},_over:function(a){var b=d.ui.ddmanager.current;if(!(!b||(b.currentItem||b.element)[0]==this.element[0]))if(this.accept.call(this.element[0],b.currentItem||b.element)){this.options.hoverClass&&this.element.addClass(this.options [...]
-this._trigger("over",a,this.ui(b))}},_out:function(a){var b=d.ui.ddmanager.current;if(!(!b||(b.currentItem||b.element)[0]==this.element[0]))if(this.accept.call(this.element[0],b.currentItem||b.element)){this.options.hoverClass&&this.element.removeClass(this.options.hoverClass);this._trigger("out",a,this.ui(b))}},_drop:function(a,b){var c=b||d.ui.ddmanager.current;if(!c||(c.currentItem||c.element)[0]==this.element[0])return false;var e=false;this.element.find(":data(droppable)").not(".ui- [...]
-d.data(this,"droppable");if(g.options.greedy&&!g.options.disabled&&g.options.scope==c.options.scope&&g.accept.call(g.element[0],c.currentItem||c.element)&&d.ui.intersect(c,d.extend(g,{offset:g.element.offset()}),g.options.tolerance)){e=true;return false}});if(e)return false;if(this.accept.call(this.element[0],c.currentItem||c.element)){this.options.activeClass&&this.element.removeClass(this.options.activeClass);this.options.hoverClass&&this.element.removeClass(this.options.hoverClass);th [...]
-a,this.ui(c));return this.element}return false},ui:function(a){return{draggable:a.currentItem||a.element,helper:a.helper,position:a.position,offset:a.positionAbs}}});d.extend(d.ui.droppable,{version:"1.9m2"});d.ui.intersect=function(a,b,c){if(!b.offset)return false;var e=(a.positionAbs||a.position.absolute).left,g=e+a.helperProportions.width,f=(a.positionAbs||a.position.absolute).top,h=f+a.helperProportions.height,i=b.offset.left,k=i+b.proportions.width,j=b.offset.top,l=j+b.proportions.height;
-switch(c){case "fit":return i<e&&g<k&&j<f&&h<l;case "intersect":return i<e+a.helperProportions.width/2&&g-a.helperProportions.width/2<k&&j<f+a.helperProportions.height/2&&h-a.helperProportions.height/2<l;case "pointer":return d.ui.isOver((a.positionAbs||a.position.absolute).top+(a.clickOffset||a.offset.click).top,(a.positionAbs||a.position.absolute).left+(a.clickOffset||a.offset.click).left,j,i,b.proportions.height,b.proportions.width);case "touch":return(f>=j&&f<=l||h>=j&&h<=l||f<j&&h>l [...]
-e<=k||g>=i&&g<=k||e<i&&g>k);default:return false}};d.ui.ddmanager={current:null,droppables:{"default":[]},prepareOffsets:function(a,b){var c=d.ui.ddmanager.droppables[a.options.scope]||[],e=b?b.type:null,g=(a.currentItem||a.element).find(":data(droppable)").andSelf(),f=0;a:for(;f<c.length;f++)if(!(c[f].options.disabled||a&&!c[f].accept.call(c[f].element[0],a.currentItem||a.element))){for(var h=0;h<g.length;h++)if(g[h]==c[f].element[0]){c[f].proportions.height=0;continue a}c[f].visible=c[ [...]
-"none";if(c[f].visible){c[f].offset=c[f].element.offset();c[f].proportions={width:c[f].element[0].offsetWidth,height:c[f].element[0].offsetHeight};e=="mousedown"&&c[f]._activate.call(c[f],b)}}},drop:function(a,b){var c=false;d.each(d.ui.ddmanager.droppables[a.options.scope]||[],function(){if(this.options){if(!this.options.disabled&&this.visible&&d.ui.intersect(a,this,this.options.tolerance))c=c||this._drop.call(this,b);if(!this.options.disabled&&this.visible&&this.accept.call(this.elemen [...]
-a.element)){this.isout=1;this.isover=0;this._deactivate.call(this,b)}}});return c},drag:function(a,b){a.options.refreshPositions&&d.ui.ddmanager.prepareOffsets(a,b);d.each(d.ui.ddmanager.droppables[a.options.scope]||[],function(){if(!(this.options.disabled||this.greedyChild||!this.visible)){var c=d.ui.intersect(a,this,this.options.tolerance);if(c=!c&&this.isover==1?"isout":c&&this.isover==0?"isover":null){var e;if(this.options.greedy){var g=this.element.parents(":data(droppable):eq(0)"); [...]
-d.data(g[0],"droppable");e.greedyChild=c=="isover"?1:0}}if(e&&c=="isover"){e.isover=0;e.isout=1;e._out.call(e,b)}this[c]=1;this[c=="isout"?"isover":"isout"]=0;this[c=="isover"?"_over":"_out"].call(this,b);if(e&&c=="isout"){e.isout=0;e.isover=1;e._over.call(e,b)}}}})}}})(jQuery);
-(function(e){e.widget("ui.resizable",e.ui.mouse,{widgetEventPrefix:"resize",options:{alsoResize:false,animate:false,animateDuration:"slow",animateEasing:"swing",aspectRatio:false,autoHide:false,containment:false,ghost:false,grid:false,handles:"e,s,se",helper:false,maxHeight:null,maxWidth:null,minHeight:10,minWidth:10,zIndex:1E3},_create:function(){var b=this,a=this.options;this.element.addClass("ui-resizable");e.extend(this,{_aspectRatio:!!a.aspectRatio,aspectRatio:a.aspectRatio,original [...]
-_proportionallyResizeElements:[],_helper:a.helper||a.ghost||a.animate?a.helper||"ui-resizable-helper":null});if(this.element[0].nodeName.match(/canvas|textarea|input|select|button|img/i)){/relative/.test(this.element.css("position"))&&e.browser.opera&&this.element.css({position:"relative",top:"auto",left:"auto"});this.element.wrap(e('<div class="ui-wrapper" style="overflow: hidden;"></div>').css({position:this.element.css("position"),width:this.element.outerWidth(),height:this.element.ou [...]
-top:this.element.css("top"),left:this.element.css("left")}));this.element=this.element.parent().data("resizable",this.element.data("resizable"));this.elementIsWrapper=true;this.element.css({marginLeft:this.originalElement.css("marginLeft"),marginTop:this.originalElement.css("marginTop"),marginRight:this.originalElement.css("marginRight"),marginBottom:this.originalElement.css("marginBottom")});this.originalElement.css({marginLeft:0,marginTop:0,marginRight:0,marginBottom:0});this.originalR [...]
-this.originalElement.css("resize");this.originalElement.css("resize","none");this._proportionallyResizeElements.push(this.originalElement.css({position:"static",zoom:1,display:"block"}));this.originalElement.css({margin:this.originalElement.css("margin")});this._proportionallyResize()}this.handles=a.handles||(!e(".ui-resizable-handle",this.element).length?"e,s,se":{n:".ui-resizable-n",e:".ui-resizable-e",s:".ui-resizable-s",w:".ui-resizable-w",se:".ui-resizable-se",sw:".ui-resizable-sw", [...]
-nw:".ui-resizable-nw"});if(this.handles.constructor==String){if(this.handles=="all")this.handles="n,e,s,w,se,sw,ne,nw";var c=this.handles.split(",");this.handles={};for(var d=0;d<c.length;d++){var f=e.trim(c[d]),g=e('<div class="ui-resizable-handle '+("ui-resizable-"+f)+'"></div>');/sw|se|ne|nw/.test(f)&&g.css({zIndex:++a.zIndex});"se"==f&&g.addClass("ui-icon ui-icon-gripsmall-diagonal-se");this.handles[f]=".ui-resizable-"+f;this.element.append(g)}}this._renderAxis=function(h){h=h||this. [...]
-String)this.handles[i]=e(this.handles[i],this.element).show();if(this.elementIsWrapper&&this.originalElement[0].nodeName.match(/textarea|input|select|button/i)){var j=e(this.handles[i],this.element),k=0;k=/sw|ne|nw|se|n|s/.test(i)?j.outerHeight():j.outerWidth();j=["padding",/ne|nw|n/.test(i)?"Top":/se|sw|s/.test(i)?"Bottom":/^e$/.test(i)?"Right":"Left"].join("");h.css(j,k);this._proportionallyResize()}e(this.handles[i])}};this._renderAxis(this.element);this._handles=e(".ui-resizable-hand [...]
-this._handles.mouseover(function(){if(!b.resizing){if(this.className)var h=this.className.match(/ui-resizable-(se|sw|ne|nw|n|e|s|w)/i);b.axis=h&&h[1]?h[1]:"se"}});if(a.autoHide){this._handles.hide();e(this.element).addClass("ui-resizable-autohide").hover(function(){e(this).removeClass("ui-resizable-autohide");b._handles.show()},function(){if(!b.resizing){e(this).addClass("ui-resizable-autohide");b._handles.hide()}})}this._mouseInit()},destroy:function(){this._mouseDestroy();var b=functio [...]
-if(this.elementIsWrapper){b(this.element);var a=this.element;a.after(this.originalElement.css({position:a.css("position"),width:a.outerWidth(),height:a.outerHeight(),top:a.css("top"),left:a.css("left")})).remove()}this.originalElement.css("resize",this.originalResizeStyle);b(this.originalElement);return this},_mouseCapture:function(b){var a=false;for(var c in this.handles)if(e(this.handles[c])[0]==b.target)a=true;return!this.options.disabled&&a},_mouseStart:function(b){var a=this.options [...]
-d=this.element;this.resizing=true;this.documentScroll={top:e(document).scrollTop(),left:e(document).scrollLeft()};if(d.is(".ui-draggable")||/absolute/.test(d.css("position")))d.css({position:"absolute",top:c.top,left:c.left});e.browser.opera&&/relative/.test(d.css("position"))&&d.css({position:"relative",top:"auto",left:"auto"});this._renderProxy();c=m(this.helper.css("left"));var f=m(this.helper.css("top"));if(a.containment){c+=e(a.containment).scrollLeft()||0;f+=e(a.containment).scroll [...]
-this.helper.offset();this.position={left:c,top:f};this.size=this._helper?{width:d.outerWidth(),height:d.outerHeight()}:{width:d.width(),height:d.height()};this.originalSize=this._helper?{width:d.outerWidth(),height:d.outerHeight()}:{width:d.width(),height:d.height()};this.originalPosition={left:c,top:f};this.sizeDiff={width:d.outerWidth()-d.width(),height:d.outerHeight()-d.height()};this.originalMousePosition={left:b.pageX,top:b.pageY};this.aspectRatio=typeof a.aspectRatio=="number"?a.as [...]
-this.originalSize.width/this.originalSize.height||1;a=e(".ui-resizable-"+this.axis).css("cursor");e("body").css("cursor",a=="auto"?this.axis+"-resize":a);d.addClass("ui-resizable-resizing");this._propagate("start",b);return true},_mouseDrag:function(b){var a=this.helper,c=this.originalMousePosition,d=this._change[this.axis];if(!d)return false;c=d.apply(this,[b,b.pageX-c.left||0,b.pageY-c.top||0]);if(this._aspectRatio||b.shiftKey)c=this._updateRatio(c,b);c=this._respectSize(c,b);this._pro [...]
-b);a.css({top:this.position.top+"px",left:this.position.left+"px",width:this.size.width+"px",height:this.size.height+"px"});!this._helper&&this._proportionallyResizeElements.length&&this._proportionallyResize();this._updateCache(c);this._trigger("resize",b,this.ui());return false},_mouseStop:function(b){this.resizing=false;var a=this.options,c=this;if(this._helper){var d=this._proportionallyResizeElements,f=d.length&&/textarea/i.test(d[0].nodeName);d=f&&e.ui.hasScroll(d[0],"left")?0:c.si [...]
-f={width:c.size.width-(f?0:c.sizeDiff.width),height:c.size.height-d};d=parseInt(c.element.css("left"),10)+(c.position.left-c.originalPosition.left)||null;var g=parseInt(c.element.css("top"),10)+(c.position.top-c.originalPosition.top)||null;a.animate||this.element.css(e.extend(f,{top:g,left:d}));c.helper.height(c.size.height);c.helper.width(c.size.width);this._helper&&!a.animate&&this._proportionallyResize()}e("body").css("cursor","auto");this.element.removeClass("ui-resizable-resizing"); [...]
-b);this._helper&&this.helper.remove();return false},_updateCache:function(b){this.offset=this.helper.offset();if(l(b.left))this.position.left=b.left;if(l(b.top))this.position.top=b.top;if(l(b.height))this.size.height=b.height;if(l(b.width))this.size.width=b.width},_updateRatio:function(b){var a=this.position,c=this.size,d=this.axis;if(b.height)b.width=c.height*this.aspectRatio;else if(b.width)b.height=c.width/this.aspectRatio;if(d=="sw"){b.left=a.left+(c.width-b.width);b.top=null}if(d==" [...]
-a.top+(c.height-b.height);b.left=a.left+(c.width-b.width)}return b},_respectSize:function(b){var a=this.options,c=this.axis,d=l(b.width)&&a.maxWidth&&a.maxWidth<b.width,f=l(b.height)&&a.maxHeight&&a.maxHeight<b.height,g=l(b.width)&&a.minWidth&&a.minWidth>b.width,h=l(b.height)&&a.minHeight&&a.minHeight>b.height;if(g)b.width=a.minWidth;if(h)b.height=a.minHeight;if(d)b.width=a.maxWidth;if(f)b.height=a.maxHeight;var i=this.originalPosition.left+this.originalSize.width,j=this.position.top+thi [...]
-k=/sw|nw|w/.test(c);c=/nw|ne|n/.test(c);if(g&&k)b.left=i-a.minWidth;if(d&&k)b.left=i-a.maxWidth;if(h&&c)b.top=j-a.minHeight;if(f&&c)b.top=j-a.maxHeight;if((a=!b.width&&!b.height)&&!b.left&&b.top)b.top=null;else if(a&&!b.top&&b.left)b.left=null;return b},_proportionallyResize:function(){if(this._proportionallyResizeElements.length)for(var b=this.helper||this.element,a=0;a<this._proportionallyResizeElements.length;a++){var c=this._proportionallyResizeElements[a];if(!this.borderDif){var d=[ [...]
-c.css("borderRightWidth"),c.css("borderBottomWidth"),c.css("borderLeftWidth")],f=[c.css("paddingTop"),c.css("paddingRight"),c.css("paddingBottom"),c.css("paddingLeft")];this.borderDif=e.map(d,function(g,h){g=parseInt(g,10)||0;h=parseInt(f[h],10)||0;return g+h})}e.browser.msie&&(e(b).is(":hidden")||e(b).parents(":hidden").length)||c.css({height:b.height()-this.borderDif[0]-this.borderDif[2]||0,width:b.width()-this.borderDif[1]-this.borderDif[3]||0})}},_renderProxy:function(){var b=this.op [...]
-this.element.offset();if(this._helper){this.helper=this.helper||e('<div style="overflow:hidden;"></div>');var a=e.browser.msie&&e.browser.version<7,c=a?1:0;a=a?2:-1;this.helper.addClass(this._helper).css({width:this.element.outerWidth()+a,height:this.element.outerHeight()+a,position:"absolute",left:this.elementOffset.left-c+"px",top:this.elementOffset.top-c+"px",zIndex:++b.zIndex});this.helper.appendTo("body").disableSelection()}else this.helper=this.element},_change:{e:function(b,a){ret [...]
-a}},w:function(b,a){return{left:this.originalPosition.left+a,width:this.originalSize.width-a}},n:function(b,a,c){return{top:this.originalPosition.top+c,height:this.originalSize.height-c}},s:function(b,a,c){return{height:this.originalSize.height+c}},se:function(b,a,c){return e.extend(this._change.s.apply(this,arguments),this._change.e.apply(this,[b,a,c]))},sw:function(b,a,c){return e.extend(this._change.s.apply(this,arguments),this._change.w.apply(this,[b,a,c]))},ne:function(b,a,c){return [...]
-arguments),this._change.e.apply(this,[b,a,c]))},nw:function(b,a,c){return e.extend(this._change.n.apply(this,arguments),this._change.w.apply(this,[b,a,c]))}},_propagate:function(b,a){e.ui.plugin.call(this,b,[a,this.ui()]);b!="resize"&&this._trigger(b,a,this.ui())},plugins:{},ui:function(){return{originalElement:this.originalElement,element:this.element,helper:this.helper,position:this.position,size:this.size,originalSize:this.originalSize,originalPosition:this.originalPosition}}});e.exte [...]
-{version:"1.9m2"});e.ui.plugin.add("resizable","alsoResize",{start:function(){var b=e(this).data("resizable").options,a=function(c){e(c).each(function(){var d=e(this);d.data("resizable-alsoresize",{width:parseInt(d.width(),10),height:parseInt(d.height(),10),left:parseInt(d.css("left"),10),top:parseInt(d.css("top"),10),position:d.css("position")})})};if(typeof b.alsoResize=="object"&&!b.alsoResize.parentNode)if(b.alsoResize.length){b.alsoResize=b.alsoResize[0];a(b.alsoResize)}else e.each( [...]
-function(c){a(c)});else a(b.alsoResize)},resize:function(b,a){var c=e(this).data("resizable");b=c.options;var d=c.originalSize,f=c.originalPosition,g={height:c.size.height-d.height||0,width:c.size.width-d.width||0,top:c.position.top-f.top||0,left:c.position.left-f.left||0},h=function(i,j){e(i).each(function(){var k=e(this),r=e(this).data("resizable-alsoresize"),q={},s=j&&j.length?j:k.parents(a.originalElement[0]).length?["width","height"]:["width","height","top","left"];e.each(s,function [...]
-(r[p]||0)+(g[p]||0))&&n>=0)q[p]=n||null});if(e.browser.opera&&/relative/.test(k.css("position"))){c._revertToRelativePosition=true;k.css({position:"absolute",top:"auto",left:"auto"})}k.css(q)})};typeof b.alsoResize=="object"&&!b.alsoResize.nodeType?e.each(b.alsoResize,function(i,j){h(i,j)}):h(b.alsoResize)},stop:function(){var b=e(this).data("resizable"),a=function(c){e(c).each(function(){var d=e(this);d.css({position:d.data("resizable-alsoresize").position})})};if(b._revertToRelativePos [...]
-false;typeof o.alsoResize=="object"&&!o.alsoResize.nodeType?e.each(o.alsoResize,function(c){a(c)}):a(o.alsoResize)}e(this).removeData("resizable-alsoresize")}});e.ui.plugin.add("resizable","animate",{stop:function(b){var a=e(this).data("resizable"),c=a.options,d=a._proportionallyResizeElements,f=d.length&&/textarea/i.test(d[0].nodeName),g=f&&e.ui.hasScroll(d[0],"left")?0:a.sizeDiff.height;f={width:a.size.width-(f?0:a.sizeDiff.width),height:a.size.height-g};g=parseInt(a.element.css("left" [...]
-a.originalPosition.left)||null;var h=parseInt(a.element.css("top"),10)+(a.position.top-a.originalPosition.top)||null;a.element.animate(e.extend(f,h&&g?{top:h,left:g}:{}),{duration:c.animateDuration,easing:c.animateEasing,step:function(){var i={width:parseInt(a.element.css("width"),10),height:parseInt(a.element.css("height"),10),top:parseInt(a.element.css("top"),10),left:parseInt(a.element.css("left"),10)};d&&d.length&&e(d[0]).css({width:i.width,height:i.height});a._updateCache(i);a._prop [...]
-b)}})}});e.ui.plugin.add("resizable","containment",{start:function(){var b=e(this).data("resizable"),a=b.element,c=b.options.containment;if(a=c instanceof e?c.get(0):/parent/.test(c)?a.parent().get(0):c){b.containerElement=e(a);if(/document/.test(c)||c==document){b.containerOffset={left:0,top:0};b.containerPosition={left:0,top:0};b.parentData={element:e(document),left:0,top:0,width:e(document).width(),height:e(document).height()||document.body.parentNode.scrollHeight}}else{var d=e(a),f=[ [...]
-"Right","Left","Bottom"]).each(function(i,j){f[i]=m(d.css("padding"+j))});b.containerOffset=d.offset();b.containerPosition=d.position();b.containerSize={height:d.innerHeight()-f[3],width:d.innerWidth()-f[1]};c=b.containerOffset;var g=b.containerSize.height,h=b.containerSize.width;h=e.ui.hasScroll(a,"left")?a.scrollWidth:h;g=e.ui.hasScroll(a)?a.scrollHeight:g;b.parentData={element:a,left:c.left,top:c.top,width:h,height:g}}}},resize:function(b){var a=e(this).data("resizable"),c=a.options,d [...]
-f=a.position;b=a._aspectRatio||b.shiftKey;var g={top:0,left:0},h=a.containerElement;if(h[0]!=document&&/static/.test(h.css("position")))g=d;if(f.left<(a._helper?d.left:0)){a.size.width+=a._helper?a.position.left-d.left:a.position.left-g.left;if(b)a.size.height=a.size.width/c.aspectRatio;a.position.left=c.helper?d.left:0}if(f.top<(a._helper?d.top:0)){a.size.height+=a._helper?a.position.top-d.top:a.position.top;if(b)a.size.width=a.size.height*c.aspectRatio;a.position.top=a._helper?d.top:0} [...]
-a.parentData.left+a.position.left;a.offset.top=a.parentData.top+a.position.top;c=Math.abs((a._helper?a.offset.left-g.left:a.offset.left-g.left)+a.sizeDiff.width);d=Math.abs((a._helper?a.offset.top-g.top:a.offset.top-d.top)+a.sizeDiff.height);f=a.containerElement.get(0)==a.element.parent().get(0);g=/relative|absolute/.test(a.containerElement.css("position"));if(f&&g)c-=a.parentData.left;if(c+a.size.width>=a.parentData.width){a.size.width=a.parentData.width-c;if(b)a.size.height=a.size.widt [...]
-a.size.height>=a.parentData.height){a.size.height=a.parentData.height-d;if(b)a.size.width=a.size.height*a.aspectRatio}},stop:function(){var b=e(this).data("resizable"),a=b.options,c=b.containerOffset,d=b.containerPosition,f=b.containerElement,g=e(b.helper),h=g.offset(),i=g.outerWidth()-b.sizeDiff.width;g=g.outerHeight()-b.sizeDiff.height;b._helper&&!a.animate&&/relative/.test(f.css("position"))&&e(this).css({left:h.left-d.left-c.left,width:i,height:g});b._helper&&!a.animate&&/static/.tes [...]
-e(this).css({left:h.left-d.left-c.left,width:i,height:g})}});e.ui.plugin.add("resizable","ghost",{start:function(){var b=e(this).data("resizable"),a=b.options,c=b.size;b.ghost=b.originalElement.clone();b.ghost.css({opacity:0.25,display:"block",position:"relative",height:c.height,width:c.width,margin:0,left:0,top:0}).addClass("ui-resizable-ghost").addClass(typeof a.ghost=="string"?a.ghost:"");b.ghost.appendTo(b.helper)},resize:function(){var b=e(this).data("resizable");b.ghost&&b.ghost.cs [...]
-height:b.size.height,width:b.size.width})},stop:function(){var b=e(this).data("resizable");b.ghost&&b.helper&&b.helper.get(0).removeChild(b.ghost.get(0))}});e.ui.plugin.add("resizable","grid",{resize:function(){var b=e(this).data("resizable"),a=b.options,c=b.size,d=b.originalSize,f=b.originalPosition,g=b.axis;a.grid=typeof a.grid=="number"?[a.grid,a.grid]:a.grid;var h=Math.round((c.width-d.width)/(a.grid[0]||1))*(a.grid[0]||1);a=Math.round((c.height-d.height)/(a.grid[1]||1))*(a.grid[1]|| [...]
-d.width+h;b.size.height=d.height+a}else if(/^(ne)$/.test(g)){b.size.width=d.width+h;b.size.height=d.height+a;b.position.top=f.top-a}else{if(/^(sw)$/.test(g)){b.size.width=d.width+h;b.size.height=d.height+a}else{b.size.width=d.width+h;b.size.height=d.height+a;b.position.top=f.top-a}b.position.left=f.left-h}}});var m=function(b){return parseInt(b,10)||0},l=function(b){return!isNaN(parseInt(b,10))}})(jQuery);
-(function(e){e.widget("ui.selectable",e.ui.mouse,{options:{appendTo:"body",autoRefresh:true,distance:0,filter:"*",tolerance:"touch"},_create:function(){var c=this;this.element.addClass("ui-selectable");this.dragged=false;var f;this.refresh=function(){f=e(c.options.filter,c.element[0]);f.each(function(){var d=e(this),b=d.offset();e.data(this,"selectable-item",{element:this,$element:d,left:b.left,top:b.top,right:b.left+d.outerWidth(),bottom:b.top+d.outerHeight(),startselected:false,selecte [...]
-selecting:d.hasClass("ui-selecting"),unselecting:d.hasClass("ui-unselecting")})})};this.refresh();this.selectees=f.addClass("ui-selectee");this._mouseInit();this.helper=e("<div class='ui-selectable-helper'></div>")},destroy:function(){this.selectees.removeClass("ui-selectee").removeData("selectable-item");this.element.removeClass("ui-selectable ui-selectable-disabled").removeData("selectable").unbind(".selectable");this._mouseDestroy();return this},_mouseStart:function(c){var f=this;this [...]
-c.pageY];if(!this.options.disabled){var d=this.options;this.selectees=e(d.filter,this.element[0]);this._trigger("start",c);e(d.appendTo).append(this.helper);this.helper.css({left:c.clientX,top:c.clientY,width:0,height:0});d.autoRefresh&&this.refresh();this.selectees.filter(".ui-selected").each(function(){var b=e.data(this,"selectable-item");b.startselected=true;if(!c.metaKey){b.$element.removeClass("ui-selected");b.selected=false;b.$element.addClass("ui-unselecting");b.unselecting=true;f [...]
-c,{unselecting:b.element})}});e(c.target).parents().andSelf().each(function(){var b=e.data(this,"selectable-item");if(b){var g=!c.metaKey||!b.$element.hasClass("ui-selected");b.$element.removeClass(g?"ui-unselecting":"ui-selected").addClass(g?"ui-selecting":"ui-unselecting");b.unselecting=!g;b.selecting=g;(b.selected=g)?f._trigger("selecting",c,{selecting:b.element}):f._trigger("unselecting",c,{unselecting:b.element});return false}})}},_mouseDrag:function(c){var f=this;this.dragged=true; [...]
-this.options,b=this.opos[0],g=this.opos[1],h=c.pageX,i=c.pageY;if(b>h){var j=h;h=b;b=j}if(g>i){j=i;i=g;g=j}this.helper.css({left:b,top:g,width:h-b,height:i-g});this.selectees.each(function(){var a=e.data(this,"selectable-item");if(!(!a||a.element==f.element[0])){var k=false;if(d.tolerance=="touch")k=!(a.left>h||a.right<b||a.top>i||a.bottom<g);else if(d.tolerance=="fit")k=a.left>b&&a.right<h&&a.top>g&&a.bottom<i;if(k){if(a.selected){a.$element.removeClass("ui-selected");a.selected=false}i [...]
-a.unselecting=false}if(!a.selecting){a.$element.addClass("ui-selecting");a.selecting=true;f._trigger("selecting",c,{selecting:a.element})}}else{if(a.selecting)if(c.metaKey&&a.startselected){a.$element.removeClass("ui-selecting");a.selecting=false;a.$element.addClass("ui-selected");a.selected=true}else{a.$element.removeClass("ui-selecting");a.selecting=false;if(a.startselected){a.$element.addClass("ui-unselecting");a.unselecting=true}f._trigger("unselecting",c,{unselecting:a.element})}if( [...]
-!a.startselected){a.$element.removeClass("ui-selected");a.selected=false;a.$element.addClass("ui-unselecting");a.unselecting=true;f._trigger("unselecting",c,{unselecting:a.element})}}}});return false}},_mouseStop:function(c){var f=this;this.dragged=false;e(".ui-unselecting",this.element[0]).each(function(){var d=e.data(this,"selectable-item");d.$element.removeClass("ui-unselecting");d.unselecting=false;d.startselected=false;f._trigger("unselected",c,{unselected:d.element})});e(".ui-selec [...]
-e.data(this,"selectable-item");d.$element.removeClass("ui-selecting").addClass("ui-selected");d.selecting=false;d.selected=true;d.startselected=true;f._trigger("selected",c,{selected:d.element})});this._trigger("stop",c);this.helper.remove();return false}});e.extend(e.ui.selectable,{version:"1.9m2"})})(jQuery);
-(function(d){d.widget("ui.sortable",d.ui.mouse,{widgetEventPrefix:"sort",options:{appendTo:"parent",axis:false,connectWith:false,containment:false,cursor:"auto",cursorAt:false,dropOnEmpty:true,forcePlaceholderSize:false,forceHelperSize:false,grid:false,handle:false,helper:"original",items:"> *",opacity:false,placeholder:false,revert:false,scroll:true,scrollSensitivity:20,scrollSpeed:20,scope:"default",tolerance:"intersect",zIndex:1E3},_create:function(){this.containerCache={};this.elemen [...]
-this.refresh();this.floating=this.items.length?/left|right/.test(this.items[0].item.css("float")):false;this.offset=this.element.offset();this._mouseInit()},destroy:function(){this.element.removeClass("ui-sortable ui-sortable-disabled").removeData("sortable").unbind(".sortable");this._mouseDestroy();for(var a=this.items.length-1;a>=0;a--)this.items[a].item.removeData("sortable-item");return this},_setOption:function(a,b){if(a==="disabled"){this.options[a]=b;this.widget()[b?"addClass":"re [...]
-arguments)},_mouseCapture:function(a,b){if(this.reverting)return false;if(this.options.disabled||this.options.type=="static")return false;this._refreshItems(a);var c=null,e=this;d(a.target).parents().each(function(){if(d.data(this,"sortable-item")==e){c=d(this);return false}});if(d.data(a.target,"sortable-item")==e)c=d(a.target);if(!c)return false;if(this.options.handle&&!b){var f=false;d(this.options.handle,c).find("*").andSelf().each(function(){if(this==a.target)f=true});if(!f)return f [...]
-c;this._removeCurrentsFromItems();return true},_mouseStart:function(a,b,c){b=this.options;var e=this;this.currentContainer=this;this.refreshPositions();this.helper=this._createHelper(a);this._cacheHelperProportions();this._cacheMargins();this.scrollParent=this.helper.scrollParent();this.offset=this.currentItem.offset();this.offset={top:this.offset.top-this.margins.top,left:this.offset.left-this.margins.left};this.helper.css("position","absolute");this.cssPosition=this.helper.css("positio [...]
-{click:{left:a.pageX-this.offset.left,top:a.pageY-this.offset.top},parent:this._getParentOffset(),relative:this._getRelativeOffset()});this.originalPosition=this._generatePosition(a);this.originalPageX=a.pageX;this.originalPageY=a.pageY;b.cursorAt&&this._adjustOffsetFromHelper(b.cursorAt);this.domPosition={prev:this.currentItem.prev()[0],parent:this.currentItem.parent()[0]};this.helper[0]!=this.currentItem[0]&&this.currentItem.hide();this._createPlaceholder();b.containment&&this._setCont [...]
-if(b.cursor){if(d("body").css("cursor"))this._storedCursor=d("body").css("cursor");d("body").css("cursor",b.cursor)}if(b.opacity){if(this.helper.css("opacity"))this._storedOpacity=this.helper.css("opacity");this.helper.css("opacity",b.opacity)}if(b.zIndex){if(this.helper.css("zIndex"))this._storedZIndex=this.helper.css("zIndex");this.helper.css("zIndex",b.zIndex)}if(this.scrollParent[0]!=document&&this.scrollParent[0].tagName!="HTML")this.overflowOffset=this.scrollParent.offset();this._t [...]
-a,this._uiHash());this._preserveHelperProportions||this._cacheHelperProportions();if(!c)for(c=this.containers.length-1;c>=0;c--)this.containers[c]._trigger("activate",a,e._uiHash(this));if(d.ui.ddmanager)d.ui.ddmanager.current=this;d.ui.ddmanager&&!b.dropBehaviour&&d.ui.ddmanager.prepareOffsets(this,a);this.dragging=true;this.helper.addClass("ui-sortable-helper");this._mouseDrag(a);return true},_mouseDrag:function(a){this.position=this._generatePosition(a);this.positionAbs=this._convertP [...]
-if(!this.lastPositionAbs)this.lastPositionAbs=this.positionAbs;if(this.options.scroll){var b=this.options,c=false;if(this.scrollParent[0]!=document&&this.scrollParent[0].tagName!="HTML"){if(this.overflowOffset.top+this.scrollParent[0].offsetHeight-a.pageY<b.scrollSensitivity)this.scrollParent[0].scrollTop=c=this.scrollParent[0].scrollTop+b.scrollSpeed;else if(a.pageY-this.overflowOffset.top<b.scrollSensitivity)this.scrollParent[0].scrollTop=c=this.scrollParent[0].scrollTop-b.scrollSpeed; [...]
-this.scrollParent[0].offsetWidth-a.pageX<b.scrollSensitivity)this.scrollParent[0].scrollLeft=c=this.scrollParent[0].scrollLeft+b.scrollSpeed;else if(a.pageX-this.overflowOffset.left<b.scrollSensitivity)this.scrollParent[0].scrollLeft=c=this.scrollParent[0].scrollLeft-b.scrollSpeed}else{if(a.pageY-d(document).scrollTop()<b.scrollSensitivity)c=d(document).scrollTop(d(document).scrollTop()-b.scrollSpeed);else if(d(window).height()-(a.pageY-d(document).scrollTop())<b.scrollSensitivity)c=d(do [...]
-b.scrollSpeed);if(a.pageX-d(document).scrollLeft()<b.scrollSensitivity)c=d(document).scrollLeft(d(document).scrollLeft()-b.scrollSpeed);else if(d(window).width()-(a.pageX-d(document).scrollLeft())<b.scrollSensitivity)c=d(document).scrollLeft(d(document).scrollLeft()+b.scrollSpeed)}c!==false&&d.ui.ddmanager&&!b.dropBehaviour&&d.ui.ddmanager.prepareOffsets(this,a)}this.positionAbs=this._convertPositionTo("absolute");if(!this.options.axis||this.options.axis!="y")this.helper[0].style.left=th [...]
-"px";if(!this.options.axis||this.options.axis!="x")this.helper[0].style.top=this.position.top+"px";for(b=this.items.length-1;b>=0;b--){c=this.items[b];var e=c.item[0],f=this._intersectsWithPointer(c);if(f)if(e!=this.currentItem[0]&&this.placeholder[f==1?"next":"prev"]()[0]!=e&&!d.ui.contains(this.placeholder[0],e)&&(this.options.type=="semi-dynamic"?!d.ui.contains(this.element[0],e):true)){this.direction=f==1?"down":"up";if(this.options.tolerance=="pointer"||this._intersectsWithSides(c)) [...]
-c);else break;this._trigger("change",a,this._uiHash());break}}this._contactContainers(a);d.ui.ddmanager&&d.ui.ddmanager.drag(this,a);this._trigger("sort",a,this._uiHash());this.lastPositionAbs=this.positionAbs;return false},_mouseStop:function(a,b){if(a){d.ui.ddmanager&&!this.options.dropBehaviour&&d.ui.ddmanager.drop(this,a);if(this.options.revert){var c=this;b=c.placeholder.offset();c.reverting=true;d(this.helper).animate({left:b.left-this.offset.parent.left-c.margins.left+(this.offset [...]
-document.body?0:this.offsetParent[0].scrollLeft),top:b.top-this.offset.parent.top-c.margins.top+(this.offsetParent[0]==document.body?0:this.offsetParent[0].scrollTop)},parseInt(this.options.revert,10)||500,function(){c._clear(a)})}else this._clear(a,b);return false}},cancel:function(){var a=this;if(this.dragging){this._mouseUp();this.options.helper=="original"?this.currentItem.css(this._storedCSS).removeClass("ui-sortable-helper"):this.currentItem.show();for(var b=this.containers.length- [...]
-null,a._uiHash(this));if(this.containers[b].containerCache.over){this.containers[b]._trigger("out",null,a._uiHash(this));this.containers[b].containerCache.over=0}}}this.placeholder[0].parentNode&&this.placeholder[0].parentNode.removeChild(this.placeholder[0]);this.options.helper!="original"&&this.helper&&this.helper[0].parentNode&&this.helper.remove();d.extend(this,{helper:null,dragging:false,reverting:false,_noFinalSort:null});this.domPosition.prev?d(this.domPosition.prev).after(this.cu [...]
-d(this.domPosition.parent).prepend(this.currentItem);return this},serialize:function(a){var b=this._getItemsAsjQuery(a&&a.connected),c=[];a=a||{};d(b).each(function(){var e=(d(a.item||this).attr(a.attribute||"id")||"").match(a.expression||/(.+)[-=_](.+)/);if(e)c.push((a.key||e[1]+"[]")+"="+(a.key&&a.expression?e[1]:e[2]))});return c.join("&")},toArray:function(a){var b=this._getItemsAsjQuery(a&&a.connected),c=[];a=a||{};b.each(function(){c.push(d(a.item||this).attr(a.attribute||"id")||"" [...]
-_intersectsWith:function(a){var b=this.positionAbs.left,c=b+this.helperProportions.width,e=this.positionAbs.top,f=e+this.helperProportions.height,g=a.left,h=g+a.width,i=a.top,k=i+a.height,j=this.offset.click.top,l=this.offset.click.left;j=e+j>i&&e+j<k&&b+l>g&&b+l<h;return this.options.tolerance=="pointer"||this.options.forcePointerForContainers||this.options.tolerance!="pointer"&&this.helperProportions[this.floating?"width":"height"]>a[this.floating?"width":"height"]?j:g<b+this.helperPro [...]
-2&&c-this.helperProportions.width/2<h&&i<e+this.helperProportions.height/2&&f-this.helperProportions.height/2<k},_intersectsWithPointer:function(a){var b=d.ui.isOverAxis(this.positionAbs.top+this.offset.click.top,a.top,a.height);a=d.ui.isOverAxis(this.positionAbs.left+this.offset.click.left,a.left,a.width);b=b&&a;a=this._getDragVerticalDirection();var c=this._getDragHorizontalDirection();if(!b)return false;return this.floating?c&&c=="right"||a=="down"?2:1:a&&(a=="down"?2:1)},_intersectsW [...]
-d.ui.isOverAxis(this.positionAbs.top+this.offset.click.top,a.top+a.height/2,a.height);a=d.ui.isOverAxis(this.positionAbs.left+this.offset.click.left,a.left+a.width/2,a.width);var c=this._getDragVerticalDirection(),e=this._getDragHorizontalDirection();return this.floating&&e?e=="right"&&a||e=="left"&&!a:c&&(c=="down"&&b||c=="up"&&!b)},_getDragVerticalDirection:function(){var a=this.positionAbs.top-this.lastPositionAbs.top;return a!=0&&(a>0?"down":"up")},_getDragHorizontalDirection:functio [...]
-this.positionAbs.left-this.lastPositionAbs.left;return a!=0&&(a>0?"right":"left")},refresh:function(a){this._refreshItems(a);this.refreshPositions();return this},_connectWith:function(){var a=this.options;return a.connectWith.constructor==String?[a.connectWith]:a.connectWith},_getItemsAsjQuery:function(a){var b=[],c=[],e=this._connectWith();if(e&&a)for(a=e.length-1;a>=0;a--)for(var f=d(e[a]),g=f.length-1;g>=0;g--){var h=d.data(f[g],"sortable");if(h&&h!=this&&!h.options.disabled)c.push([d [...]
-h.options.items.call(h.element):d(h.options.items,h.element).not(".ui-sortable-helper").not(".ui-sortable-placeholder"),h])}c.push([d.isFunction(this.options.items)?this.options.items.call(this.element,null,{options:this.options,item:this.currentItem}):d(this.options.items,this.element).not(".ui-sortable-helper").not(".ui-sortable-placeholder"),this]);for(a=c.length-1;a>=0;a--)c[a][0].each(function(){b.push(this)});return d(b)},_removeCurrentsFromItems:function(){for(var a=this.currentIt [...]
-b=0;b<this.items.length;b++)for(var c=0;c<a.length;c++)a[c]==this.items[b].item[0]&&this.items.splice(b,1)},_refreshItems:function(a){this.items=[];this.containers=[this];var b=this.items,c=[[d.isFunction(this.options.items)?this.options.items.call(this.element[0],a,{item:this.currentItem}):d(this.options.items,this.element),this]],e=this._connectWith();if(e)for(var f=e.length-1;f>=0;f--)for(var g=d(e[f]),h=g.length-1;h>=0;h--){var i=d.data(g[h],"sortable");if(i&&i!=this&&!i.options.disa [...]
-i.options.items.call(i.element[0],a,{item:this.currentItem}):d(i.options.items,i.element),i]);this.containers.push(i)}}for(f=c.length-1;f>=0;f--){a=c[f][1];e=c[f][0];h=0;for(g=e.length;h<g;h++){i=d(e[h]);i.data("sortable-item",a);b.push({item:i,instance:a,width:0,height:0,left:0,top:0})}}},refreshPositions:function(a){if(this.offsetParent&&this.helper)this.offset.parent=this._getParentOffset();for(var b=this.items.length-1;b>=0;b--){var c=this.items[b],e=this.options.toleranceElement?d(t [...]
-c.item):c.item;if(!a){c.width=e.outerWidth();c.height=e.outerHeight()}e=e.offset();c.left=e.left;c.top=e.top}if(this.options.custom&&this.options.custom.refreshContainers)this.options.custom.refreshContainers.call(this);else for(b=this.containers.length-1;b>=0;b--){e=this.containers[b].element.offset();this.containers[b].containerCache.left=e.left;this.containers[b].containerCache.top=e.top;this.containers[b].containerCache.width=this.containers[b].element.outerWidth();this.containers[b] [...]
-this.containers[b].element.outerHeight()}return this},_createPlaceholder:function(a){var b=a||this,c=b.options;if(!c.placeholder||c.placeholder.constructor==String){var e=c.placeholder;c.placeholder={element:function(){var f=d(document.createElement(b.currentItem[0].nodeName)).addClass(e||b.currentItem[0].className+" ui-sortable-placeholder").removeClass("ui-sortable-helper")[0];if(!e)f.style.visibility="hidden";return f},update:function(f,g){if(!(e&&!c.forcePlaceholderSize)){g.height()| [...]
-parseInt(b.currentItem.css("paddingTop")||0,10)-parseInt(b.currentItem.css("paddingBottom")||0,10));g.width()||g.width(b.currentItem.innerWidth()-parseInt(b.currentItem.css("paddingLeft")||0,10)-parseInt(b.currentItem.css("paddingRight")||0,10))}}}}b.placeholder=d(c.placeholder.element.call(b.element,b.currentItem));b.currentItem.after(b.placeholder);c.placeholder.update(b,b.placeholder)},_contactContainers:function(a){for(var b=null,c=null,e=this.containers.length-1;e>=0;e--)if(!d.ui.co [...]
-this.containers[e].element[0]))if(this._intersectsWith(this.containers[e].containerCache)){if(!(b&&d.ui.contains(this.containers[e].element[0],b.element[0]))){b=this.containers[e];c=e}}else if(this.containers[e].containerCache.over){this.containers[e]._trigger("out",a,this._uiHash(this));this.containers[e].containerCache.over=0}if(b)if(this.containers.length===1){this.containers[c]._trigger("over",a,this._uiHash(this));this.containers[c].containerCache.over=1}else if(this.currentContaine [...]
-1E4;e=null;for(var f=this.positionAbs[this.containers[c].floating?"left":"top"],g=this.items.length-1;g>=0;g--)if(d.ui.contains(this.containers[c].element[0],this.items[g].item[0])){var h=this.items[g][this.containers[c].floating?"left":"top"];if(Math.abs(h-f)<b){b=Math.abs(h-f);e=this.items[g]}}if(e||this.options.dropOnEmpty){this.currentContainer=this.containers[c];e?this._rearrange(a,e,null,true):this._rearrange(a,null,this.containers[c].element,true);this._trigger("change",a,this._ui [...]
-a,this._uiHash(this));this.options.placeholder.update(this.currentContainer,this.placeholder);this.containers[c]._trigger("over",a,this._uiHash(this));this.containers[c].containerCache.over=1}}},_createHelper:function(a){var b=this.options;a=d.isFunction(b.helper)?d(b.helper.apply(this.element[0],[a,this.currentItem])):b.helper=="clone"?this.currentItem.clone():this.currentItem;a.parents("body").length||d(b.appendTo!="parent"?b.appendTo:this.currentItem[0].parentNode)[0].appendChild(a[0] [...]
-this.currentItem[0])this._storedCSS={width:this.currentItem[0].style.width,height:this.currentItem[0].style.height,position:this.currentItem.css("position"),top:this.currentItem.css("top"),left:this.currentItem.css("left")};if(a[0].style.width==""||b.forceHelperSize)a.width(this.currentItem.width());if(a[0].style.height==""||b.forceHelperSize)a.height(this.currentItem.height());return a},_adjustOffsetFromHelper:function(a){if(typeof a=="string")a=a.split(" ");if(d.isArray(a))a={left:+a[0 [...]
-0};if("left"in a)this.offset.click.left=a.left+this.margins.left;if("right"in a)this.offset.click.left=this.helperProportions.width-a.right+this.margins.left;if("top"in a)this.offset.click.top=a.top+this.margins.top;if("bottom"in a)this.offset.click.top=this.helperProportions.height-a.bottom+this.margins.top},_getParentOffset:function(){this.offsetParent=this.helper.offsetParent();var a=this.offsetParent.offset();if(this.cssPosition=="absolute"&&this.scrollParent[0]!=document&&d.ui.conta [...]
-this.offsetParent[0])){a.left+=this.scrollParent.scrollLeft();a.top+=this.scrollParent.scrollTop()}if(this.offsetParent[0]==document.body||this.offsetParent[0].tagName&&this.offsetParent[0].tagName.toLowerCase()=="html"&&d.browser.msie)a={top:0,left:0};return{top:a.top+(parseInt(this.offsetParent.css("borderTopWidth"),10)||0),left:a.left+(parseInt(this.offsetParent.css("borderLeftWidth"),10)||0)}},_getRelativeOffset:function(){if(this.cssPosition=="relative"){var a=this.currentItem.posit [...]
-(parseInt(this.helper.css("top"),10)||0)+this.scrollParent.scrollTop(),left:a.left-(parseInt(this.helper.css("left"),10)||0)+this.scrollParent.scrollLeft()}}else return{top:0,left:0}},_cacheMargins:function(){this.margins={left:parseInt(this.currentItem.css("marginLeft"),10)||0,top:parseInt(this.currentItem.css("marginTop"),10)||0}},_cacheHelperProportions:function(){this.helperProportions={width:this.helper.outerWidth(),height:this.helper.outerHeight()}},_setContainment:function(){var a [...]
-if(a.containment=="parent")a.containment=this.helper[0].parentNode;if(a.containment=="document"||a.containment=="window")this.containment=[0-this.offset.relative.left-this.offset.parent.left,0-this.offset.relative.top-this.offset.parent.top,d(a.containment=="document"?document:window).width()-this.helperProportions.width-this.margins.left,(d(a.containment=="document"?document:window).height()||document.body.parentNode.scrollHeight)-this.helperProportions.height-this.margins.top];if(!/^(d [...]
-d(a.containment)[0];a=d(a.containment).offset();var c=d(b).css("overflow")!="hidden";this.containment=[a.left+(parseInt(d(b).css("borderLeftWidth"),10)||0)+(parseInt(d(b).css("paddingLeft"),10)||0)-this.margins.left,a.top+(parseInt(d(b).css("borderTopWidth"),10)||0)+(parseInt(d(b).css("paddingTop"),10)||0)-this.margins.top,a.left+(c?Math.max(b.scrollWidth,b.offsetWidth):b.offsetWidth)-(parseInt(d(b).css("borderLeftWidth"),10)||0)-(parseInt(d(b).css("paddingRight"),10)||0)-this.helperProp [...]
-this.margins.left,a.top+(c?Math.max(b.scrollHeight,b.offsetHeight):b.offsetHeight)-(parseInt(d(b).css("borderTopWidth"),10)||0)-(parseInt(d(b).css("paddingBottom"),10)||0)-this.helperProportions.height-this.margins.top]}},_convertPositionTo:function(a,b){if(!b)b=this.position;a=a=="absolute"?1:-1;var c=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&d.ui.contains(this.scrollParent[0],this.offsetParent[0]))?this.offsetParent:this.scrollParent,e=/(html|body)/i.test(c[0].tag [...]
-this.offset.relative.top*a+this.offset.parent.top*a-(d.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():e?0:c.scrollTop())*a),left:b.left+this.offset.relative.left*a+this.offset.parent.left*a-(d.browser.safari&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():e?0:c.scrollLeft())*a)}},_generatePosition:function(a){var b=this.options,c=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&d [...]
-this.offsetParent[0]))?this.offsetParent:this.scrollParent,e=/(html|body)/i.test(c[0].tagName);if(this.cssPosition=="relative"&&!(this.scrollParent[0]!=document&&this.scrollParent[0]!=this.offsetParent[0]))this.offset.relative=this._getRelativeOffset();var f=a.pageX,g=a.pageY;if(this.originalPosition){if(this.containment){if(a.pageX-this.offset.click.left<this.containment[0])f=this.containment[0]+this.offset.click.left;if(a.pageY-this.offset.click.top<this.containment[1])g=this.containme [...]
-if(a.pageX-this.offset.click.left>this.containment[2])f=this.containment[2]+this.offset.click.left;if(a.pageY-this.offset.click.top>this.containment[3])g=this.containment[3]+this.offset.click.top}if(b.grid){g=this.originalPageY+Math.round((g-this.originalPageY)/b.grid[1])*b.grid[1];g=this.containment?!(g-this.offset.click.top<this.containment[1]||g-this.offset.click.top>this.containment[3])?g:!(g-this.offset.click.top<this.containment[1])?g-b.grid[1]:g+b.grid[1]:g;f=this.originalPageX+Ma [...]
-this.originalPageX)/b.grid[0])*b.grid[0];f=this.containment?!(f-this.offset.click.left<this.containment[0]||f-this.offset.click.left>this.containment[2])?f:!(f-this.offset.click.left<this.containment[0])?f-b.grid[0]:f+b.grid[0]:f}}return{top:g-this.offset.click.top-this.offset.relative.top-this.offset.parent.top+(d.browser.safari&&this.cssPosition=="fixed"?0:this.cssPosition=="fixed"?-this.scrollParent.scrollTop():e?0:c.scrollTop()),left:f-this.offset.click.left-this.offset.relative.left [...]
-(d.browser.safari&&this.cssPosition=="fixed"?0:this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():e?0:c.scrollLeft())}},_rearrange:function(a,b,c,e){c?c[0].appendChild(this.placeholder[0]):b.item[0].parentNode.insertBefore(this.placeholder[0],this.direction=="down"?b.item[0]:b.item[0].nextSibling);this.counter=this.counter?++this.counter:1;var f=this,g=this.counter;window.setTimeout(function(){g==f.counter&&f.refreshPositions(!e)},0)},_clear:function(a,b){this.reverting=false;var [...]
-this.currentItem[0].parentNode&&this.placeholder.before(this.currentItem);this._noFinalSort=null;if(this.helper[0]==this.currentItem[0]){for(var e in this._storedCSS)if(this._storedCSS[e]=="auto"||this._storedCSS[e]=="static")this._storedCSS[e]="";this.currentItem.css(this._storedCSS).removeClass("ui-sortable-helper")}else this.currentItem.show();this.fromOutside&&!b&&c.push(function(f){this._trigger("receive",f,this._uiHash(this.fromOutside))});if((this.fromOutside||this.domPosition.pre [...]
-this.domPosition.parent!=this.currentItem.parent()[0])&&!b)c.push(function(f){this._trigger("update",f,this._uiHash())});if(!d.ui.contains(this.element[0],this.currentItem[0])){b||c.push(function(f){this._trigger("remove",f,this._uiHash())});for(e=this.containers.length-1;e>=0;e--)if(d.ui.contains(this.containers[e].element[0],this.currentItem[0])&&!b){c.push(function(f){return function(g){f._trigger("receive",g,this._uiHash(this))}}.call(this,this.containers[e]));c.push(function(f){retu [...]
-g,this._uiHash(this))}}.call(this,this.containers[e]))}}for(e=this.containers.length-1;e>=0;e--){b||c.push(function(f){return function(g){f._trigger("deactivate",g,this._uiHash(this))}}.call(this,this.containers[e]));if(this.containers[e].containerCache.over){c.push(function(f){return function(g){f._trigger("out",g,this._uiHash(this))}}.call(this,this.containers[e]));this.containers[e].containerCache.over=0}}this._storedCursor&&d("body").css("cursor",this._storedCursor);this._storedOpaci [...]
-this._storedOpacity);if(this._storedZIndex)this.helper.css("zIndex",this._storedZIndex=="auto"?"":this._storedZIndex);this.dragging=false;if(this.cancelHelperRemoval){if(!b){this._trigger("beforeStop",a,this._uiHash());for(e=0;e<c.length;e++)c[e].call(this,a);this._trigger("stop",a,this._uiHash())}return false}b||this._trigger("beforeStop",a,this._uiHash());this.placeholder[0].parentNode.removeChild(this.placeholder[0]);this.helper[0]!=this.currentItem[0]&&this.helper.remove();this.helpe [...]
-0;e<c.length;e++)c[e].call(this,a);this._trigger("stop",a,this._uiHash())}this.fromOutside=false;return true},_trigger:function(){d.Widget.prototype._trigger.apply(this,arguments)===false&&this.cancel()},_uiHash:function(a){var b=a||this;return{helper:b.helper,placeholder:b.placeholder||d([]),position:b.position,originalPosition:b.originalPosition,offset:b.positionAbs,item:b.currentItem,sender:a?a.element:null}}});d.extend(d.ui.sortable,{version:"1.9m2"})})(jQuery);
-jQuery.effects||function(f){function k(c){var a;if(c&&c.constructor==Array&&c.length==3)return c;if(a=/rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/.exec(c))return[parseInt(a[1],10),parseInt(a[2],10),parseInt(a[3],10)];if(a=/rgb\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*\)/.exec(c))return[parseFloat(a[1])*2.55,parseFloat(a[2])*2.55,parseFloat(a[3])*2.55];if(a=/#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/.exec(c))ret [...]
-16),parseInt(a[2],16),parseInt(a[3],16)];if(a=/#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])/.exec(c))return[parseInt(a[1]+a[1],16),parseInt(a[2]+a[2],16),parseInt(a[3]+a[3],16)];if(/rgba\(0, 0, 0, 0\)/.exec(c))return l.transparent;return l[f.trim(c).toLowerCase()]}function q(c,a){var b;do{b=f.curCSS(c,a);if(b!=""&&b!="transparent"||f.nodeName(c,"body"))break;a="backgroundColor"}while(c=c.parentNode);return k(b)}function m(){var c=document.defaultView?document.defaultView.getComputedStyle(thi [...]
-a={},b,d;if(c&&c.length&&c[0]&&c[c[0]])for(var e=c.length;e--;){b=c[e];if(typeof c[b]=="string"){d=b.replace(/\-(\w)/g,function(g,h){return h.toUpperCase()});a[d]=c[b]}}else for(b in c)if(typeof c[b]==="string")a[b]=c[b];return a}function n(c){var a,b;for(a in c){b=c[a];if(b==null||f.isFunction(b)||a in r||/scrollbar/.test(a)||!/color/i.test(a)&&isNaN(parseFloat(b)))delete c[a]}return c}function s(c,a){var b={_:0},d;for(d in a)if(c[d]!=a[d])b[d]=a[d];return b}function j(c,a,b,d){if(typeo [...]
-a;b=null;a=c;c=a.effect}if(f.isFunction(a)){d=a;b=null;a={}}if(f.isFunction(b)){d=b;b=null}if(typeof a=="number"||f.fx.speeds[a]){d=b;b=a;a={}}a=a||{};b=b||a.duration;b=f.fx.off?0:typeof b=="number"?b:f.fx.speeds[b]||f.fx.speeds._default;d=d||a.complete;return[c,a,b,d]}f.effects={};f.each(["backgroundColor","borderBottomColor","borderLeftColor","borderRightColor","borderTopColor","color","outlineColor"],function(c,a){f.fx.step[a]=function(b){if(!b.colorInit){b.start=q(b.elem,a);b.end=k(b [...]
-true}b.elem.style[a]="rgb("+Math.max(Math.min(parseInt(b.pos*(b.end[0]-b.start[0])+b.start[0],10),255),0)+","+Math.max(Math.min(parseInt(b.pos*(b.end[1]-b.start[1])+b.start[1],10),255),0)+","+Math.max(Math.min(parseInt(b.pos*(b.end[2]-b.start[2])+b.start[2],10),255),0)+")"}});var l={aqua:[0,255,255],azure:[240,255,255],beige:[245,245,220],black:[0,0,0],blue:[0,0,255],brown:[165,42,42],cyan:[0,255,255],darkblue:[0,0,139],darkcyan:[0,139,139],darkgrey:[169,169,169],darkgreen:[0,100,0],dark [...]
-183,107],darkmagenta:[139,0,139],darkolivegreen:[85,107,47],darkorange:[255,140,0],darkorchid:[153,50,204],darkred:[139,0,0],darksalmon:[233,150,122],darkviolet:[148,0,211],fuchsia:[255,0,255],gold:[255,215,0],green:[0,128,0],indigo:[75,0,130],khaki:[240,230,140],lightblue:[173,216,230],lightcyan:[224,255,255],lightgreen:[144,238,144],lightgrey:[211,211,211],lightpink:[255,182,193],lightyellow:[255,255,224],lime:[0,255,0],magenta:[255,0,255],maroon:[128,0,0],navy:[0,0,128],olive:[128,128 [...]
-165,0],pink:[255,192,203],purple:[128,0,128],violet:[128,0,128],red:[255,0,0],silver:[192,192,192],white:[255,255,255],yellow:[255,255,0],transparent:[255,255,255]},o=["add","remove","toggle"],r={border:1,borderBottom:1,borderColor:1,borderLeft:1,borderRight:1,borderTop:1,borderWidth:1,margin:1,padding:1};f.effects.animateClass=function(c,a,b,d){if(f.isFunction(b)){d=b;b=null}return this.each(function(){var e=f(this),g=e.attr("style")||" ",h=n(m.call(this)),p,t=e.attr("className");f.each [...]
-i){c[i]&&e[i+"Class"](c[i])});p=n(m.call(this));e.attr("className",t);e.animate(s(h,p),a,b,function(){f.each(o,function(u,i){c[i]&&e[i+"Class"](c[i])});if(typeof e.attr("style")=="object"){e.attr("style").cssText="";e.attr("style").cssText=g}else e.attr("style",g);d&&d.apply(this,arguments)})})};f.fn.extend({_addClass:f.fn.addClass,addClass:function(c,a,b,d){return a?f.effects.animateClass.apply(this,[{add:c},a,b,d]):this._addClass(c)},_removeClass:f.fn.removeClass,removeClass:function(c [...]
-f.effects.animateClass.apply(this,[{remove:c},a,b,d]):this._removeClass(c)},_toggleClass:f.fn.toggleClass,toggleClass:function(c,a,b,d,e){return typeof a=="boolean"||a===undefined?b?f.effects.animateClass.apply(this,[a?{add:c}:{remove:c},b,d,e]):this._toggleClass(c,a):f.effects.animateClass.apply(this,[{toggle:c},a,b,d])},switchClass:function(c,a,b,d,e){return f.effects.animateClass.apply(this,[{add:a,remove:c},b,d,e])}});f.extend(f.effects,{version:"1.9m2",save:function(c,a){for(var b=0 [...]
-null&&c.data("ec.storage."+a[b],c[0].style[a[b]])},restore:function(c,a){for(var b=0;b<a.length;b++)a[b]!==null&&c.css(a[b],c.data("ec.storage."+a[b]))},setMode:function(c,a){if(a=="toggle")a=c.is(":hidden")?"show":"hide";return a},getBaseline:function(c,a){var b;switch(c[0]){case "top":b=0;break;case "middle":b=0.5;break;case "bottom":b=1;break;default:b=c[0]/a.height}switch(c[1]){case "left":c=0;break;case "center":c=0.5;break;case "right":c=1;break;default:c=c[1]/a.width}return{x:c,y: [...]
-var a={width:c.outerWidth(true),height:c.outerHeight(true),"float":c.css("float")},b=f("<div></div>").addClass("ui-effects-wrapper").css({fontSize:"100%",background:"transparent",border:"none",margin:0,padding:0});c.wrap(b);b=c.parent();if(c.css("position")=="static"){b.css({position:"relative"});c.css({position:"relative"})}else{f.extend(a,{position:c.css("position"),zIndex:c.css("z-index")});f.each(["top","left","bottom","right"],function(d,e){a[e]=c.css(e);if(isNaN(parseInt(a[e],10))) [...]
-c.css({position:"relative",top:0,left:0})}return b.css(a).show()},removeWrapper:function(c){if(c.parent().is(".ui-effects-wrapper"))return c.parent().replaceWith(c);return c},setTransition:function(c,a,b,d){d=d||{};f.each(a,function(e,g){unit=c.cssUnit(g);if(unit[0]>0)d[g]=unit[0]*b+unit[1]});return d}});f.fn.extend({effect:function(c){var a=j.apply(this,arguments);a={options:a[1],duration:a[2],callback:a[3]};var b=f.effects[c];return b&&!f.fx.off?b.call(this,a):this},_show:f.fn.show,sho [...]
-typeof c=="number"||f.fx.speeds[c])return this._show.apply(this,arguments);else{var a=j.apply(this,arguments);a[1].mode="show";return this.effect.apply(this,a)}},_hide:f.fn.hide,hide:function(c){if(!c||typeof c=="number"||f.fx.speeds[c])return this._hide.apply(this,arguments);else{var a=j.apply(this,arguments);a[1].mode="hide";return this.effect.apply(this,a)}},__toggle:f.fn.toggle,toggle:function(c){if(!c||typeof c=="number"||f.fx.speeds[c]||typeof c=="boolean"||f.isFunction(c))return t [...]
-arguments);else{var a=j.apply(this,arguments);a[1].mode="toggle";return this.effect.apply(this,a)}},cssUnit:function(c){var a=this.css(c),b=[];f.each(["em","px","%","pt"],function(d,e){if(a.indexOf(e)>0)b=[parseFloat(a),e]});return b}});f.easing.jswing=f.easing.swing;f.extend(f.easing,{def:"easeOutQuad",swing:function(c,a,b,d,e){return f.easing[f.easing.def](c,a,b,d,e)},easeInQuad:function(c,a,b,d,e){return d*(a/=e)*a+b},easeOutQuad:function(c,a,b,d,e){return-d*(a/=e)*(a-2)+b},easeInOutQ [...]
-a,b,d,e){if((a/=e/2)<1)return d/2*a*a+b;return-d/2*(--a*(a-2)-1)+b},easeInCubic:function(c,a,b,d,e){return d*(a/=e)*a*a+b},easeOutCubic:function(c,a,b,d,e){return d*((a=a/e-1)*a*a+1)+b},easeInOutCubic:function(c,a,b,d,e){if((a/=e/2)<1)return d/2*a*a*a+b;return d/2*((a-=2)*a*a+2)+b},easeInQuart:function(c,a,b,d,e){return d*(a/=e)*a*a*a+b},easeOutQuart:function(c,a,b,d,e){return-d*((a=a/e-1)*a*a*a-1)+b},easeInOutQuart:function(c,a,b,d,e){if((a/=e/2)<1)return d/2*a*a*a*a+b;return-d/2*((a-=2 [...]
-b},easeInQuint:function(c,a,b,d,e){return d*(a/=e)*a*a*a*a+b},easeOutQuint:function(c,a,b,d,e){return d*((a=a/e-1)*a*a*a*a+1)+b},easeInOutQuint:function(c,a,b,d,e){if((a/=e/2)<1)return d/2*a*a*a*a*a+b;return d/2*((a-=2)*a*a*a*a+2)+b},easeInSine:function(c,a,b,d,e){return-d*Math.cos(a/e*(Math.PI/2))+d+b},easeOutSine:function(c,a,b,d,e){return d*Math.sin(a/e*(Math.PI/2))+b},easeInOutSine:function(c,a,b,d,e){return-d/2*(Math.cos(Math.PI*a/e)-1)+b},easeInExpo:function(c,a,b,d,e){return a==0? [...]
-10*(a/e-1))+b},easeOutExpo:function(c,a,b,d,e){return a==e?b+d:d*(-Math.pow(2,-10*a/e)+1)+b},easeInOutExpo:function(c,a,b,d,e){if(a==0)return b;if(a==e)return b+d;if((a/=e/2)<1)return d/2*Math.pow(2,10*(a-1))+b;return d/2*(-Math.pow(2,-10*--a)+2)+b},easeInCirc:function(c,a,b,d,e){return-d*(Math.sqrt(1-(a/=e)*a)-1)+b},easeOutCirc:function(c,a,b,d,e){return d*Math.sqrt(1-(a=a/e-1)*a)+b},easeInOutCirc:function(c,a,b,d,e){if((a/=e/2)<1)return-d/2*(Math.sqrt(1-a*a)-1)+b;return d/2*(Math.sqrt( [...]
-a)+1)+b},easeInElastic:function(c,a,b,d,e){c=1.70158;var g=0,h=d;if(a==0)return b;if((a/=e)==1)return b+d;g||(g=e*0.3);if(h<Math.abs(d)){h=d;c=g/4}else c=g/(2*Math.PI)*Math.asin(d/h);return-(h*Math.pow(2,10*(a-=1))*Math.sin((a*e-c)*2*Math.PI/g))+b},easeOutElastic:function(c,a,b,d,e){c=1.70158;var g=0,h=d;if(a==0)return b;if((a/=e)==1)return b+d;g||(g=e*0.3);if(h<Math.abs(d)){h=d;c=g/4}else c=g/(2*Math.PI)*Math.asin(d/h);return h*Math.pow(2,-10*a)*Math.sin((a*e-c)*2*Math.PI/g)+d+b},easeIn [...]
-a,b,d,e){c=1.70158;var g=0,h=d;if(a==0)return b;if((a/=e/2)==2)return b+d;g||(g=e*0.3*1.5);if(h<Math.abs(d)){h=d;c=g/4}else c=g/(2*Math.PI)*Math.asin(d/h);if(a<1)return-0.5*h*Math.pow(2,10*(a-=1))*Math.sin((a*e-c)*2*Math.PI/g)+b;return h*Math.pow(2,-10*(a-=1))*Math.sin((a*e-c)*2*Math.PI/g)*0.5+d+b},easeInBack:function(c,a,b,d,e,g){if(g==undefined)g=1.70158;return d*(a/=e)*a*((g+1)*a-g)+b},easeOutBack:function(c,a,b,d,e,g){if(g==undefined)g=1.70158;return d*((a=a/e-1)*a*((g+1)*a+g)+1)+b}, [...]
-a,b,d,e,g){if(g==undefined)g=1.70158;if((a/=e/2)<1)return d/2*a*a*(((g*=1.525)+1)*a-g)+b;return d/2*((a-=2)*a*(((g*=1.525)+1)*a+g)+2)+b},easeInBounce:function(c,a,b,d,e){return d-f.easing.easeOutBounce(c,e-a,0,d,e)+b},easeOutBounce:function(c,a,b,d,e){return(a/=e)<1/2.75?d*7.5625*a*a+b:a<2/2.75?d*(7.5625*(a-=1.5/2.75)*a+0.75)+b:a<2.5/2.75?d*(7.5625*(a-=2.25/2.75)*a+0.9375)+b:d*(7.5625*(a-=2.625/2.75)*a+0.984375)+b},easeInOutBounce:function(c,a,b,d,e){if(a<e/2)return f.easing.easeInBounce [...]
-d,e)*0.5+b;return f.easing.easeOutBounce(c,a*2-e,0,d,e)*0.5+d*0.5+b}})}(jQuery);
-(function(b){b.effects.blind=function(c){return this.queue(function(){var a=b(this),g=["position","top","left"],f=b.effects.setMode(a,c.options.mode||"hide"),d=c.options.direction||"vertical";b.effects.save(a,g);a.show();var e=b.effects.createWrapper(a).css({overflow:"hidden"}),h=d=="vertical"?"height":"width";d=d=="vertical"?e.height():e.width();f=="show"&&e.css(h,0);var i={};i[h]=f=="show"?d:0;e.animate(i,c.duration,c.options.easing,function(){f=="hide"&&a.hide();b.effects.restore(a,g) [...]
-c.callback&&c.callback.apply(a[0],arguments);a.dequeue()})})}})(jQuery);
-(function(e){e.effects.bounce=function(b){return this.queue(function(){var a=e(this),l=["position","top","left"],h=e.effects.setMode(a,b.options.mode||"effect"),d=b.options.direction||"up",c=b.options.distance||20,m=b.options.times||5,i=b.duration||250;/show|hide/.test(h)&&l.push("opacity");e.effects.save(a,l);a.show();e.effects.createWrapper(a);var f=d=="up"||d=="down"?"top":"left";d=d=="up"||d=="left"?"pos":"neg";c=b.options.distance||(f=="top"?a.outerHeight({margin:true})/3:a.outerWid [...]
-3);if(h=="show")a.css("opacity",0).css(f,d=="pos"?-c:c);if(h=="hide")c/=m*2;h!="hide"&&m--;if(h=="show"){var g={opacity:1};g[f]=(d=="pos"?"+=":"-=")+c;a.animate(g,i/2,b.options.easing);c/=2;m--}for(g=0;g<m;g++){var j={},k={};j[f]=(d=="pos"?"-=":"+=")+c;k[f]=(d=="pos"?"+=":"-=")+c;a.animate(j,i/2,b.options.easing).animate(k,i/2,b.options.easing);c=h=="hide"?c*2:c/2}if(h=="hide"){g={opacity:0};g[f]=(d=="pos"?"-=":"+=")+c;a.animate(g,i/2,b.options.easing,function(){a.hide();e.effects.restor [...]
-b.callback&&b.callback.apply(this,arguments)})}else{j={};k={};j[f]=(d=="pos"?"-=":"+=")+c;k[f]=(d=="pos"?"+=":"-=")+c;a.animate(j,i/2,b.options.easing).animate(k,i/2,b.options.easing,function(){e.effects.restore(a,l);e.effects.removeWrapper(a);b.callback&&b.callback.apply(this,arguments)})}a.queue("fx",function(){a.dequeue()});a.dequeue()})}})(jQuery);
-(function(b){b.effects.clip=function(e){return this.queue(function(){var a=b(this),i=["position","top","left","height","width"],f=b.effects.setMode(a,e.options.mode||"hide"),c=e.options.direction||"vertical";b.effects.save(a,i);a.show();var d=b.effects.createWrapper(a).css({overflow:"hidden"});d=a[0].tagName=="IMG"?d:a;var g={size:c=="vertical"?"height":"width",position:c=="vertical"?"top":"left"};c=c=="vertical"?d.height():d.width();if(f=="show"){d.css(g.size,0);d.css(g.position,c/2)}va [...]
-f=="show"?c:0;h[g.position]=f=="show"?0:c/2;d.animate(h,{queue:false,duration:e.duration,easing:e.options.easing,complete:function(){f=="hide"&&a.hide();b.effects.restore(a,i);b.effects.removeWrapper(a);e.callback&&e.callback.apply(a[0],arguments);a.dequeue()}})})}})(jQuery);
-(function(c){c.effects.drop=function(d){return this.queue(function(){var a=c(this),h=["position","top","left","opacity"],e=c.effects.setMode(a,d.options.mode||"hide"),b=d.options.direction||"left";c.effects.save(a,h);a.show();c.effects.createWrapper(a);var f=b=="up"||b=="down"?"top":"left";b=b=="up"||b=="left"?"pos":"neg";var g=d.options.distance||(f=="top"?a.outerHeight({margin:true})/2:a.outerWidth({margin:true})/2);if(e=="show")a.css("opacity",0).css(f,b=="pos"?-g:g);var i={opacity:e= [...]
-0};i[f]=(e=="show"?b=="pos"?"+=":"-=":b=="pos"?"-=":"+=")+g;a.animate(i,{queue:false,duration:d.duration,easing:d.options.easing,complete:function(){e=="hide"&&a.hide();c.effects.restore(a,h);c.effects.removeWrapper(a);d.callback&&d.callback.apply(this,arguments);a.dequeue()}})})}})(jQuery);
-(function(j){j.effects.explode=function(a){return this.queue(function(){var c=a.options.pieces?Math.round(Math.sqrt(a.options.pieces)):3,d=a.options.pieces?Math.round(Math.sqrt(a.options.pieces)):3;a.options.mode=a.options.mode=="toggle"?j(this).is(":visible")?"hide":"show":a.options.mode;var b=j(this).show().css("visibility","hidden"),g=b.offset();g.top-=parseInt(b.css("marginTop"),10)||0;g.left-=parseInt(b.css("marginLeft"),10)||0;for(var h=b.outerWidth(true),i=b.outerHeight(true),e=0; [...]
-0;f<d;f++)b.clone().appendTo("body").wrap("<div></div>").css({position:"absolute",visibility:"visible",left:-f*(h/d),top:-e*(i/c)}).parent().addClass("ui-effects-explode").css({position:"absolute",overflow:"hidden",width:h/d,height:i/c,left:g.left+f*(h/d)+(a.options.mode=="show"?(f-Math.floor(d/2))*(h/d):0),top:g.top+e*(i/c)+(a.options.mode=="show"?(e-Math.floor(c/2))*(i/c):0),opacity:a.options.mode=="show"?0:1}).animate({left:g.left+f*(h/d)+(a.options.mode=="show"?0:(f-Math.floor(d/2))* [...]
-e*(i/c)+(a.options.mode=="show"?0:(e-Math.floor(c/2))*(i/c)),opacity:a.options.mode=="show"?1:0},a.duration||500);setTimeout(function(){a.options.mode=="show"?b.css({visibility:"visible"}):b.css({visibility:"visible"}).hide();a.callback&&a.callback.apply(b[0]);b.dequeue();j("div.ui-effects-explode").remove()},a.duration||500)})}})(jQuery);
-(function(b){b.effects.fade=function(a){return this.queue(function(){var c=b(this),d=b.effects.setMode(c,a.options.mode||"hide");c.animate({opacity:d},{queue:false,duration:a.duration,easing:a.options.easing,complete:function(){a.callback&&a.callback.apply(this,arguments);c.dequeue()}})})}})(jQuery);
-(function(c){c.effects.fold=function(a){return this.queue(function(){var b=c(this),j=["position","top","left"],d=c.effects.setMode(b,a.options.mode||"hide"),g=a.options.size||15,h=!!a.options.horizFirst,k=a.duration?a.duration/2:c.fx.speeds._default/2;c.effects.save(b,j);b.show();var e=c.effects.createWrapper(b).css({overflow:"hidden"}),f=d=="show"!=h,l=f?["width","height"]:["height","width"];f=f?[e.width(),e.height()]:[e.height(),e.width()];var i=/([0-9]+)%/.exec(g);if(i)g=parseInt(i[1] [...]
-f[d=="hide"?0:1];if(d=="show")e.css(h?{height:0,width:g}:{height:g,width:0});h={};i={};h[l[0]]=d=="show"?f[0]:g;i[l[1]]=d=="show"?f[1]:0;e.animate(h,k,a.options.easing).animate(i,k,a.options.easing,function(){d=="hide"&&b.hide();c.effects.restore(b,j);c.effects.removeWrapper(b);a.callback&&a.callback.apply(b[0],arguments);b.dequeue()})})}})(jQuery);
-(function(b){b.effects.highlight=function(c){return this.queue(function(){var a=b(this),e=["backgroundImage","backgroundColor","opacity"],d=b.effects.setMode(a,c.options.mode||"show"),f={backgroundColor:a.css("backgroundColor")};if(d=="hide")f.opacity=0;b.effects.save(a,e);a.show().css({backgroundImage:"none",backgroundColor:c.options.color||"#ffff99"}).animate(f,{queue:false,duration:c.duration,easing:c.options.easing,complete:function(){d=="hide"&&a.hide();b.effects.restore(a,e);d=="sh [...]
-this.style.removeAttribute("filter");c.callback&&c.callback.apply(this,arguments);a.dequeue()}})})}})(jQuery);
-(function(d){d.effects.pulsate=function(a){return this.queue(function(){var b=d(this),c=d.effects.setMode(b,a.options.mode||"show");times=(a.options.times||5)*2-1;duration=a.duration?a.duration/2:d.fx.speeds._default/2;isVisible=b.is(":visible");animateTo=0;if(!isVisible){b.css("opacity",0).show();animateTo=1}if(c=="hide"&&isVisible||c=="show"&&!isVisible)times--;for(c=0;c<times;c++){b.animate({opacity:animateTo},duration,a.options.easing);animateTo=(animateTo+1)%2}b.animate({opacity:ani [...]
-a.options.easing,function(){animateTo==0&&b.hide();a.callback&&a.callback.apply(this,arguments)});b.queue("fx",function(){b.dequeue()}).dequeue()})}})(jQuery);
-(function(c){c.effects.puff=function(b){return this.queue(function(){var a=c(this),e=c.effects.setMode(a,b.options.mode||"hide"),g=parseInt(b.options.percent,10)||150,h=g/100,i={height:a.height(),width:a.width()};c.extend(b.options,{fade:true,mode:e,percent:e=="hide"?g:100,from:e=="hide"?i:{height:i.height*h,width:i.width*h}});a.effect("scale",b.options,b.duration,b.callback);a.dequeue()})};c.effects.scale=function(b){return this.queue(function(){var a=c(this),e=c.extend(true,{},b.option [...]
-b.options.mode||"effect"),h=parseInt(b.options.percent,10)||(parseInt(b.options.percent,10)==0?0:g=="hide"?0:100),i=b.options.direction||"both",f=b.options.origin;if(g!="effect"){e.origin=f||["middle","center"];e.restore=true}f={height:a.height(),width:a.width()};a.from=b.options.from||(g=="show"?{height:0,width:0}:f);h={y:i!="horizontal"?h/100:1,x:i!="vertical"?h/100:1};a.to={height:f.height*h.y,width:f.width*h.x};if(b.options.fade){if(g=="show"){a.from.opacity=0;a.to.opacity=1}if(g=="h [...]
-1;a.to.opacity=0}}e.from=a.from;e.to=a.to;e.mode=g;a.effect("size",e,b.duration,b.callback);a.dequeue()})};c.effects.size=function(b){return this.queue(function(){var a=c(this),e=["position","top","left","width","height","overflow","opacity"],g=["position","top","left","overflow","opacity"],h=["width","height","overflow"],i=["fontSize"],f=["borderTopWidth","borderBottomWidth","paddingTop","paddingBottom"],k=["borderLeftWidth","borderRightWidth","paddingLeft","paddingRight"],p=c.effects.s [...]
-b.options.mode||"effect"),n=b.options.restore||false,m=b.options.scale||"both",l=b.options.origin,j={height:a.height(),width:a.width()};a.from=b.options.from||j;a.to=b.options.to||j;if(l){l=c.effects.getBaseline(l,j);a.from.top=(j.height-a.from.height)*l.y;a.from.left=(j.width-a.from.width)*l.x;a.to.top=(j.height-a.to.height)*l.y;a.to.left=(j.width-a.to.width)*l.x}var d={from:{y:a.from.height/j.height,x:a.from.width/j.width},to:{y:a.to.height/j.height,x:a.to.width/j.width}};if(m=="box"|| [...]
-d.to.y){e=e.concat(f);a.from=c.effects.setTransition(a,f,d.from.y,a.from);a.to=c.effects.setTransition(a,f,d.to.y,a.to)}if(d.from.x!=d.to.x){e=e.concat(k);a.from=c.effects.setTransition(a,k,d.from.x,a.from);a.to=c.effects.setTransition(a,k,d.to.x,a.to)}}if(m=="content"||m=="both")if(d.from.y!=d.to.y){e=e.concat(i);a.from=c.effects.setTransition(a,i,d.from.y,a.from);a.to=c.effects.setTransition(a,i,d.to.y,a.to)}c.effects.save(a,n?e:g);a.show();c.effects.createWrapper(a);a.css("overflow"," [...]
-if(m=="content"||m=="both"){f=f.concat(["marginTop","marginBottom"]).concat(i);k=k.concat(["marginLeft","marginRight"]);h=e.concat(f).concat(k);a.find("*[width]").each(function(){child=c(this);n&&c.effects.save(child,h);var o={height:child.height(),width:child.width()};child.from={height:o.height*d.from.y,width:o.width*d.from.x};child.to={height:o.height*d.to.y,width:o.width*d.to.x};if(d.from.y!=d.to.y){child.from=c.effects.setTransition(child,f,d.from.y,child.from);child.to=c.effects.se [...]
-f,d.to.y,child.to)}if(d.from.x!=d.to.x){child.from=c.effects.setTransition(child,k,d.from.x,child.from);child.to=c.effects.setTransition(child,k,d.to.x,child.to)}child.css(child.from);child.animate(child.to,b.duration,b.options.easing,function(){n&&c.effects.restore(child,h)})})}a.animate(a.to,{queue:false,duration:b.duration,easing:b.options.easing,complete:function(){a.to.opacity===0&&a.css("opacity",a.from.opacity);p=="hide"&&a.hide();c.effects.restore(a,n?e:g);c.effects.removeWrapper [...]
-b.callback.apply(this,arguments);a.dequeue()}})})}})(jQuery);
-(function(d){d.effects.shake=function(a){return this.queue(function(){var b=d(this),j=["position","top","left"];d.effects.setMode(b,a.options.mode||"effect");var c=a.options.direction||"left",e=a.options.distance||20,l=a.options.times||3,f=a.duration||a.options.duration||140;d.effects.save(b,j);b.show();d.effects.createWrapper(b);var g=c=="up"||c=="down"?"top":"left",h=c=="up"||c=="left"?"pos":"neg";c={};var i={},k={};c[g]=(h=="pos"?"-=":"+=")+e;i[g]=(h=="pos"?"+=":"-=")+e*2;k[g]=(h=="po [...]
-e*2;b.animate(c,f,a.options.easing);for(e=1;e<l;e++)b.animate(i,f,a.options.easing).animate(k,f,a.options.easing);b.animate(i,f,a.options.easing).animate(c,f/2,a.options.easing,function(){d.effects.restore(b,j);d.effects.removeWrapper(b);a.callback&&a.callback.apply(this,arguments)});b.queue("fx",function(){b.dequeue()});b.dequeue()})}})(jQuery);
-(function(c){c.effects.slide=function(d){return this.queue(function(){var a=c(this),h=["position","top","left"],e=c.effects.setMode(a,d.options.mode||"show"),b=d.options.direction||"left";c.effects.save(a,h);a.show();c.effects.createWrapper(a).css({overflow:"hidden"});var f=b=="up"||b=="down"?"top":"left";b=b=="up"||b=="left"?"pos":"neg";var g=d.options.distance||(f=="top"?a.outerHeight({margin:true}):a.outerWidth({margin:true}));if(e=="show")a.css(f,b=="pos"?-g:g);var i={};i[f]=(e=="sho [...]
-"+=":"-=":b=="pos"?"-=":"+=")+g;a.animate(i,{queue:false,duration:d.duration,easing:d.options.easing,complete:function(){e=="hide"&&a.hide();c.effects.restore(a,h);c.effects.removeWrapper(a);d.callback&&d.callback.apply(this,arguments);a.dequeue()}})})}})(jQuery);
-(function(e){e.effects.transfer=function(a){return this.queue(function(){var b=e(this),c=e(a.options.to),d=c.offset();c={top:d.top,left:d.left,height:c.innerHeight(),width:c.innerWidth()};d=b.offset();var f=e('<div class="ui-effects-transfer"></div>').appendTo(document.body).addClass(a.options.className).css({top:d.top,left:d.left,height:b.innerHeight(),width:b.innerWidth(),position:"absolute"}).animate(c,a.duration,a.options.easing,function(){f.remove();a.callback&&a.callback.apply(b[0] [...]
-b.dequeue()})})}})(jQuery);
-(function(d){d.widget("ui.accordion",{options:{active:0,animated:"slide",autoHeight:true,clearStyle:false,collapsible:false,event:"click",fillSpace:false,header:"> li > :first-child,> :not(li):even",icons:{header:"ui-icon-triangle-1-e",headerSelected:"ui-icon-triangle-1-s"},navigation:false,navigationFilter:function(){return this.href.toLowerCase()==location.href.toLowerCase()}},_create:function(){var a=this.options,b=this;this.running=0;this.element.addClass("ui-accordion ui-widget ui-h [...]
-this.element.children("li").addClass("ui-accordion-li-fix");this.headers=this.element.find(a.header).addClass("ui-accordion-header ui-helper-reset ui-state-default ui-corner-all").bind("mouseenter.accordion",function(){d(this).addClass("ui-state-hover")}).bind("mouseleave.accordion",function(){d(this).removeClass("ui-state-hover")}).bind("focus.accordion",function(){d(this).addClass("ui-state-focus")}).bind("blur.accordion",function(){d(this).removeClass("ui-state-focus")});this.headers. [...]
-if(a.navigation){var c=this.element.find("a").filter(a.navigationFilter);if(c.length){var f=c.closest(".ui-accordion-header");this.active=f.length?f:c.closest(".ui-accordion-content").prev()}}this.active=this._findActive(this.active||a.active).toggleClass("ui-state-default").toggleClass("ui-state-active").toggleClass("ui-corner-all").toggleClass("ui-corner-top");this.active.next().addClass("ui-accordion-content-active");this._createIcons();this.resize();this.element.attr("role","tablist" [...]
-"tab").bind("keydown",function(g){return b._keydown(g)}).next().attr("role","tabpanel");this.headers.not(this.active||"").attr("aria-expanded","false").attr("tabIndex","-1").next().hide();this.active.length?this.active.attr("aria-expanded","true").attr("tabIndex","0"):this.headers.eq(0).attr("tabIndex","0");d.browser.safari||this.headers.find("a").attr("tabIndex","-1");a.event&&this.headers.bind(a.event+".accordion",function(g){b._clickHandler.call(b,g,this);g.preventDefault()})},_create [...]
-this.options;if(a.icons){d("<span/>").addClass("ui-icon "+a.icons.header).prependTo(this.headers);this.active.find(".ui-icon").toggleClass(a.icons.header).toggleClass(a.icons.headerSelected);this.element.addClass("ui-accordion-icons")}},_destroyIcons:function(){this.headers.children(".ui-icon").remove();this.element.removeClass("ui-accordion-icons")},destroy:function(){var a=this.options;this.element.removeClass("ui-accordion ui-widget ui-helper-reset").removeAttr("role").unbind(".accord [...]
-this.headers.unbind(".accordion").removeClass("ui-accordion-header ui-helper-reset ui-state-default ui-corner-all ui-state-active ui-corner-top").removeAttr("role").removeAttr("aria-expanded").removeAttr("tabIndex");this.headers.find("a").removeAttr("tabIndex");this._destroyIcons();var b=this.headers.next().css("display","").removeAttr("role").removeClass("ui-helper-reset ui-widget-content ui-corner-bottom ui-accordion-content ui-accordion-content-active");if(a.autoHeight||a.fillHeight)b [...]
-"");return this},_setOption:function(a,b){this._superApply("_setOption",arguments);a=="active"&&this.activate(b);if(a=="icons"){this._destroyIcons();b&&this._createIcons()}},_keydown:function(a){var b=d.ui.keyCode;if(!(this.options.disabled||a.altKey||a.ctrlKey)){var c=this.headers.length,f=this.headers.index(a.target),g=false;switch(a.keyCode){case b.RIGHT:case b.DOWN:g=this.headers[(f+1)%c];break;case b.LEFT:case b.UP:g=this.headers[(f-1+c)%c];break;case b.SPACE:case b.ENTER:this._clic [...]
-a.target);a.preventDefault()}if(g){d(a.target).attr("tabIndex","-1");d(g).attr("tabIndex","0");g.focus();return false}return true}},resize:function(){var a=this.options,b;if(a.fillSpace){if(d.browser.msie){var c=this.element.parent().css("overflow");this.element.parent().css("overflow","hidden")}b=this.element.parent().height();d.browser.msie&&this.element.parent().css("overflow",c);this.headers.each(function(){b-=d(this).outerHeight(true)});this.headers.next().each(function(){d(this).he [...]
-b-d(this).innerHeight()+d(this).height()))}).css("overflow","auto")}else if(a.autoHeight){b=0;this.headers.next().each(function(){b=Math.max(b,d(this).height())}).height(b)}return this},activate:function(a){this.options.active=a;a=this._findActive(a)[0];this._clickHandler({target:a},a);return this},_findActive:function(a){return a?typeof a=="number"?this.headers.filter(":eq("+a+")"):this.headers.not(this.headers.not(a)):a===false?d([]):this.headers.filter(":eq(0)")},_clickHandler:functio [...]
-this.options;if(!c.disabled)if(a.target){a=d(a.currentTarget||b);b=a[0]==this.active[0];c.active=c.collapsible&&b?false:d(".ui-accordion-header",this.element).index(a);if(!(this.running||!c.collapsible&&b)){this.active.removeClass("ui-state-active ui-corner-top").addClass("ui-state-default ui-corner-all").find(".ui-icon").removeClass(c.icons.headerSelected).addClass(c.icons.header);if(!b){a.removeClass("ui-state-default ui-corner-all").addClass("ui-state-active ui-corner-top").find(".ui- [...]
-a.next().addClass("ui-accordion-content-active")}e=a.next();f=this.active.next();g={options:c,newHeader:b&&c.collapsible?d([]):a,oldHeader:this.active,newContent:b&&c.collapsible?d([]):e,oldContent:f};c=this.headers.index(this.active[0])>this.headers.index(a[0]);this.active=b?d([]):a;this._toggle(e,f,g,b,c)}}else if(c.collapsible){this.active.removeClass("ui-state-active ui-corner-top").addClass("ui-state-default ui-corner-all").find(".ui-icon").removeClass(c.icons.headerSelected).addCla [...]
-this.active.next().addClass("ui-accordion-content-active");var f=this.active.next(),g={options:c,newHeader:d([]),oldHeader:c.active,newContent:d([]),oldContent:f},e=this.active=d([]);this._toggle(e,f,g)}},_toggle:function(a,b,c,f,g){var e=this.options,k=this;this.toShow=a;this.toHide=b;this.data=c;var i=function(){if(k)return k._completed.apply(k,arguments)};this._trigger("changestart",null,this.data);this.running=b.size()===0?a.size():b.size();if(e.animated){c={};c=e.collapsible&&f?{toS [...]
-toHide:b,complete:i,down:g,autoHeight:e.autoHeight||e.fillSpace}:{toShow:a,toHide:b,complete:i,down:g,autoHeight:e.autoHeight||e.fillSpace};if(!e.proxied)e.proxied=e.animated;if(!e.proxiedDuration)e.proxiedDuration=e.duration;e.animated=d.isFunction(e.proxied)?e.proxied(c):e.proxied;e.duration=d.isFunction(e.proxiedDuration)?e.proxiedDuration(c):e.proxiedDuration;f=d.ui.accordion.animations;var h=e.duration,j=e.animated;if(j&&!f[j]&&!d.easing[j])j="slide";f[j]||(f[j]=function(l){this.sli [...]
-duration:h||700})});f[j](c)}else{if(e.collapsible&&f)a.toggle();else{b.hide();a.show()}i(true)}b.prev().attr("aria-expanded","false").attr("tabIndex","-1").blur();a.prev().attr("aria-expanded","true").attr("tabIndex","0").focus()},_completed:function(a){var b=this.options;this.running=a?0:--this.running;if(!this.running){b.clearStyle&&this.toShow.add(this.toHide).css({height:"",overflow:""});this.toHide.removeClass("ui-accordion-content-active");this._trigger("change",null,this.data)}}}) [...]
-{version:"1.9m2",animations:{slide:function(a,b){a=d.extend({easing:"swing",duration:300},a,b);if(a.toHide.size())if(a.toShow.size()){var c=a.toShow.css("overflow"),f=0,g={},e={},k;b=a.toShow;k=b[0].style.width;b.width(parseInt(b.parent().width(),10)-parseInt(b.css("paddingLeft"),10)-parseInt(b.css("paddingRight"),10)-(parseInt(b.css("borderLeftWidth"),10)||0)-(parseInt(b.css("borderRightWidth"),10)||0));d.each(["height","paddingTop","paddingBottom"],function(i,h){e[h]="hide";i=(""+d.css [...]
-h)).match(/^([\d+-.]+)(.*)$/);g[h]={value:i[1],unit:i[2]||"px"}});a.toShow.css({height:0,overflow:"hidden"}).show();a.toHide.filter(":hidden").each(a.complete).end().filter(":visible").animate(e,{step:function(i,h){if(h.prop=="height")f=h.end-h.start===0?0:(h.now-h.start)/(h.end-h.start);a.toShow[0].style[h.prop]=f*g[h.prop].value+g[h.prop].unit},duration:a.duration,easing:a.easing,complete:function(){a.autoHeight||a.toShow.css("height","");a.toShow.css("width",k);a.toShow.css({overflow: [...]
-a);else a.toShow.animate({height:"show"},a)},bounceslide:function(a){this.slide(a,{easing:a.down?"easeOutBounce":"swing",duration:a.down?1E3:200})}}})})(jQuery);
-(function(e){e.widget("ui.autocomplete",{options:{minLength:1,delay:300},_create:function(){var a=this,d=this.element[0].ownerDocument;this.element.addClass("ui-autocomplete-input").attr("autocomplete","off").attr({role:"textbox","aria-autocomplete":"list","aria-haspopup":"true"}).bind("keydown.autocomplete",function(c){var b=e.ui.keyCode;switch(c.keyCode){case b.PAGE_UP:a._move("previousPage",c);break;case b.PAGE_DOWN:a._move("nextPage",c);break;case b.UP:a._move("previous",c);c.prevent [...]
-break;case b.DOWN:a._move("next",c);c.preventDefault();break;case b.ENTER:case b.NUMPAD_ENTER:a.menu.active&&c.preventDefault();case b.TAB:if(!a.menu.active)return;a.menu.select(c);break;case b.ESCAPE:a.element.val(a.term);a.close(c);break;case b.LEFT:case b.RIGHT:case b.SHIFT:case b.CONTROL:case b.ALT:case b.COMMAND:case b.COMMAND_RIGHT:case b.INSERT:case b.CAPS_LOCK:case b.END:case b.HOME:break;default:clearTimeout(a.searching);a.searching=setTimeout(function(){a.search(null,c)},a.opti [...]
-break}}).bind("focus.autocomplete",function(){a.selectedItem=null;a.previous=a.element.val()}).bind("blur.autocomplete",function(c){clearTimeout(a.searching);a.closing=setTimeout(function(){a.close(c);a._change(c)},150)});this._initSource();this.response=function(){return a._response.apply(a,arguments)};this.menu=e("<ul></ul>").addClass("ui-autocomplete").appendTo("body",d).mousedown(function(){setTimeout(function(){clearTimeout(a.closing)},13)}).menu({input:e(),focus:function(c,b){b=b.i [...]
-false!==a._trigger("focus",null,{item:b})&&/^key/.test(c.originalEvent.type)&&a.element.val(b.value)},select:function(c,b){b=b.item.data("item.autocomplete");false!==a._trigger("select",c,{item:b})&&a.element.val(b.value);a.close(c);c=a.previous;if(a.element[0]!==d.activeElement){a.element.focus();a.previous=c}a.selectedItem=b},blur:function(){a.menu.element.is(":visible")&&a.element.val(a.term)}}).zIndex(this.element.zIndex()+1).css({top:0,left:0}).hide().data("menu");e.fn.bgiframe&&thi [...]
-destroy:function(){this.element.removeClass("ui-autocomplete-input").removeAttr("autocomplete").removeAttr("role").removeAttr("aria-autocomplete").removeAttr("aria-haspopup");this.menu.element.remove();this._super("destroy")},_setOption:function(a){this._superApply("_setOption",arguments);a==="source"&&this._initSource()},_initSource:function(){var a,d;if(e.isArray(this.options.source)){a=this.options.source;this.source=function(c,b){b(e.ui.autocomplete.filter(a,c.term))}}else if(typeof [...]
-"string"){d=this.options.source;this.source=function(c,b){e.getJSON(d,c,b)}}else this.source=this.options.source},search:function(a,d){a=a!=null?a:this.element.val();if(a.length<this.options.minLength)return this.close(d);clearTimeout(this.closing);if(this._trigger("search")!==false)return this._search(a)},_search:function(a){this.term=this.element.addClass("ui-autocomplete-loading").val();this.source({term:a},this.response)},_response:function(a){if(a.length){a=this._normalize(a);this._ [...]
-this._trigger("open")}else this.close();this.element.removeClass("ui-autocomplete-loading")},close:function(a){clearTimeout(this.closing);if(this.menu.element.is(":visible")){this._trigger("close",a);this.menu.element.hide();this.menu.deactivate()}},_change:function(a){this.previous!==this.element.val()&&this._trigger("change",a,{item:this.selectedItem})},_normalize:function(a){if(a.length&&a[0].label&&a[0].value)return a;return e.map(a,function(d){if(typeof d==="string")return{label:d,v [...]
-d.value,value:d.value||d.label},d)})},_suggest:function(a){var d=this.menu.element.empty().zIndex(this.element.zIndex()+1),c;this._renderMenu(d,a);this.menu.deactivate();this.menu.refresh();this.menu.element.show().position({my:"left top",at:"left bottom",of:this.element,collision:"none"});a=d.width("").width();c=this.element.width();d.width(Math.max(a,c))},_renderMenu:function(a,d){var c=this;e.each(d,function(b,f){c._renderItem(a,f)})},_renderItem:function(a,d){return e("<li></li>").da [...]
-d).append("<a>"+d.label+"</a>").appendTo(a)},_move:function(a,d){if(this.menu.element.is(":visible"))if(this.menu.first()&&/^previous/.test(a)||this.menu.last()&&/^next/.test(a)){this.element.val(this.term);this.menu.deactivate()}else this.menu[a](d);else this.search(null,d)},widget:function(){return this.menu.element}});e.extend(e.ui.autocomplete,{escapeRegex:function(a){return a.replace(/([\^\$\(\)\[\]\{\}\*\.\+\?\|\\])/gi,"\\$1")},filter:function(a,d){var c=new RegExp(e.ui.autocomplet [...]
-"i");return e.grep(a,function(b){return c.test(b.label||b.value||b)})}})})(jQuery);
-(function(a){var g,i=function(b){a(":ui-button",b.target.form).each(function(){var c=a(this).data("button");setTimeout(function(){c.refresh()},1)})},h=function(b){var c=b.name,d=b.form,e=a([]);if(c)e=d?a(d).find("[name='"+c+"']"):a("[name='"+c+"']",b.ownerDocument).filter(function(){return!this.form});return e};a.widget("ui.button",{options:{text:true,label:null,icons:{primary:null,secondary:null}},_create:function(){this.element.closest("form").unbind("reset.button").bind("reset.button" [...]
-this.hasTitle=!!this.buttonElement.attr("title");var b=this,c=this.options,d=this.type==="checkbox"||this.type==="radio",e="ui-state-hover"+(!d?" ui-state-active":"");if(c.label===null)c.label=this.buttonElement.html();if(this.element.is(":disabled"))c.disabled=true;this.buttonElement.addClass("ui-button ui-widget ui-state-default ui-corner-all").attr("role","button").bind("mouseenter.button",function(){if(!c.disabled){a(this).addClass("ui-state-hover");this===g&&a(this).addClass("ui-sta [...]
-function(){c.disabled||a(this).removeClass(e)}).bind("focus.button",function(){a(this).addClass("ui-state-focus")}).bind("blur.button",function(){a(this).removeClass("ui-state-focus")});d&&this.element.bind("change.button",function(){b.refresh()});if(this.type==="checkbox")this.buttonElement.bind("click.button",function(){if(c.disabled)return false;a(this).toggleClass("ui-state-active");b.buttonElement.attr("aria-pressed",b.element[0].checked)});else if(this.type==="radio")this.buttonEle [...]
-function(){if(c.disabled)return false;a(this).addClass("ui-state-active");b.buttonElement.attr("aria-pressed",true);var f=b.element[0];h(f).not(f).map(function(){return a(this).button("widget")[0]}).removeClass("ui-state-active").attr("aria-pressed",false)});else{this.buttonElement.bind("mousedown.button",function(){if(c.disabled)return false;a(this).addClass("ui-state-active");g=this;a(document).one("mouseup",function(){g=null})}).bind("mouseup.button",function(){if(c.disabled)return fa [...]
-function(f){if(c.disabled)return false;if(f.keyCode==a.ui.keyCode.SPACE||f.keyCode==a.ui.keyCode.ENTER)a(this).addClass("ui-state-active")}).bind("keyup.button",function(){a(this).removeClass("ui-state-active")});this.buttonElement.is("a")&&this.buttonElement.keyup(function(f){f.keyCode===a.ui.keyCode.SPACE&&a(this).click()})}this._setOption("disabled",c.disabled)},_determineButtonType:function(){this.type=this.element.is(":checkbox")?"checkbox":this.element.is(":radio")?"radio":this.ele [...]
-"input":"button";if(this.type==="checkbox"||this.type==="radio"){this.buttonElement=this.element.parents().last().find("[for="+this.element.attr("id")+"]");this.element.addClass("ui-helper-hidden-accessible");var b=this.element.is(":checked");b&&this.buttonElement.addClass("ui-state-active");this.buttonElement.attr("aria-pressed",b)}else this.buttonElement=this.element},widget:function(){return this.buttonElement},destroy:function(){this.element.removeClass("ui-helper-hidden-accessible") [...]
-this.hasTitle||this.buttonElement.removeAttr("title");this._super("destroy")},_setOption:function(b,c){this._superApply("_setOption",arguments);if(b==="disabled")c?this.element.attr("disabled",true):this.element.removeAttr("disabled");this._resetButton()},refresh:function(){var b=this.element.is(":disabled");b!==this.options.disabled&&this._setOption("disabled",b);if(this.type==="radio")h(this.element[0]).each(function(){a(this).is(":checked")?a(this).button("widget").addClass("ui-state- [...]
-true):a(this).button("widget").removeClass("ui-state-active").attr("aria-pressed",false)});else if(this.type==="checkbox")this.element.is(":checked")?this.buttonElement.addClass("ui-state-active").attr("aria-pressed",true):this.buttonElement.removeClass("ui-state-active").attr("aria-pressed",false)},_resetButton:function(){if(this.type==="input")this.options.label&&this.element.val(this.options.label);else{var b=this.buttonElement.removeClass("ui-button-icons-only ui-button-icon-only ui- [...]
-c=a("<span></span>").addClass("ui-button-text").html(this.options.label).appendTo(b.empty()).text(),d=this.options.icons,e=d.primary&&d.secondary;if(d.primary||d.secondary){b.addClass("ui-button-text-icon"+(e?"s":d.primary?"-primary":"-secondary"));d.primary&&b.prepend("<span class='ui-button-icon-primary ui-icon "+d.primary+"'></span>");d.secondary&&b.append("<span class='ui-button-icon-secondary ui-icon "+d.secondary+"'></span>");if(!this.options.text){b.addClass(e?"ui-button-icons-onl [...]
-this.hasTitle||b.attr("title",c)}}else b.addClass("ui-button-text-only")}}});a.widget("ui.buttonset",{_create:function(){this.element.addClass("ui-buttonset");this._init()},_init:function(){this.refresh()},_setOption:function(b,c){b==="disabled"&&this.buttons.button("option",b,c);this._superApply("_setOption",arguments)},refresh:function(){this.buttons=this.element.find(":button, :submit, :reset, :checkbox, :radio, a, :data(button)").filter(":ui-button").button("refresh").end().not(":ui- [...]
-destroy:function(){this.element.removeClass("ui-buttonset");this.buttons.map(function(){return a(this).button("widget")[0]}).removeClass("ui-corner-left ui-corner-right").end().button("destroy");this._super("destroy")}})})(jQuery);
-(function(d){function K(){this.debug=false;this._curInst=null;this._keyEvent=false;this._disabledInputs=[];this._inDialog=this._datepickerShowing=false;this._mainDivId="ui-datepicker-div";this._inlineClass="ui-datepicker-inline";this._appendClass="ui-datepicker-append";this._triggerClass="ui-datepicker-trigger";this._dialogClass="ui-datepicker-dialog";this._disableClass="ui-datepicker-disabled";this._unselectableClass="ui-datepicker-unselectable";this._currentClass="ui-datepicker-current [...]
-"ui-datepicker-days-cell-over";this.regional=[];this.regional[""]={closeText:"Done",prevText:"Prev",nextText:"Next",currentText:"Today",monthNames:["January","February","March","April","May","June","July","August","September","October","November","December"],monthNamesShort:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"],dayNames:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],dayNamesShort:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],day [...]
-"Mo","Tu","We","Th","Fr","Sa"],weekHeader:"Wk",dateFormat:"mm/dd/yy",firstDay:0,isRTL:false,showMonthAfterYear:false,yearSuffix:""};this._defaults={showOn:"focus",showAnim:"fadeIn",showOptions:{},defaultDate:null,appendText:"",buttonText:"...",buttonImage:"",buttonImageOnly:false,hideIfNoPrevNext:false,navigationAsDateFormat:false,gotoCurrent:false,changeMonth:false,changeYear:false,yearRange:"c-10:c+10",showOtherMonths:false,selectOtherMonths:false,showWeek:false,calculateWeek:this.iso8 [...]
-minDate:null,maxDate:null,duration:"fast",beforeShowDay:null,beforeShow:null,onSelect:null,onChangeMonthYear:null,onClose:null,numberOfMonths:1,showCurrentAtPos:0,stepMonths:1,stepBigMonths:12,altField:"",altFormat:"",constrainInput:true,showButtonPanel:false,autoSize:false};d.extend(this._defaults,this.regional[""]);this.dpDiv=d('<div id="'+this._mainDivId+'" class="ui-datepicker ui-widget ui-widget-content ui-helper-clearfix ui-corner-all ui-helper-hidden-accessible"></div>')}function [...]
-b);for(var c in b)if(b[c]==null||b[c]==undefined)a[c]=b[c];return a}d.extend(d.ui,{datepicker:{version:"1.9m2"}});var y=(new Date).getTime();d.extend(K.prototype,{markerClassName:"hasDatepicker",log:function(){this.debug&&console.log.apply("",arguments)},_widgetDatepicker:function(){return this.dpDiv},setDefaults:function(a){E(this._defaults,a||{});return this},_attachDatepicker:function(a,b){var c=null;for(var e in this._defaults){var f=a.getAttribute("date:"+e);if(f){c=c||{};try{c[e]=e [...]
-f}}}e=a.nodeName.toLowerCase();f=e=="div"||e=="span";if(!a.id){this.uuid+=1;a.id="dp"+this.uuid}var i=this._newInst(d(a),f);i.settings=d.extend({},b||{},c||{});if(e=="input")this._connectDatepicker(a,i);else f&&this._inlineDatepicker(a,i)},_newInst:function(a,b){return{id:a[0].id.replace(/([^A-Za-z0-9_])/g,"\\\\$1"),input:a,selectedDay:0,selectedMonth:0,selectedYear:0,drawMonth:0,drawYear:0,inline:b,dpDiv:!b?this.dpDiv:d('<div class="'+this._inlineClass+' ui-datepicker ui-widget ui-widge [...]
-_connectDatepicker:function(a,b){var c=d(a);b.append=d([]);b.trigger=d([]);if(!c.hasClass(this.markerClassName)){this._attachments(c,b);c.addClass(this.markerClassName).keydown(this._doKeyDown).keypress(this._doKeyPress).keyup(this._doKeyUp).bind("setData.datepicker",function(e,f,h){b.settings[f]=h}).bind("getData.datepicker",function(e,f){return this._get(b,f)});this._autoSize(b);d.data(a,"datepicker",b)}},_attachments:function(a,b){var c=this._get(b,"appendText"),e=this._get(b,"isRTL") [...]
-b.append.remove();if(c){b.append=d('<span class="'+this._appendClass+'">'+c+"</span>");a[e?"before":"after"](b.append)}a.unbind("focus",this._showDatepicker);b.trigger&&b.trigger.remove();c=this._get(b,"showOn");if(c=="focus"||c=="both")a.focus(this._showDatepicker);if(c=="button"||c=="both"){c=this._get(b,"buttonText");var f=this._get(b,"buttonImage");b.trigger=d(this._get(b,"buttonImageOnly")?d("<img/>").addClass(this._triggerClass).attr({src:f,alt:c,title:c}):d('<button type="button"> [...]
-""?c:d("<img/>").attr({src:f,alt:c,title:c})));a[e?"before":"after"](b.trigger);b.trigger.click(function(){d.datepicker._datepickerShowing&&d.datepicker._lastInput==a[0]?d.datepicker._hideDatepicker():d.datepicker._showDatepicker(a[0]);return false})}},_autoSize:function(a){if(this._get(a,"autoSize")&&!a.inline){var b=new Date(2009,11,20),c=this._get(a,"dateFormat");if(c.match(/[DM]/)){var e=function(f){for(var h=0,i=0,g=0;g<f.length;g++)if(f[g].length>h){h=f[g].length;i=g}return i};b.se [...]
-c.match(/MM/)?"monthNames":"monthNamesShort")));b.setDate(e(this._get(a,c.match(/DD/)?"dayNames":"dayNamesShort"))+20-b.getDay())}a.input.attr("size",this._formatDate(a,b).length)}},_inlineDatepicker:function(a,b){var c=d(a);if(!c.hasClass(this.markerClassName)){c.addClass(this.markerClassName).append(b.dpDiv).bind("setData.datepicker",function(e,f,h){b.settings[f]=h}).bind("getData.datepicker",function(e,f){return this._get(b,f)});d.data(a,"datepicker",b);this._setDate(b,this._getDefaul [...]
-true);this._updateDatepicker(b);this._updateAlternate(b)}},_dialogDatepicker:function(a,b,c,e,f){a=this._dialogInst;if(!a){this.uuid+=1;this._dialogInput=d('<input type="text" id="'+("dp"+this.uuid)+'" style="position: absolute; top: -100px; width: 0px; z-index: -10;"/>');this._dialogInput.keydown(this._doKeyDown);d("body").append(this._dialogInput);a=this._dialogInst=this._newInst(this._dialogInput,false);a.settings={};d.data(this._dialogInput[0],"datepicker",a)}E(a.settings,e||{});b=b& [...]
-Date?this._formatDate(a,b):b;this._dialogInput.val(b);this._pos=f?f.length?f:[f.pageX,f.pageY]:null;if(!this._pos)this._pos=[document.documentElement.clientWidth/2-100+(document.documentElement.scrollLeft||document.body.scrollLeft),document.documentElement.clientHeight/2-150+(document.documentElement.scrollTop||document.body.scrollTop)];this._dialogInput.css("left",this._pos[0]+20+"px").css("top",this._pos[1]+"px");a.settings.onSelect=c;this._inDialog=true;this.dpDiv.addClass(this._dialo [...]
-d.blockUI&&d.blockUI(this.dpDiv);d.data(this._dialogInput[0],"datepicker",a);return this},_destroyDatepicker:function(a){var b=d(a),c=d.data(a,"datepicker");if(b.hasClass(this.markerClassName)){var e=a.nodeName.toLowerCase();d.removeData(a,"datepicker");if(e=="input"){c.append.remove();c.trigger.remove();b.removeClass(this.markerClassName).unbind("focus",this._showDatepicker).unbind("keydown",this._doKeyDown).unbind("keypress",this._doKeyPress).unbind("keyup",this._doKeyUp)}else if(e=="d [...]
-_enableDatepicker:function(a){var b=d(a),c=d.data(a,"datepicker");if(b.hasClass(this.markerClassName)){var e=a.nodeName.toLowerCase();if(e=="input"){a.disabled=false;c.trigger.filter("button").each(function(){this.disabled=false}).end().filter("img").css({opacity:"1.0",cursor:""})}else if(e=="div"||e=="span")b.children("."+this._inlineClass).children().removeClass("ui-state-disabled");this._disabledInputs=d.map(this._disabledInputs,function(f){return f==a?null:f})}},_disableDatepicker:fu [...]
-d(a),c=d.data(a,"datepicker");if(b.hasClass(this.markerClassName)){var e=a.nodeName.toLowerCase();if(e=="input"){a.disabled=true;c.trigger.filter("button").each(function(){this.disabled=true}).end().filter("img").css({opacity:"0.5",cursor:"default"})}else if(e=="div"||e=="span")b.children("."+this._inlineClass).children().addClass("ui-state-disabled");this._disabledInputs=d.map(this._disabledInputs,function(f){return f==a?null:f});this._disabledInputs[this._disabledInputs.length]=a}},_is [...]
-for(var b=0;b<this._disabledInputs.length;b++)if(this._disabledInputs[b]==a)return true;return false},_getInst:function(a){try{return d.data(a,"datepicker")}catch(b){throw"Missing instance data for this datepicker";}},_optionDatepicker:function(a,b,c){var e=this._getInst(a);if(arguments.length==2&&typeof b=="string")return b=="defaults"?d.extend({},d.datepicker._defaults):e?b=="all"?d.extend({},e.settings):this._get(e,b):null;var f=b||{};if(typeof b=="string"){f={};f[b]=c}if(e){this._cur [...]
-this._hideDatepicker();var h=this._getDateDatepicker(a,true);E(e.settings,f);this._attachments(d(a),e);this._autoSize(e);this._setDateDatepicker(a,h);this._updateDatepicker(e)}},_changeDatepicker:function(a,b,c){this._optionDatepicker(a,b,c)},_refreshDatepicker:function(a){(a=this._getInst(a))&&this._updateDatepicker(a)},_setDateDatepicker:function(a,b){if(a=this._getInst(a)){this._setDate(a,b);this._updateDatepicker(a);this._updateAlternate(a)}},_getDateDatepicker:function(a,b){(a=this. [...]
-!a.inline&&this._setDateFromField(a,b);return a?this._getDate(a):null},_doKeyDown:function(a){var b=d.datepicker._getInst(a.target),c=true,e=b.dpDiv.is(".ui-datepicker-rtl");b._keyEvent=true;if(d.datepicker._datepickerShowing)switch(a.keyCode){case 9:d.datepicker._hideDatepicker();c=false;break;case 13:c=d("td."+d.datepicker._dayOverClass,b.dpDiv).add(d("td."+d.datepicker._currentClass,b.dpDiv));c[0]?d.datepicker._selectDay(a.target,b.selectedMonth,b.selectedYear,c[0]):d.datepicker._hide [...]
-return false;case 27:d.datepicker._hideDatepicker();break;case 33:d.datepicker._adjustDate(a.target,a.ctrlKey?-d.datepicker._get(b,"stepBigMonths"):-d.datepicker._get(b,"stepMonths"),"M");break;case 34:d.datepicker._adjustDate(a.target,a.ctrlKey?+d.datepicker._get(b,"stepBigMonths"):+d.datepicker._get(b,"stepMonths"),"M");break;case 35:if(a.ctrlKey||a.metaKey)d.datepicker._clearDate(a.target);c=a.ctrlKey||a.metaKey;break;case 36:if(a.ctrlKey||a.metaKey)d.datepicker._gotoToday(a.target);c [...]
-a.metaKey;break;case 37:if(a.ctrlKey||a.metaKey)d.datepicker._adjustDate(a.target,e?+1:-1,"D");c=a.ctrlKey||a.metaKey;if(a.originalEvent.altKey)d.datepicker._adjustDate(a.target,a.ctrlKey?-d.datepicker._get(b,"stepBigMonths"):-d.datepicker._get(b,"stepMonths"),"M");break;case 38:if(a.ctrlKey||a.metaKey)d.datepicker._adjustDate(a.target,-7,"D");c=a.ctrlKey||a.metaKey;break;case 39:if(a.ctrlKey||a.metaKey)d.datepicker._adjustDate(a.target,e?-1:+1,"D");c=a.ctrlKey||a.metaKey;if(a.originalEv [...]
-a.ctrlKey?+d.datepicker._get(b,"stepBigMonths"):+d.datepicker._get(b,"stepMonths"),"M");break;case 40:if(a.ctrlKey||a.metaKey)d.datepicker._adjustDate(a.target,+7,"D");c=a.ctrlKey||a.metaKey;break;default:c=false}else if(a.keyCode==36&&a.ctrlKey)d.datepicker._showDatepicker(this);else c=false;if(c){a.preventDefault();a.stopPropagation()}},_doKeyPress:function(a){var b=d.datepicker._getInst(a.target);if(d.datepicker._get(b,"constrainInput")){b=d.datepicker._possibleChars(d.datepicker._get [...]
-var c=String.fromCharCode(a.charCode==undefined?a.keyCode:a.charCode);return a.ctrlKey||c<" "||!b||b.indexOf(c)>-1}},_doKeyUp:function(a){a=d.datepicker._getInst(a.target);if(a.input.val()!=a.lastVal)try{if(d.datepicker.parseDate(d.datepicker._get(a,"dateFormat"),a.input?a.input.val():null,d.datepicker._getFormatConfig(a))){d.datepicker._setDateFromField(a);d.datepicker._updateAlternate(a);d.datepicker._updateDatepicker(a)}}catch(b){d.datepicker.log(b)}return true},_showDatepicker:functi [...]
-a;if(a.nodeName.toLowerCase()!="input")a=d("input",a.parentNode)[0];if(!(d.datepicker._isDisabledDatepicker(a)||d.datepicker._lastInput==a)){var b=d.datepicker._getInst(a);d.datepicker._curInst&&d.datepicker._curInst!=b&&d.datepicker._curInst.dpDiv.stop(true,true);var c=d.datepicker._get(b,"beforeShow");E(b.settings,c?c.apply(a,[a,b]):{});b.lastVal=null;d.datepicker._lastInput=a;d.datepicker._setDateFromField(b);if(d.datepicker._inDialog)a.value="";if(!d.datepicker._pos){d.datepicker._po [...]
-d.datepicker._pos[1]+=a.offsetHeight}var e=false;d(a).parents().each(function(){e|=d(this).css("position")=="fixed";return!e});if(e&&d.browser.opera){d.datepicker._pos[0]-=document.documentElement.scrollLeft;d.datepicker._pos[1]-=document.documentElement.scrollTop}c={left:d.datepicker._pos[0],top:d.datepicker._pos[1]};d.datepicker._pos=null;b.dpDiv.css({position:"absolute",display:"block",top:"-1000px"});d.datepicker._updateDatepicker(b);c=d.datepicker._checkOffset(b,c,e);b.dpDiv.css({po [...]
-d.blockUI?"static":e?"fixed":"absolute",display:"none",left:c.left+"px",top:c.top+"px"});if(!b.inline){c=d.datepicker._get(b,"showAnim");var f=d.datepicker._get(b,"duration"),h=function(){d.datepicker._datepickerShowing=true;var i=d.datepicker._getBorders(b.dpDiv);b.dpDiv.find("iframe.ui-datepicker-cover").css({left:-i[0],top:-i[1],width:b.dpDiv.outerWidth(),height:b.dpDiv.outerHeight()})};b.dpDiv.zIndex(d(a).zIndex()+1);d.effects&&d.effects[c]?b.dpDiv.show(c,d.datepicker._get(b,"showOpt [...]
-h):b.dpDiv[c||"show"](c?f:null,h);if(!c||!f)h();b.input.is(":visible")&&!b.input.is(":disabled")&&b.input.focus();d.datepicker._curInst=b}}},_updateDatepicker:function(a){var b=this,c=d.datepicker._getBorders(a.dpDiv);a.dpDiv.empty().append(this._generateHTML(a)).find("iframe.ui-datepicker-cover").css({left:-c[0],top:-c[1],width:a.dpDiv.outerWidth(),height:a.dpDiv.outerHeight()}).end().find("button, .ui-datepicker-prev, .ui-datepicker-next, .ui-datepicker-calendar td a").bind("mouseout", [...]
-this.className.indexOf("ui-datepicker-prev")!=-1&&d(this).removeClass("ui-datepicker-prev-hover");this.className.indexOf("ui-datepicker-next")!=-1&&d(this).removeClass("ui-datepicker-next-hover")}).bind("mouseover",function(){if(!b._isDisabledDatepicker(a.inline?a.dpDiv.parent()[0]:a.input[0])){d(this).parents(".ui-datepicker-calendar").find("a").removeClass("ui-state-hover");d(this).addClass("ui-state-hover");this.className.indexOf("ui-datepicker-prev")!=-1&&d(this).addClass("ui-datepic [...]
-this.className.indexOf("ui-datepicker-next")!=-1&&d(this).addClass("ui-datepicker-next-hover")}}).end().find("."+this._dayOverClass+" a").trigger("mouseover").end();c=this._getNumberOfMonths(a);var e=c[1];e>1?a.dpDiv.addClass("ui-datepicker-multi-"+e).css("width",17*e+"em"):a.dpDiv.removeClass("ui-datepicker-multi-2 ui-datepicker-multi-3 ui-datepicker-multi-4").width("");a.dpDiv[(c[0]!=1||c[1]!=1?"add":"remove")+"Class"]("ui-datepicker-multi");a.dpDiv[(this._get(a,"isRTL")?"add":"remove" [...]
-a==d.datepicker._curInst&&d.datepicker._datepickerShowing&&a.input&&a.input.is(":visible")&&!a.input.is(":disabled")&&a.input.focus()},_getBorders:function(a){var b=function(c){return{thin:1,medium:2,thick:3}[c]||c};return[parseFloat(b(a.css("border-left-width"))),parseFloat(b(a.css("border-top-width")))]},_checkOffset:function(a,b,c){var e=a.dpDiv.outerWidth(),f=a.dpDiv.outerHeight(),h=a.input?a.input.outerWidth():0,i=a.input?a.input.outerHeight():0,g=document.documentElement.clientWidt [...]
-k=document.documentElement.clientHeight+d(document).scrollTop();b.left-=this._get(a,"isRTL")?e-h:0;b.left-=c&&b.left==a.input.offset().left?d(document).scrollLeft():0;b.top-=c&&b.top==a.input.offset().top+i?d(document).scrollTop():0;b.left-=Math.min(b.left,b.left+e>g&&g>e?Math.abs(b.left+e-g):0);b.top-=Math.min(b.top,b.top+f>k&&k>f?Math.abs(f+i):0);return b},_findPos:function(a){for(var b=this._get(this._getInst(a),"isRTL");a&&(a.type=="hidden"||a.nodeType!=1);)a=a[b?"previousSibling":"n [...]
-a=d(a).offset();return[a.left,a.top]},_hideDatepicker:function(a){var b=this._curInst;if(!(!b||a&&b!=d.data(a,"datepicker")))if(this._datepickerShowing){a=this._get(b,"showAnim");var c=this._get(b,"duration"),e=function(){d.datepicker._tidyDialog(b);this._curInst=null};d.effects&&d.effects[a]?b.dpDiv.hide(a,d.datepicker._get(b,"showOptions"),c,e):b.dpDiv[a=="slideDown"?"slideUp":a=="fadeIn"?"fadeOut":"hide"](a?c:null,e);a||e();if(a=this._get(b,"onClose"))a.apply(b.input?b.input[0]:null,[ [...]
-"",b]);this._datepickerShowing=false;this._lastInput=null;if(this._inDialog){this._dialogInput.css({position:"absolute",left:"0",top:"-100px"});if(d.blockUI){d.unblockUI();d("body").append(this.dpDiv)}}this._inDialog=false}},_tidyDialog:function(a){a.dpDiv.removeClass(this._dialogClass).unbind(".ui-datepicker-calendar")},_checkExternalClick:function(a){if(d.datepicker._curInst){a=d(a.target);a[0].id!=d.datepicker._mainDivId&&a.parents("#"+d.datepicker._mainDivId).length==0&&!a.hasClass(d [...]
-!a.hasClass(d.datepicker._triggerClass)&&d.datepicker._datepickerShowing&&!(d.datepicker._inDialog&&d.blockUI)&&d.datepicker._hideDatepicker()}},_adjustDate:function(a,b,c){a=d(a);var e=this._getInst(a[0]);if(!this._isDisabledDatepicker(a[0])){this._adjustInstDate(e,b+(c=="M"?this._get(e,"showCurrentAtPos"):0),c);this._updateDatepicker(e)}},_gotoToday:function(a){a=d(a);var b=this._getInst(a[0]);if(this._get(b,"gotoCurrent")&&b.currentDay){b.selectedDay=b.currentDay;b.drawMonth=b.selecte [...]
-b.drawYear=b.selectedYear=b.currentYear}else{var c=new Date;b.selectedDay=c.getDate();b.drawMonth=b.selectedMonth=c.getMonth();b.drawYear=b.selectedYear=c.getFullYear()}this._notifyChange(b);this._adjustDate(a)},_selectMonthYear:function(a,b,c){a=d(a);var e=this._getInst(a[0]);e._selectingMonthYear=false;e["selected"+(c=="M"?"Month":"Year")]=e["draw"+(c=="M"?"Month":"Year")]=parseInt(b.options[b.selectedIndex].value,10);this._notifyChange(e);this._adjustDate(a)},_clickMonthYear:function( [...]
-a.input&&a._selectingMonthYear&&!d.browser.msie&&a.input.focus();a._selectingMonthYear=!a._selectingMonthYear},_selectDay:function(a,b,c,e){var f=d(a);if(!(d(e).hasClass(this._unselectableClass)||this._isDisabledDatepicker(f[0]))){f=this._getInst(f[0]);f.selectedDay=f.currentDay=d("a",e).html();f.selectedMonth=f.currentMonth=b;f.selectedYear=f.currentYear=c;this._selectDate(a,this._formatDate(f,f.currentDay,f.currentMonth,f.currentYear))}},_clearDate:function(a){a=d(a);this._getInst(a[0] [...]
-"")},_selectDate:function(a,b){a=this._getInst(d(a)[0]);b=b!=null?b:this._formatDate(a);a.input&&a.input.val(b);this._updateAlternate(a);var c=this._get(a,"onSelect");if(c)c.apply(a.input?a.input[0]:null,[b,a]);else a.input&&a.input.trigger("change");if(a.inline)this._updateDatepicker(a);else{this._hideDatepicker();this._lastInput=a.input[0];typeof a.input[0]!="object"&&a.input.focus();this._lastInput=null}},_updateAlternate:function(a){var b=this._get(a,"altField");if(b){var c=this._get [...]
-this._get(a,"dateFormat"),e=this._getDate(a),f=this.formatDate(c,e,this._getFormatConfig(a));d(b).each(function(){d(this).val(f)})}},noWeekends:function(a){a=a.getDay();return[a>0&&a<6,""]},iso8601Week:function(a){a=new Date(a.getTime());a.setDate(a.getDate()+4-(a.getDay()||7));var b=a.getTime();a.setMonth(0);a.setDate(1);return Math.floor(Math.round((b-a)/864E5)/7)+1},parseDate:function(a,b,c){if(a==null||b==null)throw"Invalid arguments";b=typeof b=="object"?b.toString():b+"";if(b=="")r [...]
-for(var e=(c?c.shortYearCutoff:null)||this._defaults.shortYearCutoff,f=(c?c.dayNamesShort:null)||this._defaults.dayNamesShort,h=(c?c.dayNames:null)||this._defaults.dayNames,i=(c?c.monthNamesShort:null)||this._defaults.monthNamesShort,g=(c?c.monthNames:null)||this._defaults.monthNames,k=c=-1,l=-1,u=-1,j=false,o=function(p){(p=z+1<a.length&&a.charAt(z+1)==p)&&z++;return p},m=function(p){o(p);p=new RegExp("^\\d{1,"+(p=="@"?14:p=="!"?20:p=="y"?4:p=="o"?3:2)+"}");p=b.substring(s).match(p);if( [...]
-s;s+=p[0].length;return parseInt(p[0],10)},n=function(p,w,G){p=o(p)?G:w;for(w=0;w<p.length;w++)if(b.substr(s,p[w].length)==p[w]){s+=p[w].length;return w+1}throw"Unknown name at position "+s;},r=function(){if(b.charAt(s)!=a.charAt(z))throw"Unexpected literal at position "+s;s++},s=0,z=0;z<a.length;z++)if(j)if(a.charAt(z)=="'"&&!o("'"))j=false;else r();else switch(a.charAt(z)){case "d":l=m("d");break;case "D":n("D",f,h);break;case "o":u=m("o");break;case "m":k=m("m");break;case "M":k=n("M" [...]
-case "y":c=m("y");break;case "@":var v=new Date(m("@"));c=v.getFullYear();k=v.getMonth()+1;l=v.getDate();break;case "!":v=new Date((m("!")-this._ticksTo1970)/1E4);c=v.getFullYear();k=v.getMonth()+1;l=v.getDate();break;case "'":if(o("'"))r();else j=true;break;default:r()}if(c==-1)c=(new Date).getFullYear();else if(c<100)c+=(new Date).getFullYear()-(new Date).getFullYear()%100+(c<=e?0:-100);if(u>-1){k=1;l=u;do{e=this._getDaysInMonth(c,k-1);if(l<=e)break;k++;l-=e}while(1)}v=this._daylightSa [...]
-k-1,l));if(v.getFullYear()!=c||v.getMonth()+1!=k||v.getDate()!=l)throw"Invalid date";return v},ATOM:"yy-mm-dd",COOKIE:"D, dd M yy",ISO_8601:"yy-mm-dd",RFC_822:"D, d M y",RFC_850:"DD, dd-M-y",RFC_1036:"D, d M y",RFC_1123:"D, d M yy",RFC_2822:"D, d M yy",RSS:"D, d M y",TICKS:"!",TIMESTAMP:"@",W3C:"yy-mm-dd",_ticksTo1970:(718685+Math.floor(492.5)-Math.floor(19.7)+Math.floor(4.925))*24*60*60*1E7,formatDate:function(a,b,c){if(!b)return"";var e=(c?c.dayNamesShort:null)||this._defaults.dayNames [...]
-c.dayNames:null)||this._defaults.dayNames,h=(c?c.monthNamesShort:null)||this._defaults.monthNamesShort;c=(c?c.monthNames:null)||this._defaults.monthNames;var i=function(o){(o=j+1<a.length&&a.charAt(j+1)==o)&&j++;return o},g=function(o,m,n){m=""+m;if(i(o))for(;m.length<n;)m="0"+m;return m},k=function(o,m,n,r){return i(o)?r[m]:n[m]},l="",u=false;if(b)for(var j=0;j<a.length;j++)if(u)if(a.charAt(j)=="'"&&!i("'"))u=false;else l+=a.charAt(j);else switch(a.charAt(j)){case "d":l+=g("d",b.getDate [...]
-case "D":l+=k("D",b.getDay(),e,f);break;case "o":l+=g("o",(b.getTime()-(new Date(b.getFullYear(),0,0)).getTime())/864E5,3);break;case "m":l+=g("m",b.getMonth()+1,2);break;case "M":l+=k("M",b.getMonth(),h,c);break;case "y":l+=i("y")?b.getFullYear():(b.getYear()%100<10?"0":"")+b.getYear()%100;break;case "@":l+=b.getTime();break;case "!":l+=b.getTime()*1E4+this._ticksTo1970;break;case "'":if(i("'"))l+="'";else u=true;break;default:l+=a.charAt(j)}return l},_possibleChars:function(a){for(var [...]
-e=function(h){(h=f+1<a.length&&a.charAt(f+1)==h)&&f++;return h},f=0;f<a.length;f++)if(c)if(a.charAt(f)=="'"&&!e("'"))c=false;else b+=a.charAt(f);else switch(a.charAt(f)){case "d":case "m":case "y":case "@":b+="0123456789";break;case "D":case "M":return null;case "'":if(e("'"))b+="'";else c=true;break;default:b+=a.charAt(f)}return b},_get:function(a,b){return a.settings[b]!==undefined?a.settings[b]:this._defaults[b]},_setDateFromField:function(a,b){if(a.input.val()!=a.lastVal){var c=this. [...]
-e=a.lastVal=a.input?a.input.val():null,f,h;f=h=this._getDefaultDate(a);var i=this._getFormatConfig(a);try{f=this.parseDate(c,e,i)||h}catch(g){this.log(g);e=b?"":e}a.selectedDay=f.getDate();a.drawMonth=a.selectedMonth=f.getMonth();a.drawYear=a.selectedYear=f.getFullYear();a.currentDay=e?f.getDate():0;a.currentMonth=e?f.getMonth():0;a.currentYear=e?f.getFullYear():0;this._adjustInstDate(a)}},_getDefaultDate:function(a){return this._restrictMinMax(a,this._determineDate(a,this._get(a,"defaul [...]
-_determineDate:function(a,b,c){var e=function(h){var i=new Date;i.setDate(i.getDate()+h);return i},f=function(h){try{return d.datepicker.parseDate(d.datepicker._get(a,"dateFormat"),h,d.datepicker._getFormatConfig(a))}catch(i){}var g=(h.toLowerCase().match(/^c/)?d.datepicker._getDate(a):null)||new Date,k=g.getFullYear(),l=g.getMonth();g=g.getDate();for(var u=/([+-]?[0-9]+)\s*(d|D|w|W|m|M|y|Y)?/g,j=u.exec(h);j;){switch(j[2]||"d"){case "d":case "D":g+=parseInt(j[1],10);break;case "w":case " [...]
-10)*7;break;case "m":case "M":l+=parseInt(j[1],10);g=Math.min(g,d.datepicker._getDaysInMonth(k,l));break;case "y":case "Y":k+=parseInt(j[1],10);g=Math.min(g,d.datepicker._getDaysInMonth(k,l));break}j=u.exec(h)}return new Date(k,l,g)};if(b=(b=b==null?c:typeof b=="string"?f(b):typeof b=="number"?isNaN(b)?c:e(b):b)&&b.toString()=="Invalid Date"?c:b){b.setHours(0);b.setMinutes(0);b.setSeconds(0);b.setMilliseconds(0)}return this._daylightSavingAdjust(b)},_daylightSavingAdjust:function(a){if(! [...]
-a.setHours(a.getHours()>12?a.getHours()+2:0);return a},_setDate:function(a,b,c){var e=!b,f=a.selectedMonth,h=a.selectedYear;b=this._restrictMinMax(a,this._determineDate(a,b,new Date));a.selectedDay=a.currentDay=b.getDate();a.drawMonth=a.selectedMonth=a.currentMonth=b.getMonth();a.drawYear=a.selectedYear=a.currentYear=b.getFullYear();if((f!=a.selectedMonth||h!=a.selectedYear)&&!c)this._notifyChange(a);this._adjustInstDate(a);if(a.input)a.input.val(e?"":this._formatDate(a))},_getDate:funct [...]
-a.input&&a.input.val()==""?null:this._daylightSavingAdjust(new Date(a.currentYear,a.currentMonth,a.currentDay))},_generateHTML:function(a){var b=new Date;b=this._daylightSavingAdjust(new Date(b.getFullYear(),b.getMonth(),b.getDate()));var c=this._get(a,"isRTL"),e=this._get(a,"showButtonPanel"),f=this._get(a,"hideIfNoPrevNext"),h=this._get(a,"navigationAsDateFormat"),i=this._getNumberOfMonths(a),g=this._get(a,"showCurrentAtPos"),k=this._get(a,"stepMonths"),l=i[0]!=1||i[1]!=1,u=this._dayli [...]
-new Date(9999,9,9):new Date(a.currentYear,a.currentMonth,a.currentDay)),j=this._getMinMaxDate(a,"min"),o=this._getMinMaxDate(a,"max");g=a.drawMonth-g;var m=a.drawYear;if(g<0){g+=12;m--}if(o){var n=this._daylightSavingAdjust(new Date(o.getFullYear(),o.getMonth()-i[0]*i[1]+1,o.getDate()));for(n=j&&n<j?j:n;this._daylightSavingAdjust(new Date(m,g,1))>n;){g--;if(g<0){g=11;m--}}}a.drawMonth=g;a.drawYear=m;n=this._get(a,"prevText");n=!h?n:this.formatDate(n,this._daylightSavingAdjust(new Date(m, [...]
-n=this._canAdjustMonth(a,-1,m,g)?'<a class="ui-datepicker-prev ui-corner-all" onclick="DP_jQuery_'+y+".datepicker._adjustDate('#"+a.id+"', -"+k+", 'M');\" title=\""+n+'"><span class="ui-icon ui-icon-circle-triangle-'+(c?"e":"w")+'">'+n+"</span></a>":f?"":'<a class="ui-datepicker-prev ui-corner-all ui-state-disabled" title="'+n+'"><span class="ui-icon ui-icon-circle-triangle-'+(c?"e":"w")+'">'+n+"</span></a>";var r=this._get(a,"nextText");r=!h?r:this.formatDate(r,this._daylightSavingAdjus [...]
-g+k,1)),this._getFormatConfig(a));f=this._canAdjustMonth(a,+1,m,g)?'<a class="ui-datepicker-next ui-corner-all" onclick="DP_jQuery_'+y+".datepicker._adjustDate('#"+a.id+"', +"+k+", 'M');\" title=\""+r+'"><span class="ui-icon ui-icon-circle-triangle-'+(c?"w":"e")+'">'+r+"</span></a>":f?"":'<a class="ui-datepicker-next ui-corner-all ui-state-disabled" title="'+r+'"><span class="ui-icon ui-icon-circle-triangle-'+(c?"w":"e")+'">'+r+"</span></a>";k=this._get(a,"currentText");r=this._get(a,"go [...]
-a.currentDay?u:b;k=!h?k:this.formatDate(k,r,this._getFormatConfig(a));h=!a.inline?'<button type="button" class="ui-datepicker-close ui-state-default ui-priority-primary ui-corner-all" onclick="DP_jQuery_'+y+'.datepicker._hideDatepicker();">'+this._get(a,"closeText")+"</button>":"";e=e?'<div class="ui-datepicker-buttonpane ui-widget-content">'+(c?h:"")+(this._isInRange(a,r)?'<button type="button" class="ui-datepicker-current ui-state-default ui-priority-secondary ui-corner-all" onclick="D [...]
-y+".datepicker._gotoToday('#"+a.id+"');\">"+k+"</button>":"")+(c?"":h)+"</div>":"";h=parseInt(this._get(a,"firstDay"),10);h=isNaN(h)?0:h;k=this._get(a,"showWeek");r=this._get(a,"dayNames");this._get(a,"dayNamesShort");var s=this._get(a,"dayNamesMin"),z=this._get(a,"monthNames"),v=this._get(a,"monthNamesShort"),p=this._get(a,"beforeShowDay"),w=this._get(a,"showOtherMonths"),G=this._get(a,"selectOtherMonths");this._get(a,"calculateWeek");for(var L=this._getDefaultDate(a),H="",C=0;C<i[0];C+ [...]
-"",D=0;D<i[1];D++){var I=this._daylightSavingAdjust(new Date(m,g,a.selectedDay)),t=" ui-corner-all",x="";if(l){x+='<div class="ui-datepicker-group';if(i[1]>1)switch(D){case 0:x+=" ui-datepicker-group-first";t=" ui-corner-"+(c?"right":"left");break;case i[1]-1:x+=" ui-datepicker-group-last";t=" ui-corner-"+(c?"left":"right");break;default:x+=" ui-datepicker-group-middle";t="";break}x+='">'}x+='<div class="ui-datepicker-header ui-widget-header ui-helper-clearfix'+t+'">'+(/all|left/.test(t) [...]
-f:n:"")+(/all|right/.test(t)&&C==0?c?n:f:"")+this._generateMonthYearHeader(a,g,m,j,o,C>0||D>0,z,v)+'</div><table class="ui-datepicker-calendar"><thead><tr>';var A=k?'<th class="ui-datepicker-week-col">'+this._get(a,"weekHeader")+"</th>":"";for(t=0;t<7;t++){var q=(t+h)%7;A+="<th"+((t+h+6)%7>=5?' class="ui-datepicker-week-end"':"")+'><span title="'+r[q]+'">'+s[q]+"</span></th>"}x+=A+"</tr></thead><tbody>";A=this._getDaysInMonth(m,g);if(m==a.selectedYear&&g==a.selectedMonth)a.selectedDay=Ma [...]
-A);t=(this._getFirstDayOfMonth(m,g)-h+7)%7;A=l?6:Math.ceil((t+A)/7);q=this._daylightSavingAdjust(new Date(m,g,1-t));for(var N=0;N<A;N++){x+="<tr>";var O=!k?"":'<td class="ui-datepicker-week-col">'+this._get(a,"calculateWeek")(q)+"</td>";for(t=0;t<7;t++){var F=p?p.apply(a.input?a.input[0]:null,[q]):[true,""],B=q.getMonth()!=g,J=B&&!G||!F[0]||j&&q<j||o&&q>o;O+='<td class="'+((t+h+6)%7>=5?" ui-datepicker-week-end":"")+(B?" ui-datepicker-other-month":"")+(q.getTime()==I.getTime()&&g==a.selec [...]
-a._keyEvent||L.getTime()==q.getTime()&&L.getTime()==I.getTime()?" "+this._dayOverClass:"")+(J?" "+this._unselectableClass+" ui-state-disabled":"")+(B&&!w?"":" "+F[1]+(q.getTime()==u.getTime()?" "+this._currentClass:"")+(q.getTime()==b.getTime()?" ui-datepicker-today":""))+'"'+((!B||w)&&F[2]?' title="'+F[2]+'"':"")+(J?"":' onclick="DP_jQuery_'+y+".datepicker._selectDay('#"+a.id+"',"+q.getMonth()+","+q.getFullYear()+', this);return false;"')+">"+(B&&!w?" ":J?'<span class="ui-state-def [...]
-"</span>":'<a class="ui-state-default'+(q.getTime()==b.getTime()?" ui-state-highlight":"")+(q.getTime()==I.getTime()?" ui-state-active":"")+(B?" ui-priority-secondary":"")+'" href="#">'+q.getDate()+"</a>")+"</td>";q.setDate(q.getDate()+1);q=this._daylightSavingAdjust(q)}x+=O+"</tr>"}g++;if(g>11){g=0;m++}x+="</tbody></table>"+(l?"</div>"+(i[0]>0&&D==i[1]-1?'<div class="ui-datepicker-row-break"></div>':""):"");M+=x}H+=M}H+=e+(d.browser.msie&&parseInt(d.browser.version,10)<7&&!a.inline?'<if [...]
-"");a._keyEvent=false;return H},_generateMonthYearHeader:function(a,b,c,e,f,h,i,g){var k=this._get(a,"changeMonth"),l=this._get(a,"changeYear"),u=this._get(a,"showMonthAfterYear"),j='<div class="ui-datepicker-title">',o="";if(h||!k)o+='<span class="ui-datepicker-month">'+i[b]+"</span>";else{i=e&&e.getFullYear()==c;var m=f&&f.getFullYear()==c;o+='<select class="ui-datepicker-month" onchange="DP_jQuery_'+y+".datepicker._selectMonthYear('#"+a.id+"', this, 'M');\" onclick=\"DP_jQuery_"+y+".d [...]
-a.id+"');\">";for(var n=0;n<12;n++)if((!i||n>=e.getMonth())&&(!m||n<=f.getMonth()))o+='<option value="'+n+'"'+(n==b?' selected="selected"':"")+">"+g[n]+"</option>";o+="</select>"}u||(j+=o+(h||!(k&&l)?" ":""));if(h||!l)j+='<span class="ui-datepicker-year">'+c+"</span>";else{g=this._get(a,"yearRange").split(":");var r=(new Date).getFullYear();i=function(s){s=s.match(/c[+-].*/)?c+parseInt(s.substring(1),10):s.match(/[+-].*/)?r+parseInt(s,10):parseInt(s,10);return isNaN(s)?r:s};b=i(g[0] [...]
-i(g[1]||""));b=e?Math.max(b,e.getFullYear()):b;g=f?Math.min(g,f.getFullYear()):g;for(j+='<select class="ui-datepicker-year" onchange="DP_jQuery_'+y+".datepicker._selectMonthYear('#"+a.id+"', this, 'Y');\" onclick=\"DP_jQuery_"+y+".datepicker._clickMonthYear('#"+a.id+"');\">";b<=g;b++)j+='<option value="'+b+'"'+(b==c?' selected="selected"':"")+">"+b+"</option>";j+="</select>"}j+=this._get(a,"yearSuffix");if(u)j+=(h||!(k&&l)?" ":"")+o;j+="</div>";return j},_adjustInstDate:function(a,b [...]
-a.drawYear+(c=="Y"?b:0),f=a.drawMonth+(c=="M"?b:0);b=Math.min(a.selectedDay,this._getDaysInMonth(e,f))+(c=="D"?b:0);e=this._restrictMinMax(a,this._daylightSavingAdjust(new Date(e,f,b)));a.selectedDay=e.getDate();a.drawMonth=a.selectedMonth=e.getMonth();a.drawYear=a.selectedYear=e.getFullYear();if(c=="M"||c=="Y")this._notifyChange(a)},_restrictMinMax:function(a,b){var c=this._getMinMaxDate(a,"min");a=this._getMinMaxDate(a,"max");b=c&&b<c?c:b;return b=a&&b>a?a:b},_notifyChange:function(a){ [...]
-"onChangeMonthYear");if(b)b.apply(a.input?a.input[0]:null,[a.selectedYear,a.selectedMonth+1,a])},_getNumberOfMonths:function(a){a=this._get(a,"numberOfMonths");return a==null?[1,1]:typeof a=="number"?[1,a]:a},_getMinMaxDate:function(a,b){return this._determineDate(a,this._get(a,b+"Date"),null)},_getDaysInMonth:function(a,b){return 32-(new Date(a,b,32)).getDate()},_getFirstDayOfMonth:function(a,b){return(new Date(a,b,1)).getDay()},_canAdjustMonth:function(a,b,c,e){var f=this._getNumberOfM [...]
-c=this._daylightSavingAdjust(new Date(c,e+(b<0?b:f[0]*f[1]),1));b<0&&c.setDate(this._getDaysInMonth(c.getFullYear(),c.getMonth()));return this._isInRange(a,c)},_isInRange:function(a,b){var c=this._getMinMaxDate(a,"min");a=this._getMinMaxDate(a,"max");return(!c||b.getTime()>=c.getTime())&&(!a||b.getTime()<=a.getTime())},_getFormatConfig:function(a){var b=this._get(a,"shortYearCutoff");b=typeof b!="string"?b:(new Date).getFullYear()%100+parseInt(b,10);return{shortYearCutoff:b,dayNamesShort [...]
-"dayNamesShort"),dayNames:this._get(a,"dayNames"),monthNamesShort:this._get(a,"monthNamesShort"),monthNames:this._get(a,"monthNames")}},_formatDate:function(a,b,c,e){if(!b){a.currentDay=a.selectedDay;a.currentMonth=a.selectedMonth;a.currentYear=a.selectedYear}b=b?typeof b=="object"?b:this._daylightSavingAdjust(new Date(e,c,b)):this._daylightSavingAdjust(new Date(a.currentYear,a.currentMonth,a.currentDay));return this.formatDate(this._get(a,"dateFormat"),b,this._getFormatConfig(a))}});d.f [...]
-function(a){if(!d.datepicker.initialized){d(document).mousedown(d.datepicker._checkExternalClick).find("body").append(d.datepicker.dpDiv);d.datepicker.initialized=true}var b=Array.prototype.slice.call(arguments,1);if(typeof a=="string"&&(a=="isDisabled"||a=="getDate"||a=="widget"))return d.datepicker["_"+a+"Datepicker"].apply(d.datepicker,[this[0]].concat(b));if(a=="option"&&arguments.length==2&&typeof arguments[1]=="string")return d.datepicker["_"+a+"Datepicker"].apply(d.datepicker,[thi [...]
-return this.each(function(){typeof a=="string"?d.datepicker["_"+a+"Datepicker"].apply(d.datepicker,[this].concat(b)):d.datepicker._attachDatepicker(this,a)})};d.datepicker=new K;d.datepicker.initialized=false;d.datepicker.uuid=(new Date).getTime();d.datepicker.version="1.9m2";window["DP_jQuery_"+y]=d})(jQuery);
-(function(c){c.widget("ui.dialog",{options:{autoOpen:true,buttons:{},closeOnEscape:true,closeText:"close",dialogClass:"",draggable:true,hide:null,height:"auto",maxHeight:false,maxWidth:false,minHeight:150,minWidth:150,modal:false,position:"center",resizable:true,show:null,stack:true,title:"",width:300,zIndex:1E3},_create:function(){this.originalTitle=this.element.attr("title");var a=this,b=a.options,d=b.title||a.originalTitle||" ",e=c.ui.dialog.getTitleId(a.element),g=(a.uiDialog=c( [...]
-b.dialogClass).css({zIndex:b.zIndex}).attr("tabIndex",-1).css("outline",0).keydown(function(i){if(b.closeOnEscape&&i.keyCode&&i.keyCode===c.ui.keyCode.ESCAPE){a.close(i);i.preventDefault()}}).attr({role:"dialog","aria-labelledby":e}).mousedown(function(i){a.moveToTop(false,i)});a.element.show().removeAttr("title").addClass("ui-dialog-content ui-widget-content").appendTo(g);var f=(a.uiDialogTitlebar=c("<div></div>")).addClass("ui-dialog-titlebar ui-widget-header ui-corner-all ui-helper-cl [...]
-h=c('<a href="#"></a>').addClass("ui-dialog-titlebar-close ui-corner-all").attr("role","button").hover(function(){h.addClass("ui-state-hover")},function(){h.removeClass("ui-state-hover")}).focus(function(){h.addClass("ui-state-focus")}).blur(function(){h.removeClass("ui-state-focus")}).click(function(i){a.close(i);return false}).appendTo(f);(a.uiDialogTitlebarCloseText=c("<span></span>")).addClass("ui-icon ui-icon-closethick").text(b.closeText).appendTo(h);c("<span></span>").addClass("ui [...]
-e).html(d).prependTo(f);if(c.isFunction(b.beforeclose)&&!c.isFunction(b.beforeClose))b.beforeClose=b.beforeclose;f.find("*").add(f).disableSelection();b.draggable&&c.fn.draggable&&a._makeDraggable();b.resizable&&c.fn.resizable&&a._makeResizable();a._createButtons(b.buttons);a._isOpen=false;c.fn.bgiframe&&g.bgiframe()},_init:function(){this.options.autoOpen&&this.open()},destroy:function(){var a=this;a.overlay&&a.overlay.destroy();a.uiDialog.hide();a.element.unbind(".dialog").removeData(" [...]
-a.uiDialog.remove();a.originalTitle&&a.element.attr("title",a.originalTitle);return a},widget:function(){return this.uiDialog},close:function(a){var b=this,d;if(false!==b._trigger("beforeClose",a)){b.overlay&&b.overlay.destroy();b.uiDialog.unbind("keypress.ui-dialog");b._isOpen=false;if(b.options.hide)b.uiDialog.hide(b.options.hide,function(){b._trigger("close",a)});else{b.uiDialog.hide();b._trigger("close",a)}c.ui.dialog.overlay.resize();if(b.options.modal){d=0;c(".ui-dialog").each(func [...]
-b.uiDialog[0])d=Math.max(d,c(this).css("z-index"))});c.ui.dialog.maxZ=d}return b}},isOpen:function(){return this._isOpen},moveToTop:function(a,b){var d=this,e=d.options;if(e.modal&&!a||!e.stack&&!e.modal)return d._trigger("focus",b);if(e.zIndex>c.ui.dialog.maxZ)c.ui.dialog.maxZ=e.zIndex;if(d.overlay){c.ui.dialog.maxZ+=1;d.overlay.$el.css("z-index",c.ui.dialog.overlay.maxZ=c.ui.dialog.maxZ)}a={scrollTop:d.element.attr("scrollTop"),scrollLeft:d.element.attr("scrollLeft")};c.ui.dialog.maxZ+ [...]
-c.ui.dialog.maxZ);d.element.attr(a);d._trigger("focus",b);return d},open:function(){if(!this._isOpen){var a=this,b=a.options,d=a.uiDialog;a.overlay=b.modal?new c.ui.dialog.overlay(a):null;d.next().length&&d.appendTo("body");a._size();a._position(b.position);d.show(b.show);a.moveToTop(true);b.modal&&d.bind("keypress.ui-dialog",function(e){if(e.keyCode===c.ui.keyCode.TAB){var g=c(":tabbable",this),f=g.filter(":first");g=g.filter(":last");if(e.target===g[0]&&!e.shiftKey){f.focus(1);return f [...]
-f[0]&&e.shiftKey){g.focus(1);return false}}});c([]).add(d.find(".ui-dialog-content :tabbable:first")).add(d.find(".ui-dialog-buttonpane :tabbable:first")).add(d).filter(":first").focus();a._trigger("open");a._isOpen=true;return a}},_createButtons:function(a){var b=this,d=false,e=c("<div></div>").addClass("ui-dialog-buttonpane ui-widget-content ui-helper-clearfix");b.uiDialog.find(".ui-dialog-buttonpane").remove();typeof a==="object"&&a!==null&&c.each(a,function(){return!(d=true)});if(d){ [...]
-function(g,f){g=c('<button type="button"></button>').text(g).click(function(){f.apply(b.element[0],arguments)}).appendTo(e);c.fn.button&&g.button()});e.appendTo(b.uiDialog)}},_makeDraggable:function(){function a(f){return{position:f.position,offset:f.offset}}var b=this,d=b.options,e=c(document),g;b.uiDialog.draggable({cancel:".ui-dialog-content, .ui-dialog-titlebar-close",handle:".ui-dialog-titlebar",containment:"document",start:function(f,h){g=d.height==="auto"?"auto":c(this).height();c [...]
-b._trigger("dragStart",f,a(h))},drag:function(f,h){b._trigger("drag",f,a(h))},stop:function(f,h){d.position=[h.position.left-e.scrollLeft(),h.position.top-e.scrollTop()];c(this).removeClass("ui-dialog-dragging").height(g);b._trigger("dragStop",f,a(h));c.ui.dialog.overlay.resize()}})},_makeResizable:function(a){function b(f){return{originalPosition:f.originalPosition,originalSize:f.originalSize,position:f.position,size:f.size}}a=a===undefined?this.options.resizable:a;var d=this,e=d.option [...]
-a=typeof a==="string"?a:"n,e,s,w,se,sw,ne,nw";d.uiDialog.resizable({cancel:".ui-dialog-content",containment:"document",alsoResize:d.element,maxWidth:e.maxWidth,maxHeight:e.maxHeight,minWidth:e.minWidth,minHeight:d._minHeight(),handles:a,start:function(f,h){c(this).addClass("ui-dialog-resizing");d._trigger("resizeStart",f,b(h))},resize:function(f,h){d._trigger("resize",f,b(h))},stop:function(f,h){c(this).removeClass("ui-dialog-resizing");e.height=c(this).height();e.width=c(this).width();d [...]
-f,b(h));c.ui.dialog.overlay.resize()}}).css("position",g).find(".ui-resizable-se").addClass("ui-icon ui-icon-grip-diagonal-se")},_minHeight:function(){var a=this.options;return a.height==="auto"?a.minHeight:Math.min(a.minHeight,a.height)},_position:function(a){var b=[],d=[0,0];a=a||c.ui.dialog.prototype.options.position;if(typeof a==="string"||typeof a==="object"&&"0"in a){b=a.split?a.split(" "):[a[0],a[1]];if(b.length===1)b[1]=b[0];c.each(["left","top"],function(e,g){if(+b[e]===b[e]){d[ [...]
-g}})}else if(typeof a==="object"){if("left"in a){b[0]="left";d[0]=a.left}else if("right"in a){b[0]="right";d[0]=-a.right}if("top"in a){b[1]="top";d[1]=a.top}else if("bottom"in a){b[1]="bottom";d[1]=-a.bottom}}(a=this.uiDialog.is(":visible"))||this.uiDialog.show();this.uiDialog.css({top:0,left:0}).position({my:b.join(" "),at:b.join(" "),offset:d.join(" "),of:window,collision:"fit",using:function(e){var g=c(this).css(e).offset().top;g<0&&c(this).css("top",e.top-g)}});a||this.uiDialog.hide( [...]
-b){var d=this,e=d.uiDialog,g=e.is(":data(resizable)"),f=false;switch(a){case "beforeclose":a="beforeClose";break;case "buttons":d._createButtons(b);break;case "closeText":d.uiDialogTitlebarCloseText.text(""+b);break;case "dialogClass":e.removeClass(d.options.dialogClass).addClass("ui-dialog ui-widget ui-widget-content ui-corner-all "+b);break;case "disabled":b?e.addClass("ui-dialog-disabled"):e.removeClass("ui-dialog-disabled");break;case "draggable":b?d._makeDraggable():e.draggable("des [...]
-case "height":f=true;break;case "maxHeight":g&&e.resizable("option","maxHeight",b);f=true;break;case "maxWidth":g&&e.resizable("option","maxWidth",b);f=true;break;case "minHeight":g&&e.resizable("option","minHeight",b);f=true;break;case "minWidth":g&&e.resizable("option","minWidth",b);f=true;break;case "position":d._position(b);break;case "resizable":g&&!b&&e.resizable("destroy");g&&typeof b==="string"&&e.resizable("option","handles",b);!g&&b!==false&&d._makeResizable(b);break;case "titl [...]
-d.uiDialogTitlebar).html(""+(b||" "));break;case "width":f=true;break}c.Widget.prototype._setOption.apply(d,arguments);f&&d._size()},_size:function(){var a=this.options,b;this.element.css({width:"auto",minHeight:0,height:0});b=this.uiDialog.css({height:"auto",width:a.width}).height();this.element.css(a.height==="auto"?{minHeight:Math.max(a.minHeight-b,0),height:"auto"}:{minHeight:0,height:Math.max(a.height-b,0)}).show();this.uiDialog.is(":data(resizable)")&&this.uiDialog.resizable(" [...]
-this._minHeight())}});c.extend(c.ui.dialog,{version:"1.9m2",uuid:0,maxZ:0,getTitleId:function(a){a=a.attr("id");if(!a){this.uuid+=1;a=this.uuid}return"ui-dialog-title-"+a},overlay:function(a){this.$el=c.ui.dialog.overlay.create(a)}});c.extend(c.ui.dialog.overlay,{instances:[],oldInstances:[],maxZ:0,events:c.map("focus,mousedown,mouseup,keydown,keypress,click".split(","),function(a){return a+".dialog-overlay"}).join(" "),create:function(a){if(this.instances.length===0){setTimeout(function [...]
-c(document).bind(c.ui.dialog.overlay.events,function(d){return c(d.target).zIndex()>=c.ui.dialog.overlay.maxZ})},1);c(document).bind("keydown.dialog-overlay",function(d){if(a.options.closeOnEscape&&d.keyCode&&d.keyCode===c.ui.keyCode.ESCAPE){a.close(d);d.preventDefault()}});c(window).bind("resize.dialog-overlay",c.ui.dialog.overlay.resize)}var b=(this.oldInstances.pop()||c("<div></div>").addClass("ui-widget-overlay")).appendTo(document.body).css({width:this.width(),height:this.height()}) [...]
-b.bgiframe();this.instances.push(b);return b},destroy:function(a){this.oldInstances.push(this.instances.splice(c.inArray(a,this.instances),1)[0]);this.instances.length===0&&c([document,window]).unbind(".dialog-overlay");a.remove();var b=0;c.each(this.instances,function(){b=Math.max(b,this.css("z-index"))});this.maxZ=b},height:function(){var a,b;if(c.browser.msie&&c.browser.version<7){a=Math.max(document.documentElement.scrollHeight,document.body.scrollHeight);b=Math.max(document.document [...]
-document.body.offsetHeight);return a<b?c(window).height()+"px":a+"px"}else return c(document).height()+"px"},width:function(){var a,b;if(c.browser.msie&&c.browser.version<7){a=Math.max(document.documentElement.scrollWidth,document.body.scrollWidth);b=Math.max(document.documentElement.offsetWidth,document.body.offsetWidth);return a<b?c(window).width()+"px":a+"px"}else return c(document).width()+"px"},resize:function(){var a=c([]);c.each(c.ui.dialog.overlay.instances,function(){a=a.add(thi [...]
-height:0}).css({width:c.ui.dialog.overlay.width(),height:c.ui.dialog.overlay.height()})}});c.extend(c.ui.dialog.overlay.prototype,{destroy:function(){c.ui.dialog.overlay.destroy(this.$el)}})})(jQuery);
-(function(c){c.widget("ui.menu",{_create:function(){var a=this;this.element.addClass("ui-menu ui-widget ui-widget-content ui-corner-all").attr({role:"listbox","aria-activedescendant":"ui-active-menuitem"}).bind("click.menu",function(b){if(a.options.disabled)return false;if(c(b.target).closest(".ui-menu-item a").length){b.preventDefault();a.select(b)}});this.refresh();if(!this.options.input)this.options.input=this.element.attr("tabIndex",0);this.options.input.bind("keydown.menu",function( [...]
-b.preventDefault();b.stopImmediatePropagation();break;case c.ui.keyCode.PAGE_DOWN:a.nextPage();b.preventDefault();b.stopImmediatePropagation();break;case c.ui.keyCode.UP:a.previous();b.preventDefault();b.stopImmediatePropagation();break;case c.ui.keyCode.DOWN:a.next();b.preventDefault();b.stopImmediatePropagation();break;case c.ui.keyCode.ENTER:a.select();b.preventDefault();b.stopImmediatePropagation();break}})},destroy:function(){c.Widget.prototype.destroy.apply(this,arguments);this.ele [...]
-this.element.children(".ui-menu-item").removeClass("ui-menu-item").removeAttr("role").children("a").removeClass("ui-corner-all").removeAttr("tabIndex").unbind(".menu")},refresh:function(){var a=this;this.element.children("li:not(.ui-menu-item):has(a)").addClass("ui-menu-item").attr("role","menuitem").children("a").addClass("ui-corner-all").attr("tabIndex",-1).bind("mouseenter.menu",function(b){a.options.disabled||a.activate(b,c(this).parent())}).bind("mouseleave.menu",function(){a.option [...]
-a.deactivate()})},activate:function(a,b){this.deactivate();if(this._hasScroll()){var d=b.offset().top-this.element.offset().top,e=this.element.attr("scrollTop"),f=this.element.height();if(d<0)this.element.attr("scrollTop",e+d);else d>f&&this.element.attr("scrollTop",e+d-f+b.height())}this.active=b.eq(0).children("a").addClass("ui-state-hover").attr("id","ui-active-menuitem").end();this._trigger("focus",a,{item:b})},deactivate:function(){if(this.active){this.active.children("a").removeCla [...]
-this._trigger("blur");this.active=null}},next:function(a){this._move("next",".ui-menu-item:first",a)},previous:function(a){this._move("prev",".ui-menu-item:last",a)},first:function(){return this.active&&!this.active.prevAll(".ui-menu-item").length},last:function(){return this.active&&!this.active.nextAll(".ui-menu-item").length},_move:function(a,b,d){if(this.active){a=this.active[a+"All"](".ui-menu-item").eq(0);a.length?this.activate(d,a):this.activate(d,this.element.children(b))}else th [...]
-this.element.children(b))},nextPage:function(a){if(this._hasScroll())if(!this.active||this.last())this.activate(a,this.element.children(":first"));else{var b=this.active.offset().top,d=this.element.height(),e=this.element.children("li").filter(function(){var f=c(this).offset().top-b-d+c(this).height();return f<10&&f>-10});e.length||(e=this.element.children(":last"));this.activate(a,e)}else this.activate(a,this.element.children(!this.active||this.last()?":first":":last"))},previousPage:fu [...]
-this.first())this.activate(a,this.element.children(":last"));else{var b=this.active.offset().top,d=this.element.height();result=this.element.children("li").filter(function(){var e=c(this).offset().top-b+d-c(this).height();return e<10&&e>-10});result.length||(result=this.element.children(":first"));this.activate(a,result)}else this.activate(a,this.element.children(!this.active||this.first()?":last":":first"))},_hasScroll:function(){return this.element.height()<this.element.attr("scrollHei [...]
-a,{item:this.active})}})})(jQuery);
-(function(c){c.ui=c.ui||{};var m=/left|center|right/,n=/top|center|bottom/,p=c.fn.position,q=c.fn.offset;c.fn.position=function(a){if(!a||!a.of)return p.apply(this,arguments);a=c.extend({},a);var b=c(a.of),d=(a.collision||"flip").split(" "),e=a.offset?a.offset.split(" "):[0,0],g,h,i;if(a.of.nodeType===9){g=b.width();h=b.height();i={top:0,left:0}}else if(a.of.scrollTo&&a.of.document){g=b.width();h=b.height();i={top:b.scrollTop(),left:b.scrollLeft()}}else if(a.of.preventDefault){a.at="left [...]
-0;i={top:a.of.pageY,left:a.of.pageX}}else{g=b.outerWidth();h=b.outerHeight();i=b.offset()}c.each(["my","at"],function(){var f=(a[this]||"").split(" ");if(f.length===1)f=m.test(f[0])?f.concat(["center"]):n.test(f[0])?["center"].concat(f):["center","center"];f[0]=m.test(f[0])?f[0]:"center";f[1]=n.test(f[1])?f[1]:"center";a[this]=f});if(d.length===1)d[1]=d[0];e[0]=parseInt(e[0],10)||0;if(e.length===1)e[1]=e[0];e[1]=parseInt(e[1],10)||0;if(a.at[0]==="right")i.left+=g;else if(a.at[0]==="cente [...]
-g/2;if(a.at[1]==="bottom")i.top+=h;else if(a.at[1]==="center")i.top+=h/2;i.left+=e[0];i.top+=e[1];return this.each(function(){var f=c(this),k=f.outerWidth(),l=f.outerHeight(),j=c.extend({},i);if(a.my[0]==="right")j.left-=k;else if(a.my[0]==="center")j.left-=k/2;if(a.my[1]==="bottom")j.top-=l;else if(a.my[1]==="center")j.top-=l/2;j.left=parseInt(j.left);j.top=parseInt(j.top);c.each(["left","top"],function(o,r){c.ui.position[d[o]]&&c.ui.position[d[o]][r](j,{targetWidth:g,targetHeight:h,ele [...]
-elemHeight:l,offset:e,my:a.my,at:a.at})});c.fn.bgiframe&&f.bgiframe();f.offset(c.extend(j,{using:a.using}))})};c.ui.position={fit:{left:function(a,b){var d=c(window);b=a.left+b.elemWidth-d.width()-d.scrollLeft();a.left=b>0?a.left-b:Math.max(0,a.left)},top:function(a,b){var d=c(window);b=a.top+b.elemHeight-d.height()-d.scrollTop();a.top=b>0?a.top-b:Math.max(0,a.top)}},flip:{left:function(a,b){if(b.at[0]!=="center"){var d=c(window);d=a.left+b.elemWidth-d.width()-d.scrollLeft();var e=b.my[0 [...]
--b.elemWidth:b.my[0]==="right"?b.elemWidth:0,g=-2*b.offset[0];a.left+=a.left<0?e+b.targetWidth+g:d>0?e-b.targetWidth+g:0}},top:function(a,b){if(b.at[1]!=="center"){var d=c(window);d=a.top+b.elemHeight-d.height()-d.scrollTop();var e=b.my[1]==="top"?-b.elemHeight:b.my[1]==="bottom"?b.elemHeight:0,g=b.at[1]==="top"?b.targetHeight:-b.targetHeight,h=-2*b.offset[1];a.top+=a.top<0?e+b.targetHeight+h:d>0?e+g+h:0}}}};if(!c.offset.setOffset){c.offset.setOffset=function(a,b){if(/static/.test(c.curC [...]
-"relative";var d=c(a),e=d.offset(),g=parseInt(c.curCSS(a,"top",true),10)||0,h=parseInt(c.curCSS(a,"left",true),10)||0;e={top:b.top-e.top+g,left:b.left-e.left+h};"using"in b?b.using.call(a,e):d.css(e)};c.fn.offset=function(a){var b=this[0];if(!b||!b.ownerDocument)return null;if(a)return this.each(function(){c.offset.setOffset(this,a)});return q.call(this)}}})(jQuery);
-(function(b){b.widget("ui.progressbar",{options:{value:0},_create:function(){this.element.addClass("ui-progressbar ui-widget ui-widget-content ui-corner-all").attr({role:"progressbar","aria-valuemin":this._valueMin(),"aria-valuemax":this._valueMax(),"aria-valuenow":this._value()});this.valueDiv=b("<div class='ui-progressbar-value ui-widget-header ui-corner-left'></div>").appendTo(this.element);this._refreshValue()},destroy:function(){this.element.removeClass("ui-progressbar ui-widget ui- [...]
-this.valueDiv.remove();this._superApply("destroy",arguments)},value:function(a){if(a===undefined)return this._value();this._setOption("value",a);return this},_setOption:function(a,c){switch(a){case "value":this.options.value=c;this._refreshValue();this._trigger("change");break}this._superApply("_setOption",arguments)},_value:function(){var a=this.options.value;if(typeof a!=="number")a=0;if(a<this._valueMin())a=this._valueMin();if(a>this._valueMax())a=this._valueMax();return a},_valueMin: [...]
-_valueMax:function(){return 100},_refreshValue:function(){var a=this.value();this.valueDiv[a===this._valueMax()?"addClass":"removeClass"]("ui-corner-right").width(a+"%");this.element.attr("aria-valuenow",a)}});b.extend(b.ui.progressbar,{version:"1.9m2"})})(jQuery);
-(function(d){d.widget("ui.slider",d.ui.mouse,{widgetEventPrefix:"slide",options:{animate:false,distance:0,max:100,min:0,orientation:"horizontal",range:false,step:1,value:0,values:null},_create:function(){var a=this,b=this.options;this._mouseSliding=this._keySliding=false;this._animateOff=true;this._handleIndex=null;this._detectOrientation();this._mouseInit();this.element.addClass("ui-slider ui-slider-"+this.orientation+" ui-widget ui-widget-content ui-corner-all");b.disabled&&this.elemen [...]
-this.range=d([]);if(b.range){if(b.range===true){this.range=d("<div></div>");if(!b.values)b.values=[this._valueMin(),this._valueMin()];if(b.values.length&&b.values.length!==2)b.values=[b.values[0],b.values[0]]}else this.range=d("<div></div>");this.range.appendTo(this.element).addClass("ui-slider-range");if(b.range==="min"||b.range==="max")this.range.addClass("ui-slider-range-"+b.range);this.range.addClass("ui-widget-header")}d(".ui-slider-handle",this.element).length===0&&d("<a href='#'>< [...]
-if(b.values&&b.values.length)for(;d(".ui-slider-handle",this.element).length<b.values.length;)d("<a href='#'></a>").appendTo(this.element).addClass("ui-slider-handle");this.handles=d(".ui-slider-handle",this.element).addClass("ui-state-default ui-corner-all");this.handle=this.handles.eq(0);this.handles.add(this.range).filter("a").click(function(c){c.preventDefault()}).hover(function(){b.disabled||d(this).addClass("ui-state-hover")},function(){d(this).removeClass("ui-state-hover")}).focus [...]
-else{d(".ui-slider .ui-state-focus").removeClass("ui-state-focus");d(this).addClass("ui-state-focus")}}).blur(function(){d(this).removeClass("ui-state-focus")});this.handles.each(function(c){d(this).data("index.ui-slider-handle",c)});this.handles.keydown(function(c){var e=true,f=d(this).data("index.ui-slider-handle"),g,h,i;if(!a.options.disabled){switch(c.keyCode){case d.ui.keyCode.HOME:case d.ui.keyCode.END:case d.ui.keyCode.PAGE_UP:case d.ui.keyCode.PAGE_DOWN:case d.ui.keyCode.UP:case [...]
-false;if(!a._keySliding){a._keySliding=true;d(this).addClass("ui-state-active");g=a._start(c,f);if(g===false)return}break}i=a.options.step;g=a.options.values&&a.options.values.length?(h=a.values(f)):(h=a.value());switch(c.keyCode){case d.ui.keyCode.HOME:h=a._valueMin();break;case d.ui.keyCode.END:h=a._valueMax();break;case d.ui.keyCode.PAGE_UP:h=a._trimAlignValue(g+(a._valueMax()-a._valueMin())/5);break;case d.ui.keyCode.PAGE_DOWN:h=a._trimAlignValue(g-(a._valueMax()-a._valueMin())/5);br [...]
-a._valueMax())return;h=a._trimAlignValue(g+i);break;case d.ui.keyCode.DOWN:case d.ui.keyCode.LEFT:if(g===a._valueMin())return;h=a._trimAlignValue(g-i);break}a._slide(c,f,h);return e}}).keyup(function(c){var e=d(this).data("index.ui-slider-handle");if(a._keySliding){a._keySliding=false;a._stop(c,e);a._change(c,e);d(this).removeClass("ui-state-active")}});this._refreshValue();this._animateOff=false},destroy:function(){this.handles.remove();this.range.remove();this.element.removeClass("ui-s [...]
-this._mouseDestroy();return this},_mouseCapture:function(a){var b=this.options,c,e,f,g,h,i;if(b.disabled)return false;this.elementSize={width:this.element.outerWidth(),height:this.element.outerHeight()};this.elementOffset=this.element.offset();c={x:a.pageX,y:a.pageY};e=this._normValueFromMouse(c);f=this._valueMax()-this._valueMin()+1;h=this;this.handles.each(function(j){var k=Math.abs(e-h.values(j));if(f>k){f=k;g=d(this);i=j}});if(b.range===true&&this.values(1)===b.min){i+=1;g=d(this.han [...]
-i)===false)return false;this._mouseSliding=true;h._handleIndex=i;g.addClass("ui-state-active").focus();b=g.offset();this._clickOffset=!d(a.target).parents().andSelf().is(".ui-slider-handle")?{left:0,top:0}:{left:a.pageX-b.left-g.width()/2,top:a.pageY-b.top-g.height()/2-(parseInt(g.css("borderTopWidth"),10)||0)-(parseInt(g.css("borderBottomWidth"),10)||0)+(parseInt(g.css("marginTop"),10)||0)};e=this._normValueFromMouse(c);this._slide(a,i,e);return this._animateOff=true},_mouseStart:functi [...]
-_mouseDrag:function(a){var b=this._normValueFromMouse({x:a.pageX,y:a.pageY});this._slide(a,this._handleIndex,b);return false},_mouseStop:function(a){this.handles.removeClass("ui-state-active");this._mouseSliding=false;this._stop(a,this._handleIndex);this._change(a,this._handleIndex);this._clickOffset=this._handleIndex=null;return this._animateOff=false},_detectOrientation:function(){this.orientation=this.options.orientation==="vertical"?"vertical":"horizontal"},_normValueFromMouse:functi [...]
-if(this.orientation==="horizontal"){b=this.elementSize.width;a=a.x-this.elementOffset.left-(this._clickOffset?this._clickOffset.left:0)}else{b=this.elementSize.height;a=a.y-this.elementOffset.top-(this._clickOffset?this._clickOffset.top:0)}b=a/b;if(b>1)b=1;if(b<0)b=0;if(this.orientation==="vertical")b=1-b;a=this._valueMax()-this._valueMin();return this._trimAlignValue(this._valueMin()+b*a)},_start:function(a,b){var c={handle:this.handles[b],value:this.value()};if(this.options.values&&thi [...]
-this.values(b);c.values=this.values()}return this._trigger("start",a,c)},_slide:function(a,b,c){var e;if(this.options.values&&this.options.values.length){e=this.values(b?0:1);if(this.options.values.length===2&&this.options.range===true&&(b===0&&c>e||b===1&&c<e))c=e;if(c!==this.values(b)){e=this.values();e[b]=c;a=this._trigger("slide",a,{handle:this.handles[b],value:c,values:e});this.values(b?0:1);a!==false&&this.values(b,c,true)}}else if(c!==this.value()){a=this._trigger("slide",a,{handl [...]
-value:c});a!==false&&this.value(c)}},_stop:function(a,b){var c={handle:this.handles[b],value:this.value()};if(this.options.values&&this.options.values.length){c.value=this.values(b);c.values=this.values()}this._trigger("stop",a,c)},_change:function(a,b){if(!this._keySliding&&!this._mouseSliding){var c={handle:this.handles[b],value:this.value()};if(this.options.values&&this.options.values.length){c.value=this.values(b);c.values=this.values()}this._trigger("change",a,c)}},value:function(a) [...]
-this._trimAlignValue(a);this._refreshValue();this._change(null,0)}return this._value()},values:function(a,b){var c,e,f;if(arguments.length>1){this.options.values[a]=this._trimAlignValue(b);this._refreshValue();this._change(null,a)}if(arguments.length)if(d.isArray(arguments[0])){c=this.options.values;e=arguments[0];for(f=0;f<c.length;f+=1){c[f]=this._trimAlignValue(e[f]);this._change(null,f)}this._refreshValue()}else return this.options.values&&this.options.values.length?this._values(a):t [...]
-else return this._values()},_setOption:function(a,b){var c,e=0;if(d.isArray(this.options.values))e=this.options.values.length;this._superApply("_setOption",arguments);switch(a){case "disabled":if(b){this.handles.filter(".ui-state-focus").blur();this.handles.removeClass("ui-state-hover");this.handles.attr("disabled","disabled");this.element.addClass("ui-disabled")}else{this.handles.removeAttr("disabled");this.element.removeClass("ui-disabled")}break;case "orientation":this._detectOrientat [...]
-this.orientation);this._refreshValue();break;case "value":this._animateOff=true;this._refreshValue();this._change(null,0);this._animateOff=false;break;case "values":this._animateOff=true;this._refreshValue();for(c=0;c<e;c+=1)this._change(null,c);this._animateOff=false;break}},_value:function(){var a=this.options.value;return a=this._trimAlignValue(a)},_values:function(a){var b,c;if(arguments.length){b=this.options.values[a];return b=this._trimAlignValue(b)}else{b=this.options.values.slic [...]
-0;c<b.length;c+=1)b[c]=this._trimAlignValue(b[c]);return b}},_trimAlignValue:function(a){if(a<this._valueMin())return this._valueMin();if(a>this._valueMax())return this._valueMax();var b=this.options.step>0?this.options.step:1,c=a%b;a=a-c;if(Math.abs(c)*2>=b)a+=c>0?b:-b;return parseFloat(a.toFixed(5))},_valueMin:function(){return this.options.min},_valueMax:function(){return this.options.max},_refreshValue:function(){var a=this.options.range,b=this.options,c=this,e=!this._animateOff?b.an [...]
-f,g={},h,i,j,k;if(this.options.values&&this.options.values.length)this.handles.each(function(l){f=(c.values(l)-c._valueMin())/(c._valueMax()-c._valueMin())*100;g[c.orientation==="horizontal"?"left":"bottom"]=f+"%";d(this).stop(1,1)[e?"animate":"css"](g,b.animate);if(c.options.range===true)if(c.orientation==="horizontal"){if(l===0)c.range.stop(1,1)[e?"animate":"css"]({left:f+"%"},b.animate);if(l===1)c.range[e?"animate":"css"]({width:f-h+"%"},{queue:false,duration:b.animate})}else{if(l===0 [...]
-1)[e?"animate":"css"]({bottom:f+"%"},b.animate);if(l===1)c.range[e?"animate":"css"]({height:f-h+"%"},{queue:false,duration:b.animate})}h=f});else{i=this.value();j=this._valueMin();k=this._valueMax();f=k!==j?(i-j)/(k-j)*100:0;g[c.orientation==="horizontal"?"left":"bottom"]=f+"%";this.handle.stop(1,1)[e?"animate":"css"](g,b.animate);if(a==="min"&&this.orientation==="horizontal")this.range.stop(1,1)[e?"animate":"css"]({width:f+"%"},b.animate);if(a==="max"&&this.orientation==="horizontal")th [...]
-"animate":"css"]({width:100-f+"%"},{queue:false,duration:b.animate});if(a==="min"&&this.orientation==="vertical")this.range.stop(1,1)[e?"animate":"css"]({height:f+"%"},b.animate);if(a==="max"&&this.orientation==="vertical")this.range[e?"animate":"css"]({height:100-f+"%"},{queue:false,duration:b.animate})}}});d.extend(d.ui.slider,{version:"1.9m2"})})(jQuery);
-(function(d){function s(){return++u}function v(){return++w}var u=0,w=0;d.widget("ui.tabs",{options:{add:null,ajaxOptions:null,cache:false,cookie:null,collapsible:false,disable:null,disabled:[],enable:null,event:"click",fx:null,idPrefix:"ui-tabs-",load:null,panelTemplate:"<div></div>",remove:null,select:null,show:null,spinner:"<em>Loading…</em>",tabTemplate:'<li><a href="#{href}"><span>#{label}</span></a></li>'},_create:function(){this._tabify(true)},_setOption:function(c,e){if(c==" [...]
-e==this.options.selected||this.select(e);else{this.options[c]=e;this._tabify()}},_tabId:function(c){return c.title&&c.title.replace(/\s/g,"_").replace(/[^A-Za-z0-9\-_:\.]/g,"")||this.options.idPrefix+s()},_sanitizeSelector:function(c){return c.replace(/:/g,"\\:")},_cookie:function(){var c=this.cookie||(this.cookie=this.options.cookie.name||"ui-tabs-"+v());return d.cookie.apply(null,[c].concat(d.makeArray(arguments)))},_ui:function(c,e){return{tab:c,panel:e,index:this.anchors.index(c)}},_ [...]
-d(this);c.html(c.data("label.tabs")).removeData("label.tabs")})},_tabify:function(c){function e(g,f){g.css({display:""});!d.support.opacity&&f.opacity&&g[0].style.removeAttribute("filter")}this.list=this.element.find("ol,ul").eq(0);this.lis=d("li:has(a[href])",this.list);this.anchors=this.lis.map(function(){return d("a",this)[0]});this.panels=d([]);var a=this,b=this.options,h=/^#.+/;this.anchors.each(function(g,f){var j=d(f).attr("href"),l=j.split("#")[0],p;if(l&&(l===location.toString() [...]
-(p=d("base")[0])&&l===p.href)){j=f.hash;f.href=j}if(h.test(j))a.panels=a.panels.add(a._sanitizeSelector(j));else if(j!="#"){d.data(f,"href.tabs",j);d.data(f,"load.tabs",j.replace(/#.*$/,""));j=a._tabId(f);f.href="#"+j;f=d("#"+j);if(!f.length){f=d(b.panelTemplate).attr("id",j).addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").insertAfter(a.panels[g-1]||a.list);f.data("destroy.tabs",true)}a.panels=a.panels.add(f)}else b.disabled.push(g)});if(c){this.element.addClass("ui-tabs ui- [...]
-this.list.addClass("ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header ui-corner-all");this.lis.addClass("ui-state-default ui-corner-top");this.panels.addClass("ui-tabs-panel ui-widget-content ui-corner-bottom");if(b.selected===undefined){location.hash&&this.anchors.each(function(g,f){if(f.hash==location.hash){b.selected=g;return false}});if(typeof b.selected!="number"&&b.cookie)b.selected=parseInt(a._cookie(),10);if(typeof b.selected!="number"&&this.lis.filter(".ui-tabs-sel [...]
-this.lis.index(this.lis.filter(".ui-tabs-selected"));b.selected=b.selected||(this.lis.length?0:-1)}else if(b.selected===null)b.selected=-1;b.selected=b.selected>=0&&this.anchors[b.selected]||b.selected<0?b.selected:0;b.disabled=d.unique(b.disabled.concat(d.map(this.lis.filter(".ui-state-disabled"),function(g){return a.lis.index(g)}))).sort();d.inArray(b.selected,b.disabled)!=-1&&b.disabled.splice(d.inArray(b.selected,b.disabled),1);this.panels.addClass("ui-tabs-hide");this.lis.removeClas [...]
-if(b.selected>=0&&this.anchors.length){this.panels.eq(b.selected).removeClass("ui-tabs-hide");this.lis.eq(b.selected).addClass("ui-tabs-selected ui-state-active");a.element.queue("tabs",function(){a._trigger("show",null,a._ui(a.anchors[b.selected],a.panels[b.selected]))});this.load(b.selected)}d(window).bind("unload",function(){a.lis.add(a.anchors).unbind(".tabs");a.lis=a.anchors=a.panels=null})}else b.selected=this.lis.index(this.lis.filter(".ui-tabs-selected"));this.element[b.collapsib [...]
-"removeClass"]("ui-tabs-collapsible");b.cookie&&this._cookie(b.selected,b.cookie);c=0;for(var i;i=this.lis[c];c++)d(i)[d.inArray(c,b.disabled)!=-1&&!d(i).hasClass("ui-tabs-selected")?"addClass":"removeClass"]("ui-state-disabled");b.cache===false&&this.anchors.removeData("cache.tabs");this.lis.add(this.anchors).unbind(".tabs");if(b.event!="mouseover"){var k=function(g,f){f.is(":not(.ui-state-disabled)")&&f.addClass("ui-state-"+g)},n=function(g,f){f.removeClass("ui-state-"+g)};this.lis.bin [...]
-function(){k("hover",d(this))});this.lis.bind("mouseout.tabs",function(){n("hover",d(this))});this.anchors.bind("focus.tabs",function(){k("focus",d(this).closest("li"))});this.anchors.bind("blur.tabs",function(){n("focus",d(this).closest("li"))})}var m,o;if(b.fx)if(d.isArray(b.fx)){m=b.fx[0];o=b.fx[1]}else m=o=b.fx;var q=o?function(g,f){d(g).closest("li").addClass("ui-tabs-selected ui-state-active");f.hide().removeClass("ui-tabs-hide").animate(o,o.duration||"normal",function(){e(f,o);a._ [...]
-null,a._ui(g,f[0]))})}:function(g,f){d(g).closest("li").addClass("ui-tabs-selected ui-state-active");f.removeClass("ui-tabs-hide");a._trigger("show",null,a._ui(g,f[0]))},r=m?function(g,f){f.animate(m,m.duration||"normal",function(){a.lis.removeClass("ui-tabs-selected ui-state-active");f.addClass("ui-tabs-hide");e(f,m);a.element.dequeue("tabs")})}:function(g,f){a.lis.removeClass("ui-tabs-selected ui-state-active");f.addClass("ui-tabs-hide");a.element.dequeue("tabs")};this.anchors.bind(b.e [...]
-function(){var g=this,f=d(this).closest("li"),j=a.panels.filter(":not(.ui-tabs-hide)"),l=d(a._sanitizeSelector(this.hash));if(f.hasClass("ui-tabs-selected")&&!b.collapsible||f.hasClass("ui-state-disabled")||f.hasClass("ui-state-processing")||a._trigger("select",null,a._ui(this,l[0]))===false){this.blur();return false}b.selected=a.anchors.index(this);a.abort();if(b.collapsible)if(f.hasClass("ui-tabs-selected")){b.selected=-1;b.cookie&&a._cookie(b.selected,b.cookie);a.element.queue("tabs", [...]
-j)}).dequeue("tabs");this.blur();return false}else if(!j.length){b.cookie&&a._cookie(b.selected,b.cookie);a.element.queue("tabs",function(){q(g,l)});a.load(a.anchors.index(this));this.blur();return false}b.cookie&&a._cookie(b.selected,b.cookie);if(l.length){j.length&&a.element.queue("tabs",function(){r(g,j)});a.element.queue("tabs",function(){q(g,l)});a.load(a.anchors.index(this))}else throw"jQuery UI Tabs: Mismatching fragment identifier.";d.browser.msie&&this.blur()});this.anchors.bind [...]
-function(){return false})},destroy:function(){var c=this.options;this.abort();this.element.unbind(".tabs").removeClass("ui-tabs ui-widget ui-widget-content ui-corner-all ui-tabs-collapsible").removeData("tabs");this.list.removeClass("ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header ui-corner-all");this.anchors.each(function(){var e=d.data(this,"href.tabs");if(e)this.href=e;var a=d(this).unbind(".tabs");d.each(["href","load","cache"],function(b,h){a.removeData(h+".tabs")})} [...]
-"destroy.tabs")?d(this).remove():d(this).removeClass("ui-state-default ui-corner-top ui-tabs-selected ui-state-active ui-state-hover ui-state-focus ui-state-disabled ui-tabs-panel ui-widget-content ui-corner-bottom ui-tabs-hide")});c.cookie&&this._cookie(null,c.cookie);return this},add:function(c,e,a){if(a===undefined)a=this.anchors.length;var b=this,h=this.options;e=d(h.tabTemplate.replace(/#\{href\}/g,c).replace(/#\{label\}/g,e));c=!c.indexOf("#")?c.replace("#",""):this._tabId(d("a",e) [...]
-true);var i=d("#"+c);i.length||(i=d(h.panelTemplate).attr("id",c).data("destroy.tabs",true));i.addClass("ui-tabs-panel ui-widget-content ui-corner-bottom ui-tabs-hide");if(a>=this.lis.length){e.appendTo(this.list);i.appendTo(this.list[0].parentNode)}else{e.insertBefore(this.lis[a]);i.insertBefore(this.panels[a])}h.disabled=d.map(h.disabled,function(k){return k>=a?++k:k});this._tabify();if(this.anchors.length==1){h.selected=0;e.addClass("ui-tabs-selected ui-state-active");i.removeClass("u [...]
-this.element.queue("tabs",function(){b._trigger("show",null,b._ui(b.anchors[0],b.panels[0]))});this.load(0)}this._trigger("add",null,this._ui(this.anchors[a],this.panels[a]));return this},remove:function(c){var e=this.options,a=this.lis.eq(c).remove(),b=this.panels.eq(c).remove();if(a.hasClass("ui-tabs-selected")&&this.anchors.length>1)this.select(c+(c+1<this.anchors.length?1:-1));e.disabled=d.map(d.grep(e.disabled,function(h){return h!=c}),function(h){return h>=c?--h:h});this._tabify(); [...]
-null,this._ui(a.find("a")[0],b[0]));return this},enable:function(c){var e=this.options;if(d.inArray(c,e.disabled)!=-1){this.lis.eq(c).removeClass("ui-state-disabled");e.disabled=d.grep(e.disabled,function(a){return a!=c});this._trigger("enable",null,this._ui(this.anchors[c],this.panels[c]));return this}},disable:function(c){var e=this.options;if(c!=e.selected){this.lis.eq(c).addClass("ui-state-disabled");e.disabled.push(c);e.disabled.sort();this._trigger("disable",null,this._ui(this.anch [...]
-select:function(c){if(typeof c=="string")c=this.anchors.index(this.anchors.filter("[href$="+c+"]"));else if(c===null)c=-1;if(c==-1&&this.options.collapsible)c=this.options.selected;this.anchors.eq(c).trigger(this.options.event+".tabs");return this},load:function(c){var e=this,a=this.options,b=this.anchors.eq(c)[0],h=d.data(b,"load.tabs");this.abort();if(!h||this.element.queue("tabs").length!==0&&d.data(b,"cache.tabs"))this.element.dequeue("tabs");else{this.lis.eq(c).addClass("ui-state-pr [...]
-if(a.spinner){var i=d("span",b);i.data("label.tabs",i.html()).html(a.spinner)}this.xhr=d.ajax(d.extend({},a.ajaxOptions,{url:h,success:function(k,n){d(e._sanitizeSelector(b.hash)).html(k);e._cleanup();a.cache&&d.data(b,"cache.tabs",true);e._trigger("load",null,e._ui(e.anchors[c],e.panels[c]));try{a.ajaxOptions.success(k,n)}catch(m){}},error:function(k,n){e._cleanup();e._trigger("load",null,e._ui(e.anchors[c],e.panels[c]));try{a.ajaxOptions.error(k,n,c,b)}catch(m){}}}));e.element.dequeue( [...]
-abort:function(){this.element.queue([]);this.panels.stop(false,true);this.element.queue("tabs",this.element.queue("tabs").splice(-2,2));if(this.xhr){this.xhr.abort();delete this.xhr}this._cleanup();return this},url:function(c,e){this.anchors.eq(c).removeData("cache.tabs").data("load.tabs",e);return this},length:function(){return this.anchors.length}});d.extend(d.ui.tabs,{version:"1.9m2"});d.extend(d.ui.tabs.prototype,{rotation:null,rotate:function(c,e){var a=this,b=this.options,h=a._rota [...]
-function(i){clearTimeout(a.rotation);a.rotation=setTimeout(function(){var k=b.selected;a.select(++k<a.anchors.length?k:0)},c);i&&i.stopPropagation()});e=a._unrotate||(a._unrotate=!e?function(i){i.clientX&&a.rotate(null)}:function(){t=b.selected;h()});if(c){this.element.bind("tabsshow",h);this.anchors.bind(b.event+".tabs",e);h()}else{clearTimeout(a.rotation);this.element.unbind("tabsshow",h);this.anchors.unbind(b.event+".tabs",e);delete this._rotate;delete this._unrotate}return this}})})( [...]
-(function(b){b(document.body).is("[role]")||b(document.body).attr("role","application");var f=0;b.widget("ui.tooltip",{options:{tooltipClass:"ui-widget-content",content:function(){return b(this).attr("title")},position:{my:"left center",at:"right center",offset:"15 0"}},_init:function(){var c=this;this.tooltip=b("<div></div>").attr("id","ui-tooltip-"+f++).attr("role","tooltip").attr("aria-hidden","true").addClass("ui-tooltip ui-widget ui-corner-all").addClass(this.options.tooltipClass).a [...]
-this.tooltipContent=b("<div></div>").addClass("ui-tooltip-content").appendTo(this.tooltip);this.opacity=this.tooltip.css("opacity");this.element.bind("focus.tooltip mouseenter.tooltip",function(a){c.open(a)}).bind("blur.tooltip mouseleave.tooltip",function(a){c.close(a)})},enable:function(){this.options.disabled=false},disable:function(){this.options.disabled=true},destroy:function(){this.tooltip.remove();b.Widget.prototype.destroy.apply(this,arguments)},widget:function(){return this.too [...]
-this.element;if(!(this.current&&this.current[0]==a[0])){var d=this;this.current=a;this.currentTitle=a.attr("title");var e=this.options.content.call(a[0],function(g){d.current==a&&d._show(c,a,g)});e&&d._show(c,a,e)}},_show:function(c,a,d){if(d){a.attr("title","");if(!this.options.disabled){this.tooltipContent.html(d);this.tooltip.css({top:0,left:0}).show().position(b.extend(this.options.position,{of:a})).hide();this.tooltip.attr("aria-hidden","false");a.attr("aria-describedby",this.toolti [...]
-if(this.tooltip.is(":animated"))this.tooltip.stop().show().fadeTo("normal",this.opacity);else this.tooltip.is(":visible")?this.tooltip.fadeTo("normal",this.opacity):this.tooltip.fadeIn();this._trigger("open",c)}}},close:function(c){if(this.current){var a=this.current.attr("title",this.currentTitle);this.current=null;if(!this.options.disabled){a.removeAttr("aria-describedby");this.tooltip.attr("aria-hidden","true");this.tooltip.is(":animated")?this.tooltip.stop().fadeTo("normal",0,functio [...]
-"")}):this.tooltip.stop().fadeOut();this._trigger("close",c)}}}})})(jQuery);
+/*! jQuery UI - v1.9.0 - 2012-10-05
+* http://jqueryui.com
+* Includes: jquery.ui.core.js, jquery.ui.widget.js, jquery.ui.mouse.js, jquery.ui.position.js, jquery.ui.accordion.js, jquery.ui.autocomplete.js, jquery.ui.button.js, jquery.ui.datepicker.js, jquery.ui.dialog.js, jquery.ui.draggable.js, jquery.ui.droppable.js, jquery.ui.effect.js, jquery.ui.effect-blind.js, jquery.ui.effect-bounce.js, jquery.ui.effect-clip.js, jquery.ui.effect-drop.js, jquery.ui.effect-explode.js, jquery.ui.effect-fade.js, jquery.ui.effect-fold.js, jquery.ui.effect-highl [...]
+* Copyright (c) 2012 jQuery Foundation and other contributors Licensed MIT */
+
+(function(e,t){function i(t,n){var r,i,o,u=t.nodeName.toLowerCase();return"area"===u?(r=t.parentNode,i=r.name,!t.href||!i||r.nodeName.toLowerCase()!=="map"?!1:(o=e("img[usemap=#"+i+"]")[0],!!o&&s(o))):(/input|select|textarea|button|object/.test(u)?!t.disabled:"a"===u?t.href||n:n)&&s(t)}function s(t){return!e(t).parents().andSelf().filter(function(){return e.css(this,"visibility")==="hidden"||e.expr.filters.hidden(this)}).length}var n=0,r=/^ui-id-\d+$/;e.ui=e.ui||{};if(e.ui.version)return [...]
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
index 7c24308..bc3fbc8 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
@@ -1,154 +1,2 @@
-/*!
- * jQuery JavaScript Library v1.4.2
- * http://jquery.com/
- *
- * Copyright 2010, John Resig
- * Dual licensed under the MIT or GPL Version 2 licenses.
- * http://jquery.org/license
- *
- * Includes Sizzle.js
- * http://sizzlejs.com/
- * Copyright 2010, The Dojo Foundation
- * Released under the MIT, BSD, and GPL Licenses.
- *
- * Date: Sat Feb 13 22:33:48 2010 -0500
- */
-(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o<i;o++)e(a[o],b,f?d.call(a[o],o,e(a[o],b)):d,j); [...]
-e(a[0],b):w}function J(){return(new Date).getTime()}function Y(){return false}function Z(){return true}function na(a,b,d){d[0].type=a;return c.event.handle.apply(b,d)}function oa(a){var b,d=[],f=[],e=arguments,j,i,o,k,n,r;i=c.data(this,"events");if(!(a.liveFired===this||!i||!i.live||a.button&&a.type==="click")){a.liveFired=this;var u=i.live.slice(0);for(k=0;k<u.length;k++){i=u[k];i.origType.replace(O,"")===a.type?f.push(i.selector):u.splice(k--,1)}j=c(a.target).closest(f,a.currentTarget) [...]
-j.length;n<r;n++)for(k=0;k<u.length;k++){i=u[k];if(j[n].selector===i.selector){o=j[n].elem;f=null;if(i.preType==="mouseenter"||i.preType==="mouseleave")f=c(a.relatedTarget).closest(i.selector)[0];if(!f||f!==o)d.push({elem:o,handleObj:i})}}n=0;for(r=d.length;n<r;n++){j=d[n];a.currentTarget=j.elem;a.data=j.handleObj.data;a.handleObj=j.handleObj;if(j.handleObj.origHandler.apply(j.elem,e)===false){b=false;break}}return b}}function pa(a,b){return"live."+(a&&a!=="*"?a+".":"")+b.replace(/\./g," [...]
-"&")}function qa(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function ra(a,b){var d=0;b.each(function(){if(this.nodeName===(a[d]&&a[d].nodeName)){var f=c.data(a[d++]),e=c.data(this,f);if(f=f&&f.events){delete e.handle;e.events={};for(var j in f)for(var i in f[j])c.event.add(this,j,f[j][i],f[j][i].data)}}})}function sa(a,b,d){var f,e,j;b=b&&b[0]?b[0].ownerDocument||b[0]:s;if(a.length===1&&typeof a[0]==="string"&&a[0].length<512&&b===s&&!ta.test(a[0])&&(c.support.checkClone||!ua [...]
-true;if(j=c.fragments[a[0]])if(j!==1)f=j}if(!f){f=b.createDocumentFragment();c.clean(a,b,f,d)}if(e)c.fragments[a[0]]=j?f:1;return{fragment:f,cacheable:e}}function K(a,b){var d={};c.each(va.concat.apply([],va.slice(0,b)),function(){d[this]=a});return d}function wa(a){return"scrollTo"in a&&a.document?a:a.nodeType===9?a.defaultView||a.parentWindow:false}var c=function(a,b){return new c.fn.init(a,b)},Ra=A.jQuery,Sa=A.$,s=A.document,T,Ta=/^[^<]*(<[\w\W]+>)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]* [...]
-Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return this}if(typeof a==="string")if(( [...]
-(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagNam [...]
-a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.c [...]
-"find")f.selector=this.selector+(this.selector?" ":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:fu [...]
-function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b<d;b++)if((e=arguments[b])!=null)for(j in e){i=a[j];o=e[j];if(a!==o)if(f&&o&&(c.isPlainObject(o)||c.isArray(o))){i=i&&(c.i [...]
-c.isArray(i))?i:c.isArray(o)?[]:{};a[j]=c.extend(f,i,o)}else if(o!==w)a[j]=o}return a};c.extend({noConflict:function(a){A.$=Sa;if(a)A.jQuery=Ra;return c},isReady:false,ready:function(){if(!c.isReady){if(!s.body)return setTimeout(c.ready,13);c.isReady=true;if(Q){for(var a,b=0;a=Q[b++];)a.call(s,c);Q=null}c.fn.triggerHandler&&c(s).triggerHandler("ready")}},bindReady:function(){if(!xa){xa=true;if(s.readyState==="complete")return c.ready();if(s.addEventListener){s.addEventListener("DOMConten [...]
-L,false);A.addEventListener("load",c.ready,false)}else if(s.attachEvent){s.attachEvent("onreadystatechange",L);A.attachEvent("onload",c.ready);var a=false;try{a=A.frameElement==null}catch(b){}s.documentElement.doScroll&&a&&ma()}}},isFunction:function(a){return $.call(a)==="[object Function]"},isArray:function(a){return $.call(a)==="[object Array]"},isPlainObject:function(a){if(!a||$.call(a)!=="[object Object]"||a.nodeType||a.setInterval)return false;if(a.constructor&&!aa.call(a,"construc [...]
-"isPrototypeOf"))return false;var b;for(b in a);return b===w||aa.call(a,b)},isEmptyObject:function(a){for(var b in a)return false;return true},error:function(a){throw a;},parseJSON:function(a){if(typeof a!=="string"||!a)return null;a=c.trim(a);if(/^[\],:{}\s]*$/.test(a.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,"")))return A.JSON&&A.JSON.parse?A.JSON.parse(a):(new Functi [...]
-a))();else c.error("Invalid JSON: "+a)},noop:function(){},globalEval:function(a){if(a&&Va.test(a)){var b=s.getElementsByTagName("head")[0]||s.documentElement,d=s.createElement("script");d.type="text/javascript";if(c.support.scriptEval)d.appendChild(s.createTextNode(a));else d.text=a;b.insertBefore(d,b.firstChild);b.removeChild(d)}},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,b,d){var f,e=0,j=a.length,i=j===w||c.isFunction(a);if(d) [...]
-d)===false)break}else for(;e<j;){if(b.apply(a[e++],d)===false)break}else if(i)for(f in a){if(b.call(a[f],f,a[f])===false)break}else for(d=a[0];e<j&&b.call(d,e,d)!==false;d=a[++e]);return a},trim:function(a){return(a||"").replace(Wa,"")},makeArray:function(a,b){b=b||[];if(a!=null)a.length==null||typeof a==="string"||c.isFunction(a)||typeof a!=="function"&&a.setInterval?ba.call(b,a):c.merge(b,a);return b},inArray:function(a,b){if(b.indexOf)return b.indexOf(a);for(var d=0,f=b.length;d<f;d++ [...]
-a)return d;return-1},merge:function(a,b){var d=a.length,f=0;if(typeof b.length==="number")for(var e=b.length;f<e;f++)a[d++]=b[f];else for(;b[f]!==w;)a[d++]=b[f++];a.length=d;return a},grep:function(a,b,d){for(var f=[],e=0,j=a.length;e<j;e++)!d!==!b(a[e],e)&&f.push(a[e]);return f},map:function(a,b,d){for(var f=[],e,j=0,i=a.length;j<i;j++){e=b(a[j],j,d);if(e!=null)f[f.length]=e}return f.concat.apply([],f)},guid:1,proxy:function(a,b,d){if(arguments.length===2)if(typeof b==="string"){d=a;a=d [...]
-!c.isFunction(b)){d=b;b=w}if(!b&&a)b=function(){return a.apply(d||this,arguments)};if(a)b.guid=a.guid=a.guid||b.guid||c.guid++;return b},uaMatch:function(a){a=a.toLowerCase();a=/(webkit)[ \/]([\w.]+)/.exec(a)||/(opera)(?:.*version)?[ \/]([\w.]+)/.exec(a)||/(msie) ([\w.]+)/.exec(a)||!/compatible/.test(a)&&/(mozilla)(?:.*? rv:([\w.]+))?/.exec(a)||[];return{browser:a[1]||"",version:a[2]||"0"}},browser:{}});P=c.uaMatch(P);if(P.browser){c.browser[P.browser]=true;c.browser.version=P.version}if [...]
-true;if(ya)c.inArray=function(a,b){return ya.call(b,a)};T=c(s);if(s.addEventListener)L=function(){s.removeEventListener("DOMContentLoaded",L,false);c.ready()};else if(s.attachEvent)L=function(){if(s.readyState==="complete"){s.detachEvent("onreadystatechange",L);c.ready()}};(function(){c.support={};var a=s.documentElement,b=s.createElement("script"),d=s.createElement("div"),f="script"+J();d.style.display="none";d.innerHTML=" <link/><table></table><a href='/a' style='color:red;float:left [...]
-var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select [...]
-parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCl [...]
-false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="<input type='radio' name='radiotest' checked='checked'/>";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="non [...]
-s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embe [...]
-applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.rem [...]
-else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c. [...]
-a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a; [...]
-w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|inpu [...]
-cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1)if(e.className){for(var j=" "+e.cla [...]
-i=e.className,o=0,k=b.length;o<k;o++)if(j.indexOf(" "+b[o]+" ")<0)i+=" "+b[o];e.className=c.trim(i)}else e.className=a}return this},removeClass:function(a){if(c.isFunction(a))return this.each(function(k){var n=c(this);n.removeClass(a.call(this,k,n.attr("class")))});if(a&&typeof a==="string"||a===w)for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1&&e.className)if(a){for(var j=(" "+e.className+" ").replace(Aa," "),i=0,o=b.length;i<o;i++)j=j.replace(" "+ [...]
-" ");e.className=c.trim(j)}else e.className=""}return this},toggleClass:function(a,b){var d=typeof a,f=typeof b==="boolean";if(c.isFunction(a))return this.each(function(e){var j=c(this);j.toggleClass(a.call(this,e,j.attr("class"),b),b)});return this.each(function(){if(d==="string")for(var e,j=0,i=c(this),o=b,k=a.split(ca);e=k[j++];){o=f?o:!i.hasClass(e);i[o?"addClass":"removeClass"](e)}else if(d==="undefined"||d==="boolean"){this.className&&c.data(this,"__className__",this.className);thi [...]
-this.className||a===false?"":c.data(this,"__className__")||""}})},hasClass:function(a){a=" "+a+" ";for(var b=0,d=this.length;b<d;b++)if((" "+this[b].className+" ").replace(Aa," ").indexOf(a)>-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j< [...]
-e[j];if(i.selected){a=c(i).val();if(b)return a;f.push(a)}}return f}if(Ba.test(b.type)&&!c.support.checkOn)return b.getAttribute("value")===null?"on":b.value;return(b.value||"").replace(Za,"")}return w}var o=c.isFunction(a);return this.each(function(k){var n=c(this),r=a;if(this.nodeType===1){if(o)r=a.call(this,k,n.val());if(typeof r==="number")r+="";if(c.isArray(r)&&Ba.test(this.type))this.checked=c.inArray(n.val(),r)>=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",t [...]
-c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type proper [...]
-a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=functi [...]
-function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1) [...]
-k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n [...]
-C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B<r.length;B++){u=r[B];if(d.guid===u.guid){if(i||k.test(u.namespace)){f==null&&r.splice(B--,1);n.remove&&n.remove [...]
-null)break}}if(r.length===0||f!=null&&r.length===1){if(!n.teardown||n.teardown.call(a,o)===false)Ca(a,e,z.handle);delete C[e]}}else for(var B=0;B<r.length;B++){u=r[B];if(i||k.test(u.namespace)){c.event.remove(a,n,u.handler,B);r.splice(B--,1)}}}if(c.isEmptyObject(C)){if(b=z.handle)b.elem=null;delete z.events;delete z.handle;c.isEmptyObject(z)&&c.removeData(a)}}}}},trigger:function(a,b,d,f){var e=a.type||a;if(!f){a=typeof a==="object"?a[G]?a:c.extend(c.Event(e),a):c.Event(e);if(e.indexOf(" [...]
-e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(! [...]
-f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.e [...]
-if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e<j;e++){var i=d[e];if(b||f.test(i.namespace)){a.handler=i.handler;a.data=i.data;a.handleObj=i;i=i.handler.apply(this,arguments);if(i!==w){a.result=i;if(i===false){a.preventDefault();a.stopPropagation()}}if(a.isImmediatePropagationStopped())break}}}return a.result},props:"altKey attrChange at [...]
-fix:function(a){if(a[G])return a;var b=a;a=c.Event(b);for(var d=this.props.length,f;d;){f=this.props[--d];a[f]=b[f]}if(!a.target)a.target=a.srcElement||s;if(a.target.nodeType===3)a.target=a.target.parentNode;if(!a.relatedTarget&&a.fromElement)a.relatedTarget=a.fromElement===a.target?a.toElement:a.fromElement;if(a.pageX==null&&a.clientX!=null){b=s.documentElement;d=s.body;a.pageX=a.clientX+(b&&b.scrollLeft||d&&d.scrollLeft||0)-(b&&b.clientLeft||d&&d.clientLeft||0);a.pageY=a.clientY+(b&&b. [...]
-d&&d.scrollTop||0)-(b&&b.clientTop||d&&d.clientTop||0)}if(!a.which&&(a.charCode||a.charCode===0?a.charCode:a.keyCode))a.which=a.charCode||a.keyCode;if(!a.metaKey&&a.ctrlKey)a.metaKey=a.ctrlKey;if(!a.which&&a.button!==w)a.which=a.button&1?1:a.button&2?3:a.button&4?2:0;return a},guid:1E8,proxy:c.proxy,special:{ready:{setup:c.bindReady,teardown:c.noop},live:{add:function(a){c.event.add(this,a.origType,c.extend({},a,{handler:oa}))},remove:function(a){var b=true,d=a.origType.replace(O,"");c.e [...]
-"events").live||[],function(){if(d===this.origType.replace(O,""))return b=false});b&&c.event.remove(this,a.origType,oa)}},beforeunload:{setup:function(a,b,d){if(this.setInterval)this.onbeforeunload=d;return false},teardown:function(a,b){if(this.onbeforeunload===b)this.onbeforeunload=null}}}};var Ca=s.removeEventListener?function(a,b,d){a.removeEventListener(b,d,false)}:function(a,b,d){a.detachEvent("on"+b,d)};c.Event=function(a){if(!this.preventDefault)return new c.Event(a);if(a&&a.type) [...]
-a;this.type=a.type}else this.type=a;this.timeStamp=J();this[G]=true};c.Event.prototype={preventDefault:function(){this.isDefaultPrevented=Z;var a=this.originalEvent;if(a){a.preventDefault&&a.preventDefault();a.returnValue=false}},stopPropagation:function(){this.isPropagationStopped=Z;var a=this.originalEvent;if(a){a.stopPropagation&&a.stopPropagation();a.cancelBubble=true}},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=Z;this.stopPropagation()},isDefaultPrevented [...]
-isImmediatePropagationStopped:Y};var Da=function(a){var b=a.relatedTarget;try{for(;b&&b!==this;)b=b.parentNode;if(b!==this){a.type=a.data;c.event.handle.apply(this,arguments)}}catch(d){}},Ea=function(a){a.type=a.data;c.event.handle.apply(this,arguments)};c.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){c.event.special[a]={setup:function(d){c.event.add(this,b,d&&d.selector?Ea:Da,a)},teardown:function(d){c.event.remove(this,b,d&&d.selector?Ea:Da)}}});if(!c.support.submit [...]
-{setup:function(){if(this.nodeName.toLowerCase()!=="form"){c.event.add(this,"click.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="submit"||d==="image")&&c(b).closest("form").length)return na("submit",this,arguments)});c.event.add(this,"keypress.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="text"||d==="password")&&c(b).closest("form").length&&a.keyCode===13)return na("submit",this,arguments)})}else return false},teardown:function(){c.event.remove(this,".specialS [...]
-if(!c.support.changeBubbles){var da=/textarea|input|select/i,ea,Fa=function(a){var b=a.type,d=a.value;if(b==="radio"||b==="checkbox")d=a.checked;else if(b==="select-multiple")d=a.selectedIndex>-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_ [...]
-e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.t [...]
-"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventLi [...]
-d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j<o;j++)c.event.add(this[j],d,i,f)}return this}});c.fn.extend({unbind:function(a,b){if(typeof a=== [...]
-!a.preventDefault)for(var d in a)this.unbind(d,a[d]);else{d=0;for(var f=this.length;d<f;d++)c.event.remove(this[d],a,b)}return this},delegate:function(a,b,d,f){return this.live(b,d,f,a)},undelegate:function(a,b,d){return arguments.length===0?this.unbind("live"):this.die(b,null,d,a)},trigger:function(a,b){return this.each(function(){c.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0]){a=c.Event(a);a.preventDefault();a.stopPropagation();c.event.trigger(a,b,this[0]);return [...]
-toggle:function(a){for(var b=arguments,d=1;d<b.length;)c.proxy(a,b[d++]);return this.click(c.proxy(a,function(f){var e=(c.data(this,"lastToggle"+a.guid)||0)%d;c.data(this,"lastToggle"+a.guid,e+1);f.preventDefault();return b[e].apply(this,arguments)||false}))},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}});var Ga={focus:"focusin",blur:"focusout",mouseenter:"mouseover",mouseleave:"mouseout"};c.each(["live","die"],function(a,b){c.fn[b]=function(d,f,e,j){var i,o=0,k,n,r=j| [...]
-u=j?this:c(this.context);if(c.isFunction(f)){e=f;f=w}for(d=(d||"").split(" ");(i=d[o++])!=null;){j=O.exec(i);k="";if(j){k=j[0];i=i.replace(O,"")}if(i==="hover")d.push("mouseenter"+k,"mouseleave"+k);else{n=i;if(i==="focus"||i==="blur"){d.push(Ga[i]+k);i+=k}else i=(Ga[i]||i)+k;b==="live"?u.each(function(){c.event.add(this,pa(i,r),{data:f,selector:r,handler:e,origType:i,origHandler:e,preType:n})}):u.unbind(pa(i,r),e)}}return this}});c.each("blur focus focusin focusout load resize scroll unl [...]
-function(a,b){c.fn[b]=function(d){return d?this.bind(b,d):this.trigger(b)};if(c.attrFn)c.attrFn[b]=true});A.attachEvent&&!A.addEventListener&&A.attachEvent("onunload",function(){for(var a in c.cache)if(c.cache[a].handle)try{c.event.remove(c.cache[a].handle.elem)}catch(b){}});(function(){function a(g){for(var h="",l,m=0;g[m];m++){l=g[m];if(l.nodeType===3||l.nodeType===4)h+=l.nodeValue;else if(l.nodeType!==8)h+=a(l.childNodes)}return h}function b(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++ [...]
-if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1&&!p){t.sizcache=l;t.sizset=q}if(t.nodeName.toLowerCase()===h){y=t;break}t=t[g]}m[q]=y}}}function d(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1){if(!p){t.sizcache=l;t.sizset=q}if(typeof h!=="string"){if(t===h){y=true;break}}else if(k.filter(h,[t]).length>0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?: [...]
-e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g]) [...]
-t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||( [...]
-g);if(j.call(y)==="[object Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else l.push.apply(l,y);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h<g.length;h++)g[h]===g[h-1]&&g.splice(h--,1)}return g};k.matches=function(g,h){return k(g,null,null,h)};k.find=function(g,h,l){var m,q [...]
-for(var p=0,v=n.order.length;p<v;p++){var t=n.order[p];if(q=n.leftMatch[t].exec(g)){var y=q[1];q.splice(1,1);if(y.substr(y.length-1)!=="\\"){q[1]=(q[1]||"").replace(/\\/g,"");m=n.find[t](q,h,l);if(m!=null){g=g.replace(n.match[t],"");break}}}}m||(m=h.getElementsByTagName("*"));return{set:m,expr:g}};k.filter=function(g,h,l,m){for(var q=g,p=[],v=h,t,y,S=h&&h[0]&&x(h[0]);g&&h.length;){for(var H in n.filter)if((t=n.leftMatch[H].exec(g))!=null&&t[2]){var M=n.filter[H],I,D;D=t[1];y=false;t.spli [...]
-1)!=="\\"){if(v===p)p=[];if(n.preFilter[H])if(t=n.preFilter[H](t,v,l,p,m,S)){if(t===true)continue}else y=I=true;if(t)for(var U=0;(D=v[U])!=null;U++)if(D){I=M(D,t,U,v);var Ha=m^!!I;if(l&&I!=null)if(Ha)y=true;else v[U]=false;else if(Ha){p.push(D);y=true}}if(I!==w){l||(v=p);g=g.replace(n.match[H],"");if(!y)return[];break}}}if(g===q)if(y==null)k.error(g);else break;q=g}return v};k.error=function(g){throw"Syntax error, unrecognized expression: "+g;};var n=k.selectors={order:["ID","NAME","TAG" [...]
-CLASS:/\.((?:[\w\u00c0-\uFFFF-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF-]|\\.)+)\s*(?:(\S?=)\s*(['"]*)(.*?)\3|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\((even|odd|[\dn+-]*)\))?/,POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/},leftMatch:{},attrMap:{"class":"className","for":"htmlFor"},attr [...]
-relative:{"+":function(g,h){var l=typeof h==="string",m=l&&!/\W/.test(h);l=l&&!m;if(m)h=h.toLowerCase();m=0;for(var q=g.length,p;m<q;m++)if(p=g[m]){for(;(p=p.previousSibling)&&p.nodeType!==1;);g[m]=l||p&&p.nodeName.toLowerCase()===h?p||false:p===h}l&&k.filter(h,g,true)},">":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m<q;m++){var p=g[m];if(p){l=p.parentNode;g[m]=l.nodeName.toLowerCase()===h?l:false}}}else{m=0;for(q=g.length;m<q;m+ [...]
-l?p.parentNode:p.parentNode===h;l&&k.filter(h,g,true)}},"":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("parentNode",h,m,g,p,l)},"~":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("previousSibling",h,m,g,p,l)}},find:{ID:function(g,h,l){if(typeof h.getElementById!=="undefined"&&!l)return(g=h.getElementById(g[1]))?[g]:[]},NAME:function(g,h){if(typeof h.getElementsByName!=="undefined"){ [...]
-h=h.getElementsByName(g[1]);for(var m=0,q=h.length;m<q;m++)h[m].getAttribute("name")===g[1]&&l.push(h[m]);return l.length===0?null:l}},TAG:function(g,h){return h.getElementsByTagName(g[1])}},preFilter:{CLASS:function(g,h,l,m,q,p){g=" "+g[1].replace(/\\/g,"")+" ";if(p)return g;p=0;for(var v;(v=h[p])!=null;p++)if(v)if(q^(v.className&&(" "+v.className+" ").replace(/[\t\n]/g," ").indexOf(g)>=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG: [...]
-CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m [...]
-g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:functio [...]
-text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.no [...]
-setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return h<l[3]-0},gt:function(g,h,l){return h>l[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q=== [...]
-h[3];l=0;for(m=h.length;l<m;l++)if(h[l]===g)return false;return true}else k.error("Syntax error, unrecognized expression: "+q)},CHILD:function(g,h){var l=h[1],m=g;switch(l){case "only":case "first":for(;m=m.previousSibling;)if(m.nodeType===1)return false;if(l==="first")return true;m=g;case "last":for(;m=m.nextSibling;)if(m.nodeType===1)return false;return true;case "nth":l=h[2];var q=h[3];if(l===1&&q===0)return true;h=h[0];var p=g.parentNode;if(p&&(p.sizcache!==h||!g.nodeIndex)){var v=0; [...]
-m.nextSibling)if(m.nodeType===1)m.nodeIndex=++v;p.sizcache=h}g=g.nodeIndex-q;return l===0?g===0:g%l===0&&g/l>=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m [...]
-"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+) [...]
-h){return"\\"+(h-0+1)}))}var z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l<m;l++)h.push(g[l]);else for(l=0;g[l];l++)h.push(g[l]);return h}}var B;if(s.documentElement.compareDocumentPosition)B=function(g,h){if(!g.compareDocume [...]
-!h.compareDocumentPosition){if(g==h)i=true;return g.compareDocumentPosition?-1:1}g=g.compareDocumentPosition(h)&4?-1:g===h?0:1;if(g===0)i=true;return g};else if("sourceIndex"in s.documentElement)B=function(g,h){if(!g.sourceIndex||!h.sourceIndex){if(g==h)i=true;return g.sourceIndex?-1:1}g=g.sourceIndex-h.sourceIndex;if(g===0)i=true;return g};else if(s.createRange)B=function(g,h){if(!g.ownerDocument||!h.ownerDocument){if(g==h)i=true;return g.ownerDocument?-1:1}var l=g.ownerDocument.createR [...]
-h.ownerDocument.createRange();l.setStart(g,0);l.setEnd(g,0);m.setStart(h,0);m.setEnd(h,0);g=l.compareBoundaryPoints(Range.START_TO_END,m);if(g===0)i=true;return g};(function(){var g=s.createElement("div"),h="script"+(new Date).getTime();g.innerHTML="<a name='"+h+"'/>";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!= [...]
-q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTM [...]
-if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="<p class='TEST'></p>";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l]; [...]
-(function(){var g=s.createElement("div");g.innerHTML="<div class='test e'></div><div class='test'></div>";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPositi [...]
-function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q<p;q++)k(g,h[q],l);return k.filter(m,l)};c.find=k;c.expr=k.selectors;c.expr[":"]=c.expr.filters;c.unique=k.uniqueSort;c.text=a;c.isXMLDoc=x;c.contains=E})();var eb=/Unti [...]
-gb=/,/;R=Array.prototype.slice;var Ia=function(a,b,d){if(c.isFunction(b))return c.grep(a,function(e,j){return!!b.call(e,j,e)===d});else if(b.nodeType)return c.grep(a,function(e){return e===b===d});else if(typeof b==="string"){var f=c.grep(a,function(e){return e.nodeType===1});if(Ua.test(b))return c.filter(b,f,!d);else b=c.filter(b,f)}return c.grep(a,function(e){return c.inArray(e,b)>=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f<e;f+ [...]
-c.find(a,this[f],b);if(f>0)for(var j=d;j<b.length;j++)for(var i=0;i<d;i++)if(b[i]===b[j]){b.splice(j--,1);break}}return b},has:function(a){var b=c(a);return this.filter(function(){for(var d=0,f=b.length;d<f;d++)if(c.contains(this,b[d]))return true})},not:function(a){return this.pushStack(Ia(this,a,false),"not",a)},filter:function(a){return this.pushStack(Ia(this,a,true),"filter",a)},is:function(a){return!!a&&c.filter(a,this).length>0},closest:function(a,b){if(c.isArray(a)){var d=[],f=thi [...]
-{},i;if(f&&a.length){e=0;for(var o=a.length;e<o;e++){i=a[e];j[i]||(j[i]=c.expr.match.POS.test(i)?c(i,b||this.context):i)}for(;f&&f.ownerDocument&&f!==b;){for(i in j){e=j[i];if(e.jquery?e.index(f)>-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||t [...]
-"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"par [...]
-d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeNam [...]
-a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeTy [...]
-1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/<tbody/i,jb=/<|&#?\w+;/,ta=/<script|<object|<embed|<option|<style/i,ua=/checked\s*(?:[^=]|=\s*.checked.)/i,Ma=function(a,b, [...]
-a:b+"></"+d+">"},F={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div<div>","</div>"];c.fn. [...]
-c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(th [...]
-wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChi [...]
-prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.pa [...]
-this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChi [...]
-return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].i [...]
-""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b<d;b++)if(this[b].nodeType===1){c.cleanData(this[b].getElementsByTagName("*"));this[b].innerHTML=a}}catch(f){this.empty().append(a)}}else c.isFunction(a)?this.each(function(e){var j=c(this),i=j.html();j.empty().append(function(){return a.call(this,e,i)})}):this.empty().append(a);return this},replaceWith [...]
-this[0].parentNode){if(c.isFunction(a))return this.each(function(b){var d=c(this),f=d.html();d.replaceWith(a.call(this,b,f))});if(typeof a!=="string")a=c(a).detach();return this.each(function(){var b=this.nextSibling,d=this.parentNode;c(this).remove();b?c(b).before(a):c(d).append(a)})}else return this.pushStack(c(c.isFunction(a)?a():a),"replaceWith",a)},detach:function(a){return this.remove(a,true)},domManip:function(a,b,d){function f(u){return c.nodeName(u,"table")?u.getElementsByTagNam [...]
-u.appendChild(u.ownerDocument.createElement("tbody")):u}var e,j,i=a[0],o=[],k;if(!c.support.checkClone&&arguments.length===3&&typeof i==="string"&&ua.test(i))return this.each(function(){c(this).domManip(a,b,d,true)});if(c.isFunction(i))return this.each(function(u){var z=c(this);a[0]=i.call(this,u,b?z.html():w);z.domManip(a,b,d)});if(this[0]){e=i&&i.parentNode;e=c.support.parentNode&&e&&e.nodeType===11&&e.childNodes.length===this.length?{fragment:e}:sa(a,this,o);k=e.fragment;if(j=k.childN [...]
-1?(k=k.firstChild):k.firstChild){b=b&&c.nodeName(j,"tr");for(var n=0,r=this.length;n<r;n++)d.call(b?f(this[n],j):this[n],n>0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length=== [...]
-return this}else{e=0;for(var j=d.length;e<j;e++){var i=(e>0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec [...]
-""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]==="<table>"&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.pu [...]
-c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b [...]
-c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styl [...]
-function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")| [...]
-Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c [...]
-"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return null;if(a=e.getComputedSty [...]
-a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters. [...]
-a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=/<script(.|\s)*?\/script>/gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:funct [...]
-"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("<div />").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}}) [...]
-serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess [...]
-function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url: [...]
-global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{} [...]
-e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka [...]
-"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;i [...]
-false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if [...]
-false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.se [...]
-c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q== [...]
-d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h [...]
-g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status== [...]
-1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="st [...]
-"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w) [...]
-if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a], [...]
-this[a].style.display=d||"";if(c.css(this[a],"display")==="none"){d=this[a].nodeName;var f;if(la[d])f=la[d];else{var e=c("<"+d+" />").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a<b;a++)this[a].style.display=c.data(this[a],"olddisplay")||"";return this}},hide:function(a,b){if(a||a===0)return this.animate(K("hide",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");!d&&d!==" [...]
-"olddisplay",c.css(this[a],"display"))}a=0;for(b=this.length;a<b;a++)this[a].style.display="none";return this}},_toggle:c.fn.toggle,toggle:function(a,b){var d=typeof a==="boolean";if(c.isFunction(a)&&c.isFunction(b))this._toggle.apply(this,arguments);else a==null||d?this.each(function(){var f=d?a:c(this).is(":hidden");c(this)[f?"show":"hide"]()}):this.animate(K("toggle",3),a,b);return this},fadeTo:function(a,b,d){return this.filter(":hidden").css("opacity",0).show().end().animate({opacit [...]
-animate:function(a,b,d,f){var e=c.speed(b,d,f);if(c.isEmptyObject(a))return this.each(e.complete);return this[e.queue===false?"each":"queue"](function(){var j=c.extend({},e),i,o=this.nodeType===1&&c(this).is(":hidden"),k=this;for(i in a){var n=i.replace(ia,ja);if(i!==n){a[n]=a[i];delete a[i];i=n}if(a[i]==="hide"&&o||a[i]==="show"&&!o)return j.complete.call(this);if((i==="height"||i==="width")&&this.style){j.display=c.css(this,"display");j.overflow=this.style.overflow}if(c.isArray(a[i])){ [...]
-j.specialEasing||{})[i]=a[i][1];a[i]=a[i][0]}}if(j.overflow!=null)this.style.overflow="hidden";j.curAnim=c.extend({},a);c.each(a,function(r,u){var z=new c.fx(k,j,r);if(Ab.test(u))z[u==="toggle"?o?"show":"hide":u](a);else{var C=Bb.exec(u),B=z.cur(true)||0;if(C){u=parseFloat(C[2]);var E=C[3]||"px";if(E!=="px"){k.style[r]=(u||1)+E;B=(u||1)/z.cur(true)*B;k.style[r]=B+E}if(C[1])u=(C[1]==="-="?-1:1)*u+B;z.custom(B,u,E)}else z.custom(B,u,"")}});return true})},stop:function(a,b){var d=c.timers;a [...]
-this.each(function(){for(var f=d.length-1;f>=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.f [...]
-"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this [...]
-c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.n [...]
-this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.st [...]
-this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this. [...]
-e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b<a.length;b++)a[b]()||a.splice(b--,1);a [...]
-c.fx.stop()},stop:function(){clearInterval(W);W=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a){c.style(a.elem,"opacity",a.now)},_default:function(a){if(a.elem.style&&a.elem.style[a.prop]!=null)a.elem.style[a.prop]=(a.prop==="width"||a.prop==="height"?Math.max(0,a.now):a.now)+a.unit;else a.elem[a.prop]=a.now}}});if(c.expr&&c.expr.filters)c.expr.filters.animated=function(a){return c.grep(c.timers,function(b){return a===b.elem}).length};c.fn.offset="getBoundingClien [...]
-function(a){var b=this[0];if(a)return this.each(function(e){c.offset.setOffset(this,a,e)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);var d=b.getBoundingClientRect(),f=b.ownerDocument;b=f.body;f=f.documentElement;return{top:d.top+(self.pageYOffset||c.support.boxModel&&f.scrollTop||b.scrollTop)-(f.clientTop||b.clientTop||0),left:d.left+(self.pageXOffset||c.support.boxModel&&f.scrollLeft||b.scrollLeft)-(f.clientLeft||b.clientLeft||0)}}:fun [...]
-this[0];if(a)return this.each(function(r){c.offset.setOffset(this,a,r)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);c.offset.initialize();var d=b.offsetParent,f=b,e=b.ownerDocument,j,i=e.documentElement,o=e.body;f=(e=e.defaultView)?e.getComputedStyle(b,null):b.currentStyle;for(var k=b.offsetTop,n=b.offsetLeft;(b=b.parentNode)&&b!==o&&b!==i;){if(c.offset.supportsFixedPosition&&f.position==="fixed")break;j=e?e.getComputedStyle(b,null):b.cu [...]
-k-=b.scrollTop;n-=b.scrollLeft;if(b===d){k+=b.offsetTop;n+=b.offsetLeft;if(c.offset.doesNotAddBorder&&!(c.offset.doesAddBorderForTableAndCells&&/^t(able|d|h)$/i.test(b.nodeName))){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=d;d=b.offsetParent}if(c.offset.subtractsBorderForOverflowNotVisible&&j.overflow!=="visible"){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=j}if(f.position==="relative"||f.position==="static"){k+=o.offsetTop;n+=o. [...]
-f.position==="fixed"){k+=Math.max(i.scrollTop,o.scrollTop);n+=Math.max(i.scrollLeft,o.scrollLeft)}return{top:k,left:n}};c.offset={initialize:function(){var a=s.body,b=s.createElement("div"),d,f,e,j=parseFloat(c.curCSS(a,"marginTop",true))||0;c.extend(b.style,{position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"});b.innerHTML="<div style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;'><div></div></d [...]
-a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j [...]
-c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b= [...]
-d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{ [...]
-f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else return(j=wa(e))?"pageXOffset"in j?j[a?"pag [...]
-"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return" [...]
-e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window);
+/*! jQuery v1.8.2 jquery.com | jquery.org/license */
+(function(a,b){function G(a){var b=F[a]={};return p.each(a.split(s),function(a,c){b[c]=!0}),b}function J(a,c,d){if(d===b&&a.nodeType===1){var e="data-"+c.replace(I,"-$1").toLowerCase();d=a.getAttribute(e);if(typeof d=="string"){try{d=d==="true"?!0:d==="false"?!1:d==="null"?null:+d+""===d?+d:H.test(d)?p.parseJSON(d):d}catch(f){}p.data(a,c,d)}else d=b}return d}function K(a){var b;for(b in a){if(b==="data"&&p.isEmptyObject(a[b]))continue;if(b!=="toJSON")return!1}return!0}function ba(){retur [...]
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
index 4ab9976..4dd4867 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
@@ -1,18 +1,5486 @@
-/*
- * jquery.layout 1.3.0 - Release Candidate 29.3
+/**
+ * @preserve jquery.layout 1.3.0 - Release Candidate 30.62
+ * $Date: 2012-08-04 08:00:00 (Thu, 23 Aug 2012) $
+ * $Rev: 303006 $
*
- * Copyright (c) 2010
+ * Copyright (c) 2012
* Fabrizio Balliano (http://www.fabrizioballiano.net)
* Kevin Dalman (http://allpro.net)
*
* Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
* and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
*
+ * Changelog: http://layout.jquery-dev.net/changelog.cfm#1.3.0.rc30.62
+ * NOTE: This is a short-term release to patch a couple of bugs.
+ * These bugs are listed as officially fixed in RC30.7, which will be released shortly.
+ *
* Docs: http://layout.jquery-dev.net/documentation.html
* Tips: http://layout.jquery-dev.net/tips.html
* Help: http://groups.google.com/group/jquery-ui-layout
+ */
+
+/* JavaDoc Info: http://code.google.com/closure/compiler/docs/js-for-compiler.html
+ * {!Object} non-nullable type (never NULL)
+ * {?string} nullable type (sometimes NULL) - default for {Object}
+ * {number=} optional parameter
+ * {*} ALL types
+ */
+
+// NOTE: For best readability, view with a fixed-width font and tabs equal to 4-chars
+
+;(function ($) {
+
+// alias Math methods - used a lot!
+var min = Math.min
+, max = Math.max
+, round = Math.floor
+
+, isStr = function (v) { return $.type(v) === "string"; }
+
+, runPluginCallbacks = function (Instance, a_fn) {
+ if ($.isArray(a_fn))
+ for (var i=0, c=a_fn.length; i<c; i++) {
+ var fn = a_fn[i];
+ try {
+ if (isStr(fn)) // 'name' of a function
+ fn = eval(fn);
+ if ($.isFunction(fn))
+ fn( Instance );
+ } catch (ex) {}
+ }
+ }
+
+;
+
+
+/*
+ * GENERIC $.layout METHODS - used by all layouts
+ */
+$.layout = {
+
+ version: "1.3.rc30.62"
+, revision: 0.033006 // 1.3.0 final = 1.0300 - major(n+).minor(nn)+patch(nn+)
+
+ // can update code here if $.browser is phased out
+, browser: {
+ mozilla: !!$.browser.mozilla
+ , webkit: !!$.browser.webkit || !!$.browser.safari // webkit = jQ 1.4
+ , msie: !!$.browser.msie
+ , isIE6: $.browser.msie && $.browser.version == 6
+ , boxModel: $.support.boxModel !== false || !$.browser.msie // ONLY IE reverts to old box-model - update for older jQ onReady
+ , version: $.browser.version // not used in Layout core, but may be used by plugins
+ }
+
+ // *PREDEFINED* EFFECTS & DEFAULTS
+ // MUST list effect here - OR MUST set an fxSettings option (can be an empty hash: {})
+, effects: {
+
+ // Pane Open/Close Animations
+ slide: {
+ all: { duration: "fast" } // eg: duration: 1000, easing: "easeOutBounce"
+ , north: { direction: "up" }
+ , south: { direction: "down" }
+ , east: { direction: "right"}
+ , west: { direction: "left" }
+ }
+ , drop: {
+ all: { duration: "slow" }
+ , north: { direction: "up" }
+ , south: { direction: "down" }
+ , east: { direction: "right"}
+ , west: { direction: "left" }
+ }
+ , scale: {
+ all: { duration: "fast" }
+ }
+ // these are not recommended, but can be used
+ , blind: {}
+ , clip: {}
+ , explode: {}
+ , fade: {}
+ , fold: {}
+ , puff: {}
+
+ // Pane Resize Animations
+ , size: {
+ all: { easing: "swing" }
+ }
+ }
+
+ // INTERNAL CONFIG DATA - DO NOT CHANGE THIS!
+, config: {
+ optionRootKeys: "effects,panes,north,south,west,east,center".split(",")
+ , allPanes: "north,south,west,east,center".split(",")
+ , borderPanes: "north,south,west,east".split(",")
+ , oppositeEdge: {
+ north: "south"
+ , south: "north"
+ , east: "west"
+ , west: "east"
+ }
+ // offscreen data
+ , offscreenCSS: { left: "-99999px", right: "auto" } // used by hide/close if useOffscreenClose=true
+ , offscreenReset: "offscreenReset" // key used for data
+ // CSS used in multiple places
+ , hidden: { visibility: "hidden" }
+ , visible: { visibility: "visible" }
+ // layout element settings
+ , resizers: {
+ cssReq: {
+ position: "absolute"
+ , padding: 0
+ , margin: 0
+ , fontSize: "1px"
+ , textAlign: "left" // to counter-act "center" alignment!
+ , overflow: "hidden" // prevent toggler-button from overflowing
+ // SEE $.layout.defaults.zIndexes.resizer_normal
+ }
+ , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
+ background: "#DDD"
+ , border: "none"
+ }
+ }
+ , togglers: {
+ cssReq: {
+ position: "absolute"
+ , display: "block"
+ , padding: 0
+ , margin: 0
+ , overflow: "hidden"
+ , textAlign: "center"
+ , fontSize: "1px"
+ , cursor: "pointer"
+ , zIndex: 1
+ }
+ , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
+ background: "#AAA"
+ }
+ }
+ , content: {
+ cssReq: {
+ position: "relative" /* contain floated or positioned elements */
+ }
+ , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
+ overflow: "auto"
+ , padding: "10px"
+ }
+ , cssDemoPane: { // DEMO CSS - REMOVE scrolling from 'pane' when it has a content-div
+ overflow: "hidden"
+ , padding: 0
+ }
+ }
+ , panes: { // defaults for ALL panes - overridden by 'per-pane settings' below
+ cssReq: {
+ position: "absolute"
+ , margin: 0
+ // $.layout.defaults.zIndexes.pane_normal
+ }
+ , cssDemo: { // DEMO CSS - applied if: options.PANE.applyDemoStyles=true
+ padding: "10px"
+ , background: "#FFF"
+ , border: "1px solid #BBB"
+ , overflow: "auto"
+ }
+ }
+ , north: {
+ side: "Top"
+ , sizeType: "Height"
+ , dir: "horz"
+ , cssReq: {
+ top: 0
+ , bottom: "auto"
+ , left: 0
+ , right: 0
+ , width: "auto"
+ // height: DYNAMIC
+ }
+ }
+ , south: {
+ side: "Bottom"
+ , sizeType: "Height"
+ , dir: "horz"
+ , cssReq: {
+ top: "auto"
+ , bottom: 0
+ , left: 0
+ , right: 0
+ , width: "auto"
+ // height: DYNAMIC
+ }
+ }
+ , east: {
+ side: "Right"
+ , sizeType: "Width"
+ , dir: "vert"
+ , cssReq: {
+ left: "auto"
+ , right: 0
+ , top: "auto" // DYNAMIC
+ , bottom: "auto" // DYNAMIC
+ , height: "auto"
+ // width: DYNAMIC
+ }
+ }
+ , west: {
+ side: "Left"
+ , sizeType: "Width"
+ , dir: "vert"
+ , cssReq: {
+ left: 0
+ , right: "auto"
+ , top: "auto" // DYNAMIC
+ , bottom: "auto" // DYNAMIC
+ , height: "auto"
+ // width: DYNAMIC
+ }
+ }
+ , center: {
+ dir: "center"
+ , cssReq: {
+ left: "auto" // DYNAMIC
+ , right: "auto" // DYNAMIC
+ , top: "auto" // DYNAMIC
+ , bottom: "auto" // DYNAMIC
+ , height: "auto"
+ , width: "auto"
+ }
+ }
+ }
+
+ // CALLBACK FUNCTION NAMESPACE - used to store reusable callback functions
+, callbacks: {}
+
+, getParentPaneElem: function (el) {
+ // must pass either a container or pane element
+ var $el = $(el)
+ , layout = $el.data("layout") || $el.data("parentLayout");
+ if (layout) {
+ var $cont = layout.container;
+ // see if this container is directly-nested inside an outer-pane
+ if ($cont.data("layoutPane")) return $cont;
+ var $pane = $cont.closest("."+ $.layout.defaults.panes.paneClass);
+ // if a pane was found, return it
+ if ($pane.data("layoutPane")) return $pane;
+ }
+ return null;
+ }
+
+, getParentPaneInstance: function (el) {
+ // must pass either a container or pane element
+ var $pane = $.layout.getParentPaneElem(el);
+ return $pane ? $pane.data("layoutPane") : null;
+ }
+
+, getParentLayoutInstance: function (el) {
+ // must pass either a container or pane element
+ var $pane = $.layout.getParentPaneElem(el);
+ return $pane ? $pane.data("parentLayout") : null;
+ }
+
+, getEventObject: function (evt) {
+ return typeof evt === "object" && evt.stopPropagation ? evt : null;
+ }
+, parsePaneName: function (evt_or_pane) {
+ // getEventObject() automatically calls .stopPropagation(), WHICH MUST BE DONE!
+ var evt = $.layout.getEventObject( evt_or_pane );
+ if (evt) {
+ // ALWAYS stop propagation of events triggered in Layout!
+ evt.stopPropagation();
+ return $(this).data("layoutEdge");
+ }
+ else
+ return evt_or_pane;
+ }
+
+
+ // LAYOUT-PLUGIN REGISTRATION
+ // more plugins can added beyond this default list
+, plugins: {
+ draggable: !!$.fn.draggable // resizing
+ , effects: {
+ core: !!$.effects // animimations (specific effects tested by initOptions)
+ , slide: $.effects && $.effects.slide // default effect
+ }
+ }
+
+// arrays of plugin or other methods to be triggered for events in *each layout* - will be passed 'Instance'
+, onCreate: [] // runs when layout is just starting to be created - right after options are set
+, onLoad: [] // runs after layout container and global events init, but before initPanes is called
+, onReady: [] // runs after initialization *completes* - ie, after initPanes completes successfully
+, onDestroy: [] // runs after layout is destroyed
+, onUnload: [] // runs after layout is destroyed OR when page unloads
+, afterOpen: [] // runs after setAsOpen() completes
+, afterClose: [] // runs after setAsClosed() completes
+
+ /*
+ * GENERIC UTILITY METHODS
+ */
+
+ // calculate and return the scrollbar width, as an integer
+, scrollbarWidth: function () { return window.scrollbarWidth || $.layout.getScrollbarSize('width'); }
+, scrollbarHeight: function () { return window.scrollbarHeight || $.layout.getScrollbarSize('height'); }
+, getScrollbarSize: function (dim) {
+ var $c = $('<div style="position: absolute; top: -10000px; left: -10000px; width: 100px; height: 100px; overflow: scroll;"></div>').appendTo("body");
+ var d = { width: $c.width() - $c[0].clientWidth, height: $c.height() - $c[0].clientHeight };
+ $c.remove();
+ window.scrollbarWidth = d.width;
+ window.scrollbarHeight = d.height;
+ return dim.match(/^(width|height)$/) ? d[dim] : d;
+ }
+
+
+ /**
+ * Returns hash container 'display' and 'visibility'
+ *
+ * @see $.swap() - swaps CSS, runs callback, resets CSS
+ */
+, showInvisibly: function ($E, force) {
+ if ($E && $E.length && (force || $E.css('display') === "none")) { // only if not *already hidden*
+ var s = $E[0].style
+ // save ONLY the 'style' props because that is what we must restore
+ , CSS = { display: s.display || '', visibility: s.visibility || '' };
+ // show element 'invisibly' so can be measured
+ $E.css({ display: "block", visibility: "hidden" });
+ return CSS;
+ }
+ return {};
+ }
+
+ /**
+ * Returns data for setting size of an element (container or a pane).
+ *
+ * @see _create(), onWindowResize() for container, plus others for pane
+ * @return JSON Returns a hash of all dimensions: top, bottom, left, right, outerWidth, innerHeight, etc
+ */
+, getElementDimensions: function ($E) {
+ var
+ d = {} // dimensions hash
+ , x = d.css = {} // CSS hash
+ , i = {} // TEMP insets
+ , b, p // TEMP border, padding
+ , N = $.layout.cssNum
+ , off = $E.offset()
+ ;
+ d.offsetLeft = off.left;
+ d.offsetTop = off.top;
+
+ $.each("Left,Right,Top,Bottom".split(","), function (idx, e) { // e = edge
+ b = x["border" + e] = $.layout.borderWidth($E, e);
+ p = x["padding"+ e] = $.layout.cssNum($E, "padding"+e);
+ i[e] = b + p; // total offset of content from outer side
+ d["inset"+ e] = p; // eg: insetLeft = paddingLeft
+ });
+
+ d.offsetWidth = $E.innerWidth(); // offsetWidth is used in calc when doing manual resize
+ d.offsetHeight = $E.innerHeight(); // ditto
+ d.outerWidth = $E.outerWidth();
+ d.outerHeight = $E.outerHeight();
+ d.innerWidth = max(0, d.outerWidth - i.Left - i.Right);
+ d.innerHeight = max(0, d.outerHeight - i.Top - i.Bottom);
+
+ x.width = $E.width();
+ x.height = $E.height();
+ x.top = N($E,"top",true);
+ x.bottom = N($E,"bottom",true);
+ x.left = N($E,"left",true);
+ x.right = N($E,"right",true);
+
+ //d.visible = $E.is(":visible");// && x.width > 0 && x.height > 0;
+
+ return d;
+ }
+
+, getElementCSS: function ($E, list) {
+ var
+ CSS = {}
+ , style = $E[0].style
+ , props = list.split(",")
+ , sides = "Top,Bottom,Left,Right".split(",")
+ , attrs = "Color,Style,Width".split(",")
+ , p, s, a, i, j, k
+ ;
+ for (i=0; i < props.length; i++) {
+ p = props[i];
+ if (p.match(/(border|padding|margin)$/))
+ for (j=0; j < 4; j++) {
+ s = sides[j];
+ if (p === "border")
+ for (k=0; k < 3; k++) {
+ a = attrs[k];
+ CSS[p+s+a] = style[p+s+a];
+ }
+ else
+ CSS[p+s] = style[p+s];
+ }
+ else
+ CSS[p] = style[p];
+ };
+ return CSS
+ }
+
+ /**
+ * Return the innerWidth for the current browser/doctype
+ *
+ * @see initPanes(), sizeMidPanes(), initHandles(), sizeHandles()
+ * @param {Array.<Object>} $E Must pass a jQuery object - first element is processed
+ * @param {number=} outerWidth (optional) Can pass a width, allowing calculations BEFORE element is resized
+ * @return {number} Returns the innerWidth of the elem by subtracting padding and borders
+ */
+, cssWidth: function ($E, outerWidth) {
+ // a 'calculated' outerHeight can be passed so borders and/or padding are removed if needed
+ if (outerWidth <= 0) return 0;
+
+ if (!$.layout.browser.boxModel) return outerWidth;
+
+ // strip border and padding from outerWidth to get CSS Width
+ var b = $.layout.borderWidth
+ , n = $.layout.cssNum
+ , W = outerWidth
+ - b($E, "Left")
+ - b($E, "Right")
+ - n($E, "paddingLeft")
+ - n($E, "paddingRight");
+
+ return max(0,W);
+ }
+
+ /**
+ * Return the innerHeight for the current browser/doctype
+ *
+ * @see initPanes(), sizeMidPanes(), initHandles(), sizeHandles()
+ * @param {Array.<Object>} $E Must pass a jQuery object - first element is processed
+ * @param {number=} outerHeight (optional) Can pass a width, allowing calculations BEFORE element is resized
+ * @return {number} Returns the innerHeight of the elem by subtracting padding and borders
+ */
+, cssHeight: function ($E, outerHeight) {
+ // a 'calculated' outerHeight can be passed so borders and/or padding are removed if needed
+ if (outerHeight <= 0) return 0;
+
+ if (!$.layout.browser.boxModel) return outerHeight;
+
+ // strip border and padding from outerHeight to get CSS Height
+ var b = $.layout.borderWidth
+ , n = $.layout.cssNum
+ , H = outerHeight
+ - b($E, "Top")
+ - b($E, "Bottom")
+ - n($E, "paddingTop")
+ - n($E, "paddingBottom");
+
+ return max(0,H);
+ }
+
+ /**
+ * Returns the 'current CSS numeric value' for a CSS property - 0 if property does not exist
+ *
+ * @see Called by many methods
+ * @param {Array.<Object>} $E Must pass a jQuery object - first element is processed
+ * @param {string} prop The name of the CSS property, eg: top, width, etc.
+ * @param {boolean=} [allowAuto=false] true = return 'auto' if that is value; false = return 0
+ * @return {(string|number)} Usually used to get an integer value for position (top, left) or size (height, width)
+ */
+, cssNum: function ($E, prop, allowAuto) {
+ if (!$E.jquery) $E = $($E);
+ var CSS = $.layout.showInvisibly($E)
+ , p = $.css($E[0], prop, true)
+ , v = allowAuto && p=="auto" ? p : (parseInt(p, 10) || 0);
+ $E.css( CSS ); // RESET
+ return v;
+ }
+
+, borderWidth: function (el, side) {
+ if (el.jquery) el = el[0];
+ var b = "border"+ side.substr(0,1).toUpperCase() + side.substr(1); // left => Left
+ return $.css(el, b+"Style", true) === "none" ? 0 : (parseInt($.css(el, b+"Width", true), 10) || 0);
+ }
+
+ /**
+ * Mouse-tracking utility - FUTURE REFERENCE
+ *
+ * init: if (!window.mouse) {
+ * window.mouse = { x: 0, y: 0 };
+ * $(document).mousemove( $.layout.trackMouse );
+ * }
+ *
+ * @param {Object} evt
+ *
+, trackMouse: function (evt) {
+ window.mouse = { x: evt.clientX, y: evt.clientY };
+ }
+ */
+
+ /**
+ * SUBROUTINE for preventPrematureSlideClose option
+ *
+ * @param {Object} evt
+ * @param {Object=} el
+ */
+, isMouseOverElem: function (evt, el) {
+ var
+ $E = $(el || this)
+ , d = $E.offset()
+ , T = d.top
+ , L = d.left
+ , R = L + $E.outerWidth()
+ , B = T + $E.outerHeight()
+ , x = evt.pageX // evt.clientX ?
+ , y = evt.pageY // evt.clientY ?
+ ;
+ // if X & Y are < 0, probably means is over an open SELECT
+ return ($.layout.browser.msie && x < 0 && y < 0) || ((x >= L && x <= R) && (y >= T && y <= B));
+ }
+
+ /**
+ * Message/Logging Utility
+ *
+ * @example $.layout.msg("My message"); // log text
+ * @example $.layout.msg("My message", true); // alert text
+ * @example $.layout.msg({ foo: "bar" }, "Title"); // log hash-data, with custom title
+ * @example $.layout.msg({ foo: "bar" }, true, "Title", { sort: false }); -OR-
+ * @example $.layout.msg({ foo: "bar" }, "Title", { sort: false, display: true }); // alert hash-data
+ *
+ * @param {(Object|string)} info String message OR Hash/Array
+ * @param {(Boolean|string|Object)=} [popup=false] True means alert-box - can be skipped
+ * @param {(Object|string)=} [debugTitle=""] Title for Hash data - can be skipped
+ * @param {Object=} [debugOpts] Extra options for debug output
+ */
+, msg: function (info, popup, debugTitle, debugOpts) {
+ if ($.isPlainObject(info) && window.debugData) {
+ if (typeof popup === "string") {
+ debugOpts = debugTitle;
+ debugTitle = popup;
+ }
+ else if (typeof debugTitle === "object") {
+ debugOpts = debugTitle;
+ debugTitle = null;
+ }
+ var t = debugTitle || "log( <object> )"
+ , o = $.extend({ sort: false, returnHTML: false, display: false }, debugOpts);
+ if (popup === true || o.display)
+ debugData( info, t, o );
+ else if (window.console)
+ console.log(debugData( info, t, o ));
+ }
+ else if (popup)
+ alert(info);
+ else if (window.console)
+ console.log(info);
+ else {
+ var id = "#layoutLogger"
+ , $l = $(id);
+ if (!$l.length)
+ $l = createLog();
+ $l.children("ul").append('<li style="padding: 4px 10px; margin: 0; border-top: 1px solid #CCC;">'+ info.replace(/\</g,"<").replace(/\>/g,">") +'</li>');
+ }
+
+ function createLog () {
+ var pos = $.support.fixedPosition ? 'fixed' : 'absolute'
+ , $e = $('<div id="layoutLogger" style="position: '+ pos +'; top: 5px; z-index: 999999; max-width: 25%; overflow: hidden; border: 1px solid #000; border-radius: 5px; background: #FBFBFB; box-shadow: 0 2px 10px rgba(0,0,0,0.3);">'
+ + '<div style="font-size: 13px; font-weight: bold; padding: 5px 10px; background: #F6F6F6; border-radius: 5px 5px 0 0; cursor: move;">'
+ + '<span style="float: right; padding-left: 7px; cursor: pointer;" title="Remove Console" onclick="$(this).closest(\'#layoutLogger\').remove()">X</span>Layout console.log</div>'
+ + '<ul style="font-size: 13px; font-weight: none; list-style: none; margin: 0; padding: 0 0 2px;"></ul>'
+ + '</div>'
+ ).appendTo("body");
+ $e.css('left', $(window).width() - $e.outerWidth() - 5)
+ if ($.ui.draggable) $e.draggable({ handle: ':first-child' });
+ return $e;
+ };
+ }
+
+};
+
+// DEFAULT OPTIONS
+$.layout.defaults = {
+/*
+ * LAYOUT & LAYOUT-CONTAINER OPTIONS
+ * - none of these options are applicable to individual panes
+ */
+ name: "" // Not required, but useful for buttons and used for the state-cookie
+, containerSelector: "" // ONLY used when specifying a childOptions - to find container-element that is NOT directly-nested
+, containerClass: "ui-layout-container" // layout-container element
+, scrollToBookmarkOnLoad: true // after creating a layout, scroll to bookmark in URL (.../page.htm#myBookmark)
+, resizeWithWindow: true // bind thisLayout.resizeAll() to the window.resize event
+, resizeWithWindowDelay: 200 // delay calling resizeAll because makes window resizing very jerky
+, resizeWithWindowMaxDelay: 0 // 0 = none - force resize every XX ms while window is being resized
+, onresizeall_start: null // CALLBACK when resizeAll() STARTS - NOT pane-specific
+, onresizeall_end: null // CALLBACK when resizeAll() ENDS - NOT pane-specific
+, onload_start: null // CALLBACK when Layout inits - after options initialized, but before elements
+, onload_end: null // CALLBACK when Layout inits - after EVERYTHING has been initialized
+, onunload_start: null // CALLBACK when Layout is destroyed OR onWindowUnload
+, onunload_end: null // CALLBACK when Layout is destroyed OR onWindowUnload
+, initPanes: true // false = DO NOT initialize the panes onLoad - will init later
+, showErrorMessages: true // enables fatal error messages to warn developers of common errors
+, showDebugMessages: false // display console-and-alert debug msgs - IF this Layout version _has_ debugging code!
+// Changing this zIndex value will cause other zIndex values to automatically change
+, zIndex: null // the PANE zIndex - resizers and masks will be +1
+// DO NOT CHANGE the zIndex values below unless you clearly understand their relationships
+, zIndexes: { // set _default_ z-index values here...
+ pane_normal: 0 // normal z-index for panes
+ , content_mask: 1 // applied to overlays used to mask content INSIDE panes during resizing
+ , resizer_normal: 2 // normal z-index for resizer-bars
+ , pane_sliding: 100 // applied to *BOTH* the pane and its resizer when a pane is 'slid open'
+ , pane_animate: 1000 // applied to the pane when being animated - not applied to the resizer
+ , resizer_drag: 10000 // applied to the CLONED resizer-bar when being 'dragged'
+ }
+, errors: {
+ pane: "pane" // description of "layout pane element" - used only in error messages
+ , selector: "selector" // description of "jQuery-selector" - used only in error messages
+ , addButtonError: "Error Adding Button \n\nInvalid "
+ , containerMissing: "UI Layout Initialization Error\n\nThe specified layout-container does not exist."
+ , centerPaneMissing: "UI Layout Initialization Error\n\nThe center-pane element does not exist.\n\nThe center-pane is a required element."
+ , noContainerHeight: "UI Layout Initialization Warning\n\nThe layout-container \"CONTAINER\" has no height.\n\nTherefore the layout is 0-height and hence 'invisible'!"
+ , callbackError: "UI Layout Callback Error\n\nThe EVENT callback is not a valid function."
+ }
+/*
+ * PANE DEFAULT SETTINGS
+ * - settings under the 'panes' key become the default settings for *all panes*
+ * - ALL pane-options can also be set specifically for each panes, which will override these 'default values'
+ */
+, panes: { // default options for 'all panes' - will be overridden by 'per-pane settings'
+ applyDemoStyles: false // NOTE: renamed from applyDefaultStyles for clarity
+ , closable: true // pane can open & close
+ , resizable: true // when open, pane can be resized
+ , slidable: true // when closed, pane can 'slide open' over other panes - closes on mouse-out
+ , initClosed: false // true = init pane as 'closed'
+ , initHidden: false // true = init pane as 'hidden' - no resizer-bar/spacing
+ // SELECTORS
+ //, paneSelector: "" // MUST be pane-specific - jQuery selector for pane
+ , contentSelector: ".ui-layout-content" // INNER div/element to auto-size so only it scrolls, not the entire pane!
+ , contentIgnoreSelector: ".ui-layout-ignore" // element(s) to 'ignore' when measuring 'content'
+ , findNestedContent: false // true = $P.find(contentSelector), false = $P.children(contentSelector)
+ // GENERIC ROOT-CLASSES - for auto-generated classNames
+ , paneClass: "ui-layout-pane" // Layout Pane
+ , resizerClass: "ui-layout-resizer" // Resizer Bar
+ , togglerClass: "ui-layout-toggler" // Toggler Button
+ , buttonClass: "ui-layout-button" // CUSTOM Buttons - eg: '[ui-layout-button]-toggle/-open/-close/-pin'
+ // ELEMENT SIZE & SPACING
+ //, size: 100 // MUST be pane-specific -initial size of pane
+ , minSize: 0 // when manually resizing a pane
+ , maxSize: 0 // ditto, 0 = no limit
+ , spacing_open: 6 // space between pane and adjacent panes - when pane is 'open'
+ , spacing_closed: 6 // ditto - when pane is 'closed'
+ , togglerLength_open: 50 // Length = WIDTH of toggler button on north/south sides - HEIGHT on east/west sides
+ , togglerLength_closed: 50 // 100% OR -1 means 'full height/width of resizer bar' - 0 means 'hidden'
+ , togglerAlign_open: "center" // top/left, bottom/right, center, OR...
+ , togglerAlign_closed: "center" // 1 => nn = offset from top/left, -1 => -nn == offset from bottom/right
+ , togglerContent_open: "" // text or HTML to put INSIDE the toggler
+ , togglerContent_closed: "" // ditto
+ // RESIZING OPTIONS
+ , resizerDblClickToggle: true //
+ , autoResize: true // IF size is 'auto' or a percentage, then recalc 'pixel size' whenever the layout resizes
+ , autoReopen: true // IF a pane was auto-closed due to noRoom, reopen it when there is room? False = leave it closed
+ , resizerDragOpacity: 1 // option for ui.draggable
+ //, resizerCursor: "" // MUST be pane-specific - cursor when over resizer-bar
+ , maskContents: false // true = add DIV-mask over-or-inside this pane so can 'drag' over IFRAMES
+ , maskObjects: false // true = add IFRAME-mask over-or-inside this pane to cover objects/applets - content-mask will overlay this mask
+ , maskZindex: null // will override zIndexes.content_mask if specified - not applicable to iframe-panes
+ , resizingGrid: false // grid size that the resizers will snap-to during resizing, eg: [20,20]
+ , livePaneResizing: false // true = LIVE Resizing as resizer is dragged
+ , liveContentResizing: false // true = re-measure header/footer heights as resizer is dragged
+ , liveResizingTolerance: 1 // how many px change before pane resizes, to control performance
+ // SLIDING OPTIONS
+ , sliderCursor: "pointer" // cursor when resizer-bar will trigger 'sliding'
+ , slideTrigger_open: "click" // click, dblclick, mouseenter
+ , slideTrigger_close: "mouseleave"// click, mouseleave
+ , slideDelay_open: 300 // applies only for mouseenter event - 0 = instant open
+ , slideDelay_close: 300 // applies only for mouseleave event (300ms is the minimum!)
+ , hideTogglerOnSlide: false // when pane is slid-open, should the toggler show?
+ , preventQuickSlideClose: $.layout.browser.webkit // Chrome triggers slideClosed as it is opening
+ , preventPrematureSlideClose: false // handle incorrect mouseleave trigger, like when over a SELECT-list in IE
+ // PANE-SPECIFIC TIPS & MESSAGES
+ , tips: {
+ Open: "Open" // eg: "Open Pane"
+ , Close: "Close"
+ , Resize: "Resize"
+ , Slide: "Slide Open"
+ , Pin: "Pin"
+ , Unpin: "Un-Pin"
+ , noRoomToOpen: "Not enough room to show this panel." // alert if user tries to open a pane that cannot
+ , minSizeWarning: "Panel has reached its minimum size" // displays in browser statusbar
+ , maxSizeWarning: "Panel has reached its maximum size" // ditto
+ }
+ // HOT-KEYS & MISC
+ , showOverflowOnHover: false // will bind allowOverflow() utility to pane.onMouseOver
+ , enableCursorHotkey: true // enabled 'cursor' hotkeys
+ //, customHotkey: "" // MUST be pane-specific - EITHER a charCode OR a character
+ , customHotkeyModifier: "SHIFT" // either 'SHIFT', 'CTRL' or 'CTRL+SHIFT' - NOT 'ALT'
+ // PANE ANIMATION
+ // NOTE: fxSss_open, fxSss_close & fxSss_size options (eg: fxName_open) are auto-generated if not passed
+ , fxName: "slide" // ('none' or blank), slide, drop, scale -- only relevant to 'open' & 'close', NOT 'size'
+ , fxSpeed: null // slow, normal, fast, 200, nnn - if passed, will OVERRIDE fxSettings.duration
+ , fxSettings: {} // can be passed, eg: { easing: "easeOutBounce", duration: 1500 }
+ , fxOpacityFix: true // tries to fix opacity in IE to restore anti-aliasing after animation
+ , animatePaneSizing: false // true = animate resizing after dragging resizer-bar OR sizePane() is called
+ /* NOTE: Action-specific FX options are auto-generated from the options above if not specifically set:
+ fxName_open: "slide" // 'Open' pane animation
+ fxName_close: "slide" // 'Close' pane animation
+ fxName_size: "slide" // 'Size' pane animation - when animatePaneSizing = true
+ fxSpeed_open: null
+ fxSpeed_close: null
+ fxSpeed_size: null
+ fxSettings_open: {}
+ fxSettings_close: {}
+ fxSettings_size: {}
+ */
+ // CHILD/NESTED LAYOUTS
+ , childOptions: null // Layout-options for nested/child layout - even {} is valid as options
+ , initChildLayout: true // true = child layout will be created as soon as _this_ layout completes initialization
+ , destroyChildLayout: true // true = destroy child-layout if this pane is destroyed
+ , resizeChildLayout: true // true = trigger child-layout.resizeAll() when this pane is resized
+ // EVENT TRIGGERING
+ , triggerEventsOnLoad: false // true = trigger onopen OR onclose callbacks when layout initializes
+ , triggerEventsDuringLiveResize: true // true = trigger onresize callback REPEATEDLY if livePaneResizing==true
+ // PANE CALLBACKS
+ , onshow_start: null // CALLBACK when pane STARTS to Show - BEFORE onopen/onhide_start
+ , onshow_end: null // CALLBACK when pane ENDS being Shown - AFTER onopen/onhide_end
+ , onhide_start: null // CALLBACK when pane STARTS to Close - BEFORE onclose_start
+ , onhide_end: null // CALLBACK when pane ENDS being Closed - AFTER onclose_end
+ , onopen_start: null // CALLBACK when pane STARTS to Open
+ , onopen_end: null // CALLBACK when pane ENDS being Opened
+ , onclose_start: null // CALLBACK when pane STARTS to Close
+ , onclose_end: null // CALLBACK when pane ENDS being Closed
+ , onresize_start: null // CALLBACK when pane STARTS being Resized ***FOR ANY REASON***
+ , onresize_end: null // CALLBACK when pane ENDS being Resized ***FOR ANY REASON***
+ , onsizecontent_start: null // CALLBACK when sizing of content-element STARTS
+ , onsizecontent_end: null // CALLBACK when sizing of content-element ENDS
+ , onswap_start: null // CALLBACK when pane STARTS to Swap
+ , onswap_end: null // CALLBACK when pane ENDS being Swapped
+ , ondrag_start: null // CALLBACK when pane STARTS being ***MANUALLY*** Resized
+ , ondrag_end: null // CALLBACK when pane ENDS being ***MANUALLY*** Resized
+ }
+/*
+ * PANE-SPECIFIC SETTINGS
+ * - options listed below MUST be specified per-pane - they CANNOT be set under 'panes'
+ * - all options under the 'panes' key can also be set specifically for any pane
+ * - most options under the 'panes' key apply only to 'border-panes' - NOT the center-pane
+ */
+, north: {
+ paneSelector: ".ui-layout-north"
+ , size: "auto" // eg: "auto", "30%", .30, 200
+ , resizerCursor: "n-resize" // custom = url(myCursor.cur)
+ , customHotkey: "" // EITHER a charCode (43) OR a character ("o")
+ }
+, south: {
+ paneSelector: ".ui-layout-south"
+ , size: "auto"
+ , resizerCursor: "s-resize"
+ , customHotkey: ""
+ }
+, east: {
+ paneSelector: ".ui-layout-east"
+ , size: 200
+ , resizerCursor: "e-resize"
+ , customHotkey: ""
+ }
+, west: {
+ paneSelector: ".ui-layout-west"
+ , size: 200
+ , resizerCursor: "w-resize"
+ , customHotkey: ""
+ }
+, center: {
+ paneSelector: ".ui-layout-center"
+ , minWidth: 0
+ , minHeight: 0
+ }
+};
+
+// Classifies option-keys by the scope they belong to, so options can be
+// routed correctly when distributed to the layout and its panes:
+//   layout    = global options that are NEVER valid per-pane
+//   center    = the subset of pane-options that also apply to the center-pane
+//   noDefault = options that MUST be set per-pane (ignored under 'panes')
+$.layout.optionsMap = {
+ // layout/global options - NOT pane-options
+ layout: ("stateManagement,effects,zIndexes,errors,"
+ + "name,zIndex,scrollToBookmarkOnLoad,showErrorMessages,"
+ + "resizeWithWindow,resizeWithWindowDelay,resizeWithWindowMaxDelay,"
+ + "onresizeall,onresizeall_start,onresizeall_end,onload,onunload").split(",")
+// borderPanes: [ ALL options that are NOT specified as 'layout' ]
+ // default.panes options that apply to the center-pane (most options apply _only_ to border-panes)
+, center: ("paneClass,contentSelector,contentIgnoreSelector,findNestedContent,applyDemoStyles,triggerEventsOnLoad,"
+ + "showOverflowOnHover,maskContents,maskObjects,liveContentResizing,"
+ + "childOptions,initChildLayout,resizeChildLayout,destroyChildLayout,"
+ + "onresize,onresize_start,onresize_end,onsizecontent,onsizecontent_start,onsizecontent_end").split(",")
+ // options that MUST be specifically set 'per-pane' - CANNOT set in the panes (defaults) key
+, noDefault: ("paneSelector,resizerCursor,customHotkey").split(",")
+};
+
+/**
+ * Processes options passed in and converts flat-format data into subkey (JSON) format.
+ * In flat-format, subkeys are _currently_ separated with 2 underscores, like north__optName
+ * Plugins may also call this method so they can transform their own data
+ *
+ * Non-object input is tolerated: an empty skeleton is returned unchanged.
+ *
+ * @param {!Object} hash Data/options passed by user - may be a single level or nested levels
+ * @return {Object} Returns the options as a nested hash - always contains 'panes' and 'center' keys
+ */
+$.layout.transformData = function (hash) {
+ var json = { panes: {}, center: {} } // init return object
+ , data, branch, optKey, keys, key, val, i, c; // NOTE(review): 'data' is assigned below but never read - confirm before removing
+
+ if (typeof hash !== "object") return json; // no options passed
+
+ // convert all 'flat-keys' to 'sub-key' format
+ for (optKey in hash) {
+ branch = json;
+ data = $.layout.optionsMap.layout;
+ val = hash[ optKey ];
+ keys = optKey.split("__"); // eg: west__size or north__fxSettings__duration
+ c = keys.length - 1;
+ // convert underscore-delimited to subkeys
+ for (i=0; i <= c; i++) {
+ key = keys[i];
+ if (i === c)
+ branch[key] = val; // last segment = the value itself
+ else if (!branch[key])
+ branch[key] = {}; // create the subkey
+ // recurse to sub-key for next loop - if not done
+ branch = branch[key];
+ }
+ }
+
+ return json;
+};
+
+// INTERNAL CONFIG DATA - DO NOT CHANGE THIS!
+// Backward-compatibility shims: maps deprecated option-names (including the
+// old cookie/state options) onto their current equivalents, and renames them
+// in-place inside a user-supplied options hash.
+$.layout.backwardCompatibility = {
+ // data used by renameOptions()
+ map: {
+ // OLD Option Name: NEW Option Name
+ applyDefaultStyles: "applyDemoStyles"
+ , resizeNestedLayout: "resizeChildLayout"
+ , resizeWhileDragging: "livePaneResizing"
+ , resizeContentWhileDragging: "liveContentResizing"
+ , triggerEventsWhileDragging: "triggerEventsDuringLiveResize"
+ , maskIframesOnResize: "maskContents"
+ , useStateCookie: "stateManagement.enabled"
+ , "cookie.autoLoad": "stateManagement.autoLoad"
+ , "cookie.autoSave": "stateManagement.autoSave"
+ , "cookie.keys": "stateManagement.stateKeys"
+ , "cookie.name": "stateManagement.cookie.name"
+ , "cookie.domain": "stateManagement.cookie.domain"
+ , "cookie.path": "stateManagement.cookie.path"
+ , "cookie.expires": "stateManagement.cookie.expires"
+ , "cookie.secure": "stateManagement.cookie.secure"
+ // OLD Language options
+ , noRoomToOpenTip: "tips.noRoomToOpen"
+ , togglerTip_open: "tips.Close" // open = Close
+ , togglerTip_closed: "tips.Open" // closed = Open
+ , resizerTip: "tips.Resize"
+ , sliderTip: "tips.Slide"
+ }
+
+/**
+* Renames deprecated options (per 'map' above) in-place inside opts.
+* Each old key's value is copied to the new (possibly dot-nested) location
+* and the old key is deleted.
+* @param {Object} opts
+*/
+, renameOptions: function (opts) {
+ var map = $.layout.backwardCompatibility.map
+ , oldData, newData, value
+ ;
+ for (var itemPath in map) {
+ oldData = getBranch( itemPath );
+ value = oldData.branch[ oldData.key ];
+ if (value !== undefined) {
+ newData = getBranch( map[itemPath], true ); // true = create missing path
+ newData.branch[ newData.key ] = value;
+ delete oldData.branch[ oldData.key ];
+ }
+ }
+
+ /**
+ * Resolves a dot-delimited path inside opts to { branch, key },
+ * where branch is the parent object and key the final path segment.
+ * @param {string} path
+ * @param {boolean=} [create=false] Create path if does not exist
+ */
+ function getBranch (path, create) {
+ var a = path.split(".") // split keys into array
+ , c = a.length - 1
+ , D = { branch: opts, key: a[c] } // init branch at top & set key (last item)
+ , i = 0, k, undef; // NOTE(review): 'undef' is declared but never used
+ for (; i<c; i++) { // skip the last key (data)
+ k = a[i];
+ if (D.branch[ k ] == undefined) { // child-key does not exist ('==' also treats null as missing)
+ if (create) {
+ D.branch = D.branch[ k ] = {}; // create child-branch
+ }
+ else // can't go any farther
+ D.branch = {}; // branch is undefined
+ }
+ else
+ D.branch = D.branch[ k ]; // get child-branch
+ }
+ return D;
+ };
+ }
+
+/**
+* Runs renameOptions() on the root options, the shared 'panes' key
+* (after migrating the old 'defaults' key into it), and each pane key.
+* @param {Object} opts
+*/
+, renameAllOptions: function (opts) {
+ var ren = $.layout.backwardCompatibility.renameOptions;
+ // rename root (layout) options
+ ren( opts );
+ // rename 'defaults' to 'panes'
+ if (opts.defaults) {
+ if (typeof opts.panes !== "object")
+ opts.panes = {};
+ $.extend(true, opts.panes, opts.defaults); // deep-merge old defaults into panes
+ delete opts.defaults;
+ }
+ // rename options in the options.panes key
+ if (opts.panes) ren( opts.panes );
+ // rename options inside *each pane key*, eg: options.west
+ $.each($.layout.config.allPanes, function (i, pane) {
+ if (opts[pane]) ren( opts[pane] );
+ });
+ return opts;
+ }
+};
+
+
+
+
+/* ============================================================
+ * BEGIN WIDGET: $( selector ).layout( {options} );
+ * ============================================================
+ */
+$.fn.layout = function (opts) {
+ var
+
+ // local aliases to global data
+ browser = $.layout.browser
+, _c = $.layout.config
+
+ // local aliases to utility methods
+, cssW = $.layout.cssWidth
+, cssH = $.layout.cssHeight
+, elDims = $.layout.getElementDimensions
+, elCSS = $.layout.getElementCSS
+, evtObj = $.layout.getEventObject
+, evtPane = $.layout.parsePaneName
+
+/**
+ * options - populated by initOptions()
+ */
+, options = $.extend(true, {}, $.layout.defaults)
+, effects = options.effects = $.extend(true, {}, $.layout.effects)
+
+/**
+ * layout-state object
+ */
+, state = {
+ // generate unique ID to use for event.namespace so can unbind only events added by 'this layout'
+ id: "layout"+ $.now() // code uses alias: sID
+ , initialized: false
+ , container: {} // init all keys
+ , north: {}
+ , south: {}
+ , east: {}
+ , west: {}
+ , center: {}
+ }
+
+/**
+ * parent/child-layout pointers
+ */
+//, hasParentLayout = false - exists ONLY inside Instance so can be set externally
+, children = {
+ north: null
+ , south: null
+ , east: null
+ , west: null
+ , center: null
+ }
+
+/*
+ * ###########################
+ * INTERNAL HELPER FUNCTIONS
+ * ###########################
+ */
+
+ /**
+ * Manages all internal timers
+ */
+, timer = {
+ data: {}
+ , set: function (s, fn, ms) { timer.clear(s); timer.data[s] = setTimeout(fn, ms); }
+ , clear: function (s) { var t=timer.data; if (t[s]) {clearTimeout(t[s]); delete t[s];} }
+ }
+
+ /**
+ * Alert or console.log a message - IF option is enabled.
+ *
+ * @param {(string|!Object)} msg Message (or debug-data) to display
+ * @param {?boolean} popup True by default, means 'alert', false means use console.log
+ * @param {?boolean} debug True means is a widget debugging message
+ */
+, _log = function (msg, popup, debug) {
+ var o = options;
+ if ((o.showErrorMessages && !debug) || (debug && o.showDebugMessages))
+ $.layout.msg( o.name +' / '+ msg, (popup !== false) );
+ return false;
+ }
+
+ /**
+ * Executes a Callback function after a trigger event, like resize, open or close
+ *
+ * @param {string} evtName Name of the layout callback, eg "onresize_start"
+ * @param {?string} pane This is passed only so we can pass the 'pane object' to the callback
+ * @param {?string|?boolean} skipBoundEvents True = do not run events bound to the elements - only the callbacks set in options
+ */
+, _runCallbacks = function (evtName, pane, skipBoundEvents) {
+ var paneCB = pane && isStr(pane)
+ , s = paneCB ? state[pane] : state
+ , o = paneCB ? options[pane] : options
+ , lName = options.name
+ // names like onopen and onopen_end separate are interchangeable in options...
+ , lng = evtName + (evtName.match(/_/) ? "" : "_end")
+ , shrt = lng.match(/_end$/) ? lng.substr(0, lng.length - 4) : ""
+ , fn = o[lng] || o[shrt]
+ , retVal = "NC" // NC = No Callback
+ , args = []
+ , $P
+ ;
+ if ( !paneCB && $.type(skipBoundEvents) !== 'boolean' )
+ skipBoundEvents = pane; // allow pane param to be skipped for Layout callback
+
+ // first trigger the callback set in the options
+ if (fn) {
+ try {
+ // convert function name (string) to function object
+ if (isStr( fn )) {
+ if (fn.match(/,/)) {
+ // function name cannot contain a comma,
+ // so must be a function name AND a parameter to pass
+ args = fn.split(",")
+ , fn = eval(args[0]);
+ }
+ else // just the name of an external function?
+ fn = eval(fn);
+ }
+ // execute the callback, if exists
+ if ($.isFunction( fn )) {
+ if (args.length)
+ retVal = fn(args[1]); // pass the argument parsed from 'list'
+ else if ( paneCB )
+ // pass data: pane-name, pane-element, pane-state, pane-options, and layout-name
+ retVal = fn( pane, $Ps[pane], s, o, lName );
+ else // must be a layout/container callback - pass suitable info
+ retVal = fn( Instance, s, o, lName );
+ }
+ }
+ catch (ex) {
+ _log( options.errors.callbackError.replace(/EVENT/, $.trim(pane +" "+ lng)), false );
+ }
+ }
+
+ // trigger additional events bound directly to the pane
+ if (!skipBoundEvents && retVal !== false) {
+ if ( paneCB ) { // PANE events can be bound to each pane-elements
+ $P = $Ps[pane];
+ o = options[pane];
+ s = state[pane];
+ $P.triggerHandler('layoutpane'+ lng, [ pane, $P, s, o, lName ]);
+ if (shrt)
+ $P.triggerHandler('layoutpane'+ shrt, [ pane, $P, s, o, lName ]);
+ }
+ else { // LAYOUT events can be bound to the container-element
+ $N.triggerHandler('layout'+ lng, [ Instance, s, o, lName ]);
+ if (shrt)
+ $N.triggerHandler('layout'+ shrt, [ Instance, s, o, lName ]);
+ }
+ }
+
+ // ALWAYS resizeChildLayout after a resize event - even during initialization
+ if (evtName === "onresize_end" || evtName === "onsizecontent_end")
+ resizeChildLayout(pane);
+
+ return retVal;
+ }
+
+
+ /**
+ * cure iframe display issues in IE & other browsers
+ */
+, _fixIframe = function (pane) {
+ if (browser.mozilla) return; // skip FireFox - it auto-refreshes iframes onShow
+ var $P = $Ps[pane];
+ // if the 'pane' is an iframe, do it
+ if (state[pane].tagName === "IFRAME")
+ $P.css(_c.hidden).css(_c.visible);
+ else // ditto for any iframes INSIDE the pane
+ $P.find('IFRAME').css(_c.hidden).css(_c.visible);
+ }
+
+ /**
+ * @param {string} pane Can accept ONLY a 'pane' (east, west, etc)
+ * @param {number=} outerSize (optional) Can pass a width, allowing calculations BEFORE element is resized
+ * @return {number} Returns the innerHeight/Width of el by subtracting padding and borders
+ */
+, cssSize = function (pane, outerSize) {
+ var fn = _c[pane].dir=="horz" ? cssH : cssW;
+ return fn($Ps[pane], outerSize);
+ }
+
+ /**
+ * @param {string} pane Can accept ONLY a 'pane' (east, west, etc)
+ * @return {Object} Returns hash of minWidth & minHeight
+ */
+, cssMinDims = function (pane) {
+ // minWidth/Height means CSS width/height = 1px
+ var $P = $Ps[pane]
+ , dir = _c[pane].dir
+ , d = {
+ minWidth: 1001 - cssW($P, 1000)
+ , minHeight: 1001 - cssH($P, 1000)
+ }
+ ;
+ if (dir === "horz") d.minSize = d.minHeight;
+ if (dir === "vert") d.minSize = d.minWidth;
+ return d;
+ }
+
+ // TODO: see if these methods can be made more useful...
+ // TODO: *maybe* return cssW/H from these so caller can use this info
+
+ /**
+ * @param {(string|!Object)} el
+ * @param {number=} outerWidth
+ * @param {boolean=} [autoHide=false]
+ */
+, setOuterWidth = function (el, outerWidth, autoHide) {
+ var $E = el, w;
+ if (isStr(el)) $E = $Ps[el]; // west
+ else if (!el.jquery) $E = $(el);
+ w = cssW($E, outerWidth);
+ $E.css({ width: w });
+ if (w > 0) {
+ if (autoHide && $E.data('autoHidden') && $E.innerHeight() > 0) {
+ $E.show().data('autoHidden', false);
+ if (!browser.mozilla) // FireFox refreshes iframes - IE does not
+ // make hidden, then visible to 'refresh' display after animation
+ $E.css(_c.hidden).css(_c.visible);
+ }
+ }
+ else if (autoHide && !$E.data('autoHidden'))
+ $E.hide().data('autoHidden', true);
+ }
+
+ /**
+ * @param {(string|!Object)} el
+ * @param {number=} outerHeight
+ * @param {boolean=} [autoHide=false]
+ */
+, setOuterHeight = function (el, outerHeight, autoHide) {
+ var $E = el, h;
+ if (isStr(el)) $E = $Ps[el]; // west
+ else if (!el.jquery) $E = $(el);
+ h = cssH($E, outerHeight);
+ $E.css({ height: h, visibility: "visible" }); // may have been 'hidden' by sizeContent
+ if (h > 0 && $E.innerWidth() > 0) {
+ if (autoHide && $E.data('autoHidden')) {
+ $E.show().data('autoHidden', false);
+ if (!browser.mozilla) // FireFox refreshes iframes - IE does not
+ $E.css(_c.hidden).css(_c.visible);
+ }
+ }
+ else if (autoHide && !$E.data('autoHidden'))
+ $E.hide().data('autoHidden', true);
+ }
+
+ /**
+ * @param {(string|!Object)} el
+ * @param {number=} outerSize
+ * @param {boolean=} [autoHide=false]
+ */
+, setOuterSize = function (el, outerSize, autoHide) {
+ if (_c[pane].dir=="horz") // pane = north or south
+ setOuterHeight(el, outerSize, autoHide);
+ else // pane = east or west
+ setOuterWidth(el, outerSize, autoHide);
+ }
+
+
+ /**
+ * Converts any 'size' params to a pixel/integer size, if not already
+ * If 'auto' or a decimal/percentage is passed as 'size', a pixel-size is calculated
+ *
+ /**
+ * @param {string} pane
+ * @param {(string|number)=} size
+ * @param {string=} [dir]
+ * @return {number}
+ */
+, _parseSize = function (pane, size, dir) {
+ if (!dir) dir = _c[pane].dir;
+
+ if (isStr(size) && size.match(/%/))
+ size = (size === '100%') ? -1 : parseInt(size, 10) / 100; // convert % to decimal
+
+ if (size === 0)
+ return 0;
+ else if (size >= 1)
+ return parseInt(size, 10);
+
+ var o = options, avail = 0;
+ if (dir=="horz") // north or south or center.minHeight
+ avail = sC.innerHeight - ($Ps.north ? o.north.spacing_open : 0) - ($Ps.south ? o.south.spacing_open : 0);
+ else if (dir=="vert") // east or west or center.minWidth
+ avail = sC.innerWidth - ($Ps.west ? o.west.spacing_open : 0) - ($Ps.east ? o.east.spacing_open : 0);
+
+ if (size === -1) // -1 == 100%
+ return avail;
+ else if (size > 0) // percentage, eg: .25
+ return round(avail * size);
+ else if (pane=="center")
+ return 0;
+ else { // size < 0 || size=='auto' || size==Missing || size==Invalid
+ // auto-size the pane
+ var dim = (dir === "horz" ? "height" : "width")
+ , $P = $Ps[pane]
+ , $C = dim === 'height' ? $Cs[pane] : false
+ , vis = $.layout.showInvisibly($P) // show pane invisibly if hidden
+ , szP = $P.css(dim) // SAVE current pane size
+ , szC = $C ? $C.css(dim) : 0 // SAVE current content size
+ ;
+ $P.css(dim, "auto");
+ if ($C) $C.css(dim, "auto");
+ size = (dim === "height") ? $P.outerHeight() : $P.outerWidth(); // MEASURE
+ $P.css(dim, szP).css(vis); // RESET size & visibility
+ if ($C) $C.css(dim, szC);
+ return size;
+ }
+ }
+
+ /**
+ * Calculates current 'size' (outer-width or outer-height) of a border-pane - optionally with 'pane-spacing' added
+ *
+ * @param {(string|!Object)} pane
+ * @param {boolean=} [inclSpace=false]
+ * @return {number} Returns EITHER Width for east/west panes OR Height for north/south panes
+ */
+, getPaneSize = function (pane, inclSpace) {
+ var
+ $P = $Ps[pane]
+ , o = options[pane]
+ , s = state[pane]
+ , oSp = (inclSpace ? o.spacing_open : 0)
+ , cSp = (inclSpace ? o.spacing_closed : 0)
+ ;
+ if (!$P || s.isHidden)
+ return 0;
+ else if (s.isClosed || (s.isSliding && inclSpace))
+ return cSp;
+ else if (_c[pane].dir === "horz")
+ return $P.outerHeight() + oSp;
+ else // dir === "vert"
+ return $P.outerWidth() + oSp;
+ }
+
+ /**
+ * Calculate min/max pane dimensions and limits for resizing
+ *
+ * @param {string} pane
+ * @param {boolean=} [slide=false]
+ */
+, setSizeLimits = function (pane, slide) {
+ if (!isInitialized()) return;
+ var
+ o = options[pane]
+ , s = state[pane]
+ , c = _c[pane]
+ , dir = c.dir
+ , side = c.side.toLowerCase()
+ , type = c.sizeType.toLowerCase()
+ , isSliding = (slide != undefined ? slide : s.isSliding) // only open() passes 'slide' param
+ , $P = $Ps[pane]
+ , paneSpacing = o.spacing_open
+ // measure the pane on the *opposite side* from this pane
+ , altPane = _c.oppositeEdge[pane]
+ , altS = state[altPane]
+ , $altP = $Ps[altPane]
+ , altPaneSize = (!$altP || altS.isVisible===false || altS.isSliding ? 0 : (dir=="horz" ? $altP.outerHeight() : $altP.outerWidth()))
+ , altPaneSpacing = ((!$altP || altS.isHidden ? 0 : options[altPane][ altS.isClosed !== false ? "spacing_closed" : "spacing_open" ]) || 0)
+ // limitSize prevents this pane from 'overlapping' opposite pane
+ , containerSize = (dir=="horz" ? sC.innerHeight : sC.innerWidth)
+ , minCenterDims = cssMinDims("center")
+ , minCenterSize = dir=="horz" ? max(options.center.minHeight, minCenterDims.minHeight) : max(options.center.minWidth, minCenterDims.minWidth)
+ // if pane is 'sliding', then ignore center and alt-pane sizes - because 'overlays' them
+ , limitSize = (containerSize - paneSpacing - (isSliding ? 0 : (_parseSize("center", minCenterSize, dir) + altPaneSize + altPaneSpacing)))
+ , minSize = s.minSize = max( _parseSize(pane, o.minSize), cssMinDims(pane).minSize )
+ , maxSize = s.maxSize = min( (o.maxSize ? _parseSize(pane, o.maxSize) : 100000), limitSize )
+ , r = s.resizerPosition = {} // used to set resizing limits
+ , top = sC.insetTop
+ , left = sC.insetLeft
+ , W = sC.innerWidth
+ , H = sC.innerHeight
+ , rW = o.spacing_open // subtract resizer-width to get top/left position for south/east
+ ;
+ switch (pane) {
+ case "north": r.min = top + minSize;
+ r.max = top + maxSize;
+ break;
+ case "west": r.min = left + minSize;
+ r.max = left + maxSize;
+ break;
+ case "south": r.min = top + H - maxSize - rW;
+ r.max = top + H - minSize - rW;
+ break;
+ case "east": r.min = left + W - maxSize - rW;
+ r.max = left + W - minSize - rW;
+ break;
+ };
+ }
+
+ /**
+ * Returns data for setting the size/position of center pane. Also used to set Height for east/west panes
+ *
+ * @return JSON Returns a hash of all dimensions: top, bottom, left, right, (outer) width and (outer) height
+ */
+, calcNewCenterPaneDims = function () {
+ var d = {
+ top: getPaneSize("north", true) // true = include 'spacing' value for pane
+ , bottom: getPaneSize("south", true)
+ , left: getPaneSize("west", true)
+ , right: getPaneSize("east", true)
+ , width: 0
+ , height: 0
+ };
+
+ // NOTE: sC = state.container
+ // calc center-pane outer dimensions
+ d.width = sC.innerWidth - d.left - d.right; // outerWidth
+ d.height = sC.innerHeight - d.bottom - d.top; // outerHeight
+ // add the 'container border/padding' to get final positions relative to the container
+ d.top += sC.insetTop;
+ d.bottom += sC.insetBottom;
+ d.left += sC.insetLeft;
+ d.right += sC.insetRight;
+
+ return d;
+ }
+
+
+ /**
+ * @param {!Object} el
+ * @param {boolean=} [allStates=false]
+ */
+, getHoverClasses = function (el, allStates) {
+ var
+ $El = $(el)
+ , type = $El.data("layoutRole")
+ , pane = $El.data("layoutEdge")
+ , o = options[pane]
+ , root = o[type +"Class"]
+ , _pane = "-"+ pane // eg: "-west"
+ , _open = "-open"
+ , _closed = "-closed"
+ , _slide = "-sliding"
+ , _hover = "-hover " // NOTE the trailing space
+ , _state = $El.hasClass(root+_closed) ? _closed : _open
+ , _alt = _state === _closed ? _open : _closed
+ , classes = (root+_hover) + (root+_pane+_hover) + (root+_state+_hover) + (root+_pane+_state+_hover)
+ ;
+ if (allStates) // when 'removing' classes, also remove alternate-state classes
+ classes += (root+_alt+_hover) + (root+_pane+_alt+_hover);
+
+ if (type=="resizer" && $El.hasClass(root+_slide))
+ classes += (root+_slide+_hover) + (root+_pane+_slide+_hover);
+
+ return $.trim(classes);
+ }
+, addHover = function (evt, el) {
+ var $E = $(el || this);
+ if (evt && $E.data("layoutRole") === "toggler")
+ evt.stopPropagation(); // prevent triggering 'slide' on Resizer-bar
+ $E.addClass( getHoverClasses($E) );
+ }
+, removeHover = function (evt, el) {
+ var $E = $(el || this);
+ $E.removeClass( getHoverClasses($E, true) );
+ }
+
+, onResizerEnter = function (evt) { // ALSO called by toggler.mouseenter
+ if ($.fn.disableSelection)
+ $("body").disableSelection();
+ }
+, onResizerLeave = function (evt, el) {
+ var
+ e = el || this // el is only passed when called by the timer
+ , pane = $(e).data("layoutEdge")
+ , name = pane +"ResizerLeave"
+ ;
+ timer.clear(pane+"_openSlider"); // cancel slideOpen timer, if set
+ timer.clear(name); // cancel enableSelection timer - may re/set below
+ // this method calls itself on a timer because it needs to allow
+ // enough time for dragging to kick-in and set the isResizing flag
+ // dragging has a 100ms delay set, so this delay must be >100
+ if (!el) // 1st call - mouseleave event
+ timer.set(name, function(){ onResizerLeave(evt, e); }, 200);
+ // if user is resizing, then dragStop will enableSelection(), so can skip it here
+ else if (!state[pane].isResizing && $.fn.enableSelection) // 2nd call - by timer
+ $("body").enableSelection();
+ }
+
+/*
+ * ###########################
+ * INITIALIZATION METHODS
+ * ###########################
+ */
+
+ /**
+ * Initialize the layout - called automatically whenever an instance of layout is created
+ *
+ * @see none - triggered onInit
+ * @return mixed true = fully initialized | false = panes not initialized (yet) | 'cancel' = abort
+ */
+, _create = function () {
+ // initialize config/options
+ initOptions();
+ var o = options;
+
+ // TEMP state so isInitialized returns true during init process
+ state.creatingLayout = true;
+
+ // init plugins for this layout, if there are any (eg: stateManagement)
+ runPluginCallbacks( Instance, $.layout.onCreate );
+
+ // options & state have been initialized, so now run beforeLoad callback
+ // onload will CANCEL layout creation if it returns false
+ if (false === _runCallbacks("onload_start"))
+ return 'cancel';
+
+ // initialize the container element
+ _initContainer();
+
+ // bind hotkey function - keyDown - if required
+ initHotkeys();
+
+ // bind window.onunload
+ $(window).bind("unload."+ sID, unload);
+
+ // init plugins for this layout, if there are any (eg: customButtons)
+ runPluginCallbacks( Instance, $.layout.onLoad );
+
+ // if layout elements are hidden, then layout WILL NOT complete initialization!
+ // initLayoutElements will set initialized=true and run the onload callback IF successful
+ if (o.initPanes) _initLayoutElements();
+
+ delete state.creatingLayout;
+
+ return state.initialized;
+ }
+
+ /**
+ * Initialize the layout IF not already
+ *
+ * @see All methods in Instance run this test
+ * @return boolean true = layoutElements have been initialized | false = panes are not initialized (yet)
+ */
+, isInitialized = function () {
+ if (state.initialized || state.creatingLayout) return true; // already initialized
+ else return _initLayoutElements(); // try to init panes NOW
+ }
+
+ /**
+ * Initialize the layout - called automatically whenever an instance of layout is created
+ *
+ * @see _create() & isInitialized
+ * @return An object pointer to the instance created
+ */
+, _initLayoutElements = function (retry) {
+ // initialize config/options
+ var o = options;
+
+ // CANNOT init panes inside a hidden container!
+ if (!$N.is(":visible")) {
+ // handle Chrome bug where popup window 'has no height'
+ // if layout is BODY element, try again in 50ms
+ // SEE: http://layout.jquery-dev.net/samples/test_popup_window.html
+ if ( !retry && browser.webkit && $N[0].tagName === "BODY" )
+ setTimeout(function(){ _initLayoutElements(true); }, 50);
+ return false;
+ }
+
+ // a center pane is required, so make sure it exists
+ if (!getPane("center").length) {
+ return _log( o.errors.centerPaneMissing );
+ }
+
+ // TEMP state so isInitialized returns true during init process
+ state.creatingLayout = true;
+
+ // update Container dims
+ $.extend(sC, elDims( $N ));
+
+ // initialize all layout elements
+ initPanes(); // size & position panes - calls initHandles() - which calls initResizable()
+
+ if (o.scrollToBookmarkOnLoad) {
+ var l = self.location;
+ if (l.hash) l.replace( l.hash ); // scrollTo Bookmark
+ }
+
+ // check to see if this layout 'nested' inside a pane
+ if (Instance.hasParentLayout)
+ o.resizeWithWindow = false;
+ // bind resizeAll() for 'this layout instance' to window.resize event
+ else if (o.resizeWithWindow)
+ $(window).bind("resize."+ sID, windowResize);
+
+ delete state.creatingLayout;
+ state.initialized = true;
+
+ // init plugins for this layout, if there are any
+ runPluginCallbacks( Instance, $.layout.onReady );
+
+ // now run the onload callback, if exists
+ _runCallbacks("onload_end");
+
+ return true; // elements initialized successfully
+ }
+
+ /**
+ * Initialize nested layouts - called when _initLayoutElements completes
+ *
+ * NOT CURRENTLY USED
+ *
+ * @see _initLayoutElements
+ * @return An object pointer to the instance created
+ */
+, _initChildLayouts = function () {
+ $.each(_c.allPanes, function (idx, pane) {
+ if (options[pane].initChildLayout)
+ createChildLayout( pane );
+ });
+ }
+
+ /**
+ * Initialize nested layouts for a specific pane - can optionally pass layout-options
+ *
+ * @see _initChildLayouts
+ * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west
+ * @param {Object=} [opts] Layout-options - if passed, will OVERRRIDE options[pane].childOptions
+ * @return An object pointer to the layout instance created - or null
+ */
+, createChildLayout = function (evt_or_pane, opts) {
+ var pane = evtPane.call(this, evt_or_pane)
+ , $P = $Ps[pane]
+ , C = children
+ ;
+ if ($P) {
+ var $C = $Cs[pane]
+ , o = opts || options[pane].childOptions
+ , d = "layout"
+ // determine which element is supposed to be the 'child container'
+ // if pane has a 'containerSelector' OR a 'content-div', use those instead of the pane
+ , $Cont = o.containerSelector ? $P.find( o.containerSelector ) : ($C || $P)
+ , containerFound = $Cont.length
+ // see if a child-layout ALREADY exists on this element
+ , child = containerFound ? (C[pane] = $Cont.data(d) || null) : null
+ ;
+ // if no layout exists, but childOptions are set, try to create the layout now
+ if (!child && containerFound && o)
+ child = C[pane] = $Cont.eq(0).layout(o) || null;
+ if (child)
+ child.hasParentLayout = true; // set parent-flag in child
+ }
+ Instance[pane].child = C[pane]; // ALWAYS set pane-object pointer, even if null
+ }
+
+, windowResize = function () {
+ var delay = Number(options.resizeWithWindowDelay);
+ if (delay < 10) delay = 100; // MUST have a delay!
+ // resizing uses a delay-loop because the resize event fires repeatly - except in FF, but delay anyway
+ timer.clear("winResize"); // if already running
+ timer.set("winResize", function(){
+ timer.clear("winResize");
+ timer.clear("winResizeRepeater");
+ var dims = elDims( $N );
+ // only trigger resizeAll() if container has changed size
+ if (dims.innerWidth !== sC.innerWidth || dims.innerHeight !== sC.innerHeight)
+ resizeAll();
+ }, delay);
+ // ALSO set fixed-delay timer, if not already running
+ if (!timer.data["winResizeRepeater"]) setWindowResizeRepeater();
+ }
+
+, setWindowResizeRepeater = function () {
+ var delay = Number(options.resizeWithWindowMaxDelay);
+ if (delay > 0)
+ timer.set("winResizeRepeater", function(){ setWindowResizeRepeater(); resizeAll(); }, delay);
+ }
+
+, unload = function () {
+ var o = options;
+
+ _runCallbacks("onunload_start");
+
+ // trigger plugin callabacks for this layout (eg: stateManagement)
+ runPluginCallbacks( Instance, $.layout.onUnload );
+
+ _runCallbacks("onunload_end");
+ }
+
+ /**
+ * Validate and initialize container CSS and events
+ *
+ * @see _create()
+ */
+, _initContainer = function () {
+ var
+ N = $N[0]
+ , tag = sC.tagName = N.tagName
+ , id = sC.id = N.id
+ , cls = sC.className = N.className
+ , o = options
+ , name = o.name
+ , fullPage= (tag === "BODY")
+ , props = "overflow,position,margin,padding,border"
+ , css = "layoutCSS"
+ , CSS = {}
+ , hid = "hidden" // used A LOT!
+ // see if this container is a 'pane' inside an outer-layout
+ , parent = $N.data("parentLayout") // parent-layout Instance
+ , pane = $N.data("layoutEdge") // pane-name in parent-layout
+ , isChild = parent && pane
+ ;
+ // sC -> state.container
+ sC.selector = $N.selector.split(".slice")[0];
+ sC.ref = (o.name ? o.name +' layout / ' : '') + tag + (id ? "#"+id : cls ? '.['+cls+']' : ''); // used in messages
+
+ $N .data({
+ layout: Instance
+ , layoutContainer: sID // FLAG to indicate this is a layout-container - contains unique internal ID
+ })
+ .addClass(o.containerClass)
+ ;
+ var layoutMethods = {
+ destroy: ''
+ , initPanes: ''
+ , resizeAll: 'resizeAll'
+ , resize: 'resizeAll'
+ };
+ // loop hash and bind all methods - include layoutID namespacing
+ for (name in layoutMethods) {
+ $N.bind("layout"+ name.toLowerCase() +"."+ sID, Instance[ layoutMethods[name] || name ]);
+ }
+
+ // if this container is another layout's 'pane', then set child/parent pointers
+ if (isChild) {
+ // update parent flag
+ Instance.hasParentLayout = true;
+ // set pointers to THIS child-layout (Instance) in parent-layout
+ // NOTE: parent.PANE.child is an ALIAS to parent.children.PANE
+ parent[pane].child = parent.children[pane] = $N.data("layout");
+ }
+
+ // SAVE original container CSS for use in destroy()
+ if (!$N.data(css)) {
+ // handle props like overflow different for BODY & HTML - has 'system default' values
+ if (fullPage) {
+ CSS = $.extend( elCSS($N, props), {
+ height: $N.css("height")
+ , overflow: $N.css("overflow")
+ , overflowX: $N.css("overflowX")
+ , overflowY: $N.css("overflowY")
+ });
+ // ALSO SAVE <HTML> CSS
+ var $H = $("html");
+ $H.data(css, {
+ height: "auto" // FF would return a fixed px-size!
+ , overflow: $H.css("overflow")
+ , overflowX: $H.css("overflowX")
+ , overflowY: $H.css("overflowY")
+ });
+ }
+ else // handle props normally for non-body elements
+ CSS = elCSS($N, props+",top,bottom,left,right,width,height,overflow,overflowX,overflowY");
+
+ $N.data(css, CSS);
+ }
+
+ try { // format html/body if this is a full page layout
+ if (fullPage) {
+ $("html").css({
+ height: "100%"
+ , overflow: hid
+ , overflowX: hid
+ , overflowY: hid
+ });
+ $("body").css({
+ position: "relative"
+ , height: "100%"
+ , overflow: hid
+ , overflowX: hid
+ , overflowY: hid
+ , margin: 0
+ , padding: 0 // TODO: test whether body-padding could be handled?
+ , border: "none" // a body-border creates problems because it cannot be measured!
+ });
+
+ // set current layout-container dimensions
+ $.extend(sC, elDims( $N ));
+ }
+ else { // set required CSS for overflow and position
+ // ENSURE container will not 'scroll'
+ CSS = { overflow: hid, overflowX: hid, overflowY: hid }
+ var
+ p = $N.css("position")
+ , h = $N.css("height")
+ ;
+ // if this is a NESTED layout, then container/outer-pane ALREADY has position and height
+ if (!isChild) {
+ if (!p || !p.match(/fixed|absolute|relative/))
+ CSS.position = "relative"; // container MUST have a 'position'
+ /*
+ if (!h || h=="auto")
+ CSS.height = "100%"; // container MUST have a 'height'
+ */
+ }
+ $N.css( CSS );
+
+ // set current layout-container dimensions
+ if ( $N.is(":visible") ) {
+ $.extend(sC, elDims( $N ));
+ if (sC.innerHeight < 1)
+ _log( o.errors.noContainerHeight.replace(/CONTAINER/, sC.ref) );
+ }
+ }
+ } catch (ex) {}
+ }
+
+ /**
+ * Bind layout hotkeys - if options enabled
+ *
+ * @see _create() and addPane()
+ * @param {string=} [panes=""] The edge(s) to process
+ */
+, initHotkeys = function (panes) {
+ panes = panes ? panes.split(",") : _c.borderPanes;
+ // bind keyDown to capture hotkeys, if option enabled for ANY pane
+ $.each(panes, function (i, pane) {
+ var o = options[pane];
+ if (o.enableCursorHotkey || o.customHotkey) {
+ $(document).bind("keydown."+ sID, keyDown); // only need to bind this ONCE
+ return false; // BREAK - binding was done
+ }
+ });
+ }
+
+ /**
+ * Build final OPTIONS data
+ *
+ * @see _create()
+ */
+, initOptions = function () {
+ var data, d, pane, key, val, i, c, o;
+
+ // reprocess user's layout-options to have correct options sub-key structure
+ opts = $.layout.transformData( opts ); // panes = default subkey
+
+ // auto-rename old options for backward compatibility
+ opts = $.layout.backwardCompatibility.renameAllOptions( opts );
+
+ // if user-options has 'panes' key (pane-defaults), clean it...
+ if (!$.isEmptyObject(opts.panes)) {
+ // REMOVE any pane-defaults that MUST be set per-pane
+ data = $.layout.optionsMap.noDefault;
+ for (i=0, c=data.length; i<c; i++) {
+ key = data[i];
+ delete opts.panes[key]; // OK if does not exist
+ }
+ // REMOVE any layout-options specified under opts.panes
+ data = $.layout.optionsMap.layout;
+ for (i=0, c=data.length; i<c; i++) {
+ key = data[i];
+ delete opts.panes[key]; // OK if does not exist
+ }
+ }
+
+ // MOVE any NON-layout-options from opts-root to opts.panes
+ data = $.layout.optionsMap.layout;
+ var rootKeys = $.layout.config.optionRootKeys;
+ for (key in opts) {
+ val = opts[key];
+ if ($.inArray(key, rootKeys) < 0 && $.inArray(key, data) < 0) {
+ if (!opts.panes[key])
+ opts.panes[key] = $.isPlainObject(val) ? $.extend(true, {}, val) : val;
+ delete opts[key]
+ }
+ }
+
+ // START by updating ALL options from opts
+ $.extend(true, options, opts);
+
+ // CREATE final options (and config) for EACH pane
+ $.each(_c.allPanes, function (i, pane) {
+
+ // apply 'pane-defaults' to CONFIG.[PANE]
+ _c[pane] = $.extend(true, {}, _c.panes, _c[pane]);
+
+ d = options.panes;
+ o = options[pane];
+
+ // center-pane uses SOME keys in defaults.panes branch
+ if (pane === 'center') {
+ // ONLY copy keys from opts.panes listed in: $.layout.optionsMap.center
+ data = $.layout.optionsMap.center; // list of 'center-pane keys'
+ for (i=0, c=data.length; i<c; i++) { // loop the list...
+ key = data[i];
+ // only need to use pane-default if pane-specific value not set
+ if (!opts.center[key] && (opts.panes[key] || !o[key]))
+ o[key] = d[key]; // pane-default
+ }
+ }
+ else {
+ // border-panes use ALL keys in defaults.panes branch
+ o = options[pane] = $.extend(true, {}, d, o); // re-apply pane-specific opts AFTER pane-defaults
+ createFxOptions( pane );
+ // ensure all border-pane-specific base-classes exist
+ if (!o.resizerClass) o.resizerClass = "ui-layout-resizer";
+ if (!o.togglerClass) o.togglerClass = "ui-layout-toggler";
+ }
+ // ensure we have base pane-class (ALL panes)
+ if (!o.paneClass) o.paneClass = "ui-layout-pane";
+ });
+
+ // update options.zIndexes if a zIndex-option specified
+ var zo = opts.zIndex
+ , z = options.zIndexes;
+ if (zo > 0) {
+ z.pane_normal = zo;
+ z.content_mask = max(zo+1, z.content_mask); // MIN = +1
+ z.resizer_normal = max(zo+2, z.resizer_normal); // MIN = +2
+ }
+
+ // DELETE 'panes' key now that we are done - values were copied to EACH pane
+ delete options.panes;
+
+
+ function createFxOptions ( pane ) {
+ var o = options[pane]
+ , d = options.panes;
+ // ensure fxSettings key to avoid errors
+ if (!o.fxSettings) o.fxSettings = {};
+ if (!d.fxSettings) d.fxSettings = {};
+
+ $.each(["_open","_close","_size"], function (i,n) {
+ var
+ sName = "fxName"+ n
+ , sSpeed = "fxSpeed"+ n
+ , sSettings = "fxSettings"+ n
+ // recalculate fxName according to specificity rules
+ , fxName = o[sName] =
+ o[sName] // options.west.fxName_open
+ || d[sName] // options.panes.fxName_open
+ || o.fxName // options.west.fxName
+ || d.fxName // options.panes.fxName
+ || "none" // MEANS $.layout.defaults.panes.fxName == "" || false || null || 0
+ ;
+ // validate fxName to ensure is valid effect - MUST have effect-config data in options.effects
+ if (fxName === "none" || !$.effects || !$.effects[fxName] || !options.effects[fxName])
+ fxName = o[sName] = "none"; // effect not loaded OR unrecognized fxName
+
+ // set vars for effects subkeys to simplify logic
+ var fx = options.effects[fxName] || {} // effects.slide
+ , fx_all = fx.all || null // effects.slide.all
+ , fx_pane = fx[pane] || null // effects.slide.west
+ ;
+ // create fxSpeed[_open|_close|_size]
+ o[sSpeed] =
+ o[sSpeed] // options.west.fxSpeed_open
+ || d[sSpeed] // options.west.fxSpeed_open
+ || o.fxSpeed // options.west.fxSpeed
+ || d.fxSpeed // options.panes.fxSpeed
+ || null // DEFAULT - let fxSetting.duration control speed
+ ;
+ // create fxSettings[_open|_close|_size]
+ o[sSettings] = $.extend(
+ true
+ , {}
+ , fx_all // effects.slide.all
+ , fx_pane // effects.slide.west
+ , d.fxSettings // options.panes.fxSettings
+ , o.fxSettings // options.west.fxSettings
+ , d[sSettings] // options.panes.fxSettings_open
+ , o[sSettings] // options.west.fxSettings_open
+ );
+ });
+
+ // DONE creating action-specific-settings for this pane,
+ // so DELETE generic options - are no longer meaningful
+ delete o.fxName;
+ delete o.fxSpeed;
+ delete o.fxSettings;
+ }
+ }
+
+ /**
+ * Initialize module objects, styling, size and position for all panes
+ *
+ * @see _initElements()
+ * @param {string} pane The pane to process
+ */
+, getPane = function (pane) {
+ var sel = options[pane].paneSelector
+ if (sel.substr(0,1)==="#") // ID selector
+ // NOTE: elements selected 'by ID' DO NOT have to be 'children'
+ return $N.find(sel).eq(0);
+ else { // class or other selector
+ var $P = $N.children(sel).eq(0);
+ // look for the pane nested inside a 'form' element
+ return $P.length ? $P : $N.children("form:first").children(sel).eq(0);
+ }
+ }
+
+, initPanes = function (evt) {
+ // stopPropagation if called by trigger("layoutinitpanes") - use evtPane utility
+ evtPane(evt);
+
+ // NOTE: do north & south FIRST so we can measure their height - do center LAST
+ $.each(_c.allPanes, function (idx, pane) {
+ addPane( pane, true );
+ });
+
+ // init the pane-handles NOW in case we have to hide or close the pane below
+ initHandles();
+
+ // now that all panes have been initialized and initially-sized,
+ // make sure there is really enough space available for each pane
+ $.each(_c.borderPanes, function (i, pane) {
+ if ($Ps[pane] && state[pane].isVisible) { // pane is OPEN
+ setSizeLimits(pane);
+ makePaneFit(pane); // pane may be Closed, Hidden or Resized by makePaneFit()
+ }
+ });
+ // size center-pane AGAIN in case we 'closed' a border-pane in loop above
+ sizeMidPanes("center");
+
+ // Chrome/Webkit sometimes fires callbacks BEFORE it completes resizing!
+ // Before RC30.3, there was a 10ms delay here, but that caused layout
+ // to load asynchrously, which is BAD, so try skipping delay for now
+
+ // process pane contents and callbacks, and init/resize child-layout if exists
+ $.each(_c.allPanes, function (i, pane) {
+ var o = options[pane];
+ if ($Ps[pane]) {
+ if (state[pane].isVisible) { // pane is OPEN
+ sizeContent(pane);
+ // trigger pane.onResize if triggerEventsOnLoad = true
+ if (o.triggerEventsOnLoad)
+ _runCallbacks("onresize_end", pane);
+ else // automatic if onresize called, otherwise call it specifically
+ // resize child - IF inner-layout already exists (created before this layout)
+ resizeChildLayout(pane);
+ }
+ // init childLayout - even if pane is not visible
+ if (o.initChildLayout && o.childOptions)
+ createChildLayout(pane);
+ }
+ });
+ }
+
+ /**
+ * Add a pane to the layout - subroutine of initPanes()
+ *
+ * @see initPanes()
+ * @param {string} pane The pane to process
+ * @param {boolean=} [force=false] Size content after init
+ */
+, addPane = function (pane, force) {
+ if (!force && !isInitialized()) return;
+ var
+ o = options[pane]
+ , s = state[pane]
+ , c = _c[pane]
+ , fx = s.fx
+ , dir = c.dir
+ , spacing = o.spacing_open || 0
+ , isCenter = (pane === "center")
+ , CSS = {}
+ , $P = $Ps[pane]
+ , size, minSize, maxSize
+ ;
+ // if pane-pointer already exists, remove the old one first
+ if ($P)
+ removePane( pane, false, true, false );
+ else
+ $Cs[pane] = false; // init
+
+ $P = $Ps[pane] = getPane(pane);
+ if (!$P.length) {
+ $Ps[pane] = false; // logic
+ return;
+ }
+
+ // SAVE original Pane CSS
+ if (!$P.data("layoutCSS")) {
+ var props = "position,top,left,bottom,right,width,height,overflow,zIndex,display,backgroundColor,padding,margin,border";
+ $P.data("layoutCSS", elCSS($P, props));
+ }
+
+ // create alias for pane data in Instance - initHandles will add more
+ Instance[pane] = { name: pane, pane: $Ps[pane], content: $Cs[pane], options: options[pane], state: state[pane], child: children[pane] };
+
+ // add classes, attributes & events
+ $P .data({
+ parentLayout: Instance // pointer to Layout Instance
+ , layoutPane: Instance[pane] // NEW pointer to pane-alias-object
+ , layoutEdge: pane
+ , layoutRole: "pane"
+ })
+ .css(c.cssReq).css("zIndex", options.zIndexes.pane_normal)
+ .css(o.applyDemoStyles ? c.cssDemo : {}) // demo styles
+ .addClass( o.paneClass +" "+ o.paneClass+"-"+pane ) // default = "ui-layout-pane ui-layout-pane-west" - may be a dupe of 'paneSelector'
+ .bind("mouseenter."+ sID, addHover )
+ .bind("mouseleave."+ sID, removeHover )
+ ;
+ var paneMethods = {
+ hide: ''
+ , show: ''
+ , toggle: ''
+ , close: ''
+ , open: ''
+ , slideOpen: ''
+ , slideClose: ''
+ , slideToggle: ''
+ , size: 'sizePane'
+ , sizePane: 'sizePane'
+ , sizeContent: ''
+ , sizeHandles: ''
+ , enableClosable: ''
+ , disableClosable: ''
+ , enableSlideable: ''
+ , disableSlideable: ''
+ , enableResizable: ''
+ , disableResizable: ''
+ , swapPanes: 'swapPanes'
+ , swap: 'swapPanes'
+ , move: 'swapPanes'
+ , removePane: 'removePane'
+ , remove: 'removePane'
+ , createChildLayout: ''
+ , resizeChildLayout: ''
+ , resizeAll: 'resizeAll'
+ , resizeLayout: 'resizeAll'
+ }
+ , name;
+ // loop hash and bind all methods - include layoutID namespacing
+ for (name in paneMethods) {
+ $P.bind("layoutpane"+ name.toLowerCase() +"."+ sID, Instance[ paneMethods[name] || name ]);
+ }
+
+ // see if this pane has a 'scrolling-content element'
+ initContent(pane, false); // false = do NOT sizeContent() - called later
+
+ if (!isCenter) {
+ // call _parseSize AFTER applying pane classes & styles - but before making visible (if hidden)
+ // if o.size is auto or not valid, then MEASURE the pane and use that as its 'size'
+ size = s.size = _parseSize(pane, o.size);
+ minSize = _parseSize(pane,o.minSize) || 1;
+ maxSize = _parseSize(pane,o.maxSize) || 100000;
+ if (size > 0) size = max(min(size, maxSize), minSize);
+
+ // state for border-panes
+ s.isClosed = false; // true = pane is closed
+ s.isSliding = false; // true = pane is currently open by 'sliding' over adjacent panes
+ s.isResizing= false; // true = pane is in process of being resized
+ s.isHidden = false; // true = pane is hidden - no spacing, resizer or toggler is visible!
+
+ // array for 'pin buttons' whose classNames are auto-updated on pane-open/-close
+ if (!s.pins) s.pins = [];
+ }
+ // states common to ALL panes
+ s.tagName = $P[0].tagName;
+ s.edge = pane; // useful if pane is (or about to be) 'swapped' - easy find out where it is (or is going)
+ s.noRoom = false; // true = pane 'automatically' hidden due to insufficient room - will unhide automatically
+ s.isVisible = true; // false = pane is invisible - closed OR hidden - simplify logic
+
+ // set css-position to account for container borders & padding
+ switch (pane) {
+ case "north": CSS.top = sC.insetTop;
+ CSS.left = sC.insetLeft;
+ CSS.right = sC.insetRight;
+ break;
+ case "south": CSS.bottom = sC.insetBottom;
+ CSS.left = sC.insetLeft;
+ CSS.right = sC.insetRight;
+ break;
+ case "west": CSS.left = sC.insetLeft; // top, bottom & height set by sizeMidPanes()
+ break;
+ case "east": CSS.right = sC.insetRight; // ditto
+ break;
+ case "center": // top, left, width & height set by sizeMidPanes()
+ }
+
+ if (dir === "horz") // north or south pane
+ CSS.height = cssH($P, size);
+ else if (dir === "vert") // east or west pane
+ CSS.width = cssW($P, size);
+ //else if (isCenter) {}
+
+ $P.css(CSS); // apply size -- top, bottom & height will be set by sizeMidPanes
+ if (dir != "horz") sizeMidPanes(pane, true); // true = skipCallback
+
+ // close or hide the pane if specified in settings
+ if (o.initClosed && o.closable && !o.initHidden)
+ close(pane, true, true); // true, true = force, noAnimation
+ else if (o.initHidden || o.initClosed)
+ hide(pane); // will be completely invisible - no resizer or spacing
+ else if (!s.noRoom)
+ // make the pane visible - in case was initially hidden
+ $P.css("display","block");
+ // ELSE setAsOpen() - called later by initHandles()
+
+ // RESET visibility now - pane will appear IF display:block
+ $P.css("visibility","visible");
+
+ // check option for auto-handling of pop-ups & drop-downs
+ if (o.showOverflowOnHover)
+ $P.hover( allowOverflow, resetOverflow );
+
+ // if manually adding a pane AFTER layout initialization, then...
+ if (state.initialized) {
+ initHandles( pane );
+ initHotkeys( pane );
+ resizeAll(); // will sizeContent if pane is visible
+ if (s.isVisible) { // pane is OPEN
+ if (o.triggerEventsOnLoad)
+ _runCallbacks("onresize_end", pane);
+ else // automatic if onresize called, otherwise call it specifically
+ // resize child - IF inner-layout already exists (created before this layout)
+ resizeChildLayout(pane); // a previously existing childLayout
+ }
+ if (o.initChildLayout && o.childOptions)
+ createChildLayout(pane);
+ }
+ }
+
+ /**
+ * Initialize module objects, styling, size and position for all resize bars and toggler buttons
+ *
+ * @see _create()
+ * @param {string=} [panes=""] The edge(s) to process
+ */
+, initHandles = function (panes) {
+ panes = panes ? panes.split(",") : _c.borderPanes;
+
+ // create toggler DIVs for each pane, and set object pointers for them, eg: $R.north = north toggler DIV
+ $.each(panes, function (i, pane) {
+ var $P = $Ps[pane];
+ $Rs[pane] = false; // INIT
+ $Ts[pane] = false;
+ if (!$P) return; // pane does not exist - skip
+
+ var
+ o = options[pane]
+ , s = state[pane]
+ , c = _c[pane]
+ , paneId = o.paneSelector.substr(0,1) === "#" ? o.paneSelector.substr(1) : ""
+ , rClass = o.resizerClass
+ , tClass = o.togglerClass
+ , side = c.side.toLowerCase()
+ , spacing = (s.isVisible ? o.spacing_open : o.spacing_closed)
+ , _pane = "-"+ pane // used for classNames
+ , _state = (s.isVisible ? "-open" : "-closed") // used for classNames
+ , I = Instance[pane]
+ // INIT RESIZER BAR
+ , $R = I.resizer = $Rs[pane] = $("<div></div>")
+ // INIT TOGGLER BUTTON
+ , $T = I.toggler = (o.closable ? $Ts[pane] = $("<div></div>") : false)
+ ;
+
+ //if (s.isVisible && o.resizable) ... handled by initResizable
+ if (!s.isVisible && o.slidable)
+ $R.attr("title", o.tips.Slide).css("cursor", o.sliderCursor);
+
+ $R // if paneSelector is an ID, then create a matching ID for the resizer, eg: "#paneLeft" => "paneLeft-resizer"
+ .attr("id", paneId ? paneId +"-resizer" : "" )
+ .data({
+ parentLayout: Instance
+ , layoutPane: Instance[pane] // NEW pointer to pane-alias-object
+ , layoutEdge: pane
+ , layoutRole: "resizer"
+ })
+ .css(_c.resizers.cssReq).css("zIndex", options.zIndexes.resizer_normal)
+ .css(o.applyDemoStyles ? _c.resizers.cssDemo : {}) // add demo styles
+ .addClass(rClass +" "+ rClass+_pane)
+ .hover(addHover, removeHover) // ALWAYS add hover-classes, even if resizing is not enabled - handle with CSS instead
+ .hover(onResizerEnter, onResizerLeave) // ALWAYS NEED resizer.mouseleave to balance toggler.mouseenter
+ .appendTo($N) // append DIV to container
+ ;
+
+ if ($T) {
+ $T // if paneSelector is an ID, then create a matching ID for the resizer, eg: "#paneLeft" => "#paneLeft-toggler"
+ .attr("id", paneId ? paneId +"-toggler" : "" )
+ .data({
+ parentLayout: Instance
+ , layoutPane: Instance[pane] // NEW pointer to pane-alias-object
+ , layoutEdge: pane
+ , layoutRole: "toggler"
+ })
+ .css(_c.togglers.cssReq) // add base/required styles
+ .css(o.applyDemoStyles ? _c.togglers.cssDemo : {}) // add demo styles
+ .addClass(tClass +" "+ tClass+_pane)
+ .hover(addHover, removeHover) // ALWAYS add hover-classes, even if toggling is not enabled - handle with CSS instead
+ .bind("mouseenter", onResizerEnter) // NEED toggler.mouseenter because mouseenter MAY NOT fire on resizer
+ .appendTo($R) // append SPAN to resizer DIV
+ ;
+ // ADD INNER-SPANS TO TOGGLER
+ if (o.togglerContent_open) // ui-layout-open
+ $("<span>"+ o.togglerContent_open +"</span>")
+ .data({
+ layoutEdge: pane
+ , layoutRole: "togglerContent"
+ })
+ .data("layoutRole", "togglerContent")
+ .data("layoutEdge", pane)
+ .addClass("content content-open")
+ .css("display","none")
+ .appendTo( $T )
+ //.hover( addHover, removeHover ) // use ui-layout-toggler-west-hover .content-open instead!
+ ;
+ if (o.togglerContent_closed) // ui-layout-closed
+ $("<span>"+ o.togglerContent_closed +"</span>")
+ .data({
+ layoutEdge: pane
+ , layoutRole: "togglerContent"
+ })
+ .addClass("content content-closed")
+ .css("display","none")
+ .appendTo( $T )
+ //.hover( addHover, removeHover ) // use ui-layout-toggler-west-hover .content-closed instead!
+ ;
+ // ADD TOGGLER.click/.hover
+ enableClosable(pane);
+ }
+
+ // add Draggable events
+ initResizable(pane);
+
+ // ADD CLASSNAMES & SLIDE-BINDINGS - eg: class="resizer resizer-west resizer-open"
+ if (s.isVisible)
+ setAsOpen(pane); // onOpen will be called, but NOT onResize
+ else {
+ setAsClosed(pane); // onClose will be called
+ bindStartSlidingEvent(pane, true); // will enable events IF option is set
+ }
+
+ });
+
+ // SET ALL HANDLE DIMENSIONS
+ sizeHandles();
+ }
+
+
+ /**
+ * Initialize scrolling ui-layout-content div - if exists
+ *
+ * @see initPane() - or externally after an Ajax injection
+ * @param {string} [pane] The pane to process
+ * @param {boolean=} [resize=true] Size content after init
+ */
+, initContent = function (pane, resize) {
+ if (!isInitialized()) return;
+ var
+ o = options[pane]
+ , sel = o.contentSelector
+ , I = Instance[pane]
+ , $P = $Ps[pane]
+ , $C
+ ;
+ if (sel) $C = I.content = $Cs[pane] = (o.findNestedContent)
+ ? $P.find(sel).eq(0) // match 1-element only
+ : $P.children(sel).eq(0)
+ ;
+ if ($C && $C.length) {
+ $C.data("layoutRole", "content");
+ // SAVE original Pane CSS
+ if (!$C.data("layoutCSS"))
+ $C.data("layoutCSS", elCSS($C, "height"));
+ $C.css( _c.content.cssReq );
+ if (o.applyDemoStyles) {
+ $C.css( _c.content.cssDemo ); // add padding & overflow: auto to content-div
+ $P.css( _c.content.cssDemoPane ); // REMOVE padding/scrolling from pane
+ }
+ state[pane].content = {}; // init content state
+ if (resize !== false) sizeContent(pane);
+ // sizeContent() is called AFTER init of all elements
+ }
+ else
+ I.content = $Cs[pane] = false;
+ }
+
+
+ /**
+ * Add resize-bars to all panes that specify it in options
+ * -dependancy: $.fn.resizable - will skip if not found
+ *
+ * @see _create()
+ * @param {string=} [panes=""] The edge(s) to process
+ */
+, initResizable = function (panes) {
+ var draggingAvailable = $.layout.plugins.draggable
+ , side // set in start()
+ ;
+ panes = panes ? panes.split(",") : _c.borderPanes;
+
+ $.each(panes, function (idx, pane) {
+ var o = options[pane];
+ if (!draggingAvailable || !$Ps[pane] || !o.resizable) {
+ o.resizable = false;
+ return true; // skip to next
+ }
+
+ var s = state[pane]
+ , z = options.zIndexes
+ , c = _c[pane]
+ , side = c.dir=="horz" ? "top" : "left"
+ , opEdge = _c.oppositeEdge[pane]
+ , masks = pane +",center,"+ opEdge + (c.dir=="horz" ? ",west,east" : "")
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , base = o.resizerClass
+ , lastPos = 0 // used when live-resizing
+ , r, live // set in start because may change
+ // 'drag' classes are applied to the ORIGINAL resizer-bar while dragging is in process
+ , resizerClass = base+"-drag" // resizer-drag
+ , resizerPaneClass = base+"-"+pane+"-drag" // resizer-north-drag
+ // 'helper' class is applied to the CLONED resizer-bar while it is being dragged
+ , helperClass = base+"-dragging" // resizer-dragging
+ , helperPaneClass = base+"-"+pane+"-dragging" // resizer-north-dragging
+ , helperLimitClass = base+"-dragging-limit" // resizer-drag
+ , helperPaneLimitClass = base+"-"+pane+"-dragging-limit" // resizer-north-drag
+ , helperClassesSet = false // logic var
+ ;
+
+ if (!s.isClosed)
+ $R.attr("title", o.tips.Resize)
+ .css("cursor", o.resizerCursor); // n-resize, s-resize, etc
+
+ $R.draggable({
+ containment: $N[0] // limit resizing to layout container
+ , axis: (c.dir=="horz" ? "y" : "x") // limit resizing to horz or vert axis
+ , delay: 0
+ , distance: 1
+ , grid: o.resizingGrid
+ // basic format for helper - style it using class: .ui-draggable-dragging
+ , helper: "clone"
+ , opacity: o.resizerDragOpacity
+ , addClasses: false // avoid ui-state-disabled class when disabled
+ //, iframeFix: o.draggableIframeFix // TODO: consider using when bug is fixed
+ , zIndex: z.resizer_drag
+
+ , start: function (e, ui) {
+ // REFRESH options & state pointers in case we used swapPanes
+ o = options[pane];
+ s = state[pane];
+ // re-read options
+ live = o.livePaneResizing;
+
+ // ondrag_start callback - will CANCEL hide if returns false
+ // TODO: dragging CANNOT be cancelled like this, so see if there is a way?
+ if (false === _runCallbacks("ondrag_start", pane)) return false;
+
+ s.isResizing = true; // prevent pane from closing while resizing
+ timer.clear(pane+"_closeSlider"); // just in case already triggered
+
+ // SET RESIZER LIMITS - used in drag()
+ setSizeLimits(pane); // update pane/resizer state
+ r = s.resizerPosition;
+ lastPos = ui.position[ side ]
+
+ $R.addClass( resizerClass +" "+ resizerPaneClass ); // add drag classes
+ helperClassesSet = false; // reset logic var - see drag()
+
+ // DISABLE TEXT SELECTION (probably already done by resizer.mouseOver)
+ $('body').disableSelection();
+
+ // MASK PANES CONTAINING IFRAMES, APPLETS OR OTHER TROUBLESOME ELEMENTS
+ showMasks( masks );
+ }
+
+ , drag: function (e, ui) {
+ if (!helperClassesSet) { // can only add classes after clone has been added to the DOM
+ //$(".ui-draggable-dragging")
+ ui.helper
+ .addClass( helperClass +" "+ helperPaneClass ) // add helper classes
+ .css({ right: "auto", bottom: "auto" }) // fix dir="rtl" issue
+ .children().css("visibility","hidden") // hide toggler inside dragged resizer-bar
+ ;
+ helperClassesSet = true;
+ // draggable bug!? RE-SET zIndex to prevent E/W resize-bar showing through N/S pane!
+ if (s.isSliding) $Ps[pane].css("zIndex", z.pane_sliding);
+ }
+ // CONTAIN RESIZER-BAR TO RESIZING LIMITS
+ var limit = 0;
+ if (ui.position[side] < r.min) {
+ ui.position[side] = r.min;
+ limit = -1;
+ }
+ else if (ui.position[side] > r.max) {
+ ui.position[side] = r.max;
+ limit = 1;
+ }
+ // ADD/REMOVE dragging-limit CLASS
+ if (limit) {
+ ui.helper.addClass( helperLimitClass +" "+ helperPaneLimitClass ); // at dragging-limit
+ window.defaultStatus = (limit>0 && pane.match(/(north|west)/)) || (limit<0 && pane.match(/(south|east)/)) ? o.tips.maxSizeWarning : o.tips.minSizeWarning;
+ }
+ else {
+ ui.helper.removeClass( helperLimitClass +" "+ helperPaneLimitClass ); // not at dragging-limit
+ window.defaultStatus = "";
+ }
+ // DYNAMICALLY RESIZE PANES IF OPTION ENABLED
+ // won't trigger unless resizer has actually moved!
+ if (live && Math.abs(ui.position[side] - lastPos) >= o.liveResizingTolerance) {
+ lastPos = ui.position[side];
+ resizePanes(e, ui, pane)
+ }
+ }
+
+ , stop: function (e, ui) {
+ $('body').enableSelection(); // RE-ENABLE TEXT SELECTION
+ window.defaultStatus = ""; // clear 'resizing limit' message from statusbar
+ $R.removeClass( resizerClass +" "+ resizerPaneClass ); // remove drag classes from Resizer
+ s.isResizing = false;
+ resizePanes(e, ui, pane, true, masks); // true = resizingDone
+ }
+
+ });
+ });
+
+ /**
+ * resizePanes
+ *
+ * Sub-routine called from stop() - and drag() if livePaneResizing
+ *
+ * @param {!Object} evt
+ * @param {!Object} ui
+ * @param {string} pane
+ * @param {boolean=} [resizingDone=false]
+ */
+ var resizePanes = function (evt, ui, pane, resizingDone, masks) {
+ var dragPos = ui.position
+ , c = _c[pane]
+ , o = options[pane]
+ , s = state[pane]
+ , resizerPos
+ ;
+ switch (pane) {
+ case "north": resizerPos = dragPos.top; break;
+ case "west": resizerPos = dragPos.left; break;
+ case "south": resizerPos = sC.offsetHeight - dragPos.top - o.spacing_open; break;
+ case "east": resizerPos = sC.offsetWidth - dragPos.left - o.spacing_open; break;
+ };
+ // remove container margin from resizer position to get the pane size
+ var newSize = resizerPos - sC["inset"+ c.side];
+
+ // Disable OR Resize Mask(s) created in drag.start
+ if (!resizingDone) {
+ // ensure we meet liveResizingTolerance criteria
+ if (Math.abs(newSize - s.size) < o.liveResizingTolerance)
+ return; // SKIP resize this time
+ // resize the pane
+ manualSizePane(pane, newSize, false, true); // true = noAnimation
+ sizeMasks(); // resize all visible masks
+ }
+ else { // resizingDone
+ // ondrag_end callback
+ if (false !== _runCallbacks("ondrag_end", pane))
+ manualSizePane(pane, newSize, false, true); // true = noAnimation
+ hideMasks(); // hide all masks, which include panes with 'content/iframe-masks'
+ if (s.isSliding && masks) // RE-SHOW only 'object-masks' so objects won't show through sliding pane
+ showMasks( masks, true ); // true = onlyForObjects
+ }
+ };
+ }
+
+ /**
+ * sizeMask
+ *
+ * Needed to overlay a DIV over an IFRAME-pane because mask CANNOT be *inside* the pane
+ * Called when mask created, and during livePaneResizing
+ */
+, sizeMask = function () {
+ var $M = $(this)
+ , pane = $M.data("layoutMask") // eg: "west"
+ , s = state[pane]
+ ;
+ // only masks over an IFRAME-pane need manual resizing
+ if (s.tagName == "IFRAME" && s.isVisible) // no need to mask closed/hidden panes
+ $M.css({
+ top: s.offsetTop
+ , left: s.offsetLeft
+ , width: s.outerWidth
+ , height: s.outerHeight
+ });
+ /* ALT Method...
+ var $P = $Ps[pane];
+ $M.css( $P.position() ).css({ width: $P[0].offsetWidth, height: $P[0].offsetHeight });
+ */
+ }
+, sizeMasks = function () {
+ $Ms.each( sizeMask ); // resize all 'visible' masks
+ }
+
+, showMasks = function (panes, onlyForObjects) {
+ var a = panes ? panes.split(",") : $.layout.config.allPanes
+ , z = options.zIndexes
+ , o, s;
+ $.each(a, function(i,p){
+ s = state[p];
+ o = options[p];
+ if (s.isVisible && ( (!onlyForObjects && o.maskContents) || o.maskObjects )) {
+ getMasks(p).each(function(){
+ sizeMask.call(this);
+ this.style.zIndex = s.isSliding ? z.pane_sliding+1 : z.pane_normal+1
+ this.style.display = "block";
+ });
+ }
+ });
+ }
+
+, hideMasks = function () {
+ // ensure no pane is resizing - could be a timing issue
+ var skip;
+ $.each( $.layout.config.borderPanes, function(i,p){
+ if (state[p].isResizing) {
+ skip = true;
+ return false; // BREAK
+ }
+ });
+ if (!skip)
+ $Ms.hide(); // hide ALL masks
+ }
+
+, getMasks = function (pane) {
+ var $Masks = $([])
+ , $M, i = 0, c = $Ms.length
+ ;
+ for (; i<c; i++) {
+ $M = $Ms.eq(i);
+ if ($M.data("layoutMask") === pane)
+ $Masks = $Masks.add( $M );
+ }
+ if ($Masks.length)
+ return $Masks;
+ else
+ return createMasks(pane);
+ }
+
+ /**
+ * createMasks
+ *
+ * Generates both DIV (ALWAYS used) and IFRAME (optional) elements as masks
+ * An IFRAME mask is created *under* the DIV when maskObjects=true, because a DIV cannot mask an applet
+ *
+ * @param {string} pane The pane to create masks for
+ * @return {Object} jQuery set of the new mask elements (empty if masking is disabled)
+ */
+, createMasks = function (pane) {
+ var
+ $P = $Ps[pane]
+ , s = state[pane]
+ , o = options[pane]
+ , z = options.zIndexes
+ //, objMask = o.maskObjects && s.tagName != "IFRAME" // check for option
+ , $Masks = $([])
+ , isIframe, el, $M, css, i
+ ;
+ if (!o.maskContents && !o.maskObjects) return $Masks;
+ // if o.maskObjects=true, then loop TWICE to create BOTH kinds of mask, else only create a DIV
+ for (i=0; i < (o.maskObjects ? 2 : 1); i++) {
+ isIframe = o.maskObjects && i==0;
+ el = document.createElement( isIframe ? "iframe" : "div" );
+ $M = $(el).data("layoutMask", pane); // add data to relate mask to pane
+ el.className = "ui-layout-mask ui-layout-mask-"+ pane; // for user styling
+ css = el.style;
+ // styles common to both DIVs and IFRAMES
+ css.display = "block";
+ css.position = "absolute";
+ if (isIframe) { // IFRAME-only props
+ el.frameborder = 0;
+ el.src = "about:blank";
+ css.opacity = 0;
+ css.filter = "Alpha(Opacity='0')"; // legacy IE opacity
+ css.border = 0;
+ }
+ // if pane is an IFRAME, then must mask the pane itself
+ if (s.tagName == "IFRAME") {
+ // NOTE sizing done by a subroutine so can be called during live-resizing
+ css.zIndex = z.pane_normal+1; // 1-higher than pane
+ $N.append( el ); // append to LAYOUT CONTAINER
+ }
+ // otherwise put masks *inside the pane* to mask its contents
+ else {
+ $M.addClass("ui-layout-mask-inside-pane");
+ css.zIndex = o.maskZindex || z.content_mask; // usually 1, but customizable
+ css.top = 0;
+ css.left = 0;
+ css.width = "100%";
+ css.height = "100%";
+ $P.append( el ); // append INSIDE pane element
+ }
+ // add to return object
+ $Masks = $Masks.add( el );
+ // add Mask to cached array so can be resized & reused
+ $Ms = $Ms.add( el );
+ }
+ return $Masks;
+ }
+
+
+ /**
+ * Destroy this layout and reset all elements
+ *
+ * @param {boolean=} [destroyChildren=false] Destory Child-Layouts first?
+ */
+, destroy = function (evt_or_destroyChildren, destroyChildren) {
+ // UNBIND layout events and remove global object
+ $(window).unbind("."+ sID); // resize & unload
+ $(document).unbind("."+ sID); // keyDown (hotkeys)
+
+ if (typeof evt_or_destroyChildren === "object")
+ // stopPropagation if called by trigger("layoutdestroy") - use evtPane utility
+ evtPane(evt_or_destroyChildren);
+ else // no event, so transfer 1st param to destroyChildren param
+ destroyChildren = evt_or_destroyChildren;
+
+ // need to look for parent layout BEFORE we remove the container data, else skips a level
+ //var parentPane = Instance.hasParentLayout ? $.layout.getParentPaneInstance( $N ) : null;
+
+ // reset layout-container
+ $N .clearQueue()
+ .removeData("layout")
+ .removeData("layoutContainer")
+ .removeClass(options.containerClass)
+ .unbind("."+ sID) // remove ALL Layout events
+ ;
+
+ // remove all mask elements that have been created
+ $Ms.remove();
+
+ // loop all panes to remove layout classes, attributes and bindings
+ $.each(_c.allPanes, function (i, pane) {
+ removePane( pane, false, true, destroyChildren ); // true = skipResize
+ });
+
+ // do NOT reset container CSS if is a 'pane' (or 'content') in an outer-layout - ie, THIS layout is 'nested'
+ var css = "layoutCSS";
+ if ($N.data(css) && !$N.data("layoutRole")) // RESET CSS
+ $N.css( $N.data(css) ).removeData(css);
+
+ // for full-page layouts, also reset the <HTML> CSS
+ if (sC.tagName === "BODY" && ($N = $("html")).data(css)) // RESET <HTML> CSS
+ $N.css( $N.data(css) ).removeData(css);
+
+ // trigger plugins for this layout, if there are any
+ runPluginCallbacks( Instance, $.layout.onDestroy );
+
+ // trigger state-management and onunload callback
+ unload();
+
+ // clear the Instance of everything except for container & options (so could recreate)
+ // RE-CREATE: myLayout = myLayout.container.layout( myLayout.options );
+ for (n in Instance)
+ if (!n.match(/^(container|options)$/)) delete Instance[ n ];
+ // add a 'destroyed' flag to make it easy to check
+ Instance.destroyed = true;
+
+ // if this is a child layout, CLEAR the child-pointer in the parent
+ /* for now the pointer REMAINS, but with only container, options and destroyed keys
+ if (parentPane) {
+ var layout = parentPane.pane.data("parentLayout");
+ parentPane.child = layout.children[ parentPane.name ] = null;
+ }
+ */
+
+ return Instance; // for coding convenience
+ }
+
+ /**
+ * Remove a pane from the layout - subroutine of destroy()
+ *
+ * @see destroy()
+ * @param {string|Object} evt_or_pane The pane to process
+ * @param {boolean=} [remove=false] Remove the DOM element?
+ * @param {boolean=} [skipResize=false] Skip calling resizeAll()?
+ * @param {boolean=} [destroyChild=true] Destroy Child-layouts? If not passed, obeys options setting
+ */
+, removePane = function (evt_or_pane, remove, skipResize, destroyChild) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $P = $Ps[pane]
+ , $C = $Cs[pane]
+ , $R = $Rs[pane]
+ , $T = $Ts[pane]
+ ;
+ // NOTE: elements can still exist even after remove()
+ // so check for missing data(), which is cleared by removed()
+ if ($P && $.isEmptyObject( $P.data() )) $P = false;
+ if ($C && $.isEmptyObject( $C.data() )) $C = false;
+ if ($R && $.isEmptyObject( $R.data() )) $R = false;
+ if ($T && $.isEmptyObject( $T.data() )) $T = false;
+
+ if ($P) $P.stop(true, true);
+
+ // check for a child layout
+ // NOTE(review): the local 'destroy' below shadows the destroy() method within this function
+ var o = options[pane]
+ , s = state[pane]
+ , d = "layout"
+ , css = "layoutCSS"
+ , child = children[pane] || ($P ? $P.data(d) : 0) || ($C ? $C.data(d) : 0) || null
+ , destroy = destroyChild !== undefined ? destroyChild : o.destroyChildLayout
+ ;
+
+ // FIRST destroy the child-layout(s)
+ if (destroy && child && !child.destroyed) {
+ child.destroy(true); // tell child-layout to destroy ALL its child-layouts too
+ if (child.destroyed) // destroy was successful
+ child = null; // clear pointer for logic below
+ }
+
+ if ($P && remove && !child)
+ $P.remove();
+ else if ($P && $P[0]) {
+ // create list of ALL pane-classes that need to be removed
+ var root = o.paneClass // default="ui-layout-pane"
+ , pRoot = root +"-"+ pane // eg: "ui-layout-pane-west"
+ , _open = "-open"
+ , _sliding= "-sliding"
+ , _closed = "-closed"
+ , classes = [ root, root+_open, root+_closed, root+_sliding, // generic classes
+ pRoot, pRoot+_open, pRoot+_closed, pRoot+_sliding ] // pane-specific classes
+ ;
+ $.merge(classes, getHoverClasses($P, true)); // ADD hover-classes
+ // remove all Layout classes from pane-element
+ $P .removeClass( classes.join(" ") ) // remove ALL pane-classes
+ .removeData("parentLayout")
+ .removeData("layoutPane")
+ .removeData("layoutRole")
+ .removeData("layoutEdge")
+ .removeData("autoHidden") // in case set
+ .unbind("."+ sID) // remove ALL Layout events
+ // TODO: remove these extra unbind commands when jQuery is fixed
+ //.unbind("mouseenter"+ sID)
+ //.unbind("mouseleave"+ sID)
+ ;
+ // do NOT reset CSS if this pane/content is STILL the container of a nested layout!
+ // the nested layout will reset its 'container' CSS when/if it is destroyed
+ if ($C && $C.data(d)) {
+ // NOTE(review): reaches here only when the nested layout was NOT destroyed above, so 'child' is non-null - confirm
+ // a content-div may not have a specific width, so give it one to contain the Layout
+ $C.width( $C.width() );
+ child.resizeAll(); // now resize the Layout
+ }
+ else if ($C)
+ $C.css( $C.data(css) ).removeData(css).removeData("layoutRole");
+ // remove pane AFTER content in case there was a nested layout
+ if (!$P.data(d))
+ $P.css( $P.data(css) ).removeData(css);
+ }
+
+ // REMOVE pane resizer and toggler elements
+ if ($T) $T.remove();
+ if ($R) $R.remove();
+
+ // CLEAR all pointers and state data
+ Instance[pane] = $Ps[pane] = $Cs[pane] = $Rs[pane] = $Ts[pane] = children[pane] = false;
+ // NOTE(review): this rebinds only the local 's' - state[pane] itself is not replaced; presumably intentional - confirm
+ s = { removed: true };
+
+ if (!skipResize)
+ resizeAll();
+ }
+
+
+/*
+ * ###########################
+ * ACTION METHODS
+ * ###########################
+ */
+
+ /**
+ * Hide a pane element - either by moving it off-screen (preserving its
+ * left/right for later restore by _showPane) or via a normal hide()
+ *
+ * @param {string} pane The pane to hide, eg: north, south, east, west
+ */
+, _hidePane = function (pane) {
+ var $P = $Ps[pane]
+ , o = options[pane]
+ , s = $P[0].style
+ ;
+ if (o.useOffscreenClose) {
+ if (!$P.data(_c.offscreenReset)) // save current left/right so _showPane can restore them
+ $P.data(_c.offscreenReset, { left: s.left, right: s.right });
+ $P.css( _c.offscreenCSS );
+ }
+ else
+ $P.hide().removeData(_c.offscreenReset);
+ }
+
+ /**
+ * Show a pane element, restoring its saved left/right if _hidePane had
+ * moved it off-screen (counterpart of _hidePane)
+ *
+ * @param {string} pane The pane to show, eg: north, south, east, west
+ */
+, _showPane = function (pane) {
+ var $P = $Ps[pane]
+ , o = options[pane]
+ , off = _c.offscreenCSS
+ , old = $P.data(_c.offscreenReset)
+ , s = $P[0].style
+ ;
+ $P .show() // ALWAYS show, just in case
+ .removeData(_c.offscreenReset);
+ if (o.useOffscreenClose && old) {
+ // only restore a coordinate that still holds the off-screen value
+ if (s.left == off.left)
+ s.left = old.left;
+ if (s.right == off.right)
+ s.right = old.right;
+ }
+ }
+
+
+ /**
+ * Completely 'hides' a pane, including its spacing - as if it does not exist
+ * The pane is not actually 'removed' from the source, so can use 'show' to un-hide it
+ *
+ * @param {string|Object} evt_or_pane The pane being hidden, ie: north, south, east, or west
+ * @param {boolean=} [noAnimation=false]
+ */
+, hide = function (evt_or_pane, noAnimation) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , o = options[pane]
+ , s = state[pane]
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ ;
+ if (!$P || s.isHidden) return; // pane does not exist OR is already hidden
+
+ // onhide_start callback - will CANCEL hide if returns false
+ if (state.initialized && false === _runCallbacks("onhide_start", pane)) return;
+
+ s.isSliding = false; // just in case
+
+ // now hide the elements
+ if ($R) $R.hide(); // hide resizer-bar
+ // pane already closed (or layout still initializing) - just flag hidden, no animation needed
+ if (!state.initialized || s.isClosed) {
+ s.isClosed = true; // to trigger open-animation on show()
+ s.isHidden = true;
+ s.isVisible = false;
+ if (!state.initialized)
+ _hidePane(pane); // no animation when loading page
+ sizeMidPanes(_c[pane].dir === "horz" ? "" : "center");
+ if (state.initialized || o.triggerEventsOnLoad)
+ _runCallbacks("onhide_end", pane);
+ }
+ else {
+ s.isHiding = true; // used by onclose
+ close(pane, false, noAnimation); // adjust all panes to fit
+ }
+ }
+
+ /**
+ * Show a hidden pane - show as 'closed' by default unless openPane = true
+ *
+ * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west
+ * @param {boolean=} [openPane=false]
+ * @param {boolean=} [noAnimation=false]
+ * @param {boolean=} [noAlert=false]
+ */
+, show = function (evt_or_pane, openPane, noAnimation, noAlert) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , o = options[pane]
+ , s = state[pane]
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ ;
+ if (!$P || !s.isHidden) return; // pane does not exist OR is not hidden
+
+ // onshow_start callback - will CANCEL show if returns false
+ if (false === _runCallbacks("onshow_start", pane)) return;
+
+ s.isSliding = false; // just in case
+ s.isShowing = true; // used by onopen/onclose
+ //s.isHidden = false; - will be set by open/close - if not cancelled
+
+ // now show the elements - delegated to open/close, which do the real work
+ //if ($R) $R.show(); - will be shown by open/close
+ if (openPane === false)
+ close(pane, true); // true = force
+ else
+ open(pane, false, noAnimation, noAlert); // adjust all panes to fit
+ }
+
+
+ /**
+ * Toggles a pane open/closed by calling either open or close
+ *
+ * @param {string|Object} evt_or_pane The pane being toggled, ie: north, south, east, or west
+ * @param {boolean=} [slide=false]
+ */
+, toggle = function (evt_or_pane, slide) {
+ if (!isInitialized()) return;
+ var evt = evtObj(evt_or_pane)
+ , pane = evtPane.call(this, evt_or_pane)
+ , s = state[pane]
+ ;
+ if (evt) // called from $R.dblclick OR triggerPaneEvent
+ evt.stopImmediatePropagation();
+ if (s.isHidden)
+ show(pane); // will call 'open' after unhiding it
+ else if (s.isClosed)
+ open(pane, !!slide);
+ else
+ close(pane);
+ }
+
+
+ /**
+ * Utility method used during init or other auto-processes
+ * Instantly hides the pane and flags it closed - no animation, no callbacks
+ *
+ * @param {string} pane The pane being closed
+ * @param {boolean=} [setHandles=false]
+ */
+, _closePane = function (pane, setHandles) {
+ var
+ $P = $Ps[pane]
+ , s = state[pane]
+ ;
+ _hidePane(pane);
+ s.isClosed = true;
+ s.isVisible = false;
+ // UNUSED: if (setHandles) setAsClosed(pane, true); // true = force
+ }
+
+ /**
+ * Close the specified pane (animation optional), and resize all other panes as needed
+ *
+ * @param {string|Object} evt_or_pane The pane being closed, ie: north, south, east, or west
+ * @param {boolean=} [force=false]
+ * @param {boolean=} [noAnimation=false]
+ * @param {boolean=} [skipCallback=false]
+ */
+, close = function (evt_or_pane, force, noAnimation, skipCallback) {
+ var pane = evtPane.call(this, evt_or_pane);
+ // if pane has been initialized, but NOT the complete layout, close pane instantly
+ if (!state.initialized && $Ps[pane]) {
+ _closePane(pane); // INIT pane as closed
+ return;
+ }
+ if (!isInitialized()) return;
+
+ var
+ $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , $T = $Ts[pane]
+ , o = options[pane]
+ , s = state[pane]
+ , c = _c[pane]
+ , doFX, isShowing, isHiding, wasSliding;
+
+ // QUEUE in case another action/animation is in progress
+ $N.queue(function( queueNext ){
+
+ if ( !$P
+ || (!o.closable && !s.isShowing && !s.isHiding) // invalid request // (!o.resizable && !o.closable) ???
+ || (!force && s.isClosed && !s.isShowing) // already closed
+ ) return queueNext();
+
+ // onclose_start callback - will CANCEL hide if returns false
+ // SKIP if just 'showing' a hidden pane as 'closed'
+ var abort = !s.isShowing && false === _runCallbacks("onclose_start", pane);
+
+ // transfer logic vars to temp vars
+ isShowing = s.isShowing;
+ isHiding = s.isHiding;
+ wasSliding = s.isSliding;
+ // now clear the logic vars (REQUIRED before aborting)
+ delete s.isShowing;
+ delete s.isHiding;
+
+ if (abort) return queueNext();
+
+ doFX = !noAnimation && !s.isClosed && (o.fxName_close != "none");
+ s.isMoving = true;
+ s.isClosed = true;
+ s.isVisible = false;
+ // update isHidden BEFORE sizing panes
+ if (isHiding) s.isHidden = true;
+ else if (isShowing) s.isHidden = false;
+
+ if (s.isSliding) // pane is being closed, so UNBIND trigger events
+ bindStopSlidingEvents(pane, false); // will set isSliding=false
+ else // resize panes adjacent to this one
+ sizeMidPanes(_c[pane].dir === "horz" ? "" : "center", false); // false = NOT skipCallback
+
+ // if this pane has a resizer bar, move it NOW - before animation
+ setAsClosed(pane);
+
+ // CLOSE THE PANE
+ if (doFX) { // animate the close
+ // mask panes with objects
+ var masks = "center"+ (c.dir=="horz" ? ",west,east" : "");
+ showMasks( masks, true ); // true = ONLY mask panes with maskObjects=true
+ lockPaneForFX(pane, true); // need to set left/top so animation will work
+ $P.hide( o.fxName_close, o.fxSettings_close, o.fxSpeed_close, function () {
+ lockPaneForFX(pane, false); // undo
+ if (s.isClosed) close_2(); // skip if pane was re-opened mid-animation
+ queueNext();
+ });
+ }
+ else { // hide the pane without animation
+ _hidePane(pane);
+ close_2();
+ queueNext();
+ };
+ });
+
+ // SUBROUTINE - post-close state/callback handling, shared by both branches above
+ function close_2 () {
+ s.isMoving = false;
+ bindStartSlidingEvent(pane, true); // will enable if o.slidable = true
+
+ // if opposite-pane was autoClosed, see if it can be autoOpened now
+ var altPane = _c.oppositeEdge[pane];
+ if (state[ altPane ].noRoom) {
+ setSizeLimits( altPane );
+ makePaneFit( altPane );
+ }
+
+ // hide any masks shown while closing
+ hideMasks();
+
+ if (!skipCallback && (state.initialized || o.triggerEventsOnLoad)) {
+ // onclose callback - UNLESS just 'showing' a hidden pane as 'closed'
+ if (!isShowing) _runCallbacks("onclose_end", pane);
+ // onhide OR onshow callback
+ if (isShowing) _runCallbacks("onshow_end", pane);
+ if (isHiding) _runCallbacks("onhide_end", pane);
+ }
+ }
+ }
+
+ /**
+ * Update resizer/toggler classes, position, cursor and tips for a closed pane
+ *
+ * @param {string} pane The pane just closed, ie: north, south, east, or west
+ */
+, setAsClosed = function (pane) {
+ var
+ $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , $T = $Ts[pane]
+ , o = options[pane]
+ , s = state[pane]
+ , side = _c[pane].side.toLowerCase()
+ , inset = "inset"+ _c[pane].side
+ , rClass = o.resizerClass
+ , tClass = o.togglerClass
+ , _pane = "-"+ pane // used for classNames
+ , _open = "-open"
+ , _sliding= "-sliding"
+ , _closed = "-closed"
+ ;
+ $R
+ .css(side, sC[inset]) // move the resizer
+ .removeClass( rClass+_open +" "+ rClass+_pane+_open )
+ .removeClass( rClass+_sliding +" "+ rClass+_pane+_sliding )
+ .addClass( rClass+_closed +" "+ rClass+_pane+_closed )
+ .unbind("dblclick."+ sID)
+ ;
+ // DISABLE 'resizing' when closed - do this BEFORE bindStartSlidingEvent?
+ if (o.resizable && $.layout.plugins.draggable)
+ $R
+ .draggable("disable")
+ .removeClass("ui-state-disabled") // do NOT apply disabled styling - not suitable here
+ .css("cursor", "default")
+ .attr("title","")
+ ;
+
+ // if pane has a toggler button, adjust that too
+ if ($T) {
+ $T
+ .removeClass( tClass+_open +" "+ tClass+_pane+_open )
+ .addClass( tClass+_closed +" "+ tClass+_pane+_closed )
+ .attr("title", o.tips.Open) // may be blank
+ ;
+ // toggler-content - if exists
+ $T.children(".content-open").hide();
+ $T.children(".content-closed").css("display","block");
+ }
+
+ // sync any 'pin buttons'
+ syncPinBtns(pane, false);
+
+ if (state.initialized) {
+ // resize 'length' and position togglers for adjacent panes
+ sizeHandles();
+ }
+ }
+
+ /**
+ * Open the specified pane (animation optional), and resize all other panes as needed
+ *
+ * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west
+ * @param {boolean=} [slide=false]
+ * @param {boolean=} [noAnimation=false]
+ * @param {boolean=} [noAlert=false]
+ */
+, open = function (evt_or_pane, slide, noAnimation, noAlert) {
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , $T = $Ts[pane]
+ , o = options[pane]
+ , s = state[pane]
+ , c = _c[pane]
+ , doFX, isShowing
+ ;
+ // QUEUE in case another action/animation is in progress
+ $N.queue(function( queueNext ){
+
+ if ( !$P
+ || (!o.resizable && !o.closable && !s.isShowing) // invalid request
+ || (s.isVisible && !s.isSliding) // already open
+ ) return queueNext();
+
+ // pane can ALSO be unhidden by just calling show(), so handle this scenario
+ if (s.isHidden && !s.isShowing) {
+ queueNext(); // call before show() because it needs the queue free
+ show(pane, true);
+ return;
+ }
+
+ if (o.autoResize && s.size != o.size) // resize pane to original size set in options
+ sizePane(pane, o.size, true, true, true); // true=skipCallback/forceResize/noAnimation
+ else
+ // make sure there is enough space available to open the pane
+ setSizeLimits(pane, slide);
+
+ // onopen_start callback - will CANCEL open if returns false
+ var cbReturn = _runCallbacks("onopen_start", pane);
+
+ if (cbReturn === "abort")
+ return queueNext();
+
+ // update pane-state again in case options were changed in onopen_start
+ if (cbReturn !== "NC") // NC = "No Callback"
+ setSizeLimits(pane, slide);
+
+ if (s.minSize > s.maxSize) { // INSUFFICIENT ROOM FOR PANE TO OPEN!
+ syncPinBtns(pane, false); // make sure pin-buttons are reset
+ if (!noAlert && o.tips.noRoomToOpen)
+ alert(o.tips.noRoomToOpen);
+ return queueNext(); // ABORT
+ }
+
+ if (slide) // START Sliding - will set isSliding=true
+ bindStopSlidingEvents(pane, true); // BIND trigger events to close sliding-pane
+ else if (s.isSliding) // PIN PANE (stop sliding) - open pane 'normally' instead
+ bindStopSlidingEvents(pane, false); // UNBIND trigger events - will set isSliding=false
+ else if (o.slidable)
+ bindStartSlidingEvent(pane, false); // UNBIND trigger events
+
+ s.noRoom = false; // will be reset by makePaneFit if 'noRoom'
+ makePaneFit(pane);
+
+ // transfer logic var to temp var
+ isShowing = s.isShowing;
+ // now clear the logic var
+ delete s.isShowing;
+
+ doFX = !noAnimation && s.isClosed && (o.fxName_open != "none");
+ s.isMoving = true;
+ s.isVisible = true;
+ s.isClosed = false;
+ // update isHidden BEFORE sizing panes - WHY??? Old?
+ if (isShowing) s.isHidden = false;
+
+ if (doFX) { // ANIMATE
+ // mask panes with objects
+ var masks = "center"+ (c.dir=="horz" ? ",west,east" : "");
+ if (s.isSliding) masks += ","+ _c.oppositeEdge[pane];
+ showMasks( masks, true ); // true = ONLY mask panes with maskObjects=true
+ lockPaneForFX(pane, true); // need to set left/top so animation will work
+ $P.show( o.fxName_open, o.fxSettings_open, o.fxSpeed_open, function() {
+ lockPaneForFX(pane, false); // undo
+ if (s.isVisible) open_2(); // continue - skip if pane was re-closed mid-animation
+ queueNext();
+ });
+ }
+ else { // no animation
+ _showPane(pane);// just show pane and...
+ open_2(); // continue
+ queueNext();
+ };
+ });
+
+ // SUBROUTINE - post-open resizing/callback handling, shared by both branches above
+ function open_2 () {
+ s.isMoving = false;
+
+ // cure iframe display issues
+ _fixIframe(pane);
+
+ // NOTE: if isSliding, then other panes are NOT 'resized'
+ if (!s.isSliding) { // resize all panes adjacent to this one
+ hideMasks(); // remove any masks shown while opening
+ sizeMidPanes(_c[pane].dir=="vert" ? "center" : "", false); // false = NOT skipCallback
+ }
+
+ // set classes, position handles and execute callbacks...
+ setAsOpen(pane);
+ };
+
+ }
+
+ /**
+ * Update resizer/toggler classes, position, cursor and tips for an open pane,
+ * then resize handles/content and fire the open/show/resize callbacks
+ *
+ * @param {string} pane The pane just opened, ie: north, south, east, or west
+ * @param {boolean=} [skipCallback=false]
+ */
+, setAsOpen = function (pane, skipCallback) {
+ var
+ $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , $T = $Ts[pane]
+ , o = options[pane]
+ , s = state[pane]
+ , side = _c[pane].side.toLowerCase()
+ , inset = "inset"+ _c[pane].side
+ , rClass = o.resizerClass
+ , tClass = o.togglerClass
+ , _pane = "-"+ pane // used for classNames
+ , _open = "-open"
+ , _closed = "-closed"
+ , _sliding= "-sliding"
+ ;
+ $R
+ .css(side, sC[inset] + getPaneSize(pane)) // move the resizer
+ .removeClass( rClass+_closed +" "+ rClass+_pane+_closed )
+ .addClass( rClass+_open +" "+ rClass+_pane+_open )
+ ;
+ if (s.isSliding)
+ $R.addClass( rClass+_sliding +" "+ rClass+_pane+_sliding )
+ else // in case 'was sliding'
+ $R.removeClass( rClass+_sliding +" "+ rClass+_pane+_sliding )
+
+ if (o.resizerDblClickToggle)
+ $R.bind("dblclick", toggle );
+ removeHover( 0, $R ); // remove hover classes
+ if (o.resizable && $.layout.plugins.draggable)
+ $R .draggable("enable")
+ .css("cursor", o.resizerCursor)
+ .attr("title", o.tips.Resize);
+ else if (!s.isSliding)
+ $R.css("cursor", "default"); // n-resize, s-resize, etc
+
+ // if pane also has a toggler button, adjust that too
+ if ($T) {
+ $T .removeClass( tClass+_closed +" "+ tClass+_pane+_closed )
+ .addClass( tClass+_open +" "+ tClass+_pane+_open )
+ .attr("title", o.tips.Close); // may be blank
+ removeHover( 0, $T ); // remove hover classes
+ // toggler-content - if exists
+ $T.children(".content-closed").hide();
+ $T.children(".content-open").css("display","block");
+ }
+
+ // sync any 'pin buttons'
+ syncPinBtns(pane, !s.isSliding);
+
+ // update pane-state dimensions - BEFORE resizing content
+ $.extend(s, elDims($P));
+
+ if (state.initialized) {
+ // resize resizer & toggler sizes for all panes
+ sizeHandles();
+ // resize content every time pane opens - to be sure
+ sizeContent(pane, true); // true = remeasure headers/footers, even if 'pane.isMoving'
+ }
+
+ if (!skipCallback && (state.initialized || o.triggerEventsOnLoad) && $P.is(":visible")) {
+ // onopen callback
+ _runCallbacks("onopen_end", pane);
+ // onshow callback - TODO: should this be here?
+ if (s.isShowing) _runCallbacks("onshow_end", pane);
+
+ // ALSO call onresize because layout-size *may* have changed while pane was closed
+ if (state.initialized)
+ _runCallbacks("onresize_end", pane);
+ }
+
+ // TODO: Somehow sizePane("north") is being called after this point???
+ }
+
+
+ /**
+ * slideOpen / slideClose / slideToggle
+ *
+ * Pass-though methods for sliding
+ */
+, slideOpen = function (evt_or_pane) {
+ if (!isInitialized()) return;
+ var evt = evtObj(evt_or_pane)
+ , pane = evtPane.call(this, evt_or_pane)
+ , s = state[pane]
+ , delay = options[pane].slideDelay_open
+ ;
+ // prevent event from triggering on NEW resizer binding created below
+ if (evt) evt.stopImmediatePropagation();
+
+ if (s.isClosed && evt && evt.type === "mouseenter" && delay > 0)
+ // trigger = mouseenter - use a delay
+ timer.set(pane+"_openSlider", open_NOW, delay);
+ else
+ open_NOW(); // will unbind events if is already open
+
+ /**
+ * SUBROUTINE for timed open
+ */
+ function open_NOW () {
+ if (!s.isClosed) // skip if no longer closed!
+ bindStopSlidingEvents(pane, true); // BIND trigger events to close sliding-pane
+ else if (!s.isMoving)
+ open(pane, true); // true = slide - open() will handle binding
+ };
+ }
+
+ /**
+ * Close a sliding pane - immediately on click, or after a delay on mouseleave
+ *
+ * @param {string|Object} evt_or_pane The pane being closed, ie: north, south, east, or west
+ */
+, slideClose = function (evt_or_pane) {
+ if (!isInitialized()) return;
+ var evt = evtObj(evt_or_pane)
+ , pane = evtPane.call(this, evt_or_pane)
+ , o = options[pane]
+ , s = state[pane]
+ , delay = s.isMoving ? 1000 : 300 // MINIMUM delay - option may override
+ ;
+ if (s.isClosed || s.isResizing)
+ return; // skip if already closed OR in process of resizing
+ else if (o.slideTrigger_close === "click")
+ close_NOW(); // close immediately onClick
+ else if (o.preventQuickSlideClose && s.isMoving)
+ return; // handle Chrome quick-close on slide-open
+ else if (o.preventPrematureSlideClose && evt && $.layout.isMouseOverElem(evt, $Ps[pane]))
+ return; // handle incorrect mouseleave trigger, like when over a SELECT-list in IE
+ else if (evt) // trigger = mouseleave - use a delay
+ // 1 sec delay if 'opening', else .3 sec
+ // NOTE(review): 'max' is presumably a closure alias for Math.max - defined earlier in this file; confirm
+ timer.set(pane+"_closeSlider", close_NOW, max(o.slideDelay_close, delay));
+ else // called programically
+ close_NOW();
+
+ /**
+ * SUBROUTINE for timed close
+ */
+ function close_NOW () {
+ if (s.isClosed) // skip 'close' if already closed!
+ bindStopSlidingEvents(pane, false); // UNBIND trigger events - TODO: is this needed here?
+ else if (!s.isMoving)
+ close(pane); // close will handle unbinding
+ };
+ }
+
+ /**
+ * Toggle a pane open/closed in 'sliding' mode - thin wrapper around toggle()
+ *
+ * @param {string|Object} evt_or_pane The pane being opened, ie: north, south, east, or west
+ */
+, slideToggle = function (evt_or_pane) {
+ var pane = evtPane.call(this, evt_or_pane);
+ toggle(pane, true); // true = slide
+ }
+
+
+ /**
+ * Must set left/top on East/South panes so animation will work properly
+ *
+ * @param {string} pane The pane to lock, 'east' or 'south' - any other is ignored!
+ * @param {boolean} doLock true = set left/top, false = remove
+ */
+, lockPaneForFX = function (pane, doLock) {
+ var $P = $Ps[pane]
+ , s = state[pane]
+ , o = options[pane]
+ , z = options.zIndexes
+ ;
+ if (doLock) {
+ $P.css({ zIndex: z.pane_animate }); // overlay all elements during animation
+ if (pane=="south")
+ $P.css({ top: sC.insetTop + sC.innerHeight - $P.outerHeight() });
+ else if (pane=="east")
+ $P.css({ left: sC.insetLeft + sC.innerWidth - $P.outerWidth() });
+ }
+ else { // animation DONE - RESET CSS
+ // TODO: see if this can be deleted. It causes a quick-close when sliding in Chrome
+ $P.css({ zIndex: (s.isSliding ? z.pane_sliding : z.pane_normal) });
+ if (pane=="south")
+ $P.css({ top: "auto" });
+ // if pane is positioned 'off-screen', then DO NOT screw with it!
+ else if (pane=="east" && !$P.css("left").match(/\-99999/))
+ $P.css({ left: "auto" });
+ // fix anti-aliasing in IE - only needed for animations that change opacity
+ if (browser.msie && o.fxOpacityFix && o.fxName_open != "slide" && $P.css("filter") && $P.css("opacity") == 1)
+ $P[0].style.removeAttribute('filter');
+ }
+ }
+
+
+ /**
+ * Toggle sliding functionality of a specific pane on/off by adding removing 'slide open' trigger
+ *
+ * @see open(), close()
+ * @param {string} pane The pane to enable/disable, 'north', 'south', etc.
+ * @param {boolean} enable Enable or Disable sliding?
+ */
+, bindStartSlidingEvent = function (pane, enable) {
+ var o = options[pane]
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , evtName = o.slideTrigger_open.toLowerCase()
+ ;
+ if (!$R || (enable && !o.slidable)) return;
+
+ // make sure we have a valid event - normalize invalid options in place
+ if (evtName.match(/mouseover/))
+ evtName = o.slideTrigger_open = "mouseenter";
+ else if (!evtName.match(/(click|dblclick|mouseenter)/))
+ evtName = o.slideTrigger_open = "click";
+
+ $R
+ // add or remove event
+ [enable ? "bind" : "unbind"](evtName +'.'+ sID, slideOpen)
+ // set the appropriate cursor & title/tip
+ .css("cursor", enable ? o.sliderCursor : "default")
+ .attr("title", enable ? o.tips.Slide : "")
+ ;
+ }
+
+ /**
+ * Add or remove 'mouseleave' events to 'slide close' when pane is 'sliding' open or closed
+ * Also increases zIndex when pane is sliding open
+ * See bindStartSlidingEvent for code to control 'slide open'
+ *
+ * @see slideOpen(), slideClose()
+ * @param {string} pane The pane to process, 'north', 'south', etc.
+ * @param {boolean} enable Enable or Disable events?
+ */
+, bindStopSlidingEvents = function (pane, enable) {
+ var o = options[pane]
+ , s = state[pane]
+ , c = _c[pane]
+ , z = options.zIndexes
+ , evtName = o.slideTrigger_close.toLowerCase()
+ , action = (enable ? "bind" : "unbind")
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ ;
+ s.isSliding = enable; // logic
+ timer.clear(pane+"_closeSlider"); // just in case
+
+ // remove 'slideOpen' event from resizer
+ // ALSO will raise the zIndex of the pane & resizer
+ if (enable) bindStartSlidingEvent(pane, false);
+
+ // RE/SET zIndex - increases when pane is sliding-open, resets to normal when not
+ $P.css("zIndex", enable ? z.pane_sliding : z.pane_normal);
+ $R.css("zIndex", enable ? z.pane_sliding+2 : z.resizer_normal); // NOTE: mask = pane_sliding+1
+
+ // make sure we have a valid event
+ if (!evtName.match(/(click|mouseleave)/))
+ evtName = o.slideTrigger_close = "mouseleave"; // also catches 'mouseout'
+
+ // add/remove slide triggers
+ // NOTE(review): this bind has no '.sID' namespace, unlike the mouseleave/mouseenter binds below - confirm intentional
+ $R[action](evtName, slideClose); // base event on resize
+ // need extra events for mouseleave
+ if (evtName === "mouseleave") {
+ // also close on pane.mouseleave
+ $P[action]("mouseleave."+ sID, slideClose);
+ // cancel timer when mouse moves between 'pane' and 'resizer'
+ $R[action]("mouseenter."+ sID, cancelMouseOut);
+ $P[action]("mouseenter."+ sID, cancelMouseOut);
+ }
+
+ if (!enable)
+ timer.clear(pane+"_closeSlider");
+ else if (evtName === "click" && !o.resizable) {
+ // IF pane is not resizable (which already has a cursor and tip)
+ // then set the a cursor & title/tip on resizer when sliding
+ $R.css("cursor", enable ? o.sliderCursor : "default");
+ $R.attr("title", enable ? o.tips.Close : ""); // use Toggler-tip, eg: "Close Pane"
+ }
+
+ // SUBROUTINE for mouseleave timer clearing
+ function cancelMouseOut (evt) {
+ timer.clear(pane+"_closeSlider");
+ evt.stopPropagation();
+ }
+ }
+
+
+ /**
+ * Hides/closes a pane if there is insufficient room - reverses this when there is room again
+ * MUST have already called setSizeLimits() before calling this method
+ *
+ * @param {string} pane The pane being resized
+ * @param {boolean=} [isOpening=false] Called from onOpen?
+ * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
+ * @param {boolean=} [force=false]
+ */
+, makePaneFit = function (pane, isOpening, skipCallback, force) {
+ var
+ o = options[pane]
+ , s = state[pane]
+ , c = _c[pane]
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , isSidePane = c.dir==="vert"
+ , hasRoom = false
+ ;
+ // special handling for center & east/west panes
+ if (pane === "center" || (isSidePane && s.noVerticalRoom)) {
+ // see if there is enough room to display the pane
+ // ERROR: hasRoom = s.minHeight <= s.maxHeight && (isSidePane || s.minWidth <= s.maxWidth);
+ hasRoom = (s.maxHeight >= 0);
+ if (hasRoom && s.noRoom) { // previously hidden due to noRoom, so show now
+ _showPane(pane);
+ if ($R) $R.show();
+ s.isVisible = true;
+ s.noRoom = false;
+ if (isSidePane) s.noVerticalRoom = false;
+ _fixIframe(pane);
+ }
+ else if (!hasRoom && !s.noRoom) { // not currently hidden, so hide now
+ _hidePane(pane);
+ if ($R) $R.hide();
+ s.isVisible = false;
+ s.noRoom = true;
+ }
+ }
+
+ // see if there is enough room to fit the border-pane
+ if (pane === "center") {
+ // ignore center in this block
+ }
+ else if (s.minSize <= s.maxSize) { // pane CAN fit
+ hasRoom = true;
+ if (s.size > s.maxSize) // pane is too big - shrink it
+ sizePane(pane, s.maxSize, skipCallback, force, true); // true = noAnimation
+ else if (s.size < s.minSize) // pane is too small - enlarge it
+ sizePane(pane, s.minSize, skipCallback, force, true);
+ // need s.isVisible because new pseudoClose method keeps pane visible, but off-screen
+ else if ($R && s.isVisible && $P.is(":visible")) {
+ // make sure resizer-bar is positioned correctly
+ // handles situation where nested layout was 'hidden' when initialized
+ var side = c.side.toLowerCase()
+ , pos = s.size + sC["inset"+ c.side]
+ ;
+ if ($.layout.cssNum($R, side) != pos) $R.css( side, pos );
+ }
+
+ // if was previously hidden due to noRoom, then RESET because NOW there is room
+ if (s.noRoom) {
+ // s.noRoom state will be set by open or show
+ if (s.wasOpen && o.closable) {
+ if (o.autoReopen)
+ open(pane, false, true, true); // true = noAnimation, true = noAlert
+ else // leave the pane closed, so just update state
+ s.noRoom = false;
+ }
+ else
+ show(pane, s.wasOpen, true, true); // true = noAnimation, true = noAlert
+ }
+ }
+ else { // !hasRoom - pane CANNOT fit
+ if (!s.noRoom) { // pane not set as noRoom yet, so hide or close it now...
+ s.noRoom = true; // update state
+ s.wasOpen = !s.isClosed && !s.isSliding; // remember, so it can be restored when room returns
+ if (s.isClosed){} // SKIP
+ else if (o.closable) // 'close' if possible
+ close(pane, true, true); // true = force, true = noAnimation
+ else // 'hide' pane if cannot just be closed
+ hide(pane, true); // true = noAnimation
+ }
+ }
+ }
+
+
+ /**
+ * sizePane / manualSizePane
+ * sizePane is called only by internal methods whenever a pane needs to be resized
+ * manualSizePane is an exposed flow-through method allowing extra code when pane is 'manually resized'
+ *
+ * @param {string|Object} evt_or_pane The pane being resized
+ * @param {number} size The *desired* new size for this pane - will be validated
+ * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
+ * @param {boolean=} [noAnimation=false]
+ */
+, manualSizePane = function (evt_or_pane, size, skipCallback, noAnimation) {
+ // Public entry point for user-initiated resizes; flows through to sizePane()
+ // after permanently disabling autoResize (percentage sizing) for this pane.
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , o = options[pane]
+ , s = state[pane]
+ // if resizing callbacks have been delayed and resizing is now DONE, force resizing to complete...
+ , forceResize = o.livePaneResizing && !s.isResizing
+ ;
+ // ANY call to manualSizePane disables autoResize - ie, percentage sizing
+ o.autoResize = false;
+ // flow-through...
+ sizePane(pane, size, skipCallback, forceResize, noAnimation); // will animate resize if option enabled
+ }
+
+ /**
+ * @param {string|Object} evt_or_pane The pane being resized
+ * @param {number} size The *desired* new size for this pane - will be validated
+ * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
+ * @param {boolean=} [force=false] Force resizing even if does not seem necessary
+ * @param {boolean=} [noAnimation=false]
+ */
+, sizePane = function (evt_or_pane, size, skipCallback, force, noAnimation) {
+ // Core resize routine. The actual work is queued on the container element ($N)
+ // so it cannot overlap an in-progress open/close/size animation.
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane) // probably NEVER called from event?
+ , o = options[pane]
+ , s = state[pane]
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , side = _c[pane].side.toLowerCase()
+ , dimName = _c[pane].sizeType.toLowerCase()
+ , inset = "inset"+ _c[pane].side
+ , skipResizeWhileDragging = s.isResizing && !o.triggerEventsDuringLiveResize
+ , doFX = noAnimation !== true && o.animatePaneSizing
+ , oldSize, newSize
+ ;
+ // QUEUE in case another action/animation is in progress
+ $N.queue(function( queueNext ){
+ // calculate 'current' min/max sizes
+ setSizeLimits(pane); // update pane-state
+ oldSize = s.size;
+ size = _parseSize(pane, size); // handle percentages & auto
+ size = max(size, _parseSize(pane, o.minSize));
+ size = min(size, s.maxSize);
+ if (size < s.minSize) { // not enough room for pane!
+ queueNext(); // call before makePaneFit() because it needs the queue free
+ makePaneFit(pane, false, skipCallback); // will hide or close pane
+ return;
+ }
+
+ // IF newSize is same as oldSize, then nothing to do - abort
+ if (!force && size === oldSize)
+ return queueNext();
+
+ // onresize_start callback CANNOT cancel resizing because this would break the layout!
+ if (!skipCallback && state.initialized && s.isVisible)
+ _runCallbacks("onresize_start", pane);
+
+ // resize the pane, and make sure its visible
+ newSize = cssSize(pane, size);
+
+ if (doFX && $P.is(":visible")) { // ANIMATE
+ var fx = $.layout.effects.size[pane] || $.layout.effects.size.all
+ , easing = o.fxSettings_size.easing || fx.easing
+ , z = options.zIndexes
+ , props = {};
+ props[ dimName ] = newSize +'px';
+ s.isMoving = true;
+ // overlay all elements during animation
+ $P.css({ zIndex: z.pane_animate })
+ .show().animate( props, o.fxSpeed_size, easing, function(){
+ // reset zIndex after animation
+ $P.css({ zIndex: (s.isSliding ? z.pane_sliding : z.pane_normal) });
+ s.isMoving = false;
+ sizePane_2(); // continue
+ queueNext();
+ });
+ }
+ else { // no animation
+ $P.css( dimName, newSize ); // resize pane
+ // if pane is visible, then
+ if ($P.is(":visible"))
+ sizePane_2(); // continue
+ else {
+ // pane is NOT VISIBLE, so just update state data...
+ // when pane is *next opened*, it will have the new size
+ s.size = size; // update state.size
+ $.extend(s, elDims($P)); // update state dimensions
+ }
+ queueNext();
+ };
+
+ });
+
+ // SUBROUTINE
+ function sizePane_2 () {
+ /* Panes are sometimes not sized precisely in some browsers!?
+ * This code will resize the pane up to 3 times to nudge the pane to the correct size
+ */
+ var actual = dimName==='width' ? $P.outerWidth() : $P.outerHeight()
+ , tries = [{
+ pane: pane
+ , count: 1
+ , target: size
+ , actual: actual
+ , correct: (size === actual)
+ , attempt: size
+ , cssSize: newSize
+ }]
+ , lastTry = tries[0]
+ , thisTry = {}
+ , msg = 'Inaccurate size after resizing the '+ pane +'-pane.'
+ ;
+ // each retry adjusts the attempted CSS size by the measured over/under-shoot
+ while ( !lastTry.correct ) {
+ thisTry = { pane: pane, count: lastTry.count+1, target: size };
+
+ if (lastTry.actual > size)
+ thisTry.attempt = max(0, lastTry.attempt - (lastTry.actual - size));
+ else // lastTry.actual < size
+ thisTry.attempt = max(0, lastTry.attempt + (size - lastTry.actual));
+
+ thisTry.cssSize = cssSize(pane, thisTry.attempt);
+ $P.css( dimName, thisTry.cssSize );
+
+ thisTry.actual = dimName=='width' ? $P.outerWidth() : $P.outerHeight();
+ thisTry.correct = (size === thisTry.actual);
+
+ // log attempts and alert the user of this *non-fatal error* (if showDebugMessages)
+ if ( tries.length === 1) {
+ _log(msg, false, true);
+ _log(lastTry, false, true);
+ }
+ _log(thisTry, false, true);
+ // after 4 tries, is as close as its gonna get!
+ if (tries.length > 3) break;
+
+ tries.push( thisTry );
+ lastTry = tries[ tries.length - 1 ];
+ }
+ // END TESTING CODE
+
+ // update pane-state dimensions
+ s.size = size;
+ $.extend(s, elDims($P));
+
+ if (s.isVisible && $P.is(":visible")) {
+ // reposition the resizer-bar
+ if ($R) $R.css( side, size + sC[inset] );
+ // resize the content-div
+ sizeContent(pane);
+ }
+
+ if (!skipCallback && !skipResizeWhileDragging && state.initialized && s.isVisible)
+ _runCallbacks("onresize_end", pane);
+
+ // resize all the adjacent panes, and adjust their toggler buttons
+ // when skipCallback passed, it means the controlling method will handle 'other panes'
+ if (!skipCallback) {
+ // also no callback if live-resize is in progress and NOT triggerEventsDuringLiveResize
+ if (!s.isSliding) sizeMidPanes(_c[pane].dir=="horz" ? "" : "center", skipResizeWhileDragging, force);
+ sizeHandles();
+ }
+
+ // if opposite-pane was autoClosed, see if it can be autoOpened now
+ var altPane = _c.oppositeEdge[pane];
+ if (size < oldSize && state[ altPane ].noRoom) {
+ setSizeLimits( altPane );
+ makePaneFit( altPane, false, skipCallback );
+ }
+
+ // DEBUG - ALERT user/developer so they know there was a sizing problem
+ if (tries.length > 1)
+ _log(msg +'\nSee the Error Console for details.', true, true);
+ }
+ }
+
+ /**
+ * @see initPanes(), sizePane(), resizeAll(), open(), close(), hide()
+ * @param {Array.<string>|string} panes The pane(s) being resized, comma-delmited string
+ * @param {boolean=} [skipCallback=false] Should the onresize callback be run?
+ * @param {boolean=} [force=false]
+ */
+, sizeMidPanes = function (panes, skipCallback, force) {
+ // Size the 'middle row' panes (east/west/center) to fill the space left by
+ // north/south. Center gets full new dimensions; east/west only track height.
+ panes = (panes ? panes : "east,west,center").split(",");
+
+ $.each(panes, function (i, pane) {
+ if (!$Ps[pane]) return; // NO PANE - skip
+ var
+ o = options[pane]
+ , s = state[pane]
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , isCenter= (pane=="center")
+ , hasRoom = true
+ , CSS = {}
+ , newCenter = calcNewCenterPaneDims()
+ ;
+ // update pane-state dimensions
+ $.extend(s, elDims($P));
+
+ if (pane === "center") {
+ if (!force && s.isVisible && newCenter.width === s.outerWidth && newCenter.height === s.outerHeight)
+ return true; // SKIP - pane already the correct size
+ // set state for makePaneFit() logic
+ $.extend(s, cssMinDims(pane), {
+ maxWidth: newCenter.width
+ , maxHeight: newCenter.height
+ });
+ CSS = newCenter;
+ // convert OUTER width/height to CSS width/height
+ CSS.width = cssW($P, CSS.width);
+ // NEW - allow pane to extend 'below' visible area rather than hide it
+ CSS.height = cssH($P, CSS.height);
+ hasRoom = CSS.width >= 0 && CSS.height >= 0; // height >= 0 = ALWAYS TRUE NOW
+ // during layout init, try to shrink east/west panes to make room for center
+ if (!state.initialized && o.minWidth > s.outerWidth) {
+ var
+ reqPx = o.minWidth - s.outerWidth
+ , minE = options.east.minSize || 0
+ , minW = options.west.minSize || 0
+ , sizeE = state.east.size
+ , sizeW = state.west.size
+ , newE = sizeE
+ , newW = sizeW
+ ;
+ // take space from east first, then west, never below each pane's minSize
+ if (reqPx > 0 && state.east.isVisible && sizeE > minE) {
+ newE = max( sizeE-minE, sizeE-reqPx );
+ reqPx -= sizeE-newE;
+ }
+ if (reqPx > 0 && state.west.isVisible && sizeW > minW) {
+ newW = max( sizeW-minW, sizeW-reqPx );
+ reqPx -= sizeW-newW;
+ }
+ // IF we found enough extra space, then resize the border panes as calculated
+ if (reqPx === 0) {
+ if (sizeE && sizeE != minE)
+ sizePane('east', newE, true, force, true); // true = skipCallback/noAnimation - initPanes will handle when done
+ if (sizeW && sizeW != minW)
+ sizePane('west', newW, true, force, true);
+ // now start over!
+ sizeMidPanes('center', skipCallback, force);
+ return; // abort this loop
+ }
+ }
+ }
+ else { // for east and west, set only the height, which is same as center height
+ // set state.min/maxWidth/Height for makePaneFit() logic
+ if (s.isVisible && !s.noVerticalRoom)
+ $.extend(s, elDims($P), cssMinDims(pane))
+ if (!force && !s.noVerticalRoom && newCenter.height === s.outerHeight)
+ return true; // SKIP - pane already the correct size
+ // east/west have same top, bottom & height as center
+ CSS.top = newCenter.top;
+ CSS.bottom = newCenter.bottom;
+ // NEW - allow pane to extend 'below' visible area rather than hide it
+ CSS.height = cssH($P, newCenter.height);
+ s.maxHeight = CSS.height;
+ hasRoom = (s.maxHeight >= 0); // ALWAYS TRUE NOW
+ if (!hasRoom) s.noVerticalRoom = true; // makePaneFit() logic
+ }
+
+ if (hasRoom) {
+ // resizeAll passes skipCallback because it triggers callbacks after ALL panes are resized
+ if (!skipCallback && state.initialized)
+ _runCallbacks("onresize_start", pane);
+
+ $P.css(CSS); // apply the CSS to pane
+ if (pane !== "center")
+ sizeHandles(pane); // also update resizer length
+ if (s.noRoom && !s.isClosed && !s.isHidden)
+ makePaneFit(pane); // will re-open/show auto-closed/hidden pane
+ if (s.isVisible) {
+ $.extend(s, elDims($P)); // update pane dimensions
+ if (state.initialized) sizeContent(pane); // also resize the contents, if exists
+ }
+ }
+ else if (!s.noRoom && s.isVisible) // no room for pane
+ makePaneFit(pane); // will hide or close pane
+
+ if (!s.isVisible)
+ return true; // DONE - next pane
+
+ /*
+ * Extra CSS for IE6 or IE7 in Quirks-mode - add 'width' to NORTH/SOUTH panes
+ * Normally these panes have only 'left' & 'right' positions so pane auto-sizes
+ * ALSO required when pane is an IFRAME because will NOT default to 'full width'
+ * TODO: Can I use width:100% for a north/south iframe?
+ * TODO: Sounds like a job for $P.outerWidth( sC.innerWidth ) SETTER METHOD
+ */
+ if (pane === "center") { // finished processing midPanes
+ var fix = browser.isIE6 || !browser.boxModel;
+ if ($Ps.north && (fix || state.north.tagName=="IFRAME"))
+ $Ps.north.css("width", cssW($Ps.north, sC.innerWidth));
+ if ($Ps.south && (fix || state.south.tagName=="IFRAME"))
+ $Ps.south.css("width", cssW($Ps.south, sC.innerWidth));
+ }
+
+ // resizeAll passes skipCallback because it triggers callbacks after ALL panes are resized
+ if (!skipCallback && state.initialized)
+ _runCallbacks("onresize_end", pane);
+ });
+ }
+
+
+ /**
+ * @see window.onresize(), callbacks or custom code
+ */
+, resizeAll = function (evt) {
+ // Re-fit every pane to the container's current dimensions; initializes the
+ // layout instead if it has not been initialized yet. Per-pane onresize_end
+ // callbacks are deferred until ALL panes have been resized.
+ // stopPropagation if called by trigger("layoutdestroy") - use evtPane utility
+ evtPane(evt);
+
+ if (!state.initialized) {
+ _initLayoutElements();
+ return; // no need to resize since we just initialized!
+ }
+ var oldW = sC.innerWidth
+ , oldH = sC.innerHeight
+ ;
+ // cannot size layout when 'container' is hidden or collapsed
+ if (!$N.is(":visible") ) return;
+ $.extend(state.container, elDims( $N )); // UPDATE container dimensions
+ if (!sC.outerHeight) return;
+
+ // onresizeall_start will CANCEL resizing if returns false
+ // state.container has already been set, so user can access this info for calcuations
+ if (false === _runCallbacks("onresizeall_start")) return false;
+
+ var // see if container is now 'smaller' than before
+ shrunkH = (sC.innerHeight < oldH)
+ , shrunkW = (sC.innerWidth < oldW)
+ , $P, o, s, dir
+ ;
+ // NOTE special order for sizing: S-N-E-W
+ $.each(["south","north","east","west"], function (i, pane) {
+ if (!$Ps[pane]) return; // no pane - SKIP
+ s = state[pane];
+ o = options[pane];
+ dir = _c[pane].dir;
+
+ if (o.autoResize && s.size != o.size) // resize pane to original size set in options
+ sizePane(pane, o.size, true, true, true); // true=skipCallback/forceResize/noAnimation
+ else {
+ setSizeLimits(pane);
+ makePaneFit(pane, false, true, true); // true=skipCallback/forceResize
+ }
+ });
+
+ sizeMidPanes("", true, true); // true=skipCallback, true=forceResize
+ sizeHandles(); // reposition the toggler elements
+
+ // trigger all individual pane callbacks AFTER layout has finished resizing
+ o = options; // reuse alias
+ $.each(_c.allPanes, function (i, pane) {
+ $P = $Ps[pane];
+ if (!$P) return; // SKIP
+ if (state[pane].isVisible) // undefined for non-existent panes
+ _runCallbacks("onresize_end", pane); // callback - if exists
+ });
+
+ _runCallbacks("onresizeall_end");
+ //_triggerLayoutEvent(pane, 'resizeall');
+ }
+
+ /**
+ * Whenever a pane resizes or opens that has a nested layout, trigger resizeAll
+ *
+ * @param {string|Object} evt_or_pane The pane just resized or opened
+ */
+, resizeChildLayout = function (evt_or_pane) {
+ // Locate a nested layout inside 'pane' (via the 'layout' data-key on the pane
+ // or its content element), refresh all child/parent pointers, then resize it.
+ var pane = evtPane.call(this, evt_or_pane);
+ if (!options[pane].resizeChildLayout) return;
+ var $P = $Ps[pane]
+ , $C = $Cs[pane]
+ , d = "layout"
+ , P = Instance[pane]
+ , L = children[pane]
+ ;
+ // user may have manually set EITHER instance pointer, so handle that
+ if (P.child && !L) {
+ // have to reverse the pointers!
+ var el = P.child.container;
+ L = children[pane] = (el ? el.data(d) : 0) || null; // set pointer _directly_ to layout instance
+ }
+
+ // if a layout-pointer exists, see if child has been destroyed
+ if (L && L.destroyed)
+ L = children[pane] = null; // clear child pointers
+ // no child layout pointer is set - see if there is a child layout NOW
+ if (!L) L = children[pane] = $P.data(d) || ($C ? $C.data(d) : 0) || null; // set/update child pointers
+
+ // ALWAYS refresh the pane.child alias
+ P.child = children[pane];
+
+ if (L) L.resizeAll();
+ }
+
+
+ /**
+ * IF pane has a content-div, then resize all elements inside pane to fit pane-height
+ *
+ * @param {string|Object} evt_or_panes The pane(s) being resized
+ * @param {boolean=} [remeasure=false] Should the content (header/footer) be remeasured?
+ */
+, sizeContent = function (evt_or_panes, remeasure) {
+ // For each pane with a content-div, measure header/footer siblings and set the
+ // content element's outer-height to fill the remaining inner pane height.
+ if (!isInitialized()) return;
+
+ var panes = evtPane.call(this, evt_or_panes);
+ panes = panes ? panes.split(",") : _c.allPanes;
+
+ $.each(panes, function (idx, pane) {
+ var
+ $P = $Ps[pane]
+ , $C = $Cs[pane]
+ , o = options[pane]
+ , s = state[pane]
+ , m = s.content // m = measurements
+ ;
+ if (!$P || !$C || !$P.is(":visible")) return true; // NOT VISIBLE - skip
+
+ // if content-element was REMOVED, update OR remove the pointer
+ if (!$C.length) {
+ initContent(pane, false); // false = do NOT sizeContent() - already there!
+ if (!$C) return; // no replacement element found - pointer have been removed
+ }
+
+ // onsizecontent_start will CANCEL resizing if returns false
+ if (false === _runCallbacks("onsizecontent_start", pane)) return;
+
+ // skip re-measuring offsets if live-resizing
+ if ((!s.isMoving && !s.isResizing) || o.liveContentResizing || remeasure || m.top == undefined) {
+ _measure();
+ // if any footers are below pane-bottom, they may not measure correctly,
+ // so allow pane overflow and re-measure
+ if (m.hiddenFooters > 0 && $P.css("overflow") === "hidden") {
+ $P.css("overflow", "visible");
+ _measure(); // remeasure while overflowing
+ $P.css("overflow", "hidden");
+ }
+ }
+ // NOTE: spaceAbove/Below *includes* the pane paddingTop/Bottom, but not pane.borders
+ var newH = s.innerHeight - (m.spaceAbove - s.css.paddingTop) - (m.spaceBelow - s.css.paddingBottom);
+
+ if (!$C.is(":visible") || m.height != newH) {
+ // size the Content element to fit new pane-size - will autoHide if not enough room
+ setOuterHeight($C, newH, true); // true=autoHide
+ m.height = newH; // save new height
+ };
+
+ if (state.initialized)
+ _runCallbacks("onsizecontent_end", pane);
+
+ // larger of pane paddingBottom and the element's own marginBottom
+ function _below ($E) {
+ return max(s.css.paddingBottom, (parseInt($E.css("marginBottom"), 10) || 0));
+ };
+
+ // (re)build 'm' with offsets of the content-div and its following siblings (footers)
+ function _measure () {
+ var
+ ignore = options[pane].contentIgnoreSelector
+ , $Fs = $C.nextAll().not(ignore || ':lt(0)') // not :lt(0) = ALL
+ , $Fs_vis = $Fs.filter(':visible')
+ , $F = $Fs_vis.filter(':last')
+ ;
+ m = {
+ top: $C[0].offsetTop
+ , height: $C.outerHeight()
+ , numFooters: $Fs.length
+ , hiddenFooters: $Fs.length - $Fs_vis.length
+ , spaceBelow: 0 // correct if no content footer ($E)
+ }
+ m.spaceAbove = m.top; // just for state - not used in calc
+ m.bottom = m.top + m.height;
+ if ($F.length)
+ //spaceBelow = (LastFooter.top + LastFooter.height) [footerBottom] - Content.bottom + max(LastFooter.marginBottom, pane.paddingBotom)
+ m.spaceBelow = ($F[0].offsetTop + $F.outerHeight()) - m.bottom + _below($F);
+ else // no footer - check marginBottom on Content element itself
+ m.spaceBelow = _below($C);
+ };
+ });
+ }
+
+
+ /**
+ * Called every time a pane is opened, closed, or resized to slide the togglers to 'center' and adjust their length if necessary
+ *
+ * @see initHandles(), open(), close(), resizeAll()
+ * @param {string|Object} evt_or_panes The pane(s) being resized
+ */
+, sizeHandles = function (evt_or_panes) {
+ // Size/position each border-pane's resizer bar, and center its toggler button
+ // per togglerAlign/togglerLength options.
+ // NOTE(review): the next statement has no semicolon — ASI terminates it; harmless but worth adding one upstream.
+ var panes = evtPane.call(this, evt_or_panes)
+ panes = panes ? panes.split(",") : _c.borderPanes;
+
+ $.each(panes, function (i, pane) {
+ var
+ o = options[pane]
+ , s = state[pane]
+ , $P = $Ps[pane]
+ , $R = $Rs[pane]
+ , $T = $Ts[pane]
+ , $TC
+ ;
+ if (!$P || !$R) return;
+
+ var
+ dir = _c[pane].dir
+ , _state = (s.isClosed ? "_closed" : "_open")
+ , spacing = o["spacing"+ _state]
+ , togAlign = o["togglerAlign"+ _state]
+ , togLen = o["togglerLength"+ _state]
+ , paneLen
+ , left
+ , offset
+ , CSS = {}
+ ;
+
+ if (spacing === 0) {
+ $R.hide();
+ return;
+ }
+ else if (!s.noRoom && !s.isHidden) // skip if resizer was hidden for any reason
+ $R.show(); // in case was previously hidden
+
+ // Resizer Bar is ALWAYS same width/height of pane it is attached to
+ if (dir === "horz") { // north/south
+ //paneLen = $P.outerWidth(); // s.outerWidth ||
+ paneLen = sC.innerWidth; // handle offscreen-panes
+ s.resizerLength = paneLen;
+ left = $.layout.cssNum($P, "left")
+ $R.css({
+ width: cssW($R, paneLen) // account for borders & padding
+ , height: cssH($R, spacing) // ditto
+ , left: left > -9999 ? left : sC.insetLeft // handle offscreen-panes
+ });
+ }
+ else { // east/west
+ paneLen = $P.outerHeight(); // s.outerHeight ||
+ s.resizerLength = paneLen;
+ $R.css({
+ height: cssH($R, paneLen) // account for borders & padding
+ , width: cssW($R, spacing) // ditto
+ , top: sC.insetTop + getPaneSize("north", true) // TODO: what if no North pane?
+ //, top: $.layout.cssNum($Ps["center"], "top")
+ });
+ }
+
+ // remove hover classes
+ removeHover( o, $R );
+
+ if ($T) {
+ if (togLen === 0 || (s.isSliding && o.hideTogglerOnSlide)) {
+ $T.hide(); // always HIDE the toggler when 'sliding'
+ return;
+ }
+ else
+ $T.show(); // in case was previously hidden
+
+ if (!(togLen > 0) || togLen === "100%" || togLen > paneLen) {
+ togLen = paneLen;
+ offset = 0;
+ }
+ else { // calculate 'offset' based on options.PANE.togglerAlign_open/closed
+ if (isStr(togAlign)) {
+ switch (togAlign) {
+ case "top":
+ case "left": offset = 0;
+ break;
+ case "bottom":
+ case "right": offset = paneLen - togLen;
+ break;
+ case "middle":
+ case "center":
+ default: offset = round((paneLen - togLen) / 2); // 'default' catches typos
+ }
+ }
+ else { // togAlign = number
+ var x = parseInt(togAlign, 10); //
+ if (togAlign >= 0) offset = x;
+ else offset = paneLen - togLen + x; // NOTE: x is negative!
+ }
+ }
+
+ if (dir === "horz") { // north/south
+ var width = cssW($T, togLen);
+ $T.css({
+ width: width // account for borders & padding
+ , height: cssH($T, spacing) // ditto
+ , left: offset // TODO: VERIFY that toggler positions correctly for ALL values
+ , top: 0
+ });
+ // CENTER the toggler content SPAN
+ $T.children(".content").each(function(){
+ $TC = $(this);
+ $TC.css("marginLeft", round((width-$TC.outerWidth())/2)); // could be negative
+ });
+ }
+ else { // east/west
+ var height = cssH($T, togLen);
+ $T.css({
+ height: height // account for borders & padding
+ , width: cssW($T, spacing) // ditto
+ , top: offset // POSITION the toggler
+ , left: 0
+ });
+ // CENTER the toggler content SPAN
+ $T.children(".content").each(function(){
+ $TC = $(this);
+ $TC.css("marginTop", round((height-$TC.outerHeight())/2)); // could be negative
+ });
+ }
+
+ // remove ALL hover classes
+ removeHover( 0, $T );
+ }
+
+ // DONE measuring and sizing this resizer/toggler, so can be 'hidden' now
+ if (!state.initialized && (o.initHidden || s.noRoom)) {
+ $R.hide();
+ if ($T) $T.hide();
+ }
+ });
+ }
+
+
+ /**
+ * @param {string|Object} evt_or_pane
+ */
+, enableClosable = function (evt_or_pane) {
+ // Turn the pane's closable option on and (re)bind the toggler click handler.
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $T = $Ts[pane]
+ , o = options[pane]
+ ;
+ if (!$T) return;
+ o.closable = true;
+ $T .bind("click."+ sID, function(evt){ evt.stopPropagation(); toggle(pane); })
+ .css("visibility", "visible")
+ .css("cursor", "pointer")
+ .attr("title", state[pane].isClosed ? o.tips.Open : o.tips.Close) // may be blank
+ .show();
+ }
+ /**
+ * @param {string|Object} evt_or_pane
+ * @param {boolean=} [hide=false]
+ */
+, disableClosable = function (evt_or_pane, hide) {
+ // Turn the closable option off; re-opens the pane first if it is closed,
+ // then unbinds the toggler and neutralizes its cursor/title.
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $T = $Ts[pane]
+ ;
+ if (!$T) return;
+ options[pane].closable = false;
+ // is closable is disable, then pane MUST be open!
+ if (state[pane].isClosed) open(pane, false, true);
+ $T .unbind("."+ sID)
+ .css("visibility", hide ? "hidden" : "visible") // instead of hide(), which creates logic issues
+ .css("cursor", "default")
+ .attr("title", "");
+ }
+
+
+ /**
+ * @param {string|Object} evt_or_pane
+ */
+, enableSlidable = function (evt_or_pane) {
+ // Enable 'sliding' for the pane; only takes effect if its resizer is draggable.
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $R = $Rs[pane]
+ ;
+ if (!$R || !$R.data('draggable')) return;
+ options[pane].slidable = true;
+ if (state[pane].isClosed)
+ bindStartSlidingEvent(pane, true);
+ }
+ /**
+ * @param {string|Object} evt_or_pane
+ */
+, disableSlidable = function (evt_or_pane) {
+ // Disable 'sliding'; closes the pane if it is currently slid-open, otherwise
+ // unbinds the slide trigger and resets the resizer's cursor/title/hover state.
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $R = $Rs[pane]
+ ;
+ if (!$R) return;
+ options[pane].slidable = false;
+ if (state[pane].isSliding)
+ close(pane, false, true);
+ else {
+ bindStartSlidingEvent(pane, false);
+ $R .css("cursor", "default")
+ .attr("title", "");
+ removeHover(null, $R[0]); // in case currently hovered
+ }
+ }
+
+
+ /**
+ * @param {string|Object} evt_or_pane
+ */
+, enableResizable = function (evt_or_pane) {
+ // Re-enable drag-resizing on the pane's resizer bar (requires draggable init'd).
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $R = $Rs[pane]
+ , o = options[pane]
+ ;
+ if (!$R || !$R.data('draggable')) return;
+ o.resizable = true;
+ $R.draggable("enable");
+ if (!state[pane].isClosed)
+ $R .css("cursor", o.resizerCursor)
+ .attr("title", o.tips.Resize);
+ }
+ /**
+ * @param {string|Object} evt_or_pane
+ */
+, disableResizable = function (evt_or_pane) {
+ // Disable drag-resizing and neutralize the resizer's cursor/title/hover state.
+ if (!isInitialized()) return;
+ var pane = evtPane.call(this, evt_or_pane)
+ , $R = $Rs[pane]
+ ;
+ if (!$R || !$R.data('draggable')) return;
+ options[pane].resizable = false;
+ $R .draggable("disable")
+ .css("cursor", "default")
+ .attr("title", "");
+ removeHover(null, $R[0]); // in case currently hovered
+ }
+
+
+ /**
+ * Move a pane from source-side (eg, west) to target-side (eg, east)
+ * If pane exists on target-side, move that to source-side, ie, 'swap' the panes
+ *
+ * @param {string|Object} evt_or_pane1 The pane/edge being swapped
+ * @param {string} pane2 ditto
+ */
+, swapPanes = function (evt_or_pane1, pane2) {
+ // Swap two edge panes: snapshot both (copy), clear pointers/state, rebuild
+ // each on the opposite edge (move), then resizeAll() to settle dimensions.
+ if (!isInitialized()) return;
+ var pane1 = evtPane.call(this, evt_or_pane1);
+ // change state.edge NOW so callbacks can know where pane is headed...
+ state[pane1].edge = pane2;
+ state[pane2].edge = pane1;
+ // run these even if NOT state.initialized
+ if (false === _runCallbacks("onswap_start", pane1)
+ || false === _runCallbacks("onswap_start", pane2)
+ ) {
+ state[pane1].edge = pane1; // reset
+ state[pane2].edge = pane2;
+ return;
+ }
+
+ var
+ oPane1 = copy( pane1 )
+ , oPane2 = copy( pane2 )
+ , sizes = {}
+ ;
+ sizes[pane1] = oPane1 ? oPane1.state.size : 0;
+ sizes[pane2] = oPane2 ? oPane2.state.size : 0;
+
+ // clear pointers & state
+ $Ps[pane1] = false;
+ $Ps[pane2] = false;
+ state[pane1] = {};
+ state[pane2] = {};
+
+ // ALWAYS remove the resizer & toggler elements
+ if ($Ts[pane1]) $Ts[pane1].remove();
+ if ($Ts[pane2]) $Ts[pane2].remove();
+ if ($Rs[pane1]) $Rs[pane1].remove();
+ if ($Rs[pane2]) $Rs[pane2].remove();
+ $Rs[pane1] = $Rs[pane2] = $Ts[pane1] = $Ts[pane2] = false;
+
+ // transfer element pointers and data to NEW Layout keys
+ move( oPane1, pane2 );
+ move( oPane2, pane1 );
+
+ // cleanup objects
+ oPane1 = oPane2 = sizes = null;
+
+ // make panes 'visible' again
+ if ($Ps[pane1]) $Ps[pane1].css(_c.visible);
+ if ($Ps[pane2]) $Ps[pane2].css(_c.visible);
+
+ // fix any size discrepancies caused by swap
+ resizeAll();
+
+ // run these even if NOT state.initialized
+ _runCallbacks("onswap_end", pane1);
+ _runCallbacks("onswap_end", pane2);
+
+ return;
+
+ // snapshot a pane's DOM elements, state and options (or false if pane missing)
+ function copy (n) { // n = pane
+ var
+ $P = $Ps[n]
+ , $C = $Cs[n]
+ ;
+ return !$P ? false : {
+ pane: n
+ , P: $P ? $P[0] : false
+ , C: $C ? $C[0] : false
+ , state: $.extend(true, {}, state[n])
+ , options: $.extend(true, {}, options[n])
+ }
+ };
+
+ // re-attach a snapshot (oPane) to the target edge 'pane'
+ function move (oPane, pane) {
+ if (!oPane) return;
+ var
+ P = oPane.P
+ , C = oPane.C
+ , oldPane = oPane.pane
+ , c = _c[pane]
+ , side = c.side.toLowerCase()
+ , inset = "inset"+ c.side
+ // save pane-options that should be retained
+ , s = $.extend(true, {}, state[pane])
+ , o = options[pane]
+ // RETAIN side-specific FX Settings - more below
+ , fx = { resizerCursor: o.resizerCursor }
+ , re, size, pos
+ ;
+ $.each("fxName,fxSpeed,fxSettings".split(","), function (i, k) {
+ fx[k +"_open"] = o[k +"_open"];
+ fx[k +"_close"] = o[k +"_close"];
+ fx[k +"_size"] = o[k +"_size"];
+ });
+
+ // update object pointers and attributes
+ $Ps[pane] = $(P)
+ .data({
+ layoutPane: Instance[pane] // NEW pointer to pane-alias-object
+ , layoutEdge: pane
+ })
+ .css(_c.hidden)
+ .css(c.cssReq)
+ ;
+ $Cs[pane] = C ? $(C) : false;
+
+ // set options and state
+ options[pane] = $.extend(true, {}, oPane.options, fx);
+ state[pane] = $.extend(true, {}, oPane.state);
+
+ // change classNames on the pane, eg: ui-layout-pane-east ==> ui-layout-pane-west
+ re = new RegExp(o.paneClass +"-"+ oldPane, "g");
+ P.className = P.className.replace(re, o.paneClass +"-"+ pane);
+
+ // ALWAYS regenerate the resizer & toggler elements
+ initHandles(pane); // create the required resizer & toggler
+
+ // if moving to different orientation, then keep 'target' pane size
+ if (c.dir != _c[oldPane].dir) {
+ size = sizes[pane] || 0;
+ setSizeLimits(pane); // update pane-state
+ size = max(size, state[pane].minSize);
+ // use manualSizePane to disable autoResize - not useful after panes are swapped
+ manualSizePane(pane, size, true, true); // true/true = skipCallback/noAnimation
+ }
+ else // move the resizer here
+ $Rs[pane].css(side, sC[inset] + (state[pane].isVisible ? getPaneSize(pane) : 0));
+
+
+ // ADD CLASSNAMES & SLIDE-BINDINGS
+ if (oPane.state.isVisible && !s.isVisible)
+ setAsOpen(pane, true); // true = skipCallback
+ else {
+ setAsClosed(pane);
+ bindStartSlidingEvent(pane, true); // will enable events IF option is set
+ }
+
+ // DESTROY the object
+ oPane = null;
+ };
+ }
+
+
+ /**
+ * INTERNAL method to sync pin-buttons when pane is opened or closed
+ * Unpinned means the pane is 'sliding' - ie, over-top of the adjacent panes
+ *
+ * @see open(), setAsOpen(), setAsClosed()
+ * @param {string} pane These are the params returned to callbacks by layout()
+ * @param {boolean} doPin True means set the pin 'down', False means 'up'
+ */
+, syncPinBtns = function (pane, doPin) {
+ // No-op unless the buttons plugin is loaded; then sync every registered pin-button.
+ if ($.layout.plugins.buttons)
+ $.each(state[pane].pins, function (i, selector) {
+ $.layout.buttons.setPinState(Instance, $(selector), pane, doPin);
+ });
+ }
+
+; // END var DECLARATIONS
+
+ /**
+ * Capture keys when enableCursorHotkey - toggle pane if hotkey pressed
+ *
+ * @see document.keydown()
+ */
+ function keyDown (evt) {
+ // Keyboard handler: CTRL+arrow toggles the matching edge pane (when
+ // enableCursorHotkey), or a per-pane customHotkey with its modifier.
+ // Returns true to let the browser handle the key, false when consumed.
+ if (!evt) return true;
+ var code = evt.keyCode;
+ if (code < 33) return true; // ignore special keys: ENTER, TAB, etc
+
+ var
+ PANE = {
+ 38: "north" // Up Cursor - $.ui.keyCode.UP
+ , 40: "south" // Down Cursor - $.ui.keyCode.DOWN
+ , 37: "west" // Left Cursor - $.ui.keyCode.LEFT
+ , 39: "east" // Right Cursor - $.ui.keyCode.RIGHT
+ }
+ , ALT = evt.altKey // no worky!
+ , SHIFT = evt.shiftKey
+ , CTRL = evt.ctrlKey
+ , CURSOR = (CTRL && code >= 37 && code <= 40)
+ , o, k, m, pane
+ ;
+
+ if (CURSOR && options[PANE[code]].enableCursorHotkey) // valid cursor-hotkey
+ pane = PANE[code];
+ else if (CTRL || SHIFT) // check to see if this matches a custom-hotkey
+ $.each(_c.borderPanes, function (i, p) { // loop each pane to check its hotkey
+ o = options[p];
+ k = o.customHotkey;
+ m = o.customHotkeyModifier; // if missing or invalid, treated as "CTRL+SHIFT"
+ if ((SHIFT && m=="SHIFT") || (CTRL && m=="CTRL") || (CTRL && SHIFT)) { // Modifier matches
+ if (k && code === (isNaN(k) || k <= 9 ? k.toUpperCase().charCodeAt(0) : k)) { // Key matches
+ pane = p;
+ return false; // BREAK
+ }
+ }
+ });
+
+ // validate pane
+ if (!pane || !$Ps[pane] || !options[pane].closable || state[pane].isHidden)
+ return true;
+
+ toggle(pane);
+
+ evt.stopPropagation();
+ evt.returnValue = false; // CANCEL key
+ return false;
+ };
+
+
+/*
+ * ######################################
+ * UTILITY METHODS
+ * called externally or by initButtons
+ * ######################################
+ */
+
+ /**
+ * Change/reset a pane overflow setting & zIndex to allow popups/drop-downs to work
+ *
+ * @param {Object=} [el] (optional) Can also be 'bound' to a click, mouseOver, or other event
+ */
+ function allowOverflow (el) {
+ if (!isInitialized()) return;
+ if (this && this.tagName) el = this; // BOUND to element
+ var $P;
+ if (isStr(el))
+ $P = $Ps[el];
+ else if ($(el).data("layoutRole"))
+ $P = $(el);
+ else
+ $(el).parents().each(function(){
+ if ($(this).data("layoutRole")) {
+ $P = $(this);
+ return false; // BREAK
+ }
+ });
+ if (!$P || !$P.length) return; // INVALID
+
+ var
+ pane = $P.data("layoutEdge")
+ , s = state[pane]
+ ;
+
+ // if pane is already raised, then reset it before doing it again!
+ // this would happen if allowOverflow is attached to BOTH the pane and an element
+ if (s.cssSaved)
+ resetOverflow(pane); // reset previous CSS before continuing
+
+ // if pane is raised by sliding or resizing, or its closed, then abort
+ if (s.isSliding || s.isResizing || s.isClosed) {
+ s.cssSaved = false;
+ return;
+ }
+
+ var
+ newCSS = { zIndex: (options.zIndexes.resizer_normal + 1) }
+ , curCSS = {}
+ , of = $P.css("overflow")
+ , ofX = $P.css("overflowX")
+ , ofY = $P.css("overflowY")
+ ;
+ // determine which, if any, overflow settings need to be changed
+ if (of != "visible") {
+ curCSS.overflow = of;
+ newCSS.overflow = "visible";
+ }
+ if (ofX && !ofX.match(/(visible|auto)/)) {
+ curCSS.overflowX = ofX;
+ newCSS.overflowX = "visible";
+ }
+ if (ofY && !ofY.match(/(visible|auto)/)) {
+ curCSS.overflowY = ofX;
+ newCSS.overflowY = "visible";
+ }
+
+ // save the current overflow settings - even if blank!
+ s.cssSaved = curCSS;
+
+ // apply new CSS to raise zIndex and, if necessary, make overflow 'visible'
+ $P.css( newCSS );
+
+ // make sure the zIndex of all other panes is normal
+ $.each(_c.allPanes, function(i, p) {
+ if (p != pane) resetOverflow(p);
+ });
+
+ };
+ /**
+ * @param {Object=} [el] (optional) Can also be 'bound' to a click, mouseOver, or other event
+ */
+	function resetOverflow (el) {
+		if (!isInitialized()) return;
+		if (this && this.tagName) el = this; // BOUND to element
+		var $P;
+		if (isStr(el)) // 'el' is a pane-name string
+			$P = $Ps[el];
+		else if ($(el).data("layoutRole")) // 'el' is a layout element itself
+			$P = $(el);
+		else // otherwise walk up the DOM to the containing layout element
+			$(el).parents().each(function(){
+				if ($(this).data("layoutRole")) {
+					$P = $(this);
+					return false; // BREAK
+				}
+			});
+		if (!$P || !$P.length) return; // INVALID
+
+		var
+			pane = $P.data("layoutEdge")
+		,	s = state[pane]
+		,	CSS = s.cssSaved || {} // overflow CSS saved earlier by allowOverflow(), if any
+		;
+		// reset the zIndex
+		if (!s.isSliding && !s.isResizing)
+			$P.css("zIndex", options.zIndexes.pane_normal);
+
+		// reset Overflow - if necessary
+		$P.css( CSS );
+
+		// clear var
+		s.cssSaved = false;
+	};
+
+/*
+ * #####################
+ * CREATE/RETURN LAYOUT
+ * #####################
+ */
+
+ // validate that container exists
+ var $N = $(this).eq(0); // FIRST matching Container element
+ if (!$N.length) {
+ return _log( options.errors.containerMissing );
+ };
+
+ // Users retrieve Instance of a layout with: $N.layout() OR $N.data("layout")
+ // return the Instance-pointer if layout has already been initialized
+ if ($N.data("layoutContainer") && $N.data("layout"))
+ return $N.data("layout"); // cached pointer
+
+ // init global vars
+ var
+ $Ps = {} // Panes x5 - set in initPanes()
+ , $Cs = {} // Content x5 - set in initPanes()
+ , $Rs = {} // Resizers x4 - set in initHandles()
+ , $Ts = {} // Togglers x4 - set in initHandles()
+ , $Ms = $([]) // Masks - up to 2 masks per pane (IFRAME + DIV)
+ // aliases for code brevity
+ , sC = state.container // alias for easy access to 'container dimensions'
+ , sID = state.id // alias for unique layout ID/namespace - eg: "layout435"
+ ;
+
+ // create Instance object to expose data & option Properties, and primary action Methods
+ var Instance = {
+ // layout data
+ options: options // property - options hash
+ , state: state // property - dimensions hash
+ // object pointers
+ , container: $N // property - object pointers for layout container
+ , panes: $Ps // property - object pointers for ALL Panes: panes.north, panes.center
+ , contents: $Cs // property - object pointers for ALL Content: contents.north, contents.center
+ , resizers: $Rs // property - object pointers for ALL Resizers, eg: resizers.north
+ , togglers: $Ts // property - object pointers for ALL Togglers, eg: togglers.north
+ // border-pane open/close
+ , hide: hide // method - ditto
+ , show: show // method - ditto
+ , toggle: toggle // method - pass a 'pane' ("north", "west", etc)
+ , open: open // method - ditto
+ , close: close // method - ditto
+ , slideOpen: slideOpen // method - ditto
+ , slideClose: slideClose // method - ditto
+ , slideToggle: slideToggle // method - ditto
+ // pane actions
+ , setSizeLimits: setSizeLimits // method - pass a 'pane' - update state min/max data
+ , _sizePane: sizePane // method -intended for user by plugins only!
+ , sizePane: manualSizePane // method - pass a 'pane' AND an 'outer-size' in pixels or percent, or 'auto'
+ , sizeContent: sizeContent // method - pass a 'pane'
+ , swapPanes: swapPanes // method - pass TWO 'panes' - will swap them
+ , showMasks: showMasks // method - pass a 'pane' OR list of panes - default = all panes with mask option set
+ , hideMasks: hideMasks // method - ditto'
+ // pane element methods
+ , initContent: initContent // method - ditto
+ , addPane: addPane // method - pass a 'pane'
+ , removePane: removePane // method - pass a 'pane' to remove from layout, add 'true' to delete the pane-elem
+ , createChildLayout: createChildLayout// method - pass a 'pane' and (optional) layout-options (OVERRIDES options[pane].childOptions
+ // special pane option setting
+ , enableClosable: enableClosable // method - pass a 'pane'
+ , disableClosable: disableClosable // method - ditto
+ , enableSlidable: enableSlidable // method - ditto
+ , disableSlidable: disableSlidable // method - ditto
+ , enableResizable: enableResizable // method - ditto
+ , disableResizable: disableResizable// method - ditto
+ // utility methods for panes
+ , allowOverflow: allowOverflow // utility - pass calling element (this)
+ , resetOverflow: resetOverflow // utility - ditto
+ // layout control
+ , destroy: destroy // method - no parameters
+ , initPanes: isInitialized // method - no parameters
+ , resizeAll: resizeAll // method - no parameters
+ // callback triggering
+ , runCallbacks: _runCallbacks // method - pass evtName & pane (if a pane-event), eg: trigger("onopen", "west")
+ // alias collections of options, state and children - created in addPane and extended elsewhere
+ , hasParentLayout: false // set by initContainer()
+ , children: children // pointers to child-layouts, eg: Instance.children["west"]
+ , north: false // alias group: { name: pane, pane: $Ps[pane], options: options[pane], state: state[pane], child: children[pane] }
+ , south: false // ditto
+ , west: false // ditto
+ , east: false // ditto
+ , center: false // ditto
+ };
+
+ // create the border layout NOW
+ if (_create() === 'cancel') // onload_start callback returned false to CANCEL layout creation
+ return null;
+ else // true OR false -- if layout-elements did NOT init (hidden or do not exist), can auto-init later
+ return Instance; // return the Instance object
+
+}
+
+
+/* OLD versions of jQuery only set $.support.boxModel after page is loaded
+ * so if this is IE, use support.boxModel to test for quirks-mode (ONLY IE changes boxModel).
+ */
+$(function(){
+	var b = $.layout.browser;
+	if (b.msie) b.boxModel = $.support.boxModel; // quirks-mode reports boxModel=false; affects IE sizing math
+});
+
+
+/**
+ * jquery.layout.state 1.0
+ * $Date: 2011-07-16 08:00:00 (Sat, 16 July 2011) $
+ *
+ * Copyright (c) 2010
+ * Kevin Dalman (http://allpro.net)
+ *
+ * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
+ * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
+ *
+ * @dependencies: UI Layout 1.3.0.rc30.1 or higher
+ * @dependencies: $.ui.cookie (above)
+ *
+ * @support: http://groups.google.com/group/jquery-ui-layout
+ */
+/*
+ * State-management options stored in options.stateManagement, which includes a .cookie hash
+ * Default options saves ALL KEYS for ALL PANES, ie: pane.size, pane.isClosed, pane.isHidden
+ *
+ * // STATE/COOKIE OPTIONS
+ * @example $(el).layout({
+ stateManagement: {
+ enabled: true
+ , stateKeys: "east.size,west.size,east.isClosed,west.isClosed"
+ , cookie: { name: "appLayout", path: "/" }
+ }
+ })
+ * @example $(el).layout({ stateManagement__enabled: true }) // enable auto-state-management using cookies
+ * @example $(el).layout({ stateManagement__cookie: { name: "appLayout", path: "/" } })
+ * @example $(el).layout({ stateManagement__cookie__name: "appLayout", stateManagement__cookie__path: "/" })
+ *
+ * // STATE/COOKIE METHODS
+ * @example myLayout.saveCookie( "west.isClosed,north.size,south.isHidden", {expires: 7} );
+ * @example myLayout.loadCookie();
+ * @example myLayout.deleteCookie();
+ * @example var JSON = myLayout.readState(); // CURRENT Layout State
+ * @example var JSON = myLayout.readCookie(); // SAVED Layout State (from cookie)
+ * @example var JSON = myLayout.state.stateData; // LAST LOADED Layout State (cookie saved in layout.state hash)
+ *
+ * CUSTOM STATE-MANAGEMENT (eg, saved in a database)
+ * @example var JSON = myLayout.readState( "west.isClosed,north.size,south.isHidden" );
+ * @example myLayout.loadState( JSON );
+ */
+
+/**
+ * UI COOKIE UTILITY
*
- * $Date: 2010-07-13 08:00:00 (Wed, 14 July 2010) $
- * $Rev: 30293 $
+ * A $.cookie OR $.ui.cookie namespace *should be standard*, but until then...
+ * This creates $.ui.cookie so Layout does not need the cookie.jquery.js plugin
+ * NOTE: This utility is REQUIRED by the layout.state plugin
+ *
+ * Cookie methods in Layout are created as part of State Management
+ */
+if (!$.ui) $.ui = {};
+$.ui.cookie = {
+
+	// cookieEnabled is not in DOM specs, but DOES work in all browsers, including IE6
+	acceptsCookies: !!navigator.cookieEnabled
+
+	// Read a cookie by name; returns the decoded value, or null if not found
+,	read: function (name) {
+		var
+			c = document.cookie
+		,	cs = c ? c.split(';') : []
+		,	pair // loop var
+		;
+		for (var i=0, n=cs.length; i < n; i++) {
+			pair = $.trim(cs[i]).split('='); // name=value pair
+			if (pair[0] == name) // found the layout cookie
+				return decodeURIComponent(pair[1]);
+
+		}
+		return null; // no cookie under that name
+	}
+
+	// Write (or delete) a cookie; cookieOpts: { expires, path, domain, secure }
+,	write: function (name, val, cookieOpts) {
+		var
+			params = ''
+		,	date = ''
+		,	clear = false
+		,	o = cookieOpts || {}
+		,	x = o.expires // Date object, number-of-days, or null
+		;
+		if (x && x.toUTCString) // duck-typed Date object - use as-is
+			date = x;
+		else if (x === null || typeof x === 'number') {
+			date = new Date();
+			if (x > 0) // expire x days from now
+				date.setDate(date.getDate() + x);
+			else { // x <= 0 or null - expire in the past so the browser DELETES the cookie
+				date.setFullYear(1970);
+				clear = true;
+			}
+		}
+		if (date) params += ';expires='+ date.toUTCString();
+		if (o.path) params += ';path='+ o.path;
+		if (o.domain) params += ';domain='+ o.domain;
+		if (o.secure) params += ';secure';
+		document.cookie = name +'='+ (clear ? "" : encodeURIComponent( val )) + params; // write or clear cookie
+	}
+
+	// Delete a cookie by writing it with an already-expired date
+,	clear: function (name) {
+		$.ui.cookie.write(name, '', {expires: -1});
+	}
+
+};
+// if cookie.jquery.js is not loaded, create an alias to replicate it
+// this may be useful to other plugins or code dependent on that plugin
+if (!$.cookie) $.cookie = function (k, v, o) {
+	var C = $.ui.cookie;
+	if (v === null) // null value = delete the cookie
+		C.clear(k);
+	else if (v === undefined) // no value passed = read the cookie
+		return C.read(k);
+	else // otherwise write/update it
+		C.write(k, v, o);
+};
+
+
+// tell Layout that the state plugin is available
+$.layout.plugins.stateManagement = true;
+
+// Add State-Management options to layout.defaults
+$.layout.config.optionRootKeys.push("stateManagement");
+$.layout.defaults.stateManagement = {
+ enabled: false // true = enable state-management, even if not using cookies
+, autoSave: true // Save a state-cookie when page exits?
+, autoLoad: true // Load the state-cookie when Layout inits?
+ // List state-data to save - must be pane-specific
+, stateKeys: "north.size,south.size,east.size,west.size,"+
+ "north.isClosed,south.isClosed,east.isClosed,west.isClosed,"+
+ "north.isHidden,south.isHidden,east.isHidden,west.isHidden"
+, cookie: {
+ name: "" // If not specified, will use Layout.name, else just "Layout"
+ , domain: "" // blank = current domain
+ , path: "" // blank = current page, '/' = entire website
+ , expires: "" // 'days' to keep cookie - leave blank for 'session cookie'
+ , secure: false
+ }
+};
+// Set stateManagement as a layout-option, NOT a pane-option
+$.layout.optionsMap.layout.push("stateManagement");
+
+/*
+ * State Management methods
+ */
+$.layout.state = {
+
+	/**
+	 * Get the current layout state and save it to a cookie
+	 *
+	 * myLayout.saveCookie( keys, cookieOpts )
+	 *
+	 * @param {Object} inst Layout Instance object
+	 * @param {(string|Array)=} keys State-keys to save (default: options.stateManagement.stateKeys)
+	 * @param {Object=} cookieOpts Overrides merged over options.stateManagement.cookie
+	 */
+	saveCookie: function (inst, keys, cookieOpts) {
+		var o = inst.options
+		,	oS = o.stateManagement
+		,	oC = $.extend(true, {}, oS.cookie, cookieOpts || null)
+		,	data = inst.state.stateData = inst.readState( keys || oS.stateKeys ) // read current panes-state
+		;
+		$.ui.cookie.write( oC.name || o.name || "Layout", $.layout.state.encodeJSON(data), oC );
+		return $.extend(true, {}, data); // return COPY of state.stateData data
+	}
+
+	/**
+	 * Remove the state cookie
+	 *
+	 * @param {Object} inst Layout Instance object
+	 */
+,	deleteCookie: function (inst) {
+		var o = inst.options;
+		$.ui.cookie.clear( o.stateManagement.cookie.name || o.name || "Layout" );
+	}
+
+	/**
+	 * Read & return data from the cookie - as JSON
+	 *
+	 * @param {Object} inst Layout Instance object
+	 */
+,	readCookie: function (inst) {
+		var o = inst.options;
+		var c = $.ui.cookie.read( o.stateManagement.cookie.name || o.name || "Layout" );
+		// convert cookie string back to a hash and return it
+		return c ? $.layout.state.decodeJSON(c) : {};
+	}
+
+	/**
+	 * Get data from the cookie and USE IT to loadState
+	 *
+	 * @param {Object} inst Layout Instance object
+	 */
+,	loadCookie: function (inst) {
+		var c = $.layout.state.readCookie(inst); // READ the cookie
+		if (c) {
+			inst.state.stateData = $.extend(true, {}, c); // SET state.stateData
+			inst.loadState(c); // LOAD the retrieved state
+		}
+		return c; // hash of cookie data, or {} if none/invalid
+	}
+
+ /**
+ * Update layout options from the cookie, if one exists
+ *
+ * @param {Object} inst
+ * @param {Object=} stateData
+ * @param {boolean=} animate
+ */
+, loadState: function (inst, stateData, animate) {
+ stateData = $.layout.transformData( stateData ); // panes = default subkey
+ if ($.isEmptyObject( stateData )) return;
+ $.extend(true, inst.options, stateData); // update layout options
+ // if layout has already been initialized, then UPDATE layout state
+ if (inst.state.initialized) {
+ var pane, vis, o, s, h, c
+ , noAnimate = (animate===false)
+ ;
+ $.each($.layout.config.borderPanes, function (idx, pane) {
+ state = inst.state[pane];
+ o = stateData[ pane ];
+ if (typeof o != 'object') return; // no key, continue
+ s = o.size;
+ c = o.initClosed;
+ h = o.initHidden;
+ vis = state.isVisible;
+ // resize BEFORE opening
+ if (!vis)
+ inst.sizePane(pane, s, false, false);
+ if (h === true) inst.hide(pane, noAnimate);
+ else if (c === false) inst.open (pane, false, noAnimate);
+ else if (c === true) inst.close(pane, false, noAnimate);
+ else if (h === false) inst.show (pane, false, noAnimate);
+ // resize AFTER any other actions
+ if (vis)
+ inst.sizePane(pane, s, false, noAnimate); // animate resize if option passed
+ });
+ };
+ }
+
+	/**
+	 * Get the *current layout state* and return it as a hash
+	 *
+	 * @param {Object=} inst Layout Instance object
+	 * @param {(string|Array)=} keys State-keys to read (default: options.stateManagement.stateKeys)
+	 */
+,	readState: function (inst, keys) {
+		var
+			data = {}
+		,	alt = { isClosed: 'initClosed', isHidden: 'initHidden' } // map runtime state-keys to the option-keys loadState expects
+		,	state = inst.state
+		,	panes = $.layout.config.allPanes
+		,	pair, pane, key, val
+		;
+		if (!keys) keys = inst.options.stateManagement.stateKeys; // if called by user
+		if ($.isArray(keys)) keys = keys.join(",");
+		// convert keys to an array and change delimiters from '__' to '.'
+		keys = keys.replace(/__/g, ".").split(',');
+		// loop keys and create a data hash
+		for (var i=0, n=keys.length; i < n; i++) {
+			pair = keys[i].split(".");
+			pane = pair[0];
+			key = pair[1];
+			if ($.inArray(pane, panes) < 0) continue; // bad pane!
+			val = state[ pane ][ key ];
+			if (val == undefined) continue;
+			if (key=="isClosed" && state[pane]["isSliding"])
+				val = true; // if sliding, then *really* isClosed
+			( data[pane] || (data[pane]={}) )[ alt[key] ? alt[key] : key ] = val;
+		}
+		return data;
+	}
+
+	/**
+	 * Stringify a JSON hash so can save in a cookie or db-field
+	 */
+,	encodeJSON: function (JSON) { // NOTE(review): param name shadows window.JSON - harmless here since the global is never used
+		return parse(JSON);
+		function parse (h) {
+			var D=[], i=0, k, v, t; // k = key, v = value
+			for (k in h) {
+				v = h[k];
+				t = typeof v;
+				if (t == 'string') // STRING - add quotes
+					v = '"'+ v +'"';
+				else if (t == 'object') // SUB-KEY - recurse into it
+					v = parse(v);
+				D[i++] = '"'+ k +'":'+ v;
+			}
+			return '{'+ D.join(',') +'}';
+		};
+	}
+
+	/**
+	 * Convert stringified JSON back to a hash object
+	 * @see $.parseJSON(), added in jQuery 1.4.1
+	 */
+,	decodeJSON: function (str) {
+		try { return $.parseJSON ? $.parseJSON(str) : window["eval"]("("+ str +")") || {}; } // eval fallback only for jQuery < 1.4.1
+		catch (e) { return {}; } // invalid JSON - return an empty hash
+	}
+
+
+	// onCreate hook: attach instance-bound state methods and optionally auto-load the cookie
+,	_create: function (inst) {
+		var _ = $.layout.state;
+		// ADD State-Management plugin methods to inst
+		$.extend( inst, {
+			// readCookie - update options from cookie - returns hash of cookie data
+			readCookie: function () { return _.readCookie(inst); }
+			// deleteCookie
+		,	deleteCookie: function () { _.deleteCookie(inst); }
+			// saveCookie - optionally pass keys-list and cookie-options (hash)
+		,	saveCookie: function (keys, cookieOpts) { return _.saveCookie(inst, keys, cookieOpts); }
+			// loadCookie - readCookie and use to loadState() - returns hash of cookie data
+		,	loadCookie: function () { return _.loadCookie(inst); }
+			// loadState - pass a hash of state to use to update options
+		,	loadState: function (stateData, animate) { _.loadState(inst, stateData, animate); }
+			// readState - returns hash of current layout-state
+		,	readState: function (keys) { return _.readState(inst, keys); }
+			// add JSON utility methods too...
+		,	encodeJSON: _.encodeJSON
+		,	decodeJSON: _.decodeJSON
+		});
+
+		// init state.stateData key, even if plugin is initially disabled
+		inst.state.stateData = {};
+
+		// read and load cookie-data per options
+		var oS = inst.options.stateManagement;
+		if (oS.enabled) {
+			if (oS.autoLoad) // update the options from the cookie
+				inst.loadCookie();
+			else // don't modify options - just store cookie data in state.stateData
+				inst.state.stateData = inst.readCookie();
+		}
+	}
+
+	// onUnload hook: persist (or just capture) the layout state on page exit
+,	_unload: function (inst) {
+		var oS = inst.options.stateManagement;
+		if (oS.enabled) {
+			if (oS.autoSave) // save a state-cookie automatically
+				inst.saveCookie();
+			else // don't save a cookie, but do store state-data in state.stateData key
+				inst.state.stateData = inst.readState();
+		}
+	}
+
+};
+
+// add state initialization method to Layout's onCreate array of functions
+$.layout.onCreate.push( $.layout.state._create );
+$.layout.onUnload.push( $.layout.state._unload );
+
+
+
+
+/**
+ * jquery.layout.buttons 1.0
+ * $Date: 2011-07-16 08:00:00 (Sat, 16 July 2011) $
+ *
+ * Copyright (c) 2010
+ * Kevin Dalman (http://allpro.net)
+ *
+ * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
+ * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
+ *
+ * @dependencies: UI Layout 1.3.0.rc30.1 or higher
+ *
+ * @support: http://groups.google.com/group/jquery-ui-layout
+ *
+ * Docs: [ to come ]
+ * Tips: [ to come ]
+ */
+
+// tell Layout that the state plugin is available
+$.layout.plugins.buttons = true;
+
+// Add buttons options to layout.defaults
+$.layout.defaults.autoBindCustomButtons = false;
+// Specify autoBindCustomButtons as a layout-option, NOT a pane-option
+$.layout.optionsMap.layout.push("autoBindCustomButtons");
+
+/*
+ * Button methods
+ */
+$.layout.buttons = {
+
+	/**
+	 * Searches for .ui-layout-button-xxx elements and auto-binds them as layout-buttons
+	 *
+	 * @see _create()
+	 *
+	 * @param {Object} inst Layout Instance object
+	 */
+	init: function (inst) {
+		var pre = "ui-layout-button-"
+		,	layout = inst.options.name || ""
+		,	name;
+		$.each("toggle,open,close,pin,toggle-slide,open-slide".split(","), function (i, action) {
+			$.each($.layout.config.borderPanes, function (ii, pane) {
+				$("."+pre+action+"-"+pane).each(function(){
+					// if button was previously 'bound', data.layoutName was set, but is blank if layout has no 'name'
+					name = $(this).data("layoutName") || $(this).attr("layoutName");
+					if (name == undefined || name === layout) // loose == matches both undefined and null
+						inst.bindButton(this, action, pane);
+				});
+			});
+		});
+	}
+
+	/**
+	 * Helper function to validate params received by addButton utilities
+	 *
+	 * Two classes are added to the element, based on the buttonClass...
+	 * The type of button is appended to create the 2nd className:
+	 * - ui-layout-button-pin // action btnClass
+	 * - ui-layout-button-pin-west // action btnClass + pane
+	 * - ui-layout-button-toggle
+	 * - ui-layout-button-open
+	 * - ui-layout-button-close
+	 *
+	 * @param {Object} inst Layout Instance object
+	 * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
+	 * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
+	 * @param {string} action Button action: 'toggle', 'open', 'close' or 'pin'
+	 *
+	 * @return {Array.<Object>} If both params valid, the element matching 'selector' in a jQuery wrapper - otherwise returns null
+	 */
+,	get: function (inst, selector, pane, action) {
+		var $E = $(selector)
+		,	o = inst.options
+		,	err = o.errors.addButtonError
+		;
+		if (!$E.length) { // element not found
+			$.layout.msg(err +" "+ o.errors.selector +": "+ selector, true);
+		}
+		else if ($.inArray(pane, $.layout.config.borderPanes) < 0) { // invalid 'pane' specified
+			$.layout.msg(err +" "+ o.errors.pane +": "+ pane, true);
+			$E = $(""); // NO BUTTON
+		}
+		else { // VALID
+			var btn = o[pane].buttonClass +"-"+ action;
+			$E .addClass( btn +" "+ btn +"-"+ pane )
+				.data("layoutName", o.name); // add layout identifier - even if blank!
+		}
+		return $E;
+	}
+
+
+ /**
+ * NEW syntax for binding layout-buttons - will eventually replace addToggle, addOpen, etc.
+ *
+ * @param {Object} inst Layout Instance object
+ * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
+ * @param {string} action
+ * @param {string} pane
+ */
+,	bind: function (inst, selector, action, pane) {
+		var _ = $.layout.buttons;
+		switch (action.toLowerCase()) { // normalized so "Toggle" and "toggle" both work
+			case "toggle": _.addToggle (inst, selector, pane); break;
+			case "open": _.addOpen (inst, selector, pane); break;
+			case "close": _.addClose (inst, selector, pane); break;
+			case "pin": _.addPin (inst, selector, pane); break;
+			case "toggle-slide": _.addToggle (inst, selector, pane, true); break;
+			case "open-slide": _.addOpen (inst, selector, pane, true); break;
+		}
+		return inst; // chainable; unknown actions are silently ignored
+	}
+
+	/**
+	 * Add a custom Toggler button for a pane
+	 *
+	 * @param {Object} inst Layout Instance object
+	 * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
+	 * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
+	 * @param {boolean=} slide true = slide-open, false = pin-open
+	 */
+,	addToggle: function (inst, selector, pane, slide) {
+		// NOTE(review): unlike addOpen/addClose, no tooltip title is set here - confirm whether options[pane].tips has a Toggle key before adding one
+		$.layout.buttons.get(inst, selector, pane, "toggle")
+			.click(function(evt){
+				inst.toggle(pane, !!slide);
+				evt.stopPropagation();
+			});
+		return inst;
+	}
+
+	/**
+	 * Add a custom Open button for a pane
+	 *
+	 * @param {Object} inst Layout Instance object
+	 * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
+	 * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
+	 * @param {boolean=} slide true = slide-open, false = pin-open
+	 */
+,	addOpen: function (inst, selector, pane, slide) {
+		$.layout.buttons.get(inst, selector, pane, "open")
+			.attr("title", inst.options[pane].tips.Open)
+			.click(function (evt) {
+				inst.open(pane, !!slide);
+				evt.stopPropagation();
+			});
+		return inst;
+	}
+
+	/**
+	 * Add a custom Close button for a pane
+	 *
+	 * @param {Object} inst Layout Instance object
+	 * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
+	 * @param {string} pane Name of the pane the button is for: 'north', 'south', etc.
+	 */
+,	addClose: function (inst, selector, pane) {
+		$.layout.buttons.get(inst, selector, pane, "close")
+			.attr("title", inst.options[pane].tips.Close)
+			.click(function (evt) {
+				inst.close(pane);
+				evt.stopPropagation();
+			});
+		return inst;
+	}
+
+ /**
+ * Add a custom Pin button for a pane
+ *
+ * Four classes are added to the element, based on the paneClass for the associated pane...
+ * Assuming the default paneClass and the pin is 'up', these classes are added for a west-pane pin:
+ * - ui-layout-pane-pin
+ * - ui-layout-pane-west-pin
+ * - ui-layout-pane-pin-up
+ * - ui-layout-pane-west-pin-up
+ *
+ * @param {Object} inst Layout Instance object
+ * @param {(string|!Object)} selector jQuery selector (or element) for button, eg: ".ui-layout-north .toggle-button"
+ * @param {string} pane Name of the pane the pin is for: 'north', 'south', etc.
+ */
+,	addPin: function (inst, selector, pane) {
+		var _ = $.layout.buttons
+		,	$E = _.get(inst, selector, pane, "pin");
+		if ($E.length) {
+			var s = inst.state[pane];
+			$E.click(function (evt) {
+				// flip the pin visual FIRST, then open/close to match the new state
+				_.setPinState(inst, $(this), pane, (s.isSliding || s.isClosed));
+				if (s.isSliding || s.isClosed) inst.open( pane ); // change from sliding to open
+				else inst.close( pane ); // slide-closed
+				evt.stopPropagation();
+			});
+			// add up/down pin attributes and classes
+			_.setPinState(inst, $E, pane, (!s.isClosed && !s.isSliding));
+			// add this pin to the pane data so we can 'sync it' automatically
+			// PANE.pins key is an array so we can store multiple pins for each pane
+			s.pins.push( selector ); // just save the selector string
+		}
+		return inst;
+	}
+
+	/**
+	 * Change the class of the pin button to make it look 'up' or 'down'
+	 *
+	 * @see addPin(), syncPins()
+	 *
+	 * @param {Object} inst Layout Instance object
+	 * @param {Array.<Object>} $Pin The pin-span element in a jQuery wrapper
+	 * @param {string} pane These are the params returned to callbacks by layout()
+	 * @param {boolean} doPin true = set the pin 'down', false = set it 'up'
+	 */
+,	setPinState: function (inst, $Pin, pane, doPin) {
+		var updown = $Pin.attr("pin");
+		if (updown && doPin === (updown=="down")) return; // already in correct state
+		var
+			o = inst.options[pane]
+		,	pin = o.buttonClass +"-pin"
+		,	side = pin +"-"+ pane
+		,	UP = pin +"-up "+ side +"-up"
+		,	DN = pin +"-down "+side +"-down"
+		;
+		$Pin
+			.attr("pin", doPin ? "down" : "up") // logic
+			.attr("title", doPin ? o.tips.Unpin : o.tips.Pin)
+			.removeClass( doPin ? UP : DN )
+			.addClass( doPin ? DN : UP )
+		;
+	}
+
+	/**
+	 * INTERNAL function to sync 'pin buttons' when pane is opened or closed
+	 * Unpinned means the pane is 'sliding' - ie, over-top of the adjacent panes
+	 *
+	 * @see open(), close()
+	 *
+	 * @param {Object} inst Layout Instance object
+	 * @param {string} pane These are the params returned to callbacks by layout()
+	 * @param {boolean} doPin True means set the pin 'down', False means 'up'
+	 */
+,	syncPinBtns: function (inst, pane, doPin) {
+		// REAL METHOD IS _INSIDE_ LAYOUT - THIS IS HERE JUST FOR REFERENCE
+		$.each(inst.state[pane].pins, function (i, selector) {
+			$.layout.buttons.setPinState(inst, $(selector), pane, doPin);
+		});
+	}
+
+
+	// onLoad hook: attach instance-bound button methods and auto-bind buttons if enabled
+,	_load: function (inst) {
+		var _ = $.layout.buttons;
+		// ADD Button methods to Layout Instance
+		// Note: sel = jQuery Selector string
+		$.extend( inst, {
+			bindButton: function (sel, action, pane) { return _.bind(inst, sel, action, pane); }
+			// DEPRECATED METHODS
+		,	addToggleBtn: function (sel, pane, slide) { return _.addToggle(inst, sel, pane, slide); }
+		,	addOpenBtn: function (sel, pane, slide) { return _.addOpen(inst, sel, pane, slide); }
+		,	addCloseBtn: function (sel, pane) { return _.addClose(inst, sel, pane); }
+		,	addPinBtn: function (sel, pane) { return _.addPin(inst, sel, pane); }
+		});
+
+		// init state array to hold pin-buttons
+		for (var i=0; i<4; i++) {
+			var pane = $.layout.config.borderPanes[i];
+			inst.state[pane].pins = [];
+		}
+
+		// auto-init buttons onLoad if option is enabled
+		if ( inst.options.autoBindCustomButtons )
+			_.init(inst);
+	}
+
+,	_unload: function (inst) {
+		// TODO: unbind all buttons???
+	}
+
+};
+
+// add initialization method to Layout's onLoad array of functions
+$.layout.onLoad.push( $.layout.buttons._load );
+//$.layout.onUnload.push( $.layout.buttons._unload );
+
+
+
+/**
+ * jquery.layout.browserZoom 1.0
+ * $Date: 2011-12-29 08:00:00 (Thu, 29 Dec 2011) $
+ *
+ * Copyright (c) 2012
+ * Kevin Dalman (http://allpro.net)
+ *
+ * Dual licensed under the GPL (http://www.gnu.org/licenses/gpl.html)
+ * and MIT (http://www.opensource.org/licenses/mit-license.php) licenses.
+ *
+ * @dependencies: UI Layout 1.3.0.rc30.1 or higher
+ *
+ * @support: http://groups.google.com/group/jquery-ui-layout
+ *
+ * @todo: Extend logic to handle other problematic zooming in browsers
+ * @todo: Add hotkey/mousewheel bindings to _instantly_ respond to these zoom events
+ */
+
+// tell Layout that the plugin is available
+$.layout.plugins.browserZoom = true;
+
+$.layout.defaults.browserZoomCheckInterval = 1000;
+$.layout.optionsMap.layout.push("browserZoomCheckInterval");
+
+/*
+ * browserZoom methods
*/
-(function($){$.fn.layout=function(opts){var lang={Pane:"Pane",Open:"Open",Close:"Close",Resize:"Resize",Slide:"Slide Open",Pin:"Pin",Unpin:"Un-Pin",selector:"selector",msgNoRoom:"Not enough room to show this pane.",errContainerMissing:"UI Layout Initialization Error\n\nThe specified layout-container does not exist.",errCenterPaneMissing:"UI Layout Initialization Error\n\nThe center-pane element does not exist.\n\nThe center-pane is a required element.",errContainerHeight:"UI Layout Initi [...]
+$.layout.browserZoom = {
+
+	_init: function (inst) {
+		// abort if browser does not need this check
+		if ($.layout.browserZoom.ratio() !== false) // false = zoom cannot / need not be tracked
+			$.layout.browserZoom._setTimer(inst);
+	}
+
+	// Poll the zoom ratio on a timer and resizeAll() when it changes
+,	_setTimer: function (inst) {
+		// abort if layout destroyed or browser does not need this check
+		if (inst.destroyed) return;
+		var o = inst.options
+		,	s = inst.state
+			// don't need check if inst has parentLayout, but check occasionally in case parent destroyed!
+			// MINIMUM 100ms interval, for performance
+		,	ms = inst.hasParentLayout ? 5000 : Math.max( o.browserZoomCheckInterval, 100 )
+		;
+		// set the timer
+		setTimeout(function(){
+			if (inst.destroyed || !o.resizeWithWindow) return;
+			var d = $.layout.browserZoom.ratio();
+			if (d !== s.browserZoom) { // zoom level changed since last poll
+				s.browserZoom = d;
+				inst.resizeAll();
+			}
+			// set a NEW timeout
+			$.layout.browserZoom._setTimer(inst);
+		}
+		, ms );
+	}
+
+	// Returns the current zoom ratio as a string percentage, or false if zoom need not be tracked
+,	ratio: function () {
+		var w = window
+		,	s = screen
+		,	d = document
+		,	dE = d.documentElement || d.body
+		,	b = $.layout.browser
+		,	v = b.version
+		,	r, sW, cW
+		;
+		// we can ignore all browsers that fire window.resize event onZoom
+		if ((b.msie && v > 8)
+		|| !b.msie
+		) return false; // don't need to track zoom
+
+		if (s.deviceXDPI) // IE exposes zoom via device-vs-system DPI
+			return calc(s.deviceXDPI, s.systemXDPI);
+		// everything below is just for future reference!
+		if (b.webkit && (r = d.body.getBoundingClientRect)) // NOTE(review): assigns the function itself (never called) - unreached reference code
+			return calc((r.left - r.right), d.body.offsetWidth);
+		if (b.webkit && (sW = w.outerWidth))
+			return calc(sW, w.innerWidth);
+		if ((sW = s.width) && (cW = dE.clientWidth))
+			return calc(sW, cW);
+		return false; // no match, so cannot - or don't need to - track zoom
+
+		function calc (x,y) { return (parseInt(x,10) / parseInt(y,10) * 100).toFixed(); }
+	}
+
+};
+// add initialization method to Layout's onLoad array of functions
+$.layout.onReady.push( $.layout.browserZoom._init );
+
+
+
+})( jQuery );
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
new file mode 100644
index 0000000..4688d63
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
@@ -0,0 +1,4 @@
+/* Modernizr 2.5.3 (Custom Build) | MIT & BSD
+ * Build: http://www.modernizr.com/download/#-inlinesvg
+ */
+;window.Modernizr=function(a,b,c){function u(a){i.cssText=a}function v(a,b){return u(prefixes.join(a+";")+(b||""))}function w(a,b){return typeof a===b}function x(a,b){return!!~(""+a).indexOf(b)}function y(a,b,d){for(var e in a){var f=b[a[e]];if(f!==c)return d===!1?a[e]:w(f,"function")?f.bind(d||b):f}return!1}var d="2.5.3",e={},f=b.documentElement,g="modernizr",h=b.createElement(g),i=h.style,j,k={}.toString,l={svg:"http://www.w3.org/2000/svg"},m={},n={},o={},p=[],q=p.slice,r,s={}.hasOwnPr [...]
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
new file mode 100644
index 0000000..6e9f2f7
Binary files /dev/null and b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
new file mode 100644
index 0000000..7502942
Binary files /dev/null and b/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
new file mode 100644
index 0000000..d30dbad
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
@@ -0,0 +1,10 @@
+// ┌────────────────────────────────────────────────────────────────────┐ \\
+// │ Raphaël 2.1.0 - JavaScript Vector Library │ \\
+// ├────────────────────────────────────────────────────────────────────┤ \\
+// │ Copyright © 2008-2012 Dmitry Baranovskiy (http://raphaeljs.com) │ \\
+// │ Copyright © 2008-2012 Sencha Labs (http://sencha.com) │ \\
+// ├────────────────────────────────────────────────────────────────────┤ \\
+// │ Licensed under the MIT (http://raphaeljs.com/license.html) license.│ \\
+// └────────────────────────────────────────────────────────────────────┘ \\
+
+(function(a){var b="0.3.4",c="hasOwnProperty",d=/[\.\/]/,e="*",f=function(){},g=function(a,b){return a-b},h,i,j={n:{}},k=function(a,b){var c=j,d=i,e=Array.prototype.slice.call(arguments,2),f=k.listeners(a),l=0,m=!1,n,o=[],p={},q=[],r=h,s=[];h=a,i=0;for(var t=0,u=f.length;t<u;t++)"zIndex"in f[t]&&(o.push(f[t].zIndex),f[t].zIndex<0&&(p[f[t].zIndex]=f[t]));o.sort(g);while(o[l]<0){n=p[o[l++]],q.push(n.apply(b,e));if(i){i=d;return q}}for(t=0;t<u;t++){n=f[t];if("zIndex"in n)if(n.zIndex==o[l]){ [...]
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css
old mode 100644
new mode 100755
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt b/src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt
deleted file mode 100644
index 6145429..0000000
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt
+++ /dev/null
@@ -1,27 +0,0 @@
-This is the documentation for the Scala standard library.
-
-== Package structure ==
-
-The [[scala]] package contains core types.
-
-scala.[[scala.collection]] and its subpackages contain a collections framework with higher-order functions for manipulation. Both [[scala.collection.immutable]] and [[scala.collection.mutable]] data structures are available, with immutable as the default. The [[scala.collection.parallel]] collections provide automatic parallel operation.
-
-Other important packages include:
-
- - scala.[[scala.actors]] - Concurrency framework inspired by Erlang.
- - scala.[[scala.io]] - Input and output.
- - scala.[[scala.math]] - Basic math functions and additional numeric types.
- - scala.[[scala.sys]] - Interaction with other processes and the operating system.
- - scala.util.[[scala.util.matching]] - Pattern matching in text using regular expressions.
- - scala.util.parsing.[[scala.util.parsing.combinator]] - Composable combinators for parsing.
- - scala.[[scala.xml]] - XML parsing, manipulation, and serialization.
-
-Many other packages exist. See the complete list on the left.
-
-== Automatic imports ==
-
-Identifiers in the scala package and the [[scala.Predef]] object are always in scope by default.
-
-Some of these identifiers are type aliases provided as shortcuts to commonly used classes. For example, List is an alias for scala.collection.immutable.[[scala.collection.immutable.List]].
-
-Other aliases refer to classes providing by the underlying platform. For example, on the JVM, String is an alias for java.lang.String.
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
new file mode 100644
index 0000000..bc29efb
Binary files /dev/null and b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
new file mode 100644
index 0000000..8313f49
Binary files /dev/null and b/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
index 3af29a1..b066027 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -106,7 +106,7 @@ a[href]:hover {
font-size: 24pt;
text-shadow: black 0px 2px 0px;
/* text-shadow: black 0px 0px 0px;*/
-text-decoration: none;
+text-decoration: none;
}
#definition #owner {
@@ -162,8 +162,8 @@ text-decoration: none;
padding-left: 15px;
background: url("arrow-right.png") no-repeat 0 3px transparent;
}
-
-.toggleContainer.open .toggle {
+
+.toggleContainer .toggle.open {
background: url("arrow-down.png") no-repeat 0 3px transparent;
}
@@ -171,10 +171,6 @@ text-decoration: none;
margin-top: 5px;
}
-.toggleContainer .showElement {
- padding-left: 15px;
-}
-
.value #definition {
background-color: #2C475C; /* blue */
background-image:url('defbg-blue.gif');
@@ -205,6 +201,11 @@ dl.attributes > dt {
font-style: italic;
}
+dl.attributes > dt.implicit {
+ font-weight: bold;
+ color: darkgreen;
+}
+
dl.attributes > dd {
display: block;
padding-left: 10em;
@@ -241,6 +242,28 @@ dl.attributes > dd {
color: white;
}
+#inheritedMembers > div.conversion > h3 {
+ background: #dadada url("conversionbg.gif") repeat-x bottom left; /* gray */
+ height: 17px;
+ font-style: italic;
+ font-size: 12pt;
+}
+
+#inheritedMembers > div.conversion > h3 * {
+ color: white;
+}
+
+#groupedMembers > div.group > h3 {
+ background: #dadada url("typebg.gif") repeat-x bottom left; /* green */
+ height: 17px;
+ font-size: 12pt;
+}
+
+#groupedMembers > div.group > h3 * {
+ color: white;
+}
+
+
/* Member cells */
div.members > ol {
@@ -310,11 +333,22 @@ div.members > ol > li:last-child {
font-weight: bold;
}
-.signature .symbol .params .implicit {
+.signature .symbol > .implicit {
+ display: inline-block;
+ font-weight: bold;
+ text-decoration: underline;
+ color: darkgreen;
+}
+
+.signature .symbol .shadowed {
+ color: darkseagreen;
+}
+
+.signature .symbol .params > .implicit {
font-style: italic;
}
-.signature .symbol .name.deprecated {
+.signature .symbol .deprecated {
text-decoration: line-through;
}
@@ -340,20 +374,44 @@ div.members > ol > li:last-child {
color: darkgreen;
}
+.full-signature-usecase h4 span {
+ font-size: 10pt;
+}
+
+.full-signature-usecase > #signature {
+ padding-top: 0px;
+}
+
+#template .full-signature-usecase > .signature.closed {
+ background: none;
+}
+
+#template .full-signature-usecase > .signature.opened {
+ background: none;
+}
+
+.full-signature-block {
+ padding: 5px 0 0;
+ border-top: 1px solid #EBEBEB;
+ margin-top: 5px;
+ margin-bottom: 5px;
+}
+
+
/* Comments text formating */
.cmt {}
.cmt p {
- margin: 0.7em 0;
+ margin: 0.7em 0;
}
.cmt p:first-child {
- margin-top: 0;
+ margin-top: 0;
}
.cmt p:last-child {
- margin-bottom: 0;
+ margin-bottom: 0;
}
.cmt h3,
@@ -469,6 +527,20 @@ div.members > ol > li:last-child {
/* Comments structured layout */
+.group > div.comment {
+ padding-top: 5px;
+ padding-bottom: 5px;
+ padding-right: 5px;
+ padding-left: 5px;
+ border: 1px solid #ddd;
+ background-color: #eeeee;
+ margin-top:5px;
+ margin-bottom:5px;
+ margin-right:5px;
+ margin-left:5px;
+ display: block;
+}
+
p.comment {
display: block;
margin-left: 14.7em;
@@ -515,7 +587,7 @@ div.fullcommenttop .block {
margin-bottom: 5px
}
-div.fullcomment div.block ol li p,
+div.fullcomment div.block ol li p,
div.fullcomment div.block ol li {
display:inline
}
@@ -559,10 +631,10 @@ div.fullcomment dl.paramcmts > dd {
/* Members filter tool */
#textfilter {
- position: relative;
- display: block;
+ position: relative;
+ display: block;
height: 20px;
- margin-bottom: 5px;
+ margin-bottom: 5px;
}
#textfilter > .pre {
@@ -576,7 +648,7 @@ div.fullcomment dl.paramcmts > dd {
}
#textfilter > .input {
- display: block;
+ display: block;
position: absolute;
top: 0;
right: 20px;
@@ -584,10 +656,10 @@ div.fullcomment dl.paramcmts > dd {
}
#textfilter > .input > input {
- height: 20px;
- padding: 1px;
- font-weight: bold;
- color: #000000;
+ height: 20px;
+ padding: 1px;
+ font-weight: bold;
+ color: #000000;
background: #ffffff url("filterboxbarbg.png") repeat-x top left;
width: 100%;
}
@@ -636,6 +708,13 @@ div.fullcomment dl.paramcmts > dd {
display: inline-block;
}
+#mbrsel > div > a {
+ position:relative;
+ top: -8px;
+ font-size: 11px;
+ text-shadow: #ffffff 0 1px 0;
+}
+
#mbrsel > div > ol#linearization {
display: table;
margin-left: 70px;
@@ -659,9 +738,32 @@ div.fullcomment dl.paramcmts > dd {
text-shadow: #ffffff 0 1px 0;
}
+#mbrsel > div > ol#implicits {
+ display: table;
+ margin-left: 70px;
+}
+
+#mbrsel > div > ol#implicits > li.in {
+ text-decoration: none;
+ float: left;
+ padding-right: 10px;
+ margin-right: 5px;
+ background: url(selected-right-implicits.png) no-repeat;
+ background-position: right 0px;
+}
+
+#mbrsel > div > ol#implicits > li.in > span{
+ color: #404040;
+ float: left;
+ padding: 1px 0 1px 10px;
+ background: url(selected-implicits.png) no-repeat;
+ background-position: 0px 0px;
+ text-shadow: #ffffff 0 1px 0;
+}
+
#mbrsel > div > ol > li {
/* padding: 3px 10px;*/
- line-height: 16pt;
+ line-height: 16pt;
display: inline-block;
cursor: pointer;
}
@@ -685,10 +787,10 @@ div.fullcomment dl.paramcmts > dd {
}
#mbrsel > div > ol > li.out {
- text-decoration: none;
- float: left;
- padding-right: 10px;
- margin-right: 5px;
+ text-decoration: none;
+ float: left;
+ padding-right: 10px;
+ margin-right: 5px;
}
#mbrsel > div > ol > li.out > span{
@@ -715,10 +817,32 @@ div.fullcomment dl.paramcmts > dd {
#mbrsel .showall {
color: #4C4C4C;
line-height: 16px;
- font-weight: bold;
+ font-weight: bold;
}
#mbrsel .showall span {
color: #4C4C4C;
- font-weight: bold;
+ font-weight: bold;
}*/
+
+.badge {
+ display: inline-block;
+ padding: 2px 4px;
+ font-size: 11.844px;
+ font-weight: bold;
+ line-height: 14px;
+ color: #ffffff;
+ text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25);
+ white-space: nowrap;
+ vertical-align: baseline;
+ background-color: #999999;
+ padding-right: 9px;
+ padding-left: 9px;
+ -webkit-border-radius: 9px;
+ -moz-border-radius: 9px;
+ border-radius: 9px;
+}
+
+.badge-red {
+ background-color: #b94a48;
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
index 3cdd9a7..6d1caf6 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
@@ -2,30 +2,76 @@
// code by Gilles Dubochet with contributions by Pedro Furlanetto
$(document).ready(function(){
- var isHiddenClass;
- if (document.title == 'scala.AnyRef') {
- isHiddenClass = function (name) {
- return name == 'scala.Any';
- };
- } else {
- isHiddenClass = function (name) {
- return name == 'scala.Any' || name == 'scala.AnyRef';
- };
+
+ // Escapes special characters and returns a valid jQuery selector
+ function escapeJquery(str){
+ return str.replace(/([;&,\.\+\*\~':"\!\^#$%@\[\]\(\)=>\|])/g, '\\$1');
}
- $("#linearization li").filter(function(){
+ // highlight and jump to selected member
+ if (window.location.hash) {
+ var temp = window.location.hash.replace('#', '');
+ var elem = '#'+escapeJquery(temp);
+
+ window.scrollTo(0, 0);
+ $(elem).parent().effect("highlight", {color: "#FFCC85"}, 3000);
+ $('html,body').animate({scrollTop:$(elem).parent().offset().top}, 1000);
+ }
+
+ var isHiddenClass = function (name) {
+ return name == 'scala.Any' ||
+ name == 'scala.AnyRef';
+ };
+
+ var isHidden = function (elem) {
+ return $(elem).attr("data-hidden") == 'true';
+ };
+
+ $("#linearization li:gt(0)").filter(function(){
return isHiddenClass($(this).attr("name"));
}).removeClass("in").addClass("out");
-
+
+ $("#implicits li").filter(function(){
+ return isHidden(this);
+ }).removeClass("in").addClass("out");
+
// Pre-filter members
filter();
// Member filter box
var input = $("#textfilter input");
input.bind("keyup", function(event) {
- if (event.keyCode == 27)
- input.val(""); // escape key
- filter(true);
+
+ switch ( event.keyCode ) {
+
+ case 27: // escape key
+ input.val("");
+ filter(true);
+ break;
+
+ case 38: // up
+ input.val("");
+ filter(false);
+ window.scrollTo(0, $("body").offset().top);
+ input.focus();
+ break;
+
+ case 33: //page up
+ input.val("");
+ filter(false);
+ break;
+
+ case 34: //page down
+ input.val("");
+ filter(false);
+ break;
+
+ default:
+ window.scrollTo(0, $("#mbrsel").offset().top);
+ filter(true);
+ break;
+
+ }
});
input.focus(function(event) {
input.select();
@@ -35,13 +81,13 @@ $(document).ready(function(){
filter();
});
$(document).keydown(function(event) {
- if(!event.altKey && !event.ctrlKey &&
- (event.keyCode == 27 || (event.keyCode >= 48 && event.keyCode <= 90)) &&
- document.activeElement != $("#textfilter input")[0]) {
- $("#textfilter input").focus();
+
+ if (event.keyCode == 9) { // tab
+ $("#index-input", window.parent.document).focus();
+ input.attr("value", "");
+ return false;
}
});
- $("#textfilter input").focus();
$("#linearization li").click(function(){
if ($(this).hasClass("in")) {
@@ -54,17 +100,49 @@ $(document).ready(function(){
};
filter();
});
- $("#ancestors > ol > li.hideall").click(function() {
+
+ $("#implicits li").click(function(){
+ if ($(this).hasClass("in")) {
+ $(this).removeClass("in");
+ $(this).addClass("out");
+ }
+ else if ($(this).hasClass("out")) {
+ $(this).removeClass("out");
+ $(this).addClass("in");
+ };
+ filter();
+ });
+
+ $("#mbrsel > div[id=ancestors] > ol > li.hideall").click(function() {
$("#linearization li.in").removeClass("in").addClass("out");
$("#linearization li:first").removeClass("out").addClass("in");
+ $("#implicits li.in").removeClass("in").addClass("out");
+
+ if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.showall").hasClass("in")) {
+ $(this).removeClass("out").addClass("in");
+ $("#mbrsel > div[id=ancestors] > ol > li.showall").removeClass("in").addClass("out");
+ }
+
filter();
})
- $("#ancestors > ol > li.showall").click(function() {
- var filtered =
+ $("#mbrsel > div[id=ancestors] > ol > li.showall").click(function() {
+ var filteredLinearization =
$("#linearization li.out").filter(function() {
return ! isHiddenClass($(this).attr("name"));
});
- filtered.removeClass("out").addClass("in");
+ filteredLinearization.removeClass("out").addClass("in");
+
+ var filteredImplicits =
+ $("#implicits li.out").filter(function() {
+ return ! isHidden(this);
+ });
+ filteredImplicits.removeClass("out").addClass("in");
+
+ if ($(this).hasClass("out") && $("#mbrsel > div[id=ancestors] > ol > li.hideall").hasClass("in")) {
+ $(this).removeClass("out").addClass("in");
+ $("#mbrsel > div[id=ancestors] > ol > li.hideall").removeClass("in").addClass("out");
+ }
+
filter();
});
$("#visbl > ol > li.public").click(function() {
@@ -83,18 +161,21 @@ $(document).ready(function(){
});
$("#order > ol > li.alpha").click(function() {
if ($(this).hasClass("out")) {
- $(this).removeClass("out").addClass("in");
- $("#order > ol > li.inherit").removeClass("in").addClass("out");
orderAlpha();
};
})
$("#order > ol > li.inherit").click(function() {
if ($(this).hasClass("out")) {
- $(this).removeClass("out").addClass("in");
- $("#order > ol > li.alpha").removeClass("in").addClass("out");
orderInherit();
};
});
+ $("#order > ol > li.group").click(function() {
+ if ($(this).hasClass("out")) {
+ orderGroup();
+ };
+ });
+ $("#groupedMembers").hide();
+
initInherit();
// Create tooltips
@@ -108,8 +189,10 @@ $(document).ready(function(){
});
/* Add toggle arrows */
- var docAllSigs = $("#template li").has(".fullcomment").find(".signature");
-
+ //var docAllSigs = $("#template li").has(".fullcomment").find(".signature");
+ // trying to speed things up a little bit
+ var docAllSigs = $("#template li[fullComment=yes] .signature");
+
function commentToggleFct(signature){
var parent = signature.parent();
var shortComment = $(".shortcomment", parent);
@@ -129,39 +212,56 @@ $(document).ready(function(){
docAllSigs.click(function() {
commentToggleFct($(this));
});
-
+
/* Linear super types and known subclasses */
- function toggleShowContentFct(outerElement){
- var content = $(".hiddenContent", outerElement);
- var vis = $(":visible", content);
- if (vis.length > 0) {
+ function toggleShowContentFct(e){
+ e.toggleClass("open");
+ var content = $(".hiddenContent", e.parent().get(0));
+ if (content.is(':visible')) {
content.slideUp(100);
- $(".showElement", outerElement).show();
- $(".hideElement", outerElement).hide();
}
else {
content.slideDown(100);
- $(".showElement", outerElement).hide();
- $(".hideElement", outerElement).show();
}
};
- $(".toggleContainer").click(function() {
+
+ $(".toggle:not(.diagram-link)").click(function() {
toggleShowContentFct($(this));
});
-
+
// Set parent window title
windowTitle();
+
+ if ($("#order > ol > li.group").length == 1) { orderGroup(); };
});
function orderAlpha() {
+ $("#order > ol > li.alpha").removeClass("out").addClass("in");
+ $("#order > ol > li.inherit").removeClass("in").addClass("out");
+ $("#order > ol > li.group").removeClass("in").addClass("out");
$("#template > div.parent").hide();
- $("#ancestors").show();
+ $("#template > div.conversion").hide();
+ $("#mbrsel > div[id=ancestors]").show();
filter();
};
function orderInherit() {
+ $("#order > ol > li.inherit").removeClass("out").addClass("in");
+ $("#order > ol > li.alpha").removeClass("in").addClass("out");
+ $("#order > ol > li.group").removeClass("in").addClass("out");
$("#template > div.parent").show();
- $("#ancestors").hide();
+ $("#template > div.conversion").show();
+ $("#mbrsel > div[id=ancestors]").hide();
+ filter();
+};
+
+function orderGroup() {
+ $("#order > ol > li.group").removeClass("out").addClass("in");
+ $("#order > ol > li.alpha").removeClass("in").addClass("out");
+ $("#order > ol > li.inherit").removeClass("in").addClass("out");
+ $("#template > div.parent").hide();
+ $("#template > div.conversion").hide();
+ $("#mbrsel > div[id=ancestors]").show();
filter();
};
@@ -172,41 +272,74 @@ function orderInherit() {
* - initialises a control variable used by the filter method to control whether filtering happens on flat members
* or on inheritance-grouped members. */
function initInherit() {
- // parents is a map from fully-qualified names to the DOM node of parent headings.
- var parents = new Object();
+ // inheritParents is a map from fully-qualified names to the DOM node of parent headings.
+ var inheritParents = new Object();
+ var groupParents = new Object();
$("#inheritedMembers > div.parent").each(function(){
- parents[$(this).attr("name")] = $(this);
+ inheritParents[$(this).attr("name")] = $(this);
+ });
+ $("#inheritedMembers > div.conversion").each(function(){
+ inheritParents[$(this).attr("name")] = $(this);
+ });
+ $("#groupedMembers > div.group").each(function(){
+ groupParents[$(this).attr("name")] = $(this);
});
+
$("#types > ol > li").each(function(){
var mbr = $(this);
this.mbrText = mbr.find("> .fullcomment .cmt").text();
var qualName = mbr.attr("name");
var owner = qualName.slice(0, qualName.indexOf("#"));
var name = qualName.slice(qualName.indexOf("#") + 1);
- var parent = parents[owner];
- if (parent != undefined) {
- var types = $("> .types > ol", parent);
+ var inheritParent = inheritParents[owner];
+ if (inheritParent != undefined) {
+ var types = $("> .types > ol", inheritParent);
+ if (types.length == 0) {
+ inheritParent.append("<div class='types members'><h3>Type Members</h3><ol></ol></div>");
+ types = $("> .types > ol", inheritParent);
+ }
+ var clone = mbr.clone();
+ clone[0].mbrText = this.mbrText;
+ types.append(clone);
+ }
+ var group = mbr.attr("group")
+ var groupParent = groupParents[group];
+ if (groupParent != undefined) {
+ var types = $("> .types > ol", groupParent);
if (types.length == 0) {
- parent.append("<div class='types members'><h3>Type Members</h3><ol></ol></div>");
- types = $("> .types > ol", parent);
+ groupParent.append("<div class='types members'><ol></ol></div>");
+ types = $("> .types > ol", groupParent);
}
var clone = mbr.clone();
clone[0].mbrText = this.mbrText;
types.append(clone);
}
});
+
$("#values > ol > li").each(function(){
var mbr = $(this);
this.mbrText = mbr.find("> .fullcomment .cmt").text();
var qualName = mbr.attr("name");
var owner = qualName.slice(0, qualName.indexOf("#"));
var name = qualName.slice(qualName.indexOf("#") + 1);
- var parent = parents[owner];
- if (parent != undefined) {
- var values = $("> .values > ol", parent);
+ var inheritParent = inheritParents[owner];
+ if (inheritParent != undefined) {
+ var values = $("> .values > ol", inheritParent);
if (values.length == 0) {
- parent.append("<div class='values members'><h3>Value Members</h3><ol></ol></div>");
- values = $("> .values > ol", parent);
+ inheritParent.append("<div class='values members'><h3>Value Members</h3><ol></ol></div>");
+ values = $("> .values > ol", inheritParent);
+ }
+ var clone = mbr.clone();
+ clone[0].mbrText = this.mbrText;
+ values.append(clone);
+ }
+ var group = mbr.attr("group")
+ var groupParent = groupParents[group];
+ if (groupParent != undefined) {
+ var values = $("> .values > ol", groupParent);
+ if (values.length == 0) {
+ groupParent.append("<div class='values members'><ol></ol></div>");
+ values = $("> .values > ol", groupParent);
}
var clone = mbr.clone();
clone[0].mbrText = this.mbrText;
@@ -216,35 +349,64 @@ function initInherit() {
$("#inheritedMembers > div.parent").each(function() {
if ($("> div.members", this).length == 0) { $(this).remove(); };
});
+ $("#inheritedMembers > div.conversion").each(function() {
+ if ($("> div.members", this).length == 0) { $(this).remove(); };
+ });
+ $("#groupedMembers > div.group").each(function() {
+ if ($("> div.members", this).length == 0) { $(this).remove(); };
+ });
};
-function filter(scrollToMember) {
+/* filter used to take boolean scrollToMember */
+function filter() {
var query = $.trim($("#textfilter input").val()).toLowerCase();
query = query.replace(/[-[\]{}()*+?.,\\^$|#]/g, "\\$&").replace(/\s+/g, "|");
var queryRegExp = new RegExp(query, "i");
var privateMembersHidden = $("#visbl > ol > li.public").hasClass("in");
var orderingAlphabetic = $("#order > ol > li.alpha").hasClass("in");
- var hiddenSuperclassElements = orderingAlphabetic ? $("#linearization > li.out") : $("#linearization > li:gt(0)");
- var hiddenSuperclasses = hiddenSuperclassElements.map(function() {
+ var orderingInheritance = $("#order > ol > li.inherit").hasClass("in");
+ var orderingGroups = $("#order > ol > li.group").hasClass("in");
+ var hiddenSuperclassElementsLinearization = orderingInheritance ? $("#linearization > li:gt(0)") : $("#linearization > li.out");
+ var hiddenSuperclassesLinearization = hiddenSuperclassElementsLinearization.map(function() {
+ return $(this).attr("name");
+ }).get();
+ var hiddenSuperclassElementsImplicits = orderingInheritance ? $("#implicits > li") : $("#implicits > li.out");
+ var hiddenSuperclassesImplicits = hiddenSuperclassElementsImplicits.map(function() {
return $(this).attr("name");
}).get();
var hideInheritedMembers;
-
- if(orderingAlphabetic) {
+
+ if (orderingAlphabetic) {
+ $("#allMembers").show();
$("#inheritedMembers").hide();
+ $("#groupedMembers").hide();
hideInheritedMembers = true;
$("#allMembers > .members").each(filterFunc);
- }
- else {
- $("#inheritedMembers").show();
+ } else if (orderingGroups) {
+ $("#groupedMembers").show();
+ $("#inheritedMembers").hide();
+ $("#allMembers").hide();
hideInheritedMembers = true;
- $("#allMembers > .members").each(filterFunc);
+ $("#groupedMembers > .group > .members").each(filterFunc);
+ $("#groupedMembers > div.group").each(function() {
+ $(this).show();
+ if ($("> div.members", this).not(":hidden").length == 0) {
+ $(this).hide();
+ } else {
+ $(this).show();
+ }
+ });
+ } else if (orderingInheritance) {
+ $("#inheritedMembers").show();
+ $("#groupedMembers").hide();
+ $("#allMembers").hide();
hideInheritedMembers = false;
$("#inheritedMembers > .parent > .members").each(filterFunc);
+ $("#inheritedMembers > .conversion > .members").each(filterFunc);
}
-
+
function filterFunc() {
var membersVisible = false;
var members = $(this);
@@ -262,12 +424,18 @@ function filter(scrollToMember) {
ownerIndex = name.lastIndexOf(".");
}
var owner = name.slice(0, ownerIndex);
- for (var i = 0; i < hiddenSuperclasses.length; i++) {
- if (hiddenSuperclasses[i] == owner) {
+ for (var i = 0; i < hiddenSuperclassesLinearization.length; i++) {
+ if (hiddenSuperclassesLinearization[i] == owner) {
mbr.hide();
return;
}
- }
+ };
+ for (var i = 0; i < hiddenSuperclassesImplicits.length; i++) {
+ if (hiddenSuperclassesImplicits[i] == owner) {
+ mbr.hide();
+ return;
+ }
+ };
}
if (query && !(queryRegExp.test(name) || queryRegExp.test(this.mbrText))) {
mbr.hide();
@@ -276,17 +444,13 @@ function filter(scrollToMember) {
mbr.show();
membersVisible = true;
});
-
+
if (membersVisible)
members.show();
else
members.hide();
};
- if (scrollToMember) {
- window.scrollTo(0, $("#mbrsel").offset().top);
- }
-
return false;
};
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
new file mode 100644
index 0000000..8898325
Binary files /dev/null and b/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png
new file mode 100644
index 0000000..6c6e1fe
Binary files /dev/null and b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png
new file mode 100644
index 0000000..04c8794
Binary files /dev/null and b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png
new file mode 100644
index 0000000..d815252
Binary files /dev/null and b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
new file mode 100644
index 0000000..ef2615b
Binary files /dev/null and b/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png differ
diff --git a/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala
new file mode 100644
index 0000000..9ba8914
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala
@@ -0,0 +1,114 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Manohar Jonnalagedda
+ */
+
+package scala.tools.nsc
+package doc
+package model
+
+import base.comment._
+
+import reporters.Reporter
+import scala.collection._
+import scala.reflect.internal.util.{NoPosition, Position}
+import scala.language.postfixOps
+
+/** The comment parser transforms raw comment strings into `Comment` objects.
+ * Call `parse` to run the parser. Note that the parser is stateless and
+ * should only be built once for a given Scaladoc run.
+ *
+ * @param reporter The reporter on which user messages (error, warnings) should be printed.
+ *
+ * @author Manohar Jonnalagedda
+ * @author Gilles Dubochet */
+trait CommentFactory extends base.CommentFactoryBase {
+ thisFactory: ModelFactory with CommentFactory with MemberLookup =>
+
+ val global: Global
+ import global.{ reporter, definitions, Symbol }
+
+ protected val commentCache = mutable.HashMap.empty[(Symbol, TemplateImpl), Comment]
+
+ def addCommentBody(sym: Symbol, inTpl: TemplateImpl, docStr: String, docPos: global.Position): Symbol = {
+ commentCache += (sym, inTpl) -> parse(docStr, docStr, docPos, None)
+ sym
+ }
+
+ def comment(sym: Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl): Option[Comment] = {
+ val key = (sym, inTpl)
+ if (commentCache isDefinedAt key)
+ Some(commentCache(key))
+ else {
+ val c = defineComment(sym, currentTpl, inTpl)
+ if (c isDefined) commentCache += (sym, inTpl) -> c.get
+ c
+ }
+ }
+
+ /** A comment is usualy created by the parser, however for some special
+ * cases we have to give some `inTpl` comments (parent class for example)
+ * to the comment of the symbol.
+ * This function manages some of those cases : Param accessor and Primary constructor */
+ def defineComment(sym: Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl):Option[Comment] = {
+
+ //param accessor case
+ // We just need the @param argument, we put it into the body
+ if( sym.isParamAccessor &&
+ inTpl.comment.isDefined &&
+ inTpl.comment.get.valueParams.isDefinedAt(sym.encodedName)) {
+ val comContent = Some(inTpl.comment.get.valueParams(sym.encodedName))
+ Some(createComment(body0 = comContent))
+ }
+
+ // Primary constructor case
+ // We need some content of the class definition : @constructor for the body,
+ // @param and @deprecated, we can add some more if necessary
+ else if (sym.isPrimaryConstructor && inTpl.comment.isDefined ) {
+ val tplComment = inTpl.comment.get
+ // If there is nothing to put into the comment there is no need to create it
+ if(tplComment.constructor.isDefined ||
+ tplComment.throws != Map.empty ||
+ tplComment.valueParams != Map.empty ||
+ tplComment.typeParams != Map.empty ||
+ tplComment.deprecated.isDefined
+ )
+ Some(createComment( body0 = tplComment.constructor,
+ throws0 = tplComment.throws,
+ valueParams0 = tplComment.valueParams,
+ typeParams0 = tplComment.typeParams,
+ deprecated0 = tplComment.deprecated
+ ))
+ else None
+ }
+
+ //other comment cases
+ // parse function will make the comment
+ else {
+ val rawComment = global.expandedDocComment(sym, inTpl.sym).trim
+ if (rawComment != "") {
+ val tplOpt = if (currentTpl.isDefined) currentTpl else Some(inTpl)
+ val c = parse(rawComment, global.rawDocComment(sym), global.docCommentPos(sym), tplOpt)
+ Some(c)
+ }
+ else None
+ }
+
+ }
+
+ protected def parse(comment: String, src: String, pos: Position, inTplOpt: Option[DocTemplateImpl] = None): Comment = {
+ assert(!inTplOpt.isDefined || inTplOpt.get != null)
+ parseAtSymbol(comment, src, pos, inTplOpt map (_.sym))
+ }
+
+ /** Parses a string containing wiki syntax into a `Comment` object.
+ * Note that the string is assumed to be clean:
+ * - Removed Scaladoc start and end markers.
+ * - Removed start-of-line star and one whitespace afterwards (if present).
+ * - Removed all end-of-line whitespace.
+ * - Only `endOfLine` is used to mark line endings. */
+ def parseWiki(string: String, pos: Position, inTplOpt: Option[DocTemplateImpl]): Body = {
+ assert(!inTplOpt.isDefined || inTplOpt.get != null)
+ parseWikiAtSymbol(string,pos, inTplOpt map (_.sym))
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
index 42f0f67..cbc1a23 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Manohar Jonnalagedda
* @author Gilles Dubochet
*/
@@ -9,8 +9,8 @@ package doc
package model
import scala.collection._
-import comment._
-
+import base.comment._
+import diagram._
/** An entity in a Scaladoc universe. Entities are declarations in the program and correspond to symbols in the
* compiler. Entities model the following Scala concepts:
@@ -24,6 +24,9 @@ import comment._
* - annotations. */
trait Entity {
+ /** Similar to symbols, so we can track entities */
+ def id: Int
+
/** The name of the entity. Note that the name does not qualify this entity uniquely; use its `qualifiedName`
* instead. */
def name : String
@@ -48,6 +51,17 @@ trait Entity {
/** The annotations attached to this entity, if any. */
def annotations: List[Annotation]
+ /** The kind of the entity */
+ def kind: String
+
+ /** Whether or not the template was defined in a package object */
+ def inPackageObject: Boolean
+
+ /** Indicates whether this entity lives in the types namespace (classes, traits, abstract/alias types) */
+ def isType: Boolean
+
+ /** Indicates whether this entity lives in the terms namespace (objects, packages, methods, values) */
+ def isTerm: Boolean
}
object Entity {
@@ -83,12 +97,14 @@ trait TemplateEntity extends Entity {
/** Whether documentation is available for this template. */
def isDocTemplate: Boolean
+ /** Whether documentation is available for this template. */
+ def isNoDocMemberTemplate: Boolean
+
/** Whether this template is a case class. */
def isCaseClass: Boolean
/** The self-type of this template, if it differs from the template type. */
def selfType : Option[TypeEntity]
-
}
@@ -100,6 +116,9 @@ trait MemberEntity extends Entity {
/** The comment attached to this member, if any. */
def comment: Option[Comment]
+ /** The group this member is from */
+ def group: String
+
/** The template of which this entity is a member. */
def inTemplate: DocTemplateEntity
@@ -167,7 +186,33 @@ trait MemberEntity extends Entity {
/** Whether this member is abstract. */
def isAbstract: Boolean
+ /** If this symbol is a use case, the useCaseOf will contain the member it was derived from, containing the full
+ * signature and the complete parameter descriptions. */
+ def useCaseOf: Option[MemberEntity]
+
+ /** If this member originates from an implicit conversion, we set the implicit information to the correct origin */
+ def byConversion: Option[ImplicitConversion]
+
+ /** The identity of this member, used for linking */
+ def signature: String
+
+ /** Compatibility signature, will be removed from future versions */
+ def signatureCompat: String
+
+ /** Indicates whether the member is inherited by implicit conversion */
+ def isImplicitlyInherited: Boolean
+
+ /** Indicates whether there is another member with the same name in the template that will take precendence */
+ def isShadowedImplicit: Boolean
+
+ /** Indicates whether there are other implicitly inherited members that have similar signatures (and thus they all
+ * become ambiguous) */
+ def isAmbiguousImplicit: Boolean
+
+ /** Indicates whether the implicitly inherited member is shadowed or ambiguous in its template */
+ def isShadowedOrAmbiguousImplicit: Boolean
}
+
object MemberEntity {
// Oh contravariance, contravariance, wherefore art thou contravariance?
// Note: the above works for both the commonly misunderstood meaning of the line and the real one.
@@ -175,22 +220,42 @@ object MemberEntity {
}
/** An entity that is parameterized by types */
-trait HigherKinded extends Entity {
+trait HigherKinded {
/** The type parameters of this entity. */
def typeParams: List[TypeParam]
-
}
/** A template (class, trait, object or package) which is referenced in the universe, but for which no further
* documentation is available. Only templates for which a source file is given are documented by Scaladoc. */
-trait NoDocTemplate extends TemplateEntity
+trait NoDocTemplate extends TemplateEntity {
+ def kind =
+ if (isClass) "class"
+ else if (isTrait) "trait"
+ else if (isObject) "object"
+ else ""
+}
+/** An inherited template that was not documented in its original owner - example:
+ * in classpath: trait T { class C } -- T (and implicitly C) are not documented
+ * in the source: trait U extends T -- C appears in U as a MemberTemplateImpl
+ * -- that is, U has a member for it but C doesn't get its own page */
+trait MemberTemplateEntity extends TemplateEntity with MemberEntity with HigherKinded {
+
+ /** The value parameters of this case class, or an empty list if this class is not a case class. As case class value
+ * parameters cannot be curried, the outer list has exactly one element. */
+ def valueParams: List[List[ValueParam]]
+
+ /** The direct super-type of this template
+ e.g: {{{class A extends B[C[Int]] with D[E]}}} will have two direct parents: class B and D
+ NOTE: we are dropping the refinement here! */
+ def parentTypes: List[(TemplateEntity, TypeEntity)]
+}
/** A template (class, trait, object or package) for which documentation is available. Only templates for which
* a source file is given are documented by Scaladoc. */
-trait DocTemplateEntity extends TemplateEntity with MemberEntity {
+trait DocTemplateEntity extends MemberTemplateEntity {
/** The list of templates such that each is a member of the template that follows it; the first template is always
* this template, the last the root package entity. */
@@ -204,12 +269,6 @@ trait DocTemplateEntity extends TemplateEntity with MemberEntity {
* only if the `docsourceurl` setting has been set. */
def sourceUrl: Option[java.net.URL]
- /** The direct super-type of this template. */
- def parentType: Option[TypeEntity]
-
- @deprecated("Use `linearizationTemplates` and `linearizationTypes` instead", "2.9.0")
- def linearization: List[(TemplateEntity, TypeEntity)]
-
/** All class, trait and object templates which are part of this template's linearization, in lineratization order.
* This template's linearization contains all of its direct and indirect super-classes and super-traits. */
def linearizationTemplates: List[TemplateEntity]
@@ -218,9 +277,13 @@ trait DocTemplateEntity extends TemplateEntity with MemberEntity {
* This template's linearization contains all of its direct and indirect super-types. */
def linearizationTypes: List[TypeEntity]
- /**All class, trait and object templates for which this template is a direct or indirect super-class or super-trait.
- * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
- def subClasses: List[DocTemplateEntity]
+ /** All class, trait and object templates for which this template is a direct or indirect super-class or super-trait.
+ * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
+ def allSubClasses: List[DocTemplateEntity]
+
+ /** All class, trait and object templates for which this template is a *direct* super-class or super-trait.
+ * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
+ def directSubClasses: List[DocTemplateEntity]
/** All members of this template. If this template is a package, only templates for which documentation is available
* in the universe (`DocTemplateEntity`) are listed. */
@@ -228,7 +291,7 @@ trait DocTemplateEntity extends TemplateEntity with MemberEntity {
/** All templates that are members of this template. If this template is a package, only templates for which
* documentation is available in the universe (`DocTemplateEntity`) are listed. */
- def templates: List[DocTemplateEntity]
+ def templates: List[TemplateEntity with MemberEntity]
/** All methods that are members of this template. */
def methods: List[Def]
@@ -242,40 +305,63 @@ trait DocTemplateEntity extends TemplateEntity with MemberEntity {
/** All type aliases that are members of this template. */
def aliasTypes: List[AliasType]
+ /** The primary constructor of this class, if it has been defined. */
+ def primaryConstructor: Option[Constructor]
+
+ /** All constructors of this class, including the primary constructor. */
+ def constructors: List[Constructor]
+
/** The companion of this template, or none. If a class and an object are defined as a pair of the same name, the
* other entity of the pair is the companion. */
def companion: Option[DocTemplateEntity]
-}
+ /** The implicit conversions this template (class or trait, objects and packages are not affected) */
+ def conversions: List[ImplicitConversion]
+ /** The shadowing information for the implicitly added members */
+ def implicitsShadowing: Map[MemberEntity, ImplicitMemberShadowing]
-/** A trait template. */
-trait Trait extends DocTemplateEntity with HigherKinded
+ /** Classes that can be implcitly converted to this class */
+ def incomingImplicitlyConvertedClasses: List[(DocTemplateEntity, ImplicitConversion)]
+ /** Classes to which this class can be implicitly converted to
+ NOTE: Some classes might not be included in the scaladoc run so they will be NoDocTemplateEntities */
+ def outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity, ImplicitConversion)]
-/** A class template. */
-trait Class extends Trait with HigherKinded {
+ /** If this template takes place in inheritance and implicit conversion relations, it will be shown in this diagram */
+ def inheritanceDiagram: Option[Diagram]
- /** The primary constructor of this class, if it has been defined. */
- def primaryConstructor: Option[Constructor]
+ /** If this template contains other templates, such as classes and traits, they will be shown in this diagram */
+ def contentDiagram: Option[Diagram]
- /** All constructors of this class, including the primary constructor. */
- def constructors: List[Constructor]
+ /** Returns the group description taken either from this template or its linearizationTypes */
+ def groupDescription(group: String): Option[Body]
- /** The value parameters of this case class, or an empty list if this class is not a case class. As case class value
- * parameters cannot be curried, the outer list has exactly one element. */
- def valueParams: List[List[ValueParam]]
+ /** Returns the group description taken either from this template or its linearizationTypes */
+ def groupPriority(group: String): Int
+
+ /** Returns the group description taken either from this template or its linearizationTypes */
+ def groupName(group: String): String
+}
+/** A trait template. */
+trait Trait extends MemberTemplateEntity {
+ def kind = "trait"
}
+/** A class template. */
+trait Class extends MemberTemplateEntity {
+ override def kind = "class"
+}
/** An object template. */
-trait Object extends DocTemplateEntity
-
+trait Object extends MemberTemplateEntity {
+ def kind = "object"
+}
/** A package template. A package is in the universe if it is declared as a package object, or if it
* contains at least one template. */
-trait Package extends Object {
+trait Package extends DocTemplateEntity {
/** The package of which this package is a member. */
def inTemplate: Package
@@ -286,6 +372,8 @@ trait Package extends Object {
/** All packages that are member of this package. */
def packages: List[Package]
+
+ override def kind = "package"
}
@@ -304,7 +392,6 @@ trait NonTemplateMemberEntity extends MemberEntity {
/** Whether this member is a bridge member. A bridge member does only exist for binary compatibility reasons
* and should not appear in ScalaDoc. */
def isBridge: Boolean
-
}
@@ -315,6 +402,7 @@ trait Def extends NonTemplateMemberEntity with HigherKinded {
* Each parameter block is a list of value parameters. */
def valueParams : List[List[ValueParam]]
+ def kind = "method"
}
@@ -329,15 +417,18 @@ trait Constructor extends NonTemplateMemberEntity {
* element. */
def valueParams : List[List[ValueParam]]
+ def kind = "constructor"
}
/** A value (`val`), lazy val (`lazy val`) or variable (`var`) of a template. */
-trait Val extends NonTemplateMemberEntity
+trait Val extends NonTemplateMemberEntity {
+ def kind = "[lazy] value/variable"
+}
/** An abstract type member of a template. */
-trait AbstractType extends NonTemplateMemberEntity with HigherKinded {
+trait AbstractType extends MemberTemplateEntity with HigherKinded {
/** The lower bound for this abstract type, if it has been defined. */
def lo: Option[TypeEntity]
@@ -345,34 +436,31 @@ trait AbstractType extends NonTemplateMemberEntity with HigherKinded {
/** The upper bound for this abstract type, if it has been defined. */
def hi: Option[TypeEntity]
+ def kind = "abstract type"
}
/** An type alias of a template. */
-trait AliasType extends NonTemplateMemberEntity with HigherKinded {
+trait AliasType extends MemberTemplateEntity with HigherKinded {
/** The type aliased by this type alias. */
def alias: TypeEntity
+ def kind = "type alias"
}
/** A parameter to an entity. */
-trait ParameterEntity extends Entity {
-
- /** Whether this parameter is a type parameter. */
- def isTypeParam: Boolean
-
- /** Whether this parameter is a value parameter. */
- def isValueParam: Boolean
+trait ParameterEntity {
+ def name: String
}
/** A type parameter to a class, trait, or method. */
trait TypeParam extends ParameterEntity with HigherKinded {
- /** The variance of this type type parameter. Valid values are "+", "-", and the empty string. */
+ /** The variance of this type parameter. Valid values are "+", "-", and the empty string. */
def variance: String
/** The lower bound for this type parameter, if it has been defined. */
@@ -380,7 +468,6 @@ trait TypeParam extends ParameterEntity with HigherKinded {
/** The upper bound for this type parameter, if it has been defined. */
def hi: Option[TypeEntity]
-
}
@@ -395,7 +482,6 @@ trait ValueParam extends ParameterEntity {
/** Whether this value parameter is implicit. */
def isImplicit: Boolean
-
}
@@ -408,4 +494,138 @@ trait Annotation extends Entity {
/** The arguments passed to the constructor of the annotation class. */
def arguments: List[ValueArgument]
+ def kind = "annotation"
+}
+
+/** A trait that signals the member results from an implicit conversion */
+trait ImplicitConversion {
+
+ /** The source of the implicit conversion*/
+ def source: DocTemplateEntity
+
+ /** The result type after the conversion */
+ def targetType: TypeEntity
+
+ /** The result type after the conversion
+ * Note: not all targetTypes have a corresponding template. Examples include conversions resulting in refinement
+ * types. Need to check it's not option!
+ */
+ def targetTemplate: Option[TemplateEntity]
+
+ /** The components of the implicit conversion type parents */
+ def targetTypeComponents: List[(TemplateEntity, TypeEntity)]
+
+ /** The entity for the method that performed the conversion, if it's documented (or just its name, otherwise) */
+ def convertorMethod: Either[MemberEntity, String]
+
+ /** A short name of the convertion */
+ def conversionShortName: String
+
+ /** A qualified name uniquely identifying the convertion (currently: the conversion method's qualified name) */
+ def conversionQualifiedName: String
+
+ /** The entity that performed the conversion */
+ def convertorOwner: TemplateEntity
+
+ /** The constraints that the transformations puts on the type parameters */
+ def constraints: List[Constraint]
+
+ /** The members inherited by this implicit conversion */
+ def members: List[MemberEntity]
+
+ /** Is this a hidden implicit conversion (as specified in the settings) */
+ def isHiddenConversion: Boolean
+}
+
+/** Shadowing captures the information that the member is shadowed by some other members
+ * There are two cases of implicitly added member shadowing:
+ * 1) shadowing from a original class member (the class already has that member)
+ * in this case, it won't be possible to call the member directly, the type checker will fail attempting to adapt
+ * the call arguments (or if they fit it will call the original class' method)
+ * 2) shadowing from other possible implicit conversions ()
+ * this will result in an ambiguous implicit converion error
+ */
+trait ImplicitMemberShadowing {
+ /** The members that shadow the current entry use .inTemplate to get to the template name */
+ def shadowingMembers: List[MemberEntity]
+
+ /** The members that ambiguate this implicit conversion
+ Note: for ambiguatingMembers you have the following invariant:
+ assert(ambiguatingMembers.foreach(_.byConversion.isDefined) */
+ def ambiguatingMembers: List[MemberEntity]
+
+ def isShadowed: Boolean = !shadowingMembers.isEmpty
+ def isAmbiguous: Boolean = !ambiguatingMembers.isEmpty
+}
+
+/** A trait that encapsulates a constraint necessary for implicit conversion */
+trait Constraint
+
+/** A constraint involving a type parameter which must be in scope */
+trait ImplicitInScopeConstraint extends Constraint {
+ /** The type of the implicit value required */
+ def implicitType: TypeEntity
+
+ /** toString for debugging */
+ override def toString = "an implicit _: " + implicitType.name + " must be in scope"
+}
+
+trait TypeClassConstraint extends ImplicitInScopeConstraint with TypeParamConstraint {
+ /** Type class name */
+ def typeClassEntity: TemplateEntity
+
+ /** toString for debugging */
+ override def toString = typeParamName + " is a class of type " + typeClassEntity.qualifiedName + " (" +
+ typeParamName + ": " + typeClassEntity.name + ")"
+}
+
+trait KnownTypeClassConstraint extends TypeClassConstraint {
+ /** Type explanation, takes the type parameter name and generates the explanation */
+ def typeExplanation: (String) => String
+
+ /** toString for debugging */
+ override def toString = typeExplanation(typeParamName) + " (" + typeParamName + ": " + typeClassEntity.name + ")"
+}
+
+/** A constraint involving a type parameter */
+trait TypeParamConstraint extends Constraint {
+ /** The type parameter involved */
+ def typeParamName: String
+}
+
+trait EqualTypeParamConstraint extends TypeParamConstraint {
+ /** The rhs */
+ def rhs: TypeEntity
+ /** toString for debugging */
+ override def toString = typeParamName + " is " + rhs.name + " (" + typeParamName + " =:= " + rhs.name + ")"
+}
+
+trait BoundedTypeParamConstraint extends TypeParamConstraint {
+ /** The lower bound */
+ def lowerBound: TypeEntity
+
+ /** The upper bound */
+ def upperBound: TypeEntity
+
+ /** toString for debugging */
+ override def toString = typeParamName + " is a superclass of " + lowerBound.name + " and a subclass of " +
+ upperBound.name + " (" + typeParamName + " >: " + lowerBound.name + " <: " + upperBound.name + ")"
+}
+
+trait LowerBoundedTypeParamConstraint extends TypeParamConstraint {
+ /** The lower bound */
+ def lowerBound: TypeEntity
+
+ /** toString for debugging */
+ override def toString = typeParamName + " is a superclass of " + lowerBound.name + " (" + typeParamName + " >: " +
+ lowerBound.name + ")"
+}
+
+trait UpperBoundedTypeParamConstraint extends TypeParamConstraint {
+ /** The lower bound */
+ def upperBound: TypeEntity
+
+ /** toString for debugging */
+ override def toString = typeParamName + " is a subclass of " + upperBound.name + " (" + typeParamName + " <: " +
+ upperBound.name + ")"
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
old mode 100644
new mode 100755
index ef3c2be..4ee6daf
--- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Pedro Furlanetto
*/
@@ -15,12 +15,12 @@ object IndexModelFactory {
lazy val firstLetterIndex: Map[Char, SymbolMap] = {
- val result = new mutable.HashMap[Char,SymbolMap] {
+ object result extends mutable.HashMap[Char,SymbolMap] {
/* Owner template ordering */
implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase }
/* symbol name ordering */
- implicit def orderingMap = math.Ordering.String.on { x: String => x.toLowerCase }
+ implicit def orderingMap = math.Ordering.String
def addMember(d: MemberEntity) = {
val firstLetter = {
@@ -35,7 +35,6 @@ object IndexModelFactory {
} + d
this(firstLetter) = letter + (d.name -> members)
}
-
}
//@scala.annotation.tailrec // TODO
@@ -45,11 +44,7 @@ object IndexModelFactory {
case tpl: DocTemplateEntity =>
result.addMember(tpl)
gather(tpl)
- case alias: AliasType =>
- result.addMember(alias)
- case absType: AbstractType =>
- result.addMember(absType)
- case non: NonTemplateMemberEntity if !non.isConstructor =>
+ case non: MemberEntity if !non.isConstructor =>
result.addMember(non)
case x @ _ =>
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
new file mode 100644
index 0000000..23259a4
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
@@ -0,0 +1,63 @@
+package scala.tools.nsc
+package doc
+package model
+
+import base._
+
+/** This trait extracts all required information for documentation from compilation units */
+trait MemberLookup extends base.MemberLookupBase {
+ thisFactory: ModelFactory =>
+
+ import global._
+ import definitions.{ NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass }
+
+ override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] =
+ findTemplateMaybe(sym) match {
+ case Some(tpl) => Some(LinkToTpl(tpl))
+ case None =>
+ findTemplateMaybe(site) flatMap { inTpl =>
+ inTpl.members find (_.asInstanceOf[EntityImpl].sym == sym) map (LinkToMember(_, inTpl))
+ }
+ }
+
+ override def chooseLink(links: List[LinkTo]): LinkTo = {
+ val mbrs = links.collect {
+ case lm at LinkToMember(mbr: MemberEntity, _) => (mbr, lm)
+ }
+ if (mbrs.isEmpty)
+ links.head
+ else
+ mbrs.min(Ordering[MemberEntity].on[(MemberEntity, LinkTo)](_._1))._2
+ }
+
+ override def toString(link: LinkTo) = link match {
+ case LinkToTpl(tpl: EntityImpl) => tpl.sym.toString
+ case LinkToMember(mbr: EntityImpl, inTpl: EntityImpl) =>
+ mbr.sym.signatureString + " in " + inTpl.sym.toString
+ case _ => link.toString
+ }
+
+ override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = {
+ val sym1 =
+ if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass
+ else if (sym.isPackage)
+ /* Get package object which has associatedFile ne null */
+ sym.info.member(newTermName("package"))
+ else sym
+ Option(sym1.associatedFile) flatMap (_.underlyingSource) flatMap { src =>
+ val path = src.path
+ settings.extUrlMapping get path map { url =>
+ LinkToExternal(name, url + "#" + name)
+ }
+ } orElse {
+ // Deprecated option.
+ settings.extUrlPackageMapping find {
+ case (pkg, _) => name startsWith pkg
+ } map {
+ case (_, url) => LinkToExternal(name, url + "#" + name)
+ }
+ }
+ }
+
+ override def warnNoLink = !settings.docNoLinkWarnings.value
+}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index 2a7f113..d9b173b 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -1,10 +1,12 @@
-/* NSC -- new Scala compiler -- Copyright 2007-2011 LAMP/EPFL */
+/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */
package scala.tools.nsc
package doc
package model
-import comment._
+import base._
+import base.comment._
+import diagram._
import scala.collection._
import scala.util.matching.Regex
@@ -17,19 +19,32 @@ import model.{ RootPackage => RootPackageEntity }
/** This trait extracts all required information for documentation from compilation units */
class ModelFactory(val global: Global, val settings: doc.Settings) {
- thisFactory: ModelFactory with CommentFactory with TreeFactory =>
+ thisFactory: ModelFactory
+ with ModelFactoryImplicitSupport
+ with ModelFactoryTypeSupport
+ with DiagramFactory
+ with CommentFactory
+ with TreeFactory
+ with MemberLookup =>
import global._
- import definitions.{ ObjectClass, ScalaObjectClass, RootPackage, EmptyPackage, NothingClass, AnyClass, AnyValClass, AnyRefClass }
+ import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass }
+ import rootMirror.{ RootPackage, RootClass, EmptyPackage }
- private var droppedPackages = 0
- def templatesCount = templatesCache.size - droppedPackages
+ // Defaults for member grouping, that may be overridden by the template
+ val defaultGroup = "Ungrouped"
+ val defaultGroupName = "Ungrouped"
+ val defaultGroupDesc = None
+ val defaultGroupPriority = 1000
- private var modelFinished = false
+ def templatesCount = docTemplatesCache.count(_._2.isDocTemplate) - droppedPackages.size
+
+ private var _modelFinished = false
+ def modelFinished: Boolean = _modelFinished
private var universe: Universe = null
private def dbg(msg: String) = if (sys.props contains "scala.scaladoc.debug") println(msg)
- private def closestPackage(sym: Symbol) = {
+ protected def closestPackage(sym: Symbol) = {
if (sym.isPackage || sym.isPackageClass) sym
else sym.enclosingPackage
}
@@ -39,30 +54,30 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
"memberSym " + memberSym + " templateSym " + templateSym + " encls = " +
closestPackage(memberSym) + ", " + closestPackage(templateSym)
)
- memberSym.inDefaultNamespace || (closestPackage(memberSym) == closestPackage(templateSym))
+ memberSym.isOmittablePrefix || (closestPackage(memberSym) == closestPackage(templateSym))
}
- private lazy val noSubclassCache = Set(AnyClass, AnyRefClass, ObjectClass, ScalaObjectClass)
-
- /** */
def makeModel: Option[Universe] = {
val universe = new Universe { thisUniverse =>
thisFactory.universe = thisUniverse
val settings = thisFactory.settings
- private val rootPackageMaybe = makeRootPackage
- val rootPackage = rootPackageMaybe.orNull
+ val rootPackage = modelCreation.createRootPackage
}
- modelFinished = true
+ _modelFinished = true
+ // complete the links between model entities, everthing that couldn't have been done before
+ universe.rootPackage.completeModel
+
Some(universe) filter (_.rootPackage != null)
}
- /** */
- protected val templatesCache =
- new mutable.LinkedHashMap[Symbol, DocTemplateImpl]
-
- def findTemplate(query: String): Option[DocTemplateImpl] = {
- if (!modelFinished) sys.error("cannot find template in unfinished universe")
- templatesCache.values find { tpl => tpl.qualifiedName == query && !tpl.isObject }
+ // state:
+ var ids = 0
+ private val droppedPackages = mutable.Set[PackageImpl]()
+ protected val docTemplatesCache = new mutable.LinkedHashMap[Symbol, DocTemplateImpl]
+ protected val noDocTemplatesCache = new mutable.LinkedHashMap[Symbol, NoDocTemplateImpl]
+ def packageDropped(tpl: DocTemplateImpl) = tpl match {
+ case p: PackageImpl => droppedPackages(p)
+ case _ => false
}
def optimize(str: String): String =
@@ -70,41 +85,60 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
/* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
- abstract class EntityImpl(val sym: Symbol, inTpl: => TemplateImpl) extends Entity {
+ abstract class EntityImpl(val sym: Symbol, val inTpl: TemplateImpl) extends Entity {
+ val id = { ids += 1; ids }
val name = optimize(sym.nameString)
+ val universe = thisFactory.universe
+
+ // Debugging:
+ // assert(id != 36, sym + " " + sym.getClass)
+ //println("Creating entity #" + id + " [" + kind + " " + qualifiedName + "] for sym " + sym.kindString + " " + sym.ownerChain.reverse.map(_.name).mkString("."))
+
def inTemplate: TemplateImpl = inTpl
def toRoot: List[EntityImpl] = this :: inTpl.toRoot
def qualifiedName = name
- val universe = thisFactory.universe
def annotations = sym.annotations.map(makeAnnotation)
+ def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject
+ def isType = sym.name.isTypeName
+ def isTerm = sym.name.isTermName
}
trait TemplateImpl extends EntityImpl with TemplateEntity {
override def qualifiedName: String =
- if (inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name)
+ if (inTemplate == null || inTemplate.isRootPackage) name else optimize(inTemplate.qualifiedName + "." + name)
def isPackage = sym.isPackage
def isTrait = sym.isTrait
def isClass = sym.isClass && !sym.isTrait
def isObject = sym.isModule && !sym.isPackage
def isCaseClass = sym.isCaseClass
def isRootPackage = false
+ def isNoDocMemberTemplate = false
def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this))
}
- class NoDocTemplateImpl(sym: Symbol, inTpl: => TemplateImpl) extends EntityImpl(sym, inTpl) with TemplateImpl with NoDocTemplate {
- def isDocTemplate = false
- }
-
- abstract class MemberImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
- lazy val comment =
- if (inTpl == null) None else thisFactory.comment(sym, inTpl)
+ abstract class MemberImpl(sym: Symbol, inTpl: DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
+ lazy val comment = {
+ // If the current tpl is a DocTemplate, we consider itself as the root for resolving link targets (instead of the
+ // package the class is in) -- so people can refer to methods directly [[foo]], instead of using [[MyClass.foo]]
+ // in the doc comment of MyClass
+ val thisTpl = this match {
+ case d: DocTemplateImpl => Some(d)
+ case _ => None
+ }
+ if (inTpl != null) thisFactory.comment(sym, thisTpl, inTpl) else None
+ }
+ def group = if (comment.isDefined) comment.get.group.getOrElse(defaultGroup) else defaultGroup
override def inTemplate = inTpl
override def toRoot: List[MemberImpl] = this :: inTpl.toRoot
- def inDefinitionTemplates =
- if (inTpl == null)
- makeRootPackage.toList
- else
- makeTemplate(sym.owner) :: (sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
+ def inDefinitionTemplates = this match {
+ case mb: NonTemplateMemberEntity if (mb.useCaseOf.isDefined) =>
+ mb.useCaseOf.get.inDefinitionTemplates
+ case _ =>
+ if (inTpl == null)
+ List(makeRootPackage)
+ else
+ makeTemplate(sym.owner)::(sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
+ }
def visibility = {
if (sym.isPrivateLocal) PrivateInInstance()
else if (sym.isProtectedLocal) ProtectedInInstance()
@@ -124,16 +158,23 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
if (sym.isImplicit) fgs += Paragraph(Text("implicit"))
if (sym.isSealed) fgs += Paragraph(Text("sealed"))
if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract"))
- if (!sym.isTrait && (sym hasFlag Flags.DEFERRED)) fgs += Paragraph(Text("abstract"))
+ /* Resetting the DEFERRED flag is a little trick here for refined types: (example from scala.collections)
+ * {{{
+ * implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] {
+ * def isParallel = ...
+ * }}}
+ * the type the method returns is TraversableOps, which has all-abstract symbols. But in reality, it couldn't have
+ * any abstract terms, otherwise it would fail compilation. So we reset the DEFERRED flag. */
+ if (!sym.isTrait && (sym hasFlag Flags.DEFERRED) && (!isImplicitlyInherited)) fgs += Paragraph(Text("abstract"))
if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final"))
fgs.toList
}
def deprecation =
if (sym.isDeprecated)
Some((sym.deprecationMessage, sym.deprecationVersion) match {
- case (Some(msg), Some(ver)) => parseWiki("''(Since version " + ver + ")'' " + msg, NoPosition)
- case (Some(msg), None) => parseWiki(msg, NoPosition)
- case (None, Some(ver)) => parseWiki("''(Since version " + ver + ")''", NoPosition)
+ case (Some(msg), Some(ver)) => parseWiki("''(Since version " + ver + ")'' " + msg, NoPosition, Some(inTpl))
+ case (Some(msg), None) => parseWiki(msg, NoPosition, Some(inTpl))
+ case (None, Some(ver)) => parseWiki("''(Since version " + ver + ")''", NoPosition, Some(inTpl))
case (None, None) => Body(Nil)
})
else
@@ -141,9 +182,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def migration =
if(sym.hasMigrationAnnotation)
Some((sym.migrationMessage, sym.migrationVersion) match {
- case (Some(msg), Some(ver)) => parseWiki("''(Changed in version " + ver + ")'' " + msg, NoPosition)
- case (Some(msg), None) => parseWiki(msg, NoPosition)
- case (None, Some(ver)) => parseWiki("''(Changed in version " + ver + ")''", NoPosition)
+ case (Some(msg), Some(ver)) => parseWiki("''(Changed in version " + ver + ")'' " + msg, NoPosition, Some(inTpl))
+ case (Some(msg), None) => parseWiki(msg, NoPosition, Some(inTpl))
+ case (None, Some(ver)) => parseWiki("''(Changed in version " + ver + ")''", NoPosition, Some(inTpl))
case (None, None) => Body(Nil)
})
else
@@ -158,7 +199,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
case NullaryMethodType(res) => resultTpe(res)
case _ => tpe
}
- makeTypeInTemplateContext(resultTpe(sym.tpe), inTemplate, sym)
+ val tpe = if (!isImplicitlyInherited) sym.tpe else byConversion.get.toType memberInfo sym
+ makeTypeInTemplateContext(resultTpe(tpe), inTemplate, sym)
}
def isDef = false
def isVal = false
@@ -169,28 +211,118 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def isAliasType = false
def isAbstractType = false
def isAbstract =
- ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED))) ||
+ // for the explanation of conversion == null see comment on flags
+ ((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED)) && (!isImplicitlyInherited)) ||
sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic
def isTemplate = false
+ def signature = externalSignature(sym)
+ lazy val signatureCompat = {
+
+ def defParams(mbr: Any): String = mbr match {
+ case d: MemberEntity with Def =>
+ val paramLists: List[String] =
+ if (d.valueParams.isEmpty) Nil
+ else d.valueParams map (ps => ps map (_.resultType.name) mkString ("(",",",")"))
+ paramLists.mkString
+ case _ => ""
+ }
+
+ def tParams(mbr: Any): String = mbr match {
+ case hk: HigherKinded if !hk.typeParams.isEmpty =>
+ def boundsToString(hi: Option[TypeEntity], lo: Option[TypeEntity]): String = {
+ def bound0(bnd: Option[TypeEntity], pre: String): String = bnd match {
+ case None => ""
+ case Some(tpe) => pre ++ tpe.toString
+ }
+ bound0(hi, "<:") ++ bound0(lo, ">:")
+ }
+ "[" + hk.typeParams.map(tp => tp.variance + tp.name + tParams(tp) + boundsToString(tp.hi, tp.lo)).mkString(", ") + "]"
+ case _ => ""
+ }
+
+ (name + tParams(this) + defParams(this) +":"+ resultType.name).replaceAll("\\s","") // no spaces allowed, they break links
+ }
+ // these only apply for NonTemplateMemberEntities
+ def useCaseOf: Option[MemberEntity] = None
+ def byConversion: Option[ImplicitConversionImpl] = None
+ def isImplicitlyInherited = false
+ def isShadowedImplicit = false
+ def isAmbiguousImplicit = false
+ def isShadowedOrAmbiguousImplicit = false
+ }
+
+ /** A template that is not documented at all. The class is instantiated during lookups, to indicate that the class
+ * exists, but should not be documented (either it's not included in the source or it's not visible)
+ */
+ class NoDocTemplateImpl(sym: Symbol, inTpl: TemplateImpl) extends EntityImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with NoDocTemplate {
+ assert(modelFinished)
+ assert(!(noDocTemplatesCache isDefinedAt sym))
+ noDocTemplatesCache += (sym -> this)
+ def isDocTemplate = false
+ }
+
+ /** An inherited template that was not documented in its original owner - example:
+ * in classpath: trait T { class C } -- T (and implicitly C) are not documented
+ * in the source: trait U extends T -- C appears in U as a MemberTemplateImpl -- that is, U has a member for it
+ * but C doesn't get its own page
+ */
+ abstract class MemberTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with MemberTemplateEntity {
+ // no templates cache for this class, each owner gets its own instance
+ override def isTemplate = true
+ def isDocTemplate = false
+ override def isNoDocMemberTemplate = true
+ lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name)
+ def valueParams: List[List[ValueParam]] = Nil /** TODO, these are now only computed for DocTemplates */
+
+ // Seems unused
+ // def parentTemplates =
+ // if (sym.isPackage || sym == AnyClass)
+ // List()
+ // else
+ // sym.tpe.parents.flatMap { tpe: Type =>
+ // val tSym = tpe.typeSymbol
+ // if (tSym != NoSymbol)
+ // List(makeTemplate(tSym))
+ // else
+ // List()
+ // } filter (_.isInstanceOf[DocTemplateEntity])
+
+ def parentTypes =
+ if (sym.isPackage || sym == AnyClass) List() else {
+ val tps = (this match {
+ case a: AliasType => sym.tpe.dealias.parents
+ case a: AbstractType => sym.info.bounds match {
+ case TypeBounds(lo, RefinedType(parents, decls)) => parents
+ case TypeBounds(lo, hi) => hi :: Nil
+ case _ => Nil
+ }
+ case _ => sym.tpe.parents
+ }) map { _.asSeenFrom(sym.thisType, sym) }
+ makeParentTypes(RefinedType(tps, EmptyScope), Some(this), inTpl)
+ }
}
/** The instantiation of `TemplateImpl` triggers the creation of the following entities:
* All ancestors of the template and all non-package members.
*/
- abstract class DocTemplateImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with DocTemplateEntity {
- //if (inTpl != null) println("mbr " + sym + " in " + (inTpl.toRoot map (_.sym)).mkString(" > "))
+ abstract class DocTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberTemplateImpl(sym, inTpl) with DocTemplateEntity {
+ assert(!modelFinished)
+ assert(!(docTemplatesCache isDefinedAt sym), sym)
+ docTemplatesCache += (sym -> this)
+
if (settings.verbose.value)
inform("Creating doc template for " + sym)
- templatesCache += (sym -> this)
- lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name)
override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot
- def inSource =
- if (sym.sourceFile != null && ! sym.isSynthetic)
- Some((sym.sourceFile, sym.pos.line))
+
+ protected def inSourceFromSymbol(symbol: Symbol) =
+ if (symbol.sourceFile != null && ! symbol.isSynthetic)
+ Some((symbol.sourceFile, symbol.pos.line))
else
None
+ def inSource = inSourceFromSymbol(sym)
+
def sourceUrl = {
def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/")
val assumedSourceRoot = fixPath(settings.sourcepath.value) stripSuffix "/"
@@ -211,16 +343,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
else None
}
- def parentType = {
- if (sym.isPackage || sym == AnyClass) None else {
- val tps =
- (sym.tpe.parents filter (_ != ScalaObjectClass.tpe)) map { _.asSeenFrom(sym.thisType, sym) }
- Some(makeType(RefinedType(tps, EmptyScope), inTpl))
- }
- }
- val linearization: List[(TemplateEntity, TypeEntity)] = {
- sym.ancestors filter (_ != ScalaObjectClass) map { ancestor =>
- val typeEntity = makeType(sym.info.baseType(ancestor), this)
+
+ protected def linearizationFromSymbol(symbol: Symbol): List[(TemplateEntity, TypeEntity)] = {
+ symbol.ancestors map { ancestor =>
+ val typeEntity = makeType(symbol.info.baseType(ancestor), this)
val tmplEntity = makeTemplate(ancestor) match {
case tmpl: DocTemplateImpl => tmpl registerSubClass this ; tmpl
case tmpl => tmpl
@@ -229,239 +355,449 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
}
+ lazy val linearization = linearizationFromSymbol(sym)
def linearizationTemplates = linearization map { _._1 }
def linearizationTypes = linearization map { _._2 }
+ /* Subclass cache */
private lazy val subClassesCache = (
- if (noSubclassCache(sym)) null
+ if (sym == AnyRefClass) null
else mutable.ListBuffer[DocTemplateEntity]()
)
def registerSubClass(sc: DocTemplateEntity): Unit = {
if (subClassesCache != null)
subClassesCache += sc
}
- def subClasses = if (subClassesCache == null) Nil else subClassesCache.toList
-
- protected lazy val memberSyms =
- // Only this class's constructors are part of its members, inherited constructors are not.
- sym.info.members.filter(s => localShouldDocument(s) && (!s.isConstructor || s.owner == sym))
-
- val members = memberSyms flatMap (makeMember(_, this))
- val templates = members collect { case c: DocTemplateEntity => c }
- val methods = members collect { case d: Def => d }
- val values = members collect { case v: Val => v }
- val abstractTypes = members collect { case t: AbstractType => t }
- val aliasTypes = members collect { case t: AliasType => t }
+ def allSubClasses = if (subClassesCache == null) Nil else subClassesCache.toList
+ def directSubClasses = allSubClasses.filter(_.parentTypes.map(_._1).contains(this))
+
+ /* Implcitly convertible class cache */
+ private var implicitlyConvertibleClassesCache: mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)] = null
+ def registerImplicitlyConvertibleClass(dtpl: DocTemplateImpl, conv: ImplicitConversionImpl): Unit = {
+ if (implicitlyConvertibleClassesCache == null)
+ implicitlyConvertibleClassesCache = mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)]()
+ implicitlyConvertibleClassesCache += ((dtpl, conv))
+ }
+
+ def incomingImplicitlyConvertedClasses: List[(DocTemplateImpl, ImplicitConversionImpl)] =
+ if (implicitlyConvertibleClassesCache == null)
+ List()
+ else
+ implicitlyConvertibleClassesCache.toList
+
+ // the implicit conversions are generated eagerly, but the members generated by implicit conversions are added
+ // lazily, on completeModel
+ val conversions: List[ImplicitConversionImpl] =
+ if (settings.docImplicits.value) makeImplicitConversions(sym, this) else Nil
+
+ // members as given by the compiler
+ lazy val memberSyms = sym.info.members.filter(s => membersShouldDocument(s, this)).toList
+
+ // the inherited templates (classes, traits or objects)
+ var memberSymsLazy = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this))
+ // the direct members (methods, values, vars, types and directly contained templates)
+ var memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_))
+ // the members generated by the symbols in memberSymsEager
+ val ownMembers = (memberSymsEager.flatMap(makeMember(_, None, this)))
+
+ // all the members that are documentented PLUS the members inherited by implicit conversions
+ var members: List[MemberImpl] = ownMembers
+
+ def templates = members collect { case c: TemplateEntity with MemberEntity => c }
+ def methods = members collect { case d: Def => d }
+ def values = members collect { case v: Val => v }
+ def abstractTypes = members collect { case t: AbstractType => t }
+ def aliasTypes = members collect { case t: AliasType => t }
+
+ /**
+ * This is the final point in the core model creation: no DocTemplates are created after the model has finished, but
+ * inherited templates and implicit members are added to the members at this point.
+ */
+ def completeModel(): Unit = {
+ // DFS completion
+ // since alias types and abstract types have no own members, there's no reason for them to call completeModel
+ if (!sym.isAliasType && !sym.isAbstractType)
+ for (member <- members)
+ member match {
+ case d: DocTemplateImpl => d.completeModel
+ case _ =>
+ }
+
+ members :::= memberSymsLazy.map(modelCreation.createLazyTemplateMember(_, this))
+
+ // compute linearization to register subclasses
+ linearization
+ outgoingImplicitlyConvertedClasses
+
+ // the members generated by the symbols in memberSymsEager PLUS the members from the usecases
+ val allMembers = ownMembers ::: ownMembers.flatMap(_.useCaseOf.map(_.asInstanceOf[MemberImpl])).distinct
+ implicitsShadowing = makeShadowingTable(allMembers, conversions, this)
+ // finally, add the members generated by implicit conversions
+ members :::= conversions.flatMap(_.memberImpls)
+ }
+
+ var implicitsShadowing = Map[MemberEntity, ImplicitMemberShadowing]()
+
+ lazy val outgoingImplicitlyConvertedClasses: List[(TemplateEntity, TypeEntity, ImplicitConversionImpl)] =
+ conversions flatMap (conv =>
+ if (!implicitExcluded(conv.conversionQualifiedName))
+ conv.targetTypeComponents map {
+ case pair@(template, tpe) =>
+ template match {
+ case d: DocTemplateImpl if (d != this) => d.registerImplicitlyConvertibleClass(this, conv)
+ case _ => // nothing
+ }
+ (pair._1, pair._2, conv)
+ }
+ else List()
+ )
+
override def isTemplate = true
- def isDocTemplate = true
- def companion = sym.companionSymbol match {
- case NoSymbol => None
- case comSym if !isEmptyJavaObject(comSym) && (comSym.isClass || comSym.isModule) =>
- Some(makeDocTemplate(comSym, inTpl))
- case _ => None
+ override def isDocTemplate = true
+ private[this] lazy val companionSymbol =
+ if (sym.isAliasType || sym.isAbstractType) {
+ inTpl.sym.info.member(sym.name.toTermName) match {
+ case NoSymbol => NoSymbol
+ case s =>
+ s.info match {
+ case ot: OverloadedType =>
+ NoSymbol
+ case _ =>
+ // that's to navigate from val Foo: FooExtractor to FooExtractor :)
+ s.info.resultType.typeSymbol
+ }
+ }
+ }
+ else
+ sym.companionSymbol
+
+ def companion =
+ companionSymbol match {
+ case NoSymbol => None
+ case comSym if !isEmptyJavaObject(comSym) && (comSym.isClass || comSym.isModule) =>
+ makeTemplate(comSym) match {
+ case d: DocTemplateImpl => Some(d)
+ case _ => None
+ }
+ case _ => None
+ }
+
+ def constructors: List[MemberImpl with Constructor] = if (isClass) members collect { case d: Constructor => d } else Nil
+ def primaryConstructor: Option[MemberImpl with Constructor] = if (isClass) constructors find { _.isPrimary } else None
+ override def valueParams =
+ // we don't want params on a class (non case class) signature
+ if (isCaseClass) primaryConstructor match {
+ case Some(const) => const.sym.paramss map (_ map (makeValueParam(_, this)))
+ case None => List()
+ }
+ else List.empty
+
+ // These are generated on-demand, make sure you don't call them more than once
+ def inheritanceDiagram = makeInheritanceDiagram(this)
+ def contentDiagram = makeContentDiagram(this)
+
+ def groupSearch[T](extractor: Comment => Option[T]): Option[T] = {
+ val comments = comment +: linearizationTemplates.collect { case dtpl: DocTemplateImpl => dtpl.comment }
+ comments.flatten.map(extractor).flatten.headOption orElse {
+ Option(inTpl) flatMap (_.groupSearch(extractor))
+ }
}
+
+ def groupDescription(group: String): Option[Body] = groupSearch(_.groupDesc.get(group)) orElse { if (group == defaultGroup) defaultGroupDesc else None }
+ def groupPriority(group: String): Int = groupSearch(_.groupPrio.get(group)) getOrElse { if (group == defaultGroup) defaultGroupPriority else 0 }
+ def groupName(group: String): String = groupSearch(_.groupNames.get(group)) getOrElse { if (group == defaultGroup) defaultGroupName else group }
}
- abstract class PackageImpl(sym: Symbol, inTpl: => PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
+ abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
override def inTemplate = inTpl
override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
- val packages = members collect { case p: Package => p }
+ override lazy val (inSource, linearization) = {
+ val representive = sym.info.members.find {
+ s => s.isPackageObject
+ } getOrElse sym
+ (inSourceFromSymbol(representive), linearizationFromSymbol(representive))
+ }
+ def packages = members collect { case p: PackageImpl if !(droppedPackages contains p) => p }
}
abstract class RootPackageImpl(sym: Symbol) extends PackageImpl(sym, null) with RootPackageEntity
- abstract class NonTemplateMemberImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, inTpl) with NonTemplateMemberEntity {
+ abstract class NonTemplateMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl],
+ override val useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl)
+ extends MemberImpl(sym, inTpl) with NonTemplateMemberEntity {
+ override lazy val comment = {
+ val inRealTpl =
+ /* Variable precendence order for implicitly added members: Take the variable defifinitions from ...
+ * 1. the target of the implicit conversion
+ * 2. the definition template (owner)
+ * 3. the current template
+ */
+ if (conversion.isDefined) findTemplateMaybe(conversion.get.toType.typeSymbol) match {
+ case Some(d) if d != makeRootPackage => d //in case of NoSymbol, it will give us the root package
+ case _ => findTemplateMaybe(sym.owner) match {
+ case Some(d) if d != makeRootPackage => d //in case of NoSymbol, it will give us the root package
+ case _ => inTpl
+ }
+ } else inTpl
+ if (inRealTpl != null) thisFactory.comment(sym, None, inRealTpl) else None
+ }
+
override def qualifiedName = optimize(inTemplate.qualifiedName + "#" + name)
- lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "#" + name)
- def isUseCase = sym.isSynthetic
+ lazy val definitionName = {
+ // this contrived name is here just to satisfy some older tests -- if you decide to remove it, be my guest, and
+ // also remove property("package object") from test/scaladoc/scalacheck/HtmlFactoryTest.scala so you don't break
+ // the test suite...
+ val packageObject = if (inPackageObject) ".package" else ""
+ if (!conversion.isDefined) optimize(inDefinitionTemplates.head.qualifiedName + packageObject + "#" + name)
+ else optimize(conversion.get.conversionQualifiedName + packageObject + "#" + name)
+ }
def isBridge = sym.isBridge
+ def isUseCase = useCaseOf.isDefined
+ override def byConversion: Option[ImplicitConversionImpl] = conversion
+ override def isImplicitlyInherited = { assert(modelFinished); conversion.isDefined }
+ override def isShadowedImplicit = isImplicitlyInherited && inTpl.implicitsShadowing.get(this).map(_.isShadowed).getOrElse(false)
+ override def isAmbiguousImplicit = isImplicitlyInherited && inTpl.implicitsShadowing.get(this).map(_.isAmbiguous).getOrElse(false)
+ override def isShadowedOrAmbiguousImplicit = isShadowedImplicit || isAmbiguousImplicit
}
- abstract class NonTemplateParamMemberImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends NonTemplateMemberImpl(sym, inTpl) {
- def valueParams =
- sym.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
+ abstract class NonTemplateParamMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl],
+ useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl)
+ extends NonTemplateMemberImpl(sym, conversion, useCaseOf, inTpl) {
+ def valueParams = {
+ val info = if (!isImplicitlyInherited) sym.info else conversion.get.toType memberInfo sym
+ info.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
if (p.nameString contains "$") makeValueParam(p, inTpl, optimize("arg" + i)) else makeValueParam(p, inTpl)
}}
+ }
}
- abstract class ParameterImpl(sym: Symbol, inTpl: => TemplateImpl) extends EntityImpl(sym, inTpl) with ParameterEntity {
- override def inTemplate = inTpl
+ abstract class ParameterImpl(val sym: Symbol, val inTpl: TemplateImpl) extends ParameterEntity {
+ val name = optimize(sym.nameString)
}
- private trait TypeBoundsImpl extends EntityImpl {
+ private trait AliasImpl {
+ def sym: Symbol
+ def inTpl: TemplateImpl
+ def alias = makeTypeInTemplateContext(sym.tpe.dealias, inTpl, sym)
+ }
+
+ private trait TypeBoundsImpl {
+ def sym: Symbol
+ def inTpl: TemplateImpl
def lo = sym.info.bounds match {
case TypeBounds(lo, hi) if lo.typeSymbol != NothingClass =>
- Some(makeTypeInTemplateContext(appliedType(lo, sym.info.typeParams map {_.tpe}), inTemplate, sym))
+ Some(makeTypeInTemplateContext(appliedType(lo, sym.info.typeParams map {_.tpe}), inTpl, sym))
case _ => None
}
def hi = sym.info.bounds match {
case TypeBounds(lo, hi) if hi.typeSymbol != AnyClass =>
- Some(makeTypeInTemplateContext(appliedType(hi, sym.info.typeParams map {_.tpe}), inTemplate, sym))
+ Some(makeTypeInTemplateContext(appliedType(hi, sym.info.typeParams map {_.tpe}), inTpl, sym))
case _ => None
}
}
- trait HigherKindedImpl extends EntityImpl with HigherKinded {
+ trait HigherKindedImpl extends HigherKinded {
+ def sym: Symbol
+ def inTpl: TemplateImpl
def typeParams =
- sym.typeParams map (makeTypeParam(_, inTemplate))
+ sym.typeParams map (makeTypeParam(_, inTpl))
}
-
/* ============== MAKER METHODS ============== */
- /** */
+ /** This method makes it easier to work with the different kinds of symbols created by scalac by stripping down the
+ * package object abstraction and placing members directly in the package.
+ *
+ * Here's the explanation of what we do. The code:
+ *
+ * package foo {
+ * object `package` {
+ * class Bar
+ * }
+ * }
+ *
+ * will yield this Symbol structure:
+ * +---------+ (2)
+ * | |
+ * +---------------+ +---------- v ------- | ---+ +--------+ (2)
+ * | package foo#1 <---(1)---- module class foo#2 | | | |
+ * +---------------+ | +------------------ | -+ | +------------------- v ---+ |
+ * | | package object foo#3 <-----(1)---- module class package#4 | |
+ * | +----------------------+ | | +---------------------+ | |
+ * +--------------------------+ | | class package$Bar#5 | | |
+ * | +----------------- | -+ | |
+ * +------------------- | ---+ |
+ * | |
+ * +--------+
+ * (1) sourceModule
+ * (2) you get out of owners with .owner
+ *
+ * and normalizeTemplate(Bar.owner) will get us the package, instead of the module class of the package object.
+ */
def normalizeTemplate(aSym: Symbol): Symbol = aSym match {
- case null | EmptyPackage | NoSymbol =>
+ case null | rootMirror.EmptyPackage | NoSymbol =>
normalizeTemplate(RootPackage)
- case ScalaObjectClass | ObjectClass =>
+ case ObjectClass =>
normalizeTemplate(AnyRefClass)
case _ if aSym.isPackageObject =>
- aSym
+ normalizeTemplate(aSym.owner)
case _ if aSym.isModuleClass =>
normalizeTemplate(aSym.sourceModule)
case _ =>
aSym
}
- def makeRootPackage: Option[PackageImpl] =
- makePackage(RootPackage, null)
+ /**
+ * These are all model construction methods. Please do not use them directly, they are calling each other recursively
+ * starting from makeModel. On the other hand, makeTemplate, makeAnnotation, makeMember, makeType should only be used
+ * after the model was created (modelFinished=true) otherwise assertions will start failing.
+ */
+ object modelCreation {
- /** Creates a package entity for the given symbol or returns `None` if the symbol does not denote a package that
- * contains at least one ''documentable'' class, trait or object. Creating a package entity */
- def makePackage(aSym: Symbol, inTpl: => PackageImpl): Option[PackageImpl] = {
- val bSym = normalizeTemplate(aSym)
- if (templatesCache isDefinedAt (bSym))
- Some(templatesCache(bSym) match {case p: PackageImpl => p})
- else {
- val pack =
- if (bSym == RootPackage)
- new RootPackageImpl(bSym) {
- override lazy val comment =
- if(settings.docRootContent.isDefault) None
- else {
- import Streamable._
- Path(settings.docRootContent.value) match {
- case f : File => {
- val rootComment = closing(f.inputStream)(is => parse(slurp(is), "", NoPosition))
- Some(rootComment)
- }
- case _ => None
- }
- }
- override val name = "root"
- override def inTemplate = this
- override def toRoot = this :: Nil
- override def qualifiedName = "_root_"
- override def inheritedFrom = Nil
- override def isRootPackage = true
- override protected lazy val memberSyms =
- (bSym.info.members ++ EmptyPackage.info.members) filter { s =>
- s != EmptyPackage && s != RootPackage
- }
- }
- else
- new PackageImpl(bSym, inTpl) {}
- if (pack.templates.isEmpty) {
- droppedPackages += 1
- None
+ def createRootPackage: PackageImpl = docTemplatesCache.get(RootPackage) match {
+ case Some(root: PackageImpl) => root
+ case _ => modelCreation.createTemplate(RootPackage, null) match {
+ case Some(root: PackageImpl) => root
+ case _ => sys.error("Scaladoc: Unable to create root package!")
}
- else Some(pack)
}
- }
-
- /** */
- def makeTemplate(aSym: Symbol): TemplateImpl = {
- val bSym = normalizeTemplate(aSym)
- if (bSym == RootPackage)
- makeRootPackage.get
- else if (bSym.isPackage)
- makeTemplate(bSym.owner) match {
- case inPkg: PackageImpl => makePackage(bSym, inPkg) getOrElse (new NoDocTemplateImpl(bSym, inPkg))
- case _ => throw new Error("'" + bSym + "' must be in a package")
- }
- else if (templateShouldDocument(bSym))
- makeTemplate(bSym.owner) match {
- case inDTpl: DocTemplateImpl => makeDocTemplate(bSym, inDTpl)
- case _ => throw new Error("'" + bSym + "' must be in documentable template")
- }
- else
- new NoDocTemplateImpl(bSym, makeTemplate(bSym.owner))
- }
+ /**
+ * Create a template, either a package, class, trait or object
+ */
+ def createTemplate(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = {
+ // don't call this after the model finished!
+ assert(!modelFinished)
- /** */
- def makeDocTemplate(aSym: Symbol, inTpl: => DocTemplateImpl): DocTemplateImpl = {
- val bSym = normalizeTemplate(aSym)
- val minimumInTpl =
- if (bSym.owner != inTpl.sym)
- makeTemplate(aSym.owner) match {
- case inDTpl: DocTemplateImpl => inDTpl
- case inNDTpl => throw new Error("'" + bSym + "' is owned by '" + inNDTpl + "' which is not documented")
+ def createRootPackageComment: Option[Comment] =
+ if(settings.docRootContent.isDefault) None
+ else {
+ import Streamable._
+ Path(settings.docRootContent.value) match {
+ case f : File => {
+ val rootComment = closing(f.inputStream)(is => parse(slurp(is), "", NoPosition, Option(inTpl)))
+ Some(rootComment)
+ }
+ case _ => None
+ }
}
- else
- inTpl
- if (templatesCache isDefinedAt (bSym))
- templatesCache(bSym)
- else if (bSym.isModule || (bSym.isAliasType && bSym.tpe.typeSymbol.isModule))
- new DocTemplateImpl(bSym, minimumInTpl) with Object
- else if (bSym.isTrait || (bSym.isAliasType && bSym.tpe.typeSymbol.isTrait))
- new DocTemplateImpl(bSym, minimumInTpl) with Trait
- else if (bSym.isClass || (bSym.isAliasType && bSym.tpe.typeSymbol.isClass))
- new DocTemplateImpl(bSym, minimumInTpl) with Class {
- def valueParams =
- // we don't want params on a class (non case class) signature
- if (isCaseClass) List(sym.constrParamAccessors map (makeValueParam(_, this)))
- else List.empty
- val constructors =
- members collect { case d: Constructor => d }
- def primaryConstructor = constructors find { _.isPrimary }
+
+ def createDocTemplate(bSym: Symbol, inTpl: DocTemplateImpl): DocTemplateImpl = {
+ assert(!modelFinished) // only created BEFORE the model is finished
+ if (bSym.isAliasType && bSym != AnyRefClass)
+ new DocTemplateImpl(bSym, inTpl) with AliasImpl with AliasType { override def isAliasType = true }
+ else if (bSym.isAbstractType)
+ new DocTemplateImpl(bSym, inTpl) with TypeBoundsImpl with AbstractType { override def isAbstractType = true }
+ else if (bSym.isModule)
+ new DocTemplateImpl(bSym, inTpl) with Object {}
+ else if (bSym.isTrait)
+ new DocTemplateImpl(bSym, inTpl) with Trait {}
+ else if (bSym.isClass || bSym == AnyRefClass)
+ new DocTemplateImpl(bSym, inTpl) with Class {}
+ else
+ sys.error("'" + bSym + "' isn't a class, trait or object thus cannot be built as a documentable template.")
}
- else
- throw new Error("'" + bSym + "' that isn't a class, trait or object cannot be built as a documentable template")
- }
- /** */
- def makeAnnotation(annot: AnnotationInfo): Annotation = {
- val aSym = annot.atp.typeSymbol
- new EntityImpl(aSym, makeTemplate(aSym.owner)) with Annotation {
- lazy val annotationClass =
- makeTemplate(annot.atp.typeSymbol)
- val arguments = { // lazy
- def noParams = annot.args map { _ => None }
- val params: List[Option[ValueParam]] = annotationClass match {
- case aClass: Class =>
- (aClass.primaryConstructor map { _.valueParams.head }) match {
- case Some(vps) => vps map { Some(_) }
- case None => noParams
+ val bSym = normalizeTemplate(aSym)
+ if (docTemplatesCache isDefinedAt bSym)
+ return Some(docTemplatesCache(bSym))
+
+ /* Three cases of templates:
+ * (1) root package -- special cased for bootstrapping
+ * (2) package
+ * (3) class/object/trait
+ */
+ if (bSym == RootPackage) // (1)
+ Some(new RootPackageImpl(bSym) {
+ override lazy val comment = createRootPackageComment
+ override val name = "root"
+ override def inTemplate = this
+ override def toRoot = this :: Nil
+ override def qualifiedName = "_root_"
+ override def inheritedFrom = Nil
+ override def isRootPackage = true
+ override lazy val memberSyms =
+ (bSym.info.members ++ EmptyPackage.info.members).toList filter { s =>
+ s != EmptyPackage && s != RootPackage
}
- case _ => noParams
- }
- assert(params.length == annot.args.length)
- (params zip annot.args) flatMap { case (param, arg) =>
- makeTree(arg) match {
- case Some(tree) =>
- Some(new ValueArgument {
- def parameter = param
- def value = tree
- })
- case None => None
+ })
+ else if (bSym.isPackage) // (2)
+ if (settings.skipPackage(makeQualifiedName(bSym)))
+ None
+ else
+ inTpl match {
+ case inPkg: PackageImpl =>
+ val pack = new PackageImpl(bSym, inPkg) {}
+ // Used to check package pruning works:
+ //println(pack.qualifiedName)
+ if (pack.templates.filter(_.isDocTemplate).isEmpty && pack.memberSymsLazy.isEmpty) {
+ droppedPackages += pack
+ None
+ } else
+ Some(pack)
+ case _ =>
+ sys.error("'" + bSym + "' must be in a package")
}
- }
+ else {
+ // no class inheritance at this point
+ assert(inOriginalOwner(bSym, inTpl), bSym + " in " + inTpl)
+ Some(createDocTemplate(bSym, inTpl))
+ }
+ }
+
+ /**
+ * After the model is completed, no more DocTemplateEntities are created.
+ * Therefore any symbol that still appears is:
+ * - MemberTemplateEntity (created here)
+ * - NoDocTemplateEntity (created in makeTemplate)
+ */
+ def createLazyTemplateMember(aSym: Symbol, inTpl: DocTemplateImpl): MemberImpl = {
+
+ // Code is duplicate because the anonymous classes are created statically
+ def createNoDocMemberTemplate(bSym: Symbol, inTpl: DocTemplateImpl): MemberTemplateImpl = {
+ assert(modelFinished) // only created AFTER the model is finished
+ if (bSym.isModule || (bSym.isAliasType && bSym.tpe.typeSymbol.isModule))
+ new MemberTemplateImpl(bSym, inTpl) with Object {}
+ else if (bSym.isTrait || (bSym.isAliasType && bSym.tpe.typeSymbol.isTrait))
+ new MemberTemplateImpl(bSym, inTpl) with Trait {}
+ else if (bSym.isClass || (bSym.isAliasType && bSym.tpe.typeSymbol.isClass))
+ new MemberTemplateImpl(bSym, inTpl) with Class {}
+ else
+ sys.error("'" + bSym + "' isn't a class, trait or object thus cannot be built as a member template.")
}
+
+ assert(modelFinished)
+ val bSym = normalizeTemplate(aSym)
+
+ if (docTemplatesCache isDefinedAt bSym)
+ docTemplatesCache(bSym)
+ else
+ docTemplatesCache.get(bSym.owner) match {
+ case Some(inTpl) =>
+ val mbrs = inTpl.members.collect({ case mbr: MemberImpl if mbr.sym == bSym => mbr })
+ assert(mbrs.length == 1)
+ mbrs.head
+ case _ =>
+ // move the class completely to the new location
+ createNoDocMemberTemplate(bSym, inTpl)
+ }
}
}
- /** */
- def makeMember(aSym: Symbol, inTpl: => DocTemplateImpl): List[MemberImpl] = {
+ /** Get the root package */
+ def makeRootPackage: PackageImpl = docTemplatesCache(RootPackage).asInstanceOf[PackageImpl]
- def makeMember0(bSym: Symbol): Option[MemberImpl] = {
+ // TODO: Should be able to override the type
+ def makeMember(aSym: Symbol, conversion: Option[ImplicitConversionImpl], inTpl: DocTemplateImpl): List[MemberImpl] = {
+
+ def makeMember0(bSym: Symbol, useCaseOf: Option[MemberImpl]): Option[MemberImpl] = {
if (bSym.isGetter && bSym.isLazy)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with Val {
+ Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val {
override lazy val comment = // The analyser does not duplicate the lazy val's DocDef when it introduces its accessor.
- thisFactory.comment(bSym.accessed, inTpl) // This hack should be removed after analyser is fixed.
+ thisFactory.comment(bSym.accessed, None, inTpl.asInstanceOf[DocTemplateImpl]) // This hack should be removed after analyser is fixed.
override def isLazyVal = true
})
else if (bSym.isGetter && bSym.accessed.isMutable)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with Val {
+ Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val {
override def isVar = true
})
else if (bSym.isMethod && !bSym.hasAccessorFlag && !bSym.isConstructor && !bSym.isModule) {
@@ -469,39 +805,39 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
if (bSym == definitions.Object_synchronized) {
val cSymInfo = (bSym.info: @unchecked) match {
case PolyType(ts, MethodType(List(bp), mt)) =>
- val cp = bp.cloneSymbol.setInfo(appliedType(definitions.ByNameParamClass.typeConstructor, List(bp.info)))
+ val cp = bp.cloneSymbol.setPos(bp.pos).setInfo(definitions.byNameType(bp.info))
PolyType(ts, MethodType(List(cp), mt))
}
- bSym.cloneSymbol.setInfo(cSymInfo)
+ bSym.cloneSymbol.setPos(bSym.pos).setInfo(cSymInfo)
}
else bSym
}
- Some(new NonTemplateParamMemberImpl(cSym, inTpl) with HigherKindedImpl with Def {
+ Some(new NonTemplateParamMemberImpl(cSym, conversion, useCaseOf, inTpl) with HigherKindedImpl with Def {
override def isDef = true
})
}
else if (bSym.isConstructor)
- Some(new NonTemplateParamMemberImpl(bSym, inTpl) with Constructor {
- override def isConstructor = true
- def isPrimary = sym.isPrimaryConstructor
- })
+ if (conversion.isDefined)
+ None // don't list constructors inherted by implicit conversion
+ else
+ Some(new NonTemplateParamMemberImpl(bSym, conversion, useCaseOf, inTpl) with Constructor {
+ override def isConstructor = true
+ def isPrimary = sym.isPrimaryConstructor
+ })
else if (bSym.isGetter) // Scala field accessor or Java field
- Some(new NonTemplateMemberImpl(bSym, inTpl) with Val {
+ Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val {
override def isVal = true
})
- else if (bSym.isAbstractType)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with AbstractType {
+ else if (bSym.isAbstractType && !typeShouldDocument(bSym, inTpl))
+ Some(new MemberTemplateImpl(bSym, inTpl) with TypeBoundsImpl with AbstractType {
override def isAbstractType = true
})
- else if (bSym.isAliasType)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with HigherKindedImpl with AliasType {
+ else if (bSym.isAliasType && !typeShouldDocument(bSym, inTpl))
+ Some(new MemberTemplateImpl(bSym, inTpl) with AliasImpl with AliasType {
override def isAliasType = true
- def alias = makeTypeInTemplateContext(sym.tpe.dealias, inTpl, sym)
})
- else if (bSym.isPackage)
- inTpl match { case inPkg: PackageImpl => makePackage(bSym, inPkg) }
- else if ((bSym.isClass || bSym.isModule) && templateShouldDocument(bSym))
- Some(makeDocTemplate(bSym, inTpl))
+ else if (!modelFinished && (bSym.isPackage || templateShouldDocument(bSym, inTpl)))
+ modelCreation.createTemplate(bSym, inTpl)
else
None
}
@@ -510,18 +846,95 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
Nil
else {
val allSyms = useCases(aSym, inTpl.sym) map { case (bSym, bComment, bPos) =>
- addCommentBody(bSym, inTpl, bComment, bPos)
+ docComments.put(bSym, DocComment(bComment, bPos)) // put the comment in the list, don't parse it yet, closes SI-4898
+ bSym
}
- (allSyms :+ aSym) flatMap { makeMember0(_) }
+
+ val member = makeMember0(aSym, None)
+ if (allSyms.isEmpty)
+ member.toList
+ else
+ // Use cases replace the original definitions - SI-5054
+ allSyms flatMap { makeMember0(_, member) }
}
+ }
+
+ def findMember(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = {
+ val tplSym = normalizeTemplate(aSym.owner)
+ inTpl.members.find(_.sym == aSym)
+ }
+
+ @deprecated("Use `findLinkTarget` instead.", "2.10.0")
+ def findTemplate(query: String): Option[DocTemplateImpl] = {
+ assert(modelFinished)
+ docTemplatesCache.values find { (tpl: DocTemplateImpl) => tpl.qualifiedName == query && !packageDropped(tpl) && !tpl.isObject }
+ }
+ def findTemplateMaybe(aSym: Symbol): Option[DocTemplateImpl] = {
+ assert(modelFinished)
+ docTemplatesCache.get(normalizeTemplate(aSym)).filterNot(packageDropped(_))
+ }
+
+ def makeTemplate(aSym: Symbol): TemplateImpl = makeTemplate(aSym, None)
+
+ def makeTemplate(aSym: Symbol, inTpl: Option[TemplateImpl]): TemplateImpl = {
+ assert(modelFinished)
+
+ def makeNoDocTemplate(aSym: Symbol, inTpl: TemplateImpl): NoDocTemplateImpl = {
+ val bSym = normalizeTemplate(aSym)
+ noDocTemplatesCache.get(bSym) match {
+ case Some(noDocTpl) => noDocTpl
+ case None => new NoDocTemplateImpl(bSym, inTpl)
+ }
+ }
+
+ findTemplateMaybe(aSym) match {
+ case Some(dtpl) =>
+ dtpl
+ case None =>
+ val bSym = normalizeTemplate(aSym)
+ makeNoDocTemplate(bSym, if (inTpl.isDefined) inTpl.get else makeTemplate(bSym.owner))
+ }
+ }
+
+ def makeAnnotation(annot: AnnotationInfo): scala.tools.nsc.doc.model.Annotation = {
+ val aSym = annot.symbol
+ new EntityImpl(aSym, makeTemplate(aSym.owner)) with scala.tools.nsc.doc.model.Annotation {
+ lazy val annotationClass =
+ makeTemplate(annot.symbol)
+ val arguments = {
+ val paramsOpt: Option[List[ValueParam]] = annotationClass match {
+ case aClass: DocTemplateEntity with Class =>
+ val constr = aClass.constructors collectFirst {
+ case c: MemberImpl if c.sym == annot.original.symbol => c
+ }
+ constr flatMap (_.valueParams.headOption)
+ case _ => None
+ }
+ val argTrees = annot.args map makeTree
+ paramsOpt match {
+ case Some (params) =>
+ params zip argTrees map { case (param, tree) =>
+ new ValueArgument {
+ def parameter = Some(param)
+ def value = tree
+ }
+ }
+ case None =>
+ argTrees map { tree =>
+ new ValueArgument {
+ def parameter = None
+ def value = tree
+ }
+ }
+ }
+ }
+ }
}
/** */
- def makeTypeParam(aSym: Symbol, inTpl: => TemplateImpl): TypeParam =
+ def makeTypeParam(aSym: Symbol, inTpl: TemplateImpl): TypeParam =
new ParameterImpl(aSym, inTpl) with TypeBoundsImpl with HigherKindedImpl with TypeParam {
- def isTypeParam = true
- def isValueParam = false
def variance: String = {
if (sym hasFlag Flags.COVARIANT) "+"
else if (sym hasFlag Flags.CONTRAVARIANT) "-"
@@ -530,36 +943,43 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
/** */
- def makeValueParam(aSym: Symbol, inTpl: => DocTemplateImpl): ValueParam = {
+ def makeValueParam(aSym: Symbol, inTpl: DocTemplateImpl): ValueParam = {
makeValueParam(aSym, inTpl, aSym.nameString)
}
+
/** */
- def makeValueParam(aSym: Symbol, inTpl: => DocTemplateImpl, newName: String): ValueParam =
+ def makeValueParam(aSym: Symbol, inTpl: DocTemplateImpl, newName: String): ValueParam =
new ParameterImpl(aSym, inTpl) with ValueParam {
override val name = newName
- def isTypeParam = false
- def isValueParam = true
def defaultValue =
if (aSym.hasDefault) {
// units.filter should return only one element
(currentRun.units filter (_.source.file == aSym.sourceFile)).toList match {
case List(unit) =>
- (unit.body find (_.symbol == aSym)) match {
- case Some(ValDef(_,_,_,rhs)) => makeTree(rhs)
- case _ => None
+ // SI-4922 `sym == aSym` is insufficent if `aSym` is a clone of symbol
+ // of the parameter in the tree, as can happen with type parametric methods.
+ def isCorrespondingParam(sym: Symbol) = (
+ sym != null &&
+ sym != NoSymbol &&
+ sym.owner == aSym.owner &&
+ sym.name == aSym.name &&
+ sym.isParamWithDefault
+ )
+ unit.body find (t => isCorrespondingParam(t.symbol)) collect {
+ case ValDef(_,_,_,rhs) if rhs ne EmptyTree => makeTree(rhs)
}
case _ => None
}
}
else None
def resultType =
- makeTypeInTemplateContext(sym.tpe, inTpl, sym)
+ makeTypeInTemplateContext(aSym.tpe, inTpl, aSym)
def isImplicit = aSym.isImplicit
}
/** */
- def makeTypeInTemplateContext(aType: Type, inTpl: => TemplateImpl, dclSym: Symbol): TypeEntity = {
+ def makeTypeInTemplateContext(aType: Type, inTpl: TemplateImpl, dclSym: Symbol): TypeEntity = {
def ownerTpl(sym: Symbol): Symbol =
if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner)
val tpe =
@@ -572,120 +992,112 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
makeType(tpe, inTpl)
}
- /** */
- def makeType(aType: Type, inTpl: => TemplateImpl): TypeEntity = {
- def templatePackage = closestPackage(inTpl.sym)
-
- new TypeEntity {
- private val nameBuffer = new StringBuilder
- private var refBuffer = new immutable.TreeMap[Int, (TemplateEntity, Int)]
- private def appendTypes0(types: List[Type], sep: String): Unit = types match {
- case Nil =>
- case tp :: Nil =>
- appendType0(tp)
- case tp :: tps =>
- appendType0(tp)
- nameBuffer append sep
- appendTypes0(tps, sep)
- }
+ /** Get the types of the parents of the current class, ignoring the refinements */
+ def makeParentTypes(aType: Type, tpl: Option[MemberTemplateImpl], inTpl: TemplateImpl): List[(TemplateEntity, TypeEntity)] = aType match {
+ case RefinedType(parents, defs) =>
+ val ignoreParents = Set[Symbol](AnyClass, AnyRefClass, ObjectClass)
+ val filtParents =
+ // we don't want to expose too many links to AnyRef, that will just be redundant information
+ if (tpl.isDefined && { val sym = tpl.get.sym; (!sym.isModule && parents.length < 2) || (sym == AnyValClass) || (sym == AnyRefClass) || (sym == AnyClass) })
+ parents
+ else
+ parents.filterNot((p: Type) => ignoreParents(p.typeSymbol))
- private def appendType0(tpe: Type): Unit = tpe match {
- /* Type refs */
- case tp: TypeRef if definitions.isFunctionType(tp) =>
- val args = tp.normalize.typeArgs
- nameBuffer append '('
- appendTypes0(args.init, ", ")
- nameBuffer append ") ⇒ "
- appendType0(args.last)
- case tp: TypeRef if definitions.isScalaRepeatedParamType(tp) =>
- appendType0(tp.args.head)
- nameBuffer append '*'
- case tp: TypeRef if definitions.isByNameParamType(tp) =>
- nameBuffer append "⇒ "
- appendType0(tp.args.head)
- case tp: TypeRef if definitions.isTupleTypeOrSubtype(tp) =>
- val args = tp.normalize.typeArgs
- nameBuffer append '('
- appendTypes0(args, ", ")
- nameBuffer append ')'
- case TypeRef(pre, aSym, targs) =>
- val preSym = pre.widen.typeSymbol
- // There's a work in progress here trying to deal with the
- // places where undesirable prefixes are printed.
- // ...
- // If the prefix is something worthy of printing, see if the prefix type
- // is in the same package as the enclosing template. If so, print it
- // unqualified and they'll figure it out.
- //
- // val stripPrefixes = List(templatePackage.fullName + ".", "package.", "java.lang.")
- // if (!preSym.printWithoutPrefix) {
- // nameBuffer append stripPrefixes.foldLeft(pre.prefixString)(_ stripPrefix _)
- // }
- val bSym = normalizeTemplate(aSym)
- if (bSym.isNonClassType)
- nameBuffer append bSym.decodedName
- else {
- val tpl = makeTemplate(bSym)
- val pos0 = nameBuffer.length
- refBuffer += pos0 -> (tpl, tpl.name.length)
- nameBuffer append tpl.name
- }
- if (!targs.isEmpty) {
- nameBuffer append '['
- appendTypes0(targs, ", ")
- nameBuffer append ']'
- }
- /* Refined types */
- case RefinedType(parents, defs) =>
- val ignoreParents = Set(AnyClass, ObjectClass)
- val filtParents = parents filterNot (x => ignoreParents(x.typeSymbol)) match {
- case Nil => parents
- case ps => ps
- }
- appendTypes0(filtParents, " with ")
- // XXX Still todo: properly printing refinements.
- // Since I didn't know how to go about displaying a multi-line type, I went with
- // printing single method refinements (which should be the most common) and printing
- // the number of members if there are more.
- defs.toList match {
- case Nil => ()
- case x :: Nil => nameBuffer append (" { " + x.defString + " }")
- case xs => nameBuffer append (" { ... /* %d definitions in type refinement */ }" format xs.size)
+ /** Returns:
+ * - a DocTemplate if the type's symbol is documented
+ * - a NoDocTemplateMember if the type's symbol is not documented in its parent but in another template
+ * - a NoDocTemplate if the type's symbol is not documented at all */
+ def makeTemplateOrMemberTemplate(parent: Type): TemplateImpl = {
+ def noDocTemplate = makeTemplate(parent.typeSymbol)
+ findTemplateMaybe(parent.typeSymbol) match {
+ case Some(tpl) => tpl
+ case None => parent match {
+ case TypeRef(pre, sym, args) =>
+ findTemplateMaybe(pre.typeSymbol) match {
+ case Some(tpl) => findMember(parent.typeSymbol, tpl).collect({case t: TemplateImpl => t}).getOrElse(noDocTemplate)
+ case None => noDocTemplate
+ }
+ case _ => noDocTemplate
}
- /* Eval-by-name types */
- case NullaryMethodType(result) =>
- nameBuffer append '⇒'
- appendType0(result)
- /* Polymorphic types */
- case PolyType(tparams, result) => assert(tparams nonEmpty)
-// throw new Error("Polymorphic type '" + tpe + "' cannot be printed as a type")
- def typeParamsToString(tps: List[Symbol]): String = if(tps isEmpty) "" else
- tps.map{tparam =>
- tparam.varianceString + tparam.name + typeParamsToString(tparam.typeParams)
- }.mkString("[", ", ", "]")
- nameBuffer append typeParamsToString(tparams)
- appendType0(result)
- case tpen =>
- nameBuffer append tpen.toString
+ }
}
- appendType0(aType)
- val refEntity = refBuffer
- val name = optimize(nameBuffer.toString)
- }
+
+ filtParents.map(parent => {
+ val templateEntity = makeTemplateOrMemberTemplate(parent)
+ val typeEntity = makeType(parent, inTpl)
+ (templateEntity, typeEntity)
+ })
+ case _ =>
+ List((makeTemplate(aType.typeSymbol), makeType(aType, inTpl)))
}
- def templateShouldDocument(aSym: Symbol): Boolean = {
- // TODO: document sourceless entities (e.g., Any, etc), based on a new Setting to be added
- (aSym.isPackageClass || (aSym.sourceFile != null)) && localShouldDocument(aSym) &&
- ( aSym.owner == NoSymbol || templateShouldDocument(aSym.owner) ) && !isEmptyJavaObject(aSym)
+ def makeQualifiedName(sym: Symbol, relativeTo: Option[Symbol] = None): String = {
+ val stop = if (relativeTo.isDefined) relativeTo.get.ownerChain.toSet else Set[Symbol]()
+ var sym1 = sym
+ var path = new StringBuilder()
+ // var path = List[Symbol]()
+
+ while ((sym1 != NoSymbol) && (path.isEmpty || !stop(sym1))) {
+ val sym1Norm = normalizeTemplate(sym1)
+ if (!sym1.sourceModule.isPackageObject && sym1Norm != RootPackage) {
+ if (path.length != 0)
+ path.insert(0, ".")
+ path.insert(0, sym1Norm.nameString)
+ // path::= sym1Norm
+ }
+ sym1 = sym1.owner
+ }
+
+ optimize(path.toString)
+ //path.mkString(".")
}
- def isEmptyJavaObject(aSym: Symbol): Boolean = {
- def hasMembers = aSym.info.members.exists(s => localShouldDocument(s) && (!s.isConstructor || s.owner == aSym))
- aSym.isModule && aSym.isJavaDefined && !hasMembers
+ def inOriginalOwner(aSym: Symbol, inTpl: TemplateImpl): Boolean =
+ normalizeTemplate(aSym.owner) == normalizeTemplate(inTpl.sym)
+
+ def templateShouldDocument(aSym: Symbol, inTpl: DocTemplateImpl): Boolean =
+ (aSym.isTrait || aSym.isClass || aSym.isModule || typeShouldDocument(aSym, inTpl)) &&
+ localShouldDocument(aSym) &&
+ !isEmptyJavaObject(aSym) &&
+ // either it's inside the original owner or we can document it later:
+ (!inOriginalOwner(aSym, inTpl) || (aSym.isPackageClass || (aSym.sourceFile != null)))
+
+ def membersShouldDocument(sym: Symbol, inTpl: TemplateImpl) = {
+ // pruning modules that shouldn't be documented
+ // Why Symbol.isInitialized? Well, because we need to avoid exploring all the space available to scaladoc
+ // from the classpath -- scaladoc is a hog, it will explore everything starting from the root package unless we
+ // somehow prune the tree. And isInitialized is a good heuristic for prunning -- if the package was not explored
+ // during typer and refchecks, it's not necessary for the current application and there's no need to explore it.
+ (!sym.isModule || sym.moduleClass.isInitialized) &&
+ // documenting only public and protected members
+ localShouldDocument(sym) &&
+ // Only this class's constructors are part of its members, inherited constructors are not.
+ (!sym.isConstructor || sym.owner == inTpl.sym) &&
+ // If the @bridge annotation overrides a normal member, show it
+ !isPureBridge(sym)
}
- def localShouldDocument(aSym: Symbol): Boolean = {
+ def isEmptyJavaObject(aSym: Symbol): Boolean =
+ aSym.isModule && aSym.isJavaDefined &&
+ aSym.info.members.exists(s => localShouldDocument(s) && (!s.isConstructor || s.owner == aSym))
+
+ def localShouldDocument(aSym: Symbol): Boolean =
!aSym.isPrivate && (aSym.isProtected || aSym.privateWithin == NoSymbol) && !aSym.isSynthetic
- }
+
+ /** Filter '@bridge' methods only if *they don't override non-bridge methods*. See SI-5373 for details */
+ def isPureBridge(sym: Symbol) = sym.isBridge && sym.allOverriddenSymbols.forall(_.isBridge)
+
+ // the classes that are excluded from the index should also be excluded from the diagrams
+ def classExcluded(clazz: TemplateEntity): Boolean = settings.hardcoded.isExcluded(clazz.qualifiedName)
+
+ // the implicit conversions that are excluded from the pages should not appear in the diagram
+ def implicitExcluded(convertorMethod: String): Boolean = settings.hiddenImplicits(convertorMethod)
+
+ // whether or not to create a page for an {abstract,alias} type
+ def typeShouldDocument(bSym: Symbol, inTpl: DocTemplateImpl) =
+ (settings.docExpandAllTypes.value && (bSym.sourceFile != null)) ||
+ (bSym.isAliasType || bSym.isAbstractType) &&
+ { val rawComment = global.expandedDocComment(bSym, inTpl.sym)
+ rawComment.contains("@template") || rawComment.contains("@documentable") }
}
+
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
new file mode 100644
index 0000000..f88251b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
@@ -0,0 +1,609 @@
+/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL
+ *
+ * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them.
+ *
+ * @author Vlad Ureche
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc
+package doc
+package model
+
+import scala.collection._
+import scala.util.matching.Regex
+
+import symtab.Flags
+import io._
+
+import model.{ RootPackage => RootPackageEntity }
+
+/**
+ * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them.
+ *
+ * Let's take this as an example:
+ * {{{
+ * object Test {
+ * class A
+ *
+ * class B {
+ * def foo = 1
+ * }
+ *
+ * class C extends B {
+ * def bar = 2
+ * class implicit
+ * }
+ *
+ * D def conv(a: A) = new C
+ * }
+ * }}}
+ *
+ * Overview:
+ * - scaladoc-ing the above classes, `A` will get two more methods: foo and bar, over its default methods
+ * - the nested classes (specifically `D` above), abstract types, type aliases and constructor members are not added to
+ * `A` (see makeMember0 in ModelFactory, last 3 cases)
+ * - the members added by implicit conversion are always listed under the implicit conversion, not under the class they
+ * actually come from (`foo` will be listed as coming from the implicit conversion to `C` instead of `B`) - see
+ * `definitionName` in MemberImpl
+ *
+ * Internals:
+ * TODO: Give an overview here
+ */
+trait ModelFactoryImplicitSupport {
+ thisFactory: ModelFactory with ModelFactoryTypeSupport with CommentFactory with TreeFactory =>
+
+ import global._
+ import global.analyzer._
+ import global.definitions._
+ import rootMirror.{RootPackage, RootClass, EmptyPackage, EmptyPackageClass}
+ import settings.hardcoded
+
+ // debugging:
+ val DEBUG: Boolean = settings.docImplicitsDebug.value
+ val ERROR: Boolean = true // currently we show all errors
+ @inline final def debug(msg: => String) = if (DEBUG) settings.printMsg(msg)
+ @inline final def error(msg: => String) = if (ERROR) settings.printMsg(msg)
+
+ /** This is a flag that indicates whether to eliminate implicits that cannot be satisfied within the current scope.
+ * For example, if an implicit conversion requires that there is a Numeric[T] in scope:
+ * {{{
+ * class A[T]
+ * class B extends A[Int]
+ * class C extends A[String]
+ * implicit def pimpA[T: Numeric](a: A[T]): D
+ * }}}
+ * For B, no constraints are generated as Numeric[Int] is already in the default scope. On the other hand, for the
+ * conversion from C to D, depending on -implicits-show-all, the conversion can:
+ * - not be generated at all, since there's no Numeric[String] in scope (if ran without -implicits-show-all)
+ * - generated with a *weird* constraint, Numeric[String] as the user might add it by hand (if flag is enabled)
+ */
+ class ImplicitNotFound(tpe: Type) extends Exception("No implicit of type " + tpe + " found in scope.")
+
+ /* ============== MAKER METHODS ============== */
+
+ /**
+ * Make the implicit conversion objects
+ *
+ * A word about the scope of the implicit conversions: currently we look at a very basic context composed of the
+ * default Scala imports (Predef._ for example) and the companion object of the current class, if one exists. In the
+ * future we might want to extend this to more complex scopes.
+ */
+ def makeImplicitConversions(sym: Symbol, inTpl: DocTemplateImpl): List[ImplicitConversionImpl] =
+ // Nothing and Null are somewhat special -- they can be transformed by any implicit conversion available in scope.
+ // But we don't want that, so we'll simply refuse to find implicit conversions on for Nothing and Null
+ if (!(sym.isClass || sym.isTrait || sym == AnyRefClass) || sym == NothingClass || sym == NullClass) Nil
+ else {
+ var context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit)
+
+ val results = global.analyzer.allViewsFrom(sym.tpe, context, sym.typeParams)
+ var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl))
+ // also keep empty conversions, so they appear in diagrams
+ // conversions = conversions.filter(!_.members.isEmpty)
+
+ // Filter out specialized conversions from array
+ if (sym == ArrayClass)
+ conversions = conversions.filterNot((conv: ImplicitConversionImpl) =>
+ hardcoded.arraySkipConversions.contains(conv.conversionQualifiedName))
+
+ // Filter out non-sensical conversions from value types
+ if (isPrimitiveValueType(sym.tpe))
+ conversions = conversions.filter((ic: ImplicitConversionImpl) =>
+ hardcoded.valueClassFilter(sym.nameString, ic.conversionQualifiedName))
+
+ // Put the visible conversions in front
+ val (ownConversions, commonConversions) =
+ conversions.partition(!_.isHiddenConversion)
+
+ ownConversions ::: commonConversions
+ }
+
+ /** makeImplicitConversion performs the heavier lifting to get the implicit listing:
+ * - for each possible conversion function (also called view)
+ * * figures out the final result of the view (to what is our class transformed?)
+ * * figures out the necessary constraints on the type parameters (such as T <: Int) and the context (such as Numeric[T])
+ * * lists all inherited members
+ *
+ * What? in details:
+ * - say we start from a class A[T1, T2, T3, T4]
+ * - we have an implicit function (view) in scope:
+ * def pimpA[T3 <: Long, T4](a: A[Int, Foo[Bar[X]], T3, T4])(implicit ev1: TypeTag[T4], ev2: Numeric[T4]): PimpedA
+ * - A is converted to PimpedA ONLY if a couple of constraints are satisfied:
+ * * T1 must be equal to Int
+ * * T2 must be equal to Foo[Bar[X]]
+ * * T3 must be upper bounded by Long
+ * * there must be evidence of Numeric[T4] and a TypeTag[T4] within scope
+ * - the final type is PimpedA and A therefore inherits a couple of members from pimpedA
+ *
+ * How?
+ * some notes:
+ * - Scala's type inference will want to solve all type parameters down to actual types, but we only want constraints
+ * to maintain generality
+ * - therefore, allViewsFrom wraps type parameters into "untouchable" type variables that only gather constraints,
+ * but are never solved down to a type
+ * - these must be reverted back to the type parameters and the constraints must be extracted and simplified (this is
+ * done by the uniteConstraints and boundedTParamsConstraints. Be sure to check them out
+ * - we also need to transform implicit parameters in the view's signature into constraints, such that Numeric[T4]
+ * appears as a constraint
+ */
+ def makeImplicitConversion(sym: Symbol, result: SearchResult, constrs: List[TypeConstraint], context: Context, inTpl: DocTemplateImpl): List[ImplicitConversionImpl] =
+ if (result.tree == EmptyTree) Nil
+ else {
+ // `result` will contain the type of the view (= implicit conversion method)
+ // the search introduces untouchable type variables, but we want to get back to type parameters
+ val viewFullType = result.tree.tpe
+ // set the previously implicit parameters to being explicit
+
+ val (viewSimplifiedType, viewImplicitTypes) = removeImplicitParameters(viewFullType)
+
+ // TODO: Isolate this corner case :) - Predef.<%< and put it in the testsuite
+ if (viewSimplifiedType.params.length != 1) {
+ // This is known to be caused by the `<%<` object in Predef:
+ // {{{
+ // sealed abstract class <%<[-From, +To] extends (From => To) with Serializable
+ // object <%< {
+ // implicit def conformsOrViewsAs[A <% B, B]: A <%< B = new (A <%< B) {def apply(x: A) = x}
+ // }
+ // }}}
+ // so we just won't generate an implicit conversion for implicit methods that only take implicit parameters
+ return Nil
+ }
+
+ // type the view application so we get the exact type of the result (not the formal type)
+ val viewTree = result.tree.setType(viewSimplifiedType)
+ val appliedTree = new ApplyImplicitView(viewTree, List(Ident("<argument>") setType viewTree.tpe.paramTypes.head))
+ val appliedTreeTyped: Tree = {
+ val newContext = context.makeImplicit(context.ambiguousErrors)
+ newContext.macrosEnabled = false
+ val newTyper = global.analyzer.newTyper(newContext)
+ newTyper.silent(_.typed(appliedTree, global.analyzer.EXPRmode, WildcardType), false) match {
+
+ case global.analyzer.SilentResultValue(t: Tree) => t
+ case global.analyzer.SilentTypeError(err) =>
+ global.reporter.warning(sym.pos, err.toString)
+ return Nil
+ }
+ }
+
+ // now we have the final type:
+ val toType = wildcardToNothing(typeVarToOriginOrWildcard(appliedTreeTyped.tpe.finalResultType))
+
+ try {
+ // Transform bound constraints into scaladoc constraints
+ val implParamConstraints = makeImplicitConstraints(viewImplicitTypes, sym, context, inTpl)
+ val boundsConstraints = makeBoundedConstraints(sym.typeParams, constrs, inTpl)
+ // TODO: no substitution constraints appear in the library and compiler scaladoc. Maybe they can be removed?
+ val substConstraints = makeSubstitutionConstraints(result.subst, inTpl)
+ val constraints = implParamConstraints ::: boundsConstraints ::: substConstraints
+
+ List(new ImplicitConversionImpl(sym, result.tree.symbol, toType, constraints, inTpl))
+ } catch {
+ case i: ImplicitNotFound =>
+ //println(" Eliminating: " + toType)
+ Nil
+ }
+ }
+
+ def makeImplicitConstraints(types: List[Type], sym: Symbol, context: Context, inTpl: DocTemplateImpl): List[Constraint] =
+ types.flatMap((tpe:Type) => {
+ // TODO: Before creating constraints, map typeVarToOriginOrWildcard on the implicitTypes
+ val implType = typeVarToOriginOrWildcard(tpe)
+ val qualifiedName = makeQualifiedName(implType.typeSymbol)
+
+ var available: Option[Boolean] = None
+
+ // see: https://groups.google.com/forum/?hl=en&fromgroups#!topic/scala-internals/gm_fr0RKzC4
+ //
+ // println(implType + " => " + implType.isTrivial)
+ // var tpes: List[Type] = List(implType)
+ // while (!tpes.isEmpty) {
+ // val tpe = tpes.head
+ // tpes = tpes.tail
+ // tpe match {
+ // case TypeRef(pre, sym, args) =>
+ // tpes = pre :: args ::: tpes
+ // println(tpe + " => " + tpe.isTrivial)
+ // case _ =>
+ // println(tpe + " (of type" + tpe.getClass + ") => " + tpe.isTrivial)
+ // }
+ // }
+ // println("\n")
+
+ // look for type variables in the type. If there are none, we can decide if the implicit is there or not
+ if (implType.isTrivial) {
+ try {
+ context.flushBuffer() /* any errors here should not prevent future findings */
+ // TODO: Not sure this is the right thing to do -- seems similar to what scalac should be doing
+ val context2 = context.make(context.unit, context.tree, sym.owner, context.scope, context.imports)
+ val search = inferImplicit(EmptyTree, tpe, false, false, context2, false)
+ context.flushBuffer() /* any errors here should not prevent future findings */
+
+ available = Some(search.tree != EmptyTree)
+ } catch {
+ case _: TypeError =>
+ }
+ }
+
+ available match {
+ case Some(true) =>
+ Nil
+ case Some(false) if (!settings.docImplicitsShowAll.value) =>
+ // if -implicits-show-all is not set, we get rid of impossible conversions (such as Numeric[String])
+ throw new ImplicitNotFound(implType)
+ case _ =>
+ val typeParamNames = sym.typeParams.map(_.name)
+
+ // TODO: This is maybe the worst hack I ever did - it's as dirty as hell, but it seems to work, so until I
+ // learn more about symbols, it'll have to do.
+ implType match {
+ case TypeRef(pre, sym, List(TypeRef(NoPrefix, targ, Nil))) if (typeParamNames contains targ.name) =>
+ hardcoded.knownTypeClasses.get(qualifiedName) match {
+ case Some(explanation) =>
+ List(new KnownTypeClassConstraint {
+ val typeParamName = targ.nameString
+ lazy val typeExplanation = explanation
+ lazy val typeClassEntity = makeTemplate(sym)
+ lazy val implicitType: TypeEntity = makeType(implType, inTpl)
+ })
+ case None =>
+ List(new TypeClassConstraint {
+ val typeParamName = targ.nameString
+ lazy val typeClassEntity = makeTemplate(sym)
+ lazy val implicitType: TypeEntity = makeType(implType, inTpl)
+ })
+ }
+ case _ =>
+ List(new ImplicitInScopeConstraint{
+ lazy val implicitType: TypeEntity = makeType(implType, inTpl)
+ })
+ }
+ }
+ })
+
+ def makeSubstitutionConstraints(subst: TreeTypeSubstituter, inTpl: DocTemplateImpl): List[Constraint] =
+ (subst.from zip subst.to) map {
+ case (from, to) =>
+ new EqualTypeParamConstraint {
+ error("Scaladoc implicits: Unexpected type substitution constraint from: " + from + " to: " + to)
+ val typeParamName = from.toString
+ val rhs = makeType(to, inTpl)
+ }
+ }
+
+ def makeBoundedConstraints(tparams: List[Symbol], constrs: List[TypeConstraint], inTpl: DocTemplateImpl): List[Constraint] =
+ (tparams zip constrs) flatMap {
+ case (tparam, constr) => {
+ uniteConstraints(constr) match {
+ case (loBounds, upBounds) => (loBounds filter (_ != NothingClass.tpe), upBounds filter (_ != AnyClass.tpe)) match {
+ case (Nil, Nil) =>
+ Nil
+ case (List(lo), List(up)) if (lo == up) =>
+ List(new EqualTypeParamConstraint {
+ val typeParamName = tparam.nameString
+ lazy val rhs = makeType(lo, inTpl)
+ })
+ case (List(lo), List(up)) =>
+ List(new BoundedTypeParamConstraint {
+ val typeParamName = tparam.nameString
+ lazy val lowerBound = makeType(lo, inTpl)
+ lazy val upperBound = makeType(up, inTpl)
+ })
+ case (List(lo), Nil) =>
+ List(new LowerBoundedTypeParamConstraint {
+ val typeParamName = tparam.nameString
+ lazy val lowerBound = makeType(lo, inTpl)
+ })
+ case (Nil, List(up)) =>
+ List(new UpperBoundedTypeParamConstraint {
+ val typeParamName = tparam.nameString
+ lazy val upperBound = makeType(up, inTpl)
+ })
+ case other =>
+ // this is likely an error on the lub/glb side
+ error("Scaladoc implicits: Error computing lub/glb for: " + (tparam, constr) + ":\n" + other)
+ Nil
+ }
+ }
+ }
+ }
+
+ /* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
+
+ class ImplicitConversionImpl(
+ val sym: Symbol,
+ val convSym: Symbol,
+ val toType: Type,
+ val constrs: List[Constraint],
+ inTpl: DocTemplateImpl)
+ extends ImplicitConversion {
+
+ def source: DocTemplateEntity = inTpl
+
+ def targetType: TypeEntity = makeType(toType, inTpl)
+
+ def convertorOwner: TemplateEntity =
+ if (convSym != NoSymbol)
+ makeTemplate(convSym.owner)
+ else {
+ error("Scaladoc implicits: " + toString + " = NoSymbol!")
+ makeRootPackage
+ }
+
+ def targetTemplate: Option[TemplateEntity] = toType match {
+ // @Vlad: I'm being extra conservative in template creation -- I don't want to create templates for complex types
+ // such as refinement types because the template can't represent the type corectly (a template corresponds to a
+ // package, class, trait or object)
+ case t: TypeRef => Some(makeTemplate(t.sym))
+ case RefinedType(parents, decls) => None
+ case _ => error("Scaladoc implicits: Could not create template for: " + toType + " of type " + toType.getClass); None
+ }
+
+ def targetTypeComponents: List[(TemplateEntity, TypeEntity)] = makeParentTypes(toType, None, inTpl)
+
+ def convertorMethod: Either[MemberEntity, String] = {
+ var convertor: MemberEntity = null
+
+ convertorOwner match {
+ case doc: DocTemplateImpl =>
+ val convertors = members.collect { case m: MemberImpl if m.sym == convSym => m }
+ if (convertors.length == 1)
+ convertor = convertors.head
+ case _ =>
+ }
+ if (convertor ne null)
+ Left(convertor)
+ else
+ Right(convSym.nameString)
+ }
+
+ def conversionShortName = convSym.nameString
+
+ def conversionQualifiedName = makeQualifiedName(convSym)
+
+ lazy val constraints: List[Constraint] = constrs
+
+ lazy val memberImpls: List[MemberImpl] = {
+ // Obtain the members inherited by the implicit conversion
+ val memberSyms = toType.members.filter(implicitShouldDocument(_)).toList
+ val existingSyms = sym.info.members
+
+ // Debugging part :)
+ debug(sym.nameString + "\n" + "=" * sym.nameString.length())
+ debug(" * conversion " + convSym + " from " + sym.tpe + " to " + toType)
+
+ debug(" -> full type: " + toType)
+ if (constraints.length != 0) {
+ debug(" -> constraints: ")
+ constraints foreach { constr => debug(" - " + constr) }
+ }
+ debug(" -> members:")
+ memberSyms foreach (sym => debug(" - "+ sym.decodedName +" : " + sym.info))
+ debug("")
+
+ memberSyms.flatMap({ aSym =>
+ // we can't just pick up nodes from the original template, although that would be very convenient:
+ // they need the byConversion field to be attached to themselves and the types to be transformed by
+ // asSeenFrom
+
+ // at the same time, the member itself is in the inTpl, not in the new template -- but should pick up
+ // variables from the old template. Ugly huh? We'll always create the member inTpl, but it will change
+ // the template when expanding variables in the comment :)
+ makeMember(aSym, Some(this), inTpl)
+ })
+ }
+
+ lazy val members: List[MemberEntity] = memberImpls
+
+ def isHiddenConversion = settings.hiddenImplicits(conversionQualifiedName)
+
+ override def toString = "Implcit conversion from " + sym.tpe + " to " + toType + " done by " + convSym
+ }
+
+ /* ========================= HELPER METHODS ========================== */
+ /**
+ * Computes the shadowing table for all the members in the implicit conversions
+ * @param mbrs All template's members, including usecases and full signature members
+ * @param convs All the conversions the template takes part in
+ * @param inTpl the ususal :)
+ */
+ def makeShadowingTable(mbrs: List[MemberImpl],
+ convs: List[ImplicitConversionImpl],
+ inTpl: DocTemplateImpl): Map[MemberEntity, ImplicitMemberShadowing] = {
+ assert(modelFinished)
+
+ var shadowingTable = Map[MemberEntity, ImplicitMemberShadowing]()
+
+ for (conv <- convs) {
+ val otherConvs = convs.filterNot(_ == conv)
+
+ for (member <- conv.memberImpls) {
+ // for each member in our list
+ val sym1 = member.sym
+ val tpe1 = conv.toType.memberInfo(sym1)
+
+ // check if it's shadowed by a member in the original class
+ var shadowedBySyms: List[Symbol] = List()
+ for (mbr <- mbrs) {
+ val sym2 = mbr.sym
+ if (sym1.name == sym2.name) {
+ val shadowed = !settings.docImplicitsSoundShadowing.value || {
+ val tpe2 = inTpl.sym.info.memberInfo(sym2)
+ !isDistinguishableFrom(tpe1, tpe2)
+ }
+ if (shadowed)
+ shadowedBySyms ::= sym2
+ }
+ }
+
+ val shadowedByMembers = mbrs.filter((mb: MemberImpl) => shadowedBySyms.contains(mb.sym))
+
+ // check if it's shadowed by another member
+ var ambiguousByMembers: List[MemberEntity] = List()
+ for (conv <- otherConvs)
+ for (member2 <- conv.memberImpls) {
+ val sym2 = member2.sym
+ if (sym1.name == sym2.name) {
+ val tpe2 = conv.toType.memberInfo(sym2)
+ // Ambiguity should be an equivalence relation
+ val ambiguated = !isDistinguishableFrom(tpe1, tpe2) || !isDistinguishableFrom(tpe2, tpe1)
+ if (ambiguated)
+ ambiguousByMembers ::= member2
+ }
+ }
+
+ // we finally have the shadowing info
+ val shadowing = new ImplicitMemberShadowing {
+ def shadowingMembers: List[MemberEntity] = shadowedByMembers
+ def ambiguatingMembers: List[MemberEntity] = ambiguousByMembers
+ }
+
+ shadowingTable += (member -> shadowing)
+ }
+ }
+
+ shadowingTable
+ }
+
+
+ /**
+ * uniteConstraints takes a TypeConstraint instance and simplifies the constraints inside
+ *
+ * Normally TypeConstraint contains multiple lower and upper bounds, and we want to reduce this to a lower and an
+ * upper bound. Here are a couple of catches we need to be aware of:
+ * - before finding a view (implicit method in scope that maps class A[T1,T2,.. Tn] to something else) the type
+ * parameters are transformed into "untouchable" type variables so that type inference does not attempt to
+ * fully solve them down to a type but rather constrains them on both sides just enough for the view to be
+ * applicable -- now, we want to transform those type variables back to the original type parameters
+ * - some of the bounds fail type inference and therefore refer to Nothing => when performing unification (lub, glb)
+ * they start looking ugly => we (unsoundly) transform Nothing to WildcardType so we fool the unification algorithms
+ * into thinking there's nothing there
+ * - we don't want the wildcard types surviving the unification so we replace them back to Nothings
+ */
+ def uniteConstraints(constr: TypeConstraint): (List[Type], List[Type]) =
+ try {
+ (List(wildcardToNothing(lub(constr.loBounds map typeVarToOriginOrWildcard))),
+ List(wildcardToNothing(glb(constr.hiBounds map typeVarToOriginOrWildcard))))
+ } catch {
+ // does this actually ever happen? (probably when type vars occur in the bounds)
+ case x: Throwable => (constr.loBounds.distinct, constr.hiBounds.distinct)
+ }
+
+ /**
+ * Make implicits explicit - Not used curently
+ */
+ object implicitToExplicit extends TypeMap {
+ def apply(tp: Type): Type = mapOver(tp) match {
+ case MethodType(params, resultType) =>
+ MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType)
+ case other =>
+ other
+ }
+ }
+
+ /**
+ * removeImplicitParameters transforms implicit parameters from the view result type into constraints and
+ * returns the simplified type of the view
+ *
+ * for the example view:
+ * implicit def pimpMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
+ * the implicit view result type is:
+ * (a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
+ * and the simplified type will be:
+ * MyClass[T] => PimpedMyClass[T]
+ */
+ def removeImplicitParameters(viewType: Type): (Type, List[Type]) = {
+
+ val params = viewType.paramss.flatten
+ val (normalParams, implParams) = params.partition(!_.isImplicit)
+ val simplifiedType = MethodType(normalParams, viewType.finalResultType)
+ val implicitTypes = implParams.map(_.tpe)
+
+ (simplifiedType, implicitTypes)
+ }
+
+ /**
+ * typeVarsToOriginOrWildcard transforms the "untouchable" type variables into either their origins (the original
+ * type parameters) or into wildcard types if nothing matches
+ */
+ object typeVarToOriginOrWildcard extends TypeMap {
+ def apply(tp: Type): Type = mapOver(tp) match {
+ case tv: TypeVar =>
+ if (tv.constr.inst.typeSymbol == NothingClass)
+ WildcardType
+ else
+ tv.origin //appliedType(tv.origin.typeConstructor, tv.typeArgs map this)
+ case other =>
+ if (other.typeSymbol == NothingClass)
+ WildcardType
+ else
+ other
+ }
+ }
+
+ /**
+ * wildcardToNothing transforms wildcard types back to Nothing
+ */
+ object wildcardToNothing extends TypeMap {
+ def apply(tp: Type): Type = mapOver(tp) match {
+ case WildcardType =>
+ NothingClass.tpe
+ case other =>
+ other
+ }
+ }
+
+ /** implicitShouldDocument decides whether a member inherited by implicit conversion should be documented */
+ def implicitShouldDocument(aSym: Symbol): Boolean = {
+ // We shouldn't document:
+ // - constructors
+ // - common methods (in Any, AnyRef, Object) as they are automatically removed
+ // - private and protected members (not accessible following an implicit conversion)
+ // - members starting with _ (usually reserved for internal stuff)
+ localShouldDocument(aSym) && (!aSym.isConstructor) && (aSym.owner != AnyValClass) &&
+ (aSym.owner != AnyClass) && (aSym.owner != ObjectClass) &&
+ (!aSym.isProtected) && (!aSym.isPrivate) && (!aSym.name.startsWith("_")) &&
+ (aSym.isMethod || aSym.isGetter || aSym.isSetter) &&
+ (aSym.nameString != "getClass")
+ }
+
+ /* To put it very bluntly: checks if you can call implicitly added method with t1 when t2 is already there in the
+ * class. We suppose the name of the two members coincides
+ *
+ * The trick here is that the resultType does not matter - the condition for removal it that paramss have the same
+ * structure (A => B => C may not override (A, B) => C) and that all the types involved are
+ * of the implcit conversion's member are subtypes of the parent members' parameters */
+ def isDistinguishableFrom(t1: Type, t2: Type): Boolean = {
+ // Vlad: I tried using matches but it's not exactly what we need:
+ // (p: AnyRef)AnyRef matches ((t: String)AnyRef returns false -- but we want that to be true
+ // !(t1 matches t2)
+ if (t1.paramss.map(_.length) == t2.paramss.map(_.length)) {
+ for ((t1p, t2p) <- t1.paramss.flatten zip t2.paramss.flatten)
+ if (!isSubType(t1 memberInfo t1p, t2 memberInfo t2p))
+ return true // if on the corresponding parameter you give a type that is in t1 but not in t2
+ // def foo(a: Either[Int, Double]): Int = 3
+ // def foo(b: Left[T1]): Int = 6
+ // a.foo(Right(4.5d)) prints out 3 :)
+ false
+ } else true // the member structure is different foo(3, 5) vs foo(3)(5)
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
new file mode 100644
index 0000000..844a509
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
@@ -0,0 +1,326 @@
+/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */
+
+package scala.tools.nsc
+package doc
+package model
+
+import base._
+import diagram._
+
+import scala.collection._
+import scala.util.matching.Regex
+
+import symtab.Flags
+
+import io._
+
+import model.{ RootPackage => RootPackageEntity }
+
+/** This trait extracts all required information for documentation from compilation units */
+trait ModelFactoryTypeSupport {
+ thisFactory: ModelFactory
+ with ModelFactoryImplicitSupport
+ with ModelFactoryTypeSupport
+ with DiagramFactory
+ with CommentFactory
+ with TreeFactory
+ with MemberLookup =>
+
+ import global._
+ import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass }
+ import rootMirror.{ RootPackage, RootClass, EmptyPackage }
+
+ protected val typeCache = new mutable.LinkedHashMap[Type, TypeEntity]
+
+ /** */
+ def makeType(aType: Type, inTpl: TemplateImpl): TypeEntity = {
+ def templatePackage = closestPackage(inTpl.sym)
+
+ def createTypeEntity = new TypeEntity {
+ private var nameBuffer = new StringBuilder
+ private var refBuffer = new immutable.TreeMap[Int, (LinkTo, Int)]
+ private def appendTypes0(types: List[Type], sep: String): Unit = types match {
+ case Nil =>
+ case tp :: Nil =>
+ appendType0(tp)
+ case tp :: tps =>
+ appendType0(tp)
+ nameBuffer append sep
+ appendTypes0(tps, sep)
+ }
+
+ private def appendType0(tpe: Type): Unit = tpe match {
+ /* Type refs */
+ case tp: TypeRef if definitions.isFunctionType(tp) =>
+ val args = tp.normalize.typeArgs
+ nameBuffer append '('
+ appendTypes0(args.init, ", ")
+ nameBuffer append ") ⇒ "
+ appendType0(args.last)
+ case tp: TypeRef if definitions.isScalaRepeatedParamType(tp) =>
+ appendType0(tp.args.head)
+ nameBuffer append '*'
+ case tp: TypeRef if definitions.isByNameParamType(tp) =>
+ nameBuffer append "⇒ "
+ appendType0(tp.args.head)
+ case tp: TypeRef if definitions.isTupleType(tp) =>
+ val args = tp.normalize.typeArgs
+ nameBuffer append '('
+ appendTypes0(args, ", ")
+ nameBuffer append ')'
+ case TypeRef(pre, aSym, targs) =>
+ val preSym = pre.widen.typeSymbol
+
+ // SI-3314/SI-4888: Classes, Traits and Types can be inherited from a template to another:
+ // class Enum { abstract class Value }
+ // class Day extends Enum { object Mon extends Value /*...*/ }
+ // ===> in such cases we have two options:
+ // (0) if there's no inheritance taking place (Enum#Value) we can link to the template directly
+ // (1) if we generate the doc template for Day, we can link to the correct member
+ // (2) If the symbol comes from an external library for which we know the documentation URL, point to it.
+ // (3) if we don't generate the doc template, we should at least indicate the correct prefix in the tooltip
+ val bSym = normalizeTemplate(aSym)
+ val owner =
+ if ((preSym != NoSymbol) && /* it needs a prefix */
+ (preSym != bSym.owner) && /* prefix is different from owner */
+ (aSym == bSym)) /* normalization doesn't play tricks on us */
+ preSym
+ else
+ bSym.owner
+
+ val link =
+ findTemplateMaybe(bSym) match {
+ case Some(bTpl) if owner == bSym.owner =>
+ // (0) the owner's class is linked AND has a template - lovely
+ bTpl match {
+ case dtpl: DocTemplateEntity => new LinkToTpl(dtpl)
+ case _ => new Tooltip(bTpl.qualifiedName)
+ }
+ case _ =>
+ val oTpl = findTemplateMaybe(owner)
+ (oTpl, oTpl flatMap (findMember(bSym, _))) match {
+ case (Some(oTpl), Some(bMbr)) =>
+ // (1) the owner's class
+ LinkToMember(bMbr, oTpl)
+ case _ =>
+ val name = makeQualifiedName(bSym)
+ if (!bSym.owner.isPackage)
+ Tooltip(name)
+ else
+ findExternalLink(bSym, name).getOrElse (
+ // (3) if we couldn't find neither the owner nor external URL to link to, show a tooltip with the qualified name
+ Tooltip(name)
+ )
+ }
+ }
+
+ // SI-4360 Showing prefixes when necessary
+ // We check whether there's any directly accessible type with the same name in the current template OR if the
+ // type is inherited from one template to another. There may be multiple symbols with the same name in scope,
+ // but we won't show the prefix if our symbol is among them, only if *it's not* -- that's equal to showing
+ // the prefix only for ambiguous references, not for overloaded ones.
+ def needsPrefix: Boolean = {
+ if ((owner != bSym.owner || preSym.isRefinementClass) && (normalizeTemplate(owner) != inTpl.sym))
+ return true
+ // don't get tricked into prefixng method type params and existentials:
+ // I tried several tricks BUT adding the method for which I'm creating the type => that simply won't scale,
+ // as ValueParams are independent of their parent member, and I really don't want to add this information to
+ // all terms, as we're already over the allowed memory footprint
+ if (aSym.isTypeParameterOrSkolem || aSym.isExistentiallyBound /* existential or existential skolem */)
+ return false
+
+ for (tpl <- inTpl.sym.ownerChain) {
+ tpl.info.member(bSym.name) match {
+ case NoSymbol =>
+ // No syms with that name, look further inside the owner chain
+ case sym =>
+ // Symbol found -- either the correct symbol, another one OR an overloaded alternative
+ if (sym == bSym)
+ return false
+ else sym.info match {
+ case OverloadedType(owner, alternatives) =>
+ return alternatives.contains(bSym)
+ case _ =>
+ return true
+ }
+ }
+ }
+ // if it's not found in the owner chain, we can safely leave out the prefix
+ false
+ }
+
+ val prefix =
+ if (!settings.docNoPrefixes.value && needsPrefix && (bSym != AnyRefClass /* which we normalize */)) {
+ if (!owner.isRefinementClass) {
+ val qName = makeQualifiedName(owner, Some(inTpl.sym))
+ if (qName != "") qName + "." else ""
+ }
+ else {
+ nameBuffer append "("
+ appendType0(pre)
+ nameBuffer append ")#"
+ "" // we already appended the prefix
+ }
+ } else ""
+
+ //DEBUGGING:
+ //if (makeQualifiedName(bSym) == "pack1.A") println("needsPrefix(" + bSym + ", " + owner + ", " + inTpl.qualifiedName + ") => " + needsPrefix + " and prefix=" + prefix)
+
+ val name = prefix + bSym.nameString
+ val pos0 = nameBuffer.length
+ refBuffer += pos0 -> ((link, name.length))
+ nameBuffer append name
+
+ if (!targs.isEmpty) {
+ nameBuffer append '['
+ appendTypes0(targs, ", ")
+ nameBuffer append ']'
+ }
+ /* Refined types */
+ case RefinedType(parents, defs) =>
+ val ignoreParents = Set[Symbol](AnyClass, ObjectClass)
+ val filtParents = parents filterNot (x => ignoreParents(x.typeSymbol)) match {
+ case Nil => parents
+ case ps => ps
+ }
+ appendTypes0(filtParents, " with ")
+ // XXX Still todo: properly printing refinements.
+ // Since I didn't know how to go about displaying a multi-line type, I went with
+ // printing single method refinements (which should be the most common) and printing
+ // the number of members if there are more.
+ defs.toList match {
+ case Nil => ()
+ case x :: Nil => nameBuffer append (" { " + x.defString + " }")
+ case xs => nameBuffer append (" { ... /* %d definitions in type refinement */ }" format xs.size)
+ }
+ /* Eval-by-name types */
+ case NullaryMethodType(result) =>
+ nameBuffer append '⇒'
+ appendType0(result)
+
+ /* Polymorphic types */
+ case PolyType(tparams, result) => assert(tparams.nonEmpty)
+ def typeParamsToString(tps: List[Symbol]): String = if (tps.isEmpty) "" else
+ tps.map{tparam =>
+ tparam.varianceString + tparam.name + typeParamsToString(tparam.typeParams)
+ }.mkString("[", ", ", "]")
+ nameBuffer append typeParamsToString(tparams)
+ appendType0(result)
+
+ case et at ExistentialType(quantified, underlying) =>
+
+ def appendInfoStringReduced(sym: Symbol, tp: Type): Unit = {
+ if (sym.isType && !sym.isAliasType && !sym.isClass) {
+ tp match {
+ case PolyType(tparams, _) =>
+ nameBuffer append "["
+ appendTypes0(tparams.map(_.tpe), ", ")
+ nameBuffer append "]"
+ case _ =>
+ }
+ tp.resultType match {
+ case rt @ TypeBounds(_, _) =>
+ appendType0(rt)
+ case rt =>
+ nameBuffer append " <: "
+ appendType0(rt)
+ }
+ } else {
+ // fallback to the Symbol infoString
+ nameBuffer append sym.infoString(tp)
+ }
+ }
+
+ def appendClauses = {
+ nameBuffer append " forSome {"
+ var first = true
+ val qset = quantified.toSet
+ for (sym <- quantified) {
+ if (!first) { nameBuffer append ", " } else first = false
+ if (sym.isSingletonExistential) {
+ nameBuffer append "val "
+ nameBuffer append tpnme.dropSingletonName(sym.name)
+ nameBuffer append ": "
+ appendType0(dropSingletonType(sym.info.bounds.hi))
+ } else {
+ if (sym.flagString != "") nameBuffer append (sym.flagString + " ")
+ if (sym.keyString != "") nameBuffer append (sym.keyString + " ")
+ nameBuffer append sym.varianceString
+ nameBuffer append sym.nameString
+ appendInfoStringReduced(sym, sym.info)
+ }
+ }
+ nameBuffer append "}"
+ }
+
+ underlying match {
+ case TypeRef(pre, sym, args) if et.isRepresentableWithWildcards =>
+ appendType0(typeRef(pre, sym, Nil))
+ nameBuffer append "["
+ var first = true
+ val qset = quantified.toSet
+ for (arg <- args) {
+ if (!first) { nameBuffer append ", " } else first = false
+ arg match {
+ case TypeRef(_, sym, _) if (qset contains sym) =>
+ nameBuffer append "_"
+ appendInfoStringReduced(sym, sym.info)
+ case arg =>
+ appendType0(arg)
+ }
+ }
+ nameBuffer append "]"
+ case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) =>
+ nameBuffer append "("
+ appendType0(underlying)
+ nameBuffer append ")"
+ appendClauses
+ case _ =>
+ appendType0(underlying)
+ appendClauses
+ }
+
+ case tb at TypeBounds(lo, hi) =>
+ if (tb.lo != TypeBounds.empty.lo) {
+ nameBuffer append " >: "
+ appendType0(lo)
+ }
+ if (tb.hi != TypeBounds.empty.hi) {
+ nameBuffer append " <: "
+ appendType0(hi)
+ }
+ // case tpen: ThisType | SingleType | SuperType =>
+ // if (tpen.isInstanceOf[ThisType] && tpen.asInstanceOf[ThisType].sym.isEffectiveRoot) {
+ // appendType0 typeRef(NoPrefix, sym, Nil)
+ // } else {
+ // val underlying =
+ // val pre = underlying.typeSymbol.skipPackageObject
+ // if (pre.isOmittablePrefix) pre.fullName + ".type"
+ // else prefixString + "type"
+ case tpen at ThisType(sym) =>
+ appendType0(typeRef(NoPrefix, sym, Nil))
+ nameBuffer append ".this"
+ if (!tpen.underlying.typeSymbol.skipPackageObject.isOmittablePrefix) nameBuffer append ".type"
+ case tpen at SuperType(thistpe, supertpe) =>
+ nameBuffer append "super["
+ appendType0(supertpe)
+ nameBuffer append "]"
+ case tpen at SingleType(pre, sym) =>
+ appendType0(typeRef(pre, sym, Nil))
+ if (!tpen.underlying.typeSymbol.skipPackageObject.isOmittablePrefix) nameBuffer append ".type"
+ case tpen =>
+ nameBuffer append tpen.toString
+ }
+ appendType0(aType)
+ val refEntity = refBuffer
+ val name = optimize(nameBuffer.toString)
+ nameBuffer = null
+ }
+
+ // SI-4360: Entity caching depends on both the type AND the template it's in, as the prefixes might change for the
+ // same type based on the template the type is shown in.
+ if (settings.docNoPrefixes.value)
+ typeCache.getOrElseUpdate(aType, createTypeEntity)
+ else createTypeEntity
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala b/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala
index 23a4e17..5b4ec4a 100644
--- a/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Chris James
*/
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
old mode 100644
new mode 100755
index 988f2e0..fdad84d
--- a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
@@ -3,7 +3,7 @@ package doc
package model
import scala.collection._
-import util.{RangePosition, OffsetPosition, SourceFile}
+import scala.reflect.internal.util.{RangePosition, OffsetPosition, SourceFile}
/** The goal of this trait is , using makeTree,
* to browse a tree to
@@ -19,7 +19,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
val global: Global
import global._
- def makeTree(rhs: Tree): Option[TreeEntity] = {
+ def makeTree(rhs: Tree): TreeEntity = {
var expr = new StringBuilder
var refs = new immutable.TreeMap[Int, (Entity, Int)] // start, (Entity to be linked to , end)
@@ -52,7 +52,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
if (asym.isSetter) asym = asym.getter(asym.owner)
makeTemplate(asym.owner) match {
case docTmpl: DocTemplateImpl =>
- val mbrs: List[MemberImpl] = makeMember(asym,docTmpl)
+ val mbrs: Option[MemberImpl] = findMember(asym, docTmpl)
mbrs foreach { mbr => refs += ((start, (mbr,end))) }
case _ =>
}
@@ -80,17 +80,16 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
traverser.traverse(rhs)
- Some(new TreeEntity {
+ new TreeEntity {
val expression = expr.toString
val refEntity = refs
- })
+ }
}
- case pos: OffsetPosition =>
- Some(new TreeEntity {
+ case _ =>
+ new TreeEntity {
val expression = rhs.toString
val refEntity = new immutable.TreeMap[Int, (Entity, Int)]
- })
- case _ => None
+ }
}
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala b/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala
index 67e955f..cf5c1fb 100644
--- a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Manohar Jonnalagedda
*/
@@ -9,7 +9,6 @@ package model
import scala.collection._
-
/** A type. Note that types and templates contain the same information only for the simplest types. For example, a type
* defines how a template's type parameters are instantiated (as in `List[Cow]`), what the template's prefix is
* (as in `johnsFarm.Cow`), and supports compound or structural types. */
@@ -21,9 +20,8 @@ abstract class TypeEntity {
/** Maps which parts of this type's name reference entities. The map is indexed by the position of the first
* character that reference some entity, and contains the entity and the position of the last referenced
* character. The referenced character ranges do not to overlap or nest. The map is sorted by position. */
- def refEntity: SortedMap[Int, (TemplateEntity, Int)]
+ def refEntity: SortedMap[Int, (base.LinkTo, Int)]
/** The human-readable representation of this type. */
override def toString = name
-
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala b/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala
index 781ff11..f712869 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Gilles Dubochet
*/
diff --git a/src/compiler/scala/tools/nsc/doc/model/Visibility.scala b/src/compiler/scala/tools/nsc/doc/model/Visibility.scala
index 657d34e..2258080 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Visibility.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/Visibility.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Gilles Dubochet
*/
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala
deleted file mode 100644
index ef4047c..0000000
--- a/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
- * @author Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package model
-package comment
-
-import scala.collection._
-
-import java.net.URL
-
-/** A body of text. A comment has a single body, which is composed of
- * at least one block. Inside every body is exactly one summary (see
- * [[scala.tools.nsc.doc.model.comment.Summary]]). */
-final case class Body(blocks: Seq[Block]) {
-
- /** The summary text of the comment body. */
- lazy val summary: Option[Inline] = {
- def summaryInBlock(block: Block): Seq[Inline] = block match {
- case Title(text, _) => summaryInInline(text)
- case Paragraph(text) => summaryInInline(text)
- case UnorderedList(items) => items flatMap summaryInBlock
- case OrderedList(items, _) => items flatMap summaryInBlock
- case DefinitionList(items) => items.values.toSeq flatMap summaryInBlock
- case _ => Nil
- }
- def summaryInInline(text: Inline): Seq[Inline] = text match {
- case Summary(text) => List(text)
- case Chain(items) => items flatMap summaryInInline
- case Italic(text) => summaryInInline(text)
- case Bold(text) => summaryInInline(text)
- case Underline(text) => summaryInInline(text)
- case Superscript(text) => summaryInInline(text)
- case Subscript(text) => summaryInInline(text)
- case Link(_, title) => summaryInInline(title)
- case _ => Nil
- }
- (blocks flatMap { summaryInBlock(_) }).toList match {
- case Nil => None
- case inline :: Nil => Some(inline)
- case inlines => Some(Chain(inlines))
- }
- }
-
-}
-
-/** A block-level element of text, such as a paragraph or code block. */
-sealed abstract class Block
-
-final case class Title(text: Inline, level: Int) extends Block
-final case class Paragraph(text: Inline) extends Block
-final case class Code(data: String) extends Block
-final case class UnorderedList(items: Seq[Block]) extends Block
-final case class OrderedList(items: Seq[Block], style: String) extends Block
-final case class DefinitionList(items: SortedMap[Inline, Block]) extends Block
-final case class HorizontalRule() extends Block
-
-/** An section of text inside a block, possibly with formatting. */
-sealed abstract class Inline
-
-final case class Chain(items: Seq[Inline]) extends Inline
-final case class Italic(text: Inline) extends Inline
-final case class Bold(text: Inline) extends Inline
-final case class Underline(text: Inline) extends Inline
-final case class Superscript(text: Inline) extends Inline
-final case class Subscript(text: Inline) extends Inline
-final case class Link(target: String, title: Inline) extends Inline
-final case class EntityLink(target: TemplateEntity) extends Inline
-final case class Monospace(text: Inline) extends Inline
-final case class Text(text: String) extends Inline
-final case class HtmlTag(data: String) extends Inline {
- def canClose(open: HtmlTag) = {
- open.data.stripPrefix("<") == data.stripPrefix("</")
- }
-
- def close = {
- if (data.indexOf("</") == -1)
- Some(HtmlTag("</" + data.stripPrefix("<")))
- else
- None
- }
-}
-
-/** The summary of a comment, usually its first sentence. There must be exactly one summary per body. */
-final case class Summary(text: Inline) extends Inline
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
deleted file mode 100644
index 914275d..0000000
--- a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
+++ /dev/null
@@ -1,117 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
- * @author Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package model
-package comment
-
-import scala.collection._
-
-/** A Scaladoc comment and all its tags.
- *
- * '''Note:''' the only instantiation site of this class is in [[CommentFactory]].
- *
- * @author Manohar Jonnalagedda
- * @author Gilles Dubochet */
-abstract class Comment {
-
- /** The main body of the comment that describes what the entity does and is. */
- def body: Body
-
- private def closeHtmlTags(inline: Inline) = {
- val stack = mutable.ListBuffer.empty[HtmlTag]
- def scan(i: Inline) {
- i match {
- case Chain(list) =>
- list foreach scan
- case tag: HtmlTag => {
- if (stack.length > 0 && tag.canClose(stack.last)) {
- stack.remove(stack.length-1)
- } else {
- tag.close match {
- case Some(t) =>
- stack += t
- case None =>
- ;
- }
- }
- }
- case _ =>
- ;
- }
- }
- scan(inline)
- Chain(List(inline) ++ stack.reverse)
- }
-
- /** A shorter version of the body. Usually, this is the first sentence of the body. */
- def short: Inline = {
- body.summary match {
- case Some(s) =>
- closeHtmlTags(s)
- case _ =>
- Text("")
- }
- }
-
- /** A list of authors. The empty list is used when no author is defined. */
- def authors: List[Body]
-
- /** A list of other resources to see, including links to other entities or
- * to external documentation. The empty list is used when no other resource
- * is mentionned. */
- def see: List[Body]
-
- /** A description of the result of the entity. Typically, this provides additional
- * information on the domain of the result, contractual post-conditions, etc. */
- def result: Option[Body]
-
- /** A map of exceptions that the entity can throw when accessed, and a
- * description of what they mean. */
- def throws: Map[String, Body]
-
- /** A map of value parameters, and a description of what they are. Typically,
- * this provides additional information on the domain of the parameters,
- * contractual pre-conditions, etc. */
- def valueParams: Map[String, Body]
-
- /** A map of type parameters, and a description of what they are. Typically,
- * this provides additional information on the domain of the parameters. */
- def typeParams: Map[String, Body]
-
- /** The version number of the entity. There is no formatting or further
- * meaning attached to this value. */
- def version: Option[Body]
-
- /** A version number of a containing entity where this member-entity was introduced. */
- def since: Option[Body]
-
- /** An annotation as to expected changes on this entity. */
- def todo: List[Body]
-
- /** Whether the entity is deprecated. Using the `@deprecated` Scala attribute
- * is prefereable to using this Scaladoc tag. */
- def deprecated: Option[Body]
-
- /** An additional note concerning the contract of the entity. */
- def note: List[Body]
-
- /** A usage example related to the entity. */
- def example: List[Body]
-
- /** The comment as it appears in the source text. */
- def source: Option[String]
-
- /** A description for the primary constructor */
- def constructor: Option[Body]
-
- override def toString =
- body.toString + "\n" +
- (authors map ("@author " + _.toString)).mkString("\n") +
- (result map ("@return " + _.toString)).mkString("\n") +
- (version map ("@version " + _.toString)).mkString
-
-}
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
deleted file mode 100644
index ea36eb0..0000000
--- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
+++ /dev/null
@@ -1,955 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
- * @author Manohar Jonnalagedda
- */
-
-package scala.tools.nsc
-package doc
-package model
-package comment
-
-import reporters.Reporter
-import scala.collection._
-import scala.util.matching.Regex
-import scala.annotation.switch
-import util.{NoPosition, Position}
-
-/** The comment parser transforms raw comment strings into `Comment` objects.
- * Call `parse` to run the parser. Note that the parser is stateless and
- * should only be built once for a given Scaladoc run.
- *
- * @param reporter The reporter on which user messages (error, warnings) should be printed.
- *
- * @author Manohar Jonnalagedda
- * @author Gilles Dubochet */
-trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
-
- val global: Global
- import global.{ reporter, definitions }
-
- protected val commentCache = mutable.HashMap.empty[(global.Symbol, TemplateImpl), Comment]
-
- def addCommentBody(sym: global.Symbol, inTpl: => TemplateImpl, docStr: String, docPos: global.Position): global.Symbol = {
- commentCache += (sym, inTpl) -> parse(docStr, docStr, docPos)
- sym
- }
-
- def comment(sym: global.Symbol, inTpl: => DocTemplateImpl): Option[Comment] = {
- val key = (sym, inTpl)
- if (commentCache isDefinedAt key)
- Some(commentCache(key))
- else { // not reached for use-case comments
- val c = defineComment(sym, inTpl)
- if (c isDefined) commentCache += (sym, inTpl) -> c.get
- c
- }
- }
-
- /** A comment is usualy created by the parser, however for some special
- * cases we have to give some `inTpl` comments (parent class for example)
- * to the comment of the symbol.
- * This function manages some of those cases : Param accessor and Primary constructor */
- def defineComment(sym: global.Symbol, inTpl: => DocTemplateImpl):Option[Comment] = {
-
- //param accessor case
- // We just need the @param argument, we put it into the body
- if( sym.isParamAccessor &&
- inTpl.comment.isDefined &&
- inTpl.comment.get.valueParams.isDefinedAt(sym.encodedName)) {
- val comContent = Some(inTpl.comment.get.valueParams(sym.encodedName))
- Some(createComment(body0 = comContent))
- }
-
- // Primary constructor case
- // We need some content of the class definition : @constructor for the body,
- // @param and @deprecated, we can add some more if necessary
- else if (sym.isPrimaryConstructor && inTpl.comment.isDefined ) {
- val tplComment = inTpl.comment.get
- // If there is nothing to put into the comment there is no need to create it
- if(tplComment.constructor.isDefined ||
- tplComment.throws != Map.empty ||
- tplComment.valueParams != Map.empty ||
- tplComment.typeParams != Map.empty ||
- tplComment.deprecated.isDefined
- )
- Some(createComment( body0 = tplComment.constructor,
- throws0 = tplComment.throws,
- valueParams0 = tplComment.valueParams,
- typeParams0 = tplComment.typeParams,
- deprecated0 = tplComment.deprecated
- ))
- else None
- }
-
- //other comment cases
- // parse function will make the comment
- else {
- val rawComment = global.expandedDocComment(sym, inTpl.sym).trim
- if (rawComment != "") {
- val c = parse(rawComment, global.rawDocComment(sym), global.docCommentPos(sym))
- Some(c)
- }
- else None
- }
-
- }
-
- /* Creates comments with necessary arguments */
- def createComment (
- body0: Option[Body] = None,
- authors0: List[Body] = List.empty,
- see0: List[Body] = List.empty,
- result0: Option[Body] = None,
- throws0: Map[String,Body] = Map.empty,
- valueParams0: Map[String,Body] = Map.empty,
- typeParams0: Map[String,Body] = Map.empty,
- version0: Option[Body] = None,
- since0: Option[Body] = None,
- todo0: List[Body] = List.empty,
- deprecated0: Option[Body] = None,
- note0: List[Body] = List.empty,
- example0: List[Body] = List.empty,
- constructor0: Option[Body] = None,
- source0: Option[String] = None
- ) : Comment = new Comment{
- val body = if(body0 isDefined) body0.get else Body(Seq.empty)
- val authors = authors0
- val see = see0
- val result = result0
- val throws = throws0
- val valueParams = valueParams0
- val typeParams = typeParams0
- val version = version0
- val since = since0
- val todo = todo0
- val deprecated = deprecated0
- val note = note0
- val example = example0
- val constructor = constructor0
- val source = source0
- }
-
- protected val endOfText = '\u0003'
- protected val endOfLine = '\u000A'
-
- /** Something that should not have happened, happened, and Scaladoc should exit. */
- protected def oops(msg: String): Nothing =
- throw FatalError("program logic: " + msg)
-
- /** The body of a line, dropping the (optional) start star-marker,
- * one leading whitespace and all trailing whitespace. */
- protected val CleanCommentLine =
- new Regex("""(?:\s*\*\s?)?(.*)""")
-
- /** Dangerous HTML tags that should be replaced by something safer,
- * such as wiki syntax, or that should be dropped. */
- protected val DangerousTags =
- new Regex("""<(/?(div|ol|ul|li|h[1-6]|p))( [^>]*)?/?>|<!--.*-->""")
-
- /** Maps a dangerous HTML tag to a safe wiki replacement, or an empty string
- * if it cannot be salvaged. */
- protected def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match {
- case "p" | "div" => "\n\n"
- case "h1" => "\n= "
- case "/h1" => " =\n"
- case "h2" => "\n== "
- case "/h2" => " ==\n"
- case "h3" => "\n=== "
- case "/h3" => " ===\n"
- case "h4" | "h5" | "h6" => "\n==== "
- case "/h4" | "/h5" | "/h6" => " ====\n"
- case "li" => "\n * - "
- case _ => ""
- }
-
- /** Javadoc tags that should be replaced by something useful, such as wiki
- * syntax, or that should be dropped. */
- protected val JavadocTags =
- new Regex("""\{\@(code|docRoot|inheritDoc|link|linkplain|literal|value)([^}]*)\}""")
-
- /** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. */
- protected def javadocReplacement(mtch: Regex.Match): String = mtch.group(1) match {
- case "code" => "`" + mtch.group(2) + "`"
- case "docRoot" => ""
- case "inheritDoc" => ""
- case "link" => "`" + mtch.group(2) + "`"
- case "linkplain" => "`" + mtch.group(2) + "`"
- case "literal" => mtch.group(2)
- case "value" => "`" + mtch.group(2) + "`"
- case _ => ""
- }
-
- /** Safe HTML tags that can be kept. */
- protected val SafeTags =
- new Regex("""((&\w+;)|(&#\d+;)|(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|cite|code|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>))""")
-
- protected val safeTagMarker = '\u000E'
-
- /** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */
- protected val SimpleTag =
- new Regex("""\s*@(\S+)\s+(.*)""")
-
- /** A Scaladoc tag linked to a symbol. Returns the name of the tag, the name
- * of the symbol, and the rest of the line. */
- protected val SymbolTag =
- new Regex("""\s*@(param|tparam|throws)\s+(\S*)\s*(.*)""")
-
- /** The start of a scaladoc code block */
- protected val CodeBlockStart =
- new Regex("""(.*)((?:\{\{\{)|(?:\u000E<pre(?: [^>]*)?>\u000E))(.*)""")
-
- /** The end of a scaladoc code block */
- protected val CodeBlockEnd =
- new Regex("""(.*)((?:\}\}\})|(?:\u000E</pre>\u000E))(.*)""")
-
- /** A key used for a tag map. The key is built from the name of the tag and
- * from the linked symbol if the tag has one.
- * Equality on tag keys is structural. */
- protected sealed abstract class TagKey {
- def name: String
- }
-
- protected final case class SimpleTagKey(name: String) extends TagKey
- protected final case class SymbolTagKey(name: String, symbol: String) extends TagKey
-
- /** Parses a raw comment string into a `Comment` object.
- * @param comment The expanded comment string (including start and end markers) to be parsed.
- * @param src The raw comment source string.
- * @param pos The position of the comment in source. */
- protected def parse(comment: String, src: String, pos: Position): Comment = {
-
- /** The cleaned raw comment as a list of lines. Cleaning removes comment
- * start and end markers, line start markers and unnecessary whitespace. */
- def clean(comment: String): List[String] = {
- def cleanLine(line: String): String = {
- //replaceAll removes trailing whitespaces
- line.replaceAll("""\s+$""", "") match {
- case CleanCommentLine(ctl) => ctl
- case tl => tl
- }
- }
- val strippedComment = comment.trim.stripPrefix("/*").stripSuffix("*/")
- val safeComment = DangerousTags.replaceAllIn(strippedComment, { htmlReplacement(_) })
- val javadoclessComment = JavadocTags.replaceAllIn(safeComment, { javadocReplacement(_) })
- val markedTagComment =
- SafeTags.replaceAllIn(javadoclessComment, { mtch =>
- java.util.regex.Matcher.quoteReplacement(safeTagMarker + mtch.matched + safeTagMarker)
- })
- markedTagComment.lines.toList map (cleanLine(_))
- }
-
- /** Parses a comment (in the form of a list of lines) to a `Comment`
- * instance, recursively on lines. To do so, it splits the whole comment
- * into main body and tag bodies, then runs the `WikiParser` on each body
- * before creating the comment instance.
- *
- * @param docBody The body of the comment parsed until now.
- * @param tags All tags parsed until now.
- * @param lastTagKey The last parsed tag, or `None` if the tag section hasn't started. Lines that are not tagged
- * are part of the previous tag or, if none exists, of the body.
- * @param remaining The lines that must still recursively be parsed.
- * @param inCodeBlock Whether the next line is part of a code block (in which no tags must be read). */
- def parse0 (
- docBody: String,
- tags: Map[TagKey, List[String]],
- lastTagKey: Option[TagKey],
- remaining: List[String],
- inCodeBlock: Boolean
- ): Comment = remaining match {
-
- case CodeBlockStart(before, marker, after) :: ls if (!inCodeBlock) =>
- if (before.trim != "")
- parse0(docBody, tags, lastTagKey, before :: (marker + after) :: ls, false)
- else if (after.trim != "")
- parse0(docBody, tags, lastTagKey, marker :: after :: ls, true)
- else lastTagKey match {
- case Some(key) =>
- val value =
- ((tags get key): @unchecked) match {
- case Some(b :: bs) => (b + endOfLine + marker) :: bs
- case None => oops("lastTagKey set when no tag exists for key")
- }
- parse0(docBody, tags + (key -> value), lastTagKey, ls, true)
- case None =>
- parse0(docBody + endOfLine + marker, tags, lastTagKey, ls, true)
- }
-
- case CodeBlockEnd(before, marker, after) :: ls =>
- if (before.trim != "")
- parse0(docBody, tags, lastTagKey, before :: (marker + after) :: ls, true)
- else if (after.trim != "")
- parse0(docBody, tags, lastTagKey, marker :: after :: ls, false)
- else lastTagKey match {
- case Some(key) =>
- val value =
- ((tags get key): @unchecked) match {
- case Some(b :: bs) => (b + endOfLine + "}}}") :: bs
- case None => oops("lastTagKey set when no tag exists for key")
- }
- parse0(docBody, tags + (key -> value), lastTagKey, ls, false)
- case None =>
- parse0(docBody + endOfLine + marker, tags, lastTagKey, ls, false)
- }
-
- case SymbolTag(name, sym, body) :: ls if (!inCodeBlock) =>
- val key = SymbolTagKey(name, sym)
- val value = body :: tags.getOrElse(key, Nil)
- parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
-
- case SimpleTag(name, body) :: ls if (!inCodeBlock) =>
- val key = SimpleTagKey(name)
- val value = body :: tags.getOrElse(key, Nil)
- parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
-
- case line :: ls if (lastTagKey.isDefined) =>
- val key = lastTagKey.get
- val value =
- ((tags get key): @unchecked) match {
- case Some(b :: bs) => (b + endOfLine + line) :: bs
- case None => oops("lastTagKey set when no tag exists for key")
- }
- parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock)
-
- case line :: ls =>
- val newBody = if (docBody == "") line else docBody + endOfLine + line
- parse0(newBody, tags, lastTagKey, ls, inCodeBlock)
-
- case Nil =>
-
- val bodyTags: mutable.Map[TagKey, List[Body]] =
- mutable.Map(tags mapValues {tag => tag map (parseWiki(_, pos))} toSeq: _*)
-
- def oneTag(key: SimpleTagKey): Option[Body] =
- ((bodyTags remove key): @unchecked) match {
- case Some(r :: rs) =>
- if (!rs.isEmpty) reporter.warning(pos, "Only one '@" + key.name + "' tag is allowed")
- Some(r)
- case None => None
- }
-
- def allTags(key: SimpleTagKey): List[Body] =
- (bodyTags remove key) getOrElse Nil
-
- def allSymsOneTag(key: TagKey): Map[String, Body] = {
- val keys: Seq[SymbolTagKey] =
- bodyTags.keys.toSeq flatMap {
- case stk: SymbolTagKey if (stk.name == key.name) => Some(stk)
- case stk: SimpleTagKey if (stk.name == key.name) =>
- reporter.warning(pos, "Tag '@" + stk.name + "' must be followed by a symbol name")
- None
- case _ => None
- }
- val pairs: Seq[(String, Body)] =
- for (key <- keys) yield {
- val bs = (bodyTags remove key).get
- if (bs.length > 1)
- reporter.warning(pos, "Only one '@" + key.name + "' tag for symbol " + key.symbol + " is allowed")
- (key.symbol, bs.head)
- }
- Map.empty[String, Body] ++ pairs
- }
-
- val com = createComment (
- body0 = Some(parseWiki(docBody, pos)),
- authors0 = allTags(SimpleTagKey("author")),
- see0 = allTags(SimpleTagKey("see")),
- result0 = oneTag(SimpleTagKey("return")),
- throws0 = allSymsOneTag(SimpleTagKey("throws")),
- valueParams0 = allSymsOneTag(SimpleTagKey("param")),
- typeParams0 = allSymsOneTag(SimpleTagKey("tparam")),
- version0 = oneTag(SimpleTagKey("version")),
- since0 = oneTag(SimpleTagKey("since")),
- todo0 = allTags(SimpleTagKey("todo")),
- deprecated0 = oneTag(SimpleTagKey("deprecated")),
- note0 = allTags(SimpleTagKey("note")),
- example0 = allTags(SimpleTagKey("example")),
- constructor0 = oneTag(SimpleTagKey("constructor")),
- source0 = Some(clean(src).mkString("\n"))
- )
-
- for ((key, _) <- bodyTags)
- reporter.warning(pos, "Tag '@" + key.name + "' is not recognised")
-
- com
-
- }
-
- parse0("", Map.empty, None, clean(comment), false)
-
- }
-
- /** Parses a string containing wiki syntax into a `Comment` object.
- * Note that the string is assumed to be clean:
- * - Removed Scaladoc start and end markers.
- * - Removed start-of-line star and one whitespace afterwards (if present).
- * - Removed all end-of-line whitespace.
- * - Only `endOfLine` is used to mark line endings. */
- def parseWiki(string: String, pos: Position): Body = {
- new WikiParser(string.toArray, pos).document()
- }
-
- /** TODO
- *
- * @author Ingo Maier
- * @author Manohar Jonnalagedda
- * @author Gilles Dubochet */
- protected final class WikiParser(val buffer: Array[Char], pos: Position) extends CharReader(buffer) { wiki =>
-
- var summaryParsed = false
-
- def document(): Body = {
- nextChar()
- val blocks = new mutable.ListBuffer[Block]
- while (char != endOfText)
- blocks += block()
- Body(blocks.toList)
- }
-
- /* BLOCKS */
-
- /** {{{ block ::= code | title | hrule | para }}} */
- def block(): Block = {
- if (checkSkipInitWhitespace("{{{"))
- code()
- else if (checkSkipInitWhitespace("="))
- title()
- else if (checkSkipInitWhitespace("----"))
- hrule()
- else if (checkList)
- listBlock
- else {
- para()
- }
- }
-
- /** listStyle ::= '-' spc | '1.' spc | 'I.' spc | 'i.' spc | 'A.' spc | 'a.' spc
- * Characters used to build lists and their constructors */
- protected val listStyles = Map[String, (Seq[Block] => Block)]( // TODO Should this be defined at some list companion?
- "- " -> ( UnorderedList(_) ),
- "1. " -> ( OrderedList(_,"decimal") ),
- "I. " -> ( OrderedList(_,"upperRoman") ),
- "i. " -> ( OrderedList(_,"lowerRoman") ),
- "A. " -> ( OrderedList(_,"upperAlpha") ),
- "a. " -> ( OrderedList(_,"lowerAlpha") )
- )
-
- /** Checks if the current line is formed with more than one space and one the listStyles */
- def checkList =
- (countWhitespace > 0) && (listStyles.keys exists { checkSkipInitWhitespace(_) })
-
- /** {{{
- * nListBlock ::= nLine { mListBlock }
- * nLine ::= nSpc listStyle para '\n'
- * }}}
- * Where n and m stand for the number of spaces. When `m > n`, a new list is nested. */
- def listBlock: Block = {
-
- /** Consumes one list item block and returns it, or None if the block is
- * not a list or a different list. */
- def listLine(indent: Int, style: String): Option[Block] =
- if (countWhitespace > indent && checkList)
- Some(listBlock)
- else if (countWhitespace != indent || !checkSkipInitWhitespace(style))
- None
- else {
- jumpWhitespace()
- jump(style)
- val p = Paragraph(inline(false))
- blockEnded("end of list line ")
- Some(p)
- }
-
- /** Consumes all list item blocks (possibly with nested lists) of the
- * same list and returns the list block. */
- def listLevel(indent: Int, style: String): Block = {
- val lines = mutable.ListBuffer.empty[Block]
- var line: Option[Block] = listLine(indent, style)
- while (line.isDefined) {
- lines += line.get
- line = listLine(indent, style)
- }
- val constructor = listStyles(style)
- constructor(lines)
- }
-
- val indent = countWhitespace
- val style = (listStyles.keys find { checkSkipInitWhitespace(_) }).getOrElse(listStyles.keys.head)
- listLevel(indent, style)
- }
-
- def code(): Block = {
- jumpWhitespace()
- jump("{{{")
- readUntil("}}}")
- if (char == endOfText)
- reportError(pos, "unclosed code block")
- else
- jump("}}}")
- blockEnded("code block")
- Code(getRead)
- }
-
- /** {{{ title ::= ('=' inline '=' | "==" inline "==" | ...) '\n' }}} */
- def title(): Block = {
- jumpWhitespace()
- val inLevel = repeatJump("=")
- val text = inline(check(Array.fill(inLevel)('=')))
- val outLevel = repeatJump("=", inLevel)
- if (inLevel != outLevel)
- reportError(pos, "unbalanced or unclosed heading")
- blockEnded("heading")
- Title(text, inLevel)
- }
-
- /** {{{ hrule ::= "----" { '-' } '\n' }}} */
- def hrule(): Block = {
- jumpWhitespace()
- repeatJump("-")
- blockEnded("horizontal rule")
- HorizontalRule()
- }
-
- /** {{{ para ::= inline '\n' }}} */
- def para(): Block = {
- val p =
- if (summaryParsed)
- Paragraph(inline(false))
- else {
- val s = summary()
- val r =
- if (checkParaEnded) List(s) else List(s, inline(false))
- summaryParsed = true
- Paragraph(Chain(r))
- }
- while (char == endOfLine && char != endOfText)
- nextChar()
- p
- }
-
- /* INLINES */
-
- val OPEN_TAG = "^<([A-Za-z]+)( [^>]*)?(/?)>$".r
- val CLOSE_TAG = "^</([A-Za-z]+)>$".r
- private def readHTMLFrom(begin: HtmlTag): String = {
- val list = mutable.ListBuffer.empty[String]
- val stack = mutable.ListBuffer.empty[String]
-
- begin.close match {
- case Some(HtmlTag(CLOSE_TAG(s))) =>
- stack += s
- case _ =>
- return ""
- }
-
- do {
- readUntil { char == safeTagMarker || char == endOfText }
- val str = getRead()
- nextChar()
-
- list += str
-
- str match {
- case OPEN_TAG(s, _, standalone) => {
- if (standalone != "/") {
- stack += s
- }
- }
- case CLOSE_TAG(s) => {
- if (s == stack.last) {
- stack.remove(stack.length-1)
- }
- }
- case _ => ;
- }
- } while (stack.length > 0 && char != endOfText)
-
- list mkString ""
- }
-
- def inline(isInlineEnd: => Boolean): Inline = {
-
- def inline0(): Inline = {
- if (char == safeTagMarker) {
- val tag = htmlTag()
- HtmlTag(tag.data + readHTMLFrom(tag))
- }
- else if (check("'''")) bold()
- else if (check("''")) italic()
- else if (check("`")) monospace()
- else if (check("__")) underline()
- else if (check("^")) superscript()
- else if (check(",,")) subscript()
- else if (check("[[")) link()
- else {
- readUntil { char == safeTagMarker || check("''") || char == '`' || check("__") || char == '^' || check(",,") || check("[[") || isInlineEnd || checkParaEnded || char == endOfLine }
- Text(getRead())
- }
- }
-
- val inlines: List[Inline] = {
- val iss = mutable.ListBuffer.empty[Inline]
- iss += inline0()
- while (!isInlineEnd && !checkParaEnded) {
- val skipEndOfLine = if (char == endOfLine) {
- nextChar()
- true
- } else {
- false
- }
-
- val current = inline0()
- (iss.last, current) match {
- case (Text(t1), Text(t2)) if skipEndOfLine =>
- iss.update(iss.length - 1, Text(t1 + endOfLine + t2))
- case (i1, i2) if skipEndOfLine =>
- iss ++= List(Text(endOfLine.toString), i2)
- case _ => iss += current
- }
- }
- iss.toList
- }
-
- inlines match {
- case Nil => Text("")
- case i :: Nil => i
- case is => Chain(is)
- }
-
- }
-
- def htmlTag(): HtmlTag = {
- jump(safeTagMarker)
- readUntil(safeTagMarker)
- if (char != endOfText) jump(safeTagMarker)
- var read = getRead
- HtmlTag(read)
- }
-
- def bold(): Inline = {
- jump("'''")
- val i = inline(check("'''"))
- jump("'''")
- Bold(i)
- }
-
- def italic(): Inline = {
- jump("''")
- val i = inline(check("''"))
- jump("''")
- Italic(i)
- }
-
- def monospace(): Inline = {
- jump("`")
- val i = inline(check("`"))
- jump("`")
- Monospace(i)
- }
-
- def underline(): Inline = {
- jump("__")
- val i = inline(check("__"))
- jump("__")
- Underline(i)
- }
-
- def superscript(): Inline = {
- jump("^")
- val i = inline(check("^"))
- if (jump("^")) {
- Superscript(i)
- } else {
- Chain(Seq(Text("^"), i))
- }
- }
-
- def subscript(): Inline = {
- jump(",,")
- val i = inline(check(",,"))
- jump(",,")
- Subscript(i)
- }
-
- def summary(): Inline = {
- val i = inline(check("."))
- Summary(
- if (jump("."))
- Chain(List(i, Text(".")))
- else
- i
- )
- }
-
- def entityLink(query: String): Inline = findTemplate(query) match {
- case Some(tpl) =>
- EntityLink(tpl)
- case None =>
- Text(query)
- }
-
- def link(): Inline = {
- val SchemeUri = """([^:]+:.*)""".r
- jump("[[")
- readUntil { check("]]") || check(" ") }
- val target = getRead()
- val title =
- if (!check("]]")) Some({
- jump(" ")
- inline(check("]]"))
- })
- else None
- jump("]]")
-
- (target, title) match {
- case (SchemeUri(uri), optTitle) =>
- Link(uri, optTitle getOrElse Text(uri))
- case (qualName, optTitle) =>
- optTitle foreach (text => reportError(pos, "entity link to " + qualName + " cannot have a custom title'" + text + "'"))
- // XXX rather than warning here we should allow unqualified names
- // to refer to members of the same package. The "package exists"
- // exclusion is because [[scala]] is used in some scaladoc.
- if (!qualName.contains(".") && !definitions.packageExists(qualName))
- reportError(pos, "entity link to " + qualName + " should be a fully qualified name")
-
- entityLink(qualName)
- }
- }
-
- /* UTILITY */
-
- /** {{{ eol ::= { whitespace } '\n' }}} */
- def blockEnded(blockType: String): Unit = {
- if (char != endOfLine && char != endOfText) {
- reportError(pos, "no additional content on same line after " + blockType)
- jumpUntil(endOfLine)
- }
- while (char == endOfLine)
- nextChar()
- }
-
- def checkParaEnded(): Boolean = {
- (char == endOfText) ||
- ((char == endOfLine) && {
- val poff = offset
- val pc = char
- nextChar() // read EOL
- val ok = {
- checkSkipInitWhitespace(Array(endOfLine)) ||
- checkSkipInitWhitespace(Array('=')) ||
- checkSkipInitWhitespace(Array('{', '{', '{')) ||
- checkList ||
- checkSkipInitWhitespace(Array('\u003D'))
- }
- offset = poff
- char = pc
- ok
- })
- }
-
- def reportError(pos: Position, message: String) {
- reporter.warning(pos, message)
- }
- }
-
- protected sealed class CharReader(buffer: Array[Char]) { reader =>
-
- var char: Char = _
- var offset: Int = 0
-
- final def nextChar() {
- if (offset >= buffer.length)
- char = endOfText
- else {
- char = buffer(offset)
- offset += 1
- }
- }
-
- implicit def strintToChars(s: String): Array[Char] = s.toArray
-
- def store(body: => Unit): String = {
- val pre = offset
- body
- val post = offset
- buffer.toArray.slice(pre, post).toString
- }
-
- final def check(chars: Array[Char]): Boolean = {
- val poff = offset
- val pc = char
- val ok = jump(chars)
- offset = poff
- char = pc
- ok
- }
-
- def checkSkipInitWhitespace(chars: Array[Char]): Boolean = {
- val poff = offset
- val pc = char
- jumpWhitespace()
- val (ok0, chars0) =
- if (chars.head == ' ')
- (offset > poff, chars.tail)
- else
- (true, chars)
- val ok = ok0 && jump(chars0)
- offset = poff
- char = pc
- ok
- }
-
- def countWhitespace: Int = {
- var count = 0
- val poff = offset
- val pc = char
- while (isWhitespace(char) && char != endOfText) {
- nextChar()
- count += 1
- }
- offset = poff
- char = pc
- count
- }
-
- /* JUMPERS */
-
- /** jumps a character and consumes it
- * @return true only if the correct character has been jumped */
- final def jump(ch: Char): Boolean = {
- if (char == ch) {
- nextChar()
- true
- }
- else false
- }
-
- /** jumps all the characters in chars, consuming them in the process.
- * @return true only if the correct characters have been jumped */
- final def jump(chars: Array[Char]): Boolean = {
- var index = 0
- while (index < chars.length && char == chars(index) && char != endOfText) {
- nextChar()
- index += 1
- }
- index == chars.length
- }
-
- final def checkedJump(chars: Array[Char]): Boolean = {
- val poff = offset
- val pc = char
- val ok = jump(chars)
- if (!ok) {
- offset = poff
- char = pc
- }
- ok
- }
-
- final def repeatJump(chars: Array[Char], max: Int): Int = {
- var count = 0
- var more = true
- while (more && count < max) {
- if (!checkedJump(chars))
- more = false
- else
- count += 1
- }
- count
- }
-
- final def repeatJump(chars: Array[Char]): Int = {
- var count = 0
- var more = true
- while (more) {
- if (!checkedJump(chars))
- more = false
- else
- count += 1
- }
- count
- }
-
- final def jumpUntil(ch: Char): Int = {
- var count = 0
- while (char != ch && char != endOfText) {
- nextChar()
- count += 1
- }
- count
- }
-
- final def jumpUntil(chars: Array[Char]): Int = {
- assert(chars.length > 0)
- var count = 0
- val c = chars(0)
- while (!check(chars) && char != endOfText) {
- nextChar()
- while (char != c && char != endOfText) {
- nextChar()
- count += 1
- }
- }
- count
- }
-
- final def jumpUntil(pred: => Boolean): Int = {
- var count = 0
- while (!pred && char != endOfText) {
- nextChar()
- count += 1
- }
- count
- }
-
- def jumpWhitespace() = jumpUntil(!isWhitespace(char))
-
- /* READERS */
-
- private val readBuilder = new mutable.StringBuilder
-
- final def getRead(): String = {
- val bld = readBuilder.toString
- readBuilder.clear()
- if (bld.length < 6) bld.intern else bld
- }
-
- final def readUntil(ch: Char): Int = {
- var count = 0
- while (char != ch && char != endOfText) {
- readBuilder += char
- nextChar()
- }
- count
- }
-
- final def readUntil(chars: Array[Char]): Int = {
- assert(chars.length > 0)
- var count = 0
- val c = chars(0)
- while (!check(chars) && char != endOfText) {
- readBuilder += char
- nextChar()
- while (char != c && char != endOfText) {
- readBuilder += char
- nextChar()
- }
- }
- count
- }
-
- final def readUntil(pred: => Boolean): Int = {
- var count = 0
- while (!pred && char != endOfText) {
- readBuilder += char
- nextChar()
- }
- count
- }
-
- /* CHARS CLASSES */
-
- def isWhitespace(c: Char) = c == ' ' || c == '\t'
-
- }
-
-}
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala
new file mode 100644
index 0000000..c2aa1f1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala
@@ -0,0 +1,146 @@
+package scala.tools.nsc.doc
+package model
+package diagram
+
+import model._
+
+/**
+ * The diagram base classes
+ *
+ * @author Damien Obrist
+ * @author Vlad Ureche
+ */
+abstract class Diagram {
+ def nodes: List[Node]
+ def edges: List[(Node, List[Node])]
+ def isContentDiagram = false // Implemented by ContentDiagram
+ def isInheritanceDiagram = false // Implemented by InheritanceDiagram
+ def depthInfo: DepthInfo
+}
+
+case class ContentDiagram(nodes:List[/*Class*/Node], edges:List[(Node, List[Node])]) extends Diagram {
+ override def isContentDiagram = true
+ lazy val depthInfo = new ContentDiagramDepth(this)
+}
+
+/** A class diagram */
+case class InheritanceDiagram(thisNode: ThisNode,
+ superClasses: List[/*Class*/Node],
+ subClasses: List[/*Class*/Node],
+ incomingImplicits: List[ImplicitNode],
+ outgoingImplicits: List[ImplicitNode]) extends Diagram {
+ def nodes = thisNode :: superClasses ::: subClasses ::: incomingImplicits ::: outgoingImplicits
+ def edges = (thisNode -> (superClasses ::: outgoingImplicits)) ::
+ (subClasses ::: incomingImplicits).map(_ -> List(thisNode))
+
+ override def isInheritanceDiagram = true
+ lazy val depthInfo = new DepthInfo {
+ def maxDepth = 3
+ def nodeDepth(node: Node) =
+ if (node == thisNode) 1
+ else if (superClasses.contains(node)) 0
+ else if (subClasses.contains(node)) 2
+ else if (incomingImplicits.contains(node) || outgoingImplicits.contains(node)) 1
+ else -1
+ }
+}
+
+trait DepthInfo {
+ /** Gives the maximum depth */
+ def maxDepth: Int
+ /** Gives the depth of any node in the diagram or -1 if the node is not in the diagram */
+ def nodeDepth(node: Node): Int
+}
+
+abstract class Node {
+ def name = tpe.name
+ def tpe: TypeEntity
+ def tpl: Option[TemplateEntity]
+ /** shortcut to get a DocTemplateEntity */
+ def doctpl: Option[DocTemplateEntity] = tpl match {
+ case Some(tpl) => tpl match {
+ case d: DocTemplateEntity => Some(d)
+ case _ => None
+ }
+ case _ => None
+ }
+ /* shortcuts to find the node type without matching */
+ def isThisNode = false
+ def isNormalNode = false
+ def isClassNode = if (tpl.isDefined) (tpl.get.isClass || tpl.get.qualifiedName == "scala.AnyRef") else false
+ def isTraitNode = if (tpl.isDefined) tpl.get.isTrait else false
+ def isObjectNode= if (tpl.isDefined) tpl.get.isObject else false
+ def isTypeNode = if (doctpl.isDefined) doctpl.get.isAbstractType || doctpl.get.isAliasType else false
+ def isOtherNode = !(isClassNode || isTraitNode || isObjectNode || isTypeNode)
+ def isImplicitNode = false
+ def isOutsideNode = false
+ def tooltip: Option[String]
+}
+
+// different matchers, allowing you to use the pattern matcher against any node
+// NOTE: A ThisNode or ImplicitNode can at the same time be ClassNode/TraitNode/OtherNode, not exactly according to
+// case class specification -- thus a complete match would be:
+// node match {
+// case ThisNode(tpe, _) => /* case for this node, you can still use .isClass, .isTrait and .isOther */
+// case ImplicitNode(tpe, _) => /* case for an implicit node, you can still use .isClass, .isTrait and .isOther */
+// case _ => node match {
+// case ClassNode(tpe, _) => /* case for a non-this, non-implicit Class node */
+// case TraitNode(tpe, _) => /* case for a non-this, non-implicit Trait node */
+// case OtherNode(tpe, _) => /* case for a non-this, non-implicit Other node */
+// }
+// }
+object Node { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = Some((n.tpe, n.tpl)) }
+object ClassNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isClassNode) Some((n.tpe, n.tpl)) else None }
+object TraitNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isTraitNode) Some((n.tpe, n.tpl)) else None }
+object TypeNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isTypeNode) Some((n.tpe, n.tpl)) else None }
+object ObjectNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isObjectNode) Some((n.tpe, n.tpl)) else None }
+object OutsideNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOutsideNode) Some((n.tpe, n.tpl)) else None }
+object OtherNode { def unapply(n: Node): Option[(TypeEntity, Option[TemplateEntity])] = if (n.isOtherNode) Some((n.tpe, n.tpl)) else None }
+
+
+
+/** The node for the current class */
+case class ThisNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isThisNode = true }
+
+/** The usual node */
+case class NormalNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isNormalNode = true }
+
+/** A class or trait the thisnode can be converted to by an implicit conversion
+ * TODO: I think it makes more sense to use the tpe links to templates instead of the TemplateEntity for implicit nodes
+ * since some implicit conversions convert the class to complex types that cannot be represented as a single tmeplate
+ */
+case class ImplicitNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isImplicitNode = true }
+
+/** An outside node is shown in packages when a class from a different package makes it to the package diagram due to
+ * its relation to a class in the template (see @contentDiagram hideInheritedNodes annotation) */
+case class OutsideNode(tpe: TypeEntity, tpl: Option[TemplateEntity])(val tooltip: Option[String] = None) extends Node { override def isOutsideNode = true }
+
+
+// Computing and offering node depth information
+class ContentDiagramDepth(pack: ContentDiagram) extends DepthInfo {
+ private[this] var _maxDepth = 0
+ private[this] var _nodeDepth = Map[Node, Int]()
+ private[this] var seedNodes = Set[Node]()
+ private[this] val invertedEdges: Map[Node, List[Node]] =
+ pack.edges.flatMap({case (node: Node, outgoing: List[Node]) => outgoing.map((_, node))}).groupBy(_._1).map({case (k, values) => (k, values.map(_._2))}).withDefaultValue(Nil)
+ private[this] val directEdges: Map[Node, List[Node]] = pack.edges.toMap.withDefaultValue(Nil)
+
+ // seed base nodes, to minimize noise - they can't all have parents, else there would only be cycles
+ seedNodes ++= pack.nodes.filter(directEdges(_).isEmpty)
+
+ while (!seedNodes.isEmpty) {
+ var newSeedNodes = Set[Node]()
+ for (node <- seedNodes) {
+ val depth = 1 + (-1 :: directEdges(node).map(_nodeDepth.getOrElse(_, -1))).max
+ if (depth != _nodeDepth.getOrElse(node, -1)) {
+ _nodeDepth += (node -> depth)
+ newSeedNodes ++= invertedEdges(node)
+ if (depth > _maxDepth) _maxDepth = depth
+ }
+ }
+ seedNodes = newSeedNodes
+ }
+
+ val maxDepth = _maxDepth
+ def nodeDepth(node: Node) = _nodeDepth.getOrElse(node, -1)
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
new file mode 100644
index 0000000..cd60865
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
@@ -0,0 +1,261 @@
+package scala.tools.nsc.doc
+package model
+package diagram
+
+import model._
+import java.util.regex.{Pattern, Matcher}
+import scala.util.matching.Regex
+
+// statistics
+import html.page.diagram.DiagramStats
+
+/**
+ * This trait takes care of parsing @{inheritance, content}Diagram annotations
+ *
+ * @author Damien Obrist
+ * @author Vlad Ureche
+ */
+trait DiagramDirectiveParser {
+ this: ModelFactory with DiagramFactory with CommentFactory with TreeFactory =>
+
+ import this.global.definitions.AnyRefClass
+
+ ///// DIAGRAM FILTERS //////////////////////////////////////////////////////////////////////////////////////////////
+
+ /**
+ * The DiagramFilter trait directs the diagram engine about the way the diagram should be displayed
+ *
+ * Vlad: There's an explanation I owe to people using diagrams and not finding a way to hide a specific class from
+ * all diagrams at once. So why did I choose to allow you to only control the diagrams at class level? So, the
+ * reason is you would break the separate scaladoc compilation:
+ * If you have an "@diagram hideMyClass" annotation in class A and you run scaladoc on it along with its subclass B
+ * A will not appear in B's diagram. But if you scaladoc only on B, A's comment will not be parsed and the
+ * instructions to hide class A from all diagrams will not be available. Thus I prefer to force you to control the
+ * diagrams of each class locally. The problem does not appear with scalac, as scalac stores all its necessary
+ * information (like scala signatures) serialized in the .class file. But we couldn't store doc comments in the class
+ * file, could we? (Turns out we could, but that's another story)
+ *
+ * Any flaming for this decision should go to scala-internals at googlegroups.com
+ */
+ trait DiagramFilter {
+ /** A flag to hide the diagram completely */
+ def hideDiagram: Boolean
+ /** Hide incoming implicit conversions (for type hierarchy diagrams) */
+ def hideIncomingImplicits: Boolean
+ /** Hide outgoing implicit conversions (for type hierarchy diagrams) */
+ def hideOutgoingImplicits: Boolean
+ /** Hide superclasses (for type hierarchy diagrams) */
+ def hideSuperclasses: Boolean
+ /** Hide subclasses (for type hierarchy diagrams) */
+ def hideSubclasses: Boolean
+ /** Show related classes from other objects/traits/packages (for content diagrams) */
+ def hideInheritedNodes: Boolean
+ /** Hide a node from the diagram */
+ def hideNode(clazz: Node): Boolean
+ /** Hide an edge from the diagram */
+ def hideEdge(clazz1: Node, clazz2: Node): Boolean
+ }
+
+ /** Main entry point into this trait: generate the filter for inheritance diagrams */
+ def makeInheritanceDiagramFilter(template: DocTemplateImpl): DiagramFilter = {
+
+ val defaultFilter =
+ if (template.isClass || template.isTrait || template.sym == AnyRefClass)
+ FullDiagram
+ else
+ NoDiagramAtAll
+
+ if (template.comment.isDefined)
+ makeDiagramFilter(template, template.comment.get.inheritDiagram, defaultFilter, true)
+ else
+ defaultFilter
+ }
+
+ /** Main entry point into this trait: generate the filter for content diagrams */
+ def makeContentDiagramFilter(template: DocTemplateImpl): DiagramFilter = {
+ val defaultFilter = if (template.isPackage || template.isObject) FullDiagram else NoDiagramAtAll
+ if (template.comment.isDefined)
+ makeDiagramFilter(template, template.comment.get.contentDiagram, defaultFilter, false)
+ else
+ defaultFilter
+ }
+
+ protected var tFilter = 0l
+ protected var tModel = 0l
+
+ /** Show the entire diagram, no filtering */
+ case object FullDiagram extends DiagramFilter {
+ val hideDiagram: Boolean = false
+ val hideIncomingImplicits: Boolean = false
+ val hideOutgoingImplicits: Boolean = false
+ val hideSuperclasses: Boolean = false
+ val hideSubclasses: Boolean = false
+ val hideInheritedNodes: Boolean = false
+ def hideNode(clazz: Node): Boolean = false
+ def hideEdge(clazz1: Node, clazz2: Node): Boolean = false
+ }
+
+ /** Hide the diagram completely, no need for special filtering */
+ case object NoDiagramAtAll extends DiagramFilter {
+ val hideDiagram: Boolean = true
+ val hideIncomingImplicits: Boolean = true
+ val hideOutgoingImplicits: Boolean = true
+ val hideSuperclasses: Boolean = true
+ val hideSubclasses: Boolean = true
+ val hideInheritedNodes: Boolean = true
+ def hideNode(clazz: Node): Boolean = true
+ def hideEdge(clazz1: Node, clazz2: Node): Boolean = true
+ }
+
+ /** The AnnotationDiagramFilter trait directs the diagram engine according to an annotation
+ * TODO: Should document the annotation, for now see parseDiagramAnnotation in ModelFactory.scala */
+ case class AnnotationDiagramFilter(hideDiagram: Boolean,
+ hideIncomingImplicits: Boolean,
+ hideOutgoingImplicits: Boolean,
+ hideSuperclasses: Boolean,
+ hideSubclasses: Boolean,
+ hideInheritedNodes: Boolean,
+ hideNodesFilter: List[Pattern],
+ hideEdgesFilter: List[(Pattern, Pattern)]) extends DiagramFilter {
+
+ private[this] def getName(n: Node): String =
+ if (n.tpl.isDefined)
+ n.tpl.get.qualifiedName
+ else
+ n.name
+
+ def hideNode(clazz: Node): Boolean = {
+ val qualifiedName = getName(clazz)
+ for (hideFilter <- hideNodesFilter)
+ if (hideFilter.matcher(qualifiedName).matches) {
+ // println(hideFilter + ".matcher(" + qualifiedName + ").matches = " + hideFilter.matcher(qualifiedName).matches)
+ return true
+ }
+ false
+ }
+
+ def hideEdge(clazz1: Node, clazz2: Node): Boolean = {
+ val clazz1Name = getName(clazz1)
+ val clazz2Name = getName(clazz2)
+ for ((clazz1Filter, clazz2Filter) <- hideEdgesFilter) {
+ if (clazz1Filter.matcher(clazz1Name).matches &&
+ clazz2Filter.matcher(clazz2Name).matches) {
+ // println(clazz1Filter + ".matcher(" + clazz1Name + ").matches = " + clazz1Filter.matcher(clazz1Name).matches)
+ // println(clazz2Filter + ".matcher(" + clazz2Name + ").matches = " + clazz2Filter.matcher(clazz2Name).matches)
+ return true
+ }
+ }
+ false
+ }
+ }
+
+ // TODO: This could certainly be improved -- right now the only regex is *, but there's no way to match a single identifier
+ private val NodeSpecRegex = "\\\"[A-Za-z\\*][A-Za-z\\.\\*]*\\\""
+ private val NodeSpecPattern = Pattern.compile(NodeSpecRegex)
+ private val EdgeSpecRegex = "\\(" + NodeSpecRegex + "\\s*\\->\\s*" + NodeSpecRegex + "\\)"
+ private val EdgeSpecPattern = Pattern.compile(NodeSpecRegex)
+ // And the composed regexes:
+ private val HideNodesRegex = new Regex("^hideNodes(\\s*" + NodeSpecRegex + ")+$")
+ private val HideEdgesRegex = new Regex("^hideEdges(\\s*" + EdgeSpecRegex + ")+$")
+
+ private def makeDiagramFilter(template: DocTemplateImpl,
+ directives: List[String],
+ defaultFilter: DiagramFilter,
+ isInheritanceDiagram: Boolean): DiagramFilter = directives match {
+
+ // if there are no specific diagram directives, return the default filter (either FullDiagram or NoDiagramAtAll)
+ case Nil =>
+ defaultFilter
+
+ // compute the exact filters. By including the annotation, the diagram is automatically added
+ case _ =>
+ tFilter -= System.currentTimeMillis
+ var hideDiagram0: Boolean = false
+ var hideIncomingImplicits0: Boolean = false
+ var hideOutgoingImplicits0: Boolean = false
+ var hideSuperclasses0: Boolean = false
+ var hideSubclasses0: Boolean = false
+ var hideInheritedNodes0: Boolean = false
+ var hideNodesFilter0: List[Pattern] = Nil
+ var hideEdgesFilter0: List[(Pattern, Pattern)] = Nil
+
+ def warning(message: String) = {
+ // we need the position from the package object (well, ideally its comment, but yeah ...)
+ val sym = if (template.sym.isPackage) template.sym.info.member(global.nme.PACKAGE) else template.sym
+ assert((sym != global.NoSymbol) || (sym == global.definitions.RootPackage))
+ global.reporter.warning(sym.pos, message)
+ }
+
+ def preparePattern(className: String) =
+ "^" + className.stripPrefix("\"").stripSuffix("\"").replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*") + "$"
+
+ // separate entries:
+ val entries = directives.foldRight("")(_ + " " + _).split(",").map(_.trim)
+ for (entry <- entries)
+ entry match {
+ case "hideDiagram" =>
+ hideDiagram0 = true
+ case "hideIncomingImplicits" if isInheritanceDiagram =>
+ hideIncomingImplicits0 = true
+ case "hideOutgoingImplicits" if isInheritanceDiagram =>
+ hideOutgoingImplicits0 = true
+ case "hideSuperclasses" if isInheritanceDiagram =>
+ hideSuperclasses0 = true
+ case "hideSubclasses" if isInheritanceDiagram =>
+ hideSubclasses0 = true
+ case "hideInheritedNodes" if !isInheritanceDiagram =>
+ hideInheritedNodes0 = true
+ case HideNodesRegex(last) =>
+ val matcher = NodeSpecPattern.matcher(entry)
+ while (matcher.find()) {
+ val classPattern = Pattern.compile(preparePattern(matcher.group()))
+ hideNodesFilter0 ::= classPattern
+ }
+ case HideEdgesRegex(last) =>
+ val matcher = NodeSpecPattern.matcher(entry)
+ while (matcher.find()) {
+ val class1Pattern = Pattern.compile(preparePattern(matcher.group()))
+ assert(matcher.find()) // it's got to be there, just matched it!
+ val class2Pattern = Pattern.compile(preparePattern(matcher.group()))
+ hideEdgesFilter0 ::= ((class1Pattern, class2Pattern))
+ }
+ case "" =>
+ // don't need to do anything about it
+ case _ =>
+ warning("Could not understand diagram annotation in " + template.kind + " " + template.qualifiedName +
+ ": unmatched entry \"" + entry + "\".\n" +
+ " This could be because:\n" +
+ " - you forgot to separate entries by commas\n" +
+ " - you used a tag that is not allowed in the current context (like @contentDiagram hideSuperclasses)\n"+
+ " - you did not use one of the allowed tags (see docs.scala-lang.org for scaladoc annotations)")
+ }
+ val result =
+ if (hideDiagram0)
+ NoDiagramAtAll
+ else if ((hideNodesFilter0.isEmpty) &&
+ (hideEdgesFilter0.isEmpty) &&
+ (hideIncomingImplicits0 == false) &&
+ (hideOutgoingImplicits0 == false) &&
+ (hideSuperclasses0 == false) &&
+ (hideSubclasses0 == false) &&
+ (hideInheritedNodes0 == false) &&
+ (hideDiagram0 == false))
+ FullDiagram
+ else
+ AnnotationDiagramFilter(
+ hideDiagram = hideDiagram0,
+ hideIncomingImplicits = hideIncomingImplicits0,
+ hideOutgoingImplicits = hideOutgoingImplicits0,
+ hideSuperclasses = hideSuperclasses0,
+ hideSubclasses = hideSubclasses0,
+ hideInheritedNodes = hideInheritedNodes0,
+ hideNodesFilter = hideNodesFilter0,
+ hideEdgesFilter = hideEdgesFilter0)
+
+ if (settings.docDiagramsDebug.value && result != NoDiagramAtAll && result != FullDiagram)
+ settings.printMsg(template.kind + " " + template.qualifiedName + " filter: " + result)
+ tFilter += System.currentTimeMillis
+
+ result
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
new file mode 100644
index 0000000..cb54a73
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
@@ -0,0 +1,271 @@
+package scala.tools.nsc.doc
+package model
+package diagram
+
+import model._
+import scala.collection.mutable
+
+// statistics
+import html.page.diagram.DiagramStats
+
+import scala.collection.immutable.SortedMap
+
+/**
+ * This trait takes care of generating the diagram for classes and packages
+ *
+ * @author Damien Obrist
+ * @author Vlad Ureche
+ */
+trait DiagramFactory extends DiagramDirectiveParser {
+ this: ModelFactory with ModelFactoryTypeSupport with DiagramFactory with CommentFactory with TreeFactory =>
+
+ import this.global.definitions._
+ import this.global._
+
+ // the following can be used for hardcoding different relations into the diagram, for bootstrapping purposes
+ def aggregationNode(text: String) =
+ NormalNode(new TypeEntity { val name = text; val refEntity = SortedMap[Int, (base.LinkTo, Int)]() }, None)()
+
+ /** Create the inheritance diagram for this template */
+ def makeInheritanceDiagram(tpl: DocTemplateImpl): Option[Diagram] = {
+
+ tFilter = 0
+ tModel = -System.currentTimeMillis
+
+ // the diagram filter
+ val diagramFilter = makeInheritanceDiagramFilter(tpl)
+
+ def implicitTooltip(from: DocTemplateEntity, to: TemplateEntity, conv: ImplicitConversion) =
+ Some(from.qualifiedName + " can be implicitly converted to " + conv.targetType + " by the implicit method "
+ + conv.conversionShortName + " in " + conv.convertorOwner.kind + " " + conv.convertorOwner.qualifiedName)
+
+ val result =
+ if (diagramFilter == NoDiagramAtAll)
+ None
+ else {
+ // the main node
+ val thisNode = ThisNode(tpl.resultType, Some(tpl))(Some(tpl.qualifiedName + " (this " + tpl.kind + ")"))
+
+ // superclasses
+ var superclasses: List[Node] =
+ tpl.parentTypes.collect {
+ case p: (TemplateEntity, TypeEntity) if !classExcluded(p._1) => NormalNode(p._2, Some(p._1))()
+ }.reverse
+
+ // incoming implicit conversions
+ lazy val incomingImplicitNodes = tpl.incomingImplicitlyConvertedClasses.map {
+ case (incomingTpl, conv) =>
+ ImplicitNode(makeType(incomingTpl.sym.tpe, tpl), Some(incomingTpl))(implicitTooltip(from=incomingTpl, to=tpl, conv=conv))
+ }
+
+ // subclasses
+ var subclasses: List[Node] =
+ tpl.directSubClasses.collect {
+ case d: TemplateImpl if !classExcluded(d) => NormalNode(makeType(d.sym.tpe, tpl), Some(d))()
+ }.sortBy(_.tpl.get.name)(implicitly[Ordering[String]].reverse)
+
+ // outgoing implicit conversions
+ lazy val outgoingImplicitNodes = tpl.outgoingImplicitlyConvertedClasses.map {
+ case (outgoingTpl, outgoingType, conv) =>
+ ImplicitNode(outgoingType, Some(outgoingTpl))(implicitTooltip(from=tpl, to=tpl, conv=conv))
+ }
+
+ // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams.
+ // Currently, it's possible to leave nodes and edges out, but there's no way to create new nodes and edges
+ // The implementation would need to add the annotations and the logic to select nodes (or create new ones)
+ // and add edges to the diagram -- I bet it wouldn't take too long for someone to do it (one or two days
+ // at most) and it would be a great add to the diagrams.
+ if (tpl.sym == AnyRefClass)
+ subclasses = List(aggregationNode("All user-defined classes and traits"))
+
+ val filteredSuperclasses = if (diagramFilter.hideSuperclasses) Nil else superclasses
+ val filteredIncomingImplicits = if (diagramFilter.hideIncomingImplicits) Nil else incomingImplicitNodes
+ val filteredSubclasses = if (diagramFilter.hideSubclasses) Nil else subclasses
+ val filteredImplicitOutgoingNodes = if (diagramFilter.hideOutgoingImplicits) Nil else outgoingImplicitNodes
+
+ // final diagram filter
+ filterDiagram(InheritanceDiagram(thisNode, filteredSuperclasses.reverse, filteredSubclasses.reverse, filteredIncomingImplicits, filteredImplicitOutgoingNodes), diagramFilter)
+ }
+
+ tModel += System.currentTimeMillis
+ DiagramStats.addFilterTime(tFilter)
+ DiagramStats.addModelTime(tModel-tFilter)
+
+ result
+ }
+
+ /** Create the content diagram for this template */
+ def makeContentDiagram(pack: DocTemplateImpl): Option[Diagram] = {
+
+ tFilter = 0
+ tModel = -System.currentTimeMillis
+
+ // the diagram filter
+ val diagramFilter = makeContentDiagramFilter(pack)
+
+ val result =
+ if (diagramFilter == NoDiagramAtAll)
+ None
+ else {
+ var mapNodes = Map[TemplateEntity, Node]()
+ var nodesShown = Set[TemplateEntity]()
+ var edgesAll = List[(TemplateEntity, List[TemplateEntity])]()
+
+ // classes is the entire set of classes and traits in the package, they are the superset of nodes in the diagram
+ // we collect classes, traits and objects without a companion, which are usually used as values(e.g. scala.None)
+ val nodesAll = pack.members collect {
+ case d: TemplateEntity if ((!diagramFilter.hideInheritedNodes) || (d.inTemplate == pack)) => d
+ }
+
+ def listSuperClasses(member: MemberTemplateImpl) = {
+ // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to add nodes to diagrams.
+ (pack.sym, member.sym) match {
+ case (ScalaPackage, NullClass) =>
+ List(makeTemplate(AnyRefClass))
+ case (ScalaPackage, NothingClass) =>
+ (List(NullClass) ::: ScalaValueClasses) map { makeTemplate(_) }
+ case _ =>
+ member.parentTypes map {
+ case (template, tpe) => template
+ } filter {
+ nodesAll.contains(_)
+ }
+ }
+ }
+
+ // for each node, add its subclasses
+ for (node <- nodesAll if !classExcluded(node)) {
+ node match {
+ case dnode: MemberTemplateImpl =>
+ val superClasses = listSuperClasses(dnode)
+
+ if (!superClasses.isEmpty) {
+ nodesShown += dnode
+ nodesShown ++= superClasses
+ }
+ edgesAll ::= dnode -> superClasses
+ case _ =>
+ }
+
+ mapNodes += node -> (
+ if (node.inTemplate == pack && (node.isDocTemplate || node.isAbstractType || node.isAliasType))
+ NormalNode(node.resultType, Some(node))()
+ else
+ OutsideNode(node.resultType, Some(node))()
+ )
+ }
+
+ if (nodesShown.isEmpty)
+ None
+ else {
+ val nodes = nodesAll.filter(nodesShown.contains(_)).flatMap(mapNodes.get(_))
+ val edges = edgesAll.map {
+ case (entity, superClasses) => {
+ (mapNodes(entity), superClasses flatMap { mapNodes.get(_) })
+ }
+ } filterNot {
+ case (node, superClassNodes) => superClassNodes.isEmpty
+ }
+
+ val diagram =
+ // TODO: Everyone should be able to use the @{inherit,content}Diagram annotation to change the diagrams.
+ if (pack.sym == ScalaPackage) {
+ // Tried it, but it doesn't look good:
+ // var anyRefSubtypes: List[Node] = List(mapNodes(makeTemplate(AnyRefClass)))
+ // var dirty = true
+ // do {
+ // val length = anyRefSubtypes.length
+ // anyRefSubtypes :::= edges.collect { case p: (Node, List[Node]) if p._2.exists(anyRefSubtypes.contains(_)) => p._1 }
+ // anyRefSubtypes = anyRefSubtypes.distinct
+ // dirty = (anyRefSubtypes.length != length)
+ // } while (dirty)
+ // println(anyRefSubtypes)
+ val anyRefSubtypes = Nil
+ val allAnyRefTypes = aggregationNode("All AnyRef subtypes")
+ val nullTemplate = makeTemplate(NullClass)
+ if (nullTemplate.isDocTemplate)
+ ContentDiagram(allAnyRefTypes::nodes, (mapNodes(nullTemplate), allAnyRefTypes::anyRefSubtypes)::edges.filterNot(_._1.tpl == Some(nullTemplate)))
+ else
+ ContentDiagram(nodes, edges)
+ } else
+ ContentDiagram(nodes, edges)
+
+ filterDiagram(diagram, diagramFilter)
+ }
+ }
+
+ tModel += System.currentTimeMillis
+ DiagramStats.addFilterTime(tFilter)
+ DiagramStats.addModelTime(tModel-tFilter)
+
+ result
+ }
+
+ /** Diagram filtering logic */
+ private def filterDiagram(diagram: Diagram, diagramFilter: DiagramFilter): Option[Diagram] = {
+ tFilter -= System.currentTimeMillis
+
+ val result =
+ if (diagramFilter == FullDiagram)
+ Some(diagram)
+ else if (diagramFilter == NoDiagramAtAll)
+ None
+ else {
+ // Final diagram, with the filtered nodes and edges
+ diagram match {
+ case InheritanceDiagram(thisNode, _, _, _, _) if diagramFilter.hideNode(thisNode) =>
+ None
+
+ case InheritanceDiagram(thisNode, superClasses, subClasses, incomingImplicits, outgoingImplicits) =>
+
+ def hideIncoming(node: Node): Boolean =
+ diagramFilter.hideNode(node) || diagramFilter.hideEdge(node, thisNode)
+
+ def hideOutgoing(node: Node): Boolean =
+ diagramFilter.hideNode(node) || diagramFilter.hideEdge(thisNode, node)
+
+ // println(thisNode)
+ // println(superClasses.map(cl => "super: " + cl + " " + hideOutgoing(cl)).mkString("\n"))
+ // println(subClasses.map(cl => "sub: " + cl + " " + hideIncoming(cl)).mkString("\n"))
+ Some(InheritanceDiagram(thisNode,
+ superClasses.filterNot(hideOutgoing(_)),
+ subClasses.filterNot(hideIncoming(_)),
+ incomingImplicits.filterNot(hideIncoming(_)),
+ outgoingImplicits.filterNot(hideOutgoing(_))))
+
+ case ContentDiagram(nodes0, edges0) =>
+ // Filter out all edges that:
+ // (1) are sources of hidden classes
+ // (2) are manually hidden by the user
+ // (3) are destinations of hidden classes
+ val edges: List[(Node, List[Node])] =
+ diagram.edges.flatMap({
+ case (source, dests) if !diagramFilter.hideNode(source) =>
+ val dests2 = dests.collect({ case dest if (!(diagramFilter.hideEdge(source, dest) || diagramFilter.hideNode(dest))) => dest })
+ if (dests2 != Nil)
+ List((source, dests2))
+ else
+ Nil
+ case _ => Nil
+ })
+
+ // Only show the non-isolated nodes
+ // TODO: Decide if we really want to hide package members, I'm not sure that's a good idea (!!!)
+ // TODO: Does .distinct cause any stability issues?
+ val sourceNodes = edges.map(_._1)
+ val sinkNodes = edges.map(_._2).flatten
+ val nodes = (sourceNodes ::: sinkNodes).distinct
+ Some(ContentDiagram(nodes, edges))
+ }
+ }
+
+ tFilter += System.currentTimeMillis
+
+ // eliminate all empty diagrams
+ if (result.isDefined && result.get.edges.forall(_._2.isEmpty))
+ None
+ else
+ result
+ }
+
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
index 6771c5d..3e7ac57 100644
--- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2009-2011 Scxala Solutions and LAMP/EPFL
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Iulian Dragos
* @author Hubert Plocinicak
*/
@@ -9,10 +9,11 @@ package interactive
import scala.collection._
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import util.FakePos
+import scala.reflect.internal.util.FakePos
import dependencies._
import io.AbstractFile
+import scala.language.implicitConversions
trait BuildManager {
@@ -33,7 +34,7 @@ trait BuildManager {
/** Load saved dependency information. */
def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean
- /** Save dependency information to `file'. */
+ /** Save dependency information to `file`. */
def saveTo(file: AbstractFile, fromFile: AbstractFile => String)
def compiler: scala.tools.nsc.Global
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
index c1a47c5..8d12581 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
@@ -7,11 +7,13 @@ package interactive
import scala.util.control.ControlThrowable
import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.{SourceFile, Position, WorkScheduler}
import scala.tools.nsc.symtab._
import scala.tools.nsc.ast._
import scala.tools.nsc.util.FailedInterrupt
import scala.tools.nsc.util.EmptyAction
+import scala.tools.nsc.util.WorkScheduler
+import scala.reflect.internal.util.{SourceFile, Position}
+import scala.tools.nsc.util.InterruptReq
/** Interface of interactive compiler to a client such as an IDE
* The model the presentation compiler consists of the following parts:
@@ -68,17 +70,17 @@ trait CompilerControl { self: Global =>
* if it does not yet exist create a new one atomically
* Note: We want to get roid of this operation as it messes compiler invariants.
*/
- @deprecated("use getUnitOf(s) or onUnitOf(s) instead", "2.9.0")
+ @deprecated("use getUnitOf(s) or onUnitOf(s) instead", "2.10.0")
def unitOf(s: SourceFile): RichCompilationUnit = getOrCreateUnitOf(s)
/** The compilation unit corresponding to a position */
- @deprecated("use getUnitOf(pos.source) or onUnitOf(pos.source) instead", "2.9.0")
+ @deprecated("use getUnitOf(pos.source) or onUnitOf(pos.source) instead", "2.10.0")
def unitOf(pos: Position): RichCompilationUnit = getOrCreateUnitOf(pos.source)
/** Removes the CompilationUnit corresponding to the given SourceFile
* from consideration for recompilation.
*/
- def removeUnitOf(s: SourceFile): Option[RichCompilationUnit] = { toBeRemoved += s.file; unitOfFile get s.file }
+ def removeUnitOf(s: SourceFile): Option[RichCompilationUnit] = { toBeRemoved += s.file; unitOfFile get s.file }
/** Returns the top level classes and objects that were deleted
* in the editor since last time recentlyDeleted() was called.
@@ -130,14 +132,19 @@ trait CompilerControl { self: Global =>
}
/** Sets sync var `response` to the smallest fully attributed tree that encloses position `pos`.
- * Note: Unlike for most other ask... operations, the source file belonging to `pos` needs not be be loaded.
+ * Note: Unlike for most other ask... operations, the source file belonging to `pos` needs not be loaded.
*/
def askTypeAt(pos: Position, response: Response[Tree]) =
postWorkItem(new AskTypeAtItem(pos, response))
/** Sets sync var `response` to the fully attributed & typechecked tree contained in `source`.
* @pre `source` needs to be loaded.
+ *
+ * @note Deprecated because of race conditions in the typechecker when the background compiler
+ * is interrupted while typing the same `source`.
+ * @see SI-6578
*/
+ @deprecated("Use `askLoadedTyped` instead to avoid race conditions in the typechecker", "2.10.1")
def askType(source: SourceFile, forceReload: Boolean, response: Response[Tree]) =
postWorkItem(new AskTypeItem(source, forceReload, response))
@@ -155,14 +162,33 @@ trait CompilerControl { self: Global =>
def askLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) =
postWorkItem(new AskLinkPosItem(sym, source, response))
- /** Sets sync var `response' to list of members that are visible
+ /** Sets sync var `response` to doc comment information for a given symbol.
+ *
+ * @param sym The symbol whose doc comment should be retrieved (might come from a classfile)
+ * @param source The source file that's supposed to contain the definition
+ * @param site The symbol where 'sym' is observed
+ * @param fragments All symbols that can contribute to the generated documentation
+ * together with their source files.
+ * @param response A response that will be set to the following:
+ * If `source` contains a definition of a given symbol that has a doc comment,
+ * the (expanded, raw, position) triplet for a comment, otherwise ("", "", NoPosition).
+ * Note: This operation does not automatically load sources that are not yet loaded.
+ */
+ def askDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]): Unit =
+ postWorkItem(new AskDocCommentItem(sym, source, site, fragments, response))
+
+ @deprecated("Use method that accepts fragments", "2.10.2")
+ def askDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]): Unit =
+ askDocComment(sym, source, site, (sym,source)::Nil, response)
+
+ /** Sets sync var `response` to list of members that are visible
* as members of the tree enclosing `pos`, possibly reachable by an implicit.
* @pre source is loaded
*/
def askTypeCompletion(pos: Position, response: Response[List[Member]]) =
postWorkItem(new AskTypeCompletionItem(pos, response))
- /** Sets sync var `response' to list of members that are visible
+ /** Sets sync var `response` to list of members that are visible
* as members of the scope enclosing `pos`.
* @pre source is loaded
*/
@@ -176,15 +202,20 @@ trait CompilerControl { self: Global =>
postWorkItem(new AskToDoFirstItem(source))
/** If source is not yet loaded, loads it, and starts a new run, otherwise
- * continues with current pass.
- * Waits until source is fully type checked and returns body in response.
- * @param source The source file that needs to be fully typed.
- * @param response The response, which is set to the fully attributed tree of `source`.
+ * continues with current pass.
+ * Waits until source is fully type checked and returns body in response.
+ * @param source The source file that needs to be fully typed.
+ * @param keepLoaded Whether to keep that file in the PC if it was not loaded before. If
+ the file is already loaded, this flag is ignored.
+ * @param response The response, which is set to the fully attributed tree of `source`.
* If the unit corresponding to `source` has been removed in the meantime
* the a NoSuchUnitError is raised in the response.
*/
- def askLoadedTyped(source: SourceFile, response: Response[Tree]) =
- postWorkItem(new AskLoadedTypedItem(source, response))
+ def askLoadedTyped(source:SourceFile, keepLoaded: Boolean, response: Response[Tree]): Unit =
+ postWorkItem(new AskLoadedTypedItem(source, keepLoaded, response))
+
+ final def askLoadedTyped(source: SourceFile, response: Response[Tree]): Unit =
+ askLoadedTyped(source, false, response)
/** If source if not yet loaded, get an outline view with askParseEntered.
* If source is loaded, wait for it to be typechecked.
@@ -193,7 +224,7 @@ trait CompilerControl { self: Global =>
*/
def askStructure(keepSrcLoaded: Boolean)(source: SourceFile, response: Response[Tree]) = {
getUnit(source) match {
- case Some(_) => askLoadedTyped(source, response)
+ case Some(_) => askLoadedTyped(source, keepSrcLoaded, response)
case None => askParsedEntered(source, keepSrcLoaded, response)
}
}
@@ -208,6 +239,22 @@ trait CompilerControl { self: Global =>
def askParsedEntered(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) =
postWorkItem(new AskParsedEnteredItem(source, keepLoaded, response))
+ /** Set sync var `response` to a pair consisting of
+ * - the fully qualified name of the first top-level object definition in the file.
+ * or "" if there are no object definitions.
+ * - the text of the instrumented program which, when run,
+ * prints its output and all defined values in a comment column.
+ *
+ * @param source The source file to be analyzed
+ * @param keepLoaded If set to `true`, source file will be kept as a loaded unit afterwards.
+ * If keepLoaded is `false` the operation is run at low priority, only after
+ * everything is brought up to date in a regular type checker run.
+ * @param response The response.
+ */
+ @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
+ def askInstrumented(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) =
+ postWorkItem(new AskInstrumentedItem(source, line, response))
+
/** Cancels current compiler run and start a fresh one where everything will be re-typechecked
* (but not re-loaded).
*/
@@ -216,26 +263,42 @@ trait CompilerControl { self: Global =>
/** Tells the compile server to shutdown, and not to restart again */
def askShutdown() = scheduler raise ShutdownReq
- @deprecated("use parseTree(source) instead", "2.9.0")
+ @deprecated("use parseTree(source) instead", "2.10.0") // deleted 2nd parameter, as this has to run on 2.8 also.
def askParse(source: SourceFile, response: Response[Tree]) = respond(response) {
parseTree(source)
}
/** Returns parse tree for source `source`. No symbols are entered. Syntax errors are reported.
- * Can be called asynchronously from presentation compiler.
+ *
+ * This method is thread-safe and as such can safely run outside of the presentation
+ * compiler thread.
*/
- def parseTree(source: SourceFile): Tree = ask { () =>
- getUnit(source) match {
- case Some(unit) if unit.status >= JustParsed =>
- unit.body
- case _ =>
- new UnitParser(new CompilationUnit(source)).parse()
- }
+ def parseTree(source: SourceFile): Tree = {
+ new UnitParser(new CompilationUnit(source)).parse()
}
/** Asks for a computation to be done quickly on the presentation compiler thread */
def ask[A](op: () => A): A = if (self.onCompilerThread) op() else scheduler doQuickly op
+ /** Asks for a computation to be done on presentation compiler thread, returning
+ * a response with the result or an exception
+ */
+ def askForResponse[A](op: () => A): Response[A] = {
+ val r = new Response[A]
+ if (self.onCompilerThread) {
+ try { r set op() }
+ catch { case exc: Throwable => r raise exc }
+ r
+ } else {
+ val ir = scheduler askDoQuickly op
+ ir onComplete {
+ case Left(result) => r set result
+ case Right(exc) => r raise exc
+ }
+ r
+ }
+ }
+
def onCompilerThread = Thread.currentThread == compileRunner
/** Info given for every member found by completion
@@ -337,8 +400,16 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskLoadedTypedItem(val source: SourceFile, response: Response[Tree]) extends WorkItem {
- def apply() = self.waitLoadedTyped(source, response, this.onCompilerThread)
+ case class AskDocCommentItem(val sym: Symbol, val source: SourceFile, val site: Symbol, val fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]) extends WorkItem {
+ def apply() = self.getDocComment(sym, source, site, fragments, response)
+ override def toString = "doc comment "+sym+" in "+source+" with fragments:"+fragments.mkString("(", ",", ")")
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
+ case class AskLoadedTypedItem(val source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) extends WorkItem {
+ def apply() = self.waitLoadedTyped(source, response, keepLoaded, this.onCompilerThread)
override def toString = "wait loaded & typed "+source
def raiseMissing() =
@@ -353,6 +424,15 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
+ @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
+ case class AskInstrumentedItem(val source: SourceFile, line: Int, response: Response[(String, Array[Char])]) extends WorkItem {
+ def apply() = self.getInstrumented(source, line, response)
+ override def toString = "getInstrumented "+source
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
/** A do-nothing work scheduler that responds immediately with MissingResponse.
*
* Used during compiler shutdown.
@@ -366,10 +446,20 @@ trait CompilerControl { self: Global =>
case _ => println("don't know what to do with this " + action.getClass)
}
}
-
+
override def doQuickly[A](op: () => A): A = {
throw new FailedInterrupt(new Exception("Posted a work item to a compiler that's shutting down"))
}
+
+ override def askDoQuickly[A](op: () => A): InterruptReq { type R = A } = {
+ val ir = new InterruptReq {
+ type R = A
+ val todo = () => throw new MissingResponse
+ }
+ ir.execute()
+ ir
+ }
+
}
}
diff --git a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala b/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
index fc48d48..4a61a98 100644
--- a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
+++ b/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
@@ -1,12 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package interactive
-import collection.mutable.ArrayBuffer
-import util.Position
+import scala.collection.mutable.ArrayBuffer
+import scala.annotation.tailrec
trait ContextTrees { self: Global =>
@@ -29,44 +29,59 @@ trait ContextTrees { self: Global =>
override def toString = "ContextTree("+pos+", "+children+")"
}
- /** Optionally returns the smallest context that contains given `pos`, or None if none exists.
+ /** Returns the most precise context possible for the given `pos`.
+ *
+ * It looks for the finest ContextTree containing `pos`, and then look inside
+ * this ContextTree for a child ContextTree located immediately before `pos`.
+ * If such a child exists, returns its context, otherwise returns the context of
+ * the parent ContextTree.
+ *
+ * This is required to always return a context which contains the all the imports
+ * declared up to `pos` (see SI-7280 for a test case).
+ *
+ * Can return None if `pos` is before any valid Scala code.
*/
def locateContext(contexts: Contexts, pos: Position): Option[Context] = synchronized {
- def locateNearestContextTree(contexts: Contexts, pos: Position, recent: Array[ContextTree]): Option[ContextTree] = {
- locateContextTree(contexts, pos) match {
- case Some(x) =>
- recent(0) = x
- locateNearestContextTree(x.children, pos, recent)
- case None => recent(0) match {
- case null => None
- case x => Some(x)
- }
+ @tailrec
+ def locateFinestContextTree(context: ContextTree): ContextTree = {
+ if (context.pos includes pos) {
+ locateContextTree(context.children, pos) match {
+ case Some(x) =>
+ locateFinestContextTree(x)
+ case None =>
+ context
+ }
+ } else {
+ context
}
}
- locateNearestContextTree(contexts, pos, new Array[ContextTree](1)) map (_.context)
+ locateContextTree(contexts, pos) map locateFinestContextTree map (_.context)
}
+ /** Returns the ContextTree containing `pos`, or the ContextTree positioned just before `pos`,
+ * or None if `pos` is located before all ContextTrees.
+ */
def locateContextTree(contexts: Contexts, pos: Position): Option[ContextTree] = {
if (contexts.isEmpty) None
else {
- val hi = contexts.length - 1
- if ((contexts(hi).pos properlyPrecedes pos) || (pos properlyPrecedes contexts(0).pos)) None
- else {
- def loop(lo: Int, hi: Int): Option[ContextTree] = {
+ @tailrec
+ def loop(lo: Int, hi: Int, previousSibling: Option[ContextTree]): Option[ContextTree] = {
+ if (pos properlyPrecedes contexts(lo).pos)
+ previousSibling
+ else if (contexts(hi).pos properlyPrecedes pos)
+ Some(contexts(hi))
+ else {
val mid = (lo + hi) / 2
val midpos = contexts(mid).pos
- if ((pos precedes midpos) && (mid < hi))
- loop(lo, mid)
- else if ((midpos precedes pos) && (lo < mid))
- loop(mid, hi)
- else if (midpos includes pos)
+ if (midpos includes pos)
Some(contexts(mid))
- else if (contexts(mid+1).pos includes pos)
- Some(contexts(mid+1))
- else None
+ else if (midpos properlyPrecedes pos)
+ loop(mid + 1, hi, Some(contexts(mid)))
+ else
+ loop(lo, mid, previousSibling)
}
- loop(0, hi)
}
+ loop(0, contexts.length - 1, None)
}
}
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index 797c4d2..d6fa42b 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -1,36 +1,42 @@
/* NSC -- new Scala compiler
- * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package interactive
import java.io.{ PrintWriter, StringWriter, FileReader, FileWriter }
-import collection.mutable.{ArrayBuffer, ListBuffer, SynchronizedBuffer, HashMap}
-
import scala.collection.mutable
-import mutable.{LinkedHashMap, SynchronizedMap, HashSet, LinkedHashSet, SynchronizedSet}
+import mutable.{LinkedHashMap, SynchronizedMap, HashSet, SynchronizedSet}
import scala.concurrent.SyncVar
import scala.util.control.ControlThrowable
import scala.tools.nsc.io.{ AbstractFile, LogReplay, Logger, NullLogger, Replayer }
-import scala.tools.nsc.util.{ SourceFile, BatchSourceFile, Position, RangePosition, NoPosition, WorkScheduler, MultiHashMap }
+import scala.tools.nsc.util.{ WorkScheduler, MultiHashMap }
+import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, RangePosition, NoPosition }
import scala.tools.nsc.reporters._
import scala.tools.nsc.symtab._
import scala.tools.nsc.ast._
import scala.tools.nsc.io.Pickler._
import scala.tools.nsc.typechecker.DivergentImplicit
import scala.annotation.tailrec
-import scala.reflect.generic.Flags.{ACCESSOR, PARAMACCESSOR}
+import symtab.Flags.{ACCESSOR, PARAMACCESSOR}
+import scala.annotation.elidable
+import scala.language.implicitConversions
/** The main class of the presentation compiler in an interactive environment such as an IDE
*/
-class Global(settings: Settings, reporter: Reporter, projectName: String = "")
- extends scala.tools.nsc.Global(settings, reporter)
- with CompilerControl
- with RangePositions
- with ContextTrees
- with RichCompilationUnits
- with Picklers {
+class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends {
+ /* Is the compiler initializing? Early def, so that the field is true during the
+ * execution of the super constructor.
+ */
+ private var initializing = true
+} with scala.tools.nsc.Global(settings, _reporter)
+ with CompilerControl
+ with RangePositions
+ with ContextTrees
+ with RichCompilationUnits
+ with ScratchPadMaker
+ with Picklers {
import definitions._
@@ -204,7 +210,7 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
protected[interactive] var minRunId = 1
- private var interruptsEnabled = true
+ private[interactive] var interruptsEnabled = true
private val NoResponse: Response[_] = new Response[Any]
@@ -218,7 +224,10 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
/** Called from parser, which signals hereby that a method definition has been parsed.
*/
override def signalParseProgress(pos: Position) {
- checkForMoreWork(pos)
+ // We only want to be interruptible when running on the PC thread.
+ if(onCompilerThread) {
+ checkForMoreWork(pos)
+ }
}
/** Called from typechecker, which signals hereby that a node has been completely typechecked.
@@ -229,8 +238,13 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
* @param result The transformed node
*/
override def signalDone(context: Context, old: Tree, result: Tree) {
- if (interruptsEnabled && analyzer.lockedCount == 0) {
- if (context.unit != null &&
+ val canObserveTree = (
+ interruptsEnabled
+ && analyzer.lockedCount == 0
+ && !context.bufferErrors // SI-7558 look away during exploratory typing in "silent mode"
+ )
+ if (canObserveTree) {
+ if (context.unit.exists &&
result.pos.isOpaqueRange &&
(result.pos includes context.unit.targetPos)) {
var located = new TypedLocator(context.unit.targetPos) locateIn result
@@ -240,14 +254,16 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
}
throw new TyperResult(located)
}
- try {
- checkForMoreWork(old.pos)
- } catch {
- case ex: ValidateException => // Ignore, this will have been reported elsewhere
- debugLog("validate exception caught: "+ex)
- case ex: Throwable =>
- log.flush()
- throw ex
+ else {
+ try {
+ checkForMoreWork(old.pos)
+ } catch {
+ case ex: ValidateException => // Ignore, this will have been reported elsewhere
+ debugLog("validate exception caught: "+ex)
+ case ex: Throwable =>
+ log.flush()
+ throw ex
+ }
}
}
}
@@ -352,6 +368,10 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
case item: WorkItem => Some(item.raiseMissing())
case _ => Some(())
}
+
+ // don't forget to service interrupt requests
+ val iqs = scheduler.dequeueAllInterrupts(_.execute())
+
debugLog("ShutdownReq: cleaning work queue (%d items)".format(units.size))
debugLog("Cleanup up responses (%d loadedType pending, %d parsedEntered pending)"
.format(waitLoadedTypeResponses.size, getParsedEnteredResponses.size))
@@ -428,17 +448,32 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
private var threadId = 0
/** The current presentation compiler runner */
- @volatile private[interactive] var compileRunner = newRunnerThread()
+ @volatile private[interactive] var compileRunner: Thread = newRunnerThread()
+
+ /** Check that the currenyly executing thread is the presentation compiler thread.
+ *
+ * Compiler initialization may happen on a different thread (signalled by globalPhase being NoPhase)
+ */
+ @elidable(elidable.WARNING)
+ override def assertCorrectThread() {
+ assert(initializing || onCompilerThread,
+ "Race condition detected: You are running a presentation compiler method outside the PC thread.[phase: %s]".format(globalPhase) +
+ " Please file a ticket with the current stack trace at https://www.assembla.com/spaces/scala-ide/support/tickets")
+ }
/** Create a new presentation compiler runner.
*/
private def newRunnerThread(): Thread = {
threadId += 1
compileRunner = new PresentationCompilerThread(this, projectName)
+ compileRunner.setDaemon(true)
compileRunner.start()
compileRunner
}
+ private def ensureUpToDate(unit: RichCompilationUnit) =
+ if (!unit.isUpToDate && unit.status != JustParsed) reset(unit) // reparse previously typechecked units.
+
/** Compile all loaded source files in the order given by `allSources`.
*/
private[interactive] final def backgroundCompile() {
@@ -450,8 +485,8 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
// ensure all loaded units are parsed
for (s <- allSources; unit <- getUnit(s)) {
- checkForMoreWork(NoPosition)
- if (!unit.isUpToDate && unit.status != JustParsed) reset(unit) // reparse previously typechecked units.
+ // checkForMoreWork(NoPosition) // disabled, as any work done here would be in an inconsistent state
+ ensureUpToDate(unit)
parseAndEnter(unit)
serviceParsedEntered()
}
@@ -479,8 +514,8 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
} catch {
case ex: FreshRunReq => throw ex // propagate a new run request
case ShutdownReq => throw ShutdownReq // propagate a shutdown request
-
- case ex =>
+ case ex: ControlThrowable => throw ex
+ case ex: Throwable =>
println("[%s]: exception during background compile: ".format(unit.source) + ex)
ex.printStackTrace()
for (r <- waitLoadedTypeResponses(unit.source)) {
@@ -530,6 +565,7 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
unit.defined.clear()
unit.synthetics.clear()
unit.toCheck.clear()
+ unit.checkedFeatures = Set()
unit.targetPos = NoPosition
unit.contexts.clear()
unit.problems.clear()
@@ -620,7 +656,7 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
response raise ex
throw ex
- case ex =>
+ case ex: Throwable =>
if (debugIDE) {
println("exception thrown during response: "+ex)
ex.printStackTrace()
@@ -667,7 +703,7 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
}
sources foreach (removeUnitOf(_))
minRunId = currentRunId
- respond(response) ()
+ respond(response)(())
demandNewCompilerRun()
}
@@ -675,8 +711,8 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
* If we do just removeUnit, some problems with default parameters can ensue.
* Calls to this method could probably be replaced by removeUnit once default parameters are handled more robustly.
*/
- private def afterRunRemoveUnitOf(source: SourceFile) {
- toBeRemovedAfterRun += source.file
+ private def afterRunRemoveUnitsOf(sources: List[SourceFile]) {
+ toBeRemovedAfterRun ++= sources map (_.file)
}
/** A fully attributed tree located at position `pos` */
@@ -684,7 +720,7 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
case None =>
reloadSources(List(pos.source))
try typedTreeAt(pos)
- finally afterRunRemoveUnitOf(pos.source)
+ finally afterRunRemoveUnitsOf(List(pos.source))
case Some(unit) =>
informIDE("typedTreeAt " + pos)
parseAndEnter(unit)
@@ -692,7 +728,7 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
debugLog("at pos "+pos+" was found: "+tree.getClass+" "+tree.pos.show)
tree match {
case Import(expr, _) =>
- debugLog("import found"+expr.tpe+" "+expr.tpe.members)
+ debugLog("import found"+expr.tpe+(if (expr.tpe == null) "" else " "+expr.tpe.members))
case _ =>
}
if (stabilizedType(tree) ne null) {
@@ -703,7 +739,7 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
try {
debugLog("starting targeted type check")
typeCheck(unit)
- println("tree not found at "+pos)
+// println("tree not found at "+pos)
EmptyTree
} catch {
case ex: TyperResult => new Locator(pos) locateIn ex.tree
@@ -714,7 +750,7 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
}
/** A fully attributed tree corresponding to the entire compilation unit */
- private def typedTree(source: SourceFile, forceReload: Boolean): Tree = {
+ private[interactive] def typedTree(source: SourceFile, forceReload: Boolean): Tree = {
informIDE("typedTree " + source + " forceReload: " + forceReload)
val unit = getOrCreateUnitOf(source)
if (forceReload) reset(unit)
@@ -734,51 +770,78 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
respond(response)(typedTree(source, forceReload))
}
- /** Implements CompilerControl.askLinkPos */
- private[interactive] def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) {
+ private def withTempUnits[T](sources: List[SourceFile])(f: (SourceFile => RichCompilationUnit) => T): T = {
+ val unitOfSrc: SourceFile => RichCompilationUnit = src => unitOfFile(src.file)
+ sources filterNot (getUnit(_).isDefined) match {
+ case Nil =>
+ f(unitOfSrc)
+ case unknown =>
+ reloadSources(unknown)
+ try {
+ f(unitOfSrc)
+ } finally
+ afterRunRemoveUnitsOf(unknown)
+ }
+ }
- /** Find position of symbol `sym` in unit `unit`. Pre: `unit is loaded. */
- def findLinkPos(unit: RichCompilationUnit): Position = {
- val originalTypeParams = sym.owner.typeParams
- parseAndEnter(unit)
- val pre = adaptToNewRunMap(ThisType(sym.owner))
- val newsym = pre.typeSymbol.info.decl(sym.name) filter { alt =>
- sym.isType || {
- try {
- val tp1 = pre.memberType(alt) onTypeError NoType
- val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams)
- matchesType(tp1, tp2, false)
- } catch {
- case ex: Throwable =>
- println("error in hyperlinking: " + ex)
- ex.printStackTrace()
+ private def withTempUnit[T](source: SourceFile)(f: RichCompilationUnit => T): T =
+ withTempUnits(List(source)){ srcToUnit =>
+ f(srcToUnit(source))
+ }
+
+ /** Find a 'mirror' of symbol `sym` in unit `unit`. Pre: `unit is loaded. */
+ private def findMirrorSymbol(sym: Symbol, unit: RichCompilationUnit): Symbol = {
+ val originalTypeParams = sym.owner.typeParams
+ ensureUpToDate(unit)
+ parseAndEnter(unit)
+ val pre = adaptToNewRunMap(ThisType(sym.owner))
+ val rawsym = pre.typeSymbol.info.decl(sym.name)
+ val newsym = rawsym filter { alt =>
+ sym.isType || {
+ try {
+ val tp1 = pre.memberType(alt) onTypeError NoType
+ val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams)
+ matchesType(tp1, tp2, false) || {
+ debugLog(s"findMirrorSymbol matchesType($tp1, $tp2) failed")
+ val tp3 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, alt.owner.typeParams)
+ matchesType(tp1, tp3, false) || {
+ debugLog(s"findMirrorSymbol fallback matchesType($tp1, $tp3) failed")
false
+ }
}
}
+ catch {
+ case ex: ControlThrowable => throw ex
+ case ex: Throwable =>
+ debugLog("error in findMirrorSymbol: " + ex)
+ ex.printStackTrace()
+ false
+ }
}
- if (newsym == NoSymbol) {
- debugLog("link not found " + sym + " " + source + " " + pre)
- NoPosition
- } else if (newsym.isOverloaded) {
- settings.uniqid.value = true
- debugLog("link ambiguous " + sym + " " + source + " " + pre + " " + newsym.alternatives)
- NoPosition
- } else {
- debugLog("link found for " + newsym + ": " + newsym.pos)
- newsym.pos
+ }
+ if (newsym == NoSymbol) {
+ if (rawsym.exists && !rawsym.isOverloaded) rawsym
+ else {
+ debugLog("mirror not found " + sym + " " + unit.source + " " + pre)
+ NoSymbol
}
+ } else if (newsym.isOverloaded) {
+ settings.uniqid.value = true
+ debugLog("mirror ambiguous " + sym + " " + unit.source + " " + pre + " " + newsym.alternatives)
+ NoSymbol
+ } else {
+ debugLog("mirror found for " + newsym + ": " + newsym.pos)
+ newsym
}
+ }
+ /** Implements CompilerControl.askLinkPos */
+ private[interactive] def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) {
informIDE("getLinkPos "+sym+" "+source)
respond(response) {
if (sym.owner.isClass) {
- getUnit(source) match {
- case None =>
- reloadSources(List(source))
- try findLinkPos(getUnit(source).get)
- finally afterRunRemoveUnitOf(source)
- case Some(unit) =>
- findLinkPos(unit)
+ withTempUnit(source){ u =>
+ findMirrorSymbol(sym, u).pos
}
} else {
debugLog("link not in class "+sym+" "+source+" "+sym.owner)
@@ -787,6 +850,38 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
}
}
+ private def forceDocComment(sym: Symbol, unit: RichCompilationUnit) {
+ unit.body foreachPartial {
+ case DocDef(comment, defn) if defn.symbol == sym =>
+ fillDocComment(defn.symbol, comment)
+ EmptyTree
+ case _: ValOrDefDef =>
+ EmptyTree
+ }
+ }
+
+ /** Implements CompilerControl.askDocComment */
+ private[interactive] def getDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)],
+ response: Response[(String, String, Position)]) {
+ informIDE(s"getDocComment $sym at $source, site $site")
+ respond(response) {
+ withTempUnits(fragments.unzip._2){ units =>
+ for((sym, src) <- fragments) {
+ val mirror = findMirrorSymbol(sym, units(src))
+ if (mirror ne NoSymbol) forceDocComment(mirror, units(src))
+ }
+ val mirror = findMirrorSymbol(sym, units(source))
+ if (mirror eq NoSymbol)
+ ("", "", NoPosition)
+ else {
+ (expandedDocComment(mirror, site), rawDocComment(mirror), docCommentPos(mirror))
+ }
+ }
+ }
+ // New typer run to remove temp units and drop per-run caches that might refer to symbols entered from temp units.
+ newTyperRun()
+ }
+
def stabilizedType(tree: Tree): Type = tree match {
case Ident(_) if tree.symbol.isStable =>
singleType(NoPrefix, tree.symbol)
@@ -829,7 +924,7 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M) {
if ((sym.isGetter || sym.isSetter) && sym.accessed != NoSymbol) {
add(sym.accessed, pre, implicitlyAdded)(toMember)
- } else if (!sym.name.decode.containsName(Dollar) && !sym.isSynthetic && sym.hasRawInfo) {
+ } else if (!sym.name.decodedName.containsName(Dollar) && !sym.isSynthetic && sym.hasRawInfo) {
val symtpe = pre.memberType(sym) onTypeError ErrorType
matching(sym, symtpe, this(sym.name)) match {
case Some(m) =>
@@ -860,7 +955,11 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
val enclosing = new Members[ScopeMember]
def addScopeMember(sym: Symbol, pre: Type, viaImport: Tree) =
locals.add(sym, pre, false) { (s, st) =>
- new ScopeMember(s, st, context.isAccessible(s, pre, false), viaImport)
+ // imported val and var are always marked as inaccessible, but they could be accessed through their getters. SI-7995
+ if (s.hasGetter)
+ new ScopeMember(s, st, context.isAccessible(s.getter, pre, superAccess = false), viaImport)
+ else
+ new ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport)
}
def localsToEnclosing() = {
enclosing.addNonShadowed(locals)
@@ -911,7 +1010,13 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
val context = doLocateContext(pos)
- if (tree.tpe == null)
+ val shouldTypeQualifier = tree.tpe match {
+ case null => true
+ case mt: MethodType => mt.isImplicit
+ case _ => false
+ }
+
+ if (shouldTypeQualifier)
// TODO: guard with try/catch to deal with ill-typed qualifiers.
tree = analyzer.newTyper(context).typedQualifier(tree)
@@ -924,7 +1029,7 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
val implicitlyAdded = viaView != NoSymbol
members.add(sym, pre, implicitlyAdded) { (s, st) =>
new TypeMember(s, st,
- context.isAccessible(s, pre, superAccess && !implicitlyAdded),
+ context.isAccessible(if (s.hasGetter) s.getter(s.owner) else s, pre, superAccess && !implicitlyAdded),
inherited,
viaView)
}
@@ -957,7 +1062,7 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
if (ownerTpe.isErroneous) List()
else new ImplicitSearch(
tree, functionType(List(ownerTpe), AnyClass.tpe), isView = true,
- context.makeImplicit(reportAmbiguousErrors = false)).allImplicits
+ context0 = context.makeImplicit(reportAmbiguousErrors = false)).allImplicits
for (view <- applicableViews) {
val vtree = viewApply(view)
val vpre = stabilizedType(vtree)
@@ -971,7 +1076,7 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
}
/** Implements CompilerControl.askLoadedTyped */
- private[interactive] def waitLoadedTyped(source: SourceFile, response: Response[Tree], onSameThread: Boolean = true) {
+ private[interactive] def waitLoadedTyped(source: SourceFile, response: Response[Tree], keepLoaded: Boolean = false, onSameThread: Boolean = true) {
getUnit(source) match {
case Some(unit) =>
if (unit.isUpToDate) {
@@ -989,7 +1094,10 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
case None =>
debugLog("load unit and type")
try reloadSources(List(source))
- finally waitLoadedTyped(source, response, onSameThread)
+ finally {
+ waitLoadedTyped(source, response, onSameThread)
+ if (!keepLoaded) removeUnitOf(source)
+ }
}
}
@@ -1021,6 +1129,17 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
}
}
+ @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
+ def getInstrumented(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) =
+ try {
+ interruptsEnabled = false
+ respond(response) {
+ instrument(source, line)
+ }
+ } finally {
+ interruptsEnabled = true
+ }
+
// ---------------- Helper classes ---------------------------
/** A transformer that replaces tree `from` with tree `to` in a given tree */
@@ -1057,7 +1176,6 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
def newTyperRun() {
currentTyperRun = new TyperRun
- perRunCaches.clearAll()
}
class TyperResult(val tree: Tree) extends ControlThrowable
@@ -1066,6 +1184,8 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
implicit def addOnTypeError[T](x: => T): OnTypeError[T] = new OnTypeError(x)
+ // OnTypeError should still catch TypeError because of cyclic references,
+ // but DivergentImplicit shouldn't leak anymore here
class OnTypeError[T](op: => T) {
def onTypeError(alt: => T) = try {
op
@@ -1074,11 +1194,21 @@ class Global(settings: Settings, reporter: Reporter, projectName: String = "")
debugLog("type error caught: "+ex)
alt
case ex: DivergentImplicit =>
- debugLog("divergent implicit caught: "+ex)
- alt
+ if (settings.Xdivergence211.value) {
+ debugLog("this shouldn't happen. DivergentImplicit exception has been thrown with -Xdivergence211 turned on: "+ex)
+ alt
+ } else {
+ debugLog("divergent implicit caught: "+ex)
+ alt
+ }
}
}
+
+ /** The compiler has been initialized. Constructors are evaluated in textual order,
+ * so this is set to true only after all super constructors and the primary constructor
+ * have been executed.
+ */
+ initializing = false
}
object CancelException extends Exception
-
diff --git a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala b/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
index 397e83a..013b152 100644
--- a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
+++ b/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
@@ -1,12 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package interactive
-import collection.mutable.ArrayBuffer
-import util.Position
+import scala.collection.mutable.ArrayBuffer
+import scala.reflect.internal.util.Position
import reporters.Reporter
case class Problem(pos: Position, msg: String, severityLevel: Int)
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
index 561fa47..64e050e 100644
--- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
@@ -1,17 +1,19 @@
/* NSC -- new Scala compiler
- * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package interactive
-import util.{SourceFile, BatchSourceFile, InterruptReq}
+import util.InterruptReq
+import scala.reflect.internal.util.{SourceFile, BatchSourceFile}
import io.{AbstractFile, PlainFile}
-import util.{Position, RangePosition, NoPosition, OffsetPosition, TransparentPosition, EmptyAction}
+import util.EmptyAction
+import scala.reflect.internal.util.{Position, RangePosition, NoPosition, OffsetPosition, TransparentPosition}
import io.{Pickler, CondPickler}
import io.Pickler._
-import collection.mutable
+import scala.collection.mutable
import mutable.ListBuffer
trait Picklers { self: Global =>
@@ -85,7 +87,7 @@ trait Picklers { self: Global =>
implicit lazy val position: Pickler[Position] = transparentPosition | rangePosition | offsetPosition | noPosition
implicit lazy val namePickler: Pickler[Name] =
- pkl[String] .wrapped {
+ pkl[String] .wrapped[Name] {
str => if ((str.length > 1) && (str endsWith "!")) newTypeName(str.init) else newTermName(str)
} {
name => if (name.isTypeName) name.toString+"!" else name.toString
@@ -101,7 +103,7 @@ trait Picklers { self: Global =>
if (sym1.isOverloaded) {
val index = sym1.alternatives.indexOf(sym)
assert(index >= 0, sym1+" not found in alternatives "+sym1.alternatives)
- buf += index.toString
+ buf += newTermName(index.toString)
}
}
}
@@ -115,7 +117,7 @@ trait Picklers { self: Global =>
if (sym.isOverloaded) makeSymbol(sym.alternatives(rest.head.toString.toInt), rest.tail)
else makeSymbol(sym, rest)
}
- pkl[List[Name]] .wrapped { makeSymbol(definitions.RootClass, _) } { ownerNames(_, new ListBuffer).toList }
+ pkl[List[Name]] .wrapped { makeSymbol(rootMirror.RootClass, _) } { ownerNames(_, new ListBuffer).toList }
}
implicit def workEvent: Pickler[WorkEvent] = {
@@ -163,9 +165,14 @@ trait Picklers { self: Global =>
.wrapped { case sym ~ source => new AskLinkPosItem(sym, source, new Response) } { item => item.sym ~ item.source }
.asClass (classOf[AskLinkPosItem])
+ implicit def askDocCommentItem: CondPickler[AskDocCommentItem] =
+ (pkl[Symbol] ~ pkl[SourceFile] ~ pkl[Symbol] ~ pkl[List[(Symbol,SourceFile)]])
+ .wrapped { case sym ~ source ~ site ~ fragments => new AskDocCommentItem(sym, source, site, fragments, new Response) } { item => item.sym ~ item.source ~ item.site ~ item.fragments }
+ .asClass (classOf[AskDocCommentItem])
+
implicit def askLoadedTypedItem: CondPickler[AskLoadedTypedItem] =
pkl[SourceFile]
- .wrapped { source => new AskLoadedTypedItem(source, new Response) } { _.source }
+ .wrapped { source => new AskLoadedTypedItem(source, false, new Response) } { _.source }
.asClass (classOf[AskLoadedTypedItem])
implicit def askParsedEnteredItem: CondPickler[AskParsedEnteredItem] =
@@ -180,5 +187,5 @@ trait Picklers { self: Global =>
implicit def action: Pickler[() => Unit] =
reloadItem | askTypeAtItem | askTypeItem | askTypeCompletionItem | askScopeCompletionItem |
- askToDoFirstItem | askLinkPosItem | askLoadedTypedItem | askParsedEnteredItem | emptyAction
+ askToDoFirstItem | askLinkPosItem | askDocCommentItem | askLoadedTypedItem | askParsedEnteredItem | emptyAction
}
diff --git a/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala b/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala
index 098884d..a2d8e5d 100644
--- a/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala
+++ b/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
* @author Iulian Dragos
*/
@@ -36,7 +36,7 @@ final class PresentationCompilerThread(var compiler: Global, name: String = "")
// make sure we don't keep around stale instances
compiler = null
- case ex =>
+ case ex: Throwable =>
compiler.log.flush()
ex match {
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala
index 27947d5..4b64313 100644
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala
@@ -1,16 +1,19 @@
/* NSC -- new Scala compiler
- * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package interactive
import scala.concurrent.SyncVar
-import scala.tools.nsc.util._
+import scala.reflect.internal.util._
import scala.tools.nsc.symtab._
import scala.tools.nsc.ast._
import scala.tools.nsc.reporters._
import scala.tools.nsc.io._
+import scala.tools.nsc.scratchpad.SourceInserter
+import scala.tools.nsc.interpreter.AbstractFileClassLoader
+import java.io.{File, FileWriter}
/** Interface of interactive compiler to a client such as an IDE
*/
@@ -34,7 +37,7 @@ object REPL {
reporter = new ConsoleReporter(settings)
val command = new CompilerCommand(args.toList, settings)
if (command.settings.version.value)
- reporter.info(null, versionMsg, true)
+ reporter.echo(versionMsg)
else {
try {
object compiler extends Global(command.settings, reporter) {
@@ -45,7 +48,7 @@ object REPL {
return
}
if (command.shouldStopWithInfo) {
- reporter.info(null, command.getInfoMessage(compiler), true)
+ reporter.echo(command.getInfoMessage(compiler))
} else {
run(compiler)
}
@@ -60,7 +63,7 @@ object REPL {
def main(args: Array[String]) {
process(args)
- sys.exit(if (reporter.hasErrors) 1 else 0)
+ /*sys.*/exit(if (reporter.hasErrors) 1 else 0)// Don't use sys yet as this has to run on 2.8.2 also.
}
def loop(action: (String) => Unit) {
@@ -89,28 +92,75 @@ object REPL {
val completeResult = new Response[List[comp.Member]]
val typedResult = new Response[comp.Tree]
val structureResult = new Response[comp.Tree]
+ @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
+ val instrumentedResult = new Response[(String, Array[Char])]
def makePos(file: String, off1: String, off2: String) = {
val source = toSourceFile(file)
comp.rangePos(source, off1.toInt, off1.toInt, off2.toInt)
}
+
def doTypeAt(pos: Position) {
comp.askTypeAt(pos, typeatResult)
show(typeatResult)
}
+
def doComplete(pos: Position) {
comp.askTypeCompletion(pos, completeResult)
show(completeResult)
}
- def doTypedTree(file: String) {
- comp.askType(toSourceFile(file), true, typedResult)
- show(typedResult)
- }
+
def doStructure(file: String) {
comp.askParsedEntered(toSourceFile(file), false, structureResult)
show(structureResult)
}
+ /** Write instrumented source file to disk.
+ * @param iFullName The full name of the first top-level object in source
+ * @param iContents An Array[Char] containing the instrumented source
+ * @return The name of the instrumented source file
+ */
+ @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
+ def writeInstrumented(iFullName: String, suffix: String, iContents: Array[Char]): String = {
+ val iSimpleName = iFullName drop ((iFullName lastIndexOf '.') + 1)
+ val iSourceName = iSimpleName + suffix
+ val ifile = new FileWriter(iSourceName)
+ ifile.write(iContents)
+ ifile.close()
+ iSourceName
+ }
+
+ /** The method for implementing worksheet functionality.
+ * @param arguments a file name, followed by optional command line arguments that are passed
+ * to the compiler that processes the instrumented source.
+ * @param line A line number that controls up to which line results should be produced
+ * If line = -1, results are produced for all expressions in the worksheet.
+ * @return The generated file content containing original source in the left column
+ * and outputs in the right column, or None if the presentation compiler
+ * does not respond to askInstrumented.
+ */
+ @deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
+ def instrument(arguments: List[String], line: Int): Option[(String, String)] = {
+ val source = toSourceFile(arguments.head)
+ // strip right hand side comment column and any trailing spaces from all lines
+ val strippedContents = SourceInserter.stripRight(source.content)
+ val strippedSource = new BatchSourceFile(source.file, strippedContents)
+ println("stripped source = "+strippedSource+":"+strippedContents.mkString)
+ comp.askReload(List(strippedSource), reloadResult)
+ comp.askInstrumented(strippedSource, line, instrumentedResult)
+ using(instrumentedResult) {
+ case (iFullName, iContents) =>
+ println(s"instrumented source $iFullName = ${iContents.mkString}")
+ val iSourceName = writeInstrumented(iFullName, "$instrumented.scala", iContents)
+ val sSourceName = writeInstrumented(iFullName, "$stripped.scala", strippedContents)
+ (iSourceName, sSourceName)
+/*
+ * val vdirOpt = compileInstrumented(iSourceName, arguments.tail)
+ runInstrumented(vdirOpt, iFullName, strippedSource.content)
+ */
+ }
+ }
+
loop { line =>
(line split " ").toList match {
case "reload" :: args =>
@@ -120,10 +170,8 @@ object REPL {
comp.askReload(List(toSourceFile(file)), reloadResult)
Thread.sleep(millis.toInt)
println("ask type now")
- comp.askType(toSourceFile(file), false, typedResult)
+ comp.askLoadedTyped(toSourceFile(file), keepLoaded = true, typedResult)
typedResult.get
- case List("typed", file) =>
- doTypedTree(file)
case List("typeat", file, off1, off2) =>
doTypeAt(makePos(file, off1, off2))
case List("typeat", file, off1) =>
@@ -132,24 +180,43 @@ object REPL {
doComplete(makePos(file, off1, off2))
case List("complete", file, off1) =>
doComplete(makePos(file, off1, off1))
+ case "instrument" :: arguments =>
+ println(instrument(arguments, -1))
+ case "instrumentTo" :: line :: arguments =>
+ println(instrument(arguments, line.toInt))
case List("quit") =>
comp.askShutdown()
- sys.exit(1)
+ exit(1) // Don't use sys yet as this has to run on 2.8.2 also.
case List("structure", file) =>
doStructure(file)
case _ =>
- println("unrecongized command")
+ print("""Available commands:
+ | reload <file_1> ... <file_n>
+ | reloadAndAskType <file> <sleep-ms>
+ | typed <file>
+ | typeat <file> <start-pos> <end-pos>
+ | typeat <file> <pos>
+ | complete <file> <start-pos> <end-pos>
+ | compile <file> <pos>
+ | instrument <file> <arg>*
+ | instrumentTo <line-num> <file> <arg>*
+ | structure <file>
+ | quit
+ |""".stripMargin)
}
}
}
def toSourceFile(name: String) = new BatchSourceFile(new PlainFile(new java.io.File(name)))
- def show[T](svar: Response[T]) {
- svar.get match {
- case Left(result) => println("==> "+result)
- case Right(exc) => exc.printStackTrace; println("ERROR: "+exc)
+ def using[T, U](svar: Response[T])(op: T => U): Option[U] = {
+ val res = svar.get match {
+ case Left(result) => Some(op(result))
+ case Right(exc) => exc.printStackTrace; println("ERROR: "+exc); None
}
svar.clear()
+ res
}
+
+ def show[T](svar: Response[T]) = using(svar)(res => println("==> "+res))
}
diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
index 873ef50..b95f1fa 100644
--- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
@@ -1,13 +1,14 @@
/* NSC -- new Scala compiler
- * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package interactive
import ast.Trees
-import symtab.Positions
-import scala.tools.nsc.util.{SourceFile, Position, RangePosition, NoPosition, WorkScheduler}
+import ast.Positions
+import scala.reflect.internal.util.{SourceFile, Position, RangePosition, NoPosition}
+import scala.tools.nsc.util.WorkScheduler
import scala.collection.mutable.ListBuffer
/** Handling range positions
@@ -40,11 +41,11 @@ self: scala.tools.nsc.Global =>
/** A position that wraps a set of trees.
* The point of the wrapping position is the point of the default position.
* If some of the trees are ranges, returns a range position enclosing all ranges
- * Otherwise returns default position.
+ * Otherwise returns default position that is either focused or not.
*/
- override def wrappingPos(default: Position, trees: List[Tree]): Position = {
+ override def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = {
val ranged = trees filter (_.pos.isRange)
- if (ranged.isEmpty) default.focus
+ if (ranged.isEmpty) if (focus) default.focus else default
else new RangePosition(default.source, (ranged map (_.pos.start)).min, default.point, (ranged map (_.pos.end)).max)
}
@@ -58,13 +59,25 @@ self: scala.tools.nsc.Global =>
if (headpos.isDefined) wrappingPos(headpos, trees) else headpos
}
-/*
- override def integratePos(tree: Tree, pos: Position) =
- if (pos.isSynthetic && !tree.pos.isSynthetic) tree.syntheticDuplicate
- else tree
-*/
-
// -------------- ensuring no overlaps -------------------------------
+
+ /** Ensure that given tree has no positions that overlap with
+ * any of the positions of `others`. This is done by
+ * shortening the range, assigning TransparentPositions
+ * to some of the nodes in `tree` or focusing on the position.
+ */
+ override def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) {
+ def isOverlapping(pos: Position) =
+ pos.isRange && (others exists (pos overlaps _.pos))
+ if (isOverlapping(tree.pos)) {
+ val children = tree.children
+ children foreach (ensureNonOverlapping(_, others, focus))
+ if (tree.pos.isOpaqueRange) {
+ val wpos = wrappingPos(tree.pos, children, focus)
+ tree setPos (if (isOverlapping(wpos)) tree.pos.makeTransparent else wpos)
+ }
+ }
+ }
def solidDescendants(tree: Tree): List[Tree] =
if (tree.pos.isTransparent) tree.children flatMap solidDescendants
@@ -105,24 +118,6 @@ self: scala.tools.nsc.Global =>
if (ts.head == t) replacement ::: ts.tail
else ts.head :: replace(ts.tail, t, replacement)
- /** Ensure that given tree has no positions that overlap with
- * any of the positions of `others`. This is done by
- * shortening the range or assigning TransparentPositions
- * to some of the nodes in `tree`.
- */
- override def ensureNonOverlapping(tree: Tree, others: List[Tree]) {
- def isOverlapping(pos: Position) =
- pos.isRange && (others exists (pos overlaps _.pos))
- if (isOverlapping(tree.pos)) {
- val children = tree.children
- children foreach (ensureNonOverlapping(_, others))
- if (tree.pos.isOpaqueRange) {
- val wpos = wrappingPos(tree.pos.focus, children)
- tree setPos (if (isOverlapping(wpos)) tree.pos.makeTransparent else wpos)
- }
- }
- }
-
/** Does given list of trees have mutually non-overlapping positions?
* pre: None of the trees is transparent
*/
@@ -168,7 +163,7 @@ self: scala.tools.nsc.Global =>
/** Position a tree.
* This means: Set position of a node and position all its unpositioned children.
*/
- override def atPos[T <: Tree](pos: Position)(tree: T): T =
+ override def atPos[T <: Tree](pos: Position)(tree: T): T = {
if (pos.isOpaqueRange) {
if (!tree.isEmpty && tree.pos == NoPosition) {
tree.setPos(pos)
@@ -182,6 +177,7 @@ self: scala.tools.nsc.Global =>
} else {
super.atPos(pos)(tree)
}
+ }
// ---------------- Validating positions ----------------------------------
@@ -190,26 +186,33 @@ self: scala.tools.nsc.Global =>
val source = if (tree.pos.isDefined) tree.pos.source else ""
inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.productPrefix+" at "+tree.pos.show+source)
inform("")
- inform(tree.toString)
+ inform(treeStatus(tree))
inform("")
}
def positionError(msg: String)(body : => Unit) {
- inform("======= Bad positions: "+msg)
- inform("")
+ inform("======= Position error\n" + msg)
body
- inform("=== While validating")
- inform("")
- inform(tree.toString)
- inform("")
+ inform("\nWhile validating #" + tree.id)
+ inform(treeStatus(tree))
+ inform("\nChildren:")
+ tree.children map (t => " " + treeStatus(t, tree)) foreach inform
inform("=======")
throw new ValidateException(msg)
}
def validate(tree: Tree, encltree: Tree): Unit = {
+
if (!tree.isEmpty) {
+ if (settings.Yposdebug.value && (settings.verbose.value || settings.Yrangepos.value))
+ println("[%10s] %s".format("validate", treeStatus(tree, encltree)))
+
if (!tree.pos.isDefined)
- positionError("Unpositioned tree ["+tree.id+"]") { reportTree("Unpositioned", tree) }
+ positionError("Unpositioned tree #"+tree.id) {
+ inform("%15s %s".format("unpositioned", treeStatus(tree, encltree)))
+ inform("%15s %s".format("enclosing", treeStatus(encltree)))
+ encltree.children foreach (t => inform("%15s %s".format("sibling", treeStatus(t, encltree))))
+ }
if (tree.pos.isRange) {
if (!encltree.pos.isRange)
positionError("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") {
@@ -239,7 +242,8 @@ self: scala.tools.nsc.Global =>
}
}
- validate(tree, tree)
+ if (phase.id <= currentRun.typerPhase.id)
+ validate(tree, tree)
}
class ValidateException(msg : String) extends Exception(msg)
@@ -260,7 +264,8 @@ self: scala.tools.nsc.Global =>
protected def isEligible(t: Tree) = !t.pos.isTransparent
override def traverse(t: Tree) {
t match {
- case tt : TypeTree if tt.original != null => traverse(tt.original)
+ case tt : TypeTree if tt.original != null && (tt.pos includes tt.original.pos) =>
+ traverse(tt.original)
case _ =>
if (t.pos includes pos) {
if (isEligible(t)) last = t
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
index 67cf037..b2ef45a 100644
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Iulian Dragos
* @author Hubert Plocinicak
*/
@@ -12,7 +12,8 @@ import scala.util.control.Breaks._
import scala.tools.nsc.symtab.Flags
import dependencies._
-import util.{FakePos, ClassPath}
+import scala.reflect.internal.util.FakePos
+import util.ClassPath
import io.AbstractFile
import scala.tools.util.PathResolver
@@ -22,6 +23,7 @@ import scala.tools.util.PathResolver
* changes require a compilation. It repeats this process until
* a fixpoint is reached.
*/
+@deprecated("Use sbt incremental compilation mechanism", "2.10.0")
class RefinedBuildManager(val settings: Settings) extends Changes with BuildManager {
class BuilderGlobal(settings: Settings, reporter : Reporter) extends scala.tools.nsc.Global(settings, reporter) {
@@ -33,8 +35,9 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
super.computeInternalPhases
phasesSet += dependencyAnalysis
}
- lazy val _classpath: ClassPath[_] = new NoSourcePathPathResolver(settings).result
- override def classPath: ClassPath[_] = _classpath
+ lazy val _classpath = new NoSourcePathPathResolver(settings).result
+ override def classPath = _classpath.asInstanceOf[ClassPath[platform.BinaryRepr]]
+ // See discussion in JavaPlatForm for why we need a cast here.
def newRun() = new Run()
}
@@ -46,7 +49,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
protected def newCompiler(settings: Settings) = new BuilderGlobal(settings)
val compiler = newCompiler(settings)
- import compiler.{Symbol, Type, atPhase, currentRun}
+ import compiler.{ Symbol, Type, beforeErasure }
import compiler.dependencyAnalysis.Inherited
private case class SymWithHistory(sym: Symbol, befErasure: Type)
@@ -113,8 +116,8 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
// See if we really have corresponding symbols, not just those
// which share the name
def isCorrespondingSym(from: Symbol, to: Symbol): Boolean =
- (from.hasTraitFlag == to.hasTraitFlag) &&
- (from.hasModuleFlag == to.hasModuleFlag)
+ (from.hasFlag(Flags.TRAIT) == to.hasFlag(Flags.TRAIT)) && // has to run in 2.8, so no hasTraitFlag
+ (from.hasFlag(Flags.MODULE) == to.hasFlag(Flags.MODULE))
// For testing purposes only, order irrelevant for compilation
def toStringSet(set: Set[AbstractFile]): String =
@@ -158,17 +161,13 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
isCorrespondingSym(s.sym, sym)) match {
case Some(SymWithHistory(oldSym, info)) =>
val changes = changeSet(oldSym.info, sym)
- val changesErasure =
- atPhase(currentRun.erasurePhase.prev) {
- changeSet(info, sym)
- }
+ val changesErasure = beforeErasure(changeSet(info, sym))
+
changesOf(oldSym) = (changes ++ changesErasure).distinct
case _ =>
// a new top level definition
- changesOf(sym) =
- sym.info.parents.filter(_.typeSymbol.isSealed).map(
- p => changeChangeSet(p.typeSymbol,
- sym+" extends a sealed "+p.typeSymbol))
+ changesOf(sym) = sym.parentSymbols filter (_.isSealed) map (p =>
+ changeChangeSet(p, sym+" extends a sealed "+p))
}
}
// Create a change for the top level classes that were removed
@@ -221,7 +220,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
}
/** Return the set of source files that are invalidated by the given changes. */
- def invalidated(files: Set[AbstractFile], changesOf: collection.Map[Symbol, List[Change]],
+ def invalidated(files: Set[AbstractFile], changesOf: scala.collection.Map[Symbol, List[Change]],
processed: Set[AbstractFile] = Set.empty):
Set[AbstractFile] = {
val buf = new mutable.HashSet[AbstractFile]
@@ -241,7 +240,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
for ((oldSym, changes) <- changesOf; change <- changes) {
def checkParents(cls: Symbol, file: AbstractFile) {
- val parentChange = cls.info.parents.exists(_.typeSymbol.fullName == oldSym.fullName)
+ val parentChange = cls.parentSymbols exists (_.fullName == oldSym.fullName)
// if (settings.buildmanagerdebug.value)
// compiler.inform("checkParents " + cls + " oldSym: " + oldSym + " parentChange: " + parentChange + " " + cls.info.parents)
change match {
@@ -330,14 +329,10 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
/** Update the map of definitions per source file */
private def updateDefinitions(files: Set[AbstractFile]) {
- for (src <- files; val localDefs = compiler.dependencyAnalysis.definitions(src)) {
+ for (src <- files; localDefs = compiler.dependencyAnalysis.definitions(src)) {
definitions(src) = (localDefs map (s => {
this.classes += s.fullName -> src
- SymWithHistory(
- s.cloneSymbol,
- atPhase(currentRun.erasurePhase.prev) {
- s.info.cloneInfo(s)
- })
+ SymWithHistory(s.cloneSymbol, beforeErasure(s.info.cloneInfo(s)))
}))
}
this.references = compiler.dependencyAnalysis.references
@@ -352,7 +347,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
success
}
- /** Save dependency information to `file'. */
+ /** Save dependency information to `file`. */
def saveTo(file: AbstractFile, fromFile: AbstractFile => String) {
compiler.dependencyAnalysis.dependenciesFile = file
compiler.dependencyAnalysis.saveDependencies(fromFile)
diff --git a/src/compiler/scala/tools/nsc/interactive/Response.scala b/src/compiler/scala/tools/nsc/interactive/Response.scala
index fbb07b1..f36f769 100644
--- a/src/compiler/scala/tools/nsc/interactive/Response.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Response.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
diff --git a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
index 9ef7d33..b83c2cd 100644
--- a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
@@ -1,12 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package interactive
-import scala.tools.nsc.util.{SourceFile, Position, NoPosition}
-import collection.mutable.ArrayBuffer
+import scala.reflect.internal.util.{SourceFile, Position, NoPosition}
+import scala.collection.mutable.ArrayBuffer
trait RichCompilationUnits { self: Global =>
diff --git a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala b/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
new file mode 100644
index 0000000..7f0265b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
@@ -0,0 +1,200 @@
+package scala.tools.nsc
+package interactive
+
+import scala.reflect.internal.util.{SourceFile, BatchSourceFile, RangePosition}
+import scala.collection.mutable.ArrayBuffer
+import scala.reflect.internal.Chars.{isLineBreakChar, isWhitespace}
+import ast.parser.Tokens._
+
+@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
+trait ScratchPadMaker { self: Global =>
+
+ import definitions._
+
+ private case class Patch(offset: Int, text: String)
+
+ private class Patcher(contents: Array[Char], lex: LexicalStructure, endOffset: Int) extends Traverser {
+ var objectName: String = ""
+
+ private val patches = new ArrayBuffer[Patch]
+ private val toPrint = new ArrayBuffer[String]
+ private var skipped = 0
+ private var resNum: Int = -1
+
+ private def nextRes(): String = {
+ resNum += 1
+ "res$"+resNum
+ }
+
+ private def nameType(name: String, tpe: Type): String = {
+ // if name ends in symbol character, add a space to separate it from the following ':'
+ val pad = if (Character.isLetter(name.last) || Character.isDigit(name.last)) "" else " "
+ name+pad+": "+tpe
+ }
+
+ private def nameType(sym: Symbol): String = nameType(sym.name.decoded, sym.tpe)
+
+ private def literal(str: String) = "\"\"\""+str+"\"\"\""
+
+ private val prologue = ";import scala.runtime.WorksheetSupport._; def main(args: Array[String])=$execute{"
+
+ private val epilogue = "}"
+
+ private def applyPendingPatches(offset: Int) = {
+ if (skipped == 0) patches += Patch(offset, prologue)
+ for (msg <- toPrint) patches += Patch(offset, ";System.out.println("+msg+")")
+ toPrint.clear()
+ }
+
+ /** The position where to insert an instrumentation statement in front of a given statement.
+ * This is at the latest `stat.pos.start`. But in order not to mess with column numbers
+ * in position we try to insert it at the end of the previous token instead.
+ * Furthermore, `(' tokens have to be skipped because they do not show up
+ * in statement range positions.
+ */
+ private def instrumentPos(start: Int): Int = {
+ val (prevToken, prevStart, prevEnd) = lex.locate(start - 1)
+ if (prevStart >= start) start
+ else if (prevToken == LPAREN) instrumentPos(prevStart)
+ else prevEnd
+ }
+
+ private def addSkip(stat: Tree): Unit = {
+ val ipos = instrumentPos(stat.pos.start)
+ if (stat.pos.start > skipped) applyPendingPatches(ipos)
+ if (stat.pos.start >= endOffset)
+ patches += Patch(ipos, ";$stop()")
+ var end = stat.pos.end
+ if (end > skipped) {
+ while (end < contents.length && !isLineBreakChar(contents(end))) end += 1
+ patches += Patch(ipos, ";$skip("+(end-skipped)+"); ")
+ skipped = end
+ }
+ }
+
+ private def addSandbox(expr: Tree) = {}
+// patches += (Patch(expr.pos.start, "sandbox("), Patch(expr.pos.end, ")"))
+
+ private def resultString(prefix: String, expr: String) =
+ literal(prefix + " = ") + " + $show(" + expr + ")"
+
+ private def traverseStat(stat: Tree) =
+ if (stat.pos.isInstanceOf[RangePosition]) {
+ stat match {
+ case ValDef(_, _, _, rhs) =>
+ addSkip(stat)
+ if (stat.symbol.isLazy)
+ toPrint += literal(nameType(stat.symbol) + " = <lazy>")
+ else if (!stat.symbol.isSynthetic) {
+ addSandbox(rhs)
+ toPrint += resultString(nameType(stat.symbol), stat.symbol.name.toString)
+ }
+ case DefDef(_, _, _, _, _, _) =>
+ addSkip(stat)
+ toPrint += literal(nameType(stat.symbol))
+ case Annotated(_, arg) =>
+ traverse(arg)
+ case DocDef(_, defn) =>
+ traverse(defn)
+ case _ =>
+ if (stat.isTerm) {
+ addSkip(stat)
+ if (stat.tpe.typeSymbol == UnitClass) {
+ addSandbox(stat)
+ } else {
+ val resName = nextRes()
+ val dispResName = resName filter ('$' != _)
+ val offset = instrumentPos(stat.pos.start)
+ patches += Patch(offset, "val " + resName + " = ")
+ addSandbox(stat)
+ toPrint += resultString(nameType(dispResName, stat.tpe), resName)
+ }
+ }
+ }
+ }
+
+ override def traverse(tree: Tree): Unit = tree match {
+ case PackageDef(_, _) =>
+ super.traverse(tree)
+ case ModuleDef(_, name, Template(_, _, body)) =>
+ val topLevel = objectName.isEmpty
+ if (topLevel) {
+ objectName = tree.symbol.fullName
+ body foreach traverseStat
+ if (skipped != 0) { // don't issue prologue and epilogue if there are no instrumented statements
+ applyPendingPatches(skipped)
+ patches += Patch(skipped, epilogue)
+ }
+ }
+ case _ =>
+ }
+
+ /** The patched text.
+ * @require traverse is run first
+ */
+ def result: Array[Char] = {
+ val reslen = contents.length + (patches map (_.text.length)).sum
+ val res = Array.ofDim[Char](reslen)
+ var lastOffset = 0
+ var from = 0
+ var to = 0
+ for (Patch(offset, text) <- patches) {
+ val delta = offset - lastOffset
+ assert(delta >= 0)
+ Array.copy(contents, from, res, to, delta)
+ from += delta
+ to += delta
+ lastOffset = offset
+ text.copyToArray(res, to)
+ to += text.length
+ }
+ assert(contents.length - from == reslen - to)
+ Array.copy(contents, from, res, to, contents.length - from)
+ res
+ }
+ }
+
+ class LexicalStructure(source: SourceFile) {
+ val token = new ArrayBuffer[Int]
+ val startOffset = new ArrayBuffer[Int]
+ val endOffset = new ArrayBuffer[Int]
+ private val scanner = new syntaxAnalyzer.UnitScanner(new CompilationUnit(source))
+ scanner.init()
+ while (scanner.token != EOF) {
+ startOffset += scanner.offset
+ token += scanner.token
+ scanner.nextToken
+ endOffset += scanner.lastOffset
+ }
+
+ /** @return token that starts before or at offset, its startOffset, its endOffset
+ */
+ def locate(offset: Int): (Int, Int, Int) = {
+ var lo = 0
+ var hi = token.length - 1
+ while (lo < hi) {
+ val mid = (lo + hi + 1) / 2
+ if (startOffset(mid) <= offset) lo = mid
+ else hi = mid - 1
+ }
+ (token(lo), startOffset(lo), endOffset(lo))
+ }
+ }
+
+ /** Compute an instrumented version of a sourcefile.
+ * @param source The given sourcefile.
+ * @param line The line up to which results should be printed, -1 = whole document.
+ * @return A pair consisting of
+ * - the fully qualified name of the first top-level object definition in the file.
+ * or "" if there are no object definitions.
+ * - the text of the instrumented program which, when run,
+ * prints its output and all defined values in a comment column.
+ */
+ protected def instrument(source: SourceFile, line: Int): (String, Array[Char]) = {
+ val tree = typedTree(source, true)
+ val endOffset = if (line < 0) source.length else source.lineToOffset(line + 1)
+ val patcher = new Patcher(source.content, new LexicalStructure(source), endOffset)
+ patcher.traverse(tree)
+ (patcher.objectName, patcher.result)
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
index 121f356..465dcaa 100644
--- a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
@@ -10,7 +10,7 @@ import scala.collection._
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
import dependencies._
-import util.FakePos
+import scala.reflect.internal.util.FakePos
import io.AbstractFile
/** A simple build manager, using the default scalac dependency tracker.
@@ -95,7 +95,7 @@ class SimpleBuildManager(val settings: Settings) extends BuildManager {
success
}
- /** Save dependency information to `file'. */
+ /** Save dependency information to `file`. */
def saveTo(file: AbstractFile, fromFile: AbstractFile => String) {
compiler.dependencyAnalysis.dependenciesFile = file
compiler.dependencyAnalysis.saveDependencies(fromFile)
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
index f78365a..1c722ea 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
@@ -1,16 +1,21 @@
/* NSC -- new Scala compiler
- * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
-package scala.tools.nsc.interactive
+package scala.tools.nsc
+package interactive
package tests
-import scala.tools.nsc.Settings
-import scala.tools.nsc.reporters.StoreReporter
-import scala.tools.nsc.util.{BatchSourceFile, SourceFile, Position}
-import scala.tools.nsc.io._
+import core._
-import scala.collection.{immutable, mutable}
+import java.io.File.pathSeparatorChar
+import java.io.File.separatorChar
+
+import scala.annotation.migration
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.SourceFile
+
+import scala.collection.mutable.ListBuffer
/** A base class for writing interactive compiler tests.
*
@@ -18,235 +23,107 @@ import scala.collection.{immutable, mutable}
* compiler: instantiation source files, reloading, creating positions, instantiating
* the presentation compiler, random stress testing.
*
- * By default, this class loads all classes found under `src/`. They are found in
- * `sourceFiles`. Positions can be created using `pos(file, line, col)`. The presentation
- * compiler is available through `compiler`.
+ * By default, this class loads all scala and java classes found under `src/`, going
+ * recursively into subfolders. Loaded classes are found in `sourceFiles` (see trait `TestResources`).
+ * The presentation compiler is available through `compiler`.
+ *
+ * It is easy to test member completion, type and hyperlinking at a given position. Source
+ * files are searched for `TextMarkers`. By default, the completion marker is `/*!*/`, the
+ * typedAt marker is `/*?*/` and the hyperlinking marker is `/*#*/`. Place these markers in
+ * your source files, and the test framework will automatically pick them up and test the
+ * corresponding actions. Sources are reloaded by `askReload(sourceFiles)` (blocking
+ * call). All ask operations are placed on the work queue without waiting for each one to
+ * complete before asking the next. After all asks, it waits for each response in turn and
+ * prints the result. The default timeout is 1 second per operation.
+ *
+ * To define a custom operation you have to:
*
- * It is easy to test member completion and type at a given position. Source
- * files are searched for /markers/. By default, the completion marker is `/*!*/` and the
- * typedAt marker is `/*?*/`. Place these markers in your source files, and call `completionTests`
- * and `typedAtTests` to print the results at all these positions. Sources are reloaded by `reloadSources`
- * (blocking call). All ask operations are placed on the work queue without waiting for each one to
- * complete before asking the next. After all asks, it waits for each response in turn and prints the result.
- * The default timout is 5 seconds per operation.
+ * (1) Define a new marker by extending `TestMarker`
+ * (2) Provide an implementation for the operation you want to check by extending `PresentationCompilerTestDef`
+ * (3) Add the class defined in (1) to the set of executed test actions by calling `++` on `InteractiveTest`.
*
- * The same mechanism can be used for custom operations. Use `askAllSources(marker)(f)(g)`. Give your custom
- * marker, and provide the two functions: one for creating the request, and the second for processing the
- * response, if it didn't time out and there was no error.
+ * Then you can simply use the new defined `marker` in your test sources and the testing
+ * framework will automatically pick it up.
*
* @see Check existing tests under test/files/presentation
*
- * @author Iulian Dragos
+ * @author Iulian Dragos
+ * @author Mirco Dotta
*/
-abstract class InteractiveTest {
-
- val completionMarker = "/*!*/"
- val typedAtMarker = "/*?*/"
- val TIMEOUT = 10000 // timeout in milliseconds
-
- val settings = new Settings
- val reporter= new StoreReporter
-
- /** The root directory for this test suite, usually the test kind ("test/files/presentation"). */
- val outDir = Path(Option(System.getProperty("partest.cwd")).getOrElse("."))
-
- /** The base directory for this test, usually a subdirectory of "test/files/presentation/" */
- val baseDir = Option(System.getProperty("partest.testname")).map(outDir / _).getOrElse(Path("."))
-
- /** If there's a file ending in .opts, read it and parse it for cmd line arguments. */
- val argsString = {
- val optsFile = outDir / "%s.opts".format(System.getProperty("partest.testname"))
- val str = try File(optsFile).slurp() catch {
- case e: java.io.IOException => ""
- }
- str.lines.filter(!_.startsWith("#")).mkString(" ")
- }
-
- /** Prepare the settings object. Load the .opts file and adjust all paths from the
- * Unix-like syntax to the platform specific syntax. This is necessary so that a
- * single .opts file can be used on all platforms.
- *
- * @note Bootclasspath is treated specially. If there is a -bootclasspath option in
- * the file, the 'usejavacp' setting is set to false. This ensures that the
- * bootclasspath takes precedence over the scala-library used to run the current
- * test.
- */
- def prepareSettings() {
- import java.io.File._
- def adjustPaths(paths: settings.PathSetting*) {
- for (p <- paths if argsString.contains(p.name)) p.value = p.value.map {
- case '/' => separatorChar
- case ':' => pathSeparatorChar
- case c => c
- }
- }
-
- // need this so that the classpath comes from what partest
- // instead of scala.home
- settings.usejavacp.value = !argsString.contains("-bootclasspath")
-
- // pass any options coming from outside
- settings.processArgumentString(argsString) match {
- case (false, rest) =>
- println("error processing arguments (unprocessed: %s)".format(rest))
- case _ => ()
- }
- adjustPaths(settings.bootclasspath, settings.classpath, settings.javabootclasspath, settings.sourcepath)
- }
-
- protected def printClassPath() {
- println("\toutDir: %s".format(outDir.path))
- println("\tbaseDir: %s".format(baseDir.path))
- println("\targsString: %s".format(argsString))
- println("\tbootClassPath: %s".format(settings.bootclasspath.value))
- println("\tverbose: %b".format(settings.verbose.value))
- }
-
- lazy val compiler = {
- prepareSettings()
- new Global(settings, reporter)
- }
-
- def sources(filename: String*): Seq[SourceFile] =
- for (f <- filename) yield
- source(if (f.startsWith("/")) Path(f) else baseDir / f)
-
- def source(file: Path) = new BatchSourceFile(AbstractFile.getFile(file.toFile))
- def source(filename: String): SourceFile = new BatchSourceFile(AbstractFile.getFile(filename))
-
- def pos(file: SourceFile, line: Int, col: Int): Position =
- file.position(line, col)
-
- def filesInDir(dir: Path): Iterator[Path] = {
- dir.toDirectory.list.filter(_.isFile)
- }
-
- /** Where source files are placed. */
- val sourceDir = "src"
-
- /** All .scala files below "src" directory. */
- lazy val sourceFiles: Array[SourceFile] =
- filesInDir(baseDir / sourceDir).filter(_.extension == "scala").map(source).toArray
-
- /** All positions of the given string in all source files. */
- def allPositionsOf(sources: Seq[SourceFile] = sourceFiles, str: String): immutable.Map[SourceFile, Seq[Position]] = {
- (for (s <- sources; p <- positionsOf(s, str)) yield p).groupBy(_.source)
- }
-
- /** Return all positions of the given str in the given source file. */
- def positionsOf(source: SourceFile, str: String): Seq[Position] = {
- val buf = new mutable.ListBuffer[Position]
- var pos = source.content.indexOfSlice(str)
- while (pos >= 0) {
-// buf += compiler.rangePos(source, pos - 1, pos - 1, pos - 1)
- buf += source.position(pos - 1) // we need the position before the first character of this marker
- pos = source.content.indexOfSlice(str, pos + 1)
- }
- buf.toList
- }
-
- /** Should askAllSources wait for each ask to finish before issueing the next? */
- val synchronousRequests = true
-
- /** Perform an operation on all sources at all positions that match the given
- * marker string. For instance, askAllSources("/*!*/")(askTypeAt)(println) would
- * ask the tyep at all positions marked with /*!*/ and println the result.
+abstract class InteractiveTest
+ extends AskParse
+ with AskShutdown
+ with AskReload
+ with AskLoadedTyped
+ with PresentationCompilerInstance
+ with CoreTestDefs
+ with InteractiveTestSettings { self =>
+
+ protected val runRandomTests = false
+
+ /** Should askAllSources wait for each ask to finish before issuing the next? */
+ override protected val synchronousRequests = true
+
+ /** The core set of test actions that are executed during each test run are
+ * `CompletionAction`, `TypeAction` and `HyperlinkAction`.
+ * Override this member if you need to change the default set of executed test actions.
*/
- def askAllSourcesAsync[T](marker: String)(askAt: Position => Response[T])(f: (Position, T) => Unit) {
- val positions = allPositionsOf(str = marker).valuesIterator.toList.flatten
- val responses = for (pos <- positions) yield askAt(pos)
-
- for ((pos, r) <- positions zip responses) withResponse(pos, r)(f)
+ protected lazy val testActions: ListBuffer[PresentationCompilerTestDef] = {
+ ListBuffer(new TypeCompletionAction(compiler), new ScopeCompletionAction(compiler), new TypeAction(compiler), new HyperlinkAction(compiler))
}
- /** Synchronous version of askAllSources. Each position is treated in turn, waiting for the
- * response before going to the next one.
+ /** Add new presentation compiler actions to test. Presentation compiler tests
+ * need to extend trait `PresentationCompilerTestDef`.
*/
- def askAllSourcesSync[T](marker: String)(askAt: Position => Response[T])(f: (Position, T) => Unit) {
- val positions = allPositionsOf(str = marker).valuesIterator.toList.flatten
- for (pos <- positions) withResponse(pos, askAt(pos))(f)
+ protected def ++(tests: PresentationCompilerTestDef*) {
+ testActions ++= tests
}
- def askAllSources[T] = if (synchronousRequests) askAllSourcesSync[T] _ else askAllSourcesAsync[T] _
-
- /** Return the filename:line:col version of this position. */
- def showPos(pos: Position): String =
- "%s:%d:%d".format(pos.source.file.name, pos.line, pos.column)
-
- protected def withResponse[T](pos: Position, response: Response[T])(f: (Position, T) => Unit) {
- response.get(TIMEOUT) match {
- case Some(Left(t)) =>
- f(pos, t)
- case None =>
- println("TIMEOUT: " + showPos(pos))
- case Some(r) =>
- println("ERROR: " + r)
- }
- }
-
- /** Ask completion for all marked positions in all sources.
- * A completion position is marked with /*!*/.
- */
- def completionTests() {
- askAllSources(completionMarker) { pos =>
- println("askTypeCompletion at " + pos.source.file.name + ((pos.line, pos.column)))
- val r = new Response[List[compiler.Member]]
- compiler.askTypeCompletion(pos, r)
- r
- } { (pos, members) =>
- println("\n" + "=" * 80)
- println("[response] aksTypeCompletion at " + (pos.line, pos.column))
- // we skip getClass because it changed signature between 1.5 and 1.6, so there is no
- // universal check file that we can provide for this to work
- println("retreived %d members".format(members.size))
- compiler ask { () =>
- println(members.sortBy(_.sym.name.toString).filterNot(_.sym.name.toString == "getClass").mkString("\n"))
- }
- }
+ /** Test's entry point */
+ def main(args: Array[String]) {
+ try execute()
+ finally shutdown()
}
- /** Ask for typedAt for all marker positions in all sources.
- */
- def typeAtTests() {
- askAllSources(typedAtMarker) { pos =>
- println("askTypeAt at " + pos.source.file.name + ((pos.line, pos.column)))
- val r = new Response[compiler.Tree]
- compiler.askTypeAt(pos, r)
- r
- } { (pos, tree) =>
- println("[response] askTypeAt at " + (pos.line, pos.column))
- compiler.ask(() => println(tree))
- }
+ protected def execute(): Unit = {
+ loadSources()
+ runDefaultTests()
}
- /** Reload the given source files and wait for them to be reloaded. */
- def reloadSources(sources: Seq[SourceFile] = sourceFiles) {
-// println("basedir: " + baseDir.path)
-// println("sourcedir: " + (baseDir / sourceDir).path)
- println("reload: " + sourceFiles.mkString("", ", ", ""))
- val reload = new Response[Unit]
- compiler.askReload(sourceFiles.toList, reload)
- reload.get
+ /** Load all sources before executing the test. */
+ protected def loadSources() {
+ // ask the presentation compiler to track all sources. We do
+ // not wait for the file to be entirely typed because we do want
+ // to exercise the presentation compiler on scoped type requests.
+ askReload(sourceFiles)
+ // make sure all sources are parsed before running the test. This
+ // is because test may depend on the sources having been parsed at
+ // least once
+ askParse(sourceFiles)
}
- def runTest(): Unit = {
- if (runRandomTests) randomTests(20, sourceFiles)
- completionTests()
- typeAtTests()
+ /** Run all defined `PresentationCompilerTestDef` */
+ protected def runDefaultTests() {
+ //TODO: integrate random tests!, i.e.: if (runRandomTests) randomTests(20, sourceFiles)
+ testActions.foreach(_.runTest())
}
/** Perform n random tests with random changes. */
- def randomTests(n: Int, files: Array[SourceFile]) {
- val tester = new Tester(n, files, settings)
+ private def randomTests(n: Int, files: Array[SourceFile]) {
+ val tester = new Tester(n, files, settings) {
+ override val compiler = self.compiler
+ override val reporter = new reporters.StoreReporter
+ }
tester.run()
}
- val runRandomTests = false
+ /** shutdown the presentation compiler. */
+ protected def shutdown() {
+ askShutdown()
- def main(args: Array[String]) {
- reloadSources()
- runTest
- compiler.askShutdown()
+ // this is actually needed to force exit on test completion.
+ // Note: may be a bug in either the testing framework or (less likely)
+ // the presentation compiler
+ sys.exit(0)
}
}
-
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
new file mode 100644
index 0000000..4d85ab9
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
@@ -0,0 +1,70 @@
+package scala.tools.nsc
+package interactive
+package tests
+
+import java.io.File.pathSeparatorChar
+import java.io.File.separatorChar
+import scala.tools.nsc.interactive.tests.core.PresentationCompilerInstance
+import scala.tools.nsc.io.{File,Path}
+import core.Reporter
+import core.TestSettings
+
+trait InteractiveTestSettings extends TestSettings with PresentationCompilerInstance {
+ /** Character delimiter for comments in .opts file */
+ private final val CommentStartDelimiter = "#"
+
+ private final val TestOptionsFileExtension = "flags"
+
+ /** Prepare the settings object. Load the .opts file and adjust all paths from the
+ * Unix-like syntax to the platform specific syntax. This is necessary so that a
+ * single .opts file can be used on all platforms.
+ *
+ * @note Bootclasspath is treated specially. If there is a -bootclasspath option in
+ * the file, the 'usejavacp' setting is set to false. This ensures that the
+ * bootclasspath takes precedence over the scala-library used to run the current
+ * test.
+ */
+ override protected def prepareSettings(settings: Settings) {
+ import java.io.File._
+ def adjustPaths(paths: settings.PathSetting*) {
+ for (p <- paths if argsString.contains(p.name)) p.value = p.value.map {
+ case '/' => separatorChar
+ case ':' => pathSeparatorChar
+ case c => c
+ }
+ }
+
+ // need this so that the classpath comes from what partest provides
+ // instead of from scala.home
+ settings.usejavacp.value = !argsString.contains("-bootclasspath")
+
+ // pass any options coming from outside
+ settings.processArgumentString(argsString) match {
+ case (false, rest) =>
+ println("error processing arguments (unprocessed: %s)".format(rest))
+ case _ => ()
+ }
+
+ // Make the --sourcepath path provided in the .flags file (if any) relative to the test's base directory
+ if(settings.sourcepath.isSetByUser)
+ settings.sourcepath.value = (baseDir / Path(settings.sourcepath.value)).path
+
+ adjustPaths(settings.bootclasspath, settings.classpath, settings.javabootclasspath, settings.sourcepath)
+ }
+
+ /** If there's a file ending in .opts, read it and parse it for cmd line arguments. */
+ protected val argsString = {
+ val optsFile = outDir / "%s.%s".format(System.getProperty("partest.testname"), TestOptionsFileExtension)
+ val str = try File(optsFile).slurp() catch {
+ case e: java.io.IOException => ""
+ }
+ str.lines.filter(!_.startsWith(CommentStartDelimiter)).mkString(" ")
+ }
+
+ override protected def printClassPath(implicit reporter: Reporter) {
+ reporter.println("\toutDir: %s".format(outDir.path))
+ reporter.println("\tbaseDir: %s".format(baseDir.path))
+ reporter.println("\targsString: %s".format(argsString))
+ super.printClassPath(reporter)
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala b/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
index 034db21..26aabbd 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
@@ -1,15 +1,15 @@
/* NSC -- new Scala compiler
- * Copyright 2009-2011 Scala Solutions and LAMP/EPFL
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package interactive
package tests
-import util._
+import scala.reflect.internal.util._
import reporters._
import io.AbstractFile
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
@@ -168,7 +168,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
}
case class ErrorTrace(
- sfidx: Int, changes: Seq[Change], infos: collection.Set[reporter.Info], content: Array[Char]) {
+ sfidx: Int, changes: Seq[Change], infos: scala.collection.Set[reporter.Info], content: Array[Char]) {
override def toString =
"Sourcefile: "+inputs(sfidx)+
"\nChanges:\n "+changes.mkString("\n ")+
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala
new file mode 100644
index 0000000..d5da52b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala
@@ -0,0 +1,122 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package interactive
+package tests.core
+
+import scala.tools.nsc.interactive.Response
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.SourceFile
+
+/**
+ * A trait for defining commands that can be queried to the
+ * presentation compiler.
+ * */
+trait AskCommand {
+
+ /** presentation compiler's instance. */
+ protected val compiler: Global
+
+ /**
+ * Presentation compiler's `askXXX` actions work by doing side-effects
+ * on a `Response` instance passed as an argument during the `askXXX`
+ * call.
+ * The defined method `ask` is meant to encapsulate this behavior.
+ * */
+ protected def ask[T](op: Response[T] => Unit): Response[T] = {
+ val r = new Response[T]
+ op(r)
+ r
+ }
+}
+
+/** Ask the presentation compiler to shut-down. */
+trait AskShutdown extends AskCommand {
+ def askShutdown() = compiler.askShutdown()
+}
+
+/** Ask the presentation compiler to parse a sequence of `sources` */
+trait AskParse extends AskCommand {
+ import compiler.Tree
+
+ /** `sources` need to be entirely parsed before running the test
+ * (else commands such as `AskTypeCompletionAt` may fail simply because
+ * the source's AST is not yet loaded).
+ */
+ def askParse(sources: Seq[SourceFile]) {
+ val responses = sources map (askParse(_))
+ responses.foreach(_.get) // force source files parsing
+ }
+
+ private def askParse(src: SourceFile, keepLoaded: Boolean = true): Response[Tree] = {
+ ask {
+ compiler.askParsedEntered(src, keepLoaded, _)
+ }
+ }
+}
+
+/** Ask the presentation compiler to reload a sequence of `sources` */
+trait AskReload extends AskCommand {
+
+ /** Reload the given source files and wait for them to be reloaded. */
+ protected def askReload(sources: Seq[SourceFile])(implicit reporter: Reporter): Response[Unit] = {
+ val sortedSources = (sources map (_.file.name)).sorted
+ reporter.println("reload: " + sortedSources.mkString(", "))
+
+ ask {
+ compiler.askReload(sources.toList, _)
+ }
+ }
+}
+
+/** Ask the presentation compiler for completion at a given position. */
+trait AskTypeCompletionAt extends AskCommand {
+ import compiler.Member
+
+ private[tests] def askTypeCompletionAt(pos: Position)(implicit reporter: Reporter): Response[List[Member]] = {
+ reporter.println("\naskTypeCompletion at " + pos.source.file.name + ((pos.line, pos.column)))
+
+ ask {
+ compiler.askTypeCompletion(pos, _)
+ }
+ }
+}
+
+/** Ask the presentation compiler for scope completion at a given position. */
+trait AskScopeCompletionAt extends AskCommand {
+ import compiler.Member
+
+ private[tests] def askScopeCompletionAt(pos: Position)(implicit reporter: Reporter): Response[List[Member]] = {
+ reporter.println("\naskScopeCompletion at " + pos.source.file.name + ((pos.line, pos.column)))
+
+ ask {
+ compiler.askScopeCompletion(pos, _)
+ }
+ }
+}
+
+/** Ask the presentation compiler for type info at a given position. */
+trait AskTypeAt extends AskCommand {
+ import compiler.Tree
+
+ private[tests] def askTypeAt(pos: Position)(implicit reporter: Reporter): Response[Tree] = {
+ reporter.println("\naskType at " + pos.source.file.name + ((pos.line, pos.column)))
+
+ ask {
+ compiler.askTypeAt(pos, _)
+ }
+ }
+}
+
+trait AskLoadedTyped extends AskCommand {
+ import compiler.Tree
+
+ protected def askLoadedTyped(source: SourceFile, keepLoaded: Boolean = false)(implicit reporter: Reporter): Response[Tree] = {
+ ask {
+ compiler.askLoadedTyped(source, keepLoaded, _)
+ }
+ }
+
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
new file mode 100644
index 0000000..214f7a4
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
@@ -0,0 +1,133 @@
+package scala.tools.nsc
+package interactive
+package tests.core
+
+import scala.reflect.internal.util.Position
+import scala.tools.nsc.interactive.tests.core._
+
+/** Set of core test definitions that are executed for each test run. */
+private[tests] trait CoreTestDefs
+ extends PresentationCompilerRequestsWorkingMode {
+
+ import scala.tools.nsc.interactive.Global
+
+ /** Ask the presentation compiler for completion at all locations
+ * (in all sources) where the defined `marker` is found. */
+ class TypeCompletionAction(override val compiler: Global)
+ extends PresentationCompilerTestDef
+ with AskTypeCompletionAt {
+
+ def memberPrinter(member: compiler.Member): String =
+ "[accessible: %5s] ".format(member.accessible) + "`" + (member.sym.toString() + member.tpe.toString()).trim() + "`"
+
+ override def runTest() {
+ askAllSources(TypeCompletionMarker) { pos =>
+ askTypeCompletionAt(pos)
+ } { (pos, members) =>
+ withResponseDelimiter {
+ reporter.println("[response] askTypeCompletion at " + format(pos))
+ // we skip getClass because it changed signature between 1.5 and 1.6, so there is no
+ // universal check file that we can provide for this to work
+ reporter.println("retrieved %d members".format(members.size))
+ compiler ask { () =>
+ val filtered = members.filterNot(member => member.sym.name.toString == "getClass" || member.sym.isConstructor)
+ reporter.println(filtered.map(memberPrinter).sortBy(_.toString()).mkString("\n"))
+ }
+ }
+ }
+ }
+ }
+
+ /** Ask the presentation compiler for completion at all locations
+ * (in all sources) where the defined `marker` is found. */
+ class ScopeCompletionAction(override val compiler: Global)
+ extends PresentationCompilerTestDef
+ with AskScopeCompletionAt {
+
+ def memberPrinter(member: compiler.Member): String =
+ "[accessible: %5s] ".format(member.accessible) + "`" + (member.sym.toString() + member.tpe.toString()).trim() + "`"
+
+ override def runTest() {
+ askAllSources(ScopeCompletionMarker) { pos =>
+ askScopeCompletionAt(pos)
+ } { (pos, members) =>
+ withResponseDelimiter {
+ reporter.println("[response] askScopeCompletion at " + format(pos))
+ try {
+ // exclude members not from source (don't have position), for more focussed and self contained tests.
+ def eligible(sym: compiler.Symbol) = sym.pos != compiler.NoPosition
+ val filtered = members.filter(member => eligible(member.sym))
+ reporter.println("retrieved %d members".format(filtered.size))
+ compiler ask { () =>
+ reporter.println(filtered.map(memberPrinter).sortBy(_.toString()).mkString("\n"))
+ }
+ } catch {
+ case t: Throwable =>
+ t.printStackTrace()
+ }
+
+ }
+ }
+ }
+ }
+
+ /** Ask the presentation compiler for type info at all locations
+ * (in all sources) where the defined `marker` is found. */
+ class TypeAction(override val compiler: Global)
+ extends PresentationCompilerTestDef
+ with AskTypeAt {
+
+ override def runTest() {
+ askAllSources(TypeMarker) { pos =>
+ askTypeAt(pos)
+ } { (pos, tree) =>
+ withResponseDelimiter {
+ reporter.println("[response] askTypeAt at " + format(pos))
+ compiler.ask(() => reporter.println(tree))
+ }
+ }
+ }
+ }
+
+ /** Ask the presentation compiler for hyperlink at all locations
+ * (in all sources) where the defined `marker` is found. */
+ class HyperlinkAction(override val compiler: Global)
+ extends PresentationCompilerTestDef
+ with AskTypeAt
+ with AskTypeCompletionAt {
+
+ override def runTest() {
+ askAllSources(HyperlinkMarker) { pos =>
+ askTypeAt(pos)(NullReporter)
+ } { (pos, tree) =>
+ if(tree.symbol == compiler.NoSymbol) {
+ reporter.println("\nNo symbol is associated with tree: "+tree)
+ }
+ else {
+ reporter.println("\naskHyperlinkPos for `" + tree.symbol.name + "` at " + format(pos) + " " + pos.source.file.name)
+ val r = new Response[Position]
+ // `tree.symbol.sourceFile` was discovered to be null when testing using virtpatmat on the akka presentation test, where a position had shifted to point to `Int`
+ // askHyperlinkPos for `Int` at (73,19) pi.scala --> class Int in package scala has null sourceFile!
+ val treePath = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.path else null
+ val treeName = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.name else null
+ val sourceFile = sourceFiles.find(_.path == treePath) match {
+ case Some(source) =>
+ compiler.askLinkPos(tree.symbol, source, r)
+ r.get match {
+ case Left(pos) =>
+ val resolvedPos = if (tree.symbol.pos.isDefined) tree.symbol.pos else pos
+ withResponseDelimiter {
+ reporter.println("[response] found askHyperlinkPos for `" + tree.symbol.name + "` at " + format(resolvedPos) + " " + tree.symbol.sourceFile.name)
+ }
+ case Right(ex) =>
+ ex.printStackTrace()
+ }
+ case None =>
+ reporter.println("[error] could not locate sourcefile `" + treeName + "`." +
+ "Hint: Does the looked up definition come form a binary?")
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
new file mode 100644
index 0000000..f304eda
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
@@ -0,0 +1,35 @@
+package scala.tools.nsc
+package interactive
+package tests.core
+
+import reporters.{Reporter => CompilerReporter}
+import scala.reflect.internal.util.Position
+
+/** Trait encapsulating the creation of a presentation compiler's instance.*/
+private[tests] trait PresentationCompilerInstance extends TestSettings {
+ protected val settings = new Settings
+ protected val withDocComments = false
+
+ protected val compilerReporter: CompilerReporter = new InteractiveReporter {
+ override def compiler = PresentationCompilerInstance.this.compiler
+ }
+
+ protected lazy val compiler: Global = {
+ prepareSettings(settings)
+ new Global(settings, compilerReporter) {
+ override def forScaladoc = withDocComments
+ }
+ }
+
+ /**
+ * Called before instantiating the presentation compiler's instance.
+ * You should provide an implementation of this method if you need
+ * to customize the `settings` used to instantiate the presentation compiler.
+ * */
+ protected def prepareSettings(settings: Settings) {}
+
+ protected def printClassPath(implicit reporter: Reporter) {
+ reporter.println("\tbootClassPath: %s".format(settings.bootclasspath.value))
+ reporter.println("\tverbose: %b".format(settings.verbose.value))
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
new file mode 100644
index 0000000..b5ae5f2
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
@@ -0,0 +1,62 @@
+package scala.tools.nsc
+package interactive
+package tests.core
+
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.SourceFile
+
+trait PresentationCompilerRequestsWorkingMode extends TestResources {
+
+ protected def synchronousRequests: Boolean
+
+ protected def askAllSources[T] = if (synchronousRequests) askAllSourcesSync[T] _ else askAllSourcesAsync[T] _
+
+ /** Perform an operation on all sources at all positions that match the given
+ * `marker`. For instance, askAllSources(TypeMarker)(askTypeAt)(println) would
+ * ask the type at all positions marked with `TypeMarker.marker` and println the result.
+ */
+ private def askAllSourcesAsync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit) {
+ val positions = allPositionsOf(str = marker.marker)
+ val responses = for (pos <- positions) yield askAt(pos)
+
+ for ((pos, r) <- positions zip responses) withResponse(pos, r)(f)
+ }
+
+ /** Synchronous version of askAllSources. Each position is treated in turn, waiting for the
+ * response before going to the next one.
+ */
+ private def askAllSourcesSync[T](marker: TestMarker)(askAt: Position => Response[T])(f: (Position, T) => Unit) {
+ val positions = allPositionsOf(str = marker.marker)
+ for (pos <- positions) withResponse(pos, askAt(pos))(f)
+ }
+
+ /** All positions of the given string in all source files. */
+ private def allPositionsOf(srcs: Seq[SourceFile] = sourceFiles, str: String): Seq[Position] =
+ for (s <- srcs; p <- positionsOf(s, str)) yield p
+
+ /** Return all positions of the given str in the given source file. */
+ private def positionsOf(source: SourceFile, str: String): Seq[Position] = {
+ val buf = new scala.collection.mutable.ListBuffer[Position]
+ var pos = source.content.indexOfSlice(str)
+ while (pos >= 0) {
+ buf += source.position(pos - 1) // we need the position before the first character of this marker
+ pos = source.content.indexOfSlice(str, pos + 1)
+ }
+ buf.toList
+ }
+
+ private def withResponse[T](pos: Position, response: Response[T])(f: (Position, T) => Unit) {
+ /** Return the filename:line:col version of this position. */
+ def showPos(pos: Position): String =
+ "%s:%d:%d".format(pos.source.file.name, pos.line, pos.column)
+
+ response.get(TIMEOUT) match {
+ case Some(Left(t)) =>
+ f(pos, t)
+ case None =>
+ println("TIMEOUT: " + showPos(pos))
+ case Some(r) =>
+ println("ERROR: " + r)
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
new file mode 100644
index 0000000..9cf2aa4
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
@@ -0,0 +1,19 @@
+package scala.tools.nsc.interactive.tests.core
+
+import scala.tools.nsc.interactive.Global
+import scala.reflect.internal.util.Position
+
+trait PresentationCompilerTestDef {
+
+ private[tests] def runTest(): Unit
+
+ protected def withResponseDelimiter(block: => Unit)(implicit reporter: Reporter) {
+ def printDelimiter() = reporter.println("=" * 80)
+ printDelimiter()
+ block
+ printDelimiter()
+ }
+
+ protected def format(pos: Position): String =
+ (if(pos.isDefined) "(%d,%d)".format(pos.line, pos.column) else "<no position>")
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala
new file mode 100644
index 0000000..631504c
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala
@@ -0,0 +1,15 @@
+package scala.tools.nsc.interactive.tests.core
+
+private[tests] trait Reporter {
+ def println(msg: Any): Unit
+}
+
+/** Reporter that simply prints all messages in the standard output.*/
+private[tests] object ConsoleReporter extends Reporter {
+ def println(msg: Any) { Console.println(msg) }
+}
+
+/** Reporter that swallows all passed message. */
+private[tests] object NullReporter extends Reporter {
+ def println(msg: Any) {}
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
new file mode 100644
index 0000000..e80b741
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
@@ -0,0 +1,22 @@
+package scala.tools.nsc.interactive.tests.core
+
+import scala.reflect.internal.util.{SourceFile,BatchSourceFile}
+import scala.tools.nsc.io.{AbstractFile,Path}
+
+private[tests] object SourcesCollector {
+ import Path._
+ type SourceFilter = Path => Boolean
+
+ /**
+ * All files below `base` directory that pass the `filter`.
+ * With the default `filter` only .scala and .java files are collected.
+ * */
+ def apply(base: Path, filter: SourceFilter): Array[SourceFile] = {
+ assert(base.isDirectory)
+ base.walk.filter(filter).map(source).toList.toArray.sortBy(_.file.name)
+ }
+
+ private def source(file: Path): SourceFile = source(AbstractFile.getFile(file.toFile))
+ private def source(filename: String): SourceFile = source(AbstractFile.getFile(filename))
+ private def source(file: AbstractFile): SourceFile = new BatchSourceFile(file)
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala
new file mode 100644
index 0000000..8698ada
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala
@@ -0,0 +1,29 @@
+package scala.tools.nsc.interactive.tests.core
+
+case class DuplicateTestMarker(msg: String) extends Exception(msg)
+
+object TestMarker {
+ import scala.collection.mutable.Map
+ private val markers: Map[String, TestMarker] = Map.empty
+
+ private def checkForDuplicate(marker: TestMarker) {
+ markers.get(marker.marker) match {
+ case None => markers(marker.marker) = marker
+ case Some(otherMarker) =>
+ val msg = "Marker `%s` is already used by %s. Please choose a different marker for %s".format(marker.marker, marker, otherMarker)
+ throw new DuplicateTestMarker(msg)
+ }
+ }
+}
+
+abstract case class TestMarker(val marker: String) {
+ TestMarker.checkForDuplicate(this)
+}
+
+object TypeCompletionMarker extends TestMarker("/*!*/")
+
+object ScopeCompletionMarker extends TestMarker("/*_*/")
+
+object TypeMarker extends TestMarker("/*?*/")
+
+object HyperlinkMarker extends TestMarker("/*#*/")
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala
new file mode 100644
index 0000000..887c3cf
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala
@@ -0,0 +1,12 @@
+package scala.tools.nsc.interactive.tests.core
+
+import scala.tools.nsc.io.Path
+import scala.reflect.internal.util.SourceFile
+
+/** Resources used by the test. */
+private[tests] trait TestResources extends TestSettings {
+ /** collected source files that are to be used by the test runner */
+ protected lazy val sourceFiles: Array[SourceFile] = SourcesCollector(baseDir / sourceDir, isScalaOrJavaSource)
+
+ private def isScalaOrJavaSource(file: Path): Boolean = file.extension == "scala" | file.extension == "java"
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala
new file mode 100644
index 0000000..6812041
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala
@@ -0,0 +1,19 @@
+package scala.tools.nsc.interactive.tests.core
+
+import scala.tools.nsc.io.Path
+
+/** Common settings for the test. */
+private[tests] trait TestSettings {
+ protected final val TIMEOUT = 10000 // timeout in milliseconds
+
+ /** The root directory for this test suite, usually the test kind ("test/files/presentation"). */
+ protected val outDir = Path(Option(System.getProperty("partest.cwd")).getOrElse("."))
+
+ /** The base directory for this test, usually a subdirectory of "test/files/presentation/" */
+ protected val baseDir = Option(System.getProperty("partest.testname")).map(outDir / _).getOrElse(Path("."))
+
+ /** Where source files are placed. */
+ protected val sourceDir = "src"
+
+ protected implicit val reporter: Reporter = ConsoleReporter
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
index 85dcff2..59508fa 100644
--- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
*/
package scala.tools.nsc
@@ -7,14 +7,15 @@ package interpreter
import scala.tools.nsc.io.{ File, AbstractFile }
import util.ScalaClassLoader
-import java.net.URL
+import java.net.{ URL, URLConnection, URLStreamHandler }
+import scala.collection.{ mutable, immutable }
/**
* A class loader that loads files from a {@link scala.tools.nsc.io.AbstractFile}.
*
* @author Lex Spoon
*/
-class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader)
+class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
extends ClassLoader(parent)
with ScalaClassLoader
{
@@ -38,22 +39,69 @@ class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader)
}
}
+ protected def dirNameToPath(name: String): String =
+ name.replace('.', '/')
+
+ protected def findAbstractDir(name: String): AbstractFile = {
+ var file: AbstractFile = root
+ val pathParts = dirNameToPath(name) split '/'
+
+ for (dirPart <- pathParts) {
+ file = file.lookupName(dirPart, true)
+ if (file == null)
+ return null
+ }
+
+ return file
+ }
+
+ // parent delegation in JCL uses getResource; so either add parent.getResAsStream
+ // or implement findResource, which we do here as a study in scarlet (my complexion
+ // after looking at CLs and URLs)
+ override def findResource(name: String): URL = findAbstractFile(name) match {
+ case null => null
+ case file => new URL(null, "repldir:" + file.path, new URLStreamHandler {
+ override def openConnection(url: URL): URLConnection = new URLConnection(url) {
+ override def connect() { }
+ override def getInputStream = file.input
+ }
+ })
+ }
+ // this inverts delegation order: super.getResAsStr calls parent.getRes if we fail
override def getResourceAsStream(name: String) = findAbstractFile(name) match {
case null => super.getResourceAsStream(name)
case file => file.input
}
+ // ScalaClassLoader.classBytes uses getResAsStream, so we'll try again before delegating
override def classBytes(name: String): Array[Byte] = findAbstractFile(name) match {
case null => super.classBytes(name)
case file => file.toByteArray
}
override def findClass(name: String): JClass = {
val bytes = classBytes(name)
- if (bytes.isEmpty) throw new ClassNotFoundException(name)
- else defineClass(name, bytes, 0, bytes.length)
- }
- // Don't know how to construct an URL for something which exists only in memory
- // override def getResource(name: String): URL = findAbstractFile(name) match {
- // case null => super.getResource(name)
- // case file => new URL(...)
- // }
+ if (bytes.length == 0)
+ throw new ClassNotFoundException(name)
+ else
+ defineClass(name, bytes, 0, bytes.length)
+ }
+
+ private val packages = mutable.Map[String, Package]()
+
+ override def definePackage(name: String, specTitle: String, specVersion: String, specVendor: String, implTitle: String, implVersion: String, implVendor: String, sealBase: URL): Package = {
+ throw new UnsupportedOperationException()
+ }
+
+ override def getPackage(name: String): Package = {
+ findAbstractDir(name) match {
+ case null => super.getPackage(name)
+ case file => packages.getOrElseUpdate(name, {
+ val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[URL], classOf[ClassLoader])
+ ctor.setAccessible(true)
+ ctor.newInstance(name, null, null, null, null, null, null, null, this)
+ })
+ }
+ }
+
+ override def getPackages(): Array[Package] =
+ root.iterator.filter(_.isDirectory).map(dir => getPackage(dir.name)).toArray
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
index 2f47685..e66e4ef 100644
--- a/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala
index 1b2f644..40e9d3d 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -9,15 +9,16 @@ package interpreter
import java.lang.reflect
import java.util.concurrent.ConcurrentHashMap
import util.ScalaClassLoader
-import ScalaClassLoader.getSystemLoader
+import ScalaClassLoader.appLoader
+import scala.reflect.NameTransformer._
object ByteCode {
/** Until I figure out why I can't get scalap onto the classpath such
* that the compiler will bootstrap, we have to use reflection.
*/
private lazy val DECODER: Option[AnyRef] =
- for (clazz <- getSystemLoader.tryToLoadClass[AnyRef]("scala.tools.scalap.Decode$")) yield
- clazz.getField("MODULE$").get()
+ for (clazz <- appLoader.tryToLoadClass[AnyRef]("scala.tools.scalap.Decode$")) yield
+ clazz.getField(MODULE_INSTANCE_NAME).get(null)
private def decoderMethod(name: String, args: JClass*): Option[reflect.Method] = {
for (decoder <- DECODER ; m <- Option(decoder.getClass.getMethod(name, args: _*))) yield m
diff --git a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala
index 42a4789..1741a82 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -20,24 +20,6 @@ trait CodeHandlers[T] {
// Expressions are composed of operators and operands.
def expr(code: String): T
- // A declaration introduces names and assigns them types.
- // It can form part of a class definition (§5.1) or of a refinement in a compound type (§3.2.7).
- // (Ed: aka abstract members.)
- //
- // ‘val’ ValDcl | ‘var’ VarDcl | ‘def’ FunDcl | ‘type’ {nl} TypeDcl
- def decl(code: String): T
-
- // A definition introduces names that denote terms or types.
- // It can form part of an object or class definition or it can be local to a block.
- // (Ed: aka concrete members.)
- //
- // ‘val’ PatDef | ‘var’ VarDef | ‘def’ FunDef | ‘type’ {nl} TypeDef |
- // [‘case’] ‘class’ ClassDef | [‘case’] ‘object’ ObjectDef | ‘trait’ TraitDef
- def defn(code: String): T
-
- // An import clause has the form import p.I where p is a stable identifier (§3.1) and I is an import expression.
- def impt(code: String): T
-
// Statements occur as parts of blocks and templates.
// A statement can be an import, a definition or an expression, or it can be empty.
// Statements used in the template of a class definition can also be declarations.
@@ -53,9 +35,6 @@ trait CodeHandlers[T] {
}
def expr(code: String) = try Some(self.expr(code)) catch handler
- def decl(code: String) = try Some(self.decl(code)) catch handler
- def defn(code: String) = try Some(self.defn(code)) catch handler
- def impt(code: String) = try Some(self.impt(code)) catch handler
def stmt(code: String) = try Some(self.stmt(code)) catch handler
def stmts(code: String) = try (self.stmts(code) map (x => Some(x))) catch handlerSeq
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala b/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala
index e96918c..8042f0a 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Lex Spoon
*/
diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
index 86f48b9..1dfccbf 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -14,13 +14,11 @@ import Completion._
trait Completion {
type ExecResult
def resetVerbosity(): Unit
- def execute(line: String): Option[ExecResult]
def completer(): ScalaCompleter
}
object NoCompletion extends Completion {
type ExecResult = Nothing
def resetVerbosity() = ()
- def execute(line: String) = None
def completer() = NullCompleter
}
@@ -44,8 +42,6 @@ object Completion {
&& !(code startsWith "./")
&& !(code startsWith "..")
)
- private val pathStarts = """/ \ ./ ../ ~/""" split ' ' toSet
- def looksLikePath(code: String) = (code != null) && (pathStarts exists (code startsWith _))
object Forwarder {
def apply(forwardTo: () => Option[CompletionAware]): CompletionAware = new CompletionAware {
def completions(verbosity: Int) = forwardTo() map (_ completions verbosity) getOrElse Nil
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
index c33675a..ab96f41 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -12,40 +12,15 @@ import scala.reflect.NameTransformer
* will supply their own candidates and resolve their own paths.
*/
trait CompletionAware {
- /** The delimiters which are meaningful when this CompletionAware
- * object is in control.
- */
- // TODO
- // def delimiters(): List[Char] = List('.')
-
/** The complete list of unqualified Strings to which this
* object will complete.
*/
def completions(verbosity: Int): List[String]
- /** Default filter to apply to completions.
- */
- def filterNotFunction(s: String): Boolean = false
-
- /** Default sort.
- */
- def sortFunction(s1: String, s2: String): Boolean = s1 < s2
-
- /** Default map.
- */
- def mapFunction(s: String) = NameTransformer decode s
-
/** The next completor in the chain.
*/
def follow(id: String): Option[CompletionAware] = None
- /** What to return if this completion is given as a command. It
- * returns None by default, which means to allow the repl to interpret
- * the line normally. Returning Some(_) means the line will never
- * reach the scala interpreter.
- */
- def execute(id: String): Option[Any] = None
-
/** A list of useful information regarding a specific uniquely
* identified completion. This is specifically written for the
* following situation, but should be useful elsewhere too:
@@ -75,45 +50,13 @@ trait CompletionAware {
else comps
else follow(parsed.bufferHead) map (_ completionsFor parsed.bufferTail) getOrElse Nil
- results filterNot filterNotFunction map mapFunction sortWith (sortFunction _)
- }
-
- /** TODO - unify this and completionsFor under a common traverser.
- */
- def executionFor(parsed: Parsed): Option[Any] = {
- import parsed._
-
- if (isUnqualified && !isLastDelimiter && (completions(verbosity) contains buffer)) execute(buffer)
- else if (!isQualified) None
- else follow(bufferHead) flatMap (_ executionFor bufferTail)
+ results.sorted
}
}
object CompletionAware {
val Empty = new CompletionAware { def completions(verbosity: Int) = Nil }
- /** Artificial object demonstrating completion */
- // lazy val replVars = CompletionAware(
- // Map[String, CompletionAware](
- // "ids" -> CompletionAware(() => unqualifiedIds, completionAware _),
- // "synthVars" -> CompletionAware(() => allBoundNames filter isSynthVarName map (_.toString)),
- // "types" -> CompletionAware(() => allSeenTypes map (_.toString)),
- // "implicits" -> CompletionAware(() => allImplicits map (_.toString))
- // )
- // )
-
- // class Forwarder(underlying: CompletionAware) extends CompletionAware {
- // override def completions() = underlying.completions()
- // override def filterNotFunction(s: String) = underlying.filterNotFunction(s)
- // override def sortFunction(s1: String, s2: String) = underlying.sortFunction(s1, s2)
- // override def mapFunction(s: String) = underlying.mapFunction(s)
- // override def follow(id: String) = underlying.follow(id)
- // override def execute(id: String) = underlying.execute(id)
- // override def completionsFor(parsed: Parsed) = underlying.completionsFor(parsed)
- // override def executionFor(parsed: Parsed) = underlying.executionFor(parsed)
- // }
- //
-
def unapply(that: Any): Option[CompletionAware] = that match {
case x: CompletionAware => Some((x))
case _ => None
@@ -134,7 +77,7 @@ object CompletionAware {
/** Convenience factories.
*/
def apply(terms: () => List[String]): CompletionAware = apply(terms, _ => None)
- def apply(map: collection.Map[String, CompletionAware]): CompletionAware =
+ def apply(map: scala.collection.Map[String, CompletionAware]): CompletionAware =
apply(() => map.keys.toList, map.get _)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
index 9c34565..d14b5c7 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -15,7 +15,7 @@ trait CompletionOutput {
val global: Global
import global._
- import definitions.{ NothingClass, AnyClass, isTupleTypeOrSubtype, isFunctionType, isRepeatedParamType }
+ import definitions.{ isTupleType, isFunctionType, isRepeatedParamType }
/** Reducing fully qualified noise for some common packages.
*/
@@ -37,8 +37,8 @@ trait CompletionOutput {
val pkg = method.ownerChain find (_.isPackageClass) map (_.fullName) getOrElse ""
def relativize(str: String): String = quietString(str stripPrefix (pkg + "."))
- def relativize(tp: Type): String = relativize(tp.normalize.toString)
- def relativize(sym: Symbol): String = relativize(sym.info)
+ def relativize(tp: Type): String = relativize(tp.dealiasWiden.toString)
+ def relativize(sym: Symbol): String = relativize(sym.info)
def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]")
def parenList(params: List[Any]) = params.mkString("(", ", ", ")")
@@ -48,16 +48,16 @@ trait CompletionOutput {
def typeToString(tp: Type): String = relativize(
tp match {
- case x if isFunctionType(x) => functionString(x)
- case x if isTupleTypeOrSubtype(x) => tupleString(x)
- case x if isRepeatedParamType(x) => typeToString(x.typeArgs.head) + "*"
- case mt @ MethodType(_, _) => methodTypeToString(mt)
- case x => x.toString
+ case x if isFunctionType(x) => functionString(x)
+ case x if isTupleType(x) => tupleString(x)
+ case x if isRepeatedParamType(x) => typeToString(x.typeArgs.head) + "*"
+ case mt @ MethodType(_, _) => methodTypeToString(mt)
+ case x => x.toString
}
)
- def tupleString(tp: Type) = parenList(tp.normalize.typeArgs map relativize)
- def functionString(tp: Type) = tp.normalize.typeArgs match {
+ def tupleString(tp: Type) = parenList(tp.dealiasWiden.typeArgs map relativize)
+ def functionString(tp: Type) = tp.dealiasWiden.typeArgs match {
case List(t, r) => t + " => " + r
case xs => parenList(xs.init) + " => " + xs.last
}
@@ -65,7 +65,7 @@ trait CompletionOutput {
def tparamsString(tparams: List[Symbol]) = braceList(tparams map (_.defString))
def paramsString(params: List[Symbol]) = {
def paramNameString(sym: Symbol) = if (sym.isSynthetic) "" else sym.nameString + ": "
- def paramString(sym: Symbol) = paramNameString(sym) + typeToString(sym.info.normalize)
+ def paramString(sym: Symbol) = paramNameString(sym) + typeToString(sym.info.dealiasWiden)
val isImplicit = params.nonEmpty && params.head.isImplicit
val strs = (params map paramString) match {
diff --git a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
index cfe932a..07e36f4 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala b/src/compiler/scala/tools/nsc/interpreter/Delimited.scala
index 9d30c47..80debfa 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Delimited.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/nsc/interpreter/Dossiers.scala b/src/compiler/scala/tools/nsc/interpreter/Dossiers.scala
deleted file mode 100644
index 2c55665..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/Dossiers.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-// Coming soon
-trait Dossiers {
- val intp: IMain
-
- import intp._
- import intp.global._
-
- trait Dossier {
- def symbol: Symbol
- def staticType: Type
-
- def id = name.toString
- def name = symbol.name
- def normalizedType = staticType.typeSymbolDirect.tpe.normalize
- def simpleNameOfType = staticType.typeSymbol.simpleName
- def staticTypeString = staticType.toString
-
- override def toString = "Dossier on %s:\n static type %s (normalized %s)".format(
- symbol, staticType, normalizedType
- )
- }
-
- class TypeDossier(val symbol: TypeSymbol, val staticType: Type) extends Dossier {
- override def toString = super.toString
- }
-
- class TermDossier(val symbol: TermSymbol, val staticType: Type, val value: AnyRef) extends Dossier {
- def runtimeClass: JClass = value.getClass
- def runtimeSymbol: Symbol = safeClass(runtimeClass.getName) getOrElse NoSymbol
- def runtimeType: Type = runtimeSymbol.tpe
- def runtimeTypeString = TypeStrings.fromClazz(runtimeClass)
-
- def runtimeTypedParam = NamedParamClass(id, runtimeTypeString, value)
- def staticTypedParam = NamedParamClass(id, staticTypeString, value)
-
- def isRuntimeTypeTighter = runtimeSymbol.ancestors contains normalizedType.typeSymbol
-
- override def toString = super.toString + (
- "\n runtime type %s/%s\n value %s".format(
- runtimeType, runtimeTypeString, value
- )
- )
- }
-}
-
diff --git a/src/compiler/scala/tools/nsc/interpreter/Eval.scala b/src/compiler/scala/tools/nsc/interpreter/Eval.scala
deleted file mode 100644
index 6a59cbb..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/Eval.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-trait Eval {
- /** Executes code looking for an implicit conversion from the type
- * of the given identifier to CompletionAware.
- */
- // def completionAwareImplicit[T](id: String) = {
- // val f1string = "%s => %s".format(typeForIdent(id).get, classOf[CompletionAware].getName)
- // val code = """{
- // | def f(implicit x: (%s) = null): %s = x
- // | val f1 = f
- // | if (f1 == null) None else Some(f1(%s))
- // |}""".stripMargin.format(f1string, f1string, id)
- //
- // evalExpr[Option[CompletionAware]](code)
- // }
-
- // Coming soon
- // implicit def string2liftedcode(s: String): LiftedCode = new LiftedCode(s)
- // case class LiftedCode(code: String) {
- // val lifted: String = {
- // beQuietDuring { interpret(code) }
- // eval2[String]("({ " + code + " }).toString")
- // }
- // def >> : String = lifted
- // }
-}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
index e47eefa..827ebe1 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -1,47 +1,43 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
package interpreter
-import util.BatchSourceFile
-import ast.parser.Tokens.EOF
+import scala.reflect.internal.util.BatchSourceFile
+import scala.tools.nsc.ast.parser.Tokens.EOF
trait ExprTyper {
val repl: IMain
+
import repl._
- import global.{ reporter => _, _ }
- import syntaxAnalyzer.UnitParser
+ import global.{ reporter => _, Import => _, _ }
+ import definitions._
+ import syntaxAnalyzer.{ UnitParser, UnitScanner, token2name }
import naming.freshInternalVarName
object codeParser extends { val global: repl.global.type = repl.global } with CodeHandlers[Tree] {
def applyRule[T](code: String, rule: UnitParser => T): T = {
reporter.reset()
- val unit = new CompilationUnit(new BatchSourceFile("<console>", code))
- val scanner = new UnitParser(unit)
+ val scanner = newUnitParser(code)
val result = rule(scanner)
+
if (!reporter.hasErrors)
scanner.accept(EOF)
result
}
- def decl(code: String) = CodeHandlers.fail("todo")
- def defn(code: String) = CodeHandlers.fail("todo")
+ def defns(code: String) = stmts(code) collect { case x: DefTree => x }
def expr(code: String) = applyRule(code, _.expr())
- def impt(code: String) = applyRule(code, _.importExpr())
- def impts(code: String) = applyRule(code, _.importClause())
- def stmts(code: String) = applyRule(code, _.templateStatSeq(false)._2)
- def stmt(code: String) = stmts(code) match {
- case List(t) => t
- case xs => CodeHandlers.fail("Not a single statement: " + xs.mkString(", "))
- }
+ def stmts(code: String) = applyRule(code, _.templateStats())
+ def stmt(code: String) = stmts(code).last // guaranteed nonempty
}
/** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
- def parse(line: String): Option[List[Tree]] = {
+ def parse(line: String): Option[List[Tree]] = debugging(s"""parse("$line")""") {
var isIncomplete = false
reporter.withIncompleteHandler((_, _) => isIncomplete = true) {
val trees = codeParser.stmts(line)
@@ -50,76 +46,62 @@ trait ExprTyper {
else Some(trees)
}
}
+ // def parsesAsExpr(line: String) = {
+ // import codeParser._
+ // (opt expr line).isDefined
+ // }
- // TODO: integrate these into a CodeHandler[Type].
-
- // XXX literals.
- // 1) Identifiers defined in the repl.
- // 2) A path loadable via getModule.
- // 3) Try interpreting it as an expression.
- private var typeOfExpressionDepth = 0
- def typeOfExpression(expr: String, silent: Boolean = true): Option[Type] = {
- repltrace("typeOfExpression(" + expr + ")")
- if (typeOfExpressionDepth > 2) {
- repldbg("Terminating typeOfExpression recursion for expression: " + expr)
- return None
- }
+ def symbolOfLine(code: String): Symbol = {
+ def asExpr(): Symbol = {
+ val name = freshInternalVarName()
+ // Typing it with a lazy val would give us the right type, but runs
+ // into compiler bugs with things like existentials, so we compile it
+ // behind a def and strip the NullaryMethodType which wraps the expr.
+ val line = "def " + name + " = " + code
- def asQualifiedImport = {
- val name = expr.takeWhile(_ != '.')
- importedTermNamed(name) flatMap { sym =>
- typeOfExpression(sym.fullName + expr.drop(name.length), true)
+ interpretSynthetic(line) match {
+ case IR.Success =>
+ val sym0 = symbolOfTerm(name)
+ // drop NullaryMethodType
+ sym0.cloneSymbol setInfo afterTyper(sym0.info.finalResultType)
+ case _ => NoSymbol
}
}
- def asModule = safeModule(expr) map (_.tpe)
- def asExpr = {
- val lhs = freshInternalVarName()
- val line = "lazy val " + lhs + " =\n" + expr
+ def asDefn(): Symbol = {
+ val old = repl.definedSymbolList.toSet
- interpret(line, true) match {
- case IR.Success => typeOfExpression(lhs, true)
- case _ => None
+ interpretSynthetic(code) match {
+ case IR.Success =>
+ repl.definedSymbolList filterNot old match {
+ case Nil => NoSymbol
+ case sym :: Nil => sym
+ case syms => NoSymbol.newOverloaded(NoPrefix, syms)
+ }
+ case _ => NoSymbol
}
}
- def evaluate() = {
- typeOfExpressionDepth += 1
- try typeOfTerm(expr) orElse asModule orElse asExpr orElse asQualifiedImport
- finally typeOfExpressionDepth -= 1
+ def asError(): Symbol = {
+ interpretSynthetic(code)
+ NoSymbol
}
+ beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError()
+ }
+ private var typeOfExpressionDepth = 0
+ def typeOfExpression(expr: String, silent: Boolean = true): Type = {
+ if (typeOfExpressionDepth > 2) {
+ repldbg("Terminating typeOfExpression recursion for expression: " + expr)
+ return NoType
+ }
+ typeOfExpressionDepth += 1
// Don't presently have a good way to suppress undesirable success output
// while letting errors through, so it is first trying it silently: if there
// is an error, and errors are desired, then it re-evaluates non-silently
// to induce the error message.
- beSilentDuring(evaluate()) orElse beSilentDuring(typeOfDeclaration(expr)) orElse {
- if (!silent)
- evaluate()
-
- None
+ try beSilentDuring(symbolOfLine(expr).tpe) match {
+ case NoType if !silent => symbolOfLine(expr).tpe // generate error
+ case tpe => tpe
}
+ finally typeOfExpressionDepth -= 1
}
- // Since people will be giving us ":t def foo = 5" even though that is not an
- // expression, we have a means of typing declarations too.
- private def typeOfDeclaration(code: String): Option[Type] = {
- repltrace("typeOfDeclaration(" + code + ")")
- val obname = freshInternalVarName()
-
- interpret("object " + obname + " {\n" + code + "\n}\n", true) match {
- case IR.Success =>
- val sym = symbolOfTerm(obname)
- if (sym == NoSymbol) None else {
- // TODO: bitmap$n is not marked synthetic.
- val decls = sym.tpe.decls.toList filterNot (x => x.isConstructor || x.isPrivate || (x.name.toString contains "$"))
- repltrace("decls: " + decls)
- decls.lastOption map (decl => typeCleanser(sym, decl.name))
- }
- case _ =>
- None
- }
- }
- // def compileAndTypeExpr(expr: String): Option[Typer] = {
- // class TyperRun extends Run {
- // override def stopPhase(name: String) = name == "superaccessors"
- // }
- // }
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/FileCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/FileCompletion.scala
deleted file mode 100644
index e1eb938..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/FileCompletion.scala
+++ /dev/null
@@ -1,56 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-/** TODO
- * Spaces, dots, and other things in filenames are not correctly handled.
- * space-escaping, knowing when we're inside quotes, etc. would be nice.
- */
-
-import io.{ Directory, Path }
-
-/** This isn't 100% clean right now, but it works and is simple. Rather
- * than delegate to new objects on each '/' in the path, we treat the
- * buffer like a path and process it directly.
- */
-object FileCompletion {
- def executionFor(buffer: String): Option[Path] = {
- Some(Directory.Home match {
- case Some(d) if buffer startsWith "~" => d / buffer.tail
- case _ => Path(buffer)
- }) filter (_.exists)
- }
-
- private def fileCompletionForwarder(buffer: String, where: Directory): List[String] = {
- completionsFor(where.path + buffer) map (_ stripPrefix where.path) toList
- }
-
- private def homeCompletions(buffer: String): List[String] = {
- require(buffer startsWith "~/")
- val home = Directory.Home getOrElse (return Nil)
- fileCompletionForwarder(buffer.tail, home) map ("~" + _)
- }
- private def cwdCompletions(buffer: String): List[String] = {
- require(buffer startsWith "./")
- val cwd = Directory.Current getOrElse (return Nil)
- fileCompletionForwarder(buffer.tail, cwd) map ("." + _)
- }
-
- def completionsFor(buffer: String): List[String] =
- if (buffer startsWith "~/") homeCompletions(buffer)
- else if (buffer startsWith "./") cwdCompletions(buffer)
- else {
- val p = Path(buffer)
- val (dir, stub) =
- // don't want /foo/. expanding "."
- if (p.name == ".") (p.parent, ".")
- else if (p.isDirectory) (p.toDirectory, "")
- else (p.parent, p.name)
-
- dir.list filter (_.name startsWith stub) map (_.path) toList
- }
-}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/interpreter/Formatting.scala b/src/compiler/scala/tools/nsc/interpreter/Formatting.scala
index 6339dca..43e653e 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Formatting.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Formatting.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
index d38f42f..ee45dc5 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Alexander Spoon
*/
@@ -11,13 +11,21 @@ import java.io.{ BufferedReader, FileReader }
import java.util.concurrent.locks.ReentrantLock
import scala.sys.process.Process
import session._
-import scala.tools.util.{ Signallable, Javap }
+import scala.util.Properties.{ envOrNone, javaHome, jdkHome, javaVersion }
+import scala.tools.util.{ Javap }
import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer
import scala.concurrent.ops
import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
import interpreter._
-import io.{ File, Sources }
+import io.{ File, Directory, Path }
+import scala.reflect.NameTransformer._
+import util.ScalaClassLoader
+import ScalaClassLoader._
+import scala.tools.util._
+import scala.language.{implicitConversions, existentials}
+import scala.reflect.{ClassTag, classTag}
+import scala.tools.reflect.StdRuntimeTags._
/** The Scala interactive shell. It provides a read-eval-print loop
* around the Interpreter class.
@@ -43,32 +51,68 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
var settings: Settings = _
var intp: IMain = _
- override def echoCommandMessage(msg: String): Unit =
- intp.reporter.printMessage(msg)
+ @deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp
+ @deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: Interpreter): Unit = intp = i
- def isAsync = !settings.Yreplsync.value
- lazy val power = Power(this)
+ /** Having inherited the difficult "var-ness" of the repl instance,
+ * I'm trying to work around it by moving operations into a class from
+ * which it will appear a stable prefix.
+ */
+ private def onIntp[T](f: IMain => T): T = f(intp)
+
+ class IMainOps[T <: IMain](val intp: T) {
+ import intp._
+ import global._
+
+ def printAfterTyper(msg: => String) =
+ intp.reporter printUntruncatedMessage afterTyper(msg)
+
+ /** Strip NullaryMethodType artifacts. */
+ private def replInfo(sym: Symbol) = {
+ sym.info match {
+ case NullaryMethodType(restpe) if sym.isAccessor => restpe
+ case info => info
+ }
+ }
+ def echoTypeStructure(sym: Symbol) =
+ printAfterTyper("" + deconstruct.show(replInfo(sym)))
+
+ def echoTypeSignature(sym: Symbol, verbose: Boolean) = {
+ if (verbose) ILoop.this.echo("// Type signature")
+ printAfterTyper("" + replInfo(sym))
+
+ if (verbose) {
+ ILoop.this.echo("\n// Internal Type structure")
+ echoTypeStructure(sym)
+ }
+ }
+ }
+ implicit def stabilizeIMain(intp: IMain) = new IMainOps[intp.type](intp)
- // TODO
- // object opt extends AestheticSettings
- //
- @deprecated("Use `intp` instead.", "2.9.0")
- def interpreter = intp
+ /** TODO -
+ * -n normalize
+ * -l label with case class parameter names
+ * -c complete - leave nothing out
+ */
+ private def typeCommandInternal(expr: String, verbose: Boolean): Result = {
+ onIntp { intp =>
+ val sym = intp.symbolOfLine(expr)
+ if (sym.exists) intp.echoTypeSignature(sym, verbose)
+ else ""
+ }
+ }
- @deprecated("Use `intp` instead.", "2.9.0")
- def interpreter_= (i: Interpreter): Unit = intp = i
+ override def echoCommandMessage(msg: String) {
+ intp.reporter printUntruncatedMessage msg
+ }
+ def isAsync = !settings.Yreplsync.value
+ lazy val power = new Power(intp, new StdReplVals(this))(tagOfStdReplVals, classTag[StdReplVals])
def history = in.history
/** The context class loader at the time this object was created */
protected val originalClassLoader = Thread.currentThread.getContextClassLoader
- // Install a signal handler so we can be prodded.
- private val signallable =
- if (isReplDebug && !settings.Yreplsync.value)
- Signallable("Dump repl state.")(dumpCommand())
- else null
-
// classpath entries added via :cp
var addedClasspath: String = ""
@@ -81,34 +125,31 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
/** Record a command for replay should the user request a :replay */
def addReplay(cmd: String) = replayCommandStack ::= cmd
+ def savingReplayStack[T](body: => T): T = {
+ val saved = replayCommandStack
+ try body
+ finally replayCommandStack = saved
+ }
+ def savingReader[T](body: => T): T = {
+ val saved = in
+ try body
+ finally in = saved
+ }
+
/** Close the interpreter and set the var to null. */
def closeInterpreter() {
if (intp ne null) {
- intp.close
+ intp.close()
intp = null
- Thread.currentThread.setContextClassLoader(originalClassLoader)
}
}
class ILoopInterpreter extends IMain(settings, out) {
+ outer =>
+
override lazy val formatting = new Formatting {
def prompt = ILoop.this.prompt
}
- override protected def createLineManager(): Line.Manager =
- if (ReplPropsKludge.noThreadCreation(settings)) null else new Line.Manager {
- override def onRunaway(line: Line[_]): Unit = {
- val template = """
- |// She's gone rogue, captain! Have to take her out!
- |// Calling Thread.stop on runaway %s with offending code:
- |// scala> %s""".stripMargin
-
- echo(template.format(line.thread, line.code))
- // XXX no way to suppress the deprecation warning
- line.thread.stop()
- in.redrawLine()
- }
- }
-
override protected def parentClassLoader =
settings.explicitParentLoader.getOrElse( classOf[ILoop].getClassLoader )
}
@@ -218,43 +259,21 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand),
cmd("implicits", "[-v]", "show the implicits in scope", implicitsCommand),
cmd("javap", "<path|class>", "disassemble a file or class name", javapCommand),
- nullary("keybindings", "show how ctrl-[A-Z] and other keys are bound", keybindingsCommand),
cmd("load", "<path>", "load and interpret a Scala file", loadCommand),
nullary("paste", "enter paste mode: all input up to ctrl-D compiled together", pasteCommand),
nullary("power", "enable power user mode", powerCmd),
nullary("quit", "exit the interpreter", () => Result(false, None)),
nullary("replay", "reset execution and replay all previous commands", replay),
+ nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand),
shCommand,
nullary("silent", "disable/enable automatic printing of results", verbosity),
- cmd("type", "<expr>", "display the type of an expression without evaluating it", typeCommand)
+ cmd("type", "[-v] <expr>", "display the type of an expression without evaluating it", typeCommand),
+ nullary("warnings", "show the suppressed warnings from the most recent line which had any", warningsCommand)
)
/** Power user commands */
lazy val powerCommands: List[LoopCommand] = List(
- nullary("dump", "displays a view of the interpreter's internal state", dumpCommand),
- cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand),
- cmd("wrap", "<method>", "name of method to wrap around each repl line", wrapCommand) withLongHelp ("""
- |:wrap
- |:wrap clear
- |:wrap <method>
- |
- |Installs a wrapper around each line entered into the repl.
- |Currently it must be the simple name of an existing method
- |with the specific signature shown in the following example.
- |
- |def timed[T](body: => T): T = {
- | val start = System.nanoTime
- | try body
- | finally println((System.nanoTime - start) + " nanos elapsed.")
- |}
- |:wrap timed
- |
- |If given no argument, :wrap names the wrapper installed.
- |An argument of clear will remove the wrapper if any is active.
- |Note that wrappers do not compose (a new one replaces the old
- |one) and also that the :phase command uses the same machinery,
- |so setting :wrap will clear any :phase setting.
- """.stripMargin.trim)
+ cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand)
)
private def dumpCommand(): Result = {
@@ -262,6 +281,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
history.asStrings takeRight 30 foreach echo
in.redrawLine()
}
+ private def valsCommand(): Result = power.valsDescription
private val typeTransforms = List(
"scala.collection.immutable." -> "immutable.",
@@ -296,10 +316,9 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
- private def implicitsCommand(line: String): Result = {
- val intp = ILoop.this.intp
+ private def implicitsCommand(line: String): Result = onIntp { intp =>
import intp._
- import global.Symbol
+ import global._
def p(x: Any) = intp.reporter.printMessage("" + x)
@@ -323,7 +342,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
// This groups the members by where the symbol is defined
val byOwner = syms groupBy (_.owner)
- val sortedOwners = byOwner.toList sortBy { case (owner, _) => intp.afterTyper(source.info.baseClasses indexOf owner) }
+ val sortedOwners = byOwner.toList sortBy { case (owner, _) => afterTyper(source.info.baseClasses indexOf owner) }
sortedOwners foreach {
case (owner, members) =>
@@ -354,7 +373,47 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
- protected def newJavap() = new Javap(intp.classLoader, new IMain.ReplStrippingWriter(intp)) {
+ private[this] lazy val platformTools: Option[File] = {
+ val jarName = "tools.jar"
+ def jarPath(path: Path) = (path / "lib" / jarName).toFile
+ def jarAt(path: Path) = {
+ val f = jarPath(path)
+ if (f.isFile) Some(f) else None
+ }
+ val jdkDir = {
+ val d = Directory(jdkHome)
+ if (d.isDirectory) Some(d) else None
+ }
+ def deeply(dir: Directory) = dir.deepFiles find (_.name == jarName)
+
+ val home = envOrNone("JDK_HOME") orElse envOrNone("JAVA_HOME") map (p => Path(p))
+ val install = Some(Path(javaHome))
+
+ (home flatMap jarAt) orElse
+ (install flatMap jarAt) orElse
+ (install map (_.parent) flatMap jarAt) orElse
+ (jdkDir flatMap deeply)
+ }
+ private def addToolsJarToLoader() = (
+ if (Javap isAvailable intp.classLoader) {
+ repldbg(":javap available on interpreter class path.")
+ intp.classLoader
+ } else {
+ val cl = platformTools match {
+ case Some(tools) => ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader)
+ case _ => intp.classLoader
+ }
+ if (Javap isAvailable cl) {
+ repldbg(":javap available on extended class path.")
+ cl
+ } else {
+ repldbg(s":javap unavailable: no tools.jar at $jdkHome")
+ intp.classLoader
+ }
+ }
+ )
+
+ protected def newJavap() = new JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp)) {
override def tryClass(path: String): Array[Byte] = {
val hd :: rest = path split '.' toList;
// If there are dots in the name, the first segment is the
@@ -362,10 +421,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
if (rest.nonEmpty) {
intp optFlatName hd match {
case Some(flat) =>
- val clazz = flat :: rest mkString "$"
+ val clazz = flat :: rest mkString NAME_JOIN_STRING
val bytes = super.tryClass(clazz)
if (bytes.nonEmpty) bytes
- else super.tryClass(clazz + "$")
+ else super.tryClass(clazz + MODULE_SUFFIX_STRING)
case _ => super.tryClass(path)
}
}
@@ -374,7 +433,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
// we have to drop the $ to find object Foo, then tack it back onto
// the end of the flattened name.
def className = intp flatName path
- def moduleName = (intp flatName path.stripSuffix("$")) + "$"
+ def moduleName = (intp flatName path.stripSuffix(MODULE_SUFFIX_STRING)) + MODULE_SUFFIX_STRING
val bytes = super.tryClass(className)
if (bytes.nonEmpty) bytes
@@ -382,83 +441,70 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
}
- private lazy val javap =
- try newJavap()
- catch { case _: Exception => null }
+ private lazy val javap = substituteAndLog[Javap]("javap", NoJavap)(newJavap())
// Still todo: modules.
- private def typeCommand(line: String): Result = {
- if (line.trim == "") ":type <expression>"
- else intp.typeOfExpression(line, false) match {
- case Some(tp) => intp.afterTyper(tp.toString)
- case _ => "" // the error message was already printed
+ private def typeCommand(line0: String): Result = {
+ line0.trim match {
+ case "" => ":type [-v] <expression>"
+ case s if s startsWith "-v " => typeCommandInternal(s stripPrefix "-v " trim, true)
+ case s => typeCommandInternal(s, false)
}
}
+ private def warningsCommand(): Result = {
+ if (intp.lastWarnings.isEmpty)
+ "Can't find any cached warnings."
+ else
+ intp.lastWarnings foreach { case (pos, msg) => intp.reporter.warning(pos, msg) }
+ }
+
private def javapCommand(line: String): Result = {
if (javap == null)
- return ":javap unavailable on this platform."
- if (line == "")
- return ":javap [-lcsvp] [path1 path2 ...]"
-
- javap(words(line)) foreach { res =>
- if (res.isError) return "Failed: " + res.value
- else res.show()
- }
- }
- private def keybindingsCommand(): Result = {
- if (in.keyBindings.isEmpty) "Key bindings unavailable."
- else {
- echo("Reading jline properties for default key bindings.")
- echo("Accuracy not guaranteed: treat this as a guideline only.\n")
- in.keyBindings foreach (x => echo ("" + x))
- }
+ ":javap unavailable, no tools.jar at %s. Set JDK_HOME.".format(jdkHome)
+ else if (javaVersion startsWith "1.7")
+ ":javap not yet working with java 1.7"
+ else if (line == "")
+ ":javap [-lcsvp] [path1 path2 ...]"
+ else
+ javap(words(line)) foreach { res =>
+ if (res.isError) return "Failed: " + res.value
+ else res.show()
+ }
}
+
private def wrapCommand(line: String): Result = {
def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T"
- val intp = ILoop.this.intp
- val g: intp.global.type = intp.global
- import g._
-
- words(line) match {
- case Nil =>
- intp.executionWrapper match {
- case "" => "No execution wrapper is set."
- case s => "Current execution wrapper: " + s
- }
- case "clear" :: Nil =>
- intp.executionWrapper match {
- case "" => "No execution wrapper is set."
- case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper."
- }
- case wrapper :: Nil =>
- intp.typeOfExpression(wrapper) match {
- case Some(PolyType(List(targ), MethodType(List(arg), restpe))) =>
- intp setExecutionWrapper intp.pathToTerm(wrapper)
- "Set wrapper to '" + wrapper + "'"
- case Some(x) =>
- failMsg + "\nFound: " + x
- case _ =>
- failMsg + "\nFound: <unknown>"
- }
- case _ => failMsg
+ onIntp { intp =>
+ import intp._
+ import global._
+
+ words(line) match {
+ case Nil =>
+ intp.executionWrapper match {
+ case "" => "No execution wrapper is set."
+ case s => "Current execution wrapper: " + s
+ }
+ case "clear" :: Nil =>
+ intp.executionWrapper match {
+ case "" => "No execution wrapper is set."
+ case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper."
+ }
+ case wrapper :: Nil =>
+ intp.typeOfExpression(wrapper) match {
+ case PolyType(List(targ), MethodType(List(arg), restpe)) =>
+ intp setExecutionWrapper intp.pathToTerm(wrapper)
+ "Set wrapper to '" + wrapper + "'"
+ case tp =>
+ failMsg + "\nFound: <unknown>"
+ }
+ case _ => failMsg
+ }
}
}
private def pathToPhaseWrapper = intp.pathToTerm("$r") + ".phased.atCurrent"
private def phaseCommand(name: String): Result = {
- // This line crashes us in TreeGen:
- //
- // if (intp.power.phased set name) "..."
- //
- // Exception in thread "main" java.lang.AssertionError: assertion failed: ._7.type
- // at scala.Predef$.assert(Predef.scala:99)
- // at scala.tools.nsc.ast.TreeGen.mkAttributedQualifier(TreeGen.scala:69)
- // at scala.tools.nsc.ast.TreeGen.mkAttributedQualifier(TreeGen.scala:44)
- // at scala.tools.nsc.ast.TreeGen.mkAttributedRef(TreeGen.scala:101)
- // at scala.tools.nsc.ast.TreeGen.mkAttributedStableRef(TreeGen.scala:143)
- //
- // But it works like so, type annotated.
val phased: Phased = power.phased
import phased.NoPhaseName
@@ -497,16 +543,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
|[y/n]
""".trim.stripMargin
- private val crashRecovery: PartialFunction[Throwable, Unit] = {
+ private val crashRecovery: PartialFunction[Throwable, Boolean] = {
case ex: Throwable =>
- if (settings.YrichExes.value) {
- val sources = implicitly[Sources]
- echo("\n" + ex.getMessage)
- echo(
- if (isReplDebug) "[searching " + sources.path + " for exception contexts...]"
- else "[searching for exception contexts...]"
- )
- }
echo(intp.global.throwableAsString(ex))
ex match {
@@ -521,6 +559,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
if (fn()) replay()
else echo("\nAbandoning crashed session.")
}
+ true
}
/** The main read-eval-print loop for the repl. It calls
@@ -535,7 +574,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
// return false if repl should exit
def processLine(line: String): Boolean = {
if (isAsync) {
- awaitInitialized()
+ if (!awaitInitialized()) return false
runThunks()
}
if (line eq null) false // assume null means EOF
@@ -545,39 +584,56 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
case _ => true
}
}
-
- while (true) {
- try if (!processLine(readOneLine())) return
- catch crashRecovery
+ def innerLoop() {
+ if ( try processLine(readOneLine()) catch crashRecovery )
+ innerLoop()
}
+ innerLoop()
}
/** interpret all lines from a specified file */
def interpretAllFrom(file: File) {
- val oldIn = in
- val oldReplay = replayCommandStack
-
- try file applyReader { reader =>
- in = SimpleReader(reader, out, false)
- echo("Loading " + file + "...")
- loop()
- }
- finally {
- in = oldIn
- replayCommandStack = oldReplay
+ savingReader {
+ savingReplayStack {
+ file applyReader { reader =>
+ in = SimpleReader(reader, out, false)
+ echo("Loading " + file + "...")
+ loop()
+ }
+ }
}
}
- /** create a new interpreter and replay all commands so far */
+ /** create a new interpreter and replay the given commands */
def replay() {
- closeInterpreter()
- createInterpreter()
- for (cmd <- replayCommands) {
+ reset()
+ if (replayCommandStack.isEmpty)
+ echo("Nothing to replay.")
+ else for (cmd <- replayCommands) {
echo("Replaying: " + cmd) // flush because maybe cmd will have its own output
command(cmd)
echo("")
}
}
+ def resetCommand() {
+ echo("Resetting interpreter state.")
+ if (replayCommandStack.nonEmpty) {
+ echo("Forgetting this session history:\n")
+ replayCommands foreach echo
+ echo("")
+ replayCommandStack = Nil
+ }
+ if (intp.namedDefinedTerms.nonEmpty)
+ echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", "))
+ if (intp.definedTypes.nonEmpty)
+ echo("Forgetting defined types: " + intp.definedTypes.mkString(", "))
+
+ reset()
+ }
+ def reset() {
+ intp.reset()
+ unleashAndSetPhase()
+ }
/** fork a shell and run a command */
lazy val shCommand = new LoopCommand("sh", "run a shell command (result is implicitly => List[String])") {
@@ -624,10 +680,20 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
def enablePowerMode(isDuringInit: Boolean) = {
replProps.power setValue true
- power.unleash()
- intp.beSilentDuring(phaseCommand("typer"))
- if (isDuringInit) asyncMessage(power.banner)
- else echo(power.banner)
+ unleashAndSetPhase()
+ asyncEcho(isDuringInit, power.banner)
+ }
+ private def unleashAndSetPhase() {
+ if (isReplPower) {
+ power.unleash()
+ // Set the phase to "typer"
+ intp beSilentDuring phaseCommand("typer")
+ }
+ }
+
+ def asyncEcho(async: Boolean, msg: => String) {
+ if (async) asyncMessage(msg)
+ else echo(msg)
}
def verbosity() = {
@@ -731,28 +797,12 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
else if (Completion.looksLikeInvocation(code) && intp.mostRecentVar != "") {
interpretStartingWith(intp.mostRecentVar + code)
}
- else {
- def runCompletion = in.completion execute code map (intp bindValue _)
- /** Due to my accidentally letting file completion execution sneak ahead
- * of actual parsing this now operates in such a way that the scala
- * interpretation always wins. However to avoid losing useful file
- * completion I let it fail and then check the others. So if you
- * type /tmp it will echo a failure and then give you a Directory object.
- * It's not pretty: maybe I'll implement the silence bits I need to avoid
- * echoing the failure.
- */
- if (intp isParseable code) {
- val (code, result) = reallyInterpret
- if (power != null && code == IR.Error)
- runCompletion
-
- result
- }
- else runCompletion match {
- case Some(_) => None // completion hit: avoid the latent error
- case _ => reallyInterpret._2 // trigger the latent error
- }
+ else if (code.trim startsWith "//") {
+ // line comment, do nothing
+ None
}
+ else
+ reallyInterpret._2
}
// runs :load `file` on any files passed via -i
@@ -784,7 +834,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
SimpleReader()
}
}
- def process(settings: Settings): Boolean = {
+ def process(settings: Settings): Boolean = savingContextLoader {
this.settings = settings
createInterpreter()
@@ -798,6 +848,16 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
case x => x
}
}
+ // Bind intp somewhere out of the regular namespace where
+ // we can get at it in generated code.
+ addThunk(intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain])))
+ addThunk({
+ import scala.tools.nsc.io._
+ import Properties.userHome
+ import scala.compat.Platform.EOL
+ val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
+ if (autorun.isDefined) intp.quietRun(autorun.get)
+ })
loadFiles(settings)
// it is broken on startup; go ahead and exit
@@ -828,7 +888,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
/** process command-line arguments and do as they request */
def process(args: Array[String]): Boolean = {
- val command = new CommandLine(args.toList, msg => echo("scala: " + msg))
+ val command = new CommandLine(args.toList, echo)
def neededHelp(): String =
(if (command.settings.help.value) command.usageMsg + "\n" else "") +
(if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "")
@@ -841,13 +901,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
@deprecated("Use `process` instead", "2.9.0")
- def main(args: Array[String]): Unit = {
- if (isReplDebug)
- System.out.println(new java.util.Date)
-
- process(args)
- }
- @deprecated("Use `process` instead", "2.9.0")
def main(settings: Settings): Unit = process(settings)
}
@@ -910,32 +963,4 @@ object ILoop {
}
}
def run(lines: List[String]): String = run(lines map (_ + "\n") mkString)
-
- // provide the enclosing type T
- // in order to set up the interpreter's classpath and parent class loader properly
- def breakIf[T: Manifest](assertion: => Boolean, args: NamedParam*): Unit =
- if (assertion) break[T](args.toList)
-
- // start a repl, binding supplied args
- def break[T: Manifest](args: List[NamedParam]): Unit = {
- val msg = if (args.isEmpty) "" else " Binding " + args.size + " value%s.".format(
- if (args.size == 1) "" else "s"
- )
- echo("Debug repl starting." + msg)
- val repl = new ILoop {
- override def prompt = "\ndebug> "
- }
- repl.settings = new Settings(echo)
- repl.settings.embeddedDefaults[T]
- repl.createInterpreter()
- repl.in = new JLineReader(new JLineCompletion(repl))
-
- // rebind exit so people don't accidentally call sys.exit by way of predef
- repl.quietRun("""def exit = println("Type :quit to resume program execution.")""")
- args foreach (p => repl.bind(p.name, p.tpe, p.value))
- repl.loop()
-
- echo("\nDebug repl exiting.")
- repl.closeInterpreter()
- }
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala b/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
index bd0f866..e3c0494 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
@@ -1,13 +1,14 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
package interpreter
-import scala.tools.util.SignalManager
+import scala.reflect.internal.util.Position
import scala.util.control.Exception.ignoring
+import scala.tools.nsc.util.stackTraceString
/**
* Machinery for the asynchronous initialization of the repl.
@@ -32,34 +33,6 @@ trait ILoopInit {
echoAndRefresh(msg)
}
- /** Try to install sigint handler: ignore failure. Signal handler
- * will interrupt current line execution if any is in progress.
- *
- * Attempting to protect the repl from accidental exit, we only honor
- * a single ctrl-C if the current buffer is empty: otherwise we look
- * for a second one within a short time.
- */
- protected def installSigIntHandler() {
- def onExit() {
- Console.println("") // avoiding "shell prompt in middle of line" syndrome
- sys.exit(1)
- }
- ignoring(classOf[Exception]) {
- SignalManager("INT") = {
- if (intp == null || intp.lineManager == null)
- onExit()
- else if (intp.lineManager.running)
- intp.lineManager.cancel()
- else if (in.currentLine != "") {
- // non-empty buffer, so make them hit ctrl-C a second time
- SignalManager("INT") = onExit()
- io.timer(5)(installSigIntHandler()) // and restore original handler if they don't
- }
- else onExit()
- }
- }
- }
-
private val initLock = new java.util.concurrent.locks.ReentrantLock()
private val initCompilerCondition = initLock.newCondition() // signal the compiler is initialized
private val initLoopCondition = initLock.newCondition() // signal the whole repl is initialized
@@ -72,6 +45,7 @@ trait ILoopInit {
}
// a condition used to ensure serial access to the compiler.
@volatile private var initIsComplete = false
+ @volatile private var initError: String = null
private def elapsed() = "%.3f".format((System.nanoTime - initStart).toDouble / 1000000000L)
// the method to be called when the interpreter is initialized.
@@ -91,25 +65,45 @@ trait ILoopInit {
}
// called from main repl loop
- protected def awaitInitialized() {
+ protected def awaitInitialized(): Boolean = {
if (!initIsComplete)
withLock { while (!initIsComplete) initLoopCondition.await() }
+ if (initError != null) {
+ println("""
+ |Failed to initialize the REPL due to an unexpected error.
+ |This is a bug, please, report it along with the error diagnostics printed below.
+ |%s.""".stripMargin.format(initError)
+ )
+ false
+ } else true
}
+ // private def warningsThunks = List(
+ // () => intp.bind("lastWarnings", "" + typeTag[List[(Position, String)]], intp.lastWarnings _),
+ // )
+
protected def postInitThunks = List[Option[() => Unit]](
Some(intp.setContextClassLoader _),
- if (isReplPower) Some(() => enablePowerMode(true)) else None,
- // do this last to avoid annoying uninterruptible startups
- Some(installSigIntHandler _)
+ if (isReplPower) Some(() => enablePowerMode(true)) else None
).flatten
+ // ++ (
+ // warningsThunks
+ // )
// called once after init condition is signalled
protected def postInitialization() {
- postInitThunks foreach (f => addThunk(f()))
- runThunks()
- initIsComplete = true
+ try {
+ postInitThunks foreach (f => addThunk(f()))
+ runThunks()
+ } catch {
+ case ex: Throwable =>
+ initError = stackTraceString(ex)
+ throw ex
+ } finally {
+ initIsComplete = true
- if (isAsync) {
- asyncMessage("[info] total init time: " + elapsed() + " s.")
- withLock(initLoopCondition.signal())
+ if (isAsync) {
+ asyncMessage("[info] total init time: " + elapsed() + " s.")
+ withLock(initLoopCondition.signal())
+ }
}
}
// code to be executed only after the interpreter is initialized
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
index af70e74..bed8570 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -7,26 +7,40 @@ package scala.tools.nsc
package interpreter
import Predef.{ println => _, _ }
-import util.{ Set => _, _ }
-import scala.collection.{ mutable, immutable }
+import util.stringFromWriter
+import scala.reflect.internal.util._
+import java.net.URL
import scala.sys.BooleanProp
-import Exceptional.unwrap
-import ScalaClassLoader.URLClassLoader
-import symtab.Flags
import io.VirtualDirectory
import scala.tools.nsc.io.AbstractFile
import reporters._
import symtab.Flags
-import scala.reflect.generic.Names
+import scala.reflect.internal.Names
import scala.tools.util.PathResolver
-import scala.tools.nsc.util.{ ScalaClassLoader, Exceptional }
+import scala.tools.nsc.util.ScalaClassLoader
import ScalaClassLoader.URLClassLoader
-import Exceptional.unwrap
+import scala.tools.nsc.util.Exceptional.unwrap
import scala.collection.{ mutable, immutable }
-import scala.PartialFunction.{ cond, condOpt }
import scala.util.control.Exception.{ ultimately }
-import scala.reflect.NameTransformer
import IMain._
+import java.util.concurrent.Future
+import typechecker.Analyzer
+import scala.language.implicitConversions
+import scala.reflect.runtime.{ universe => ru }
+import scala.reflect.{ ClassTag, classTag }
+import scala.tools.reflect.StdRuntimeTags._
+
+/** directory to save .class files to */
+private class ReplVirtualDirectory(out: JPrintWriter) extends VirtualDirectory("(memory)", None) {
+ private def pp(root: AbstractFile, indentLevel: Int) {
+ val spaces = " " * indentLevel
+ out.println(spaces + root.name)
+ if (root.isDirectory)
+ root.toList sortBy (_.name) foreach (x => pp(x, indentLevel + 1))
+ }
+ // print the contents hierarchically
+ def show() = pp(this, 0)
+}
/** An interpreter for Scala code.
*
@@ -60,9 +74,54 @@ import IMain._
* @author Moez A. Abdel-Gawad
* @author Lex Spoon
*/
-class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imports {
+class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports {
imain =>
+ /** Leading with the eagerly evaluated.
+ */
+ val virtualDirectory: VirtualDirectory = new ReplVirtualDirectory(out) // "directory" for classfiles
+ private var currentSettings: Settings = initialSettings
+ private[nsc] var printResults = true // whether to print result lines
+ private[nsc] var totalSilence = false // whether to print anything
+ private var _initializeComplete = false // compiler is initialized
+ private var _isInitialized: Future[Boolean] = null // set up initialization future
+ private var bindExceptions = true // whether to bind the lastException variable
+ private var _executionWrapper = "" // code to be wrapped around all lines
+
+ /** We're going to go to some trouble to initialize the compiler asynchronously.
+ * It's critical that nothing call into it until it's been initialized or we will
+ * run into unrecoverable issues, but the perceived repl startup time goes
+ * through the roof if we wait for it. So we initialize it with a future and
+ * use a lazy val to ensure that any attempt to use the compiler object waits
+ * on the future.
+ */
+ private var _classLoader: AbstractFileClassLoader = null // active classloader
+ private val _compiler: Global = newCompiler(settings, reporter) // our private compiler
+
+ private val nextReqId = {
+ var counter = 0
+ () => { counter += 1 ; counter }
+ }
+
+ def compilerClasspath: Seq[URL] = (
+ if (isInitializeComplete) global.classPath.asURLs
+ else new PathResolver(settings).result.asURLs // the compiler's classpath
+ )
+ def settings = currentSettings
+ def mostRecentLine = prevRequestList match {
+ case Nil => ""
+ case req :: _ => req.originalLine
+ }
+ // Run the code body with the given boolean settings flipped to true.
+ def withoutWarnings[T](body: => T): T = beQuietDuring {
+ val saved = settings.nowarn.value
+ if (!saved)
+ settings.nowarn.value = true
+
+ try body
+ finally if (!saved) settings.nowarn.value = false
+ }
+
/** construct an interpreter that reports to Console */
def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
def this() = this(new Settings())
@@ -76,58 +135,35 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
lazy val formatting: Formatting = new Formatting {
val prompt = Properties.shellPromptString
}
- lazy val reporter: ConsoleReporter = new ReplReporter(this)
+ lazy val reporter: ReplReporter = new ReplReporter(this)
import formatting._
import reporter.{ printMessage, withoutTruncating }
- private[nsc] var printResults: Boolean = true // whether to print result lines
- private[nsc] var totalSilence: Boolean = false // whether to print anything
-
- /** directory to save .class files to */
- val virtualDirectory = new VirtualDirectory("(memory)", None) {
- private def pp(root: io.AbstractFile, indentLevel: Int) {
- val spaces = " " * indentLevel
- out.println(spaces + root.name)
- if (root.isDirectory)
- root.toList sortBy (_.name) foreach (x => pp(x, indentLevel + 1))
- }
- // print the contents hierarchically
- def show() = pp(this, 0)
- }
-
// This exists mostly because using the reporter too early leads to deadlock.
private def echo(msg: String) { Console println msg }
-
- /** We're going to go to some trouble to initialize the compiler asynchronously.
- * It's critical that nothing call into it until it's been initialized or we will
- * run into unrecoverable issues, but the perceived repl startup time goes
- * through the roof if we wait for it. So we initialize it with a future and
- * use a lazy val to ensure that any attempt to use the compiler object waits
- * on the future.
- */
- private val _compiler: Global = newCompiler(settings, reporter)
- private var _initializeComplete = false
private def _initSources = List(new BatchSourceFile("<init>", "class $repl_$init { }"))
private def _initialize() = {
try {
+ // todo. if this crashes, REPL will hang
new _compiler.Run() compileSources _initSources
_initializeComplete = true
true
}
catch AbstractOrMissingHandler()
}
+ private def tquoted(s: String) = "\"\"\"" + s + "\"\"\""
- // set up initialization future
- private var _isInitialized: () => Boolean = null
// argument is a thunk to execute after init is done
- def initialize(postInitSignal: => Unit): Unit = synchronized {
- if (_isInitialized == null)
- _isInitialized = scala.concurrent.ops future {
- val result = _initialize()
- postInitSignal
- result
+ def initialize(postInitSignal: => Unit) {
+ synchronized {
+ if (_isInitialized == null) {
+ _isInitialized = io.spawn {
+ try _initialize()
+ finally postInitSignal
+ }
}
+ }
}
def initializeSynchronous(): Unit = {
if (!isInitializeComplete) {
@@ -147,22 +183,21 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
initialize(())
}
// blocks until it is ; false means catastrophic failure
- if (_isInitialized()) _compiler
+ if (_isInitialized.get()) _compiler
else null
}
}
@deprecated("Use `global` for access to the compiler instance.", "2.9.0")
lazy val compiler: global.type = global
- // import global.{ treeWrapper => _, _ }
import global._
- import definitions.{ ScalaPackage, JavaLangPackage, PredefModule, RootClass }
- //
- // private implicit def privateTreeOps(t: Tree): List[Tree] = {
- // (new Traversable[Tree] {
- // def foreach[U](f: Tree => U): Unit = t foreach { x => f(x) ; () }
- // }).toList
- // }
+ import definitions.{ScalaPackage, JavaLangPackage, termMember, typeMember}
+ import rootMirror.{RootClass, getClassIfDefined, getModuleIfDefined, getRequiredModule, getRequiredClass}
+
+ implicit class ReplTypeOps(tp: Type) {
+ def orElse(other: => Type): Type = if (tp ne NoType) tp else other
+ def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp)
+ }
// TODO: If we try to make naming a lazy val, we run into big time
// scalac unhappiness with what look like cycles. It has not been easy to
@@ -171,28 +206,25 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
val global: imain.global.type = imain.global
} with Naming {
// make sure we don't overwrite their unwisely named res3 etc.
- override def freshUserVarName(): String = {
- val name = super.freshUserVarName()
- if (definedNameMap contains name) freshUserVarName()
+ def freshUserTermName(): TermName = {
+ val name = newTermName(freshUserVarName())
+ if (definedNameMap contains name) freshUserTermName()
else name
}
- def isInternalVarName(name: Name): Boolean = isInternalVarName("" + name)
+ def isUserTermName(name: Name) = isUserVarName("" + name)
+ def isInternalTermName(name: Name) = isInternalVarName("" + name)
}
import naming._
- // object dossiers extends {
- // val intp: imain.type = imain
- // } with Dossiers { }
- // import dossiers._
+ object deconstruct extends {
+ val global: imain.global.type = imain.global
+ } with StructuredTypeStrings
lazy val memberHandlers = new {
val intp: imain.type = imain
} with MemberHandlers
import memberHandlers._
- def atPickler[T](op: => T): T = atPhase(currentRun.picklerPhase)(op)
- def afterTyper[T](op: => T): T = atPhase(currentRun.typerPhase.next)(op)
-
/** Temporarily be quiet */
def beQuietDuring[T](body: => T): T = {
val saved = printResults
@@ -209,52 +241,36 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
def quietRun[T](code: String) = beQuietDuring(interpret(code))
- private def logAndDiscard[T](label: String, alt: => T): PartialFunction[Throwable, T] = {
- case t => repldbg(label + ": " + t) ; alt
- }
-
- /** whether to bind the lastException variable */
- private var bindExceptions = true
/** takes AnyRef because it may be binding a Throwable or an Exceptional */
- private def withLastExceptionLock[T](body: => T): T = {
+ private def withLastExceptionLock[T](body: => T, alt: => T): T = {
assert(bindExceptions, "withLastExceptionLock called incorrectly.")
bindExceptions = false
try beQuietDuring(body)
- catch logAndDiscard("bindLastException", null.asInstanceOf[T])
+ catch logAndDiscard("withLastExceptionLock", alt)
finally bindExceptions = true
}
- /** A string representing code to be wrapped around all lines. */
- private var _executionWrapper: String = ""
def executionWrapper = _executionWrapper
def setExecutionWrapper(code: String) = _executionWrapper = code
def clearExecutionWrapper() = _executionWrapper = ""
- lazy val lineManager = createLineManager()
-
/** interpreter settings */
lazy val isettings = new ISettings(this)
- /** Create a line manager. Overridable. */
- protected def createLineManager(): Line.Manager =
- if (ReplPropsKludge.noThreadCreation(settings)) null else new Line.Manager
-
/** Instantiate a compiler. Overridable. */
- protected def newCompiler(settings: Settings, reporter: Reporter) = {
+ protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = {
settings.outputDirs setSingleOutput virtualDirectory
settings.exposeEmptyPackage.value = true
-
- new Global(settings, reporter)
+ new Global(settings, reporter) with ReplGlobal {
+ override def toString: String = "<global>"
+ }
}
/** Parent classloader. Overridable. */
protected def parentClassLoader: ClassLoader =
settings.explicitParentLoader.getOrElse( this.getClass.getClassLoader() )
- /** the compiler's classpath, as URL's */
- lazy val compilerClasspath = global.classPath.asURLs
-
/* A single class loader is used for all commands interpreted by this Interpreter.
It would also be possible to create a new class loader for each command
to interpret. The advantages of the current approach are:
@@ -268,38 +284,39 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
shadow the old ones, and old code objects refer to the old
definitions.
*/
- private var _classLoader: AbstractFileClassLoader = null
def resetClassLoader() = {
repldbg("Setting new classloader: was " + _classLoader)
- _classLoader = makeClassLoader()
+ _classLoader = null
+ ensureClassLoader()
}
- def classLoader: AbstractFileClassLoader = {
+ final def ensureClassLoader() {
if (_classLoader == null)
- resetClassLoader()
-
+ _classLoader = makeClassLoader()
+ }
+ def classLoader: AbstractFileClassLoader = {
+ ensureClassLoader()
_classLoader
}
- private def makeClassLoader(): AbstractFileClassLoader = {
- val parent =
- if (parentClassLoader == null) ScalaClassLoader fromURLs compilerClasspath
- else new URLClassLoader(compilerClasspath, parentClassLoader)
-
- new AbstractFileClassLoader(virtualDirectory, parent) {
- /** Overridden here to try translating a simple name to the generated
- * class name if the original attempt fails. This method is used by
- * getResourceAsStream as well as findClass.
- */
- override protected def findAbstractFile(name: String): AbstractFile = {
- super.findAbstractFile(name) match {
- // deadlocks on startup if we try to translate names too early
- case null if isInitializeComplete =>
- generatedName(name) map (x => super.findAbstractFile(x)) orNull
- case file =>
- file
- }
+ private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(virtualDirectory, parent) {
+ /** Overridden here to try translating a simple name to the generated
+ * class name if the original attempt fails. This method is used by
+ * getResourceAsStream as well as findClass.
+ */
+ override protected def findAbstractFile(name: String): AbstractFile = {
+ super.findAbstractFile(name) match {
+ // deadlocks on startup if we try to translate names too early
+ case null if isInitializeComplete =>
+ generatedName(name) map (x => super.findAbstractFile(x)) orNull
+ case file =>
+ file
}
}
}
+ private def makeClassLoader(): AbstractFileClassLoader =
+ new TranslatingClassLoader(parentClassLoader match {
+ case null => ScalaClassLoader fromURLs compilerClasspath
+ case p => new URLClassLoader(compilerClasspath, p)
+ })
def getInterpreterClassLoader() = classLoader
@@ -313,13 +330,13 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
* }}}
*/
def generatedName(simpleName: String): Option[String] = {
- if (simpleName endsWith "$") optFlatName(simpleName.init) map (_ + "$")
+ if (simpleName endsWith nme.MODULE_SUFFIX_STRING) optFlatName(simpleName.init) map (_ + nme.MODULE_SUFFIX_STRING)
else optFlatName(simpleName)
}
def flatName(id: String) = optFlatName(id) getOrElse id
def optFlatName(id: String) = requestForIdent(id) map (_ fullFlatName id)
- def allDefinedNames = definedNameMap.keys.toList sortBy (_.toString)
+ def allDefinedNames = definedNameMap.keys.toList.sorted
def pathToType(id: String): String = pathToName(newTypeName(id))
def pathToTerm(id: String): String = pathToName(newTermName(id))
def pathToName(name: Name): String = {
@@ -332,7 +349,7 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
private def mostRecentlyHandledTree: Option[Tree] = {
prevRequests.reverse foreach { req =>
req.handlers.reverse foreach {
- case x: MemberDefHandler if x.definesValue && !isInternalVarName(x.name) => return Some(x.member)
+ case x: MemberDefHandler if x.definesValue && !isInternalTermName(x.name) => return Some(x.member)
case _ => ()
}
}
@@ -371,9 +388,9 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
oldReq <- definedNameMap get name.companionName
newSym <- req.definedSymbols get name
oldSym <- oldReq.definedSymbols get name.companionName
+ if Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule }
} {
- replwarn("warning: previously defined %s is not a companion to %s.".format(
- stripString("" + oldSym), stripString("" + newSym)))
+ afterTyper(replwarn(s"warning: previously defined $oldSym is not a companion to $newSym."))
replwarn("Companions must be defined together; you may wish to use :paste mode for this.")
}
@@ -405,14 +422,18 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
}
}
+ def compileSourcesKeepingRun(sources: SourceFile*) = {
+ val run = new Run()
+ reporter.reset()
+ run compileSources sources.toList
+ (!reporter.hasErrors, run)
+ }
+
/** Compile an nsc SourceFile. Returns true if there are
* no compilation errors, or false otherwise.
*/
- def compileSources(sources: SourceFile*): Boolean = {
- reporter.reset()
- new Run() compileSources sources.toList
- !reporter.hasErrors
- }
+ def compileSources(sources: SourceFile*): Boolean =
+ compileSourcesKeepingRun(sources: _*)._1
/** Compile a string. Returns true if there are no
* compilation errors, or false otherwise.
@@ -422,7 +443,10 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
/** Build a request from the user. `trees` is `line` after being parsed.
*/
- private def buildRequest(line: String, trees: List[Tree]): Request = new Request(line, trees)
+ private def buildRequest(line: String, trees: List[Tree]): Request = {
+ executingRequest = new Request(line, trees)
+ executingRequest
+ }
// rewriting "5 // foo" to "val x = { 5 // foo }" creates broken code because
// the close brace is commented out. Strip single-line comments.
@@ -457,11 +481,19 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
case Some(Nil) => return Left(IR.Error) // parse error or empty input
case Some(trees) => trees
}
- // repltrace(
- // trees map { t =>
- // t map { t0 => t0.getClass + " at " + safePos(t0, -1) + "\n" }
- // } mkString
- // )
+ repltrace(
+ trees map (t => {
+ // [Eugene to Paul] previously it just said `t map ...`
+ // because there was an implicit conversion from Tree to a list of Trees
+ // however Martin and I have removed the conversion
+ // (it was conflicting with the new reflection API),
+ // so I had to rewrite this a bit
+ val subs = t collect { case sub => sub }
+ subs map (t0 =>
+ " " + safePos(t0, -1) + ": " + t0.shortClass + "\n"
+ ) mkString ""
+ }) mkString "\n"
+ )
// If the last tree is a bare expression, pinpoint where it begins using the
// AST node position and snap the line off there. Rewrite the code embodied
// by the last tree as a ValDef instead, so we can access the value.
@@ -519,17 +551,10 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
Right(buildRequest(line, trees))
}
- def typeCleanser(sym: Symbol, memberName: Name): Type = {
- // the types are all =>T; remove the =>
- val tp1 = afterTyper(sym.info.nonPrivateDecl(memberName).tpe match {
- case NullaryMethodType(tp) => tp
- case tp => tp
- })
- // normalize non-public types so we don't see protected aliases like Self
- afterTyper(tp1 match {
- case TypeRef(_, sym, _) if !sym.isPublic => tp1.normalize
- case tp => tp
- })
+ // normalize non-public types so we don't see protected aliases like Self
+ def normalizeNonPublic(tp: Type) = tp match {
+ case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.dealias
+ case _ => tp
}
/**
@@ -541,8 +566,10 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
* e.g. that there were no parse errors.
*/
def interpret(line: String): IR.Result = interpret(line, false)
+ def interpretSynthetic(line: String): IR.Result = interpret(line, true)
def interpret(line: String, synthetic: Boolean): IR.Result = {
def loadAndRunReq(req: Request) = {
+ classLoader.setAsContext()
val (result, succeeded) = req.loadAndRun
/** To our displeasure, ConsoleReporter offers only printMessage,
@@ -586,7 +613,7 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
* @param value the object value to bind to it
* @return an indication of whether the binding succeeded
*/
- def bind(name: String, boundType: String, value: Any): IR.Result = {
+ def bind(name: String, boundType: String, value: Any, modifiers: List[String] = Nil): IR.Result = {
val bindRep = new ReadEvalPrint()
val run = bindRep.compile("""
|object %s {
@@ -602,15 +629,23 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
IR.Error
case Right(_) =>
- val line = "val %s = %s.value".format(name, bindRep.evalPath)
+ val line = "%sval %s = %s.value".format(modifiers map (_ + " ") mkString, name, bindRep.evalPath)
repldbg("Interpreting: " + line)
interpret(line)
}
}
+ def directBind(name: String, boundType: String, value: Any): IR.Result = {
+ val result = bind(name, boundType, value)
+ if (result == IR.Success)
+ directlyBoundNames += newTermName(name)
+ result
+ }
+ def directBind(p: NamedParam): IR.Result = directBind(p.name, p.tpe, p.value)
+ def directBind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = directBind((name, value))
def rebind(p: NamedParam): IR.Result = {
val name = p.name
- val oldType = typeOfTerm(name) getOrElse { return IR.Error }
+ val oldType = typeOfTerm(name) orElse { return IR.Error }
val newType = p.tpe
val tempName = freshInternalVarName()
@@ -622,18 +657,22 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
if (ids.isEmpty) IR.Success
else interpret("import " + ids.mkString(", "))
- def quietBind(p: NamedParam): IR.Result = beQuietDuring(bind(p))
- def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
- def bind[T: Manifest](name: String, value: T): IR.Result = bind((name, value))
- def bindValue(x: Any): IR.Result = bindValue(freshUserVarName(), x)
- def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x)
+ def quietBind(p: NamedParam): IR.Result = beQuietDuring(bind(p))
+ def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
+ def bind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = bind((name, value))
+ def bindSyntheticValue(x: Any): IR.Result = bindValue(freshInternalVarName(), x)
+ def bindValue(x: Any): IR.Result = bindValue(freshUserVarName(), x)
+ def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x)
/** Reset this interpreter, forgetting all user-specified requests. */
def reset() {
- virtualDirectory.clear()
+ clearExecutionWrapper()
resetClassLoader()
resetAllCreators()
prevRequests.clear()
+ referencedNameMap.clear()
+ definedNameMap.clear()
+ virtualDirectory.clear()
}
/** This instance is no longer needed, so release any resources
@@ -654,39 +693,25 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
class ReadEvalPrint(lineId: Int) {
def this() = this(freshLineId())
+ private var lastRun: Run = _
+ private var evalCaught: Option[Throwable] = None
+ private var conditionalWarnings: List[ConditionalWarning] = Nil
+
val packageName = sessionNames.line + lineId
val readName = sessionNames.read
val evalName = sessionNames.eval
val printName = sessionNames.print
+ val resultName = sessionNames.result
- class LineExceptional(ex: Throwable) extends Exceptional(ex) {
- private def showReplInternal = isettings.showInternalStackTraces
-
- override def spanFn(frame: JavaStackFrame) =
- if (showReplInternal) super.spanFn(frame)
- else !(frame.className startsWith evalPath)
-
- override def contextPrelude = super.contextPrelude + (
- if (showReplInternal) ""
- else "/* The repl internal portion of the stack trace is elided. */\n"
- )
- }
def bindError(t: Throwable) = {
if (!bindExceptions) // avoid looping if already binding
throw t
val unwrapped = unwrap(t)
- withLastExceptionLock {
- if (opt.richExes) {
- val ex = new LineExceptional(unwrapped)
- bind[Exceptional]("lastException", ex)
- ex.contextHead + "\n(access lastException for the full trace)"
- }
- else {
- bind[Throwable]("lastException", unwrapped)
- util.stackTraceString(unwrapped)
- }
- }
+ withLastExceptionLock[String]({
+ directBind[Throwable]("lastException", unwrapped)(tagOfThrowable, classTag[Throwable])
+ util.stackTraceString(unwrapped)
+ }, util.stackTraceString(unwrapped))
}
// TODO: split it out into a package object and a regular
@@ -700,8 +725,14 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
def evalPath = pathTo(evalName)
def printPath = pathTo(printName)
- def call(name: String, args: Any*): AnyRef =
- evalMethod(name).invoke(evalClass, args.map(_.asInstanceOf[AnyRef]): _*)
+ def call(name: String, args: Any*): AnyRef = {
+ val m = evalMethod(name)
+ repldbg("Invoking: " + m)
+ if (args.nonEmpty)
+ repldbg(" with args: " + args.mkString(", "))
+
+ m.invoke(evalClass, args.map(_.asInstanceOf[AnyRef]): _*)
+ }
def callEither(name: String, args: Any*): Either[Throwable, AnyRef] =
try Right(call(name, args: _*))
@@ -718,51 +749,67 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
private def load(path: String): Class[_] = {
try Class.forName(path, true, classLoader)
- catch { case ex => evalError(path, unwrap(ex)) }
+ catch { case ex: Throwable => evalError(path, unwrap(ex)) }
}
- var evalCaught: Option[Throwable] = None
lazy val evalClass = load(evalPath)
- lazy val evalValue = callEither(evalName) match {
+ lazy val evalValue = callEither(resultName) match {
case Left(ex) => evalCaught = Some(ex) ; None
case Right(result) => Some(result)
}
def compile(source: String): Boolean = compileAndSaveRun("<console>", source)
- def lineAfterTyper[T](op: => T): T = {
- assert(lastRun != null, "Internal error: trying to use atPhase, but Run is null." + this)
- atPhase(lastRun.typerPhase.next)(op)
- }
/** The innermost object inside the wrapper, found by
* following accessPath into the outer one.
*/
def resolvePathToSymbol(accessPath: String): Symbol = {
- val readRoot = definitions.getModule(readPath) // the outermost wrapper
- (accessPath split '.').foldLeft(readRoot) { (sym, name) =>
- if (name == "") sym else
- lineAfterTyper(sym.info member newTermName(name))
+ val readRoot = getRequiredModule(readPath) // the outermost wrapper
+ (accessPath split '.').foldLeft(readRoot: Symbol) {
+ case (sym, "") => sym
+ case (sym, name) => afterTyper(termMember(sym, name))
}
}
- private var lastRun: Run = _
+ /** We get a bunch of repeated warnings for reasons I haven't
+ * entirely figured out yet. For now, squash.
+ */
+ private def updateRecentWarnings(run: Run) {
+ def loop(xs: List[(Position, String)]): List[(Position, String)] = xs match {
+ case Nil => Nil
+ case ((pos, msg)) :: rest =>
+ val filtered = rest filter { case (pos0, msg0) =>
+ (msg != msg0) || (pos.lineContent.trim != pos0.lineContent.trim) || {
+ // same messages and same line content after whitespace removal
+ // but we want to let through multiple warnings on the same line
+ // from the same run. The untrimmed line will be the same since
+ // there's no whitespace indenting blowing it.
+ (pos.lineContent == pos0.lineContent)
+ }
+ }
+ ((pos, msg)) :: loop(filtered)
+ }
+ val warnings = loop(run.allConditionalWarnings flatMap (_.warnings))
+ if (warnings.nonEmpty)
+ mostRecentWarnings = warnings
+ }
private def evalMethod(name: String) = evalClass.getMethods filter (_.getName == name) match {
case Array(method) => method
case xs => sys.error("Internal error: eval object " + evalClass + ", " + xs.mkString("\n", "\n", ""))
}
private def compileAndSaveRun(label: String, code: String) = {
showCodeIfDebugging(code)
- reporter.reset()
- lastRun = new Run()
- lastRun.compileSources(List(new BatchSourceFile(label, packaged(code))))
- !reporter.hasErrors
+ val (success, run) = compileSourcesKeepingRun(new BatchSourceFile(label, packaged(code)))
+ updateRecentWarnings(run)
+ lastRun = run
+ success
}
}
/** One line of code submitted by the user for interpretation */
// private
class Request(val line: String, val trees: List[Tree]) {
+ val reqId = nextReqId()
val lineRep = new ReadEvalPrint()
- import lineRep.lineAfterTyper
private var _originalLine: String = null
def withOriginalLine(s: String): this.type = { _originalLine = s ; this }
@@ -770,6 +817,7 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
/** handlers for each tree in this request */
val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _)
+ def defHandlers = handlers collect { case x: MemberDefHandler => x }
/** all (public) names defined by these statements */
val definedNames = handlers flatMap (_.definedNames)
@@ -781,6 +829,10 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
def termNames = handlers flatMap (_.definesTerm)
def typeNames = handlers flatMap (_.definesType)
def definedOrImported = handlers flatMap (_.definedOrImported)
+ def definedSymbolList = defHandlers flatMap (_.definedSymbols)
+
+ def definedTypeSymbol(name: String) = definedSymbols(newTypeName(name))
+ def definedTermSymbol(name: String) = definedSymbols(newTermName(name))
/** Code to import bound names from previous lines - accessPath is code to
* append to objectName to access anything bound by request.
@@ -797,7 +849,7 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
* $line5.$iw$$iw$$iw$Bippy // fullFlatName
*/
def fullFlatName(name: String) =
- lineRep.readPath + accessPath.replace('.', '$') + "$" + name
+ lineRep.readPath + accessPath.replace('.', '$') + nme.NAME_JOIN_STRING + name
/** The unmangled symbol name, but supplemented with line info. */
def disambiguated(name: Name): String = name + " (in " + lineRep + ")"
@@ -810,10 +862,25 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
/** generate the source code for the object that computes this request */
private object ObjectSourceCode extends CodeAssembler[MemberHandler] {
+ def path = pathToTerm("$intp")
+ def envLines = {
+ if (!isReplPower) Nil // power mode only for now
+ // $intp is not bound; punt, but include the line.
+ else if (path == "$intp") List(
+ "def $line = " + tquoted(originalLine),
+ "def $trees = Nil"
+ )
+ else List(
+ "def $line = " + tquoted(originalLine),
+ "def $req = %s.requestForReqId(%s).orNull".format(path, reqId),
+ "def $trees = if ($req eq null) Nil else $req.trees".format(lineRep.readName, path, reqId)
+ )
+ }
+
val preamble = """
|object %s {
- |%s%s
- """.stripMargin.format(lineRep.readName, importsPreamble, indentCode(toCompute))
+ |%s%s%s
+ """.stripMargin.format(lineRep.readName, envLines.map(" " + _ + ";\n").mkString, importsPreamble, indentCode(toCompute))
val postamble = importsTrailer + "\n}"
val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this
}
@@ -826,11 +893,7 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
if (!handlers.last.definesValue) ""
else handlers.last.definesTerm match {
case Some(vname) if typeOf contains vname =>
- """
- |lazy val $result = {
- | %s
- | %s
- |}""".stripMargin.format(lineRep.printName, fullPath(vname))
+ "lazy val %s = %s".format(lineRep.resultName, fullPath(vname))
case _ => ""
}
// first line evaluates object to make sure constructor is run
@@ -875,59 +938,50 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
typeOf
typesOfDefinedTerms
- // compile the result-extraction object
- beSilentDuring {
- lineRep compile ResultObjectSourceCode(handlers)
+ // Assign symbols to the original trees
+ // TODO - just use the new trees.
+ defHandlers foreach { dh =>
+ val name = dh.member.name
+ definedSymbols get name foreach { sym =>
+ dh.member setSymbol sym
+ repldbg("Set symbol of " + name + " to " + sym.defString)
+ }
}
+
+ // compile the result-extraction object
+ withoutWarnings(lineRep compile ResultObjectSourceCode(handlers))
}
}
lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath)
- def applyToResultMember[T](name: Name, f: Symbol => T) = lineAfterTyper(f(resultSymbol.info.nonPrivateDecl(name)))
+ def applyToResultMember[T](name: Name, f: Symbol => T) = afterTyper(f(resultSymbol.info.nonPrivateDecl(name)))
/* typeOf lookup with encoding */
def lookupTypeOf(name: Name) = typeOf.getOrElse(name, typeOf(global.encode(name.toString)))
def simpleNameOfType(name: TypeName) = (compilerTypeOf get name) map (_.typeSymbol.simpleName)
- private def typeMap[T](f: Type => T): Map[Name, T] =
- termNames ++ typeNames map (x => x -> f(typeCleanser(resultSymbol, x))) toMap
+ private def typeMap[T](f: Type => T) =
+ mapFrom[Name, Name, T](termNames ++ typeNames)(x => f(cleanMemberDecl(resultSymbol, x)))
/** Types of variables defined by this request. */
- lazy val compilerTypeOf = typeMap[Type](x => x)
+ lazy val compilerTypeOf = typeMap[Type](x => x) withDefaultValue NoType
/** String representations of same. */
lazy val typeOf = typeMap[String](tp => afterTyper(tp.toString))
// lazy val definedTypes: Map[Name, Type] = {
// typeNames map (x => x -> afterTyper(resultSymbol.info.nonPrivateDecl(x).tpe)) toMap
// }
- lazy val definedSymbols: Map[Name, Symbol] = (
+ lazy val definedSymbols = (
termNames.map(x => x -> applyToResultMember(x, x => x)) ++
- typeNames.map(x => x -> compilerTypeOf.get(x).map(_.typeSymbol).getOrElse(NoSymbol))
- ).toMap
+ typeNames.map(x => x -> compilerTypeOf(x).typeSymbolDirect)
+ ).toMap[Name, Symbol] withDefaultValue NoSymbol
- lazy val typesOfDefinedTerms: Map[Name, Type] =
- termNames map (x => x -> applyToResultMember(x, _.tpe)) toMap
+ lazy val typesOfDefinedTerms = mapFrom[Name, Name, Type](termNames)(x => applyToResultMember(x, _.tpe))
/** load and run the code using reflection */
def loadAndRun: (String, Boolean) = {
- if (lineManager == null) return {
- try { ("" + (lineRep call sessionNames.print), true) }
- catch { case ex => (lineRep.bindError(ex), false) }
- }
- import interpreter.Line._
-
- try {
- val execution = lineManager.set(originalLine)(lineRep call sessionNames.print)
- execution.await()
-
- execution.state match {
- case Done => ("" + execution.get(), true)
- case Threw => (lineRep.bindError(execution.caught()), false)
- case Cancelled => ("Execution interrupted by signal.\n", false)
- case Running => ("Execution still running! Seems impossible.", false)
- }
- }
- finally lineManager.clear()
+ try { ("" + (lineRep call sessionNames.print), true) }
+ catch { case ex: Throwable => (lineRep.bindError(ex), false) }
}
override def toString = "Request(line=%s, %s trees)".format(line, trees.size)
@@ -946,6 +1000,16 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
case _ => naming.mostRecentVar
})
+ private var mostRecentWarnings: List[(global.Position, String)] = Nil
+ def lastWarnings = mostRecentWarnings
+
+ def treesForRequestId(id: Int): List[Tree] =
+ requestForReqId(id).toList flatMap (_.trees)
+
+ def requestForReqId(id: Int): Option[Request] =
+ if (executingRequest != null && executingRequest.reqId == id) Some(executingRequest)
+ else prevRequests find (_.reqId == id)
+
def requestForName(name: Name): Option[Request] = {
assert(definedNameMap != null, "definedNameMap is null")
definedNameMap get name
@@ -957,113 +1021,118 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
def requestHistoryForName(name: Name): List[Request] =
prevRequests.toList.reverse filter (_.definedNames contains name)
- def safeClass(name: String): Option[Symbol] = {
- try Some(definitions.getClass(newTypeName(name)))
- catch { case _: MissingRequirementError => None }
- }
-
- def safeModule(name: String): Option[Symbol] = {
- try Some(definitions.getModule(newTermName(name)))
- catch { case _: MissingRequirementError => None }
- }
-
def definitionForName(name: Name): Option[MemberHandler] =
requestForName(name) flatMap { req =>
req.handlers find (_.definedNames contains name)
}
def valueOfTerm(id: String): Option[AnyRef] =
- requestForIdent(id) flatMap (_.getEval)
+ requestForName(newTermName(id)) flatMap (_.getEval)
def classOfTerm(id: String): Option[JClass] =
valueOfTerm(id) map (_.getClass)
- def typeOfTerm(id: String): Option[Type] = newTermName(id) match {
- case nme.ROOTPKG => Some(definitions.RootClass.tpe)
- case name => requestForName(name) flatMap (_.compilerTypeOf get name)
+ def typeOfTerm(id: String): Type = newTermName(id) match {
+ case nme.ROOTPKG => RootClass.tpe
+ case name => requestForName(name).fold(NoType: Type)(_ compilerTypeOf name)
}
+ def symbolOfType(id: String): Symbol =
+ requestForName(newTypeName(id)).fold(NoSymbol: Symbol)(_ definedTypeSymbol id)
+
def symbolOfTerm(id: String): Symbol =
- requestForIdent(id) flatMap (_.definedSymbols get newTermName(id)) getOrElse NoSymbol
+ requestForIdent(newTermName(id)).fold(NoSymbol: Symbol)(_ definedTermSymbol id)
def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = {
- for {
- clazz <- classOfTerm(id)
- tpe <- runtimeTypeOfTerm(id)
- nonAnon <- clazz.supers find (!_.isScalaAnonymous)
- } yield {
- (nonAnon, tpe)
+ classOfTerm(id) flatMap { clazz =>
+ clazz.supers find (!_.isScalaAnonymous) map { nonAnon =>
+ (nonAnon, runtimeTypeOfTerm(id))
+ }
}
}
- def runtimeTypeOfTerm(id: String): Option[Type] = {
- for {
- tpe <- typeOfTerm(id)
- clazz <- classOfTerm(id)
- val staticSym = tpe.typeSymbol
- runtimeSym <- safeClass(clazz.getName)
- if runtimeSym != staticSym
- if runtimeSym isSubClass staticSym
- } yield {
- runtimeSym.info
+ def runtimeTypeOfTerm(id: String): Type = {
+ typeOfTerm(id) andAlso { tpe =>
+ val clazz = classOfTerm(id) getOrElse { return NoType }
+ val staticSym = tpe.typeSymbol
+ val runtimeSym = getClassIfDefined(clazz.getName)
+
+ if ((runtimeSym != NoSymbol) && (runtimeSym != staticSym) && (runtimeSym isSubClass staticSym))
+ runtimeSym.info
+ else NoType
+ }
+ }
+ def cleanMemberDecl(owner: Symbol, member: Name): Type = afterTyper {
+ normalizeNonPublic {
+ owner.info.nonPrivateDecl(member).tpe match {
+ case NullaryMethodType(tp) => tp
+ case tp => tp
+ }
}
}
- private object exprTyper extends { val repl: IMain.this.type = imain } with ExprTyper { }
+ object exprTyper extends {
+ val repl: IMain.this.type = imain
+ } with ExprTyper { }
+
def parse(line: String): Option[List[Tree]] = exprTyper.parse(line)
- def typeOfExpression(expr: String, silent: Boolean = true): Option[Type] = {
+
+ def symbolOfLine(code: String): Symbol =
+ exprTyper.symbolOfLine(code)
+
+ def typeOfExpression(expr: String, silent: Boolean = true): Type =
exprTyper.typeOfExpression(expr, silent)
- }
protected def onlyTerms(xs: List[Name]) = xs collect { case x: TermName => x }
protected def onlyTypes(xs: List[Name]) = xs collect { case x: TypeName => x }
- def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalVarName
- def definedTypes = onlyTypes(allDefinedNames)
- def definedSymbols = prevRequests.toSet flatMap ((x: Request) => x.definedSymbols.values)
+ def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalTermName
+ def definedTypes = onlyTypes(allDefinedNames)
+ def definedSymbols = prevRequestList.flatMap(_.definedSymbols.values).toSet[Symbol]
+ def definedSymbolList = prevRequestList flatMap (_.definedSymbolList) filterNot (s => isInternalTermName(s.name))
- private def findName(name: Name) = definedSymbols find (_.name == name)
-
- private def missingOpt(op: => Symbol): Option[Symbol] =
- try Some(op)
- catch { case _: MissingRequirementError => None }
- private def missingWrap(op: => Symbol): Symbol =
- try op
- catch { case _: MissingRequirementError => NoSymbol }
+ // Terms with user-given names (i.e. not res0 and not synthetic)
+ def namedDefinedTerms = definedTerms filterNot (x => isUserVarName("" + x) || directlyBoundNames(x))
- def optCompilerClass(name: String) = missingOpt(definitions.getClass(name))
- def optCompilerModule(name: String) = missingOpt(definitions.getModule(name))
- def getCompilerClass(name: String) = missingWrap(definitions.getClass(name))
- def getCompilerModule(name: String) = missingWrap(definitions.getModule(name))
+ private def findName(name: Name) = definedSymbols find (_.name == name) getOrElse NoSymbol
/** Translate a repl-defined identifier into a Symbol.
*/
- def apply(name: String): Symbol = {
- val tpname = newTypeName(name)
- (
- findName(tpname)
- orElse findName(tpname.companionName)
- orElse optCompilerClass(name)
- orElse optCompilerModule(name)
- getOrElse NoSymbol
- )
- }
+ def apply(name: String): Symbol =
+ types(name) orElse terms(name)
+
def types(name: String): Symbol = {
- findName(newTypeName(name)) getOrElse getCompilerClass(name)
+ val tpname = newTypeName(name)
+ findName(tpname) orElse getClassIfDefined(tpname)
}
def terms(name: String): Symbol = {
- findName(newTermName(name)) getOrElse getCompilerModule(name)
+ val termname = newTypeName(name)
+ findName(termname) orElse getModuleIfDefined(termname)
}
+ // [Eugene to Paul] possibly you could make use of TypeTags here
+ def types[T: ClassTag] : Symbol = types(classTag[T].runtimeClass.getName)
+ def terms[T: ClassTag] : Symbol = terms(classTag[T].runtimeClass.getName)
+ def apply[T: ClassTag] : Symbol = apply(classTag[T].runtimeClass.getName)
+
+ def classSymbols = allDefSymbols collect { case x: ClassSymbol => x }
+ def methodSymbols = allDefSymbols collect { case x: MethodSymbol => x }
/** the previous requests this interpreter has processed */
- private lazy val prevRequests = mutable.ListBuffer[Request]()
- private lazy val referencedNameMap = mutable.Map[Name, Request]()
- private lazy val definedNameMap = mutable.Map[Name, Request]()
- protected def prevRequestList = prevRequests.toList
- private def allHandlers = prevRequestList flatMap (_.handlers)
- def allSeenTypes = prevRequestList flatMap (_.typeOf.values.toList) distinct
- def allImplicits = allHandlers filter (_.definesImplicit) flatMap (_.definedNames)
- def importHandlers = allHandlers collect { case x: ImportHandler => x }
+ private var executingRequest: Request = _
+ private val prevRequests = mutable.ListBuffer[Request]()
+ private val referencedNameMap = mutable.Map[Name, Request]()
+ private val definedNameMap = mutable.Map[Name, Request]()
+ private val directlyBoundNames = mutable.Set[Name]()
+
+ def allHandlers = prevRequestList flatMap (_.handlers)
+ def allDefHandlers = allHandlers collect { case x: MemberDefHandler => x }
+ def allDefSymbols = allDefHandlers map (_.symbol) filter (_ ne NoSymbol)
+
+ def lastRequest = if (prevRequests.isEmpty) null else prevRequests.last
+ def prevRequestList = prevRequests.toList
+ def allSeenTypes = prevRequestList flatMap (_.typeOf.values.toList) distinct
+ def allImplicits = allHandlers filter (_.definesImplicit) flatMap (_.definedNames)
+ def importHandlers = allHandlers collect { case x: ImportHandler => x }
def visibleTermNames: List[Name] = definedTerms ++ importedTerms distinct
@@ -1092,9 +1161,14 @@ class IMain(val settings: Settings, protected val out: JPrintWriter) extends Imp
/** Secret bookcase entrance for repl debuggers: end the line
* with "// show" and see what's going on.
*/
- if (repllog.isTrace || (code.lines exists (_.trim endsWith "// show"))) {
- echo(code)
- parse(code) foreach (ts => ts foreach (t => withoutUnwrapping(repldbg(asCompactString(t)))))
+ def isShow = code.lines exists (_.trim endsWith "// show")
+ def isShowRaw = code.lines exists (_.trim endsWith "// raw")
+
+ // old style
+ beSilentDuring(parse(code)) foreach { ts =>
+ ts foreach { t =>
+ withoutUnwrapping(repldbg(asCompactString(t)))
+ }
}
}
@@ -1136,7 +1210,7 @@ object IMain {
def maxStringLength: Int
def isTruncating: Boolean
def truncate(str: String): String = {
- if (isTruncating && str.length > maxStringLength)
+ if (isTruncating && (maxStringLength != 0 && str.length > maxStringLength))
(str take maxStringLength - 3) + "..."
else str
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala
index 59c9336..a8f77af 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ISettings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Alexander Spoon
*/
@@ -24,7 +24,7 @@ class ISettings(intp: IMain) {
* more than this number of characters, then the printout is
* truncated.
*/
- var maxPrintString = 800
+ var maxPrintString = replProps.maxPrintString.option.getOrElse(800)
/** The maximum number of completion candidates to print for tab
* completion without requiring confirmation.
diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/compiler/scala/tools/nsc/interpreter/Imports.scala
index 10b3d82..73d962b 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Imports.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -16,14 +16,14 @@ trait Imports {
import memberHandlers._
def isNoImports = settings.noimports.value
- def isNoPredef = false // settings.nopredef.value
+ def isNoPredef = settings.nopredef.value
/** Synthetic import handlers for the language defined imports. */
private def makeWildcardImportHandler(sym: Symbol): ImportHandler = {
val hd :: tl = sym.fullName.split('.').toList map newTermName
val tree = Import(
tl.foldLeft(Ident(hd): Tree)((x, y) => Select(x, y)),
- List(ImportSelector(nme.WILDCARD, -1, null, -1))
+ ImportSelector.wildList
)
tree setSymbol sym
new ImportHandler(tree)
@@ -34,8 +34,9 @@ trait Imports {
def languageWildcards: List[Type] = languageWildcardSyms map (_.tpe)
def languageWildcardHandlers = languageWildcardSyms map makeWildcardImportHandler
- def importedTerms = onlyTerms(importHandlers flatMap (_.importedNames))
- def importedTypes = onlyTypes(importHandlers flatMap (_.importedNames))
+ def allImportedNames = importHandlers flatMap (_.importedNames)
+ def importedTerms = onlyTerms(allImportedNames)
+ def importedTypes = onlyTypes(allImportedNames)
/** Types which have been wildcard imported, such as:
* val x = "abc" ; import x._ // type java.lang.String
@@ -49,10 +50,7 @@ trait Imports {
* into the compiler scopes.
*/
def sessionWildcards: List[Type] = {
- importHandlers flatMap {
- case x if x.importsWildcard => x.targetType
- case _ => None
- } distinct
+ importHandlers filter (_.importsWildcard) map (_.targetType) distinct
}
def wildcardTypes = languageWildcards ++ sessionWildcards
@@ -63,14 +61,15 @@ trait Imports {
def importedTypeSymbols = importedSymbols collect { case x: TypeSymbol => x }
def implicitSymbols = importedSymbols filter (_.isImplicit)
- def importedTermNamed(name: String) = importedTermSymbols find (_.name.toString == name)
+ def importedTermNamed(name: String): Symbol =
+ importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol
/** Tuples of (source, imported symbols) in the order they were imported.
*/
def importedSymbolsBySource: List[(Symbol, List[Symbol])] = {
val lang = languageWildcardSyms map (sym => (sym, membersAtPickler(sym)))
- val session = importHandlers filter (_.targetType.isDefined) map { mh =>
- (mh.targetType.get.typeSymbol, mh.importedSymbols)
+ val session = importHandlers filter (_.targetType != NoType) map { mh =>
+ (mh.targetType.typeSymbol, mh.importedSymbols)
}
lang ++ session
@@ -192,5 +191,5 @@ trait Imports {
prevRequestList flatMap (req => req.handlers map (req -> _))
private def membersAtPickler(sym: Symbol): List[Symbol] =
- atPickler(sym.info.nonPrivateMembers)
-}
\ No newline at end of file
+ beforePickler(sym.info.nonPrivateMembers.toList)
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
index 55706f4..8331fdd 100644
--- a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Stepan Koltsov
*/
@@ -22,7 +22,6 @@ trait InteractiveReader {
def history: History
def completion: Completion
- def keyBindings: List[KeyBinding]
def eraseLine(): Unit
def redrawLine(): Unit
def currentLine: String
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
index 8b38782..219cb35 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -9,29 +9,29 @@ package interpreter
import scala.tools.jline._
import scala.tools.jline.console.completer._
import Completion._
-import collection.mutable.ListBuffer
+import scala.collection.mutable.ListBuffer
// REPL completor - queries supplied interpreter for valid
// completions based on current contents of buffer.
class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput {
val global: intp.global.type = intp.global
import global._
- import definitions.{ PredefModule, RootClass, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage }
+ import definitions.{ PredefModule, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage }
+ import rootMirror.{ RootClass, getModuleIfDefined }
type ExecResult = Any
- import intp.{ debugging, afterTyper }
+ import intp.{ debugging }
// verbosity goes up with consecutive tabs
private var verbosity: Int = 0
def resetVerbosity() = verbosity = 0
- def getType(name: String, isModule: Boolean) = {
- val f = if (isModule) definitions.getModule(_: Name) else definitions.getClass(_: Name)
- try Some(f(name).tpe)
- catch { case _: MissingRequirementError => None }
- }
-
- def typeOf(name: String) = getType(name, false)
- def moduleOf(name: String) = getType(name, true)
+ def getSymbol(name: String, isModule: Boolean) = (
+ if (isModule) getModuleIfDefined(name)
+ else getModuleIfDefined(name)
+ )
+ def getType(name: String, isModule: Boolean) = getSymbol(name, isModule).tpe
+ def typeOf(name: String) = getType(name, false)
+ def moduleOf(name: String) = getType(name, true)
trait CompilerCompletion {
def tp: Type
@@ -46,16 +46,16 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
private def anyMembers = AnyClass.tpe.nonPrivateMembers
def anyRefMethodsToShow = Set("isInstanceOf", "asInstanceOf", "toString")
- def tos(sym: Symbol) = sym.name.decode.toString
- def memberNamed(s: String) = members find (x => tos(x) == s)
- def hasMethod(s: String) = methods exists (x => tos(x) == s)
+ def tos(sym: Symbol): String = sym.decodedName
+ def memberNamed(s: String) = afterTyper(effectiveTp member newTermName(s))
+ def hasMethod(s: String) = memberNamed(s).isMethod
// XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the
// compiler to crash for reasons not yet known.
- def members = afterTyper((effectiveTp.nonPrivateMembers ++ anyMembers) filter (_.isPublic))
- def methods = members filter (_.isMethod)
- def packages = members filter (_.isPackage)
- def aliases = members filter (_.isAliasType)
+ def members = afterTyper((effectiveTp.nonPrivateMembers.toList ++ anyMembers) filter (_.isPublic))
+ def methods = members.toList filter (_.isMethod)
+ def packages = members.toList filter (_.isPackage)
+ def aliases = members.toList filter (_.isAliasType)
def memberNames = members map tos
def methodNames = methods map tos
@@ -63,6 +63,13 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
def aliasNames = aliases map tos
}
+ object NoTypeCompletion extends TypeMemberCompletion(NoType) {
+ override def memberNamed(s: String) = NoSymbol
+ override def members = Nil
+ override def follow(s: String) = None
+ override def alternativesFor(id: String) = Nil
+ }
+
object TypeMemberCompletion {
def apply(tp: Type, runtimeType: Type, param: NamedParam): TypeMemberCompletion = {
new TypeMemberCompletion(tp) {
@@ -90,7 +97,8 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
}
}
def apply(tp: Type): TypeMemberCompletion = {
- if (tp.typeSymbol.isPackageClass) new PackageCompletion(tp)
+ if (tp eq NoType) NoTypeCompletion
+ else if (tp.typeSymbol.isPackageClass) new PackageCompletion(tp)
else new TypeMemberCompletion(tp)
}
def imported(tp: Type) = new ImportCompletion(tp)
@@ -118,7 +126,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
debugging(tp + " completions ==> ")(filtered(memberNames))
override def follow(s: String): Option[CompletionAware] =
- debugging(tp + " -> '" + s + "' ==> ")(memberNamed(s) map (x => TypeMemberCompletion(x.tpe)))
+ debugging(tp + " -> '" + s + "' ==> ")(Some(TypeMemberCompletion(memberNamed(s).tpe)) filterNot (_ eq NoTypeCompletion))
override def alternativesFor(id: String): List[String] =
debugging(id + " alternatives ==> ") {
@@ -155,28 +163,29 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
object ids extends CompletionAware {
override def completions(verbosity: Int) = intp.unqualifiedIds ++ List("classOf") //, "_root_")
// now we use the compiler for everything.
- override def follow(id: String) = {
- if (completions(0) contains id) {
- intp typeOfExpression id map { tpe =>
- def default = TypeMemberCompletion(tpe)
-
- // only rebinding vals in power mode for now.
- if (!isReplPower) default
- else intp runtimeClassAndTypeOfTerm id match {
- case Some((clazz, runtimeType)) =>
- val sym = intp.symbolOfTerm(id)
- if (sym.isStable) {
- val param = new NamedParam.Untyped(id, intp valueOfTerm id getOrElse null)
- TypeMemberCompletion(tpe, runtimeType, param)
- }
- else default
- case _ =>
- default
+ override def follow(id: String): Option[CompletionAware] = {
+ if (!completions(0).contains(id))
+ return None
+
+ val tpe = intp typeOfExpression id
+ if (tpe == NoType)
+ return None
+
+ def default = Some(TypeMemberCompletion(tpe))
+
+ // only rebinding vals in power mode for now.
+ if (!isReplPower) default
+ else intp runtimeClassAndTypeOfTerm id match {
+ case Some((clazz, runtimeType)) =>
+ val sym = intp.symbolOfTerm(id)
+ if (sym.isStable) {
+ val param = new NamedParam.Untyped(id, intp valueOfTerm id getOrElse null)
+ Some(TypeMemberCompletion(tpe, runtimeType, param))
}
- }
+ else default
+ case _ =>
+ default
}
- else
- None
}
override def toString = "<repl ids> (%s)".format(completions(0).size)
}
@@ -186,14 +195,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
// literal Ints, Strings, etc.
object literals extends CompletionAware {
- def simpleParse(code: String): Tree = {
- val unit = new CompilationUnit(new util.BatchSourceFile("<console>", code))
- val scanner = new syntaxAnalyzer.UnitParser(unit)
- val tss = scanner.templateStatSeq(false)._2
-
- if (tss.size == 1) tss.head else EmptyTree
- }
-
+ def simpleParse(code: String): Tree = newUnitParser(code).templateStats().last
def completions(verbosity: Int) = Nil
override def follow(id: String) = simpleParse(id) match {
@@ -278,19 +280,6 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
if (parsed.isEmpty) xs map ("." + _) else xs
}
- // chasing down results which won't parse
- def execute(line: String): Option[ExecResult] = {
- val parsed = Parsed(line)
- def noDotOrSlash = line forall (ch => ch != '.' && ch != '/')
-
- if (noDotOrSlash) None // we defer all unqualified ids to the repl.
- else {
- (ids executionFor parsed) orElse
- (rootClass executionFor parsed) orElse
- (FileCompletion executionFor line)
- }
- }
-
// generic interface for querying (e.g. interpreter loop, testing)
def completions(buf: String): List[String] =
topLevelFor(Parsed.dotted(buf + ".", buf.length + 1))
@@ -354,23 +343,29 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
if (!looksLikeInvocation(buf)) None
else tryCompletion(Parsed.dotted(buf drop 1, cursor), lastResultFor)
- def regularCompletion = tryCompletion(mkDotted, topLevelFor)
- def fileCompletion =
- if (!looksLikePath(buf)) None
- else tryCompletion(mkUndelimited, FileCompletion completionsFor _.buffer)
-
- /** This is the kickoff point for all manner of theoretically possible compiler
- * unhappiness - fault may be here or elsewhere, but we don't want to crash the
- * repl regardless. Hopefully catching Exception is enough, but because the
- * compiler still throws some Errors it may not be.
+ def tryAll = (
+ lastResultCompletion
+ orElse tryCompletion(mkDotted, topLevelFor)
+ getOrElse Candidates(cursor, Nil)
+ )
+
+ /**
+ * This is the kickoff point for all manner of theoretically
+ * possible compiler unhappiness. The fault may be here or
+ * elsewhere, but we don't want to crash the repl regardless.
+ * The compiler makes it impossible to avoid catching Throwable
+ * with its unfortunate tendency to throw java.lang.Errors and
+ * AssertionErrors as the hats drop. We take two swings at it
+ * because there are some spots which like to throw an assertion
+ * once, then work after that. Yeah, what can I say.
*/
- try {
- (lastResultCompletion orElse regularCompletion orElse fileCompletion) getOrElse Candidates(cursor, Nil)
- }
- catch {
- case ex: Exception =>
- repldbg("Error: complete(%s, %s) provoked %s".format(buf, cursor, ex))
- Candidates(cursor, List(" ", "<completion error: " + ex.getMessage + ">"))
+ try tryAll
+ catch { case ex: Throwable =>
+ repldbg("Error: complete(%s, %s) provoked".format(buf, cursor) + ex)
+ Candidates(cursor,
+ if (isReplDebug) List("<error:" + ex + ">")
+ else Nil
+ )
}
}
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
index 2e3dc50..5fd5b41 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Stepan Koltsov
*/
@@ -22,9 +22,6 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
lazy val completion = _completion
lazy val history: JLineHistory = JLineHistory()
- lazy val keyBindings =
- try KeyBinding parse slurp(term.getDefaultBindings)
- catch { case _: Exception => Nil }
private def term = consoleReader.getTerminal()
def reset() = term.reset()
@@ -40,6 +37,9 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
}
class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper {
+ if ((history: History) ne NoHistory)
+ this setHistory history
+
// working around protected/trait/java insufficiencies.
def goBack(num: Int): Unit = back(num)
def readOneKey(prompt: String) = {
@@ -54,8 +54,6 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
// A hook for running code after the repl is done initializing.
lazy val postInit: Unit = {
this setBellEnabled false
- if (history ne NoHistory)
- this setHistory history
if (completion ne NoCompletion) {
val argCompletor: ArgumentCompleter =
diff --git a/src/compiler/scala/tools/nsc/interpreter/KeyBinding.scala b/src/compiler/scala/tools/nsc/interpreter/KeyBinding.scala
deleted file mode 100644
index a7ca3a7..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/KeyBinding.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-case class KeyBinding(name: String, code: Int, aliases: List[String], description: String) {
- def nameString = if (aliases.nonEmpty) aliases mkString ", " else name
- override def toString = "%3d %s: %s".format(code, nameString, description)
-}
-
-object KeyBinding {
- def parse(bindings: String): List[KeyBinding] = {
- def loop(xs: List[String]): List[KeyBinding] = {
- val (comment, lines) = xs span (_ startsWith "#")
- val description = comment map (_ drop 1 trim) mkString " "
- val (aliases, desc) = description span (_ != ':') match {
- case (x, y) => (
- x split ',' map (_.trim) toList,
- if (y == "") "" else y.tail.trim
- )
- }
- lines match {
- case Nil => Nil
- case hd :: tl =>
- val kb = (hd indexOf '=') match {
- case -1 => KeyBinding(hd, -1, aliases, desc)
- case idx => KeyBinding(hd drop idx + 1, hd take idx toInt, aliases, desc)
- }
- kb :: loop(tl)
- }
- }
- // This is verrrrrrrry specific to the current contents
- // of the keybindings.properties in jline.
- loop(bindings split "\\n" map (_.trim) dropWhile (_ != "") filterNot (_ == "") toList) sortBy (_.code)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Line.scala b/src/compiler/scala/tools/nsc/interpreter/Line.scala
deleted file mode 100644
index deaeb91..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/Line.scala
+++ /dev/null
@@ -1,108 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import java.util.concurrent.locks.ReentrantLock
-import scala.tools.nsc.util.Exceptional
-import Exceptional.unwrap
-import Line._
-
-/** Encapsulation of a single line in the repl. The concurrency
- * infrastructure arose to deal with signals so SIGINT could be
- * trapped without losing the repl session, but it will be useful
- * in ways beyond that. Each line obtains a thread and the repl
- * waits on a condition indicating that either the line has
- * completed or failed.
- */
-class Line[+T](val code: String, body: => T) {
- private var _state: State = Running
- private var _result: Any = null
- private var _caught: Throwable = null
- private val lock = new ReentrantLock()
- private val finished = lock.newCondition()
-
- private def withLock[T](body: => T) = {
- lock.lock()
- try body
- finally lock.unlock()
- }
- private def setState(state: State) = withLock {
- _state = state
- finished.signal()
- }
- // private because it should be called by the manager.
- private def cancel() = if (running) setState(Cancelled)
-
- // This is where the line thread is created and started.
- private val _thread = io.daemonize {
- try {
- _result = body
- setState(Done)
- }
- catch {
- case x =>
- _caught = x
- setState(Threw)
- }
- }
-
- def state = _state
- def thread = _thread
- def alive = thread.isAlive
- def runaway = !success && alive
- def success = _state == Done
- def running = _state == Running
-
- def caught() = { await() ; _caught }
- def get() = { await() ; _result }
- def await() = withLock { while (running) finished.await() }
-}
-
-object Line {
- // seconds to let a runaway thread live before calling stop()
- private val HUNTER_KILLER_DELAY = 5
-
- // A line opens in state Running, and will eventually
- // transition to Threw (an exception was caught), Cancelled
- // (the line was explicitly cancelled, presumably by SIGINT)
- // or Done (success).
- sealed abstract class State
- case object Running extends State
- case object Threw extends State
- case object Cancelled extends State
- case object Done extends State
-
- class Manager {
- /** Override to add behavior for runaway lines. This method will
- * be called if a line thread is still running five seconds after
- * it has been cancelled.
- */
- def onRunaway(line: Line[_]): Unit = ()
-
- private var _current: Option[Line[_]] = None
- def current = _current
-
- def clear() = {
- _current foreach (_.cancel())
- _current = None
- }
- def set[T](code: String)(body: => T) = {
- val line = new Line(code, body)
- _current = Some(line)
- line
- }
- def running = _current.isDefined
- def cancel() = {
- current foreach { line =>
- line.thread.interrupt()
- line.cancel()
- if (line.runaway)
- io.timer(HUNTER_KILLER_DELAY) { if (line.alive) onRunaway(line) }
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Logger.scala b/src/compiler/scala/tools/nsc/interpreter/Logger.scala
index d6cba5d..aeb25fc 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Logger.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Logger.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
index 9469baa..60325ec 100644
--- a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
@@ -1,16 +1,17 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
package interpreter
-import collection.{ mutable, immutable }
+import scala.collection.{ mutable, immutable }
import mutable.ListBuffer
+import scala.language.implicitConversions
class ProcessResult(val line: String) {
- import sys.process._
+ import scala.sys.process._
private val buffer = new ListBuffer[String]
val builder = Process(line)
diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
index 0d51c16..67519cf 100644
--- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -8,13 +8,14 @@ package interpreter
import scala.collection.{ mutable, immutable }
import scala.PartialFunction.cond
-import scala.reflect.NameTransformer
-import scala.tools.nsc.util.Chars
+import scala.reflect.internal.Chars
+import scala.reflect.internal.Flags._
+import scala.language.implicitConversions
trait MemberHandlers {
val intp: IMain
- import intp.{ Request, global, naming, atPickler }
+ import intp.{ Request, global, naming }
import global._
import naming._
@@ -64,14 +65,16 @@ trait MemberHandlers {
}
sealed abstract class MemberDefHandler(override val member: MemberDef) extends MemberHandler(member) {
+ def symbol = if (member.symbol eq null) NoSymbol else member.symbol
def name: Name = member.name
def mods: Modifiers = member.mods
def keyword = member.keyword
- def prettyName = NameTransformer.decode(name)
+ def prettyName = name.decode
override def definesImplicit = member.mods.isImplicit
override def definesTerm: Option[TermName] = Some(name.toTermName) filter (_ => name.isTermName)
override def definesType: Option[TypeName] = Some(name.toTypeName) filter (_ => name.isTypeName)
+ override def definedSymbols = if (symbol eq NoSymbol) Nil else List(symbol)
}
/** Class to handle one member among all the members included
@@ -89,6 +92,7 @@ trait MemberHandlers {
def importedNames = List[Name]()
def definedNames = definesTerm.toList ++ definesType.toList
def definedOrImported = definedNames ++ importedNames
+ def definedSymbols = List[Symbol]()
def extraCodeToEvaluate(req: Request): String = ""
def resultExtractionCode(req: Request): String = ""
@@ -112,15 +116,20 @@ trait MemberHandlers {
if (mods.isLazy) codegenln(false, "<lazy>")
else any2stringOf(req fullPath name, maxStringElements)
- """ + "%s: %s = " + %s""".format(prettyName, string2code(req typeOf name), resultString)
+ val vidString =
+ if (replProps.vids) """" + " @ " + "%%8x".format(System.identityHashCode(%s)) + " """.trim.format(req fullPath name)
+ else ""
+
+ """ + "%s%s: %s = " + %s""".format(string2code(prettyName), vidString, string2code(req typeOf name), resultString)
}
}
}
class DefHandler(member: DefDef) extends MemberDefHandler(member) {
private def vparamss = member.vparamss
- // true if 0-arity
- override def definesValue = vparamss.isEmpty || vparamss.head.isEmpty
+ private def isMacro = member.symbol hasFlag MACRO
+ // true if not a macro and 0-arity
+ override def definesValue = !isMacro && flattensToEmpty(vparamss)
override def resultExtractionCode(req: Request) =
if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else ""
}
@@ -138,8 +147,7 @@ trait MemberHandlers {
override def resultExtractionCode(req: Request) = {
val lhsType = string2code(req lookupTypeOf name)
val res = string2code(req fullPath name)
-
- """ + "%s: %s = " + %s + "\n" """.format(lhs, lhsType, res) + "\n"
+ """ + "%s: %s = " + %s + "\n" """.format(string2code(lhs.toString), lhsType, res) + "\n"
}
}
@@ -170,7 +178,7 @@ trait MemberHandlers {
class ImportHandler(imp: Import) extends MemberHandler(imp) {
val Import(expr, selectors) = imp
- def targetType = intp.typeOfExpression("" + expr)
+ def targetType: Type = intp.typeOfExpression("" + expr)
override def isLegalTopLevel = true
def createImportForName(name: Name): String = {
@@ -183,7 +191,7 @@ trait MemberHandlers {
// TODO: Need to track these specially to honor Predef masking attempts,
// because they must be the leading imports in the code generated for each
// line. We can use the same machinery as Contexts now, anyway.
- def isPredefImport = false // treeInfo.isPredefExpr(expr)
+ def isPredefImport = isReferenceToPredef(expr)
// wildcard imports, e.g. import foo._
private def selectorWild = selectors filter (_.name == nme.USCOREkw)
@@ -200,10 +208,10 @@ trait MemberHandlers {
def importedSymbols = individualSymbols ++ wildcardSymbols
lazy val individualSymbols: List[Symbol] =
- atPickler(targetType.toList flatMap (tp => individualNames map (tp nonPrivateMember _)))
+ beforePickler(individualNames map (targetType nonPrivateMember _))
lazy val wildcardSymbols: List[Symbol] =
- if (importsWildcard) atPickler(targetType.toList flatMap (_.nonPrivateMembers))
+ if (importsWildcard) beforePickler(targetType.nonPrivateMembers.toList)
else Nil
/** Complete list of names imported by a wildcard */
diff --git a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
index e92888d..eff0ef5 100644
--- a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -7,24 +7,27 @@ package scala.tools.nsc
package interpreter
import NamedParam._
+import scala.language.implicitConversions
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.{ClassTag, classTag}
trait NamedParamCreator {
protected def freshName: () => String
def apply(name: String, tpe: String, value: Any): NamedParam = NamedParamClass(name, tpe, value)
- def apply[T: Manifest](name: String, x: T): NamedParam = new Typed[T](name, x)
- def apply[T: Manifest](x: T): NamedParam = apply(freshName(), x)
+ def apply[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = new Typed[T](name, x)
+ def apply[T: ru.TypeTag : ClassTag](x: T): NamedParam = apply(freshName(), x)
def clazz(name: String, x: Any): NamedParam = new Untyped(name, x)
def clazz(x: Any): NamedParam = clazz(freshName(), x)
- implicit def namedValue[T: Manifest](name: String, x: T): NamedParam = apply(name, x)
- implicit def tuple[T: Manifest](pair: (String, T)): NamedParam = apply(pair._1, pair._2)
+ implicit def namedValue[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = apply(name, x)
+ implicit def tuple[T: ru.TypeTag : ClassTag](pair: (String, T)): NamedParam = apply(pair._1, pair._2)
}
object NamedParam extends NamedParamCreator {
- class Typed[T: Manifest](val name: String, val value: T) extends NamedParam {
- val tpe = TypeStrings.fromManifest[T]
+ class Typed[T: ru.TypeTag : ClassTag](val name: String, val value: T) extends NamedParam {
+ val tpe = TypeStrings.fromTag[T]
}
class Untyped(val name: String, val value: Any) extends NamedParam {
val tpe = TypeStrings.fromValue(value)
@@ -43,4 +46,4 @@ trait NamedParam {
def tpe: String
def value: Any
override def toString = name + ": " + tpe
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Naming.scala b/src/compiler/scala/tools/nsc/interpreter/Naming.scala
index 89868ab..0d03a86 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Naming.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Naming.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -11,16 +11,18 @@ package interpreter
*/
trait Naming {
def unmangle(str: String): String = {
+ val ESC = '\u001b'
val cleaned = removeIWPackages(removeLineWrapper(str))
- var ctrlChars = 0
- cleaned map { ch =>
- if (ch.isControl && !ch.isWhitespace) {
- ctrlChars += 1
- if (ctrlChars > 5) return "[line elided for control chars: possibly a scala signature]"
- else '?'
- }
- else ch
- }
+ // Looking to exclude binary data which hoses the terminal, but
+ // let through the subset of it we need, like whitespace and also
+ // <ESC> for ansi codes.
+ val binaryChars = cleaned count (ch => ch < 32 && !ch.isWhitespace && ch != ESC)
+ // Lots of binary chars - translate all supposed whitespace into spaces
+ if (binaryChars > 5)
+ cleaned map (ch => if (ch.isWhitespace) ' ' else if (ch < 32) '?' else ch)
+ // Not lots - preserve whitespace and ESC
+ else
+ cleaned map (ch => if (ch.isWhitespace || ch == ESC) ch else if (ch < 32) '?' else ch)
}
// The two name forms this is catching are the two sides of this assignment:
@@ -39,16 +41,17 @@ trait Naming {
private def removeIWPackages(s: String) = s.replaceAll("""\$iw[$.]""", "")
trait SessionNames {
- // All values are configurable by passing e.g. -Dscala.repl.naming.read=XXX
+ // All values are configurable by passing e.g. -Dscala.repl.name.read=XXX
final def propOr(name: String): String = propOr(name, "$" + name)
final def propOr(name: String, default: String): String =
- sys.props.getOrElse("scala.repl.naming." + name, default)
+ sys.props.getOrElse("scala.repl.name." + name, default)
// Prefixes used in repl machinery. Default to $line, $read, etc.
- def line = propOr("line")
- def read = propOr("read")
- def eval = propOr("eval")
- def print = propOr("print")
+ def line = propOr("line")
+ def read = propOr("read")
+ def eval = propOr("eval")
+ def print = propOr("print")
+ def result = propOr("result")
// The prefix for unnamed results: by default res0, res1, etc.
def res = propOr("res", "res") // INTERPRETER_VAR_PREFIX
@@ -83,7 +86,7 @@ trait Naming {
var x = 0
() => { x += 1 ; x }
}
- def freshUserVarName() = userVar()
+ def freshUserVarName() = userVar()
def freshInternalVarName() = internalVar()
def resetAllCreators() {
diff --git a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala
index 6eccf1e..b0be956 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/nsc/interpreter/Pasted.scala b/src/compiler/scala/tools/nsc/interpreter/Pasted.scala
index 8332135..f5db3d9 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Pasted.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Pasted.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/nsc/interpreter/Phased.scala b/src/compiler/scala/tools/nsc/interpreter/Phased.scala
index b3d3332..6389447 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Phased.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Phased.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -7,7 +7,7 @@ package scala.tools.nsc
package interpreter
import scala.collection.{ mutable, immutable }
-import immutable.SortedMap
+import scala.language.implicitConversions
/** Mix this into an object and use it as a phasing
* swiss army knife.
@@ -66,8 +66,7 @@ trait Phased {
try parseInternal(str)
catch { case _: Exception => NoPhaseName }
- def apply[T](body: => T): SortedMap[PhaseName, T] =
- SortedMap[PhaseName, T](atMap(PhaseName.all)(body): _*)
+ def apply[T](body: => T) = immutable.SortedMap[PhaseName, T](atMap(PhaseName.all)(body): _*)
def atCurrent[T](body: => T): T = atPhase(get)(body)
def multi[T](body: => T): Seq[T] = multi map (ph => at(ph)(body))
diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala
index 95b42eb..5e6bf88 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Power.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala
@@ -1,68 +1,54 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
package interpreter
-import scala.reflect.{ NameTransformer, AnyValManifest }
import scala.collection.{ mutable, immutable }
import scala.util.matching.Regex
-import scala.tools.nsc.util.{ BatchSourceFile }
+import scala.reflect.internal.util.{ BatchSourceFile }
import session.{ History }
import scala.io.Codec
import java.net.{ URL, MalformedURLException }
import io.{ Path }
+import scala.language.implicitConversions
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.{ClassTag, classTag}
-trait SharesGlobal {
- type GlobalType <: Global
- val global: GlobalType
-
- // This business gets really old:
- //
- // found : power.intp.global.Symbol
- // required: global.Symbol
- //
- // Have tried many ways to cast it aside, this is the current winner.
- // Todo: figure out a way to abstract over all the type members.
- type AnySymbol = Global#Symbol
- type AnyType = Global#Type
- type AnyName = Global#Name
- type AnyTree = Global#Tree
-
- type Symbol = global.Symbol
- type Type = global.Type
- type Name = global.Name
- type Tree = global.Tree
-
- implicit def upDependentSymbol(x: AnySymbol): Symbol = x.asInstanceOf[Symbol]
- implicit def upDependentType(x: AnyType): Type = x.asInstanceOf[Type]
- implicit def upDependentName(x: AnyName): Name = x.asInstanceOf[Name]
- implicit def upDependentTree(x: AnyTree): Tree = x.asInstanceOf[Tree]
+/** Collecting some power mode examples.
+
+scala> trait F[@specialized(Int) T] { def f: T = ??? }
+defined trait F
+
+scala> trait G[@specialized(Long, Int) T] extends F[T] { override def f: T = super.f }
+defined trait G
+
+scala> changesAfterEachPhase(intp("G").info.members filter (_.name.toString contains "super")) >
+Gained after 1/parser {
+ method super$f
}
-object Power {
- def apply(intp: IMain): Power = apply(null, intp)
- def apply(repl: ILoop): Power = apply(repl, repl.intp)
- def apply(repl: ILoop, intp: IMain): Power =
- new Power(repl, intp) {
- type GlobalType = intp.global.type
- final val global: intp.global.type = intp.global
- }
+Gained after 12/specialize {
+ method super$f$mcJ$sp
+ method super$f$mcI$sp
}
+Lost after 18/flatten {
+ method super$f$mcJ$sp
+ method super$f$mcI$sp
+ method super$f
+}
+*/
+
/** A class for methods to be injected into the intp in power mode.
*/
-abstract class Power(
- val repl: ILoop,
- val intp: IMain
-) extends SharesGlobal {
- import intp.{
- beQuietDuring, typeOfExpression, getCompilerClass, getCompilerModule,
- interpret, parse
- }
- import global._
+class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, replVals: ReplValsImpl) {
+ import intp.{ beQuietDuring, typeOfExpression, interpret, parse }
+ import intp.global._
+ import definitions.{ compilerTypeFromTag, compilerSymbolFromTag}
+ import rootMirror.{ getClassIfDefined, getModuleIfDefined }
abstract class SymSlurper {
def isKeep(sym: Symbol): Boolean
@@ -76,7 +62,7 @@ abstract class Power(
def discarded = seen.size - keep.size
def members(x: Symbol): List[Symbol] =
- if (x.rawInfo.isComplete) x.info.members
+ if (x.rawInfo.isComplete) x.info.members.toList
else Nil
var lastCount = -1
@@ -107,10 +93,7 @@ abstract class Power(
}
}
- class PackageSlurper(pkgName: String) extends SymSlurper {
- val pkgSymbol = getCompilerModule(pkgName)
- val modClass = pkgSymbol.moduleClass
-
+ class PackageSlurper(packageClass: Symbol) extends SymSlurper {
/** Looking for dwindling returns */
def droppedEnough() = unseenHistory.size >= 4 && {
unseenHistory takeRight 4 sliding 2 forall { it =>
@@ -121,29 +104,37 @@ abstract class Power(
def isRecur(sym: Symbol) = true
def isIgnore(sym: Symbol) = sym.isAnonOrRefinementClass || (sym.name.toString contains "$mc")
- def isKeep(sym: Symbol) = sym.hasTransOwner(modClass)
+ def isKeep(sym: Symbol) = sym.hasTransOwner(packageClass)
def isFinished() = droppedEnough()
- def slurp() = apply(modClass)
+ def slurp() = {
+ if (packageClass.isPackageClass)
+ apply(packageClass)
+ else {
+ repldbg("Not a package class! " + packageClass)
+ Set()
+ }
+ }
}
private def customBanner = replProps.powerBanner.option flatMap (f => io.File(f).safeSlurp())
private def customInit = replProps.powerInitCode.option flatMap (f => io.File(f).safeSlurp())
def banner = customBanner getOrElse """
- |** Power User mode enabled - BEEP BOOP SPIZ **
+ |** Power User mode enabled - BEEP WHIR GYVE **
|** :phase has been set to 'typer'. **
|** scala.tools.nsc._ has been imported **
- |** global._ and definitions._ also imported **
- |** Try :help, vals.<tab>, power.<tab> **
+ |** global._, definitions._ also imported **
+ |** Try :help, :vals, power.<tab> **
""".stripMargin.trim
private def initImports = List(
"scala.tools.nsc._",
"scala.collection.JavaConverters._",
- "global.{ error => _, _ }",
+ "intp.global.{ error => _, _ }",
"definitions.{ getClass => _, _ }",
- "power.Implicits._",
- "power.rutil._"
+ "power.rutil._",
+ "replImplicits._",
+ "treedsl.CODE._"
)
def init = customInit match {
@@ -155,90 +146,106 @@ abstract class Power(
*/
def unleash(): Unit = beQuietDuring {
// First we create the ReplVals instance and bind it to $r
- intp.bind("$r", new ReplVals(repl))
+ intp.bind("$r", replVals)
// Then we import everything from $r.
intp interpret ("import " + intp.pathToTerm("$r") + "._")
// And whatever else there is to do.
init.lines foreach (intp interpret _)
}
+ def valsDescription: String = {
+ def to_str(m: Symbol) = "%12s %s".format(
+ m.decodedName, "" + elimRefinement(m.accessedOrSelf.tpe) stripPrefix "scala.tools.nsc.")
+
+ ( rutil.info[ReplValsImpl].membersDeclared
+ filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor)
+ sortBy (_.decodedName)
+ map to_str
+ mkString ("Name and type of values imported into the repl in power mode.\n\n", "\n", "")
+ )
+ }
trait LowPriorityInternalInfo {
- implicit def apply[T: Manifest] : InternalInfo[T] = new InternalInfo[T](None)
+ implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfo[T] = new InternalInfo[T](None)
}
object InternalInfo extends LowPriorityInternalInfo { }
- /** Todos...
- * translate manifest type arguments into applied types
- * customizable symbol filter (had to hardcode no-spec to reduce noise)
+ /** Now dealing with the problem of acidentally calling a method on Type
+ * when you're holding a Symbol and seeing the Symbol converted to the
+ * type of Symbol rather than the type of the thing represented by the
+ * symbol, by only implicitly installing one method, "?", and the rest
+ * of the conveniences exist on that wrapper.
*/
- class InternalInfo[T: Manifest](value: Option[T] = None) {
- // Decided it was unwise to have implicit conversions via commonly
- // used type/symbol methods, because it's too easy to e.g. call
- // "x.tpe" where x is a Type, and rather than failing you get the
- // Type representing Types#Type (or Manifest, or whatever.)
- private def tpe = tpe_
- private def symbol = symbol_
- private def name = name_
-
- // Would love to have stuff like existential types working,
- // but very unfortunately those manifests just stuff the relevant
- // information into the toString method. Boo.
- private def manifestToType(m: Manifest[_]): Type = m match {
- case x: AnyValManifest[_] =>
- getCompilerClass("scala." + x).tpe
- case _ =>
- val name = m.erasure.getName
- if (name endsWith "$") getCompilerModule(name dropRight 1).tpe
- else {
- val sym = getCompilerClass(name)
- val args = m.typeArguments
-
- if (args.isEmpty) sym.tpe
- else typeRef(NoPrefix, sym, args map manifestToType)
- }
- }
+ trait LowPriorityInternalInfoWrapper {
+ implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None)
+ }
+ object InternalInfoWrapper extends LowPriorityInternalInfoWrapper {
- def symbol_ : Symbol = getCompilerClass(erasure.getName)
- def tpe_ : Type = manifestToType(man)
- def name_ : Name = symbol.name
- def companion = symbol.companionSymbol
- def info = symbol.info
- def module = symbol.moduleClass
- def owner = symbol.owner
- def owners = symbol.ownerChain drop 1
- def defn = symbol.defString
-
- def declares = members filter (_.owner == symbol)
- def inherits = members filterNot (_.owner == symbol)
- def types = members filter (_.name.isTypeName)
- def methods = members filter (_.isMethod)
- def overrides = declares filter (_.isOverride)
- def inPackage = owners find (x => x.isPackageClass || x.isPackage) getOrElse definitions.RootPackage
-
- def man = manifest[T]
- def erasure = man.erasure
- def members = tpe.members filterNot (_.name.toString contains "$mc")
- def allMembers = tpe.members
- def bts = info.baseTypeSeq.toList
- def btsmap = bts map (x => (x, x.decls.toList)) toMap
- def pkgName = Option(erasure.getPackage) map (_.getName)
- def pkg = pkgName map getCompilerModule getOrElse NoSymbol
- def pkgmates = pkg.tpe.members
- def pkgslurp = pkgName match {
- case Some(name) => new PackageSlurper(name) slurp()
- case _ => Set()
- }
- def ? = this
+ }
+ class InternalInfoWrapper[T: ru.TypeTag : ClassTag](value: Option[T] = None) {
+ def ? : InternalInfo[T] = new InternalInfo[T](value)
+ }
- def whoHas(name: String) = bts filter (_.decls exists (_.name.toString == name))
- def <:<[U: Manifest](other: U) = tpe <:< InternalInfo[U].tpe
- def lub[U: Manifest](other: U) = global.lub(List(tpe, InternalInfo[U].tpe))
- def glb[U: Manifest](other: U) = global.glb(List(tpe, InternalInfo[U].tpe))
+ /** Todos...
+ * translate tag type arguments into applied types
+ * customizable symbol filter (had to hardcode no-spec to reduce noise)
+ */
+ class InternalInfo[T](value: Option[T] = None)(implicit typeEvidence: ru.TypeTag[T], runtimeClassEvidence: ClassTag[T]) {
+ private def newInfo[U: ru.TypeTag : ClassTag](value: U): InternalInfo[U] = new InternalInfo[U](Some(value))
+ private def isSpecialized(s: Symbol) = s.name.toString contains "$mc"
+ private def isImplClass(s: Symbol) = s.name.toString endsWith "$class"
+
+ /** Standard noise reduction filter. */
+ def excludeMember(s: Symbol) = (
+ isSpecialized(s)
+ || isImplClass(s)
+ || s.isAnonOrRefinementClass
+ || s.isAnonymousFunction
+ )
+ def symbol = compilerSymbolFromTag(tag)
+ def tpe = compilerTypeFromTag(tag)
+ def name = symbol.name
+ def companion = symbol.companionSymbol
+ def info = symbol.info
+ def moduleClass = symbol.moduleClass
+ def owner = symbol.owner
+ def owners = symbol.ownerChain drop 1
+ def signature = symbol.defString
+
+ def decls = info.decls
+ def declsOverride = membersDeclared filter (_.isOverride)
+ def declsOriginal = membersDeclared filterNot (_.isOverride)
+
+ def members = membersUnabridged filterNot excludeMember
+ def membersUnabridged = tpe.members.toList
+ def membersDeclared = members filterNot excludeMember
+ def membersInherited = members filterNot (membersDeclared contains _)
+ def memberTypes = members filter (_.name.isTypeName)
+ def memberMethods = members filter (_.isMethod)
+
+ def pkg = symbol.enclosingPackage
+ def pkgName = pkg.fullName
+ def pkgClass = symbol.enclosingPackageClass
+ def pkgMembers = pkg.info.members filterNot excludeMember
+ def pkgClasses = pkgMembers filter (s => s.isClass && s.isDefinedInPackage)
+ def pkgSymbols = new PackageSlurper(pkgClass).slurp() filterNot excludeMember
+
+ def tag = typeEvidence
+ def runtimeClass = runtimeClassEvidence.runtimeClass
+ def shortClass = runtimeClass.getName split "[$.]" last
+
+ def baseClasses = tpe.baseClasses
+ def baseClassDecls = mapFrom(baseClasses)(_.info.decls.toList.sortBy(_.name))
+ def ancestors = baseClasses drop 1
+ def ancestorDeclares(name: String) = ancestors filter (_.info member newTermName(name) ne NoSymbol)
+ def baseTypes = tpe.baseTypeSeq.toList
+
+ def <:<[U: ru.TypeTag : ClassTag](other: U) = tpe <:< newInfo(other).tpe
+ def lub[U: ru.TypeTag : ClassTag](other: U) = intp.global.lub(List(tpe, newInfo(other).tpe))
+ def glb[U: ru.TypeTag : ClassTag](other: U) = intp.global.glb(List(tpe, newInfo(other).tpe))
- def shortClass = erasure.getName split "[$.]" last
override def toString = value match {
case Some(x) => "%s (%s)".format(x, shortClass)
- case _ => erasure.getName
+ case _ => runtimeClass.getName
}
}
@@ -317,10 +324,11 @@ abstract class Power(
}
class RichReplString(s: String) {
+ // make an url out of the string
def u: URL = (
- if (s contains ":") new java.net.URL(s)
- else if (new java.io.File(s) exists) new java.io.File(s).toURI.toURL
- else new java.net.URL("http://" + s)
+ if (s contains ":") new URL(s)
+ else if (new JFile(s) exists) new JFile(s).toURI.toURL
+ else new URL("http://" + s)
)
}
class RichInputStream(in: InputStream)(implicit codec: Codec) {
@@ -328,11 +336,20 @@ abstract class Power(
def slurp(): String = io.Streamable.slurp(in)
def <<(): String = slurp()
}
+ class RichReplURL(url: URL)(implicit codec: Codec) {
+ def slurp(): String = io.Streamable.slurp(url)
+ }
+ class RichSymbolList(syms: List[Symbol]) {
+ def sigs = syms map (_.defString)
+ def infos = syms map (_.info)
+ }
- protected trait Implicits1 {
+ trait Implicits1 {
// fallback
implicit def replPrinting[T](x: T)(implicit pretty: Prettifier[T] = Prettifier.default[T]) =
new SinglePrettifierClass[T](x)
+
+ implicit def liftToTypeName(s: String): TypeName = newTypeName(s)
}
trait Implicits2 extends Implicits1 {
class RichSymbol(sym: Symbol) {
@@ -345,26 +362,31 @@ abstract class Power(
else if (s1 isLess s2) -1
else 1
}
- implicit lazy val powerNameOrdering: Ordering[Name] = Ordering[String] on (_.toString)
implicit lazy val powerSymbolOrdering: Ordering[Symbol] = Ordering[Name] on (_.name)
implicit lazy val powerTypeOrdering: Ordering[Type] = Ordering[Symbol] on (_.typeSymbol)
- implicit def replInternalInfo[T: Manifest](x: T): InternalInfo[T] = new InternalInfo[T](Some(x))
+ implicit def replInternalInfo[T: ru.TypeTag : ClassTag](x: T): InternalInfoWrapper[T] = new InternalInfoWrapper[T](Some(x))
implicit def replEnhancedStrings(s: String): RichReplString = new RichReplString(s)
implicit def replMultiPrinting[T: Prettifier](xs: TraversableOnce[T]): MultiPrettifierClass[T] =
new MultiPrettifierClass[T](xs.toSeq)
implicit def replPrettifier[T] : Prettifier[T] = Prettifier.default[T]
implicit def replTypeApplication(sym: Symbol): RichSymbol = new RichSymbol(sym)
+
implicit def replInputStream(in: InputStream)(implicit codec: Codec) = new RichInputStream(in)
- implicit def replInputStreamURL(url: URL)(implicit codec: Codec) = new RichInputStream(url.openStream())
+ implicit def replEnhancedURLs(url: URL)(implicit codec: Codec): RichReplURL = new RichReplURL(url)(codec)
+
+ implicit def liftToTermName(s: String): TermName = newTermName(s)
+ implicit def replListOfSymbols(xs: List[Symbol]) = new RichSymbolList(xs)
}
- object Implicits extends Implicits2 { }
trait ReplUtilities {
- def module[T: Manifest] = getCompilerModule(manifest[T].erasure.getName stripSuffix "$")
- def clazz[T: Manifest] = getCompilerClass(manifest[T].erasure.getName)
- def info[T: Manifest] = InternalInfo[T]
- def ?[T: Manifest] = InternalInfo[T]
+ // [Eugene to Paul] needs review!
+ // def module[T: Manifest] = getModuleIfDefined(manifest[T].erasure.getName stripSuffix nme.MODULE_SUFFIX_STRING)
+ // def clazz[T: Manifest] = getClassIfDefined(manifest[T].erasure.getName)
+ def module[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isPackage)
+ def clazz[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isClass)
+ def info[T: ru.TypeTag : ClassTag] = InternalInfo[T]
+ def ?[T: ru.TypeTag : ClassTag] = InternalInfo[T]
def url(s: String) = {
try new URL(s)
catch { case _: MalformedURLException =>
@@ -387,17 +409,13 @@ abstract class Power(
}
lazy val rutil: ReplUtilities = new ReplUtilities { }
-
- lazy val phased: Phased = new Phased with SharesGlobal {
- type GlobalType = Power.this.global.type
- final val global: Power.this.global.type = Power.this.global
- }
+ lazy val phased: Phased = new { val global: intp.global.type = intp.global } with Phased { }
def context(code: String) = analyzer.rootContext(unit(code))
- def source(code: String) = new BatchSourceFile("<console>", code)
- def unit(code: String) = new CompilationUnit(source(code))
+ def source(code: String) = newSourceFile(code)
+ def unit(code: String) = newCompilationUnit(code)
def trees(code: String) = parse(code) getOrElse Nil
- def typeOf(id: String): Type = intp.typeOfExpression(id) getOrElse NoType
+ def typeOf(id: String) = intp.typeOfExpression(id)
override def toString = """
|** Power mode status **
diff --git a/src/compiler/scala/tools/nsc/interpreter/ProductCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/ProductCompletion.scala
deleted file mode 100644
index dc4582e..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/ProductCompletion.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-class SeqCompletion[T](elems: Seq[T]) extends CompletionAware {
- lazy val completions = elems.indices.toList map ("(%d)" format _)
- def completions(verbosity: Int) = completions
- private def elemAt(name: String) =
- if (completions contains name) Some(elems(name drop 1 dropRight 1 toInt)) else None
-
- override def execute(name: String) = elemAt(name)
- override def follow(name: String) = elemAt(name) map (x => ProductCompletion(x))
-}
-
-/** TODO - deal with non-case products by giving them _1 _2 etc. */
-class ProductCompletion(root: Product) extends CompletionAware {
- lazy val caseFields: List[Any] = root.productIterator.toList
- lazy val caseNames: List[String] = ByteCode caseParamNamesForPath root.getClass.getName getOrElse Nil
- private def isValid = caseFields.length == caseNames.length
-
- private def fieldForName(s: String) = (completions indexOf s) match {
- case idx if idx > -1 && isValid => Some(caseFields(idx))
- case _ => None
- }
-
- lazy val completions = caseNames
- def completions(verbosity: Int) = completions
- override def execute(name: String) = fieldForName(name)
- override def follow(name: String) = fieldForName(name) map (x => ProductCompletion(x))
-}
-
-object ProductCompletion {
- /** TODO: other traversables. */
- def apply(elem: Any): CompletionAware = elem match {
- case x: Seq[_] => new SeqCompletion[Any](x)
- case x: Product => new ProductCompletion(x)
- // case x: Map[_, _] =>
- case _ => CompletionAware.Empty
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala b/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
index c4a77e9..7cd0f43 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
@@ -1,11 +1,15 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
package interpreter
+import scala.util.control.ControlThrowable
+import util.Exceptional.unwrap
+import util.stackTraceString
+
trait ReplConfig {
lazy val replProps = new ReplProps
@@ -24,10 +28,32 @@ trait ReplConfig {
try Console println msg
catch { case x: AssertionError => Console.println("Assertion error printing debugging output: " + x) }
+ private[nsc] def repldbgex(ex: Throwable): Unit = {
+ if (isReplDebug) {
+ echo("Caught/suppressing: " + ex)
+ ex.printStackTrace
+ }
+ }
private[nsc] def repldbg(msg: => String) = if (isReplDebug) echo(msg)
private[nsc] def repltrace(msg: => String) = if (isReplTrace) echo(msg)
private[nsc] def replinfo(msg: => String) = if (isReplInfo) echo(msg)
+ private[nsc] def logAndDiscard[T](label: String, alt: => T): PartialFunction[Throwable, T] = {
+ case t: ControlThrowable => throw t
+ case t: Throwable =>
+ repldbg(label + ": " + unwrap(t))
+ repltrace(stackTraceString(unwrap(t)))
+ alt
+ }
+ private[nsc] def substituteAndLog[T](alt: => T)(body: => T): T =
+ substituteAndLog("" + alt, alt)(body)
+ private[nsc] def substituteAndLog[T](label: String, alt: => T)(body: => T): T = {
+ try body
+ catch logAndDiscard(label, alt)
+ }
+ private[nsc] def squashAndLog(label: String)(body: => Unit): Unit =
+ substituteAndLog(label, ())(body)
+
def isReplTrace: Boolean = replProps.trace
def isReplDebug: Boolean = replProps.debug || isReplTrace
def isReplInfo: Boolean = replProps.info || isReplDebug
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala
new file mode 100644
index 0000000..7c698a2
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala
@@ -0,0 +1,57 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import reporters._
+import typechecker.Analyzer
+
+/** A layer on top of Global so I can guarantee some extra
+ * functionality for the repl. It doesn't do much yet.
+ */
+trait ReplGlobal extends Global {
+ // This exists mostly because using the reporter too early leads to deadlock.
+ private def echo(msg: String) { Console println msg }
+
+ override def abort(msg: String): Nothing = {
+ echo("ReplGlobal.abort: " + msg)
+ super.abort(msg)
+ }
+
+ override lazy val analyzer = new {
+ val global: ReplGlobal.this.type = ReplGlobal.this
+ } with Analyzer {
+ override def newTyper(context: Context): Typer = new Typer(context) {
+ override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+ val res = super.typed(tree, mode, pt)
+ tree match {
+ case Ident(name) if !tree.symbol.hasPackageFlag && !name.toString.startsWith("$") =>
+ repldbg("typed %s: %s".format(name, res.tpe))
+ case _ =>
+ }
+ res
+ }
+ }
+ }
+
+ object replPhase extends SubComponent {
+ val global: ReplGlobal.this.type = ReplGlobal.this
+ val phaseName = "repl"
+ val runsAfter = List[String]("typer")
+ val runsRightAfter = None
+ def newPhase(_prev: Phase): StdPhase = new StdPhase(_prev) {
+ def apply(unit: CompilationUnit) {
+ repldbg("Running replPhase on " + unit.body)
+ // newNamer(rootContext(unit)).enterSym(unit.body)
+ }
+ }
+ }
+
+ override protected def computePhaseDescriptors: List[SubComponent] = {
+ addToPhasesSet(replPhase, "repl")
+ super.computePhaseDescriptors
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala b/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala
index 99489b7..bc3e7a1 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -7,26 +7,25 @@ package scala.tools.nsc
package interpreter
import scala.sys._
+import Prop._
class ReplProps {
private def bool(name: String) = BooleanProp.keyExists(name)
+ private def int(name: String) = IntProp(name)
val jlineDebug = bool("scala.tools.jline.internal.Log.debug")
val jlineTrace = bool("scala.tools.jline.internal.Log.trace")
- val noThreads = bool("scala.repl.no-threads")
val info = bool("scala.repl.info")
val debug = bool("scala.repl.debug")
val trace = bool("scala.repl.trace")
val power = bool("scala.repl.power")
- val replInitCode = Prop[JFile]("scala.repl.initcode")
- val powerInitCode = Prop[JFile]("scala.repl.power.initcode")
- val powerBanner = Prop[JFile]("scala.repl.power.banner")
-}
+ val replInitCode = Prop[JFile]("scala.repl.initcode")
+ val replAutorunCode = Prop[JFile]("scala.repl.autoruncode")
+ val powerInitCode = Prop[JFile]("scala.repl.power.initcode")
+ val powerBanner = Prop[JFile]("scala.repl.power.banner")
-object ReplPropsKludge {
- // !!! short term binary compatibility hack for 2.9.1 to put this
- // here - needed a not previously existing object.
- def noThreadCreation(settings: Settings) = replProps.noThreads || settings.Yreplsync.value
+ val vids = bool("scala.repl.vids")
+ val maxPrintString = int("scala.repl.maxprintstring")
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala b/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
index e181f98..b20166d 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -9,14 +9,26 @@ package interpreter
import reporters._
import IMain._
-class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, null, new ReplStrippingWriter(intp)) {
+/** Like ReplGlobal, a layer for ensuring extra functionality.
+ */
+class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.in, new ReplStrippingWriter(intp)) {
+ def printUntruncatedMessage(msg: String) = withoutTruncating(printMessage(msg))
+
override def printMessage(msg: String) {
// Avoiding deadlock if the compiler starts logging before
// the lazy val is complete.
if (intp.isInitializeComplete) {
- if (intp.totalSilence) ()
+ if (intp.totalSilence) {
+ if (isReplTrace)
+ super.printMessage("[silent] " + msg)
+ }
else super.printMessage(msg)
}
else Console.println("[init] " + msg)
}
+
+ override def displayPrompt() {
+ if (intp.totalSilence) ()
+ else super.displayPrompt()
+ }
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala b/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala
index 9c83db3..f8ecc6c 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -8,19 +8,9 @@ package interpreter
import scala.collection.{ mutable, immutable }
import scala.PartialFunction.cond
-import scala.reflect.NameTransformer
-import scala.tools.nsc.util.Chars
+import scala.reflect.internal.Chars
trait ReplStrings {
- // Longest common prefix
- def longestCommonPrefix(xs: List[String]): String = {
- if (xs.isEmpty || xs.contains("")) ""
- else xs.head.head match {
- case ch =>
- if (xs.tail forall (_.head == ch)) "" + ch + longestCommonPrefix(xs map (_.tail))
- else ""
- }
- }
/** Convert a string into code that can recreate the string.
* This requires replacing all special characters by escape
* codes. It does not add the surrounding " marks. */
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
index 2f2489b..53478bd 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
@@ -1,20 +1,83 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
package interpreter
-final class ReplVals(r: ILoop) {
- lazy val repl = r
- lazy val intp = r.intp
- lazy val power = r.power
- lazy val reader = r.in
- lazy val vals = this
- lazy val global = intp.global
- lazy val isettings = intp.isettings
- lazy val completion = reader.completion
- lazy val history = reader.history
- lazy val phased = power.phased
+import scala.language.implicitConversions
+import scala.reflect.api.{Universe => ApiUniverse}
+import scala.reflect.runtime.{universe => ru}
+
+/** A class which the repl utilizes to expose predefined objects.
+ * The base implementation is empty; the standard repl implementation
+ * is StdReplVals.
+ */
+abstract class ReplVals { }
+
+class StdReplVals(final val r: ILoop) extends ReplVals {
+ final lazy val repl = r
+ final lazy val intp = r.intp
+ final lazy val power = r.power
+ final lazy val reader = r.in
+ final lazy val vals = this
+ final lazy val global: intp.global.type = intp.global
+ final lazy val isettings = intp.isettings
+ final lazy val completion = reader.completion
+ final lazy val history = reader.history
+ final lazy val phased = power.phased
+ final lazy val analyzer = global.analyzer
+
+ object treedsl extends { val global: intp.global.type = intp.global } with ast.TreeDSL { }
+
+ final lazy val typer = analyzer.newTyper(
+ analyzer.rootContext(
+ power.unit("").asInstanceOf[analyzer.global.CompilationUnit]
+ )
+ )
+ def lastRequest = intp.lastRequest
+
+ class ReplImplicits extends power.Implicits2 {
+ import intp.global._
+
+ private val tagFn = ReplVals.mkCompilerTypeFromTag[intp.global.type](global)
+ implicit def mkCompilerTypeFromTag(sym: Symbol) = tagFn(sym)
+ }
+
+ final lazy val replImplicits = new ReplImplicits
+
+ def typed[T <: analyzer.global.Tree](tree: T): T = typer.typed(tree).asInstanceOf[T]
+}
+
+object ReplVals {
+ /** Latest attempt to work around the challenge of foo.global.Type
+ * not being seen as the same type as bar.global.Type even though
+ * the globals are the same. Dependent method types to the rescue.
+ */
+ def mkCompilerTypeFromTag[T <: Global](global: T) = {
+ import global._
+ import definitions._
+
+ /** We can't use definitions.compilerTypeFromTag directly because we're passing
+ * it to map and the compiler refuses to perform eta expansion on a method
+ * with a dependent return type. (Can this be relaxed?) To get around this
+ * I have this forwarder which widens the type and then cast the result back
+ * to the dependent type.
+ */
+ def compilerTypeFromTag(t: ApiUniverse # WeakTypeTag[_]): Global#Type =
+ definitions.compilerTypeFromTag(t)
+
+ class AppliedTypeFromTags(sym: Symbol) {
+ def apply[M](implicit m1: ru.TypeTag[M]): Type =
+ if (sym eq NoSymbol) NoType
+ else appliedType(sym, compilerTypeFromTag(m1).asInstanceOf[Type])
+
+ def apply[M1, M2](implicit m1: ru.TypeTag[M1], m2: ru.TypeTag[M2]): Type =
+ if (sym eq NoSymbol) NoType
+ else appliedType(sym, compilerTypeFromTag(m1).asInstanceOf[Type], compilerTypeFromTag(m2).asInstanceOf[Type])
+ }
+
+ (sym: Symbol) => new AppliedTypeFromTags(sym)
+ }
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Results.scala b/src/compiler/scala/tools/nsc/interpreter/Results.scala
index f582d47..e400906 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Results.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Results.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
diff --git a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala b/src/compiler/scala/tools/nsc/interpreter/RichClass.scala
index 5edc8fd..4371f7f 100644
--- a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/RichClass.scala
@@ -1,24 +1,29 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
package interpreter
+import scala.reflect.{ ClassTag, classTag }
+
class RichClass[T](val clazz: Class[T]) {
- def toManifest: Manifest[T] = Manifest.classType(clazz)
+ def toTag: ClassTag[T] = ClassTag[T](clazz)
def toTypeString: String = TypeStrings.fromClazz(clazz)
// Sadly isAnonymousClass does not return true for scala anonymous
// classes because our naming scheme is not doing well against the
// jvm's many assumptions.
- def isScalaAnonymous = clazz.isAnonymousClass || (clazz.getName contains "$anon$")
+ def isScalaAnonymous = (
+ try clazz.isAnonymousClass || (clazz.getName contains "$anon$")
+ catch { case _: java.lang.InternalError => false } // good ol' "Malformed class name"
+ )
/** It's not easy... to be... me... */
- def supermans: List[Manifest[_]] = supers map (_.toManifest)
- def superNames: List[String] = supers map (_.getName)
- def interfaces: List[JClass] = supers filter (_.isInterface)
+ def supermans: List[ClassTag[_]] = supers map (_.toTag)
+ def superNames: List[String] = supers map (_.getName)
+ def interfaces: List[JClass] = supers filter (_.isInterface)
def hasAncestorName(f: String => Boolean) = superNames exists f
def hasAncestor(f: JClass => Boolean) = supers exists f
diff --git a/src/compiler/scala/tools/nsc/interpreter/Runner.scala b/src/compiler/scala/tools/nsc/interpreter/Runner.scala
deleted file mode 100644
index f9f75da..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/Runner.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-object Runner {
- def main(args: Array[String]): Unit = new ILoop process args
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
index 992bef8..bccd815 100644
--- a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Stepan Koltsov
*/
@@ -18,7 +18,6 @@ extends InteractiveReader
{
val history = NoHistory
val completion = NoCompletion
- val keyBindings: List[KeyBinding] = Nil
def init() = ()
def reset() = ()
diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
index fc286b4..60399f5 100644
--- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -9,6 +9,146 @@ package interpreter
import java.lang.{ reflect => r }
import r.TypeVariable
import scala.reflect.NameTransformer
+import NameTransformer._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.{ClassTag, classTag}
+import typechecker.DestructureTypes
+import scala.reflect.internal.util.StringOps.ojoin
+import scala.language.implicitConversions
+
+/** A more principled system for turning types into strings.
+ */
+trait StructuredTypeStrings extends DestructureTypes {
+ val global: Global
+ import global._
+ import definitions._
+
+ case class LabelAndType(label: String, typeName: String) { }
+ object LabelAndType {
+ val empty = LabelAndType("", "")
+ }
+ case class Grouping(ldelim: String, mdelim: String, rdelim: String, labels: Boolean) {
+ def join(elems: String*): String = (
+ if (elems.isEmpty) ""
+ else elems.mkString(ldelim, mdelim, rdelim)
+ )
+ }
+ val NoGrouping = Grouping("", "", "", false)
+ val ListGrouping = Grouping("(", ", ", ")", false)
+ val ProductGrouping = Grouping("(", ", ", ")", true)
+ val ParamGrouping = Grouping("(", ", ", ")", true)
+ val BlockGrouping = Grouping(" { ", "; ", "}", false)
+
+ private implicit def lowerName(n: Name): String = "" + n
+ private def str(level: Int)(body: => String): String = " " * level + body
+ private def block(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = {
+ val l1 = str(level)(name + grouping.ldelim)
+ val l2 = nodes.map(_ show level + 1)
+ val l3 = str(level)(grouping.rdelim)
+
+ l1 +: l2 :+ l3 mkString "\n"
+ }
+ private def maybeBlock(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = {
+ import grouping._
+ val threshold = 70
+
+ val try1 = str(level)(name + grouping.join(nodes map (_.show(0, grouping.labels)): _*))
+ if (try1.length < threshold) try1
+ else block(level, grouping)(name, nodes)
+ }
+ private def shortClass(x: Any) = {
+ if (opt.debug) {
+ val name = (x.getClass.getName split '.').last
+ val isAnon = name.reverse takeWhile (_ != '$') forall (_.isDigit)
+ val str = if (isAnon) name else (name split '$').last
+
+ " // " + str
+ }
+ else ""
+ }
+
+ sealed abstract class TypeNode {
+ def grouping: Grouping
+ def nodes: List[TypeNode]
+
+ def show(indent: Int, showLabel: Boolean): String = maybeBlock(indent, grouping)(mkPrefix(showLabel), nodes)
+ def show(indent: Int): String = show(indent, true)
+ def show(): String = show(0)
+
+ def withLabel(l: String): this.type = modifyNameInfo(_.copy(label = l))
+ def withType(t: String): this.type = modifyNameInfo(_.copy(typeName = t))
+
+ def label = nameInfo.label
+ def typeName = nameInfo.typeName
+
+ protected def mkPrefix(showLabel: Boolean) = {
+ val pre = if (showLabel && label != "") label + " = " else ""
+ pre + typeName
+ }
+ override def toString = show() // + "(toString)"
+ private var nameInfo: LabelAndType = LabelAndType.empty
+ private def modifyNameInfo(f: LabelAndType => LabelAndType): this.type = {
+ nameInfo = f(nameInfo)
+ this
+ }
+ }
+ case class TypeAtom[T](atom: T) extends TypeNode {
+ def grouping = NoGrouping
+ def nodes = Nil
+ override protected def mkPrefix(showLabel: Boolean) =
+ super.mkPrefix(showLabel) + atom + shortClass(atom)
+ }
+ case class TypeProduct(nodes: List[TypeNode]) extends TypeNode {
+ def grouping: Grouping = ProductGrouping
+ def emptyTypeName = ""
+ override def typeName = if (nodes.isEmpty) emptyTypeName else super.typeName
+ }
+
+ /** For a NullaryMethod, in = TypeEmpty; for MethodType(Nil, _) in = TypeNil */
+ class NullaryFunction(out: TypeNode) extends TypeProduct(List(out)) {
+ override def typeName = "NullaryMethodType"
+ }
+ class MonoFunction(in: TypeNode, out: TypeNode) extends TypeProduct(List(in, out)) {
+ override def typeName = "MethodType"
+ }
+ class PolyFunction(in: TypeNode, out: TypeNode) extends TypeProduct(List(in, out)) {
+ override def typeName = "PolyType"
+ }
+
+ class TypeList(nodes: List[TypeNode]) extends TypeProduct(nodes) {
+ override def grouping = ListGrouping
+ override def emptyTypeName = "Nil"
+ override def typeName = "List"
+ }
+ class TypeScope(nodes: List[TypeNode]) extends TypeProduct(nodes) {
+ override def grouping = BlockGrouping
+ override def typeName = "Scope"
+ override def emptyTypeName = "EmptyScope"
+ }
+
+ object TypeEmpty extends TypeNode {
+ override def grouping = NoGrouping
+ override def nodes = Nil
+ override def label = ""
+ override def typeName = ""
+ override def show(indent: Int, showLabel: Boolean) = ""
+ }
+
+ object intoNodes extends DestructureType[TypeNode] {
+ def withLabel(node: TypeNode, label: String): TypeNode = node withLabel label
+ def withType(node: TypeNode, typeName: String): TypeNode = node withType typeName
+
+ def wrapEmpty = TypeEmpty
+ def wrapSequence(nodes: List[TypeNode]) = new TypeList(nodes)
+ def wrapProduct(nodes: List[TypeNode]) = new TypeProduct(nodes)
+ def wrapPoly(in: TypeNode, out: TypeNode) = new PolyFunction(in, out)
+ def wrapMono(in: TypeNode, out: TypeNode) = if (in == wrapEmpty) new NullaryFunction(out) else new MonoFunction(in, out)
+ def wrapAtom[U](value: U) = new TypeAtom(value)
+ }
+
+ def show(tp: Type): String = intoNodes(tp).show
+}
+
/** Logic for turning a type into a String. The goal is to be
* able to take some arbitrary object 'x' and obtain the most precise
@@ -36,17 +176,17 @@ trait TypeStrings {
} toMap
def scalaName(s: String): String = {
- if (s endsWith "$") s.init + ".type"
+ if (s endsWith MODULE_SUFFIX_STRING) s.init + ".type"
else if (s == "void") "scala.Unit"
else if (primitives(s)) "scala." + s.capitalize
- else primitiveMap.getOrElse(s, NameTransformer decode s)
+ else primitiveMap.getOrElse(s, NameTransformer.decode(s))
}
// Trying to put humpty dumpty back together again.
def scalaName(clazz: JClass): String = {
val name = clazz.getName
val isAnon = clazz.isScalaAnonymous
val enclClass = clazz.getEnclosingClass
- def enclPre = enclClass.getName + "$"
+ def enclPre = enclClass.getName + MODULE_SUFFIX_STRING
def enclMatch = name startsWith enclPre
scalaName(
@@ -54,8 +194,8 @@ trait TypeStrings {
else enclClass.getName + "." + (name stripPrefix enclPre)
)
}
- def scalaName(m: ClassManifest[_]): String = scalaName(m.erasure)
- def anyClass(x: Any): JClass = if (x == null) null else x.asInstanceOf[AnyRef].getClass
+ def scalaName(ct: ClassTag[_]): String = scalaName(ct.runtimeClass)
+ def anyClass(x: Any): JClass = if (x == null) null else x.getClass
private def brackets(tps: String*): String =
if (tps.isEmpty) ""
@@ -71,21 +211,30 @@ trait TypeStrings {
brackets(clazz.getTypeParameters map tvarString: _*)
}
- private def tparamString[T: Manifest] : String =
- brackets(manifest[T].typeArguments map (m => tvarString(List(m.erasure))): _*)
+ private def tparamString[T: ru.TypeTag] : String = {
+ def typeArguments: List[ru.Type] = {
+ import ru.TypeRefTag // otherwise the pattern match will be unchecked
+ // because TypeRef is an abstract type
+ ru.typeOf[T] match { case ru.TypeRef(_, _, args) => args; case _ => Nil }
+ }
+ // [Eugene to Paul] need to use not the `rootMirror`, but a mirror with the REPL's classloader
+ // how do I get to it? acquiring context classloader seems unreliable because of multithreading
+ def typeVariables: List[java.lang.Class[_]] = typeArguments map (targ => ru.rootMirror.runtimeClass(targ))
+ brackets(typeArguments map (jc => tvarString(List(jc))): _*)
+ }
/** Going for an overabundance of caution right now. Later these types
- * can be a lot more precise, but right now the manifests have a habit of
+ * can be a lot more precise, but right now the tags have a habit of
* introducing material which is not syntactically valid as scala source.
* When this happens it breaks the repl. It would be nice if we mandated
- * that manifest toString methods (or some other method, since it's bad
+ * that tag toString methods (or some other method, since it's bad
* practice to rely on toString for correctness) generated the VALID string
* representation of the type.
*/
- def fromTypedValue[T: Manifest](x: T): String = fromManifest[T]
- def fromValue(value: Any): String = if (value == null) "Null" else fromClazz(anyClass(value))
- def fromClazz(clazz: JClass): String = scalaName(clazz) + tparamString(clazz)
- def fromManifest[T: Manifest] : String = scalaName(manifest[T].erasure) + tparamString[T]
+ def fromTypedValue[T: ru.TypeTag : ClassTag](x: T): String = fromTag[T]
+ def fromValue(value: Any): String = if (value == null) "Null" else fromClazz(anyClass(value))
+ def fromClazz(clazz: JClass): String = scalaName(clazz) + tparamString(clazz)
+ def fromTag[T: ru.TypeTag : ClassTag] : String = scalaName(classTag[T].runtimeClass) + tparamString[T]
/** Reducing fully qualified noise for some common packages.
*/
@@ -111,4 +260,4 @@ trait TypeStrings {
)
}
-object TypeStrings extends TypeStrings { }
\ No newline at end of file
+object TypeStrings extends TypeStrings { }
diff --git a/src/compiler/scala/tools/nsc/interpreter/XMLCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/XMLCompletion.scala
deleted file mode 100644
index 9979814..0000000
--- a/src/compiler/scala/tools/nsc/interpreter/XMLCompletion.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import xml.{ XML, Group, Node, NodeSeq }
-import XMLCompletion._
-import scala.collection.{ mutable, immutable }
-
-class XMLCompletion(root: Node) extends CompletionAware {
- private val nodeCache = new mutable.HashMap[String, Node]
- private def getNode(s: String): Option[Node] = {
- completions // make sure cache is populated
- nodeCache get s
- }
-
- lazy val completions: List[String] = {
- def children = root.child.toList
- def uniqueTags = children groupBy (_.label) filter (_._2.size == 1) map (_._1)
- val uniqs = uniqueTags.toList
-
- children.foldLeft(List[String]())((res, node) => {
- val name = node.label
- def count = res filter (_ startsWith (name + "[")) size // ]
- val suffix = if (uniqs contains name) "" else "[%d]" format (count + 1)
- val s = name + suffix
-
- nodeCache(s) = node
-
- s :: res
- }).sorted
- }
- def completions(verbosity: Int) = completions
-
- override def execute(id: String) = getNode(id)
- override def follow(id: String) = getNode(id) map (x => new XMLCompletion(x))
-}
-
-object XMLCompletion {
- def apply(x: Node) = new XMLCompletion(x)
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala
index e78e92c..e3440c9 100644
--- a/src/compiler/scala/tools/nsc/interpreter/package.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/package.scala
@@ -1,10 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
+import scala.language.implicitConversions
+
/** The main REPL related classes and values are as follows.
* In addition to standard compiler classes Global and Settings, there are:
*
@@ -33,8 +35,10 @@ package object interpreter extends ReplConfig with ReplStrings {
val IR = Results
+ implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
+
private[interpreter] implicit def javaCharSeqCollectionToScala(xs: JCollection[_ <: CharSequence]): List[String] = {
- import collection.JavaConverters._
+ import scala.collection.JavaConverters._
xs.asScala.toList map ("" + _)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala b/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
index e238bdf..dddfb1b 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/History.scala b/src/compiler/scala/tools/nsc/interpreter/session/History.scala
index 5e1aba2..daa05b8 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/History.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/session/History.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala b/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala
index a091823..18e0ee7 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -45,11 +45,5 @@ object JLineHistory {
override def toString = "History(size = " + size + ", index = " + index + ")"
}
- def apply(): JLineHistory =
- try { new JLineFileHistory }
- catch { case x: Exception =>
- Console.println("Error creating file history: memory history only. " + x)
- util.Exceptional(x).show()
- new SimpleHistory()
- }
-}
\ No newline at end of file
+ def apply(): JLineHistory = try new JLineFileHistory catch { case x: Exception => new SimpleHistory() }
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala
index 2817353..9f4e2b9 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/package.scala b/src/compiler/scala/tools/nsc/interpreter/session/package.scala
index 8fbba2f..c62cf21 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/package.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/session/package.scala
@@ -1,10 +1,11 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
package interpreter
+import scala.language.implicitConversions
/** Files having to do with the state of a repl session:
* lines of text entered, types and terms defined, etc.
diff --git a/src/compiler/scala/tools/nsc/io/AbstractFile.scala b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
deleted file mode 100644
index 499af54..0000000
--- a/src/compiler/scala/tools/nsc/io/AbstractFile.scala
+++ /dev/null
@@ -1,254 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-
-package scala.tools.nsc
-package io
-
-import java.io.{ FileOutputStream, IOException, InputStream, OutputStream, BufferedOutputStream }
-import java.net.URL
-import scala.collection.mutable.ArrayBuffer
-
-/**
- * @author Philippe Altherr
- * @version 1.0, 23/03/2004
- */
-object AbstractFile {
- /** Returns "getFile(new File(path))". */
- def getFile(path: String): AbstractFile = getFile(File(path))
- def getFile(path: Path): AbstractFile = getFile(path.toFile)
-
- /**
- * If the specified File exists and is a regular file, returns an
- * abstract regular file backed by it. Otherwise, returns <code>null</code>.
- */
- def getFile(file: File): AbstractFile =
- if (file.isFile) new PlainFile(file) else null
-
- /** Returns "getDirectory(new File(path))". */
- def getDirectory(path: Path): AbstractFile = getDirectory(path.toFile)
-
- /**
- * If the specified File exists and is either a directory or a
- * readable zip or jar archive, returns an abstract directory
- * backed by it. Otherwise, returns <code>null</code>.
- *
- * @param file ...
- * @return ...
- */
- def getDirectory(file: File): AbstractFile =
- if (file.isDirectory) new PlainFile(file)
- else if (file.isFile && Path.isExtensionJarOrZip(file.jfile)) ZipArchive fromFile file
- else null
-
- /**
- * If the specified URL exists and is a readable zip or jar archive,
- * returns an abstract directory backed by it. Otherwise, returns
- * <code>null</code>.
- *
- * @param file ...
- * @return ...
- */
- def getURL(url: URL): AbstractFile = {
- if (url == null || !Path.isExtensionJarOrZip(url.getPath)) null
- else ZipArchive fromURL url
- }
-}
-
-/**
- * <p>
- * This class and its children serve to unify handling of files and
- * directories. These files and directories may or may not have some
- * real counter part within the file system. For example, some file
- * handles reference files within a zip archive or virtual ones
- * that exist only in memory.
- * </p>
- * <p>
- * Every abstract file has a path (i.e. a full name) and a name
- * (i.e. a short name) and may be backed by some real File. There are
- * two different kinds of abstract files: regular files and
- * directories. Regular files may be read and have a last modification
- * time. Directories may list their content and look for subfiles with
- * a specified name or path and of a specified kind.
- * </p>
- * <p>
- * The interface does <b>not</b> allow to access the content.
- * The class <code>symtab.classfile.AbstractFileReader</code> accesses
- * bytes, knowing that the character set of classfiles is UTF-8. For
- * all other cases, the class <code>SourceFile</code> is used, which honors
- * <code>global.settings.encoding.value</code>.
- * </p>
- */
-abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
-
- /** Returns the name of this abstract file. */
- def name: String
-
- /** Returns the path of this abstract file. */
- def path: String
-
- /** Checks extension case insensitively. */
- def hasExtension(other: String) = extension == other.toLowerCase
- private lazy val extension: String = Path.extension(name)
-
- /** The absolute file, if this is a relative file. */
- def absolute: AbstractFile
-
- /** Returns the containing directory of this abstract file */
- def container : AbstractFile
-
- /** Returns the underlying File if any and null otherwise. */
- def file: JFile
-
- /** An underlying source, if known. Mostly, a zip/jar file. */
- def underlyingSource: Option[AbstractFile] = None
-
- /** Does this abstract file denote an existing file? */
- def exists: Boolean = (file eq null) || file.exists
-
- /** Does this abstract file represent something which can contain classfiles? */
- def isClassContainer = isDirectory || (file != null && (extension == "jar" || extension == "zip"))
-
- /** Create a file on disk, if one does not exist already. */
- def create(): Unit
-
- /** Delete the underlying file or directory (recursively). */
- def delete(): Unit
-
- /** Is this abstract file a directory? */
- def isDirectory: Boolean
-
- /** Returns the time that this abstract file was last modified. */
- def lastModified: Long
-
- /** returns an input stream so the file can be read */
- def input: InputStream
-
- /** Returns an output stream for writing the file */
- def output: OutputStream
-
- /** Returns a buffered output stream for writing the file - defaults to out */
- def bufferedOutput: BufferedOutputStream = new BufferedOutputStream(output)
-
- /** size of this file if it is a concrete file. */
- def sizeOption: Option[Int] = None
-
- def toURL: URL = if (file == null) null else file.toURI.toURL
-
- /** Returns contents of file (if applicable) in a Char array.
- * warning: use <code>Global.getSourceFile()</code> to use the proper
- * encoding when converting to the char array.
- */
- @throws(classOf[IOException])
- def toCharArray = new String(toByteArray).toCharArray
-
- /** Returns contents of file (if applicable) in a byte array.
- */
- @throws(classOf[IOException])
- def toByteArray: Array[Byte] = {
- val in = input
- var rest = sizeOption.get
- val arr = new Array[Byte](rest)
- while (rest > 0) {
- val res = in.read(arr, arr.length - rest, rest)
- if (res == -1)
- throw new IOException("read error")
- rest -= res
- }
- in.close()
- arr
- }
-
- /** Returns all abstract subfiles of this abstract directory. */
- def iterator: Iterator[AbstractFile]
-
- /** Returns the abstract file in this abstract directory with the specified
- * name. If there is no such file, returns <code>null</code>. The argument
- * <code>directory</code> tells whether to look for a directory or
- * a regular file.
- */
- def lookupName(name: String, directory: Boolean): AbstractFile
-
- /** Returns an abstract file with the given name. It does not
- * check that it exists.
- */
- def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile
-
- /** Returns the abstract file in this abstract directory with the specified
- * path relative to it, If there is no such file, returns null. The argument
- * <code>directory</code> tells whether to look for a directory or a regular
- * file.
- *
- * @param path ...
- * @param directory ...
- * @return ...
- */
- def lookupPath(path: String, directory: Boolean): AbstractFile = {
- lookup((f, p, dir) => f.lookupName(p, dir), path, directory)
- }
-
- /** Return an abstract file that does not check that `path' denotes
- * an existing file.
- */
- def lookupPathUnchecked(path: String, directory: Boolean): AbstractFile = {
- lookup((f, p, dir) => f.lookupNameUnchecked(p, dir), path, directory)
- }
-
- private def lookup(getFile: (AbstractFile, String, Boolean) => AbstractFile,
- path0: String,
- directory: Boolean): AbstractFile = {
- val separator = java.io.File.separatorChar
- // trim trailing '/'s
- val path: String = if (path0.last == separator) path0 dropRight 1 else path0
- val length = path.length()
- assert(length > 0 && !(path.last == separator), path)
- var file = this
- var start = 0
- while (true) {
- val index = path.indexOf(separator, start)
- assert(index < 0 || start < index)
- val name = path.substring(start, if (index < 0) length else index)
- file = getFile(file, name, if (index < 0) directory else true)
- if ((file eq null) || index < 0) return file
- start = index + 1
- }
- file
- }
-
- private def fileOrSubdirectoryNamed(name: String, isDir: Boolean): AbstractFile = {
- val lookup = lookupName(name, isDir)
- if (lookup != null) lookup
- else {
- val jfile = new JFile(file, name)
- if (isDir) jfile.mkdirs() else jfile.createNewFile()
- new PlainFile(jfile)
- }
- }
-
- /**
- * Get the file in this directory with the given name,
- * creating an empty file if it does not already existing.
- */
- def fileNamed(name: String): AbstractFile = {
- assert(isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
- fileOrSubdirectoryNamed(name, false)
- }
-
- /**
- * Get the subdirectory with a given name, creating it if it
- * does not already exist.
- */
- def subdirectoryNamed(name: String): AbstractFile = {
- assert (isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
- fileOrSubdirectoryNamed(name, true)
- }
-
- protected def unsupported(): Nothing = unsupported(null)
- protected def unsupported(msg: String): Nothing = throw new UnsupportedOperationException(msg)
-
- /** Returns the path of this abstract file. */
- override def toString() = path
-
-}
diff --git a/src/compiler/scala/tools/nsc/io/ClassAndJarInfo.scala b/src/compiler/scala/tools/nsc/io/ClassAndJarInfo.scala
deleted file mode 100644
index 88efe1e..0000000
--- a/src/compiler/scala/tools/nsc/io/ClassAndJarInfo.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.net.URL
-import collection.JavaConverters._
-
-/** A convenience class for finding the jar with the bytecode for
- * a given Class object and similar common tasks.
- */
-class ClassAndJarInfo[T: ClassManifest] {
- val man = classManifest[T]
- def clazz = man.erasure
-
- def baseOfPath(path: String) = path indexOf '!' match {
- case -1 => path stripSuffix internalClassName
- case idx => path take idx
- }
-
- def classUrl = clazz getResource simpleClassName + ".class"
- def codeSource = protectionDomain.getCodeSource()
- def internalClassName = internalName + ".class"
- def internalName = clazz.getName.replace('.', '/')
- def jarManifest = new JManifest(jarManifestUrl.openStream())
- def jarManifestMainAttrs = jarManifest.getMainAttributes().asScala
- def jarManifestUrl = new URL(baseOfPath("" + classUrl) + "!/META-INF/MANIFEST.MF")
- def locationFile = File(locationUrl.toURI.getPath())
- def locationUrl = if (codeSource == null) new URL("file:///") else codeSource.getLocation()
- def protectionDomain = clazz.getProtectionDomain()
- def rootClasspath = rootPossibles find (_.exists)
- def rootFromLocation = Path(locationUrl.toURI.getPath())
- def rootFromResource = Path(baseOfPath(classUrl.getPath) stripPrefix "file:")
- def rootPossibles = Iterator(rootFromResource, rootFromLocation)
- def simpleClassName = clazz.getName split """[$.]""" last
-}
diff --git a/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala b/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala
index fa1218c..98c3d27 100644
--- a/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala
+++ b/src/compiler/scala/tools/nsc/io/DaemonThreadFactory.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/nsc/io/Directory.scala b/src/compiler/scala/tools/nsc/io/Directory.scala
deleted file mode 100644
index b4ceba6..0000000
--- a/src/compiler/scala/tools/nsc/io/Directory.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools.nsc
-package io
-
-object Directory {
- import scala.util.Properties.{ tmpDir, userHome, userDir }
-
- private def normalizePath(s: String) = Some(apply(Path(s).normalize))
- def Current: Option[Directory] = if (userDir == "") None else normalizePath(userDir)
- def Home: Option[Directory] = if (userHome == "") None else normalizePath(userHome)
- def TmpDir: Option[Directory] = if (tmpDir == "") None else normalizePath(tmpDir)
-
- def apply(path: Path): Directory = path.toDirectory
-
- // Like File.makeTemp but creates a directory instead
- def makeTemp(prefix: String = Path.randomPrefix, suffix: String = null, dir: JFile = null): Directory = {
- val path = File.makeTemp(prefix, suffix, dir)
- path.delete()
- path.createDirectory()
- }
-}
-import Path._
-
-/** An abstraction for directories.
- *
- * @author Paul Phillips
- * @since 2.8
- */
-class Directory(jfile: JFile) extends Path(jfile) {
- override def toAbsolute: Directory = if (isAbsolute) this else super.toAbsolute.toDirectory
- override def toDirectory: Directory = this
- override def toFile: File = new File(jfile)
- override def isValid = jfile.isDirectory() || !jfile.exists()
- override def normalize: Directory = super.normalize.toDirectory
-
- /** An iterator over the contents of this directory.
- */
- def list: Iterator[Path] =
- jfile.listFiles match {
- case null => Iterator.empty
- case xs => xs.iterator map Path.apply
- }
-
- def dirs: Iterator[Directory] = list collect { case x: Directory => x }
- def files: Iterator[File] = list collect { case x: File => x }
-
- override def walkFilter(cond: Path => Boolean): Iterator[Path] =
- list filter cond flatMap (_ walkFilter cond)
-
- def deepDirs: Iterator[Directory] = Path.onlyDirs(deepList())
- def deepFiles: Iterator[File] = Path.onlyFiles(deepList())
-
- /** If optional depth argument is not given, will recurse
- * until it runs out of contents.
- */
- def deepList(depth: Int = -1): Iterator[Path] =
- if (depth < 0) list ++ (dirs flatMap (_ deepList (depth)))
- else if (depth == 0) Iterator.empty
- else list ++ (dirs flatMap (_ deepList (depth - 1)))
-
- /** An iterator over the directories underneath this directory,
- * to the (optionally) given depth.
- */
- def subdirs(depth: Int = 1): Iterator[Directory] =
- deepList(depth) collect { case x: Directory => x }
-}
diff --git a/src/compiler/scala/tools/nsc/io/File.scala b/src/compiler/scala/tools/nsc/io/File.scala
deleted file mode 100644
index cc51249..0000000
--- a/src/compiler/scala/tools/nsc/io/File.scala
+++ /dev/null
@@ -1,191 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools.nsc
-package io
-
-import java.io.{
- FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
- BufferedInputStream, BufferedOutputStream, IOException, PrintStream, PrintWriter, Closeable => JCloseable }
-import java.nio.channels.{ Channel, FileChannel }
-import scala.io.Codec
-
-object File {
- def pathSeparator = java.io.File.pathSeparator
- def separator = java.io.File.separator
-
- def apply(path: Path)(implicit codec: Codec) = new File(path.jfile)(codec)
-
- // Create a temporary file, which will be deleted upon jvm exit.
- def makeTemp(prefix: String = Path.randomPrefix, suffix: String = null, dir: JFile = null) = {
- val jfile = java.io.File.createTempFile(prefix, suffix, dir)
- jfile.deleteOnExit()
- apply(jfile)
- }
-
- type HasClose = { def close(): Unit }
-
- def closeQuietly(target: HasClose) {
- try target.close() catch { case e: IOException => }
- }
- def closeQuietly(target: JCloseable) {
- try target.close() catch { case e: IOException => }
- }
-
- // this is a workaround for http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6503430
- // we are using a static initializer to statically initialize a java class so we don't
- // trigger java.lang.InternalErrors later when using it concurrently. We ignore all
- // the exceptions so as not to cause spurious failures when no write access is available,
- // e.g. google app engine.
- //
- // XXX need to put this behind a setting.
- //
- // try {
- // import Streamable.closing
- // val tmp = java.io.File.createTempFile("bug6503430", null, null)
- // try closing(new FileInputStream(tmp)) { in =>
- // val inc = in.getChannel()
- // closing(new FileOutputStream(tmp, true)) { out =>
- // out.getChannel().transferFrom(inc, 0, 0)
- // }
- // }
- // finally tmp.delete()
- // }
- // catch {
- // case _: IllegalArgumentException | _: IllegalStateException | _: IOException | _: SecurityException => ()
- // }
-}
-import File._
-import Path._
-
-/** An abstraction for files. For character data, a Codec
- * can be supplied at either creation time or when a method
- * involving character data is called (with the latter taking
- * precedence if supplied.) If neither is available, the value
- * of scala.io.Codec.default is used.
- *
- * @author Paul Phillips
- * @since 2.8
- */
-class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) with Streamable.Chars {
- override val creationCodec = constructorCodec
- def withCodec(codec: Codec): File = new File(jfile)(codec)
-
- override def addExtension(ext: String): File = super.addExtension(ext).toFile
- override def toAbsolute: File = if (isAbsolute) this else super.toAbsolute.toFile
- override def toDirectory: Directory = new Directory(jfile)
- override def toFile: File = this
- override def normalize: File = super.normalize.toFile
- override def isValid = jfile.isFile() || !jfile.exists()
- override def length = super[Path].length
- override def walkFilter(cond: Path => Boolean): Iterator[Path] =
- if (cond(this)) Iterator.single(this) else Iterator.empty
-
- /** Obtains an InputStream. */
- def inputStream() = new FileInputStream(jfile)
-
- /** Obtains a OutputStream. */
- def outputStream(append: Boolean = false) = new FileOutputStream(jfile, append)
- def bufferedOutput(append: Boolean = false) = new BufferedOutputStream(outputStream(append))
- def printStream(append: Boolean = false) = new PrintStream(outputStream(append), true)
-
- /** Obtains an OutputStreamWriter wrapped around a FileOutputStream.
- * This should behave like a less broken version of java.io.FileWriter,
- * in that unlike the java version you can specify the encoding.
- */
- def writer(): OutputStreamWriter = writer(false)
- def writer(append: Boolean): OutputStreamWriter = writer(append, creationCodec)
- def writer(append: Boolean, codec: Codec): OutputStreamWriter =
- new OutputStreamWriter(outputStream(append), codec.charSet)
-
- /** Wraps a BufferedWriter around the result of writer().
- */
- def bufferedWriter(): BufferedWriter = bufferedWriter(false)
- def bufferedWriter(append: Boolean): BufferedWriter = bufferedWriter(append, creationCodec)
- def bufferedWriter(append: Boolean, codec: Codec): BufferedWriter =
- new BufferedWriter(writer(append, codec))
-
- def printWriter(): PrintWriter = new PrintWriter(bufferedWriter(), true)
- def printWriter(append: Boolean): PrintWriter = new PrintWriter(bufferedWriter(append), true)
-
- /** Creates a new file and writes all the Strings to it. */
- def writeAll(strings: String*): Unit = {
- val out = bufferedWriter()
- try strings foreach (out write _)
- finally out close
- }
-
- def writeBytes(bytes: Array[Byte]): Unit = {
- val out = bufferedOutput()
- try out write bytes
- finally out close
- }
-
- def appendAll(strings: String*): Unit = {
- val out = bufferedWriter(append = true)
- try strings foreach (out write _)
- finally out.close()
- }
-
- /** Calls println on each string (so it adds a newline in the PrintWriter fashion.) */
- def printlnAll(strings: String*): Unit = {
- val out = printWriter()
- try strings foreach (out println _)
- finally out close
- }
-
- def safeSlurp(): Option[String] =
- try Some(slurp())
- catch { case _: IOException => None }
-
- def copyTo(destPath: Path, preserveFileDate: Boolean = false): Boolean = {
- val CHUNK = 1024 * 1024 * 16 // 16 MB
- val dest = destPath.toFile
- if (!isValid) fail("Source %s is not a valid file." format name)
- if (this.normalize == dest.normalize) fail("Source and destination are the same.")
- if (!dest.parent.exists) fail("Destination cannot be created.")
- if (dest.exists && !dest.canWrite) fail("Destination exists but is not writable.")
- if (dest.isDirectory) fail("Destination exists but is a directory.")
-
- lazy val in_s = inputStream()
- lazy val out_s = dest.outputStream()
- lazy val in = in_s.getChannel()
- lazy val out = out_s.getChannel()
-
- try {
- val size = in.size()
- var pos, count = 0L
- while (pos < size) {
- count = (size - pos) min CHUNK
- pos += out.transferFrom(in, pos, count)
- }
- }
- finally List[HasClose](out, out_s, in, in_s) foreach closeQuietly
-
- if (this.length != dest.length)
- fail("Failed to completely copy %s to %s".format(name, dest.name))
-
- if (preserveFileDate)
- dest.lastModified = this.lastModified
-
- true
- }
-
- /** Reflection since we're into the java 6+ API.
- */
- def setExecutable(executable: Boolean, ownerOnly: Boolean = true): Boolean = {
- type JBoolean = java.lang.Boolean
- val method =
- try classOf[JFile].getMethod("setExecutable", classOf[Boolean], classOf[Boolean])
- catch { case _: NoSuchMethodException => return false }
-
- try method.invoke(jfile, executable: JBoolean, ownerOnly: JBoolean).asInstanceOf[JBoolean].booleanValue
- catch { case _: Exception => false }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/io/FileOperationException.scala b/src/compiler/scala/tools/nsc/io/FileOperationException.scala
deleted file mode 100644
index f23658e..0000000
--- a/src/compiler/scala/tools/nsc/io/FileOperationException.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools.nsc
-package io
-
-case class FileOperationException(msg: String) extends RuntimeException(msg)
diff --git a/src/compiler/scala/tools/nsc/io/Fileish.scala b/src/compiler/scala/tools/nsc/io/Fileish.scala
index e12fced..7b4e385 100644
--- a/src/compiler/scala/tools/nsc/io/Fileish.scala
+++ b/src/compiler/scala/tools/nsc/io/Fileish.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala
index e8eab68..e919621 100644
--- a/src/compiler/scala/tools/nsc/io/Jar.scala
+++ b/src/compiler/scala/tools/nsc/io/Jar.scala
@@ -1,16 +1,17 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
package io
-import java.io.{ InputStream, OutputStream, IOException, FileNotFoundException, FileInputStream }
+import java.io.{ InputStream, OutputStream, IOException, FileNotFoundException, FileInputStream, DataOutputStream }
import java.util.jar._
-import collection.JavaConverters._
+import scala.collection.JavaConverters._
import Attributes.Name
import util.ClassPath
+import scala.language.implicitConversions
// Attributes.Name instances:
//
@@ -33,19 +34,31 @@ import util.ClassPath
// static Attributes.Name SPECIFICATION_VERSION
class Jar(file: File) extends Iterable[JarEntry] {
+ def this(jfile: JFile) = this(File(jfile))
def this(path: String) = this(File(path))
+
protected def errorFn(msg: String): Unit = Console println msg
lazy val jarFile = new JarFile(file.jfile)
lazy val manifest = withJarInput(s => Option(s.getManifest))
+
def mainClass = manifest map (f => f(Name.MAIN_CLASS))
+ /** The manifest-defined classpath String if available. */
+ def classPathString: Option[String] =
+ for (m <- manifest ; cp <- m.attrs get Name.CLASS_PATH) yield cp
+ def classPathElements: List[String] = classPathString match {
+ case Some(s) => s split "\\s+" toList
+ case _ => Nil
+ }
def withJarInput[T](f: JarInputStream => T): T = {
val in = new JarInputStream(file.inputStream())
try f(in)
finally in.close()
}
- def jarWriter() = new JarWriter(file)
+ def jarWriter(mainAttrs: (Attributes.Name, String)*) = {
+ new JarWriter(file, Jar.WManifest(mainAttrs: _*).underlying)
+ }
override def foreach[U](f: JarEntry => U): Unit = withJarInput { in =>
Iterator continually in.getNextJarEntry() takeWhile (_ != null) foreach f
@@ -60,26 +73,40 @@ class Jar(file: File) extends Iterable[JarEntry] {
override def toString = "" + file
}
-class JarWriter(file: File, val manifest: Manifest = new Manifest()) {
+class JarWriter(val file: File, val manifest: Manifest) {
private lazy val out = new JarOutputStream(file.outputStream(), manifest)
- def writeAllFrom(dir: Directory) = {
+
+ /** Adds a jar entry for the given path and returns an output
+ * stream to which the data should immediately be written.
+ * This unusual interface exists to work with fjbg.
+ */
+ def newOutputStream(path: String): DataOutputStream = {
+ val entry = new JarEntry(path)
+ out putNextEntry entry
+ new DataOutputStream(out)
+ }
+
+ def writeAllFrom(dir: Directory) {
try dir.list foreach (x => addEntry(x, ""))
finally out.close()
-
- file
}
- private def addFile(entry: File, prefix: String) {
- out putNextEntry new JarEntry(prefix + entry.name)
- try transfer(entry.inputStream(), out)
+ def addStream(entry: JarEntry, in: InputStream) {
+ out putNextEntry entry
+ try transfer(in, out)
finally out.closeEntry()
}
- private def addEntry(entry: Path, prefix: String) {
+ def addFile(file: File, prefix: String) {
+ val entry = new JarEntry(prefix + file.name)
+ addStream(entry, file.inputStream())
+ }
+ def addEntry(entry: Path, prefix: String) {
if (entry.isFile) addFile(entry.toFile, prefix)
else addDirectory(entry.toDirectory, prefix + entry.name + "/")
}
- private def addDirectory(entry: Directory, prefix: String) {
+ def addDirectory(entry: Directory, prefix: String) {
entry.list foreach (p => addEntry(p, prefix))
}
+
private def transfer(in: InputStream, out: OutputStream) = {
val buf = new Array[Byte](10240)
def loop(): Unit = in.read(buf, 0, buf.length) match {
@@ -88,9 +115,47 @@ class JarWriter(file: File, val manifest: Manifest = new Manifest()) {
}
loop
}
+
+ def close() = out.close()
}
object Jar {
+ type AttributeMap = java.util.Map[Attributes.Name, String]
+
+ object WManifest {
+ def apply(mainAttrs: (Attributes.Name, String)*): WManifest = {
+ val m = WManifest(new JManifest)
+ for ((k, v) <- mainAttrs)
+ m(k) = v
+
+ m
+ }
+ def apply(manifest: JManifest): WManifest = new WManifest(manifest)
+ implicit def unenrichManifest(x: WManifest): JManifest = x.underlying
+ }
+ class WManifest(manifest: JManifest) {
+ for ((k, v) <- initialMainAttrs)
+ this(k) = v
+
+ def underlying = manifest
+ def attrs = manifest.getMainAttributes().asInstanceOf[AttributeMap].asScala withDefaultValue null
+ def initialMainAttrs: Map[Attributes.Name, String] = {
+ import scala.util.Properties._
+ Map(
+ Name.MANIFEST_VERSION -> "1.0",
+ ScalaCompilerVersion -> versionNumberString
+ )
+ }
+
+ def apply(name: Attributes.Name): String = attrs(name)
+ def apply(name: String): String = apply(new Attributes.Name(name))
+ def update(key: Attributes.Name, value: String) = attrs.put(key, value)
+ def update(key: String, value: String) = attrs.put(new Attributes.Name(key), value)
+
+ def mainClass: String = apply(Name.MAIN_CLASS)
+ def mainClass_=(value: String) = update(Name.MAIN_CLASS, value)
+ }
+
// See http://download.java.net/jdk7/docs/api/java/nio/file/Path.html
// for some ideas.
private val ZipMagicNumber = List[Byte](80, 75, 3, 4)
@@ -100,10 +165,8 @@ object Jar {
def isJarOrZip(f: Path, examineFile: Boolean): Boolean =
f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f))
- def create(file: File, sourceDir: Directory, mainClass: String): File = {
- val writer = new Jar(file).jarWriter()
- writer.manifest(Name.MANIFEST_VERSION) = "1.0"
- writer.manifest(Name.MAIN_CLASS) = mainClass
+ def create(file: File, sourceDir: Directory, mainClass: String) {
+ val writer = new Jar(file).jarWriter(Name.MAIN_CLASS -> mainClass)
writer writeAllFrom sourceDir
}
}
diff --git a/src/compiler/scala/tools/nsc/io/Lexer.scala b/src/compiler/scala/tools/nsc/io/Lexer.scala
index 8f103f9..5ffb5b4 100644
--- a/src/compiler/scala/tools/nsc/io/Lexer.scala
+++ b/src/compiler/scala/tools/nsc/io/Lexer.scala
@@ -281,7 +281,7 @@ class Lexer(rd: Reader) {
/** The current token is a delimiter consisting of given character, reads next token,
* otherwise raises an error.
* @param c the given delimiter character to compare current token with
- * @throws MalformedInput if the the current token `token` is not a delimiter, or
+ * @throws MalformedInput if the current token `token` is not a delimiter, or
* consists of a character different from `c`.
*/
def accept(ch: Char) {
diff --git a/src/compiler/scala/tools/nsc/io/MsilFile.scala b/src/compiler/scala/tools/nsc/io/MsilFile.scala
new file mode 100644
index 0000000..2f0a71f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/io/MsilFile.scala
@@ -0,0 +1,18 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package io
+
+import ch.epfl.lamp.compiler.msil.{ Type => MsilType, _ }
+
+/** This class wraps an MsilType. It exists only so
+ * ClassPath can treat all of JVM/MSIL/bin/src files
+ * uniformly, as AbstractFiles.
+ */
+class MsilFile(val msilType: MsilType) extends VirtualFile(msilType.FullName, msilType.Namespace) {
+}
+
+object NoMsilFile extends MsilFile(null) { }
diff --git a/src/compiler/scala/tools/nsc/io/NullPrintStream.scala b/src/compiler/scala/tools/nsc/io/NullPrintStream.scala
deleted file mode 100644
index 52c7ddc..0000000
--- a/src/compiler/scala/tools/nsc/io/NullPrintStream.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.io.{ PrintStream, ByteArrayOutputStream }
-
-/** A sink for when you want to discard all output.
- */
-class NullPrintStream extends PrintStream(new ByteArrayOutputStream()) { }
-
-object NullPrintStream extends NullPrintStream {
- def setOut() = Console setOut this
- def setErr() = Console setErr this
- def setOutAndErr() = { setOut() ; setErr() }
- def sinkingOutAndErr[T](body: => T): T =
- Console.withOut(this) {
- Console.withErr(this) {
- body
- }
- }
-
- def sinkingSystemOutAndErr[T](body: => T): T = {
- val savedOut = System.out
- val savedErr = System.err
- System setOut NullPrintStream
- System setErr NullPrintStream
- try body
- finally {
- System setOut savedOut
- System setErr savedErr
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/io/Path.scala b/src/compiler/scala/tools/nsc/io/Path.scala
deleted file mode 100644
index f08edb1..0000000
--- a/src/compiler/scala/tools/nsc/io/Path.scala
+++ /dev/null
@@ -1,285 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.io.{
- FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
- BufferedInputStream, BufferedOutputStream, RandomAccessFile }
-import java.net.{ URI, URL }
-import scala.util.Random.alphanumeric
-
-/** An abstraction for filesystem paths. The differences between
- * Path, File, and Directory are primarily to communicate intent.
- * Since the filesystem can change at any time, there is no way to
- * reliably associate Files only with files and so on. Any Path
- * can be converted to a File or Directory (and thus gain access to
- * the additional entity specific methods) by calling toFile or
- * toDirectory, which has no effect on the filesystem.
- *
- * Also available are createFile and createDirectory, which attempt
- * to create the path in question.
- *
- * @author Paul Phillips
- * @since 2.8
- */
-
-object Path {
- def isExtensionJarOrZip(jfile: JFile): Boolean = isExtensionJarOrZip(jfile.getName)
- def isExtensionJarOrZip(name: String): Boolean = {
- val ext = extension(name)
- ext == "jar" || ext == "zip"
- }
- def extension(name: String): String = {
- var i = name.length - 1
- while (i >= 0 && name.charAt(i) != '.')
- i -= 1
-
- if (i < 0) ""
- else name.substring(i + 1).toLowerCase
- }
- def isJarOrZip(f: Path, examineFile: Boolean = true) = Jar.isJarOrZip(f, examineFile)
-
- // not certain these won't be problematic, but looks good so far
- implicit def string2path(s: String): Path = apply(s)
- implicit def jfile2path(jfile: JFile): Path = apply(jfile)
-
- // java 7 style, we don't use it yet
- // object AccessMode extends Enumeration("AccessMode") {
- // val EXECUTE, READ, WRITE = Value
- // }
- // def checkAccess(modes: AccessMode*): Boolean = {
- // modes foreach {
- // case EXECUTE => throw new Exception("Unsupported") // can't check in java 5
- // case READ => if (!jfile.canRead()) return false
- // case WRITE => if (!jfile.canWrite()) return false
- // }
- // true
- // }
-
- def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs filter (_.isDirectory) map (_.toDirectory)
- def onlyDirs(xs: List[Path]): List[Directory] = xs filter (_.isDirectory) map (_.toDirectory)
- def onlyFiles(xs: Iterator[Path]): Iterator[File] = xs filter (_.isFile) map (_.toFile)
- def onlyFiles(xs: List[Path]): List[File] = xs filter (_.isFile) map (_.toFile)
-
- def roots: List[Path] = java.io.File.listRoots().toList map Path.apply
-
- def apply(segments: Seq[String]): Path = apply(segments mkString java.io.File.separator)
- def apply(path: String): Path = apply(new JFile(path))
- def apply(jfile: JFile): Path =
- if (jfile.isFile) new File(jfile)
- else if (jfile.isDirectory) new Directory(jfile)
- else new Path(jfile)
-
- /** Avoiding any shell/path issues by only using alphanumerics. */
- private[io] def randomPrefix = alphanumeric take 6 mkString
- private[io] def fail(msg: String) = throw FileOperationException(msg)
-}
-import Path._
-
-/** The Path constructor is private so we can enforce some
- * semantics regarding how a Path might relate to the world.
- */
-class Path private[io] (val jfile: JFile) {
- val separator = java.io.File.separatorChar
- val separatorStr = java.io.File.separator
-
- // Validation: this verifies that the type of this object and the
- // contents of the filesystem are in agreement. All objects are
- // valid except File objects whose path points to a directory and
- // Directory objects whose path points to a file.
- def isValid: Boolean = true
-
- // conversions
- def toFile: File = new File(jfile)
- def toDirectory: Directory = new Directory(jfile)
- def toAbsolute: Path = if (isAbsolute) this else Path(jfile.getAbsolutePath())
- def toCanonical: Path = Path(jfile.getCanonicalPath())
- def toURI: URI = jfile.toURI()
- def toURL: URL = toURI.toURL()
- /** If this path is absolute, returns it: otherwise, returns an absolute
- * path made up of root / this.
- */
- def toAbsoluteWithRoot(root: Path) = if (isAbsolute) this else root.toAbsolute / this
-
- /** Creates a new Path with the specified path appended. Assumes
- * the type of the new component implies the type of the result.
- */
- def /(child: Path): Path = if (isEmpty) child else new Path(new JFile(jfile, child.path))
- def /(child: Directory): Directory = /(child: Path).toDirectory
- def /(child: File): File = /(child: Path).toFile
-
- /** If this path is a container, recursively iterate over its contents.
- * The supplied condition is a filter which is applied to each element,
- * with that branch of the tree being closed off if it is true. So for
- * example if the condition is true for some subdirectory, nothing
- * under that directory will be in the Iterator; but otherwise each
- * file and subdirectory underneath it will appear.
- */
- def walkFilter(cond: Path => Boolean): Iterator[Path] =
- if (isFile) toFile walkFilter cond
- else if (isDirectory) toDirectory walkFilter cond
- else Iterator.empty
-
- /** Equivalent to walkFilter(_ => false).
- */
- def walk: Iterator[Path] = walkFilter(_ => true)
-
- // identity
- def name: String = jfile.getName()
- def path: String = jfile.getPath()
- def normalize: Path = Path(jfile.getAbsolutePath())
- def isRootPath: Boolean = roots exists (_ isSame this)
-
- def resolve(other: Path) = if (other.isAbsolute || isEmpty) other else /(other)
- def relativize(other: Path) = {
- assert(isAbsolute == other.isAbsolute, "Paths not of same type: "+this+", "+other)
-
- def createRelativePath(baseSegs: List[String], otherSegs: List[String]) : String = {
- (baseSegs, otherSegs) match {
- case (b :: bs, o :: os) if b == o => createRelativePath(bs, os)
- case (bs, os) => ((".."+separator)*bs.length)+os.mkString(separatorStr)
- }
- }
-
- Path(createRelativePath(segments, other.segments))
- }
-
- // derived from identity
- def root: Option[Path] = roots find (this startsWith _)
- def segments: List[String] = (path split separator).toList filterNot (_.length == 0)
- /**
- * @return The path of the parent directory, or root if path is already root
- */
- def parent: Directory = path match {
- case "" | "." => Directory("..")
- case _ =>
- // the only solution <-- a comment which could have used elaboration
- if (segments.nonEmpty && segments.last == "..")
- (path / "..").toDirectory
- else jfile.getParent match {
- case null =>
- if (isAbsolute) toDirectory // it should be a root. BTW, don't need to worry about relative pathed root
- else Directory(".") // a dir under pwd
- case x =>
- Directory(x)
- }
- }
- def parents: List[Directory] = {
- val p = parent
- if (p isSame this) Nil else p :: p.parents
- }
- // if name ends with an extension (e.g. "foo.jpg") returns the extension ("jpg"), otherwise ""
- def extension: String = {
- var i = name.length - 1
- while (i >= 0 && name.charAt(i) != '.')
- i -= 1
-
- if (i < 0) ""
- else name.substring(i + 1)
- }
- // def extension: String = (name lastIndexOf '.') match {
- // case -1 => ""
- // case idx => name drop (idx + 1)
- // }
- // compares against extensions in a CASE INSENSITIVE way.
- def hasExtension(ext: String, exts: String*) = {
- val lower = extension.toLowerCase
- ext.toLowerCase == lower || exts.exists(_.toLowerCase == lower)
- }
- // returns the filename without the extension.
- def stripExtension: String = name stripSuffix ("." + extension)
- // returns the Path with the extension.
- def addExtension(ext: String): Path = Path(path + "." + ext)
- // changes the existing extension out for a new one
- def changeExtension(ext: String): Path = Path(path stripSuffix extension) addExtension ext
-
- // conditionally execute
- def ifFile[T](f: File => T): Option[T] = if (isFile) Some(f(toFile)) else None
- def ifDirectory[T](f: Directory => T): Option[T] = if (isDirectory) Some(f(toDirectory)) else None
-
- // Boolean tests
- def canRead = jfile.canRead()
- def canWrite = jfile.canWrite()
- def exists = jfile.exists()
- def notExists = try !jfile.exists() catch { case ex: SecurityException => false }
-
- def isFile = jfile.isFile()
- def isDirectory = jfile.isDirectory()
- def isAbsolute = jfile.isAbsolute()
- def isHidden = jfile.isHidden()
- def isSymlink = {
- val x = parent / name
- x.toCanonical != x.toAbsolute
- }
- def isEmpty = path.length == 0
-
- // Information
- def lastModified = jfile.lastModified()
- def lastModified_=(time: Long) = jfile setLastModified time // should use setXXX function?
- def length = jfile.length()
-
- // Boolean path comparisons
- def endsWith(other: Path) = segments endsWith other.segments
- def startsWith(other: Path) = segments startsWith other.segments
- def isSame(other: Path) = toCanonical == other.toCanonical
- def isFresher(other: Path) = lastModified > other.lastModified
-
- // creations
- def createDirectory(force: Boolean = true, failIfExists: Boolean = false): Directory = {
- val res = if (force) jfile.mkdirs() else jfile.mkdir()
- if (!res && failIfExists && exists) fail("Directory '%s' already exists." format name)
- else if (isDirectory) toDirectory
- else new Directory(jfile)
- }
- def createFile(failIfExists: Boolean = false): File = {
- val res = jfile.createNewFile()
- if (!res && failIfExists && exists) fail("File '%s' already exists." format name)
- else if (isFile) toFile
- else new File(jfile)
- }
-
- // deletions
- def delete() = jfile.delete()
- def deleteIfExists() = if (jfile.exists()) delete() else false
-
- /** Deletes the path recursively. Returns false on failure.
- * Use with caution!
- */
- def deleteRecursively(): Boolean = deleteRecursively(jfile)
- private def deleteRecursively(f: JFile): Boolean = {
- if (f.isDirectory) f.listFiles match {
- case null =>
- case xs => xs foreach deleteRecursively
- }
- f.delete()
- }
-
- def truncate() =
- isFile && {
- val raf = new RandomAccessFile(jfile, "rw")
- raf setLength 0
- raf.close()
- length == 0
- }
-
- def touch(modTime: Long = System.currentTimeMillis) = {
- createFile()
- if (isFile)
- lastModified = modTime
- }
-
- // todo
- // def copyTo(target: Path, options ...): Boolean
- // def moveTo(target: Path, options ...): Boolean
-
- override def toString() = path
- override def equals(other: Any) = other match {
- case x: Path => path == x.path
- case _ => false
- }
- override def hashCode() = path.hashCode()
-}
diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala
index fe9aa3b..b03a921 100644
--- a/src/compiler/scala/tools/nsc/io/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/io/Pickler.scala
@@ -1,15 +1,17 @@
package scala.tools.nsc.io
-import annotation.unchecked
+import scala.annotation.unchecked
import Lexer._
import java.io.Writer
+import scala.language.implicitConversions
+import scala.reflect.ClassTag
/** An abstract class for writing and reading Scala objects to and
* from a legible representation. The presesentation follows the following grammar:
* {{{
- * Pickled = `true' | `false' | `null' | NumericLit | StringLit |
- * Labelled | Pickled `,' Pickled
- * Labelled = StringLit `(' Pickled? `)'
+ * Pickled = `true` | `false` | `null` | NumericLit | StringLit |
+ * Labelled | Pickled `,` Pickled
+ * Labelled = StringLit `(` Pickled? `)`
* }}}
*
* All ...Lit classes are as in JSON. @see scala.tools.nsc.io.Lexer
@@ -165,17 +167,14 @@ object Pickler {
def pkl[T: Pickler] = implicitly[Pickler[T]]
/** A class represenenting `~`-pairs */
- case class ~[S, T](fst: S, snd: T)
+ case class ~[+S, +T](fst: S, snd: T)
/** A wrapper class to be able to use `~` s an infix method */
- class TildeDecorator[S](x: S) {
+ implicit class TildeDecorator[S](x: S) {
/** Infix method that forms a `~`-pair. */
def ~ [T](y: T): S ~ T = new ~ (x, y)
}
- /** An implicit wrapper that adds `~` as a method to any value. */
- implicit def tildeDecorator[S](x: S): TildeDecorator[S] = new TildeDecorator(x)
-
/** A converter from binary functions to functions over `~`-pairs
*/
implicit def fromTilde[T1, T2, R](f: (T1, T2) => R): T1 ~ T2 => R = { case x1 ~ x2 => f(x1, x2) }
@@ -246,7 +245,7 @@ object Pickler {
p.tryPickle(wr, x) || qq.tryPickle(wr, x)
def pickle(wr: Writer, x: T) =
require(tryPickle(wr, x),
- "no pickler found for "+x+" of class "+x.asInstanceOf[AnyRef].getClass.getName)
+ "no pickler found for "+x+" of class "+x.getClass.getName)
def unpickle(rd: Lexer) = p.unpickle(rd) orElse qq.unpickle(rd)
}
@@ -418,7 +417,7 @@ object Pickler {
iterPickler[T] .wrapped { Vector() ++ _ } { _.iterator } .labelled ("scala.Vector")
/** A pickler for array values */
- implicit def array[T : ClassManifest : Pickler]: Pickler[Array[T]] =
+ implicit def array[T : ClassTag : Pickler]: Pickler[Array[T]] =
iterPickler[T] .wrapped { _.toArray} { _.iterator } .labelled ("scala.Array")
}
diff --git a/src/compiler/scala/tools/nsc/io/PlainFile.scala b/src/compiler/scala/tools/nsc/io/PlainFile.scala
deleted file mode 100644
index 83b8cc3..0000000
--- a/src/compiler/scala/tools/nsc/io/PlainFile.scala
+++ /dev/null
@@ -1,101 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-
-package scala.tools.nsc
-package io
-
-import java.io.{ FileInputStream, FileOutputStream, IOException }
-import PartialFunction._
-
-object PlainFile {
- /**
- * If the specified File exists, returns an abstract file backed
- * by it. Otherwise, returns null.
- */
- def fromPath(file: Path): PlainFile =
- if (file.isDirectory) new PlainDirectory(file.toDirectory)
- else if (file.isFile) new PlainFile(file)
- else null
-}
-
-class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) {
- override def isDirectory = true
- override def iterator = givenPath.list filter (_.exists) map (x => new PlainFile(x))
- override def delete(): Unit = givenPath.deleteRecursively()
-}
-
-/** This class implements an abstract file backed by a File.
- */
-class PlainFile(val givenPath: Path) extends AbstractFile {
- assert(path ne null)
-
- val file = givenPath.jfile
- override def underlyingSource = Some(this)
-
- private val fpath = givenPath.toAbsolute
-
- /** Returns the name of this abstract file. */
- def name = givenPath.name
-
- /** Returns the path of this abstract file. */
- def path = givenPath.path
-
- /** The absolute file. */
- def absolute = new PlainFile(givenPath.toAbsolute)
-
- override def container: AbstractFile = new PlainFile(givenPath.parent)
- override def input = givenPath.toFile.inputStream()
- override def output = givenPath.toFile.outputStream()
- override def sizeOption = Some(givenPath.length.toInt)
-
- override def hashCode(): Int = fpath.hashCode
- override def equals(that: Any): Boolean = that match {
- case x: PlainFile => fpath == x.fpath
- case _ => false
- }
-
- /** Is this abstract file a directory? */
- def isDirectory: Boolean = givenPath.isDirectory
-
- /** Returns the time that this abstract file was last modified. */
- def lastModified: Long = givenPath.lastModified
-
- /** Returns all abstract subfiles of this abstract directory. */
- def iterator: Iterator[AbstractFile] = {
- if (!isDirectory) Iterator.empty
- else givenPath.toDirectory.list filter (_.exists) map (new PlainFile(_))
- }
-
- /**
- * Returns the abstract file in this abstract directory with the
- * specified name. If there is no such file, returns null. The
- * argument "directory" tells whether to look for a directory or
- * or a regular file.
- *
- * @param name ...
- * @param directory ...
- * @return ...
- */
- def lookupName(name: String, directory: Boolean): AbstractFile = {
- val child = givenPath / name
- if ((child.isDirectory && directory) || (child.isFile && !directory)) new PlainFile(child)
- else null
- }
-
- /** Does this abstract file denote an existing file? */
- def create(): Unit = if (!exists) givenPath.createFile()
-
- /** Delete the underlying file or directory (recursively). */
- def delete(): Unit =
- if (givenPath.isFile) givenPath.delete()
- else if (givenPath.isDirectory) givenPath.toDirectory.deleteRecursively()
-
- /** Returns a plain file with the given name. It does not
- * check that it exists.
- */
- def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile =
- new PlainFile(givenPath / name)
-}
diff --git a/src/compiler/scala/tools/nsc/io/Socket.scala b/src/compiler/scala/tools/nsc/io/Socket.scala
index b792092..e766c1b 100644
--- a/src/compiler/scala/tools/nsc/io/Socket.scala
+++ b/src/compiler/scala/tools/nsc/io/Socket.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala
index 324c5e4..569270f 100644
--- a/src/compiler/scala/tools/nsc/io/SourceReader.scala
+++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -28,7 +28,7 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) {
private var chars: CharBuffer = CharBuffer.allocate(0x4000)
private def reportEncodingError(filename:String) = {
- reporter.error(util.NoPosition,
+ reporter.error(scala.reflect.internal.util.NoPosition,
"IO error while decoding "+filename+" with "+decoder.charset()+"\n"+
"Please try specifying another one using the -encoding option")
}
diff --git a/src/compiler/scala/tools/nsc/io/Sources.scala b/src/compiler/scala/tools/nsc/io/Sources.scala
deleted file mode 100644
index 35c7a50..0000000
--- a/src/compiler/scala/tools/nsc/io/Sources.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-package scala.tools.nsc
-package io
-
-import util.ClassPath
-import java.util.concurrent.{ Future, ConcurrentHashMap, ExecutionException }
-import java.util.zip.ZipException
-import collection.JavaConverters._
-import Properties.{ envOrElse, propOrElse }
-
-class Sources(val path: String) {
- val expandedPath = ClassPath.join(ClassPath expandPath path: _*)
- val cache = new ConcurrentHashMap[String, List[Fileish]]
- def allNames = cache.keys.asScala.toList.sorted
- def apply(name: String) = get(name)
- def size = cache.asScala.values map (_.length) sum
-
- private var debug = false
- private def dbg(msg: => Any) = if (debug) Console println msg
- private val partitioned = ClassPath toPaths expandedPath partition (_.isDirectory)
-
- val dirs = partitioned._1 map (_.toDirectory)
- val jars = partitioned._2 filter Jar.isJarOrZip map (_.toFile)
- val (isDone, force) = {
- val f1 = spawn(calculateDirs())
- val f2 = spawn(calculateJars())
- val fn1 = () => { f1.isDone() && f2.isDone() }
- val fn2 = () => { f1.get() ; f2.get() ; () }
-
- (fn1, fn2)
- }
-
- private def catchZip(body: => Unit): Unit = {
- try body
- catch { case x: ZipException => dbg("Caught: " + x) }
- }
-
- private def calculateDirs() =
- dirs foreach { d => dbg(d) ; catchZip(addSources(d.deepFiles map (x => Fileish(x)))) }
-
- private def calculateJars() =
- jars foreach { j => dbg(j) ; catchZip(addSources(new Jar(j).fileishIterator)) }
-
- private def addSources(fs: TraversableOnce[Fileish]) =
- fs foreach { f => if (f.isSourceFile) add(f.name, f) }
-
- private def get(key: String): List[Fileish] =
- if (cache containsKey key) cache.get(key) else Nil
-
- private def add(key: String, value: Fileish) = {
- if (cache containsKey key) cache.replace(key, value :: cache.get(key))
- else cache.put(key, List(value))
- }
- override def toString = "Sources(%d dirs, %d jars, %d sources)".format(
- dirs.size, jars.size, cache.asScala.values map (_.length) sum
- )
-}
-
-trait LowPrioritySourcesImplicits {
- self: Sources.type =>
-
- implicit def fallbackSources: Sources = defaultSources
-}
-
-object Sources extends LowPrioritySourcesImplicits {
- private def libraryInits = ClassPath.scalaLibrary.toList flatMap (_.toAbsolute.parents)
- private def librarySourceDir = libraryInits map (_ / "src") find (_.isDirectory)
- private def expandedSourceDir = librarySourceDir.toList flatMap (ClassPath expandDir _.path)
-
- val sourcePathProp = sys.props.traceSourcePath.value
- val defaultSources = apply(expandedSourceDir :+ sourcePathProp: _*)
-
- def apply(paths: String*): Sources = new Sources(ClassPath.join(paths: _*))
-}
diff --git a/src/compiler/scala/tools/nsc/io/Streamable.scala b/src/compiler/scala/tools/nsc/io/Streamable.scala
deleted file mode 100644
index 0331867..0000000
--- a/src/compiler/scala/tools/nsc/io/Streamable.scala
+++ /dev/null
@@ -1,122 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.net.{ URI, URL }
-import java.io.{ BufferedInputStream, InputStream, PrintStream }
-import java.io.{ BufferedReader, InputStreamReader, Closeable => JCloseable }
-import scala.io.{ Codec, BufferedSource, Source }
-import collection.mutable.ArrayBuffer
-import Path.fail
-
-/** Traits for objects which can be represented as Streams.
- *
- * @author Paul Phillips
- * @since 2.8
- */
-
-object Streamable {
- /** Traits which can be viewed as a sequence of bytes. Source types
- * which know their length should override def length: Long for more
- * efficient method implementations.
- */
- trait Bytes {
- def inputStream(): InputStream
- def length: Long = -1
-
- def bufferedInput() = new BufferedInputStream(inputStream())
- def bytes(): Iterator[Byte] = bytesAsInts() map (_.toByte)
- def bytesAsInts(): Iterator[Int] = {
- val in = bufferedInput()
- Iterator continually in.read() takeWhile (_ != -1)
- }
-
- /** This method aspires to be the fastest way to read
- * a stream of known length into memory.
- */
- def toByteArray(): Array[Byte] = {
- // if we don't know the length, fall back on relative inefficiency
- if (length == -1L)
- return (new ArrayBuffer[Byte]() ++= bytes()).toArray
-
- val arr = new Array[Byte](length.toInt)
- val len = arr.length
- lazy val in = bufferedInput()
- var offset = 0
-
- def loop() {
- if (offset < len) {
- val read = in.read(arr, offset, len - offset)
- if (read >= 0) {
- offset += read
- loop()
- }
- }
- }
- try loop()
- finally in.close()
-
- if (offset == arr.length) arr
- else fail("Could not read entire source (%d of %d bytes)".format(offset, len))
- }
- }
-
- /** For objects which can be viewed as Chars.
- */
- trait Chars extends Bytes {
- /** Calls to methods requiring byte<->char transformations should be offered
- * in a form which allows specifying the codec. When it is not specified,
- * the one discovered at creation time will be used, which will always find the
- * one in scala.io.Codec if no other is available. This can be overridden
- * to use a different default.
- */
- def creationCodec: Codec = implicitly[Codec]
-
- def chars(): BufferedSource = chars(creationCodec)
- def chars(codec: Codec): BufferedSource = Source.fromInputStream(inputStream())(codec)
-
- def lines(): Iterator[String] = lines(creationCodec)
- def lines(codec: Codec): Iterator[String] = chars(codec).getLines()
-
- /** Obtains an InputStreamReader wrapped around a FileInputStream.
- */
- def reader(): InputStreamReader = reader(creationCodec)
- def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream, codec.charSet)
-
- /** Wraps a BufferedReader around the result of reader().
- */
- def bufferedReader(): BufferedReader = bufferedReader(creationCodec)
- def bufferedReader(codec: Codec) = new BufferedReader(reader(codec))
-
- /** Creates a BufferedReader and applies the closure, automatically closing it on completion.
- */
- def applyReader[T](f: BufferedReader => T): T = {
- val in = bufferedReader()
- try f(in)
- finally in.close()
- }
-
- /** Convenience function to import entire file into a String.
- */
- def slurp(): String = slurp(creationCodec)
- def slurp(codec: Codec) = chars(codec).mkString
- }
-
- /** Call a function on something Closeable, finally closing it. */
- def closing[T <: JCloseable, U](stream: T)(f: T => U): U =
- try f(stream)
- finally stream.close()
-
- def bytes(is: => InputStream): Array[Byte] =
- new Bytes { def inputStream() = is } toByteArray
-
- def slurp(is: => InputStream)(implicit codec: Codec): String =
- new Chars { def inputStream() = is } slurp codec
-
- def slurp(url: URL)(implicit codec: Codec): String =
- slurp(url.openStream())
-}
diff --git a/src/compiler/scala/tools/nsc/io/VirtualDirectory.scala b/src/compiler/scala/tools/nsc/io/VirtualDirectory.scala
deleted file mode 100644
index 0bcb2de..0000000
--- a/src/compiler/scala/tools/nsc/io/VirtualDirectory.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- */
-
-package scala.tools.nsc
-package io
-
-import scala.collection.mutable
-
-/**
- * An in-memory directory.
- *
- * @author Lex Spoon
- */
-class VirtualDirectory(val name: String, maybeContainer: Option[VirtualDirectory])
-extends AbstractFile {
- def path: String =
- maybeContainer match {
- case None => name
- case Some(parent) => parent.path+'/'+ name
- }
-
- def absolute = this
-
- def container = maybeContainer.get
- def isDirectory = true
- var lastModified: Long = System.currentTimeMillis
-
- override def file = null
- override def input = sys.error("directories cannot be read")
- override def output = sys.error("directories cannot be written")
-
- /** Does this abstract file denote an existing file? */
- def create() { unsupported }
-
- /** Delete the underlying file or directory (recursively). */
- def delete() { unsupported }
-
- /** Returns an abstract file with the given name. It does not
- * check that it exists.
- */
- def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported
-
- private val files = mutable.Map.empty[String, AbstractFile]
-
- // the toList is so that the directory may continue to be
- // modified while its elements are iterated
- def iterator = files.values.toList.iterator
-
- override def lookupName(name: String, directory: Boolean): AbstractFile =
- files get name filter (_.isDirectory == directory) orNull
-
- override def fileNamed(name: String): AbstractFile =
- Option(lookupName(name, false)) getOrElse {
- val newFile = new VirtualFile(name, path+'/'+name)
- files(name) = newFile
- newFile
- }
-
- override def subdirectoryNamed(name: String): AbstractFile =
- Option(lookupName(name, true)) getOrElse {
- val dir = new VirtualDirectory(name, Some(this))
- files(name) = dir
- dir
- }
-
- def clear() {
- files.clear();
- }
-}
diff --git a/src/compiler/scala/tools/nsc/io/VirtualFile.scala b/src/compiler/scala/tools/nsc/io/VirtualFile.scala
deleted file mode 100644
index 450d86b..0000000
--- a/src/compiler/scala/tools/nsc/io/VirtualFile.scala
+++ /dev/null
@@ -1,100 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-
-package scala.tools.nsc
-package io
-
-import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream }
-
-/** This class implements an in-memory file.
- *
- * @author Philippe Altherr
- * @version 1.0, 23/03/2004
- */
-class VirtualFile(val name: String, override val path: String) extends AbstractFile {
- /**
- * Initializes this instance with the specified name and an
- * identical path.
- *
- * @param name the name of the virtual file to be created
- * @return the created virtual file
- */
- def this(name: String) = this(name, name)
-
- override def hashCode = path.hashCode
- override def equals(that: Any) = that match {
- case x: VirtualFile => x.path == path
- case _ => false
- }
-
- //########################################################################
- // Private data
- private var content = new Array[Byte](0)
-
- //########################################################################
- // Public Methods
- def absolute = this
-
- /** Returns null. */
- final def file: JFile = null
-
- override def sizeOption: Option[Int] = Some(content.size)
-
- def input : InputStream = new ByteArrayInputStream(content);
-
- override def output: OutputStream = {
- new ByteArrayOutputStream() {
- override def close() {
- super.close()
- content = toByteArray()
- }
- }
- }
-
- def container: AbstractFile = unsupported
-
- /** Is this abstract file a directory? */
- def isDirectory: Boolean = false
-
- /** Returns the time that this abstract file was last modified. */
- private var _lastModified: Long = 0
- def lastModified: Long = _lastModified
- def lastModified_=(x: Long) = _lastModified = x
-
- /** Returns all abstract subfiles of this abstract directory. */
- def iterator: Iterator[AbstractFile] = {
- assert(isDirectory, "not a directory '" + this + "'")
- Iterator.empty
- }
-
- /** Does this abstract file denote an existing file? */
- def create() { unsupported }
-
- /** Delete the underlying file or directory (recursively). */
- def delete() { unsupported }
-
- /**
- * Returns the abstract file in this abstract directory with the
- * specified name. If there is no such file, returns null. The
- * argument "directory" tells whether to look for a directory or
- * or a regular file.
- *
- * @param name ...
- * @param directory ...
- * @return ...
- */
- def lookupName(name: String, directory: Boolean): AbstractFile = {
- assert(isDirectory, "not a directory '" + this + "'")
- null
- }
-
- /** Returns an abstract file with the given name. It does not
- * check that it exists.
- */
- def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
-
- //########################################################################
-}
diff --git a/src/compiler/scala/tools/nsc/io/ZipArchive.scala b/src/compiler/scala/tools/nsc/io/ZipArchive.scala
deleted file mode 100644
index 90cb827..0000000
--- a/src/compiler/scala/tools/nsc/io/ZipArchive.scala
+++ /dev/null
@@ -1,215 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.net.URL
-import java.io.{ IOException, InputStream, ByteArrayInputStream }
-import java.util.zip.{ ZipEntry, ZipFile, ZipInputStream }
-import scala.collection.{ immutable, mutable }
-import annotation.tailrec
-
-/** An abstraction for zip files and streams. Everything is written the way
- * it is for performance: we come through here a lot on every run. Be careful
- * about changing it.
- *
- * @author Philippe Altherr (original version)
- * @author Paul Phillips (this one)
- * @version 2.0,
- */
-object ZipArchive {
- def fromPath(path: String): FileZipArchive = fromFile(new JFile(path))
- def fromPath(path: Path): FileZipArchive = fromFile(path.toFile)
-
- /**
- * @param file a File
- * @return A ZipArchive if `file` is a readable zip file, otherwise null.
- */
- def fromFile(file: File): FileZipArchive = fromFile(file.jfile)
- def fromFile(file: JFile): FileZipArchive =
- try { new FileZipArchive(file) }
- catch { case _: IOException => null }
-
- /**
- * @param url the url of a zip file
- * @return A ZipArchive backed by the given url.
- */
- def fromURL(url: URL): URLZipArchive = new URLZipArchive(url)
- def fromURL(url: String): URLZipArchive = fromURL(new URL(url))
-
- private def dirName(path: String) = splitPath(path, true)
- private def baseName(path: String) = splitPath(path, false)
- private def splitPath(path0: String, front: Boolean): String = {
- val isDir = path0.charAt(path0.length - 1) == '/'
- val path = if (isDir) path0.substring(0, path0.length - 1) else path0
- val idx = path.lastIndexOf('/')
-
- if (idx < 0)
- if (front) "/"
- else path
- else
- if (front) path.substring(0, idx + 1)
- else path.substring(idx + 1)
- }
-}
-import ZipArchive._
-
-abstract class ZipArchive(override val file: JFile) extends AbstractFile with Equals {
- self =>
-
- override def underlyingSource = Some(this)
- def isDirectory = true
- def lookupName(name: String, directory: Boolean) = unsupported
- def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
- def create() = unsupported
- def delete() = unsupported
- def output = unsupported
- def container = unsupported
- def absolute = unsupported
-
- private def walkIterator(its: Iterator[AbstractFile]): Iterator[AbstractFile] = {
- its flatMap { f =>
- if (f.isDirectory) walkIterator(f.iterator)
- else Iterator(f)
- }
- }
- def deepIterator = walkIterator(iterator)
-
- sealed abstract class Entry(path: String) extends VirtualFile(baseName(path), path) {
- // have to keep this name for compat with sbt's compiler-interface
- def getArchive: ZipFile = null
- override def underlyingSource = Some(self)
- override def toString = self.path + "(" + path + ")"
- }
- class DirEntry(path: String) extends Entry(path) {
- val entries = mutable.HashMap[String, Entry]()
-
- override def isDirectory = true
- override def iterator = entries.valuesIterator
- override def lookupName(name: String, directory: Boolean): Entry = {
- if (directory) entries(name + "/")
- else entries(name)
- }
- }
-
- private def ensureDir(dirs: mutable.Map[String, DirEntry], path: String, zipEntry: ZipEntry): DirEntry = {
- dirs.getOrElseUpdate(path, {
- val parent = ensureDir(dirs, dirName(path), null)
- val dir = new DirEntry(path)
- parent.entries(baseName(path)) = dir
- dir
- })
- }
- protected def getDir(dirs: mutable.Map[String, DirEntry], entry: ZipEntry): DirEntry = {
- if (entry.isDirectory) ensureDir(dirs, entry.getName, entry)
- else ensureDir(dirs, dirName(entry.getName), null)
- }
-}
-
-final class FileZipArchive(file: JFile) extends ZipArchive(file) {
- def iterator = {
- val zipFile = new ZipFile(file)
- val root = new DirEntry("/")
- val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
- val enum = zipFile.entries()
-
- while (enum.hasMoreElements) {
- val zipEntry = enum.nextElement
- val dir = getDir(dirs, zipEntry)
- if (zipEntry.isDirectory) dir
- else {
- class FileEntry() extends Entry(zipEntry.getName) {
- override def getArchive = zipFile
- override def lastModified = zipEntry.getTime()
- override def input = getArchive getInputStream zipEntry
- override def sizeOption = Some(zipEntry.getSize().toInt)
- }
- val f = new FileEntry()
- dir.entries(f.name) = f
- }
- }
-
- try root.iterator
- finally dirs.clear()
- }
-
- def name = file.getName
- def path = file.getPath
- def input = File(file).inputStream()
- def lastModified = file.lastModified
-
- override def sizeOption = Some(file.length.toInt)
- override def canEqual(other: Any) = other.isInstanceOf[FileZipArchive]
- override def hashCode() = file.hashCode
- override def equals(that: Any) = that match {
- case x: FileZipArchive => file.getAbsoluteFile == x.file.getAbsoluteFile
- case _ => false
- }
-}
-
-final class URLZipArchive(val url: URL) extends ZipArchive(null) {
- def iterator = {
- val root = new DirEntry("/")
- val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
- val in = new ZipInputStream(new ByteArrayInputStream(Streamable.bytes(input)))
-
- @tailrec def loop() {
- val zipEntry = in.getNextEntry()
- class FileEntry() extends Entry(zipEntry.getName) {
- override val toByteArray: Array[Byte] = {
- val len = zipEntry.getSize().toInt
- val arr = new Array[Byte](len)
- var offset = 0
-
- def loop() {
- if (offset < len) {
- val read = in.read(arr, offset, len - offset)
- if (read >= 0) {
- offset += read
- loop()
- }
- }
- }
- loop()
-
- if (offset == arr.length) arr
- else throw new IOException("Input stream truncated: read %d of %d bytes".format(offset, len))
- }
- override def sizeOption = Some(zipEntry.getSize().toInt)
- }
-
- if (zipEntry != null) {
- val dir = getDir(dirs, zipEntry)
- if (zipEntry.isDirectory)
- dir
- else {
- val f = new FileEntry()
- dir.entries(f.name) = f
- }
- in.closeEntry()
- loop()
- }
- }
-
- loop()
- try root.iterator
- finally dirs.clear()
- }
-
- def name = url.getFile()
- def path = url.getPath()
- def input = url.openStream()
- def lastModified =
- try url.openConnection().getLastModified()
- catch { case _: IOException => 0 }
-
- override def canEqual(other: Any) = other.isInstanceOf[URLZipArchive]
- override def hashCode() = url.hashCode
- override def equals(that: Any) = that match {
- case x: URLZipArchive => url == x.url
- case _ => false
- }
-}
diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala
index 2c5e50e..711696b 100644
--- a/src/compiler/scala/tools/nsc/io/package.scala
+++ b/src/compiler/scala/tools/nsc/io/package.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -8,32 +8,44 @@ package scala.tools.nsc
import java.util.concurrent.{ Future, Callable }
import java.util.{ Timer, TimerTask }
import java.util.jar.{ Attributes }
+import scala.language.implicitConversions
package object io {
+ // Forwarders from scala.reflect.io
+ type AbstractFile = scala.reflect.io.AbstractFile
+ val AbstractFile = scala.reflect.io.AbstractFile
+ type Directory = scala.reflect.io.Directory
+ val Directory = scala.reflect.io.Directory
+ type File = scala.reflect.io.File
+ val File = scala.reflect.io.File
+ type Path = scala.reflect.io.Path
+ val Path = scala.reflect.io.Path
+ type PlainFile = scala.reflect.io.PlainFile
+ val PlainFile = scala.reflect.io.PlainFile
+ val Streamable = scala.reflect.io.Streamable
+ type VirtualDirectory = scala.reflect.io.VirtualDirectory
+ type VirtualFile = scala.reflect.io.VirtualFile
+ val ZipArchive = scala.reflect.io.ZipArchive
+ type ZipArchive = scala.reflect.io.ZipArchive
+
+ implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
+
type JManifest = java.util.jar.Manifest
type JFile = java.io.File
- private[io] implicit def installManifestOps(m: JManifest) = new ManifestOps(m)
-
- class ManifestOps(manifest: JManifest) {
- def attrs = manifest.getMainAttributes()
- def apply(name: Attributes.Name) = "" + attrs.get(name)
- def update(key: Attributes.Name, value: String) = attrs.put(key, value)
- }
+ implicit def enrichManifest(m: JManifest): Jar.WManifest = Jar.WManifest(m)
private lazy val daemonThreadPool = DaemonThreadFactory.newPool()
def runnable(body: => Unit): Runnable = new Runnable { override def run() = body }
def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
def spawn[T](body: => T): Future[T] = daemonThreadPool submit callable(body)
def submit(runnable: Runnable) = daemonThreadPool submit runnable
- def runnableFn(f: () => Unit): Runnable = runnable(f())
- def callableFn[T](f: () => T): Callable[T] = callable(f())
- def spawnFn[T](f: () => T): Future[T] = spawn(f())
// Create, start, and return a daemon thread
- def daemonize(body: => Unit): Thread = {
+ def daemonize(body: => Unit): Thread = newThread(_ setDaemon true)(body)
+ def newThread(f: Thread => Unit)(body: => Unit): Thread = {
val thread = new Thread(runnable(body))
- thread setDaemon true
+ f(thread)
thread.start
thread
}
@@ -46,4 +58,4 @@ package object io {
alarm.schedule(tt, seconds * 1000)
alarm
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index e726c21..0779e64 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
//todo: allow infix type patterns
@@ -8,10 +8,11 @@
package scala.tools.nsc
package javac
-import scala.tools.nsc.util.OffsetPosition
+import scala.reflect.internal.util.OffsetPosition
import scala.collection.mutable.ListBuffer
import symtab.Flags
import JavaTokens._
+import scala.language.implicitConversions
trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val global : Global
@@ -126,11 +127,15 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (treeInfo.firstConstructor(stats) == EmptyTree) makeConstructor(List()) :: stats
else stats)
- def makeParam(name: String, tpt: Tree) =
- ValDef(Modifiers(Flags.JAVA | Flags.PARAM), newTermName(name), tpt, EmptyTree)
+ def makeSyntheticParam(count: Int, tpt: Tree): ValDef =
+ makeParam(nme.syntheticParamName(count), tpt)
+ def makeParam(name: String, tpt: Tree): ValDef =
+ makeParam(newTypeName(name), tpt)
+ def makeParam(name: TermName, tpt: Tree): ValDef =
+ ValDef(Modifiers(Flags.JAVA | Flags.PARAM), name, tpt, EmptyTree)
def makeConstructor(formals: List[Tree]) = {
- val vparams = formals.zipWithIndex map { case (p, i) => makeParam("x$" + (i + 1), p) }
+ val vparams = mapWithIndex(formals)((p, i) => makeSyntheticParam(i + 1, p))
DefDef(Modifiers(Flags.JAVA), nme.CONSTRUCTOR, List(), List(vparams), TypeTree(), blankExpr)
}
@@ -340,7 +345,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
List() // don't pass on annotations for now
}
- /** Annotation ::= TypeName [`(' AnnotationArgument {`,' AnnotationArgument} `)']
+ /** Annotation ::= TypeName [`(` AnnotationArgument {`,` AnnotationArgument} `)`]
*/
def annotation() {
val pos = in.currentPos
@@ -389,8 +394,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
// assumed true unless we see public/private/protected
var isPackageAccess = true
var annots: List[Tree] = Nil
- def addAnnot(sym: Symbol) =
- annots :+= New(TypeTree(sym.tpe), List(Nil))
+ def addAnnot(sym: Symbol) = annots :+= New(sym.tpe)
while (true) {
in.token match {
@@ -416,6 +420,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
case FINAL =>
flags |= Flags.FINAL
in.nextToken
+ case DEFAULT =>
+ flags |= Flags.DEFAULTMETHOD
+ in.nextToken()
case NATIVE =>
addAnnot(NativeAttr)
in.nextToken
@@ -540,16 +547,17 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val vparams = formalParams()
if (!isVoid) rtpt = optArrayBrackets(rtpt)
optThrows()
+ val bodyOk = !inInterface || (mods hasFlag Flags.DEFAULTMETHOD)
val body =
- if (!inInterface && in.token == LBRACE) {
+ if (bodyOk && in.token == LBRACE) {
methodBody()
} else {
if (parentToken == AT && in.token == DEFAULT) {
val annot =
atPos(pos) {
- New(Select(scalaDot(newTermName("runtime")), tpnme.AnnotationDefaultATTR), List(List()))
+ New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), ListOfNil)
}
- mods1 = Modifiers(mods1.flags, mods1.privateWithin, annot :: mods1.annotations, mods1.positions)
+ mods1 = mods1 withAnnotations List(annot)
skipTo(SEMI)
accept(SEMI)
blankExpr
@@ -577,7 +585,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
* This one is tricky because a comma might also appear in an
* initializer. Since we don't parse initializers we don't know
* what the comma signifies.
- * We solve this with a second list buffer `maybe' which contains
+ * We solve this with a second list buffer `maybe` which contains
* potential variable definitions.
* Once we have reached the end of the statement, we know whether
* these potential definitions are real or not.
@@ -589,7 +597,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
in.nextToken
if (in.token == IDENTIFIER) { // if there's an ident after the comma ...
val name = ident()
- if (in.token == ASSIGN || in.token == SEMI) { // ... followed by a `=' or `;', we know it's a real variable definition
+ if (in.token == ASSIGN || in.token == SEMI) { // ... followed by a `=` or `;`, we know it's a real variable definition
buf ++= maybe
buf += varDecl(in.currentPos, mods, tpt.duplicate, name)
maybe.clear()
@@ -636,7 +644,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def importCompanionObject(cdef: ClassDef): Tree =
atPos(cdef.pos) {
- Import(Ident(cdef.name.toTermName), List(ImportSelector(nme.WILDCARD, -1, null, -1)))
+ Import(Ident(cdef.name.toTermName), ImportSelector.wildList)
}
// Importing the companion object members cannot be done uncritically: see
@@ -650,15 +658,12 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
// leaves auxiliary constructors unable to access members of the companion object
// as unqualified identifiers.
def addCompanionObject(statics: List[Tree], cdef: ClassDef): List[Tree] = {
- def implWithImport(importStmt: Tree) = {
- import cdef.impl._
- treeCopy.Template(cdef.impl, parents, self, importStmt :: body)
- }
+ def implWithImport(importStmt: Tree) = deriveTemplate(cdef.impl)(importStmt :: _)
// if there are no statics we can use the original cdef, but we always
// create the companion so import A._ is not an error (see ticket #1700)
val cdefNew =
if (statics.isEmpty) cdef
- else treeCopy.ClassDef(cdef, cdef.mods, cdef.name, cdef.tparams, implWithImport(importCompanionObject(cdef)))
+ else deriveClassDef(cdef)(_ => implWithImport(importCompanionObject(cdef)))
List(makeCompanionObject(cdefNew, statics), cdefNew)
}
@@ -788,23 +793,18 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val idefs = members.toList ::: (sdefs flatMap forwarders)
(sdefs, idefs)
}
-
+ def annotationParents = List(
+ gen.scalaAnnotationDot(tpnme.Annotation),
+ Select(javaLangDot(nme.annotation), tpnme.Annotation),
+ gen.scalaAnnotationDot(tpnme.ClassfileAnnotation)
+ )
def annotationDecl(mods: Modifiers): List[Tree] = {
accept(AT)
accept(INTERFACE)
val pos = in.currentPos
val name = identForType()
- val parents = List(scalaDot(newTypeName("Annotation")),
- Select(javaLangDot(newTermName("annotation")), newTypeName("Annotation")),
- scalaDot(newTypeName("ClassfileAnnotation")))
val (statics, body) = typeBody(AT, name)
- def getValueMethodType(tree: Tree) = tree match {
- case DefDef(_, nme.value, _, _, tpt, _) => Some(tpt.duplicate)
- case _ => None
- }
- var templ = makeTemplate(parents, body)
- for (stat <- templ.body; tpt <- getValueMethodType(stat))
- templ = makeTemplate(parents, makeConstructor(List(tpt)) :: templ.body)
+ val templ = makeTemplate(annotationParents, body)
addCompanionObject(statics, atPos(pos) {
ClassDef(mods, name, List(), templ)
})
@@ -838,18 +838,18 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
}
val predefs = List(
DefDef(
- Modifiers(Flags.JAVA | Flags.STATIC), newTermName("values"), List(),
- List(List()),
+ Modifiers(Flags.JAVA | Flags.STATIC), nme.values, List(),
+ ListOfNil,
arrayOf(enumType),
blankExpr),
DefDef(
- Modifiers(Flags.JAVA | Flags.STATIC), newTermName("valueOf"), List(),
+ Modifiers(Flags.JAVA | Flags.STATIC), nme.valueOf, List(),
List(List(makeParam("x", TypeTree(StringClass.tpe)))),
enumType,
blankExpr))
accept(RBRACE)
val superclazz =
- AppliedTypeTree(javaLangDot(newTypeName("Enum")), List(enumType))
+ AppliedTypeTree(javaLangDot(tpnme.Enum), List(enumType))
addCompanionObject(consts ::: statics ::: predefs, atPos(pos) {
ClassDef(mods, name, List(),
makeTemplate(superclazz :: interfaces, body))
@@ -870,7 +870,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
skipAhead()
accept(RBRACE)
}
- ValDef(Modifiers(Flags.JAVA | Flags.STATIC), name, enumType, blankExpr)
+ // The STABLE flag is to signal to namer that this was read from a
+ // java enum, and so should be given a Constant type (thereby making
+ // it usable in annotations.)
+ ValDef(Modifiers(Flags.STABLE | Flags.JAVA | Flags.STATIC), name, enumType, blankExpr)
}
}
@@ -906,7 +909,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
buf ++= importDecl()
while (in.token != EOF && in.token != RBRACE) {
while (in.token == SEMI) in.nextToken
- buf ++= typeDecl(modifiers(false))
+ if (in.token != EOF)
+ buf ++= typeDecl(modifiers(false))
}
accept(EOF)
atPos(pos) {
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index ed5ebb1..e230585 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -1,15 +1,17 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package javac
-import scala.tools.nsc.util._
-import Chars._
+import scala.tools.nsc.util.JavaCharArrayReader
+import scala.reflect.internal.util._
+import scala.reflect.internal.Chars._
import JavaTokens._
import scala.annotation.switch
+import scala.language.implicitConversions
// Todo merge these better with Scanners
trait JavaScanners extends ast.parser.ScannersCommon {
@@ -137,7 +139,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
kwOffset = offset
arr
}
- final val tokenName = allKeywords map (_.swap) toMap
+ final val tokenName = allKeywords.map(_.swap).toMap
//Token representation -----------------------------------------------------
@@ -776,7 +778,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
*/
def intVal(negated: Boolean): Long = {
if (token == CHARLIT && !negated) {
- if (name.length > 0) name(0) else 0
+ if (name.length > 0) name.charAt(0) else 0
} else {
var value: Long = 0
val divider = if (base == 10) 1 else 2
@@ -785,7 +787,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
var i = 0
val len = name.length
while (i < len) {
- val d = digit2int(name(i), base)
+ val d = digit2int(name.charAt(i), base)
if (d < 0) {
syntaxError("malformed integer number")
return 0
@@ -811,7 +813,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
val limit: Double =
if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue
try {
- val value: Double = java.lang.Double.valueOf(name.toString()).doubleValue()
+ val value: Double = java.lang.Double.valueOf(name.toString).doubleValue()
if (value > limit)
syntaxError("floating point number too large")
if (negated) -value else value
@@ -919,6 +921,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
def warning(pos: Int, msg: String) = unit.warning(pos, msg)
def error (pos: Int, msg: String) = unit. error(pos, msg)
def incompleteInputError(pos: Int, msg: String) = unit.incompleteInputError(pos, msg)
+ def deprecationWarning(pos: Int, msg: String) = unit.deprecationWarning(pos, msg)
implicit def p2g(pos: Position): Int = if (pos.isDefined) pos.point else -1
implicit def g2p(pos: Int): Position = new OffsetPosition(unit.source, pos)
}
diff --git a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
index 796f96e..a562de2 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
index beaf631..5ca9fd5 100644
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* Author: Paul Phillips
*/
@@ -7,9 +7,10 @@ package scala.tools.nsc
package matching
import transform.ExplicitOuter
-import ast.{ TreePrinters, Trees }
+import ast.{ Printers, Trees }
import java.io.{ StringWriter, PrintWriter }
-import annotation.elidable
+import scala.annotation.elidable
+import scala.language.postfixOps
/** Ancillary bits of ParallelMatching which are better off
* out of the way.
@@ -29,11 +30,10 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching =>
object Types {
import definitions._
- implicit def enrichType(x: Type): RichType = new RichType(x)
- val subrangeTypes = Set(ByteClass, ShortClass, CharClass, IntClass)
+ val subrangeTypes = Set[Symbol](ByteClass, ShortClass, CharClass, IntClass)
- class RichType(undecodedTpe: Type) {
+ implicit class RichType(undecodedTpe: Type) {
def tpe = decodedEqualsType(undecodedTpe)
def isAnyRef = tpe <:< AnyRefClass.tpe
@@ -57,7 +57,7 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching =>
def symbolToString(s: Symbol): String = s match {
case x => x.toString
}
- def treeToString(t: Tree): String = unbind(t) match {
+ def treeToString(t: Tree): String = treeInfo.unbind(t) match {
case EmptyTree => "?"
case WILD() => "_"
case Literal(Constant(x)) => "LIT(%s)".format(x)
@@ -95,8 +95,6 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching =>
})
}
- @elidable(elidable.FINE) def ifDebug(body: => Unit): Unit = { if (settings.debug.value) body }
- @elidable(elidable.FINE) def DBG(msg: => String): Unit = { ifDebug(println(msg)) }
@elidable(elidable.FINE) def TRACE(f: String, xs: Any*): Unit = {
if (trace) {
val msg = if (xs.isEmpty) f else f.format(xs map pp: _*)
@@ -117,6 +115,10 @@ trait MatchSupport extends ast.TreeDSL { self: ParallelMatching =>
println(fmt.format(xs: _*) + " == " + x)
x
}
+ private[nsc] def debugging[T](fmt: String, xs: Any*)(x: T): T = {
+ if (settings.debug.value) printing(fmt, xs: _*)(x)
+ else x
+ }
def indent(s: Any) = s.toString() split "\n" map (" " + _) mkString "\n"
def indentAll(s: Seq[Any]) = s map (" " + _.toString() + "\n") mkString
diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala
index 83213c4..daefe4c 100644
--- a/src/compiler/scala/tools/nsc/matching/Matrix.scala
+++ b/src/compiler/scala/tools/nsc/matching/Matrix.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* Author: Paul Phillips
*/
@@ -9,6 +9,7 @@ package matching
import transform.ExplicitOuter
import symtab.Flags
import scala.collection.mutable
+import scala.language.implicitConversions
trait Matrix extends MatrixAdditions {
self: ExplicitOuter with ParallelMatching =>
@@ -23,9 +24,9 @@ trait Matrix extends MatrixAdditions {
/** Translation of match expressions.
*
- * `p': pattern
- * `g': guard
- * `bx': body index
+ * `p`: pattern
+ * `g`: guard
+ * `bx`: body index
*
* internal representation is (tvars:List[Symbol], rows:List[Row])
*
@@ -122,8 +123,7 @@ trait Matrix extends MatrixAdditions {
private val _syntheticSyms = mutable.HashSet[Symbol]()
def clearSyntheticSyms() = {
_syntheticSyms foreach (_ resetFlag (NO_EXHAUSTIVE|MUTABLE))
- if (settings.debug.value)
- log("Cleared NO_EXHAUSTIVE/MUTABLE on " + _syntheticSyms.size + " synthetic symbols.")
+ debuglog("Cleared NO_EXHAUSTIVE/MUTABLE on " + _syntheticSyms.size + " synthetic symbols.")
_syntheticSyms.clear()
}
def recordSyntheticSym(sym: Symbol): Symbol = {
@@ -199,6 +199,10 @@ trait Matrix extends MatrixAdditions {
class PatternVar(val lhs: Symbol, val rhs: Tree, val checked: Boolean) {
def sym = lhs
def tpe = lhs.tpe
+ if (checked)
+ lhs resetFlag NO_EXHAUSTIVE
+ else
+ lhs setFlag NO_EXHAUSTIVE
// See #1427 for an example of a crash which occurs unless we retype:
// in that instance there is an existential in the pattern.
@@ -208,11 +212,6 @@ trait Matrix extends MatrixAdditions {
override def toString() = "%s: %s = %s".format(lhs, tpe, rhs)
}
- /** Sets the rhs to EmptyTree, which makes the valDef ignored in Scrutinee.
- */
- def specialVar(lhs: Symbol, checked: Boolean) =
- new PatternVar(lhs, EmptyTree, checked)
-
/** Given a tree, creates a new synthetic variable of the same type
* and assigns the tree to it.
*/
@@ -253,7 +252,8 @@ trait Matrix extends MatrixAdditions {
{
val n = if (name == null) cunit.freshTermName("temp") else name
// careful: pos has special meaning
- recordSyntheticSym(owner.newVariable(pos, n) setInfo tpe setFlag (SYNTHETIC.toLong /: flags)(_|_))
+ val flagsLong = (SYNTHETIC.toLong /: flags)(_|_)
+ recordSyntheticSym(owner.newVariable(n, pos, flagsLong) setInfo tpe)
}
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
index 7024533..7220253 100644
--- a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* Author: Paul Phillips
*/
@@ -20,7 +20,7 @@ trait MatrixAdditions extends ast.TreeDSL {
import CODE._
import Debug._
import treeInfo._
- import definitions.{ isValueClass }
+ import definitions.{ isPrimitiveValueClass }
/** The Squeezer, responsible for all the squeezing.
*/
@@ -131,29 +131,17 @@ trait MatrixAdditions extends ast.TreeDSL {
import Flags.{ MUTABLE, ABSTRACT, SEALED }
- private case class Combo(index: Int, sym: Symbol) {
- val isBaseClass = sym.tpe.baseClasses.toSet
-
- // is this combination covered by the given pattern?
- def isCovered(p: Pattern) = {
- def coversSym = isBaseClass(decodedEqualsType(p.tpe).typeSymbol)
-
- cond(p.tree) {
- case _: UnApply | _: ArrayValue => true
- case x => p.isDefault || coversSym
- }
- }
- }
+ private case class Combo(index: Int, sym: Symbol) { }
/* True if the patterns in 'row' cover the given type symbol combination, and has no guard. */
private def rowCoversCombo(row: Row, combos: List[Combo]) =
- row.guard.isEmpty && (combos forall (c => c isCovered row.pats(c.index)))
+ row.guard.isEmpty && combos.forall(c => row.pats(c.index) covers c.sym)
private def requiresExhaustive(sym: Symbol) = {
(sym.isMutable) && // indicates that have not yet checked exhaustivity
!(sym hasFlag NO_EXHAUSTIVE) && // indicates @unchecked
(sym.tpe.typeSymbol.isSealed) &&
- !isValueClass(sym.tpe.typeSymbol) // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
+ !isPrimitiveValueClass(sym.tpe.typeSymbol) // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
}
private lazy val inexhaustives: List[List[Combo]] = {
@@ -162,10 +150,15 @@ trait MatrixAdditions extends ast.TreeDSL {
val collected = toCollect map { case (pv, i) =>
// okay, now reset the flag
pv.sym resetFlag MUTABLE
- // have to filter out children which cannot match: see ticket #3683 for an example
- val kids = pv.tpe.typeSymbol.sealedDescendants filter (_.tpe matchesPattern pv.tpe)
- i -> kids
+ i -> (
+ pv.tpe.typeSymbol.sealedDescendants.toList sortBy (_.sealedSortName)
+ // symbols which are both sealed and abstract need not be covered themselves, because
+ // all of their children must be and they cannot otherwise be created.
+ filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
+ // have to filter out children which cannot match: see ticket #3683 for an example
+ filter (_.tpe matchesPattern pv.tpe)
+ )
}
val folded =
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 65e570f..dbb9b7a 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* Copyright 2007 Google Inc. All Rights Reserved.
* Author: bqe at google.com (Burak Emir)
*/
@@ -8,13 +8,14 @@ package scala.tools.nsc
package matching
import PartialFunction._
-import scala.collection.{ mutable, immutable }
-import util.Position
+import scala.collection.{ mutable }
+import scala.reflect.internal.util.Position
import transform.ExplicitOuter
import symtab.Flags
import mutable.ListBuffer
-import immutable.IntMap
-import annotation.elidable
+import scala.annotation.elidable
+import scala.language.postfixOps
+import scala.tools.nsc.settings.ScalaVersion
trait ParallelMatching extends ast.TreeDSL
with MatchSupport
@@ -25,7 +26,10 @@ trait ParallelMatching extends ast.TreeDSL
self: ExplicitOuter =>
import global.{ typer => _, _ }
- import definitions.{ AnyRefClass, NothingClass, IntClass, BooleanClass, SomeClass, getProductArgs, productProj }
+ import definitions.{
+ AnyRefClass, IntClass, BooleanClass, SomeClass, OptionClass,
+ getProductArgs, productProj, Object_eq, Any_asInstanceOf
+ }
import CODE._
import Types._
import Debug._
@@ -43,7 +47,7 @@ trait ParallelMatching extends ast.TreeDSL
lazy val (rows, targets) = expand(roots, cases).unzip
lazy val expansion: Rep = make(roots, rows)
- private val shortCuts = mutable.HashMap[Int, Symbol]()
+ private val shortCuts = perRunCaches.newMap[Int, Symbol]()
final def createShortCut(theLabel: Symbol): Int = {
val key = shortCuts.size + 1
@@ -52,7 +56,7 @@ trait ParallelMatching extends ast.TreeDSL
}
def createLabelDef(namePrefix: String, body: Tree, params: List[Symbol] = Nil, restpe: Type = matchResultType) = {
val labelName = cunit.freshTermName(namePrefix)
- val labelSym = owner.newLabel(owner.pos, labelName)
+ val labelSym = owner.newLabel(labelName, owner.pos)
val labelInfo = MethodType(params, restpe)
LabelDef(labelSym setInfo labelInfo, params, body setType restpe)
@@ -133,7 +137,7 @@ trait ParallelMatching extends ast.TreeDSL
def castedTo(headType: Type) =
if (tpe =:= headType) this
- else new Scrutinee(createVar(headType, lhs => id AS_ANY lhs.tpe))
+ else new Scrutinee(createVar(headType, lhs => gen.mkAsInstanceOf(id, lhs.tpe)))
override def toString() = "(%s: %s)".format(id, tpe)
}
@@ -307,12 +311,12 @@ trait ParallelMatching extends ast.TreeDSL
}
lazy val cases =
- for ((tag, indices) <- literalMap.toList) yield {
+ for ((tag, indices) <- literalMap.toList.sortBy(_._1)) yield {
val newRows = indices map (i => addDefaultVars(i)(rest rows i))
val r = remake(newRows ++ defaultRows, includeScrut = false)
val r2 = make(r.tvars, r.rows map (x => x rebind bindVars(tag, x.subst)))
- CASE(Literal(tag)) ==> r2.toTree
+ CASE(Literal(Constant(tag))) ==> r2.toTree
}
lazy val defaultTree = remake(defaultRows, includeScrut = false).toTree
@@ -356,24 +360,26 @@ trait ParallelMatching extends ast.TreeDSL
lazy val unapplyResult: PatternVar =
scrut.createVar(unMethod.tpe, Apply(unTarget, scrut.id :: trailing) setType _.tpe)
- lazy val cond: Tree =
- if (unapplyResult.tpe.isBoolean) unapplyResult.ident
- else if (unapplyResult.tpe.typeSymbol == SomeClass) TRUE
- else NOT(unapplyResult.ident DOT nme.isEmpty)
+ lazy val cond: Tree = unapplyResult.tpe.normalize match {
+ case TypeRef(_, BooleanClass, _) => unapplyResult.ident
+ case TypeRef(_, SomeClass, _) => TRUE
+ case _ => NOT(unapplyResult.ident DOT nme.isEmpty)
+ }
lazy val failure =
mkFail(zipped.tail filterNot (x => SameUnapplyPattern(x._1)) map { case (pat, r) => r insert pat })
private def doSuccess: (List[PatternVar], List[PatternVar], List[Row]) = {
// pattern variable for the unapply result of Some(x).get
- lazy val pv = scrut.createVar(
- unMethod.tpe typeArgs 0,
- _ => fn(ID(unapplyResult.lhs), nme.get)
- )
+ def unMethodTypeArg = unMethod.tpe.baseType(OptionClass).typeArgs match {
+ case Nil => log("No type argument for unapply result! " + unMethod.tpe) ; NoType
+ case arg :: _ => arg
+ }
+ lazy val pv = scrut.createVar(unMethodTypeArg, _ => fn(ID(unapplyResult.lhs), nme.get))
def tuple = pv.lhs
// at this point it's Some[T1,T2...]
- lazy val tpes = getProductArgs(tuple.tpe).get
+ lazy val tpes = getProductArgs(tuple.tpe)
// one pattern variable per tuple element
lazy val tuplePVs =
@@ -672,7 +678,7 @@ trait ParallelMatching extends ast.TreeDSL
// the val definition's type, or a casted Ident if not.
private def newValIdent(lhs: Symbol, rhs: Symbol) =
if (rhs.tpe <:< lhs.tpe) Ident(rhs)
- else Ident(rhs) AS lhs.tpe
+ else gen.mkTypeApply(Ident(rhs), Any_asInstanceOf, List(lhs.tpe))
protected def newValDefinition(lhs: Symbol, rhs: Symbol) =
typer typedValDef ValDef(lhs, newValIdent(lhs, rhs))
@@ -741,7 +747,7 @@ trait ParallelMatching extends ast.TreeDSL
(others.head :: _column.tail, make(_tvars, _rows))
def mix() = {
- val newScrut = new Scrutinee(specialVar(_pv.sym, _pv.checked))
+ val newScrut = new Scrutinee(new PatternVar(_pv.sym, EmptyTree, _pv.checked))
PatternMatch(newScrut, _ncol) mkRule _nrep
}
}
@@ -816,7 +822,7 @@ trait ParallelMatching extends ast.TreeDSL
// match that's unimportant; so we add an instance check only if there
// is a binding.
def bindingWarning() = {
- if (isBound && settings.Xmigration28.value) {
+ if (isBound && settings.Xmigration.value < ScalaVersion.twoDotEight) {
cunit.warning(scrutTree.pos,
"A bound pattern such as 'x @ Pattern' now matches fewer cases than the same pattern with no binding.")
}
@@ -835,7 +841,7 @@ trait ParallelMatching extends ast.TreeDSL
typer typed {
tpe match {
case ConstantType(Constant(null)) if isRef => scrutTree OBJ_EQ NULL
- case ConstantType(Constant(value)) => scrutTree MEMBER_== Literal(value)
+ case ConstantType(const) => scrutTree MEMBER_== Literal(const)
case SingleType(NoPrefix, sym) => genEquals(sym)
case SingleType(pre, sym) if sym.isStable => genEquals(sym)
case ThisType(sym) if sym.isModule => genEquals(sym)
@@ -853,10 +859,11 @@ trait ParallelMatching extends ast.TreeDSL
case ThisType(clazz) => THIS(clazz)
case pre => REF(pre.prefix, pre.termSymbol)
})
-
outerAccessor(tpe2test.typeSymbol) match {
case NoSymbol => ifDebug(cunit.warning(scrut.pos, "no outer acc for " + tpe2test.typeSymbol)) ; cond
- case outerAcc => cond AND (((scrut AS_ANY tpe2test) DOT outerAcc)() OBJ_EQ theRef)
+ case outerAcc =>
+ val casted = gen.mkAsInstanceOf(scrut, tpe2test, any = true, wrapInApply = true)
+ cond AND ((casted DOT outerAcc)() OBJ_EQ theRef)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
index bfca609..7b2fcf0 100644
--- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
+++ b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* Author: Paul Phillips
*/
@@ -7,8 +7,8 @@ package scala.tools.nsc
package matching
import transform.ExplicitOuter
-import collection.immutable.TreeMap
import PartialFunction._
+import scala.language.postfixOps
trait PatternBindings extends ast.TreeDSL
{
@@ -20,9 +20,10 @@ trait PatternBindings extends ast.TreeDSL
import Debug._
/** EqualsPattern **/
- def isEquals(tpe: Type) = cond(tpe) { case TypeRef(_, EqualsPatternClass, _) => true }
+ def isEquals(tpe: Type) = tpe.typeSymbol == EqualsPatternClass
def mkEqualsRef(tpe: Type) = typeRef(NoPrefix, EqualsPatternClass, List(tpe))
- def decodedEqualsType(tpe: Type) = condOpt(tpe) { case TypeRef(_, EqualsPatternClass, List(arg)) => arg } getOrElse (tpe)
+ def decodedEqualsType(tpe: Type) =
+ if (tpe.typeSymbol == EqualsPatternClass) tpe.typeArgs.head else tpe
// A subtype test which creates fresh existentials for type
// parameters on the right hand side.
@@ -37,7 +38,7 @@ trait PatternBindings extends ast.TreeDSL
case _ => newTree
}
- // used as argument to `EqualsPatternClass'
+ // used as argument to `EqualsPatternClass`
case class PseudoType(o: Tree) extends SimpleTypeProxy {
override def underlying: Type = o.tpe
override def safeToString: String = "PseudoType("+o+")"
@@ -133,4 +134,4 @@ trait PatternBindings extends ast.TreeDSL
}
val NoBinding: Bindings = new Bindings(Nil)
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala
index 3c629e5..ef41246 100644
--- a/src/compiler/scala/tools/nsc/matching/Patterns.scala
+++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* Author: Paul Phillips
*/
@@ -7,7 +7,6 @@ package scala.tools.nsc
package matching
import symtab.Flags
-import scala.reflect.NameTransformer.decode
import PartialFunction._
/** Patterns are wrappers for Trees with enhanced semantics.
@@ -22,21 +21,11 @@ trait Patterns extends ast.TreeDSL {
import definitions._
import CODE._
import Debug._
- import treeInfo.{ unbind, isStar, isVarPattern, isVariableName }
+ import treeInfo.{ unbind, isStar, isVarPattern }
type PatternMatch = MatchMatrix#PatternMatch
private type PatternVar = MatrixContext#PatternVar
- // private def unapplyArgs(x: Any) = x match {
- // case UnApply(Apply(TypeApply(_, targs), args), _) => (targs map (_.symbol), args map (_.symbol))
- // case _ => (Nil, Nil)
- // }
- //
- // private def unapplyCall(x: Any) = x match {
- // case UnApply(t, _) => treeInfo.methPart(t).symbol
- // case _ => NoSymbol
- // }
-
// Fresh patterns
def emptyPatterns(i: Int): List[Pattern] = List.fill(i)(NoPattern)
def emptyTrees(i: Int): List[Tree] = List.fill(i)(EmptyTree)
@@ -54,13 +43,14 @@ trait Patterns extends ast.TreeDSL {
case class VariablePattern(tree: Ident) extends NamePattern {
lazy val Ident(name) = tree
require(isVarPattern(tree) && name != nme.WILDCARD)
-
+ override def covers(sym: Symbol) = true
override def description = "%s".format(name)
}
// 8.1.1 (b)
case class WildcardPattern() extends Pattern {
def tree = EmptyTree
+ override def covers(sym: Symbol) = true
override def isDefault = true
override def description = "_"
}
@@ -69,6 +59,8 @@ trait Patterns extends ast.TreeDSL {
case class TypedPattern(tree: Typed) extends Pattern {
lazy val Typed(expr, tpt) = tree
+ override def covers(sym: Symbol) = newMatchesPattern(sym, tpt.tpe)
+ override def sufficientType = tpt.tpe
override def subpatternsForVars: List[Pattern] = List(Pattern(expr))
override def simplify(pv: PatternVar) = Pattern(expr) match {
case ExtractorPattern(ua) if pv.sym.tpe <:< tpt.tpe => this rebindTo expr
@@ -84,7 +76,7 @@ trait Patterns extends ast.TreeDSL {
def isSwitchable = cond(const.tag) { case ByteTag | ShortTag | IntTag | CharTag => true }
def intValue = const.intValue
override def description = {
- val s = if (value == null) "null" else value.toString()
+ val s = if (value == null) "null" else value.toString
"Lit(%s)".format(s)
}
}
@@ -113,6 +105,7 @@ trait Patterns extends ast.TreeDSL {
}
}
+ override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType)
override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
override def description = backticked match {
case Some(s) => "this." + s
@@ -131,13 +124,15 @@ trait Patterns extends ast.TreeDSL {
case class ObjectPattern(tree: Apply) extends ApplyPattern { // NamePattern?
require(!fn.isType && isModule)
+ override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType)
override def sufficientType = tpe.narrow
override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
override def description = "Obj(%s)".format(fn)
}
// 8.1.4 (e)
case class SimpleIdPattern(tree: Ident) extends NamePattern {
- lazy val Ident(name) = tree
+ val Ident(name) = tree
+ override def covers(sym: Symbol) = newMatchesPattern(sym, tpe.narrow)
override def description = "Id(%s)".format(name)
}
@@ -161,6 +156,11 @@ trait Patterns extends ast.TreeDSL {
if (args.isEmpty) this rebindToEmpty tree.tpe
else this
+ override def covers(sym: Symbol) = {
+ debugging("[constructor] Does " + this + " cover " + sym + " ? ") {
+ sym.tpe.typeSymbol == this.tpe.typeSymbol
+ }
+ }
override def description = {
if (isColonColon) "%s :: %s".format(Pattern(args(0)), Pattern(args(1)))
else "%s(%s)".format(name, toPats(args).mkString(", "))
@@ -173,17 +173,12 @@ trait Patterns extends ast.TreeDSL {
// 8.1.7 / 8.1.8 (unapply and unapplySeq calls)
case class ExtractorPattern(tree: UnApply) extends UnapplyPattern {
- override def simplify(pv: PatternVar) = {
- if (pv.sym hasFlag NO_EXHAUSTIVE) ()
- else {
- TRACE("Setting NO_EXHAUSTIVE on " + pv.sym + " due to extractor " + tree)
- pv.sym setFlag NO_EXHAUSTIVE
- }
+ private def uaTyped = Typed(tree, TypeTree(arg.tpe)) setType arg.tpe
+ override def simplify(pv: PatternVar) = {
if (pv.tpe <:< arg.tpe) this
else this rebindTo uaTyped
}
-
override def description = "Unapply(%s => %s)".format(necessaryType, resTypesString)
}
@@ -192,9 +187,9 @@ trait Patterns extends ast.TreeDSL {
// As yet I can't testify this is doing any good relative to using
// tpt.tpe, but it doesn't seem to hurt either.
private lazy val packedType = global.typer.computeType(tpt, tpt.tpe)
- private lazy val consRef = typeRef(NoPrefix, ConsClass, List(packedType))
- private lazy val listRef = typeRef(NoPrefix, ListClass, List(packedType))
- private lazy val seqRef = typeRef(NoPrefix, SeqClass, List(packedType))
+ private lazy val consRef = appliedType(ConsClass, packedType)
+ private lazy val listRef = appliedType(ListClass, packedType)
+ private lazy val seqRef = appliedType(SeqClass, packedType)
private def thisSeqRef = {
val tc = (tree.tpe baseType SeqClass).typeConstructor
@@ -206,7 +201,7 @@ trait Patterns extends ast.TreeDSL {
private def listFolder(hd: Tree, tl: Tree): Tree = unbind(hd) match {
case t @ Star(_) => moveBindings(hd, WILD(t.tpe))
case _ =>
- val dummyMethod = new TermSymbol(NoSymbol, NoPosition, "matching$dummy")
+ val dummyMethod = NoSymbol.newTermSymbol(newTermName("matching$dummy"))
val consType = MethodType(dummyMethod newSyntheticValueParams List(packedType, listRef), consRef)
Apply(TypeTree(consType), List(hd, tl)) setType consRef
@@ -271,8 +266,7 @@ trait Patterns extends ast.TreeDSL {
object Pattern {
// a small tree -> pattern cache
- private val cache = new collection.mutable.HashMap[Tree, Pattern]
- def clear() = cache.clear()
+ private val cache = perRunCaches.newMap[Tree, Pattern]()
def apply(tree: Tree): Pattern = {
if (cache contains tree)
@@ -317,7 +311,7 @@ trait Patterns extends ast.TreeDSL {
case UnApply(
Apply(TypeApply(Select(qual, nme.unapplySeq), List(tpt)), _),
List(ArrayValue(_, elems))) =>
- Some(qual.symbol, tpt, elems)
+ Some((qual.symbol, tpt, elems))
case _ =>
None
}
@@ -349,10 +343,9 @@ trait Patterns extends ast.TreeDSL {
def apply(x: Apply): Pattern = {
val Apply(fn, args) = x
def isModule = x.symbol.isModule || x.tpe.termSymbol.isModule
- def isTuple = isTupleTypeOrSubtype(fn.tpe)
if (fn.isType) {
- if (isTuple) TuplePattern(x)
+ if (isTupleType(fn.tpe)) TuplePattern(x)
else ConstructorPattern(x)
}
else if (args.isEmpty) {
@@ -373,10 +366,10 @@ trait Patterns extends ast.TreeDSL {
lazy val Select(qualifier, name) = select
def pathSegments = getPathSegments(tree)
def backticked: Option[String] = qualifier match {
- case _: This if isVariableName(name) => Some("`%s`".format(name))
+ case _: This if nme.isVariableName(name) => Some("`%s`".format(name))
case _ => None
}
-
+ override def covers(sym: Symbol) = newMatchesPattern(sym, tree.tpe)
protected def getPathSegments(t: Tree): List[Name] = t match {
case Select(q, name) => name :: getPathSegments(q)
case Apply(f, Nil) => getPathSegments(f)
@@ -388,14 +381,20 @@ trait Patterns extends ast.TreeDSL {
def name: Name
override def sufficientType = tpe.narrow
override def simplify(pv: PatternVar) = this.rebindToEqualsCheck()
- override def description = name.toString()
+ override def description = name.toString
}
sealed trait UnapplyPattern extends Pattern {
lazy val UnApply(unfn, args) = tree
lazy val Apply(fn, _) = unfn
lazy val MethodType(List(arg, _*), _) = fn.tpe
- protected def uaTyped = Typed(tree, TypeTree(arg.tpe)) setType arg.tpe
+
+ // Covers if the symbol matches the unapply method's argument type,
+ // and the return type of the unapply is Some.
+ override def covers(sym: Symbol) = newMatchesPattern(sym, arg.tpe)
+
+ // TODO: for alwaysCovers:
+ // fn.tpe.finalResultType.typeSymbol == SomeClass
override def necessaryType = arg.tpe
override def subpatternsForVars = args match {
@@ -403,7 +402,7 @@ trait Patterns extends ast.TreeDSL {
case _ => toPats(args)
}
- def resTypes = analyzer.unapplyTypeList(unfn.symbol, unfn.tpe)
+ def resTypes = analyzer.unapplyTypeList(unfn.pos, unfn.symbol, unfn.tpe, args)
def resTypesString = resTypes match {
case Nil => "Boolean"
case xs => xs.mkString(", ")
@@ -419,6 +418,7 @@ trait Patterns extends ast.TreeDSL {
else emptyPatterns(sufficientType.typeSymbol.caseFieldAccessors.size)
def isConstructorPattern = fn.isType
+ override def covers(sym: Symbol) = newMatchesPattern(sym, fn.tpe)
}
sealed abstract class Pattern extends PatternBindingLogic {
@@ -443,6 +443,15 @@ trait Patterns extends ast.TreeDSL {
// the subpatterns for this pattern (at the moment, that means constructor arguments)
def subpatterns(pm: MatchMatrix#PatternMatch): List[Pattern] = pm.dummies
+ // if this pattern should be considered to cover the given symbol
+ def covers(sym: Symbol): Boolean = newMatchesPattern(sym, sufficientType)
+ def newMatchesPattern(sym: Symbol, pattp: Type) = {
+ debugging("[" + kindString + "] Does " + pattp + " cover " + sym + " ? ") {
+ (sym.isModuleClass && (sym.tpe.typeSymbol eq pattp.typeSymbol)) ||
+ (sym.tpe.baseTypeSeq exists (_ matchesPattern pattp))
+ }
+ }
+
def sym = tree.symbol
def tpe = tree.tpe
def isEmpty = tree.isEmpty
@@ -470,11 +479,12 @@ trait Patterns extends ast.TreeDSL {
case _ => super.equals(other)
}
override def hashCode() = boundTree.hashCode()
- def description = super.toString()
+ def description = super.toString
- final override def toString() = description
+ final override def toString = description
def toTypeString() = "%s <: x <: %s".format(necessaryType, sufficientType)
+ def kindString = ""
}
/*** Extractors ***/
@@ -486,4 +496,4 @@ trait Patterns extends ast.TreeDSL {
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/package.scala b/src/compiler/scala/tools/nsc/package.scala
index 9b9cfe7..00a9f3b 100644
--- a/src/compiler/scala/tools/nsc/package.scala
+++ b/src/compiler/scala/tools/nsc/package.scala
@@ -1,13 +1,19 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools
package object nsc {
- @deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0")
- type InterpreterSettings = interpreter.ISettings
- @deprecated("Use a class in the scala.tools.nsc.interpreter package.", "2.9.0")
- val InterpreterResults = interpreter.Results
-}
\ No newline at end of file
+ type Phase = scala.reflect.internal.Phase
+ val NoPhase = scala.reflect.internal.NoPhase
+
+ type FatalError = scala.reflect.internal.FatalError
+ val FatalError = scala.reflect.internal.FatalError
+
+ type MissingRequirementError = scala.reflect.internal.MissingRequirementError
+ val MissingRequirementError = scala.reflect.internal.MissingRequirementError
+
+ val ListOfNil = List(Nil)
+}
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index bfc1c2d..2050ce7 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -1,12 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Lex Spoon
*/
package scala.tools.nsc
package plugins
-import io.{ File, Path }
+import io.{ File, Path, Jar }
import java.net.URLClassLoader
import java.util.jar.JarFile
import java.util.zip.ZipException
@@ -15,15 +15,13 @@ import scala.collection.mutable
import mutable.ListBuffer
import scala.xml.XML
-/** <p>
- * Information about a plugin loaded from a jar file.
- * </p>
- * <p>
- * The concrete subclass must have a one-argument constructor
- * that accepts an instance of <code>Global</code>.
- * </p><pre>
+/** Information about a plugin loaded from a jar file.
+ *
+ * The concrete subclass must have a one-argument constructor
+ * that accepts an instance of `global`.
+ * {{{
* (val global: Global)
- * </pre>
+ * }}}
*
* @author Lex Spoon
* @version 1.0, 2007-5-21
@@ -42,7 +40,7 @@ abstract class Plugin {
* to a constructor parameter in the concrete subclass. */
val global: Global
- /** Handle any plugin-specific options. The -P:plugname: part
+ /** Handle any plugin-specific options. The `-P:plugname:` part
* will not be present. */
def processOptions(options: List[String], error: String => Unit) {
if (!options.isEmpty)
@@ -50,9 +48,8 @@ abstract class Plugin {
}
/** A description of this plugin's options, suitable as a response
- * to the -help command-line option. Conventionally, the
- * options should be listed with the <code>-P:plugname:</code>
- * part included.
+ * to the -help command-line option. Conventionally, the options
+ * should be listed with the `-P:plugname:` part included.
*/
val optionsHelp: Option[String] = None
}
@@ -105,9 +102,9 @@ object Plugin {
type AnyClass = Class[_]
/** Loads a plugin class from the named jar file.
-
- * @return <code>None</code> if the jar file has no plugin in it or
- * if the plugin is badly formed.
+ *
+ * @return `None` if the jar file has no plugin in it or
+ * if the plugin is badly formed.
*/
def loadFrom(jarfile: Path, loader: ClassLoader): Option[AnyClass] =
loadDescription(jarfile) match {
@@ -124,7 +121,7 @@ object Plugin {
/** Load all plugins found in the argument list, both in the
* jar files explicitly listed, and in the jar files in the
- * directories specified. Skips all plugins in <code>ignoring</code>.
+ * directories specified. Skips all plugins in `ignoring`.
* A single classloader is created and used to load all of them.
*/
def loadAllFrom(
@@ -135,13 +132,14 @@ object Plugin {
val alljars = (jars ::: (for {
dir <- dirs if dir.isDirectory
entry <- dir.toDirectory.files.toList sortBy (_.name)
- if Path.isJarOrZip(entry)
+// was: if Path.isJarOrZip(entry)
+ if Jar.isJarOrZip(entry)
pdesc <- loadDescription(entry)
if !(ignoring contains pdesc.name)
} yield entry)).distinct
val loader = loaderFor(alljars)
- alljars map (loadFrom(_, loader)) flatten
+ (alljars map (loadFrom(_, loader))).flatten
}
/** Instantiate a plugin class, given the class and
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
index 3c683d5..4d98b25 100644
--- a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
+++ b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Lex Spoon
* Updated by Anders Bach Nielsen
*/
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
index a3e6a84..bd56740 100644
--- a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
+++ b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
@@ -1,13 +1,11 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Lex Spoon
*/
package scala.tools.nsc
package plugins
-import java.io.File
-
import scala.xml.{Node,NodeSeq}
/** A description of a compiler plugin, suitable for serialization
@@ -29,7 +27,7 @@ abstract class PluginDescription {
/** An XML representation of this description. It can be
* read back using <code>PluginDescription.fromXML</code>.
- * It should be stored inside the jar.
+ * It should be stored inside the jar archive file.
*/
def toXML: Node = {
<plugin>
@@ -39,21 +37,20 @@ abstract class PluginDescription {
}
}
-
/** Utilities for the PluginDescription class.
*
* @author Lex Spoon
* @version 1.0, 2007-5-21
*/
object PluginDescription {
+
def fromXML(xml: Node): Option[PluginDescription] = {
// check the top-level tag
xml match {
case <plugin>{_*}</plugin> => ()
case _ => return None
}
-
- /** Extract one field */
+ // extract one field
def getField(field: String): Option[String] = {
val text = (xml \\ field).text.trim
if (text == "") None else Some(text)
@@ -74,4 +71,5 @@ object PluginDescription {
val classname = classname1
})
}
+
}
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala b/src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala
index 77c8d44..c5da249 100644
--- a/src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala
+++ b/src/compiler/scala/tools/nsc/plugins/PluginLoadException.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Lex Spoon
*/
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
index 36227c1..736bd82 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Lex Spoon
* Updated by Anders Bach Nielsen
*/
@@ -28,7 +28,7 @@ trait Plugins {
val dirs = (settings.pluginsDir.value split File.pathSeparator).toList map Path.apply
val classes = Plugin.loadAllFrom(jars, dirs, settings.disable.value)
- // Lach plugin must only be instantiated once. A common pattern
+ // Each plugin must only be instantiated once. A common pattern
// is to register annotation checkers during object construction, so
// creating multiple plugin instances will leave behind stale checkers.
classes map (Plugin.instantiate(_, this))
@@ -70,7 +70,7 @@ trait Plugins {
}
}
- val plugs = pick(roughPluginsList, Set(), phasesSet map (_.phaseName) toSet)
+ val plugs = pick(roughPluginsList, Set(), (phasesSet map (_.phaseName)).toSet)
/** Verify requirements are present. */
for (req <- settings.require.value ; if !(plugs exists (_.name == req)))
@@ -112,5 +112,5 @@ trait Plugins {
def pluginOptionsHelp: String =
(for (plug <- roughPluginsList ; help <- plug.optionsHelp) yield {
"\nOptions for plugin '%s':\n%s\n".format(plug.name, help)
- }) mkString
+ }).mkString
}
diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
index 5127c2e..c7ee11d 100644
--- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
@@ -1,14 +1,14 @@
/* NSC -- new Scala compiler
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package reporters
-import scala.collection.mutable.HashMap
+import scala.collection.mutable
import scala.tools.nsc.Settings
-import scala.tools.nsc.util.Position
+import scala.reflect.internal.util.Position
/**
* This reporter implements filtering.
@@ -18,7 +18,7 @@ abstract class AbstractReporter extends Reporter {
def display(pos: Position, msg: String, severity: Severity): Unit
def displayPrompt(): Unit
- private val positions = new HashMap[Position, Severity]
+ private val positions = new mutable.HashMap[Position, Severity]
override def reset() {
super.reset
@@ -35,15 +35,25 @@ abstract class AbstractReporter extends Reporter {
else _severity
if (severity == INFO) {
- if (isVerbose || force)
+ if (isVerbose || force) {
+ severity.count += 1
display(pos, msg, severity)
+ }
}
else {
val hidden = testAndLog(pos, severity)
if (severity == WARNING && noWarnings) ()
else {
- if (!hidden || isPromptSet) display(pos, msg, severity)
- if (isPromptSet) displayPrompt
+ if (!hidden || isPromptSet) {
+ severity.count += 1
+ display(pos, msg, severity)
+ } else if (settings.debug.value) {
+ severity.count += 1
+ display(pos, "[ suppressed ] " + msg, severity)
+ }
+
+ if (isPromptSet)
+ displayPrompt
}
}
}
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index a6e661c..e847fb5 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -7,8 +7,7 @@ package scala.tools.nsc
package reporters
import java.io.{ BufferedReader, IOException, PrintWriter }
-import util._
-import scala.tools.util.StringOps.countElementsAsString
+import scala.reflect.internal.util._
/**
* This class implements a Reporter that displays messages on a text
@@ -40,27 +39,14 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
* @return ...
*/
private def getCountString(severity: Severity): String =
- countElementsAsString((severity).count, label(severity))
+ StringOps.countElementsAsString((severity).count, label(severity))
/** Prints the message. */
def printMessage(msg: String) { writer.print(msg + "\n"); writer.flush() }
/** Prints the message with the given position indication. */
def printMessage(posIn: Position, msg: String) {
- val pos = if (posIn eq null) NoPosition
- else if (posIn.isDefined) posIn.inUltimateSource(posIn.source)
- else posIn
- pos match {
- case FakePos(fmsg) =>
- printMessage(fmsg+" "+msg)
- case NoPosition =>
- printMessage(msg)
- case _ =>
- val buf = new StringBuilder(msg)
- val file = pos.source.file
- printMessage((if (shortname) file.name else file.path)+":"+pos.line+": "+msg)
- printSourceLine(pos)
- }
+ printMessage(Position.formatMessage(posIn, msg, shortname))
}
def print(pos: Position, msg: String, severity: Severity) {
printMessage(pos, clabel(severity) + msg)
@@ -88,32 +74,25 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
}
def display(pos: Position, msg: String, severity: Severity) {
- severity.count += 1
if (severity != ERROR || severity.count <= ERROR_LIMIT)
print(pos, msg, severity)
}
- def displayPrompt(): Unit = try {
- var continue = true
- while (continue) {
- writer.print("r)esume, a)bort: ")
- writer.flush()
- var line = reader.readLine()
- if (line ne null) {
- line = line.toLowerCase()
- if ("abort" startsWith line)
- abort("user abort")
- if ("resume" startsWith line)
- continue = false
+ def displayPrompt(): Unit = {
+ writer.print("\na)bort, s)tack, r)esume: ")
+ writer.flush()
+ if (reader != null) {
+ val response = reader.read().asInstanceOf[Char].toLower
+ if (response == 'a' || response == 's') {
+ (new Exception).printStackTrace()
+ if (response == 'a')
+ sys exit 1
+
+ writer.print("\n")
+ writer.flush()
}
}
}
- catch {
- case ex: IOException => {
- ex.printStackTrace()
- abort("input read error")
- }
- }
private def abort(msg: String) = throw new Error(msg)
override def flush() { writer.flush() }
diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
index d08e928..8871ae6 100644
--- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
@@ -1,14 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package reporters
-import scala.tools.nsc.util._
-import scala.tools.util.StringOps
-import StringOps._
+import scala.reflect.internal.util._
+import scala.reflect.internal.util.StringOps._
/**
* This interface provides methods to issue information, warning and
@@ -21,9 +20,15 @@ abstract class Reporter {
class Severity(val id: Int) extends severity.Value {
var count: Int = 0
}
- val INFO = new Severity(0)
- val WARNING = new Severity(1)
- val ERROR = new Severity(2)
+ val INFO = new Severity(0) {
+ override def toString: String = "INFO"
+ }
+ val WARNING = new Severity(1) {
+ override def toString: String = "WARNING"
+ }
+ val ERROR = new Severity(2) {
+ override def toString: String = "ERROR"
+ }
/** Whether very long lines can be truncated. This exists so important
* debugging information (like printing the classpath) is not rendered
@@ -47,14 +52,23 @@ abstract class Reporter {
finally incompleteHandler = saved
}
- var cancelled = false
- def hasErrors = ERROR.count > 0 || cancelled
- def hasWarnings = WARNING.count > 0
+ var cancelled = false
+ def hasErrors = ERROR.count > 0 || cancelled
+ def hasWarnings = WARNING.count > 0
+
+ /** For sending a message which should not be labeled as a warning/error,
+ * but also shouldn't require -verbose to be visible.
+ */
+ def echo(msg: String): Unit = info(NoPosition, msg, true)
+ def echo(pos: Position, msg: String): Unit = info(pos, msg, true)
+
+ /** Informational messages, suppressed unless -verbose or force=true. */
+ def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force)
- def info(pos: Position, msg: String, force: Boolean) { info0(pos, msg, INFO, force) }
- def warning(pos: Position, msg: String ) { info0(pos, msg, WARNING, false) }
- def error(pos: Position, msg: String ) { info0(pos, msg, ERROR, false) }
- def incompleteInputError(pos: Position, msg: String ) {
+ /** Warnings and errors. */
+ def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, false))
+ def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, false))
+ def incompleteInputError(pos: Position, msg: String): Unit = {
if (incompleteHandled) incompleteHandler(pos, msg)
else error(pos, msg)
}
@@ -69,8 +83,8 @@ abstract class Reporter {
}
// sbt compat
- @deprecated("Moved to scala.tools.util.StringOps", "2.10.0")
+ @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
def countElementsAsString(n: Int, elements: String): String = StringOps.countElementsAsString(n, elements)
- @deprecated("Moved to scala.tools.util.StringOps", "2.10.0")
+ @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
def countAsString(n: Int): String = StringOps.countAsString(n)
}
diff --git a/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala b/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala
deleted file mode 100644
index 800af55..0000000
--- a/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2002-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package reporters
-
-import scala.tools.util.AbstractTimer
-
-/**
- * This class implements a timer that uses a Reporter to issue
- * timings.
- */
-class ReporterTimer(reporter: Reporter) extends AbstractTimer {
-
- def issue(msg: String, duration: Long) =
- reporter.info(null, "[" + msg + " in " + duration + "ms]", false)
-
-}
diff --git a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
index 48601c3..34e2a8a 100644
--- a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -7,7 +7,7 @@ package scala.tools.nsc
package reporters
import scala.collection.mutable
-import scala.tools.nsc.util.Position
+import scala.reflect.internal.util.Position
/**
* This class implements a Reporter that displays messages on a text
@@ -17,7 +17,7 @@ class StoreReporter extends Reporter {
class Info(val pos: Position, val msg: String, val severity: Severity) {
override def toString() = "pos: " + pos + " " + msg + " " + severity
}
- val infos = new mutable.HashSet[Info]
+ val infos = new mutable.LinkedHashSet[Info]
protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean) {
if (!force) {
infos += new Info(pos, msg, severity)
diff --git a/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala b/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
new file mode 100644
index 0000000..10e9982
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
@@ -0,0 +1,102 @@
+package scala.tools.nsc.scratchpad
+
+import java.io.{FileInputStream, InputStreamReader, IOException}
+
+import scala.runtime.ScalaRunTime.stringOf
+import java.lang.reflect.InvocationTargetException
+import scala.reflect.runtime.ReflectionUtils._
+import scala.collection.mutable.ArrayBuffer
+
+ at deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
+class Mixer {
+
+ protected val stdSeparator = "//> "
+ protected val ctdSeparator = "//| "
+ protected val sepColumn = 50
+ protected val tabInc = 8
+
+ type Comments = Seq[(Int, Array[Char])]
+
+ def parseComments(comments: Array[Char]): Iterator[(Int, Array[Char])] = new Iterator[(Int, Array[Char])] {
+ var idx = 0
+ def hasNext = idx < comments.length
+ def next() = {
+ val nextSpace = comments indexOf (' ', idx)
+ var nextNL = comments indexOf ('\n', nextSpace + 1)
+ if (nextNL < 0) nextNL = comments.length
+ val result =
+ (new String(comments.slice(idx, nextSpace)).toInt, comments.slice(nextSpace + 1, nextNL))
+ idx = nextNL + 1
+ result
+ }
+ }
+
+ def mix(source: Array[Char], comments: Array[Char]): Array[Char] = {
+ val mixed = new ArrayBuffer[Char]
+ var written = 0
+ def align() = {
+ var idx = mixed.lastIndexOf('\n') + 1
+ var col = 0
+ while (idx < mixed.length) {
+ col =
+ if (mixed(idx) == '\t') (col / tabInc) * tabInc + tabInc
+ else col + 1
+ idx += 1
+ }
+ if (col > sepColumn) {
+ mixed += '\n'
+ col = 0
+ }
+ while (col < sepColumn) {
+ mixed += ' '
+ col += 1
+ }
+ }
+ for ((offset, cs) <- parseComments(comments)) {
+ val sep =
+ if (written < offset) {
+ for (i <- written until offset) mixed += source(i)
+ written = offset
+ stdSeparator
+ } else {
+ mixed += '\n'
+ ctdSeparator
+ }
+ align()
+ mixed ++= sep ++= cs
+ }
+ mixed ++= source.view(written, source.length)
+ mixed.toArray
+ }
+
+}
+
+object Mixer extends Mixer {
+
+ def contents(name: String): Array[Char] = {
+ val page = new Array[Char](2 << 14)
+ val buf = new ArrayBuffer[Char]
+ val in = new FileInputStream(name)
+ val rdr = new InputStreamReader(in)
+ var nread = 0
+ do {
+ nread = rdr.read(page, 0, page.length)
+ buf ++= (if (nread == page.length) page else page.take(nread))
+ } while (nread >= 0)
+ buf.toArray
+ }
+
+ def main(args: Array[String]) {
+ val mixer = new Mixer
+ try {
+ require(args.length == 2, "required arguments: file1 file2")
+ val source = contents(args(0))
+ val comments = contents(args(1))
+ val mixed = mixer.mix(source, comments)
+ println(mixed.mkString)
+ } catch {
+ case ex: IOException =>
+ println("error: "+ ex.getMessage)
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala b/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
new file mode 100644
index 0000000..01dccd7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
@@ -0,0 +1,23 @@
+package scala.tools.nsc
+package scratchpad
+
+import java.io.Writer
+import scala.reflect.internal.util.SourceFile
+import scala.reflect.internal.Chars._
+
+ at deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
+object SourceInserter {
+ def stripRight(cs: Array[Char]): Array[Char] = {
+ val lines =
+ new String(cs) split "\n"
+ def leftPart(str: String) =
+ (str split """//>|//\|""").head
+ def isContinuation(str: String) =
+ ((str contains "//>") || (str contains "//|")) && (leftPart(str) forall isWhitespace)
+ def stripTrailingWS(str: String) =
+ str take (str lastIndexWhere (!isWhitespace(_))) + 1
+ val prefixes =
+ lines filterNot isContinuation map leftPart map stripTrailingWS
+ (prefixes mkString "\n").toArray
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
index be2f8a3..783e249 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -29,7 +29,7 @@ trait AbsScalaSettings {
def MultiStringSetting(name: String, helpArg: String, descr: String): MultiStringSetting
def OutputSetting(outputDirs: OutputDirs, default: String): OutputSetting
def PathSetting(name: String, descr: String, default: String): PathSetting
- def PhasesSetting(name: String, descr: String): PhasesSetting
+ def PhasesSetting(name: String, descr: String, default: String): PhasesSetting
def StringSetting(name: String, helpArg: String, descr: String, default: String): StringSetting
def PrefixSetting(name: String, prefix: String, descr: String): PrefixSetting
diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
index 82e3110..adabeb0 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
@@ -1,23 +1,21 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
package settings
-import io.AbstractFile
-
/** A Settings abstraction boiled out of the original highly mutable Settings
* class with the intention of creating an ImmutableSettings which can be used
* interchangeably. Except of course without the mutants.
*/
-trait AbsSettings {
+trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
type Setting <: AbsSetting // Fix to the concrete Setting type
type ResultOfTryToSet // List[String] in mutable, (Settings, List[String]) in immutable
def errorFn: String => Unit
- protected def allSettings: collection.Set[Setting]
+ protected def allSettings: scala.collection.Set[Setting]
// settings minus internal usage settings
def visibleSettings = allSettings filterNot (_.isInternalOnly)
@@ -32,12 +30,12 @@ trait AbsSettings {
def lookupSetting(cmd: String): Option[Setting] = allSettings find (_ respondsTo cmd)
// two AbsSettings objects are equal if their visible settings are equal.
- override def hashCode() = visibleSettings.hashCode
+ override def hashCode() = visibleSettings.size // going for cheap
override def equals(that: Any) = that match {
case s: AbsSettings => this.userSetSettings == s.userSetSettings
case _ => false
}
- override def toString() = "Settings {\n%s}\n" format (userSetSettings map (" " + _ + "\n") mkString)
+ override def toString() = "Settings {\n%s}\n" format (userSetSettings map (" " + _ + "\n")).mkString
def toConciseString = userSetSettings.mkString("(", " ", ")")
def checkDependencies =
@@ -51,12 +49,6 @@ trait AbsSettings {
implicit lazy val SettingOrdering: Ordering[Setting] = Ordering.ordered
- trait AbsSettingValue {
- type T <: Any
- def value: T
- def isDefault: Boolean
- }
-
trait AbsSetting extends Ordered[Setting] with AbsSettingValue {
def name: String
def helpDescription: String
@@ -140,8 +132,8 @@ trait AbsSettings {
case x: AbsSettings#AbsSetting => (name == x.name) && (value == x.value)
case _ => false
}
- override def hashCode() = (name, value).hashCode
- override def toString() = "%s = %s".format(name, value)
+ override def hashCode() = name.hashCode + value.hashCode
+ override def toString() = name + " = " + value
}
trait InternalSetting extends AbsSetting {
diff --git a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala
index 8f38470..0bec113 100644
--- a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -20,6 +20,7 @@ trait AdvancedScalaSettings {
val future: BooleanSetting
val generatephasegraph: StringSetting
val logimplicits: BooleanSetting
+ val mainClass: StringSetting
val migration: BooleanSetting
val noforwarders: BooleanSetting
val nojline: BooleanSetting
diff --git a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala b/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
index 136e03d..da2c89d 100644
--- a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -23,12 +23,14 @@ trait AestheticSettings {
def deprecation = settings.deprecation.value
def experimental = settings.Xexperimental.value
def fatalWarnings = settings.fatalWarnings.value
+ def feature = settings.feature.value
+ def future = settings.future.value
def logClasspath = settings.Ylogcp.value
def printStats = settings.Ystatistics.value
- def richExes = settings.YrichExes.value || sys.props.traceSourcePath.isSet
def target = settings.target.value
def unchecked = settings.unchecked.value
def verbose = settings.verbose.value
+ def virtPatmat = !settings.XoldPatmat.value
/** Derived values */
def jvm = target startsWith "jvm"
diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
index bb62750..5c852ae 100644
--- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -24,7 +24,7 @@ class FscSettings(error: String => Unit) extends Settings(error) {
val server = StringSetting ("-server", "hostname:portnumber", "Specify compile server socket", "")
val preferIPv4 = BooleanSetting("-ipv4", "Use IPv4 rather than IPv6 for the server socket")
val idleMins = IntSetting ("-max-idle", "Set idle timeout in minutes for fsc (use 0 for no timeout)",
- 30, Some(0, Int.MaxValue), (_: String) => None)
+ 30, Some((0, Int.MaxValue)), (_: String) => None)
// For improved help output, separating fsc options from the others.
def fscSpecific = Set[Settings#Setting](
@@ -38,14 +38,25 @@ class FscSettings(error: String => Unit) extends Settings(error) {
private def holdsPath = Set[Settings#Setting](
d, dependencyfile, pluginsDir, Ygenjavap
)
+
+ override def processArguments(arguments: List[String], processAll: Boolean): (Boolean, List[String]) = {
+ val (r, args) = super.processArguments(arguments, processAll)
+ // we need to ensure the files specified with relative locations are absolutized based on the currentDir
+ (r, args map {a => absolutizePath(a)})
+ }
+
+ /**
+ * Take an individual path and if it's not absolute turns it into an absolute path based on currentDir.
+ * If it's already absolute then it's left alone.
+ */
+ private[this] def absolutizePath(p: String) = (Path(currentDir.value) resolve Path(p)).normalize.path
- /** All user set settings rewritten with absolute paths. */
- def absolutize(root: Path) {
- def rewrite(p: String) = (root resolve Path(p)).normalize.path
+ /** All user set settings rewritten with absolute paths based on currentDir */
+ def absolutize() {
userSetSettings foreach {
- case p: OutputSetting => p.outputDirs setSingleOutput AbstractFile.getDirectory(rewrite(p.value))
- case p: PathSetting => p.value = ClassPath.map(p.value, rewrite)
- case p: StringSetting => if (holdsPath(p)) p.value = rewrite(p.value)
+ case p: OutputSetting => p.outputDirs setSingleOutput AbstractFile.getDirectory(absolutizePath(p.value))
+ case p: PathSetting => p.value = ClassPath.map(p.value, absolutizePath)
+ case p: StringSetting => if (holdsPath(p)) p.value = absolutizePath(p.value)
case _ => ()
}
}
diff --git a/src/compiler/scala/tools/nsc/settings/ImmutableSettings.scala b/src/compiler/scala/tools/nsc/settings/ImmutableSettings.scala
deleted file mode 100644
index 4de0c23..0000000
--- a/src/compiler/scala/tools/nsc/settings/ImmutableSettings.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package settings
-
-/** TODO.
- */
-class ImmutableSettings
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index 5c2388d..e4f9947 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
@@ -8,16 +8,38 @@ package scala.tools
package nsc
package settings
-import io.{ AbstractFile, Path, PlainFile, VirtualDirectory }
-import scala.tools.util.StringOps
+import io.{ AbstractFile, Jar, Path, PlainFile, VirtualDirectory }
+import scala.reflect.internal.util.StringOps
import scala.collection.mutable.ListBuffer
import scala.io.Source
+import scala.reflect.{ ClassTag, classTag }
/** A mutable Settings object.
*/
-class MutableSettings(val errorFn: String => Unit) extends AbsSettings with ScalaSettings with Mutable {
+class MutableSettings(val errorFn: String => Unit)
+ extends scala.reflect.internal.settings.MutableSettings
+ with AbsSettings
+ with ScalaSettings
+ with Mutable {
type ResultOfTryToSet = List[String]
+ def withErrorFn(errorFn: String => Unit): MutableSettings = {
+ val settings = new MutableSettings(errorFn)
+ copyInto(settings)
+ settings
+ }
+
+ def copyInto(settings: MutableSettings) {
+ allSettings foreach { thisSetting =>
+ val otherSetting = settings.allSettings find { _.name == thisSetting.name }
+ otherSetting foreach { otherSetting =>
+ if (thisSetting.isSetByUser || otherSetting.isSetByUser) {
+ otherSetting.value = thisSetting.value.asInstanceOf[otherSetting.T]
+ }
+ }
+ }
+ }
+
/** Iterates over the arguments applying them to settings where applicable.
* Then verifies setting dependencies are met.
*
@@ -46,7 +68,7 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
if (isOpt) {
val newArgs = parseParams(args)
if (args eq newArgs) {
- errorFn("bad option: '" + x + "'")
+ errorFn(s"bad option: '$x'")
(false, args)
}
// discard empties, sometimes they appear because of ant or etc.
@@ -74,8 +96,7 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
*/
def copy(): Settings = {
val s = new Settings()
- val xs = userSetSettings flatMap (_.unparse)
- s.processArguments(xs.toList, true)
+ s.processArguments(recreateArgs, true)
s
}
@@ -116,7 +137,7 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
val (p, args) = StringOps.splitWhere(s, _ == ':', true) getOrElse (return None)
// any non-Nil return value means failure and we return s unmodified
- tryToSetIfExists(p, args split "," toList, (s: Setting) => s.tryToSetColon _)
+ tryToSetIfExists(p, (args split ",").toList, (s: Setting) => s.tryToSetColon _)
}
// if arg is of form -Xfoo or -Xfoo bar (name = "-Xfoo")
@@ -163,8 +184,8 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
* The class loader defining `T` should provide resources `app.class.path`
* and `boot.class.path`. These resources should contain the application
* and boot classpaths in the same form as would be passed on the command line.*/
- def embeddedDefaults[T: Manifest]: Unit =
- embeddedDefaults(implicitly[Manifest[T]].erasure.getClassLoader)
+ def embeddedDefaults[T: ClassTag]: Unit =
+ embeddedDefaults(classTag[T].runtimeClass.getClassLoader)
/** Initializes these settings for embedded use by a class from the given class loader.
* The class loader for `T` should provide resources `app.class.path`
@@ -198,8 +219,9 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
def IntSetting(name: String, descr: String, default: Int, range: Option[(Int, Int)], parser: String => Option[Int]) = add(new IntSetting(name, descr, default, range, parser))
def MultiStringSetting(name: String, arg: String, descr: String) = add(new MultiStringSetting(name, arg, descr))
def OutputSetting(outputDirs: OutputDirs, default: String) = add(new OutputSetting(outputDirs, default))
- def PhasesSetting(name: String, descr: String) = add(new PhasesSetting(name, descr))
+ def PhasesSetting(name: String, descr: String, default: String = "") = add(new PhasesSetting(name, descr, default))
def StringSetting(name: String, arg: String, descr: String, default: String) = add(new StringSetting(name, arg, descr, default))
+ def ScalaVersionSetting(name: String, arg: String, descr: String, default: ScalaVersion) = add(new ScalaVersionSetting(name, arg, descr, default))
def PathSetting(name: String, descr: String, default: String): PathSetting = {
val prepend = StringSetting(name + "/p", "", "", "").internalOnly()
val append = StringSetting(name + "/a", "", "", "").internalOnly()
@@ -208,21 +230,6 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
}
def PrefixSetting(name: String, prefix: String, descr: String): PrefixSetting = add(new PrefixSetting(name, prefix, descr))
- // basically this is a value which remembers if it's been modified
- trait SettingValue extends AbsSettingValue {
- protected var v: T
- protected var setByUser: Boolean = false
- def postSetHook(): Unit
-
- def isDefault: Boolean = !setByUser
- def value: T = v
- def value_=(arg: T) = {
- setByUser = true
- v = arg
- postSetHook()
- }
- }
-
/** A class for holding mappings from source directories to
* their output location. This functionality can be accessed
* only programmatically. The command line compiler uses a
@@ -249,7 +256,8 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
private def checkDir(dir: AbstractFile, name: String, allowJar: Boolean = false): AbstractFile = (
if (dir != null && dir.isDirectory)
dir
- else if (allowJar && dir == null && Path.isJarOrZip(name, false))
+// was: else if (allowJar && dir == null && Path.isJarOrZip(name, false))
+ else if (allowJar && dir == null && Jar.isJarOrZip(name, false))
new PlainFile(Path(name))
else
throw new FatalError(name + " does not exist or is not a directory")
@@ -274,7 +282,7 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
def add(src: AbstractFile, dst: AbstractFile) {
singleOutDir = None
- outputDirs ::= (src, dst)
+ outputDirs ::= ((src, dst))
}
/** Return the list of source-destination directory pairs. */
@@ -335,12 +343,12 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
}
/** A base class for settings of all types.
- * Subclasses each define a `value' field of the appropriate type.
+ * Subclasses each define a `value` field of the appropriate type.
*/
abstract class Setting(val name: String, val helpDescription: String) extends AbsSetting with SettingValue with Mutable {
/** Will be called after this Setting is set for any extra work. */
private var _postSetHook: this.type => Unit = (x: this.type) => ()
- def postSetHook(): Unit = _postSetHook(this)
+ override def postSetHook(): Unit = _postSetHook(this)
def withPostSetHook(f: this.type => Unit): this.type = { _postSetHook = f ; this }
/** The syntax defining this setting in a help string */
@@ -372,7 +380,8 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
parser: String => Option[Int])
extends Setting(name, descr) {
type T = Int
- protected var v = default
+ protected var v: Int = default
+ override def value: Int = v
// not stable values!
val IntMin = Int.MinValue
@@ -429,7 +438,8 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
descr: String)
extends Setting(name, descr) {
type T = Boolean
- protected var v = false
+ protected var v: Boolean = false
+ override def value: Boolean = v
def tryToSet(args: List[String]) = { value = true ; Some(args) }
def unparse: List[String] = if (value) List(name) else Nil
@@ -445,7 +455,7 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
descr: String)
extends Setting(name, descr) {
type T = List[String]
- protected var v: List[String] = Nil
+ protected var v: T = Nil
def tryToSet(args: List[String]) = args match {
case x :: xs if x startsWith prefix =>
@@ -458,7 +468,7 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
def unparse: List[String] = value
}
- /** A setting represented by a string, (`default' unless set) */
+ /** A setting represented by a string, (`default` unless set) */
class StringSetting private[nsc](
name: String,
val arg: String,
@@ -466,7 +476,7 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
val default: String)
extends Setting(name, descr) {
type T = String
- protected var v = default
+ protected var v: T = default
def tryToSet(args: List[String]) = args match {
case Nil => errorAndValue("missing argument", None)
@@ -477,6 +487,35 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
withHelpSyntax(name + " <" + arg + ">")
}
+ /** A setting represented by a Scala version, (`default` unless set) */
+ class ScalaVersionSetting private[nsc](
+ name: String,
+ val arg: String,
+ descr: String,
+ default: ScalaVersion)
+ extends Setting(name, descr) {
+ import ScalaVersion._
+
+ type T = ScalaVersion
+ protected var v: T = NoScalaVersion
+
+ override def tryToSet(args: List[String]) = {
+ value = default
+ Some(args)
+ }
+
+ override def tryToSetColon(args: List[String]) = args match {
+ case Nil => value = default; Some(Nil)
+ case x :: xs => value = ScalaVersion(x, errorFn) ; Some(xs)
+ }
+
+ override def tryToSetFromPropertyValue(s: String) = tryToSet(List(s))
+
+ def unparse: List[String] = if (value == NoScalaVersion) Nil else List(s"${name}:${value.unparse}")
+
+ withHelpSyntax(s"${name}:<${arg}>")
+ }
+
class PathSetting private[nsc](
name: String,
descr: String,
@@ -516,7 +555,7 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
descr: String)
extends Setting(name, descr) {
type T = List[String]
- protected var v: List[String] = Nil
+ protected var v: T = Nil
def appendToValue(str: String) { value ++= List(str) }
def tryToSet(args: List[String]) = {
@@ -527,7 +566,7 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
}
override def tryToSetColon(args: List[String]) = tryToSet(args)
override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList)
- def unparse: List[String] = value map { name + ":" + _ }
+ def unparse: List[String] = value map (name + ":" + _)
withHelpSyntax(name + ":<" + arg + ">")
}
@@ -543,7 +582,7 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
val default: String)
extends Setting(name, descr + choices.mkString(" (", ",", ") default:" + default)) {
type T = String
- protected var v: String = default
+ protected var v: T = default
def indexOfChoice: Int = choices indexOf value
private def usageErrorMessage = {
@@ -565,6 +604,12 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
withHelpSyntax(name + ":<" + helpArg + ">")
}
+ private def mkPhasesHelp(descr: String, default: String) = {
+ descr + " <phases>" + (
+ if (default == "") "" else " (default: " + default + ")"
+ )
+ }
+
/** A setting represented by a list of strings which should be prefixes of
* phase names. This is not checked here, however. Alternatively the string
* `"all"` can be used to represent all phases.
@@ -572,10 +617,13 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
*/
class PhasesSetting private[nsc](
name: String,
- descr: String)
- extends Setting(name, descr + " <phase>.") {
+ descr: String,
+ default: String
+ ) extends Setting(name, mkPhasesHelp(descr, default)) {
+ private[nsc] def this(name: String, descr: String) = this(name, descr, "")
+
type T = List[String]
- protected var v: List[String] = Nil
+ protected var v: T = Nil
override def value = if (v contains "all") List("all") else super.value
private lazy val (numericValues, stringValues) =
value filterNot (_ == "" ) partition (_ forall (ch => ch.isDigit || ch == '-'))
@@ -601,9 +649,12 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
case fns => fns.reduceLeft((f1, f2) => id => f1(id) || f2(id))
}
- def tryToSet(args: List[String]) = errorAndValue("missing phase", None)
+ def tryToSet(args: List[String]) =
+ if (default == "") errorAndValue("missing phase", None)
+ else { tryToSetColon(List(default)) ; Some(args) }
+
override def tryToSetColon(args: List[String]) = args match {
- case Nil => errorAndValue("missing phase", None)
+ case Nil => if (default == "") errorAndValue("missing phase", None) else tryToSetColon(List(default))
case xs => value = (value ++ xs).distinct.sorted ; Some(Nil)
}
// we slightly abuse the usual meaning of "contains" here by returning
@@ -616,6 +667,9 @@ class MutableSettings(val errorFn: String => Unit) extends AbsSettings with Scal
def doAllPhases = stringValues contains "all"
def unparse: List[String] = value map (name + ":" + _)
- withHelpSyntax(name + ":<phase>")
+ withHelpSyntax(
+ if (default == "") name + ":<phases>"
+ else name + "[:phases]"
+ )
}
}
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 22721d9..dbfaa2c 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
@@ -8,19 +8,44 @@ package scala.tools
package nsc
package settings
-import annotation.elidable
+import scala.annotation.elidable
import scala.tools.util.PathResolver.Defaults
-import scala.collection.mutable.HashSet
+import scala.collection.mutable
+import scala.language.{implicitConversions, existentials}
trait ScalaSettings extends AbsScalaSettings
with StandardScalaSettings
with Warnings {
self: MutableSettings =>
- import Defaults.scalaUserClassPath
-
/** Set of settings */
- protected lazy val allSettings = HashSet[Setting]()
+ protected lazy val allSettings = mutable.HashSet[Setting]()
+
+ /** Against my better judgment, giving in to martin here and allowing
+ * CLASSPATH to be used automatically. So for the user-specified part
+ * of the classpath:
+ *
+ * - If -classpath or -cp is given, it is that
+ * - Otherwise, if CLASSPATH is set, it is that
+ * - If neither of those, then "." is used.
+ */
+ protected def defaultClasspath = sys.env.getOrElse("CLASSPATH", ".")
+
+ /** Enabled under -Xexperimental. */
+ protected def experimentalSettings = List[BooleanSetting](YmethodInfer, overrideObjects, overrideVars)
+
+ /** Enabled under -Xfuture. */
+ protected def futureSettings = List[BooleanSetting]()
+
+ /** Enabled under -optimise. */
+ protected def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce)
+
+ /** Internal use - syntax enhancements. */
+ private class EnableSettings[T <: BooleanSetting](val s: T) {
+ def enabling(toEnable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toEnable foreach (_.value = s.value))
+ def andThen(f: s.T => Unit): s.type = s withPostSetHook (setting => f(setting.value))
+ }
+ private implicit def installEnableSettings[T <: BooleanSetting](s: T) = new EnableSettings(s)
/** Disable a setting */
def disable(s: Setting) = allSettings -= s
@@ -35,13 +60,10 @@ trait ScalaSettings extends AbsScalaSettings
*/
// argfiles is only for the help message
val argfiles = BooleanSetting ("@<file>", "A text file containing compiler arguments (options and source files)")
- val classpath = PathSetting ("-classpath", "Specify where to find user class files.", scalaUserClassPath) .
- withAbbreviation ("-cp")
+ val classpath = PathSetting ("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp"
val d = OutputSetting (outputDirs, ".")
- val optimise = BooleanSetting ("-optimise", "Generates faster bytecode by applying optimisations to the program") .
- withAbbreviation("-optimize") .
- withPostSetHook(set => List(inline, Xcloselim, Xdce) foreach (_.value = set.value))
val nospecialization = BooleanSetting ("-no-specialization", "Ignore @specialize annotations.")
+ val language = MultiStringSetting("-language", "feature", "Enable one or more language features.")
/**
* -X "Advanced" settings
@@ -56,34 +78,39 @@ trait ScalaSettings extends AbsScalaSettings
val elidebelow = IntSetting ("-Xelide-below", "Calls to @elidable methods are omitted if method priority is lower than argument",
elidable.MINIMUM, None, elidable.byName get _)
val noForwarders = BooleanSetting ("-Xno-forwarders", "Do not generate static forwarders in mirror classes.")
- val future = BooleanSetting ("-Xfuture", "Turn on future language features.")
val genPhaseGraph = StringSetting ("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot.", "")
val XlogImplicits = BooleanSetting ("-Xlog-implicits", "Show more detail on why some implicits are not applicable.")
- val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some(72, 255), _ => None)
- val Xmigration28 = BooleanSetting ("-Xmigration", "Warn about constructs whose behavior may have changed between 2.7 and 2.8.")
+ val logImplicitConv = BooleanSetting ("-Xlog-implicit-conversions", "Print a message whenever an implicit conversion is inserted.")
+ val logReflectiveCalls = BooleanSetting("-Xlog-reflective-calls", "Print a message when a reflective method call is generated")
+ val logFreeTerms = BooleanSetting ("-Xlog-free-terms", "Print a message when reification creates a free term.")
+ val logFreeTypes = BooleanSetting ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
+ val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
+ val Xmigration = ScalaVersionSetting("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", AnyScalaVersion)
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
- val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode.")
+ val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
val plugin = MultiStringSetting("-Xplugin", "file", "Load one or more plugins from files.")
val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable the given plugin(s).")
val showPlugins = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.")
val require = MultiStringSetting("-Xplugin-require", "plugin", "Abort unless the given plugin(s) are available.")
val pluginsDir = StringSetting ("-Xpluginsdir", "path", "Path to search compiler plugins.", Defaults.scalaPluginPath)
val Xprint = PhasesSetting ("-Xprint", "Print out program after")
- val writeICode = BooleanSetting ("-Xprint-icode", "Log internal icode to *.icode files.")
+ val writeICode = PhasesSetting ("-Xprint-icode", "Log internal icode to *.icode files after", "icode")
val Xprintpos = BooleanSetting ("-Xprint-pos", "Print tree positions, as offsets.")
val printtypes = BooleanSetting ("-Xprint-types", "Print tree types (debugging option).")
val prompt = BooleanSetting ("-Xprompt", "Display a prompt after each error (debugging option).")
val resident = BooleanSetting ("-Xresident", "Compiler stays resident: read source filenames from standard input.")
val script = StringSetting ("-Xscript", "object", "Treat the source file as a script and wrap it in a main method.", "")
+ val mainClass = StringSetting ("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d <jar>)", "")
val Xshowcls = StringSetting ("-Xshow-class", "class", "Show internal representation of class.", "")
val Xshowobj = StringSetting ("-Xshow-object", "object", "Show internal representation of object.", "")
val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.")
val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
- // Experimental Extensions
- val Xexperimental = BooleanSetting ("-Xexperimental", "Enable experimental extensions.") .
- withPostSetHook(set => List(YdepMethTpes, YmethodInfer) foreach (_.value = set.value)) //YvirtClasses,
+ val XoldPatmat = BooleanSetting ("-Xoldpatmat", "Use the pre-2.10 pattern matcher. Otherwise, the 'virtualizing' pattern matcher is used in 2.10.")
+ val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.")
+ val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.")
+ val Xdivergence211 = BooleanSetting ("-Xdivergence211", "Turn on the 2.11 behavior of implicit divergence not terminating recursive implicit searches (SI-7291).")
/** Compatibility stubs for options whose value name did
* not previously match the option name.
@@ -98,70 +125,89 @@ trait ScalaSettings extends AbsScalaSettings
/**
* -Y "Private" settings
*/
- val Yhelp = BooleanSetting ("-Y", "Print a synopsis of private options.")
- val browse = PhasesSetting ("-Ybrowse", "Browse the abstract syntax tree after")
- val check = PhasesSetting ("-Ycheck", "Check the tree at the end of")
- val Yshow = PhasesSetting ("-Yshow", "(Requires -Xshow-class or -Xshow-object) Show after")
- val Xcloselim = BooleanSetting ("-Yclosure-elim", "Perform closure elimination.")
- val Ycompacttrees = BooleanSetting ("-Ycompact-trees", "Use compact tree printer when displaying trees.")
- val noCompletion = BooleanSetting ("-Yno-completion", "Disable tab-completion in the REPL.")
- val Xdce = BooleanSetting ("-Ydead-code", "Perform dead code elimination.")
- val debug = BooleanSetting ("-Ydebug", "Increase the quantity of debugging output.")
- // val doc = BooleanSetting ("-Ydoc", "Generate documentation")
- val termConflict = ChoiceSetting ("-Yresolve-term-conflict", "strategy", "Resolve term conflicts",
- List("package", "object", "error"), "error")
- val inline = BooleanSetting ("-Yinline", "Perform inlining when possible.")
- val Xlinearizer = ChoiceSetting ("-Ylinearizer", "which", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo")
- val log = PhasesSetting ("-Ylog", "Log operations during")
- val Ylogcp = BooleanSetting ("-Ylog-classpath", "Output information about what classpath is being applied.")
- val Ynogenericsig = BooleanSetting ("-Yno-generic-signatures", "Suppress generation of generic signatures for Java.")
- val noimports = BooleanSetting ("-Yno-imports", "Compile without any implicit imports.")
- // Not actually doing anything, so disabled.
- // val nopredefs = BooleanSetting ("-Yno-predefs", "Compile without any implicit predefined values.")
- val Yprofile = PhasesSetting ("-Yprofile", "(Requires jvm -agentpath to contain yjgpagent) Profile CPU usage of given phases.")
- val YprofileMem = BooleanSetting ("-Yprofile-memory", "Profile memory, get heap snapshot after each compiler run (requires yjpagent, see above).")
- val YprofileClass = StringSetting ("-Yprofile-class", "class", "Name of profiler class.", "scala.tools.util.YourkitProfiling")
- val Yrecursion = IntSetting ("-Yrecursion", "Set recursion depth used when locking symbols.", 0, Some(0, Int.MaxValue), (_: String) => None)
- val selfInAnnots = BooleanSetting ("-Yself-in-annots", "Include a \"self\" identifier inside of annotations.")
- val Xshowtrees = BooleanSetting ("-Yshow-trees", "(Requires -Xprint:) Print detailed ASTs.")
- val Yshowsyms = BooleanSetting ("-Yshow-syms", "Print the AST symbol hierarchy after each phase.")
- val skip = PhasesSetting ("-Yskip", "Skip")
- val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "")
- val Ynosqueeze = BooleanSetting ("-Yno-squeeze", "Disable creation of compact code in matching.")
- val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics.") .
- withPostSetHook(set => util.Statistics.enabled = set.value)
- val stopAfter = PhasesSetting ("-Ystop-after", "Stop after given phase") withAbbreviation ("-stop") // backward compat
- val stopBefore = PhasesSetting ("-Ystop-before", "Stop before given phase")
- val refinementMethodDispatch =
- ChoiceSetting ("-Ystruct-dispatch", "policy", "structural method dispatch policy",
- List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache")
- val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
- val YrichExes = BooleanSetting ("-Yrich-exceptions",
- "Fancier exceptions. Set source search path with -D" +
- sys.SystemProperties.traceSourcePath.key)
- val Yidedebug = BooleanSetting ("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
- val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
- val Ybuildmanagerdebug =
- BooleanSetting ("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.")
- val Ytyperdebug = BooleanSetting ("-Ytyper-debug", "Trace all type assignments.")
- val Yinferdebug = BooleanSetting ("-Yinfer-debug", "Trace type inference and implicit search.")
- val Ypmatdebug = BooleanSetting ("-Ypmat-debug", "Trace all pattern matcher activity.")
- val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
- val Yrepldebug = BooleanSetting ("-Yrepl-debug", "Trace all repl activity.") .
- withPostSetHook(_ => interpreter.replProps.debug setValue true)
- val Ycompletion = BooleanSetting ("-Ycompletion-debug", "Trace all tab completion activity.")
- val Ydocdebug = BooleanSetting ("-Ydoc-debug", "Trace all scaladoc activity.")
- val Ypmatnaive = BooleanSetting ("-Ypmat-naive", "Desugar matches as naively as possible.")
- val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.")
- val YdepMethTpes = BooleanSetting ("-Ydependent-method-types", "Allow dependent method types.")
- val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.")
- val noSelfCheck = BooleanSetting ("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.")
- val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
+ val overrideObjects = BooleanSetting ("-Yoverride-objects", "Allow member objects to be overridden.")
+ val overrideVars = BooleanSetting ("-Yoverride-vars", "Allow vars to be overridden.")
+ val Yhelp = BooleanSetting ("-Y", "Print a synopsis of private options.")
+ val browse = PhasesSetting ("-Ybrowse", "Browse the abstract syntax tree after")
+ val check = PhasesSetting ("-Ycheck", "Check the tree at the end of")
+ val Yshow = PhasesSetting ("-Yshow", "(Requires -Xshow-class or -Xshow-object) Show after")
+ val Xcloselim = BooleanSetting ("-Yclosure-elim", "Perform closure elimination.")
+ val Ycompacttrees = BooleanSetting ("-Ycompact-trees", "Use compact tree printer when displaying trees.")
+ val noCompletion = BooleanSetting ("-Yno-completion", "Disable tab-completion in the REPL.")
+ val Xdce = BooleanSetting ("-Ydead-code", "Perform dead code elimination.")
+ val debug = BooleanSetting ("-Ydebug", "Increase the quantity of debugging output.")
+ //val doc = BooleanSetting ("-Ydoc", "Generate documentation")
+ val termConflict = ChoiceSetting ("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error")
+ val inline = BooleanSetting ("-Yinline", "Perform inlining when possible.")
+ val inlineHandlers = BooleanSetting ("-Yinline-handlers", "Perform exception handler inlining when possible.")
+ val YinlinerWarnings= BooleanSetting ("-Yinline-warnings", "Emit inlining warnings. (Normally surpressed due to high volume)")
+ val Xlinearizer = ChoiceSetting ("-Ylinearizer", "which", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo")
+ val log = PhasesSetting ("-Ylog", "Log operations during")
+ val Ylogcp = BooleanSetting ("-Ylog-classpath", "Output information about what classpath is being applied.")
+ val Ynogenericsig = BooleanSetting ("-Yno-generic-signatures", "Suppress generation of generic signatures for Java.")
+ val noimports = BooleanSetting ("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.")
+ val nopredef = BooleanSetting ("-Yno-predef", "Compile without importing Predef.")
+ val noAdaptedArgs = BooleanSetting ("-Yno-adapted-args", "Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver.")
+ val Yrecursion = IntSetting ("-Yrecursion", "Set recursion depth used when locking symbols.", 0, Some((0, Int.MaxValue)), (_: String) => None)
+ val selfInAnnots = BooleanSetting ("-Yself-in-annots", "Include a \"self\" identifier inside of annotations.")
+ val Xshowtrees = BooleanSetting ("-Yshow-trees", "(Requires -Xprint:) Print detailed ASTs in formatted form.")
+ val XshowtreesCompact
+ = BooleanSetting ("-Yshow-trees-compact", "(Requires -Xprint:) Print detailed ASTs in compact form.")
+ val XshowtreesStringified
+ = BooleanSetting ("-Yshow-trees-stringified", "(Requires -Xprint:) Print stringifications along with detailed ASTs.")
+ val Yshowsyms = BooleanSetting ("-Yshow-syms", "Print the AST symbol hierarchy after each phase.")
+ val Yshowsymkinds = BooleanSetting ("-Yshow-symkinds", "Print abbreviated symbol kinds next to symbol names.")
+ val skip = PhasesSetting ("-Yskip", "Skip")
+ val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "")
+ val Ydumpclasses = StringSetting ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "")
+ val Ynosqueeze = BooleanSetting ("-Yno-squeeze", "Disable creation of compact code in matching.")
+ val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics.") andThen (scala.reflect.internal.util.Statistics.enabled = _)
+ val stopAfter = PhasesSetting ("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat
+ val stopBefore = PhasesSetting ("-Ystop-before", "Stop before")
+ val refinementMethodDispatch
+ = ChoiceSetting ("-Ystruct-dispatch", "policy", "structural method dispatch policy", List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache")
+ val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
+ val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
+ val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
+ val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.")
+ val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
+ val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.")
+ val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.")
+ val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.")
+ val Yinvalidate = StringSetting ("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "")
+ val noSelfCheck = BooleanSetting ("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.")
+ val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
val exposeEmptyPackage = BooleanSetting("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly()
def stop = stopAfter
+ /** Area-specific debug output.
+ */
+ val Ybuildmanagerdebug = BooleanSetting("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.")
+ val Ycompletion = BooleanSetting("-Ycompletion-debug", "Trace all tab completion activity.")
+ val Ydocdebug = BooleanSetting("-Ydoc-debug", "Trace all scaladoc activity.")
+ val Yidedebug = BooleanSetting("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
+ val Yinferdebug = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.")
+ val Yissuedebug = BooleanSetting("-Yissue-debug", "Print stack traces when a context issues an error.")
+ val YmacrodebugLite = BooleanSetting("-Ymacro-debug-lite", "Trace essential macro-related activities.")
+ val YmacrodebugVerbose = BooleanSetting("-Ymacro-debug-verbose", "Trace all macro-related activities: compilation, generation of synthetics, classloading, expansion, exceptions.")
+ val Ypmatdebug = BooleanSetting("-Ypmat-debug", "Trace all pattern matcher activity.")
+ val Yposdebug = BooleanSetting("-Ypos-debug", "Trace position validation.")
+ val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.")
+ val Yrepldebug = BooleanSetting("-Yrepl-debug", "Trace all repl activity.") andThen (interpreter.replProps.debug setValue _)
+ val Ytyperdebug = BooleanSetting("-Ytyper-debug", "Trace all type assignments.")
+ val Ypatmatdebug = BooleanSetting("-Ypatmat-debug", "Trace pattern matching translation.")
+
+ /** Groups of Settings.
+ */
+ val future = BooleanSetting("-Xfuture", "Turn on future language features.") enabling futureSettings
+ val optimise = BooleanSetting("-optimise", "Generates faster bytecode by applying optimisations to the program") withAbbreviation "-optimize" enabling optimiseSettings
+ val Xexperimental = BooleanSetting("-Xexperimental", "Enable experimental extensions.") enabling experimentalSettings
+
+ // Feature extensions
+ val XmacroSettings = MultiStringSetting("-Xmacro-settings", "option", "Custom settings for macros.")
+
/**
* IDE-specific settings
*/
@@ -171,11 +217,14 @@ trait ScalaSettings extends AbsScalaSettings
val YpresentationLog = StringSetting("-Ypresentation-log", "file", "Log presentation compiler events into file", "")
val YpresentationReplay = StringSetting("-Ypresentation-replay", "file", "Replay presentation compiler events from file", "")
- val YpresentationDelay = IntSetting("-Ypresentation-delay", "Wait number of ms after typing before starting typechecking", 0, Some(0, 999), str => Some(str.toInt))
+ val YpresentationDelay = IntSetting("-Ypresentation-delay", "Wait number of ms after typing before starting typechecking", 0, Some((0, 999)), str => Some(str.toInt))
/**
* -P "Plugin" settings
*/
val pluginOptions = MultiStringSetting("-P", "plugin:opt", "Pass an option to a plugin") .
withHelpSyntax("-P:<plugin>:<opt>")
+
+ /** Test whether this is scaladoc we're looking at */
+ def isScaladoc = false
}
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
new file mode 100644
index 0000000..d6a0149
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -0,0 +1,194 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author James Iry
+ */
+// $Id$
+
+package scala.tools.nsc.settings
+
+/**
+ * Represents a single Scala version in a manner that
+ * supports easy comparison and sorting.
+ */
+abstract class ScalaVersion extends Ordered[ScalaVersion] {
+ def unparse: String
+}
+
+/**
+ * A scala version that sorts higher than all actual versions
+ */
+case object NoScalaVersion extends ScalaVersion {
+ def unparse = "none"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case NoScalaVersion => 0
+ case _ => 1
+ }
+}
+
+/**
+ * A specific Scala version, not one of the magic min/max versions. An SpecificScalaVersion
+ * may or may not be a released version - i.e. this same class is used to represent
+ * final, release candidate, milestone, and development builds. The build argument is used
+ * to segregate builds
+ */
+case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion {
+ def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) =>
+ // this could be done more cleanly by importing scala.math.Ordering.Implicits, but we have to do these
+ // comparisons a lot so I'm using brute force direct style code
+ if (major < thatMajor) -1
+ else if (major > thatMajor) 1
+ else if (minor < thatMinor) -1
+ else if (minor > thatMinor) 1
+ else if (rev < thatRev) -1
+ else if (rev > thatRev) 1
+ else build compare thatBuild
+ case AnyScalaVersion => 1
+ case NoScalaVersion => -1
+ }
+}
+
+/**
+ * A Scala version that sorts lower than all actual versions
+ */
+case object AnyScalaVersion extends ScalaVersion {
+ def unparse = "any"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case AnyScalaVersion => 0
+ case _ => -1
+ }
+}
+
+/**
+ * Factory methods for producing ScalaVersions
+ */
+object ScalaVersion {
+ private val dot = "\\."
+ private val dash = "\\-"
+ private def not(s:String) = s"[^${s}]"
+ private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
+
+ def apply(versionString : String, errorHandler: String => Unit): ScalaVersion = {
+ def errorAndValue() = {
+ errorHandler(
+ s"There was a problem parsing ${versionString}. " +
+ "Versions should be in the form major[.minor[.revision]] " +
+ "where each part is a positive number, as in 2.10.1. " +
+ "The minor and revision parts are optional."
+ )
+ AnyScalaVersion
+ }
+
+ def toInt(s: String) = s match {
+ case null | "" => 0
+ case _ => s.toInt
+ }
+
+ def isInt(s: String) = util.Try(toInt(s)).isSuccess
+
+ def toBuild(s: String) = s match {
+ case null | "FINAL" => Final
+ case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2)))
+ case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1)))
+ case _ => Development(s)
+ }
+
+ try versionString match {
+ case "none" => NoScalaVersion
+ case "any" => AnyScalaVersion
+ case R(_, majorS, _, minorS, _, revS, _, buildS) =>
+ SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))
+ case _ =>
+ errorAndValue()
+ } catch {
+ case e: NumberFormatException => errorAndValue()
+ }
+ }
+
+ def apply(versionString: String): ScalaVersion =
+ apply(versionString, msg => throw new NumberFormatException(msg))
+
+ /**
+ * The version of the compiler running now
+ */
+ val current = apply(util.Properties.versionNumberString)
+
+ /**
+ * The 2.8.0 version.
+ */
+ val twoDotEight = SpecificScalaVersion(2, 8, 0, Final)
+}
+
+/**
+ * Represents the data after the dash in major.minor.rev-build
+ */
+abstract class ScalaBuild extends Ordered[ScalaBuild] {
+ /**
+ * Return a version of this build information that can be parsed back into the
+ * same ScalaBuild
+ */
+ def unparse: String
+}
+/**
+ * A development, test, nightly, snapshot or other "unofficial" build
+ */
+case class Development(id: String) extends ScalaBuild {
+ def unparse = s"-${id}"
+
+ def compare(that: ScalaBuild) = that match {
+ // sorting two development builds based on id is reasonably valid for two versions created with the same schema
+ // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions
+ // this is a pragmatic compromise
+ case Development(thatId) => id compare thatId
+ // assume a development build is newer than anything else, that's not really true, but good luck
+ // mapping development build versions to other build types
+ case _ => 1
+ }
+}
+/**
+ * A final final
+ */
+case object Final extends ScalaBuild {
+ def unparse = ""
+
+ def compare(that: ScalaBuild) = that match {
+ case Final => 0
+ // a final is newer than anything other than a development build or another final
+ case Development(_) => -1
+ case _ => 1
+ }
+}
+
+/**
+ * A candidate for final release
+ */
+case class RC(n: Int) extends ScalaBuild {
+ def unparse = s"-RC${n}"
+
+ def compare(that: ScalaBuild) = that match {
+ // compare two rcs based on their RC numbers
+ case RC(thatN) => n - thatN
+ // an rc is older than anything other than a milestone or another rc
+ case Milestone(_) => 1
+ case _ => -1
+ }
+}
+
+/**
+ * An intermediate release
+ */
+case class Milestone(n: Int) extends ScalaBuild {
+ def unparse = s"-M${n}"
+
+ def compare(that: ScalaBuild) = that match {
+ // compare two milestones based on their milestone numbers
+ case Milestone(thatN) => n - thatN
+ // a milestone is older than anything other than another milestone
+ case _ => -1
+
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
index c5b477c..e866ad6 100644
--- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -32,6 +32,7 @@ trait StandardScalaSettings {
val deprecation = BooleanSetting ("-deprecation", "Emit warning and location for usages of deprecated APIs.")
val encoding = StringSetting ("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding)
val explaintypes = BooleanSetting ("-explaintypes", "Explain type errors in more detail.")
+ val feature = BooleanSetting ("-feature", "Emit warning and location for usages of features that should be imported explicitly.")
val g = ChoiceSetting ("-g", "level", "Set level of generated debugging info.", List("none", "source", "line", "vars", "notailcalls"), "vars")
val help = BooleanSetting ("-help", "Print a synopsis of standard options")
val make = ChoiceSetting ("-make", "policy", "Recompilation detection policy", List("all", "changed", "immediate", "transitive", "transitivenocp"), "all")
@@ -39,8 +40,10 @@ trait StandardScalaSettings {
val nowarn = BooleanSetting ("-nowarn", "Generate no warnings.")
val optimise: BooleanSetting // depends on post hook which mutates other settings
val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.")
- val target = ChoiceSetting ("-target", "target", "Target platform for object files.", List("jvm-1.5", "msil"), "jvm-1.5")
- val unchecked = BooleanSetting ("-unchecked", "Enable detailed unchecked (erasure) warnings.")
+ val target = ChoiceSetting ("-target", "target", "Target platform for object files. All JVM 1.5 targets are deprecated.",
+ List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil"),
+ "jvm-1.6")
+ val unchecked = BooleanSetting ("-unchecked", "Enable additional warnings where generated code depends on assumptions.")
val uniqid = BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.")
val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.")
val verbose = BooleanSetting ("-verbose", "Output messages about what the compiler is doing.")
diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala
index b11c6f0..9f98792 100644
--- a/src/compiler/scala/tools/nsc/settings/Warnings.scala
+++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -7,10 +7,6 @@ package scala.tools
package nsc
package settings
-import annotation.elidable
-import scala.tools.util.PathResolver.Defaults
-import scala.collection.mutable.HashSet
-
/** Settings influencing the printing of warnings.
*/
trait Warnings {
@@ -22,6 +18,7 @@ trait Warnings {
// These warnings are all so noisy as to be useless in their
// present form, but have the potential to offer useful info.
protected def allWarnings = lintWarnings ++ List(
+ warnDeadCode,
warnSelectNullable,
warnValueDiscard,
warnNumericWiden
@@ -29,10 +26,11 @@ trait Warnings {
// These warnings should be pretty quiet unless you're doing
// something inadvisable.
protected def lintWarnings = List(
- warnDeadCode,
+ // warnDeadCode,
warnInaccessible,
warnNullaryOverride,
- warnNullaryUnit
+ warnNullaryUnit,
+ warnAdaptedArgs
)
// Warning groups.
@@ -47,6 +45,7 @@ trait Warnings {
// Individual warnings.
val warnSelectNullable = BooleanSetting ("-Xcheck-null", "Warn upon selection of nullable reference.")
+ val warnAdaptedArgs = BooleanSetting ("-Ywarn-adapted-args", "Warn if an argument list is modified to match the receiver.")
val warnDeadCode = BooleanSetting ("-Ywarn-dead-code", "Warn when dead code is identified.")
val warnValueDiscard = BooleanSetting ("-Ywarn-value-discard", "Warn when non-Unit expression results are unused.")
val warnNumericWiden = BooleanSetting ("-Ywarn-numeric-widen", "Warn when numerics are widened.")
diff --git a/src/compiler/scala/tools/nsc/symtab/AnnotationCheckers.scala b/src/compiler/scala/tools/nsc/symtab/AnnotationCheckers.scala
deleted file mode 100644
index d26c5d6..0000000
--- a/src/compiler/scala/tools/nsc/symtab/AnnotationCheckers.scala
+++ /dev/null
@@ -1,120 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package symtab
-
-/** Additions to the type checker that can be added at
- * run time. Typically these are added by
- * compiler plugins. */
-trait AnnotationCheckers {
- self: SymbolTable =>
-
-
- /** An additional checker for annotations on types.
- * Typically these are registered by compiler plugins
- * with the addAnnotationChecker method. */
- abstract class AnnotationChecker {
- /** Check the annotations on two types conform. */
- def annotationsConform(tpe1: Type, tpe2: Type): Boolean
-
- /** Refine the computed least upper bound of a list of types.
- * All this should do is add annotations. */
- def annotationsLub(tp: Type, ts: List[Type]): Type = tp
-
- /** Refine the computed greatest lower bound of a list of types.
- * All this should do is add annotations. */
- def annotationsGlb(tp: Type, ts: List[Type]): Type = tp
-
- /** Refine the bounds on type parameters to the given type arguments. */
- def adaptBoundsToAnnotations(bounds: List[TypeBounds],
- tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = bounds
-
- /** Modify the type that has thus far been inferred
- * for a tree. All this should do is add annotations. */
- def addAnnotations(tree: Tree, tpe: Type): Type = tpe
-
- /** Decide whether this annotation checker can adapt a tree
- * that has an annotated type to the given type tp, taking
- * into account the given mode (see method adapt in trait Typers).*/
- def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = false
-
- /** Adapt a tree that has an annotated type to the given type tp,
- * taking into account the given mode (see method adapt in trait Typers).
- * An implementation cannot rely on canAdaptAnnotations being called
- * before. If the implementing class cannot do the adaptiong, it
- * should return the tree unchanged.*/
- def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = tree
- }
-
- /** The list of annotation checkers that have been registered */
- private var annotationCheckers: List[AnnotationChecker] = Nil
-
- /** Register an annotation checker. Typically these
- * are added by compiler plugins. */
- def addAnnotationChecker(checker: AnnotationChecker) {
- if (!(annotationCheckers contains checker))
- annotationCheckers = checker :: annotationCheckers
- }
-
- /** Remove all annotation checkers */
- def removeAllAnnotationCheckers() {
- annotationCheckers = Nil
- }
-
- /** Check that the annotations on two types conform. To do
- * so, consult all registered annotation checkers. */
- def annotationsConform(tp1: Type, tp2: Type): Boolean = {
- /* Finish quickly if there are no annotations */
- if (tp1.annotations.isEmpty && tp2.annotations.isEmpty)
- true
- else
- annotationCheckers.forall(
- _.annotationsConform(tp1,tp2))
- }
-
- /** Refine the computed least upper bound of a list of types.
- * All this should do is add annotations. */
- def annotationsLub(tpe: Type, ts: List[Type]): Type = {
- annotationCheckers.foldLeft(tpe)((tpe, checker) =>
- checker.annotationsLub(tpe, ts))
- }
-
- /** Refine the computed greatest lower bound of a list of types.
- * All this should do is add annotations. */
- def annotationsGlb(tpe: Type, ts: List[Type]): Type = {
- annotationCheckers.foldLeft(tpe)((tpe, checker) =>
- checker.annotationsGlb(tpe, ts))
- }
-
- /** Refine the bounds on type parameters to the given type arguments. */
- def adaptBoundsToAnnotations(bounds: List[TypeBounds],
- tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = {
- annotationCheckers.foldLeft(bounds)((bounds, checker) =>
- checker.adaptBoundsToAnnotations(bounds, tparams, targs))
- }
-
- /** Let all annotations checkers add extra annotations
- * to this tree's type. */
- def addAnnotations(tree: Tree, tpe: Type): Type = {
- annotationCheckers.foldLeft(tpe)((tpe, checker) =>
- checker.addAnnotations(tree, tpe))
- }
-
- /** Find out whether any annotation checker can adapt a tree
- * to a given type. Called by Typers.adapt. */
- def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = {
- annotationCheckers.exists(_.canAdaptAnnotations(tree, mode, pt))
- }
-
- /** Let registered annotation checkers adapt a tree
- * to a given type (called by Typers.adapt). Annotation checkers
- * that cannot do the adaption should pass the tree through
- * unchanged. */
- def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = {
- annotationCheckers.foldLeft(tree)((tree, checker) =>
- checker.adaptAnnotations(tree, mode, pt))
- }
-}
diff --git a/src/compiler/scala/tools/nsc/symtab/AnnotationInfos.scala b/src/compiler/scala/tools/nsc/symtab/AnnotationInfos.scala
deleted file mode 100644
index f01de1d..0000000
--- a/src/compiler/scala/tools/nsc/symtab/AnnotationInfos.scala
+++ /dev/null
@@ -1,145 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package symtab
-
-import scala.tools.nsc.transform.Reifiers
-import util._
-
-/** AnnotationInfo and its helpers */
-trait AnnotationInfos extends reflect.generic.AnnotationInfos { self: SymbolTable =>
-
- /** Arguments to classfile annotations (which are written to
- * bytecode as java annotations) are either:
- *
- * - constants
- * - arrays of constants
- * - or nested classfile annotations
- */
- abstract class ClassfileAnnotArg
-
- /** Represents a compile-time Constant (`Boolean`, `Byte`, `Short`,
- * `Char`, `Int`, `Long`, `Float`, `Double`, `String`, `java.lang.Class` or
- * an instance of a Java enumeration value).
- */
- case class LiteralAnnotArg(const: Constant)
- extends ClassfileAnnotArg {
- override def toString = const.escapedStringValue
- }
-
- object LiteralAnnotArg extends LiteralAnnotArgExtractor
-
- /** Represents an array of classfile annotation arguments */
- case class ArrayAnnotArg(args: Array[ClassfileAnnotArg])
- extends ClassfileAnnotArg {
- override def toString = args.mkString("[", ", ", "]")
- }
-
- object ArrayAnnotArg extends ArrayAnnotArgExtractor
-
- /** A specific annotation argument that encodes an array of bytes as an
- * array of `Long`. The type of the argument declared in the annotation
- * must be `String`. This specialised class is used to encode Scala
- * signatures for reasons of efficiency, both in term of class-file size
- * and in term of compiler performance.
- */
- case class ScalaSigBytes(bytes: Array[Byte]) extends ClassfileAnnotArg {
- override def toString = (bytes map { byte => (byte & 0xff).toHexString }).mkString("[ ", " ", " ]")
- lazy val encodedBytes =
- reflect.generic.ByteCodecs.encode(bytes)
- def isLong: Boolean = (encodedBytes.length > 65535)
- def sigAnnot: Type =
- if (this.isLong)
- definitions.ScalaLongSignatureAnnotation.tpe
- else
- definitions.ScalaSignatureAnnotation.tpe
- }
-
- /** Represents a nested classfile annotation */
- case class NestedAnnotArg(annInfo: AnnotationInfo)
- extends ClassfileAnnotArg {
- // The nested annotation should not have any Scala annotation arguments
- assert(annInfo.args.isEmpty, annInfo.args)
- override def toString = annInfo.toString
- }
-
- object NestedAnnotArg extends NestedAnnotArgExtractor
-
- class AnnotationInfoBase
-
- /** Typed information about an annotation. It can be attached to either
- * a symbol or an annotated type.
- *
- * Annotations are written to the classfile as Java annotations
- * if `atp` conforms to `ClassfileAnnotation` (the classfile parser adds
- * this interface to any Java annotation class).
- *
- * Annotations are pickled (written to scala symtab attribute in the
- * classfile) if `atp` inherits form `StaticAnnotation`.
- *
- * `args` stores arguments to Scala annotations, represented as typed
- * trees. Note that these trees are not transformed by any phases
- * following the type-checker.
- *
- * `assocs` stores arguments to classfile annotations as name-value pairs.
- */
- case class AnnotationInfo(atp: Type, args: List[Tree],
- assocs: List[(Name, ClassfileAnnotArg)])
- extends AnnotationInfoBase {
-
- // Classfile annot: args empty. Scala annot: assocs empty.
- assert(args.isEmpty || assocs.isEmpty)
-
- private var rawpos: Position = NoPosition
- def pos = rawpos
- def setPos(pos: Position): this.type = {
- rawpos = pos
- this
- }
-
- lazy val isTrivial: Boolean = atp.isTrivial && !(args exists (_.exists(_.isInstanceOf[This]))) // see annotationArgRewriter
-
- override def toString: String = atp +
- (if (!args.isEmpty) args.mkString("(", ", ", ")") else "") +
- (if (!assocs.isEmpty) (assocs map { case (x, y) => x+" = "+y } mkString ("(", ", ", ")")) else "")
-
- /** Check whether the type or any of the arguments are erroneous */
- def isErroneous = atp.isErroneous || args.exists(_.isErroneous)
-
- /** Check whether any of the arguments mention a symbol */
- def refsSymbol(sym: Symbol) =
- args.exists(_.exists(_.symbol == sym))
-
- /** Change all ident's with Symbol "from" to instead use symbol "to" */
- def substIdentSyms(from: Symbol, to: Symbol) = {
- val subs = new TreeSymSubstituter(List(from), List(to))
- AnnotationInfo(atp, args.map(subs(_)), assocs).setPos(pos)
- }
-
- // !!! when annotation arguments are not literal strings, but any sort of
- // assembly of strings, there is a fair chance they will turn up here not as
- // Literal(const) but some arbitrary AST.
- def stringArg(index: Int): Option[String] = if(args.size > index) Some(args(index) match {
- case Literal(const) => const.stringValue
- case x => x.toString // should not be necessary, but better than silently ignoring an issue
- }) else None
-
- def intArg(index: Int): Option[Int] = if(args.size > index) Some(args(index)) collect {
- case Literal(Constant(x: Int)) => x
- } else None
- }
-
- object AnnotationInfo extends AnnotationInfoExtractor
-
- lazy val classfileAnnotArgManifest: ClassManifest[ClassfileAnnotArg] =
- reflect.ClassManifest.classType(classOf[ClassfileAnnotArg])
-
- /** Symbol annotations parsed in `Namer` (typeCompleter of
- * definitions) have to be lazy (#1782)
- */
- case class LazyAnnotationInfo(annot: () => AnnotationInfo)
- extends AnnotationInfoBase
-}
diff --git a/src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala b/src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala
deleted file mode 100644
index c3dd5f4..0000000
--- a/src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala
+++ /dev/null
@@ -1,251 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package symtab
-
-// todo implement in terms of BitSet
-import scala.collection.mutable.{ListBuffer, BitSet}
-import math.max
-import util.Statistics._
-
-/** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types
- * of a type. It characterized by the following two laws:
- *
- * (1) Each element of `tp.baseTypeSeq' is a basetype of `tp'
- * (2) For each basetype `bt1' of `tp' there is an element `bt' in `tp.baseTypeSeq' such that
- *
- * bt.typeSymbol = bt1.typeSymbol
- * bt <: bt1
- *
- * (3) The type symbols of different elements are different.
- *
- * Elements in the sequence are ordered by Symbol.isLess.
- * @note base type sequences were called closures up to 2.7.1. The name has been changed
- * to avoid confusion with function closures.
- */
-trait BaseTypeSeqs {
- this: SymbolTable =>
- import definitions._
-
- class BaseTypeSeq(parents: List[Type], elems: Array[Type]) {
- self =>
- incCounter(baseTypeSeqCount)
- incCounter(baseTypeSeqLenTotal, elems.length)
-
- /** The number of types in the sequence */
- def length: Int = elems.length
-
- // #3676 shows why we can't store NoType in elems to mark cycles
- // (while NoType is in there to indicate a cycle in this BTS, during the execution of
- // the mergePrefixAndArgs below, the elems get copied without the pending map,
- // so that NoType's are seen instead of the original type --> spurious compile error)
- val pending = new BitSet(length)
-
- /** The type at i'th position in this sequence; lazy types are returned evaluated. */
- def apply(i: Int): Type =
- if(pending contains i) {
- pending.clear()
- throw CyclicInheritance
- } else
- elems(i) match {
- case rtp @ RefinedType(variants, decls) =>
- // can't assert decls.isEmpty; see t0764
- //if (!decls.isEmpty) assert(false, "computing closure of "+this+":"+this.isInstanceOf[RefinedType]+"/"+closureCache(j))
- //Console.println("compute closure of "+this+" => glb("+variants+")")
- pending += i
- try {
- mergePrefixAndArgs(variants, -1, lubDepth(variants)) match {
- case Some(tp0) =>
- pending(i) = false
- elems(i) = tp0
- tp0
- case None =>
- typeError(
- "no common type instance of base types "+(variants mkString ", and ")+" exists.")
- }
- } catch {
- case CyclicInheritance =>
- typeError(
- "computing the common type instance of base types "+(variants mkString ", and ")+" leads to a cycle.")
- }
- case tp =>
- tp
- }
-
- def rawElem(i: Int) = elems(i)
-
- /** The type symbol of the type at i'th position in this sequence;
- * no evaluation needed.
- */
- def typeSymbol(i: Int): Symbol = {
- elems(i) match {
- case RefinedType(v :: vs, _) => v.typeSymbol
- case tp => tp.typeSymbol
- }
- }
-
- /** Return all evaluated types in this sequence as a list */
- def toList: List[Type] = elems.toList
-
- protected def copy(head: Type, offset: Int): BaseTypeSeq = {
- val arr = new Array[Type](elems.length + offset)
- compat.Platform.arraycopy(elems, 0, arr, offset, elems.length)
- arr(0) = head
- new BaseTypeSeq(parents, arr)
- }
-
- /** Compute new base type sequence with `tp' prepended to this sequence */
- def prepend(tp: Type): BaseTypeSeq = copy(tp, 1)
-
- /** Compute new base type sequence with `tp' replacing the head of this sequence */
- def updateHead(tp: Type): BaseTypeSeq = copy(tp, 0)
-
- /** Compute new base type sequence where every element is mapped
- * with function `f'. Lazy types are mapped but not evaluated */
- def map(f: Type => Type): BaseTypeSeq = {
- // inlined `elems map f' for performance
- val len = length
- var arr = new Array[Type](len)
- var i = 0
- while (i < len) {
- arr(i) = f(elems(i))
- i += 1
- }
- new BaseTypeSeq(parents, arr)
- }
-
- def lateMap(f: Type => Type): BaseTypeSeq = new BaseTypeSeq(parents map f, elems) {
- override def apply(i: Int) = f(self.apply(i))
- override def rawElem(i: Int) = f(self.rawElem(i))
- override def typeSymbol(i: Int) = self.typeSymbol(i)
- override def toList = self.toList map f
- override protected def copy(head: Type, offset: Int) = (self map f).copy(head, offset)
- override def map(g: Type => Type) = lateMap(g)
- override def lateMap(g: Type => Type) = self.lateMap(x => g(f(x)))
- override def exists(p: Type => Boolean) = elems exists (x => p(f(x)))
- override protected def maxDepthOfElems: Int = elems map (x => maxDpth(f(x))) max
- override def toString = elems.mkString("MBTS(", ",", ")")
- }
-
- def exists(p: Type => Boolean): Boolean = elems exists p
-
- lazy val maxDepth: Int = maxDepthOfElems
-
- protected def maxDepthOfElems = {
- var d = 0
- for (i <- 0 until length) d = max(d, maxDpth(elems(i)))
- d
- }
-
- /** The maximum depth of type `tp' */
- protected def maxDpth(tp: Type): Int = tp match {
- case TypeRef(pre, sym, args) =>
- max(maxDpth(pre), maxDpth(args) + 1)
- case RefinedType(parents, decls) =>
- max(maxDpth(parents), maxDpth(decls.toList.map(_.info)) + 1)
- case TypeBounds(lo, hi) =>
- max(maxDpth(lo), maxDpth(hi))
- case MethodType(paramtypes, result) =>
- maxDpth(result)
- case NullaryMethodType(result) =>
- maxDpth(result)
- case PolyType(tparams, result) =>
- max(maxDpth(result), maxDpth(tparams map (_.info)) + 1)
- case ExistentialType(tparams, result) =>
- max(maxDpth(result), maxDpth(tparams map (_.info)) + 1)
- case _ =>
- 1
- }
-
- /** The maximum depth of all types `tps' */
- private def maxDpth(tps: Seq[Type]): Int = {
- var d = 0
- for (tp <- tps) d = max(d, maxDpth(tp))
- d
- }
-
- override def toString = elems.mkString("BTS(", ",", ")")
-
- private def typeError(msg: String): Nothing =
- throw new TypeError(
- "the type intersection "+(parents mkString " with ")+" is malformed"+
- "\n --- because ---\n"+msg)
- }
-
- /** A merker object for a base type sequence that's no yet computed.
- * used to catch inheritance cycles
- */
- val undetBaseTypeSeq: BaseTypeSeq = new BaseTypeSeq(List(), Array())
-
- /** Create a base type sequence consisting of a single type */
- def baseTypeSingletonSeq(tp: Type): BaseTypeSeq = new BaseTypeSeq(List(), Array(tp))
-
- /** Create the base type sequence of a compound type wuth given tp.parents */
- def compoundBaseTypeSeq(tp: Type): BaseTypeSeq = {
- val tsym = tp.typeSymbol
- val parents = tp.parents
-// Console.println("computing baseTypeSeq of " + tsym.tpe + " " + parents)//DEBUG
- val buf = new ListBuffer[Type]
- buf += tsym.tpe
- var btsSize = 1
- if (parents.nonEmpty) {
- val nparents = parents.length
- val pbtss = new Array[BaseTypeSeq](nparents)
- val index = new Array[Int](nparents)
- var i = 0
- for (p <- parents) {
- pbtss(i) =
- if (p.baseTypeSeq eq undetBaseTypeSeq) AnyClass.info.baseTypeSeq
- else p.baseTypeSeq
- index(i) = 0
- i += 1
- }
- def nextTypeSymbol(i: Int): Symbol = {
- val j = index(i)
- val pbts = pbtss(i)
- if (j < pbts.length) pbts.typeSymbol(j) else AnyClass
- }
- def nextRawElem(i: Int): Type = {
- val j = index(i)
- val pbts = pbtss(i)
- if (j < pbts.length) pbts.rawElem(j) else AnyClass.tpe
- }
- var minSym: Symbol = NoSymbol
- while (minSym != AnyClass) {
- minSym = nextTypeSymbol(0)
- i = 1
- while (i < nparents) {
- val nextSym = nextTypeSymbol(i)
- if (nextSym isLess minSym)
- minSym = nextSym
- i += 1
- }
- var minTypes: List[Type] = List()
- i = 0
- while (i < nparents) {
- if (nextTypeSymbol(i) == minSym) {
- nextRawElem(i) match {
- case RefinedType(variants, decls) =>
- for (tp <- variants)
- if (!(minTypes exists (tp =:=))) minTypes = tp :: minTypes
- case tp =>
- if (!(minTypes exists (tp =:=))) minTypes = tp :: minTypes
- }
- index(i) = index(i) + 1
- }
- i += 1
- }
- buf += intersectionType(minTypes)
- btsSize += 1
- }
- }
- val elems = new Array[Type](btsSize)
- buf.copyToArray(elems, 0)
-// Console.println("computed baseTypeSeq of " + tsym.tpe + " " + parents + ": "+elems.toString)//DEBUG
- new BaseTypeSeq(parents, elems)
- }
-
- val CyclicInheritance = new Throwable
-}
diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
index 9c21bac..c7bd678 100644
--- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
@@ -1,13 +1,13 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package symtab
+import scala.reflect.internal.util.BatchSourceFile
import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.BatchSourceFile
/** A subclass of SymbolLoaders that implements browsing behavior.
* This class should be used whenever file dependencies and recompile sets
@@ -64,15 +64,25 @@ abstract class BrowsingLoaders extends SymbolLoaders {
addPackagePrefix(pre)
packagePrefix += ("." + name)
case Ident(name) =>
- if (packagePrefix.length != 0) packagePrefix += "."
- packagePrefix += name
+ if (name != nme.EMPTY_PACKAGE_NAME) { // mirrors logic in Namers, see createPackageSymbol
+ if (packagePrefix.length != 0) packagePrefix += "."
+ packagePrefix += name
+ }
case _ =>
throw new MalformedInput(pkg.pos.point, "illegal tree node in package prefix: "+pkg)
}
+
+ private def inPackagePrefix(pkg: Tree)(op: => Unit): Unit = {
+ val oldPrefix = packagePrefix
+ addPackagePrefix(pkg)
+ op
+ packagePrefix = oldPrefix
+ }
+
override def traverse(tree: Tree): Unit = tree match {
case PackageDef(pkg, body) =>
- addPackagePrefix(pkg)
- body foreach traverse
+ inPackagePrefix(pkg) { body foreach traverse }
+
case ClassDef(_, name, _, _) =>
if (packagePrefix == root.fullName) {
enterClass(root, name.toString, new SourcefileLoader(src))
@@ -84,7 +94,7 @@ abstract class BrowsingLoaders extends SymbolLoaders {
entered += 1
if (name == nme.PACKAGEkw) {
println("open package module: "+module)
- loaders.openPackageModule(module)()
+ openPackageModule(module, root)
}
} else println("prefixes differ: "+packagePrefix+","+root.fullName)
case _ =>
@@ -105,7 +115,7 @@ abstract class BrowsingLoaders extends SymbolLoaders {
*/
override def enterToplevelsFromSource(root: Symbol, name: String, src: AbstractFile) {
try {
- if (root == definitions.RootClass || root == definitions.EmptyPackageClass)
+ if (root.isEffectiveRoot || !src.name.endsWith(".scala")) // RootClass or EmptyPackageClass
super.enterToplevelsFromSource(root, name, src)
else
browseTopLevel(root, src)
diff --git a/src/compiler/scala/tools/nsc/symtab/Definitions.scala b/src/compiler/scala/tools/nsc/symtab/Definitions.scala
deleted file mode 100644
index 264df8b..0000000
--- a/src/compiler/scala/tools/nsc/symtab/Definitions.scala
+++ /dev/null
@@ -1,908 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package symtab
-
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.{ HashMap }
-import scala.tools.nsc.util.NoPosition
-import Flags._
-import PartialFunction._
-import classfile.ClassfileConstants
-
-trait Definitions extends reflect.generic.StandardDefinitions {
- self: SymbolTable =>
-
- // the scala value classes
- trait ValueClassDefinitions {
- self: definitions.type =>
-
- private[Definitions] def valueCache(name: Name) = {
- if (name.isTypeName) ScalaPackageClass.info member name
- else ScalaPackageClass.info member name suchThat (_ hasFlag MODULE)
- }
- private[Definitions] def valueModuleMethod(className: Name, methodName: Name): Symbol = {
- valueCache(className.toTermName).moduleClass.tpe member methodName
- }
-
- import ClassfileConstants._
-
- private val nameToWeight = Map[Name, Int](
- tpnme.Byte -> 2,
- tpnme.Char -> 3,
- tpnme.Short -> 4,
- tpnme.Int -> 12,
- tpnme.Long -> 24,
- tpnme.Float -> 48,
- tpnme.Double -> 96
- )
-
- private val nameToTag = Map[Name, Char](
- tpnme.Byte -> BYTE_TAG,
- tpnme.Char -> CHAR_TAG,
- tpnme.Short -> SHORT_TAG,
- tpnme.Int -> INT_TAG,
- tpnme.Long -> LONG_TAG,
- tpnme.Float -> FLOAT_TAG,
- tpnme.Double -> DOUBLE_TAG,
- tpnme.Boolean -> BOOL_TAG,
- tpnme.Unit -> VOID_TAG,
- tpnme.Object -> TVAR_TAG
- )
-
- private def classesMap[T](f: Name => T) = symbolsMap(ScalaValueClassesNoUnit, f)
- private def symbolsMap[T](syms: List[Symbol], f: Name => T): Map[Symbol, T] = syms zip (syms map (x => f(x.name))) toMap
- private def symbolsMapFilt[T](syms: List[Symbol], p: Name => Boolean, f: Name => T) = symbolsMap(syms filter (x => p(x.name)), f)
-
- private def boxedName(name: Name) = sn.Boxed(name.toTypeName)
-
- lazy val abbrvTag = symbolsMap(ObjectClass :: ScalaValueClasses, nameToTag)
- lazy val numericWeight = symbolsMapFilt(ScalaValueClasses, nameToWeight.keySet, nameToWeight)
- lazy val boxedModule = classesMap(x => getModule(boxedName(x)))
- lazy val boxedClass = classesMap(x => getClass(boxedName(x)))
- lazy val refClass = classesMap(x => getClass("scala.runtime." + x + "Ref"))
- lazy val volatileRefClass = classesMap(x => getClass("scala.runtime.Volatile" + x + "Ref"))
- lazy val boxMethod = classesMap(x => valueModuleMethod(x, nme.box))
- lazy val unboxMethod = classesMap(x => valueModuleMethod(x, nme.unbox))
-
- private def newClass(owner: Symbol, name: TypeName, parents: List[Type]): Symbol = {
- val clazz = owner.newClass(NoPosition, name)
- clazz.setInfo(ClassInfoType(parents, new Scope, clazz))
- owner.info.decls.enter(clazz)
- clazz
- }
-
- def isNumericSubClass(sub: Symbol, sup: Symbol) = {
- val cmp = for (w1 <- numericWeight get sub ; w2 <- numericWeight get sup) yield w2 % w1
- cmp exists (_ == 0)
- }
-
- /** Is symbol a numeric value class? */
- def isNumericValueClass(sym: Symbol): Boolean =
- numericWeight contains sym
-
- private[Definitions] def fullNameStrings: List[String] = nme.ScalaValueNames map ("scala." + _)
- private[Definitions] lazy val fullValueName: Set[Name] = {
- val values = nme.ScalaValueNames flatMap (x => List(newTypeName("scala." + x), newTermName("scala." + x)))
- values.toSet + newTypeName("scala.AnyVal")
- }
-
- lazy val AnyValClass = valueCache(tpnme.AnyVal)
- lazy val UnitClass = valueCache(tpnme.Unit)
- lazy val ByteClass = valueCache(tpnme.Byte)
- lazy val ShortClass = valueCache(tpnme.Short)
- lazy val CharClass = valueCache(tpnme.Char)
- lazy val IntClass = valueCache(tpnme.Int)
- lazy val LongClass = valueCache(tpnme.Long)
- lazy val FloatClass = valueCache(tpnme.Float)
- lazy val DoubleClass = valueCache(tpnme.Double)
- lazy val BooleanClass = valueCache(tpnme.Boolean)
- def Boolean_and = getMember(BooleanClass, nme.ZAND)
- def Boolean_or = getMember(BooleanClass, nme.ZOR)
-
- def ScalaValueClassesNoUnit = ScalaValueClasses filterNot (_ eq UnitClass)
- def ScalaValueClasses: List[Symbol] = List(
- UnitClass,
- BooleanClass,
- ByteClass,
- ShortClass,
- CharClass,
- IntClass,
- LongClass,
- FloatClass,
- DoubleClass
- )
- }
-
- object definitions extends AbsDefinitions with ValueClassDefinitions {
- private var isInitialized = false
- def isDefinitionsInitialized = isInitialized
-
- // symbols related to packages
- var emptypackagescope: Scope = null //debug
-
- // This is the package _root_. The actual root cannot be referenced at
- // the source level, but _root_ is essentially a function () => <root>.
- lazy val RootPackage: Symbol = {
- val rp = NoSymbol.newValue(NoPosition, nme.ROOTPKG)
- .setFlag(FINAL | MODULE | PACKAGE | JAVA)
- .setInfo(NullaryMethodType(RootClass.tpe))
- RootClass.sourceModule = rp
- rp
- }
- // This is the actual root of everything, including the package _root_.
- lazy val RootClass: ModuleClassSymbol = NoSymbol.newModuleClass(NoPosition, tpnme.ROOT)
- .setFlag(FINAL | MODULE | PACKAGE | JAVA).setInfo(rootLoader)
-
- // The empty package, which holds all top level types without given packages.
- lazy val EmptyPackage = RootClass.newPackage(NoPosition, nme.EMPTY_PACKAGE_NAME).setFlag(FINAL)
- lazy val EmptyPackageClass = EmptyPackage.moduleClass
-
- lazy val JavaLangPackage = getModule(sn.JavaLang)
- lazy val ScalaPackage = getModule("scala")
- lazy val ScalaPackageClass = ScalaPackage.tpe.typeSymbol
-
- lazy val RuntimePackage = getModule("scala.runtime")
- lazy val RuntimePackageClass = RuntimePackage.tpe.typeSymbol
-
- // convenient one-argument parameter lists
- lazy val anyparam = List(AnyClass.typeConstructor)
- lazy val anyvalparam = List(AnyValClass.typeConstructor)
- lazy val anyrefparam = List(AnyRefClass.typeConstructor)
-
- // private parameter conveniences
- private def booltype = BooleanClass.typeConstructor
- private def boolparam = List(booltype)
- private def bytetype = ByteClass.typeConstructor
- private def byteparam = List(bytetype)
- private def shorttype = ShortClass.typeConstructor
- private def shortparam = List(shorttype)
- private def inttype = IntClass.typeConstructor
- private def intparam = List(inttype)
- private def longtype = LongClass.typeConstructor
- private def longparam = List(longtype)
- private def floattype = FloatClass.typeConstructor
- private def floatparam = List(floattype)
- private def doubletype = DoubleClass.typeConstructor
- private def doubleparam = List(doubletype)
- private def chartype = CharClass.typeConstructor
- private def charparam = List(chartype)
- private def stringtype = StringClass.typeConstructor
-
- // top types
- lazy val AnyClass = newClass(ScalaPackageClass, tpnme.Any, Nil) setFlag (ABSTRACT)
- lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectClass.typeConstructor)
- lazy val ObjectClass = getClass(sn.Object)
- lazy val AnyCompanionClass = getClass("scala.AnyCompanion") setFlag (SEALED | ABSTRACT | TRAIT)
- lazy val AnyValCompanionClass = getClass("scala.AnyValCompanion") setFlag (SEALED | ABSTRACT | TRAIT)
-
- // bottom types
- lazy val NullClass = newClass(ScalaPackageClass, tpnme.Null, anyrefparam) setFlag (ABSTRACT | TRAIT | FINAL)
- lazy val NothingClass = newClass(ScalaPackageClass, tpnme.Nothing, anyparam) setFlag (ABSTRACT | TRAIT | FINAL)
- lazy val RuntimeNothingClass = getClass(ClassfileConstants.SCALA_NOTHING)
- lazy val RuntimeNullClass = getClass(ClassfileConstants.SCALA_NULL)
-
- // exceptions and other throwables
- lazy val ClassCastExceptionClass = getClass("java.lang.ClassCastException")
- lazy val IndexOutOfBoundsExceptionClass = getClass(sn.IOOBException)
- lazy val InvocationTargetExceptionClass = getClass(sn.InvTargetException)
- lazy val MatchErrorClass = getClass("scala.MatchError")
- lazy val NonLocalReturnControlClass = getClass("scala.runtime.NonLocalReturnControl")
- lazy val NullPointerExceptionClass = getClass(sn.NPException)
- lazy val ThrowableClass = getClass(sn.Throwable)
- lazy val UninitializedErrorClass = getClass("scala.UninitializedFieldError")
-
- // annotations
- lazy val AnnotationClass = getClass("scala.annotation.Annotation")
- lazy val ClassfileAnnotationClass = getClass("scala.annotation.ClassfileAnnotation")
- lazy val StaticAnnotationClass = getClass("scala.annotation.StaticAnnotation")
- lazy val uncheckedStableClass = getClass("scala.annotation.unchecked.uncheckedStable")
- lazy val uncheckedVarianceClass = getClass("scala.annotation.unchecked.uncheckedVariance")
- lazy val UncheckedClass = getClass("scala.unchecked")
- lazy val ThrowsClass = getClass("scala.throws")
- lazy val TailrecClass = getClass("scala.annotation.tailrec")
- lazy val SwitchClass = getClass("scala.annotation.switch")
- lazy val ElidableMethodClass = getClass("scala.annotation.elidable")
- lazy val ImplicitNotFoundClass = getClass("scala.annotation.implicitNotFound")
- lazy val VarargsClass = getClass("scala.annotation.varargs")
- lazy val FieldTargetClass = getClass("scala.annotation.target.field")
- lazy val GetterTargetClass = getClass("scala.annotation.target.getter")
- lazy val SetterTargetClass = getClass("scala.annotation.target.setter")
- lazy val BeanGetterTargetClass = getClass("scala.annotation.target.beanGetter")
- lazy val BeanSetterTargetClass = getClass("scala.annotation.target.beanSetter")
- lazy val ParamTargetClass = getClass("scala.annotation.target.param")
- lazy val ScalaInlineClass = getClass("scala.inline")
- lazy val ScalaNoInlineClass = getClass("scala.noinline")
- lazy val SpecializedClass = getClass("scala.specialized")
- lazy val BridgeClass = getClass("scala.annotation.bridge")
-
- // fundamental reference classes
- lazy val ScalaObjectClass = getClass("scala.ScalaObject")
- lazy val PartialFunctionClass = getClass("scala.PartialFunction")
- lazy val SymbolClass = getClass("scala.Symbol")
- lazy val StringClass = getClass(sn.String)
- lazy val StringModule = StringClass.linkedClassOfClass
- lazy val ClassClass = getClass(sn.Class)
- def Class_getMethod = getMember(ClassClass, nme.getMethod_)
- lazy val DynamicClass = getClass("scala.Dynamic")
-
- // fundamental modules
- lazy val PredefModule: Symbol = getModule("scala.Predef")
- lazy val PredefModuleClass = PredefModule.tpe.typeSymbol
- def Predef_AnyRef = getMember(PredefModule, "AnyRef") // used by the specialization annotation
- def Predef_classOf = getMember(PredefModule, nme.classOf)
- def Predef_error = getMember(PredefModule, nme.error)
- def Predef_identity = getMember(PredefModule, nme.identity)
- def Predef_conforms = getMember(PredefModule, nme.conforms)
- def Predef_wrapRefArray = getMember(PredefModule, nme.wrapRefArray)
- lazy val ConsoleModule: Symbol = getModule("scala.Console")
- lazy val ScalaRunTimeModule: Symbol = getModule("scala.runtime.ScalaRunTime")
- lazy val SymbolModule: Symbol = getModule("scala.Symbol")
- lazy val Symbol_apply = getMember(SymbolModule, nme.apply)
- def SeqFactory = getMember(ScalaRunTimeModule, nme.Seq)
- def arrayApplyMethod = getMember(ScalaRunTimeModule, "array_apply")
- def arrayUpdateMethod = getMember(ScalaRunTimeModule, "array_update")
- def arrayLengthMethod = getMember(ScalaRunTimeModule, "array_length")
- def arrayCloneMethod = getMember(ScalaRunTimeModule, "array_clone")
- def ensureAccessibleMethod = getMember(ScalaRunTimeModule, "ensureAccessible")
- def scalaRuntimeHash = getMember(ScalaRunTimeModule, "hash")
- def scalaRuntimeAnyValClass = getMember(ScalaRunTimeModule, "anyValClass")
- def scalaRuntimeSameElements = getMember(ScalaRunTimeModule, nme.sameElements)
-
- // classes with special meanings
- lazy val NotNullClass = getClass("scala.NotNull")
- lazy val DelayedInitClass = getClass("scala.DelayedInit")
- def delayedInitMethod = getMember(DelayedInitClass, nme.delayedInit)
- // a dummy value that communicates that a delayedInit call is compiler-generated
- // from phase UnCurry to phase Constructors
- def delayedInitArgVal = EmptyPackageClass.newValue(NoPosition, nme.delayedInitArg)
- .setInfo(UnitClass.tpe)
-
- lazy val TypeConstraintClass = getClass("scala.annotation.TypeConstraint")
- lazy val SingletonClass = newClass(ScalaPackageClass, tpnme.Singleton, anyparam) setFlag (ABSTRACT | TRAIT | FINAL)
- lazy val SerializableClass = getClass("scala.Serializable")
- lazy val JavaSerializableClass = getClass(sn.JavaSerializable)
- lazy val ComparableClass = getClass("java.lang.Comparable")
-
- lazy val RepeatedParamClass = newCovariantPolyClass(
- ScalaPackageClass,
- tpnme.REPEATED_PARAM_CLASS_NAME,
- tparam => seqType(tparam.typeConstructor)
- )
-
- lazy val JavaRepeatedParamClass = newCovariantPolyClass(
- ScalaPackageClass,
- tpnme.JAVA_REPEATED_PARAM_CLASS_NAME,
- tparam => arrayType(tparam.typeConstructor)
- )
-
- def isByNameParamType(tp: Type) = tp.typeSymbol == ByNameParamClass
- def isScalaRepeatedParamType(tp: Type) = tp.typeSymbol == RepeatedParamClass
- def isJavaRepeatedParamType(tp: Type) = tp.typeSymbol == JavaRepeatedParamClass
- def isRepeatedParamType(tp: Type) = isScalaRepeatedParamType(tp) || isJavaRepeatedParamType(tp)
-
- def isScalaVarArgs(params: List[Symbol]) = params.nonEmpty && isScalaRepeatedParamType(params.last.tpe)
- def isVarArgsList(params: List[Symbol]) = params.nonEmpty && isRepeatedParamType(params.last.tpe)
- def isVarArgTypes(formals: List[Type]) = formals.nonEmpty && isRepeatedParamType(formals.last)
-
- def isPrimitiveArray(tp: Type) = tp match {
- case TypeRef(_, ArrayClass, arg :: Nil) => isValueClass(arg.typeSymbol)
- case _ => false
- }
- def isArrayOfSymbol(tp: Type, elem: Symbol) = tp match {
- case TypeRef(_, ArrayClass, arg :: Nil) => arg.typeSymbol == elem
- case _ => false
- }
-
- lazy val ByNameParamClass = newCovariantPolyClass(
- ScalaPackageClass,
- tpnme.BYNAME_PARAM_CLASS_NAME,
- tparam => AnyClass.typeConstructor
- )
- lazy val EqualsPatternClass = {
- val clazz = newClass(ScalaPackageClass, tpnme.EQUALS_PATTERN_NAME, Nil)
- clazz setInfo polyType(List(newTypeParam(clazz, 0)), ClassInfoType(anyparam, new Scope, clazz))
- }
-
- // collections classes
- lazy val ConsClass = getClass("scala.collection.immutable.$colon$colon")
- lazy val IterableClass = getClass("scala.collection.Iterable")
- lazy val IteratorClass = getClass("scala.collection.Iterator")
- lazy val ListClass = getClass("scala.collection.immutable.List")
- lazy val SeqClass = getClass("scala.collection.Seq")
- lazy val StringBuilderClass = getClass("scala.collection.mutable.StringBuilder")
- lazy val TraversableClass = getClass("scala.collection.Traversable")
-
- lazy val ListModule = getModule("scala.collection.immutable.List")
- lazy val List_apply = getMember(ListModule, nme.apply)
- lazy val NilModule = getModule("scala.collection.immutable.Nil")
- lazy val SeqModule = getModule("scala.collection.Seq")
-
- // arrays and their members
- lazy val ArrayModule = getModule("scala.Array")
- def ArrayModule_overloadedApply = getMember(ArrayModule, nme.apply)
- lazy val ArrayClass = getClass("scala.Array")
- def Array_apply = getMember(ArrayClass, nme.apply)
- def Array_update = getMember(ArrayClass, nme.update)
- def Array_length = getMember(ArrayClass, nme.length)
- lazy val Array_clone = getMember(ArrayClass, nme.clone_)
-
- // reflection / structural types
- lazy val SoftReferenceClass = getClass("java.lang.ref.SoftReference")
- lazy val WeakReferenceClass = getClass("java.lang.ref.WeakReference")
- lazy val MethodClass = getClass(sn.MethodAsObject)
- def methodClass_setAccessible = getMember(MethodClass, nme.setAccessible)
- lazy val EmptyMethodCacheClass = getClass("scala.runtime.EmptyMethodCache")
- lazy val MethodCacheClass = getClass("scala.runtime.MethodCache")
- def methodCache_find = getMember(MethodCacheClass, nme.find_)
- def methodCache_add = getMember(MethodCacheClass, nme.add_)
-
- // scala.reflect
- lazy val PartialManifestClass = getClass("scala.reflect.ClassManifest")
- lazy val PartialManifestModule = getModule("scala.reflect.ClassManifest")
- lazy val FullManifestClass = getClass("scala.reflect.Manifest")
- lazy val FullManifestModule = getModule("scala.reflect.Manifest")
- lazy val OptManifestClass = getClass("scala.reflect.OptManifest")
- lazy val NoManifest = getModule("scala.reflect.NoManifest")
- lazy val CodeClass = getClass(sn.Code)
- lazy val CodeModule = getModule(sn.Code)
- def Code_lift = getMember(CodeModule, nme.lift_)
-
- lazy val ScalaSignatureAnnotation = getClass("scala.reflect.ScalaSignature")
- lazy val ScalaLongSignatureAnnotation = getClass("scala.reflect.ScalaLongSignature")
-
- // invoke dynamic support
- lazy val LinkageModule = getModule("java.dyn.Linkage")
- lazy val Linkage_invalidateCallerClass = getMember(LinkageModule, "invalidateCallerClass")
- lazy val DynamicDispatchClass = getModule("scala.runtime.DynamicDispatch")
- lazy val DynamicDispatch_DontSetTarget = getMember(DynamicDispatchClass, "DontSetTarget")
-
- // Option classes
- lazy val OptionClass: Symbol = getClass("scala.Option")
- lazy val SomeClass: Symbol = getClass("scala.Some")
- lazy val NoneModule: Symbol = getModule("scala.None")
-
- def isOptionType(tp: Type) = cond(tp.normalize) { case TypeRef(_, OptionClass, List(_)) => true }
- def isSomeType(tp: Type) = cond(tp.normalize) { case TypeRef(_, SomeClass, List(_)) => true }
- def isNoneType(tp: Type) = cond(tp.normalize) { case TypeRef(_, NoneModule, List(_)) => true }
-
- def optionType(tp: Type) = typeRef(OptionClass.typeConstructor.prefix, OptionClass, List(tp))
- def someType(tp: Type) = typeRef(SomeClass.typeConstructor.prefix, SomeClass, List(tp))
- def symbolType = typeRef(SymbolClass.typeConstructor.prefix, SymbolClass, List())
- def longType = typeRef(LongClass.typeConstructor.prefix, LongClass, List())
-
- // Product, Tuple, Function
- private def mkArityArray(name: String, arity: Int, countFrom: Int = 1): Array[Symbol] = {
- val list = countFrom to arity map (i => getClass("scala." + name + i))
- if (countFrom == 0) list.toArray
- else (NoSymbol +: list).toArray
- }
-
- val MaxTupleArity, MaxProductArity, MaxFunctionArity = 22
- /** The maximal dimensions of a generic array creation.
- * I.e. new Array[Array[Array[Array[Array[T]]]]] creates a 5 times
- * nested array. More is not allowed.
- */
- val MaxArrayDims = 5
- lazy val TupleClass = mkArityArray("Tuple", MaxTupleArity)
- lazy val ProductClass = mkArityArray("Product", MaxProductArity)
- lazy val FunctionClass = mkArityArray("Function", MaxFunctionArity, 0)
- lazy val AbstractFunctionClass = mkArityArray("runtime.AbstractFunction", MaxFunctionArity, 0)
-
- def tupleField(n: Int, j: Int) = getMember(TupleClass(n), "_" + j)
- def isTupleType(tp: Type): Boolean = isTupleType(tp, false)
- def isTupleTypeOrSubtype(tp: Type): Boolean = isTupleType(tp, true)
- private def isTupleType(tp: Type, subtypeOK: Boolean) = tp.normalize match {
- case TypeRef(_, sym, args) if args.nonEmpty =>
- val len = args.length
- len <= MaxTupleArity && {
- val tsym = TupleClass(len)
- (sym == tsym) || (subtypeOK && !tp.isHigherKinded && sym.isSubClass(tsym))
- }
- case _ => false
- }
-
- def tupleType(elems: List[Type]) = {
- val len = elems.length
- if (len <= MaxTupleArity) {
- val sym = TupleClass(len)
- typeRef(sym.typeConstructor.prefix, sym, elems)
- } else NoType
- }
-
- lazy val ProductRootClass: Symbol = getClass("scala.Product")
- def Product_productArity = getMember(ProductRootClass, nme.productArity)
- def Product_productElement = getMember(ProductRootClass, nme.productElement)
- // def Product_productElementName = getMember(ProductRootClass, nme.productElementName)
- def Product_productPrefix = getMember(ProductRootClass, nme.productPrefix)
- def Product_canEqual = getMember(ProductRootClass, nme.canEqual_)
-
- def productProj(z:Symbol, j: Int): Symbol = getMember(z, nme.productAccessorName(j))
- def productProj(n: Int, j: Int): Symbol = productProj(ProductClass(n), j)
-
- /** returns true if this type is exactly ProductN[T1,...,Tn], not some subclass */
- def isExactProductType(tp: Type): Boolean = cond(tp.normalize) {
- case TypeRef(_, sym, elems) =>
- val len = elems.length
- len <= MaxProductArity && sym == ProductClass(len)
- }
-
- def productType(elems: List[Type]) = {
- if (elems.isEmpty) UnitClass.tpe
- else {
- val len = elems.length
- if (len <= MaxProductArity) {
- val sym = ProductClass(len)
- typeRef(sym.typeConstructor.prefix, sym, elems)
- }
- else NoType
- }
- }
-
- /** if tpe <: ProductN[T1,...,TN], returns Some(T1,...,TN) else None */
- def getProductArgs(tpe: Type): Option[List[Type]] =
- tpe.baseClasses collectFirst { case x if isExactProductType(x.tpe) => tpe.baseType(x).typeArgs }
-
- def unapplyUnwrap(tpe:Type) = (tpe match {
- case PolyType(_,MethodType(_, res)) => res
- case MethodType(_, res) => res
- case tpe => tpe
- }).normalize
-
- def functionApply(n: Int) = getMember(FunctionClass(n), nme.apply)
- def functionType(formals: List[Type], restpe: Type) = {
- val len = formals.length
- if (len <= MaxFunctionArity) {
- val sym = FunctionClass(len)
- typeRef(sym.typeConstructor.prefix, sym, formals :+ restpe)
- } else NoType
- }
-
- def abstractFunctionForFunctionType(tp: Type) = tp.normalize match {
- case tr @ TypeRef(_, _, args) if isFunctionType(tr) =>
- val sym = AbstractFunctionClass(args.length - 1)
- typeRef(sym.typeConstructor.prefix, sym, args)
- case _ =>
- NoType
- }
-
- def isFunctionType(tp: Type): Boolean = tp.normalize match {
- case TypeRef(_, sym, args) if args.nonEmpty =>
- val arity = args.length - 1 // -1 is the return type
- arity <= MaxFunctionArity && sym == FunctionClass(arity)
- case _ =>
- false
- }
-
- def isSeqType(tp: Type) = elementType(SeqClass, tp.normalize) != NoType
-
- def elementType(container: Symbol, tp: Type): Type = tp match {
- case TypeRef(_, `container`, arg :: Nil) => arg
- case _ => NoType
- }
-
- def seqType(arg: Type) = typeRef(SeqClass.typeConstructor.prefix, SeqClass, List(arg))
- def arrayType(arg: Type) = typeRef(ArrayClass.typeConstructor.prefix, ArrayClass, List(arg))
- def byNameType(arg: Type) = appliedType(ByNameParamClass.typeConstructor, List(arg))
-
- def ClassType(arg: Type) =
- if (phase.erasedTypes || forMSIL) ClassClass.tpe
- else appliedType(ClassClass.tpe, List(arg))
-
- //
- // .NET backend
- //
-
- lazy val ComparatorClass = getClass("scala.runtime.Comparator")
- // System.ValueType
- lazy val ValueTypeClass: Symbol = getClass(sn.ValueType)
- // System.MulticastDelegate
- lazy val DelegateClass: Symbol = getClass(sn.Delegate)
- var Delegate_scalaCallers: List[Symbol] = List()
- // Symbol -> (Symbol, Type): scalaCaller -> (scalaMethodSym, DelegateType)
- // var Delegate_scalaCallerInfos: HashMap[Symbol, (Symbol, Type)] = _
- lazy val Delegate_scalaCallerTargets: HashMap[Symbol, Symbol] = new HashMap()
-
- def isCorrespondingDelegate(delegateType: Type, functionType: Type): Boolean = {
- isSubType(delegateType, DelegateClass.tpe) &&
- (delegateType.member(nme.apply).tpe match {
- case MethodType(delegateParams, delegateReturn) =>
- isFunctionType(functionType) &&
- (functionType.normalize match {
- case TypeRef(_, _, args) =>
- (delegateParams.map(pt => {
- if (pt.tpe == AnyClass.tpe) definitions.ObjectClass.tpe else pt})
- ::: List(delegateReturn)) == args
- case _ => false
- })
- case _ => false
- })
- }
-
- // members of class scala.Any
- var Any_== : Symbol = _
- var Any_!= : Symbol = _
- var Any_equals : Symbol = _
- var Any_hashCode : Symbol = _
- var Any_toString : Symbol = _
- var Any_getClass : Symbol = _
- var Any_isInstanceOf: Symbol = _
- var Any_asInstanceOf: Symbol = _
- var Any_## : Symbol = _
-
- // members of class java.lang.{Object, String}
- var Object_eq : Symbol = _
- var Object_ne : Symbol = _
- var Object_== : Symbol = _
- var Object_!= : Symbol = _
- var Object_## : Symbol = _
- var Object_synchronized: Symbol = _
- lazy val Object_isInstanceOf = newPolyMethod(
- ObjectClass, "$isInstanceOf",
- tparam => MethodType(List(), booltype)) setFlag (FINAL | SYNTHETIC)
- lazy val Object_asInstanceOf = newPolyMethod(
- ObjectClass, "$asInstanceOf",
- tparam => MethodType(List(), tparam.typeConstructor)) setFlag (FINAL | SYNTHETIC)
-
- def Object_getClass = getMember(ObjectClass, nme.getClass_)
- def Object_clone = getMember(ObjectClass, nme.clone_)
- def Object_finalize = getMember(ObjectClass, nme.finalize_)
- def Object_notify = getMember(ObjectClass, nme.notify_)
- def Object_notifyAll = getMember(ObjectClass, nme.notifyAll_)
- def Object_equals = getMember(ObjectClass, nme.equals_)
- def Object_hashCode = getMember(ObjectClass, nme.hashCode_)
- def Object_toString = getMember(ObjectClass, nme.toString_)
-
- var String_+ : Symbol = _
-
- // boxed classes
- lazy val ObjectRefClass = getClass("scala.runtime.ObjectRef")
- lazy val VolatileObjectRefClass = getClass("scala.runtime.VolatileObjectRef")
- lazy val BoxesRunTimeClass = getModule("scala.runtime.BoxesRunTime")
- lazy val BoxedNumberClass = getClass(sn.BoxedNumber)
- lazy val BoxedCharacterClass = getClass(sn.BoxedCharacter)
- lazy val BoxedBooleanClass = getClass(sn.BoxedBoolean)
- lazy val BoxedByteClass = getClass("java.lang.Byte")
- lazy val BoxedShortClass = getClass("java.lang.Short")
- lazy val BoxedIntClass = getClass("java.lang.Integer")
- lazy val BoxedLongClass = getClass("java.lang.Long")
- lazy val BoxedFloatClass = getClass("java.lang.Float")
- lazy val BoxedDoubleClass = getClass("java.lang.Double")
-
- lazy val BoxedUnitClass = getClass("scala.runtime.BoxedUnit")
- lazy val BoxedUnitModule = getModule("scala.runtime.BoxedUnit")
- def BoxedUnit_UNIT = getMember(BoxedUnitModule, "UNIT")
- def BoxedUnit_TYPE = getMember(BoxedUnitModule, "TYPE")
-
- // special attributes
- lazy val BeanPropertyAttr: Symbol = getClass(sn.BeanProperty)
- lazy val BooleanBeanPropertyAttr: Symbol = getClass(sn.BooleanBeanProperty)
- lazy val CloneableAttr: Symbol = getClass("scala.cloneable")
- lazy val DeprecatedAttr: Symbol = getClass("scala.deprecated")
- lazy val DeprecatedNameAttr: Symbol = getClass("scala.deprecatedName")
- lazy val MigrationAnnotationClass: Symbol = getClass("scala.annotation.migration")
- lazy val NativeAttr: Symbol = getClass("scala.native")
- lazy val RemoteAttr: Symbol = getClass("scala.remote")
- lazy val ScalaNumberClass: Symbol = getClass("scala.math.ScalaNumber")
- lazy val ScalaStrictFPAttr: Symbol = getClass("scala.annotation.strictfp")
- lazy val SerialVersionUIDAttr: Symbol = getClass("scala.SerialVersionUID")
- lazy val SerializableAttr: Symbol = getClass("scala.annotation.serializable") // @serializable is deprecated
- lazy val TraitSetterAnnotationClass: Symbol = getClass("scala.runtime.TraitSetter")
- lazy val TransientAttr: Symbol = getClass("scala.transient")
- lazy val VolatileAttr: Symbol = getClass("scala.volatile")
-
- lazy val AnnotationDefaultAttr: Symbol = {
- val attr = newClass(RuntimePackageClass, tpnme.AnnotationDefaultATTR, List(AnnotationClass.typeConstructor))
- // This attribute needs a constructor so that modifiers in parsed Java code make sense
- attr.info.decls enter (attr newConstructor NoPosition setInfo MethodType(Nil, attr.tpe))
- attr
- }
-
- def getModule(fullname: Name): Symbol =
- getModuleOrClass(fullname.toTermName)
-
- def getClass(fullname: Name): Symbol = {
- var result = getModuleOrClass(fullname.toTypeName)
- while (result.isAliasType) result = result.info.typeSymbol
- result
- }
-
- def getClassIfDefined(fullname: Name): Symbol =
- try {
- getClass(fullname)
- } catch {
- case ex: MissingRequirementError => NoSymbol
- }
-
- def getMember(owner: Symbol, name: Name): Symbol = {
- if (owner == NoSymbol) NoSymbol
- else owner.info.nonPrivateMember(name) match {
- case NoSymbol => throw new FatalError(owner + " does not have a member " + name)
- case result => result
- }
- }
- def packageExists(packageName: String): Boolean = {
- try getModuleOrClass(newTermName(packageName)).isPackage
- catch { case _: MissingRequirementError => false }
- }
-
- /** If you're looking for a class, pass a type name.
- * If a module, a term name.
- */
- private def getModuleOrClass(path: Name): Symbol = {
- val module = path.isTermName
- val fullname = path.toTermName
- if (fullname == nme.NO_NAME)
- return NoSymbol
-
- var sym: Symbol = RootClass
- var i = 0
- var j = fullname.pos('.', i)
- while (j < fullname.length) {
- sym = sym.info.member(fullname.subName(i, j))
- i = j + 1
- j = fullname.pos('.', i)
- }
- val result =
- if (module) sym.info.member(fullname.subName(i, j)).suchThat(_ hasFlag MODULE)
- else sym.info.member(fullname.subName(i, j).toTypeName)
- if (result == NoSymbol) {
- if (settings.debug.value)
- { log(sym.info); log(sym.info.members) }//debug
- throw new MissingRequirementError((if (module) "object " else "class ") + fullname)
- }
- result
- }
-
- private def newClass(owner: Symbol, name: TypeName, parents: List[Type]): Symbol = {
- val clazz = owner.newClass(NoPosition, name)
- clazz.setInfo(ClassInfoType(parents, new Scope, clazz))
- owner.info.decls.enter(clazz)
- clazz
- }
-
- private def newCovariantPolyClass(owner: Symbol, name: TypeName, parent: Symbol => Type): Symbol = {
- val clazz = newClass(owner, name, List())
- val tparam = newTypeParam(clazz, 0) setFlag COVARIANT
- val p = parent(tparam)
-/* p.typeSymbol.initialize
- println(p.typeSymbol + " flags: " + Flags.flagsToString(p.typeSymbol.flags))
- val parents = /*if (p.typeSymbol.isTrait)
- List(definitions.AnyRefClass.tpe, p)
- else*/ List(p)
- println("creating " + name + " with parents " + parents) */
- clazz.setInfo(
- polyType(
- List(tparam),
- ClassInfoType(List(AnyRefClass.tpe, p), new Scope, clazz)))
- }
-
- private def newAlias(owner: Symbol, name: TypeName, alias: Type): Symbol = {
- val tpsym = owner.newAliasType(NoPosition, name)
- tpsym.setInfo(alias)
- owner.info.decls.enter(tpsym)
- tpsym
- }
-
- private def newMethod(owner: Symbol, name: TermName): Symbol = {
- val msym = owner.newMethod(NoPosition, name.encode)
- owner.info.decls.enter(msym)
- msym
- }
-
- private[Definitions] def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type): Symbol = {
- val msym = newMethod(owner, name)
- val params = msym.newSyntheticValueParams(formals)
- msym.setInfo(MethodType(params, restpe))
- }
-
- /** tcon receives the type parameter symbol as argument */
- private def newPolyMethod(owner: Symbol, name: TermName, tcon: Symbol => Type): Symbol =
- newPolyMethodCon(owner, name, tparam => msym => tcon(tparam))
-
- /** tcon receives the type parameter symbol and the method symbol as arguments */
- private def newPolyMethodCon(owner: Symbol, name: TermName, tcon: Symbol => Symbol => Type): Symbol = {
- val msym = newMethod(owner, name)
- val tparam = newTypeParam(msym, 0)
- msym.setInfo(polyType(List(tparam), tcon(tparam)(msym)))
- }
-
- private def newParameterlessMethod(owner: Symbol, name: TermName, restpe: Type) =
- newMethod(owner, name).setInfo(NullaryMethodType(restpe))
-
- private def newTypeParam(owner: Symbol, index: Int): Symbol =
- owner.newTypeParameter(NoPosition, newTypeName("T" + index)) setInfo TypeBounds.empty
-
- lazy val boxedClassValues = boxedClass.values.toSet
- lazy val isUnbox = unboxMethod.values.toSet
- lazy val isBox = boxMethod.values.toSet
-
- /** Is symbol a phantom class for which no runtime representation exists? */
- lazy val isPhantomClass = Set[Symbol](AnyClass, AnyValClass, NullClass, NothingClass)
-
- private lazy val scalaValueClassesSet = ScalaValueClasses.toSet
- private lazy val boxedValueClassesSet = boxedClass.values.toSet + BoxedUnitClass
-
- /** Is symbol a value class? */
- def isValueClass(sym: Symbol) = scalaValueClassesSet(sym)
- def isNonUnitValueClass(sym: Symbol) = (sym != UnitClass) && isValueClass(sym)
- def isScalaValueType(tp: Type) = scalaValueClassesSet(tp.typeSymbol)
-
- /** Is symbol a boxed value class, e.g. java.lang.Integer? */
- def isBoxedValueClass(sym: Symbol) = boxedValueClassesSet(sym)
-
- /** If symbol is a value class, return the value class, with the exception
- * that BoxedUnit remains BoxedUnit. If not a value class, NoSymbol.
- */
- def unboxedValueClass(sym: Symbol): Symbol =
- if (isValueClass(sym)) sym
- else if (sym == BoxedUnitClass) sym
- else boxedClass.map(_.swap).getOrElse(sym, NoSymbol)
-
- /** Is type's symbol a numeric value class? */
- def isNumericValueType(tp: Type): Boolean = tp match {
- case TypeRef(_, sym, _) => isNumericValueClass(sym)
- case _ => false
- }
-
- // todo: reconcile with javaSignature!!!
- def signature(tp: Type): String = {
- def erasure(tp: Type): Type = tp match {
- case st: SubType => erasure(st.supertype)
- case RefinedType(parents, _) => erasure(parents.head)
- case _ => tp
- }
- def flatNameString(sym: Symbol, separator: Char): String =
- if (sym == NoSymbol) "" // be more resistant to error conditions, e.g. neg/t3222.scala
- else if (sym.owner.isPackageClass) sym.fullName('.') + (if (sym.isModuleClass) "$" else "")
- else flatNameString(sym.owner, separator) + "$" + sym.simpleName;
- def signature1(etp: Type): String = {
- if (etp.typeSymbol == ArrayClass) "[" + signature1(erasure(etp.normalize.typeArgs.head))
- else if (isValueClass(etp.typeSymbol)) abbrvTag(etp.typeSymbol).toString()
- else "L" + flatNameString(etp.typeSymbol, '/') + ";"
- }
- val etp = erasure(tp)
- if (etp.typeSymbol == ArrayClass) signature1(etp)
- else flatNameString(etp.typeSymbol, '.')
- }
-
- /** getModule2/getClass2 aren't needed at present but may be again,
- * so for now they're mothballed.
- */
- // def getModule2(name1: Name, name2: Name) = {
- // try getModuleOrClass(name1.toTermName)
- // catch { case ex1: FatalError =>
- // try getModuleOrClass(name2.toTermName)
- // catch { case ex2: FatalError => throw ex1 }
- // }
- // }
- // def getClass2(name1: Name, name2: Name) = {
- // try {
- // val result = getModuleOrClass(name1.toTypeName)
- // if (result.isAliasType) getClass(name2) else result
- // }
- // catch { case ex1: FatalError =>
- // try getModuleOrClass(name2.toTypeName)
- // catch { case ex2: FatalError => throw ex1 }
- // }
- // }
-
- /** Surgery on the value classes. Without this, AnyVals defined in source
- * files end up with an AnyRef parent. It is likely there is a better way
- * to evade that AnyRef.
- */
- private def setParents(sym: Symbol, parents: List[Type]): Symbol = sym.rawInfo match {
- case ClassInfoType(_, scope, clazz) =>
- sym setInfo ClassInfoType(parents, scope, clazz)
- case _ =>
- sym
- }
-
- def init() {
- if (isInitialized) return
-
- EmptyPackageClass setInfo ClassInfoType(Nil, new Scope, EmptyPackageClass)
- EmptyPackage setInfo EmptyPackageClass.tpe
-
- RootClass.info.decls enter EmptyPackage
- RootClass.info.decls enter RootPackage
-
- // members of class scala.Any
- Any_== = newMethod(AnyClass, nme.EQ, anyparam, booltype) setFlag FINAL
- Any_!= = newMethod(AnyClass, nme.NE, anyparam, booltype) setFlag FINAL
- Any_equals = newMethod(AnyClass, nme.equals_, anyparam, booltype)
- Any_hashCode = newMethod(AnyClass, nme.hashCode_, Nil, inttype)
- Any_toString = newMethod(AnyClass, nme.toString_, Nil, stringtype)
- Any_## = newMethod(AnyClass, nme.HASHHASH, Nil, inttype) setFlag FINAL
-
- // Any_getClass requires special handling. The return type is determined on
- // a per-call-site basis as if the function being called were actually:
- //
- // // Assuming `target.getClass()`
- // def getClass[T](target: T): Class[_ <: T]
- //
- // Since getClass is not actually a polymorphic method, this requires compiler
- // participation. At the "Any" level, the return type is Class[_] as it is in
- // java.lang.Object. Java also special cases the return type.
- Any_getClass = {
- val eparams = typeParamsToExistentials(ClassClass, ClassClass.typeParams)
- eparams.head setInfo TypeBounds.empty
- val tpe = existentialAbstraction(eparams, appliedType(ClassClass.tpe, List(eparams.head.tpe)))
-
- newMethod(AnyClass, nme.getClass_, Nil, tpe) setFlag DEFERRED
- }
- Any_isInstanceOf = newPolyMethod(
- AnyClass, nme.isInstanceOf_, tparam => NullaryMethodType(booltype)) setFlag FINAL
- Any_asInstanceOf = newPolyMethod(
- AnyClass, nme.asInstanceOf_, tparam => NullaryMethodType(tparam.typeConstructor)) setFlag FINAL
-
- // members of class java.lang.{ Object, String }
- Object_## = newMethod(ObjectClass, nme.HASHHASH, Nil, inttype) setFlag FINAL
- Object_== = newMethod(ObjectClass, nme.EQ, anyrefparam, booltype) setFlag FINAL
- Object_!= = newMethod(ObjectClass, nme.NE, anyrefparam, booltype) setFlag FINAL
- Object_eq = newMethod(ObjectClass, nme.eq, anyrefparam, booltype) setFlag FINAL
- Object_ne = newMethod(ObjectClass, nme.ne, anyrefparam, booltype) setFlag FINAL
- Object_synchronized = newPolyMethodCon(
- ObjectClass, nme.synchronized_,
- tparam => msym => MethodType(msym.newSyntheticValueParams(List(tparam.typeConstructor)), tparam.typeConstructor)) setFlag FINAL
-
- String_+ = newMethod(
- StringClass, "+", anyparam, stringtype) setFlag FINAL
-
- val forced = List( // force initialization of every symbol that is entered as a side effect
- AnnotationDefaultAttr, // #2264
- RepeatedParamClass,
- JavaRepeatedParamClass,
- ByNameParamClass,
- AnyClass,
- AnyRefClass,
- AnyValClass,
- NullClass,
- NothingClass,
- SingletonClass,
- EqualsPatternClass,
- Object_isInstanceOf,
- Object_asInstanceOf
- )
-
- /** Removing the anyref parent they acquire from having a source file.
- */
- setParents(AnyValClass, anyparam)
- ScalaValueClasses foreach { sym =>
- setParents(sym, anyvalparam)
- }
-
- isInitialized = true
- } //init
-
- var nbScalaCallers: Int = 0
- def newScalaCaller(delegateType: Type): Symbol = {
- assert(forMSIL, "scalaCallers can only be created if target is .NET")
- // object: reference to object on which to call (scala-)method
- val paramTypes: List[Type] = List(ObjectClass.tpe)
- val name: String = "$scalaCaller$$" + nbScalaCallers
- // tparam => resultType, which is the resultType of PolyType, i.e. the result type after applying the
- // type parameter =-> a MethodType in this case
- // TODO: set type bounds manually (-> MulticastDelegate), see newTypeParam
- val newCaller = newMethod(DelegateClass, name, paramTypes, delegateType) setFlag (FINAL | STATIC)
- // val newCaller = newPolyMethod(DelegateClass, name,
- // tparam => MethodType(paramTypes, tparam.typeConstructor)) setFlag (FINAL | STATIC)
- Delegate_scalaCallers = Delegate_scalaCallers ::: List(newCaller)
- nbScalaCallers += 1
- newCaller
- }
-
- // def addScalaCallerInfo(scalaCaller: Symbol, methSym: Symbol, delType: Type) {
- // assert(Delegate_scalaCallers contains scalaCaller)
- // Delegate_scalaCallerInfos += (scalaCaller -> (methSym, delType))
- // }
-
- def addScalaCallerInfo(scalaCaller: Symbol, methSym: Symbol) {
- assert(Delegate_scalaCallers contains scalaCaller)
- Delegate_scalaCallerTargets += (scalaCaller -> methSym)
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/symtab/Flags.scala b/src/compiler/scala/tools/nsc/symtab/Flags.scala
deleted file mode 100644
index e74de01..0000000
--- a/src/compiler/scala/tools/nsc/symtab/Flags.scala
+++ /dev/null
@@ -1,220 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package symtab
-
-// Flags at each index of a flags Long. Those marked with /M are used in
-// Parsers/JavaParsers and therefore definitely appear on Modifiers; but the
-// absence of /M on the other flags does not imply they aren't.
-//
-// Generated by mkFlagsTable() at Mon Oct 11 10:01:09 PDT 2010
-//
-// 0: PROTECTED/M
-// 1: OVERRIDE/M
-// 2: PRIVATE/M
-// 3: ABSTRACT/M
-// 4: DEFERRED/M
-// 5: FINAL/M
-// 6: METHOD
-// 7: INTERFACE/M
-// 8: MODULE
-// 9: IMPLICIT/M
-// 10: SEALED/M
-// 11: CASE/M
-// 12: MUTABLE/M
-// 13: PARAM/M
-// 14: PACKAGE
-// 15:
-// 16: BYNAMEPARAM/M CAPTURED COVARIANT/M
-// 17: CONTRAVARIANT/M INCONSTRUCTOR LABEL
-// 18: ABSOVERRIDE/M
-// 19: LOCAL/M
-// 20: JAVA/M
-// 21: SYNTHETIC
-// 22: STABLE
-// 23: STATIC/M
-// 24: CASEACCESSOR/M
-// 25: DEFAULTPARAM/M TRAIT/M
-// 26: BRIDGE
-// 27: ACCESSOR
-// 28: SUPERACCESSOR
-// 29: PARAMACCESSOR/M
-// 30: MODULEVAR
-// 31: LAZY/M
-// 32: IS_ERROR
-// 33: OVERLOADED
-// 34: LIFTED
-// 35: EXISTENTIAL MIXEDIN
-// 36: EXPANDEDNAME
-// 37: IMPLCLASS PRESUPER/M
-// 38: TRANS_FLAG
-// 39: LOCKED
-// 40: SPECIALIZED
-// 41: DEFAULTINIT/M
-// 42: VBRIDGE
-// 43: VARARGS
-// 44:
-// 45:
-// 46:
-// 47:
-// 48:
-// 49: latePRIVATE
-// 50: lateABSTRACT
-// 51: lateDEFERRED
-// 52: lateFINAL
-// 53: lateMETHOD
-// 54: lateINTERFACE
-// 55: lateMODULE
-// 56: notPROTECTED
-// 57: notOVERRIDE
-// 58: notPRIVATE
-// 59: notABSTRACT
-// 60: notDEFERRED
-// 61: notFINAL
-// 62: notMETHOD
-// 63:
-class Flags extends reflect.generic.Flags {
-
- final val InitialFlags = 0x0001FFFFFFFFFFFFL // flags that are enabled from phase 1.
- final val LateFlags = 0x00FE000000000000L // flags that override flags in 0x1FC.
- final val AntiFlags = 0x7F00000000000000L // flags that cancel flags in 0x07F
- final val LateShift = 47L
- final val AntiShift = 56L
-
- // late flags (set by a transformer phase)
- final val latePRIVATE = (PRIVATE: Long) << LateShift
- final val lateABSTRACT = (ABSTRACT: Long) << LateShift
- final val lateDEFERRED = (DEFERRED: Long) << LateShift
- final val lateINTERFACE = (INTERFACE: Long) << LateShift
- final val lateMODULE = (MODULE: Long) << LateShift
- final val lateFINAL = (FINAL: Long) << LateShift
- final val lateMETHOD = (METHOD: Long) << LateShift
-
- final val notFINAL = (FINAL: Long) << AntiShift
- final val notPRIVATE = (PRIVATE: Long) << AntiShift
- final val notDEFERRED = (DEFERRED: Long) << AntiShift
- final val notPROTECTED = (PROTECTED: Long) << AntiShift
- final val notABSTRACT = (ABSTRACT: Long) << AntiShift
- final val notOVERRIDE = (OVERRIDE: Long) << AntiShift
- final val notMETHOD = (METHOD: Long) << AntiShift
-
- final val notFlagMap = Map[Int, Long](
- FINAL -> notFINAL,
- PRIVATE -> notPRIVATE,
- DEFERRED -> notDEFERRED,
- PROTECTED -> notPROTECTED,
- ABSTRACT -> notABSTRACT,
- OVERRIDE -> notOVERRIDE,
- METHOD -> notMETHOD
- )
-
- // masks
- /** These flags can be set when class or module symbol is first created. */
- final val TopLevelCreationFlags: Long =
- MODULE | PACKAGE | FINAL | JAVA
-
- /** These modifiers can be set explicitly in source programs. */
- final val ExplicitFlags: Long =
- PRIVATE | PROTECTED | ABSTRACT | FINAL | SEALED |
- OVERRIDE | CASE | IMPLICIT | ABSOVERRIDE | LAZY
-
- /** These modifiers appear in TreePrinter output. */
- final val PrintableFlags: Long =
- ExplicitFlags | LOCAL | SYNTHETIC | STABLE | CASEACCESSOR |
- ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | BRIDGE | STATIC | VBRIDGE | SPECIALIZED
-
- /** The two bridge flags */
- final val BRIDGES = BRIDGE | VBRIDGE
-
- final val FieldFlags: Long =
- MUTABLE | CASEACCESSOR | PARAMACCESSOR | STATIC | FINAL | PRESUPER | LAZY
-
- final val AccessFlags: Long = PRIVATE | PROTECTED | LOCAL
- final val VarianceFlags = COVARIANT | CONTRAVARIANT
- final val ConstrFlags: Long = JAVA
-
- /** Module flags inherited by their module-class */
- final val ModuleToClassFlags: Long = AccessFlags | MODULE | PACKAGE | CASE | SYNTHETIC | JAVA
-
- def getterFlags(fieldFlags: Long): Long = ACCESSOR + (
- if ((fieldFlags & MUTABLE) != 0) fieldFlags & ~MUTABLE & ~PRESUPER
- else fieldFlags & ~PRESUPER | STABLE
- )
-
- def setterFlags(fieldFlags: Long): Long =
- getterFlags(fieldFlags) & ~STABLE & ~CASEACCESSOR
-
- // Generated by mkFlagToStringMethod() at Mon Oct 11 10:12:36 PDT 2010
- @annotation.switch override def flagToString(flag: Long): String = flag match {
- case PROTECTED => "protected" // (1L << 0)
- case OVERRIDE => "override" // (1L << 1)
- case PRIVATE => "private" // (1L << 2)
- case ABSTRACT => "abstract" // (1L << 3)
- case DEFERRED => "<deferred>" // (1L << 4)
- case FINAL => "final" // (1L << 5)
- case METHOD => "<method>" // (1L << 6)
- case INTERFACE => "<interface>" // (1L << 7)
- case MODULE => "<module>" // (1L << 8)
- case IMPLICIT => "implicit" // (1L << 9)
- case SEALED => "sealed" // (1L << 10)
- case CASE => "case" // (1L << 11)
- case MUTABLE => "<mutable>" // (1L << 12)
- case PARAM => "<param>" // (1L << 13)
- case PACKAGE => "<package>" // (1L << 14)
- case 0x8000L => "" // (1L << 15)
- case BYNAMEPARAM => "<bynameparam/captured/covariant>" // (1L << 16)
- case CONTRAVARIANT => "<contravariant/inconstructor/label>" // (1L << 17)
- case ABSOVERRIDE => "absoverride" // (1L << 18)
- case LOCAL => "<local>" // (1L << 19)
- case JAVA => "<java>" // (1L << 20)
- case SYNTHETIC => "<synthetic>" // (1L << 21)
- case STABLE => "<stable>" // (1L << 22)
- case STATIC => "<static>" // (1L << 23)
- case CASEACCESSOR => "<caseaccessor>" // (1L << 24)
- case DEFAULTPARAM => "<defaultparam/trait>" // (1L << 25)
- case BRIDGE => "<bridge>" // (1L << 26)
- case ACCESSOR => "<accessor>" // (1L << 27)
- case SUPERACCESSOR => "<superaccessor>" // (1L << 28)
- case PARAMACCESSOR => "<paramaccessor>" // (1L << 29)
- case MODULEVAR => "<modulevar>" // (1L << 30)
- case LAZY => "lazy" // (1L << 31)
- case IS_ERROR => "<is_error>" // (1L << 32)
- case OVERLOADED => "<overloaded>" // (1L << 33)
- case LIFTED => "<lifted>" // (1L << 34)
- case EXISTENTIAL => "<existential/mixedin>" // (1L << 35)
- case EXPANDEDNAME => "<expandedname>" // (1L << 36)
- case IMPLCLASS => "<implclass/presuper>" // (1L << 37)
- case TRANS_FLAG => "<trans_flag>" // (1L << 38)
- case LOCKED => "<locked>" // (1L << 39)
- case SPECIALIZED => "<specialized>" // (1L << 40)
- case DEFAULTINIT => "<defaultinit>" // (1L << 41)
- case VBRIDGE => "<vbridge>" // (1L << 42)
- case VARARGS => "<varargs>" // (1L << 43)
- case 0x100000000000L => "" // (1L << 44)
- case 0x200000000000L => "" // (1L << 45)
- case 0x400000000000L => "" // (1L << 46)
- case 0x800000000000L => "" // (1L << 47)
- case 0x1000000000000L => "" // (1L << 48)
- case `latePRIVATE` => "<lateprivate>" // (1L << 49)
- case `lateABSTRACT` => "<lateabstract>" // (1L << 50)
- case `lateDEFERRED` => "<latedeferred>" // (1L << 51)
- case `lateFINAL` => "<latefinal>" // (1L << 52)
- case `lateMETHOD` => "<latemethod>" // (1L << 53)
- case `lateINTERFACE` => "<lateinterface>" // (1L << 54)
- case `lateMODULE` => "<latemodule>" // (1L << 55)
- case `notPROTECTED` => "<notprotected>" // (1L << 56)
- case `notOVERRIDE` => "<notoverride>" // (1L << 57)
- case `notPRIVATE` => "<notprivate>" // (1L << 58)
- case `notABSTRACT` => "<notabstract>" // (1L << 59)
- case `notDEFERRED` => "<notdeferred>" // (1L << 60)
- case `notFINAL` => "<notfinal>" // (1L << 61)
- case `notMETHOD` => "<notmethod>" // (1L << 62)
- case 0x8000000000000000L => "" // (1L << 63)
- case _ => ""
- }
-}
-
-object Flags extends Flags { }
diff --git a/src/compiler/scala/tools/nsc/symtab/InfoTransformers.scala b/src/compiler/scala/tools/nsc/symtab/InfoTransformers.scala
deleted file mode 100644
index 69896c6..0000000
--- a/src/compiler/scala/tools/nsc/symtab/InfoTransformers.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package symtab
-
-trait InfoTransformers {
- self: SymbolTable =>
-
- abstract class InfoTransformer {
- var prev: InfoTransformer = this
- var next: InfoTransformer = this
-
- val pid: Phase#Id
- val changesBaseClasses: Boolean
- def transform(sym: Symbol, tpe: Type): Type
-
- def insert(that: InfoTransformer) {
- assert(this.pid != that.pid)
- if (that.pid < this.pid) {
- prev insert that
- } else if (next.pid <= that.pid && next.pid != NoPhase.id) {
- next insert that
- } else {
- that.next = next
- that.prev = this
- next.prev = that
- this.next = that
- }
- }
-
- /** The InfoTransformer whose (pid == from).
- * If no such exists, the InfoTransformer with the next
- * higher pid.
- */
- def nextFrom(from: Phase#Id): InfoTransformer =
- if (from == this.pid) this
- else if (from < this.pid)
- if (prev.pid < from) this
- else prev.nextFrom(from);
- else if (next.pid == NoPhase.id) next
- else next.nextFrom(from)
- }
-}
-
diff --git a/src/compiler/scala/tools/nsc/symtab/NameManglers.scala b/src/compiler/scala/tools/nsc/symtab/NameManglers.scala
deleted file mode 100644
index c40e176..0000000
--- a/src/compiler/scala/tools/nsc/symtab/NameManglers.scala
+++ /dev/null
@@ -1,171 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package symtab
-
-import java.security.MessageDigest
-import scala.io.Codec
-import util.Chars.isOperatorPart
-
-/** A trait to encapsulate name mangling. It's intended for the
- * values and methods involved in assembling names out of other names,
- * and not for simple synthetically named locals.
- */
-trait NameManglers {
- self: SymbolTable =>
-
- trait NameManglingCommon {
- self: CompilerCommonNames =>
-
- def flattenedName(segments: Name*): NameType = compactedString(segments mkString "$")
-
- /**
- * COMPACTIFY
- *
- * The hashed name has the form (prefix + marker + md5 + marker + suffix), where
- * - prefix/suffix.length = MaxNameLength / 4
- * - md5.length = 32
- *
- * We obtain the formula:
- *
- * FileNameLength = 2*(MaxNameLength / 4) + 2.marker.length + 32 + 6
- *
- * (+6 for ".class"). MaxNameLength can therefore be computed as follows:
- */
- private final val marker = "$$$$"
- private final val MaxNameLength = math.min(
- settings.maxClassfileName.value - 6,
- 2 * (settings.maxClassfileName.value - 6 - 2*marker.length - 32)
- )
- private lazy val md5 = MessageDigest.getInstance("MD5")
- private def toMD5(s: String, edge: Int) = {
- val prefix = s take edge
- val suffix = s takeRight edge
-
- val cs = s.toArray
- val bytes = Codec toUTF8 cs
- md5 update bytes
- val md5chars = md5.digest() map (b => (b & 0xFF).toHexString) mkString
-
- prefix + marker + md5chars + marker + suffix
- }
- private def compactedString(s: String) =
- if (s.length <= MaxNameLength) s
- else toMD5(s, MaxNameLength / 4)
- }
-
- trait TypeNameMangling extends NameManglingCommon {
- self: tpnme.type =>
-
- }
-
- trait TermNameMangling extends NameManglingCommon {
- self: nme.type =>
-
- val IMPL_CLASS_SUFFIX = "$class"
- val SINGLETON_SUFFIX = ".type"
- val LOCALDUMMY_PREFIX = "<local " // owner of local blocks
- val PROTECTED_PREFIX = "protected$"
- val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set"
- val SELECTOR_DUMMY = "<unapply-selector>"
- val SETTER_SUFFIX = encode("_=")
- val SUPER_PREFIX_STRING = "super$"
- val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
-
- def isConstructorName(name: Name) = name == CONSTRUCTOR || name == MIXIN_CONSTRUCTOR
- def isExceptionResultName(name: Name) = name startsWith EXCEPTION_RESULT_PREFIX
- /** !!! Foo$class$1 is an implClassName, I think. */
- def isImplClassName(name: Name) = name endsWith IMPL_CLASS_SUFFIX
- def isLocalDummyName(name: Name) = name startsWith LOCALDUMMY_PREFIX
- def isLocalName(name: Name) = name endsWith LOCAL_SUFFIX_STRING
- def isLoopHeaderLabel(name: Name) = (name startsWith WHILE_PREFIX) || (name startsWith DO_WHILE_PREFIX)
- def isProtectedAccessorName(name: Name) = name startsWith PROTECTED_PREFIX
- def isSetterName(name: Name) = name endsWith SETTER_SUFFIX
- def isTraitSetterName(name: Name) = isSetterName(name) && (name containsName TRAIT_SETTER_SEPARATOR_STRING)
- def isSingletonName(name: Name) = name endsWith SINGLETON_SUFFIX
-
- def isOpAssignmentName(name: Name) = name match {
- case raw.NE | raw.LE | raw.GE | EMPTY => false
- case _ =>
- name.endChar == '=' && name.startChar != '=' && isOperatorPart(name.startChar)
- }
-
- /** The expanded setter name of `name' relative to this class `base`
- */
- def expandedSetterName(name: TermName, base: Symbol): TermName =
- expandedName(name, base, separator = TRAIT_SETTER_SEPARATOR_STRING)
-
- /** If `name' is an expandedName name, the original name.
- * Otherwise `name' itself.
- */
- def originalName(name: Name): Name = {
- var i = name.length
- while (i >= 2 && !(name(i - 1) == '$' && name(i - 2) == '$')) i -= 1
- if (i >= 2) {
- while (i >= 3 && name(i - 3) == '$') i -= 1
- name.subName(i, name.length)
- } else name
- }
-
- /** Return the original name and the types on which this name
- * is specialized. For example,
- * {{{
- * splitSpecializedName("foo$mIcD$sp") == ('foo', "I", "D")
- * }}}
- * `foo$mIcD$sp` is the name of a method specialized on two type
- * parameters, the first one belonging to the method itself, on Int,
- * and another one belonging to the enclosing class, on Double.
- */
- def splitSpecializedName(name: Name): (Name, String, String) =
- if (name.endsWith("$sp")) {
- val name1 = name stripEnd "$sp"
- val idxC = name1 lastIndexOf 'c'
- val idxM = name1 lastIndexOf 'm'
-
- (name1.subName(0, idxM - 1),
- name1.subName(idxC + 1, name1.length).toString,
- name1.subName(idxM + 1, idxC).toString)
- } else
- (name, "", "")
-
- def getterName(name: TermName): TermName = if (isLocalName(name)) localToGetter(name) else name
- def getterToLocal(name: TermName): TermName = name.toTermName append LOCAL_SUFFIX_STRING
- def getterToSetter(name: TermName): TermName = name.toTermName append SETTER_SUFFIX
- def localToGetter(name: TermName): TermName = name stripEnd LOCAL_SUFFIX_STRING toTermName
-
- def setterToGetter(name: TermName): TermName = {
- val p = name.pos(TRAIT_SETTER_SEPARATOR_STRING)
- if (p < name.length)
- setterToGetter(name.subName(p + TRAIT_SETTER_SEPARATOR_STRING.length, name.length))
- else
- name stripEnd SETTER_SUFFIX toTermName
- }
-
- def defaultGetterName(name: Name, pos: Int): TermName = {
- val prefix = if (isConstructorName(name)) "init" else name
- newTermName(prefix + DEFAULT_GETTER_STRING + pos)
- }
- def defaultGetterToMethod(name: Name): TermName = {
- val p = name.pos(DEFAULT_GETTER_STRING)
- if (p < name.length) name.subName(0, p)
- else name
- }
-
- def dropSingletonName(name: Name): TypeName = name stripEnd SINGLETON_SUFFIX toTypeName
- def singletonName(name: Name): TypeName = name append SINGLETON_SUFFIX toTypeName
- def implClassName(name: Name): TypeName = name append IMPL_CLASS_SUFFIX toTypeName
- def interfaceName(implname: Name): TypeName = implname stripEnd IMPL_CLASS_SUFFIX toTypeName
- def localDummyName(clazz: Symbol): TermName = newTermName(LOCALDUMMY_PREFIX + clazz.name + ">")
- def productAccessorName(i: Int): TermName = newTermName("_" + i)
- def superName(name: Name): TermName = newTermName(SUPER_PREFIX_STRING + name)
-
- /** The name of an accessor for protected symbols. */
- def protName(name: Name): TermName = newTermName(PROTECTED_PREFIX + name)
-
- /** The name of a setter for protected symbols. Used for inherited Java fields. */
- def protSetterName(name: Name): TermName = newTermName(PROTECTED_SET_PREFIX + name)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/symtab/Names.scala b/src/compiler/scala/tools/nsc/symtab/Names.scala
deleted file mode 100644
index a51e0a2..0000000
--- a/src/compiler/scala/tools/nsc/symtab/Names.scala
+++ /dev/null
@@ -1,431 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package symtab
-
-import scala.reflect.NameTransformer
-import scala.io.Codec
-import java.security.MessageDigest
-
-/** The class Names ...
- *
- * @author Martin Odersky
- * @version 1.0, 05/02/2005
- */
-trait Names extends reflect.generic.Names {
-
-// Operations -------------------------------------------------------------
-
- private final val HASH_SIZE = 0x8000
- private final val HASH_MASK = 0x7FFF
- private final val NAME_SIZE = 0x20000
-
- final val nameDebug = false
-
- /** memory to store all names sequentially
- */
- var chrs: Array[Char] = new Array[Char](NAME_SIZE)
- private var nc = 0
-
- /** hashtable for finding term names quickly
- */
- private val termHashtable = new Array[TermName](HASH_SIZE)
-
- /** hashtable for finding type names quickly
- */
- private val typeHashtable = new Array[TypeName](HASH_SIZE)
-
- /** the hashcode of a name
- */
- private def hashValue(cs: Array[Char], offset: Int, len: Int): Int =
- if (len > 0)
- (len * (41 * 41 * 41) +
- cs(offset) * (41 * 41) +
- cs(offset + len - 1) * 41 +
- cs(offset + (len >> 1)))
- else 0;
-
- /** Is (the ASCII representation of) name at given index equal to
- * cs[offset..offset+len-1]?
- */
- private def equals(index: Int, cs: Array[Char], offset: Int, len: Int): Boolean = {
- var i = 0
- while ((i < len) && (chrs(index + i) == cs(offset + i)))
- i += 1;
- i == len
- }
-
- /** enter characters into chrs array
- */
- private def enterChars(cs: Array[Char], offset: Int, len: Int) {
- var i = 0
- while (i < len) {
- if (nc + i == chrs.length) {
- val newchrs = new Array[Char](chrs.length * 2)
- compat.Platform.arraycopy(chrs, 0, newchrs, 0, chrs.length)
- chrs = newchrs
- }
- chrs(nc + i) = cs(offset + i)
- i += 1
- }
- if (len == 0) nc += 1
- else nc = nc + len
- }
-
- /** Create a term name from the characters in cs[offset..offset+len-1].
- */
- def newTermName(cs: Array[Char], offset: Int, len: Int): TermName = {
- val h = hashValue(cs, offset, len) & HASH_MASK
- var n = termHashtable(h)
- while ((n ne null) && (n.length != len || !equals(n.start, cs, offset, len)))
- n = n.next;
- if (n eq null) {
- n = new TermName(nc, len, h)
- enterChars(cs, offset, len)
- }
- n
- }
-
- /** create a term name from string
- */
- def newTermName(s: String): TermName =
- newTermName(s.toCharArray(), 0, s.length())
-
- /** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1].
- */
- def newTermName(bs: Array[Byte], offset: Int, len: Int): TermName = {
- val chars = Codec fromUTF8 bs.slice(offset, offset + len)
- newTermName(chars, 0, chars.length)
- }
-
- /** Create a type name from the characters in cs[offset..offset+len-1].
- */
- def newTypeName(cs: Array[Char], offset: Int, len: Int): TypeName =
- newTermName(cs, offset, len).toTypeName
-
- /** Create a type name from string
- */
- def newTypeName(s: String): TypeName =
- newTermName(s).toTypeName
-
- /** Create a type name from the UTF8 encoded bytes in bs[offset..offset+len-1].
- */
- def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
- newTermName(bs, offset, len).toTypeName
-
- def mkTermName(name: Name): TermName = name.toTermName
- def mkTypeName(name: Name): TypeName = name.toTypeName
- def isTermName(name: Name): Boolean = name.isTermName
- def isTypeName(name: Name): Boolean = name.isTypeName
-
- def nameChars: Array[Char] = chrs
- @deprecated("", "2.9.0") def view(s: String): TermName = newTermName(s)
-
-// Classes ----------------------------------------------------------------------
-
- /** The name class. */
- sealed abstract class Name(protected val index: Int, protected val len: Int) extends Function1[Int, Char] {
- /** Index into name table */
- def start: Int = index
-
- /** next name in the same hash bucket
- */
- def next: Name
-
- /** return the length of this name
- */
- final def length: Int = len
- final def isEmpty = length == 0
- final def nonEmpty = !isEmpty
-
- def isTermName: Boolean
- def isTypeName: Boolean
- def toTermName: TermName
- def toTypeName: TypeName
- def companionName: Name
- def bothNames: List[Name] = List(toTermName, toTypeName)
-
- /** Copy bytes of this name to buffer cs, starting at position `offset`.
- */
- final def copyChars(cs: Array[Char], offset: Int) =
- compat.Platform.arraycopy(chrs, index, cs, offset, len)
-
- /** return the ascii representation of this name
- */
- final def toChars: Array[Char] = {
- val cs = new Array[Char](len)
- copyChars(cs, 0)
- cs
- }
-
- /** return the string representation of this name
- */
- final override def toString(): String = new String(chrs, index, len)
- def debugString() = NameTransformer.decode(toString) + (if (isTypeName) "!" else "")
-
- /** Write to UTF8 representation of this name to given character array.
- * Start copying to index `to'. Return index of next free byte in array.
- * Array must have enough remaining space for all bytes
- * (i.e. maximally 3*length bytes).
- */
- final def copyUTF8(bs: Array[Byte], offset: Int): Int = {
- val bytes = Codec toUTF8 chrs.slice(index, index + len)
- compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length)
- offset + bytes.length
- }
-
- /** return the hash value of this name
- */
- final override def hashCode(): Int = index
-
- // Presently disabled.
- // override def equals(other: Any) = paranoidEquals(other)
- private def paranoidEquals(other: Any): Boolean = {
- val cmp = this eq other.asInstanceOf[AnyRef]
- if (cmp || !nameDebug)
- return cmp
-
- other match {
- case x: String =>
- Console.println("Compared " + debugString + " and String '" + x + "'")
- case x: Name =>
- if (this.isTermName != x.isTermName) {
- val panic = this.toTermName == x.toTermName
- Console.println("Compared '%s' and '%s', one term, one type.%s".format(this, x,
- if (panic) " And they contain the same name string!"
- else ""
- ))
- }
- case _ =>
- }
- false
- }
-
- /** return the i'th Char of this name
- */
- final def apply(i: Int): Char = chrs(index + i)
-
- /** return the index of first occurrence of char c in this name, length if not found */
- final def pos(c: Char): Int = pos(c, 0)
-
- /** return the index of first occurrence of char c in this name, length if not found */
- final def pos(s: String): Int = pos(s, 0)
-
- /** return the index of the first occurrence of character c in
- * this name from start, length if not found.
- *
- * @param c the character
- * @param start ...
- * @return the index of the first occurrence of c
- */
- final def pos(c: Char, start: Int): Int = {
- var i = start
- while (i < len && chrs(index + i) != c) i += 1
- i
- }
-
- /** return the index of the first occurrence of nonempty string s
- * in this name from start, length if not found.
- *
- * @param s the string
- * @param start ...
- * @return the index of the first occurrence of s
- */
- final def pos(s: String, start: Int): Int = {
- var i = pos(s.charAt(0), start)
- while (i + s.length() <= len) {
- var j = 1
- while (s.charAt(j) == chrs(index + i + j)) {
- j += 1
- if (j == s.length()) return i
- }
- i = pos(s.charAt(0), i + 1)
- }
- len
- }
-
- /** return the index of last occurrence of char c in this
- * name, -1 if not found.
- *
- * @param c the character
- * @return the index of the last occurrence of c
- */
- final def lastPos(c: Char): Int = lastPos(c, len - 1)
-
- final def lastPos(s: String): Int = lastPos(s, len - s.length)
-
- /** return the index of the last occurrence of char c in this
- * name from start, -1 if not found.
- *
- * @param c the character
- * @param start ...
- * @return the index of the last occurrence of c
- */
- final def lastPos(c: Char, start: Int): Int = {
- var i = start
- while (i >= 0 && chrs(index + i) != c) i -= 1
- i
- }
-
- /** return the index of the last occurrence of string s in this
- * name from start, -1 if not found.
- *
- * @param s the string
- * @param start ...
- * @return the index of the last occurrence of s
- */
- final def lastPos(s: String, start: Int): Int = {
- var i = lastPos(s.charAt(0), start)
- while (i >= 0) {
- var j = 1;
- while (s.charAt(j) == chrs(index + i + j)) {
- j += 1
- if (j == s.length()) return i;
- }
- i = lastPos(s.charAt(0), i - 1)
- }
- -s.length()
- }
-
- /** does this name start with prefix?
- */
- final def startsWith(prefix: Name): Boolean = startsWith(prefix, 0)
-
- /** does this name start with prefix at given start index?
- */
- final def startsWith(prefix: Name, start: Int): Boolean = {
- var i = 0
- while (i < prefix.length && start + i < len &&
- chrs(index + start + i) == chrs(prefix.start + i))
- i += 1;
- i == prefix.length
- }
-
- /** does this name end with suffix?
- */
- final def endsWith(suffix: Name): Boolean = endsWith(suffix, len)
-
- /** does this name end with suffix just before given end index?
- */
- final def endsWith(suffix: Name, end: Int): Boolean = {
- var i = 1
- while (i <= suffix.length && i <= end &&
- chrs(index + end - i) == chrs(suffix.start + suffix.length - i))
- i += 1;
- i > suffix.length
- }
-
- final def containsName(subname: String): Boolean = containsName(newTermName(subname))
- final def containsName(subname: Name): Boolean = {
- var start = 0
- val last = len - subname.length
- while (start <= last && !startsWith(subname, start)) start += 1
- start <= last
- }
-
- /** Some thoroughly self-explanatory convenience functions. They
- * assume that what they're being asked to do is known to be valid.
- */
- final def startChar: Char = apply(0)
- final def endChar: Char = apply(len - 1)
- final def startsWith(char: Char): Boolean = len > 0 && startChar == char
- final def startsWith(name: String): Boolean = startsWith(newTermName(name))
- final def endsWith(char: Char): Boolean = len > 0 && endChar == char
- final def endsWith(name: String): Boolean = endsWith(newTermName(name))
- final def stripStart(prefix: Name): Name = subName(prefix.length, len)
- final def stripStart(prefix: String): Name = subName(prefix.length, len)
- final def stripEnd(suffix: Name): Name = subName(0, len - suffix.length)
- final def stripEnd(suffix: String): Name = subName(0, len - suffix.length)
-
- def lastIndexOf(ch: Char) = toChars lastIndexOf ch
-
- /** Return the subname with characters from start to end-1.
- */
- def subName(from: Int, to: Int): Name
-
- /** Replace all occurrences of `from` by `to` in
- * name; result is always a term name.
- */
- def replace(from: Char, to: Char): Name = {
- val cs = new Array[Char](len)
- var i = 0
- while (i < len) {
- val ch = this(i)
- cs(i) = if (ch == from) to else ch
- i += 1
- }
- newTermName(cs, 0, len)
- }
-
- /** Replace operator symbols by corresponding $op_name.
- */
- def encode: Name = {
- val str = toString()
- val res = NameTransformer.encode(str)
- if (res == str) this
- else if (isTypeName) newTypeName(res)
- else newTermName(res)
- }
-
- def append(suffix: String): Name
- def append(suffix: Name): Name
-
- /** Replace $op_name by corresponding operator symbol.
- */
- def decode: String = (
- NameTransformer.decode(toString()) +
- (if (nameDebug && isTypeName) "!" else ""))//debug
-
- def isOperatorName: Boolean = decode != toString
- def nameKind: String = if (isTypeName) "type" else "term"
- def longString: String = nameKind + " " + NameTransformer.decode(toString)
- }
-
- final class TermName(_index: Int, _len: Int, hash: Int) extends Name(_index, _len) {
- var next: TermName = termHashtable(hash)
- termHashtable(hash) = this
- def isTermName: Boolean = true
- def isTypeName: Boolean = false
- def toTermName: TermName = this
- def toTypeName: TypeName = {
- val h = hashValue(chrs, index, len) & HASH_MASK
- var n = typeHashtable(h)
- while ((n ne null) && n.start != index)
- n = n.next;
- if (n eq null)
- n = new TypeName(index, len, h);
- n
- }
- def append(suffix: String): TermName = newTermName(this + suffix)
- def append(suffix: Name): TermName = append(suffix.toString)
- def companionName: TypeName = toTypeName
- def subName(from: Int, to: Int): TermName =
- newTermName(chrs, start + from, to - from)
- }
-
- final class TypeName(_index: Int, _len: Int, hash: Int) extends Name(_index, _len) {
- var next: TypeName = typeHashtable(hash)
- typeHashtable(hash) = this
- def isTermName: Boolean = false
- def isTypeName: Boolean = true
- def toTermName: TermName = {
- val h = hashValue(chrs, index, len) & HASH_MASK
- var n = termHashtable(h)
- while ((n ne null) && n.start != index)
- n = n.next;
- if (n eq null)
- n = new TermName(index, len, h);
- n
- }
- def toTypeName: TypeName = this
-
- def append(suffix: String): TypeName = newTypeName(this + suffix)
- def append(suffix: Name): TypeName = append(suffix.toString)
- def companionName: TermName = toTermName
- def subName(from: Int, to: Int): TypeName =
- newTypeName(chrs, start + from, to - from)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/symtab/Positions.scala b/src/compiler/scala/tools/nsc/symtab/Positions.scala
deleted file mode 100644
index 2290705..0000000
--- a/src/compiler/scala/tools/nsc/symtab/Positions.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-package scala.tools.nsc
-package symtab
-
-import scala.tools.nsc.util.{ SourceFile, Position, OffsetPosition, NoPosition }
-
-trait Positions {
-self: scala.tools.nsc.symtab.SymbolTable =>
-
- def rangePos(source: SourceFile, start: Int, point: Int, end: Int) =
- new OffsetPosition(source, point)
-
- /** A position that wraps a set of trees.
- * The point of the wrapping position is the point of the default position.
- * If some of the trees are ranges, returns a range position enclosing all ranges
- * Otherwise returns default position.
- */
- def wrappingPos(default: Position, trees: List[Tree]): Position = default
-
- /** A position that wraps the non-empty set of trees.
- * The point of the wrapping position is the point of the first trees' position.
- * If all some the trees are non-synthetic, returns a range position enclosing the non-synthetic trees
- * Otherwise returns a synthetic offset position to point.
- */
- def wrappingPos(trees: List[Tree]): Position = trees.head.pos
-
- /** Ensure that given tree has no positions that overlap with
- * any of the positions of `others`. This is done by
- * shortening the range or assigning TransparentPositions
- * to some of the nodes in `tree`.
- */
- def ensureNonOverlapping(tree: Tree, others: List[Tree]) {}
-
- def validatePositions(tree: Tree) {}
-
- type Position = scala.tools.nsc.util.Position
- val NoPosition = scala.tools.nsc.util.NoPosition
-}
diff --git a/src/compiler/scala/tools/nsc/symtab/Scopes.scala b/src/compiler/scala/tools/nsc/symtab/Scopes.scala
deleted file mode 100644
index 183c6f2..0000000
--- a/src/compiler/scala/tools/nsc/symtab/Scopes.scala
+++ /dev/null
@@ -1,350 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package symtab
-
-// Martin: I am about 1/4 way on a cleanup of scopes.
-// The most important change is that scopes are now Iterables.
-// This removed the need for the various iterators on ScopeEntries.
-// ScopeEntries are conceptually an internal representation detail,
-// so it's better not to return them in public iterators.
-// It's true that other code also references ScopeEntries but that's
-// done for performance (and could be reviewed).
-// Another addition is a lookupAll method that returns all symbols with
-// a name in a scopein an iterator.
-trait Scopes {
- self: SymbolTable =>
-
- class ScopeEntry(val sym: Symbol, val owner: Scope) {
- /** the next entry in the hash bucket
- */
- var tail: ScopeEntry = null
-
- /** the next entry in this scope
- */
- var next: ScopeEntry = null
-
- override def hashCode(): Int = sym.name.start
- override def toString(): String = sym.toString()
- }
-
- /**
- * @param sym ...
- * @param owner ...
- * @return ...
- */
- private def newScopeEntry(sym: Symbol, owner: Scope): ScopeEntry = {
- val e = new ScopeEntry(sym, owner)
- e.next = owner.elems
- owner.elems = e
- e
- }
-
- class Scope(initElems: ScopeEntry) extends AbsScope {
-
- var elems: ScopeEntry = initElems
-
- /** The number of times this scope is nested in another
- */
- private var nestinglevel = 0
-
- /** the hash table
- */
- private var hashtable: Array[ScopeEntry] = null
-
- /** a cache for all elements, to be used by symbol iterator.
- */
- private var elemsCache: List[Symbol] = null
-
- /** size and mask of hash tables
- * todo: make hashtables grow?
- */
- private val HASHSIZE = 0x80
- private val HASHMASK = 0x7f
-
- /** the threshold number of entries from which a hashtable is constructed.
- */
- private val MIN_HASH = 8
-
- if (size >= MIN_HASH) createHash()
-
- def this() = this(null: ScopeEntry)
-
- def this(base: Scope) = {
- this(base.elems)
- nestinglevel = base.nestinglevel + 1
- }
-
- def this(decls: List[Symbol]) = {
- this()
- decls foreach enter
- }
-
- /** Returns a new scope with the same content as this one. */
- def cloneScope: Scope = {
- val clone = new Scope()
- this.toList foreach (clone enter _)
- clone
- }
-
- /** is the scope empty? */
- override def isEmpty: Boolean = elems eq null
-
- /** the number of entries in this scope */
- override def size: Int = {
- var s = 0
- var e = elems
- while (e ne null) {
- s += 1
- e = e.next
- }
- s
- }
-
- /** enter a scope entry
- *
- * @param e ...
- */
- protected def enter(e: ScopeEntry) {
- elemsCache = null
- if (hashtable ne null)
- enterInHash(e)
- else if (size >= MIN_HASH)
- createHash()
- }
-
- private def enterInHash(e: ScopeEntry): Unit = {
- val i = e.sym.name.start & HASHMASK
- e.tail = hashtable(i)
- hashtable(i) = e
- }
-
- /** enter a symbol
- *
- * @param sym ...
- */
- def enter(sym: Symbol): Symbol = { enter(newScopeEntry(sym, this)); sym }
-
- /** enter a symbol, asserting that no symbol with same name exists in scope
- *
- * @param sym ...
- */
- def enterUnique(sym: Symbol) {
- assert(lookup(sym.name) == NoSymbol)
- enter(sym)
- }
-
- private def createHash() {
- hashtable = new Array[ScopeEntry](HASHSIZE)
- enterAllInHash(elems)
- }
-
- private def enterAllInHash(e: ScopeEntry, n: Int = 0) {
- if (e ne null) {
- if (n < maxRecursions) {
- enterAllInHash(e.next, n + 1)
- enterInHash(e)
- } else {
- var entries: List[ScopeEntry] = List()
- var ee = e
- while (ee ne null) {
- entries = ee :: entries
- ee = ee.next
- }
- entries foreach enterInHash
- }
- }
- }
-
- def rehash(sym: Symbol, newname: Name) {
- if (hashtable ne null) {
- val index = sym.name.start & HASHMASK
- var e1 = hashtable(index)
- var e: ScopeEntry = null
- if (e1 != null) {
- if (e1.sym == sym) {
- hashtable(index) = e1.tail
- e = e1
- } else {
- while (e1.tail != null && e1.tail.sym != sym) e1 = e1.tail
- if (e1.tail != null) {
- e = e1.tail
- e1.tail = e.tail
- }
- }
- }
- if (e != null) {
- val newindex = newname.start & HASHMASK
- e.tail = hashtable(newindex)
- hashtable(newindex) = e
- }
- }
- }
-
- /** remove entry
- *
- * @param e ...
- */
- def unlink(e: ScopeEntry) {
- if (elems == e) {
- elems = e.next
- } else {
- var e1 = elems
- while (e1.next != e) e1 = e1.next
- e1.next = e.next
- }
- if (hashtable ne null) {
- val index = e.sym.name.start & HASHMASK
- var e1 = hashtable(index)
- if (e1 == e) {
- hashtable(index) = e.tail
- } else {
- while (e1.tail != e) e1 = e1.tail;
- e1.tail = e.tail
- }
- }
- elemsCache = null
- }
-
- /** remove symbol */
- def unlink(sym: Symbol) {
- var e = lookupEntry(sym.name)
- while (e ne null) {
- if (e.sym == sym) unlink(e);
- e = lookupNextEntry(e)
- }
- }
-
- /** lookup a symbol
- *
- * @param name ...
- * @return ...
- */
- def lookup(name: Name): Symbol = {
- val e = lookupEntry(name)
- if (e eq null) NoSymbol else e.sym
- }
-
- /** Returns an iterator yielding every symbol with given name in this scope.
- */
- def lookupAll(name: Name): Iterator[Symbol] = new Iterator[Symbol] {
- var e = lookupEntry(name)
- def hasNext: Boolean = e ne null
- def next: Symbol = { val r = e.sym; e = lookupNextEntry(e); r }
- }
-
- /** lookup a symbol entry matching given name.
- * @note from Martin: I believe this is a hotspot or will be one
- * in future versions of the type system. I have reverted the previous
- * change to use iterators as too costly.
- */
- def lookupEntry(name: Name): ScopeEntry = {
- var e: ScopeEntry = null
- if (hashtable ne null) {
- e = hashtable(name.start & HASHMASK)
- while ((e ne null) && e.sym.name != name) {
- e = e.tail
- }
- } else {
- e = elems
- while ((e ne null) && e.sym.name != name) {
- e = e.next
- }
- }
- e
- }
-
- /** lookup next entry with same name as this one
- * @note from Martin: I believe this is a hotspot or will be one
- * in future versions of the type system. I have reverted the previous
- * change to use iterators as too costly.
- */
- def lookupNextEntry(entry: ScopeEntry): ScopeEntry = {
- var e = entry
- if (hashtable ne null)
- do { e = e.tail } while ((e ne null) && e.sym.name != entry.sym.name)
- else
- do { e = e.next } while ((e ne null) && e.sym.name != entry.sym.name);
- e
- }
-
- /** Return all symbols as a list in the order they were entered in this scope.
- */
- override def toList: List[Symbol] = {
- if (elemsCache eq null) {
- elemsCache = Nil
- var e = elems
- while ((e ne null) && e.owner == this) {
- elemsCache = e.sym :: elemsCache
- e = e.next
- }
- }
- elemsCache
- }
-
- /** Return the nesting level of this scope, i.e. the number of times this scope
- * was nested in another */
- def nestingLevel = nestinglevel
-
- /** Return all symbols as an iterator in the order they were entered in this scope.
- */
- def iterator: Iterator[Symbol] = toList.iterator
-
-/*
- /** Does this scope contain an entry for `sym`?
- */
- def contains(sym: Symbol): Boolean = lookupAll(sym.name) contains sym
-
- /** A scope that contains all symbols of this scope and that also contains `sym`.
- */
- def +(sym: Symbol): Scope =
- if (contains(sym)) this
- else {
- val result = cloneScope
- result enter sym
- result
- }
-
- /** A scope that contains all symbols of this scope except `sym`.
- */
- def -(sym: Symbol): Scope =
- if (!contains(sym)) this
- else {
- val result = cloneScope
- result unlink sym
- result
- }
-*/
- override def foreach[U](p: Symbol => U): Unit = toList foreach p
-
- override def filter(p: Symbol => Boolean): Scope =
- if (!(toList forall p)) new Scope(toList filter p) else this
-
- override def mkString(start: String, sep: String, end: String) =
- toList.map(_.defString).mkString(start, sep, end)
-
- override def toString(): String = mkString("Scope{\n ", ";\n ", "\n}")
-
- }
-
- def newScope: Scope = new Scope
-
- /** The empty scope (immutable).
- */
- object EmptyScope extends Scope {
- override def enter(e: ScopeEntry) {
- abort("EmptyScope.enter")
- }
- }
-
- /** The error scope.
- */
- class ErrorScope(owner: Symbol) extends Scope(null: ScopeEntry)
-
- private final val maxRecursions = 1000
-
-}
-
diff --git a/src/compiler/scala/tools/nsc/symtab/StdNames.scala b/src/compiler/scala/tools/nsc/symtab/StdNames.scala
deleted file mode 100644
index e114731..0000000
--- a/src/compiler/scala/tools/nsc/symtab/StdNames.scala
+++ /dev/null
@@ -1,588 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package symtab
-
-import scala.collection.immutable
-
-trait StdNames extends reflect.generic.StdNames with NameManglers {
- self: SymbolTable =>
-
- /** This should be the first trait in the linearization. */
- trait CompilerKeywords {
- private var kws: Set[TermName] = Set()
- private def kw(s: String): TermName = {
- val result = newTermName(s)
- kws = kws + result
- result
- }
-
- final val ABSTRACTkw: TermName = kw("abstract")
- final val CASEkw: TermName = kw("case")
- final val CLASSkw: TermName = kw("class")
- final val CATCHkw: TermName = kw("catch")
- final val DEFkw: TermName = kw("def")
- final val DOkw: TermName = kw("do")
- final val ELSEkw: TermName = kw("else")
- final val EXTENDSkw: TermName = kw("extends")
- final val FALSEkw: TermName = kw("false")
- final val FINALkw: TermName = kw("final")
- final val FINALLYkw: TermName = kw("finally")
- final val FORkw: TermName = kw("for")
- final val FORSOMEkw: TermName = kw("forSome")
- final val IFkw: TermName = kw("if")
- final val IMPLICITkw: TermName = kw("implicit")
- final val IMPORTkw: TermName = kw("import")
- final val LAZYkw: TermName = kw("lazy")
- final val MATCHkw: TermName = kw("match")
- final val NEWkw: TermName = kw("new")
- final val NULLkw: TermName = kw("null")
- final val OBJECTkw: TermName = kw("object")
- final val OVERRIDEkw: TermName = kw("override")
- final val PACKAGEkw: TermName = kw("package")
- final val PRIVATEkw: TermName = kw("private")
- final val PROTECTEDkw: TermName = kw("protected")
- final val RETURNkw: TermName = kw("return")
- final val SEALEDkw: TermName = kw("sealed")
- final val SUPERkw: TermName = kw("super")
- final val THISkw: TermName = kw("this")
- final val THROWkw: TermName = kw("throw")
- final val TRAITkw: TermName = kw("trait")
- final val TRUEkw: TermName = kw("true")
- final val TRYkw: TermName = kw("try")
- final val TYPEkw: TermName = kw("type")
- final val VALkw: TermName = kw("val")
- final val VARkw: TermName = kw("var")
- final val WITHkw: TermName = kw("with")
- final val WHILEkw: TermName = kw("while")
- final val YIELDkw: TermName = kw("yield")
- final val DOTkw: TermName = kw(".")
- final val USCOREkw: TermName = kw("_")
- final val COLONkw: TermName = kw(":")
- final val EQUALSkw: TermName = kw("=")
- final val ARROWkw: TermName = kw("=>")
- final val LARROWkw: TermName = kw("<-")
- final val SUBTYPEkw: TermName = kw("<:")
- final val VIEWBOUNDkw: TermName = kw("<%")
- final val SUPERTYPEkw: TermName = kw(">:")
- final val HASHkw: TermName = kw("#")
- final val ATkw: TermName = kw("@")
-
- final val keywords = {
- val result = kws.toSet
- kws = null
- result
- }
-
- final val javaKeywords = new JavaKeywords()
- }
-
- trait CompilerCommonNames extends LibraryCommonNames {
- // value types are all used as terms as well
- final val Boolean: NameType = "Boolean"
- final val Byte: NameType = "Byte"
- final val Char: NameType = "Char"
- final val Double: NameType = "Double"
- final val Float: NameType = "Float"
- final val Int: NameType = "Int"
- final val Long: NameType = "Long"
- final val Short: NameType = "Short"
- final val Unit: NameType = "Unit"
-
- final val ScalaValueNames: scala.List[NameType] =
- scala.List(Byte, Char, Short, Int, Long, Float, Double, Boolean, Unit)
-
- // types whose companions we utilize
- final val Array: NameType = "Array"
- final val List: NameType = "List"
- final val Seq: NameType = "Seq"
- final val Symbol: NameType = "Symbol"
-
- // fictions we use as both types and terms
- final val ERROR: NameType = "<error>"
- final val NO_NAME: NameType = "<none>" // formerly NOSYMBOL
- final val WILDCARD: NameType = "_"
- }
-
- trait CompilerTypeNames extends CompilerCommonNames {
- final val BYNAME_PARAM_CLASS_NAME: NameType = "<byname>"
- final val EQUALS_PATTERN_NAME: NameType = "<equals>"
- final val JAVA_REPEATED_PARAM_CLASS_NAME: NameType = "<repeated...>"
- final val LOCAL_CHILD: NameType = "<local child>"
- final val REPEATED_PARAM_CLASS_NAME: NameType = "<repeated>"
- final val WILDCARD_STAR: NameType = "_*"
-
- final val Any: NameType = "Any"
- final val AnyRef: NameType = "AnyRef"
- final val AnyVal: NameType = "AnyVal"
- final val Nothing: NameType = "Nothing"
- final val Null: NameType = "Null"
- final val Object: NameType = "Object"
- final val PartialFunction: NameType = "PartialFunction"
- final val Product: NameType = "Product"
- final val ScalaObject: NameType = "ScalaObject"
- final val Serializable: NameType = "Serializable"
- final val Singleton: NameType = "Singleton"
- final val String: NameType = "String"
- final val Throwable: NameType = "Throwable"
-
- // Annotation types
- final val AnnotationDefaultATTR: NameType = "AnnotationDefault"
- final val BridgeATTR: NameType = "Bridge"
- final val ClassfileAnnotationATTR: NameType = "RuntimeInvisibleAnnotations" // RetentionPolicy.CLASS. Currently not used (Apr 2009).
- final val CodeATTR: NameType = "Code"
- final val ConstantValueATTR: NameType = "ConstantValue"
- final val DeprecatedATTR: NameType = "Deprecated"
- final val ExceptionsATTR: NameType = "Exceptions"
- final val InnerClassesATTR: NameType = "InnerClasses"
- final val JacoMetaATTR: NameType = "JacoMeta"
- final val LineNumberTableATTR: NameType = "LineNumberTable"
- final val LocalVariableTableATTR: NameType = "LocalVariableTable"
- final val RuntimeAnnotationATTR: NameType = "RuntimeVisibleAnnotations" // RetentionPolicy.RUNTIME
- final val RuntimeParamAnnotationATTR: NameType = "RuntimeVisibleParameterAnnotations" // RetentionPolicy.RUNTIME (annotations on parameters)
- final val ScalaATTR: NameType = "Scala"
- final val ScalaSignatureATTR: NameType = "ScalaSig"
- final val SignatureATTR: NameType = "Signature"
- final val SourceFileATTR: NameType = "SourceFile"
- final val SyntheticATTR: NameType = "Synthetic"
- }
-
-
- trait CompilerTermNames extends CompilerKeywords with CompilerCommonNames {
- // Compiler internal names
- val ANYNAME: NameType = "<anyname>"
- val CONSTRUCTOR: NameType = "<init>"
- val FAKE_LOCAL_THIS: NameType = "this$"
- val INITIALIZER: NameType = CONSTRUCTOR // Is this buying us something?
- val MIXIN_CONSTRUCTOR: NameType = "$init$"
- val MODULE_INSTANCE_FIELD: NameType = "MODULE$"
- val OUTER: NameType = "$outer"
- val OUTER_LOCAL: NameType = "$outer " // note the space
- val SELF: NameType = "$this"
- val SPECIALIZED_INSTANCE: NameType = "specInstance$"
- val STAR: NameType = "*"
- val THIS: NameType = "_$this"
-
- final val Nil: NameType = "Nil"
- final val Predef: NameType = "Predef"
- final val ScalaRunTime: NameType = "ScalaRunTime"
- final val Some: NameType = "Some"
-
- // Compiler utilized names
- // val productElementName: NameType = "productElementName"
- val TYPE_ : NameType = "TYPE"
- val add_ : NameType = "add"
- val apply: NameType = "apply"
- val arrayValue: NameType = "arrayValue"
- val arraycopy: NameType = "arraycopy"
- val asInstanceOf_ : NameType = "asInstanceOf"
- val assert_ : NameType = "assert"
- val assume_ : NameType = "assume"
- val box: NameType = "box"
- val bytes: NameType = "bytes"
- val canEqual_ : NameType = "canEqual"
- val checkInitialized: NameType = "checkInitialized"
- val classOf: NameType = "classOf"
- val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure
- val conforms: NameType = "conforms"
- val copy: NameType = "copy"
- val delayedInit: NameType = "delayedInit"
- val delayedInitArg: NameType = "delayedInit$body"
- val drop: NameType = "drop"
- val elem: NameType = "elem"
- val eq: NameType = "eq"
- val equals_ : NameType = if (forMSIL) "Equals" else "equals"
- val error: NameType = "error"
- val ex: NameType = "ex"
- val false_ : NameType = "false"
- val filter: NameType = "filter"
- val finalize_ : NameType = if (forMSIL) "Finalize" else "finalize"
- val find_ : NameType = "find"
- val flatMap: NameType = "flatMap"
- val foreach: NameType = "foreach"
- val genericArrayOps: NameType = "genericArrayOps"
- val get: NameType = "get"
- val hasNext: NameType = "hasNext"
- val hashCode_ : NameType = if (forMSIL) "GetHashCode" else "hashCode"
- val hash_ : NameType = "hash"
- val head: NameType = "head"
- val identity: NameType = "identity"
- val inlinedEquals: NameType = "inlinedEquals"
- val applyDynamic: NameType = "applyDynamic"
- val isArray: NameType = "isArray"
- val isDefinedAt: NameType = "isDefinedAt"
- val isEmpty: NameType = "isEmpty"
- val isInstanceOf_ : NameType = "isInstanceOf"
- val java: NameType = "java"
- val lang: NameType = "lang"
- val length: NameType = "length"
- val lengthCompare: NameType = "lengthCompare"
- val lift_ : NameType = "lift"
- val main: NameType = "main"
- val map: NameType = "map"
- val ne: NameType = "ne"
- val newArray: NameType = "newArray"
- val next: NameType = "next"
- val notifyAll_ : NameType = "notifyAll"
- val notify_ : NameType = "notify"
- val null_ : NameType = "null"
- val ofDim: NameType = "ofDim"
- val productArity: NameType = "productArity"
- val productElement: NameType = "productElement"
- val productPrefix: NameType = "productPrefix"
- val readResolve: NameType = "readResolve"
- val sameElements: NameType = "sameElements"
- val scala_ : NameType = "scala"
- val self: NameType = "self"
- val setAccessible: NameType = "setAccessible"
- val synchronized_ : NameType = "synchronized"
- val tail: NameType = "tail"
- val this_ : NameType = "this"
- val throw_ : NameType = "throw"
- val toArray: NameType = "toArray"
- val toList: NameType = "toList"
- val toSeq: NameType = "toSeq"
- val toString_ : NameType = if (forMSIL) "ToString" else "toString"
- val true_ : NameType = "true"
- val unapply: NameType = "unapply"
- val unapplySeq: NameType = "unapplySeq"
- val unbox: NameType = "unbox"
- val update: NameType = "update"
- val value: NameType = "value"
- val view_ : NameType = "view"
- val wait_ : NameType = "wait"
- val withFilter: NameType = "withFilter"
- val wrapRefArray: NameType = "wrapRefArray"
- val zip: NameType = "zip"
-
- // unencoded operators
- object raw {
- final val AMP : NameType = "&"
- final val BANG : NameType = "!"
- final val BAR : NameType = "|"
- final val DOLLAR: NameType = "$"
- final val GE: NameType = ">="
- final val LE: NameType = "<="
- final val MINUS: NameType = "-"
- final val NE: NameType = "!="
- final val PLUS : NameType = "+"
- final val SLASH: NameType = "/"
- final val STAR : NameType = "*"
- final val TILDE: NameType = "~"
-
- final val isUnary: Set[Name] = Set(MINUS, PLUS, TILDE, BANG)
- }
-
- // value-conversion methods
- val toByte: NameType = "toByte"
- val toShort: NameType = "toShort"
- val toChar: NameType = "toChar"
- val toInt: NameType = "toInt"
- val toLong: NameType = "toLong"
- val toFloat: NameType = "toFloat"
- val toDouble: NameType = "toDouble"
- }
-
- object tpnme extends CompilerTypeNames with LibraryTypeNames with TypeNameMangling {
- type NameType = TypeName
- implicit def createNameType(name: String): TypeName = newTypeName(name)
- }
-
- val javanme = nme.javaKeywords
-
- object nme extends CompilerTermNames with LibraryTermNames with TermNameMangling {
- type NameType = TermName
- implicit def createNameType(name: String): TermName = newTermName(name)
-
- /** Translate a String into a list of simple TypeNames and TermNames.
- * In all segments before the last, type/term is determined by whether
- * the following separator char is '.' or '#'. In the last segment,
- * the argument "assumeTerm" determines it. Examples:
- *
- * package foo {
- * object Lorax { object Wog ; class Wog }
- * class Lorax { object Zax ; class Zax }
- * }
- *
- * f("foo.Lorax", true) == List("foo": Term, "Lorax": Term) // object Lorax
- * f("foo.Lorax", false) == List("foo": Term, "Lorax": Type) // class Lorax
- * f("Lorax.Wog", true) == List("Lorax": Term, "Wog": Term) // object Wog
- * f("Lorax.Wog", false) == List("Lorax": Term, "Wog": Type) // class Wog
- * f("Lorax#Zax", true) == List("Lorax": Type, "Zax": Term) // object Zax
- * f("Lorax#Zax", false) == List("Lorax": Type, "Zax": Type) // class Zax
- *
- * Note that in actual scala syntax you cannot refer to object Zax without an
- * instance of Lorax, so Lorax#Zax could only mean the type. One might think
- * that Lorax#Zax.type would work, but this is not accepted by the parser.
- * For the purposes of referencing that object, the syntax is allowed.
- */
- def segments(name: String, assumeTerm: Boolean): List[Name] = {
- def mkName(str: String, term: Boolean): Name =
- if (term) newTermName(str) else newTypeName(str)
-
- name.indexWhere(ch => ch == '.' || ch == '#') match {
- // it's the last segment: the parameter tells us whether type or term
- case -1 => if (name == "") scala.Nil else scala.List(mkName(name, assumeTerm))
- // otherwise, we can tell based on whether '#' or '.' is the following char.
- case idx =>
- val (simple, div, rest) = (name take idx, name charAt idx, name drop (idx + 1))
- mkName(simple, div == '.') :: segments(rest, assumeTerm)
- }
- }
- private def bitmapName(n: Int, suffix: String): TermName =
- newTermName(BITMAP_PREFIX + suffix + n)
-
- /** The name of bitmaps for initialized (public or protected) lazy vals. */
- def bitmapName(n: Int): TermName = bitmapName(n, "")
-
- /** The name of bitmaps for initialized transient lazy vals. */
- def bitmapNameForTransient(n: Int): TermName = bitmapName(n, "trans$")
-
- /** The name of bitmaps for initialized private lazy vals. */
- def bitmapNameForPrivate(n: Int): TermName = bitmapName(n, "priv$")
-
- /** The name of bitmaps for checkinit values */
- def bitmapNameForCheckinit(n: Int): TermName = bitmapName(n, "init$")
-
- /** The name of bitmaps for checkinit values that have transient flag*/
- def bitmapNameForCheckinitTransient(n: Int): TermName = bitmapName(n, "inittrans$")
-
- /** Base strings from which synthetic names are derived. */
- val BITMAP_PREFIX = "bitmap$"
- val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$"
- val DEFAULT_GETTER_STRING = "$default$"
- val DO_WHILE_PREFIX = "doWhile$"
- val EQEQ_LOCAL_VAR = "eqEqTemp$"
- val EVIDENCE_PARAM_PREFIX = "evidence$"
- val EXCEPTION_RESULT_PREFIX = "exceptionResult"
- val INTERPRETER_IMPORT_WRAPPER = "$iw"
- val INTERPRETER_LINE_PREFIX = "line"
- val INTERPRETER_VAR_PREFIX = "res"
- val INTERPRETER_WRAPPER_SUFFIX = "$object"
- val WHILE_PREFIX = "while$"
-
- def getCause = sn.GetCause
- def getClass_ = sn.GetClass
- def getMethod_ = sn.GetMethod
- def invoke_ = sn.Invoke
-
- val ADD = encode("+")
- val AND = encode("&")
- val ASR = encode(">>")
- val DIV = encode("/")
- val EQ = encode("==")
- val EQL = encode("=")
- val GE = encode(">=")
- val GT = encode(">")
- val HASHHASH = encode("##")
- val LE = encode("<=")
- val LSL = encode("<<")
- val LSR = encode(">>>")
- val LT = encode("<")
- val MINUS = encode("-")
- val MOD = encode("%")
- val MUL = encode("*")
- val NE = encode("!=")
- val OR = encode("|")
- val PLUS = encode("+")
- val SUB = encode("-")
- val XOR = encode("^")
- val ZAND = encode("&&")
- val ZOR = encode("||")
-
- // unary operators
- val UNARY_~ = encode("unary_~")
- val UNARY_+ = encode("unary_+")
- val UNARY_- = encode("unary_-")
- val UNARY_! = encode("unary_!")
- }
-
- abstract class SymbolNames {
- protected implicit def stringToTypeName(s: String): TypeName = newTypeName(s)
-
- val BeanProperty : TypeName
- val BooleanBeanProperty : TypeName
- val BoxedBoolean : TypeName
- val BoxedCharacter : TypeName
- val BoxedNumber : TypeName
- val Class : TypeName
- val Code : TypeName
- val Delegate : TypeName
- val IOOBException : TypeName // IndexOutOfBoundsException
- val InvTargetException : TypeName // InvocationTargetException
- val JavaSerializable : TypeName
- val MethodAsObject : TypeName
- val NPException : TypeName // NullPointerException
- val Object : TypeName
- val String : TypeName
- val Throwable : TypeName
- val ValueType : TypeName
-
- val ForName : TermName
- val GetCause : TermName
- val GetClass : TermName
- val GetMethod : TermName
- val Invoke : TermName
- val JavaLang : TermName
-
- val Boxed: immutable.Map[TypeName, TypeName]
- }
-
- class JavaKeywords {
- private var kws: Set[TermName] = Set()
- private def kw(s: String): TermName = {
- val result = newTermName(s)
- kws = kws + result
- result
- }
-
- final val ABSTRACTkw: TermName = kw("abstract")
- final val ASSERTkw: TermName = kw("assert")
- final val BOOLEANkw: TermName = kw("boolean")
- final val BREAKkw: TermName = kw("break")
- final val BYTEkw: TermName = kw("byte")
- final val CASEkw: TermName = kw("case")
- final val CATCHkw: TermName = kw("catch")
- final val CHARkw: TermName = kw("char")
- final val CLASSkw: TermName = kw("class")
- final val CONSTkw: TermName = kw("const")
- final val CONTINUEkw: TermName = kw("continue")
- final val DEFAULTkw: TermName = kw("default")
- final val DOkw: TermName = kw("do")
- final val DOUBLEkw: TermName = kw("double")
- final val ELSEkw: TermName = kw("else")
- final val ENUMkw: TermName = kw("enum")
- final val EXTENDSkw: TermName = kw("extends")
- final val FINALkw: TermName = kw("final")
- final val FINALLYkw: TermName = kw("finally")
- final val FLOATkw: TermName = kw("float")
- final val FORkw: TermName = kw("for")
- final val IFkw: TermName = kw("if")
- final val GOTOkw: TermName = kw("goto")
- final val IMPLEMENTSkw: TermName = kw("implements")
- final val IMPORTkw: TermName = kw("import")
- final val INSTANCEOFkw: TermName = kw("instanceof")
- final val INTkw: TermName = kw("int")
- final val INTERFACEkw: TermName = kw("interface")
- final val LONGkw: TermName = kw("long")
- final val NATIVEkw: TermName = kw("native")
- final val NEWkw: TermName = kw("new")
- final val PACKAGEkw: TermName = kw("package")
- final val PRIVATEkw: TermName = kw("private")
- final val PROTECTEDkw: TermName = kw("protected")
- final val PUBLICkw: TermName = kw("public")
- final val RETURNkw: TermName = kw("return")
- final val SHORTkw: TermName = kw("short")
- final val STATICkw: TermName = kw("static")
- final val STRICTFPkw: TermName = kw("strictfp")
- final val SUPERkw: TermName = kw("super")
- final val SWITCHkw: TermName = kw("switch")
- final val SYNCHRONIZEDkw: TermName = kw("synchronized")
- final val THISkw: TermName = kw("this")
- final val THROWkw: TermName = kw("throw")
- final val THROWSkw: TermName = kw("throws")
- final val TRANSIENTkw: TermName = kw("transient")
- final val TRYkw: TermName = kw("try")
- final val VOIDkw: TermName = kw("void")
- final val VOLATILEkw: TermName = kw("volatile")
- final val WHILEkw: TermName = kw("while")
-
- final val keywords = {
- val result = kws.toSet
- kws = null
- result
- }
- }
-
- private abstract class JavaNames extends SymbolNames {
- final val BoxedBoolean: TypeName = "java.lang.Boolean"
- final val BoxedByte: TypeName = "java.lang.Byte"
- final val BoxedCharacter: TypeName = "java.lang.Character"
- final val BoxedDouble: TypeName = "java.lang.Double"
- final val BoxedFloat: TypeName = "java.lang.Float"
- final val BoxedInteger: TypeName = "java.lang.Integer"
- final val BoxedLong: TypeName = "java.lang.Long"
- final val BoxedNumber: TypeName = "java.lang.Number"
- final val BoxedShort: TypeName = "java.lang.Short"
- final val Class: TypeName = "java.lang.Class"
- final val Delegate: TypeName = tpnme.NO_NAME
- final val IOOBException: TypeName = "java.lang.IndexOutOfBoundsException"
- final val InvTargetException: TypeName = "java.lang.reflect.InvocationTargetException"
- final val MethodAsObject: TypeName = "java.lang.reflect.Method"
- final val NPException: TypeName = "java.lang.NullPointerException"
- final val Object: TypeName = "java.lang.Object"
- final val String: TypeName = "java.lang.String"
- final val Throwable: TypeName = "java.lang.Throwable"
- final val ValueType: TypeName = tpnme.NO_NAME
-
- final val ForName: TermName = "forName"
- final val GetCause: TermName = "getCause"
- final val GetClass: TermName = "getClass"
- final val GetMethod: TermName = "getMethod"
- final val Invoke: TermName = "invoke"
- final val JavaLang: TermName = "java.lang"
-
- val Boxed = immutable.Map[TypeName, TypeName](
- tpnme.Boolean -> BoxedBoolean,
- tpnme.Byte -> BoxedByte,
- tpnme.Char -> BoxedCharacter,
- tpnme.Short -> BoxedShort,
- tpnme.Int -> BoxedInteger,
- tpnme.Long -> BoxedLong,
- tpnme.Float -> BoxedFloat,
- tpnme.Double -> BoxedDouble
- )
- }
-
- private class MSILNames extends SymbolNames {
- final val BeanProperty: TypeName = tpnme.NO_NAME
- final val BooleanBeanProperty: TypeName = tpnme.NO_NAME
- final val BoxedBoolean: TypeName = "System.IConvertible"
- final val BoxedCharacter: TypeName = "System.IConvertible"
- final val BoxedNumber: TypeName = "System.IConvertible"
- final val Class: TypeName = "System.Type"
- final val Code: TypeName = tpnme.NO_NAME
- final val Delegate: TypeName = "System.MulticastDelegate"
- final val IOOBException: TypeName = "System.IndexOutOfRangeException"
- final val InvTargetException: TypeName = "System.Reflection.TargetInvocationException"
- final val JavaSerializable: TypeName = tpnme.NO_NAME
- final val MethodAsObject: TypeName = "System.Reflection.MethodInfo"
- final val NPException: TypeName = "System.NullReferenceException"
- final val Object: TypeName = "System.Object"
- final val String: TypeName = "System.String"
- final val Throwable: TypeName = "System.Exception"
- final val ValueType: TypeName = "System.ValueType"
-
- final val ForName: TermName = "GetType"
- final val GetCause: TermName = "InnerException" /* System.Reflection.TargetInvocationException.InnerException */
- final val GetClass: TermName = "GetType"
- final val GetMethod: TermName = "GetMethod"
- final val Invoke: TermName = "Invoke"
- final val JavaLang: TermName = "System"
-
- val Boxed = immutable.Map[TypeName, TypeName](
- tpnme.Boolean -> "System.Boolean",
- tpnme.Byte -> "System.SByte", // a scala.Byte is signed and a System.SByte too (unlike a System.Byte)
- tpnme.Char -> "System.Char",
- tpnme.Short -> "System.Int16",
- tpnme.Int -> "System.Int32",
- tpnme.Long -> "System.Int64",
- tpnme.Float -> "System.Single",
- tpnme.Double -> "System.Double"
- )
- }
-
- private class J2SENames extends JavaNames {
- final val BeanProperty: TypeName = "scala.reflect.BeanProperty"
- final val BooleanBeanProperty: TypeName = "scala.reflect.BooleanBeanProperty"
- final val Code: TypeName = "scala.reflect.Code"
- final val JavaSerializable: TypeName = "java.io.Serializable"
- }
-
- lazy val sn: SymbolNames =
- if (forMSIL) new MSILNames
- else new J2SENames
-}
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 13ec75d..9e5186b 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -7,14 +7,13 @@ package scala.tools.nsc
package symtab
import java.io.IOException
-import ch.epfl.lamp.compiler.msil.{ Type => MSILType, Attribute => MSILAttribute }
-
import scala.compat.Platform.currentTime
-import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.util.{ ClassPath }
import classfile.ClassfileParser
-import Flags._
-import util.Statistics._
+import scala.reflect.internal.Flags._
+import scala.reflect.internal.MissingRequirementError
+import scala.reflect.internal.util.Statistics
+import scala.tools.nsc.io.{ AbstractFile, MsilFile }
/** This class ...
*
@@ -24,6 +23,7 @@ import util.Statistics._
abstract class SymbolLoaders {
val global: Global
import global._
+ import SymbolLoadersStats._
protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = {
assert(owner.info.decls.lookup(member.name) == NoSymbol, owner.fullName + "." + member.name)
@@ -31,16 +31,11 @@ abstract class SymbolLoaders {
member
}
- private def realOwner(root: Symbol): Symbol = {
- if (root.isRoot) definitions.EmptyPackageClass else root
- }
-
/** Enter class with given `name` into scope of `root`
* and give them `completer` as type.
*/
- def enterClass(root: Symbol, name: String, completer: SymbolLoader): Symbol = {
- val owner = realOwner(root)
- val clazz = owner.newClass(NoPosition, newTypeName(name))
+ def enterClass(owner: Symbol, name: String, completer: SymbolLoader): Symbol = {
+ val clazz = owner.newClass(newTypeName(name))
clazz setInfo completer
enterIfNew(owner, clazz, completer)
}
@@ -48,14 +43,53 @@ abstract class SymbolLoaders {
/** Enter module with given `name` into scope of `root`
* and give them `completer` as type.
*/
- def enterModule(root: Symbol, name: String, completer: SymbolLoader): Symbol = {
- val owner = realOwner(root)
- val module = owner.newModule(NoPosition, newTermName(name))
+ def enterModule(owner: Symbol, name: String, completer: SymbolLoader): Symbol = {
+ val module = owner.newModule(newTermName(name))
module setInfo completer
module.moduleClass setInfo moduleClassLoader
enterIfNew(owner, module, completer)
}
+ /** Enter package with given `name` into scope of `root`
+ * and give them `completer` as type.
+ */
+ def enterPackage(root: Symbol, name: String, completer: SymbolLoader): Symbol = {
+ val pname = newTermName(name)
+ val preExisting = root.info.decls lookup pname
+ if (preExisting != NoSymbol) {
+ // Some jars (often, obfuscated ones) include a package and
+ // object with the same name. Rather than render them unusable,
+ // offer a setting to resolve the conflict one way or the other.
+ // This was motivated by the desire to use YourKit probes, which
+ // require yjp.jar at runtime. See SI-2089.
+ if (settings.termConflict.isDefault)
+ throw new TypeError(
+ root+" contains object and package with same name: "+
+ name+"\none of them needs to be removed from classpath"
+ )
+ else if (settings.termConflict.value == "package") {
+ global.warning(
+ "Resolving package/object name conflict in favor of package " +
+ preExisting.fullName + ". The object will be inaccessible."
+ )
+ root.info.decls.unlink(preExisting)
+ }
+ else {
+ global.warning(
+ "Resolving package/object name conflict in favor of object " +
+ preExisting.fullName + ". The package will be inaccessible."
+ )
+ return NoSymbol
+ }
+ }
+ // todo: find out initialization sequence for pkg/pkg.moduleClass is different from enterModule
+ val pkg = root.newPackage(pname)
+ pkg.moduleClass setInfo completer
+ pkg setInfo pkg.moduleClass.tpe
+ root.info.decls enter pkg
+ pkg
+ }
+
/** Enter class and module with given `name` into scope of `root`
* and give them `completer` as type.
*/
@@ -63,8 +97,15 @@ abstract class SymbolLoaders {
val clazz = enterClass(root, name, completer)
val module = enterModule(root, name, completer)
if (!clazz.isAnonymousClass) {
- assert(clazz.companionModule == module, module)
- assert(module.companionClass == clazz, clazz)
+ // Diagnostic for SI-7147
+ def msg: String = {
+ def symLocation(sym: Symbol) = if (sym == null) "null" else s"${clazz.fullLocationString} (from ${clazz.associatedFile})"
+ sm"""Inconsistent class/module symbol pair for `$name` loaded from ${symLocation(root)}.
+ |clazz = ${symLocation(clazz)}; clazz.companionModule = ${clazz.companionModule}
+ |module = ${symLocation(module)}; module.companionClass = ${module.companionClass}"""
+ }
+ assert(clazz.companionModule == module, msg)
+ assert(module.companionClass == clazz, msg)
}
}
@@ -78,19 +119,48 @@ abstract class SymbolLoaders {
enterClassAndModule(root, name, new SourcefileLoader(src))
}
+ /** The package objects of scala and scala.reflect should always
+ * be loaded in binary if classfiles are available, even if sourcefiles
+ * are newer. Late-compiling these objects from source leads to compilation
+ * order issues.
+ * Note: We do a name-base comparison here because the method is called before we even
+ * have ReflectPackage defined.
+ */
+ def binaryOnly(owner: Symbol, name: String): Boolean =
+ name == "package" &&
+ (owner.fullName == "scala" || owner.fullName == "scala.reflect")
+
+ /** Initialize toplevel class and module symbols in `owner` from class path representation `classRep`
+ */
+ def initializeFromClassPath(owner: Symbol, classRep: ClassPath[platform.BinaryRepr]#ClassRep) {
+ ((classRep.binary, classRep.source) : @unchecked) match {
+ case (Some(bin), Some(src))
+ if platform.needCompile(bin, src) && !binaryOnly(owner, classRep.name) =>
+ if (settings.verbose.value) inform("[symloader] picked up newer source file for " + src.path)
+ global.loaders.enterToplevelsFromSource(owner, classRep.name, src)
+ case (None, Some(src)) =>
+ if (settings.verbose.value) inform("[symloader] no class, picked up source file for " + src.path)
+ global.loaders.enterToplevelsFromSource(owner, classRep.name, src)
+ case (Some(bin), _) =>
+ global.loaders.enterClassAndModule(owner, classRep.name, platform.newClassLoader(bin))
+ }
+ }
+
/**
* A lazy type that completes itself by calling parameter doComplete.
* Any linked modules/classes or module classes are also initialized.
+ * Todo: consider factoring out behavior from TopClassCompleter/SymbolLoader into
+ * supertrait SymLoader
*/
- abstract class SymbolLoader extends LazyType {
+ abstract class SymbolLoader extends SymLoader {
- /** Load source or class file for `root', return */
+ /** Load source or class file for `root`, return */
protected def doComplete(root: Symbol): Unit
def sourcefile: Option[AbstractFile] = None
/**
- * Description of the resource (ClassPath, AbstractFile, MSILType)
+ * Description of the resource (ClassPath, AbstractFile, MsilFile)
* being processed by this loader
*/
protected def description: String
@@ -104,7 +174,20 @@ abstract class SymbolLoaders {
case _ => ()
})
}
- override def complete(root: Symbol) : Unit = {
+
+ override def complete(root: Symbol) {
+ def signalError(ex: Exception) {
+ ok = false
+ if (settings.debug.value) ex.printStackTrace()
+ val msg = ex.getMessage()
+ // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
+ // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
+ // that are not in their correct place (see bug for details)
+ if (!settings.isScaladoc)
+ globalError(
+ if (msg eq null) "i/o error while loading " + root.name
+ else "error while loading " + root.name + ", " + msg);
+ }
try {
val start = currentTime
val currentphase = phase
@@ -116,12 +199,9 @@ abstract class SymbolLoaders {
setSource(root.companionSymbol) // module -> class, class -> module
} catch {
case ex: IOException =>
- ok = false
- if (settings.debug.value) ex.printStackTrace()
- val msg = ex.getMessage()
- globalError(
- if (msg eq null) "i/o error while loading " + root.name
- else "error while loading " + root.name + ", " + msg);
+ signalError(ex)
+ case ex: MissingRequirementError =>
+ signalError(ex)
}
initRoot(root)
if (!root.isPackageClass) initRoot(root.companionSymbol)
@@ -146,157 +226,30 @@ abstract class SymbolLoaders {
/**
* Load contents of a package
*/
- abstract class PackageLoader[T](classpath: ClassPath[T]) extends SymbolLoader {
+ class PackageLoader(classpath: ClassPath[platform.BinaryRepr]) extends SymbolLoader with FlagAgnosticCompleter {
protected def description = "package loader "+ classpath.name
- def enterPackage(root: Symbol, name: String, completer: SymbolLoader) {
- val preExisting = root.info.decls.lookup(newTermName(name))
- if (preExisting != NoSymbol) {
- // Some jars (often, obfuscated ones) include a package and
- // object with the same name. Rather than render them unusable,
- // offer a setting to resolve the conflict one way or the other.
- // This was motivated by the desire to use YourKit probes, which
- // require yjp.jar at runtime. See SI-2089.
- if (settings.termConflict.isDefault)
- throw new TypeError(
- root+" contains object and package with same name: "+
- name+"\none of them needs to be removed from classpath"
- )
- else if (settings.termConflict.value == "package") {
- global.warning(
- "Resolving package/object name conflict in favor of package " +
- preExisting.fullName + ". The object will be inaccessible."
- )
- root.info.decls.unlink(preExisting)
- }
- else {
- global.warning(
- "Resolving package/object name conflict in favor of object " +
- preExisting.fullName + ". The package will be inaccessible."
- )
- return
- }
- }
- val pkg = root.newPackage(NoPosition, newTermName(name))
- pkg.moduleClass.setInfo(completer)
- pkg.setInfo(pkg.moduleClass.tpe)
- root.info.decls.enter(pkg)
- }
-
- /**
- * Tells whether a class with both a binary and a source representation
- * (found in classpath and in sourcepath) should be re-compiled. Behaves
- * similar to javac, i.e. if the source file is newer than the classfile,
- * a re-compile is triggered.
- */
- protected def needCompile(bin: T, src: AbstractFile): Boolean
-
- /**
- * Tells whether a class should be loaded and entered into the package
- * scope. On .NET, this method returns `false' for all synthetic classes
- * (anonymous classes, implementation classes, module classes), their
- * symtab is encoded in the pickle of another class.
- */
- protected def doLoad(cls: classpath.AnyClassRep): Boolean
-
- protected def newClassLoader(bin: T): SymbolLoader
-
- protected def newPackageLoader(pkg: ClassPath[T]): SymbolLoader
-
protected def doComplete(root: Symbol) {
assert(root.isPackageClass, root)
- root.setInfo(new PackageClassInfoType(new Scope(), root))
+ root.setInfo(new PackageClassInfoType(newScope, root))
val sourcepaths = classpath.sourcepaths
- for (classRep <- classpath.classes if doLoad(classRep)) {
- ((classRep.binary, classRep.source) : @unchecked) match {
- case (Some(bin), Some(src)) if needCompile(bin, src) =>
- if (settings.verbose.value) inform("[symloader] picked up newer source file for " + src.path)
- enterToplevelsFromSource(root, classRep.name, src)
- case (None, Some(src)) =>
- if (settings.verbose.value) inform("[symloader] no class, picked up source file for " + src.path)
- enterToplevelsFromSource(root, classRep.name, src)
- case (Some(bin), _) =>
- enterClassAndModule(root, classRep.name, newClassLoader(bin))
+ if (!root.isRoot) {
+ for (classRep <- classpath.classes if platform.doLoad(classRep)) {
+ initializeFromClassPath(root, classRep)
}
}
+ if (!root.isEmptyPackageClass) {
+ for (pkg <- classpath.packages) {
+ enterPackage(root, pkg.name, new PackageLoader(pkg))
+ }
- for (pkg <- classpath.packages) {
- enterPackage(root, pkg.name, newPackageLoader(pkg))
- }
-
- // if there's a $member object, enter its members as well.
- val pkgModule = root.info.decl(nme.PACKAGEkw)
- if (pkgModule.isModule && !pkgModule.rawInfo.isInstanceOf[SourcefileLoader]) {
- // println("open "+pkgModule)//DEBUG
- openPackageModule(pkgModule)()
+ openPackageModule(root)
}
}
}
- def openPackageModule(module: Symbol)(packageClass: Symbol = module.owner): Unit = {
- // unlink existing symbols in the package
- for (member <- module.info.decls.iterator) {
- if (!member.isPrivate && !member.isConstructor) {
- // todo: handle overlapping definitions in some way: mark as errors
- // or treat as abstractions. For now the symbol in the package module takes precedence.
- for (existing <- packageClass.info.decl(member.name).alternatives)
- packageClass.info.decls.unlink(existing)
- }
- }
- // enter non-private decls the class
- for (member <- module.info.decls.iterator) {
- if (!member.isPrivate && !member.isConstructor) {
- packageClass.info.decls.enter(member)
- }
- }
- // enter decls of parent classes
- for (pt <- module.info.parents; val p = pt.typeSymbol) {
- if (p != definitions.ObjectClass && p != definitions.ScalaObjectClass) {
- openPackageModule(p)(packageClass)
- }
- }
- }
-
- class JavaPackageLoader(classpath: ClassPath[AbstractFile]) extends PackageLoader(classpath) {
- protected def needCompile(bin: AbstractFile, src: AbstractFile) =
- (src.lastModified >= bin.lastModified)
-
- protected def doLoad(cls: classpath.AnyClassRep) = true
-
- protected def newClassLoader(bin: AbstractFile) =
- new ClassfileLoader(bin)
-
- protected def newPackageLoader(pkg: ClassPath[AbstractFile]) =
- new JavaPackageLoader(pkg)
- }
-
- class NamespaceLoader(classpath: ClassPath[MSILType]) extends PackageLoader(classpath) {
- protected def needCompile(bin: MSILType, src: AbstractFile) =
- false // always use compiled file on .net
-
- protected def doLoad(cls: classpath.AnyClassRep) = {
- if (cls.binary.isDefined) {
- val typ = cls.binary.get
- if (typ.IsDefined(clrTypes.SCALA_SYMTAB_ATTR, false)) {
- val attrs = typ.GetCustomAttributes(clrTypes.SCALA_SYMTAB_ATTR, false)
- assert (attrs.length == 1, attrs.length)
- val a = attrs(0).asInstanceOf[MSILAttribute]
- // symtab_constr takes a byte array argument (the pickle), i.e. typ has a pickle.
- // otherwise, symtab_default_constr was used, which marks typ as scala-synthetic.
- a.getConstructor() == clrTypes.SYMTAB_CONSTR
- } else true // always load non-scala types
- } else true // always load source
- }
-
- protected def newClassLoader(bin: MSILType) =
- new MSILTypeLoader(bin)
-
- protected def newPackageLoader(pkg: ClassPath[MSILType]) =
- new NamespaceLoader(pkg)
- }
-
- class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader {
+ class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
private object classfileParser extends ClassfileParser {
val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
}
@@ -304,29 +257,41 @@ abstract class SymbolLoaders {
protected def description = "class file "+ classfile.toString
protected def doComplete(root: Symbol) {
- val start = startTimer(classReadNanos)
+ val start = if (Statistics.canEnable) Statistics.startTimer(classReadNanos) else null
classfileParser.parse(classfile, root)
- stopTimer(classReadNanos, start)
+ if (root.associatedFile eq null) {
+ root match {
+ // In fact, the ModuleSymbol forwards its setter to the module class
+ case _: ClassSymbol | _: ModuleSymbol =>
+ debuglog("ClassfileLoader setting %s.associatedFile = %s".format(root.name, classfile))
+ root.associatedFile = classfile
+ case _ =>
+ debuglog("Not setting associatedFile to %s because %s is a %s".format(classfile, root.name, root.shortSymbolClass))
+ }
+ }
+ if (Statistics.canEnable) Statistics.stopTimer(classReadNanos, start)
}
- override def sourcefile = classfileParser.srcfile
+ override def sourcefile: Option[AbstractFile] = classfileParser.srcfile
}
- class MSILTypeLoader(typ: MSILType) extends SymbolLoader {
+ class MsilFileLoader(msilFile: MsilFile) extends SymbolLoader with FlagAssigningCompleter {
+ private def typ = msilFile.msilType
private object typeParser extends clr.TypeParser {
val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
}
- protected def description = "MSILType "+ typ.FullName + ", assembly "+ typ.Assembly.FullName
+ protected def description = "MsilFile "+ typ.FullName + ", assembly "+ typ.Assembly.FullName
protected def doComplete(root: Symbol) { typeParser.parse(typ, root) }
}
- class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader {
+ class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
protected def description = "source file "+ srcfile.toString
+ override def fromSource = true
override def sourcefile = Some(srcfile)
protected def doComplete(root: Symbol): Unit = global.currentRun.compileLate(srcfile)
}
- object moduleClassLoader extends SymbolLoader {
+ object moduleClassLoader extends SymbolLoader with FlagAssigningCompleter {
protected def description = "module class loader"
protected def doComplete(root: Symbol) { root.sourceModule.initialize }
}
@@ -340,3 +305,8 @@ abstract class SymbolLoaders {
var parentsLevel = 0
var pendingLoadActions: List[() => Unit] = Nil
}
+
+object SymbolLoadersStats {
+ import scala.reflect.internal.TypesStats.typerNanos
+ val classReadNanos = Statistics.newSubTimer ("time classfilereading", typerNanos)
+}
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
index c783576..2101a65 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
@@ -1,162 +1,9 @@
/* NSC -- new scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package symtab
-import ast.{Trees, TreePrinters, DocComments}
-import scala.collection.{ mutable, immutable }
-import util._
-
-abstract class SymbolTable extends reflect.generic.Universe
- with Names
- with Symbols
- with Types
- with Scopes
- with Definitions
- with reflect.generic.Constants
- with BaseTypeSeqs
- with InfoTransformers
- with StdNames
- with AnnotationInfos
- with AnnotationCheckers
- with Trees
- with TreePrinters
- with Positions
- with DocComments
- with TypeDebugging
-{
- def settings: Settings
- def rootLoader: LazyType
- def log(msg: => AnyRef): Unit
- def abort(msg: String) = throw new Error(msg)
- def abort() = throw new Error()
-
- /** Are we compiling for Java SE ? */
- def forJVM: Boolean
-
- /** Are we compiling for .NET ? */
- def forMSIL: Boolean
-
- /** A period is an ordinal number for a phase in a run.
- * Phases in later runs have higher periods than phases in earlier runs.
- * Later phases have higher periods than earlier phases in the same run.
- */
- type Period = Int
- final val NoPeriod = 0
-
- /** An ordinal number for compiler runs. First run has number 1. */
- type RunId = Int
- final val NoRunId = 0
-
- private var ph: Phase = NoPhase
- private var per = NoPeriod
-
- final def phase: Phase = ph
-
- final def phase_=(p: Phase) {
- //System.out.println("setting phase to " + p)
- assert((p ne null) && p != NoPhase)
- ph = p
- per = (currentRunId << 8) + p.id
- }
-
- /** The current compiler run identifier. */
- def currentRunId: RunId
-
- /** The run identifier of the given period */
- final def runId(period: Period): RunId = period >> 8
-
- /** The phase identifier of the given period */
- final def phaseId(period: Period): Phase#Id = period & 0xFF
-
- /** The period at the start of run that includes `period' */
- final def startRun(period: Period): Period = period & 0xFFFFFF00
-
- /** The current period */
- final def currentPeriod: Period = {
- //assert(per == (currentRunId << 8) + phase.id)
- per
- }
-
- /** The phase associated with given period */
- final def phaseOf(period: Period): Phase = phaseWithId(phaseId(period))
-
- final def period(rid: RunId, pid: Phase#Id): Period =
- (currentRunId << 8) + pid
-
- /** Perform given operation at given phase */
- final def atPhase[T](ph: Phase)(op: => T): T = {
- // Eugene: insert same thread assertion here
- val current = phase
- phase = ph
- try op
- finally phase = current
- }
- final def afterPhase[T](ph: Phase)(op: => T): T =
- atPhase(ph.next)(op)
-
- final def isValid(period: Period): Boolean =
- period != 0 && runId(period) == currentRunId && {
- val pid = phaseId(period)
- if (phase.id > pid) infoTransformers.nextFrom(pid).pid >= phase.id
- else infoTransformers.nextFrom(phase.id).pid >= pid
- }
-
- final def isValidForBaseClasses(period: Period): Boolean = {
- def noChangeInBaseClasses(it: InfoTransformer, limit: Phase#Id): Boolean = (
- it.pid >= limit ||
- !it.changesBaseClasses && noChangeInBaseClasses(it.next, limit)
- );
- period != 0 && runId(period) == currentRunId && {
- val pid = phaseId(period)
- if (phase.id > pid) noChangeInBaseClasses(infoTransformers.nextFrom(pid), phase.id)
- else noChangeInBaseClasses(infoTransformers.nextFrom(phase.id), pid)
- }
- }
-
- object perRunCaches {
- import java.lang.ref.WeakReference
-
- // We can allow ourselves a structural type, these methods
- // amount to a few calls per run at most. This does suggest
- // a "Clearable" trait may be useful.
- private type Clearable = {
- def size: Int
- def clear(): Unit
- }
- // Weak references so the garbage collector will take care of
- // letting us know when a cache is really out of commission.
- private val caches = mutable.HashSet[WeakReference[Clearable]]()
-
- def clearAll() = {
- caches foreach { ref =>
- val cache = ref.get()
- if (cache == null)
- caches -= ref
- else
- cache.clear()
- }
- }
-
- def newMap[K, V]() = { val m = mutable.HashMap[K, V]() ; caches += new WeakReference(m) ; m }
- def newSet[K]() = { val s = mutable.HashSet[K]() ; caches += new WeakReference(s) ; s }
- }
-
- /** Break into repl debugger if assertion is true */
- // def breakIf(assertion: => Boolean, args: Any*): Unit =
- // if (assertion)
- // ILoop.break(args.toList)
-
- /** The set of all installed infotransformers */
- var infoTransformers = new InfoTransformer {
- val pid = NoPhase.id
- val changesBaseClasses = true
- def transform(sym: Symbol, tpe: Type): Type = tpe
- }
-
- /** The phase which has given index as identifier */
- val phaseWithId: Array[Phase]
-}
+abstract class SymbolTable extends scala.reflect.internal.SymbolTable
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
index 1f32355..7a84441 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -7,6 +7,8 @@ package scala.tools.nsc
package symtab
import scala.collection.{ mutable, immutable }
+import scala.language.implicitConversions
+import scala.language.postfixOps
/** Printing the symbol graph (for those symbols attached to an AST node)
* after each phase.
@@ -131,7 +133,7 @@ trait SymbolTrackers {
else " (" + Flags.flagsToString(masked) + ")"
}
def symString(sym: Symbol) = (
- if (settings.debug.value && sym.hasRawInfo && sym.rawInfo.isComplete) {
+ if (settings.debug.value && sym.hasCompleteInfo) {
val s = sym.defString take 240
if (s.length == 240) s + "..." else s
}
diff --git a/src/compiler/scala/tools/nsc/symtab/Symbols.scala b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
deleted file mode 100644
index f6c6ef6..0000000
--- a/src/compiler/scala/tools/nsc/symtab/Symbols.scala
+++ /dev/null
@@ -1,2199 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-
-package scala.tools.nsc
-package symtab
-
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
-import io.AbstractFile
-import util.{ Position, NoPosition, BatchSourceFile }
-import util.Statistics._
-import Flags._
-
-trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
- import definitions._
-
- private var ids = 0
- def symbolCount = ids // statistics
-
- val emptySymbolArray = new Array[Symbol](0)
-
- /** Used for deciding in the IDE whether we can interrupt the compiler */
- //protected var activeLocks = 0
-
- /** Used for debugging only */
- //protected var lockedSyms = collection.immutable.Set[Symbol]()
-
- /** Used to keep track of the recursion depth on locked symbols */
- private var recursionTable = immutable.Map.empty[Symbol, Int]
-
- private var nextexid = 0
- private def freshExistentialName(suffix: String) = {
- nextexid += 1
- newTypeName("_" + nextexid + suffix)
- }
-
- /** The original owner of a class. Used by the backend to generate
- * EnclosingMethod attributes.
- */
- val originalOwner = perRunCaches.newMap[Symbol, Symbol]()
-
- /** The class for all symbols */
- abstract class Symbol(initOwner: Symbol, initPos: Position, initName: Name) extends AbsSymbol {
- var rawowner = initOwner
- var rawname = initName
- var rawflags = 0L
-
- private var rawpos = initPos
- val id = { ids += 1; ids } // identity displayed when -uniqid
-
- var validTo: Period = NoPeriod
-
- def pos = rawpos
- def setPos(pos: Position): this.type = { this.rawpos = pos; this }
-
-// annotations
-
- private var rawannots: List[AnnotationInfoBase] = Nil
- def rawAnnotations = rawannots
-
- /* Used in namer to check whether annotations were already assigned or not */
- def hasAssignedAnnotations = rawannots.nonEmpty
-
- /** After the typer phase (before, look at the definition's Modifiers), contains
- * the annotations attached to member a definition (class, method, type, field).
- */
- def annotations: List[AnnotationInfo] = {
- // .initialize: the type completer of the symbol parses the annotations,
- // see "def typeSig" in Namers
- val annots1 = initialize.rawannots map {
- case x: LazyAnnotationInfo => x.annot()
- case x: AnnotationInfo => x
- } filterNot (_.atp.isError)
- rawannots = annots1
- annots1
- }
-
- def setAnnotations(annots: List[AnnotationInfoBase]): this.type = {
- this.rawannots = annots
- this
- }
-
- override def addAnnotation(annot: AnnotationInfo) {
- setAnnotations(annot :: this.rawannots)
- }
-
- /** Does this symbol have an annotation of the given class? */
- def hasAnnotation(cls: Symbol) =
- getAnnotation(cls).isDefined
-
- def getAnnotation(cls: Symbol): Option[AnnotationInfo] =
- annotations find (_.atp.typeSymbol == cls)
-
- /** Remove all annotations matching the given class. */
- def removeAnnotation(cls: Symbol): Unit =
- setAnnotations(annotations filterNot (_.atp.typeSymbol == cls))
-
- /** See comment in HasFlags for how privateWithin combines with flags.
- */
- private[this] var _privateWithin: Symbol = _
- def privateWithin = _privateWithin
- override def privateWithin_=(sym: Symbol) { _privateWithin = sym }
-
-// Creators -------------------------------------------------------------------
-
- final def newValue(pos: Position, name: TermName) =
- new TermSymbol(this, pos, name)
- final def newValue(name: TermName, pos: Position = NoPosition) =
- new TermSymbol(this, pos, name)
- final def newVariable(pos: Position, name: TermName) =
- newValue(pos, name).setFlag(MUTABLE)
- final def newValueParameter(pos: Position, name: TermName) =
- newValue(pos, name).setFlag(PARAM)
- /** Create local dummy for template (owner of local blocks) */
- final def newLocalDummy(pos: Position) =
- newValue(pos, nme.localDummyName(this)).setInfo(NoType)
- final def newMethod(pos: Position, name: TermName) =
- new MethodSymbol(this, pos, name).setFlag(METHOD)
- final def newMethod(name: TermName, pos: Position = NoPosition) =
- new MethodSymbol(this, pos, name).setFlag(METHOD)
- final def newLabel(pos: Position, name: TermName) =
- newMethod(pos, name).setFlag(LABEL)
- final def newConstructor(pos: Position) =
- newMethod(pos, nme.CONSTRUCTOR)
- final def newModule(pos: Position, name: TermName, clazz: ClassSymbol) =
- new ModuleSymbol(this, pos, name).setFlag(MODULE | FINAL)
- .setModuleClass(clazz)
- final def newModule(name: TermName, clazz: Symbol, pos: Position = NoPosition) =
- new ModuleSymbol(this, pos, name).setFlag(MODULE | FINAL)
- .setModuleClass(clazz.asInstanceOf[ClassSymbol])
- final def newModule(pos: Position, name: TermName) = {
- val m = new ModuleSymbol(this, pos, name).setFlag(MODULE | FINAL)
- m.setModuleClass(new ModuleClassSymbol(m))
- }
- final def newPackage(pos: Position, name: TermName) = {
- assert(name == nme.ROOT || isPackageClass)
- val m = newModule(pos, name).setFlag(JAVA | PACKAGE)
- m.moduleClass.setFlag(JAVA | PACKAGE)
- m
- }
- final def newThisSym(pos: Position) =
- newValue(pos, nme.this_).setFlag(SYNTHETIC)
- final def newImport(pos: Position) =
- newValue(pos, nme.IMPORT)
-
- /** @param pre type relative to which alternatives are seen.
- * for instance:
- * class C[T] {
- * def m(x: T): T
- * def m'(): T
- * }
- * val v: C[Int]
- *
- * Then v.m has symbol TermSymbol(flags = {OVERLOADED},
- * tpe = OverloadedType(C[Int], List(m, m')))
- * You recover the type of m doing a
- *
- * m.tpe.asSeenFrom(pre, C) (generally, owner of m, which is C here).
- *
- * or:
- *
- * pre.memberType(m)
- */
- final def newOverloaded(pre: Type, alternatives: List[Symbol]): Symbol =
- newValue(alternatives.head.pos, alternatives.head.name.toTermName)
- .setFlag(OVERLOADED)
- .setInfo(OverloadedType(pre, alternatives))
-
- /** for explicit outer phase */
- final def newOuterAccessor(pos: Position) = {
- val sym = newMethod(pos, nme.OUTER)
- sym setFlag (STABLE | SYNTHETIC)
- if (isTrait) sym setFlag DEFERRED
- sym.expandName(this)
- sym.referenced = this
- sym
- }
-
- final def newErrorValue(name: TermName) =
- newValue(pos, name).setFlag(SYNTHETIC | IS_ERROR).setInfo(ErrorType)
-
- /** Symbol of a type definition type T = ...
- */
- final def newAliasType(pos: Position, name: TypeName) =
- new TypeSymbol(this, pos, name)
- final def newAliasType(name: TypeName, pos: Position = NoPosition) =
- new TypeSymbol(this, pos, name)
-
- /** Symbol of an abstract type type T >: ... <: ...
- */
- final def newAbstractType(pos: Position, name: TypeName) =
- new TypeSymbol(this, pos, name).setFlag(DEFERRED)
- final def newAbstractType(name: TypeName, pos: Position = NoPosition) =
- new TypeSymbol(this, pos, name).setFlag(DEFERRED)
-
- /** Symbol of a type parameter
- */
- final def newTypeParameter(pos: Position, name: TypeName) =
- newAbstractType(pos, name).setFlag(PARAM)
-
- /** Synthetic value parameters when parameter symbols are not available
- */
- final def newSyntheticValueParamss(argtypess: List[List[Type]]): List[List[Symbol]] = {
- var cnt = 0
- def freshName() = { cnt += 1; newTermName("x$" + cnt) }
- def param(tp: Type) =
- newValueParameter(owner.pos.focus, freshName()).setFlag(SYNTHETIC).setInfo(tp)
- argtypess map (_.map(param))
- }
-
- final def newExistential(pos: Position, name: TypeName): Symbol =
- newAbstractType(pos, name).setFlag(EXISTENTIAL)
-
- final def freshExistential(suffix: String): Symbol =
- newExistential(pos, freshExistentialName(suffix))
-
- /** Synthetic value parameters when parameter symbols are not available.
- * Calling this method multiple times will re-use the same parameter names.
- */
- final def newSyntheticValueParams(argtypes: List[Type]): List[Symbol] =
- newSyntheticValueParamss(List(argtypes)).head
-
- /** Synthetic value parameter when parameter symbol is not available.
- * Calling this method multiple times will re-use the same parameter name.
- */
- final def newSyntheticValueParam(argtype: Type): Symbol =
- newSyntheticValueParams(List(argtype)).head
-
- /** Type skolems are type parameters ``seen from the inside''
- * Assuming a polymorphic method m[T], its type is a PolyType which has a TypeParameter
- * with name `T' in its typeParams list. While type checking the parameters, result type and
- * body of the method, there's a local copy of `T' which is a TypeSkolem.
- */
- final def newTypeSkolem: Symbol =
- new TypeSkolem(owner, pos, name.toTypeName, this)
- .setFlag(flags)
-
- final def newClass(pos: Position, name: TypeName) =
- new ClassSymbol(this, pos, name)
- final def newClass(name: TypeName, pos: Position = NoPosition) =
- new ClassSymbol(this, pos, name)
-
- final def newModuleClass(pos: Position, name: TypeName) =
- new ModuleClassSymbol(this, pos, name)
- final def newModuleClass(name: TypeName, pos: Position = NoPosition) =
- new ModuleClassSymbol(this, pos, name)
-
- final def newAnonymousClass(pos: Position) =
- newClass(pos, tpnme.ANON_CLASS_NAME)
- final def newAnonymousFunctionClass(pos: Position) =
- newClass(pos, tpnme.ANON_FUN_NAME)
-
- /** Refinement types P { val x: String; type T <: Number }
- * also have symbols, they are refinementClasses
- */
- final def newRefinementClass(pos: Position) =
- newClass(pos, tpnme.REFINE_CLASS_NAME)
-
- /** Create a new getter for current symbol (which must be a field)
- */
- final def newGetter: Symbol = {
- val getter = owner.newMethod(pos.focus, nme.getterName(name)).setFlag(getterFlags(flags))
- getter.privateWithin = privateWithin
- getter.setInfo(MethodType(List(), tpe))
- }
-
- final def newErrorClass(name: TypeName) = {
- val clazz = newClass(pos, name).setFlag(SYNTHETIC | IS_ERROR)
- clazz.setInfo(ClassInfoType(List(), new ErrorScope(this), clazz))
- clazz
- }
-
- final def newErrorSymbol(name: Name): Symbol = name match {
- case x: TypeName => newErrorClass(x)
- case x: TermName => newErrorValue(x)
- }
-
-// Locking and unlocking ------------------------------------------------------
-
- // True if the symbol is unlocked.
- // True if the symbol is locked but still below the allowed recursion depth.
- // False otherwise
- def lockOK: Boolean = {
- ((rawflags & LOCKED) == 0L) ||
- ((settings.Yrecursion.value != 0) &&
- (recursionTable get this match {
- case Some(n) => (n <= settings.Yrecursion.value)
- case None => true }))
- }
-
- // Lock a symbol, using the handler if the recursion depth becomes too great.
- def lock(handler: => Unit) = {
- if ((rawflags & LOCKED) != 0L) {
- if (settings.Yrecursion.value != 0) {
- recursionTable get this match {
- case Some(n) =>
- if (n > settings.Yrecursion.value) {
- handler
- } else {
- recursionTable += (this -> (n + 1))
- }
- case None =>
- recursionTable += (this -> 1)
- }
- } else { handler }
- } else {
- rawflags |= LOCKED
-// activeLocks += 1
-// lockedSyms += this
- }
- }
-
- // Unlock a symbol
- def unlock() = {
- if ((rawflags & LOCKED) != 0L) {
-// activeLocks -= 1
-// lockedSyms -= this
- rawflags = rawflags & ~LOCKED
- if (settings.Yrecursion.value != 0)
- recursionTable -= this
- }
- }
-
-// Tests ----------------------------------------------------------------------
-
- /** Is this symbol a type but not a class? */
- def isNonClassType = false
-
- /** Term symbols with the exception of static parts of Java classes and packages.
- */
- final def isValue = isTerm && !(isModule && hasFlag(PACKAGE | JAVA))
-
- final def isVariable = isTerm && isMutable && !isMethod
-
- // interesting only for lambda lift. Captured variables are accessed from inner lambdas.
- final def isCapturedVariable = isVariable && hasFlag(CAPTURED)
-
- final def isGetter = isTerm && hasAccessorFlag && !nme.isSetterName(name)
- // todo: make independent of name, as this can be forged.
- final def isSetter = isTerm && hasAccessorFlag && nme.isSetterName(name)
- def isSetterParameter = isValueParameter && owner.isSetter
-
- final def hasGetter = isTerm && nme.isLocalName(name)
-
- final def isValueParameter = isTerm && hasFlag(PARAM)
- final def isLocalDummy = isTerm && nme.isLocalDummyName(name)
- final def isInitializedToDefault = !isType && hasAllFlags(DEFAULTINIT | ACCESSOR)
- final def isClassConstructor = isTerm && (name == nme.CONSTRUCTOR)
- final def isMixinConstructor = isTerm && (name == nme.MIXIN_CONSTRUCTOR)
- final def isConstructor = isTerm && nme.isConstructorName(name)
- final def isStaticModule = isModule && isStatic && !isMethod
- final def isThisSym = isTerm && owner.thisSym == this
- final def isError = hasFlag(IS_ERROR)
- final def isErroneous = isError || isInitialized && tpe.isErroneous
- override final def isTrait: Boolean = isClass && hasFlag(TRAIT | notDEFERRED) // A virtual class becomes a trait (part of DEVIRTUALIZE)
- final def isTypeParameterOrSkolem = isType && hasFlag(PARAM)
- final def isHigherOrderTypeParameter = owner.isTypeParameterOrSkolem
- final def isTypeSkolem = isSkolem && hasFlag(PARAM)
- // a type symbol bound by an existential type, for instance the T in
- // List[T] forSome { type T }
- final def isExistentialSkolem = isExistentiallyBound && isSkolem
- final def isExistentialQuantified = isExistentiallyBound && !isSkolem
-
- // class C extends D( { class E { ... } ... } ). Here, E is a class local to a constructor
- final def isClassLocalToConstructor = isClass && hasFlag(INCONSTRUCTOR)
-
- final def isAnonymousClass = isClass && (name containsName tpnme.ANON_CLASS_NAME)
- final def isAnonymousFunction = isSynthetic && (name containsName tpnme.ANON_FUN_NAME)
- final def isAnonOrRefinementClass = isAnonymousClass || isRefinementClass
-
- final def isPackageObject = isModule && name == nme.PACKAGEkw && owner.isPackageClass
- final def isPackageObjectClass = isModuleClass && name.toTermName == nme.PACKAGEkw && owner.isPackageClass
- final def definedInPackage = owner.isPackageClass || owner.isPackageObjectClass
- final def isJavaInterface = isJavaDefined && isTrait
- final def needsFlatClasses: Boolean = phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass
-
- // not printed as prefixes
- final def isPredefModule = this == PredefModule
- final def isScalaPackage = (this == ScalaPackage) || (isPackageObject && owner == ScalaPackageClass)
- final def isScalaPackageClass = skipPackageObject == ScalaPackageClass
- def inDefaultNamespace = owner.isPredefModule || owner.isScalaPackageClass
-
- /** If this is a package object or package object class, its owner: otherwise this.
- */
- final def skipPackageObject: Symbol = if (isPackageObjectClass) owner else this
-
- /** If this is a constructor, its owner: otherwise this.
- */
- final def skipConstructor: Symbol = if (isConstructor) owner else this
-
- /** Conditions where we omit the prefix when printing a symbol, to avoid
- * unpleasantries like Predef.String, $iw.$iw.Foo and <empty>.Bippy.
- */
- final def printWithoutPrefix = !settings.debug.value && (
- isScalaPackageClass || isPredefModule || isEffectiveRoot || isAnonOrRefinementClass || isInterpreterWrapper
- )
-
- /** Is symbol a monomorphic type?
- * assumption: if a type starts out as monomorphic, it will not acquire
- * type parameters in later phases.
- */
- final def isMonomorphicType =
- isType && {
- var is = infos
- (is eq null) || {
- while (is.prev ne null) { is = is.prev }
- is.info.isComplete && !is.info.isHigherKinded // was: is.info.typeParams.isEmpty.
- // YourKit listed the call to PolyType.typeParams as a hot spot but it is likely an artefact.
- // The change to isHigherKinded did not reduce the total running time.
- }
- }
-
- def isStrictFP = hasAnnotation(ScalaStrictFPAttr) || (enclClass hasAnnotation ScalaStrictFPAttr)
- def isSerializable = info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass) || hasAnnotation(SerializableAttr) // last part can be removed, @serializable annotation is deprecated
- def isDeprecated = hasAnnotation(DeprecatedAttr)
- def hasBridgeAnnotation = hasAnnotation(BridgeClass)
- def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0)
- def deprecationVersion = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 1)
- // !!! when annotation arguments are not literal strings, but any sort of
- // assembly of strings, there is a fair chance they will turn up here not as
- // Literal(const) but some arbitrary AST. However nothing in the compiler
- // prevents someone from writing a @migration annotation with a calculated
- // string. So this needs attention. For now the fact that migration is
- // private[scala] ought to provide enough protection.
- def hasMigrationAnnotation = hasAnnotation(MigrationAnnotationClass)
- def migrationMessage = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(0) }
- def migrationVersion = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(1) }
- def elisionLevel = getAnnotation(ElidableMethodClass) flatMap { _.intArg(0) }
- def implicitNotFoundMsg = getAnnotation(ImplicitNotFoundClass) flatMap { _.stringArg(0) }
-
- /** Does this symbol denote a wrapper object of the interpreter or its class? */
- final def isInterpreterWrapper = (
- (isModule || isModuleClass)
- && owner.isPackageClass
- && (name containsName nme.INTERPRETER_IMPORT_WRAPPER)
- )
-
- /** Is this symbol an accessor method for outer? */
- final def isOuterAccessor = {
- hasFlag(STABLE | SYNTHETIC) &&
- originalName == nme.OUTER
- }
-
- /** Is this symbol an accessor method for outer? */
- final def isOuterField = {
- hasFlag(SYNTHETIC) &&
- originalName == nme.OUTER_LOCAL
- }
-
- /** Does this symbol denote a stable value? */
- final def isStable =
- isTerm &&
- !isMutable &&
- (!hasFlag(METHOD | BYNAMEPARAM) || hasFlag(STABLE)) &&
- !(tpe.isVolatile && !hasAnnotation(uncheckedStableClass))
-
- def isVirtualClass =
- hasFlag(DEFERRED) && isClass
-
- def isVirtualTrait =
- hasFlag(DEFERRED) && isTrait
-
- def isLiftedMethod = isMethod && hasFlag(LIFTED)
- def isCaseClass = isClass && isCase
-
- /** Does this symbol denote the primary constructor of its enclosing class? */
- final def isPrimaryConstructor =
- isConstructor && owner.primaryConstructor == this
-
- /** Does this symbol denote an auxiliary constructor of its enclosing class? */
- final def isAuxiliaryConstructor =
- isConstructor && !isPrimaryConstructor
-
- /** Is this symbol a synthetic apply or unapply method in a companion object of a case class? */
- final def isCaseApplyOrUnapply =
- isMethod && isCase && isSynthetic
-
- /** Is this symbol a trait which needs an implementation class? */
- final def needsImplClass: Boolean =
- isTrait && (!isInterface || hasFlag(lateINTERFACE)) && !isImplClass
-
- /** Is this a symbol which exists only in the implementation class, not in its trait? */
- final def isImplOnly: Boolean =
- hasFlag(PRIVATE) ||
- (owner.isImplClass || owner.isTrait) &&
- ((hasFlag(notPRIVATE | LIFTED) && !hasFlag(ACCESSOR | SUPERACCESSOR | MODULE) || isConstructor) ||
- (hasFlag(LIFTED) && isModule && isMethod))
-
- /** Is this symbol a module variable?
- * This used to have to test for MUTABLE to distinguish the overloaded
- * MODULEVAR/SYNTHETICMETH flag, but now SYNTHETICMETH is gone.
- */
- final def isModuleVar = hasFlag(MODULEVAR)
-
- /** Is this symbol static (i.e. with no outer instance)? */
- final def isStatic: Boolean =
- hasFlag(STATIC) || isRoot || owner.isStaticOwner
-
- /** Is this symbol a static constructor? */
- final def isStaticConstructor: Boolean =
- isStaticMember && isClassConstructor
-
- /** Is this symbol a static member of its class? (i.e. needs to be implemented as a Java static?) */
- final def isStaticMember: Boolean =
- hasFlag(STATIC) || owner.isImplClass
-
- /** Does this symbol denote a class that defines static symbols? */
- final def isStaticOwner: Boolean =
- isPackageClass || isModuleClass && isStatic
-
- /** Is this symbol effectively final? I.e, it cannot be overridden */
- final def isEffectivelyFinal: Boolean = isFinal || isTerm && (
- hasFlag(PRIVATE) || isLocal || owner.isClass && owner.hasFlag(FINAL | MODULE))
-
- /** Is this symbol locally defined? I.e. not accessed from outside `this' instance */
- final def isLocal: Boolean = owner.isTerm
-
- /** Is this symbol a constant? */
- final def isConstant: Boolean = isStable && isConstantType(tpe.resultType)
-
- /** Is this class nested in another class or module (not a package)? */
- final def isNestedClass: Boolean =
- isClass && !isRoot && !owner.isPackageClass
-
- /** Is this class locally defined?
- * A class is local, if
- * - it is anonymous, or
- * - its owner is a value
- * - it is defined within a local class
- */
- final def isLocalClass: Boolean =
- isClass && (isAnonOrRefinementClass || isLocal ||
- !owner.isPackageClass && owner.isLocalClass)
-
-/* code for fixing nested objects
- override final def isModuleClass: Boolean =
- super.isModuleClass && !isExpandedModuleClass
-*/
- /** Is this class or type defined as a structural refinement type?
- */
- final def isStructuralRefinement: Boolean =
- (isClass || isType || isModule) && info.normalize/*.underlying*/.isStructuralRefinement
-
-
- /** Is this symbol a member of class `clazz'
- */
- def isMemberOf(clazz: Symbol) =
- clazz.info.member(name).alternatives contains this
-
- /** A a member of class `base' is incomplete if
- * (1) it is declared deferred or
- * (2) it is abstract override and its super symbol in `base' is
- * nonexistent or incomplete.
- *
- * @param base ...
- * @return ...
- */
- final def isIncompleteIn(base: Symbol): Boolean =
- this.isDeferred ||
- (this hasFlag ABSOVERRIDE) && {
- val supersym = superSymbol(base)
- supersym == NoSymbol || supersym.isIncompleteIn(base)
- }
-
- // Does not always work if the rawInfo is a SourcefileLoader, see comment
- // in "def coreClassesFirst" in Global.
- final def exists: Boolean =
- this != NoSymbol && (!owner.isPackageClass || { rawInfo.load(this); rawInfo != NoType })
-
- final def isInitialized: Boolean =
- validTo != NoPeriod
-
- final def isStableClass: Boolean = {
- def hasNoAbstractTypeMember(clazz: Symbol): Boolean =
- (clazz hasFlag STABLE) || {
- var e = clazz.info.decls.elems
- while ((e ne null) && !(e.sym.isAbstractType && info.member(e.sym.name) == e.sym))
- e = e.next
- e == null
- }
- def checkStable() =
- (info.baseClasses forall hasNoAbstractTypeMember) && { setFlag(STABLE); true }
- isClass && (hasFlag(STABLE) || checkStable())
- }
-
-
- /** The variance of this symbol as an integer */
- final def variance: Int =
- if (isCovariant) 1
- else if (isContravariant) -1
- else 0
-
-// Flags, owner, and name attributes --------------------------------------------------------------
-
- def owner: Symbol = rawowner
- override final def owner_=(owner: Symbol) {
- if (originalOwner contains this) ()
- else originalOwner(this) = rawowner
-
- rawowner = owner
- }
- private[Symbols] def flattenName(): Name = {
- // TODO: this assertion causes me a lot of trouble in the interpeter in situations
- // where everything proceeds smoothly if there's no assert. I don't think calling "name"
- // on a symbol is the right place to throw fatal exceptions if things don't look right.
- // It really hampers exploration.
- assert(rawowner.isClass, "fatal: %s has non-class owner %s after flatten.".format(rawname + idString, rawowner))
- nme.flattenedName(rawowner.name, rawname)
- }
-
- def ownerChain: List[Symbol] = this :: owner.ownerChain
- def enclClassChain: List[Symbol] = {
- if (this eq NoSymbol) Nil
- else if (isClass && !isPackageClass) this :: owner.enclClassChain
- else owner.enclClassChain
- }
-
- def ownersIterator: Iterator[Symbol] = new Iterator[Symbol] {
- private var current = Symbol.this
- def hasNext = current ne NoSymbol
- def next = { val r = current; current = current.owner; r }
- }
-
- /** same as ownerChain contains sym, but more efficient, and
- * with a twist for refinement classes. A refinement class
- * has a transowner X if an of its parents has transowner X.
- */
- def hasTransOwner(sym: Symbol): Boolean = {
- var o = this
- while ((o ne sym) && (o ne NoSymbol)) o = o.owner
- (o eq sym) ||
- isRefinementClass && (info.parents exists (_.typeSymbol.hasTransOwner(sym)))
- }
-
- def name: Name = rawname
-
- final def name_=(name: Name) {
- if (name != rawname) {
- if (owner.isClass) {
- var ifs = owner.infos
- while (ifs != null) {
- ifs.info.decls.rehash(this, name)
- ifs = ifs.prev
- }
- }
- rawname = name
- }
- }
-
- /** If this symbol has an expanded name, its original name, otherwise its name itself.
- * @see expandName
- */
- def originalName = nme.originalName(name)
-
- final def flags: Long = {
- val fs = rawflags & phase.flagMask
- (fs | ((fs & LateFlags) >>> LateShift)) & ~(fs >>> AntiShift)
- }
- override final def flags_=(fs: Long) = rawflags = fs
- final def setFlag(mask: Long): this.type = { rawflags = rawflags | mask; this }
- final def resetFlag(mask: Long): this.type = { rawflags = rawflags & ~mask; this }
- final def getFlag(mask: Long): Long = flags & mask
- final def resetFlags() { rawflags = rawflags & TopLevelCreationFlags }
-
- /** The class or term up to which this symbol is accessible,
- * or RootClass if it is public.
- */
- def accessBoundary(base: Symbol): Symbol = {
- if (hasFlag(PRIVATE) || isLocal) owner
- else if (hasAccessBoundary && !phase.erasedTypes) privateWithin
- else if (hasFlag(PROTECTED)) base
- else RootClass
- }
-
- def isLessAccessibleThan(other: Symbol): Boolean = {
- val tb = this.accessBoundary(owner)
- val ob1 = other.accessBoundary(owner)
- val ob2 = ob1.linkedClassOfClass
- var o = tb
- while (o != NoSymbol && o != ob1 && o != ob2) {
- o = o.owner
- }
- o != NoSymbol && o != tb
- }
-
-// Info and Type -------------------------------------------------------------------
-
- private[Symbols] var infos: TypeHistory = null
-
- /** Get type. The type of a symbol is:
- * for a type symbol, the type corresponding to the symbol itself,
- * @M you should use tpeHK for a type symbol with type parameters if
- * the kind of the type need not be *, as tpe introduces dummy arguments
- * to generate a type of kind *
- * for a term symbol, its usual type.
- * See the tpe/tpeHK overrides in TypeSymbol for more.
- */
- override def tpe: Type = info
- def tpeHK: Type = tpe
-
- /** Get type info associated with symbol at current phase, after
- * ensuring that symbol is initialized (i.e. type is completed).
- */
- override def info: Type = try {
- // Eugene: insert same thread assertion here
- var cnt = 0
- while (validTo == NoPeriod) {
- //if (settings.debug.value) System.out.println("completing " + this);//DEBUG
- assert(infos ne null, this.name)
- assert(infos.prev eq null, this.name)
- val tp = infos.info
- //if (settings.debug.value) System.out.println("completing " + this.rawname + tp.getClass());//debug
-
- if ((rawflags & LOCKED) != 0L) { // rolled out once for performance
- lock {
- setInfo(ErrorType)
- throw CyclicReference(this, tp)
- }
- } else {
- rawflags |= LOCKED
-// activeLocks += 1
- // lockedSyms += this
- }
- val current = phase
- try {
- phase = phaseOf(infos.validFrom)
- tp.complete(this)
- } finally {
- unlock()
- phase = current
- }
- cnt += 1
- // allow for two completions:
- // one: sourceCompleter to LazyType, two: LazyType to completed type
- if (cnt == 3) abort("no progress in completing " + this + ":" + tp)
- }
- val result = rawInfo
- result
- } catch {
- case ex: CyclicReference =>
- if (settings.debug.value) println("... trying to complete "+this)
- throw ex
- }
-
- override def info_=(info: Type) {
- assert(info ne null)
- infos = TypeHistory(currentPeriod, info, null)
- unlock()
- validTo = if (info.isComplete) currentPeriod else NoPeriod
- }
-
- /** Set initial info. */
- def setInfo(info: Type): this.type = { info_=(info); this }
-
- def setInfoOwnerAdjusted(info: Type): this.type = setInfo(info.atOwner(this))
-
- /** Set new info valid from start of this phase. */
- final def updateInfo(info: Type): Symbol = {
- assert(phaseId(infos.validFrom) <= phase.id)
- if (phaseId(infos.validFrom) == phase.id) infos = infos.prev
- infos = TypeHistory(currentPeriod, info, infos)
- validTo = if (info.isComplete) currentPeriod else NoPeriod
- this
- }
-
- def hasRawInfo: Boolean = infos ne null
-
- /** Return info without checking for initialization or completing */
- def rawInfo: Type = {
- var infos = this.infos
- assert(infos != null)
- val curPeriod = currentPeriod
- val curPid = phaseId(curPeriod)
-
- if (validTo != NoPeriod) {
- // skip any infos that concern later phases
- while (curPid < phaseId(infos.validFrom) && infos.prev != null)
- infos = infos.prev
-
- if (validTo < curPeriod) {
- // adapt any infos that come from previous runs
- val current = phase
- try {
- infos = adaptInfos(infos)
-
- //assert(runId(validTo) == currentRunId, name)
- //assert(runId(infos.validFrom) == currentRunId, name)
-
- if (validTo < curPeriod) {
- var itr = infoTransformers.nextFrom(phaseId(validTo))
- infoTransformers = itr; // caching optimization
- while (itr.pid != NoPhase.id && itr.pid < current.id) {
- phase = phaseWithId(itr.pid)
- val info1 = itr.transform(this, infos.info)
- if (info1 ne infos.info) {
- infos = TypeHistory(currentPeriod + 1, info1, infos)
- this.infos = infos
- }
- validTo = currentPeriod + 1 // to enable reads from same symbol during info-transform
- itr = itr.next
- }
- validTo = if (itr.pid == NoPhase.id) curPeriod
- else period(currentRunId, itr.pid)
- }
- } finally {
- phase = current
- }
- }
- }
- infos.info
- }
-
- // adapt to new run in fsc.
- private def adaptInfos(infos: TypeHistory): TypeHistory =
- if (infos == null || runId(infos.validFrom) == currentRunId) {
- infos
- } else {
- val prev1 = adaptInfos(infos.prev)
- if (prev1 ne infos.prev) prev1
- else {
- def adaptToNewRun(info: Type): Type =
- if (isPackageClass) info else adaptToNewRunMap(info)
- val pid = phaseId(infos.validFrom)
- validTo = period(currentRunId, pid)
- phase = phaseWithId(pid)
- val info1 = adaptToNewRun(infos.info)
- if (info1 eq infos.info) {
- infos.validFrom = validTo
- infos
- } else {
- this.infos = TypeHistory(validTo, info1, prev1)
- this.infos
- }
- }
- }
-
- /** Initialize the symbol */
- final def initialize: this.type = {
- if (!isInitialized) info
- this
- }
-
- /** Was symbol's type updated during given phase? */
- final def isUpdatedAt(pid: Phase#Id): Boolean = {
- var infos = this.infos
- while ((infos ne null) && phaseId(infos.validFrom) != pid + 1) infos = infos.prev
- infos ne null
- }
-
- /** Was symbol's type updated during given phase? */
- final def hasTypeAt(pid: Phase#Id): Boolean = {
- var infos = this.infos
- while ((infos ne null) && phaseId(infos.validFrom) > pid) infos = infos.prev
- infos ne null
- }
-
- /** Modify term symbol's type so that a raw type C is converted to an existential C[_]
- *
- * This is done in checkAccessible and overriding checks in refchecks
- * We can't do this on class loading because it would result in infinite cycles.
- */
- final def cookJavaRawInfo() {
- if (hasFlag(TRIEDCOOKING)) return else setFlag(TRIEDCOOKING) // only try once...
- val oldInfo = info
- doCookJavaRawInfo()
- }
-
- protected def doCookJavaRawInfo(): Unit
-
-
- /** The type constructor of a symbol is:
- * For a type symbol, the type corresponding to the symbol itself,
- * excluding parameters.
- * Not applicable for term symbols.
- */
- def typeConstructor: Type =
- abort("typeConstructor inapplicable for " + this)
-
- /** The type parameters of this symbol, without ensuring type completion.
- * assumption: if a type starts out as monomorphic, it will not acquire
- * type parameters later.
- */
- def unsafeTypeParams: List[Symbol] =
- if (isMonomorphicType) List()
- else {
- val current = phase
- try {
- while ((phase.prev ne NoPhase) && phase.prev.keepsTypeParams) phase = phase.prev
- if (phase ne current) phase = phase.next
- if (settings.debug.value && settings.verbose.value && (phase ne current))
- log("checking unsafeTypeParams(" + this + ") at: " + current + " reading at: " + phase)
- rawInfo.typeParams
- } finally {
- phase = current
- }
- }
-
- /** The type parameters of this symbol.
- * assumption: if a type starts out as monomorphic, it will not acquire
- * type parameters later.
- */
- def typeParams: List[Symbol] =
- if (isMonomorphicType)
- List()
- else {
- if (validTo == NoPeriod) {
- val current = phase
- try {
- phase = phaseOf(infos.validFrom)
- rawInfo.load(this)
- } finally {
- phase = current
- }
- }
- rawInfo.typeParams
- }
-
- /** The value parameter sections of this symbol.
- */
- def paramss: List[List[Symbol]] = info.paramss
- def hasParamWhich(cond: Symbol => Boolean) = paramss exists (_ exists cond)
-
- /** The least proper supertype of a class; includes all parent types
- * and refinement where needed. You need to compute that in a situation like this:
- * {
- * class C extends P { ... }
- * new C
- * }
- */
- def classBound: Type = {
- val tp = refinedType(info.parents, owner)
- val thistp = tp.typeSymbol.thisType
- val oldsymbuf = new ListBuffer[Symbol]
- val newsymbuf = new ListBuffer[Symbol]
- for (sym <- info.decls.toList) {
- // todo: what about public references to private symbols?
- if (sym.isPublic && !sym.isConstructor) {
- oldsymbuf += sym
- newsymbuf += (
- if (sym.isClass)
- tp.typeSymbol.newAbstractType(sym.pos, sym.name.toTypeName).setInfo(sym.existentialBound)
- else
- sym.cloneSymbol(tp.typeSymbol))
- }
- }
- val oldsyms = oldsymbuf.toList
- val newsyms = newsymbuf.toList
- for (sym <- newsyms) {
- addMember(thistp, tp, sym.setInfo(sym.info.substThis(this, thistp).substSym(oldsyms, newsyms)))
- }
- tp
- }
-
- /** If we quantify existentially over this symbol,
- * the bound of the type variable that stands for it
- * pre: symbol is a term, a class, or an abstract type (no alias type allowed)
- */
- def existentialBound: Type =
- if (this.isClass)
- polyType(this.typeParams, TypeBounds.upper(this.classBound))
- else if (this.isAbstractType)
- this.info
- else if (this.isTerm)
- TypeBounds.upper(intersectionType(List(this.tpe, SingletonClass.tpe)))
- else
- abort("unexpected alias type: "+this)
-
- /** Reset symbol to initial state
- */
- def reset(completer: Type) {
- resetFlags
- infos = null
- validTo = NoPeriod
- //limit = NoPhase.id
- setInfo(completer)
- }
-
- /**
- * Adds the interface scala.Serializable to the parents of a ClassInfoType.
- * Note that the tree also has to be updated accordingly.
- */
- def makeSerializable() {
- info match {
- case ci @ ClassInfoType(_, _, _) =>
- updateInfo(ci.copy(parents = ci.parents ::: List(SerializableClass.tpe)))
- case i =>
- abort("Only ClassInfoTypes can be made serializable: "+ i)
- }
- }
-
-// Comparisons ----------------------------------------------------------------
-
- /** A total ordering between symbols that refines the class
- * inheritance graph (i.e. subclass.isLess(superclass) always holds).
- * the ordering is given by: (_.isType, -_.baseTypeSeq.length) for type symbols, followed by `id'.
- */
- final def isLess(that: Symbol): Boolean = {
- def baseTypeSeqLength(sym: Symbol) =
- if (sym.isAbstractType) 1 + sym.info.bounds.hi.baseTypeSeq.length
- else sym.info.baseTypeSeq.length
- if (this.isType)
- (that.isType &&
- { val diff = baseTypeSeqLength(this) - baseTypeSeqLength(that)
- diff > 0 || diff == 0 && this.id < that.id })
- else
- that.isType || this.id < that.id
- }
-
- /** A partial ordering between symbols.
- * (this isNestedIn that) holds iff this symbol is defined within
- * a class or method defining that symbol
- */
- final def isNestedIn(that: Symbol): Boolean =
- owner == that || owner != NoSymbol && (owner isNestedIn that)
-
- /** Is this class symbol a subclass of that symbol? */
- final def isNonBottomSubClass(that: Symbol): Boolean =
- this == that || this.isError || that.isError ||
- info.baseTypeIndex(that) >= 0
-
- final def isSubClass(that: Symbol): Boolean = (
- isNonBottomSubClass(that) ||
- this == NothingClass ||
- this == NullClass &&
- (that == AnyClass ||
- that != NothingClass && (that isSubClass ObjectClass))
- )
- final def isNumericSubClass(that: Symbol): Boolean =
- definitions.isNumericSubClass(this, that)
-
-// Overloaded Alternatives ---------------------------------------------------------
-
- def alternatives: List[Symbol] =
- if (hasFlag(OVERLOADED)) info.asInstanceOf[OverloadedType].alternatives
- else List(this)
-
- def filter(cond: Symbol => Boolean): Symbol =
- if (hasFlag(OVERLOADED)) {
- //assert(info.isInstanceOf[OverloadedType], "" + this + ":" + info);//DEBUG
- val alts = alternatives
- val alts1 = alts filter cond
- if (alts1 eq alts) this
- else if (alts1.isEmpty) NoSymbol
- else if (alts1.tail.isEmpty) alts1.head
- else owner.newOverloaded(info.prefix, alts1)
- } else if (this == NoSymbol || cond(this)) {
- this
- } else NoSymbol
-
- def suchThat(cond: Symbol => Boolean): Symbol = {
- val result = filter(cond)
- assert(!(result hasFlag OVERLOADED), result.alternatives)
- result
- }
-
-// Cloneing -------------------------------------------------------------------
-
- /** A clone of this symbol */
- final def cloneSymbol: Symbol =
- cloneSymbol(owner)
-
- /** A clone of this symbol, but with given owner */
- final def cloneSymbol(owner: Symbol): Symbol = {
- val newSym = cloneSymbolImpl(owner)
- newSym.privateWithin = privateWithin
- newSym.setInfo(info.cloneInfo(newSym))
- .setFlag(this.rawflags).setAnnotations(this.annotations)
- }
-
- /** Internal method to clone a symbol's implementation without flags or type
- */
- def cloneSymbolImpl(owner: Symbol): Symbol
-
-// Access to related symbols --------------------------------------------------
-
- /** The primary constructor of a class */
- def primaryConstructor: Symbol = {
- var c = info.decl(
- if (isTrait || isImplClass) nme.MIXIN_CONSTRUCTOR
- else nme.CONSTRUCTOR)
- c = if (c hasFlag OVERLOADED) c.alternatives.head else c
- //assert(c != NoSymbol)
- c
- }
-
- /** The self symbol of a class with explicit self type, or else the
- * symbol itself.
- */
- def thisSym: Symbol = this
-
- /** The type of `this' in a class, or else the type of the symbol itself. */
- def typeOfThis = thisSym.tpe
-
- /** If symbol is a class, the type <code>this.type</code> in this class,
- * otherwise <code>NoPrefix</code>.
- * We always have: thisType <:< typeOfThis
- */
- def thisType: Type = NoPrefix
-
- /** Return every accessor of a primary constructor parameter in this case class.
- * The scope declarations may be out of order because fields with less than private
- * access are first given a regular getter, then a new renamed getter which comes
- * later in the declaration list. For this reason we have to pinpoint the
- * right accessors by starting with the original fields (which will be in the right
- * order) and looking for getters with applicable names. The getters may have the
- * standard name "foo" or may have been renamed to "foo$\d+" in SyntheticMethods.
- * See ticket #1373.
- */
- final def caseFieldAccessors: List[Symbol] = {
- val allWithFlag = info.decls.toList filter (_.isCaseAccessor)
- val (accessors, fields) = allWithFlag partition (_.isMethod)
-
- def findAccessor(field: Symbol): Symbol = {
- // There is another renaming the field may have undergone, for instance as in
- // ticket #2175: case class Property[T](private var t: T), t becomes Property$$t.
- // So we use the original name everywhere.
- val getterName = nme.getterName(field.originalName)
-
- // Note this is done in two passes intentionally, to ensure we pick up the original
- // getter if present before looking for the renamed getter.
- def origGetter = accessors find (_.originalName == getterName)
- def renamedGetter = accessors find (_.originalName startsWith (getterName + "$"))
- val accessorName = origGetter orElse renamedGetter
-
- // This fails more gracefully rather than throw an Error as it used to because
- // as seen in #2625, we can reach this point with an already erroneous tree.
- accessorName getOrElse NoSymbol
- // throw new Error("Could not find case accessor for %s in %s".format(field, this))
- }
-
- fields map findAccessor
- }
-
- final def constrParamAccessors: List[Symbol] =
- info.decls.toList filter (sym => !sym.isMethod && sym.isParamAccessor)
-
- /** The symbol accessed by this accessor (getter or setter) function. */
- final def accessed: Symbol = accessed(owner.info)
-
- /** The symbol accessed by this accessor function, but with given owner type */
- final def accessed(ownerTp: Type): Symbol = {
- assert(hasAccessorFlag)
- ownerTp.decl(nme.getterToLocal(if (isSetter) nme.setterToGetter(name) else name))
- }
-
- /** The implementation class of a trait */
- final def implClass: Symbol = owner.info.decl(nme.implClassName(name))
-
- /** The class that is logically an outer class of given `clazz'.
- * This is the enclosing class, except for classes defined locally to constructors,
- * where it is the outer class of the enclosing class
- */
- final def outerClass: Symbol =
- if (owner.isClass) owner
- else if (isClassLocalToConstructor) owner.enclClass.outerClass
- else owner.outerClass
-
- /** For a paramaccessor: a superclass paramaccessor for which this symbol
- * is an alias, NoSymbol for all others
- */
- def alias: Symbol = NoSymbol
-
- /** For a lazy value, its lazy accessor. NoSymbol for all others */
- def lazyAccessor: Symbol = NoSymbol
-
- /** If this is a lazy value, the lazy accessor; otherwise this symbol. */
- def lazyAccessorOrSelf: Symbol = if (isLazy) lazyAccessor else this
-
- /** For an outer accessor: The class from which the outer originates.
- * For all other symbols: NoSymbol
- */
- def outerSource: Symbol = NoSymbol
-
- /** The superclass of this class */
- def superClass: Symbol = if (info.parents.isEmpty) NoSymbol else info.parents.head.typeSymbol
-
- /** The directly or indirectly inherited mixins of this class
- * except for mixin classes inherited by the superclass. Mixin classes appear
- * in linearization order.
- */
- def mixinClasses: List[Symbol] = {
- val sc = superClass
- ancestors takeWhile (sc ne)
- }
-
- /** All directly or indirectly inherited classes.
- */
- def ancestors: List[Symbol] = info.baseClasses drop 1
-
- /** The package class containing this symbol, or NoSymbol if there
- * is not one. */
- def enclosingPackageClass: Symbol =
- if (this == NoSymbol) this else {
- var packSym = this.owner
- while (packSym != NoSymbol && !packSym.isPackageClass)
- packSym = packSym.owner
- packSym
- }
-
- /** The package containing this symbol, or NoSymbol if there
- * is not one. */
- def enclosingPackage: Symbol = {
- val packSym = enclosingPackageClass
- if (packSym != NoSymbol) packSym.companionModule
- else packSym
- }
-
- /** Return the original enclosing method of this symbol. It should return
- * the same thing as enclMethod when called before lambda lift,
- * but it preserves the original nesting when called afterwards.
- */
- def originalEnclosingMethod: Symbol = {
- if (isMethod) this
- else {
- val owner = originalOwner.getOrElse(this, rawowner)
- if (isLocalDummy) owner.enclClass.primaryConstructor
- else owner.originalEnclosingMethod
- }
- }
-
- /** The method or class which logically encloses the current symbol.
- * If the symbol is defined in the initialization part of a template
- * this is the template's primary constructor, otherwise it is
- * the physically enclosing method or class.
- *
- * Example 1:
- *
- * def f() { val x = { def g() = ...; g() } }
- *
- * In this case the owner chain of `g' is `x', followed by `f' and
- * `g.logicallyEnclosingMember == f`.
- *
- * Example 2:
- *
- * class C {
- * def <init> = { ... }
- * val x = { def g() = ...; g() } }
- * }
- *
- * In this case the owner chain of `g' is `x', followed by `C' but
- * g.logicallyEnclosingMember is the primary constructor symbol `<init>'
- * (or, for traits: `$init') of `C'.
- *
- */
- def logicallyEnclosingMember: Symbol =
- if (isLocalDummy) enclClass.primaryConstructor
- else if (isMethod || isClass) this
- else owner.logicallyEnclosingMember
-
- /** The top-level class containing this symbol */
- def toplevelClass: Symbol =
- if (owner.isPackageClass) {
- if (isClass) this else moduleClass
- } else owner.toplevelClass
-
- /** Is this symbol defined in the same scope and compilation unit as `that' symbol?
- */
- def isCoDefinedWith(that: Symbol) = (
- (this.rawInfo ne NoType) &&
- (this.owner == that.owner) && {
- !this.owner.isPackageClass ||
- (this.sourceFile eq null) ||
- (that.sourceFile eq null) ||
- (this.sourceFile == that.sourceFile) || {
- // recognize companion object in separate file and fail, else compilation
- // appears to succeed but highly opaque errors come later: see bug #1286
- if (this.sourceFile.path != that.sourceFile.path)
- throw InvalidCompanions(this, that)
-
- false
- }
- }
- )
-
- /** The internal representation of classes and objects:
- *
- * class Foo is "the class" or sometimes "the plain class"
- * object Foo is "the module"
- * class Foo$ is "the module class" (invisible to the user: it implements object Foo)
- *
- * class Foo <
- * ^ ^ (2) \
- * | | | \
- * | (5) | (3)
- * | | | \
- * (1) v v \
- * object Foo (4)-> > class Foo$
- *
- * (1) companionClass
- * (2) companionModule
- * (3) linkedClassOfClass
- * (4) moduleClass
- * (5) companionSymbol
- */
-
- /** For a module or case factory: the class with the same name in the same package.
- * For all others: NoSymbol
- * Note: does not work for classes owned by methods, see Namers.companionClassOf
- *
- * object Foo . companionClass --> class Foo
- */
- final def companionClass: Symbol = {
- if (this != NoSymbol)
- flatOwnerInfo.decl(name.toTypeName).suchThat(_ isCoDefinedWith this)
- else NoSymbol
- }
-
- /** A helper method that factors the common code used the discover a
- * companion module of a class. If a companion module exists, its symbol is
- * returned, otherwise, `NoSymbol` is returned. The method assumes that
- * `this` symbol has already been checked to be a class (using `isClass`).
- */
- private final def companionModule0: Symbol =
- flatOwnerInfo.decl(name.toTermName).suchThat(
- sym => sym.hasFlag(MODULE) && (sym isCoDefinedWith this) && !sym.isMethod)
-
- /** For a class: the module or case class factory with the same name in the same package.
- * For all others: NoSymbol
- * Note: does not work for modules owned by methods, see Namers.companionModuleOf
- *
- * class Foo . companionModule --> object Foo
- */
- final def companionModule: Symbol =
- if (isClass && !isRefinementClass) companionModule0
- else NoSymbol
-
- /** For a module: its linked class
- * For a plain class: its linked module or case factory.
- * Note: does not work for modules owned by methods, see Namers.companionSymbolOf
- *
- * class Foo <-- companionSymbol --> object Foo
- */
- final def companionSymbol: Symbol =
- if (isTerm) companionClass
- else if (isClass) companionModule0
- else NoSymbol
-
- /** For a module class: its linked class
- * For a plain class: the module class of its linked module.
- *
- * class Foo <-- linkedClassOfClass --> class Foo$
- */
- final def linkedClassOfClass: Symbol =
- if (isModuleClass) companionClass else companionModule.moduleClass
-
- /**
- * Returns the rawInfo of the owner. If the current phase has flat classes,
- * it first applies all pending type maps to this symbol.
- *
- * assume this is the ModuleSymbol for B in the following definition:
- * package p { class A { object B { val x = 1 } } }
- *
- * The owner after flatten is "package p" (see "def owner"). The flatten type map enters
- * symbol B in the decls of p. So to find a linked symbol ("object B" or "class B")
- * we need to apply flatten to B first. Fixes #2470.
- */
- private final def flatOwnerInfo: Type = {
- if (needsFlatClasses)
- info
- owner.rawInfo
- }
-
- /** If this symbol is an implementation class, its interface, otherwise the symbol itself
- * The method follows two strategies to determine the interface.
- * - during or after erasure, it takes the last parent of the implementation class
- * (which is always the interface, by convention)
- * - before erasure, it looks up the interface name in the scope of the owner of the class.
- * This only works for implementation classes owned by other classes or traits.
- */
- final def toInterface: Symbol =
- if (isImplClass) {
- val result =
- if (phase.next.erasedTypes) {
- assert(!tpe.parents.isEmpty, this)
- tpe.parents.last.typeSymbol
- } else {
- owner.info.decl(nme.interfaceName(name))
- }
- assert(result != NoSymbol, this)
- result
- } else this
-
- /** The module class corresponding to this module.
- */
- def moduleClass: Symbol = NoSymbol
-
- /** The non-private symbol whose type matches the type of this symbol
- * in in given class.
- *
- * @param ofclazz The class containing the symbol's definition
- * @param site The base type from which member types are computed
- */
- final def matchingSymbol(ofclazz: Symbol, site: Type): Symbol =
- ofclazz.info.nonPrivateDecl(name).filter(sym =>
- !sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
-
- /** The non-private member of `site' whose type and name match the type of this symbol
- */
- final def matchingSymbol(site: Type, admit: Long = 0L): Symbol =
- site.nonPrivateMemberAdmitting(name, admit).filter(sym =>
- !sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
-
- /** The symbol overridden by this symbol in given class `ofclazz'.
- * @pre 'ofclazz' is a base class of this symbol's owner.
- */
- final def overriddenSymbol(ofclazz: Symbol): Symbol =
- if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, owner.thisType)
-
- /** The symbol overriding this symbol in given subclass `ofclazz'
- * @pre: `ofclazz' is a subclass of this symbol's owner
- */
- final def overridingSymbol(ofclazz: Symbol): Symbol =
- if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, ofclazz.thisType)
-
- /** Returns all symbols overriden by this symbol
- */
- final def allOverriddenSymbols: List[Symbol] =
- if (!owner.isClass) Nil
- else owner.ancestors map overriddenSymbol filter (_ != NoSymbol)
-
- /** Returns all symbols overridden by this symbol, plus all matching symbols
- * defined in parents of the selftype
- */
- final def extendedOverriddenSymbols: List[Symbol] =
- if (!owner.isClass) Nil
- else owner.thisSym.ancestors map overriddenSymbol filter (_ != NoSymbol)
-
- /** The symbol accessed by a super in the definition of this symbol when
- * seen from class `base'. This symbol is always concrete.
- * pre: `this.owner' is in the base class sequence of `base'.
- */
- final def superSymbol(base: Symbol): Symbol = {
- var bcs = base.info.baseClasses.dropWhile(owner !=).tail
- var sym: Symbol = NoSymbol
- while (!bcs.isEmpty && sym == NoSymbol) {
- if (!bcs.head.isImplClass)
- sym = matchingSymbol(bcs.head, base.thisType).suchThat(!_.isDeferred)
- bcs = bcs.tail
- }
- sym
- }
-
- /** The getter of this value or setter definition in class `base', or NoSymbol if
- * none exists.
- */
- final def getter(base: Symbol): Symbol = {
- val getterName = if (isSetter) nme.setterToGetter(name) else nme.getterName(name)
- base.info.decl(getterName) filter (_.hasAccessorFlag)
- }
-
- /** The setter of this value or getter definition, or NoSymbol if none exists */
- final def setter(base: Symbol): Symbol = setter(base, false)
-
- final def setter(base: Symbol, hasExpandedName: Boolean): Symbol = {
- var sname = nme.getterToSetter(nme.getterName(name))
- if (hasExpandedName) sname = nme.expandedSetterName(sname, base)
- base.info.decl(sname) filter (_.hasAccessorFlag)
- }
-
- /** The case module corresponding to this case class
- * @pre case class is a member of some other class or package
- */
- final def caseModule: Symbol = {
- var modname = name.toTermName
- if (privateWithin.isClass && !privateWithin.isModuleClass && !hasFlag(EXPANDEDNAME))
- modname = nme.expandedName(modname, privateWithin)
- initialize.owner.info.decl(modname).suchThat(_.isModule)
- }
-
- /** If this symbol is a type parameter skolem (not an existential skolem!)
- * its corresponding type parameter, otherwise this */
- def deSkolemize: Symbol = this
-
- /** If this symbol is an existential skolem the location (a Tree or null)
- * where it was unpacked. Resulttype is AnyRef because trees are not visible here. */
- def unpackLocation: AnyRef = null
-
- /** Remove private modifier from symbol `sym's definition. If `sym' is a
- * term symbol rename it by expanding its name to avoid name clashes
- */
- final def makeNotPrivate(base: Symbol) {
- if (this hasFlag PRIVATE) {
- setFlag(notPRIVATE)
- if (isMethod && !isDeferred) setFlag(lateFINAL)
- if (!isStaticModule && !isClassConstructor) {
- expandName(base)
- if (isModule) moduleClass.makeNotPrivate(base)
- }
- }
- }
-
- /** change name by appending $$<fully-qualified-name-of-class `base'>
- * Do the same for any accessed symbols or setters/getters
- */
- def expandName(base: Symbol) {
- if (this.isTerm && this != NoSymbol && !hasFlag(EXPANDEDNAME)) {
- setFlag(EXPANDEDNAME)
- if (hasAccessorFlag && !isDeferred) {
- accessed.expandName(base)
- } else if (hasGetter) {
- getter(owner).expandName(base)
- setter(owner).expandName(base)
- }
- name = nme.expandedName(name, base)
- if (isType) name = name
- }
- }
-/* code for fixing nested objects
- def expandModuleClassName() {
- name = newTypeName(name.toString + "$")
- }
-
- def isExpandedModuleClass: Boolean = name(name.length - 1) == '$'
-*/
- def sourceFile: AbstractFile =
- if (isModule) moduleClass.sourceFile
- else toplevelClass.sourceFile
-
- def sourceFile_=(f: AbstractFile) {
- abort("sourceFile_= inapplicable for " + this)
- }
-
- /** If this is a sealed class, its known direct subclasses. Otherwise Set.empty */
- def children: List[Symbol] = Nil
-
- /** Recursively finds all sealed descendants and returns a sorted list.
- * Includes this symbol unless it is abstract, but as value classes are
- * marked abstract so they can't be instantiated, they are special cased.
- */
- def sealedDescendants: List[Symbol] = {
- val kids = children flatMap (_.sealedDescendants)
- val all = if (isAbstractClass && !isValueClass(this)) kids else this :: kids
-
- all.distinct sortBy (_.sealedSortName)
- }
-
-// ToString -------------------------------------------------------------------
-
- /** A tag which (in the ideal case) uniquely identifies class symbols */
- final def tag = fullName.##
-
- /** The simple name of this Symbol */
- final def simpleName: Name = name
-
- /** The String used to order otherwise identical sealed symbols.
- * This uses data which is stable across runs and variable classpaths
- * (the initial Name) before falling back on id, which varies depending
- * on exactly when a symbol is loaded.
- */
- final def sealedSortName = initName + "#" + id
-
- /** String representation of symbol's definition key word */
- final def keyString: String =
- if (isJavaInterface) "interface"
- else if (isTrait) "trait"
- else if (isClass) "class"
- else if (isType && !isParameter) "type"
- else if (isVariable) "var"
- else if (isPackage) "package"
- else if (isModule) "object"
- else if (isSourceMethod) "def"
- else if (isTerm && (!isParameter || isParamAccessor)) "val"
- else ""
-
- /** Accurate string representation of symbols' kind, suitable for developers. */
- final def accurateKindString: String =
- if (isPackage) "package"
- else if (isPackageClass) "package class"
- else if (isPackageObject) "package object"
- else if (isPackageObjectClass) "package object class"
- else if (isRefinementClass) "refinement class"
- else if (isModule) "module"
- else if (isModuleClass) "module class"
- else sanitizedKindString
-
- /** String representation of symbol's kind, suitable for the masses. */
- private def sanitizedKindString: String =
- if (isPackage || isPackageClass) "package"
- else if (isModule || isModuleClass) "object"
- else if (isAnonymousClass) "anonymous class"
- else if (isRefinementClass) ""
- else if (isTrait) "trait"
- else if (isClass) "class"
- else if (isType) "type"
- else if (isTerm && isLazy) "lazy value"
- else if (isVariable) "variable"
- else if (isClassConstructor) "constructor"
- else if (isSourceMethod) "method"
- else if (isTerm) "value"
- else ""
-
- final def kindString: String =
- if (settings.debug.value) accurateKindString
- else sanitizedKindString
-
- /** If the name of the symbol's owner should be used when you care about
- * seeing an interesting name: in such cases this symbol is e.g. a method
- * parameter with a synthetic name, a constructor named "this", an object
- * "package", etc. The kind string, if non-empty, will be phrased relative
- * to the name of the owner.
- */
- def hasMeaninglessName = (
- isSetterParameter // x$1
- || isClassConstructor // this
- || isPackageObject // package
- || isPackageObjectClass // package$
- || isRefinementClass // <refinement>
- )
-
- /** String representation of symbol's simple name.
- * If !settings.debug translates expansions of operators back to operator symbol.
- * E.g. $eq => =.
- * If settings.uniqid, adds id.
- */
- def nameString = decodedName + idString
-
- /** If settings.uniqid is set, the symbol's id, else "" */
- final def idString = if (settings.uniqid.value) "#"+id else ""
-
- /** String representation, including symbol's kind e.g., "class Foo", "method Bar".
- * If hasMeaninglessName is true, uses the owner's name to disambiguate identity.
- */
- override def toString = compose(
- kindString,
- if (hasMeaninglessName) owner.nameString else nameString
- )
-
- /** String representation of location.
- */
- def ownsString = {
- val owns = owner.skipPackageObject
- if (owns.isClass && !owns.printWithoutPrefix && !isScalaPackageClass) "" + owns
- else ""
- }
-
- /** String representation of location, plus a preposition. Doesn't do much,
- * for backward compatibility reasons.
- */
- def locationString = ownsString match {
- case "" => ""
- case s => " in " + s
- }
- def fullLocationString = toString + locationString
-
- /** String representation of symbol's definition following its name */
- final def infoString(tp: Type): String = {
- def typeParamsString: String = tp match {
- case PolyType(tparams, _) if tparams.nonEmpty =>
- (tparams map (_.defString)).mkString("[", ",", "]")
- case _ =>
- ""
- }
- if (isClass)
- typeParamsString + " extends " + tp.resultType
- else if (isAliasType)
- typeParamsString + " = " + tp.resultType
- else if (isAbstractType)
- typeParamsString + {
- tp.resultType match {
- case TypeBounds(lo, hi) =>
- (if (lo.typeSymbol == NothingClass) "" else " >: " + lo) +
- (if (hi.typeSymbol == AnyClass) "" else " <: " + hi)
- case rtp =>
- "<: " + rtp
- }
- }
- else if (isModule)
- moduleClass.infoString(tp)
- else
- tp match {
- case PolyType(tparams, res) =>
- typeParamsString + infoString(res)
- case NullaryMethodType(res) =>
- infoString(res)
- case MethodType(params, res) =>
- params.map(_.defString).mkString("(", ",", ")") + infoString(res)
- case _ =>
- ": " + tp
- }
- }
-
- def infosString = infos.toString()
-
- def hasFlagsToString(mask: Long): String = flagsToString(
- flags & mask,
- if (hasAccessBoundary) privateWithin.toString else ""
- )
-
- /** String representation of symbol's variance */
- def varianceString: String =
- if (variance == 1) "+"
- else if (variance == -1) "-"
- else ""
-
- def defaultFlagMask =
- if (settings.debug.value) -1L
- else if (owner.isRefinementClass) ExplicitFlags & ~OVERRIDE
- else ExplicitFlags
-
- def defaultFlagString = hasFlagsToString(defaultFlagMask)
-
- /** String representation of symbol's definition */
- def defString = compose(
- defaultFlagString,
- keyString,
- varianceString + nameString + (
- if (hasRawInfo) infoString(rawInfo) else "<_>"
- )
- )
-
- /** Concatenate strings separated by spaces */
- private def compose(ss: String*) = ss filter (_ != "") mkString " "
-
- def isSingletonExistential =
- nme.isSingletonName(name) && (info.bounds.hi.typeSymbol isSubClass SingletonClass)
-
- /** String representation of existentially bound variable */
- def existentialToString =
- if (isSingletonExistential && !settings.debug.value)
- "val " + nme.dropSingletonName(name) + ": " + dropSingletonType(info.bounds.hi)
- else defString
- }
-
- /** A class for term symbols */
- class TermSymbol(initOwner: Symbol, initPos: Position, initName: TermName)
- extends Symbol(initOwner, initPos, initName) {
- final override def isTerm = true
-
- override def name: TermName = super.name
- privateWithin = NoSymbol
-
- var referenced: Symbol = NoSymbol
-
- def cloneSymbolImpl(owner: Symbol): Symbol =
- new TermSymbol(owner, pos, name).copyAttrsFrom(this)
-
- def copyAttrsFrom(original: TermSymbol): this.type = {
- referenced = original.referenced
- this
- }
-
- private val validAliasFlags = SUPERACCESSOR | PARAMACCESSOR | MIXEDIN | SPECIALIZED
-
- override def alias: Symbol =
- if (hasFlag(validAliasFlags)) initialize.referenced
- else NoSymbol
-
- def setAlias(alias: Symbol): TermSymbol = {
- assert(alias != NoSymbol, this)
- assert(!alias.isOverloaded, alias)
- assert(hasFlag(validAliasFlags), this)
-
- referenced = alias
- this
- }
-
- override def outerSource: Symbol =
- if (name endsWith nme.OUTER) initialize.referenced
- else NoSymbol
-
- override def moduleClass: Symbol =
- if (hasFlag(MODULE)) referenced else NoSymbol
-
- def setModuleClass(clazz: Symbol): TermSymbol = {
- assert(hasFlag(MODULE))
- referenced = clazz
- this
- }
-
- def setLazyAccessor(sym: Symbol): TermSymbol = {
- assert(isLazy && (referenced == NoSymbol || referenced == sym), this)
- referenced = sym
- this
- }
-
- override def lazyAccessor: Symbol = {
- assert(isLazy, this)
- referenced
- }
-
- protected def doCookJavaRawInfo() {
- def cook(sym: Symbol) {
- require(sym hasFlag JAVA)
- // @M: I think this is more desirable, but Martin prefers to leave raw-types as-is as much as possible
- // object rawToExistentialInJava extends TypeMap {
- // def apply(tp: Type): Type = tp match {
- // // any symbol that occurs in a java sig, not just java symbols
- // // see http://lampsvn.epfl.ch/trac/scala/ticket/2454#comment:14
- // case TypeRef(pre, sym, List()) if !sym.typeParams.isEmpty =>
- // val eparams = typeParamsToExistentials(sym, sym.typeParams)
- // existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe)))
- // case _ =>
- // mapOver(tp)
- // }
- // }
- val tpe1 = rawToExistential(sym.tpe)
- // println("cooking: "+ sym +": "+ sym.tpe +" to "+ tpe1)
- if (tpe1 ne sym.tpe) {
- sym.setInfo(tpe1)
- }
- }
-
- if (isJavaDefined)
- cook(this)
- else if (hasFlag(OVERLOADED))
- for (sym2 <- alternatives)
- if (sym2 hasFlag JAVA)
- cook(sym2)
- }
- }
-
- /** A class for module symbols */
- class ModuleSymbol(initOwner: Symbol, initPos: Position, initName: TermName)
- extends TermSymbol(initOwner, initPos, initName) {
- private var flatname: TermName = null
- // This method could use a better name from someone clearer on what the condition expresses.
- private def isFlatAdjusted = !isMethod && needsFlatClasses
-
- override def owner: Symbol =
- if (isFlatAdjusted) rawowner.owner
- else rawowner
-
- override def name: TermName =
- if (isFlatAdjusted) {
- if (flatname == null)
- flatname = flattenName().toTermName
-
- flatname
- } else rawname
-
- override def cloneSymbolImpl(owner: Symbol): Symbol =
- new ModuleSymbol(owner, pos, name).copyAttrsFrom(this)
- }
-
- /** A class for method symbols */
- class MethodSymbol(initOwner: Symbol, initPos: Position, initName: TermName)
- extends TermSymbol(initOwner, initPos, initName) {
- private var mtpePeriod = NoPeriod
- private var mtpePre: Type = _
- private var mtpeResult: Type = _
- private var mtpeInfo: Type = _
-
- override def cloneSymbolImpl(owner: Symbol): Symbol =
- new MethodSymbol(owner, pos, name).copyAttrsFrom(this)
-
- def typeAsMemberOf(pre: Type): Type = {
- if (mtpePeriod == currentPeriod) {
- if ((mtpePre eq pre) && (mtpeInfo eq info)) return mtpeResult
- } else if (isValid(mtpePeriod)) {
- mtpePeriod = currentPeriod
- if ((mtpePre eq pre) && (mtpeInfo eq info)) return mtpeResult
- }
- val res = pre.computeMemberType(this)
- mtpePeriod = currentPeriod
- mtpePre = pre
- mtpeInfo = info
- mtpeResult = res
- res
- }
- }
-
- /** A class of type symbols. Alias and abstract types are direct instances
- * of this class. Classes are instances of a subclass.
- */
- class TypeSymbol(initOwner: Symbol, initPos: Position, initName: TypeName)
- extends Symbol(initOwner, initPos, initName) {
- privateWithin = NoSymbol
- private var tyconCache: Type = null
- private var tyconRunId = NoRunId
- private var tpeCache: Type = _
- private var tpePeriod = NoPeriod
-
- override def name: TypeName = super.name.asInstanceOf[TypeName]
- final override def isType = true
- override def isNonClassType = true
- override def isAbstractType = isDeferred
- override def isAliasType = !isDeferred
-
- private def newTypeRef(targs: List[Type]) = {
- val pre = if (hasFlag(PARAM | EXISTENTIAL)) NoPrefix else owner.thisType
- typeRef(pre, this, targs)
- }
-
- /** Let's say you have a type definition
- *
- * type T <: Number
- *
- * and tsym is the symbol corresponding to T. Then
- *
- * tsym.info = TypeBounds(Nothing, Number)
- * tsym.tpe = TypeRef(NoPrefix, T, List())
- */
- override def tpe: Type = {
- if (tpeCache eq NoType) throw CyclicReference(this, typeConstructor)
- if (tpePeriod != currentPeriod) {
- if (isValid(tpePeriod)) {
- tpePeriod = currentPeriod
- } else {
- if (isInitialized) tpePeriod = currentPeriod
- tpeCache = NoType
- val targs =
- if (phase.erasedTypes && this != ArrayClass) List()
- else unsafeTypeParams map (_.typeConstructor)
- //@M! use typeConstructor to generate dummy type arguments,
- // sym.tpe should not be called on a symbol that's supposed to be a higher-kinded type
- // memberType should be used instead, that's why it uses tpeHK and not tpe
- tpeCache = newTypeRef(targs)
- }
- }
- assert(tpeCache ne null/*, "" + this + " " + phase*/)//debug
- tpeCache
- }
-
- /** @M -- tpe vs tpeHK:
- *
- * tpe: creates a TypeRef with dummy type arguments and kind *
- * tpeHK: creates a TypeRef with no type arguments but with type parameters
- *
- * If typeParams is nonEmpty, calling tpe may hide errors or
- * introduce spurious ones. (For example, when deriving a type from
- * the symbol of a type argument that must be higher-kinded.) As far
- * as I can tell, it only makes sense to call tpe in conjunction
- * with a substitution that replaces the generated dummy type
- * arguments by their actual types.
- *
- * TODO: the above conditions desperately need to be enforced by code.
- */
- override def tpeHK = typeConstructor // @M! used in memberType
-
- // needed for experimental code for early types as type parameters
- // def refreshType() { tpePeriod = NoPeriod }
-
- override def typeConstructor: Type = {
- if ((tyconCache eq null) || tyconRunId != currentRunId) {
- tyconCache = newTypeRef(Nil)
- tyconRunId = currentRunId
- }
- assert(tyconCache ne null)
- tyconCache
- }
-
- override def info_=(tp: Type) {
- tpePeriod = NoPeriod
- tyconCache = null
- super.info_=(tp)
- }
-
- override def reset(completer: Type) {
- super.reset(completer)
- tpePeriod = NoPeriod
- tyconRunId = NoRunId
- }
-
- /*** example:
- * public class Test3<T> {}
- * public class Test1<T extends Test3> {}
- * info for T in Test1 should be >: Nothing <: Test3[_]
- */
- protected def doCookJavaRawInfo() {
- if (isJavaDefined || owner.isJavaDefined) {
- val tpe1 = rawToExistential(info)
- // println("cooking type: "+ this +": "+ info +" to "+ tpe1)
- if (tpe1 ne info) {
- setInfo(tpe1)
- }
- }
- }
-
- def cloneSymbolImpl(owner: Symbol): Symbol =
- new TypeSymbol(owner, pos, name) //.toTypeName)
-
- incCounter(typeSymbolCount)
- }
-
- /** A class for type parameters viewed from inside their scopes
- *
- * @param origin Can be either a tree, or a symbol, or null.
- * If skolem got created from newTypeSkolem (called in Namers), origin denotes
- * the type parameter from which the skolem was created. If it got created from
- * skolemizeExistential, origin is either null or a Tree. If it is a Tree, it indicates
- * where the skolem was introduced (this is important for knowing when to pack it
- * again into ab Existential). origin is `null' only in skolemizeExistentials called
- * from <:< or isAsSpecific, because here its value does not matter.
- * I elieve the following invariant holds:
- *
- * origin.isInstanceOf[Symbol] == !hasFlag(EXISTENTIAL)
- */
- class TypeSkolem(initOwner: Symbol, initPos: Position, initName: TypeName, origin: AnyRef)
- extends TypeSymbol(initOwner, initPos, initName) {
-
- /** The skolemization level in place when the skolem was constructed */
- val level = skolemizationLevel
-
- final override def isSkolem = true
-
- /** If typeskolem comes from a type parameter, that parameter, otherwise skolem itself */
- override def deSkolemize = origin match {
- case s: Symbol => s
- case _ => this
- }
-
- /** If type skolem comes from an existential, the tree where it was created */
- override def unpackLocation = origin
-
- //@M! (not deSkolemize.typeParams!!), also can't leave superclass definition: use info, not rawInfo
- override def typeParams = info.typeParams
-
- override def cloneSymbolImpl(owner: Symbol): Symbol =
- new TypeSkolem(owner, pos, name, origin)
-
- override def nameString: String =
- if (settings.debug.value) (super.nameString + "&" + level)
- else super.nameString
- }
-
- /** A class for class symbols */
- class ClassSymbol(initOwner: Symbol, initPos: Position, initName: TypeName)
- extends TypeSymbol(initOwner, initPos, initName) {
-
- private var source: AbstractFile = null
- private var thissym: Symbol = this
-
- final override def isClass = true
- final override def isNonClassType = false
- final override def isAbstractType = false
- final override def isAliasType = false
-
- override def sourceFile =
- if (owner.isPackageClass) source
- else super.sourceFile
- override def sourceFile_=(f: AbstractFile) { source = f }
-
- override def reset(completer: Type) {
- super.reset(completer)
- thissym = this
- }
-
- private var flatname: TypeName = null
-
- override def owner: Symbol =
- if (needsFlatClasses) rawowner.owner
- else rawowner
-
- override def name: TypeName =
- if (needsFlatClasses) {
- if (flatname == null)
- flatname = flattenName().toTypeName
- flatname
- }
- else rawname.asInstanceOf[TypeName]
-
- private var thisTypeCache: Type = _
- private var thisTypePeriod = NoPeriod
-
- private var typeOfThisCache: Type = _
- private var typeOfThisPeriod = NoPeriod
-
- /** the type this.type in this class */
- override def thisType: Type = {
- val period = thisTypePeriod
- if (period != currentPeriod) {
- thisTypePeriod = currentPeriod
- if (!isValid(period)) thisTypeCache = ThisType(this)
- }
- thisTypeCache
- }
-
- /** A symbol carrying the self type of the class as its type */
- override def thisSym: Symbol = thissym
-
- /** the self type of an object foo is foo.type, not class<foo>.this.type
- */
- override def typeOfThis: Type = {
- if (getFlag(MODULE | IMPLCLASS) == MODULE.toLong && owner != NoSymbol) {
- val period = typeOfThisPeriod
- if (period != currentPeriod) {
- typeOfThisPeriod = currentPeriod
- if (!isValid(period))
- typeOfThisCache = singleType(owner.thisType, sourceModule)
- }
- typeOfThisCache
- }
- else thissym.tpe
- }
-
- /** Sets the self type of the class */
- override def typeOfThis_=(tp: Type) {
- thissym = newThisSym(pos).setInfo(tp)
- }
-
- override def cloneSymbolImpl(owner: Symbol): Symbol = {
- val clone = new ClassSymbol(owner, pos, name)
- if (thisSym != this) {
- clone.typeOfThis = typeOfThis
- clone.thisSym.name = thisSym.name
- }
- clone
- }
-
- override def sourceModule =
- if (isModuleClass) companionModule else NoSymbol
-
- private var childSet: Set[Symbol] = Set()
- override def children: List[Symbol] = childSet.toList sortBy (_.sealedSortName)
- override def addChild(sym: Symbol) { childSet = childSet + sym }
-
- incCounter(classSymbolCount)
- }
-
- /** A class for module class symbols
- * Note: Not all module classes are of this type; when unpickled, we get
- * plain class symbols!
- */
- class ModuleClassSymbol(owner: Symbol, pos: Position, name: TypeName)
- extends ClassSymbol(owner, pos, name) {
- private var module: Symbol = null
- def this(module: TermSymbol) = {
- this(module.owner, module.pos, module.name.toTypeName)
- setFlag(module.getFlag(ModuleToClassFlags) | MODULE | FINAL)
- sourceModule = module
- }
- override def sourceModule = module
- private var implicitMembersCacheValue: List[Symbol] = List()
- private var implicitMembersCacheKey1: Type = NoType
- private var implicitMembersCacheKey2: ScopeEntry = null
- def implicitMembers: List[Symbol] = {
- val tp = info
- if ((implicitMembersCacheKey1 ne tp) || (implicitMembersCacheKey2 ne tp.decls.elems)) {
- implicitMembersCacheKey1 = tp
- implicitMembersCacheKey2 = tp.decls.elems
- implicitMembersCacheValue = tp.implicitMembers
- }
- implicitMembersCacheValue
- }
- override def sourceModule_=(module: Symbol) { this.module = module }
- }
-
- /** An object representing a missing symbol */
- object NoSymbol extends Symbol(null, NoPosition, nme.NO_NAME) {
- setInfo(NoType)
- privateWithin = this
- override def info_=(info: Type) {
- infos = TypeHistory(1, NoType, null)
- unlock()
- validTo = currentPeriod
- }
- override def defString: String = toString
- override def locationString: String = ""
- override def enclClass: Symbol = this
- override def toplevelClass: Symbol = this
- override def enclMethod: Symbol = this
- override def owner: Symbol = abort("no-symbol does not have owner")
- override def sourceFile: AbstractFile = null
- override def ownerChain: List[Symbol] = List()
- override def ownersIterator: Iterator[Symbol] = Iterator.empty
- override def alternatives: List[Symbol] = List()
- override def reset(completer: Type) {}
- override def info: Type = NoType
- override def rawInfo: Type = NoType
- protected def doCookJavaRawInfo() {}
- override def accessBoundary(base: Symbol): Symbol = RootClass
- def cloneSymbolImpl(owner: Symbol): Symbol = abort()
- override def originalEnclosingMethod = this
- }
-
- def cloneSymbols[T <: Symbol](syms: List[T]): List[T] = {
- val syms1 = syms map (_.cloneSymbol.asInstanceOf[T])
- for (sym1 <- syms1) sym1.setInfo(sym1.info.substSym(syms, syms1))
- syms1
- }
-
- def cloneSymbols[T <: Symbol](syms: List[T], owner: Symbol): List[T] = {
- val syms1 = syms map (_.cloneSymbol(owner).asInstanceOf[T])
- for (sym1 <- syms1) sym1.setInfo(sym1.info.substSym(syms, syms1))
- syms1
- }
-
- /** An exception for cyclic references of symbol definitions */
- case class CyclicReference(sym: Symbol, info: Type)
- extends TypeError("illegal cyclic reference involving " + sym) {
- // printStackTrace() // debug
- }
-
- case class InvalidCompanions(sym1: Symbol, sym2: Symbol)
- extends Throwable("Companions '" + sym1 + "' and '" + sym2 + "' must be defined in same file") {
- override def toString = getMessage
- }
-
- /** A class for type histories */
- private sealed case class TypeHistory(var validFrom: Period, info: Type, prev: TypeHistory) {
- assert((prev eq null) || phaseId(validFrom) > phaseId(prev.validFrom), this)
- assert(validFrom != NoPeriod)
- override def toString() =
- "TypeHistory(" + phaseOf(validFrom)+":"+runId(validFrom) + "," + info + "," + prev + ")"
- }
-}
diff --git a/src/compiler/scala/tools/nsc/symtab/TypeDebugging.scala b/src/compiler/scala/tools/nsc/symtab/TypeDebugging.scala
deleted file mode 100644
index f7cb430..0000000
--- a/src/compiler/scala/tools/nsc/symtab/TypeDebugging.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package symtab
-
-trait TypeDebugging {
- self: SymbolTable =>
-
- import definitions._
-
- // @M toString that is safe during debugging (does not normalize, ...)
- object typeDebug {
- private def to_s(x: Any): String = x match {
- // otherwise case classes are caught looking like products
- case _: Tree | _: Type => "" + x
- case x: TraversableOnce[_] => x mkString ", "
- case x: Product => x.productIterator mkString ("(", ", ", ")")
- case _ => "" + x
- }
- def ptIndent(x: Any) = ("" + x).replaceAll("\\n", " ")
- def ptBlock(label: String, pairs: (String, Any)*): String = {
- val width = pairs map (_._1.length) max
- val fmt = "%-" + (width + 1) + "s %s"
- val strs = pairs map { case (k, v) => fmt.format(k, to_s(v)) }
-
- strs.mkString(label + " {\n ", "\n ", "\n}")
- }
- def ptLine(label: String, pairs: (String, Any)*): String = {
- val strs = pairs map { case (k, v) => k + "=" + to_s(v) }
- strs.mkString(label + ": ", ", ", "")
- }
- def ptTree(t: Tree) = t match {
- case PackageDef(pid, _) => "package " + pid
- case ModuleDef(_, name, _) => "object " + name
- case ClassDef(_, name, tparams, _) => "class " + name + str.brackets(tparams)
- case _ => to_s(t)
- }
-
- object str {
- def parentheses(xs: List[_]): String = xs.mkString("(", ", ", ")")
- def brackets(xs: List[_]): String = if (xs.isEmpty) "" else xs.mkString("[", ", ", "]")
- def tparams(tparams: List[Type]): String = brackets(tparams map debug)
- def parents(ps: List[Type]): String = (ps map debug).mkString(" with ")
- def refine(defs: Scope): String = defs.toList.mkString("{", " ;\n ", "}")
- }
-
- def dump(tp: Type): Unit = {
- println("** " + tp + " / " + tp.getClass + " **")
- import tp._
-
- println("typeSymbol = " + typeSymbol)
- println("termSymbol = " + termSymbol)
- println("widen = " + widen)
- println("deconst = " + deconst)
- println("typeOfThis = " + typeOfThis)
- println("bounds = " + bounds)
- println("parents = " + parents)
- println("prefixChain = " + prefixChain)
- println("typeConstructor = " + typeConstructor)
- println(" .. typeConstructor.typeParams = " + typeConstructor.typeParams)
- println(" .. _.variance = " + (typeConstructor.typeParams map (_.variance)))
- println("typeArgs = " + typeArgs)
- println("resultType = " + resultType)
- println("finalResultType = " + finalResultType)
- println("paramss = " + paramss)
- println("paramTypes = " + paramTypes)
- println("typeParams = " + typeParams)
- println("boundSyms = " + boundSyms)
- println("baseTypeSeq = " + baseTypeSeq)
- println("baseClasses = " + baseClasses)
- println("toLongString = " + toLongString)
- }
-
- private def debug(tp: Type): String = tp match {
- case TypeRef(pre, sym, args) => debug(pre) + "." + sym.nameString + str.tparams(args)
- case ThisType(sym) => sym.nameString + ".this"
- case SingleType(pre, sym) => debug(pre) +"."+ sym.nameString +".type"
- case RefinedType(parents, defs) => str.parents(parents) + str.refine(defs)
- case ClassInfoType(parents, defs, clazz) => "class "+ clazz.nameString + str.parents(parents) + str.refine(defs)
- case PolyType(tparams, result) => str.brackets(tparams) + " " + debug(result)
- case TypeBounds(lo, hi) => ">: "+ debug(lo) +" <: "+ debug(hi)
- case tv @ TypeVar(_, _) => tv.toString
- case ExistentialType(tparams, qtpe) => "forSome "+ str.brackets(tparams) + " " + debug(qtpe)
- case _ => "?"+tp.getClass.getName+"?"//tp.toString might produce cyclic error...
- }
- def debugString(tp: Type) = debug(tp)
- }
- def paramString(tp: Type) = typeDebug.str parentheses (tp.params map (_.defString))
- def typeParamsString(tp: Type) = typeDebug.str brackets (tp.typeParams map (_.defString))
- def typeArgsString(tp: Type) = typeDebug.str brackets (tp.typeArgs map (_.safeToString))
- def debugString(tp: Type) = typeDebug debugString tp
-}
diff --git a/src/compiler/scala/tools/nsc/symtab/Types.scala b/src/compiler/scala/tools/nsc/symtab/Types.scala
deleted file mode 100644
index 03d8c36..0000000
--- a/src/compiler/scala/tools/nsc/symtab/Types.scala
+++ /dev/null
@@ -1,5940 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package symtab
-
-import scala.collection.{ mutable, immutable }
-import scala.ref.WeakReference
-import mutable.ListBuffer
-import ast.TreeGen
-import util.{ Position, NoPosition }
-import util.Statistics._
-import Flags._
-import scala.util.control.ControlThrowable
-import scala.annotation.tailrec
-
-/* A standard type pattern match:
- case ErrorType =>
- // internal: error
- case WildcardType =>
- // internal: unknown
- case NoType =>
- case NoPrefix =>
- case ThisType(sym) =>
- // sym.this.type
- case SuperType(thistpe, supertpe) =>
- // super references
- case SingleType(pre, sym) =>
- // pre.sym.type
- case ConstantType(value) =>
- // Int(2)
- case TypeRef(pre, sym, args) =>
- // pre.sym[targs]
- // Outer.this.C would be represented as TypeRef(ThisType(Outer), C, List())
- case RefinedType(parents, defs) =>
- // parent1 with ... with parentn { defs }
- case ExistentialType(tparams, result) =>
- // result forSome { tparams }
- case AnnotatedType(annots, tp, selfsym) =>
- // tp @annots
-
- // the following are non-value types; you cannot write them down in Scala source.
-
- case TypeBounds(lo, hi) =>
- // >: lo <: hi
- case ClassInfoType(parents, defs, clazz) =>
- // same as RefinedType except as body of class
- case MethodType(paramtypes, result) =>
- // (paramtypes)result
- // For instance def m(): T is represented as MethodType(List(), T)
- case NullaryMethodType(result) => // eliminated by uncurry
- // an eval-by-name type
- // For instance def m: T is represented as NullaryMethodType(T)
- case PolyType(tparams, result) =>
- // [tparams]result where result is a (Nullary)MethodType or ClassInfoType
-
- // The remaining types are not used after phase `typer'.
- case OverloadedType(pre, tparams, alts) =>
- // all alternatives of an overloaded ident
- case AntiPolyType(pre, targs) =>
- // rarely used, disappears when combined with a PolyType
- case TypeVar(inst, constr) =>
- // a type variable
- // Replace occurrences of type parameters with type vars, where
- // inst is the instantiation and constr is a list of bounds.
- case DeBruijnIndex(level, index)
- // for dependent method types: a type referring to a method parameter.
- // Not presently used, it seems.
-*/
-
-trait Types extends reflect.generic.Types { self: SymbolTable =>
- import definitions._
-
- //statistics
- def uniqueTypeCount = if (uniques == null) 0 else uniques.size
-
- private var explainSwitch = false
- private final val emptySymbolSet = immutable.Set.empty[Symbol]
-
- private final val alternativeNarrow = false
-
- private final val LogPendingSubTypesThreshold = 50
- private final val LogPendingBaseTypesThreshold = 50
- private final val LogVolatileThreshold = 50
-
- /** A don't care value for the depth parameter in lubs/glbs and related operations */
- private final val AnyDepth = -3
-
- /** Decrement depth unless it is a don't care */
- private final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1
-
- private final val printLubs = false
- /** In case anyone wants to turn off lub verification without reverting anything. */
- private final val verifyLubs = true
-
- /** The current skolemization level, needed for the algorithms
- * in isSameType, isSubType that do constraint solving under a prefix
- */
- var skolemizationLevel = 0
-
- /** A log of type variable with their original constraints. Used in order
- * to undo constraints in the case of isSubType/isSameType failure.
- */
- object undoLog {
- private type UndoLog = List[(TypeVar, TypeConstraint)]
- private[nsc] var log: UndoLog = List()
-
- /** Undo all changes to constraints to type variables upto `limit'
- */
- private def undoTo(limit: UndoLog) {
- while ((log ne limit) && log.nonEmpty) {
- val (tv, constr) = log.head
- tv.constr = constr
- log = log.tail
- }
- }
-
- private[Types] def record(tv: TypeVar) = {
- log ::= (tv, tv.constr.cloneInternal)
- }
- private[nsc] def clear() {
- if (settings.debug.value)
- self.log("Clearing " + log.size + " entries from the undoLog.")
-
- log = Nil
- }
-
- // `block` should not affect constraints on typevars
- def undo[T](block: => T): T = {
- val before = log
-
- try block
- finally undoTo(before)
- }
-
- // if `block` evaluates to false, it should not affect constraints on typevars
- def undoUnless(block: => Boolean): Boolean = {
- val before = log
- var result = false
-
- try result = block
- finally if (!result) undoTo(before)
-
- result
- }
- }
-
- /** A map from lists to compound types that have the given list as parents.
- * This is used to avoid duplication in the computation of base type sequences and baseClasses.
- * It makes use of the fact that these two operations depend only on the parents,
- * not on the refinement.
- */
- val intersectionWitness = new mutable.WeakHashMap[List[Type], WeakReference[Type]]
-
- private object gen extends {
- val global : Types.this.type = Types.this
- } with TreeGen
-
- import gen._
-
- /** A proxy for a type (identified by field `underlying') that forwards most
- * operations to it (for exceptions, see WrappingProxy, which forwards even more operations).
- * every operation that is overridden for some kind of types should be forwarded.
- */
- trait SimpleTypeProxy extends Type {
- def underlying: Type
-
- // the following operations + those in RewrappingTypeProxy are all operations
- // in class Type that are overridden in some subclass
- // Important to keep this up-to-date when new operations are added!
- override def isTrivial = underlying.isTrivial
- override def isHigherKinded: Boolean = underlying.isHigherKinded
- override def typeConstructor: Type = underlying.typeConstructor
- override def isNotNull = underlying.isNotNull
- override def isError = underlying.isError
- override def isErroneous = underlying.isErroneous
- override def isStable: Boolean = underlying.isStable
- override def isVolatile = underlying.isVolatile
- override def finalResultType = underlying.finalResultType
- override def paramSectionCount = underlying.paramSectionCount
- override def paramss = underlying.paramss
- override def params = underlying.params
- override def paramTypes = underlying.paramTypes
- override def termSymbol = underlying.termSymbol
- override def termSymbolDirect = underlying.termSymbolDirect
- override def typeParams = underlying.typeParams
- override def boundSyms = underlying.boundSyms
- override def typeSymbol = underlying.typeSymbol
- override def typeSymbolDirect = underlying.typeSymbolDirect
- override def widen = underlying.widen
- override def typeOfThis = underlying.typeOfThis
- override def bounds = underlying.bounds
- override def parents = underlying.parents
- override def prefix = underlying.prefix
- override def decls = underlying.decls
- override def baseType(clazz: Symbol) = underlying.baseType(clazz)
- override def baseTypeSeq = underlying.baseTypeSeq
- override def baseTypeSeqDepth = underlying.baseTypeSeqDepth
- override def baseClasses = underlying.baseClasses
- }
-
- /** A proxy for a type (identified by field `underlying') that forwards most
- * operations to it. Every operation that is overridden for some kind of types is
- * forwarded here. Some operations are rewrapped again.
- */
- trait RewrappingTypeProxy extends SimpleTypeProxy {
- protected def maybeRewrap(newtp: Type) = if (newtp eq underlying) this else rewrap(newtp)
- protected def rewrap(newtp: Type): Type
-
- // the following are all operations in class Type that are overridden in some subclass
- // Important to keep this up-to-date when new operations are added!
- override def widen = maybeRewrap(underlying.widen)
- override def narrow = underlying.narrow
- override def deconst = maybeRewrap(underlying.deconst)
- override def resultType = maybeRewrap(underlying.resultType)
- override def resultType(actuals: List[Type]) = maybeRewrap(underlying.resultType(actuals))
- override def finalResultType = maybeRewrap(underlying.finalResultType)
- override def paramSectionCount = 0
- override def paramss: List[List[Symbol]] = List()
- override def params: List[Symbol] = List()
- override def paramTypes: List[Type] = List()
- override def typeArgs = underlying.typeArgs
- override def notNull = maybeRewrap(underlying.notNull)
- override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = underlying.instantiateTypeParams(formals, actuals)
- override def skolemizeExistential(owner: Symbol, origin: AnyRef) = underlying.skolemizeExistential(owner, origin)
- override def normalize = maybeRewrap(underlying.normalize)
- override def dealias = maybeRewrap(underlying.dealias)
- override def cloneInfo(owner: Symbol) = maybeRewrap(underlying.cloneInfo(owner))
- override def atOwner(owner: Symbol) = maybeRewrap(underlying.atOwner(owner))
- override def prefixString = underlying.prefixString
- override def isComplete = underlying.isComplete
- override def complete(sym: Symbol) = underlying.complete(sym)
- override def load(sym: Symbol) { underlying.load(sym) }
- override def withAnnotations(annots: List[AnnotationInfo]) = maybeRewrap(underlying.withAnnotations(annots))
- override def withoutAnnotations = maybeRewrap(underlying.withoutAnnotations)
- }
-
- /** The base class for all types */
- abstract class Type extends AbsType {
-
- /** Types for which asSeenFrom always is the identity, no matter what
- * prefix or owner.
- */
- def isTrivial: Boolean = false
-
- /** Is this type higher-kinded, i.e., is it a type constructor @M */
- def isHigherKinded: Boolean = false
-
- /** Does this type denote a stable reference (i.e. singleton type)? */
- def isStable: Boolean = false
-
- /** Is this type dangerous (i.e. it might contain conflicting
- * type information when empty, so that it can be constructed
- * so that type unsoundness results.) A dangerous type has an underlying
- * type of the form T_1 with T_n { decls }, where one of the
- * T_i (i > 1) is an abstract type.
- */
- def isVolatile: Boolean = false
-
- /** Is this type guaranteed not to have `null' as a value? */
- def isNotNull: Boolean = false
-
- /** Is this type a structural refinement type (it 'refines' members that have not been inherited) */
- def isStructuralRefinement: Boolean = false
-
- /** Does this type depend immediately on an enclosing method parameter?
- * i.e., is it a singleton type whose termSymbol refers to an argument of the symbol's owner (which is a method)
- */
- def isImmediatelyDependent: Boolean = false
-
- /** Does this depend on an enclosing method parameter? */
- def isDependent: Boolean = IsDependentCollector.collect(this)
-
- /** True for WildcardType or BoundedWildcardType */
- def isWildcard = false
-
- /** The term symbol associated with the type
- * Note that the symbol of the normalized type is returned (@see normalize)
- */
- def termSymbol: Symbol = NoSymbol
-
- /** The type symbol associated with the type
- * Note that the symbol of the normalized type is returned (@see normalize)
- */
- def typeSymbol: Symbol = NoSymbol
-
- /** The term symbol *directly* associated with the type
- */
- def termSymbolDirect: Symbol = termSymbol
-
- /** The type symbol *directly* associated with the type
- */
- def typeSymbolDirect: Symbol = typeSymbol
-
- /** The base type underlying a type proxy,
- * identity on all other types */
- def underlying: Type = this
-
- /** Widen from singleton type to its underlying non-singleton
- * base type by applying one or more `underlying' dereferences,
- * identity for all other types.
- *
- * class Outer { class C ; val x: C }
- * val o: Outer
- * <o.x.type>.widen = o.C
- */
- def widen: Type = this
-
- /** Map a constant type or not-null-type to its underlying base type,
- * identity for all other types.
- */
- def deconst: Type = this
-
- /** The type of `this' of a class type or reference type
- */
- def typeOfThis: Type = typeSymbol.typeOfThis
-
- /** Map to a singleton type which is a subtype of this type.
- * The fallback implemented here gives
- * T.narrow = (T {}).this.type
- * Overridden where we know more about where types come from.
- *
- * todo: change to singleton type of an existentially defined variable
- * of the right type instead of making this a `this` of a refined type.
- */
- def narrow: Type =
- if (phase.erasedTypes) this
- else if (alternativeNarrow) { // investigate why this does not work!
- val tparam = commonOwner(this) freshExistential ".type" setInfo singletonBounds(this)
- tparam.tpe
- } else {
- val cowner = commonOwner(this)
- refinedType(List(this), cowner, EmptyScope, cowner.pos).narrow
- }
-
- /** For a TypeBounds type, itself;
- * for a reference denoting an abstract type, its bounds,
- * for all other types, a TypeBounds type all of whose bounds are this type.
- */
- def bounds: TypeBounds = TypeBounds(this, this)
-
- /** For a class or intersection type, its parents.
- * For a TypeBounds type, the parents of its hi bound.
- * inherited by typerefs, singleton types, and refinement types,
- * The empty list for all other types */
- def parents: List[Type] = List()
-
- /** For a typeref or single-type, the prefix of the normalized type (@see normalize).
- * NoType for all other types. */
- def prefix: Type = NoType
-
- /** A chain of all typeref or singletype prefixes of this type, longest first.
- * (Only used from safeToString.)
- */
- def prefixChain: List[Type] = this match {
- case TypeRef(pre, _, _) => pre :: pre.prefixChain
- case SingleType(pre, _) => pre :: pre.prefixChain
- case _ => List()
- }
-
- /** This type, without its type arguments @M */
- def typeConstructor: Type = this
-
- /** For a typeref, its arguments. The empty list for all other types */
- def typeArgs: List[Type] = List()
-
- /** For a (nullary) method or poly type, its direct result type,
- * the type itself for all other types. */
- def resultType: Type = this
-
- def resultType(actuals: List[Type]) = this
-
- /** Only used for dependent method types. */
- def resultApprox: Type = if(settings.YdepMethTpes.value) ApproximateDependentMap(resultType) else resultType
-
- /** If this is a TypeRef `clazz`[`T`], return the argument `T`
- * otherwise return this type
- */
- def remove(clazz: Symbol): Type = this
-
- /** For a curried/nullary method or poly type its non-method result type,
- * the type itself for all other types */
- def finalResultType: Type = this
-
- /** For a method type, the number of its value parameter sections,
- * 0 for all other types */
- def paramSectionCount: Int = 0
-
- /** For a method or poly type, a list of its value parameter sections,
- * the empty list for all other types */
- def paramss: List[List[Symbol]] = List()
-
- /** For a method or poly type, its first value parameter section,
- * the empty list for all other types */
- def params: List[Symbol] = List()
-
- /** For a method or poly type, the types of its first value parameter section,
- * the empty list for all other types */
- def paramTypes: List[Type] = List()
-
- /** For a (potentially wrapped) poly type, its type parameters,
- * the empty list for all other types */
- def typeParams: List[Symbol] = List()
-
- /** For a (potentially wrapped) poly or existential type, its bound symbols,
- * the empty list for all other types */
- def boundSyms: immutable.Set[Symbol] = emptySymbolSet
-
- /** Mixin a NotNull trait unless type already has one
- * ...if the option is given, since it is causing typing bugs.
- */
- def notNull: Type =
- if (!settings.Ynotnull.value || isNotNull || phase.erasedTypes) this
- else NotNullType(this)
-
- /** Replace formal type parameter symbols with actual type arguments.
- *
- * Amounts to substitution except for higher-kinded types. (See overridden method in TypeRef) -- @M
- */
- def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
- if (sameLength(formals, actuals)) this.subst(formals, actuals) else ErrorType
-
- /** If this type is an existential, turn all existentially bound variables to type skolems.
- * @param owner The owner of the created type skolems
- * @param origin The tree whose type was an existential for which the skolem was created.
- */
- def skolemizeExistential(owner: Symbol, origin: AnyRef): Type = this
-
- /** A simple version of skolemizeExistential for situations where
- * owner or unpack location do not matter (typically used in subtype tests)
- */
- def skolemizeExistential: Type = skolemizeExistential(NoSymbol, null)
-
- /** Reduce to beta eta-long normal form.
- * Expands type aliases and converts higher-kinded TypeRefs to PolyTypes.
- * Functions on types are also implemented as PolyTypes.
- *
- * Example: (in the below, <List> is the type constructor of List)
- * TypeRef(pre, <List>, List()) is replaced by
- * PolyType(X, TypeRef(pre, <List>, List(X)))
- */
- def normalize = this // @MAT
-
- /** Expands type aliases. */
- def dealias = this
-
- /** Is this type produced as a repair for an error? */
- def isError: Boolean = typeSymbol.isError || termSymbol.isError
-
- /** Is this type produced as a repair for an error? */
- def isErroneous: Boolean = ErroneousCollector.collect(this)
-
- /** Does this type denote a reference type which can be null? */
- // def isNullable: Boolean = false
-
- /** For a classtype or refined type, its defined or declared members;
- * inherited by subtypes and typerefs.
- * The empty scope for all other types.
- */
- def decls: Scope = EmptyScope
-
- /** The defined or declared members with name `name' in this type;
- * an OverloadedSymbol if several exist, NoSymbol if none exist.
- * Alternatives of overloaded symbol appear in the order they are declared.
- */
- def decl(name: Name): Symbol = findDecl(name, 0)
-
- /** The non-private defined or declared members with name `name' in this type;
- * an OverloadedSymbol if several exist, NoSymbol if none exist.
- * Alternatives of overloaded symbol appear in the order they are declared.
- */
- def nonPrivateDecl(name: Name): Symbol = findDecl(name, PRIVATE)
-
- /** A list of all members of this type (defined or inherited)
- * Members appear in linearization order of their owners.
- * Members with the same owner appear in reverse order of their declarations.
- */
- def members: List[Symbol] = findMember(nme.ANYNAME, 0, 0, false).alternatives
-
- /** A list of all non-private members of this type (defined or inherited) */
- def nonPrivateMembers: List[Symbol] =
- findMember(nme.ANYNAME, PRIVATE | BRIDGES, 0, false).alternatives
-
- /** A list of all non-private members of this type (defined or inherited),
- * admitting members with given flags `admit`
- */
- def nonPrivateMembersAdmitting(admit: Long): List[Symbol] =
- findMember(nme.ANYNAME, (PRIVATE | BRIDGES) & ~admit, 0, false).alternatives
-
- /** A list of all implicit symbols of this type (defined or inherited) */
- def implicitMembers: List[Symbol] =
- findMember(nme.ANYNAME, BRIDGES, IMPLICIT, false).alternatives
-
- /** A list of all deferred symbols of this type (defined or inherited) */
- def deferredMembers: List[Symbol] =
- findMember(nme.ANYNAME, BRIDGES, DEFERRED, false).alternatives
-
- /** The member with given name,
- * an OverloadedSymbol if several exist, NoSymbol if none exist */
- def member(name: Name): Symbol = findMember(name, BRIDGES, 0, false)
-
- /** The non-private member with given name,
- * an OverloadedSymbol if several exist, NoSymbol if none exist.
- * Bridges are excluded from the result
- */
- def nonPrivateMember(name: Name): Symbol =
- findMember(name, PRIVATE | BRIDGES, 0, false)
-
- /** The non-private member with given name, admitting members with given flags `admit`
- * an OverloadedSymbol if several exist, NoSymbol if none exist
- */
- def nonPrivateMemberAdmitting(name: Name, admit: Long): Symbol =
- findMember(name, (PRIVATE | BRIDGES) & ~admit, 0, false)
-
- /** The non-local member with given name,
- * an OverloadedSymbol if several exist, NoSymbol if none exist */
- def nonLocalMember(name: Name): Symbol =
- findMember(name, LOCAL | BRIDGES, 0, false)
-
- /** The least type instance of given class which is a supertype
- * of this type. Example:
- * class D[T]
- * class C extends p.D[Int]
- * ThisType(C).baseType(D) = p.D[Int]
- */
- def baseType(clazz: Symbol): Type = NoType
-
- /** This type as seen from prefix `pre' and class `clazz'. This means:
- * Replace all thistypes of `clazz' or one of its subclasses
- * by `pre' and instantiate all parameters by arguments of `pre'.
- * Proceed analogously for thistypes referring to outer classes.
- *
- * Example:
- * class D[T] { def m: T }
- * class C extends p.D[Int]
- * T.asSeenFrom(ThisType(C), D) (where D is owner of m)
- * = Int
- */
- def asSeenFrom(pre: Type, clazz: Symbol): Type =
- if (!isTrivial && (!phase.erasedTypes || pre.typeSymbol == ArrayClass)) {
- incCounter(asSeenFromCount)
- val start = startTimer(asSeenFromNanos)
- val m = new AsSeenFromMap(pre.normalize, clazz)
- val tp = m apply this
- val result = existentialAbstraction(m.capturedParams, tp)
- stopTimer(asSeenFromNanos, start)
- result
- } else this
-
- /** The info of `sym', seen as a member of this type.
- *
- * Example:
- * class D[T] { def m: T }
- * class C extends p.D[Int]
- * ThisType(C).memberType(m) = Int
- */
- def memberInfo(sym: Symbol): Type = {
- sym.info.asSeenFrom(this, sym.owner)
- }
-
- /** The type of `sym', seen as a member of this type. */
- def memberType(sym: Symbol): Type = sym match {
- case meth: MethodSymbol =>
- meth.typeAsMemberOf(this)
- case _ =>
- computeMemberType(sym)
- }
-
- def computeMemberType(sym: Symbol): Type = sym.tpeHK match { //@M don't prematurely instantiate higher-kinded types, they will be instantiated by transform, typedTypeApply, etc. when really necessary
- case OverloadedType(_, alts) =>
- OverloadedType(this, alts)
- case tp =>
- tp.asSeenFrom(this, sym.owner)
- }
-
- /** Substitute types `to' for occurrences of references to
- * symbols `from' in this type.
- */
- def subst(from: List[Symbol], to: List[Type]): Type =
- if (from.isEmpty) this
- else new SubstTypeMap(from, to) apply this
-
- /** Substitute symbols `to' for occurrences of symbols
- * `from' in this type.
- * !!! NOTE !!!: If you need to do a substThis and a substSym, the substThis has to come
- * first, as otherwise symbols will immediately get rebound in typeRef to the old
- * symbol.
- */
- def substSym(from: List[Symbol], to: List[Symbol]): Type =
- if (from eq to) this
- else new SubstSymMap(from, to) apply this
-
- /** Substitute all occurrences of `ThisType(from)' in this type
- * by `to'.
- * !!! NOTE !!!: If you need to do a substThis and a substSym, the substThis has to come
- * first, as otherwise symbols will immediately get rebound in typeRef to the old
- * symbol.
- */
- def substThis(from: Symbol, to: Type): Type =
- new SubstThisMap(from, to) apply this
-
- def substSuper(from: Type, to: Type): Type =
- new SubstSuperMap(from, to) apply this
-
- /** Returns all parts of this type which satisfy predicate `p' */
- def filter(p: Type => Boolean): List[Type] = new FilterTypeCollector(p).collect(this).toList
-
- /** Returns optionally first type (in a preorder traversal) which satisfies predicate `p',
- * or None if none exists.
- */
- def find(p: Type => Boolean): Option[Type] = new FindTypeCollector(p).collect(this)
-
- /** Apply `f' to each part of this type */
- def foreach(f: Type => Unit) { new ForEachTypeTraverser(f).traverse(this) }
-
- /** Apply `f' to each part of this type; children get mapped before their parents */
- def map(f: Type => Type): Type = new TypeMap {
- def apply(x: Type) = f(mapOver(x))
- } apply this
-
- /** Is there part of this type which satisfies predicate `p'? */
- def exists(p: Type => Boolean): Boolean = !find(p).isEmpty
-
- /** Does this type contain a reference to this symbol? */
- def contains(sym: Symbol): Boolean = new ContainsCollector(sym).collect(this)
-
- /** Does this type contain a reference to this type */
- def containsTp(tp: Type): Boolean = new ContainsTypeCollector(tp).collect(this)
-
- /** Is this type a subtype of that type? */
- def <:<(that: Type): Boolean = {
- if (util.Statistics.enabled) stat_<:<(that)
- else {
- (this eq that) ||
- (if (explainSwitch) explain("<:", isSubType, this, that)
- else isSubType(this, that, AnyDepth))
- }
- }
-
- /** Can this type only be subtyped by bottom types?
- * This is assessed to be the case if the class is final,
- * and all type parameters (if any) are invariant.
- */
- def isFinalType = (
- typeSymbol.isFinal &&
- (typeSymbol.typeParams forall (_.variance == 0))
- )
-
- /** Is this type a subtype of that type in a pattern context?
- * Any type arguments on the right hand side are replaced with
- * fresh existentials, except for Arrays.
- *
- * See bug1434.scala for an example of code which would fail
- * if only a <:< test were applied.
- */
- def matchesPattern(that: Type): Boolean = {
- (this <:< that) || ((this, that) match {
- case (TypeRef(_, ArrayClass, List(arg1)), TypeRef(_, ArrayClass, List(arg2))) if arg2.typeSymbol.typeParams.nonEmpty =>
- arg1 matchesPattern arg2
- case (_, TypeRef(_, _, args)) =>
- val newtp = existentialAbstraction(args map (_.typeSymbol), that)
- !(that =:= newtp) && (this <:< newtp)
- case _ =>
- false
- })
- }
-
- def stat_<:<(that: Type): Boolean = {
- incCounter(subtypeCount)
- val start = startTimer(subtypeNanos)
- val result =
- (this eq that) ||
- (if (explainSwitch) explain("<:", isSubType, this, that)
- else isSubType(this, that, AnyDepth))
- stopTimer(subtypeNanos, start)
- result
- }
-
- /** Is this type a weak subtype of that type? True also for numeric types, i.e. Int weak_<:< Long.
- */
- def weak_<:<(that: Type): Boolean = {
- incCounter(subtypeCount)
- val start = startTimer(subtypeNanos)
- val result =
- ((this eq that) ||
- (if (explainSwitch) explain("weak_<:", isWeakSubType, this, that)
- else isWeakSubType(this, that)))
- stopTimer(subtypeNanos, start)
- result
- }
-
- /** Is this type equivalent to that type? */
- def =:=(that: Type): Boolean = (
- (this eq that) ||
- (if (explainSwitch) explain("=", isSameType, this, that)
- else isSameType(this, that))
- );
-
- /** Does this type implement symbol `sym' with same or stronger type?
- */
- def specializes(sym: Symbol): Boolean =
- if (explainSwitch) explain("specializes", specializesSym, this, sym)
- else specializesSym(this, sym)
-
- /** Is this type close enough to that type so that members
- * with the two type would override each other?
- * This means:
- * - Either both types are polytypes with the same number of
- * type parameters and their result types match after renaming
- * corresponding type parameters
- * - Or both types are (nullary) method types with equivalent type parameter types
- * and matching result types
- * - Or both types are equivalent
- * - Or phase.erasedTypes is false and both types are neither method nor
- * poly types.
- */
- def matches(that: Type): Boolean = matchesType(this, that, !phase.erasedTypes)
-
- /** Same as matches, except that non-method types are always assumed to match.
- */
- def looselyMatches(that: Type): Boolean = matchesType(this, that, true)
-
- /** The shortest sorted upwards closed array of types that contains
- * this type as first element.
- *
- * A list or array of types ts is upwards closed if
- *
- * for all t in ts:
- * for all typerefs p.s[args] such that t <: p.s[args]
- * there exists a typeref p'.s[args'] in ts such that
- * t <: p'.s['args] <: p.s[args],
- *
- * and
- *
- * for all singleton types p.s such that t <: p.s
- * there exists a singleton type p'.s in ts such that
- * t <: p'.s <: p.s
- *
- * Sorting is with respect to Symbol.isLess() on type symbols.
- */
- def baseTypeSeq: BaseTypeSeq = baseTypeSingletonSeq(this)
-
- /** The maximum depth (@see maxDepth)
- * of each type in the BaseTypeSeq of this type.
- */
- def baseTypeSeqDepth: Int = 1
-
- /** The list of all baseclasses of this type (including its own typeSymbol)
- * in reverse linearization order, starting with the class itself and ending
- * in class Any.
- */
- def baseClasses: List[Symbol] = List()
-
- /**
- * @param sym the class symbol
- * @return the index of given class symbol in the BaseTypeSeq of this type,
- * or -1 if no base type with given class symbol exists.
- */
- def baseTypeIndex(sym: Symbol): Int = {
- val bts = baseTypeSeq
- var lo = 0
- var hi = bts.length - 1
- while (lo <= hi) {
- val mid = (lo + hi) / 2
- val btssym = bts.typeSymbol(mid)
- if (sym == btssym) return mid
- else if (sym isLess btssym) hi = mid - 1
- else if (btssym isLess sym) lo = mid + 1
- else abort()
- }
- -1
- }
-
- /** If this is a poly- or methodtype, a copy with cloned type / value parameters
- * owned by `owner'. Identity for all other types.
- */
- def cloneInfo(owner: Symbol) = this
-
- /** Make sure this type is correct as the info of given owner; clone it if not.
- */
- def atOwner(owner: Symbol) = this
-
- protected def objectPrefix = "object "
- protected def packagePrefix = "package "
-
- def trimPrefix(str: String) = str stripPrefix objectPrefix stripPrefix packagePrefix
-
- /** The string representation of this type used as a prefix */
- def prefixString = trimPrefix(toString) + "#"
-
- /** The string representation of this type, with singletypes explained */
- def toLongString = {
- val str = toString
- if (str endsWith ".type") str + " (with underlying type " + widen + ")"
- else str
- }
-
- /** A test whether a type contains any unification type variables */
- def isGround: Boolean = this match {
- case TypeVar(_, constr) =>
- constr.instValid && constr.inst.isGround
- case TypeRef(pre, sym, args) =>
- sym.isPackageClass || pre.isGround && (args forall (_.isGround))
- case SingleType(pre, sym) =>
- sym.isPackageClass || pre.isGround
- case ThisType(_) | NoPrefix | WildcardType | NoType | ErrorType | ConstantType(_) =>
- true
- case _ =>
- typeVarToOriginMap(this) eq this
- }
-
- /** If this is a symbol loader type, load and assign a new type to
- * `sym'.
- */
- def load(sym: Symbol) {}
-
- private def findDecl(name: Name, excludedFlags: Int): Symbol = {
- var alts: List[Symbol] = List()
- var sym: Symbol = NoSymbol
- var e: ScopeEntry = decls.lookupEntry(name)
- while (e ne null) {
- if (!e.sym.hasFlag(excludedFlags)) {
- if (sym == NoSymbol) sym = e.sym
- else {
- if (alts.isEmpty) alts = List(sym)
- alts = e.sym :: alts
- }
- }
- e = decls.lookupNextEntry(e)
- }
- if (alts.isEmpty) sym
- else (baseClasses.head.newOverloaded(this, alts))
- }
-
- /**
- * Find member(s) in this type. If several members matching criteria are found, they are
- * returned in an OverloadedSymbol
- *
- * @param name The member's name, where nme.ANYNAME means `unspecified'
- * @param excludedFlags Returned members do not have these flags
- * @param requiredFlags Returned members do have these flags
- * @param stableOnly If set, return only members that are types or stable values
- */
- //TODO: use narrow only for modules? (correct? efficiency gain?)
- def findMember(name: Name, excludedFlags: Long, requiredFlags: Long, stableOnly: Boolean): Symbol = {
- var suspension: mutable.HashSet[TypeVar] = null
- // if this type contains type variables, put them to sleep for a while -- don't just wipe them out by
- // replacing them by the corresponding type parameter, as that messes up (e.g.) type variables in type refinements
- // without this, the matchesType call would lead to type variables on both sides
- // of a subtyping/equality judgement, which can lead to recursive types being constructed.
- // See (t0851) for a situation where this happens.
- if (!this.isGround) {
- // PP: The foreach below was formerly expressed as:
- // for(tv @ TypeVar(_, _) <- this) { suspension suspend tv }
- //
- // The tree checker failed this saying a TypeVar is required, but a (Type @unchecked) was found.
- // This is a consequence of using a pattern match and variable binding + ticket #1503, which
- // was addressed by weakening the type of bindings in pattern matches if they occur on the right.
- // So I'm not quite sure why this works at all, as the checker is right that it is mistyped.
- // For now I modified it as below, which achieves the same without error.
- //
- // make each type var in this type use its original type for comparisons instead of collecting constraints
- val susp = new mutable.HashSet[TypeVar] // use a local val so it remains unboxed
- this foreach {
- case tv: TypeVar => tv.suspended = true; susp += tv
- case _ =>
- }
- suspension = susp
- }
-
- incCounter(findMemberCount)
- val start = startTimer(findMemberNanos)
-
- //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
- var members: Scope = null
- var member: Symbol = NoSymbol
- var excluded = excludedFlags | DEFERRED
- var continue = true
- var self: Type = null
- var membertpe: Type = null
- while (continue) {
- continue = false
- val bcs0 = baseClasses
- var bcs = bcs0
- while (!bcs.isEmpty) {
- val decls = bcs.head.info.decls
- var entry =
- if (name == nme.ANYNAME) decls.elems else decls.lookupEntry(name)
- while (entry ne null) {
- val sym = entry.sym
- if (sym hasAllFlags requiredFlags) {
- val excl = sym.getFlag(excluded)
- if (excl == 0L &&
- (// omit PRIVATE LOCALS unless selector class is contained in class owning the def.
- (bcs eq bcs0) ||
- !sym.isPrivateLocal ||
- (bcs0.head.hasTransOwner(bcs.head)))) {
- if (name.isTypeName || stableOnly && sym.isStable) {
- stopTimer(findMemberNanos, start)
- if (suspension ne null) suspension foreach (_.suspended = false)
- return sym
- } else if (member == NoSymbol) {
- member = sym
- } else if (members eq null) {
- if (member.name != sym.name ||
- !(member == sym ||
- member.owner != sym.owner &&
- !sym.isPrivate && {
- if (self eq null) self = this.narrow
- if (membertpe eq null) membertpe = self.memberType(member)
- (membertpe matches self.memberType(sym))
- })) {
- members = new Scope(List(member, sym))
- }
- } else {
- var prevEntry = members.lookupEntry(sym.name)
- var symtpe: Type = null
- while ((prevEntry ne null) &&
- !(prevEntry.sym == sym ||
- prevEntry.sym.owner != sym.owner &&
- !sym.hasFlag(PRIVATE) && {
- if (self eq null) self = this.narrow
- if (symtpe eq null) symtpe = self.memberType(sym)
- self.memberType(prevEntry.sym) matches symtpe
- })) {
- prevEntry = members lookupNextEntry prevEntry
- }
- if (prevEntry eq null) {
- members enter sym
- }
- }
- } else if (excl == DEFERRED.toLong) {
- continue = true
- }
- }
- entry = if (name == nme.ANYNAME) entry.next else decls lookupNextEntry entry
- } // while (entry ne null)
- // excluded = excluded | LOCAL
- bcs = if (name == nme.CONSTRUCTOR) Nil else bcs.tail
- } // while (!bcs.isEmpty)
- excluded = excludedFlags
- } // while (continue)
- stopTimer(findMemberNanos, start)
- if (suspension ne null) suspension foreach (_.suspended = false)
- if (members eq null) {
- if (member == NoSymbol) incCounter(noMemberCount)
- member
- } else {
- incCounter(multMemberCount)
- baseClasses.head.newOverloaded(this, members.toList)
- }
- }
-
- /** The existential skolems and existentially quantified variables which are free in this type */
- def existentialSkolems: List[Symbol] = {
- var boundSyms: List[Symbol] = List()
- var skolems: List[Symbol] = List()
- for (t <- this) {
- t match {
- case ExistentialType(quantified, qtpe) =>
- boundSyms = boundSyms ::: quantified
- case TypeRef(_, sym, _) =>
- if ((sym hasFlag EXISTENTIAL) && !(boundSyms contains sym) && !(skolems contains sym))
- skolems = sym :: skolems
- case _ =>
- }
- }
- skolems
- }
-
- /** Return the annotations on this type. */
- def annotations: List[AnnotationInfo] = Nil
-
- /** Test for the presence of an annotation */
- def hasAnnotation(clazz: Symbol) = annotations exists { _.atp.typeSymbol == clazz }
-
- /** Add an annotation to this type */
- def withAnnotation(annot: AnnotationInfo) = withAnnotations(List(annot))
-
- /** Add a number of annotations to this type */
- def withAnnotations(annots: List[AnnotationInfo]): Type =
- annots match {
- case Nil => this
- case _ => AnnotatedType(annots, this, NoSymbol)
- }
-
- /** Remove any annotations from this type */
- def withoutAnnotations = this
-
- /** Remove any annotations from this type and from any
- * types embedded in this type. */
- def stripAnnotations = StripAnnotationsMap(this)
-
- /** Set the self symbol of an annotated type, or do nothing
- * otherwise. */
- def withSelfsym(sym: Symbol) = this
-
- /** The selfsym of an annotated type, or NoSymbol of anything else */
- def selfsym: Symbol = NoSymbol
-
- /** The kind of this type; used for debugging */
- def kind: String = "unknown type of class "+getClass()
- }
-
-// Subclasses ------------------------------------------------------------
-
- trait UniqueType {
- override lazy val hashCode: Int = super.hashCode()
- }
-
- /** A base class for types that defer some operations
- * to their immediate supertype.
- */
- abstract class SubType extends Type {
- def supertype: Type
- override def parents: List[Type] = supertype.parents
- override def decls: Scope = supertype.decls
- override def baseType(clazz: Symbol): Type = supertype.baseType(clazz)
- override def baseTypeSeq: BaseTypeSeq = supertype.baseTypeSeq
- override def baseTypeSeqDepth: Int = supertype.baseTypeSeqDepth
- override def baseClasses: List[Symbol] = supertype.baseClasses
- override def isNotNull = supertype.isNotNull
- }
-
- case class NotNullType(override val underlying: Type) extends SubType with RewrappingTypeProxy {
- def supertype = underlying
- protected def rewrap(newtp: Type): Type = NotNullType(newtp)
- override def isNotNull: Boolean = true
- override def notNull = this
- override def deconst: Type = underlying //todo: needed?
- override def safeToString: String = underlying.toString + " with NotNull"
- override def kind = "NotNullType"
- }
-
- /** A base class for types that represent a single value
- * (single-types and this-types).
- */
- abstract class SingletonType extends SubType with SimpleTypeProxy {
- def supertype = underlying
- override def isTrivial = false
- override def isStable = true
- override def isVolatile = underlying.isVolatile
- override def widen: Type = underlying.widen
- override def baseTypeSeq: BaseTypeSeq = {
- incCounter(singletonBaseTypeSeqCount)
- underlying.baseTypeSeq prepend this
- }
- override def isHigherKinded = false // singleton type classifies objects, thus must be kind *
- override def safeToString: String = prefixString + "type"
-/*
- override def typeOfThis: Type = typeSymbol.typeOfThis
- override def bounds: TypeBounds = TypeBounds(this, this)
- override def prefix: Type = NoType
- override def typeArgs: List[Type] = List()
- override def typeParams: List[Symbol] = List()
-*/
- }
-
- /** An object representing an erroneous type */
- case object ErrorType extends Type {
- // todo see whether we can do without
- override def isError: Boolean = true
- override def decls: Scope = new ErrorScope(NoSymbol)
- override def findMember(name: Name, excludedFlags: Long, requiredFlags: Long, stableOnly: Boolean): Symbol = {
- var sym = decls lookup name
- if (sym == NoSymbol) {
- sym = NoSymbol.newErrorSymbol(name)
- decls enter sym
- }
- sym
- }
- override def baseType(clazz: Symbol): Type = this
- override def safeToString: String = "<error>"
- override def narrow: Type = this
- // override def isNullable: Boolean = true
- override def kind = "ErrorType"
- }
-
- /** An object representing an unknown type, used during type inference.
- * If you see WildcardType outside of inference it is almost certainly a bug.
- */
- case object WildcardType extends Type {
- override def isWildcard = true
- override def safeToString: String = "?"
- // override def isNullable: Boolean = true
- override def kind = "WildcardType"
- }
-
- case class BoundedWildcardType(override val bounds: TypeBounds) extends Type {
- override def isWildcard = true
- override def safeToString: String = "?" + bounds
- override def kind = "BoundedWildcardType"
- }
-
- /** An object representing a non-existing type */
- case object NoType extends Type {
- override def isTrivial: Boolean = true
- override def safeToString: String = "<notype>"
- // override def isNullable: Boolean = true
- override def kind = "NoType"
- }
-
- /** An object representing a non-existing prefix */
- case object NoPrefix extends Type {
- override def isTrivial: Boolean = true
- override def isStable: Boolean = true
- override def prefixString = ""
- override def safeToString: String = "<noprefix>"
- // override def isNullable: Boolean = true
- override def kind = "NoPrefixType"
- }
-
- /** A class for this-types of the form <sym>.this.type
- */
- abstract case class ThisType(sym: Symbol) extends SingletonType {
- //assert(sym.isClass && !sym.isModuleClass || sym.isRoot, sym)
- override def isTrivial: Boolean = sym.isPackageClass
- override def isNotNull = true
- override def typeSymbol = sym
- override def underlying: Type = sym.typeOfThis
- override def isVolatile = false
- override def isHigherKinded = sym.isRefinementClass && underlying.isHigherKinded
- override def prefixString =
- if (settings.debug.value) sym.nameString + ".this."
- else if (sym.isAnonOrRefinementClass) "this."
- else if (sym.printWithoutPrefix) ""
- else if (sym.isModuleClass) sym.fullName + "."
- else sym.nameString + ".this."
- override def safeToString: String =
- if (sym.isRoot) "<root>"
- else if (sym.isEmptyPackageClass) "<empty>"
- else super.safeToString
- override def narrow: Type = this
- override def kind = "ThisType"
- }
-
- final class UniqueThisType(sym: Symbol) extends ThisType(sym) with UniqueType { }
- object ThisType extends ThisTypeExtractor {
- def apply(sym: Symbol): Type = {
- if (!phase.erasedTypes) unique(new UniqueThisType(sym))
- else if (sym.isImplClass) sym.typeOfThis
- else sym.tpe
- }
- }
-
- /** A class for singleton types of the form <prefix>.<sym.name>.type.
- * Cannot be created directly; one should always use
- * `singleType' for creation.
- */
- abstract case class SingleType(pre: Type, sym: Symbol) extends SingletonType {
- override val isTrivial: Boolean = pre.isTrivial
- // override def isNullable = underlying.isNullable
- override def isNotNull = underlying.isNotNull
- private var underlyingCache: Type = NoType
- private var underlyingPeriod = NoPeriod
- override def underlying: Type = {
- val period = underlyingPeriod
- if (period != currentPeriod) {
- underlyingPeriod = currentPeriod
- if (!isValid(period)) {
- underlyingCache = pre.memberType(sym).resultType;
- assert(underlyingCache ne this, this)
- }
- }
- underlyingCache
- }
-
- // more precise conceptually, but causes cyclic errors: (paramss exists (_ contains sym))
- override def isImmediatelyDependent = (sym ne NoSymbol) && (sym.owner.isMethod && sym.isValueParameter)
-
- override def isVolatile : Boolean = underlying.isVolatile && !sym.isStable
-/*
- override def narrow: Type = {
- if (phase.erasedTypes) this
- else {
- val thissym = refinedType(List(this), sym.owner, EmptyScope).typeSymbol
- if (sym.owner != NoSymbol) {
- //Console.println("narrowing module " + sym + thissym.owner);
- thissym.typeOfThis = this
- }
- thissym.thisType
- }
- }
-*/
- override def narrow: Type = this
-
- override def termSymbol = sym
- override def prefix: Type = pre
- override def prefixString: String =
- if ((sym.isEmptyPackage || sym.isInterpreterWrapper || sym.isPredefModule || sym.isScalaPackage) && !settings.debug.value) ""
- else pre.prefixString + sym.nameString + "."
- override def kind = "SingleType"
- }
-
- final class UniqueSingleType(pre: Type, sym: Symbol) extends SingleType(pre, sym) with UniqueType { }
- object SingleType extends SingleTypeExtractor {
- def apply(pre: Type, sym: Symbol): Type = {
- unique(new UniqueSingleType(pre, sym))
- }
- }
-
- abstract case class SuperType(thistpe: Type, supertpe: Type) extends SingletonType {
- override val isTrivial: Boolean = thistpe.isTrivial && supertpe.isTrivial
- override def isNotNull = true;
- override def typeSymbol = thistpe.typeSymbol
- override def underlying = supertpe
- override def prefix: Type = supertpe.prefix
- override def prefixString = thistpe.prefixString.replaceAll("""this\.$""", "super.")
- override def narrow: Type = thistpe.narrow
- override def kind = "SuperType"
- }
-
- final class UniqueSuperType(thistp: Type, supertp: Type) extends SuperType(thistp, supertp) with UniqueType { }
- object SuperType extends SuperTypeExtractor {
- def apply(thistp: Type, supertp: Type): Type = {
- if (phase.erasedTypes) supertp
- else unique(new UniqueSuperType(thistp, supertp))
- }
- }
-
- /** A class for the bounds of abstract types and type parameters
- */
- abstract case class TypeBounds(lo: Type, hi: Type) extends SubType {
- def supertype = hi
- override val isTrivial: Boolean = lo.isTrivial && hi.isTrivial
- override def bounds: TypeBounds = this
- def containsType(that: Type) = that match {
- case TypeBounds(_, _) => that <:< this
- case _ => lo <:< that && that <:< hi
- }
- // override def isNullable: Boolean = NullClass.tpe <:< lo;
- override def safeToString = ">: " + lo + " <: " + hi
- override def kind = "TypeBoundsType"
- }
-
- final class UniqueTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi) with UniqueType { }
- object TypeBounds extends TypeBoundsExtractor {
- def empty: TypeBounds = apply(NothingClass.tpe, AnyClass.tpe)
- def upper(hi: Type): TypeBounds = apply(NothingClass.tpe, hi)
- def lower(lo: Type): TypeBounds = apply(lo, AnyClass.tpe)
- def apply(lo: Type, hi: Type): TypeBounds = {
- unique(new UniqueTypeBounds(lo, hi)).asInstanceOf[TypeBounds]
- }
- }
-
- /** A common base class for intersection types and class types
- */
- abstract class CompoundType extends Type {
-
- var baseTypeSeqCache: BaseTypeSeq = _
- private var baseTypeSeqPeriod = NoPeriod
- private var baseClassesCache: List[Symbol] = _
- private var baseClassesPeriod = NoPeriod
-
- override def baseTypeSeq: BaseTypeSeq = {
- val period = baseTypeSeqPeriod;
- if (period != currentPeriod) { // no caching in IDE
- baseTypeSeqPeriod = currentPeriod
- if (!isValidForBaseClasses(period)) {
- if (parents.exists(_.exists(_.isInstanceOf[TypeVar]))) {
- // rename type vars to fresh type params, take base type sequence of
- // resulting type, and rename back all the entries in that sequence
- var tvs = Set[TypeVar]()
- for (p <- parents)
- for (t <- p) t match {
- case tv: TypeVar => tvs += tv
- case _ =>
- }
- val varToParamMap: Map[Type, Symbol] = tvs map (tv => tv -> tv.origin.typeSymbol.cloneSymbol) toMap
- val paramToVarMap = varToParamMap map (_.swap)
- val varToParam = new TypeMap {
- def apply(tp: Type) = varToParamMap get tp match {
- case Some(sym) => sym.tpe
- case _ => mapOver(tp)
- }
- }
- val paramToVar = new TypeMap {
- def apply(tp: Type) = tp match {
- case TypeRef(_, tsym, _) if paramToVarMap.isDefinedAt(tsym) => paramToVarMap(tsym)
- case _ => mapOver(tp)
- }
- }
- val bts = copyRefinedType(this.asInstanceOf[RefinedType], parents map varToParam, varToParam mapOver decls).baseTypeSeq
- baseTypeSeqCache = bts lateMap paramToVar
- } else {
- incCounter(compoundBaseTypeSeqCount)
- baseTypeSeqCache = undetBaseTypeSeq
- baseTypeSeqCache = if (typeSymbol.isRefinementClass)
- memo(compoundBaseTypeSeq(this))(_.baseTypeSeq updateHead typeSymbol.tpe)
- else
- compoundBaseTypeSeq(this)
- // [Martin] suppressing memo-ization solves the problem with "same type after erasure" errors
- // when compiling with
- // scalac scala.collection.IterableViewLike.scala scala.collection.IterableLike.scala
- // I have not yet figured out precisely why this is the case.
- // My current assumption is that taking memos forces baseTypeSeqs to be computed
- // at stale types (i.e. the underlying typeSymbol has already another type).
- // I do not yet see precisely why this would cause a problem, but it looks
- // fishy in any case.
- }
- }
- //Console.println("baseTypeSeq(" + typeSymbol + ") = " + baseTypeSeqCache.toList);//DEBUG
- }
- if (baseTypeSeqCache eq undetBaseTypeSeq)
- throw new TypeError("illegal cyclic inheritance involving " + typeSymbol)
- baseTypeSeqCache
- }
-
- override def baseTypeSeqDepth: Int = baseTypeSeq.maxDepth
-
- override def baseClasses: List[Symbol] = {
- def computeBaseClasses: List[Symbol] =
- if (parents.isEmpty) List(typeSymbol)
- else {
- //Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG
- // optimized, since this seems to be performance critical
- val superclazz = parents.head
- var mixins = parents.tail
- val sbcs = superclazz.baseClasses
- var bcs = sbcs
- def isNew(clazz: Symbol): Boolean = (
- superclazz.baseTypeIndex(clazz) < 0 &&
- { var p = bcs;
- while ((p ne sbcs) && (p.head != clazz)) p = p.tail;
- p eq sbcs
- }
- );
- while (!mixins.isEmpty) {
- def addMixinBaseClasses(mbcs: List[Symbol]): List[Symbol] =
- if (mbcs.isEmpty) bcs
- else if (isNew(mbcs.head)) mbcs.head :: addMixinBaseClasses(mbcs.tail)
- else addMixinBaseClasses(mbcs.tail);
- bcs = addMixinBaseClasses(mixins.head.baseClasses)
- mixins = mixins.tail
- }
- typeSymbol :: bcs
- }
- val period = baseClassesPeriod
- if (period != currentPeriod) {
- baseClassesPeriod = currentPeriod
- if (!isValidForBaseClasses(period)) {
- baseClassesCache = null
- baseClassesCache = memo(computeBaseClasses)(typeSymbol :: _.baseClasses.tail)
- }
- }
- if (baseClassesCache eq null)
- throw new TypeError("illegal cyclic reference involving " + typeSymbol)
- baseClassesCache
- }
-
- /** The slightly less idiomatic use of Options is due to
- * performance considerations. A version using for comprehensions
- * might be too slow (this is deemed a hotspot of the type checker).
- *
- * See with Martin before changing this method.
- */
- def memo[A](op1: => A)(op2: Type => A): A = {
- def updateCache(): A = {
- intersectionWitness(parents) = new WeakReference(this)
- op1
- }
-
- intersectionWitness get parents match {
- case Some(ref) =>
- ref.get match {
- case Some(w) => if (w eq this) op1 else op2(w)
- case None => updateCache()
- }
- case None => updateCache()
- }
-
- }
-
- override def baseType(sym: Symbol): Type = {
- val index = baseTypeIndex(sym)
- if (index >= 0) baseTypeSeq(index) else NoType
- }
-
- override def narrow: Type = typeSymbol.thisType
- override def isNotNull: Boolean = parents exists (_.isNotNull)
-
- override def isStructuralRefinement: Boolean =
- typeSymbol.isAnonOrRefinementClass &&
- (decls.toList exists { entry => !entry.isConstructor && entry.allOverriddenSymbols.isEmpty })
-
- // override def isNullable: Boolean =
- // parents forall (p => p.isNullable && !p.typeSymbol.isAbstractType);
-
- override def safeToString: String =
- parents.mkString(" with ") +
- (if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
- decls.mkString("{", "; ", "}") else "")
- }
-
- /** A class representing intersection types with refinements of the form
- * `<parents_0> with ... with <parents_n> { decls }'
- * Cannot be created directly;
- * one should always use `refinedType' for creation.
- */
- case class RefinedType(override val parents: List[Type],
- override val decls: Scope) extends CompoundType {
-
- override def isHigherKinded = (
- parents.nonEmpty &&
- (parents forall (_.isHigherKinded)) &&
- !phase.erasedTypes // @MO to AM: please check this class!
- )
-
- override def typeParams =
- if (isHigherKinded) parents.head.typeParams
- else super.typeParams
-
- //@M may result in an invalid type (references to higher-order args become dangling )
- override def typeConstructor =
- copyRefinedType(this, parents map (_.typeConstructor), decls)
-
- private def dummyArgs = typeParams map (_.typeConstructor)
-
- /* MO to AM: This is probably not correct
- * If they are several higher-kinded parents with different bounds we need
- * to take the intersection of their bounds
- */
- override def normalize = {
- if (isHigherKinded) {
- typeFun(
- typeParams,
- RefinedType(
- parents map {
- case TypeRef(pre, sym, List()) => TypeRef(pre, sym, dummyArgs)
- case p => p
- },
- decls,
- typeSymbol))
- }
- else super.normalize
- }
-
- /** A refined type P1 with ... with Pn { decls } is volatile if
- * one of the parent types Pi is an abstract type, and
- * either i > 1, or decls or a following parent Pj, j > 1, contributes
- * an abstract member.
- * A type contributes an abstract member if it has an abstract member which
- * is also a member of the whole refined type. A scope `decls' contributes
- * an abstract member if it has an abstract definition which is also
- * a member of the whole type.
- */
- override def isVolatile = {
- def isVisible(m: Symbol) =
- this.nonPrivateMember(m.name).alternatives contains m
- def contributesAbstractMembers(p: Type) =
- p.deferredMembers exists isVisible
-
- ((parents exists (_.isVolatile))
- ||
- (parents dropWhile (! _.typeSymbol.isAbstractType) match {
- case ps @ (_ :: ps1) =>
- (ps ne parents) ||
- (ps1 exists contributesAbstractMembers) ||
- (decls.iterator exists (m => m.isDeferred && isVisible(m)))
- case _ =>
- false
- }))
- }
-
- override def kind = "RefinedType"
- }
-
- final class RefinedType0(parents: List[Type], decls: Scope, clazz: Symbol) extends RefinedType(parents, decls) {
- override def typeSymbol = clazz
- }
- object RefinedType extends RefinedTypeExtractor {
- def apply(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType =
- new RefinedType0(parents, decls, clazz)
- }
-
- /** A class representing a class info
- */
- case class ClassInfoType(
- override val parents: List[Type],
- override val decls: Scope,
- override val typeSymbol: Symbol) extends CompoundType
- {
-
- /** refs indices */
- private final val NonExpansive = 0
- private final val Expansive = 1
-
- /** initialization states */
- private final val UnInitialized = 0
- private final val Initializing = 1
- private final val Initialized = 2
-
- private type RefMap = Map[Symbol, immutable.Set[Symbol]]
-
- /** All type parameters reachable from given type parameter
- * by a path which contains at least one expansive reference.
- * @See Kennedy, Pierce: On Decidability of Nominal Subtyping with Variance
- */
- def expansiveRefs(tparam: Symbol) = {
- if (state == UnInitialized) {
- computeRefs()
- while (state != Initialized) propagate()
- }
- getRefs(Expansive, tparam)
- }
-
- /* The rest of this class is auxiliary code for `expansiveRefs'
- */
-
- /** The type parameters which are referenced type parameters of this class.
- * Two entries: refs(0): Non-expansive references
- * refs(1): Expansive references
- */
- private var refs: Array[RefMap] = _
-
- /** The initialization state of the class: UnInialized --> Initializing --> Initialized
- */
- private var state = UnInitialized
-
- /** Get references for given type parameter
- * @param which in {NonExpansive, Expansive}
- * @param from The type parameter from which references originate.
- */
- private def getRefs(which: Int, from: Symbol): Set[Symbol] = refs(which) get from match {
- case Some(set) => set
- case none => Set()
- }
-
- /** Augment existing refs map with reference <pre>from -> to</pre>
- * @param which <- {NonExpansive, Expansive}
- */
- private def addRef(which: Int, from: Symbol, to: Symbol) {
- refs(which) = refs(which) + (from -> (getRefs(which, from) + to))
- }
-
- /** Augment existing refs map with references <pre>from -> sym</pre>, for
- * all elements <pre>sym</pre> of set `to'.
- * @param which <- {NonExpansive, Expansive}
- */
- private def addRefs(which: Int, from: Symbol, to: Set[Symbol]) {
- refs(which) = refs(which) + (from -> (getRefs(which, from) ++ to))
- }
-
- /** The ClassInfoType which belongs to the class containing given type parameter
- */
- private def classInfo(tparam: Symbol): ClassInfoType =
- tparam.owner.info.resultType match {
- case ci: ClassInfoType => ci
- case _ => classInfo(ObjectClass) // something's wrong; fall back to safe value
- // (this can happen only for erroneous programs).
- }
-
- /** Compute initial (one-step) references and set state to `Initializing'.
- */
- private def computeRefs() {
- refs = Array(Map(), Map())
- for (tparam <- typeSymbol.typeParams) {
- val enterRefs = new TypeMap {
- def apply(tp: Type): Type = {
- tp match {
- case TypeRef(_, sym, args) =>
- for ((tparam1, arg) <- sym.info.typeParams zip args)
- if (arg contains tparam) {
- addRef(NonExpansive, tparam, tparam1)
- if (arg.typeSymbol != tparam) addRef(Expansive, tparam, tparam1)
- }
- case _ =>
- }
- mapOver(tp)
- }
- }
- for (p <- parents) enterRefs(p)
- }
- state = Initializing
- }
-
- /** Propagate to form transitive closure.
- * Set state to Initialized if no change resulted from propagation.
- * @return true iff there as a change in last iteration
- */
- private def propagate(): Boolean = {
- if (state == UnInitialized) computeRefs()
- //Console.println("Propagate "+symbol+", initial expansive = "+refs(Expansive)+", nonexpansive = "+refs(NonExpansive))//DEBUG
- val lastRefs = Array(refs(0), refs(1))
- state = Initialized
- var change = false
- for ((from, targets) <- refs(NonExpansive).iterator)
- for (target <- targets) {
- var thatInfo = classInfo(target)
- if (thatInfo.state != Initialized)
- change = change | thatInfo.propagate()
- addRefs(NonExpansive, from, thatInfo.getRefs(NonExpansive, target))
- addRefs(Expansive, from, thatInfo.getRefs(Expansive, target))
- }
- for ((from, targets) <- refs(Expansive).iterator)
- for (target <- targets) {
- var thatInfo = classInfo(target)
- if (thatInfo.state != Initialized)
- change = change | thatInfo.propagate()
- addRefs(Expansive, from, thatInfo.getRefs(NonExpansive, target))
- }
- change = change || refs(0) != lastRefs(0) || refs(1) != lastRefs(1)
- if (change) state = Initializing
- //else Console.println("Propagate "+symbol+", final expansive = "+refs(Expansive)+", nonexpansive = "+refs(NonExpansive))//DEBUG
- change
- }
-
- // override def isNullable: Boolean =
- // symbol == AnyClass ||
- // symbol != NothingClass && (symbol isSubClass ObjectClass) && !(symbol isSubClass NonNullClass);
-
- // override def isNonNull: Boolean = symbol == NonNullClass || super.isNonNull;
- override def kind = "ClassInfoType"
- }
-
- object ClassInfoType extends ClassInfoTypeExtractor
-
- class PackageClassInfoType(decls: Scope, clazz: Symbol)
- extends ClassInfoType(List(), decls, clazz)
-
- /** A class representing a constant type.
- *
- * @param value ...
- */
- abstract case class ConstantType(value: Constant) extends SingletonType {
- override def underlying: Type = value.tpe
- assert(underlying.typeSymbol != UnitClass)
- override def isTrivial: Boolean = true
- override def isNotNull = value.value != null
- override def deconst: Type = underlying
- override def safeToString: String =
- underlying.toString + "(" + value.escapedStringValue + ")"
- // override def isNullable: Boolean = value.value eq null
- // override def isNonNull: Boolean = value.value ne null
- override def kind = "ConstantType"
- }
-
- final class UniqueConstantType(value: Constant) extends ConstantType(value) with UniqueType {
- /** Save the type of 'value'. For Java enums, it depends on finding the linked class,
- * which might not be found after 'flatten'. */
- private lazy val _tpe: Type = value.tpe
- override def underlying: Type = _tpe
- }
- object ConstantType extends ConstantTypeExtractor {
- def apply(value: Constant): ConstantType = {
- unique(new UniqueConstantType(value)).asInstanceOf[ConstantType]
- }
- }
-
- private var volatileRecursions: Int = 0
- private val pendingVolatiles = new mutable.HashSet[Symbol]
-
- /** A class for named types of the form
- * `<prefix>.<sym.name>[args]'
- * Cannot be created directly; one should always use `typeRef'
- * for creation. (@M: Otherwise hashing breaks)
- *
- * @M: a higher-kinded type is represented as a TypeRef with sym.info.typeParams.nonEmpty, but args.isEmpty
- * @param pre ...
- * @param sym ...
- * @param args ...
- */
- abstract case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends Type {
-// assert(!sym.isAbstractType || pre.isStable || pre.isError)
-// assert(!pre.isInstanceOf[ClassInfoType], this)
-// assert(!(sym hasFlag (PARAM | EXISTENTIAL)) || pre == NoPrefix, this)
-// assert(args.isEmpty || !sym.info.typeParams.isEmpty, this)
-// assert(args.isEmpty || ((sym ne AnyClass) && (sym ne NothingClass))
-
- private var parentsCache: List[Type] = _
- private var parentsPeriod = NoPeriod
-
- private var baseTypeSeqCache: BaseTypeSeq = _
- private var baseTypeSeqPeriod = NoPeriod
-
- private var symInfoCache: Type = _
- private var memberInfoCache: Type = _
- private var thisInfoCache: Type = _
- private var relativeInfoCache: Type = _
-
- private var normalized: Type = null
-
-
- override def isStable: Boolean = {
- sym == NothingClass ||
- sym == SingletonClass ||
- sym.isAliasType && normalize.isStable ||
- sym.isAbstractType && (bounds.hi.typeSymbol isSubClass SingletonClass)
- }
-
- override def isVolatile: Boolean = {
- sym.isAliasType && normalize.isVolatile ||
- sym.isAbstractType && {
- // need to be careful not to fall into an infinite recursion here
- // because volatile checking is done before all cycles are detected.
- // the case to avoid is an abstract type directly or
- // indirectly upper-bounded by itself. See #2918
- try {
- volatileRecursions += 1
- if (volatileRecursions < LogVolatileThreshold)
- bounds.hi.isVolatile
- else if (pendingVolatiles(sym))
- true // we can return true here, because a cycle will be detected
- // here afterwards and an error will result anyway.
- else
- try {
- pendingVolatiles += sym
- bounds.hi.isVolatile
- } finally {
- pendingVolatiles -= sym
- }
- } finally {
- volatileRecursions -= 1
- }
- }
- }
-
- override lazy val isTrivial: Boolean =
- !sym.isTypeParameter && pre.isTrivial && args.forall(_.isTrivial)
-
- override def isNotNull =
- sym.isModuleClass || sym == NothingClass || isValueClass(sym) || super.isNotNull
-
- // @M: propagate actual type params (args) to `tp', by replacing formal type parameters with actual ones
- // if tp is higher kinded, the "actual" type arguments are types that simply reference the corresponding type parameters (unbound type variables)
- def transform(tp: Type): Type = {
- val res = tp.asSeenFrom(pre, sym.owner)
- if (sym.typeParams.isEmpty || (args exists (_.isError)) || isRaw(sym, args)/*#2266/2305*/) res
- else res.instantiateTypeParams(sym.typeParams, typeArgsOrDummies)
- }
-
- //@M! use appliedType on the polytype that represents the bounds (or if aliastype, the rhs)
- def transformInfo(tp: Type): Type = appliedType(tp.asSeenFrom(pre, sym.owner), typeArgsOrDummies)
-
- def thisInfo: Type =
- if (sym.isAliasType) normalize
- else if (!sym.isNonClassType) sym.info
- else {
- val symInfo = sym.info
- if (thisInfoCache == null || (symInfo ne symInfoCache)) {
- symInfoCache = symInfo
- thisInfoCache = transformInfo(symInfo)
- }
- thisInfoCache
- }
-
- def relativeInfo: Type =
- if (!sym.isNonClassType) pre.memberInfo(sym)
- else {
- val memberInfo = pre.memberInfo(sym)
- if (relativeInfoCache == null || (memberInfo ne memberInfoCache)) {
- memberInfoCache = memberInfo
- relativeInfoCache = transformInfo(memberInfo)
- }
- relativeInfoCache
- }
-
- override def typeSymbol = if (sym.isAliasType) normalize.typeSymbol else sym
- override def termSymbol = if (sym.isAliasType) normalize.termSymbol else super.termSymbol
- override def typeSymbolDirect = sym
- override def termSymbolDirect = super.termSymbol
-
-/* @MAT
-whenever you see `tp.typeSymbol.isXXXX' and then act on tp based on that predicate, you're on thin ice,
-as `typeSymbol' (and `prefix') automatically normalize, but the other inspectors don't.
-In other words, even if `tp.normalize.sym.isXXX' is true, `tp.sym.isXXX' may be false (if sym were a public method to access the non-normalized typeSymbol)...
-
-In retrospect, I think `tp.typeSymbol.isXXX' or (worse) `tp.typeSymbol==XXX' should be replaced by `val tp = tp0.asXXX'.
-A type's typeSymbol should never be inspected directly.
-*/
-
- override def bounds: TypeBounds =
- if (sym.isAbstractType) thisInfo.bounds // transform(thisInfo.bounds).asInstanceOf[TypeBounds] // ??? seems to be doing asSeenFrom twice
- else super.bounds
-
- override def parents: List[Type] = {
- val period = parentsPeriod
- if (period != currentPeriod) {
- parentsPeriod = currentPeriod
- if (!isValidForBaseClasses(period)) {
- parentsCache = thisInfo.parents map transform
- } else if (parentsCache == null) { // seems this can happen if things are currupted enough, see #2641
- parentsCache = List(AnyClass.tpe)
- }
- }
- parentsCache
- }
- override def typeOfThis = transform(sym.typeOfThis)
-
-/*
- override def narrow =
- if (sym.isModuleClass) transform(sym.thisType)
- else if (sym.isAliasType) normalize.narrow
- else super.narrow
-*/
- override def narrow =
- if (sym.isModuleClass) singleType(pre, sym.sourceModule)
- else if (sym.isAliasType) normalize.narrow
- else super.narrow
-
- override def prefix: Type =
- if (sym.isAliasType) normalize.prefix
- else pre
-
- override def typeArgs: List[Type] = args
- private def typeArgsOrDummies = if (!isHigherKinded) args else dummyArgs
- // def hasFishyArgs = args == dummyArgs
-
- // @MAT was typeSymbol.unsafeTypeParams, but typeSymbol normalizes now
- private def typeParamsDirect =
- if (isDefinitionsInitialized) sym.typeParams
- else sym.unsafeTypeParams
-
- // placeholders derived from type params
- private def dummyArgs = {
- // @PP to @AM: this appears to me a place where
- // higher-order tparams are going off the beam.
- // if (sym.isAbstractType) { something goes wrong }
-
- //@M must be .typeConstructor
- typeParamsDirect map (_.typeConstructor)
- }
-
- // (!result.isEmpty) IFF isHigherKinded
- override def typeParams: List[Symbol] = if (isHigherKinded) typeParamsDirect else List()
-
- // note: does not go through typeRef. There's no need to because
- // neither `pre` nor `sym` changes. And there's a performance
- // advantage to call TypeRef directly.
- override def typeConstructor = TypeRef(pre, sym, Nil)
-
- // A reference (in a Scala program) to a type that has type
- // parameters, but where the reference does not include type
- // arguments. Note that it doesn't matter whether the symbol refers
- // to a java or scala symbol, it does matter whether it occurs in
- // java or scala code. TypeRefs w/o type params that occur in java
- // signatures/code are considered raw types, and are represented as
- // existential types.
- override def isHigherKinded = args.isEmpty && typeParamsDirect.nonEmpty
-
- override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
- if (isHigherKinded) {
- val substTps = formals.intersect(typeParams)
-
- if (sameLength(substTps, typeParams))
- copyTypeRef(this, pre, sym, actuals)
- else if (sameLength(formals, actuals)) // partial application (needed in infer when bunching type arguments from classes and methods together)
- copyTypeRef(this, pre, sym, dummyArgs).subst(formals, actuals)
- else ErrorType
- }
- else
- super.instantiateTypeParams(formals, actuals)
-
-
- /** @pre: sym.info.typeParams.length == typeArgs.length */
- @inline private def betaReduce: Type = {
- // isHKSubType0 introduces synthetic type params so that
- // betaReduce can first apply sym.info to typeArgs before calling
- // asSeenFrom. asSeenFrom then skips synthetic type params, which
- // are used to reduce HO subtyping to first-order subtyping, but
- // which can't be instantiated from the given prefix and class.
- transform(sym.info.resultType)
- //
- // this crashes pos/depmet_implicit_tpbetareduce.scala
- // appliedType(sym.info, typeArgs).asSeenFrom(pre, sym.owner)
- }
-
- // @M: initialize (by sym.info call) needed (see test/files/pos/ticket0137.scala)
- @inline private def etaExpand: Type = {
- val tpars = sym.info.typeParams // must go through sym.info for typeParams to initialise symbol
- typeFunAnon(tpars, copyTypeRef(this, pre, sym, tpars map (_.tpeHK))) // todo: also beta-reduce?
- }
-
- override def dealias: Type =
- if (sym.isAliasType && sameLength(sym.info.typeParams, args)) {
- betaReduce.dealias
- } else this
-
- private def normalize0: Type =
- if (pre eq WildcardType) WildcardType // arises when argument-dependent types are approximated (see def depoly in implicits)
- else if (isHigherKinded) etaExpand // eta-expand, subtyping relies on eta-expansion of higher-kinded types
- else if (sym.isAliasType && sameLength(sym.info.typeParams, args))
- betaReduce.normalize // beta-reduce, but don't do partial application -- cycles have been checked in typeRef
- else if (sym.isRefinementClass)
- sym.info.normalize // I think this is okay, but see #1241 (r12414), #2208, and typedTypeConstructor in Typers
- else {
- if(sym.isAliasType) ErrorType //println("!!error: "+(pre, sym, sym.info, sym.info.typeParams, args))
- else super.normalize
- }
-
- // TODO: test case that is compiled in a specific order and in different runs
- override def normalize: Type = {
- if (phase.erasedTypes) normalize0
- else {
- if (normalized == null)
- normalized = normalize0
-
- normalized
- }
- }
-
- override def decls: Scope = {
- sym.info match {
- case TypeRef(_, sym1, _) =>
- assert(sym1 != sym, this) // @MAT was != typeSymbol
- case _ =>
- }
- thisInfo.decls
- }
-
- override def baseType(clazz: Symbol): Type =
- if (sym == clazz) this
- else if (sym.isClass) transform(sym.info.baseType(clazz))
- else
- try {
- basetypeRecursions += 1
- if (basetypeRecursions < LogPendingBaseTypesThreshold)
- relativeInfo.baseType(clazz)
- else if (pendingBaseTypes contains this)
- if (clazz == AnyClass) clazz.tpe else NoType
- else
- try {
- pendingBaseTypes += this
- relativeInfo.baseType(clazz)
- } finally {
- pendingBaseTypes -= this
- }
- } finally {
- basetypeRecursions -= 1
- }
-
- override def baseTypeSeq: BaseTypeSeq = {
- val period = baseTypeSeqPeriod
- if (period != currentPeriod) {
- baseTypeSeqPeriod = currentPeriod
- if (!isValidForBaseClasses(period)) {
- incCounter(typerefBaseTypeSeqCount)
- baseTypeSeqCache = undetBaseTypeSeq
- baseTypeSeqCache =
- if (sym.isAbstractType) transform(bounds.hi).baseTypeSeq prepend this
- else sym.info.baseTypeSeq map transform
- }
- }
- if (baseTypeSeqCache == undetBaseTypeSeq)
- throw new TypeError("illegal cyclic inheritance involving " + sym)
- baseTypeSeqCache
- }
-
- override def baseTypeSeqDepth: Int = baseTypeSeq.maxDepth
-
- override def baseClasses: List[Symbol] = thisInfo.baseClasses
-
- // override def isNullable: Boolean = sym.info.isNullable
-
- override def safeToString: String = {
- if (!settings.debug.value) {
- this match {
- case TypeRef(_, RepeatedParamClass, arg :: _) => return arg + "*"
- case TypeRef(_, ByNameParamClass, arg :: _) => return "=> " + arg
- case _ =>
- if (isFunctionType(this)) {
- val targs = normalize.typeArgs
- // Aesthetics: printing Function1 as T => R rather than (T) => R
- // ...but only if it's not a tuple, so ((T1, T2)) => R is distinguishable
- // from (T1, T2) => R.
- return (targs match {
- case in :: out :: Nil if !isTupleTypeOrSubtype(in) =>
- // A => B => C should be (A => B) => C or A => (B => C)
- val in_s = if (isFunctionType(in)) "(" + in + ")" else "" + in
- val out_s = if (isFunctionType(out)) "(" + out + ")" else "" + out
- in_s + " => " + out_s
- case xs =>
- xs.init.mkString("(", ", ", ")") + " => " + xs.last
- })
- }
- else if (isTupleTypeOrSubtype(this))
- return normalize.typeArgs.mkString("(", ", ", if (hasLength(normalize.typeArgs, 1)) ",)" else ")")
- else if (sym.isAliasType && prefixChain.exists(_.termSymbol.isSynthetic)) {
- val normed = normalize;
- if (normed ne this) return normed.toString
- }
- }
- }
- val monopart =
- if (!settings.debug.value &&
- (shorthands contains sym.fullName) &&
- (sym.ownerChain forall (_.isClass))) // ensure that symbol is not a local copy with a name coincidence
- sym.name.toString
- else
- pre.prefixString + sym.nameString
-
- var str = monopart + (if (args.isEmpty) "" else args.mkString("[", ",", "]"))
- if (sym.isPackageClass)
- packagePrefix + str
- else if (sym.isModuleClass)
- objectPrefix + str
- else if (sym.isAnonymousClass && sym.isInitialized && !settings.debug.value && !phase.erasedTypes)
- thisInfo.parents.mkString(" with ") + {
- if (sym.isStructuralRefinement)
- ((decls.toList filter { entry =>
- !entry.isConstructor && entry.allOverriddenSymbols.isEmpty && !entry.isPrivate
- }) map { entry => entry.defString }).mkString("{", "; ", "}")
- else
- ""
- }
- else if (sym.isRefinementClass && sym.isInitialized)
- thisInfo.toString
- else str
- }
-
- override def prefixString = "" + (
- if (settings.debug.value)
- super.prefixString
- else if (sym.printWithoutPrefix)
- ""
- else if (sym.isPackageClass)
- sym.fullName + "."
- else if (isStable && nme.isSingletonName(sym.name))
- nme.dropSingletonName(sym.name) + "."
- else
- super.prefixString
- )
- override def kind = "TypeRef"
- }
-
- final class UniqueTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends TypeRef(pre, sym, args) with UniqueType { }
- object TypeRef extends TypeRefExtractor {
- def apply(pre: Type, sym: Symbol, args: List[Type]): Type = {
- unique(new UniqueTypeRef(pre, sym, args))
- }
- }
-
- /** A class representing a method type with parameters.
- * Note that a parameterless method is represented by a NullaryMethodType:
- *
- * def m(): Int MethodType(Nil, Int)
- * def m: Int NullaryMethodType(Int)
- */
- case class MethodType(override val params: List[Symbol],
- override val resultType: Type) extends Type {
- override def isTrivial: Boolean = isTrivial0
- private lazy val isTrivial0 =
- resultType.isTrivial && params.forall{p => p.tpe.isTrivial && (
- !settings.YdepMethTpes.value || !(params.exists(_.tpe.contains(p)) || resultType.contains(p)))
- }
-
- def isImplicit = params.nonEmpty && params.head.isImplicit
- def isJava = false // can we do something like for implicits? I.e. do Java methods without parameters need to be recognized?
-
- //assert(paramTypes forall (pt => !pt.typeSymbol.isImplClass))//DEBUG
- override def paramSectionCount: Int = resultType.paramSectionCount + 1
-
- override def paramss: List[List[Symbol]] = params :: resultType.paramss
-
- override def paramTypes = params map (_.tpe)
-
- override def boundSyms = immutable.Set[Symbol](params ++ resultType.boundSyms: _*)
-
- // AM to TR: #dropNonContraintAnnotations
- // this is needed for plugins to work correctly, only TypeConstraint annotations are supposed to be carried over
- // TODO: this should probably be handled in a more structured way in adapt -- remove this map in resultType and watch the continuations tests fail
- object dropNonContraintAnnotations extends TypeMap {
- override val dropNonConstraintAnnotations = true
- def apply(x: Type) = mapOver(x)
- }
-
- override def resultType(actuals: List[Type]) =
- if (isTrivial) dropNonContraintAnnotations(resultType)
- else {
- if (sameLength(actuals, params)) {
- val idm = new InstantiateDependentMap(params, actuals)
- val res = idm(resultType)
- // println("resultTypeDep "+(params, actuals, resultType, idm.existentialsNeeded, "\n= "+ res))
- existentialAbstraction(idm.existentialsNeeded, res)
- } else {
- // Thread.dumpStack()
- // println("resultType "+(params, actuals, resultType))
- if (phase.erasedTypes) resultType
- else existentialAbstraction(params, resultType)
- }
- }
-
- // implicit args can only be depended on in result type: TODO this may be generalised so that the only constraint is dependencies are acyclic
- def approximate: MethodType = MethodType(params, resultApprox)
-
- override def finalResultType: Type = resultType.finalResultType
-
- override def safeToString = paramString(this) + resultType
-
- override def cloneInfo(owner: Symbol) = {
- val vparams = cloneSymbols(params, owner)
- copyMethodType(this, vparams, resultType.substSym(params, vparams).cloneInfo(owner))
- }
-
- override def atOwner(owner: Symbol) =
- if ((params exists (_.owner != owner)) || (resultType.atOwner(owner) ne resultType))
- cloneInfo(owner)
- else
- this
-
- override def kind = "MethodType"
- }
-
- object MethodType extends MethodTypeExtractor
-
- class JavaMethodType(ps: List[Symbol], rt: Type) extends MethodType(ps, rt) {
- override def isJava = true
- }
-
- case class NullaryMethodType(override val resultType: Type) extends Type {
- // AM to TR: #dropNonContraintAnnotations
- // change isTrivial to the commented version and watch continuations-run/t3225.scala fail
- // isTrivial implies asSeenFrom is bypassed, since it's supposed to be the identity map
- // it's not really the identity due to dropNonContraintAnnotations
- override def isTrivial: Boolean = false //resultType.isTrivial -- `false` to make continuations plugin work (so that asSeenFromMap drops non-constrain annotations even when type doesn't change otherwise)
- override def prefix: Type = resultType.prefix
- override def narrow: Type = resultType.narrow
- override def finalResultType: Type = resultType.finalResultType
- override def termSymbol: Symbol = resultType.termSymbol
- override def typeSymbol: Symbol = resultType.typeSymbol
- override def parents: List[Type] = resultType.parents
- override def decls: Scope = resultType.decls
- override def baseTypeSeq: BaseTypeSeq = resultType.baseTypeSeq
- override def baseTypeSeqDepth: Int = resultType.baseTypeSeqDepth
- override def baseClasses: List[Symbol] = resultType.baseClasses
- override def baseType(clazz: Symbol): Type = resultType.baseType(clazz)
- override def boundSyms = resultType.boundSyms
- override def isVolatile = resultType.isVolatile
- override def safeToString: String = "=> "+ resultType
- override def kind = "NullaryMethodType"
- }
-
- object NullaryMethodType extends NullaryMethodTypeExtractor
-
- /** A type function or the type of a polymorphic value (and thus of kind *).
- *
- * Before the introduction of NullaryMethodType, a polymorphic nullary method (e.g, def isInstanceOf[T]: Boolean)
- * used to be typed as PolyType(tps, restpe), and a monomorphic one as PolyType(Nil, restpe)
- * This is now: PolyType(tps, NullaryMethodType(restpe)) and NullaryMethodType(restpe)
- * by symmetry to MethodTypes: PolyType(tps, MethodType(params, restpe)) and MethodType(params, restpe)
- *
- * Thus, a PolyType(tps, TypeRef(...)) unambiguously indicates a type function (which results from eta-expanding a type constructor alias).
- * Similarly, PolyType(tps, ClassInfoType(...)) is a type constructor.
- *
- * A polytype is of kind * iff its resultType is a (nullary) method type.
- */
- case class PolyType(override val typeParams: List[Symbol], override val resultType: Type)
- extends Type {
- //assert(!(typeParams contains NoSymbol), this)
- assert(typeParams nonEmpty, this) // used to be a marker for nullary method type, illegal now (see @NullaryMethodType)
-
- override def paramSectionCount: Int = resultType.paramSectionCount
- override def paramss: List[List[Symbol]] = resultType.paramss
- override def params: List[Symbol] = resultType.params
- override def paramTypes: List[Type] = resultType.paramTypes
- override def parents: List[Type] = resultType.parents
- override def decls: Scope = resultType.decls
- override def termSymbol: Symbol = resultType.termSymbol
- override def typeSymbol: Symbol = resultType.typeSymbol
- override def boundSyms = immutable.Set[Symbol](typeParams ++ resultType.boundSyms: _*)
- override def prefix: Type = resultType.prefix
- override def baseTypeSeq: BaseTypeSeq = resultType.baseTypeSeq
- override def baseTypeSeqDepth: Int = resultType.baseTypeSeqDepth
- override def baseClasses: List[Symbol] = resultType.baseClasses
- override def baseType(clazz: Symbol): Type = resultType.baseType(clazz)
- override def narrow: Type = resultType.narrow
- override def isVolatile = resultType.isVolatile
- override def finalResultType: Type = resultType.finalResultType
-
- /** @M: typeDefSig wraps a TypeBounds in a PolyType
- * to represent a higher-kinded type parameter
- * wrap lo&hi in polytypes to bind variables
- */
- override def bounds: TypeBounds =
- TypeBounds(typeFun(typeParams, resultType.bounds.lo),
- typeFun(typeParams, resultType.bounds.hi))
-
- override def isHigherKinded = !typeParams.isEmpty
-
- override def safeToString = typeParamsString(this) + resultType
-
- override def cloneInfo(owner: Symbol) = {
- val tparams = cloneSymbols(typeParams, owner)
- PolyType(tparams, resultType.substSym(typeParams, tparams).cloneInfo(owner))
- }
-
- override def atOwner(owner: Symbol) =
- if ((typeParams exists (_.owner != owner)) || (resultType.atOwner(owner) ne resultType))
- cloneInfo(owner)
- else
- this
-
- override def kind = "PolyType"
- }
-
- object PolyType extends PolyTypeExtractor
-
- case class ExistentialType(quantified: List[Symbol],
- override val underlying: Type) extends RewrappingTypeProxy
- {
- override protected def rewrap(newtp: Type) = existentialAbstraction(quantified, newtp)
-
- override def isTrivial = false
- override def isStable: Boolean = false
- override def bounds = TypeBounds(maybeRewrap(underlying.bounds.lo), maybeRewrap(underlying.bounds.hi))
- override def parents = underlying.parents map maybeRewrap
- override def boundSyms = quantified.toSet
- override def prefix = maybeRewrap(underlying.prefix)
- override def typeArgs = underlying.typeArgs map maybeRewrap
- override def params = underlying.params mapConserve { param =>
- val tpe1 = rewrap(param.tpe)
- if (tpe1 eq param.tpe) param else param.cloneSymbol.setInfo(tpe1)
- }
- override def paramTypes = underlying.paramTypes map maybeRewrap
- override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = {
-// maybeRewrap(underlying.instantiateTypeParams(formals, actuals))
-
- val quantified1 = new SubstTypeMap(formals, actuals) mapOver quantified
- val underlying1 = underlying.instantiateTypeParams(formals, actuals)
- if ((quantified1 eq quantified) && (underlying1 eq underlying)) this
- else existentialAbstraction(quantified1, underlying1.substSym(quantified, quantified1))
-
- }
- override def baseType(clazz: Symbol) = maybeRewrap(underlying.baseType(clazz))
- override def baseTypeSeq = underlying.baseTypeSeq map maybeRewrap
- override def isHigherKinded = false
-
- override def skolemizeExistential(owner: Symbol, origin: AnyRef) = {
- def mkSkolem(tparam: Symbol): Symbol = {
- val skolem = new TypeSkolem(
- if (owner == NoSymbol) tparam.owner else owner,
- tparam.pos, tparam.name.toTypeName, origin)
- skolem.setInfo(tparam.info.cloneInfo(skolem))
- .setFlag(tparam.flags | EXISTENTIAL)
- .resetFlag(PARAM)
- }
- val skolems = quantified map mkSkolem
- for (skolem <- skolems)
- skolem setInfo skolem.info.substSym(quantified, skolems)
- underlying.substSym(quantified, skolems)
- }
-
- private def wildcardArgsString(available: Set[Symbol], args: List[Type]): List[String] = args match {
- case TypeRef(_, sym, _) :: args1 if (available contains sym) =>
- ("_"+sym.infoString(sym.info)) :: wildcardArgsString(available - sym, args1)
- case arg :: args1 if !(quantified exists (arg contains _)) =>
- arg.toString :: wildcardArgsString(available, args1)
- case _ =>
- List()
- }
-
- override def safeToString: String = {
- if (!(quantified exists (_.isSingletonExistential)) && !settings.debug.value)
- // try to represent with wildcards first
- underlying match {
- case TypeRef(pre, sym, args) if args.nonEmpty =>
- val wargs = wildcardArgsString(quantified.toSet, args)
- if (sameLength(wargs, args))
- return TypeRef(pre, sym, List()) + wargs.mkString("[", ", ", "]")
- case _ =>
- }
- var ustr = underlying.toString
- underlying match {
- case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) => ustr = "("+ustr+")"
- case _ =>
- }
- val str =
- ustr+(quantified map (_.existentialToString) mkString(" forSome { ", "; ", " }"))
- if (settings.explaintypes.value) "("+str+")" else str
- }
-
- override def cloneInfo(owner: Symbol) = {
- val tparams = cloneSymbols(quantified, owner)
- ExistentialType(tparams, underlying.substSym(quantified, tparams))
- }
-
- override def atOwner(owner: Symbol) =
- if (quantified exists (_.owner != owner)) cloneInfo(owner) else this
-
- override def kind = "ExistentialType"
-
- def withTypeVars(op: Type => Boolean): Boolean = withTypeVars(op, AnyDepth)
-
- def withTypeVars(op: Type => Boolean, depth: Int): Boolean = {
- val quantifiedFresh = cloneSymbols(quantified)
- val tvars = quantifiedFresh map (tparam => TypeVar(tparam))
- val underlying1 = underlying.instantiateTypeParams(quantified, tvars) // fuse subst quantified -> quantifiedFresh -> tvars
- op(underlying1) && {
- solve(tvars, quantifiedFresh, quantifiedFresh map (x => 0), false, depth) &&
- isWithinBounds(NoPrefix, NoSymbol, quantifiedFresh, tvars map (_.constr.inst))
- }
- }
- }
-
- object ExistentialType extends ExistentialTypeExtractor
-
- /** A class containing the alternatives and type prefix of an overloaded symbol.
- * Not used after phase `typer'.
- */
- case class OverloadedType(pre: Type, alternatives: List[Symbol]) extends Type {
- override def prefix: Type = pre
- override def safeToString =
- (alternatives map pre.memberType).mkString("", " <and> ", "")
- override def kind = "OverloadedType"
- }
-
- /** A class remembering a type instantiation for some a set of overloaded
- * polymorphic symbols.
- * Not used after phase `typer'.
- */
- case class AntiPolyType(pre: Type, targs: List[Type]) extends Type {
- override def safeToString =
- pre.toString + targs.mkString("(with type arguments ", ",", ")");
- override def memberType(sym: Symbol) = appliedType(pre.memberType(sym), targs)
-// override def memberType(sym: Symbol) = pre.memberType(sym) match {
-// case PolyType(tparams, restp) =>
-// restp.subst(tparams, targs)
-// /* I don't think this is needed, as existential types close only over value types
-// case ExistentialType(tparams, qtpe) =>
-// existentialAbstraction(tparams, qtpe.memberType(sym))
-// */
-// case ErrorType =>
-// ErrorType
-// }
- override def kind = "AntiPolyType"
- }
-
- //private var tidCount = 0 //DEBUG
-
- //@M
- // a TypeVar used to be a case class with only an origin and a constr
- // then, constr became mutable (to support UndoLog, I guess),
- // but pattern-matching returned the original constr0 (a bug)
- // now, pattern-matching returns the most recent constr
- object TypeVar {
- // encapsulate suspension so we can automatically link the suspension of cloned
- // typevars to their original if this turns out to be necessary
-/*
- def Suspension = new Suspension
- class Suspension {
- private val suspended = mutable.HashSet[TypeVar]()
- def suspend(tv: TypeVar): Unit = {
- tv.suspended = true
- suspended += tv
- }
- def resumeAll(): Unit = {
- for (tv <- suspended) {
- tv.suspended = false
- }
- suspended.clear()
- }
- }
-*/
- def unapply(tv: TypeVar): Some[(Type, TypeConstraint)] = Some((tv.origin, tv.constr))
- def apply(origin: Type, constr: TypeConstraint) = new TypeVar(origin, constr, List(), List())
- // TODO why not initialise TypeConstraint with bounds of tparam?
- // @PP: I tried that, didn't work out so well for me.
- def apply(tparam: Symbol) = new TypeVar(tparam.tpeHK, new TypeConstraint, List(), tparam.typeParams)
- def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]) =
- new TypeVar(origin, constr, args, params)
- }
-
- /** A class representing a type variable
- * Not used after phase `typer'.
- * A higher-kinded type variable has type arguments (a list of Type's) and type parameters (list of Symbols)
- * A TypeVar whose list of args is non-empty can only be instantiated by a higher-kinded type that can be applied to these args
- * a typevar is much like a typeref, except it has special logic for type equality/subtyping
- */
- class TypeVar(val origin: Type, val constr0: TypeConstraint, override val typeArgs: List[Type], override val params: List[Symbol]) extends Type {
- // params are needed to keep track of variance (see mapOverArgs in SubstMap)
- assert(typeArgs.isEmpty || sameLength(typeArgs, params))
- // var tid = { tidCount += 1; tidCount } //DEBUG
-
- /** The constraint associated with the variable */
- var constr = constr0
- def instValid = constr.instValid
-
- /** The variable's skolemization level */
- val level = skolemizationLevel
-
- /**
- * two occurrences of a higher-kinded typevar, e.g. ?CC[Int] and ?CC[String], correspond to
- * *two instances* of TypeVar that share the *same* TypeConstraint
- * constr for ?CC only tracks type constructors anyway, so when ?CC[Int] <:< List[Int] and ?CC[String] <:< Iterable[String]
- * ?CC's hibounds contains List and Iterable
- */
- def applyArgs(newArgs: List[Type]): TypeVar =
- if (newArgs.isEmpty) this // SubstMap relies on this (though this check is redundant when called from appliedType...)
- else TypeVar(origin, constr, newArgs, params) // @M TODO: interaction with undoLog??
- // newArgs.length may differ from args.length (could've been empty before)
- // example: when making new typevars, you start out with C[A], then you replace C by ?C, which should yield ?C[A], then A by ?A, ?C[?A]
- // we need to track a TypeVar's arguments, and map over them (see TypeMap::mapOver)
- // TypeVars get applied to different arguments over time (in asSeenFrom)
- // -- see pos/tcpoly_infer_implicit_tuplewrapper.scala
- // thus: make new TypeVar's for every application of a TV to args,
- // inference may generate several TypeVar's for a single type parameter that must be inferred,
- // only one of them is in the set of tvars that need to be solved, but
- // they share the same TypeConstraint instance
-
- // <region name="constraint mutators + undoLog">
- // invariant: before mutating constr, save old state in undoLog (undoLog is used to reset constraints to avoid piling up unrelated ones)
- def setInst(tp: Type) {
-// assert(!(tp containsTp this), this)
- undoLog record this
- constr.inst = tp
- }
-
- def addLoBound(tp: Type, isNumericBound: Boolean = false) {
- assert(tp != this) // implies there is a cycle somewhere (?)
- //println("addLoBound: "+(safeToString, debugString(tp))) //DEBUG
- undoLog record this
- constr.addLoBound(tp, isNumericBound)
- }
-
- def addHiBound(tp: Type, isNumericBound: Boolean = false) {
- // assert(tp != this)
- //println("addHiBound: "+(safeToString, debugString(tp))) //DEBUG
- undoLog record this
- constr.addHiBound(tp, isNumericBound)
- }
- // </region>
-
- // ignore subtyping&equality checks while true -- see findMember
- private[Types] var suspended = false
-
- /** Called when a TypeVar is involved in a subtyping check. Result is whether
- * this TypeVar could plausibly be a [super/sub]type of argument `tp` and if so,
- * tracks tp as a [lower/upper] bound of this TypeVar.
- *
- * if (isLowerBound) this typevar could be a subtype, track tp as a lower bound
- * if (!isLowerBound) this typevar could be a supertype, track tp as an upper bound
- *
- * If isNumericBound is true, the subtype check is performed with weak_<:< instead of <:<.
- */
- def registerBound(tp: Type, isLowerBound: Boolean, isNumericBound: Boolean = false): Boolean = {
- // println("regBound: "+(safeToString, debugString(tp), isLowerBound)) //@MDEBUG
- if (isLowerBound) assert(tp != this)
-
- def checkSubtypeLower(tp1: Type, tp2: Type) =
- if (isNumericBound) tp1 weak_<:< tp2
- else tp1 <:< tp2
-
- // swaps the arguments if it's an upper bound
- def checkSubtype(tp1: Type, tp2: Type) =
- if (isLowerBound) checkSubtypeLower(tp1, tp2)
- else checkSubtypeLower(tp2, tp1)
-
- def addBound(tp: Type) = {
- if (isLowerBound) addLoBound(tp, isNumericBound)
- else addHiBound(tp, isNumericBound)
- // println("addedBound: "+(this, tp)) // @MDEBUG
- true
- }
-
- /** Simple case: type arguments can be ignored, because either this typevar has
- * no type parameters, or we are comparing to Any/Nothing.
- *
- * The latter condition is needed because HK unification is limited to constraints of the shape
- * TC1[T1,..., TN] <: TC2[T'1,...,T'N]
- * which would preclude the following important constraints:
- * Nothing <: ?TC[?T]
- * ?TC[?T] <: Any
- */
- def unifySimple = (params.isEmpty || tp.typeSymbol == NothingClass || tp.typeSymbol == AnyClass) &&
- addBound(tp)
-
- /** Full case: involving a check of the form
- * TC1[T1,..., TN] <: TC2[T'1,...,T'N]
- * Checks subtyping of higher-order type vars, and uses variances as defined in the
- * type parameter we're trying to infer (the result will be sanity-checked later)
- */
- def unifyFull(tp: Type) = sameLength(typeArgs, tp.typeArgs) && { // this is a higher-kinded type var with same arity as tp
- // side effect: adds the type constructor itself as a bound
- addBound(tp.typeConstructor)
- if (isLowerBound) isSubArgs(tp.typeArgs, typeArgs, params)
- else isSubArgs(typeArgs, tp.typeArgs, params)
- }
-
- /** TODO: need positive/negative test cases demonstrating this is correct.
- */
- def unifyParents =
- if (isLowerBound) tp.parents exists unifyFull
- else tp.parents forall unifyFull
-
- // TODO: fancier unification, maybe rewrite constraint as follows?
- // val sym = constr.hiBounds map {_.typeSymbol} find { _.typeParams.length == typeArgs.length}
- // this <: tp.baseType(sym)
- if (suspended) checkSubtype(tp, origin)
- else if (constr.instValid) checkSubtype(tp, constr.inst) // type var is already set
- else isRelatable(tp) && { // gradually let go of some type precision in hopes of finding a type that has the same shape as the type variable
- // okay, this just screams "CLEAN ME UP" -- I think we could use tp.widen instead of tp straight from the get-go in registerBound, since we don't infer singleton types anyway (but maybe that'll change?)
- unifySimple || unifyFull(tp) || unifyFull(tp.dealias) || unifyFull(tp.widen) || unifyFull(tp.widen.dealias) || unifyParents
- }
- }
-
- def registerTypeEquality(tp: Type, typeVarLHS: Boolean): Boolean = {
- //println("regTypeEq: "+(safeToString, debugString(tp), typeVarLHS)) //@MDEBUG
- def checkIsSameType(tp: Type) =
- if(typeVarLHS) constr.inst =:= tp
- else tp =:= constr.inst
-
- if (suspended) tp =:= origin
- else if (constr.instValid) checkIsSameType(tp)
- else isRelatable(tp) && {
- val newInst = wildcardToTypeVarMap(tp)
- if (constr.isWithinBounds(newInst)) {
- setInst(tp)
- true
- } else false
- }
- }
-
- /**
- * ?A.T =:= tp is rewritten as the constraint ?A <: {type T = tp}
- *
- * TODO: make these constraints count (incorporate them into implicit search in applyImplicitArgs)
- * (T corresponds to @param sym)
- */
- def registerTypeSelection(sym: Symbol, tp: Type): Boolean = {
- val bound = refinedType(List(WildcardType), NoSymbol)
- val bsym = bound.typeSymbol.newAliasType(NoPosition, sym.name.toTypeName)
- bsym setInfo tp
- bound.decls enter bsym
- registerBound(bound, false)
- }
-
- /** Can this variable be related in a constraint to type `tp'?
- * This is not the case if `tp' contains type skolems whose
- * skolemization level is higher than the level of this variable.
- */
- def isRelatable(tp: Type): Boolean =
- !tp.exists { t =>
- t.typeSymbol match {
- case ts: TypeSkolem => ts.level > level
- case _ => false
- }
- }
-
- override val isHigherKinded = typeArgs.isEmpty && params.nonEmpty
-
- override def normalize: Type =
- if (constr.instValid) constr.inst
- // get here when checking higher-order subtyping of the typevar by itself
- // TODO: check whether this ever happens?
- else if (isHigherKinded) typeFun(params, applyArgs(params map (_.typeConstructor)))
- else super.normalize
-
- override def typeSymbol = origin.typeSymbol
- override def isStable = origin.isStable
- override def isVolatile = origin.isVolatile
-
- private def levelString = if (settings.explaintypes.value) level else ""
- override def safeToString = constr.inst match {
- case null => "<null " + origin + ">"
- case NoType => "?" + levelString + origin + typeArgsString(this)
- case x => "" + x
- }
- override def kind = "TypeVar"
-
- def cloneInternal = {
- // cloning a suspended type variable when it's suspended will cause the clone
- // to never be resumed with the current implementation
- assert(!suspended)
- TypeVar(origin, constr cloneInternal, typeArgs, params) // @M TODO: clone args/params?
- }
- }
-
- /** A type carrying some annotations. Created by the typechecker
- * when eliminating ``Annotated'' trees (see typedAnnotated).
- *
- * @param annotations the list of annotations on the type
- * @param underlying the type without the annotation
- * @param selfsym a "self" symbol with type <code>underlying</code>;
- * only available if -Yself-in-annots is turned on. Can be NoSymbol
- * if it is not used.
- */
- case class AnnotatedType(override val annotations: List[AnnotationInfo],
- override val underlying: Type,
- override val selfsym: Symbol)
- extends RewrappingTypeProxy {
-
- assert(!annotations.isEmpty)
-
- override protected def rewrap(tp: Type) = AnnotatedType(annotations, tp, selfsym)
-
- override def isTrivial: Boolean = isTrivial0
- private lazy val isTrivial0 = underlying.isTrivial && (annotations forall (_.isTrivial))
-
- override def safeToString: String = {
- val attString =
- if (annotations.isEmpty)
- ""
- else
- annotations.mkString(" @", " @", "")
-
- underlying + attString
- }
-
- /** Add a number of annotations to this type */
- override def withAnnotations(annots: List[AnnotationInfo]): Type =
- copy(annots:::this.annotations)
-
- /** Remove any annotations from this type */
- override def withoutAnnotations = underlying.withoutAnnotations
-
- /** Set the self symbol */
- override def withSelfsym(sym: Symbol) =
- AnnotatedType(annotations, underlying, sym)
-
- /** Drop the annotations on the bounds, unless but the low and high
- * bounds are exactly tp.
- */
- override def bounds: TypeBounds = underlying.bounds match {
- case TypeBounds(_: this.type, _: this.type) => TypeBounds(this, this)
- case oftp => oftp
- }
-
- // ** Replace formal type parameter symbols with actual type arguments. * /
- override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = {
- val annotations1 = annotations.map(info => AnnotationInfo(info.atp.instantiateTypeParams(
- formals, actuals), info.args, info.assocs).setPos(info.pos))
- val underlying1 = underlying.instantiateTypeParams(formals, actuals)
- if ((annotations1 eq annotations) && (underlying1 eq underlying)) this
- else AnnotatedType(annotations1, underlying1, selfsym)
- }
-
- /** Return the base type sequence of tp, dropping the annotations, unless the base type sequence of tp
- * is precisely tp itself. */
- override def baseTypeSeq: BaseTypeSeq = {
- val oftp = underlying.baseTypeSeq
- if ((oftp.length == 1) && (oftp(0) eq underlying))
- baseTypeSingletonSeq(this)
- else
- oftp
- }
-
- override def kind = "AnnotatedType"
- }
-
- object AnnotatedType extends AnnotatedTypeExtractor
-
- /** A class representing types with a name. When an application uses
- * named arguments, the named argument types for calling isApplicable
- * are represented as NamedType.
- */
- case class NamedType(name: Name, tp: Type) extends Type {
- override def safeToString: String = name.toString +": "+ tp
- }
-
- /** A class representing an as-yet unevaluated type.
- */
- abstract class LazyType extends Type with AbsLazyType {
- override def kind = "LazyType"
- }
-
-// Creators ---------------------------------------------------------------
-
- /** Rebind symbol `sym' to an overriding member in type `pre'.
- */
- private def rebind(pre: Type, sym: Symbol): Symbol = {
- val owner = sym.owner
- if (owner.isClass && owner != pre.typeSymbol && !sym.isEffectivelyFinal && !sym.isClass) {
- //Console.println("rebind "+pre+" "+sym)//DEBUG
- val rebind = pre.nonPrivateMember(sym.name).suchThat(sym => sym.isType || sym.isStable)
- if (rebind == NoSymbol) sym
- else {
- // Console.println("rebound "+pre+" "+sym+" to "+rebind)//DEBUG
- rebind
- }
- } else sym
- }
-
- /** Convert a `super' prefix to a this-type if `sym'
- * is abstract or final.
- */
- private def removeSuper(tp: Type, sym: Symbol): Type = tp match {
- case SuperType(thistp, _) =>
- if (sym.isEffectivelyFinal || sym.isDeferred) thistp
- else tp
- case _ =>
- tp
- }
-
- /** The canonical creator for single-types */
- def singleType(pre: Type, sym: Symbol): Type = {
- if (phase.erasedTypes)
- sym.tpe.resultType
- else if (sym.isRootPackage)
- ThisType(RootClass)
- else {
- var sym1 = rebind(pre, sym)
- val pre1 = removeSuper(pre, sym1)
- if (pre1 ne pre) sym1 = rebind(pre1, sym1)
- SingleType(pre1, sym1)
- }
- }
-
- /** the canonical creator for a refined type with a given scope */
- def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type = {
- if (phase.erasedTypes)
- if (parents.isEmpty) ObjectClass.tpe else parents.head
- else {
- val clazz = owner.newRefinementClass(NoPosition)
- val result = RefinedType(parents, decls, clazz)
- clazz.setInfo(result)
- result
- }
- }
-
- /** The canonical creator for a refined type with an initially empty scope.
- *
- * @param parents ...
- * @param owner ...
- * @return ...
- */
- def refinedType(parents: List[Type], owner: Symbol): Type =
- refinedType(parents, owner, new Scope, owner.pos)
-
- def copyRefinedType(original: RefinedType, parents: List[Type], decls: Scope) =
- if ((parents eq original.parents) && (decls eq original.decls)) original
- else {
- val owner = if (original.typeSymbol == NoSymbol) NoSymbol else original.typeSymbol.owner
- val result = refinedType(parents, owner)
- val syms1 = decls.toList
- for (sym <- syms1)
- result.decls.enter(sym.cloneSymbol(result.typeSymbol))
- val syms2 = result.decls.toList
- val resultThis = result.typeSymbol.thisType
- for (sym <- syms2)
- sym.setInfo(sym.info.substThis(original.typeSymbol, resultThis).substSym(syms1, syms2))
- result
- }
-
- /** The canonical creator for typerefs
- * todo: see how we can clean this up a bit
- */
- def typeRef(pre: Type, sym: Symbol, args: List[Type]): Type = {
- // type alias selections are rebound in TypeMap ("coevolved", actually -- see #3731)
- // e.g., when type parameters that are referenced by the alias are instantiated in
- // the prefix. See pos/depmet_rebind_typealias.
- def rebindTR(pre: Type, sym: Symbol) =
- if (sym.isAbstractType) rebind(pre, sym) else sym
-
- val sym1 = rebindTR(pre, sym)
-
- // we require that object is initialized, thus info.typeParams instead of typeParams.
- if (sym1.isAliasType && sameLength(sym1.info.typeParams, args)) {
- if (sym1.lockOK) TypeRef(pre, sym1, args) // don't expand type alias (cycles checked by lockOK)
- else throw new TypeError("illegal cyclic reference involving " + sym1)
- }
- else {
- val pre1 = removeSuper(pre, sym1)
- if (pre1 ne pre)
- typeRef(pre1, rebindTR(pre1, sym1), args)
- else pre match {
- case _: CompoundType if sym1.isClass =>
- // sharpen prefix so that it is maximal and still contains the class.
- pre.parents.reverse dropWhile (_.member(sym1.name) != sym1) match {
- case Nil => TypeRef(pre, sym1, args)
- case parent :: _ => typeRef(parent, sym1, args)
- }
- case _ =>
- TypeRef(pre, sym1, args)
- }
- }
- }
-
- def copyTypeRef(tp: Type, pre: Type, sym: Symbol, args: List[Type]): Type = tp match {
- case TypeRef(pre0, sym0, args0) =>
- if ((pre == pre0) && (sym.name == sym0.name)) {
-
- val sym1 = sym
- // we require that object is initialized, thus info.typeParams instead of typeParams.
- if (sym1.isAliasType && sameLength(sym1.info.typeParams, args)) {
- if (sym1.lockOK) TypeRef(pre, sym1, args) // don't expand type alias (cycles checked by lockOK)
- else throw new TypeError("illegal cyclic reference involving " + sym1)
- }
- else {
- TypeRef(pre, sym1, args)
- }
-
- } else
- typeRef(pre, sym, args)
- }
-
-
-
-
- /** The canonical creator for implicit method types */
- def JavaMethodType(params: List[Symbol], resultType: Type): JavaMethodType =
- new JavaMethodType(params, resultType) // don't unique this!
-
- /** Create a new MethodType of the same class as tp, i.e. keep JavaMethodType */
- def copyMethodType(tp: Type, params: List[Symbol], restpe: Type): Type = tp match {
- case _: JavaMethodType => JavaMethodType(params, restpe)
- case _ => MethodType(params, restpe)
- }
-
- /** A creator for intersection type where intersections of a single type are
- * replaced by the type itself, and repeated parent classes are merged.
- */
- def intersectionType(tps: List[Type], owner: Symbol): Type = tps match {
- case List(tp) =>
- tp
- case _ =>
- refinedType(tps, owner)
-/*
- def merge(tps: List[Type]): List[Type] = tps match {
- case tp :: tps1 =>
- val tps1a = tps1 filter (_.typeSymbol.==(tp.typeSymbol))
- val tps1b = tps1 filter (_.typeSymbol.!=(tp.typeSymbol))
- mergePrefixAndArgs(tps1a, -1) match {
- case Some(tp1) => tp1 :: merge(tps1b)
- case None => throw new MalformedType(
- "malformed type: "+refinedType(tps, owner)+" has repeated parent class "+
- tp.typeSymbol+" with incompatible prefixes or type arguments")
- }
- case _ => tps
- }
- refinedType(merge(tps), owner)
-*/
- }
-
- /** A creator for intersection type where intersections of a single type are
- * replaced by the type itself. */
- def intersectionType(tps: List[Type]): Type = tps match {
- case List(tp) => tp
- case _ => refinedType(tps, commonOwner(tps))
- }
-
- /** A creator for type applications */
- def appliedType(tycon: Type, args: List[Type]): Type =
- if (args.isEmpty) tycon //@M! `if (args.isEmpty) tycon' is crucial (otherwise we create new types in phases after typer and then they don't get adapted (??))
- else tycon match {
- case TypeRef(pre, sym @ (NothingClass|AnyClass), _) => copyTypeRef(tycon, pre, sym, Nil) //@M drop type args to Any/Nothing
- case TypeRef(pre, sym, _) => copyTypeRef(tycon, pre, sym, args)
- case PolyType(tparams, restpe) => restpe.instantiateTypeParams(tparams, args)
- case ExistentialType(tparams, restpe) => ExistentialType(tparams, appliedType(restpe, args))
- case st: SingletonType => appliedType(st.widen, args) // @M TODO: what to do? see bug1
- case RefinedType(parents, decls) => RefinedType(parents map (appliedType(_, args)), decls) // MO to AM: please check
- case TypeBounds(lo, hi) => TypeBounds(appliedType(lo, args), appliedType(hi, args))
- case tv at TypeVar(_, _) => tv.applyArgs(args)
- case AnnotatedType(annots, underlying, self) => AnnotatedType(annots, appliedType(underlying, args), self)
- case ErrorType => tycon
- case WildcardType => tycon // needed for neg/t0226
- case _ => abort(debugString(tycon))
- }
-
- /** A creator for type parameterizations that strips empty type parameter lists.
- * Use this factory method to indicate the type has kind * (it's a polymorphic value)
- * until we start tracking explicit kinds equivalent to typeFun (except that the latter requires tparams nonEmpty)
- */
- def polyType(tparams: List[Symbol], tpe: Type): Type =
- if (tparams nonEmpty) typeFun(tparams, tpe)
- else tpe // it's okay to be forgiving here
-
- /** A creator for anonymous type functions, where the symbol for the type function still needs to be created
- *
- * TODO:
- * type params of anonymous type functions, which currently can only arise from normalising type aliases, are owned by the type alias of which they are the eta-expansion
- * higher-order subtyping expects eta-expansion of type constructors that arise from a class; here, the type params are owned by that class, but is that the right thing to do?
- */
- def typeFunAnon(tps: List[Symbol], body: Type): Type = typeFun(tps, body)
-
- /** A creator for a type functions, assuming the type parameters tps already have the right owner
- */
- def typeFun(tps: List[Symbol], body: Type): Type = PolyType(tps, body)
-
- /** A creator for existential types. This generates:
- *
- * tpe1 where { tparams }
- *
- * where `tpe1' is the result of extrapolating `tpe' wrt to `tparams'. Extrapolating means
- * that type variables in `tparams' occurring in covariant positions are replaced by upper bounds,
- * (minus any SingletonClass markers),
- * type variables in `tparams' occurring in contravariant positions are replaced by upper bounds,
- * provided the resulting type is legal wrt to stability, and does not contain any
- * type variable in `tparams'.
- * The abstraction drops all type parameters that are not directly or indirectly
- * referenced by type `tpe1'.
- * If there are no remaining type parameters, simply returns result type `tpe'.
- */
- def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type =
- if (tparams.isEmpty) tpe0
- else {
- var occurCount = emptySymCount ++ (tparams map (_ -> 0))
- val tpe = deAlias(tpe0)
- def countOccs(tp: Type) =
- for (t <- tp) {
- t match {
- case TypeRef(_, sym, _) =>
- occurCount get sym match {
- case Some(count) => occurCount += (sym -> (count + 1))
- case none =>
- }
- case _ =>
- }
- }
- countOccs(tpe)
- for (tparam <- tparams) countOccs(tparam.info)
-
- val extrapolate = new TypeMap {
- variance = 1
- def apply(tp: Type): Type = {
- val tp1 = mapOver(tp)
- tp1 match {
- case TypeRef(pre, sym, args) if (variance != 0) && (occurCount isDefinedAt sym) =>
- val repl = if (variance == 1) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo
- //println("eliminate "+sym+"/"+repl+"/"+occurCount(sym)+"/"+(tparams exists (repl.contains)))//DEBUG
- if (repl.typeSymbol != NothingClass && repl.typeSymbol != NullClass &&
- occurCount(sym) == 1 && !(tparams exists (repl.contains)))
- repl
- else tp1
- case _ =>
- tp1
- }
- }
- override def mapOver(tp: Type): Type = tp match {
- case SingleType(pre, sym) =>
- if (sym.isPackageClass) tp // short path
- else {
- val pre1 = this(pre)
- if ((pre1 eq pre) || !pre1.isStable) tp
- else singleType(pre1, sym)
- }
- case _ => super.mapOver(tp)
- }
-
- override def mapOver(tree: Tree) =
- tree match {
- case tree:Ident if tree.tpe.isStable =>
- // Do not discard the types of existential ident's.
- // The symbol of the Ident itself cannot be listed
- // in the existential's parameters, so the
- // resulting existential type would be ill-formed.
- Some(tree)
-
- case _ =>
- super.mapOver(tree)
- }
- }
- val tpe1 = extrapolate(tpe)
- var tparams0 = tparams
- var tparams1 = tparams0 filter tpe1.contains
-
- while (tparams1 != tparams0) {
- tparams0 = tparams1
- tparams1 = tparams filter { p =>
- tparams1 exists { p1 => p1 == p || (p1.info contains p) }
- }
- }
- if (tparams1.isEmpty) tpe1
- else tpe1 match {
- case ExistentialType(tparams2, tpe2) => ExistentialType(tparams1 ::: tparams2, tpe2)
- case _ => ExistentialType(tparams1, tpe1)
- }
- }
-
- /** Remove any occurrences of type aliases from this type */
- object deAlias extends TypeMap {
- def apply(tp: Type): Type = mapOver {
- tp match {
- case TypeRef(pre, sym, args) if sym.isAliasType => tp.normalize
- case _ => tp
- }
- }
- }
-
- /** Remove any occurrence of type <singleton> from this type and its parents */
- object dropSingletonType extends TypeMap {
- def apply(tp: Type): Type = {
- tp match {
- case TypeRef(_, SingletonClass, _) =>
- AnyClass.tpe
- case tp1 @ RefinedType(parents, decls) =>
- var parents1 = parents filter (_.typeSymbol != SingletonClass)
- if (parents1.isEmpty) parents1 = List(AnyClass.tpe)
- if (parents1.tail.isEmpty && decls.isEmpty) mapOver(parents1.head)
- else mapOver(copyRefinedType(tp1, parents1, decls))
- case tp1 =>
- mapOver(tp1)
- }
- }
- }
-
-// Hash consing --------------------------------------------------------------
-
- private val initialUniquesCapacity = 4096
- private var uniques: util.HashSet[Type] = _
- private var uniqueRunId = NoRunId
-
- private def unique[T <: Type](tp: T): T = {
- incCounter(rawTypeCount)
- if (uniqueRunId != currentRunId) {
- uniques = util.HashSet[Type]("uniques", initialUniquesCapacity)
- uniqueRunId = currentRunId
- }
- (uniques findEntryOrUpdate tp).asInstanceOf[T]
- }
-
-// Helper Classes ---------------------------------------------------------
-
- /** @PP: Unable to see why these apparently constant types should need vals
- * in every TypeConstraint, I lifted them out.
- */
- private lazy val numericLoBound = IntClass.tpe
- private lazy val numericHiBound = intersectionType(List(ByteClass.tpe, CharClass.tpe), ScalaPackageClass)
-
- /** A class expressing upper and lower bounds constraints of type variables,
- * as well as their instantiations.
- */
- class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type) {
- def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
- def this() = this(List(), List())
-
- private var lobounds = lo0
- private var hibounds = hi0
- private var numlo = numlo0
- private var numhi = numhi0
-
- def loBounds: List[Type] = if (numlo == NoType) lobounds else numlo :: lobounds
- def hiBounds: List[Type] = if (numhi == NoType) hibounds else numhi :: hibounds
-
- def addLoBound(tp: Type, isNumericBound: Boolean = false) {
- if (isNumericBound && isNumericValueType(tp)) {
- if (numlo == NoType || isNumericSubType(numlo, tp))
- numlo = tp
- else if (!isNumericSubType(tp, numlo))
- numlo = numericLoBound
- }
- else lobounds ::= tp
- }
-
- def addHiBound(tp: Type, isNumericBound: Boolean = false) {
- if (isNumericBound && isNumericValueType(tp)) {
- if (numhi == NoType || isNumericSubType(tp, numhi))
- numhi = tp
- else if (!isNumericSubType(numhi, tp))
- numhi = numericHiBound
- }
- else hibounds ::= tp
- }
-
- def isWithinBounds(tp: Type): Boolean =
- lobounds.forall(_ <:< tp) &&
- hibounds.forall(tp <:< _) &&
- (numlo == NoType || (numlo weak_<:< tp)) &&
- (numhi == NoType || (tp weak_<:< numhi))
-
- var inst: Type = NoType // @M reduce visibility?
-
- def instValid = (inst ne null) && (inst ne NoType)
-
- def cloneInternal = {
- val tc = new TypeConstraint(lobounds, hibounds, numlo, numhi)
- tc.inst = inst
- tc
- }
-
- override def toString =
- (loBounds map (_.safeToString)).mkString("[ _>:(", ",", ") ") +
- (hiBounds map (_.safeToString)).mkString("| _<:(", ",", ") ] _= ") +
- inst.safeToString
- }
-
- /** A prototype for mapping a function over all possible types
- */
- abstract class TypeMap extends Function1[Type, Type] {
- // deferred inherited: def apply(tp: Type): Type
-
- /** The variance relative to start. If you want variances to be significant, set
- * variance = 1
- * at the top of the typemap.
- */
- var variance = 0
-
- /** Should this map drop annotations that are not
- * type-constraint annotations?
- */
- val dropNonConstraintAnnotations = false
-
- /** Check whether two lists have elements that are eq-equal */
- def allEq[T <: AnyRef](l1: List[T], l2: List[T]) =
- (l1 corresponds l2)(_ eq _)
-
- // #3731: return sym1 for which holds: pre bound sym.name to sym and pre1 now binds sym.name to sym1, conceptually exactly the same symbol as sym
- // the selection of sym on pre must be updated to the selection of sym1 on pre1,
- // since sym's info was probably updated by the TypeMap to yield a new symbol sym1 with transformed info
- // @returns sym1
- protected def coevolveSym(pre: Type, pre1: Type, sym: Symbol): Symbol =
- if((pre ne pre1) && sym.isAliasType) // only need to rebind type aliases here, as typeRef already handles abstract types (they are allowed to be rebound more liberally)
- (pre, pre1) match {
- case (RefinedType(_, decls), RefinedType(_, decls1)) => // don't look at parents -- it would be an error to override alias types anyway
- //val sym1 =
- decls1.lookup(sym.name)
-// assert(decls.lookupAll(sym.name).toList.length == 1)
-// assert(decls1.lookupAll(sym.name).toList.length == 1)
-// assert(sym1.isAliasType)
-// println("coevolved "+ sym +" : "+ sym.info +" to "+ sym1 +" : "+ sym1.info +" with "+ pre +" -> "+ pre1)
-// sym1
- case _ => // TODO: is there another way a typeref's symbol can refer to a symbol defined in its pre?
-// val sym1 = pre1.nonPrivateMember(sym.name).suchThat(sym => sym.isAliasType)
-// println("??coevolve "+ sym +" : "+ sym.info +" to "+ sym1 +" : "+ sym1.info +" with "+ pre +" -> "+ pre1)
- sym
- }
- else sym
-
- /** Map this function over given type */
- def mapOver(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) =>
- val pre1 = this(pre)
- //val args1 = args mapConserve this(_)
- val args1 = if (args.isEmpty) args
- else {
- val tparams = sym.typeParams
- if (tparams.isEmpty) args
- else mapOverArgs(args, tparams)
- }
- if ((pre1 eq pre) && (args1 eq args)) tp
- else copyTypeRef(tp, pre1, coevolveSym(pre, pre1, sym), args1)
- case ThisType(_) => tp
- case SingleType(pre, sym) =>
- if (sym.isPackageClass) tp // short path
- else {
- val pre1 = this(pre)
- if (pre1 eq pre) tp
- else singleType(pre1, sym)
- }
- case MethodType(params, result) =>
- variance = -variance
- val params1 = mapOver(params)
- variance = -variance
- val result1 = this(result)
- if ((params1 eq params) && (result1 eq result)) tp
- // for new dependent types: result1.substSym(params, params1)?
- else copyMethodType(tp, params1, result1.substSym(params, params1))
- case PolyType(tparams, result) =>
- variance = -variance
- val tparams1 = mapOver(tparams)
- variance = -variance
- var result1 = this(result)
- if ((tparams1 eq tparams) && (result1 eq result)) tp
- else PolyType(tparams1, result1.substSym(tparams, tparams1))
- case NullaryMethodType(result) =>
- val result1 = this(result)
- if (result1 eq result) tp
- else NullaryMethodType(result1)
- case ConstantType(_) => tp
- case SuperType(thistp, supertp) =>
- val thistp1 = this(thistp)
- val supertp1 = this(supertp)
- if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp
- else SuperType(thistp1, supertp1)
- case TypeBounds(lo, hi) =>
- variance = -variance
- val lo1 = this(lo)
- variance = -variance
- val hi1 = this(hi)
- if ((lo1 eq lo) && (hi1 eq hi)) tp
- else TypeBounds(lo1, hi1)
- case BoundedWildcardType(bounds) =>
- val bounds1 = this(bounds)
- if (bounds1 eq bounds) tp
- else BoundedWildcardType(bounds1.asInstanceOf[TypeBounds])
- case rtp @ RefinedType(parents, decls) =>
- val parents1 = parents mapConserve (this)
- val decls1 = mapOver(decls)
- //if ((parents1 eq parents) && (decls1 eq decls)) tp
- //else refinementOfClass(tp.typeSymbol, parents1, decls1)
- copyRefinedType(rtp, parents1, decls1)
- case ExistentialType(tparams, result) =>
- val tparams1 = mapOver(tparams)
- var result1 = this(result)
- if ((tparams1 eq tparams) && (result1 eq result)) tp
- else ExistentialType(tparams1, result1.substSym(tparams, tparams1))
- case OverloadedType(pre, alts) =>
- val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else this(pre)
- if (pre1 eq pre) tp
- else OverloadedType(pre1, alts)
- case AntiPolyType(pre, args) =>
- val pre1 = this(pre)
- val args1 = args mapConserve (this)
- if ((pre1 eq pre) && (args1 eq args)) tp
- else AntiPolyType(pre1, args1)
- case tv at TypeVar(_, constr) =>
- if (constr.instValid) this(constr.inst)
- else tv.applyArgs(mapOverArgs(tv.typeArgs, tv.params)) //@M !args.isEmpty implies !typeParams.isEmpty
- case NotNullType(tp) =>
- val tp1 = this(tp)
- if (tp1 eq tp) tp
- else NotNullType(tp1)
- case AnnotatedType(annots, atp, selfsym) =>
- val annots1 = mapOverAnnotations(annots)
- val atp1 = this(atp)
- if ((annots1 eq annots) && (atp1 eq atp)) tp
- else if (annots1.isEmpty) atp1
- else AnnotatedType(annots1, atp1, selfsym)
-/*
- case ErrorType => tp
- case WildcardType => tp
- case NoType => tp
- case NoPrefix => tp
-*/
- case _ =>
- tp
- // throw new Error("mapOver inapplicable for " + tp);
- }
-
- def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
- map2Conserve(args, tparams) { (arg, tparam) =>
- val v = variance
- if (tparam.isContravariant) variance = -variance
- else if (!tparam.isCovariant) variance = 0
- val arg1 = this(arg)
- variance = v
- arg1
- }
-
- /** Map this function over given scope */
- def mapOver(scope: Scope): Scope = {
- val elems = scope.toList
- val elems1 = mapOver(elems)
- if (elems1 eq elems) scope
- else new Scope(elems1)
- }
-
- /** Map this function over given list of symbols */
- def mapOver(origSyms: List[Symbol]): List[Symbol] = {
- val change = origSyms exists { sym =>
- val v = variance
- if (sym.isAliasType) variance = 0
- val result = this(sym.info)
- variance = v
- result ne sym.info
- }
- if (!change) origSyms // fast path in case nothing changes due to map
- else { // map is not the identity --> do cloning properly
- val clonedSyms = origSyms map (_.cloneSymbol)
- val clonedInfos = clonedSyms map (_.info.substSym(origSyms, clonedSyms))
- val transformedInfos = clonedInfos mapConserve (this)
- (clonedSyms, transformedInfos).zipped map (_ setInfo _)
-
- clonedSyms
- }
- }
-
-
- def mapOverAnnotations(annots: List[AnnotationInfo])
- : List[AnnotationInfo] = {
- val newAnnots = annots.flatMap(mapOver(_))
- if (allEq(newAnnots, annots))
- annots
- else
- newAnnots
- }
-
- def mapOver(annot: AnnotationInfo): Option[AnnotationInfo] = {
- val AnnotationInfo(atp, args, assocs) = annot
-
- if (dropNonConstraintAnnotations &&
- !(atp.typeSymbol isNonBottomSubClass TypeConstraintClass))
- return None
-
- val atp1 = mapOver(atp)
- val args1 = mapOverAnnotArgs(args)
- // there is no need to rewrite assocs, as they are constants
-
- if ((args eq args1) && (atp eq atp1))
- Some(annot)
- else if (sameLength(args1, args))
- Some(AnnotationInfo(atp1, args1, assocs).setPos(annot.pos))
- else
- None
- }
-
- /** Map over a set of annotation arguments. If any
- * of the arguments cannot be mapped, then return Nil. */
- def mapOverAnnotArgs(args: List[Tree]): List[Tree] = {
- val args1 = args flatMap (x => mapOver(x))
- if (!sameLength(args1, args))
- Nil
- else if (allEq(args, args1))
- args
- else
- args1
- }
-
- def mapOver(tree: Tree): Option[Tree] =
- Some(mapOver(tree, ()=>return None))
-
- /** Map a tree that is part of an annotation argument.
- * If the tree cannot be mapped, then invoke giveup().
- * The default is to transform the tree with
- * TypeMapTransformer.
- */
- def mapOver(tree: Tree, giveup: ()=>Nothing): Tree =
- (new TypeMapTransformer).transform(tree)
-
- /** This transformer leaves the tree alone except to remap
- * its types. */
- class TypeMapTransformer extends Transformer {
- override def transform(tree: Tree) = {
- val tree1 = super.transform(tree)
- val tpe1 = TypeMap.this(tree1.tpe)
- if ((tree eq tree1) && (tree.tpe eq tpe1))
- tree
- else
- tree1.shallowDuplicate.setType(tpe1)
- }
- }
- }
-
- /** A type map that always returns the input type unchanged */
- object IdentityTypeMap extends TypeMap {
- def apply(tp: Type) = tp
- }
-
- abstract class TypeTraverser extends TypeMap {
- def traverse(tp: Type): Unit
- def apply(tp: Type): Type = { traverse(tp); tp }
- }
-
- abstract class TypeCollector[T](initial: T) extends TypeTraverser {
- var result: T = _
- def collect(tp: Type) = {
- result = initial
- traverse(tp)
- result
- }
- }
-
- private val emptySymMap = immutable.Map[Symbol, Symbol]()
- private val emptySymCount = immutable.Map[Symbol, Int]()
-
- def typeParamsToExistentials(clazz: Symbol, tparams: List[Symbol]): List[Symbol] = {
- val eparams = for ((tparam, i) <- tparams.zipWithIndex) yield {
- clazz.newExistential(clazz.pos, newTypeName("?"+i)).setInfo(tparam.info.bounds)
- }
- for (tparam <- eparams) tparam setInfo tparam.info.substSym(tparams, eparams)
- eparams
- }
-
- // note: it's important to write the two tests in this order,
- // as only typeParams forces the classfile to be read. See #400
- private def isRawIfWithoutArgs(sym: Symbol) =
- sym.isClass && sym.typeParams.nonEmpty && sym.isJavaDefined
-
- def isRaw(sym: Symbol, args: List[Type]) =
- !phase.erasedTypes && isRawIfWithoutArgs(sym) && args.isEmpty
-
- /** Is type tp a ``raw type''? */
- def isRawType(tp: Type) = tp match {
- case TypeRef(_, sym, args) => isRaw(sym, args)
- case _ => false
- }
-
- /** The raw to existential map converts a ``raw type'' to an existential type.
- * It is necessary because we might have read a raw type of a
- * parameterized Java class from a class file. At the time we read the type
- * the corresponding class file might still not be read, so we do not
- * know what the type parameters of the type are. Therefore
- * the conversion of raw types to existential types might not have taken place
- * in ClassFileparser.sigToType (where it is usually done)
- */
- object rawToExistential extends TypeMap {
- private var expanded = immutable.Set[Symbol]()
- private var generated = immutable.Set[Type]()
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) =>
- if (expanded contains sym) AnyRefClass.tpe
- else try {
- expanded += sym
- val eparams = mapOver(typeParamsToExistentials(sym, sym.typeParams))
- existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))
- } finally {
- expanded -= sym
- }
- case ExistentialType(_, _) if !(generated contains tp) => // to avoid infinite expansions. todo: not sure whether this is needed
- val result = mapOver(tp)
- generated += result
- result
- case _ =>
- mapOver(tp)
- }
- }
-
- def singletonBounds(hi: Type) = {
- TypeBounds.upper(intersectionType(List(hi, SingletonClass.tpe)))
- }
-
- /** A map to compute the asSeenFrom method */
- class AsSeenFromMap(pre: Type, clazz: Symbol) extends TypeMap {
- override val dropNonConstraintAnnotations = true
-
- var capturedParams: List[Symbol] = List()
-
- override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
- object annotationArgRewriter extends TypeMapTransformer {
- /** Rewrite `This` trees in annotation argument trees */
- def rewriteThis(tree: Tree): Tree =
- tree match {
- case This(_)
- if (tree.symbol isNonBottomSubClass clazz) &&
- (pre.widen.typeSymbol isNonBottomSubClass tree.symbol) =>
- if (pre.isStable) { // XXX why is this in this method? pull it out and guard the call `annotationArgRewriter.transform(tree)`?
- val termSym =
- pre.typeSymbol.owner.newValue(
- pre.typeSymbol.pos,
- pre.typeSymbol.name.toTermName).setInfo(pre) // what symbol should really be used?
- mkAttributedQualifier(pre, termSym)
- } else
- giveup()
-
- case tree => tree
- }
-
- override def transform(tree: Tree): Tree = {
- val tree1 = rewriteThis(super.transform(tree))
- tree1
- }
- }
-
- annotationArgRewriter.transform(tree)
- }
-
- var capturedPre = emptySymMap
-
- def stabilize(pre: Type, clazz: Symbol): Type =
- capturedPre.getOrElse(clazz, {
- val qvar = clazz freshExistential ".type" setInfo singletonBounds(pre)
- capturedPre += (clazz -> qvar)
- capturedParams = qvar :: capturedParams
- qvar
- }).tpe
-
- /** Return pre.baseType(clazz), or if that's NoType and clazz is a refinement, pre itself.
- * See bug397.scala for an example where the second alternative is needed.
- * The problem is that when forming the base type sequence of an abstract type,
- * any refinements in the base type list might be regenerated, and thus acquire
- * new class symbols. However, since refinements always have non-interesting prefixes
- * it looks OK to me to just take the prefix directly. */
- def base(pre: Type, clazz: Symbol) = {
- val b = pre.baseType(clazz)
- if (b == NoType && clazz.isRefinementClass) pre
- else b
- }
-
- def apply(tp: Type): Type =
- if ((pre eq NoType) || (pre eq NoPrefix) || !clazz.isClass) tp
- else tp match {
- case ThisType(sym) =>
- def toPrefix(pre: Type, clazz: Symbol): Type =
- if ((pre eq NoType) || (pre eq NoPrefix) || !clazz.isClass) tp
- else if ((sym isNonBottomSubClass clazz) &&
- (pre.widen.typeSymbol isNonBottomSubClass sym)) {
- val pre1 = pre match {
- case SuperType(thistp, _) => thistp
- case _ => pre
- }
- if (!(pre1.isStable ||
- pre1.typeSymbol.isPackageClass ||
- pre1.typeSymbol.isModuleClass && pre1.typeSymbol.isStatic)) {
- stabilize(pre1, sym)
- } else {
- pre1
- }
- } else {
- toPrefix(base(pre, clazz).prefix, clazz.owner);
- }
- toPrefix(pre, clazz)
- case SingleType(pre, sym) =>
- if (sym.isPackageClass) tp // short path
- else {
- val pre1 = this(pre)
- if (pre1 eq pre) tp
- else if (pre1.isStable) singleType(pre1, sym)
- else pre1.memberType(sym).resultType //todo: this should be rolled into existential abstraction
- }
- // AM: Martin, is this description accurate?
- // walk the owner chain of `clazz` (the original argument to asSeenFrom) until we find the type param's owner (while rewriting pre as we crawl up the owner chain)
- // once we're at the owner, extract the information that pre encodes about the type param,
- // by minimally subsuming pre to the type instance of the class that owns the type param,
- // the type we're looking for is the type instance's type argument at the position corresponding to the type parameter
- // optimisation: skip this type parameter if it's not owned by a class, as those params are not influenced by the prefix through which they are seen
- // (concretely: type params of anonymous type functions, which currently can only arise from normalising type aliases, are owned by the type alias of which they are the eta-expansion)
- // (skolems also aren't affected: they are ruled out by the isTypeParameter check)
- case TypeRef(prefix, sym, args) if (sym.isTypeParameter && sym.owner.isClass) =>
- def toInstance(pre: Type, clazz: Symbol): Type =
- if ((pre eq NoType) || (pre eq NoPrefix) || !clazz.isClass) mapOver(tp)
- //@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary
- else {
- def throwError = abort("" + tp + sym.locationString + " cannot be instantiated from " + pre.widen)
-
- def instParam(ps: List[Symbol], as: List[Type]): Type =
- if (ps.isEmpty) throwError
- else if (sym eq ps.head)
- // @M! don't just replace the whole thing, might be followed by type application
- appliedType(as.head, args mapConserve (this)) // @M: was as.head
- else instParam(ps.tail, as.tail);
- val symclazz = sym.owner
- if (symclazz == clazz && !pre.isInstanceOf[TypeVar] && (pre.widen.typeSymbol isNonBottomSubClass symclazz)) {
- // have to deconst because it may be a Class[T].
- pre.baseType(symclazz).deconst match {
- case TypeRef(_, basesym, baseargs) =>
- //Console.println("instantiating " + sym + " from " + basesym + " with " + basesym.typeParams + " and " + baseargs+", pre = "+pre+", symclazz = "+symclazz);//DEBUG
- if (sameLength(basesym.typeParams, baseargs)) {
- instParam(basesym.typeParams, baseargs)
- } else {
- throw new TypeError(
- "something is wrong (wrong class file?): "+basesym+
- " with type parameters "+
- basesym.typeParams.map(_.name).mkString("[",",","]")+
- " gets applied to arguments "+baseargs.mkString("[",",","]")+", phase = "+phase)
- }
- case ExistentialType(tparams, qtpe) =>
- capturedParams = capturedParams union tparams
- toInstance(qtpe, clazz)
- case _ =>
- throwError
- }
- } else toInstance(base(pre, clazz).prefix, clazz.owner)
- }
- toInstance(pre, clazz)
- case _ =>
- mapOver(tp)
- }
- }
-
- /** A base class to compute all substitutions */
- abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap {
- val fromContains = (x: Symbol) => from.contains(x) //from.toSet <-- traversing short lists seems to be faster than allocating sets
- assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to)
-
- /** Are `sym' and `sym1' the same.
- * Can be tuned by subclasses.
- */
- protected def matches(sym: Symbol, sym1: Symbol): Boolean = sym eq sym1
-
- /** Map target to type, can be tuned by subclasses */
- protected def toType(fromtp: Type, tp: T): Type
-
- protected def renameBoundSyms(tp: Type): Type = tp match {
- case MethodType(ps, restp) =>
- val ps1 = cloneSymbols(ps)
- copyMethodType(tp, ps1, renameBoundSyms(restp.substSym(ps, ps1)))
- case PolyType(bs, restp) =>
- val bs1 = cloneSymbols(bs)
- PolyType(bs1, renameBoundSyms(restp.substSym(bs, bs1)))
- case ExistentialType(bs, restp) =>
- val bs1 = cloneSymbols(bs)
- ExistentialType(bs1, restp.substSym(bs, bs1))
- case _ =>
- tp
- }
-
- def apply(tp0: Type): Type = if (from.isEmpty) tp0 else {
- @tailrec def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type =
- if (from.isEmpty) tp
- // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from))
- else if (matches(from.head, sym)) toType(tp, to.head)
- else subst(tp, sym, from.tail, to.tail)
-
- val boundSyms = tp0.boundSyms
- val tp1 = if (boundSyms exists fromContains) renameBoundSyms(tp0) else tp0
- val tp = mapOver(tp1)
-
- tp match {
- // @M
- // 1) arguments must also be substituted (even when the "head" of the
- // applied type has already been substituted)
- // example: (subst RBound[RT] from [type RT,type RBound] to
- // [type RT&,type RBound&]) = RBound&[RT&]
- // 2) avoid loops (which occur because alpha-conversion is
- // not performed properly imo)
- // e.g. if in class Iterable[a] there is a new Iterable[(a,b)],
- // we must replace the a in Iterable[a] by (a,b)
- // (must not recurse --> loops)
- // 3) replacing m by List in m[Int] should yield List[Int], not just List
- case TypeRef(NoPrefix, sym, args) =>
- appliedType(subst(tp, sym, from, to), args) // if args.isEmpty, appliedType is the identity
- case SingleType(NoPrefix, sym) =>
- subst(tp, sym, from, to)
- case _ =>
- tp
- }
- }
- }
-
- /** A map to implement the `substSym' method. */
- class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) {
- protected def toType(fromtp: Type, sym: Symbol) = fromtp match {
- case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args)
- case SingleType(pre, _) => singleType(pre, sym)
- }
- override def apply(tp: Type): Type = if (from.isEmpty) tp else {
- @tailrec def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol =
- if (from.isEmpty) sym
- // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from))
- else if (matches(from.head, sym)) to.head
- else subst(sym, from.tail, to.tail)
- tp match {
- case TypeRef(pre, sym, args) if pre ne NoPrefix =>
- val newSym = subst(sym, from, to)
- // assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams))
- mapOver(copyTypeRef(tp, pre, newSym, args)) // mapOver takes care of subst'ing in args
- case SingleType(pre, sym) if pre ne NoPrefix =>
- mapOver(singleType(pre, subst(sym, from, to)))
- case _ =>
- super.apply(tp)
- }
- }
-
-
- override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
- object trans extends TypeMapTransformer {
-
- def termMapsTo(sym: Symbol) =
- if (fromContains(sym))
- Some(to(from.indexOf(sym)))
- else
- None
-
- override def transform(tree: Tree) =
- tree match {
- case tree at Ident(_) =>
- termMapsTo(tree.symbol) match {
- case Some(tosym) =>
- if (tosym.info.bounds.hi.typeSymbol isSubClass SingletonClass) {
- Ident(tosym.existentialToString)
- .setSymbol(tosym)
- .setPos(tosym.pos)
- .setType(dropSingletonType(tosym.info.bounds.hi))
- } else {
- giveup()
- }
- case none => super.transform(tree)
- }
- case tree => super.transform(tree)
- }
- }
- trans.transform(tree)
- }
- }
-
- /** A map to implement the `subst' method. */
- class SubstTypeMap(from: List[Symbol], to: List[Type])
- extends SubstMap(from, to) {
- protected def toType(fromtp: Type, tp: Type) = tp
-
- override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
- object trans extends TypeMapTransformer {
- override def transform(tree: Tree) =
- tree match {
- case Ident(name) if fromContains(tree.symbol) =>
- val totpe = to(from.indexOf(tree.symbol))
- if (!totpe.isStable) giveup()
- else Ident(name).setPos(tree.pos).setSymbol(tree.symbol).setType(totpe)
-
- case _ => super.transform(tree)
- }
- }
- trans.transform(tree)
- }
-
- }
-
- /** A map to implement the `substThis' method. */
- class SubstThisMap(from: Symbol, to: Type) extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case ThisType(sym) if (sym == from) => to
- case _ => mapOver(tp)
- }
- }
-
- class SubstSuperMap(from: Type, to: Type) extends TypeMap {
- def apply(tp: Type): Type = if (tp eq from) to else mapOver(tp)
- }
-
- class SubstWildcardMap(from: List[Symbol]) extends TypeMap {
- def apply(tp: Type): Type = try {
- tp match {
- case TypeRef(_, sym, _) if from contains sym =>
- BoundedWildcardType(sym.info.bounds)
- case _ =>
- mapOver(tp)
- }
- } catch {
- case ex: MalformedType =>
- WildcardType
- }
- }
-
-// dependent method types
- object IsDependentCollector extends TypeCollector(false) {
- def traverse(tp: Type) {
- if(tp isImmediatelyDependent) result = true
- else if (!result) mapOver(tp)
- }
- }
-
- object ApproximateDependentMap extends TypeMap {
- def apply(tp: Type): Type =
- if(tp isImmediatelyDependent) WildcardType
- else mapOver(tp)
- }
-
- class InstantiateDependentMap(params: List[Symbol], actuals: List[Type]) extends TypeMap {
- private val actualsIndexed = actuals.toIndexedSeq
- override val dropNonConstraintAnnotations = true
-
- object ParamWithActual {
- def unapply(sym: Symbol): Option[Type] = {
- val pid = params indexOf sym
- if(pid != -1) Some(actualsIndexed(pid)) else None
- }
- }
-
- def apply(tp: Type): Type =
- mapOver(tp) match {
- case SingleType(NoPrefix, ParamWithActual(arg)) if arg.isStable => arg // unsound to replace args by unstable actual #3873
- // (soundly) expand type alias selections on implicit arguments, see depmet_implicit_oopsla* test cases -- typically, `param.isImplicit`
- case tp1 at TypeRef(SingleType(NoPrefix, ParamWithActual(arg)), sym, targs) =>
- val res = typeRef(arg, sym, targs)
- if(res.typeSymbolDirect isAliasType) res.dealias
- else tp1
- case tp1 => tp1 // don't return the original `tp`, which may be different from `tp1`, due to `dropNonConstraintAnnotations`
- }
-
- def existentialsNeeded: List[Symbol] = existSyms.filter(_ ne null).toList
-
- private val existSyms: Array[Symbol] = new Array(actualsIndexed.size)
- private def haveExistential(i: Int) = {assert((i >= 0) && (i <= actualsIndexed.size)); existSyms(i) ne null}
-
- /* Return the type symbol for referencing a parameter inside the existential quantifier.
- * (Only needed if the actual is unstable.)
- */
- def existSymFor(actualIdx: Int) =
- if (haveExistential(actualIdx)) existSyms(actualIdx)
- else {
- val oldSym = params(actualIdx)
- val symowner = oldSym.owner
- val bound = singletonBounds(actualsIndexed(actualIdx))
-
- val sym = symowner.newExistential(oldSym.pos, newTypeName(oldSym.name + ".type"))
- sym.setInfo(bound)
- sym.setFlag(oldSym.flags)
-
- existSyms(actualIdx) = sym
- sym
- }
-
- //AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon)
- override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = {
- object treeTrans extends Transformer {
- override def transform(tree: Tree): Tree = {
- tree match {
- case RefParamAt(pid) =>
- // TODO: this should be simplified; in the stable case, one can probably
- // just use an Ident to the tree.symbol. Why an existential in the non-stable case?
- val actual = actualsIndexed(pid)
- if (actual.isStable && actual.typeSymbol != NothingClass) {
- mkAttributedQualifier(actualsIndexed(pid), tree.symbol)
- } else {
- val sym = existSymFor(pid)
- (Ident(sym.name)
- copyAttrs tree
- setType typeRef(NoPrefix, sym, Nil))
- }
- case _ => super.transform(tree)
- }
- }
- object RefParamAt {
- def unapply(tree: Tree): Option[Int] = tree match {
- case Ident(_) => Some(params indexOf tree.symbol) filterNot (_ == -1)
- case _ => None
- }
- }
- }
-
- treeTrans.transform(arg)
- }
- }
-
-
- object StripAnnotationsMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case AnnotatedType(_, atp, _) =>
- mapOver(atp)
- case tp =>
- mapOver(tp)
- }
- }
-
- /** A map to convert every occurrence of a wildcard type to a fresh
- * type variable */
- object wildcardToTypeVarMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case WildcardType =>
- TypeVar(tp, new TypeConstraint)
- case BoundedWildcardType(bounds) =>
- TypeVar(tp, new TypeConstraint(List(bounds.lo), List(bounds.hi)))
- case _ =>
- mapOver(tp)
- }
- }
-
- /** A map to convert every occurrence of a type variable to a
- wildcard type */
- object typeVarToOriginMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeVar(origin, _) => origin
- case _ => mapOver(tp)
- }
- }
-
- /** A map to implement the `contains' method */
- class ContainsCollector(sym: Symbol) extends TypeCollector(false) {
- def traverse(tp: Type) {
- if (!result) {
- tp.normalize match {
- case TypeRef(_, sym1, _) if (sym == sym1) => result = true
- case SingleType(_, sym1) if (sym == sym1) => result = true
- case _ => mapOver(tp)
- }
- }
- }
-
- override def mapOver(arg: Tree) = {
- for (t <- arg) {
- traverse(t.tpe)
- if (t.symbol == sym)
- result = true
- }
- Some(arg)
- }
- }
-
- /** A map to implement the `contains' method */
- class ContainsTypeCollector(t: Type) extends TypeCollector(false) {
- def traverse(tp: Type) {
- if (!result) {
- if (tp eq t) result = true
- else mapOver(tp)
- }
- }
- override def mapOver(arg: Tree) = {
- for (t <- arg) {
- traverse(t.tpe)
- }
- Some(arg)
- }
- }
-
- /** A map to implement the `filter' method */
- class FilterTypeCollector(p: Type => Boolean) extends TypeCollector(new ListBuffer[Type]) {
- def traverse(tp: Type) {
- if (p(tp)) result += tp
- mapOver(tp)
- }
- }
-
- class ForEachTypeTraverser(f: Type => Unit) extends TypeTraverser {
- def traverse(tp: Type) {
- f(tp)
- mapOver(tp)
- }
- }
-
- /** A map to implement the `filter' method */
- class FindTypeCollector(p: Type => Boolean) extends TypeCollector[Option[Type]](None) {
- def traverse(tp: Type) {
- if (result.isEmpty) {
- if (p(tp)) result = Some(tp)
- mapOver(tp)
- }
- }
- }
-
- /** A map to implement the `contains' method */
- object ErroneousCollector extends TypeCollector(false) {
- def traverse(tp: Type) {
- if (!result) {
- result = tp.isError
- mapOver(tp)
- }
- }
- }
-
- /** A map to compute the most deeply nested owner that contains all the symbols
- * of thistype or prefixless typerefs/singletype occurrences in given type.
- */
- object commonOwnerMap extends TypeMap {
- var result: Symbol = _
- def init() = { result = NoSymbol }
- def apply(tp: Type): Type = {
- assert(tp ne null)
- tp.normalize match {
- case ThisType(sym) =>
- register(sym)
- case TypeRef(NoPrefix, sym, args) =>
- register(sym.owner); args foreach apply
- case SingleType(NoPrefix, sym) =>
- register(sym.owner)
- case _ =>
- mapOver(tp)
- }
- tp
- }
- private def register(sym: Symbol) {
- while (result != NoSymbol && sym != result && !(sym isNestedIn result))
- result = result.owner;
- }
- }
-
- class MissingAliasControl extends ControlThrowable
- val missingAliasException = new MissingAliasControl
- class MissingTypeControl extends ControlThrowable
-
- object adaptToNewRunMap extends TypeMap {
- private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
- if (phase.flatClasses) {
- sym
- } else if (sym.isModuleClass) {
- adaptToNewRun(pre, sym.sourceModule).moduleClass
- } else if ((pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass) {
- sym
- } else {
- var rebind0 = pre.findMember(sym.name, BRIDGE, 0, true)
- if (rebind0 == NoSymbol) {
- if (sym.isAliasType) throw missingAliasException
- if (settings.debug.value) println(pre+"."+sym+" does no longer exist, phase = "+phase)
- throw new MissingTypeControl // For build manager and presentation compiler purposes
- //assert(false, pre+"."+sym+" does no longer exist, phase = "+phase)
- }
- /** The two symbols have the same fully qualified name */
- def corresponds(sym1: Symbol, sym2: Symbol): Boolean =
- sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner))
- if (!corresponds(sym.owner, rebind0.owner)) {
- if (settings.debug.value)
- log("ADAPT1 pre = "+pre+", sym = "+sym+sym.locationString+", rebind = "+rebind0+rebind0.locationString)
- val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner));
- if (bcs.isEmpty)
- assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinementclass it might be a structural type => OK to leave it in.
- else
- rebind0 = pre.baseType(bcs.head).member(sym.name)
- if (settings.debug.value) log(
- "ADAPT2 pre = " + pre +
- ", bcs.head = " + bcs.head +
- ", sym = " + sym+sym.locationString +
- ", rebind = " + rebind0 + (
- if (rebind0 == NoSymbol) ""
- else rebind0.locationString
- )
- )
- }
- val rebind = rebind0.suchThat(sym => sym.isType || sym.isStable)
- if (rebind == NoSymbol) {
- if (settings.debug.value) log("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType)
- throw new MalformedType(pre, sym.nameString)
- }
- rebind
- }
- }
- def apply(tp: Type): Type = tp match {
- case ThisType(sym) =>
- try {
- val sym1 = adaptToNewRun(sym.owner.thisType, sym)
- if (sym1 == sym) tp else ThisType(sym1)
- } catch {
- case ex: MissingTypeControl =>
- tp
- }
- case SingleType(pre, sym) =>
- if (sym.isPackage) tp
- else {
- val pre1 = this(pre)
- val sym1 = adaptToNewRun(pre1, sym)
- if ((pre1 eq pre) && (sym1 eq sym)) tp
- else singleType(pre1, sym1)
- }
- case TypeRef(pre, sym, args) =>
- if (sym.isPackageClass) tp
- else {
- val pre1 = this(pre)
- val args1 = args mapConserve (this)
- try {
- val sym1 = adaptToNewRun(pre1, sym)
- if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) {
- tp
- } else if (sym1 == NoSymbol) {
- if (settings.debug.value) println("adapt fail: "+pre+" "+pre1+" "+sym)
- tp
- } else {
- copyTypeRef(tp, pre1, sym1, args1)
- }
- } catch {
- case ex: MissingAliasControl =>
- apply(tp.dealias)
- case _: MissingTypeControl =>
- tp
- }
- }
- case MethodType(params, restp) =>
- val restp1 = this(restp)
- if (restp1 eq restp) tp
- else copyMethodType(tp, params, restp1)
- case NullaryMethodType(restp) =>
- val restp1 = this(restp)
- if (restp1 eq restp) tp
- else NullaryMethodType(restp1)
- case PolyType(tparams, restp) =>
- val restp1 = this(restp)
- if (restp1 eq restp) tp
- else PolyType(tparams, restp1)
-
- // Lukas: we need to check (together) whether we should also include parameter types
- // of PolyType and MethodType in adaptToNewRun
-
- case ClassInfoType(parents, decls, clazz) =>
- if (clazz.isPackageClass) tp
- else {
- val parents1 = parents mapConserve (this)
- if (parents1 eq parents) tp
- else ClassInfoType(parents1, decls, clazz)
- }
- case RefinedType(parents, decls) =>
- val parents1 = parents mapConserve (this)
- if (parents1 eq parents) tp
- else refinedType(parents1, tp.typeSymbol.owner, decls, tp.typeSymbol.owner.pos)
- case SuperType(_, _) => mapOver(tp)
- case TypeBounds(_, _) => mapOver(tp)
- case TypeVar(_, _) => mapOver(tp)
- case AnnotatedType(_,_,_) => mapOver(tp)
- case NotNullType(_) => mapOver(tp)
- case ExistentialType(_, _) => mapOver(tp)
- case _ => tp
- }
- }
-
- class SubTypePair(val tp1: Type, val tp2: Type) {
- override def hashCode = tp1.hashCode * 41 + tp2.hashCode
- override def equals(other: Any) = other match {
- case stp: SubTypePair =>
- (tp1 =:= stp.tp1) && (tp2 =:= stp.tp2)
- case _ =>
- false
- }
- override def toString = tp1+" <:<? "+tp2
- }
-
-// Helper Methods -------------------------------------------------------------
-
- final val LubGlbMargin = 0
-
- /** The maximum allowable depth of lubs or glbs over types `ts'
- * This is the maximum depth of all types in the base type sequences
- * of each of the types `ts', plus LubGlbMargin
- */
- def lubDepth(ts: List[Type]) = {
- var d = 0
- for (tp <- ts) d = math.max(d, tp.baseTypeSeqDepth)
- d + LubGlbMargin
- }
-
- /** Is intersection of given types populated? That is,
- * for all types tp1, tp2 in intersection
- * for all common base classes bc of tp1 and tp2
- * let bt1, bt2 be the base types of tp1, tp2 relative to class bc
- * Then:
- * bt1 and bt2 have the same prefix, and
- * any corresponding non-variant type arguments of bt1 and bt2 are the same
- */
- def isPopulated(tp1: Type, tp2: Type): Boolean = {
- def isConsistent(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
- case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
- assert(sym1 == sym2)
- pre1 =:= pre2 &&
- ((args1, args2, sym1.typeParams).zipped forall {
- (arg1, arg2, tparam) =>
- //if (tparam.variance == 0 && !(arg1 =:= arg2)) Console.println("inconsistent: "+arg1+"!="+arg2)//DEBUG
- if (tparam.variance == 0) arg1 =:= arg2
- else if (arg1.isInstanceOf[TypeVar])
- // if left-hand argument is a typevar, make it compatible with variance
- // this is for more precise pattern matching
- // todo: work this in the spec of this method
- // also: think what happens if there are embedded typevars?
- if (tparam.variance < 0) arg1 <:< arg2 else arg2 <:< arg1
- else true
- })
- case (et: ExistentialType, _) =>
- et.withTypeVars(isConsistent(_, tp2))
- case (_, et: ExistentialType) =>
- et.withTypeVars(isConsistent(tp1, _))
- }
-
- def check(tp1: Type, tp2: Type) =
- if (tp1.typeSymbol.isClass && tp1.typeSymbol.hasFlag(FINAL))
- tp1 <:< tp2 || isNumericValueClass(tp1.typeSymbol) && isNumericValueClass(tp2.typeSymbol)
- else tp1.baseClasses forall (bc =>
- tp2.baseTypeIndex(bc) < 0 || isConsistent(tp1.baseType(bc), tp2.baseType(bc)))
-
- check(tp1, tp2)/* && check(tp2, tp1)*/ // need to investgate why this can't be made symmetric -- neg/gadts1 fails, and run/existials also.
- }
-
- /** Does a pattern of type `patType' need an outer test when executed against
- * selector type `selType' in context defined by `currentOwner'?
- */
- def needsOuterTest(patType: Type, selType: Type, currentOwner: Symbol) = {
- def createDummyClone(pre: Type): Type = {
- val dummy = currentOwner.enclClass.newValue(NoPosition, nme.ANYNAME).setInfo(pre.widen)
- singleType(ThisType(currentOwner.enclClass), dummy)
- }
- def maybeCreateDummyClone(pre: Type, sym: Symbol): Type = pre match {
- case SingleType(pre1, sym1) =>
- if (sym1.isModule && sym1.isStatic) {
- NoType
- } else if (sym1.isModule && sym.owner == sym1.moduleClass) {
- val pre2 = maybeCreateDummyClone(pre1, sym1)
- if (pre2 eq NoType) pre2
- else singleType(pre2, sym1)
- } else {
- createDummyClone(pre)
- }
- case ThisType(clazz) =>
- if (clazz.isModuleClass)
- maybeCreateDummyClone(clazz.typeOfThis, sym)
- else if (sym.owner == clazz && (sym.hasFlag(PRIVATE) || sym.privateWithin == clazz))
- NoType
- else
- createDummyClone(pre)
- case _ =>
- NoType
- }
- patType match {
- case TypeRef(pre, sym, args) =>
- val pre1 = maybeCreateDummyClone(pre, sym)
- (pre1 ne NoType) && isPopulated(copyTypeRef(patType, pre1, sym, args), selType)
- case _ =>
- false
- }
- }
-
- private var subsametypeRecursions: Int = 0
-
- private def isUnifiable(pre1: Type, pre2: Type) =
- (beginsWithTypeVarOrIsRefined(pre1) || beginsWithTypeVarOrIsRefined(pre2)) && (pre1 =:= pre2)
-
- /** Returns true iff we are past phase specialize,
- * sym1 and sym2 are two existential skolems with equal names and bounds,
- * and pre1 and pre2 are equal prefixes
- */
- private def isSameSpecializedSkolem(sym1: Symbol, sym2: Symbol, pre1: Type, pre2: Type) = {
- sym1.isExistentialSkolem && sym2.isExistentialSkolem &&
- sym1.name == sym2.name &&
- phase.specialized &&
- sym1.info =:= sym2.info &&
- pre1 =:= pre2
- }
-
- private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean =
- if (sym1 == sym2) sym1.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
- else (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
-
- /** Do `tp1' and `tp2' denote equivalent types?
- */
- def isSameType(tp1: Type, tp2: Type): Boolean = try {
- incCounter(sametypeCount)
- subsametypeRecursions += 1
- undoLog undoUnless {
- isSameType1(tp1, tp2)
- }
- } finally {
- subsametypeRecursions -= 1
- // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
- // it doesn't help to keep separate recursion counts for the three methods that now share it
- // if (subsametypeRecursions == 0) undoLog.clear()
- }
-
- def isDifferentType(tp1: Type, tp2: Type): Boolean = try {
- subsametypeRecursions += 1
- undoLog undo { // undo type constraints that arise from operations in this block
- !isSameType1(tp1, tp2)
- }
- } finally {
- subsametypeRecursions -= 1
- // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
- // it doesn't help to keep separate recursion counts for the three methods that now share it
- // if (subsametypeRecursions == 0) undoLog.clear()
- }
-
- def isDifferentTypeConstructor(tp1: Type, tp2: Type): Boolean = tp1 match {
- case TypeRef(pre1, sym1, _) =>
- tp2 match {
- case TypeRef(pre2, sym2, _) => sym1 != sym2 || isDifferentType(pre1, pre2)
- case _ => true
- }
- case _ => true
- }
-
- def normalizePlus(tp: Type) =
- if (isRawType(tp)) rawToExistential(tp)
- else tp.normalize
-
- /*
- todo: change to:
- def normalizePlus(tp: Type) = tp match {
- case TypeRef(pre, sym, List()) =>
- if (!sym.isInitialized) sym.rawInfo.load(sym)
- if (sym.isJavaDefined && !sym.typeParams.isEmpty) rawToExistential(tp)
- else tp.normalize
- case _ => tp.normalize
- }
- */
-/*
- private def isSameType0(tp1: Type, tp2: Type): Boolean = {
- if (tp1 eq tp2) return true
- ((tp1, tp2) match {
- case (ErrorType, _) => true
- case (WildcardType, _) => true
- case (_, ErrorType) => true
- case (_, WildcardType) => true
-
- case (NoType, _) => false
- case (NoPrefix, _) => tp2.typeSymbol.isPackageClass
- case (_, NoType) => false
- case (_, NoPrefix) => tp1.typeSymbol.isPackageClass
-
- case (ThisType(sym1), ThisType(sym2))
- if (sym1 == sym2) =>
- true
- case (SingleType(pre1, sym1), SingleType(pre2, sym2))
- if (equalSymsAndPrefixes(sym1, pre1, sym2, pre2)) =>
- true
-/*
- case (SingleType(pre1, sym1), ThisType(sym2))
- if (sym1.isModule &&
- sym1.moduleClass == sym2 &&
- pre1 =:= sym2.owner.thisType) =>
- true
- case (ThisType(sym1), SingleType(pre2, sym2))
- if (sym2.isModule &&
- sym2.moduleClass == sym1 &&
- pre2 =:= sym1.owner.thisType) =>
- true
-*/
- case (ConstantType(value1), ConstantType(value2)) =>
- value1 == value2
- case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
- equalSymsAndPrefixes(sym1, pre1, sym2, pre2) &&
- ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
- isSameTypes(args1, args2))
- // @M! normalize reduces higher-kinded case to PolyType's
- case (RefinedType(parents1, ref1), RefinedType(parents2, ref2)) =>
- def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
- sym2 =>
- var e1 = s1.lookupEntry(sym2.name)
- (e1 ne null) && {
- val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner.thisType)
- var isEqual = false
- while (!isEqual && (e1 ne null)) {
- isEqual = e1.sym.info =:= substSym
- e1 = s1.lookupNextEntry(e1)
- }
- isEqual
- }
- }
- //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
- isSameTypes(parents1, parents2) && isSubScope(ref1, ref2) && isSubScope(ref2, ref1)
- case (MethodType(params1, res1), MethodType(params2, res2)) =>
- // new dependent types: probably fix this, use substSym as done for PolyType
- (isSameTypes(tp1.paramTypes, tp2.paramTypes) &&
- res1 =:= res2 &&
- tp1.isImplicit == tp2.isImplicit)
- case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
- // assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
- (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210
- res1 =:= res2.substSym(tparams2, tparams1)
- case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
- (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210
- res1 =:= res2.substSym(tparams2, tparams1)
- case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) =>
- lo1 =:= lo2 && hi1 =:= hi2
- case (BoundedWildcardType(bounds), _) =>
- bounds containsType tp2
- case (_, BoundedWildcardType(bounds)) =>
- bounds containsType tp1
- case (tv @ TypeVar(_,_), tp) =>
- tv.registerTypeEquality(tp, true)
- case (tp, tv @ TypeVar(_,_)) =>
- tv.registerTypeEquality(tp, false)
- case (AnnotatedType(_,_,_), _) =>
- annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case (_, AnnotatedType(_,_,_)) =>
- annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case (_: SingletonType, _: SingletonType) =>
- var origin1 = tp1
- while (origin1.underlying.isInstanceOf[SingletonType]) {
- assert(origin1 ne origin1.underlying, origin1)
- origin1 = origin1.underlying
- }
- var origin2 = tp2
- while (origin2.underlying.isInstanceOf[SingletonType]) {
- assert(origin2 ne origin2.underlying, origin2)
- origin2 = origin2.underlying
- }
- ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
- case _ =>
- false
- }) || {
- val tp1n = normalizePlus(tp1)
- val tp2n = normalizePlus(tp2)
- ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
- }
- }
-*/
- private def isSameType1(tp1: Type, tp2: Type): Boolean = {
- if ((tp1 eq tp2) ||
- (tp1 eq ErrorType) || (tp1 eq WildcardType) ||
- (tp2 eq ErrorType) || (tp2 eq WildcardType))
- true
- else if ((tp1 eq NoType) || (tp2 eq NoType))
- false
- else if (tp1 eq NoPrefix)
- tp2.typeSymbol.isPackageClass
- else if (tp2 eq NoPrefix)
- tp1.typeSymbol.isPackageClass
- else {
- isSameType2(tp1, tp2) || {
- val tp1n = normalizePlus(tp1)
- val tp2n = normalizePlus(tp2)
- ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
- }
- }
- }
-
- def isSameType2(tp1: Type, tp2: Type): Boolean = {
- tp1 match {
- case tr1: TypeRef =>
- tp2 match {
- case tr2: TypeRef =>
- return (equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) &&
- ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
- isSameTypes(tr1.args, tr2.args))) ||
- ((tr1.pre, tr2.pre) match {
- case (tv @ TypeVar(_,_), _) => tv.registerTypeSelection(tr1.sym, tr2)
- case (_, tv @ TypeVar(_,_)) => tv.registerTypeSelection(tr2.sym, tr1)
- case _ => false
- })
- case _ =>
- }
- case tt1: ThisType =>
- tp2 match {
- case tt2: ThisType =>
- if (tt1.sym == tt2.sym) return true
- case _ =>
- }
- case st1: SingleType =>
- tp2 match {
- case st2: SingleType =>
- if (equalSymsAndPrefixes(st1.sym, st1.pre, st2.sym, st2.pre)) return true
- case _ =>
- }
- case ct1: ConstantType =>
- tp2 match {
- case ct2: ConstantType =>
- return (ct1.value == ct2.value)
- case _ =>
- }
- case rt1: RefinedType =>
- tp2 match {
- case rt2: RefinedType => //
- def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
- sym2 =>
- var e1 = s1.lookupEntry(sym2.name)
- (e1 ne null) && {
- val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner.thisType)
- var isEqual = false
- while (!isEqual && (e1 ne null)) {
- isEqual = e1.sym.info =:= substSym
- e1 = s1.lookupNextEntry(e1)
- }
- isEqual
- }
- }
- //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
- return isSameTypes(rt1.parents, rt2.parents) && {
- val decls1 = rt1.decls
- val decls2 = rt2.decls
- isSubScope(decls1, decls2) && isSubScope(decls2, decls1)
- }
- case _ =>
- }
- case mt1: MethodType =>
- tp2 match {
- case mt2: MethodType =>
- // DEPMETTODO new dependent types: probably fix this, use substSym as done for PolyType
- return isSameTypes(mt1.paramTypes, mt2.paramTypes) &&
- mt1.resultType =:= mt2.resultType &&
- mt1.isImplicit == mt2.isImplicit
- // note: no case NullaryMethodType(restpe) => return mt1.params.isEmpty && mt1.resultType =:= restpe
- case _ =>
- }
- case NullaryMethodType(restpe1) =>
- tp2 match {
- // note: no case mt2: MethodType => return mt2.params.isEmpty && restpe =:= mt2.resultType
- case NullaryMethodType(restpe2) =>
- return restpe1 =:= restpe2
- case _ =>
- }
- case PolyType(tparams1, res1) =>
- tp2 match {
- case PolyType(tparams2, res2) =>
-// assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
- // @M looks like it might suffer from same problem as #2210
- return (
- (sameLength(tparams1, tparams2)) && // corresponds does not check length of two sequences before checking the predicate
- (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
- res1 =:= res2.substSym(tparams2, tparams1)
- )
- case _ =>
- }
- case ExistentialType(tparams1, res1) =>
- tp2 match {
- case ExistentialType(tparams2, res2) =>
- // @M looks like it might suffer from same problem as #2210
- return (
- // corresponds does not check length of two sequences before checking the predicate -- faster & needed to avoid crasher in #2956
- sameLength(tparams1, tparams2) &&
- (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
- res1 =:= res2.substSym(tparams2, tparams1)
- )
- case _ =>
- }
- case TypeBounds(lo1, hi1) =>
- tp2 match {
- case TypeBounds(lo2, hi2) =>
- return lo1 =:= lo2 && hi1 =:= hi2
- case _ =>
- }
- case BoundedWildcardType(bounds) =>
- return bounds containsType tp2
- case _ =>
- }
- tp2 match {
- case BoundedWildcardType(bounds) =>
- return bounds containsType tp1
- case _ =>
- }
- tp1 match {
- case tv @ TypeVar(_,_) =>
- return tv.registerTypeEquality(tp2, true)
- case _ =>
- }
- tp2 match {
- case tv @ TypeVar(_,_) =>
- return tv.registerTypeEquality(tp1, false)
- case _ =>
- }
- tp1 match {
- case _: AnnotatedType =>
- return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case _ =>
- }
- tp2 match {
- case _: AnnotatedType =>
- return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case _ =>
- }
- tp1 match {
- case _: SingletonType =>
- tp2 match {
- case _: SingletonType =>
- @inline def chaseDealiasedUnderlying(tp: Type): Type = {
- var origin = tp
- var next = origin.underlying.dealias
- while (next.isInstanceOf[SingletonType]) {
- assert(origin ne next, origin)
- origin = next
- next = origin.underlying.dealias
- }
- origin
- }
- val origin1 = chaseDealiasedUnderlying(tp1)
- val origin2 = chaseDealiasedUnderlying(tp2)
- ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
- case _ =>
- false
- }
- case _ =>
- false
- }
- }
-
- /** Are `tps1' and `tps2' lists of pairwise equivalent
- * types?
- */
- def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ =:= _)
-
- /** True if two lists have the same length. Since calling length on linear sequences
- * is O(n), it is an inadvisable way to test length equality.
- */
- final def sameLength(xs1: List[_], xs2: List[_]) = compareLengths(xs1, xs2) == 0
- @tailrec final def compareLengths(xs1: List[_], xs2: List[_]): Int =
- if (xs1.isEmpty) { if (xs2.isEmpty) 0 else -1 }
- else if (xs2.isEmpty) 1
- else compareLengths(xs1.tail, xs2.tail)
-
- /** Again avoiding calling length, but the lengthCompare interface is clunky.
- */
- final def hasLength(xs: List[_], len: Int) = xs.lengthCompare(len) == 0
-
- private val pendingSubTypes = new mutable.HashSet[SubTypePair]
- private var basetypeRecursions: Int = 0
- private val pendingBaseTypes = new mutable.HashSet[Type]
-
- def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth)
-
- def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean = try {
- subsametypeRecursions += 1
-
- undoLog undoUnless { // if subtype test fails, it should not affect constraints on typevars
- if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
- val p = new SubTypePair(tp1, tp2)
- if (pendingSubTypes(p))
- false
- else
- try {
- pendingSubTypes += p
- isSubType2(tp1, tp2, depth)
- } finally {
- pendingSubTypes -= p
- }
- } else {
- isSubType2(tp1, tp2, depth)
- }
- }
- } finally {
- subsametypeRecursions -= 1
- // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
- // it doesn't help to keep separate recursion counts for the three methods that now share it
- // if (subsametypeRecursions == 0) undoLog.clear()
- }
-
- /** Does this type have a prefix that begins with a type variable,
- * or is it a refinement type? For type prefixes that fulfil this condition,
- * type selections with the same name of equal (wrt) =:= prefixes are
- * considered equal wrt =:=
- */
- def beginsWithTypeVarOrIsRefined(tp: Type): Boolean = tp match {
- case SingleType(pre, sym) =>
- !(sym hasFlag PACKAGE) && beginsWithTypeVarOrIsRefined(pre)
- case tv at TypeVar(_, constr) =>
- !tv.instValid || beginsWithTypeVarOrIsRefined(constr.inst)
- case RefinedType(_, _) =>
- true
- case _ =>
- false
- }
-
- def instTypeVar(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) =>
- copyTypeRef(tp, instTypeVar(pre), sym, args)
- case SingleType(pre, sym) =>
- singleType(instTypeVar(pre), sym)
- case TypeVar(_, constr) =>
- instTypeVar(constr.inst)
- case _ =>
- tp
- }
-
- def isErrorOrWildcard(tp: Type) = (tp eq ErrorType) || (tp eq WildcardType)
-
- def isSingleType(tp: Type) = tp match {
- case ThisType(_) | SuperType(_, _) | SingleType(_, _) => true
- case _ => false
- }
-
- def isConstantType(tp: Type) = tp match {
- case ConstantType(_) => true
- case _ => false
- }
-
- // @assume tp1.isHigherKinded || tp2.isHigherKinded
- def isHKSubType0(tp1: Type, tp2: Type, depth: Int): Boolean = (
- tp1.typeSymbol == NothingClass
- ||
- tp2.typeSymbol == AnyClass // @M Any and Nothing are super-type resp. subtype of every well-kinded type
- || // @M! normalize reduces higher-kinded case to PolyType's
- ((tp1.normalize.withoutAnnotations , tp2.normalize.withoutAnnotations) match {
- case (PolyType(tparams1, res1), PolyType(tparams2, res2)) => // @assume tp1.isHigherKinded && tp2.isHigherKinded (as they were both normalized to PolyType)
- sameLength(tparams1, tparams2) && {
- if (tparams1.head.owner.isMethod) { // fast-path: polymorphic method type -- type params cannot be captured
- (tparams1 corresponds tparams2)((p1, p2) => p2.info.substSym(tparams2, tparams1) <:< p1.info) &&
- res1 <:< res2.substSym(tparams2, tparams1)
- } else { // normalized higher-kinded type
- //@M for an example of why we need to generate fresh symbols, see neg/tcpoly_ticket2101.scala
- val tpsFresh = cloneSymbols(tparams1)
-
- (tparams1 corresponds tparams2)((p1, p2) =>
- p2.info.substSym(tparams2, tpsFresh) <:< p1.info.substSym(tparams1, tpsFresh)) &&
- res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh)
-
- //@M the forall in the previous test could be optimised to the following,
- // but not worth the extra complexity since it only shaves 1s from quick.comp
- // (List.forall2(tpsFresh/*optimisation*/, tparams2)((p1, p2) =>
- // p2.info.substSym(tparams2, tpsFresh) <:< p1.info /*optimisation, == (p1 from tparams1).info.substSym(tparams1, tpsFresh)*/) &&
- // this optimisation holds because inlining cloneSymbols in `val tpsFresh = cloneSymbols(tparams1)` gives:
- // val tpsFresh = tparams1 map (_.cloneSymbol)
- // for (tpFresh <- tpsFresh) tpFresh.setInfo(tpFresh.info.substSym(tparams1, tpsFresh))
- }
- } && annotationsConform(tp1.normalize, tp2.normalize)
- case (_, _) => false // @assume !tp1.isHigherKinded || !tp2.isHigherKinded
- // --> thus, cannot be subtypes (Any/Nothing has already been checked)
- }))
-
- /** True if all three arguments have the same number of elements and
- * the function is true for all the triples.
- */
- @tailrec final def corresponds3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C], f: (A, B, C) => Boolean): Boolean = {
- if (xs1.isEmpty) xs2.isEmpty && xs3.isEmpty
- else !xs2.isEmpty && !xs3.isEmpty && f(xs1.head, xs2.head, xs3.head) && corresponds3(xs1.tail, xs2.tail, xs3.tail, f)
- }
-
- def isSubArg(t1: Type, t2: Type, variance: Int) =
- (variance > 0 || t2 <:< t1) && (variance < 0 || t1 <:< t2)
-
- def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol]): Boolean =
- corresponds3(tps1, tps2, tparams map (_.variance), isSubArg)
-
- def differentOrNone(tp1: Type, tp2: Type) = if (tp1 eq tp2) NoType else tp1
-
- /** Does type `tp1' conform to `tp2'?
- */
- private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = {
- if ((tp1 eq tp2) || isErrorOrWildcard(tp1) || isErrorOrWildcard(tp2)) return true
- if ((tp1 eq NoType) || (tp2 eq NoType)) return false
- if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass
- if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass
- if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2
- if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType0(tp1, tp2, depth)
-
- /** First try, on the right:
- * - unwrap Annotated types, BoundedWildcardTypes,
- * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard
- * - handle common cases for first-kind TypeRefs on both sides as a fast path.
- */
- def firstTry = { incCounter(ctr1); tp2 match {
- // fast path: two typerefs, none of them HK
- case tr2: TypeRef =>
- tp1 match {
- case tr1: TypeRef =>
- val sym1 = tr1.sym
- val sym2 = tr2.sym
- val pre1 = tr1.pre
- val pre2 = tr2.pre
- (((if (sym1 == sym2) phase.erasedTypes || pre1 <:< pre2
- else (sym1.name == sym2.name && !sym1.isModuleClass && !sym2.isModuleClass &&
- (isUnifiable(pre1, pre2) || isSameSpecializedSkolem(sym1, sym2, pre1, pre2)))) &&
- isSubArgs(tr1.args, tr2.args, sym1.typeParams))
- ||
- sym2.isClass && {
- val base = tr1 baseType sym2
- (base ne tr1) && base <:< tr2
- }
- ||
- thirdTryRef(tr1, tr2))
- case _ =>
- secondTry
- }
- case AnnotatedType(_, _, _) =>
- tp1.withoutAnnotations <:< tp2.withoutAnnotations && annotationsConform(tp1, tp2)
- case BoundedWildcardType(bounds) =>
- tp1 <:< bounds.hi
- case tv2 @ TypeVar(_, constr2) =>
- tp1 match {
- case AnnotatedType(_, _, _) | BoundedWildcardType(_) =>
- secondTry
- case _ =>
- tv2.registerBound(tp1, true)
- }
- case _ =>
- secondTry
- }}
-
- /** Second try, on the left:
- * - unwrap AnnotatedTypes, BoundedWildcardTypes,
- * - bind typevars,
- * - handle existential types by skolemization.
- */
- def secondTry = { incCounter(ctr2); tp1 match {
- case AnnotatedType(_, _, _) =>
- tp1.withoutAnnotations <:< tp2.withoutAnnotations && annotationsConform(tp1, tp2)
- case BoundedWildcardType(bounds) =>
- tp1.bounds.lo <:< tp2
- case tv @ TypeVar(_,_) =>
- tv.registerBound(tp2, false)
- case ExistentialType(_, _) =>
- try {
- skolemizationLevel += 1
- tp1.skolemizeExistential <:< tp2
- } finally {
- skolemizationLevel -= 1
- }
- case _ =>
- thirdTry
- }}
-
- def thirdTryRef(tp1: Type, tp2: TypeRef): Boolean = {
- incCounter(ctr3);
- val sym2 = tp2.sym
- sym2 match {
- case NotNullClass => tp1.isNotNull
- case SingletonClass => tp1.isStable || fourthTry
- case _: ClassSymbol =>
- if (isRaw(sym2, tp2.args))
- isSubType(tp1, rawToExistential(tp2), depth)
- else if (sym2.name == tpnme.REFINE_CLASS_NAME)
- isSubType(tp1, sym2.info, depth)
- else
- fourthTry
- case _: TypeSymbol =>
- if (sym2 hasFlag DEFERRED) {
- val tp2a = tp2.bounds.lo
- isDifferentTypeConstructor(tp2, tp2a) && tp1 <:< tp2a || fourthTry
- } else {
- isSubType(tp1.normalize, tp2.normalize, depth)
- }
- case _ =>
- fourthTry
- }
- }
-
- /** Third try, on the right:
- * - decompose refined types.
- * - handle typerefs, existentials, and notnull types.
- * - handle left+right method types, polytypes, typebounds
- */
- def thirdTry = { incCounter(ctr3); tp2 match {
- case tr2: TypeRef =>
- thirdTryRef(tp1, tr2)
- case rt2: RefinedType =>
- (rt2.parents forall (tp1 <:< _)) &&
- (rt2.decls.toList forall tp1.specializes)
- case et2: ExistentialType =>
- et2.withTypeVars(tp1 <:< _, depth) || fourthTry
- case nn2: NotNullType =>
- tp1.isNotNull && tp1 <:< nn2.underlying
- case mt2: MethodType =>
- tp1 match {
- case mt1 @ MethodType(params1, res1) =>
- val params2 = mt2.params
- val res2 = mt2.resultType
- (sameLength(params1, params2) &&
- matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
- (res1 <:< res2) &&
- mt1.isImplicit == mt2.isImplicit)
- // TODO: if mt1.params.isEmpty, consider NullaryMethodType?
- case _ =>
- false
- }
- case pt2 @ NullaryMethodType(_) =>
- tp1 match {
- // TODO: consider MethodType mt for which mt.params.isEmpty??
- case pt1 @ NullaryMethodType(_) =>
- pt1.resultType <:< pt2.resultType
- case _ =>
- false
- }
- case TypeBounds(lo2, hi2) =>
- tp1 match {
- case TypeBounds(lo1, hi1) =>
- lo2 <:< lo1 && hi1 <:< hi2
- case _ =>
- false
- }
- case _ =>
- fourthTry
- }}
-
- /** Fourth try, on the left:
- * - handle typerefs, refined types, notnull and singleton types.
- */
- def fourthTry = { incCounter(ctr4); tp1 match {
- case tr1 @ TypeRef(_, sym1, _) =>
- sym1 match {
- case NothingClass => true
- case NullClass =>
- tp2 match {
- case TypeRef(_, sym2, _) =>
- sym2.isClass && (sym2 isNonBottomSubClass ObjectClass) &&
- !(tp2.normalize.typeSymbol isNonBottomSubClass NotNullClass)
- case _ =>
- isSingleType(tp2) && tp1 <:< tp2.widen
- }
- case _: ClassSymbol =>
- if (isRaw(sym1, tr1.args))
- isSubType(rawToExistential(tp1), tp2, depth)
- else
- sym1.name == tpnme.REFINE_CLASS_NAME &&
- isSubType(sym1.info, tp2, depth)
- case _: TypeSymbol =>
- if (sym1 hasFlag DEFERRED) {
- val tp1a = tp1.bounds.hi
- isDifferentTypeConstructor(tp1, tp1a) && tp1a <:< tp2
- } else {
- isSubType(tp1.normalize, tp2.normalize, depth)
- }
- case _ =>
- false
- }
- case RefinedType(parents1, _) =>
- parents1 exists (_ <:< tp2)
- case _: SingletonType | _: NotNullType =>
- tp1.underlying <:< tp2
- case _ =>
- false
- }}
-
- firstTry
- }
-
- /** Are `tps1' and `tps2' lists of equal length such
- * that all elements of `tps1' conform to corresponding elements
- * of `tps2'?
- */
- def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ <:< _)
-
- /** Does type `tp' implement symbol `sym' with same or
- * stronger type? Exact only if `sym' is a member of some
- * refinement type, otherwise we might return false negatives.
- */
- def specializesSym(tp: Type, sym: Symbol): Boolean =
- tp.typeSymbol == NothingClass ||
- tp.typeSymbol == NullClass && (sym.owner isSubClass ObjectClass) ||
- (tp.nonPrivateMember(sym.name).alternatives exists
- (alt => sym == alt || specializesSym(tp.narrow, alt, sym.owner.thisType, sym)))
-
- /** Does member `sym1' of `tp1' have a stronger type
- * than member `sym2' of `tp2'?
- */
- private def specializesSym(tp1: Type, sym1: Symbol, tp2: Type, sym2: Symbol): Boolean = {
- val info1 = tp1.memberInfo(sym1)
- val info2 = tp2.memberInfo(sym2).substThis(tp2.typeSymbol, tp1)
- //System.out.println("specializes "+tp1+"."+sym1+":"+info1+sym1.locationString+" AND "+tp2+"."+sym2+":"+info2)//DEBUG
- sym2.isTerm && (info1 <:< info2) /*&& (!sym2.isStable || sym1.isStable) */ ||
- sym2.isAbstractType && {
- val memberTp1 = tp1.memberType(sym1)
- // println("kinds conform? "+(memberTp1, tp1, sym2, kindsConform(List(sym2), List(memberTp1), tp2, sym2.owner)))
- info2.bounds.containsType(memberTp1) &&
- kindsConform(List(sym2), List(memberTp1), tp1, sym1.owner)
- } ||
- sym2.isAliasType && tp2.memberType(sym2).substThis(tp2.typeSymbol, tp1) =:= tp1.memberType(sym1) //@MAT ok
- }
-
- /** A function implementing `tp1' matches `tp2' */
- final def matchesType(tp1: Type, tp2: Type, alwaysMatchSimple: Boolean): Boolean = {
- def matchesQuantified(tparams1: List[Symbol], tparams2: List[Symbol], res1: Type, res2: Type): Boolean = (
- sameLength(tparams1, tparams2) &&
- matchesType(res1, res2.substSym(tparams2, tparams1), alwaysMatchSimple)
- )
- def lastTry =
- tp2 match {
- case ExistentialType(_, res2) if alwaysMatchSimple =>
- matchesType(tp1, res2, true)
- case MethodType(_, _) =>
- false
- case PolyType(tparams2, res2) =>
- tparams2.isEmpty && matchesType(tp1, res2, alwaysMatchSimple)
- case _ =>
- alwaysMatchSimple || tp1 =:= tp2
- }
- tp1 match {
- case mt1 @ MethodType(params1, res1) =>
- tp2 match {
- case mt2 @ MethodType(params2, res2) =>
- sameLength(params1, params2) && // useful pre-screening optimization
- matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
- matchesType(res1, res2, alwaysMatchSimple) &&
- mt1.isImplicit == mt2.isImplicit
- case NullaryMethodType(res2) =>
- if (params1.isEmpty) matchesType(res1, res2, alwaysMatchSimple)
- else matchesType(tp1, res2, alwaysMatchSimple)
- case ExistentialType(_, res2) =>
- alwaysMatchSimple && matchesType(tp1, res2, true)
- case _ =>
- false
- }
- case mt1 @ NullaryMethodType(res1) =>
- tp2 match {
- case mt2 @ MethodType(Nil, res2) => // could never match if params nonEmpty, and !mt2.isImplicit is implied by empty param list
- matchesType(res1, res2, alwaysMatchSimple)
- case NullaryMethodType(res2) =>
- matchesType(res1, res2, alwaysMatchSimple)
- case ExistentialType(_, res2) =>
- alwaysMatchSimple && matchesType(tp1, res2, true)
- case _ =>
- matchesType(res1, tp2, alwaysMatchSimple)
- }
- case PolyType(tparams1, res1) =>
- tp2 match {
- case PolyType(tparams2, res2) =>
- matchesQuantified(tparams1, tparams2, res1, res2)
- case ExistentialType(_, res2) =>
- alwaysMatchSimple && matchesType(tp1, res2, true)
- case _ =>
- false // remember that tparams1.nonEmpty is now an invariant of PolyType
- }
- case ExistentialType(tparams1, res1) =>
- tp2 match {
- case ExistentialType(tparams2, res2) =>
- matchesQuantified(tparams1, tparams2, res1, res2)
- case _ =>
- if (alwaysMatchSimple) matchesType(res1, tp2, true)
- else lastTry
- }
- case _ =>
- lastTry
- }
- }
-
-/** matchesType above is an optimized version of the following implementation:
-
- def matchesType2(tp1: Type, tp2: Type, alwaysMatchSimple: Boolean): Boolean = {
- def matchesQuantified(tparams1: List[Symbol], tparams2: List[Symbol], res1: Type, res2: Type): Boolean =
- tparams1.length == tparams2.length &&
- matchesType(res1, res2.substSym(tparams2, tparams1), alwaysMatchSimple)
- (tp1, tp2) match {
- case (MethodType(params1, res1), MethodType(params2, res2)) =>
- params1.length == params2.length && // useful pre-secreening optimization
- matchingParams(params1, params2, tp1.isInstanceOf[JavaMethodType], tp2.isInstanceOf[JavaMethodType]) &&
- matchesType(res1, res2, alwaysMatchSimple) &&
- tp1.isImplicit == tp2.isImplicit
- case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
- matchesQuantified(tparams1, tparams2, res1, res2)
- case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) =>
- matchesType(rtp1, rtp2, alwaysMatchSimple)
- case (MethodType(List(), rtp1), NullaryMethodType(rtp2)) =>
- matchesType(rtp1, rtp2, alwaysMatchSimple)
- case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
- matchesQuantified(tparams1, tparams2, res1, res2)
- case (ExistentialType(_, res1), _) if alwaysMatchSimple =>
- matchesType(res1, tp2, alwaysMatchSimple)
- case (_, ExistentialType(_, res2)) if alwaysMatchSimple =>
- matchesType(tp1, res2, alwaysMatchSimple)
- case (NullaryMethodType(rtp1), _) =>
- matchesType(rtp1, tp2, alwaysMatchSimple)
- case (_, NullaryMethodType(rtp2)) =>
- matchesType(tp1, rtp2, alwaysMatchSimple)
- case (MethodType(_, _), _) => false
- case (PolyType(_, _), _) => false
- case (_, MethodType(_, _)) => false
- case (_, PolyType(_, _)) => false
- case _ =>
- alwaysMatchSimple || tp1 =:= tp2
- }
- }
-*/
-
- /** Are `syms1' and `syms2' parameter lists with pairwise equivalent types? */
- private def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match {
- case Nil =>
- syms2.isEmpty
- case sym1 :: rest1 =>
- syms2 match {
- case Nil =>
- false
- case sym2 :: rest2 =>
- val tp1 = sym1.tpe
- val tp2 = sym2.tpe
- (tp1 =:= tp2 ||
- syms1isJava && tp2.typeSymbol == ObjectClass && tp1.typeSymbol == AnyClass ||
- syms2isJava && tp1.typeSymbol == ObjectClass && tp2.typeSymbol == AnyClass) &&
- matchingParams(rest1, rest2, syms1isJava, syms2isJava)
- }
- }
-
- /** like map2, but returns list `xs' itself - instead of a copy - if function
- * `f' maps all elements to themselves.
- */
- def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
- if (xs.isEmpty) xs
- else {
- val x1 = f(xs.head, ys.head)
- val xs1 = map2Conserve(xs.tail, ys.tail)(f)
- if ((x1 eq xs.head) && (xs1 eq xs.tail)) xs
- else x1 :: xs1
- }
-
- /** Solve constraint collected in types `tvars'.
- *
- * @param tvars All type variables to be instantiated.
- * @param tparams The type parameters corresponding to `tvars'
- * @param variances The variances of type parameters; need to reverse
- * solution direction for all contravariant variables.
- * @param upper When `true' search for max solution else min.
- */
- def solve(tvars: List[TypeVar], tparams: List[Symbol],
- variances: List[Int], upper: Boolean): Boolean =
- solve(tvars, tparams, variances, upper, AnyDepth)
-
- def solve(tvars: List[TypeVar], tparams: List[Symbol],
- variances: List[Int], upper: Boolean, depth: Int): Boolean = {
- val config = tvars zip (tparams zip variances)
-
- def solveOne(tvar: TypeVar, tparam: Symbol, variance: Int) {
- if (tvar.constr.inst == NoType) {
- val up = if (variance != CONTRAVARIANT) upper else !upper
- tvar.constr.inst = null
- val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo
- //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound))
- var cyclic = bound contains tparam
- for ((tvar2, (tparam2, variance2)) <- config) {
- if (tparam2 != tparam &&
- ((bound contains tparam2) ||
- up && (tparam2.info.bounds.lo =:= tparam.tpe) ||
- !up && (tparam2.info.bounds.hi =:= tparam.tpe))) {
- if (tvar2.constr.inst eq null) cyclic = true
- solveOne(tvar2, tparam2, variance2)
- }
- }
- if (!cyclic) {
- if (up) {
- if (bound.typeSymbol != AnyClass)
- tvar addHiBound bound.instantiateTypeParams(tparams, tvars)
- for (tparam2 <- tparams)
- tparam2.info.bounds.lo.dealias match {
- case TypeRef(_, `tparam`, _) =>
- tvar addHiBound tparam2.tpe.instantiateTypeParams(tparams, tvars)
- case _ =>
- }
- } else {
- if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) {
- tvar addLoBound bound.instantiateTypeParams(tparams, tvars)
- }
- for (tparam2 <- tparams)
- tparam2.info.bounds.hi.dealias match {
- case TypeRef(_, `tparam`, _) =>
- tvar addLoBound tparam2.tpe.instantiateTypeParams(tparams, tvars)
- case _ =>
- }
- }
- }
- tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar
-
- //println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen)))
-
- tvar setInst (
- if (up) {
- if (depth != AnyDepth) glb(tvar.constr.hiBounds, depth) else glb(tvar.constr.hiBounds)
- } else {
- if (depth != AnyDepth) lub(tvar.constr.loBounds, depth) else lub(tvar.constr.loBounds)
- })
-
- //Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG
- }
- }
-
- // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
- for ((tvar, (tparam, variance)) <- config)
- solveOne(tvar, tparam, variance)
-
- tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst))
- }
-
- /** Do type arguments `targs' conform to formal parameters
- * `tparams'?
- *
- * @param tparams ...
- * @param targs ...
- * @return ...
- */
- def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = {
- var bounds = instantiatedBounds(pre, owner, tparams, targs)
- if (targs.exists(_.annotations.nonEmpty))
- bounds = adaptBoundsToAnnotations(bounds, tparams, targs)
- (bounds corresponds targs)(_ containsType _)
- }
-
- def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] =
- tparams map (_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds)
-
- private def findRecursiveBounds(ts: List[Type]): List[(Symbol, Symbol)] = {
- if (ts.isEmpty) Nil
- else {
- val sym = ts.head.typeSymbol
- require(ts.tail forall (_.typeSymbol == sym), ts)
- for (p <- sym.typeParams ; in <- sym.typeParams ; if in.info.bounds contains p) yield
- p -> in
- }
- }
-
-// Lubs and Glbs ---------------------------------------------------------
-
- /** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix),
- * compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types:
- *
- * xs <= ys iff forall y in ys exists x in xs such that x <: y
- *
- *
- * @arg tsParams for each type in the original list of types `ts0`, its list of type parameters (if that type is a type constructor)
- * (these type parameters may be referred to by type arguments in the BTS column of those types,
- * and must be interpreted as bound variables; i.e., under a type lambda that wraps the types that refer to these type params)
- * @arg tsBts a matrix whose columns are basetype sequences
- * the first row is the original list of types for which we're computing the lub
- * (except that type constructors have been applied to their dummyArgs)
- * @See baseTypeSeq for a definition of sorted and upwards closed.
- */
- //
- private def lubList(ts: List[Type], depth: Int): List[Type] = {
- // Matching the type params of one of the initial types means dummies.
- val initialTypeParams = ts map (_.typeParams)
- def isHotForTs(xs: List[Type]) = initialTypeParams contains xs.map(_.typeSymbol)
-
- def elimHigherOrderTypeParam(tp: Type) = tp match {
- case TypeRef(pre, sym, args) if args.nonEmpty && isHotForTs(args) => tp.typeConstructor
- case _ => tp
- }
- var lubListDepth = 0
- def loop(tsBts: List[List[Type]]): List[Type] = {
- lubListDepth += 1
-
- if (tsBts.isEmpty || tsBts.exists(_.isEmpty)) Nil
- else if (tsBts.tail.isEmpty) tsBts.head
- else {
- // ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts.
- // Invariant: all symbols "under" (closer to the first row) the frontier
- // are smaller (according to _.isLess) than the ones "on and beyond" the frontier
- val ts0 = tsBts map (_.head)
-
- // Is the frontier made up of types with the same symbol?
- val isUniformFrontier = (ts0: @unchecked) match {
- case t :: ts => ts forall (_.typeSymbol == t.typeSymbol)
- }
-
- // Produce a single type for this frontier by merging the prefixes and arguments of those
- // typerefs that share the same symbol: that symbol is the current maximal symbol for which
- // the invariant holds, i.e., the one that conveys most information wrt subtyping. Before
- // merging, strip targs that refer to bound tparams (when we're computing the lub of type
- // constructors.) Also filter out all types that are a subtype of some other type.
- if (isUniformFrontier) {
- if (settings.debug.value || printLubs) {
- val fbounds = findRecursiveBounds(ts0)
- if (fbounds.nonEmpty) {
- println("Encountered " + fbounds.size + " recursive bounds while lubbing " + ts0.size + " types.")
- for ((p0, p1) <- fbounds) {
- val desc = if (p0 == p1) "its own bounds" else "the bounds of " + p1
-
- println(" " + p0.fullLocationString + " appears in " + desc)
- println(" " + p1 + " " + p1.info.bounds)
- }
- println("")
- }
- }
- val tails = tsBts map (_.tail)
- mergePrefixAndArgs(elimSub(ts0 map elimHigherOrderTypeParam, depth), 1, depth) match {
- case Some(tp) => tp :: loop(tails)
- case _ => loop(tails)
- }
- }
- else {
- // frontier is not uniform yet, move it beyond the current minimal symbol;
- // lather, rinSe, repeat
- val sym = minSym(ts0)
- val newtps = tsBts map (ts => if (ts.head.typeSymbol == sym) ts.tail else ts)
- if (printLubs) {
- val str = (newtps.zipWithIndex map { case (tps, idx) =>
- tps.map(" " + _ + "\n").mkString(" (" + idx + ")\n", "", "\n")
- }).mkString("")
- }
-
- loop(newtps)
- }
- }
- }
-
- loop(ts map (_.baseTypeSeq.toList))
- }
-
- // @AM the following problem is solved by elimHOTparams in lublist
- // @PP lubLists gone bad: lubList(List(
- // List(scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq], ScalaObject, java.lang.Object, Any)
- // List(scala.collection.generic.GenericCompanion[scala.collection.mutable.Seq], ScalaObject, java.lang.Object, Any)
- // )) == (
- // List(scala.collection.generic.GenericCompanion[Seq**[Any]**], ScalaObject, java.lang.Object, Any)
- // )
-
- /** The minimal symbol (wrt Symbol.isLess) of a list of types */
- private def minSym(tps: List[Type]): Symbol =
- (tps.head.typeSymbol /: tps.tail) {
- (sym1, tp2) => if (tp2.typeSymbol isLess sym1) tp2.typeSymbol else sym1
- }
-
- /** A minimal type list which has a given list of types as its base type sequence */
- def spanningTypes(ts: List[Type]): List[Type] = ts match {
- case List() => List()
- case first :: rest =>
- first :: spanningTypes(
- rest filter (t => !first.typeSymbol.isSubClass(t.typeSymbol)))
- }
-
- /** Eliminate from list of types all elements which are a supertype
- * of some other element of the list. */
- private def elimSuper(ts: List[Type]): List[Type] = ts match {
- case List() => List()
- case t :: ts1 =>
- val rest = elimSuper(ts1 filter (t1 => !(t <:< t1)))
- if (rest exists (t1 => t1 <:< t)) rest else t :: rest
- }
- def elimAnonymousClass(t: Type) = t match {
- case TypeRef(pre, clazz, Nil) if clazz.isAnonymousClass =>
- clazz.classBound.asSeenFrom(pre, clazz.owner)
- case _ =>
- t
- }
-
- /** A collector that tests for existential types appearing at given variance in a type */
- class ContainsVariantExistentialCollector(v: Int) extends TypeCollector(false) {
- def traverse(tp: Type) = tp match {
- case ExistentialType(_, _) if (variance == v) => result = true
- case _ => mapOver(tp)
- }
- def init() = {
- variance = 1
- this
- }
- }
-
- val containsCovariantExistentialCollector = new ContainsVariantExistentialCollector(1)
- val containsContravariantExistentialCollector = new ContainsVariantExistentialCollector(-1)
-
- /** Eliminate from list of types all elements which are a subtype
- * of some other element of the list. */
- private def elimSub(ts: List[Type], depth: Int): List[Type] = {
- def elimSub0(ts: List[Type]): List[Type] = ts match {
- case List() => List()
- case t :: ts1 =>
- val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, decr(depth))))
- if (rest exists (t1 => isSubType(t, t1, decr(depth)))) rest else t :: rest
- }
- val ts0 = elimSub0(ts)
- if (ts0.isEmpty || ts0.tail.isEmpty) ts0
- else {
- val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.underlying))
- if (ts1 eq ts0) ts0
- else elimSub(ts1, depth)
- }
- }
-
- private def stripExistentialsAndTypeVars(ts: List[Type]): (List[Type], List[Symbol]) = {
- val quantified = ts flatMap {
- case ExistentialType(qs, _) => qs
- case t => List()
- }
- def stripType(tp: Type) = tp match {
- case ExistentialType(_, res) =>
- res
- case TypeVar(_, constr) =>
- if (constr.instValid) constr.inst
- else abort("trying to do lub/glb of typevar "+tp)
- case t => t
- }
- val strippedTypes = ts mapConserve stripType
- (strippedTypes, quantified)
- }
-
- def weakLub(ts: List[Type]) =
- if (ts.nonEmpty && (ts forall isNumericValueType)) (numericLub(ts), true)
- else if (ts.nonEmpty && (ts exists (_.annotations.nonEmpty)))
- (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true)
- else (lub(ts), false)
-
- def weakGlb(ts: List[Type]) = {
- if (ts.nonEmpty && (ts forall isNumericValueType)) {
- val nglb = numericGlb(ts)
- if (nglb != NoType) (nglb, true)
- else (glb(ts), false)
- } else if (ts.nonEmpty && (ts exists (_.annotations.nonEmpty))) {
- (annotationsGlb(glb(ts map (_.withoutAnnotations)), ts), true)
- } else (glb(ts), false)
- }
-
- def numericLub(ts: List[Type]) =
- ts reduceLeft ((t1, t2) =>
- if (isNumericSubType(t1, t2)) t2
- else if (isNumericSubType(t2, t1)) t1
- else IntClass.tpe)
-
- def numericGlb(ts: List[Type]) =
- ts reduceLeft ((t1, t2) =>
- if (isNumericSubType(t1, t2)) t1
- else if (isNumericSubType(t2, t1)) t2
- else NoType)
-
- def isWeakSubType(tp1: Type, tp2: Type) =
- tp1.deconst.normalize match {
- case TypeRef(_, sym1, _) if isNumericValueClass(sym1) =>
- tp2.deconst.normalize match {
- case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
- isNumericSubClass(sym1, sym2)
- case tv2 @ TypeVar(_, _) =>
- tv2.registerBound(tp1, isLowerBound = true, isNumericBound = true)
- case _ =>
- isSubType(tp1, tp2)
- }
- case tv1 @ TypeVar(_, _) =>
- tp2.deconst.normalize match {
- case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
- tv1.registerBound(tp2, isLowerBound = false, isNumericBound = true)
- case _ =>
- isSubType(tp1, tp2)
- }
- case _ =>
- isSubType(tp1, tp2)
- }
-
- def isNumericSubType(tp1: Type, tp2: Type) =
- isNumericValueType(tp1) && isNumericValueType(tp2) &&
- isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol)
-
- private val lubResults = new mutable.HashMap[(Int, List[Type]), Type]
- private val glbResults = new mutable.HashMap[(Int, List[Type]), Type]
-
- def lub(ts: List[Type]): Type = try {
- lub(ts, lubDepth(ts))
- } finally {
- lubResults.clear()
- glbResults.clear()
- }
-
- /** The least upper bound wrt <:< of a list of types */
- def lub(ts: List[Type], depth: Int): Type = {
- def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match {
- case List() => NothingClass.tpe
- case List(t) => t
- case ts @ PolyType(tparams, _) :: _ =>
- val tparams1 = (tparams, matchingBounds(ts, tparams).transpose).zipped map
- ((tparam, bounds) => tparam.cloneSymbol.setInfo(glb(bounds, depth)))
- PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1)))
- case ts @ MethodType(params, _) :: rest =>
- MethodType(params, lub0(matchingRestypes(ts, params map (_.tpe))))
- case ts @ NullaryMethodType(_) :: rest =>
- NullaryMethodType(lub0(matchingRestypes(ts, Nil)))
- case ts @ TypeBounds(_, _) :: rest =>
- TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
- case ts =>
- lubResults get (depth, ts) match {
- case Some(lubType) =>
- lubType
- case None =>
- lubResults((depth, ts)) = AnyClass.tpe
- val res = if (depth < 0) AnyClass.tpe else lub1(ts)
- lubResults((depth, ts)) = res
- res
- }
- }
- def lub1(ts0: List[Type]): Type = {
- val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
- val lubBaseTypes: List[Type] = lubList(ts, depth)
- val lubParents = spanningTypes(lubBaseTypes)
- val lubOwner = commonOwner(ts)
- val lubBase = intersectionType(lubParents, lubOwner)
- val lubType =
- if (phase.erasedTypes || depth == 0) lubBase
- else {
- val lubRefined = refinedType(lubParents, lubOwner)
- val lubThisType = lubRefined.typeSymbol.thisType
- val narrowts = ts map (_.narrow)
- def lubsym(proto: Symbol): Symbol = {
- val prototp = lubThisType.memberInfo(proto)
- val syms = narrowts map (t =>
- t.nonPrivateMember(proto.name).suchThat(sym =>
- sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t)))
- if (syms contains NoSymbol) NoSymbol
- else {
- val symtypes =
- (narrowts, syms).zipped map ((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
- if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
- proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth)))
- else if (symtypes.tail forall (symtypes.head =:=))
- proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head)
- else {
- def lubBounds(bnds: List[TypeBounds]): TypeBounds =
- TypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth)))
- lubRefined.typeSymbol.newAbstractType(proto.pos, proto.name.toTypeName)
- .setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds)))
- }
- }
- }
- def refines(tp: Type, sym: Symbol): Boolean = {
- val syms = tp.nonPrivateMember(sym.name).alternatives;
- !syms.isEmpty && (syms forall (alt =>
- // todo alt != sym is strictly speaking not correct, but without it we lose
- // efficiency.
- alt != sym && !specializesSym(lubThisType, sym, tp, alt)))
- }
- for (sym <- lubBase.nonPrivateMembers) {
- // add a refinement symbol for all non-class members of lubBase
- // which are refined by every type in ts.
- if (!sym.isClass && !sym.isConstructor && (narrowts forall (t => refines(t, sym))))
- try {
- val lsym = lubsym(sym)
- if (lsym != NoSymbol) addMember(lubThisType, lubRefined, lubsym(sym))
- } catch {
- case ex: NoCommonType =>
- }
- }
- if (lubRefined.decls.isEmpty) lubBase
- else if (!verifyLubs) lubRefined
- else {
- // Verify that every given type conforms to the calculated lub.
- // In theory this should not be necessary, but higher-order type
- // parameters are not handled correctly.
- val ok = ts forall { t =>
- (t <:< lubRefined) || {
- if (settings.debug.value) {
- Console.println(
- "Malformed lub: " + lubRefined + "\n" +
- "Argument " + t + " does not conform. Falling back to " + lubBase
- )
- }
- false
- }
- }
- // If not, fall back on the more conservative calculation.
- if (ok) lubRefined
- else lubBase
- }
- }
- existentialAbstraction(tparams, lubType)
- }
- if (printLubs) {
- println(indent + "lub of " + ts + " at depth "+depth)//debug
- indent = indent + " "
- assert(indent.length <= 100)
- }
- val res = lub0(ts)
- if (printLubs) {
- indent = indent dropRight 2
- println(indent + "lub of " + ts + " is " + res)//debug
- }
- if (ts forall (_.isNotNull)) res.notNull else res
- }
-
- val GlbFailure = new Throwable
-
- /** A global counter for glb calls in the `specializes' query connected to the `addMembers'
- * call in `glb'. There's a possible infinite recursion when `specializes' calls
- * memberType, which calls baseTypeSeq, which calls mergePrefixAndArgs, which calls glb.
- * The counter breaks this recursion after two calls.
- * If the recursion is broken, no member is added to the glb.
- */
- private var globalGlbDepth = 0
- private final val globalGlbLimit = 2
-
- def glb(ts: List[Type]): Type = try {
- glb(ts, lubDepth(ts))
- } finally {
- lubResults.clear()
- glbResults.clear()
- }
-
- /** The greatest lower bound wrt <:< of a list of types */
- private def glb(ts: List[Type], depth: Int): Type = {
- def glb0(ts0: List[Type]): Type = elimSuper(ts0) match {
- case List() => AnyClass.tpe
- case List(t) => t
- case ts @ PolyType(tparams, _) :: _ =>
- val tparams1 = (tparams, matchingBounds(ts, tparams).transpose).zipped map
- ((tparam, bounds) => tparam.cloneSymbol.setInfo(lub(bounds, depth)))
- PolyType(tparams1, glb0(matchingInstTypes(ts, tparams1)))
- case ts @ MethodType(params, _) :: rest =>
- MethodType(params, glb0(matchingRestypes(ts, params map (_.tpe))))
- case ts @ NullaryMethodType(_) :: rest =>
- NullaryMethodType(glb0(matchingRestypes(ts, Nil)))
- case ts @ TypeBounds(_, _) :: rest =>
- TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth))
- case ts =>
- glbResults get (depth, ts) match {
- case Some(glbType) =>
- glbType
- case _ =>
- glbResults((depth, ts)) = NothingClass.tpe
- val res = if (depth < 0) NothingClass.tpe else glb1(ts)
- glbResults((depth, ts)) = res
- res
- }
- }
- def glb1(ts0: List[Type]): Type = {
- try {
- val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
- val glbOwner = commonOwner(ts)
- def refinedToParents(t: Type): List[Type] = t match {
- case RefinedType(ps, _) => ps flatMap refinedToParents
- case _ => List(t)
- }
- def refinedToDecls(t: Type): List[Scope] = t match {
- case RefinedType(ps, decls) =>
- val dss = ps flatMap refinedToDecls
- if (decls.isEmpty) dss else decls :: dss
- case _ => List()
- }
- val ts1 = ts flatMap refinedToParents
- val glbBase = intersectionType(ts1, glbOwner)
- val glbType =
- if (phase.erasedTypes || depth == 0) glbBase
- else {
- val glbRefined = refinedType(ts1, glbOwner)
- val glbThisType = glbRefined.typeSymbol.thisType
- def glbsym(proto: Symbol): Symbol = {
- val prototp = glbThisType.memberInfo(proto)
- val syms = for (t <- ts;
- alt <- (t.nonPrivateMember(proto.name).alternatives);
- if glbThisType.memberInfo(alt) matches prototp
- ) yield alt
- val symtypes = syms map glbThisType.memberInfo
- assert(!symtypes.isEmpty)
- proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted(
- if (proto.isTerm) glb(symtypes, decr(depth))
- else {
- def isTypeBound(tp: Type) = tp match {
- case TypeBounds(_, _) => true
- case _ => false
- }
- def glbBounds(bnds: List[Type]): TypeBounds = {
- val lo = lub(bnds map (_.bounds.lo), decr(depth))
- val hi = glb(bnds map (_.bounds.hi), decr(depth))
- if (lo <:< hi) TypeBounds(lo, hi)
- else throw GlbFailure
- }
- val symbounds = symtypes filter isTypeBound
- var result: Type =
- if (symbounds.isEmpty)
- TypeBounds.empty
- else glbBounds(symbounds)
- for (t <- symtypes if !isTypeBound(t))
- if (result.bounds containsType t) result = t
- else throw GlbFailure
- result
- })
- }
- if (globalGlbDepth < globalGlbLimit)
- try {
- globalGlbDepth += 1
- val dss = ts flatMap refinedToDecls
- for (ds <- dss; val sym <- ds.iterator)
- if (globalGlbDepth < globalGlbLimit && !(glbThisType specializes sym))
- try {
- addMember(glbThisType, glbRefined, glbsym(sym))
- } catch {
- case ex: NoCommonType =>
- }
- } finally {
- globalGlbDepth -= 1
- }
- if (glbRefined.decls.isEmpty) glbBase else glbRefined
- }
- existentialAbstraction(tparams, glbType)
- } catch {
- case GlbFailure =>
- if (ts forall (t => NullClass.tpe <:< t)) NullClass.tpe
- else NothingClass.tpe
- }
- }
- // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG
-
- val res = glb0(ts)
-
- // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
-
- if (ts exists (_.isNotNull)) res.notNull else res
- }
-
- /** The most deeply nested owner that contains all the symbols
- * of thistype or prefixless typerefs/singletype occurrences in given type.
- */
- private def commonOwner(t: Type): Symbol = {
- commonOwnerMap.init
- commonOwnerMap.apply(t)
- commonOwnerMap.result
- }
-
- /** The most deeply nested owner that contains all the symbols
- * of thistype or prefixless typerefs/singletype occurrences in given list
- * of types.
- */
- private def commonOwner(tps: List[Type]): Symbol = {
- // if (settings.debug.value) log("computing common owner of types " + tps)//DEBUG
- commonOwnerMap.init
- tps foreach { tp => commonOwnerMap.apply(tp); () }
- commonOwnerMap.result
- }
-
- /** Compute lub (if variance == 1) or glb (if variance == -1) of given list
- * of types `tps'. All types in `tps' are typerefs or singletypes
- * with the same symbol.
- * Return `Some(x)' if the computation succeeds with result `x'.
- * Return `None' if the computation fails.
- */
- def mergePrefixAndArgs(tps: List[Type], variance: Int, depth: Int): Option[Type] = tps match {
- case List(tp) =>
- Some(tp)
- case TypeRef(_, sym, _) :: rest =>
- val pres = tps map (_.prefix) // prefix normalizes automatically
- val pre = if (variance == 1) lub(pres, depth) else glb(pres, depth)
- val argss = tps map (_.normalize.typeArgs) // symbol equality (of the tp in tps) was checked using typeSymbol, which normalizes, so should normalize before retrieving arguments
- val capturedParams = new ListBuffer[Symbol]
- try {
- if (sym == ArrayClass && phase.erasedTypes) {
- // special treatment for lubs of array types after erasure:
- // if argss contain one value type and some other type, the lub is Object
- // if argss contain several reference types, the lub is an array over lub of argtypes
- if (argss exists (_.isEmpty)) {
- None // something is wrong: an array without a type arg.
- } else {
- val args = argss map (_.head)
- if (args.tail forall (_ =:= args.head)) Some(typeRef(pre, sym, List(args.head)))
- else if (args exists (arg => isValueClass(arg.typeSymbol))) Some(ObjectClass.tpe)
- else Some(typeRef(pre, sym, List(lub(args))))
- }
- } else {
- val args = (sym.typeParams, argss.transpose).zipped map { (tparam, as) =>
- if (depth == 0)
- if (tparam.variance == variance) AnyClass.tpe
- else if (tparam.variance == -variance) NothingClass.tpe
- else NoType
- else {
- if (tparam.variance == variance) lub(as, decr(depth))
- else if (tparam.variance == -variance) glb(as, decr(depth))
- else {
- val l = lub(as, decr(depth))
- val g = glb(as, decr(depth))
- if (l <:< g) l
- else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
- // just err on the conservative side, i.e. with a bound that is too high.
- // if(!(tparam.info.bounds contains tparam)){ //@M can't deal with f-bounds, see #2251
-
- val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
- capturedParams += qvar
- qvar.tpe
- }
- }
- }
- }
- if (args contains NoType) None
- else Some(existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args)))
- }
- } catch {
- case ex: MalformedType => None
- case ex: IndexOutOfBoundsException => // transpose freaked out because of irregular argss
- // catching just in case (shouldn't happen, but also doesn't cost us)
- if (settings.debug.value) log("transposed irregular matrix!?"+ (tps, argss))
- None
- }
- case SingleType(_, sym) :: rest =>
- val pres = tps map (_.prefix)
- val pre = if (variance == 1) lub(pres, depth) else glb(pres, depth)
- try {
- Some(singleType(pre, sym))
- } catch {
- case ex: MalformedType => None
- }
- case ExistentialType(tparams, quantified) :: rest =>
- mergePrefixAndArgs(quantified :: rest, variance, depth) map (existentialAbstraction(tparams, _))
- case _ =>
- assert(false, tps); None
- }
-
- /** Make symbol `sym' a member of scope `tp.decls'
- * where `thistp' is the narrowed owner type of the scope.
- */
- def addMember(thistp: Type, tp: Type, sym: Symbol) {
- assert(sym != NoSymbol)
- // if (settings.debug.value) log("add member " + sym+":"+sym.info+" to "+thistp) //DEBUG
- if (!(thistp specializes sym)) {
- if (sym.isTerm)
- for (alt <- tp.nonPrivateDecl(sym.name).alternatives)
- if (specializesSym(thistp, sym, thistp, alt))
- tp.decls unlink alt;
- tp.decls enter sym
- }
- }
-
- /** All types in list must be polytypes with type parameter lists of
- * same length as tparams.
- * Returns list of list of bounds infos, where corresponding type
- * parameters are renamed to tparams.
- */
- private def matchingBounds(tps: List[Type], tparams: List[Symbol]): List[List[Type]] = {
- def getBounds(tp: Type): List[Type] = tp match {
- case PolyType(tparams1, _) if sameLength(tparams1, tparams) =>
- tparams1 map (tparam => tparam.info.substSym(tparams1, tparams))
- case tp =>
- if (tp ne tp.normalize) getBounds(tp.normalize)
- else throw new NoCommonType(tps)
- }
- tps map getBounds
- }
-
- /** All types in list must be polytypes with type parameter lists of
- * same length as tparams.
- * Returns list of instance types, where corresponding type
- * parameters are renamed to tparams.
- */
- private def matchingInstTypes(tps: List[Type], tparams: List[Symbol]): List[Type] = {
- def transformResultType(tp: Type): Type = tp match {
- case PolyType(tparams1, restpe) if sameLength(tparams1, tparams) =>
- restpe.substSym(tparams1, tparams)
- case tp =>
- if (tp ne tp.normalize) transformResultType(tp.normalize)
- else throw new NoCommonType(tps)
- }
- tps map transformResultType
- }
-
- /** All types in list must be method types with equal parameter types.
- * Returns list of their result types.
- */
- private def matchingRestypes(tps: List[Type], pts: List[Type]): List[Type] =
- tps map {
- case MethodType(params1, res) if (isSameTypes(params1 map (_.tpe), pts)) =>
- res
- case NullaryMethodType(res) if pts isEmpty =>
- res
- case _ =>
- throw new NoCommonType(tps)
- }
-
-
- // TODO: this desperately needs to be cleaned up
- // plan: split into kind inference and subkinding
- // every Type has a (cached) Kind
- def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean =
- checkKindBounds0(tparams, targs, pre, owner, false).isEmpty
-
- /** Check well-kindedness of type application (assumes arities are already checked) -- @M
- *
- * This check is also performed when abstract type members become concrete (aka a "type alias") -- then tparams.length==1
- * (checked one type member at a time -- in that case, prefix is the name of the type alias)
- *
- * Type application is just like value application: it's "contravariant" in the sense that
- * the type parameters of the supplied type arguments must conform to the type parameters of
- * the required type parameters:
- * - their bounds must be less strict
- * - variances must match (here, variances are absolute, the variance of a type parameter does not influence the variance of its higher-order parameters)
- * - @M TODO: are these conditions correct,sufficient&necessary?
- *
- * e.g. class Iterable[t, m[+x <: t]] --> the application Iterable[Int, List] is okay, since
- * List's type parameter is also covariant and its bounds are weaker than <: Int
- */
- def checkKindBounds0(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol, explainErrors: Boolean): List[(Type, Symbol, List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)])] = {
- var error = false
-
- def transform(tp: Type, clazz: Symbol): Type = tp.asSeenFrom(pre, clazz) // instantiate type params that come from outside the abstract type we're currently checking
- def transformedBounds(p: Symbol, o: Symbol) = transform(p.info.instantiateTypeParams(tparams, targs).bounds, o)
-
- /** Check whether <arg>sym1</arg>'s variance conforms to <arg>sym2</arg>'s variance
- *
- * If <arg>sym2</arg> is invariant, <arg>sym1</arg>'s variance is irrelevant. Otherwise they must be equal.
- */
- def variancesMatch(sym1: Symbol, sym2: Symbol): Boolean = (sym2.variance==0 || sym1.variance==sym2.variance)
-
- // check that the type parameters <arg>hkargs</arg> to a higher-kinded type conform to the expected params <arg>hkparams</arg>
- def checkKindBoundsHK(
- hkargs: List[Symbol],
- arg: Symbol,
- param: Symbol,
- paramowner: Symbol,
- underHKParams: List[Symbol],
- withHKArgs: List[Symbol]
- ): (List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)]) = {
-
- def bindHKParams(tp: Type) = tp.substSym(underHKParams, withHKArgs)
- // @M sometimes hkargs != arg.typeParams, the symbol and the type may have very different type parameters
- val hkparams = param.typeParams
-
- if (settings.debug.value) {
- log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner)
- log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner)
- log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs)
- }
-
- if (!sameLength(hkargs, hkparams)) {
- if (arg == AnyClass || arg == NothingClass) (Nil, Nil, Nil) // Any and Nothing are kind-overloaded
- else {error = true; (List((arg, param)), Nil, Nil) } // shortcut: always set error, whether explainTypesOrNot
- }
- else {
- val _arityMismatches = if (explainErrors) new ListBuffer[(Symbol, Symbol)] else null
- val _varianceMismatches = if (explainErrors) new ListBuffer[(Symbol, Symbol)] else null
- val _stricterBounds = if (explainErrors) new ListBuffer[(Symbol, Symbol)] else null
-
- def varianceMismatch(a: Symbol, p: Symbol) { if(explainErrors) _varianceMismatches += ((a, p)) else error = true}
- def stricterBound(a: Symbol, p: Symbol) { if(explainErrors) _stricterBounds += ((a, p)) else error = true }
- def arityMismatches(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _arityMismatches ++= as }
- def varianceMismatches(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _varianceMismatches ++= as }
- def stricterBounds(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _stricterBounds ++= as }
-
- for ((hkarg, hkparam) <- hkargs zip hkparams) {
- if (hkparam.typeParams.isEmpty && hkarg.typeParams.isEmpty) { // base-case: kind *
- if (!variancesMatch(hkarg, hkparam))
- varianceMismatch(hkarg, hkparam)
-
- // instantiateTypeParams(tparams, targs) --> higher-order bounds may contain references to type arguments
- // substSym(hkparams, hkargs) --> these types are going to be compared as types of kind *
- // --> their arguments use different symbols, but are conceptually the same
- // (could also replace the types by polytypes, but can't just strip the symbols, as ordering is lost then)
- val declaredBounds = transformedBounds(hkparam, paramowner)
- val declaredBoundsInst = bindHKParams(declaredBounds)
- val argumentBounds = transform(hkarg.info.bounds, owner)
- if (!(declaredBoundsInst <:< argumentBounds))
- stricterBound(hkarg, hkparam)
-
- if (settings.debug.value) log(
- "checkKindBoundsHK base case: " + hkparam +
- " declared bounds: " + declaredBounds +
- " after instantiating earlier hkparams: " + declaredBoundsInst + "\n" +
- "checkKindBoundsHK base case: "+ hkarg +
- " has bounds: " + argumentBounds
- )
- }
- else {
- if (settings.debug.value)
- log("checkKindBoundsHK recursing to compare params of "+ hkparam +" with "+ hkarg)
- val (am, vm, sb) = checkKindBoundsHK(
- hkarg.typeParams,
- hkarg,
- hkparam,
- paramowner,
- underHKParams ++ hkparam.typeParams,
- withHKArgs ++ hkarg.typeParams
- )
- arityMismatches(am)
- varianceMismatches(vm)
- stricterBounds(sb)
- }
- if (!explainErrors && error) return (Nil, Nil, Nil) // stop as soon as we encountered an error
- }
- if (!explainErrors) (Nil, Nil, Nil)
- else (_arityMismatches.toList, _varianceMismatches.toList, _stricterBounds.toList)
- }
- }
-
- val errors = new ListBuffer[(Type, Symbol, List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)])]
- if (tparams.nonEmpty || targs.nonEmpty)
- log("checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", " + owner + ", " + explainErrors + ")")
-
- for {
- (tparam, targ) <- tparams zip targs
- // Prevent WildcardType from causing kind errors, as typevars may be higher-order
- if (targ != WildcardType) && (targ.isHigherKinded || tparam.typeParams.nonEmpty)
- } {
- // @M must use the typeParams of the *type* targ, not of the *symbol* of targ!!
- targ.typeSymbolDirect.info // force symbol load for #4205
- val tparamsHO = targ.typeParams
-
- val (arityMismatches, varianceMismatches, stricterBounds) = (
- // NOTE: *not* targ.typeSymbol, which normalizes
- checkKindBoundsHK(tparamsHO, targ.typeSymbolDirect, tparam, tparam.owner, tparam.typeParams, tparamsHO)
- )
- if (explainErrors) {
- if (arityMismatches.nonEmpty || varianceMismatches.nonEmpty || stricterBounds.nonEmpty) {
- errors += ((targ, tparam, arityMismatches, varianceMismatches, stricterBounds))
- }
- }
- else if (error)
- return List((NoType, NoSymbol, Nil, Nil, Nil))
- }
-
- errors.toList
- }
-
-// Errors and Diagnostics -----------------------------------------------------
-
- /** A throwable signalling a type error */
- class TypeError(var pos: Position, val msg: String) extends Throwable(msg) {
- def this(msg: String) = this(NoPosition, msg)
- }
-
- class NoCommonType(tps: List[Type]) extends Throwable(
- "lub/glb of incompatible types: " + tps.mkString("", " and ", "")) with ControlThrowable
-
- /** A throwable signalling a malformed type */
- class MalformedType(msg: String) extends TypeError(msg) {
- def this(pre: Type, tp: String) = this("malformed type: " + pre + "#" + tp)
- }
-
- /** An exception signalling a variance annotation/usage conflict */
- class VarianceError(msg: String) extends TypeError(msg)
-
- /** The current indentation string for traces */
- private var indent: String = ""
-
- /** Perform operation `p' on arguments `tp1',
- * `arg2' and print trace of computation.
- */
- private def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
- Console.println(indent + tp1 + " " + op + " " + arg2 + "?" /* + "("+tp1.getClass+","+arg2.asInstanceOf[AnyRef].getClass+")"*/)
- indent = indent + " "
- val result = p(tp1, arg2)
- indent = indent dropRight 2
- Console.println(indent + result)
- result
- }
-
- /** If option `explaintypes' is set, print a subtype trace for
- * `found <:< required'.
- */
- def explainTypes(found: Type, required: Type) {
- if (settings.explaintypes.value) withTypesExplained(found <:< required)
- }
-
- /** If option `explaintypes' is set, print a subtype trace for
- * `op(found, required)'.
- */
- def explainTypes(op: (Type, Type) => Any, found: Type, required: Type) {
- if (settings.explaintypes.value) withTypesExplained(op(found, required))
- }
-
- /** Execute `op' while printing a trace of the operations on types executed.
- */
- def withTypesExplained[A](op: => A): A = {
- val s = explainSwitch
- try { explainSwitch = true; op } finally { explainSwitch = s }
- }
-
- def objToAny(tp: Type): Type =
- if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyClass.tpe
- else tp
-
- val shorthands = Set(
- "scala.collection.immutable.List",
- "scala.collection.immutable.Nil",
- "scala.collection.Seq",
- "scala.collection.Traversable",
- "scala.collection.Iterable",
- "scala.collection.mutable.StringBuilder",
- "scala.collection.IndexedSeq",
- "scala.collection.Iterator")
-}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
index a75a03b..427b5bf 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileConstants.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileConstants.scala
deleted file mode 100644
index 6a62258..0000000
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileConstants.scala
+++ /dev/null
@@ -1,333 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package symtab
-package classfile
-
-object ClassfileConstants {
-
- final val JAVA_MAGIC = 0xCAFEBABE
- final val JAVA_MAJOR_VERSION = 45
- final val JAVA_MINOR_VERSION = 3
-
- /** <p>
- * (see http://java.sun.com/docs/books/jvms/second_edition/jvms-clarify.html)
- * </p>
- * <p>
- * If the <code>ACC_INTERFACE</code> flag is set, the <code>ACC_ABSTRACT</code>
- * flag must also be set (ch. 2.13.1).
- * </p>
- * <p>
- * A class file cannot have both its <code>ACC_FINAL</code> and
- * <code>ACC_ABSTRACT</code> flags set (ch. 2.8.2).
- * </p>
- * <p>
- * A field may have at most one of its <code>ACC_PRIVATE</code>,
- * <code>ACC_PROTECTED</code>, <code>ACC_PUBLIC</code> flags set (ch. 2.7.4).
- * </p>
- * <p>
- * A field may not have both its <code>ACC_FINAL</code> and
- * <code>ACC_VOLATILE</code> flags set (ch. 2.9.1).
- * </p>
- * <p>
- * If a method has its <code>ACC_ABSTRACT</code> flag set it must not
- * have any of its <code>ACC_FINAL</code>, <code>ACC_NATIVE</code>,
- * <code>ACC_PRIVATE</code>, <code>ACC_STATIC</code>, <code>ACC_STRICT</code>,
- * or <code>ACC_SYNCHRONIZED</code> flags set (ch. 2.13.3.2).
- * </p>
- * <p>
- * All interface methods must have their <code>ACC_ABSTRACT</code> and
- * <code>ACC_PUBLIC</code> flags set.
- * </p>
- */ // Class Field Method
- final val JAVA_ACC_PUBLIC = 0x0001 // X X X
- final val JAVA_ACC_PRIVATE = 0x0002 // X X
- final val JAVA_ACC_PROTECTED = 0x0004 // X X
- final val JAVA_ACC_STATIC = 0x0008 // X X
- final val JAVA_ACC_FINAL = 0x0010 // X X X
- final val JAVA_ACC_SUPER = 0x0020 // X
- final val JAVA_ACC_SYNCHRONIZED = 0x0020 // X
- final val JAVA_ACC_VOLATILE = 0x0040 // X
- final val JAVA_ACC_BRIDGE = 0x0040 // X
- final val JAVA_ACC_TRANSIENT = 0x0080 // X
- final val JAVA_ACC_VARARGS = 0x0080 // X
- final val JAVA_ACC_NATIVE = 0x0100 // X
- final val JAVA_ACC_INTERFACE = 0x0200 // X
- final val JAVA_ACC_ABSTRACT = 0x0400 // X X
- final val JAVA_ACC_STRICT = 0x0800 // X
- final val JAVA_ACC_SYNTHETIC = 0x1000 // X X X
- final val JAVA_ACC_ANNOTATION = 0x2000 // X
- final val JAVA_ACC_ENUM = 0x4000 // X X
-
- // tags describing the type of a literal in the constant pool
- final val CONSTANT_UTF8 = 1
- final val CONSTANT_UNICODE = 2
- final val CONSTANT_INTEGER = 3
- final val CONSTANT_FLOAT = 4
- final val CONSTANT_LONG = 5
- final val CONSTANT_DOUBLE = 6
- final val CONSTANT_CLASS = 7
- final val CONSTANT_STRING = 8
- final val CONSTANT_FIELDREF = 9
- final val CONSTANT_METHODREF = 10
- final val CONSTANT_INTFMETHODREF = 11
- final val CONSTANT_NAMEANDTYPE = 12
-
- // tags describing the type of a literal in attribute values
- final val BYTE_TAG = 'B'
- final val CHAR_TAG = 'C'
- final val DOUBLE_TAG = 'D'
- final val FLOAT_TAG = 'F'
- final val INT_TAG = 'I'
- final val LONG_TAG = 'J'
- final val SHORT_TAG = 'S'
- final val BOOL_TAG = 'Z'
- final val STRING_TAG = 's'
- final val ENUM_TAG = 'e'
- final val CLASS_TAG = 'c'
- final val ARRAY_TAG = '['
- final val VOID_TAG = 'V'
- final val TVAR_TAG = 'T'
- final val ANNOTATION_TAG = '@'
- final val SCALA_NOTHING = "scala.runtime.Nothing$"
- final val SCALA_NULL = "scala.runtime.Null$"
-
-
- // tags describing the type of newarray
- final val T_BOOLEAN = 4
- final val T_CHAR = 5
- final val T_FLOAT = 6
- final val T_DOUBLE = 7
- final val T_BYTE = 8
- final val T_SHORT = 9
- final val T_INT = 10
- final val T_LONG = 11
-
- // JVM mnemonics
- final val nop = 0x00
- final val aconst_null = 0x01
- final val iconst_m1 = 0x02
-
- final val iconst_0 = 0x03
- final val iconst_1 = 0x04
- final val iconst_2 = 0x05
- final val iconst_3 = 0x06
- final val iconst_4 = 0x07
- final val iconst_5 = 0x08
-
- final val lconst_0 = 0x09
- final val lconst_1 = 0x0a
- final val fconst_0 = 0x0b
- final val fconst_1 = 0x0c
- final val fconst_2 = 0x0d
- final val dconst_0 = 0x0e
- final val dconst_1 = 0x0f
-
- final val bipush = 0x10
- final val sipush = 0x11
- final val ldc = 0x12
- final val ldc_w = 0x13
- final val ldc2_w = 0x14
-
- final val iload = 0x15
- final val lload = 0x16
- final val fload = 0x17
- final val dload = 0x18
- final val aload = 0x19
-
- final val iload_0 = 0x1a
- final val iload_1 = 0x1b
- final val iload_2 = 0x1c
- final val iload_3 = 0x1d
- final val lload_0 = 0x1e
- final val lload_1 = 0x1f
- final val lload_2 = 0x20
- final val lload_3 = 0x21
- final val fload_0 = 0x22
- final val fload_1 = 0x23
- final val fload_2 = 0x24
- final val fload_3 = 0x25
- final val dload_0 = 0x26
- final val dload_1 = 0x27
- final val dload_2 = 0x28
- final val dload_3 = 0x29
- final val aload_0 = 0x2a
- final val aload_1 = 0x2b
- final val aload_2 = 0x2c
- final val aload_3 = 0x2d
- final val iaload = 0x2e
- final val laload = 0x2f
- final val faload = 0x30
- final val daload = 0x31
- final val aaload = 0x32
- final val baload = 0x33
- final val caload = 0x34
- final val saload = 0x35
-
- final val istore = 0x36
- final val lstore = 0x37
- final val fstore = 0x38
- final val dstore = 0x39
- final val astore = 0x3a
- final val istore_0 = 0x3b
- final val istore_1 = 0x3c
- final val istore_2 = 0x3d
- final val istore_3 = 0x3e
- final val lstore_0 = 0x3f
- final val lstore_1 = 0x40
- final val lstore_2 = 0x41
- final val lstore_3 = 0x42
- final val fstore_0 = 0x43
- final val fstore_1 = 0x44
- final val fstore_2 = 0x45
- final val fstore_3 = 0x46
- final val dstore_0 = 0x47
- final val dstore_1 = 0x48
- final val dstore_2 = 0x49
- final val dstore_3 = 0x4a
- final val astore_0 = 0x4b
- final val astore_1 = 0x4c
- final val astore_2 = 0x4d
- final val astore_3 = 0x4e
- final val iastore = 0x4f
- final val lastore = 0x50
- final val fastore = 0x51
- final val dastore = 0x52
- final val aastore = 0x53
- final val bastore = 0x54
- final val castore = 0x55
- final val sastore = 0x56
-
- final val pop = 0x57
- final val pop2 = 0x58
- final val dup = 0x59
- final val dup_x1 = 0x5a
- final val dup_x2 = 0x5b
- final val dup2 = 0x5c
- final val dup2_x1 = 0x5d
- final val dup2_x2 = 0x5e
- final val swap = 0x5f
-
- final val iadd = 0x60
- final val ladd = 0x61
- final val fadd = 0x62
- final val dadd = 0x63
- final val isub = 0x64
- final val lsub = 0x65
- final val fsub = 0x66
- final val dsub = 0x67
- final val imul = 0x68
- final val lmul = 0x69
- final val fmul = 0x6a
- final val dmul = 0x6b
- final val idiv = 0x6c
- final val ldiv = 0x6d
- final val fdiv = 0x6e
- final val ddiv = 0x6f
- final val irem = 0x70
- final val lrem = 0x71
- final val frem = 0x72
- final val drem = 0x73
-
- final val ineg = 0x74
- final val lneg = 0x75
- final val fneg = 0x76
- final val dneg = 0x77
-
- final val ishl = 0x78
- final val lshl = 0x79
- final val ishr = 0x7a
- final val lshr = 0x7b
- final val iushr = 0x7c
- final val lushr = 0x7d
- final val iand = 0x7e
- final val land = 0x7f
- final val ior = 0x80
- final val lor = 0x81
- final val ixor = 0x82
- final val lxor = 0x83
- final val iinc = 0x84
-
- final val i2l = 0x85
- final val i2f = 0x86
- final val i2d = 0x87
- final val l2i = 0x88
- final val l2f = 0x89
- final val l2d = 0x8a
- final val f2i = 0x8b
- final val f2l = 0x8c
- final val f2d = 0x8d
- final val d2i = 0x8e
- final val d2l = 0x8f
- final val d2f = 0x90
- final val i2b = 0x91
- final val i2c = 0x92
- final val i2s = 0x93
-
- final val lcmp = 0x94
- final val fcmpl = 0x95
- final val fcmpg = 0x96
- final val dcmpl = 0x97
- final val dcmpg = 0x98
-
- final val ifeq = 0x99
- final val ifne = 0x9a
- final val iflt = 0x9b
- final val ifge = 0x9c
- final val ifgt = 0x9d
- final val ifle = 0x9e
- final val if_icmpeq = 0x9f
- final val if_icmpne = 0xa0
- final val if_icmplt = 0xa1
- final val if_icmpge = 0xa2
- final val if_icmpgt = 0xa3
- final val if_icmple = 0xa4
- final val if_acmpeq = 0xa5
- final val if_acmpne = 0xa6
- final val goto = 0xa7
- final val jsr = 0xa8
- final val ret = 0xa9
- final val tableswitch = 0xaa
- final val lookupswitch = 0xab
- final val ireturn = 0xac
- final val lreturn = 0xad
- final val freturn = 0xae
- final val dreturn = 0xaf
- final val areturn = 0xb0
- final val return_ = 0xb1
-
- final val getstatic = 0xb2
- final val putstatic = 0xb3
- final val getfield = 0xb4
- final val putfield = 0xb5
-
- final val invokevirtual = 0xb6
- final val invokespecial = 0xb7
- final val invokestatic = 0xb8
- final val invokeinterface = 0xb9
- final val xxxunusedxxxx = 0xba
-
- final val new_ = 0xbb
- final val newarray = 0xbc
- final val anewarray = 0xbd
- final val arraylength = 0xbe
- final val athrow = 0xbf
- final val checkcast = 0xc0
- final val instanceof = 0xc1
- final val monitorenter = 0xc2
- final val monitorexit = 0xc3
- final val wide = 0xc4
- final val multianewarray = 0xc5
- final val ifnull = 0xc6
- final val ifnonnull = 0xc7
- final val goto_w = 0xc8
- final val jsr_w = 0xc9
-
- // reserved opcodes
- final val breakpoint = 0xca
- final val impdep1 = 0xfe
- final val impdep2 = 0xff
-}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 36112e5..2955986 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -9,12 +9,11 @@ package classfile
import java.io.{ File, IOException }
import java.lang.Integer.toHexString
-
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
-import scala.tools.nsc.io.AbstractFile
import scala.annotation.switch
-import reflect.generic.PickleBuffer
+import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs}
+import scala.tools.nsc.io.AbstractFile
/** This abstract class implements a class file parser.
*
@@ -24,90 +23,83 @@ import reflect.generic.PickleBuffer
abstract class ClassfileParser {
val global: Global
import global._
-
- import ClassfileConstants._
+ import definitions.{ AnnotationClass, ClassfileAnnotationClass }
+ import scala.reflect.internal.ClassfileConstants._
import Flags._
protected var in: AbstractFileReader = _ // the class file reader
protected var clazz: Symbol = _ // the class symbol containing dynamic members
protected var staticModule: Symbol = _ // the module symbol containing static members
- protected var instanceDefs: Scope = _ // the scope of all instance definitions
- protected var staticDefs: Scope = _ // the scope of all static definitions
+ protected var instanceScope: Scope = _ // the scope of all instance definitions
+ protected var staticScope: Scope = _ // the scope of all static definitions
protected var pool: ConstantPool = _ // the classfile's constant pool
protected var isScala: Boolean = _ // does class file describe a scala class?
protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation?
protected var isScalaRaw: Boolean = _ // this class file is a scala class with no pickled info
- protected var hasMeta: Boolean = _ // does class file contain jaco meta attribute?s
protected var busy: Option[Symbol] = None // lock to detect recursive reads
- private var externalName: Name = _ // JVM name of the current class
+ protected var currentClass: Name = _ // JVM name of the current class
protected var classTParams = Map[Name,Symbol]()
protected var srcfile0 : Option[AbstractFile] = None
+ protected def moduleClass: Symbol = staticModule.moduleClass
def srcfile = srcfile0
- private object metaParser extends MetaParser {
+ private object unpickler extends scala.reflect.internal.pickling.UnPickler {
val global: ClassfileParser.this.global.type = ClassfileParser.this.global
}
- private object unpickler extends UnPickler {
- val global: ClassfileParser.this.global.type = ClassfileParser.this.global
+ private def handleMissing(e: MissingRequirementError) = {
+ if (settings.debug.value) e.printStackTrace
+ throw new IOException(s"Missing dependency '${e.req}', required by ${in.file}")
+ }
+ private def handleError(e: Exception) = {
+ if (settings.debug.value) e.printStackTrace()
+ throw new IOException(s"class file '${in.file}' is broken\n(${e.getClass}/${e.getMessage})")
+ }
+ private def mismatchError(c: Symbol) = {
+ throw new IOException(s"class file '${in.file}' has location not matching its contents: contains $c")
}
- def parse(file: AbstractFile, root: Symbol) = try {
- def handleMissing(e: MissingRequirementError) = {
- if (settings.debug.value) e.printStackTrace
- throw new IOException("Missing dependency '" + e.req + "', required by " + in.file)
+ private def parseErrorHandler[T]: PartialFunction[Throwable, T] = {
+ case e: MissingRequirementError => handleMissing(e)
+ case e: RuntimeException => handleError(e)
+ }
+ @inline private def pushBusy[T](sym: Symbol)(body: => T): T = {
+ busy match {
+ case Some(`sym`) => throw new IOException(s"unsatisfiable cyclic dependency in '$sym'")
+ case Some(sym1) => throw new IOException(s"illegal class file dependency between '$sym' and '$sym1'")
+ case _ => ()
}
- def handleError(e: Exception) = {
- if (settings.debug.value) e.printStackTrace()
- throw new IOException("class file '" + in.file + "' is broken\n(" + {
- if (e.getMessage() != null) e.getMessage()
- else e.getClass.toString
- } + ")")
- }
- assert(!busy.isDefined, {
- val (s1, s2) = (busy.get, root)
- if (s1 eq s2) "unsatisfiable cyclic dependency in '%s'".format(s1)
- else "illegal class file dependency between '%s' and '%s'".format(s1, s2)
- })
-
- busy = Some(root)
- /*root match {
- case cs: ClassSymbol =>
- cs.classFile = file
- case ms: ModuleSymbol =>
- ms.moduleClass.asInstanceOf[ClassSymbol].classFile = file
- case _ =>
- println("Skipping class: " + root + ": " + root.getClass)
- }
-*/
- log("parsing " + file.name)
- this.in = new AbstractFileReader(file)
- if (root.isModule) {
- this.clazz = root.companionClass
- this.staticModule = root
- } else {
- this.clazz = root
- this.staticModule = root.companionModule
- }
- this.isScala = false
- this.hasMeta = false
- try {
+ busy = Some(sym)
+ try body
+ catch parseErrorHandler
+ finally busy = None
+ }
+ @inline private def raiseLoaderLevel[T](body: => T): T = {
+ loaders.parentsLevel += 1
+ try body
+ finally loaders.parentsLevel -= 1
+ }
+
+ def parse(file: AbstractFile, root: Symbol): Unit = {
+ debuglog("[class] >> " + root.fullName)
+
+ pushBusy(root) {
+ this.in = new AbstractFileReader(file)
+ this.clazz = if (root.isModule) root.companionClass else root
+ // WARNING! do no use clazz.companionModule to find staticModule.
+ // In a situation where root can be defined, but its companionClass not,
+ // this would give incorrect results (see SI-5031 in separate compilation scenario)
+ this.staticModule = if (root.isModule) root else root.companionModule
+ this.isScala = false
+
parseHeader
this.pool = new ConstantPool
parseClass()
}
- catch {
- case e: MissingRequirementError => handleMissing(e)
- case e: RuntimeException => handleError(e)
- }
- } finally {
- busy = None
}
- protected def statics: Symbol = staticModule.moduleClass
-
private def parseHeader() {
val magic = in.nextInt
if (magic != JAVA_MAGIC)
@@ -139,10 +131,13 @@ abstract class ClassfileParser {
(in.nextByte.toInt: @switch) match {
case CONSTANT_UTF8 | CONSTANT_UNICODE =>
in.skip(in.nextChar)
- case CONSTANT_CLASS | CONSTANT_STRING =>
+ case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE=>
in.skip(2)
+ case CONSTANT_METHODHANDLE =>
+ in.skip(3)
case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF
- | CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT =>
+ | CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT
+ | CONSTANT_INVOKEDYNAMIC =>
in.skip(4)
case CONSTANT_LONG | CONSTANT_DOUBLE =>
in.skip(8)
@@ -187,8 +182,8 @@ abstract class ClassfileParser {
val start = starts(index)
if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
val name = getExternalName(in.getChar(start + 1))
- if (name endsWith '$')
- c = definitions.getModule(name stripEnd "$")
+ if (nme.isModuleName(name))
+ c = rootMirror.getModule(nme.stripModuleSuffix(name))
else
c = classNameToSymbol(name)
@@ -206,12 +201,12 @@ abstract class ClassfileParser {
getExternalName(in.getChar(start + 1))
}
- /** Return the symbol of the class member at <code>index</code>.
+ /** Return the symbol of the class member at `index`.
* The following special cases exist:
- * - If the member refers to special MODULE$ static field, return
+ * - If the member refers to special `MODULE$` static field, return
* the symbol of the corresponding module.
* - If the member is a field, and is not found with the given name,
- * another try is made by appending nme.LOCAL_SUFFIX_STRING
+ * another try is made by appending `nme.LOCAL_SUFFIX_STRING`
* - If no symbol is found in the right tpe, a new try is made in the
* companion class, in case the owner is an implementation class.
*/
@@ -225,53 +220,52 @@ abstract class ClassfileParser {
first != CONSTANT_METHODREF &&
first != CONSTANT_INTFMETHODREF) errorBadTag(start)
val ownerTpe = getClassOrArrayType(in.getChar(start + 1))
- if (settings.debug.value)
- log("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.originalName)
+ debuglog("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.originalName)
val (name0, tpe0) = getNameAndType(in.getChar(start + 3), ownerTpe)
- if (settings.debug.value)
- log("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0)
+ debuglog("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0)
forceMangledName(tpe0.typeSymbol.name, false)
val (name, tpe) = getNameAndType(in.getChar(start + 3), ownerTpe)
-// println("new tpe: " + tpe + " at phase: " + phase)
-
if (name == nme.MODULE_INSTANCE_FIELD) {
val index = in.getChar(start + 1)
val name = getExternalName(in.getChar(starts(index) + 1))
//assert(name.endsWith("$"), "Not a module class: " + name)
- f = forceMangledName(name.subName(0, name.length - 1), true)
+ f = forceMangledName(name dropRight 1, true)
if (f == NoSymbol)
- f = definitions.getModule(name.subName(0, name.length - 1))
+ f = rootMirror.getModule(name dropRight 1)
} else {
val origName = nme.originalName(name)
val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol
-// println("\t" + owner.info.member(name).tpe.widen + " =:= " + tpe)
f = owner.info.findMember(origName, 0, 0, false).suchThat(_.tpe.widen =:= tpe)
if (f == NoSymbol)
f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe)
if (f == NoSymbol) {
// if it's an impl class, try to find it's static member inside the class
if (ownerTpe.typeSymbol.isImplClass) {
-// println("impl class, member: " + owner.tpe.member(origName) + ": " + owner.tpe.member(origName).tpe)
f = ownerTpe.findMember(origName, 0, 0, false).suchThat(_.tpe =:= tpe)
} else {
log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
- f = if (tpe.isInstanceOf[MethodType]) owner.newMethod(owner.pos, name).setInfo(tpe)
- else owner.newValue(owner.pos, name).setInfo(tpe).setFlag(MUTABLE)
+ f = tpe match {
+ case MethodType(_, _) => owner.newMethod(name, owner.pos)
+ case _ => owner.newVariable(name, owner.pos)
+ }
+ f setInfo tpe
log("created fake member " + f.fullName)
}
-// println("\townerTpe.decls: " + ownerTpe.decls)
-// println("Looking for: " + name + ": " + tpe + " inside: " + ownerTpe.typeSymbol + "\n\tand found: " + ownerTpe.members)
}
}
- assert(f != NoSymbol, "could not find\n " + name + ": " + tpe + "\ninside:\n " + ownerTpe.members.mkString(", "))
+ assert(f != NoSymbol,
+ s"could not find $name: $tpe in $ownerTpe" + (
+ if (settings.debug.value) ownerTpe.members.mkString(", members are:\n ", "\n ", "") else ""
+ )
+ )
values(index) = f
}
f
}
/** Return a name and a type at the given index. If the type is a method
- * type, a dummy symbol is created in 'ownerTpe', which is used as the
+ * type, a dummy symbol is created in `ownerTpe`, which is used as the
* owner of its value parameters. This might lead to inconsistencies,
* if a symbol of the given name already exists, and has a different
* type.
@@ -284,7 +278,7 @@ abstract class ClassfileParser {
if (in.buf(start).toInt != CONSTANT_NAMEANDTYPE) errorBadTag(start)
val name = getName(in.getChar(start + 1).toInt)
// create a dummy symbol for method types
- val dummySym = ownerTpe.typeSymbol.newMethod(ownerTpe.typeSymbol.pos, name)
+ val dummySym = ownerTpe.typeSymbol.newMethod(name, ownerTpe.typeSymbol.pos)
var tpe = getType(dummySym, in.getChar(start + 3).toInt)
// fix the return type, which is blindly set to the class currently parsed
@@ -311,7 +305,7 @@ abstract class ClassfileParser {
val start = starts(index)
if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
val name = getExternalName(in.getChar(start + 1))
- if (name(0) == ARRAY_TAG) {
+ if (name.charAt(0) == ARRAY_TAG) {
c = sigToType(null, name)
values(index) = c
} else {
@@ -344,7 +338,7 @@ abstract class ClassfileParser {
val start = starts(index)
value = (in.buf(start).toInt: @switch) match {
case CONSTANT_STRING =>
- Constant(getName(in.getChar(start + 1).toInt).toString())
+ Constant(getName(in.getChar(start + 1).toInt).toString)
case CONSTANT_INTEGER =>
Constant(in.getInt(start + 1))
case CONSTANT_FLOAT =>
@@ -367,24 +361,30 @@ abstract class ClassfileParser {
}
}
+ private def getSubArray(bytes: Array[Byte]): Array[Byte] = {
+ val decodedLength = ByteCodecs.decode(bytes)
+ val arr = new Array[Byte](decodedLength)
+ System.arraycopy(bytes, 0, arr, 0, decodedLength)
+ arr
+ }
+
def getBytes(index: Int): Array[Byte] = {
if (index <= 0 || len <= index) errorBadIndex(index)
var value = values(index).asInstanceOf[Array[Byte]]
if (value eq null) {
val start = starts(index)
if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
- val len = in.getChar(start + 1)
+ val len = in.getChar(start + 1)
val bytes = new Array[Byte](len)
- Array.copy(in.buf, start + 3, bytes, 0, len)
- val decodedLength = reflect.generic.ByteCodecs.decode(bytes)
- value = bytes.take(decodedLength)
+ System.arraycopy(in.buf, start + 3, bytes, 0, len)
+ value = getSubArray(bytes)
values(index) = value
}
value
}
def getBytes(indices: List[Int]): Array[Byte] = {
- assert(!indices.isEmpty)
+ assert(!indices.isEmpty, indices)
var value = values(indices.head).asInstanceOf[Array[Byte]]
if (value eq null) {
val bytesBuffer = ArrayBuffer.empty[Byte]
@@ -395,9 +395,7 @@ abstract class ClassfileParser {
val len = in.getChar(start + 1)
bytesBuffer ++= in.buf.view(start + 3, start + 3 + len)
}
- val bytes = bytesBuffer.toArray
- val decodedLength = reflect.generic.ByteCodecs.decode(bytes)
- value = bytes.take(decodedLength)
+ value = getSubArray(bytesBuffer.toArray)
values(indices.head) = value
}
value
@@ -417,89 +415,84 @@ abstract class ClassfileParser {
*/
def forceMangledName(name: Name, module: Boolean): Symbol = {
val parts = name.decode.toString.split(Array('.', '$'))
- var sym: Symbol = definitions.RootClass
- atPhase(currentRun.flattenPhase.prev) {
- for (part0 <- parts; if !(part0 == ""); val part = newTermName(part0)) {
- val sym1 = atPhase(currentRun.icodePhase) {
+ var sym: Symbol = rootMirror.RootClass
+
+ // was "at flatten.prev"
+ beforeFlatten {
+ for (part0 <- parts; if !(part0 == ""); part = newTermName(part0)) {
+ val sym1 = beforeIcode {
sym.linkedClassOfClass.info
sym.info.decl(part.encode)
}//.suchThat(module == _.isModule)
- if (sym1 == NoSymbol)
- sym = sym.info.decl(part.encode.toTypeName)
- else
- sym = sym1
+
+ sym = sym1 orElse sym.info.decl(part.encode.toTypeName)
}
}
-// println("found: " + sym)
sym
}
- /** Return the class symbol of the given name. */
- def classNameToSymbol(name: Name): Symbol = {
- def loadClassSymbol(name: Name) = {
- val s = name.toString
- val file = global.classPath findSourceFile s getOrElse {
- throw new MissingRequirementError("class " + s)
- }
- val completer = new global.loaders.ClassfileLoader(file)
- var owner: Symbol = definitions.RootClass
- var sym: Symbol = NoSymbol
- var ss: String = null
- var start = 0
- var end = s indexOf '.'
- while (end > 0) {
- ss = s.substring(start, end)
- sym = owner.info.decls lookup ss
- if (sym == NoSymbol) {
- sym = owner.newPackage(NoPosition, ss) setInfo completer
- sym.moduleClass setInfo completer
- owner.info.decls enter sym
- }
- owner = sym.moduleClass
- start = end + 1
- end = s.indexOf('.', start)
- }
- ss = s substring start
+ private def loadClassSymbol(name: Name): Symbol = {
+ val file = global.classPath findSourceFile ("" +name) getOrElse {
+ // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
+ // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
+ // that are not in their correct place (see bug for details)
+ if (!settings.isScaladoc)
+ warning(s"Class $name not found - continuing with a stub.")
+ return NoSymbol.newClass(name.toTypeName)
+ }
+ val completer = new global.loaders.ClassfileLoader(file)
+ var owner: Symbol = rootMirror.RootClass
+ var sym: Symbol = NoSymbol
+ var ss: Name = null
+ var start = 0
+ var end = name indexOf '.'
+
+ while (end > 0) {
+ ss = name.subName(start, end)
sym = owner.info.decls lookup ss
if (sym == NoSymbol) {
- sym = owner.newClass(NoPosition, newTypeName(ss)) setInfo completer
+ sym = owner.newPackage(ss) setInfo completer
+ sym.moduleClass setInfo completer
owner.info.decls enter sym
- if (settings.debug.value && settings.verbose.value)
- println("loaded "+sym+" from file "+file)
}
- sym
+ owner = sym.moduleClass
+ start = end + 1
+ end = name.indexOf('.', start)
}
-
- def lookupClass(name: Name) = try {
- if (name.pos('.') == name.length)
- definitions.getMember(definitions.EmptyPackageClass, name.toTypeName)
- else
- definitions.getClass(name) // see tickets #2464, #3756
- } catch {
- case _: FatalError => loadClassSymbol(name)
+ ss = name.subName(0, start)
+ owner.info.decls lookup ss orElse {
+ sym = owner.newClass(ss.toTypeName) setInfoAndEnter completer
+ debuglog("loaded "+sym+" from file "+file)
+ sym
}
+ }
+ /** FIXME - we shouldn't be doing ad hoc lookups in the empty package.
+ * The method called "getClassByName" should either return the class or not.
+ */
+ private def lookupClass(name: Name) = (
+ if (name containsChar '.')
+ rootMirror getClassByName name // see tickets #2464, #3756
+ else
+ definitions.getMember(rootMirror.EmptyPackageClass, name.toTypeName)
+ )
- innerClasses.get(name) match {
- case Some(entry) =>
- //println("found inner class " + name)
- val res = innerClasses.classSymbol(entry.externalName)
- //println("\trouted to: " + res)
- res
- case None =>
- //if (name.toString.contains("$")) println("No inner class: " + name + innerClasses + " while parsing " + in.file.name)
- lookupClass(name)
- }
+ /** Return the class symbol of the given name. */
+ def classNameToSymbol(name: Name): Symbol = {
+ if (innerClasses contains name)
+ innerClasses innerSymbol name
+ else
+ try lookupClass(name)
+ catch { case _: FatalError => loadClassSymbol(name) }
}
var sawPrivateConstructor = false
def parseClass() {
- val jflags = in.nextChar
+ val jflags = in.nextChar
val isAnnotation = hasAnnotation(jflags)
- var sflags = transFlags(jflags, true)
- var nameIdx = in.nextChar
- externalName = pool.getClassName(nameIdx)
- val c = if (externalName.toString.indexOf('$') < 0) pool.getClassSymbol(nameIdx) else clazz
+ var sflags = toScalaClassFlags(jflags)
+ var nameIdx = in.nextChar
+ currentClass = pool.getClassName(nameIdx)
/** Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled.
* Updates the read pointer of 'in'. */
@@ -509,77 +502,71 @@ abstract class ClassfileParser {
val ifaces = in.nextChar
in.bp += ifaces * 2 // .. and iface count interfaces
List(definitions.AnyRefClass.tpe) // dummy superclass, will be replaced by pickled information
- } else {
- try {
- loaders.parentsLevel += 1
- val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe }
- else pool.getSuperClass(in.nextChar).tpe
- val ifaceCount = in.nextChar
- var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe
- if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces
- superType :: ifaces
- } finally {
- loaders.parentsLevel -= 1
- }
+ }
+ else raiseLoaderLevel {
+ val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe }
+ else pool.getSuperClass(in.nextChar).tpe
+ val ifaceCount = in.nextChar
+ var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe
+ if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces
+ superType :: ifaces
}
}
- if (c != clazz && externalName.toString.indexOf("$") < 0) {
- if ((clazz eq NoSymbol) && (c ne NoSymbol)) clazz = c
- else throw new IOException("class file '" + in.file + "' contains wrong " + c)
+ val isTopLevel = !(currentClass containsChar '$') // Java class name; *don't* try to to use Scala name decoding (SI-7532)
+
+ val c = if (isTopLevel) pool.getClassSymbol(nameIdx) else clazz
+ if (isTopLevel) {
+ if (c != clazz) {
+ if ((clazz eq NoSymbol) && (c ne NoSymbol)) clazz = c
+ else mismatchError(c)
+ }
}
addEnclosingTParams(clazz)
- parseInnerClasses() // also sets the isScala / isScalaRaw / hasMeta flags, see r15956
+ parseInnerClasses() // also sets the isScala / isScalaRaw flags, see r15956
// get the class file parser to reuse scopes.
- instanceDefs = new Scope
- staticDefs = new Scope
+ instanceScope = newScope
+ staticScope = newScope
- val classInfo = ClassInfoType(parseParents, instanceDefs, clazz)
- val staticInfo = ClassInfoType(List(), staticDefs, statics)
+ val classInfo = ClassInfoType(parseParents, instanceScope, clazz)
+ val staticInfo = ClassInfoType(List(), staticScope, moduleClass)
- if (!isScala && !isScalaRaw) {
-// println("Entering inner classes for " + clazz)
+ if (!isScala && !isScalaRaw)
enterOwnInnerClasses
- }
+
val curbp = in.bp
skipMembers() // fields
skipMembers() // methods
if (!isScala) {
- clazz.setFlag(sflags)
- setPrivateWithin(clazz, jflags)
- setPrivateWithin(staticModule, jflags)
- if (!hasMeta || isScalaRaw) {
- clazz.setInfo(classInfo)
- }
- statics.setInfo(staticInfo)
- staticModule.setInfo(statics.tpe)
+ clazz setFlag sflags
+ importPrivateWithinFromJavaFlags(clazz, jflags)
+ importPrivateWithinFromJavaFlags(staticModule, jflags)
+ clazz.setInfo(classInfo)
+ moduleClass setInfo staticInfo
+ staticModule.setInfo(moduleClass.tpe)
staticModule.setFlag(JAVA)
staticModule.moduleClass.setFlag(JAVA)
// attributes now depend on having infos set already
parseAttributes(clazz, classInfo)
- loaders.pendingLoadActions = { () =>
+ def queueLoad() {
in.bp = curbp
- val fieldCount = in.nextChar
- for (i <- 0 until fieldCount) parseField()
+ 0 until in.nextChar foreach (_ => parseField())
sawPrivateConstructor = false
- val methodCount = in.nextChar
- for (i <- 0 until methodCount) parseMethod()
- if (!sawPrivateConstructor &&
- (instanceDefs.lookup(nme.CONSTRUCTOR) == NoSymbol &&
- (sflags & INTERFACE) == 0L))
- {
- //Console.println("adding constructor to " + clazz);//DEBUG
- instanceDefs.enter(
- clazz.newConstructor(NoPosition)
- .setFlag(clazz.flags & ConstrFlags)
- .setInfo(MethodType(List(), clazz.tpe)))
- }
- ()
- } :: loaders.pendingLoadActions
+ 0 until in.nextChar foreach (_ => parseMethod())
+ val needsConstructor = (
+ !sawPrivateConstructor
+ && instanceScope.lookup(nme.CONSTRUCTOR) == NoSymbol
+ && (sflags & INTERFACE) == 0
+ )
+ if (needsConstructor)
+ instanceScope enter clazz.newClassConstructor(NoPosition)
+ }
+
+ loaders.pendingLoadActions ::= (queueLoad _)
if (loaders.parentsLevel == 0) {
- while (!loaders.pendingLoadActions.isEmpty) {
+ while (loaders.pendingLoadActions.nonEmpty) {
val item = loaders.pendingLoadActions.head
loaders.pendingLoadActions = loaders.pendingLoadActions.tail
item()
@@ -593,79 +580,89 @@ abstract class ClassfileParser {
def addEnclosingTParams(clazz: Symbol) {
var sym = clazz.owner
while (sym.isClass && !sym.isModuleClass) {
-// println("adding tparams of " + sym)
- for (t <- sym.tpe.typeArgs) {
-// println("\tadding " + (t.typeSymbol.name + "->" + t.typeSymbol))
+ for (t <- sym.tpe.typeArgs)
classTParams = classTParams + (t.typeSymbol.name -> t.typeSymbol)
- }
+
sym = sym.owner
}
}
def parseField() {
val jflags = in.nextChar
- var sflags = transFlags(jflags, false)
- if ((sflags & FINAL) == 0L) sflags = sflags | MUTABLE
- if ((sflags & PRIVATE) != 0L && !global.settings.XO.value) {
+ var sflags = toScalaFieldFlags(jflags)
+ if ((sflags & PRIVATE) != 0L && !global.settings.optimise.value) {
in.skip(4); skipAttributes()
} else {
val name = pool.getName(in.nextChar)
val info = pool.getType(in.nextChar)
- val sym = getOwner(jflags).newValue(NoPosition, name).setFlag(sflags)
+ val sym = getOwner(jflags).newValue(name, NoPosition, sflags)
val isEnum = (jflags & JAVA_ACC_ENUM) != 0
sym setInfo {
if (isEnum) ConstantType(Constant(sym))
else info
}
- setPrivateWithin(sym, jflags)
+ importPrivateWithinFromJavaFlags(sym, jflags)
parseAttributes(sym, info)
getScope(jflags).enter(sym)
- // sealed java enums (experimental)
- if (isEnum && opt.experimental) {
- // need to give singleton type
- sym setInfo info.narrow
- if (!sym.superClass.isSealed)
- sym.superClass setFlag (SEALED | ABSTRACT)
+ // sealed java enums
+ if (isEnum) {
+ val enumClass = sym.owner.linkedClassOfClass
+ if (!enumClass.isSealed)
+ enumClass setFlag (SEALED | ABSTRACT)
- sym.superClass addChild sym
+ enumClass addChild sym
}
}
}
def parseMethod() {
val jflags = in.nextChar.toInt
- var sflags = transFlags(jflags, false)
- if (isPrivate(jflags) && !global.settings.XO.value) {
+ var sflags = toScalaMethodFlags(jflags)
+ if (isPrivate(jflags) && !global.settings.optimise.value) {
val name = pool.getName(in.nextChar)
if (name == nme.CONSTRUCTOR)
sawPrivateConstructor = true
in.skip(2); skipAttributes()
} else {
- if ((jflags & JAVA_ACC_BRIDGE) != 0)
- sflags |= BRIDGE
- if ((sflags & PRIVATE) != 0L && global.settings.XO.value) {
+ if ((sflags & PRIVATE) != 0L && global.settings.optimise.value) { // TODO this should be !optimize, no? See c4181f656d.
in.skip(4); skipAttributes()
} else {
val name = pool.getName(in.nextChar)
- val sym = getOwner(jflags).newMethod(NoPosition, name).setFlag(sflags)
+ val sym = getOwner(jflags).newMethod(name, NoPosition, sflags)
var info = pool.getType(sym, (in.nextChar))
if (name == nme.CONSTRUCTOR)
info match {
case MethodType(params, restpe) =>
// if this is a non-static inner class, remove the explicit outer parameter
- val newParams = innerClasses.get(externalName) match {
+ val paramsNoOuter = innerClasses getEntry currentClass match {
case Some(entry) if !isScalaRaw && !isStatic(entry.jflags) =>
- assert(params.head.tpe.typeSymbol == clazz.owner, params.head.tpe.typeSymbol + ": " + clazz.owner)
+ /* About `clazz.owner.isPackage` below: SI-5957
+ * For every nested java class A$B, there are two symbols in the scala compiler.
+ * 1. created by SymbolLoader, because of the existence of the A$B.class file, owner: package
+ * 2. created by ClassfileParser of A when reading the inner classes, owner: A
+ * If symbol 1 gets completed (e.g. because the compiled source mentions `A$B`, not `A#B`), the
+ * ClassfileParser for 1 executes, and clazz.owner is the package.
+ */
+ assert(params.head.tpe.typeSymbol == clazz.owner || clazz.owner.isPackage, params.head.tpe.typeSymbol + ": " + clazz.owner)
params.tail
case _ =>
params
}
+ val newParams = paramsNoOuter match {
+ case (init :+ tail) if (jflags & JAVA_ACC_SYNTHETIC) != 0L =>
+ // SI-7455 strip trailing dummy argument ("access constructor tag") from synthetic constructors which
+ // are added when an inner class needs to access a private constructor.
+ init
+ case _ =>
+ paramsNoOuter
+ }
+
info = MethodType(newParams, clazz.tpe)
}
sym.setInfo(info)
- setPrivateWithin(sym, jflags)
+ importPrivateWithinFromJavaFlags(sym, jflags)
parseAttributes(sym, info)
if ((jflags & JAVA_ACC_VARARGS) != 0) {
sym.setInfo(arrayToRepeated(sym.info))
@@ -675,43 +672,20 @@ abstract class ClassfileParser {
}
}
- /** Convert array parameters denoting a repeated parameter of a Java method
- * to JavaRepeatedParamClass types.
- */
- private def arrayToRepeated(tp: Type): Type = tp match {
- case MethodType(params, rtpe) =>
- val formals = tp.paramTypes
- assert(formals.last.typeSymbol == definitions.ArrayClass)
- val method = params.last.owner
- val elemtp = formals.last.typeArgs.head match {
- case RefinedType(List(t1, t2), _) if (t1.typeSymbol.isAbstractType && t2.typeSymbol == definitions.ObjectClass) =>
- t1 // drop intersection with Object for abstract types in varargs. UnCurry can handle them.
- case t =>
- t
- }
- val newParams = method.newSyntheticValueParams(
- formals.init :+ appliedType(definitions.JavaRepeatedParamClass.typeConstructor, List(elemtp)))
- MethodType(newParams, rtpe)
- case PolyType(tparams, rtpe) =>
- PolyType(tparams, arrayToRepeated(rtpe))
- }
-
private def sigToType(sym: Symbol, sig: Name): Type = {
var index = 0
val end = sig.length
def accept(ch: Char) {
- assert(sig(index) == ch)
+ assert(sig.charAt(index) == ch, (sig.charAt(index), ch))
index += 1
}
def subName(isDelimiter: Char => Boolean): Name = {
val start = index
- while (!isDelimiter(sig(index))) { index += 1 }
+ while (!isDelimiter(sig.charAt(index))) { index += 1 }
sig.subName(start, index)
}
- def existentialType(tparams: List[Symbol], tp: Type): Type =
- if (tparams.isEmpty) tp else ExistentialType(tparams, tp)
def sig2type(tparams: immutable.Map[Name,Symbol], skiptvs: Boolean): Type = {
- val tag = sig(index); index += 1
+ val tag = sig.charAt(index); index += 1
tag match {
case BYTE_TAG => definitions.ByteClass.tpe
case CHAR_TAG => definitions.CharClass.tpe
@@ -732,50 +706,52 @@ abstract class ClassfileParser {
def processClassType(tp: Type): Type = tp match {
case TypeRef(pre, classSym, args) =>
val existentials = new ListBuffer[Symbol]()
- if (sig(index) == '<') {
+ if (sig.charAt(index) == '<') {
accept('<')
val xs = new ListBuffer[Type]()
var i = 0
- while (sig(index) != '>') {
- sig(index) match {
+ while (sig.charAt(index) != '>') {
+ sig.charAt(index) match {
case variance @ ('+' | '-' | '*') =>
index += 1
val bounds = variance match {
case '+' => TypeBounds.upper(objToAny(sig2type(tparams, skiptvs)))
- case '-' => TypeBounds.lower(sig2type(tparams, skiptvs))
+ case '-' =>
+ val tp = sig2type(tparams, skiptvs)
+ // sig2type seems to return AnyClass regardless of the situation:
+ // we don't want Any as a LOWER bound.
+ if (tp.typeSymbol == definitions.AnyClass) TypeBounds.empty
+ else TypeBounds.lower(tp)
case '*' => TypeBounds.empty
}
- val newtparam = sym.newExistential(sym.pos, newTypeName("?"+i)) setInfo bounds
+ val newtparam = sym.newExistential(newTypeName("?"+i), sym.pos) setInfo bounds
existentials += newtparam
- xs += newtparam.tpe //@M should probably be .tpeHK
+ xs += newtparam.tpeHK
i += 1
case _ =>
xs += sig2type(tparams, skiptvs)
}
}
accept('>')
- assert(xs.length > 0)
- existentialType(existentials.toList, typeRef(pre, classSym, xs.toList))
+ assert(xs.length > 0, tp)
+ newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList))
} else if (classSym.isMonomorphicType) {
tp
} else {
// raw type - existentially quantify all type parameters
val eparams = typeParamsToExistentials(classSym, classSym.unsafeTypeParams)
- val t = typeRef(pre, classSym, eparams.map(_.tpe))
- val res = existentialType(eparams, t)
- if (settings.debug.value && settings.verbose.value)
- println("raw type " + classSym + " -> " + res)
- res
+ val t = typeRef(pre, classSym, eparams.map(_.tpeHK))
+ newExistentialType(eparams, t)
}
case tp =>
- assert(sig(index) != '<')
+ assert(sig.charAt(index) != '<', s"sig=$sig, index=$index, tp=$tp")
tp
}
val classSym = classNameToSymbol(subName(c => c == ';' || c == '<'))
assert(!classSym.isOverloaded, classSym.alternatives)
var tpe = processClassType(processInner(classSym.tpe))
- while (sig(index) == '.') {
+ while (sig.charAt(index) == '.') {
accept('.')
val name = subName(c => c == ';' || c == '<' || c == '.').toTypeName
val clazz = tpe.member(name)
@@ -784,19 +760,24 @@ abstract class ClassfileParser {
accept(';')
tpe
case ARRAY_TAG =>
- while ('0' <= sig(index) && sig(index) <= '9') index += 1
+ while ('0' <= sig.charAt(index) && sig.charAt(index) <= '9') index += 1
var elemtp = sig2type(tparams, skiptvs)
// make unbounded Array[T] where T is a type variable into Array[T with Object]
// (this is necessary because such arrays have a representation which is incompatible
// with arrays of primitive types.
- if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe))
+ // NOTE that the comparison to Object only works for abstract types bounded by classes that are strict subclasses of Object
+ // if the bound is exactly Object, it will have been converted to Any, and the comparison will fail
+ // see also RestrictJavaArraysMap (when compiling java sources directly)
+ if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe)) {
elemtp = intersectionType(List(elemtp, definitions.ObjectClass.tpe))
- appliedType(definitions.ArrayClass.tpe, List(elemtp))
+ }
+
+ definitions.arrayType(elemtp)
case '(' =>
// we need a method symbol. given in line 486 by calling getType(methodSym, ..)
- assert(sym ne null)
+ assert(sym ne null, sig)
val paramtypes = new ListBuffer[Type]()
- while (sig(index) != ')') {
+ while (sig.charAt(index) != ')') {
paramtypes += objToAny(sig2type(tparams, skiptvs))
}
index += 1
@@ -816,9 +797,9 @@ abstract class ClassfileParser {
def sig2typeBounds(tparams: immutable.Map[Name, Symbol], skiptvs: Boolean): Type = {
val ts = new ListBuffer[Type]
- while (sig(index) == ':') {
+ while (sig.charAt(index) == ':') {
index += 1
- if (sig(index) != ':') // guard against empty class bound
+ if (sig.charAt(index) != ':') // guard against empty class bound
ts += objToAny(sig2type(tparams, skiptvs))
}
TypeBounds.upper(intersectionType(ts.toList, sym))
@@ -826,19 +807,19 @@ abstract class ClassfileParser {
var tparams = classTParams
val newTParams = new ListBuffer[Symbol]()
- if (sig(index) == '<') {
- assert(sym != null)
+ if (sig.charAt(index) == '<') {
+ assert(sym != null, sig)
index += 1
val start = index
- while (sig(index) != '>') {
+ while (sig.charAt(index) != '>') {
val tpname = subName(':'.==).toTypeName
- val s = sym.newTypeParameter(NoPosition, tpname)
+ val s = sym.newTypeParameter(tpname)
tparams = tparams + (tpname -> s)
sig2typeBounds(tparams, true)
newTParams += s
}
index = start
- while (sig(index) != '>') {
+ while (sig.charAt(index) != '>') {
val tpname = subName(':'.==).toTypeName
val s = tparams(tpname)
s.setInfo(sig2typeBounds(tparams, false))
@@ -857,12 +838,12 @@ abstract class ClassfileParser {
while (index < end) {
parents += sig2type(tparams, false) // here the variance doesnt'matter
}
- ClassInfoType(parents.toList, instanceDefs, sym)
+ ClassInfoType(parents.toList, instanceScope, sym)
}
- polyType(ownTypeParams, tpe)
+ GenPolyType(ownTypeParams, tpe)
} // sigToType
- class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType {
+ class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
}
@@ -882,42 +863,34 @@ abstract class ClassfileParser {
val sig = pool.getExternalName(in.nextChar)
val newType = sigToType(sym, sig)
sym.setInfo(newType)
- if (settings.debug.value && settings.verbose.value)
- println("" + sym + "; signature = " + sig + " type = " + newType)
- hasMeta = true
- } else
- in.skip(attrLen)
+ }
+ else in.skip(attrLen)
case tpnme.SyntheticATTR =>
- sym.setFlag(SYNTHETIC)
+ sym.setFlag(SYNTHETIC | ARTIFACT)
in.skip(attrLen)
case tpnme.BridgeATTR =>
sym.setFlag(BRIDGE)
in.skip(attrLen)
case tpnme.DeprecatedATTR =>
val arg = Literal(Constant("see corresponding Javadoc for more information."))
- sym addAnnotation AnnotationInfo(definitions.DeprecatedAttr.tpe, List(arg, Literal(Constant(""))), Nil)
+ sym.addAnnotation(definitions.DeprecatedAttr, arg, Literal(Constant("")))
in.skip(attrLen)
case tpnme.ConstantValueATTR =>
val c = pool.getConstant(in.nextChar)
val c1 = convertTo(c, symtype)
if (c1 ne null) sym.setInfo(ConstantType(c1))
- else println("failure to convert " + c + " to " + symtype); //debug
+ else debugwarn(s"failure to convert $c to $symtype")
case tpnme.ScalaSignatureATTR =>
if (!isScalaAnnot) {
- if (settings.debug.value)
- log("warning: symbol " + sym.fullName + " has pickled signature in attribute")
- unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.toString())
+ debugwarn(s"symbol ${sym.fullName} has pickled signature in attribute")
+ unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.name)
}
in.skip(attrLen)
case tpnme.ScalaATTR =>
isScalaRaw = true
- case tpnme.JacoMetaATTR =>
- val meta = pool.getName(in.nextChar).toString().trim()
- metaParser.parse(meta, sym, symtype)
- this.hasMeta = true
// Attribute on methods of java annotation classes when that method has a default
case tpnme.AnnotationDefaultATTR =>
- sym.addAnnotation(AnnotationInfo(definitions.AnnotationDefaultAttr.tpe, List(), List()))
+ sym.addAnnotation(definitions.AnnotationDefaultAttr)
in.skip(attrLen)
// Java annotations on classes / methods / fields with RetentionPolicy.RUNTIME
case tpnme.RuntimeAnnotationATTR =>
@@ -928,13 +901,13 @@ abstract class ClassfileParser {
case Some(san: AnnotationInfo) =>
val bytes =
san.assocs.find({ _._1 == nme.bytes }).get._2.asInstanceOf[ScalaSigBytes].bytes
- unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.toString())
+ unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.name)
case None =>
throw new RuntimeException("Scala class file does not contain Scala annotation")
}
- if (settings.debug.value)
- log("" + sym + "; annotations = " + sym.rawAnnotations)
- } else
+ debuglog("[class] << " + sym.fullName + sym.annotationsString)
+ }
+ else
in.skip(attrLen)
// TODO 1: parse runtime visible annotations on parameters
@@ -950,10 +923,16 @@ abstract class ClassfileParser {
val srcfileLeaf = pool.getName(in.nextChar).toString.trim
val srcpath = sym.enclosingPackage match {
case NoSymbol => srcfileLeaf
- case definitions.EmptyPackage => srcfileLeaf
+ case rootMirror.EmptyPackage => srcfileLeaf
case pkg => pkg.fullName(File.separatorChar)+File.separator+srcfileLeaf
}
srcfile0 = settings.outputDirs.srcFilesFor(in.file, srcpath).find(_.exists)
+ case tpnme.CodeATTR =>
+ if (sym.owner.isInterface) {
+ sym setFlag DEFAULTMETHOD
+ log(s"$sym in ${sym.owner} is a java8+ default method.")
+ }
+ in.skip(attrLen)
case _ =>
in.skip(attrLen)
}
@@ -964,7 +943,7 @@ abstract class ClassfileParser {
val index = in.nextChar
tag match {
case STRING_TAG =>
- Some(LiteralAnnotArg(Constant(pool.getName(index).toString())))
+ Some(LiteralAnnotArg(Constant(pool.getName(index).toString)))
case BOOL_TAG | BYTE_TAG | CHAR_TAG | SHORT_TAG | INT_TAG |
LONG_TAG | FLOAT_TAG | DOUBLE_TAG =>
Some(LiteralAnnotArg(pool.getConstant(index)))
@@ -973,9 +952,14 @@ abstract class ClassfileParser {
case ENUM_TAG =>
val t = pool.getType(index)
val n = pool.getName(in.nextChar)
- val s = t.typeSymbol.companionModule.info.decls.lookup(n)
- assert(s != NoSymbol, t)
- Some(LiteralAnnotArg(Constant(s)))
+ val module = t.typeSymbol.companionModule
+ val s = module.info.decls.lookup(n)
+ if (s != NoSymbol) Some(LiteralAnnotArg(Constant(s)))
+ else {
+ warning(s"""While parsing annotations in ${in.file}, could not find $n in enum $module.\nThis is likely due to an implementation restriction: an annotation argument cannot refer to a member of the annotated class (SI-7014).""")
+ None
+ }
+
case ARRAY_TAG =>
val arr = new ArrayBuffer[ClassfileAnnotArg]()
var hasError = false
@@ -993,27 +977,22 @@ abstract class ClassfileParser {
def parseScalaSigBytes: Option[ScalaSigBytes] = {
val tag = in.nextByte.toChar
- assert(tag == STRING_TAG)
- Some(ScalaSigBytes(pool.getBytes(in.nextChar)))
+ assert(tag == STRING_TAG, tag)
+ Some(ScalaSigBytes(pool getBytes in.nextChar))
}
- def parseScalaLongSigBytes: Option[ScalaSigBytes] = try {
+ def parseScalaLongSigBytes: Option[ScalaSigBytes] = {
val tag = in.nextByte.toChar
- assert(tag == ARRAY_TAG)
+ assert(tag == ARRAY_TAG, tag)
val stringCount = in.nextChar
val entries =
for (i <- 0 until stringCount) yield {
val stag = in.nextByte.toChar
- assert(stag == STRING_TAG)
+ assert(stag == STRING_TAG, stag)
in.nextChar.toInt
}
Some(ScalaSigBytes(pool.getBytes(entries.toList)))
}
- catch {
- case e: Throwable =>
- e.printStackTrace
- throw e
- }
/** Parse and return a single annotation. If it is malformed,
* return None.
@@ -1046,13 +1025,21 @@ abstract class ClassfileParser {
}
if (hasError) None
else Some(AnnotationInfo(attrType, List(), nvpairs.toList))
- } catch {
- case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
- case ex: Throwable =>
- if (settings.debug.value)
- log("dropping annotation on " + sym + ", an error occured during parsing (e.g. annotation class not found)")
-
- None // ignore malformed annotations ==> t1135
+ }
+ catch {
+ case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
+ case ex: java.lang.Error => throw ex
+ case ex: Throwable =>
+ // We want to be robust when annotations are unavailable, so the very least
+ // we can do is warn the user about the exception
+ // There was a reference to ticket 1135, but that is outdated: a reference to a class not on
+ // the classpath would *not* end up here. A class not found is signaled
+ // with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example),
+ // and that should never be swallowed silently.
+ warning(s"Caught: $ex while parsing annotations in ${in.file}")
+ if (settings.debug.value) ex.printStackTrace()
+
+ None // ignore malformed annotations
}
/**
@@ -1062,10 +1049,12 @@ abstract class ClassfileParser {
def parseExceptions(len: Int) {
val nClasses = in.nextChar
for (n <- 0 until nClasses) {
+ // FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (SI-7065)
val cls = pool.getClassSymbol(in.nextChar.toInt)
- sym.addAnnotation(AnnotationInfo(definitions.ThrowsClass.tpe,
- Literal(Constant(cls.tpe)) :: Nil,
- Nil))
+ // we call initialize due to the fact that we call Symbol.isMonomorphicType in addThrowsAnnotation
+ // and that method requires Symbol to be forced to give the right answers, see SI-7107 for details
+ cls.initialize
+ sym.addThrowsAnnotation(cls)
}
}
@@ -1088,55 +1077,56 @@ abstract class ClassfileParser {
}
// begin parseAttributes
- val attrCount = in.nextChar
- for (i <- 0 until attrCount) parseAttribute()
+ for (i <- 0 until in.nextChar) parseAttribute()
}
/** Enter own inner classes in the right scope. It needs the scopes to be set up,
* and implicitly current class' superclasses.
*/
private def enterOwnInnerClasses() {
- def className(name: Name): Name = {
+ def className(name: Name): Name =
name.subName(name.lastPos('.') + 1, name.length)
- }
- def enterClassAndModule(entry: InnerClassEntry, completer: global.loaders.SymbolLoader, jflags: Int) {
- val name = entry.originalName
- var sflags = transFlags(jflags, true)
+ def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) {
+ val completer = new global.loaders.ClassfileLoader(file)
+ val name = entry.originalName
+ var sflags = toScalaClassFlags(jflags)
+ val owner = getOwner(jflags)
+ val scope = getScope(jflags)
+ val innerClass = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer
+ val innerModule = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer
- val innerClass = getOwner(jflags).newClass(NoPosition, name.toTypeName).setInfo(completer).setFlag(sflags)
- val innerModule = getOwner(jflags).newModule(NoPosition, name.toTermName).setInfo(completer).setFlag(sflags)
- innerModule.moduleClass.setInfo(global.loaders.moduleClassLoader)
+ innerModule.moduleClass setInfo global.loaders.moduleClassLoader
+ List(innerClass, innerModule.moduleClass) foreach (_.associatedFile = file)
- getScope(jflags).enter(innerClass)
- getScope(jflags).enter(innerModule)
+ scope enter innerClass
+ scope enter innerModule
val decls = innerClass.enclosingPackage.info.decls
- val e = decls.lookupEntry(className(entry.externalName))
- if (e ne null) {
- //println("removing " + e)
- decls.unlink(e)
- }
- val e1 = decls.lookupEntry(className(entry.externalName).toTypeName)
- if (e1 ne null) {
- //println("removing " + e1)
- decls.unlink(e1)
+ def unlinkIfPresent(name: Name) = {
+ val e = decls lookupEntry name
+ if (e ne null)
+ decls unlink e
}
+
+ val cName = className(entry.externalName)
+ unlinkIfPresent(cName.toTermName)
+ unlinkIfPresent(cName.toTypeName)
}
- for (entry <- innerClasses.values) {
+ for (entry <- innerClasses.entries) {
// create a new class member for immediate inner classes
- if (entry.outerName == externalName) {
+ if (entry.outerName == currentClass) {
val file = global.classPath.findSourceFile(entry.externalName.toString) getOrElse {
throw new AssertionError(entry.externalName)
}
- enterClassAndModule(entry, new global.loaders.ClassfileLoader(file), entry.jflags)
+ enterClassAndModule(entry, file, entry.jflags)
}
}
}
- /** Parse inner classes. Expects in.bp to point to the superclass entry. Restores the
- * old bp.
+ /** Parse inner classes. Expects `in.bp` to point to the superclass entry.
+ * Restores the old `bp`.
*/
def parseInnerClasses() {
val oldbp = in.bp
@@ -1149,11 +1139,6 @@ abstract class ClassfileParser {
val attrLen = in.nextInt
attrName match {
case tpnme.SignatureATTR =>
- if (!isScala)
- hasMeta = true
- in.skip(attrLen)
- case tpnme.JacoMetaATTR =>
- this.hasMeta = true
in.skip(attrLen)
case tpnme.ScalaSignatureATTR =>
isScala = true
@@ -1167,14 +1152,9 @@ abstract class ClassfileParser {
case tpnme.InnerClassesATTR if !isScala =>
val entries = in.nextChar.toInt
for (i <- 0 until entries) {
- val innerIndex = in.nextChar.toInt
- val outerIndex = in.nextChar.toInt
- val nameIndex = in.nextChar.toInt
- val jflags = in.nextChar.toInt
- if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0) {
- val entry = InnerClassEntry(innerIndex, outerIndex, nameIndex, jflags)
- innerClasses += (pool.getClassName(innerIndex) -> entry)
- }
+ val innerIndex, outerIndex, nameIndex, jflags = in.nextChar.toInt
+ if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0)
+ innerClasses add InnerClassEntry(innerIndex, outerIndex, nameIndex, jflags)
}
case _ =>
in.skip(attrLen)
@@ -1185,83 +1165,78 @@ abstract class ClassfileParser {
/** An entry in the InnerClasses attribute of this class file. */
case class InnerClassEntry(external: Int, outer: Int, name: Int, jflags: Int) {
- def externalName = pool.getClassName(external)
- def outerName = pool.getClassName(outer)
- def originalName = pool.getName(name)
+ def externalName = pool getClassName external
+ def outerName = pool getClassName outer
+ def originalName = pool getName name
+ def isStatic = ClassfileParser.this.isStatic(jflags)
+ def isModule = originalName.isTermName
+ def scope = if (isStatic) staticScope else instanceScope
+ def enclosing = if (isStatic) enclModule else enclClass
+
+ // The name of the outer class, without its trailing $ if it has one.
+ private def strippedOuter = nme stripModuleSuffix outerName
+ private def isInner = innerClasses contains strippedOuter
+ private def enclClass = if (isInner) innerClasses innerSymbol strippedOuter else classNameToSymbol(strippedOuter)
+ private def enclModule = enclClass.companionModule
+
+ private def staticWord = if (isStatic) "static " else ""
+ override def toString = s"$staticWord$originalName in $outerName ($externalName)"
+ }
- override def toString =
- originalName + " in " + outerName + "(" + externalName +")"
+ /** Return the Symbol of the top level class enclosing `name`,
+ * or the symbol of `name` itself if no enclosing classes are found.
+ */
+ def topLevelClass(name: Name): Symbol = innerClasses getEntry name match {
+ case Some(entry) => topLevelClass(entry.outerName)
+ case _ => classNameToSymbol(name)
}
- object innerClasses extends collection.mutable.HashMap[Name, InnerClassEntry] {
- /** Return the Symbol of the top level class enclosing 'name', or 'name's symbol
- * if no entry found for 'name'.
- */
- def topLevelClass(name: Name): Symbol = {
- val tlName = if (isDefinedAt(name)) {
- var entry = this(name)
- while (isDefinedAt(entry.outerName))
- entry = this(entry.outerName)
- entry.outerName
- } else
- name
- classNameToSymbol(tlName)
+ /** Return the class symbol for the given name. It looks it up in its outer class.
+ * Forces all outer class symbols to be completed.
+ *
+ * If the given name is not an inner class, it returns the symbol found in `definitions`.
+ */
+ object innerClasses {
+ private val inners = mutable.HashMap[Name, InnerClassEntry]()
+
+ def contains(name: Name) = inners contains name
+ def getEntry(name: Name) = inners get name
+ def entries = inners.values
+
+ def add(entry: InnerClassEntry): Unit = {
+ inners get entry.externalName foreach (existing =>
+ debugwarn(s"Overwriting inner class entry! Was $existing, now $entry")
+ )
+ inners(entry.externalName) = entry
}
-
- /** Return the class symbol for 'externalName'. It looks it up in its outer class.
- * Forces all outer class symbols to be completed.
- *
- * If the given name is not an inner class, it returns the symbol found in 'definitions'.
- */
- def classSymbol(externalName: Name): Symbol = {
- /** Return the symbol of `innerName', having the given `externalName'. */
- def innerSymbol(externalName: Name, innerName: Name, static: Boolean): Symbol = {
- def getMember(sym: Symbol, name: Name): Symbol =
- if (static)
- if (sym == clazz) staticDefs.lookup(name)
- else sym.companionModule.info.member(name)
- else
- if (sym == clazz) instanceDefs.lookup(name)
- else sym.info.member(name)
-
- innerClasses.get(externalName) match {
- case Some(entry) =>
- val outerName =
- if (entry.outerName.endsWith("$")) entry.outerName.subName(0, entry.outerName.length - 1)
- else entry.outerName
- val sym = classSymbol(outerName)
- val s =
- // if loading during initialization of `definitions' typerPhase is not yet set.
- // in that case we simply load the member at the current phase
- if (currentRun.typerPhase != null)
- atPhase(currentRun.typerPhase)(getMember(sym, innerName.toTypeName))
- else
- getMember(sym, innerName.toTypeName)
- assert(s ne NoSymbol, sym + "." + innerName + " linkedModule: " + sym.companionModule + sym.companionModule.info.members)
- s
-
- case None =>
- val cls = classNameToSymbol(externalName)
- cls
- //if (static) cls.companionClass else cls
- }
- }
-
- get(externalName) match {
- case Some(entry) =>
- val clazz = innerSymbol(entry.externalName, entry.originalName, isStatic(entry.jflags))
- clazz
- case None =>
- classNameToSymbol(externalName)
- }
+ def innerSymbol(externalName: Name): Symbol = this getEntry externalName match {
+ case Some(entry) => innerSymbol(entry)
+ case _ => NoSymbol
+ }
+ // if loading during initialization of `definitions` typerPhase is not yet set.
+ // in that case we simply load the member at the current phase
+ @inline private def enteringTyperIfPossible(body: => Symbol): Symbol =
+ if (currentRun.typerPhase eq null) body else beforeTyper(body)
+
+ private def innerSymbol(entry: InnerClassEntry): Symbol = {
+ val name = entry.originalName.toTypeName
+ val enclosing = entry.enclosing
+ def getMember = (
+ if (enclosing == clazz) entry.scope lookup name
+ else enclosing.info member name
+ )
+ enteringTyperIfPossible(getMember)
+ /** There used to be an assertion that this result is not NoSymbol; changing it to an error
+ * revealed it had been going off all the time, but has been swallowed by a catch t: Throwable
+ * in Repository.scala. Since it has been accomplishing nothing except misleading anyone who
+ * thought it wasn't triggering, I removed it entirely.
+ */
}
}
- class LazyAliasType(alias: Symbol) extends LazyType {
+ class LazyAliasType(alias: Symbol) extends LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) {
- alias.initialize
- val tparams1 = cloneSymbols(alias.typeParams)
- sym.setInfo(typeFun(tparams1, alias.tpe.substSym(alias.typeParams, tparams1)))
+ sym setInfo createFromClonedSymbols(alias.initialize.typeParams, alias.tpe)(typeFun)
}
}
@@ -1286,51 +1261,12 @@ abstract class ClassfileParser {
}
protected def getOwner(flags: Int): Symbol =
- if (isStatic(flags)) statics else clazz
+ if (isStatic(flags)) moduleClass else clazz
protected def getScope(flags: Int): Scope =
- if (isStatic(flags)) staticDefs else instanceDefs
-
- protected def transFlags(flags: Int, isClass: Boolean): Long = {
- var res = 0l
- if ((flags & JAVA_ACC_PRIVATE) != 0)
- res = res | PRIVATE
- else if ((flags & JAVA_ACC_PROTECTED) != 0)
- res = res | PROTECTED
- if ((flags & JAVA_ACC_ABSTRACT) != 0 && (flags & JAVA_ACC_ANNOTATION) == 0)
- res = res | DEFERRED
- if ((flags & JAVA_ACC_FINAL) != 0)
- res = res | FINAL
- if (((flags & JAVA_ACC_INTERFACE) != 0) &&
- ((flags & JAVA_ACC_ANNOTATION) == 0))
- res = res | TRAIT | INTERFACE | ABSTRACT
- if ((flags & JAVA_ACC_SYNTHETIC) != 0)
- res = res | SYNTHETIC
- if ((flags & JAVA_ACC_STATIC) != 0)
- res = res | STATIC
- if (isClass && ((res & DEFERRED) != 0L))
- res = res & ~DEFERRED | ABSTRACT
-
- res | JAVA
- }
-
- private def setPrivateWithin(sym: Symbol, jflags: Int) {
- if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)) == 0)
- // See ticket #1687 for an example of when topLevelClass is NoSymbol: it
- // apparently occurs when processing v45.3 bytecode.
- if (sym.toplevelClass != NoSymbol)
- sym.privateWithin = sym.toplevelClass.owner
-
- // protected in java means package protected. #3946
- if ((jflags & JAVA_ACC_PROTECTED) != 0)
- if (sym.toplevelClass != NoSymbol)
- sym.privateWithin = sym.toplevelClass.owner
- }
+ if (isStatic(flags)) staticScope else instanceScope
- @inline final private def isPrivate(flags: Int) =
- (flags & JAVA_ACC_PRIVATE) != 0
- @inline final private def isStatic(flags: Int) =
- (flags & JAVA_ACC_STATIC) != 0
- @inline final private def hasAnnotation(flags: Int) =
- (flags & JAVA_ACC_ANNOTATION) != 0
+ private def isPrivate(flags: Int) = (flags & JAVA_ACC_PRIVATE) != 0
+ private def isStatic(flags: Int) = (flags & JAVA_ACC_STATIC) != 0
+ private def hasAnnotation(flags: Int) = (flags & JAVA_ACC_ANNOTATION) != 0
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index ac2cd9e..d0c540a 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Iulian Dragos
*/
@@ -10,9 +10,8 @@ package classfile
import scala.collection.{ mutable, immutable }
import mutable.ListBuffer
import backend.icode._
-import io.AbstractFile
import ClassfileConstants._
-import Flags._
+import scala.reflect.internal.Flags._
/** ICode reader from Java bytecode.
*
@@ -26,10 +25,7 @@ abstract class ICodeReader extends ClassfileParser {
var instanceCode: IClass = null // the ICode class for the current symbol
var staticCode: IClass = null // the ICode class static members
- var method: IMethod = _ // the current IMethod
-
- val nothingName = newTermName(SCALA_NOTHING)
- val nullName = newTermName(SCALA_NULL)
+ var method: IMethod = NoIMethod // the current IMethod
var isScalaModule = false
/** Read back bytecode for the given class symbol. It returns
@@ -37,17 +33,16 @@ abstract class ICodeReader extends ClassfileParser {
* for non-static members.
*/
def readClass(cls: Symbol): (IClass, IClass) = {
- var classFile: AbstractFile = null;
- var sym = cls
- sym.info // ensure accurate type information
+ var classFile: io.AbstractFile = null;
+ cls.info // ensure accurate type information
isScalaModule = cls.isModule && !cls.isJavaDefined
- log("Reading class: " + cls + " isScalaModule?: " + isScalaModule)
- val name = cls.fullName('.') + (if (sym.hasFlag(MODULE)) "$" else "")
+ log("ICodeReader reading " + cls)
+ val name = cls.javaClassName
classPath.findSourceFile(name) match {
- case Some(classFile) => parse(classFile, sym)
- case _ => throw new MissingRequirementError("Could not find bytecode for " + cls)
+ case Some(classFile) => parse(classFile, cls)
+ case _ => MissingRequirementError.notFound("Could not find bytecode for " + cls)
}
(staticCode, instanceCode)
@@ -65,9 +60,8 @@ abstract class ICodeReader extends ClassfileParser {
this.staticCode = new IClass(staticModule)
val jflags = in.nextChar
val isAttribute = (jflags & JAVA_ACC_ANNOTATION) != 0
- var sflags = transFlags(jflags, true)
- if ((sflags & DEFERRED) != 0L) sflags = sflags & ~DEFERRED | ABSTRACT
- val c = pool.getClassSymbol(in.nextChar)
+ val sflags = toScalaClassFlags(jflags) // what, this is never used??
+ val c = pool getClassSymbol in.nextChar
parseInnerClasses()
@@ -83,38 +77,31 @@ abstract class ICodeReader extends ClassfileParser {
override def parseField() {
val (jflags, sym) = parseMember(true)
- getCode(jflags).addField(new IField(sym))
+ getCode(jflags) addField new IField(sym)
skipAttributes()
}
private def parseMember(field: Boolean): (Int, Symbol) = {
val jflags = in.nextChar
- val name = pool.getName(in.nextChar)
+ val name = pool getName in.nextChar
val owner = getOwner(jflags)
- val dummySym = owner.newMethod(owner.pos, name) setFlag javaToScalaFlags(jflags)
+ val dummySym = owner.newMethod(name, owner.pos, toScalaMethodFlags(jflags))
try {
- val ch = in.nextChar
- var tpe = pool.getType(dummySym, ch)
+ val ch = in.nextChar
+ val tpe = pool.getType(dummySym, ch)
if ("<clinit>" == name.toString)
(jflags, NoSymbol)
else {
val owner = getOwner(jflags)
- var sym = owner.info.findMember(name, 0, 0, false).suchThat(old => sameType(old.tpe, tpe));
+ var sym = owner.info.findMember(name, 0, 0, false).suchThat(old => sameType(old.tpe, tpe))
if (sym == NoSymbol)
- sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(old => old.tpe =:= tpe);
+ sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe)
if (sym == NoSymbol) {
- log("Could not find symbol for " + name + ": " + tpe)
- log(owner.info.member(name).tpe + " : " + tpe)
- if (name.toString() == "toMap")
- tpe = pool.getType(dummySym, ch)
- if (field)
- sym = owner.newValue(owner.pos, name).setInfo(tpe).setFlag(MUTABLE | javaToScalaFlags(jflags))
- else
- sym = dummySym.setInfo(tpe)
- owner.info.decls.enter(sym)
- log("added " + sym + ": " + sym.tpe)
+ sym = if (field) owner.newValue(name, owner.pos, toScalaFieldFlags(jflags)) else dummySym
+ sym setInfoAndEnter tpe
+ log(s"ICodeReader could not locate ${name.decode} in $owner. Created ${sym.defString}.")
}
(jflags, sym)
}
@@ -124,20 +111,7 @@ abstract class ICodeReader extends ClassfileParser {
}
}
- private def javaToScalaFlags(flags: Int): Long = {
- import ch.epfl.lamp.fjbg.JAccessFlags._
-
- var res = 0L
- if ((flags & ACC_PRIVATE) != 0) res |= Flags.PRIVATE
- if ((flags & ACC_PROTECTED) != 0) res |= Flags.PROTECTED
- if ((flags & ACC_FINAL) != 0) res |= Flags.FINAL
- if ((flags & ACC_ABSTRACT) != 0) res |= Flags.DEFERRED
- if ((flags & ACC_SYNTHETIC) != 0) res |= Flags.SYNTHETIC
-
- res
- }
-
- /** Checks if tp1 is the same type as tp2, modulo implicit methods.
+ /** Checks if `tp1` is the same type as `tp2`, modulo implicit methods.
* We don't care about the distinction between implicit and explicit
* methods as this point, and we can't get back the information from
* bytecode anyway.
@@ -154,7 +128,7 @@ abstract class ICodeReader extends ClassfileParser {
var beginning = in.bp
try {
if (sym != NoSymbol) {
- this.method = new IMethod(sym);
+ this.method = new IMethod(sym)
this.method.returnType = toTypeKind(sym.tpe.resultType)
getCode(jflags).addMethod(this.method)
if ((jflags & JAVA_ACC_NATIVE) != 0)
@@ -162,13 +136,13 @@ abstract class ICodeReader extends ClassfileParser {
val attributeCount = in.nextChar
for (i <- 0 until attributeCount) parseAttribute()
} else {
- if (settings.debug.value) log("Skipping non-existent method.");
+ debuglog("Skipping non-existent method.");
skipAttributes();
}
} catch {
case e: MissingRequirementError =>
in.bp = beginning; skipAttributes
- if (settings.debug.value) log("Skipping non-existent method. " + e.msg);
+ debuglog("Skipping non-existent method. " + e.msg);
}
}
@@ -184,26 +158,23 @@ abstract class ICodeReader extends ClassfileParser {
}
override def classNameToSymbol(name: Name) = {
- val sym = if (name == nothingName)
+ val sym = if (name == fulltpnme.RuntimeNothing)
definitions.NothingClass
- else if (name == nullName)
+ else if (name == fulltpnme.RuntimeNull)
definitions.NullClass
else if (nme.isImplClassName(name)) {
- val iface = definitions.getClass(nme.interfaceName(name))
+ val iface = rootMirror.getClassByName(tpnme.interfaceName(name))
log("forcing " + iface.owner + " at phase: " + phase + " impl: " + iface.implClass)
iface.owner.info // force the mixin type-transformer
- definitions.getClass(name)
- } else if (name.endsWith("$")) {
- val sym = forceMangledName(name.subName(0, name.length -1).decode, true)
-// println("classNameToSymbol: " + name + " sym: " + sym)
- if (name.toString == "scala.collection.immutable.Stream$$hash$colon$colon$")
- print("")
- if (sym == NoSymbol)
- definitions.getModule(name.subName(0, name.length - 1))
- else sym
- } else {
+ rootMirror.getClassByName(name)
+ }
+ else if (nme.isModuleName(name)) {
+ val strippedName = nme.stripModuleSuffix(name)
+ forceMangledName(newTermName(strippedName.decode), true) orElse rootMirror.getModule(strippedName)
+ }
+ else {
forceMangledName(name, false)
- atPhase(currentRun.flattenPhase.next)(definitions.getClass(name))
+ afterFlatten(rootMirror.getClassByName(name.toTypeName))
}
if (sym.isModule)
sym.moduleClass
@@ -233,7 +204,7 @@ abstract class ICodeReader extends ClassfileParser {
/** Parse 16 bit jump target. */
def parseJumpTarget = {
- size = size + 2
+ size += 2
val offset = in.nextChar.toShort
val target = pc + offset
assert(target >= 0 && target < codeLength, "Illegal jump target: " + target)
@@ -252,22 +223,22 @@ abstract class ICodeReader extends ClassfileParser {
val instr = toUnsignedByte(in.nextByte)
instr match {
case JVM.nop => parseInstruction
- case JVM.aconst_null => code.emit(CONSTANT(Constant(null)))
- case JVM.iconst_m1 => code.emit(CONSTANT(Constant(-1)))
- case JVM.iconst_0 => code.emit(CONSTANT(Constant(0)))
- case JVM.iconst_1 => code.emit(CONSTANT(Constant(1)))
- case JVM.iconst_2 => code.emit(CONSTANT(Constant(2)))
- case JVM.iconst_3 => code.emit(CONSTANT(Constant(3)))
- case JVM.iconst_4 => code.emit(CONSTANT(Constant(4)))
- case JVM.iconst_5 => code.emit(CONSTANT(Constant(5)))
-
- case JVM.lconst_0 => code.emit(CONSTANT(Constant(0l)))
- case JVM.lconst_1 => code.emit(CONSTANT(Constant(1l)))
- case JVM.fconst_0 => code.emit(CONSTANT(Constant(0.0f)))
- case JVM.fconst_1 => code.emit(CONSTANT(Constant(1.0f)))
- case JVM.fconst_2 => code.emit(CONSTANT(Constant(2.0f)))
- case JVM.dconst_0 => code.emit(CONSTANT(Constant(0.0)))
- case JVM.dconst_1 => code.emit(CONSTANT(Constant(1.0)))
+ case JVM.aconst_null => code emit CONSTANT(Constant(null))
+ case JVM.iconst_m1 => code emit CONSTANT(Constant(-1))
+ case JVM.iconst_0 => code emit CONSTANT(Constant(0))
+ case JVM.iconst_1 => code emit CONSTANT(Constant(1))
+ case JVM.iconst_2 => code emit CONSTANT(Constant(2))
+ case JVM.iconst_3 => code emit CONSTANT(Constant(3))
+ case JVM.iconst_4 => code emit CONSTANT(Constant(4))
+ case JVM.iconst_5 => code emit CONSTANT(Constant(5))
+
+ case JVM.lconst_0 => code emit CONSTANT(Constant(0l))
+ case JVM.lconst_1 => code emit CONSTANT(Constant(1l))
+ case JVM.fconst_0 => code emit CONSTANT(Constant(0.0f))
+ case JVM.fconst_1 => code emit CONSTANT(Constant(1.0f))
+ case JVM.fconst_2 => code emit CONSTANT(Constant(2.0f))
+ case JVM.dconst_0 => code emit CONSTANT(Constant(0.0))
+ case JVM.dconst_1 => code emit CONSTANT(Constant(1.0))
case JVM.bipush => code.emit(CONSTANT(Constant(in.nextByte))); size += 1
case JVM.sipush => code.emit(CONSTANT(Constant(in.nextChar))); size += 2
@@ -458,7 +429,7 @@ abstract class ICodeReader extends ClassfileParser {
val padding = if ((pc + size) % 4 != 0) 4 - ((pc + size) % 4) else 0
size += padding
in.bp += padding
- assert((pc + size % 4) != 0)
+ assert((pc + size % 4) != 0, pc)
/* var byte1 = in.nextByte; size += 1;
while (byte1 == 0) { byte1 = in.nextByte; size += 1; }
val default = byte1 << 24 | in.nextByte << 16 | in.nextByte << 8 | in.nextByte;
@@ -478,7 +449,7 @@ abstract class ICodeReader extends ClassfileParser {
val padding = if ((pc + size) % 4 != 0) 4 - ((pc + size) % 4) else 0
size += padding
in.bp += padding
- assert((pc + size % 4) != 0)
+ assert((pc + size % 4) != 0, pc)
val default = pc + in.nextInt; size += 4
val npairs = in.nextInt; size += 4
var tags: List[List[Int]] = Nil
@@ -502,9 +473,9 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.getstatic =>
val field = pool.getMemberSymbol(in.nextChar, true); size += 2
if (field.hasModuleFlag)
- code.emit(LOAD_MODULE(field))
+ code emit LOAD_MODULE(field)
else
- code.emit(LOAD_FIELD(field, true))
+ code emit LOAD_FIELD(field, true)
case JVM.putstatic =>
val field = pool.getMemberSymbol(in.nextChar, true); size += 2
code.emit(STORE_FIELD(field, true))
@@ -518,23 +489,35 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.invokevirtual =>
val m = pool.getMemberSymbol(in.nextChar, false); size += 2
code.emit(CALL_METHOD(m, Dynamic))
+ method.updateRecursive(m)
case JVM.invokeinterface =>
val m = pool.getMemberSymbol(in.nextChar, false); size += 4
in.skip(2)
code.emit(CALL_METHOD(m, Dynamic))
+ // invokeinterface can't be recursive
case JVM.invokespecial =>
val m = pool.getMemberSymbol(in.nextChar, false); size += 2
val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(true)
else SuperCall(m.owner.name);
code.emit(CALL_METHOD(m, style))
+ method.updateRecursive(m)
case JVM.invokestatic =>
val m = pool.getMemberSymbol(in.nextChar, true); size += 2
if (isBox(m))
code.emit(BOX(toTypeKind(m.info.paramTypes.head)))
else if (isUnbox(m))
code.emit(UNBOX(toTypeKind(m.info.resultType)))
- else
+ else {
code.emit(CALL_METHOD(m, Static(false)))
+ method.updateRecursive(m)
+ }
+ case JVM.invokedynamic =>
+ // TODO, this is just a place holder. A real implementation must parse the class constant entry
+ debuglog("Found JVM invokedynamic instructionm, inserting place holder ICode INVOKE_DYNAMIC.")
+ containsInvokeDynamic = true
+ val poolEntry = in.nextChar
+ in.skip(2)
+ code.emit(INVOKE_DYNAMIC(poolEntry))
case JVM.new_ =>
code.emit(NEW(REFERENCE(pool.getClassSymbol(in.nextChar))))
@@ -590,14 +573,14 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.multianewarray =>
size += 3
- val tpe = toTypeKind(pool.getClassOrArrayType(in.nextChar))
+ val tpe = toTypeKind(pool getClassOrArrayType in.nextChar)
val dim = in.nextByte
// assert(dim == 1, "Cannot handle multidimensional arrays yet.")
- code.emit(CREATE_ARRAY(tpe, dim))
+ code emit CREATE_ARRAY(tpe, dim)
- case JVM.ifnull => code.emit(LCZJUMP(parseJumpTarget, pc + size, EQ, ObjectReference))
- case JVM.ifnonnull => code.emit(LCZJUMP(parseJumpTarget, pc + size, NE, ObjectReference))
- case JVM.goto_w => code.emit(LJUMP(parseJumpTargetW))
+ case JVM.ifnull => code emit LCZJUMP(parseJumpTarget, pc + size, EQ, ObjectReference)
+ case JVM.ifnonnull => code emit LCZJUMP(parseJumpTarget, pc + size, NE, ObjectReference)
+ case JVM.goto_w => code emit LJUMP(parseJumpTargetW)
case JVM.jsr_w => sys.error("Cannot handle jsr/ret")
// case _ => sys.error("Unknown bytecode")
@@ -618,6 +601,7 @@ abstract class ICodeReader extends ClassfileParser {
while (pc < codeLength) parseInstruction
val exceptionEntries = in.nextChar.toInt
+ code.containsEHs = (exceptionEntries != 0)
var i = 0
while (i < exceptionEntries) {
// skip start end PC
@@ -631,13 +615,15 @@ abstract class ICodeReader extends ClassfileParser {
skipAttributes()
code.toBasicBlock
- assert(method.code ne null)
+ assert(method.hasCode, method)
// reverse parameters, as they were prepended during code generation
method.params = method.params.reverse
- if (code.containsDUPX) {
- code.resolveDups
- }
- if (code.containsNEW) code.resolveNEWs
+
+ if (code.containsDUPX)
+ code.resolveDups()
+
+ if (code.containsNEW)
+ code.resolveNEWs()
}
/** Note: these methods are different from the methods of the same name found
@@ -647,11 +633,11 @@ abstract class ICodeReader extends ClassfileParser {
* such as Int.box(5).
*/
def isBox(m: Symbol): Boolean =
- (m.owner == definitions.BoxesRunTimeClass.moduleClass
+ (m.owner == definitions.BoxesRunTimeClass
&& m.name.startsWith("boxTo"))
def isUnbox(m: Symbol): Boolean =
- (m.owner == definitions.BoxesRunTimeClass.moduleClass
+ (m.owner == definitions.BoxesRunTimeClass
&& m.name.startsWith("unboxTo"))
/** Return the icode class that should include members with the given flags.
@@ -664,11 +650,13 @@ abstract class ICodeReader extends ClassfileParser {
class LinearCode {
var instrs: ListBuffer[(Int, Instruction)] = new ListBuffer
- var jmpTargets: mutable.Set[Int] = new mutable.HashSet[Int]
- var locals: mutable.Map[Int, List[(Local, TypeKind)]] = new mutable.HashMap()
+ var jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]()
+ var locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap()
var containsDUPX = false
var containsNEW = false
+ var containsEHs = false
+ var containsInvokeDynamic = false
def emit(i: Instruction) {
instrs += ((pc, i))
@@ -679,20 +667,22 @@ abstract class ICodeReader extends ClassfileParser {
}
/** Break this linear code in basic block representation
- * As a side effect, it sets the 'code' field of the current
+ * As a side effect, it sets the `code` field of the current
*/
def toBasicBlock: Code = {
import opcodes._
val code = new Code(method)
method.setCode(code)
+ method.bytecodeHasEHs = containsEHs
+ method.bytecodeHasInvokeDynamic = containsInvokeDynamic
var bb = code.startBlock
def makeBasicBlocks: mutable.Map[Int, BasicBlock] =
mutable.Map(jmpTargets.toSeq map (_ -> code.newBlock): _*)
val blocks = makeBasicBlocks
- var otherBlock: BasicBlock = null
+ var otherBlock: BasicBlock = NoBasicBlock
var disableJmpTarget = false
for ((pc, instr) <- instrs.iterator) {
@@ -726,13 +716,13 @@ abstract class ICodeReader extends ClassfileParser {
bb.emitOnly(SWITCH(tags, targets map blocks))
case RETURN(_) =>
- bb.emitOnly(instr)
+ bb emitOnly instr
case THROW(clasz) =>
- bb.emitOnly(instr)
+ bb emitOnly instr
case _ =>
- bb.emit(instr)
+ bb emit instr
}
}
@@ -747,21 +737,18 @@ abstract class ICodeReader extends ClassfileParser {
import analysis.typeFlowLattice.IState
/** Abstract interpretation for one instruction. */
- override def interpret(in: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = {
- var out = IState(new VarBinding(in.vars), new TypeStack(in.stack));
- val bindings = out.vars;
- val stack = out.stack;
+ override def mutatingInterpret(out: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = {
+ val bindings = out.vars
+ val stack = out.stack
import stack.push
i match {
case DUP_X1 =>
val (one, two) = stack.pop2
push(one); push(two); push(one);
- out = IState(bindings, stack)
case DUP_X2 =>
val (one, two, three) = stack.pop3
push(one); push(three); push(two); push(one);
- out = IState(bindings, stack)
case DUP2_X1 =>
val (one, two) = stack.pop2
@@ -771,7 +758,6 @@ abstract class ICodeReader extends ClassfileParser {
val three = stack.pop
push(two); push(one); push(three); push(two); push(one);
}
- out = IState(bindings, stack)
case DUP2_X2 =>
val (one, two) = stack.pop2
@@ -790,10 +776,9 @@ abstract class ICodeReader extends ClassfileParser {
push(two); push(one); push(four); push(one); push(three); push(two); push(one);
}
}
- out = IState(bindings, stack)
case _ =>
- out = super.interpret(in, i)
+ super.mutatingInterpret(out, i)
}
out
}
@@ -809,9 +794,9 @@ abstract class ICodeReader extends ClassfileParser {
case DUP_X1 =>
val one = info.stack.types(0)
val two = info.stack.types(1)
- assert(!one.isWideType, "DUP_X1 expects values of size 1 on top of stack " + info.stack);
- val tmp1 = freshLocal(one);
- val tmp2 = freshLocal(two);
+ assert(!one.isWideType, "DUP_X1 expects values of size 1 on top of stack " + info.stack)
+ val tmp1 = freshLocal(one)
+ val tmp2 = freshLocal(two)
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
@@ -821,9 +806,9 @@ abstract class ICodeReader extends ClassfileParser {
case DUP_X2 =>
val one = info.stack.types(0)
val two = info.stack.types(1)
- assert (!one.isWideType, "DUP_X2 expects values of size 1 on top of stack " + info.stack);
- val tmp1 = freshLocal(one);
- val tmp2 = freshLocal(two);
+ assert (!one.isWideType, "DUP_X2 expects values of size 1 on top of stack " + info.stack)
+ val tmp1 = freshLocal(one)
+ val tmp2 = freshLocal(two)
if (two.isWideType)
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
@@ -854,7 +839,7 @@ abstract class ICodeReader extends ClassfileParser {
LOAD_LOCAL(tmp2),
LOAD_LOCAL(tmp1)));
} else {
- val tmp3 = freshLocal(info.stack.types(2));
+ val tmp3 = freshLocal(info.stack.types(2))
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
STORE_LOCAL(tmp3),
@@ -867,8 +852,8 @@ abstract class ICodeReader extends ClassfileParser {
case DUP2_X2 =>
val one = info.stack.types(0)
val two = info.stack.types(1)
- val tmp1 = freshLocal(one);
- val tmp2 = freshLocal(two);
+ val tmp1 = freshLocal(one)
+ val tmp2 = freshLocal(two)
if (one.isWideType && two.isWideType) {
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
@@ -924,38 +909,33 @@ abstract class ICodeReader extends ClassfileParser {
/** Recover def-use chains for NEW and initializers. */
def resolveNEWs() {
import opcodes._
-
val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis
rdef.init(method)
rdef.run
- for (bb <- method.code.blocks) {
- var info = rdef.in(bb)
- for ((i, idx) <- bb.toList.zipWithIndex) i match {
- case CALL_METHOD(m, Static(true)) if m.isClassConstructor =>
- val defs = rdef.findDefs(bb, idx, 1, m.info.paramTypes.length)
- if (settings.debug.value) log("ctor: " + i + " found defs: " + defs)
- assert(defs.length == 1, "wrong defs at bb " + bb + "\n" + method.dump + rdef)
- val (bb1, idx1) = defs.head
- var producer = bb1(idx1)
- while (producer.isInstanceOf[DUP]) {
- val (bb2, idx2) = rdef.findDefs(bb1, idx1, 1).head
- producer = bb2(idx2)
- }
- producer match {
- case nw: NEW => nw.init = i.asInstanceOf[CALL_METHOD]
- case _: THIS => () // super constructor call
- case _ => assert(false, producer + "\n" + method.dump)
+ for (bb <- method.code.blocks ; (i, idx) <- bb.toList.zipWithIndex) i match {
+ case cm @ CALL_METHOD(m, Static(true)) if m.isClassConstructor =>
+ def loop(bb0: BasicBlock, idx0: Int, depth: Int = 0): Unit = {
+ rdef.findDefs(bb0, idx0, 1, depth) match {
+ case ((bb1, idx1)) :: _ =>
+ bb1(idx1) match {
+ case _: DUP => loop(bb1, idx1, 0)
+ case x: NEW => x.init = cm
+ case _: THIS => () // super constructor call
+ case producer => dumpMethodAndAbort(method, "producer: " + producer)
+ }
+ case _ => ()
}
- case _ =>
- }
- }
+ }
+ loop(bb, idx, m.info.paramTypes.length)
+ case _ => ()
+ }
}
/** Return the local at given index, with the given type. */
def getLocal(idx: Int, kind: TypeKind): Local = {
- assert(idx < maxLocals, "Index too large for local variable.");
+ assert(idx < maxLocals, "Index too large for local variable.")
def checkValidIndex() {
locals.get(idx - 1) match {
@@ -985,7 +965,7 @@ abstract class ICodeReader extends ClassfileParser {
case None =>
checkValidIndex
val l = freshLocal(idx, kind, false)
- log("Added new local for idx " + idx + ": " + kind)
+ debuglog("Added new local for idx " + idx + ": " + kind)
locals += (idx -> List((l, kind)))
l
}
@@ -996,7 +976,7 @@ abstract class ICodeReader extends ClassfileParser {
/** Return a fresh Local variable for the given index.
*/
private def freshLocal(idx: Int, kind: TypeKind, isArg: Boolean) = {
- val sym = method.symbol.newVariable(NoPosition, "loc" + idx).setInfo(kind.toType);
+ val sym = method.symbol.newVariable(newTermName("loc" + idx)).setInfo(kind.toType);
val l = new Local(sym, kind, isArg)
method.addLocal(l)
l
@@ -1013,9 +993,9 @@ abstract class ICodeReader extends ClassfileParser {
/** add a method param with the given index. */
def enterParam(idx: Int, kind: TypeKind) = {
- val sym = method.symbol.newVariable(NoPosition, "par" + idx).setInfo(kind.toType);
+ val sym = method.symbol.newVariable(newTermName("par" + idx)).setInfo(kind.toType)
val l = new Local(sym, kind, true)
- assert(!locals.isDefinedAt(idx))
+ assert(!locals.isDefinedAt(idx), locals(idx))
locals += (idx -> List((l, kind)))
l
}
@@ -1029,20 +1009,20 @@ abstract class ICodeReader extends ClassfileParser {
case class LJUMP(pc: Int) extends LazyJump(pc);
case class LCJUMP(success: Int, failure: Int, cond: TestOp, kind: TypeKind)
extends LazyJump(success) {
- override def toString(): String ="LCJUMP (" + kind + ") " + success + " : " + failure;
+ override def toString(): String = "LCJUMP (" + kind + ") " + success + " : " + failure
jmpTargets += failure
}
case class LCZJUMP(success: Int, failure: Int, cond: TestOp, kind: TypeKind)
extends LazyJump(success) {
- override def toString(): String ="LCZJUMP (" + kind + ") " + success + " : " + failure;
+ override def toString(): String = "LCZJUMP (" + kind + ") " + success + " : " + failure
jmpTargets += failure
}
case class LSWITCH(tags: List[List[Int]], targets: List[Int]) extends LazyJump(targets.head) {
- override def toString(): String ="LSWITCH (tags: " + tags + ") targets: " + targets;
+ override def toString(): String = "LSWITCH (tags: " + tags + ") targets: " + targets
jmpTargets ++= targets.tail
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
deleted file mode 100644
index 728593a..0000000
--- a/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
+++ /dev/null
@@ -1,166 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package symtab
-package classfile
-
-import java.util.{StringTokenizer, NoSuchElementException}
-
-import scala.collection.mutable.ListBuffer
-
-abstract class MetaParser{
-
- val global: Global
- import global._
-
- private var scanner: StringTokenizer = _
- private var owner: Symbol = _
- private var ownertype: Type = _
- private var token: String = _
- private var locals: Scope = null
-
- def parse(meta: String, sym: Symbol, symtype: Type) {
- //System.out.println("parse meta for " + sym + ":" + meta + ", locals = " + locals);//DEBUG
- this.scanner = new StringTokenizer(meta, "()[], \t<;", true)
- this.owner = sym
- this.ownertype = symtype
- nextToken()
- if (token == "class") parseClass()
- else if (token == "method") parseMethod()
- else if (token == "field") parseField()
- else if (token == "constr") parseConstr()
- else owner.setInfo(symtype);
- }
-
- protected def nextToken() {
- try {
- do { token = scanner.nextToken().trim() } while (token.length() == 0)
- } catch {
- case ex: NoSuchElementException => token = ""
- }
- }
-
- protected def parseType(): Type = {
- val str = token
- nextToken()
- val sym = locals.lookup(newTypeName(str))
- if (sym != NoSymbol) sym.tpe
- else {
- val tp = definitions.getClass(str).tpe;
- if (token != "[") tp
- else {
- val args = new ListBuffer[Type];
- do {
- nextToken(); args += parseType();
- } while (token == ",");
- nextToken();
- appliedType(tp, args.toList)
- }
- }
- }
-
- protected def parseTypeParam(): Symbol = {
- val vflag =
- if (token == "+") { nextToken(); Flags.COVARIANT }
- else if (token == "-") { nextToken(); Flags.CONTRAVARIANT }
- else 0;
- assert(token startsWith "?", token)
- val sym = owner.newTypeParameter(NoPosition, newTypeName(token)).setFlag(vflag)
- nextToken()
- val lo =
- if (token == ">") { nextToken(); parseType() }
- else definitions.NothingClass.tpe
- val hi =
- if (token == "<") { nextToken(); parseType() }
- else definitions.AnyClass.tpe
- sym.setInfo(TypeBounds(lo, hi))
- locals enter sym;
- sym
- }
-
- protected def parseTypeParams(): List[Symbol] = {
- nextToken()
- val syms = new ListBuffer[Symbol]
- if (token != "]") {
- syms += parseTypeParam()
- while (token == ",") {
- nextToken(); syms += parseTypeParam();
- }
- }
- assert(token == "]")
- syms.toList
- }
-
- protected def parseParams(): List[Type] = {
- nextToken()
- val tps = new ListBuffer[Type]
- if (token != ")") {
- tps += parseType()
- while (token == ",") {
- nextToken(); tps += parseType()
- }
- }
- assert(token == ")")
- tps.toList
- }
-
- protected def parseClass() {
- locals = new Scope
- def parse(): Type = {
- nextToken()
- if (token == "[") {
- PolyType(parseTypeParams(), parse())
- } else if (token == "extends") {
- val tps = new ListBuffer[Type]
- do {
- nextToken(); tps += parseType()
- } while (token == "with");
- ownertype match {
- case ClassInfoType(parents, decls, clazz) =>
- ClassInfoType(tps.toList, decls, clazz)
- }
- } else ownertype
- }
- owner.setInfo(parse())
- assert(token == ";")
- }
-
- protected def parseMethod() {
- val globals = locals
- locals = if (locals eq null) new Scope else new Scope(locals)
- def parse(): Type = {
- nextToken();
- if (token == "[") PolyType(parseTypeParams(), parse())
- else if (token == "(") {
- val formals = parseParams()
- MethodType(owner.newSyntheticValueParams(formals), parse())
- }
- else parseType()
- }
- owner.setInfo(parse())
- locals = globals
- assert(token == ";")
- }
-
- protected def parseField() {
- nextToken()
- owner.setInfo(parseType())
- assert(token == ";")
- }
-
- protected def parseConstr() {
- def parse(): Type = {
- nextToken()
- if (token == "(") {
- val formals = parseParams()
- MethodType(owner.newSyntheticValueParams(formals), parse())
- }
- else owner.owner.tpe
- }
- owner.setInfo(parse())
- assert(token == ";")
- }
-}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 862e37e..ed7eb6d 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -10,7 +10,7 @@ package classfile
import java.lang.Float.floatToIntBits
import java.lang.Double.doubleToLongBits
import scala.io.Codec
-import reflect.generic.{ PickleBuffer, PickleFormat }
+import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
import scala.collection.mutable.LinkedHashMap
import PickleFormat._
import Flags._
@@ -30,6 +30,8 @@ abstract class Pickler extends SubComponent {
val phaseName = "pickler"
+ currentRun
+
def newPhase(prev: Phase): StdPhase = new PicklePhase(prev)
class PicklePhase(prev: Phase) extends StdPhase(prev) {
@@ -37,7 +39,7 @@ abstract class Pickler extends SubComponent {
def pickle(tree: Tree) {
def add(sym: Symbol, pickle: Pickle) = {
if (currentRun.compiles(sym) && !currentRun.symData.contains(sym)) {
- if (settings.debug.value) log("pickling " + sym)
+ debuglog("pickling " + sym)
pickle putSymbol sym
currentRun.symData(sym) = pickle
}
@@ -57,12 +59,19 @@ abstract class Pickler extends SubComponent {
}
}
// If there are any erroneous types in the tree, then we will crash
- // when we pickle it: so let's report an erorr instead. We know next
+ // when we pickle it: so let's report an error instead. We know next
// to nothing about what happened, but our supposition is a lot better
// than "bad type: <error>" in terms of explanatory power.
- for (t <- unit.body ; if t.isErroneous) {
- unit.error(t.pos, "erroneous or inaccessible type")
- return
+ for (t <- unit.body) {
+ if (t.isErroneous) {
+ unit.error(t.pos, "erroneous or inaccessible type")
+ return
+ }
+
+ if (!t.isDef && t.hasSymbol && t.symbol.isTermMacro) {
+ unit.error(t.pos, "macro has not been expanded")
+ return
+ }
}
pickle(unit.body)
@@ -75,7 +84,7 @@ abstract class Pickler extends SubComponent {
private var entries = new Array[AnyRef](256)
private var ep = 0
private val index = new LinkedHashMap[AnyRef, Int]
- private lazy val nonClassRoot = root.ownersIterator.find(! _.isClass) getOrElse NoSymbol
+ private lazy val nonClassRoot = findOrElse(root.ownersIterator)(!_.isClass)(NoSymbol)
private def isRootSym(sym: Symbol) =
sym.name.toTermName == rootName && sym.owner == rootOwner
@@ -104,10 +113,6 @@ abstract class Pickler extends SubComponent {
sym.isParameter ||
isLocal(sym.owner))
- private def staticAnnotations(annots: List[AnnotationInfo]) =
- annots filter(ann =>
- ann.atp.typeSymbol isNonBottomSubClass definitions.StaticAnnotationClass)
-
// Phase 1 methods: Populate entries/index ------------------------------------
/** Store entry e in index at next available position unless
@@ -144,19 +149,17 @@ abstract class Pickler extends SubComponent {
putType(sym.typeOfThis);
putSymbol(sym.alias)
if (!sym.children.isEmpty) {
- val (locals, globals) = sym.children.toList.partition(_.isLocalClass)
+ val (locals, globals) = sym.children partition (_.isLocalClass)
val children =
if (locals.isEmpty) globals
- else {
- val localChildDummy = sym.newClass(sym.pos, tpnme.LOCAL_CHILD)
- localChildDummy.setInfo(ClassInfoType(List(sym.tpe), EmptyScope, localChildDummy))
- localChildDummy :: globals
- }
- putChildren(sym, children sortBy (_.sealedSortName))
+ else globals + sym.newClassWithInfo(tpnme.LOCAL_CHILD, List(sym.tpe), EmptyScope, pos = sym.pos)
+
+ putChildren(sym, children.toList sortBy (_.sealedSortName))
}
- for (annot <- staticAnnotations(sym.annotations.reverse))
+ for (annot <- (sym.annotations filter (ann => ann.isStatic && !ann.isErroneous)).reverse)
putAnnotation(sym, annot)
- } else if (sym != NoSymbol) {
+ }
+ else if (sym != NoSymbol) {
putEntry(if (sym.isModuleClass) sym.name.toTermName else sym.name)
if (!sym.owner.isRoot) putSymbol(sym.owner)
}
@@ -191,7 +194,7 @@ abstract class Pickler extends SubComponent {
case RefinedType(parents, decls) =>
val rclazz = tp.typeSymbol
for (m <- decls.iterator)
- if (m.owner != rclazz) assert(false, "bad refinement member "+m+" of "+tp+", owner = "+m.owner)
+ if (m.owner != rclazz) abort("bad refinement member "+m+" of "+tp+", owner = "+m.owner)
putSymbol(rclazz); putTypes(parents); putSymbols(decls.toList)
case ClassInfoType(parents, decls, clazz) =>
putSymbol(clazz); putTypes(parents); putSymbols(decls.toList)
@@ -218,7 +221,7 @@ abstract class Pickler extends SubComponent {
case AnnotatedType(annotations, underlying, selfsym) =>
putType(underlying)
if (settings.selfInAnnots.value) putSymbol(selfsym)
- putAnnotations(staticAnnotations(annotations))
+ putAnnotations(annotations filter (_.isStatic))
case _ =>
throw new FatalError("bad type: " + tp + "(" + tp.getClass + ")")
}
@@ -286,7 +289,6 @@ abstract class Pickler extends SubComponent {
putTree(definition)
*/
case Template(parents, self, body) =>
- writeNat(parents.length)
putTrees(parents)
putTree(self)
putTrees(body)
@@ -420,7 +422,7 @@ abstract class Pickler extends SubComponent {
* argument of some Annotation */
private def putMods(mods: Modifiers) = if (putEntry(mods)) {
// annotations in Modifiers are removed by the typechecker
- val Modifiers(flags, privateWithin, Nil, _) = mods
+ val Modifiers(flags, privateWithin, Nil) = mods
putEntry(privateWithin)
}
@@ -429,7 +431,7 @@ abstract class Pickler extends SubComponent {
private def putConstant(c: Constant) {
if (putEntry(c)) {
if (c.tag == StringTag) putEntry(newTermName(c.stringValue))
- else if (c.tag == ClassTag) putType(c.typeValue)
+ else if (c.tag == ClazzTag) putType(c.typeValue)
else if (c.tag == EnumTag) putSymbol(c.symbolValue)
}
}
@@ -500,7 +502,7 @@ abstract class Pickler extends SubComponent {
private def writeSymInfo(sym: Symbol) {
writeRef(sym.name)
writeRef(localizedOwner(sym))
- writeLongNat((rawFlagsToPickled(sym.flags & PickledFlags)))
+ writeLongNat((rawToPickledFlags(sym.flags & PickledFlags)))
if (sym.hasAccessBoundary) writeRef(sym.privateWithin)
writeRef(sym.info)
}
@@ -509,7 +511,7 @@ abstract class Pickler extends SubComponent {
private def writeName(name: Name) {
ensureCapacity(name.length * 3)
val utfBytes = Codec toUTF8 name.toString
- compat.Platform.arraycopy(utfBytes, 0, bytes, writeIndex, utfBytes.length)
+ scala.compat.Platform.arraycopy(utfBytes, 0, bytes, writeIndex, utfBytes.length)
writeIndex += utfBytes.length
}
@@ -610,21 +612,19 @@ abstract class Pickler extends SubComponent {
else if (c.tag == FloatTag) writeLong(floatToIntBits(c.floatValue))
else if (c.tag == DoubleTag) writeLong(doubleToLongBits(c.doubleValue))
else if (c.tag == StringTag) writeRef(newTermName(c.stringValue))
- else if (c.tag == ClassTag) writeRef(c.typeValue)
+ else if (c.tag == ClazzTag) writeRef(c.typeValue)
else if (c.tag == EnumTag) writeRef(c.symbolValue)
LITERAL + c.tag // also treats UnitTag, NullTag; no value required
case AnnotatedType(annotations, tp, selfsym) =>
- val staticAnnots = staticAnnotations(annotations)
- if (staticAnnots isEmpty) {
- writeBody(tp) // write the underlying type if there are no annotations
- } else {
- if (settings.selfInAnnots.value && selfsym != NoSymbol)
- writeRef(selfsym)
- writeRef(tp)
- writeRefs(staticAnnots)
- ANNOTATEDtpe
+ annotations filter (_.isStatic) match {
+ case Nil => writeBody(tp) // write the underlying type if there are no annotations
+ case staticAnnots =>
+ if (settings.selfInAnnots.value && selfsym != NoSymbol)
+ writeRef(selfsym)
+ writeRef(tp)
+ writeRefs(staticAnnots)
+ ANNOTATEDtpe
}
-
// annotations attached to a symbol (i.e. annots on terms)
case (target: Symbol, annot at AnnotationInfo(_, _, _)) =>
writeRef(target)
@@ -964,8 +964,8 @@ abstract class Pickler extends SubComponent {
writeRefs(whereClauses)
TREE
- case Modifiers(flags, privateWithin, _, _) =>
- val pflags = rawFlagsToPickled(flags)
+ case Modifiers(flags, privateWithin, _) =>
+ val pflags = rawToPickledFlags(flags)
writeNat((pflags >> 32).toInt)
writeNat((pflags & 0xFFFFFFFF).toInt)
writeRef(privateWithin)
@@ -1065,7 +1065,7 @@ abstract class Pickler extends SubComponent {
else if (c.tag == FloatTag) print("Float "+c.floatValue)
else if (c.tag == DoubleTag) print("Double "+c.doubleValue)
else if (c.tag == StringTag) { print("String "); printRef(newTermName(c.stringValue)) }
- else if (c.tag == ClassTag) { print("Class "); printRef(c.typeValue) }
+ else if (c.tag == ClazzTag) { print("Class "); printRef(c.typeValue) }
else if (c.tag == EnumTag) { print("Enum "); printRef(c.symbolValue) }
case AnnotatedType(annots, tp, selfsym) =>
if (settings.selfInAnnots.value) {
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
deleted file mode 100644
index e7a3d37..0000000
--- a/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
+++ /dev/null
@@ -1,103 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package symtab
-package classfile
-
-import Flags._
-import scala.reflect.generic.PickleFormat._
-import java.io.IOException
-
-/** @author Martin Odersky
- * @version 1.0
- */
-abstract class UnPickler extends reflect.generic.UnPickler {
- val global: Global
- import global._
-
- def scan(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) =
- new CompileScan(bytes, offset, classRoot, moduleRoot, filename).run()
-
- class CompileScan(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String)
- extends Scan(bytes, offset, classRoot, moduleRoot, filename) {
-
- protected override def debug = settings.debug.value
-
- // override def noSuchTypeTag(tag: Int, end: Int): Type = {
- // tag match {
- // case DEBRUIJNINDEXtpe =>
- // DeBruijnIndex(readNat(), readNat())
- // case _ =>
- // super.noSuchTypeTag(tag, end)
- // }
- // }
-
- override protected def errorMissingRequirement(name: Name, owner: Symbol) =
- errorMissingRequirement(
- "reference " + (if (name.isTypeName) "type " else "value ") +
- name.decode + " of " + owner.tpe.widen + " refers to nonexisting symbol.")
-
- def inferMethodAlternative(fun: Tree, argtpes: List[Type], restpe: Type) =
- typer.infer.inferMethodAlternative(fun, List(), argtpes, restpe)
-
- def newLazyTypeRef(i: Int): LazyType = new LazyTypeRef(i)
- def newLazyTypeRefAndAlias(i: Int, j: Int): LazyType = new LazyTypeRefAndAlias(i, j)
-
- /** Convert to a type error, that is printed gracefully instead of crashing.
- *
- * Similar in intent to what SymbolLoader does (but here we don't have access to
- * error reporting, so we rely on the typechecker to report the error).
- */
- def toTypeError(e: MissingRequirementError) =
- new TypeError(e.msg)
-
- /** Convert to a type error, that is printed gracefully instead of crashing.
- *
- * Similar in intent to what SymbolLoader does (but here we don't have access to
- * error reporting, so we rely on the typechecker to report the error).
- *
- * @note Unlike 2.10, 2.9 may throw either IOException or MissingRequirementError. This
- * simply tries to make it more robust.
- */
- def toTypeError(e: IOException) =
- new TypeError(e.getMessage)
-
- /** A lazy type which when completed returns type at index `i`. */
- private class LazyTypeRef(i: Int) extends LazyType {
- private val definedAtRunId = currentRunId
- private val p = phase
- override def complete(sym: Symbol) : Unit = try {
- val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType`
- if (p != phase) atPhase(p) (sym setInfo tp)
- else sym setInfo tp
- if (currentRunId != definedAtRunId) sym.setInfo(adaptToNewRunMap(tp))
- } catch {
- case e: MissingRequirementError => throw toTypeError(e)
- case e: IOException => throw toTypeError(e)
- }
- override def load(sym: Symbol) { complete(sym) }
- }
-
- /** A lazy type which when completed returns type at index `i` and sets alias
- * of completed symbol to symbol at index `j`.
- */
- private class LazyTypeRefAndAlias(i: Int, j: Int) extends LazyTypeRef(i) {
- override def complete(sym: Symbol) = try {
- super.complete(sym)
- var alias = at(j, readSymbol)
- if (alias.isOverloaded) {
- atPhase(currentRun.picklerPhase) {
- alias = alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))
- }
- }
- sym.asInstanceOf[TermSymbol].setAlias(alias)
- } catch {
- case e: MissingRequirementError => throw toTypeError(e)
- case e: IOException => throw toTypeError(e)
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/package.scala b/src/compiler/scala/tools/nsc/symtab/classfile/package.scala
new file mode 100644
index 0000000..1f9a823
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/package.scala
@@ -0,0 +1,7 @@
+package scala.tools.nsc.symtab
+
+package object classfile {
+
+ val ClassfileConstants = scala.reflect.internal.ClassfileConstants
+
+}
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala b/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala
index cf1f7f1..40189b9 100644
--- a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala
+++ b/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala
@@ -1,5 +1,5 @@
/* NSC -- new scala compiler
- * Copyright 2004-2011 LAMP/EPFL
+ * Copyright 2004-2013 LAMP/EPFL
*/
@@ -10,11 +10,9 @@ package clr
import java.io.File
import java.util.{Comparator, StringTokenizer}
import scala.util.Sorting
-
import ch.epfl.lamp.compiler.msil._
-
-import scala.collection.mutable.{ListBuffer, Map, HashMap, Set, HashSet}
-import scala.tools.nsc.util.{Position, NoPosition}
+import scala.collection.{ mutable, immutable }
+import scala.reflect.internal.util.{Position, NoPosition}
/**
* Collects all types from all reference assemblies.
@@ -56,13 +54,13 @@ abstract class CLRTypes {
var DELEGATE_COMBINE: MethodInfo = _
var DELEGATE_REMOVE: MethodInfo = _
- val types: Map[Symbol,Type] = new HashMap
- val constructors: Map[Symbol,ConstructorInfo] = new HashMap
- val methods: Map[Symbol,MethodInfo] = new HashMap
- val fields: Map[Symbol, FieldInfo] = new HashMap
- val sym2type: Map[Type,Symbol] = new HashMap
- val addressOfViews: HashSet[Symbol] = new HashSet[Symbol]
- val mdgptrcls4clssym: Map[ /*cls*/ Symbol, /*cls*/ Symbol] = new HashMap
+ val types: mutable.Map[Symbol,Type] = new mutable.HashMap
+ val constructors: mutable.Map[Symbol,ConstructorInfo] = new mutable.HashMap
+ val methods: mutable.Map[Symbol,MethodInfo] = new mutable.HashMap
+ val fields: mutable.Map[Symbol, FieldInfo] = new mutable.HashMap
+ val sym2type: mutable.Map[Type,Symbol] = new mutable.HashMap
+ val addressOfViews = new mutable.HashSet[Symbol]
+ val mdgptrcls4clssym: mutable.Map[ /*cls*/ Symbol, /*cls*/ Symbol] = new mutable.HashMap
def isAddressOf(msym : Symbol) = addressOfViews.contains(msym)
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
index 37b81a8..5a0253c 100644
--- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
@@ -1,19 +1,18 @@
/* NSC -- new scala compiler
- * Copyright 2004-2011 LAMP/EPFL
+ * Copyright 2004-2013 LAMP/EPFL
*/
-
package scala.tools.nsc
package symtab
package clr
import java.io.IOException
-
+import io.MsilFile
import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute, _}
-
-import scala.collection.mutable.{HashMap, HashSet}
-import classfile.UnPickler
+import scala.collection.{ mutable, immutable }
+import scala.reflect.internal.pickling.UnPickler
import ch.epfl.lamp.compiler.msil.Type.TMVarUsage
+import scala.language.implicitConversions
/**
* @author Nikolay Mihaylov
@@ -65,11 +64,11 @@ abstract class TypeParser {
busy = false
}
- class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType {
+ class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
}
- /* the names `classTParams' and `newTParams' stem from the forJVM version (ClassfileParser.sigToType())
+ /* the names `classTParams` and `newTParams` stem from the forJVM version (ClassfileParser.sigToType())
* but there are differences that should be kept in mind.
* forMSIL, a nested class knows nothing about any type-params in the nesting class,
* therefore newTParams is redundant (other than for recording lexical order),
@@ -108,7 +107,7 @@ abstract class TypeParser {
val method = new ConstructorInfo(declType, attrs, Array[MSILType]())
val flags = Flags.JAVA
val owner = clazz
- val methodSym = owner.newMethod(NoPosition, nme.CONSTRUCTOR).setFlag(flags)
+ val methodSym = owner.newMethod(nme.CONSTRUCTOR, NoPosition, flags)
val rettype = clazz.tpe
val mtype = methodType(Array[MSILType](), rettype);
val mInfo = mtype(methodSym)
@@ -155,8 +154,8 @@ abstract class TypeParser {
val canBeTakenAddressOf = (typ.IsValueType || typ.IsEnum) && (typ.FullName != "System.Enum")
if(canBeTakenAddressOf) {
- clazzBoxed = clazz.owner.newClass(clazz.name.toTypeName append "Boxed")
- clazzMgdPtr = clazz.owner.newClass(clazz.name.toTypeName append "MgdPtr")
+ clazzBoxed = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("Boxed"))
+ clazzMgdPtr = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("MgdPtr"))
clrTypes.mdgptrcls4clssym(clazz) = clazzMgdPtr
/* adding typMgdPtr to clrTypes.sym2type should happen early (before metadata for supertypes is parsed,
before metadata for members are parsed) so that clazzMgdPtr can be found by getClRType. */
@@ -165,7 +164,7 @@ abstract class TypeParser {
clrTypes.sym2type(typMgdPtr) = clazzMgdPtr
/* clazzMgdPtr but not clazzBoxed is mapped by clrTypes.types into an msil.Type instance,
because there's no metadata-level representation for a "boxed valuetype" */
- val instanceDefsMgdPtr = new Scope
+ val instanceDefsMgdPtr = newScope
val classInfoMgdPtr = ClassInfoType(definitions.anyvalparam, instanceDefsMgdPtr, clazzMgdPtr)
clazzMgdPtr.setFlag(flags)
clazzMgdPtr.setInfo(classInfoMgdPtr)
@@ -175,7 +174,7 @@ abstract class TypeParser {
// first pass
for (tvarCILDef <- typ.getSortedTVars() ) {
val tpname = newTypeName(tvarCILDef.Name.replaceAll("!", "")) // TODO are really all type-params named in all assemblies out there? (NO)
- val tpsym = clazz.newTypeParameter(NoPosition, tpname)
+ val tpsym = clazz.newTypeParameter(tpname)
classTParams.put(tvarCILDef.Number, tpsym)
newTParams += tpsym
// TODO wouldn't the following also be needed later, i.e. during getCLRType
@@ -196,8 +195,8 @@ abstract class TypeParser {
}
}
/* END CLR generics (snippet 2) */
- instanceDefs = new Scope
- staticDefs = new Scope
+ instanceDefs = newScope
+ staticDefs = newScope
val classInfoAsInMetadata = {
val ifaces: Array[MSILType] = typ.getInterfaces()
@@ -212,7 +211,7 @@ abstract class TypeParser {
}
// methods, properties, events, fields are entered in a moment
if (canBeTakenAddressOf) {
- val instanceDefsBoxed = new Scope
+ val instanceDefsBoxed = newScope
ClassInfoType(parents.toList, instanceDefsBoxed, clazzBoxed)
} else
ClassInfoType(parents.toList, instanceDefs, clazz)
@@ -224,14 +223,14 @@ abstract class TypeParser {
if (canBeTakenAddressOf) {
clazzBoxed.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata
- else polyType(ownTypeParams, classInfoAsInMetadata) )
+ else genPolyType(ownTypeParams, classInfoAsInMetadata) )
clazzBoxed.setFlag(flags)
val rawValueInfoType = ClassInfoType(definitions.anyvalparam, instanceDefs, clazz)
clazz.setInfo( if (ownTypeParams.isEmpty) rawValueInfoType
- else polyType(ownTypeParams, rawValueInfoType) )
+ else genPolyType(ownTypeParams, rawValueInfoType) )
} else {
clazz.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata
- else polyType(ownTypeParams, classInfoAsInMetadata) )
+ else genPolyType(ownTypeParams, classInfoAsInMetadata) )
}
// TODO I don't remember if statics.setInfo and staticModule.setInfo should also know about type params
@@ -258,9 +257,9 @@ abstract class TypeParser {
for (ntype <- typ.getNestedTypes() if !(ntype.IsNestedPrivate || ntype.IsNestedAssembly || ntype.IsNestedFamANDAssem)
|| ntype.IsInterface /* TODO why shouldn't nested ifaces be type-parsed too? */ )
{
- val loader = new loaders.MSILTypeLoader(ntype)
- val nclazz = statics.newClass(NoPosition, ntype.Name.toTypeName)
- val nmodule = statics.newModule(NoPosition, ntype.Name)
+ val loader = new loaders.MsilFileLoader(new MsilFile(ntype))
+ val nclazz = statics.newClass(ntype.Name)
+ val nmodule = statics.newModule(ntype.Name)
nclazz.setInfo(loader)
nmodule.setInfo(loader)
staticDefs.enter(nclazz)
@@ -284,7 +283,7 @@ abstract class TypeParser {
else
getCLRType(field.FieldType)
val owner = if (field.IsStatic()) statics else clazz;
- val sym = owner.newValue(NoPosition, name).setFlag(flags).setInfo(fieldType);
+ val sym = owner.newValue(name, NoPosition, flags).setInfo(fieldType);
// TODO: set private within!!! -> look at typechecker/Namers.scala
(if (field.IsStatic()) staticDefs else instanceDefs).enter(sym);
clrTypes.fields(sym) = field;
@@ -295,7 +294,7 @@ abstract class TypeParser {
createMethod(constr);
// initially also contains getters and setters of properties.
- val methodsSet = new HashSet[MethodInfo]();
+ val methodsSet = new mutable.HashSet[MethodInfo]();
methodsSet ++= typ.getMethods();
for (prop <- typ.getProperties) {
@@ -310,10 +309,10 @@ abstract class TypeParser {
assert(prop.PropertyType == getter.ReturnType);
val gparams: Array[ParameterInfo] = getter.GetParameters();
gparamsLength = gparams.length;
- val name: Name = if (gparamsLength == 0) prop.Name else nme.apply;
+ val name: TermName = if (gparamsLength == 0) prop.Name else nme.apply;
val flags = translateAttributes(getter);
val owner: Symbol = if (getter.IsStatic) statics else clazz;
- val methodSym = owner.newMethod(NoPosition, name).setFlag(flags)
+ val methodSym = owner.newMethod(name, NoPosition, flags)
val mtype: Type = if (gparamsLength == 0) NullaryMethodType(propType) // .NET properties can't be polymorphic
else methodType(getter, getter.ReturnType)(methodSym)
methodSym.setInfo(mtype);
@@ -332,12 +331,12 @@ abstract class TypeParser {
if(getter != null)
assert(sparams.length == gparamsLength + 1, "" + getter + "; " + setter);
- val name: Name = if (gparamsLength == 0) nme.getterToSetter(prop.Name)
+ val name: TermName = if (gparamsLength == 0) nme.getterToSetter(prop.Name)
else nme.update;
val flags = translateAttributes(setter);
val mtype = methodType(setter, definitions.UnitClass.tpe);
val owner: Symbol = if (setter.IsStatic) statics else clazz;
- val methodSym = owner.newMethod(NoPosition, name).setFlag(flags)
+ val methodSym = owner.newMethod(name, NoPosition, flags)
methodSym.setInfo(mtype(methodSym))
methodSym.setFlag(Flags.ACCESSOR);
(if (setter.IsStatic) staticDefs else instanceDefs).enter(methodSym);
@@ -424,14 +423,14 @@ abstract class TypeParser {
val flags = Flags.JAVA | Flags.FINAL
for (cmpName <- ENUM_CMP_NAMES) {
- val enumCmp = clazz.newMethod(NoPosition, cmpName)
+ val enumCmp = clazz.newMethod(cmpName)
val enumCmpType = JavaMethodType(enumCmp.newSyntheticValueParams(List(clazz.tpe)), definitions.BooleanClass.tpe)
enumCmp.setFlag(flags).setInfo(enumCmpType)
instanceDefs.enter(enumCmp)
}
for (bitLogName <- ENUM_BIT_LOG_NAMES) {
- val enumBitLog = clazz.newMethod(NoPosition, bitLogName)
+ val enumBitLog = clazz.newMethod(bitLogName)
val enumBitLogType = JavaMethodType(enumBitLog.newSyntheticValueParams(List(clazz.tpe)), clazz.tpe /* was classInfo, infinite typer */)
enumBitLog.setFlag(flags).setInfo(enumBitLogType)
instanceDefs.enter(enumBitLog)
@@ -449,7 +448,7 @@ abstract class TypeParser {
// first pass
for (mvarCILDef <- method.getSortedMVars() ) {
val mtpname = newTypeName(mvarCILDef.Name.replaceAll("!", "")) // TODO are really all method-level-type-params named in all assemblies out there? (NO)
- val mtpsym = methodSym.newTypeParameter(NoPosition, mtpname)
+ val mtpsym = methodSym.newTypeParameter(mtpname)
methodTParams.put(mvarCILDef.Number, mtpsym)
newMethodTParams += mtpsym
// TODO wouldn't the following also be needed later, i.e. during getCLRType
@@ -469,7 +468,7 @@ abstract class TypeParser {
val flags = translateAttributes(method);
val owner = if (method.IsStatic()) statics else clazz;
- val methodSym = owner.newMethod(NoPosition, getName(method)).setFlag(flags)
+ val methodSym = owner.newMethod(getName(method), NoPosition, flags)
/* START CLR generics (snippet 3) */
val newMethodTParams = populateMethodTParams(method, methodSym)
/* END CLR generics (snippet 3) */
@@ -480,7 +479,7 @@ abstract class TypeParser {
val mtype = methodType(method, rettype);
if (mtype == null) return;
/* START CLR generics (snippet 4) */
- val mInfo = if (method.IsGeneric) polyType(newMethodTParams, mtype(methodSym))
+ val mInfo = if (method.IsGeneric) genPolyType(newMethodTParams, mtype(methodSym))
else mtype(methodSym)
/* END CLR generics (snippet 4) */
/* START CLR non-generics (snippet 4)
@@ -493,14 +492,14 @@ abstract class TypeParser {
else clrTypes.methods(methodSym) = method.asInstanceOf[MethodInfo];
}
- private def createMethod(name: Name, flags: Long, args: Array[MSILType], retType: MSILType, method: MethodInfo, statik: Boolean): Symbol = {
+ private def createMethod(name: TermName, flags: Long, args: Array[MSILType], retType: MSILType, method: MethodInfo, statik: Boolean): Symbol = {
val mtype = methodType(args, getCLSType(retType))
assert(mtype != null)
createMethod(name, flags, mtype, method, statik)
}
- private def createMethod(name: Name, flags: Long, mtype: Symbol => Type, method: MethodInfo, statik: Boolean): Symbol = {
- val methodSym: Symbol = (if (statik) statics else clazz).newMethod(NoPosition, name)
+ private def createMethod(name: TermName, flags: Long, mtype: Symbol => Type, method: MethodInfo, statik: Boolean): Symbol = {
+ val methodSym: Symbol = (if (statik) statics else clazz).newMethod(name)
methodSym.setFlag(flags).setInfo(mtype(methodSym))
(if (statik) staticDefs else instanceDefs).enter(methodSym)
if (method != null)
@@ -540,7 +539,7 @@ abstract class TypeParser {
s = createMethod(nme.MINUS, flags, args, typ, clrTypes.DELEGATE_REMOVE, false);
}
- private def getName(method: MethodBase): Name = {
+ private def getName(method: MethodBase): TermName = {
def operatorOverload(name : String, paramsArity : Int) : Option[Name] = paramsArity match {
case 1 => name match {
@@ -652,7 +651,7 @@ abstract class TypeParser {
private def getClassType(typ: MSILType): Type = {
assert(typ != null);
- val res = definitions.getClass(typ.FullName.replace('+', '.')).tpe;
+ val res = rootMirror.getClassByName(typ.FullName.replace('+', '.') : TypeName).tpe;
//if (res.isError())
// global.reporter.error("unknown class reference " + type.FullName);
res
diff --git a/src/compiler/scala/tools/nsc/symtab/package.scala b/src/compiler/scala/tools/nsc/symtab/package.scala
new file mode 100644
index 0000000..0e6719f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/symtab/package.scala
@@ -0,0 +1,7 @@
+package scala.tools.nsc
+
+package object symtab {
+
+ val Flags = scala.reflect.internal.Flags
+
+}
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index 78541db..bacd8c3 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -9,43 +9,28 @@ package transform
import symtab._
import Flags._
import scala.collection.{ mutable, immutable }
-import collection.mutable.ListBuffer
+import scala.collection.mutable.ListBuffer
-abstract class AddInterfaces extends InfoTransform {
+abstract class AddInterfaces extends InfoTransform { self: Erasure =>
import global._ // the global environment
import definitions._ // standard classes and methods
- /** <p>
- * The phase sets <code>lateINTERFACE</code> for non-interface traits
- * that now become interfaces.
- * </p>
- * <p>
- * It sets <code>lateDEFERRED</code> for formerly concrete methods in
- * such traits.
- * </p>
+ /** The phase sets lateINTERFACE for non-interface traits that now
+ * become interfaces. It sets lateDEFERRED for formerly concrete
+ * methods in such traits.
*/
override def phaseNewFlags: Long = lateDEFERRED | lateINTERFACE
- /** Type reference after erasure; to be defined in subclass
- * <code>Erasure</code>.
- */
- def erasedTypeRef(sym: Symbol): Type
-
- /** Erasure type-map; to be defined in subclass
- * <code>Erasure</code>.
- */
- def erasure: TypeMap
-
/** A lazily constructed map that associates every non-interface trait with
* its implementation class.
*/
- private val implClassMap = new mutable.HashMap[Symbol, Symbol]
+ private val implClassMap = perRunCaches.newMap[Symbol, Symbol]()
/** A lazily constructed map that associates every concrete method in a non-interface
* trait that's currently compiled with its corresponding method in the trait's
* implementation class.
*/
- private val implMethodMap = new mutable.HashMap[Symbol, Symbol]
+ private val implMethodMap = perRunCaches.newMap[Symbol, Symbol]()
override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = {
implClassMap.clear()
@@ -54,101 +39,129 @@ abstract class AddInterfaces extends InfoTransform {
}
/** Is given trait member symbol a member of the trait's interface
- * after this transform is performed? */
- private def isInterfaceMember(sym: Symbol): Boolean = {
- sym.isType ||
- { sym.info; // to set lateMETHOD flag if necessary
- sym.isMethod &&
- !sym.isLabel &&
- !sym.isPrivate &&
- (!(sym hasFlag BRIDGE) || sym.hasBridgeAnnotation) && // count @_$bridge$_ annotated classes as interface members
- !sym.isConstructor &&
- !sym.isImplOnly
+ * after this transform is performed?
+ */
+ private def isInterfaceMember(sym: Symbol) = (
+ sym.isType || {
+ sym.info // initialize to set lateMETHOD flag if necessary
+
+ ( sym.isMethod
+ && !sym.isLabel
+ && !sym.isPrivate
+ && (!(sym hasFlag BRIDGE) || sym.hasBridgeAnnotation) // count @bridge annotated classes as interface members
+ && !sym.isConstructor
+ && !sym.isImplOnly
+ )
}
- }
+ )
/** Does symbol need an implementation method? */
- private def needsImplMethod(sym: Symbol): Boolean =
- sym.isMethod && isInterfaceMember(sym) &&
- (!(sym hasFlag (DEFERRED | SUPERACCESSOR)) || (sym hasFlag lateDEFERRED))
+ private def needsImplMethod(sym: Symbol) = (
+ sym.isMethod
+ && isInterfaceMember(sym)
+ && (!sym.hasFlag(DEFERRED | SUPERACCESSOR) || (sym hasFlag lateDEFERRED))
+ )
def implClassPhase = currentRun.erasurePhase.next
- /** Return the implementation class of a trait; create a new one of one does not yet exist */
- def implClass(iface: Symbol): Symbol = implClassMap.getOrElse(iface, {
- atPhase(implClassPhase) {
- val implName = nme.implClassName(iface.name)
- var impl = if (iface.owner.isClass) iface.owner.info.decl(implName) else NoSymbol
- if (impl != NoSymbol && settings.XO.value) {
- log("unlinking impl class " + impl)
- iface.owner.info.decls.unlink(impl)
- impl = NoSymbol
+ private def newImplClass(iface: Symbol): Symbol = {
+ val inClass = iface.owner.isClass
+ val implName = tpnme.implClassName(iface.name)
+ val implFlags = (iface.flags & ~(INTERFACE | lateINTERFACE)) | IMPLCLASS
+
+ val impl0 = (
+ if (!inClass) NoSymbol
+ else iface.owner.info.decl(implName) match {
+ case NoSymbol => NoSymbol
+ case implSym =>
+ // Unlink a pre-existing symbol only if the implementation class is
+ // visible on the compilation classpath. In general this is true under
+ // -optimise and not otherwise, but the classpath can use arbitrary
+ // logic so the classpath must be queried.
+ if (classPath.context.isValidName(implName + ".class")) {
+ iface.owner.info.decls unlink implSym
+ NoSymbol
+ }
+ else {
+ log(s"not unlinking $iface's existing implClass ${implSym.name} because it is not on the classpath.")
+ implSym
+ }
}
- if (impl == NoSymbol) {
- impl = iface.cloneSymbolImpl(iface.owner)
- impl.name = implName
- impl.sourceFile = iface.sourceFile
- if (iface.owner.isClass)
- iface.owner.info.decls enter impl
+ )
+ val impl = impl0 orElse {
+ val impl = iface.owner.newImplClass(implName, iface.pos, implFlags)
+ if (iface.thisSym != iface) {
+ impl.typeOfThis = iface.typeOfThis
+ impl.thisSym setName iface.thisSym.name
}
- if (currentRun.compiles(iface)) currentRun.symSource(impl) = iface.sourceFile
- impl setPos iface.pos
- impl.flags = iface.flags & ~(INTERFACE | lateINTERFACE) | IMPLCLASS
- impl setInfo new LazyImplClassType(iface)
- implClassMap(iface) = impl
- if (settings.debug.value) log("generating impl class " + impl + " in " + iface.owner)//debug
+ impl.sourceFile = iface.sourceFile
+ if (inClass)
+ iface.owner.info.decls enter impl
+
impl
}
- })
-
- /** <p>
- * A lazy type to set the info of an implementation class
- * The parents of an implementation class for trait <code>iface</code> are:
- * </p>
- * <ul>
- * <li>superclass: <code>Object</code></li>
- * <li>
- * mixin classes: mixin classes of <code>iface</code> where every
- * non-interface trait is mapped to its implementation class, followed
- * by <code>iface</code> itself.
- * </li>
- * </ul>
- * <p>
- * The declarations of a mixin class are:
- * </p>
- * <ul>
- * <li>
- * for every interface member of <code>iface</code> its implementation
- * method, if one is needed.
- * </li>
- * <li>
- * every former member of <code>iface</code> that is implementation only
- * </li>
- * </ul>
+ if (currentRun compiles iface)
+ currentRun.symSource(impl) = iface.sourceFile
+
+ implClassMap(iface) = impl
+ impl setInfo new LazyImplClassType(iface)
+ }
+
+ /** Return the implementation class of a trait; create a new one of one does not yet exist */
+ def implClass(iface: Symbol): Symbol = {
+ iface.info
+
+ implClassMap.getOrElse(iface, atPhase(implClassPhase) {
+ if (iface.implClass eq NoSymbol)
+ debuglog(s"${iface.fullLocationString} has no implClass yet, creating it now.")
+ else
+ log(s"${iface.fullLocationString} impl class is ${iface.implClass.nameString}")
+
+ newImplClass(iface)
+ })
+ }
+
+ /** A lazy type to set the info of an implementation class
+ * The parents of an implementation class for trait iface are:
+ *
+ * - superclass: Object
+ * - mixin classes: mixin classes of iface where every non-interface
+ * trait is mapped to its implementation class, followed by iface itself.
+ *
+ * The declarations of a mixin class are:
+ * - for every interface member of iface: its implementation method, if one is needed
+ * - every former member of iface that is implementation only
*/
- private class LazyImplClassType(iface: Symbol) extends LazyType {
-
- /** Compute the decls of implementation class <code>implClass</code>,
- * given the decls <code>ifaceDecls</code> of its interface.
- *
- * @param implClass ...
- * @param ifaceDecls ...
- * @return ...
+ private class LazyImplClassType(iface: Symbol) extends LazyType with FlagAgnosticCompleter {
+ /** Compute the decls of implementation class implClass,
+ * given the decls ifaceDecls of its interface.
*/
private def implDecls(implClass: Symbol, ifaceDecls: Scope): Scope = {
- val decls = new Scope
- if ((ifaceDecls lookup nme.MIXIN_CONSTRUCTOR) == NoSymbol)
- decls enter (implClass.newMethod(implClass.pos, nme.MIXIN_CONSTRUCTOR)
- setInfo MethodType(List(), UnitClass.tpe))
- for (sym <- ifaceDecls.iterator) {
+ debuglog("LazyImplClassType calculating decls for " + implClass)
+
+ val decls = newScope
+ if ((ifaceDecls lookup nme.MIXIN_CONSTRUCTOR) == NoSymbol) {
+ log("Adding mixin constructor to " + implClass)
+
+ decls enter (
+ implClass.newMethod(nme.MIXIN_CONSTRUCTOR, implClass.pos)
+ setInfo MethodType(Nil, UnitClass.tpe)
+ )
+ }
+
+ for (sym <- ifaceDecls) {
if (isInterfaceMember(sym)) {
if (needsImplMethod(sym)) {
- val impl = sym.cloneSymbol(implClass).resetFlag(lateDEFERRED)
- if (currentRun.compiles(implClass)) implMethodMap(sym) = impl
- decls enter impl
+ val clone = sym.cloneSymbol(implClass).resetFlag(lateDEFERRED)
+ if (currentRun.compiles(implClass)) implMethodMap(sym) = clone
+ decls enter clone
sym setFlag lateDEFERRED
+ if (!sym.isSpecialized)
+ log(s"Cloned ${sym.name} from ${sym.owner} into implClass ${implClass.fullName}")
}
- } else {
+ }
+ else {
+ log(s"Destructively modifying owner of $sym from ${sym.owner} to $implClass")
sym.owner = implClass
// note: OK to destructively modify the owner here,
// because symbol will not be accessible from outside the sourcefile.
@@ -156,49 +169,44 @@ abstract class AddInterfaces extends InfoTransform {
decls enter sym
}
}
+
decls
}
- override def complete(sym: Symbol) {
+ override def complete(implSym: Symbol) {
+ debuglog("LazyImplClassType completing " + implSym)
+
+ /** If `tp` refers to a non-interface trait, return a
+ * reference to its implementation class. Otherwise return `tp`.
+ */
+ def mixinToImplClass(tp: Type): Type = AddInterfaces.this.erasure(implSym) {
+ tp match { //@MATN: no normalize needed (comes after erasure)
+ case TypeRef(pre, sym, _) if sym.needsImplClass =>
+ typeRef(pre, implClass(sym), Nil)
+ case _ =>
+ tp
+ }
+ }
def implType(tp: Type): Type = tp match {
case ClassInfoType(parents, decls, _) =>
- assert(phase == implClassPhase)
- ClassInfoType(
- ObjectClass.tpe :: (parents.tail map mixinToImplClass filter (_.typeSymbol != ObjectClass))
- ::: List(iface.tpe),
- implDecls(sym, decls),
- sym)
- case PolyType(tparams, restpe) =>
+ assert(phase == implClassPhase, tp)
+ // Impl class parents: Object first, matching interface last.
+ val implParents = ObjectClass.tpe +: (parents.tail map mixinToImplClass filter (_.typeSymbol != ObjectClass)) :+ iface.tpe
+ ClassInfoType(implParents, implDecls(implSym, decls), implSym)
+ case PolyType(_, restpe) =>
implType(restpe)
}
- sym.setInfo(implType(atPhase(currentRun.erasurePhase)(iface.info)))
+ implSym setInfo implType(beforeErasure(iface.info))
}
override def load(clazz: Symbol) { complete(clazz) }
}
- /** If type <code>tp</code> refers to a non-interface trait, return a
- * reference to its implementation class. Otherwise return <code>tp</code>
- * itself.
- *
- * @param tp ...
- * @return ...
- */
- private def mixinToImplClass(tp: Type): Type =
- erasure(
- tp match { //@MATN: no normalize needed (comes after erasure)
- case TypeRef(pre, sym, args) if (sym.needsImplClass) =>
- typeRef(pre, implClass(sym), args)
- case _ =>
- tp
- })
-
def transformMixinInfo(tp: Type): Type = tp match {
case ClassInfoType(parents, decls, clazz) =>
- if (clazz.needsImplClass) {
- clazz setFlag lateINTERFACE
- implClass(clazz) // generate an impl class
- }
+ if (clazz.needsImplClass)
+ implClass(clazz setFlag lateINTERFACE) // generate an impl class
+
val parents1 = parents match {
case Nil => Nil
case hd :: tl =>
@@ -206,13 +214,12 @@ abstract class AddInterfaces extends InfoTransform {
if (clazz.isTrait) erasedTypeRef(ObjectClass) :: tl
else parents
}
- val decls1 = decls filter (sym =>
- if (clazz.isInterface) isInterfaceMember(sym)
- else (!sym.isType || sym.isClass))
-
- //if (!clazz.isPackageClass) System.out.println("Decls of "+clazz+" after explicitOuter = " + decls1);//DEBUG
- //if ((parents1 eq parents) && (decls1 eq decls)) tp
- //else
+ val decls1 = scopeTransform(clazz)(
+ decls filter (sym =>
+ if (clazz.isInterface) isInterfaceMember(sym)
+ else sym.isClass || sym.isTerm
+ )
+ )
ClassInfoType(parents1, decls1, clazz)
case _ =>
tp
@@ -224,52 +231,62 @@ abstract class AddInterfaces extends InfoTransform {
extends ChangeOwnerTraverser(oldowner, newowner) {
override def traverse(tree: Tree) {
tree match {
- case Return(expr) =>
- if (tree.symbol == oldowner) tree.symbol = newowner
- case _ =>
+ case _: Return => change(tree.symbol)
+ case _ =>
}
super.traverse(tree)
}
}
- private def ifaceMemberDef(tree: Tree): Tree =
- if (!tree.isDef || !isInterfaceMember(tree.symbol)) EmptyTree
- else if (needsImplMethod(tree.symbol)) DefDef(tree.symbol, EmptyTree)
- else tree
+ private def createMemberDef(tree: Tree, isForInterface: Boolean)(create: Tree => Tree) = {
+ val isInterfaceTree = tree.isDef && isInterfaceMember(tree.symbol)
+ if (isInterfaceTree && needsImplMethod(tree.symbol))
+ create(tree)
+ else if (isInterfaceTree == isForInterface)
+ tree
+ else
+ EmptyTree
+ }
+ private def implMemberDef(tree: Tree): Tree = createMemberDef(tree, false)(implMethodDef)
+ private def ifaceMemberDef(tree: Tree): Tree = createMemberDef(tree, true)(t => DefDef(t.symbol, EmptyTree))
private def ifaceTemplate(templ: Template): Template =
treeCopy.Template(templ, templ.parents, emptyValDef, templ.body map ifaceMemberDef)
- private def implMethodDef(tree: Tree, ifaceMethod: Symbol): Tree =
- implMethodMap.get(ifaceMethod) match {
- case Some(implMethod) =>
- tree.symbol = implMethod
- new ChangeOwnerAndReturnTraverser(ifaceMethod, implMethod)(tree)
- case None =>
- abort("implMethod missing for " + ifaceMethod)
+ /** Transforms the member tree containing the implementation
+ * into a member of the impl class.
+ */
+ private def implMethodDef(tree: Tree): Tree = {
+ val impl = implMethodMap.getOrElse(tree.symbol, abort("implMethod missing for " + tree.symbol))
+
+ val newTree = if (impl.isErroneous) tree else { // e.g. res/t687
+ // SI-5167: Ensure that the tree that we are grafting refers the parameter symbols from the
+ // new method symbol `impl`, rather than the symbols of the original method signature in
+ // the trait. `tree setSymbol impl` does *not* suffice!
+ val DefDef(_, _, _, vparamss, _, _) = tree
+ val oldSyms = vparamss.flatten.map(_.symbol)
+ val newSyms = impl.info.paramss.flatten
+ assert(oldSyms.length == newSyms.length, (oldSyms, impl, impl.info))
+ tree.substituteSymbols(oldSyms, newSyms)
}
-
- private def implMemberDef(tree: Tree): Tree =
- if (!tree.isDef || !isInterfaceMember(tree.symbol)) tree
- else if (needsImplMethod(tree.symbol)) implMethodDef(tree, tree.symbol)
- else EmptyTree
+ new ChangeOwnerAndReturnTraverser(newTree.symbol, impl)(newTree setSymbol impl)
+ }
/** Add mixin constructor definition
* def $init$(): Unit = ()
- * to `stats' unless there is already one.
+ * to `stats` unless there is already one.
*/
private def addMixinConstructorDef(clazz: Symbol, stats: List[Tree]): List[Tree] =
if (treeInfo.firstConstructor(stats) != EmptyTree) stats
- else DefDef(clazz.primaryConstructor, Block(List(), Literal(()))) :: stats
+ else DefDef(clazz.primaryConstructor, Block(List(), Literal(Constant()))) :: stats
private def implTemplate(clazz: Symbol, templ: Template): Template = atPos(templ.pos) {
- val templ1 = atPos(templ.pos) {
- Template(templ.parents, emptyValDef,
- addMixinConstructorDef(clazz, templ.body map implMemberDef))
- .setSymbol(clazz.newLocalDummy(templ.pos))
- }
- new ChangeOwnerTraverser(templ.symbol.owner, clazz)(
- new ChangeOwnerTraverser(templ.symbol, templ1.symbol)(templ1))
+ val templ1 = (
+ Template(templ.parents, emptyValDef, addMixinConstructorDef(clazz, templ.body map implMemberDef))
+ setSymbol clazz.newLocalDummy(templ.pos)
+ )
+ templ1.changeOwner(templ.symbol.owner -> clazz, templ.symbol -> templ1.symbol)
+ templ1
}
def implClassDefs(trees: List[Tree]): List[Tree] = {
@@ -281,9 +298,8 @@ abstract class AddInterfaces extends InfoTransform {
}
/** Add calls to supermixin constructors
- * <blockquote><pre>super[mix].$init$()</pre></blockquote>
- * to <code>tree</code>. <code>tree</code> which is assumed to be the body
- * of a constructor of class <code>clazz</code>.
+ * `super[mix].$init$()`
+ * to tree, which is assumed to be the body of a constructor of class clazz.
*/
private def addMixinConstructorCalls(tree: Tree, clazz: Symbol): Tree = {
def mixinConstructorCall(impl: Symbol): Tree = atPos(tree.pos) {
@@ -291,14 +307,21 @@ abstract class AddInterfaces extends InfoTransform {
}
val mixinConstructorCalls: List[Tree] = {
for (mc <- clazz.mixinClasses.reverse
- if mc.hasFlag(lateINTERFACE) && mc != ScalaObjectClass)
+ if mc.hasFlag(lateINTERFACE))
yield mixinConstructorCall(implClass(mc))
}
- (tree: @unchecked) match {
+ tree match {
+ case Block(Nil, expr) =>
+ // AnyVal constructor - have to provide a real body so the
+ // jvm doesn't throw a VerifyError. But we can't add the
+ // body until now, because the typer knows that Any has no
+ // constructor and won't accept a call to super.init.
+ assert((clazz isSubClass AnyValClass) || clazz.info.parents.isEmpty, clazz)
+ Block(List(Apply(gen.mkSuperSelect, Nil)), expr)
+
case Block(stats, expr) =>
- // needs `hasSymbol' check because `supercall' could be a block (named / default args)
+ // needs `hasSymbol` check because `supercall` could be a block (named / default args)
val (presuper, supercall :: rest) = stats span (t => t.hasSymbolWhich(_ hasFlag PRESUPER))
- //assert(supercall.symbol.isClassConstructor, supercall)
treeCopy.Block(tree, presuper ::: (supercall :: mixinConstructorCalls ::: rest), expr)
}
}
@@ -310,30 +333,26 @@ abstract class AddInterfaces extends InfoTransform {
override def transform(tree: Tree): Tree = {
val sym = tree.symbol
val tree1 = tree match {
- case ClassDef(mods, name, tparams, impl) if (sym.needsImplClass) =>
+ case ClassDef(mods, _, _, impl) if sym.needsImplClass =>
implClass(sym).initialize // to force lateDEFERRED flags
- treeCopy.ClassDef(tree, mods | INTERFACE, name, tparams, ifaceTemplate(impl))
- case DefDef(mods, name, tparams, vparamss, tpt, rhs)
- if (sym.isClassConstructor && sym.isPrimaryConstructor && sym.owner != ArrayClass) =>
- treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt,
- addMixinConstructorCalls(rhs, sym.owner)) // (3)
+ copyClassDef(tree)(mods = mods | INTERFACE, impl = ifaceTemplate(impl))
+ case DefDef(_,_,_,_,_,_) if sym.isClassConstructor && sym.isPrimaryConstructor && sym.owner != ArrayClass =>
+ deriveDefDef(tree)(addMixinConstructorCalls(_, sym.owner)) // (3)
case Template(parents, self, body) =>
val parents1 = sym.owner.info.parents map (t => TypeTree(t) setPos tree.pos)
treeCopy.Template(tree, parents1, emptyValDef, body)
- case This(_) =>
- if (sym.needsImplClass) {
- val impl = implClass(sym)
- var owner = currentOwner
- while (owner != sym && owner != impl) owner = owner.owner;
- if (owner == impl) This(impl) setPos tree.pos
- else tree
- } else tree
+ case This(_) if sym.needsImplClass =>
+ val impl = implClass(sym)
+ var owner = currentOwner
+ while (owner != sym && owner != impl) owner = owner.owner;
+ if (owner == impl) This(impl) setPos tree.pos
+ else tree
/* !!!
case Super(qual, mix) =>
val mix1 = mix
if (mix == tpnme.EMPTY) mix
else {
- val ps = atPhase(currentRun.erasurePhase) {
+ val ps = beforeErasure {
sym.info.parents dropWhile (p => p.symbol.name != mix)
}
assert(!ps.isEmpty, tree);
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index aa829d6..7a0b034 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyrights 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -9,11 +9,13 @@ package transform
import symtab._
import Flags._
import scala.collection._
+import scala.language.postfixOps
abstract class CleanUp extends Transform with ast.TreeDSL {
import global._
import definitions._
import CODE._
+ import treeInfo.StripCast
/** the following two members override abstract members in Transform */
val phaseName: String = "cleanup"
@@ -22,27 +24,36 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
new CleanUpTransformer(unit)
class CleanUpTransformer(unit: CompilationUnit) extends Transformer {
- private val newStaticMembers = mutable.Buffer.empty[Tree]
- private val newStaticInits = mutable.Buffer.empty[Tree]
+ private val newStaticMembers = mutable.Buffer.empty[Tree]
+ private val newStaticInits = mutable.Buffer.empty[Tree]
private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol]
- private def mkTerm(prefix: String): TermName = unit.freshTermName(prefix)
-
- /** Kludge to provide a safe fix for #4560:
- * If we generate a reference in an implementation class, we
- * watch out for embedded This(..) nodes that point to the interface.
- * These must be wrong. We fix them by setting symbol and type to
- * the enclosing implementation class instead.
- */
- def safeREF(sym: Symbol) = {
- def fix(tree: Tree): Unit = tree match {
- case Select(qual @ This(_), name) if qual.symbol != currentClass =>
- qual.setSymbol(currentClass).setType(currentClass.tpe)
- case _ =>
- }
- val tree = REF(sym)
- if (currentClass.isImplClass && sym.owner == currentClass) fix(tree)
- tree
+ private def clearStatics() {
+ newStaticMembers.clear()
+ newStaticInits.clear()
+ symbolsStoredAsStatic.clear()
+ }
+ private def savingStatics[T](body: => T): T = {
+ val savedNewStaticMembers : mutable.Buffer[Tree] = newStaticMembers.clone()
+ val savedNewStaticInits : mutable.Buffer[Tree] = newStaticInits.clone()
+ val savedSymbolsStoredAsStatic : mutable.Map[String, Symbol] = symbolsStoredAsStatic.clone()
+ val result = body
+
+ clearStatics()
+ newStaticMembers ++= savedNewStaticMembers
+ newStaticInits ++= savedNewStaticInits
+ symbolsStoredAsStatic ++= savedSymbolsStoredAsStatic
+
+ result
}
+ private def transformTemplate(tree: Tree) = {
+ val Template(parents, self, body) = tree
+ clearStatics()
+ val newBody = transformTrees(body)
+ val templ = deriveTemplate(tree)(_ => transformTrees(newStaticMembers.toList) ::: newBody)
+ try addStaticInits(templ) // postprocess to include static ctors
+ finally clearStatics()
+ }
+ private def mkTerm(prefix: String): TermName = unit.freshTermName(prefix)
//private val classConstantMeth = new HashMap[String, Symbol]
//private val symbolStaticFields = new HashMap[String, (Symbol, Tree, Tree)]
@@ -59,11 +70,16 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
case "poly-cache" => POLY_CACHE
}
+ def shouldRewriteTry(tree: Try) = {
+ val sym = tree.tpe.typeSymbol
+ forMSIL && (sym != UnitClass) && (sym != NothingClass)
+ }
+
private def typedWithPos(pos: Position)(tree: Tree) =
- localTyper typed { atPos(pos)(tree) }
+ localTyper.typedPos(pos)(tree)
/** A value class is defined to be only Java-compatible values: unit is
- * not part of it, as opposed to isValueClass in definitions. scala.Int is
+ * not part of it, as opposed to isPrimitiveValueClass in definitions. scala.Int is
* a value class, java.lang.Integer is not. */
def isJavaValueClass(sym: Symbol) = boxedClass contains sym
def isJavaValueType(tp: Type) = isJavaValueClass(tp.typeSymbol)
@@ -72,40 +88,12 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
*/
def toBoxedType(tp: Type) = if (isJavaValueType(tp)) boxedClass(tp.typeSymbol).tpe else tp
- override def transform(tree: Tree): Tree = tree match {
+ def transformApplyDynamic(ad: ApplyDynamic) = {
+ val qual0 = ad.qual
+ val params = ad.args
+ if (settings.logReflectiveCalls.value)
+ unit.echo(ad.pos, "method invocation uses reflection")
- /* Transforms dynamic calls (i.e. calls to methods that are undefined
- * in the erased type space) to -- dynamically -- unsafe calls using
- * reflection. This is used for structural sub-typing of refinement
- * types, but may be used for other dynamic calls in the future.
- * For 'a.f(b)' it will generate something like:
- * 'a.getClass().
- * ' getMethod("f", Array(classOf[b.type])).
- * ' invoke(a, Array(b))
- * plus all the necessary casting/boxing/etc. machinery required
- * for type-compatibility (see fixResult).
- *
- * USAGE CONTRACT:
- * There are a number of assumptions made on the way a dynamic apply
- * is used. Assumptions relative to type are handled by the erasure
- * phase.
- * - The applied arguments are compatible with AnyRef, which means
- * that an argument tree typed as AnyVal has already been extended
- * with the necessary boxing calls. This implies that passed
- * arguments might not be strictly compatible with the method's
- * parameter types (a boxed integer while int is expected).
- * - The expected return type is an AnyRef, even when the method's
- * return type is an AnyVal. This means that the tree containing the
- * call has already been extended with the necessary unboxing calls
- * (or is happy with the boxed type).
- * - The type-checker has prevented dynamic applies on methods which
- * parameter's erased types are not statically known at the call site.
- * This is necessary to allow dispatching the call to the correct
- * method (dispatching on parameters is static in Scala). In practice,
- * this limitation only arises when the called method is defined as a
- * refinement, where the refinement defines a parameter based on a
- * type variable. */
- case ad at ApplyDynamic(qual0, params) =>
val typedPos = typedWithPos(ad.pos) _
assert(ad.symbol.isPublic)
@@ -113,41 +101,37 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* ### CREATING THE METHOD CACHE ### */
- def addStaticVariableToClass(forName: String, forType: Type, forInit: Tree, isFinal: Boolean): Symbol = {
- val varSym = currentClass.newVariable(ad.pos, mkTerm(forName))
- .setFlag(PRIVATE | STATIC | SYNTHETIC)
- .setInfo(forType)
- if (isFinal) varSym setFlag FINAL else varSym addAnnotation AnnotationInfo(VolatileAttr.tpe, Nil, Nil)
- currentClass.info.decls enter varSym
+ def addStaticVariableToClass(forName: TermName, forType: Type, forInit: Tree, isFinal: Boolean): Symbol = {
+ val flags = PRIVATE | STATIC | SYNTHETIC | (
+ if (isFinal) FINAL else 0
+ )
+
+ val varSym = currentClass.newVariable(mkTerm("" + forName), ad.pos, flags) setInfoAndEnter forType
+ if (!isFinal)
+ varSym.addAnnotation(VolatileAttr)
val varDef = typedPos( VAL(varSym) === forInit )
newStaticMembers append transform(varDef)
- val varInit = typedPos( safeREF(varSym) === forInit )
+ val varInit = typedPos( REF(varSym) === forInit )
newStaticInits append transform(varInit)
varSym
}
- def addStaticMethodToClass(forName: String, forArgsTypes: List[Type], forResultType: Type)
- (forBody: Pair[Symbol, List[Symbol]] => Tree): Symbol = {
- val methSym = currentClass.newMethod(ad.pos, mkTerm(forName))
- .setFlag(STATIC | SYNTHETIC)
-
- methSym.setInfo(MethodType(methSym.newSyntheticValueParams(forArgsTypes), forResultType))
- currentClass.info.decls enter methSym
+ def addStaticMethodToClass(forBody: (Symbol, Symbol) => Tree): Symbol = {
+ val methSym = currentClass.newMethod(mkTerm(nme.reflMethodName), ad.pos, STATIC | SYNTHETIC)
+ val params = methSym.newSyntheticValueParams(List(ClassClass.tpe))
+ methSym setInfoAndEnter MethodType(params, MethodClass.tpe)
- val methDef = typedPos( DefDef(methSym, { forBody(Pair(methSym, methSym.paramss(0))) }) )
+ val methDef = typedPos(DefDef(methSym, forBody(methSym, params.head)))
newStaticMembers append transform(methDef)
-
methSym
}
def fromTypesToClassArrayLiteral(paramTypes: List[Type]): Tree =
ArrayValue(TypeTree(ClassClass.tpe), paramTypes map LIT)
- def theTypeClassArray = arrayType(ClassClass.tpe)
-
/* ... */
def reflectiveMethodCache(method: String, paramTypes: List[Type]): Symbol = dispatchType match {
case NO_CACHE =>
@@ -162,12 +146,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
*/
val reflParamsCacheSym: Symbol =
- addStaticVariableToClass("reflParams$Cache", theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
+ addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
- addStaticMethodToClass("reflMethod$Method", List(ClassClass.tpe), MethodClass.tpe) {
- case Pair(reflMethodSym, List(forReceiverSym)) =>
- (REF(forReceiverSym) DOT Class_getMethod)(LIT(method), safeREF(reflParamsCacheSym))
- }
+ addStaticMethodToClass((_, forReceiverSym) =>
+ gen.mkMethodCall(REF(forReceiverSym), Class_getMethod, Nil, List(LIT(method), REF(reflParamsCacheSym)))
+ )
case MONO_CACHE =>
@@ -191,30 +174,27 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
*/
val reflParamsCacheSym: Symbol =
- addStaticVariableToClass("reflParams$Cache", theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
+ addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
val reflMethodCacheSym: Symbol =
- addStaticVariableToClass("reflMethod$Cache", MethodClass.tpe, NULL, false)
+ addStaticVariableToClass(nme.reflMethodCacheName, MethodClass.tpe, NULL, false)
val reflClassCacheSym: Symbol =
- addStaticVariableToClass("reflClass$Cache", SoftReferenceClass.tpe, NULL, false)
-
- def getMethodSym = ClassClass.tpe member nme.getMethod_
+ addStaticVariableToClass(nme.reflClassCacheName, SoftReferenceClass.tpe, NULL, false)
def isCacheEmpty(receiver: Symbol): Tree =
reflClassCacheSym.IS_NULL() OR (reflClassCacheSym.GET() OBJ_NE REF(receiver))
- addStaticMethodToClass("reflMethod$Method", List(ClassClass.tpe), MethodClass.tpe) {
- case Pair(reflMethodSym, List(forReceiverSym)) =>
- BLOCK(
- IF (isCacheEmpty(forReceiverSym)) THEN BLOCK(
- safeREF(reflMethodCacheSym) === ((REF(forReceiverSym) DOT getMethodSym)(LIT(method), safeREF(reflParamsCacheSym))) ,
- safeREF(reflClassCacheSym) === gen.mkSoftRef(REF(forReceiverSym)),
- UNIT
- ) ENDIF,
- safeREF(reflMethodCacheSym)
- )
- }
+ addStaticMethodToClass((_, forReceiverSym) =>
+ BLOCK(
+ IF (isCacheEmpty(forReceiverSym)) THEN BLOCK(
+ REF(reflMethodCacheSym) === ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym))) ,
+ REF(reflClassCacheSym) === gen.mkSoftRef(REF(forReceiverSym)),
+ UNIT
+ ) ENDIF,
+ REF(reflMethodCacheSym)
+ )
+ )
case POLY_CACHE =>
@@ -227,12 +207,17 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
var reflPoly$Cache: SoftReference[scala.runtime.MethodCache] = new SoftReference(new EmptyMethodCache())
def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
- var method: JMethod = reflPoly$Cache.find(forReceiver)
- if (method != null)
+ var methodCache: MethodCache = reflPoly$Cache.find(forReceiver)
+ if (methodCache eq null) {
+ methodCache = new EmptyMethodCache
+ reflPoly$Cache = new SoftReference(methodCache)
+ }
+ var method: JMethod = methodCache.find(forReceiver)
+ if (method ne null)
return method
else {
method = ScalaRunTime.ensureAccessible(forReceiver.getMethod("xyz", reflParams$Cache))
- reflPoly$Cache = new SoftReference(reflPoly$Cache.get.add(forReceiver, method))
+ reflPoly$Cache = new SoftReference(methodCache.add(forReceiver, method))
return method
}
}
@@ -240,84 +225,68 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
*/
val reflParamsCacheSym: Symbol =
- addStaticVariableToClass("reflParams$Cache", theTypeClassArray, fromTypesToClassArrayLiteral(paramTypes), true)
+ addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
def mkNewPolyCache = gen.mkSoftRef(NEW(TypeTree(EmptyMethodCacheClass.tpe)))
- val reflPolyCacheSym: Symbol = addStaticVariableToClass("reflPoly$Cache", SoftReferenceClass.tpe, mkNewPolyCache, false)
- def getPolyCache = fn(safeREF(reflPolyCacheSym), nme.get) AS_ATTR MethodCacheClass.tpe
-
- addStaticMethodToClass("reflMethod$Method", List(ClassClass.tpe), MethodClass.tpe)
- { case Pair(reflMethodSym, List(forReceiverSym)) =>
- val methodSym = reflMethodSym.newVariable(ad.pos, mkTerm("method")) setInfo MethodClass.tpe
-
- BLOCK(
- IF (getPolyCache OBJ_EQ NULL) THEN (safeREF(reflPolyCacheSym) === mkNewPolyCache) ENDIF,
- VAL(methodSym) === ((getPolyCache DOT methodCache_find)(REF(forReceiverSym))) ,
- IF (REF(methodSym) OBJ_!= NULL) .
- THEN (Return(REF(methodSym)))
- ELSE {
- def methodSymRHS = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), safeREF(reflParamsCacheSym)))
- def cacheRHS = ((getPolyCache DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
- BLOCK(
- REF(methodSym) === (REF(ensureAccessibleMethod) APPLY (methodSymRHS)),
- safeREF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
- Return(REF(methodSym))
- )
- }
- )
- }
+ val reflPolyCacheSym: Symbol = (
+ addStaticVariableToClass(nme.reflPolyCacheName, SoftReferenceClass.tpe, mkNewPolyCache, false)
+ )
+ def getPolyCache = gen.mkCast(fn(REF(reflPolyCacheSym), nme.get), MethodCacheClass.tpe)
+
+ addStaticMethodToClass((reflMethodSym, forReceiverSym) => {
+ val methodCache = reflMethodSym.newVariable(mkTerm("methodCache"), ad.pos) setInfo MethodCacheClass.tpe
+ val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
+
+ BLOCK(
+ VAR(methodCache) === getPolyCache,
+ IF (REF(methodCache) OBJ_EQ NULL) THEN BLOCK(
+ REF(methodCache) === NEW(TypeTree(EmptyMethodCacheClass.tpe)),
+ REF(reflPolyCacheSym) === gen.mkSoftRef(REF(methodCache))
+ ) ENDIF,
+
+ VAR(methodSym) === (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym)),
+ IF (REF(methodSym) OBJ_NE NULL) .
+ THEN (Return(REF(methodSym)))
+ ELSE {
+ def methodSymRHS = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym)))
+ def cacheRHS = ((REF(methodCache) DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
+ BLOCK(
+ REF(methodSym) === (REF(ensureAccessibleMethod) APPLY (methodSymRHS)),
+ REF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
+ Return(REF(methodSym))
+ )
+ }
+ )
+ })
+
}
/* ### HANDLING METHODS NORMALLY COMPILED TO OPERATORS ### */
- val testForNumber: Tree = (qual IS_OBJ BoxedNumberClass.tpe) OR (qual IS_OBJ BoxedCharacterClass.tpe)
- val testForBoolean: Tree = (qual IS_OBJ BoxedBooleanClass.tpe)
- val testForNumberOrBoolean = testForNumber OR testForBoolean
-
- val getPrimitiveReplacementForStructuralCall: PartialFunction[Name, (Symbol, Tree)] = {
- val testsForNumber = Map() ++ List(
- nme.UNARY_+ -> "positive",
- nme.UNARY_- -> "negate",
- nme.UNARY_~ -> "complement",
- nme.ADD -> "add",
- nme.SUB -> "subtract",
- nme.MUL -> "multiply",
- nme.DIV -> "divide",
- nme.MOD -> "takeModulo",
- nme.LSL -> "shiftSignedLeft",
- nme.LSR -> "shiftLogicalRight",
- nme.ASR -> "shiftSignedRight",
- nme.LT -> "testLessThan",
- nme.LE -> "testLessOrEqualThan",
- nme.GE -> "testGreaterOrEqualThan",
- nme.GT -> "testGreaterThan",
- nme.toByte -> "toByte",
- nme.toShort -> "toShort",
- nme.toChar -> "toCharacter",
- nme.toInt -> "toInteger",
- nme.toLong -> "toLong",
- nme.toFloat -> "toFloat",
- nme.toDouble-> "toDouble"
- )
- val testsForBoolean = Map() ++ List(
- nme.UNARY_! -> "takeNot",
- nme.ZOR -> "takeConditionalOr",
- nme.ZAND -> "takeConditionalAnd"
- )
- val testsForNumberOrBoolean = Map() ++ List(
- nme.OR -> "takeOr",
- nme.XOR -> "takeXor",
- nme.AND -> "takeAnd",
- nme.EQ -> "testEqual",
- nme.NE -> "testNotEqual"
+ def testForName(name: Name): Tree => Tree = t => (
+ if (nme.CommonOpNames(name))
+ gen.mkMethodCall(definitions.Boxes_isNumberOrBool, t :: Nil)
+ else if (nme.BooleanOpNames(name))
+ t IS_OBJ BoxedBooleanClass.tpe
+ else
+ gen.mkMethodCall(definitions.Boxes_isNumber, t :: Nil)
+ )
+
+ /** The Tree => Tree function in the return is necessary to prevent the original qual
+ * from being duplicated in the resulting code. It may be a side-effecting expression,
+ * so all the test logic is routed through gen.evalOnce, which creates a block like
+ * { val x$1 = qual; if (x$1.foo || x$1.bar) f1(x$1) else f2(x$1) }
+ * (If the compiler can verify qual is safe to inline, it will not create the block.)
+ */
+ def getPrimitiveReplacementForStructuralCall(name: Name): Option[(Symbol, Tree => Tree)] = {
+ val methodName = (
+ if (params.isEmpty) nme.primitivePostfixMethodName(name)
+ else if (params.tail.isEmpty) nme.primitiveInfixMethodName(name)
+ else nme.NO_NAME
)
- def get(name: String) = getMember(BoxesRunTimeClass, name)
-
- /** Begin partial function. */
- {
- case x if testsForNumber contains x => (get(testsForNumber(x)), testForNumber)
- case x if testsForBoolean contains x => (get(testsForBoolean(x)), testForBoolean)
- case x if testsForNumberOrBoolean contains x => (get(testsForNumberOrBoolean(x)), testForNumberOrBoolean)
+ definitions.getDeclIfDefined(BoxesRunTimeClass, methodName) match {
+ case NoSymbol => None
+ case sym => assert(!sym.isOverloaded, sym) ; Some((sym, testForName(name)))
}
}
@@ -334,108 +303,114 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* ### CALLING THE APPLY ### */
def callAsReflective(paramTypes: List[Type], resType: Type): Tree = {
- /* Some info about the type of the method being called. */
- val methSym = ad.symbol
- val boxedResType = toBoxedType(resType) // Int -> Integer
- val resultSym = boxedResType.typeSymbol
- // If this is a primitive method type (like '+' in 5+5=10) then the
- // parameter types and the (unboxed) result type should all be primitive types,
- // and the method name should be in the primitive->structural map.
- def isJavaValueMethod = (
- (resType :: paramTypes forall isJavaValueType) && // issue #1110
- (getPrimitiveReplacementForStructuralCall isDefinedAt methSym.name)
- )
- // Erasure lets Unit through as Unit, but a method returning Any will have an
- // erased return type of Object and should also allow Unit.
- def isDefinitelyUnit = (resultSym == UnitClass)
- def isMaybeUnit = (resultSym == ObjectClass) || isDefinitelyUnit
- // If there's any chance this signature could be met by an Array.
- val isArrayMethodSignature = {
- def typesMatchApply = paramTypes match {
- case List(tp) => tp <:< IntClass.tpe
- case _ => false
+ gen.evalOnce(qual, currentOwner, unit) { qual1 =>
+ /* Some info about the type of the method being called. */
+ val methSym = ad.symbol
+ val boxedResType = toBoxedType(resType) // Int -> Integer
+ val resultSym = boxedResType.typeSymbol
+ // If this is a primitive method type (like '+' in 5+5=10) then the
+ // parameter types and the (unboxed) result type should all be primitive types,
+ // and the method name should be in the primitive->structural map.
+ def isJavaValueMethod = (
+ (resType :: paramTypes forall isJavaValueType) && // issue #1110
+ (getPrimitiveReplacementForStructuralCall(methSym.name).isDefined)
+ )
+ // Erasure lets Unit through as Unit, but a method returning Any will have an
+ // erased return type of Object and should also allow Unit.
+ def isDefinitelyUnit = (resultSym == UnitClass)
+ def isMaybeUnit = (resultSym == ObjectClass) || isDefinitelyUnit
+ // If there's any chance this signature could be met by an Array.
+ val isArrayMethodSignature = {
+ def typesMatchApply = paramTypes match {
+ case List(tp) => tp <:< IntClass.tpe
+ case _ => false
+ }
+ def typesMatchUpdate = paramTypes match {
+ case List(tp1, tp2) => (tp1 <:< IntClass.tpe) && isMaybeUnit
+ case _ => false
+ }
+
+ (methSym.name == nme.length && params.isEmpty) ||
+ (methSym.name == nme.clone_ && params.isEmpty) ||
+ (methSym.name == nme.apply && typesMatchApply) ||
+ (methSym.name == nme.update && typesMatchUpdate)
}
- def typesMatchUpdate = paramTypes match {
- case List(tp1, tp2) => (tp1 <:< IntClass.tpe) && isMaybeUnit
- case _ => false
+
+ /* Some info about the argument at the call site. */
+ val qualSym = qual.tpe.typeSymbol
+ val args = qual1() :: params
+ def isDefinitelyArray = (qualSym == ArrayClass)
+ def isMaybeArray = (qualSym == ObjectClass) || isDefinitelyArray
+ def isMaybeBoxed = platform isMaybeBoxed qualSym
+
+ // This is complicated a bit by trying to handle Arrays correctly.
+ // Under normal circumstances if the erased return type is Object then
+ // we're not going to box it to Unit, but that is the situation with
+ // a signature like def f(x: { def update(x: Int, y: Long): Any })
+ //
+ // However we only want to do that boxing if it has been determined
+ // to be an Array and a method returning Unit. But for this fixResult
+ // could be called in one place: instead it is called separately from the
+ // unconditional outcomes (genValueCall, genArrayCall, genDefaultCall.)
+ def fixResult(tree: Tree, mustBeUnit: Boolean = false) =
+ if (mustBeUnit || resultSym == UnitClass) BLOCK(tree, REF(BoxedUnit_UNIT)) // boxed unit
+ else if (resultSym == ObjectClass) tree // no cast necessary
+ else gen.mkCast(tree, boxedResType) // cast to expected type
+
+ /** Normal non-Array call */
+ def genDefaultCall = {
+ // reflective method call machinery
+ val invokeName = MethodClass.tpe member nme.invoke_ // scala.reflect.Method.invoke(...)
+ def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol
+ def lookup = Apply(cache, List(qual1() GETCLASS)) // get Method object from cache
+ def invokeArgs = ArrayValue(TypeTree(ObjectClass.tpe), params) // args for invocation
+ def invocation = (lookup DOT invokeName)(qual1(), invokeArgs) // .invoke(qual1, ...)
+
+ // exception catching machinery
+ val invokeExc = currentOwner.newValue(mkTerm(""), ad.pos) setInfo InvocationTargetExceptionClass.tpe
+ def catchVar = Bind(invokeExc, Typed(Ident(nme.WILDCARD), TypeTree(InvocationTargetExceptionClass.tpe)))
+ def catchBody = Throw(Apply(Select(Ident(invokeExc), nme.getCause), Nil))
+
+ // try { method.invoke } catch { case e: InvocationTargetExceptionClass => throw e.getCause() }
+ fixResult(TRY (invocation) CATCH { CASE (catchVar) ==> catchBody } ENDTRY)
}
- (methSym.name == nme.length && params.isEmpty) ||
- (methSym.name == nme.clone_ && params.isEmpty) ||
- (methSym.name == nme.apply && typesMatchApply) ||
- (methSym.name == nme.update && typesMatchUpdate)
- }
+ /** A possible primitive method call, represented by methods in BoxesRunTime. */
+ def genValueCall(operator: Symbol) = fixResult(REF(operator) APPLY args)
+ def genValueCallWithTest = {
+ getPrimitiveReplacementForStructuralCall(methSym.name) match {
+ case Some((operator, test)) =>
+ IF (test(qual1())) THEN genValueCall(operator) ELSE genDefaultCall
+ case _ =>
+ genDefaultCall
+ }
+ }
- /* Some info about the argument at the call site. */
- val qualSym = qual.tpe.typeSymbol
- val args = qual :: params
- def isDefinitelyArray = (qualSym == ArrayClass)
- def isMaybeArray = (qualSym == ObjectClass) || isDefinitelyArray
- def isMaybeBoxed = platform isMaybeBoxed qualSym
-
- // This is complicated a bit by trying to handle Arrays correctly.
- // Under normal circumstances if the erased return type is Object then
- // we're not going to box it to Unit, but that is the situation with
- // a signature like def f(x: { def update(x: Int, y: Long): Any })
- //
- // However we only want to do that boxing if it has been determined
- // to be an Array and a method returning Unit. But for this fixResult
- // could be called in one place: instead it is called separately from the
- // unconditional outcomes (genValueCall, genArrayCall, genDefaultCall.)
- def fixResult(tree: Tree, mustBeUnit: Boolean = false) =
- if (mustBeUnit || resultSym == UnitClass) BLOCK(tree, REF(BoxedUnit_UNIT)) // boxed unit
- else if (resultSym == ObjectClass) tree // no cast necessary
- else tree AS_ATTR boxedResType // cast to expected type
-
- /** Normal non-Array call */
- def genDefaultCall = {
- // reflective method call machinery
- val invokeName = MethodClass.tpe member nme.invoke_ // reflect.Method.invoke(...)
- def cache = safeREF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol
- def lookup = Apply(cache, List(qual GETCLASS)) // get Method object from cache
- def invokeArgs = ArrayValue(TypeTree(ObjectClass.tpe), params) // args for invocation
- def invocation = (lookup DOT invokeName)(qual, invokeArgs) // .invoke(qual, ...)
-
- // exception catching machinery
- val invokeExc = currentOwner.newValue(ad.pos, mkTerm("")) setInfo InvocationTargetExceptionClass.tpe
- def catchVar = Bind(invokeExc, Typed(Ident(nme.WILDCARD), TypeTree(InvocationTargetExceptionClass.tpe)))
- def catchBody = Throw(Apply(Select(Ident(invokeExc), nme.getCause), Nil))
-
- // try { method.invoke } catch { case e: InvocationTargetExceptionClass => throw e.getCause() }
- fixResult(TRY (invocation) CATCH { CASE (catchVar) ==> catchBody } ENDTRY)
- }
+ /** A native Array call. */
+ def genArrayCall = fixResult(
+ methSym.name match {
+ case nme.length => REF(boxMethod(IntClass)) APPLY (REF(arrayLengthMethod) APPLY args)
+ case nme.update => REF(arrayUpdateMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1)), args(2))
+ case nme.apply => REF(arrayApplyMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1)))
+ case nme.clone_ => REF(arrayCloneMethod) APPLY List(args(0))
+ },
+ mustBeUnit = methSym.name == nme.update
+ )
- /** A possible primitive method call, represented by methods in BoxesRunTime. */
- def genValueCall(operator: Symbol) = fixResult(REF(operator) APPLY args)
- def genValueCallWithTest = {
- val (operator, test) = getPrimitiveReplacementForStructuralCall(methSym.name)
- IF (test) THEN genValueCall(operator) ELSE genDefaultCall
+ /** A conditional Array call, when we can't determine statically if the argument is
+ * an Array, but the structural type method signature is consistent with an Array method
+ * so we have to generate both kinds of code.
+ */
+ def genArrayCallWithTest =
+ IF ((qual1() GETCLASS()) DOT nme.isArray) THEN genArrayCall ELSE genDefaultCall
+
+ localTyper typed (
+ if (isMaybeBoxed && isJavaValueMethod) genValueCallWithTest
+ else if (isArrayMethodSignature && isDefinitelyArray) genArrayCall
+ else if (isArrayMethodSignature && isMaybeArray) genArrayCallWithTest
+ else genDefaultCall
+ )
}
-
- /** A native Array call. */
- def genArrayCall = fixResult(
- methSym.name match {
- case nme.length => REF(boxMethod(IntClass)) APPLY (REF(arrayLengthMethod) APPLY args)
- case nme.update => REF(arrayUpdateMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1)), args(2))
- case nme.apply => REF(arrayApplyMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1)))
- case nme.clone_ => REF(arrayCloneMethod) APPLY List(args(0))
- },
- mustBeUnit = methSym.name == nme.update
- )
-
- /** A conditional Array call, when we can't determine statically if the argument is
- * an Array, but the structural type method signature is consistent with an Array method
- * so we have to generate both kinds of code.
- */
- def genArrayCallWithTest =
- IF ((qual GETCLASS()) DOT nme.isArray) THEN genArrayCall ELSE genDefaultCall
-
- localTyper typed (
- if (isMaybeBoxed && isJavaValueMethod) genValueCallWithTest
- else if (isArrayMethodSignature && isDefinitelyArray) genArrayCall
- else if (isArrayMethodSignature && isMaybeArray) genArrayCallWithTest
- else genDefaultCall
- )
}
if (settings.refinementMethodDispatch.value == "invoke-dynamic") {
@@ -475,18 +450,31 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* is a value type (int et al.) in which case it must cast to the boxed version
* because invoke only returns object and erasure made sure the result is
* expected to be an AnyRef. */
- val t: Tree = ad.symbol.tpe match {
- case MethodType(mparams, resType) =>
- assert(params.length == mparams.length)
- typedPos {
- val sym = currentOwner.newValue(ad.pos, mkTerm("qual")) setInfo qual0.tpe
- qual = safeREF(sym)
+ val t: Tree = {
+ val (mparams, resType) = ad.symbol.tpe match {
+ case MethodType(mparams, resType) =>
+ assert(params.length == mparams.length, ((params, mparams)))
+ (mparams, resType)
+ case tpe @ OverloadedType(pre, alts) =>
+ unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe))
+ alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match {
+ case mt @ MethodType(mparams, resType) :: Nil =>
+ unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt)
+ (mparams, resType)
+ case _ =>
+ unit.error(ad.pos, "Cannot resolve overload.")
+ (Nil, NoType)
+ }
+ }
+ typedPos {
+ val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
+ qual = REF(sym)
- BLOCK(
- VAL(sym) === qual0,
- callAsReflective(mparams map (_.tpe), resType)
- )
- }
+ BLOCK(
+ VAL(sym) === qual0,
+ callAsReflective(mparams map (_.tpe), resType)
+ )
+ }
}
/* For testing purposes, the dynamic application's condition
@@ -518,6 +506,44 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
transform(t)
}
/* ### END OF DYNAMIC APPLY TRANSFORM ### */
+ }
+
+ override def transform(tree: Tree): Tree = tree match {
+
+ /* Transforms dynamic calls (i.e. calls to methods that are undefined
+ * in the erased type space) to -- dynamically -- unsafe calls using
+ * reflection. This is used for structural sub-typing of refinement
+ * types, but may be used for other dynamic calls in the future.
+ * For 'a.f(b)' it will generate something like:
+ * 'a.getClass().
+ * ' getMethod("f", Array(classOf[b.type])).
+ * ' invoke(a, Array(b))
+ * plus all the necessary casting/boxing/etc. machinery required
+ * for type-compatibility (see fixResult).
+ *
+ * USAGE CONTRACT:
+ * There are a number of assumptions made on the way a dynamic apply
+ * is used. Assumptions relative to type are handled by the erasure
+ * phase.
+ * - The applied arguments are compatible with AnyRef, which means
+ * that an argument tree typed as AnyVal has already been extended
+ * with the necessary boxing calls. This implies that passed
+ * arguments might not be strictly compatible with the method's
+ * parameter types (a boxed integer while int is expected).
+ * - The expected return type is an AnyRef, even when the method's
+ * return type is an AnyVal. This means that the tree containing the
+ * call has already been extended with the necessary unboxing calls
+ * (or is happy with the boxed type).
+ * - The type-checker has prevented dynamic applies on methods which
+ * parameter's erased types are not statically known at the call site.
+ * This is necessary to allow dispatching the call to the correct
+ * method (dispatching on parameters is static in Scala). In practice,
+ * this limitation only arises when the called method is defined as a
+ * refinement, where the refinement defines a parameter based on a
+ * type variable. */
+
+ case tree: ApplyDynamic =>
+ transformApplyDynamic(tree)
/* Some cleanup transformations add members to templates (classes, traits, etc).
* When inside a template (i.e. the body of one of its members), two maps
@@ -529,36 +555,13 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* constructor. */
case Template(parents, self, body) =>
localTyper = typer.atOwner(tree, currentClass)
- var savedNewStaticMembers : mutable.Buffer[Tree] = null
- var savedNewStaticInits : mutable.Buffer[Tree] = null
- var savedSymbolsStoredAsStatic : mutable.Map[String, Symbol] = null
- if(forMSIL) {
- savedNewStaticMembers = newStaticMembers.clone
- savedNewStaticInits = newStaticInits.clone
- savedSymbolsStoredAsStatic = symbolsStoredAsStatic.clone
- }
- newStaticMembers.clear
- newStaticInits.clear
- symbolsStoredAsStatic.clear
- val transformedTemplate: Template = {
- var newBody = transformTrees(body)
- treeCopy.Template(tree, parents, self, transformTrees(newStaticMembers.toList) ::: newBody)
- }
- val res = addStaticInits(transformedTemplate) // postprocess to include static ctors
- newStaticMembers.clear
- newStaticInits.clear
- symbolsStoredAsStatic.clear
- if(forMSIL) {
- newStaticMembers ++= savedNewStaticMembers
- newStaticInits ++= savedNewStaticInits
- symbolsStoredAsStatic ++= savedSymbolsStoredAsStatic
- }
- res
+ if (forMSIL) savingStatics( transformTemplate(tree) )
+ else transformTemplate(tree)
- case Literal(c) if (c.tag == ClassTag) && !forMSIL=>
+ case Literal(c) if (c.tag == ClazzTag) && !forMSIL=>
val tpe = c.typeValue
typedWithPos(tree.pos) {
- if (isValueClass(tpe.typeSymbol)) {
+ if (isPrimitiveValueClass(tpe.typeSymbol)) {
if (tpe.typeSymbol == UnitClass)
REF(BoxedUnit_TYPE)
else
@@ -572,10 +575,10 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* Hence, we here rewrite all try blocks with a result != {Unit, All} such that they
* store their result in a local variable. The catch blocks are adjusted as well.
* The try tree is subsituted by a block whose result expression is read of that variable. */
- case theTry @ Try(block, catches, finalizer)
- if theTry.tpe.typeSymbol != definitions.UnitClass && theTry.tpe.typeSymbol != definitions.NothingClass =>
+ case theTry @ Try(block, catches, finalizer) if shouldRewriteTry(theTry) =>
+ def transformTry = {
val tpe = theTry.tpe.widen
- val tempVar = currentOwner.newVariable(theTry.pos, mkTerm(nme.EXCEPTION_RESULT_PREFIX)).setInfo(tpe)
+ val tempVar = currentOwner.newVariable(mkTerm(nme.EXCEPTION_RESULT_PREFIX), theTry.pos).setInfo(tpe)
def assignBlock(rhs: Tree) = super.transform(BLOCK(Ident(tempVar) === transform(rhs)))
val newBlock = assignBlock(block)
@@ -584,7 +587,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val newTry = Try(newBlock, newCatches, super.transform(finalizer))
typedWithPos(theTry.pos)(BLOCK(VAL(tempVar) === EmptyTree, newTry, Ident(tempVar)))
-
+ }
+ transformTry
/*
* This transformation should identify Scala symbol invocations in the tree and replace them
* with references to a static member. Also, whenever a class has at least a single symbol invocation
@@ -616,25 +620,27 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* And, finally, be advised - scala symbol literal and the Symbol class of the compiler
* have little in common.
*/
- case symapp @ Apply(Select(Select(a @ Ident(nme.scala_), b @ nme.Symbol), nme.apply),
- List(Literal(Constant(symname: String)))) =>
+ case Apply(fn, (arg @ Literal(Constant(symname: String))) :: Nil) if fn.symbol == Symbol_apply =>
+ def transformApply = {
// add the symbol name to a map if it's not there already
- val rhs = gen.mkCast(Apply(gen.scalaDot(nme.Symbol), List(Literal(Constant(symname)))), symbolType)
- val staticFieldSym = getSymbolStaticField(symapp.pos, symname, rhs, symapp)
-
+ val rhs = gen.mkMethodCall(Symbol_apply, arg :: Nil)
+ val staticFieldSym = getSymbolStaticField(tree.pos, symname, rhs, tree)
// create a reference to a static field
- val ntree = typedWithPos(symapp.pos)(safeREF(staticFieldSym))
-
+ val ntree = typedWithPos(tree.pos)(REF(staticFieldSym))
super.transform(ntree)
-
- // This transform replaces Array(Predef.wrapArray(Array(...)), <manifest>)
- // with just Array(...)
- case Apply(appMeth, List(Apply(wrapRefArrayMeth, List(array)), _))
- if (wrapRefArrayMeth.symbol == Predef_wrapRefArray &&
- appMeth.symbol == ArrayModule_overloadedApply.suchThat {
- _.tpe.resultType.dealias.typeSymbol == ObjectClass
- }) =>
- super.transform(array)
+ }
+ transformApply
+
+ // Replaces `Array(Predef.wrapArray(ArrayValue(...).$asInstanceOf[...]), <tag>)`
+ // with just `ArrayValue(...).$asInstanceOf[...]`
+ //
+ // See SI-6611; we must *only* do this for literal vararg arrays.
+ case Apply(appMeth, List(Apply(wrapRefArrayMeth, List(arg @ StripCast(ArrayValue(_, _)))), _))
+ if wrapRefArrayMeth.symbol == Predef_wrapRefArray && appMeth.symbol == ArrayModule_genericApply =>
+ super.transform(arg)
+ case Apply(appMeth, List(elem0, Apply(wrapArrayMeth, List(rest @ ArrayValue(elemtpt, _)))))
+ if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && appMeth.symbol == ArrayModule_apply(elemtpt.tpe) =>
+ super.transform(treeCopy.ArrayValue(rest, rest.elemtpt, elem0 :: rest.elems))
case _ =>
super.transform(tree)
@@ -644,19 +650,20 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* If it doesn't exist, i.e. the symbol is encountered the first time,
* it creates a new static field definition and initialization and returns it.
*/
- private def getSymbolStaticField(pos: Position, symname: String, rhs: Tree, tree: Tree): Symbol =
+ private def getSymbolStaticField(pos: Position, symname: String, rhs: Tree, tree: Tree): Symbol = {
symbolsStoredAsStatic.getOrElseUpdate(symname, {
val theTyper = typer.atOwner(tree, currentClass)
// create a symbol for the static field
- val stfieldSym = currentClass.newVariable(pos, mkTerm("symbol$"))
- .setFlag(PRIVATE | STATIC | SYNTHETIC | FINAL)
- .setInfo(symbolType)
+ val stfieldSym = (
+ currentClass.newVariable(mkTerm("symbol$"), pos, PRIVATE | STATIC | SYNTHETIC | FINAL)
+ setInfo SymbolClass.tpe
+ )
currentClass.info.decls enter stfieldSym
// create field definition and initialization
- val stfieldDef = theTyper.typed { atPos(pos)(VAL(stfieldSym) === rhs) }
- val stfieldInit = theTyper.typed { atPos(pos)(safeREF(stfieldSym) === rhs) }
+ val stfieldDef = theTyper.typedPos(pos)(VAL(stfieldSym) === rhs)
+ val stfieldInit = theTyper.typedPos(pos)(REF(stfieldSym) === rhs)
// add field definition to new defs
newStaticMembers append stfieldDef
@@ -664,6 +671,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
stfieldSym
})
+ }
/* finds the static ctor DefDef tree within the template if it exists. */
private def findStaticCtor(template: Template): Option[Tree] =
@@ -675,16 +683,16 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* changes the template for the class so that it contains a static constructor with symbol fields inits,
* augments an existing static ctor if one already existed.
*/
- private def addStaticInits(template: Template): Template =
+ private def addStaticInits(template: Template): Template = {
if (newStaticInits.isEmpty)
template
else {
val newCtor = findStaticCtor(template) match {
// in case there already were static ctors - augment existing ones
// currently, however, static ctors aren't being generated anywhere else
- case Some(ctor @ DefDef(mods, name, tparams, vparamss, tpt, rhs)) =>
+ case Some(ctor @ DefDef(_,_,_,_,_,_)) =>
// modify existing static ctor
- val newBlock = rhs match {
+ deriveDefDef(ctor) {
case block @ Block(stats, expr) =>
// need to add inits to existing block
treeCopy.Block(block, newStaticInits.toList ::: stats, expr)
@@ -692,18 +700,16 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
// need to create a new block with inits and the old term
treeCopy.Block(term, newStaticInits.toList, term)
}
- treeCopy.DefDef(ctor, mods, name, tparams, vparamss, tpt, newBlock)
- case None =>
+ case _ =>
// create new static ctor
- val staticCtorSym = currentClass.newConstructor(template.pos)
- .setFlag(STATIC)
- .setInfo(UnitClass.tpe)
- val rhs = Block(newStaticInits.toList, Literal(()))
- val staticCtorTree = DefDef(staticCtorSym, rhs)
- localTyper.typed { atPos(template.pos)(staticCtorTree) }
+ val staticCtorSym = currentClass.newStaticConstructor(template.pos)
+ val rhs = Block(newStaticInits.toList, Literal(Constant(())))
+
+ localTyper.typedPos(template.pos)(DefDef(staticCtorSym, rhs))
}
- treeCopy.Template(template, template.parents, template.self, newCtor :: template.body)
+ deriveTemplate(template)(newCtor :: _)
}
+ }
} // CleanUpTransformer
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 1d5efbd..1a1137f 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author
*/
@@ -44,11 +44,14 @@ abstract class Constructors extends Transform with ast.TreeDSL {
)
// decompose primary constructor into the three entities above.
val constrInfo: ConstrInfo = {
- val primary = stats find (_.symbol.isPrimaryConstructor)
- assert(primary.isDefined, "no constructor in template: impl = " + impl)
-
- val ddef @ DefDef(_, _, _, List(vparams), _, rhs @ Block(_, _)) = primary.get
+ stats find (_.symbol.isPrimaryConstructor) match {
+ case Some(ddef @ DefDef(_, _, _, List(vparams), _, rhs @ Block(_, _))) =>
ConstrInfo(ddef, vparams map (_.symbol), rhs)
+ case x =>
+ // AnyVal constructor is OK
+ assert(clazz eq AnyValClass, "no constructor in template: impl = " + impl)
+ return impl
+ }
}
import constrInfo._
@@ -60,12 +63,12 @@ abstract class Constructors extends Transform with ast.TreeDSL {
parameterNamed(nme.getterName(acc.originalName))
// The constructor parameter with given name. This means the parameter
- // has given name, or starts with given name, and continues with a `$' afterwards.
+ // has given name, or starts with given name, and continues with a `$` afterwards.
def parameterNamed(name: Name): Symbol = {
- def matchesName(param: Symbol) = param.name == name || param.name.startsWith(name + "$")
+ def matchesName(param: Symbol) = param.name == name || param.name.startsWith(name + nme.NAME_JOIN_STRING)
(constrParams filter matchesName) match {
- case Nil => assert(false, name + " not in " + constrParams) ; null
+ case Nil => abort(name + " not in " + constrParams)
case p :: _ => p
}
}
@@ -103,10 +106,9 @@ abstract class Constructors extends Transform with ast.TreeDSL {
}
}
- // Move tree into constructor, take care of changing owner from `oldowner' to constructor symbol
+ // Move tree into constructor, take care of changing owner from `oldowner` to constructor symbol
def intoConstructor(oldowner: Symbol, tree: Tree) =
- intoConstructorTransformer.transform(
- new ChangeOwnerTraverser(oldowner, constr.symbol)(tree))
+ intoConstructorTransformer transform tree.changeOwner(oldowner -> constr.symbol)
// Should tree be moved in front of super constructor call?
def canBeMoved(tree: Tree) = tree match {
@@ -114,7 +116,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
case _ => false
}
- // Create an assignment to class field `to' with rhs `from'
+ // Create an assignment to class field `to` with rhs `from`
def mkAssign(to: Symbol, from: Tree): Tree =
localTyper.typedPos(to.pos) { Assign(Select(This(clazz), to), from) }
@@ -125,9 +127,10 @@ abstract class Constructors extends Transform with ast.TreeDSL {
import CODE._
val result = mkAssign(to, Ident(from))
- if (from.name != nme.OUTER) result
+ if (from.name != nme.OUTER ||
+ from.tpe.typeSymbol.isPrimitiveValueClass) result
else localTyper.typedPos(to.pos) {
- IF (from OBJ_EQ NULL) THEN THROW(NullPointerExceptionClass) ELSE result
+ IF (from OBJ_EQ NULL) THEN Throw(NullPointerExceptionClass.tpe) ELSE result
}
}
@@ -165,20 +168,18 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// Triage all template definitions to go into defBuf/auxConstructorBuf, constrStatBuf, or constrPrefixBuf.
for (stat <- stats) stat match {
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ case DefDef(_,_,_,_,_,rhs) =>
// methods with constant result type get literals as their body
// all methods except the primary constructor go into template
stat.symbol.tpe match {
case MethodType(List(), tp @ ConstantType(c)) =>
- defBuf += treeCopy.DefDef(
- stat, mods, name, tparams, vparamss, tpt,
- Literal(c) setPos rhs.pos setType tp)
+ defBuf += deriveDefDef(stat)(Literal(c) setPos _.pos setType tp)
case _ =>
if (stat.symbol.isPrimaryConstructor) ()
else if (stat.symbol.isConstructor) auxConstructorBuf += stat
else defBuf += stat
}
- case ValDef(mods, name, tpt, rhs) =>
+ case ValDef(_, _, _, rhs) =>
// val defs with constant right-hand sides are eliminated.
// for all other val defs, an empty valdef goes into the template and
// the initializer goes as an assignment into the constructor
@@ -191,7 +192,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
(if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
stat.symbol, rhs1)
}
- defBuf += treeCopy.ValDef(stat, mods, name, tpt, EmptyTree)
+ defBuf += deriveValDef(stat)(_ => EmptyTree)
}
case ClassDef(_, _, _, _) =>
// classes are treated recursively, and left in the template
@@ -215,7 +216,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// the symbol is an outer accessor of a final class which does not override another outer accessor. )
def maybeOmittable(sym: Symbol) = sym.owner == clazz && (
sym.isParamAccessor && sym.isPrivateLocal ||
- sym.isOuterAccessor && sym.owner.isFinal && sym.allOverriddenSymbols.isEmpty &&
+ sym.isOuterAccessor && sym.owner.isEffectivelyFinal && !sym.isOverridingSymbol &&
!(clazz isSubClass DelayedInitClass)
)
@@ -228,12 +229,12 @@ abstract class Constructors extends Transform with ast.TreeDSL {
override def traverse(tree: Tree) = {
tree match {
case DefDef(_, _, _, _, _, body)
- if (tree.symbol.isOuterAccessor && tree.symbol.owner == clazz && clazz.isFinal) =>
- log("outerAccessors += " + tree.symbol.fullName)
- outerAccessors ::= (tree.symbol, body)
+ if (tree.symbol.isOuterAccessor && tree.symbol.owner == clazz && clazz.isEffectivelyFinal) =>
+ debuglog("outerAccessors += " + tree.symbol.fullName)
+ outerAccessors ::= ((tree.symbol, body))
case Select(_, _) =>
if (!mustbeKept(tree.symbol)) {
- log("accessedSyms += " + tree.symbol.fullName)
+ debuglog("accessedSyms += " + tree.symbol.fullName)
accessedSyms addEntry tree.symbol
}
super.traverse(tree)
@@ -254,26 +255,18 @@ abstract class Constructors extends Transform with ast.TreeDSL {
for ((accSym, accBody) <- outerAccessors)
if (mustbeKept(accSym)) accessTraverser.traverse(accBody)
- // Conflicting symbol list from parents: see bug #1960.
- // It would be better to mangle the constructor parameter name since
- // it can only be used internally, but I think we need more robust name
- // mangling before we introduce more of it.
- val parentSymbols = Map((for {
- p <- impl.parents
- if p.symbol.isTrait
- sym <- p.symbol.info.nonPrivateMembers
- if sym.isGetter && !sym.isOuterField
- } yield sym.name -> p): _*)
-
// Initialize all parameters fields that must be kept.
- val paramInits =
- for (acc <- paramAccessors if mustbeKept(acc)) yield {
- if (parentSymbols contains acc.name)
- unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s in '%s'".format(
- acc.name, acc.name, parentSymbols(acc.name)))
-
- copyParam(acc, parameter(acc))
- }
+ val paramInits = paramAccessors filter mustbeKept map { acc =>
+ // Check for conflicting symbol amongst parents: see bug #1960.
+ // It would be better to mangle the constructor parameter name since
+ // it can only be used internally, but I think we need more robust name
+ // mangling before we introduce more of it.
+ val conflict = clazz.info.nonPrivateMember(acc.name) filter (s => s.isGetter && !s.isOuterField && s.enclClass.isTrait)
+ if (conflict ne NoSymbol)
+ unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString))
+
+ copyParam(acc, parameter(acc))
+ }
/** Return a single list of statements, merging the generic class constructor with the
* specialized stats. The original statements are retyped in the current class, and
@@ -285,10 +278,11 @@ abstract class Constructors extends Transform with ast.TreeDSL {
specBuf ++= specializedStats
def specializedAssignFor(sym: Symbol): Option[Tree] =
- specializedStats.find {
- case Assign(sel @ Select(This(_), _), rhs) if sel.symbol.hasFlag(SPECIALIZED) =>
- val (generic, _, _) = nme.splitSpecializedName(nme.localToGetter(sel.symbol.name))
- generic == nme.localToGetter(sym.name)
+ specializedStats find {
+ case Assign(sel @ Select(This(_), _), rhs) =>
+ ( (sel.symbol hasFlag SPECIALIZED)
+ && (nme.unspecializedName(nme.localToGetter(sel.symbol.name)) == nme.localToGetter(sym.name))
+ )
case _ => false
}
@@ -298,11 +292,10 @@ abstract class Constructors extends Transform with ast.TreeDSL {
* be an error to pass it to array_update(.., .., Object).
*/
def rewriteArrayUpdate(tree: Tree): Tree = {
- val array_update = definitions.ScalaRunTimeModule.info.member("array_update")
val adapter = new Transformer {
override def transform(t: Tree): Tree = t match {
- case Apply(fun @ Select(receiver, method), List(xs, idx, v)) if fun.symbol == array_update =>
- localTyper.typed(Apply(gen.mkAttributedSelect(xs, definitions.Array_update), List(idx, v)))
+ case Apply(fun @ Select(receiver, method), List(xs, idx, v)) if fun.symbol == arrayUpdateMethod =>
+ localTyper.typed(Apply(gen.mkAttributedSelect(xs, arrayUpdateMethod), List(idx, v)))
case _ => super.transform(t)
}
}
@@ -310,7 +303,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
}
log("merging: " + originalStats.mkString("\n") + "\nwith\n" + specializedStats.mkString("\n"))
- val res = for (s <- originalStats; val stat = s.duplicate) yield {
+ val res = for (s <- originalStats; stat = s.duplicate) yield {
log("merge: looking at " + stat)
val stat1 = stat match {
case Assign(sel @ Select(This(_), field), _) =>
@@ -329,9 +322,9 @@ abstract class Constructors extends Transform with ast.TreeDSL {
val stat2 = rewriteArrayUpdate(stat1)
// statements coming from the original class need retyping in the current context
- if (settings.debug.value) log("retyping " + stat2)
+ debuglog("retyping " + stat2)
- val d = new specializeTypes.Duplicator
+ val d = new specializeTypes.Duplicator(Map[Symbol, Type]())
d.retyped(localTyper.context1.asInstanceOf[d.Context],
stat2,
genericClazz,
@@ -370,19 +363,19 @@ abstract class Constructors extends Transform with ast.TreeDSL {
val tree =
If(
Apply(
- Select(
- Apply(gen.mkAttributedRef(specializedFlag), List()),
- definitions.getMember(definitions.BooleanClass, nme.UNARY_!)),
+ CODE.NOT (
+ Apply(gen.mkAttributedRef(specializedFlag), List())),
List()),
- Block(stats, Literal(())),
+ Block(stats, Literal(Constant())),
EmptyTree)
List(localTyper.typed(tree))
- } else if (clazz.hasFlag(SPECIALIZED)) {
+ }
+ else if (clazz.hasFlag(SPECIALIZED)) {
// add initialization from its generic class constructor
- val (genericName, _, _) = nme.splitSpecializedName(clazz.name)
+ val genericName = nme.unspecializedName(clazz.name)
val genericClazz = clazz.owner.info.decl(genericName.toTypeName)
- assert(genericClazz != NoSymbol)
+ assert(genericClazz != NoSymbol, clazz)
guardedCtorStats.get(genericClazz) match {
case Some(stats1) => mergeConstructors(genericClazz, stats1, stats)
@@ -400,23 +393,15 @@ abstract class Constructors extends Transform with ast.TreeDSL {
/** Create a getter or a setter and enter into `clazz` scope
*/
def addAccessor(sym: Symbol, name: TermName, flags: Long) = {
- val m = clazz.newMethod(sym.pos, name)
- .setFlag(flags & ~LOCAL & ~PRIVATE)
- m.privateWithin = clazz
- clazz.info.decls.enter(m)
- m
+ val m = clazz.newMethod(name, sym.pos, flags & ~(LOCAL | PRIVATE)) setPrivateWithin clazz
+ clazz.info.decls enter m
}
def addGetter(sym: Symbol): Symbol = {
val getr = addAccessor(
sym, nme.getterName(sym.name), getterFlags(sym.flags))
getr setInfo MethodType(List(), sym.tpe)
- defBuf += localTyper.typed {
- //util.trace("adding getter def for "+getr) {
- atPos(sym.pos) {
- DefDef(getr, Select(This(clazz), sym))
- }//}
- }
+ defBuf += localTyper.typedPos(sym.pos)(DefDef(getr, Select(This(clazz), sym)))
getr
}
@@ -460,28 +445,24 @@ abstract class Constructors extends Transform with ast.TreeDSL {
def delayedInitClosure(stats: List[Tree]) =
localTyper.typed {
atPos(impl.pos) {
- val closureClass = clazz.newClass(impl.pos, nme.delayedInitArg.toTypeName)
- .setFlag(SYNTHETIC | FINAL)
- val closureParents = List(AbstractFunctionClass(0).tpe, ScalaObjectClass.tpe)
- closureClass.setInfo(new ClassInfoType(closureParents, new Scope, closureClass))
-
- val outerField = closureClass.newValue(impl.pos, nme.OUTER)
- .setFlag(PRIVATE | LOCAL | PARAMACCESSOR)
- .setInfo(clazz.tpe)
-
- val applyMethod = closureClass.newMethod(impl.pos, nme.apply)
- .setFlag(FINAL)
- .setInfo(MethodType(List(), ObjectClass.tpe))
-
- closureClass.info.decls enter outerField
- closureClass.info.decls enter applyMethod
-
- val outerFieldDef = ValDef(outerField)
-
- val changeOwner = new ChangeOwnerTraverser(impl.symbol, applyMethod)
-
+ val closureClass = clazz.newClass(nme.delayedInitArg.toTypeName, impl.pos, SYNTHETIC | FINAL)
+ val closureParents = List(AbstractFunctionClass(0).tpe)
+
+ closureClass setInfoAndEnter new ClassInfoType(closureParents, newScope, closureClass)
+
+ val outerField = (
+ closureClass
+ newValue(nme.OUTER, impl.pos, PrivateLocal | PARAMACCESSOR)
+ setInfoAndEnter clazz.tpe
+ )
+ val applyMethod = (
+ closureClass
+ newMethod(nme.apply, impl.pos, FINAL)
+ setInfoAndEnter MethodType(Nil, ObjectClass.tpe)
+ )
+ val outerFieldDef = ValDef(outerField)
val closureClassTyper = localTyper.atOwner(closureClass)
- val applyMethodTyper = closureClassTyper.atOwner(applyMethod)
+ val applyMethodTyper = closureClassTyper.atOwner(applyMethod)
val constrStatTransformer = new Transformer {
override def transform(tree: Tree): Tree = tree match {
@@ -513,8 +494,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
}
else tree
case _ =>
- changeOwner.changeOwner(tree)
- tree
+ tree.changeOwner(impl.symbol -> applyMethod)
}
}
}
@@ -524,31 +504,29 @@ abstract class Constructors extends Transform with ast.TreeDSL {
val applyMethodDef = DefDef(
sym = applyMethod,
- vparamss = List(List()),
+ vparamss = ListOfNil,
rhs = Block(applyMethodStats, gen.mkAttributedRef(BoxedUnit_UNIT)))
ClassDef(
sym = closureClass,
constrMods = Modifiers(0),
vparamss = List(List(outerFieldDef)),
- argss = List(List()),
+ argss = ListOfNil,
body = List(applyMethodDef),
superPos = impl.pos)
}
}
- def delayedInitCall(closure: Tree) =
- localTyper.typed {
- atPos(impl.pos) {
- Apply(
- Select(This(clazz), delayedInitMethod),
- List(New(TypeTree(closure.symbol.tpe), List(List(This(clazz))))))
- }
- }
+ def delayedInitCall(closure: Tree) = localTyper.typedPos(impl.pos) {
+ gen.mkMethodCall(This(clazz), delayedInitMethod, Nil, List(New(closure.symbol.tpe, This(clazz))))
+ }
/** Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */
def splitAtSuper(stats: List[Tree]) = {
- def isConstr(tree: Tree) = (tree.symbol ne null) && tree.symbol.isConstructor
+ def isConstr(tree: Tree): Boolean = tree match {
+ case Block(_, expr) => isConstr(expr) // SI-6481 account for named argument blocks
+ case _ => (tree.symbol ne null) && tree.symbol.isConstructor
+ }
val (pre, rest0) = stats span (!isConstr(_))
val (supercalls, rest) = rest0 span (isConstr(_))
(pre ::: supercalls, rest)
@@ -556,6 +534,13 @@ abstract class Constructors extends Transform with ast.TreeDSL {
var (uptoSuperStats, remainingConstrStats) = splitAtSuper(constrStatBuf.toList)
+ /** XXX This is not corect: remainingConstrStats.nonEmpty excludes too much,
+ * but excluding it includes too much. The constructor sequence being mimicked
+ * needs to be reproduced with total fidelity.
+ *
+ * See test case files/run/bug4680.scala, the output of which is wrong in many
+ * particulars.
+ */
val needsDelayedInit =
(clazz isSubClass DelayedInitClass) /*&& !(defBuf exists isInitDef)*/ && remainingConstrStats.nonEmpty
@@ -566,33 +551,28 @@ abstract class Constructors extends Transform with ast.TreeDSL {
}
// Assemble final constructor
- defBuf += treeCopy.DefDef(
- constr, constr.mods, constr.name, constr.tparams, constr.vparamss, constr.tpt,
+ defBuf += deriveDefDef(constr)(_ =>
treeCopy.Block(
constrBody,
paramInits ::: constrPrefixBuf.toList ::: uptoSuperStats :::
guardSpecializedInitializer(remainingConstrStats),
- constrBody.expr));
+ constrBody.expr))
// Followed by any auxiliary constructors
defBuf ++= auxConstructorBuf
// Unlink all fields that can be dropped from class scope
- for (sym <- clazz.info.decls.toList)
- if (!mustbeKept(sym)) {
- // println("dropping "+sym+sym.locationString)
- clazz.info.decls unlink sym
- }
+ for (sym <- clazz.info.decls ; if !mustbeKept(sym))
+ clazz.info.decls unlink sym
// Eliminate all field definitions that can be dropped from template
- treeCopy.Template(impl, impl.parents, impl.self,
- defBuf.toList filter (stat => mustbeKept(stat.symbol)))
+ deriveTemplate(impl)(_ => defBuf.toList filter (stat => mustbeKept(stat.symbol)))
} // transformClassTemplate
override def transform(tree: Tree): Tree =
tree match {
- case ClassDef(mods, name, tparams, impl) if !tree.symbol.isInterface && !isValueClass(tree.symbol) =>
- treeCopy.ClassDef(tree, mods, name, tparams, transformClassTemplate(impl))
+ case ClassDef(_,_,_,_) if !tree.symbol.isInterface && !isPrimitiveValueClass(tree.symbol) =>
+ deriveClassDef(tree)(transformClassTemplate)
case _ =>
super.transform(tree)
}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index b32cba7..df220b7 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -1,17 +1,18 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package transform
-import scala.tools.nsc.symtab.classfile.ClassfileConstants._
+import scala.reflect.internal.ClassfileConstants._
import scala.collection.{ mutable, immutable }
import symtab._
import Flags._
abstract class Erasure extends AddInterfaces
+ with scala.reflect.internal.transform.Erasure
with typechecker.Analyzer
with TypingTransformers
with ast.TreeDSL
@@ -29,182 +30,13 @@ abstract class Erasure extends AddInterfaces
// -------- erasure on types --------------------------------------------------------
- /** An extractor object for generic arrays */
- object GenericArray {
+ // convert a numeric with a toXXX method
+ def numericConversion(tree: Tree, numericSym: Symbol): Tree = {
+ val mname = newTermName("to" + numericSym.name)
+ val conversion = tree.tpe member mname
- /** Is `tp` an unbounded generic type (i.e. which could be instantiated
- * with primitive as well as class types)?.
- */
- private def genericCore(tp: Type): Type = tp.normalize match {
- case TypeRef(_, sym, _) if sym.isAbstractType && !sym.owner.isJavaDefined =>
- tp
- case ExistentialType(tparams, restp) =>
- genericCore(restp)
- case _ =>
- NoType
- }
-
- /** If `tp` is of the form Array[...Array[T]...] where `T` is an abstract type
- * then Some(N, T) where N is the number of Array constructors enclosing `T`,
- * otherwise None. Existentials on any level are ignored.
- */
- def unapply(tp: Type): Option[(Int, Type)] = tp.normalize match {
- case TypeRef(_, ArrayClass, List(arg)) =>
- genericCore(arg) match {
- case NoType =>
- unapply(arg) match {
- case Some((level, core)) => Some((level + 1, core))
- case None => None
- }
- case core =>
- Some(1, core)
- }
- case ExistentialType(tparams, restp) =>
- unapply(restp)
- case _ =>
- None
- }
- }
-
- // A type function from T => Class[U], used to determine the return
- // type of getClass calls. The returned type is:
- //
- // 1. If T is a value type, Class[T].
- // 2. If T is anonymous or a refinement type, calculate the intersection
- // dominator of the parents T', and Class[_ <: T'].
- // 3. If T is a phantom type (Any or AnyVal), Class[_].
- // 4. Otherwise, Class[_ <: T].
- //
- // Note: AnyVal cannot be Class[_ <: AnyVal] because if the static type of the
- // receiver is AnyVal, it implies the receiver is boxed, so the correct
- // class object is that of java.lang.Integer, not Int.
- //
- // TODO: If T is final, return type could be Class[T]. Should it?
- def getClassReturnType(tp: Type): Type = {
- def mkClass(targs: List[Type]) = typeRef(ClassClass.tpe.prefix, ClassClass, targs)
- val tparams = ClassClass.typeParams
- val sym = tp.typeSymbol
-
- if (tparams.isEmpty) mkClass(Nil) // call must be coming post-erasure
- else if (isValueClass(sym)) mkClass(List(tp.widen))
- else if (sym.isLocalClass) getClassReturnType(erasure.intersectionDominator(tp.parents))
- else {
- val eparams = typeParamsToExistentials(ClassClass, tparams)
- val upperBound = if (isPhantomClass(sym)) AnyClass.tpe else tp.widen
-
- existentialAbstraction(
- eparams,
- mkClass(List(eparams.head setInfo TypeBounds.upper(upperBound) tpe))
- )
- }
- }
-
- private def unboundedGenericArrayLevel(tp: Type): Int = tp match {
- case GenericArray(level, core) if !(core <:< AnyRefClass.tpe) => level
- case _ => 0
- }
-
- // @M #2585 when generating a java generic signature that includes a selection of an inner class p.I, (p = `pre`, I = `cls`)
- // must rewrite to p'.I, where p' refers to the class that directly defines the nested class I
- // see also #2585 marker in javaSig: there, type arguments must be included (use pre.baseType(cls.owner))
- // requires cls.isClass
- @inline private def rebindInnerClass(pre: Type, cls: Symbol): Type =
- if (cls.owner.isClass) cls.owner.tpe else pre // why not cls.isNestedClass?
-
- /** The erasure |T| of a type T. This is:
- *
- * - For a constant type, itself.
- * - For a type-bounds structure, the erasure of its upper bound.
- * - For every other singleton type, the erasure of its supertype.
- * - For a typeref scala.Array+[T] where T is an abstract type, AnyRef.
- * - For a typeref scala.Array+[T] where T is not an abstract type, scala.Array+[|T|].
- * - For a typeref scala.Any or scala.AnyVal, java.lang.Object.
- * - For a typeref scala.Unit, scala.runtime.BoxedUnit.
- * - For a typeref P.C[Ts] where C refers to a class, |P|.C.
- * (Where P is first rebound to the class that directly defines C.)
- * - For a typeref P.C[Ts] where C refers to an alias type, the erasure of C's alias.
- * - For a typeref P.C[Ts] where C refers to an abstract type, the
- * erasure of C's upper bound.
- * - For a non-empty type intersection (possibly with refinement),
- * the erasure of its first parent.
- * - For an empty type intersection, java.lang.Object.
- * - For a method type (Fs)scala.Unit, (|Fs|)scala#Unit.
- * - For any other method type (Fs)Y, (|Fs|)|T|.
- * - For a polymorphic type, the erasure of its result type.
- * - For the class info type of java.lang.Object, the same type without any parents.
- * - For a class info type of a value class, the same type without any parents.
- * - For any other class info type with parents Ps, the same type with
- * parents |Ps|, but with duplicate references of Object removed.
- * - for all other types, the type itself (with any sub-components erased)
- */
- object erasure extends TypeMap {
- // Compute the dominant part of the intersection type with given `parents` according to new spec.
- def intersectionDominator(parents: List[Type]): Type =
- if (parents.isEmpty) ObjectClass.tpe
- else {
- val psyms = parents map (_.typeSymbol)
- if (psyms contains ArrayClass) {
- // treat arrays specially
- arrayType(
- intersectionDominator(
- parents filter (_.typeSymbol == ArrayClass) map (_.typeArgs.head)))
- } else {
- // implement new spec for erasure of refined types.
- def isUnshadowed(psym: Symbol) =
- !(psyms exists (qsym => (psym ne qsym) && (qsym isNonBottomSubClass psym)))
- val cs = parents.iterator.filter { p => // isUnshadowed is a bit expensive, so try classes first
- val psym = p.typeSymbol
- psym.initialize
- psym.isClass && !psym.isTrait && isUnshadowed(psym)
- }
- (if (cs.hasNext) cs else parents.iterator.filter(p => isUnshadowed(p.typeSymbol))).next()
- }
- }
-
- def apply(tp: Type): Type = {
- tp match {
- case ConstantType(_) =>
- tp
- case st: SubType =>
- apply(st.supertype)
- case TypeRef(pre, sym, args) =>
- if (sym == ArrayClass)
- if (unboundedGenericArrayLevel(tp) == 1) ObjectClass.tpe
- else if (args.head.typeSymbol == NothingClass || args.head.typeSymbol == NullClass) arrayType(ObjectClass.tpe)
- else typeRef(apply(pre), sym, args map this)
- else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass || sym == NotNullClass) erasedTypeRef(ObjectClass)
- else if (sym == UnitClass) erasedTypeRef(BoxedUnitClass)
- else if (sym.isRefinementClass) apply(intersectionDominator(tp.parents))
- else if (sym.isClass) typeRef(apply(rebindInnerClass(pre, sym)), sym, List()) // #2585
- else apply(sym.info) // alias type or abstract type
- case PolyType(tparams, restpe) =>
- apply(restpe)
- case ExistentialType(tparams, restpe) =>
- apply(restpe)
- case mt @ MethodType(params, restpe) =>
- MethodType(
- cloneSymbols(params) map (p => p.setInfo(apply(p.tpe))),
- if (restpe.typeSymbol == UnitClass)
- erasedTypeRef(UnitClass)
- else if (settings.YdepMethTpes.value)
- // this replaces each typeref that refers to an argument by the type `p.tpe` of the actual argument p (p in params)
- apply(mt.resultType(params map (_.tpe)))
- else
- apply(restpe))
- case RefinedType(parents, decls) =>
- apply(intersectionDominator(parents))
- case AnnotatedType(_, atp, _) =>
- apply(atp)
- case ClassInfoType(parents, decls, clazz) =>
- ClassInfoType(
- if (clazz == ObjectClass || isValueClass(clazz)) Nil
- else if (clazz == ArrayClass) List(erasedTypeRef(ObjectClass))
- else removeDoubleObject(parents map this),
- decls, clazz)
- case _ =>
- mapOver(tp)
- }
- }
+ assert(conversion != NoSymbol, tree + " => " + numericSym)
+ atPos(tree.pos)(Apply(Select(tree, conversion), Nil))
}
private object NeedsSigCollector extends TypeCollector(false) {
@@ -220,8 +52,8 @@ abstract class Erasure extends AddInterfaces
else if (!sym.owner.isPackageClass) traverse(pre)
case PolyType(_, _) | ExistentialType(_, _) =>
result = true
- case RefinedType(parents, decls) =>
- if (!parents.isEmpty) traverse(parents.head)
+ case RefinedType(parents, _) =>
+ parents foreach traverse
case ClassInfoType(parents, _, _) =>
parents foreach traverse
case AnnotatedType(_, atp, _) =>
@@ -233,6 +65,7 @@ abstract class Erasure extends AddInterfaces
}
}
+ override protected def verifyJavaErasure = settings.Xverify.value || settings.debug.value
def needsJavaSig(tp: Type) = !settings.Ynogenericsig.value && NeedsSigCollector.collect(tp)
// only refer to type params that will actually make it into the sig, this excludes:
@@ -243,9 +76,7 @@ abstract class Erasure extends AddInterfaces
!sym.isHigherOrderTypeParameter &&
sym.isTypeParameterOrSkolem && (
(initialSymbol.enclClassChain.exists(sym isNestedIn _)) ||
- traceSig("isMethod", (initialSymbol, initialSymbol.typeParams)) {
- (initialSymbol.isMethod && initialSymbol.typeParams.contains(sym))
- }
+ (initialSymbol.isMethod && initialSymbol.typeParams.contains(sym))
)
)
@@ -262,12 +93,9 @@ abstract class Erasure extends AddInterfaces
case ch => last = ch ; ch
}
}
- // for debugging signatures: traces logic given system property
- private val traceProp = (sys.BooleanProp keyExists "scalac.sigs.trace").value // performance: get the value here
- private val traceSig = util.Tracer(traceProp)
/** This object is only used for sanity testing when -check:genjvm is set.
- * In that case we make sure that the erasure of the `normalized' type
+ * In that case we make sure that the erasure of the `normalized` type
* is the same as the erased type that's generated. Normalization means
* unboxing some primitive types and further simplifications as they are done in jsig.
*/
@@ -334,16 +162,51 @@ abstract class Erasure extends AddInterfaces
}
}
+ private def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.normalize match {
+ case RefinedType(parents, _) => parents map (_.normalize)
+ case tp => tp :: Nil
+ }
+
+ private def isErasedValueType(tpe: Type) = tpe.isInstanceOf[ErasedValueType]
+
/** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return
* type for constructors.
*/
- def javaSig(sym0: Symbol, info: Type): Option[String] = atPhase(currentRun.erasurePhase) {
+ def javaSig(sym0: Symbol, info: Type): Option[String] = beforeErasure {
+ val isTraitSignature = sym0.enclClass.isTrait
+
+ def superSig(parents: List[Type]) = {
+ val ps = (
+ if (isTraitSignature) {
+ // java is unthrilled about seeing interfaces inherit from classes
+ val ok = parents filter (p => p.typeSymbol.isTrait || p.typeSymbol.isInterface)
+ // traits should always list Object.
+ if (ok.isEmpty || ok.head.typeSymbol != ObjectClass) ObjectClass.tpe :: ok
+ else ok
+ }
+ else parents
+ )
+ (ps map boxedSig).mkString
+ }
def boxedSig(tp: Type) = jsig(tp, primitiveOK = false)
-
- def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.normalize match {
- case RefinedType(parents, _) => parents map normalize
- case tp => tp :: Nil
+ def boundsSig(bounds: List[Type]) = {
+ val (isTrait, isClass) = bounds partition (_.typeSymbol.isTrait)
+ val classPart = isClass match {
+ case Nil => ":" // + boxedSig(ObjectClass.tpe)
+ case x :: _ => ":" + boxedSig(x)
+ }
+ classPart :: (isTrait map boxedSig) mkString ":"
}
+ def paramSig(tsym: Symbol) = tsym.name + boundsSig(hiBounds(tsym.info.bounds))
+ def polyParamSig(tparams: List[Symbol]) = (
+ if (tparams.isEmpty) ""
+ else tparams map paramSig mkString ("<", "", ">")
+ )
+
+ // Anything which could conceivably be a module (i.e. isn't known to be
+ // a type parameter or similar) must go through here or the signature is
+ // likely to end up with Foo<T>.Empty where it needs Foo<T>.Empty$.
+ def fullNameInSig(sym: Symbol) = "L" + beforeIcode(sym.javaBinaryName)
def jsig(tp0: Type, existentiallyBound: List[Symbol] = Nil, toplevel: Boolean = false, primitiveOK: Boolean = true): String = {
val tp = tp0.dealias
@@ -362,19 +225,33 @@ abstract class Erasure extends AddInterfaces
} else {
boxedSig(tp)
}
- def classSig: String =
- "L"+atPhase(currentRun.icodePhase)(sym.fullName + global.genJVM.moduleSuffix(sym)).replace('.', '/')
- def classSigSuffix: String =
- "."+sym.name
+ def classSig = {
+ val preRebound = pre.baseType(sym.owner) // #2585
+ dotCleanup(
+ (
+ if (needsJavaSig(preRebound)) {
+ val s = jsig(preRebound, existentiallyBound)
+ if (s.charAt(0) == 'L') s.substring(0, s.length - 1) + "." + sym.javaSimpleName
+ else fullNameInSig(sym)
+ }
+ else fullNameInSig(sym)
+ ) + (
+ if (args.isEmpty) "" else
+ "<"+(args map argSig).mkString+">"
+ ) + (
+ ";"
+ )
+ )
+ }
- // If args isEmpty, Array is being used as a higher-kinded type
+ // If args isEmpty, Array is being used as a type constructor
if (sym == ArrayClass && args.nonEmpty) {
if (unboundedGenericArrayLevel(tp) == 1) jsig(ObjectClass.tpe)
else ARRAY_TAG.toString+(args map (jsig(_))).mkString
}
else if (isTypeParameterInSig(sym, sym0)) {
assert(!sym.isAliasType, "Unexpected alias type: " + sym)
- TVAR_TAG.toString+sym.name+";"
+ "" + TVAR_TAG + sym.name + ";"
}
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass)
jsig(ObjectClass.tpe)
@@ -384,88 +261,62 @@ abstract class Erasure extends AddInterfaces
jsig(RuntimeNothingClass.tpe)
else if (sym == NullClass)
jsig(RuntimeNullClass.tpe)
- else if (isValueClass(sym)) {
+ else if (isPrimitiveValueClass(sym)) {
if (!primitiveOK) jsig(ObjectClass.tpe)
else if (sym == UnitClass) jsig(BoxedUnitClass.tpe)
else abbrvTag(sym).toString
}
- else if (sym.isClass) {
- val preRebound = pre.baseType(sym.owner) // #2585
- traceSig("sym.isClass", (sym.ownerChain, preRebound, sym0.enclClassChain)) {
- dotCleanup(
- (
- if (needsJavaSig(preRebound)) {
- val s = jsig(preRebound, existentiallyBound)
- if (s.charAt(0) == 'L') s.substring(0, s.length - 1) + classSigSuffix
- else classSig
- }
- else classSig
- ) + (
- if (args.isEmpty) "" else
- "<"+(args map argSig).mkString+">"
- ) + (
- ";"
- )
- )
+ else if (sym.isDerivedValueClass) {
+ val unboxed = sym.derivedValueClassUnbox.info.finalResultType
+ val unboxedSeen = (tp memberType sym.derivedValueClassUnbox).finalResultType
+ def unboxedMsg = if (unboxed == unboxedSeen) "" else s", seen within ${sym.simpleName} as $unboxedSeen"
+ logResult(s"Erasure of value class $sym (underlying type $unboxed$unboxedMsg) is") {
+ if (isPrimitiveValueType(unboxedSeen) && !primitiveOK)
+ classSig
+ else
+ jsig(unboxedSeen, existentiallyBound, toplevel, primitiveOK)
}
}
- else jsig(erasure(tp), existentiallyBound, toplevel, primitiveOK)
+ else if (sym.isClass)
+ classSig
+ else
+ jsig(erasure(sym0)(tp), existentiallyBound, toplevel, primitiveOK)
case PolyType(tparams, restpe) =>
assert(tparams.nonEmpty)
- def boundSig(bounds: List[Type]) = {
- val (isTrait, isClass) = bounds partition (_.typeSymbol.isTrait)
-
- ":" + (
- if (isClass.isEmpty) "" else boxedSig(isClass.head)
- ) + (
- isTrait map (x => ":" + boxedSig(x)) mkString
- )
- }
- def paramSig(tsym: Symbol) = tsym.name + boundSig(hiBounds(tsym.info.bounds))
+ val poly = if (toplevel) polyParamSig(tparams) else ""
+ poly + jsig(restpe)
- val paramString = if (toplevel) tparams map paramSig mkString ("<", "", ">") else ""
- traceSig("PolyType", (tparams, restpe))(paramString + jsig(restpe))
case MethodType(params, restpe) =>
- "("+(params map (_.tpe) map (jsig(_))).mkString+")"+
- (if (restpe.typeSymbol == UnitClass || sym0.isConstructor) VOID_TAG.toString else jsig(restpe))
+ val buf = new StringBuffer("(")
+ params foreach (p => buf append jsig(p.tpe))
+ buf append ")"
+ buf append (if (restpe.typeSymbol == UnitClass || sym0.isConstructor) VOID_TAG.toString else jsig(restpe))
+ buf.toString
+
case RefinedType(parent :: _, decls) =>
boxedSig(parent)
case ClassInfoType(parents, _, _) =>
- (parents map (boxedSig(_))).mkString
+ superSig(parents)
case AnnotatedType(_, atp, _) =>
jsig(atp, existentiallyBound, toplevel, primitiveOK)
case BoundedWildcardType(bounds) =>
println("something's wrong: "+sym0+":"+sym0.tpe+" has a bounded wildcard type")
jsig(bounds.hi, existentiallyBound, toplevel, primitiveOK)
case _ =>
- val etp = erasure(tp)
+ val etp = erasure(sym0)(tp)
if (etp eq tp) throw new UnknownSig
else jsig(etp)
}
}
- traceSig("javaSig", (sym0, info)) {
- if (needsJavaSig(info)) {
- try Some(jsig(info, toplevel = true))
- catch { case ex: UnknownSig => None }
- }
- else None
+ if (needsJavaSig(info)) {
+ try Some(jsig(info, toplevel = true))
+ catch { case ex: UnknownSig => None }
}
+ else None
}
class UnknownSig extends Exception
- /** Type reference after erasure */
- def erasedTypeRef(sym: Symbol): Type =
- typeRef(erasure(sym.owner.tpe), sym, List())
-
- /** Remove duplicate references to class Object in a list of parent classes */
- private def removeDoubleObject(tps: List[Type]): List[Type] = tps match {
- case List() => List()
- case tp :: tps1 =>
- if (tp.typeSymbol == ObjectClass) tp :: tps1.filter(_.typeSymbol != ObjectClass)
- else tp :: removeDoubleObject(tps1)
- }
-
/** The symbol's erased info. This is the type's erasure, except for the following symbols:
*
* - For $asInstanceOf : [T]T
@@ -474,99 +325,265 @@ abstract class Erasure extends AddInterfaces
* - For Array[T].<init> : {scala#Int)Array[T]
* - For a type parameter : A type bounds type consisting of the erasures of its bounds.
*/
- def transformInfo(sym: Symbol, tp: Type): Type = {
- if (sym == Object_asInstanceOf)
- sym.info
- else if (sym == Object_isInstanceOf || sym == ArrayClass)
- PolyType(sym.info.typeParams, erasure(sym.info.resultType))
- else if (sym.isAbstractType)
- TypeBounds(WildcardType, WildcardType)
- else if (sym.isTerm && sym.owner == ArrayClass) {
- if (sym.isClassConstructor)
- tp match {
- case MethodType(params, TypeRef(pre, sym, args)) =>
- MethodType(cloneSymbols(params) map (p => p.setInfo(erasure(p.tpe))),
- typeRef(erasure(pre), sym, args))
- }
- else if (sym.name == nme.apply)
- tp
- else if (sym.name == nme.update)
- (tp: @unchecked) match {
- case MethodType(List(index, tvar), restpe) =>
- MethodType(List(index.cloneSymbol.setInfo(erasure(index.tpe)), tvar),
- erasedTypeRef(UnitClass))
- }
- else erasure(tp)
- } else if (
- sym.owner != NoSymbol &&
- sym.owner.owner == ArrayClass &&
- sym == Array_update.paramss.head(1)) {
- // special case for Array.update: the non-erased type remains, i.e. (Int,A)Unit
- // since the erasure type map gets applied to every symbol, we have to catch the
- // symbol here
- tp
- } else {
-/*
- val erased =
- if (sym.isGetter && sym.tpe.isInstanceOf[MethodType])
- erasure mapOver sym.tpe // for getters, unlike for normal methods, always convert Unit to BoxedUnit.
- else
- erasure(tp)
-*/
- transformMixinInfo(erasure(tp))
- }
- }
+ override def transformInfo(sym: Symbol, tp: Type): Type =
+ transformMixinInfo(super.transformInfo(sym, tp))
val deconstMap = new TypeMap {
+ // For some reason classOf[Foo] creates ConstantType(Constant(tpe)) with an actual Type for tpe,
+ // which is later translated to a Class. Unfortunately that means we have bugs like the erasure
+ // of Class[Foo] and classOf[Bar] not being seen as equivalent, leading to duplicate method
+ // generation and failing bytecode. See ticket #4753.
def apply(tp: Type): Type = tp match {
- case PolyType(_, _) => mapOver(tp)
- case MethodType(_, _) => mapOver(tp) // nullarymethod was eliminated during uncurry
- case _ => tp.deconst
+ case PolyType(_, _) => mapOver(tp)
+ case MethodType(_, _) => mapOver(tp) // nullarymethod was eliminated during uncurry
+ case ConstantType(Constant(_: Type)) => ClassClass.tpe // all classOfs erase to Class
+ case _ => tp.deconst
}
}
+
+ // ## requires a little translation
+ private lazy val poundPoundMethods = Set[Symbol](Any_##, Object_##)
+
// Methods on Any/Object which we rewrite here while we still know what
// is a primitive and what arrived boxed.
- private lazy val interceptedMethods = Set[Symbol](Any_##, Object_##, Any_getClass) ++ (
- // Each value class has its own getClass for ultra-precise class object typing.
- ScalaValueClasses map (_.tpe member nme.getClass_)
- )
+ private lazy val interceptedMethods = poundPoundMethods ++ primitiveGetClassMethods
// -------- erasure on trees ------------------------------------------
override def newTyper(context: Context) = new Eraser(context)
- /** An extractor object for boxed expressions
+ private def safeToRemoveUnbox(cls: Symbol): Boolean =
+ (cls == definitions.NullClass) || isBoxedValueClass(cls)
+
+ /** An extractor object for unboxed expressions (maybe subsumed by posterasure?) */
+ object Unboxed {
+ def unapply(tree: Tree): Option[Tree] = tree match {
+ case Apply(fn, List(arg)) if isUnbox(fn.symbol) && safeToRemoveUnbox(arg.tpe.typeSymbol) =>
+ Some(arg)
+ case Apply(
+ TypeApply(
+ cast @ Select(
+ Apply(
+ sel @ Select(arg, acc),
+ List()),
+ asinstanceof),
+ List(tpt)),
+ List())
+ if cast.symbol == Object_asInstanceOf &&
+ tpt.tpe.typeSymbol.isDerivedValueClass &&
+ sel.symbol == tpt.tpe.typeSymbol.derivedValueClassUnbox =>
+ Some(arg)
+ case _ =>
+ None
+ }
+ }
+
+ /** An extractor object for boxed expressions (maybe subsumed by posterasure?) */
object Boxed {
def unapply(tree: Tree): Option[Tree] = tree match {
+ case Apply(Select(New(tpt), nme.CONSTRUCTOR), List(arg)) if (tpt.tpe.typeSymbol.isDerivedValueClass) =>
+ Some(arg)
case LabelDef(name, params, Boxed(rhs)) =>
Some(treeCopy.LabelDef(tree, name, params, rhs) setType rhs.tpe)
- case Select(_, _) if tree.symbol == BoxedUnit_UNIT =>
- Some(Literal(()) setPos tree.pos setType UnitClass.tpe)
- case Block(List(unboxed), ret @ Select(_, _)) if ret.symbol == BoxedUnit_UNIT =>
- Some(if (unboxed.tpe.typeSymbol == UnitClass) tree
- else Block(List(unboxed), Literal(()) setPos tree.pos setType UnitClass.tpe))
- case Apply(fn, List(unboxed)) if isBox(fn.symbol) =>
- Some(unboxed)
case _ =>
None
}
}
- */
+
+ class ComputeBridges(unit: CompilationUnit, root: Symbol) {
+ assert(phase == currentRun.erasurePhase, phase)
+
+ var toBeRemoved = immutable.Set[Symbol]()
+ val site = root.thisType
+ val bridgesScope = newScope
+ val bridgeTarget = mutable.HashMap[Symbol, Symbol]()
+ var bridges = List[Tree]()
+
+ val opc = beforeExplicitOuter {
+ new overridingPairs.Cursor(root) {
+ override def parents = List(root.info.firstParent)
+ override def exclude(sym: Symbol) = !sym.isMethod || sym.isPrivate || super.exclude(sym)
+ }
+ }
+
+ def compute(): (List[Tree], immutable.Set[Symbol]) = {
+ while (opc.hasNext) {
+ val member = opc.overriding
+ val other = opc.overridden
+ //println("bridge? " + member + ":" + member.tpe + member.locationString + " to " + other + ":" + other.tpe + other.locationString)//DEBUG
+ if (beforeExplicitOuter(!member.isDeferred))
+ checkPair(member, other)
+
+ opc.next
+ }
+ (bridges, toBeRemoved)
+ }
+
+ /** Check that a bridge only overrides members that are also overridden by the original member.
+ * This test is necessary only for members that have a value class in their type.
+ * Such members are special because their types after erasure and after post-erasure differ/.
+ * This means we generate them after erasure, but the post-erasure transform might introduce
+ * a name clash. The present method guards against these name clashes.
+ *
+ * @param member The original member
+ * @param other The overidden symbol for which the bridge was generated
+ * @param bridge The bridge
+ */
+ def checkBridgeOverrides(member: Symbol, other: Symbol, bridge: Symbol): Boolean = {
+ def fulldef(sym: Symbol) =
+ if (sym == NoSymbol) sym.toString
+ else s"$sym: ${sym.tpe} in ${sym.owner}"
+ var noclash = true
+ def clashError(what: String) = {
+ noclash = false
+ unit.error(
+ if (member.owner == root) member.pos else root.pos,
+ sm"""bridge generated for member ${fulldef(member)}
+ |which overrides ${fulldef(other)}
+ |clashes with definition of $what;
+ |both have erased type ${afterPostErasure(bridge.tpe)}""")
+ }
+ for (bc <- root.baseClasses) {
+ if (settings.debug.value)
+ afterPostErasure(println(
+ sm"""check bridge overrides in $bc
+ |${bc.info.nonPrivateDecl(bridge.name)}
+ |${site.memberType(bridge)}
+ |${site.memberType(bc.info.nonPrivateDecl(bridge.name) orElse IntClass)}
+ |${(bridge.matchingSymbol(bc, site))}"""))
+
+ def overriddenBy(sym: Symbol) =
+ sym.matchingSymbol(bc, site).alternatives filter (sym => !sym.isBridge)
+ for (overBridge <- afterPostErasure(overriddenBy(bridge))) {
+ if (overBridge == member) {
+ clashError("the member itself")
+ } else {
+ val overMembers = overriddenBy(member)
+ if (!overMembers.exists(overMember =>
+ afterPostErasure(overMember.tpe =:= overBridge.tpe))) {
+ clashError(fulldef(overBridge))
+ }
+ }
+ }
+ }
+ noclash
+ }
+
+ def checkPair(member: Symbol, other: Symbol) {
+ val otpe = specialErasure(root)(other.tpe)
+ val bridgeNeeded = afterErasure (
+ !member.isMacro &&
+ !(other.tpe =:= member.tpe) &&
+ !(deconstMap(other.tpe) =:= deconstMap(member.tpe)) &&
+ { var e = bridgesScope.lookupEntry(member.name)
+ while ((e ne null) && !((e.sym.tpe =:= otpe) && (bridgeTarget(e.sym) == member)))
+ e = bridgesScope.lookupNextEntry(e)
+ (e eq null)
+ }
+ )
+ if (!bridgeNeeded)
+ return
+
+ val newFlags = (member.flags | BRIDGE) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
+ val bridge = other.cloneSymbolImpl(root, newFlags) setPos root.pos
+
+ debuglog("generating bridge from %s (%s): %s to %s: %s".format(
+ other, flagsToString(newFlags),
+ otpe + other.locationString, member,
+ specialErasure(root)(member.tpe) + member.locationString)
+ )
+
+ // the parameter symbols need to have the new owner
+ bridge setInfo (otpe cloneInfo bridge)
+ bridgeTarget(bridge) = member
+
+ if (!(member.tpe exists (_.typeSymbol.isDerivedValueClass)) ||
+ checkBridgeOverrides(member, other, bridge)) {
+ afterErasure(root.info.decls enter bridge)
+ if (other.owner == root) {
+ afterErasure(root.info.decls.unlink(other))
+ toBeRemoved += other
+ }
+
+ bridgesScope enter bridge
+ bridges ::= makeBridgeDefDef(bridge, member, other)
+ }
+ }
+
+ def makeBridgeDefDef(bridge: Symbol, member: Symbol, other: Symbol) = afterErasure {
+ // type checking ensures we can safely call `other`, but unless `member.tpe <:< other.tpe`,
+ // calling `member` is not guaranteed to succeed in general, there's
+ // nothing we can do about this, except for an unapply: when this subtype test fails,
+ // return None without calling `member`
+ //
+ // TODO: should we do this for user-defined unapplies as well?
+ // does the first argument list have exactly one argument -- for user-defined unapplies we can't be sure
+ def maybeWrap(bridgingCall: Tree): Tree = {
+ val guardExtractor = ( // can't statically know which member is going to be selected, so don't let this depend on member.isSynthetic
+ (member.name == nme.unapply || member.name == nme.unapplySeq)
+ && !afterErasure((member.tpe <:< other.tpe))) // no static guarantees (TODO: is the subtype test ever true?)
+
+ import CODE._
+ val _false = FALSE_typed
+ val pt = member.tpe.resultType
+ lazy val zero =
+ if (_false.tpe <:< pt) _false
+ else if (NoneModule.tpe <:< pt) REF(NoneModule)
+ else EmptyTree
+
+ if (guardExtractor && (zero ne EmptyTree)) {
+ val typeTest = gen.mkIsInstanceOf(REF(bridge.firstParam), member.tpe.params.head.tpe)
+ IF (typeTest) THEN bridgingCall ELSE zero
+ } else bridgingCall
+ }
+ val rhs = member.tpe match {
+ case MethodType(Nil, ConstantType(c)) => Literal(c)
+ case _ =>
+ val sel: Tree = Select(This(root), member)
+ val bridgingCall = (sel /: bridge.paramss)((fun, vparams) => Apply(fun, vparams map Ident))
+
+ maybeWrap(bridgingCall)
+ }
+ atPos(bridge.pos)(DefDef(bridge, rhs))
+ }
+ }
/** The modifier typer which retypes with erased types. */
- class Eraser(context: Context) extends Typer(context) {
- private def safeToRemoveUnbox(cls: Symbol): Boolean =
- (cls == definitions.NullClass) || isBoxedValueClass(cls)
-
- /** Box `tree' of unboxed type */
- private def box(tree: Tree): Tree = tree match {
- case LabelDef(name, params, rhs) =>
- val rhs1 = box(rhs)
- treeCopy.LabelDef(tree, name, params, rhs1) setType rhs1.tpe
+ class Eraser(_context: Context) extends Typer(_context) {
+
+ private def isPrimitiveValueType(tpe: Type) = isPrimitiveValueClass(tpe.typeSymbol)
+
+ private def isDifferentErasedValueType(tpe: Type, other: Type) =
+ isErasedValueType(tpe) && (tpe ne other)
+
+ private def isPrimitiveValueMember(sym: Symbol) =
+ sym != NoSymbol && isPrimitiveValueClass(sym.owner)
+
+ @inline private def box(tree: Tree, target: => String): Tree = {
+ val result = box1(tree)
+ log(s"boxing ${tree.summaryString}: ${tree.tpe} into $target: ${result.tpe}")
+ result
+ }
+
+ /** Box `tree` of unboxed type */
+ private def box1(tree: Tree): Tree = tree match {
+ case LabelDef(_, _, _) =>
+ val ldef = deriveLabelDef(tree)(box1)
+ ldef setType ldef.rhs.tpe
case _ =>
- typedPos(tree.pos)(tree.tpe.typeSymbol match {
+ val tree1 = tree.tpe match {
+ case ErasedValueType(tref) =>
+ val clazz = tref.sym
+ tree match {
+ case Unboxed(arg) if arg.tpe.typeSymbol == clazz =>
+ log("shortcircuiting unbox -> box "+arg); arg
+ case _ =>
+ New(clazz, cast(tree, underlyingOfValueClass(clazz)))
+ }
+ case _ =>
+ tree.tpe.typeSymbol match {
case UnitClass =>
- if (treeInfo isPureExpr tree) REF(BoxedUnit_UNIT)
+ if (treeInfo isExprSafeToInline tree) REF(BoxedUnit_UNIT)
else BLOCK(tree, REF(BoxedUnit_UNIT))
case NothingClass => tree // a non-terminating expression doesn't need boxing
case x =>
@@ -579,12 +596,20 @@ abstract class Erasure extends AddInterfaces
* fields (see TupleX). (ID)
*/
case Apply(boxFun, List(arg)) if isUnbox(tree.symbol) && safeToRemoveUnbox(arg.tpe.typeSymbol) =>
- log("boxing an unbox: " + tree + " and replying with " + arg)
+ log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}")
arg
case _ =>
(REF(boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectClass.tpe
}
- })
+ }
+ }
+ typedPos(tree.pos)(tree1)
+ }
+
+ private def unbox(tree: Tree, pt: Type): Tree = {
+ val result = unbox1(tree, pt)
+ log(s"unboxing ${tree.summaryString}: ${tree.tpe} with pt=$pt as type ${result.tpe}")
+ result
}
/** Unbox `tree` of boxed type to expected type `pt`.
@@ -593,41 +618,66 @@ abstract class Erasure extends AddInterfaces
* @param pt the expected type.
* @return the unboxed tree
*/
- private def unbox(tree: Tree, pt: Type): Tree = tree match {
+ private def unbox1(tree: Tree, pt: Type): Tree = tree match {
/*
case Boxed(unboxed) =>
println("unbox shorten: "+tree) // this never seems to kick in during build and test; therefore disabled.
adaptToType(unboxed, pt)
*/
- case LabelDef(name, params, rhs) =>
- val rhs1 = unbox(rhs, pt)
- treeCopy.LabelDef(tree, name, params, rhs1) setType rhs1.tpe
+ case LabelDef(_, _, _) =>
+ val ldef = deriveLabelDef(tree)(unbox(_, pt))
+ ldef setType ldef.rhs.tpe
case _ =>
- typedPos(tree.pos)(pt.typeSymbol match {
- case UnitClass =>
- if (treeInfo isPureExpr tree) UNIT
- else BLOCK(tree, UNIT)
- case x =>
- assert(x != ArrayClass)
- (REF(unboxMethod(pt.typeSymbol)) APPLY tree) setType pt
- })
+ val tree1 = pt match {
+ case ErasedValueType(tref) =>
+ tree match {
+ case Boxed(arg) if arg.tpe.isInstanceOf[ErasedValueType] =>
+ log("shortcircuiting box -> unbox "+arg)
+ arg
+ case _ =>
+ val clazz = tref.sym
+ log("not boxed: "+tree)
+ lazy val underlying = underlyingOfValueClass(clazz)
+ val tree0 =
+ if (tree.tpe.typeSymbol == NullClass &&
+ isPrimitiveValueClass(underlying.typeSymbol)) {
+ // convert `null` directly to underlying type, as going
+ // via the unboxed type would yield a NPE (see SI-5866)
+ unbox1(tree, underlying)
+ } else
+ Apply(Select(adaptToType(tree, clazz.tpe), clazz.derivedValueClassUnbox), List())
+ cast(tree0, pt)
+ }
+ case _ =>
+ pt.typeSymbol match {
+ case UnitClass =>
+ if (treeInfo isExprSafeToInline tree) UNIT
+ else BLOCK(tree, UNIT)
+ case x =>
+ assert(x != ArrayClass)
+ // don't `setType pt` the Apply tree, as the Apply's fun won't be typechecked if the Apply tree already has a type
+ Apply(unboxMethod(pt.typeSymbol), tree)
+ }
+ }
+ typedPos(tree.pos)(tree1)
}
/** Generate a synthetic cast operation from tree.tpe to pt.
* @pre pt eq pt.normalize
*/
- private def cast(tree: Tree, pt: Type): Tree = {
+ private def cast(tree: Tree, pt: Type): Tree = logResult(s"cast($tree, $pt)") {
if (pt.typeSymbol == UnitClass) {
// See SI-4731 for one example of how this occurs.
log("Attempted to cast to Unit: " + tree)
tree.duplicate setType pt
- }
- else tree AS_ATTR pt
+ } else if (tree.tpe != null && tree.tpe.typeSymbol == ArrayClass && pt.typeSymbol == ArrayClass) {
+ // See SI-2386 for one example of when this might be necessary.
+ val needsExtraCast = isPrimitiveValueType(tree.tpe.typeArgs.head) && !isPrimitiveValueType(pt.typeArgs.head)
+ val tree1 = if (needsExtraCast) gen.mkRuntimeCall(nme.toObjectArray, List(tree)) else tree
+ gen.mkAttributedCast(tree1, pt)
+ } else gen.mkAttributedCast(tree, pt)
}
- private def isUnboxedValueMember(sym: Symbol) =
- sym != NoSymbol && isValueClass(sym.owner)
-
/** Adapt `tree` to expected type `pt`.
*
* @param tree the given tree
@@ -639,29 +689,31 @@ abstract class Erasure extends AddInterfaces
log("adapting " + tree + ":" + tree.tpe + " : " + tree.tpe.parents + " to " + pt)//debug
if (tree.tpe <:< pt)
tree
- else if (isValueClass(tree.tpe.typeSymbol) && !isValueClass(pt.typeSymbol))
- adaptToType(box(tree), pt)
- else if (tree.tpe.isInstanceOf[MethodType] && tree.tpe.params.isEmpty) {
- assert(tree.symbol.isStable, "adapt "+tree+":"+tree.tpe+" to "+pt)
+ else if (isDifferentErasedValueType(tree.tpe, pt))
+ adaptToType(box(tree, pt.toString), pt)
+ else if (isDifferentErasedValueType(pt, tree.tpe))
+ adaptToType(unbox(tree, pt), pt)
+ else if (isPrimitiveValueType(tree.tpe) && !isPrimitiveValueType(pt)) {
+ adaptToType(box(tree, pt.toString), pt)
+ } else if (isMethodTypeWithEmptyParams(tree.tpe)) {
+ // [H] this assert fails when trying to typecheck tree !(SomeClass.this.bitmap) for single lazy val
+ //assert(tree.symbol.isStable, "adapt "+tree+":"+tree.tpe+" to "+pt)
adaptToType(Apply(tree, List()) setPos tree.pos setType tree.tpe.resultType, pt)
- } else if (pt <:< tree.tpe)
- cast(tree, pt)
- else if (isValueClass(pt.typeSymbol) && !isValueClass(tree.tpe.typeSymbol))
+// } else if (pt <:< tree.tpe)
+// cast(tree, pt)
+ } else if (isPrimitiveValueType(pt) && !isPrimitiveValueType(tree.tpe))
adaptToType(unbox(tree, pt), pt)
else
cast(tree, pt)
}
- // @PP 1/25/2011: This is less inaccurate than it was (I removed
- // BoxedAnyArray, asInstanceOf$erased, and other long ago eliminated symbols)
- // but I do not think it yet describes the code beneath it.
-
/** Replace member references as follows:
*
* - `x == y` for == in class Any becomes `x equals y` with equals in class Object.
* - `x != y` for != in class Any becomes `!(x equals y)` with equals in class Object.
* - x.asInstanceOf[T] becomes x.$asInstanceOf[T]
* - x.isInstanceOf[T] becomes x.$isInstanceOf[T]
+ * - x.isInstanceOf[ErasedValueType(tref)] becomes x.isInstanceOf[tref.sym.tpe]
* - x.m where m is some other member of Any becomes x.m where m is a member of class Object.
* - x.m where x has unboxed value type T and m is not a directly translated member of T becomes T.box(x).m
* - x.m where x is a reference type and m is a directly translated member of value type T becomes x.TValue().m
@@ -671,44 +723,83 @@ abstract class Erasure extends AddInterfaces
private def adaptMember(tree: Tree): Tree = {
//Console.println("adaptMember: " + tree);
tree match {
- case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List()) if tree.symbol == Any_asInstanceOf =>
+ case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
+ if tree.symbol == Any_asInstanceOf =>
val qual1 = typedQualifier(qual, NOmode, ObjectClass.tpe) // need to have an expected type, see #3037
val qualClass = qual1.tpe.typeSymbol
- val targClass = targ.tpe.typeSymbol
/*
+ val targClass = targ.tpe.typeSymbol
+
if (isNumericValueClass(qualClass) && isNumericValueClass(targClass))
// convert numeric type casts
atPos(tree.pos)(Apply(Select(qual1, "to" + targClass.name), List()))
else
*/
- if (isValueClass(targClass)) unbox(qual1, targ.tpe)
- else tree
- case Select(qual, name) if (name != nme.CONSTRUCTOR) =>
- if (tree.symbol == NoSymbol)
+ if (isPrimitiveValueType(targ.tpe) || isErasedValueType(targ.tpe)) {
+ val noNullCheckNeeded = targ.tpe match {
+ case ErasedValueType(tref) =>
+ atPhase(currentRun.erasurePhase) {
+ isPrimitiveValueClass(erasedValueClassArg(tref).typeSymbol)
+ }
+ case _ =>
+ true
+ }
+ if (noNullCheckNeeded) unbox(qual1, targ.tpe)
+ else {
+ def nullConst = Literal(Constant(null)) setType NullClass.tpe
+ val untyped =
+// util.trace("new asinstanceof test") {
+ gen.evalOnce(qual1, context.owner, context.unit) { qual =>
+ If(Apply(Select(qual(), nme.eq), List(Literal(Constant(null)) setType NullClass.tpe)),
+ Literal(Constant(null)) setType targ.tpe,
+ unbox(qual(), targ.tpe))
+ }
+// }
+ typed(untyped)
+ }
+ } else tree
+ case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
+ if tree.symbol == Any_isInstanceOf =>
+ targ.tpe match {
+ case ErasedValueType(tref) => targ.setType(tref.sym.tpe)
+ case _ =>
+ }
tree
- else if (tree.symbol == Any_asInstanceOf)
+ case Select(qual, name) =>
+ if (tree.symbol == NoSymbol) {
+ tree
+ } else if (name == nme.CONSTRUCTOR) {
+ if (tree.symbol.owner == AnyValClass) tree.symbol = ObjectClass.primaryConstructor
+ tree
+ } else if (tree.symbol == Any_asInstanceOf)
adaptMember(atPos(tree.pos)(Select(qual, Object_asInstanceOf)))
else if (tree.symbol == Any_isInstanceOf)
adaptMember(atPos(tree.pos)(Select(qual, Object_isInstanceOf)))
else if (tree.symbol.owner == AnyClass)
- adaptMember(atPos(tree.pos)(Select(qual, getMember(ObjectClass, name))))
+ adaptMember(atPos(tree.pos)(Select(qual, getMember(ObjectClass, tree.symbol.name))))
else {
var qual1 = typedQualifier(qual)
- if ((isValueClass(qual1.tpe.typeSymbol) && !isUnboxedValueMember(tree.symbol)))
- qual1 = box(qual1)
- else if (!isValueClass(qual1.tpe.typeSymbol) && isUnboxedValueMember(tree.symbol))
+ if ((isPrimitiveValueType(qual1.tpe) && !isPrimitiveValueMember(tree.symbol)) ||
+ isErasedValueType(qual1.tpe))
+ qual1 = box(qual1, "owner "+tree.symbol.owner)
+ else if (!isPrimitiveValueType(qual1.tpe) && isPrimitiveValueMember(tree.symbol))
qual1 = unbox(qual1, tree.symbol.owner.tpe)
- if (isValueClass(tree.symbol.owner) && !isValueClass(qual1.tpe.typeSymbol))
+ def selectFrom(qual: Tree) = treeCopy.Select(tree, qual, name)
+
+ if (isPrimitiveValueMember(tree.symbol) && !isPrimitiveValueType(qual1.tpe)) {
tree.symbol = NoSymbol
- else if (qual1.tpe.isInstanceOf[MethodType] && qual1.tpe.params.isEmpty) {
+ selectFrom(qual1)
+ } else if (isMethodTypeWithEmptyParams(qual1.tpe)) {
assert(qual1.symbol.isStable, qual1.symbol);
- qual1 = Apply(qual1, List()) setPos qual1.pos setType qual1.tpe.resultType
+ val applied = Apply(qual1, List()) setPos qual1.pos setType qual1.tpe.resultType
+ adaptMember(selectFrom(applied))
} else if (!(qual1.isInstanceOf[Super] || (qual1.tpe.typeSymbol isSubClass tree.symbol.owner))) {
assert(tree.symbol.owner != ArrayClass)
- qual1 = cast(qual1, tree.symbol.owner.tpe)
+ selectFrom(cast(qual1, tree.symbol.owner.tpe))
+ } else {
+ selectFrom(qual1)
}
- treeCopy.Select(tree, qual1, name)
}
case SelectFromArray(qual, name, erasure) =>
var qual1 = typedQualifier(qual)
@@ -724,25 +815,41 @@ abstract class Erasure extends AddInterfaces
override protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree =
adaptToType(tree, pt)
- /** A replacement for the standard typer's `typed1' method.
+ /** A replacement for the standard typer's `typed1` method.
*/
- override protected def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
+ override def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
val tree1 = try {
- super.typed1(adaptMember(tree), mode, pt)
+ tree match {
+ case InjectDerivedValue(arg) =>
+ (tree.attachments.get[TypeRefAttachment]: @unchecked) match {
+ case Some(itype) =>
+ val tref = itype.tpe
+ val argPt = atPhase(currentRun.erasurePhase)(erasedValueClassArg(tref))
+ log(s"transforming inject $arg -> $tref/$argPt")
+ val result = typed(arg, mode, argPt)
+ log(s"transformed inject $arg -> $tref/$argPt = $result:${result.tpe}")
+ return result setType ErasedValueType(tref)
+
+ }
+ case _ =>
+ super.typed1(adaptMember(tree), mode, pt)
+ }
} catch {
case er: TypeError =>
- Console.println("exception when typing " + tree)
+ Console.println("exception when typing " + tree+"/"+tree.getClass)
Console.println(er.msg + " in file " + context.owner.sourceFile)
er.printStackTrace
- abort()
+ abort("unrecoverable error")
case ex: Exception =>
//if (settings.debug.value)
- Console.println("exception when typing " + tree);
+ try Console.println("exception when typing " + tree)
+ finally throw ex
throw ex
}
+
def adaptCase(cdef: CaseDef): CaseDef = {
- val body1 = adaptToType(cdef.body, tree1.tpe)
- treeCopy.CaseDef(cdef, cdef.pat, cdef.guard, body1) setType body1.tpe
+ val newCdef = deriveCaseDef(cdef)(adaptToType(_, tree1.tpe))
+ newCdef setType newCdef.body.tpe
}
def adaptBranch(branch: Tree): Tree =
if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe);
@@ -770,6 +877,11 @@ abstract class Erasure extends AddInterfaces
tree1
}
}
+
+ private def isMethodTypeWithEmptyParams(tpe: Type) = tpe match {
+ case MethodType(Nil, _) => true
+ case _ => false
+ }
}
/** The erasure transformer */
@@ -783,33 +895,48 @@ abstract class Erasure extends AddInterfaces
* but their erased types are the same.
*/
private def checkNoDoubleDefs(root: Symbol) {
+ def sameTypeAfterErasure(sym1: Symbol, sym2: Symbol) =
+ afterPostErasure(sym1.info =:= sym2.info) && !sym1.isMacro && !sym2.isMacro
+
def doubleDefError(sym1: Symbol, sym2: Symbol) {
// the .toString must also be computed at the earlier phase
- def atRefc[T](op: => T) = atPhase[T](currentRun.refchecksPhase.next)(op)
- val tpe1 = atRefc(root.thisType.memberType(sym1))
- val tpe2 = atRefc(root.thisType.memberType(sym2))
+ val tpe1 = afterRefchecks(root.thisType.memberType(sym1))
+ val tpe2 = afterRefchecks(root.thisType.memberType(sym2))
if (!tpe1.isErroneous && !tpe2.isErroneous)
unit.error(
if (sym1.owner == root) sym1.pos else root.pos,
(if (sym1.owner == sym2.owner) "double definition:\n"
else if (sym1.owner == root) "name clash between defined and inherited member:\n"
else "name clash between inherited members:\n") +
- sym1 + ":" + atRefc(tpe1.toString) +
+ sym1 + ":" + afterRefchecks(tpe1.toString) +
(if (sym1.owner == root) "" else sym1.locationString) + " and\n" +
- sym2 + ":" + atRefc(tpe2.toString) +
+ sym2 + ":" + afterRefchecks(tpe2.toString) +
(if (sym2.owner == root) " at line " + (sym2.pos).line else sym2.locationString) +
"\nhave same type" +
- (if (atRefc(tpe1 =:= tpe2)) "" else " after erasure: " + atPhase(phase.next)(sym1.tpe)))
+ (if (afterRefchecks(tpe1 =:= tpe2)) "" else " after erasure: " + afterPostErasure(sym1.tpe)))
sym1.setInfo(ErrorType)
}
val decls = root.info.decls
+
+ // SI-8010 force infos, otherwise makeNotPrivate in ExplicitOuter info transformer can trigger
+ // a scope rehash while were iterating and we can see the same entry twice!
+ // Inspection of SymbolPairs (the basis of OverridingPairs), suggests that it is immune
+ // from this sort of bug as it copies the symbols into a temporary scope *before* any calls to `.info`,
+ // ie, no variant of it calls `info` or `tpe` in `SymbolPair#exclude`.
+ //
+ // Why not just create a temporary scope here? We need to force the name changes in any case before
+ // we do these checks, so that we're comparing same-named methods based on the expanded names that actually
+ // end up in the bytecode.
+ afterPostErasure(decls.foreach(_.info))
+
var e = decls.elems
while (e ne null) {
if (e.sym.isTerm) {
var e1 = decls.lookupNextEntry(e)
while (e1 ne null) {
- if (atPhase(phase.next)(e1.sym.info =:= e.sym.info)) doubleDefError(e.sym, e1.sym)
+ assert(e.sym ne e1.sym, s"Internal error: encountered ${e.sym.debugLocationString} twice during scope traversal. This might be related to SI-8010.")
+ if (sameTypeAfterErasure(e1.sym, e.sym)) doubleDefError(e.sym, e1.sym)
e1 = decls.lookupNextEntry(e1)
}
}
@@ -823,14 +950,14 @@ abstract class Erasure extends AddInterfaces
|| !sym.hasTypeAt(currentRun.refchecksPhase.id))
override def matches(sym1: Symbol, sym2: Symbol): Boolean =
- atPhase(phase.next)(sym1.tpe =:= sym2.tpe)
+ afterPostErasure(sym1.tpe =:= sym2.tpe)
}
while (opc.hasNext) {
- if (!atPhase(currentRun.refchecksPhase.next)(
+ if (!afterRefchecks(
root.thisType.memberType(opc.overriding) matches
- root.thisType.memberType(opc.overridden))) {
- if (settings.debug.value)
- log("" + opc.overriding.locationString + " " +
+ root.thisType.memberType(opc.overridden)) &&
+ sameTypeAfterErasure(opc.overriding, opc.overridden)) {
+ debuglog("" + opc.overriding.locationString + " " +
opc.overriding.infosString +
opc.overridden.locationString + " " +
opc.overridden.infosString)
@@ -846,10 +973,10 @@ abstract class Erasure extends AddInterfaces
for (member <- root.info.nonPrivateMember(other.name).alternatives) {
if (member != other &&
!(member hasFlag BRIDGE) &&
- atPhase(phase.next)(member.tpe =:= other.tpe) &&
- !atPhase(refchecksPhase.next)(
+ afterErasure(member.tpe =:= other.tpe) &&
+ !afterRefchecks(
root.thisType.memberType(member) matches root.thisType.memberType(other))) {
- if (settings.debug.value) log("" + member.locationString + " " + member.infosString + other.locationString + " " + other.infosString);
+ debuglog("" + member.locationString + " " + member.infosString + other.locationString + " " + other.infosString);
doubleDefError(member, other)
}
}
@@ -870,85 +997,10 @@ abstract class Erasure extends AddInterfaces
* type of `m1` in the template.
*/
private def bridgeDefs(owner: Symbol): (List[Tree], immutable.Set[Symbol]) = {
- var toBeRemoved: immutable.Set[Symbol] = immutable.Set()
- //println("computing bridges for " + owner)//DEBUG
- assert(phase == currentRun.erasurePhase)
- val site = owner.thisType
- val bridgesScope = new Scope
- val bridgeTarget = new mutable.HashMap[Symbol, Symbol]
- var bridges: List[Tree] = List()
- val opc = atPhase(currentRun.explicitouterPhase) {
- new overridingPairs.Cursor(owner) {
- override def parents: List[Type] = List(owner.info.parents.head)
- override def exclude(sym: Symbol): Boolean =
- !sym.isMethod || sym.isPrivate || super.exclude(sym)
- }
- }
- while (opc.hasNext) {
- val member = opc.overriding
- val other = opc.overridden
- //Console.println("bridge? " + member + ":" + member.tpe + member.locationString + " to " + other + ":" + other.tpe + other.locationString)//DEBUG
- if (atPhase(currentRun.explicitouterPhase)(!member.isDeferred)) {
- val otpe = erasure(other.tpe)
- val bridgeNeeded = atPhase(phase.next) (
- !(other.tpe =:= member.tpe) &&
- !(deconstMap(other.tpe) =:= deconstMap(member.tpe)) &&
- { var e = bridgesScope.lookupEntry(member.name)
- while ((e ne null) && !((e.sym.tpe =:= otpe) && (bridgeTarget(e.sym) == member)))
- e = bridgesScope.lookupNextEntry(e)
- (e eq null)
- }
- );
- if (bridgeNeeded) {
- val bridge = other.cloneSymbolImpl(owner)
- .setPos(owner.pos)
- .setFlag(member.flags | BRIDGE)
- .resetFlag(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
- // the parameter symbols need to have the new owner
- bridge.setInfo(otpe.cloneInfo(bridge))
- bridgeTarget(bridge) = member
- atPhase(phase.next) { owner.info.decls.enter(bridge) }
- if (other.owner == owner) {
- //println("bridge to same: "+other+other.locationString)//DEBUG
- atPhase(phase.next) { owner.info.decls.unlink(other) }
- toBeRemoved += other
- }
- bridgesScope enter bridge
- bridges =
- atPhase(phase.next) {
- atPos(bridge.pos) {
- val bridgeDef =
- DefDef(bridge,
- member.tpe match {
- case MethodType(List(), ConstantType(c)) => Literal(c)
- case _ =>
- (((Select(This(owner), member): Tree) /: bridge.paramss)
- ((fun, vparams) => Apply(fun, vparams map Ident)))
- });
- if (settings.debug.value)
- log("generating bridge from " + other + "(" + Flags.flagsToString(bridge.flags) + ")" + ":" + otpe + other.locationString + " to " + member + ":" + erasure(member.tpe) + member.locationString + " =\n " + bridgeDef);
- bridgeDef
- }
- } :: bridges
- }
- }
- opc.next
- }
- (bridges, toBeRemoved)
+ assert(phase == currentRun.erasurePhase, phase)
+ debuglog("computing bridges for " + owner)
+ new ComputeBridges(unit, owner) compute()
}
-/*
- for (bc <- site.baseClasses.tail; other <- bc.info.decls.toList) {
- if (other.isMethod && !other.isConstructor) {
- for (member <- site.nonPrivateMember(other.name).alternatives) {
- if (member != other &&
- !(member hasFlag DEFERRED) &&
- (site.memberType(member) matches site.memberType(other)) &&
- !(site.parents exists (p =>
- (p.symbol isSubClass member.owner) && (p.symbol isSubClass other.owner)))) {
-...
- }
- }
-*/
def addBridges(stats: List[Tree], base: Symbol): List[Tree] =
if (base.isTrait) stats
@@ -972,108 +1024,53 @@ abstract class Erasure extends AddInterfaces
* - Given a selection q.s, where the owner of `s` is not accessible but the
* type symbol of q's type qT is accessible, insert a cast (q.asInstanceOf[qT]).s
* This prevents illegal access errors (see #4283).
+ * - Remove all instance creations new C(arg) where C is an inlined class.
* - Reset all other type attributes to null, thus enforcing a retyping.
*/
private val preTransformer = new TypingTransformer(unit) {
- def preErase(tree: Tree): Tree = tree match {
- case ClassDef(mods, name, tparams, impl) =>
- if (settings.debug.value)
- log("defs of " + tree.symbol + " = " + tree.symbol.info.decls)
- treeCopy.ClassDef(tree, mods, name, List(), impl)
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- treeCopy.DefDef(tree, mods, name, List(), vparamss, tpt, rhs)
- case TypeDef(_, _, _, _) =>
- EmptyTree
- case Apply(instanceOf @ TypeApply(fun @ Select(qual, name), args @ List(arg)), List()) // !!! todo: simplify by having GenericArray also extract trees
- if ((fun.symbol == Any_isInstanceOf || fun.symbol == Object_isInstanceOf) &&
- unboundedGenericArrayLevel(arg.tpe) > 0) =>
- val level = unboundedGenericArrayLevel(arg.tpe)
- def isArrayTest(arg: Tree) =
- gen.mkRuntimeCall("isArray", List(arg, Literal(Constant(level))))
- global.typer.typedPos(tree.pos) {
- if (level == 1) isArrayTest(qual)
- else
- gen.evalOnce(qual, currentOwner, unit) { qual1 =>
- gen.mkAnd(
- Apply(TypeApply(Select(qual1(), fun.symbol),
- List(TypeTree(erasure(arg.tpe)))),
- List()),
- isArrayTest(qual1()))
- }
- }
- case TypeApply(fun, args) if (fun.symbol.owner != AnyClass &&
- fun.symbol != Object_asInstanceOf &&
- fun.symbol != Object_isInstanceOf) =>
- // leave all other type tests/type casts, remove all other type applications
- preErase(fun)
- case Apply(fn @ Select(qual, name), args) if (fn.symbol.owner == ArrayClass) =>
- if (unboundedGenericArrayLevel(qual.tpe.widen) == 1)
- // convert calls to apply/update/length on generic arrays to
- // calls of ScalaRunTime.array_xxx method calls
- global.typer.typedPos(tree.pos) { gen.mkRuntimeCall("array_"+name, qual :: args) }
- else
- // store exact array erasure in map to be retrieved later when we might
- // need to do the cast in adaptMember
- treeCopy.Apply(
- tree,
- SelectFromArray(qual, name, erasure(qual.tpe)).copyAttrs(fn),
- args)
-
- case Apply(fn @ Select(qual, _), Nil) if interceptedMethods(fn.symbol) =>
- if (fn.symbol == Any_## || fn.symbol == Object_##) {
- // This is unattractive, but without it we crash here on ().## because after
- // erasure the ScalaRunTime.hash overload goes from Unit => Int to BoxedUnit => Int.
- // This must be because some earlier transformation is being skipped on ##, but so
- // far I don't know what. For null we now define null.## == 0.
- val arg = qual.tpe.typeSymbolDirect match {
- case UnitClass => BLOCK(qual, REF(BoxedUnit_UNIT)) // ({ expr; UNIT }).##
- case NullClass => LIT(0) // (null: Object).##
- case _ => qual
- }
- Apply(gen.mkAttributedRef(scalaRuntimeHash), List(arg))
+
+ private def preEraseNormalApply(tree: Apply) = {
+ val fn = tree.fun
+ val args = tree.args
+
+ def qualifier = fn match {
+ case Select(qual, _) => qual
+ case TypeApply(Select(qual, _), _) => qual
+ }
+
+ def preEraseAsInstanceOf = {
+ (fn: @unchecked) match {
+ case TypeApply(Select(qual, _), List(targ)) =>
+ if (qual.tpe <:< targ.tpe)
+ atPos(tree.pos) { Typed(qual, TypeTree(targ.tpe)) }
+ else if (isNumericValueClass(qual.tpe.typeSymbol) && isNumericValueClass(targ.tpe.typeSymbol))
+ atPos(tree.pos)(numericConversion(qual, targ.tpe.typeSymbol))
+ else
+ tree
}
- // Rewrite 5.getClass to ScalaRunTime.anyValClass(5)
- else if (isValueClass(qual.tpe.typeSymbol))
- Apply(gen.mkAttributedRef(scalaRuntimeAnyValClass), List(qual))
- else
- tree
+ // todo: also handle the case where the singleton type is buried in a compound
+ }
- case Apply(fn, args) =>
- if (fn.symbol == Any_asInstanceOf)
- (fn: @unchecked) match {
- case TypeApply(Select(qual, _), List(targ)) =>
- if (qual.tpe <:< targ.tpe) {
- atPos(tree.pos) { Typed(qual, TypeTree(targ.tpe)) }
- } else if (isNumericValueClass(qual.tpe.typeSymbol) &&
- isNumericValueClass(targ.tpe.typeSymbol)) {
- // convert numeric type casts
- val cname = newTermName("to" + targ.tpe.typeSymbol.name)
- val csym = qual.tpe.member(cname)
- assert(csym != NoSymbol)
- atPos(tree.pos) { Apply(Select(qual, csym), List()) }
- } else
- tree
- }
- // todo: also handle the case where the singleton type is buried in a compound
- else if (fn.symbol == Any_isInstanceOf) {
- fn match {
- case TypeApply(sel @ Select(qual, name), List(targ)) =>
- if (qual.tpe != null && isValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefClass.tpe)
- unit.error(sel.pos, "isInstanceOf cannot test if value types are references.")
-
- def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
- Apply(
- TypeApply(
- Select(q(), Object_isInstanceOf) setPos sel.pos,
- List(TypeTree(tp) setPos targ.pos)) setPos fn.pos,
- List()) setPos tree.pos
- targ.tpe match {
- case SingleType(_, _) | ThisType(_) | SuperType(_, _) =>
- val cmpOp = if (targ.tpe <:< AnyValClass.tpe) Any_equals else Object_eq
- atPos(tree.pos) {
- Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
- }
- case RefinedType(parents, decls) if (parents.length >= 2) =>
+ def preEraseIsInstanceOf = {
+ fn match {
+ case TypeApply(sel @ Select(qual, name), List(targ)) =>
+ if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefClass.tpe)
+ unit.error(sel.pos, "isInstanceOf cannot test if value types are references.")
+
+ def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
+ Apply(
+ TypeApply(
+ Select(q(), Object_isInstanceOf) setPos sel.pos,
+ List(TypeTree(tp) setPos targ.pos)) setPos fn.pos,
+ List()) setPos tree.pos
+ targ.tpe match {
+ case SingleType(_, _) | ThisType(_) | SuperType(_, _) =>
+ val cmpOp = if (targ.tpe <:< AnyValClass.tpe) Any_equals else Object_eq
+ atPos(tree.pos) {
+ Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
+ }
+ case RefinedType(parents, decls) if (parents.length >= 2) =>
+ gen.evalOnce(qual, currentOwner, unit) { q =>
// Optimization: don't generate isInstanceOf tests if the static type
// conforms, because it always succeeds. (Or at least it had better.)
// At this writing the pattern matcher generates some instance tests
@@ -1083,51 +1080,165 @@ abstract class Erasure extends AddInterfaces
val parentTests = parents filterNot (qual.tpe <:< _)
if (parentTests.isEmpty) Literal(Constant(true))
- else gen.evalOnce(qual, currentOwner, unit) { q =>
- atPos(tree.pos) {
- parentTests map mkIsInstanceOf(q) reduceRight gen.mkAnd
- }
+ else atPos(tree.pos) {
+ parentTests map mkIsInstanceOf(q) reduceRight gen.mkAnd
}
- case _ =>
- tree
- }
- case _ => tree
- }
+ }
+ case _ =>
+ tree
+ }
+ case _ => tree
}
- else {
- def doDynamic(fn: Tree, qual: Tree): Tree = {
- if (fn.symbol.owner.isRefinementClass && fn.symbol.allOverriddenSymbols.isEmpty)
- ApplyDynamic(qual, args) setSymbol fn.symbol setPos tree.pos
- else tree
+ }
+
+ if (fn.symbol == Any_asInstanceOf) {
+ preEraseAsInstanceOf
+ } else if (fn.symbol == Any_isInstanceOf) {
+ preEraseIsInstanceOf
+ } else if (fn.symbol.owner.isRefinementClass && !fn.symbol.isOverridingSymbol) {
+ ApplyDynamic(qualifier, args) setSymbol fn.symbol setPos tree.pos
+ } else if (fn.symbol.isMethodWithExtension && !fn.symbol.tpe.isErroneous) {
+ Apply(gen.mkAttributedRef(extensionMethods.extensionMethod(fn.symbol)), qualifier :: args)
+ } else {
+ tree
+ }
+ }
+
+ private def preEraseApply(tree: Apply) = {
+ tree.fun match {
+ case TypeApply(fun @ Select(qual, name), args @ List(arg))
+ if ((fun.symbol == Any_isInstanceOf || fun.symbol == Object_isInstanceOf) &&
+ unboundedGenericArrayLevel(arg.tpe) > 0) => // !!! todo: simplify by having GenericArray also extract trees
+ val level = unboundedGenericArrayLevel(arg.tpe)
+ def isArrayTest(arg: Tree) =
+ gen.mkRuntimeCall(nme.isArray, List(arg, Literal(Constant(level))))
+
+ global.typer.typedPos(tree.pos) {
+ if (level == 1) isArrayTest(qual)
+ else gen.evalOnce(qual, currentOwner, unit) { qual1 =>
+ gen.mkAnd(
+ gen.mkMethodCall(
+ qual1(),
+ fun.symbol,
+ List(specialErasure(fun.symbol)(arg.tpe)),
+ Nil
+ ),
+ isArrayTest(qual1())
+ )
+ }
}
- fn match {
- case Select(qual, _) => doDynamic(fn, qual)
- case TypeApply(fni at Select(qual, _), _) => doDynamic(fni, qual)// type parameters are irrelevant in case of dynamic call
- case _ =>
+ case fn @ Select(qual, name) =>
+ val args = tree.args
+ if (fn.symbol.owner == ArrayClass) {
+ // Have to also catch calls to abstract types which are bounded by Array.
+ if (unboundedGenericArrayLevel(qual.tpe.widen) == 1 || qual.tpe.typeSymbol.isAbstractType) {
+ // convert calls to apply/update/length on generic arrays to
+ // calls of ScalaRunTime.array_xxx method calls
+ global.typer.typedPos(tree.pos) {
+ val arrayMethodName = name match {
+ case nme.apply => nme.array_apply
+ case nme.length => nme.array_length
+ case nme.update => nme.array_update
+ case nme.clone_ => nme.array_clone
+ case _ => unit.error(tree.pos, "Unexpected array member, no translation exists.") ; nme.NO_NAME
+ }
+ gen.mkRuntimeCall(arrayMethodName, qual :: args)
+ }
+ } else {
+ // store exact array erasure in map to be retrieved later when we might
+ // need to do the cast in adaptMember
+ // Note: No specialErasure needed here because we simply cast, on
+ // elimination of SelectFromArray, no boxing or unboxing is done there.
+ treeCopy.Apply(
+ tree,
+ SelectFromArray(qual, name, erasure(tree.symbol)(qual.tpe)).copyAttrs(fn),
+ args)
+ }
+ } else if (args.isEmpty && interceptedMethods(fn.symbol)) {
+ if (poundPoundMethods.contains(fn.symbol)) {
+ // This is unattractive, but without it we crash here on ().## because after
+ // erasure the ScalaRunTime.hash overload goes from Unit => Int to BoxedUnit => Int.
+ // This must be because some earlier transformation is being skipped on ##, but so
+ // far I don't know what. For null we now define null.## == 0.
+ qual.tpe.typeSymbol match {
+ case UnitClass | NullClass => LIT(0)
+ case IntClass => qual
+ case s @ (ShortClass | ByteClass | CharClass) => numericConversion(qual, s)
+ case BooleanClass => If(qual, LIT(true.##), LIT(false.##))
+ case _ =>
+ global.typer.typed(gen.mkRuntimeCall(nme.hash_, List(qual)))
+ }
+ } else if (isPrimitiveValueClass(qual.tpe.typeSymbol)) {
+ // Rewrite 5.getClass to ScalaRunTime.anyValClass(5)
+ global.typer.typed(gen.mkRuntimeCall(nme.anyValClass, List(qual, typer.resolveClassTag(tree.pos, qual.tpe.widen))))
+ } else if (primitiveGetClassMethods.contains(fn.symbol)) {
+ // if we got here then we're trying to send a primitive getClass method to either
+ // a) an Any, in which cage Object_getClass works because Any erases to object. Or
+ //
+ // b) a non-primitive, e.g. because the qualifier's type is a refinement type where one parent
+ // of the refinement is a primitive and another is AnyRef. In that case
+ // we get a primitive form of _getClass trying to target a boxed value
+ // so we need replace that method name with Object_getClass to get correct behavior.
+ // See SI-5568.
+ tree setSymbol Object_getClass
+ } else {
+ debugwarn(s"The symbol '${fn.symbol}' was interecepted but didn't match any cases, that means the intercepted methods set doesn't match the code")
tree
+ }
+ } else qual match {
+ case New(tpt) if name == nme.CONSTRUCTOR && tpt.tpe.typeSymbol.isDerivedValueClass =>
+ // println("inject derived: "+arg+" "+tpt.tpe)
+ val List(arg) = args
+ val attachment = new TypeRefAttachment(tree.tpe.asInstanceOf[TypeRef])
+ InjectDerivedValue(arg) updateAttachment attachment
+ case _ =>
+ preEraseNormalApply(tree)
}
- }
+
+ case _ =>
+ preEraseNormalApply(tree)
+ }
+ }
+
+ def preErase(tree: Tree): Tree = tree match {
+ case tree: Apply =>
+ preEraseApply(tree)
+
+ case TypeApply(fun, args) if (fun.symbol.owner != AnyClass &&
+ fun.symbol != Object_asInstanceOf &&
+ fun.symbol != Object_isInstanceOf) =>
+ // leave all other type tests/type casts, remove all other type applications
+ preErase(fun)
case Select(qual, name) =>
val owner = tree.symbol.owner
// println("preXform: "+ (tree, tree.symbol, tree.symbol.owner, tree.symbol.owner.isRefinementClass))
if (owner.isRefinementClass) {
- val overridden = tree.symbol.allOverriddenSymbols
- assert(!overridden.isEmpty, tree.symbol)
- tree.symbol = overridden.head
+ val overridden = tree.symbol.nextOverriddenSymbol
+ assert(overridden != NoSymbol, tree.symbol)
+ tree.symbol = overridden
}
+
def isAccessible(sym: Symbol) = localTyper.context.isAccessible(sym, sym.owner.thisType)
if (!isAccessible(owner) && qual.tpe != null) {
- // Todo: Figure out how qual.tpe could be null in the check above (it does appear in build where SwingWorker.this
- // has a null type).
- val qualSym = qual.tpe.widen.typeSymbol
- if (isAccessible(qualSym) && !qualSym.isPackageClass && !qualSym.isPackageObjectClass) {
- // insert cast to prevent illegal access error (see #4283)
- // util.trace("insert erasure cast ") (*/
- treeCopy.Select(tree, qual AS_ATTR qual.tpe.widen, name) //)
- } else tree
+ qual match {
+ case Super(_, _) =>
+ // Insert a cast here at your peril -- see SI-5162. Bail out if the target method is defined in
+ // Java, otherwise, we'd get an IllegalAccessError at runtime. If the target method is defined in
+ // Scala, however, we should have access.
+ if (owner.isJavaDefined) unit.error(tree.pos, s"Unable to access ${tree.symbol.fullLocationString} with a super reference.")
+ tree
+ case _ =>
+ // Todo: Figure out how qual.tpe could be null in the check above (it does appear in build where SwingWorker.this
+ // has a null type).
+ val qualSym = qual.tpe.widen.typeSymbol
+ if (isAccessible(qualSym) && !qualSym.isPackageClass && !qualSym.isPackageObjectClass) {
+ // insert cast to prevent illegal access error (see #4283)
+ // util.trace("insert erasure cast ") (*/
+ treeCopy.Select(tree, gen.mkAttributedCast(qual, qual.tpe.widen), name) //)
+ } else tree
+ }
} else tree
-
case Template(parents, self, body) =>
assert(!currentOwner.isImplClass)
//Console.println("checking no dble defs " + tree)//DEBUG
@@ -1137,9 +1248,21 @@ abstract class Erasure extends AddInterfaces
case Match(selector, cases) =>
Match(Typed(selector, TypeTree(selector.tpe)), cases)
- case Literal(ct) if ct.tag == ClassTag
+ case Literal(ct) if ct.tag == ClazzTag
&& ct.typeValue.typeSymbol != definitions.UnitClass =>
- treeCopy.Literal(tree, Constant(erasure(ct.typeValue)))
+ val erased = ct.typeValue match {
+ case TypeRef(pre, clazz, args) if clazz.isDerivedValueClass => scalaErasure.eraseNormalClassRef(pre, clazz)
+ case tpe => specialScalaErasure(tpe)
+ }
+ treeCopy.Literal(tree, Constant(erased))
+
+ case ClassDef(_,_,_,_) =>
+ debuglog("defs of " + tree.symbol + " = " + tree.symbol.info.decls)
+ copyClassDef(tree)(tparams = Nil)
+ case DefDef(_,_,_,_,_,_) =>
+ copyDefDef(tree)(tparams = Nil)
+ case TypeDef(_, _, _, _) =>
+ EmptyTree
case _ =>
tree
@@ -1157,10 +1280,13 @@ abstract class Erasure extends AddInterfaces
val tree1 = preErase(tree)
tree1 match {
case EmptyTree | TypeTree() =>
- tree1 setType erasure(tree1.tpe)
+ tree1 setType specialScalaErasure(tree1.tpe)
+ case ArrayValue(elemtpt, trees) =>
+ treeCopy.ArrayValue(
+ tree1, elemtpt setType specialScalaErasure.applyInArray(elemtpt.tpe), trees map transform) setType null
case DefDef(_, _, _, _, tpt, _) =>
val result = super.transform(tree1) setType null
- tpt.tpe = erasure(tree1.symbol.tpe).resultType
+ tpt.tpe = specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType
result
case _ =>
super.transform(tree1) setType null
@@ -1174,13 +1300,15 @@ abstract class Erasure extends AddInterfaces
*/
override def transform(tree: Tree): Tree = {
val tree1 = preTransformer.transform(tree)
- atPhase(phase.next) {
+ // log("tree after pretransform: "+tree1)
+ afterErasure {
val tree2 = mixinTransformer.transform(tree1)
- if (settings.debug.value)
- log("tree after addinterfaces: \n" + tree2)
+ // debuglog("tree after addinterfaces: \n" + tree2)
newTyper(rootContext(unit, tree, true)).typed(tree2)
}
}
}
+
+ private class TypeRefAttachment(val tpe: TypeRef)
}
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 6737c3e..970519a 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -8,8 +8,10 @@ package transform
import symtab._
import Flags.{ CASE => _, _ }
+import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import matching.{ Patterns, ParallelMatching }
+import scala.tools.nsc.settings.ScalaVersion
/** This class ...
*
@@ -28,7 +30,7 @@ abstract class ExplicitOuter extends InfoTransform
import Debug.TRACE
/** The following flags may be set by this phase: */
- override def phaseNewFlags: Long = notPRIVATE | notPROTECTED | lateFINAL
+ override def phaseNewFlags: Long = notPROTECTED
/** the name of the phase: */
val phaseName: String = "explicitouter"
@@ -43,23 +45,24 @@ abstract class ExplicitOuter extends InfoTransform
private def isInner(clazz: Symbol) =
!clazz.isPackageClass && !clazz.outerClass.isStaticOwner
- private def haveSameOuter(parent: Type, clazz: Symbol) = parent match {
- case TypeRef(pre, sym, _) =>
- val owner = clazz.owner
+ private def haveSameOuter(parent: Type, clazz: Symbol) = {
+ val owner = clazz.owner
+ val parentSym = parent.typeSymbol
- sym.isClass && owner.isClass &&
- owner == sym.owner &&
- owner.thisType =:= pre
- case _ => false
+ parentSym.isClass && owner.isClass &&
+ (owner isSubClass parentSym.owner) &&
+ owner.thisType =:= parent.prefix
}
/** Does given clazz define an outer field? */
def hasOuterField(clazz: Symbol) = {
- val parents = clazz.info.parents
+ val parent = clazz.info.firstParent
- isInner(clazz) && !clazz.isTrait && {
- parents.isEmpty || !haveSameOuter(parents.head, clazz)
- }
+ // space optimization: inherit the $outer pointer from the parent class if
+ // we know that it will point to the correct instance.
+ def canReuseParentOuterField = !parent.typeSymbol.isJavaDefined && haveSameOuter(parent, clazz)
+
+ isInner(clazz) && !clazz.isTrait && !canReuseParentOuterField
}
private def outerField(clazz: Symbol): Symbol = {
@@ -69,6 +72,8 @@ abstract class ExplicitOuter extends InfoTransform
result
}
+ private val innerClassConstructorParamName: TermName = newTermName("arg" + nme.OUTER)
+
class RemoveBindingsTransformer(toRemove: Set[Symbol]) extends Transformer {
override def transform(tree: Tree) = tree match {
case Bind(_, body) if toRemove(tree.symbol) =>
@@ -87,8 +92,46 @@ abstract class ExplicitOuter extends InfoTransform
def outerAccessor(clazz: Symbol): Symbol = {
val firstTry = clazz.info.decl(nme.expandedName(nme.OUTER, clazz))
if (firstTry != NoSymbol && firstTry.outerSource == clazz) firstTry
- else clazz.info.decls find (_.outerSource == clazz) getOrElse NoSymbol
- }
+ else findOrElse(clazz.info.decls)(_.outerSource == clazz)(NoSymbol)
+ }
+ def newOuterAccessor(clazz: Symbol) = {
+ val accFlags = SYNTHETIC | ARTIFACT | METHOD | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
+ val sym = clazz.newMethod(nme.OUTER, clazz.pos, accFlags)
+ val restpe = if (clazz.isTrait) clazz.outerClass.tpe else clazz.outerClass.thisType
+
+ sym expandName clazz
+ sym.referenced = clazz
+ sym setInfo MethodType(Nil, restpe)
+ }
+ def newOuterField(clazz: Symbol) = {
+ val accFlags = SYNTHETIC | ARTIFACT | PARAMACCESSOR | ( if (clazz.isEffectivelyFinal) PrivateLocal else PROTECTED )
+ val sym = clazz.newValue(nme.OUTER_LOCAL, clazz.pos, accFlags)
+
+ sym setInfo clazz.outerClass.thisType
+ }
+
+ /**
+ * Will the outer accessor of the `clazz` subsume the outer accessor of
+ * `mixin`?
+ *
+ * This arises when an inner object mixes in its companion trait.
+ *
+ * {{{
+ * class C {
+ * trait T { C.this } // C$T$$$outer$ : C
+ * object T extends T { C.this } // C$T$$$outer$ : C.this.type
+ * }
+ * }}}
+ *
+ * See SI-7242.
+ }}
+ */
+ private def skipMixinOuterAccessor(clazz: Symbol, mixin: Symbol) = {
+ // Reliant on the current scheme for name expansion, the expanded name
+ // of the outer accessors in a trait and its companion object are the same.
+ // If the assumption is one day falsified, run/t7424.scala will let us know.
+ clazz.fullName == mixin.fullName
+ }
/** <p>
* The type transformation method:
@@ -123,6 +166,8 @@ abstract class ExplicitOuter extends InfoTransform
* Remove protected flag from all members of traits.
* </li>
* </ol>
+ * Note: this transformInfo need not be reflected as the JVM reflection already
+ * elides outer pointers.
*/
def transformInfo(sym: Symbol, tp: Type): Type = tp match {
case MethodType(params, restpe1) =>
@@ -132,7 +177,7 @@ abstract class ExplicitOuter extends InfoTransform
}
if (sym.owner.isTrait && sym.isProtected) sym setFlag notPROTECTED // 6
if (sym.isClassConstructor && isInner(sym.owner)) { // 1
- val p = sym.newValueParameter(sym.pos, "arg" + nme.OUTER)
+ val p = sym.newValueParameter(innerClassConstructorParamName, sym.pos)
.setInfo(sym.owner.outerClass.thisType)
MethodType(p :: params, restpe)
} else if (restpe ne restpe1)
@@ -142,28 +187,25 @@ abstract class ExplicitOuter extends InfoTransform
var decls1 = decls
if (isInner(clazz) && !clazz.isInterface) {
decls1 = decls.cloneScope
- val outerAcc = clazz.newMethod(clazz.pos, nme.OUTER) // 3
+ val outerAcc = clazz.newMethod(nme.OUTER, clazz.pos) // 3
outerAcc expandName clazz
- val restpe = if (clazz.isTrait) clazz.outerClass.tpe else clazz.outerClass.thisType
- decls1 enter (clazz.newOuterAccessor(clazz.pos) setInfo MethodType(Nil, restpe))
- if (hasOuterField(clazz)) { //2
- val access = if (clazz.isFinal) PRIVATE | LOCAL else PROTECTED
- decls1 enter (
- clazz.newValue(clazz.pos, nme.OUTER_LOCAL)
- setFlag (SYNTHETIC | PARAMACCESSOR | access)
- setInfo clazz.outerClass.thisType
- )
- }
+ decls1 enter newOuterAccessor(clazz)
+ if (hasOuterField(clazz)) //2
+ decls1 enter newOuterField(clazz)
}
if (!clazz.isTrait && !parents.isEmpty) {
for (mc <- clazz.mixinClasses) {
- val mixinOuterAcc: Symbol = atPhase(phase.next)(outerAccessor(mc))
+ val mixinOuterAcc: Symbol = afterExplicitOuter(outerAccessor(mc))
if (mixinOuterAcc != NoSymbol) {
- if (decls1 eq decls) decls1 = decls.cloneScope
- val newAcc = mixinOuterAcc.cloneSymbol(clazz)
- newAcc resetFlag DEFERRED setInfo (clazz.thisType memberType mixinOuterAcc)
- decls1 enter newAcc
+ if (skipMixinOuterAccessor(clazz, mc))
+ debuglog(s"Reusing outer accessor symbol of $clazz for the mixin outer accessor of $mc")
+ else {
+ if (decls1 eq decls) decls1 = decls.cloneScope
+ val newAcc = mixinOuterAcc.cloneSymbol(clazz, mixinOuterAcc.flags & ~DEFERRED)
+ newAcc setInfo (clazz.thisType memberType mixinOuterAcc)
+ decls1 enter newAcc
+ }
}
}
}
@@ -194,6 +236,8 @@ abstract class ExplicitOuter extends InfoTransform
/** The first outer selection from currently transformed tree.
* The result is typed but not positioned.
+ *
+ * Will return `EmptyTree` if there is no outer accessor because of a premature self reference.
*/
protected def outerValue: Tree =
if (outerParam != NoSymbol) ID(outerParam)
@@ -203,25 +247,34 @@ abstract class ExplicitOuter extends InfoTransform
* The result is typed but not positioned.
* If the outer access is from current class and current class is final
* take outer field instead of accessor
+ *
+ * Will return `EmptyTree` if there is no outer accessor because of a premature self reference.
*/
private def outerSelect(base: Tree): Tree = {
- val outerAcc = outerAccessor(base.tpe.typeSymbol.toInterface)
- val currentClass = this.currentClass //todo: !!! if this line is removed, we get a build failure that protected$currentClass need an override modifier
- // outerFld is the $outer field of the current class, if the reference can
- // use it (i.e. reference is allowed to be of the form this.$outer),
- // otherwise it is NoSymbol
- val outerFld =
- if (outerAcc.owner == currentClass &&
+ val baseSym = base.tpe.typeSymbol.toInterface
+ val outerAcc = outerAccessor(baseSym)
+ if (outerAcc == NoSymbol && baseSym.ownersIterator.exists(isUnderConstruction)) {
+ // e.g neg/t6666.scala
+ // The caller will report the error with more information.
+ EmptyTree
+ } else {
+ val currentClass = this.currentClass //todo: !!! if this line is removed, we get a build failure that protected$currentClass need an override modifier
+ // outerFld is the $outer field of the current class, if the reference can
+ // use it (i.e. reference is allowed to be of the form this.$outer),
+ // otherwise it is NoSymbol
+ val outerFld =
+ if (outerAcc.owner == currentClass &&
base.tpe =:= currentClass.thisType &&
- outerAcc.owner.isFinal)
- outerField(currentClass) suchThat (_.owner == currentClass)
- else
- NoSymbol
- val path =
- if (outerFld != NoSymbol) Select(base, outerFld)
- else Apply(Select(base, outerAcc), Nil)
-
- localTyper typed path
+ outerAcc.owner.isEffectivelyFinal)
+ outerField(currentClass) suchThat (_.owner == currentClass)
+ else
+ NoSymbol
+ val path =
+ if (outerFld != NoSymbol) Select(base, outerFld)
+ else Apply(Select(base, outerAcc), Nil)
+
+ localTyper typed path
+ }
}
/** The path
@@ -241,6 +294,17 @@ abstract class ExplicitOuter extends InfoTransform
else outerPath(outerSelect(base), from.outerClass, to)
}
+
+ /** The stack of class symbols in which a call to this() or to the super
+ * constructor, or early definition is active
+ */
+ protected def isUnderConstruction(clazz: Symbol) = selfOrSuperCalls contains clazz
+ protected val selfOrSuperCalls = mutable.Stack[Symbol]()
+ @inline protected def inSelfOrSuperCall[A](sym: Symbol)(a: => A) = {
+ selfOrSuperCalls push sym
+ try a finally selfOrSuperCalls.pop()
+ }
+
override def transform(tree: Tree): Tree = {
val savedOuterParam = outerParam
try {
@@ -254,7 +318,10 @@ abstract class ExplicitOuter extends InfoTransform
}
case _ =>
}
- super.transform(tree)
+ if ((treeInfo isSelfOrSuperConstrCall tree) || (treeInfo isEarlyDef tree))
+ inSelfOrSuperCall(currentOwner.owner)(super.transform(tree))
+ else
+ super.transform(tree)
}
finally outerParam = savedOuterParam
}
@@ -320,7 +387,8 @@ abstract class ExplicitOuter extends InfoTransform
/** The definition tree of the outer accessor of current class
*/
- def outerFieldDef: Tree = VAL(outerField(currentClass)) === EmptyTree
+ def outerFieldDef: Tree =
+ VAL(outerField(currentClass)) === EmptyTree
/** The definition tree of the outer accessor of current class
*/
@@ -344,42 +412,39 @@ abstract class ExplicitOuter extends InfoTransform
* @pre mixinClass is an inner class
*/
def mixinOuterAccessorDef(mixinClass: Symbol): Tree = {
- val outerAcc = outerAccessor(mixinClass) overridingSymbol currentClass
- assert(outerAcc != NoSymbol)
- val path =
- if (mixinClass.owner.isTerm) THIS(mixinClass.owner.enclClass)
- else gen.mkAttributedQualifier(currentClass.thisType baseType mixinClass prefix)
-
+ val outerAcc = outerAccessor(mixinClass) overridingSymbol currentClass
+ def mixinPrefix = (currentClass.thisType baseType mixinClass).prefix
+ assert(outerAcc != NoSymbol, "No outer accessor for inner mixin " + mixinClass + " in " + currentClass)
+ assert(outerAcc.alternatives.size == 1, s"Multiple outer accessors match inner mixin $mixinClass in $currentClass : ${outerAcc.alternatives.map(_.defString)}")
+ // I added the mixinPrefix.typeArgs.nonEmpty condition to address the
+ // crash in SI-4970. I feel quite sure this can be improved.
+ val path = (
+ if (mixinClass.owner.isTerm) gen.mkAttributedThis(mixinClass.owner.enclClass)
+ else if (mixinPrefix.typeArgs.nonEmpty) gen.mkAttributedThis(mixinPrefix.typeSymbol)
+ else gen.mkAttributedQualifier(mixinPrefix)
+ )
localTyper typed {
(DEF(outerAcc) withPos currentClass.pos) === {
// Need to cast for nested outer refs in presence of self-types. See ticket #3274.
- transformer.transform(path) AS_ANY outerAcc.info.resultType
+ gen.mkCast(transformer.transform(path), outerAcc.info.resultType)
}
}
}
- /** If FLAG is set on symbol, sets notFLAG (this exists in anticipation of generalizing). */
- def setNotFlags(sym: Symbol, flags: Int*) {
- for (f <- flags ; notFlag <- notFlagMap get f)
- if (sym hasFlag f)
- sym setFlag notFlag
- }
-
+ // requires settings.XoldPatmat.value
def matchTranslation(tree: Match) = {
val Match(selector, cases) = tree
var nselector = transform(selector)
def makeGuardDef(vs: List[Symbol], guard: Tree) = {
val gdname = unit.freshTermName("gd")
- val method = currentOwner.newMethod(tree.pos, gdname) setFlag SYNTHETIC
- val fmls = vs map (_.tpe)
- val tpe = new MethodType(method newSyntheticValueParams fmls, BooleanClass.tpe)
- method setInfo tpe
-
- localTyper typed (DEF(method) === {
- new ChangeOwnerTraverser(currentOwner, method) traverse guard
- new TreeSymSubstituter(vs, method.paramss.head) transform (guard)
- })
+ val method = currentOwner.newMethod(gdname, tree.pos, SYNTHETIC)
+ val params = method newSyntheticValueParams vs.map(_.tpe)
+ method setInfo new MethodType(params, BooleanClass.tpe)
+
+ localTyper typed {
+ DEF(method) === guard.changeOwner(currentOwner -> method).substituteSymbols(vs, params)
+ }
}
val nguard = new ListBuffer[Tree]
@@ -403,22 +468,21 @@ abstract class ExplicitOuter extends InfoTransform
(CASE(transform(strippedPat)) IF gdcall) ==> transform(body)
}
- def isUncheckedAnnotation(tpe: Type) = tpe hasAnnotation UncheckedClass
- def isSwitchAnnotation(tpe: Type) = tpe hasAnnotation SwitchClass
-
val (checkExhaustive, requireSwitch) = nselector match {
case Typed(nselector1, tpt) =>
- val unchecked = isUncheckedAnnotation(tpt.tpe)
+ val unchecked = tpt.tpe hasAnnotation UncheckedClass
if (unchecked)
nselector = nselector1
- (!unchecked, isSwitchAnnotation(tpt.tpe))
+ // Don't require a tableswitch if there are 1-2 casedefs
+ // since the matcher intentionally emits an if-then-else.
+ (!unchecked, treeInfo.isSwitchAnnotation(tpt.tpe) && ncases.size > 2)
case _ =>
(true, false)
}
val t = atPos(tree.pos) {
- val context = MatrixContext(currentRun.currentUnit, transform, localTyper, currentOwner, tree.tpe)
+ val context = MatrixContext(currentUnit, transform, localTyper, currentOwner, tree.tpe)
val t_untyped = handlePattern(nselector, ncases, checkExhaustive, context)
/* if @switch annotation is present, verify the resulting tree is a Match */
@@ -438,9 +502,10 @@ abstract class ExplicitOuter extends InfoTransform
/** The main transformation method */
override def transform(tree: Tree): Tree = {
val sym = tree.symbol
- if (sym != null && sym.isType) //(9)
- setNotFlags(sym, PRIVATE, PROTECTED)
-
+ if (sym != null && sym.isType) { //(9)
+ if (sym.isPrivate) sym setFlag notPRIVATE
+ if (sym.isProtected) sym setFlag notPROTECTED
+ }
tree match {
case Template(parents, self, decls) =>
val newDefs = new ListBuffer[Tree]
@@ -453,15 +518,17 @@ abstract class ExplicitOuter extends InfoTransform
}
if (!currentClass.isTrait)
for (mc <- currentClass.mixinClasses)
- if (outerAccessor(mc) != NoSymbol)
+ if (outerAccessor(mc) != NoSymbol && !skipMixinOuterAccessor(currentClass, mc))
newDefs += mixinOuterAccessorDef(mc)
}
}
super.transform(
- treeCopy.Template(tree, parents, self,
- if (newDefs.isEmpty) decls else decls ::: newDefs.toList)
+ deriveTemplate(tree)(decls =>
+ if (newDefs.isEmpty) decls
+ else decls ::: newDefs.toList
+ )
)
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ case DefDef(_, _, _, vparamss, _, rhs) =>
if (sym.isClassConstructor) {
rhs match {
case Literal(_) =>
@@ -470,11 +537,14 @@ abstract class ExplicitOuter extends InfoTransform
val clazz = sym.owner
val vparamss1 =
if (isInner(clazz)) { // (4)
+ if (isUnderConstruction(clazz.outerClass)) {
+ reporter.error(tree.pos, s"Implementation restriction: ${clazz.fullLocationString} requires premature access to ${clazz.outerClass}.")
+ }
val outerParam =
- sym.newValueParameter(sym.pos, nme.OUTER) setInfo outerField(clazz).info
+ sym.newValueParameter(nme.OUTER, sym.pos) setInfo clazz.outerClass.thisType
((ValDef(outerParam) setType NoType) :: vparamss.head) :: vparamss.tail
} else vparamss
- super.transform(treeCopy.DefDef(tree, mods, name, tparams, vparamss1, tpt, rhs))
+ super.transform(copyDefDef(tree)(vparamss = vparamss1))
}
} else
super.transform(tree)
@@ -484,8 +554,15 @@ abstract class ExplicitOuter extends InfoTransform
else atPos(tree.pos)(outerPath(outerValue, currentClass.outerClass, sym)) // (5)
case Select(qual, name) =>
- if (currentClass != sym.owner) // (3)
+ // make not private symbol acessed from inner classes, as well as
+ // symbols accessed from @inline methods
+ //
+ // See SI-6552 for an example of why `sym.owner.enclMethod hasAnnotation ScalaInlineClass`
+ // is not suitable; if we make a method-local class non-private, it mangles outer pointer names.
+ if (currentClass != sym.owner ||
+ (closestEnclMethod(currentOwner) hasAnnotation ScalaInlineClass))
sym.makeNotPrivate(sym.owner)
+
val qsym = qual.tpe.widen.typeSymbol
if (sym.isProtected && //(4)
(qsym.isTrait || !(qual.isInstanceOf[Super] || (qsym isSubClass currentClass))))
@@ -496,7 +573,7 @@ abstract class ExplicitOuter extends InfoTransform
val outerVal = atPos(tree.pos)(qual match {
// it's a call between constructors of same class
case _: This =>
- assert(outerParam != NoSymbol)
+ assert(outerParam != NoSymbol, tree)
outerValue
case _ =>
gen.mkAttributedQualifier(qual.tpe.prefix match {
@@ -507,11 +584,33 @@ abstract class ExplicitOuter extends InfoTransform
super.transform(treeCopy.Apply(tree, sel, outerVal :: args))
// entry point for pattern matcher translation
- case mch: Match =>
- matchTranslation(mch)
+ case m: Match if settings.XoldPatmat.value => // the new pattern matcher runs in its own phase right after typer
+ matchTranslation(m)
+
+ // for the new pattern matcher
+ // base.<outer>.eq(o) --> base.$outer().eq(o) if there's an accessor, else the whole tree becomes TRUE
+ // TODO remove the synthetic `<outer>` method from outerFor??
+ case Apply(eqsel at Select(eqapp at Apply(sel at Select(base, nme.OUTER_SYNTH), Nil), eq), args) if !settings.XoldPatmat.value =>
+ val outerFor = sel.symbol.owner.toInterface // TODO: toInterface necessary?
+ val acc = outerAccessor(outerFor)
+
+ if (acc == NoSymbol ||
+ // since we can't fix SI-4440 properly (we must drop the outer accessors of final classes when there's no immediate reference to them in sight)
+ // at least don't crash... this duplicates maybeOmittable from constructors
+ (acc.owner.isEffectivelyFinal && !acc.isOverridingSymbol)) {
+ unit.uncheckedWarning(tree.pos, "The outer reference in this type test cannot be checked at run time.")
+ return transform(TRUE) // urgh... drop condition if there's no accessor (or if it may disappear after constructors)
+ } else {
+ // println("(base, acc)= "+(base, acc))
+ val outerSelect = localTyper typed Apply(Select(base, acc), Nil)
+ // achieves the same as: localTyper typed atPos(tree.pos)(outerPath(base, base.tpe.typeSymbol, outerFor.outerClass))
+ // println("(b, tpsym, outerForI, outerFor, outerClass)= "+ (base, base.tpe.typeSymbol, outerFor, sel.symbol.owner, outerFor.outerClass))
+ // println("outerSelect = "+ outerSelect)
+ return transform(treeCopy.Apply(tree, treeCopy.Select(eqsel, outerSelect, eq), args))
+ }
case _ =>
- if (settings.Xmigration28.value) tree match {
+ if (settings.Xmigration.value < ScalaVersion.twoDotEight) tree match {
case TypeApply(fn @ Select(qual, _), args) if fn.symbol == Object_isInstanceOf || fn.symbol == Any_isInstanceOf =>
if (isArraySeqTest(qual.tpe, args.head.tpe))
unit.warning(tree.pos, "An Array will no longer match as Seq[_].")
@@ -526,7 +625,7 @@ abstract class ExplicitOuter extends InfoTransform
/** The transformation method for whole compilation units */
override def transformUnit(unit: CompilationUnit) {
- atPhase(phase.next)(super.transformUnit(unit))
+ afterExplicitOuter(super.transformUnit(unit))
}
}
@@ -535,9 +634,5 @@ abstract class ExplicitOuter extends InfoTransform
class Phase(prev: scala.tools.nsc.Phase) extends super.Phase(prev) {
override val checkable = false
- override def run() {
- super.run
- Pattern.clear() // clear the cache
- }
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
new file mode 100644
index 0000000..e0c0cd0
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -0,0 +1,274 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.tools.nsc
+package transform
+
+import symtab._
+import Flags._
+import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
+import scala.tools.nsc.util.FreshNameCreator
+import scala.runtime.ScalaRunTime.{ isAnyVal, isTuple }
+
+/**
+ * Perform Step 1 in the inline classes SIP: Creates extension methods for all
+ * methods in a value class, except parameter or super accessors, or constructors.
+ *
+ * @author Martin Odersky
+ * @version 2.10
+ */
+abstract class ExtensionMethods extends Transform with TypingTransformers {
+
+ import global._ // the global environment
+ import definitions._ // standard classes and methods
+ import typer.{ typed, atOwner } // methods to type trees
+
+ /** the following two members override abstract members in Transform */
+ val phaseName: String = "extmethods"
+
+ def newTransformer(unit: CompilationUnit): Transformer =
+ new Extender(unit)
+
+ /** Generate stream of possible names for the extension version of given instance method `imeth`.
+ * If the method is not overloaded, this stream consists of just "extension$imeth".
+ * If the method is overloaded, the stream has as first element "extensionX$imeth", where X is the
+ * index of imeth in the sequence of overloaded alternatives with the same name. This choice will
+ * always be picked as the name of the generated extension method.
+ * After this first choice, all other possible indices in the range of 0 until the number
+ * of overloaded alternatives are returned. The secondary choices are used to find a matching method
+ * in `extensionMethod` if the first name has the wrong type. We thereby gain a level of insensitivity
+ * of how overloaded types are ordered between phases and picklings.
+ */
+ private def extensionNames(imeth: Symbol): Stream[Name] = {
+ val decl = imeth.owner.info.decl(imeth.name)
+
+ // Bridge generation is done at phase `erasure`, but new scopes are only generated
+ // for the phase after that. So bridges are visible in earlier phases.
+ //
+ // `info.member(imeth.name)` filters these out, but we need to use `decl`
+ // to restrict ourselves to members defined in the current class, so we
+ // must do the filtering here.
+ val declTypeNoBridge = decl.filter(sym => !sym.isBridge).tpe
+
+ declTypeNoBridge match {
+ case OverloadedType(_, alts) =>
+ val index = alts indexOf imeth
+ assert(index >= 0, alts+" does not contain "+imeth)
+ def altName(index: Int) = newTermName(imeth.name+"$extension"+index)
+ altName(index) #:: ((0 until alts.length).toStream filter (index != _) map altName)
+ case tpe =>
+ assert(tpe != NoType, imeth.name+" not found in "+imeth.owner+"'s decls: "+imeth.owner.info.decls)
+ Stream(newTermName(imeth.name+"$extension"))
+ }
+ }
+
+ private def companionModuleForce(sym: Symbol) = {
+ sym.andAlso(_.owner.initialize) // See SI-6976. `companionModule` only calls `rawInfo`. (Why?)
+ sym.companionModule
+ }
+
+ /** Return the extension method that corresponds to given instance method `meth`. */
+ def extensionMethod(imeth: Symbol): Symbol = atPhase(currentRun.refchecksPhase) {
+ val companionInfo = companionModuleForce(imeth.owner).info
+ val candidates = extensionNames(imeth) map (companionInfo.decl(_)) filter (_.exists)
+ val matching = candidates filter (alt => normalize(alt.tpe, imeth.owner) matches imeth.tpe)
+ assert(matching.nonEmpty,
+ sm"""|no extension method found for:
+ |
+ | $imeth:${imeth.tpe}
+ |
+ | Candidates:
+ |
+ | ${candidates.map(c => c.name+":"+c.tpe).mkString("\n")}
+ |
+ | Candidates (signatures normalized):
+ |
+ | ${candidates.map(c => c.name+":"+normalize(c.tpe, imeth.owner)).mkString("\n")}
+ |
+ | Eligible Names: ${extensionNames(imeth).mkString(",")}"""")
+ matching.head
+ }
+
+ /** Recognize a MethodType which represents an extension method.
+ *
+ * It may have a curried parameter list with the `$this` alone in the first
+ * parameter list, in which case that parameter list is dropped. Or, since
+ * the curried lists disappear during uncurry, it may have a single parameter
+ * list with `$this` as the first parameter, in which case that parameter is
+ * removed from the list.
+ */
+ object ExtensionMethodType {
+ def unapply(tp: Type) = tp match {
+ case MethodType(thiz :: rest, restpe) if thiz.name == nme.SELF =>
+ Some((thiz, if (rest.isEmpty) restpe else MethodType(rest, restpe) ))
+ case _ =>
+ None
+ }
+ }
+
+ /** This method removes the `$this` argument from the parameter list a method.
+ *
+ * A method may be a `PolyType`, in which case we tear out the `$this` and the class
+ * type params from its nested `MethodType`. Or it may be a MethodType, as
+ * described at the ExtensionMethodType extractor.
+ */
+ private def normalize(stpe: Type, clazz: Symbol): Type = stpe match {
+ case PolyType(tparams, restpe) =>
+ // method type parameters, class type parameters
+ val (mtparams, ctparams) = tparams splitAt (tparams.length - clazz.typeParams.length)
+ GenPolyType(mtparams,
+ normalize(restpe.substSym(ctparams, clazz.typeParams), clazz))
+ case ExtensionMethodType(thiz, etpe) =>
+ etpe.substituteTypes(thiz :: Nil, clazz.thisType :: Nil)
+ case _ =>
+ stpe
+ }
+
+ class Extender(unit: CompilationUnit) extends TypingTransformer(unit) {
+ private val extensionDefs = mutable.Map[Symbol, mutable.ListBuffer[Tree]]()
+
+ def checkNonCyclic(pos: Position, seen: Set[Symbol], clazz: Symbol): Unit =
+ if (seen contains clazz)
+ unit.error(pos, "value class may not unbox to itself")
+ else {
+ val unboxed = erasure.underlyingOfValueClass(clazz).typeSymbol
+ if (unboxed.isDerivedValueClass) checkNonCyclic(pos, seen + clazz, unboxed)
+ }
+
+ /** We will need to clone the info of the original method (which obtains clones
+ * of the method type parameters), clone the type parameters of the value class,
+ * and create a new polymethod with the union of all those type parameters, with
+ * their infos adjusted to be consistent with their new home. Example:
+ *
+ * class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal {
+ * def baz[B >: A](x: B): List[B] = x :: xs
+ * // baz has to be transformed into this extension method, where
+ * // A is cloned from class Foo and B is cloned from method baz:
+ * // def extension$baz[B >: A <: Any, A >: Nothing <: AnyRef]($this: Foo[A])(x: B): List[B]
+ * }
+ *
+ * TODO: factor out the logic for consolidating type parameters from a class
+ * and a method for re-use elsewhere, because nobody will get this right without
+ * some higher level facilities.
+ */
+ def extensionMethInfo(extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = {
+ val GenPolyType(tparamsFromMethod, methodResult) = origInfo cloneInfo extensionMeth
+ // Start with the class type parameters - clones will be method type parameters
+ // so must drop their variance.
+ val tparamsFromClass = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT)
+
+ val thisParamType = appliedType(clazz, tparamsFromClass map (_.tpeHK): _*)
+ val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType
+ val resultType = MethodType(List(thisParam), dropNullaryMethod(methodResult))
+ val selfParamType = singleType(currentOwner.companionModule.thisType, thisParam)
+
+ def fixres(tp: Type) = tp substThisAndSym (clazz, selfParamType, clazz.typeParams, tparamsFromClass)
+ def fixtparam(tp: Type) = tp substSym (clazz.typeParams, tparamsFromClass)
+
+ // We can't substitute symbols on the entire polytype because we
+ // need to modify the bounds of the cloned type parameters, but we
+ // don't want to substitute for the cloned type parameters themselves.
+ val tparams = tparamsFromMethod ::: tparamsFromClass
+ GenPolyType(tparams map (_ modifyInfo fixtparam), fixres(resultType))
+
+ // For reference, calling fix on the GenPolyType plays out like this:
+ // error: scala.reflect.internal.Types$TypeError: type arguments [B#7344,A#6966]
+ // do not conform to method extension$baz#16148's type parameter bounds
+ //
+ // And the difference is visible here. See how B is bounded from below by A#16149
+ // in both cases, but in the failing case, the other type parameter has turned into
+ // a different A. (What is that A? It is a clone of the original A created in
+ // SubstMap during the call to substSym, but I am not clear on all the particulars.)
+ //
+ // bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154]
+ // good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151]
+ }
+ override def transform(tree: Tree): Tree = {
+ tree match {
+ case Template(_, _, _) =>
+ if (currentOwner.isDerivedValueClass) {
+ /* This is currently redundant since value classes may not
+ wrap over other value classes anyway.
+ checkNonCyclic(currentOwner.pos, Set(), currentOwner) */
+ extensionDefs(currentOwner.companionModule) = new mutable.ListBuffer[Tree]
+ currentOwner.primaryConstructor.makeNotPrivate(NoSymbol)
+ super.transform(tree)
+ } else if (currentOwner.isStaticOwner) {
+ super.transform(tree)
+ } else tree
+ case DefDef(_, _, tparams, vparamss, _, rhs) if tree.symbol.isMethodWithExtension =>
+ val origMeth = tree.symbol
+ val origThis = currentOwner
+ val origTpeParams = tparams.map(_.symbol) ::: origThis.typeParams // method type params ++ class type params
+ val origParams = vparamss.flatten map (_.symbol)
+ val companion = origThis.companionModule
+
+ def makeExtensionMethodSymbol = {
+ val extensionName = extensionNames(origMeth).head
+ val extensionMeth = (
+ companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
+ setAnnotations origMeth.annotations
+ )
+ companion.info.decls.enter(extensionMeth)
+ }
+
+ val extensionMeth = makeExtensionMethodSymbol
+ val newInfo = extensionMethInfo(extensionMeth, origMeth.info, origThis)
+ extensionMeth setInfo newInfo
+
+ log(s"Value class $origThis spawns extension method.\n Old: ${origMeth.defString}\n New: ${extensionMeth.defString}")
+
+ val GenPolyType(extensionTpeParams, MethodType(thiz :: Nil, extensionMono)) = newInfo
+ val extensionParams = allParameters(extensionMono)
+ val extensionThis = gen.mkAttributedStableRef(thiz setPos extensionMeth.pos)
+
+ val extensionBody = (
+ rhs
+ .substituteSymbols(origTpeParams, extensionTpeParams)
+ .substituteSymbols(origParams, extensionParams)
+ .substituteThis(origThis, extensionThis)
+ .changeOwner(origMeth -> extensionMeth)
+ )
+ val castBody =
+ if (extensionBody.tpe <:< extensionMono.finalResultType)
+ extensionBody
+ else
+ gen.mkCastPreservingAnnotations(extensionBody, extensionMono.finalResultType) // SI-7818 e.g. mismatched existential skolems
+
+ // Record the extension method ( FIXME: because... ? )
+ extensionDefs(companion) += atPos(tree.pos)(DefDef(extensionMeth, castBody))
+
+ // These three lines are assembling Foo.bar$extension[T1, T2, ...]($this)
+ // which leaves the actual argument application for extensionCall.
+ val sel = Select(gen.mkAttributedRef(companion), extensionMeth)
+ val targs = origTpeParams map (_.tpeHK)
+ val callPrefix = gen.mkMethodCall(sel, targs, This(origThis) :: Nil)
+
+ // Apply all the argument lists.
+ deriveDefDef(tree)(_ =>
+ atOwner(origMeth)(
+ localTyper.typedPos(rhs.pos)(
+ gen.mkForwarder(callPrefix, mmap(vparamss)(_.symbol))
+ )
+ )
+ )
+ case _ =>
+ super.transform(tree)
+ }
+ }
+
+ override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] =
+ super.transformStats(stats, exprOwner) map {
+ case md @ ModuleDef(_, _, _) if extensionDefs contains md.symbol =>
+ val defns = extensionDefs(md.symbol).toList map (member =>
+ atOwner(md.symbol)(localTyper.typedPos(md.pos.focus)(member))
+ )
+ extensionDefs -= md.symbol
+ deriveModuleDef(md)(tmpl => deriveTemplate(tmpl)(_ ++ defns))
+ case stat =>
+ stat
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala
index 27e226f..cd26f95 100644
--- a/src/compiler/scala/tools/nsc/transform/Flatten.scala
+++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -18,44 +18,72 @@ abstract class Flatten extends InfoTransform {
/** the following two members override abstract members in Transform */
val phaseName: String = "flatten"
+ /** Updates the owning scope with the given symbol; returns the old symbol.
+ */
+ private def replaceSymbolInCurrentScope(sym: Symbol): Symbol = afterFlatten {
+ val scope = sym.owner.info.decls
+ val old = scope lookup sym.name andAlso scope.unlink
+ scope enter sym
+
+ if (old eq NoSymbol)
+ log(s"lifted ${sym.fullLocationString}")
+ else
+ log(s"lifted ${sym.fullLocationString} after unlinking existing $old from scope.")
+
+ old
+ }
+
private def liftClass(sym: Symbol) {
- if (!(sym hasFlag LIFTED)) {
+ if (!sym.isLifted) {
sym setFlag LIFTED
- atPhase(phase.next) {
- if (settings.debug.value) log("re-enter " + sym + " in " + sym.owner)
- assert(sym.owner.isPackageClass, sym) //debug
- val scope = sym.owner.info.decls
- val old = scope lookup sym.name
- if (old != NoSymbol) scope unlink old
- scope enter sym
- }
+ debuglog("re-enter " + sym.fullLocationString)
+ replaceSymbolInCurrentScope(sym)
+ }
+ }
+ private def liftSymbol(sym: Symbol) {
+ liftClass(sym)
+ if (sym.needsImplClass)
+ liftClass(erasure implClass sym)
+ }
+ // This is a short-term measure partially working around objects being
+ // lifted out of parameterized classes, leaving them referencing
+ // invisible type parameters.
+ private def isFlattenablePrefix(pre: Type) = {
+ val clazz = pre.typeSymbol
+ clazz.isClass && !clazz.isPackageClass && {
+ // Cannot flatten here: class A[T] { object B }
+ // was "at erasurePhase.prev"
+ beforeErasure(clazz.typeParams.isEmpty)
}
}
private val flattened = new TypeMap {
def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) if (pre.typeSymbol.isClass && !pre.typeSymbol.isPackageClass) =>
- assert(args.isEmpty)
- assert(sym.toplevelClass != NoSymbol, sym.ownerChain)
- typeRef(sym.toplevelClass.owner.thisType, sym, args)
+ case TypeRef(pre, sym, args) if isFlattenablePrefix(pre) =>
+ assert(args.isEmpty && sym.enclosingTopLevelClass != NoSymbol, sym.ownerChain)
+ typeRef(sym.enclosingTopLevelClass.owner.thisType, sym, Nil)
case ClassInfoType(parents, decls, clazz) =>
var parents1 = parents
- val decls1 = new Scope
- if (clazz.isPackageClass) {
- atPhase(phase.next)(decls.toList foreach (sym => decls1 enter sym))
- } else {
- val oldowner = clazz.owner
- atPhase(phase.next)(oldowner.info)
- parents1 = parents mapConserve (this)
- for (sym <- decls.toList) {
- if (sym.isTerm && !sym.isStaticModule) {
- decls1 enter sym
- if (sym.isModule) sym.moduleClass setFlag LIFTED
- } else if (sym.isClass) {
- liftClass(sym)
- if (sym.needsImplClass) liftClass(erasure.implClass(sym))
+ val decls1 = scopeTransform(clazz) {
+ val decls1 = newScope
+ if (clazz.isPackageClass) {
+ afterFlatten { decls foreach (decls1 enter _) }
+ }
+ else {
+ val oldowner = clazz.owner
+ afterFlatten { oldowner.info }
+ parents1 = parents mapConserve (this)
+
+ for (sym <- decls) {
+ if (sym.isTerm && !sym.isStaticModule) {
+ decls1 enter sym
+ if (sym.isModule)
+ sym.moduleClass setFlag LIFTED
+ } else if (sym.isClass)
+ liftSymbol(sym)
}
}
+ decls1
}
ClassInfoType(parents1, decls1, clazz)
case MethodType(params, restp) =>
@@ -74,7 +102,6 @@ abstract class Flatten extends InfoTransform {
protected def newTransformer(unit: CompilationUnit): Transformer = new Flattener
class Flattener extends Transformer {
-
/** Buffers for lifted out classes */
private val liftedDefs = perRunCaches.newMap[Symbol, ListBuffer[Tree]]()
@@ -82,7 +109,7 @@ abstract class Flatten extends InfoTransform {
tree match {
case PackageDef(_, _) =>
liftedDefs(tree.symbol.moduleClass) = new ListBuffer
- case Template(_, _, _) if tree.symbol.owner.hasPackageFlag =>
+ case Template(_, _, _) if tree.symbol.isDefinedInPackage =>
liftedDefs(tree.symbol.owner) = new ListBuffer
case _ =>
}
@@ -93,14 +120,10 @@ abstract class Flatten extends InfoTransform {
val sym = tree.symbol
val tree1 = tree match {
case ClassDef(_, _, _, _) if sym.isNestedClass =>
- liftedDefs(sym.toplevelClass.owner) += tree
+ liftedDefs(sym.enclosingTopLevelClass.owner) += tree
EmptyTree
case Select(qual, name) if (sym.isStaticModule && !sym.owner.isPackageClass) =>
- atPhase(phase.next) {
- atPos(tree.pos) {
- gen.mkAttributedRef(sym)
- }
- }
+ afterFlatten(atPos(tree.pos)(gen.mkAttributedRef(sym)))
case _ =>
tree
}
diff --git a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
index 0905fa8..b6dbaca 100644
--- a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author
*/
@@ -37,7 +37,7 @@ trait InfoTransform extends Transform {
val changesBaseClasses = InfoTransform.this.changesBaseClasses
def transform(sym: Symbol, tpe: Type): Type = transformInfo(sym, tpe)
}
- infoTransformers.insert(infoTransformer)
+ infoTransformers insert infoTransformer
}
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/InlineErasure.scala b/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
new file mode 100644
index 0000000..0af3cf7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
@@ -0,0 +1,9 @@
+package scala.tools.nsc
+package transform
+
+trait InlineErasure { self: Erasure =>
+
+ import global._
+ import definitions._
+
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 6764de8..631468d 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author
*/
@@ -9,7 +9,8 @@ package transform
import symtab._
import Flags._
import util.TreeSet
-import scala.collection.mutable.{ LinkedHashMap, ListBuffer }
+import scala.collection.{ mutable, immutable }
+import scala.collection.mutable.{ LinkedHashMap, LinkedHashSet }
abstract class LambdaLift extends InfoTransform {
import global._
@@ -31,7 +32,9 @@ abstract class LambdaLift extends InfoTransform {
}
}
- def transformInfo(sym: Symbol, tp: Type): Type = lifted(tp)
+ def transformInfo(sym: Symbol, tp: Type): Type =
+ if (sym.isCapturedVariable) capturedVariableType(sym, tpe = lifted(tp), erasedTypes = true)
+ else lifted(tp)
protected def newTransformer(unit: CompilationUnit): Transformer =
new LambdaLifter(unit)
@@ -47,9 +50,42 @@ abstract class LambdaLift extends InfoTransform {
/** A hashtable storing calls between functions */
private val called = new LinkedHashMap[Symbol, SymSet]
+ /** Symbols that are called from an inner class. */
+ private val calledFromInner = new LinkedHashSet[Symbol]
+
/** The set of symbols that need to be renamed. */
private val renamable = newSymSet
+ /**
+ * The new names for free variables proxies. If we simply renamed the
+ * free variables, we would transform:
+ * {{{
+ * def closure(x: Int) = { () => x }
+ * }}}
+ *
+ * To:
+ * {{{
+ * def closure(x$1: Int) = new anonFun$1(this, x$1)
+ * class anonFun$1(outer$: Outer, x$1: Int) { def apply() => x$1 }
+ * }}}
+ *
+ * This is fatally bad for named arguments (0e170e4b), extremely impolite to tools
+ * reflecting on the method parameter names in the generated bytecode (SI-6028),
+ * and needlessly bothersome to anyone using a debugger.
+ *
+ * Instead, we transform to:
+ * {{{
+ * def closure(x: Int) = new anonFun$1(this, x)
+ * class anonFun$1(outer$: Outer, x$1: Int) { def apply() => x$1 }
+ * }}}
+ */
+ private val proxyNames = mutable.HashMap[Symbol, Name]()
+
+ // (trait, name) -> owner
+ private val localTraits = mutable.HashMap[(Symbol, Name), Symbol]()
+ // (owner, name) -> implClass
+ private val localImplClasses = mutable.HashMap[(Symbol, Name), Symbol]()
+
/** A flag to indicate whether new free variables have been found */
private var changedFreeVars: Boolean = _
@@ -66,14 +102,14 @@ abstract class LambdaLift extends InfoTransform {
private def isSameOwnerEnclosure(sym: Symbol) =
sym.owner.logicallyEnclosingMember == currentOwner.logicallyEnclosingMember
- /** Mark symbol `sym' as being free in `enclosure', unless `sym'
- * is defined in `enclosure' or there is a class between `enclosure's owner
- * and the owner of `sym'.
- * Return `true' if there is no class between `enclosure' and
+ /** Mark symbol `sym` as being free in `enclosure`, unless `sym`
+ * is defined in `enclosure` or there is a class between `enclosure`s owner
+ * and the owner of `sym`.
+ * Return `true` if there is no class between `enclosure` and
* the owner of sym.
* pre: sym.isLocal, (enclosure.isMethod || enclosure.isClass)
*
- * The idea of `markFree' is illustrated with an example:
+ * The idea of `markFree` is illustrated with an example:
*
* def f(x: int) = {
* class C {
@@ -83,8 +119,8 @@ abstract class LambdaLift extends InfoTransform {
* }
* }
*
- * In this case `x' is free in the primary constructor of class `C'.
- * but it is not free in `D', because after lambda lift the code would be transformed
+ * In this case `x` is free in the primary constructor of class `C`.
+ * but it is not free in `D`, because after lambda lift the code would be transformed
* as follows:
*
* def f(x$0: int) {
@@ -97,47 +133,28 @@ abstract class LambdaLift extends InfoTransform {
* }
*/
private def markFree(sym: Symbol, enclosure: Symbol): Boolean = {
- if (settings.debug.value)
- log("mark free: " + sym + " of " + sym.owner + " marked free in " + enclosure)
- if (enclosure == sym.owner.logicallyEnclosingMember) true
- else if (enclosure.isPackageClass || !markFree(sym, enclosure.skipConstructor.owner.logicallyEnclosingMember)) false
- else {
- val ss = symSet(free, enclosure)
- if (!ss(sym)) {
- ss addEntry sym
- renamable addEntry sym
- atPhase(currentRun.picklerPhase) {
- // The param symbol in the MethodType should not be renamed, only the symbol in scope. This way,
- // parameter names for named arguments are not changed. Example: without cloning the MethodType,
- // def closure(x: Int) = { () => x }
- // would have the signature
- // closure: (x$1: Int)() => Int
- if (sym.isParameter && sym.owner.info.paramss.exists(_ contains sym))
- sym.owner.setInfo(sym.owner.info.cloneInfo(sym.owner))
- }
- changedFreeVars = true
- if (settings.debug.value) log("" + sym + " is free in " + enclosure);
- if ((sym.isVariable || (sym.isValue && sym.isLazy)) && !sym.hasFlag(CAPTURED)) {
- sym setFlag CAPTURED
- val symClass = sym.tpe.typeSymbol
- atPhase(phase.next) {
- sym updateInfo (
- if (sym.hasAnnotation(VolatileAttr))
- if (isValueClass(symClass)) volatileRefClass(symClass).tpe else VolatileObjectRefClass.tpe
- else
- if (isValueClass(symClass)) refClass(symClass).tpe else ObjectRefClass.tpe
- )
- }
+ debuglog("mark free: " + sym.fullLocationString + " marked free in " + enclosure)
+ (enclosure == sym.owner.logicallyEnclosingMember) || {
+ debuglog("%s != %s".format(enclosure, sym.owner.logicallyEnclosingMember))
+ if (enclosure.isPackageClass || !markFree(sym, enclosure.skipConstructor.owner.logicallyEnclosingMember)) false
+ else {
+ val ss = symSet(free, enclosure)
+ if (!ss(sym)) {
+ ss addEntry sym
+ renamable addEntry sym
+ changedFreeVars = true
+ debuglog("" + sym + " is free in " + enclosure);
+ if (sym.isVariable) sym setFlag CAPTURED
}
+ !enclosure.isClass
}
- !enclosure.isClass
}
}
private def markCalled(sym: Symbol, owner: Symbol) {
- if (settings.debug.value)
- log("mark called: " + sym + " of " + sym.owner + " is called by " + owner)
+ debuglog("mark called: " + sym + " of " + sym.owner + " is called by " + owner)
symSet(called, owner) addEntry sym
+ if (sym.enclClass != owner.enclClass) calledFromInner += sym
}
/** The traverse function */
@@ -148,11 +165,30 @@ abstract class LambdaLift extends InfoTransform {
tree match {
case ClassDef(_, _, _, _) =>
liftedDefs(tree.symbol) = Nil
- if (sym.isLocal) renamable addEntry sym
+ if (sym.isLocal) {
+ // Don't rename implementation classes independently of their interfaces. If
+ // the interface is to be renamed, then we will rename the implementation
+ // class at that time. You'd think we could call ".implClass" on the trait
+ // rather than collecting them in another map, but that seems to fail for
+ // exactly the traits being renamed here (i.e. defined in methods.)
+ //
+ // !!! - it makes no sense to have methods like "implClass" and
+ // "companionClass" which fail for an arbitrary subset of nesting
+ // arrangements, and then have separate methods which attempt to compensate
+ // for that failure. There should be exactly one method for any given
+ // entity which always gives the right answer.
+ if (sym.isImplClass)
+ localImplClasses((sym.owner, tpnme.interfaceName(sym.name))) = sym
+ else {
+ renamable addEntry sym
+ if (sym.isTrait)
+ localTraits((sym, sym.name)) = sym.owner
+ }
+ }
case DefDef(_, _, _, _, _, _) =>
if (sym.isLocal) {
renamable addEntry sym
- sym setFlag (PRIVATE | LOCAL | FINAL)
+ sym setFlag (PrivateLocal | FINAL)
} else if (sym.isPrimaryConstructor) {
symSet(called, sym) addEntry sym.owner
}
@@ -179,7 +215,7 @@ abstract class LambdaLift extends InfoTransform {
}
}
- /** Compute free variables map `fvs'.
+ /** Compute free variables map `fvs`.
* Also assign unique names to all
* value/variable/let that are free in some function or class, and to
* all class/function symbols that are owned by some function.
@@ -193,65 +229,120 @@ abstract class LambdaLift extends InfoTransform {
markFree(fv, caller)
} while (changedFreeVars)
- for (sym <- renamable) {
+ def renameSym(sym: Symbol) {
+ val originalName = sym.name
+ sym setName newName(sym)
+ debuglog("renaming in %s: %s => %s".format(sym.owner.fullLocationString, originalName, sym.name))
+ }
+
+ def newName(sym: Symbol): Name = {
val originalName = sym.name
- val base = sym.name + "$" + (
- if (sym.isAnonymousFunction && sym.owner.isMethod)
- sym.owner.name + "$"
- else ""
- )
- sym.name =
- if (sym.name.isTypeName) unit.freshTypeName(base)
- else unit.freshTermName(base)
-
- if (settings.debug.value)
- log("renaming in %s: %s => %s".format(sym.owner.fullLocationString, originalName, sym.name))
+ def freshen(prefix: String): Name =
+ if (originalName.isTypeName) unit.freshTypeName(prefix)
+ else unit.freshTermName(prefix)
+
+ if (sym.isAnonymousFunction && sym.owner.isMethod) {
+ freshen(sym.name + nme.NAME_JOIN_STRING + sym.owner.name + nme.NAME_JOIN_STRING)
+ } else {
+ // SI-5652 If the lifted symbol is accessed from an inner class, it will be made public. (where?)
+ // Generating a a unique name, mangled with the enclosing class name, avoids a VerifyError
+ // in the case that a sub-class happens to lifts out a method with the *same* name.
+ val name = freshen(sym.name + nme.NAME_JOIN_STRING)
+ if (originalName.isTermName && !sym.enclClass.isImplClass && calledFromInner(sym)) nme.expandedName(name, sym.enclClass)
+ else name
+ }
+ }
+
+ /** Rename a trait's interface and implementation class in coordinated fashion.
+ */
+ def renameTrait(traitSym: Symbol, implSym: Symbol) {
+ val originalImplName = implSym.name
+ renameSym(traitSym)
+ implSym setName tpnme.implClassName(traitSym.name)
+
+ debuglog("renaming impl class in step with %s: %s => %s".format(traitSym, originalImplName, implSym.name))
+ }
+
+ val allFree: Set[Symbol] = free.values.flatMap(_.iterator).toSet
+
+ for (sym <- renamable) {
+ // If we renamed a trait from Foo to Foo$1, we must rename the implementation
+ // class from Foo$class to Foo$1$class. (Without special consideration it would
+ // become Foo$class$1 instead.) Since the symbols are being renamed out from
+ // under us, and there's no reliable link between trait symbol and impl symbol,
+ // we have maps from ((trait, name)) -> owner and ((owner, name)) -> impl.
+ localTraits remove ((sym, sym.name)) match {
+ case None =>
+ if (allFree(sym)) proxyNames(sym) = newName(sym)
+ else renameSym(sym)
+ case Some(owner) =>
+ localImplClasses remove ((owner, sym.name)) match {
+ case Some(implSym) => renameTrait(sym, implSym)
+ case _ => renameSym(sym) // pure interface, no impl class
+ }
+ }
}
- atPhase(phase.next) {
+ afterOwnPhase {
for ((owner, freeValues) <- free.toList) {
- if (settings.debug.value)
- log("free var proxy: %s, %s".format(owner.fullLocationString, freeValues.toList.mkString(", ")))
-
- proxies(owner) =
- for (fv <- freeValues.toList) yield {
- val proxy = owner.newValue(owner.pos, fv.name)
- .setFlag(if (owner.isClass) PARAMACCESSOR | PRIVATE | LOCAL else PARAM)
- .setFlag(SYNTHETIC)
- .setInfo(fv.info);
- if (owner.isClass) owner.info.decls enter proxy;
- proxy
- }
+ val newFlags = SYNTHETIC | ( if (owner.isClass) PARAMACCESSOR | PrivateLocal else PARAM )
+ debuglog("free var proxy: %s, %s".format(owner.fullLocationString, freeValues.toList.mkString(", ")))
+ proxies(owner) =
+ for (fv <- freeValues.toList) yield {
+ val proxyName = proxyNames.getOrElse(fv, fv.name)
+ val proxy = owner.newValue(proxyName, owner.pos, newFlags) setInfo fv.info
+ if (owner.isClass) owner.info.decls enter proxy
+ proxy
+ }
}
}
}
private def proxy(sym: Symbol) = {
- def searchIn(searchee: Symbol): Symbol = {
- if (settings.debug.value)
- log("searching for " + sym + "(" + sym.owner + ") in " + searchee + " " + searchee.logicallyEnclosingMember)
+ def searchIn(enclosure: Symbol): Symbol = {
+ if (enclosure eq NoSymbol) throw new IllegalArgumentException("Could not find proxy for "+ sym.defString +" in "+ sym.ownerChain +" (currentOwner= "+ currentOwner +" )")
+ debuglog("searching for " + sym + "(" + sym.owner + ") in " + enclosure + " " + enclosure.logicallyEnclosingMember)
- val ps = (proxies get searchee.logicallyEnclosingMember).toList.flatten filter (_.name == sym.name)
- if (ps.isEmpty) searchIn(searchee.skipConstructor.owner)
- else ps.head
+ val proxyName = proxyNames.getOrElse(sym, sym.name)
+ val ps = (proxies get enclosure.logicallyEnclosingMember).toList.flatten find (_.name == proxyName)
+ ps getOrElse searchIn(enclosure.skipConstructor.owner)
}
- if (settings.debug.value)
- log("proxy " + sym + " in " + sym.owner + " from " + currentOwner.ownerChain.mkString(" -> ") +
- " " + sym.owner.logicallyEnclosingMember)
+ debuglog("proxy %s from %s has logical enclosure %s".format(
+ sym.debugLocationString,
+ currentOwner.debugLocationString,
+ sym.owner.logicallyEnclosingMember.debugLocationString)
+ )
if (isSameOwnerEnclosure(sym)) sym
else searchIn(currentOwner)
}
- private def memberRef(sym: Symbol) = {
+ private def memberRef(sym: Symbol): Tree = {
val clazz = sym.owner.enclClass
//Console.println("memberRef from "+currentClass+" to "+sym+" in "+clazz)
- val qual = if (clazz == currentClass) gen.mkAttributedThis(clazz)
- else {
- sym resetFlag(LOCAL | PRIVATE)
- if (clazz.isStaticOwner) gen.mkAttributedQualifier(clazz.thisType)
- else outerPath(outerValue, currentClass.outerClass, clazz)
- }
+ def prematureSelfReference() {
+ val what =
+ if (clazz.isStaticOwner) clazz.fullLocationString
+ else s"the unconstructed `this` of ${clazz.fullLocationString}"
+ val msg = s"Implementation restriction: access of ${sym.fullLocationString} from ${currentClass.fullLocationString}, would require illegal premature access to $what"
+ currentUnit.error(curTree.pos, msg)
+ }
+ val qual =
+ if (clazz == currentClass) gen.mkAttributedThis(clazz)
+ else {
+ sym resetFlag (LOCAL | PRIVATE)
+ if (isUnderConstruction(clazz)) {
+ prematureSelfReference()
+ EmptyTree
+ }
+ else if (clazz.isStaticOwner) gen.mkAttributedQualifier(clazz.thisType)
+ else {
+ outerValue match {
+ case EmptyTree => prematureSelfReference(); return EmptyTree
+ case o => outerPath(o, currentClass.outerClass, clazz)
+ }
+ }
+ }
Select(qual, sym) setType sym.tpe
}
@@ -271,12 +362,14 @@ abstract class LambdaLift extends InfoTransform {
case Some(ps) =>
val freeParams = ps map (p => ValDef(p) setPos tree.pos setType NoType)
tree match {
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ case DefDef(_, _, _, vparams :: _, _, _) =>
val addParams = cloneSymbols(ps).map(_.setFlag(PARAM))
sym.updateInfo(
lifted(MethodType(sym.info.params ::: addParams, sym.info.resultType)))
- treeCopy.DefDef(tree, mods, name, tparams, List(vparamss.head ++ freeParams), tpt, rhs)
- case ClassDef(mods, name, tparams, impl @ Template(parents, self, body)) =>
+
+ copyDefDef(tree)(vparamss = List(vparams ++ freeParams))
+ case ClassDef(_, _, _, _) =>
+ // SI-6231
// Disabled attempt to to add getters to freeParams
// this does not work yet. Problem is that local symbols need local names
// and references to local symbols need to be transformed into
@@ -288,14 +381,13 @@ abstract class LambdaLift extends InfoTransform {
// DefDef(getter, rhs) setPos tree.pos setType NoType
// }
// val newDefs = if (sym.isTrait) freeParams ::: (ps map paramGetter) else freeParams
- treeCopy.ClassDef(tree, mods, name, tparams,
- treeCopy.Template(impl, parents, self, body ::: freeParams))
+ deriveClassDef(tree)(impl => deriveTemplate(impl)(_ ::: freeParams))
}
case None =>
tree
}
-/* Something like this will be necessary to eliminate the implementation
+/* SI-6231: Something like this will be necessary to eliminate the implementation
* restiction from paramGetter above:
* We need to pass getters to the interface of an implementation class.
private def fixTraitGetters(lifted: List[Tree]): List[Tree] =
@@ -337,11 +429,11 @@ abstract class LambdaLift extends InfoTransform {
if (sym.isMethod) sym setFlag LIFTED
liftedDefs(sym.owner) ::= tree
sym.owner.info.decls enterUnique sym
- if (settings.debug.value) log("lifted: " + sym + " from " + oldOwner + " to " + sym.owner)
+ debuglog("lifted: " + sym + " from " + oldOwner + " to " + sym.owner)
EmptyTree
}
- private def postTransform(tree: Tree): Tree = {
+ private def postTransform(tree: Tree, isBoxedRef: Boolean = false): Tree = {
val sym = tree.symbol
tree match {
case ClassDef(_, _, _, _) =>
@@ -356,17 +448,53 @@ abstract class LambdaLift extends InfoTransform {
/* Creating a constructor argument if one isn't present. */
val constructorArg = rhs match {
case EmptyTree =>
- sym.primaryConstructor.info.paramTypes match {
+ sym.tpe.typeSymbol.primaryConstructor.info.paramTypes match {
case List(tp) => gen.mkZero(tp)
case _ =>
- log("Couldn't determine how to properly construct " + sym)
+ debugwarn("Couldn't determine how to properly construct " + sym)
rhs
}
case arg => arg
}
- treeCopy.ValDef(tree, mods, name, tpt1, typer.typedPos(rhs.pos) {
- Apply(Select(New(TypeTree(sym.tpe)), nme.CONSTRUCTOR), List(constructorArg))
- })
+
+ /** Wrap expr argument in new *Ref(..) constructor. But try/catch
+ * is a problem because a throw will clear the stack and post catch
+ * we would expect the partially-constructed object to be on the stack
+ * for the call to init. So we recursively
+ * search for "leaf" result expressions where we know its safe
+ * to put the new *Ref(..) constructor or, if all else fails, transform
+ * an expr to { val temp=expr; new *Ref(temp) }.
+ * The reason we narrowly look for try/catch in captured var definitions
+ * is because other try/catch expression have already been lifted
+ * see SI-6863
+ */
+ def refConstr(expr: Tree): Tree = typer.typedPos(expr.pos) {expr match {
+ // very simple expressions can be wrapped in a new *Ref(expr) because they can't have
+ // a try/catch in final expression position.
+ case Ident(_) | Apply(_, _) | Literal(_) | New(_) | Select(_, _) | Throw(_) | Assign(_, _) | ValDef(_, _, _, _) | Return(_) | EmptyTree =>
+ New(sym.tpe, expr)
+ case Try(block, catches, finalizer) =>
+ Try(refConstr(block), catches map refConstrCase, finalizer)
+ case Block(stats, expr) =>
+ Block(stats, refConstr(expr))
+ case If(cond, trueBranch, falseBranch) =>
+ If(cond, refConstr(trueBranch), refConstr(falseBranch))
+ case Match(selector, cases) =>
+ Match(selector, cases map refConstrCase)
+ // if we can't figure out what else to do, turn expr into {val temp1 = expr; new *Ref(temp1)} to avoid
+ // any possibility of try/catch in the *Ref constructor. This should be a safe tranformation as a default
+ // though it potentially wastes a variable slot. In particular this case handles LabelDefs.
+ case _ =>
+ debuglog("assigning expr to temp: " + (expr.pos))
+ val tempSym = currentOwner.newValue(unit.freshTermName("temp"), expr.pos) setInfo expr.tpe
+ val tempDef = ValDef(tempSym, expr) setPos expr.pos
+ val tempRef = Ident(tempSym) setPos expr.pos
+ Block(tempDef, New(sym.tpe, tempRef))
+ }}
+ def refConstrCase(cdef: CaseDef): CaseDef =
+ CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
+
+ treeCopy.ValDef(tree, mods, name, tpt1, refConstr(constructorArg))
} else tree
case Return(Block(stats, value)) =>
Block(stats, treeCopy.Return(tree, value)) setType tree.tpe setPos tree.pos
@@ -389,7 +517,7 @@ abstract class LambdaLift extends InfoTransform {
atPos(tree.pos)(proxyRef(sym))
else tree
else tree
- if (sym.isCapturedVariable)
+ if (sym.isCapturedVariable && !isBoxedRef)
atPos(tree.pos) {
val tp = tree.tpe
val elemTree = typer typed Select(tree1 setType sym.tpe, nme.elem)
@@ -397,34 +525,41 @@ abstract class LambdaLift extends InfoTransform {
}
else tree1
case Block(stats, expr0) =>
- val (lzyVals, rest) = stats.partition {
- case stat at ValDef(_, _, _, _) if stat.symbol.isLazy => true
- case stat at ValDef(_, _, _, _) if stat.symbol.hasFlag(MODULEVAR) => true
- case _ => false
- }
- treeCopy.Block(tree, lzyVals:::rest, expr0)
+ val (lzyVals, rest) = stats partition {
+ case stat: ValDef => stat.symbol.isLazy || stat.symbol.isModuleVar
+ case _ => false
+ }
+ if (lzyVals.isEmpty) tree
+ else treeCopy.Block(tree, lzyVals ::: rest, expr0)
case _ =>
tree
}
}
- override def transform(tree: Tree): Tree =
- postTransform(super.transform(tree) setType lifted(tree.tpe))
+ private def preTransform(tree: Tree) = super.transform(tree) setType lifted(tree.tpe)
+
+ override def transform(tree: Tree): Tree = tree match {
+ case Select(ReferenceToBoxed(idt), elem) if elem == nme.elem =>
+ postTransform(preTransform(idt), isBoxedRef = false)
+ case ReferenceToBoxed(idt) =>
+ postTransform(preTransform(idt), isBoxedRef = true)
+ case _ =>
+ postTransform(preTransform(tree))
+ }
/** Transform statements and add lifted definitions to them. */
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
def addLifted(stat: Tree): Tree = stat match {
- case ClassDef(mods, name, tparams, impl @ Template(parents, self, body)) =>
+ case ClassDef(_, _, _, _) =>
val lifted = liftedDefs get stat.symbol match {
case Some(xs) => xs reverseMap addLifted
case _ => log("unexpectedly no lifted defs for " + stat.symbol) ; Nil
}
- val result = treeCopy.ClassDef(
- stat, mods, name, tparams, treeCopy.Template(impl, parents, self, body ::: lifted))
- liftedDefs -= stat.symbol
- result
- case DefDef(mods, name, tp, vp, tpt, Block(Nil, expr)) if !stat.symbol.isConstructor =>
- treeCopy.DefDef(stat, mods, name, tp, vp, tpt, expr)
+ try deriveClassDef(stat)(impl => deriveTemplate(impl)(_ ::: lifted))
+ finally liftedDefs -= stat.symbol
+
+ case DefDef(_, _, _, _, _, Block(Nil, expr)) if !stat.symbol.isConstructor =>
+ deriveDefDef(stat)(_ => expr)
case _ =>
stat
}
@@ -432,8 +567,10 @@ abstract class LambdaLift extends InfoTransform {
}
override def transformUnit(unit: CompilationUnit) {
- computeFreeVars
- atPhase(phase.next)(super.transformUnit(unit))
+ computeFreeVars()
+ afterOwnPhase {
+ super.transformUnit(unit)
+ }
assert(liftedDefs.isEmpty, liftedDefs.keys mkString ", ")
}
} // class LambdaLifter
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index f53cc9b..21213cf 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -4,7 +4,7 @@ package transform
import scala.collection.{ mutable, immutable }
abstract class LazyVals extends Transform with TypingTransformers with ast.TreeDSL {
- // inherits abstract value `global' and class `Phase' from Transform
+ // inherits abstract value `global` and class `Phase` from Transform
import global._ // the global environment
import definitions._ // standard classes and methods
@@ -12,7 +12,8 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
import CODE._
val phaseName: String = "lazyvals"
- val FLAGS_PER_WORD: Int
+ private val FLAGS_PER_BYTE: Int = 8 // Byte
+ private def bitmapKind = ByteClass
def newTransformer(unit: CompilationUnit): Transformer =
new LazyValues(unit)
@@ -53,7 +54,6 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
private val lazyVals = perRunCaches.newMap[Symbol, Int]() withDefaultValue 0
import symtab.Flags._
- import lazyVals._
/** Perform the following transformations:
* - for a lazy accessor inside a method, make it check the initialization bitmap
@@ -68,8 +68,20 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
curTree = tree
tree match {
- case DefDef(mods, name, tparams, vparams, tpt, rhs) => atOwner(tree.symbol) {
- val res = if (!sym.owner.isClass && sym.isLazy) {
+
+ case Block(_, _) =>
+ val block1 = super.transform(tree)
+ val Block(stats, expr) = block1
+ val stats1 = stats.flatMap(_ match {
+ case Block(List(d1 at DefDef(_, n1, _, _, _, _)), d2 at DefDef(_, n2, _, _, _, _)) if (nme.newLazyValSlowComputeName(n2) == n1) =>
+ List(d1, d2)
+ case stat =>
+ List(stat)
+ })
+ treeCopy.Block(block1, stats1, expr)
+
+ case DefDef(_, _, _, _, _, rhs) => atOwner(tree.symbol) {
+ val (res, slowPathDef) = if (!sym.owner.isClass && sym.isLazy) {
val enclosingClassOrDummyOrMethod = {
val enclMethod = sym.enclMethod
@@ -82,32 +94,33 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
} else
sym.owner
}
+ debuglog(s"determined enclosing class/dummy/method for lazy val as $enclosingClassOrDummyOrMethod given symbol $sym")
val idx = lazyVals(enclosingClassOrDummyOrMethod)
lazyVals(enclosingClassOrDummyOrMethod) = idx + 1
- val rhs1 = mkLazyDef(enclosingClassOrDummyOrMethod, super.transform(rhs), idx, sym)
+ val (rhs1, sDef) = mkLazyDef(enclosingClassOrDummyOrMethod, transform(rhs), idx, sym)
sym.resetFlag((if (lazyUnit(sym)) 0 else LAZY) | ACCESSOR)
- rhs1
- } else
- super.transform(rhs)
-
- treeCopy.DefDef(tree, mods, name, tparams, vparams, tpt,
- if (LocalLazyValFinder.find(res)) typed(addBitmapDefs(sym, res)) else res)
+ (rhs1, sDef)
+ } else
+ (transform(rhs), EmptyTree)
+
+ val ddef1 = deriveDefDef(tree)(_ => if (LocalLazyValFinder.find(res)) typed(addBitmapDefs(sym, res)) else res)
+ if (slowPathDef != EmptyTree) Block(slowPathDef, ddef1) else ddef1
}
- case Template(parents, self, body) => atOwner(currentOwner) {
+ case Template(_, _, body) => atOwner(currentOwner) {
val body1 = super.transformTrees(body)
var added = false
val stats =
for (stat <- body1) yield stat match {
- case Block(_, _) | Apply(_, _) | If(_, _, _) if !added =>
+ case Block(_, _) | Apply(_, _) | If(_, _, _) | Try(_, _, _) if !added =>
// Avoid adding bitmaps when they are fully overshadowed by those
// that are added inside loops
if (LocalLazyValFinder.find(stat)) {
added = true
typed(addBitmapDefs(sym, stat))
} else stat
- case ValDef(mods, name, tpt, rhs) =>
- typed(treeCopy.ValDef(stat, mods, name, tpt, addBitmapDefs(stat.symbol, rhs)))
+ case ValDef(_, _, _, _) =>
+ typed(deriveValDef(stat)(addBitmapDefs(stat.symbol, _)))
case _ =>
stat
}
@@ -122,29 +135,29 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
})
toAdd0
} else List()
- treeCopy.Template(tree, parents, self, innerClassBitmaps ++ stats)
+ deriveTemplate(tree)(_ => innerClassBitmaps ++ stats)
}
- case ValDef(mods, name, tpt, rhs0) if (!sym.owner.isModule && !sym.owner.isClass) =>
- val rhs = super.transform(rhs0)
- treeCopy.ValDef(tree, mods, name, tpt,
- if (LocalLazyValFinder.find(rhs)) typed(addBitmapDefs(sym, rhs)) else rhs)
+ case ValDef(_, _, _, _) if !sym.owner.isModule && !sym.owner.isClass =>
+ deriveValDef(tree) { rhs0 =>
+ val rhs = transform(rhs0)
+ if (LocalLazyValFinder.find(rhs)) typed(addBitmapDefs(sym, rhs)) else rhs
+ }
case l at LabelDef(name0, params0, ifp0 at If(_, _, _)) if name0.startsWith(nme.WHILE_PREFIX) =>
val ifp1 = super.transform(ifp0)
val If(cond0, thenp0, elsep0) = ifp1
+
if (LocalLazyValFinder.find(thenp0))
- treeCopy.LabelDef(l, name0, params0,
- treeCopy.If(ifp1, cond0, typed(addBitmapDefs(sym.owner, thenp0)), elsep0))
+ deriveLabelDef(l)(_ => treeCopy.If(ifp1, cond0, typed(addBitmapDefs(sym.owner, thenp0)), elsep0))
else
l
- case l at LabelDef(name0, params0, block at Block(stats0, _))
+ case l at LabelDef(name0, params0, block at Block(stats0, expr))
if name0.startsWith(nme.WHILE_PREFIX) || name0.startsWith(nme.DO_WHILE_PREFIX) =>
val stats1 = super.transformTrees(stats0)
if (LocalLazyValFinder.find(stats1))
- treeCopy.LabelDef(l, name0, params0,
- treeCopy.Block(block, typed(addBitmapDefs(sym.owner, stats1.head))::stats1.tail, block.expr))
+ deriveLabelDef(l)(_ => treeCopy.Block(block, typed(addBitmapDefs(sym.owner, stats1.head))::stats1.tail, expr))
else
l
@@ -169,13 +182,32 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
def isMatch(params: List[Ident]) = (params.tail corresponds methSym.tpe.params)(_.tpe == _.tpe)
if (bmps.isEmpty) rhs else rhs match {
- case Block(assign, l @ LabelDef(name, params, rhs1))
+ case Block(assign, l @ LabelDef(name, params, _))
if name.toString == ("_" + methSym.name) && isMatch(params) =>
- Block(assign, treeCopy.LabelDef(l, name, params, typed(prependStats(bmps, rhs1))))
+ Block(assign, deriveLabelDef(l)(rhs => typed(prependStats(bmps, rhs))))
case _ => prependStats(bmps, rhs)
}
}
+
+ def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
+ stats: List[Tree], retVal: Tree): Tree = {
+ val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, STABLE | PRIVATE)
+ defSym setInfo MethodType(List(), lzyVal.tpe.resultType)
+ defSym.owner = lzyVal.owner
+ debuglog(s"crete slow compute path $defSym with owner ${defSym.owner} for lazy val $lzyVal")
+ if (bitmaps.contains(lzyVal))
+ bitmaps(lzyVal).map(_.owner = defSym)
+ val rhs: Tree = (gen.mkSynchronizedCheck(clazz, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
+ DEF(defSym).mkTree(addBitmapDefs(lzyVal, BLOCK(rhs, retVal))) setSymbol defSym
+ }
+
+
+ def mkFastPathBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
+ stats: List[Tree], retVal: Tree): (Tree, Tree) = {
+ val slowPathDef: Tree = mkSlowPathDef(clazz, lzyVal, cond, syncBody, stats, retVal)
+ (If(cond, Apply(ID(slowPathDef.symbol), List()), retVal), slowPathDef)
+ }
/** return a 'lazified' version of rhs. Rhs should conform to the
* following schema:
@@ -186,33 +218,39 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
* <rhs> when the lazy value has type Unit (for which there is no field
* to cache it's value.
*
- * The result will be a tree of the form
- * {
- * if ((bitmap$n & MASK) == 0) {
+ * Similarly as for normal lazy val members (see Mixin), the result will be a tree of the form
+ * { if ((bitmap&n & MASK) == 0) this.l$compute()
+ * else l$
+ *
+ * def l$compute() = { synchronized(enclosing_class_or_dummy) {
+ * if ((bitmap$n & MASK) == 0) {
* l$ = <rhs>
* bitmap$n = bimap$n | MASK
+ * }}
+ * l$
* }
- * l$
* }
- * where bitmap$n is an int value acting as a bitmap of initialized values. It is
- * the 'n' is (offset / 32), the MASK is (1 << (offset % 32)). If the value has type
- * unit, no field is used to cache the value, so the resulting code is:
+ * where bitmap$n is a byte value acting as a bitmap of initialized values. It is
+ * the 'n' is (offset / 8), the MASK is (1 << (offset % 8)). If the value has type
+ * unit, no field is used to cache the value, so the l$compute will now look as following:
* {
- * if ((bitmap$n & MASK) == 0) {
+ * def l$compute() = { synchronized(enclosing_class_or_dummy) {
+ * if ((bitmap$n & MASK) == 0) {
* <rhs>;
* bitmap$n = bimap$n | MASK
- * }
+ * }}
* ()
+ * }
* }
*/
- private def mkLazyDef(methOrClass: Symbol, tree: Tree, offset: Int, lazyVal: Symbol): Tree = {
+ private def mkLazyDef(methOrClass: Symbol, tree: Tree, offset: Int, lazyVal: Symbol): (Tree, Tree) = {
val bitmapSym = getBitmapFor(methOrClass, offset)
- val mask = LIT(1 << (offset % FLAGS_PER_WORD))
+ val mask = LIT(1 << (offset % FLAGS_PER_BYTE))
val bitmapRef = if (methOrClass.isClass) Select(This(methOrClass), bitmapSym) else Ident(bitmapSym)
def mkBlock(stmt: Tree) = BLOCK(stmt, mkSetFlag(bitmapSym, mask, bitmapRef), UNIT)
-
+ debuglog(s"create complete lazy def in $methOrClass for $lazyVal")
val (block, res) = tree match {
case Block(List(assignment), res) if !lazyUnit(lazyVal) =>
(mkBlock(assignment), res)
@@ -220,16 +258,13 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
(mkBlock(rhs), UNIT)
}
- val cond = (bitmapRef INT_& mask) INT_== ZERO
-
- atPos(tree.pos)(localTyper.typed {
- def body = gen.mkDoubleCheckedLocking(methOrClass.enclClass, cond, List(block), Nil)
- BLOCK(body, res)
- })
+ val cond = (bitmapRef GEN_& (mask, bitmapKind)) GEN_== (ZERO, bitmapKind)
+ val lazyDefs = mkFastPathBody(methOrClass.enclClass, lazyVal, cond, List(block), Nil, res)
+ (atPos(tree.pos)(localTyper.typed {lazyDefs._1 }), atPos(tree.pos)(localTyper.typed {lazyDefs._2 }))
}
private def mkSetFlag(bmp: Symbol, mask: Tree, bmpRef: Tree): Tree =
- bmpRef === (bmpRef INT_| mask)
+ bmpRef === (bmpRef GEN_| (mask, bitmapKind))
val bitmaps = mutable.Map[Symbol, List[Symbol]]() withDefaultValue Nil
@@ -237,14 +272,14 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
* given offset.
*/
private def getBitmapFor(meth: Symbol, offset: Int): Symbol = {
- val n = offset / FLAGS_PER_WORD
+ val n = offset / FLAGS_PER_BYTE
val bmps = bitmaps(meth)
if (bmps.length > n)
bmps(n)
else {
- val sym = meth.newVariable(meth.pos, nme.bitmapName(n)).setInfo(IntClass.tpe)
- atPhase(currentRun.typerPhase) {
- sym addAnnotation AnnotationInfo(VolatileAttr.tpe, Nil, Nil)
+ val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(ByteClass.tpe)
+ beforeTyper {
+ sym addAnnotation VolatileAttr
}
bitmaps(meth) = (sym :: bmps).reverse
diff --git a/src/compiler/scala/tools/nsc/transform/LiftCode.scala b/src/compiler/scala/tools/nsc/transform/LiftCode.scala
deleted file mode 100644
index b52419f..0000000
--- a/src/compiler/scala/tools/nsc/transform/LiftCode.scala
+++ /dev/null
@@ -1,148 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Gilles Dubochet
- */
-
-package scala.tools.nsc
-package transform
-
-import symtab._
-import Flags._
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.util.FreshNameCreator
-
-/** Translate expressions of the form reflect.Code.lift(exp)
- * to the lifted "reflect trees" representation of exp.
- *
- * @author Gilles Dubochet
- * @version 1.0
- */
-abstract class LiftCode extends Transform with Reifiers {
-
- import global._ // the global environment
- import definitions._ // standard classes and methods
- import typer.{typed, atOwner} // methods to type trees
-
- val symbols: global.type = global
-
- /** the following two members override abstract members in Transform */
- val phaseName: String = "liftcode"
-
- def newTransformer(unit: CompilationUnit): Transformer =
- new AddRefFields(unit)
-
- class AddRefFields(unit: CompilationUnit) extends Transformer {
- override def transform(tree: Tree): Tree = tree match {
- case Apply(lift, List(tree))
- if lift.symbol == Code_lift =>
- typed(atPos(tree.pos)(codify(tree)))
- case _ =>
- super.transform(tree)
- }
- }
-
-
- type InjectEnvironment = immutable.ListMap[reflect.Symbol, Name]
-
- class Injector(env: InjectEnvironment, fresh: FreshNameCreator) {
-
- // todo replace className by caseName in CaseClass once we have switched to nsc.
- def className(value: AnyRef): String = value match {
- case _ :: _ => "scala.$colon$colon"
- case reflect.MethodType(_, _) =>
- "scala.reflect.MethodType"
- case x:Product =>
- "scala.reflect."+x.productPrefix //caseName
- //case _ => // bq:unreachable code
- // ""
- }
-
- def objectName(value: Any): String = value match {
- case Nil => "scala.collection.immutable.Nil"
- case reflect.NoSymbol => "scala.reflect.NoSymbol"
- case reflect.RootSymbol => "scala.reflect.RootSymbol"
- case reflect.NoPrefix => "scala.reflect.NoPrefix"
- case reflect.NoType => "scala.reflect.NoType"
- case _ => ""
- }
-
- def inject(value: Any): Tree = {
- def treatProduct(c: Product) = {
- val name = objectName(c)
- if (name.length() != 0)
- gen.mkAttributedRef(definitions.getModule(name))
- else {
- val name = className(c)
- if (name.length() == 0) abort("don't know how to inject " + value)
- val injectedArgs = new ListBuffer[Tree]
- for (i <- 0 until c.productArity)
- injectedArgs += inject(c.productElement(i))
- New(Ident(definitions.getClass(name)), List(injectedArgs.toList))
- }
- }
- value match {
- case FreeValue(tree) =>
- New(Ident(definitions.getClass("scala.reflect.Literal")), List(List(tree)))
- case () => Literal(Constant(()))
- case x: String => Literal(Constant(x))
- case x: Boolean => Literal(Constant(x))
- case x: Byte => Literal(Constant(x))
- case x: Short => Literal(Constant(x))
- case x: Char => Literal(Constant(x))
- case x: Int => Literal(Constant(x))
- case x: Long => Literal(Constant(x))
- case x: Float => Literal(Constant(x))
- case x: Double => Literal(Constant(x))
- case c: Product => treatProduct(c)
- case null =>
- gen.mkAttributedRef(definitions.getModule("scala.reflect.NoType"))
- case _ =>
- abort("don't know how to inject " + value)
- }
- }
- } // Injector
-
-
- def inject(code: reflect.Tree): Tree =
- new Injector(immutable.ListMap.empty, new FreshNameCreator.Default).inject(code)
-
- def codify (tree: Tree): Tree =
- New(TypeTree(appliedType(definitions.CodeClass.typeConstructor,
- List(tree.tpe))),
- List(List(inject(reify(tree)))))
-
-}
-
-// case EmptyTree =>
-// case LiftPoint(tree) =>
-// case PackageDef(pid, stats) =>
-// case ClassDef(mods, name, tparams, impl) =>
-// case ValDef(mods, name, tpt, rhs) =>
-// case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
-// case TypeDef(mods, name, tparams, rhs) =>
-// case LabelDef(name, params, rhs) =>
-// case Template(parents, self, body) =>
-// case Block(stats, expr) =>
-// case ArrayValue(elemtpt, trees) =>
-// case Assign(lhs, rhs) =>
-// case If(cond, thenp, elsep) =>
-// case Match(selector, cases) =>
-// case Return(expr) =>
-// case Try(block, catches, finalizer) =>
-// case Throw(expr) =>
-// case New(tpt) =>
-// case Typed(expr, tpt) =>
-// case TypeApply(fun, args) =>
-// case Apply(fun, args) =>
-// case Super(qual, mix) =>
-// case This(qual) =>
-// case Select(qualifier, selector) =>
-// case Ident(name) =>
-// case Literal(value) =>
-// case TypeTree() =>
-// /* Pattern matching */
-// case CaseDef(pat, guard, body) =>
-// case Alternative(trees) =>
-// case Star(elem) =>
-// case Bind(name, body) =>
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index c805eb9..e92450c 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -9,7 +9,6 @@ package transform
import symtab._
import Flags._
import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
abstract class Mixin extends InfoTransform with ast.TreeDSL {
import global._
@@ -20,12 +19,15 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val phaseName: String = "mixin"
/** The phase might set the following new flags: */
- override def phaseNewFlags: Long = lateMODULE | notABSTRACT
+ override def phaseNewFlags: Long = lateMODULE | notOVERRIDE
/** This map contains a binding (class -> info) if
* the class with this info at phase mixinPhase has been treated for mixin composition
*/
- private val treatedClassInfos = perRunCaches.newMap[Symbol, Type]()
+ private val treatedClassInfos = perRunCaches.newMap[Symbol, Type]() withDefaultValue NoType
+
+ /** Map a lazy, mixedin field accessor to it's trait member accessor */
+ private val initializer = perRunCaches.newMap[Symbol, Symbol]
// --------- helper functions -----------------------------------------------
@@ -40,10 +42,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - field accessors and superaccessors, except for lazy value accessors which become initializer
* methods in the impl class (because they can have arbitrary initializers)
*/
- private def isImplementedStatically(sym: Symbol) =
- sym.owner.isImplClass && sym.isMethod &&
- (!sym.isModule || sym.hasFlag(PRIVATE | LIFTED)) &&
- (!(sym hasFlag (ACCESSOR | SUPERACCESSOR)) || sym.isLazy)
+ private def isImplementedStatically(sym: Symbol) = (
+ sym.owner.isImplClass
+ && sym.isMethod
+ && (!sym.isModule || sym.hasFlag(PRIVATE | LIFTED))
+ && (!(sym hasFlag (ACCESSOR | SUPERACCESSOR)) || sym.isLazy)
+ )
/** A member of a trait is static only if it belongs only to the
* implementation class, not the interface, and it is implemented
@@ -64,15 +68,41 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* maps all other types to themselves.
*/
private def toInterface(tp: Type): Type =
- atPhase(currentRun.mixinPhase)(tp.typeSymbol.toInterface).tpe
+ beforeMixin(tp.typeSymbol.toInterface).tpe
+
+ private def isFieldWithBitmap(field: Symbol) = {
+ field.info // ensure that nested objects are transformed
+ // For checkinit consider normal value getters
+ // but for lazy values only take into account lazy getters
+ field.isLazy && field.isMethod && !field.isDeferred
+ }
+
+ /** Does this field require an initialized bit?
+ * Note: fields of classes inheriting DelayedInit are not checked.
+ * This is because the they are neither initialized in the constructor
+ * nor do they have a setter (not if they are vals anyway). The usual
+ * logic for setting bitmaps does therefor not work for such fields.
+ * That's why they are excluded.
+ * Note: The `checkinit` option does not check if transient fields are initialized.
+ */
+ private def needsInitFlag(sym: Symbol) = (
+ settings.checkInit.value
+ && sym.isGetter
+ && !sym.isInitializedToDefault
+ && !sym.hasFlag(PARAMACCESSOR | SPECIALIZED | LAZY)
+ && !sym.accessed.hasFlag(PRESUPER)
+ && !sym.isOuterAccessor
+ && !(sym.owner isSubClass DelayedInitClass)
+ && !(sym.accessed hasAnnotation TransientAttr)
+ )
/** Maps all parts of this type that refer to implementation classes to
* their corresponding interfaces.
*/
private val toInterfaceMap = new TypeMap {
def apply(tp: Type): Type = mapOver( tp match {
- case TypeRef(pre, sym, args) if (sym.isImplClass) =>
- typeRef(pre, atPhase(currentRun.mixinPhase)(sym.toInterface), args)
+ case TypeRef(pre, sym, args) if sym.isImplClass =>
+ typeRef(pre, beforeMixin(sym.toInterface), args)
case _ => tp
})
}
@@ -80,10 +110,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
/** The implementation class corresponding to a currently compiled interface.
* todo: try to use Symbol.implClass instead?
*/
- private def implClass(iface: Symbol): Symbol = {
- val impl = iface.implClass
- if (impl != NoSymbol) impl else erasure.implClass(iface)
- }
+ private def implClass(iface: Symbol) = iface.implClass orElse (erasure implClass iface)
/** Returns the symbol that is accessed by a super-accessor in a mixin composition.
*
@@ -92,22 +119,20 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* @param mixinClass The mixin class that produced the superaccessor
*/
private def rebindSuper(base: Symbol, member: Symbol, mixinClass: Symbol): Symbol =
- atPhase(currentRun.picklerPhase.next) {
- var bcs = base.info.baseClasses.dropWhile(mixinClass !=).tail
+ afterSpecialize {
+ var bcs = base.info.baseClasses.dropWhile(mixinClass != _).tail
var sym: Symbol = NoSymbol
- if (settings.debug.value)
- log("starting rebindsuper " + base + " " + member + ":" + member.tpe +
+ debuglog("starting rebindsuper " + base + " " + member + ":" + member.tpe +
" " + mixinClass + " " + base.info.baseClasses + "/" + bcs)
while (!bcs.isEmpty && sym == NoSymbol) {
if (settings.debug.value) {
val other = bcs.head.info.nonPrivateDecl(member.name);
- log("rebindsuper " + bcs.head + " " + other + " " + other.tpe +
+ debuglog("rebindsuper " + bcs.head + " " + other + " " + other.tpe +
" " + other.isDeferred)
}
sym = member.matchingSymbol(bcs.head, base.thisType).suchThat(sym => !sym.hasFlag(DEFERRED | BRIDGE))
bcs = bcs.tail
}
- assert(sym != NoSymbol, member)
sym
}
@@ -116,8 +141,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def isConcreteAccessor(member: Symbol) =
member.hasAccessorFlag && (!member.isDeferred || (member hasFlag lateDEFERRED))
- /** Is member overridden (either directly or via a bridge) in base class sequence `bcs'? */
- def isOverriddenAccessor(member: Symbol, bcs: List[Symbol]): Boolean = atPhase(ownPhase) {
+ /** Is member overridden (either directly or via a bridge) in base class sequence `bcs`? */
+ def isOverriddenAccessor(member: Symbol, bcs: List[Symbol]): Boolean = beforeOwnPhase {
def hasOverridingAccessor(clazz: Symbol) = {
clazz.info.nonPrivateDecl(member.name).alternatives.exists(
sym =>
@@ -125,16 +150,39 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
!sym.hasFlag(MIXEDIN) &&
matchesType(sym.tpe, member.tpe, true))
}
- bcs.head != member.owner &&
- (hasOverridingAccessor(bcs.head) || isOverriddenAccessor(member, bcs.tail))
+ ( bcs.head != member.owner
+ && (hasOverridingAccessor(bcs.head) || isOverriddenAccessor(member, bcs.tail))
+ )
}
/** Add given member to given class, and mark member as mixed-in.
*/
def addMember(clazz: Symbol, member: Symbol): Symbol = {
- if (settings.debug.value) log("new member of " + clazz + ":" + member.defString)
- clazz.info.decls enter member
- member.setFlag(MIXEDIN)
+ debuglog("new member of " + clazz + ":" + member.defString)
+ clazz.info.decls enter member setFlag MIXEDIN
+ }
+ def cloneAndAddMember(mixinClass: Symbol, mixinMember: Symbol, clazz: Symbol): Symbol =
+ addMember(clazz, cloneBeforeErasure(mixinClass, mixinMember, clazz))
+
+ def cloneBeforeErasure(mixinClass: Symbol, mixinMember: Symbol, clazz: Symbol): Symbol = {
+ val newSym = beforeErasure {
+ // since we used `mixinMember` from the interface that represents the trait that's
+ // being mixed in, have to instantiate the interface type params (that may occur in mixinMember's
+ // info) as they are seen from the class. We can't use the member that we get from the
+ // implementation class, as it's a clone that was made after erasure, and thus it does not
+ // know its info at the beginning of erasure anymore.
+ // Optimize: no need if mixinClass has no typeparams.
+ mixinMember cloneSymbol clazz modifyInfo (info =>
+ if (mixinClass.typeParams.isEmpty) info
+ else (clazz.thisType baseType mixinClass) memberInfo mixinMember
+ )
+ }
+ // clone before erasure got rid of type info we'll need to generate a javaSig
+ // now we'll have the type info at (the beginning of) erasure in our history,
+ // and now newSym has the info that's been transformed to fit this period
+ // (no need for asSeenFrom as phase.erasedTypes)
+ // TODO: verify we need the updateInfo and document why
+ newSym updateInfo (mixinMember.info cloneInfo newSym)
}
def needsExpandedSetterName(field: Symbol) = !field.isLazy && (
@@ -149,21 +197,20 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - lazy fields don't get a setter.
*/
def addLateInterfaceMembers(clazz: Symbol) {
- if ((treatedClassInfos get clazz) != Some(clazz.info)) {
+ def makeConcrete(member: Symbol) =
+ member setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED)
+
+ if (treatedClassInfos(clazz) != clazz.info) {
treatedClassInfos(clazz) = clazz.info
- assert(phase == currentRun.mixinPhase)
+ assert(phase == currentRun.mixinPhase, phase)
/** Create a new getter. Getters are never private or local. They are
* always accessors and deferred. */
def newGetter(field: Symbol): Symbol = {
// println("creating new getter for "+ field +" : "+ field.info +" at "+ field.locationString+(field hasFlag MUTABLE))
- // atPhase(currentRun.erasurePhase){
- // println("before erasure: "+ (field.info))
- // }
- clazz.newMethod(field.pos, nme.getterName(field.name))
- .setFlag(field.flags & ~(PRIVATE | LOCAL) | ACCESSOR | lateDEFERRED |
- (if (field.isMutable) 0 else STABLE))
- .setInfo(MethodType(List(), field.info)) // TODO preserve pre-erasure info?
+ val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED | ( if (field.isMutable) 0 else STABLE )
+ // TODO preserve pre-erasure info?
+ clazz.newMethod(nme.getterName(field.name), field.pos, newFlags) setInfo MethodType(Nil, field.info)
}
/** Create a new setter. Setters are never private or local. They are
@@ -171,21 +218,20 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def newSetter(field: Symbol): Symbol = {
//println("creating new setter for "+field+field.locationString+(field hasFlag MUTABLE))
val setterName = nme.getterToSetter(nme.getterName(field.name))
- val setter = clazz.newMethod(field.pos, setterName)
- .setFlag(field.flags & ~(PRIVATE | LOCAL) | ACCESSOR | lateDEFERRED)
- setter.setInfo(MethodType(setter.newSyntheticValueParams(List(field.info)), UnitClass.tpe)) // TODO preserve pre-erasure info?
- if (needsExpandedSetterName(field)) {
- //println("creating expanded setter from "+field)
+ val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED
+ val setter = clazz.newMethod(setterName, field.pos, newFlags)
+ // TODO preserve pre-erasure info?
+ setter setInfo MethodType(setter.newSyntheticValueParams(List(field.info)), UnitClass.tpe)
+ if (needsExpandedSetterName(field))
setter.name = nme.expandedSetterName(setter.name, clazz)
- }
+
setter
}
clazz.info // make sure info is up to date, so that implClass is set.
- val impl = implClass(clazz)
- assert(impl != NoSymbol)
+ val impl = implClass(clazz) orElse abort("No impl class for " + clazz)
- for (member <- impl.info.decls.toList) {
+ for (member <- impl.info.decls) {
if (!member.isMethod && !member.isModule && !member.isModuleVar) {
assert(member.isTerm && !member.isDeferred, member)
if (member.getter(impl).isPrivate) {
@@ -199,16 +245,10 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
}
}
- if (settings.debug.value) log("new defs of " + clazz + " = " + clazz.info.decls);
+ debuglog("new defs of " + clazz + " = " + clazz.info.decls);
}
}
- /** Map a lazy, mixedin field accessor to it's trait member accessor */
- val initializer = perRunCaches.newMap[Symbol, Symbol]
-
- /** Deferred bitmaps that will be added during the transformation of a class */
- val deferredBitmaps = perRunCaches.newMap[Symbol, List[Tree]]()
-
/** Add all members to be mixed in into a (non-trait-) class
* These are:
* for every mixin trait T that is not also inherited by the superclass:
@@ -220,135 +260,112 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - for every super accessor in T, add an implementation of that accessor
* - for every module in T, add a module
*/
- def addMixedinMembers(clazz: Symbol, unit : CompilationUnit) {
- def cloneBeforeErasure(iface: Symbol, clazz: Symbol, imember: Symbol): Symbol = {
- val newSym = atPhase(currentRun.erasurePhase){
- val res = imember.cloneSymbol(clazz)
- // since we used the member (imember) from the interface that represents the trait that's being mixed in,
- // have to instantiate the interface type params (that may occur in imember's info) as they are seen from the class
- // we can't use the member that we get from the implementation class, as it's a clone that was made after erasure,
- // and thus it does not know its info at the beginning of erasure anymore
- // optimize: no need if iface has no typeparams
- if(iface.typeParams nonEmpty) res.setInfo(clazz.thisType.baseType(iface).memberInfo(imember))
- res
- } // clone before erasure got rid of type info we'll need to generate a javaSig
- // now we'll have the type info at (the beginning of) erasure in our history,
- newSym.updateInfo(imember.info.cloneInfo(newSym)) // and now newSym has the info that's been transformed to fit this period (no need for asSeenFrom as phase.erasedTypes)
- newSym // TODO: verify we need the updateInfo and document why
+ def addMixedinMembers(clazz: Symbol, unit: CompilationUnit) {
+ def cloneAndAddMixinMember(mixinClass: Symbol, mixinMember: Symbol): Symbol = (
+ cloneAndAddMember(mixinClass, mixinMember, clazz)
+ setPos clazz.pos
+ resetFlag DEFERRED | lateDEFERRED
+ )
+
+ /** Mix in members of implementation class mixinClass into class clazz */
+ def mixinImplClassMembers(mixinClass: Symbol, mixinInterface: Symbol) {
+ if (!mixinClass.isImplClass) debugwarn ("Impl class flag is not set " +
+ ((mixinClass.debugLocationString, mixinInterface.debugLocationString)))
+
+ for (member <- mixinClass.info.decls ; if isForwarded(member)) {
+ val imember = member overriddenSymbol mixinInterface
+ imember overridingSymbol clazz match {
+ case NoSymbol =>
+ if (clazz.info.findMember(member.name, 0, lateDEFERRED, false).alternatives contains imember)
+ cloneAndAddMixinMember(mixinInterface, imember).asInstanceOf[TermSymbol] setAlias member
+ case _ =>
+ }
+ }
}
- if (!(clazz hasFlag JAVA) && (treatedClassInfos get clazz) != Some(clazz.info)) {
- treatedClassInfos(clazz) = clazz.info
+ /** Mix in members of trait mixinClass into class clazz. Also,
+ * for each lazy field in mixinClass, add a link from its mixed in member to its
+ * initializer method inside the implclass.
+ */
+ def mixinTraitMembers(mixinClass: Symbol) {
+ // For all members of a trait's interface do:
+ for (mixinMember <- mixinClass.info.decls) {
+ if (isConcreteAccessor(mixinMember)) {
+ if (isOverriddenAccessor(mixinMember, clazz.info.baseClasses))
+ debugwarn("!!! is overridden val: "+mixinMember.fullLocationString)
+ else {
+ // mixin field accessors
+ val mixedInAccessor = cloneAndAddMixinMember(mixinClass, mixinMember)
+ if (mixinMember.isLazy) {
+ initializer(mixedInAccessor) = (
+ implClass(mixinClass).info.decl(mixinMember.name)
+ orElse abort("Could not find initializer for " + mixinMember.name)
+ )
+ }
+ if (!mixinMember.isSetter)
+ mixinMember.tpe match {
+ case MethodType(Nil, ConstantType(_)) =>
+ // mixinMember is a constant; only getter is needed
+ ;
+ case MethodType(Nil, TypeRef(_, UnitClass, _)) =>
+ // mixinMember is a value of type unit. No field needed
+ ;
+ case _ => // otherwise mixin a field as well
+ // atPhase: the private field is moved to the implementation class by erasure,
+ // so it can no longer be found in the mixinMember's owner (the trait)
+ val accessed = beforePickler(mixinMember.accessed)
+ // #3857, need to retain info before erasure when cloning (since cloning only
+ // carries over the current entry in the type history)
+ val sym = beforeErasure {
+ // so we have a type history entry before erasure
+ clazz.newValue(nme.getterToLocal(mixinMember.name), mixinMember.pos).setInfo(mixinMember.tpe.resultType)
+ }
+ sym updateInfo mixinMember.tpe.resultType // info at current phase
- assert(!clazz.isTrait, clazz)
- assert(clazz.info.parents.nonEmpty, clazz)
-
- // first complete the superclass with mixed in members
- addMixedinMembers(clazz.superClass, unit)
-
- //Console.println("adding members of " + clazz.info.baseClasses.tail.takeWhile(superclazz !=) + " to " + clazz);//DEBUG
-
- /** Mix in members of implementation class mixinClass into class clazz */
- def mixinImplClassMembers(impl: Symbol, iface: Symbol) {
- assert(
- // XXX this should be impl.isImplClass, except that we get impl classes
- // coming through under -optimise which do not agree that they are (because
- // the IMPLCLASS flag is unset, I believe.) See ticket #4285.
- nme.isImplClassName(impl.name) || impl.isImplClass,
- "%s (%s) is not a an implementation class, it cannot mix in %s".format(
- impl, impl.defaultFlagString, iface)
- )
- for (member <- impl.info.decls.toList) {
- if (isForwarded(member)) {
- val imember = member.overriddenSymbol(iface)
- // atPhase(currentRun.erasurePhase){
- // println(""+(clazz, iface, clazz.typeParams, iface.typeParams, imember, clazz.thisType.baseType(iface), clazz.thisType.baseType(iface).memberInfo(imember), imember.info substSym(iface.typeParams, clazz.typeParams) ))
- // }
- // Console.println("mixin member "+member+":"+member.tpe+member.locationString+" "+imember+" "+imember.overridingSymbol(clazz)+" to "+clazz+" with scope "+clazz.info.decls)//DEBUG
- if (imember.overridingSymbol(clazz) == NoSymbol &&
- clazz.info.findMember(member.name, 0, lateDEFERRED, false).alternatives.contains(imember)) {
- val member1 = addMember(
- clazz,
- cloneBeforeErasure(iface, clazz, imember) setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED))
- member1.asInstanceOf[TermSymbol] setAlias member;
- }
- }
- }
- }
+ val newFlags = (
+ ( PrivateLocal )
+ | ( mixinMember getFlag MUTABLE | LAZY)
+ | ( if (mixinMember.hasStableFlag) 0 else MUTABLE )
+ )
- /** Mix in members of trait mixinClass into class clazz. Also,
- * for each lazy field in mixinClass, add a link from its mixed in member to its
- * initializer method inside the implclass.
- */
- def mixinTraitMembers(mixinClass: Symbol) {
- // For all members of a trait's interface do:
- for (member <- mixinClass.info.decls.toList) {
- if (isConcreteAccessor(member)) {
- if (isOverriddenAccessor(member, clazz.info.baseClasses)) {
- if (settings.debug.value)
- println("!!! is overridden val: "+member)
- } else {
- // mixin field accessors
- val member1 = addMember(
- clazz,
- cloneBeforeErasure(mixinClass, clazz, member) //member.cloneSymbol(clazz)
- setPos clazz.pos
- resetFlag (DEFERRED | lateDEFERRED))
- // println("mixing in: "+ (member, member.info, member1.info))
- // atPhase(currentRun.erasurePhase){
- // println("before erasure: "+ (member.info, member1.info))
- // }
- if (member.isLazy) {
- var init = implClass(mixinClass).info.decl(member.name)
- assert(init != NoSymbol, "Could not find initializer for " + member.name)
- initializer(member1) = init
+ addMember(clazz, sym setFlag newFlags setAnnotations accessed.annotations)
}
- if (!member.isSetter)
- member.tpe match {
- case MethodType(Nil, ConstantType(_)) =>
- // member is a constant; only getter is needed
- ;
- case MethodType(Nil, TypeRef(_, UnitClass, _)) =>
- // member is a value of type unit. No field needed
- ;
- case _ => // otherwise mixin a field as well
- // atPhase: the private field is moved to the implementation class by erasure,
- // so it can no longer be found in the member's owner (the trait)
- val accessed = atPhase(currentRun.picklerPhase)(member.accessed)
- val sym = atPhase(currentRun.erasurePhase){ // #3857, need to retain info before erasure when cloning (since cloning only carries over the current entry in the type history)
- clazz.newValue(member.pos, nme.getterToLocal(member.name)).setInfo(member.tpe.resultType) // so we have a type history entry before erasure
- }
- sym.updateInfo(member.tpe.resultType) // info at current phase
- addMember(clazz,
- sym
- setFlag (LOCAL | PRIVATE | member.getFlag(MUTABLE | LAZY))
- setFlag (if (!member.hasStableFlag) MUTABLE else 0)
- setAnnotations accessed.annotations)
- }
- }
- } else if (member.isSuperAccessor) { // mixin super accessors
- val member1 = addMember(clazz, member.cloneSymbol(clazz)) setPos clazz.pos
- assert(member1.alias != NoSymbol, member1)
- val alias1 = rebindSuper(clazz, member.alias, mixinClass)
- member1.asInstanceOf[TermSymbol] setAlias alias1
-
- } else if (member.isMethod && member.isModule && member.hasNoFlags(LIFTED | BRIDGE)) {
- // mixin objects: todo what happens with abstract objects?
- addMember(clazz, member.cloneSymbol(clazz))
- .setPos(clazz.pos)
- .resetFlag(DEFERRED | lateDEFERRED)
}
}
+ else if (mixinMember.isSuperAccessor) { // mixin super accessors
+ val superAccessor = addMember(clazz, mixinMember.cloneSymbol(clazz)) setPos clazz.pos
+ assert(superAccessor.alias != NoSymbol, superAccessor)
+
+ rebindSuper(clazz, mixinMember.alias, mixinClass) match {
+ case NoSymbol =>
+ unit.error(clazz.pos, "Member %s of mixin %s is missing a concrete super implementation.".format(
+ mixinMember.alias, mixinClass))
+ case alias1 =>
+ superAccessor.asInstanceOf[TermSymbol] setAlias alias1
+ }
+ }
+ else if (mixinMember.isMethod && mixinMember.isModule && mixinMember.hasNoFlags(LIFTED | BRIDGE)) {
+ // mixin objects: todo what happens with abstract objects?
+ addMember(clazz, mixinMember.cloneSymbol(clazz, mixinMember.flags & ~(DEFERRED | lateDEFERRED)) setPos clazz.pos)
+ }
}
+ }
- for (mc <- clazz.mixinClasses)
- if (mc hasFlag lateINTERFACE) {
- // @SEAN: adding trait tracking so we don't have to recompile transitive closures
- unit.depends += mc
- addLateInterfaceMembers(mc)
- mixinTraitMembers(mc)
- mixinImplClassMembers(implClass(mc), mc)
- }
+ if (clazz.isJavaDefined || treatedClassInfos(clazz) == clazz.info)
+ return
+
+ treatedClassInfos(clazz) = clazz.info
+ assert(!clazz.isTrait && clazz.info.parents.nonEmpty, clazz)
+
+ // first complete the superclass with mixed in members
+ addMixedinMembers(clazz.superClass, unit)
+
+ for (mc <- clazz.mixinClasses ; if mc hasFlag lateINTERFACE) {
+ // @SEAN: adding trait tracking so we don't have to recompile transitive closures
+ unit.depends += mc
+ addLateInterfaceMembers(mc)
+ mixinTraitMembers(mc)
+ mixinImplClassMembers(implClass(mc), mc)
}
}
@@ -356,26 +373,28 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - The parents of every class are mapped from implementation class to interface
* - Implementation classes become modules that inherit nothing
* and that define all.
- *
- * @param sym ...
- * @param tp ...
- * @return ...
*/
override def transformInfo(sym: Symbol, tp: Type): Type = tp match {
case ClassInfoType(parents, decls, clazz) =>
var parents1 = parents
var decls1 = decls
if (!clazz.isPackageClass) {
- atPhase(phase.next)(clazz.owner.info)
+ afterMixin(clazz.owner.info)
if (clazz.isImplClass) {
clazz setFlag lateMODULE
var sourceModule = clazz.owner.info.decls.lookup(sym.name.toTermName)
if (sourceModule != NoSymbol) {
sourceModule setPos sym.pos
- sourceModule.flags = MODULE | FINAL
- } else {
- sourceModule = clazz.owner.newModule(
- sym.pos, sym.name.toTermName, sym.asInstanceOf[ClassSymbol])
+ if (sourceModule.flags != MODULE) {
+ log("!!! Directly setting sourceModule flags from %s to MODULE".format(flagsToString(sourceModule.flags)))
+ sourceModule.flags = MODULE
+ }
+ }
+ else {
+ sourceModule = (
+ clazz.owner.newModuleSymbol(sym.name.toTermName, sym.pos, MODULE)
+ setModuleClass sym.asInstanceOf[ClassSymbol]
+ )
clazz.owner.info.decls enter sourceModule
}
sourceModule setInfo sym.tpe
@@ -383,12 +402,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
assert(clazz.sourceModule != NoSymbol || clazz.isAnonymousClass,
clazz + " has no sourceModule: sym = " + sym + " sym.tpe = " + sym.tpe)
parents1 = List()
- decls1 = new Scope(decls.toList filter isImplementedStatically)
+ decls1 = newScopeWith(decls.toList filter isImplementedStatically: _*)
} else if (!parents.isEmpty) {
parents1 = parents.head :: (parents.tail map toInterface)
}
}
- //decls1 = atPhase(phase.next)(new Scope(decls1.toList))//debug
+ //decls1 = atPhase(phase.next)(newScopeWith(decls1.toList: _*))//debug
if ((parents1 eq parents) && (decls1 eq decls)) tp
else ClassInfoType(parents1, decls1, clazz)
@@ -404,18 +423,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
tp
}
- import scala.collection._
-
/** Return a map of single-use fields to the lazy value that uses them during initialization.
* Each field has to be private and defined in the enclosing class, and there must
* be exactly one lazy value using it.
*
* Such fields will be nulled after the initializer has memoized the lazy value.
*/
- def singleUseFields(templ: Template): collection.Map[Symbol, List[Symbol]] = {
- val usedIn = new mutable.HashMap[Symbol, List[Symbol]] {
- override def default(key: Symbol) = Nil
- }
+ def singleUseFields(templ: Template): scala.collection.Map[Symbol, List[Symbol]] = {
+ val usedIn = mutable.HashMap[Symbol, List[Symbol]]() withDefaultValue Nil
object SingleUseTraverser extends Traverser {
override def traverse(tree: Tree) {
@@ -427,11 +442,11 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
if ((sym.hasAccessorFlag || (sym.isTerm && !sym.isMethod))
&& sym.isPrivate
&& !(currentOwner.isGetter && currentOwner.accessed == sym) // getter
- && !definitions.isValueClass(sym.tpe.resultType.typeSymbol)
+ && !definitions.isPrimitiveValueClass(sym.tpe.resultType.typeSymbol)
&& sym.owner == templ.symbol.owner
&& !sym.isLazy
&& !tree.isDef) {
- log("added use in: " + currentOwner + " -- " + tree)
+ debuglog("added use in: " + currentOwner + " -- " + tree)
usedIn(sym) ::= currentOwner
}
@@ -441,7 +456,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
}
SingleUseTraverser(templ)
- log("usedIn: " + usedIn)
+ debuglog("usedIn: " + usedIn)
usedIn filter {
case (_, member :: Nil) => member.isValue && member.isLazy
case _ => false
@@ -454,24 +469,22 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
new MixinTransformer(unit)
class MixinTransformer(unit : CompilationUnit) extends Transformer {
-
/** Within a static implementation method: the parameter referring to the
- * current object undefined everywhere else.
+ * current object. Undefined everywhere else.
*/
private var self: Symbol = _
/** The rootContext used for typing */
private val rootContext =
- erasure.NoContext.make(EmptyTree, RootClass, new Scope)
+ erasure.NoContext.make(EmptyTree, rootMirror.RootClass, newScope)
/** The typer */
private var localTyper: erasure.Typer = _
- private def typedPos(pos: Position)(tree: Tree) = localTyper typed { atPos(pos)(tree) }
+ private def typedPos(pos: Position)(tree: Tree): Tree = localTyper.typedPos(pos)(tree)
+ private def localTyped(pos: Position, tree: Tree, pt: Type) = localTyper.typed(atPos(pos)(tree), pt)
/** Map lazy values to the fields they should null after initialization. */
- private var lazyValNullables: mutable.MultiMap[Symbol, Symbol] = _
-
- import scala.collection._
+ private var lazyValNullables: Map[Symbol, Set[Symbol]] = _
/** Map a field symbol to a unique integer denoting its position in the class layout.
* For each class, fields defined by the class come after inherited fields. Mixed-in
@@ -479,6 +492,19 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
*/
private val fieldOffset = perRunCaches.newMap[Symbol, Int]()
+ private val bitmapKindForCategory = perRunCaches.newMap[Name, ClassSymbol]()
+
+ // ByteClass, IntClass, LongClass
+ private def bitmapKind(field: Symbol): ClassSymbol = bitmapKindForCategory(bitmapCategory(field))
+
+ private def flagsPerBitmap(field: Symbol): Int = bitmapKind(field) match {
+ case BooleanClass => 1
+ case ByteClass => 8
+ case IntClass => 32
+ case LongClass => 64
+ }
+
+
/** The first transform; called in a pre-order traversal at phase mixin
* (that is, every node is processed before its children).
* What transform does:
@@ -486,7 +512,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - For every trait, add all late interface members to the class info
* - For every static implementation method:
* - remove override flag
- * - create a new method definition that also has a `self' parameter
+ * - create a new method definition that also has a `self` parameter
* (which comes first) Iuli: this position is assumed by tail call elimination
* on a different receiver. Storing a new 'this' assumes it is located at
* index 0 in the local variable table. See 'STORE_THIS' and GenJVM/GenMSIL.
@@ -498,29 +524,27 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
tree match {
case Template(parents, self, body) =>
localTyper = erasure.newTyper(rootContext.make(tree, currentOwner))
- atPhase(phase.next)(currentOwner.owner.info)//todo: needed?
+ afterMixin(currentOwner.owner.info)//todo: needed?
- if (!currentOwner.isTrait && !isValueClass(currentOwner))
+ if (!currentOwner.isTrait && !isPrimitiveValueClass(currentOwner))
addMixedinMembers(currentOwner, unit)
else if (currentOwner hasFlag lateINTERFACE)
addLateInterfaceMembers(currentOwner)
tree
- case DefDef(mods, name, tparams, List(vparams), tpt, rhs) =>
+ case DefDef(_, _, _, vparams :: Nil, _, _) =>
if (currentOwner.isImplClass) {
if (isImplementedStatically(sym)) {
sym setFlag notOVERRIDE
- self = sym.newValue(sym.pos, nme.SELF)
- .setFlag(PARAM)
- .setInfo(toInterface(currentOwner.typeOfThis));
+ self = sym.newValueParameter(nme.SELF, sym.pos) setInfo toInterface(currentOwner.typeOfThis)
val selfdef = ValDef(self) setType NoType
- treeCopy.DefDef(tree, mods, name, tparams, List(selfdef :: vparams), tpt, rhs)
- } else {
- EmptyTree
+ copyDefDef(tree)(vparamss = List(selfdef :: vparams))
}
- } else {
- if (currentOwner.isTrait && sym.isSetter && !atPhase(currentRun.picklerPhase)(sym.isDeferred)) {
- sym.addAnnotation(AnnotationInfo(TraitSetterAnnotationClass.tpe, List(), List()))
+ else EmptyTree
+ }
+ else {
+ if (currentOwner.isTrait && sym.isSetter && !beforePickler(sym.isDeferred)) {
+ sym.addAnnotation(TraitSetterAnnotationClass)
}
tree
}
@@ -540,8 +564,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
/** Create an identifier which references self parameter.
- *
- * @param pos ...
*/
private def selfRef(pos: Position) =
gen.mkAttributedIdent(self) setPos pos
@@ -572,21 +594,39 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* form <code>M.sym</code> where M is the symbol's implementation module.
*/
private def staticRef(sym: Symbol): Tree = {
- sym.owner.info //todo: needed?
- sym.owner.owner.info //todo: needed?
- if (sym.owner.sourceModule == NoSymbol)
- assert(false, "" + sym + " in " + sym.owner + " in " + sym.owner.owner +
- " " + sym.owner.owner.info.decls.toList)//debug
+ sym.owner.info //todo: needed?
+ sym.owner.owner.info //todo: needed?
+
+ assert(
+ sym.owner.sourceModule ne NoSymbol,
+ "" + sym.fullLocationString + " in " + sym.owner.owner + " " + sym.owner.owner.info.decls
+ )
REF(sym.owner.sourceModule) DOT sym
}
- @inline private def bitmapOperation[T](field: Symbol, transientCase: => T, privateCase: => T, rest: => T): T =
- if (field.accessed.hasAnnotation(TransientAttr))
- transientCase
- else if (field.hasFlag(PRIVATE | notPRIVATE))
- privateCase
- else
- rest
+ def needsInitAndHasOffset(sym: Symbol) =
+ needsInitFlag(sym) && (fieldOffset contains sym)
+
+ /** Examines the symbol and returns a name indicating what brand of
+ * bitmap it requires. The possibilities are the BITMAP_* vals
+ * defined in StdNames. If it needs no bitmap, nme.NO_NAME.
+ */
+ def bitmapCategory(field: Symbol): Name = {
+ import nme._
+ val isNormal = (
+ if (isFieldWithBitmap(field)) true
+ // bitmaps for checkinit fields are not inherited
+ else if (needsInitFlag(field) && !field.isDeferred) false
+ else return NO_NAME
+ )
+ if (field.accessed hasAnnotation TransientAttr) {
+ if (isNormal) BITMAP_TRANSIENT
+ else BITMAP_CHECKINIT_TRANSIENT
+ } else {
+ if (isNormal) BITMAP_NORMAL
+ else BITMAP_CHECKINIT
+ }
+ }
/** Add all new definitions to a non-trait class
* These fall into the following categories:
@@ -605,16 +645,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* @param clazz The class to which definitions are added
*/
private def addNewDefs(clazz: Symbol, stats: List[Tree]): List[Tree] = {
- val newDefs = new ListBuffer[Tree]
+ val newDefs = mutable.ListBuffer[Tree]()
/** Attribute given tree and anchor at given position */
def attributedDef(pos: Position, tree: Tree): Tree = {
- if (settings.debug.value) log("add new def to " + clazz + ": " + tree)
+ debuglog("add new def to " + clazz + ": " + tree)
typedPos(pos)(tree)
}
/** The position of given symbol, or, if this is undefined,
- * the position of the current class. */
+ * the position of the current class.
+ */
def position(sym: Symbol) =
if (sym.pos == NoPosition) clazz.pos else sym.pos
@@ -625,15 +666,13 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
/** Add new method definition.
*
- * @param sym The method
- * @param rhs A function that maps formal parameters to the method's
- * right-hand side
+ * @param sym The method symbol.
+ * @param rhs The method body.
*/
- def addDefDef(sym: Symbol, rhs: List[Symbol] => Tree) {
- addDef(position(sym), DefDef(sym, rhs(sym.paramss.head)))
- }
+ def addDefDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(position(sym), DefDef(sym, rhs))
+ def addValDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(position(sym), ValDef(sym, rhs))
- /** Add `newdefs' to `stats', removing any abstract method definitions
+ /** Add `newdefs` to `stats`, removing any abstract method definitions
* in <code>stats</code> that are matched by some symbol defined in
* <code>newDefs</code>.
*/
@@ -648,15 +687,10 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
true
}
if (newDefs.isEmpty) stats
- else newDefs ::: stats.filter(isNotDuplicate)
+ else newDefs ::: (stats filter isNotDuplicate)
}
- def addDeferredBitmap(clazz: Symbol, tree: Tree) {
- // Append the set of deffered defs
- deferredBitmaps(clazz) = typedPos(clazz.pos)(tree)::deferredBitmaps.getOrElse(clazz, List())
- }
-
- /** If `stat' is a superaccessor, complete it by adding a right-hand side.
+ /** If `stat` is a superaccessor, complete it by adding a right-hand side.
* Note: superaccessors are always abstract until this point.
* The method to call in a superaccessor is stored in the accessor symbol's alias field.
* The rhs is:
@@ -664,130 +698,125 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* This rhs is typed and then mixin transformed.
*/
def completeSuperAccessor(stat: Tree) = stat match {
- case DefDef(mods, name, tparams, List(vparams), tpt, EmptyTree)
- if (stat.symbol.isSuperAccessor) =>
+ case DefDef(_, _, _, vparams :: Nil, _, EmptyTree) if stat.symbol.isSuperAccessor =>
val rhs0 = (Super(clazz, tpnme.EMPTY) DOT stat.symbol.alias)(vparams map (v => Ident(v.symbol)): _*)
- val rhs1 = localTyper.typed(atPos(stat.pos)(rhs0), stat.symbol.tpe.resultType)
- val rhs2 = atPhase(currentRun.mixinPhase)(transform(rhs1))
- if (settings.debug.value)
- log("complete super acc " + stat.symbol + stat.symbol.locationString +
- " " + rhs1 + " " + stat.symbol.alias + stat.symbol.alias.locationString +
- "/" + stat.symbol.alias.owner.hasFlag(lateINTERFACE))//debug
- treeCopy.DefDef(stat, mods, name, tparams, List(vparams), tpt, rhs2)
+ val rhs1 = localTyped(stat.pos, rhs0, stat.symbol.tpe.resultType)
+
+ deriveDefDef(stat)(_ => beforeMixin(transform(rhs1)))
case _ =>
stat
}
- import lazyVals._
-
- /**
- * Private or transient lazy vals use bitmaps that are private for the class context,
- * unlike public or protected vals, which can use inherited bitmaps.
- * Similarly fields in the checkinit mode use private bitmaps.
- */
- def localBitmapField(field: Symbol) =
- field.accessed.hasAnnotation(TransientAttr) || field.hasFlag(PRIVATE | notPRIVATE) || checkinitField(field)
-
/**
* Return the bitmap field for 'offset'. Depending on the hierarchy it is possible to reuse
* the bitmap of its parents. If that does not exist yet we create one.
*/
- def bitmapFor(clazz0: Symbol, offset: Int, field: Symbol, searchParents:Boolean = true): Symbol = {
- def bitmapLazyName: Name =
- bitmapOperation(field, nme.bitmapNameForTransient(offset / FLAGS_PER_WORD),
- nme.bitmapNameForPrivate(offset / FLAGS_PER_WORD),
- nme.bitmapName(offset / FLAGS_PER_WORD))
- def bitmapCheckinitName: Name =
- bitmapOperation(field, nme.bitmapNameForCheckinitTransient(offset / FLAGS_PER_WORD),
- nme.bitmapNameForCheckinit(offset / FLAGS_PER_WORD),
- nme.bitmapNameForCheckinit(offset / FLAGS_PER_WORD))
- val checkinitField = !field.isLazy
- val bitmapName = if (checkinitField) bitmapCheckinitName else bitmapLazyName
+ def bitmapFor(clazz0: Symbol, offset: Int, field: Symbol): Symbol = {
+ val category = bitmapCategory(field)
+ val bitmapName = nme.newBitmapName(category, offset / flagsPerBitmap(field))
+ val sym = clazz0.info.decl(bitmapName)
+
+ assert(!sym.isOverloaded, sym)
def createBitmap: Symbol = {
- val sym = clazz0.newVariable(clazz0.pos, bitmapName).setInfo(IntClass.tpe)
- atPhase(currentRun.typerPhase) {
- sym addAnnotation AnnotationInfo(VolatileAttr.tpe, Nil, Nil)
- }
+ val bitmapKind = bitmapKindForCategory(category)
+ val sym = clazz0.newVariable(bitmapName, clazz0.pos) setInfo bitmapKind.tpe
+ beforeTyper(sym addAnnotation VolatileAttr)
- bitmapOperation(field,
- {sym.addAnnotation(AnnotationInfo(TransientAttr.tpe, Nil, Nil)); sym.setFlag(PRIVATE | LOCAL)},
- sym.setFlag(PRIVATE | LOCAL),
- sym.setFlag(if (checkinitField) (PRIVATE | LOCAL) else PROTECTED))
+ category match {
+ case nme.BITMAP_TRANSIENT | nme.BITMAP_CHECKINIT_TRANSIENT => sym addAnnotation TransientAttr
+ case _ =>
+ }
+ val init = bitmapKind match {
+ case BooleanClass => VAL(sym) === FALSE
+ case _ => VAL(sym) === ZERO
+ }
+ sym setFlag PrivateLocal
clazz0.info.decls.enter(sym)
- if (clazz0 == clazz)
- addDef(clazz.pos, VAL(sym) === ZERO)
- else {
- //FIXME: the assertion below will not work because of the way bitmaps are added.
- // They should be added during infoTransform, so that in separate compilation, bitmap
- // is a member of clazz and doesn't fail the condition couple lines below.
- // This works, as long as we assume that the previous classes were compiled correctly.
- //assert(clazz0.sourceFile != null)
- addDeferredBitmap(clazz0, VAL(sym) === ZERO)
- }
+ addDef(clazz0.pos, init)
sym
}
- var sym = clazz0.info.member(bitmapName)
- assert(!sym.hasFlag(OVERLOADED))
- if (sym == NoSymbol) {
- if (searchParents && !localBitmapField(field))
- bitmapForParents(clazz0, offset, field) match {
- case Some(bitmap) =>
- sym = bitmap
- case None =>
- sym = createBitmap
- }
- else
- sym = createBitmap
- }
- sym
- }
-
- def bitmapForParents(clazz0: Symbol, offset: Int, valSym: Symbol): Option[Symbol] = {
- def requiredBitmaps(fs: Int): Int = if (fs == 0) -1 else (fs - 1) / FLAGS_PER_WORD
- var res:Option[Symbol] = None
- val bitmapNum = offset / FLAGS_PER_WORD
-
-
- // filter private and transient
- // since we do not inherit normal values (in checkinit mode) also filter them out
- for (cl <- clazz0.info.baseClasses.tail.filter(c => !c.isTrait && !c.hasFlag(JAVA))
- if res == None) {
- val fields0 = usedBits(cl)
+ if (sym ne NoSymbol)
+ sym
+ else
+ createBitmap
+ }
- if (requiredBitmaps(fields0) < bitmapNum) {
- val fields1 = cl.info.decls.filter(decl => fieldWithBitmap(decl) && !localBitmapField(decl)).size
- if (requiredBitmaps(fields0 + fields1) >= bitmapNum)
- res = Some(bitmapFor(cl, offset, valSym, false))
- else return None // Don't waste time, since we won't find bitmap anyway
- }
- }
- res
+ def maskForOffset(offset: Int, sym: Symbol, kind: ClassSymbol): Tree = {
+ def realOffset = offset % flagsPerBitmap(sym)
+ if (kind == LongClass ) LIT(1L << realOffset) else LIT(1 << realOffset)
}
/** Return an (untyped) tree of the form 'Clazz.this.bmp = Clazz.this.bmp | mask'. */
- def mkSetFlag(clazz: Symbol, offset: Int, valSym: Symbol): Tree = {
- val bmp = bitmapFor(clazz, offset, valSym)
- val mask = LIT(1 << (offset % FLAGS_PER_WORD))
- def x = This(clazz) DOT bmp
+ def mkSetFlag(clazz: Symbol, offset: Int, valSym: Symbol, kind: ClassSymbol): Tree = {
+ val bmp = bitmapFor(clazz, offset, valSym)
+ def mask = maskForOffset(offset, valSym, kind)
+ def x = This(clazz) DOT bmp
+ def newValue = if (kind == BooleanClass) TRUE else (x GEN_| (mask, kind))
- x === (x INT_| mask)
+ x === newValue
}
/** Return an (untyped) tree of the form 'clazz.this.bitmapSym & mask (==|!=) 0', the
* precise comparison operator depending on the value of 'equalToZero'.
*/
- def mkTest(clazz: Symbol, mask: Tree, bitmapSym: Symbol, equalToZero: Boolean): Tree = {
- def lhs = (This(clazz) DOT bitmapSym) INT_& mask
- if (equalToZero) lhs INT_== ZERO
- else lhs INT_!= ZERO
+ def mkTest(clazz: Symbol, mask: Tree, bitmapSym: Symbol, equalToZero: Boolean, kind: ClassSymbol): Tree = {
+ val bitmapTree = (This(clazz) DOT bitmapSym)
+ def lhs = bitmapTree GEN_& (mask, kind)
+ kind match {
+ case BooleanClass =>
+ if (equalToZero) NOT(bitmapTree)
+ else bitmapTree
+ case _ =>
+ if (equalToZero) lhs GEN_== (ZERO, kind)
+ else lhs GEN_!= (ZERO, kind)
+ }
}
+ def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
+ stats: List[Tree], retVal: Tree, attrThis: Tree, args: List[Tree]): Symbol = {
+ val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, PRIVATE)
+ val params = defSym newSyntheticValueParams args.map(_.symbol.tpe)
+ defSym setInfoAndEnter MethodType(params, lzyVal.tpe.resultType)
+ val rhs: Tree = (gen.mkSynchronizedCheck(attrThis, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
+ val strictSubst = new TreeSymSubstituterWithCopying(args.map(_.symbol), params)
+ addDef(position(defSym), DEF(defSym).mkTree(strictSubst(BLOCK(rhs, retVal))) setSymbol defSym)
+ defSym
+ }
+
+ def mkFastPathLazyBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
+ stats: List[Tree], retVal: Tree): Tree = {
+ mkFastPathBody(clazz, lzyVal, cond, syncBody, stats, retVal, gen.mkAttributedThis(clazz), List())
+ }
+
+ def mkFastPathBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
+ stats: List[Tree], retVal: Tree, attrThis: Tree, args: List[Tree]): Tree = {
+ val slowPathSym: Symbol = mkSlowPathDef(clazz, lzyVal, cond, syncBody, stats, retVal, attrThis, args)
+ If(cond, fn (This(clazz), slowPathSym, args.map(arg => Ident(arg.symbol)): _*), retVal)
+ }
+
+
+ /** Always copy the tree if we are going to perform sym substitution,
+ * otherwise we will side-effect on the tree that is used in the fast path
+ */
+ class TreeSymSubstituterWithCopying(from: List[Symbol], to: List[Symbol]) extends TreeSymSubstituter(from, to) {
+ override def transform(tree: Tree): Tree =
+ if (tree.hasSymbol && from.contains(tree.symbol))
+ super.transform(tree.duplicate)
+ else super.transform(tree.duplicate)
+
+ override def apply[T <: Tree](tree: T): T = if (from.isEmpty) tree else super.apply(tree)
+ }
+
/** return a 'lazified' version of rhs. It uses double-checked locking to ensure
- * initialization is performed at most once. Private fields used only in this
- * initializer are subsequently set to null.
+ * initialization is performed at most once. For performance reasons the double-checked
+ * locking is split into two parts, the first (fast) path checks the bitmap without
+ * synchronizing, and if that fails it initializes the lazy val within the
+ * synchronization block (slow path). This way the inliner should optimize
+ * the fast path because the method body is small enough.
+ * Private fields used only in this initializer are subsequently set to null.
*
* @param clazz The class symbol
* @param init The tree which initializes the field ( f = <rhs> )
@@ -795,322 +824,273 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* @param offset The offset of this field in the flags bitmap
*
* The result will be a tree of the form
- * {
- * if ((bitmap$n & MASK) == 0) {
- * synchronized(this) {
- * if ((bitmap$n & MASK) == 0) {
- * init // l$ = <rhs>
- * bitmap$n = bimap$n | MASK
- * }
- * }
- * this.f1 = null
- * ... this.fn = null
+ * { if ((bitmap&n & MASK) == 0) this.l$compute()
+ * else l$
+ *
+ * ...
+ * def l$compute() = { synchronized(this) {
+ * if ((bitmap$n & MASK) == 0) {
+ * init // l$ = <rhs>
+ * bitmap$n = bimap$n | MASK
+ * }}
+ * l$
* }
- * l$
+ *
+ * ...
+ * this.f1 = null
+ * ... this.fn = null
* }
- * where bitmap$n is an int value acting as a bitmap of initialized values. It is
- * the 'n' is (offset / 32), the MASK is (1 << (offset % 32)).
+ * where bitmap$n is a byte, int or long value acting as a bitmap of initialized values.
+ * The kind of the bitmap determines how many bit indicators for lazy vals are stored in it.
+ * For Int bitmap it is 32 and then 'n' in the above code is: (offset / 32),
+ * the MASK is (1 << (offset % 32)).
+ * If the class contains only a single lazy val then the bitmap is represented
+ * as a Boolean and the condition checking is a simple bool test.
*/
def mkLazyDef(clazz: Symbol, lzyVal: Symbol, init: List[Tree], retVal: Tree, offset: Int): Tree = {
- def nullify(sym: Symbol): Tree = {
- val sym1 = if (sym.hasAccessorFlag) sym.accessed else sym
- Select(This(clazz), sym1) === LIT(null)
- }
+ def nullify(sym: Symbol) = Select(This(clazz), sym.accessedOrSelf) === LIT(null)
val bitmapSym = bitmapFor(clazz, offset, lzyVal)
- val mask = LIT(1 << (offset % FLAGS_PER_WORD))
- def cond = mkTest(clazz, mask, bitmapSym, true)
- val nulls = (lazyValNullables(lzyVal).toList sortBy (_.id) map nullify)
- def syncBody = init ::: List(mkSetFlag(clazz, offset, lzyVal), UNIT)
-
- log("nulling fields inside " + lzyVal + ": " + nulls)
- val result = gen.mkDoubleCheckedLocking(clazz, cond, syncBody, nulls)
- typedPos(init.head.pos)(BLOCK(result, retVal))
+ val kind = bitmapKind(lzyVal)
+ val mask = maskForOffset(offset, lzyVal, kind)
+ def cond = mkTest(clazz, mask, bitmapSym, true, kind)
+ val nulls = lazyValNullables(lzyVal).toList sortBy (_.id) map nullify
+ def syncBody = init ::: List(mkSetFlag(clazz, offset, lzyVal, kind), UNIT)
+
+ if (nulls.nonEmpty)
+ log("nulling fields inside " + lzyVal + ": " + nulls)
+
+ typedPos(init.head.pos)(mkFastPathLazyBody(clazz, lzyVal, cond, syncBody, nulls, retVal))
}
- def mkInnerClassAccessorDoubleChecked(attrThis: Tree, rhs: Tree): Tree =
+ def mkInnerClassAccessorDoubleChecked(attrThis: Tree, rhs: Tree, moduleSym: Symbol, args: List[Tree]): Tree =
rhs match {
case Block(List(assign), returnTree) =>
val Assign(moduleVarRef, _) = assign
- val cond = Apply(Select(moduleVarRef, nme.eq),List(Literal(Constant(null))))
- val doubleSynchrTree = gen.mkDoubleCheckedLocking(attrThis, cond, List(assign), Nil)
- Block(List(doubleSynchrTree), returnTree)
+ val cond = Apply(Select(moduleVarRef, Object_eq), List(NULL))
+ mkFastPathBody(clazz, moduleSym, cond, List(assign), List(NULL), returnTree, attrThis, args)
case _ =>
- assert(false, "Invalid getter " + rhs + " for module in class " + clazz)
- EmptyTree
+ abort("Invalid getter " + rhs + " for module in class " + clazz)
}
def mkCheckedAccessor(clazz: Symbol, retVal: Tree, offset: Int, pos: Position, fieldSym: Symbol): Tree = {
- val bitmapSym = bitmapFor(clazz, offset, fieldSym.getter(fieldSym.owner))
- val mask = LIT(1 << (offset % FLAGS_PER_WORD))
+ val sym = fieldSym.getter(fieldSym.owner)
+ val bitmapSym = bitmapFor(clazz, offset, sym)
+ val kind = bitmapKind(sym)
+ val mask = maskForOffset(offset, sym, kind)
val msg = "Uninitialized field: " + unit.source + ": " + pos.line
val result =
- IF (mkTest(clazz, mask, bitmapSym, false)) .
+ IF (mkTest(clazz, mask, bitmapSym, false, kind)) .
THEN (retVal) .
ELSE (THROW(UninitializedErrorClass, LIT(msg)))
typedPos(pos)(BLOCK(result, retVal))
}
- /** Complete lazy field accessors. Applies only to classes, for it's own (non inherited) lazy fields.
- * If 'checkinit' is enabled, getters that check for the initialized bit are generated, and
- * the class constructor is changed to set the initialized bits.
+ /** Complete lazy field accessors. Applies only to classes,
+ * for it's own (non inherited) lazy fields. If 'checkinit'
+ * is enabled, getters that check for the initialized bit are
+ * generated, and the class constructor is changed to set the
+ * initialized bits.
*/
def addCheckedGetters(clazz: Symbol, stats: List[Tree]): List[Tree] = {
+ def dd(stat: DefDef) = {
+ val sym = stat.symbol
+ def isUnit = sym.tpe.resultType.typeSymbol == UnitClass
+ def isEmpty = stat.rhs == EmptyTree
- val stats1 = for (stat <- stats; sym = stat.symbol) yield stat match {
- case DefDef(mods, name, tp, vp, tpt, rhs)
- if sym.isLazy && rhs != EmptyTree && !clazz.isImplClass =>
- assert(fieldOffset.isDefinedAt(sym))
- val rhs1 = if (sym.tpe.resultType.typeSymbol == UnitClass)
- mkLazyDef(clazz, sym, List(rhs), UNIT, fieldOffset(sym))
- else {
- val Block(stats, res) = rhs
+ if (sym.isLazy && !isEmpty && !clazz.isImplClass) {
+ assert(fieldOffset contains sym, sym)
+ deriveDefDef(stat) {
+ case t if isUnit => mkLazyDef(clazz, sym, List(t), UNIT, fieldOffset(sym))
+
+ case Block(stats, res) =>
mkLazyDef(clazz, sym, stats, Select(This(clazz), res.symbol), fieldOffset(sym))
- }
- treeCopy.DefDef(stat, mods, name, tp, vp, tpt, rhs1)
- case DefDef(mods, name, tp, vp, tpt, rhs)
- if needsInitFlag(sym) && rhs != EmptyTree && !clazz.isImplClass && !clazz.isTrait =>
- assert(fieldOffset.isDefinedAt(sym))
- val rhs1 = (mkCheckedAccessor(clazz, _: Tree, fieldOffset(sym), stat.pos, sym))(
- if (sym.tpe.resultType.typeSymbol == UnitClass) UNIT else rhs
+ case t => t // pass specialized lazy vals through
+ }
+ }
+ else if (needsInitFlag(sym) && !isEmpty && !clazz.hasFlag(IMPLCLASS | TRAIT)) {
+ assert(fieldOffset contains sym, sym)
+ deriveDefDef(stat)(rhs =>
+ (mkCheckedAccessor(clazz, _: Tree, fieldOffset(sym), stat.pos, sym))(
+ if (sym.tpe.resultType.typeSymbol == UnitClass) UNIT
+ else rhs
)
- treeCopy.DefDef(stat, mods, name, tp, vp, tpt, rhs1)
-
- case DefDef(mods, name, tp, vp, tpt, rhs) if sym.isConstructor =>
- treeCopy.DefDef(stat, mods, name, tp, vp, tpt, addInitBits(clazz, rhs))
-
- case DefDef(mods, name, tp, vp, tpt, rhs)
- if settings.checkInit.value && !clazz.isTrait && sym.isSetter =>
- val getter = sym.getter(clazz)
- if (needsInitFlag(getter) && fieldOffset.isDefinedAt(getter))
- treeCopy.DefDef(stat, mods, name, tp, vp, tpt,
- Block(List(rhs, localTyper.typed(mkSetFlag(clazz, fieldOffset(getter), getter))), UNIT))
- else
- stat
- case DefDef(mods, name, tp, vp, tpt, rhs)
- if sym.isModule && (!clazz.isTrait || clazz.isImplClass) && !sym.hasFlag(BRIDGE) =>
- val attrThis =
- if (clazz.isImplClass) {
- gen.mkAttributedIdent(vp.head.head.symbol)
- // Martin to Hubert I think this can be replaced by selfRef(tree.pos)
- } else
- gen.mkAttributedThis(clazz)
- val rhs1 = mkInnerClassAccessorDoubleChecked(attrThis, rhs)
- treeCopy.DefDef(stat, mods, name, tp, vp, tpt, typedPos(stat.pos)(rhs1))
- case _ => stat
+ )
+ }
+ else if (sym.isConstructor) {
+ deriveDefDef(stat)(addInitBits(clazz, _))
+ }
+ else if (settings.checkInit.value && !clazz.isTrait && sym.isSetter) {
+ val getter = sym.getter(clazz)
+ if (needsInitFlag(getter) && fieldOffset.isDefinedAt(getter))
+ deriveDefDef(stat)(rhs => Block(List(rhs, localTyper.typed(mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter)))), UNIT))
+ else stat
+ }
+ else if (sym.isModule && (!clazz.isTrait || clazz.isImplClass) && !sym.isBridge) {
+ deriveDefDef(stat)(rhs =>
+ typedPos(stat.pos)(
+ mkInnerClassAccessorDoubleChecked(
+ // Martin to Hubert: I think this can be replaced by selfRef(tree.pos)
+ // @PP: It does not seem so, it crashes for me trying to bootstrap.
+ if (clazz.isImplClass) gen.mkAttributedIdent(stat.vparamss.head.head.symbol) else gen.mkAttributedThis(clazz),
+ rhs, sym, stat.vparamss.head
+ )
+ )
+ )
+ }
+ else stat
+ }
+ stats map {
+ case defn: DefDef => dd(defn)
+ case stat => stat
}
- stats1
}
- /** Does this field require an initialized bit?
- * Note: fields of classes inheriting DelayedInit are not checked.
- * This is because the they are neither initialized in the constructor
- * nor do they have a setter (not if they are vals anyway). The usual
- * logic for setting bitmaps does therefor not work for such fields.
- * That's why they are excluded.
- */
- def needsInitFlag(sym: Symbol) = {
- val res = (settings.checkInit.value
- && sym.isGetter
- && !sym.isInitializedToDefault
- && !sym.hasFlag(PARAMACCESSOR | SPECIALIZED | LAZY)
- && !sym.accessed.hasFlag(PRESUPER)
- && !sym.isOuterAccessor
- && !(sym.owner isSubClass DelayedInitClass))
-
-// if (settings.debug.value) {
-// log("needsInitFlag(" + sym.fullName + "): " + res)
-// log("\tsym.isGetter: " + sym.isGetter)
-// log("\t!isInitializedToDefault: " + !sym.isInitializedToDefault + sym.hasFlag(DEFAULTINIT) + sym.hasAccessorFlag + sym.isTerm)
-// log("\t!sym.isParamAccessor: " + !sym.isParamAccessor)
-// //println("\t!sym.accessed.hasFlag(PRESUPER): " + !sym.accessed.hasFlag(PRESUPER))
-// log("\t!sym.isOuterAccessor: " + !sym.isOuterAccessor)
-// }
-
- res
+ class AddInitBitsTransformer(clazz: Symbol) extends Transformer {
+ private def checkedGetter(lhs: Tree) = {
+ val sym = clazz.info decl lhs.symbol.getterName suchThat (_.isGetter)
+ if (needsInitAndHasOffset(sym)) {
+ debuglog("adding checked getter for: " + sym + " " + lhs.symbol.flagString)
+ List(localTyper typed mkSetFlag(clazz, fieldOffset(sym), sym, bitmapKind(sym)))
+ }
+ else Nil
+ }
+ override def transformStats(stats: List[Tree], exprOwner: Symbol) = {
+ // !!! Ident(self) is never referenced, is it supposed to be confirming
+ // that self is anything in particular?
+ super.transformStats(
+ stats flatMap {
+ case stat @ Assign(lhs @ Select(This(_), _), rhs) => stat :: checkedGetter(lhs)
+ // remove initialization for default values
+ case Apply(lhs @ Select(Ident(self), _), EmptyTree.asList) if lhs.symbol.isSetter => Nil
+ case stat => List(stat)
+ },
+ exprOwner
+ )
+ }
}
/** Adds statements to set the 'init' bit for each field initialized
- * in the body of a constructor.
+ * in the body of a constructor.
*/
- def addInitBits(clazz: Symbol, rhs: Tree): Tree = {
- new Transformer {
- override def transformStats(stats: List[Tree], exprOwner: Symbol) = {
- val stats1 = stats flatMap { stat => stat match {
- case Assign(lhs @ Select(This(_), _), rhs) =>
- val sym = clazz.info.decl(nme.getterName(lhs.symbol.name))
- .suchThat(_.isGetter)
- if (rhs == EmptyTree)
- List()
- else if (sym != NoSymbol && needsInitFlag(sym) && fieldOffset.isDefinedAt(sym)) {
- log("adding checked getter for: " + sym + " " + Flags.flagsToString(lhs.symbol.flags))
- List(stat, localTyper.typed(mkSetFlag(clazz, fieldOffset(sym), sym)))
- } else {
- List(stat)
- }
- case Apply(setter @ Select(Ident(self), _), List(EmptyTree)) if setter.symbol.isSetter =>
- // remove initialization for default values
- List()
- case _ => List(stat)
- }
- }
- super.transformStats(stats1, exprOwner)
- }
- }.transform(rhs)
- }
+ def addInitBits(clazz: Symbol, rhs: Tree): Tree =
+ new AddInitBitsTransformer(clazz) transform rhs
- def fieldWithBitmap(field: Symbol) = {
- field.info // ensure that nested objects are transformed
- // For checkinit consider normal value getters
- // but for lazy values only take into account lazy getters
- field.isLazy && field.isMethod && !field.isDeferred
- }
-
- def checkinitField(field: Symbol) =
+ def isCheckInitField(field: Symbol) =
needsInitFlag(field) && !field.isDeferred
- /**
- * Return the number of bits used by superclass fields.
- */
- def usedBits(clazz0: Symbol): Int = {
- def needsBitmap(field: Symbol) = field.owner != clazz0 && fieldWithBitmap(field)
- var bits = 0
- for {
- cl <- clazz0.info.baseClasses.tail
- if !cl.isTrait && !cl.hasFlag(JAVA)
- field <- cl.info.decls.iterator
- if needsBitmap(field) && !localBitmapField(field)
- } bits += 1
-
- bits
- }
+ def superClassesToCheck(clazz: Symbol) =
+ clazz.ancestors filterNot (_ hasFlag TRAIT | JAVA)
+
+ // begin addNewDefs
/** Fill the map from fields to offset numbers.
* Instead of field symbols, the map keeps their getter symbols. This makes
* code generation easier later.
*/
- def buildFieldPositions(clazz0: Symbol) {
- var fields = usedBits(clazz0)
- var fieldsPrivate = 0
- var fieldsTransient = 0
- var fieldsCheckinit = 0
- var fieldsCheckinitTransient = 0
-
- for (f <- clazz0.info.decls.iterator) {
- if (settings.debug.value) log(f.fullName + " -> " + fields)
-
- if (fieldWithBitmap(f)) {
- val (idx, _) =
- bitmapOperation(f, (fieldsTransient, fieldsTransient += 1),
- (fieldsPrivate, fieldsPrivate += 1),
- (fields, fields += 1))
- fieldOffset(f) = idx
- } else if (checkinitField(f)) {
- // bitmaps for checkinit fields are not inherited
- val (idx, _) =
- bitmapOperation(f, (fieldsCheckinitTransient, fieldsCheckinitTransient += 1),
- (fieldsCheckinit, fieldsCheckinit += 1),
- (fieldsCheckinit, fieldsCheckinit += 1))
+ def buildBitmapOffsets() {
+ def fold(fields: List[Symbol], category: Name) = {
+ var idx = 0
+ fields foreach { f =>
fieldOffset(f) = idx
+ idx += 1
}
+
+ if (idx == 0) ()
+ else if (idx == 1) bitmapKindForCategory(category) = BooleanClass
+ else if (idx < 9) bitmapKindForCategory(category) = ByteClass
+ else if (idx < 33) bitmapKindForCategory(category) = IntClass
+ else bitmapKindForCategory(category) = LongClass
+ }
+ clazz.info.decls.toList groupBy bitmapCategory foreach {
+ case (nme.NO_NAME, _) => ()
+ case (category, fields) => fold(fields, category)
}
}
-
- // begin addNewDefs
- buildFieldPositions(clazz)
+ buildBitmapOffsets()
var stats1 = addCheckedGetters(clazz, stats)
- // add deffered bitmaps
- deferredBitmaps.remove(clazz) match {
- case Some(deferred) =>
- stats1 = add(stats1, deferred)
- case None =>
+ def accessedReference(sym: Symbol) = sym.tpe match {
+ case MethodType(Nil, ConstantType(c)) => Literal(c)
+ case _ =>
+ // if it is a mixed-in lazy value, complete the accessor
+ if (sym.isLazy && sym.isGetter) {
+ val isUnit = sym.tpe.resultType.typeSymbol == UnitClass
+ val initCall = Apply(staticRef(initializer(sym)), gen.mkAttributedThis(clazz) :: Nil)
+ val selection = Select(This(clazz), sym.accessed)
+ val init = if (isUnit) initCall else atPos(sym.pos)(Assign(selection, initCall))
+ val returns = if (isUnit) UNIT else selection
+
+ mkLazyDef(clazz, sym, List(init), returns, fieldOffset(sym))
+ }
+ else sym.getter(sym.owner).tpe.resultType.typeSymbol match {
+ case UnitClass => UNIT
+ case _ => Select(This(clazz), sym.accessed)
+ }
}
+ def isOverriddenSetter(sym: Symbol) =
+ nme.isTraitSetterName(sym.name) && {
+ val other = sym.nextOverriddenSymbol
+ isOverriddenAccessor(other.getter(other.owner), clazz.info.baseClasses)
+ }
- // for all symbols `sym' in the class definition, which are mixed in:
- for (sym <- clazz.info.decls.toList) {
- if (sym hasFlag MIXEDIN) {
- if (clazz hasFlag lateINTERFACE) {
- // if current class is a trait interface, add an abstract method for accessor `sym'
- addDefDef(sym, vparamss => EmptyTree)
- } else if (!clazz.isTrait) {
- // if class is not a trait add accessor definitions
- if ((sym hasFlag ACCESSOR) &&
- (!(sym hasFlag DEFERRED) || (sym hasFlag lateDEFERRED))) {
- // add accessor definitions
- addDefDef(sym, vparams => {
- val accessedRef = sym.tpe match {
- case MethodType(List(), ConstantType(c)) => Literal(c)
+ // for all symbols `sym` in the class definition, which are mixed in:
+ for (sym <- clazz.info.decls ; if sym hasFlag MIXEDIN) {
+ // if current class is a trait interface, add an abstract method for accessor `sym`
+ if (clazz hasFlag lateINTERFACE) {
+ addDefDef(sym)
+ }
+ // if class is not a trait add accessor definitions
+ else if (!clazz.isTrait) {
+ if (sym.hasAccessorFlag && (!sym.isDeferred || sym.hasFlag(lateDEFERRED))) {
+ // add accessor definitions
+ addDefDef(sym, {
+ val accessedRef = accessedReference(sym)
+ if (sym.isSetter) {
+ if (isOverriddenSetter(sym)) UNIT
+ else accessedRef match {
+ case Literal(_) => accessedRef
case _ =>
- // if it is a mixed-in lazy value, complete the accessor
- if (sym.isLazy && sym.isGetter) {
- val rhs1 =
- if (sym.tpe.resultType.typeSymbol == UnitClass)
- mkLazyDef(clazz, sym, List(Apply(staticRef(initializer(sym)), List(gen.mkAttributedThis(clazz)))), UNIT, fieldOffset(sym))
- else {
- val assign = atPos(sym.pos) {
- Assign(Select(This(sym.accessed.owner), sym.accessed) /*gen.mkAttributedRef(sym.accessed)*/ ,
- Apply(staticRef(initializer(sym)), gen.mkAttributedThis(clazz) :: Nil))
- }
- mkLazyDef(clazz, sym, List(assign), Select(This(clazz), sym.accessed), fieldOffset(sym))
- }
- rhs1
- } else if (sym.getter(sym.owner).tpe.resultType.typeSymbol == UnitClass) {
- UNIT
- } else {
- Select(This(clazz), sym.accessed)
- }
+ val init = Assign(accessedRef, Ident(sym.firstParam))
+ val getter = sym.getter(clazz)
+
+ if (!needsInitFlag(getter)) init
+ else Block(init, mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter)), UNIT)
}
- if (sym.isSetter) {
- val isOverriddenSetter =
- nme.isTraitSetterName(sym.name) && {
- sym.allOverriddenSymbols match {
- case other :: _ =>
- isOverriddenAccessor(other.getter(other.owner), clazz.info.baseClasses)
- case _ =>
- false
- }
- }
- if (isOverriddenSetter) UNIT
- else accessedRef match {
- case Literal(_) => accessedRef
- case _ =>
- val init = Assign(accessedRef, Ident(vparams.head))
- val getter = sym.getter(clazz)
- if (needsInitFlag(getter))
- Block(List(init, mkSetFlag(clazz, fieldOffset(getter), getter)), UNIT)
- else
- init
- }
- } else if (needsInitFlag(sym)) {
- mkCheckedAccessor(clazz, accessedRef, fieldOffset(sym), sym.pos, sym)
- } else
- gen.mkCheckInit(accessedRef)
- })
- } else if (sym.isModule && !(sym hasFlag LIFTED | BRIDGE)) {
- // add modules
- val vdef = gen.mkModuleVarDef(sym)
- addDef(position(sym), vdef)
-
- val rhs = gen.newModule(sym, vdef.symbol.tpe)
- val assignAndRet = gen.mkAssignAndReturn(vdef.symbol, rhs)
- val attrThis = gen.mkAttributedThis(clazz)
- val rhs1 = mkInnerClassAccessorDoubleChecked(attrThis, assignAndRet)
- addDef(position(sym), DefDef(sym, rhs1))
- } else if (!sym.isMethod) {
- // add fields
- addDef(position(sym), ValDef(sym))
- } else if (sym.isSuperAccessor) {
- // add superaccessors
- addDefDef(sym, vparams => EmptyTree)
- } else {
- // add forwarders
- assert(sym.alias != NoSymbol, sym)
- addDefDef(sym, vparams =>
- Apply(staticRef(sym.alias), gen.mkAttributedThis(clazz) :: (vparams map Ident)))
- }
+ }
+ else if (needsInitFlag(sym))
+ mkCheckedAccessor(clazz, accessedRef, fieldOffset(sym), sym.pos, sym)
+ else
+ gen.mkCheckInit(accessedRef)
+ })
+ }
+ else if (sym.isModule && !(sym hasFlag LIFTED | BRIDGE)) {
+ // add modules
+ val vdef = gen.mkModuleVarDef(sym)
+ addDef(position(sym), vdef)
+
+ val rhs = gen.newModule(sym, vdef.symbol.tpe)
+ val assignAndRet = gen.mkAssignAndReturn(vdef.symbol, rhs)
+ val attrThis = gen.mkAttributedThis(clazz)
+ val rhs1 = mkInnerClassAccessorDoubleChecked(attrThis, assignAndRet, sym, List())
+
+ addDefDef(sym, rhs1)
+ }
+ else if (!sym.isMethod) {
+ // add fields
+ addValDef(sym)
+ }
+ else if (sym.isSuperAccessor) {
+ // add superaccessors
+ addDefDef(sym)
+ }
+ else {
+ // add forwarders
+ assert(sym.alias != NoSymbol, sym)
+ // debuglog("New forwarder: " + sym.defString + " => " + sym.alias.defString)
+ if (!sym.isTermMacro) addDefDef(sym, Apply(staticRef(sym.alias), gen.mkAttributedThis(clazz) :: sym.paramss.head.map(Ident)))
}
}
}
@@ -1119,20 +1099,18 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
stats1
}
- private def nullableFields(templ: Template) = {
- val nullables = new mutable.HashMap[Symbol, mutable.Set[Symbol]] with mutable.MultiMap[Symbol, Symbol] {
- override def default(key: Symbol) = mutable.Set.empty
- }
-
+ private def nullableFields(templ: Template): Map[Symbol, Set[Symbol]] = {
+ val scope = templ.symbol.owner.info.decls
// if there are no lazy fields, take the fast path and save a traversal of the whole AST
- if (templ.symbol.owner.info.decls.exists(_.isLazy)) {
+ if (scope exists (_.isLazy)) {
+ val map = mutable.Map[Symbol, Set[Symbol]]() withDefaultValue Set()
// check what fields can be nulled for
- val uses = singleUseFields(templ)
- for ((field, users) <- uses; lazyFld <- users) {
- nullables.addBinding(lazyFld, field)
- }
+ for ((field, users) <- singleUseFields(templ); lazyFld <- users)
+ map(lazyFld) += field
+
+ map.toMap
}
- nullables
+ else Map()
}
/** The transform that gets applied to a tree after it has been completely
@@ -1147,32 +1125,29 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* to static calls of methods in implementation modules (@see staticCall)
* - change super calls to methods in implementation classes to static calls
* (@see staticCall)
- * - change `this' in implementation modules to references to the self parameter
- * - refer to fields in some implementation class vie an abstract method in the interface.
+ * - change `this` in implementation modules to references to the self parameter
+ * - refer to fields in some implementation class via an abstract method in the interface.
*/
private def postTransform(tree: Tree): Tree = {
+ def siteWithinImplClass = currentOwner.enclClass.isImplClass
val sym = tree.symbol
// change every node type that refers to an implementation class to its
// corresponding interface, unless the node's symbol is an implementation class.
- if (tree.tpe.typeSymbol.isImplClass &&
- ((tree.symbol eq null) || !tree.symbol.isImplClass))
- tree.tpe = toInterface(tree.tpe);
+ if (tree.tpe.typeSymbol.isImplClass && ((sym eq null) || !sym.isImplClass))
+ tree.tpe = toInterface(tree.tpe)
tree match {
- case Template(parents, self, body) =>
+ case templ @ Template(parents, self, body) =>
// change parents of templates to conform to parents in the symbol info
val parents1 = currentOwner.info.parents map (t => TypeTree(t) setPos tree.pos)
-
- lazyValNullables = nullableFields(tree.asInstanceOf[Template])
+ // mark fields which can be nulled afterward
+ lazyValNullables = nullableFields(templ) withDefaultValue Set()
// add all new definitions to current class or interface
- val body1 = addNewDefs(currentOwner, body)
-
- treeCopy.Template(tree, parents1, self, body1)
+ treeCopy.Template(tree, parents1, self, addNewDefs(currentOwner, body))
- case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
- if (tree.symbol == Object_asInstanceOf && (qual.tpe <:< targ.tpe)) =>
- // remove widening casts
+ // remove widening casts
+ case Apply(TypeApply(Select(qual, _), targ :: _), _) if isCastSymbol(sym) && (qual.tpe <:< targ.tpe) =>
qual
case Apply(Select(qual, _), args) =>
@@ -1183,37 +1158,46 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - if qual != super, qual itself
* - if qual == super, and we are in an implementation class,
* the current self parameter.
- * - if qual == super, and we are not in an implementation class, `this'
+ * - if qual == super, and we are not in an implementation class, `this`
*/
def staticCall(target: Symbol) = {
- if (target == NoSymbol)
- assert(false, "" + sym + ":" + sym.tpe + " " + sym.owner + " " + implClass(sym.owner) + " " + implClass(sym.owner).info.member(sym.name) + " " + atPhase(phase.prev)(implClass(sym.owner).info.member(sym.name).tpe) + " " + phase);//debug
-
+ def implSym = implClass(sym.owner).info.member(sym.name)
+ assert(target ne NoSymbol,
+ List(sym + ":", sym.tpe, sym.owner, implClass(sym.owner), implSym,
+ beforePrevPhase(implSym.tpe), phase) mkString " "
+ )
typedPos(tree.pos)(Apply(staticRef(target), transformSuper(qual) :: args))
}
+
if (isStaticOnly(sym)) {
// change calls to methods which are defined only in implementation
// classes to static calls of methods in implementation modules
staticCall(sym)
- } else qual match {
+ }
+ else qual match {
case Super(_, mix) =>
// change super calls to methods in implementation classes to static calls.
// Transform references super.m(args) as follows:
- // - if `m' refers to a trait, insert a static call to the corresponding static
+ // - if `m` refers to a trait, insert a static call to the corresponding static
// implementation
// - otherwise return tree unchanged
- if (mix == tpnme.EMPTY && currentOwner.enclClass.isImplClass)
- assert(false, "illegal super in trait: " + currentOwner.enclClass + " " + tree);
+ assert(
+ !(mix == tpnme.EMPTY && siteWithinImplClass),
+ "illegal super in trait: " + currentOwner.enclClass + " " + tree
+ )
+
if (sym.owner hasFlag lateINTERFACE) {
if (sym.hasAccessorFlag) {
- assert(args.isEmpty)
+ assert(args.isEmpty, args)
val sym1 = sym.overridingSymbol(currentOwner.enclClass)
typedPos(tree.pos)((transformSuper(qual) DOT sym1)())
- } else {
- staticCall(atPhase(phase.prev)(sym.overridingSymbol(implClass(sym.owner))))
}
- } else {
- assert(!currentOwner.enclClass.isImplClass)
+ else {
+ staticCall(beforePrevPhase(sym.overridingSymbol(implClass(sym.owner))))
+ }
+ }
+ else {
+ assert(!siteWithinImplClass, currentOwner.enclClass)
tree
}
case _ =>
@@ -1228,13 +1212,27 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
case Select(qual, name) if sym.owner.isImplClass && !isStaticOnly(sym) =>
assert(!sym.isMethod, "no method allowed here: %s%s %s".format(sym, sym.isImplOnly, flagsToString(sym.flags)))
-
// refer to fields in some implementation class via an abstract
// getter in the interface.
- val iface = toInterface(sym.owner.tpe).typeSymbol
- val getter = sym.getter(iface)
- assert(getter != NoSymbol)
- typedPos(tree.pos)((qual DOT getter)())
+ val iface = toInterface(sym.owner.tpe).typeSymbol
+ val ifaceGetter = sym getter iface
+
+ def si6231Restriction() {
+ // See SI-6231 comments in LamdaLift for ideas on how to lift the restriction.
+ val msg = sm"""Implementation restriction: local ${iface.fullLocationString} is unable to automatically capture the
+ |free variable ${sym} on behalf of ${currentClass}. You can manually assign it to a val inside the trait,
+ |and refer that that val in ${currentClass}. For more details, see SI-6231."""
+ reporter.error(tree.pos, msg)
+ }
+
+ if (ifaceGetter == NoSymbol) {
+ if (sym.isParamAccessor) {
+ si6231Restriction()
+ EmptyTree
+ }
+ else abort("No getter for " + sym + " in " + iface)
+ }
+ else typedPos(tree.pos)((qual DOT ifaceGetter)())
case Assign(Apply(lhs @ Select(qual, _), List()), rhs) =>
// assign to fields in some implementation class via an abstract
@@ -1244,7 +1242,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
needsExpandedSetterName(lhs.symbol)
) setPos lhs.pos
- typedPos(tree.pos) { (qual DOT setter)(rhs) }
+ typedPos(tree.pos)((qual DOT setter)(rhs))
case _ =>
tree
@@ -1256,19 +1254,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* when coming back, it performs a postTransform at phase after.
*/
override def transform(tree: Tree): Tree = {
- try { //debug
- val outerTyper = localTyper
- val tree1 = super.transform(preTransform(tree))
- val res = atPhase(phase.next)(postTransform(tree1))
- // needed when not flattening inner classes. parts after an
- // inner class will otherwise be typechecked with a wrong scope
- localTyper = outerTyper
- res
- } catch {
- case ex: Throwable =>
- if (settings.debug.value) Console.println("exception when traversing " + tree)
- throw ex
- }
+ val saved = localTyper
+ val tree1 = super.transform(preTransform(tree))
+ // localTyper needed when not flattening inner classes. parts after an
+ // inner class will otherwise be typechecked with a wrong scope
+ try afterMixin(postTransform(tree1))
+ finally localTyper = saved
}
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index 1710e8e..67be81b 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -1,15 +1,15 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package transform
-import collection.mutable.HashMap
+import scala.collection.mutable
import symtab.Flags._
import util.HashSet
-import annotation.tailrec
+import scala.annotation.tailrec
/** A class that yields a kind of iterator (`Cursor`),
* which yields all pairs of overriding/overridden symbols
@@ -45,8 +45,14 @@ abstract class OverridingPairs {
* Types always match. Term symbols match if their membertypes
* relative to <base>.this do
*/
- protected def matches(sym1: Symbol, sym2: Symbol): Boolean =
- sym1.isType || (self.memberType(sym1) matches self.memberType(sym2))
+ protected def matches(sym1: Symbol, sym2: Symbol): Boolean = {
+ def tp_s(s: Symbol) = self.memberType(s) + "/" + self.memberType(s).getClass
+ val result = sym1.isType || (self.memberType(sym1) matches self.memberType(sym2))
+ debuglog("overriding-pairs? %s matches %s (%s vs. %s) == %s".format(
+ sym1.fullLocationString, sym2.fullLocationString, tp_s(sym1), tp_s(sym2), result))
+
+ result
+ }
/** An implementation of BitSets as arrays (maybe consider collection.BitSet
* for that?) The main purpose of this is to implement
@@ -74,7 +80,7 @@ abstract class OverridingPairs {
}
/** The symbols that can take part in an overriding pair */
- private val decls = new Scope
+ private val decls = newScope
// fill `decls` with overriding shadowing overridden */
{ def fillDecls(bcs: List[Symbol], deferredflag: Int) {
@@ -98,8 +104,11 @@ abstract class OverridingPairs {
/** A map from baseclasses of <base> to ints, with smaller ints meaning lower in
* linearization order.
+ * symbols that are not baseclasses map to -1.
*/
- private val index = new HashMap[Symbol, Int]
+ private val index = new mutable.HashMap[Symbol, Int] {
+ override def default(key: Symbol) = -1
+ }
// Note: overridingPairs can be called at odd instances by the Eclipse plugin
// Soemtimes symbols are not yet defined and we get missing keys.
@@ -127,29 +136,30 @@ abstract class OverridingPairs {
{ for (i <- List.range(0, size))
subParents(i) = new BitSet(size);
for (p <- parents) {
- index get p.typeSymbol match {
- case Some(pIndex) =>
- for (bc <- p.baseClasses)
- if (p.baseType(bc) =:= self.baseType(bc))
- index get bc match {
- case Some(bcIndex) =>
- include(subParents(bcIndex), pIndex)
- case None =>
- }
- else if (settings.debug.value)
- log("SKIPPING "+p+" -> "+p.baseType(bc)+" / "+self.baseType(bc)+" from "+base)
- case None =>
- }
+ val pIndex = index(p.typeSymbol)
+ if (pIndex >= 0)
+ for (bc <- p.baseClasses)
+ if (p.baseType(bc) =:= self.baseType(bc)) {
+ val bcIndex = index(bc)
+ if (bcIndex >= 0)
+ include(subParents(bcIndex), pIndex)
+ }
}
}
/** Do `sym1` and `sym2` have a common subclass in `parents`?
* In that case we do not follow their overriding pairs
*/
- private def hasCommonParentAsSubclass(sym1: Symbol, sym2: Symbol) = (
- for (index1 <- index get sym1.owner ; index2 <- index get sym2.owner) yield
- intersectionContainsElementLeq(subParents(index1), subParents(index2), index1 min index2)
- ).exists(_ == true)
+ private def hasCommonParentAsSubclass(sym1: Symbol, sym2: Symbol) = {
+ val index1 = index(sym1.owner)
+ (index1 >= 0) && {
+ val index2 = index(sym2.owner)
+ (index2 >= 0) && {
+ intersectionContainsElementLeq(
+ subParents(index1), subParents(index2), index1 min index2)
+ }
+ }
+ }
/** The scope entries that have already been visited as overridden
* (maybe excluded because of hasCommonParentAsSubclass).
diff --git a/src/compiler/scala/tools/nsc/transform/PostErasure.scala b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
new file mode 100644
index 0000000..3ef32ca
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
@@ -0,0 +1,71 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin odersky
+ */
+package scala.tools.nsc
+package transform
+
+/** This phase maps ErasedValueTypes to the underlying unboxed representation and
+ * performs peephole optimizations.
+ */
+trait PostErasure extends InfoTransform with TypingTransformers {
+
+ val global: Global
+ import global._
+ import definitions._
+
+ val phaseName: String = "posterasure"
+
+ def newTransformer(unit: CompilationUnit): Transformer = new PostErasureTransformer(unit)
+ override def changesBaseClasses = false
+
+ object elimErasedValueType extends TypeMap {
+ def apply(tp: Type) = tp match {
+ case ConstantType(Constant(tp: Type)) =>
+ ConstantType(Constant(apply(tp)))
+ case ErasedValueType(tref) =>
+ atPhase(currentRun.erasurePhase)(erasure.erasedValueClassArg(tref))
+ case _ => mapOver(tp)
+ }
+ }
+
+ def transformInfo(sym: Symbol, tp: Type) = elimErasedValueType(tp)
+
+ class PostErasureTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+
+ override def transform(tree: Tree) =
+ super.transform(tree) setType elimErasedValueType(tree.tpe) match {
+ case // new C(arg).underlying ==> arg
+ Apply(sel @ Select(
+ Apply(Select(New(tpt), nme.CONSTRUCTOR), List(arg)),
+ acc), List())
+ if atPhase(currentRun.erasurePhase) {
+ tpt.tpe.typeSymbol.isDerivedValueClass &&
+ sel.symbol == tpt.tpe.typeSymbol.derivedValueClassUnbox
+ } =>
+ if (settings.debug.value) log("Removing "+tree+" -> "+arg)
+ arg
+ case // new C(arg1) == new C(arg2) ==> arg1 == arg2
+ Apply(sel @ Select(
+ Apply(Select(New(tpt1), nme.CONSTRUCTOR), List(arg1)),
+ cmp),
+ List(Apply(Select(New(tpt2), nme.CONSTRUCTOR), List(arg2))))
+ if atPhase(currentRun.erasurePhase) {
+ tpt1.tpe.typeSymbol.isDerivedValueClass &&
+ (sel.symbol == Object_== || sel.symbol == Object_!=) &&
+ tpt2.tpe.typeSymbol == tpt1.tpe.typeSymbol
+ } =>
+ val result = Apply(Select(arg1, cmp) setPos sel.pos, List(arg2)) setPos tree.pos
+ log("shortcircuiting equality "+tree+" -> "+result)
+ localTyper.typed(result)
+
+ case // arg.asInstanceOf[T] ==> arg if arg.tpe == T
+ Apply(TypeApply(cast @ Select(arg, asinstanceof), List(tpt)), List())
+ if cast.symbol == Object_asInstanceOf && arg.tpe =:= tpt.tpe => // !!! <:< ?
+ if (settings.debug.value) log("Shortening "+tree+" -> "+arg)
+ arg
+ case tree1 =>
+ tree1
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/transform/Reifiers.scala b/src/compiler/scala/tools/nsc/transform/Reifiers.scala
deleted file mode 100644
index 6b0d441..0000000
--- a/src/compiler/scala/tools/nsc/transform/Reifiers.scala
+++ /dev/null
@@ -1,330 +0,0 @@
-package scala.tools.nsc
-package transform
-
-import scala.tools.nsc.symtab.SymbolTable
-import scala.reflect
-import collection.mutable.HashMap
-
-/** Functions to reify (and un-reify) symbols, types, and trees.
- * These can be used with only a symbol table; they do not
- * need a full compiler.
- *
- * @author Gilles Dubochet, Lex Spoon
- */
-trait Reifiers {
- val symbols: SymbolTable
- import symbols._
-
- private def mkGlobalSymbol(fullname: String, sym: Symbol): reflect.Symbol =
- if (sym.isClass) reflect.Class(fullname)
- else if (sym.isType) reflect.TypeField(fullname, reify(sym.info))
- else if (sym.isMethod) reflect.Method(fullname, reify(sym.info))
- else if (sym.isValueParameter) reflect.LocalValue(reflect.NoSymbol, fullname, reify(sym.info))
- else reflect.Field(fullname, reify(sym.info));
-
- def reify(sym: Symbol): reflect.Symbol = {
- if (sym.isRoot || sym.isRootPackage || sym.isEmptyPackageClass || sym.isEmptyPackage)
- reflect.RootSymbol
- else if (sym.isValueParameter)
- mkGlobalSymbol(sym.name.toString, sym)
- else if (sym.owner.isTerm)
- reflect.NoSymbol
- else reify(sym.owner) match {
- case reflect.NoSymbol =>
- reflect.NoSymbol;
- case reflect.RootSymbol =>
- mkGlobalSymbol(sym.name.toString(), sym)
- case reflect.Class(ownername) =>
- mkGlobalSymbol(ownername + "." + sym.name, sym)
- case _ =>
- reflect.NoSymbol
- }
- }
-
- var _log_reify_type_ = false
-
- def reify(tp: Type): reflect.Type = tp match {
- case ErrorType =>
- reflect.NoType
- case WildcardType =>
- if (_log_reify_type_) println("cannot handle WildcardType")
- reflect.NoType
- case NoType =>
- reflect.NoType
- case NoPrefix =>
- reflect.NoType
- case ThisType(sym) =>
- val rsym = reify(sym)
- reflect.ThisType(rsym)
- case SingleType(pre, sym) =>
- reflect.SingleType(reify(pre), reify(sym))
- case ConstantType(value) =>
- reify(value.tpe)
- case TypeRef(pre, sym, args) =>
- val rpre = reify(pre)
- val rsym = reify(sym)
- val rargs = args map reify
- val beforeArgs = reflect.PrefixedType(rpre, rsym)
- if (rargs.isEmpty)
- beforeArgs
- else if (rpre == reflect.NoType || rsym == reflect.NoSymbol)
- beforeArgs
- else
- reflect.AppliedType(beforeArgs, rargs)
- case TypeBounds(lo, hi) =>
- reflect.TypeBounds(reify(lo), reify(hi))
- case RefinedType(parents, defs) =>
- if (_log_reify_type_) println("cannot handle RefinedType "+tp); reflect.NoType
- case ClassInfoType(parents, defs, clazz) =>
- if (_log_reify_type_) println("cannot handle ClassInfoType "+tp); reflect.NoType
- case MethodType(params, result) =>
- reflect.MethodType(params.map(reify), reify(result))
- case NullaryMethodType(result) =>
- reflect.NullaryMethodType(reify(result))
- case PolyType(tparams, result) =>
- val boundss =
- for {
- param <- tparams
- TypeBounds(lo,hi) = param.info.bounds
- } yield (reify(lo), reify(hi))
-
- reflect.PolyType(
- tparams.map(reify),
- boundss,
- reify(result))
- //todo: treat ExistentialType
- case AnnotatedType(annots, tp, _) =>
- reify(tp)
- case _ =>
- println("could not reify: " + tp)
- reflect.NoType
- }
-
-
- /** This is woefully incomplete. It is barely enough
- * to process the types of Constant's .
- */
- def unreify(tpe: reflect.Type): Type =
- tpe match {
- case reflect.NoPrefix => NoPrefix
- case reflect.NoType => NoType
- case reflect.NamedType(fullname) =>
- //NamedType(fullname)
- println("NamedType: " + fullname)
- NoType
- case reflect.PrefixedType(_, reflect.Class("scala.Array")) =>
- definitions.ArrayClass.tpe
- case reflect.PrefixedType(_, reflect.Class("java.lang.String")) =>
- definitions.StringClass.tpe
- case reflect.PrefixedType(_, reflect.Class("scala.Unit")) =>
- definitions.UnitClass.tpe
- case reflect.PrefixedType(_, reflect.Class("scala.Boolean")) =>
- definitions.BooleanClass.tpe
- case reflect.PrefixedType(_, reflect.Class("scala.Byte")) =>
- definitions.ByteClass.tpe
- case reflect.PrefixedType(_, reflect.Class("scala.Short")) =>
- definitions.ShortClass.tpe
- case reflect.PrefixedType(_, reflect.Class("scala.Int")) =>
- definitions.IntClass.tpe
- case reflect.PrefixedType(_, reflect.Class("scala.Long")) =>
- definitions.LongClass.tpe
- case reflect.PrefixedType(_, reflect.Class("scala.Float")) =>
- definitions.FloatClass.tpe
- case reflect.PrefixedType(_, reflect.Class("scala.Double")) =>
- definitions.DoubleClass.tpe
- case reflect.PrefixedType(pre, sym) =>
- NoType
- case reflect.SingleType(pre, sym) =>
- SingleType(unreify(pre), unreify(sym))
- case reflect.ThisType(clazz) =>
- ThisType(unreify(clazz))
- case reflect.AppliedType(tpe, args) =>
- val untpe = unreify(tpe)
- if (untpe == NoType)
- NoType
- else
- appliedType(untpe, args.map(unreify))
- case reflect.TypeBounds(lo, hi) =>
- TypeBounds(unreify(lo), unreify(hi))
- case reflect.MethodType(params, restpe) =>
- MethodType(params.map(unreify), unreify(restpe))
- case reflect.NullaryMethodType(restpe) =>
- NullaryMethodType(unreify(restpe))
- case reflect.PolyType(typeParams, typeBounds, resultType) =>
- PolyType(typeParams.map(unreify), unreify(resultType))
- //todo: treat ExistentialType
- case _ => NoType
- }
-
-
- /** This is woefully incomplete. It is barely enough
- * to process the types of Constant's .
- */
- def unreify(symbol: reflect.Symbol): Symbol =
- symbol match {
- case reflect.Class(fullname) =>
- fullname match {
- case "scala.Unit" => definitions.UnitClass
- case "scala.Boolean" => definitions.BooleanClass
- case "scala.Byte" => definitions.ByteClass
- case "scala.Short" => definitions.ShortClass
- case "scala.Int" => definitions.IntClass
- case "scala.Long" => definitions.LongClass
- case "scala.Float" => definitions.FloatClass
- case "scala.Double" => definitions.DoubleClass
-
- case "scala.Array" => definitions.ArrayClass
-
- case _ => NoSymbol
-
- }
-
- case _ => NoSymbol
- }
-
- case class FreeValue(tree: Tree) extends reflect.Tree
-
- class ReifyEnvironment extends HashMap[Symbol, reflect.Symbol] {
- var targets = new HashMap[String, Option[reflect.LabelSymbol]]()
- def addTarget(name: String, target: reflect.LabelSymbol): Unit =
- targets.update(name, Some(target))
- def getTarget(name: String): Option[reflect.LabelSymbol] =
- targets.get(name) match {
- case None =>
- targets.update(name, None)
- None
- //case Some(None) => None //bq:redundant
- case Some(tgt) => tgt
- }
- def hasAllTargets: Boolean =
- targets.iterator.map(_._2).forall {
- case Some(_) => true
- case None => false
- }
- override def update(sym: Symbol, rsym: reflect.Symbol) =
- super.update(sym,rsym)
- }
-
-
- class Reifier(env: ReifyEnvironment, currentOwner: reflect.Symbol)
- {
- def reify(tree: Tree): reflect.Tree = tree match {
- case Ident(_) =>
- val rsym = reify(tree.symbol);
- //Console.println("LiftCode: seen ident")
- if (rsym == reflect.NoSymbol) {
- //Console.println(" free = "+tree)
- FreeValue(tree)
- } else {
- //Console.println(" rsym = "+rsym)
- reflect.Ident(rsym)
- }
- case Select(qual, _) =>
- val rsym = reify(tree.symbol);
- if (rsym == reflect.NoSymbol) throw new TypeError("cannot reify symbol: " + tree.symbol)
- else reflect.Select(reify(qual), reify(tree.symbol))
-
- case Literal(constant) =>
- reflect.Literal(constant.value)
-
- case Apply(name, args) if name.toString().startsWith("label$") =>
- env.getTarget(name.toString()) match {
- case None => throw new TypeError("cannot reify tree (no forward jumps allowed): " + tree)
- case Some(label) => reflect.Goto(label)
- }
-
- case Apply(fun, args) =>
- reflect.Apply(reify(fun), args map reify)
-
- case TypeApply(fun, args) =>
- reflect.TypeApply(reify(fun), args map (_.tpe) map reify)
-
- case Function(vparams, body) =>
- var env1 = env
- for (vparam <- vparams) {
- val local = reflect.LocalValue(
- currentOwner, vparam.symbol.name.toString(), reify(vparam.symbol.tpe));
- env1.update(vparam.symbol, local);
- }
- reflect.Function(vparams map (_.symbol) map env1,
- new Reifier(env1, currentOwner).reify(body))
- case tree at This(_) if tree.symbol.isModule =>
- // there is no reflect node for a module's this, so
- // represent it as a selection of the module
- reify(
- Select(This(tree.symbol.owner), tree.symbol.name))
- case This(_) =>
- reflect.This(reify(tree.symbol))
- case Block(stats, expr) =>
- reflect.Block(stats.map(reify), reify(expr))
- case New(clazz) if (clazz.isType) =>
- val reifiedSymbol = reify(clazz.symbol)
- reflect.New(reflect.Ident(reifiedSymbol))
- case New(clazz) =>
- val reifiedClass = reify(clazz)
- reflect.New(reifiedClass)
- case Typed(t, _) =>
- reify(t)
- case If(cond, thenp, elsep) =>
- reflect.If(reify(cond), reify(thenp), reify(elsep))
- case Assign(lhs, rhs) =>
- reflect.Assign(reify(lhs), reify(rhs))
-
- case LabelDef(name, Nil, body) =>
- val sym = new reflect.LabelSymbol(name.toString())
- env.addTarget(name.toString(), sym)
- val res = reflect.Target(sym, reify(body))
- res
-
- case vd @ ValDef(mods, name, tpt, rhs) =>
- val rtpe = reify(vd.tpe) // will return null, currently?!
- val sym = reflect.LocalValue(currentOwner, name.toString(), rtpe)
- env(vd.symbol) = sym // bq: despite Scala's scoping rules, this should work because references to vd.symbol were type checked.
- val rhs_ = reify(rhs)
- reflect.ValDef(sym, rhs_)
-
- case cd @ ClassDef(mods, name, tparams, impl) =>
- if(!tparams.isEmpty)
- throw new TypeError("cannot handle polymorphic ClassDef ("+name+"): " + tparams)
- val rsym = reify(cd.symbol)
- val rimp = reify(impl)
- val rtpe = reify(impl.self.tpt.tpe) //todo: update
- reflect.ClassDef(rsym, rtpe, rimp.asInstanceOf[reflect.Template])
-
- case tmpl @ Template(parents, self, body) =>
- val rparents = for (p <- parents) yield { reify(p.tpe) }
- //todo: add self to reified templates
- reflect.Template(rparents, body.map(reify))
-
- case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- if(!tparams.isEmpty)
- throw new TypeError("cannot handle polymorphic DefDef ("+name+"): " + tparams)
- val rsym = reify(dd.symbol)
- val rparss = vparamss map { x => x map (reify) }
- val rret = reify(tpt.tpe)
- val rrhs = reify(rhs)
- reflect.DefDef(rsym, rparss, rret, rrhs)
-
- case sp @ Super(qual, mix) =>
- val rsym = reify(sp.symbol)
- reflect.Super(rsym)
-
- case _ =>
- throw new TypeError("cannot reify tree ("+tree.getClass()+"): " + tree)
- }
-
- def reify(sym: Symbol): reflect.Symbol =
- env.get(sym) match {
- case Some(rsym) =>
- rsym
- case None =>
- Reifiers.this.reify(sym)
- }
-
- def reify(tpe: Type): reflect.Type =
- Reifiers.this.reify(tpe)
- }
-
- def reify(tree: Tree): reflect.Tree =
- new Reifier(new ReifyEnvironment(), reflect.NoSymbol).reify(tree)
-}
diff --git a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
index e7e394b..44d8860 100644
--- a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -9,7 +9,7 @@ package transform
/** A sample transform.
*/
abstract class SampleTransform extends Transform {
- // inherits abstract value `global' and class `Phase' from Transform
+ // inherits abstract value `global` and class `Phase` from Transform
import global._ // the global environment
import definitions._ // standard classes and methods
@@ -24,21 +24,21 @@ abstract class SampleTransform extends Transform {
class SampleTransformer(unit: CompilationUnit) extends Transformer {
override def transform(tree: Tree): Tree = {
- val tree1 = super.transform(tree); // transformers always maintain `currentOwner'.
+ val tree1 = super.transform(tree); // transformers always maintain `currentOwner`.
tree1 match {
case Block(List(), expr) => // a simple optimization
expr
case Block(defs, sup @ Super(qual, mix)) => // A hypthothetic transformation, which replaces
// {super} by {super.sample}
- treeCopy.Block( // `copy' is the usual lazy tree copier
+ treeCopy.Block( // `copy` is the usual lazy tree copier
tree1, defs,
- typed( // `typed' assigns types to its tree argument
- atPos(tree1.pos)( // `atPos' fills in position of its tree argument
- Select( // The `Select' factory method is defined in class `Trees'
+ typed( // `typed` assigns types to its tree argument
+ atPos(tree1.pos)( // `atPos` fills in position of its tree argument
+ Select( // The `Select` factory method is defined in class `Trees`
sup,
- currentOwner.newValue( // creates a new term symbol owned by `currentowner'
- tree1.pos,
- newTermName("sample")))))) // The standard term name creator
+ currentOwner.newValue( // creates a new term symbol owned by `currentowner`
+ newTermName("sample"), // The standard term name creator
+ tree1.pos)))))
case _ =>
tree1
}
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 3285ef4..7e85647 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Iulian Dragos
*/
@@ -8,8 +8,45 @@ package transform
import scala.tools.nsc.symtab.Flags
import scala.collection.{ mutable, immutable }
+import scala.language.postfixOps
+import scala.language.existentials
/** Specialize code on types.
+ *
+ * Make sure you've read the thesis:
+ *
+ * Iulian Dragos: Compiling Scala for Performance (chapter 4)
+ *
+ * There are some things worth noting, (possibly) not mentioned there:
+ * 0) Make sure you understand the meaning of various `SpecializedInfo` descriptors
+ * defined below.
+ *
+ * 1) Specializing traits by introducing bridges in specialized methods
+ * of the specialized trait may introduce problems during mixin composition.
+ * Concretely, it may cause cyclic calls and result in a stack overflow.
+ * See ticket #4351.
+ * This was solved by introducing an `Abstract` specialized info descriptor.
+ * Instead of generating a bridge in the trait, an abstract method is generated.
+ *
+ * 2) Specialized private members sometimes have to be switched to protected.
+ * In some cases, even this is not enough. Example:
+ *
+ * {{{
+ * class A[@specialized T](protected val d: T) {
+ * def foo(that: A[T]) = that.d
+ * }
+ * }}}
+ *
+ * Specialization will generate a specialized class and a specialized method:
+ *
+ * {{{
+ * class A$mcI$sp(protected val d: Int) extends A[Int] {
+ * def foo(that: A[Int]) = foo$mcI$sp(that)
+ * def foo(that: A[Int]) = that.d
+ * }
+ * }}}
+ *
+ * Above, `A$mcI$sp` cannot access `d`, so the method cannot be typechecked.
*/
abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
import global._
@@ -17,49 +54,79 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** the name of the phase: */
val phaseName: String = "specialize"
+ /** The following flags may be set by this phase: */
+ override def phaseNewFlags: Long = notPRIVATE | lateFINAL
+
/** This phase changes base classes. */
override def changesBaseClasses = true
override def keepsTypeParams = true
type TypeEnv = immutable.Map[Symbol, Type]
def emptyEnv: TypeEnv = Map[Symbol, Type]()
+
private implicit val typeOrdering: Ordering[Type] = Ordering[String] on ("" + _.typeSymbol.name)
import definitions.{
- RootClass, BooleanClass, UnitClass, ArrayClass,
- ScalaValueClasses, isValueClass, isScalaValueType,
- SpecializedClass, RepeatedParamClass, JavaRepeatedParamClass,
- AnyRefClass, ObjectClass, Predef_AnyRef,
- uncheckedVarianceClass
+ BooleanClass, UnitClass, ArrayClass,
+ ScalaValueClasses, isPrimitiveValueClass, isPrimitiveValueType,
+ SpecializedClass, UnspecializedClass, AnyRefClass, ObjectClass,
+ GroupOfSpecializable, uncheckedVarianceClass, ScalaInlineClass
}
+ import rootMirror.RootClass
+
+ /** TODO - this is a lot of maps.
+ */
+
+ /** For a given class and concrete type arguments, give its specialized class */
+ val specializedClass = perRunCaches.newMap[(Symbol, TypeEnv), Symbol]
+
+ /** Map a method symbol to a list of its specialized overloads in the same class. */
+ private val overloads = perRunCaches.newMap[Symbol, List[Overload]]() withDefaultValue Nil
- private def isSpecialized(sym: Symbol) = sym hasAnnotation SpecializedClass
- private def hasSpecializedFlag(sym: Symbol) = sym hasFlag SPECIALIZED
- private def specializedTypes(tps: List[Symbol]) = tps filter isSpecialized
- private def specializedOn(sym: Symbol) = sym getAnnotation SpecializedClass match {
- case Some(AnnotationInfo(_, args, _)) => args
- case _ => Nil
+ /** Map a symbol to additional information on specialization. */
+ private val info = perRunCaches.newMap[Symbol, SpecializedInfo]()
+
+ /** Map class symbols to the type environments where they were created. */
+ private val typeEnv = perRunCaches.newMap[Symbol, TypeEnv]() withDefaultValue emptyEnv
+
+ // Key: a specialized class or method
+ // Value: a map from tparams in the original class to tparams in the specialized class.
+ private val anyrefSpecCache = perRunCaches.newMap[Symbol, mutable.Map[Symbol, Symbol]]()
+
+ // holds mappings from members to the type variables in the class
+ // that they were already specialized for, so that they don't get
+ // specialized twice (this is for AnyRef specializations)
+ private val wasSpecializedForTypeVars = perRunCaches.newMap[Symbol, Set[Symbol]]() withDefaultValue Set()
+
+ /** Concrete methods that use a specialized type, or override such methods. */
+ private val concreteSpecMethods = perRunCaches.newWeakSet[Symbol]()
+
+ private def specializedTypes(tps: List[Symbol]) = tps filter (_.isSpecialized)
+ private def specializedOn(sym: Symbol): List[Symbol] = {
+ sym getAnnotation SpecializedClass match {
+ case Some(AnnotationInfo(_, Nil, _)) => specializableTypes.map(_.typeSymbol)
+ case Some(ann @ AnnotationInfo(_, args, _)) => {
+ args map (_.tpe) flatMap { tp =>
+ tp baseType GroupOfSpecializable match {
+ case TypeRef(_, GroupOfSpecializable, arg :: Nil) =>
+ arg.typeArgs map (_.typeSymbol)
+ case _ =>
+ tp.typeSymbol :: Nil
+ }
+ }
+ }
+ case _ => Nil
+ }
}
// If we replace `isBoundedGeneric` with (tp <:< AnyRefClass.tpe),
// then pos/spec-List.scala fails - why? Does this kind of check fail
// for similar reasons? Does `sym.isAbstractType` make a difference?
- private def isSpecializedAnyRefSubtype(tp: Type, sym: Symbol) = (
- specializedOn(sym).exists(_.symbol == Predef_AnyRef) // specialized on AnyRef
- && !isValueClass(tp.typeSymbol)
- && isBoundedGeneric(tp)
- )
- private def isBoundedGeneric(tp: Type) = tp match {
- case TypeRef(_, sym, _) if sym.isAbstractType => (tp <:< AnyRefClass.tpe)
- case TypeRef(_, sym, _) => !isValueClass(sym)
- case _ => false
- }
-
- @inline private def debuglog(msg: => String) {
- if (settings.debug.value) log(msg)
- }
- @inline private def ifDebug(body: => Unit) {
- if (settings.debug.value) { body }
+ private def isSpecializedAnyRefSubtype(tp: Type, sym: Symbol) = {
+ specializedOn(sym).exists(s => !isPrimitiveValueClass(s)) &&
+ !isPrimitiveValueClass(tp.typeSymbol) &&
+ isBoundedGeneric(tp)
+ //(tp <:< AnyRefClass.tpe)
}
object TypeEnv {
@@ -69,10 +136,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def fromSpecialization(sym: Symbol, args: List[Type]): TypeEnv = {
ifDebug(assert(sym.info.typeParams.length == args.length, sym + " args: " + args))
- emptyEnv ++ (sym.info.typeParams zip args filter (kv => isSpecialized(kv._1)))
+ emptyEnv ++ collectMap2(sym.info.typeParams, args)((k, v) => k.isSpecialized)
}
- /** Is typeenv `t1` included in `t2`? All type variables in `t1`
+ /** Does typeenv `t1` include `t2`? All type variables in `t1`
* are defined in `t2` and:
* - are bound to the same type, or
* - are an AnyRef specialization and `t2` is bound to a subtype of AnyRef
@@ -80,7 +147,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def includes(t1: TypeEnv, t2: TypeEnv) = t1 forall {
case (sym, tpe) =>
t2 get sym exists { t2tp =>
- (tpe == t2tp) || !(isScalaValueType(tpe) || isScalaValueType(t2tp)) // u.t.b. (t2tp <:< AnyRefClass.tpe)
+ (tpe == t2tp) || !(isPrimitiveValueType(tpe) || isPrimitiveValueType(t2tp)) // u.t.b. (t2tp <:< AnyRefClass.tpe)
}
}
@@ -90,11 +157,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Is the given environment a valid specialization for sym?
* It is valid if each binding is from a @specialized type parameter in sym (or its owner)
- * to a type for which `sym' is specialized.
+ * to a type for which `sym` is specialized.
*/
def isValid(env: TypeEnv, sym: Symbol): Boolean = {
env forall { case (tvar, tpe) =>
- isSpecialized(tvar) && (concreteTypes(tvar) contains tpe) && {
+ tvar.isSpecialized && (concreteTypes(tvar) contains tpe) && {
(sym.typeParams contains tvar) ||
(sym.owner != RootClass && (sym.owner.typeParams contains tvar))
}
@@ -102,24 +169,23 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- /** For a given class and concrete type arguments, give its specialized class */
- val specializedClass: mutable.Map[(Symbol, TypeEnv), Symbol] = new mutable.LinkedHashMap
-
- /** Returns the generic class that was specialized to 'cls', or
- * 'cls' itself if cls is not a specialized subclass.
+ /** Returns the generic class that was specialized to 'sClass', or
+ * 'sClass' itself if sClass is not a specialized subclass.
*/
- def genericClass(cls: Symbol): Symbol =
- if (hasSpecializedFlag(cls)) cls.info.parents.head.typeSymbol
- else cls
-
- /** Map a method symbol to a list of its specialized overloads in the same class. */
- private val overloads: mutable.Map[Symbol, List[Overload]] =
- new mutable.HashMap[Symbol, List[Overload]] {
- override def default(key: Symbol): List[Overload] = Nil
- }
+ def genericClass(sClass: Symbol): Symbol =
+ if (sClass.isSpecialized) sClass.superClass
+ else sClass
case class Overload(sym: Symbol, env: TypeEnv) {
override def toString = "specialized overload " + sym + " in " + env
+ def matchesSym(other: Symbol) = sym.tpe =:= other.tpe
+ def matchesEnv(env1: TypeEnv) = TypeEnv.includes(env, env1)
+ }
+ private def newOverload(method: Symbol, specializedMethod: Symbol, env: TypeEnv) = {
+ assert(!specializedMethod.isOverloaded, specializedMethod.defString)
+ val om = Overload(specializedMethod, env)
+ overloads(method) ::= om
+ om
}
/** Just to mark uncheckable */
@@ -155,7 +221,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def target = t
}
- /** Symbol is a specialized accessor for the `target' field. */
+ /** Symbol is a specialized abstract method, either specialized or original. The original `t` is abstract. */
+ case class Abstract(t: Symbol) extends SpecializedInfo {
+ def target = t
+ }
+
+ /** Symbol is a specialized accessor for the `target` field. */
case class SpecializedAccessor(target: Symbol) extends SpecializedInfo {
override def isAccessor = true
}
@@ -163,7 +234,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Symbol is a specialized method whose body should be the target's method body. */
case class Implementation(target: Symbol) extends SpecializedInfo
- /** Symbol is a specialized override paired with `target'. */
+ /** Symbol is a specialized override paired with `target`. */
case class SpecialOverride(target: Symbol) extends SpecializedInfo
/** A specialized inner class that specializes original inner class `target` on a type parameter of the enclosing class, in the typeenv `env`. */
@@ -175,7 +246,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Type bounds of a @specialized type var are now in the environment. */
override def typeBoundsIn(env: TypeEnv): Boolean = {
target.info.typeParams exists { tvar =>
- isSpecialized(tvar) && (specializedTypeVars(tvar.info.bounds) exists env.isDefinedAt)
+ tvar.isSpecialized && (specializedTypeVars(tvar.info.bounds) exists env.isDefinedAt)
}
}
@@ -183,32 +254,29 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val stvTypeParams = specializedTypeVars(target.info.typeParams map (_.info))
val stvResult = specializedTypeVars(target.info.resultType)
- log("degenerate: " + target + " stv tparams: " + stvTypeParams + " stv info: " + stvResult)
+ debuglog("degenerate: " + target + " stv tparams: " + stvTypeParams + " stv info: " + stvResult)
(stvTypeParams -- stvResult).nonEmpty
}
}
- /** Map a symbol to additional information on specialization. */
- private val info: mutable.Map[Symbol, SpecializedInfo] = perRunCaches.newMap[Symbol, SpecializedInfo]()
-
- /** Has `clazz' any type parameters that need be specialized? */
+ /** Has `clazz` any type parameters that need be specialized? */
def hasSpecializedParams(clazz: Symbol) =
- clazz.info.typeParams exists isSpecialized
+ clazz.info.typeParams exists (_.isSpecialized)
/** Return specialized type parameters. */
def specializedParams(sym: Symbol): List[Symbol] =
- sym.info.typeParams filter isSpecialized
+ sym.info.typeParams filter (_.isSpecialized)
def splitParams(tps: List[Symbol]) =
- tps partition isSpecialized
+ tps partition (_.isSpecialized)
/** Given an original class symbol and a list of types its type parameters are instantiated at
* returns a list of type parameters that should remain in the TypeRef when instantiating a
* specialized type.
*/
def survivingArgs(sym: Symbol, args: List[Type]): List[Type] =
- for ((tvar, tpe) <- sym.info.typeParams.zip(args) if !isSpecialized(tvar) || !isScalaValueType(tpe))
+ for ((tvar, tpe) <- sym.info.typeParams.zip(args) if !tvar.isSpecialized || !isPrimitiveValueType(tpe))
yield tpe
val specializedType = new TypeMap {
@@ -217,10 +285,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val pre1 = this(pre)
// when searching for a specialized class, take care to map all
// type parameters that are subtypes of AnyRef to AnyRef
- val args1 = (args zip sym.typeParams) map {
- case (tp, orig) if isSpecializedAnyRefSubtype(tp, orig) => AnyRefClass.tpe
- case (tp, _) => tp
- }
+ val args1 = map2(args, sym.info.typeParams)((tp, orig) =>
+ if (isSpecializedAnyRefSubtype(tp, orig)) AnyRefClass.tpe
+ else tp
+ )
specializedClass.get((sym, TypeEnv.fromSpecialization(sym, args1))) match {
case Some(sym1) => typeRef(pre1, sym1, survivingArgs(sym, args))
case None => typeRef(pre1, sym, args)
@@ -229,10 +297,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- /** Return the specialized overload of sym in the given env, if any. */
- def overload(sym: Symbol, env: TypeEnv) =
- overloads(sym).find(ov => TypeEnv.includes(ov.env, env))
-
/** Return the specialized name of 'sym' in the given environment. It
* guarantees the same result regardless of the map order by sorting
* type variables alphabetically.
@@ -243,7 +307,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else specializedTypeVars(sym).intersect(env.keySet)
)
val (methparams, others) = tvars.toList sortBy ("" + _.name) partition (_.owner.isMethod)
- debuglog("specName(" + sym + ") env: " + env + " tvars: " + tvars)
+ // debuglog("specName(" + sym + ") env: " + env + " tvars: " + tvars)
specializedName(sym.name, methparams map env, others map env)
}
@@ -260,65 +324,75 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
nme.getterToLocal(specializedName(nme.localToGetter(name), types1, types2))
else {
val (base, cs, ms) = nme.splitSpecializedName(name)
- val abbrevs = definitions.abbrvTag withDefaultValue definitions.abbrvTag(ObjectClass)
newTermName(base.toString + "$"
- + "m" + ms + types1.map(t => abbrevs(t.typeSymbol)).mkString("", "", "")
- + "c" + cs + types2.map(t => abbrevs(t.typeSymbol)).mkString("", "", "$sp"))
+ + "m" + ms + types1.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "")
+ + "c" + cs + types2.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "$sp"))
}
}
- lazy val primitiveTypes = ScalaValueClasses map (_.tpe)
+ lazy val specializableTypes = ScalaValueClasses map (_.tpe) sorted
- /** Return the types `sym' should be specialized at. This may be some of the primitive types
+ /** If the symbol is the companion of a value class, the value class.
+ * Otherwise, AnyRef.
+ */
+ def specializesClass(sym: Symbol): Symbol = {
+ val c = sym.companionClass
+ if (isPrimitiveValueClass(c)) c else AnyRefClass
+ }
+
+ /** Return the types `sym` should be specialized at. This may be some of the primitive types
* or AnyRef. AnyRef means that a new type parameter T will be generated later, known to be a
* subtype of AnyRef (T <: AnyRef).
* These are in a meaningful order for stability purposes.
*/
- def concreteTypes(sym: Symbol): List[Type] = (
- if (!isSpecialized(sym)) Nil // no @specialized Annotation
- else specializedOn(sym) match {
- case Nil => primitiveTypes // specialized on everything
- case args => // specialized on args
- (args map { tp =>
- if (tp.symbol == Predef_AnyRef) {
- if (isBoundedGeneric(sym.tpe))
- reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefClass.tpe + ".")
- AnyRefClass.tpe
- }
- else tp.symbol.companionClass.tpe
- }).sorted
- }
- )
+ def concreteTypes(sym: Symbol): List[Type] = {
+ val types = if (!sym.isSpecialized)
+ Nil // no @specialized Annotation
+ else
+ specializedOn(sym) map (s => specializesClass(s).tpe) sorted
+
+ if (isBoundedGeneric(sym.tpe) && (types contains AnyRefClass))
+ reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefClass.tpe + ".")
+
+ types
+ }
/** Return a list of all type environments for all specializations
- * of @specialized types in `tps'.
+ * of @specialized types in `tps`.
*/
private def specializations(tps: List[Symbol]): List[TypeEnv] = {
// the keys in each TypeEnv
- val keys: List[Symbol] = tps filter isSpecialized
+ val keys: List[Symbol] = tps filter (_.isSpecialized)
// creating each permutation of concrete types
def loop(ctypes: List[List[Type]]): List[List[Type]] = ctypes match {
case Nil => Nil
- case set :: Nil => set map (x => List(x))
+ case set :: Nil => set map (_ :: Nil)
case set :: sets => for (x <- set ; xs <- loop(sets)) yield x :: xs
}
- // zip the keys with each permutation to create a TypeEnv
- loop(keys map concreteTypes) map (keys zip _ toMap)
+ // zip the keys with each permutation to create a TypeEnv.
+ // If we don't exclude the "all AnyRef" specialization, we will
+ // incur duplicate members and crash during mixin.
+ loop(keys map concreteTypes) filterNot (_ forall (_ <:< AnyRefClass.tpe)) map (xss => Map(keys zip xss: _*))
}
- /** Does the given tpe need to be specialized in the environment 'env'?
+ /** Does the given 'sym' need to be specialized in the environment 'env'?
* Specialization is needed for
* - members with specialized type parameters found in the given environment
* - constructors of specialized classes
* - normalized members whose type bounds appear in the environment
+ * But suppressed for:
+ * - any member with the @unspecialized annotation, or which has an
+ * enclosing member with the annotation.
*/
- private def needsSpecialization(env: TypeEnv, sym: Symbol): Boolean = {
- specializedTypeVars(sym).intersect(env.keySet).diff(wasSpecializedForTypeVars(sym)).nonEmpty ||
- (sym.isClassConstructor && (sym.enclClass.typeParams exists isSpecialized)) ||
- (isNormalizedMember(sym) && info(sym).typeBoundsIn(env))
- }
+ private def needsSpecialization(env: TypeEnv, sym: Symbol): Boolean = (
+ !sym.ownerChain.exists(_ hasAnnotation UnspecializedClass) && (
+ specializedTypeVars(sym).intersect(env.keySet).diff(wasSpecializedForTypeVars(sym)).nonEmpty
+ || sym.isClassConstructor && (sym.enclClass.typeParams exists (_.isSpecialized))
+ || isNormalizedMember(sym) && info(sym).typeBoundsIn(env)
+ )
+ )
- def isNormalizedMember(m: Symbol) = hasSpecializedFlag(m) && (info get m exists {
+ def isNormalizedMember(m: Symbol) = m.isSpecialized && (info get m exists {
case NormalizedMember(_) => true
case _ => false
})
@@ -327,8 +401,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
tpes foreach (tp => buf ++= specializedTypeVars(tp))
buf.result
}
- def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] =
- atPhase(currentRun.typerPhase)(specializedTypeVars(sym.info))
+ def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = beforeTyper(specializedTypeVars(sym.info))
/** Return the set of @specialized type variables mentioned by the given type.
* It only counts type variables that appear:
@@ -340,12 +413,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case TypeRef(pre, sym, args) =>
if (sym.isAliasType)
specializedTypeVars(tpe.normalize)
- else if (sym.isTypeParameter && isSpecialized(sym) || (sym.isTypeSkolem && isSpecialized(sym.deSkolemize)))
+ else if (sym.isTypeParameter && sym.isSpecialized || (sym.isTypeSkolem && sym.deSkolemize.isSpecialized))
Set(sym)
else if (sym == ArrayClass)
specializedTypeVars(args)
+ else if (args.isEmpty)
+ Set()
else
- specializedTypeVars(sym.typeParams zip args collect { case (tp, arg) if isSpecialized(tp) => arg })
+ specializedTypeVars(sym.typeParams zip args collect { case (tp, arg) if tp.isSpecialized => arg })
case PolyType(tparams, resTpe) => specializedTypeVars(resTpe :: tparams.map(_.info))
// since this method may be run at phase typer (before uncurry, where NMTs are eliminated)
@@ -353,45 +428,38 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case MethodType(argSyms, resTpe) => specializedTypeVars(resTpe :: argSyms.map(_.tpe))
case ExistentialType(_, res) => specializedTypeVars(res)
case AnnotatedType(_, tp, _) => specializedTypeVars(tp)
- case TypeBounds(lo, hi) => specializedTypeVars(List(lo, hi))
+ case TypeBounds(lo, hi) => specializedTypeVars(lo :: hi :: Nil)
+ case RefinedType(parents, _) => parents flatMap specializedTypeVars toSet
case _ => Set()
}
- // holds mappings from regular type parameter symbols to symbols of
- // specialized type parameters which are subtypes of AnyRef
- private val anyrefSpecCache = perRunCaches.newMap[Symbol, Symbol]()
-
- /** Returns the type parameter in the specialized class `cls` that corresponds to type parameter
- * `sym` in the original class. It will create it if needed or use the one from the cache.
+ /** Returns the type parameter in the specialized class `sClass` that corresponds to type parameter
+ * `tparam` in the original class. It will create it if needed or use the one from the cache.
*/
- private def typeParamSubAnyRef(sym: Symbol, cls: Symbol) = (
- anyrefSpecCache.getOrElseUpdate(sym,
- cls.newTypeParameter(sym.pos, newTypeName(sym.name + "$sp"))
- setInfo TypeBounds(sym.info.bounds.lo, AnyRefClass.tpe)
+ private def typeParamSubAnyRef(tparam: Symbol, sClass: Symbol): Type = {
+ val sClassMap = anyrefSpecCache.getOrElseUpdate(sClass, mutable.Map[Symbol, Symbol]())
+
+ sClassMap.getOrElseUpdate(tparam,
+ tparam.cloneSymbol(sClass, tparam.flags, tparam.name append tpnme.SPECIALIZED_SUFFIX)
+ modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
).tpe
- )
+ }
/** Cleans the anyrefSpecCache of all type parameter symbols of a class.
*/
- private def cleanAnyRefSpecCache(cls: Symbol, decls: List[Symbol]) = (
+ private def cleanAnyRefSpecCache(clazz: Symbol, decls: List[Symbol]) {
// remove class type parameters and those of normalized members.
- cls :: decls foreach {
- _.tpe match {
- case PolyType(tparams, _) => anyrefSpecCache --= tparams
- case _ => ()
- }
- }
- )
-
- // holds mappings from members to the type variables in the class
- // that they were already specialized for, so that they don't get
- // specialized twice (this is for AnyRef specializations)
- private val wasSpecializedForTypeVars =
- perRunCaches.newMap[Symbol, immutable.Set[Symbol]]() withDefaultValue immutable.Set[Symbol]()
+ clazz :: decls foreach (anyrefSpecCache remove _)
+ }
/** Type parameters that survive when specializing in the specified environment. */
def survivingParams(params: List[Symbol], env: TypeEnv) =
- params.filter(p => !isSpecialized(p) || !isScalaValueType(env(p)))
+ params filter {
+ p =>
+ !p.isSpecialized ||
+ !env.contains(p) ||
+ !isPrimitiveValueType(env(p))
+ }
/** Produces the symbols from type parameters `syms` of the original owner,
* in the given type environment `env`. The new owner is `nowner`.
@@ -404,12 +472,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def produceTypeParameters(syms: List[Symbol], nowner: Symbol, env: TypeEnv) = {
val cloned = for (s <- syms) yield if (!env.contains(s)) s.cloneSymbol(nowner) else env(s).typeSymbol
// log("producing type params: " + cloned.map(t => (t, t.tpe.bounds.hi)))
- for ((orig, cln) <- syms zip cloned) {
+ foreach2(syms, cloned) { (orig, cln) =>
cln.removeAnnotation(SpecializedClass)
- if (env.contains(orig)) cln.setInfo(TypeBounds(cln.info.bounds.lo, AnyRefClass.tpe))
+ if (env.contains(orig))
+ cln modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
}
- for (sym <- cloned) sym.setInfo(sym.info.substSym(syms, cloned))
- cloned
+ cloned map (_ substInfo (syms, cloned))
}
/** Maps AnyRef bindings from a raw environment (holding AnyRefs) into type parameters from
@@ -438,38 +506,45 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
def specializeClass(clazz: Symbol, outerEnv: TypeEnv): List[Symbol] = {
def specializedClass(env0: TypeEnv, normMembers: List[Symbol]): Symbol = {
- val cls = clazz.owner.newClass(clazz.pos, specializedName(clazz, env0).toTypeName)
- .setFlag(SPECIALIZED | clazz.flags)
- .resetFlag(CASE)
- cls.sourceFile = clazz.sourceFile
- currentRun.symSource(cls) = clazz.sourceFile // needed later on by mixin
+ /** It gets hard to follow all the clazz and cls, and specializedClass
+ * was both already used for a map and mucho long. So "sClass" is the
+ * specialized subclass of "clazz" throughout this file.
+ */
- val env = mapAnyRefsInSpecSym(env0, clazz, cls)
+ // SI-5545: Eliminate classes with the same name loaded from the bytecode already present - all we need to do is
+ // to force .info on them, as their lazy type will be evaluated and the symbols will be eliminated. Unfortunately
+ // evaluating the info after creating the specialized class will mess the specialized class signature, so we'd
+ // better evaluate it before creating the new class symbol
+ val clazzName = specializedName(clazz, env0).toTypeName
+ val bytecodeClazz = clazz.owner.info.decl(clazzName)
+ // debuglog("Specializing " + clazz + ", but found " + bytecodeClazz + " already there")
+ bytecodeClazz.info
- typeEnv(cls) = env
- this.specializedClass((clazz, env0)) = cls
+ val sClass = clazz.owner.newClass(clazzName, clazz.pos, (clazz.flags | SPECIALIZED) & ~CASE)
- // declarations of the newly specialized class 'cls'
- val decls1 = new Scope
+ def cloneInSpecializedClass(member: Symbol, flagFn: Long => Long, newName: Name = null) =
+ member.cloneSymbol(sClass, flagFn(member.flags | SPECIALIZED), newName)
- // original unspecialized type parameters
- var oldClassTParams: List[Symbol] = Nil
+ sClass.sourceFile = clazz.sourceFile
+ currentRun.symSource(sClass) = clazz.sourceFile // needed later on by mixin
- // unspecialized type parameters of 'cls' (cloned)
- var newClassTParams: List[Symbol] = Nil
+ val env = mapAnyRefsInSpecSym(env0, clazz, sClass)
+ typeEnv(sClass) = env
+ this.specializedClass((clazz, env0)) = sClass
+
+ val decls1 = newScope // declarations of the newly specialized class 'sClass'
+ var oldClassTParams: List[Symbol] = Nil // original unspecialized type parameters
+ var newClassTParams: List[Symbol] = Nil // unspecialized type parameters of 'specializedClass' (cloned)
// has to be a val in order to be computed early. It is later called
// within 'atPhase(next)', which would lead to an infinite cycle otherwise
val specializedInfoType: Type = {
- // val (_, unspecParams) = splitParams(clazz.info.typeParams)
- // oldClassTParams = unspecParams
- val survivedParams = survivingParams(clazz.info.typeParams, env)
- oldClassTParams = survivedParams
- newClassTParams = produceTypeParameters(survivedParams, cls, env) map subst(env)
+ oldClassTParams = survivingParams(clazz.info.typeParams, env)
+ newClassTParams = produceTypeParameters(oldClassTParams, sClass, env) map subst(env)
// log("new tparams " + newClassTParams.zip(newClassTParams map {s => (s.tpe, s.tpe.bounds.hi)}) + ", in env: " + env)
def applyContext(tpe: Type) =
- subst(env, tpe).subst(survivedParams, newClassTParams map (_.tpe))
+ subst(env, tpe).instantiateTypeParams(oldClassTParams, newClassTParams map (_.tpe))
/** Return a list of specialized parents to be re-mixed in a specialized subclass.
* Assuming env = [T -> Int] and
@@ -479,54 +554,63 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* so that class Integral$mci extends Integral[Int] with Numeric$mcI.
*/
def specializedParents(parents: List[Type]): List[Type] = {
- val res = new mutable.ListBuffer[Type]
- // log(cls + ": seeking specialized parents of class with parents: " + parents.map(_.typeSymbol))
+ var res: List[Type] = Nil
+ // log(specializedClass + ": seeking specialized parents of class with parents: " + parents.map(_.typeSymbol))
for (p <- parents) {
- // log(p.typeSymbol)
- val stp = atPhase(phase.next)(specializedType(p))
+ val stp = afterSpecialize(specializedType(p))
if (stp != p)
- if (p.typeSymbol.isTrait) res += stp
+ if (p.typeSymbol.isTrait) res ::= stp
else if (currentRun.compiles(clazz))
reporter.warning(clazz.pos, p.typeSymbol + " must be a trait. Specialized version of "
+ clazz + " will inherit generic " + p) // TODO change to error
}
- res.reverse.toList
+ res
}
- var parents = List(applyContext(atPhase(currentRun.typerPhase)(clazz.tpe)))
+ var parents = List(applyContext(beforeTyper(clazz.tpe)))
// log("!!! Parents: " + parents + ", sym: " + parents.map(_.typeSymbol))
if (parents.head.typeSymbol.isTrait)
parents = parents.head.parents.head :: parents
- val extraSpecializedMixins = specializedParents(clazz.info.parents.map(applyContext))
- log("extraSpecializedMixins: " + extraSpecializedMixins)
- val infoType = ClassInfoType(parents ::: extraSpecializedMixins, decls1, cls)
- if (newClassTParams.isEmpty) infoType else PolyType(newClassTParams, infoType)
+ val extraSpecializedMixins = specializedParents(clazz.info.parents map applyContext)
+ if (extraSpecializedMixins.nonEmpty)
+ debuglog("extra specialized mixins for %s: %s".format(clazz.name.decode, extraSpecializedMixins.mkString(", ")))
+ // If the class being specialized has a self-type, the self type may
+ // require specialization. First exclude classes whose self types have
+ // the same type constructor as the class itself, since they will
+ // already be covered. Then apply the current context to the self-type
+ // as with the parents and assign it to typeOfThis.
+ if (clazz.typeOfThis.typeConstructor ne clazz.typeConstructor) {
+ sClass.typeOfThis = applyContext(clazz.typeOfThis)
+ debuglog("Rewriting self-type for specialized class:\n" +
+ " " + clazz.defStringSeenAs(clazz.typeOfThis) + "\n" +
+ " => " + sClass.defStringSeenAs(sClass.typeOfThis)
+ )
+ }
+ GenPolyType(newClassTParams, ClassInfoType(parents ::: extraSpecializedMixins, decls1, sClass))
}
- atPhase(phase.next)(cls.setInfo(specializedInfoType))
+ afterSpecialize(sClass setInfo specializedInfoType)
val fullEnv = outerEnv ++ env
/** Enter 'sym' in the scope of the current specialized class. It's type is
* mapped through the active environment, binding type variables to concrete
- * types. The existing typeEnv for `sym' is composed with the current active
+ * types. The existing typeEnv for `sym` is composed with the current active
* environment
*/
def enterMember(sym: Symbol): Symbol = {
typeEnv(sym) = fullEnv ++ typeEnv(sym) // append the full environment
- sym.setInfo(sym.info.substThis(clazz, ThisType(cls)).subst(oldClassTParams, newClassTParams map (_.tpe)))
-
+ sym modifyInfo (_.substThis(clazz, sClass).instantiateTypeParams(oldClassTParams, newClassTParams map (_.tpe)))
// we remove any default parameters. At this point, they have been all
// resolved by the type checker. Later on, erasure re-typechecks everything and
// chokes if it finds default parameters for specialized members, even though
// they are never needed.
- sym.info.paramss.flatten foreach (_.resetFlag(DEFAULTPARAM))
-
- decls1.enter(subst(fullEnv)(sym))
+ mapParamss(sym)(_ resetFlag DEFAULTPARAM)
+ decls1 enter subst(fullEnv)(sym)
}
- /** Create and enter in scope an overridden symbol m1 for `m' that forwards
- * to `om'. `om' is a fresh, special overload of m1 that is an implementation
- * of `m'. For example, for a
+ /** Create and enter in scope an overridden symbol m1 for `m` that forwards
+ * to `om`. `om` is a fresh, special overload of m1 that is an implementation
+ * of `m`. For example, for a
*
* class Foo[@specialized A] {
* def m(x: A) = <body> // m
@@ -538,13 +622,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* def m$I(x: Int) = <body>/adapted to env {A -> Int} // om
*/
def forwardToOverload(m: Symbol): Symbol = {
- val specMember = (
- enterMember(m cloneSymbol cls)
- setFlag (OVERRIDE | SPECIALIZED)
- resetFlag (DEFERRED | CASEACCESSOR)
- ) // m1
-
- val om = specializedOverload(cls, m, env).setFlag(OVERRIDE)
+ val specMember = enterMember(cloneInSpecializedClass(m, f => (f | OVERRIDE) & ~(DEFERRED | CASEACCESSOR)))
+ val om = specializedOverload(sClass, m, env).setFlag(OVERRIDE)
val original = info.get(m) match {
case Some(NormalizedMember(tg)) => tg
case _ => m
@@ -553,7 +632,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
info(om) = if (original.isDeferred) Forward(original) else Implementation(original)
typeEnv(om) = env ++ typeEnv(m) // add the environment for any method tparams
- overloads(specMember) ::= Overload(om, typeEnv(om))
+ newOverload(specMember, om, typeEnv(om))
enterMember(om)
}
@@ -565,79 +644,76 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
m.resetFlag(PRIVATE).setFlag(PROTECTED)
if (m.isConstructor) {
- val specCtor = enterMember(m.cloneSymbol(cls) setFlag SPECIALIZED)
+ val specCtor = enterMember(cloneInSpecializedClass(m, x => x))
info(specCtor) = Forward(m)
}
else if (isNormalizedMember(m)) { // methods added by normalization
val NormalizedMember(original) = info(m)
if (nonConflicting(env ++ typeEnv(m))) {
if (info(m).degenerate) {
- debuglog("degenerate normalized member " + m + " info(m): " + info(m))
- val specMember = enterMember(m.cloneSymbol(cls)).setFlag(SPECIALIZED).resetFlag(DEFERRED)
+ debuglog("degenerate normalized member " + m.defString)
+ val specMember = enterMember(cloneInSpecializedClass(m, _ & ~DEFERRED))
info(specMember) = Implementation(original)
typeEnv(specMember) = env ++ typeEnv(m)
- }
- else debuglog({
+ } else {
val om = forwardToOverload(m)
- "normalizedMember " + m + " om: " + om + " typeEnv(om): " + typeEnv(om)
- })
+ debuglog("normalizedMember " + m + " om: " + om + " " + pp(typeEnv(om)))
+ }
}
else
- log("conflicting env for " + m + " env: " + env)
+ debuglog("conflicting env for " + m + " env: " + env)
}
else if (m.isDeferred) { // abstract methods
- val specMember = enterMember(m.cloneSymbol(cls)).setFlag(SPECIALIZED).resetFlag(DEFERRED)
- debuglog("deferred " + specMember.fullName + " is forwarded")
-
- info(specMember) = new Forward(specMember) {
- override def target = m.owner.info.member(specializedName(m, env))
- }
+ val specMember = enterMember(cloneInSpecializedClass(m, _ | DEFERRED))
+ // debuglog("deferred " + specMember.fullName + " remains abstract")
+ info(specMember) = new Abstract(specMember)
+ // was: new Forward(specMember) {
+ // override def target = m.owner.info.member(specializedName(m, env))
+ // }
} else if (m.isMethod && !m.hasAccessorFlag) { // other concrete methods
// log("other concrete " + m)
forwardToOverload(m)
- } else if (m.isValue && !m.isMethod) { // concrete value definition
+ } else if (m.isMethod && m.hasFlag(LAZY)) {
+ forwardToOverload(m)
+
+ } else if (m.isValue && !m.isMethod && !m.hasFlag(LAZY)) { // concrete value definition
def mkAccessor(field: Symbol, name: Name) = {
- val sym = (
- cls.newMethod(field.pos, name)
- setFlag (SPECIALIZED | m.getter(clazz).flags)
- resetFlag (LOCAL | PARAMACCESSOR | CASEACCESSOR | LAZY)
- // we rely on the super class to initialize param accessors
- )
+ val newFlags = (SPECIALIZED | m.getter(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR)
+ // we rely on the super class to initialize param accessors
+ val sym = sClass.newMethod(name, field.pos, newFlags)
info(sym) = SpecializedAccessor(field)
sym
}
def overrideIn(clazz: Symbol, sym: Symbol) = {
- val sym1 = (
- sym cloneSymbol clazz
- setFlag (OVERRIDE | SPECIALIZED)
- resetFlag (DEFERRED | CASEACCESSOR | PARAMACCESSOR | LAZY)
- )
- sym1 setInfo sym1.info.asSeenFrom(clazz.tpe, sym1.owner)
+ val newFlags = (sym.flags | OVERRIDE | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR | PARAMACCESSOR)
+ val sym1 = sym.cloneSymbol(clazz, newFlags)
+ sym1 modifyInfo (_ asSeenFrom (clazz.tpe, sym1.owner))
}
- val specVal = specializedOverload(cls, m, env)
+ val specVal = specializedOverload(sClass, m, env)
addConcreteSpecMethod(m)
specVal.asInstanceOf[TermSymbol].setAlias(m)
enterMember(specVal)
// create accessors
- debuglog("m: " + m + " isLocal: " + nme.isLocalName(m.name) + " specVal: " + specVal.name + " isLocal: " + nme.isLocalName(specVal.name))
+ // debuglog("m: " + m + " isLocal: " + nme.isLocalName(m.name) + " specVal: " + specVal.name + " isLocal: " + nme.isLocalName(specVal.name))
+
if (nme.isLocalName(m.name)) {
- val specGetter = mkAccessor(specVal, nme.localToGetter(specVal.name)).setInfo(MethodType(List(), specVal.info))
- val origGetter = overrideIn(cls, m.getter(clazz))
+ val specGetter = mkAccessor(specVal, nme.localToGetter(specVal.name)) setInfo MethodType(Nil, specVal.info)
+ val origGetter = overrideIn(sClass, m.getter(clazz))
info(origGetter) = Forward(specGetter)
enterMember(specGetter)
enterMember(origGetter)
- debuglog("created accessors: " + specGetter + " orig: " + origGetter)
+ debuglog("specialize accessor in %s: %s -> %s".format(sClass.name.decode, origGetter.name.decode, specGetter.name.decode))
clazz.caseFieldAccessors.find(_.name.startsWith(m.name)) foreach { cfa =>
- val cfaGetter = overrideIn(cls, cfa)
+ val cfaGetter = overrideIn(sClass, cfa)
info(cfaGetter) = SpecializedAccessor(specVal)
enterMember(cfaGetter)
- debuglog("found case field accessor for " + m + " added override " + cfaGetter);
+ debuglog("override case field accessor %s -> %s".format(m.name.decode, cfaGetter.name.decode))
}
if (specVal.isVariable && m.setter(clazz) != NoSymbol) {
@@ -645,25 +721,29 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
.resetFlag(STABLE)
specSetter.setInfo(MethodType(specSetter.newSyntheticValueParams(List(specVal.info)),
UnitClass.tpe))
- val origSetter = overrideIn(cls, m.setter(clazz))
+ val origSetter = overrideIn(sClass, m.setter(clazz))
info(origSetter) = Forward(specSetter)
enterMember(specSetter)
enterMember(origSetter)
}
- } else { // if there are no accessors, specialized methods will need to access this field in specialized subclasses
+ }
+ else { // if there are no accessors, specialized methods will need to access this field in specialized subclasses
m.resetFlag(PRIVATE)
specVal.resetFlag(PRIVATE)
+ debuglog("no accessors for %s/%s, specialized methods must access field in subclass".format(
+ m.name.decode, specVal.name.decode))
}
- } else if (m.isClass) {
- val specClass: Symbol = m.cloneSymbol(cls).setFlag(SPECIALIZED)
+ }
+ else if (m.isClass) {
+ val specClass: Symbol = cloneInSpecializedClass(m, x => x)
typeEnv(specClass) = fullEnv
- specClass.name = specializedName(specClass, fullEnv).toTypeName
+ specClass setName specializedName(specClass, fullEnv).toTypeName
enterMember(specClass)
- log("entered specialized class " + specClass.fullName)
+ debuglog("entered specialized class " + specClass.fullName)
info(specClass) = SpecializedInnerClass(m, fullEnv)
}
}
- cls
+ sClass
}
val decls1 = clazz.info.decls.toList flatMap { m: Symbol =>
@@ -687,7 +767,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
val subclasses = specializations(clazz.info.typeParams) filter satisfiable
- subclasses foreach { env =>
+ subclasses foreach {
+ env =>
val spc = specializedClass(env, decls1)
val existing = clazz.owner.info.decl(spc.name)
@@ -696,14 +777,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (existing != NoSymbol)
clazz.owner.info.decls.unlink(existing)
- atPhase(phase.next)(clazz.owner.info.decls enter spc) //!!! assumes fully specialized classes
+ afterSpecialize(clazz.owner.info.decls enter spc) //!!! assumes fully specialized classes
}
if (subclasses.nonEmpty) clazz.resetFlag(FINAL)
cleanAnyRefSpecCache(clazz, decls1)
decls1
}
- /** Expand member `sym' to a set of normalized members. Normalized members
+ /** Expand member `sym` to a set of normalized members. Normalized members
* are monomorphic or polymorphic only in non-specialized types.
*
* Given method m[@specialized T, U](x: T, y: U) it returns
@@ -713,13 +794,21 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* // etc.
*/
private def normalizeMember(owner: Symbol, sym: Symbol, outerEnv: TypeEnv): List[Symbol] = {
- debuglog("normalizeMember: " + sym.fullName)
sym :: (
- if (!sym.isMethod || atPhase(currentRun.typerPhase)(sym.typeParams.isEmpty)) Nil
- else {
+ if (!sym.isMethod || beforeTyper(sym.typeParams.isEmpty)) Nil
+ else if (sym.hasDefault) {
+ /* Specializing default getters is useless, also see SI-7329 . */
+ sym.resetFlag(SPECIALIZED)
+ Nil
+ } else {
+ // debuglog("normalizeMember: " + sym.fullNameAsName('.').decode)
var specializingOn = specializedParams(sym)
val unusedStvars = specializingOn filterNot specializedTypeVars(sym.info)
+ // I think the last condition should be !sym.isArtifact, but that made the
+ // compiler start warning about Tuple1.scala and Tuple2.scala claiming
+ // their type parameters are used in non-specializable positions. Why is
+ // unusedStvars.nonEmpty for these classes???
if (unusedStvars.nonEmpty && currentRun.compiles(sym) && !sym.isSynthetic) {
reporter.warning(sym.pos,
"%s %s unused or used in non-specializable positions.".format(
@@ -730,36 +819,54 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
specializingOn = specializingOn filterNot (unusedStvars contains)
}
for (env0 <- specializations(specializingOn) if needsSpecialization(env0, sym)) yield {
+ // !!! Can't this logic be structured so that the new symbol's name is
+ // known when the symbol is cloned? It is much cleaner not to be mutating
+ // names after the fact. And it adds about a billion lines of
+ // "Renaming value _1 in class Tuple2 to _1$mcZ$sp" to obscure the small
+ // number of other (important) actual symbol renamings.
val tps = survivingParams(sym.info.typeParams, env0)
- val specMember = sym.cloneSymbol(owner).setFlag(SPECIALIZED).resetFlag(DEFERRED)
+ val specMember = sym.cloneSymbol(owner, (sym.flags | SPECIALIZED) & ~DEFERRED) // <-- this needs newName = ...
val env = mapAnyRefsInSpecSym(env0, sym, specMember)
val (keys, vals) = env.toList.unzip
- specMember.name = specializedName(sym, env)
- log("normalizing: " + sym + " to " + specMember + " with params " + tps)
+ specMember setName specializedName(sym, env) // <-- but the name is calculated based on the cloned symbol
+ // debuglog("%s normalizes to %s%s".format(sym, specMember,
+ // if (tps.isEmpty) "" else " with params " + tps.mkString(", ")))
typeEnv(specMember) = outerEnv ++ env
val tps1 = produceTypeParameters(tps, specMember, env)
- tps1 foreach (tp => tp.setInfo(tp.info.subst(keys, vals)))
+ tps1 foreach (_ modifyInfo (_.instantiateTypeParams(keys, vals)))
// the cloneInfo is necessary so that method parameter symbols are cloned at the new owner
- val methodType = sym.info.resultType.subst(keys ++ tps, vals ++ tps1.map(_.tpe)).cloneInfo(specMember)
- specMember setInfo polyType(tps1, methodType)
+ val methodType = sym.info.resultType.instantiateTypeParams(keys ++ tps, vals ++ tps1.map(_.tpe)).cloneInfo(specMember)
+ specMember setInfo GenPolyType(tps1, methodType)
- debuglog("expanded member: " + sym + ": " + sym.info +
- " -> " + specMember +
- ": " + specMember.info +
- " env: " + env
- )
+ debuglog("%s expands to %s in %s".format(sym, specMember.name.decode, pp(env)))
info(specMember) = NormalizedMember(sym)
- overloads(sym) ::= Overload(specMember, env)
+ newOverload(sym, specMember, env)
+ // if this is a class, we insert the normalized member in scope,
+ // if this is a method, there's no attached scope for it (EmptyScope)
+ val decls = owner.info.decls
+ if (decls != EmptyScope)
+ decls.enter(specMember)
specMember
}
}
)
}
- /** Specialize member `m' w.r.t. to the outer environment and the type
+ // concise printing of type env
+ private def pp(env: TypeEnv): String = {
+ env.toList.sortBy(_._1.name) map {
+ case (k, v) =>
+ val vsym = v.typeSymbol
+ if (k == vsym) "" + k.name
+ else k.name + ":" + vsym.name
+
+ } mkString ("env(", ", ", ")")
+ }
+
+ /** Specialize member `m` w.r.t. to the outer environment and the type
* parameters of the innermost enclosing class.
*
* Turns 'private' into 'protected' for members that need specialization.
@@ -767,62 +874,61 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* Return a list of symbols that are specializations of 'sym', owned by 'owner'.
*/
private def specializeMember(owner: Symbol, sym: Symbol, outerEnv: TypeEnv, tps: List[Symbol]): List[Symbol] = {
- def specializeOn(tparams: List[Symbol]): List[Symbol] =
- for (spec0 <- specializations(tparams)) yield {
- val spec = mapAnyRefsInOrigCls(spec0, owner)
- if (sym.isPrivate)
- sym.resetFlag(PRIVATE).setFlag(PROTECTED)
-
- sym.resetFlag(FINAL)
- val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec))
- typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec
- wasSpecializedForTypeVars(specMember) ++= spec collect { case (s, tp) if s.tpe == tp => s }
-
- log("sym " + specMember + " was specialized for type vars " + wasSpecializedForTypeVars(specMember))
- debuglog("added specialized overload: %s in env: %s".format(specMember, typeEnv(specMember)))
-
- overloads(sym) ::= Overload(specMember, spec)
- specMember
+ def specializeOn(tparams: List[Symbol]): List[Symbol] = specializations(tparams) map { spec0 =>
+ val spec = mapAnyRefsInOrigCls(spec0, owner)
+ if (sym.isPrivate) {
+ sym.resetFlag(PRIVATE).setFlag(PROTECTED)
+ debuglog("Set %s to private[%s]".format(sym, sym.enclosingPackage))
}
+ val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec))
+ typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec
+ wasSpecializedForTypeVars(specMember) ++= spec collect { case (s, tp) if s.tpe == tp => s }
+
+ val wasSpec = wasSpecializedForTypeVars(specMember)
+ if (wasSpec.nonEmpty)
+ debuglog("specialized overload for %s in %s".format(specMember, pp(typeEnv(specMember))))
+
+ newOverload(sym, specMember, spec)
+ info(specMember) = SpecialOverload(sym, typeEnv(specMember))
+ specMember
+ }
+
if (sym.isMethod) {
- debuglog("specializeMember %s with tps: %s stvars(sym): %s".format(sym, tps, specializedTypeVars(sym)))
+ val stvars = specializedTypeVars(sym)
+ if (stvars.nonEmpty)
+ debuglog("specialized %s on %s".format(sym.fullLocationString, stvars.map(_.name).mkString(", ")))
val tps1 = if (sym.isConstructor) tps filter (sym.info.paramTypes contains _) else tps
- val tps2 = tps1 intersect specializedTypeVars(sym).toList
+ val tps2 = tps1 filter stvars
if (!sym.isDeferred)
addConcreteSpecMethod(sym)
- val ms = specializeOn(tps2)
- ms foreach (m => info(m) = SpecialOverload(sym, typeEnv(m)))
- ms
+ specializeOn(tps2)
}
else Nil
}
- /** Return the specialized overload of `m', in the given environment. */
+ /** Return the specialized overload of `m`, in the given environment. */
private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv): Symbol = {
- val specMember = sym.cloneSymbol(owner) // this method properly duplicates the symbol's info
- specMember.name = specializedName(sym, env)
-
- (specMember
- setInfo subst(env, specMember.info.asSeenFrom(owner.thisType, sym.owner))
- setFlag (SPECIALIZED)
- resetFlag (DEFERRED | CASEACCESSOR | ACCESSOR | LAZY)
+ val newFlags = (sym.flags | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR)
+ // this method properly duplicates the symbol's info
+ ( sym.cloneSymbol(owner, newFlags, newName = specializedName(sym, env))
+ modifyInfo (info => subst(env, info.asSeenFrom(owner.thisType, sym.owner)))
)
}
- /** For each method m that overrides inherited method m', add a special
- * overload method `om' that overrides the corresponding overload in the
+ /** For each method m that overrides an inherited method m', add a special
+ * overload method `om` that overrides the corresponding overload in the
* superclass. For the following example:
*
* class IntFun extends Function1[Int, Int] {
- * def apply(x: Int): Int = ..
+ * def apply(x: Int): Int = ..
* }
*
- * this method will return List('apply$spec$II')
+ * this method will return List('apply$mcII$sp')
*/
- private def specialOverrides(clazz: Symbol): List[Symbol] = {
+ private def specialOverrides(clazz: Symbol) = logResultIf[List[Symbol]]("specialized overrides in " + clazz, _.nonEmpty) {
/** Return the overridden symbol in syms that needs a specialized overriding symbol,
* together with its specialization environment. The overridden symbol may not be
* the closest to 'overriding', in a given hierarchy.
@@ -833,7 +939,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
def needsSpecialOverride(overriding: Symbol): (Symbol, TypeEnv) = {
def checkOverriddenTParams(overridden: Symbol) {
- for ((baseTvar, derivedTvar) <- overridden.info.typeParams.zip(overriding.info.typeParams)) {
+ foreach2(overridden.info.typeParams, overriding.info.typeParams) { (baseTvar, derivedTvar) =>
val missing = concreteTypes(baseTvar).toSet -- concreteTypes(derivedTvar).toSet
if (missing.nonEmpty) {
reporter.error(derivedTvar.pos,
@@ -845,24 +951,21 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
if (!overriding.isParamAccessor) {
for (overridden <- overriding.allOverriddenSymbols) {
- debuglog(
- "Overridden: " + overridden.fullName +
- ": " + overridden.info +
- "\n by " + overriding.fullName +
- ": " + overriding.info
- )
val stvars = specializedTypeVars(overridden.info)
if (stvars.nonEmpty) {
- debuglog("\t\tspecializedTVars: " + stvars)
+ debuglog("specialized override of %s by %s%s".format(overridden.fullLocationString, overriding.fullLocationString,
+ if (stvars.isEmpty) "" else stvars.map(_.name).mkString("(", ", ", ")")))
+
if (currentRun compiles overriding)
checkOverriddenTParams(overridden)
- val env = unify(overridden.info, overriding.info, emptyEnv, false)
- def atNext = atPhase(phase.next)(overridden.owner.info.decl(specializedName(overridden, env)))
+ val env = unify(overridden.info, overriding.info, emptyEnv, false, true)
+ def atNext = afterSpecialize(overridden.owner.info.decl(specializedName(overridden, env)))
- debuglog("\t\tenv: " + env + "isValid: " + TypeEnv.isValid(env, overridden) + "found: " + atNext)
- if (TypeEnv.restrict(env, stvars).nonEmpty && TypeEnv.isValid(env, overridden) && atNext != NoSymbol)
+ if (TypeEnv.restrict(env, stvars).nonEmpty && TypeEnv.isValid(env, overridden) && atNext != NoSymbol) {
+ debuglog(" " + pp(env) + " found " + atNext)
return (overridden, env)
+ }
}
}
}
@@ -873,32 +976,34 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case (NoSymbol, _) => None
case (overridden, env) =>
val om = specializedOverload(clazz, overridden, env)
- log("Added specialized overload %s for %s in env: %s with type: %s".format(om, overriding.fullName, env, om.info))
+ foreachWithIndex(om.paramss) { (params, i) =>
+ foreachWithIndex(params) { (param, j) =>
+ param.name = overriding.paramss(i)(j).name // SI-6555 Retain the parameter names from the subclass.
+ }
+ }
+ debuglog("specialized overload %s for %s in %s: %s".format(om, overriding.name.decode, pp(env), om.info))
typeEnv(om) = env
addConcreteSpecMethod(overriding)
- info(om) = (
- if (overriding.isDeferred) { // abstract override
- debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName)
- Forward(overriding)
- }
- else {
- // if the override is a normalized member, 'om' gets the
- // implementation from its original target, and adds the
- // environment of the normalized member (that is, any
- // specialized /method/ type parameter bindings)
- val impl = info get overriding match {
- case Some(NormalizedMember(target)) =>
- typeEnv(om) = env ++ typeEnv(overriding)
- target
- case _ =>
- overriding
- }
- info(overriding) = Forward(om setPos overriding.pos)
- SpecialOverride(impl)
+ if (overriding.isDeferred) { // abstract override
+ debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName)
+ info(om) = Forward(overriding)
+ }
+ else {
+ // if the override is a normalized member, 'om' gets the
+ // implementation from its original target, and adds the
+ // environment of the normalized member (that is, any
+ // specialized /method/ type parameter bindings)
+ info get overriding match {
+ case Some(NormalizedMember(target)) =>
+ typeEnv(om) = env ++ typeEnv(overriding)
+ info(om) = Forward(target)
+ case _ =>
+ info(om) = SpecialOverride(overriding)
}
- )
- overloads(overriding) ::= Overload(om, env)
- ifDebug(atPhase(phase.next)(assert(
+ info(overriding) = Forward(om setPos overriding.pos)
+ }
+ newOverload(overriding, om, env)
+ ifDebug(afterSpecialize(assert(
overridden.owner.info.decl(om.name) != NoSymbol,
"Could not find " + om.name + " in " + overridden.owner.info.decls))
)
@@ -908,70 +1013,83 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
case object UnifyError extends scala.util.control.ControlThrowable
+ private[this] def unifyError(tp1: Any, tp2: Any): Nothing = {
+ log("unifyError" + ((tp1, tp2)))
+ throw UnifyError
+ }
/** Return the most general type environment that specializes tp1 to tp2.
* It only allows binding of type parameters annotated with @specialized.
* Fails if such an environment cannot be found.
*
* If `strict` is true, a UnifyError is thrown if unification is impossible.
+ *
+ * If `tparams` is true, then the methods tries to unify over type params in polytypes as well.
*/
- private def unify(tp1: Type, tp2: Type, env: TypeEnv, strict: Boolean): TypeEnv = (tp1, tp2) match {
- case (TypeRef(_, sym1, _), _) if isSpecialized(sym1) =>
- log("Unify - basic case: " + tp1 + ", " + tp2)
- if (isValueClass(tp2.typeSymbol) || isSpecializedAnyRefSubtype(tp2, sym1))
+ private def unify(tp1: Type, tp2: Type, env: TypeEnv, strict: Boolean, tparams: Boolean = false): TypeEnv = (tp1, tp2) match {
+ case (TypeRef(_, sym1, _), _) if sym1.isSpecialized =>
+ debuglog("Unify " + tp1 + ", " + tp2)
+ if (isPrimitiveValueClass(tp2.typeSymbol) || isSpecializedAnyRefSubtype(tp2, sym1))
env + ((sym1, tp2))
+ else if (isSpecializedAnyRefSubtype(tp2, sym1))
+ env + ((sym1, tp2)) // env + ((sym1, AnyRefClass.tpe))
+ else if (strict)
+ unifyError(tp1, tp2)
else
- if (strict) throw UnifyError else env
+ env
case (TypeRef(_, sym1, args1), TypeRef(_, sym2, args2)) =>
- log("Unify - both type refs: " + tp1 + " and " + tp2 + " with args " + (args1, args2) + " - ")
- if (strict && args1.length != args2.length) throw UnifyError
+ if (args1.nonEmpty || args2.nonEmpty)
+ debuglog("Unify types " + tp1 + " and " + tp2)
+
+ if (strict && args1.length != args2.length) unifyError(tp1, tp2)
val e = unify(args1, args2, env, strict)
- log("unified to: " + e)
+ if (e.nonEmpty) debuglog("unified to: " + e)
e
case (TypeRef(_, sym1, _), _) if sym1.isTypeParameterOrSkolem =>
env
case (MethodType(params1, res1), MethodType(params2, res2)) =>
- if (strict && params1.length != params2.length) throw UnifyError
- log("Unify - method types: " + tp1 + " and " + tp2)
+ if (strict && params1.length != params2.length) unifyError(tp1, tp2)
+ debuglog("Unify methods " + tp1 + " and " + tp2)
unify(res1 :: (params1 map (_.tpe)), res2 :: (params2 map (_.tpe)), env, strict)
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
- if (strict && tparams1.length != tparams2.length) throw UnifyError
- log("Unify - poly types: " + tp1 + " and " + tp2)
- unify(res1, res2, env, strict)
- case (PolyType(_, res), other) =>
- unify(res, other, env, strict)
- case (ThisType(_), ThisType(_)) => env
- case (_, SingleType(_, _)) => unify(tp1, tp2.underlying, env, strict)
- case (SingleType(_, _), _) => unify(tp1.underlying, tp2, env, strict)
- case (ThisType(_), _) => unify(tp1.widen, tp2, env, strict)
- case (_, ThisType(_)) => unify(tp1, tp2.widen, env, strict)
- case (RefinedType(_, _), RefinedType(_, _)) => env
- case (AnnotatedType(_, tp1, _), tp2) => unify(tp2, tp1, env, strict)
- case (ExistentialType(_, res1), _) => unify(tp2, res1, env, strict)
+ debuglog("Unify polytypes " + tp1 + " and " + tp2)
+ if (strict && tparams1.length != tparams2.length)
+ unifyError(tp1, tp2)
+ else if (tparams && tparams1.length == tparams2.length)
+ unify(res1 :: tparams1.map(_.info), res2 :: tparams2.map(_.info), env, strict)
+ else
+ unify(res1, res2, env, strict)
+ case (PolyType(_, res), other) => unify(res, other, env, strict)
+ case (ThisType(_), ThisType(_)) => env
+ case (_, SingleType(_, _)) => unify(tp1, tp2.underlying, env, strict)
+ case (SingleType(_, _), _) => unify(tp1.underlying, tp2, env, strict)
+ case (ThisType(_), _) => unify(tp1.widen, tp2, env, strict)
+ case (_, ThisType(_)) => unify(tp1, tp2.widen, env, strict)
+ case (RefinedType(_, _), RefinedType(_, _)) => env
+ case (AnnotatedType(_, tp1, _), tp2) => unify(tp2, tp1, env, strict)
+ case (ExistentialType(_, res1), _) => unify(tp2, res1, env, strict)
+ case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) => unify(List(lo1, hi1), List(lo2, hi2), env, strict)
case _ =>
- log("don't know how to unify %s [%s] with %s [%s]".format(tp1, tp1.getClass, tp2, tp2.getClass))
+ debuglog("don't know how to unify %s [%s] with %s [%s]".format(tp1, tp1.getClass, tp2, tp2.getClass))
env
}
- private def unify(tp1: List[Type], tp2: List[Type], env: TypeEnv, strict: Boolean): TypeEnv =
- tp1.zip(tp2).foldLeft(env) { (env, args) =>
+ private def unify(tp1: List[Type], tp2: List[Type], env: TypeEnv, strict: Boolean): TypeEnv = {
+ if (tp1.isEmpty || tp2.isEmpty) env
+ else (tp1 zip tp2).foldLeft(env) { (env, args) =>
if (!strict) unify(args._1, args._2, env, strict)
else {
val nenv = unify(args._1, args._2, emptyEnv, strict)
if (env.keySet intersect nenv.keySet isEmpty) env ++ nenv
else {
- log("could not unify: u(" + args._1 + ", " + args._2 + ") yields " + nenv + ", env: " + env)
- throw UnifyError
+ debuglog("could not unify: u(" + args._1 + ", " + args._2 + ") yields " + nenv + ", env: " + env)
+ unifyError(tp1, tp2)
}
}
}
-
- /** Map class symbols to the type environments where they were created. */
- val typeEnv: mutable.Map[Symbol, TypeEnv] = new mutable.HashMap[Symbol, TypeEnv] {
- override def default(key: Symbol) = emptyEnv
}
- /** Apply type bindings in the given environment `env' to all declarations. */
+ /** Apply type bindings in the given environment `env` to all declarations. */
private def subst(env: TypeEnv, decls: List[Symbol]): List[Symbol] =
decls map subst(env)
@@ -981,40 +1099,29 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* primitive type losing the annotation.
*/
private def subst(env: TypeEnv, tpe: Type): Type = {
- class FullTypeMap(from: List[Symbol], to: List[Type]) extends SubstTypeMap(from, to) {
+ class FullTypeMap(from: List[Symbol], to: List[Type]) extends SubstTypeMap(from, to) with AnnotationFilter {
+ def keepAnnotation(annot: AnnotationInfo) = !(annot matches uncheckedVarianceClass)
+
override def mapOver(tp: Type): Type = tp match {
case ClassInfoType(parents, decls, clazz) =>
val parents1 = parents mapConserve this
- val declsList = decls.toList
- val decls1 = mapOver(declsList)
-
- if ((parents1 eq parents) && (decls1 eq declsList)) tp
- else ClassInfoType(parents1, new Scope(decls1), clazz)
-
- case AnnotatedType(annots, atp, selfsym) =>
- val annots1 = mapOverAnnotations(annots)
- val atp1 = this(atp)
-
- if ((annots1 eq annots) && (atp1 eq atp)) tp
- else if (annots1.isEmpty) atp1
- else if (atp1 eq atp) AnnotatedType(annots1, atp1, selfsym)
- else annots1.filter(_.atp.typeSymbol != uncheckedVarianceClass) match {
- case Nil => atp1
- case annots2 => AnnotatedType(annots2, atp1, selfsym)
- }
- case _ => super.mapOver(tp)
+ val decls1 = mapOver(decls)
+
+ if ((parents1 eq parents) && (decls1 eq decls)) tp
+ else ClassInfoType(parents1, decls1, clazz)
+ case _ =>
+ super.mapOver(tp)
}
}
val (keys, values) = env.toList.unzip
(new FullTypeMap(keys, values))(tpe)
}
- private def subst(env: TypeEnv)(decl: Symbol): Symbol = {
- decl setInfo (subst(env, decl.info) match {
- case MethodType(args, _) if decl.isConstructor => MethodType(args, decl.owner.tpe)
- case tpe => tpe
- })
- }
+ private def subst(env: TypeEnv)(decl: Symbol): Symbol =
+ decl modifyInfo (info =>
+ if (decl.isConstructor) MethodType(subst(env, info).params, decl.owner.tpe)
+ else subst(env, info)
+ )
/** Checks if the type parameter symbol is not specialized
* and is used as type parameters when extending a class with a specialized
@@ -1039,41 +1146,39 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case _ => //log("nope")
}
+ private def unspecializableClass(tp: Type) = (
+ definitions.isRepeatedParamType(tp) // ???
+ || tp.typeSymbol.isJavaDefined
+ || tp.typeSymbol.isPackageClass
+ )
+
/** Type transformation. It is applied to all symbols, compiled or loaded.
* If it is a 'no-specialization' run, it is applied only to loaded symbols.
*/
override def transformInfo(sym: Symbol, tpe: Type): Type = {
if (settings.nospecialization.value && currentRun.compiles(sym)) tpe
- else tpe match {
- case PolyType(targs, ClassInfoType(base, decls, clazz))
- if clazz != RepeatedParamClass
- && clazz != JavaRepeatedParamClass
- && !clazz.isJavaDefined =>
- val parents = base map specializedType
- debuglog("transformInfo (poly) " + clazz + " with parents1: " + parents + " ph: " + phase)
-
- polyType(targs, ClassInfoType(
- parents,
- new Scope(specializeClass(clazz, typeEnv(clazz)) ++ specialOverrides(clazz)),
- clazz)
- )
- case ClassInfoType(base, decls, clazz) if !clazz.isPackageClass && !clazz.isJavaDefined =>
- atPhase(phase.next)(base map (_.typeSymbol.info))
- // side effecting? parents is not used except to log.
- val parents = base map specializedType
- debuglog("transformInfo " + clazz + " with parents1: " + parents + " ph: " + phase)
- ClassInfoType(
- base map specializedType,
- new Scope(specializeClass(clazz, typeEnv(clazz)) ++ specialOverrides(clazz)),
- clazz
- )
+ else tpe.resultType match {
+ case cinfo @ ClassInfoType(parents, decls, clazz) if !unspecializableClass(cinfo) =>
+ val tparams = tpe.typeParams
+ if (tparams.isEmpty)
+ afterSpecialize(parents map (_.typeSymbol.info))
+
+ val parents1 = parents mapConserve specializedType
+ if (parents ne parents1) {
+ debuglog("specialization transforms %s%s parents to %s".format(
+ if (tparams.nonEmpty) "(poly) " else "", clazz, parents1)
+ )
+ }
+ val newScope = newScopeWith(specializeClass(clazz, typeEnv(clazz)) ++ specialOverrides(clazz): _*)
+ // If tparams.isEmpty, this is just the ClassInfoType.
+ GenPolyType(tparams, ClassInfoType(parents1, newScope, clazz))
case _ =>
tpe
}
}
- /** Is any type variable in `env' conflicting with any if its type bounds, when
- * type bindings in `env' are taken into account?
+ /** Is any type variable in `env` conflicting with any if its type bounds, when
+ * type bindings in `env` are taken into account?
*
* A conflicting type environment could still be satisfiable.
*/
@@ -1103,7 +1208,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (warnings)
reporter.warning(tvar.pos, "Bounds prevent specialization of " + tvar)
- log("specvars: " +
+ debuglog("specvars: " +
tvar.info.bounds.lo + ": " +
specializedTypeVars(tvar.info.bounds.lo) + " " +
subst(env, tvar.info.bounds.hi) + ": " +
@@ -1114,9 +1219,85 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- class Duplicator extends {
+ def satisfiabilityConstraints(env: TypeEnv): Option[TypeEnv] = {
+ val noconstraints = Some(emptyEnv)
+ def matches(tpe1: Type, tpe2: Type): Option[TypeEnv] = {
+ val t1 = subst(env, tpe1)
+ val t2 = subst(env, tpe2)
+ // log("---------> " + tpe1 + " matches " + tpe2)
+ // log(t1 + ", " + specializedTypeVars(t1))
+ // log(t2 + ", " + specializedTypeVars(t2))
+ // log("unify: " + unify(t1, t2, env, false, false) + " in " + env)
+ if (t1 <:< t2) noconstraints
+ else if (specializedTypeVars(t1).nonEmpty) Some(unify(t1, t2, env, false, false) -- env.keys)
+ else if (specializedTypeVars(t2).nonEmpty) Some(unify(t2, t1, env, false, false) -- env.keys)
+ else None
+ }
+
+ env.foldLeft[Option[TypeEnv]](noconstraints) {
+ case (constraints, (tvar, tpe)) =>
+ val loconstraints = matches(tvar.info.bounds.lo, tpe)
+ val hiconstraints = matches(tpe, tvar.info.bounds.hi)
+ val allconstraints = for (c <- constraints; l <- loconstraints; h <- hiconstraints) yield c ++ l ++ h
+ allconstraints
+ }
+ }
+
+ /** This duplicator additionally performs casts of expressions if that is allowed by the `casts` map. */
+ class Duplicator(casts: Map[Symbol, Type]) extends {
val global: SpecializeTypes.this.global.type = SpecializeTypes.this.global
- } with typechecker.Duplicators
+ } with typechecker.Duplicators {
+ private val (castfrom, castto) = casts.unzip
+ private object CastMap extends SubstTypeMap(castfrom.toList, castto.toList)
+
+ class BodyDuplicator(_context: Context) extends super.BodyDuplicator(_context) {
+ override def castType(tree: Tree, pt: Type): Tree = {
+ // log(" expected type: " + pt)
+ // log(" tree type: " + tree.tpe)
+ tree.tpe = if (tree.tpe != null) fixType(tree.tpe) else null
+ // log(" tree type: " + tree.tpe)
+ val ntree = if (tree.tpe != null && !(tree.tpe <:< pt)) {
+ val casttpe = CastMap(tree.tpe)
+ if (casttpe <:< pt) gen.mkCast(tree, casttpe)
+ else if (casttpe <:< CastMap(pt)) gen.mkCast(tree, pt)
+ else tree
+ } else tree
+ ntree.tpe = null
+ ntree
+ }
+ }
+
+ protected override def newBodyDuplicator(context: Context) = new BodyDuplicator(context)
+ }
+
+ /** Introduced to fix SI-7343: Phase ordering problem between Duplicators and Specialization.
+ * brief explanation: specialization rewires class parents during info transformation, and
+ * the new info then guides the tree changes. But if a symbol is created during duplication,
+ * which runs after specialization, its info is not visited and thus the corresponding tree
+ * is not specialized. One manifestation is the following:
+ * ```
+ * object Test {
+ * class Parent[@specialized(Int) T]
+ *
+ * def spec_method[@specialized(Int) T](t: T, expectedXSuper: String) = {
+ * class X extends Parent[T]()
+ * // even in the specialized variant, the local X class
+ * // doesn't extend Parent$mcI$sp, since its symbol has
+ * // been created after specialization and was not seen
+ * // by specialzation's info transformer.
+ * ...
+ * }
+ * }
+ * ```
+ * We fix this by forcing duplication to take place before specialization.
+ *
+ * Note: The constructors phase (which also uses duplication) comes after erasure and uses the
+ * post-erasure typer => we must protect it from the beforeSpecialization phase shifting.
+ */
+ class SpecializationDuplicator(casts: Map[Symbol, Type]) extends Duplicator(casts) {
+ override def retyped(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol, env: scala.collection.Map[Symbol, Type]): Tree =
+ beforeSpecialize(super.retyped(context, tree, oldThis, newThis, env))
+ }
/** A tree symbol substituter that substitutes on type skolems.
* If a type parameter is a skolem, it looks for the original
@@ -1166,16 +1347,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Return the generic class corresponding to this specialized class. */
def originalClass(clazz: Symbol): Symbol =
- if (hasSpecializedFlag(clazz)) {
+ if (clazz.isSpecialized) {
val (originalName, _, _) = nme.splitSpecializedName(clazz.name)
clazz.owner.info.decl(originalName).suchThat(_.isClass)
} else NoSymbol
- def illegalSpecializedInheritance(clazz: Symbol): Boolean = {
- hasSpecializedFlag(clazz) && originalClass(clazz).info.parents.exists { p =>
- hasSpecializedParams(p.typeSymbol) && !p.typeSymbol.isTrait
- }
- }
+ def illegalSpecializedInheritance(clazz: Symbol): Boolean = (
+ clazz.isSpecialized
+ && originalClass(clazz).parentSymbols.exists(p => hasSpecializedParams(p) && !p.isTrait)
+ )
def specializeCalls(unit: CompilationUnit) = new TypingTransformer(unit) {
/** Map a specializable method to it's rhs, when not deferred. */
@@ -1188,12 +1368,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
class CollectMethodBodies extends Traverser {
override def traverse(tree: Tree) = tree match {
- case DefDef(mods, name, tparams, vparams :: Nil, tpt, rhs) =>
+ case DefDef(_, _, _, vparams :: Nil, _, rhs) =>
if (concreteSpecMethods(tree.symbol) || tree.symbol.isConstructor) {
- debuglog("!!! adding body of a defdef %s, symbol %s: %s".format(tree, tree.symbol, rhs))
+ // debuglog("!!! adding body of a defdef %s, symbol %s: %s".format(tree, tree.symbol, rhs))
body(tree.symbol) = rhs
// body(tree.symbol) = tree // whole method
- parameters(tree.symbol) = vparams map (_.symbol)
+ parameters(tree.symbol) = vparams.map(_.symbol)
concreteSpecMethods -= tree.symbol
} // no need to descend further down inside method bodies
@@ -1206,132 +1386,195 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- def doesConform(origSymbol: Symbol, treeType: Type, memberType: Type, env: TypeEnv) =
+ def doesConform(origSymbol: Symbol, treeType: Type, memberType: Type, env: TypeEnv) = {
(treeType =:= memberType) || { // anyref specialization
memberType match {
case PolyType(_, resTpe) =>
- log("Conformance for anyref - polytype with result type: " + resTpe + " and " + treeType + "\nOrig. sym.: " + origSymbol)
+ debuglog("Conformance for anyref - polytype with result type: " + resTpe + " and " + treeType + "\nOrig. sym.: " + origSymbol)
try {
val e = unify(origSymbol.tpe, memberType, emptyEnv, true)
- log("obtained env: " + e)
+ debuglog("obtained env: " + e)
e.keySet == env.keySet
} catch {
- case _ =>
- log("Could not unify.")
+ case _: Throwable =>
+ debuglog("Could not unify.")
false
}
case _ => false
}
}
+ }
+
+ def reportError[T](body: =>T)(handler: TypeError => T): T =
+ try body
+ catch {
+ case te: TypeError =>
+ reporter.error(te.pos, te.msg)
+ handler(te)
+ }
- override def transform(tree: Tree): Tree = {
+ override def transform(tree: Tree): Tree =
+ reportError { transform1(tree) } {_ => tree}
+
+ def transform1(tree: Tree) = {
val symbol = tree.symbol
/** The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
def specSym(qual: Tree): Option[Symbol] = {
val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
- log("[specSym] checking for rerouting: %s with \n\tsym.tpe: %s, \n\ttree.tpe: %s \n\tenv: %s \n\tname: %s"
+ debuglog("[specSym] checking for rerouting: %s with \n\tsym.tpe: %s, \n\ttree.tpe: %s \n\tenv: %s \n\tname: %s"
.format(tree, symbol.tpe, tree.tpe, env, specializedName(symbol, env)))
- if (!env.isEmpty) { // a method?
+ if (env.nonEmpty) { // a method?
val specCandidates = qual.tpe.member(specializedName(symbol, env))
- val specMember = specCandidates suchThat (s => doesConform(symbol, tree.tpe, s.tpe, env))
- log("[specSym] found: " + specCandidates.tpe + ", instantiated as: " + tree.tpe)
- log("[specSym] found specMember: " + specMember)
+ val specMember = specCandidates suchThat { s =>
+ doesConform(symbol, tree.tpe, qual.tpe.memberType(s), env)
+ }
+
+ debuglog("[specSym] found: " + specCandidates.tpe + ", instantiated as: " + tree.tpe)
+ debuglog("[specSym] found specMember: " + specMember)
if (specMember ne NoSymbol)
if (TypeEnv.includes(typeEnv(specMember), env)) Some(specMember)
else {
- log("wrong environments for specialized member: \n\ttypeEnv(%s) = %s\n\tenv = %s".format(specMember, typeEnv(specMember), env))
+ debuglog("wrong environments for specialized member: \n\ttypeEnv(%s) = %s\n\tenv = %s".format(specMember, typeEnv(specMember), env))
None
}
else None
} else None
}
- def maybeTypeApply(fun: Tree, targs: List[Tree]) =
- if (targs.isEmpty) fun else TypeApply(fun, targs)
+ /** Computes residual type parameters after rewiring, like "String" in the following example:
+ * ```
+ * def specMe[@specialized T, U](t: T, u: U) = ???
+ * specMe[Int, String](1, "2") => specMe$mIc$sp[String](1, "2")
+ * ```
+ */
+ def computeResidualTypeVars(baseTree: Tree, specTree: Tree, baseTargs: List[Tree], env: TypeEnv) = {
+ val baseSym: Symbol = baseTree.symbol
+ val specSym: Symbol = specTree.symbol
+ val residualTargs = baseSym.info.typeParams zip baseTargs collect {
+ case (tvar, targ) if !env.contains(tvar) || !isPrimitiveValueClass(env(tvar).typeSymbol) => targ
+ }
+
+ if (specSym.info.typeParams.isEmpty && residualTargs.nonEmpty) {
+ log("!!! Type args to be applied, but symbol says no parameters: " + ((specSym.defString, residualTargs)))
+ baseTree
+ }
+ else {
+ ifDebug(assert(residualTargs.length == specSym.info.typeParams.length,
+ "residual: %s, tparams: %s, env: %s".format(residualTargs, specSym.info.typeParams, env))
+ )
+
+ val tree1 = gen.mkTypeApply(specTree, residualTargs)
+ debuglog("rewrote " + tree + " to " + tree1)
+ localTyper.typedOperator(atPos(tree.pos)(tree1))
+ }
+ }
curTree = tree
tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
- if (findSpec(tpt.tpe).typeSymbol ne tpt.tpe.typeSymbol) {
- log("** instantiated specialized type: " + findSpec(tpt.tpe))
- atPos(tree.pos)(
- localTyper.typed(
- Apply(
- Select(New(TypeTree(findSpec(tpt.tpe))), nme.CONSTRUCTOR),
- transformTrees(args))))
+ def transformNew = {
+ debuglog("Attempting to specialize new %s(%s)".format(tpt, args.mkString(", ")))
+ val found = findSpec(tpt.tpe)
+ if (found.typeSymbol ne tpt.tpe.typeSymbol) {
+ // the ctor can be specialized
+ debuglog("** instantiated specialized type: " + found)
+ reportError {
+ localTyper.typedPos(tree.pos)(New(found, transformTrees(args): _*))
+ } {
+ _ => super.transform(tree)
+ }
} else super.transform(tree)
+ }
+ transformNew
+
+ case Apply(sel @ Select(sup @ Super(qual, name), name1), args)
+ if (sup.symbol.info.parents != beforePrevPhase(sup.symbol.info.parents)) =>
+ def transformSuperApply = {
+
+ def parents = sup.symbol.info.parents
+ debuglog(tree + " parents changed from: " + beforePrevPhase(parents) + " to: " + parents)
- case TypeApply(Select(qual, name), targs)
- if (!specializedTypeVars(symbol.info).isEmpty && name != nme.CONSTRUCTOR) =>
+ val res = localTyper.typed(
+ Apply(Select(Super(qual, name) setPos sup.pos, name1) setPos sel.pos, transformTrees(args)) setPos tree.pos)
+ debuglog("retyping call to super, from: " + symbol + " to " + res.symbol)
+ res
+ }
+ transformSuperApply
+
+ // This rewires calls to specialized methods defined in a class (which have a receiver)
+ // class C {
+ // def foo[@specialized T](t: T): T = t
+ // C.this.foo(3) // TypeApply(Select(This(C), foo), List(Int)) => C.this.foo$mIc$sp(3)
+ // }
+ case TypeApply(sel @ Select(qual, name), targs)
+ if (specializedTypeVars(symbol.info).nonEmpty && name != nme.CONSTRUCTOR) =>
debuglog("checking typeapp for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe)
val qual1 = transform(qual)
- // log(">>> TypeApply: " + tree + ", qual1: " + qual1)
+ log(">>> TypeApply: " + tree + ", qual1: " + qual1)
specSym(qual1) match {
case Some(specMember) =>
debuglog("found " + specMember.fullName)
ifDebug(assert(symbol.info.typeParams.length == targs.length, symbol.info.typeParams + " / " + targs))
val env = typeEnv(specMember)
- val residualTargs = symbol.info.typeParams zip targs collect {
- case (tvar, targ) if !env.contains(tvar) || !isValueClass(env(tvar).typeSymbol) => targ
- }
+ computeResidualTypeVars(tree, gen.mkAttributedSelect(qual1, specMember), targs, env)
- ifDebug(assert(residualTargs.length == specMember.info.typeParams.length,
- "residual: %s, tparams: %s, env: %s".format(residualTargs, symbol.info.typeParams, env))
- )
-
- val tree1 = maybeTypeApply(Select(qual1, specMember), residualTargs)
- log("rewrote " + tree + " to " + tree1)
- localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
+ case None =>
+ treeCopy.TypeApply(tree, treeCopy.Select(sel, qual1, name), super.transformTrees(targs))
+ // See pos/exponential-spec.scala - can't call transform on the whole tree again.
+ // super.transform(tree)
+ }
- case None => super.transform(tree)
+ // This rewires calls to specialized methods defined in the local scope. For example:
+ // def outerMethod = {
+ // def foo[@specialized T](t: T): T = t
+ // foo(3) // TypeApply(Ident(foo), List(Int)) => foo$mIc$sp(3)
+ // }
+ case TypeApply(sel @ Ident(name), targs) if name != nme.CONSTRUCTOR =>
+ val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
+ if (env.isEmpty) super.transform(tree)
+ else {
+ overloads(symbol) find (_ matchesEnv env) match {
+ case Some(Overload(specMember, _)) => computeResidualTypeVars(tree, Ident(specMember), targs, env)
+ case _ => super.transform(tree)
+ }
}
- case Select(Super(_, _), name) if illegalSpecializedInheritance(currentClass) =>
+ case Select(Super(_, _), _) if illegalSpecializedInheritance(currentClass) =>
val pos = tree.pos
- log(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.")
- log(pos.lineContent)
+ debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.\n" + pos.lineContent)
tree
- case Select(qual, name) =>
- debuglog("[%s] looking at Select: %s sym: %s: %s [tree.tpe: %s]".format(
- tree.pos.line, tree, symbol, symbol.info, tree.tpe))
-
- //log("!!! select " + tree + " -> " + symbol.info + " specTypeVars: " + specializedTypeVars(symbol.info))
- if (specializedTypeVars(symbol.info).nonEmpty && name != nme.CONSTRUCTOR) {
- // log("!!! unifying " + (symbol, symbol.tpe) + " and " + (tree, tree.tpe))
- val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
- // log("!!! found env: " + env + "; overloads: " + overloads(symbol))
- debuglog("checking for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe + " env: " + env)
- if (!env.isEmpty) {
- val specMember = overload(symbol, env)
- //log("!!! found member: " + specMember)
- if (specMember.isDefined) {
- // log("** routing " + tree + " to " + specMember.get.sym.fullName)
- localTyper.typedOperator(atPos(tree.pos)(Select(transform(qual), specMember.get.sym.name)))
- } else {
- val qual1 = transform(qual)
+ case Select(qual, name) if name != nme.CONSTRUCTOR && specializedTypeVars(symbol.info).nonEmpty =>
+ debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe))
+ val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
+ if (env.isEmpty) super.transform(tree)
+ else {
+ val qual1 = transform(qual)
+ def reselect(member: Symbol) = {
+ val newSelect = atPos(tree.pos)(Select(qual1, member))
+ if (member.isMethod) localTyper typedOperator newSelect
+ else localTyper typed newSelect
+ }
+ overloads(symbol) find (_ matchesEnv env) match {
+ case Some(Overload(member, _)) => reselect(member)
+ case _ =>
val specMember = qual1.tpe.member(specializedName(symbol, env)).suchThat(_.tpe matches subst(env, symbol.tpe))
- if (specMember ne NoSymbol) {
- // log("** using spec member " + specMember + ": " + specMember.tpe)
- val tree1 = atPos(tree.pos)(Select(qual1, specMember))
- if (specMember.isMethod)
- localTyper.typedOperator(tree1)
- else
- localTyper.typed(tree1)
- } else
+ if (specMember ne NoSymbol)
+ reselect(specMember)
+ else
treeCopy.Select(tree, qual1, name)
- }
- } else
+ }
+ }
+ case Select(qual, _) =>
+ overloads(symbol) find (_ matchesSym symbol) match {
+ case Some(Overload(member, _)) =>
+ val newTree = Select(transform(qual), member)
+ debuglog(s"** routing $tree to ${member.fullName} tree: $newTree")
+ localTyper.typedOperator(atPos(tree.pos)(newTree))
+ case None =>
super.transform(tree)
- } else overloads(symbol).find(_.sym.info =:= symbol.info) match {
- case Some(specMember) =>
- val qual1 = transform(qual)
- debuglog("** routing " + tree + " to " + specMember.sym.fullName + " tree: " + Select(qual1, specMember.sym))
- localTyper.typedOperator(atPos(tree.pos)(Select(qual1, specMember.sym)))
- case None =>
- super.transform(tree)
}
case PackageDef(pid, stats) =>
@@ -1342,153 +1585,185 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
case Template(parents, self, body) =>
+ def transformTemplate = {
val specMembers = makeSpecializedMembers(tree.symbol.enclClass) ::: (implSpecClasses(body) map localTyper.typed)
if (!symbol.isPackageClass)
(new CollectMethodBodies)(tree)
- val parents1 = currentOwner.info.parents.zipWithIndex.map {
- case (tpe, idx) => TypeTree(tpe) setPos parents(idx).pos
- }
+ val parents1 = map2(currentOwner.info.parents, parents)((tpe, parent) =>
+ TypeTree(tpe) setPos parent.pos)
+
treeCopy.Template(tree,
parents1 /*currentOwner.info.parents.map(tpe => TypeTree(tpe) setPos parents.head.pos)*/ ,
self,
atOwner(currentOwner)(transformTrees(body ::: specMembers)))
+ }
+ transformTemplate
- case ddef @ DefDef(mods, name, tparams, vparamss, tpt, rhs) if info.isDefinedAt(symbol) =>
+ case ddef @ DefDef(_, _, _, vparamss, _, _) if info.isDefinedAt(symbol) =>
+ def transformDefDef = {
// log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol))
+ def reportTypeError(body: =>Tree) = reportError(body)(_ => ddef)
+
if (symbol.isConstructor) {
- val t = atOwner(symbol) {
- val superRef: Tree = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
- forwardCtorCall(tree.pos, superRef, vparamss, symbol.owner)
- }
- if (symbol.isPrimaryConstructor) localTyper typed {
- atPos(symbol.pos)(treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, Block(List(t), Literal(()))))
- } else {
- // duplicate the original constructor
- duplicateBody(ddef, info(symbol).target)
- }
- } else info(symbol) match {
+ val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperSelect, vparamss, symbol.owner))
+ if (symbol.isPrimaryConstructor)
+ localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant()))))
+ else // duplicate the original constructor
+ reportTypeError(duplicateBody(ddef, info(symbol).target))
+ }
+ else info(symbol) match {
case Implementation(target) =>
assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
// we have an rhs, specialize it
- val tree1 = duplicateBody(ddef, target)
+ val tree1 = reportTypeError {
+ duplicateBody(ddef, target)
+ }
debuglog("implementation: " + tree1)
- val DefDef(mods, name, tparams, vparamss, tpt, rhs) = tree1
- treeCopy.DefDef(tree1, mods, name, tparams, vparamss, tpt, transform(rhs))
+ deriveDefDef(tree1)(transform)
case NormalizedMember(target) =>
- log("Normalized member: " + symbol + ", target: " + target)
- if (target.isDeferred || conflicting(typeEnv(symbol))) {
- treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt,
- localTyper.typed(
- Apply(gen.mkAttributedRef(definitions.Predef_error),
- List(Literal("boom! you stepped on a bug. This method should never be called.")))))
+ val constraints = satisfiabilityConstraints(typeEnv(symbol))
+ log("constraints: " + constraints)
+ if (target.isDeferred || constraints == None) {
+ deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called."))
} else {
// we have an rhs, specialize it
- val tree1 = duplicateBody(ddef, target)
+ val tree1 = reportTypeError {
+ duplicateBody(ddef, target, constraints.get)
+ }
debuglog("implementation: " + tree1)
- val DefDef(mods, name, tparams, vparamss, tpt, rhs) = tree1
- treeCopy.DefDef(tree1, mods, name, tparams, vparamss, tpt, transform(rhs))
+ deriveDefDef(tree1)(transform)
}
case SpecialOverride(target) =>
assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
- //if (settings.debug.value)
- log("moving implementation, body of target " + target + ": " + body(target))
- log("%s is param accessor? %b".format(ddef.symbol, ddef.symbol.isParamAccessor))
+ //debuglog("moving implementation, body of target " + target + ": " + body(target))
+ debuglog("%s is param accessor? %b".format(ddef.symbol, ddef.symbol.isParamAccessor))
// we have an rhs, specialize it
val tree1 = addBody(ddef, target)
(new ChangeOwnerTraverser(target, tree1.symbol))(tree1.rhs)
debuglog("changed owners, now: " + tree1)
- val DefDef(mods, name, tparams, vparamss, tpt, rhs) = tree1
- treeCopy.DefDef(tree1, mods, name, tparams, vparamss, tpt, transform(rhs))
-
+ deriveDefDef(tree1)(transform)
case SpecialOverload(original, env) =>
debuglog("completing specialized " + symbol.fullName + " calling " + original)
- log("special overload " + original + " -> " + env)
+ debuglog("special overload " + original + " -> " + env)
val t = DefDef(symbol, { vparamss =>
val fun = Apply(Select(This(symbol.owner), original),
makeArguments(original, vparamss.head))
- log("inside defdef: " + symbol + "; type: " + symbol.tpe + "; owner: " + symbol.owner)
+ debuglog("inside defdef: " + symbol + "; type: " + symbol.tpe + "; owner: " + symbol.owner)
gen.maybeMkAsInstanceOf(fun,
symbol.owner.thisType.memberType(symbol).finalResultType,
symbol.owner.thisType.memberType(original).finalResultType)
})
- log("created special overload tree " + t)
+ debuglog("created special overload tree " + t)
debuglog("created " + t)
- localTyper.typed(t)
+ reportError {
+ localTyper.typed(t)
+ } {
+ _ => super.transform(tree)
+ }
case fwd @ Forward(_) =>
- log("forward: " + fwd + ", " + ddef)
+ debuglog("forward: " + fwd + ", " + ddef)
val rhs1 = forwardCall(tree.pos, gen.mkAttributedRef(symbol.owner.thisType, fwd.target), vparamss)
- debuglog("completed forwarder to specialized overload: " + fwd.target + ": " + rhs1)
- localTyper.typed(treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, rhs1))
+ debuglog("-->d completed forwarder to specialized overload: " + fwd.target + ": " + rhs1)
+ reportError {
+ localTyper.typed(deriveDefDef(tree)(_ => rhs1))
+ } {
+ _ => super.transform(tree)
+ }
case SpecializedAccessor(target) =>
val rhs1 = if (symbol.isGetter)
gen.mkAttributedRef(target)
else
Assign(gen.mkAttributedRef(target), Ident(vparamss.head.head.symbol))
- log("specialized accessor: " + target + " -> " + rhs1)
- localTyper.typed(treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, rhs1))
+ debuglog("specialized accessor: " + target + " -> " + rhs1)
+ localTyper.typed(deriveDefDef(tree)(_ => rhs1))
+
+ case Abstract(targ) =>
+ debuglog("abstract: " + targ)
+ localTyper.typed(deriveDefDef(tree)(rhs => rhs))
+ }
}
+ expandInnerNormalizedMembers(transformDefDef)
- case ValDef(mods, name, tpt, rhs) if symbol.hasFlag(SPECIALIZED) && !symbol.isParamAccessor =>
+ case ddef @ DefDef(_, _, _, _, _, _) =>
+ val tree1 = expandInnerNormalizedMembers(tree)
+ super.transform(tree1)
+
+ case ValDef(_, _, _, _) if symbol.hasFlag(SPECIALIZED) && !symbol.isParamAccessor =>
+ def transformValDef = {
assert(body.isDefinedAt(symbol.alias), body)
- val tree1 = treeCopy.ValDef(tree, mods, name, tpt, body(symbol.alias).duplicate)
+ val tree1 = deriveValDef(tree)(_ => body(symbol.alias).duplicate)
debuglog("now typing: " + tree1 + " in " + tree.symbol.owner.fullName)
- val d = new Duplicator
- val ValDef(mods1, name1, tpt1, rhs1) = d.retyped(
+
+ val d = new SpecializationDuplicator(emptyEnv)
+ val newValDef = d.retyped(
localTyper.context1.asInstanceOf[d.Context],
tree1,
symbol.alias.enclClass,
symbol.enclClass,
typeEnv(symbol.alias) ++ typeEnv(tree.symbol)
)
- val t = treeCopy.ValDef(tree1, mods1, name1, tpt1, transform(rhs1))
- log("valdef " + tree + " -> " + t)
- t
-
-// val tree1 =
-// treeCopy.ValDef(tree, mods, name, tpt,
-// localTyper.typed(
-// Apply(Select(Super(currentClass, nme.EMPTY), symbol.alias.getter(symbol.alias.owner)),
-// List())))
-// debuglog("replaced ValDef: " + tree1 + " in " + tree.symbol.owner.fullName)
-// tree1
-
- case Apply(sel @ Select(sup @ Super(qual, name), name1), args)
- if (sup.symbol.info.parents != atPhase(phase.prev)(sup.symbol.info.parents)) =>
-
- def parents = sup.symbol.info.parents
- debuglog(tree + " parents changed from: " + atPhase(phase.prev)(parents) + " to: " + parents)
-
- val res = localTyper.typed(
- Apply(Select(Super(qual, name) setPos sup.pos, name1) setPos sel.pos, transformTrees(args)) setPos tree.pos)
- debuglog("retyping call to super, from: " + symbol + " to " + res.symbol)
- res
+ deriveValDef(newValDef)(transform)
+ }
+ transformValDef
case _ =>
super.transform(tree)
}
}
- private def reskolemize(tparams: List[TypeDef]): (List[Symbol], List[Symbol]) = {
- val tparams1 = tparams map (_.symbol)
- localTyper.namer.skolemize(tparams)
- (tparams1, tparams map (_.symbol))
+ /**
+ * This performs method specialization inside a scope other than a {class, trait, object}: could be another method
+ * or a value. This specialization is much simpler, since there is no need to record the new members in the class
+ * signature, their signatures are only visible locally. It works according to the usual logic:
+ * - we use normalizeMember to create the specialized symbols
+ * - we leave DefDef stubs in the tree that are later filled in by tree duplication and adaptation
+ * @see duplicateBody
+ */
+ private def expandInnerNormalizedMembers(tree: Tree) = tree match {
+ case ddef @ DefDef(_, _, _, vparams :: Nil, _, rhs)
+ if ddef.symbol.owner.isMethod &&
+ specializedTypeVars(ddef.symbol.info).nonEmpty &&
+ !ddef.symbol.hasFlag(SPECIALIZED) =>
+
+ val sym = ddef.symbol
+ val owner = sym.owner
+ val norm = normalizeMember(owner, sym, emptyEnv)
+
+ if (norm.length > 1) {
+ // record the body for duplication
+ body(sym) = rhs
+ parameters(sym) = vparams.map(_.symbol)
+ // to avoid revisiting the member, we can set the SPECIALIZED
+ // flag. nobody has to see this anyway :)
+ sym.setFlag(SPECIALIZED)
+ // create empty bodies for specializations
+ localTyper.typed(Block(norm.tail.map(sym => DefDef(sym, { vparamss => EmptyTree })), ddef))
+ } else
+ tree
+ case _ =>
+ tree
}
-
- private def duplicateBody(tree: DefDef, source: Symbol) = {
+ /** Duplicate the body of the given method `tree` to the new symbol `source`.
+ *
+ * Knowing that the method can be invoked only in the `castmap` type environment,
+ * this method will insert casts for all the expressions of types mappend in the
+ * `castmap`.
+ */
+ private def duplicateBody(tree: DefDef, source: Symbol, castmap: TypeEnv = emptyEnv) = {
val symbol = tree.symbol
val meth = addBody(tree, source)
- debuglog("now typing: " + meth + " in " + symbol.owner.fullName)
- val d = new Duplicator
+ val d = new SpecializationDuplicator(castmap)
+ debuglog("-->d DUPLICATING: " + meth)
d.retyped(
localTyper.context1.asInstanceOf[d.Context],
meth,
@@ -1506,21 +1781,22 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
private def addBody(tree: DefDef, source: Symbol): DefDef = {
val symbol = tree.symbol
- debuglog("specializing body of" + symbol.fullName + ": " + symbol.info)
- val DefDef(mods, name, tparams, vparamss, tpt, _) = tree
+ debuglog("specializing body of" + symbol.defString)
+ val DefDef(_, _, tparams, vparams :: Nil, tpt, _) = tree
// val (_, origtparams) = splitParams(source.typeParams)
val env = typeEnv(symbol)
val boundTvars = env.keySet
- val origtparams = source.typeParams.filter(tparam => !boundTvars(tparam) || !isScalaValueType(env(tparam)))
- debuglog("substituting " + origtparams + " for " + symbol.typeParams)
+ val origtparams = source.typeParams.filter(tparam => !boundTvars(tparam) || !isPrimitiveValueType(env(tparam)))
+ if (origtparams.nonEmpty || symbol.typeParams.nonEmpty)
+ debuglog("substituting " + origtparams + " for " + symbol.typeParams)
// skolemize type parameters
- val (oldtparams, newtparams) = reskolemize(tparams)
+ val oldtparams = tparams map (_.symbol)
+ val newtparams = deriveFreshSkolems(oldtparams)
+ map2(tparams, newtparams)(_ setSymbol _)
// create fresh symbols for value parameters to hold the skolem types
- val vparamss1 = List(for (vdef <- vparamss.head; param = vdef.symbol) yield {
- ValDef(param.cloneSymbol(symbol).setInfo(param.info.substSym(oldtparams, newtparams)))
- })
+ val newSyms = cloneSymbolsAtOwnerAndModify(vparams map (_.symbol), symbol, _.substSym(oldtparams, newtparams))
// replace value and type parameters of the old method with the new ones
// log("Adding body for " + tree.symbol + " - origtparams: " + origtparams + "; tparams: " + tparams)
@@ -1529,51 +1805,51 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// log("newtparams: " + newtparams)
val symSubstituter = new ImplementationAdapter(
parameters(source) ::: origtparams,
- vparamss1.flatten.map(_.symbol) ::: newtparams,
+ newSyms ::: newtparams,
source.enclClass,
false) // don't make private fields public
- val tmp = symSubstituter(body(source).duplicate)
+
+ val newBody = symSubstituter(body(source).duplicate)
tpt.tpe = tpt.tpe.substSym(oldtparams, newtparams)
- treeCopy.DefDef(tree, mods, name, tparams, vparamss1, tpt, tmp)
+ copyDefDef(tree)(vparamss = List(newSyms map ValDef), rhs = newBody)
}
- /** Create trees for specialized members of 'cls', based on the
+ /** Create trees for specialized members of 'sClass', based on the
* symbols that are already there.
*/
- private def makeSpecializedMembers(cls: Symbol): List[Tree] = {
+ private def makeSpecializedMembers(sClass: Symbol): List[Tree] = {
// add special overrides first
-// if (!cls.hasFlag(SPECIALIZED))
-// for (m <- specialOverrides(cls)) cls.info.decls.enter(m)
+// if (!specializedClass.hasFlag(SPECIALIZED))
+// for (m <- specialOverrides(specializedClass)) specializedClass.info.decls.enter(m)
val mbrs = new mutable.ListBuffer[Tree]
var hasSpecializedFields = false
- for (m <- cls.info.decls.toList
+ for (m <- sClass.info.decls
if m.hasFlag(SPECIALIZED)
&& (m.sourceFile ne null)
- && satisfiable(typeEnv(m), !cls.hasFlag(SPECIALIZED))) {
- log("creating tree for " + m.fullName)
+ && satisfiable(typeEnv(m), !sClass.hasFlag(SPECIALIZED))) {
+ debuglog("creating tree for " + m.fullName)
if (m.isMethod) {
if (info(m).target.hasAccessorFlag) hasSpecializedFields = true
if (m.isClassConstructor) {
val origParams = parameters(info(m).target)
-
- val vparams =
- for ((tp, sym) <- m.info.paramTypes zip origParams)
- yield m.newValue(sym.pos, specializedName(sym, typeEnv(cls)))
- .setInfo(tp)
- .setFlag(sym.flags)
-
+ val vparams = (
+ map2(m.info.paramTypes, origParams)((tp, sym) =>
+ m.newValue(specializedName(sym, typeEnv(sClass)), sym.pos, sym.flags) setInfo tp
+ )
+ )
// param accessors for private members (the others are inherited from the generic class)
- if (m.isPrimaryConstructor)
- for (param <- vparams if cls.info.nonPrivateMember(param.name) == NoSymbol;
- val acc = param.cloneSymbol(cls).setFlag(PARAMACCESSOR | PRIVATE)) {
- cls.info.decls.enter(acc)
+ if (m.isPrimaryConstructor) {
+ for (param <- vparams ; if sClass.info.nonPrivateMember(param.name) == NoSymbol) {
+ val acc = param.cloneSymbol(sClass, param.flags | PARAMACCESSOR | PRIVATE)
+ sClass.info.decls.enter(acc)
mbrs += ValDef(acc, EmptyTree).setType(NoType).setPos(m.pos)
}
+ }
// ctor
- mbrs += atPos(m.pos)(DefDef(m, Modifiers(m.flags), List(vparams) map (_ map ValDef), EmptyTree))
+ mbrs += atPos(m.pos)(DefDef(m, Modifiers(m.flags), mmap(List(vparams))(ValDef), EmptyTree))
} else {
mbrs += atPos(m.pos)(DefDef(m, { paramss => EmptyTree }))
}
@@ -1587,12 +1863,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
if (hasSpecializedFields) {
- val isSpecializedInstance = cls.hasFlag(SPECIALIZED) || cls.info.parents.exists(_.typeSymbol.hasFlag(SPECIALIZED))
- val sym = cls.newMethod(nme.SPECIALIZED_INSTANCE, cls.pos)
- .setInfo(MethodType(Nil, BooleanClass.tpe))
- cls.info.decls.enter(sym)
+ val isSpecializedInstance = sClass :: sClass.parentSymbols exists (_ hasFlag SPECIALIZED)
+ val sym = sClass.newMethod(nme.SPECIALIZED_INSTANCE, sClass.pos) setInfoAndEnter MethodType(Nil, BooleanClass.tpe)
+
mbrs += atPos(sym.pos) {
- DefDef(sym, Literal(isSpecializedInstance).setType(BooleanClass.tpe)).setType(NoType)
+ DefDef(sym, Literal(Constant(isSpecializedInstance)).setType(BooleanClass.tpe)).setType(NoType)
}
}
mbrs.toList
@@ -1600,27 +1875,23 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Create specialized class definitions */
def implSpecClasses(trees: List[Tree]): List[Tree] = {
- val buf = new mutable.ListBuffer[Tree]
- for (tree <- trees)
- tree match {
- case ClassDef(_, _, _, impl) =>
- tree.symbol.info // force specialization
- for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) {
- val parents = specCls.info.parents.map(TypeTree)
- buf +=
- ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List()))
- .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos
- log("created synthetic class: " + specCls + " of " + sym1 + " in env: " + env)
- }
- case _ =>
- }
- buf.toList
+ trees flatMap {
+ case tree @ ClassDef(_, _, _, impl) =>
+ tree.symbol.info // force specialization
+ for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) yield {
+ debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env))
+ val parents = specCls.info.parents.map(TypeTree)
+ ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List()))
+ .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos
+ }
+ case _ => Nil
+ } sortBy (_.name.decoded)
}
}
- private def forwardCall(pos: util.Position, receiver: Tree, paramss: List[List[ValDef]]): Tree = {
- val argss = paramss map (_ map (x => Ident(x.symbol)))
- atPos(pos) { (receiver /: argss) (Apply) }
+ private def forwardCall(pos: scala.reflect.internal.util.Position, receiver: Tree, paramss: List[List[ValDef]]): Tree = {
+ val argss = mmap(paramss)(x => Ident(x.symbol))
+ atPos(pos) { (receiver /: argss) (Apply.apply) }
}
/** Forward to the generic class constructor. If the current class initializes
@@ -1642,31 +1913,29 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* }
* }}
*/
- private def forwardCtorCall(pos: util.Position, receiver: Tree, paramss: List[List[ValDef]], clazz: Symbol): Tree = {
+ private def forwardCtorCall(pos: scala.reflect.internal.util.Position, receiver: Tree, paramss: List[List[ValDef]], clazz: Symbol): Tree = {
- /** A constructor parameter `f' initializes a specialized field
+ /** A constructor parameter `f` initializes a specialized field
* iff:
* - it is specialized itself
* - there is a getter for the original (non-specialized) field in the same class
* - there is a getter for the specialized field in the same class
*/
- def initializesSpecializedField(f: Symbol): Boolean =
- (f.name.endsWith("$sp")
- && clazz.info.member(nme.originalName(f.name)).isPublic
- && (clazz.info.decl(f.name).suchThat(_.isGetter) != NoSymbol))
+ def initializesSpecializedField(f: Symbol) = (
+ (f.name endsWith nme.SPECIALIZED_SUFFIX)
+ && clazz.info.member(nme.originalName(f.name)).isPublic
+ && clazz.info.decl(f.name).suchThat(_.isGetter) != NoSymbol
+ )
- val argss = paramss map (_ map (x =>
+ val argss = mmap(paramss)(x =>
if (initializesSpecializedField(x.symbol))
gen.mkAsInstanceOf(Literal(Constant(null)), x.symbol.tpe)
else
- Ident(x.symbol))
+ Ident(x.symbol)
)
- atPos(pos) { (receiver /: argss) (Apply) }
+ atPos(pos) { (receiver /: argss) (Apply.apply) }
}
- /** Concrete methods that use a specialized type, or override such methods. */
- private val concreteSpecMethods: mutable.Set[Symbol] = new mutable.HashSet
-
/** Add method m to the set of symbols for which we need an implementation tree
* in the tree transformer.
*
@@ -1679,8 +1948,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def makeArguments(fun: Symbol, vparams: List[Symbol]): List[Tree] = (
//! TODO: make sure the param types are seen from the right prefix
- for ((tp, arg) <- fun.info.paramTypes zip vparams) yield
- gen.maybeMkAsInstanceOf(Ident(arg), tp, arg.tpe)
+ map2(fun.info.paramTypes, vparams)((tp, arg) => gen.maybeMkAsInstanceOf(Ident(arg), tp, arg.tpe))
)
private def findSpec(tp: Type): Type = tp match {
case TypeRef(pre, sym, _ :: _) => specializedType(tp)
@@ -1689,9 +1957,27 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
class SpecializationTransformer(unit: CompilationUnit) extends Transformer {
informProgress("specializing " + unit)
- override def transform(tree: Tree) =
- if (settings.nospecialization.value) tree
- else atPhase(phase.next)(specializeCalls(unit).transform(tree))
+ override def transform(tree: Tree) = {
+ val resultTree = if (settings.nospecialization.value) tree
+ else afterSpecialize(specializeCalls(unit).transform(tree))
+
+ // Remove the final modifier and @inline annotation from anything in the
+ // original class (since it's being overridden in at least one subclass).
+ //
+ // We do this here so that the specialized subclasses will correctly copy
+ // final and @inline.
+ info.foreach {
+ case (sym, SpecialOverload(target, _)) => {
+ sym.resetFlag(FINAL)
+ target.resetFlag(FINAL)
+ sym.removeAnnotation(ScalaInlineClass)
+ target.removeAnnotation(ScalaInlineClass)
+ }
+ case _ => {}
+ }
+
+ resultTree
+ }
}
def printSpecStats() {
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 5745ccc..9384992 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -1,5 +1,5 @@
/* NSC -- new scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Iulian Dragos
*/
@@ -36,6 +36,8 @@ abstract class TailCalls extends Transform {
}
}
+ import treeInfo.hasSynthCaseSymbol
+
/**
* A Tail Call Transformer
*
@@ -87,10 +89,22 @@ abstract class TailCalls extends Transform {
class TailCallElimination(unit: CompilationUnit) extends Transformer {
private val defaultReason = "it contains a recursive call not in tail position"
+ /** Has the label been accessed? Then its symbol is in this set. */
+ private val accessed = new scala.collection.mutable.HashSet[Symbol]()
+ // `accessed` was stored as boolean in the current context -- this is no longer tenable
+ // with jumps to labels in tailpositions now considered in tailposition,
+ // a downstream context may access the label, and the upstream one will be none the wiser
+ // this is necessary because tail-calls may occur in places where syntactically they seem impossible
+ // (since we now consider jumps to labels that are in tailposition, such as matchEnd(x) {x})
+
+
class Context() {
/** The current method */
var method: Symbol = NoSymbol
+ // symbols of label defs in this method that are in tail position
+ var tailLabels: Set[Symbol] = Set()
+
/** The current tail-call label */
var label: Symbol = NoSymbol
@@ -104,36 +118,32 @@ abstract class TailCalls extends Transform {
var failReason = defaultReason
var failPos = method.pos
- /** Has the label been accessed? */
- var accessed = false
-
def this(that: Context) = {
this()
this.method = that.method
this.tparams = that.tparams
this.tailPos = that.tailPos
- this.accessed = that.accessed
this.failPos = that.failPos
this.label = that.label
+ this.tailLabels = that.tailLabels
}
def this(dd: DefDef) {
this()
this.method = dd.symbol
this.tparams = dd.tparams map (_.symbol)
this.tailPos = true
- this.accessed = false
this.failPos = dd.pos
/** Create a new method symbol for the current method and store it in
* the label field.
*/
this.label = {
- val label = method.newLabel(method.pos, "_" + method.name)
+ val label = method.newLabel(newTermName("_" + method.name), method.pos)
val thisParam = method.newSyntheticValueParam(currentClass.typeOfThis)
label setInfo MethodType(thisParam :: method.tpe.params, method.tpe.finalResultType)
}
if (isEligible)
- label setInfo label.tpe.substSym(method.tpe.typeParams, tparams)
+ label substInfo (method.tpe.typeParams, tparams)
}
def enclosingType = method.enclClass.typeOfThis
@@ -141,14 +151,17 @@ abstract class TailCalls extends Transform {
def isEligible = method.isEffectivelyFinal
// @tailrec annotation indicates mandatory transformation
def isMandatory = method.hasAnnotation(TailrecClass) && !forMSIL
- def isTransformed = isEligible && accessed
+ def isTransformed = isEligible && accessed(label)
def tailrecFailure() = unit.error(failPos, "could not optimize @tailrec annotated " + method + ": " + failReason)
- def newThis(pos: Position) = method.newValue(pos, nme.THIS) setInfo currentClass.typeOfThis setFlag SYNTHETIC
+ def newThis(pos: Position) = logResult("Creating new `this` during tailcalls\n method: %s\n current class: %s".format(
+ method.ownerChain.mkString(" -> "), currentClass.ownerChain.mkString(" -> "))) {
+ method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis
+ }
override def toString(): String = (
"" + method.name + " tparams: " + tparams + " tailPos: " + tailPos +
- " accessed: " + accessed + "\nLabel: " + label + "\nLabel type: " + label.info
+ " Label: " + label + " Label type: " + label.info
)
}
@@ -192,11 +205,10 @@ abstract class TailCalls extends Transform {
* Position is unchanged (by default, the method definition.)
*/
def fail(reason: String) = {
- if (settings.debug.value)
- log("Cannot rewrite recursive call at: " + fun.pos + " because: " + reason)
+ debuglog("Cannot rewrite recursive call at: " + fun.pos + " because: " + reason)
ctx.failReason = reason
- treeCopy.Apply(tree, target, transformArgs)
+ treeCopy.Apply(tree, noTailTransform(target), transformArgs)
}
/** Position of failure is that of the tree being considered.
*/
@@ -205,15 +217,18 @@ abstract class TailCalls extends Transform {
fail(reason)
}
def rewriteTailCall(recv: Tree): Tree = {
- log("Rewriting tail recursive method call at: " + fun.pos)
+ debuglog("Rewriting tail recursive call: " + fun.pos.lineContent.trim)
- ctx.accessed = true
- typedPos(fun.pos)(Apply(Ident(ctx.label), recv :: transformArgs))
+ accessed += ctx.label
+ typedPos(fun.pos) {
+ val args = mapWithIndex(transformArgs)((arg, i) => mkAttributedCastHack(arg, ctx.label.info.params(i + 1).tpe))
+ Apply(Ident(ctx.label), noTailTransform(recv) :: args)
+ }
}
if (!ctx.isEligible) fail("it is neither private nor final so can be overridden")
else if (!isRecursiveCall) {
- if (receiverIsSuper) failHere("it contains a recursive call targetting a supertype")
+ if (receiverIsSuper) failHere("it contains a recursive call targeting supertype " + receiver.tpe)
else failHere(defaultReason)
}
else if (!matchesTypeArgs) failHere("it is called recursively with different type arguments")
@@ -224,14 +239,35 @@ abstract class TailCalls extends Transform {
}
tree match {
- case dd @ DefDef(mods, name, tparams, vparams, tpt, rhs) =>
- log("Entering DefDef: " + name)
+ case ValDef(_, _, _, _) =>
+ if (tree.symbol.isLazy && tree.symbol.hasAnnotation(TailrecClass))
+ unit.error(tree.pos, "lazy vals are not tailcall transformed")
+
+ super.transform(tree)
+
+ case dd @ DefDef(_, _, _, vparamss0, _, rhs0) if !dd.symbol.hasAccessorFlag =>
val newCtx = new Context(dd)
+ def isRecursiveCall(t: Tree) = {
+ val sym = t.symbol
+ (sym != null) && {
+ sym.isMethod && (dd.symbol.name == sym.name) && (dd.symbol.enclClass isSubClass sym.enclClass)
+ }
+ }
+ if (newCtx.isMandatory) {
+ if (!rhs0.exists(isRecursiveCall)) {
+ unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
+ }
+ }
- log("Considering " + name + " for tailcalls")
- val newRHS = transform(rhs, newCtx)
+ // labels are local to a method, so only traverse the rhs of a defdef
+ val collectTailPosLabels = new TailPosLabelsTraverser
+ collectTailPosLabels traverse rhs0
+ newCtx.tailLabels = collectTailPosLabels.tailLabels.toSet
- treeCopy.DefDef(tree, mods, name, tparams, vparams, tpt, {
+ debuglog("Considering " + dd.name + " for tailcalls, with labels in tailpos: "+ newCtx.tailLabels)
+ val newRHS = transform(rhs0, newCtx)
+
+ deriveDefDef(tree){rhs =>
if (newCtx.isTransformed) {
/** We have rewritten the tree, but there may be nested recursive calls remaining.
* If @tailrec is given we need to fail those now.
@@ -243,20 +279,34 @@ abstract class TailCalls extends Transform {
}
}
val newThis = newCtx.newThis(tree.pos)
- val vpSyms = vparams.flatten map (_.symbol)
+ val vpSyms = vparamss0.flatten map (_.symbol)
typedPos(tree.pos)(Block(
List(ValDef(newThis, This(currentClass))),
- LabelDef(newCtx.label, newThis :: vpSyms, newRHS)
+ LabelDef(newCtx.label, newThis :: vpSyms, mkAttributedCastHack(newRHS, newCtx.label.tpe.resultType))
))
}
else {
- if (newCtx.isMandatory)
+ if (newCtx.isMandatory && newRHS.exists(isRecursiveCall))
newCtx.tailrecFailure()
newRHS
}
- })
+ }
+
+ // a translated match
+ case Block(stats, expr) if stats forall hasSynthCaseSymbol =>
+ // the assumption is once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ treeCopy.Block(tree,
+ noTailTransforms(prologue) ++ transformTrees(cases),
+ transform(expr)
+ )
+
+ // a translated casedef
+ case LabelDef(_, _, body) if hasSynthCaseSymbol(tree) =>
+ deriveLabelDef(tree)(transform)
case Block(stats, expr) =>
treeCopy.Block(tree,
@@ -265,11 +315,7 @@ abstract class TailCalls extends Transform {
)
case CaseDef(pat, guard, body) =>
- treeCopy.CaseDef(tree,
- pat,
- guard,
- transform(body)
- )
+ deriveCaseDef(tree)(transform)
case If(cond, thenp, elsep) =>
treeCopy.If(tree,
@@ -284,8 +330,16 @@ abstract class TailCalls extends Transform {
transformTrees(cases).asInstanceOf[List[CaseDef]]
)
+ case Try(block, catches, finalizer @ EmptyTree) =>
+ // SI-1672 Catches are in tail position when there is no finalizer
+ treeCopy.Try(tree,
+ noTailTransform(block),
+ transformTrees(catches).asInstanceOf[List[CaseDef]],
+ EmptyTree
+ )
+
case Try(block, catches, finalizer) =>
- // no calls inside a try are in tail position, but keep recursing for nested functions
+ // no calls inside a try are in tail position if there is a finalizer, but keep recursing for nested functions
treeCopy.Try(tree,
noTailTransform(block),
noTailTransforms(catches).asInstanceOf[List[CaseDef]],
@@ -298,16 +352,111 @@ abstract class TailCalls extends Transform {
case Apply(fun, args) =>
if (fun.symbol == Boolean_or || fun.symbol == Boolean_and)
treeCopy.Apply(tree, fun, transformTrees(args))
- else
- rewriteApply(fun, fun, Nil, args)
+ else if (fun.symbol.isLabel && args.nonEmpty && args.tail.isEmpty && ctx.tailLabels(fun.symbol)) {
+ // this is to detect tailcalls in translated matches
+ // it's a one-argument call to a label that is in a tailposition and that looks like label(x) {x}
+ // thus, the argument to the call is in tailposition
+ val saved = ctx.tailPos
+ ctx.tailPos = true
+ debuglog("in tailpos label: "+ args.head)
+ val res = transform(args.head)
+ ctx.tailPos = saved
+ if (res ne args.head) {
+ // we tail-called -- TODO: shield from false-positives where we rewrite but don't tail-call
+ // must leave the jump to the original tailpos-label (fun)!
+ // there might be *a* tailcall *in* res, but it doesn't mean res *always* tailcalls
+ treeCopy.Apply(tree, fun, List(res))
+ }
+ else rewriteApply(fun, fun, Nil, args)
+ } else rewriteApply(fun, fun, Nil, args)
case Alternative(_) | Star(_) | Bind(_, _) =>
sys.error("We should've never gotten inside a pattern")
- case EmptyTree | Super(_, _) | This(_) | Select(_, _) | Ident(_) | Literal(_) | Function(_, _) | TypeTree() =>
+ case Select(qual, name) =>
+ treeCopy.Select(tree, noTailTransform(qual), name)
+ case EmptyTree | Super(_, _) | This(_) | Ident(_) | Literal(_) | Function(_, _) | TypeTree() =>
tree
case _ =>
super.transform(tree)
}
}
+
+ // Workaround for SI-6900. Uncurry installs an InfoTransformer and a tree Transformer.
+ // These leave us with conflicting view on method signatures; the parameter symbols in
+ // the MethodType can be clones of the ones originally found on the parameter ValDef, and
+ // consequently appearing in the typechecked RHS of the method.
+ private def mkAttributedCastHack(tree: Tree, tpe: Type) =
+ gen.mkAttributedCast(tree, tpe)
+ }
+
+ // collect the LabelDefs (generated by the pattern matcher) in a DefDef that are in tail position
+ // the labels all look like: matchEnd(x) {x}
+ // then, in a forward jump `matchEnd(expr)`, `expr` is considered in tail position (and the matchEnd jump is replaced by the jump generated by expr)
+ class TailPosLabelsTraverser extends Traverser {
+ val tailLabels = new scala.collection.mutable.HashSet[Symbol]()
+
+ private var maybeTail: Boolean = true // since we start in the rhs of a DefDef
+
+ def traverse(tree: Tree, maybeTailNew: Boolean): Unit = {
+ val saved = maybeTail
+ maybeTail = maybeTailNew
+ try traverse(tree)
+ finally maybeTail = saved
+ }
+
+ def traverseNoTail(tree: Tree) = traverse(tree, false)
+ def traverseTreesNoTail(trees: List[Tree]) = trees foreach traverseNoTail
+
+ override def traverse(tree: Tree) = tree match {
+ // we're looking for label(x){x} in tail position, since that means `a` is in tail position in a call `label(a)`
+ case LabelDef(_, List(arg), body@Ident(_)) if arg.symbol == body.symbol =>
+ if (maybeTail) tailLabels += tree.symbol
+
+ // jumps to matchEnd are transparent; need this case for nested matches
+ // (and the translated match case below does things in reverse for this case's sake)
+ case Apply(fun, arg :: Nil) if hasSynthCaseSymbol(fun) && tailLabels(fun.symbol) =>
+ traverse(arg)
+
+ case Apply(fun, args) if (fun.symbol == Boolean_or || fun.symbol == Boolean_and) =>
+ traverseTrees(args)
+
+ // a translated casedef
+ case LabelDef(_, _, body) if hasSynthCaseSymbol(tree) =>
+ traverse(body)
+
+ // a translated match
+ case Block(stats, expr) if stats forall hasSynthCaseSymbol =>
+ // the assumption is once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ traverse(expr)
+ traverseTrees(cases.reverse) // reverse so that we enter the matchEnd LabelDef before we see jumps to it
+ traverseTreesNoTail(prologue) // selector (may be absent)
+
+ case CaseDef(pat, guard, body) =>
+ traverse(body)
+
+ case Match(selector, cases) =>
+ traverseNoTail(selector)
+ traverseTrees(cases)
+
+ case dd @ DefDef(_, _, _, _, _, _) => // we are run per-method
+
+ case Block(stats, expr) =>
+ traverseTreesNoTail(stats)
+ traverse(expr)
+
+ case If(cond, thenp, elsep) =>
+ traverse(thenp)
+ traverse(elsep)
+
+ case Try(block, catches, finalizer) =>
+ traverseNoTail(block)
+ traverseTreesNoTail(catches)
+ traverseNoTail(finalizer)
+
+ case Apply(_, _) | EmptyTree | Super(_, _) | This(_) | Select(_, _) | Ident(_) | Literal(_) | Function(_, _) | TypeTree() =>
+ case _ => super.traverse(tree)
+ }
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/Transform.scala b/src/compiler/scala/tools/nsc/transform/Transform.scala
index 0f793fd..4e69fbc 100644
--- a/src/compiler/scala/tools/nsc/transform/Transform.scala
+++ b/src/compiler/scala/tools/nsc/transform/Transform.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
index 7a4da41..c7bc16f 100644
--- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
+++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -25,19 +25,14 @@ trait TypingTransformers {
protected var curTree: Tree = _
protected def typedPos(pos: Position)(tree: Tree) = localTyper typed { atPos(pos)(tree) }
- /** a typer for each enclosing class */
- val typers: mutable.Map[Symbol, analyzer.Typer] = new mutable.HashMap
-
- override def atOwner[A](owner: Symbol)(trans: => A): A = atOwner(curTree, owner)(trans)
+ override final def atOwner[A](owner: Symbol)(trans: => A): A = atOwner(curTree, owner)(trans)
def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = {
val savedLocalTyper = localTyper
// println("transformer atOwner: " + owner + " isPackage? " + owner.isPackage)
localTyper = localTyper.atOwner(tree, if (owner.isModule) owner.moduleClass else owner)
- typers += Pair(owner, localTyper)
val result = super.atOwner(owner)(trans)
localTyper = savedLocalTyper
- typers -= owner
result
}
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index e278bcc..e61b35a 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author
*/
@@ -8,6 +8,7 @@ package transform
import symtab.Flags._
import scala.collection.{ mutable, immutable }
+import scala.language.postfixOps
/*<export> */
/** - uncurry all symbol and tree types (@see UnCurryPhase) -- this includes normalizing all proper types.
@@ -19,7 +20,7 @@ import scala.collection.{ mutable, immutable }
* - for every use of a def-parameter: x ==> x.apply()
* - for every argument to a def parameter `x: => T':
* if argument is not a reference to a def parameter:
- * convert argument `e' to (expansion of) `() => e'
+ * convert argument `e` to (expansion of) `() => e`
* - for every repeated Scala parameter `x: T*' --> x: Seq[T].
* - for every repeated Java parameter `x: T...' --> x: Array[T], except:
* if T is an unbounded abstract type, replace --> x: Array[Object]
@@ -32,9 +33,21 @@ import scala.collection.{ mutable, immutable }
* - convert implicit method types to method types
* - convert non-trivial catches in try statements to matches
* - convert non-local returns to throws with enclosing try statements.
+ * - convert try-catch expressions in contexts where there might be values on the stack to
+ * a local method and a call to it (since an exception empties the evaluation stack):
+ *
+ * meth(x_1,..., try { x_i } catch { ..}, .. x_b0) ==>
+ * {
+ * def liftedTry$1 = try { x_i } catch { .. }
+ * meth(x_1, .., liftedTry$1(), .. )
+ * }
*/
/*</export> */
-abstract class UnCurry extends InfoTransform with TypingTransformers with ast.TreeDSL {
+abstract class UnCurry extends InfoTransform
+ with scala.reflect.internal.transform.UnCurry
+ with TypingTransformers with ast.TreeDSL {
+ val global: Global // need to repeat here because otherwise last mixin defines global as
+ // SymbolTable. If we had DOT this would not be an issue
import global._ // the global environment
import definitions._ // standard classes and methods
import CODE._
@@ -47,66 +60,9 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
// ------ Type transformation --------------------------------------------------------
// uncurry and uncurryType expand type aliases
- private def expandAlias(tp: Type): Type = if (!tp.isHigherKinded) tp.normalize else tp
-
- private def isUnboundedGeneric(tp: Type) = tp match {
- case t @ TypeRef(_, sym, _) => sym.isAbstractType && !(t <:< AnyRefClass.tpe)
- case _ => false
- }
-
- private val uncurry: TypeMap = new TypeMap {
- def apply(tp0: Type): Type = {
- // tp0.typeSymbolDirect.initialize
- val tp = expandAlias(tp0)
- tp match {
- case MethodType(params, MethodType(params1, restpe)) =>
- apply(MethodType(params ::: params1, restpe))
- case MethodType(params, ExistentialType(tparams, restpe @ MethodType(_, _))) =>
- assert(false, "unexpected curried method types with intervening existential")
- tp0
- case MethodType(h :: t, restpe) if h.isImplicit =>
- apply(MethodType(h.cloneSymbol.resetFlag(IMPLICIT) :: t, restpe))
- case NullaryMethodType(restpe) =>
- apply(MethodType(List(), restpe))
- case TypeRef(pre, ByNameParamClass, List(arg)) =>
- apply(functionType(List(), arg))
- case TypeRef(pre, RepeatedParamClass, args) =>
- apply(appliedType(SeqClass.typeConstructor, args))
- case TypeRef(pre, JavaRepeatedParamClass, args) =>
- apply(arrayType(
- if (isUnboundedGeneric(args.head)) ObjectClass.tpe else args.head))
- case _ =>
- expandAlias(mapOver(tp))
- }
- }
- }
-
- private val uncurryType = new TypeMap {
- def apply(tp0: Type): Type = {
- val tp = expandAlias(tp0)
- tp match {
- case ClassInfoType(parents, decls, clazz) =>
- val parents1 = parents mapConserve uncurry
- if (parents1 eq parents) tp
- else ClassInfoType(parents1, decls, clazz) // @MAT normalize in decls??
- case PolyType(_, _) =>
- mapOver(tp)
- case _ =>
- tp
- }
- }
- }
-
- /** - return symbol's transformed type,
- * - if symbol is a def parameter with transformed type T, return () => T
- *
- * @MAT: starting with this phase, the info of every symbol will be normalized
- */
- def transformInfo(sym: Symbol, tp: Type): Type =
- if (sym.isType) uncurryType(tp) else uncurry(tp)
/** Traverse tree omitting local method definitions.
- * If a `return' is encountered, set `returnFound' to true.
+ * If a `return` is encountered, set `returnFound` to true.
* Used for MSIL only.
*/
private object lookForReturns extends Traverser {
@@ -124,45 +80,62 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
}
class UnCurryTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
-
- private var needTryLift = false
- private var inPattern = false
+ private var needTryLift = false
+ private var inPattern = false
private var inConstructorFlag = 0L
- private val byNameArgs = new mutable.HashSet[Tree]
- private val noApply = new mutable.HashSet[Tree]
- private val newMembers = mutable.ArrayBuffer[Tree]()
- private val repeatedParams = mutable.Map[Symbol, List[ValDef]]()
+ private val byNameArgs = mutable.HashSet[Tree]()
+ private val noApply = mutable.HashSet[Tree]()
+ private val newMembers = mutable.Map[Symbol, mutable.Buffer[Tree]]()
+ private val repeatedParams = mutable.Map[Symbol, List[ValDef]]()
- private lazy val serialVersionUIDAnnotation =
- AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List())
+ /** Add a new synthetic member for `currentOwner` */
+ private def addNewMember(t: Tree): Unit =
+ newMembers.getOrElseUpdate(currentOwner, mutable.Buffer()) += t
+
+ /** Process synthetic members for `owner`. They are removed form the `newMembers` as a side-effect. */
+ @inline private def useNewMembers[T](owner: Symbol)(f: List[Tree] => T): T =
+ f(newMembers.remove(owner).getOrElse(Nil).toList)
+
+ @inline private def withInPattern[T](value: Boolean)(body: => T): T = {
+ inPattern = value
+ try body
+ finally inPattern = !value
+ }
- override def transformUnit(unit: CompilationUnit) {
- freeMutableVars.clear()
- freeLocalsTraverser(unit.body)
- super.transformUnit(unit)
+ private def newFunction0(body: Tree): Tree = {
+ val result = localTyper.typedPos(body.pos)(Function(Nil, body)).asInstanceOf[Function]
+ log("Change owner from %s to %s in %s".format(currentOwner, result.symbol, result.body))
+ result.body changeOwner (currentOwner -> result.symbol)
+ transformFunction(result)
}
+ private lazy val serialVersionUIDAnnotation =
+ AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List())
+
private var nprinted = 0
- override def transform(tree: Tree): Tree = try { //debug
- postTransform(mainTransform(tree))
- } catch {
- case ex: Throwable =>
- if (nprinted < 10) {
- Console.println("exception when traversing " + tree)
- nprinted += 1
- }
- throw ex
- }
+ // I don't have a clue why I'm catching TypeErrors here, but it's better
+ // than spewing stack traces at end users for internal errors. Examples
+ // which hit at this point should not be hard to come by, but the immediate
+ // motivation can be seen in continuations-neg/t3718.
+ override def transform(tree: Tree): Tree = (
+ try postTransform(mainTransform(tree))
+ catch { case ex: TypeError =>
+ unit.error(ex.pos, ex.msg)
+ debugStack(ex)
+ EmptyTree
+ }
+ )
- /* Is tree a reference `x' to a call by name parameter that needs to be converted to
- * x.apply()? Note that this is not the case if `x' is used as an argument to another
+ /* Is tree a reference `x` to a call by name parameter that needs to be converted to
+ * x.apply()? Note that this is not the case if `x` is used as an argument to another
* call by name parameter.
*/
- def isByNameRef(tree: Tree): Boolean =
- tree.isTerm && tree.hasSymbol &&
- isByNameParamType(tree.symbol.tpe) &&
- !byNameArgs(tree)
+ def isByNameRef(tree: Tree) = (
+ tree.isTerm
+ && !byNameArgs(tree)
+ && tree.hasSymbolWhich(s => isByNameParamType(s.tpe))
+ )
/** Uncurry a type of a tree node.
* This function is sensitive to whether or not we are in a pattern -- when in a pattern
@@ -179,18 +152,16 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
/** The type of a non-local return expression with given argument type */
private def nonLocalReturnExceptionType(argtype: Type) =
- appliedType(NonLocalReturnControlClass.typeConstructor, List(argtype))
+ appliedType(NonLocalReturnControlClass, argtype)
/** A hashmap from method symbols to non-local return keys */
private val nonLocalReturnKeys = perRunCaches.newMap[Symbol, Symbol]()
/** Return non-local return key for given method */
private def nonLocalReturnKey(meth: Symbol) =
- nonLocalReturnKeys.getOrElseUpdate(meth, {
- meth.newValue(meth.pos, unit.freshTermName("nonLocalReturnKey"))
- .setFlag (SYNTHETIC)
- .setInfo (ObjectClass.tpe)
- })
+ nonLocalReturnKeys.getOrElseUpdate(meth,
+ meth.newValue(unit.freshTermName("nonLocalReturnKey"), meth.pos, SYNTHETIC) setInfo ObjectClass.tpe
+ )
/** Generate a non-local return throw with given return expression from given method.
* I.e. for the method's non-local return key, generate:
@@ -199,13 +170,13 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
* todo: maybe clone a pre-existing exception instead?
* (but what to do about exceptions that miss their targets?)
*/
- private def nonLocalReturnThrow(expr: Tree, meth: Symbol) =
- localTyper.typed {
- Throw(
- New(
- TypeTree(nonLocalReturnExceptionType(expr.tpe)),
- List(List(Ident(nonLocalReturnKey(meth)), expr))))
- }
+ private def nonLocalReturnThrow(expr: Tree, meth: Symbol) = localTyper typed {
+ Throw(
+ nonLocalReturnExceptionType(expr.tpe.widen),
+ Ident(nonLocalReturnKey(meth)),
+ expr
+ )
+ }
/** Transform (body, key) to:
*
@@ -214,38 +185,26 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
* try {
* body
* } catch {
- * case ex: NonLocalReturnControl[_] =>
+ * case ex: NonLocalReturnControl[T @unchecked] =>
* if (ex.key().eq(key)) ex.value()
* else throw ex
* }
* }
*/
private def nonLocalReturnTry(body: Tree, key: Symbol, meth: Symbol) = {
- localTyper.typed {
- val extpe = nonLocalReturnExceptionType(meth.tpe.finalResultType)
- val ex = meth.newValue(body.pos, nme.ex) setInfo extpe
- val pat = Bind(ex,
- Typed(Ident(nme.WILDCARD),
- AppliedTypeTree(Ident(NonLocalReturnControlClass),
- List(Bind(tpnme.WILDCARD,
- EmptyTree)))))
- val rhs =
- If(
- Apply(
- Select(
- Apply(Select(Ident(ex), "key"), List()),
- Object_eq),
- List(Ident(key))),
- Apply(
- TypeApply(
- Select(
- Apply(Select(Ident(ex), "value"), List()),
- Any_asInstanceOf),
- List(TypeTree(meth.tpe.finalResultType))),
- List()),
- Throw(Ident(ex)))
- val keyDef = ValDef(key, New(TypeTree(ObjectClass.tpe), List(List())))
- val tryCatch = Try(body, List(CaseDef(pat, EmptyTree, rhs)), EmptyTree)
+ localTyper typed {
+ val extpe = nonLocalReturnExceptionType(meth.tpe.finalResultType)
+ val ex = meth.newValue(nme.ex, body.pos) setInfo extpe
+ val argType = meth.tpe.finalResultType withAnnotation (AnnotationInfo marker UncheckedClass.tpe)
+ val pat = gen.mkBindForCase(ex, NonLocalReturnControlClass, List(argType))
+ val rhs = (
+ IF ((ex DOT nme.key)() OBJ_EQ Ident(key))
+ THEN ((ex DOT nme.value)())
+ ELSE (Throw(Ident(ex)))
+ )
+ val keyDef = ValDef(key, New(ObjectClass.tpe))
+ val tryCatch = Try(body, pat -> rhs)
+
Block(List(keyDef), tryCatch)
}
}
@@ -254,11 +213,6 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
/** Undo eta expansion for parameterless and nullary methods */
def deEta(fun: Function): Tree = fun match {
- case Function(List(), Apply(expr, List())) if treeInfo.isPureExpr(expr) =>
- if (expr hasSymbolWhich (_.isLazy))
- fun
- else
- expr
case Function(List(), expr) if isByNameRef(expr) =>
noApply += expr
expr
@@ -267,20 +221,80 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
}
- /* Transform a function node (x_1,...,x_n) => body of type FunctionN[T_1, .., T_N, R] to
+ /** Transform a function node (x_1,...,x_n) => body of type FunctionN[T_1, .., T_N, R] to
*
- * class $anon() extends Object() with FunctionN[T_1, .., T_N, R] with ScalaObject {
+ * class $anon() extends AbstractFunctionN[T_1, .., T_N, R] with Serializable {
* def apply(x_1: T_1, ..., x_N: T_n): R = body
* }
* new $anon()
*
- * transform a function node (x => body) of type PartialFunction[T, R] where
+ * If `settings.XoldPatmat.value`, also synthesized AbstractPartialFunction subclasses (see synthPartialFunction).
+ *
+ */
+ def transformFunction(fun: Function): Tree = {
+ fun.tpe match {
+ // can happen when analyzer plugins assign refined types to functions, e.g.
+ // (() => Int) { def apply(): Int @typeConstraint }
+ case RefinedType(List(funTp), decls) =>
+ debuglog(s"eliminate refinement from function type ${fun.tpe}")
+ fun.tpe = funTp
+ case _ =>
+ ()
+ }
+
+ deEta(fun) match {
+ // nullary or parameterless
+ case fun1 if fun1 ne fun => fun1
+ case _ if fun.tpe.typeSymbol == PartialFunctionClass =>
+ // only get here when running under -Xoldpatmat
+ synthPartialFunction(fun)
+ case _ =>
+ val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe))
+ val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
+ anonClass setInfo ClassInfoType(parents, newScope, anonClass)
+
+ val targs = fun.tpe.typeArgs
+ val (formals, restpe) = (targs.init, targs.last)
+
+ val applyMethodDef = {
+ val methSym = anonClass.newMethod(nme.apply, fun.pos, FINAL)
+ val paramSyms = map2(formals, fun.vparams) {
+ (tp, param) => methSym.newSyntheticValueParam(tp, param.name)
+ }
+ methSym setInfoAndEnter MethodType(paramSyms, restpe)
+
+ fun.vparams foreach (_.symbol.owner = methSym)
+ fun.body changeOwner (fun.symbol -> methSym)
+
+ val body = localTyper.typedPos(fun.pos)(fun.body)
+ val methDef = DefDef(methSym, List(fun.vparams), body)
+
+ // Have to repack the type to avoid mismatches when existentials
+ // appear in the result - see SI-4869.
+ methDef.tpt setType localTyper.packedType(body, methSym)
+ methDef
+ }
+
+ localTyper.typedPos(fun.pos) {
+ Block(
+ List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyMethodDef), fun.pos)),
+ Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
+ }
+
+ }
+ }
+
+ /** Transform a function node (x => body) of type PartialFunction[T, R] where
* body = expr match { case P_i if G_i => E_i }_i=1..n
- * to:
+ * to (assuming none of the cases is a default case):
*
- * class $anon() extends Object() with PartialFunction[T, R] with ScalaObject {
- * def apply(x: T): R = (expr: @unchecked) match {
- * { case P_i if G_i => E_i }_i=1..n
+ * class $anon() extends AbstractPartialFunction[T, R] with Serializable {
+ * def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = (expr: @unchecked) match {
+ * case P_1 if G_1 => E_1
+ * ...
+ * case P_n if G_n => E_n
+ * case _ => default(expr)
+ * }
* def isDefinedAt(x: T): boolean = (x: @unchecked) match {
* case P_1 if G_1 => true
* ...
@@ -290,77 +304,107 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
* }
* new $anon()
*
- * However, if one of the patterns P_i if G_i is a default pattern, generate instead
- *
- * def isDefinedAt(x: T): boolean = true
+ * If there's a default case, the original match is used for applyOrElse, and isDefinedAt returns `true`
*/
- def transformFunction(fun: Function): Tree = {
- val fun1 = deEta(fun)
- def owner = fun.symbol.owner
- def targs = fun.tpe.typeArgs
- def isPartial = fun.tpe.typeSymbol == PartialFunctionClass
-
- if (fun1 ne fun) fun1
- else {
- val (formals, restpe) = (targs.init, targs.last)
- val anonClass = owner newAnonymousFunctionClass fun.pos setFlag (FINAL | SYNTHETIC | inConstructorFlag)
- def parents =
- if (isFunctionType(fun.tpe)) List(abstractFunctionForFunctionType(fun.tpe), SerializableClass.tpe)
- else List(ObjectClass.tpe, fun.tpe, SerializableClass.tpe)
-
- anonClass setInfo ClassInfoType(parents, new Scope, anonClass)
- val applyMethod = anonClass.newMethod(fun.pos, nme.apply) setFlag FINAL
- applyMethod setInfo MethodType(applyMethod newSyntheticValueParams formals, restpe)
- anonClass.info.decls enter applyMethod
- anonClass.addAnnotation(serialVersionUIDAnnotation)
-
- fun.vparams foreach (_.symbol.owner = applyMethod)
- new ChangeOwnerTraverser(fun.symbol, applyMethod) traverse fun.body
-
- def mkUnchecked(tree: Tree) = {
- def newUnchecked(expr: Tree) = Annotated(New(gen.scalaDot(UncheckedClass.name), List(Nil)), expr)
- tree match {
- case Match(selector, cases) => atPos(tree.pos) { Match(newUnchecked(selector), cases) }
- case _ => tree
+ def synthPartialFunction(fun: Function) = {
+ if (!settings.XoldPatmat.value) debugwarn("Under the new pattern matching scheme, PartialFunction should have been synthesized during typers.")
+
+ val targs = fun.tpe.typeArgs
+ val (formals, restpe) = (targs.init, targs.last)
+
+ val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
+ val parents = addSerializable(appliedType(AbstractPartialFunctionClass, targs: _*))
+ anonClass setInfo ClassInfoType(parents, newScope, anonClass)
+
+ // duplicate before applyOrElseMethodDef is run so that it does not mess up our trees and label symbols (we have a fresh set)
+ // otherwise `TreeSymSubstituter(fun.vparams map (_.symbol), params)` won't work as the subst has been run already
+ val bodyForIDA = {
+ val duped = fun.body.duplicate
+ val oldParams = new mutable.ListBuffer[Symbol]()
+ val newParams = new mutable.ListBuffer[Symbol]()
+
+ val oldSyms0 =
+ duped filter {
+ case l at LabelDef(_, params, _) =>
+ params foreach {p =>
+ val oldSym = p.symbol
+ p.symbol = oldSym.cloneSymbol
+ oldParams += oldSym
+ newParams += p.symbol
+ }
+ true
+ case _ => false
+ } map (_.symbol)
+ val oldSyms = oldParams.toList ++ oldSyms0
+ val newSyms = newParams.toList ++ (oldSyms0 map (_.cloneSymbol))
+ // println("duping "+ oldSyms +" --> "+ (newSyms map (_.ownerChain)))
+
+ val substLabels = new TreeSymSubstituter(oldSyms, newSyms)
+
+ substLabels(duped)
+ }
+
+ // def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
+ val applyOrElseMethodDef = {
+ val methSym = anonClass.newMethod(fun.pos, nme.applyOrElse) setFlag (FINAL | OVERRIDE)
+
+ val List(argtpe) = formals
+ val A1 = methSym newTypeParameter(newTypeName("A1")) setInfo TypeBounds.upper(argtpe)
+ val B1 = methSym newTypeParameter(newTypeName("B1")) setInfo TypeBounds.lower(restpe)
+ val methFormals = List(A1.tpe, functionType(List(A1.tpe), B1.tpe))
+ val params at List(x, default) = methSym newSyntheticValueParams methFormals
+ methSym setInfoAndEnter polyType(List(A1, B1), MethodType(params, B1.tpe))
+
+ val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), List(x))
+ val body = localTyper.typedPos(fun.pos) { import CODE._
+ def defaultAction(scrut: Tree) = REF(default) APPLY (REF(x))
+
+ substParam(fun.body) match {
+ case orig at Match(selector, cases) =>
+ if (cases exists treeInfo.isDefaultCase) orig
+ else {
+ val defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, defaultAction(selector.duplicate))
+ Match(/*gen.mkUnchecked*/(selector), cases :+ defaultCase)
+ }
+
}
}
+ body.changeOwner(fun.symbol -> methSym)
- def applyMethodDef() = {
- val body = if (isPartial) mkUnchecked(fun.body) else fun.body
- DefDef(Modifiers(FINAL), nme.apply, Nil, List(fun.vparams), TypeTree(restpe), body) setSymbol applyMethod
- }
- def isDefinedAtMethodDef() = {
- val m = anonClass.newMethod(fun.pos, nme.isDefinedAt) setFlag FINAL
- m setInfo MethodType(m newSyntheticValueParams formals, BooleanClass.tpe)
- anonClass.info.decls enter m
-
- val Match(selector, cases) = fun.body
- val vparam = fun.vparams.head.symbol
- val idparam = m.paramss.head.head
- val substParam = new TreeSymSubstituter(List(vparam), List(idparam))
- def substTree[T <: Tree](t: T): T = substParam(resetLocalAttrs(t))
-
- def transformCase(cdef: CaseDef): CaseDef =
- substTree(CaseDef(cdef.pat.duplicate, cdef.guard.duplicate, Literal(true)))
- def defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(false))
-
- DefDef(m, mkUnchecked(
- if (cases exists treeInfo.isDefaultCase) Literal(true)
- else Match(substTree(selector.duplicate), (cases map transformCase) :+ defaultCase)
- ))
- }
+ val methDef = DefDef(methSym, body)
- val members =
- if (isPartial) List(applyMethodDef, isDefinedAtMethodDef)
- else List(applyMethodDef)
+ // Have to repack the type to avoid mismatches when existentials
+ // appear in the result - see SI-4869.
+ methDef.tpt setType localTyper.packedType(body, methSym)
+ methDef
+ }
+
+ val isDefinedAtMethodDef = {
+ val methSym = anonClass.newMethod(nme.isDefinedAt, fun.pos, FINAL | SYNTHETIC)
+ val params = methSym newSyntheticValueParams formals
+ methSym setInfoAndEnter MethodType(params, BooleanClass.tpe)
+
+ val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), params)
+ def doSubst(x: Tree) = substParam(resetLocalAttrsKeepLabels(x)) // see pos/t1761 for why `resetLocalAttrs`, but must keep label symbols around
+
+ val body = bodyForIDA match {
+ case Match(selector, cases) =>
+ if (cases exists treeInfo.isDefaultCase) TRUE_typed
+ else
+ doSubst(Match(/*gen.mkUnchecked*/(selector),
+ (cases map (c => deriveCaseDef(c)(x => TRUE_typed))) :+ (
+ DEFAULT ==> FALSE_typed)))
- localTyper.typedPos(fun.pos) {
- Block(
- List(ClassDef(anonClass, NoMods, List(List()), List(List()), members, fun.pos)),
- Typed(
- New(TypeTree(anonClass.tpe), List(List())),
- TypeTree(fun.tpe)))
}
+ body.changeOwner(fun.symbol -> methSym)
+
+ DefDef(methSym, body)
+ }
+
+ localTyper.typedPos(fun.pos) {
+ Block(
+ List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyOrElseMethodDef, isDefinedAtMethodDef), fun.pos)),
+ Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
}
}
@@ -372,7 +416,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
// when calling into scala varargs, make sure it's a sequence.
def arrayToSequence(tree: Tree, elemtp: Type) = {
- atPhase(phase.next) {
+ afterUncurry {
localTyper.typedPos(pos) {
val pt = arrayType(elemtp)
val adaptedTree = // might need to cast to Array[elemtp], as arrays are not covariant
@@ -388,28 +432,35 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
def sequenceToArray(tree: Tree) = {
val toArraySym = tree.tpe member nme.toArray
assert(toArraySym != NoSymbol)
- def getManifest(tp: Type): Tree = {
- val manifestOpt = localTyper.findManifest(tp, false)
- if (!manifestOpt.tree.isEmpty) manifestOpt.tree
- else if (tp.bounds.hi ne tp) getManifest(tp.bounds.hi)
- else localTyper.getManifestTree(tree.pos, tp, false)
+ def getClassTag(tp: Type): Tree = {
+ val tag = localTyper.resolveClassTag(tree.pos, tp)
+ // Don't want bottom types getting any further than this (SI-4024)
+ if (tp.typeSymbol.isBottomClass) getClassTag(AnyClass.tpe)
+ else if (!tag.isEmpty) tag
+ else if (tp.bounds.hi ne tp) getClassTag(tp.bounds.hi)
+ else localTyper.TyperErrorGen.MissingClassTagError(tree, tp)
}
- atPhase(phase.next) {
+ def traversableClassTag(tpe: Type): Tree = {
+ (tpe baseType TraversableClass).typeArgs match {
+ case targ :: _ => getClassTag(targ)
+ case _ => EmptyTree
+ }
+ }
+ afterUncurry {
localTyper.typedPos(pos) {
- Apply(gen.mkAttributedSelect(tree, toArraySym),
- List(getManifest(tree.tpe.baseType(TraversableClass).typeArgs.head)))
+ gen.mkMethodCall(tree, toArraySym, Nil, List(traversableClassTag(tree.tpe)))
}
}
}
var suffix: Tree =
if (treeInfo isWildcardStarArgList args) {
- val Typed(tree, _) = args.last;
+ val Typed(tree, _) = args.last
if (isJava)
if (tree.tpe.typeSymbol == ArrayClass) tree
else sequenceToArray(tree)
else
- if (tree.tpe.typeSymbol isSubClass TraversableClass) tree // @PP: I suspect this should be SeqClass
+ if (tree.tpe.typeSymbol isSubClass SeqClass) tree
else arrayToSequence(tree, varargsElemType)
}
else {
@@ -419,10 +470,11 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
else arrayToSequence(mkArray, varargsElemType)
}
- atPhase(phase.next) {
- if (isJava && isPrimitiveArray(suffix.tpe) && isArrayOfSymbol(fun.tpe.params.last.tpe, ObjectClass)) {
+ afterUncurry {
+ if (isJava && !isReferenceArray(suffix.tpe) && isArrayOfSymbol(fun.tpe.params.last.tpe, ObjectClass)) {
+ // The array isn't statically known to be a reference array, so call ScalaRuntime.toObjectArray.
suffix = localTyper.typedPos(pos) {
- gen.mkRuntimeCall("toObjectArray", List(suffix))
+ gen.mkRuntimeCall(nme.toObjectArray, List(suffix))
}
}
}
@@ -431,40 +483,61 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
val args1 = if (isVarArgTypes(formals)) transformVarargs(formals.last.typeArgs.head) else args
- (formals, args1).zipped map { (formal, arg) =>
- if (!isByNameParamType(formal)) {
+ map2(formals, args1) { (formal, arg) =>
+ if (!isByNameParamType(formal))
arg
- } else if (isByNameRef(arg)) {
+ else if (isByNameRef(arg)) {
byNameArgs += arg
- arg setType functionType(List(), arg.tpe)
- } else {
- if (opt.verboseDebug) {
- val posstr = arg.pos.source.path + ":" + arg.pos.line
- val permstr = if (fun.isPrivate) "private" else "notprivate"
- log("byname | %s | %s | %s".format(posstr, fun.fullName, permstr))
+ arg setType functionType(Nil, arg.tpe)
+ }
+ else {
+ log(s"Argument '$arg' at line ${arg.pos.safeLine} is $formal from ${fun.fullName}")
+ def canUseDirectly(recv: Tree) = (
+ recv.tpe.typeSymbol.isSubClass(FunctionClass(0))
+ && treeInfo.isExprSafeToInline(recv)
+ )
+ arg match {
+ // don't add a thunk for by-name argument if argument already is an application of
+ // a Function0. We can then remove the application and use the existing Function0.
+ case Apply(Select(recv, nme.apply), Nil) if canUseDirectly(recv) =>
+ recv
+ case _ =>
+ newFunction0(arg)
}
-
- val result = localTyper.typed(
- Function(Nil, arg) setPos arg.pos).asInstanceOf[Function]
- new ChangeOwnerTraverser(currentOwner, result.symbol).traverse(arg)
- transformFunction(result)
}
}
}
- /** For removing calls to specially designated methods.
+ /** Called if a tree's symbol is elidable. If it's a DefDef,
+ * replace only the body/rhs with 0/false/()/null; otherwise replace
+ * the whole tree with it.
*/
- def elideIntoUnit(tree: Tree): Tree = Literal(()) setPos tree.pos setType UnitClass.tpe
- def isElidable(tree: Tree) = {
- val sym = treeInfo.methPart(tree).symbol
- // XXX settings.noassertions.value temporarily retained to avoid
- // breakage until a reasonable interface is settled upon.
- sym != null && sym.elisionLevel.exists(x => x < settings.elidebelow.value || settings.noassertions.value) && {
- log("Eliding call from " + tree.symbol.owner + " to " + sym + " based on its elision threshold of " + sym.elisionLevel.get)
- true
+ private def replaceElidableTree(tree: Tree): Tree = {
+ tree match {
+ case DefDef(_,_,_,_,_,_) =>
+ deriveDefDef(tree)(rhs => Block(Nil, gen.mkZero(rhs.tpe)) setType rhs.tpe) setSymbol tree.symbol setType tree.tpe
+ case _ =>
+ gen.mkZero(tree.tpe) setType tree.tpe
}
}
+ private def isSelfSynchronized(ddef: DefDef) = ddef.rhs match {
+ case Apply(fn @ TypeApply(Select(sel, _), _), _) =>
+ fn.symbol == Object_synchronized && sel.symbol == ddef.symbol.enclClass && !ddef.symbol.enclClass.isTrait
+ case _ => false
+ }
+
+ /** If an eligible method is entirely wrapped in a call to synchronized
+ * locked on the same instance, remove the synchronized scaffolding and
+ * mark the method symbol SYNCHRONIZED for bytecode generation.
+ */
+ private def translateSynchronized(tree: Tree) = tree match {
+ case dd @ DefDef(_, _, _, _, _, Apply(fn, body :: Nil)) if isSelfSynchronized(dd) =>
+ log("Translating " + dd.symbol.defString + " into synchronized method")
+ dd.symbol setFlag SYNCHRONIZED
+ deriveDefDef(dd)(_ => body)
+ case _ => tree
+ }
def isNonLocalReturn(ret: Return) = ret.symbol != currentOwner.enclMethod || currentOwner.isLazy
// ------ The tree transformers --------------------------------------------------------
@@ -484,16 +557,15 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
def shouldBeLiftedAnyway(tree: Tree) = false && // buggy, see #1981
forMSIL && lookForReturns.found(tree)
- /** Transform tree `t' to { def f = t; f } where `f' is a fresh name
+ /** Transform tree `t` to { def f = t; f } where `f` is a fresh name
*/
def liftTree(tree: Tree) = {
- if (settings.debug.value)
- log("lifting tree at: " + (tree.pos))
- val sym = currentOwner.newMethod(tree.pos, unit.freshTermName("liftedTree"))
+ debuglog("lifting tree at: " + (tree.pos))
+ val sym = currentOwner.newMethod(unit.freshTermName("liftedTree"), tree.pos)
sym.setInfo(MethodType(List(), tree.tpe))
- new ChangeOwnerTraverser(currentOwner, sym).traverse(tree)
+ tree.changeOwner(currentOwner -> sym)
localTyper.typedPos(tree.pos)(Block(
- List(DefDef(sym, List(Nil), tree)),
+ List(DefDef(sym, ListOfNil, tree)),
Apply(Ident(sym), Nil)
))
}
@@ -505,193 +577,220 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
finally this.inConstructorFlag = saved
}
- if (isElidable(tree)) elideIntoUnit(tree)
- else tree match {
- case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- if (dd.symbol hasAnnotation VarargsClass) saveRepeatedParams(dd)
- withNeedLift(false) {
- if (tree.symbol.isClassConstructor) {
- atOwner(tree.symbol) {
- val rhs1 = (rhs: @unchecked) match {
- case Block(stats, expr) =>
- def transformInConstructor(stat: Tree) =
- withInConstructorFlag(INCONSTRUCTOR) { transform(stat) }
- val presupers = treeInfo.preSuperFields(stats) map transformInConstructor
- val rest = stats drop presupers.length
- val supercalls = rest take 1 map transformInConstructor
- val others = rest drop 1 map transform
- treeCopy.Block(rhs, presupers ::: supercalls ::: others, transform(expr))
+ val sym = tree.symbol
+ val result = (
+ // TODO - settings.noassertions.value temporarily retained to avoid
+ // breakage until a reasonable interface is settled upon.
+ if ((sym ne null) && (sym.elisionLevel.exists (_ < settings.elidebelow.value || settings.noassertions.value)))
+ replaceElidableTree(tree)
+ else translateSynchronized(tree) match {
+ case dd @ DefDef(mods, name, tparams, _, tpt, rhs) =>
+ // Remove default argument trees from parameter ValDefs, SI-4812
+ val vparamssNoRhs = dd.vparamss mapConserve (_ mapConserve {p =>
+ treeCopy.ValDef(p, p.mods, p.name, p.tpt, EmptyTree)
+ })
+
+ if (dd.symbol hasAnnotation VarargsClass) saveRepeatedParams(dd)
+
+ withNeedLift(false) {
+ if (dd.symbol.isClassConstructor) {
+ atOwner(sym) {
+ val rhs1 = (rhs: @unchecked) match {
+ case Block(stats, expr) =>
+ def transformInConstructor(stat: Tree) =
+ withInConstructorFlag(INCONSTRUCTOR) { transform(stat) }
+ val presupers = treeInfo.preSuperFields(stats) map transformInConstructor
+ val rest = stats drop presupers.length
+ val supercalls = rest take 1 map transformInConstructor
+ val others = rest drop 1 map transform
+ treeCopy.Block(rhs, presupers ::: supercalls ::: others, transform(expr))
+ }
+ treeCopy.DefDef(
+ dd, mods, name, transformTypeDefs(tparams),
+ transformValDefss(vparamssNoRhs), transform(tpt), rhs1)
}
- treeCopy.DefDef(
- tree, mods, name, transformTypeDefs(tparams),
- transformValDefss(vparamss), transform(tpt), rhs1)
+ } else {
+ super.transform(treeCopy.DefDef(dd, mods, name, tparams, vparamssNoRhs, tpt, rhs))
}
- } else {
+ }
+ case ValDef(_, _, _, rhs) =>
+ if (sym eq NoSymbol) throw new IllegalStateException("Encountered Valdef without symbol: "+ tree + " in "+ unit)
+ if (!sym.owner.isSourceMethod)
+ withNeedLift(true) { super.transform(tree) }
+ else
super.transform(tree)
+ case UnApply(fn, args) =>
+ val fn1 = withInPattern(false)(transform(fn))
+ val args1 = transformTrees(fn.symbol.name match {
+ case nme.unapply => args
+ case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args))
+ case _ => sys.error("internal error: UnApply node has wrong symbol")
+ })
+ treeCopy.UnApply(tree, fn1, args1)
+
+ case Apply(fn, args) =>
+ if (fn.symbol == Object_synchronized && shouldBeLiftedAnyway(args.head))
+ transform(treeCopy.Apply(tree, fn, List(liftTree(args.head))))
+ else {
+ val needLift = needTryLift || !fn.symbol.isLabel // SI-6749, no need to lift in args to label jumps.
+ withNeedLift(needLift) {
+ val formals = fn.tpe.paramTypes
+ treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
+ }
}
- }
- case ValDef(_, _, _, rhs) =>
- val sym = tree.symbol
- // a local variable that is mutable and free somewhere later should be lifted
- // as lambda lifting (coming later) will wrap 'rhs' in an Ref object.
- if (!sym.owner.isSourceMethod || (sym.isVariable && freeMutableVars(sym)))
+ case Assign(_: RefTree, _) =>
withNeedLift(true) { super.transform(tree) }
- else
- super.transform(tree)
-/*
- case Apply(Select(Block(List(), Function(vparams, body)), nme.apply), args) =>
- // perform beta-reduction; this helps keep view applications small
- println("beta-reduce1: "+tree)
- withNeedLift(true) {
- mainTransform(new TreeSubstituter(vparams map (_.symbol), args).transform(body))
- }
- case Apply(Select(Function(vparams, body), nme.apply), args) =>
-// if (List.forall2(vparams, args)((vparam, arg) => treeInfo.isAffineIn(body) ||
-// treeInfo.isPureExpr(arg))) =>
- // perform beta-reduction; this helps keep view applications small
- println("beta-reduce2: "+tree)
- withNeedLift(true) {
- mainTransform(new TreeSubstituter(vparams map (_.symbol), args).transform(body))
- }
-*/
- case UnApply(fn, args) =>
- inPattern = false
- val fn1 = transform(fn)
- inPattern = true
- val args1 = transformTrees(fn.symbol.name match {
- case nme.unapply => args
- case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeListFromReturnTypeSeq(fn.tpe))
- case _ => sys.error("internal error: UnApply node has wrong symbol")
- })
- treeCopy.UnApply(tree, fn1, args1)
-
- case Apply(fn, args) =>
- if (fn.symbol == Object_synchronized && shouldBeLiftedAnyway(args.head))
- transform(treeCopy.Apply(tree, fn, List(liftTree(args.head))))
- else
- withNeedLift(true) {
- val formals = fn.tpe.paramTypes
- treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
- }
+ case Assign(lhs, _) if lhs.symbol.owner != currentMethod || lhs.symbol.hasFlag(LAZY | ACCESSOR) =>
+ withNeedLift(true) { super.transform(tree) }
- case Assign(Select(_, _), _) =>
- withNeedLift(true) { super.transform(tree) }
+ case ret @ Return(_) if (isNonLocalReturn(ret)) =>
+ withNeedLift(true) { super.transform(ret) }
- case Assign(lhs, _) if lhs.symbol.owner != currentMethod || lhs.symbol.hasFlag(LAZY | ACCESSOR) =>
- withNeedLift(true) { super.transform(tree) }
+ case Try(_, Nil, _) =>
+ // try-finally does not need lifting: lifting is needed only for try-catch
+ // expressions that are evaluated in a context where the stack might not be empty.
+ // `finally` does not attempt to continue evaluation after an exception, so the fact
+ // that values on the stack are 'lost' does not matter
+ super.transform(tree)
- case ret @ Return(_) if (isNonLocalReturn(ret)) =>
- withNeedLift(true) { super.transform(ret) }
+ case Try(block, catches, finalizer) =>
+ if (needTryLift || shouldBeLiftedAnyway(tree)) transform(liftTree(tree))
+ else super.transform(tree)
- case Try(block, catches, finalizer) =>
- if (needTryLift || shouldBeLiftedAnyway(tree)) transform(liftTree(tree))
- else super.transform(tree)
+ case CaseDef(pat, guard, body) =>
+ val pat1 = withInPattern(true)(transform(pat))
+ treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
- case CaseDef(pat, guard, body) =>
- inPattern = true
- val pat1 = transform(pat)
- inPattern = false
- treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
+ case fun @ Function(_, _) =>
+ mainTransform(transformFunction(fun))
- case fun @ Function(_, _) =>
- mainTransform(transformFunction(fun))
+ case Template(_, _, _) =>
+ withInConstructorFlag(0) { super.transform(tree) }
- case Template(_, _, _) =>
- withInConstructorFlag(0) { super.transform(tree) }
-
- case _ =>
- val tree1 = super.transform(tree)
- if (isByNameRef(tree1)) {
- val tree2 = tree1 setType functionType(Nil, tree1.tpe)
- return {
- if (noApply contains tree2) tree2
- else localTyper.typedPos(tree1.pos)(Apply(Select(tree2, nme.apply), Nil))
+ case _ =>
+ val tree1 = super.transform(tree)
+ if (isByNameRef(tree1)) {
+ val tree2 = tree1 setType functionType(Nil, tree1.tpe)
+ return {
+ if (noApply contains tree2) tree2
+ else localTyper.typedPos(tree1.pos)(Apply(Select(tree2, nme.apply), Nil))
+ }
}
- }
- tree1
- }
- } setType {
- assert(tree.tpe != null, tree + " tpe is null")
- uncurryTreeType(tree.tpe)
+ tree1
+ }
+ )
+ assert(result.tpe != null, result + " tpe is null")
+ result setType uncurryTreeType(result.tpe)
}
- def postTransform(tree: Tree): Tree = atPhase(phase.next) {
+ def postTransform(tree: Tree): Tree = afterUncurry {
def applyUnary(): Tree = {
- def needsParens = tree.symbol.isMethod && !tree.tpe.isInstanceOf[PolyType] // TODO_NMT: verify that the inner tree of a type-apply also gets parens if the whole tree is a polymorphic nullary method application
- def repair = {
- if (!tree.tpe.isInstanceOf[MethodType]) // i.e., it's a NullaryMethodType
- tree.tpe = MethodType(Nil, tree.tpe.resultType) // TODO_NMT: I think the original `tree.tpe` was wrong, since that would set the method's resulttype to PolyType(Nil, restp) instead of restp
-
- atPos(tree.pos)(Apply(tree, Nil) setType tree.tpe.resultType)
+ // TODO_NMT: verify that the inner tree of a type-apply also gets parens if the
+ // whole tree is a polymorphic nullary method application
+ def removeNullary() = tree.tpe match {
+ case MethodType(_, _) => tree
+ case tp => tree setType MethodType(Nil, tp.resultType)
}
+ if (tree.symbol.isMethod && !tree.tpe.isInstanceOf[PolyType])
+ gen.mkApplyIfNeeded(removeNullary())
+ else if (tree.isType)
+ TypeTree(tree.tpe) setPos tree.pos
+ else
+ tree
+ }
- if (needsParens) repair
- else if (tree.isType) TypeTree(tree.tpe) setPos tree.pos
- else tree
+ def isThrowable(pat: Tree): Boolean = pat match {
+ case Typed(Ident(nme.WILDCARD), tpt) =>
+ tpt.tpe =:= ThrowableClass.tpe
+ case Bind(_, pat) =>
+ isThrowable(pat)
+ case _ =>
+ false
+ }
+
+ def isDefaultCatch(cdef: CaseDef) = isThrowable(cdef.pat) && cdef.guard.isEmpty
+
+ def postTransformTry(tree: Try) = {
+ val body = tree.block
+ val catches = tree.catches
+ val finalizer = tree.finalizer
+ if (opt.virtPatmat) {
+ if (catches exists (cd => !treeInfo.isCatchCase(cd)))
+ debugwarn("VPM BUG! illegal try/catch " + catches)
+ tree
+ } else if (catches forall treeInfo.isCatchCase) {
+ tree
+ } else {
+ val exname = unit.freshTermName("ex$")
+ val cases =
+ if ((catches exists treeInfo.isDefaultCase) || isDefaultCatch(catches.last)) catches
+ else catches :+ CaseDef(Ident(nme.WILDCARD), EmptyTree, Throw(Ident(exname)))
+ val catchall =
+ atPos(tree.pos) {
+ CaseDef(
+ Bind(exname, Ident(nme.WILDCARD)),
+ EmptyTree,
+ Match(Ident(exname), cases))
+ }
+ debuglog("rewrote try: " + catches + " ==> " + catchall);
+ val catches1 = localTyper.typedCases(
+ List(catchall), ThrowableClass.tpe, WildcardType)
+ treeCopy.Try(tree, body, catches1, finalizer)
+ }
}
tree match {
/* Some uncurry post transformations add members to templates.
- * When inside a template, the following sequence is available:
- * - newMembers
- * Any entry in this sequence will be added into the template
+ *
+ * Members registered by `addMembers` for the current template are added
* once the template transformation has finished.
*
* In particular, this case will add:
* - synthetic Java varargs forwarders for repeated parameters
*/
- case Template(parents, self, body) =>
+ case Template(_, _, _) =>
localTyper = typer.atOwner(tree, currentClass)
- val tmpl = if (!forMSIL || forMSIL) {
- treeCopy.Template(tree, parents, self, transformTrees(newMembers.toList) ::: body)
- } else super.transform(tree).asInstanceOf[Template]
- newMembers.clear
- tmpl
- case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- val rhs1 = nonLocalReturnKeys.get(tree.symbol) match {
- case None => rhs
- case Some(k) => atPos(rhs.pos)(nonLocalReturnTry(rhs, k, tree.symbol))
+ useNewMembers(currentClass) {
+ newMembers =>
+ deriveTemplate(tree)(transformTrees(newMembers) ::: _)
}
- val flatdd = treeCopy.DefDef(tree, mods, name, tparams, List(vparamss.flatten), tpt, rhs1)
- if (dd.symbol hasAnnotation VarargsClass) addJavaVarargsForwarders(dd, flatdd, tree)
- flatdd
- case Try(body, catches, finalizer) =>
- if (catches forall treeInfo.isCatchCase) tree
- else {
- val exname = unit.freshTermName("ex$")
- val cases =
- if ((catches exists treeInfo.isDefaultCase) || (catches.last match { // bq: handle try { } catch { ... case ex:Throwable => ...}
- case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) =>
- true
- case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) =>
- true
- case _ =>
- false
- })) catches
- else catches :+ CaseDef(Ident(nme.WILDCARD), EmptyTree, Throw(Ident(exname)))
- val catchall =
- atPos(tree.pos) {
- CaseDef(
- Bind(exname, Ident(nme.WILDCARD)),
- EmptyTree,
- Match(Ident(exname), cases))
+
+ case dd @ DefDef(_, _, _, vparamss0, _, rhs0) =>
+ val (newParamss, newRhs): (List[List[ValDef]], Tree) =
+ if (dependentParamTypeErasure isDependent dd)
+ dependentParamTypeErasure erase dd
+ else {
+ val vparamss1 = vparamss0 match {
+ case _ :: Nil => vparamss0
+ case _ => vparamss0.flatten :: Nil
}
- if (settings.debug.value) log("rewrote try: " + catches + " ==> " + catchall);
- val catches1 = localTyper.typedCases(
- tree, List(catchall), ThrowableClass.tpe, WildcardType)
- treeCopy.Try(tree, body, catches1, finalizer)
- }
+ (vparamss1, rhs0)
+ }
+
+ val flatdd = copyDefDef(dd)(
+ vparamss = newParamss,
+ rhs = nonLocalReturnKeys get dd.symbol match {
+ case Some(k) => atPos(newRhs.pos)(nonLocalReturnTry(newRhs, k, dd.symbol))
+ case None => newRhs
+ }
+ )
+ addJavaVarargsForwarders(dd, flatdd)
+
+ case tree: Try =>
+ postTransformTry(tree)
+
case Apply(Apply(fn, args), args1) =>
treeCopy.Apply(tree, fn, args ::: args1)
+
case Ident(name) =>
- assert(name != tpnme.WILDCARD_STAR)
+ assert(name != tpnme.WILDCARD_STAR, tree)
applyUnary()
case Select(_, _) | TypeApply(_, _) =>
applyUnary()
- case ret @ Return(expr) if (isNonLocalReturn(ret)) =>
- if (settings.debug.value) log("non local return in "+ret.symbol+" from "+currentOwner.enclMethod)
+ case ret @ Return(expr) if isNonLocalReturn(ret) =>
+ log("non-local return from %s to %s".format(currentOwner.enclMethod, ret.symbol))
atPos(ret.pos)(nonLocalReturnThrow(expr, ret.symbol))
case TypeTree() =>
tree
@@ -700,6 +799,99 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
}
}
+ /**
+ * When we concatenate parameter lists, formal parameter types that were dependent
+ * on prior parameter values will no longer be correctly scoped.
+ *
+ * For example:
+ *
+ * {{{
+ * def foo(a: A)(b: a.B): a.type = {b; b}
+ * // after uncurry
+ * def foo(a: A, b: a/* NOT IN SCOPE! */.B): a.B = {b; b}
+ * }}}
+ *
+ * This violates the principle that each compiler phase should produce trees that
+ * can be retyped (see [[scala.tools.nsc.typechecker.TreeCheckers]]), and causes
+ * a practical problem in `erasure`: it is not able to correctly determine if
+ * such a signature overrides a corresponding signature in a parent. (SI-6443).
+ *
+ * This transformation erases the dependent method types by:
+ * - Widening the formal parameter type to existentially abstract
+ * over the prior parameters (using `packSymbols`). This transformation
+ * is performed in the the `InfoTransform`er [[scala.reflect.internal.transform.UnCurry]].
+ * - Inserting casts in the method body to cast to the original,
+ * precise type.
+ *
+ * For the example above, this results in:
+ *
+ * {{{
+ * def foo(a: A, b: a.B forSome { val a: A }): a.B = { val b$1 = b.asInstanceOf[a.B]; b$1; b$1 }
+ * }}}
+ */
+ private object dependentParamTypeErasure {
+ sealed abstract class ParamTransform {
+ def param: ValDef
+ }
+ final case class Identity(param: ValDef) extends ParamTransform
+ final case class Packed(param: ValDef, tempVal: ValDef) extends ParamTransform
+
+ def isDependent(dd: DefDef): Boolean =
+ beforeUncurry {
+ val methType = dd.symbol.info
+ methType.isDependentMethodType && mexists(methType.paramss)(_.info exists (_.isImmediatelyDependent))
+ }
+
+ /**
+ * @return (newVparamss, newRhs)
+ */
+ def erase(dd: DefDef): (List[List[ValDef]], Tree) = {
+ import dd.{ vparamss, rhs }
+ val paramTransforms: List[ParamTransform] =
+ map2(vparamss.flatten, dd.symbol.info.paramss.flatten) { (p, infoParam) =>
+ val packedType = infoParam.info
+ if (packedType =:= p.symbol.info) Identity(p)
+ else {
+ // The Uncurry info transformer existentially abstracted over value parameters
+ // from the previous parameter lists.
+
+ // Change the type of the param symbol
+ p.symbol updateInfo packedType
+
+ // Create a new param tree
+ val newParam: ValDef = copyValDef(p)(tpt = TypeTree(packedType))
+
+ // Within the method body, we'll cast the parameter to the originally
+ // declared type and assign this to a synthetic val. Later, we'll patch
+ // the method body to refer to this, rather than the parameter.
+ val tempVal: ValDef = {
+ val tempValName = unit freshTermName (p.name + "$")
+ val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(p.symbol.info)
+ atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), p.symbol.info)))
+ }
+ Packed(newParam, tempVal)
+ }
+ }
+
+ val allParams = paramTransforms map (_.param)
+ val (packedParams, tempVals) = paramTransforms.collect {
+ case Packed(param, tempVal) => (param, tempVal)
+ }.unzip
+
+ val rhs1 = if (tempVals.isEmpty) rhs else {
+ localTyper.typedPos(rhs.pos) {
+ // Patch the method body to refer to the temp vals
+ val rhsSubstituted = rhs.substituteSymbols(packedParams map (_.symbol), tempVals map (_.symbol))
+ // The new method body: { val p$1 = p.asInstanceOf[<dependent type>]; ...; <rhsSubstituted> }
+ Block(tempVals, rhsSubstituted)
+ }
+ }
+
+ (allParams :: Nil, rhs1)
+ }
+ }
+
+
/* Analyzes repeated params if method is annotated as `varargs`.
* If the repeated params exist, it saves them into the `repeatedParams` map,
* which is used later.
@@ -716,11 +908,11 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
/* Called during post transform, after the method argument lists have been flattened.
* It looks for the method in the `repeatedParams` map, and generates a Java-style
- * varargs forwarder. It then adds the forwarder to the `newMembers` sequence.
+ * varargs forwarder.
*/
- private def addJavaVarargsForwarders(dd: DefDef, flatdd: DefDef, tree: Tree): Unit = {
- if (!repeatedParams.contains(dd.symbol))
- return
+ private def addJavaVarargsForwarders(dd: DefDef, flatdd: DefDef): DefDef = {
+ if (!dd.symbol.hasAnnotation(VarargsClass) || !repeatedParams.contains(dd.symbol))
+ return flatdd
def toSeqType(tp: Type): Type = {
val arg = elementType(ArrayClass, tp)
@@ -739,10 +931,10 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
)
}
- val reps = repeatedParams(dd.symbol)
- val rpsymbols = reps.map(_.symbol).toSet
- val theTyper = typer.atOwner(tree, currentClass)
- val flatparams = flatdd.vparamss.head
+ val reps = repeatedParams(dd.symbol)
+ val rpsymbols = reps.map(_.symbol).toSet
+ val theTyper = typer.atOwner(dd, currentClass)
+ val flatparams = flatdd.vparamss.head
// create the type
val forwformals = flatparams map {
@@ -750,8 +942,8 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
case p => p.symbol.tpe
}
val forwresult = dd.symbol.tpe.finalResultType
- val forwformsyms = (forwformals, flatparams).zipped map ((tp, oldparam) =>
- currentClass.newValueParameter(oldparam.symbol.pos, oldparam.name).setInfo(tp)
+ val forwformsyms = map2(forwformals, flatparams)((tp, oldparam) =>
+ currentClass.newValueParameter(oldparam.name, oldparam.symbol.pos).setInfo(tp)
)
def mono = MethodType(forwformsyms, forwresult)
val forwtype = dd.symbol.tpe match {
@@ -760,15 +952,11 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
}
// create the symbol
- val forwsym = (
- currentClass.newMethod(dd.pos, dd.name)
- . setFlag (VARARGS | SYNTHETIC | flatdd.symbol.flags)
- . setInfo (forwtype)
- )
+ val forwsym = currentClass.newMethod(dd.name, dd.pos, VARARGS | SYNTHETIC | flatdd.symbol.flags) setInfo forwtype
// create the tree
val forwtree = theTyper.typedPos(dd.pos) {
- val locals = (forwsym ARGS, flatparams).zipped map {
+ val locals = map2(forwsym ARGS, flatparams) {
case (_, fp) if !rpsymbols(fp.symbol) => null
case (argsym, fp) =>
Block(Nil,
@@ -778,7 +966,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
)
)
}
- val seqargs = (locals, forwsym ARGS).zipped map {
+ val seqargs = map2(locals, forwsym ARGS) {
case (null, argsym) => Ident(argsym)
case (l, _) => l
}
@@ -797,64 +985,10 @@ abstract class UnCurry extends InfoTransform with TypingTransformers with ast.Tr
case None =>
// enter symbol into scope
currentClass.info.decls enter forwsym
-
- // add the method to `newMembers`
- newMembers += forwtree
+ addNewMember(forwtree)
}
- }
- }
-
- /** Set of mutable local variables that are free in some inner method. */
- private val freeMutableVars: mutable.Set[Symbol] = new mutable.HashSet
- /** PP: There is apparently some degree of overlap between the CAPTURED
- * flag and the role being filled here. I think this is how this was able
- * to go for so long looking only at DefDef and Ident nodes, as bugs
- * would only emerge under more complicated conditions such as #3855.
- * I'll try to figure it all out, but if someone who already knows the
- * whole story wants to fill it in, that too would be great.
- */
- private val freeLocalsTraverser = new Traverser {
- var currentMethod: Symbol = NoSymbol
- var maybeEscaping = false
-
- def withEscaping(body: => Unit) {
- val saved = maybeEscaping
- maybeEscaping = true
- try body
- finally maybeEscaping = saved
- }
-
- override def traverse(tree: Tree) = tree match {
- case DefDef(_, _, _, _, _, _) =>
- val lastMethod = currentMethod
- currentMethod = tree.symbol
- super.traverse(tree)
- currentMethod = lastMethod
- /** A method call with a by-name parameter represents escape. */
- case Apply(fn, args) if fn.symbol.paramss.nonEmpty =>
- traverse(fn)
- (fn.symbol.paramss.head, args).zipped foreach { (param, arg) =>
- if (param.tpe != null && isByNameParamType(param.tpe))
- withEscaping(traverse(arg))
- else
- traverse(arg)
- }
- /** The rhs of a closure represents escape. */
- case Function(vparams, body) =>
- vparams foreach traverse
- withEscaping(traverse(body))
-
- /** The appearance of an ident outside the method where it was defined or
- * anytime maybeEscaping is true implies escape.
- */
- case Ident(_) =>
- val sym = tree.symbol
- if (sym.isVariable && sym.owner.isMethod && (maybeEscaping || sym.owner != currentMethod))
- freeMutableVars += sym
- case _ =>
- super.traverse(tree)
+ flatdd
}
}
-
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
new file mode 100644
index 0000000..dbe0831
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -0,0 +1,644 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.tools.nsc.symtab._
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.HashSet
+
+
+trait Logic extends Debugging {
+ import PatternMatchingStats._
+
+ private def max(xs: Seq[Int]) = if (xs isEmpty) 0 else xs max
+ private def alignedColumns(cols: Seq[AnyRef]): Seq[String] = {
+ def toString(x: AnyRef) = if (x eq null) "" else x.toString
+ if (cols.isEmpty || cols.tails.isEmpty) cols map toString
+ else {
+ val colLens = cols map (c => toString(c).length)
+ val maxLen = max(colLens)
+ val avgLen = colLens.sum/colLens.length
+ val goalLen = maxLen min avgLen*2
+ def pad(s: String) = {
+ val toAdd = ((goalLen - s.length) max 0) + 2
+ (" " * (toAdd/2)) + s + (" " * (toAdd/2 + (toAdd%2)))
+ }
+ cols map (x => pad(toString(x)))
+ }
+ }
+
+ def alignAcrossRows(xss: List[List[AnyRef]], sep: String, lineSep: String = "\n"): String = {
+ val maxLen = max(xss map (_.length))
+ val padded = xss map (xs => xs ++ List.fill(maxLen - xs.length)(null))
+ padded.transpose.map(alignedColumns).transpose map (_.mkString(sep)) mkString(lineSep)
+ }
+
+ // http://www.cis.upenn.edu/~cis510/tcl/chap3.pdf
+ // http://users.encs.concordia.ca/~ta_ahmed/ms_thesis.pdf
+ // propositional logic with constants and equality
+ trait PropositionalLogic {
+ type Type
+ type Tree
+
+ class Prop
+ case class Eq(p: Var, q: Const) extends Prop
+
+ type Const
+
+ type TypeConst <: Const
+ def TypeConst: TypeConstExtractor
+ trait TypeConstExtractor {
+ def apply(tp: Type): Const
+ }
+
+ type ValueConst <: Const
+ def ValueConst: ValueConstExtractor
+ trait ValueConstExtractor {
+ def apply(p: Tree): Const
+ }
+
+ val NullConst: Const
+
+ type Var <: AbsVar
+ val Var: VarExtractor
+ trait VarExtractor {
+ def apply(x: Tree): Var
+ def unapply(v: Var): Some[Tree]
+ }
+
+ // resets hash consing -- only supposed to be called by TreeMakersToProps
+ def prepareNewAnalysis(): Unit
+
+ trait AbsVar {
+ // indicate we may later require a prop for V = C
+ def registerEquality(c: Const): Unit
+
+ // call this to indicate null is part of the domain
+ def registerNull(): Unit
+
+ // can this variable be null?
+ def mayBeNull: Boolean
+
+ // compute the domain and return it (call registerNull first!)
+ def domainSyms: Option[Set[Sym]]
+
+ // the symbol for this variable being equal to its statically known type
+ // (only available if registerEquality has been called for that type before)
+ def symForStaticTp: Option[Sym]
+
+ // for this var, call it V, turn V = C into the equivalent proposition in boolean logic
+ // registerEquality(c) must have been called prior to this call
+ // in fact, all equalities relevant to this variable must have been registered
+ def propForEqualsTo(c: Const): Prop
+
+ // populated by registerEquality
+ // once implications has been called, must not call registerEquality anymore
+ def implications: List[(Sym, List[Sym], List[Sym])]
+ }
+
+ // would be nice to statically check whether a prop is equational or pure,
+ // but that requires typing relations like And(x: Tx, y: Ty) : (if(Tx == PureProp && Ty == PureProp) PureProp else Prop)
+ case class And(a: Prop, b: Prop) extends Prop
+ case class Or(a: Prop, b: Prop) extends Prop
+ case class Not(a: Prop) extends Prop
+
+ case object True extends Prop
+ case object False extends Prop
+
+ // symbols are propositions
+ abstract case class Sym(val variable: Var, val const: Const) extends Prop {
+ private[this] val id = Sym.nextSymId
+
+ override def toString = variable +"="+ const +"#"+ id
+ }
+ class UniqueSym(variable: Var, const: Const) extends Sym(variable, const)
+ object Sym {
+ private val uniques: HashSet[Sym] = new HashSet("uniques", 512)
+ def apply(variable: Var, const: Const): Sym = {
+ val newSym = new UniqueSym(variable, const)
+ (uniques findEntryOrUpdate newSym)
+ }
+ private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
+ }
+
+ def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
+ def \/(props: Iterable[Prop]) = if (props.isEmpty) False else props.reduceLeft(Or(_, _))
+
+ trait PropTraverser {
+ def apply(x: Prop): Unit = x match {
+ case And(a, b) => apply(a); apply(b)
+ case Or(a, b) => apply(a); apply(b)
+ case Not(a) => apply(a)
+ case Eq(a, b) => applyVar(a); applyConst(b)
+ case _ =>
+ }
+ def applyVar(x: Var): Unit = {}
+ def applyConst(x: Const): Unit = {}
+ }
+
+ def gatherVariables(p: Prop): Set[Var] = {
+ val vars = new mutable.HashSet[Var]()
+ (new PropTraverser {
+ override def applyVar(v: Var) = vars += v
+ })(p)
+ vars.toSet
+ }
+
+ trait PropMap {
+ def apply(x: Prop): Prop = x match { // TODO: mapConserve
+ case And(a, b) => And(apply(a), apply(b))
+ case Or(a, b) => Or(apply(a), apply(b))
+ case Not(a) => Not(apply(a))
+ case p => p
+ }
+ }
+
+ // to govern how much time we spend analyzing matches for unreachability/exhaustivity
+ object AnalysisBudget {
+ import scala.tools.cmd.FromString.IntFromString
+ val max = sys.props.get("scalac.patmat.analysisBudget").collect(IntFromString.orElse{case "off" => Integer.MAX_VALUE}).getOrElse(256)
+
+ abstract class Exception(val advice: String) extends RuntimeException("CNF budget exceeded")
+
+ object exceeded extends Exception(
+ s"(The analysis required more space than allowed. Please try with scalac -Dscalac.patmat.analysisBudget=${AnalysisBudget.max*2} or -Dscalac.patmat.analysisBudget=off.)")
+
+ }
+
+ // convert finite domain propositional logic with subtyping to pure boolean propositional logic
+ // a type test or a value equality test are modelled as a variable being equal to some constant
+ // a variable V may be assigned multiple constants, as long as they do not contradict each other
+ // according to subtyping, e.g., V = ConstantType(1) and V = Int are valid assignments
+ // we rewrite V = C to a fresh boolean symbol, and model what we know about the variable's domain
+ // in a prelude (the equality axioms)
+ // 1. a variable with a closed domain (of a sealed type) must be assigned one of the instantiatable types in its domain
+ // 2. for each variable V in props, and each constant C it is compared to,
+ // compute which assignments imply each other (as in the example above: V = 1 implies V = Int)
+ // and which assignments are mutually exclusive (V = String implies -(V = Int))
+ //
+ // note that this is a conservative approximation: V = Constant(A) and V = Constant(B)
+ // are considered mutually exclusive (and thus both cases are considered reachable in {case A => case B =>}),
+ // even though A may be equal to B (and thus the second case is not "dynamically reachable")
+ //
+ // TODO: for V1 representing x1 and V2 standing for x1.head, encode that
+ // V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable)
+ // may throw an AnalysisBudget.Exception
+ def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Formula, List[Formula]) = {
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaVarEq) else null
+
+ val vars = new scala.collection.mutable.HashSet[Var]
+
+ object gatherEqualities extends PropTraverser {
+ override def apply(p: Prop) = p match {
+ case Eq(v, c) =>
+ vars += v
+ v.registerEquality(c)
+ case _ => super.apply(p)
+ }
+ }
+
+ object rewriteEqualsToProp extends PropMap {
+ override def apply(p: Prop) = p match {
+ case Eq(v, c) => v.propForEqualsTo(c)
+ case _ => super.apply(p)
+ }
+ }
+
+ props foreach gatherEqualities.apply
+ if (modelNull) vars foreach (_.registerNull)
+
+ val pure = props map (p => eqFreePropToSolvable(rewriteEqualsToProp(p)))
+
+ val eqAxioms = formulaBuilder
+ @inline def addAxiom(p: Prop) = addFormula(eqAxioms, eqFreePropToSolvable(p))
+
+ debug.patmat("removeVarEq vars: "+ vars)
+ vars.foreach { v =>
+ // if v.domainSyms.isEmpty, we must consider the domain to be infinite
+ // otherwise, since the domain fully partitions the type of the value,
+ // exactly one of the types (and whatever it implies, imposed separately) must be chosen
+ // consider X ::= A | B | C, and A => B
+ // coverage is formulated as: A \/ B \/ C and the implications are
+ v.domainSyms foreach { dsyms => addAxiom(\/(dsyms)) }
+
+ // when this variable cannot be null the equality corresponding to the type test `(x: T)`, where T is x's static type,
+ // is always true; when the variable may be null we use the implication `(x != null) => (x: T)` for the axiom
+ v.symForStaticTp foreach { symForStaticTp =>
+ if (v.mayBeNull) addAxiom(Or(v.propForEqualsTo(NullConst), symForStaticTp))
+ else addAxiom(symForStaticTp)
+ }
+
+ v.implications foreach { case (sym, implied, excluded) =>
+ // when sym is true, what must hold...
+ implied foreach (impliedSym => addAxiom(Or(Not(sym), impliedSym)))
+ // ... and what must not?
+ excluded foreach (excludedSym => addAxiom(Or(Not(sym), Not(excludedSym))))
+ }
+ }
+
+ debug.patmat("eqAxioms:\n"+ cnfString(toFormula(eqAxioms)))
+ debug.patmat("pure:"+ pure.map(p => cnfString(p)).mkString("\n"))
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaVarEq, start)
+
+ (toFormula(eqAxioms), pure)
+ }
+
+
+ // an interface that should be suitable for feeding a SAT solver when the time comes
+ type Formula
+ type FormulaBuilder
+
+ // creates an empty formula builder to which more formulae can be added
+ def formulaBuilder: FormulaBuilder
+
+ // val f = formulaBuilder; addFormula(f, f1); ... addFormula(f, fN)
+ // toFormula(f) == andFormula(f1, andFormula(..., fN))
+ def addFormula(buff: FormulaBuilder, f: Formula): Unit
+ def toFormula(buff: FormulaBuilder): Formula
+
+ // the conjunction of formulae `a` and `b`
+ def andFormula(a: Formula, b: Formula): Formula
+
+ // equivalent formula to `a`, but simplified in a lightweight way (drop duplicate clauses)
+ def simplifyFormula(a: Formula): Formula
+
+ // may throw an AnalysisBudget.Exception
+ def propToSolvable(p: Prop): Formula = {
+ val (eqAxioms, pure :: Nil) = removeVarEq(List(p), modelNull = false)
+ andFormula(eqAxioms, pure)
+ }
+
+ // may throw an AnalysisBudget.Exception
+ def eqFreePropToSolvable(p: Prop): Formula
+ def cnfString(f: Formula): String
+
+ type Model = Map[Sym, Boolean]
+ val EmptyModel: Model
+ val NoModel: Model
+
+ def findModelFor(f: Formula): Model
+ def findAllModelsFor(f: Formula): List[Model]
+ }
+}
+
+trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
+ trait TreesAndTypesDomain extends PropositionalLogic with CheckableTreeAndTypeAnalysis {
+ type Type = global.Type
+ type Tree = global.Tree
+
+ // resets hash consing -- only supposed to be called by TreeMakersToProps
+ def prepareNewAnalysis(): Unit = { Var.resetUniques(); Const.resetUniques() }
+
+ object Var extends VarExtractor {
+ private var _nextId = 0
+ def nextId = {_nextId += 1; _nextId}
+
+ def resetUniques() = {_nextId = 0; uniques.clear()}
+ private val uniques = new mutable.HashMap[Tree, Var]
+ def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe))
+ def unapply(v: Var) = Some(v.path)
+ }
+ class Var(val path: Tree, staticTp: Type) extends AbsVar {
+ private[this] val id: Int = Var.nextId
+
+ // private[this] var canModify: Option[Array[StackTraceElement]] = None
+ private[this] def ensureCanModify() = {} //if (canModify.nonEmpty) debug.patmat("BUG!"+ this +" modified after having been observed: "+ canModify.get.mkString("\n"))
+
+ private[this] def observed() = {} //canModify = Some(Thread.currentThread.getStackTrace)
+
+ // don't access until all potential equalities have been registered using registerEquality
+ private[this] val symForEqualsTo = new mutable.HashMap[Const, Sym]
+
+ // when looking at the domain, we only care about types we can check at run time
+ val staticTpCheckable: Type = checkableType(staticTp)
+
+ private[this] var _mayBeNull = false
+ def registerNull(): Unit = { ensureCanModify; if (NullTp <:< staticTpCheckable) _mayBeNull = true }
+ def mayBeNull: Boolean = _mayBeNull
+
+ // case None => domain is unknown,
+ // case Some(List(tps: _*)) => domain is exactly tps
+ // we enumerate the subtypes of the full type, as that allows us to filter out more types statically,
+ // once we go to run-time checks (on Const's), convert them to checkable types
+ // TODO: there seems to be bug for singleton domains (variable does not show up in model)
+ lazy val domain: Option[Set[Const]] = {
+ val subConsts = enumerateSubtypes(staticTp).map{ tps =>
+ tps.toSet[Type].map{ tp =>
+ val domainC = TypeConst(tp)
+ registerEquality(domainC)
+ domainC
+ }
+ }
+
+ val allConsts =
+ if (mayBeNull) {
+ registerEquality(NullConst)
+ subConsts map (_ + NullConst)
+ } else
+ subConsts
+
+ observed; allConsts
+ }
+
+ // populate equalitySyms
+ // don't care about the result, but want only one fresh symbol per distinct constant c
+ def registerEquality(c: Const): Unit = {ensureCanModify; symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
+
+ // return the symbol that represents this variable being equal to the constant `c`, if it exists, otherwise False (for robustness)
+ // (registerEquality(c) must have been called prior, either when constructing the domain or from outside)
+ def propForEqualsTo(c: Const): Prop = {observed; symForEqualsTo.getOrElse(c, False)}
+
+ // [implementation NOTE: don't access until all potential equalities have been registered using registerEquality]p
+ /** the information needed to construct the boolean proposition that encods the equality proposition (V = C)
+ *
+ * that models a type test pattern `_: C` or constant pattern `C`, where the type test gives rise to a TypeConst C,
+ * and the constant pattern yields a ValueConst C
+ *
+ * for exhaustivity, we really only need implication (e.g., V = 1 implies that V = 1 /\ V = Int, if both tests occur in the match,
+ * and thus in this variable's equality symbols), but reachability also requires us to model things like V = 1 precluding V = "1"
+ */
+ lazy val implications = {
+ /** when we know V = C, which other equalities must hold
+ *
+ * in general, equality to some type implies equality to its supertypes
+ * (this multi-valued kind of equality is necessary for unreachability)
+ * note that we use subtyping as a model for implication between instanceof tests
+ * i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
+ * unfortunately this is not true in general (see e.g. SI-6022)
+ */
+ def implies(lower: Const, upper: Const): Boolean =
+ // values and null
+ lower == upper ||
+ // type implication
+ (lower != NullConst && !upper.isValue &&
+ instanceOfTpImplies(if (lower.isValue) lower.wideTp else lower.tp, upper.tp))
+
+ // if(r) debug.patmat("implies : "+(lower, lower.tp, upper, upper.tp))
+ // else debug.patmat("NOT implies: "+(lower, upper))
+
+
+ /** Does V=A preclude V=B?
+ *
+ * (0) A or B must be in the domain to draw any conclusions.
+ *
+ * For example, knowing the the scrutinee is *not* true does not
+ * statically exclude it from being `X`, because that is an opaque
+ * Boolean.
+ *
+ * val X = true
+ * (true: Boolean) match { case true => case X <reachable> }
+ *
+ * (1) V = null excludes assignment to any other constant (modulo point #0). This includes
+ * both values and type tests (which are both modelled here as `Const`)
+ * (2) V = A and V = B, for A and B domain constants, are mutually exclusive unless A == B
+ *
+ * (3) We only reason about test tests as being excluded by null assignments, otherwise we
+ * only consider value assignments.
+ * TODO: refine this, a == 0 excludes a: String, or `a: Int` excludes `a: String`
+ * (since no value can be of both types. See also SI-7211)
+ *
+ * NOTE: V = 1 does not preclude V = Int, or V = Any, it could be said to preclude
+ * V = String, but we don't model that.
+ */
+ def excludes(a: Const, b: Const): Boolean = {
+ val bothInDomain = domain exists (d => d(a) && d(b))
+ val eitherIsNull = a == NullConst || b == NullConst
+ val bothAreValues = a.isValue && b.isValue
+ bothInDomain && (eitherIsNull || bothAreValues) && (a != b)
+ }
+
+ // if(r) debug.patmat("excludes : "+(a, a.tp, b, b.tp))
+ // else debug.patmat("NOT excludes: "+(a, b))
+
+/*
+[ HALF BAKED FANCINESS: //!equalitySyms.exists(common => implies(common.const, a) && implies(common.const, b)))
+ when type tests are involved, we reason (conservatively) under a closed world assumption,
+ since we are really only trying to counter the effects of the symbols that we introduce to model type tests
+ we don't aim to model the whole subtyping hierarchy, simply to encode enough about subtyping to do unreachability properly
+
+ consider the following hierarchy:
+
+ trait A
+ trait B
+ trait C
+ trait AB extends B with A
+
+ // two types are mutually exclusive if there is no equality symbol whose constant implies both
+ object Test extends App {
+ def foo(x: Any) = x match {
+ case _ : C => println("C")
+ case _ : AB => println("AB")
+ case _ : (A with B) => println("AB'")
+ case _ : B => println("B")
+ case _ : A => println("A")
+ }
+
+ of course this kind of reasoning is not true in general,
+ but we can safely pretend types are mutually exclusive as long as there are no counter-examples in the match we're analyzing}
+*/
+
+ val excludedPair = new mutable.HashSet[ExcludedPair]
+
+ case class ExcludedPair(a: Const, b: Const) {
+ override def equals(o: Any) = o match {
+ case ExcludedPair(aa, bb) => (a == aa && b == bb) || (a == bb && b == aa)
+ case _ => false
+ }
+ // make ExcludedPair(a, b).hashCode == ExcludedPair(b, a).hashCode
+ override def hashCode = a.hashCode ^ b.hashCode
+ }
+
+ equalitySyms map { sym =>
+ // if we've already excluded the pair at some point (-A \/ -B), then don't exclude the symmetric one (-B \/ -A)
+ // (nor the positive implications -B \/ A, or -A \/ B, which would entail the equality axioms falsifying the whole formula)
+ val todo = equalitySyms filterNot (b => (b.const == sym.const) || excludedPair(ExcludedPair(b.const, sym.const)))
+ val (excluded, notExcluded) = todo partition (b => excludes(sym.const, b.const))
+ val implied = notExcluded filter (b => implies(sym.const, b.const))
+
+ debug.patmat("eq axioms for: "+ sym.const)
+ debug.patmat("excluded: "+ excluded)
+ debug.patmat("implied: "+ implied)
+
+ excluded foreach { excludedSym => excludedPair += ExcludedPair(sym.const, excludedSym.const)}
+
+ (sym, implied, excluded)
+ }
+ }
+
+ // accessing after calling registerNull will result in inconsistencies
+ lazy val domainSyms: Option[Set[Sym]] = domain map { _ map symForEqualsTo }
+
+ lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable))
+
+ // don't access until all potential equalities have been registered using registerEquality
+ private lazy val equalitySyms = {observed; symForEqualsTo.values.toList}
+
+ // don't call until all equalities have been registered and registerNull has been called (if needed)
+ def describe = {
+ def domain_s = domain match {
+ case Some(d) => d mkString (" ::= ", " | ", "// "+ symForEqualsTo.keys)
+ case _ => symForEqualsTo.keys mkString (" ::= ", " | ", " | ...")
+ }
+ s"$this: ${staticTp}${domain_s} // = $path"
+ }
+ override def toString = "V"+ id
+ }
+
+
+ import global.{ConstantType, Constant, SingletonType, Literal, Ident, singleType}
+ import global.definitions.{AnyClass, UnitClass}
+
+
+ // all our variables range over types
+ // a literal constant becomes ConstantType(Constant(v)) when the type allows it (roughly, anyval + string + null)
+ // equality between variables: SingleType(x) (note that pattern variables cannot relate to each other -- it's always patternVar == nonPatternVar)
+ object Const {
+ def resetUniques() = {_nextTypeId = 0; _nextValueId = 0; uniques.clear() ; trees.clear()}
+
+ private var _nextTypeId = 0
+ def nextTypeId = {_nextTypeId += 1; _nextTypeId}
+
+ private var _nextValueId = 0
+ def nextValueId = {_nextValueId += 1; _nextValueId}
+
+ private val uniques = new mutable.HashMap[Type, Const]
+ private[TreesAndTypesDomain] def unique(tp: Type, mkFresh: => Const): Const =
+ uniques.get(tp).getOrElse(
+ uniques.find {case (oldTp, oldC) => oldTp =:= tp} match {
+ case Some((_, c)) =>
+ debug.patmat("unique const: "+ (tp, c))
+ c
+ case _ =>
+ val fresh = mkFresh
+ debug.patmat("uniqued const: "+ (tp, fresh))
+ uniques(tp) = fresh
+ fresh
+ })
+
+ private val trees = mutable.HashSet.empty[Tree]
+
+ // hashconsing trees (modulo value-equality)
+ private[TreesAndTypesDomain] def uniqueTpForTree(t: Tree): Type =
+ // a new type for every unstable symbol -- only stable value are uniqued
+ // technically, an unreachable value may change between cases
+ // thus, the failure of a case that matches on a mutable value does not exclude the next case succeeding
+ // (and thuuuuus, the latter case must be considered reachable)
+ if (!t.symbol.isStable) t.tpe.narrow
+ else trees find (a => a.correspondsStructure(t)(sameValue)) match {
+ case Some(orig) =>
+ debug.patmat("unique tp for tree: "+ (orig, orig.tpe))
+ orig.tpe
+ case _ =>
+ // duplicate, don't mutate old tree (TODO: use a map tree -> type instead?)
+ val treeWithNarrowedType = t.duplicate setType t.tpe.narrow
+ debug.patmat("uniqued: "+ (t, t.tpe, treeWithNarrowedType.tpe))
+ trees += treeWithNarrowedType
+ treeWithNarrowedType.tpe
+ }
+ }
+
+ sealed abstract class Const {
+ def tp: Type
+ def wideTp: Type
+
+ def isAny = wideTp.typeSymbol == AnyClass
+ def isValue: Boolean //= tp.isStable
+
+ // note: use reference equality on Const since they're hash-consed (doing type equality all the time is too expensive)
+ // the equals inherited from AnyRef does just this
+ }
+
+ // find most precise super-type of tp that is a class
+ // we skip non-class types (singleton types, abstract types) so that we can
+ // correctly compute how types relate in terms of the values they rule out
+ // e.g., when we know some value must be of type T, can it still be of type S? (this is the positive formulation of what `excludes` on Const computes)
+ // since we're talking values, there must have been a class involved in creating it, so rephrase our types in terms of classes
+ // (At least conceptually: `true` is an instance of class `Boolean`)
+ private def widenToClass(tp: Type): Type =
+ if (tp.typeSymbol.isClass) tp
+ else tp.baseType(tp.baseClasses.head)
+
+ object TypeConst extends TypeConstExtractor {
+ def apply(tp: Type) = {
+ if (tp =:= NullTp) NullConst
+ else if (tp.isInstanceOf[SingletonType]) ValueConst.fromType(tp)
+ else Const.unique(tp, new TypeConst(tp))
+ }
+ def unapply(c: TypeConst): Some[Type] = Some(c.tp)
+ }
+
+ // corresponds to a type test that does not imply any value-equality (well, except for outer checks, which we don't model yet)
+ sealed class TypeConst(val tp: Type) extends Const {
+ assert(!(tp =:= NullTp))
+ /*private[this] val id: Int = */ Const.nextTypeId
+
+ val wideTp = widenToClass(tp)
+ def isValue = false
+ override def toString = tp.toString //+"#"+ id
+ }
+
+ // p is a unique type or a constant value
+ object ValueConst extends ValueConstExtractor {
+ def fromType(tp: Type) = {
+ assert(tp.isInstanceOf[SingletonType])
+ val toString = tp match {
+ case ConstantType(c) => c.escapedStringValue
+ case _ if tp.typeSymbol.isModuleClass => tp.typeSymbol.name.toString
+ case _ => tp.toString
+ }
+ Const.unique(tp, new ValueConst(tp, tp.widen, toString))
+ }
+ def apply(p: Tree) = {
+ val tp = p.tpe.normalize
+ if (tp =:= NullTp) NullConst
+ else {
+ val wideTp = widenToClass(tp)
+
+ val narrowTp =
+ if (tp.isInstanceOf[SingletonType]) tp
+ else p match {
+ case Literal(c) =>
+ if (c.tpe.typeSymbol == UnitClass) c.tpe
+ else ConstantType(c)
+ case Ident(_) if p.symbol.isStable =>
+ // for Idents, can encode uniqueness of symbol as uniqueness of the corresponding singleton type
+ // for Selects, which are handled by the next case, the prefix of the select varies independently of the symbol (see pos/virtpatmat_unreach_select.scala)
+ singleType(tp.prefix, p.symbol)
+ case _ =>
+ Const.uniqueTpForTree(p)
+ }
+
+ val toString =
+ if (hasStableSymbol(p)) p.symbol.name.toString // tp.toString
+ else p.toString //+"#"+ id
+
+ Const.unique(narrowTp, new ValueConst(narrowTp, checkableType(wideTp), toString)) // must make wide type checkable so that it is comparable to types from TypeConst
+ }
+ }
+ }
+ sealed class ValueConst(val tp: Type, val wideTp: Type, override val toString: String) extends Const {
+ // debug.patmat("VC"+(tp, wideTp, toString))
+ assert(!(tp =:= NullTp)) // TODO: assert(!tp.isStable)
+ /*private[this] val id: Int = */Const.nextValueId
+ def isValue = true
+ }
+
+
+ lazy val NullTp = ConstantType(Constant(null))
+ case object NullConst extends Const {
+ def tp = NullTp
+ def wideTp = NullTp
+
+ def isValue = true
+ override def toString = "null"
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
new file mode 100644
index 0000000..9558542
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -0,0 +1,709 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.Position
+
+trait TreeAndTypeAnalysis extends Debugging {
+ import global.{Tree, Type, Symbol, definitions, analyzer,
+ ConstantType, Literal, Constant, appliedType, WildcardType, TypeRef, ModuleClassSymbol,
+ nestedMemberType, TypeMap, Ident}
+
+ import definitions._
+ import analyzer.Typer
+
+
+ // we use subtyping as a model for implication between instanceof tests
+ // i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
+ // unfortunately this is not true in general:
+ // SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefClass.tpe)
+ def instanceOfTpImplies(tp: Type, tpImplied: Type) = {
+ val tpValue = tp.typeSymbol.isPrimitiveValueClass
+
+ // pretend we're comparing to Any when we're actually comparing to AnyVal or AnyRef
+ // (and the subtype is respectively a value type or not a value type)
+ // this allows us to reuse subtyping as a model for implication between instanceOf tests
+ // the latter don't see a difference between AnyRef, Object or Any when comparing non-value types -- SI-6022
+ val tpImpliedNormalizedToAny =
+ if (tpImplied =:= (if (tpValue) AnyValClass.tpe else AnyRefClass.tpe)) AnyClass.tpe
+ else tpImplied
+
+ tp <:< tpImpliedNormalizedToAny
+ }
+
+ // TODO: improve, e.g., for constants
+ def sameValue(a: Tree, b: Tree): Boolean = (a eq b) || ((a, b) match {
+ case (_ : Ident, _ : Ident) => a.symbol eq b.symbol
+ case _ => false
+ })
+
+ trait CheckableTreeAndTypeAnalysis {
+ val typer: Typer
+
+ // TODO: domain of other feasibly enumerable built-in types (char?)
+ def enumerateSubtypes(tp: Type): Option[List[Type]] =
+ tp.typeSymbol match {
+ // TODO case _ if tp.isTupleType => // recurse into component types?
+ case UnitClass =>
+ Some(List(UnitClass.tpe))
+ case BooleanClass =>
+ Some((List(ConstantType(Constant(true)), ConstantType(Constant(false)))))
+ // TODO case _ if tp.isTupleType => // recurse into component types
+ case modSym: ModuleClassSymbol =>
+ Some(List(tp))
+ // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
+ case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
+ debug.patmat("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
+ None
+ case sym =>
+ val subclasses = (
+ sym.sealedDescendants.toList sortBy (_.sealedSortName)
+ // symbols which are both sealed and abstract need not be covered themselves, because
+ // all of their children must be and they cannot otherwise be created.
+ filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)))
+ debug.patmat("enum sealed -- subclasses: "+ (sym, subclasses))
+
+ val tpApprox = typer.infer.approximateAbstracts(tp)
+ val pre = tpApprox.prefix
+ // valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
+ val validSubTypes = (subclasses flatMap {sym =>
+ // have to filter out children which cannot match: see ticket #3683 for an example
+ // compare to the fully known type `tp` (modulo abstract types),
+ // so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
+ // however, must approximate abstract types in
+
+ val memberType = nestedMemberType(sym, pre, tpApprox.typeSymbol.owner)
+ val subTp = appliedType(memberType, sym.typeParams.map(_ => WildcardType))
+ val subTpApprox = typer.infer.approximateAbstracts(subTp) // TODO: needed?
+ // debug.patmat("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
+ if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
+ else None
+ })
+ debug.patmat("enum sealed "+ (tp, tpApprox) + " as "+ validSubTypes)
+ Some(validSubTypes)
+ }
+
+ // approximate a type to the static type that is fully checkable at run time,
+ // hiding statically known but dynamically uncheckable information using existential quantification
+ // TODO: this is subject to the availability of TypeTags (since an abstract type with a type tag is checkable at run time)
+ def checkableType(tp: Type): Type = {
+ // TODO: this is extremely rough...
+ // replace type args by wildcards, since they can't be checked (don't use existentials: overkill)
+ // TODO: when type tags are available, we will check -- when this is implemented, can we take that into account here?
+ // similar to typer.infer.approximateAbstracts
+ object typeArgsToWildcardsExceptArray extends TypeMap {
+ // SI-6771 dealias would be enough today, but future proofing with the dealiasWiden.
+ // See neg/t6771b.scala for elaboration
+ def apply(tp: Type): Type = tp.dealiasWiden match {
+ case TypeRef(pre, sym, args) if args.nonEmpty && (sym ne ArrayClass) =>
+ TypeRef(pre, sym, args map (_ => WildcardType))
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ val res = typeArgsToWildcardsExceptArray(tp)
+ debug.patmat("checkable "+(tp, res))
+ res
+ }
+
+ // a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed)
+ // we consider tuple types with at least one component of a checkable type as a checkable type
+ def uncheckableType(tp: Type): Boolean = {
+ def tupleComponents(tp: Type) = tp.normalize.typeArgs
+ val checkable = (
+ (isTupleType(tp) && tupleComponents(tp).exists(tp => !uncheckableType(tp)))
+ || enumerateSubtypes(tp).nonEmpty)
+ // if (!checkable) debug.patmat("deemed uncheckable: "+ tp)
+ !checkable
+ }
+ }
+}
+
+trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchTreeMaking {
+ import global.{Tree, Type, NoType, Symbol, NoSymbol, ConstantType, Literal, Constant, Ident, UniqueType, RefinedType, EmptyScope}
+ import global.definitions.{ListClass, NilModule}
+
+ /**
+ * Represent a match as a formula in propositional logic that encodes whether the match matches (abstractly: we only consider types)
+ *
+ */
+ trait MatchApproximator extends TreeMakers with TreesAndTypesDomain {
+ object Test {
+ var currId = 0
+ }
+ case class Test(prop: Prop, treeMaker: TreeMaker) {
+ // private val reusedBy = new scala.collection.mutable.HashSet[Test]
+ var reuses: Option[Test] = None
+ def registerReuseBy(later: Test): Unit = {
+ assert(later.reuses.isEmpty, later.reuses)
+ // reusedBy += later
+ later.reuses = Some(this)
+ }
+
+ val id = { Test.currId += 1; Test.currId}
+ override def toString =
+ "T"+ id + "C("+ prop +")" //+ (reuses map ("== T"+_.id) getOrElse (if(reusedBy.isEmpty) treeMaker else reusedBy mkString (treeMaker+ " -->(", ", ",")")))
+ }
+
+
+ class TreeMakersToPropsIgnoreNullChecks(root: Symbol) extends TreeMakersToProps(root) {
+ override def uniqueNonNullProp(p: Tree): Prop = True
+ }
+
+ // returns (tree, tests), where `tree` will be used to refer to `root` in `tests`
+ class TreeMakersToProps(val root: Symbol) {
+ prepareNewAnalysis() // reset hash consing for Var and Const
+
+ private[this] val uniqueEqualityProps = new scala.collection.mutable.HashMap[(Tree, Tree), Eq]
+ private[this] val uniqueNonNullProps = new scala.collection.mutable.HashMap[Tree, Not]
+ private[this] val uniqueTypeProps = new scala.collection.mutable.HashMap[(Tree, Type), Eq]
+
+ def uniqueEqualityProp(testedPath: Tree, rhs: Tree): Prop =
+ uniqueEqualityProps getOrElseUpdate((testedPath, rhs), Eq(Var(testedPath), ValueConst(rhs)))
+
+ // overridden in TreeMakersToPropsIgnoreNullChecks
+ def uniqueNonNullProp (testedPath: Tree): Prop =
+ uniqueNonNullProps getOrElseUpdate(testedPath, Not(Eq(Var(testedPath), NullConst)))
+
+ def uniqueTypeProp(testedPath: Tree, pt: Type): Prop =
+ uniqueTypeProps getOrElseUpdate((testedPath, pt), Eq(Var(testedPath), TypeConst(checkableType(pt))))
+
+ // a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively)
+ private val pointsToBound = mutable.HashSet(root)
+ private val trees = mutable.HashSet.empty[Tree]
+
+ // the substitution that renames variables to variables in pointsToBound
+ private var normalize: Substitution = EmptySubstitution
+ private var substitutionComputed = false
+
+ // replaces a variable (in pointsToBound) by a selection on another variable in pointsToBound
+ // in the end, instead of having x1, x1.hd, x2, x2.hd, ... flying around,
+ // we want something like x1, x1.hd, x1.hd.tl, x1.hd.tl.hd, so that we can easily recognize when
+ // we're testing the same variable
+ // TODO check:
+ // pointsToBound -- accumSubst.from == Set(root) && (accumSubst.from.toSet -- pointsToBound) isEmpty
+ private var accumSubst: Substitution = EmptySubstitution
+
+ // hashconsing trees (modulo value-equality)
+ def unique(t: Tree, tpOverride: Type = NoType): Tree =
+ trees find (a => a.correspondsStructure(t)(sameValue)) match {
+ case Some(orig) =>
+ // debug.patmat("unique: "+ (t eq orig, orig))
+ orig
+ case _ =>
+ trees += t
+ if (tpOverride != NoType) t setType tpOverride
+ else t
+ }
+
+ def uniqueTp(tp: Type): Type = tp match {
+ // typerefs etc are already hashconsed
+ case _ : UniqueType => tp
+ case tp at RefinedType(parents, EmptyScope) => tp.memo(tp: Type)(identity) // TODO: does this help?
+ case _ => tp
+ }
+
+ // produce the unique tree used to refer to this binder
+ // the type of the binder passed to the first invocation
+ // determines the type of the tree that'll be returned for that binder as of then
+ final def binderToUniqueTree(b: Symbol) =
+ unique(accumSubst(normalize(CODE.REF(b))), b.tpe)
+
+ // note that the sequencing of operations is important: must visit in same order as match execution
+ // binderToUniqueTree uses the type of the first symbol that was encountered as the type for all future binders
+ abstract class TreeMakerToProp extends (TreeMaker => Prop) {
+ // requires(if (!substitutionComputed))
+ def updateSubstitution(subst: Substitution): Unit = {
+ // find part of substitution that replaces bound symbols by new symbols, and reverse that part
+ // so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal
+ val (boundSubst, unboundSubst) = (subst.from zip subst.to) partition {
+ case (f, t) =>
+ t.isInstanceOf[Ident] && (t.symbol ne NoSymbol) && pointsToBound(f)
+ }
+ val (boundFrom, boundTo) = boundSubst.unzip
+ val (unboundFrom, unboundTo) = unboundSubst.unzip
+
+ // reverse substitution that would otherwise replace a variable we already encountered by a new variable
+ // NOTE: this forgets the more precise type we have for these later variables, but that's probably okay
+ normalize >>= Substitution(boundTo map (_.symbol), boundFrom map (CODE.REF(_)))
+ // debug.patmat ("normalize subst: "+ normalize)
+
+ val okSubst = Substitution(unboundFrom, unboundTo map (normalize(_))) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway
+ pointsToBound ++= ((okSubst.from, okSubst.to).zipped filter { (f, t) => pointsToBound exists (sym => t.exists(_.symbol == sym)) })._1
+ // debug.patmat("pointsToBound: "+ pointsToBound)
+
+ accumSubst >>= okSubst
+ // debug.patmat("accumSubst: "+ accumSubst)
+ }
+
+ def handleUnknown(tm: TreeMaker): Prop
+
+ /** apply itself must render a faithful representation of the TreeMaker
+ *
+ * Concretely, True must only be used to represent a TreeMaker that is sure to match and that does not do any computation at all
+ * e.g., doCSE relies on apply itself being sound in this sense (since it drops TreeMakers that are approximated to True -- SI-6077)
+ *
+ * handleUnknown may be customized by the caller to approximate further
+ *
+ * TODO: don't ignore outer-checks
+ */
+ def apply(tm: TreeMaker): Prop = {
+ if (!substitutionComputed) updateSubstitution(tm.subPatternsAsSubstitution)
+
+ tm match {
+ case ttm at TypeTestTreeMaker(prevBinder, testedBinder, pt, _) =>
+ object condStrategy extends TypeTestTreeMaker.TypeTestCondStrategy {
+ type Result = Prop
+ def and(a: Result, b: Result) = And(a, b)
+ def outerTest(testedBinder: Symbol, expectedTp: Type) = True // TODO OuterEqProp(testedBinder, expectedType)
+ def typeTest(b: Symbol, pt: Type) = { // a type test implies the tested path is non-null (null.isInstanceOf[T] is false for all T)
+ val p = binderToUniqueTree(b); And(uniqueNonNullProp(p), uniqueTypeProp(p, uniqueTp(pt)))
+ }
+ def nonNullTest(testedBinder: Symbol) = uniqueNonNullProp(binderToUniqueTree(testedBinder))
+ def equalsTest(pat: Tree, testedBinder: Symbol) = uniqueEqualityProp(binderToUniqueTree(testedBinder), unique(pat))
+ def eqTest(pat: Tree, testedBinder: Symbol) = uniqueEqualityProp(binderToUniqueTree(testedBinder), unique(pat)) // TODO: eq, not ==
+ def tru = True
+ }
+ ttm.renderCondition(condStrategy)
+ case EqualityTestTreeMaker(prevBinder, patTree, _) => uniqueEqualityProp(binderToUniqueTree(prevBinder), unique(patTree))
+ case AlternativesTreeMaker(_, altss, _) => \/(altss map (alts => /\(alts map this)))
+ case ProductExtractorTreeMaker(testedBinder, None) => uniqueNonNullProp(binderToUniqueTree(testedBinder))
+ case SubstOnlyTreeMaker(_, _) => True
+ case GuardTreeMaker(guard) =>
+ guard.tpe match {
+ case ConstantType(Constant(true)) => True
+ case ConstantType(Constant(false)) => False
+ case _ => handleUnknown(tm)
+ }
+ case ExtractorTreeMaker(_, _, _) |
+ ProductExtractorTreeMaker(_, _) |
+ BodyTreeMaker(_, _) => handleUnknown(tm)
+ }
+ }
+ }
+
+
+ private val irrefutableExtractor: PartialFunction[TreeMaker, Prop] = {
+ // the extra condition is None, the extractor's result indicates it always succeeds,
+ // (the potential type-test for the argument is represented by a separate TypeTestTreeMaker)
+ case IrrefutableExtractorTreeMaker(_, _) => True
+ }
+
+ // special-case: interpret pattern `List()` as `Nil`
+ // TODO: make it more general List(1, 2) => 1 :: 2 :: Nil -- not sure this is a good idea...
+ private val rewriteListPattern: PartialFunction[TreeMaker, Prop] = {
+ case p @ ExtractorTreeMaker(_, _, testedBinder)
+ if testedBinder.tpe.typeSymbol == ListClass && p.checkedLength == Some(0) =>
+ uniqueEqualityProp(binderToUniqueTree(p.prevBinder), unique(Ident(NilModule) setType NilModule.tpe))
+ }
+ val fullRewrite = (irrefutableExtractor orElse rewriteListPattern)
+ val refutableRewrite = irrefutableExtractor
+
+ @inline def onUnknown(handler: TreeMaker => Prop) = new TreeMakerToProp {
+ def handleUnknown(tm: TreeMaker) = handler(tm)
+ }
+
+ // used for CSE -- rewrite all unknowns to False (the most conserative option)
+ object conservative extends TreeMakerToProp {
+ def handleUnknown(tm: TreeMaker) = False
+ }
+
+ final def approximateMatch(cases: List[List[TreeMaker]], treeMakerToProp: TreeMakerToProp = conservative) ={
+ val testss = cases.map { _ map (tm => Test(treeMakerToProp(tm), tm)) }
+ substitutionComputed = true // a second call to approximateMatch should not re-compute the substitution (would be wrong)
+ testss
+ }
+ }
+
+ def approximateMatchConservative(root: Symbol, cases: List[List[TreeMaker]]): List[List[Test]] =
+ (new TreeMakersToProps(root)).approximateMatch(cases)
+
+ // turns a case (represented as a list of abstract tests)
+ // into a proposition that is satisfiable if the case may match
+ protected final def caseWithoutBodyToProp(tests: List[Test]): Prop =
+ /\(tests.takeWhile(t => !t.treeMaker.isInstanceOf[BodyTreeMaker]).map(t => t.prop))
+
+ def showTreeMakers(cases: List[List[TreeMaker]]) = {
+ debug.patmat("treeMakers:")
+ debug.patmat(alignAcrossRows(cases, ">>"))
+ }
+
+ def showTests(testss: List[List[Test]]) = {
+ debug.patmat("tests: ")
+ debug.patmat(alignAcrossRows(testss, "&"))
+ }
+ }
+
+}
+
+trait MatchAnalysis extends MatchApproximation {
+ import PatternMatchingStats._
+ import global.{Tree, Type, Symbol, NoSymbol, Ident, Select}
+ import global.definitions.{isPrimitiveValueClass, ConsClass, isTupleSymbol}
+
+ trait MatchAnalyzer extends MatchApproximator {
+ def uncheckedWarning(pos: Position, msg: String) = global.currentUnit.uncheckedWarning(pos, msg)
+ def warn(pos: Position, ex: AnalysisBudget.Exception, kind: String) = uncheckedWarning(pos, s"Cannot check match for $kind.\n${ex.advice}")
+
+ // TODO: model dependencies between variables: if V1 corresponds to (x: List[_]) and V2 is (x.hd), V2 cannot be assigned when V1 = null or V1 = Nil
+ // right now hackily implement this by pruning counter-examples
+ // unreachability would also benefit from a more faithful representation
+
+
+ // reachability (dead code)
+
+ // computes the first 0-based case index that is unreachable (if any)
+ // a case is unreachable if it implies its preceding cases
+ // call C the formula that is satisfiable if the considered case matches
+ // call P the formula that is satisfiable if the cases preceding it match
+ // the case is reachable if there is a model for -P /\ C,
+ // thus, the case is unreachable if there is no model for -(-P /\ C),
+ // or, equivalently, P \/ -C, or C => P
+ def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Int] = {
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaReach) else null
+
+ // use the same approximator so we share variables,
+ // but need different conditions depending on whether we're conservatively looking for failure or success
+ // don't rewrite List-like patterns, as List() and Nil need to distinguished for unreachability
+ val approx = new TreeMakersToProps(prevBinder)
+ def approximate(default: Prop) = approx.approximateMatch(cases, approx.onUnknown { tm =>
+ approx.refutableRewrite.applyOrElse(tm, (_: TreeMaker) => default )
+ })
+
+ val propsCasesOk = approximate(True) map caseWithoutBodyToProp
+ val propsCasesFail = approximate(False) map (t => Not(caseWithoutBodyToProp(t)))
+
+ try {
+ val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, modelNull = true)
+ val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, modelNull = true)
+ val eqAxioms = simplifyFormula(andFormula(eqAxiomsOk, eqAxiomsFail)) // I'm pretty sure eqAxiomsOk == eqAxiomsFail, but not 100% sure.
+
+ val prefix = formulaBuilder
+ addFormula(prefix, eqAxioms)
+
+ var prefixRest = symbolicCasesFail
+ var current = symbolicCasesOk
+ var reachable = true
+ var caseIndex = 0
+
+ debug.patmat("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables).distinct map (_.describe) mkString ("\n")))
+ debug.patmat("equality axioms:\n"+ cnfString(eqAxiomsOk))
+
+ // invariant (prefixRest.length == current.length) && (prefix.reverse ++ prefixRest == symbolicCasesFail)
+ // termination: prefixRest.length decreases by 1
+ while (prefixRest.nonEmpty && reachable) {
+ val prefHead = prefixRest.head
+ caseIndex += 1
+ prefixRest = prefixRest.tail
+ if (prefixRest.isEmpty) reachable = true
+ else {
+ addFormula(prefix, prefHead)
+ current = current.tail
+ val model = findModelFor(andFormula(current.head, toFormula(prefix)))
+
+ // debug.patmat("trying to reach:\n"+ cnfString(current.head) +"\nunder prefix:\n"+ cnfString(prefix))
+ // if (NoModel ne model) debug.patmat("reached: "+ modelString(model))
+
+ reachable = NoModel ne model
+ }
+ }
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaReach, start)
+
+ if (reachable) None else Some(caseIndex)
+ } catch {
+ case ex: AnalysisBudget.Exception =>
+ warn(prevBinder.pos, ex, "unreachability")
+ None // CNF budget exceeded
+ }
+ }
+
+ // exhaustivity
+
+ def exhaustive(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[String] = if (uncheckableType(prevBinder.info)) Nil else {
+ // customize TreeMakersToProps (which turns a tree of tree makers into a more abstract DAG of tests)
+ // - approximate the pattern `List()` (unapplySeq on List with empty length) as `Nil`,
+ // otherwise the common (xs: List[Any]) match { case List() => case x :: xs => } is deemed unexhaustive
+ // - back off (to avoid crying exhaustive too often) when:
+ // - there are guards -->
+ // - there are extractor calls (that we can't secretly/soundly) rewrite
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaExhaust) else null
+ var backoff = false
+
+ val approx = new TreeMakersToPropsIgnoreNullChecks(prevBinder)
+ val symbolicCases = approx.approximateMatch(cases, approx.onUnknown { tm =>
+ approx.fullRewrite.applyOrElse[TreeMaker, Prop](tm, {
+ case BodyTreeMaker(_, _) => True // irrelevant -- will be discarded by symbolCase later
+ case _ => // debug.patmat("backing off due to "+ tm)
+ backoff = true
+ False
+ })
+ }) map caseWithoutBodyToProp
+
+ if (backoff) Nil else {
+ val prevBinderTree = approx.binderToUniqueTree(prevBinder)
+
+ // TODO: null tests generate too much noise, so disabled them -- is there any way to bring them back?
+ // assuming we're matching on a non-null scrutinee (prevBinder), when does the match fail?
+ // val nonNullScrutineeCond =
+ // assume non-null for all the components of the tuple we're matching on (if we're matching on a tuple)
+ // if (isTupleType(prevBinder.tpe))
+ // prevBinder.tpe.typeArgs.mapWithIndex{case (_, i) => NonNullProp(codegen.tupleSel(prevBinderTree)(i))}.reduceLeft(And)
+ // else
+ // NonNullProp(prevBinderTree)
+ // val matchFails = And(symbolic(nonNullScrutineeCond), Not(symbolicCases reduceLeft (Or(_, _))))
+
+ // when does the match fail?
+ val matchFails = Not(\/(symbolicCases))
+
+ // debug output:
+ debug.patmat("analysing:")
+ showTreeMakers(cases)
+
+ // debug.patmat("\nvars:\n"+ (vars map (_.describe) mkString ("\n")))
+ // debug.patmat("\nmatchFails as CNF:\n"+ cnfString(propToSolvable(matchFails)))
+
+ try {
+ // find the models (under which the match fails)
+ val matchFailModels = findAllModelsFor(propToSolvable(matchFails))
+
+ val scrutVar = Var(prevBinderTree)
+ val counterExamples = matchFailModels.map(modelToCounterExample(scrutVar))
+
+ val pruned = CounterExample.prune(counterExamples).map(_.toString).sorted
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaExhaust, start)
+ pruned
+ } catch {
+ case ex : AnalysisBudget.Exception =>
+ warn(prevBinder.pos, ex, "exhaustivity")
+ Nil // CNF budget exceeded
+ }
+ }
+ }
+
+ object CounterExample {
+ def prune(examples: List[CounterExample]): List[CounterExample] = {
+ val distinct = examples.filterNot(_ == NoExample).toSet
+ distinct.filterNot(ce => distinct.exists(other => (ce ne other) && ce.coveredBy(other))).toList
+ }
+ }
+
+ // a way to construct a value that will make the match fail: a constructor invocation, a constant, an object of some type)
+ class CounterExample {
+ protected[MatchAnalyzer] def flattenConsArgs: List[CounterExample] = Nil
+ def coveredBy(other: CounterExample): Boolean = this == other || other == WildcardExample
+ }
+ case class ValueExample(c: ValueConst) extends CounterExample { override def toString = c.toString }
+ case class TypeExample(c: Const) extends CounterExample { override def toString = "(_ : "+ c +")" }
+ case class NegativeExample(eqTo: Const, nonTrivialNonEqualTo: List[Const]) extends CounterExample {
+ // require(nonTrivialNonEqualTo.nonEmpty, nonTrivialNonEqualTo)
+ override def toString = {
+ val negation =
+ if (nonTrivialNonEqualTo.tail.isEmpty) nonTrivialNonEqualTo.head.toString
+ else nonTrivialNonEqualTo.map(_.toString).sorted.mkString("(", ", ", ")")
+ "(x: "+ eqTo +" forSome x not in "+ negation +")"
+ }
+ }
+ case class ListExample(ctorArgs: List[CounterExample]) extends CounterExample {
+ protected[MatchAnalyzer] override def flattenConsArgs: List[CounterExample] = ctorArgs match {
+ case hd :: tl :: Nil => hd :: tl.flattenConsArgs
+ case _ => Nil
+ }
+ protected[MatchAnalyzer] lazy val elems = flattenConsArgs
+
+ override def coveredBy(other: CounterExample): Boolean =
+ other match {
+ case other at ListExample(_) =>
+ this == other || ((elems.length == other.elems.length) && (elems zip other.elems).forall{case (a, b) => a coveredBy b})
+ case _ => super.coveredBy(other)
+ }
+
+ override def toString = elems.mkString("List(", ", ", ")")
+ }
+ case class TupleExample(ctorArgs: List[CounterExample]) extends CounterExample {
+ override def toString = ctorArgs.mkString("(", ", ", ")")
+
+ override def coveredBy(other: CounterExample): Boolean =
+ other match {
+ case TupleExample(otherArgs) =>
+ this == other || ((ctorArgs.length == otherArgs.length) && (ctorArgs zip otherArgs).forall{case (a, b) => a coveredBy b})
+ case _ => super.coveredBy(other)
+ }
+ }
+ case class ConstructorExample(cls: Symbol, ctorArgs: List[CounterExample]) extends CounterExample {
+ override def toString = cls.decodedName + (if (cls.isModuleClass) "" else ctorArgs.mkString("(", ", ", ")"))
+ }
+
+ case object WildcardExample extends CounterExample { override def toString = "_" }
+ case object NoExample extends CounterExample { override def toString = "??" }
+
+ def modelToVarAssignment(model: Model): Map[Var, (Seq[Const], Seq[Const])] =
+ model.toSeq.groupBy{f => f match {case (sym, value) => sym.variable} }.mapValues{ xs =>
+ val (trues, falses) = xs.partition(_._2)
+ (trues map (_._1.const), falses map (_._1.const))
+ // should never be more than one value in trues...
+ }
+
+ def varAssignmentString(varAssignment: Map[Var, (Seq[Const], Seq[Const])]) =
+ varAssignment.toSeq.sortBy(_._1.toString).map { case (v, (trues, falses)) =>
+ val assignment = "== "+ (trues mkString("(", ", ", ")")) +" != ("+ (falses mkString(", ")) +")"
+ v +"(="+ v.path +": "+ v.staticTpCheckable +") "+ assignment
+ }.mkString("\n")
+
+ // return constructor call when the model is a true counter example
+ // (the variables don't take into account type information derived from other variables,
+ // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _),
+ // since we didn't realize the tail of the outer cons was a Nil)
+ def modelToCounterExample(scrutVar: Var)(model: Model): CounterExample = {
+ // x1 = ...
+ // x1.hd = ...
+ // x1.tl = ...
+ // x1.hd.hd = ...
+ // ...
+ val varAssignment = modelToVarAssignment(model)
+
+ debug.patmat("var assignment for model "+ model +":\n"+ varAssignmentString(varAssignment))
+
+ // chop a path into a list of symbols
+ def chop(path: Tree): List[Symbol] = path match {
+ case Ident(_) => List(path.symbol)
+ case Select(pre, name) => chop(pre) :+ path.symbol
+ case _ =>
+ // debug.patmat("don't know how to chop "+ path)
+ Nil
+ }
+
+ // turn the variable assignments into a tree
+ // the root is the scrutinee (x1), edges are labelled by the fields that are assigned
+ // a node is a variable example (which is later turned into a counter example)
+ object VariableAssignment {
+ private def findVar(path: List[Symbol]) = path match {
+ case List(root) if root == scrutVar.path.symbol => Some(scrutVar)
+ case _ => varAssignment.find{case (v, a) => chop(v.path) == path}.map(_._1)
+ }
+
+ private val uniques = new mutable.HashMap[Var, VariableAssignment]
+ private def unique(variable: Var): VariableAssignment =
+ uniques.getOrElseUpdate(variable, {
+ val (eqTo, neqTo) = varAssignment.getOrElse(variable, (Nil, Nil)) // TODO
+ VariableAssignment(variable, eqTo.toList, neqTo.toList, mutable.HashMap.empty)
+ })
+
+ def apply(variable: Var): VariableAssignment = {
+ val path = chop(variable.path)
+ val pre = path.init
+ val field = path.last
+
+ val newCtor = unique(variable)
+
+ if (pre.isEmpty) newCtor
+ else {
+ findVar(pre) foreach { preVar =>
+ val outerCtor = this(preVar)
+ outerCtor.fields(field) = newCtor
+ }
+ newCtor
+ }
+ }
+ }
+
+ // node in the tree that describes how to construct a counter-example
+ case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const], fields: scala.collection.mutable.Map[Symbol, VariableAssignment]) {
+ // need to prune since the model now incorporates all super types of a constant (needed for reachability)
+ private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp)))
+ private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
+ private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor
+ private lazy val ctorParams = if (ctor == NoSymbol || ctor.paramss.isEmpty) Nil else ctor.paramss.head
+ private lazy val cls = if (ctor == NoSymbol) NoSymbol else ctor.owner
+ private lazy val caseFieldAccs = if (cls == NoSymbol) Nil else cls.caseFieldAccessors
+
+
+ def allFieldAssignmentsLegal: Boolean =
+ (fields.keySet subsetOf caseFieldAccs.toSet) && fields.values.forall(_.allFieldAssignmentsLegal)
+
+ private lazy val nonTrivialNonEqualTo = notEqualTo.filterNot{c => c.isAny }
+
+ // NoExample if the constructor call is ill-typed
+ // (thus statically impossible -- can we incorporate this into the formula?)
+ // beBrief is used to suppress negative information nested in tuples -- it tends to get too noisy
+ def toCounterExample(beBrief: Boolean = false): CounterExample =
+ if (!allFieldAssignmentsLegal) NoExample
+ else {
+ debug.patmat("describing "+ (variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))
+ val res = prunedEqualTo match {
+ // a definite assignment to a value
+ case List(eq: ValueConst) if fields.isEmpty => ValueExample(eq)
+
+ // constructor call
+ // or we did not gather any information about equality but we have information about the fields
+ // --> typical example is when the scrutinee is a tuple and all the cases first unwrap that tuple and only then test something interesting
+ case _ if cls != NoSymbol && !isPrimitiveValueClass(cls) &&
+ ( uniqueEqualTo.nonEmpty
+ || (fields.nonEmpty && prunedEqualTo.isEmpty && notEqualTo.isEmpty)) =>
+
+ def args(brevity: Boolean = beBrief) = {
+ // figure out the constructor arguments from the field assignment
+ val argLen = (caseFieldAccs.length min ctorParams.length)
+
+ (0 until argLen).map(i => fields.get(caseFieldAccs(i)).map(_.toCounterExample(brevity)) getOrElse WildcardExample).toList
+ }
+
+ cls match {
+ case ConsClass => ListExample(args())
+ case _ if isTupleSymbol(cls) => TupleExample(args(true))
+ case _ => ConstructorExample(cls, args())
+ }
+
+ // a definite assignment to a type
+ case List(eq) if fields.isEmpty => TypeExample(eq)
+
+ // negative information
+ case Nil if nonTrivialNonEqualTo.nonEmpty =>
+ // negation tends to get pretty verbose
+ if (beBrief) WildcardExample
+ else {
+ val eqTo = equalTo.headOption getOrElse TypeConst(variable.staticTpCheckable)
+ NegativeExample(eqTo, nonTrivialNonEqualTo)
+ }
+
+ // not a valid counter-example, possibly since we have a definite type but there was a field mismatch
+ // TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive
+ case _ => NoExample
+ }
+ debug.patmat("described as: "+ res)
+ res
+ }
+
+ override def toString = toCounterExample().toString
+ }
+
+ // slurp in information from other variables
+ varAssignment.keys.foreach{ v => if (v != scrutVar) VariableAssignment(v) }
+
+ // this is the variable we want a counter example for
+ VariableAssignment(scrutVar).toCounterExample()
+ }
+
+ def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit = {
+ if (!suppression.unreachable) {
+ unreachableCase(prevBinder, cases, pt) foreach { caseIndex =>
+ reportUnreachable(cases(caseIndex).last.pos)
+ }
+ }
+ if (!suppression.exhaustive) {
+ val counterExamples = exhaustive(prevBinder, cases, pt)
+ if (counterExamples.nonEmpty)
+ reportMissingCases(prevBinder.pos, counterExamples)
+ }
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
new file mode 100644
index 0000000..57fab4e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
@@ -0,0 +1,258 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.tools.nsc.symtab.Flags.SYNTHETIC
+import scala.language.postfixOps
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.NoPosition
+
+/** Factory methods used by TreeMakers to make the actual trees.
+ *
+ * We have two modes in which to emit trees: optimized (the default)
+ * and pure (aka "virtualized": match is parametric in its monad).
+ */
+trait MatchCodeGen extends Interface {
+ import PatternMatchingStats._
+ import global.{nme, treeInfo, definitions, gen, Tree, Type, Symbol, NoSymbol,
+ appliedType, NoType, MethodType, newTermName, Name,
+ Block, Literal, Constant, EmptyTree, Function, Typed, ValDef, LabelDef}
+ import definitions._
+
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ // generate actual trees
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait CodegenCore extends MatchMonadInterface {
+ private var ctr = 0
+ def freshName(prefix: String) = {ctr += 1; vpmName.counted(prefix, ctr)}
+
+ // assert(owner ne null); assert(owner ne NoSymbol)
+ def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") =
+ NoSymbol.newTermSymbol(freshName(prefix), pos, newFlags = SYNTHETIC) setInfo tp
+
+ def newSynthCaseLabel(name: String) =
+ NoSymbol.newLabel(freshName(name), NoPosition) setFlag treeInfo.SYNTH_CASE_FLAGS
+
+ // codegen relevant to the structure of the translation (how extractors are combined)
+ trait AbsCodegen {
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree
+
+ // local / context-free
+ def _asInstanceOf(b: Symbol, tp: Type): Tree
+ def _equals(checker: Tree, binder: Symbol): Tree
+ def _isInstanceOf(b: Symbol, tp: Type): Tree
+ def drop(tgt: Tree)(n: Int): Tree
+ def index(tgt: Tree)(i: Int): Tree
+ def mkZero(tp: Type): Tree
+ def tupleSel(binder: Symbol)(i: Int): Tree
+ }
+
+ // structure
+ trait Casegen extends AbsCodegen { import CODE._
+ def one(res: Tree): Tree
+
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree
+ def flatMapGuard(cond: Tree, next: Tree): Tree
+ def ifThenElseZero(c: Tree, thenp: Tree): Tree = IF (c) THEN thenp ELSE zero
+ protected def zero: Tree
+ }
+
+ def codegen: AbsCodegen
+
+ abstract class CommonCodegen extends AbsCodegen { import CODE._
+ def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
+ def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
+ def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
+ def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
+ def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+
+ // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
+ def _asInstanceOf(b: Symbol, tp: Type): Tree = if (b.info <:< tp) REF(b) else gen.mkCastPreservingAnnotations(REF(b), tp)
+ def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false)
+
+ // duplicated out of frustration with cast generation
+ def mkZero(tp: Type): Tree = {
+ tp.typeSymbol match {
+ case UnitClass => Literal(Constant())
+ case BooleanClass => Literal(Constant(false))
+ case FloatClass => Literal(Constant(0.0f))
+ case DoubleClass => Literal(Constant(0.0d))
+ case ByteClass => Literal(Constant(0.toByte))
+ case ShortClass => Literal(Constant(0.toShort))
+ case IntClass => Literal(Constant(0))
+ case LongClass => Literal(Constant(0L))
+ case CharClass => Literal(Constant(0.toChar))
+ case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
+ }
+ }
+ }
+ }
+
+ trait PureMatchMonadInterface extends MatchMonadInterface {
+ val matchStrategy: Tree
+
+ def inMatchMonad(tp: Type): Type = appliedType(oneSig, List(tp)).finalResultType
+ def pureType(tp: Type): Type = appliedType(oneSig, List(tp)).paramTypes.headOption getOrElse NoType // fail gracefully (otherwise we get crashes)
+ protected def matchMonadSym = oneSig.finalResultType.typeSymbol
+
+ import CODE._
+ def _match(n: Name): SelectStart = matchStrategy DOT n
+
+ private lazy val oneSig: Type = typer.typedOperator(_match(vpmName.one)).tpe // TODO: error message
+ }
+
+ trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
+ def codegen: AbsCodegen = pureCodegen
+
+ object pureCodegen extends CommonCodegen with Casegen { import CODE._
+ //// methods in MatchingStrategy (the monad companion) -- used directly in translation
+ // __match.runOrElse(`scrut`)(`scrutSym` => `matcher`)
+ // TODO: consider catchAll, or virtualized matching will break in exception handlers
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
+ _match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, cases map (f => f(this)) reduceLeft typedOrElse))
+
+ // __match.one(`res`)
+ def one(res: Tree): Tree = (_match(vpmName.one)) (res)
+ // __match.zero
+ protected def zero: Tree = _match(vpmName.zero)
+ // __match.guard(`c`, `then`)
+ def guard(c: Tree, thenp: Tree): Tree = _match(vpmName.guard) APPLY (c, thenp)
+
+ //// methods in the monad instance -- used directly in translation
+ // `prev`.flatMap(`b` => `next`)
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = (prev DOT vpmName.flatMap)(fun(b, next))
+ // `thisCase`.orElse(`elseCase`)
+ def typedOrElse(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase)
+ // __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`)
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = flatMap(guard(cond, res), nextBinder, next)
+ // __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`)
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), next)
+ }
+ }
+
+ trait OptimizedMatchMonadInterface extends MatchMonadInterface {
+ override def inMatchMonad(tp: Type): Type = optionType(tp)
+ override def pureType(tp: Type): Type = tp
+ override protected def matchMonadSym = OptionClass
+ }
+
+ trait OptimizedCodegen extends CodegenCore with TypedSubstitution with OptimizedMatchMonadInterface {
+ override def codegen: AbsCodegen = optimizedCodegen
+
+ // when we know we're targetting Option, do some inlining the optimizer won't do
+ // for example, `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard
+ // this is a special instance of the advanced inlining optimization that takes a method call on
+ // an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases
+ object optimizedCodegen extends CommonCodegen { import CODE._
+
+ /** Inline runOrElse and get rid of Option allocations
+ *
+ * runOrElse(scrut: scrutTp)(matcher): resTp = matcher(scrut) getOrElse ${catchAll(`scrut`)}
+ * the matcher's optional result is encoded as a flag, keepGoing, where keepGoing == true encodes result.isEmpty,
+ * if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
+ */
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = {
+ val matchEnd = newSynthCaseLabel("matchEnd")
+ val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, newFlags = SYNTHETIC) setInfo restpe.withoutAnnotations
+ matchEnd setInfo MethodType(List(matchRes), restpe)
+
+ def newCaseSym = newSynthCaseLabel("case") setInfo MethodType(Nil, restpe)
+ var _currCase = newCaseSym
+
+ val caseDefs = cases map { (mkCase: Casegen => Tree) =>
+ val currCase = _currCase
+ val nextCase = newCaseSym
+ _currCase = nextCase
+
+ LabelDef(currCase, Nil, mkCase(new OptimizedCasegen(matchEnd, nextCase)))
+ }
+
+ // must compute catchAll after caseLabels (side-effects nextCase)
+ // catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
+ // if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
+ val catchAllDef = matchFailGen map { matchFailGen =>
+ val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
+
+ LabelDef(_currCase, Nil, matchEnd APPLY (matchFailGen(scrutRef)))
+ } toList // at most 1 element
+
+ // scrutSym == NoSymbol when generating an alternatives matcher
+ val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
+
+ // the generated block is taken apart in TailCalls under the following assumptions
+ // the assumption is once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ Block(
+ scrutDef ++ caseDefs ++ catchAllDef,
+ LabelDef(matchEnd, List(matchRes), REF(matchRes))
+ )
+ }
+
+ class OptimizedCasegen(matchEnd: Symbol, nextCase: Symbol) extends CommonCodegen with Casegen {
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
+ optimizedCodegen.matcher(scrut, scrutSym, restpe)(cases, matchFailGen)
+
+ // only used to wrap the RHS of a body
+ // res: T
+ // returns MatchMonad[T]
+ def one(res: Tree): Tree = matchEnd APPLY (res) // a jump to a case label is special-cased in typedApply
+ protected def zero: Tree = nextCase APPLY ()
+
+ // prev: MatchMonad[T]
+ // b: T
+ // next: MatchMonad[U]
+ // returns MatchMonad[U]
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
+ val tp = inMatchMonad(b.tpe)
+ val prevSym = freshSym(prev.pos, tp, "o")
+ val isEmpty = tp member vpmName.isEmpty
+ val get = tp member vpmName.get
+
+ BLOCK(
+ VAL(prevSym) === prev,
+ // must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
+ ifThenElseZero(NOT(prevSym DOT isEmpty), Substitution(b, prevSym DOT get)(next))
+ )
+ }
+
+ // cond: Boolean
+ // res: T
+ // nextBinder: T
+ // next == MatchMonad[U]
+ // returns MatchMonad[U]
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = {
+ val rest =
+ // only emit a local val for `nextBinder` if it's actually referenced in `next`
+ if (next.exists(_.symbol eq nextBinder))
+ BLOCK(
+ VAL(nextBinder) === res,
+ next
+ )
+ else next
+ ifThenElseZero(cond, rest)
+ }
+
+ // guardTree: Boolean
+ // next: MatchMonad[T]
+ // returns MatchMonad[T]
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree =
+ ifThenElseZero(guardTree, next)
+
+ def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree =
+ ifThenElseZero(cond, BLOCK(
+ condSym === mkTRUE,
+ nextBinder === res,
+ next
+ ))
+ }
+
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
new file mode 100644
index 0000000..c570dd8
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
@@ -0,0 +1,615 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.tools.nsc.symtab.Flags.MUTABLE
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.NoPosition
+
+/** Optimize and analyze matches based on their TreeMaker-representation.
+ *
+ * The patmat translation doesn't rely on this, so it could be disabled in principle.
+ * - well, not quite: the backend crashes if we emit duplicates in switches (e.g. SI-7290)
+ */
+// TODO: split out match analysis
+trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
+ import PatternMatchingStats._
+ import global.{Tree, Type, Symbol, NoSymbol, CaseDef, atPos,
+ ConstantType, Literal, Constant, gen, EmptyTree, distinctBy,
+ Typed, treeInfo, nme, Ident,
+ Apply, If, Bind, lub, Alternative, deriveCaseDef, Match, MethodType, LabelDef, TypeTree, Throw}
+
+ import global.definitions._
+
+
+ ////
+ trait CommonSubconditionElimination extends OptimizedCodegen with MatchApproximator {
+ /** a flow-sensitive, generalised, common sub-expression elimination
+ * reuse knowledge from performed tests
+ * the only sub-expressions we consider are the conditions and results of the three tests (type, type&equality, equality)
+ * when a sub-expression is shared, it is stored in a mutable variable
+ * the variable is floated up so that its scope includes all of the program that shares it
+ * we generalize sharing to implication, where b reuses a if a => b and priors(a) => priors(b) (the priors of a sub expression form the path through the decision tree)
+ */
+ def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
+ debug.patmat("before CSE:")
+ showTreeMakers(cases)
+
+ val testss = approximateMatchConservative(prevBinder, cases)
+
+ // interpret:
+ val dependencies = new mutable.LinkedHashMap[Test, Set[Prop]]
+ val tested = new mutable.HashSet[Prop]
+
+ // TODO: use SAT solver instead of hashconsing props and approximating implication by subset/equality
+ def storeDependencies(test: Test) = {
+ val cond = test.prop
+
+ def simplify(c: Prop): Set[Prop] = c match {
+ case And(a, b) => simplify(a) ++ simplify(b)
+ case Or(_, _) => Set(False) // TODO: make more precise
+ case Not(Eq(Var(_), NullConst)) => Set(True) // not worth remembering
+ case _ => Set(c)
+ }
+ val conds = simplify(cond)
+
+ if (conds(False)) false // stop when we encounter a definite "no" or a "not sure"
+ else {
+ val nonTrivial = conds filterNot (_ == True)
+ if (nonTrivial nonEmpty) {
+ tested ++= nonTrivial
+
+ // is there an earlier test that checks our condition and whose dependencies are implied by ours?
+ dependencies find {
+ case (priorTest, deps) =>
+ ((simplify(priorTest.prop) == nonTrivial) || // our conditions are implied by priorTest if it checks the same thing directly
+ (nonTrivial subsetOf deps) // or if it depends on a superset of our conditions
+ ) && (deps subsetOf tested) // the conditions we've tested when we are here in the match satisfy the prior test, and hence what it tested
+ } foreach {
+ case (priorTest, _) =>
+ // if so, note the dependency in both tests
+ priorTest registerReuseBy test
+ }
+
+ dependencies(test) = tested.toSet // copies
+ }
+ true
+ }
+ }
+
+
+ testss foreach { tests =>
+ tested.clear()
+ tests dropWhile storeDependencies
+ }
+ debug.patmat("dependencies: "+ dependencies)
+
+ // find longest prefix of tests that reuse a prior test, and whose dependent conditions monotonically increase
+ // then, collapse these contiguous sequences of reusing tests
+ // store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used)
+ // replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable
+ val reused = new mutable.HashMap[TreeMaker, ReusedCondTreeMaker]
+ var okToCall = false
+ val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)}
+
+ // maybe collapse: replace shared prefix of tree makers by a ReusingCondTreeMaker
+ // once this has been computed, we'll know which tree makers are reused,
+ // and we'll replace those by the ReusedCondTreeMakers we've constructed (and stored in the reused map)
+ val collapsed = testss map { tests =>
+ // map tests to the equivalent list of treemakers, replacing shared prefixes by a reusing treemaker
+ // if there's no sharing, simply map to the tree makers corresponding to the tests
+ var currDeps = Set[Prop]()
+ val (sharedPrefix, suffix) = tests span { test =>
+ (test.prop == True) || (for(
+ reusedTest <- test.reuses;
+ nextDeps <- dependencies.get(reusedTest);
+ diff <- (nextDeps -- currDeps).headOption;
+ _ <- Some(currDeps = nextDeps))
+ yield diff).nonEmpty
+ }
+
+ val collapsedTreeMakers =
+ if (sharedPrefix.isEmpty) None
+ else { // even sharing prefixes of length 1 brings some benefit (overhead-percentage for compiler: 26->24%, lib: 19->16%)
+ for (test <- sharedPrefix; reusedTest <- test.reuses) reusedTest.treeMaker match {
+ case reusedCTM: CondTreeMaker => reused(reusedCTM) = ReusedCondTreeMaker(reusedCTM)
+ case _ =>
+ }
+
+ debug.patmat("sharedPrefix: "+ sharedPrefix)
+ debug.patmat("suffix: "+ sharedPrefix)
+ // if the shared prefix contains interesting conditions (!= True)
+ // and the last of such interesting shared conditions reuses another treemaker's test
+ // replace the whole sharedPrefix by a ReusingCondTreeMaker
+ for (lastShared <- sharedPrefix.reverse.dropWhile(_.prop == True).headOption;
+ lastReused <- lastShared.reuses)
+ yield ReusingCondTreeMaker(sharedPrefix, reusedOrOrig) :: suffix.map(_.treeMaker)
+ }
+
+ collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains True-tests, which are dropped above)
+ }
+ okToCall = true // TODO: remove (debugging)
+
+ // replace original treemakers that are reused (as determined when computing collapsed),
+ // by ReusedCondTreeMakers
+ val reusedMakers = collapsed mapConserve (_ mapConserve reusedOrOrig)
+ debug.patmat("after CSE:")
+ showTreeMakers(reusedMakers)
+ reusedMakers
+ }
+
+ object ReusedCondTreeMaker {
+ def apply(orig: CondTreeMaker) = new ReusedCondTreeMaker(orig.prevBinder, orig.nextBinder, orig.cond, orig.res, orig.pos)
+ }
+ class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, val pos: Position) extends TreeMaker { import CODE._
+ lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
+ lazy val storedCond = freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE
+ lazy val treesToHoist: List[Tree] = {
+ nextBinder setFlag MUTABLE
+ List(storedCond, nextBinder) map { b => VAL(b) === codegen.mkZero(b.info) }
+ }
+
+ // TODO: finer-grained duplication
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen)
+ atPos(pos)(casegen.asInstanceOf[optimizedCodegen.OptimizedCasegen].flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
+
+ override def toString = "Memo"+(nextBinder.name, storedCond.name, cond, res, substitution)
+ }
+
+ case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._
+ val pos = sharedPrefix.last.treeMaker.pos
+
+ lazy val localSubstitution = {
+ // replace binder of each dropped treemaker by corresponding binder bound by the most recent reused treemaker
+ var mostRecentReusedMaker: ReusedCondTreeMaker = null
+ def mapToStored(droppedBinder: Symbol) = if (mostRecentReusedMaker eq null) Nil else List((droppedBinder, REF(mostRecentReusedMaker.nextBinder)))
+ val (from, to) = sharedPrefix.flatMap { dropped =>
+ dropped.reuses.map(test => toReused(test.treeMaker)).foreach {
+ case reusedMaker: ReusedCondTreeMaker =>
+ mostRecentReusedMaker = reusedMaker
+ case _ =>
+ }
+
+ // TODO: have super-trait for retrieving the variable that's operated on by a tree maker
+ // and thus assumed in scope, either because it binds it or because it refers to it
+ dropped.treeMaker match {
+ case dropped: FunTreeMaker =>
+ mapToStored(dropped.nextBinder)
+ case _ => Nil
+ }
+ }.unzip
+ val rerouteToReusedBinders = Substitution(from, to)
+
+ val collapsedDroppedSubst = sharedPrefix map (t => (toReused(t.treeMaker).substitution))
+
+ collapsedDroppedSubst.foldLeft(rerouteToReusedBinders)(_ >> _)
+ }
+
+ lazy val lastReusedTreeMaker = sharedPrefix.reverse.flatMap(tm => tm.reuses map (test => toReused(test.treeMaker))).collectFirst{case x: ReusedCondTreeMaker => x}.head
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+ // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift,
+ // and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
+ casegen.ifThenElseZero(REF(lastReusedTreeMaker.storedCond), substitution(next).duplicate)
+ }
+ override def toString = "R"+(lastReusedTreeMaker.storedCond.name, substitution)
+ }
+ }
+
+
+ //// DCE
+// trait DeadCodeElimination extends TreeMakers {
+// // TODO: non-trivial dead-code elimination
+// // e.g., the following match should compile to a simple instanceof:
+// // case class Ident(name: String)
+// // for (Ident(name) <- ts) println(name)
+// def doDCE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
+// // do minimal DCE
+// cases
+// }
+// }
+
+ //// SWITCHES -- TODO: operate on Tests rather than TreeMakers
+ trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface {
+ import treeInfo.isGuardedCase
+
+ abstract class SwitchMaker {
+ abstract class SwitchableTreeMakerExtractor { def unapply(x: TreeMaker): Option[Tree] }
+ val SwitchableTreeMaker: SwitchableTreeMakerExtractor
+
+ def alternativesSupported: Boolean
+
+ // when collapsing guarded switch cases we may sometimes need to jump to the default case
+ // however, that's not supported in exception handlers, so when we can't jump when we need it, don't emit a switch
+ // TODO: make more fine-grained, as we don't always need to jump
+ def canJump: Boolean
+
+ /** Should exhaustivity analysis be skipped? */
+ def unchecked: Boolean
+
+
+ def isDefault(x: CaseDef): Boolean
+ def defaultSym: Symbol
+ def defaultBody: Tree
+ def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef
+
+ private def sequence[T](xs: List[Option[T]]): Option[List[T]] =
+ if (xs exists (_.isEmpty)) None else Some(xs.flatten)
+
+ object GuardAndBodyTreeMakers {
+ def unapply(tms: List[TreeMaker]): Option[(Tree, Tree)] = {
+ tms match {
+ case (btm at BodyTreeMaker(body, _)) :: Nil => Some((EmptyTree, btm.substitution(body)))
+ case (gtm at GuardTreeMaker(guard)) :: (btm at BodyTreeMaker(body, _)) :: Nil => Some((gtm.substitution(guard), btm.substitution(body)))
+ case _ => None
+ }
+ }
+ }
+
+ private val defaultLabel: Symbol = newSynthCaseLabel("default")
+
+ /** Collapse guarded cases that switch on the same constant (the last case may be unguarded).
+ *
+ * Cases with patterns A and B switch on the same constant iff for all values x that match A also match B and vice versa.
+ * (This roughly corresponds to equality on trees modulo alpha renaming and reordering of alternatives.)
+ *
+ * The rewrite only applies if some of the cases are guarded (this must be checked before invoking this method).
+ *
+ * The rewrite goes through the switch top-down and merges each case with the subsequent cases it is implied by
+ * (i.e. it matches if they match, not taking guards into account)
+ *
+ * If there are no unreachable cases, all cases can be uniquely assigned to a partition of such 'overlapping' cases,
+ * save for the default case (thus we jump to it rather than copying it several times).
+ * (The cases in a partition are implied by the principal element of the partition.)
+ *
+ * The overlapping cases are merged into one case with their guards pushed into the body as follows
+ * (with P the principal element of the overlapping patterns Pi):
+ *
+ * `{case Pi if(G_i) => B_i }*` is rewritten to `case P => {if(G_i) B_i}*`
+ *
+ * The rewrite fails (and returns Nil) when:
+ * (1) there is a subsequence of overlapping cases that has an unguarded case in the middle;
+ * only the last case of each subsequence of overlapping cases may be unguarded (this is implied by unreachability)
+ *
+ * (2) there are overlapping cases that differ (tested by `caseImpliedBy`)
+ * cases with patterns A and B are overlapping if for SOME value x, A matches x implies B matches y OR vice versa <-- note the difference with case equality defined above
+ * for example `case 'a' | 'b' =>` and `case 'b' =>` are different and overlapping (overlapping and equality disregard guards)
+ *
+ * The second component of the returned tuple indicates whether we'll need to emit a labeldef to jump to the default case.
+ */
+ private def collapseGuardedCases(cases: List[CaseDef]): (List[CaseDef], Boolean) = {
+ // requires(same.forall(caseEquals(same.head)))
+ // requires(same.nonEmpty, same)
+ def collapse(same: List[CaseDef], isDefault: Boolean): CaseDef = {
+ val commonPattern = same.head.pat
+ // jump to default case (either the user-supplied one or the synthetic one)
+ // unless we're collapsing the default case: then we re-use the same body as the synthetic catchall (throwing a matcherror, rethrowing the exception)
+ val jumpToDefault: Tree =
+ if (isDefault || !canJump) defaultBody
+ else Apply(Ident(defaultLabel), Nil)
+
+ val guardedBody = same.foldRight(jumpToDefault){
+ // the last case may be un-guarded (we know it's the last one since fold's accum == jumpToDefault)
+ // --> replace jumpToDefault by the un-guarded case's body
+ case (CaseDef(_, EmptyTree, b), `jumpToDefault`) => b
+ case (cd at CaseDef(_, g, b), els) if isGuardedCase(cd) => If(g, b, els)
+ }
+
+ // if the cases that we're going to collapse bind variables,
+ // must replace them by the single binder introduced by the collapsed case
+ val binders = same.collect{case CaseDef(x at Bind(_, _), _, _) if x.symbol != NoSymbol => x.symbol}
+ val (pat, guardedBodySubst) =
+ if (binders.isEmpty) (commonPattern, guardedBody)
+ else {
+ // create a single fresh binder to subsume the old binders (and their types)
+ // TODO: I don't think the binder's types can actually be different (due to checks in caseEquals)
+ // if they do somehow manage to diverge, the lub might not be precise enough and we could get a type error
+ // TODO: reuse name exactly if there's only one binder in binders
+ val binder = freshSym(binders.head.pos, lub(binders.map(_.tpe)), binders.head.name.toString)
+
+ // the patterns in same are equal (according to caseEquals)
+ // we can thus safely pick the first one arbitrarily, provided we correct binding
+ val origPatWithoutBind = commonPattern match {
+ case Bind(b, orig) => orig
+ case o => o
+ }
+ // need to replace `defaultSym` as well -- it's used in `defaultBody` (see `jumpToDefault` above)
+ val unifiedBody = guardedBody.substituteSymbols(defaultSym :: binders, binder :: binders.map(_ => binder))
+ (Bind(binder, origPatWithoutBind), unifiedBody)
+ }
+
+ atPos(commonPattern.pos)(CaseDef(pat, EmptyTree, guardedBodySubst))
+ }
+
+ // requires cases.exists(isGuardedCase) (otherwise the rewrite is pointless)
+ var remainingCases = cases
+ val collapsed = scala.collection.mutable.ListBuffer.empty[CaseDef]
+
+ // when some of collapsed cases (except for the default case itself) did not include an un-guarded case
+ // we'll need to emit a labeldef for the default case
+ var needDefault = false
+
+ while (remainingCases.nonEmpty) {
+ val currCase = remainingCases.head
+ val currIsDefault = isDefault(CaseDef(currCase.pat, EmptyTree, EmptyTree))
+ val (impliesCurr, others) =
+ // the default case is implied by all cases, no need to partition (and remainingCases better all be default cases as well)
+ if (currIsDefault) (remainingCases.tail, Nil)
+ else remainingCases.tail partition (caseImplies(currCase))
+
+ val unguardedComesLastOrAbsent =
+ (!isGuardedCase(currCase) && impliesCurr.isEmpty) || { val LastImpliesCurr = impliesCurr.length - 1
+ impliesCurr.indexWhere(oc => !isGuardedCase(oc)) match {
+ // if all cases are guarded we will have to jump to the default case in the final else
+ // (except if we're collapsing the default case itself)
+ case -1 =>
+ if (!currIsDefault) needDefault = true
+ true
+
+ // last case is not guarded, no need to jump to the default here
+ // note: must come after case -1 => (since LastImpliesCurr may be -1)
+ case LastImpliesCurr => true
+
+ case _ => false
+ }}
+
+ if (unguardedComesLastOrAbsent /*(1)*/ && impliesCurr.forall(caseEquals(currCase)) /*(2)*/) {
+ collapsed += (
+ if (impliesCurr.isEmpty && !isGuardedCase(currCase)) currCase
+ else collapse(currCase :: impliesCurr, currIsDefault)
+ )
+
+ remainingCases = others
+ } else { // fail
+ collapsed.clear()
+ remainingCases = Nil
+ }
+ }
+
+ (collapsed.toList, needDefault)
+ }
+
+ private def caseEquals(x: CaseDef)(y: CaseDef) = patternEquals(x.pat)(y.pat)
+ private def patternEquals(x: Tree)(y: Tree): Boolean = (x, y) match {
+ case (Alternative(xs), Alternative(ys)) =>
+ xs.forall(x => ys.exists(patternEquals(x))) &&
+ ys.forall(y => xs.exists(patternEquals(y)))
+ case (Alternative(pats), _) => pats.forall(p => patternEquals(p)(y))
+ case (_, Alternative(pats)) => pats.forall(q => patternEquals(x)(q))
+ // regular switch
+ case (Literal(Constant(cx)), Literal(Constant(cy))) => cx == cy
+ case (Ident(nme.WILDCARD), Ident(nme.WILDCARD)) => true
+ // type-switch for catch
+ case (Bind(_, Typed(Ident(nme.WILDCARD), tpX)), Bind(_, Typed(Ident(nme.WILDCARD), tpY))) => tpX.tpe =:= tpY.tpe
+ case _ => false
+ }
+
+ // if y matches then x matches for sure (thus, if x comes before y, y is unreachable)
+ private def caseImplies(x: CaseDef)(y: CaseDef) = patternImplies(x.pat)(y.pat)
+ private def patternImplies(x: Tree)(y: Tree): Boolean = (x, y) match {
+ // since alternatives are flattened, must treat them as separate cases
+ case (Alternative(pats), _) => pats.exists(p => patternImplies(p)(y))
+ case (_, Alternative(pats)) => pats.exists(q => patternImplies(x)(q))
+ // regular switch
+ case (Literal(Constant(cx)), Literal(Constant(cy))) => cx == cy
+ case (Ident(nme.WILDCARD), _) => true
+ // type-switch for catch
+ case (Bind(_, Typed(Ident(nme.WILDCARD), tpX)),
+ Bind(_, Typed(Ident(nme.WILDCARD), tpY))) => instanceOfTpImplies(tpY.tpe, tpX.tpe)
+ case _ => false
+ }
+
+ private def noGuards(cs: List[CaseDef]): Boolean = !cs.exists(isGuardedCase)
+
+ // must do this before removing guards from cases and collapsing (SI-6011, SI-6048)
+ private def unreachableCase(cs: List[CaseDef]): Option[CaseDef] = {
+ var cases = cs
+ var unreachable: Option[CaseDef] = None
+
+ while (cases.nonEmpty && unreachable.isEmpty) {
+ val currCase = cases.head
+ if (isDefault(currCase) && cases.tail.nonEmpty) // subsumed by the `else if` that follows, but faster
+ unreachable = Some(cases.tail.head)
+ else if (!isGuardedCase(currCase) || currCase.guard.tpe =:= ConstantType(Constant(true)))
+ unreachable = cases.tail.find(caseImplies(currCase))
+ else if (currCase.guard.tpe =:= ConstantType(Constant(false)))
+ unreachable = Some(currCase)
+
+ cases = cases.tail
+ }
+
+ unreachable
+ }
+
+ // empty list ==> failure
+ def apply(cases: List[(Symbol, List[TreeMaker])], pt: Type): List[CaseDef] =
+ // generate if-then-else for 1 case switch (avoids verify error... can't imagine a one-case switch being faster than if-then-else anyway)
+ if (cases.isEmpty || cases.tail.isEmpty) Nil
+ else {
+ val caseDefs = cases map { case (scrutSym, makers) =>
+ makers match {
+ // default case
+ case GuardAndBodyTreeMakers(guard, body) =>
+ Some(defaultCase(scrutSym, guard, body))
+ // constant (or typetest for typeSwitch)
+ case SwitchableTreeMaker(pattern) :: GuardAndBodyTreeMakers(guard, body) =>
+ Some(CaseDef(pattern, guard, body))
+ // alternatives
+ case AlternativesTreeMaker(_, altss, pos) :: GuardAndBodyTreeMakers(guard, body) if alternativesSupported =>
+ val switchableAlts = altss map {
+ case SwitchableTreeMaker(pattern) :: Nil =>
+ Some(pattern)
+ case _ =>
+ None
+ }
+
+ // succeed if they were all switchable
+ sequence(switchableAlts) map { switchableAlts =>
+ def extractConst(t: Tree) = t match {
+ case Literal(const) => const
+ case _ => t
+ }
+ // SI-7290 Discard duplicate alternatives that would crash the backend
+ val distinctAlts = distinctBy(switchableAlts)(extractConst)
+ if (distinctAlts.size < switchableAlts.size) {
+ val duplicated = switchableAlts.groupBy(extractConst).flatMap(_._2.drop(1).take(1)) // report the first duplicated
+ global.currentUnit.warning(pos, s"Pattern contains duplicate alternatives: ${duplicated.mkString(", ")}")
+ }
+ CaseDef(Alternative(distinctAlts), guard, body)
+ }
+ case _ =>
+ // debug.patmat("can't emit switch for "+ makers)
+ None //failure (can't translate pattern to a switch)
+ }
+ }
+
+ val caseDefsWithGuards = sequence(caseDefs) match {
+ case None => return Nil
+ case Some(cds) => cds
+ }
+
+ // a switch with duplicate cases yields a verify error,
+ // and a switch with duplicate cases and guards cannot soundly be rewritten to an unguarded switch
+ // (even though the verify error would disappear, the behaviour would change)
+ val allReachable = unreachableCase(caseDefsWithGuards) map (cd => reportUnreachable(cd.body.pos)) isEmpty
+
+ if (!allReachable) Nil
+ else if (noGuards(caseDefsWithGuards)) {
+ if (isDefault(caseDefsWithGuards.last)) caseDefsWithGuards
+ else caseDefsWithGuards :+ defaultCase()
+ } else {
+ // collapse identical cases with different guards, push guards into body for all guarded cases
+ // this translation is only sound if there are no unreachable (duplicate) cases
+ // it should only be run if there are guarded cases, and on failure it returns Nil
+ val (collapsed, needDefaultLabel) = collapseGuardedCases(caseDefsWithGuards)
+
+ if (collapsed.isEmpty || (needDefaultLabel && !canJump)) Nil
+ else {
+ def wrapInDefaultLabelDef(cd: CaseDef): CaseDef =
+ if (needDefaultLabel) deriveCaseDef(cd){ b =>
+ // TODO: can b.tpe ever be null? can't really use pt, see e.g. pos/t2683 or cps/match1.scala
+ defaultLabel setInfo MethodType(Nil, if (b.tpe != null) b.tpe else pt)
+ LabelDef(defaultLabel, Nil, b)
+ } else cd
+
+ val last = collapsed.last
+ if (isDefault(last)) {
+ if (!needDefaultLabel) collapsed
+ else collapsed.init :+ wrapInDefaultLabelDef(last)
+ } else collapsed :+ wrapInDefaultLabelDef(defaultCase())
+ }
+ }
+ }
+ }
+
+ class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree], val unchecked: Boolean) extends SwitchMaker {
+ val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
+ val alternativesSupported = true
+ val canJump = true
+
+ // Constant folding sets the type of a constant tree to `ConstantType(Constant(folded))`
+ // The tree itself can be a literal, an ident, a selection, ...
+ object SwitchablePattern { def unapply(pat: Tree): Option[Tree] = pat.tpe match {
+ case ConstantType(const) if const.isIntRange =>
+ Some(Literal(Constant(const.intValue))) // TODO: Java 7 allows strings in switches
+ case _ => None
+ }}
+
+ object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
+ def unapply(x: TreeMaker): Option[Tree] = x match {
+ case EqualityTestTreeMaker(_, SwitchablePattern(const), _) => Some(const)
+ case _ => None
+ }
+ }
+
+ def isDefault(x: CaseDef): Boolean = x match {
+ case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
+ case _ => false
+ }
+
+ def defaultSym: Symbol = scrutSym
+ def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse MATCHERROR(REF(scrutSym)) }
+ def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ (DEFAULT IF guard) ==> body
+ }}
+ }
+
+ override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] = { import CODE._
+ val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride, unchecked)
+ // TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
+ if (regularSwitchMaker.switchableTpe(dealiasWiden(scrutSym.tpe))) {
+ val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
+ if (caseDefsWithDefault isEmpty) None // not worth emitting a switch.
+ else {
+ // match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
+ val scrutToInt: Tree =
+ if (scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
+ else (REF(scrutSym) DOT (nme.toInt))
+ Some(BLOCK(
+ VAL(scrutSym) === scrut,
+ Match(scrutToInt, caseDefsWithDefault) // a switch
+ ))
+ }
+ } else None
+ }
+
+ // for the catch-cases in a try/catch
+ private object typeSwitchMaker extends SwitchMaker {
+ val unchecked = false
+ val alternativesSupported = false // TODO: needs either back-end support of flattening of alternatives during typers
+ val canJump = false
+
+ // TODO: there are more treemaker-sequences that can be handled by type tests
+ // analyze the result of approximateTreeMaker rather than the TreeMaker itself
+ object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
+ def unapply(x: TreeMaker): Option[Tree] = x match {
+ case tm at TypeTestTreeMaker(_, _, pt, _) if tm.isPureTypeTest => // -- TODO: use this if binder does not occur in the body
+ Some(Bind(tm.nextBinder, Typed(Ident(nme.WILDCARD), TypeTree(pt)) /* not used by back-end */))
+ case _ =>
+ None
+ }
+ }
+
+ def isDefault(x: CaseDef): Boolean = x match {
+ case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+ case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+ case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
+ case _ => false
+ }
+
+ lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableClass.tpe)
+ def defaultBody: Tree = Throw(CODE.REF(defaultSym))
+ def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ (CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) IF guard) ==> body
+ }}
+ }
+
+ // TODO: drop null checks
+ override def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] = {
+ val caseDefsWithDefault = typeSwitchMaker(bindersAndCases, pt)
+ if (caseDefsWithDefault isEmpty) None
+ else Some(caseDefsWithDefault)
+ }
+ }
+
+ trait MatchOptimizer extends OptimizedCodegen
+ with SwitchEmission
+ with CommonSubconditionElimination {
+ override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) = {
+ // TODO: do CSE on result of doDCE(prevBinder, cases, pt)
+ val optCases = doCSE(prevBinder, cases, pt)
+ val toHoist = (
+ for (treeMakers <- optCases)
+ yield treeMakers.collect{case tm: ReusedCondTreeMaker => tm.treesToHoist}
+ ).flatten.flatten.toList
+ (optCases, toHoist)
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
new file mode 100644
index 0000000..90c52e3
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -0,0 +1,674 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+
+/** Translate typed Trees that represent pattern matches into the patternmatching IR, defined by TreeMakers.
+ */
+trait MatchTranslation { self: PatternMatching =>
+ import PatternMatchingStats._
+ import global.{phase, currentRun, Symbol,
+ Apply, Bind, CaseDef, ClassInfoType, Ident, Literal, Match,
+ Alternative, Constant, EmptyTree, Select, Star, This, Throw, Typed, UnApply,
+ Type, MethodType, WildcardType, PolyType, ErrorType, NoType, TypeRef, typeRef,
+ Name, NoSymbol, Position, Tree, atPos, glb, rootMirror, treeInfo, nme, Transformer,
+ elimAnonymousClass, asCompactDebugString, hasLength}
+ import global.definitions.{ThrowableClass, SeqClass, ScalaPackageClass, BooleanClass, UnitClass, RepeatedParamClass,
+ repeatedToSeq, isRepeatedParamType, getProductArgs}
+ import global.analyzer.{ErrorUtils, formalTypes}
+
+ trait MatchTranslator extends TreeMakers {
+ import typer.context
+
+ // Why is it so difficult to say "here's a name and a context, give me any
+ // matching symbol in scope" ? I am sure this code is wrong, but attempts to
+ // use the scopes of the contexts in the enclosing context chain discover
+ // nothing. How to associate a name with a symbol would would be a wonderful
+ // linkage for which to establish a canonical acquisition mechanism.
+ def matchingSymbolInScope(pat: Tree): Symbol = {
+ def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
+ case PolyType(tparams, restpe) => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
+ case _ => NoSymbol
+ }
+ pat match {
+ case Bind(name, _) =>
+ context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
+ res orElse declarationOfName(ctx.owner.rawInfo, name))
+ case _ => NoSymbol
+ }
+ }
+
+ // Issue better warnings than "unreachable code" when people mis-use
+ // variable patterns thinking they bind to existing identifiers.
+ //
+ // Possible TODO: more deeply nested variable patterns, like
+ // case (a, b) => 1 ; case (c, d) => 2
+ // However this is a pain (at least the way I'm going about it)
+ // and I have to think these detailed errors are primarily useful
+ // for beginners, not people writing nested pattern matches.
+ def checkMatchVariablePatterns(cases: List[CaseDef]) {
+ // A string describing the first variable pattern
+ var vpat: String = null
+ // Using an iterator so we can recognize the last case
+ val it = cases.iterator
+
+ def addendum(pat: Tree) = {
+ matchingSymbolInScope(pat) match {
+ case NoSymbol => ""
+ case sym =>
+ val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
+ s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
+ }
+ }
+
+ while (it.hasNext) {
+ val cdef = it.next
+ // If a default case has been seen, then every succeeding case is unreachable.
+ if (vpat != null)
+ context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
+ // If this is a default case and more cases follow, warn about this one so
+ // we have a reason to mention its pattern variable name and any corresponding
+ // symbol in scope. Errors will follow from the remaining cases, at least
+ // once we make the above warning an error.
+ else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
+ val vpatName = cdef.pat match {
+ case Bind(name, _) => s" '$name'"
+ case _ => ""
+ }
+ vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
+ context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
+ }
+ }
+ }
+
+ // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
+ private lazy val MarkerCPSAdaptPlus = rootMirror.getClassIfDefined("scala.util.continuations.cpsPlus")
+ private lazy val MarkerCPSAdaptMinus = rootMirror.getClassIfDefined("scala.util.continuations.cpsMinus")
+ private lazy val MarkerCPSSynth = rootMirror.getClassIfDefined("scala.util.continuations.cpsSynth")
+ private lazy val stripTriggerCPSAnns = List(MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
+ private lazy val MarkerCPSTypes = rootMirror.getClassIfDefined("scala.util.continuations.cpsParam")
+ private lazy val strippedCPSAnns = MarkerCPSTypes :: stripTriggerCPSAnns
+ private def removeCPSAdaptAnnotations(tp: Type) = tp filterAnnotations (ann => !(strippedCPSAnns exists (ann matches _)))
+
+ /** Implement a pattern match by turning its cases (including the implicit failure case)
+ * into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
+ *
+ * For `scrutinee match { case1 ... caseN }`, the resulting tree has the shape
+ * `runOrElse(scrutinee)(x => translateCase1(x).orElse(translateCase2(x)).....orElse(zero))`
+ *
+ * NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed
+ * thus, you must typecheck the result (and that will in turn translate nested matches)
+ * this could probably optimized... (but note that the matchStrategy must be solved for each nested patternmatch)
+ */
+ def translateMatch(match_ : Match): Tree = {
+ val Match(selector, cases) = match_
+
+ val (nonSyntheticCases, defaultOverride) = cases match {
+ case init :+ last if treeInfo isSyntheticDefaultCase last =>
+ (init, Some(((scrut: Tree) => last.body)))
+ case _ =>
+ (cases, None)
+ }
+
+ checkMatchVariablePatterns(nonSyntheticCases)
+
+ // we don't transform after uncurry
+ // (that would require more sophistication when generating trees,
+ // and the only place that emits Matches after typers is for exception handling anyway)
+ if (phase.id >= currentRun.uncurryPhase.id)
+ devWarning(s"running translateMatch past uncurry (at $phase) on $selector match $cases")
+
+ debug.patmat("translating "+ cases.mkString("{", "\n", "}"))
+
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatNanos) else null
+
+ val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations))
+
+ val origPt = match_.tpe
+ // when one of the internal cps-type-state annotations is present, strip all CPS annotations
+ // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
+ // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
+ val ptUnCPS =
+ if (MarkerCPSAdaptPlus != NoSymbol && (stripTriggerCPSAnns exists origPt.hasAnnotation))
+ removeCPSAdaptAnnotations(origPt)
+ else origPt
+
+ // relevant test cases: pos/existentials-harmful.scala, pos/gadt-gilles.scala, pos/t2683.scala, pos/virtpatmat_exist4.scala
+ // pt is the skolemized version
+ val pt = repeatedToSeq(ptUnCPS)
+
+ // val packedPt = repeatedToSeq(typer.packedType(match_, context.owner))
+ val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS
+
+ // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
+ val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, matchOwner, defaultOverride)
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatNanos, start)
+ combined
+ }
+
+ // return list of typed CaseDefs that are supported by the backend (typed/bind/wildcard)
+ // we don't have a global scrutinee -- the caught exception must be bound in each of the casedefs
+ // there's no need to check the scrutinee for null -- "throw null" becomes "throw new NullPointerException"
+ // try to simplify to a type-based switch, or fall back to a catch-all case that runs a normal pattern match
+ // unlike translateMatch, we type our result before returning it
+ def translateTry(caseDefs: List[CaseDef], pt: Type, pos: Position): List[CaseDef] =
+ // if they're already simple enough to be handled by the back-end, we're done
+ if (caseDefs forall treeInfo.isCatchCase) caseDefs
+ else {
+ val swatches = { // switch-catches
+ val bindersAndCases = caseDefs map { caseDef =>
+ // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
+ // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
+ val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
+ }
+
+ for(cases <- emitTypeSwitch(bindersAndCases, pt).toList;
+ if cases forall treeInfo.isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end
+ cse <- cases) yield fixerUpper(matchOwner, pos)(cse).asInstanceOf[CaseDef]
+ }
+
+ val catches = if (swatches.nonEmpty) swatches else {
+ val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))}
+
+ val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex")
+
+ List(
+ atPos(pos) {
+ CaseDef(
+ Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping?
+ EmptyTree,
+ combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(CODE.REF(exSym))))
+ )
+ })
+ }
+
+ typer.typedCases(catches, ThrowableClass.tpe, WildcardType)
+ }
+
+
+
+ /** The translation of `pat if guard => body` has two aspects:
+ * 1) the substitution due to the variables bound by patterns
+ * 2) the combination of the extractor calls using `flatMap`.
+ *
+ * 2) is easy -- it looks like: `translatePattern_1.flatMap(translatePattern_2....flatMap(translatePattern_N.flatMap(translateGuard.flatMap((x_i) => success(Xbody(x_i)))))...)`
+ * this must be right-leaning tree, as can be seen intuitively by considering the scope of bound variables:
+ * variables bound by pat_1 must be visible from the function inside the left-most flatMap right up to Xbody all the way on the right
+ * 1) is tricky because translatePattern_i determines the shape of translatePattern_i+1:
+ * zoom in on `translatePattern_1.flatMap(translatePattern_2)` for example -- it actually looks more like:
+ * `translatePattern_1(x_scrut).flatMap((x_1) => {y_i -> x_1._i}translatePattern_2)`
+ *
+ * `x_1` references the result (inside the monad) of the extractor corresponding to `pat_1`,
+ * this result holds the values for the constructor arguments, which translatePattern_1 has extracted
+ * from the object pointed to by `x_scrut`. The `y_i` are the symbols bound by `pat_1` (in order)
+ * in the scope of the remainder of the pattern, and they must thus be replaced by:
+ * - (for 1-ary unapply) x_1
+ * - (for n-ary unapply, n > 1) selection of the i'th tuple component of `x_1`
+ * - (for unapplySeq) x_1.apply(i)
+ *
+ * in the treemakers,
+ *
+ * Thus, the result type of `translatePattern_i`'s extractor must conform to `M[(T_1,..., T_n)]`.
+ *
+ * Operationally, phase 1) is a foldLeft, since we must consider the depth-first-flattening of
+ * the transformed patterns from left to right. For every pattern ast node, it produces a transformed ast and
+ * a function that will take care of binding and substitution of the next ast (to the right).
+ *
+ */
+ def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = caseDef match { case CaseDef(pattern, guard, body) =>
+ translatePattern(scrutSym, pattern) ++ translateGuard(guard) :+ translateBody(body, pt)
+ }
+
+ def translatePattern(patBinder: Symbol, patTree: Tree): List[TreeMaker] = {
+ // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
+ type TranslationStep = (List[TreeMaker], List[(Symbol, Tree)])
+ def withSubPats(treeMakers: List[TreeMaker], subpats: (Symbol, Tree)*): TranslationStep = (treeMakers, subpats.toList)
+ def noFurtherSubPats(treeMakers: TreeMaker*): TranslationStep = (treeMakers.toList, Nil)
+
+ val pos = patTree.pos
+
+ def translateExtractorPattern(extractor: ExtractorCall): TranslationStep = {
+ if (!extractor.isTyped) ErrorUtils.issueNormalTypeError(patTree, "Could not typecheck extractor call: "+ extractor)(context)
+ // if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
+
+ debug.patmat("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))
+
+ // must use type `tp`, which is provided by extractor's result, not the type expected by binder,
+ // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
+ // (it will later result in a type test when `tp` is not a subtype of `b.info`)
+ // TODO: can we simplify this, together with the Bound case?
+ (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) =>
+ debug.patmat("changing "+ b +" : "+ b.info +" -> "+ tp)
+ b setInfo tp
+ }
+
+ // example check: List[Int] <:< ::[Int]
+ // TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
+ // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType`
+ val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) =
+ if (patBinder.info.widen <:< extractor.paramType) {
+ // no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it)
+ // SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors)
+ // TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call
+ // by going back to the parameterType for the extractor call we get a saner type, so let's just do that for now
+ /* TODO: uncomment when `settings.developer` and `devWarning` become available
+ if (settings.developer.value && !(patBinder.info =:= extractor.paramType))
+ devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}")
+ */
+ (Nil, patBinder setInfo extractor.paramType, false)
+ } else {
+ // chain a type-testing extractor before the actual extractor call
+ // it tests the type, checks the outer pointer and casts to the expected type
+ // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
+ // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
+ val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
+
+ // check whether typetest implies patBinder is not null,
+ // even though the eventual null check will be on patBinderOrCasted
+ // it'll be equal to patBinder casted to extractor.paramType anyway (and the type test is on patBinder)
+ (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder))
+ }
+
+ withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*)
+ }
+
+
+ object MaybeBoundTyped {
+ /** Decompose the pattern in `tree`, of shape C(p_1, ..., p_N), into a list of N symbols, and a list of its N sub-trees
+ * The list of N symbols contains symbols for every bound name as well as the un-named sub-patterns (fresh symbols are generated here for these).
+ * The returned type is the one inferred by inferTypedPattern (`owntype`)
+ *
+ * @arg patBinder symbol used to refer to the result of the previous pattern's extractor (will later be replaced by the outer tree with the correct tree to refer to that patterns result)
+ */
+ def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
+ // the Ident subpattern can be ignored, subpatBinder or patBinder tell us all we need to know about it
+ case Bound(subpatBinder, typed at Typed(Ident(_), tpt)) if typed.tpe ne null => Some((subpatBinder, typed.tpe))
+ case Bind(_, typed at Typed(Ident(_), tpt)) if typed.tpe ne null => Some((patBinder, typed.tpe))
+ case Typed(Ident(_), tpt) if tree.tpe ne null => Some((patBinder, tree.tpe))
+ case _ => None
+ }
+ }
+
+ val (treeMakers, subpats) = patTree match {
+ // skip wildcard trees -- no point in checking them
+ case WildcardPattern() => noFurtherSubPats()
+ case UnApply(unfun, args) =>
+ // TODO: check unargs == args
+ // debug.patmat("unfun: "+ (unfun.tpe, unfun.symbol.ownerChain, unfun.symbol.info, patBinder.info))
+ translateExtractorPattern(ExtractorCall(unfun, args))
+
+ /** A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
+ It consists of a stable identifier c, followed by element patterns p1, ..., pn.
+ The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
+
+ If the case class is monomorphic, then it must conform to the expected type of the pattern,
+ and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected types of the element patterns p1, ..., pn.
+
+ If the case class is polymorphic, then its type parameters are instantiated so that the instantiation of c conforms to the expected type of the pattern.
+ The instantiated formal parameter types of c’s primary constructor are then taken as the expected types of the component patterns p1, ..., pn.
+
+ The pattern matches all objects created from constructor invocations c(v1, ..., vn) where each element pattern pi matches the corresponding value vi .
+ A special case arises when c’s formal parameter types end in a repeated parameter. This is further discussed in (§8.1.9).
+ **/
+ case Apply(fun, args) =>
+ ExtractorCall.fromCaseClass(fun, args) map translateExtractorPattern getOrElse {
+ ErrorUtils.issueNormalTypeError(patTree, "Could not find unapply member for "+ fun +" with args "+ args)(context)
+ noFurtherSubPats()
+ }
+
+ /** A typed pattern x : T consists of a pattern variable x and a type pattern T.
+ The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
+ This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
+ **/
+ // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
+ case MaybeBoundTyped(subPatBinder, pt) =>
+ val next = glb(List(dealiasWiden(patBinder.info), pt)).normalize
+ // a typed pattern never has any subtrees
+ noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, next)(pos))
+
+ /** A pattern binder x at p consists of a pattern variable x and a pattern p.
+ The type of the variable x is the static type T of the pattern p.
+ This pattern matches any value v matched by the pattern p,
+ provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
+ and it binds the variable name to that value.
+ **/
+ case Bound(subpatBinder, p) =>
+ // replace subpatBinder by patBinder (as if the Bind was not there)
+ withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)),
+ // must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type, this is not guaranteed until we cast
+ (patBinder, p)
+ )
+
+ /** 8.1.4 Literal Patterns
+ A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
+ The type of L must conform to the expected type of the pattern.
+
+ 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
+ The pattern matches any value v such that r == v (§12.1).
+ The type of r must conform to the expected type of the pattern.
+ **/
+ case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) =>
+ noFurtherSubPats(EqualityTestTreeMaker(patBinder, patTree, pos))
+
+ case Alternative(alts) =>
+ noFurtherSubPats(AlternativesTreeMaker(patBinder, alts map (translatePattern(patBinder, _)), alts.head.pos))
+
+ /* TODO: Paul says about future version: I think this should work, and always intended to implement if I can get away with it.
+ case class Foo(x: Int, y: String)
+ case class Bar(z: Int)
+
+ def f(x: Any) = x match { case Foo(x, _) | Bar(x) => x } // x is lub of course.
+ */
+
+ case Bind(n, p) => // this happens in certain ill-formed programs, there'll be an error later
+ debug.patmat("WARNING: Bind tree with unbound symbol "+ patTree)
+ noFurtherSubPats() // there's no symbol -- something's wrong... don't fail here though (or should we?)
+
+ // case Star(_) | ArrayValue => error("stone age pattern relics encountered!")
+
+ case _ =>
+ typer.context.unit.error(patTree.pos, s"unsupported pattern: $patTree (a ${patTree.getClass}).\n This is a scalac bug. Tree diagnostics: ${asCompactDebugString(patTree)}.")
+ noFurtherSubPats()
+ }
+
+ treeMakers ++ subpats.flatMap { case (binder, pat) =>
+ translatePattern(binder, pat) // recurse on subpatterns
+ }
+ }
+
+ def translateGuard(guard: Tree): List[TreeMaker] =
+ if (guard == EmptyTree) Nil
+ else List(GuardTreeMaker(guard))
+
+ // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, must not hard-wire lifting into the monad (which is now done by codegen.one),
+ // so that user can generate failure when needed -- use implicit conversion to lift into monad on-demand?
+ // to enable this, probably need to move away from Option to a monad specific to pattern-match,
+ // so that we can return Option's from a match without ambiguity whether this indicates failure in the monad, or just some result in the monad
+ // 2) body.tpe is the type of the body after applying the substitution that represents the solution of GADT type inference
+ // need the explicit cast in case our substitutions in the body change the type to something that doesn't take GADT typing into account
+ def translateBody(body: Tree, matchPt: Type): TreeMaker =
+ BodyTreeMaker(body, matchPt)
+
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ object ExtractorCall {
+ def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCallRegular(unfun, args)
+ def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = Some(new ExtractorCallProd(fun, args))
+ }
+
+ abstract class ExtractorCall(val args: List[Tree]) {
+ val nbSubPats = args.length
+
+ // everything okay, captain?
+ def isTyped : Boolean
+
+ def isSeq: Boolean
+ lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last)
+
+ // to which type should the previous binder be casted?
+ def paramType : Type
+
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker
+
+ // `subPatBinders` are the variables bound by this pattern in the following patterns
+ // subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
+ lazy val subPatBinders = args map {
+ case Bound(b, p) => b
+ case p => freshSym(p.pos, prefix = "p")
+ }
+
+ lazy val subBindersAndPatterns: List[(Symbol, Tree)] = (subPatBinders zip args) map {
+ case (b, Bound(_, p)) => (b, p)
+ case bp => bp
+ }
+
+ // never store these in local variables (for PreserveSubPatBinders)
+ lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{
+ case (b, PatternBoundToUnderscore()) => b
+ }.toSet
+
+ def subPatTypes: List[Type] =
+ if(isSeq) {
+ val TypeRef(pre, SeqClass, args) = seqTp
+ // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
+ val formalsWithRepeated = rawSubPatTypes.init :+ typeRef(pre, RepeatedParamClass, args)
+
+ if (lastIsStar) formalTypes(formalsWithRepeated, nbSubPats - 1) :+ seqTp
+ else formalTypes(formalsWithRepeated, nbSubPats)
+ } else rawSubPatTypes
+
+ protected def rawSubPatTypes: List[Type]
+
+ protected def seqTp = rawSubPatTypes.last baseType SeqClass
+ protected def seqLenCmp = rawSubPatTypes.last member nme.lengthCompare
+ protected lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
+ protected lazy val lastIndexingBinder = if(lastIsStar) nbSubPats-2 else nbSubPats-1
+ protected lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1
+ protected lazy val minLenToCheck = if(lastIsStar) 1 else 0
+ protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder+1)
+ protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i)
+
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require isSeq
+ protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
+ val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder))
+ val nbIndexingIndices = indexingIndices.length
+
+ // this error-condition has already been checked by checkStarPatOK:
+ // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
+ // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
+ (((1 to firstIndexingBinder) map tupleSel(binder)) ++
+ // then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
+ (indexingIndices map codegen.index(seqTree(binder))) ++
+ // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder
+ (if(!lastIsStar) Nil else List(
+ if(nbIndexingIndices == 0) seqTree(binder)
+ else codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+ }
+
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require (nbSubPats > 0 && (!lastIsStar || isSeq))
+ protected def subPatRefs(binder: Symbol): List[Tree] =
+ if (nbSubPats == 0) Nil
+ else if (isSeq) subPatRefsSeq(binder)
+ else ((1 to nbSubPats) map tupleSel(binder)).toList
+
+ protected def lengthGuard(binder: Symbol): Option[Tree] =
+ // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
+ checkedLength map { expectedLength => import CODE._
+ // `binder.lengthCompare(expectedLength)`
+ def checkExpectedLength = (seqTree(binder) DOT seqLenCmp)(LIT(expectedLength))
+
+ // the comparison to perform
+ // when the last subpattern is a wildcard-star the expectedLength is but a lower bound
+ // (otherwise equality is required)
+ def compareOp: (Tree, Tree) => Tree =
+ if (lastIsStar) _ INT_>= _
+ else _ INT_== _
+
+ // `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
+ (seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO)
+ }
+
+ def checkedLength: Option[Int] =
+ // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
+ if (!isSeq || (expectedLength < minLenToCheck)) None
+ else Some(expectedLength)
+
+ }
+
+ // TODO: to be called when there's a def unapplyProd(x: T): U
+ // U must have N members _1,..., _N -- the _i are type checked, call their type Ti,
+ //
+ // for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it)
+ class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) {
+ // TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here:
+ /*override def equals(x$1: Any): Boolean = ...
+ val o5: Option[com.mosol.sl.Span[Any]] = // Span[Any] --> Any is not a legal type argument for Span!
+ */
+ // private val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
+ // private val origExtractorTp = unapplyMember(orig.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe).tpe
+ // private val extractorTp = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)
+ // debug.patmat("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)))
+ // debug.patmat("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe)))
+ private def constructorTp = fun.tpe
+
+ def isTyped = fun.isTyped
+
+ // to which type should the previous binder be casted?
+ def paramType = constructorTp.finalResultType
+
+ def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
+ protected def rawSubPatTypes = constructorTp.paramTypes
+
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
+ val paramAccessors = binder.constrParamAccessors
+ // binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
+ // make an exception for classes under the scala package as they should be well-behaved,
+ // to optimize matching on List
+ val mutableBinders =
+ if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) &&
+ (paramAccessors exists (_.isMutable)))
+ subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
+ else Nil
+
+ // checks binder ne null before chaining to the next extractor
+ ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
+ }
+
+ // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
+ override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
+ val accessors = binder.caseFieldAccessors
+ if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
+ else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
+ }
+
+ override def toString(): String = "case class "+ (if (constructorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args
+ }
+
+ class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) {
+ private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false }
+
+ def tpe = extractorCall.tpe
+ def isTyped = (tpe ne NoType) && extractorCall.isTyped && (resultInMonad ne ErrorType)
+ def paramType = tpe.paramTypes.head
+ def resultType = tpe.finalResultType
+ def isSeq = extractorCall.symbol.name == nme.unapplySeq
+
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` is not used in this subclass
+ *
+ * TODO: implement review feedback by @retronym:
+ * Passing the pair of values around suggests:
+ * case class Binder(sym: Symbol, knownNotNull: Boolean).
+ * Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
+ */
+ def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
+ // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
+ val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
+ val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
+ }
+
+ override protected def seqTree(binder: Symbol): Tree =
+ if (firstIndexingBinder == 0) CODE.REF(binder)
+ else super.seqTree(binder)
+
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require (nbSubPats > 0 && (!lastIsStar || isSeq))
+ override protected def subPatRefs(binder: Symbol): List[Tree] =
+ if (!isSeq && nbSubPats == 1) List(CODE.REF(binder)) // special case for extractors
+ else super.subPatRefs(binder)
+
+ protected def spliceApply(binder: Symbol): Tree = {
+ object splice extends Transformer {
+ override def transform(t: Tree) = t match {
+ case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) =>
+ treeCopy.Apply(t, x, List(CODE.REF(binder).setPos(i.pos)))
+ case _ => super.transform(t)
+ }
+ }
+ splice.transform(extractorCallIncludingDummy)
+ }
+
+ // what's the extractor's result type in the monad?
+ // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
+ protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
+ if (resultType.typeSymbol == BooleanClass) UnitClass.tpe
+ else matchMonadResult(resultType)
+ }
+
+ protected lazy val rawSubPatTypes =
+ if (resultInMonad.typeSymbol eq UnitClass) Nil
+ else if(!isSeq && nbSubPats == 1) List(resultInMonad)
+ else getProductArgs(resultInMonad) match {
+ case Nil => List(resultInMonad)
+ case x => x
+ }
+
+ override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")."
+ }
+
+ /** A conservative approximation of which patterns do not discern anything.
+ * They are discarded during the translation.
+ */
+ object WildcardPattern {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
+ case Ident(nme.WILDCARD) => true
+ case Star(WildcardPattern()) => true
+ case x: Ident => treeInfo.isVarPattern(x)
+ case Alternative(ps) => ps forall (WildcardPattern.unapply(_))
+ case EmptyTree => true
+ case _ => false
+ }
+ }
+
+ object PatternBoundToUnderscore {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
+ case Ident(nme.WILDCARD) => true
+ case Alternative(ps) => ps forall (PatternBoundToUnderscore.unapply(_))
+ case Typed(PatternBoundToUnderscore(), _) => true
+ case _ => false
+ }
+ }
+
+ object Bound {
+ def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
+ case t at Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
+ Some((t.symbol, p))
+ case _ => None
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
new file mode 100644
index 0000000..202f344
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -0,0 +1,614 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.tools.nsc.symtab.Flags.{SYNTHETIC, ARTIFACT}
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.NoPosition
+
+/** Translate our IR (TreeMakers) into actual Scala Trees using the factory methods in MatchCodeGen.
+ *
+ * The IR is mostly concerned with sequencing, substitution, and rendering all necessary conditions,
+ * mostly agnostic to whether we're in optimized/pure (virtualized) mode.
+ */
+trait MatchTreeMaking extends MatchCodeGen with Debugging {
+ import PatternMatchingStats._
+ import global.{Tree, Type, Symbol, CaseDef, atPos, settings,
+ Select, Block, ThisType, SingleType, NoPrefix, NoType, needsOuterTest,
+ ConstantType, Literal, Constant, gen, This, EmptyTree, map2, NoSymbol, Traverser,
+ Function, Typed, treeInfo, TypeRef, DefTree, Ident, nme}
+
+ import global.definitions.{SomeClass, AnyRefClass, UncheckedClass, BooleanClass}
+
+ final case class Suppression(exhaustive: Boolean, unreachable: Boolean)
+ object Suppression {
+ val NoSuppression = Suppression(false, false)
+ }
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// the making of the trees
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait TreeMakers extends TypedSubstitution with CodegenCore {
+ def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree])
+ def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit
+
+ def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] =
+ None
+
+ // for catch (no need to customize match failure)
+ def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] =
+ None
+
+ abstract class TreeMaker {
+ def pos: Position
+
+ /** captures the scope and the value of the bindings in patterns
+ * important *when* the substitution happens (can't accumulate and do at once after the full matcher has been constructed)
+ */
+ def substitution: Substitution =
+ if (currSub eq null) localSubstitution
+ else currSub
+
+ protected def localSubstitution: Substitution
+
+ private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
+ if (currSub ne null) {
+ debug.patmat("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst))
+ Thread.dumpStack()
+ }
+ else currSub = outerSubst >> substitution
+ }
+ private[this] var currSub: Substitution = null
+
+ /** The substitution that specifies the trees that compute the values of the subpattern binders.
+ *
+ * Should not be used to perform actual substitution!
+ * Only used to reason symbolically about the values the subpattern binders are bound to.
+ * See TreeMakerToCond#updateSubstitution.
+ *
+ * Overridden in PreserveSubPatBinders to pretend it replaces the subpattern binders by subpattern refs
+ * (Even though we don't do so anymore -- see SI-5158, SI-5739 and SI-6070.)
+ *
+ * TODO: clean this up, would be nicer to have some higher-level way to compute
+ * the binders bound by this tree maker and the symbolic values that correspond to them
+ */
+ def subPatternsAsSubstitution: Substitution = substitution
+
+ // build Tree that chains `next` after the current extractor
+ def chainBefore(next: Tree)(casegen: Casegen): Tree
+ }
+
+ trait NoNewBinders extends TreeMaker {
+ protected val localSubstitution: Substitution = EmptySubstitution
+ }
+
+ case class TrivialTreeMaker(tree: Tree) extends TreeMaker with NoNewBinders {
+ def pos = tree.pos
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = tree
+ }
+
+ case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker with NoNewBinders {
+ def pos = body.pos
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree)
+ atPos(body.pos)(casegen.one(substitution(body))) // since SubstOnly treemakers are dropped, need to do it here
+ override def toString = "B"+(body, matchPt)
+ }
+
+ case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
+ val pos = NoPosition
+
+ val localSubstitution = Substitution(prevBinder, CODE.REF(nextBinder))
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = substitution(next)
+ override def toString = "S"+ localSubstitution
+ }
+
+ abstract class FunTreeMaker extends TreeMaker {
+ val nextBinder: Symbol
+ def pos = nextBinder.pos
+ }
+
+ abstract class CondTreeMaker extends FunTreeMaker {
+ val prevBinder: Symbol
+ val nextBinderTp: Type
+ val cond: Tree
+ val res: Tree
+
+ lazy val nextBinder = freshSym(pos, nextBinderTp)
+ lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree =
+ atPos(pos)(casegen.flatMapCond(cond, res, nextBinder, substitution(next)))
+ }
+
+ // unless we're optimizing, emit local variable bindings for all subpatterns of extractor/case class patterns
+ protected val debugInfoEmitVars = !settings.optimise.value
+
+ trait PreserveSubPatBinders extends TreeMaker {
+ val subPatBinders: List[Symbol]
+ val subPatRefs: List[Tree]
+ val ignoredSubPatBinders: Set[Symbol]
+
+ // unless `debugInfoEmitVars`, this set should contain the bare minimum for correctness
+ // mutable case class fields need to be stored regardless (SI-5158, SI-6070) -- see override in ProductExtractorTreeMaker
+ // sub patterns bound to wildcard (_) are never stored as they can't be referenced
+ // dirty debuggers will have to get dirty to see the wildcards
+ lazy val storedBinders: Set[Symbol] =
+ (if (debugInfoEmitVars) subPatBinders.toSet else Set.empty) ++ extraStoredBinders -- ignoredSubPatBinders
+
+ // e.g., mutable fields of a case class in ProductExtractorTreeMaker
+ def extraStoredBinders: Set[Symbol]
+
+ def emitVars = storedBinders.nonEmpty
+
+ private lazy val (stored, substed) = (subPatBinders, subPatRefs).zipped.partition{ case (sym, _) => storedBinders(sym) }
+
+ protected lazy val localSubstitution: Substitution = if (!emitVars) Substitution(subPatBinders, subPatRefs)
+ else {
+ val (subPatBindersSubstituted, subPatRefsSubstituted) = substed.unzip
+ Substitution(subPatBindersSubstituted.toList, subPatRefsSubstituted.toList)
+ }
+
+ /** The substitution that specifies the trees that compute the values of the subpattern binders.
+ *
+ * We pretend to replace the subpattern binders by subpattern refs
+ * (Even though we don't do so anymore -- see SI-5158, SI-5739 and SI-6070.)
+ */
+ override def subPatternsAsSubstitution =
+ Substitution(subPatBinders, subPatRefs) >> super.subPatternsAsSubstitution
+
+ import CODE._
+ def bindSubPats(in: Tree): Tree =
+ if (!emitVars) in
+ else {
+ // binders in `subPatBindersStored` that are referenced by tree `in`
+ val usedBinders = new mutable.HashSet[Symbol]()
+ // all potentially stored subpat binders
+ val potentiallyStoredBinders = stored.unzip._1.toSet
+ // compute intersection of all symbols in the tree `in` and all potentially stored subpat binders
+ in.foreach(t => if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol)
+
+ if (usedBinders.isEmpty) in
+ else {
+ // only store binders actually used
+ val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip
+ Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+ }
+ }
+ }
+
+ /**
+ * Make a TreeMaker that will result in an extractor call specified by `extractor`
+ * the next TreeMaker (here, we don't know which it'll be) is chained after this one by flatMap'ing
+ * a function with binder `nextBinder` over our extractor's result
+ * the function's body is determined by the next TreeMaker
+ * (furthermore, the interpretation of `flatMap` depends on the codegen instance we're using).
+ *
+ * The values for the subpatterns, as computed by the extractor call in `extractor`,
+ * are stored in local variables that re-use the symbols in `subPatBinders`.
+ * This makes extractor patterns more debuggable (SI-5739).
+ */
+ case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol)(
+ val subPatBinders: List[Symbol],
+ val subPatRefs: List[Tree],
+ extractorReturnsBoolean: Boolean,
+ val checkedLength: Option[Int],
+ val prevBinder: Symbol,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
+
+ def extraStoredBinders: Set[Symbol] = Set()
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+ val condAndNext = extraCond match {
+ case Some(cond) =>
+ casegen.ifThenElseZero(substitution(cond), bindSubPats(substitution(next)))
+ case _ =>
+ bindSubPats(substitution(next))
+ }
+ atPos(extractor.pos)(
+ if (extractorReturnsBoolean) casegen.flatMapCond(extractor, CODE.UNIT, nextBinder, condAndNext)
+ else casegen.flatMap(extractor, nextBinder, condAndNext)
+ )
+ }
+
+ override def toString = "X"+(extractor, nextBinder.name)
+ }
+
+ /**
+ * An optimized version of ExtractorTreeMaker for Products.
+ * For now, this is hard-coded to case classes, and we simply extract the case class fields.
+ *
+ * The values for the subpatterns, as specified by the case class fields at the time of extraction,
+ * are stored in local variables that re-use the symbols in `subPatBinders`.
+ * This makes extractor patterns more debuggable (SI-5739) as well as
+ * avoiding mutation after the pattern has been matched (SI-5158, SI-6070)
+ *
+ * TODO: make this user-definable as follows
+ * When a companion object defines a method `def unapply_1(x: T): U_1`, but no `def unapply` or `def unapplySeq`,
+ * the extractor is considered to match any non-null value of type T
+ * the pattern is expected to have as many sub-patterns as there are `def unapply_I(x: T): U_I` methods,
+ * and the type of the I'th sub-pattern is `U_I`.
+ * The same exception for Seq patterns applies: if the last extractor is of type `Seq[U_N]`,
+ * the pattern must have at least N arguments (exactly N if the last argument is annotated with `: _*`).
+ * The arguments starting at N (and beyond) are taken from the sequence returned by apply_N,
+ * and it is checked that that sequence has enough elements to provide values for all expected sub-patterns.
+ *
+ * For a case class C, the implementation is assumed to be `def unapply_I(x: C) = x._I`,
+ * and the extractor call is inlined under that assumption.
+ */
+ case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree])(
+ val subPatBinders: List[Symbol],
+ val subPatRefs: List[Tree],
+ val mutableBinders: List[Symbol],
+ binderKnownNonNull: Boolean,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
+
+ import CODE._
+ val nextBinder = prevBinder // just passing through
+
+ // mutable binders must be stored to avoid unsoundness or seeing mutation of fields after matching (SI-5158, SI-6070)
+ def extraStoredBinders: Set[Symbol] = mutableBinders.toSet
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+ val nullCheck = REF(prevBinder) OBJ_NE NULL
+ val cond =
+ if (binderKnownNonNull) extraCond
+ else (extraCond map (nullCheck AND _)
+ orElse Some(nullCheck))
+
+ cond match {
+ case Some(cond) =>
+ casegen.ifThenElseZero(cond, bindSubPats(substitution(next)))
+ case _ =>
+ bindSubPats(substitution(next))
+ }
+ }
+
+ override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution)
+ }
+
+ object IrrefutableExtractorTreeMaker {
+ // will an extractor with unapply method of methodtype `tp` always succeed?
+ // note: this assumes the other side-conditions implied by the extractor are met
+ // (argument of the right type, length check succeeds for unapplySeq,...)
+ def irrefutableExtractorType(tp: Type): Boolean = tp.resultType.dealias match {
+ case TypeRef(_, SomeClass, _) => true
+ // probably not useful since this type won't be inferred nor can it be written down (yet)
+ case ConstantType(Constant(true)) => true
+ case _ => false
+ }
+
+ def unapply(xtm: ExtractorTreeMaker): Option[(Tree, Symbol)] = xtm match {
+ case ExtractorTreeMaker(extractor, None, nextBinder) if irrefutableExtractorType(extractor.tpe) =>
+ Some((extractor, nextBinder))
+ case _ =>
+ None
+ }
+ }
+
+ object TypeTestTreeMaker {
+ // factored out so that we can consistently generate other representations of the tree that implements the test
+ // (e.g. propositions for exhaustivity and friends, boolean for isPureTypeTest)
+ trait TypeTestCondStrategy {
+ type Result
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result
+ // TODO: can probably always widen
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result
+ def nonNullTest(testedBinder: Symbol): Result
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result
+ def eqTest(pat: Tree, testedBinder: Symbol): Result
+ def and(a: Result, b: Result): Result
+ def tru: Result
+ }
+
+ object treeCondStrategy extends TypeTestCondStrategy { import CODE._
+ type Result = Tree
+
+ def and(a: Result, b: Result): Result = a AND b
+ def tru = mkTRUE
+ def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp)
+ def nonNullTest(testedBinder: Symbol) = REF(testedBinder) OBJ_NE NULL
+ def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder)
+ def eqTest(pat: Tree, testedBinder: Symbol) = REF(testedBinder) OBJ_EQ pat
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Tree = {
+ val expectedOuter = expectedTp.prefix match {
+ case ThisType(clazz) => THIS(clazz)
+ case pre if pre != NoType => REF(pre.prefix, pre.termSymbol)
+ case _ => mkTRUE // fallback for SI-6183
+ }
+
+ // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
+ // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
+ val outer = expectedTp.typeSymbol.newMethod(vpmName.outer, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedTp.prefix
+
+ (Select(codegen._asInstanceOf(testedBinder, expectedTp), outer)) OBJ_EQ expectedOuter
+ }
+ }
+
+ object pureTypeTestChecker extends TypeTestCondStrategy {
+ type Result = Boolean
+
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result = true
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
+ def nonNullTest(testedBinder: Symbol): Result = false
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result = false
+ def eqTest(pat: Tree, testedBinder: Symbol): Result = false
+ def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false
+ def tru = true
+ }
+
+ def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy {
+ type Result = Boolean
+
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
+ def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def and(a: Result, b: Result): Result = a || b
+ def tru = false
+ }
+ }
+
+ /** implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations)
+ *
+ * Type patterns consist of types, type variables, and wildcards. A type pattern T is of one of the following forms:
+ - A reference to a class C, p.C, or T#C.
+ This type pattern matches any non-null instance of the given class.
+ Note that the prefix of the class, if it is given, is relevant for determining class instances.
+ For instance, the pattern p.C matches only instances of classes C which were created with the path p as prefix.
+ The bottom types scala.Nothing and scala.Null cannot be used as type patterns, because they would match nothing in any case.
+
+ - A singleton type p.type.
+ This type pattern matches only the value denoted by the path p
+ (that is, a pattern match involved a comparison of the matched value with p using method eq in class AnyRef). // TODO: the actual pattern matcher uses ==, so that's what I'm using for now
+ // https://issues.scala-lang.org/browse/SI-4577 "pattern matcher, still disappointing us at equality time"
+
+ - A compound type pattern T1 with ... with Tn where each Ti is a type pat- tern.
+ This type pattern matches all values that are matched by each of the type patterns Ti.
+
+ - A parameterized type pattern T[a1,...,an], where the ai are type variable patterns or wildcards _.
+ This type pattern matches all values which match T for some arbitrary instantiation of the type variables and wildcards.
+ The bounds or alias type of these type variable are determined as described in (§8.3).
+
+ - A parameterized type pattern scala.Array[T1], where T1 is a type pattern. // TODO
+ This type pattern matches any non-null instance of type scala.Array[U1], where U1 is a type matched by T1.
+ **/
+ case class TypeTestTreeMaker(prevBinder: Symbol, testedBinder: Symbol, expectedTp: Type, nextBinderTp: Type)(override val pos: Position, extractorArgTypeTest: Boolean = false) extends CondTreeMaker {
+ import TypeTestTreeMaker._
+ debug.patmat("TTTM"+(prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))
+
+ lazy val outerTestNeeded = (
+ !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
+ && needsOuterTest(expectedTp, testedBinder.info, matchOwner))
+
+ // the logic to generate the run-time test that follows from the fact that
+ // a `prevBinder` is expected to have type `expectedTp`
+ // the actual tree-generation logic is factored out, since the analyses generate Cond(ition)s rather than Trees
+ // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null`
+ // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false")
+ def renderCondition(cs: TypeTestCondStrategy): cs.Result = {
+ import cs._
+
+ def default =
+ // do type test first to ensure we won't select outer on null
+ if (outerTestNeeded) and(typeTest(testedBinder, expectedTp), outerTest(testedBinder, expectedTp))
+ else typeTest(testedBinder, expectedTp)
+
+ // propagate expected type
+ def expTp(t: Tree): t.type = t setType expectedTp
+
+ // true when called to type-test the argument to an extractor
+ // don't do any fancy equality checking, just test the type
+ if (extractorArgTypeTest) default
+ else expectedTp match {
+ // TODO: [SPEC] the spec requires `eq` instead of `==` for singleton types
+ // this implies sym.isStable
+ case SingleType(_, sym) => and(equalsTest(gen.mkAttributedQualifier(expectedTp), testedBinder), typeTest(testedBinder, expectedTp.widen))
+ // must use == to support e.g. List() == Nil
+ case ThisType(sym) if sym.isModule => and(equalsTest(CODE.REF(sym), testedBinder), typeTest(testedBinder, expectedTp.widen))
+ case ConstantType(Constant(null)) if testedBinder.info.widen <:< AnyRefClass.tpe
+ => eqTest(expTp(CODE.NULL), testedBinder)
+ case ConstantType(const) => equalsTest(expTp(Literal(const)), testedBinder)
+ case ThisType(sym) => eqTest(expTp(This(sym)), testedBinder)
+
+ // TODO: verify that we don't need to special-case Array
+ // I think it's okay:
+ // - the isInstanceOf test includes a test for the element type
+ // - Scala's arrays are invariant (so we don't drop type tests unsoundly)
+ case _ if testedBinder.info.widen <:< expectedTp =>
+ // if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
+ // since the types conform, no further checking is required
+ if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
+ // have to test outer and non-null only when it's a reference type
+ else if (expectedTp <:< AnyRefClass.tpe) {
+ // do non-null check first to ensure we won't select outer on null
+ if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
+ else nonNullTest(testedBinder)
+ } else default
+
+ case _ => default
+ }
+ }
+
+ val cond = renderCondition(treeCondStrategy)
+ val res = codegen._asInstanceOf(testedBinder, nextBinderTp)
+
+ // is this purely a type test, e.g. no outer check, no equality tests (used in switch emission)
+ def isPureTypeTest = renderCondition(pureTypeTestChecker)
+
+ def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder))
+
+ override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp)
+ }
+
+ // need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp)
+ case class EqualityTestTreeMaker(prevBinder: Symbol, patTree: Tree, override val pos: Position) extends CondTreeMaker {
+ val nextBinderTp = prevBinder.info.widen
+
+ // NOTE: generate `patTree == patBinder`, since the extractor must be in control of the equals method (also, patBinder may be null)
+ // equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
+ val cond = codegen._equals(patTree, prevBinder)
+ val res = CODE.REF(prevBinder)
+ override def toString = "ET"+(prevBinder.name, patTree)
+ }
+
+ case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders {
+ // don't substitute prevBinder to nextBinder, a set of alternatives does not need to introduce a new binder, simply reuse the previous one
+
+ override private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
+ super.incorporateOuterSubstitution(outerSubst)
+ altss = altss map (alts => propagateSubstitution(alts, substitution))
+ }
+
+ def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = { import CODE._
+ atPos(pos){
+ // one alternative may still generate multiple trees (e.g., an extractor call + equality test)
+ // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
+ val combinedAlts = altss map (altTreeMakers =>
+ ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(mkTRUE)))(casegen))
+ )
+
+ val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => mkFALSE))
+ codegenAlt.ifThenElseZero(findAltMatcher, substitution(next))
+ }
+ }
+ }
+
+ case class GuardTreeMaker(guardTree: Tree) extends TreeMaker with NoNewBinders {
+ val pos = guardTree.pos
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = casegen.flatMapGuard(substitution(guardTree), next)
+ override def toString = "G("+ guardTree +")"
+ }
+
+ // combineExtractors changes the current substitution's of the tree makers in `treeMakers`
+ // requires propagateSubstitution(treeMakers) has been called
+ def combineExtractors(treeMakers: List[TreeMaker])(casegen: Casegen): Tree =
+ treeMakers.foldRight(EmptyTree: Tree)((a, b) => a.chainBefore(b)(casegen))
+
+
+ def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker])
+
+ // a foldLeft to accumulate the localSubstitution left-to-right
+ // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fullfilled by propagateSubstitution
+ def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = {
+ var accumSubst: Substitution = initial
+ treeMakers foreach { maker =>
+ maker incorporateOuterSubstitution accumSubst
+ accumSubst = maker.substitution
+ }
+ removeSubstOnly(treeMakers)
+ }
+
+ // calls propagateSubstitution on the treemakers
+ def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = {
+ // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
+ val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution))
+ combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, matchFailGenOverride)
+ }
+
+ // pt is the fully defined type of the cases (either pt or the lub of the types of the cases)
+ def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree =
+ fixerUpper(owner, scrut.pos){
+ def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree)))
+ debug.patmat("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
+
+ val (suppression, requireSwitch): (Suppression, Boolean) =
+ if (settings.XnoPatmatAnalysis.value) (Suppression.NoSuppression, false)
+ else scrut match {
+ case Typed(tree, tpt) =>
+ val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass
+ val supressUnreachable = tree match {
+ case Ident(name) if name startsWith nme.CHECK_IF_REFUTABLE_STRING => true // SI-7183 don't warn for withFilter's that turn out to be irrefutable.
+ case _ => false
+ }
+ val suppression = Suppression(suppressExhaustive, supressUnreachable)
+ // matches with two or fewer cases need not apply for switchiness (if-then-else will do)
+ val requireSwitch = treeInfo.isSwitchAnnotation(tpt.tpe) && casesNoSubstOnly.lengthCompare(2) > 0
+ (suppression, requireSwitch)
+ case _ =>
+ (Suppression.NoSuppression, false)
+ }
+
+ emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, suppression.exhaustive).getOrElse{
+ if (requireSwitch) typer.context.unit.warning(scrut.pos, "could not emit switch for @switch annotated match")
+
+ if (casesNoSubstOnly nonEmpty) {
+ // before optimizing, check casesNoSubstOnly for presence of a default case,
+ // since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
+ // exhaustivity and reachability must be checked before optimization as well
+ // TODO: improve notion of trivial/irrefutable -- a trivial type test before the body still makes for a default case
+ // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op)
+ // irrefutability checking should use the approximation framework also used for CSE, unreachability and exhaustivity checking
+ val synthCatchAll =
+ if (casesNoSubstOnly.nonEmpty && {
+ val nonTrivLast = casesNoSubstOnly.last
+ nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
+ }) None
+ else matchFailGen
+
+ analyzeCases(scrutSym, casesNoSubstOnly, pt, suppression)
+
+ val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt)
+
+ val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases map combineExtractors, synthCatchAll)
+
+ if (toHoist isEmpty) matchRes else Block(toHoist, matchRes)
+ } else {
+ codegen.matcher(scrut, scrutSym, pt)(Nil, matchFailGen)
+ }
+ }
+ }
+
+ // TODO: do this during tree construction, but that will require tracking the current owner in treemakers
+ // TODO: assign more fine-grained positions
+ // fixes symbol nesting, assigns positions
+ protected def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser {
+ currentOwner = origOwner
+
+ override def traverse(t: Tree) {
+ if (t != EmptyTree && t.pos == NoPosition) {
+ t.setPos(pos)
+ }
+ t match {
+ case Function(_, _) if t.symbol == NoSymbol =>
+ t.symbol = currentOwner.newAnonymousFunctionValue(t.pos)
+ debug.patmat("new symbol for "+ (t, t.symbol.ownerChain))
+ case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
+ debug.patmat("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
+ t.symbol.owner = currentOwner
+ case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
+ debug.patmat("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
+ if(d.symbol.moduleClass ne NoSymbol)
+ d.symbol.moduleClass.owner = currentOwner
+
+ d.symbol.owner = currentOwner
+ // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
+ debug.patmat("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
+ case _ =>
+ }
+ super.traverse(t)
+ }
+
+ // override def apply
+ // debug.patmat("before fixerupper: "+ xTree)
+ // currentRun.trackerFactory.snapshot()
+ // debug.patmat("after fixerupper")
+ // currentRun.trackerFactory.snapshot()
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
new file mode 100644
index 0000000..df4e699
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -0,0 +1,256 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.ast
+import scala.language.postfixOps
+import scala.tools.nsc.transform.TypingTransformers
+import scala.tools.nsc.transform.Transform
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.Types
+import scala.reflect.internal.util.Position
+
+/** Translate pattern matching.
+ *
+ * Either into optimized if/then/else's, or virtualized as method calls (these methods form a zero-plus monad),
+ * similar in spirit to how for-comprehensions are compiled.
+ *
+ * For each case, express all patterns as extractor calls, guards as 0-ary extractors, and sequence them using `flatMap`
+ * (lifting the body of the case into the monad using `one`).
+ *
+ * Cases are combined into a pattern match using the `orElse` combinator (the implicit failure case is expressed using the monad's `zero`).
+ *
+ * TODO:
+ * - DCE (on irrefutable patterns)
+ * - update spec and double check it's implemented correctly (see TODO's)
+ *
+ * (longer-term) TODO:
+ * - user-defined unapplyProd
+ * - recover GADT typing by locally inserting implicit witnesses to type equalities derived from the current case, and considering these witnesses during subtyping (?)
+ * - recover exhaustivity/unreachability of user-defined extractors by partitioning the types they match on using an HList or similar type-level structure
+ */
+trait PatternMatching extends Transform with TypingTransformers
+ with Debugging
+ with Interface
+ with MatchTranslation
+ with MatchTreeMaking
+ with MatchCodeGen
+ with ScalaLogic
+ with Solving
+ with MatchAnalysis
+ with MatchOptimization {
+ import global._
+
+ val phaseName: String = "patmat"
+
+ def newTransformer(unit: CompilationUnit): Transformer =
+ if (opt.virtPatmat) new MatchTransformer(unit)
+ else noopTransformer
+
+ class MatchTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ override def transform(tree: Tree): Tree = tree match {
+ case Match(sel, cases) =>
+ val origTp = tree.tpe
+ // setType origTp intended for CPS -- TODO: is it necessary?
+ val translated = translator.translateMatch(treeCopy.Match(tree, transform(sel), transformTrees(cases).asInstanceOf[List[CaseDef]]))
+ try {
+ localTyper.typed(translated) setType origTp
+ } catch {
+ case x: (Types#TypeError) =>
+ // TODO: this should never happen; error should've been reported during type checking
+ unit.error(tree.pos, "error during expansion of this match (this is a scalac bug).\nThe underlying error was: "+ x.msg)
+ translated
+ }
+ case Try(block, catches, finalizer) =>
+ treeCopy.Try(tree, transform(block), translator.translateTry(transformTrees(catches).asInstanceOf[List[CaseDef]], tree.tpe, tree.pos), transform(finalizer))
+ case _ => super.transform(tree)
+ }
+
+ // TODO: only instantiate new match translator when localTyper has changed
+ // override def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A
+ // as this is the only time TypingTransformer changes it
+ def translator: MatchTranslator with CodegenCore = {
+ new OptimizingMatchTranslator(localTyper)
+ }
+ }
+
+ class PureMatchTranslator(val typer: analyzer.Typer, val matchStrategy: Tree) extends MatchTranslator with PureCodegen {
+ def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type) = (cases, Nil)
+ def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit = {}
+ }
+
+ class OptimizingMatchTranslator(val typer: analyzer.Typer) extends MatchTranslator
+ with MatchOptimizer
+ with MatchAnalyzer
+ with Solver
+}
+
+trait Debugging {
+ val global: Global
+
+ // TODO: the inliner fails to inline the closures to debug.patmat unless the method is nested in an object
+ object debug {
+ val printPatmat = global.settings.Ypatmatdebug.value
+ @inline final def patmat(s: => String) = if (printPatmat) println(s)
+ }
+}
+
+trait Interface extends ast.TreeDSL {
+ import global.{newTermName, analyzer, Type, ErrorType, Symbol, Tree}
+ import analyzer.Typer
+
+ // 2.10/2.11 compatibility
+ protected final def dealiasWiden(tp: Type) = tp.dealias // 2.11: dealiasWiden
+ protected final def mkTRUE = CODE.TRUE_typed // 2.11: CODE.TRUE
+ protected final def mkFALSE = CODE.FALSE_typed // 2.11: CODE.FALSE
+ protected final def hasStableSymbol(p: Tree) = p.hasSymbol && p.symbol.isStable // 2.11: p.hasSymbolField && p.symbol.isStable
+ protected final def devWarning(str: String) = global.debugwarn(str) // 2.11: omit
+
+ object vpmName {
+ val one = newTermName("one")
+ val drop = newTermName("drop")
+ val flatMap = newTermName("flatMap")
+ val get = newTermName("get")
+ val guard = newTermName("guard")
+ val isEmpty = newTermName("isEmpty")
+ val orElse = newTermName("orElse")
+ val outer = newTermName("<outer>")
+ val runOrElse = newTermName("runOrElse")
+ val zero = newTermName("zero")
+ val _match = newTermName("__match") // don't call the val __match, since that will trigger virtual pattern matching...
+
+ def counted(str: String, i: Int) = newTermName(str + i)
+ }
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// talking to userland
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ /** Interface with user-defined match monad?
+ * if there's a `__match` in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
+
+ type Matcher[P[_], M[+_], A] = {
+ def flatMap[B](f: P[A] => M[B]): M[B]
+ def orElse[B >: A](alternative: => M[B]): M[B]
+ }
+
+ abstract class MatchStrategy[P[_], M[+_]] {
+ // runs the matcher on the given input
+ def runOrElse[T, U](in: P[T])(matcher: P[T] => M[U]): P[U]
+
+ def zero: M[Nothing]
+ def one[T](x: P[T]): M[T]
+ def guard[T](cond: P[Boolean], then: => P[T]): M[T]
+ }
+
+ * P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`)
+
+
+ * if no `__match` is found, we assume the following implementation (and generate optimized code accordingly)
+
+ object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] {
+ def zero = None
+ def one[T](x: T) = Some(x)
+ // NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted
+ def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None
+ def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
+ }
+
+ */
+ trait MatchMonadInterface {
+ val typer: Typer
+ val matchOwner = typer.context.owner
+
+ def reportUnreachable(pos: Position) = typer.context.unit.warning(pos, "unreachable code")
+ def reportMissingCases(pos: Position, counterExamples: List[String]) = {
+ val ceString =
+ if (counterExamples.tail.isEmpty) "input: " + counterExamples.head
+ else "inputs: " + counterExamples.mkString(", ")
+
+ typer.context.unit.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
+ }
+
+ def inMatchMonad(tp: Type): Type
+ def pureType(tp: Type): Type
+ final def matchMonadResult(tp: Type): Type =
+ tp.baseType(matchMonadSym).typeArgs match {
+ case arg :: Nil => arg
+ case _ => ErrorType
+ }
+
+ protected def matchMonadSym: Symbol
+ }
+
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// substitution
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait TypedSubstitution extends MatchMonadInterface {
+ object Substitution {
+ def apply(from: Symbol, to: Tree) = new Substitution(List(from), List(to))
+ // requires sameLength(from, to)
+ def apply(from: List[Symbol], to: List[Tree]) =
+ if (from nonEmpty) new Substitution(from, to) else EmptySubstitution
+ }
+
+ class Substitution(val from: List[Symbol], val to: List[Tree]) {
+ import global.{Transformer, Ident, NoType}
+
+ // We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed,
+ // and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees.
+ def apply(tree: Tree): Tree = {
+ // according to -Ystatistics 10% of translateMatch's time is spent in this method...
+ // since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst
+ if (!tree.exists { case i at Ident(_) => from contains i.symbol case _ => false}) tree
+ else (new Transformer {
+ private def typedIfOrigTyped(to: Tree, origTp: Type): Tree =
+ if (origTp == null || origTp == NoType) to
+ // important: only type when actually substing and when original tree was typed
+ // (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors)
+ else typer.typed(to)
+
+ override def transform(tree: Tree): Tree = {
+ def subst(from: List[Symbol], to: List[Tree]): Tree =
+ if (from.isEmpty) tree
+ else if (tree.symbol == from.head) typedIfOrigTyped(to.head.shallowDuplicate.setPos(tree.pos), tree.tpe)
+ else subst(from.tail, to.tail)
+
+ tree match {
+ case Ident(_) => subst(from, to)
+ case _ => super.transform(tree)
+ }
+ }
+ }).transform(tree)
+ }
+
+
+ // the substitution that chains `other` before `this` substitution
+ // forall t: Tree. this(other(t)) == (this >> other)(t)
+ def >>(other: Substitution): Substitution = {
+ val (fromFiltered, toFiltered) = (from, to).zipped filter { (f, t) => !other.from.contains(f) }
+ new Substitution(other.from ++ fromFiltered, other.to.map(apply) ++ toFiltered) // a quick benchmarking run indicates the `.map(apply)` is not too costly
+ }
+ override def toString = (from.map(_.name) zip to) mkString("Substitution(", ", ", ")")
+ }
+
+ object EmptySubstitution extends Substitution(Nil, Nil) {
+ override def apply(tree: Tree): Tree = tree
+ override def >>(other: Substitution): Substitution = other
+ }
+ }
+}
+
+object PatternMatchingStats {
+ val patmatNanos = Statistics.newTimer ("time spent in patmat", "patmat")
+ val patmatAnaDPLL = Statistics.newSubTimer (" of which DPLL", patmatNanos)
+ val patmatCNF = Statistics.newSubTimer (" of which in CNF conversion", patmatNanos)
+ val patmatCNFSizes = Statistics.newQuantMap[Int, Statistics.Counter](" CNF size counts", "patmat")(Statistics.newCounter(""))
+ val patmatAnaVarEq = Statistics.newSubTimer (" of which variable equality", patmatNanos)
+ val patmatAnaExhaust = Statistics.newSubTimer (" of which in exhaustivity", patmatNanos)
+ val patmatAnaReach = Statistics.newSubTimer (" of which in unreachability", patmatNanos)
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
new file mode 100644
index 0000000..ec66bf6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -0,0 +1,243 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+
+// naive CNF translation and simple DPLL solver
+trait Solving extends Logic {
+ import PatternMatchingStats._
+ trait CNF extends PropositionalLogic {
+
+ /** Override Array creation for efficiency (to not go through reflection). */
+ private implicit val clauseTag: scala.reflect.ClassTag[Clause] = new scala.reflect.ClassTag[Clause] {
+ def runtimeClass: java.lang.Class[Clause] = classOf[Clause]
+ final override def newArray(len: Int): Array[Clause] = new Array[Clause](len)
+ }
+
+ import scala.collection.mutable.ArrayBuffer
+ type FormulaBuilder = ArrayBuffer[Clause]
+ def formulaBuilder = ArrayBuffer[Clause]()
+ def formulaBuilderSized(init: Int) = new ArrayBuffer[Clause](init)
+ def addFormula(buff: FormulaBuilder, f: Formula): Unit = buff ++= f
+ def toFormula(buff: FormulaBuilder): Formula = buff
+
+ // CNF: a formula is a conjunction of clauses
+ type Formula = FormulaBuilder
+ def formula(c: Clause*): Formula = ArrayBuffer(c: _*)
+
+ type Clause = Set[Lit]
+ // a clause is a disjunction of distinct literals
+ def clause(l: Lit*): Clause = l.toSet
+
+ type Lit
+ def Lit(sym: Sym, pos: Boolean = true): Lit
+
+ def andFormula(a: Formula, b: Formula): Formula = a ++ b
+ def simplifyFormula(a: Formula): Formula = a.distinct
+
+ private def merge(a: Clause, b: Clause) = a ++ b
+
+ // throws an AnalysisBudget.Exception when the prop results in a CNF that's too big
+ // TODO: be smarter/more efficient about this (http://lara.epfl.ch/w/sav09:tseitin_s_encoding)
+ def eqFreePropToSolvable(p: Prop): Formula = {
+ def negationNormalFormNot(p: Prop, budget: Int): Prop =
+ if (budget <= 0) throw AnalysisBudget.exceeded
+ else p match {
+ case And(a, b) => Or(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
+ case Or(a, b) => And(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
+ case Not(p) => negationNormalForm(p, budget - 1)
+ case True => False
+ case False => True
+ case s: Sym => Not(s)
+ }
+
+ def negationNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Prop =
+ if (budget <= 0) throw AnalysisBudget.exceeded
+ else p match {
+ case And(a, b) => And(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
+ case Or(a, b) => Or(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
+ case Not(negated) => negationNormalFormNot(negated, budget - 1)
+ case True
+ | False
+ | (_ : Sym) => p
+ }
+
+ val TrueF = formula()
+ val FalseF = formula(clause())
+ def lit(s: Sym) = formula(clause(Lit(s)))
+ def negLit(s: Sym) = formula(clause(Lit(s, false)))
+
+ def conjunctiveNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Formula = {
+ def distribute(a: Formula, b: Formula, budget: Int): Formula =
+ if (budget <= 0) throw AnalysisBudget.exceeded
+ else
+ (a, b) match {
+ // true \/ _ = true
+ // _ \/ true = true
+ case (trueA, trueB) if trueA.size == 0 || trueB.size == 0 => TrueF
+ // lit \/ lit
+ case (a, b) if a.size == 1 && b.size == 1 => formula(merge(a(0), b(0)))
+ // (c1 /\ ... /\ cn) \/ d = ((c1 \/ d) /\ ... /\ (cn \/ d))
+ // d \/ (c1 /\ ... /\ cn) = ((d \/ c1) /\ ... /\ (d \/ cn))
+ case (cs, ds) =>
+ val (big, small) = if (cs.size > ds.size) (cs, ds) else (ds, cs)
+ big flatMap (c => distribute(formula(c), small, budget - (big.size*small.size)))
+ }
+
+ if (budget <= 0) throw AnalysisBudget.exceeded
+
+ p match {
+ case True => TrueF
+ case False => FalseF
+ case s: Sym => lit(s)
+ case Not(s: Sym) => negLit(s)
+ case And(a, b) =>
+ val cnfA = conjunctiveNormalForm(a, budget - 1)
+ val cnfB = conjunctiveNormalForm(b, budget - cnfA.size)
+ cnfA ++ cnfB
+ case Or(a, b) =>
+ val cnfA = conjunctiveNormalForm(a)
+ val cnfB = conjunctiveNormalForm(b)
+ distribute(cnfA, cnfB, budget - (cnfA.size + cnfB.size))
+ }
+ }
+
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatCNF) else null
+ val res = conjunctiveNormalForm(negationNormalForm(p))
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatCNF, start)
+
+ //
+ if (Statistics.canEnable) patmatCNFSizes(res.size).value += 1
+
+// debug.patmat("cnf for\n"+ p +"\nis:\n"+cnfString(res))
+ res
+ }
+ }
+
+ // simple solver using DPLL
+ trait Solver extends CNF {
+ // a literal is a (possibly negated) variable
+ def Lit(sym: Sym, pos: Boolean = true) = new Lit(sym, pos)
+ class Lit(val sym: Sym, val pos: Boolean) {
+ override def toString = if (!pos) "-"+ sym.toString else sym.toString
+ override def equals(o: Any) = o match {
+ case o: Lit => (o.sym eq sym) && (o.pos == pos)
+ case _ => false
+ }
+ override def hashCode = sym.hashCode + pos.hashCode
+
+ def unary_- = Lit(sym, !pos)
+ }
+
+ def cnfString(f: Formula) = alignAcrossRows(f map (_.toList) toList, "\\/", " /\\\n")
+
+ // adapted from http://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat)
+ val EmptyModel = Map.empty[Sym, Boolean]
+ val NoModel: Model = null
+
+ // returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??)
+ def findAllModelsFor(f: Formula): List[Model] = {
+ val vars: Set[Sym] = f.flatMap(_ collect {case l: Lit => l.sym}).toSet
+ // debug.patmat("vars "+ vars)
+ // the negation of a model -(S1=True/False /\ ... /\ SN=True/False) = clause(S1=False/True, ...., SN=False/True)
+ def negateModel(m: Model) = clause(m.toSeq.map{ case (sym, pos) => Lit(sym, !pos) } : _*)
+
+ def findAllModels(f: Formula, models: List[Model], recursionDepthAllowed: Int = 10): List[Model]=
+ if (recursionDepthAllowed == 0) models
+ else {
+ debug.patmat("find all models for\n"+ cnfString(f))
+ val model = findModelFor(f)
+ // if we found a solution, conjunct the formula with the model's negation and recurse
+ if (model ne NoModel) {
+ val unassigned = (vars -- model.keySet).toList
+ debug.patmat("unassigned "+ unassigned +" in "+ model)
+ def force(lit: Lit) = {
+ val model = withLit(findModelFor(dropUnit(f, lit)), lit)
+ if (model ne NoModel) List(model)
+ else Nil
+ }
+ val forced = unassigned flatMap { s =>
+ force(Lit(s, true)) ++ force(Lit(s, false))
+ }
+ debug.patmat("forced "+ forced)
+ val negated = negateModel(model)
+ findAllModels(f :+ negated, model :: (forced ++ models), recursionDepthAllowed - 1)
+ }
+ else models
+ }
+
+ findAllModels(f, Nil)
+ }
+
+ private def withLit(res: Model, l: Lit): Model = if (res eq NoModel) NoModel else res + (l.sym -> l.pos)
+ private def dropUnit(f: Formula, unitLit: Lit): Formula = {
+ val negated = -unitLit
+ // drop entire clauses that are trivially true
+ // (i.e., disjunctions that contain the literal we're making true in the returned model),
+ // and simplify clauses by dropping the negation of the literal we're making true
+ // (since False \/ X == X)
+ val dropped = formulaBuilderSized(f.size)
+ for {
+ clause <- f
+ if !(clause contains unitLit)
+ } dropped += (clause - negated)
+ dropped
+ }
+
+ def findModelFor(f: Formula): Model = {
+ @inline def orElse(a: Model, b: => Model) = if (a ne NoModel) a else b
+
+ debug.patmat("DPLL\n"+ cnfString(f))
+
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaDPLL) else null
+
+ val satisfiableWithModel: Model =
+ if (f isEmpty) EmptyModel
+ else if(f exists (_.isEmpty)) NoModel
+ else f.find(_.size == 1) match {
+ case Some(unitClause) =>
+ val unitLit = unitClause.head
+ // debug.patmat("unit: "+ unitLit)
+ withLit(findModelFor(dropUnit(f, unitLit)), unitLit)
+ case _ =>
+ // partition symbols according to whether they appear in positive and/or negative literals
+ // SI-7020 Linked- for deterministic counter examples.
+ val pos = new mutable.LinkedHashSet[Sym]()
+ val neg = new mutable.LinkedHashSet[Sym]()
+ f.foreach{_.foreach{ lit =>
+ if (lit.pos) pos += lit.sym else neg += lit.sym
+ }}
+ // appearing in both positive and negative
+ val impures: mutable.LinkedHashSet[Sym] = pos intersect neg
+ // appearing only in either positive/negative positions
+ val pures: mutable.LinkedHashSet[Sym] = (pos ++ neg) -- impures
+
+ if (pures nonEmpty) {
+ val pureSym = pures.head
+ // turn it back into a literal
+ // (since equality on literals is in terms of equality
+ // of the underlying symbol and its positivity, simply construct a new Lit)
+ val pureLit = Lit(pureSym, pos(pureSym))
+ // debug.patmat("pure: "+ pureLit +" pures: "+ pures +" impures: "+ impures)
+ val simplified = f.filterNot(_.contains(pureLit))
+ withLit(findModelFor(simplified), pureLit)
+ } else {
+ val split = f.head.head
+ // debug.patmat("split: "+ split)
+ orElse(findModelFor(f :+ clause(split)), findModelFor(f :+ clause(-split)))
+ }
+ }
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start)
+
+ satisfiableWithModel
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
new file mode 100644
index 0000000..62c584e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
@@ -0,0 +1,83 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package typechecker
+
+/** This trait provides logic for assessing the validity of argument
+ * adaptations, such as tupling, unit-insertion, widening, etc. Such
+ * logic is spread around the compiler, without much ability on the
+ * part of the user to tighten the potentially dangerous bits.
+ *
+ * TODO: unifying/consolidating said logic under consistent management.
+ *
+ * @author Paul Phillips
+ */
+trait Adaptations {
+ self: Analyzer =>
+
+ import global._
+ import definitions._
+
+ trait Adaptation {
+ self: Typer =>
+
+ def checkValidAdaptation(t: Tree, args: List[Tree]): Boolean = {
+ def applyArg = t match {
+ case Apply(_, arg :: Nil) => arg
+ case _ => EmptyTree
+ }
+ def callString = (
+ ( if (t.symbol.isConstructor) "new " else "" ) +
+ ( t.symbol.owner.decodedName ) +
+ ( if (t.symbol.isConstructor || t.symbol.name == nme.apply) "" else "." + t.symbol.decodedName )
+ )
+ def sigString = t.symbol.owner.decodedName + (
+ if (t.symbol.isConstructor) t.symbol.signatureString
+ else "." + t.symbol.decodedName + t.symbol.signatureString
+ )
+ def givenString = if (args.isEmpty) "<none>" else args.mkString(", ")
+ def adaptedArgs = if (args.isEmpty) "(): Unit" else args.mkString("(", ", ", "): " + applyArg.tpe)
+
+ def adaptWarning(msg: String) = context.warning(t.pos, msg +
+ "\n signature: " + sigString +
+ "\n given arguments: " + givenString +
+ "\n after adaptation: " + callString + "(" + adaptedArgs + ")"
+ )
+ // A one-argument method accepting Object (which may look like "Any"
+ // at this point if the class is java defined) is a "leaky target" for
+ // which we should be especially reluctant to insert () or auto-tuple.
+ def isLeakyTarget = {
+ val oneArgObject = t.symbol.paramss match {
+ case (param :: Nil) :: Nil => ObjectClass isSubClass param.tpe.typeSymbol
+ case _ => false
+ }
+ // Unfortunately various "universal" methods and the manner in which
+ // they are used limits our ability to enforce anything sensible until
+ // an opt-in compiler option is given.
+ oneArgObject && !(
+ isStringAddition(t.symbol)
+ || isArrowAssoc(t.symbol)
+ || t.symbol.name == nme.equals_
+ || t.symbol.name == nme.EQ
+ || t.symbol.name == nme.NE
+ )
+ }
+
+ if (settings.noAdaptedArgs.value)
+ adaptWarning("No automatic adaptation here: use explicit parentheses.")
+ else if (settings.warnAdaptedArgs.value)
+ adaptWarning(
+ if (args.isEmpty) "Adapting argument list by inserting (): " + (
+ if (isLeakyTarget) "leaky (Object-receiving) target makes this especially dangerous."
+ else "this is unlikely to be what you want."
+ )
+ else "Adapting argument list by creating a " + args.size + "-tuple: this may not be what you want."
+ )
+
+ !settings.noAdaptedArgs.value
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index 1635a9a..b504863 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -1,12 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package typechecker
-import util.Statistics._
+import scala.reflect.internal.util.Statistics
/** The main attribution phase.
*/
@@ -20,8 +20,12 @@ trait Analyzer extends AnyRef
with EtaExpansion
with SyntheticMethods
with Unapplies
+ with Macros
with NamesDefaults
with TypeDiagnostics
+ with ContextErrors
+ with StdAttachments
+ with AnalyzerPlugins
{
val global : Global
import global._
@@ -55,7 +59,7 @@ trait Analyzer extends AnyRef
override def traverse(tree: Tree): Unit = tree match {
case ModuleDef(_, _, _) =>
if (tree.symbol.name == nme.PACKAGEkw) {
- loaders.openPackageModule(tree.symbol)()
+ openPackageModule(tree.symbol, tree.symbol.owner)
}
case ClassDef(_, _, _, _) => () // make it fast
case _ => super.traverse(tree)
@@ -69,6 +73,7 @@ trait Analyzer extends AnyRef
}
object typerFactory extends SubComponent {
+ import scala.reflect.internal.TypesStats.typerNanos
val global: Analyzer.this.global.type = Analyzer.this.global
val phaseName = "typer"
val runsAfter = List[String]()
@@ -81,13 +86,13 @@ trait Analyzer extends AnyRef
// compiler run). This is good enough for the resident compiler, which was the most affected.
undoLog.clear()
override def run() {
- val start = startTimer(typerNanos)
+ val start = if (Statistics.canEnable) Statistics.startTimer(typerNanos) else null
global.echoPhaseSummary(this)
currentRun.units foreach applyPhase
undoLog.clear()
// need to clear it after as well or 10K+ accumulated entries are
// uncollectable the rest of the way.
- stopTimer(typerNanos, start)
+ if (Statistics.canEnable) Statistics.stopTimer(typerNanos, start)
}
def apply(unit: CompilationUnit) {
try {
diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
new file mode 100644
index 0000000..28f620d
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
@@ -0,0 +1,225 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package typechecker
+
+/**
+ * @author Lukas Rytz
+ * @version 1.0
+ */
+trait AnalyzerPlugins { self: Analyzer =>
+ import global._
+
+
+ trait AnalyzerPlugin {
+ /**
+ * Selectively activate this analyzer plugin, e.g. according to the compiler phase.
+ *
+ * Note that the current phase can differ from the global compiler phase (look for `enteringPhase`
+ * invocations in the compiler). For instance, lazy types created by the UnPickler are completed
+ * at the phase in which their symbol is created. Observations show that this can even be the
+ * parser phase. Since symbol completion can trigger subtyping, typing etc, your plugin might
+ * need to be active also in phases other than namer and typer.
+ *
+ * Typically, this method can be implemented as
+ *
+ * global.phase.id < global.currentRun.picklerPhase.id
+ */
+ def isActive(): Boolean = true
+
+ /**
+ * Let analyzer plugins change the expected type before type checking a tree.
+ */
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = pt
+
+ /**
+ * Let analyzer plugins modify the type that has been computed for a tree.
+ *
+ * @param tpe The type inferred by the type checker, initially (for first plugin) `tree.tpe`
 + * @param typer The typer that type checked `tree`
+ * @param tree The type-checked tree
+ * @param mode Mode that was used for typing `tree`
+ * @param pt Expected type that was used for typing `tree`
+ */
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = tpe
+
+ /**
+ * Let analyzer plugins change the types assigned to definitions. For definitions that have
+ * an annotated type, the assigned type is obtained by typing that type tree. Otherwise, the
+ * type is inferred by typing the definition's righthand side.
+ *
+ * In order to know if the type was inferred, you can query the `wasEmpty` field in the `tpt`
+ * TypeTree of the definition (for DefDef and ValDef).
+ *
+ * (*) If the type of a method or value is inferred, the type-checked tree is stored in the
+ * `analyzer.transformed` hash map, indexed by the definition's rhs tree.
+ *
+ * NOTE: Invoking the type checker can lead to cyclic reference errors. For instance, if this
 + * method is called from the type completer of a recursive method, type checking the method
+ * rhs will invoke the same completer again. It might be possible to avoid this situation by
+ * assigning `tpe` to `defTree.symbol` (untested) - the final type computed by this method
+ * will then be assigned to the definition's symbol by monoTypeCompleter (in Namers).
+ *
+ * The hooks into `typeSig` allow analyzer plugins to add annotations to (or change the types
 + * of) definition symbols. This cannot be achieved by using `pluginsTyped`: this method
+ * is only called during type checking, so changing the type of a symbol at this point is too
 + * late: references to the symbol might already be typed and therefore obtain the original
+ * type assigned during naming.
+ *
+ * @param defTree is the definition for which the type was computed. The different cases are
+ * outlined below. Note that this type is untyped (for methods and values with inferred type,
+ * the typed rhs trees are available in analyzer.transformed).
+ *
+ * Case defTree: Template
+ * - tpe : A ClassInfoType for the template
+ * - typer: The typer for template members, i.e. expressions and definitions of defTree.body
+ * - pt : WildcardType
+ * - the class symbol is accessible through typer.context.owner
+ *
+ * Case defTree: ClassDef
+ * - tpe : A ClassInfoType, or a PolyType(params, ClassInfoType) for polymorphic classes.
+ * The class type is the one computed by templateSig, i.e. through the above case
+ * - typer: The typer for the class. Note that this typer has a different context than the
+ * typer for the template.
+ * - pt : WildcardType
+ *
+ * Case defTree: ModuleDef
+ * - tpe : A ClassInfoType computed by templateSig
+ * - typer: The typer for the module. context.owner of this typer is the module class symbol
+ * - pt : WildcardType
+ *
+ * Case defTree: DefDef
+ * - tpe : The type of the method (MethodType, PolyType or NullaryMethodType). (*)
 + * - typer: The typer for the rhs of this method
+ * - pt : If tpt.isEmpty, either the result type from the overridden method, or WildcardType.
+ * Otherwise the type obtained from typing tpt.
+ * - Note that for constructors, pt is the class type which the constructor creates. To type
+ * check the rhs of the constructor however, the expected type has to be WildcardType (see
+ * Typers.typedDefDef)
+ *
+ * Case defTree: ValDef
+ * - tpe : The type of this value. (*)
+ * - typer: The typer for the rhs of this value
+ * - pt : If tpt.isEmpty, WildcardType. Otherwise the type obtained from typing tpt.
+ * - Note that pluginsTypeSig might be called multiple times for the same ValDef since it is
+ * used to compute the types of the accessor methods (see `pluginsTypeSigAccessor`)
+ *
+ * Case defTree: TypeDef
+ * - tpe : The type obtained from typing rhs (PolyType if the TypeDef defines a polymorphic type)
+ * - typer: The typer for the rhs of this type
+ * - pt : WildcardType
+ */
+ def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = tpe
+
+ /**
+ * Modify the types of field accessors. The namer phase creates method types for getters and
+ * setters based on the type of the corresponding field.
+ *
+ * Note: in order to compute the method type of an accessor, the namer calls `typeSig` on the
+ * `ValDef` tree of the corresponding field. This implies that the `pluginsTypeSig` method
+ * is potentially called multiple times for the same ValDef tree.
+ *
+ * @param tpe The method type created by the namer for the accessor
+ * @param typer The typer for the ValDef (not for the rhs)
+ * @param tree The ValDef corresponding to the accessor
+ * @param sym The accessor method symbol (getter, setter, beanGetter or beanSetter)
+ */
+ def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type = tpe
+
+ /**
+ * Decide whether this analyzer plugin can adapt a tree that has an annotated type to the
+ * given type tp, taking into account the given mode (see method adapt in trait Typers).
+ */
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = false
+
+ /**
+ * Adapt a tree that has an annotated type to the given type tp, taking into account the given
+ * mode (see method adapt in trait Typers).
+ *
+ * An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
+ * class cannot do the adapting, it should return the tree unchanged.
+ */
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = tree
+
+ /**
+ * Modify the type of a return expression. By default, return expressions have type
+ * NothingClass.tpe.
+ *
+ * @param tpe The type of the return expression
+ * @param typer The typer that was used for typing the return tree
+ * @param tree The typed return expression tree
+ * @param pt The return type of the enclosing method
+ */
+ def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = tpe
+ }
+
+
+
+ /** A list of registered analyzer plugins */
+ private var analyzerPlugins: List[AnalyzerPlugin] = Nil
+
+ /** Registers a new analyzer plugin */
+ def addAnalyzerPlugin(plugin: AnalyzerPlugin) {
+ if (!analyzerPlugins.contains(plugin))
+ analyzerPlugins = plugin :: analyzerPlugins
+ }
+
+
+ /** @see AnalyzerPlugin.pluginsPt */
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type =
+ if (analyzerPlugins.isEmpty) pt
+ else analyzerPlugins.foldLeft(pt)((pt, plugin) =>
+ if (!plugin.isActive()) pt else plugin.pluginsPt(pt, typer, tree, mode))
+
+ /** @see AnalyzerPlugin.pluginsTyped */
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersTpe = addAnnotations(tree, tpe)
+ if (analyzerPlugins.isEmpty) annotCheckersTpe
+ else analyzerPlugins.foldLeft(annotCheckersTpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTyped(tpe, typer, tree, mode, pt))
+ }
+
+ /** @see AnalyzerPlugin.pluginsTypeSig */
+ def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type =
+ if (analyzerPlugins.isEmpty) tpe
+ else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypeSig(tpe, typer, defTree, pt))
+
+ /** @see AnalyzerPlugin.pluginsTypeSigAccessor */
+ def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type =
+ if (analyzerPlugins.isEmpty) tpe
+ else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypeSigAccessor(tpe, typer, tree, sym))
+
+ /** @see AnalyzerPlugin.canAdaptAnnotations */
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersExists = global.canAdaptAnnotations(tree, mode, pt)
+ annotCheckersExists || {
+ if (analyzerPlugins.isEmpty) false
+ else analyzerPlugins.exists(plugin =>
+ plugin.isActive() && plugin.canAdaptAnnotations(tree, typer, mode, pt))
+ }
+ }
+
+ /** @see AnalyzerPlugin.adaptAnnotations */
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersTree = global.adaptAnnotations(tree, mode, pt)
+ if (analyzerPlugins.isEmpty) annotCheckersTree
+ else analyzerPlugins.foldLeft(annotCheckersTree)((tree, plugin) =>
+ if (!plugin.isActive()) tree else plugin.adaptAnnotations(tree, typer, mode, pt))
+ }
+
+ /** @see AnalyzerPlugin.pluginsTypedReturn */
+ def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = {
+ val annotCheckersType = adaptTypeOfReturn(tree.expr, pt, tpe)
+ if (analyzerPlugins.isEmpty) annotCheckersType
+ else analyzerPlugins.foldLeft(annotCheckersType)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypedReturn(tpe, typer, tree, pt))
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
new file mode 100644
index 0000000..d30b5c2
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -0,0 +1,283 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package typechecker
+
+import scala.collection.{ mutable, immutable }
+import scala.collection.mutable.ListBuffer
+import scala.util.control.ControlThrowable
+import symtab.Flags._
+import scala.annotation.tailrec
+import Checkability._
+
+/** On pattern matcher checkability:
+ *
+ * Consider a pattern match of this form: (x: X) match { case _: P => }
+ *
+ * There are four possibilities to consider:
+ * [P1] X will always conform to P
+ * [P2] x will never conform to P
+ * [P3] X <: P if some runtime test is true
+ * [P4] X cannot be checked against P
+ *
+ * The first two cases correspond to those when there is enough
+ * static information to say X <: P or that (x ∈ X) ⇒ (x ∉ P).
+ * The fourth case includes unknown abstract types or structural
+ * refinements appearing within a pattern.
+ *
+ * The third case is the interesting one. We designate another type, XR,
+ * which is essentially the intersection of X and |P|, where |P| is
+ * the erasure of P. If XR <: P, then no warning is emitted.
+ *
+ * Examples of how this info is put to use:
+ * sealed trait A[T] ; class B[T] extends A[T]
+ * def f(x: B[Int]) = x match { case _: A[Int] if true => }
+ * def g(x: A[Int]) = x match { case _: B[Int] => }
+ *
+ * `f` requires no warning because X=B[Int], P=A[Int], and B[Int] <:< A[Int].
+ * `g` requires no warning because X=A[Int], P=B[Int], XR=B[Int], and B[Int] <:< B[Int].
+ * XR=B[Int] because a value of type A[Int] which is tested to be a B can
+ * only be a B[Int], due to the definition of B (B[T] extends A[T].)
+ *
+ * This is something like asSeenFrom, only rather than asking what a type looks
+ * like from the point of view of one of its base classes, we ask what it looks
+ * like from the point of view of one of its subclasses.
+ */
+trait Checkable {
+ self: Analyzer =>
+
+ import global._
+ import definitions._
+ import CheckabilityChecker.{ isNeverSubType, isNeverSubClass }
+
+ /** The applied type of class 'to' after inferring anything
+ * possible from the knowledge that 'to' must also be of the
+ * type given in 'from'.
+ */
+ def propagateKnownTypes(from: Type, to: Symbol): Type = {
+ def tparams = to.typeParams
+ val tvars = tparams map (p => TypeVar(p))
+ val tvarType = appliedType(to, tvars: _*)
+ val bases = from.baseClasses filter (to.baseClasses contains _)
+
+ bases foreach { bc =>
+ val tps1 = (from baseType bc).typeArgs
+ val tps2 = (tvarType baseType bc).typeArgs
+ (tps1, tps2).zipped foreach (_ =:= _)
+ // Alternate, variance respecting formulation causes
+ // neg/unchecked3.scala to fail (abstract types). TODO -
+ // figure it out. It seems there is more work to do if I
+ // allow for variance, because the constraints accumulate
+ // as bounds and "tvar.instValid" is false.
+ //
+ // foreach3(tps1, tps2, bc.typeParams)((tp1, tp2, tparam) =>
+ // if (tparam.initialize.isCovariant) tp1 <:< tp2
+ // else if (tparam.isContravariant) tp2 <:< tp1
+ // else tp1 =:= tp2
+ // )
+ }
+
+ val resArgs = tparams zip tvars map {
+ case (_, tvar) if tvar.instValid => tvar.constr.inst
+ case (tparam, _) => tparam.tpe
+ }
+ appliedType(to, resArgs: _*)
+ }
+
+ private def isUnwarnableTypeArgSymbol(sym: Symbol) = (
+ sym.isTypeParameter // dummy
+ || (sym.name.toTermName == nme.WILDCARD) // _
+ || nme.isVariableName(sym.name) // type variable
+ )
+ private def isUnwarnableTypeArg(arg: Type) = (
+ uncheckedOk(arg) // @unchecked T
+ || isUnwarnableTypeArgSymbol(arg.typeSymbolDirect) // has to be direct: see pos/t1439
+ )
+ private def uncheckedOk(tp: Type) = tp hasAnnotation UncheckedClass
+
+ private def typeArgsInTopLevelType(tp: Type): List[Type] = {
+ val tps = tp match {
+ case RefinedType(parents, _) => parents flatMap typeArgsInTopLevelType
+ case TypeRef(_, ArrayClass, arg :: Nil) => typeArgsInTopLevelType(arg)
+ case TypeRef(pre, sym, args) => typeArgsInTopLevelType(pre) ++ args
+ case ExistentialType(tparams, underlying) => tparams.map(_.tpe) ++ typeArgsInTopLevelType(underlying)
+ case _ => Nil
+ }
+ tps filterNot isUnwarnableTypeArg
+ }
+
+ private class CheckabilityChecker(val X: Type, val P: Type) {
+ def Xsym = X.typeSymbol
+ def Psym = P.typeSymbol
+ def XR = propagateKnownTypes(X, Psym)
+ // sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean]
+ def P1 = X matchesPattern P
+ def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P)
+ def P3 = isNonRefinementClassType(P) && (XR matchesPattern P)
+ def P4 = !(P1 || P2 || P3)
+
+ def summaryString = f"""
+ |Checking checkability of (x: $X) against pattern $P
+ |[P1] $P1%-6s X <: P // $X <: $P
+ |[P2] $P2%-6s x ∉ P // (x ∈ $X) ⇒ (x ∉ $P)
+ |[P3] $P3%-6s XR <: P // $XR <: $P
+ |[P4] $P4%-6s None of the above // !(P1 || P2 || P3)
+ """.stripMargin.trim
+
+ val result = (
+ if (X.isErroneous || P.isErroneous) CheckabilityError
+ else if (P1) StaticallyTrue
+ else if (P2) StaticallyFalse
+ else if (P3) RuntimeCheckable
+ else if (uncheckableType == NoType) {
+ // Avoid warning (except ourselves) if we can't pinpoint the uncheckable type
+ debugwarn("Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n" + summaryString)
+ CheckabilityError
+ }
+ else Uncheckable
+ )
+ lazy val uncheckableType = if (Psym.isAbstractType) P else {
+ val possibles = typeArgsInTopLevelType(P).toSet
+ val opt = possibles find { targ =>
+ // Create a derived type with every possibly uncheckable type replaced
+ // with a WildcardType, except for 'targ'. If !(XR <: derived) then
+ // 'targ' is uncheckable.
+ val derived = P map (tp => if (possibles(tp) && !(tp =:= targ)) WildcardType else tp)
+ !(XR <:< derived)
+ }
+ opt getOrElse NoType
+ }
+
+ def neverSubClass = isNeverSubClass(Xsym, Psym)
+ def neverMatches = result == StaticallyFalse
+ def isUncheckable = result == Uncheckable
+ def uncheckableMessage = uncheckableType match {
+ case NoType => "something"
+ case tp @ RefinedType(_, _) => "refinement " + tp
+ case TypeRef(_, sym, _) if sym.isAbstractType => "abstract type " + sym.name
+ case tp => "non-variable type argument " + tp
+ }
+ }
+
+ /** X, P, [P1], etc. are all explained at the top of the file.
+ */
+ private object CheckabilityChecker {
+ /** Are these symbols classes with no subclass relationship? */
+ def areUnrelatedClasses(sym1: Symbol, sym2: Symbol) = (
+ sym1.isClass
+ && sym2.isClass
+ && !(sym1 isSubClass sym2)
+ && !(sym2 isSubClass sym1)
+ )
+ /** Are all children of these symbols pairwise irreconcilable? */
+ def allChildrenAreIrreconcilable(sym1: Symbol, sym2: Symbol) = (
+ sym1.children.toList forall (c1 =>
+ sym2.children.toList forall (c2 =>
+ areIrreconcilableAsParents(c1, c2)
+ )
+ )
+ )
+ /** Is it impossible for the given symbols to be parents in the same class?
+ * This means given A and B, can there be an instance of A with B? This is the
+ * case if neither A nor B is a subclass of the other, and one of the following
+ * additional conditions holds:
+ * - either A or B is effectively final
+ * - neither A nor B is a trait (i.e. both are actual classes, not eligible for mixin)
+ * - both A and B are sealed, and every possible pairing of their children is irreconcilable
+ *
+ * TODO: the last two conditions of the last possibility (that the symbols are not of
+ * classes being compiled in the current run) are because this currently runs too early,
+ * and .children returns Nil for sealed classes because their children will not be
+ * populated until typer. It was too difficult to move things around for the moment,
+ * so I will consult with moors about the optimal time to be doing this.
+ */
+ def areIrreconcilableAsParents(sym1: Symbol, sym2: Symbol): Boolean = areUnrelatedClasses(sym1, sym2) && (
+ sym1.initialize.isEffectivelyFinal // initialization important
+ || sym2.initialize.isEffectivelyFinal
+ || !sym1.isTrait && !sym2.isTrait
+ || sym1.isSealed && sym2.isSealed && allChildrenAreIrreconcilable(sym1, sym2) && !currentRun.compiles(sym1) && !currentRun.compiles(sym2)
+ )
+ def isNeverSubClass(sym1: Symbol, sym2: Symbol) = areIrreconcilableAsParents(sym1, sym2)
+
+ private def isNeverSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol]): Boolean = /*logResult(s"isNeverSubArgs($tps1, $tps2, $tparams)")*/ {
+ def isNeverSubArg(t1: Type, t2: Type, variance: Int) = {
+ if (variance > 0) isNeverSubType(t2, t1)
+ else if (variance < 0) isNeverSubType(t1, t2)
+ else isNeverSameType(t1, t2)
+ }
+ exists3(tps1, tps2, tparams map (_.variance))(isNeverSubArg)
+ }
+ private def isNeverSameType(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
+ case (TypeRef(_, sym1, args1), TypeRef(_, sym2, args2)) =>
+ isNeverSubClass(sym1, sym2) || ((sym1 == sym2) && isNeverSubArgs(args1, args2, sym1.typeParams))
+ case _ =>
+ false
+ }
+ // Important to dealias at any entry point (this is the only one at this writing.)
+ def isNeverSubType(tp1: Type, tp2: Type): Boolean = /*logResult(s"isNeverSubType($tp1, $tp2)")*/((tp1.dealias, tp2.dealias) match {
+ case (TypeRef(_, sym1, args1), TypeRef(_, sym2, args2)) =>
+ isNeverSubClass(sym1, sym2) || {
+ (sym1 isSubClass sym2) && {
+ val tp1seen = tp1 baseType sym2
+ isNeverSubArgs(tp1seen.typeArgs, args2, sym2.typeParams)
+ }
+ }
+ case _ => false
+ })
+ }
+
+ trait InferCheckable {
+ self: Inferencer =>
+
+ /** TODO: much better error positions.
+ * Kind of stuck right now because they just pass us the one tree.
+ * TODO: Eliminate inPattern, canRemedy, which have no place here.
+ */
+ def checkCheckable(tree: Tree, P0: Type, X0: Type, inPattern: Boolean, canRemedy: Boolean = false) {
+ if (uncheckedOk(P0)) return
+ def where = if (inPattern) "pattern " else ""
+
+ // singleton types not considered here
+ val P = P0.widen
+ val X = X0.widen
+
+ P match {
+ // Prohibit top-level type tests for these, but they are ok nested (e.g. case Foldable[Nothing] => ... )
+ case TypeRef(_, NothingClass | NullClass | AnyValClass, _) =>
+ InferErrorGen.TypePatternOrIsInstanceTestError(tree, P)
+ // If top-level abstract types can be checked using a classtag extractor, don't warn about them
+ case TypeRef(_, sym, _) if sym.isAbstractType && canRemedy =>
+ ;
+ // Matching on types like case _: AnyRef { def bippy: Int } => doesn't work -- yet.
+ case RefinedType(_, decls) if !decls.isEmpty =>
+ getContext.unit.warning(tree.pos, s"a pattern match on a refinement type is unchecked")
+ case _ =>
+ val checker = new CheckabilityChecker(X, P)
+ log(checker.summaryString)
+ if (checker.neverMatches) {
+ val addendum = if (checker.neverSubClass) "" else " (but still might match its erasure)"
+ getContext.unit.warning(tree.pos, s"fruitless type test: a value of type $X cannot also be a $P$addendum")
+ }
+ else if (checker.isUncheckable) {
+ val msg = (
+ if (checker.uncheckableType =:= P) s"abstract type $where$P"
+ else s"${checker.uncheckableMessage} in type $where$P"
+ )
+ getContext.unit.warning(tree.pos, s"$msg is unchecked since it is eliminated by erasure")
+ }
+ }
+ }
+ }
+}
+
+private[typechecker] final class Checkability(val value: Int) extends AnyVal { }
+private[typechecker] object Checkability {
+ val StaticallyTrue = new Checkability(0)
+ val StaticallyFalse = new Checkability(1)
+ val RuntimeCheckable = new Checkability(2)
+ val Uncheckable = new Checkability(3)
+ val CheckabilityError = new Checkability(4)
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
index 5b9ad61..89e2ee4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -27,7 +27,7 @@ abstract class ConstantFolder {
case _ => null
})
- /** If tree is a constant value that can be converted to type `pt', perform
+ /** If tree is a constant value that can be converted to type `pt`, perform
* the conversion.
*
* @param tree ...
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
new file mode 100644
index 0000000..a7b0e47
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -0,0 +1,1372 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package typechecker
+
+import scala.collection.{ mutable, immutable }
+import scala.reflect.internal.util.StringOps.{ countElementsAsString, countAsString }
+import symtab.Flags.{ PRIVATE, PROTECTED, IS_ERROR }
+import scala.compat.Platform.EOL
+import scala.reflect.runtime.ReflectionUtils
+import scala.reflect.macros.runtime.AbortMacroException
+import scala.util.control.NonFatal
+import scala.tools.nsc.util.stackTraceString
+
+trait ContextErrors {
+ self: Analyzer =>
+
+ import global._
+ import definitions._
+ import treeInfo._
+
+ object ErrorKinds extends Enumeration {
+ type ErrorKind = Value
+ val Normal, Access, Ambiguous, Divergent = Value
+ }
+
+ import ErrorKinds.ErrorKind
+
+ trait AbsTypeError extends Throwable {
+ def errPos: Position
+ def errMsg: String
+ def kind: ErrorKind
+ }
+
+ case class NormalTypeError(underlyingTree: Tree, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
+ extends AbsTypeError {
+
+ def errPos:Position = underlyingTree.pos
+ override def toString() = "[Type error at:" + underlyingTree.pos + "] " + errMsg
+ }
+
+ case class SymbolTypeError(underlyingSym: Symbol, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
+ extends AbsTypeError {
+
+ def errPos = underlyingSym.pos
+ }
+
+ case class TypeErrorWrapper(ex: TypeError, kind: ErrorKind = ErrorKinds.Normal)
+ extends AbsTypeError {
+ def errMsg = ex.msg
+ def errPos = ex.pos
+ }
+
+ case class TypeErrorWithUnderlyingTree(tree: Tree, ex: TypeError, kind: ErrorKind = ErrorKinds.Normal)
+ extends AbsTypeError {
+ def errMsg = ex.msg
+ def errPos = tree.pos
+ }
+
+ // Unlike other type errors diverging implicit expansion
+ // will be re-issued explicitly on failed implicit argument search.
+ // This is because we want to:
+ // 1) provide better error message than just "implicit not found"
+ // 2) provide the type of the implicit parameter for which we got diverging expansion
+ // (pt at the point of divergence gives less information to the user)
+ // Note: it is safe to delay error message generation in this case
+ // becasue we don't modify implicits' infos.
+ // only issued when -Xdivergence211 is turned on
+ case class DivergentImplicitTypeError(tree: Tree, pt0: Type, sym: Symbol) extends AbsTypeError {
+ def errPos: Position = tree.pos
+ def errMsg: String = errMsgForPt(pt0)
+ def kind = ErrorKinds.Divergent
+ def withPt(pt: Type): AbsTypeError = NormalTypeError(tree, errMsgForPt(pt), kind)
+ private def errMsgForPt(pt: Type) =
+ s"diverging implicit expansion for type ${pt}\nstarting with ${sym.fullLocationString}"
+ }
+
+ case class AmbiguousTypeError(underlyingTree: Tree, errPos: Position, errMsg: String, kind: ErrorKind = ErrorKinds.Ambiguous) extends AbsTypeError
+
+ case class PosAndMsgTypeError(errPos: Position, errMsg: String, kind: ErrorKind = ErrorKinds.Normal) extends AbsTypeError
+
+ object ErrorUtils {
+ def issueNormalTypeError(tree: Tree, msg: String)(implicit context: Context) {
+ issueTypeError(NormalTypeError(tree, msg))
+ }
+
+ def issueSymbolTypeError(sym: Symbol, msg: String)(implicit context: Context) {
+ issueTypeError(SymbolTypeError(sym, msg))
+ }
+
+ // only called when -Xdivergence211 is turned off
+ def issueDivergentImplicitsError(tree: Tree, msg: String)(implicit context: Context) {
+ issueTypeError(NormalTypeError(tree, msg, ErrorKinds.Divergent))
+ }
+
+ def issueAmbiguousTypeError(pre: Type, sym1: Symbol, sym2: Symbol, err: AmbiguousTypeError)(implicit context: Context) {
+ context.issueAmbiguousError(pre, sym1, sym2, err)
+ }
+
+ def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) }
+
+ def typeErrorMsg(found: Type, req: Type, possiblyMissingArgs: Boolean) = {
+ def missingArgsMsg = if (possiblyMissingArgs) "\n possible cause: missing arguments for method or constructor" else ""
+
+ "type mismatch" + foundReqMsg(found, req) + missingArgsMsg
+ }
+ }
+
+ def notAnyRefMessage(found: Type): String = {
+ val tp = found.widen
+ def name = tp.typeSymbol.nameString
+ def parents = tp.parents filterNot isTrivialTopType
+ def onlyAny = tp.parents forall (_.typeSymbol == AnyClass)
+ def parents_s = ( if (parents.isEmpty) tp.parents else parents ) mkString ", "
+ def what = (
+ if (tp.typeSymbol.isAbstractType) {
+ val descr = if (onlyAny) "unbounded" else "bounded only by " + parents_s
+ s"$name is $descr, which means AnyRef is not a known parent"
+ }
+ else if (tp.typeSymbol.isAnonOrRefinementClass)
+ s"the parents of this type ($parents_s) extend Any, not AnyRef"
+ else
+ s"$name extends Any, not AnyRef"
+ )
+ if (isPrimitiveValueType(found) || isTrivialTopType(tp)) "" else "\n" +
+ sm"""|Note that $what.
+ |Such types can participate in value classes, but instances
+ |cannot appear in singleton types or in reference comparisons."""
+ }
+
+ import ErrorUtils._
+
+ trait TyperContextErrors {
+ self: Typer =>
+
+ import infer.setError
+
+ object TyperErrorGen {
+ implicit val contextTyperErrorGen: Context = infer.getContext
+
+ def UnstableTreeError(tree: Tree) = {
+ def addendum = {
+ "\n Note that "+tree.symbol+" is not stable because its type, "+tree.tpe+", is volatile."
+ }
+ issueNormalTypeError(tree,
+ "stable identifier required, but "+tree+" found." + (
+ if (isStableExceptVolatile(tree)) addendum else ""))
+ setError(tree)
+ }
+
+ def NoImplicitFoundError(tree: Tree, param: Symbol) = {
+ def errMsg = {
+ val paramName = param.name
+ val paramTp = param.tpe
+ paramTp.typeSymbolDirect match {
+ case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp)
+ case _ =>
+ "could not find implicit value for "+
+ (if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX) "evidence parameter of type "
+ else "parameter "+paramName+": ")+paramTp
+ }
+ }
+ issueNormalTypeError(tree, errMsg)
+ }
+
+ def AdaptTypeError(tree: Tree, found: Type, req: Type) = {
+ // If the expected type is a refinement type, and the found type is a refinement or an anon
+ // class, we can greatly improve the error message by retyping the tree to recover the actual
+ // members present, then display along with the expected members. This is done here because
+ // this is the last point where we still have access to the original tree, rather than just
+ // the found/req types.
+ val foundType: Type = req.normalize match {
+ case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass =>
+ val retyped = typed (tree.duplicate setType null)
+ val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic)
+
+ if (foundDecls.isEmpty || (found.typeSymbol eq NoSymbol)) found
+ else {
+ // The members arrive marked private, presumably because there was no
+ // expected type and so they're considered members of an anon class.
+ foundDecls foreach (_.makePublic)
+ // TODO: if any of the found parents match up with required parents after normalization,
+ // print the error so that they match. The major beneficiary there would be
+ // java.lang.Object vs. AnyRef.
+ refinedType(found.parents, found.typeSymbol.owner, foundDecls, tree.pos)
+ }
+ case _ =>
+ found
+ }
+ assert(!found.isErroneous && !req.isErroneous, (found, req))
+
+ issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req))) )
+ if (settings.explaintypes.value)
+ explainTypes(found, req)
+ }
+
+ def WithFilterError(tree: Tree, ex: AbsTypeError) = {
+ issueTypeError(ex)
+ setError(tree)
+ }
+
+ def ParentTypesError(templ: Template, ex: TypeError) = {
+ templ.tpe = null
+ issueNormalTypeError(templ, ex.getMessage())
+ }
+
+ // additional parentTypes errors
+ def ConstrArgsInTraitParentTpeError(arg: Tree, parent: Symbol) =
+ issueNormalTypeError(arg, parent + " is a trait; does not take constructor arguments")
+
+ def MissingTypeArgumentsParentTpeError(supertpt: Tree) =
+ issueNormalTypeError(supertpt, "missing type arguments")
+
+ // typedIdent
+ def AmbiguousIdentError(tree: Tree, name: Name, msg: String) =
+ NormalTypeError(tree, "reference to " + name + " is ambiguous;\n" + msg)
+
+ def SymbolNotFoundError(tree: Tree, name: Name, owner: Symbol, startingIdentCx: Context) = {
+ NormalTypeError(tree, "not found: "+decodeWithKind(name, owner))
+ }
+
+ // typedAppliedTypeTree
+ def AppliedTypeNoParametersError(tree: Tree, errTpe: Type) = {
+ issueNormalTypeError(tree, errTpe + " does not take type parameters")
+ setError(tree)
+ }
+
+ def AppliedTypeWrongNumberOfArgsError(tree: Tree, tpt: Tree, tparams: List[Symbol]) = {
+ val tptSafeString: String = try {
+ tpt.tpe.toString()
+ } catch {
+ case _: CyclicReference =>
+ tpt.toString()
+ }
+ val msg = "wrong number of type arguments for "+tptSafeString+", should be "+tparams.length
+ issueNormalTypeError(tree, msg)
+ setError(tree)
+ }
+
+ // typedTypeDef
+ def LowerBoundError(tree: TypeDef, lowB: Type, highB: Type) =
+ issueNormalTypeError(tree, "lower bound "+lowB+" does not conform to upper bound "+highB)
+
+ def HiddenSymbolWithError[T <: Tree](tree: T): T =
+ setError(tree)
+
+ def SymbolEscapesScopeError[T <: Tree](tree: T, badSymbol: Symbol): T = {
+ val modifierString = if (badSymbol.isPrivate) "private " else ""
+ issueNormalTypeError(tree, modifierString + badSymbol + " escapes its defining scope as part of type "+tree.tpe)
+ setError(tree)
+ }
+
+ // typedDefDef
+ def StarParamNotLastError(param: Tree) =
+ issueNormalTypeError(param, "*-parameter must come last")
+
+ def StarWithDefaultError(meth: Symbol) =
+ issueSymbolTypeError(meth, "a parameter section with a `*'-parameter is not allowed to have default arguments")
+
+ def InvalidConstructorDefError(ddef: Tree) =
+ issueNormalTypeError(ddef, "constructor definition not allowed here")
+
+ def DeprecatedParamNameError(param: Symbol, name: Name) =
+ issueSymbolTypeError(param, "deprecated parameter name "+ name +" has to be distinct from any other parameter name (deprecated or not).")
+
+ // computeParamAliases
+ def SuperConstrReferenceError(tree: Tree) =
+ NormalTypeError(tree, "super constructor cannot be passed a self reference unless parameter is declared by-name")
+
+ def SuperConstrArgsThisReferenceError(tree: Tree) =
+ ConstrArgsThisReferenceError("super", tree)
+
+ def SelfConstrArgsThisReferenceError(tree: Tree) =
+ ConstrArgsThisReferenceError("self", tree)
+
+ private def ConstrArgsThisReferenceError(prefix: String, tree: Tree) =
+ NormalTypeError(tree, s"$prefix constructor arguments cannot reference unconstructed `this`")
+
+ def TooManyArgumentListsForConstructor(tree: Tree) = {
+ issueNormalTypeError(tree, "too many argument lists for constructor invocation")
+ setError(tree)
+ }
+
+ // typedValDef
+ def VolatileValueError(vdef: Tree) =
+ issueNormalTypeError(vdef, "values cannot be volatile")
+
+ def LocalVarUninitializedError(vdef: Tree) =
+ issueNormalTypeError(vdef, "local variables must be initialized")
+
+ //typedAssign
+ def AssignmentError(tree: Tree, varSym: Symbol) = {
+ issueNormalTypeError(tree,
+ if (varSym != null && varSym.isValue) "reassignment to val"
+ else "assignment to non variable")
+ setError(tree)
+ }
+
+ def UnexpectedTreeAssignmentConversionError(tree: Tree) = {
+ issueNormalTypeError(tree, "Unexpected tree during assignment conversion.")
+ setError(tree)
+ }
+
+ //typedSuper
+ def MixinMissingParentClassNameError(tree: Tree, mix: Name, clazz: Symbol) =
+ issueNormalTypeError(tree, mix+" does not name a parent class of "+clazz)
+
+ def AmbiguousParentClassError(tree: Tree) =
+ issueNormalTypeError(tree, "ambiguous parent class qualifier")
+
+ //typedSelect
+ def NotAMemberError(sel: Tree, qual: Tree, name: Name) = {
+ def errMsg = {
+ val owner = qual.tpe.typeSymbol
+ val target = qual.tpe.widen
+ def targetKindString = if (owner.isTypeParameterOrSkolem) "type parameter " else ""
+ def nameString = decodeWithKind(name, owner)
+ /** Illuminating some common situations and errors a bit further. */
+ def addendum = {
+ val companion = {
+ if (name.isTermName && owner.isPackageClass) {
+ target.member(name.toTypeName) match {
+ case NoSymbol => ""
+ case sym => "\nNote: %s exists, but it has no companion object.".format(sym)
+ }
+ }
+ else ""
+ }
+ val semicolon = (
+ if (linePrecedes(qual, sel))
+ "\npossible cause: maybe a semicolon is missing before `"+nameString+"'?"
+ else
+ ""
+ )
+ val notAnyRef = (
+ if (ObjectClass.info.member(name).exists) notAnyRefMessage(target)
+ else ""
+ )
+ companion + notAnyRef + semicolon
+ }
+ def targetStr = targetKindString + target.directObjectString
+ withAddendum(qual.pos)(
+ if (name == nme.CONSTRUCTOR) s"$target does not have a constructor"
+ else s"$nameString is not a member of $targetStr$addendum"
+ )
+ }
+ issueNormalTypeError(sel, errMsg)
+ // the error has to be set for the copied tree, otherwise
+ // the error remains persistent acros multiple compilations
+ // and causes problems
+ //setError(sel)
+ }
+
+ //typedNew
+ def IsAbstractError(tree: Tree, sym: Symbol) = {
+ issueNormalTypeError(tree, sym + " is abstract; cannot be instantiated")
+ setError(tree)
+ }
+
+ def DoesNotConformToSelfTypeError(tree: Tree, sym: Symbol, tpe0: Type) = {
+ issueNormalTypeError(tree, sym + " cannot be instantiated because it does not conform to its self-type " + tpe0)
+ setError(tree)
+ }
+
+ //typedEta
+ def UnderscoreEtaError(tree: Tree) = {
+ issueNormalTypeError(tree, "_ must follow method; cannot follow " + tree.tpe)
+ setError(tree)
+ }
+
+ //typedReturn
+ def ReturnOutsideOfDefError(tree: Tree) = {
+ issueNormalTypeError(tree, "return outside method definition")
+ setError(tree)
+ }
+
+ def ReturnWithoutTypeError(tree: Tree, owner: Symbol) = {
+ issueNormalTypeError(tree, owner + " has return statement; needs result type")
+ setError(tree)
+ }
+
+ //typedBind
+ def VariableInPatternAlternativeError(tree: Tree) = {
+ issueNormalTypeError(tree, "illegal variable in pattern alternative")
+ //setError(tree)
+ }
+
+ //typedCase
+ def StarPositionInPatternError(tree: Tree) =
+ issueNormalTypeError(tree, "_* may only come last")
+
+ //typedFunction
+ def MaxFunctionArityError(fun: Tree) = {
+ issueNormalTypeError(fun, "implementation restricts functions to " + definitions.MaxFunctionArity + " parameters")
+ setError(fun)
+ }
+
+ def WrongNumberOfParametersError(tree: Tree, argpts: List[Type]) = {
+ issueNormalTypeError(tree, "wrong number of parameters; expected = " + argpts.length)
+ setError(tree)
+ }
+
+ def MissingParameterTypeError(fun: Tree, vparam: ValDef, pt: Type) =
+ if (vparam.mods.isSynthetic) fun match {
+ case Function(_, Match(_, _)) => MissingParameterTypeAnonMatchError(vparam, pt)
+ case _ => issueNormalTypeError(vparam, "missing parameter type for expanded function " + fun)
+ } else issueNormalTypeError(vparam, "missing parameter type")
+
+ def MissingParameterTypeAnonMatchError(vparam: Tree, pt: Type) =
+ issueNormalTypeError(vparam, "missing parameter type for expanded function\n"+
+ "The argument types of an anonymous function must be fully known. (SLS 8.5)\n"+
+ "Expected type was: " + pt.toLongString)
+
+ def ConstructorsOrderError(tree: Tree) = {
+ issueNormalTypeError(tree, "called constructor's definition must precede calling constructor's definition")
+ setError(tree)
+ }
+
+ def OnlyDeclarationsError(tree: Tree) = {
+ issueNormalTypeError(tree, "only declarations allowed here")
+ setError(tree)
+ }
+
+ // typedAnnotation
+ def AnnotationNotAConstantError(tree: Tree) =
+ NormalTypeError(tree, "annotation argument needs to be a constant; found: " + tree)
+
+ def AnnotationArgNullError(tree: Tree) =
+ NormalTypeError(tree, "annotation argument cannot be null")
+
+ def ArrayConstantsError(tree: Tree) =
+ NormalTypeError(tree, "Array constants have to be specified using the `Array(...)' factory method")
+
+ def ArrayConstantsTypeMismatchError(tree: Tree, pt: Type) =
+ NormalTypeError(tree, "found array constant, expected argument of type " + pt)
+
+ def UnexpectedTreeAnnotation(tree: Tree) =
+ NormalTypeError(tree, "unexpected tree in annotation: "+ tree)
+
+ def AnnotationTypeMismatchError(tree: Tree, expected: Type, found: Type) =
+ NormalTypeError(tree, "expected annotation of type " + expected + ", found " + found)
+
+ def MultipleArgumentListForAnnotationError(tree: Tree) =
+ NormalTypeError(tree, "multiple argument lists on classfile annotation")
+
+ def UnknownAnnotationNameError(tree: Tree, name: Name) =
+ NormalTypeError(tree, "unknown annotation argument name: " + name)
+
+ def DuplicateValueAnnotationError(tree: Tree, name: Name) =
+ NormalTypeError(tree, "duplicate value for annotation argument " + name)
+
+ def ClassfileAnnotationsAsNamedArgsError(tree: Tree) =
+ NormalTypeError(tree, "classfile annotation arguments have to be supplied as named arguments")
+
+ def AnnotationMissingArgError(tree: Tree, annType: Type, sym: Symbol) =
+ NormalTypeError(tree, "annotation " + annType.typeSymbol.fullName + " is missing argument " + sym.name)
+
+ def NestedAnnotationError(tree: Tree, annType: Type) =
+ NormalTypeError(tree, "nested classfile annotations must be defined in java; found: "+ annType)
+
+ def UnexpectedTreeAnnotationError(tree: Tree, unexpected: Tree) =
+ NormalTypeError(tree, "unexpected tree after typing annotation: "+ unexpected)
+
+ //typedExistentialTypeTree
+ def AbstractionFromVolatileTypeError(vd: ValDef) =
+ issueNormalTypeError(vd, "illegal abstraction from value with volatile type "+vd.symbol.tpe)
+
+ private[ContextErrors] def TypedApplyWrongNumberOfTpeParametersErrorMessage(fun: Tree) =
+ "wrong number of type parameters for "+treeSymTypeMsg(fun)
+
+ def TypedApplyWrongNumberOfTpeParametersError(tree: Tree, fun: Tree) = {
+ issueNormalTypeError(tree, TypedApplyWrongNumberOfTpeParametersErrorMessage(fun))
+ setError(tree)
+ }
+
+ def TypedApplyDoesNotTakeTpeParametersError(tree: Tree, fun: Tree) = {
+ issueNormalTypeError(tree, treeSymTypeMsg(fun)+" does not take type parameters.")
+ setError(tree)
+ }
+
+ // doTypeApply
+ //tryNamesDefaults
+ def NamedAndDefaultArgumentsNotSupportedForMacros(tree: Tree, fun: Tree) =
+ NormalTypeError(tree, "macros application do not support named and/or default arguments")
+
+ def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree) =
+ NormalTypeError(tree, "too many arguments for "+treeSymTypeMsg(fun))
+
+ // can it still happen? see test case neg/overloaded-unapply.scala
+ def OverloadedUnapplyError(tree: Tree) =
+ issueNormalTypeError(tree, "cannot resolve overloaded unapply")
+
+ def UnapplyWithSingleArgError(tree: Tree) =
+ issueNormalTypeError(tree, "an unapply method must accept a single argument.")
+
+ def MultipleVarargError(tree: Tree) =
+ NormalTypeError(tree, "when using named arguments, the vararg parameter has to be specified exactly once")
+
+ def ModuleUsingCompanionClassDefaultArgsErrror(tree: Tree) =
+ NormalTypeError(tree, "module extending its companion class cannot use default constructor arguments")
+
+ def NotEnoughArgsError(tree: Tree, fun0: Tree, missing0: List[Symbol]) = {
+ def notEnoughArgumentsMsg(fun: Tree, missing: List[Symbol]) = {
+ val suffix = {
+ if (missing.isEmpty) ""
+ else {
+ val keep = missing take 3 map (_.name)
+ ".\nUnspecified value parameter%s %s".format(
+ if (missing.tail.isEmpty) "" else "s",
+ if ((missing drop 3).nonEmpty) (keep :+ "...").mkString(", ")
+ else keep.mkString("", ", ", ".")
+ )
+ }
+ }
+
+ "not enough arguments for " + treeSymTypeMsg(fun) + suffix
+ }
+ NormalTypeError(tree, notEnoughArgumentsMsg(fun0, missing0))
+ }
+
+ //doTypedApply - patternMode
+ def TooManyArgsPatternError(fun: Tree) =
+ NormalTypeError(fun, "too many arguments for unapply pattern, maximum = "+definitions.MaxTupleArity)
+
+ def WrongNumberOfArgsError(tree: Tree, fun: Tree) =
+ NormalTypeError(tree, "wrong number of arguments for "+ treeSymTypeMsg(fun))
+
+ def ApplyWithoutArgsError(tree: Tree, fun: Tree) =
+ NormalTypeError(tree, fun.tpe+" does not take parameters")
+
+ // Dynamic
+ def DynamicVarArgUnsupported(tree: Tree, name: String) =
+ issueNormalTypeError(tree, name+ " does not support passing a vararg parameter")
+
+ def DynamicRewriteError(tree: Tree, err: AbsTypeError) = {
+ issueTypeError(PosAndMsgTypeError(err.errPos, err.errMsg +
+ s"\nerror after rewriting to $tree\npossible cause: maybe a wrong Dynamic method signature?"))
+ setError(tree)
+ }
+
+ //checkClassType
+ def TypeNotAStablePrefixError(tpt: Tree, pre: Type) = {
+ issueNormalTypeError(tpt, "type "+pre+" is not a stable prefix")
+ setError(tpt)
+ }
+
+ def ClassTypeRequiredError(tree: Tree, found: AnyRef) = {
+ issueNormalTypeError(tree, "class type required but "+found+" found")
+ setError(tree)
+ }
+
+ // validateParentClasses
+ def ParentSuperSubclassError(parent: Tree, superclazz: Symbol,
+ parentSym: Symbol, mixin: Symbol) =
+ NormalTypeError(parent, "illegal inheritance; super"+superclazz+
+ "\n is not a subclass of the super"+parentSym+
+ "\n of the mixin " + mixin)
+
+ def ParentNotATraitMixinError(parent: Tree, mixin: Symbol) =
+ NormalTypeError(parent, mixin+" needs to be a trait to be mixed in")
+
+ def ParentFinalInheritanceError(parent: Tree, mixin: Symbol) =
+ NormalTypeError(parent, "illegal inheritance from final "+mixin)
+
+ def ParentSealedInheritanceError(parent: Tree, psym: Symbol) =
+ NormalTypeError(parent, "illegal inheritance from sealed " + psym )
+
+ def ParentSelfTypeConformanceError(parent: Tree, selfType: Type) =
+ NormalTypeError(parent,
+ "illegal inheritance;\n self-type "+selfType+" does not conform to "+
+ parent +"'s selftype "+parent.tpe.typeOfThis)
+
+ def ParentInheritedTwiceError(parent: Tree, parentSym: Symbol) =
+ NormalTypeError(parent, parentSym+" is inherited twice")
+
+ //adapt
+ def MissingArgsForMethodTpeError(tree: Tree, meth: Symbol) = {
+ issueNormalTypeError(tree,
+ "missing arguments for " + meth.fullLocationString + (
+ if (meth.isConstructor) ""
+ else ";\nfollow this method with `_' if you want to treat it as a partially applied function"
+ ))
+ setError(tree)
+ }
+
+ def MissingTypeParametersError(tree: Tree) = {
+ issueNormalTypeError(tree, tree.symbol+" takes type parameters")
+ setError(tree)
+ }
+
+ def KindArityMismatchError(tree: Tree, pt: Type) = {
+ issueNormalTypeError(tree,
+ tree.tpe+" takes "+countElementsAsString(tree.tpe.typeParams.length, "type parameter")+
+ ", expected: "+countAsString(pt.typeParams.length))
+ setError(tree)
+ }
+
+ def CaseClassConstructorError(tree: Tree) = {
+ issueNormalTypeError(tree, tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method")
+ setError(tree)
+ }
+
+ def ConstructorPrefixError(tree: Tree, restpe: Type) = {
+ issueNormalTypeError(tree, restpe.prefix+" is not a legal prefix for a constructor")
+ setError(tree)
+ }
+
+ // typedPattern
+ def PatternMustBeValue(pat: Tree, pt: Type) =
+ issueNormalTypeError(pat, s"pattern must be a value: $pat"+ typePatternAdvice(pat.tpe.typeSymbol, pt.typeSymbol))
+
+ // SelectFromTypeTree
+ def TypeSelectionFromVolatileTypeError(tree: Tree, qual: Tree) = {
+ val hiBound = qual.tpe.bounds.hi
+ val addendum = if (hiBound =:= qual.tpe) "" else s" (with upper bound ${hiBound})"
+ issueNormalTypeError(tree, s"illegal type selection from volatile type ${qual.tpe}${addendum}")
+ setError(tree)
+ }
+
+ // packedType
+ def InferTypeWithVolatileTypeSelectionError(tree: Tree, pre: Type) =
+ issueNormalTypeError(tree, "Inferred type "+tree.tpe+" contains type selection from volatile type "+pre)
+
+ def AbstractExistentiallyOverParamerizedTpeError(tree: Tree, tp: Type) =
+ issueNormalTypeError(tree, "can't existentially abstract over parameterized type " + tp)
+
+ // resolveClassTag
+ def MissingClassTagError(tree: Tree, tp: Type) = {
+ issueNormalTypeError(tree, "cannot find class tag for element type "+tp)
+ setError(tree)
+ }
+
+ // cases where we do not necessarily return trees
+ def DependentMethodTpeConversionToFunctionError(tree: Tree, tp: Type) =
+ issueNormalTypeError(tree, "method with dependent type "+tp+" cannot be converted to function value")
+
+ //checkStarPatOK
+ def StarPatternWithVarargParametersError(tree: Tree) =
+ issueNormalTypeError(tree, "star patterns must correspond with varargs parameters")
+
+ def FinitaryError(tparam: Symbol) =
+ issueSymbolTypeError(tparam, "class graph is not finitary because type parameter "+tparam.name+" is expansively recursive")
+
+ def QualifyingClassError(tree: Tree, qual: Name) = {
+ issueNormalTypeError(tree,
+ if (qual.isEmpty) tree + " can be used only in a class, object, or template"
+ else qual + " is not an enclosing class")
+ setError(tree)
+ }
+
+ // def stabilize
+ def NotAValueError(tree: Tree, sym: Symbol) = {
+ issueNormalTypeError(tree, sym.kindString + " " + sym.fullName + " is not a value")
+ setError(tree)
+ }
+
+ def DefDefinedTwiceError(sym0: Symbol, sym1: Symbol) = {
+ // Most of this hard work is associated with SI-4893.
+ val isBug = sym0.isAbstractType && sym1.isAbstractType && (sym0.name startsWith "_$")
+ val addendums = List(
+ if (sym0.associatedFile eq sym1.associatedFile)
+ Some("conflicting symbols both originated in file '%s'".format(sym0.associatedFile.canonicalPath))
+ else if ((sym0.associatedFile ne null) && (sym1.associatedFile ne null))
+ Some("conflicting symbols originated in files '%s' and '%s'".format(sym0.associatedFile.canonicalPath, sym1.associatedFile.canonicalPath))
+ else None ,
+ if (isBug) Some("Note: this may be due to a bug in the compiler involving wildcards in package objects") else None
+ )
+ val addendum = addendums.flatten match {
+ case Nil => ""
+ case xs => xs.mkString("\n ", "\n ", "")
+ }
+
+ issueSymbolTypeError(sym0, sym1+" is defined twice" + addendum)
+ }
+
+ // cyclic errors
+ def CyclicAliasingOrSubtypingError(errPos: Position, sym0: Symbol) =
+ issueTypeError(PosAndMsgTypeError(errPos, "cyclic aliasing or subtyping involving "+sym0))
+
+ def CyclicReferenceError(errPos: Position, lockedSym: Symbol) =
+ issueTypeError(PosAndMsgTypeError(errPos, "illegal cyclic reference involving " + lockedSym))
+
+ // macro-related errors (also see MacroErrors below)
+
+ def MacroEtaError(tree: Tree) = {
+ issueNormalTypeError(tree, "macros cannot be eta-expanded")
+ setError(tree)
+ }
+
+ // same reason as for MacroBodyTypecheckException
+ case object MacroExpansionException extends Exception with scala.util.control.ControlThrowable
+
+ private def macroExpansionError(expandee: Tree, msg: String = null, pos: Position = NoPosition) = {
+ def msgForLog = if (msg != null && (msg contains "exception during macro expansion")) msg.split(EOL).drop(1).headOption.getOrElse("?") else msg
+ macroLogLite("macro expansion has failed: %s".format(msgForLog))
+ val errorPos = if (pos != NoPosition) pos else (if (expandee.pos != NoPosition) expandee.pos else enclosingMacroPosition)
+ if (msg != null) context.error(errorPos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions
+ setError(expandee)
+ throw MacroExpansionException
+ }
+
+ def MacroPartialApplicationError(expandee: Tree) = {
+ // macroExpansionError won't work => swallows positions, hence needed to do issueTypeError
+ // kinda contradictory to the comment in `macroExpansionError`, but this is how it works
+ issueNormalTypeError(expandee, "macros cannot be partially applied")
+ setError(expandee)
+ throw MacroExpansionException
+ }
+
+ def MacroGeneratedAbort(expandee: Tree, ex: AbortMacroException) = {
+ // errors have been reported by the macro itself, so we do nothing here
+ macroLogVerbose("macro expansion has been aborted")
+ macroExpansionError(expandee, ex.msg, ex.pos)
+ }
+
+ def MacroGeneratedTypeError(expandee: Tree, err: TypeError = null) =
+ if (err == null) {
+ // errors have been reported by the macro itself, so we do nothing here
+ macroExpansionError(expandee, null)
+ } else {
+ macroLogLite("macro expansion has failed: %s at %s".format(err.msg, err.pos))
+ throw err // this error must be propagated, don't report
+ }
+
+ def MacroGeneratedException(expandee: Tree, ex: Throwable) = {
+ val realex = ReflectionUtils.unwrapThrowable(ex)
+ val message = {
+ try {
+ // [Eugene] is there a better way?
+ // [Paul] See Exceptional.scala and Origins.scala.
+ val relevancyThreshold = realex.getStackTrace().indexWhere(_.getMethodName endsWith "macroExpand1")
+ if (relevancyThreshold == -1) None
+ else {
+ var relevantElements = realex.getStackTrace().take(relevancyThreshold + 1)
+ def isMacroInvoker(este: StackTraceElement) = este.isNativeMethod || (este.getClassName != null && (este.getClassName contains "fastTrack"))
+ var threshold = relevantElements.reverse.indexWhere(isMacroInvoker) + 1
+ while (threshold != relevantElements.length && isMacroInvoker(relevantElements(relevantElements.length - threshold - 1))) threshold += 1
+ relevantElements = relevantElements dropRight threshold
+
+ realex.setStackTrace(relevantElements)
+ Some(EOL + stackTraceString(realex))
+ }
+ } catch {
+ // the code above tries various tricks to detect the relevant portion of the stack trace
+ // if these tricks fail, just fall back to uninformative, but better than nothing, getMessage
+ case NonFatal(ex) => // currently giving a spurious warning, see SI-6994
+ macroLogVerbose("got an exception when processing a macro generated exception\n" +
+ "offender = " + stackTraceString(realex) + "\n" +
+ "error = " + stackTraceString(ex))
+ None
+ }
+ } getOrElse {
+ val msg = realex.getMessage
+ if (msg != null) msg else realex.getClass.getName
+ }
+ macroExpansionError(expandee, "exception during macro expansion: " + message)
+ }
+
+ def MacroFreeSymbolError(expandee: Tree, sym: FreeSymbol) = {
+ def template(kind: String) = (
+ s"Macro expansion contains free $kind variable %s. Have you forgotten to use %s? "
+ + s"If you have troubles tracking free $kind variables, consider using -Xlog-free-${kind}s"
+ )
+ val forgotten = (
+ if (sym.isTerm) "splice when splicing this variable into a reifee"
+ else "c.WeakTypeTag annotation for this type parameter"
+ )
+ macroExpansionError(expandee, template(sym.name.nameKind).format(sym.name + " " + sym.origin, forgotten))
+ }
+
+ def MacroExpansionIsNotExprError(expandee: Tree, expanded: Any) =
+ macroExpansionError(expandee,
+ "macro must return a compiler-specific expr; returned value is " + (
+ if (expanded == null) "null"
+ else if (expanded.isInstanceOf[Expr[_]]) " Expr, but it doesn't belong to this compiler's universe"
+ else " of " + expanded.getClass
+ ))
+
+ def MacroImplementationNotFoundError(expandee: Tree) = {
+ val message =
+ "macro implementation not found: " + expandee.symbol.name + " " +
+ "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)" +
+ (if (forScaladoc) ". When generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
+ else "")
+ macroExpansionError(expandee, message)
+ }
+ }
+ }
+
+ trait InferencerContextErrors {
+ self: Inferencer =>
+
+ private def applyErrorMsg(tree: Tree, msg: String, argtpes: List[Type], pt: Type) = {
+ def asParams(xs: List[Any]) = xs.mkString("(", ", ", ")")
+
+ def resType = if (pt.isWildcard) "" else " with expected result type " + pt
+ def allTypes = (alternatives(tree) flatMap (_.paramTypes)) ++ argtpes :+ pt
+ def locals = alternatives(tree) flatMap (_.typeParams)
+
+ withDisambiguation(locals, allTypes: _*) {
+ treeSymTypeMsg(tree) + msg + asParams(argtpes) + resType
+ }
+ }
+
+ object InferErrorGen {
+
+ implicit val contextInferErrorGen = getContext
+
+ object PolyAlternativeErrorKind extends Enumeration {
+ type ErrorType = Value
+ val WrongNumber, NoParams, ArgsDoNotConform = Value
+ }
+
+ private def ambiguousErrorMsgPos(pos: Position, pre: Type, sym1: Symbol, sym2: Symbol, rest: String) =
+ if (sym1.hasDefault && sym2.hasDefault && sym1.enclClass == sym2.enclClass) {
+ val methodName = nme.defaultGetterToMethod(sym1.name)
+ (sym1.enclClass.pos,
+ "in "+ sym1.enclClass +", multiple overloaded alternatives of " + methodName +
+ " define default arguments")
+ } else {
+ (pos,
+ ("ambiguous reference to overloaded definition,\n" +
+ "both " + sym1 + sym1.locationString + " of type " + pre.memberType(sym1) +
+ "\nand " + sym2 + sym2.locationString + " of type " + pre.memberType(sym2) +
+ "\nmatch " + rest)
+ )
+ }
+
+ def AccessError(tree: Tree, sym: Symbol, pre: Type, owner0: Symbol, explanation: String) = {
+ def errMsg = {
+ val location = if (sym.isClassConstructor) owner0 else pre.widen.directObjectString
+
+ underlyingSymbol(sym).fullLocationString + " cannot be accessed in " +
+ location + explanation
+ }
+ NormalTypeError(tree, errMsg, ErrorKinds.Access)
+ }
+
+ def NoMethodInstanceError(fn: Tree, args: List[Tree], msg: String) =
+ issueNormalTypeError(fn,
+ "no type parameters for " +
+ applyErrorMsg(fn, " exist so that it can be applied to arguments ", args map (_.tpe.widen), WildcardType) +
+ "\n --- because ---\n" + msg)
+
+ // TODO: no test case
+ def NoConstructorInstanceError(tree: Tree, restpe: Type, pt: Type, msg: String) = {
+ issueNormalTypeError(tree,
+ "constructor of type " + restpe +
+ " cannot be uniquely instantiated to expected type " + pt +
+ "\n --- because ---\n" + msg)
+ setError(tree)
+ }
+
+ def ConstrInstantiationError(tree: Tree, restpe: Type, pt: Type) = {
+ issueNormalTypeError(tree,
+ "constructor cannot be instantiated to expected type" + foundReqMsg(restpe, pt))
+ setError(tree)
+ }
+
+ // side-effect on the tree, break the overloaded type cycle in infer
+ private def setErrorOnLastTry(lastTry: Boolean, tree: Tree) = if (lastTry) setError(tree)
+
+ def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type, lastTry: Boolean) = {
+ issueNormalTypeError(tree,
+ applyErrorMsg(tree, " cannot be applied to ", argtpes, pt))
+ // since inferMethodAlternative modifies the state of the tree
+ // we have to set the type of tree to ErrorType only in the very last
+ // fallback action that is done in the inference.
+ // This avoids entering infinite loop in doTypeApply.
+ setErrorOnLastTry(lastTry, tree)
+ }
+
+ def AmbiguousMethodAlternativeError(tree: Tree, pre: Type, best: Symbol,
+ firstCompeting: Symbol, argtpes: List[Type], pt: Type, lastTry: Boolean) = {
+
+ if (!(argtpes exists (_.isErroneous)) && !pt.isErroneous) {
+ val msg0 =
+ "argument types " + argtpes.mkString("(", ",", ")") +
+ (if (pt == WildcardType) "" else " and expected result type " + pt)
+ val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, msg0)
+ issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ setErrorOnLastTry(lastTry, tree)
+ } else setError(tree) // do not even try further attempts because they should all fail
+ // even if this is not the last attempt (because of the SO's possibility on the horizon)
+
+ }
+
+ def NoBestExprAlternativeError(tree: Tree, pt: Type, lastTry: Boolean) = {
+ issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(tree.symbol.tpe, pt, isPossiblyMissingArgs(tree.symbol.tpe, pt))))
+ setErrorOnLastTry(lastTry, tree)
+ }
+
+ def AmbiguousExprAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, pt: Type, lastTry: Boolean) = {
+ val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, "expected type " + pt)
+ issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ setErrorOnLastTry(lastTry, tree)
+ }
+
+ // checkBounds
+ def KindBoundErrors(tree: Tree, prefix: String, targs: List[Type],
+ tparams: List[Symbol], kindErrors: List[String]) = {
+ issueNormalTypeError(tree,
+ prefix + "kinds of the type arguments " + targs.mkString("(", ",", ")") +
+ " do not conform to the expected kinds of the type parameters "+
+ tparams.mkString("(", ",", ")") + tparams.head.locationString+ "." +
+ kindErrors.toList.mkString("\n", ", ", ""))
+ }
+
+ private[ContextErrors] def NotWithinBoundsErrorMessage(prefix: String, targs: List[Type], tparams: List[Symbol], explaintypes: Boolean) = {
+ if (explaintypes) {
+ val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, targs).bounds)
+ (targs, bounds).zipped foreach ((targ, bound) => explainTypes(bound.lo, targ))
+ (targs, bounds).zipped foreach ((targ, bound) => explainTypes(targ, bound.hi))
+ ()
+ }
+
+ prefix + "type arguments " + targs.mkString("[", ",", "]") +
+ " do not conform to " + tparams.head.owner + "'s type parameter bounds " +
+ (tparams map (_.defString)).mkString("[", ",", "]")
+ }
+
+ def NotWithinBounds(tree: Tree, prefix: String, targs: List[Type],
+ tparams: List[Symbol], kindErrors: List[String]) =
+ issueNormalTypeError(tree,
+ NotWithinBoundsErrorMessage(prefix, targs, tparams, settings.explaintypes.value))
+
+ //substExpr
+ def PolymorphicExpressionInstantiationError(tree: Tree, undetparams: List[Symbol], pt: Type) =
+ issueNormalTypeError(tree,
+ "polymorphic expression cannot be instantiated to expected type" +
+ foundReqMsg(GenPolyType(undetparams, skipImplicit(tree.tpe)), pt))
+
+ //checkCheckable
+ def TypePatternOrIsInstanceTestError(tree: Tree, tp: Type) =
+ issueNormalTypeError(tree, "type "+tp+" cannot be used in a type pattern or isInstanceOf test")
+
+ def PatternTypeIncompatibleWithPtError1(tree: Tree, pattp: Type, pt: Type) =
+ issueNormalTypeError(tree, "pattern type is incompatible with expected type" + foundReqMsg(pattp, pt))
+
+ def IncompatibleScrutineeTypeError(tree: Tree, pattp: Type, pt: Type) =
+ issueNormalTypeError(tree, "scrutinee is incompatible with pattern type" + foundReqMsg(pattp, pt))
+
+ def PatternTypeIncompatibleWithPtError2(pat: Tree, pt1: Type, pt: Type) =
+ issueNormalTypeError(pat,
+ "pattern type is incompatible with expected type"+ foundReqMsg(pat.tpe, pt) +
+ typePatternAdvice(pat.tpe.typeSymbol, pt1.typeSymbol))
+
+ def PolyAlternativeError(tree: Tree, argtypes: List[Type], sym: Symbol, err: PolyAlternativeErrorKind.ErrorType) = {
+ import PolyAlternativeErrorKind._
+ val msg =
+ err match {
+ case WrongNumber =>
+ "wrong number of type parameters for " + treeSymTypeMsg(tree)
+ case NoParams =>
+ treeSymTypeMsg(tree) + " does not take type parameters"
+ case ArgsDoNotConform =>
+ "type arguments " + argtypes.mkString("[", ",", "]") +
+ " conform to the bounds of none of the overloaded alternatives of\n "+sym+
+ ": "+sym.info
+ }
+ issueNormalTypeError(tree, msg)
+ ()
+ }
+ }
+ }
+
+ trait NamerContextErrors {
+ self: Namer =>
+
+ object NamerErrorGen {
+
+ implicit val contextNamerErrorGen = context
+
+ object SymValidateErrors extends Enumeration {
+ val ImplicitConstr, ImplicitNotTermOrClass, ImplicitAtToplevel,
+ OverrideClass, SealedNonClass, AbstractNonClass,
+ OverrideConstr, AbstractOverride, AbstractOverrideOnTypeMember, LazyAndEarlyInit,
+ ByNameParameter, AbstractVar = Value
+ }
+
+ object DuplicatesErrorKinds extends Enumeration {
+ val RenamedTwice, AppearsTwice = Value
+ }
+
+ import SymValidateErrors._
+ import DuplicatesErrorKinds._
+ import symtab.Flags
+
+ def TypeSigError(tree: Tree, ex: TypeError) = {
+ ex match {
+ case CyclicReference(_, _) if tree.symbol.isTermMacro =>
+ // say, we have a macro def `foo` and its macro impl `impl`
+ // if impl: 1) omits return type, 2) has anything implicit in its body, 3) sees foo
+ //
+ // then implicit search will trigger an error
+ // (note that this is not a compilation error, it's an artifact of implicit search algorithm)
+ // normally, such "errors" are discarded by `isCyclicOrErroneous` in Implicits.scala
+ // but in our case this won't work, because isCyclicOrErroneous catches CyclicReference exceptions
+ // while our error will present itself as a "recursive method needs a return type"
+ //
+ // hence we (together with reportTypeError in TypeDiagnostics) make sure that this CyclicReference
+ // evades all the handlers on its way and successfully reaches `isCyclicOrErroneous` in Implicits
+ throw ex
+ case CyclicReference(sym, info: TypeCompleter) =>
+ issueNormalTypeError(tree, typer.cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage())
+ case _ =>
+ contextNamerErrorGen.issue(TypeErrorWithUnderlyingTree(tree, ex))
+ }
+ }
+
+ def GetterDefinedTwiceError(getter: Symbol) =
+ issueSymbolTypeError(getter, getter+" is defined twice")
+
+ def ValOrValWithSetterSuffixError(tree: Tree) =
+ issueNormalTypeError(tree, "Names of vals or vars may not end in `_='")
+
+ def PrivateThisCaseClassParameterError(tree: Tree) =
+ issueNormalTypeError(tree, "private[this] not allowed for case class parameters")
+
+ def BeanPropertyAnnotationLimitationError(tree: Tree) =
+ issueNormalTypeError(tree, "implementation limitation: the BeanProperty annotation cannot be used in a type alias or renamed import")
+
+ def BeanPropertyAnnotationFieldWithoutLetterError(tree: Tree) =
+ issueNormalTypeError(tree, "`BeanProperty' annotation can be applied only to fields that start with a letter")
+
+ def BeanPropertyAnnotationPrivateFieldError(tree: Tree) =
+ issueNormalTypeError(tree, "`BeanProperty' annotation can be applied only to non-private fields")
+
+ def DoubleDefError(currentSym: Symbol, prevSym: Symbol) = {
+ val s1 = if (prevSym.isModule) "case class companion " else ""
+ val s2 = if (prevSym.isSynthetic) "(compiler-generated) " + s1 else ""
+ val s3 = if (prevSym.isCase) "case class " + prevSym.name else "" + prevSym
+ val where = if (currentSym.owner.isPackageClass != prevSym.owner.isPackageClass) {
+ val inOrOut = if (prevSym.owner.isPackageClass) "outside of" else "in"
+ " %s package object %s".format(inOrOut, ""+prevSym.effectiveOwner.name)
+ } else ""
+
+ issueSymbolTypeError(currentSym, prevSym.name + " is already defined as " + s2 + s3 + where)
+ }
+
+ def MaxParametersCaseClassError(tree: Tree) =
+ issueNormalTypeError(tree, "Implementation restriction: case classes cannot have more than " + definitions.MaxFunctionArity + " parameters.")
+
+ def InheritsItselfError(tree: Tree) =
+ issueNormalTypeError(tree, tree.tpe.typeSymbol+" inherits itself")
+
+ def MissingParameterOrValTypeError(vparam: Tree) =
+ issueNormalTypeError(vparam, "missing parameter type")
+
+ def RootImportError(tree: Tree) =
+ issueNormalTypeError(tree, "_root_ cannot be imported")
+
+ def SymbolValidationError(sym: Symbol, errKind: SymValidateErrors.Value) {
+ val msg = errKind match {
+ case ImplicitConstr =>
+ "`implicit' modifier not allowed for constructors"
+
+ case ImplicitNotTermOrClass =>
+ "`implicit' modifier can be used only for values, variables, methods and classes"
+
+ case ImplicitAtToplevel =>
+ "`implicit' modifier cannot be used for top-level objects"
+
+ case OverrideClass =>
+ "`override' modifier not allowed for classes"
+
+ case SealedNonClass =>
+ "`sealed' modifier can be used only for classes"
+
+ case AbstractNonClass =>
+ "`abstract' modifier can be used only for classes; it should be omitted for abstract members"
+
+ case OverrideConstr =>
+ "`override' modifier not allowed for constructors"
+
+ case AbstractOverride =>
+ "`abstract override' modifier only allowed for members of traits"
+
+ case AbstractOverrideOnTypeMember =>
+ "`abstract override' modifier not allowed for type members"
+
+ case LazyAndEarlyInit =>
+ "`lazy' definitions may not be initialized early"
+
+ case ByNameParameter =>
+ "pass-by-name arguments not allowed for case class parameters"
+
+ case AbstractVar =>
+ "only classes can have declared but undefined members" + abstractVarMessage(sym)
+
+ }
+ issueSymbolTypeError(sym, msg)
+ }
+
+
+ def AbstractMemberWithModiferError(sym: Symbol, flag: Int) =
+ issueSymbolTypeError(sym, "abstract member may not have " + Flags.flagsToString(flag) + " modifier")
+
+ def IllegalModifierCombination(sym: Symbol, flag1: Int, flag2: Int) =
+ issueSymbolTypeError(sym, "illegal combination of modifiers: %s and %s for: %s".format(
+ Flags.flagsToString(flag1), Flags.flagsToString(flag2), sym))
+
+ def IllegalDependentMethTpeError(sym: Symbol)(context: Context) = {
+ val errorAddendum =
+ ": parameter appears in the type of another parameter in the same section or an earlier one"
+ issueSymbolTypeError(sym, "illegal dependent method type" + errorAddendum)(context)
+ }
+
+ def DuplicatesError(tree: Tree, name: Name, kind: DuplicatesErrorKinds.Value) = {
+ val msg = kind match {
+ case RenamedTwice =>
+ "is renamed twice"
+ case AppearsTwice =>
+ "appears twice as a target of a renaming"
+ }
+
+ issueNormalTypeError(tree, name.decode + " " + msg)
+ }
+ }
+ }
+
+ trait ImplicitsContextErrors {
+ self: ImplicitSearch =>
+
+ import definitions._
+
+ def AmbiguousImplicitError(info1: ImplicitInfo, info2: ImplicitInfo,
+ pre1: String, pre2: String, trailer: String)
+ (isView: Boolean, pt: Type, tree: Tree)(implicit context0: Context) = {
+ if (!info1.tpe.isErroneous && !info2.tpe.isErroneous) {
+ def coreMsg =
+ sm"""| $pre1 ${info1.sym.fullLocationString} of type ${info1.tpe}
+ | $pre2 ${info2.sym.fullLocationString} of type ${info2.tpe}
+ | $trailer"""
+ def viewMsg = {
+ val found :: req :: _ = pt.typeArgs
+ def explanation = {
+ val sym = found.typeSymbol
+ // Explain some common situations a bit more clearly. Some other
+ // failures which have nothing to do with implicit conversions
+ // per se, but which manifest as implicit conversion conflicts
+ // involving Any, are further explained from foundReqMsg.
+ if (AnyRefClass.tpe <:< req) (
+ if (sym == AnyClass || sym == UnitClass) (
+ sm"""|Note: ${sym.name} is not implicitly converted to AnyRef. You can safely
+ |pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so."""
+ )
+ else boxedClass get sym map (boxed =>
+ sm"""|Note: an implicit exists from ${sym.fullName} => ${boxed.fullName}, but
+ |methods inherited from Object are rendered ambiguous. This is to avoid
+ |a blanket implicit which would convert any ${sym.fullName} to any AnyRef.
+ |You may wish to use a type ascription: `x: ${boxed.fullName}`."""
+ ) getOrElse ""
+ )
+ else
+ sm"""|Note that implicit conversions are not applicable because they are ambiguous:
+ |${coreMsg}are possible conversion functions from $found to $req"""
+ }
+ typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req)) + (
+ if (explanation == "") "" else "\n" + explanation
+ )
+ }
+ context.issueAmbiguousError(AmbiguousTypeError(tree, tree.pos,
+ if (isView) viewMsg
+ else s"ambiguous implicit values:\n${coreMsg}match expected type $pt")
+ )
+ }
+ }
+
+ def DivergingImplicitExpansionError(tree: Tree, pt: Type, sym: Symbol)(implicit context0: Context) =
+ if (settings.Xdivergence211.value) {
+ issueTypeError(DivergentImplicitTypeError(tree, pt, sym))
+ } else {
+ issueDivergentImplicitsError(tree,
+ "diverging implicit expansion for type "+pt+"\nstarting with "+
+ sym.fullLocationString)
+ }
+ }
+
+ object NamesDefaultsErrorsGen {
+ import typer.infer.setError
+
+ def NameClashError(sym: Symbol, arg: Tree)(implicit context: Context) = {
+ setError(arg) // to distinguish it from ambiguous reference error
+
+ def errMsg =
+ "%s definition needs %s because '%s' is used as a named argument in its body.".format(
+ "variable", // "method"
+ "type", // "result type"
+ sym.name)
+ issueSymbolTypeError(sym, errMsg)
+ }
+
+ def AmbiguousReferenceInNamesDefaultError(arg: Tree, name: Name)(implicit context: Context) = {
+ if (!arg.isErroneous) { // check if name clash wasn't reported already
+ issueNormalTypeError(arg,
+ "reference to "+ name +" is ambiguous; it is both a method parameter "+
+ "and a variable in scope.")
+ setError(arg)
+ } else arg
+ }
+
+ def WarnAfterNonSilentRecursiveInference(param: Symbol, arg: Tree)(implicit context: Context) = {
+ val note = "type-checking the invocation of "+ param.owner +" checks if the named argument expression '"+ param.name + " = ...' is a valid assignment\n"+
+ "in the current scope. The resulting type inference error (see above) can be fixed by providing an explicit type in the local definition for "+ param.name +"."
+ context.warning(arg.pos, note)
+ }
+
+ def UnknownParameterNameNamesDefaultError(arg: Tree, name: Name)(implicit context: Context) = {
+ issueNormalTypeError(arg, "unknown parameter name: " + name)
+ setError(arg)
+ }
+
+ def DoubleParamNamesDefaultError(arg: Tree, name: Name, pos: Int, otherName: Option[Name])(implicit context: Context) = {
+ val annex = otherName match {
+ case Some(oName) => "\nNote that that '"+ oName +"' is not a parameter name of the invoked method."
+ case None => ""
+ }
+ issueNormalTypeError(arg, "parameter '"+ name +"' is already specified at parameter position "+ pos + annex)
+ setError(arg)
+ }
+
+ def PositionalAfterNamedNamesDefaultError(arg: Tree)(implicit context: Context) = {
+ issueNormalTypeError(arg, "positional after named argument.")
+ setError(arg)
+ }
+ }
+
+ // using an exception here is actually a good idea
+ // because the lifespan of this exception is extremely small and controlled
+ // moreover exceptions let us avoid an avalanche of "if (!hasError) do stuff" checks
+ case object MacroBodyTypecheckException extends Exception with scala.util.control.ControlThrowable
+
+ trait MacroErrors {
+ self: MacroTyper =>
+
+ private implicit val context0 = typer.context
+ val context = typer.context
+
+ // helpers
+
+ private def lengthMsg(flavor: String, violation: String, extra: Symbol) = {
+ val noun = if (flavor == "value") "parameter" else "type parameter"
+ val message = noun + " lists have different length, " + violation + " extra " + noun
+ val suffix = if (extra ne NoSymbol) " " + extra.defString else ""
+ message + suffix
+ }
+
+ private def abbreviateCoreAliases(s: String): String = List("WeakTypeTag", "Expr").foldLeft(s)((res, x) => res.replace("c.universe." + x, "c." + x))
+
+ private def showMeth(pss: List[List[Symbol]], restpe: Type, abbreviate: Boolean) = {
+ var argsPart = (pss map (ps => ps map (_.defString) mkString ("(", ", ", ")"))).mkString
+ if (abbreviate) argsPart = abbreviateCoreAliases(argsPart)
+ var retPart = restpe.toString
+ if (abbreviate || macroDdef.tpt.tpe == null) retPart = abbreviateCoreAliases(retPart)
+ argsPart + ": " + retPart
+ }
+
+ // not exactly an error generator, but very related
+ // and I dearly wanted to push it away from Macros.scala
+ private def checkSubType(slot: String, rtpe: Type, atpe: Type) = {
+ val ok = if (macroDebugVerbose || settings.explaintypes.value) {
+ if (rtpe eq atpe) println(rtpe + " <: " + atpe + "?" + EOL + "true")
+ withTypesExplained(rtpe <:< atpe)
+ } else rtpe <:< atpe
+ if (!ok) {
+ compatibilityError("type mismatch for %s: %s does not conform to %s".format(slot, abbreviateCoreAliases(rtpe.toString), abbreviateCoreAliases(atpe.toString)))
+ }
+ }
+
+ // errors
+
+ private def fail() = {
+ // need to set the IS_ERROR flag to prohibit spurious expansions
+ if (macroDef != null) macroDef setFlag IS_ERROR
+ // not setting ErrorSymbol as in `infer.setError`, because we still need to know that it's a macro
+ // otherwise assignTypeToTree in Namers might fail if macroDdef.tpt == EmptyTree
+ macroDdef setType ErrorType
+ throw MacroBodyTypecheckException
+ }
+
+ private def genericError(tree: Tree, message: String) = {
+ issueNormalTypeError(tree, message)
+ fail()
+ }
+
+ private def implRefError(message: String) = {
+ val treeInfo.Applied(implRef, _, _) = macroDdef.rhs
+ genericError(implRef, message)
+ }
+
+ private def compatibilityError(message: String) =
+ implRefError(
+ "macro implementation has incompatible shape:"+
+ "\n required: " + showMeth(rparamss, rret, abbreviate = true) +
+ "\n found : " + showMeth(aparamss, aret, abbreviate = false) +
+ "\n" + message)
+
+ // Phase I: sanity checks
+
+ def MacroDefIsFastTrack() = {
+ macroLogVerbose("typecheck terminated unexpectedly: macro is fast track")
+ assert(!macroDdef.tpt.isEmpty, "fast track macros must provide result type")
+ throw MacroBodyTypecheckException // don't call fail, because we don't need IS_ERROR
+ }
+
+ def MacroDefIsQmarkQmarkQmark() = {
+ macroLogVerbose("typecheck terminated unexpectedly: macro is ???")
+ throw MacroBodyTypecheckException
+ }
+
+ def MacroFeatureNotEnabled() = {
+ macroLogVerbose("typecheck terminated unexpectedly: language.experimental.macros feature is not enabled")
+ fail()
+ }
+
+ // Phase II: typecheck the right-hand side of the macro def
+
+ // do nothing, just fail. relevant typecheck errors have already been reported
+ def MacroDefUntypeableBodyError() = fail()
+
+ def MacroDefInvalidBodyError() = genericError(macroDdef, "macro body has wrong shape:\n required: macro [<implementation object>].<method name>[[<type args>]]")
+
+ def MacroImplNotPublicError() = implRefError("macro implementation must be public")
+
+ def MacroImplOverloadedError() = implRefError("macro implementation cannot be overloaded")
+
+ def MacroImplWrongNumberOfTypeArgumentsError(macroImplRef: Tree) = {
+ val MacroImplReference(owner, meth, targs) = macroImplRef
+ val diagnostic = if (meth.typeParams.length > targs.length) "has too few type arguments" else "has too many arguments"
+ implRefError(s"macro implementation reference $diagnostic for " + treeSymTypeMsg(macroImplRef))
+ }
+
+ def MacroImplNotStaticError() = implRefError("macro implementation must be in statically accessible object")
+
+ // Phase III: check compatibility between the macro def and its macro impl
+ // aXXX (e.g. aparams) => characteristics of the macro impl ("a" stands for "actual")
+ // rXXX (e.g. rparams) => characteristics of a reference macro impl signature synthesized from the macro def ("r" stands for "reference")
+
+ def MacroImplNonTagImplicitParameters(params: List[Symbol]) = implRefError("macro implementations cannot have implicit parameters other than WeakTypeTag evidences")
+
+ def MacroImplParamssMismatchError() = compatibilityError("number of parameter sections differ")
+
+ def MacroImplExtraParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(lengthMsg("value", "found", aparams(rparams.length)))
+
+ def MacroImplMissingParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(abbreviateCoreAliases(lengthMsg("value", "required", rparams(aparams.length))))
+
+ def checkMacroImplParamTypeMismatch(atpe: Type, rparam: Symbol) = checkSubType("parameter " + rparam.name, rparam.tpe, atpe)
+
+ def checkMacroImplResultTypeMismatch(atpe: Type, rret: Type) = checkSubType("return type", atpe, rret)
+
+ def MacroImplParamNameMismatchError(aparam: Symbol, rparam: Symbol) = compatibilityError("parameter names differ: " + rparam.name + " != " + aparam.name)
+
+ def MacroImplVarargMismatchError(aparam: Symbol, rparam: Symbol) = {
+ if (isRepeated(rparam) && !isRepeated(aparam))
+ compatibilityError("types incompatible for parameter " + rparam.name + ": corresponding is not a vararg parameter")
+ if (!isRepeated(rparam) && isRepeated(aparam))
+ compatibilityError("types incompatible for parameter " + aparam.name + ": corresponding is not a vararg parameter")
+ }
+
+ def MacroImplTargMismatchError(atargs: List[Type], atparams: List[Symbol]) =
+ compatibilityError(typer.infer.InferErrorGen.NotWithinBoundsErrorMessage("", atargs, atparams, macroDebugVerbose || settings.explaintypes.value))
+
+ def MacroImplTparamInstantiationError(atparams: List[Symbol], ex: NoInstance) =
+ compatibilityError(
+ "type parameters "+(atparams map (_.defString) mkString ", ")+" cannot be instantiated\n"+
+ ex.getMessage)
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 7910d30..3fe98ed 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -7,76 +7,73 @@ package scala.tools.nsc
package typechecker
import symtab.Flags._
-import scala.collection.mutable.ListBuffer
-import annotation.tailrec
+import scala.collection.mutable.{LinkedHashSet, Set}
+import scala.annotation.tailrec
-/** This trait ...
- *
+/**
* @author Martin Odersky
* @version 1.0
*/
trait Contexts { self: Analyzer =>
import global._
- val NoContext = new Context {
- override def implicitss: List[List[ImplicitInfo]] = List()
- outer = this
+ object NoContext extends Context {
+ outer = this
+ enclClass = this
+ enclMethod = this
+
+ override def nextEnclosing(p: Context => Boolean): Context = this
+ override def enclosingContextChain: List[Context] = Nil
+ override def implicitss: List[List[ImplicitInfo]] = Nil
+ override def toString = "NoContext"
+ }
+ private object RootImports {
+ import definitions._
+ // Possible lists of root imports
+ val javaList = JavaLangPackage :: Nil
+ val javaAndScalaList = JavaLangPackage :: ScalaPackage :: Nil
+ val completeList = JavaLangPackage :: ScalaPackage :: PredefModule :: Nil
}
- NoContext.enclClass = NoContext
- NoContext.enclMethod = NoContext
- private val startContext = {
+ private lazy val startContext = {
NoContext.make(
- global.Template(List(), emptyValDef, List()) setSymbol global.NoSymbol setType global.NoType,
- global.definitions.RootClass,
- global.definitions.RootClass.info.decls)
+ Template(List(), emptyValDef, List()) setSymbol global.NoSymbol setType global.NoType,
+ rootMirror.RootClass,
+ rootMirror.RootClass.info.decls)
}
var lastAccessCheckDetails: String = ""
- /** List of objects and packages to import from in
- * a root context. This list is sensitive to the
- * compiler settings.
+ /** List of symbols to import from in a root context. Typically that
+ * is `java.lang`, `scala`, and [[scala.Predef]], in that order. Exceptions:
+ *
+ * - if option `-Yno-imports` is given, nothing is imported
+ * - if the unit is java defined, only `java.lang` is imported
+ * - if option `-Yno-predef` is given, if the unit body has an import of Predef
+ * among its leading imports, or if the tree is [[scala.Predef]], `Predef` is not imported.
*/
- protected def rootImports(unit: CompilationUnit, tree: Tree): List[Symbol] = {
- import definitions._
- val imps = new ListBuffer[Symbol]
- if (!settings.noimports.value) {
- assert(isDefinitionsInitialized)
- imps += JavaLangPackage
- if (!unit.isJava) {
- assert(ScalaPackage ne null, "Scala package is null")
- imps += ScalaPackage
- if (!(treeInfo.isUnitInScala(unit.body, nme.Predef) ||
- treeInfo.isUnitInScala(unit.body, tpnme.ScalaObject) ||
- treeInfo.containsLeadingPredefImport(List(unit.body))))
- imps += PredefModule
- }
- }
- imps.toList
- }
+ protected def rootImports(unit: CompilationUnit): List[Symbol] = {
+ assert(definitions.isDefinitionsInitialized, "definitions uninitialized")
- def rootContext(unit: CompilationUnit): Context =
- rootContext(unit, EmptyTree, false)
+ if (settings.noimports.value) Nil
+ else if (unit.isJava) RootImports.javaList
+ else if (settings.nopredef.value || treeInfo.noPredefImportForUnit(unit.body)) RootImports.javaAndScalaList
+ else RootImports.completeList
+ }
+ def rootContext(unit: CompilationUnit): Context = rootContext(unit, EmptyTree, false)
+ def rootContext(unit: CompilationUnit, tree: Tree): Context = rootContext(unit, tree, false)
def rootContext(unit: CompilationUnit, tree: Tree, erasedTypes: Boolean): Context = {
import definitions._
var sc = startContext
- def addImport(pkg: Symbol) {
- assert(pkg ne null)
- val qual = gen.mkAttributedStableRef(pkg)
- sc = sc.makeNewImport(
- Import(qual, List(ImportSelector(nme.WILDCARD, -1, null, -1)))
- .setSymbol(NoSymbol.newImport(NoPosition).setFlag(SYNTHETIC).setInfo(ImportType(qual)))
- .setType(NoType))
+ for (sym <- rootImports(unit)) {
+ sc = sc.makeNewImport(sym)
sc.depth += 1
}
- for (imp <- rootImports(unit, tree))
- addImport(imp)
val c = sc.make(unit, tree, sc.owner, sc.scope, sc.imports)
- c.reportAmbiguousErrors = !erasedTypes
- c.reportGeneralErrors = !erasedTypes
+ if (erasedTypes) c.setThrowErrors() else c.setReportErrors()
c.implicitsEnabled = !erasedTypes
+ c.enrichmentEnabled = c.implicitsEnabled
c
}
@@ -87,25 +84,43 @@ trait Contexts { self: Analyzer =>
case Import(qual, _) => qual.tpe = singleType(qual.symbol.owner.thisType, qual.symbol)
case _ =>
}
+ sc.flushAndReturnBuffer()
+ sc.flushAndReturnWarningsBuffer()
sc = sc.outer
}
}
+ private object Errors {
+ final val ReportErrors = 1 << 0
+ final val BufferErrors = 1 << 1
+ final val AmbiguousErrors = 1 << 2
+ final val notThrowMask = ReportErrors | BufferErrors
+ final val AllMask = ReportErrors | BufferErrors | AmbiguousErrors
+ }
+
class Context private[typechecker] {
- var unit: CompilationUnit = _
+ import Errors._
+
+ var unit: CompilationUnit = NoCompilationUnit
var tree: Tree = _ // Tree associated with this context
var owner: Symbol = NoSymbol // The current owner
var scope: Scope = _ // The current scope
var outer: Context = _ // The next outer context
var enclClass: Context = _ // The next outer context whose tree is a
// template or package definition
+ @inline final def savingEnclClass[A](c: Context)(a: => A): A = {
+ val saved = enclClass
+ enclClass = c
+ try a finally enclClass = saved
+ }
+
var enclMethod: Context = _ // The next outer context whose tree is a method
var variance: Int = _ // Variance relative to enclosing class
private var _undetparams: List[Symbol] = List() // Undetermined type parameters,
// not inherited to child contexts
var depth: Int = 0
var imports: List[ImportInfo] = List() // currently visible imports
- var openImplicits: List[(Type,Symbol)] = List() // types for which implicit arguments
+ var openImplicits: List[OpenImplicit] = List() // types for which implicit arguments
// are currently searched
// for a named application block (Tree) the corresponding NamedApplyInfo
var namedApplyBlockInfo: Option[(Tree, NamedApplyInfo)] = None
@@ -117,10 +132,10 @@ trait Contexts { self: Analyzer =>
// (the call to the super or self constructor in the first line of a constructor)
// in this context the object's fields should not be in scope
- var reportAmbiguousErrors = false
- var reportGeneralErrors = false
var diagnostic: List[String] = Nil // these messages are printed when issuing an error
var implicitsEnabled = false
+ var macrosEnabled = true
+ var enrichmentEnabled = false // to selectively allow enrichment in patterns, where other kinds of implicit conversions are not allowed
var checking = false
var retyping = false
@@ -130,12 +145,18 @@ trait Contexts { self: Analyzer =>
var typingIndentLevel: Int = 0
def typingIndent = " " * typingIndentLevel
- def undetparamsString = if (undetparams.isEmpty) "" else undetparams.mkString("undetparams=", ", ", "")
+ var buffer: Set[AbsTypeError] = _
+ var warningsBuffer: Set[(Position, String)] = _
+
+ def enclClassOrMethod: Context =
+ if ((owner eq NoSymbol) || (owner.isClass) || (owner.isMethod)) this
+ else outer.enclClassOrMethod
+
+ def undetparamsString =
+ if (undetparams.isEmpty) ""
+ else undetparams.mkString("undetparams=", ", ", "")
def undetparams = _undetparams
- def undetparams_=(ps: List[Symbol]) = {
- //System.out.println("undetparams = " + ps);//debug
- _undetparams = ps
- }
+ def undetparams_=(ps: List[Symbol]) = { _undetparams = ps }
def extractUndetparams() = {
val tparams = undetparams
@@ -143,36 +164,99 @@ trait Contexts { self: Analyzer =>
tparams
}
- def withoutReportingErrors[T](op: => T): T = {
- val saved = reportGeneralErrors
- reportGeneralErrors = false
+ private[this] var mode = 0
+
+ def errBuffer = buffer
+ def hasErrors = buffer.nonEmpty
+ def hasWarnings = warningsBuffer.nonEmpty
+
+ def state: Int = mode
+ def restoreState(state0: Int) = mode = state0
+
+ def reportErrors = (state & ReportErrors) != 0
+ def bufferErrors = (state & BufferErrors) != 0
+ def ambiguousErrors = (state & AmbiguousErrors) != 0
+ def throwErrors = (state & notThrowMask) == 0
+
+ def setReportErrors() = mode = (ReportErrors | AmbiguousErrors)
+ def setBufferErrors() = {
+ //assert(bufferErrors || !hasErrors, "When entering the buffer state, context has to be clean. Current buffer: " + buffer)
+ mode = BufferErrors
+ }
+ def setThrowErrors() = mode &= (~AllMask)
+ def setAmbiguousErrors(report: Boolean) = if (report) mode |= AmbiguousErrors else mode &= notThrowMask
+
+ def updateBuffer(errors: Set[AbsTypeError]) = buffer ++= errors
+ def condBufferFlush(removeP: AbsTypeError => Boolean) {
+ val elems = buffer.filter(removeP)
+ buffer --= elems
+ }
+ def flushBuffer() { buffer.clear() }
+ def flushAndReturnBuffer(): Set[AbsTypeError] = {
+ val current = buffer.clone()
+ buffer.clear()
+ current
+ }
+ def flushAndReturnWarningsBuffer(): Set[(Position, String)] = {
+ val current = warningsBuffer.clone()
+ warningsBuffer.clear()
+ current
+ }
+
+ def logError(err: AbsTypeError) = buffer += err
+
+ def withImplicitsEnabled[T](op: => T): T = {
+ val saved = implicitsEnabled
+ implicitsEnabled = true
try op
- finally reportGeneralErrors = saved
+ finally implicitsEnabled = saved
}
def withImplicitsDisabled[T](op: => T): T = {
val saved = implicitsEnabled
implicitsEnabled = false
+ val savedP = enrichmentEnabled
+ enrichmentEnabled = false
try op
- finally implicitsEnabled = saved
+ finally {
+ implicitsEnabled = saved
+ enrichmentEnabled = savedP
+ }
+ }
+
+ def withImplicitsDisabledAllowEnrichment[T](op: => T): T = {
+ val saved = implicitsEnabled
+ implicitsEnabled = false
+ val savedP = enrichmentEnabled
+ enrichmentEnabled = true
+ try op
+ finally {
+ implicitsEnabled = saved
+ enrichmentEnabled = savedP
+ }
+ }
+
+ def withMacrosEnabled[T](op: => T): T = {
+ val saved = macrosEnabled
+ macrosEnabled = true
+ try op
+ finally macrosEnabled = saved
+ }
+
+ def withMacrosDisabled[T](op: => T): T = {
+ val saved = macrosEnabled
+ macrosEnabled = false
+ try op
+ finally macrosEnabled = saved
}
- /**
- * @param unit ...
- * @param tree ...
- * @param owner ...
- * @param scope ...
- * @param imports ...
- * @return ...
- */
def make(unit: CompilationUnit, tree: Tree, owner: Symbol,
scope: Scope, imports: List[ImportInfo]): Context = {
- val c = new Context
- c.unit = unit
- c.tree = tree
+ val c = new Context
+ c.unit = unit
+ c.tree = tree
c.owner = owner
c.scope = scope
-
c.outer = this
tree match {
@@ -197,39 +281,46 @@ trait Contexts { self: Analyzer =>
c.depth = if (scope == this.scope) this.depth else this.depth + 1
c.imports = imports
c.inSelfSuperCall = inSelfSuperCall
- c.reportAmbiguousErrors = this.reportAmbiguousErrors
- c.reportGeneralErrors = this.reportGeneralErrors
+ c.restoreState(this.state)
c.diagnostic = this.diagnostic
c.typingIndentLevel = typingIndentLevel
c.implicitsEnabled = this.implicitsEnabled
+ c.macrosEnabled = this.macrosEnabled
+ c.enrichmentEnabled = this.enrichmentEnabled
c.checking = this.checking
c.retyping = this.retyping
c.openImplicits = this.openImplicits
+ c.buffer = if (this.buffer == null) LinkedHashSet[AbsTypeError]() else this.buffer // need to initialize
+ c.warningsBuffer = if (this.warningsBuffer == null) LinkedHashSet[(Position, String)]() else this.warningsBuffer
registerContext(c.asInstanceOf[analyzer.Context])
+ debuglog("[context] ++ " + c.unit + " / " + tree.summaryString)
c
}
+ // TODO: remove? Doesn't seem to be used
def make(unit: CompilationUnit): Context = {
val c = make(unit, EmptyTree, owner, scope, imports)
- c.reportAmbiguousErrors = true
- c.reportGeneralErrors = true
+ c.setReportErrors()
c.implicitsEnabled = true
+ c.macrosEnabled = true
c
}
+ def makeNewImport(sym: Symbol): Context =
+ makeNewImport(gen.mkWildcardImport(sym))
+
def makeNewImport(imp: Import): Context =
make(unit, imp, owner, scope, new ImportInfo(imp, depth) :: imports)
- def make(tree: Tree, owner: Symbol, scope: Scope): Context = {
+ def make(tree: Tree, owner: Symbol, scope: Scope): Context =
if (tree == this.tree && owner == this.owner && scope == this.scope) this
else make0(tree, owner, scope)
- }
- private def make0(tree : Tree, owner : Symbol, scope : Scope) : Context = {
+
+ private def make0(tree: Tree, owner: Symbol, scope: Scope): Context =
make(unit, tree, owner, scope, imports)
- }
def makeNewScope(tree: Tree, owner: Symbol): Context =
- make(tree, owner, new Scope(scope))
+ make(tree, owner, newNestedScope(scope))
// IDE stuff: distinguish between scopes created for typing and scopes created for naming.
def make(tree: Tree, owner: Symbol): Context =
@@ -240,26 +331,36 @@ trait Contexts { self: Analyzer =>
def makeSilent(reportAmbiguousErrors: Boolean, newtree: Tree = tree): Context = {
val c = make(newtree)
- c.reportGeneralErrors = false
- c.reportAmbiguousErrors = reportAmbiguousErrors
+ c.setBufferErrors()
+ c.setAmbiguousErrors(reportAmbiguousErrors)
+ c.buffer = new LinkedHashSet[AbsTypeError]()
c
}
def makeImplicit(reportAmbiguousErrors: Boolean) = {
val c = makeSilent(reportAmbiguousErrors)
c.implicitsEnabled = false
+ c.enrichmentEnabled = false
c
}
+ /**
+ * A context for typing constructor parameter ValDefs, super or self invocation arguments and default getters
+ * of constructors. These expressions need to be type checked in a scope outside the class, cf. spec 5.3.1.
+ *
+ * This method is called by namer / typer where `this` is the context for the constructor DefDef. The
+ * owner of the resulting (new) context is the outer context for the Template, i.e. the context for the
+ * ClassDef. This means that class type parameters will be in scope. The value parameters of the current
+ * constructor are also entered into the new constructor scope. Members of the class however will not be
+ * accessible.
+ */
def makeConstructorContext = {
var baseContext = enclClass.outer
- //todo: find out why we need next line
while (baseContext.tree.isInstanceOf[Template])
baseContext = baseContext.outer
val argContext = baseContext.makeNewScope(tree, owner)
argContext.inSelfSuperCall = true
- argContext.reportGeneralErrors = this.reportGeneralErrors
- argContext.reportAmbiguousErrors = this.reportAmbiguousErrors
+ argContext.restoreState(this.state)
def enterElems(c: Context) {
def enterLocalElems(e: ScopeEntry) {
if (e != null && e.owner == c.scope) {
@@ -272,64 +373,62 @@ trait Contexts { self: Analyzer =>
enterLocalElems(c.scope.elems)
}
}
+ // Enter the scope elements of this (the scope for the constructor DefDef) into the new constructor scope.
+ // Concretely, this will enter the value parameters of constructor.
enterElems(this)
argContext
}
- private def diagString =
- if (diagnostic.isEmpty) ""
- else diagnostic.mkString("\n","\n", "")
-
private def addDiagString(msg: String) = {
- val ds = diagString
+ val ds =
+ if (diagnostic.isEmpty) ""
+ else diagnostic.mkString("\n","\n", "")
if (msg endsWith ds) msg else msg + ds
}
private def unitError(pos: Position, msg: String) =
unit.error(pos, if (checking) "\n**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg)
+ @inline private def issueCommon(err: AbsTypeError)(pf: PartialFunction[AbsTypeError, Unit]) {
+ debugwarn("issue error: " + err.errMsg)
+ if (settings.Yissuedebug.value) (new Exception).printStackTrace()
+ if (pf isDefinedAt err) pf(err)
+ else if (bufferErrors) { buffer += err }
+ else throw new TypeError(err.errPos, err.errMsg)
+ }
+
+ def issue(err: AbsTypeError) {
+ issueCommon(err) { case _ if reportErrors =>
+ unitError(err.errPos, addDiagString(err.errMsg))
+ }
+ }
+
+ def issueAmbiguousError(pre: Type, sym1: Symbol, sym2: Symbol, err: AbsTypeError) {
+ issueCommon(err) { case _ if ambiguousErrors =>
+ if (!pre.isErroneous && !sym1.isErroneous && !sym2.isErroneous)
+ unitError(err.errPos, err.errMsg)
+ }
+ }
+
+ def issueAmbiguousError(err: AbsTypeError) {
+ issueCommon(err) { case _ if ambiguousErrors => unitError(err.errPos, addDiagString(err.errMsg)) }
+ }
+
+ // TODO remove
def error(pos: Position, err: Throwable) =
- if (reportGeneralErrors) unitError(pos, addDiagString(err.getMessage()))
+ if (reportErrors) unitError(pos, addDiagString(err.getMessage()))
else throw err
def error(pos: Position, msg: String) = {
val msg1 = addDiagString(msg)
- if (reportGeneralErrors) unitError(pos, msg1)
+ if (reportErrors) unitError(pos, msg1)
else throw new TypeError(pos, msg1)
}
- def warning(pos: Position, msg: String) = {
- if (reportGeneralErrors) unit.warning(pos, msg)
- }
-
- /**
- * @param pos ...
- * @param pre ...
- * @param sym1 ...
- * @param sym2 ...
- * @param rest ...
- */
- def ambiguousError(pos: Position, pre: Type, sym1: Symbol, sym2: Symbol, rest: String) {
- val (reportPos, msg) = (
- if (sym1.hasDefaultFlag && sym2.hasDefaultFlag && sym1.enclClass == sym2.enclClass) {
- val methodName = nme.defaultGetterToMethod(sym1.name)
- (sym1.enclClass.pos,
- "in "+ sym1.enclClass +", multiple overloaded alternatives of " + methodName +
- " define default arguments")
- }
- else {
- (pos,
- ("ambiguous reference to overloaded definition,\n" +
- "both " + sym1 + sym1.locationString + " of type " + pre.memberType(sym1) +
- "\nand " + sym2 + sym2.locationString + " of type " + pre.memberType(sym2) +
- "\nmatch " + rest)
- )
- }
- )
- if (reportAmbiguousErrors) {
- if (!pre.isErroneous && !sym1.isErroneous && !sym2.isErroneous)
- unit.error(reportPos, msg)
- } else throw new TypeError(pos, msg)
+ def warning(pos: Position, msg: String): Unit = warning(pos, msg, false)
+ def warning(pos: Position, msg: String, force: Boolean) {
+ if (reportErrors || force) unit.warning(pos, msg)
+ else if (bufferErrors) warningsBuffer += ((pos, msg))
}
def isLocal(): Boolean = tree match {
@@ -339,6 +438,17 @@ trait Contexts { self: Analyzer =>
case _ => outer.isLocal()
}
+ /** Fast path for some slow checks (ambiguous assignment in Refchecks, and
+ * existence of __match for MatchTranslation in virtpatmat.) This logic probably
+ * needs improvement.
+ */
+ def isNameInScope(name: Name) = (
+ enclosingContextChain exists (ctx =>
+ (ctx.scope.lookupEntry(name) != null)
+ || (ctx.owner.rawInfo.member(name) != NoSymbol)
+ )
+ )
+
// nextOuter determines which context is searched next for implicits
// (after `this`, which contributes `newImplicits` below.) In
// most cases, it is simply the outer context: if we're owned by
@@ -359,21 +469,21 @@ trait Contexts { self: Analyzer =>
}
def nextEnclosing(p: Context => Boolean): Context =
- if (this == NoContext || p(this)) this else outer.nextEnclosing(p)
+ if (p(this)) this else outer.nextEnclosing(p)
- override def toString = (
- if (this == NoContext) "NoContext"
- else "Context(%s@%s scope=%s)".format(owner.fullName, tree.getClass.getName split "[.$]" last, scope.##)
- )
+ def enclosingContextChain: List[Context] = this :: outer.enclosingContextChain
- /** Is `sub' a subclass of `base' or a companion object of such a subclass?
+ override def toString = "Context(%s@%s unit=%s scope=%s errors=%b, reportErrors=%b, throwErrors=%b)".format(
+ owner.fullName, tree.shortClass, unit, scope.##, hasErrors, reportErrors, throwErrors
+ )
+ /** Is `sub` a subclass of `base` or a companion object of such a subclass?
*/
def isSubClassOrCompanion(sub: Symbol, base: Symbol) =
sub.isNonBottomSubClass(base) ||
sub.isModuleClass && sub.linkedClassOfClass.isNonBottomSubClass(base)
- /** Return closest enclosing context that defines a superclass of `clazz', or a
- * companion module of a superclass of `clazz', or NoContext if none exists */
+ /** Return closest enclosing context that defines a superclass of `clazz`, or a
+ * companion module of a superclass of `clazz`, or NoContext if none exists */
def enclosingSuperClassContext(clazz: Symbol): Context = {
var c = this.enclClass
while (c != NoContext &&
@@ -383,8 +493,8 @@ trait Contexts { self: Analyzer =>
c
}
- /** Return closest enclosing context that defines a subclass of `clazz' or a companion
- * object thereof, or NoContext if no such context exists
+ /** Return the closest enclosing context that defines a subclass of `clazz`
+ * or a companion object thereof, or `NoContext` if no such context exists.
*/
def enclosingSubClassContext(clazz: Symbol): Context = {
var c = this.enclClass
@@ -393,19 +503,14 @@ trait Contexts { self: Analyzer =>
c
}
- /** Is <code>sym</code> accessible as a member of tree `site' with type
- * <code>pre</code> in current context?
- *
- * @param sym ...
- * @param pre ...
- * @param superAccess ...
- * @return ...
+ /** Is `sym` accessible as a member of tree `site` with type
+ * `pre` in current context?
*/
def isAccessible(sym: Symbol, pre: Type, superAccess: Boolean = false): Boolean = {
lastAccessCheckDetails = ""
// Console.println("isAccessible(%s, %s, %s)".format(sym, pre, superAccess))
- @inline def accessWithinLinked(ab: Symbol) = {
+ def accessWithinLinked(ab: Symbol) = {
val linked = ab.linkedClassOfClass
// don't have access if there is no linked class
// (before adding the `ne NoSymbol` check, this was a no-op when linked eq NoSymbol,
@@ -413,7 +518,7 @@ trait Contexts { self: Analyzer =>
(linked ne NoSymbol) && accessWithin(linked)
}
- /** Are we inside definition of `ab'? */
+ /** Are we inside definition of `ab`? */
def accessWithin(ab: Symbol) = {
// #3663: we must disregard package nesting if sym isJavaDefined
if (sym.isJavaDefined) {
@@ -428,14 +533,14 @@ trait Contexts { self: Analyzer =>
/*
var c = this
while (c != NoContext && c.owner != owner) {
- if (c.outer eq null) assert(false, "accessWithin(" + owner + ") " + c);//debug
- if (c.outer.enclClass eq null) assert(false, "accessWithin(" + owner + ") " + c);//debug
+ if (c.outer eq null) abort("accessWithin(" + owner + ") " + c);//debug
+ if (c.outer.enclClass eq null) abort("accessWithin(" + owner + ") " + c);//debug
c = c.outer.enclClass
}
c != NoContext
}
*/
- /** Is `clazz' a subclass of an enclosing class? */
+ /** Is `clazz` a subclass of an enclosing class? */
def isSubClassOfEnclosing(clazz: Symbol): Boolean =
enclosingSuperClassContext(clazz) != NoContext
@@ -450,25 +555,30 @@ trait Contexts { self: Analyzer =>
if (c == NoContext)
lastAccessCheckDetails =
"\n Access to protected "+target+" not permitted because"+
- "\n "+"enclosing class "+this.enclClass.owner+this.enclClass.owner.locationString+" is not a subclass of "+
+ "\n "+"enclosing "+this.enclClass.owner+
+ this.enclClass.owner.locationString+" is not a subclass of "+
"\n "+sym.owner+sym.owner.locationString+" where target is defined"
- c != NoContext && {
- val res =
- isSubClassOrCompanion(pre.widen.typeSymbol, c.owner) ||
- c.owner.isModuleClass &&
- isSubClassOrCompanion(pre.widen.typeSymbol, c.owner.linkedClassOfClass)
- if (!res)
- lastAccessCheckDetails =
- "\n Access to protected "+target+" not permitted because"+
- "\n prefix type "+pre.widen+" does not conform to"+
- "\n "+c.owner+c.owner.locationString+" where the access take place"
- res
+ c != NoContext &&
+ {
+ target.isType || { // allow accesses to types from arbitrary subclasses fixes #4737
+ val res =
+ isSubClassOrCompanion(pre.widen.typeSymbol, c.owner) ||
+ c.owner.isModuleClass &&
+ isSubClassOrCompanion(pre.widen.typeSymbol, c.owner.linkedClassOfClass)
+ if (!res)
+ lastAccessCheckDetails =
+ "\n Access to protected "+target+" not permitted because"+
+ "\n prefix type "+pre.widen+" does not conform to"+
+ "\n "+c.owner+c.owner.locationString+" where the access take place"
+ res
+ }
}
}
(pre == NoPrefix) || {
val ab = sym.accessBoundary(sym.owner)
- ( (ab.isTerm || ab == definitions.RootClass)
+
+ ( (ab.isTerm || ab == rootMirror.RootClass)
|| (accessWithin(ab) || accessWithinLinked(ab)) &&
( !sym.hasLocalFlag
|| sym.owner.isImplClass // allow private local accesses to impl classes
@@ -497,7 +607,7 @@ trait Contexts { self: Analyzer =>
def restoreTypeBounds(tp: Type): Type = {
var current = tp
for ((sym, info) <- savedTypeBounds) {
- if (settings.debug.value) log("resetting " + sym + " to " + info);
+ debuglog("resetting " + sym + " to " + info);
sym.info match {
case TypeBounds(lo, hi) if (hi <:< lo && lo <:< hi) =>
current = current.instantiateTypeParams(List(sym), List(lo))
@@ -523,20 +633,27 @@ trait Contexts { self: Analyzer =>
* it is accessible, and if it is imported there is not already a local symbol
* with the same names. Local symbols override imported ones. This fixes #2866.
*/
- private def isQualifyingImplicit(sym: Symbol, pre: Type, imported: Boolean) =
+ private def isQualifyingImplicit(name: Name, sym: Symbol, pre: Type, imported: Boolean) =
sym.isImplicit &&
isAccessible(sym, pre) &&
!(imported && {
- val e = scope.lookupEntry(sym.name)
+ val e = scope.lookupEntry(name)
(e ne null) && (e.owner == scope)
})
- private def collectImplicits(syms: List[Symbol], pre: Type, imported: Boolean = false): List[ImplicitInfo] =
- for (sym <- syms if isQualifyingImplicit(sym, pre, imported)) yield
+ private def collectImplicits(syms: Scope, pre: Type, imported: Boolean = false): List[ImplicitInfo] =
+ for (sym <- syms.toList if isQualifyingImplicit(sym.name, sym, pre, imported)) yield
new ImplicitInfo(sym.name, pre, sym)
private def collectImplicitImports(imp: ImportInfo): List[ImplicitInfo] = {
- val pre = imp.qual.tpe
+ val qual = imp.qual
+
+ val pre =
+ if (qual.tpe.typeSymbol.isPackageClass)
+ // SI-6225 important if the imported symbol is inherited by the the package object.
+ singleType(qual.tpe, qual.tpe member nme.PACKAGE)
+ else
+ qual.tpe
def collect(sels: List[ImportSelector]): List[ImplicitInfo] = sels match {
case List() =>
List()
@@ -546,34 +663,40 @@ trait Contexts { self: Analyzer =>
var impls = collect(sels1) filter (info => info.name != from)
if (to != nme.WILDCARD) {
for (sym <- imp.importedSymbol(to).alternatives)
- if (isQualifyingImplicit(sym, pre, imported = true))
+ if (isQualifyingImplicit(to, sym, pre, imported = true))
impls = new ImplicitInfo(to, pre, sym) :: impls
}
impls
}
- //if (settings.debug.value) log("collect implicit imports " + imp + "=" + collect(imp.tree.selectors))//DEBUG
+ //debuglog("collect implicit imports " + imp + "=" + collect(imp.tree.selectors))//DEBUG
collect(imp.tree.selectors)
}
+ /* SI-5892 / SI-4270: `implicitss` can return results which are not accessible at the
+ * point where implicit search is triggered. Example: implicits in (annotations of)
+ * class type parameters (SI-5892). The `context.owner` is the class symbol, therefore
+ * `implicitss` will return implicit conversions defined inside the class. These are
+ * filtered out later by `eligibleInfos` (SI-4270 / 9129cfe9), as they don't type-check.
+ */
def implicitss: List[List[ImplicitInfo]] = {
-
if (implicitsRunId != currentRunId) {
implicitsRunId = currentRunId
implicitsCache = List()
val newImplicits: List[ImplicitInfo] =
if (owner != nextOuter.owner && owner.isClass && !owner.isPackageClass && !inSelfSuperCall) {
if (!owner.isInitialized) return nextOuter.implicitss
- // if (settings.debug.value) log("collect member implicits " + owner + ", implicit members = " + owner.thisType.implicitMembers)//DEBUG
- val savedEnclClass = enclClass
- this.enclClass = this
- val res = collectImplicits(owner.thisType.implicitMembers, owner.thisType)
- this.enclClass = savedEnclClass
- res
+ // debuglog("collect member implicits " + owner + ", implicit members = " + owner.thisType.implicitMembers)//DEBUG
+ savingEnclClass(this) {
+ // !!! In the body of `class C(implicit a: A) { }`, `implicitss` returns `List(List(a), List(a), List(<predef..)))`
+ // it handled correctly by implicit search, which considers the second `a` to be shadowed, but should be
+ // remedied nonetheless.
+ collectImplicits(owner.thisType.implicitMembers, owner.thisType)
+ }
} else if (scope != nextOuter.scope && !owner.isPackageClass) {
- if (settings.debug.value) log("collect local implicits " + scope.toList)//DEBUG
- collectImplicits(scope.toList, NoPrefix)
+ debuglog("collect local implicits " + scope.toList)//DEBUG
+ collectImplicits(scope, NoPrefix)
} else if (imports != nextOuter.imports) {
- assert(imports.tail == nextOuter.imports)
+ assert(imports.tail == nextOuter.imports, (imports, nextOuter.imports))
collectImplicitImports(imports.head)
} else if (owner.isPackageClass) {
// the corresponding package object may contain implicit members.
@@ -597,8 +720,8 @@ trait Contexts { self: Analyzer =>
def lookup(name: Name, expectedOwner: Symbol) = {
var res: Symbol = NoSymbol
var ctx = this
- while(res == NoSymbol && ctx.outer != ctx) {
- val s = ctx.scope.lookup(name)
+ while (res == NoSymbol && ctx.outer != ctx) {
+ val s = ctx.scope lookup name
if (s != NoSymbol && s.owner == expectedOwner)
res = s
else
@@ -606,21 +729,21 @@ trait Contexts { self: Analyzer =>
}
res
}
- }
+ } //class Context
+
class ImportInfo(val tree: Import, val depth: Int) {
/** The prefix expression */
def qual: Tree = tree.symbol.info match {
case ImportType(expr) => expr
case ErrorType => tree setType NoType // fix for #2870
- case _ => throw new FatalError("symbol " + tree.symbol + " has bad type: " + tree.symbol.info);//debug
+ case _ => throw new FatalError("symbol " + tree.symbol + " has bad type: " + tree.symbol.info) //debug
}
/** Is name imported explicitly, not via wildcard? */
def isExplicitImport(name: Name): Boolean =
tree.selectors exists (_.rename == name.toTermName)
- /** The symbol with name <code>name</code> imported from import clause
- * <code>tree</code>.
+ /** The symbol with name `name` imported from import clause `tree`.
*/
def importedSymbol(name: Name): Symbol = {
var result: Symbol = NoSymbol
@@ -639,15 +762,15 @@ trait Contexts { self: Analyzer =>
result
}
- def allImportedSymbols: List[Symbol] =
+ def allImportedSymbols: Iterable[Symbol] =
qual.tpe.members flatMap (transformImport(tree.selectors, _))
private def transformImport(selectors: List[ImportSelector], sym: Symbol): List[Symbol] = selectors match {
case List() => List()
case List(ImportSelector(nme.WILDCARD, _, _, _)) => List(sym)
- case ImportSelector(from, _, to, _) :: _ if (from == sym.name) =>
+ case ImportSelector(from, _, to, _) :: _ if from == sym.name =>
if (to == nme.WILDCARD) List()
- else { val sym1 = sym.cloneSymbol; sym1.name = to; List(sym1) }
+ else List(sym.cloneSymbol(sym.owner, sym.rawflags, to))
case _ :: rest => transformImport(rest, sym)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala b/src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala
deleted file mode 100644
index 4b08460..0000000
--- a/src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala
+++ /dev/null
@@ -1,645 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package typechecker
-
-import symtab.Flags._
-import transform.{ InfoTransform, TypingTransformers }
-import scala.collection.mutable.ListBuffer
-
-abstract class DeVirtualize /* extends InfoTransform with TypingTransformers {
-
- import global._
- import definitions._
- import typer.{typed, typedOperator, atOwner}
-
- /** the following two members override abstract members in Transform */
- val phaseName: String = "devirtualize"
-
- /** The phase might set the following new flags: */
- override def phaseNextFlags: Long = notDEFERRED | notOVERRIDE | notFINAL | lateABSTRACT
-
- def newTransformer(unit: CompilationUnit): DeVirtualizeTransformer =
- new DeVirtualizeTransformer(unit)
-
- /** The class does not change base-classes of existing classes */
- override def changesBaseClasses = false
-
- def transformInfo(sym: Symbol, tp: Type): Type =
- if (sym.isThisSym && sym.owner.isVirtualClass) {
- val clazz = sym.owner
- intersectionType(
- List(
- appliedType(abstractType(clazz).typeConstructor, clazz.typeParams map (_.tpe)),
- clazz.tpe))
- } else devirtualizeMap(tp)
-
- /* todo:
- handle constructor arguments
- check: overriding classes must have same type params
- virtual classes cannot have self types
- */
-
- /** Do the following transformations everywhere in a type:
- *
- * 1. Replace a virtual class
- *
- * attrs mods class VC[Ts] <: Ps { decls }
- *
- * by the following symbols
- *
- * attrs mods1 type VC[Ts] <: dvm(Ps) with VC$trait[Ts]
- * attrs mods2 trait VC$trait[Ts] extends AnyRef with ScalaObject {
- * this: VC[Ts] with VC$trait[Ts] => decls1
- * }
- *
- * The class symbol VC becomes the symbol of the workertrait.
- *
- * dvm is the devirtalization mapping which converts refs to
- * virtual classes to refs to their abstract types (@see devirtualize)
- * mods1 are the modifiers inherited to abstract types
- * mods2 are the modifiers inherited to worker traits
- * decls1 is decls but members that have an override modifier
- * lose it and any final modifier as well.
- *
- * 2. For all virtual member classes VC which
- * are not abstract and which are or inherit from a virtual class defined in current class
- * add a factory (@see mkFactory)
- * 3. Convert TypeRef's to VC where VC is a virtual class to TypeRef's to AT, where AT
- * is the abstract type corresponding to VC.
- *
- * Note: If a class inherits vc's from two different paths, a vc in the
- * inheriting class has to be created beforehand. This is done in phase ??? (NOT YET DONE!)
- *
- * Note: subclasses of virtual classes are treated as if they are virtual.
- * isVirtualClass returns true for them also.
- */
- object devirtualizeMap extends TypeMap {
- def apply(tp: Type): Type = mapOver(tp) match {
- case tp1 @ ClassInfoType(parents, decls0, clazz) =>
- var decls = decls0
- def enter(sym: Symbol) = // at next phase because names of worker traits change
- atPhase(ownPhase.next) { decls.enter(sym) }
- if (containsVirtuals(clazz)) {
- decls = new Scope
- for (m <- decls0.toList) {
- if (m.isVirtualClass) {
- m.setFlag(notDEFERRED | notFINAL | lateABSTRACT)
- enter(mkAbstractType(m))
- }
- enter(m)
- }
- for (m <- classesInNeedOfFactories(clazz))
- enter(mkFactory(m, clazz))
- }
- if (clazz.isVirtualClass) {
- println("virtual class: "+clazz+clazz.locationString)
- transformOwnerInfo(clazz)
- decls = new Scope
-
- // add virtual fields for all primary constructor parameters
- for (row <- paramTypesAndIndices(clazz.primaryConstructor.tpe, 0))
- for ((pt, i) <- row)
- enter(mkParamField(clazz, i, devirtualizeMap(pt)))
-
- // remove OVERRIDE from all workertrait members, except if they override a member in Object
- for (m <- decls0.toList) {
- if (!m.isConstructor) {
- if ((m hasFlag OVERRIDE) && m.overriddenSymbol(ObjectClass) == NoSymbol)
- m setFlag (notOVERRIDE | notFINAL)
- enter(m)
- }
- }
- if (clazz.thisSym == clazz) clazz.typeOfThis = clazz.thisType
- // ... to give a hook on which we can hang selftype transformers
- ClassInfoType(List(ObjectClass.tpe, ScalaObjectClass.tpe), decls, clazz)
- } else {
- ClassInfoType(parents map this, decls, clazz)
- }
- case tp1 @ TypeRef(pre, clazz, args) if clazz.isVirtualClass =>
- TypeRef(pre, abstractType(clazz), args)
- case tp1 =>
- tp1
- }
- }
-
- /** Transform owner of given clazz symbol */
- protected def transformOwnerInfo(clazz: Symbol) { atPhase(ownPhase.next) { clazz.owner.info } }
-
- /** Names of derived classes and factories */
- protected def concreteClassName(clazz: Symbol) =
- atPhase(ownPhase) { newTypeName(clazz.name+"$fix") }
- protected def factoryName(clazz: Symbol) =
- atPhase(ownPhase) { newTermName("new$"+clazz.name) }
-
- /** Does `clazz' contain virtual classes? */
- protected def containsVirtuals(clazz: Symbol) = clazz.info.decls.toList exists (_.isVirtualClass)
-
- /** The inner classes that need factory methods in `clazz'
- * This is intended to catch situations like the following
- *
- * abstract class C {
- * class V <: {...}
- * class W extends V
- * }
- * class D extends C {
- * class V <: {...}
- * // factories needed for V and W!
- * }
- */
- protected def classesInNeedOfFactories(clazz: Symbol) = atPhase(ownPhase) {
- def isOverriddenVirtual(c: Symbol) =
- c.isVirtualClass && clazz.info.decl(c.name).isVirtualClass
- val xs = clazz.info.members.toList filter (x => x.isVirtualClass && !x.hasFlag(ABSTRACT))
- for (m <- clazz.info.members.toList;
- if (m.isVirtualClass && !(m hasFlag ABSTRACT) &&
- (m.info.baseClasses exists isOverriddenVirtual))) yield m
- }
-
- /** The abstract type corresponding to a virtual class. */
- protected def abstractType(clazz: Symbol): Symbol = atPhase(ownPhase.next) {
- val abstpe = clazz.owner.info.decl(atPhase(ownPhase) { clazz.name })
- assert(abstpe.isAbstractType)
- abstpe
- }
-
- /** The factory corresponding to a virtual class. */
- protected def factory(clazz: Symbol, owner: Symbol) = atPhase(ownPhase.next) {
- val fsym = owner.info.member(factoryName(clazz))
- assert(fsym.isMethod, clazz)
- fsym
- }
-
- /** The name of the field representing a constructor parameter of a virtual class */
- protected def paramFieldName(clazz: Symbol, index: Int) = atPhase(ownPhase) {
- nme.expandedName(newTermName("param$"+index), clazz)
- }
-
- /** The name of the field representing a constructor parameter of a virtual class */
- protected def fixParamName(index: Int) = newTermName("fix$"+index)
-
- /** The field representing a constructor parameter of a virtual class */
- protected def paramField(clazz: Symbol, index: Int) = atPhase(ownPhase.next) {
- clazz.info.decl(paramFieldName(clazz, index))
- }
-
- /** The flags that an abstract type can inherit from its virtual class */
- protected val absTypeFlagMask = AccessFlags | DEFERRED
-
- /** The flags that a factory method can inherit from its virtual class */
- protected val factoryFlagMask = AccessFlags
-
- /** Create a polytype with given type parameters and given type, or return just the type
- * if type params is empty. */
- protected def mkPolyType(tparams: List[Symbol], tp: Type) =
- if (tparams.isEmpty) tp else PolyType(tparams, tp)
-
- /** A lazy type to complete `sym', which is is generated for virtual class
- * `clazz'.
- * The info of the symbol is computed by method `getInfo'.
- * It is wrapped in copies of the type parameters of `clazz'.
- */
- abstract class PolyTypeCompleter(sym: Symbol, clazz: Symbol) extends LazyType {
- def getInfo: Type
- override val typeParams = cloneSymbols(clazz.typeParams, sym)
- override def complete(sym: Symbol) {
- sym.setInfo(
- mkPolyType(typeParams, getInfo.substSym(clazz.typeParams, typeParams)))
- }
- }
-
- protected def wasVirtualClass(sym: Symbol) = {
- sym.isVirtualClass || {
- sym.info
- sym hasFlag notDEFERRED
- }
- }
-
- protected def addOverriddenVirtuals(clazz: Symbol) = {
- (clazz.allOverriddenSymbols filter wasVirtualClass) ::: List(clazz)
- }
-
- protected def addOverriddenVirtuals(tpe: Type) = tpe match {
- case TypeRef(pre, sym, args) =>
- { for (vc <- sym.allOverriddenSymbols if wasVirtualClass(vc))
- yield typeRef(pre, vc, args) }.reverse ::: List(tpe)
- }
-
- protected def mkParamField(clazz: Symbol, index: Int, tpe: Type): Symbol = {
- val param = clazz.newMethod(clazz.pos, paramFieldName(clazz, index))
- .setFlag(PROTECTED | LOCAL | DEFERRED | EXPANDEDNAME | SYNTHETIC | STABLE)
- atPhase(ownPhase.next) {
- param.setInfo(NullaryMethodType(tpe))
- }
- param
- }
-
- protected def mkAbstractType(clazz: Symbol): Symbol = {
- val cabstype = clazz.owner.newAbstractType(clazz.pos, clazz.name)
- .setFlag(clazz.flags & absTypeFlagMask | SYNTHETIC)
- .setAnnotations(clazz.annotations)
- atPhase(ownPhase.next) {
- cabstype setInfo new PolyTypeCompleter(cabstype, clazz) {
- def getInfo = {
- val parents1 = clazz.info.parents map {
- p => devirtualizeMap(p.substSym(clazz.typeParams, typeParams))
- }
- val parents2 = addOverriddenVirtuals(clazz) map {
- c => typeRef(clazz.owner.thisType, c, typeParams map (_.tpe))
- }
- TypeBounds(NothingClass.tpe, intersectionType(parents1 ::: parents2))
- }
- }
- }
- }
-
- protected def paramTypesAndIndices(tpe: Type, start: Int): List[List[(Type, Int)]] = tpe match {
- case PolyType(_, restpe) => paramTypesAndIndices(restpe, start)
- case MethodType(params, restpe) =>
- val end = start + params.length
- (tpe.paramTypes zip List.range(start, end)) :: paramTypesAndIndices(restpe, end)
- case _ =>
- List()
- }
-
- /* Add a factory symbol for a virtual class
- *
- * attrs mods class VC[Ts] <: Ps { decls }
- * with base classes BC[Us]'s
- *
- * which corresponds to the following definition :
- *
- * attrs mods3 def new$VC[Ts](): VC[Ts] = {
- * class VC$fix extends v2w(BC's[Ts]) with VC$trait[Ts] { ... }
- * new VC$fix
- * }
- *
- * where
- *
- * mods3 are the modifiers inherited to factories
- * v2w maps every virtual class to its workertrait and leaves other types alone.
- *
- * @param clazz The virtual class for which factory is added
- * @param owner The owner for which factory is added as a member
- * @param scope The scope into which factory is entered
- */
- def mkFactory(clazz: Symbol, owner: Symbol): Symbol = {
- val pos = if (clazz.owner == owner) clazz.pos else owner.pos
- val factory = owner.newMethod(pos, factoryName(clazz))
- .setFlag(clazz.flags & factoryFlagMask | SYNTHETIC)
- .setAnnotations(clazz.annotations)
- factory setInfo new PolyTypeCompleter(factory, clazz) {
- private def copyType(tpe: Type): Type = tpe match {
- case MethodType(formals, restpe) => MethodType(formals, copyType(restpe))
- case NullaryMethodType(restpe) => NullaryMethodType(copyType(restpe))
- case PolyType(_, _) => abort("bad case: "+tpe)
- case _ => owner.thisType.memberType(abstractType(clazz))
- }
- def getInfo = copyType(clazz.primaryConstructor.tpe)
- }
- factory
- }
-
- def distinct(ts: List[Type]): List[Type] = ts match {
- case List() => List()
- case t :: ts1 => t :: distinct(ts1 filter (_.typeSymbol != t.typeSymbol))
- }
-
- /** The concrete class symbol VC$fix in the factory symbol (@see mkFactory)
- * @param clazz the virtual class
- * @param factory the factory which returns an instance of this class
- */
- protected def mkConcreteClass(clazz: Symbol, factory: Symbol) = {
- val cclazz = factory.newClass(clazz.pos, concreteClassName(clazz))
- .setFlag(FINAL | SYNTHETIC)
- .setAnnotations(clazz.annotations)
-
- cclazz setInfo new LazyType {
- override def complete(sym: Symbol) {
- val parents1 = atPhase(ownPhase) {
- var superclazz = clazz
- do {
- superclazz = superclazz.info.parents.head.typeSymbol
- } while (wasVirtualClass(superclazz))
- val bcs = superclazz :: (clazz.info.baseClasses takeWhile (superclazz != )).reverse
- println("MKConcrete1 "+cclazz+factory.locationString+" "+bcs+" from "+clazz+clazz.locationString)
- println("MKConcrete2 "+cclazz+factory.locationString+" "+(bcs map factory.owner.thisType.memberType))
- bcs map factory.owner.thisType.memberType
- }
- atPhase(ownPhase.next) {
- val parents2 =
- distinct(parents1.flatMap(addOverriddenVirtuals))
- .map(_.substSym(clazz.typeParams, factory.typeParams))
- sym setInfo ClassInfoType(parents2, new Scope, cclazz)
- }
- }
- }
-
- cclazz
- }
-
- /** Perform the following tree transformations:
- *
- * 1. Add trees for abstract types (@see devirtualize),
- * worker traits (@see devirtualize)
- * and factories (@see mkFactory)
- *
- * 2. Replace a new VC().init(...) where VC is a virtual class with new$VC(...)
- *
- * 3. Replace references to VC.this and VC.super where VC is a virtual class
- * with VC$trait.this and VC$trait.super
- *
- * 4. Transform type references to virtual classes to type references of corresponding
- * abstract types.
- */
- class DeVirtualizeTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
- // all code is executed at phase ownPhase.next
-
- /** Add trees for abstract types, worker traits, and factories (@see mkFactory)
- * to template body `stats'
- */
- override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
- val stats1 = stats flatMap transformStat
- val fclasses = atPhase(ownPhase) {
- if (currentOwner.isClass && containsVirtuals(currentOwner)) classesInNeedOfFactories(currentOwner)
- else List()
- }
- val newDefs = fclasses map factoryDef
- if (newDefs.isEmpty) stats1 else stats1 ::: newDefs
- }
-
- def fixClassDef(clazz: Symbol, factory: Symbol): Tree = {
- val cclazz = mkConcreteClass(clazz, factory)
- val overrideBridges =
- for (m <- clazz.info.decls.toList if m hasFlag notOVERRIDE)
- yield overrideBridge(m, cclazz)
-
- val vparamss: List[List[ValDef]] = atPhase(ownPhase) {
- paramTypesAndIndices(clazz.primaryConstructor.tpe, 0) map {
- _ map {
- case (pt, i) =>
- atPos(factory.pos) {
- ValDef(Modifiers(PARAMACCESSOR | PRIVATE | LOCAL), fixParamName(i),
- TypeTree(devirtualizeMap(pt)), EmptyTree)
- }
- }
- }
- }
- val pfields: List[DefDef] = atPhase(ownPhase) {
- paramTypesAndIndices(clazz.primaryConstructor.tpe, 0) flatMap {
- _ map {
- case (pt, i) =>
- val pfield = cclazz.newMethod(cclazz.pos, paramFieldName(clazz, i))
- .setFlag(PROTECTED | LOCAL | EXPANDEDNAME | SYNTHETIC | STABLE)
- .setInfo(NullaryMethodType(pt))
- cclazz.info.decls enter pfield
- atPos(factory.pos) {
- DefDef(pfield, Ident(fixParamName(i)))
- }
- }
- }
- }
- atPos(clazz.pos) {
- ClassDef(cclazz, Modifiers(0), vparamss, List(List()), pfields ::: overrideBridges, clazz.pos.focus)
- }
- }
-
-
- /** The factory definition for virtual class `clazz' (@see mkFactory)
- * For a virtual class
- *
- * attrs mods class VC[Ts] <: Ps { decls }
- * with overridden classes _VC[Us]'s
- *
- * we need the following factory:
- *
- * attrs mods3 def new$VC[Ts](): VC[Ts] = {
- * class VC$fix extends _VC$trait's[Ts] with VC$trait[Ts] {
- * override-bridges
- * }
- * new VC$fix.asInstanceOf[VC[Ts]]
- * }
- *
- * where
- *
- * mods3 are the modifiers inherited to factories
- * override-bridges are definitions that link every symbol in a worker trait
- * that was overriding something to the overridden symbol
- * //todo: not sure what happens with abstract override?
- */
- def factoryDef(clazz: Symbol): Tree = {
- val factorySym = factory(clazz, currentOwner)
- val cclazzDef = fixClassDef(clazz, factorySym)
- println("Concrete: "+cclazzDef)
- val abstpeSym = abstractType(clazz)
- localTyper.typed {
- atPos(factorySym.pos) {
- DefDef(factorySym,
- Block(
- List(cclazzDef),
- TypeApply(
- Select(
- gen.mkForwarder(
- Select(New(TypeTree(cclazzDef.symbol.tpe)), nme.CONSTRUCTOR),
- factorySym.paramss),
- Any_asInstanceOf),
- List(
- TypeTree(
- currentOwner.thisType.memberType(abstpeSym)
- .substSym(abstpeSym.typeParams, factorySym.typeParams))))))
- }
- }
- }
-
- /** Create an override bridge for method `meth' in concrete class `cclazz'.
- * An override bridge has the form
- *
- * override f(xs1)...(xsN) = super.f(xs)...(xsN)
- */
- def overrideBridge(meth: Symbol, cclazz: Symbol) = atPos(meth.pos) {
- val bridge = meth.cloneSymbol(cclazz)
- .resetFlag(notOVERRIDE | notFINAL)
- cclazz.info.decls.enter(bridge)
- val superRef: Tree = Select(Super(cclazz, tpnme.EMPTY), meth)
- DefDef(bridge, gen.mkForwarder(superRef, bridge.paramss))
- }
-
- /** Replace definitions of virtual classes by definitions of corresponding
- * abstract type and worker traits.
- * Eliminate constructors of former virtual classes because these are now traits.
- */
- protected def transformStat(tree: Tree): List[Tree] = {
- val sym = tree.symbol
- tree match {
- case ClassDef(mods, name, tparams, templ)
- if (wasVirtualClass(sym)) =>
- val clazz = sym
- val absTypeSym = abstractType(clazz)
- val abstypeDef = TypeDef(absTypeSym)
- List(localTyper.typed(abstypeDef), transform(tree))
- case DefDef(_, nme.CONSTRUCTOR, _, _, _, _)
- if (wasVirtualClass(sym.owner)) =>
- if (atPhase(ownPhase)(sym != sym.owner.primaryConstructor))
- unit.error(tree.pos, "virtual classes cannot have auxiliary constructors")
- List()
- case _ =>
- List(transform(tree))
- }
- }
-
- override def transform(tree0: Tree): Tree = {
- val tree = super.transform(tree0)
- val sym = tree.symbol
- tree match {
- // Replace a new VC().init() where VC is a virtual class with new$VC
- case Apply(Select(New(tpt), name), args) if (sym.isConstructor && wasVirtualClass(sym.owner)) =>
- val clazz = sym.owner
- val fn =
- Select(
- gen.mkAttributedQualifier(tpt.tpe.prefix),
- factory(clazz, clazz.owner).name)
- println("fac "+factory(clazz, clazz.owner).tpe)
- val targs = tpt.tpe.typeArgs
- atPos(tree.pos) {
- localTyper.typed {
- val res =
- Apply(if (targs.isEmpty) fn else TypeApply(fn, targs map TypeTree), args)
- println("typing "+res+" from "+args)
- res
- }
- }
-
- case Template(parents, self, body) if (wasVirtualClass(sym.owner)) =>
- // add param field accessors
- val paramFieldAccessors = new ListBuffer[Tree]
- val paramFields = new ListBuffer[Tree]
- val presupers = new ListBuffer[Tree]
- val others = new ListBuffer[Tree]
- var paramFieldCount = 0
- for (stat <- body) {
- if (stat.symbol != null && (stat.symbol hasFlag PARAMACCESSOR))
- stat match {
- case pacc @ ValDef(mods, name, tpt, rhs) =>
- pacc.symbol resetFlag PARAMACCESSOR setFlag PRESUPER
- val pfield = paramField(sym.owner, paramFieldCount)
- paramFieldCount += 1
- pfield setPos pacc.pos
- paramFields += localTyper.typed(DefDef(pfield, EmptyTree))
- val pfieldRef = localTyper.typed {
- atPos(pacc.pos) {
- Select(This(sym.owner), pfield)
- }
- }
- paramFieldAccessors += treeCopy.ValDef(pacc, mods, name, tpt, pfieldRef)
- case _ =>
- stat.symbol resetFlag PARAMACCESSOR // ??? can we do this
- others += stat
- }
- else
- (if (stat.symbol != null && (stat.symbol hasFlag PRESUPER)) presupers else others) += stat
- }
- treeCopy.Template(tree, parents, self,
- paramFieldAccessors.toList :::
- presupers.toList :::
- paramFields.toList :::
- others.toList)
- case _ =>
- tree setType atPhase(ownPhase)(devirtualizeMap(tree.tpe))
- }
- }
- override def transformUnit(unit: CompilationUnit) = atPhase(ownPhase.next) {
- super.transformUnit(unit)
- }
- }
-}
-
-
-
-/*
- class A {
- trait C[X, Y] <: {
- var x: X
- def f(y: Y): X = { println("A.T"); x }
- }
- class D[X](xp: X) extends C[X, Int] {
- var x: X = xp
- override def f(y: Int) = { println(y); super.f(y) }
- }
- }
- class B extends A {
- override trait C[X, Y] <: {
- override def f(y: Y): X = { println("B.T"); super.f(y) }
- def g: X = x
- }
- }
- object Test extends B {
- val c = new D[String]("OK")
- println(c.g)
- println(c.f(42))
- }
-
-maps to:
-
- class A {
- type C[X, Y] <: C$trait[X, Y]
-
- trait C$trait[X, Y] { this: C with C$trait =>
- var x: X
- def f(y: Y): X = { println("A.T"); x }
- }
-
- class D[X](xp: X) extends C[X, Int] {
- var x: X = xp
- override def f(y: Int) = { println(y); super.f(y) }
- }
-
- protected[this] val x: Int; val y = x; def f(z:Int) = z + 1 }
-
- type D <: C with DT
-
- trait DT extends { self: D => def f(z:Int) = z + 2 }
-
- trait preDT extends { self: D => val z: Int; val x = f(z) }
-
- def newC(x: Int): C
- def newD(x: Int): D
-
- //type C = CT
- //type D = C with DT
-
- class CC(_x:Int) extends { val x = _x } with CT
-
- def newC[X, Y](x:Int): C =
- new CC(x).asInstanceOf[C]
-
- class DC(_z:Int) extends { val z = _z } with preDT with CT with DT {
- override def f(z:Int) = super.f(z)
- }
-
- def newD(z:Int):D = new DC(z).asInstanceOf[D]
- }
-
- class B extends A {
- type C <: CT with CT2
-
- trait CT2 { self : C => def g = 2 }
-
- //type C = CT with CT2
- //type D = C with DT
-
- class CC2(_x:Int) extends { val x = _x } with CT with CT2
-
- def newC(x:Int): C = new CC2(x).asInstanceOf[C]
-
- class DC2(_z:Int) extends { val z = _z } with preDT with CT with CT2
- with DT { override def f(z:Int) = super.f(z) }
-
- def newD(z:Int): D = new DC2(z).asInstanceOf[D]
- }
-
-*/
-
-*/
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
new file mode 100644
index 0000000..3e249e5
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
@@ -0,0 +1,208 @@
+/* NSC -- new Scala compiler
+* Copyright 2005-2013 LAMP/EPFL
+* @author Paul Phillips
+*/
+
+package scala.tools.nsc
+package typechecker
+
+import scala.language.implicitConversions
+
+/** A generic means of breaking down types into their subcomponents.
+ * Types are decomposed top down, and recognizable substructure is
+ * dispatched via self-apparently named methods. Those methods can
+ * be overridden for custom behavior, but only the abstract methods
+ * require implementations, each of which must create some unknown
+ * "Node" type from its inputs.
+ *
+ * - wrapProduct create Node from a product of Nodes
+ * - wrapSequence create Node from a sequence of Nodes
+ * - wrapAtom create Node from an arbitrary value
+ *
+ * This is a work in progress.
+ */
+trait DestructureTypes {
+ val global: Global
+ import global._
+ import definitions.{ NothingClass, AnyClass }
+
+ trait DestructureType[Node] extends (Type => Node) {
+ def withLabel(node: Node, label: String): Node
+ def withType(node: Node, typeName: String): Node
+
+ def wrapEmpty: Node
+ def wrapPoly(in: Node, out: Node): Node
+ def wrapMono(in: Node, out: Node): Node
+ def wrapProduct(nodes: List[Node]): Node
+ def wrapSequence(nodes: List[Node]): Node
+ def wrapAtom[U](value: U): Node
+
+ private implicit def liftToTerm(name: String): TermName = newTermName(name)
+
+ private val openSymbols = scala.collection.mutable.Set[Symbol]()
+
+ private def nodeList[T](elems: List[T], mkNode: T => Node): Node =
+ if (elems.isEmpty) wrapEmpty else list(elems map mkNode)
+
+ private def scopeMemberList(elems: List[Symbol]): Node = nodeList(elems, wrapAtom)
+ private def typeList(elems: List[Type]): Node = nodeList(elems, this)
+ private def symbolList(elems: List[Symbol]): Node = nodeList(elems, wrapSymbolInfo)
+ private def treeList(elems: List[Tree]): Node = nodeList(elems, wrapTree)
+ private def annotationList(annots: List[AnnotationInfo]): Node = nodeList(annots, annotation)
+
+ private def assocsNode(ann: AnnotationInfo): Node = {
+ val (names, args) = ann.assocs.toIndexedSeq.unzip
+ if (names.isEmpty) wrapEmpty
+ else node("assocs", nodeList(names.indices.toList, (i: Int) => atom(names(i).toString, args(i))))
+ }
+ private def typeTypeName(tp: Type) = tp match {
+ case mt @ MethodType(_, _) if mt.isImplicit => "ImplicitMethodType"
+ case TypeRef(_, sym, _) => typeRefType(sym)
+ case _ => tp.kind
+ }
+
+ def wrapTree(tree: Tree): Node = withType(
+ tree match {
+ case x: NameTree => atom(x.name.toString, x)
+ case _ => wrapAtom(tree)
+ },
+ tree.productPrefix
+ )
+ def wrapSymbol(label: String, sym: Symbol): Node = {
+ if (sym eq NoSymbol) wrapEmpty
+ else atom(label, sym)
+ }
+ def wrapInfo(sym: Symbol) = sym.info match {
+ case TypeBounds(lo, hi) => typeBounds(lo, hi)
+ case PolyType(tparams, restpe) => polyFunction(tparams, restpe)
+ case _ => wrapEmpty
+ }
+ def wrapSymbolInfo(sym: Symbol): Node = {
+ if ((sym eq NoSymbol) || openSymbols(sym)) wrapEmpty
+ else {
+ openSymbols += sym
+ try product(symbolType(sym), wrapAtom(sym.defString))
+ finally openSymbols -= sym
+ }
+ }
+
+ def list(nodes: List[Node]): Node = wrapSequence(nodes)
+ def product(tp: Type, nodes: Node*): Node = product(typeTypeName(tp), nodes: _*)
+ def product(typeName: String, nodes: Node*): Node = (
+ nodes.toList filterNot (_ == wrapEmpty) match {
+ case Nil => wrapEmpty
+ case xs => withType(wrapProduct(xs), typeName)
+ }
+ )
+
+ def atom[U](label: String, value: U): Node = node(label, wrapAtom(value))
+ def constant(label: String, const: Constant): Node = atom(label, const)
+
+ def scope(decls: Scope): Node = node("decls", scopeMemberList(decls.toList))
+ def const[T](named: (String, T)): Node = constant(named._1, Constant(named._2))
+
+ def resultType(restpe: Type): Node = this("resultType", restpe)
+ def typeParams(tps: List[Symbol]): Node = node("typeParams", symbolList(tps))
+ def valueParams(params: List[Symbol]): Node = node("params", symbolList(params))
+ def typeArgs(tps: List[Type]): Node = node("args", typeList(tps))
+ def parentList(tps: List[Type]): Node = node("parents", typeList(tps))
+
+ def polyFunction(tparams: List[Symbol], restpe: Type): Node = wrapPoly(typeParams(tparams), resultType(restpe))
+ def monoFunction(params: List[Symbol], restpe: Type): Node = wrapMono(valueParams(params), resultType(restpe))
+ def nullaryFunction(restpe: Type): Node = wrapMono(wrapEmpty, this(restpe))
+
+ def prefix(pre: Type): Node = pre match {
+ case NoPrefix => wrapEmpty
+ case _ => this("pre", pre)
+ }
+ def typeBounds(lo0: Type, hi0: Type): Node = {
+ val lo = if ((lo0 eq WildcardType) || (lo0.typeSymbol eq NothingClass)) wrapEmpty else this("lo", lo0)
+ val hi = if ((hi0 eq WildcardType) || (hi0.typeSymbol eq AnyClass)) wrapEmpty else this("hi", hi0)
+
+ product("TypeBounds", lo, hi)
+ }
+
+ def annotation(ann: AnnotationInfo): Node = product(
+ "AnnotationInfo",
+ this("atp", ann.atp),
+ node("args", treeList(ann.args)),
+ assocsNode(ann)
+ )
+ def typeConstraint(constr: TypeConstraint): Node = product(
+ "TypeConstraint",
+ node("lo", typeList(constr.loBounds)),
+ node("hi", typeList(constr.hiBounds)),
+ this("inst", constr.inst)
+ )
+ def annotatedType(annotations: List[AnnotationInfo], underlying: Type) = product(
+ "AnnotatedType",
+ node("annotations", annotationList(annotations)),
+ this("underlying", underlying)
+ )
+
+ /** This imposes additional structure beyond that which is visible in
+ * the case class hierarchy. In particular, (too) many different constructs
+ * are encoded in TypeRefs; here they are partitioned somewhat before
+ * being dispatched.
+ *
+ * For example, a typical type parameter is encoded as TypeRef(NoPrefix, sym, Nil)
+ * with its upper and lower bounds stored in the info of the symbol. Viewing the
+ * TypeRef naively we are treated to both too much information (useless prefix, usually
+ * empty args) and too little (bounds hidden behind indirection.) So drop the prefix
+ * and promote the bounds.
+ */
+ def typeRef(tp: TypeRef) = {
+ val TypeRef(pre, sym, args) = tp
+ // Filtered down to elements with "interesting" content
+ product(
+ tp,
+ if (sym.isDefinedInPackage) wrapEmpty else prefix(pre),
+ wrapSymbolInfo(sym),
+ typeArgs(args),
+ if (tp ne tp.normalize) this("normalize", tp.normalize) else wrapEmpty
+ )
+ }
+
+ def symbolType(sym: Symbol) = (
+ if (sym.isRefinementClass) "Refinement"
+ else if (sym.isAliasType) "Alias"
+ else if (sym.isTypeSkolem) "TypeSkolem"
+ else if (sym.isTypeParameter) "TypeParam"
+ else if (sym.isAbstractType) "AbstractType"
+ else if (sym.isType) "TypeSymbol"
+ else "TermSymbol"
+ )
+ def typeRefType(sym: Symbol) = (
+ if (sym.isRefinementClass) "RefinementTypeRef"
+ else if (sym.isAliasType) "AliasTypeRef"
+ else if (sym.isTypeSkolem) "SkolemTypeRef"
+ else if (sym.isTypeParameter) "TypeParamTypeRef"
+ else if (sym.isAbstractType) "AbstractTypeRef"
+ else "TypeRef"
+ ) + ( if (sym.isFBounded) "(F-Bounded)" else "" )
+
+ def node(label: String, node: Node): Node = withLabel(node, label)
+ def apply(label: String, tp: Type): Node = withLabel(this(tp), label)
+
+ def apply(tp: Type): Node = tp match {
+ case AntiPolyType(pre, targs) => product(tp, prefix(pre), typeArgs(targs))
+ case ClassInfoType(parents, decls, clazz) => product(tp, parentList(parents), scope(decls), wrapAtom(clazz))
+ case ConstantType(const) => product(tp, constant("value", const))
+ case DeBruijnIndex(level, index, args) => product(tp, const("level" -> level), const("index" -> index), typeArgs(args))
+ case OverloadedType(pre, alts) => product(tp, prefix(pre), node("alts", typeList(alts map pre.memberType)))
+ case RefinedType(parents, decls) => product(tp, parentList(parents), scope(decls))
+ case SingleType(pre, sym) => product(tp, prefix(pre), wrapAtom(sym))
+ case SuperType(thistp, supertp) => product(tp, this("this", thistp), this("super", supertp))
+ case ThisType(clazz) => product(tp, wrapAtom(clazz))
+ case TypeVar(inst, constr) => product(tp, this("inst", inst), typeConstraint(constr))
+ case AnnotatedType(annotations, underlying, _) => annotatedType(annotations, underlying)
+ case ExistentialType(tparams, underlying) => polyFunction(tparams, underlying)
+ case PolyType(tparams, restpe) => polyFunction(tparams, restpe)
+ case MethodType(params, restpe) => monoFunction(params, restpe)
+ case NullaryMethodType(restpe) => nullaryFunction(restpe)
+ case TypeBounds(lo, hi) => typeBounds(lo, hi)
+ case tr @ TypeRef(pre, sym, args) => typeRef(tr)
+ case _ => wrapAtom(tp) // XXX see what this is
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index e75e69d..25a1228 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -1,19 +1,27 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
package scala.tools.nsc
package typechecker
import scala.tools.nsc.symtab.Flags
-
import scala.collection.{ mutable, immutable }
/** Duplicate trees and re-type check them, taking care to replace
* and create fresh symbols for new local definitions.
+ *
+ * @author Iulian Dragos
+ * @version 1.0
*/
abstract class Duplicators extends Analyzer {
import global._
+ import definitions.{ AnyRefClass, AnyValClass }
def retyped(context: Context, tree: Tree): Tree = {
resetClassOwners
- (new BodyDuplicator(context)).typed(tree)
+ (newBodyDuplicator(context)).typed(tree)
}
/** Retype the given tree in the given context. Use this method when retyping
@@ -21,23 +29,23 @@ abstract class Duplicators extends Analyzer {
* the old class with the new class, and map symbols through the given 'env'. The
* environment is a map from type skolems to concrete types (see SpecializedTypes).
*/
- def retyped(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol, env: collection.Map[Symbol, Type]): Tree = {
+ def retyped(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol, env: scala.collection.Map[Symbol, Type]): Tree = {
if (oldThis ne newThis) {
oldClassOwner = oldThis
newClassOwner = newThis
} else resetClassOwners
envSubstitution = new SubstSkolemsTypeMap(env.keysIterator.toList, env.valuesIterator.toList)
- log("retyped with env: " + env)
- (new BodyDuplicator(context)).typed(tree)
+ debuglog("retyped with env: " + env)
+
+ newBodyDuplicator(context).typed(tree)
}
- def retypedMethod(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol): Tree =
- (new BodyDuplicator(context)).retypedMethod(tree.asInstanceOf[DefDef], oldThis, newThis)
+ protected def newBodyDuplicator(context: Context) = new BodyDuplicator(context)
/** Return the special typer for duplicate method bodies. */
override def newTyper(context: Context): Typer =
- new BodyDuplicator(context)
+ newBodyDuplicator(context)
private def resetClassOwners() {
oldClassOwner = null
@@ -61,7 +69,7 @@ abstract class Duplicators extends Analyzer {
* tree, except for TypeTrees, are erased prior to type checking. TypeTrees
* are fixed by substituting invalid symbols for the new ones.
*/
- class BodyDuplicator(context: Context) extends Typer(context: Context) {
+ class BodyDuplicator(_context: Context) extends Typer(_context) {
class FixInvalidSyms extends TypeMap {
@@ -71,17 +79,27 @@ abstract class Duplicators extends Analyzer {
override def mapOver(tpe: Type): Type = tpe match {
case TypeRef(NoPrefix, sym, args) if sym.isTypeParameterOrSkolem =>
- val sym1 = context.scope.lookup(sym.name)
+ var sym1 = context.scope.lookup(sym.name)
+ if (sym1 eq NoSymbol) {
+ // try harder (look in outer scopes)
+ // with virtpatmat, this can happen when the sym is referenced in the scope of a LabelDef but is defined in the scope of an outer DefDef (e.g., in AbstractPartialFunction's andThen)
+ BodyDuplicator.super.silent(_.typedType(Ident(sym.name))) match {
+ case SilentResultValue(t) =>
+ sym1 = t.symbol
+ debuglog("fixed by trying harder: "+(sym, sym1, context))
+ case _ =>
+ }
+ }
// assert(sym1 ne NoSymbol, tpe)
if ((sym1 ne NoSymbol) && (sym1 ne sym)) {
- log("fixing " + sym + " -> " + sym1)
+ debuglog("fixing " + sym + " -> " + sym1)
typeRef(NoPrefix, sym1, mapOverArgs(args, sym1.typeParams))
} else super.mapOver(tpe)
case TypeRef(pre, sym, args) =>
val newsym = updateSym(sym)
if (newsym ne sym) {
- log("fixing " + sym + " -> " + newsym)
+ debuglog("fixing " + sym + " -> " + newsym)
typeRef(mapOver(pre), newsym, mapOverArgs(args, newsym.typeParams))
} else
super.mapOver(tpe)
@@ -89,7 +107,7 @@ abstract class Duplicators extends Analyzer {
case SingleType(pre, sym) =>
val sym1 = updateSym(sym)
if (sym1 ne sym) {
- log("fixing " + sym + " -> " + sym1)
+ debuglog("fixing " + sym + " -> " + sym1)
singleType(mapOver(pre), sym1)
} else
super.mapOver(tpe)
@@ -97,7 +115,7 @@ abstract class Duplicators extends Analyzer {
case ThisType(sym) =>
val sym1 = updateSym(sym)
if (sym1 ne sym) {
- log("fixing " + sym + " -> " + sym1)
+ debuglog("fixing " + sym + " -> " + sym1)
ThisType(sym1)
} else
super.mapOver(tpe)
@@ -118,31 +136,42 @@ abstract class Duplicators extends Analyzer {
tpe3
}
- /** Return the new symbol corresponding to `sym'. */
+ /** Return the new symbol corresponding to `sym`. */
private def updateSym(sym: Symbol): Symbol =
if (invalidSyms.isDefinedAt(sym))
invalidSyms(sym).symbol
else
sym
- private def invalidate(tree: Tree) {
+ private def invalidate(tree: Tree, owner: Symbol = NoSymbol) {
+ debuglog("attempting to invalidate " + tree.symbol)
if (tree.isDef && tree.symbol != NoSymbol) {
- log("invalid " + tree.symbol)
+ debuglog("invalid " + tree.symbol)
invalidSyms(tree.symbol) = tree
tree match {
case ldef @ LabelDef(name, params, rhs) =>
- log("LabelDef " + name + " sym.info: " + ldef.symbol.info)
+ debuglog("LabelDef " + name + " sym.info: " + ldef.symbol.info)
invalidSyms(ldef.symbol) = ldef
// breakIf(true, this, ldef, context)
val newsym = ldef.symbol.cloneSymbol(context.owner)
newsym.setInfo(fixType(ldef.symbol.info))
ldef.symbol = newsym
- log("newsym: " + newsym + " info: " + newsym.info)
+ debuglog("newsym: " + newsym + " info: " + newsym.info)
+
+ case vdef @ ValDef(mods, name, _, rhs) if mods.hasFlag(Flags.LAZY) =>
+ debuglog("ValDef " + name + " sym.info: " + vdef.symbol.info)
+ invalidSyms(vdef.symbol) = vdef
+ val newowner = if (owner != NoSymbol) owner else context.owner
+ val newsym = vdef.symbol.cloneSymbol(newowner)
+ newsym.setInfo(fixType(vdef.symbol.info))
+ vdef.symbol = newsym
+ debuglog("newsym: " + newsym + " info: " + newsym.info + ", owner: " + newsym.owner + ", " + newsym.owner.isClass)
+ if (newsym.owner.isClass) newsym.owner.info.decls enter newsym
case DefDef(_, name, tparams, vparamss, _, rhs) =>
// invalidate parameters
- invalidate(tparams ::: vparamss.flatten)
+ invalidateAll(tparams ::: vparamss.flatten)
tree.symbol = NoSymbol
case _ =>
@@ -151,23 +180,8 @@ abstract class Duplicators extends Analyzer {
}
}
- private def invalidate(stats: List[Tree]) {
- stats foreach invalidate
- }
-
-
- def retypedMethod(ddef: DefDef, oldThis: Symbol, newThis: Symbol): Tree = {
- oldClassOwner = oldThis
- newClassOwner = newThis
- invalidate(ddef.tparams)
- for (vdef <- ddef.vparamss.flatten) {
- invalidate(vdef)
- vdef.tpe = null
- }
- ddef.symbol = NoSymbol
- enterSym(context, ddef)
- log("remapping this of " + oldClassOwner + " to " + newClassOwner)
- typed(ddef)
+ private def invalidateAll(stats: List[Tree], owner: Symbol = NoSymbol) {
+ stats.foreach(invalidate(_, owner))
}
private def inspectTpe(tpe: Type) = {
@@ -181,6 +195,11 @@ abstract class Duplicators extends Analyzer {
}
}
+ /** Optionally cast this tree into some other type, if required.
+ * Unless overridden, just returns the tree.
+ */
+ def castType(tree: Tree, pt: Type): Tree = tree
+
/** Special typer method for re-type checking trees. It expects a typed tree.
* Returns a typed tree that has fresh symbols for all definitions in the original tree.
*
@@ -196,30 +215,32 @@ abstract class Duplicators extends Analyzer {
* namer/typer handle them, or Idents that refer to them.
*/
override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
- if (settings.debug.value) log("typing " + tree + ": " + tree.tpe)
+ debuglog("typing " + tree + ": " + tree.tpe + ", " + tree.getClass)
+ val origtreesym = tree.symbol
if (tree.hasSymbol && tree.symbol != NoSymbol
&& !tree.symbol.isLabel // labels cannot be retyped by the type checker as LabelDef has no ValDef/return type trees
&& invalidSyms.isDefinedAt(tree.symbol)) {
- if (settings.debug.value) log("removed symbol " + tree.symbol)
+ debuglog("removed symbol " + tree.symbol)
tree.symbol = NoSymbol
}
tree match {
case ttree @ TypeTree() =>
- log("fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol)
+ // log("fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol)
ttree.tpe = fixType(ttree.tpe)
ttree
+
case Block(stats, res) =>
- log("invalidating block")
- invalidate(stats)
+ debuglog("invalidating block")
+ invalidateAll(stats)
invalidate(res)
tree.tpe = null
super.typed(tree, mode, pt)
case ClassDef(_, _, _, tmpl @ Template(parents, _, stats)) =>
-// log("invalidating classdef " + tree.tpe)
+ // log("invalidating classdef " + tree)
tmpl.symbol = tree.symbol.newLocalDummy(tree.pos)
- invalidate(stats)
+ invalidateAll(stats, tree.symbol)
tree.tpe = null
super.typed(tree, mode, pt)
@@ -228,17 +249,38 @@ abstract class Duplicators extends Analyzer {
ddef.tpe = null
super.typed(ddef, mode, pt)
- case vdef @ ValDef(_, _, tpt, rhs) =>
-// log("vdef fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol + " and " + invalidSyms)
+ case vdef @ ValDef(mods, name, tpt, rhs) =>
+ // log("vdef fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol + " and " + invalidSyms)
+ //if (mods.hasFlag(Flags.LAZY)) vdef.symbol.resetFlag(Flags.MUTABLE) // Martin to Iulian: lazy vars can now appear because they are no longer boxed; Please check that deleting this statement is OK.
vdef.tpt.tpe = fixType(vdef.tpt.tpe)
vdef.tpe = null
super.typed(vdef, mode, pt)
case ldef @ LabelDef(name, params, rhs) =>
// log("label def: " + ldef)
+ // in case the rhs contains any definitions -- TODO: is this necessary?
+ invalidate(rhs)
ldef.tpe = null
- val params1 = params map { p => Ident(updateSym(p.symbol)) }
- super.typed(treeCopy.LabelDef(tree, name, params1, rhs), mode, pt)
+
+ // is this LabelDef generated by tailcalls?
+ val isTailLabel = (ldef.params.length >= 1) && (ldef.params.head.name == nme.THIS)
+
+ // the typer does not create the symbols for a LabelDef's params, so unless they were created before we need
+ // to do it manually here -- but for the tailcalls-generated labels, ValDefs are created before the LabelDef,
+ // so we just need to change the tree to point to the updated symbols
+ def newParam(p: Tree): Ident =
+ if (isTailLabel)
+ Ident(updateSym(p.symbol))
+ else {
+ val newsym = p.symbol.cloneSymbol //(context.owner) // TODO owner?
+ Ident(newsym.setInfo(fixType(p.symbol.info)))
+ }
+
+ val params1 = params map newParam
+ val rhs1 = (new TreeSubstituter(params map (_.symbol), params1) transform rhs) // TODO: duplicate?
+ rhs1.tpe = null
+
+ super.typed(treeCopy.LabelDef(tree, name, params1, rhs1), mode, pt)
case Bind(name, _) =>
// log("bind: " + tree)
@@ -247,33 +289,59 @@ abstract class Duplicators extends Analyzer {
super.typed(tree, mode, pt)
case Ident(_) if tree.symbol.isLabel =>
- log("Ident to labeldef " + tree + " switched to ")
+ debuglog("Ident to labeldef " + tree + " switched to ")
tree.symbol = updateSym(tree.symbol)
tree.tpe = null
super.typed(tree, mode, pt)
+ case Ident(_) if (origtreesym ne null) && origtreesym.isLazy =>
+ debuglog("Ident to a lazy val " + tree + ", " + tree.symbol + " updated to " + origtreesym)
+ tree.symbol = updateSym(origtreesym)
+ tree.tpe = null
+ super.typed(tree, mode, pt)
+
case Select(th @ This(_), sel) if (oldClassOwner ne null) && (th.symbol == oldClassOwner) =>
- // log("selection on this, no type ascription required")
- // we use the symbol name instead of the tree name because the symbol may have been
- // name mangled, rendering the tree name obsolete
- // log(tree)
- val t = super.typed(atPos(tree.pos)(Select(This(newClassOwner), tree.symbol.name)), mode, pt)
- // log("typed to: " + t + "; tpe = " + t.tpe + "; " + inspectTpe(t.tpe))
- t
+ // We use the symbol name instead of the tree name because the symbol
+ // may have been name mangled, rendering the tree name obsolete.
+ // ...but you can't just do a Select on a name because if the symbol is
+ // overloaded, you will crash in the backend.
+ val memberByName = newClassOwner.thisType.member(tree.symbol.name)
+ def nameSelection = Select(This(newClassOwner), tree.symbol.name)
+ val newTree = (
+ if (memberByName.isOverloaded) {
+ // Find the types of the overload alternatives as seen in the new class,
+ // and filter the list down to those which match the old type (after
+ // fixing the old type so it is seen as if from the new class.)
+ val typeInNewClass = fixType(oldClassOwner.info memberType tree.symbol)
+ val alts = memberByName.alternatives
+ val memberTypes = alts map (newClassOwner.info memberType _)
+ val memberString = memberByName.defString
+ alts zip memberTypes filter (_._2 =:= typeInNewClass) match {
+ case ((alt, tpe)) :: Nil =>
+ log(s"Arrested overloaded type in Duplicators, narrowing to ${alt.defStringSeenAs(tpe)}\n Overload was: $memberString")
+ Select(This(newClassOwner), alt)
+ case _ =>
+ log(s"Could not disambiguate $memberString in Duplicators. Attempting name-based selection, but this may not end well...")
+ nameSelection
+ }
+ }
+ else nameSelection
+ )
+ super.typed(atPos(tree.pos)(newTree), mode, pt)
case This(_) if (oldClassOwner ne null) && (tree.symbol == oldClassOwner) =>
// val tree1 = Typed(This(newClassOwner), TypeTree(fixType(tree.tpe.widen)))
// log("selection on this: " + tree)
val tree1 = This(newClassOwner)
// log("tree1: " + tree1)
- if (settings.debug.value) log("mapped " + tree + " to " + tree1)
- super.typed(atPos(tree.pos)(tree1), mode, pt)
+ debuglog("mapped " + tree + " to " + tree1)
+ super.typedPos(tree.pos, mode, pt)(tree1)
case This(_) =>
- // log("selection on this, plain: " + tree)
+ debuglog("selection on this, plain: " + tree)
tree.symbol = updateSym(tree.symbol)
- tree.tpe = null
- val tree1 = super.typed(tree, mode, pt)
+ val ntree = castType(tree, pt)
+ val tree1 = super.typed(ntree, mode, pt)
// log("plain this typed to: " + tree1)
tree1
/* no longer needed, because Super now contains a This(...)
@@ -283,17 +351,26 @@ abstract class Duplicators extends Analyzer {
super.typed(atPos(tree.pos)(tree1))
*/
case Match(scrut, cases) =>
- val scrut1 = typed(scrut, EXPRmode | BYVALmode, WildcardType)
+ val scrut1 = typed(scrut, EXPRmode | BYVALmode, WildcardType)
val scrutTpe = scrut1.tpe.widen
- val cases1 = if (scrutTpe.isFinalType) cases filter {
- case CaseDef(Bind(_, pat @ Typed(_, tpt)), EmptyTree, body) =>
- // the typed pattern is not incompatible with the scrutinee type
- scrutTpe.matchesPattern(fixType(tpt.tpe))
- case CaseDef(Typed(_, tpt), EmptyTree, body) =>
- // the typed pattern is not incompatible with the scrutinee type
- scrutTpe.matchesPattern(fixType(tpt.tpe))
- case _ => true
- } else cases
+ val cases1 = {
+ if (scrutTpe.isFinalType) cases filter {
+ case CaseDef(Bind(_, pat @ Typed(_, tpt)), EmptyTree, body) =>
+ // the typed pattern is not incompatible with the scrutinee type
+ scrutTpe matchesPattern fixType(tpt.tpe)
+ case CaseDef(Typed(_, tpt), EmptyTree, body) =>
+ // the typed pattern is not incompatible with the scrutinee type
+ scrutTpe matchesPattern fixType(tpt.tpe)
+ case _ => true
+ }
+ // Without this, AnyRef specializations crash on patterns like
+ // case _: Boolean => ...
+ // Not at all sure this is safe.
+ else if (scrutTpe <:< AnyRefClass.tpe)
+ cases filterNot (_.pat.tpe <:< AnyValClass.tpe)
+ else
+ cases
+ }
super.typed(atPos(tree.pos)(Match(scrut, cases1)), mode, pt)
@@ -302,14 +379,17 @@ abstract class Duplicators extends Analyzer {
tree
case _ =>
- // log("default: " + tree)
+ debuglog("Duplicators default case: " + tree.summaryString)
+ debuglog(" ---> " + tree)
if (tree.hasSymbol && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) {
tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
}
- tree.tpe = null
- super.typed(tree, mode, pt)
+ val ntree = castType(tree, pt)
+ val res = super.typed(ntree, mode, pt)
+ res
}
}
+
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index eee75e1..57b9dfe 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -47,7 +47,7 @@ trait EtaExpansion { self: Analyzer =>
* tree is already attributed
* </p>
*/
- def etaExpand(unit : CompilationUnit, tree: Tree): Tree = {
+ def etaExpand(unit : CompilationUnit, tree: Tree, typer: Typer): Tree = {
val tpe = tree.tpe
var cnt = 0 // for NoPosition
def freshName() = {
@@ -63,15 +63,22 @@ trait EtaExpansion { self: Analyzer =>
* @return ...
*/
def liftoutPrefix(tree: Tree): Tree = {
- def liftout(tree: Tree): Tree =
- if (treeInfo.isPureExpr(tree)) tree
+ def liftout(tree: Tree, byName: Boolean): Tree =
+ if (treeInfo.isExprSafeToInline(tree)) tree
else {
val vname: Name = freshName()
// Problem with ticket #2351 here
defs += atPos(tree.pos) {
- ValDef(Modifiers(SYNTHETIC), vname.toTermName, TypeTree(), tree)
+ val rhs = if (byName) {
+ val res = typer.typed(Function(List(), tree))
+ new ChangeOwnerTraverser(typer.context.owner, res.symbol) traverse tree // SI-6274
+ res
+ } else tree
+ ValDef(Modifiers(SYNTHETIC), vname.toTermName, TypeTree(), rhs)
+ }
+ atPos(tree.pos.focus) {
+ if (byName) Apply(Ident(vname), List()) else Ident(vname)
}
- Ident(vname) setPos tree.pos.focus
}
val tree1 = tree match {
// a partial application using named arguments has the following form:
@@ -80,16 +87,22 @@ trait EtaExpansion { self: Analyzer =>
// [...]
// val x$n = argn
// qual$1.fun(x$1, ..)..(.., x$n) }
- // Eta-expansion has to be performed on `fun'
+ // Eta-expansion has to be performed on `fun`
case Block(stats, fun) =>
defs ++= stats
liftoutPrefix(fun)
case Apply(fn, args) =>
- treeCopy.Apply(tree, liftoutPrefix(fn), args mapConserve (liftout)) setType null
+ val byName: Int => Option[Boolean] = fn.tpe.params.map(p => definitions.isByNameParamType(p.tpe)).lift
+ val newArgs = mapWithIndex(args) { (arg, i) =>
+ // with repeated params, there might be more or fewer args than params
+ liftout(arg, byName(i).getOrElse(false))
+ }
+ treeCopy.Apply(tree, liftoutPrefix(fn), newArgs) setType null
case TypeApply(fn, args) =>
treeCopy.TypeApply(tree, liftoutPrefix(fn), args) setType null
case Select(qual, name) =>
- treeCopy.Select(tree, liftout(qual), name) setSymbol NoSymbol setType null
+ val name = tree.symbol.name // account for renamed imports, SI-7233
+ treeCopy.Select(tree, liftout(qual, false), name) setSymbol NoSymbol setType null
case Ident(name) =>
tree
}
@@ -101,11 +114,20 @@ trait EtaExpansion { self: Analyzer =>
*/
def expand(tree: Tree, tpe: Type): Tree = tpe match {
case mt @ MethodType(paramSyms, restpe) if !mt.isImplicit =>
- val params = paramSyms map (sym =>
- ValDef(Modifiers(SYNTHETIC | PARAM),
- sym.name.toTermName, TypeTree(sym.tpe) , EmptyTree))
+ val params: List[(ValDef, Boolean)] = paramSyms.map {
+ sym =>
+ val origTpe = sym.tpe
+ val isRepeated = definitions.isRepeatedParamType(origTpe)
+ // SI-4176 Don't leak A* in eta-expanded function types. See t4176b.scala
+ val droppedStarTpe = if (settings.etaExpandKeepsStar.value) origTpe else dropRepeatedParamType(origTpe)
+ val valDef = ValDef(Modifiers(SYNTHETIC | PARAM), sym.name.toTermName, TypeTree(droppedStarTpe), EmptyTree)
+ (valDef, isRepeated)
+ }
atPos(tree.pos.makeTransparent) {
- Function(params, expand(Apply(tree, params map gen.paramToArg), restpe))
+ val args = params.map {
+ case (valDef, isRepeated) => gen.paramToArg(Ident(valDef.name), isRepeated)
+ }
+ Function(params.map(_._1), expand(Apply(tree, args), restpe))
}
case _ =>
tree
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index b6e5c7f..35a4461 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -11,12 +11,13 @@
package scala.tools.nsc
package typechecker
-import annotation.tailrec
+import scala.annotation.tailrec
import scala.collection.{ mutable, immutable }
-import mutable.{ HashMap, LinkedHashMap, ListBuffer }
+import mutable.{ LinkedHashMap, ListBuffer }
import scala.util.matching.Regex
import symtab.Flags._
-import util.Statistics._
+import scala.reflect.internal.util.Statistics
+import scala.language.implicitConversions
/** This trait provides methods to find various kinds of implicits.
*
@@ -28,25 +29,39 @@ trait Implicits {
import global._
import definitions._
+ import ImplicitsStats._
import typeDebug.{ ptTree, ptBlock, ptLine }
import global.typer.{ printTyping, deindentTyping, indentTyping, printInference }
+ def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult =
+ inferImplicit(tree, pt, reportAmbiguous, isView, context, true, tree.pos)
+
+ def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean): SearchResult =
+ inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent, tree.pos)
+
/** Search for an implicit value. See the comment on `result` at the end of class `ImplicitSearch`
* for more info how the search is conducted.
- * @param tree The tree for which the implicit needs to be inserted.
- * (the inference might instantiate some of the undetermined
- * type parameters of that tree.
- * @param pt The expected type of the implicit.
- * @param reportAmbiguous Should ambiguous implicit errors be reported?
- * False iff we search for a view to find out
- * whether one type is coercible to another.
- * @param isView We are looking for a view
- * @param context The current context
- * @return A search result
+ * @param tree The tree for which the implicit needs to be inserted.
+ * (the inference might instantiate some of the undetermined
+ * type parameters of that tree.
+ * @param pt The expected type of the implicit.
+ * @param reportAmbiguous Should ambiguous implicit errors be reported?
+ * False iff we search for a view to find out
+ * whether one type is coercible to another.
+ * @param isView We are looking for a view
+ * @param context The current context
+ * @param saveAmbiguousDivergent False if any divergent/ambiguous errors should be ignored after
+ * implicits search,
+ * true if they should be reported (used in further typechecking).
+ * @param pos Position that is should be used for tracing and error reporting
+ * (useful when we infer synthetic stuff and pass EmptyTree in the `tree` argument)
+ * If it's set NoPosition, then position-based services will use `tree.pos`
+ * @return A search result
*/
- def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult = {
- printInference("[inferImplicit%s] pt = %s".format(
- if (isView) " view" else "", pt)
+ def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean, pos: Position): SearchResult = {
+ printInference("[infer %s] %s with pt=%s in %s".format(
+ if (isView) "view" else "implicit",
+ tree, pt, context.owner.enclClass)
)
printTyping(
ptBlock("infer implicit" + (if (isView) " view" else ""),
@@ -57,33 +72,83 @@ trait Implicits {
)
indentTyping()
- val rawTypeStart = startCounter(rawTypeImpl)
- val findMemberStart = startCounter(findMemberImpl)
- val subtypeStart = startCounter(subtypeImpl)
- val start = startTimer(implicitNanos)
+ val rawTypeStart = if (Statistics.canEnable) Statistics.startCounter(rawTypeImpl) else null
+ val findMemberStart = if (Statistics.canEnable) Statistics.startCounter(findMemberImpl) else null
+ val subtypeStart = if (Statistics.canEnable) Statistics.startCounter(subtypeImpl) else null
+ val start = if (Statistics.canEnable) Statistics.startTimer(implicitNanos) else null
if (printInfers && !tree.isEmpty && !context.undetparams.isEmpty)
printTyping("typing implicit: %s %s".format(tree, context.undetparamsString))
+ val implicitSearchContext = context.makeImplicit(reportAmbiguous)
+ val result = new ImplicitSearch(tree, pt, isView, implicitSearchContext, pos).bestImplicit
+ if ((result.isFailure || !settings.Xdivergence211.value) && saveAmbiguousDivergent && implicitSearchContext.hasErrors) {
+ context.updateBuffer(implicitSearchContext.errBuffer.filter(err => err.kind == ErrorKinds.Ambiguous || err.kind == ErrorKinds.Divergent))
+ debugwarn("update buffer: " + implicitSearchContext.errBuffer)
+ }
+ printInference("[infer implicit] inferred " + result)
+ context.undetparams = context.undetparams filterNot result.subst.from.contains
- val result = new ImplicitSearch(tree, pt, isView, context.makeImplicit(reportAmbiguous)).bestImplicit
- printInference("[inferImplicit] result: " + result)
- context.undetparams = context.undetparams filterNot result.subst.fromContains
-
- stopTimer(implicitNanos, start)
- stopCounter(rawTypeImpl, rawTypeStart)
- stopCounter(findMemberImpl, findMemberStart)
- stopCounter(subtypeImpl, subtypeStart)
+ if (Statistics.canEnable) Statistics.stopTimer(implicitNanos, start)
+ if (Statistics.canEnable) Statistics.stopCounter(rawTypeImpl, rawTypeStart)
+ if (Statistics.canEnable) Statistics.stopCounter(findMemberImpl, findMemberStart)
+ if (Statistics.canEnable) Statistics.stopCounter(subtypeImpl, subtypeStart)
deindentTyping()
printTyping("Implicit search yielded: "+ result)
result
}
+ /** A friendly wrapper over inferImplicit to be used in macro contexts and toolboxes.
+ */
+ def inferImplicit(tree: Tree, pt: Type, isView: Boolean, context: Context, silent: Boolean, withMacrosDisabled: Boolean, pos: Position, onError: (Position, String) => Unit): Tree = {
+ val wrapper1 = if (!withMacrosDisabled) (context.withMacrosEnabled[SearchResult] _) else (context.withMacrosDisabled[SearchResult] _)
+ def wrapper(inference: => SearchResult) = wrapper1(inference)
+ def fail(reason: Option[String]) = {
+ if (!silent) {
+ if (context.hasErrors) onError(context.errBuffer.head.errPos, context.errBuffer.head.errMsg)
+ else onError(pos, reason getOrElse "implicit search has failed. to find out the reason, turn on -Xlog-implicits")
+ }
+ EmptyTree
+ }
+ try {
+ wrapper(inferImplicit(tree, pt, reportAmbiguous = true, isView = isView, context = context, saveAmbiguousDivergent = !silent, pos = pos)) match {
+ case failure if failure.tree.isEmpty => fail(None)
+ case success => success.tree
+ }
+ } catch {
+ case ex: DivergentImplicit =>
+ if (settings.Xdivergence211.value)
+ debugwarn("this shouldn't happen. DivergentImplicit exception has been thrown with -Xdivergence211 turned on: "+ex)
+ fail(Some("divergent implicit expansion"))
+ }
+ }
+
+ /** Find all views from type `tp` (in which `tpars` are free)
+ *
+ * Note that the trees in the search results in the returned list share the same type variables.
+ * Ignore their constr field! The list of type constraints returned along with each tree specifies the constraints that
+ * must be met by the corresponding type parameter in `tpars` (for the returned implicit view to be valid).
+ *
+ * @arg tp from-type for the implicit conversion
+ * @arg context search implicits here
+ * @arg tpars symbols that should be considered free type variables
+ * (implicit search should not try to solve them, just track their constraints)
+ */
+ def allViewsFrom(tp: Type, context: Context, tpars: List[Symbol]): List[(SearchResult, List[TypeConstraint])] = {
+ // my untouchable typevars are better than yours (they can't be constrained by them)
+ val tvars = tpars map (TypeVar untouchable _)
+ val tpSubsted = tp.subst(tpars, tvars)
+
+ val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyClass.tpe), true, context.makeImplicit(false))
+
+ search.allImplicitsPoly(tvars)
+ }
+
private final val sizeLimit = 50000
private type Infos = List[ImplicitInfo]
private type Infoss = List[List[ImplicitInfo]]
private type InfoMap = LinkedHashMap[Symbol, List[ImplicitInfo]] // A map from class symbols to their associated implicits
private val implicitsCache = new LinkedHashMap[Type, Infoss]
private val infoMapCache = new LinkedHashMap[Symbol, InfoMap]
- private val improvesCache = new HashMap[(ImplicitInfo, ImplicitInfo), Boolean]
+ private val improvesCache = perRunCaches.newMap[(ImplicitInfo, ImplicitInfo), Boolean]()
def resetImplicits() {
implicitsCache.clear()
@@ -91,7 +156,15 @@ trait Implicits {
improvesCache.clear()
}
- private val ManifestSymbols = Set(PartialManifestClass, FullManifestClass, OptManifestClass)
+ /* Map a polytype to one in which all type parameters and argument-dependent types are replaced by wildcards.
+ * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate DebruijnIndex types
+ * when checking whether `b` is a valid implicit, as we haven't even searched a value for the implicit arg `x`,
+ * so we have to approximate (otherwise it is excluded a priori).
+ */
+ private def depoly(tp: Type): Type = tp match {
+ case PolyType(tparams, restpe) => deriveTypeWithWildcards(tparams)(ApproximateDependentMap(restpe))
+ case _ => ApproximateDependentMap(tp)
+ }
/** The result of an implicit search
* @param tree The tree representing the implicit
@@ -101,9 +174,28 @@ trait Implicits {
class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter) {
override def toString = "SearchResult(%s, %s)".format(tree,
if (subst.isEmpty) "" else subst)
+
+ def isFailure = false
+ def isAmbiguousFailure = false
+ // only used when -Xdivergence211 is turned on
+ def isDivergent = false
+ final def isSuccess = !isFailure
}
- lazy val SearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter)
+ lazy val SearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
+ override def isFailure = true
+ }
+
+ // only used when -Xdivergence211 is turned on
+ lazy val DivergentSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
+ override def isFailure = true
+ override def isDivergent = true
+ }
+
+ lazy val AmbiguousSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
+ override def isFailure = true
+ override def isAmbiguousFailure = true
+ }
/** A class that records an available implicit
* @param name The name of the implicit
@@ -119,6 +211,10 @@ trait Implicits {
tpeCache
}
+ def isCyclicOrErroneous =
+ try sym.hasFlag(LOCKED) || containsError(tpe)
+ catch { case _: CyclicReference => true }
+
var useCountArg: Int = 0
var useCountView: Int = 0
@@ -129,15 +225,21 @@ trait Implicits {
containsError(restpe)
case NullaryMethodType(restpe) =>
containsError(restpe)
- case MethodType(params, restpe) =>
- params.exists(_.tpe.isError) || containsError(restpe)
+ case mt @ MethodType(_, restpe) =>
+ (mt.paramTypes exists typeIsError) || containsError(restpe)
case _ =>
tp.isError
}
- def isCyclicOrErroneous =
- try containsError(tpe)
- catch { case _: CyclicReference => true }
+ /** Todo reconcile with definition of stability given in Types.scala */
+ private def isStable(tp: Type): Boolean = tp match {
+ case TypeRef(pre, sym, _) =>
+ sym.isPackageClass ||
+ sym.isModuleClass && isStable(pre) /*||
+ sym.isAliasType && isStable(tp.normalize)*/
+ case _ => tp.isStable
+ }
+ def isStablePrefix = isStable(pre)
override def equals(other: Any) = other match {
case that: ImplicitInfo =>
@@ -150,6 +252,10 @@ trait Implicits {
override def toString = name + ": " + tpe
}
+ /** A class which is used to track pending implicits to prevent infinite implicit searches.
+ */
+ case class OpenImplicit(info: ImplicitInfo, pt: Type, tree: Tree)
+
/** A sentinel indicating no implicit was found */
val NoImplicitInfo = new ImplicitInfo(null, NoType, NoSymbol) {
// equals used to be implemented in ImplicitInfo with an `if(this eq NoImplicitInfo)`
@@ -158,43 +264,36 @@ trait Implicits {
override def hashCode = 1
}
- /** A constructor for types ?{ name: tp }, used in infer view to member
+ /** A constructor for types ?{ def/type name: tp }, used in infer view to member
* searches.
*/
def memberWildcardType(name: Name, tp: Type) = {
val result = refinedType(List(WildcardType), NoSymbol)
- var psym = name match {
- case x: TypeName => result.typeSymbol.newAbstractType(NoPosition, x)
- case x: TermName => result.typeSymbol.newValue(NoPosition, x)
+ name match {
+ case x: TermName => result.typeSymbol.newMethod(x) setInfoAndEnter tp
+ case x: TypeName => result.typeSymbol.newAbstractType(x) setInfoAndEnter tp
}
- psym setInfo tp
- result.decls enter psym
result
}
/** An extractor for types of the form ? { name: ? }
*/
object HasMember {
- private val hasMemberCache = new mutable.HashMap[Name, Type]
+ private val hasMemberCache = perRunCaches.newMap[Name, Type]()
def apply(name: Name): Type = hasMemberCache.getOrElseUpdate(name, memberWildcardType(name, WildcardType))
def unapply(pt: Type): Option[Name] = pt match {
- case RefinedType(List(WildcardType), decls) =>
- decls.toList match {
- case List(sym) if sym.tpe == WildcardType => Some(sym.name)
- case _ => None
- }
- case _ =>
- None
+ case RefinedType(List(WildcardType), Scope(sym)) if sym.tpe == WildcardType => Some(sym.name)
+ case _ => None
}
}
/** An extractor for types of the form ? { name: (? >: argtpe <: Any*)restp }
*/
object HasMethodMatching {
+ val dummyMethod = NoSymbol.newTermSymbol(newTermName("typer$dummy"))
+ def templateArgType(argtpe: Type) = new BoundedWildcardType(TypeBounds.lower(argtpe))
+
def apply(name: Name, argtpes: List[Type], restpe: Type): Type = {
- def templateArgType(argtpe: Type) =
- new BoundedWildcardType(TypeBounds(argtpe, AnyClass.tpe))
- val dummyMethod = new TermSymbol(NoSymbol, NoPosition, "typer$dummy")
val mtpe = MethodType(dummyMethod.newSyntheticValueParams(argtpes map templateArgType), restpe)
memberWildcardType(name, mtpe)
}
@@ -219,7 +318,7 @@ trait Implicits {
object Function1 {
val Sym = FunctionClass(1)
def unapply(tp: Type) = tp match {
- case TypeRef(_, Sym, arg1 :: arg2 :: _) => Some(arg1, arg2)
+ case TypeRef(_, Sym, arg1 :: arg2 :: _) => Some((arg1, arg2))
case _ => None
}
}
@@ -229,9 +328,12 @@ trait Implicits {
* @param pt The original expected type of the implicit.
* @param isView We are looking for a view
* @param context0 The context used for the implicit search
+ * @param pos0 Position that is preferable for use in tracing and error reporting
+ * (useful when we infer synthetic stuff and pass EmptyTree in the `tree` argument)
+ * If it's set to NoPosition, then position-based services will use `tree.pos`
*/
- class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context)
- extends Typer(context0) {
+ class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context, pos0: Position = NoPosition)
+ extends Typer(context0) with ImplicitsContextErrors {
printTyping(
ptBlock("new ImplicitSearch",
"tree" -> tree,
@@ -242,17 +344,24 @@ trait Implicits {
)
)
// assert(tree.isEmpty || tree.pos.isDefined, tree)
+ def pos = if (pos0 != NoPosition) pos0 else tree.pos
+
+ def failure(what: Any, reason: String, pos: Position = this.pos): SearchResult = {
+ if (settings.XlogImplicits.value)
+ reporter.echo(pos, what+" is not a valid implicit value for "+pt+" because:\n"+reason)
+ SearchFailure
+ }
import infer._
/** Is implicit info `info1` better than implicit info `info2`?
*/
def improves(info1: ImplicitInfo, info2: ImplicitInfo) = {
- incCounter(improvesCount)
+ if (Statistics.canEnable) Statistics.incCounter(improvesCount)
(info2 == NoImplicitInfo) ||
(info1 != NoImplicitInfo) && {
if (info1.sym.isStatic && info2.sym.isStatic) {
improvesCache get (info1, info2) match {
- case Some(b) => incCounter(improvesCachedCount); b
+ case Some(b) => if (Statistics.canEnable) Statistics.incCounter(improvesCachedCount); b
case None =>
val result = isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym)
improvesCache((info1, info2)) = result
@@ -261,23 +370,8 @@ trait Implicits {
} else isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym)
}
}
-
- /** Map all type params in given list to WildcardType
- * @param tp The type in which to do the mapping
- * @param tparams The list of type parameters to map
- */
- private def tparamsToWildcards(tp: Type, tparams: List[Symbol]) =
- tp.instantiateTypeParams(tparams, tparams map (t => WildcardType))
-
- /* Map a polytype to one in which all type parameters and argument-dependent types are replaced by wildcards.
- * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate DebruijnIndex types
- * when checking whether `b` is a valid implicit, as we haven't even searched a value for the implicit arg `x`,
- * so we have to approximate (otherwise it is excluded a priori).
- */
- private def depoly(tp: Type): Type = tp match {
- case PolyType(tparams, restpe) => tparamsToWildcards(ApproximateDependentMap(restpe), tparams)
- case _ => ApproximateDependentMap(tp)
- }
+ def isPlausiblyCompatible(tp: Type, pt: Type) = checkCompatibility(fast = true, tp, pt)
+ def normSubType(tp: Type, pt: Type) = checkCompatibility(fast = false, tp, pt)
/** Does type `dtor` dominate type `dted`?
* This is the case if the stripped cores `dtor1` and `dted1` of both types are
@@ -291,7 +385,7 @@ trait Implicits {
* The _complexity_ of a stripped core type corresponds roughly to the number of
* nodes in its ast, except that singleton types are widened before taking the complexity.
* Two types overlap if they have the same type symbol, or
- * if one or both are intersection types with a pair of overlapiing parent types.
+ * if one or both are intersection types with a pair of overlapping parent types.
*/
private def dominates(dtor: Type, dted: Type): Boolean = {
def core(tp: Type): Type = tp.normalize match {
@@ -302,15 +396,18 @@ trait Implicits {
case _ => tp
}
def stripped(tp: Type): Type = {
- val tparams = freeTypeParametersNoSkolems.collect(tp)
- tp.subst(tparams, tparams map (t => WildcardType))
+ // `t.typeSymbol` returns the symbol of the normalized type. If that normalized type
+ // is a `PolyType`, the symbol of the result type is collected. This is precisely
+ // what we require for SI-5318.
+ val syms = for (t <- tp; if t.typeSymbol.isTypeParameter) yield t.typeSymbol
+ deriveTypeWithWildcards(syms.distinct)(tp)
}
def sum(xs: List[Int]) = (0 /: xs)(_ + _)
def complexity(tp: Type): Int = tp.normalize match {
case NoPrefix =>
0
case SingleType(pre, sym) =>
- if (sym.isPackage) 0 else complexity(tp.widen)
+ if (sym.isPackage) 0 else complexity(tp.normalize.widen)
case TypeRef(pre, sym, args) =>
complexity(pre) + sum(args map complexity) + 1
case RefinedType(parents, _) =>
@@ -328,59 +425,13 @@ trait Implicits {
overlaps(dtor1, dted1) && (dtor1 =:= dted1 || complexity(dtor1) > complexity(dted1))
}
- incCounter(implicitSearchCount)
-
- /** Issues an error signalling ambiguous implicits */
- private def ambiguousImplicitError(info1: ImplicitInfo, info2: ImplicitInfo,
- pre1: String, pre2: String, trailer: String) =
- if (!info1.tpe.isErroneous && !info2.tpe.isErroneous) {
- val coreMsg =
- pre1+" "+info1.sym+info1.sym.locationString+" of type "+info1.tpe+"\n "+
- pre2+" "+info2.sym+info2.sym.locationString+" of type "+info2.tpe+"\n "+
- trailer
- error(tree.pos,
- if (isView) {
- val found = pt.typeArgs(0)
- val req = pt.typeArgs(1)
- def defaultExplanation =
- "Note that implicit conversions are not applicable because they are ambiguous:\n "+
- coreMsg+"are possible conversion functions from "+ found+" to "+req
-
- def explanation = {
- val sym = found.typeSymbol
- // Explain some common situations a bit more clearly.
- if (AnyRefClass.tpe <:< req) {
- if (sym == AnyClass || sym == UnitClass) {
- "Note: " + sym.name + " is not implicitly converted to AnyRef. You can safely\n" +
- "pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so."
- }
- else boxedClass get sym match {
- case Some(boxed) =>
- "Note: an implicit exists from " + sym.fullName + " => " + boxed.fullName + ", but\n" +
- "methods inherited from Object are rendered ambiguous. This is to avoid\n" +
- "a blanket implicit which would convert any " + sym.fullName + " to any AnyRef.\n" +
- "You may wish to use a type ascription: `x: " + boxed.fullName + "`."
- case _ =>
- defaultExplanation
- }
- }
- else defaultExplanation
- }
-
- typeErrorMsg(found, req) + "\n" + explanation
- }
- else {
- "ambiguous implicit values:\n "+coreMsg + "match expected type "+pt
- })
- }
+ if (Statistics.canEnable) Statistics.incCounter(implicitSearchCount)
/** The type parameters to instantiate */
val undetParams = if (isView) List() else context.outer.undetparams
- def approximate(tp: Type) =
- if (undetParams.isEmpty) tp
- else tp.instantiateTypeParams(undetParams, undetParams map (_ => WildcardType))
-
+ /** The expected type with all undetermined type parameters replaced with wildcards. */
+ def approximate(tp: Type) = deriveTypeWithWildcards(undetParams)(tp)
val wildPt = approximate(pt)
/** Try to construct a typed tree from given implicit info with given
@@ -388,27 +439,39 @@ trait Implicits {
* Detect infinite search trees for implicits.
*
* @param info The given implicit info describing the implicit definition
+ * @param isLocal Is the implicit in the local scope of the call site?
* @pre `info.tpe` does not contain an error
*/
- private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean): SearchResult = {
- printInference("[typedImplicit] " + info)
- (context.openImplicits find { case (tp, sym) => sym == tree.symbol && dominates(pt, tp)}) match {
+ private def typedImplicit(info: ImplicitInfo, ptChecked: Boolean, isLocal: Boolean): SearchResult = {
+ // SI-7167 let implicit macros decide what amounts for a divergent implicit search
+ // imagine a macro writer which wants to synthesize a complex implicit Complex[T] by making recursive calls to Complex[U] for its parts
+ // e.g. we have `class Foo(val bar: Bar)` and `class Bar(val x: Int)`
+ // then it's quite reasonable for the macro writer to synthesize Complex[Foo] by calling `inferImplicitValue(typeOf[Complex[Bar])`
+ // however if we didn't insert the `info.sym.isMacro` check here, then under some circumstances
+ // (e.g. as described here http://groups.google.com/group/scala-internals/browse_thread/thread/545462b377b0ac0a)
+ // `dominates` might decide that `Bar` dominates `Foo` and therefore a recursive implicit search should be prohibited
+ // now when we yield control of divergent expansions to the macro writer, what happens next?
+ // in the worst case, if the macro writer is careless, we'll get a StackOverflowException from repeated macro calls
+ // otherwise, the macro writer could check `c.openMacros` and `c.openImplicits` and do `c.abort` when expansions are deemed to be divergent
+ // upon receiving `c.abort` the typechecker will decide that the corresponding implicit search has failed
+ // which will fail the entire stack of implicit searches, producing a nice error message provided by the programmer
+ (context.openImplicits find { case OpenImplicit(info, tp, tree1) => !info.sym.isMacro && tree1.symbol == tree.symbol && dominates(pt, tp)}) match {
case Some(pending) =>
- // println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
- throw DivergentImplicit
+ //println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
+ if (settings.Xdivergence211.value) DivergentSearchFailure
+ else throw DivergentImplicit
case None =>
+ def pre211DivergenceLogic() = {
try {
- context.openImplicits = (pt, tree.symbol) :: context.openImplicits
+ context.openImplicits = OpenImplicit(info, pt, tree) :: context.openImplicits
// println(" "*context.openImplicits.length+"typed implicit "+info+" for "+pt) //@MDEBUG
- typedImplicit0(info, ptChecked)
+ typedImplicit0(info, ptChecked, isLocal)
} catch {
case ex: DivergentImplicit =>
- // println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
+ //println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
if (context.openImplicits.tail.isEmpty) {
- if (!(pt.isErroneous))
- context.unit.error(
- tree.pos, "diverging implicit expansion for type "+pt+"\nstarting with "+
- info.sym+info.sym.locationString)
+ if (!pt.isErroneous && !info.sym.isMacro)
+ DivergingImplicitExpansionError(tree, pt, info.sym)(context)
SearchFailure
} else {
throw DivergentImplicit
@@ -416,39 +479,51 @@ trait Implicits {
} finally {
context.openImplicits = context.openImplicits.tail
}
+ }
+ def post211DivergenceLogic() = {
+ try {
+ context.openImplicits = OpenImplicit(info, pt, tree) :: context.openImplicits
+ // println(" "*context.openImplicits.length+"typed implicit "+info+" for "+pt) //@MDEBUG
+ val result = typedImplicit0(info, ptChecked, isLocal)
+ if (result.isDivergent) {
+ //println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
+ if (context.openImplicits.tail.isEmpty && !pt.isErroneous)
+ DivergingImplicitExpansionError(tree, pt, info.sym)(context)
+ }
+ result
+ } finally {
+ context.openImplicits = context.openImplicits.tail
+ }
+ }
+ if (settings.Xdivergence211.value) post211DivergenceLogic()
+ else pre211DivergenceLogic()
}
}
- /** Todo reconcile with definition of stability given in Types.scala */
- private def isStable(tp: Type): Boolean = tp match {
- case TypeRef(pre, sym, _) =>
- sym.isPackageClass ||
- sym.isModuleClass && isStable(pre) /*||
- sym.isAliasType && isStable(tp.normalize)*/
- case _ => tp.isStable
- }
-
- /** Does type `tp' match expected type `pt'
- * This is the case if either `pt' is a unary function type with a
- * HasMethodMatching type as result, and `tp' is a unary function
+ /** Does type `tp` match expected type `pt`
+ * This is the case if either `pt` is a unary function type with a
+ * HasMethodMatching type as result, and `tp` is a unary function
* or method type whose result type has a method whose name and type
* correspond to the HasMethodMatching type,
- * or otherwise if `tp' is compatible with `pt'.
+ * or otherwise if `tp` is compatible with `pt`.
* This method is performance critical: 5-8% of typechecking time.
*/
- private def matchesPt(tp: Type, pt: Type, undet: List[Symbol]) = {
- val start = startTimer(matchesPtNanos)
+ private def matchesPt(tp: Type, pt: Type, undet: List[Symbol]): Boolean = {
+ val start = if (Statistics.canEnable) Statistics.startTimer(matchesPtNanos) else null
val result = normSubType(tp, pt) || isView && {
pt match {
- case TypeRef(_, Function1.Sym, args) =>
- matchesPtView(tp, args.head, args.tail.head, undet)
+ case TypeRef(_, Function1.Sym, arg1 :: arg2 :: Nil) =>
+ matchesPtView(tp, arg1, arg2, undet)
case _ =>
false
}
}
- stopTimer(matchesPtNanos, start)
+ if (Statistics.canEnable) Statistics.stopTimer(matchesPtNanos, start)
result
}
+ private def matchesPt(info: ImplicitInfo): Boolean = (
+ info.isStablePrefix && matchesPt(depoly(info.tpe), wildPt, Nil)
+ )
private def matchesPtView(tp: Type, ptarg: Type, ptres: Type, undet: List[Symbol]): Boolean = tp match {
case MethodType(p :: _, restpe) if p.isImplicit => matchesPtView(restpe, ptarg, ptres, undet)
@@ -469,46 +544,118 @@ trait Implicits {
}
}
- private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean): SearchResult = {
- incCounter(plausiblyCompatibleImplicits)
- printTyping(
+ /** Capturing the overlap between isPlausiblyCompatible and normSubType.
+ * This is a faithful translation of the code which was there, but it
+ * seems likely the methods are intended to be even more similar than
+ * they are: perhaps someone more familiar with the intentional distinctions
+ * can examine the now much smaller concrete implementations below.
+ */
+ private def checkCompatibility(fast: Boolean, tp0: Type, pt0: Type): Boolean = {
+ @tailrec def loop(tp: Type, pt: Type): Boolean = tp match {
+ case mt @ MethodType(params, restpe) =>
+ if (mt.isImplicit)
+ loop(restpe, pt)
+ else pt match {
+ case tr @ TypeRef(pre, sym, args) =>
+ if (sym.isAliasType) loop(tp, pt.dealias)
+ else if (sym.isAbstractType) loop(tp, pt.bounds.lo)
+ else {
+ val len = args.length - 1
+ hasLength(params, len) &&
+ sym == FunctionClass(len) && {
+ var ps = params
+ var as = args
+ if (fast) {
+ while (ps.nonEmpty && as.nonEmpty) {
+ if (!isPlausiblySubType(as.head, ps.head.tpe))
+ return false
+ ps = ps.tail
+ as = as.tail
+ }
+ } else {
+ while (ps.nonEmpty && as.nonEmpty) {
+ if (!(as.head <:< ps.head.tpe))
+ return false
+ ps = ps.tail
+ as = as.tail
+ }
+ }
+ ps.isEmpty && as.nonEmpty && {
+ val lastArg = as.head
+ as.tail.isEmpty && loop(restpe, lastArg)
+ }
+ }
+ }
+
+ case _ => if (fast) false else tp <:< pt
+ }
+ case NullaryMethodType(restpe) => loop(restpe, pt)
+ case PolyType(_, restpe) => loop(restpe, pt)
+ case ExistentialType(_, qtpe) => if (fast) loop(qtpe, pt) else normalize(tp) <:< pt // is !fast case needed??
+ case _ => if (fast) isPlausiblySubType(tp, pt) else tp <:< pt
+ }
+ loop(tp0, pt0)
+ }
+
+ /** This expresses more cleanly in the negative: there's a linear path
+ * to a final true or false.
+ */
+ private def isPlausiblySubType(tp1: Type, tp2: Type) = !isImpossibleSubType(tp1, tp2)
+ private def isImpossibleSubType(tp1: Type, tp2: Type) = tp1.dealiasWiden match {
+ // We can only rule out a subtype relationship if the left hand
+ // side is a class, else we may not know enough.
+ case tr1 @ TypeRef(_, sym1, _) if sym1.isClass =>
+ tp2.dealiasWiden match {
+ case TypeRef(_, sym2, _) => sym2.isClass && !(sym1 isWeakSubClass sym2)
+ case RefinedType(parents, decls) => decls.nonEmpty && tr1.member(decls.head.name) == NoSymbol
+ case _ => false
+ }
+ case _ => false
+ }
+
+ private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean, isLocal: Boolean): SearchResult = {
+ if (Statistics.canEnable) Statistics.incCounter(plausiblyCompatibleImplicits)
+ printTyping (
ptBlock("typedImplicit0",
"info.name" -> info.name,
- "info.tpe" -> depoly(info.tpe),
"ptChecked" -> ptChecked,
"pt" -> wildPt,
"orig" -> ptBlock("info",
- "matchesPt" -> matchesPt(depoly(info.tpe), wildPt, Nil),
"undetParams" -> undetParams,
- "isPlausiblyCompatible" -> isPlausiblyCompatible(info.tpe, wildPt),
- "info.pre" -> info.pre,
- "isStable" -> isStable(info.pre)
+ "info.pre" -> info.pre
).replaceAll("\\n", "\n ")
)
)
- if (ptChecked || matchesPt(depoly(info.tpe), wildPt, Nil) && isStable(info.pre))
- typedImplicit1(info)
+ if (ptChecked || matchesPt(info))
+ typedImplicit1(info, isLocal)
else
SearchFailure
}
- private def typedImplicit1(info: ImplicitInfo): SearchResult = {
- incCounter(matchingImplicits)
+ private def typedImplicit1(info: ImplicitInfo, isLocal: Boolean): SearchResult = {
+ if (Statistics.canEnable) Statistics.incCounter(matchingImplicits)
+
+ val itree = atPos(pos.focus) {
+ // workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints
+ val isScalaDoc = context.tree == EmptyTree
- val itree = atPos(tree.pos.focus) {
- if (info.pre == NoPrefix) Ident(info.name)
- else Select(gen.mkAttributedQualifier(info.pre), info.name)
+ if (isLocal && !isScalaDoc) {
+ // SI-4270 SI-5376 Always use an unattributed Ident for implicits in the local scope,
+ // rather than an attributed Select, to detect shadowing.
+ Ident(info.name)
+ } else {
+ assert(info.pre != NoPrefix, info)
+ // SI-2405 Not info.name, which might be an aliased import
+ val implicitMemberName = info.sym.name
+ Select(gen.mkAttributedQualifier(info.pre), implicitMemberName)
+ }
}
printTyping("typedImplicit1 %s, pt=%s, from implicit %s:%s".format(
typeDebug.ptTree(itree), wildPt, info.name, info.tpe)
)
- def fail(reason: String): SearchResult = {
- if (settings.XlogImplicits.value)
- inform(itree+" is not a valid implicit value for "+pt+" because:\n"+reason)
- SearchFailure
- }
+ def fail(reason: String): SearchResult = failure(itree, reason)
try {
val itree1 =
if (isView) {
@@ -522,7 +669,10 @@ trait Implicits {
else
typed1(itree, EXPRmode, wildPt)
- incCounter(typedImplicits)
+ if (context.hasErrors)
+ return fail(context.errBuffer.head.errMsg)
+
+ if (Statistics.canEnable) Statistics.incCounter(typedImplicits)
printTyping("typed implicit %s:%s, pt=%s".format(itree1, itree1.tpe, wildPt))
val itree2 = if (isView) (itree1: @unchecked) match { case Apply(fun, _) => fun }
@@ -541,23 +691,22 @@ trait Implicits {
}
}
- if (itree2.tpe.isError)
- SearchFailure
- else if (!hasMatchingSymbol(itree1))
- fail("candidate implicit %s is shadowed by other implicit %s".format(
- info.sym + info.sym.locationString, itree1.symbol + itree1.symbol.locationString))
+ if (context.hasErrors)
+ fail("hasMatchingSymbol reported error: " + context.errBuffer.head.errMsg)
+ else if (isLocal && !hasMatchingSymbol(itree1))
+ fail("candidate implicit %s is shadowed by %s".format(
+ info.sym.fullLocationString, itree1.symbol.fullLocationString))
else {
val tvars = undetParams map freshVar
+ def ptInstantiated = pt.instantiateTypeParams(undetParams, tvars)
- if (matchesPt(itree2.tpe, pt.instantiateTypeParams(undetParams, tvars), undetParams)) {
- printInference(
- ptBlock("matchesPt",
- "itree1" -> itree1,
- "tvars" -> tvars,
- "undetParams" -> undetParams
- )
- )
+ printInference("[search] considering %s (pt contains %s) trying %s against pt=%s".format(
+ if (undetParams.isEmpty) "no tparams" else undetParams.map(_.name).mkString(", "),
+ typeVarsInType(ptInstantiated) filterNot (_.isGround) match { case Nil => "no tvars" ; case tvs => tvs.mkString(", ") },
+ itree2.tpe, pt
+ ))
+ if (matchesPt(itree2.tpe, ptInstantiated, undetParams)) {
if (tvars.nonEmpty)
printTyping(ptLine("" + info.sym, "tvars" -> tvars, "tvars.constr" -> tvars.map(_.constr)))
@@ -565,17 +714,21 @@ trait Implicits {
false, lubDepth(List(itree2.tpe, pt)))
// #2421: check that we correctly instantiated type parameters outside of the implicit tree:
- checkBounds(itree2.pos, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
+ checkBounds(itree2, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
+ if (context.hasErrors)
+ return fail("type parameters weren't correctly instantiated outside of the implicit tree: " + context.errBuffer.head.errMsg)
// filter out failures from type inference, don't want to remove them from undetParams!
// we must be conservative in leaving type params in undetparams
// prototype == WildcardType: want to remove all inferred Nothings
- val AdjustedTypeArgs(okParams, okArgs) = adjustTypeArgs(undetParams, targs)
+ val AdjustedTypeArgs(okParams, okArgs) = adjustTypeArgs(undetParams, tvars, targs)
+
val subst: TreeTypeSubstituter =
if (okParams.isEmpty) EmptyTreeTypeSubstituter
else {
val subst = new TreeTypeSubstituter(okParams, okArgs)
subst traverse itree2
+ notifyUndetparamsInferred(okParams, okArgs)
subst
}
@@ -590,45 +743,33 @@ trait Implicits {
// re-typecheck)
// TODO: the return tree is ignored. This seems to make
// no difference, but it's bad practice regardless.
- itree2 match {
+
+
+ val checked = itree2 match {
case TypeApply(fun, args) => typedTypeApply(itree2, EXPRmode, fun, args)
case Apply(TypeApply(fun, args), _) => typedTypeApply(itree2, EXPRmode, fun, args) // t2421c
case t => t
}
- val result = new SearchResult(itree2, subst)
- incCounter(foundImplicits)
- printInference("[typedImplicit1] SearchResult: " + result)
- result
+
+ if (context.hasErrors)
+ fail("typing TypeApply reported errors for the implicit tree: " + context.errBuffer.head.errMsg)
+ else {
+ val result = new SearchResult(itree2, subst)
+ if (Statistics.canEnable) Statistics.incCounter(foundImplicits)
+ printInference("[success] found %s for pt %s".format(result, ptInstantiated))
+ result
+ }
}
- else fail("incompatible: %s does not match expected type %s".format(
- itree2.tpe, pt.instantiateTypeParams(undetParams, tvars)))
+ else fail("incompatible: %s does not match expected type %s".format(itree2.tpe, ptInstantiated))
}
}
catch {
- case ex: TypeError => fail(ex.getMessage())
+ case ex: TypeError =>
+ fail(ex.getMessage())
}
}
- // #3453: in addition to the implicit symbols that may shadow the implicit with
- // name `name`, this method tests whether there's a non-implicit symbol with name
- // `name` in scope. Inspired by logic in typedIdent.
- private def nonImplicitSynonymInScope(name: Name) = {
- // the implicit ones are handled by the `shadowed` set above
- context.scope.lookupEntry(name) match {
- case x: ScopeEntry => reallyExists(x.sym) && !x.sym.isImplicit
- case _ => false
- }
- }
-
- /** Is `sym' the standard conforms method in Predef?
- * Note: DON't replace this by sym == Predef_conforms, as Predef_conforms is a `def'
- * which does a member lookup (it can't be a lazy val because we might reload Predef
- * during resident compilations).
- */
- private def isConformsMethod(sym: Symbol) =
- sym.name == nme.conforms && sym.owner == PredefModule.moduleClass
-
- /** Should implicit definition symbol `sym' be considered for applicability testing?
+ /** Should implicit definition symbol `sym` be considered for applicability testing?
* This is the case if one of the following holds:
* - the symbol's type is initialized
* - the symbol comes from a classfile
@@ -654,7 +795,7 @@ trait Implicits {
def comesBefore(sym: Symbol, owner: Symbol) = {
val ownerPos = owner.pos.pointOrElse(Int.MaxValue)
sym.pos.pointOrElse(0) < ownerPos && (
- if (sym hasAccessorFlag) {
+ if (sym.hasAccessorFlag) {
val symAcc = sym.accessed // #3373
symAcc.pos.pointOrElse(0) < ownerPos &&
!(owner.ownerChain exists (o => (o eq sym) || (o eq symAcc))) // probably faster to iterate only once, don't feel like duplicating hasTransOwner for this case
@@ -671,22 +812,48 @@ trait Implicits {
/** Prune ImplicitInfos down to either all the eligible ones or the best one.
*
* @param iss list of list of infos
- * @param shadowed set in which to record names that are shadowed by implicit infos
- * If it is null, no shadowing.
+ * @param isLocal if true, `iss` represents in-scope implicits, which must respect the normal rules of
+ * shadowing. The head of the list `iss` must represent implicits from the closest
+ * enclosing scope, and so on.
*/
- class ImplicitComputation(iss: Infoss, shadowed: util.HashSet[Name]) {
+ class ImplicitComputation(iss: Infoss, isLocal: Boolean) {
+ abstract class Shadower {
+ def addInfos(infos: Infos)
+ def isShadowed(name: Name): Boolean
+ }
+ private val shadower: Shadower = {
+ /** Used for exclude implicits from outer scopes that are shadowed by same-named implicits */
+ final class LocalShadower extends Shadower {
+ val shadowed = util.HashSet[Name](512)
+ def addInfos(infos: Infos) {
+ shadowed addEntries infos.map(_.name)
+ }
+ def isShadowed(name: Name) = shadowed(name)
+ }
+ /** Used for the implicits of expected type, when no shadowing checks are needed. */
+ object NoShadower extends Shadower {
+ def addInfos(infos: Infos) {}
+ def isShadowed(name: Name) = false
+ }
+ if (isLocal) new LocalShadower else NoShadower
+ }
+
private var best: SearchResult = SearchFailure
+ private def isIneligible(info: ImplicitInfo) = (
+ info.isCyclicOrErroneous
+ || isView && isPredefMemberNamed(info.sym, nme.conforms)
+ || shadower.isShadowed(info.name)
+ || (!context.macrosEnabled && info.sym.isTermMacro)
+ )
+
/** True if a given ImplicitInfo (already known isValid) is eligible.
*/
- def survives(info: ImplicitInfo): Boolean = {
- !info.isCyclicOrErroneous &&
- !(isView && isConformsMethod(info.sym)) &&
- isPlausiblyCompatible(info.tpe, wildPt) && // <--- cheaper than matchesPt
- matchesPt(depoly(info.tpe), wildPt, Nil) &&
- isStable(info.pre) &&
- (shadowed == null || (!shadowed(info.name) && !nonImplicitSynonymInScope(info.name)))
- }
+ def survives(info: ImplicitInfo) = (
+ !isIneligible(info) // cyclic, erroneous, shadowed, or specially excluded
+ && isPlausiblyCompatible(info.tpe, wildPt) // optimization to avoid matchesPt
+ && matchesPt(info) // stable and matches expected type
+ )
/** The implicits that are not valid because they come later in the source and
* lack an explicit result type. Used for error diagnostics only.
*/
@@ -698,14 +865,41 @@ trait Implicits {
/** Preventing a divergent implicit from terminating implicit search,
* so that if there is a best candidate it can still be selected.
+ *
+ * The old way of handling divergence.
+ * Only enabled when -Xdivergence211 is turned off.
*/
private var divergence = false
- private val MaxDiverges = 1 // not sure if this should be > 1
- private val divergenceHandler = util.Exceptional.expiringHandler(MaxDiverges) {
- case x: DivergentImplicit =>
- divergence = true
- log("discarding divergent implicit during implicit search")
- SearchFailure
+ private val divergenceHandler: PartialFunction[Throwable, SearchResult] = {
+ var remaining = 1;
+ { case x: DivergentImplicit if remaining > 0 =>
+ remaining -= 1
+ divergence = true
+ log("discarding divergent implicit during implicit search")
+ SearchFailure
+ }
+ }
+
+ /** Preventing a divergent implicit from terminating implicit search,
+ * so that if there is a best candidate it can still be selected.
+ *
+ * The new way of handling divergence.
+ * Only enabled when -Xdivergence211 is turned on.
+ */
+ object DivergentImplicitRecovery {
+ // symbol of the implicit that caused the divergence.
+ // Initially null, will be saved on first diverging expansion.
+ private var implicitSym: Symbol = _
+ private var countdown: Int = 1
+
+ def sym: Symbol = implicitSym
+ def apply(search: SearchResult, i: ImplicitInfo): SearchResult =
+ if (search.isDivergent && countdown > 0) {
+ countdown -= 1
+ implicitSym = i.sym
+ log("discarding divergent implicit ${implicitSym} during implicit search")
+ SearchFailure
+ } else search
}
/** Sorted list of eligible implicits.
@@ -713,25 +907,18 @@ trait Implicits {
val eligible = {
val matches = iss flatMap { is =>
val result = is filter (info => checkValid(info.sym) && survives(info))
- if (shadowed ne null)
- shadowed addEntries (is map (_.name))
-
+ shadower addInfos is
result
}
// most frequent one first
matches sortBy (x => if (isView) -x.useCountView else -x.useCountArg)
}
- def eligibleString = {
- val args = List(
- "search" -> pt,
- "target" -> tree,
- "isView" -> isView
- ) ++ eligible.map("eligible" -> _)
-
- ptBlock("Implicit search in " + context, args: _*)
- }
- printInference(eligibleString)
+ if (eligible.nonEmpty)
+ printInference("[search%s] %s with pt=%s in %s, eligible:\n %s".format(
+ if (isView) " view" else "",
+ tree, pt, context.owner.enclClass, eligible.mkString("\n "))
+ )
/** Faster implicit search. Overall idea:
* - prune aggressively
@@ -741,18 +928,38 @@ trait Implicits {
@tailrec private def rankImplicits(pending: Infos, acc: Infos): Infos = pending match {
case Nil => acc
case i :: is =>
- def tryImplicitInfo(i: ImplicitInfo) =
- try typedImplicit(i, true)
+ def pre211tryImplicitInfo(i: ImplicitInfo) =
+ try typedImplicit(i, ptChecked = true, isLocal)
catch divergenceHandler
- tryImplicitInfo(i) match {
- case SearchFailure => rankImplicits(is, acc)
+ def post211tryImplicitInfo(i: ImplicitInfo) =
+ DivergentImplicitRecovery(typedImplicit(i, ptChecked = true, isLocal), i)
+
+ {
+ if (settings.Xdivergence211.value) post211tryImplicitInfo(i)
+ else pre211tryImplicitInfo(i)
+ } match {
+ // only used if -Xdivergence211 is turned on
+ case sr if sr.isDivergent =>
+ Nil
+ case sr if sr.isFailure =>
+ // We don't want errors that occur during checking implicit info
+ // to influence the check of further infos.
+ context.condBufferFlush(_.kind != ErrorKinds.Divergent)
+ rankImplicits(is, acc)
case newBest =>
best = newBest
val newPending = undoLog undo {
is filterNot (alt => alt == i || {
try improves(i, alt)
- catch { case e: CyclicReference => true }
+ catch {
+ case e: CyclicReference =>
+ if (printInfers) {
+ println(i+" discarded because cyclic reference occurred")
+ e.printStackTrace()
+ }
+ true
+ }
})
}
rankImplicits(newPending, i :: acc)
@@ -761,7 +968,7 @@ trait Implicits {
/** Returns all eligible ImplicitInfos and their SearchResults in a map.
*/
- def findAll() = eligible map (info => (info, typedImplicit(info, false))) toMap
+ def findAll() = mapFrom(eligible)(typedImplicit(_, ptChecked = false, isLocal))
/** Returns the SearchResult of the best match.
*/
@@ -774,24 +981,27 @@ trait Implicits {
case chosen :: rest =>
rest find (alt => !improves(chosen, alt)) match {
case Some(competing) =>
- ambiguousImplicitError(chosen, competing, "both", "and", "")
+ AmbiguousImplicitError(chosen, competing, "both", "and", "")(isView, pt, tree)(context)
+ return AmbiguousSearchFailure // Stop the search once ambiguity is encountered, see t4457_2.scala
case _ =>
if (isView) chosen.useCountView += 1
else chosen.useCountArg += 1
}
}
- if (best == SearchFailure) {
+ if (best.isFailure) {
/** If there is no winner, and we witnessed and caught divergence,
* now we can throw it for the error message.
*/
- if (divergence)
- throw DivergentImplicit
+ if (divergence || DivergentImplicitRecovery.sym != null) {
+ if (settings.Xdivergence211.value) DivergingImplicitExpansionError(tree, pt, DivergentImplicitRecovery.sym)(context)
+ else throw DivergentImplicit
+ }
if (invalidImplicits.nonEmpty)
- setAddendum(tree.pos, () =>
- "\n Note: implicit "+invalidImplicits.head+" is not applicable here"+
- " because it comes after the application point and it lacks an explicit result type")
+ setAddendum(pos, () =>
+ s"\n Note: implicit ${invalidImplicits.head} is not applicable here because it comes after the application point and it lacks an explicit result type"
+ )
}
best
@@ -808,11 +1018,11 @@ trait Implicits {
* @return map from infos to search results
*/
def applicableInfos(iss: Infoss, isLocal: Boolean): Map[ImplicitInfo, SearchResult] = {
- val start = startCounter(subtypeAppInfos)
- val computation = new ImplicitComputation(iss, if (isLocal) util.HashSet[Name](512) else null) { }
+ val start = if (Statistics.canEnable) Statistics.startCounter(subtypeAppInfos) else null
+ val computation = new ImplicitComputation(iss, isLocal) { }
val applicable = computation.findAll()
- stopCounter(subtypeAppInfos, start)
+ if (Statistics.canEnable) Statistics.stopCounter(subtypeAppInfos, start)
applicable
}
@@ -827,7 +1037,7 @@ trait Implicits {
*/
def searchImplicit(implicitInfoss: Infoss, isLocal: Boolean): SearchResult =
if (implicitInfoss.forall(_.isEmpty)) SearchFailure
- else new ImplicitComputation(implicitInfoss, if (isLocal) util.HashSet[Name](128) else null) findBest()
+ else new ImplicitComputation(implicitInfoss, isLocal) findBest()
/** Produce an implicict info map, i.e. a map from the class symbols C of all parts of this type to
* the implicit infos in the companion objects of these class symbols C.
@@ -838,8 +1048,8 @@ trait Implicits {
* - for alias types and abstract types, we take instead the parts
* - of their upper bounds.
* @return For those parts that refer to classes with companion objects that
- * can be accessed with unambiguous stable prefixes, the implicits infos
- * which are members of these companion objects.
+ * can be accessed with unambiguous stable prefixes that are not existentially
+ * bound, the implicits infos which are members of these companion objects.
*/
private def companionImplicitMap(tp: Type): InfoMap = {
@@ -851,16 +1061,16 @@ trait Implicits {
infoMap get sym match {
case Some(infos1) =>
if (infos1.nonEmpty && !(pre =:= infos1.head.pre.prefix)) {
- println("amb prefix: "+pre+"#"+sym+" "+infos1.head.pre.prefix+"#"+sym)
+ log(s"Ignoring implicit members of $pre#$sym as it is also visible via another prefix: ${infos1.head.pre.prefix}")
infoMap(sym) = List() // ambiguous prefix - ignore implicit members
}
case None =>
- if (pre.isStable) {
- val companion = sym.companionModule
+ if (pre.isStable && !pre.typeSymbol.isExistentiallyBound) {
+ val companion = companionSymbolOf(sym, context)
companion.moduleClass match {
case mc: ModuleClassSymbol =>
val infos =
- for (im <- mc.implicitMembers) yield new ImplicitInfo(im.name, singleType(pre, companion), im)
+ for (im <- mc.implicitMembers.toList) yield new ImplicitInfo(im.name, singleType(pre, companion), im)
if (infos.nonEmpty)
infoMap += (sym -> infos)
case _ =>
@@ -910,7 +1120,7 @@ trait Implicits {
args foreach (getParts(_))
}
} else if (sym.isAliasType) {
- getParts(tp.normalize)
+ getParts(tp.normalize) // SI-7180 Normalize needed to expand HK type refs
} else if (sym.isAbstractType) {
getParts(tp.bounds.hi)
}
@@ -935,135 +1145,123 @@ trait Implicits {
val infoMap = new InfoMap
getParts(tp)(infoMap, new mutable.HashSet(), Set())
- printInference("[companionImplicitMap] "+tp+" = "+infoMap)
+ printInference(
+ ptBlock("companionImplicitMap " + tp, infoMap.toSeq.map({ case (k, v) => ("" + k, v.mkString(", ")) }): _*)
+ )
infoMap
}
- /** The parts of a type is the smallest set of types that contains
- * - the type itself
- * - the parts of its immediate components (prefix and argument)
- * - the parts of its base types
- * - for alias types and abstract types, we take instead the parts
- * - of their upper bounds.
- * @return For those parts that refer to classes with companion objects that
- * can be accessed with unambiguous stable prefixes, the implicits infos
- * which are members of these companion objects.
+ /** The implicits made available by type `pt`.
+ * These are all implicits found in companion objects of classes C
+ * such that some part of `tp` has C as one of its superclasses.
+ */
+ private def implicitsOfExpectedType: Infoss = {
+ if (Statistics.canEnable) Statistics.incCounter(implicitCacheAccs)
+ implicitsCache get pt match {
+ case Some(implicitInfoss) =>
+ if (Statistics.canEnable) Statistics.incCounter(implicitCacheHits)
+ implicitInfoss
+ case None =>
+ val start = if (Statistics.canEnable) Statistics.startTimer(subtypeETNanos) else null
+ // val implicitInfoss = companionImplicits(pt)
+ val implicitInfoss1 = companionImplicitMap(pt).valuesIterator.toList
+ // val is1 = implicitInfoss.flatten.toSet
+ // val is2 = implicitInfoss1.flatten.toSet
+ // for (i <- is1)
+ // if (!(is2 contains i)) println("!!! implicit infos of "+pt+" differ, new does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
+ // for (i <- is2)
+ // if (!(is1 contains i)) println("!!! implicit infos of "+pt+" differ, old does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
+ if (Statistics.canEnable) Statistics.stopTimer(subtypeETNanos, start)
+ implicitsCache(pt) = implicitInfoss1
+ if (implicitsCache.size >= sizeLimit)
+ implicitsCache -= implicitsCache.keysIterator.next
+ implicitInfoss1
+ }
+ }
- private def companionImplicits(tp: Type): Infoss = {
- val partMap = new LinkedHashMap[Symbol, Type]
- val seen = mutable.HashSet[Type]() // cycle detection
+ private def TagSymbols = TagMaterializers.keySet
+ private val TagMaterializers = Map[Symbol, Symbol](
+ ClassTagClass -> materializeClassTag,
+ WeakTypeTagClass -> materializeWeakTypeTag,
+ TypeTagClass -> materializeTypeTag
+ )
- /** Enter all parts of `tp` into `parts` set.
- * This method is performance critical: about 2-4% of all type checking is spent here
- */
- def getParts(tp: Type) {
- if (seen(tp))
- return
- seen += tp
- tp match {
- case TypeRef(pre, sym, args) =>
- if (sym.isClass) {
- if (!((sym.name == tpnme.REFINE_CLASS_NAME) ||
- (sym.name startsWith tpnme.ANON_CLASS_NAME) ||
- (sym.name == tpnme.ROOT)))
- partMap get sym match {
- case Some(pre1) =>
- if (!(pre =:= pre1)) partMap(sym) = NoType // ambiguous prefix - ignore implicit members
- case None =>
- if (pre.isStable) partMap(sym) = pre
- val bts = tp.baseTypeSeq
- var i = 1
- while (i < bts.length) {
- getParts(bts(i))
- i += 1
- }
- getParts(pre)
- args foreach getParts
- }
- } else if (sym.isAliasType) {
- getParts(tp.normalize)
- } else if (sym.isAbstractType) {
- getParts(tp.bounds.hi)
- }
- case ThisType(_) =>
- getParts(tp.widen)
- case _: SingletonType =>
- getParts(tp.widen)
- case RefinedType(ps, _) =>
- for (p <- ps) getParts(p)
- case AnnotatedType(_, t, _) =>
- getParts(t)
- case ExistentialType(_, t) =>
- getParts(t)
- case PolyType(_, t) =>
- getParts(t)
- case _ =>
+ /** Creates a tree will produce a tag of the requested flavor.
+ * An EmptyTree is returned if materialization fails.
+ */
+ private def tagOfType(pre: Type, tp: Type, tagClass: Symbol): SearchResult = {
+ def success(arg: Tree) = {
+ def isMacroException(msg: String): Boolean =
+ // [Eugene] very unreliable, ask Hubert about a better way
+ msg contains "exception during macro expansion"
+
+ def processMacroExpansionError(pos: Position, msg: String): SearchResult = {
+ // giving up and reporting all macro exceptions regardless of their source
+ // this might lead to an avalanche of errors if one of your implicit macros misbehaves
+ if (isMacroException(msg)) context.error(pos, msg)
+ failure(arg, "failed to typecheck the materialized tag: %n%s".format(msg), pos)
}
- }
- getParts(tp)
-
- val buf = new ListBuffer[Infos]
- for ((clazz, pre) <- partMap) {
- if (pre != NoType) {
- val companion = clazz.companionModule
- companion.moduleClass match {
- case mc: ModuleClassSymbol =>
- buf += (mc.implicitMembers map (im =>
- new ImplicitInfo(im.name, singleType(pre, companion), im)))
- case _ =>
- }
+ try {
+ val tree1 = typedPos(pos.focus)(arg)
+ if (context.hasErrors) processMacroExpansionError(context.errBuffer.head.errPos, context.errBuffer.head.errMsg)
+ else new SearchResult(tree1, EmptyTreeTypeSubstituter)
+ } catch {
+ case ex: TypeError =>
+ processMacroExpansionError(ex.pos, ex.msg)
}
}
- //println("companion implicits of "+tp+" = "+buf.toList) // DEBUG
- buf.toList
- }
-
-*/
- /** The implicits made available by type `pt`.
- * These are all implicits found in companion objects of classes C
- * such that some part of `tp` has C as one of its superclasses.
- */
- private def implicitsOfExpectedType: Infoss = implicitsCache get pt match {
- case Some(implicitInfoss) =>
- incCounter(implicitCacheHits)
- implicitInfoss
- case None =>
- incCounter(implicitCacheMisses)
- val start = startTimer(subtypeETNanos)
-// val implicitInfoss = companionImplicits(pt)
- val implicitInfoss1 = companionImplicitMap(pt).valuesIterator.toList
-// val is1 = implicitInfoss.flatten.toSet
-// val is2 = implicitInfoss1.flatten.toSet
-// for (i <- is1)
-// if (!(is2 contains i)) println("!!! implicit infos of "+pt+" differ, new does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
-// for (i <- is2)
-// if (!(is1 contains i)) println("!!! implicit infos of "+pt+" differ, old does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1)
- stopTimer(subtypeETNanos, start)
- implicitsCache(pt) = implicitInfoss1
- if (implicitsCache.size >= sizeLimit)
- implicitsCache -= implicitsCache.keysIterator.next
- implicitInfoss1
+ val prefix = (
+ // ClassTags are not path-dependent, so their materializer doesn't care about prefixes
+ if (tagClass eq ClassTagClass) EmptyTree
+ else pre match {
+ case SingleType(prePre, preSym) =>
+ gen.mkAttributedRef(prePre, preSym) setType pre
+ // necessary only to compile typetags used inside the Universe cake
+ case ThisType(thisSym) =>
+ gen.mkAttributedThis(thisSym)
+ case _ =>
+ // if ``pre'' is not a PDT, e.g. if someone wrote
+ // implicitly[scala.reflect.macros.Context#TypeTag[Int]]
+ // then we need to fail, because we don't know the prefix to use during type reification
+ // upd. we also need to fail silently, because this is a very common situation
+ // e.g. quite often we're searching for BaseUniverse#TypeTag, e.g. for a type tag in any universe
+ // so that if we find one, we could convert it to whatever universe we need by the means of the `in` method
+ // if no tag is found in scope, we end up here, where we ask someone to materialize the tag for us
+ // however, since the original search was about a tag with no particular prefix, we cannot proceed
+ // this situation happens very often, so emitting an error message here (even if only for -Xlog-implicits) would be too much
+ //return failure(tp, "tag error: unsupported prefix type %s (%s)".format(pre, pre.kind))
+ return SearchFailure
+ }
+ )
+ // todo. migrate hardcoded materialization in Implicits to corresponding implicit macros
+ var materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List()))
+ if (settings.XlogImplicits.value) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer))
+ if (context.macrosEnabled) success(materializer)
+ // don't call `failure` here. if macros are disabled, we just fail silently
+ // otherwise -Xlog-implicits will spam the long with zillions of "macros are disabled"
+ // this is ugly but temporary, since all this code will be removed once I fix implicit macros
+ else SearchFailure
}
+ private val ManifestSymbols = Set[Symbol](PartialManifestClass, FullManifestClass, OptManifestClass)
+
/** Creates a tree that calls the relevant factory method in object
- * reflect.Manifest for type 'tp'. An EmptyTree is returned if
+ * scala.reflect.Manifest for type 'tp'. An EmptyTree is returned if
* no manifest is found. todo: make this instantiate take type params as well?
*/
- private def manifestOfType(tp: Type, full: Boolean): SearchResult = {
+ private def manifestOfType(tp: Type, flavor: Symbol): SearchResult = {
+ val full = flavor == FullManifestClass
+ val opt = flavor == OptManifestClass
- /** Creates a tree that calls the factory method called constructor in object reflect.Manifest */
+ /** Creates a tree that calls the factory method called constructor in object scala.reflect.Manifest */
def manifestFactoryCall(constructor: String, tparg: Type, args: Tree*): Tree =
if (args contains EmptyTree) EmptyTree
else typedPos(tree.pos.focus) {
- Apply(
- TypeApply(
- Select(gen.mkAttributedRef(if (full) FullManifestModule else PartialManifestModule), constructor),
- List(TypeTree(tparg))
- ),
- args.toList
- )
+ val mani = gen.mkManifestFactoryCall(full, constructor, tparg, args.toList)
+ if (settings.debug.value) println("generated manifest: "+mani) // DEBUG
+ mani
}
/** Creates a tree representing one of the singleton manifests.*/
@@ -1073,21 +1271,23 @@ trait Implicits {
/** Re-wraps a type in a manifest before calling inferImplicit on the result */
def findManifest(tp: Type, manifestClass: Symbol = if (full) FullManifestClass else PartialManifestClass) =
- inferImplicit(tree, appliedType(manifestClass.typeConstructor, List(tp)), true, false, context).tree
+ inferImplicit(tree, appliedType(manifestClass, tp), true, false, context).tree
def findSubManifest(tp: Type) = findManifest(tp, if (full) FullManifestClass else OptManifestClass)
def mot(tp0: Type, from: List[Symbol], to: List[Type]): SearchResult = {
implicit def wrapResult(tree: Tree): SearchResult =
if (tree == EmptyTree) SearchFailure else new SearchResult(tree, if (from.isEmpty) EmptyTreeTypeSubstituter else new TreeTypeSubstituter(from, to))
- val tp1 = tp0.normalize
+ val tp1 = tp0.dealias
tp1 match {
- case ThisType(_) | SingleType(_, _) if !(tp1 exists {tp => tp.typeSymbol.isExistentiallyBound}) => // can't generate a reference to a value that's abstracted over by an existential
- manifestFactoryCall("singleType", tp, gen.mkAttributedQualifier(tp1))
+ case ThisType(_) | SingleType(_, _) =>
+ // can't generate a reference to a value that's abstracted over by an existential
+ if (containsExistential(tp1)) EmptyTree
+ else manifestFactoryCall("singleType", tp, gen.mkAttributedQualifier(tp1))
case ConstantType(value) =>
- manifestOfType(tp1.deconst, full)
+ manifestOfType(tp1.deconst, FullManifestClass)
case TypeRef(pre, sym, args) =>
- if (isValueClass(sym) || isPhantomClass(sym)) {
+ if (isPrimitiveValueClass(sym) || isPhantomClass(sym)) {
findSingletonManifest(sym.name.toString)
} else if (sym == ObjectClass || sym == AnyRefClass) {
findSingletonManifest("Object")
@@ -1098,11 +1298,8 @@ trait Implicits {
} else if (sym.isClass) {
val classarg0 = gen.mkClassOf(tp1)
val classarg = tp match {
- case ExistentialType(_, _) =>
- TypeApply(Select(classarg0, Any_asInstanceOf),
- List(TypeTree(appliedType(ClassClass.typeConstructor, List(tp)))))
- case _ =>
- classarg0
+ case _: ExistentialType => gen.mkCast(classarg0, ClassType(tp))
+ case _ => classarg0
}
val suffix = classarg :: (args map findSubManifest)
manifestFactoryCall(
@@ -1122,73 +1319,128 @@ trait Implicits {
// a manifest should have been found by normal searchImplicit
EmptyTree
}
- case RefinedType(parents, decls) =>
+ case RefinedType(parents, decls) => // !!! not yet: if !full || decls.isEmpty =>
// refinement is not generated yet
if (hasLength(parents, 1)) findManifest(parents.head)
else if (full) manifestFactoryCall("intersectionType", tp, parents map findSubManifest: _*)
- else mot(erasure.erasure.intersectionDominator(parents), from, to)
+ else mot(erasure.intersectionDominator(parents), from, to)
case ExistentialType(tparams, result) =>
mot(tp1.skolemizeExistential, from, to)
case _ =>
EmptyTree
- }
+ }
}
- mot(tp, Nil, Nil)
+ if (full) {
+ val tagInScope = resolveTypeTag(pos, NoType, tp, concrete = true, allowMaterialization = false)
+ if (tagInScope.isEmpty) mot(tp, Nil, Nil)
+ else {
+ if (ReflectRuntimeUniverse == NoSymbol) {
+ // todo. write a test for this
+ context.error(pos,
+ sm"""to create a manifest here, it is necessary to interoperate with the type tag `$tagInScope` in scope.
+ |however typetag -> manifest conversion requires Scala reflection, which is not present on the classpath.
+ |to proceed put scala-reflect.jar on your compilation classpath and recompile.""")
+ return SearchFailure
+ }
+ if (resolveClassTag(pos, tp, allowMaterialization = true) == EmptyTree) {
+ context.error(pos,
+ sm"""to create a manifest here, it is necessary to interoperate with the type tag `$tagInScope` in scope.
+ |however typetag -> manifest conversion requires a class tag for the corresponding type to be present.
+ |to proceed add a class tag to the type `$tp` (e.g. by introducing a context bound) and recompile.""")
+ return SearchFailure
+ }
+ val cm = typed(Ident(ReflectRuntimeCurrentMirror))
+ val interop = gen.mkMethodCall(ReflectRuntimeUniverse, nme.typeTagToManifest, List(tp), List(cm, tagInScope))
+ wrapResult(interop)
+ }
+ } else {
+ mot(tp, Nil, Nil) match {
+ case SearchFailure if opt => wrapResult(gen.mkAttributedRef(NoManifest))
+ case result => result
+ }
+ }
}
def wrapResult(tree: Tree): SearchResult =
if (tree == EmptyTree) SearchFailure else new SearchResult(tree, EmptyTreeTypeSubstituter)
- /** The manifest corresponding to type `pt`, provided `pt` is an instance of Manifest.
+ /** Materializes implicits of predefined types (currently, manifests and tags).
+ * Will be replaced by implicit macros once we fix them.
*/
- private def implicitManifestOrOfExpectedType(pt: Type): SearchResult = pt.dealias match {
- case TypeRef(_, sym, args) if ManifestSymbols(sym) =>
- manifestOfType(args.head, sym == FullManifestClass) match {
- case SearchFailure if sym == OptManifestClass => wrapResult(gen.mkAttributedRef(NoManifest))
- case result => result
- }
- case tp at TypeRef(_, sym, _) if sym.isAbstractType =>
- implicitManifestOrOfExpectedType(tp.bounds.lo) // #3977: use tp (==pt.dealias), not pt (if pt is a type alias, pt.bounds.lo == pt)
- case _ =>
- searchImplicit(implicitsOfExpectedType, false)
- // shouldn't we pass `pt` to `implicitsOfExpectedType`, or is the recursive case
- // for an abstract type really only meant for manifests?
- }
+ private def materializeImplicit(pt: Type): SearchResult =
+ pt match {
+ case TypeRef(_, sym, _) if sym.isAbstractType =>
+ materializeImplicit(pt.dealias.bounds.lo) // #3977: use pt.dealias, not pt (if pt is a type alias, pt.bounds.lo == pt)
+ case pt @ TypeRef(pre, sym, arg :: Nil) =>
+ sym match {
+ case sym if ManifestSymbols(sym) => manifestOfType(arg, sym)
+ case sym if TagSymbols(sym) => tagOfType(pre, arg, sym)
+ // as of late ClassManifest is an alias of ClassTag
+ // hence we need to take extra care when performing dealiasing
+ // because it might destroy the flavor of the manifest requested by the user
+ // when the user wants ClassManifest[T], we should invoke `manifestOfType` not `tagOfType`
+ // hence we don't do `pt.dealias` as we did before, but rather do `pt.betaReduce`
+ // unlike `dealias`, `betaReduce` performs at most one step of dealiasing
+ // while dealias pops all aliases in a single invocation
+ case sym if sym.isAliasType => materializeImplicit(pt.betaReduce)
+ case _ => SearchFailure
+ }
+ case _ =>
+ SearchFailure
+ }
/** The result of the implicit search:
* First search implicits visible in current context.
* If that fails, search implicits in expected type `pt`.
- * If that fails, and `pt` is an instance of Manifest, try to construct a manifest.
+ *
+ * todo. the following lines should be deleted after we migrate delegate tag materialization to implicit macros
+ * If that fails, and `pt` is an instance of a ClassTag, try to construct a class tag.
+ * If that fails, and `pt` is an instance of a TypeTag, try to construct a type tag.
+ * If that fails, and `pt` is an instance of a ClassManifest, try to construct a class manifest.
+ * If that fails, and `pt` is an instance of a Manifest, try to construct a manifest.
+ * If that fails, and `pt` is an instance of a OptManifest, try to construct a class manifest and return NoManifest if construction fails.
* If all fails return SearchFailure
*/
def bestImplicit: SearchResult = {
- val failstart = startTimer(inscopeFailNanos)
- val succstart = startTimer(inscopeSucceedNanos)
+ val failstart = if (Statistics.canEnable) Statistics.startTimer(inscopeFailNanos) else null
+ val succstart = if (Statistics.canEnable) Statistics.startTimer(inscopeSucceedNanos) else null
var result = searchImplicit(context.implicitss, true)
- if (result == SearchFailure) {
- stopTimer(inscopeFailNanos, failstart)
+ if (result.isFailure) {
+ if (Statistics.canEnable) Statistics.stopTimer(inscopeFailNanos, failstart)
} else {
- stopTimer(inscopeSucceedNanos, succstart)
- incCounter(inscopeImplicitHits)
+ if (Statistics.canEnable) Statistics.stopTimer(inscopeSucceedNanos, succstart)
+ if (Statistics.canEnable) Statistics.incCounter(inscopeImplicitHits)
}
- if (result == SearchFailure) {
- val failstart = startTimer(oftypeFailNanos)
- val succstart = startTimer(oftypeSucceedNanos)
+ if (result.isFailure) {
+ val previousErrs = context.flushAndReturnBuffer()
+ val failstart = if (Statistics.canEnable) Statistics.startTimer(oftypeFailNanos) else null
+ val succstart = if (Statistics.canEnable) Statistics.startTimer(oftypeSucceedNanos) else null
+
+ val wasAmbigious = result.isAmbiguousFailure // SI-6667, never search companions after an ambiguous error in in-scope implicits
+ result = materializeImplicit(pt)
- result = implicitManifestOrOfExpectedType(pt)
+ // `materializeImplicit` does some preprocessing for `pt`
+ // is it only meant for manifests/tags or we need to do the same for `implicitsOfExpectedType`?
+ if (result.isFailure) result = searchImplicit(implicitsOfExpectedType, false)
- if (result == SearchFailure) {
- stopTimer(oftypeFailNanos, failstart)
+ if (result.isFailure) {
+ context.updateBuffer(previousErrs)
+ if (Statistics.canEnable) Statistics.stopTimer(oftypeFailNanos, failstart)
} else {
- stopTimer(oftypeSucceedNanos, succstart)
- incCounter(oftypeImplicitHits)
+ if (wasAmbigious && settings.lint.value)
+ reporter.warning(tree.pos,
+ "Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667. \n" +
+ previousErrs.map(_.errMsg).mkString("\n"))
+
+ if (Statistics.canEnable) Statistics.stopTimer(oftypeSucceedNanos, succstart)
+ if (Statistics.canEnable) Statistics.incCounter(oftypeImplicitHits)
}
}
- if (result == SearchFailure && settings.debug.value)
+ if (result.isFailure && settings.debug.value)
log("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType)
result
@@ -1198,15 +1450,47 @@ trait Implicits {
def search(iss: Infoss, isLocal: Boolean) = applicableInfos(iss, isLocal).values
(search(context.implicitss, true) ++ search(implicitsOfExpectedType, false)).toList.filter(_.tree ne EmptyTree)
}
+
+ // find all implicits for some type that contains type variables
+ // collect the constraints that result from typing each implicit
+ def allImplicitsPoly(tvars: List[TypeVar]): List[(SearchResult, List[TypeConstraint])] = {
+ def resetTVars() = tvars foreach { _.constr = new TypeConstraint }
+
+ def eligibleInfos(iss: Infoss, isLocal: Boolean) = {
+ val eligible = new ImplicitComputation(iss, isLocal).eligible
+ eligible.toList.flatMap {
+ (ii: ImplicitInfo) =>
+ // each ImplicitInfo contributes a distinct set of constraints (generated indirectly by typedImplicit)
+ // thus, start each type var off with a fresh for every typedImplicit
+ resetTVars()
+ // any previous errors should not affect us now
+ context.flushBuffer()
+
+ val res = typedImplicit(ii, ptChecked = false, isLocal)
+ if (res.tree ne EmptyTree) List((res, tvars map (_.constr)))
+ else Nil
+ }
+ }
+ eligibleInfos(context.implicitss, isLocal = true) ++ eligibleInfos(implicitsOfExpectedType, isLocal = false)
+ }
}
object ImplicitNotFoundMsg {
- def unapply(sym: Symbol): Option[(Message)] = sym.implicitNotFoundMsg map (m => (new Message(sym, m)))
+ def unapply(sym: Symbol): Option[(Message)] = sym.implicitNotFoundMsg match {
+ case Some(m) => Some(new Message(sym, m))
+ case None if sym.isAliasType =>
+ // perform exactly one step of dealiasing
+ // this is necessary because ClassManifests are now aliased to ClassTags
+ // but we don't want to intimidate users by showing unrelated error messages
+ unapply(sym.info.resultType.betaReduce.typeSymbolDirect)
+ case _ => None
+ }
+
// check the message's syntax: should be a string literal that may contain occurrences of the string "${X}",
// where `X` refers to a type parameter of `sym`
def check(sym: Symbol): Option[String] =
sym.getAnnotation(ImplicitNotFoundClass).flatMap(_.stringArg(0) match {
- case Some(m) => new Message(sym, m) validate
+ case Some(m) => new Message(sym, m).validate
case None => Some("Missing argument `msg` on implicitNotFound annotation.")
})
@@ -1225,13 +1509,13 @@ trait Implicits {
interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc?
def validate: Option[String] = {
- import scala.util.matching.Regex; import collection.breakOut
+ import scala.util.matching.Regex; import scala.collection.breakOut
// is there a shorter way to avoid the intermediate toList?
val refs = """\$\{([^}]+)\}""".r.findAllIn(msg).matchData.map(_ group 1).toSet
val decls = typeParamNames.toSet
(refs &~ decls) match {
- case s if s isEmpty => None
+ case s if s.isEmpty => None
case unboundNames =>
val singular = unboundNames.size == 1
Some("The type parameter"+( if(singular) " " else "s " )+ unboundNames.mkString(", ") +
@@ -1242,5 +1526,38 @@ trait Implicits {
}
}
}
+
+object ImplicitsStats {
+
+ import scala.reflect.internal.TypesStats._
+
+ val rawTypeImpl = Statistics.newSubCounter (" of which in implicits", rawTypeCount)
+ val subtypeImpl = Statistics.newSubCounter(" of which in implicit", subtypeCount)
+ val findMemberImpl = Statistics.newSubCounter(" of which in implicit", findMemberCount)
+ val subtypeAppInfos = Statistics.newSubCounter(" of which in app impl", subtypeCount)
+ val subtypeImprovCount = Statistics.newSubCounter(" of which in improves", subtypeCount)
+ val implicitSearchCount = Statistics.newCounter ("#implicit searches", "typer")
+ val triedImplicits = Statistics.newSubCounter(" #tried", implicitSearchCount)
+ val plausiblyCompatibleImplicits
+ = Statistics.newSubCounter(" #plausibly compatible", implicitSearchCount)
+ val matchingImplicits = Statistics.newSubCounter(" #matching", implicitSearchCount)
+ val typedImplicits = Statistics.newSubCounter(" #typed", implicitSearchCount)
+ val foundImplicits = Statistics.newSubCounter(" #found", implicitSearchCount)
+ val improvesCount = Statistics.newSubCounter("implicit improves tests", implicitSearchCount)
+ val improvesCachedCount = Statistics.newSubCounter("#implicit improves cached ", implicitSearchCount)
+ val inscopeImplicitHits = Statistics.newSubCounter("#implicit inscope hits", implicitSearchCount)
+ val oftypeImplicitHits = Statistics.newSubCounter("#implicit oftype hits ", implicitSearchCount)
+ val implicitNanos = Statistics.newSubTimer ("time spent in implicits", typerNanos)
+ val inscopeSucceedNanos = Statistics.newSubTimer (" successful in scope", typerNanos)
+ val inscopeFailNanos = Statistics.newSubTimer (" failed in scope", typerNanos)
+ val oftypeSucceedNanos = Statistics.newSubTimer (" successful of type", typerNanos)
+ val oftypeFailNanos = Statistics.newSubTimer (" failed of type", typerNanos)
+ val subtypeETNanos = Statistics.newSubTimer (" assembling parts", typerNanos)
+ val matchesPtNanos = Statistics.newSubTimer (" matchesPT", typerNanos)
+ val implicitCacheAccs = Statistics.newCounter ("implicit cache accesses", "typer")
+ val implicitCacheHits = Statistics.newSubCounter("implicit cache hits", implicitCacheAccs)
+}
+
+// only used when -Xdivergence211 is turned off
class DivergentImplicit extends Exception
object DivergentImplicit extends DivergentImplicit
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 8e2a815..55e0a95 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -9,7 +9,6 @@ package typechecker
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
import scala.util.control.ControlThrowable
-import scala.tools.util.StringOps.{ countAsString, countElementsAsString }
import symtab.Flags._
import scala.annotation.tailrec
@@ -18,7 +17,7 @@ import scala.annotation.tailrec
* @author Martin Odersky
* @version 1.0
*/
-trait Infer {
+trait Infer extends Checkable {
self: Analyzer =>
import global._
@@ -45,11 +44,101 @@ trait Infer {
case formal => formal
} else formals
if (isVarArgTypes(formals1) && (removeRepeated || formals.length != nargs)) {
- val ft = formals1.last.normalize.typeArgs.head
+ val ft = formals1.last.dealiasWiden.typeArgs.head
formals1.init ::: (for (i <- List.range(formals1.length - 1, nargs)) yield ft)
} else formals1
}
+ /** Returns `(formals, formalsExpanded)` where `formalsExpanded` are the expected types
+ * for the `nbSubPats` sub-patterns of an extractor pattern, of which the corresponding
+ * unapply[Seq] call is assumed to have result type `resTp`.
+ *
+ * `formals` are the formal types before expanding a potential repeated parameter (must come last in `formals`, if at all)
+ *
+ * @param nbSubPats The number of arguments to the extractor pattern
+ * @param effectiveNbSubPats `nbSubPats`, unless there is one sub-pattern which, after unwrapping
+ * bind patterns, is a Tuple pattern, in which case it is the number of
+ * elements. Used to issue warnings about binding a `TupleN` to a single value.
+ * @throws TypeError when the unapply[Seq] definition is ill-typed
+ * @returns (null, null) when the expected number of sub-patterns cannot be satisfied by the given extractor
+ *
+ * This is the spec currently implemented -- TODO: update it.
+ *
+ * 8.1.8 ExtractorPatterns
+ *
+ * An extractor pattern x(p1, ..., pn) where n ≥ 0 is of the same syntactic form as a constructor pattern.
+ * However, instead of a case class, the stable identifier x denotes an object which has a member method named unapply or unapplySeq that matches the pattern.
+ *
+ * An `unapply` method with result type `R` in an object `x` matches the
+ * pattern `x(p_1, ..., p_n)` if it takes exactly one argument and, either:
+ * - `n = 0` and `R =:= Boolean`, or
+ * - `n = 1` and `R <:< Option[T]`, for some type `T`.
+ * The argument pattern `p1` is typed in turn with expected type `T`.
+ * - Or, `n > 1` and `R <:< Option[Product_n[T_1, ..., T_n]]`, for some
+ * types `T_1, ..., T_n`. The argument patterns `p_1, ..., p_n` are
+ * typed with expected types `T_1, ..., T_n`.
+ *
+ * An `unapplySeq` method in an object `x` matches the pattern `x(p_1, ..., p_n)`
+ * if it takes exactly one argument and its result type is of the form `Option[S]`,
+ * where either:
+ * - `S` is a subtype of `Seq[U]` for some element type `U`, (set `m = 0`)
+ * - or `S` is a `ProductX[T_1, ..., T_m]` and `T_m <: Seq[U]` (`m <= n`).
+ *
+ * The argument patterns `p_1, ..., p_n` are typed with expected types
+ * `T_1, ..., T_m, U, ..., U`. Here, `U` is repeated `n-m` times.
+ *
+ */
+ def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int,
+ unappSym: Symbol, effectiveNbSubPats: Int): (List[Type], List[Type]) = {
+ val isUnapplySeq = unappSym.name == nme.unapplySeq
+ val booleanExtractor = resTp.typeSymbolDirect == BooleanClass
+
+ def seqToRepeatedChecked(tp: Type) = {
+ val toRepeated = seqToRepeated(tp)
+ if (tp eq toRepeated) throw new TypeError("(the last tuple-component of) the result type of an unapplySeq must be a Seq[_]")
+ else toRepeated
+ }
+
+ // empty list --> error, otherwise length == 1
+ lazy val optionArgs = resTp.baseType(OptionClass).typeArgs
+ // empty list --> not a ProductN, otherwise product element types
+ def productArgs = getProductArgs(optionArgs.head)
+
+ val formals =
+ // convert Seq[T] to the special repeated argument type
+ // so below we can use formalTypes to expand formals to correspond to the number of actuals
+ if (isUnapplySeq) {
+ if (optionArgs.nonEmpty)
+ productArgs match {
+ case Nil => List(seqToRepeatedChecked(optionArgs.head))
+ case normalTps :+ seqTp => normalTps :+ seqToRepeatedChecked(seqTp)
+ }
+ else throw new TypeError(s"result type $resTp of unapplySeq defined in ${unappSym.fullLocationString} does not conform to Option[_]")
+ } else {
+ if (booleanExtractor && nbSubPats == 0) Nil
+ else if (optionArgs.nonEmpty)
+ if (nbSubPats == 1) {
+ val productArity = productArgs.size
+ if (settings.lint.value && productArity > 1 && productArity != effectiveNbSubPats)
+ global.currentUnit.warning(pos,
+ s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}")
+ optionArgs
+ }
+ // TODO: update spec to reflect we allow any ProductN, not just TupleN
+ else productArgs
+ else
+ throw new TypeError(s"result type $resTp of unapply defined in ${unappSym.fullLocationString} does not conform to Option[_] or Boolean")
+ }
+
+ // for unapplySeq, replace last vararg by as many instances as required by nbSubPats
+ val formalsExpanded =
+ if (isUnapplySeq && formals.nonEmpty) formalTypes(formals, nbSubPats)
+ else formals
+
+ if (formalsExpanded.lengthCompare(nbSubPats) != 0) (null, null)
+ else (formals, formalsExpanded)
+ }
+
def actualTypes(actuals: List[Type], nformals: Int): List[Type] =
if (nformals == 1 && !hasLength(actuals, 1))
List(if (actuals.isEmpty) UnitClass.tpe else tupleType(actuals))
@@ -68,7 +157,7 @@ trait Infer {
*/
def freshVar(tparam: Symbol): TypeVar = TypeVar(tparam)
- private class NoInstance(msg: String) extends Throwable(msg) with ControlThrowable { }
+ class NoInstance(msg: String) extends Throwable(msg) with ControlThrowable { }
private class DeferredNoInstance(getmsg: () => String) extends NoInstance("") {
override def getMessage(): String = getmsg()
}
@@ -84,7 +173,7 @@ trait Infer {
def apply(tp: Type): Type = tp match {
case WildcardType | BoundedWildcardType(_) | NoType =>
throw new NoInstance("undetermined type")
- case tv @ TypeVar(origin, constr) =>
+ case tv @ TypeVar(origin, constr) if !tv.untouchable =>
if (constr.inst == NoType) {
throw new DeferredNoInstance(() =>
"no unique instantiation of type variable " + origin + " could be found")
@@ -139,6 +228,9 @@ trait Infer {
def solvedTypes(tvars: List[TypeVar], tparams: List[Symbol],
variances: List[Int], upper: Boolean, depth: Int): List[Type] = {
+ if (tvars.nonEmpty)
+ printInference("[solve types] solving for " + tparams.map(_.name).mkString(", ") + " in " + tvars.mkString(", "))
+
if (!solve(tvars, tparams, variances, upper, depth)) {
// no panic, it's good enough to just guess a solution, we'll find out
// later whether it works. *ZAP* @M danger, Will Robinson! this means
@@ -156,7 +248,7 @@ trait Infer {
// such as T <: T gets completed. See #360
tvar.constr.inst = ErrorType
else
- assert(false, tvar.origin+" at "+tvar.origin.typeSymbol.owner)
+ abort(tvar.origin+" at "+tvar.origin.typeSymbol.owner)
}
tvars map instantiate
}
@@ -175,26 +267,38 @@ trait Infer {
def normalize(tp: Type): Type = tp match {
case mt @ MethodType(params, restpe) if mt.isImplicit =>
normalize(restpe)
- case mt @ MethodType(params, restpe) if !restpe.isDependent =>
- functionType(params map (_.tpe), normalize(restpe))
+ case mt @ MethodType(_, restpe) if !mt.isDependentMethodType =>
+ functionType(mt.paramTypes, normalize(restpe))
case NullaryMethodType(restpe) =>
normalize(restpe)
case ExistentialType(tparams, qtpe) =>
- ExistentialType(tparams, normalize(qtpe))
+ newExistentialType(tparams, normalize(qtpe))
case tp1 =>
tp1 // @MAT aliases already handled by subtyping
}
- private val stdErrorClass = RootClass.newErrorClass(tpnme.ERROR)
- private val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR)
+ private lazy val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR)
+ private lazy val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR)
/** The context-dependent inferencer part */
- class Inferencer(context: Context) {
+ class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
+ import InferErrorGen._
+
/* -- Error Messages --------------------------------------------------- */
def setError[T <: Tree](tree: T): T = {
- def name = newTermName("<error: " + tree.symbol + ">")
- def errorClass = if (context.reportGeneralErrors) context.owner.newErrorClass(name.toTypeName) else stdErrorClass
- def errorValue = if (context.reportGeneralErrors) context.owner.newErrorValue(name) else stdErrorValue
+ debuglog("set error: "+ tree)
+ // this breaks -Ydebug pretty radically
+ // if (settings.debug.value) { // DEBUG
+ // println("set error: "+tree);
+ // throw new Error()
+ // }
+ def name = {
+ val sym = tree.symbol
+ val nameStr = try sym.toString catch { case _: CyclicReference => sym.nameString }
+ newTermName(s"<error: $nameStr>")
+ }
+ def errorClass = if (context.reportErrors) context.owner.newErrorClass(name.toTypeName) else stdErrorClass
+ def errorValue = if (context.reportErrors) context.owner.newErrorValue(name) else stdErrorValue
def errorSym = if (tree.isType) errorClass else errorValue
if (tree.hasSymbol)
@@ -203,35 +307,20 @@ trait Infer {
tree setType ErrorType
}
- def error(pos: Position, msg: String) {
- context.error(pos, msg)
- }
+ def getContext = context
- def errorTree(tree: Tree, msg: String): Tree = {
- if (!tree.isErroneous) error(tree.pos, msg)
- setError(tree)
- }
+ def issue(err: AbsTypeError): Unit = context.issue(err)
- def typeError(pos: Position, found: Type, req: Type) {
- if (!found.isErroneous && !req.isErroneous) {
- error(pos, withAddendum(pos)(typeErrorMsg(found, req)))
-
- if (settings.explaintypes.value)
- explainTypes(found, req)
- }
- }
-
- def typeErrorMsg(found: Type, req: Type) = {
- def isPossiblyMissingArgs = (found.resultApprox ne found) && isWeaklyCompatible(found.resultApprox, req)
- def missingArgsMsg = if (isPossiblyMissingArgs) "\n possible cause: missing arguments for method or constructor" else ""
+ def isPossiblyMissingArgs(found: Type, req: Type) = (
+ false
+ /** However it is that this condition is expected to imply
+ * "is possibly missing args", it is too weak. It is
+ * better to say nothing than to offer misleading guesses.
- "type mismatch" + foundReqMsg(found, req) + missingArgsMsg
- }
-
- def typeErrorTree(tree: Tree, found: Type, req: Type): Tree = {
- typeError(tree.pos, found, req)
- setError(tree)
- }
+ (found.resultApprox ne found)
+ && isWeaklyCompatible(found.resultApprox, req)
+ */
+ )
def explainTypes(tp1: Type, tp2: Type) =
withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2))
@@ -248,13 +337,12 @@ trait Infer {
if (sym.isError) {
tree setSymbol sym setType ErrorType
} else {
- val topClass = context.owner.toplevelClass
- if (context.unit != null)
- context.unit.depends += sym.toplevelClass
+ val topClass = context.owner.enclosingTopLevelClass
+ if (context.unit.exists)
+ context.unit.depends += sym.enclosingTopLevelClass
var sym1 = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super]))
// Console.println("check acc " + (sym, sym1) + ":" + (sym.tpe, sym1.tpe) + " from " + pre);//DEBUG
-
if (sym1 == NoSymbol && sym.isJavaDefined && context.unit.isJava) // don't try to second guess Java; see #4402
sym1 = sym
@@ -264,13 +352,13 @@ trait Infer {
Console.println(tree)
Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType))
}
- new AccessError(tree, sym, pre,
+ ErrorUtils.issueTypeError(AccessError(tree, sym, pre, context.enclClass.owner,
if (settings.check.isDefault)
analyzer.lastAccessCheckDetails
else
ptBlock("because of an internal error (no accessible symbol)",
"sym.ownerChain" -> sym.ownerChain,
- "underlying(sym)" -> underlying(sym),
+ "underlyingSymbol(sym)" -> underlyingSymbol(sym),
"pre" -> pre,
"site" -> site,
"tree" -> tree,
@@ -278,172 +366,63 @@ trait Infer {
"context.owner" -> context.owner,
"context.outer.enclClass.owner" -> context.outer.enclClass.owner
)
- )
+ ))(context)
+ setError(tree)
}
else {
- if(sym1.isTerm)
+ if (context.owner.isTermMacro && (sym1 hasFlag LOCKED)) {
+ // we must not let CyclicReference to be thrown from sym1.info
+ // because that would mark sym1 erroneous, which it is not
+ // but if it's a true CyclicReference then macro def will report it
+ // see comments to TypeSigError for an explanation of this special case
+ // [Eugene] is there a better way?
+ val dummy = new TypeCompleter { val tree = EmptyTree; override def complete(sym: Symbol) {} }
+ throw CyclicReference(sym1, dummy)
+ }
+
+ if (sym1.isTerm)
sym1.cookJavaRawInfo() // xform java rawtypes into existentials
- var owntype = try{
- pre.memberType(sym1)
- } catch {
- case ex: MalformedType =>
- if (settings.debug.value) ex.printStackTrace
- val sym2 = underlying(sym1)
- val itype = pre.memberType(sym2)
- new AccessError(tree, sym, pre,
+ val owntype = {
+ try pre.memberType(sym1)
+ catch {
+ case ex: MalformedType =>
+ if (settings.debug.value) ex.printStackTrace
+ val sym2 = underlyingSymbol(sym1)
+ val itype = pre.memberType(sym2)
+ ErrorUtils.issueTypeError(
+ AccessError(tree, sym, pre, context.enclClass.owner,
"\n because its instance type "+itype+
(if ("malformed type: "+itype.toString==ex.msg) " is malformed"
- else " contains a "+ex.msg)).emit()
- ErrorType
- }
- if (pre.isInstanceOf[SuperType])
- owntype = owntype.substSuper(pre, site.symbol.thisType)
- tree setSymbol sym1 setType owntype
- }
- }
-
- /** Capturing the overlap between isPlausiblyCompatible and normSubType.
- * This is a faithful translation of the code which was there, but it
- * seems likely the methods are intended to be even more similar than
- * they are: perhaps someone more familiar with the intentional distinctions
- * can examine the now much smaller concrete implementations below.
- */
-/*
- abstract class CompatibilityChecker {
- def resultTypeCheck(restpe: Type, arg: Type): Boolean
- def argumentCheck(arg: Type, param: Type): Boolean
- def lastChanceCheck(tp: Type, pt: Type): Boolean
-
- final def mtcheck(tp: MethodType, pt: TypeRef): Boolean = {
- val MethodType(params, restpe) = tp
- val TypeRef(pre, sym, args) = pt
-
- if (sym.isAliasType) apply(tp, pt.normalize)
- else if (sym.isAbstractType) apply(tp, pt.bounds.lo)
- else {
- val len = args.length - 1
- hasLength(params, len) &&
- sym == FunctionClass(len) && {
- val ps = params.iterator
- val as = args.iterator
- while (ps.hasNext && as.hasNext) {
- if (!argumentCheck(as.next, ps.next.tpe))
- return false
+ else " contains a "+ex.msg)))(context)
+ ErrorType
}
- ps.isEmpty && as.hasNext && {
- val lastArg = as.next
- as.isEmpty && resultTypeCheck(restpe, lastArg)
+ }
+ tree setSymbol sym1 setType {
+ pre match {
+ case _: SuperType => owntype map (tp => if (tp eq pre) site.symbol.thisType else tp)
+ case _ => owntype
}
}
}
}
- def apply(tp: Type, pt: Type): Boolean = tp match {
- case mt @ MethodType(_, restpe) =>
- if (mt.isImplicit)
- apply(restpe, pt)
- else pt match {
- case tr: TypeRef => mtcheck(mt, tr)
- case _ => lastChanceCheck(tp, pt)
- }
- case NullaryMethodType(restpe) => apply(restpe, pt)
- case PolyType(_, restpe) => apply(restpe, pt)
- case ExistentialType(_, qtpe) => apply(qtpe, pt)
- case _ => argumentCheck(tp, pt)
- }
- }
-
- object isPlausiblyCompatible extends CompatibilityChecker {
- def resultTypeCheck(restpe: Type, arg: Type) = isPlausiblyCompatible(restpe, arg)
- def argumentCheck(arg: Type, param: Type) = isPlausiblySubType(arg, param)
- def lastChanceCheck(tp: Type, pt: Type) = false
- }
- object normSubType extends CompatibilityChecker {
- def resultTypeCheck(restpe: Type, arg: Type) = normSubType(restpe, arg)
- def argumentCheck(arg: Type, param: Type) = arg <:< param
- def lastChanceCheck(tp: Type, pt: Type) = tp <:< pt
-
- override def apply(tp: Type, pt: Type): Boolean = tp match {
- case ExistentialType(_, _) => normalize(tp) <:< pt
- case _ => super.apply(tp, pt)
- }
- }
-*/
- def isPlausiblyCompatible(tp: Type, pt: Type) = checkCompatibility(true, tp, pt)
- def normSubType(tp: Type, pt: Type) = checkCompatibility(false, tp, pt)
-
- @tailrec private def checkCompatibility(fast: Boolean, tp: Type, pt: Type): Boolean = tp match {
- case mt @ MethodType(params, restpe) =>
- if (mt.isImplicit)
- checkCompatibility(fast, restpe, pt)
- else pt match {
- case tr @ TypeRef(pre, sym, args) =>
-
- if (sym.isAliasType) checkCompatibility(fast, tp, pt.normalize)
- else if (sym.isAbstractType) checkCompatibility(fast, tp, pt.bounds.lo)
- else {
- val len = args.length - 1
- hasLength(params, len) &&
- sym == FunctionClass(len) && {
- var ps = params
- var as = args
- if (fast) {
- while (ps.nonEmpty && as.nonEmpty) {
- if (!isPlausiblySubType(as.head, ps.head.tpe))
- return false
- ps = ps.tail
- as = as.tail
- }
- } else {
- while (ps.nonEmpty && as.nonEmpty) {
- if (!(as.head <:< ps.head.tpe))
- return false
- ps = ps.tail
- as = as.tail
- }
- }
- ps.isEmpty && as.nonEmpty && {
- val lastArg = as.head
- as.tail.isEmpty && checkCompatibility(fast, restpe, lastArg)
- }
- }
- }
-
- case _ => if (fast) false else tp <:< pt
- }
- case NullaryMethodType(restpe) => checkCompatibility(fast, restpe, pt)
- case PolyType(_, restpe) => checkCompatibility(fast, restpe, pt)
- case ExistentialType(_, qtpe) => if (fast) checkCompatibility(fast, qtpe, pt) else normalize(tp) <:< pt // is !fast case needed??
- case _ => if (fast) isPlausiblySubType(tp, pt) else tp <:< pt
- }
-
- /** This expresses more cleanly in the negative: there's a linear path
- * to a final true or false.
+ /** "Compatible" means conforming after conversions.
+ * "Raising to a thunk" is not implicit; therefore, for purposes of applicability and
+ * specificity, an arg type `A` is considered compatible with cbn formal parameter type `=>A`.
+ * For this behavior, the type `pt` must have cbn params preserved; for instance, `formalTypes(removeByName = false)`.
+ *
+ * `isAsSpecific` no longer prefers A by testing applicability to A for both m(A) and m(=>A)
+ * since that induces a tie between m(=>A) and m(=>A,B*) [SI-3761]
*/
- private def isPlausiblySubType(tp1: Type, tp2: Type) = !isImpossibleSubType(tp1, tp2)
- private def isImpossibleSubType(tp1: Type, tp2: Type) = tp1.normalize.widen match {
- case tr1 @ TypeRef(_, sym1, _) =>
- // If the lhs is an abstract type, we can't rule out a subtype
- // relationship because we don't know what it is.
- !sym1.isAbstractType && (tp2.normalize.widen match {
- case TypeRef(_, sym2, _) =>
- sym1.isClass &&
- sym2.isClass &&
- !(sym1 isSubClass sym2) &&
- !(sym1 isNumericSubClass sym2)
- case RefinedType(parents, decls) =>
- decls.nonEmpty &&
- tr1.member(decls.head.name) == NoSymbol
- case _ => false
- })
- case _ => false
- }
-
- def isCompatible(tp: Type, pt: Type): Boolean = {
+ private def isCompatible(tp: Type, pt: Type): Boolean = {
+ def isCompatibleByName(tp: Type, pt: Type): Boolean = pt match {
+ case TypeRef(_, ByNameParamClass, List(res)) if !isByNameParamType(tp) => isCompatible(tp, res)
+ case _ => false
+ }
val tp1 = normalize(tp)
- (tp1 weak_<:< pt) || isCoercible(tp1, pt)
+ (tp1 weak_<:< pt) || isCoercible(tp1, pt) || isCompatibleByName(tp, pt)
}
def isCompatibleArgs(tps: List[Type], pts: List[Type]) =
(tps corresponds pts)(isCompatible)
@@ -456,6 +435,19 @@ trait Infer {
/** Like weakly compatible but don't apply any implicit conversions yet.
* Used when comparing the result type of a method with its prototype.
+ *
+ * [Martin] I think Infer is also created by Erasure, with the default
+ * implementation of isCoercible
+ * [Paulp] (Assuming the above must refer to my comment on isCoercible)
+ * Nope, I examined every occurrence of Inferencer in trunk. It
+ * appears twice as a self-type, once at its definition, and once
+ * where it is instantiated in Typers. There are no others.
+ *
+ % ack -A0 -B0 --no-filename '\bInferencer\b' src
+ self: Inferencer =>
+ self: Inferencer =>
+ class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
+ val infer = new Inferencer(context0) {
*/
def isConservativelyCompatible(tp: Type, pt: Type): Boolean =
context.withImplicitsDisabled(isWeaklyCompatible(tp, pt))
@@ -474,10 +466,7 @@ trait Infer {
def makeFullyDefined(tp: Type): Type = {
val tparams = new ListBuffer[Symbol]
def addTypeParam(bounds: TypeBounds): Type = {
- val tparam =
- context.owner.newAbstractType(context.tree.pos.focus, newTypeName("_"+tparams.size))
- .setFlag(EXISTENTIAL)
- .setInfo(bounds)
+ val tparam = context.owner.newExistential(newTypeName("_"+tparams.size), context.tree.pos.focus) setInfo bounds
tparams += tparam
tparam.tpe
}
@@ -501,7 +490,7 @@ trait Infer {
* @param pt ...
* @return ...
*/
- private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean = false): List[Type] = {
+ private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean = false): (List[Type], List[TypeVar]) = {
val tvars = tparams map freshVar
val instResTp = restpe.instantiateTypeParams(tparams, tvars)
if ( if (useWeaklyCompatible) isWeaklyCompatible(instResTp, pt) else isCompatible(instResTp, pt) ) {
@@ -516,12 +505,12 @@ trait Infer {
restpe
}
//println("try to solve "+tvars+" "+tparams)
- solvedTypes(tvars, tparams, tparams map varianceInType(varianceType),
- false, lubDepth(List(restpe, pt)))
+ (solvedTypes(tvars, tparams, tparams map varianceInType(varianceType),
+ false, lubDepth(List(restpe, pt))), tvars)
} catch {
- case ex: NoInstance => null
+ case ex: NoInstance => (null, null)
}
- } else null
+ } else (null, null)
}
/** Return inferred proto-type arguments of function, given
@@ -541,7 +530,7 @@ trait Infer {
*/
def protoTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type,
pt: Type): List[Type] = {
- /** Map type variable to its instance, or, if `variance' is covariant/contravariant,
+ /** Map type variable to its instance, or, if `variance` is covariant/contravariant,
* to its upper/lower bound */
def instantiateToBound(tvar: TypeVar, variance: Int): Type = try {
lazy val hiBounds = tvar.constr.hiBounds
@@ -569,17 +558,21 @@ trait Infer {
}
val tvars = tparams map freshVar
if (isConservativelyCompatible(restpe.instantiateTypeParams(tparams, tvars), pt))
- (tparams, tvars).zipped map ((tparam, tvar) =>
+ map2(tparams, tvars)((tparam, tvar) =>
instantiateToBound(tvar, varianceInTypes(formals)(tparam)))
else
tvars map (tvar => WildcardType)
}
+ /** [Martin] Can someone comment this please? I have no idea what it's for
+ * and the code is not exactly readable.
+ */
object AdjustedTypeArgs {
- type Result = collection.mutable.LinkedHashMap[Symbol, Option[Type]]
+ val Result = scala.collection.mutable.LinkedHashMap
+ type Result = scala.collection.mutable.LinkedHashMap[Symbol, Option[Type]]
def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists(
- m collect {case (p, Some(a)) => (p, a)} unzip ))
+ (m collect {case (p, Some(a)) => (p, a)}).unzip ))
object Undets {
def unapply(m: Result): Some[(List[Symbol], List[Type], List[Symbol])] = Some(toLists{
@@ -597,9 +590,9 @@ trait Infer {
})
}
- @inline private def toLists[A1, A2](pxs: (Iterable[A1], Iterable[A2])) = (pxs._1.toList, pxs._2.toList)
- @inline private def toLists[A1, A2, A3](pxs: (Iterable[A1], Iterable[A2], Iterable[A3])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList)
- @inline private def toLists[A1, A2, A3, A4](pxs: (Iterable[A1], Iterable[A2], Iterable[A3], Iterable[A4])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList, pxs._4.toList)
+ private def toLists[A1, A2](pxs: (Iterable[A1], Iterable[A2])) = (pxs._1.toList, pxs._2.toList)
+ private def toLists[A1, A2, A3](pxs: (Iterable[A1], Iterable[A2], Iterable[A3])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList)
+ private def toLists[A1, A2, A3, A4](pxs: (Iterable[A1], Iterable[A2], Iterable[A3], Iterable[A4])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList, pxs._4.toList)
}
/** Retract arguments that were inferred to Nothing because inference failed. Correct types for repeated params.
@@ -615,31 +608,36 @@ trait Infer {
*
* Rewrite for repeated param types: Map T* entries to Seq[T].
* @return map from tparams to inferred arg, if inference was successful, tparams that map to None are considered left undetermined
- * type parameters that are inferred as `scala.Nothing' and that are not covariant in <code>restpe</code> are taken to be undetermined
+ * type parameters that are inferred as `scala.Nothing` and that are not covariant in <code>restpe</code> are taken to be undetermined
*/
- def adjustTypeArgs(tparams: List[Symbol], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = {
- @inline def notCovariantIn(tparam: Symbol, restpe: Type) =
- (varianceInType(restpe)(tparam) & COVARIANT) == 0 // tparam occurred non-covariantly (in invariant or contravariant position)
-
- (tparams, targs).zipped.map{ (tparam, targ) =>
- if (targ.typeSymbol == NothingClass &&
- (restpe.isWildcard || notCovariantIn(tparam, restpe))) {
- tparam -> None
- } else {
- tparam -> Some(
- if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass)
+ def adjustTypeArgs(tparams: List[Symbol], tvars: List[TypeVar], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = {
+ val buf = AdjustedTypeArgs.Result.newBuilder[Symbol, Option[Type]]
+
+ foreach3(tparams, tvars, targs) { (tparam, tvar, targ) =>
+ val retract = (
+ targ.typeSymbol == NothingClass // only retract Nothings
+ && (restpe.isWildcard || (varianceInType(restpe)(tparam) & COVARIANT) == 0) // don't retract covariant occurrences
+ )
+
+ // checks opt.virtPatmat directly so one need not run under -Xexperimental to use virtpatmat
+ buf += ((tparam,
+ if (retract) None
+ else Some(
+ if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass)
else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass)
- else if (targ.typeSymbol.isModuleClass) targ // this infers Foo.type instead of "object Foo" (see also widenIfNecessary)
+ // this infers Foo.type instead of "object Foo" (see also widenIfNecessary)
+ else if (targ.typeSymbol.isModuleClass || ((opt.experimental || opt.virtPatmat) && tvar.constr.avoidWiden)) targ
else targ.widen
)
- }
- }(collection.breakOut)
+ ))
+ }
+ buf.result
}
/** Return inferred type arguments, given type parameters, formal parameters,
* argument types, result type and expected result type.
* If this is not possible, throw a <code>NoInstance</code> exception.
- * Undetermined type arguments are represented by `definitions.NothingClass.tpe'.
+ * Undetermined type arguments are represented by `definitions.NothingClass.tpe`.
* No check that inferred parameters conform to their bounds is made here.
*
* @param tparams the type parameters of the method
@@ -658,18 +656,6 @@ trait Infer {
throw new NoInstance("parameter lists differ in length")
val restpeInst = restpe.instantiateTypeParams(tparams, tvars)
- printInference(
- ptBlock("methTypeArgs",
- "tparams" -> tparams,
- "formals" -> formals,
- "restpe" -> restpe,
- "restpeInst" -> restpeInst,
- "argtpes" -> argtpes,
- "pt" -> pt,
- "tvars" -> tvars,
- "constraints" -> tvars.map(_.constr)
- )
- )
// first check if typevars can be fully defined from the expected type.
// The return value isn't used so I'm making it obvious that this side
@@ -692,7 +678,7 @@ trait Infer {
if (!isFullyDefined(tvar)) tvar.constr.inst = NoType
// Then define remaining type variables from argument types.
- (argtpes, formals).zipped map { (argtpe, formal) =>
+ map2(argtpes, formals) { (argtpe, formal) =>
val tp1 = argtpe.deconst.instantiateTypeParams(tparams, tvars)
val pt1 = formal.instantiateTypeParams(tparams, tvars)
@@ -707,17 +693,7 @@ trait Infer {
tvars, tparams, tparams map varianceInTypes(formals),
false, lubDepth(formals) max lubDepth(argtpes)
)
- val result = adjustTypeArgs(tparams, targs, restpe)
-
- printInference(
- ptBlock("methTypeArgs result",
- "tvars" -> tvars,
- "constraints" -> tvars.map(_.constr),
- "targs" -> targs,
- "adjusted type args" -> result
- )
- )
- result
+ adjustTypeArgs(tparams, tvars, targs, restpe)
}
private[typechecker] def followApply(tp: Type): Type = tp match {
@@ -725,7 +701,13 @@ trait Infer {
val restp1 = followApply(restp)
if (restp1 eq restp) tp else restp1
case _ =>
- val appmeth = tp.nonPrivateMember(nme.apply) filter (_.isPublic)
+ val appmeth = {
+ //OPT cut down on #closures by special casing non-overloaded case
+ // was: tp.nonPrivateMember(nme.apply) filter (_.isPublic)
+ val result = tp.nonPrivateMember(nme.apply)
+ if ((result eq NoSymbol) || !result.isOverloaded && result.isPublic) result
+ else result filter (_.isPublic)
+ }
if (appmeth == NoSymbol) tp
else OverloadedType(tp, appmeth.alternatives)
}
@@ -756,7 +738,8 @@ trait Infer {
val argtpes1 = argtpes map {
case NamedType(name, tp) => // a named argument
var res = tp
- val pos = params.indexWhere(p => (p.name == name || deprecatedName(p) == Some(name)) && !p.isSynthetic)
+ val pos = params.indexWhere(p => paramMatchesName(p, name) && !p.isSynthetic)
+
if (pos == -1) {
if (positionalAllowed) { // treat assignment as positional argument
argPos(index) = index
@@ -766,7 +749,8 @@ trait Infer {
} else if (argPos.contains(pos)) { // parameter specified twice
namesOK = false
} else {
- positionalAllowed = false
+ if (index != pos)
+ positionalAllowed = false
argPos(index) = pos
}
index += 1
@@ -796,8 +780,8 @@ trait Infer {
* @param undetparams ...
* @param ftpe the type of the function (often a MethodType)
* @param argtpes the argument types; a NamedType(name, tp) for named
- * arguments. For each NamedType, if `name' does not exist in `ftpe', that
- * type is set to `Unit', i.e. the corresponding argument is treated as
+ * arguments. For each NamedType, if `name` does not exist in `ftpe`, that
+ * type is set to `Unit`, i.e. the corresponding argument is treated as
* an assignment expression (@see checkNames).
* @param pt ...
* @return ...
@@ -809,14 +793,8 @@ trait Infer {
alts exists (alt => isApplicable(undetparams, pre.memberType(alt), argtpes0, pt))
case ExistentialType(tparams, qtpe) =>
isApplicable(undetparams, qtpe, argtpes0, pt)
- case MethodType(params, _) =>
- val formals0 = params map { param =>
- param.tpe match {
- case TypeRef(_, sym, List(tpe)) if sym isNonBottomSubClass CodeClass => tpe
- case tpe => tpe
- }
- }
- val formals = formalTypes(formals0, argtpes0.length)
+ case mt @ MethodType(params, _) =>
+ val formals = formalTypes(mt.paramTypes, argtpes0.length, removeByName = false)
def tryTupleApply: Boolean = {
// if 1 formal, 1 argtpe (a tuple), otherwise unmodified argtpes0
@@ -838,7 +816,7 @@ trait Infer {
try {
val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
// #2665: must use weak conformance, not regular one (follow the monomorphic case above)
- (exprTypeArgs(leftUndet, restpe.instantiateTypeParams(okparams, okargs), pt, useWeaklyCompatible = true) ne null) &&
+ (exprTypeArgs(leftUndet, restpe.instantiateTypeParams(okparams, okargs), pt, useWeaklyCompatible = true)._1 ne null) &&
isWithinBounds(NoPrefix, NoSymbol, okparams, okargs)
} catch {
case ex: NoInstance => false
@@ -863,13 +841,14 @@ trait Infer {
typesCompatible(reorderArgs(argtpes1, argPos))
)
}
- } else {
+ }
+ else {
// not enough arguments, check if applicable using defaults
val missing = missingParams[Type](argtpes0, params, {
case NamedType(name, _) => Some(name)
case _ => None
})._1
- if (missing forall (_.hasDefaultFlag)) {
+ if (missing forall (_.hasDefault)) {
// add defaults as named arguments
val argtpes1 = argtpes0 ::: (missing map (p => NamedType(p.name, p.tpe)))
isApplicable(undetparams, ftpe, argtpes1, pt)
@@ -880,33 +859,27 @@ trait Infer {
case NullaryMethodType(restpe) => // strip nullary method type, which used to be done by the polytype case below
isApplicable(undetparams, restpe, argtpes0, pt)
case PolyType(tparams, restpe) =>
- val tparams1 = cloneSymbols(tparams)
- isApplicable(tparams1 ::: undetparams, restpe.substSym(tparams, tparams1), argtpes0, pt)
+ createFromClonedSymbols(tparams, restpe)((tps1, restpe1) => isApplicable(tps1 ::: undetparams, restpe1, argtpes0, pt))
case ErrorType =>
true
case _ =>
false
}
- /** Todo: Try to make isApplicable always safe (i.e. not cause TypeErrors).
+ /**
+ * Todo: Try to make isApplicable always safe (i.e. not cause TypeErrors).
+ * The chance of TypeErrors should be reduced through context errors
*/
private[typechecker] def isApplicableSafe(undetparams: List[Symbol], ftpe: Type,
argtpes0: List[Type], pt: Type): Boolean = {
- val reportAmbiguousErrors = context.reportAmbiguousErrors
- context.reportAmbiguousErrors = false
- try {
- isApplicable(undetparams, ftpe, argtpes0, pt)
- } catch {
- case ex: TypeError =>
- try {
- isApplicable(undetparams, ftpe, argtpes0, WildcardType)
- } catch {
- case ex: TypeError =>
- false
- }
- } finally {
- context.reportAmbiguousErrors = reportAmbiguousErrors
- }
+ val silentContext = context.makeSilent(false)
+ val typer0 = newTyper(silentContext)
+ val res1 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, pt)
+ if (pt != WildcardType && silentContext.hasErrors) {
+ silentContext.flushBuffer()
+ val res2 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, WildcardType)
+ if (silentContext.hasErrors) false else res2
+ } else res1
}
/** Is type <code>ftpe1</code> strictly more specific than type <code>ftpe2</code>
@@ -927,8 +900,8 @@ trait Infer {
isAsSpecific(res, ftpe2)
case mt: MethodType if mt.isImplicit =>
isAsSpecific(ftpe1.resultType, ftpe2)
- case MethodType(params, _) if params nonEmpty =>
- var argtpes = params map (_.tpe)
+ case mt @ MethodType(params, _) if params.nonEmpty =>
+ var argtpes = mt.paramTypes
if (isVarArgsList(params) && isVarArgsList(ftpe2.params))
argtpes = argtpes map (argtpe =>
if (isRepeatedParamType(argtpe)) argtpe.typeArgs.head else argtpe)
@@ -937,8 +910,8 @@ trait Infer {
isAsSpecific(PolyType(tparams, res), ftpe2)
case PolyType(tparams, mt: MethodType) if mt.isImplicit =>
isAsSpecific(PolyType(tparams, mt.resultType), ftpe2)
- case PolyType(_, MethodType(params, _)) if params nonEmpty =>
- isApplicable(List(), ftpe2, params map (_.tpe), WildcardType)
+ case PolyType(_, (mt @ MethodType(params, _))) if params.nonEmpty =>
+ isApplicable(List(), ftpe2, mt.paramTypes, WildcardType)
// case NullaryMethodType(res) =>
// isAsSpecific(res, ftpe2)
case ErrorType =>
@@ -981,10 +954,13 @@ trait Infer {
/** Is sym1 (or its companion class in case it is a module) a subclass of
* sym2 (or its companion class in case it is a module)?
*/
- def isProperSubClassOrObject(sym1: Symbol, sym2: Symbol): Boolean =
- sym1 != sym2 && sym1 != NoSymbol && (sym1 isSubClass sym2) ||
- sym1.isModuleClass && isProperSubClassOrObject(sym1.linkedClassOfClass, sym2) ||
- sym2.isModuleClass && isProperSubClassOrObject(sym1, sym2.linkedClassOfClass)
+ def isProperSubClassOrObject(sym1: Symbol, sym2: Symbol): Boolean = (
+ (sym1 != sym2) && (sym1 != NoSymbol) && (
+ (sym1 isSubClass sym2)
+ || (sym1.isModuleClass && isProperSubClassOrObject(sym1.linkedClassOfClass, sym2))
+ || (sym2.isModuleClass && isProperSubClassOrObject(sym1, sym2.linkedClassOfClass))
+ )
+ )
/** is symbol `sym1` defined in a proper subclass of symbol `sym2`?
*/
@@ -1026,7 +1002,7 @@ trait Infer {
false
}
/*
- /** Is type `tpe1' a strictly better expression alternative than type `tpe2'?
+ /** Is type `tpe1` a strictly better expression alternative than type `tpe2`?
*/
def isStrictlyBetterExpr(tpe1: Type, tpe2: Type) = {
isMethod(tpe2) && !isMethod(tpe1) ||
@@ -1034,7 +1010,7 @@ trait Infer {
isStrictlyBetter(tpe1, tpe2)
}
- /** Is type `tpe1' a strictly better alternative than type `tpe2'?
+ /** Is type `tpe1` a strictly better alternative than type `tpe2`?
* non-methods are always strictly better than methods
* nullary methods are always strictly better than non-nullary
* if both are non-nullary methods, then tpe1 is strictly better than tpe2 if
@@ -1068,76 +1044,37 @@ trait Infer {
*/
/** error if arguments not within bounds. */
- def checkBounds(pos: Position, pre: Type, owner: Symbol,
- tparams: List[Symbol], targs: List[Type], prefix: String) = {
- //@M validate variances & bounds of targs wrt variances & bounds of tparams
- //@M TODO: better place to check this?
- //@M TODO: errors for getters & setters are reported separately
- val kindErrors = checkKindBounds(tparams, targs, pre, owner)
-
- if(!kindErrors.isEmpty) {
- error(pos,
- prefix + "kinds of the type arguments " + targs.mkString("(", ",", ")") +
- " do not conform to the expected kinds of the type parameters "+ tparams.mkString("(", ",", ")") + tparams.head.locationString+ "." +
- kindErrors.toList.mkString("\n", ", ", ""))
- } else if (!isWithinBounds(pre, owner, tparams, targs)) {
- if (!(targs exists (_.isErroneous)) && !(tparams exists (_.isErroneous))) {
- //val bounds = instantiatedBounds(pre, owner, tparams, targs)//DEBUG
- //println("bounds = "+bounds+", targs = "+targs+", targclasses = "+(targs map (_.getClass))+", parents = "+(targs map (_.parents)))
- //println(List.map2(bounds, targs)((bound, targ) => bound containsType targ))
- error(pos,
- prefix + "type arguments " + targs.mkString("[", ",", "]") +
- " do not conform to " + tparams.head.owner + "'s type parameter bounds " +
- (tparams map (_.defString)).mkString("[", ",", "]"))
- if (settings.explaintypes.value) {
- val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, targs).bounds)
- (targs, bounds).zipped foreach ((targ, bound) => explainTypes(bound.lo, targ))
- (targs, bounds).zipped foreach ((targ, bound) => explainTypes(targ, bound.hi))
- ()
- }
+ def checkBounds(tree: Tree, pre: Type, owner: Symbol,
+ tparams: List[Symbol], targs: List[Type], prefix: String): Boolean =
+ if ((targs exists (_.isErroneous)) || (tparams exists (_.isErroneous))) true
+ else {
+ //@M validate variances & bounds of targs wrt variances & bounds of tparams
+ //@M TODO: better place to check this?
+ //@M TODO: errors for getters & setters are reported separately
+ val kindErrors = checkKindBounds(tparams, targs, pre, owner)
+ kindErrors match {
+ case Nil =>
+ def notWithinBounds() = NotWithinBounds(tree, prefix, targs, tparams, Nil)
+ isWithinBounds(pre, owner, tparams, targs) || {notWithinBounds(); false}
+ case errors =>
+ def kindBoundErrors() = KindBoundErrors(tree, prefix, targs, tparams, errors)
+ (targs contains WildcardType) || {kindBoundErrors(); false}
}
}
- }
-
def checkKindBounds(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): List[String] = {
- // @M TODO this method is duplicated all over the place (varianceString)
- def varStr(s: Symbol): String =
- if (s.isCovariant) "covariant"
- else if (s.isContravariant) "contravariant"
- else "invariant";
-
- def qualify(a0: Symbol, b0: Symbol): String = if (a0.toString != b0.toString) "" else {
- if((a0 eq b0) || (a0.owner eq b0.owner)) ""
- else {
- var a = a0; var b = b0
- while (a.owner.name == b.owner.name) { a = a.owner; b = b.owner}
- if (a.locationString ne "") " (" + a.locationString.trim + ")" else ""
- }
+ checkKindBounds0(tparams, targs, pre, owner, true) map {
+ case (targ, tparam, kindErrors) =>
+ kindErrors.errorMessage(targ, tparam)
}
-
- val errors = checkKindBounds0(tparams, targs, pre, owner, true)
- val errorMessages = new ListBuffer[String]
- errors foreach {case (targ, tparam, arityMismatches, varianceMismatches, stricterBounds) => errorMessages +=
- (targ+"'s type parameters do not match "+tparam+"'s expected parameters: "+
- (for ((a, p) <- arityMismatches)
- yield a+qualify(a,p)+ " has "+countElementsAsString(a.typeParams.length, "type parameter")+", but "+
- p+qualify(p,a)+" has "+countAsString(p.typeParams.length)).toList.mkString(", ") +
- (for ((a, p) <- varianceMismatches)
- yield a+qualify(a,p)+ " is "+varStr(a)+", but "+
- p+qualify(p,a)+" is declared "+varStr(p)).toList.mkString(", ") +
- (for ((a, p) <- stricterBounds)
- yield a+qualify(a,p)+"'s bounds "+a.info+" are stricter than "+
- p+qualify(p,a)+"'s declared bounds "+p.info).toList.mkString(", "))
- }
- errorMessages.toList
}
- /** Substitute free type variables `undetparams' of polymorphic argument
- * expression `tree', given two prototypes `strictPt', and `lenientPt'.
- * `strictPt' is the first attempt prototype where type parameters
- * are left unchanged. `lenientPt' is the fall-back prototype where type
- * parameters are replaced by `WildcardType's. We try to instantiate
- * first to `strictPt' and then, if this fails, to `lenientPt'. If both
+
+ /** Substitute free type variables `undetparams` of polymorphic argument
+ * expression `tree`, given two prototypes `strictPt`, and `lenientPt`.
+ * `strictPt` is the first attempt prototype where type parameters
+ * are left unchanged. `lenientPt` is the fall-back prototype where type
+ * parameters are replaced by `WildcardType`s. We try to instantiate
+ * first to `strictPt` and then, if this fails, to `lenientPt`. If both
* attempts fail, an error is produced.
*/
def inferArgumentInstance(tree: Tree, undetparams: List[Symbol], strictPt: Type, lenientPt: Type) {
@@ -1150,10 +1087,10 @@ trait Infer {
"lenientPt" -> lenientPt
)
)
- var targs = exprTypeArgs(undetparams, tree.tpe, strictPt)
- if ((targs eq null) || !(tree.tpe.subst(undetparams, targs) <:< strictPt)) {
- targs = exprTypeArgs(undetparams, tree.tpe, lenientPt)
- }
+ var targs = exprTypeArgs(undetparams, tree.tpe, strictPt)._1
+ if ((targs eq null) || !(tree.tpe.subst(undetparams, targs) <:< strictPt))
+ targs = exprTypeArgs(undetparams, tree.tpe, lenientPt)._1
+
substExpr(tree, undetparams, targs, lenientPt)
printInference("[inferArgumentInstance] finished, targs = " + targs)
}
@@ -1165,21 +1102,23 @@ trait Infer {
*/
def inferExprInstance(tree: Tree, tparams: List[Symbol], pt: Type = WildcardType, treeTp0: Type = null, keepNothings: Boolean = true, useWeaklyCompatible: Boolean = false): List[Symbol] = {
val treeTp = if(treeTp0 eq null) tree.tpe else treeTp0 // can't refer to tree in default for treeTp0
+ val (targs, tvars) = exprTypeArgs(tparams, treeTp, pt, useWeaklyCompatible)
printInference(
ptBlock("inferExprInstance",
"tree" -> tree,
"tree.tpe"-> tree.tpe,
"tparams" -> tparams,
- "pt" -> pt
+ "pt" -> pt,
+ "targs" -> targs,
+ "tvars" -> tvars
)
)
- val targs = exprTypeArgs(tparams, treeTp, pt, useWeaklyCompatible)
if (keepNothings || (targs eq null)) { //@M: adjustTypeArgs fails if targs==null, neg/t0226
substExpr(tree, tparams, targs, pt)
List()
} else {
- val AdjustedTypeArgs.Undets(okParams, okArgs, leftUndet) = adjustTypeArgs(tparams, targs)
+ val AdjustedTypeArgs.Undets(okParams, okArgs, leftUndet) = adjustTypeArgs(tparams, tvars, targs)
printInference(
ptBlock("inferExprInstance/AdjustedTypeArgs",
"okParams" -> okParams,
@@ -1192,8 +1131,8 @@ trait Infer {
}
}
- /** Substitute free type variables `undetparams' of polymorphic argument
- * expression <code>tree</code> to `targs', Error if `targs' is null
+ /** Substitute free type variables `undetparams` of polymorphic argument
+ * expression `tree` to `targs`, Error if `targs` is null.
*
* @param tree ...
* @param undetparams ...
@@ -1204,10 +1143,10 @@ trait Infer {
targs: List[Type], pt: Type) {
if (targs eq null) {
if (!tree.tpe.isErroneous && !pt.isErroneous)
- error(tree.pos, "polymorphic expression cannot be instantiated to expected type" +
- foundReqMsg(polyType(undetparams, skipImplicit(tree.tpe)), pt))
+ PolymorphicExpressionInstantiationError(tree, undetparams, pt)
} else {
new TreeTypeSubstituter(undetparams, targs).traverse(tree)
+ notifyUndetparamsInferred(undetparams, targs)
}
}
@@ -1223,66 +1162,47 @@ trait Infer {
*/
def inferMethodInstance(fn: Tree, undetparams: List[Symbol],
args: List[Tree], pt0: Type): List[Symbol] = fn.tpe match {
- case MethodType(params0, _) =>
- printInference(
- ptBlock("inferMethodInstance",
- "fn" -> fn,
- "undetparams" -> undetparams,
- "args" -> args,
- "pt0" -> pt0
- )
- )
-
+ case mt @ MethodType(params0, _) =>
try {
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
- val formals = formalTypes(params0 map (_.tpe), args.length)
+ val formals = formalTypes(mt.paramTypes, args.length)
val argtpes = actualTypes(args map (x => elimAnonymousClass(x.tpe.deconst)), formals.length)
val restpe = fn.tpe.resultType(argtpes)
val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) =
methTypeArgs(undetparams, formals, restpe, argtpes, pt)
- checkBounds(fn.pos, NoPrefix, NoSymbol, undetparams, allargs, "inferred ")
- val treeSubst = new TreeTypeSubstituter(okparams, okargs)
- treeSubst traverseTrees fn :: args
-
- val result = leftUndet match {
- case Nil => Nil
- case xs =>
- // #3890
- val xs1 = treeSubst.typeSubst mapOver xs
- if (xs ne xs1)
- new TreeSymSubstTraverser(xs, xs1) traverseTrees fn :: args
-
- xs1
- }
- if (result.nonEmpty)
- printInference("inferMethodInstance, still undetermined: " + result)
-
- result
+ printInference("[infer method] solving for %s in %s based on (%s)%s (%s)".format(
+ undetparams.map(_.name).mkString(", "),
+ fn.tpe,
+ argtpes.mkString(", "),
+ restpe,
+ (okparams map (_.name), okargs).zipped.map(_ + "=" + _).mkString("solved: ", ", ", "")
+ ))
+
+ if (checkBounds(fn, NoPrefix, NoSymbol, undetparams, allargs, "inferred ")) {
+ val treeSubst = new TreeTypeSubstituter(okparams, okargs)
+ treeSubst traverseTrees fn :: args
+ notifyUndetparamsInferred(okparams, okargs)
+
+ leftUndet match {
+ case Nil => Nil
+ case xs =>
+ // #3890
+ val xs1 = treeSubst.typeMap mapOver xs
+ if (xs ne xs1)
+ new TreeSymSubstTraverser(xs, xs1) traverseTrees fn :: args
+
+ xs1
+ }
+ } else Nil
}
catch ifNoInstance { msg =>
- errorTree(fn, "no type parameters for " +
- applyErrorMsg(fn, " exist so that it can be applied to arguments ", args map (_.tpe.widen), WildcardType) +
- "\n --- because ---\n" + msg
- )
- Nil
+ NoMethodInstanceError(fn, args, msg); List()
}
}
- /** Type with all top-level occurrences of abstract types replaced by their bounds */
- def widen(tp: Type): Type = tp match { // @M don't normalize here (compiler loops on pos/bug1090.scala )
- case TypeRef(_, sym, _) if sym.isAbstractType =>
- widen(tp.bounds.hi)
- case TypeRef(_, sym, _) if sym.isAliasType =>
- widen(tp.normalize)
- case rtp @ RefinedType(parents, decls) =>
- copyRefinedType(rtp, parents mapConserve widen, decls)
- case AnnotatedType(_, underlying, _) =>
- widen(underlying)
- case _ =>
- tp
- }
+ def widen(tp: Type): Type = abstractTypesToBounds(tp)
/** Substitute free type variables <code>undetparams</code> of type constructor
* <code>tree</code> in pattern, given prototype <code>pt</code>.
@@ -1292,53 +1212,77 @@ trait Infer {
* @param pt the expected result type of the instance
*/
def inferConstructorInstance(tree: Tree, undetparams: List[Symbol], pt0: Type) {
- val pt = widen(pt0)
- //println("infer constr inst "+tree+"/"+undetparams+"/"+pt0)
- var restpe = tree.tpe.finalResultType
- var tvars = undetparams map freshVar
+ val pt = widen(pt0)
+ val ptparams = freeTypeParamsOfTerms(pt)
+ val ctorTp = tree.tpe
+ val resTp = ctorTp.finalResultType
- /** Compute type arguments for undetermined params and substitute them in given tree.
+ debuglog("infer constr inst "+ tree +"/"+ undetparams +"/ pt= "+ pt +" pt0= "+ pt0 +" resTp: "+ resTp)
+
+ /** Compute type arguments for undetermined params
*/
- def computeArgs =
- try {
- val targs = solvedTypes(tvars, undetparams, undetparams map varianceInType(restpe),
- true, lubDepth(List(restpe, pt)))
-// checkBounds(tree.pos, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
-// no checkBounds here. If we enable it, test bug602 fails.
- new TreeTypeSubstituter(undetparams, targs).traverse(tree)
- } catch {
- case ex: NoInstance =>
- errorTree(tree, "constructor of type " + restpe +
- " cannot be uniquely instantiated to expected type " + pt +
- "\n --- because ---\n" + ex.getMessage())
+ def inferFor(pt: Type): Option[List[Type]] = {
+ val tvars = undetparams map freshVar
+ val resTpV = resTp.instantiateTypeParams(undetparams, tvars)
+
+ if (resTpV <:< pt) {
+ try {
+ // debuglog("TVARS "+ (tvars map (_.constr)))
+ // look at the argument types of the primary constructor corresponding to the pattern
+ val variances =
+ if (ctorTp.paramTypes.isEmpty) undetparams map varianceInType(ctorTp)
+ else undetparams map varianceInTypes(ctorTp.paramTypes)
+ val targs = solvedTypes(tvars, undetparams, variances, true, lubDepth(List(resTp, pt)))
+ // checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
+ // no checkBounds here. If we enable it, test bug602 fails.
+ // TODO: reinstate checkBounds, return params that fail to meet their bounds to undetparams
+ Some(targs)
+ } catch ifNoInstance { msg =>
+ debuglog("NO INST "+ (tvars, tvars map (_.constr)))
+ NoConstructorInstanceError(tree, resTp, pt, msg)
+ None
+ }
+ } else {
+ debuglog("not a subtype: "+ resTpV +" </:< "+ pt)
+ None
}
- def instError = {
- if (settings.debug.value) Console.println("ici " + tree + " " + undetparams + " " + pt)
- if (settings.explaintypes.value) explainTypes(restpe.instantiateTypeParams(undetparams, tvars), pt)
- errorTree(tree, "constructor cannot be instantiated to expected type" +
- foundReqMsg(restpe, pt))
}
- if (restpe.instantiateTypeParams(undetparams, tvars) <:< pt) {
- computeArgs
- } else if (isFullyDefined(pt)) {
- if (settings.debug.value) log("infer constr " + tree + ":" + restpe + ", pt = " + pt)
- var ptparams = freeTypeParamsOfTerms.collect(pt)
- if (settings.debug.value) log("free type params = " + ptparams)
- val ptWithWildcards = pt.instantiateTypeParams(ptparams, ptparams map (ptparam => WildcardType))
- tvars = undetparams map freshVar
- if (restpe.instantiateTypeParams(undetparams, tvars) <:< ptWithWildcards) {
- computeArgs
- restpe = skipImplicit(tree.tpe.resultType)
- if (settings.debug.value) log("new tree = " + tree + ":" + restpe)
- val ptvars = ptparams map freshVar
- val pt1 = pt.instantiateTypeParams(ptparams, ptvars)
- if (isPopulated(restpe, pt1)) {
- ptvars foreach instantiateTypeVar
- } else { if (settings.debug.value) Console.println("no instance: "); instError }
- } else { if (settings.debug.value) Console.println("not a subtype " + restpe.instantiateTypeParams(undetparams, tvars) + " of " + ptWithWildcards); instError }
- } else { if (settings.debug.value) Console.println("not fully defined: " + pt); instError }
+
+ def inferForApproxPt =
+ if (isFullyDefined(pt)) {
+ inferFor(pt.instantiateTypeParams(ptparams, ptparams map (x => WildcardType))) flatMap { targs =>
+ val ctorTpInst = tree.tpe.instantiateTypeParams(undetparams, targs)
+ val resTpInst = skipImplicit(ctorTpInst.finalResultType)
+ val ptvars =
+ ptparams map {
+ // since instantiateTypeVar wants to modify the skolem that corresponds to the method's type parameter,
+ // and it uses the TypeVar's origin to locate it, deskolemize the existential skolem to the method tparam skolem
+ // (the existential skolem was created by adaptConstrPattern to introduce the type slack necessary to soundly deal with variant type parameters)
+ case skolem if skolem.isGADTSkolem => freshVar(skolem.deSkolemize.asInstanceOf[TypeSymbol])
+ case p => freshVar(p)
+ }
+
+ val ptV = pt.instantiateTypeParams(ptparams, ptvars)
+
+ if (isPopulated(resTpInst, ptV)) {
+ ptvars foreach instantiateTypeVar
+ debuglog("isPopulated "+ resTpInst +", "+ ptV +" vars= "+ ptvars)
+ Some(targs)
+ } else None
+ }
+ } else None
+
+ (inferFor(pt) orElse inferForApproxPt) map { targs =>
+ new TreeTypeSubstituter(undetparams, targs).traverse(tree)
+ notifyUndetparamsInferred(undetparams, targs)
+ } getOrElse {
+ debugwarn("failed inferConstructorInstance for "+ tree +" : "+ tree.tpe +" under "+ undetparams +" pt = "+ pt +(if(isFullyDefined(pt)) " (fully defined)" else " (not fully defined)"))
+ // if (settings.explaintypes.value) explainTypes(resTp.instantiateTypeParams(undetparams, tvars), pt)
+ ConstrInstantiationError(tree, resTp, pt)
+ }
}
+
def instBounds(tvar: TypeVar): (Type, Type) = {
val tparam = tvar.origin.typeSymbol
val instType = toOrigin(tvar.constr.inst)
@@ -1368,7 +1312,7 @@ trait Infer {
context.nextEnclosing(_.tree.isInstanceOf[CaseDef]).pushTypeBounds(tparam)
tparam setInfo tvar.constr.inst
tparam resetFlag DEFERRED
- if (settings.debug.value) log("new alias of " + tparam + " = " + tparam.info)
+ debuglog("new alias of " + tparam + " = " + tparam.info)
} else {
val (lo, hi) = instBounds(tvar)
if (lo <:< hi) {
@@ -1376,67 +1320,61 @@ trait Infer {
&& tparam != lo.typeSymbolDirect && tparam != hi.typeSymbolDirect) { // don't create illegal cycles
context.nextEnclosing(_.tree.isInstanceOf[CaseDef]).pushTypeBounds(tparam)
tparam setInfo TypeBounds(lo, hi)
- if (settings.debug.value) log("new bounds of " + tparam + " = " + tparam.info)
+ debuglog("new bounds of " + tparam + " = " + tparam.info)
} else {
- if (settings.debug.value) log("redundant: "+tparam+" "+tparam.info+"/"+lo+" "+hi)
+ debuglog("redundant: "+tparam+" "+tparam.info+"/"+lo+" "+hi)
}
} else {
- if (settings.debug.value) log("inconsistent: "+tparam+" "+lo+" "+hi)
+ debuglog("inconsistent: "+tparam+" "+lo+" "+hi)
}
}
}
- def checkCheckable(pos: Position, tp: Type, kind: String) {
- def patternWarning(tp0: Type, prefix: String) = {
- context.unit.uncheckedWarning(pos, prefix+tp0+" in type "+kind+tp+" is unchecked since it is eliminated by erasure")
- }
- def check(tp: Type, bound: List[Symbol]) {
- def isLocalBinding(sym: Symbol) =
- sym.isAbstractType &&
- ((bound contains sym) ||
- sym.name == tpnme.WILDCARD || {
- val e = context.scope.lookupEntry(sym.name)
- (e ne null) && e.sym == sym && !e.sym.isTypeParameterOrSkolem && e.owner == context.scope
- })
- tp match {
+ /** Does `tp` contain any types that cannot be checked at run-time (i.e., after erasure, will isInstanceOf[erased(tp)] imply conceptualIsInstanceOf[tp]?)
+ * we should find a way to ask erasure: hey, is `tp` going to make it through you with all of its isInstanceOf resolving powers intact?
+ * TODO: at the very least, reduce duplication wrt checkCheckable
+ */
+ def containsUnchecked(tp: Type): Boolean = {
+ def check(tp: Type, bound: List[Symbol]): Boolean = {
+ def isSurroundingTypeParam(sym: Symbol) = {
+ val e = context.scope.lookupEntry(sym.name)
+ ( (e ne null)
+ && (e.sym == sym )
+ && !e.sym.isTypeParameterOrSkolem
+ && (e.owner == context.scope)
+ )
+ }
+ def isLocalBinding(sym: Symbol) = (
+ sym.isAbstractType && (
+ (bound contains sym)
+ || (sym.name == tpnme.WILDCARD)
+ || isSurroundingTypeParam(sym)
+ )
+ )
+ tp.normalize match {
case SingleType(pre, _) =>
check(pre, bound)
- case TypeRef(pre, sym, args) =>
- if (sym.isAbstractType) {
- if (!isLocalBinding(sym)) patternWarning(tp, "abstract type ")
- } else if (sym.isAliasType) {
- check(tp.normalize, bound)
- } else if (sym == NothingClass || sym == NullClass || sym == AnyValClass) {
- error(pos, "type "+tp+" cannot be used in a type pattern or isInstanceOf test")
- } else {
- for (arg <- args) {
- if (sym == ArrayClass) check(arg, bound)
- else if (arg.typeArgs.nonEmpty) () // avoid spurious warnings with higher-kinded types
- else arg match {
- case TypeRef(_, sym, _) if isLocalBinding(sym) =>
- ;
- case _ =>
- patternWarning(arg, "non variable type-argument ")
- }
- }
- }
- check(pre, bound)
- case RefinedType(parents, decls) =>
- if (decls.isEmpty) for (p <- parents) check(p, bound)
- else patternWarning(tp, "refinement ")
+ case TypeRef(_, ArrayClass, arg :: _) =>
+ check(arg, bound)
+ case tp @ TypeRef(pre, sym, args) =>
+ ( (sym.isAbstractType && !isLocalBinding(sym))
+ || (args exists (x => !isLocalBinding(x.typeSymbol)))
+ || check(pre, bound)
+ )
+ // case RefinedType(_, decls) if decls.nonEmpty =>
+ // patternWarning(tp, "refinement ")
+ case RefinedType(parents, _) =>
+ parents exists (p => check(p, bound))
case ExistentialType(quantified, tp1) =>
check(tp1, bound ::: quantified)
- case ThisType(_) =>
- ;
- case NoPrefix =>
- ;
case _ =>
- patternWarning(tp, "type ")
+ false
}
}
- check(tp, List())
+ check(tp, Nil)
}
+
/** Type intersection of simple type tp1 with general type tp2.
* The result eliminates some redundancies.
*/
@@ -1454,10 +1392,10 @@ trait Infer {
}
}
- def inferTypedPattern(pos: Position, pattp: Type, pt0: Type): Type = {
+ def inferTypedPattern(tree0: Tree, pattp: Type, pt0: Type, canRemedy: Boolean): Type = {
val pt = widen(pt0)
- val ptparams = freeTypeParamsOfTerms.collect(pt)
- val tpparams = freeTypeParamsOfTerms.collect(pattp)
+ val ptparams = freeTypeParamsOfTerms(pt)
+ val tpparams = freeTypeParamsOfTerms(pattp)
def ptMatchesPattp = pt matchesPattern pattp.widen
def pattpMatchesPt = pattp matchesPattern pt
@@ -1466,14 +1404,15 @@ trait Infer {
* This is the case if the scrutinee has no unresolved type arguments
* and is a "final type", meaning final + invariant in all type parameters.
*/
- if (pt.isFinalType && ptparams.isEmpty && !ptMatchesPattp)
- error(pos, "scrutinee is incompatible with pattern type" + foundReqMsg(pattp, pt))
+ if (pt.isFinalType && ptparams.isEmpty && !ptMatchesPattp) {
+ IncompatibleScrutineeTypeError(tree0, pattp, pt)
+ return ErrorType
+ }
- checkCheckable(pos, pattp, "pattern ")
+ checkCheckable(tree0, pattp, pt, inPattern = true, canRemedy)
if (pattp <:< pt) ()
else {
- if (settings.debug.value)
- log("free type params (1) = " + tpparams)
+ debuglog("free type params (1) = " + tpparams)
var tvars = tpparams map freshVar
var tp = pattp.instantiateTypeParams(tpparams, tvars)
@@ -1483,8 +1422,7 @@ trait Infer {
tvars = tpparams map freshVar
tp = pattp.instantiateTypeParams(tpparams, tvars)
- if (settings.debug.value)
- log("free type params (2) = " + ptparams)
+ debuglog("free type params (2) = " + ptparams)
val ptvars = ptparams map freshVar
val pt1 = pt.instantiateTypeParams(ptparams, ptvars)
@@ -1494,8 +1432,8 @@ trait Infer {
if (isPopulated(tp, pt1) && isInstantiatable(tvars ++ ptvars) || pattpMatchesPt)
ptvars foreach instantiateTypeVar
else {
- error(pos, "pattern type is incompatible with expected type" + foundReqMsg(pattp, pt))
- return pattp
+ PatternTypeIncompatibleWithPtError1(tree0, pattp, pt)
+ return ErrorType
}
}
tvars foreach instantiateTypeVar
@@ -1510,14 +1448,14 @@ trait Infer {
def inferModulePattern(pat: Tree, pt: Type) =
if (!(pat.tpe <:< pt)) {
- val ptparams = freeTypeParamsOfTerms.collect(pt)
- if (settings.debug.value) log("free type params (2) = " + ptparams)
+ val ptparams = freeTypeParamsOfTerms(pt)
+ debuglog("free type params (2) = " + ptparams)
val ptvars = ptparams map freshVar
val pt1 = pt.instantiateTypeParams(ptparams, ptvars)
if (pat.tpe <:< pt1)
ptvars foreach instantiateTypeVar
else
- error(pat.pos, "pattern type is incompatible with expected type"+foundReqMsg(pat.tpe, pt))
+ PatternTypeIncompatibleWithPtError2(pat, pt1, pt)
}
object toOrigin extends TypeMap {
@@ -1527,42 +1465,37 @@ trait Infer {
}
}
- abstract class SymCollector extends TypeCollector(List[Symbol]()) {
- protected def includeCondition(sym: Symbol): Boolean
-
- def traverse(tp: Type) {
- tp.normalize match {
- case TypeRef(_, sym, _) =>
- if (includeCondition(sym) && !result.contains(sym)) result = sym :: result
- case _ =>
- }
- mapOver(tp)
- }
- }
-
object approximateAbstracts extends TypeMap {
- def apply(tp: Type): Type = tp.normalize match {
+ def apply(tp: Type): Type = tp.dealiasWiden match {
case TypeRef(pre, sym, _) if sym.isAbstractType => WildcardType
- case _ => mapOver(tp)
+ case _ => mapOver(tp)
}
}
- /** A traverser to collect type parameters referred to in a type
- */
- object freeTypeParamsOfTerms extends SymCollector {
- protected def includeCondition(sym: Symbol): Boolean =
- sym.isAbstractType && sym.owner.isTerm
- }
-
- /** A traverser to collect type parameters referred to in a type
+ /** Collects type parameters referred to in a type.
*/
- object freeTypeParametersNoSkolems extends SymCollector {
- protected def includeCondition(sym: Symbol): Boolean =
- sym.isTypeParameter && sym.owner.isTerm
- }
+ def freeTypeParamsOfTerms(tp: Type): List[Symbol] = {
+ // An inferred type which corresponds to an unknown type
+ // constructor creates a file/declaration order-dependent crasher
+ // situation, the behavior of which depends on the state at the
+ // time the typevar is created. Until we can deal with these
+ // properly, we can avoid it by ignoring type parameters which
+ // have type constructors amongst their bounds. See SI-4070.
+ def isFreeTypeParamOfTerm(sym: Symbol) = (
+ sym.isAbstractType
+ && sym.owner.isTerm
+ && !sym.info.bounds.exists(_.typeParams.nonEmpty)
+ )
- object typeRefs extends SymCollector {
- protected def includeCondition(sym: Symbol): Boolean = true
+ // Intentionally *not* using `Type#typeSymbol` here, which would normalize `tp`
+ // and collect symbols from the result type of any resulting `PolyType`s, which
+ // are not free type parameters of `tp`.
+ //
+ // Contrast with `isFreeTypeParamNoSkolem`.
+ val syms = tp collect {
+ case TypeRef(_, sym, _) if isFreeTypeParamOfTerm(sym) => sym
+ }
+ syms.distinct
}
/* -- Overload Resolution ---------------------------------------------- */
@@ -1582,14 +1515,14 @@ trait Infer {
/** Assign <code>tree</code> the symbol and type of the alternative which
* matches prototype <code>pt</code>, if it exists.
- * If several alternatives match `pt', take parameterless one.
- * If no alternative matches `pt', take the parameterless one anyway.
+ * If several alternatives match `pt`, take parameterless one.
+ * If no alternative matches `pt`, take the parameterless one anyway.
*/
- def inferExprAlternative(tree: Tree, pt: Type): Unit = tree.tpe match {
- case OverloadedType(pre, alts) => tryTwice {
- val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt))
- val secondTry = alts0.isEmpty
- val alts1 = if (secondTry) alts else alts0
+ def inferExprAlternative(tree: Tree, pt: Type) = tree.tpe match {
+ case OverloadedType(pre, alts) => tryTwice { isSecondTry =>
+ val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt))
+ val noAlternatives = alts0.isEmpty
+ val alts1 = if (noAlternatives) alts else alts0
//println("trying "+alts1+(alts1 map (_.tpe))+(alts1 map (_.locationString))+" for "+pt)
def improves(sym1: Symbol, sym2: Symbol): Boolean =
@@ -1616,13 +1549,16 @@ trait Infer {
case _ =>
}
}
- typeErrorTree(tree, tree.symbol.tpe, pt)
+ // todo: missing test case
+ NoBestExprAlternativeError(tree, pt, isSecondTry)
} else if (!competing.isEmpty) {
- if (secondTry) {
- typeErrorTree(tree, tree.symbol.tpe, pt)
- } else {
- if (!pt.isErroneous)
- context.ambiguousError(tree.pos, pre, best, competing.head, "expected type " + pt)
+ if (noAlternatives) NoBestExprAlternativeError(tree, pt, isSecondTry)
+ else if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt, isSecondTry)
+ else {
+ // SI-6912 Don't give up and leave an OverloadedType on the tree.
+ // Originally I wrote this as `if (secondTry) ... `, but `tryTwice` won't attempt the second try
+ // unless an error is issued. We're not issuing an error, in the assumption that it would be
+ // spurious in light of the erroneous expected type
setError(tree)
}
} else {
@@ -1634,8 +1570,62 @@ trait Infer {
}
}
+ @inline private def inSilentMode(context: Context)(expr: => Boolean): Boolean = {
+ val oldState = context.state
+ context.setBufferErrors()
+ val res = expr
+ val contextWithErrors = context.hasErrors
+ context.flushBuffer()
+ context.restoreState(oldState)
+ res && !contextWithErrors
+ }
+
+ // Checks against the name of the parameter and also any @deprecatedName.
+ private def paramMatchesName(param: Symbol, name: Name) =
+ param.name == name || param.deprecatedParamName.exists(_ == name)
+
+ // Check the first parameter list the same way.
+ private def methodMatchesName(method: Symbol, name: Name) = method.paramss match {
+ case ps :: _ => ps exists (p => paramMatchesName(p, name))
+ case _ => false
+ }
+
+ private def resolveOverloadedMethod(argtpes: List[Type], eligible: List[Symbol]) = {
+ // If there are any foo=bar style arguments, and any of the overloaded
+ // methods has a parameter named `foo`, then only those methods are considered.
+ val namesOfArgs = argtpes collect { case NamedType(name, _) => name }
+ val namesMatch = (
+ if (namesOfArgs.isEmpty) Nil
+ else eligible filter { m =>
+ namesOfArgs forall { name =>
+ methodMatchesName(m, name)
+ }
+ }
+ )
+
+ if (namesMatch.nonEmpty) namesMatch
+ else if (eligible.isEmpty || eligible.tail.isEmpty) eligible
+ else eligible filter { alt =>
+ // for functional values, the `apply` method might be overloaded
+ val mtypes = followApply(alt.tpe) match {
+ case OverloadedType(_, alts) => alts map (_.tpe)
+ case t => t :: Nil
+ }
+ // Drop those that use a default; keep those that use vararg/tupling conversion.
+ mtypes exists (t =>
+ !t.typeSymbol.hasDefaultFlag && (
+ compareLengths(t.params, argtpes) < 0 // tupling (*)
+ || hasExactlyNumParams(t, argtpes.length) // same nb or vararg
+ )
+ )
+ // (*) more arguments than parameters, but still applicable: tupling conversion works.
+ // todo: should not return "false" when paramTypes = (Unit) no argument is given
+ // (tupling would work)
+ }
+ }
+
/** Assign <code>tree</code> the type of an alternative which is applicable
- * to <code>argtpes</code>, and whose result type is compatible with `pt'.
+ * to <code>argtpes</code>, and whose result type is compatible with `pt`.
* If several applicable alternatives exist, drop the alternatives which use
* default arguments, then select the most specialized one.
* If no applicable alternative exists, and pt != WildcardType, try again
@@ -1645,69 +1635,42 @@ trait Infer {
* @param argtpes contains the argument types. If an argument is named, as
* "a = 3", the corresponding type is `NamedType("a", Int)'. If the name
* of some NamedType does not exist in an alternative's parameter names,
- * the type is replaces by `Unit', i.e. the argument is treated as an
+ * the type is replaces by `Unit`, i.e. the argument is treated as an
* assignment expression.
*/
def inferMethodAlternative(tree: Tree, undetparams: List[Symbol],
- argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false): Unit = tree.tpe match {
+ argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false, lastInferAttempt: Boolean = true): Unit = tree.tpe match {
case OverloadedType(pre, alts) =>
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
- tryTwice {
- if (settings.debug.value)
- log("infer method alt "+ tree.symbol +" with alternatives "+
- (alts map pre.memberType) +", argtpes = "+ argtpes +", pt = "+ pt)
-
- var allApplicable = alts filter (alt =>
- // TODO: this will need to be re-written once we substitute throwing exceptions
- // with generating error trees. We wrap this applicability in try/catch because of #4457.
- try {isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt)} catch {case _: TypeError => false})
-
- //log("applicable: "+ (allApplicable map pre.memberType))
-
- if (varArgsOnly)
- allApplicable = allApplicable filter (alt => isVarArgsList(alt.tpe.params))
-
- // if there are multiple, drop those that use a default
- // (keep those that use vararg / tupling conversion)
- val applicable =
- if (allApplicable.lengthCompare(1) <= 0) allApplicable
- else allApplicable filter (alt => {
- val mtypes = followApply(alt.tpe) match {
- // for functional values, the `apply' method might be overloaded
- case OverloadedType(_, alts) => alts map (_.tpe)
- case t => List(t)
- }
- mtypes exists (t =>
- compareLengths(t.params, argtpes) < 0 || // tupling (*)
- hasExactlyNumParams(t, argtpes.length) // same nb or vararg
- )
- // (*) more arguments than parameters, but still applicable: tuplig conversion works.
- // todo: should not return "false" when paramTypes = (Unit) no argument is given
- // (tupling would work)
- })
+ tryTwice { isSecondTry =>
+ debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
+
+ def varargsApplicableCheck(alt: Symbol) = !varArgsOnly || (
+ isVarArgsList(alt.tpe.params)
+ && (argtpes.size >= alt.tpe.params.size) // must be checked now due to SI-5859
+ )
+ val applicable = resolveOverloadedMethod(argtpes,
+ alts filter (alt =>
+ varargsApplicableCheck(alt)
+ && inSilentMode(context)(isApplicable(undetparams, followApply(pre memberType alt), argtpes, pt))
+ )
+ )
- def improves(sym1: Symbol, sym2: Symbol) =
-// util.trace("improve "+sym1+sym1.locationString+" on "+sym2+sym2.locationString)(
+ def improves(sym1: Symbol, sym2: Symbol) = {
+ // util.trace("improve "+sym1+sym1.locationString+" on "+sym2+sym2.locationString)
sym2 == NoSymbol || sym2.isError || sym2.hasAnnotation(BridgeClass) ||
isStrictlyMoreSpecific(followApply(pre.memberType(sym1)),
followApply(pre.memberType(sym2)), sym1, sym2)
+ }
val best = ((NoSymbol: Symbol) /: applicable) ((best, alt) =>
if (improves(alt, best)) alt else best)
val competing = applicable.dropWhile(alt => best == alt || improves(best, alt))
if (best == NoSymbol) {
- if (pt == WildcardType) {
- errorTree(tree, applyErrorMsg(tree, " cannot be applied to ", argtpes, pt))
- } else {
- inferMethodAlternative(tree, undetparams, argtpes, WildcardType)
- }
+ if (pt == WildcardType) NoBestMethodAlternativeError(tree, argtpes, pt, isSecondTry && lastInferAttempt)
+ else inferMethodAlternative(tree, undetparams, argtpes, WildcardType, lastInferAttempt = isSecondTry)
} else if (!competing.isEmpty) {
- if (!(argtpes exists (_.isErroneous)) && !pt.isErroneous)
- context.ambiguousError(tree.pos, pre, best, competing.head,
- "argument types " + argtpes.mkString("(", ",", ")") +
- (if (pt == WildcardType) "" else " and expected result type " + pt))
- setError(tree)
- ()
+ AmbiguousMethodAlternativeError(tree, pre, best, competing.head, argtpes, pt, isSecondTry && lastInferAttempt)
} else {
// checkNotShadowed(tree.pos, pre, best, applicable)
tree.setSymbol(best).setType(pre.memberType(best))
@@ -1721,25 +1684,42 @@ trait Infer {
*
* @param infer ...
*/
- def tryTwice(infer: => Unit) {
+ def tryTwice(infer: Boolean => Unit): Unit = {
if (context.implicitsEnabled) {
- val reportGeneralErrors = context.reportGeneralErrors
- context.reportGeneralErrors = false
- try context.withImplicitsDisabled(infer)
- catch {
+ val saved = context.state
+ var fallback = false
+ context.setBufferErrors()
+ // We cache the current buffer because it is impossible to
+ // distinguish errors that occurred before entering tryTwice
+ // and our first attempt in 'withImplicitsDisabled'. If the
+ // first attempt fails we try with implicits on *and* clean
+ // buffer but that would also flush any pre-tryTwice valid
+ // errors, hence some manual buffer tweaking is necessary.
+ val errorsToRestore = context.flushAndReturnBuffer()
+ try {
+ context.withImplicitsDisabled(infer(false))
+ if (context.hasErrors) {
+ fallback = true
+ context.restoreState(saved)
+ context.flushBuffer()
+ infer(true)
+ }
+ } catch {
case ex: CyclicReference => throw ex
- case ex: TypeError =>
- context.reportGeneralErrors = reportGeneralErrors
- infer
+ case ex: TypeError => // recoverable cyclic references
+ context.restoreState(saved)
+ if (!fallback) infer(true) else ()
+ } finally {
+ context.restoreState(saved)
+ context.updateBuffer(errorsToRestore)
}
- context.reportGeneralErrors = reportGeneralErrors
}
- else infer
+ else infer(true)
}
- /** Assign <code>tree</code> the type of unique polymorphic alternative
+ /** Assign <code>tree</code> the type of all polymorphic alternatives
* with <code>nparams</code> as the number of type parameters, if it exists.
- * If several or none such polymorphic alternatives exist, error.
+ * If no such polymorphic alternative exist, error.
*
* @param tree ...
* @param nparams ...
@@ -1747,13 +1727,13 @@ trait Infer {
def inferPolyAlternatives(tree: Tree, argtypes: List[Type]): Unit = {
val OverloadedType(pre, alts) = tree.tpe
val sym0 = tree.symbol filter (alt => sameLength(alt.typeParams, argtypes))
- def fail(msg: String): Unit = error(tree.pos, msg)
+ def fail(kind: PolyAlternativeErrorKind.ErrorType) =
+ PolyAlternativeError(tree, argtypes, sym0, kind)
- if (sym0 == NoSymbol) return fail(
+ if (sym0 == NoSymbol) return (
if (alts exists (_.typeParams.nonEmpty))
- "wrong number of type parameters for " + treeSymTypeMsg(tree)
- else treeSymTypeMsg(tree) + " does not take type parameters"
- )
+ fail(PolyAlternativeErrorKind.WrongNumber)
+ else fail(PolyAlternativeErrorKind.NoParams))
val (resSym, resTpe) = {
if (!sym0.isOverloaded)
@@ -1761,11 +1741,8 @@ trait Infer {
else {
val sym = sym0 filter (alt => isWithinBounds(pre, alt.owner, alt.typeParams, argtypes))
if (sym == NoSymbol) {
- if (argtypes forall (x => !x.isErroneous)) fail(
- "type arguments " + argtypes.mkString("[", ",", "]") +
- " conform to the bounds of none of the overloaded alternatives of\n "+sym0+
- ": "+sym0.info
- )
+ if (argtypes forall (x => !x.isErroneous))
+ fail(PolyAlternativeErrorKind.ArgsDoNotConform)
return
}
else if (sym.isOverloaded) {
@@ -1782,21 +1759,6 @@ trait Infer {
// Side effects tree with symbol and type
tree setSymbol resSym setType resTpe
}
-
- case class AccessError(tree: Tree, sym: Symbol, pre: Type, explanation: String) extends Tree {
- override def pos = tree.pos
- override def hasSymbol = tree.hasSymbol
- override def symbol = tree.symbol
- override def symbol_=(x: Symbol) = tree.symbol = x
- setError(this)
-
- def emit(): Tree = {
- val realsym = underlying(sym)
- errorTree(tree, realsym + realsym.locationString + " cannot be accessed in " +
- (if (sym.isClassConstructor) context.enclClass.owner else pre.widen) +
- explanation)
- }
- }
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
new file mode 100644
index 0000000..d6ec5f2
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -0,0 +1,948 @@
+package scala.tools.nsc
+package typechecker
+
+import symtab.Flags._
+import scala.tools.nsc.util._
+import scala.tools.nsc.util.ClassPath._
+import scala.reflect.runtime.ReflectionUtils
+import scala.collection.mutable.ListBuffer
+import scala.compat.Platform.EOL
+import scala.reflect.internal.util.Statistics
+import scala.reflect.macros.util._
+import java.lang.{Class => jClass}
+import java.lang.reflect.{Array => jArray, Method => jMethod}
+import scala.reflect.internal.util.Collections._
+import scala.util.control.ControlThrowable
+import scala.reflect.macros.runtime.AbortMacroException
+
+/**
+ * Code to deal with macros, namely with:
+ * * Compilation of macro definitions
+ * * Expansion of macro applications
+ *
+ * Say we have in a class C:
+ *
+ * def foo[T](xs: List[T]): T = macro fooBar
+ *
+ * Then fooBar needs to point to a static method of the following form:
+ *
+ * def fooBar[T: c.WeakTypeTag] // type tag annotation is optional
+ * (c: scala.reflect.macros.Context)
+ * (xs: c.Expr[List[T]])
+ * : c.Expr[T] = {
+ * ...
+ * }
+ *
+ * Then, if foo is called in qual.foo[Int](elems), where qual: D,
+ * the macro application is expanded to a reflective invocation of fooBar with parameters:
+ *
+ * (simpleMacroContext{ type PrefixType = D; val prefix = qual })
+ * (Expr(elems))
+ * (TypeTag(Int))
+ */
+trait Macros extends scala.tools.reflect.FastTrack with Traces {
+ self: Analyzer =>
+
+ import global._
+ import definitions._
+ import treeInfo.{isRepeatedParamType => _, _}
+ import MacrosStats._
+ def globalSettings = global.settings
+
+ /** `MacroImplBinding` and its companion module are responsible for
+ * serialization/deserialization of macro def -> impl bindings.
+ *
+ * The first officially released version of macros persisted these bindings across compilation runs
+ * using a neat trick. The right-hand side of a macro definition (which contains a reference to a macro impl)
+ * was typechecked and then put verbatim into an annotation on the macro definition.
+ *
+ * This solution is very simple, but unfortunately it's also lacking. If we use it, then
+ * signatures of macro defs become transitively dependent on scala-reflect.jar
+ * (because they refer to macro impls, and macro impls refer to scala.reflect.macros.Context defined in scala-reflect.jar).
+ * More details can be found in comments to https://issues.scala-lang.org/browse/SI-5940.
+ *
+ * Therefore we have to avoid putting macro impls into binding pickles and come up with our own serialization format.
+ * Situation is further complicated by the fact that it's not enough to just pickle macro impl's class name and method name,
+ * because macro expansion needs some knowledge about the shape of macro impl's signature (which we can't pickle).
+ * Hence we precompute necessary stuff (e.g. the layout of type parameters) when compiling macro defs.
+ */
+
+ /** Represents all the information that a macro definition needs to know about its implementation.
+ * Includes a path to load the implementation via Java reflection,
+ * and various accounting information necessary when composing an argument list for the reflective invocation.
+ */
+ private case class MacroImplBinding(
+ // Java class name of the class that contains the macro implementation
+ // is used to load the corresponding object with Java reflection
+ val className: String,
+ // method name of the macro implementation
+ // `className` and `methName` are all we need to reflectively invoke a macro implementation
+ // because macro implementations cannot be overloaded
+ val methName: String,
+ // flattens the macro impl's parameter lists having symbols replaced with metadata
+ // currently metadata is an index of the type parameter corresponding to that type tag (if applicable)
+ // f.ex. for: def impl[T: WeakTypeTag, U: WeakTypeTag, V](c: Context)(x: c.Expr[T]): (U, V) = ???
+ // `signature` will be equal to List(-1, -1, 0, 1)
+ val signature: List[Int],
+ // type arguments part of a macro impl ref (the right-hand side of a macro definition)
+ // these trees don't refer to a macro impl, so we can pickle them as is
+ val targs: List[Tree])
+
+ /** Macro def -> macro impl bindings are serialized into a `macroImpl` annotation
+ * with synthetic content that carries the payload described in `MacroImplBinding`.
+ *
+ * For example, for a pair of macro definition and macro implementation:
+ * def impl(c: scala.reflect.macros.Context): c.Expr[Unit] = c.literalUnit;
+ * def foo: Unit = macro impl
+ *
+ * We will have the following annotation added on the macro definition `foo`:
+ *
+ * @scala.reflect.macros.internal.macroImpl(
+ * `macro`(
+ * "signature" = List(-1),
+ * "methodName" = "impl",
+ * "versionFormat" = 1,
+ * "className" = "Macros$"))
+ */
+ private object MacroImplBinding {
+ val versionFormat = 1
+
+ def pickleAtom(obj: Any): Tree =
+ obj match {
+ case list: List[_] => Apply(Ident(ListModule), list map pickleAtom)
+ case s: String => Literal(Constant(s))
+ case i: Int => Literal(Constant(i))
+ }
+
+ def unpickleAtom(tree: Tree): Any =
+ tree match {
+ case Apply(list @ Ident(_), args) if list.symbol == ListModule => args map unpickleAtom
+ case Literal(Constant(s: String)) => s
+ case Literal(Constant(i: Int)) => i
+ }
+
+ def pickle(macroImplRef: Tree): Tree = {
+ val MacroImplReference(owner, macroImpl, targs) = macroImplRef
+ val paramss = macroImpl.paramss
+
+ // todo. refactor when fixing SI-5498
+ def className: String = {
+ def loop(sym: Symbol): String = sym match {
+ case sym if sym.owner.isPackageClass =>
+ val suffix = if (sym.isModuleClass) "$" else ""
+ sym.fullName + suffix
+ case sym =>
+ val separator = if (sym.owner.isModuleClass) "" else "$"
+ loop(sym.owner) + separator + sym.javaSimpleName.toString
+ }
+
+ loop(owner)
+ }
+
+ def signature: List[Int] = {
+ val transformed = transformTypeTagEvidenceParams(paramss, (param, tparam) => tparam)
+ transformed.flatten map (p => if (p.isTerm) -1 else p.paramPos)
+ }
+
+ val payload = List[(String, Any)](
+ "versionFormat" -> versionFormat,
+ "className" -> className,
+ "methodName" -> macroImpl.name.toString,
+ "signature" -> signature
+ )
+
+ // the shape of the nucleus is chosen arbitrarily. it doesn't carry any payload.
+ // it's only necessary as a stub `fun` for an Apply node that carries metadata in its `args`
+ // so don't try to find a program element named "macro" that corresponds to the nucleus
+ // I just named it "macro", because it's macro-related, but I could as well name it "foobar"
+ val nucleus = Ident(newTermName("macro"))
+ val wrapped = Apply(nucleus, payload map { case (k, v) => Assign(pickleAtom(k), pickleAtom(v)) })
+ val pickle = gen.mkTypeApply(wrapped, targs map (_.duplicate))
+
+ // assign NoType to all freshly created AST nodes
+ // otherwise pickler will choke on tree.tpe being null
+ // there's another gotcha
+ // if you don't assign a ConstantType to a constant
+ // then pickling will crash
+ new Transformer {
+ override def transform(tree: Tree) = {
+ tree match {
+ case Literal(const @ Constant(x)) if tree.tpe == null => tree setType ConstantType(const)
+ case _ if tree.tpe == null => tree setType NoType
+ case _ => ;
+ }
+ super.transform(tree)
+ }
+ }.transform(pickle)
+ }
+
+ def unpickle(pickle: Tree): MacroImplBinding = {
+ val (wrapped, targs) =
+ pickle match {
+ case TypeApply(wrapped, targs) => (wrapped, targs)
+ case wrapped => (wrapped, Nil)
+ }
+ val Apply(_, pickledPayload) = wrapped
+ val payload = pickledPayload.map{ case Assign(k, v) => (unpickleAtom(k), unpickleAtom(v)) }.toMap
+
+ val pickleVersionFormat = payload("versionFormat").asInstanceOf[Int]
+ if (versionFormat != pickleVersionFormat) throw new Error("macro impl binding format mismatch: expected $versionFormat, actual $pickleVersionFormat")
+
+ val className = payload("className").asInstanceOf[String]
+ val methodName = payload("methodName").asInstanceOf[String]
+ val signature = payload("signature").asInstanceOf[List[Int]]
+ MacroImplBinding(className, methodName, signature, targs)
+ }
+ }
+
+ private def bindMacroImpl(macroDef: Symbol, macroImplRef: Tree): Unit = {
+ val pickle = MacroImplBinding.pickle(macroImplRef)
+ macroDef withAnnotation AnnotationInfo(MacroImplAnnotation.tpe, List(pickle), Nil)
+ }
+
+ private def loadMacroImplBinding(macroDef: Symbol): MacroImplBinding = {
+ val Some(AnnotationInfo(_, List(pickle), _)) = macroDef.getAnnotation(MacroImplAnnotation)
+ MacroImplBinding.unpickle(pickle)
+ }
+
+ /** Transforms parameter lists of a macro impl.
+ * The `transform` function is invoked only for WeakTypeTag evidence parameters.
+ *
+ * The transformer takes two arguments: a value parameter from the parameter list
+ * and the type parameter that is witnessed by the value parameter.
+ *
+ * If the transformer returns NoSymbol, the value parameter is excluded from the result.
+ * If the transformer returns something else, this something else is included in the result instead of the value parameter.
+ *
+ * Despite being highly esoteric, this function significantly simplifies signature analysis.
+ * For example, it can be used to strip macroImpl.paramss from the evidences (necessary when checking def <-> impl correspondence)
+ * or to streamline creation of the list of macro arguments.
+ */
+ private def transformTypeTagEvidenceParams(paramss: List[List[Symbol]], transform: (Symbol, Symbol) => Symbol): List[List[Symbol]] = {
+ if (paramss.isEmpty || paramss.last.isEmpty) return paramss // no implicit parameters in the signature => nothing to do
+ if (paramss.head.isEmpty || !(paramss.head.head.tpe <:< MacroContextClass.tpe)) return paramss // no context parameter in the signature => nothing to do
+ // an evidence param has type `c.universe.WeakTypeTag[T]`, where `c` is the context param from the first param list
+ def transformTag(param: Symbol): Symbol = param.tpe.dealias match {
+ case TypeRef(SingleType(SingleType(NoPrefix, c), universe), WeakTypeTagClass, targ :: Nil)
+ if c == paramss.head.head && universe == MacroContextUniverse =>
+ transform(param, targ.typeSymbol)
+ case _ =>
+ param
+ }
+ val transformed = paramss.last map transformTag filter (_ ne NoSymbol)
+ // drop the trailing param list entirely if all of its params were removed evidence params
+ if (transformed.isEmpty) paramss.init else paramss.init :+ transformed
+ }
+
+ /** Infers the return type of a macro def from the signature of its macro impl,
+ * undoing the "sigma" transformation applied by `macroImplSig`:
+ * c.Expr[T] becomes T, impl type params become the def-site type args,
+ * and context-relative prefixes are rewritten back into the macro def's frame of reference.
+ */
+ def computeMacroDefTypeFromMacroImpl(macroDdef: DefDef, macroImpl: Symbol): Type = {
+ // Step I. Transform c.Expr[T] to T
+ var runtimeType = macroImpl.tpe.finalResultType.dealias match {
+ case TypeRef(_, ExprClass, runtimeType :: Nil) => runtimeType
+ case _ => AnyTpe // so that macro impls with rhs = ??? don't screw up our inference
+ }
+
+ // Step II. Transform type parameters of a macro implementation into type arguments in a macro definition's body
+ runtimeType = runtimeType.substituteTypes(macroImpl.typeParams, loadMacroImplBinding(macroDdef.symbol).targs.map(_.tpe))
+
+ // Step III. Transform c.prefix.value.XXX to this.XXX and implParam.value.YYY to defParam.YYY
+ def unsigma(tpe: Type): Type =
+ // transform is (param, tparam) => NoSymbol, i.e. evidence params are dropped before matching
+ transformTypeTagEvidenceParams(macroImpl.paramss, (param, tparam) => NoSymbol) match {
+ case (implCtxParam :: Nil) :: implParamss =>
+ val implToDef = flatMap2(implParamss, macroDdef.vparamss)(map2(_, _)((_, _))).toMap
+ object UnsigmaTypeMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, args) =>
+ val pre1 = pre match {
+ case SingleType(SingleType(SingleType(NoPrefix, c), prefix), value) if c == implCtxParam && prefix == MacroContextPrefix && value == ExprValue =>
+ ThisType(macroDdef.symbol.owner)
+ case SingleType(SingleType(NoPrefix, implParam), value) if value == ExprValue =>
+ implToDef get implParam map (defParam => SingleType(NoPrefix, defParam.symbol)) getOrElse pre
+ case _ =>
+ pre
+ }
+ val args1 = args map mapOver
+ TypeRef(pre1, sym, args1)
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ UnsigmaTypeMap(tpe)
+ case _ =>
+ // impl signature doesn't start with a lone context param => nothing to rewrite
+ tpe
+ }
+
+ unsigma(runtimeType)
+ }
+
+ /** A reference macro implementation signature compatible with a given macro definition.
+ *
+ * For example, for the following macro def:
+ * def foo[T](xs: List[T]): T = macro fooBar
+ *
+ * This function will return:
+ * (c: scala.reflect.macros.Context)(xs: c.Expr[List[T]]): c.Expr[T]
+ *
+ * Note that type tag evidence parameters are not included into the result.
+ * Type tag context bounds for macro impl tparams are optional.
+ * Therefore compatibility checks ignore such parameters, and we don't need to bother about them here.
+ *
+ * @param macroDef The macro definition symbol
+ * @param tparams The type parameters of the macro definition
+ * @param vparamss The value parameters of the macro definition
+ * @param retTpe The return type of the macro definition
+ */
+ private def macroImplSig(macroDef: Symbol, tparams: List[TypeDef], vparamss: List[List[ValDef]], retTpe: Type): (List[List[Symbol]], Type) = {
+ // had to move method's body to an object because of the recursive dependencies between sigma and param
+ object SigGenerator {
+ // "sigma" maps a def-site type to its impl-site form: `this.X` becomes `c.prefix.value.X`,
+ // and references to macro def params become references to the synthesized impl params
+ def sigma(tpe: Type): Type = {
+ class SigmaTypeMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, args) =>
+ val pre1 = pre match {
+ case ThisType(sym) if sym == macroDef.owner =>
+ SingleType(SingleType(SingleType(NoPrefix, ctxParam), MacroContextPrefix), ExprValue)
+ case SingleType(NoPrefix, sym) =>
+ mfind(vparamss)(_.symbol == sym) match {
+ case Some(macroDefParam) => SingleType(SingleType(NoPrefix, param(macroDefParam)), ExprValue)
+ case _ => pre
+ }
+ case _ =>
+ pre
+ }
+ TypeRef(pre1, sym, args map mapOver)
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ new SigmaTypeMap() apply tpe
+ }
+
+ def makeParam(name: Name, pos: Position, tpe: Type, flags: Long = 0L) =
+ macroDef.newValueParameter(name, pos, flags) setInfo tpe
+ // the leading `c: Context` parameter every macro impl must take
+ val ctxParam = makeParam(nme.macroContext, macroDef.pos, MacroContextClass.tpe, SYNTHETIC)
+ // wraps a def-site param type into c.Expr[...] (or c.WeakTypeTag[...] for type params),
+ // preserving repeated-param (vararg) structure
+ def implType(isType: Boolean, origTpe: Type): Type =
+ if (isRepeatedParamType(origTpe))
+ appliedType(
+ RepeatedParamClass.typeConstructor,
+ List(implType(isType, sigma(origTpe.typeArgs.head))))
+ else {
+ val tsym = getMember(MacroContextClass, if (isType) tpnme.WeakTypeTag else tpnme.Expr)
+ typeRef(singleType(NoPrefix, ctxParam), tsym, List(sigma(origTpe)))
+ }
+ // cache so that sigma and param agree on the one synthesized symbol per def-site param
+ val paramCache = scala.collection.mutable.Map[Symbol, Symbol]()
+ def param(tree: Tree): Symbol =
+ paramCache.getOrElseUpdate(tree.symbol, {
+ val sym = tree.symbol
+ makeParam(sym.name, sym.pos, implType(sym.isType, sym.tpe), sym.flags)
+ })
+
+ val paramss = List(ctxParam) :: mmap(vparamss)(param)
+ val implRetTpe = typeRef(singleType(NoPrefix, ctxParam), getMember(MacroContextClass, tpnme.Expr), List(sigma(retTpe)))
+ }
+
+ import SigGenerator._
+ macroLogVerbose(sm"""
+ |generating macroImplSigs for: $macroDef
+ |tparams are: $tparams
+ |vparamss are: $vparamss
+ |retTpe is: $retTpe
+ |macroImplSig is: $paramss, $implRetTpe
+ """.trim)
+ (paramss, implRetTpe)
+ }
+
+ /** Verifies that the body of a macro def typechecks to a reference to a static public non-overloaded method,
+ * and that that method is signature-wise compatible with the given macro definition.
+ *
+ * @return Typechecked rhs of the given macro definition if everything is okay.
+ * EmptyTree if an error occurs.
+ */
+ def typedMacroBody(typer: Typer, macroDdef: DefDef): Tree = {
+ val bodyTyper = new MacroTyper(typer, macroDdef)
+ try bodyTyper.typed
+ catch { case MacroBodyTypecheckException => EmptyTree }
+ }
+
+ /** Typechecks a macro def body in three phases:
+ * I) sanity checks, II) typechecking the rhs (manually driving macro expansion),
+ * III) checking signature compatibility between the macro def and its macro impl.
+ * NOTE(review): the MacroXxxError() helpers come from MacroErrors; given the catch in
+ * `typedMacroBody`, they appear to abort by throwing MacroBodyTypecheckException — confirm.
+ */
+ class MacroTyper(val typer: Typer, val macroDdef: DefDef) extends MacroErrors {
+ // Phase I: sanity checks
+ val macroDef = macroDdef.symbol
+ macroLogVerbose("typechecking macro def %s at %s".format(macroDef, macroDdef.pos))
+ assert(macroDef.isTermMacro, macroDdef)
+ if (fastTrack contains macroDef) MacroDefIsFastTrack()
+ if (!typer.checkFeature(macroDdef.pos, MacrosFeature, immediate = true)) MacroFeatureNotEnabled()
+
+ // we use typed1 instead of typed, because otherwise adapt is going to mess us up
+ // if adapt sees <qualifier>.<method>, it will want to perform eta-expansion and will fail
+ // unfortunately, this means that we have to manually trigger macro expansion
+ // because it's adapt which is responsible for automatic expansion during typechecking
+ def typecheckRhs(rhs: Tree): Tree = {
+ try {
+ // interestingly enough, just checking isErroneous doesn't cut it
+ // e.g. a "type arguments [U] do not conform to method foo's type parameter bounds" error
+ // doesn't manifest itself as an error in the resulting tree
+ val prevNumErrors = reporter.ERROR.count
+ var rhs1 = typer.typed1(rhs, EXPRmode, WildcardType)
+ def rhsNeedsMacroExpansion = rhs1.symbol != null && rhs1.symbol.isTermMacro && !rhs1.symbol.isErroneous
+ while (rhsNeedsMacroExpansion) {
+ rhs1 = macroExpand1(typer, rhs1) match {
+ case Success(expanded) =>
+ try {
+ val typechecked = typer.typed1(expanded, EXPRmode, WildcardType)
+ macroLogVerbose("typechecked1:%n%s%n%s".format(typechecked, showRaw(typechecked)))
+ typechecked
+ } finally {
+ // macroExpand1 pushed a macro context on Success; pop it even if typed1 throws
+ popMacroContext()
+ }
+ case Delay(delayed) =>
+ typer.instantiate(delayed, EXPRmode, WildcardType)
+ case Fallback(fallback) =>
+ typer.typed1(fallback, EXPRmode, WildcardType)
+ case Other(result) =>
+ result
+ }
+ }
+ val typecheckedWithErrors = (rhs1 exists (_.isErroneous)) || reporter.ERROR.count != prevNumErrors
+ if (typecheckedWithErrors) MacroDefUntypeableBodyError()
+ rhs1
+ } catch {
+ case ex: TypeError =>
+ typer.reportTypeError(context, rhs.pos, ex)
+ MacroDefUntypeableBodyError()
+ }
+ }
+
+ // Phase II: typecheck the right-hand side of the macro def
+ val typed = typecheckRhs(macroDdef.rhs)
+ typed match {
+ case MacroImplReference(_, meth, _) if meth == Predef_??? =>
+ // `def foo = macro ???` is allowed as a stub; bind it but mark it specially
+ bindMacroImpl(macroDef, typed)
+ MacroDefIsQmarkQmarkQmark()
+ case MacroImplReference(owner, meth, targs) =>
+ if (!meth.isMethod) MacroDefInvalidBodyError()
+ if (!meth.isPublic) MacroImplNotPublicError()
+ if (meth.isOverloaded) MacroImplOverloadedError()
+ if (!owner.isStaticOwner && !owner.moduleClass.isStaticOwner) MacroImplNotStaticError()
+ if (meth.typeParams.length != targs.length) MacroImplWrongNumberOfTypeArgumentsError(typed)
+ bindMacroImpl(macroDef, typed)
+ case _ =>
+ MacroDefInvalidBodyError()
+ }
+
+ // Phase III: check compatibility between the macro def and its macro impl
+ // this check ignores type tag evidence parameters, because type tag context bounds are optional
+ // aXXX (e.g. aparamss) => characteristics of the macro impl ("a" stands for "actual")
+ // rXXX (e.g. rparamss) => characteristics of a reference macro impl signature synthesized from the macro def ("r" stands for "reference")
+ val macroImpl = typed.symbol
+ val aparamss = transformTypeTagEvidenceParams(macroImpl.paramss, (param, tparam) => NoSymbol)
+ val aret = macroImpl.tpe.finalResultType
+ val macroDefRet =
+ if (!macroDdef.tpt.isEmpty) typer.typedType(macroDdef.tpt).tpe
+ else computeMacroDefTypeFromMacroImpl(macroDdef, macroImpl)
+ val (rparamss, rret) = macroImplSig(macroDef, macroDdef.tparams, macroDdef.vparamss, macroDefRet)
+
+ val implicitParams = aparamss.flatten filter (_.isImplicit)
+ if (implicitParams.nonEmpty) MacroImplNonTagImplicitParameters(implicitParams)
+ if (aparamss.length != rparamss.length) MacroImplParamssMismatchError()
+
+ val atparams = macroImpl.typeParams
+ val atvars = atparams map freshVar
+ def atpeToRtpe(atpe: Type) = atpe.substSym(aparamss.flatten, rparamss.flatten).instantiateTypeParams(atparams, atvars)
+
+ try {
+ map2(aparamss, rparamss)((aparams, rparams) => {
+ if (aparams.length < rparams.length) MacroImplMissingParamsError(aparams, rparams)
+ if (rparams.length < aparams.length) MacroImplExtraParamsError(aparams, rparams)
+ })
+
+ // cannot fuse these loops because if aparamss.flatten != rparamss.flatten
+ // then `atpeToRtpe` is going to fail with an unsound substitution
+ map2(aparamss.flatten, rparamss.flatten)((aparam, rparam) => {
+ if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam)
+ if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam)
+ // unwrap a `c.type { type PrefixType = ... }` refinement down to the bare context type
+ val aparamtpe = aparam.tpe.dealias match {
+ case RefinedType(List(tpe), Scope(sym)) if tpe =:= MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
+ case tpe => tpe
+ }
+ checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam)
+ })
+
+ checkMacroImplResultTypeMismatch(atpeToRtpe(aret), rret)
+
+ val maxLubDepth = lubDepth(aparamss.flatten map (_.tpe)) max lubDepth(rparamss.flatten map (_.tpe))
+ val atargs = solvedTypes(atvars, atparams, atparams map varianceInType(aret), upper = false, depth = maxLubDepth)
+ val boundsOk = typer.silent(_.infer.checkBounds(macroDdef, NoPrefix, NoSymbol, atparams, atargs, ""))
+ boundsOk match {
+ case SilentResultValue(true) => // do nothing, success
+ case SilentResultValue(false) | SilentTypeError(_) => MacroImplTargMismatchError(atargs, atparams)
+ }
+ } catch {
+ case ex: NoInstance => MacroImplTparamInstantiationError(atparams, ex)
+ }
+ }
+
+ /** Macro classloader that is used to resolve and run macro implementations.
+ * Loads classes from -cp (aka the library classpath).
+ * Is also capable of detecting REPL and reusing its classloader.
+ */
+ lazy val macroClassloader: ClassLoader = {
+ if (global.forMSIL)
+ throw new UnsupportedOperationException("Scala reflection not available on this platform")
+
+ val classpath = global.classPath.asURLs
+ macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath))
+ val loader = ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
+
+ // a heuristic to detect the REPL
+ if (global.settings.exposeEmptyPackage.value) {
+ // fix: the format string lacked a %s placeholder, so the classpath argument was silently dropped from the log
+ macroLogVerbose("macro classloader: initializing from a REPL classloader: %s".format(global.classPath.asURLs))
+ import scala.tools.nsc.interpreter._
+ // the REPL compiles into a virtual directory; chain a classloader over it so freshly
+ // compiled macro impls are visible
+ val virtualDirectory = global.settings.outputDirs.getSingleOutput.get
+ new AbstractFileClassLoader(virtualDirectory, loader) {}
+ } else {
+ loader
+ }
+ }
+
+ /** Produces a function that can be used to invoke macro implementation for a given macro definition:
+ * 1) Looks up macro implementation symbol in this universe.
+ * 2) Loads its enclosing class from the macro classloader.
+ * 3) Loads the companion of that enclosing class from the macro classloader.
+ * 4) Resolves macro implementation within the loaded companion.
+ *
+ * @return Requested runtime if macro implementation can be loaded successfully from either of the mirrors,
+ * `null` otherwise.
+ */
+ type MacroRuntime = MacroArgs => Any
+ // per-run cache: resolving a runtime involves classloading, which is expensive
+ private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime]
+ private def macroRuntime(macroDef: Symbol): MacroRuntime = {
+ macroLogVerbose(s"looking for macro implementation: $macroDef")
+ if (fastTrack contains macroDef) {
+ macroLogVerbose("macro expansion is serviced by a fast track")
+ fastTrack(macroDef)
+ } else {
+ macroRuntimesCache.getOrElseUpdate(macroDef, {
+ val binding = loadMacroImplBinding(macroDef)
+ val className = binding.className
+ val methName = binding.methName
+ macroLogVerbose(s"resolved implementation as $className.$methName")
+
+ // a macro bound to `???` (stub) gets a runtime that always aborts with a clear message
+ if (binding.className == Predef_???.owner.fullName.toString && binding.methName == Predef_???.name.encoded) {
+ args => throw new AbortMacroException(args.c.enclosingPosition, "macro implementation is missing")
+ } else {
+ // I don't use Scala reflection here, because it seems to interfere with JIT magic
+ // whenever you instantiate a mirror (and not do anything with it, just instantiate), performance drops by 15-20%
+ // I'm not sure what's the reason - for me it's pure voodoo
+ // upd. my latest experiments show that everything's okay
+ // it seems that in 2.10.1 we can easily switch to Scala reflection
+ try {
+ macroLogVerbose(s"loading implementation class: $className")
+ macroLogVerbose(s"classloader is: ${ReflectionUtils.show(macroClassloader)}")
+ val implObj = ReflectionUtils.staticSingletonInstance(macroClassloader, className)
+ // relies on the fact that macro impls cannot be overloaded
+ // so every methName can resolve to at maximum one method
+ val implMeths = implObj.getClass.getDeclaredMethods.find(_.getName == methName)
+ val implMeth = implMeths getOrElse { throw new NoSuchMethodException(s"$className.$methName") }
+ macroLogVerbose(s"successfully loaded macro impl as ($implObj, $implMeth)")
+ args => implMeth.invoke(implObj, ((args.c +: args.others) map (_.asInstanceOf[AnyRef])): _*)
+ } catch {
+ case ex: Exception =>
+ // resolution failure is not fatal here: mark the macro def erroneous and
+ // return null so that callers fall back to macroExpandWithoutRuntime
+ macroLogVerbose(s"macro runtime failed to load: ${ex.toString}")
+ macroDef setFlag IS_ERROR
+ null
+ }
+ }
+ })
+ }
+ }
+
+ /** Creates a fresh macro context for expanding `expandeeTree`, exposing `prefixTree` as `c.prefix`. */
+ private def macroContext(typer: Typer, prefixTree: Tree, expandeeTree: Tree): MacroContext =
+ new {
+ val universe: self.global.type = self.global
+ val callsiteTyper: universe.analyzer.Typer = typer.asInstanceOf[global.analyzer.Typer]
+ val expandee = expandeeTree
+ } with UnaffiliatedMacroContext {
+ val prefix = Expr[Nothing](prefixTree)(TypeTag.Nothing)
+ override def toString = "MacroContext(%s@%s +%d)".format(expandee.symbol.name, expandee.pos, enclosingMacros.length - 1 /* exclude myself */)
+ }
+
+ /** Calculate the arguments to pass to a macro implementation when expanding the provided tree.
+ */
+ case class MacroArgs(c: MacroContext, others: List[Any])
+ private def macroArgs(typer: Typer, expandee: Tree): MacroArgs = {
+ val macroDef = expandee.symbol
+ val prefixTree = expandee.collect{ case Select(qual, name) => qual }.headOption.getOrElse(EmptyTree)
+ // reuse the context created when the expansion was delayed, if any
+ val context = expandee.attachments.get[MacroRuntimeAttachment].flatMap(_.macroContext).getOrElse(macroContext(typer, prefixTree, expandee))
+ var typeArgs = List[Tree]()
+ val exprArgs = ListBuffer[List[Expr[_]]]()
+ // peel Apply/TypeApply nodes off the expandee, collecting value args (outermost last) and type args
+ def collectMacroArgs(tree: Tree): Unit = tree match {
+ case Apply(fn, args) =>
+ // todo. infer precise typetag for this Expr, namely the declared type of the corresponding macro impl argument
+ exprArgs.prepend(args map (arg => context.Expr[Nothing](arg)(TypeTag.Nothing)))
+ collectMacroArgs(fn)
+ case TypeApply(fn, args) =>
+ typeArgs = args
+ collectMacroArgs(fn)
+ case _ =>
+ }
+ collectMacroArgs(expandee)
+
+ val argcDoesntMatch = macroDef.paramss.length != exprArgs.length
+ val nullaryArgsEmptyParams = exprArgs.isEmpty && macroDef.paramss == ListOfNil
+ if (argcDoesntMatch && !nullaryArgsEmptyParams) { typer.TyperErrorGen.MacroPartialApplicationError(expandee) }
+
+ val argss: List[List[Any]] = exprArgs.toList
+ macroLogVerbose(s"context: $context")
+ macroLogVerbose(s"argss: $argss")
+
+ val preparedArgss: List[List[Any]] =
+ if (fastTrack contains macroDef) {
+ if (fastTrack(macroDef) validate context) argss
+ else typer.TyperErrorGen.MacroPartialApplicationError(expandee)
+ } else {
+ // if paramss have typetag context bounds, add an arglist to argss if necessary and instantiate the corresponding evidences
+ // consider the following example:
+ //
+ // class D[T] {
+ // class C[U] {
+ // def foo[V] = macro Impls.foo[T, U, V]
+ // }
+ // }
+ //
+ // val outer1 = new D[Int]
+ // val outer2 = new outer1.C[String]
+ // outer2.foo[Boolean]
+ //
+ // then T and U need to be inferred from the lexical scope of the call using `asSeenFrom`
+ // whereas V won't be resolved by asSeenFrom and need to be loaded directly from `expandee` which needs to contain a TypeApply node
+ // also, macro implementation reference may contain a regular type as a type argument, then we pass it verbatim
+ val binding = loadMacroImplBinding(macroDef)
+ macroLogVerbose(s"binding: $binding")
+ val tags = binding.signature filter (_ != -1) map (paramPos => {
+ val targ = binding.targs(paramPos).tpe.typeSymbol
+ val tpe = if (targ.isTypeParameterOrSkolem) {
+ if (targ.owner == macroDef) {
+ // doesn't work when macro def is compiled separately from its usages
+ // then targ is not a skolem and isn't equal to any of macroDef.typeParams
+ // val argPos = targ.deSkolemize.paramPos
+ val argPos = macroDef.typeParams.indexWhere(_.name == targ.name)
+ typeArgs(argPos).tpe
+ } else
+ targ.tpe.asSeenFrom(
+ if (prefixTree == EmptyTree) macroDef.owner.tpe else prefixTree.tpe,
+ macroDef.owner)
+ } else
+ targ.tpe
+ context.WeakTypeTag(tpe)
+ })
+ macroLogVerbose(s"tags: $tags")
+
+ // transforms argss taking into account varargness of paramss
+ // note that typetag context bounds are only declared on macroImpls
+ // so this optional arglist might not match macroDef's paramlist
+ // nb! varargs can apply to any parameter section, not necessarily to the last one
+ mapWithIndex(argss :+ tags)((as, i) => {
+ val mapsToParamss = macroDef.paramss.indices contains i
+ if (mapsToParamss) {
+ val ps = macroDef.paramss(i)
+ if (isVarArgsList(ps)) {
+ val (normal, varargs) = as splitAt (ps.length - 1)
+ normal :+ varargs // pack all varargs into a single List argument
+ } else as
+ } else as
+ })
+ }
+ macroLogVerbose(s"preparedArgss: $preparedArgss")
+ MacroArgs(context, preparedArgss.flatten)
+ }
+
+ /** Keeps track of macros in-flight.
+ * See more information in comments to `openMacros` in `scala.reflect.macros.Context`.
+ */
+ private var _openMacros = List[MacroContext]()
+ def openMacros = _openMacros
+ // stack discipline: pushed in macroExpandWithRuntime before invoking the impl, popped afterwards
+ private def pushMacroContext(c: MacroContext) = _openMacros ::= c
+ private def popMacroContext() = _openMacros = _openMacros.tail
+ def enclosingMacroPosition = openMacros map (_.macroApplication.pos) find (_ ne NoPosition) getOrElse NoPosition
+
+ /** Result of a single macro expansion attempt; see `macroExpand` for how each case is handled. */
+ private sealed abstract class MacroExpansionResult
+ private case class Success(expanded: Tree) extends MacroExpansionResult
+ private case class Delay(delayed: Tree) extends MacroExpansionResult
+ // constructing a Fallback also records that this run saw a fallback expansion
+ private case class Fallback(fallback: Tree) extends MacroExpansionResult { currentRun.seenMacroExpansionsFallingBack = true }
+ private case class Other(result: Tree) extends MacroExpansionResult
+ // named aliases over Other that document why an expansion returned its tree as-is
+ private def Skip(expanded: Tree) = Other(expanded)
+ private def Cancel(expandee: Tree) = Other(expandee)
+ private def Failure(expandee: Tree) = Other(expandee)
+
+ /** Performs macro expansion:
+ * 1) Checks whether the expansion needs to be delayed (see `mustDelayMacroExpansion`)
+ * 2) Loads macro implementation using `macroMirror`
+ * 3) Synthesizes invocation arguments for the macro implementation
+ * 4) Checks that the result is a tree bound to this universe
+ * 5) Typechecks the result against the return type of the macro definition
+ *
+ * If -Ymacro-debug-lite is enabled, you will get basic notifications about macro expansion
+ * along with macro expansions logged in the form that can be copy/pasted verbatim into REPL.
+ *
+ * If -Ymacro-debug-verbose is enabled, you will get detailed log of how exactly this function
+ * performs class loading and method resolution in order to load the macro implementation.
+ * The log will also include other non-trivial steps of macro expansion.
+ *
+ * @return
+ * the expansion result if the expansion has been successful,
+ * the fallback method invocation if the expansion has been unsuccessful, but there is a fallback,
+ * the expandee unchanged if the expansion has been delayed,
+ * the expandee fully expanded if the expansion has been delayed before and has been expanded now,
+ * the expandee with an error marker set if the expansion has been cancelled due to malformed arguments or implementation
+ * the expandee with an error marker set if there has been an error
+ */
+ def macroExpand(typer: Typer, expandee: Tree, mode: Int = EXPRmode, pt: Type = WildcardType): Tree = {
+ if (settings.Ymacronoexpand.value) return expandee // SI-6812
+ val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
+ if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
+ try {
+ macroExpand1(typer, expandee) match {
+ case Success(expanded) =>
+ try {
+ // typechecks `tree` against `pt`, treating new reporter errors as failure (verbose log only)
+ def typecheck(phase: String, tree: Tree, pt: Type): Tree = {
+ if (tree.isErroneous) return tree
+ macroLogVerbose(s"typechecking against $phase $pt: $expanded")
+ val numErrors = reporter.ERROR.count
+ def hasNewErrors = reporter.ERROR.count > numErrors
+ val result = typer.context.withImplicitsEnabled(typer.typed(tree, EXPRmode, pt))
+ macroLogVerbose(s"""${if (hasNewErrors) "failed to typecheck" else "successfully typechecked"} against $phase $pt:\n$result""")
+ result
+ }
+
+ var expectedTpe = expandee.tpe
+ if (isNullaryInvocation(expandee)) expectedTpe = expectedTpe.finalResultType
+ // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc
+ val expanded0 = duplicateAndKeepPositions(expanded)
+ // first pin down the macro def's declared return type, then the expected type at the call site
+ val expanded1 = typecheck("macro def return type", expanded0, expectedTpe)
+ val expanded2 = typecheck("expected type", expanded1, pt)
+ expanded2
+ } finally {
+ popMacroContext()
+ }
+ case Delay(delayed) =>
+ // =========== THE SITUATION ===========
+ //
+ // If we've been delayed (i.e. bailed out of the expansion because of undetermined type params present in the expandee),
+ // then there are two possible situations we're in:
+ //
+ // 1) We're in POLYmode, when the typer tests the waters wrt type inference
+ // (e.g. as in typedArgToPoly in doTypedApply).
+ //
+ // 2) We're out of POLYmode, which means that the typer is out of tricks to infer our type
+ // (e.g. if we're an argument to a function call, then this means that no previous argument lists
+ // can determine our type variables for us).
+ //
+ // Situation #1 is okay for us, since there's no pressure. In POLYmode we're just verifying that
+ // there's nothing outrageously wrong with our undetermined type params (from what I understand!).
+ //
+ // Situation #2 requires measures to be taken. If we're in it, then no one's going to help us infer
+ // the undetermined type params. Therefore we need to do something ourselves or otherwise this
+ // expandee will forever remain not expanded (see SI-5692).
+ //
+ // A traditional way out of this conundrum is to call `instantiate` and let the inferencer
+ // try to find the way out. It works for simple cases, but sometimes, if the inferencer lacks
+ // information, it will be forced to approximate.
+ //
+ // =========== THE PROBLEM ===========
+ //
+ // Consider the following example (thanks, Miles!):
+ //
+ // // Iso represents an isomorphism between two datatypes:
+ // // 1) An arbitrary one (e.g. a random case class)
+ // // 2) A uniform representation for all datatypes (e.g. an HList)
+ // trait Iso[T, U] {
+ // def to(t : T) : U
+ // def from(u : U) : T
+ // }
+ // implicit def materializeIso[T, U]: Iso[T, U] = macro ???
+ //
+ // case class Foo(i: Int, s: String, b: Boolean)
+ // def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)
+ // foo(Foo(23, "foo", true))
+ //
+ // In the snippet above, even though we know that there's a fundep going from T to U
+ // (in a sense that a datatype's uniform representation is unambiguously determined by the datatype,
+ // e.g. for Foo it will be Int :: String :: Boolean :: HNil), there's no way to convey this information
+ // to the typechecker. Therefore the typechecker will infer Nothing for L, which is hardly what we want.
+ val shouldInstantiate = typer.context.undetparams.nonEmpty && !inPolyMode(mode)
+ if (shouldInstantiate) typer.instantiatePossiblyExpectingUnit(delayed, mode, pt)
+ else delayed
+ case Fallback(fallback) =>
+ typer.context.withImplicitsEnabled(typer.typed(fallback, EXPRmode, pt))
+ case Other(result) =>
+ result
+ }
+ } finally {
+ if (Statistics.canEnable) Statistics.stopTimer(macroExpandNanos, start)
+ }
+ }
+
+ /** Does the same as `macroExpand`, but without typechecking the expansion
+ * Meant for internal use within the macro infrastructure, don't use it elsewhere.
+ */
+ private def macroExpand1(typer: Typer, expandee: Tree): MacroExpansionResult =
+ // verbose printing might cause recursive macro expansions, so I'm shutting it down here
+ withInfoLevel(nodePrinters.InfoLevel.Quiet) {
+ // don't even attempt to expand an erroneous application; mark it and bail out
+ if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) {
+ val reason = if (expandee.symbol.isErroneous) "not found or incompatible macro implementation" else "erroneous arguments"
+ macroLogVerbose(s"cancelled macro expansion because of $reason: $expandee")
+ return Cancel(typer.infer.setError(expandee))
+ }
+
+ try {
+ // macroRuntime returns null when the impl could not be loaded (see its catch clause)
+ val runtime = macroRuntime(expandee.symbol)
+ if (runtime != null) macroExpandWithRuntime(typer, expandee, runtime)
+ else macroExpandWithoutRuntime(typer, expandee)
+ } catch {
+ case typer.TyperErrorGen.MacroExpansionException => Failure(expandee)
+ }
+ }
+
+ /** Expands a macro when a runtime (i.e. the macro implementation) can be successfully loaded
+ * Meant for internal use within the macro infrastructure, don't use it elsewhere.
+ */
+ private def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroExpansionResult = {
+ val wasDelayed = isDelayed(expandee)
+ val undetparams = calculateUndetparams(expandee)
+ val nowDelayed = !typer.context.macrosEnabled || undetparams.nonEmpty
+
+ // four cases: (was delayed?, must stay delayed?)
+ (wasDelayed, nowDelayed) match {
+ case (true, true) =>
+ Delay(expandee)
+ case (true, false) =>
+ // previously delayed, now fully determined => expand everything inside out
+ val expanded = macroExpandAll(typer, expandee)
+ if (expanded exists (_.isErroneous)) Failure(expandee)
+ else Skip(expanded)
+ case (false, true) =>
+ macroLogLite("macro expansion is delayed: %s".format(expandee))
+ delayed += expandee -> undetparams
+ expandee updateAttachment MacroRuntimeAttachment(delayed = true, typerContext = typer.context, macroContext = Some(macroArgs(typer, expandee).c))
+ Delay(expandee)
+ case (false, false) =>
+ import typer.TyperErrorGen._
+ macroLogLite("performing macro expansion %s at %s".format(expandee, expandee.pos))
+ val args = macroArgs(typer, expandee)
+ try {
+ val numErrors = reporter.ERROR.count
+ def hasNewErrors = reporter.ERROR.count > numErrors
+ // push the context so `openMacros` reflects the in-flight expansion while the impl runs
+ val expanded = { pushMacroContext(args.c); runtime(args) }
+ if (hasNewErrors) MacroGeneratedTypeError(expandee)
+ expanded match {
+ case expanded: Expr[_] =>
+ macroLogVerbose("original:")
+ macroLogLite("" + expanded.tree + "\n" + showRaw(expanded.tree))
+ val freeSyms = expanded.tree.freeTerms ++ expanded.tree.freeTypes
+ freeSyms foreach (sym => MacroFreeSymbolError(expandee, sym))
+ Success(atPos(enclosingMacroPosition.focus)(expanded.tree updateAttachment MacroExpansionAttachment(expandee)))
+ case _ =>
+ MacroExpansionIsNotExprError(expandee, expanded)
+ }
+ } catch {
+ case ex: Throwable =>
+ // on Success the context stays pushed (popped later in macroExpand); pop here on failure
+ popMacroContext()
+ val realex = ReflectionUtils.unwrapThrowable(ex)
+ realex match {
+ case ex: AbortMacroException => MacroGeneratedAbort(expandee, ex)
+ case ex: ControlThrowable => throw ex
+ case ex: TypeError => MacroGeneratedTypeError(expandee, ex)
+ case _ => MacroGeneratedException(expandee, realex)
+ }
+ } finally {
+ expandee.removeAttachment[MacroRuntimeAttachment]
+ }
+ }
+ }
+
+ /** Expands a macro when a runtime (i.e. the macro implementation) cannot be loaded.
+ * Rewrites the application to target the nearest overridden non-macro method, or
+ * reports MacroImplementationNotFoundError when no fallback exists.
+ * Meant for internal use within the macro infrastructure, don't use it elsewhere.
+ */
+ private def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroExpansionResult = {
+ import typer.TyperErrorGen._
+ val fallbackSym = expandee.symbol.nextOverriddenSymbol orElse MacroImplementationNotFoundError(expandee)
+ macroLogLite(s"falling back to: $fallbackSym")
+
+ // rebuild the application tree, retargeting the innermost Select at the fallback symbol
+ def rewire(tree: Tree): Tree = tree match {
+ case Select(qual, name) => Select(qual, name) setPos tree.pos setSymbol fallbackSym
+ case Apply(fn, args) => Apply(rewire(fn), args) setPos tree.pos
+ case TypeApply(fn, args) => TypeApply(rewire(fn), args) setPos tree.pos
+ }
+ Fallback(rewire(expandee))
+ }
+
+ /** Without any restrictions on macro expansion, macro applications will expand at will,
+ * and when type inference is involved, expansions will end up using yet uninferred type params.
+ *
+ * For some macros this might be ok (thanks to TreeTypeSubstituter that replaces
+ * the occurrences of undetparams with their inferred values), but in general case this won't work.
+ * E.g. for reification simple substitution is not enough - we actually need to re-reify inferred types.
+ *
+ * Luckily, there exists a very simple way to fix the problem: delay macro expansion until everything is inferred.
+ * Here are the exact rules. Macro application gets delayed if any of its subtrees contain:
+ * 1) type vars (tpe.isInstanceOf[TypeVar]) // [Eugene] this check is disabled right now, because TypeVars seem to be created from undetparams anyways
+ * 2) undetparams (sym.isTypeParameter && !sym.isSkolem)
+ */
+ var hasPendingMacroExpansions = false
+ // maps a delayed expandee to the ids of the undetermined type params it still mentions
+ private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]
+ private def isDelayed(expandee: Tree) = delayed contains expandee
+ private def calculateUndetparams(expandee: Tree): scala.collection.mutable.Set[Int] =
+ delayed.get(expandee).getOrElse {
+ // walk every subtree, collecting symbols (of trees and of their types) registered as undetermined
+ val calculated = scala.collection.mutable.Set[Symbol]()
+ expandee foreach (sub => {
+ def traverse(sym: Symbol) = if (sym != null && (undetparams contains sym.id)) calculated += sym
+ if (sub.symbol != null) traverse(sub.symbol)
+ if (sub.tpe != null) sub.tpe foreach (sub => traverse(sub.typeSymbol))
+ })
+ macroLogVerbose("calculateUndetparams: %s".format(calculated))
+ calculated map (_.id)
+ }
+ // ids of all currently undetermined type params, maintained via the notify* callbacks below
+ private val undetparams = perRunCaches.newSet[Int]
+ def notifyUndetparamsAdded(newUndets: List[Symbol]): Unit = {
+ undetparams ++= newUndets map (_.id)
+ if (macroDebugVerbose) newUndets foreach (sym => println("undetParam added: %s".format(sym)))
+ }
+ def notifyUndetparamsInferred(undetNoMore: List[Symbol], inferreds: List[Type]): Unit = {
+ undetparams --= undetNoMore map (_.id)
+ if (macroDebugVerbose) (undetNoMore zip inferreds) foreach { case (sym, tpe) => println("undetParam inferred: %s as %s".format(sym, tpe))}
+ // a delayed expandee whose undetparams all got inferred becomes eligible for expansion
+ if (!delayed.isEmpty)
+ delayed.toList foreach {
+ case (expandee, undetparams) if !undetparams.isEmpty =>
+ undetparams --= undetNoMore map (_.id)
+ if (undetparams.isEmpty) {
+ hasPendingMacroExpansions = true
+ macroLogVerbose(s"macro expansion is pending: $expandee")
+ }
+ case _ =>
+ // do nothing
+ }
+ }
+
+ /** Performs macro expansion on all subtrees of a given tree.
+ * Innermost macros are expanded first, outermost macros are expanded last.
+ * See the documentation for `macroExpand` for more information.
+ */
+ def macroExpandAll(typer: Typer, expandee: Tree): Tree =
+ new Transformer {
+ override def transform(tree: Tree) = super.transform(tree match {
+ // todo. expansion should work from the inside out
+ case tree if (delayed contains tree) && calculateUndetparams(tree).isEmpty =>
+ // re-expand with the typer context captured when the expansion was delayed,
+ // but with the current enablement flags carried over
+ val context = tree.attachments.get[MacroRuntimeAttachment].get.typerContext
+ delayed -= tree
+ context.implicitsEnabled = typer.context.implicitsEnabled
+ context.enrichmentEnabled = typer.context.enrichmentEnabled
+ context.macrosEnabled = typer.context.macrosEnabled
+ macroExpand(newTyper(context), tree, EXPRmode, WildcardType)
+ case _ =>
+ tree
+ })
+ }.transform(expandee)
+}
+
+/** Statistics instruments for macro expansion, attributed to the "typer" phase. */
+object MacrosStats {
+ import scala.reflect.internal.TypesStats.typerNanos
+ val macroExpandCount = Statistics.newCounter ("#macro expansions", "typer")
+ val macroExpandNanos = Statistics.newSubTimer("time spent in macroExpand", typerNanos)
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
new file mode 100644
index 0000000..99557d1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -0,0 +1,594 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+package scala.tools.nsc
+package typechecker
+
+import symtab.Flags._
+import scala.collection.{ mutable, immutable }
+import scala.reflect.internal.util.StringOps.{ ojoin }
+import scala.reflect.ClassTag
+import scala.reflect.runtime.{ universe => ru }
+import scala.language.higherKinds
+
+/** Logic related to method synthesis which involves cooperation between
+ * Namer and Typer.
+ */
+trait MethodSynthesis {
+ self: Analyzer =>
+
+ import global._
+ import definitions._
+ import CODE._
+
+ object synthesisUtil {
+ type TT[T] = ru.TypeTag[T]
+ type CT[T] = ClassTag[T]
+
+ def ValOrDefDef(sym: Symbol, body: Tree) =
+ if (sym.isLazy) ValDef(sym, body)
+ else DefDef(sym, body)
+
+ def applyTypeInternal(tags: List[TT[_]]): Type = {
+ val symbols = tags map compilerSymbolFromTag
+ val container :: args = symbols
+ val tparams = container.typeConstructor.typeParams
+
+ // Conservative at present - if manifests were more usable this could do a lot more.
+ // [Eugene to Paul] all right, they are now. what do you have in mind?
+ require(symbols forall (_ ne NoSymbol), "Must find all tags: " + symbols)
+ require(container.owner.isPackageClass, "Container must be a top-level class in a package: " + container)
+ require(tparams.size == args.size, "Arguments must match type constructor arity: " + tparams + ", " + args)
+
+ appliedType(container, args map (_.tpe): _*)
+ }
+
+ def companionType[T](implicit ct: CT[T]) =
+ rootMirror.getRequiredModule(ct.runtimeClass.getName).tpe
+
+ // Use these like `applyType[List, Int]` or `applyType[Map, Int, String]`
+ def applyType[CC](implicit t1: TT[CC]): Type =
+ applyTypeInternal(List(t1))
+
+ def applyType[CC[X1], X1](implicit t1: TT[CC[_]], t2: TT[X1]): Type =
+ applyTypeInternal(List(t1, t2))
+
+ def applyType[CC[X1, X2], X1, X2](implicit t1: TT[CC[_,_]], t2: TT[X1], t3: TT[X2]): Type =
+ applyTypeInternal(List(t1, t2, t3))
+
+ def applyType[CC[X1, X2, X3], X1, X2, X3](implicit t1: TT[CC[_,_,_]], t2: TT[X1], t3: TT[X2], t4: TT[X3]): Type =
+ applyTypeInternal(List(t1, t2, t3, t4))
+
+ def newMethodType[F](owner: Symbol)(implicit t: TT[F]): Type = {
+ val fnSymbol = compilerSymbolFromTag(t)
+ val formals = compilerTypeFromTag(t).typeArguments
+ assert(fnSymbol isSubClass FunctionClass(formals.size - 1), (owner, t))
+ val params = owner newSyntheticValueParams formals
+ MethodType(params, formals.last)
+ }
+
+ /** The annotations amongst those found on the original symbol which
+ * should be propagated to this kind of accessor.
+ */
+ def deriveAnnotations(initial: List[AnnotationInfo], category: Symbol, keepClean: Boolean): List[AnnotationInfo] = {
+ initial filter { ann =>
+ // There are no meta-annotation arguments attached to `ann`
+ if (ann.metaAnnotations.isEmpty) {
+ // A meta-annotation matching `annotKind` exists on `ann`'s definition.
+ (ann.defaultTargets contains category) ||
+ // `ann`'s definition has no meta-annotations, and `keepClean` is true.
+ (ann.defaultTargets.isEmpty && keepClean)
+ }
+ // There are meta-annotation arguments, and one of them matches `annotKind`
+ else ann.metaAnnotations exists (_ matches category)
+ }
+ }
+ }
+ import synthesisUtil._
+
+ class ClassMethodSynthesis(val clazz: Symbol, localTyper: Typer) {
+ def mkThis = This(clazz) setPos clazz.pos.focus
+ def mkThisSelect(sym: Symbol) = atPos(clazz.pos.focus)(Select(mkThis, sym))
+
+ private def isOverride(name: TermName) =
+ clazzMember(name).alternatives exists (sym => !sym.isDeferred && (sym.owner != clazz))
+
+ def newMethodFlags(name: TermName) = {
+ val overrideFlag = if (isOverride(name)) OVERRIDE else 0L
+ overrideFlag | SYNTHETIC
+ }
+ def newMethodFlags(method: Symbol) = {
+ val overrideFlag = if (isOverride(method.name)) OVERRIDE else 0L
+ (method.flags | overrideFlag | SYNTHETIC) & ~DEFERRED
+ }
+
+ private def finishMethod(method: Symbol, f: Symbol => Tree): Tree =
+ localTyper typed ValOrDefDef(method, f(method))
+
+ private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = {
+ val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name))
+ finishMethod(m setInfoAndEnter info, f)
+ }
+ private def createInternal(name: Name, f: Symbol => Tree, infoFn: Symbol => Type): Tree = {
+ val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name))
+ finishMethod(m setInfoAndEnter infoFn(m), f)
+ }
+ private def cloneInternal(original: Symbol, f: Symbol => Tree, name: Name): Tree = {
+ val m = original.cloneSymbol(clazz, newMethodFlags(original), name) setPos clazz.pos.focus
+ finishMethod(clazz.info.decls enter m, f)
+ }
+
+ private def cloneInternal(original: Symbol, f: Symbol => Tree): Tree =
+ cloneInternal(original, f, original.name)
+
+ def clazzMember(name: Name) = clazz.info nonPrivateMember name
+ def typeInClazz(sym: Symbol) = clazz.thisType memberType sym
+
+ /** Function argument takes the newly created method symbol of
+ * the same type as `name` in clazz, and returns the tree to be
+ * added to the template.
+ */
+ def overrideMethod(name: Name)(f: Symbol => Tree): Tree =
+ overrideMethod(clazzMember(name))(f)
+
+ def overrideMethod(original: Symbol)(f: Symbol => Tree): Tree =
+ cloneInternal(original, sym => f(sym setFlag OVERRIDE))
+
+ def deriveMethod(original: Symbol, nameFn: Name => Name)(f: Symbol => Tree): Tree =
+ cloneInternal(original, f, nameFn(original.name))
+
+ def createMethod(name: Name, paramTypes: List[Type], returnType: Type)(f: Symbol => Tree): Tree =
+ createInternal(name, f, (m: Symbol) => MethodType(m newSyntheticValueParams paramTypes, returnType))
+
+ def createMethod(name: Name, returnType: Type)(f: Symbol => Tree): Tree =
+ createInternal(name, f, NullaryMethodType(returnType))
+
+ def createMethod(original: Symbol)(f: Symbol => Tree): Tree =
+ createInternal(original.name, f, original.info)
+
+ def forwardMethod(original: Symbol, newMethod: Symbol)(transformArgs: List[Tree] => List[Tree]): Tree =
+ createMethod(original)(m => gen.mkMethodCall(newMethod, transformArgs(m.paramss.head map Ident)))
+
+ def createSwitchMethod(name: Name, range: Seq[Int], returnType: Type)(f: Int => Tree) = {
+ createMethod(name, List(IntClass.tpe), returnType) { m =>
+ val arg0 = Ident(m.firstParam)
+ val default = DEFAULT ==> THROW(IndexOutOfBoundsExceptionClass, arg0)
+ val cases = range.map(num => CASE(LIT(num)) ==> f(num)).toList :+ default
+
+ Match(arg0, cases)
+ }
+ }
+
+ // def foo() = constant
+ def constantMethod(name: Name, value: Any): Tree = {
+ val constant = Constant(value)
+ createMethod(name, Nil, constant.tpe)(_ => Literal(constant))
+ }
+ // def foo = constant
+ def constantNullary(name: Name, value: Any): Tree = {
+ val constant = Constant(value)
+ createMethod(name, constant.tpe)(_ => Literal(constant))
+ }
+ }
+
+ /** There are two key methods in here.
+ *
+ * 1) Enter methods such as enterGetterSetterare called
+ * from Namer with a tree which may generate further trees such as accessors or
+ * implicit wrappers. Some setup is performed. In general this creates symbols
+ * and enters them into the scope of the owner.
+ *
+ * 2) addDerivedTrees is called from Typer when a Template is typed.
+ * It completes the job, returning a list of trees with their symbols
+ * set to those created in the enter methods. Those trees then become
+ * part of the typed template.
+ */
+ trait MethodSynth {
+ self: Namer =>
+
+ import NamerErrorGen._
+
+ def enterImplicitWrapper(tree: ClassDef) {
+ ImplicitClassWrapper(tree).createAndEnterSymbol()
+ }
+
+ def enterGetterSetter(tree: ValDef) {
+ val ValDef(mods, name, _, _) = tree
+ if (nme.isSetterName(name))
+ ValOrValWithSetterSuffixError(tree)
+
+ tree.symbol = (
+ if (mods.isLazy) {
+ val lazyValGetter = LazyValGetter(tree).createAndEnterSymbol()
+ enterLazyVal(tree, lazyValGetter)
+ } else {
+ if (mods.isPrivateLocal)
+ PrivateThisCaseClassParameterError(tree)
+ val getter = Getter(tree).createAndEnterSymbol()
+ // Create the setter if necessary.
+ if (mods.isMutable)
+ Setter(tree).createAndEnterSymbol()
+
+ // If abstract, the tree gets the getter's symbol. Otherwise, create a field.
+ if (mods.isDeferred) getter setPos tree.pos
+ else enterStrictVal(tree)
+ }
+ )
+
+ enterBeans(tree)
+ }
+
+ def addDerivedTrees(typer: Typer, stat: Tree): List[Tree] = stat match {
+ case vd @ ValDef(mods, name, tpt, rhs) if !noFinishGetterSetter(vd) =>
+ // If we don't save the annotations, they seem to wander off.
+ val annotations = stat.symbol.initialize.annotations
+ ( allValDefDerived(vd)
+ map (acc => atPos(vd.pos.focus)(acc derive annotations))
+ filterNot (_ eq EmptyTree)
+ )
+ case cd @ ClassDef(mods, _, _, _) if mods.isImplicit =>
+ val annotations = stat.symbol.initialize.annotations
+ // TODO: need to shuffle annotations between wrapper and class.
+ val wrapper = ImplicitClassWrapper(cd)
+ val meth = wrapper.derivedSym
+ context.unit.synthetics get meth match {
+ case Some(mdef) =>
+ context.unit.synthetics -= meth
+ meth setAnnotations deriveAnnotations(annotations, MethodTargetClass, keepClean = false)
+ cd.symbol setAnnotations deriveAnnotations(annotations, ClassTargetClass, keepClean = true)
+ List(cd, mdef)
+ case _ =>
+ // Shouldn't happen, but let's give ourselves a reasonable error when it does
+ abort("No synthetics for " + meth + ": synthetics contains " + context.unit.synthetics.keys.mkString(", "))
+ }
+ case _ =>
+ stat :: Nil
+ }
+
+ def standardAccessors(vd: ValDef): List[DerivedFromValDef] = (
+ if (vd.mods.isMutable && !vd.mods.isLazy) List(Getter(vd), Setter(vd))
+ else if (vd.mods.isLazy) List(LazyValGetter(vd))
+ else List(Getter(vd))
+ )
+ def beanAccessors(vd: ValDef): List[DerivedFromValDef] = {
+ val setter = if (vd.mods.isMutable) List(BeanSetter(vd)) else Nil
+ if (forMSIL) Nil
+ else if (vd.symbol hasAnnotation BeanPropertyAttr)
+ BeanGetter(vd) :: setter
+ else if (vd.symbol hasAnnotation BooleanBeanPropertyAttr)
+ BooleanBeanGetter(vd) :: setter
+ else Nil
+ }
+ def allValDefDerived(vd: ValDef) = {
+ val field = if (vd.mods.isDeferred || (vd.mods.isLazy && hasUnitType(vd.symbol))) Nil
+ else List(Field(vd))
+ field ::: standardAccessors(vd) ::: beanAccessors(vd)
+ }
+
+ // Take into account annotations so that we keep annotated unit lazy val
+ // to get better error message already from the cps plugin itself
+ def hasUnitType(sym: Symbol) = (sym.tpe.typeSymbol == UnitClass) && sym.tpe.annotations.isEmpty
+
+ /** This trait assembles what's needed for synthesizing derived methods.
+ * Important: Typically, instances of this trait are created TWICE for each derived
+ * symbol; once form Namers in an enter method, and once from Typers in addDerivedTrees.
+ * So it's important that creating an instance of Derived does not have a side effect,
+ * or if it has a side effect, control that it is done only once.
+ */
+ trait Derived {
+
+ /** The tree from which we are deriving a synthetic member. Typically, that's
+ * given as an argument of the instance. */
+ def tree: Tree
+
+ /** The name of the method */
+ def name: TermName
+
+ /** The flags that are retained from the original symbol */
+
+ def flagsMask: Long
+
+ /** The flags that the derived symbol has in addition to those retained from
+ * the original symbol*/
+ def flagsExtra: Long
+
+ /** type completer for the synthetic member.
+ */
+ def completer(sym: Symbol): Type
+
+ /** The derived symbol. It is assumed that this symbol already exists and has been
+ * entered in the parent scope when derivedSym is called */
+ def derivedSym: Symbol
+
+ /** The definition tree of the derived symbol. */
+ def derivedTree: Tree
+ }
+
+ trait DerivedFromMemberDef extends Derived {
+ def tree: MemberDef
+ def enclClass: Symbol
+
+ // Final methods to make the rest easier to reason about.
+ final def mods = tree.mods
+ final def basisSym = tree.symbol
+ final def derivedFlags: Long = basisSym.flags & flagsMask | flagsExtra
+ }
+
+ trait DerivedFromClassDef extends DerivedFromMemberDef {
+ def tree: ClassDef
+ final def enclClass = basisSym.owner.enclClass
+ }
+
+ trait DerivedFromValDef extends DerivedFromMemberDef {
+ def tree: ValDef
+ final def enclClass = basisSym.enclClass
+
+ /** Which meta-annotation is associated with this kind of entity.
+ * Presently one of: field, getter, setter, beanGetter, beanSetter, param.
+ */
+ def category: Symbol
+
+ /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */
+ final def completer(sym: Symbol) = namerOf(sym).accessorTypeCompleter(tree, isSetter)
+ final def fieldSelection = Select(This(enclClass), basisSym)
+ final def derivedMods: Modifiers = mods & flagsMask | flagsExtra mapAnnotations (_ => Nil)
+
+ def derivedSym: Symbol = tree.symbol
+ def derivedTree: Tree = EmptyTree
+
+ def isSetter = false
+ def isDeferred = mods.isDeferred
+ def keepClean = false // whether annotations whose definitions are not meta-annotated should be kept.
+ def validate() { }
+ def createAndEnterSymbol(): Symbol = {
+ val sym = owner.newMethod(name, tree.pos.focus, (tree.mods.flags & flagsMask) | flagsExtra)
+ setPrivateWithin(tree, sym)
+ enterInScope(sym)
+ sym setInfo completer(sym)
+ }
+ private def logDerived(result: Tree): Tree = {
+ debuglog("[+derived] " + ojoin(mods.flagString, basisSym.accurateKindString, basisSym.getterName.decode)
+ + " (" + derivedSym + ")\n " + result)
+
+ result
+ }
+ final def derive(initial: List[AnnotationInfo]): Tree = {
+ validate()
+ derivedSym setAnnotations deriveAnnotations(initial, category, keepClean)
+ logDerived(derivedTree)
+ }
+ }
+ trait DerivedGetter extends DerivedFromValDef {
+ // TODO
+ }
+ trait DerivedSetter extends DerivedFromValDef {
+ override def isSetter = true
+ private def setterParam = derivedSym.paramss match {
+ case (p :: Nil) :: _ => p
+ case _ => NoSymbol
+ }
+ private def setterRhs = (
+ if (mods.isDeferred || derivedSym.isOverloaded) EmptyTree
+ else Assign(fieldSelection, Ident(setterParam))
+ )
+ private def setterDef = DefDef(derivedSym, setterRhs)
+ override def derivedTree: Tree = if (setterParam == NoSymbol) EmptyTree else setterDef
+ }
+
+ /** A synthetic method which performs the implicit conversion implied by
+ * the declaration of an implicit class.
+ */
+ case class ImplicitClassWrapper(tree: ClassDef) extends DerivedFromClassDef {
+ def completer(sym: Symbol): Type = ??? // not needed
+ def createAndEnterSymbol(): Symbol = enterSyntheticSym(derivedTree)
+ def derivedSym: Symbol = {
+ // Only methods will do! Don't want to pick up any stray
+ // companion objects of the same name.
+ val result = enclClass.info decl name suchThat (x => x.isMethod && x.isSynthetic)
+ assert(result != NoSymbol, "not found: "+name+" in "+enclClass+" "+enclClass.info.decls)
+ result
+ }
+ def derivedTree: DefDef =
+ factoryMeth(mods & flagsMask | flagsExtra, name, tree)
+ def flagsExtra: Long = METHOD | IMPLICIT | SYNTHETIC
+ def flagsMask: Long = AccessFlags
+ def name: TermName = tree.name.toTermName
+ }
+
+ abstract class BaseGetter(tree: ValDef) extends DerivedGetter {
+ def name = tree.name
+ def category = GetterTargetClass
+ def flagsMask = GetterFlags
+ def flagsExtra = ACCESSOR | ( if (tree.mods.isMutable) 0 else STABLE )
+
+ override def validate() {
+ assert(derivedSym != NoSymbol, tree)
+ if (derivedSym.isOverloaded)
+ GetterDefinedTwiceError(derivedSym)
+
+ super.validate()
+ }
+ }
+ case class Getter(tree: ValDef) extends BaseGetter(tree) {
+ override def derivedSym = (
+ if (mods.isDeferred) basisSym
+ else basisSym.getter(enclClass)
+ )
+
+ override def derivedTree: DefDef = {
+ // For existentials, don't specify a type for the getter, even one derived
+ // from the symbol! This leads to incompatible existentials for the field and
+ // the getter. Let the typer do all the work. You might think "why only for
+ // existentials, why not always," and you would be right, except: a single test
+ // fails, but it looked like some work to deal with it. Test neg/t0606.scala
+ // starts compiling (instead of failing like it's supposed to) because the typer
+ // expects to be able to identify escaping locals in typedDefDef, and fails to
+ // spot that brand of them. In other words it's an artifact of the implementation.
+ val tpt = derivedSym.tpe.finalResultType match {
+ case ExistentialType(_, _) => TypeTree()
+ case _ if mods.isDeferred => TypeTree()
+ case tp => TypeTree(tp)
+ }
+ tpt setPos derivedSym.pos.focus
+ // keep type tree of original abstract field
+ if (mods.isDeferred)
+ tpt setOriginal tree.tpt
+
+ // TODO - reconcile this with the DefDef creator in Trees (which
+ // at this writing presented no way to pass a tree in for tpt.)
+ atPos(derivedSym.pos) {
+ DefDef(
+ Modifiers(derivedSym.flags),
+ derivedSym.name.toTermName,
+ Nil,
+ Nil,
+ tpt,
+ if (mods.isDeferred) EmptyTree else gen.mkCheckInit(fieldSelection)
+ ) setSymbol derivedSym
+ }
+ }
+ }
+ /** Implements lazy value accessors:
+ * - for lazy values of type Unit and all lazy fields inside traits,
+ * the rhs is the initializer itself
+ * - for all other lazy values z the accessor is a block of this form:
+ * { z = <rhs>; z } where z can be an identifier or a field.
+ */
+ case class LazyValGetter(tree: ValDef) extends BaseGetter(tree) {
+ class ChangeOwnerAndModuleClassTraverser(oldowner: Symbol, newowner: Symbol)
+ extends ChangeOwnerTraverser(oldowner, newowner) {
+
+ override def traverse(tree: Tree) {
+ tree match {
+ case _: DefTree => change(tree.symbol.moduleClass)
+ case _ =>
+ }
+ super.traverse(tree)
+ }
+ }
+
+ // todo: in future this should be enabled but now other phases still depend on the flag for various reasons
+ //override def flagsMask = (super.flagsMask & ~LAZY)
+ override def derivedSym = basisSym.lazyAccessor
+ override def derivedTree: DefDef = {
+ val ValDef(_, _, tpt0, rhs0) = tree
+ val rhs1 = transformed.getOrElse(rhs0, rhs0)
+ val body = (
+ if (tree.symbol.owner.isTrait || hasUnitType(basisSym)) rhs1
+ else gen.mkAssignAndReturn(basisSym, rhs1)
+ )
+ derivedSym.setPos(tree.pos) // cannot set it at createAndEnterSymbol because basisSym can possible stil have NoPosition
+ val ddefRes = atPos(tree.pos)(DefDef(derivedSym, new ChangeOwnerAndModuleClassTraverser(basisSym, derivedSym)(body)))
+ // ValDef will have its position focused whereas DefDef will have original correct rangepos
+ // ideally positions would be correct at the creation time but lazy vals are really a special case
+ // here so for the sake of keeping api clean we fix positions manually in LazyValGetter
+ ddefRes.tpt.setPos(tpt0.pos)
+ tpt0.setPos(tpt0.pos.focus)
+ ddefRes
+ }
+ }
+ case class Setter(tree: ValDef) extends DerivedSetter {
+ def name = nme.getterToSetter(tree.name)
+ def category = SetterTargetClass
+ def flagsMask = SetterFlags
+ def flagsExtra = ACCESSOR
+
+ override def derivedSym = basisSym.setter(enclClass)
+ }
+ case class Field(tree: ValDef) extends DerivedFromValDef {
+ def name = nme.getterToLocal(tree.name)
+ def category = FieldTargetClass
+ def flagsMask = FieldFlags
+ def flagsExtra = PrivateLocal
+ // By default annotations go to the field, except if the field is
+ // generated for a class parameter (PARAMACCESSOR).
+ override def keepClean = !mods.isParamAccessor
+ override def derivedTree = (
+ if (mods.isDeferred) EmptyTree
+ else if (mods.isLazy) copyValDef(tree)(mods = mods | flagsExtra, name = this.name, rhs = EmptyTree).setPos(tree.pos.focus)
+ else copyValDef(tree)(mods = mods | flagsExtra, name = this.name)
+ )
+ }
+ case class Param(tree: ValDef) extends DerivedFromValDef {
+ def name = tree.name
+ def category = ParamTargetClass
+ def flagsMask = -1L
+ def flagsExtra = 0L
+ override def keepClean = true
+ override def derivedTree = EmptyTree
+ }
+ def validateParam(tree: ValDef) {
+ Param(tree).derive(tree.symbol.annotations)
+ }
+
+ sealed abstract class BeanAccessor(bean: String) extends DerivedFromValDef {
+ val name = newTermName(bean + tree.name.toString.capitalize)
+ def flagsMask = BeanPropertyFlags
+ def flagsExtra = 0
+ override def derivedSym = enclClass.info decl name
+ }
+ trait AnyBeanGetter extends BeanAccessor with DerivedGetter {
+ def category = BeanGetterTargetClass
+ override def validate() {
+ if (derivedSym == NoSymbol) {
+ // the namer decides whether to generate these symbols or not. at that point, we don't
+ // have symbolic information yet, so we only look for annotations named "BeanProperty".
+ BeanPropertyAnnotationLimitationError(tree)
+ }
+ super.validate()
+ }
+ }
+ trait NoSymbolBeanGetter extends AnyBeanGetter {
+ // Derives a tree without attempting to use the original tree's symbol.
+ override def derivedTree = {
+ atPos(tree.pos.focus) {
+ DefDef(derivedMods, name, Nil, ListOfNil, tree.tpt.duplicate,
+ if (isDeferred) EmptyTree else Select(This(owner), tree.name)
+ )
+ }
+ }
+ override def createAndEnterSymbol(): Symbol = enterSyntheticSym(derivedTree)
+ }
+ case class BooleanBeanGetter(tree: ValDef) extends BeanAccessor("is") with AnyBeanGetter { }
+ case class BeanGetter(tree: ValDef) extends BeanAccessor("get") with AnyBeanGetter { }
+ case class BeanSetter(tree: ValDef) extends BeanAccessor("set") with DerivedSetter {
+ def category = BeanSetterTargetClass
+ }
+
+ // No Symbols available.
+ private def beanAccessorsFromNames(tree: ValDef) = {
+ val ValDef(mods, name, tpt, _) = tree
+ val hasBP = mods hasAnnotationNamed tpnme.BeanPropertyAnnot
+ val hasBoolBP = mods hasAnnotationNamed tpnme.BooleanBeanPropertyAnnot
+
+ if (hasBP || hasBoolBP) {
+ val getter = (
+ if (hasBP) new BeanGetter(tree) with NoSymbolBeanGetter
+ else new BooleanBeanGetter(tree) with NoSymbolBeanGetter
+ )
+ getter :: {
+ if (mods.isMutable) List(BeanSetter(tree)) else Nil
+ }
+ }
+ else Nil
+ }
+
+ protected def enterBeans(tree: ValDef) {
+ if (forMSIL)
+ return
+
+ val ValDef(mods, name, _, _) = tree
+ val beans = beanAccessorsFromNames(tree)
+ if (beans.nonEmpty) {
+ if (!name.charAt(0).isLetter)
+ BeanPropertyAnnotationFieldWithoutLetterError(tree)
+ else if (mods.isPrivate) // avoids name clashes with private fields in traits
+ BeanPropertyAnnotationPrivateFieldError(tree)
+
+ // Create and enter the symbols here, add the trees in finishGetterSetter.
+ beans foreach (_.createAndEnterSymbol())
+ }
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Modes.scala b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
index ad4be46..d650762 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Modes.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -86,6 +86,10 @@ trait Modes {
*/
final val TYPEPATmode = 0x10000
+ /** RETmode is set when we are typing a return expression.
+ */
+ final val RETmode = 0x20000
+
final private val StickyModes = EXPRmode | PATTERNmode | TYPEmode | ALTmode
final def onlyStickyModes(mode: Int) =
@@ -105,6 +109,9 @@ trait Modes {
final def inFunMode(mode: Int) = (mode & FUNmode) != 0
final def inPolyMode(mode: Int) = (mode & POLYmode) != 0
final def inPatternMode(mode: Int) = (mode & PATTERNmode) != 0
+ final def inExprModeOr(mode: Int, others: Int) = (mode & (EXPRmode | others)) != 0
+ final def inExprModeButNot(mode: Int, prohibited: Int) =
+ (mode & (EXPRmode | prohibited)) == EXPRmode
/** Translates a mask of mode flags into something readable.
*/
@@ -130,4 +137,4 @@ trait Modes {
def modeString(mode: Int): String =
if (mode == 0) "NOmode"
else (modeNameMap filterKeys (bit => inAllModes(mode, bit))).values mkString " "
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index fddf115..bb93807 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -1,167 +1,197 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package typechecker
-import scala.collection.mutable.{HashMap, WeakHashMap}
+import scala.collection.mutable
+import scala.annotation.tailrec
import scala.ref.WeakReference
-import symtab.Flags
import symtab.Flags._
+import scala.tools.nsc.io.AbstractFile
/** This trait declares methods to create symbols and to enter them into scopes.
*
* @author Martin Odersky
* @version 1.0
*/
-trait Namers { self: Analyzer =>
+trait Namers extends MethodSynthesis {
+ self: Analyzer =>
+
import global._
import definitions._
- /** Convert to corresponding type parameters all skolems of method parameters
- * which appear in `tparams`.
+ private var _lockedCount = 0
+ def lockedCount = this._lockedCount
+
+ /** Replaces any Idents for which cond is true with fresh TypeTrees().
+ * Does the same for any trees containing EmptyTrees.
*/
- class DeSkolemizeMap(tparams: List[Symbol]) extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args)
- if (sym.isTypeSkolem && (tparams contains sym.deSkolemize)) =>
-// println("DESKOLEMIZING "+sym+" in "+sym.owner)
- mapOver(typeRef(NoPrefix, sym.deSkolemize, args))
-/*
- case PolyType(tparams1, restpe) =>
- new DeSkolemizeMap(tparams1 ::: tparams).mapOver(tp)
- case ClassInfoType(parents, decls, clazz) =>
- val parents1 = parents mapConserve (this)
- if (parents1 eq parents) tp else ClassInfoType(parents1, decls, clazz)
-*/
- case _ =>
- mapOver(tp)
+ private class TypeTreeSubstituter(cond: Name => Boolean) extends Transformer {
+ override def transform(tree: Tree): Tree = tree match {
+ case Ident(name) if cond(name) => TypeTree()
+ case _ => super.transform(tree)
+ }
+ def apply(tree: Tree) = {
+ val r = transform(tree)
+ if (r.exists(_.isEmpty)) TypeTree()
+ else r
}
}
- private class NormalNamer(context : Context) extends Namer(context)
- def newNamer(context : Context) : Namer = new NormalNamer(context)
-
- // In the typeCompleter (templateSig) of a case class (resp it's module),
- // synthetic `copy' (reps `apply', `unapply') methods are added. To compute
- // their signatures, the corresponding ClassDef is needed.
- // During naming, for each case class module symbol, the corresponding ClassDef
- // is stored in this map. The map is cleared lazily, i.e. when the new symbol
- // is created with the same name, the old one (if present) is wiped out, or the
- // entry is deleted when it is used and no longer needed.
- private val caseClassOfModuleClass = new WeakHashMap[Symbol, WeakReference[ClassDef]]
-
- // Default getters of constructors are added to the companion object in the
- // typeCompleter of the constructor (methodSig). To compute the signature,
- // we need the ClassDef. To create and enter the symbols into the companion
- // object, we need the templateNamer of that module class.
- // This map is extended during naming of classes, the Namer is added in when
- // it's available, i.e. in the type completer (templateSig) of the module class.
- private[typechecker] val classAndNamerOfModule = new HashMap[Symbol, (ClassDef, Namer)]
-
- def resetNamer() {
- classAndNamerOfModule.clear
+ private def isTemplateContext(ctx: Context): Boolean = ctx.tree match {
+ case Template(_, _, _) => true
+ case Import(_, _) => isTemplateContext(ctx.outer)
+ case _ => false
}
- abstract class Namer(val context: Context) {
+ private class NormalNamer(context: Context) extends Namer(context)
+ def newNamer(context: Context): Namer = new NormalNamer(context)
+ def newNamerFor(context: Context, tree: Tree): Namer =
+ newNamer(context.makeNewScope(tree, tree.symbol))
- val typer = newTyper(context)
+ abstract class Namer(val context: Context) extends MethodSynth with NamerContextErrors { thisNamer =>
- def setPrivateWithin[Sym <: Symbol](tree: Tree, sym: Sym, mods: Modifiers): Sym = {
- if (mods.hasAccessBoundary)
- sym.privateWithin = typer.qualifyingClass(tree, mods.privateWithin, true)
- sym
- }
-
- def inConstructorFlag: Long =
- if (context.owner.isConstructor && !context.inConstructorSuffix || context.owner.isEarlyInitialized) INCONSTRUCTOR
- else 0l
+ import NamerErrorGen._
+ val typer = newTyper(context)
- def moduleClassFlags(moduleFlags: Long) =
- (moduleFlags & ModuleToClassFlags) | FINAL | inConstructorFlag
+ private lazy val innerNamer =
+ if (isTemplateContext(context)) createInnerNamer() else this
- def updatePosFlags(sym: Symbol, pos: Position, flags: Long): Symbol = {
- if (settings.debug.value) log("overwriting " + sym)
- val lockedFlag = sym.flags & LOCKED
- sym.reset(NoType)
- sym setPos pos
- sym.flags = flags | lockedFlag
- if (sym.isModule && sym.moduleClass != NoSymbol)
- updatePosFlags(sym.moduleClass, pos, moduleClassFlags(flags))
- var companion: Symbol = NoSymbol
- if (sym.owner.isPackageClass && {companion = companionSymbolOf(sym, context); companion != NoSymbol} &&
- (companion.rawInfo.isInstanceOf[loaders.SymbolLoader] ||
- companion.rawInfo.isComplete && runId(sym.validTo) != currentRunId))
- // pre-set linked symbol to NoType, in case it is not loaded together with this symbol.
- companion.setInfo(NoType)
- sym
+ def createNamer(tree: Tree): Namer = {
+ val sym = tree match {
+ case ModuleDef(_, _, _) => tree.symbol.moduleClass
+ case _ => tree.symbol
+ }
+ def isConstrParam(vd: ValDef) = {
+ (sym hasFlag PARAM | PRESUPER) &&
+ !vd.mods.isJavaDefined &&
+ sym.owner.isConstructor
+ }
+ val ownerCtx = tree match {
+ case vd: ValDef if isConstrParam(vd) =>
+ context.makeConstructorContext
+ case _ =>
+ context
+ }
+ newNamer(ownerCtx.makeNewScope(tree, sym))
}
+ def createInnerNamer() = {
+ newNamer(context.make(context.tree, owner, newScope))
+ }
+ def createPrimaryConstructorParameterNamer: Namer = { //todo: can we merge this with SCCmode?
+ val classContext = context.enclClass
+ val outerContext = classContext.outer.outer
+ val paramContext = outerContext.makeNewScope(outerContext.tree, outerContext.owner)
- private def isCopyGetter(meth: Symbol) = {
- meth.name startsWith (nme.copy + nme.DEFAULT_GETTER_STRING)
+ owner.unsafeTypeParams foreach (paramContext.scope enter _)
+ newNamer(paramContext)
}
- private def isTemplateContext(context: Context): Boolean = context.tree match {
- case Template(_, _, _) => true
- case Import(_, _) => isTemplateContext(context.outer)
- case _ => false
+
+ def enclosingNamerWithScope(scope: Scope) = {
+ var cx = context
+ while (cx != NoContext && cx.scope != scope) cx = cx.outer
+ if (cx == NoContext || cx == context) thisNamer
+ else newNamer(cx)
}
- private var innerNamerCache: Namer = null
- protected def makeConstructorScope(classContext : Context) : Context = {
- val outerContext = classContext.outer.outer
- outerContext.makeNewScope(outerContext.tree, outerContext.owner)
+ def enterValueParams(vparamss: List[List[ValDef]]): List[List[Symbol]] = {
+ mmap(vparamss) { param =>
+ val sym = assignSymbol(param, param.name, mask = ValueParameterFlags)
+ setPrivateWithin(param, sym)
+ enterInScope(sym)
+ sym setInfo monoTypeCompleter(param)
+ }
}
- def namerOf(sym: Symbol): Namer = {
+ protected def owner = context.owner
+ private def contextFile = context.unit.source.file
+ private def typeErrorHandler[T](tree: Tree, alt: T): PartialFunction[Throwable, T] = {
+ case ex: TypeError =>
+ // H@ need to ensure that we handle only cyclic references
+ TypeSigError(tree, ex)
+ alt
+ }
+ // PRIVATE | LOCAL are fields generated for primary constructor arguments
+ // @PP: ...or fields declared as private[this]. PARAMACCESSOR marks constructor arguments.
+ // Neither gets accessors so the code is as far as I know still correct.
+ def noEnterGetterSetter(vd: ValDef) = !vd.mods.isLazy && (
+ !owner.isClass
+ || (vd.mods.isPrivateLocal && !vd.mods.isCaseAccessor)
+ || (vd.name startsWith nme.OUTER)
+ || (context.unit.isJava)
+ )
+ def noFinishGetterSetter(vd: ValDef) = (
+ (vd.mods.isPrivateLocal && !vd.mods.isLazy) // all lazy vals need accessors, even private[this]
+ || vd.symbol.isModuleVar)
+
+ def setPrivateWithin[T <: Symbol](tree: Tree, sym: T, mods: Modifiers): T =
+ if (sym.isPrivateLocal || !mods.hasAccessBoundary) sym
+ else sym setPrivateWithin typer.qualifyingClass(tree, mods.privateWithin, packageOK = true)
+
+ def setPrivateWithin(tree: MemberDef, sym: Symbol): Symbol =
+ setPrivateWithin(tree, sym, tree.mods)
+
+ def inConstructorFlag: Long = {
+ val termOwnedContexts: List[Context] = context.enclosingContextChain.takeWhile(_.owner.isTerm)
+ val constructorNonSuffix = termOwnedContexts exists (c => c.owner.isConstructor && !c.inConstructorSuffix)
+ val earlyInit = termOwnedContexts exists (_.owner.isEarlyInitialized)
+ if (constructorNonSuffix || earlyInit) INCONSTRUCTOR else 0L
+ }
- def innerNamer: Namer = {
- if (innerNamerCache eq null)
- innerNamerCache =
- if (!isTemplateContext(context)) this
- else newNamer(context.make(context.tree, context.owner, new Scope))
- innerNamerCache
- }
+ def moduleClassFlags(moduleFlags: Long) =
+ (moduleFlags & ModuleToClassFlags) | inConstructorFlag
- def primaryConstructorParamNamer: Namer = { //todo: can we merge this with SCCmode?
- val classContext = context.enclClass
- val paramContext = makeConstructorScope(classContext)
- val unsafeTypeParams = context.owner.unsafeTypeParams
- unsafeTypeParams foreach(sym => paramContext.scope.enter(sym))
- newNamer(paramContext)
+ def updatePosFlags(sym: Symbol, pos: Position, flags: Long): Symbol = {
+ debuglog("[overwrite] " + sym)
+ val newFlags = (sym.flags & LOCKED) | flags
+ sym reset NoType setFlag newFlags setPos pos
+ sym.moduleClass andAlso (updatePosFlags(_, pos, moduleClassFlags(flags)))
+
+ if (sym.owner.isPackageClass) {
+ companionSymbolOf(sym, context) andAlso { companion =>
+ val assignNoType = companion.rawInfo match {
+ case _: SymLoader => true
+ case tp => tp.isComplete && (runId(sym.validTo) != currentRunId)
+ }
+ // pre-set linked symbol to NoType, in case it is not loaded together with this symbol.
+ if (assignNoType)
+ companion setInfo NoType
+ }
}
-
- def usePrimary = sym.isTerm && (
- (sym.isParamAccessor) ||
- (sym.isParameter && sym.owner.isPrimaryConstructor)
+ sym
+ }
+ def namerOf(sym: Symbol): Namer = {
+ val usePrimary = sym.isTerm && (
+ (sym.isParamAccessor)
+ || (sym.isParameter && sym.owner.isPrimaryConstructor)
)
- if (usePrimary) primaryConstructorParamNamer
+ if (usePrimary) createPrimaryConstructorParameterNamer
else innerNamer
}
- protected def conflict(newS : Symbol, oldS : Symbol) : Boolean = {
- (!oldS.isSourceMethod ||
- nme.isSetterName(newS.name) ||
- newS.owner.isPackageClass) &&
- !((newS.owner.isTypeParameter || newS.owner.isAbstractType) &&
- newS.name.length==1 && newS.name(0)=='_') //@M: allow repeated use of `_' for higher-order type params
- }
-
- private def setInfo[Sym <: Symbol](sym : Sym)(tpe : LazyType) : Sym = sym.setInfo(tpe)
-
- private def doubleDefError(pos: Position, sym: Symbol) {
- context.error(pos,
- sym.name.toString() + " is already defined as " +
- (if (sym.isSynthetic)
- "(compiler-generated) "+ (if (sym.isModule) "case class companion " else "")
- else "") +
- (if (sym.isCase) "case class " + sym.name else sym.toString()))
- }
+ protected def conflict(newS: Symbol, oldS: Symbol) = (
+ ( !oldS.isSourceMethod
+ || nme.isSetterName(newS.name)
+ || newS.owner.isPackageClass
+ ) &&
+ !( // @M: allow repeated use of `_` for higher-order type params
+ (newS.owner.isTypeParameter || newS.owner.isAbstractType)
+ // FIXME: name comparisons not successful, are these underscores
+ // sometimes nme.WILDCARD and sometimes tpnme.WILDCARD?
+ && (newS.name.toString == nme.WILDCARD.toString)
+ )
+ )
+
+ private def allowsOverload(sym: Symbol) = (
+ sym.isSourceMethod && sym.owner.isClass && !sym.owner.isPackageClass
+ )
private def inCurrentScope(m: Symbol): Boolean = {
- if (context.owner.isClass) context.owner == m.owner
+ if (owner.isClass) owner == m.owner
else m.owner.isClass && context.scope == m.owner.info.decls
}
@@ -171,435 +201,582 @@ trait Namers { self: Analyzer =>
/** Enter symbol into given scope and return symbol itself */
def enterInScope(sym: Symbol, scope: Scope): Symbol = {
// allow for overloaded methods
- if (!(sym.isSourceMethod && sym.owner.isClass && !sym.owner.isPackageClass)) {
- var prev = scope.lookupEntry(sym.name)
+ if (!allowsOverload(sym)) {
+ val prev = scope.lookupEntry(sym.name)
if ((prev ne null) && prev.owner == scope && conflict(sym, prev.sym)) {
- doubleDefError(sym.pos, prev.sym)
+ if (sym.isSynthetic || prev.sym.isSynthetic) {
+ handleSyntheticNameConflict(sym, prev.sym)
+ handleSyntheticNameConflict(prev.sym, sym)
+ }
+ DoubleDefError(sym, prev.sym)
sym setInfo ErrorType
scope unlink prev.sym // let them co-exist...
- scope enter sym
- } else scope enter sym
- } else scope enter sym
+ // FIXME: The comment "let them co-exist" is confusing given that the
+ // line it comments unlinks one of them. What does it intend?
+ }
+ }
+ scope enter sym
}
- def enterPackageSymbol(pos: Position, pid: RefTree, pkgOwner: Symbol): Symbol = {
- val owner = pid match {
- case Ident(name) =>
- pkgOwner
- case Select(qual: RefTree, name) =>
- enterPackageSymbol(pos, qual, pkgOwner).moduleClass
+ /** Logic to handle name conflicts of synthetically generated symbols
+ * We handle right now: t6227
+ */
+ def handleSyntheticNameConflict(sym1: Symbol, sym2: Symbol) = {
+ if (sym1.isImplicit && sym1.isMethod && sym2.isModule && sym2.companionClass.isCaseClass)
+ validate(sym2.companionClass)
+ }
+
+ def enterSym(tree: Tree): Context = {
+ def dispatch() = {
+ var returnContext = this.context
+ tree match {
+ case tree @ PackageDef(_, _) => enterPackage(tree)
+ case tree @ ClassDef(_, _, _, _) => enterClassDef(tree)
+ case tree @ ModuleDef(_, _, _) => enterModuleDef(tree)
+ case tree @ ValDef(_, _, _, _) => enterValDef(tree)
+ case tree @ DefDef(_, _, _, _, _, _) => enterDefDef(tree)
+ case tree @ TypeDef(_, _, _, _) => enterTypeDef(tree)
+ case DocDef(_, defn) => enterSym(defn)
+ case tree @ Import(_, _) =>
+ assignSymbol(tree)
+ returnContext = context.makeNewImport(tree)
+ case _ =>
+ }
+ returnContext
}
- var pkg = owner.info.decls.lookup(pid.name)
- if (!pkg.isPackage || owner != pkg.owner) {
- pkg = owner.newPackage(pos, pid.name.toTermName)
- pkg.moduleClass.setInfo(new PackageClassInfoType(new Scope, pkg.moduleClass))
- pkg.setInfo(pkg.moduleClass.tpe)
- enterInScope(pkg, owner.info.decls)
+ tree.symbol match {
+ case NoSymbol => try dispatch() catch typeErrorHandler(tree, this.context)
+ case sym => enterExistingSym(sym)
}
- pkg
}
- def enterClassSymbol(tree : ClassDef): Symbol = {
- var c: Symbol = context.scope.lookup(tree.name)
- if (c.isType && c.owner.isPackageClass && context.scope == c.owner.info.decls && currentRun.canRedefine(c)) {
- updatePosFlags(c, tree.pos, tree.mods.flags)
- setPrivateWithin(tree, c, tree.mods)
- } else {
- var sym = context.owner.newClass(tree.pos, tree.name)
- sym = sym.setFlag(tree.mods.flags | inConstructorFlag)
- sym = setPrivateWithin(tree, sym, tree.mods)
- c = enterInScope(sym)
- }
- if (c.owner.isPackageClass) {
- val file = context.unit.source.file
- val clazz = c.asInstanceOf[ClassSymbol]
- if (settings.debug.value && (clazz.sourceFile ne null) && !clazz.sourceFile.equals(file)) {
- Console.err.println("SOURCE MISMATCH: " + clazz.sourceFile + " vs. " + file + " SYM=" + c);
- }
- clazz.sourceFile = file
- if (clazz.sourceFile ne null) {
- assert(currentRun.canRedefine(clazz) || clazz.sourceFile == currentRun.symSource(c));
- currentRun.symSource(c) = clazz.sourceFile
- }
- registerTopLevelSym(clazz)
+ /** Creates a new symbol and assigns it to the tree, returning the symbol
+ */
+ def assignSymbol(tree: Tree): Symbol =
+ logAssignSymbol(tree, tree match {
+ case PackageDef(pid, _) => createPackageSymbol(tree.pos, pid)
+ case Import(_, _) => createImportSymbol(tree)
+ case mdef: MemberDef => createMemberSymbol(mdef, mdef.name, -1L)
+ case _ => abort("Unexpected tree: " + tree)
+ })
+ def assignSymbol(tree: MemberDef, name: Name, mask: Long): Symbol =
+ logAssignSymbol(tree, createMemberSymbol(tree, name, mask))
+
+ def assignAndEnterSymbol(tree: MemberDef): Symbol = {
+ val sym = assignSymbol(tree, tree.name, -1L)
+ setPrivateWithin(tree, sym)
+ enterInScope(sym)
+ }
+ def assignAndEnterFinishedSymbol(tree: MemberDef): Symbol = {
+ val sym = assignAndEnterSymbol(tree)
+ sym setInfo completerOf(tree)
+ // log("[+info] " + sym.fullLocationString)
+ sym
+ }
+
+ private def logAssignSymbol(tree: Tree, sym: Symbol): Symbol = {
+ sym.name.toTermName match {
+ case nme.IMPORT | nme.OUTER | nme.ANON_CLASS_NAME | nme.ANON_FUN_NAME | nme.CONSTRUCTOR => ()
+ case _ =>
+ log("[+symbol] " + sym.debugLocationString)
}
- assert(c.name.toString.indexOf('(') == -1)
- c
+ tree.symbol = sym
+ sym
}
- /** Enter a module symbol. The tree parameter can be either a module definition
- * or a class definition */
- def enterModuleSymbol(tree : ModuleDef): Symbol = {
- // .pos, mods.flags | MODULE | FINAL, name
- var m: Symbol = context.scope.lookup(tree.name)
- val moduleFlags = tree.mods.flags | MODULE | FINAL
- if (m.isModule && !m.isPackage && inCurrentScope(m) &&
- (currentRun.canRedefine(m) || m.isSynthetic)) {
- updatePosFlags(m, tree.pos, moduleFlags)
- setPrivateWithin(tree, m, tree.mods)
- if (m.moduleClass != NoSymbol)
- setPrivateWithin(tree, m.moduleClass, tree.mods)
+ /** Create a new symbol at the context owner based on the given tree.
+ * A different name can be given. If the modifier flags should not be
+ * be transferred to the symbol as they are, supply a mask containing
+ * the flags to keep.
+ */
+ private def createMemberSymbol(tree: MemberDef, name: Name, mask: Long): Symbol = {
+ val pos = tree.pos
+ val isParameter = tree.mods.isParameter
+ val flags = tree.mods.flags & mask
+
+ tree match {
+ case TypeDef(_, _, _, _) if isParameter => owner.newTypeParameter(name.toTypeName, pos, flags)
+ case TypeDef(_, _, _, _) => owner.newTypeSymbol(name.toTypeName, pos, flags)
+ case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => owner.newConstructor(pos, flags)
+ case DefDef(_, _, _, _, _, _) => owner.newMethod(name.toTermName, pos, flags)
+ case ClassDef(_, _, _, _) => owner.newClassSymbol(name.toTypeName, pos, flags)
+ case ModuleDef(_, _, _) => owner.newModule(name, pos, flags)
+ case PackageDef(pid, _) => createPackageSymbol(pos, pid)
+ case ValDef(_, _, _, _) =>
+ if (isParameter) owner.newValueParameter(name, pos, flags)
+ else owner.newValue(name, pos, flags)
+ }
+ }
+ private def createFieldSymbol(tree: ValDef): TermSymbol =
+ owner.newValue(nme.getterToLocal(tree.name), tree.pos, tree.mods.flags & FieldFlags | PrivateLocal)
- context.unit.synthetics -= m
- } else {
- m = context.owner.newModule(tree.pos, tree.name)
- m.setFlag(moduleFlags)
- m = setPrivateWithin(tree, m, tree.mods)
- m = enterInScope(m)
+ private def createImportSymbol(tree: Tree) =
+ NoSymbol.newImport(tree.pos) setInfo completerOf(tree)
- m.moduleClass.setFlag(moduleClassFlags(moduleFlags))
- setPrivateWithin(tree, m.moduleClass, tree.mods)
+ /** All PackageClassInfoTypes come from here. */
+ private def createPackageSymbol(pos: Position, pid: RefTree): Symbol = {
+ val pkgOwner = pid match {
+ case Ident(_) => if (owner.isEmptyPackageClass) rootMirror.RootClass else owner
+ case Select(qual: RefTree, _) => createPackageSymbol(pos, qual).moduleClass
}
- if (m.owner.isPackageClass && !m.isPackage) {
- m.moduleClass.sourceFile = context.unit.source.file
- currentRun.symSource(m) = m.moduleClass.sourceFile
- registerTopLevelSym(m)
+ val existing = pkgOwner.info.decls.lookup(pid.name)
+
+ if (existing.isPackage && pkgOwner == existing.owner)
+ existing
+ else {
+ val pkg = pkgOwner.newPackage(pid.name.toTermName, pos)
+ val pkgClass = pkg.moduleClass
+ val pkgClassInfo = new PackageClassInfoType(newPackageScope(pkgClass), pkgClass)
+
+ pkgClass setInfo pkgClassInfo
+ pkg setInfo pkgClass.tpe
+ enterInScope(pkg, pkgOwner.info.decls)
}
- m
}
- def enterSyms(trees: List[Tree]): Namer = {
- var namer : Namer = this
- for (tree <- trees) {
- val txt = namer.enterSym(tree)
- if (txt ne namer.context) namer = newNamer(txt)
+ private def enterClassSymbol(tree: ClassDef, clazz: ClassSymbol): Symbol = {
+ val file = contextFile
+ if (clazz.sourceFile != null && clazz.sourceFile != contextFile)
+ debugwarn("!!! Source mismatch in " + clazz + ": " + clazz.sourceFile + " vs. " + contextFile)
+
+ clazz.sourceFile = contextFile
+ if (clazz.sourceFile != null) {
+ assert(currentRun.canRedefine(clazz) || clazz.sourceFile == currentRun.symSource(clazz), clazz.sourceFile)
+ currentRun.symSource(clazz) = clazz.sourceFile
}
- namer
+ registerTopLevelSym(clazz)
+ assert(clazz.name.toString.indexOf('(') < 0, clazz.name) // )
+ clazz
}
- def newTypeSkolems(tparams: List[Symbol]): List[Symbol] = {
- val tskolems = tparams map (_.newTypeSkolem)
- val ltp = new LazyType {
- override def complete(sym: Symbol) {
- sym setInfo sym.deSkolemize.info.substSym(tparams, tskolems) //@M the info of a skolem is the skolemized info of the actual type parameter of the skolem
+ def enterClassSymbol(tree: ClassDef): Symbol = {
+ val existing = context.scope.lookup(tree.name)
+ val isRedefinition = (
+ existing.isType
+ && existing.owner.isPackageClass
+ && context.scope == existing.owner.info.decls
+ && currentRun.canRedefine(existing)
+ )
+ val clazz: Symbol = {
+ if (isRedefinition) {
+ updatePosFlags(existing, tree.pos, tree.mods.flags)
+ setPrivateWithin(tree, existing)
+ existing
}
+ else assignAndEnterSymbol(tree) setFlag inConstructorFlag
+ }
+ clazz match {
+ case csym: ClassSymbol if csym.owner.isPackageClass => enterClassSymbol(tree, csym)
+ case _ => clazz
}
- tskolems foreach (_.setInfo(ltp))
- tskolems
}
- /** Replace type parameters with their TypeSkolems, which can later be deskolemized to the original type param
- * (a skolem is a representation of a bound variable when viewed inside its scope)
- * !!!Adriaan: this does not work for hk types.
+ /** Given a ClassDef or ModuleDef, verifies there isn't a companion which
+ * has been defined in a separate file.
*/
- def skolemize(tparams: List[TypeDef]) {
- val tskolems = newTypeSkolems(tparams map (_.symbol))
- for ((tparam, tskolem) <- tparams zip tskolems) tparam.symbol = tskolem
+ private def validateCompanionDefs(tree: ImplDef) {
+ val sym = tree.symbol
+ if (sym eq NoSymbol) return
+
+ val ctx = if (context.owner.isPackageObjectClass) context.outer else context
+ val module = if (sym.isModule) sym else ctx.scope lookup tree.name.toTermName
+ val clazz = if (sym.isClass) sym else ctx.scope lookup tree.name.toTypeName
+ val fails = (
+ module.isModule
+ && clazz.isClass
+ && !module.isSynthetic
+ && !clazz.isSynthetic
+ && (clazz.sourceFile ne null)
+ && (module.sourceFile ne null)
+ && !(module isCoDefinedWith clazz)
+ && module.exists
+ && clazz.exists
+ )
+ if (fails) {
+ context.unit.error(tree.pos, (
+ s"Companions '$clazz' and '$module' must be defined in same file:\n"
+ + s" Found in ${clazz.sourceFile.canonicalPath} and ${module.sourceFile.canonicalPath}")
+ )
+ }
+ }
+
+ def enterModuleDef(tree: ModuleDef) = {
+ val sym = enterModuleSymbol(tree)
+ sym.moduleClass setInfo namerOf(sym).moduleClassTypeCompleter(tree)
+ sym setInfo completerOf(tree)
+ validateCompanionDefs(tree)
+ sym
+ }
+
+ /** Enter a module symbol. The tree parameter can be either
+ * a module definition or a class definition.
+ */
+ def enterModuleSymbol(tree : ModuleDef): Symbol = {
+ var m: Symbol = context.scope lookupAll tree.name find (_.isModule) getOrElse NoSymbol
+ val moduleFlags = tree.mods.flags | MODULE
+ if (m.isModule && !m.isPackage && inCurrentScope(m) && (currentRun.canRedefine(m) || m.isSynthetic)) {
+ updatePosFlags(m, tree.pos, moduleFlags)
+ setPrivateWithin(tree, m)
+ m.moduleClass andAlso (setPrivateWithin(tree, _))
+ context.unit.synthetics -= m
+ tree.symbol = m
+ }
+ else {
+ m = assignAndEnterSymbol(tree)
+ m.moduleClass setFlag moduleClassFlags(moduleFlags)
+ setPrivateWithin(tree, m.moduleClass)
+ }
+ if (m.owner.isPackageClass && !m.isPackage) {
+ m.moduleClass.sourceFile = contextFile
+ currentRun.symSource(m) = m.moduleClass.sourceFile
+ registerTopLevelSym(m)
+ }
+ m
}
+ def enterSyms(trees: List[Tree]): Namer = {
+ trees.foldLeft(this: Namer) { (namer, t) =>
+ val ctx = namer enterSym t
+ // for Import trees, enterSym returns a changed context, so we need a new namer
+ if (ctx eq namer.context) namer
+ else newNamer(ctx)
+ }
+ }
def applicableTypeParams(owner: Symbol): List[Symbol] =
- if (owner.isTerm || owner.isPackageClass) List()
+ if (owner.isTerm || owner.isPackageClass) Nil
else applicableTypeParams(owner.owner) ::: owner.typeParams
/** If no companion object for clazz exists yet, create one by applying `creator` to
* class definition tree.
* @return the companion object symbol.
*/
- def ensureCompanionObject(tree: ClassDef, creator: => Tree): Symbol = {
- val m = companionModuleOf(tree.symbol, context)
- // @luc: not sure why "currentRun.compiles(m)" is needed, things breaks
- // otherwise. documentation welcome.
- if (m != NoSymbol && currentRun.compiles(m)) m
- else enterSyntheticSym(creator)
- }
-
- private def enterSymFinishWith(tree: Tree, tparams: List[TypeDef]) {
- val sym = tree.symbol
- if (settings.debug.value) log("entered " + sym + " in " + context.owner + ", scope-id = " + context.scope.## )
- var ltype = namerOf(sym).typeCompleter(tree)
- if (tparams nonEmpty) {
- //@M! TypeDef's type params are handled differently
- //@M e.g., in [A[x <: B], B], A and B are entered first as both are in scope in the definition of x
- //@M x is only in scope in `A[x <: B]'
- if(!sym.isAbstractType) //@M TODO: change to isTypeMember ?
- newNamer(context.makeNewScope(tree, sym)).enterSyms(tparams)
-
- ltype = new PolyTypeCompleter(tparams, ltype, tree, sym, context) //@M
- if (sym.isTerm) skolemize(tparams)
- }
-
- if (sym.name == nme.copy || isCopyGetter(sym)) {
- // it could be a compiler-generated copy method or one of its default getters
- setInfo(sym)(mkTypeCompleter(tree)(copySym => {
- def copyIsSynthetic() = sym.owner.info.member(nme.copy).isSynthetic
- if (sym.isSynthetic && (!sym.hasDefaultFlag || copyIsSynthetic())) {
- // the 'copy' method of case classes needs a special type completer to make bug0054.scala (and others)
- // work. the copy method has to take exactly the same parameter types as the primary constructor.
- val constrType = copySym.owner.primaryConstructor.tpe
- val subst = new SubstSymMap(copySym.owner.typeParams, tparams map (_.symbol))
- for ((params, cparams) <- tree.asInstanceOf[DefDef].vparamss.zip(constrType.paramss);
- (param, cparam) <- params.zip(cparams)) {
- // need to clone the type cparam.tpe??? problem is: we don't have the new owner yet (the new param symbol)
- param.tpt.setType(subst(cparam.tpe))
- }
- }
- ltype.complete(sym)
- }))
- } else setInfo(sym)(ltype)
+ def ensureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = {
+ val m = companionSymbolOf(cdef.symbol, context)
+ // @luc: not sure why "currentRun.compiles(m)" is needed, things breaks
+ // otherwise. documentation welcome.
+ //
+ // @PP: I tried to reverse engineer said documentation. The only tests
+ // which fail are buildmanager tests, as follows. Given A.scala:
+ // case class Foo()
+ // If you recompile A.scala, the Changes Map is
+ // Map(class Foo -> Nil, object Foo -> Nil)
+ // But if you remove the 'currentRun.compiles(m)' condition, it is
+ // Map(class Foo -> Nil)
+ // What exactly this implies and whether this is a sensible way to
+ // enforce it, I don't know.
+ //
+ // @martin: currentRun.compiles is needed because we might have a stale
+ // companion object from another run in scope. In that case we should still
+ // overwrite the object. I.e.
+ // Compile run #1: object Foo { ... }
+ // Compile run #2: case class Foo ...
+ // The object Foo is still in scope, but because it is not compiled in current run
+ // it should be ditched and a new one created.
+ if (m != NoSymbol && currentRun.compiles(m)) m
+ else enterSyntheticSym(atPos(cdef.pos.focus)(creator(cdef)))
}
- def enterIfNotThere(sym: Symbol) {
- val scope = context.scope
- var e = scope.lookupEntry(sym.name)
- while ((e ne null) && (e.owner eq scope) && (e.sym ne sym)) e = e.tail
- if (!((e ne null) && (e.owner eq scope))) context.scope.enter(sym)
- }
+ private def checkSelectors(tree: Import): Unit = {
+ import DuplicatesErrorKinds._
+ val Import(expr, selectors) = tree
+ val base = expr.tpe
- def enterSym(tree: Tree): Context = {
- def finishWith(tparams: List[TypeDef]) { enterSymFinishWith(tree, tparams) }
- def finish() = finishWith(Nil)
- def sym = tree.symbol
- if (sym != NoSymbol) {
- if (forInteractive && sym != null && sym.owner.isTerm) {
- // this logic is needed in case typer was interrupted half way through and then comes
- // back to do the tree again. In that case the definitions that were already
- // attributed as well as any default parameters of such methods need to be
- // re-entered in the current scope.
- enterIfNotThere(sym)
- if (sym.isLazy) {
- val acc = sym.lazyAccessor
- if (acc != NoSymbol) enterIfNotThere(acc)
+ def checkNotRedundant(pos: Position, from: Name, to0: Name) {
+ def check(to: Name) = {
+ val e = context.scope.lookupEntry(to)
+
+ if (e != null && e.owner == context.scope && e.sym.exists)
+ typer.permanentlyHiddenWarning(pos, to0, e.sym)
+ else if (context ne context.enclClass) {
+ val defSym = context.prefix.member(to) filter (
+ sym => sym.exists && context.isAccessible(sym, context.prefix, false))
+
+ defSym andAlso (typer.permanentlyHiddenWarning(pos, to0, _))
}
- defaultParametersOfMethod(sym) foreach enterIfNotThere
}
- return this.context
+ if (!tree.symbol.isSynthetic && expr.symbol != null && !expr.symbol.isInterpreterWrapper) {
+ if (base.member(from) != NoSymbol)
+ check(to0)
+ if (base.member(from.toTypeName) != NoSymbol)
+ check(to0.toTypeName)
+ }
}
- try {
- val owner = context.owner
- tree match {
- case PackageDef(pid, stats) =>
- tree.symbol = enterPackageSymbol(tree.pos, pid,
- if (context.owner == EmptyPackageClass) RootClass else context.owner)
- val namer = newNamer(context.make(tree, sym.moduleClass, sym.info.decls))
- namer enterSyms stats
-
- case tree @ ClassDef(mods, name, tparams, impl) =>
- tree.symbol = enterClassSymbol(tree)
- finishWith(tparams)
- if (mods.isCase) {
- if (treeInfo.firstConstructorArgs(impl.body).size > MaxFunctionArity)
- context.error(tree.pos, "Implementation restriction: case classes cannot have more than " + MaxFunctionArity + " parameters.")
-
- val m = ensureCompanionObject(tree, caseModuleDef(tree))
- caseClassOfModuleClass(m.moduleClass) = new WeakReference(tree)
- }
- val hasDefault = impl.body exists {
- case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => vparamss.flatten exists (_.mods.hasDefault)
- case _ => false
+ def checkSelector(s: ImportSelector) = {
+ val ImportSelector(from, fromPos, to, _) = s
+ def isValid(original: Name) =
+ original.bothNames forall (x => (base nonLocalMember x) == NoSymbol)
+
+ if (from != nme.WILDCARD && base != ErrorType) {
+ if (isValid(from)) {
+ // for Java code importing Scala objects
+ if (!nme.isModuleName(from) || isValid(nme.stripModuleSuffix(from))) {
+ typer.TyperErrorGen.NotAMemberError(tree, expr, from)
}
+ }
+ // Setting the position at the import means that if there is
+ // more than one hidden name, the second will not be warned.
+ // So it is the position of the actual hidden name.
+ //
+ // Note: java imports have precence over definitions in the same package
+ // so don't warn for them. There is a corresponding special treatment
+ // in the shadowing rules in typedIdent to (SI-7232). In any case,
+ // we shouldn't be emitting warnings for .java source files.
+ if (!context.unit.isJava)
+ checkNotRedundant(tree.pos withPoint fromPos, from, to)
+ }
+ }
- if (hasDefault) {
- val m = ensureCompanionObject(tree, companionModuleDef(tree))
- classAndNamerOfModule(m) = (tree, null)
- }
- case tree @ ModuleDef(mods, name, _) =>
- tree.symbol = enterModuleSymbol(tree)
- sym.moduleClass setInfo namerOf(sym).moduleClassTypeCompleter(tree)
- finish
-
- case vd @ ValDef(mods, name, tp, rhs) =>
- if ((!context.owner.isClass ||
- (mods.isPrivateLocal && !mods.isCaseAccessor) ||
- name.startsWith(nme.OUTER) ||
- context.unit.isJava) &&
- !mods.isLazy) {
- val vsym = owner.newValue(tree.pos, name).setFlag(mods.flags);
- if(context.unit.isJava) setPrivateWithin(tree, vsym, mods) // #3663 -- for Scala fields we assume private[this]
- tree.symbol = enterInScope(vsym)
- finish
- } else {
- val mods1 =
- if (mods.isPrivateLocal && !mods.isLazy) {
- context.error(tree.pos, "private[this] not allowed for case class parameters")
- mods &~ LOCAL
- } else mods
- // add getter and possibly also setter
- if (nme.isSetterName(name))
- context.error(tree.pos, "Names of vals or vars may not end in `_='")
- // .isInstanceOf[..]: probably for (old) IDE hook. is this obsolete?
- val getter = enterAccessorMethod(tree, name, getterFlags(mods1.flags), mods1)
- setInfo(getter)(namerOf(getter).getterTypeCompleter(vd))
- if (mods1.isMutable) {
- val setter = enterAccessorMethod(tree, nme.getterToSetter(name), setterFlags(mods1.flags), mods1)
- setInfo(setter)(namerOf(setter).setterTypeCompleter(vd))
- }
+ def noDuplicates(names: List[Name], check: DuplicatesErrorKinds.Value) {
+ def loop(xs: List[Name]): Unit = xs match {
+ case Nil => ()
+ case hd :: tl =>
+ if (hd == nme.WILDCARD || !(tl contains hd)) loop(tl)
+ else DuplicatesError(tree, hd, check)
+ }
+ loop(names filterNot (x => x == null || x == nme.WILDCARD))
+ }
+ selectors foreach checkSelector
- tree.symbol =
- if (mods1.isDeferred) {
- getter setPos tree.pos // unfocus getter position, because there won't be a separate value
- } else {
- val vsym =
- if (!context.owner.isClass) {
- assert(mods1.isLazy) // if not a field, it has to be a lazy val
- owner.newValue(tree.pos, name + "$lzy" ).setFlag((mods1.flags | MUTABLE) & ~IMPLICIT)
- } else {
- val mFlag = if (mods1.isLazy) MUTABLE else 0
- val lFlag = if (mods.isPrivateLocal) 0 else LOCAL
- val newflags = mods1.flags & FieldFlags | PRIVATE | lFlag | mFlag
- owner.newValue(tree.pos, nme.getterToLocal(name)) setFlag newflags
- }
- enterInScope(vsym)
- setInfo(vsym)(namerOf(vsym).typeCompleter(tree))
- if (mods1.isLazy)
- vsym.setLazyAccessor(getter)
+ // checks on the whole set
+ noDuplicates(selectors map (_.name), RenamedTwice)
+ noDuplicates(selectors map (_.rename), AppearsTwice)
+ }
- vsym
- }
- addBeanGetterSetter(vd, getter)
- }
- case DefDef(mods, nme.CONSTRUCTOR, tparams, _, _, _) =>
- val sym = owner.newConstructor(tree.pos).setFlag(mods.flags | owner.getFlag(ConstrFlags))
- setPrivateWithin(tree, sym, mods)
- tree.symbol = enterInScope(sym)
- finishWith(tparams)
- case DefDef(mods, name, tparams, _, _, _) =>
- tree.symbol = enterNewMethod(tree, name, mods.flags, mods, tree.pos)
- if (mods.annotations.exists(ann => isAnn(ann, "bridge")))
- tree.symbol setFlag BRIDGE
- finishWith(tparams)
- case TypeDef(mods, name, tparams, _) =>
- var flags: Long = mods.flags
- if ((flags & PARAM) != 0L) flags |= DEFERRED
- val sym = new TypeSymbol(owner, tree.pos, name).setFlag(flags)
- setPrivateWithin(tree, sym, mods)
- tree.symbol = enterInScope(sym)
- finishWith(tparams)
- case DocDef(_, defn) =>
- enterSym(defn)
- case imp @ Import(_, _) =>
- tree.symbol = NoSymbol.newImport(tree.pos)
- setInfo(sym)(namerOf(sym).typeCompleter(tree))
- return context.makeNewImport(imp)
- case _ =>
+ def enterCopyMethod(copyDef: DefDef): Symbol = {
+ val sym = copyDef.symbol
+ val lazyType = completerOf(copyDef)
+
+ /** Assign the types of the class parameters to the parameters of the
+ * copy method. See comment in `Unapplies.caseClassCopyMeth` */
+ def assignParamTypes() {
+ val clazz = sym.owner
+ val constructorType = clazz.primaryConstructor.tpe
+ val subst = new SubstSymMap(clazz.typeParams, copyDef.tparams map (_.symbol))
+ val classParamss = constructorType.paramss
+
+ map2(copyDef.vparamss, classParamss)((copyParams, classParams) =>
+ map2(copyParams, classParams)((copyP, classP) =>
+ copyP.tpt setType subst(classP.tpe)
+ )
+ )
+ }
+
+ sym setInfo {
+ mkTypeCompleter(copyDef) { sym =>
+ assignParamTypes()
+ lazyType complete sym
}
}
- catch {
- case ex: TypeError =>
- //Console.println("caught " + ex + " in enterSym")//DEBUG
- typer.reportTypeError(tree.pos, ex)
- this.context
+ }
+
+ def completerOf(tree: Tree): TypeCompleter = {
+ val mono = namerOf(tree.symbol) monoTypeCompleter tree
+ val tparams = treeInfo.typeParameters(tree)
+ if (tparams.isEmpty) mono
+ else {
+ /* @M! TypeDef's type params are handled differently, e.g., in `type T[A[x <: B], B]`, A and B are entered
+ * first as both are in scope in the definition of x. x is only in scope in `A[x <: B]`.
+ * No symbols are created for the abstract type's params at this point, i.e. the following assertion holds:
+ * !tree.symbol.isAbstractType || { tparams.forall(_.symbol == NoSymbol)
+ * (tested with the above example, `trait C { type T[A[X <: B], B] }`). See also comment in PolyTypeCompleter.
+ */
+ if (!tree.symbol.isAbstractType) //@M TODO: change to isTypeMember ?
+ createNamer(tree) enterSyms tparams
+
+ new PolyTypeCompleter(tparams, mono, context) //@M
}
- this.context
}
- def enterSyntheticSym(tree: Tree): Symbol = {
- enterSym(tree)
- context.unit.synthetics(tree.symbol) = tree
- tree.symbol
+ def enterIfNotThere(sym: Symbol) {
+ val scope = context.scope
+ @tailrec def search(e: ScopeEntry) {
+ if ((e eq null) || (e.owner ne scope))
+ scope enter sym
+ else if (e.sym ne sym) // otherwise, aborts since we found sym
+ search(e.tail)
+ }
+ search(scope lookupEntry sym.name)
}
- def enterNewMethod(tree: Tree, name: Name, flags: Long, mods: Modifiers, pos: Position): TermSymbol = {
- val sym = context.owner.newMethod(pos, name.toTermName).setFlag(flags)
- setPrivateWithin(tree, sym, mods)
+ def enterValDef(tree: ValDef) {
+ if (noEnterGetterSetter(tree))
+ assignAndEnterFinishedSymbol(tree)
+ else
+ enterGetterSetter(tree)
+
+ // When java enums are read from bytecode, they are known to have
+ // constant types by the jvm flag and assigned accordingly. When
+ // they are read from source, the java parser marks them with the
+ // STABLE flag, and now we receive that signal.
+ if (tree.symbol hasAllFlags STABLE | JAVA)
+ tree.symbol setInfo ConstantType(Constant(tree.symbol))
+ }
+
+ def enterLazyVal(tree: ValDef, lazyAccessor: Symbol): TermSymbol = {
+ // If the owner is not a class, this is a lazy val from a method,
+ // with no associated field. It has an accessor with $lzy appended to its name and
+ // its flags are set differently. The implicit flag is reset because otherwise
+ // a local implicit "lazy val x" will create an ambiguity with itself
+ // via "x$lzy" as can be seen in test #3927.
+ val sym = (
+ if (owner.isClass) createFieldSymbol(tree)
+ else owner.newValue(tree.name append nme.LAZY_LOCAL, tree.pos, tree.mods.flags & ~IMPLICIT)
+ )
+ enterValSymbol(tree, sym setFlag MUTABLE setLazyAccessor lazyAccessor)
+ }
+ def enterStrictVal(tree: ValDef): TermSymbol = {
+ enterValSymbol(tree, createFieldSymbol(tree))
+ }
+ def enterValSymbol(tree: ValDef, sym: TermSymbol): TermSymbol = {
enterInScope(sym)
- sym
+ sym setInfo namerOf(sym).monoTypeCompleter(tree)
+ }
+ def enterPackage(tree: PackageDef) {
+ val sym = assignSymbol(tree)
+ newNamer(context.make(tree, sym.moduleClass, sym.info.decls)) enterSyms tree.stats
+ }
+ def enterTypeDef(tree: TypeDef) = assignAndEnterFinishedSymbol(tree)
+
+ def enterDefDef(tree: DefDef): Unit = tree match {
+ case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) =>
+ assignAndEnterFinishedSymbol(tree)
+ case DefDef(mods, name, tparams, _, _, _) =>
+ val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE else 0
+ val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag
+
+ if (name == nme.copy && sym.isSynthetic)
+ enterCopyMethod(tree)
+ else
+ sym setInfo completerOf(tree)
}
- def enterAccessorMethod(tree: Tree, name: Name, flags: Long, mods: Modifiers): TermSymbol =
- enterNewMethod(tree, name, flags, mods, tree.pos.focus)
-
- def isAnn(ann: Tree, demand: String) = ann match {
- case Apply(Select(New(Ident(name)), _), _) =>
- name.toString == demand
- case Apply(Select(New(Select(pre, name)), _), _) =>
- name.toString == demand
- case _ => false
- }
-
- private def addBeanGetterSetter(vd: ValDef, getter: Symbol) {
- val ValDef(mods, name, tpt, _) = vd
- val hasBP = mods.annotations.exists(isAnn(_, "BeanProperty"))
- val hasBoolBP = mods.annotations.exists(isAnn(_, "BooleanBeanProperty"))
- if ((hasBP || hasBoolBP) && !forMSIL) {
- if (!name(0).isLetter)
- context.error(vd.pos, "`BeanProperty' annotation can be applied "+
- "only to fields that start with a letter")
- else if (mods.isPrivate)
- // avoids name clashes with private fields in traits
- context.error(vd.pos, "`BeanProperty' annotation can only be applied "+
- "to non-private fields")
- else {
- val flags = mods.flags & (DEFERRED | OVERRIDE | STATIC)
- val beanName = name.toString.capitalize
-
- val getterName = if (hasBoolBP) "is" + beanName
- else "get" + beanName
- val getterMods = Modifiers(flags, mods.privateWithin, Nil, mods.positions)
- val beanGetterDef = atPos(vd.pos.focus) {
- DefDef(getterMods, getterName, Nil, List(Nil), tpt.duplicate,
- if (mods.isDeferred) EmptyTree
- else Select(This(getter.owner.name.toTypeName), name)) }
- enterSyntheticSym(beanGetterDef)
-
- if (mods.isMutable) {
- // can't use "enterSyntheticSym", because the parameter type is not yet
- // known. instead, uses the same machinery as for the non-bean setter:
- // create and enter the symbol here, add the tree in Typer.addGettterSetter.
- val setterName = "set" + beanName
- val setter = enterAccessorMethod(vd, setterName, flags, mods)
- .setPos(vd.pos.focus)
- setInfo(setter)(namerOf(setter).setterTypeCompleter(vd))
- }
+ def enterClassDef(tree: ClassDef) {
+ val ClassDef(mods, name, tparams, impl) = tree
+ val primaryConstructorArity = treeInfo.firstConstructorArgs(impl.body).size
+ tree.symbol = enterClassSymbol(tree)
+ tree.symbol setInfo completerOf(tree)
+
+ if (mods.isCase) {
+ if (primaryConstructorArity > MaxFunctionArity)
+ MaxParametersCaseClassError(tree)
+
+ val m = ensureCompanionObject(tree, caseModuleDef)
+ m.moduleClass.updateAttachment(new ClassForCaseCompanionAttachment(tree))
+ }
+ val hasDefault = impl.body exists {
+ case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => mexists(vparamss)(_.mods.hasDefault)
+ case _ => false
+ }
+ if (hasDefault) {
+ val m = ensureCompanionObject(tree)
+ m.updateAttachment(new ConstructorDefaultsAttachment(tree, null))
+ }
+ val owner = tree.symbol.owner
+ if (settings.lint.value && owner.isPackageObjectClass && !mods.isImplicit) {
+ context.unit.warning(tree.pos,
+ "it is not recommended to define classes/objects inside of package objects.\n" +
+ "If possible, define " + tree.symbol + " in " + owner.skipPackageObject + " instead."
+ )
+ }
+
+ // Suggested location only.
+ if (mods.isImplicit) {
+ if (primaryConstructorArity == 1) {
+ log("enter implicit wrapper "+tree+", owner = "+owner)
+ enterImplicitWrapper(tree)
}
+ else context.unit.error(tree.pos, "implicit classes must accept exactly one primary constructor parameter")
+ }
+ validateCompanionDefs(tree)
+ }
+
+ // this logic is needed in case typer was interrupted half
+ // way through and then comes back to do the tree again. In
+ // that case the definitions that were already attributed as
+ // well as any default parameters of such methods need to be
+ // re-entered in the current scope.
+ protected def enterExistingSym(sym: Symbol): Context = {
+ if (forInteractive && sym != null && sym.owner.isTerm) {
+ enterIfNotThere(sym)
+ if (sym.isLazy)
+ sym.lazyAccessor andAlso enterIfNotThere
+
+ for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment])
+ defAtt.defaultGetters foreach enterIfNotThere
}
+ this.context
+ }
+
+ def enterSyntheticSym(tree: Tree): Symbol = {
+ enterSym(tree)
+ context.unit.synthetics(tree.symbol) = tree
+ tree.symbol
}
// --- Lazy Type Assignment --------------------------------------------------
- def typeCompleter(tree: Tree) = mkTypeCompleter(tree) { sym =>
- if (settings.debug.value) log("defining " + sym + Flags.flagsToString(sym.flags)+sym.locationString)
- val tp = typeSig(tree)
+ def initializeLowerBounds(tp: Type): Type = {
tp match {
- case TypeBounds(lo, hi) =>
+ case TypeBounds(lo, _) =>
// check that lower bound is not an F-bound
- for (t <- lo) {
- t match {
- case TypeRef(_, sym, _) => sym.initialize
- case _ =>
- }
- }
+ for (TypeRef(_, sym, _) <- lo)
+ sym.initialize
case _ =>
}
- sym.setInfo(if (sym.isJavaDefined) RestrictJavaArraysMap(tp) else tp)
- if ((sym.isAliasType || sym.isAbstractType) && !sym.isParameter &&
- !typer.checkNonCyclic(tree.pos, tp))
- sym.setInfo(ErrorType) // this early test is there to avoid infinite baseTypes when
- // adding setters and getters --> bug798
- if (settings.debug.value) log("defined " + sym);
- validate(sym)
+ tp
+ }
+
+ def monoTypeCompleter(tree: Tree) = mkTypeCompleter(tree) { sym =>
+ logAndValidate(sym) {
+ val tp = initializeLowerBounds(typeSig(tree))
+ sym setInfo {
+ if (sym.isJavaDefined) RestrictJavaArraysMap(tp)
+ else tp
+ }
+ // this early test is there to avoid infinite baseTypes when
+ // adding setters and getters --> bug798
+ val needsCycleCheck = (sym.isAliasType || sym.isAbstractType) && !sym.isParameter
+ if (needsCycleCheck && !typer.checkNonCyclic(tree.pos, tp))
+ sym setInfo ErrorType
+ }
+ // tree match {
+ // case ClassDef(_, _, _, impl) =>
+ // val parentsOK = (
+ // treeInfo.isInterface(sym, impl.body)
+ // || (sym eq ArrayClass)
+ // || (sym isSubClass AnyValClass)
+ // )
+ // if (!parentsOK)
+ // ensureParent(sym, AnyRefClass)
+ // case _ => ()
+ // }
}
- def moduleClassTypeCompleter(tree: Tree) = {
+ def moduleClassTypeCompleter(tree: ModuleDef) = {
mkTypeCompleter(tree) { sym =>
val moduleSymbol = tree.symbol
- assert(moduleSymbol.moduleClass == sym)
+ assert(moduleSymbol.moduleClass == sym, moduleSymbol.moduleClass)
moduleSymbol.info // sets moduleClass info as a side effect.
- //assert(sym.rawInfo.isComplete)
}
}
- def getterTypeCompleter(vd: ValDef) = mkTypeCompleter(vd) { sym =>
- if (settings.debug.value) log("defining " + sym)
- val tp = typeSig(vd)
- sym.setInfo(NullaryMethodType(tp))
- if (settings.debug.value) log("defined " + sym)
- validate(sym)
- }
-
- def setterTypeCompleter(vd: ValDef) = mkTypeCompleter(vd) { sym =>
- if (settings.debug.value) log("defining " + sym)
- val param = sym.newSyntheticValueParam(typeSig(vd))
- sym.setInfo(MethodType(List(param), UnitClass.tpe))
- if (settings.debug.value) log("defined " + sym)
- validate(sym)
+ /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */
+ def accessorTypeCompleter(tree: ValDef, isSetter: Boolean) = mkTypeCompleter(tree) { sym =>
+ logAndValidate(sym) {
+ sym setInfo {
+ val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitClass.tpe)
+ else NullaryMethodType(typeSig(tree))
+ pluginsTypeSigAccessor(tp, typer, tree, sym)
+ }
+ }
}
def selfTypeCompleter(tree: Tree) = mkTypeCompleter(tree) { sym =>
- var selftpe = typer.typedType(tree).tpe
- if (!(selftpe.typeSymbol isNonBottomSubClass sym.owner))
- selftpe = intersectionType(List(sym.owner.tpe, selftpe))
-// println("completing self of "+sym.owner+": "+selftpe)
- sym.setInfo(selftpe)
+ val selftpe = typer.typedType(tree).tpe
+ sym setInfo {
+ if (selftpe.typeSymbol isNonBottomSubClass sym.owner) selftpe
+ else intersectionType(List(sym.owner.tpe, selftpe))
+ }
}
/** This method has a big impact on the eventual compiled code.
@@ -631,7 +808,7 @@ trait Namers { self: Analyzer =>
false
}
- val tpe1 = tpe.deconst
+ val tpe1 = dropRepeatedParamType(tpe.deconst)
val tpe2 = tpe1.widen
// This infers Foo.type instead of "object Foo"
@@ -648,144 +825,85 @@ trait Namers { self: Analyzer =>
else if (!sym.isFinal) tpe1
else tpe
}
+ /** Computes the type of the body in a ValDef or DefDef, and
+ * assigns the type to the tpt's node. Returns the type.
+ */
+ private def assignTypeToTree(tree: ValOrDefDef, defnTyper: Typer, pt: Type): Type = {
+ val rhsTpe =
+ if (tree.symbol.isTermMacro) defnTyper.computeMacroDefType(tree, pt)
+ else defnTyper.computeType(tree.rhs, pt)
+
+ val defnTpe = widenIfNecessary(tree.symbol, rhsTpe, pt)
+ tree.tpt defineType defnTpe setPos tree.pos.focus
+ tree.tpt.tpe
+ }
- // sets each ValDef's symbol
- def enterValueParams(owner: Symbol, vparamss: List[List[ValDef]]): List[List[Symbol]] = {
- def enterValueParam(param: ValDef): Symbol = {
- param.symbol = setInfo(
- enterInScope{
- val sym = owner.newValueParameter(param.pos, param.name).
- setFlag(param.mods.flags & (BYNAMEPARAM | IMPLICIT | DEFAULTPARAM))
- setPrivateWithin(param, sym, param.mods)
- })(typeCompleter(param))
- param.symbol
- }
- vparamss.map(_.map(enterValueParam))
+ // owner is the class with the self type
+ def enterSelf(self: ValDef) {
+ val ValDef(_, name, tpt, _) = self
+ if (self eq emptyValDef)
+ return
+
+ val hasName = name != nme.WILDCARD
+ val hasType = !tpt.isEmpty
+ if (!hasType)
+ tpt defineType NoType
+
+ val sym = (
+ if (hasType || hasName) {
+ owner.typeOfThis = if (hasType) selfTypeCompleter(tpt) else owner.tpe
+ val selfSym = owner.thisSym setPos self.pos
+ if (hasName) selfSym setName name else selfSym
+ }
+ else {
+ val symName = if (name != nme.WILDCARD) name else nme.this_
+ owner.newThisSym(symName, owner.pos) setInfo owner.tpe
+ }
+ )
+ self.symbol = context.scope enter sym
}
private def templateSig(templ: Template): Type = {
val clazz = context.owner
def checkParent(tpt: Tree): Type = {
val tp = tpt.tpe
- if (tp.typeSymbol == context.owner) {
- context.error(tpt.pos, ""+tp.typeSymbol+" inherits itself")
- AnyRefClass.tpe
- } else if (tp.isError) {
- AnyRefClass.tpe
- } else {
- tp
- }
- }
- def enterSelf(self: ValDef) {
- if (!self.tpt.isEmpty) {
- clazz.typeOfThis = selfTypeCompleter(self.tpt)
- self.symbol = clazz.thisSym.setPos(self.pos)
- } else {
- self.tpt defineType NoType
- if (self.name != nme.WILDCARD) {
- clazz.typeOfThis = clazz.tpe
- self.symbol = clazz.thisSym
- } else if (self ne emptyValDef) {
- self.symbol = clazz.newThisSym(self.pos) setInfo clazz.tpe
- }
- }
- if (self.name != nme.WILDCARD) {
- self.symbol.name = self.name
- self.symbol = context.scope enter self.symbol
- }
- }
+ val inheritsSelf = tp.typeSymbol == owner
+ if (inheritsSelf)
+ InheritsItselfError(tpt)
- /* experimental code for allowiong early types as type parameters
- val earlyTypes = templ.body filter (treeInfo.isEarlyTypeDef)
-
- val parentTyper =
- if (earlyTypes.isEmpty) typer
- else {
- val earlyContext = context.outer.makeNewScope(context.tree, context.outer.owner.newLocalDummy(templ.pos))
- newNamer(earlyContext).enterSyms(earlyTypes)
- newTyper(earlyContext).typedStats(earlyTypes, context.owner)
-
- val parentContext = context.makeNewScope(context.tree, context.owner)
- for (etdef <- earlyTypes) parentContext.scope enter etdef.symbol
- newTyper(parentContext)
- }
- var parents = parentTyper.parentTypes(templ) map checkParent
- if (!earlyTypes.isEmpty) {
- val earlyMap = new EarlyMap(context.owner)
- for (etdef <- earlyTypes) {
- val esym = etdef.symbol
- esym.owner = context.owner
- esym.asInstanceOf[TypeSymbol].refreshType()
- esym setInfo earlyMap(esym.info)
- }
-
-/*
- println("earlies: "+(earlyTypes map (_.symbol)))
- println("earlies: "+(earlyTypes map (_.symbol.tpe)))
- println("earlies: "+(earlyTypes map (_.symbol.info)))
- println("parents: "+parents)
- println(templ)
+ if (inheritsSelf || tp.isError) AnyRefClass.tpe
+ else tp
+ }
-*/
+ val parents = typer.parentTypes(templ) map checkParent
- }
-*/
- var parents = typer.parentTypes(templ) map checkParent
enterSelf(templ.self)
- val decls = new Scope
-// for (etdef <- earlyTypes) decls enter etdef.symbol
+
+ val decls = newScope
val templateNamer = newNamer(context.make(templ, clazz, decls))
- .enterSyms(templ.body)
-
- /* add overridden virtuals to parents
- val overridden = clazz.overriddenVirtuals
- if (!overridden.isEmpty)
- parents = parents ::: ( overridden map (
- sym => TypeRef(clazz.owner.thisType, sym, clazz.typeParams map (_.tpe))))
- println("Parents of "+clazz+":"+parents)
-
- // check that virtual classes are only defined as members of templates
- if (clazz.isVirtualClass && !clazz.owner.isClass)
- context.error(
- clazz.pos,
- "virtual traits and their subclasses must be defined as members of some other class")
-
- // make subclasses of virtual classes virtual as well; check that
- // they are defined in same scope.
- val virtualParents = parents map (_.typeSymbol) filter (_.isVirtualClass)
- virtualParents find {
- vp => !(clazz.owner.isClass && (clazz.owner isSubClass vp.owner))
- } match {
- case Some(vp) =>
- context.error(
- clazz.pos,
- "subclass of virtual "+vp+
- " needs to be defined at same level,\nas member of "+vp.owner)
- case None =>
- if (!virtualParents.isEmpty) clazz setFlag DEFERRED // make it virtual
- }
- */
+ templateNamer enterSyms templ.body
// add apply and unapply methods to companion objects of case classes,
// unless they exist already; here, "clazz" is the module class
if (clazz.isModuleClass) {
- Namers.this.caseClassOfModuleClass get clazz foreach { cdefRef =>
- val cdef = cdefRef()
+ clazz.attachments.get[ClassForCaseCompanionAttachment] foreach { cma =>
+ val cdef = cma.caseClass
+ assert(cdef.mods.isCase, "expected case class: "+ cdef)
addApplyUnapply(cdef, templateNamer)
- caseClassOfModuleClass -= clazz
}
}
// add the copy method to case classes; this needs to be done here, not in SyntheticMethods, because
// the namer phase must traverse this copy method to create default getters for its parameters.
- // here, clazz is the ClassSymbol of the case class (not the module).
- // @check: this seems to work only if the type completer of the class runs before the one of the
- // module class: the one from the module class removes the entry form caseClassOfModuleClass (see above).
- if (clazz.isClass && !clazz.hasModuleFlag) {
- Namers.this.caseClassOfModuleClass get companionModuleOf(clazz, context).moduleClass map { cdefRef =>
- val cdef = cdefRef()
+ // here, clazz is the ClassSymbol of the case class (not the module). (!clazz.hasModuleFlag) excludes
+ // the moduleClass symbol of the companion object when the companion is a "case object".
+ if (clazz.isCaseClass && !clazz.hasModuleFlag) {
+ val modClass = companionSymbolOf(clazz, context).moduleClass
+ modClass.attachments.get[ClassForCaseCompanionAttachment] foreach { cma =>
+ val cdef = cma.caseClass
def hasCopy(decls: Scope) = (decls lookup nme.copy) != NoSymbol
- if (!hasCopy(decls) &&
+ // SI-5956 needs (cdef.symbol == clazz): there can be multiple class symbols with the same name
+ if (cdef.symbol == clazz && !hasCopy(decls) &&
!parents.exists(p => hasCopy(p.typeSymbol.info.decls)) &&
!parents.flatMap(_.baseClasses).distinct.exists(bc => hasCopy(bc.info.decls)))
addCopyMethod(cdef, templateNamer)
@@ -795,173 +913,261 @@ trait Namers { self: Analyzer =>
// if default getters (for constructor defaults) need to be added to that module, here's the namer
// to use. clazz is the ModuleClass. sourceModule works also for classes defined in methods.
val module = clazz.sourceModule
- classAndNamerOfModule get module match {
- case Some((cdef, _)) => classAndNamerOfModule(module) = (cdef, templateNamer)
- case None =>
+ for (cda <- module.attachments.get[ConstructorDefaultsAttachment]) {
+ debuglog(s"Storing the template namer in the ConstructorDefaultsAttachment of ${module.debugLocationString}.")
+ cda.companionModuleClassNamer = templateNamer
}
+ val classTp = ClassInfoType(parents, decls, clazz)
+ pluginsTypeSig(classTp, templateNamer.typer, templ, WildcardType)
+ }
- if (opt.verbose) {
- log(
- "ClassInfoType(\n%s,\n%s,\n%s)".format(
- " " + (parents map (_.typeSymbol) mkString ", "),
- if (global.opt.debug) decls.toList map (">> " + _) mkString("\n", "\n", "") else " <decls>",
- " " + clazz)
- )
+ private def classSig(cdef: ClassDef): Type = {
+ val clazz = cdef.symbol
+ val ClassDef(_, _, tparams, impl) = cdef
+ val tparams0 = typer.reenterTypeParams(tparams)
+ val resultType = templateSig(impl)
+
+ val res = GenPolyType(tparams0, resultType)
+ val pluginsTp = pluginsTypeSig(res, typer, cdef, WildcardType)
+
+ // Already assign the type to the class symbol (monoTypeCompleter will do it again).
+ // Allows isDerivedValueClass to look at the info.
+ clazz setInfo pluginsTp
+ if (clazz.isDerivedValueClass) {
+ log("Ensuring companion for derived value class " + cdef.name + " at " + cdef.pos.show)
+ clazz setFlag FINAL
+ // Don't force the owner's info lest we create cycles as in SI-6357.
+ enclosingNamerWithScope(clazz.owner.rawInfo.decls).ensureCompanionObject(cdef)
}
- ClassInfoType(parents, decls, clazz)
+ pluginsTp
}
- private def classSig(tparams: List[TypeDef], impl: Template): Type =
- polyType(typer.reenterTypeParams(tparams), templateSig(impl))
+ private def moduleSig(mdef: ModuleDef): Type = {
+ val moduleSym = mdef.symbol
+ // The info of both the module and the moduleClass symbols need to be assigned. monoTypeCompleter assigns
+ // the result of typeSig to the module symbol. The module class info is assigned here as a side-effect.
+ val result = templateSig(mdef.impl)
+ val pluginsTp = pluginsTypeSig(result, typer, mdef, WildcardType)
+ // Assign the moduleClass info (templateSig returns a ClassInfoType)
+ val clazz = moduleSym.moduleClass
+ clazz setInfo pluginsTp
+ // clazz.tpe returns a `ModuleTypeRef(clazz)`, a typeRef that links to the module class `clazz`
+ // (clazz.info would the ClassInfoType, which is not what should be assigned to the module symbol)
+ clazz.tpe
+ }
- private def methodSig(mods: Modifiers, tparams: List[TypeDef],
- vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): Type = {
- val meth = context.owner
+ /**
+ * The method type for `ddef`.
+ *
+ * If a PolyType(tparams, restp) is returned, `tparams` are the external symbols (not type skolems),
+ * i.e. instances of AbstractTypeSymbol. All references in `restp` to the type parameters are TypeRefs
+ * to these non-skolems.
+ *
+ * For type-checking the rhs (in case the result type is inferred), the type skolems of the type parameters
+ * are entered in scope. Equally, the parameter symbols entered into scope have types which refer to those
+ * skolems: when type-checking the rhs, references to parameters need to have types that refer to the skolems.
+ * In summary, typing an rhs happens with respect to the skolems.
+ *
+ * This means that the method's result type computed by the typer refers to skolems. In order to put it
+ * into the method type (the result of methodSig), typeRefs to skolems have to be replaced by references
+ * to the non-skolems.
+ */
+ private def methodSig(ddef: DefDef): Type = {
- // enters the skolemized version into scope, returns the deSkolemized symbols
- val tparamSyms = typer.reenterTypeParams(tparams)
- // since the skolemized tparams are in scope, the TypeRefs in vparamSymss refer to skolemized tparams
- var vparamSymss = enterValueParams(meth, vparamss)
// DEPMETTODO: do we need to skolemize value parameter symbols?
- if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
- tpt defineType context.enclClass.owner.tpe
- tpt setPos meth.pos.focus
- }
+ val DefDef(_, _, tparams, vparamss, tpt, _) = ddef
+ val meth = owner
+ val methOwner = meth.owner
+ val site = methOwner.thisType
- /** Called for all value parameter lists, right to left
- * @param vparams the symbols of one parameter list
- * @param restpe the result type (possibly a MethodType)
+ /* tparams already have symbols (created in enterDefDef/completerOf), namely the skolemized ones (created
+ * by the PolyTypeCompleter constructor, and assigned to tparams). reenterTypeParams enters the type skolems
+ * into scope and returns the non-skolems.
*/
- def makeMethodType(vparams: List[Symbol], restpe: Type) = {
- // TODODEPMET: check that we actually don't need to do anything here
- // new dependent method types: probably OK already, since 'enterValueParams' above
- // enters them in scope, and all have a lazy type. so they may depend on other params. but: need to
- // check that params only depend on ones in earlier sections, not the same. (done by checkDependencies,
- // so re-use / adapt that)
- val params = vparams map (vparam =>
- if (meth hasFlag JAVA) vparam.setInfo(objToAny(vparam.tpe)) else vparam)
- // TODODEPMET necessary?? new dependent types: replace symbols in restpe with the ones in vparams
- if (meth hasFlag JAVA) JavaMethodType(params, restpe)
- else MethodType(params, restpe)
- }
-
- def thisMethodType(restpe: Type) = {
- import scala.collection.mutable.ListBuffer
- val okParams = ListBuffer[Symbol]()
- // can we relax these restrictions? see test/files/pos/depmet_implicit_oopsla_session_2.scala and neg/depmet_try_implicit.scala for motivation
- // should allow forward references since type selections on implicit args are like type parameters:
- // def foo[T](a: T, x: w.T2)(implicit w: ComputeT2[T])
- // is more compact than: def foo[T, T2](a: T, x: T2)(implicit w: ComputeT2[T, T2])
- // moreover, the latter is not an encoding of the former, which hides type inference of T2, so you can specify T while T2 is purely computed
- val checkDependencies: TypeTraverser = new TypeTraverser {
- def traverse(tp: Type) = {
- tp match {
- case SingleType(_, sym) =>
- if (sym.owner == meth && sym.isValueParameter && !(okParams contains sym))
- context.error(
- sym.pos,
- "illegal dependent method type"+
- (if (settings.YdepMethTpes.value)
- ": parameter appears in the type of another parameter in the same section or an earlier one"
- else ""))
- case _ =>
- mapOver(tp)
- }
- this
- }
- }
- for(vps <- vparamSymss) {
- for(p <- vps) checkDependencies(p.info)
- if(settings.YdepMethTpes.value) okParams ++= vps // can only refer to symbols in earlier parameter sections (if the extension is enabled)
+ val tparamSyms = typer.reenterTypeParams(tparams)
+
+ val tparamSkolems = tparams.map(_.symbol)
+
+ /* since the skolemized tparams are in scope, the TypeRefs in types of vparamSymss refer to the type skolems
+ * note that for parameters with missing types, `methodSig` reassigns types of these symbols (the parameter
+ * types from the overridden method).
+ */
+ var vparamSymss = enterValueParams(vparamss)
+
+
+ /**
+ * Creates a method type using tparamSyms and vparamsSymss as argument symbols and `respte` as result type.
+ * All typeRefs to type skolems are replaced by references to the corresponding non-skolem type parameter,
+ * so the resulting type is a valid external method type, it does not contain (references to) skolems.
+ */
+ def thisMethodType(restpe: Type) = {
+ val checkDependencies = new DependentTypeChecker(context)(this)
+ checkDependencies check vparamSymss
+ // DEPMETTODO: check not needed when they become on by default
+ checkDependencies(restpe)
+
+ val makeMethodType = (vparams: List[Symbol], restpe: Type) => {
+ // TODODEPMET: check that we actually don't need to do anything here
+ // new dependent method types: probably OK already, since 'enterValueParams' above
+ // enters them in scope, and all have a lazy type. so they may depend on other params. but: need to
+ // check that params only depend on ones in earlier sections, not the same. (done by checkDependencies,
+ // so re-use / adapt that)
+ if (meth.isJavaDefined)
+ // TODODEPMET necessary?? new dependent types: replace symbols in restpe with the ones in vparams
+ JavaMethodType(vparams map (p => p setInfo objToAny(p.tpe)), restpe)
+ else
+ MethodType(vparams, restpe)
}
- checkDependencies(restpe) // DEPMETTODO: check not needed when they become on by default
- polyType(
+
+ val res = GenPolyType(
tparamSyms, // deSkolemized symbols -- TODO: check that their infos don't refer to method args?
if (vparamSymss.isEmpty) NullaryMethodType(restpe)
// vparamss refer (if they do) to skolemized tparams
- else (vparamSymss :\ restpe) (makeMethodType))
+ else (vparamSymss :\ restpe) (makeMethodType)
+ )
+ res.substSym(tparamSkolems, tparamSyms)
}
- var resultPt = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
- val site = meth.owner.thisType
-
- def overriddenSymbol = intersectionType(meth.owner.info.parents).nonPrivateMember(meth.name).filter(sym => {
- // luc: added .substSym from skolemized to deSkolemized
- // site.memberType(sym): PolyType(tparams, MethodType(..., ...)) ==> all references to tparams are deSkolemized
- // thisMethodType: tparams in PolyType are deSkolemized, the references in the MethodTypes are skolemized. ==> the two didn't match
- // for instance, B.foo would not override A.foo, and the default on parameter b would not be inherited
- // class A { def foo[T](a: T)(b: T = a) = a }
- // class B extends A { override def foo[U](a: U)(b: U) = b }
- sym != NoSymbol && (site.memberType(sym) matches thisMethodType(resultPt).substSym(tparams map (_.symbol), tparamSyms))
- })
+ /**
+ * Creates a schematic method type which has WildcardTypes for non specified
+ * return or parameter types. For instance, in `def f[T](a: T, b) = ...`, the
+ * type schema is
+ *
+ * PolyType(T, MethodType(List(a: T, b: WildcardType), WildcardType))
+ *
+ * where T are non-skolems.
+ */
+ def methodTypeSchema(resTp: Type) = {
+ // for all params without type set WildcaradType
+ mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
+ thisMethodType(resTp)
+ }
- // fill in result type and parameter types from overridden symbol if there is a unique one.
- if (meth.owner.isClass && (tpt.isEmpty || vparamss.exists(_.exists(_.tpt.isEmpty)))) {
- // try to complete from matching definition in base type
- for (vparams <- vparamss; vparam <- vparams)
- if (vparam.tpt.isEmpty) vparam.symbol setInfo WildcardType
- val overridden = overriddenSymbol
- if (overridden != NoSymbol && !overridden.isOverloaded) {
+ def overriddenSymbol(resTp: Type) = {
+ intersectionType(methOwner.info.parents).nonPrivateMember(meth.name).filter { sym =>
+ sym != NoSymbol && (site.memberType(sym) matches methodTypeSchema(resTp))
+ }
+ }
+ // TODO: see whether this or something similar would work instead:
+ // def overriddenSymbol = meth.nextOverriddenSymbol
+
+
+ /**
+ * If `meth` doesn't have an explicit return type, extracts the return type from the method
+ * overridden by `meth` (if there's an unique one). This type is lateron used as the expected
+ * type for computing the type of the rhs. The resulting type references type skolems for
+ * type parameters (consistent with the result of `typer.typedType(tpt).tpe`).
+ *
+ * As a first side effect, this method assigns a MethodType constructed using this
+ * return type to `meth`. This allows omitting the result type for recursive methods.
+ *
+ * As another side effect, this method also assigns paramter types from the overridden
+ * method to parameters of `meth` that have missing types (the parser accepts missing
+ * parameter types under -Yinfer-argument-types).
+ */
+ def typesFromOverridden(methResTp: Type): Type = {
+ val overridden = overriddenSymbol(methResTp)
+ if (overridden == NoSymbol || overridden.isOverloaded) {
+ methResTp
+ } else {
overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials
- resultPt = site.memberType(overridden) match {
- case PolyType(tparams, rt) => rt.substSym(tparams, tparamSyms)
- case mt => mt
- }
-
+ var overriddenTp = site.memberType(overridden) match {
+ case PolyType(tparams, rt) => rt.substSym(tparams, tparamSkolems)
+ case mt => mt
+ }
for (vparams <- vparamss) {
- var pps = resultPt.params
+ var overriddenParams = overriddenTp.params
for (vparam <- vparams) {
if (vparam.tpt.isEmpty) {
- val paramtpe = pps.head.tpe
- vparam.symbol setInfo paramtpe
- vparam.tpt defineType paramtpe
- vparam.tpt setPos vparam.pos.focus
+ val overriddenParamTp = overriddenParams.head.tpe
+ // references to type parameteres in overriddenParamTp link to the type skolems, so the
+ // assigned type is consistent with the other / existing parameter types in vparamSymss.
+ vparam.symbol setInfo overriddenParamTp
+ vparam.tpt defineType overriddenParamTp setPos vparam.pos.focus
}
- pps = pps.tail
+ overriddenParams = overriddenParams.tail
}
- resultPt = resultPt.resultType
+ overriddenTp = overriddenTp.resultType
}
- resultPt match {
- case NullaryMethodType(rtpe) => resultPt = rtpe
- case MethodType(List(), rtpe) => resultPt = rtpe
+
+ overriddenTp match {
+ case NullaryMethodType(rtpe) => overriddenTp = rtpe
+ case MethodType(List(), rtpe) => overriddenTp = rtpe
case _ =>
}
+
if (tpt.isEmpty) {
- // provisionally assign `meth' a method type with inherited result type
+ // provisionally assign `meth` a method type with inherited result type
// that way, we can leave out the result type even if method is recursive.
- meth setInfo thisMethodType(resultPt)
+ meth setInfo thisMethodType(overriddenTp)
+ overriddenTp
+ } else {
+ methResTp
}
}
}
- // Add a () parameter section if this overrides some method with () parameters.
- if (meth.owner.isClass && vparamss.isEmpty && overriddenSymbol.alternatives.exists(
- _.info.isInstanceOf[MethodType])) {
- vparamSymss = List(List())
+
+ if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
+ tpt defineType context.enclClass.owner.tpe
+ tpt setPos meth.pos.focus
}
- for (vparams <- vparamss; vparam <- vparams if vparam.tpt.isEmpty) {
- context.error(vparam.pos, "missing parameter type")
- vparam.tpt defineType ErrorType
+
+ val methResTp = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
+ val resTpFromOverride = if (methOwner.isClass && (tpt.isEmpty || mexists(vparamss)(_.tpt.isEmpty))) {
+ typesFromOverridden(methResTp)
+ } else {
+ methResTp
+ }
+
+ // Add a () parameter section if this overrides some method with () parameters
+ if (methOwner.isClass && vparamss.isEmpty &&
+ overriddenSymbol(methResTp).alternatives.exists(_.info.isInstanceOf[MethodType])) {
+ vparamSymss = ListOfNil
}
- addDefaultGetters(meth, vparamss, tparams, overriddenSymbol)
+ // issue an error for missing parameter types
+ mforeach(vparamss) { vparam =>
+ if (vparam.tpt.isEmpty) {
+ MissingParameterOrValTypeError(vparam)
+ vparam.tpt defineType ErrorType
+ }
+ }
+
+ addDefaultGetters(meth, vparamss, tparams, overriddenSymbol(methResTp))
+
+ // fast track macros, i.e. macros defined inside the compiler, are hardcoded
+ // hence we make use of that and let them have whatever right-hand side they need
+ // (either "macro ???" as they used to or just "???" to maximally simplify their compilation)
+ if (fastTrack contains meth) meth setFlag MACRO
+
+ // macro defs need to be typechecked in advance
+ // because @macroImpl annotation only gets assigned during typechecking
+ // otherwise macro defs wouldn't be able to robustly coexist with their clients
+ // because a client could be typechecked before a macro def that it uses
+ if (meth.isTermMacro) {
+ typer.computeMacroDefType(ddef, resTpFromOverride)
+ }
- thisMethodType({
- val rt = if (tpt.isEmpty) {
- // replace deSkolemized symbols with skolemized ones (for resultPt computed by looking at overridden symbol, right?)
- val pt = resultPt.substSym(tparamSyms, tparams map (_.symbol))
- // compute result type from rhs
- tpt defineType widenIfNecessary(meth, typer.computeType(rhs, pt), pt)
- tpt setPos meth.pos.focus
- tpt.tpe
- } else typer.typedType(tpt).tpe
+ val res = thisMethodType({
+ val rt = (
+ if (!tpt.isEmpty) {
+ methResTp
+ } else {
+ // return type is inferred, we don't just use resTpFromOverride. Here, C.f has type String:
+ // trait T { def f: Object }; class C <: T { def f = "" }
+ // using resTpFromOverride as expected type allows for the following (C.f has type A):
+ // trait T { def f: A }; class C <: T { implicit def b2a(t: B): A = ???; def f = new B }
+ assignTypeToTree(ddef, typer, resTpFromOverride)
+ })
// #2382: return type of default getters are always @uncheckedVariance
- if (meth.hasDefaultFlag)
- rt.withAnnotation(AnnotationInfo(definitions.uncheckedVarianceClass.tpe, List(), List()))
+ if (meth.hasDefault)
+ rt.withAnnotation(AnnotationInfo(uncheckedVarianceClass.tpe, List(), List()))
else rt
})
+ pluginsTypeSig(res, typer, ddef, methResTp)
}
/**
@@ -973,72 +1179,84 @@ trait Namers { self: Analyzer =>
* flag.
*/
private def addDefaultGetters(meth: Symbol, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) {
-
- val isConstr = meth.isConstructor
- val overridden = if (isConstr || !meth.owner.isClass) NoSymbol
- else overriddenSymbol
- val overrides = overridden != NoSymbol && !overridden.isOverloaded
+ val methOwner = meth.owner
+ val isConstr = meth.isConstructor
+ val overridden = if (isConstr || !methOwner.isClass) NoSymbol else overriddenSymbol
+ val overrides = overridden != NoSymbol && !overridden.isOverloaded
// value parameters of the base class (whose defaults might be overridden)
- var baseParamss = overridden.tpe.paramss
+ var baseParamss = (vparamss, overridden.tpe.paramss) match {
// match empty and missing parameter list
- if (vparamss.isEmpty && baseParamss == List(Nil)) baseParamss = Nil
- if (vparamss == List(Nil) && baseParamss.isEmpty) baseParamss = List(Nil)
- assert(!overrides || vparamss.length == baseParamss.length, ""+ meth.fullName + ", "+ overridden.fullName)
+ case (Nil, List(Nil)) => Nil
+ case (List(Nil), Nil) => ListOfNil
+ case (_, paramss) => paramss
+ }
+ assert(
+ !overrides || vparamss.length == baseParamss.length,
+ "" + meth.fullName + ", "+ overridden.fullName
+ )
// cache the namer used for entering the default getter symbols
var ownerNamer: Option[Namer] = None
var moduleNamer: Option[(ClassDef, Namer)] = None
-
var posCounter = 1
- // for each value parameter, create the getter method if it has a default argument. previous
- // denotes the parameter lists which are on the left side of the current one. these get added
- // to the default getter. Example: "def foo(a: Int)(b: Int = a)" gives "foo$default$1(a: Int) = a"
- (List[List[ValDef]]() /: (vparamss))((previous: List[List[ValDef]], vparams: List[ValDef]) => {
+ // For each value parameter, create the getter method if it has a
+ // default argument. previous denotes the parameter lists which
+ // are on the left side of the current one. These get added to the
+ // default getter. Example:
+ //
+ // def foo(a: Int)(b: Int = a) becomes
+ // foo$default$1(a: Int) = a
+ //
+ vparamss.foldLeft(Nil: List[List[ValDef]]) { (previous, vparams) =>
assert(!overrides || vparams.length == baseParamss.head.length, ""+ meth.fullName + ", "+ overridden.fullName)
var baseParams = if (overrides) baseParamss.head else Nil
for (vparam <- vparams) {
val sym = vparam.symbol
// true if the corresponding parameter of the base class has a default argument
- val baseHasDefault = overrides && baseParams.head.hasDefaultFlag
- if (sym.hasDefaultFlag) {
+ val baseHasDefault = overrides && baseParams.head.hasDefault
+ if (sym.hasDefault) {
// generate a default getter for that argument
val oflag = if (baseHasDefault) OVERRIDE else 0
val name = nme.defaultGetterName(meth.name, posCounter)
// Create trees for the defaultGetter. Uses tools from Unapplies.scala
var deftParams = tparams map copyUntyped[TypeDef]
- val defvParamss = previous map (_.map(p => {
+ val defvParamss = mmap(previous) { p =>
// in the default getter, remove the default parameter
val p1 = atPos(p.pos.focus) { ValDef(p.mods &~ DEFAULTPARAM, p.name, p.tpt.duplicate, EmptyTree) }
UnTyper.traverse(p1)
p1
- }))
+ }
val parentNamer = if (isConstr) {
val (cdef, nmr) = moduleNamer.getOrElse {
- val module = companionModuleOf(meth.owner, context)
+ val module = companionSymbolOf(methOwner, context)
module.initialize // call type completer (typedTemplate), adds the
// module's templateNamer to classAndNamerOfModule
- classAndNamerOfModule get module match {
- case s @ Some((cdef, nmr)) if nmr != null =>
- moduleNamer = s
- (cdef, nmr)
+ module.attachments.get[ConstructorDefaultsAttachment] match {
+ // by martin: the null case can happen in IDE; this is really an ugly hack on top of an ugly hack but it seems to work
+ case Some(cda) =>
+ if (cda.companionModuleClassNamer == null) {
+ debugwarn(s"SI-6576 The companion module namer for $meth was unexpectedly null")
+ return
+ }
+ val p = (cda.classWithDefault, cda.companionModuleClassNamer)
+ moduleNamer = Some(p)
+ p
case _ =>
return // fix #3649 (prevent crash in erroneous source code)
- // nmr == null can happen in IDE; this is really an ugly hack on top[ of an ugly hack but it seems to work
}
}
deftParams = cdef.tparams map copyUntypedInvariant
nmr
- } else {
- ownerNamer.getOrElse {
- val ctx = context.nextEnclosing(c => c.scope.toList.contains(meth))
- assert(ctx != NoContext)
- val nmr = newNamer(ctx)
- ownerNamer = Some(nmr)
- nmr
- }
+ }
+ else ownerNamer getOrElse {
+ val ctx = context.nextEnclosing(c => c.scope.toList.contains(meth))
+ assert(ctx != NoContext, meth)
+ val nmr = newNamer(ctx)
+ ownerNamer = Some(nmr)
+ nmr
}
// If the parameter type mentions any type parameter of the method, let the compiler infer the
@@ -1047,19 +1265,7 @@ trait Namers { self: Analyzer =>
// def f(i: Int, m: Int => Int = identity _) = m(i)
// if we use Wildcard as expected, we get "Nothing => Nothing", and the default is not usable.
val names = deftParams map { case TypeDef(_, name, _, _) => name }
- object subst extends Transformer {
- override def transform(tree: Tree): Tree = tree match {
- case Ident(name) if (names contains name) =>
- TypeTree()
- case _ =>
- super.transform(tree)
- }
- def apply(tree: Tree) = {
- val r = transform(tree)
- if (r.exists(_.isEmpty)) TypeTree()
- else r
- }
- }
+ val subst = new TypeTreeSubstituter(names contains _)
val defTpt = subst(copyUntyped(vparam.tpt match {
// default getter for by-name params
@@ -1070,57 +1276,107 @@ trait Namers { self: Analyzer =>
val defaultTree = atPos(vparam.pos.focus) {
DefDef(
- Modifiers(meth.flags & (PRIVATE | PROTECTED | FINAL)) | SYNTHETIC | DEFAULTPARAM | oflag,
+ Modifiers(meth.flags & DefaultGetterFlags) | SYNTHETIC | DEFAULTPARAM | oflag,
name, deftParams, defvParamss, defTpt, defRhs)
}
if (!isConstr)
- meth.owner.resetFlag(INTERFACE) // there's a concrete member now
+ methOwner.resetFlag(INTERFACE) // there's a concrete member now
val default = parentNamer.enterSyntheticSym(defaultTree)
if (forInteractive && default.owner.isTerm) {
- // enter into map from method symbols to default arguments.
- // if compiling the same local block several times (which can happen in interactive mode)
- // we might otherwise not find the default symbol, because the second time it the
- // method symbol will be re-entered in the scope but the default parameter will not.
- defaultParametersOfMethod(meth) += default
+ // save the default getters as attachments in the method symbol. if compiling the
+ // same local block several times (which can happen in interactive mode) we might
+ // otherwise not find the default symbol, because the second time it the method
+ // symbol will be re-entered in the scope but the default parameter will not.
+ val att = meth.attachments.get[DefaultsOfLocalMethodAttachment] match {
+ case Some(att) => att.defaultGetters += default
+ case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default))
+ }
}
} else if (baseHasDefault) {
- // the parameter does not have a default itself, but the corresponding parameter
- // in the base class does.
+ // the parameter does not have a default itself, but the
+ // corresponding parameter in the base class does.
sym.setFlag(DEFAULTPARAM)
}
posCounter += 1
if (overrides) baseParams = baseParams.tail
}
if (overrides) baseParamss = baseParamss.tail
- previous ::: List(vparams)
- })
+ previous :+ vparams
+ }
}
- //@M! an abstract type definition (abstract type member/type parameter) may take type parameters, which are in scope in its bounds
- private def typeDefSig(tpsym: Symbol, tparams: List[TypeDef], rhs: Tree) = {
+ private def valDefSig(vdef: ValDef) = {
+ val ValDef(_, _, tpt, rhs) = vdef
+ val result = if (tpt.isEmpty) {
+ if (rhs.isEmpty) {
+ MissingParameterOrValTypeError(tpt)
+ ErrorType
+ }
+ else assignTypeToTree(vdef, typer, WildcardType)
+ } else {
+ typer.typedType(tpt).tpe
+ }
+ pluginsTypeSig(result, typer, vdef, if (tpt.isEmpty) WildcardType else result)
+
+ }
+
+ //@M! an abstract type definition (abstract type member/type parameter)
+ // may take type parameters, which are in scope in its bounds
+ private def typeDefSig(tdef: TypeDef) = {
+ val TypeDef(_, _, tparams, rhs) = tdef
+ // log("typeDefSig(" + tpsym + ", " + tparams + ")")
val tparamSyms = typer.reenterTypeParams(tparams) //@M make tparams available in scope (just for this abstypedef)
val tp = typer.typedType(rhs).tpe match {
case TypeBounds(lt, rt) if (lt.isError || rt.isError) =>
TypeBounds.empty
- case tp @ TypeBounds(lt, rt) if (tpsym hasFlag JAVA) =>
+ case tp @ TypeBounds(lt, rt) if (tdef.symbol hasFlag JAVA) =>
TypeBounds(lt, objToAny(rt))
case tp =>
tp
}
-
// see neg/bug1275, #3419
// used to do a rudimentary kind check here to ensure overriding in refinements
- // doesn't change a type member's arity (number of type parameters),
- // e.g. trait T { type X[A] }; type S = T{type X}; val x: S
- // X in x.X[A] will get rebound to the X in the refinement, which does not take any type parameters
- // this mismatch does not crash the compiler (anymore), but leads to weird type errors,
- // as x.X[A] will become NoType internally
- // it's not obvious the errror refers to the X in the refinement and not the original X
- // however, separate compilation requires the symbol info to be loaded to do this check,
- // but loading the info will probably lead to spurious cyclic errors --> omit the check
- polyType(tparamSyms, tp)
+ // doesn't change a type member's arity (number of type parameters), e.g.
+ //
+ // trait T { type X[A] }
+ // type S = T { type X }
+ // val x: S
+ //
+ // X in x.X[A] will get rebound to the X in the refinement, which
+ // does not take any type parameters. This mismatch does not crash
+ // the compiler (anymore), but leads to weird type errors, as
+ // x.X[A] will become NoType internally. It's not obvious the
+ // error refers to the X in the refinement and not the original X.
+ //
+ // However, separate compilation requires the symbol info to be
+ // loaded to do this check, but loading the info will probably
+ // lead to spurious cyclic errors. So omit the check.
+ val res = GenPolyType(tparamSyms, tp)
+ pluginsTypeSig(res, typer, tdef, WildcardType)
+ }
+
+ private def importSig(imp: Import) = {
+ val Import(expr, selectors) = imp
+ val expr1 = typer.typedQualifier(expr)
+ typer checkStable expr1
+ if (expr1.symbol != null && expr1.symbol.isRootPackage)
+ RootImportError(imp)
+
+ if (expr1.isErrorTyped)
+ ErrorType
+ else {
+ val newImport = treeCopy.Import(imp, expr1, selectors).asInstanceOf[Import]
+ checkSelectors(newImport)
+ transformed(imp) = newImport
+ // copy symbol and type attributes back into old expression
+ // so that the structure builder will find it.
+ expr.symbol = expr1.symbol
+ expr.tpe = expr1.tpe
+ ImportType(expr1)
+ }
}
+
/** Given a case class
* case class C[Ts] (ps: Us)
* Add the following methods to toScope:
@@ -1141,167 +1397,123 @@ trait Namers { self: Analyzer =>
}
def addCopyMethod(cdef: ClassDef, namer: Namer) {
- caseClassCopyMeth(cdef) foreach (namer.enterSyntheticSym(_))
+ caseClassCopyMeth(cdef) foreach namer.enterSyntheticSym
}
-
+ /**
+ * TypeSig is invoked by monoTypeCompleters. It returns the type of a definition which
+ * is then assigned to the corresponding symbol (typeSig itself does not need to assign
+ * the type to the symbol, but it can if necessary).
+ */
def typeSig(tree: Tree): Type = {
-
+ // log("typeSig " + tree)
/** For definitions, transform Annotation trees to AnnotationInfos, assign
* them to the sym's annotations. Type annotations: see Typer.typedAnnotated
* We have to parse definition annotations here (not in the typer when traversing
* the MemberDef tree): the typer looks at annotations of certain symbols; if
- * they were added only in typer, depending on the compilation order, they would
- * be visible or not
+ * they were added only in typer, depending on the compilation order, they may
+ * or may not be visible.
*/
def annotate(annotated: Symbol) = {
// typeSig might be called multiple times, e.g. on a ValDef: val, getter, setter
// parse the annotations only once.
if (!annotated.isInitialized) tree match {
case defn: MemberDef =>
- val ainfos = defn.mods.annotations filter { _ != null } map { ann =>
- // need to be lazy, #1782
- LazyAnnotationInfo(() => typer.typedAnnotation(ann))
+ val ainfos = defn.mods.annotations filterNot (_ eq null) map { ann =>
+ val ctx = typer.context
+ val annCtx = ctx.make(ann)
+ annCtx.setReportErrors()
+ // need to be lazy, #1782. beforeTyper to allow inferView in annotation args, SI-5892.
+ AnnotationInfo lazily {
+ beforeTyper(newTyper(annCtx) typedAnnotation ann)
+ }
+ }
+ if (ainfos.nonEmpty) {
+ annotated setAnnotations ainfos
+ if (annotated.isTypeSkolem)
+ annotated.deSkolemize setAnnotations ainfos
}
- if (!ainfos.isEmpty)
- annotated.setAnnotations(ainfos)
- if (annotated.isTypeSkolem)
- annotated.deSkolemize.setAnnotations(ainfos)
case _ =>
}
}
val sym: Symbol = tree.symbol
- // @Lukas: I am not sure this is the right way to do things.
- // We used to only decorate the module class with annotations, which is
- // clearly wrong. Now we decorate both the class and the object.
- // But maybe some annotations are only meant for one of these but not for the other?
+ // TODO: meta-annotations to indicate where module annotations should go (module vs moduleClass)
annotate(sym)
if (sym.isModule) annotate(sym.moduleClass)
- val result =
- try {
- tree match {
- case ClassDef(_, _, tparams, impl) =>
- newNamer(context.makeNewScope(tree, sym)).classSig(tparams, impl)
-
- case ModuleDef(_, _, impl) =>
- val clazz = sym.moduleClass
- clazz.setInfo(newNamer(context.makeNewScope(tree, clazz)).templateSig(impl))
- //clazz.typeOfThis = singleType(sym.owner.thisType, sym);
- clazz.tpe
-
- case DefDef(mods, _, tparams, vparamss, tpt, rhs) =>
- newNamer(context.makeNewScope(tree, sym)).methodSig(mods, tparams, vparamss, tpt, rhs)
-
- case vdef @ ValDef(mods, name, tpt, rhs) =>
- val typer1 = typer.constrTyperIf(sym.hasFlag(PARAM | PRESUPER) && !mods.isJavaDefined && sym.owner.isConstructor)
- if (tpt.isEmpty) {
- if (rhs.isEmpty) {
- context.error(tpt.pos, "missing parameter type");
- ErrorType
- } else {
- tpt defineType widenIfNecessary(
- sym,
- newTyper(typer1.context.make(vdef, sym)).computeType(rhs, WildcardType),
- WildcardType)
- tpt setPos vdef.pos.focus
- tpt.tpe
- }
- } else typer1.typedType(tpt).tpe
-
- case TypeDef(_, _, tparams, rhs) =>
- newNamer(context.makeNewScope(tree, sym)).typeDefSig(sym, tparams, rhs) //@M!
-
- case Import(expr, selectors) =>
- val expr1 = typer.typedQualifier(expr)
- val base = expr1.tpe
- typer.checkStable(expr1)
- if ((expr1.symbol ne null) && expr1.symbol.isRootPackage) context.error(tree.pos, "_root_ cannot be imported")
- def checkNotRedundant(pos: Position, from: Name, to: Name): Boolean = {
- if (!tree.symbol.isSynthetic &&
- !((expr1.symbol ne null) && expr1.symbol.isInterpreterWrapper) &&
- base.member(from) != NoSymbol) {
- val e = context.scope.lookupEntry(to)
- def warnRedundant(sym: Symbol) =
- context.unit.warning(pos, "imported `"+to+
- "' is permanently hidden by definition of "+sym+
- sym.locationString)
- if ((e ne null) && e.owner == context.scope && e.sym.exists) {
- warnRedundant(e.sym); return false
- } else if (context eq context.enclClass) {
- val defSym = context.prefix.member(to) filter (
- sym => sym.exists && context.isAccessible(sym, context.prefix, false))
- if (defSym != NoSymbol) { warnRedundant(defSym); return false }
- }
- }
- true
- }
+ def getSig = tree match {
+ case cdef: ClassDef =>
+ createNamer(tree).classSig(cdef)
- def isValidSelector(from: Name)(fun : => Unit) {
- if (from.bothNames forall (x => (base nonLocalMember x) == NoSymbol))
- fun
- }
+ case mdef: ModuleDef =>
+ createNamer(tree).moduleSig(mdef)
- def checkSelectors(selectors: List[ImportSelector]): Unit = selectors match {
- case ImportSelector(from, _, to, _) :: rest =>
- if (from != nme.WILDCARD && base != ErrorType) {
- isValidSelector(from) {
- if (currentRun.compileSourceFor(expr, from)) {
- // XXX This used to be "return typeSig(tree)" but since this method
- // returns Unit, that is deceptive at best. Just in case it is side-effecting
- // somehow, I left the call in before the return; if you know it is
- // not side effecting, please delete the call.
- typeSig(tree)
- return
- }
-
- def notMember = context.error(tree.pos, from.decode + " is not a member of " + expr)
- // for Java code importing Scala objects
- if (from endsWith nme.raw.DOLLAR)
- isValidSelector(from stripEnd "$")(notMember)
- else
- notMember
- }
+ case ddef: DefDef =>
+ createNamer(tree).methodSig(ddef)
- if (checkNotRedundant(tree.pos, from, to))
- checkNotRedundant(tree.pos, from.toTypeName, to.toTypeName)
- }
- if (from != nme.WILDCARD && (rest.exists (sel => sel.name == from)))
- context.error(tree.pos, from.decode + " is renamed twice")
- if ((to ne null) && to != nme.WILDCARD && (rest exists (sel => sel.rename == to)))
- context.error(tree.pos, to.decode + " appears twice as a target of a renaming")
- checkSelectors(rest)
- case Nil =>
- }
- checkSelectors(selectors)
- transformed(tree) = treeCopy.Import(tree, expr1, selectors)
- ImportType(expr1)
- }
- } catch {
- case ex: TypeError =>
- //Console.println("caught " + ex + " in typeSig")
- typer.reportTypeError(tree.pos, ex)
- ErrorType
- }
- result match {
- case PolyType(tparams @ (tp :: _), _) if tp.owner.isTerm =>
- new DeSkolemizeMap(tparams) mapOver result
- case _ =>
- result
+ case vdef: ValDef =>
+ createNamer(tree).valDefSig(vdef)
+
+ case tdef: TypeDef =>
+ createNamer(tree).typeDefSig(tdef) //@M!
+
+ case imp: Import =>
+ importSig(imp)
+ }
+
+ try getSig
+ catch typeErrorHandler(tree, ErrorType)
+ }
+
+ def includeParent(tpe: Type, parent: Symbol): Type = tpe match {
+ case PolyType(tparams, restpe) =>
+ PolyType(tparams, includeParent(restpe, parent))
+ case ClassInfoType(parents, decls, clazz) =>
+ if (parents exists (_.typeSymbol == parent)) tpe
+ else ClassInfoType(parents :+ parent.tpe, decls, clazz)
+ case _ =>
+ tpe
+ }
+
+ def ensureParent(clazz: Symbol, parent: Symbol) = {
+ val info0 = clazz.info
+ val info1 = includeParent(info0, parent)
+ if (info0 ne info1) clazz setInfo info1
+ }
+
+ class LogTransitions[S](onEnter: S => String, onExit: S => String) {
+ val enabled = settings.debug.value
+ @inline final def apply[T](entity: S)(body: => T): T = {
+ if (enabled) log(onEnter(entity))
+ try body
+ finally if (enabled) log(onExit(entity))
}
}
+ private val logDefinition = new LogTransitions[Symbol](
+ sym => "[define] >> " + sym.flagString + " " + sym.fullLocationString,
+ sym => "[define] << " + sym
+ )
+ private def logAndValidate(sym: Symbol)(body: => Unit) {
+ logDefinition(sym)(body)
+ validate(sym)
+ }
/** Convert Java generic array type T[] to (T with Object)[]
* (this is necessary because such arrays have a representation which is incompatible
* with arrays of primitive types.)
+ *
+ * @note the comparison to Object only works for abstract types bounded by classes that are strict subclasses of Object
+ * if the bound is exactly Object, it will have been converted to Any, and the comparison will fail
+ *
+ * see also sigToType
*/
private object RestrictJavaArraysMap extends TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(pre, ArrayClass, List(elemtp))
- if elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe) =>
- TypeRef(pre, ArrayClass, List(intersectionType(List(elemtp, definitions.ObjectClass.tpe))))
+ if elemtp.typeSymbol.isAbstractType && !(elemtp <:< ObjectClass.tpe) =>
+ TypeRef(pre, ArrayClass, List(intersectionType(List(elemtp, ObjectClass.tpe))))
case _ =>
mapOver(tp)
}
@@ -1309,63 +1521,84 @@ trait Namers { self: Analyzer =>
/** Check that symbol's definition is well-formed. This means:
* - no conflicting modifiers
- * - `abstract' modifier only for classes
- * - `override' modifier never for classes
- * - `def' modifier never for parameters of case classes
+ * - `abstract` modifier only for classes
+ * - `override` modifier never for classes
+ * - `def` modifier never for parameters of case classes
* - declarations only in mixins or abstract classes (when not @native)
*/
def validate(sym: Symbol) {
+ import SymValidateErrors._
+ def fail(kind: SymValidateErrors.Value) = SymbolValidationError(sym, kind)
+
+ def checkWithDeferred(flag: Int) {
+ if (sym hasFlag flag)
+ AbstractMemberWithModiferError(sym, flag)
+ }
def checkNoConflict(flag1: Int, flag2: Int) {
- if (sym.hasFlag(flag1) && sym.hasFlag(flag2))
- context.error(sym.pos,
- if (flag1 == DEFERRED)
- "abstract member may not have " + Flags.flagsToString(flag2) + " modifier";
- else
- "illegal combination of modifiers: " +
- Flags.flagsToString(flag1) + " and " + Flags.flagsToString(flag2) +
- " for: " + sym);
- }
-
- if (sym.hasFlag(IMPLICIT) && !sym.isTerm)
- context.error(sym.pos, "`implicit' modifier can be used only for values, variables and methods")
- if (sym.hasFlag(IMPLICIT) && sym.owner.isPackageClass)
- context.error(sym.pos, "`implicit' modifier cannot be used for top-level objects")
- if (sym.hasFlag(SEALED) && !sym.isClass)
- context.error(sym.pos, "`sealed' modifier can be used only for classes")
- if (sym.hasFlag(ABSTRACT) && !sym.isClass)
- context.error(sym.pos, "`abstract' modifier can be used only for classes; " +
- "\nit should be omitted for abstract members")
- if (sym.hasFlag(OVERRIDE | ABSOVERRIDE) && !sym.hasFlag(TRAIT) && sym.isClass)
- context.error(sym.pos, "`override' modifier not allowed for classes")
- if (sym.hasFlag(OVERRIDE | ABSOVERRIDE) && sym.isConstructor)
- context.error(sym.pos, "`override' modifier not allowed for constructors")
- if (sym.hasFlag(ABSOVERRIDE) && !sym.owner.isTrait)
- context.error(sym.pos, "`abstract override' modifier only allowed for members of traits")
+ if (sym hasAllFlags flag1 | flag2)
+ IllegalModifierCombination(sym, flag1, flag2)
+ }
+ if (sym.isImplicit) {
+ if (sym.isConstructor)
+ fail(ImplicitConstr)
+ if (!(sym.isTerm || (sym.isClass && !sym.isTrait)))
+ fail(ImplicitNotTermOrClass)
+ if (sym.owner.isPackageClass)
+ fail(ImplicitAtToplevel)
+ }
+ if (sym.isClass) {
+ checkNoConflict(IMPLICIT, CASE)
+ if (sym.isAnyOverride && !sym.hasFlag(TRAIT))
+ fail(OverrideClass)
+ } else {
+ if (sym.isSealed)
+ fail(SealedNonClass)
+ if (sym.hasFlag(ABSTRACT))
+ fail(AbstractNonClass)
+ }
+
+ if (sym.isConstructor && sym.isAnyOverride)
+ fail(OverrideConstr)
+ if (sym.isAbstractOverride) {
+ if (!sym.owner.isTrait)
+ fail(AbstractOverride)
+ if(sym.isType)
+ fail(AbstractOverrideOnTypeMember)
+ }
if (sym.isLazy && sym.hasFlag(PRESUPER))
- context.error(sym.pos, "`lazy' definitions may not be initialized early")
- if (sym.info.typeSymbol == FunctionClass(0) &&
- sym.isValueParameter && sym.owner.isCaseClass)
- context.error(sym.pos, "pass-by-name arguments not allowed for case class parameters")
- if (sym hasFlag DEFERRED) { // virtual classes count, too
- if (sym.hasAnnotation(definitions.NativeAttr))
- sym.resetFlag(DEFERRED)
- else if (!sym.isValueParameter && !sym.isTypeParameterOrSkolem &&
- !context.tree.isInstanceOf[ExistentialTypeTree] &&
- (!sym.owner.isClass || sym.owner.isModuleClass || sym.owner.isAnonymousClass)) {
- context.error(sym.pos,
- "only classes can have declared but undefined members" + varNotice(sym))
- sym.resetFlag(DEFERRED)
- }
+ fail(LazyAndEarlyInit)
+ if (sym.info.typeSymbol == FunctionClass(0) && sym.isValueParameter && sym.owner.isCaseClass)
+ fail(ByNameParameter)
+ if (sym.isTrait && sym.isFinal && !sym.isSubClass(AnyValClass))
+ checkNoConflict(ABSTRACT, FINAL)
+
+ if (sym.isDeferred) {
+ // Is this symbol type always allowed the deferred flag?
+ def symbolAllowsDeferred = (
+ sym.isValueParameter
+ || sym.isTypeParameterOrSkolem
+ || context.tree.isInstanceOf[ExistentialTypeTree]
+ )
+ // Does the symbol owner require no undefined members?
+ def ownerRequiresConcrete = (
+ !sym.owner.isClass
+ || sym.owner.isModuleClass
+ || sym.owner.isAnonymousClass
+ )
+ if (sym hasAnnotation NativeAttr)
+ sym resetFlag DEFERRED
+ else if (!symbolAllowsDeferred && ownerRequiresConcrete)
+ fail(AbstractVar)
+
+ checkWithDeferred(PRIVATE)
+ checkWithDeferred(FINAL)
}
- checkNoConflict(DEFERRED, PRIVATE)
checkNoConflict(FINAL, SEALED)
checkNoConflict(PRIVATE, PROTECTED)
// checkNoConflict(PRIVATE, OVERRIDE) // this one leads to bad error messages like #4174, so catch in refchecks
// checkNoConflict(PRIVATE, FINAL) // can't do this because FINAL also means compile-time constant
- checkNoConflict(ABSTRACT, FINAL)
- checkNoConflict(DEFERRED, FINAL)
-
+ // checkNoConflict(ABSTRACT, FINAL) // this one gives a bad error for non- at inline classes which extend AnyVal
// @PP: I added this as a sanity check because these flags are supposed to be
// converted to ABSOVERRIDE before arriving here.
checkNoConflict(ABSTRACT, OVERRIDE)
@@ -1376,90 +1609,107 @@ trait Namers { self: Analyzer =>
val tree: Tree
}
- var lockedCount = 0
-
- def mkTypeCompleter(t: Tree)(c: Symbol => Unit) = new TypeCompleter {
+ def mkTypeCompleter(t: Tree)(c: Symbol => Unit) = new LockingTypeCompleter {
val tree = t
- override def complete(sym: Symbol) = try {
- lockedCount += 1
- c(sym)
- } finally {
- lockedCount -= 1
- }
+ def completeImpl(sym: Symbol) = c(sym)
}
- /** A class representing a lazy type with known type parameters.
- */
- class PolyTypeCompleter(tparams: List[Tree], restp: TypeCompleter, owner: Tree, ownerSym: Symbol, ctx: Context) extends TypeCompleter {
- override val typeParams: List[Symbol]= tparams map (_.symbol) //@M
- override val tree = restp.tree
- override def complete(sym: Symbol) = try {
- lockedCount += 1
- if(ownerSym.isAbstractType) //@M an abstract type's type parameters are entered -- TODO: change to isTypeMember ?
- newNamer(ctx.makeNewScope(owner, ownerSym)).enterSyms(tparams) //@M
- restp.complete(sym)
- } finally {
- lockedCount -= 1
+ trait LockingTypeCompleter extends TypeCompleter {
+ def completeImpl(sym: Symbol): Unit
+
+ override def complete(sym: Symbol) = {
+ _lockedCount += 1
+ try completeImpl(sym)
+ finally _lockedCount -= 1
}
}
- /** The symbol that which this accessor represents (possibly in part).
- * This is used for error messages, where we want to speak in terms
- * of the actual declaration or definition, not in terms of the generated setters
- * and getters */
- def underlying(member: Symbol): Symbol =
- if (member.hasAccessorFlag) {
- if (member.isDeferred) {
- val getter = if (member.isSetter) member.getter(member.owner) else member
- val result = getter.owner.newValue(getter.pos, getter.name.toTermName)
- .setInfo(getter.tpe.resultType)
- .setFlag(DEFERRED)
- if (getter.setter(member.owner) != NoSymbol) result.setFlag(MUTABLE)
- result
- } else member.accessed
- } else member
-
/**
- * Finds the companion module of a class symbol. Calling .companionModule
- * does not work for classes defined inside methods.
+ * A class representing a lazy type with known type parameters. `ctx` is the namer context in which the
+ * `owner` is defined.
+ *
+ * Constructing a PolyTypeCompleter for a DefDef creates type skolems for the type parameters and
+ * assigns them to the `tparams` trees.
*/
- def companionModuleOf(clazz: Symbol, context: Context): Symbol = {
- try {
- var res = clazz.companionModule
- if (res == NoSymbol)
- res = context.lookup(clazz.name.toTermName, clazz.owner).suchThat(sym =>
- sym.hasModuleFlag && sym.isCoDefinedWith(clazz))
- res
- } catch {
- case e: InvalidCompanions =>
- context.error(clazz.pos, e.getMessage)
- NoSymbol
+ class PolyTypeCompleter(tparams: List[TypeDef], restp: TypeCompleter, ctx: Context) extends LockingTypeCompleter with FlagAgnosticCompleter {
+ // @M. If `owner` is an abstract type member, `typeParams` are all NoSymbol (see comment in `completerOf`),
+ // otherwise, the non-skolemized (external) type parameter symbols
+ override val typeParams = tparams map (_.symbol)
+
+ /* The definition tree (poly ClassDef, poly DefDef or HK TypeDef) */
+ override val tree = restp.tree
+
+ private val defnSym = tree.symbol
+
+ if (defnSym.isTerm) {
+ // for polymorphic DefDefs, create type skolems and assign them to the tparam trees.
+ val skolems = deriveFreshSkolems(tparams map (_.symbol))
+ map2(tparams, skolems)(_ setSymbol _)
+ }
+
+ def completeImpl(sym: Symbol) = {
+ // @M an abstract type's type parameters are entered.
+ // TODO: change to isTypeMember ?
+ if (defnSym.isAbstractType)
+ newNamerFor(ctx, tree) enterSyms tparams //@M
+ restp complete sym
}
}
- def companionClassOf(module: Symbol, context: Context): Symbol = {
- try {
- var res = module.companionClass
- if (res == NoSymbol)
- res = context.lookup(module.name.toTypeName, module.owner).suchThat(_.isCoDefinedWith(module))
- res
- } catch {
- case e: InvalidCompanions =>
- context.error(module.pos, e.getMessage)
- NoSymbol
+ // Can we relax these restrictions? For motivation, see
+ // test/files/pos/depmet_implicit_oopsla_session_2.scala
+ // neg/depmet_try_implicit.scala
+ //
+ // We should allow forward references since type selections on
+ // implicit args are like type parameters.
+ // def foo[T](a: T, x: w.T2)(implicit w: ComputeT2[T])
+ // is more compact than:
+ // def foo[T, T2](a: T, x: T2)(implicit w: ComputeT2[T, T2])
+ // moreover, the latter is not an encoding of the former, which hides type
+ // inference of T2, so you can specify T while T2 is purely computed
+ private class DependentTypeChecker(ctx: Context)(namer: Namer) extends TypeTraverser {
+ private[this] val okParams = mutable.Set[Symbol]()
+ private[this] val method = ctx.owner
+
+ def traverse(tp: Type) = tp match {
+ case SingleType(_, sym) =>
+ if (sym.owner == method && sym.isValueParameter && !okParams(sym))
+ namer.NamerErrorGen.IllegalDependentMethTpeError(sym)(ctx)
+
+ case _ => mapOver(tp)
+ }
+ def check(vparamss: List[List[Symbol]]) {
+ for (vps <- vparamss) {
+ for (p <- vps)
+ this(p.info)
+ // can only refer to symbols in earlier parameter sections
+ // (if the extension is enabled)
+ okParams ++= vps
+ }
}
}
- def companionSymbolOf(sym: Symbol, context: Context) =
- if (sym.isTerm) companionClassOf(sym, context)
- else if (sym.isClass) companionModuleOf(sym, context)
- else NoSymbol
-
- /** An explanatory note to be added to error messages
- * when there's a problem with abstract var defs */
- def varNotice(sym: Symbol): String =
- if (underlying(sym).isVariable)
- "\n(Note that variables need to be initialized to be defined)"
- else ""
+ @deprecated("Use underlyingSymbol instead", "2.10.0")
+ def underlying(member: Symbol): Symbol = underlyingSymbol(member)
+ @deprecated("Use `companionSymbolOf` instead", "2.10.0")
+ def companionClassOf(module: Symbol, ctx: Context): Symbol = companionSymbolOf(module, ctx)
+ @deprecated("Use `companionSymbolOf` instead", "2.10.0")
+ def companionModuleOf(clazz: Symbol, ctx: Context): Symbol = companionSymbolOf(clazz, ctx)
+
+ /** The companion class or companion module of `original`.
+ * Calling .companionModule does not work for classes defined inside methods.
+ *
+ * !!! Then why don't we fix companionModule? Does the presence of these
+ * methods imply all the places in the compiler calling sym.companionModule are
+ * bugs waiting to be reported? If not, why not? When exactly do we need to
+ * call this method?
+ */
+ def companionSymbolOf(original: Symbol, ctx: Context): Symbol = {
+ original.companionSymbol orElse {
+ ctx.lookup(original.name.companionName, original.owner).suchThat(sym =>
+ (original.isTerm || sym.hasModuleFlag) &&
+ (sym isCoDefinedWith original)
+ )
+ }
+ }
}
-
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index e8c78db..70f2f41 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -7,9 +7,9 @@ package scala.tools.nsc
package typechecker
import symtab.Flags._
-
-import scala.collection.mutable.{ListBuffer, WeakHashMap}
-import scala.collection.immutable.Set
+import scala.collection.mutable
+import scala.ref.WeakReference
+import scala.reflect.ClassTag
/**
* @author Lukas Rytz
@@ -19,13 +19,29 @@ trait NamesDefaults { self: Analyzer =>
import global._
import definitions._
-
- val defaultParametersOfMethod = new WeakHashMap[Symbol, Set[Symbol]] {
- override def default(key: Symbol) = Set()
+ import NamesDefaultsErrorsGen._
+
+ // Default getters of constructors are added to the companion object in the
+ // typeCompleter of the constructor (methodSig). To compute the signature,
+ // we need the ClassDef. To create and enter the symbols into the companion
+ // object, we need the templateNamer of that module class. These two are stored
+ // as an attachment in the companion module symbol
+ class ConstructorDefaultsAttachment(val classWithDefault: ClassDef, var companionModuleClassNamer: Namer)
+
+ // To attach the default getters of local (term-owned) methods to the method symbol.
+ // Used in Namer.enterExistingSym: it needs to re-enter the method symbol and also
+ // default getters, which could not be found otherwise.
+ class DefaultsOfLocalMethodAttachment(val defaultGetters: mutable.Set[Symbol]) {
+ def this(default: Symbol) = this(mutable.Set(default))
}
- case class NamedApplyInfo(qual: Option[Tree], targs: List[Tree],
- vargss: List[List[Tree]], blockTyper: Typer)
+ case class NamedApplyInfo(
+ qual: Option[Tree],
+ targs: List[Tree],
+ vargss: List[List[Tree]],
+ blockTyper: Typer
+ ) { }
+
val noApplyInfo = NamedApplyInfo(None, Nil, Nil, null)
def nameOf(arg: Tree) = arg match {
@@ -34,24 +50,20 @@ trait NamesDefaults { self: Analyzer =>
}
def isNamed(arg: Tree) = nameOf(arg).isDefined
- /** @param pos maps indicies from old to new */
- def reorderArgs[T: ClassManifest](args: List[T], pos: Int => Int): List[T] = {
+ /** @param pos maps indices from old to new */
+ def reorderArgs[T: ClassTag](args: List[T], pos: Int => Int): List[T] = {
val res = new Array[T](args.length)
- // (hopefully) faster than zipWithIndex
- (0 /: args) { case (index, arg) => res(pos(index)) = arg; index + 1 }
+ foreachWithIndex(args)((arg, index) => res(pos(index)) = arg)
res.toList
}
- /** @param pos maps indicies from new to old (!) */
- def reorderArgsInv[T: ClassManifest](args: List[T], pos: Int => Int): List[T] = {
+ /** @param pos maps indices from new to old (!) */
+ def reorderArgsInv[T: ClassTag](args: List[T], pos: Int => Int): List[T] = {
val argsArray = args.toArray
- val res = new ListBuffer[T]
- for (i <- 0 until argsArray.length)
- res += argsArray(pos(i))
- res.toList
+ (argsArray.indices map (i => argsArray(pos(i)))).toList
}
- /** returns `true' if every element is equal to its index */
+ /** returns `true` if every element is equal to its index */
def isIdentity(a: Array[Int]) = (0 until a.length).forall(i => a(i) == i)
/**
@@ -106,7 +118,7 @@ trait NamesDefaults { self: Analyzer =>
* Transform a function into a block, and passing context.namedApplyBlockInfo to
* the new block as side-effect.
*
- * `baseFun' is typed, the resulting block must be typed as well.
+ * `baseFun` is typed, the resulting block must be typed as well.
*
* Fun is transformed in the following way:
* - Ident(f) ==> Block(Nil, Ident(f))
@@ -114,7 +126,7 @@ trait NamesDefaults { self: Analyzer =>
* - Select(qual, f) otherwise ==> Block(ValDef(qual$1, qual), Select(qual$1, f))
* - TypeApply(fun, targs) ==> Block(Nil or qual$1, TypeApply(fun, targs))
* - Select(New(TypeTree()), <init>) ==> Block(Nil, Select(New(TypeTree()), <init>))
- * - Select(New(Select(qual, typeName)), <init>) ==> Block(Nil, Select(...)) NOTE: qual must be stable in a `new'
+ * - Select(New(Select(qual, typeName)), <init>) ==> Block(Nil, Select(...)) NOTE: qual must be stable in a `new`
*/
def baseFunBlock(baseFun: Tree): Tree = {
val isConstr = baseFun.symbol.isConstructor
@@ -152,25 +164,29 @@ trait NamesDefaults { self: Analyzer =>
// never used for constructor calls, they always have a stable qualifier
def blockWithQualifier(qual: Tree, selected: Name) = {
- val sym = blockTyper.context.owner.newValue(qual.pos, unit.freshTermName("qual$"))
- .setInfo(qual.tpe)
- blockTyper.context.scope.enter(sym)
- val vd = atPos(sym.pos)(ValDef(sym, qual).setType(NoType))
+ val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos) setInfo uncheckedBounds(qual.tpe) setPos (qual.pos.makeTransparent)
+ blockTyper.context.scope enter sym
+ val vd = atPos(sym.pos)(ValDef(sym, qual) setType NoType)
+ // it stays in Vegas: SI-5720, SI-5727
+ qual changeOwner (blockTyper.context.owner -> sym)
+ val newQual = atPos(qual.pos.focus)(blockTyper.typedQualifier(Ident(sym.name)))
var baseFunTransformed = atPos(baseFun.pos.makeTransparent) {
- // don't use treeCopy: it would assign opaque position.
- val f = Select(gen.mkAttributedRef(sym), selected)
- .setType(baseFun1.tpe).setSymbol(baseFun1.symbol)
+ // setSymbol below is important because the 'selected' function might be overloaded. by
+ // assigning the correct method symbol, typedSelect will just assign the type. the reason
+ // to still call 'typed' is to correctly infer singleton types, SI-5259.
+ val selectPos =
+ if(qual.pos.isRange && baseFun.pos.isRange) qual.pos.union(baseFun.pos).withStart(Math.min(qual.pos.end, baseFun.pos.end))
+ else baseFun.pos
+ val f = blockTyper.typedOperator(Select(newQual, selected).setSymbol(baseFun1.symbol).setPos(selectPos))
if (funTargs.isEmpty) f
else TypeApply(f, funTargs).setType(baseFun.tpe)
}
val b = Block(List(vd), baseFunTransformed)
- .setType(baseFunTransformed.tpe).setPos(baseFun.pos)
-
- val defaultQual = Some(atPos(qual.pos.focus)(gen.mkAttributedRef(sym)))
+ .setType(baseFunTransformed.tpe).setPos(baseFun.pos.makeTransparent)
context.namedApplyBlockInfo =
- Some((b, NamedApplyInfo(defaultQual, defaultTargs, Nil, blockTyper)))
+ Some((b, NamedApplyInfo(Some(newQual), defaultTargs, Nil, blockTyper)))
b
}
@@ -187,9 +203,14 @@ trait NamesDefaults { self: Analyzer =>
if (pre == NoType) {
None
} else {
- val module = companionModuleOf(baseFun.symbol.owner, context)
+ val module = companionSymbolOf(baseFun.symbol.owner, context)
if (module == NoSymbol) None
- else Some(atPos(pos.focus)(gen.mkAttributedRef(pre, module)))
+ else {
+ val ref = atPos(pos.focus)(gen.mkAttributedRef(pre, module))
+ if (module.isStable && pre.isStable) // fixes #4524. the type checker does the same for
+ ref.setType(singleType(pre, module)) // typedSelect, it calls "stabilize" on the result.
+ Some(ref)
+ }
}
}
@@ -210,22 +231,22 @@ trait NamesDefaults { self: Analyzer =>
case Select(New(tp @ Select(qual, _)), _) if isConstr =>
// in `new q.C()', q is always stable
- assert(treeInfo.isPureExpr(qual), qual)
+ assert(treeInfo.isExprSafeToInline(qual), qual)
// 'moduleQual' fixes #2057
blockWithoutQualifier(moduleQual(tp.pos, tp.tpe))
case Select(TypeApply(New(tp @ Select(qual, _)), _), _) if isConstr =>
- assert(treeInfo.isPureExpr(qual), qual)
+ assert(treeInfo.isExprSafeToInline(qual), qual)
blockWithoutQualifier(moduleQual(tp.pos, tp.tpe))
// super constructor calls
case Select(sp @ Super(_, _), _) if isConstr =>
// 'moduleQual' fixes #3207. selection of the companion module of the
- // superclass needs to have the same prefix as the the superclass.
- blockWithoutQualifier(moduleQual(baseFun.pos, sp.symbol.tpe.parents.head))
+ // superclass needs to have the same prefix as the superclass.
+ blockWithoutQualifier(moduleQual(baseFun.pos, sp.symbol.tpe.firstParent))
// self constructor calls (in secondary constructors)
case Select(tp, name) if isConstr =>
- assert(treeInfo.isPureExpr(tp), tp)
+ assert(treeInfo.isExprSafeToInline(tp), tp)
blockWithoutQualifier(moduleQual(tp.pos, tp.tpe))
// other method calls
@@ -234,7 +255,7 @@ trait NamesDefaults { self: Analyzer =>
blockWithoutQualifier(None)
case Select(qual, name) =>
- if (treeInfo.isPureExpr(qual))
+ if (treeInfo.isExprSafeToInline(qual))
blockWithoutQualifier(Some(qual.duplicate))
else
blockWithQualifier(qual, name)
@@ -250,38 +271,54 @@ trait NamesDefaults { self: Analyzer =>
*
* For by-name parameters, create a value
* x$n: () => T = () => arg
+ *
+ * For Ident(<unapply-selector>) arguments, no ValDef is created (SI-3353).
*/
- def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[ValDef] = {
+ def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[Option[ValDef]] = {
val context = blockTyper.context
- val symPs = (args, paramTypes).zipped map ((arg, tpe) => {
- val byName = isByNameParamType(tpe)
- val (argTpe, repeated) =
- if (isScalaRepeatedParamType(tpe)) arg match {
- case Typed(expr, Ident(tpnme.WILDCARD_STAR)) =>
- (expr.tpe, true)
- case _ =>
- (seqType(arg.tpe), true)
- } else (arg.tpe, false)
- val s = context.owner.newValue(arg.pos, unit.freshTermName("x$"))
- val valType = if (byName) functionType(List(), argTpe)
- else if (repeated) argTpe
- else argTpe
- s.setInfo(valType)
- (context.scope.enter(s), byName, repeated)
+ val symPs = map2(args, paramTypes)((arg, paramTpe) => arg match {
+ case Ident(nme.SELECTOR_DUMMY) =>
+ None // don't create a local ValDef if the argument is <unapply-selector>
+ case _ =>
+ val byName = isByNameParamType(paramTpe)
+ val repeated = isScalaRepeatedParamType(paramTpe)
+ val argTpe = (
+ if (repeated) arg match {
+ case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
+ case _ => seqType(arg.tpe)
+ }
+ else {
+ // TODO In 83c9c764b, we tried to a stable type here to fix SI-7234. But the resulting TypeTree over a
+ // singleton type without an original TypeTree fails to retypecheck after a resetLocalAttrs (SI-7516),
+ // which is important for (at least) macros.
+ arg.tpe
+ }
+ ).widen // have to widen or types inferred from literal defaults will be singletons
+ val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo {
+ val tp = if (byName) functionType(Nil, argTpe) else argTpe
+ uncheckedBounds(tp)
+ }
+ Some((context.scope.enter(s), byName, repeated))
})
- (symPs, args).zipped map {
- case ((sym, byName, repeated), arg) =>
- // resetAttrs required for #2290. given a block { val x = 1; x }, when wrapping into a function
- // () => { val x = 1; x }, the owner of symbol x must change (to the apply method of the function).
- val body = if (byName) blockTyper.typed(Function(List(), resetLocalAttrs(arg)))
- else if (repeated) arg match {
- case Typed(expr, Ident(tpnme.WILDCARD_STAR)) =>
- expr
- case _ =>
- val factory = Select(gen.mkAttributedRef(SeqModule), nme.apply)
- blockTyper.typed(Apply(factory, List(resetLocalAttrs(arg))))
- } else arg
- atPos(body.pos)(ValDef(sym, body).setType(NoType))
+ map2(symPs, args) {
+ case (None, _) => None
+ case (Some((sym, byName, repeated)), arg) =>
+ val body =
+ if (byName) {
+ val res = blockTyper.typed(Function(List(), arg))
+ new ChangeOwnerTraverser(context.owner, res.symbol) traverse arg // fixes #2290
+ res
+ } else {
+ new ChangeOwnerTraverser(context.owner, sym) traverse arg // fixes #4502
+ if (repeated) arg match {
+ case Typed(expr, Ident(tpnme.WILDCARD_STAR)) =>
+ expr
+ case _ =>
+ val factory = Select(gen.mkAttributedRef(SeqModule), nme.apply)
+ blockTyper.typed(Apply(factory, List(resetLocalAttrs(arg))))
+ } else arg
+ }
+ Some(atPos(body.pos)(ValDef(sym, body).setType(NoType)))
}
}
@@ -289,7 +326,7 @@ trait NamesDefaults { self: Analyzer =>
if (isNamedApplyBlock(tree)) {
context.namedApplyBlockInfo.get._1
} else tree match {
- // `fun' is typed. `namelessArgs' might be typed or not, if they are types are kept.
+ // `fun` is typed. `namelessArgs` might be typed or not, if they are types are kept.
case Apply(fun, namelessArgs) =>
val transformedFun = transformNamedApplication(typer, mode, pt)(fun, x => x)
if (transformedFun.isErroneous) setError(tree)
@@ -301,35 +338,41 @@ trait NamesDefaults { self: Analyzer =>
// type the application without names; put the arguments in definition-site order
val typedApp = doTypedApply(tree, funOnly, reorderArgs(namelessArgs, argPos), mode, pt)
-
- if (typedApp.tpe.isError) setError(tree)
- else typedApp match {
- // Extract the typed arguments, restore the call-site evaluation order (using
- // ValDef's in the block), change the arguments to these local values.
+ typedApp match {
+ case Apply(expr, typedArgs) if (typedApp :: typedArgs).exists(_.isErrorTyped) =>
+ setError(tree) // bail out with and erroneous Apply *or* erroneous arguments, see SI-7238, SI-7509
case Apply(expr, typedArgs) =>
+ // Extract the typed arguments, restore the call-site evaluation order (using
+ // ValDef's in the block), change the arguments to these local values.
+
// typedArgs: definition-site order
- val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, false, false)
+ val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, removeByName = false, removeRepeated = false)
// valDefs: call-site order
val valDefs = argValDefs(reorderArgsInv(typedArgs, argPos),
reorderArgsInv(formals, argPos),
blockTyper)
// refArgs: definition-site order again
- val refArgs = (reorderArgs(valDefs, argPos), formals).zipped map ((vDef, tpe) => {
- val ref = gen.mkAttributedRef(vDef.symbol)
- atPos(vDef.pos.focus) {
- // for by-name parameters, the local value is a nullary function returning the argument
- if (isByNameParamType(tpe)) Apply(ref, List())
- else if (isScalaRepeatedParamType(tpe)) Typed(ref, Ident(tpnme.WILDCARD_STAR))
- else ref
- }
+ val refArgs = map3(reorderArgs(valDefs, argPos), formals, typedArgs)((vDefOpt, tpe, origArg) => vDefOpt match {
+ case None => origArg
+ case Some(vDef) =>
+ val ref = gen.mkAttributedRef(vDef.symbol)
+ atPos(vDef.pos.focus) {
+ // for by-name parameters, the local value is a nullary function returning the argument
+ tpe.typeSymbol match {
+ case ByNameParamClass => Apply(ref, Nil)
+ case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR))
+ case _ => ref
+ }
+ }
})
// cannot call blockTyper.typedBlock here, because the method expr might be partially applied only
val res = blockTyper.doTypedApply(tree, expr, refArgs, mode, pt)
res.setPos(res.pos.makeTransparent)
- val block = Block(stats ::: valDefs, res).setType(res.tpe).setPos(tree.pos)
+ val block = Block(stats ::: valDefs.flatten, res).setType(res.tpe).setPos(tree.pos.makeTransparent)
context.namedApplyBlockInfo =
- Some((block, NamedApplyInfo(qual, targs, vargss ::: List(refArgs), blockTyper)))
+ Some((block, NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper)))
block
+ case _ => tree
}
}
@@ -355,7 +398,7 @@ trait NamesDefaults { self: Analyzer =>
}
/**
- * Extend the argument list `givenArgs' with default arguments. Defaults are added
+ * Extend the argument list `givenArgs` with default arguments. Defaults are added
* as named arguments calling the corresponding default getter.
*
* Example: given
@@ -365,12 +408,13 @@ trait NamesDefaults { self: Analyzer =>
*/
def addDefaults(givenArgs: List[Tree], qual: Option[Tree], targs: List[Tree],
previousArgss: List[List[Tree]], params: List[Symbol],
- pos: util.Position, context: Context): (List[Tree], List[Symbol]) = {
+ pos: scala.reflect.internal.util.Position, context: Context): (List[Tree], List[Symbol]) = {
if (givenArgs.length < params.length) {
val (missing, positional) = missingParams(givenArgs, params)
- if (missing forall (_.hasDefaultFlag)) {
+ if (missing forall (_.hasDefault)) {
val defaultArgs = missing flatMap (p => {
val defGetter = defaultGetter(p, context)
+ // TODO #3649 can create spurious errors when companion object is gone (because it becomes unlinked from scope)
if (defGetter == NoSymbol) None // prevent crash in erroneous trees, #3649
else {
var default1 = qual match {
@@ -389,7 +433,7 @@ trait NamesDefaults { self: Analyzer =>
}
})
(givenArgs ::: defaultArgs, Nil)
- } else (givenArgs, missing filterNot (_.hasDefaultFlag))
+ } else (givenArgs, missing filterNot (_.hasDefault))
} else (givenArgs, Nil)
}
@@ -402,7 +446,7 @@ trait NamesDefaults { self: Analyzer =>
if (i > 0) {
val defGetterName = nme.defaultGetterName(param.owner.name, i)
if (param.owner.isConstructor) {
- val mod = companionModuleOf(param.owner.owner, context)
+ val mod = companionSymbolOf(param.owner.owner, context)
mod.info.member(defGetterName)
}
else {
@@ -419,149 +463,138 @@ trait NamesDefaults { self: Analyzer =>
} else NoSymbol
}
+ private def savingUndeterminedTParams[T](context: Context)(fn: List[Symbol] => T): T = {
+ val savedParams = context.extractUndetparams()
+ val savedReporting = context.ambiguousErrors
+
+ context.setAmbiguousErrors(false)
+ try fn(savedParams)
+ finally {
+ context.setAmbiguousErrors(savedReporting)
+ //@M note that we don't get here when an ambiguity was detected (during the computation of res),
+ // as errorTree throws an exception
+ context.undetparams = savedParams
+ }
+ }
+
+ /** A full type check is very expensive; let's make sure there's a name
+ * somewhere which could potentially be ambiguous before we go that route.
+ */
+ private def isAmbiguousAssignment(typer: Typer, param: Symbol, arg: Tree) = {
+ import typer.context
+ (context isNameInScope param.name) && {
+ // for named arguments, check whether the assignment expression would
+ // typecheck. if it does, report an ambiguous error.
+ val paramtpe = param.tpe.cloneInfo(param)
+ // replace type parameters by wildcard. in the below example we need to
+ // typecheck (x = 1) with wildcard (not T) so that it succeeds.
+ // def f[T](x: T) = x
+ // var x = 0
+ // f(x = 1) << "x = 1" typechecks with expected type WildcardType
+ savingUndeterminedTParams(context) { udp =>
+ val subst = new SubstTypeMap(udp, udp map (_ => WildcardType)) {
+ override def apply(tp: Type): Type = super.apply(tp match {
+ case TypeRef(_, ByNameParamClass, x :: Nil) => x
+ case _ => tp
+ })
+ }
+ // This throws an exception which is caught in `tryTypedApply` (as it
+ // uses `silent`) - unfortunately, tryTypedApply recovers from the
+ // exception if you use errorTree(arg, ...) and conforms is allowed as
+ // a view (see tryImplicit in Implicits) because it tries to produce a
+ // new qualifier (if the old one was P, the new one will be
+ // conforms.apply(P)), and if that works, it pretends nothing happened.
+ //
+ // To make sure tryTypedApply fails, we would like to pass EmptyTree
+ // instead of arg, but can't do that because eventually setType(ErrorType)
+ // is called, and EmptyTree can only be typed NoType. Thus we need to
+ // disable conforms as a view...
+ val errsBefore = reporter.ERROR.count
+ try typer.silent { tpr =>
+ val res = tpr.typed(arg.duplicate, subst(paramtpe))
+ // better warning for SI-5044: if `silent` was not actually silent give a hint to the user
+ // [H]: the reason why `silent` is not silent is because the cyclic reference exception is
+ // thrown in a context completely different from `context` here. The exception happens while
+ // completing the type, and TypeCompleter is created/run with a non-silent Namer `context`
+ // and there is at the moment no way to connect the two unless we go through some global state.
+ if (errsBefore < reporter.ERROR.count)
+ WarnAfterNonSilentRecursiveInference(param, arg)(context)
+ res
+ } match {
+ case SilentResultValue(t) => !t.isErroneous // #4041
+ case _ => false
+ }
+ catch {
+ // `silent` only catches and returns TypeErrors which are not
+ // CyclicReferences. Fix for #3685
+ case cr @ CyclicReference(sym, _) =>
+ (sym.name == param.name) && sym.accessedOrSelf.isVariable && {
+ NameClashError(sym, arg)(context)
+ true
+ }
+ }
+ }
+ }
+ }
+
/**
* Removes name assignments from args. Additionally, returns an array mapping
- * argument indicies from call-site-order to definition-site-order.
+ * argument indices from call-site-order to definition-site-order.
*
* Verifies that names are not specified twice, positional args don't appear
* after named ones.
*/
def removeNames(typer: Typer)(args: List[Tree], params: List[Symbol]): (List[Tree], Array[Int]) = {
- import typer.infer.errorTree
-
- // maps indicies from (order written by user) to (order of definition)
- val argPos = (new Array[Int](args.length)) map (x => -1)
+ implicit val context0 = typer.context
+ // maps indices from (order written by user) to (order of definition)
+ val argPos = Array.fill(args.length)(-1)
var positionalAllowed = true
- val namelessArgs = for ((arg, index) <- (args.zipWithIndex)) yield arg match {
- case a @ AssignOrNamedArg(Ident(name), rhs) =>
- val (pos, newName) = paramPos(params, name)
- newName.foreach(n => {
- typer.context.unit.deprecationWarning(arg.pos, "the parameter name "+ name +" has been deprecated. Use "+ n +" instead.")
- })
- if (pos == -1) {
- if (positionalAllowed) {
- argPos(index) = index
- // prevent isNamed from being true when calling doTypedApply recursively,
- // treat the arg as an assignment of type Unit
- Assign(a.lhs, rhs).setPos(arg.pos)
- } else {
- errorTree(arg, "unknown parameter name: "+ name)
- }
- } else if (argPos contains pos) {
- errorTree(arg, "parameter specified twice: "+ name)
- } else {
- // for named arguments, check whether the assignment expression would
- // typecheck. if it does, report an ambiguous error.
- val param = params(pos)
- val paramtpe = params(pos).tpe.cloneInfo(param)
- // replace type parameters by wildcard. in the below example we need to
- // typecheck (x = 1) with wildcard (not T) so that it succeeds.
- // def f[T](x: T) = x
- // var x = 0
- // f(x = 1) << "x = 1" typechecks with expected type WildcardType
- val udp = typer.context.extractUndetparams()
- val subst = new SubstTypeMap(udp, udp map (_ => WildcardType)) {
- override def apply(tp: Type): Type = tp match {
- case TypeRef(_, ByNameParamClass, List(arg)) => super.apply(arg)
- case _ => super.apply(tp)
+ val namelessArgs = mapWithIndex(args) { (arg, argIndex) =>
+ arg match {
+ case arg @ AssignOrNamedArg(Ident(name), rhs) =>
+ def matchesName(param: Symbol) = !param.isSynthetic && (
+ (param.name == name) || (param.deprecatedParamName match {
+ case Some(`name`) =>
+ context0.unit.deprecationWarning(arg.pos,
+ "the parameter name "+ name +" has been deprecated. Use "+ param.name +" instead.")
+ true
+ case _ => false
+ })
+ )
+ val paramPos = params indexWhere matchesName
+ if (paramPos == -1) {
+ if (positionalAllowed) {
+ argPos(argIndex) = argIndex
+ // prevent isNamed from being true when calling doTypedApply recursively,
+ // treat the arg as an assignment of type Unit
+ Assign(arg.lhs, rhs) setPos arg.pos
}
+ else UnknownParameterNameNamesDefaultError(arg, name)
}
- val reportAmbiguousErrors = typer.context.reportAmbiguousErrors
- typer.context.reportAmbiguousErrors = false
-
- var variableNameClash = false
- val typedAssign = try {
- typer.silent(_.typed(arg, subst(paramtpe)))
- } catch {
- // `silent` only catches and returns TypeErrors which are not
- // CyclicReferences. Fix for #3685
- case cr @ CyclicReference(sym, info) if sym.name == param.name =>
- if (sym.isVariable || sym.isGetter && sym.accessed.isVariable) {
- // named arg not allowed
- variableNameClash = true
- typer.context.error(sym.pos,
- "%s definition needs %s because '%s' is used as a named argument in its body.".format(
- "variable", // "method"
- "type", // "result type"
- sym.name
- )
- )
- typer.infer.setError(arg)
- }
- else cr
- }
-
- def applyNamedArg = {
+ else if (argPos contains paramPos) {
+ val existingArgIndex = argPos.indexWhere(_ == paramPos)
+ val otherName = args(paramPos) match {
+ case AssignOrNamedArg(Ident(oName), rhs) if oName != name => Some(oName)
+ case _ => None
+ }
+ DoubleParamNamesDefaultError(arg, name, existingArgIndex+1, otherName)
+ } else if (isAmbiguousAssignment(typer, params(paramPos), arg))
+ AmbiguousReferenceInNamesDefaultError(arg, name)
+ else {
// if the named argument is on the original parameter
// position, positional after named is allowed.
- if (index != pos)
+ if (argIndex != paramPos)
positionalAllowed = false
- argPos(index) = pos
+ argPos(argIndex) = paramPos
rhs
}
-
- val res = typedAssign match {
- case _: TypeError => applyNamedArg
-
- case t: Tree =>
- if (t.isErroneous && !variableNameClash) {
- applyNamedArg
- } else if (t.isErroneous) {
- t // name clash with variable. error was already reported above.
- } else {
- // This throws an exception which is caught in `tryTypedApply` (as it
- // uses `silent`) - unfortunately, tryTypedApply recovers from the
- // exception if you use errorTree(arg, ...) and conforms is allowed as
- // a view (see tryImplicit in Implicits) because it tries to produce a
- // new qualifier (if the old one was P, the new one will be
- // conforms.apply(P)), and if that works, it pretends nothing happened.
- //
- // To make sure tryTypedApply fails, we would like to pass EmptyTree
- // instead of arg, but can't do that because eventually setType(ErrorType)
- // is called, and EmptyTree can only be typed NoType. Thus we need to
- // disable conforms as a view...
- errorTree(arg, "reference to "+ name +" is ambiguous; it is both, a parameter\n"+
- "name of the method and the name of a variable currently in scope.")
- }
- }
-
- typer.context.reportAmbiguousErrors = reportAmbiguousErrors
- //@M note that we don't get here when an ambiguity was detected (during the computation of res),
- // as errorTree throws an exception
- typer.context.undetparams = udp
- res
- }
- case _ =>
- argPos(index) = index
- if (positionalAllowed) arg
- else errorTree(arg, "positional after named argument.")
- }
- (namelessArgs, argPos)
- }
-
- /**
- * Returns
- * - the position of the parameter named `name`
- * - optionally, if `name` is @deprecatedName, the new name
- */
- def paramPos(params: List[Symbol], name: Name): (Int, Option[Name]) = {
- var i = 0
- var rest = params
- while (!rest.isEmpty) {
- val p = rest.head
- if (!p.isSynthetic) {
- if (p.name == name) return (i, None)
- if (deprecatedName(p) == Some(name)) return (i, Some(p.name))
+ case _ =>
+ argPos(argIndex) = argIndex
+ if (positionalAllowed) arg
+ else PositionalAfterNamedNamesDefaultError(arg)
}
- i += 1
- rest = rest.tail
}
- (-1, None)
- }
- def deprecatedName(sym: Symbol): Option[Name] =
- sym.getAnnotation(DeprecatedNameAttr).map(ann => (ann.args(0): @unchecked) match {
- case Apply(fun, Literal(str) :: Nil) if (fun.symbol == Symbol_apply) =>
- newTermName(str.stringValue)
- })
+ (namelessArgs, argPos)
+ }
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 921b242..fea234d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -7,9 +7,13 @@ package scala.tools.nsc
package typechecker
import symtab.Flags._
-import collection.{ mutable, immutable }
+import scala.collection.{ mutable, immutable }
import transform.InfoTransform
import scala.collection.mutable.ListBuffer
+import scala.language.postfixOps
+import scala.tools.nsc.settings.ScalaVersion
+import scala.tools.nsc.settings.AnyScalaVersion
+import scala.tools.nsc.settings.NoScalaVersion
/** <p>
* Post-attribution checking and transformation.
@@ -37,7 +41,10 @@ import scala.collection.mutable.ListBuffer
*
* @todo Check whether we always check type parameter bounds.
*/
-abstract class RefChecks extends InfoTransform {
+abstract class RefChecks extends InfoTransform with scala.reflect.internal.transform.RefChecks {
+
+ val global: Global // need to repeat here because otherwise last mixin defines global as
+ // SymbolTable. If we had DOT this would not be an issue
import global._
import definitions._
@@ -51,35 +58,41 @@ abstract class RefChecks extends InfoTransform {
new RefCheckTransformer(unit)
override def changesBaseClasses = false
- def transformInfo(sym: Symbol, tp: Type): Type =
- if (sym.isModule && !sym.isStatic) {
- sym setFlag (lateMETHOD | STABLE)
- NullaryMethodType(tp)
- } else tp
-
- val toJavaRepeatedParam = new TypeMap {
- def apply(tp: Type) = tp match {
- case TypeRef(pre, RepeatedParamClass, args) =>
- typeRef(pre, JavaRepeatedParamClass, args)
- case _ =>
- mapOver(tp)
- }
+ override def transformInfo(sym: Symbol, tp: Type): Type = {
+ if (sym.isModule && !sym.isStatic) sym setFlag (lateMETHOD | STABLE)
+ super.transformInfo(sym, tp)
}
- val toScalaRepeatedParam = new TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, JavaRepeatedParamClass, args) =>
- typeRef(pre, RepeatedParamClass, args)
- case _ =>
- mapOver(tp)
- }
- }
+ val toJavaRepeatedParam = new SubstSymMap(RepeatedParamClass -> JavaRepeatedParamClass)
+ val toScalaRepeatedParam = new SubstSymMap(JavaRepeatedParamClass -> RepeatedParamClass)
def accessFlagsToString(sym: Symbol) = flagsToString(
sym getFlag (PRIVATE | PROTECTED),
if (sym.hasAccessBoundary) "" + sym.privateWithin.name else ""
)
+ def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type): Boolean = (tp1.normalize, tp2.normalize) match {
+ case (MethodType(List(), rtp1), NullaryMethodType(rtp2)) =>
+ rtp1 <:< rtp2
+ case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) =>
+ rtp1 <:< rtp2
+ case (TypeRef(_, sym, _), _) if sym.isModuleClass =>
+ overridesTypeInPrefix(NullaryMethodType(tp1), tp2, prefix)
+ case _ =>
+ def classBoundAsSeen(tp: Type) = tp.typeSymbol.classBound.asSeenFrom(prefix, tp.typeSymbol.owner)
+
+ (tp1 <:< tp2) || ( // object override check
+ tp1.typeSymbol.isModuleClass && tp2.typeSymbol.isModuleClass && {
+ val cb1 = classBoundAsSeen(tp1)
+ val cb2 = classBoundAsSeen(tp2)
+ (cb1 <:< cb2) && {
+ log("Allowing %s to override %s because %s <:< %s".format(tp1, tp2, cb1, cb2))
+ true
+ }
+ }
+ )
+ }
+
class RefCheckTransformer(unit: CompilationUnit) extends Transformer {
var localTyper: analyzer.Typer = typer;
@@ -94,12 +107,16 @@ abstract class RefChecks extends InfoTransform {
// those with the DEFAULTPARAM flag, and infer the methods. Looking for the methods
// directly requires inspecting the parameter list of every one. That modification
// shaved 95% off the time spent in this method.
- val defaultGetters = clazz.info.findMember(nme.ANYNAME, 0L, DEFAULTPARAM, false).alternatives
+ val defaultGetters = clazz.info.findMembers(0L, DEFAULTPARAM)
val defaultMethodNames = defaultGetters map (sym => nme.defaultGetterToMethod(sym.name))
- defaultMethodNames.distinct foreach { name =>
+ defaultMethodNames.toList.distinct foreach { name =>
val methods = clazz.info.findMember(name, 0L, METHOD, false).alternatives
- val haveDefaults = methods filter (sym => sym.hasParamWhich(_.hasDefaultFlag) && !nme.isProtectedAccessorName(sym.name))
+ def hasDefaultParam(tpe: Type): Boolean = tpe match {
+ case MethodType(params, restpe) => (params exists (_.hasDefault)) || hasDefaultParam(restpe)
+ case _ => false
+ }
+ val haveDefaults = methods filter (sym => hasDefaultParam(sym.info) && !nme.isProtectedAccessorName(sym.name))
if (haveDefaults.lengthCompare(1) > 0) {
val owners = haveDefaults map (_.owner)
@@ -116,71 +133,86 @@ abstract class RefChecks extends InfoTransform {
}
}
}
- clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym =>
- val alts = clazz.info.decl(sym.name).alternatives
- if (alts.size > 1)
- alts foreach (x => unit.warning(x.pos, "parameterized overloaded implicit methods are not visible as view bounds"))
+ if (settings.lint.value) {
+ clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym =>
+ // implicit classes leave both a module symbol and a method symbol as residue
+ val alts = clazz.info.decl(sym.name).alternatives filterNot (_.isModule)
+ if (alts.size > 1)
+ alts foreach (x => unit.warning(x.pos, "parameterized overloaded implicit methods are not visible as view bounds"))
+ }
}
}
// Override checking ------------------------------------------------------------
- def hasRepeatedParam(tp: Type): Boolean = tp match {
- case MethodType(formals, restpe) => isScalaVarArgs(formals) || hasRepeatedParam(restpe)
- case PolyType(_, restpe) => hasRepeatedParam(restpe)
- case _ => false
- }
-
/** Add bridges for vararg methods that extend Java vararg methods
*/
def addVarargBridges(clazz: Symbol): List[Tree] = {
- val self = clazz.thisType
- val bridges = new ListBuffer[Tree]
-
- def varargBridge(member: Symbol, bridgetpe: Type): Tree = {
- val bridge = member.cloneSymbolImpl(clazz)
- .setPos(clazz.pos).setFlag(member.flags | VBRIDGE)
- bridge.setInfo(bridgetpe.cloneInfo(bridge))
- clazz.info.decls enter bridge
- val List(params) = bridge.paramss
- val TypeRef(_, JavaRepeatedParamClass, List(elemtp)) = params.last.tpe
- val (initargs, List(lastarg0)) = (params map Ident) splitAt (params.length - 1)
- val lastarg = gen.wildcardStar(gen.mkWrapArray(lastarg0, elemtp))
- val body = Apply(Select(This(clazz), member), initargs ::: List(lastarg))
- localTyper.typed {
- /*util.trace("generating varargs bridge")*/(DefDef(bridge, body))
+ // This is quite expensive, so attempt to skip it completely.
+ // Insist there at least be a java-defined ancestor which
+ // defines a varargs method. TODO: Find a cheaper way to exclude.
+ if (inheritsJavaVarArgsMethod(clazz)) {
+ log("Found java varargs ancestor in " + clazz.fullLocationString + ".")
+ val self = clazz.thisType
+ val bridges = new ListBuffer[Tree]
+
+ def varargBridge(member: Symbol, bridgetpe: Type): Tree = {
+ log(s"Generating varargs bridge for ${member.fullLocationString} of type $bridgetpe")
+
+ val newFlags = (member.flags | VBRIDGE | ARTIFACT) & ~PRIVATE
+ val bridge = member.cloneSymbolImpl(clazz, newFlags) setPos clazz.pos
+ bridge.setInfo(bridgetpe.cloneInfo(bridge))
+ clazz.info.decls enter bridge
+
+ val params = bridge.paramss.head
+ val elemtp = params.last.tpe.typeArgs.head
+ val idents = params map Ident
+ val lastarg = gen.wildcardStar(gen.mkWrapArray(idents.last, elemtp))
+ val body = Apply(Select(This(clazz), member), idents.init :+ lastarg)
+
+ localTyper typed DefDef(bridge, body)
}
- }
- // For all concrete non-private members that have a (Scala) repeated parameter:
- // compute the corresponding method type `jtpe` with a Java repeated parameter
- // if a method with type `jtpe` exists and that method is not a varargs bridge
- // then create a varargs bridge of type `jtpe` that forwards to the
- // member method with the Scala vararg type.
- for (member <- clazz.info.nonPrivateMembers) {
- if (!(member hasFlag DEFERRED) && hasRepeatedParam(member.info)) {
- val jtpe = toJavaRepeatedParam(self.memberType(member))
- val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE) filter (
- sym => (self.memberType(sym) matches jtpe) && !(sym hasFlag VBRIDGE)
- // this is a bit tortuous: we look for non-private members or bridges
- // if we find a bridge everything is OK. If we find another member,
- // we need to create a bridge
- )
- if (inherited.exists) {
- bridges += varargBridge(member, jtpe)
+ // For all concrete non-private members (but: see below) that have a (Scala) repeated
+ // parameter: compute the corresponding method type `jtpe` with a Java repeated parameter
+ // if a method with type `jtpe` exists and that method is not a varargs bridge
+ // then create a varargs bridge of type `jtpe` that forwards to the
+ // member method with the Scala vararg type.
+ //
+ // @PP: Can't call nonPrivateMembers because we will miss refinement members,
+ // which have been marked private. See SI-4729.
+ for (member <- nonTrivialMembers(clazz)) {
+ log(s"Considering $member for java varargs bridge in $clazz")
+ if (!member.isDeferred && member.isMethod && hasRepeatedParam(member.info)) {
+ val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE)
+
+ // Delaying calling memberType as long as possible
+ if (inherited ne NoSymbol) {
+ val jtpe = toJavaRepeatedParam(self memberType member)
+ // this is a bit tortuous: we look for non-private members or bridges
+ // if we find a bridge everything is OK. If we find another member,
+ // we need to create a bridge
+ val inherited1 = inherited filter (sym => !(sym hasFlag VBRIDGE) && (self memberType sym matches jtpe))
+ if (inherited1.exists)
+ bridges += varargBridge(member, jtpe)
+ }
}
}
- }
- bridges.toList
+ if (bridges.size > 0)
+ log(s"Adding ${bridges.size} bridges for methods extending java varargs.")
+
+ bridges.toList
+ }
+ else Nil
}
- /** 1. Check all members of class `clazz' for overriding conditions.
+ /** 1. Check all members of class `clazz` for overriding conditions.
* That is for overriding member M and overridden member O:
*
* 1.1. M must have the same or stronger access privileges as O.
* 1.2. O must not be final.
- * 1.3. O is deferred, or M has `override' modifier.
+ * 1.3. O is deferred, or M has `override` modifier.
* 1.4. If O is stable, then so is M.
* // @M: LIFTED 1.5. Neither M nor O are a parameterized type alias
* 1.6. If O is a type alias, then M is an alias of O.
@@ -193,13 +225,19 @@ abstract class RefChecks extends InfoTransform {
* 1.8.1 M's type is a subtype of O's type, or
* 1.8.2 M is of type []S, O is of type ()T and S <: T, or
* 1.8.3 M is of type ()S, O is of type []T and S <: T, or
+ * 1.9. If M is a macro def, O cannot be deferred unless there's a concrete method overriding O.
+ * 1.10. If M is not a macro def, O cannot be a macro def.
* 2. Check that only abstract classes have deferred members
* 3. Check that concrete classes do not have deferred definitions
* that are not implemented in a subclass.
- * 4. Check that every member with an `override' modifier
+ * 4. Check that every member with an `override` modifier
* overrides some other member.
*/
private def checkAllOverrides(clazz: Symbol, typesOnly: Boolean = false) {
+ val self = clazz.thisType
+ def classBoundAsSeen(tp: Type) = {
+ tp.typeSymbol.classBound.asSeenFrom(self, tp.typeSymbol.owner)
+ }
case class MixinOverrideError(member: Symbol, msg: String)
@@ -219,64 +257,72 @@ abstract class RefChecks extends InfoTransform {
}
}
- val self = clazz.thisType
-
- def isAbstractTypeWithoutFBound(sym: Symbol) = // (part of DEVIRTUALIZE)
- sym.isAbstractType && !isFBounded(sym)
-
- def isFBounded(tsym: Symbol) =
- tsym.info.baseTypeSeq exists (_ contains tsym)
-
def infoString(sym: Symbol) = infoString0(sym, sym.owner != clazz)
def infoStringWithLocation(sym: Symbol) = infoString0(sym, true)
def infoString0(sym: Symbol, showLocation: Boolean) = {
- val sym1 = analyzer.underlying(sym)
+ val sym1 = analyzer.underlyingSymbol(sym)
sym1.toString() +
(if (showLocation)
sym1.locationString +
(if (sym1.isAliasType) ", which equals "+self.memberInfo(sym1)
- else if (sym1.isAbstractType) " with bounds "+self.memberInfo(sym1)
+ else if (sym1.isAbstractType) " with bounds"+self.memberInfo(sym1)
+ else if (sym1.isModule) ""
else if (sym1.isTerm) " of type "+self.memberInfo(sym1)
else "")
else "")
}
- def overridesType(tp1: Type, tp2: Type): Boolean = (tp1.normalize, tp2.normalize) match {
- case (MethodType(List(), rtp1), NullaryMethodType(rtp2)) =>
- rtp1 <:< rtp2
- case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) =>
- rtp1 <:< rtp2
- case (TypeRef(_, sym, _), _) if (sym.isModuleClass) =>
- overridesType(NullaryMethodType(tp1), tp2)
- case _ =>
- tp1 <:< tp2
- }
-
/** Check that all conditions for overriding `other` by `member`
* of class `clazz` are met.
*/
- def checkOverride(clazz: Symbol, member: Symbol, other: Symbol) {
+ def checkOverride(member: Symbol, other: Symbol) {
+ debuglog("Checking validity of %s overriding %s".format(member.fullLocationString, other.fullLocationString))
+
+ def memberTp = self.memberType(member)
+ def otherTp = self.memberType(other)
def noErrorType = other.tpe != ErrorType && member.tpe != ErrorType
- def isRootOrNone(sym: Symbol) = sym == RootClass || sym == NoSymbol
+ def isRootOrNone(sym: Symbol) = sym != null && sym.isRoot || sym == NoSymbol
+ def isNeitherInClass = (member.owner != clazz) && (other.owner != clazz)
+ def objectOverrideErrorMsg = (
+ "overriding " + other.fullLocationString + " with " + member.fullLocationString + ":\n" +
+ "an overriding object must conform to the overridden object's class bound" +
+ analyzer.foundReqMsg(classBoundAsSeen(member.tpe), classBoundAsSeen(other.tpe))
+ )
+
+ def overrideErrorMsg(msg: String): String = {
+ val isConcreteOverAbstract =
+ (other.owner isSubClass member.owner) && other.isDeferred && !member.isDeferred
+ val addendum =
+ if (isConcreteOverAbstract)
+ ";\n (Note that %s is abstract,\n and is therefore overridden by concrete %s)".format(
+ infoStringWithLocation(other),
+ infoStringWithLocation(member)
+ )
+ else if (settings.debug.value)
+ analyzer.foundReqMsg(member.tpe, other.tpe)
+ else ""
+
+ "overriding %s;\n %s %s%s".format(
+ infoStringWithLocation(other), infoString(member), msg, addendum
+ )
+ }
+ def emitOverrideError(fullmsg: String) {
+ if (member.owner == clazz) unit.error(member.pos, fullmsg)
+ else mixinOverrideErrors += new MixinOverrideError(member, fullmsg)
+ }
def overrideError(msg: String) {
- if (noErrorType) {
- val fullmsg =
- "overriding "+infoStringWithLocation(other)+";\n "+
- infoString(member)+" "+msg+
- (if ((other.owner isSubClass member.owner) && other.isDeferred && !member.isDeferred)
- ";\n (Note that "+infoStringWithLocation(other)+" is abstract,"+
- "\n and is therefore overridden by concrete "+infoStringWithLocation(member)+")"
- else "")
- if (member.owner == clazz) unit.error(member.pos, fullmsg)
- else mixinOverrideErrors += new MixinOverrideError(member, fullmsg)
- }
+ if (noErrorType)
+ emitOverrideError(overrideErrorMsg(msg))
}
def overrideTypeError() {
if (noErrorType) {
- overrideError("has incompatible type")
+ emitOverrideError(
+ if (member.isModule && other.isModule) objectOverrideErrorMsg
+ else overrideErrorMsg("has incompatible type")
+ )
}
}
@@ -289,27 +335,28 @@ abstract class RefChecks extends InfoTransform {
// return if we already checked this combination elsewhere
if (member.owner != clazz) {
- if ((member.owner isSubClass other.owner) && (member.isDeferred || !other.isDeferred)) {
+ def deferredCheck = member.isDeferred || !other.isDeferred
+ def subOther(s: Symbol) = s isSubClass other.owner
+ def subMember(s: Symbol) = s isSubClass member.owner
+
+ if (subOther(member.owner) && deferredCheck) {
//Console.println(infoString(member) + " shadows1 " + infoString(other) " in " + clazz);//DEBUG
- return;
+ return
+ }
+ if (clazz.parentSymbols exists (p => subOther(p) && subMember(p) && deferredCheck)) {
+ //Console.println(infoString(member) + " shadows2 " + infoString(other) + " in " + clazz);//DEBUG
+ return
+ }
+ if (clazz.parentSymbols forall (p => subOther(p) == subMember(p))) {
+ //Console.println(infoString(member) + " shadows " + infoString(other) + " in " + clazz);//DEBUG
+ return
}
- if (clazz.info.parents exists (parent =>
- (parent.typeSymbol isSubClass other.owner) && (parent.typeSymbol isSubClass member.owner) &&
- (member.isDeferred || !other.isDeferred))) {
- //Console.println(infoString(member) + " shadows2 " + infoString(other) + " in " + clazz);//DEBUG
- return;
- }
- if (clazz.info.parents forall (parent =>
- (parent.typeSymbol isSubClass other.owner) == (parent.typeSymbol isSubClass member.owner))) {
- //Console.println(infoString(member) + " shadows " + infoString(other) + " in " + clazz);//DEBUG
- return;
- }
}
/** Is the intersection between given two lists of overridden symbols empty?
*/
def intersectionIsEmpty(syms1: List[Symbol], syms2: List[Symbol]) =
- !(syms1 exists (syms2 contains))
+ !(syms1 exists (syms2 contains _))
if (typesOnly) checkOverrideTypes()
else {
@@ -330,20 +377,33 @@ abstract class RefChecks extends InfoTransform {
}
if (!isOverrideAccessOK) {
overrideAccessError()
- } else if (other.isClass || other.isModule) {
- overrideError("cannot be used here - classes and objects cannot be overridden");
- } else if (!other.isDeferred && (member.isClass || member.isModule)) {
- overrideError("cannot be used here - classes and objects can only override abstract types");
- } else if (other hasFlag FINAL) { // (1.2)
+ } else if (other.isClass) {
+ overrideError("cannot be used here - class definitions cannot be overridden");
+ } else if (!other.isDeferred && member.isClass) {
+ overrideError("cannot be used here - classes can only override abstract types");
+ } else if (other.isEffectivelyFinal) { // (1.2)
overrideError("cannot override final member");
- } else if (!other.isDeferred && !(member hasFlag (OVERRIDE | ABSOVERRIDE | SYNTHETIC))) { // (1.3), SYNTHETIC because of DEVIRTUALIZE
- overrideError("needs `override' modifier");
- } else if ((other hasFlag ABSOVERRIDE) && other.isIncompleteIn(clazz) && !(member hasFlag ABSOVERRIDE)) {
+ } else if (!other.isDeferredOrDefault && !member.isAnyOverride && !member.isSynthetic) { // (*)
+ // (*) Synthetic exclusion for (at least) default getters, fixes SI-5178. We cannot assign the OVERRIDE flag to
+ // the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket.
+ if (isNeitherInClass && !(other.owner isSubClass member.owner))
+ emitOverrideError(
+ clazz + " inherits conflicting members:\n "
+ + infoStringWithLocation(other) + " and\n " + infoStringWithLocation(member)
+ + "\n(Note: this can be resolved by declaring an override in " + clazz + ".)"
+ )
+ else
+ overrideError("needs `override' modifier")
+ } else if (other.isAbstractOverride && other.isIncompleteIn(clazz) && !member.isAbstractOverride) {
overrideError("needs `abstract override' modifiers")
- } else if ((member hasFlag (OVERRIDE | ABSOVERRIDE)) &&
- (other hasFlag ACCESSOR) && other.accessed.isVariable && !other.accessed.isLazy) {
- overrideError("cannot override a mutable variable")
- } else if ((member hasFlag (OVERRIDE | ABSOVERRIDE)) &&
+ }
+ else if (member.isAnyOverride && (other hasFlag ACCESSOR) && other.accessed.isVariable && !other.accessed.isLazy) {
+ // !?! this is not covered by the spec. We need to resolve this either by changing the spec or removing the test here.
+ // !!! is there a !?! convention? I'm !!!ing this to make sure it turns up on my searches.
+ if (!settings.overrideVars.value)
+ overrideError("cannot override a mutable variable")
+ }
+ else if (member.isAnyOverride &&
!(member.owner.thisType.baseClasses exists (_ isSubClass other.owner)) &&
!member.isDeferred && !other.isDeferred &&
intersectionIsEmpty(member.extendedOverriddenSymbols, other.extendedOverriddenSymbols)) {
@@ -351,14 +411,19 @@ abstract class RefChecks extends InfoTransform {
"(this rule is designed to prevent ``accidental overrides'')")
} else if (other.isStable && !member.isStable) { // (1.4)
overrideError("needs to be a stable, immutable value")
- } else if (member.isValue && (member hasFlag LAZY) &&
- other.isValue && !other.isSourceMethod && !other.isDeferred && !(other hasFlag LAZY)) {
+ } else if (member.isValue && member.isLazy &&
+ other.isValue && !other.isSourceMethod && !other.isDeferred && !other.isLazy) {
overrideError("cannot override a concrete non-lazy value")
- } else if (other.isValue && (other hasFlag LAZY) && !other.isSourceMethod && !other.isDeferred &&
- member.isValue && !(member hasFlag LAZY)) {
+ } else if (other.isValue && other.isLazy && !other.isSourceMethod && !other.isDeferred &&
+ member.isValue && !member.isLazy) {
overrideError("must be declared lazy to override a concrete lazy value")
+ } else if (other.isDeferred && member.isTermMacro && member.extendedOverriddenSymbols.forall(_.isDeferred)) { // (1.9)
+ overrideError("cannot override an abstract method")
+ } else if (other.isTermMacro && !member.isTermMacro) { // (1.10)
+ overrideError("cannot override a macro")
} else {
checkOverrideTypes()
+ checkOverrideDeprecated()
if (settings.warnNullaryOverride.value) {
if (other.paramss.isEmpty && !member.paramss.isEmpty) {
unit.warning(member.pos, "non-nullary method overrides nullary method")
@@ -375,14 +440,14 @@ abstract class RefChecks extends InfoTransform {
// overrideError("may not override parameterized type");
// @M: substSym
- if( !(sameLength(member.typeParams, other.typeParams) && (self.memberType(member).substSym(member.typeParams, other.typeParams) =:= self.memberType(other))) ) // (1.6)
+ if( !(sameLength(member.typeParams, other.typeParams) && (memberTp.substSym(member.typeParams, other.typeParams) =:= otherTp)) ) // (1.6)
overrideTypeError();
- } else if (other.isAbstractType) {
+ }
+ else if (other.isAbstractType) {
//if (!member.typeParams.isEmpty) // (1.7) @MAT
// overrideError("may not be parameterized");
-
- val memberTp = self.memberType(member)
val otherTp = self.memberInfo(other)
+
if (!(otherTp.bounds containsType memberTp)) { // (1.7.1)
overrideTypeError(); // todo: do an explaintypes with bounds here
explainTypes(_.bounds containsType _, otherTp, memberTp)
@@ -401,6 +466,7 @@ abstract class RefChecks extends InfoTransform {
// check a type alias's RHS corresponds to its declaration
// this overlaps somewhat with validateVariance
if(member.isAliasType) {
+ // println("checkKindBounds" + ((List(member), List(memberTp.normalize), self, member.owner)))
val kindErrors = typer.infer.checkKindBounds(List(member), List(memberTp.normalize), self, member.owner)
if(!kindErrors.isEmpty)
@@ -416,13 +482,13 @@ abstract class RefChecks extends InfoTransform {
other.cookJavaRawInfo() // #2454
val memberTp = self.memberType(member)
val otherTp = self.memberType(other)
- if (!overridesType(memberTp, otherTp)) { // 8
+ if (!overridesTypeInPrefix(memberTp, otherTp, self)) { // 8
overrideTypeError()
explainTypes(memberTp, otherTp)
}
if (member.isStable && !otherTp.isVolatile) {
- if (memberTp.isVolatile)
+ if (memberTp.isVolatile)
overrideError("has a volatile type; cannot override a member with non-volatile type")
else memberTp.normalize.resultType match {
case rt: RefinedType if !(rt =:= otherTp) && !(checkedCombinations contains rt.parents) =>
@@ -436,19 +502,27 @@ abstract class RefChecks extends InfoTransform {
}
}
}
+
+ def checkOverrideDeprecated() {
+ if (other.hasDeprecatedOverridingAnnotation) {
+ val suffix = other.deprecatedOverridingMessage map (": " + _) getOrElse ""
+ val msg = s"overriding ${other.fullLocationString} is deprecated$suffix"
+ unit.deprecationWarning(member.pos, msg)
+ }
+ }
}
val opc = new overridingPairs.Cursor(clazz)
while (opc.hasNext) {
//Console.println(opc.overriding/* + ":" + opc.overriding.tpe*/ + " in "+opc.overriding.fullName + " overrides " + opc.overridden/* + ":" + opc.overridden.tpe*/ + " in "+opc.overridden.fullName + "/"+ opc.overridden.hasFlag(DEFERRED));//debug
- if (!opc.overridden.isClass) checkOverride(clazz, opc.overriding, opc.overridden);
+ if (!opc.overridden.isClass) checkOverride(opc.overriding, opc.overridden);
opc.next
}
printMixinOverrideErrors()
// Verifying a concrete class has nothing unimplemented.
- if (clazz.isClass && !clazz.isTrait && !(clazz hasFlag ABSTRACT) && !typesOnly) {
+ if (clazz.isConcreteClass && !typesOnly) {
val abstractErrors = new ListBuffer[String]
def abstractErrorMessage =
// a little formatting polish
@@ -468,46 +542,84 @@ abstract class RefChecks extends InfoTransform {
def javaErasedOverridingSym(sym: Symbol): Symbol =
clazz.tpe.nonPrivateMemberAdmitting(sym.name, BRIDGE).filter(other =>
- !other.isDeferred && other.isJavaDefined && {
- def uncurryAndErase(tp: Type) = erasure.erasure(uncurry.transformInfo(sym, tp)) // #3622: erasure operates on uncurried types -- note on passing sym in both cases: only sym.isType is relevant for uncurry.transformInfo
+ !other.isDeferred && other.isJavaDefined && !sym.enclClass.isSubClass(other.enclClass) && {
+ // #3622: erasure operates on uncurried types --
+ // note on passing sym in both cases: only sym.isType is relevant for uncurry.transformInfo
+ // !!! erasure.erasure(sym, uncurry.transformInfo(sym, tp)) gives erreneous of inaccessible type - check whether that's still the case!
+ def uncurryAndErase(tp: Type) = erasure.erasure(sym)(uncurry.transformInfo(sym, tp))
val tp1 = uncurryAndErase(clazz.thisType.memberType(sym))
val tp2 = uncurryAndErase(clazz.thisType.memberType(other))
- atPhase(currentRun.erasurePhase.next)(tp1 matches tp2)
+ afterErasure(tp1 matches tp2)
})
- def ignoreDeferred(member: Symbol) =
- isAbstractTypeWithoutFBound(member) ||
- (member.isJavaDefined &&
- (currentRun.erasurePhase == NoPhase || // the test requires atPhase(erasurePhase.next) so shouldn't be done if the compiler has no erasure phase available
- javaErasedOverridingSym(member) != NoSymbol))
+ def ignoreDeferred(member: Symbol) = (
+ (member.isAbstractType && !member.isFBounded) || (
+ member.isJavaDefined &&
+ // the test requires afterErasure so shouldn't be
+ // done if the compiler has no erasure phase available
+ (currentRun.erasurePhase == NoPhase || javaErasedOverridingSym(member) != NoSymbol)
+ )
+ )
// 2. Check that only abstract classes have deferred members
- def checkNoAbstractMembers() = {
+ def checkNoAbstractMembers(): Unit = {
// Avoid spurious duplicates: first gather any missing members.
- def memberList = clazz.tpe.nonPrivateMembersAdmitting(VBRIDGE)
- val (missing, rest) = memberList partition (m => m.isDeferred && !ignoreDeferred(m))
- // Group missing members by the underlying symbol.
- val grouped = missing groupBy (analyzer underlying _ name)
+ def memberList = clazz.info.nonPrivateMembersAdmitting(VBRIDGE)
+ val (missing, rest) = memberList partition (m => m.isDeferredNotDefault && !ignoreDeferred(m))
+ // Group missing members by the name of the underlying symbol,
+ // to consolidate getters and setters.
+ val grouped = missing groupBy (sym => analyzer.underlyingSymbol(sym).name)
+ val missingMethods = grouped.toList flatMap {
+ case (name, syms) =>
+ if (syms exists (_.isSetter)) syms filterNot (_.isGetter)
+ else syms
+ }
+
+ def stubImplementations: List[String] = {
+ // Grouping missing methods by the declaring class
+ val regrouped = missingMethods.groupBy(_.owner).toList
+ def membersStrings(members: List[Symbol]) =
+ members.sortBy("" + _.name) map (m => m.defStringSeenAs(clazz.tpe memberType m) + " = ???")
+
+ if (regrouped.tail.isEmpty)
+ membersStrings(regrouped.head._2)
+ else (regrouped.sortBy("" + _._1.name) flatMap {
+ case (owner, members) =>
+ ("// Members declared in " + owner.fullName) +: membersStrings(members) :+ ""
+ }).init
+ }
+
+ // If there are numerous missing methods, we presume they are aware of it and
+ // give them a nicely formatted set of method signatures for implementing.
+ if (missingMethods.size > 1) {
+ abstractClassError(false, "it has " + missingMethods.size + " unimplemented members.")
+ val preface =
+ """|/** As seen from %s, the missing signatures are as follows.
+ | * For convenience, these are usable as stub implementations.
+ | */
+ |""".stripMargin.format(clazz)
+ abstractErrors += stubImplementations.map(" " + _ + "\n").mkString(preface, "", "")
+ return
+ }
for (member <- missing) {
def undefined(msg: String) = abstractClassError(false, infoString(member) + " is not defined" + msg)
- val underlying = analyzer.underlying(member)
+ val underlying = analyzer.underlyingSymbol(member)
// Give a specific error message for abstract vars based on why it fails:
// It could be unimplemented, have only one accessor, or be uninitialized.
if (underlying.isVariable) {
+ val isMultiple = grouped.getOrElse(underlying.name, Nil).size > 1
+
// If both getter and setter are missing, squelch the setter error.
- val isMultiple = grouped(underlying.name).size > 1
- // TODO: messages shouldn't be spread over two files, and varNotice is not a clear name
if (member.isSetter && isMultiple) ()
else undefined(
if (member.isSetter) "\n(Note that an abstract var requires a setter in addition to the getter)"
else if (member.isGetter && !isMultiple) "\n(Note that an abstract var requires a getter in addition to the setter)"
- else analyzer.varNotice(member)
+ else analyzer.abstractVarMessage(member)
)
}
else if (underlying.isMethod) {
-
// If there is a concrete method whose name matches the unimplemented
// abstract method, and a cursory examination of the difference reveals
// something obvious to us, let's make it more obvious to them.
@@ -522,24 +634,35 @@ abstract class RefChecks extends InfoTransform {
matchingArity match {
// So far so good: only one candidate method
- case concrete :: Nil =>
+ case Scope(concrete) =>
val mismatches = abstractParams zip concrete.tpe.paramTypes filterNot { case (x, y) => x =:= y }
mismatches match {
// Only one mismatched parameter: say something useful.
case (pa, pc) :: Nil =>
- val addendum =
- if (pa.typeSymbol == pc.typeSymbol) {
+ val abstractSym = pa.typeSymbol
+ val concreteSym = pc.typeSymbol
+ def subclassMsg(c1: Symbol, c2: Symbol) = (
+ ": %s is a subclass of %s, but method parameter types must match exactly.".format(
+ c1.fullLocationString, c2.fullLocationString)
+ )
+ val addendum = (
+ if (abstractSym == concreteSym) {
// TODO: what is the optimal way to test for a raw type at this point?
// Compilation has already failed so we shouldn't have to worry overmuch
// about forcing types.
- if (underlying.isJavaDefined && pa.typeArgs.isEmpty && pa.typeSymbol.typeParams.nonEmpty)
+ if (underlying.isJavaDefined && pa.typeArgs.isEmpty && abstractSym.typeParams.nonEmpty)
". To implement a raw type, use %s[_]".format(pa)
else if (pa.prefix =:= pc.prefix)
": their type parameters differ"
else
": their prefixes (i.e. enclosing instances) differ"
}
+ else if (abstractSym isSubClass concreteSym)
+ subclassMsg(abstractSym, concreteSym)
+ else if (concreteSym isSubClass abstractSym)
+ subclassMsg(concreteSym, abstractSym)
else ""
+ )
undefined("\n(Note that %s does not match %s%s)".format(pa, pc, addendum))
case xs =>
@@ -553,7 +676,7 @@ abstract class RefChecks extends InfoTransform {
}
// Check the remainder for invalid absoverride.
- for (member <- rest ; if ((member hasFlag ABSOVERRIDE) && member.isIncompleteIn(clazz))) {
+ for (member <- rest ; if (member.isAbstractOverride && member.isIncompleteIn(clazz))) {
val other = member.superSymbol(clazz)
val explanation =
if (other != NoSymbol) " and overrides incomplete superclass member " + infoString(other)
@@ -572,18 +695,17 @@ abstract class RefChecks extends InfoTransform {
//
// (3) is violated but not (2).
def checkNoAbstractDecls(bc: Symbol) {
- for (decl <- bc.info.decls.iterator) {
+ for (decl <- bc.info.decls) {
if (decl.isDeferred && !ignoreDeferred(decl)) {
val impl = decl.matchingSymbol(clazz.thisType, admit = VBRIDGE)
if (impl == NoSymbol || (decl.owner isSubClass impl.owner)) {
abstractClassError(false, "there is a deferred declaration of "+infoString(decl)+
- " which is not implemented in a subclass"+analyzer.varNotice(decl))
+ " which is not implemented in a subclass"+analyzer.abstractVarMessage(decl))
}
}
}
- val parents = bc.info.parents
- if (!parents.isEmpty && parents.head.typeSymbol.hasFlag(ABSTRACT))
- checkNoAbstractDecls(parents.head.typeSymbol)
+ if (bc.superClass hasFlag ABSTRACT)
+ checkNoAbstractDecls(bc.superClass)
}
checkNoAbstractMembers()
@@ -592,8 +714,10 @@ abstract class RefChecks extends InfoTransform {
if (abstractErrors.nonEmpty)
unit.error(clazz.pos, abstractErrorMessage)
- } else if (clazz.isTrait) {
- // prevent abstract methods in interfaces that override final members in Object; see #4431
+ }
+ else if (clazz.isTrait && !(clazz isSubClass AnyValClass)) {
+ // For non-AnyVal classes, prevent abstract methods in interfaces that override
+ // final members in Object; see #4431
for (decl <- clazz.info.decls.iterator) {
val overridden = decl.overriddenSymbol(ObjectClass)
if (overridden.isFinal)
@@ -644,13 +768,21 @@ abstract class RefChecks extends InfoTransform {
(inclazz != clazz) && (matchingSyms != NoSymbol)
}
- // 4. Check that every defined member with an `override' modifier overrides some other member.
- for (member <- clazz.info.decls.toList)
- if ((member hasFlag (OVERRIDE | ABSOVERRIDE)) &&
- !(clazz.thisType.baseClasses exists (hasMatchingSym(_, member)))) {
+ // 4. Check that every defined member with an `override` modifier overrides some other member.
+ for (member <- clazz.info.decls)
+ if (member.isAnyOverride && !(clazz.thisType.baseClasses exists (hasMatchingSym(_, member)))) {
// for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG
- unit.error(member.pos, member.toString() + " overrides nothing");
- member resetFlag OVERRIDE
+
+ val nonMatching: List[Symbol] = clazz.info.member(member.name).alternatives.filterNot(_.owner == clazz).filterNot(_.isFinal)
+ def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix);
+ nonMatching match {
+ case Nil =>
+ issueError("")
+ case ms =>
+ val superSigs = ms.map(m => m.defStringSeenAs(clazz.tpe memberType m)).mkString("\n")
+ issueError(s".\nNote: the super classes of ${member.owner} contain the following, non final members named ${member.name}:\n${superSigs}")
+ }
+ member resetFlag (OVERRIDE | ABSOVERRIDE) // Any Override
}
}
@@ -690,9 +822,9 @@ abstract class RefChecks extends InfoTransform {
for (i <- 0 until seenTypes.length) {
val baseClass = clazz.info.baseTypeSeq(i).typeSymbol
seenTypes(i) match {
- case List() =>
+ case Nil =>
println("??? base "+baseClass+" not found in basetypes of "+clazz)
- case List(_) =>
+ case _ :: Nil =>
;// OK
case tp1 :: tp2 :: _ =>
unit.error(clazz.pos, "illegal inheritance;\n " + clazz +
@@ -717,6 +849,9 @@ abstract class RefChecks extends InfoTransform {
/** Validate variance of info of symbol `base` */
private def validateVariance(base: Symbol) {
+ // A flag for when we're in a refinement, meaning method parameter types
+ // need to be checked.
+ var inRefinement = false
def varianceString(variance: Int): String =
if (variance == 1) "covariant"
@@ -752,8 +887,7 @@ abstract class RefChecks extends InfoTransform {
// However, if `sym` does override a type in a base class
// we have to assume NoVariance, as there might then be
// references to the type parameter that are not variance checked.
- state = if (sym.allOverriddenSymbols.isEmpty) AnyVariance
- else NoVariance
+ state = if (sym.isOverridingSymbol) NoVariance else AnyVariance
}
sym = sym.owner
}
@@ -764,20 +898,24 @@ abstract class RefChecks extends InfoTransform {
* the type occurs itself at variance position given by `variance`
*/
def validateVariance(tp: Type, variance: Int): Unit = tp match {
- case ErrorType => ;
- case WildcardType => ;
- case NoType => ;
- case NoPrefix => ;
- case ThisType(_) => ;
- case ConstantType(_) => ;
- // case DeBruijnIndex(_, _) => ;
+ case ErrorType =>
+ case WildcardType =>
+ case BoundedWildcardType(bounds) =>
+ validateVariance(bounds, variance)
+ case NoType =>
+ case NoPrefix =>
+ case ThisType(_) =>
+ case ConstantType(_) =>
+ // case DeBruijnIndex(_, _) =>
case SingleType(pre, sym) =>
validateVariance(pre, variance)
+ case TypeRef(_, sym, _) if sym.isAliasType =>
+ // okay to ignore pre/args here. In 2.10.3 we used to check them in addition to checking
+ // the normalized type, which led to exponential time type checking, see pos/t8152-performance.scala
+ validateVariance(tp.normalize, variance)
case TypeRef(pre, sym, args) =>
// println("validate "+sym+" at "+relativeVariance(sym))
- if (sym.isAliasType/* && relativeVariance(sym) == AnyVariance*/)
- validateVariance(tp.normalize, variance)
- else if (sym.variance != NoVariance) {
+ if (sym.variance != NoVariance) {
val v = relativeVariance(sym)
if (v != AnyVariance && sym.variance != v * variance) {
//Console.println("relativeVariance(" + base + "," + sym + ") = " + v);//DEBUG
@@ -792,19 +930,26 @@ abstract class RefChecks extends InfoTransform {
}
}
validateVariance(pre, variance)
- validateVarianceArgs(args, variance, sym.typeParams) //@M for higher-kinded typeref, args.isEmpty
+ // @M for higher-kinded typeref, args.isEmpty
// However, these args respect variances by construction anyway
// -- the interesting case is in type application, see checkKindBounds in Infer
+ if (args.nonEmpty)
+ validateVarianceArgs(args, variance, sym.typeParams)
case ClassInfoType(parents, decls, symbol) =>
validateVariances(parents, variance)
case RefinedType(parents, decls) =>
validateVariances(parents, variance)
- for (sym <- decls.toList)
+ val saved = inRefinement
+ inRefinement = true
+ for (sym <- decls)
validateVariance(sym.info, if (sym.isAliasType) NoVariance else variance)
+ inRefinement = saved
case TypeBounds(lo, hi) =>
validateVariance(lo, -variance)
validateVariance(hi, variance)
- case MethodType(formals, result) =>
+ case mt @ MethodType(formals, result) =>
+ if (inRefinement)
+ validateVariances(mt.paramTypes, -variance)
validateVariance(result, variance)
case NullaryMethodType(result) =>
validateVariance(result, variance)
@@ -815,7 +960,7 @@ abstract class RefChecks extends InfoTransform {
validateVariances(tparams map (_.info), variance)
validateVariance(result, variance)
case AnnotatedType(annots, tp, selfsym) =>
- if (!(annots exists (_.atp.typeSymbol.isNonBottomSubClass(uncheckedVarianceClass))))
+ if (!annots.exists(_ matches uncheckedVarianceClass))
validateVariance(tp, variance)
}
@@ -824,9 +969,7 @@ abstract class RefChecks extends InfoTransform {
}
def validateVarianceArgs(tps: List[Type], variance: Int, tparams: List[Symbol]) {
- (tps zip tparams) foreach {
- case (tp, tparam) => validateVariance(tp, variance * tparam.variance)
- }
+ foreach2(tps, tparams)((tp, tparam) => validateVariance(tp, variance * tparam.variance))
}
validateVariance(base.info, CoVariance)
@@ -834,16 +977,20 @@ abstract class RefChecks extends InfoTransform {
override def traverse(tree: Tree) {
tree match {
- case ClassDef(_, _, _, _) |
- TypeDef(_, _, _, _) =>
+ case ClassDef(_, _, _, _) | TypeDef(_, _, _, _) =>
validateVariance(tree.symbol)
super.traverse(tree)
// ModuleDefs need not be considered because they have been eliminated already
case ValDef(_, _, _, _) =>
- validateVariance(tree.symbol)
- case DefDef(_, _, tparams, vparamss, tpt, rhs) =>
- validateVariance(tree.symbol)
- traverseTrees(tparams); traverseTreess(vparamss)
+ if (!tree.symbol.hasLocalFlag)
+ validateVariance(tree.symbol)
+ case DefDef(_, _, tparams, vparamss, _, _) =>
+ // No variance check for object-private/protected methods/values.
+ if (!tree.symbol.hasLocalFlag) {
+ validateVariance(tree.symbol)
+ traverseTrees(tparams)
+ traverseTreess(vparamss)
+ }
case Template(_, _, _) =>
super.traverse(tree)
case _ =>
@@ -854,14 +1001,14 @@ abstract class RefChecks extends InfoTransform {
// Forward reference checking ---------------------------------------------------
class LevelInfo(val outer: LevelInfo) {
- val scope: Scope = if (outer eq null) new Scope else new Scope(outer.scope)
+ val scope: Scope = if (outer eq null) newScope else newNestedScope(outer.scope)
var maxindex: Int = Int.MinValue
var refpos: Position = _
var refsym: Symbol = _
}
private var currentLevel: LevelInfo = null
- private val symIndex = new mutable.HashMap[Symbol, Int]
+ private val symIndex = perRunCaches.newMap[Symbol, Int]()
private def pushLevel() {
currentLevel = new LevelInfo(currentLevel)
@@ -874,15 +1021,18 @@ abstract class RefChecks extends InfoTransform {
private def enterSyms(stats: List[Tree]) {
var index = -1
for (stat <- stats) {
- index = index + 1;
+ index = index + 1
+ def enterSym(sym: Symbol) = if (sym.isLocal) {
+ currentLevel.scope.enter(sym)
+ symIndex(sym) = index
+ }
+
stat match {
+ case DefDef(_, _, _, _, _, _) if stat.symbol.isLazy =>
+ enterSym(stat.symbol)
case ClassDef(_, _, _, _) | DefDef(_, _, _, _, _, _) | ModuleDef(_, _, _) | ValDef(_, _, _, _) =>
//assert(stat.symbol != NoSymbol, stat);//debug
- val sym = stat.symbol.lazyAccessorOrSelf
- if (sym.isLocal) {
- currentLevel.scope.enter(sym)
- symIndex(sym) = index;
- }
+ enterSym(stat.symbol.lazyAccessorOrSelf)
case _ =>
}
}
@@ -909,109 +1059,197 @@ abstract class RefChecks extends InfoTransform {
def apply(tp: Type) = mapOver(tp).normalize
}
+ def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.lint.value) (fn, args) match {
+ case (tap at TypeApply(fun, targs), List(view: ApplyImplicitView)) if fun.symbol == Option_apply =>
+ unit.warning(pos, s"Suspicious application of an implicit view (${view.fun}) in the argument to Option.apply.") // SI-6567
+ case _ =>
+ }
+
+ private def isObjectOrAnyComparisonMethod(sym: Symbol) = sym match {
+ case Object_eq | Object_ne | Object_== | Object_!= | Any_== | Any_!= => true
+ case _ => false
+ }
def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match {
- case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 =>
- def isReferenceOp = name == nme.eq || name == nme.ne
+ case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 && isObjectOrAnyComparisonMethod(fn.symbol) =>
+ def isReferenceOp = fn.symbol == Object_eq || fn.symbol == Object_ne
def isNew(tree: Tree) = tree match {
case Function(_, _)
| Apply(Select(New(_), nme.CONSTRUCTOR), _) => true
case _ => false
}
def underlyingClass(tp: Type): Symbol = {
- var sym = tp.widen.typeSymbol
- while (sym.isAbstractType)
- sym = sym.info.bounds.hi.widen.typeSymbol
- sym
+ val sym = tp.widen.typeSymbol
+ if (sym.isAbstractType) underlyingClass(sym.info.bounds.hi)
+ else sym
}
val actual = underlyingClass(args.head.tpe)
val receiver = underlyingClass(qual.tpe)
def onTrees[T](f: List[Tree] => T) = f(List(qual, args.head))
def onSyms[T](f: List[Symbol] => T) = f(List(receiver, actual))
- // @MAT normalize for consistency in error message, otherwise only part is normalized due to use of `typeSymbol'
+ // @MAT normalize for consistency in error message, otherwise only part is normalized due to use of `typeSymbol`
def typesString = normalizeAll(qual.tpe.widen)+" and "+normalizeAll(args.head.tpe.widen)
/** Symbols which limit the warnings we can issue since they may be value types */
- val isMaybeValue = Set(AnyClass, AnyRefClass, AnyValClass, ObjectClass, ComparableClass, SerializableClass)
+ val isMaybeValue = Set[Symbol](AnyClass, AnyRefClass, AnyValClass, ObjectClass, ComparableClass, JavaSerializableClass)
- // Whether def equals(other: Any) is overridden
- def isUsingDefaultEquals = {
+ // Whether def equals(other: Any) has known behavior: it is the default
+ // inherited from java.lang.Object, or it is a synthetically generated
+ // case equals. TODO - more cases are warnable if the target is a synthetic
+ // equals.
+ def isUsingWarnableEquals = {
val m = receiver.info.member(nme.equals_)
- (m == Object_equals) || (m == Any_equals)
+ ((m == Object_equals) || (m == Any_equals) || isMethodCaseEquals(m))
}
+ def isMethodCaseEquals(m: Symbol) = m.isSynthetic && m.owner.isCase
+ def isCaseEquals = isMethodCaseEquals(receiver.info.member(nme.equals_))
// Whether this == or != is one of those defined in Any/AnyRef or an overload from elsewhere.
def isUsingDefaultScalaOp = {
val s = fn.symbol
(s == Object_==) || (s == Object_!=) || (s == Any_==) || (s == Any_!=)
}
+ def haveSubclassRelationship = (actual isSubClass receiver) || (receiver isSubClass actual)
+
// Whether the operands+operator represent a warnable combo (assuming anyrefs)
- def isWarnable = isReferenceOp || (isUsingDefaultEquals && isUsingDefaultScalaOp)
+ // Looking for comparisons performed with ==/!= in combination with either an
+ // equals method inherited from Object or a case class synthetic equals (for
+ // which we know the logic.)
+ def isWarnable = isReferenceOp || (isUsingDefaultScalaOp && isUsingWarnableEquals)
def isEitherNullable = (NullClass.tpe <:< receiver.info) || (NullClass.tpe <:< actual.info)
+ def isEitherValueClass = actual.isDerivedValueClass || receiver.isDerivedValueClass
def isBoolean(s: Symbol) = unboxedValueClass(s) == BooleanClass
def isUnit(s: Symbol) = unboxedValueClass(s) == UnitClass
- def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || (s isSubClass ScalaNumberClass)
+ def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || isAnyNumber(s)
+ def isScalaNumber(s: Symbol) = s isSubClass ScalaNumberClass
+ // test is behind a platform guard
+ def isJavaNumber(s: Symbol) = !forMSIL && (s isSubClass JavaNumberClass)
+ // includes java.lang.Number if appropriate [SI-5779]
+ def isAnyNumber(s: Symbol) = isScalaNumber(s) || isJavaNumber(s)
+ def isMaybeAnyValue(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || isMaybeValue(s)
+ // used to short-circuit unrelatedTypes check if both sides are special
+ def isSpecial(s: Symbol) = isMaybeAnyValue(s) || isAnyNumber(s)
+ // unused
def possibleNumericCount = onSyms(_ filter (x => isNumeric(x) || isMaybeValue(x)) size)
val nullCount = onSyms(_ filter (_ == NullClass) size)
+ def isNonsenseValueClassCompare = (
+ !haveSubclassRelationship
+ && isUsingDefaultScalaOp
+ && isEitherValueClass
+ && !isCaseEquals
+ )
def nonSensibleWarning(what: String, alwaysEqual: Boolean) = {
val msg = alwaysEqual == (name == nme.EQ || name == nme.eq)
unit.warning(pos, "comparing "+what+" using `"+name.decode+"' will always yield " + msg)
}
-
def nonSensible(pre: String, alwaysEqual: Boolean) =
nonSensibleWarning(pre+"values of types "+typesString, alwaysEqual)
+ def nonSensiblyEq() = nonSensible("", true)
+ def nonSensiblyNeq() = nonSensible("", false)
+ def nonSensiblyNew() = nonSensibleWarning("a fresh object", false)
- def unrelatedTypes() =
- unit.warning(pos, typesString + " are unrelated: should not compare equal")
+ def unrelatedMsg = name match {
+ case nme.EQ | nme.eq => "never compare equal"
+ case _ => "always compare unequal"
+ }
+ def unrelatedTypes() = {
+ val weaselWord = if (isEitherValueClass) "" else " most likely"
+ unit.warning(pos, s"$typesString are unrelated: they will$weaselWord $unrelatedMsg")
+ }
- if (nullCount == 2)
- nonSensible("", true) // null == null
+ if (nullCount == 2) // null == null
+ nonSensiblyEq()
else if (nullCount == 1) {
- if (onSyms(_ exists isValueClass)) // null == 5
- nonSensible("", false)
+ if (onSyms(_ exists isPrimitiveValueClass)) // null == 5
+ nonSensiblyNeq()
else if (onTrees( _ exists isNew)) // null == new AnyRef
- nonSensibleWarning("a fresh object", false)
+ nonSensiblyNew()
}
else if (isBoolean(receiver)) {
if (!isBoolean(actual) && !isMaybeValue(actual)) // true == 5
- nonSensible("", false)
+ nonSensiblyNeq()
}
else if (isUnit(receiver)) {
if (isUnit(actual)) // () == ()
- nonSensible("", true)
+ nonSensiblyEq()
else if (!isUnit(actual) && !isMaybeValue(actual)) // () == "abc"
- nonSensible("", false)
+ nonSensiblyNeq()
}
else if (isNumeric(receiver)) {
if (!isNumeric(actual) && !forMSIL)
if (isUnit(actual) || isBoolean(actual) || !isMaybeValue(actual)) // 5 == "abc"
- nonSensible("", false)
+ nonSensiblyNeq()
}
- else if (isWarnable) {
+ else if (isWarnable && !isCaseEquals) {
if (isNew(qual)) // new X == y
- nonSensibleWarning("a fresh object", false)
- else if (isNew(args.head) && (receiver.isFinal || isReferenceOp)) // object X ; X == new Y
- nonSensibleWarning("a fresh object", false)
- else if (receiver.isFinal && !(receiver isSubClass actual)) { // object X, Y; X == Y
+ nonSensiblyNew()
+ else if (isNew(args.head) && (receiver.isEffectivelyFinal || isReferenceOp)) // object X ; X == new Y
+ nonSensiblyNew()
+ else if (receiver.isEffectivelyFinal && !(receiver isSubClass actual) && !actual.isRefinementClass) { // object X, Y; X == Y
if (isEitherNullable)
nonSensible("non-null ", false)
else
- nonSensible("", false)
+ nonSensiblyNeq()
}
}
- // Warning on types without a parental relationship. Uncovers a lot of
- // bugs, but not always right to warn.
- if (false) {
- if (nullCount == 0 && possibleNumericCount < 2 && !(receiver isSubClass actual) && !(actual isSubClass receiver))
- unrelatedTypes()
- }
+ // warn if one but not the other is a derived value class
+ // this is especially important to enable transitioning from
+ // regular to value classes without silent failures.
+ if (isNonsenseValueClassCompare)
+ unrelatedTypes()
+ // possibleNumericCount is insufficient or this will warn on e.g. Boolean == j.l.Boolean
+ else if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
+ // better to have lubbed and lost
+ def warnIfLubless(): Unit = {
+ val common = global.lub(List(actual.tpe, receiver.tpe))
+ if (ObjectClass.tpe <:< common)
+ unrelatedTypes()
+ }
+ // warn if actual has a case parent that is not same as receiver's;
+ // if actual is not a case, then warn if no common supertype, as below
+ if (isCaseEquals) {
+ def thisCase = receiver.info.member(nme.equals_).owner
+ actual.info.baseClasses.find(_.isCase) match {
+ case Some(p) if p != thisCase => nonSensible("case class ", false)
+ case None =>
+ // stronger message on (Some(1) == None)
+ //if (receiver.isCase && receiver.isEffectivelyFinal && !(receiver isSubClass actual)) nonSensiblyNeq()
+ //else
+ // if a class, it must be super to thisCase (and receiver) since not <: thisCase
+ if (!actual.isTrait && !(receiver isSubClass actual)) nonSensiblyNeq()
+ else if (!haveSubclassRelationship) warnIfLubless()
+ case _ =>
+ }
+ }
+ // warn only if they have no common supertype below Object
+ else if (!haveSubclassRelationship) {
+ warnIfLubless()
+ }
+ }
case _ =>
}
+ // SI-6276 warn for `def foo = foo` or `val bar: X = bar`, which come up more frequently than you might think.
+ def checkInfiniteLoop(valOrDef: ValOrDefDef) {
+ def callsSelf = valOrDef.rhs match {
+ case t @ (Ident(_) | Select(This(_), _)) =>
+ t hasSymbolWhich (_.accessedOrSelf == valOrDef.symbol)
+ case _ => false
+ }
+ val trivialInifiniteLoop = (
+ !valOrDef.isErroneous
+ && !valOrDef.symbol.isValueParameter
+ && valOrDef.symbol.paramss.isEmpty
+ && callsSelf
+ )
+ if (trivialInifiniteLoop)
+ unit.warning(valOrDef.rhs.pos, s"${valOrDef.symbol.fullLocationString} does nothing other than call itself recursively")
+ }
+
// Transformation ------------------------------------------------------------
- /* Convert a reference to a case factory of type `tpe' to a new of the class it produces. */
+ /* Convert a reference to a case factory of type `tpe` to a new of the class it produces. */
def toConstructor(pos: Position, tpe: Type): Tree = {
var rtpe = tpe.finalResultType
assert(rtpe.typeSymbol hasFlag CASE, tpe);
@@ -1042,33 +1280,33 @@ abstract class RefChecks extends InfoTransform {
*/
private def eliminateModuleDefs(tree: Tree): List[Tree] = {
val ModuleDef(mods, name, impl) = tree
- val sym = tree.symbol
-
- val classSym = sym.moduleClass
- val cdef = ClassDef(mods | MODULE, name.toTypeName, Nil, impl) setSymbol classSym setType NoType
+ val sym = tree.symbol
+ val classSym = sym.moduleClass
+ val cdef = ClassDef(mods | MODULE, name.toTypeName, Nil, impl) setSymbol classSym setType NoType
def findOrCreateModuleVar() = localTyper.typedPos(tree.pos) {
- lazy val createModuleVar = gen.mkModuleVarDef(sym)
- sym.owner.info.decl(nme.moduleVarName(sym.name.toTermName)) match {
- // In case we are dealing with local symbol then we already have
- // to correct error with forward reference
- case NoSymbol => createModuleVar
- case vsym => ValDef(vsym)
- }
+ // See SI-5012, SI-6712.
+ val vsym = (
+ if (sym.owner.isTerm) NoSymbol
+ else sym.enclClass.info.decl(nme.moduleVarName(sym.name.toTermName))
+ )
+ // In case we are dealing with local symbol then we already have
+ // to correct error with forward reference
+ if (vsym == NoSymbol) gen.mkModuleVarDef(sym)
+ else ValDef(vsym)
}
- def createStaticModuleAccessor() = atPhase(phase.next) {
+ def createStaticModuleAccessor() = afterRefchecks {
val method = (
- sym.owner.newMethod(sym.pos, sym.name.toTermName)
- setFlag (sym.flags | STABLE) resetFlag MODULE setInfo NullaryMethodType(sym.moduleClass.tpe)
+ sym.owner.newMethod(sym.name.toTermName, sym.pos, (sym.flags | STABLE) & ~MODULE)
+ setInfoAndEnter NullaryMethodType(sym.moduleClass.tpe)
)
- sym.owner.info.decls enter method
localTyper.typedPos(tree.pos)(gen.mkModuleAccessDef(method, sym))
}
def createInnerModuleAccessor(vdef: Tree) = List(
vdef,
localTyper.typedPos(tree.pos) {
val vsym = vdef.symbol
- atPhase(phase.next) {
+ afterRefchecks {
val rhs = gen.newModule(sym, vsym.tpe)
val body = if (sym.owner.isTrait) rhs else gen.mkAssignAndReturn(vsym, rhs)
DefDef(sym, body.changeOwner(vsym -> sym))
@@ -1076,66 +1314,47 @@ abstract class RefChecks extends InfoTransform {
}
)
transformTrees(cdef :: {
- if (sym.isStatic)
- if (sym.allOverriddenSymbols.isEmpty) Nil
- else List(createStaticModuleAccessor())
- else createInnerModuleAccessor(findOrCreateModuleVar)
+ if (!sym.isStatic)
+ createInnerModuleAccessor(findOrCreateModuleVar)
+ else if (sym.isOverridingSymbol)
+ List(createStaticModuleAccessor())
+ else
+ Nil
})
}
- /** Implements lazy value accessors:
- * - for lazy values of type Unit and all lazy fields inside traits,
- * the rhs is the initializer itself
- * - for all other lazy values z the accessor is a block of this form:
- * { z = <rhs>; z } where z can be an identifier or a field.
- */
- private def makeLazyAccessor(tree: Tree, rhs: Tree): List[Tree] = {
- val vsym = tree.symbol
- assert(vsym.isTerm, vsym)
- val hasUnitType = vsym.tpe.typeSymbol == UnitClass
- val lazySym = vsym.lazyAccessor
- assert(lazySym != NoSymbol, vsym)
-
- // for traits, this is further transformed in mixins
- val body = (
- if (tree.symbol.owner.isTrait || hasUnitType) rhs
- else gen.mkAssignAndReturn(vsym, rhs)
- )
- val lazyDef = atPos(tree.pos)(DefDef(lazySym, body.changeOwner(vsym -> lazySym)))
- log("Made lazy def: " + lazyDef)
-
- if (hasUnitType) List(typed(lazyDef))
- else List(
- typed(ValDef(vsym)),
- atPhase(phase.next)(typed(lazyDef))
- )
- }
-
def transformStat(tree: Tree, index: Int): List[Tree] = tree match {
+ case t if treeInfo.isSelfConstrCall(t) =>
+ assert(index == 0, index)
+ try transform(tree) :: Nil
+ finally if (currentLevel.maxindex > 0) {
+ // An implementation restriction to avoid VerifyErrors and lazyvals mishaps; see SI-4717
+ debuglog("refsym = " + currentLevel.refsym)
+ unit.error(currentLevel.refpos, "forward reference not allowed from self constructor invocation")
+ }
case ModuleDef(_, _, _) => eliminateModuleDefs(tree)
case ValDef(_, _, _, _) =>
val tree1 @ ValDef(_, _, _, rhs) = transform(tree) // important to do before forward reference check
- if (tree.symbol.isLazy)
- makeLazyAccessor(tree, rhs)
+ if (tree1.symbol.isLazy) tree1 :: Nil
else {
val lazySym = tree.symbol.lazyAccessorOrSelf
if (lazySym.isLocal && index <= currentLevel.maxindex) {
- if (settings.debug.value)
- Console.println(currentLevel.refsym)
+ debuglog("refsym = " + currentLevel.refsym)
unit.error(currentLevel.refpos, "forward reference extends over definition of " + lazySym)
}
- List(tree1)
+ tree1 :: Nil
}
- case Import(_, _) => Nil
- case _ => List(transform(tree))
+ case Import(_, _) => Nil
+ case DefDef(mods, _, _, _, _, _) if (mods hasFlag MACRO) || (tree.symbol hasFlag MACRO) => Nil
+ case _ => transform(tree) :: Nil
}
/* Check whether argument types conform to bounds of type parameters */
- private def checkBounds(pre: Type, owner: Symbol, tparams: List[Symbol], argtps: List[Type], pos: Position): Unit =
- try typer.infer.checkBounds(pos, pre, owner, tparams, argtps, "")
+ private def checkBounds(tree0: Tree, pre: Type, owner: Symbol, tparams: List[Symbol], argtps: List[Type]): Unit =
+ try typer.infer.checkBounds(tree0, pre, owner, tparams, argtps, "")
catch {
case ex: TypeError =>
- unit.error(pos, ex.getMessage());
+ unit.error(tree0.pos, ex.getMessage())
if (settings.explaintypes.value) {
val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, argtps).bounds)
(argtps, bounds).zipped map ((targ, bound) => explainTypes(bound.lo, targ))
@@ -1174,14 +1393,43 @@ abstract class RefChecks extends InfoTransform {
* indicating it has changed semantics between versions.
*/
private def checkMigration(sym: Symbol, pos: Position) = {
- if (sym.hasMigrationAnnotation)
- unit.warning(pos, "%s has changed semantics in version %s:\n%s".format(
- sym.fullLocationString, sym.migrationVersion.get, sym.migrationMessage.get)
- )
+ if (sym.hasMigrationAnnotation) {
+ val changed = try
+ settings.Xmigration.value < ScalaVersion(sym.migrationVersion.get)
+ catch {
+ case e : NumberFormatException =>
+ unit.warning(pos, s"${sym.fullLocationString} has an unparsable version number: ${e.getMessage()}")
+ // if we can't parse the format on the migration annotation just conservatively assume it changed
+ true
+ }
+ if (changed)
+ unit.warning(pos, s"${sym.fullLocationString} has changed semantics in version ${sym.migrationVersion.get}:\n${sym.migrationMessage.get}")
+ }
+ }
+
+ private def checkCompileTimeOnly(sym: Symbol, pos: Position) = {
+ if (sym.isCompileTimeOnly) {
+ def defaultMsg =
+ sm"""Reference to ${sym.fullLocationString} should not have survived past type checking,
+ |it should have been processed and eliminated during expansion of an enclosing macro."""
+ // The getOrElse part should never happen, it's just here as a backstop.
+ unit.error(pos, sym.compileTimeOnlyMessage getOrElse defaultMsg)
+ }
+ }
+
+ private def checkDelayedInitSelect(qual: Tree, sym: Symbol, pos: Position) = {
+ def isLikelyUninitialized = (
+ (sym.owner isSubClass DelayedInitClass)
+ && !qual.tpe.isInstanceOf[ThisType]
+ && sym.accessedOrSelf.isVal
+ )
+ if (settings.lint.value && isLikelyUninitialized)
+ unit.warning(pos, s"Selecting ${sym} from ${sym.owner}, which extends scala.DelayedInit, is likely to yield an uninitialized value")
}
private def lessAccessible(otherSym: Symbol, memberSym: Symbol): Boolean = (
(otherSym != NoSymbol)
+ && !otherSym.isProtected
&& !otherSym.isTypeParameterOrSkolem
&& !otherSym.isExistentiallyBound
&& (otherSym isLessAccessibleThan memberSym)
@@ -1216,22 +1464,27 @@ abstract class RefChecks extends InfoTransform {
otherSym.decodedName, cannot, memberSym.decodedName)
)
}
+
/** Warn about situations where a method signature will include a type which
* has more restrictive access than the method itself.
*/
private def checkAccessibilityOfReferencedTypes(tree: Tree) {
val member = tree.symbol
- // types of the value parameters
- member.paramss.flatten foreach { p =>
- val normalized = p.tpe.normalize
- if ((normalized ne p.tpe) && lessAccessibleSymsInType(normalized, member).isEmpty) ()
- else lessAccessibleSymsInType(p.tpe, member) foreach (sym => warnLessAccessible(sym, member))
+ def checkAccessibilityOfType(tpe: Type) {
+ val inaccessible = lessAccessibleSymsInType(tpe, member)
+ // if the unnormalized type is accessible, that's good enough
+ if (inaccessible.isEmpty) ()
+ // or if the normalized type is, that's good too
+ else if ((tpe ne tpe.normalize) && lessAccessibleSymsInType(tpe.normalize, member).isEmpty) ()
+ // otherwise warn about the inaccessible syms in the unnormalized type
+ else inaccessible foreach (sym => warnLessAccessible(sym, member))
}
+
+ // types of the value parameters
+ mapParamss(member)(p => checkAccessibilityOfType(p.tpe))
// upper bounds of type parameters
- member.typeParams.map(_.info.bounds.hi.widen) foreach { tp =>
- lessAccessibleSymsInType(tp, member) foreach (sym => warnLessAccessible(sym, member))
- }
+ member.typeParams.map(_.info.bounds.hi.widen) foreach checkAccessibilityOfType
}
/** Check that a deprecated val or def does not override a
@@ -1253,28 +1506,53 @@ abstract class RefChecks extends InfoTransform {
}
private def isRepeatedParamArg(tree: Tree) = currentApplication match {
case Apply(fn, args) =>
- !args.isEmpty && (args.last eq tree) &&
- fn.tpe.params.length == args.length && isRepeatedParamType(fn.tpe.params.last.tpe)
+ ( args.nonEmpty
+ && (args.last eq tree)
+ && (fn.tpe.params.length == args.length)
+ && isRepeatedParamType(fn.tpe.params.last.tpe)
+ )
case _ =>
false
}
- private def checkTypeRef(tp: Type, pos: Position) = tp match {
+ private def checkTypeRef(tp: Type, tree: Tree, skipBounds: Boolean) = tp match {
case TypeRef(pre, sym, args) =>
- checkDeprecated(sym, pos)
+ tree match {
+ case tt: TypeTree if tt.original == null => // SI-7783 don't warn about inferred types
+ case _ =>
+ checkDeprecated(sym, tree.pos)
+ }
if(sym.isJavaDefined)
sym.typeParams foreach (_.cookJavaRawInfo())
- if (!tp.isHigherKinded)
- checkBounds(pre, sym.owner, sym.typeParams, args, pos)
+ if (!tp.isHigherKinded && !skipBounds)
+ checkBounds(tree, pre, sym.owner, sym.typeParams, args)
case _ =>
}
- private def checkAnnotations(tpes: List[Type], pos: Position) = tpes foreach (tp => checkTypeRef(tp, pos))
+ private def checkTypeRefBounds(tp: Type, tree: Tree) = {
+ var skipBounds = false
+ tp match {
+ case AnnotatedType(ann :: Nil, underlying, selfSym) if ann.symbol == UncheckedBoundsClass =>
+ skipBounds = true
+ underlying
+ case TypeRef(pre, sym, args) =>
+ if (!tp.isHigherKinded && !skipBounds)
+ checkBounds(tree, pre, sym.owner, sym.typeParams, args)
+ tp
+ case _ =>
+ tp
+ }
+ }
+
+ private def checkAnnotations(tpes: List[Type], tree: Tree) = tpes foreach { tp =>
+ checkTypeRef(tp, tree, skipBounds = false)
+ checkTypeRefBounds(tp, tree)
+ }
private def doTypeTraversal(tree: Tree)(f: Type => Unit) = if (!inPattern) tree.tpe foreach f
private def applyRefchecksToAnnotations(tree: Tree): Unit = {
def applyChecks(annots: List[AnnotationInfo]) = {
- checkAnnotations(annots map (_.atp), tree.pos)
+ checkAnnotations(annots map (_.atp), tree)
transformTrees(annots flatMap (_.args))
}
@@ -1286,17 +1564,20 @@ abstract class RefChecks extends InfoTransform {
analyzer.ImplicitNotFoundMsg.check(sym) foreach { warn =>
unit.warning(tree.pos, "Invalid implicitNotFound message for %s%s:\n%s".format(sym, sym.locationString, warn))
}
+
case tpt at TypeTree() =>
if(tpt.original != null) {
tpt.original foreach {
- case dc at TypeTreeWithDeferredRefCheck() => applyRefchecksToAnnotations(dc.check()) // #2416
+ case dc at TypeTreeWithDeferredRefCheck() =>
+ applyRefchecksToAnnotations(dc.check()) // #2416
case _ =>
}
}
doTypeTraversal(tree) {
- case AnnotatedType(annots, _, _) => applyChecks(annots)
- case _ =>
+ case tp @ AnnotatedType(annots, _, _) =>
+ applyChecks(annots)
+ case tp =>
}
case _ =>
}
@@ -1305,8 +1586,29 @@ abstract class RefChecks extends InfoTransform {
private def transformCaseApply(tree: Tree, ifNot: => Unit) = {
val sym = tree.symbol
- if (sym.isSourceMethod && sym.isCase && sym.name == nme.apply)
+ def isClassTypeAccessible(tree: Tree): Boolean = tree match {
+ case TypeApply(fun, targs) =>
+ isClassTypeAccessible(fun)
+ case Select(module, apply) =>
+ // Fixes SI-5626. Classes in refinement types cannot be constructed with `new`. In this case,
+ // the companion class is actually not a ClassSymbol, but a reference to an abstract type.
+ module.symbol.companionClass.isClass
+ }
+
+ val doTransform =
+ sym.isSourceMethod &&
+ sym.isCase &&
+ sym.name == nme.apply &&
+ isClassTypeAccessible(tree)
+
+ if (doTransform) {
+ tree foreach {
+ case i at Ident(_) =>
+ enterReference(i.pos, i.symbol) // SI-5390 need to `enterReference` for `a` in `a.B()`
+ case _ =>
+ }
toConstructor(tree.pos, tree.tpe)
+ }
else {
ifNot
tree
@@ -1315,7 +1617,7 @@ abstract class RefChecks extends InfoTransform {
private def transformApply(tree: Apply): Tree = tree match {
case Apply(
- Select(qual, nme.filter),
+ Select(qual, nme.filter | nme.withFilter),
List(Function(
List(ValDef(_, pname, tpt, _)),
Match(_, CaseDef(pat1, _, _) :: _))))
@@ -1324,28 +1626,12 @@ abstract class RefChecks extends InfoTransform {
transform(qual)
- case Apply(Select(New(tpt), name), args)
- if (tpt.tpe.typeSymbol == ArrayClass && args.length >= 2) =>
- unit.deprecationWarning(tree.pos,
- "new Array(...) with multiple dimensions has been deprecated; use Array.ofDim(...) instead")
- val manif = {
- var etpe = tpt.tpe
- for (_ <- args) { etpe = etpe.typeArgs.headOption.getOrElse(NoType) }
- if (etpe == NoType) {
- unit.error(tree.pos, "too many dimensions for array creation")
- Literal(Constant(null))
- } else {
- localTyper.getManifestTree(tree.pos, etpe, false)
- }
- }
- val newResult = localTyper.typedPos(tree.pos) {
- new ApplyToImplicitArgs(Apply(Select(gen.mkAttributedRef(ArrayModule), nme.ofDim), args), List(manif))
- }
- currentApplication = tree
- newResult
-
case Apply(fn, args) =>
- checkSensible(tree.pos, fn, args)
+ // sensicality should be subsumed by the unreachability/exhaustivity/irrefutability analyses in the pattern matcher
+ if (!inPattern) {
+ checkImplicitViewOptionApply(tree.pos, fn, args)
+ checkSensible(tree.pos, fn, args)
+ }
currentApplication = tree
tree
}
@@ -1360,10 +1646,15 @@ abstract class RefChecks extends InfoTransform {
* arbitrarily choose one as more important than the other.
*/
checkDeprecated(sym, tree.pos)
- if (settings.Xmigration28.value)
+ if(settings.Xmigration.value != NoScalaVersion)
checkMigration(sym, tree.pos)
+ checkCompileTimeOnly(sym, tree.pos)
+ checkDelayedInitSelect(qual, sym, tree.pos)
- if (currentClass != sym.owner && sym.hasLocalFlag) {
+ if (sym eq NoSymbol) {
+ unit.warning(tree.pos, "Select node has NoSymbol! " + tree + " / " + tree.tpe)
+ }
+ else if (currentClass != sym.owner && sym.hasLocalFlag) {
var o = currentClass
var hidden = false
while (!hidden && o != sym.owner && o != sym.owner.moduleClass && !o.isPackage) {
@@ -1375,7 +1666,7 @@ abstract class RefChecks extends InfoTransform {
def checkSuper(mix: Name) =
// term should have been eliminated by super accessors
- assert(!(qual.symbol.isTrait && sym.isTerm && mix == tpnme.EMPTY))
+ assert(!(qual.symbol.isTrait && sym.isTerm && mix == tpnme.EMPTY), (qual.symbol, sym, mix))
transformCaseApply(tree,
qual match {
@@ -1387,7 +1678,7 @@ abstract class RefChecks extends InfoTransform {
private def transformIf(tree: If): Tree = {
val If(cond, thenpart, elsepart) = tree
def unitIfEmpty(t: Tree): Tree =
- if (t == EmptyTree) Literal(()).setPos(tree.pos).setType(UnitClass.tpe) else t
+ if (t == EmptyTree) Literal(Constant()).setPos(tree.pos).setType(UnitClass.tpe) else t
cond.tpe match {
case ConstantType(value) =>
@@ -1402,12 +1693,24 @@ abstract class RefChecks extends InfoTransform {
case NullaryMethodType(restpe) if restpe.typeSymbol == UnitClass =>
// this may be the implementation of e.g. a generic method being parameterized
// on Unit, in which case we had better let it slide.
- if (sym.isGetter || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType))) ()
- else unit.warning(sym.pos,
- "side-effecting nullary methods are discouraged: suggest defining as `def %s()` instead".format(
- sym.name.decode)
+ val isOk = (
+ sym.isGetter
+ || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType))
+ || (sym.name containsName nme.DEFAULT_GETTER_STRING)
)
- case _ => ()
+ if (!isOk)
+ unit.warning(sym.pos, s"side-effecting nullary methods are discouraged: suggest defining as `def ${sym.name.decode}()` instead")
+ case _ => ()
+ }
+
+ // Verify classes extending AnyVal meet the requirements
+ private def checkAnyValSubclass(clazz: Symbol) = {
+ if ((clazz isSubClass AnyValClass) && !isPrimitiveValueClass(clazz)) {
+ if (clazz.isTrait)
+ unit.error(clazz.pos, "Only classes (not traits) are allowed to extend AnyVal")
+ else if ((clazz != AnyValClass) && clazz.hasFlag(ABSTRACT))
+ unit.error(clazz.pos, "`abstract' modifier cannot be used with value classes")
+ }
}
override def transform(tree: Tree): Tree = {
@@ -1421,13 +1724,13 @@ abstract class RefChecks extends InfoTransform {
// inside annotations.
applyRefchecksToAnnotations(tree)
var result: Tree = tree match {
- case DefDef(mods, name, tparams, vparams, tpt, EmptyTree) if tree.symbol.hasAnnotation(NativeAttr) =>
- tree.symbol.resetFlag(DEFERRED)
- transform(treeCopy.DefDef(tree, mods, name, tparams, vparams, tpt,
- typed(Apply(gen.mkAttributedRef(Predef_error), List(Literal("native method stub"))))))
+ case DefDef(_, _, _, _, _, EmptyTree) if sym hasAnnotation NativeAttr =>
+ sym resetFlag DEFERRED
+ transform(deriveDefDef(tree)(_ => typed(gen.mkSysErrorCall("native method stub"))))
case ValDef(_, _, _, _) | DefDef(_, _, _, _, _, _) =>
checkDeprecatedOvers(tree)
+ checkInfiniteLoop(tree.asInstanceOf[ValOrDefDef])
if (settings.warnNullaryUnit.value)
checkNullaryMethodReturnType(sym)
if (settings.warnInaccessible.value) {
@@ -1442,11 +1745,10 @@ abstract class RefChecks extends InfoTransform {
checkOverloadedRestrictions(currentOwner)
val bridges = addVarargBridges(currentOwner)
checkAllOverrides(currentOwner)
+ checkAnyValSubclass(currentOwner)
+ if (bridges.nonEmpty) deriveTemplate(tree)(_ ::: bridges) else tree
- if (bridges.nonEmpty) treeCopy.Template(tree, parents, self, body ::: bridges)
- else tree
-
- case dc at TypeTreeWithDeferredRefCheck() => assert(false, "adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc"); dc
+ case dc at TypeTreeWithDeferredRefCheck() => abort("adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc")
case tpt at TypeTree() =>
if(tpt.original != null) {
tpt.original foreach {
@@ -1458,20 +1760,31 @@ abstract class RefChecks extends InfoTransform {
}
val existentialParams = new ListBuffer[Symbol]
- doTypeTraversal(tree) { // check all bounds, except those that are
- // existential type parameters
- case ExistentialType(tparams, tpe) =>
+ var skipBounds = false
+ // check all bounds, except those that are existential type parameters
+ // or those within typed annotated with @uncheckedBounds
+ doTypeTraversal(tree) {
+ case tp @ ExistentialType(tparams, tpe) =>
existentialParams ++= tparams
- case t: TypeRef =>
- val exparams = existentialParams.toList
- val wildcards = exparams map (_ => WildcardType)
- checkTypeRef(t.subst(exparams, wildcards), tree.pos)
+ case ann: AnnotatedType if ann.hasAnnotation(UncheckedBoundsClass) =>
+ // SI-7694 Allow code synthetizers to disable checking of bounds for TypeTrees based on inferred LUBs
+ // which might not conform to the constraints.
+ skipBounds = true
+ case tp: TypeRef =>
+ val tpWithWildcards = deriveTypeWithWildcards(existentialParams.toList)(tp)
+ checkTypeRef(tpWithWildcards, tree, skipBounds)
case _ =>
}
+ if (skipBounds) {
+ tree.tpe = tree.tpe.map {
+ _.filterAnnotations(_.symbol != UncheckedBoundsClass)
+ }
+ }
+
tree
case TypeApply(fn, args) =>
- checkBounds(NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe), tree.pos)
+ checkBounds(tree, NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe))
transformCaseApply(tree, ())
case x @ Apply(_, _) =>
@@ -1509,12 +1822,21 @@ abstract class RefChecks extends InfoTransform {
case _ => tree
}
+ // skip refchecks in patterns....
result = result match {
case CaseDef(pat, guard, body) =>
inPattern = true
val pat1 = transform(pat)
inPattern = false
treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
+ case LabelDef(_, _, _) if treeInfo.hasSynthCaseSymbol(result) =>
+ val old = inPattern
+ inPattern = true
+ val res = deriveLabelDef(result)(transform) // TODO SI-7756 Too broad! The code from the original case body should be fully refchecked!
+ inPattern = old
+ res
+ case ValDef(_, _, _, _) if treeInfo.hasSynthCaseSymbol(result) =>
+ deriveValDef(result)(transform) // SI-7716 Don't refcheck the tpt of the synthetic val that holds the selector.
case _ =>
super.transform(result)
}
@@ -1528,7 +1850,7 @@ abstract class RefChecks extends InfoTransform {
result
} catch {
case ex: TypeError =>
- if (settings.debug.value) ex.printStackTrace();
+ if (settings.debug.value) ex.printStackTrace()
unit.error(tree.pos, ex.getMessage())
tree
} finally {
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
new file mode 100644
index 0000000..64c5b41
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -0,0 +1,10 @@
+package scala.tools.nsc
+package typechecker
+
+trait StdAttachments {
+ self: Analyzer =>
+
+ type UnaffiliatedMacroContext = scala.reflect.macros.runtime.Context
+ type MacroContext = UnaffiliatedMacroContext { val universe: self.global.type }
+ case class MacroRuntimeAttachment(delayed: Boolean, typerContext: Context, macroContext: Option[MacroContext])
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index fb4f335..bad4938 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -1,64 +1,88 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package typechecker
-import scala.collection.mutable.ListBuffer
+import scala.collection.{ mutable, immutable }
+import mutable.ListBuffer
import symtab.Flags._
-/** This phase adds super accessors for all super calls that
- * either appear in a trait or have as a target a member of some outer class.
- * It also replaces references to parameter accessors with aliases by super
- * references to these aliases.
- * The phase also checks that symbols accessed from super are not abstract,
- * or are overridden by an abstract override.
- * Finally, the phase also mangles the names of class-members which are private
- * up to an enclosing non-package class, in order to avoid overriding conflicts.
+/** This phase adds super accessors for all super calls that either
+ * appear in a trait or have as a target a member of some outer class.
+ * It also replaces references to parameter accessors with aliases
+ * by super references to these aliases. The phase also checks that
+ * symbols accessed from super are not abstract, or are overridden by
+ * an abstract override. Finally, the phase also mangles the names
+ * of class-members which are private up to an enclosing non-package
+ * class, in order to avoid overriding conflicts.
+ *
+ * This phase also sets SPECIALIZED flag on type parameters with
+ * `@specialized` annotation. We put this logic here because the
+ * flag must be set before pickling.
*
* @author Martin Odersky
* @version 1.0
*/
abstract class SuperAccessors extends transform.Transform with transform.TypingTransformers {
- // inherits abstract value `global' and class `Phase' from Transform
-
import global._
- import definitions.{ UnitClass, isRepeatedParamType, isByNameParamType, Any_asInstanceOf }
+ import definitions.{ UnitClass, ObjectClass, isRepeatedParamType, isByNameParamType, Any_asInstanceOf }
import analyzer.{ restrictionError }
/** the following two members override abstract members in Transform */
val phaseName: String = "superaccessors"
+ /** The following flags may be set by this phase: */
+ override def phaseNewFlags: Long = notPRIVATE
+
protected def newTransformer(unit: CompilationUnit): Transformer =
new SuperAccTransformer(unit)
class SuperAccTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ /** validCurrentOwner arrives undocumented, but I reverse engineer it to be
+ * a flag for needsProtectedAccessor which is false while transforming either
+ * a by-name argument block or a closure. This excludes them from being
+ * considered able to access protected members via subclassing (why?) which in turn
+ * increases the frequency with which needsProtectedAccessor will be true.
+ */
private var validCurrentOwner = true
- private var accDefs: List[(Symbol, ListBuffer[Tree])] = List()
+ private val accDefs = mutable.Map[Symbol, ListBuffer[Tree]]()
- private def accDefBuf(clazz: Symbol) =
- accDefs collectFirst { case (`clazz`, buf) => buf } getOrElse sys.error("no acc def buf for "+clazz)
+ private def storeAccessorDefinition(clazz: Symbol, tree: Tree) = {
+ val buf = accDefs.getOrElse(clazz, sys.error("no acc def buf for "+clazz))
+ buf += typers(clazz) typed tree
+ }
+ private def ensureAccessor(sel: Select) = {
+ val Select(qual, name) = sel
+ val sym = sel.symbol
+ val clazz = qual.symbol
+ val supername = nme.superName(name)
+ val superAcc = clazz.info.decl(supername).suchThat(_.alias == sym) orElse {
+ debuglog("add super acc " + sym + sym.locationString + " to `" + clazz);//debug
+ val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE) setAlias sym
+ val tpe = clazz.thisType memberType sym match {
+ case t if sym.isModule && !sym.isMethod => NullaryMethodType(t)
+ case t => t
+ }
+ acc setInfoAndEnter (tpe cloneInfo acc)
+ // Diagnostic for SI-7091
+ if (!accDefs.contains(clazz))
+ reporter.error(sel.pos, s"Internal error: unable to store accessor definition in ${clazz}. clazz.isPackage=${clazz.isPackage}. Accessor required for ${sel} (${showRaw(sel)})")
+ else storeAccessorDefinition(clazz, DefDef(acc, EmptyTree))
+ acc
+ }
+
+ atPos(sel.pos)(Select(gen.mkAttributedThis(clazz), superAcc) setType sel.tpe)
+ }
- private def transformArgs(args: List[Tree], params: List[Symbol]) =
- ((args, params).zipped map { (arg, param) =>
+ private def transformArgs(params: List[Symbol], args: List[Tree]) = {
+ treeInfo.mapMethodParamsAndArgs(params, args) { (param, arg) =>
if (isByNameParamType(param.tpe))
- withInvalidOwner { checkPackedConforms(transform(arg), param.tpe.typeArgs.head) }
+ withInvalidOwner(transform(arg))
else transform(arg)
- }) :::
- (args drop params.length map transform)
-
- private def checkPackedConforms(tree: Tree, pt: Type): Tree = {
- if (tree.tpe exists (_.typeSymbol.isExistentialSkolem)) {
- val packed = localTyper.packedType(tree, NoSymbol)
- if (!(packed <:< pt)) {
- val errorContext = localTyper.context.make(localTyper.context.tree)
- errorContext.reportGeneralErrors = true
- analyzer.newTyper(errorContext).infer.typeError(tree.pos, packed, pt)
- }
}
- tree
}
/** Check that a class and its companion object to not both define
@@ -78,54 +102,36 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
}
- private def transformSuperSelect(tree: Tree): Tree = tree match {
- case Select(sup @ Super(_, mix), name) =>
- val sym = tree.symbol
- val clazz = sup.symbol
-
- if (sym.isDeferred) {
- val member = sym.overridingSymbol(clazz);
- if (mix != tpnme.EMPTY || member == NoSymbol ||
- !((member hasFlag ABSOVERRIDE) && member.isIncompleteIn(clazz)))
- unit.error(tree.pos, ""+sym+sym.locationString+" is accessed from super. It may not be abstract "+
- "unless it is overridden by a member declared `abstract' and `override'");
- }
- if (tree.isTerm && mix == tpnme.EMPTY &&
- (clazz.isTrait || clazz != currentOwner.enclClass || !validCurrentOwner)) {
- val supername = nme.superName(sym.name)
- var superAcc = clazz.info.decl(supername).suchThat(_.alias == sym)
- if (superAcc == NoSymbol) {
- if (settings.debug.value) log("add super acc " + sym + sym.locationString + " to `" + clazz);//debug
- superAcc =
- clazz.newMethod(tree.pos, supername)
- .setFlag(SUPERACCESSOR | PRIVATE)
- .setAlias(sym)
- var superAccTpe = clazz.thisType.memberType(sym)
- if (sym.isModule && !sym.isMethod) {
- // the super accessor always needs to be a method. See #231
- superAccTpe = NullaryMethodType(superAccTpe)
- }
- superAcc.setInfo(superAccTpe.cloneInfo(superAcc))
- //println("creating super acc "+superAcc+":"+superAcc.tpe)//DEBUG
- clazz.info.decls enter superAcc;
- accDefBuf(clazz) += typers(clazz).typed(DefDef(superAcc, EmptyTree))
- }
- atPos(sup.pos) {
- Select(gen.mkAttributedThis(clazz), superAcc) setType tree.tpe;
- }
- } else {
- tree
+ private def transformSuperSelect(sel: Select): Tree = {
+ val Select(sup @ Super(_, mix), name) = sel
+ val sym = sel.symbol
+ val clazz = sup.symbol
+
+ if (sym.isDeferred) {
+ val member = sym.overridingSymbol(clazz);
+ if (mix != tpnme.EMPTY || member == NoSymbol ||
+ !(member.isAbstractOverride && member.isIncompleteIn(clazz)))
+ unit.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+
+ "unless it is overridden by a member declared `abstract' and `override'");
+ } else if (mix == tpnme.EMPTY && !sym.owner.isTrait){
+ // SI-4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract.
+ val intermediateClasses = clazz.info.baseClasses.tail.takeWhile(_ != sym.owner)
+ intermediateClasses.map(sym.overridingSymbol).find(s => s.isDeferred && !s.isAbstractOverride && !s.owner.isTrait).foreach {
+ absSym =>
+ unit.error(sel.pos, s"${sym.fullLocationString} cannot be directly accessed from ${clazz} because ${absSym.owner} redeclares it as abstract")
}
- case _ =>
- assert(tree.tpe.isError, tree)
- tree
+ }
+
+ if (name.isTermName && mix == tpnme.EMPTY && (clazz.isTrait || clazz != currentClass || !validCurrentOwner))
+ ensureAccessor(sel)
+ else sel
}
// Disallow some super.XX calls targeting Any methods which would
// otherwise lead to either a compiler crash or runtime failure.
private lazy val isDisallowed = {
import definitions._
- Set(Any_isInstanceOf, Object_isInstanceOf, Any_asInstanceOf, Object_asInstanceOf, Object_==, Object_!=, Object_##)
+ Set[Symbol](Any_isInstanceOf, Object_isInstanceOf, Any_asInstanceOf, Object_asInstanceOf, Object_==, Object_!=, Object_##)
}
override def transform(tree: Tree): Tree = {
@@ -133,8 +139,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
def mayNeedProtectedAccessor(sel: Select, args: List[Tree], goToSuper: Boolean) =
if (needsProtectedAccessor(sym, tree.pos)) {
- if (settings.debug.value)
- log("Adding protected accessor for " + tree)
+ debuglog("Adding protected accessor for " + tree)
transform(makeAccessor(sel, args))
}
@@ -147,14 +152,17 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
treeCopy.CaseDef(tree, pat, transform(guard), transform(body))
case ClassDef(_, _, _, _) =>
+ def transformClassDef = {
checkCompanionNameClashes(sym)
val decls = sym.info.decls
- for (s <- decls.toList) {
+ for (s <- decls) {
if (s.privateWithin.isClass && !s.isProtected && !s.privateWithin.isModuleClass &&
!s.hasFlag(EXPANDEDNAME) && !s.isConstructor) {
+ val savedName = s.name
decls.unlink(s)
s.expandName(s.privateWithin)
decls.enter(s)
+ log("Expanded '%s' to '%s' in %s".format(savedName, s.name, sym))
}
}
if (settings.verbose.value && forScaladoc && !sym.isAnonymousClass) {
@@ -170,75 +178,162 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
}
super.transform(tree)
+ }
+ transformClassDef
+
case ModuleDef(_, _, _) =>
checkCompanionNameClashes(sym)
super.transform(tree)
- case Template(parents, self, body) =>
- val ownAccDefs = new ListBuffer[Tree];
- accDefs = (currentOwner, ownAccDefs) :: accDefs;
+
+ case Template(_, _, body) =>
+ def transformTemplate = {
+ val ownAccDefs = new ListBuffer[Tree]
+ accDefs(currentOwner) = ownAccDefs
// ugly hack... normally, the following line should not be
// necessary, the 'super' method taking care of that. but because
// that one is iterating through parents (and we dont want that here)
// we need to inline it.
curTree = tree
- val body1 = atOwner(currentOwner) { transformTrees(body) }
- accDefs = accDefs.tail;
- treeCopy.Template(tree, parents, self, ownAccDefs.toList ::: body1)
+ val body1 = atOwner(currentOwner)(transformTrees(body))
+ accDefs -= currentOwner
+ ownAccDefs ++= body1
+ deriveTemplate(tree)(_ => ownAccDefs.toList)
+ }
+ transformTemplate
case TypeApply(sel @ Select(This(_), name), args) =>
mayNeedProtectedAccessor(sel, args, false)
- case sel @ Select(qual @ This(_), name) =>
- // direct calls to aliases of param accessors to the superclass in order to avoid
- // duplicating fields.
- if (sym.isParamAccessor && sym.alias != NoSymbol) {
- val result = localTyper.typed {
- Select(
- Super(qual, tpnme.EMPTY/*qual.symbol.info.parents.head.symbol.name*/) setPos qual.pos,
- sym.alias) setPos tree.pos
- }
- if (settings.debug.value)
- log("alias replacement: " + tree + " ==> " + result);//debug
- localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, true))
- }
- else mayNeedProtectedAccessor(sel, List(EmptyTree), false)
+ // set a flag for all type parameters with `@specialized` annotation so it can be pickled
+ case typeDef: TypeDef if typeDef.symbol.deSkolemize.hasAnnotation(definitions.SpecializedClass) =>
+ debuglog("setting SPECIALIZED flag on typeDef.symbol.deSkolemize where typeDef = " + typeDef)
+ // we need to deSkolemize symbol so we get the same symbol as others would get when
+ // inspecting type parameter from "outside"; see the discussion of skolems here:
+ // https://groups.google.com/d/topic/scala-internals/0j8laVNTQsI/discussion
+ typeDef.symbol.deSkolemize.setFlag(SPECIALIZED)
+ typeDef
+
+ case sel @ Select(qual, name) =>
+ def transformSelect = {
+
+ // FIXME Once Inliners is modified with the "'meta-knowledge' that all fields accessed by @inline will be made public" [1]
+ // this can be removed; the correct place for this in in ExplicitOuter.
+ //
+ // [1] https://groups.google.com/forum/#!topic/scala-internals/iPkMCygzws4
+ //
+ if (closestEnclMethod(currentOwner) hasAnnotation definitions.ScalaInlineClass)
+ sym.makeNotPrivate(sym.owner)
+
+ qual match {
+ case This(_) =>
+ // warn if they are selecting a private[this] member which
+ // also exists in a superclass, because they may be surprised
+ // to find out that a constructor parameter will shadow a
+ // field. See SI-4762.
+ if (settings.lint.value) {
+ if (sym.isPrivateLocal && sym.paramss.isEmpty) {
+ qual.symbol.ancestors foreach { parent =>
+ parent.info.decls filterNot (x => x.isPrivate || x.hasLocalFlag) foreach { m2 =>
+ if (sym.name == m2.name && m2.isGetter && m2.accessed.isMutable) {
+ unit.warning(sel.pos,
+ sym.accessString + " " + sym.fullLocationString + " shadows mutable " + m2.name
+ + " inherited from " + m2.owner + ". Changes to " + m2.name + " will not be visible within "
+ + sym.owner + " - you may want to give them distinct names.")
+ }
+ }
+ }
+ }
+ }
- case Select(Super(_, mix), name) =>
- if (sym.isValue && !sym.isMethod || sym.hasAccessorFlag) {
- unit.error(tree.pos, "super may be not be used on "+
- (if (sym.hasAccessorFlag) sym.accessed else sym))
+
+ def isAccessibleFromSuper(sym: Symbol) = {
+ val pre = SuperType(sym.owner.tpe, qual.tpe)
+ localTyper.context.isAccessible(sym, pre, superAccess = true)
+ }
+
+ // Direct calls to aliases of param accessors to the superclass in order to avoid
+ // duplicating fields.
+ // ... but, only if accessible (SI-6793)
+ if (sym.isParamAccessor && sym.alias != NoSymbol && isAccessibleFromSuper(sym.alias)) {
+ val result = (localTyper.typedPos(tree.pos) {
+ Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias)
+ }).asInstanceOf[Select]
+ debuglog("alias replacement: " + tree + " ==> " + result); //debug
+ localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, true))
+ } else {
+ /**
+ * A trait which extends a class and accesses a protected member
+ * of that class cannot implement the necessary accessor method
+ * because its implementation is in an implementation class (e.g.
+ * Foo$class) which inherits nothing, and jvm access restrictions
+ * require the call site to be in an actual subclass. So non-trait
+ * classes inspect their ancestors for any such situations and
+ * generate the accessors. See SI-2296.
+ */
+ // FIXME - this should be unified with needsProtectedAccessor, but some
+ // subtlety which presently eludes me is foiling my attempts.
+ val shouldEnsureAccessor = (
+ currentClass.isTrait
+ && sym.isProtected
+ && sym.enclClass != currentClass
+ && !sym.owner.isPackageClass // SI-7091 no accessor needed package owned (ie, top level) symbols
+ && !sym.owner.isTrait
+ && (sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass)
+ && (qual.symbol.info.member(sym.name) ne NoSymbol)
+ && !needsProtectedAccessor(sym, tree.pos))
+ if (shouldEnsureAccessor) {
+ log("Ensuring accessor for call to protected " + sym.fullLocationString + " from " + currentClass)
+ ensureAccessor(sel)
+ }
+ else
+ mayNeedProtectedAccessor(sel, EmptyTree.asList, false)
+ }
+
+ case Super(_, mix) =>
+ if (sym.isValue && !sym.isMethod || sym.hasAccessorFlag) {
+ if (!settings.overrideVars.value)
+ unit.error(tree.pos, "super may be not be used on " + sym.accessedOrSelf)
+ } else if (isDisallowed(sym)) {
+ unit.error(tree.pos, "super not allowed here: use this." + name.decode + " instead")
+ }
+ transformSuperSelect(sel)
+
+ case _ =>
+ mayNeedProtectedAccessor(sel, EmptyTree.asList, true)
}
- else if (isDisallowed(sym)) {
- unit.error(tree.pos, "super not allowed here: use this." + name.decode + " instead")
}
- transformSuperSelect(tree)
+ transformSelect
+
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) if tree.symbol.isMethodWithExtension =>
+ treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, withInvalidOwner(transform(rhs)))
case TypeApply(sel @ Select(qual, name), args) =>
mayNeedProtectedAccessor(sel, args, true)
- case sel @ Select(qual, name) =>
- mayNeedProtectedAccessor(sel, List(EmptyTree), true)
-
case Assign(lhs @ Select(qual, name), rhs) =>
+ def transformAssign = {
if (lhs.symbol.isVariable &&
lhs.symbol.isJavaDefined &&
needsProtectedAccessor(lhs.symbol, tree.pos)) {
- if (settings.debug.value) log("Adding protected setter for " + tree)
+ debuglog("Adding protected setter for " + tree)
val setter = makeSetter(lhs);
- if (settings.debug.value)
- log("Replaced " + tree + " with " + setter);
+ debuglog("Replaced " + tree + " with " + setter);
transform(localTyper.typed(Apply(setter, List(qual, rhs))))
} else
super.transform(tree)
+ }
+ transformAssign
case Apply(fn, args) =>
assert(fn.tpe != null, tree)
- treeCopy.Apply(tree, transform(fn), transformArgs(args, fn.tpe.params))
+ treeCopy.Apply(tree, transform(fn), transformArgs(fn.tpe.params, args))
+
case Function(vparams, body) =>
withInvalidOwner {
treeCopy.Function(tree, vparams, transform(body))
}
+
case _ =>
super.transform(tree)
}
@@ -252,42 +347,46 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
}
- override def atOwner[A](owner: Symbol)(trans: => A): A = {
+ /** a typer for each enclosing class */
+ private var typers = immutable.Map[Symbol, analyzer.Typer]()
+
+ /** Specialized here for performance; the previous blanked
+ * introduction of typers in TypingTransformer caused a >5%
+ * performance hit for the compiler as a whole.
+ */
+ override def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = {
if (owner.isClass) validCurrentOwner = true
- super.atOwner(owner)(trans)
+ val savedLocalTyper = localTyper
+ localTyper = localTyper.atOwner(tree, if (owner.isModule) owner.moduleClass else owner)
+ typers = typers updated (owner, localTyper)
+ val result = super.atOwner(tree, owner)(trans)
+ localTyper = savedLocalTyper
+ typers -= owner
+ result
}
private def withInvalidOwner[A](trans: => A): A = {
- val prevValidCurrentOwner = validCurrentOwner
+ val saved = validCurrentOwner
validCurrentOwner = false
- val result = trans
- validCurrentOwner = prevValidCurrentOwner
- result
+ try trans
+ finally validCurrentOwner = saved
}
/** Add a protected accessor, if needed, and return a tree that calls
- * the accessor and returns the the same member. The result is already
+ * the accessor and returns the same member. The result is already
* typed.
*/
private def makeAccessor(tree: Select, targs: List[Tree]): Tree = {
val Select(qual, name) = tree
val sym = tree.symbol
- val clazz = hostForAccessorOf(sym, currentOwner.enclClass)
-
- /** Return a list of list of types of all value parameter sections. */
- def allParamTypes(tpe: Type): List[List[Type]] = tpe match {
- case PolyType(_, restpe) => allParamTypes(restpe)
- case MethodType(params, res) => params.map(_.tpe) :: allParamTypes(res)
- case _ => Nil
- }
-
+ val clazz = hostForAccessorOf(sym, currentClass)
assert(clazz != NoSymbol, sym)
- if (settings.debug.value) log("Decided for host class: " + clazz)
+ debuglog("Decided for host class: " + clazz)
- val accName = nme.protName(sym.originalName)
- val hasArgs = sym.tpe.paramTypes != Nil
- val memberType = refchecks.toScalaRepeatedParam(sym.tpe) // fix for #2413
+ val accName = nme.protName(sym.originalName)
+ val hasArgs = sym.tpe.paramSectionCount > 0
+ val memberType = refChecks.toScalaRepeatedParam(sym.tpe) // fix for #2413
// if the result type depends on the this type of an enclosing class, the accessor
// has to take an object of exactly this type, otherwise it's more general
@@ -302,31 +401,31 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
memberType.cloneInfo(protAcc).asSeenFrom(qual.tpe, sym.owner))
}
- var protAcc = clazz.info.decl(accName).suchThat(s => s == NoSymbol || s.tpe =:= accType(s))
- if (protAcc == NoSymbol) {
- protAcc = clazz.newMethod(tree.pos, nme.protName(sym.originalName))
- protAcc.setInfo(accType(protAcc))
- clazz.info.decls.enter(protAcc);
- val code = DefDef(protAcc, {
- val obj = protAcc.paramss.head.head // receiver
- protAcc.paramss.tail.zip(allParamTypes(sym.tpe)).foldLeft(Select(Ident(obj), sym): Tree) (
- (fun, pvparams) => {
- Apply(fun, (pvparams._1, pvparams._2).zipped map (makeArg(_, obj, _)))
- })
+ val protAcc = clazz.info.decl(accName).suchThat(s => s == NoSymbol || s.tpe =:= accType(s)) orElse {
+ val newAcc = clazz.newMethod(nme.protName(sym.originalName), tree.pos)
+ newAcc setInfoAndEnter accType(newAcc)
+
+ val code = DefDef(newAcc, {
+ val (receiver :: _) :: tail = newAcc.paramss
+ val base: Tree = Select(Ident(receiver), sym)
+ val allParamTypes = mapParamss(sym)(_.tpe)
+ val args = map2(tail, allParamTypes)((params, tpes) => map2(params, tpes)(makeArg(_, receiver, _)))
+ args.foldLeft(base)(Apply(_, _))
})
- if (settings.debug.value)
- log(code)
- accDefBuf(clazz) += typers(clazz).typed(code)
+ debuglog("" + code)
+ storeAccessorDefinition(clazz, code)
+ newAcc
}
- var res: Tree = atPos(tree.pos) {
- if (targs.head == EmptyTree)
- Apply(Select(This(clazz), protAcc), List(qual))
- else
- Apply(TypeApply(Select(This(clazz), protAcc), targs), List(qual))
+ val selection = Select(This(clazz), protAcc)
+ def mkApply(fn: Tree) = Apply(fn, qual :: Nil)
+ val res = atPos(tree.pos) {
+ targs.head match {
+ case EmptyTree => mkApply(selection)
+ case _ => mkApply(TypeApply(selection, targs))
+ }
}
- if (settings.debug.value)
- log("Replaced " + tree + " with " + res)
+ debuglog("Replaced " + tree + " with " + res)
if (hasArgs) localTyper.typedOperator(res) else localTyper.typed(res)
}
@@ -341,33 +440,17 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
*
* Also, it calls using repeated parameters are ascribed with ': _*'
*/
- private def makeArg(v: Symbol, obj: Symbol, expectedTpe: Type): Tree = {
- var res: Tree = Ident(v)
- val sym = obj.tpe.typeSymbol
- var ownerClass: Symbol = NoSymbol
-
- val isDependentType = expectedTpe match {
- case TypeRef(path, _, _) =>
- ownerClass = thisTypeOfPath(path)
- if (sym.isSubClass(ownerClass)) true else false
- case _ => false
- }
- if (isRepeatedParamType(v.info)) {
- res = gen.wildcardStar(res)
- log("adapted to wildcard star: " + res)
+ private def makeArg(v: Symbol, obj: Symbol, pt: Type): Tree = {
+ // owner class
+ val clazz = pt match {
+ case TypeRef(pre, _, _) => thisTypeOfPath(pre)
+ case _ => NoSymbol
}
- if (isDependentType) {
- val preciseTpe = expectedTpe.asSeenFrom(singleType(NoPrefix, obj), ownerClass) //typeRef(singleType(NoPrefix, obj), v.tpe.symbol, List())
- TypeApply(Select(res, Any_asInstanceOf),
- List(TypeTree(preciseTpe)))
- } else res
- }
-
- /** For a path-dependent type, return the this type. */
- private def thisTypeOfPath(path: Type): Symbol = path match {
- case ThisType(outerSym) => outerSym
- case SingleType(rest, _) => thisTypeOfPath(rest)
- case _ => NoSymbol
+ val result = gen.paramToArg(v)
+ if (clazz != NoSymbol && (obj.tpe.typeSymbol isSubClass clazz)) // path-dependent type
+ gen.mkAsInstanceOf(result, pt.asSeenFrom(singleType(NoPrefix, obj), clazz))
+ else
+ result
}
/** Add an accessor for field, if needed, and return a selection tree for it .
@@ -375,34 +458,27 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
*/
private def makeSetter(tree: Select): Tree = {
val field = tree.symbol
- val clazz = hostForAccessorOf(field, currentOwner.enclClass)
+ val clazz = hostForAccessorOf(field, currentClass)
assert(clazz != NoSymbol, field)
- if (settings.debug.value)
- log("Decided for host class: " + clazz)
+ debuglog("Decided for host class: " + clazz)
+
val accName = nme.protSetterName(field.originalName)
- var protAcc = clazz.info.decl(accName)
- if (protAcc == NoSymbol) {
- protAcc = clazz.newMethod(field.pos, nme.protSetterName(field.originalName))
- protAcc.setInfo(MethodType(protAcc.newSyntheticValueParams(List(clazz.typeOfThis, field.tpe)),
- UnitClass.tpe))
- clazz.info.decls.enter(protAcc)
- val code = DefDef(protAcc, {
- val obj :: value :: Nil = protAcc.paramss.head;
- atPos(tree.pos) {
- Assign(
- Select(Ident(obj), field.name),
- Ident(value))
- }
- })
- if (settings.debug.value)
- log(code);
- accDefBuf(clazz) += typers(clazz).typed(code)
+ val protectedAccessor = clazz.info decl accName orElse {
+ val protAcc = clazz.newMethod(accName, field.pos)
+ val paramTypes = List(clazz.typeOfThis, field.tpe)
+ val params = protAcc newSyntheticValueParams paramTypes
+ val accessorType = MethodType(params, UnitClass.tpe)
+
+ protAcc setInfoAndEnter accessorType
+ val obj :: value :: Nil = params
+ storeAccessorDefinition(clazz, DefDef(protAcc, Assign(Select(Ident(obj), field.name), Ident(value))))
+
+ protAcc
}
- var res: Tree = atPos(tree.pos) { Select(This(clazz), protAcc) }
- res
+ atPos(tree.pos)(Select(This(clazz), protectedAccessor))
}
- /** Does `sym' need an accessor when accessed from `currentOwner'?
+ /** Does `sym` need an accessor when accessed from `currentClass`?
* A special case arises for classes with explicit self-types. If the
* self type is a Java class, and a protected accessor is needed, we issue
* an error. If the self type is a Scala class, we don't add an accessor.
@@ -416,23 +492,20 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
* classes, this has to be signaled as error.
*/
private def needsProtectedAccessor(sym: Symbol, pos: Position): Boolean = {
- val clazz = currentOwner.enclClass
+ val clazz = currentClass
def accessibleThroughSubclassing =
validCurrentOwner && clazz.thisSym.isSubClass(sym.owner) && !clazz.isTrait
- def packageAccessBoundry(sym: Symbol) = {
- val b = sym.accessBoundary(sym.owner)
- if (b.isPackageClass) b
- else b.enclosingPackageClass
- }
+ def packageAccessBoundry(sym: Symbol) =
+ sym.accessBoundary(sym.enclosingPackageClass)
val isCandidate = (
sym.isProtected
&& sym.isJavaDefined
- && !sym.definedInPackage
+ && !sym.isDefinedInPackage
&& !accessibleThroughSubclassing
- && (sym.owner.enclosingPackageClass != currentOwner.enclosingPackageClass)
- && (sym.owner.enclosingPackageClass == packageAccessBoundry(sym))
+ && (sym.enclosingPackageClass != currentClass.enclosingPackageClass)
+ && (sym.enclosingPackageClass == sym.accessBoundary(sym.enclosingPackageClass))
)
val host = hostForAccessorOf(sym, clazz)
def isSelfType = !(host.tpe <:< host.typeOfThis) && {
@@ -444,9 +517,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
def isJavaProtected = host.isTrait && sym.isJavaDefined && {
restrictionError(pos, unit,
- """|%s accesses protected %s inside a concrete trait method.
- |Add an accessor in a class extending %s as a workaround.""".stripMargin.format(
- clazz, sym, sym.enclClass)
+ sm"""$clazz accesses protected $sym inside a concrete trait method.
+ |Add an accessor in a class extending ${sym.enclClass} as a workaround."""
)
true
}
@@ -462,23 +534,28 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
if (referencingClass.isSubClass(sym.owner.enclClass)
|| referencingClass.thisSym.isSubClass(sym.owner.enclClass)
|| referencingClass.enclosingPackageClass == sym.owner.enclosingPackageClass) {
- assert(referencingClass.isClass)
+ assert(referencingClass.isClass, referencingClass)
referencingClass
} else if(referencingClass.owner.enclClass != NoSymbol)
hostForAccessorOf(sym, referencingClass.owner.enclClass)
else referencingClass
}
+ /** For a path-dependent type, return the this type. */
+ private def thisTypeOfPath(path: Type): Symbol = path match {
+ case ThisType(outerSym) => outerSym
+ case SingleType(rest, _) => thisTypeOfPath(rest)
+ case _ => NoSymbol
+ }
+
/** Is 'tpe' the type of a member of an enclosing class? */
private def isThisType(tpe: Type): Boolean = tpe match {
- case ThisType(sym) => (sym.isClass && !sym.isPackageClass)
- case TypeRef(pref, _, _) => isThisType(pref)
- case SingleType(pref, _) => isThisType(pref)
- case RefinedType(parents, defs) =>
- parents.exists(isThisType(_))
- case AnnotatedType(_, tp, _) =>
- isThisType(tp)
- case _ => false
+ case ThisType(sym) => sym.isClass && !sym.isPackageClass
+ case TypeRef(pre, _, _) => isThisType(pre)
+ case SingleType(pre, _) => isThisType(pre)
+ case RefinedType(parents, _) => parents exists isThisType
+ case AnnotatedType(_, tp, _) => isThisType(tp)
+ case _ => false
}
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index 0917646..242eb9c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -32,302 +32,375 @@ import scala.collection.mutable.ListBuffer
trait SyntheticMethods extends ast.TreeDSL {
self: Analyzer =>
- import global._ // the global environment
- import definitions._ // standard classes and methods
-
- /** In general case classes/objects are not given synthetic equals methods if some
- * non-AnyRef implementation is inherited. However if you let a case object inherit
- * an implementation from a case class, it creates an asymmetric equals with all the
- * associated badness: see ticket #883. So if it sees such a thing this has happened
- * (by virtue of the symbol being in createdMethodSymbols) it re-overrides it with
- * reference equality.
- *
- * TODO: remove once (deprecated) case class inheritance is dropped form nsc.
- */
- private val createdMethodSymbols = new mutable.HashSet[Symbol]
-
- /** Clear the cache of createdMethodSymbols. */
- def resetSynthetics() {
- createdMethodSymbols.clear()
+ import global._
+ import definitions._
+ import CODE._
+
+ private lazy val productSymbols = List(Product_productPrefix, Product_productArity, Product_productElement, Product_iterator, Product_canEqual)
+ private lazy val valueSymbols = List(Any_hashCode, Any_equals)
+ private lazy val caseSymbols = List(Object_hashCode, Object_toString) ::: productSymbols
+ private lazy val caseValueSymbols = Any_toString :: valueSymbols ::: productSymbols
+ private lazy val caseObjectSymbols = Object_equals :: caseSymbols
+ private def symbolsToSynthesize(clazz: Symbol): List[Symbol] = {
+ if (clazz.isCase) {
+ if (clazz.isDerivedValueClass) caseValueSymbols
+ else if (clazz.isModuleClass) caseSymbols
+ else caseObjectSymbols
+ }
+ else if (clazz.isDerivedValueClass) valueSymbols
+ else Nil
}
+ private lazy val renamedCaseAccessors = perRunCaches.newMap[Symbol, mutable.Map[TermName, TermName]]()
+ /** Does not force the info of `caseclazz` */
+ final def caseAccessorName(caseclazz: Symbol, paramName: TermName) =
+ (renamedCaseAccessors get caseclazz).fold(paramName)(_(paramName))
- /** Add the synthetic methods to case classes. Note that a lot of the
- * complexity herein is a consequence of case classes inheriting from
- * case classes, which has been deprecated as of Sep 11 2009. So when
- * the opportunity for removal arises, this can be simplified.
+ /** Add the synthetic methods to case classes.
*/
- def addSyntheticMethods(templ: Template, clazz: Symbol, context: Context): Template = {
- val localTyper = newTyper(
- if (reporter.hasErrors) context makeSilent false else context
- )
-
- def hasOverridingImplementation(meth: Symbol): Boolean = {
- val sym = clazz.info nonPrivateMember meth.name
- def isOverride(s: Symbol) = {
- s != meth && !s.isDeferred && !s.isSynthetic && !createdMethodSymbols(s) &&
- (clazz.thisType.memberType(s) matches clazz.thisType.memberType(meth))
+ def addSyntheticMethods(templ: Template, clazz0: Symbol, context: Context): Template = {
+ val syntheticsOk = (phase.id <= currentRun.typerPhase.id) && {
+ symbolsToSynthesize(clazz0) filter (_ matchingSymbol clazz0.info isSynthetic) match {
+ case Nil => true
+ case syms => log("Not adding synthetic methods: already has " + syms.mkString(", ")) ; false
}
- sym.alternatives exists isOverride
}
+ if (!syntheticsOk)
+ return templ
- def syntheticMethod(name: Name, flags: Int, tpeCons: Symbol => Type) =
- newSyntheticMethod(name, flags | OVERRIDE, tpeCons)
+ val synthesizer = new ClassMethodSynthesis(
+ clazz0,
+ newTyper( if (reporter.hasErrors) context makeSilent false else context )
+ )
+ import synthesizer._
- def newSyntheticMethod(name: Name, flags: Int, tpeCons: Symbol => Type) = {
- val method = clazz.newMethod(clazz.pos.focus, name.toTermName) setFlag flags
- createdMethodSymbols += method
- method setInfo tpeCons(method)
- clazz.info.decls.enter(method)
+ if (clazz0 == AnyValClass || isPrimitiveValueClass(clazz0)) return {
+ if ((clazz0.info member nme.getClass_).isDeferred) {
+ // XXX dummy implementation for now
+ val getClassMethod = createMethod(nme.getClass_, getClassReturnType(clazz.tpe))(_ => NULL)
+ deriveTemplate(templ)(_ :+ getClassMethod)
+ }
+ else templ
}
- def makeNoArgConstructor(res: Type) =
- (sym: Symbol) => MethodType(Nil, res)
- def makeTypeConstructor(args: List[Type], res: Type) =
- (sym: Symbol) => MethodType(sym newSyntheticValueParams args, res)
- def makeEqualityMethod(name: Name) =
- syntheticMethod(name, 0, makeTypeConstructor(List(AnyClass.tpe), BooleanClass.tpe))
+ def accessors = clazz.caseFieldAccessors
+ val arity = accessors.size
+ // If this is ProductN[T1, T2, ...], accessorLub is the lub of T1, T2, ..., .
+ // !!! Hidden behind -Xexperimental due to bummer type inference bugs.
+ // Refining from Iterator[Any] leads to types like
+ //
+ // Option[Int] { def productIterator: Iterator[String] }
+ //
+ // appearing legitimately, but this breaks invariant places
+ // like Tags and Arrays which are not robust and infer things
+ // which they shouldn't.
+ val accessorLub = (
+ if (opt.experimental) {
+ global.weakLub(accessors map (_.tpe.finalResultType))._1 match {
+ case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
+ case tp => tp
+ }
+ }
+ else AnyClass.tpe
+ )
- import CODE._
+ def forwardToRuntime(method: Symbol): Tree =
+ forwardMethod(method, getMember(ScalaRunTimeModule, (method.name prepend "_")))(mkThis :: _)
- def productPrefixMethod: Tree = typer.typed {
- val method = syntheticMethod(nme.productPrefix, 0, sym => NullaryMethodType(StringClass.tpe))
- DEF(method) === LIT(clazz.name.decode)
+ def callStaticsMethod(name: String)(args: Tree*): Tree = {
+ val method = termMember(RuntimeStaticsModule, name)
+ Apply(gen.mkAttributedRef(method), args.toList)
}
- def productArityMethod(nargs: Int): Tree = {
- val method = syntheticMethod(nme.productArity, 0, sym => NullaryMethodType(IntClass.tpe))
- typer typed { DEF(method) === LIT(nargs) }
+ // Any concrete member, including private
+ def hasConcreteImpl(name: Name) =
+ clazz.info.member(name).alternatives exists (m => !m.isDeferred)
+
+ def hasOverridingImplementation(meth: Symbol) = {
+ val sym = clazz.info nonPrivateMember meth.name
+ sym.alternatives exists { m0 =>
+ (m0 ne meth) && !m0.isDeferred && !m0.isSynthetic && (m0.owner != AnyValClass) && (typeInClazz(m0) matches typeInClazz(meth))
+ }
+ }
+ def readConstantValue[T](name: String, default: T = null.asInstanceOf[T]): T = {
+ clazzMember(newTermName(name)).info match {
+ case NullaryMethodType(ConstantType(Constant(value))) => value.asInstanceOf[T]
+ case _ => default
+ }
+ }
+ def productIteratorMethod = {
+ createMethod(nme.productIterator, iteratorOfType(accessorLub))(_ =>
+ gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(accessorLub), List(mkThis))
+ )
+ }
+ def projectionMethod(accessor: Symbol, num: Int) = {
+ createMethod(nme.productAccessorName(num), accessor.tpe.resultType)(_ => REF(accessor))
}
/** Common code for productElement and (currently disabled) productElementName
*/
- def perElementMethod(accs: List[Symbol], methodName: Name, resType: Type, caseFn: Symbol => Tree): Tree = {
- val symToTpe = makeTypeConstructor(List(IntClass.tpe), resType)
- val method = syntheticMethod(methodName, 0, symToTpe)
- val arg = method ARG 0
- val default = List(DEFAULT ==> THROW(IndexOutOfBoundsExceptionClass, arg))
- val cases =
- for ((sym, i) <- accs.zipWithIndex) yield
- CASE(LIT(i)) ==> caseFn(sym)
-
- typer typed {
- DEF(method) === {
- arg MATCH { cases ::: default : _* }
- }
- }
- }
- def productElementMethod(accs: List[Symbol]): Tree =
- perElementMethod(accs, nme.productElement, AnyClass.tpe, x => Ident(x))
+ def perElementMethod(name: Name, returnType: Type)(caseFn: Symbol => Tree): Tree =
+ createSwitchMethod(name, accessors.indices, returnType)(idx => caseFn(accessors(idx)))
- // def productElementNameMethod(accs: List[Symbol]): Tree =
- // perElementMethod(accs, nme.productElementName, StringClass.tpe, x => Literal(x.name.toString))
+ // def productElementNameMethod = perElementMethod(nme.productElementName, StringClass.tpe)(x => LIT(x.name.toString))
- def moduleToStringMethod: Tree = {
- val method = syntheticMethod(nme.toString_, FINAL, makeNoArgConstructor(StringClass.tpe))
- typer typed { DEF(method) === LIT(clazz.name.decode) }
- }
- def moduleHashCodeMethod: Tree = {
- val method = syntheticMethod(nme.hashCode_, FINAL, makeNoArgConstructor(IntClass.tpe))
- // The string being used as hashcode basis is also productPrefix.
- val code = clazz.name.decode.hashCode
+ var syntheticCanEqual = false
- typer typed { DEF(method) === LIT(code) }
+ /** The canEqual method for case classes.
+ * def canEqual(that: Any) = that.isInstanceOf[This]
+ */
+ def canEqualMethod: Tree = {
+ syntheticCanEqual = true
+ createMethod(nme.canEqual_, List(AnyClass.tpe), BooleanClass.tpe)(m =>
+ Ident(m.firstParam) IS_OBJ classExistentialType(clazz))
}
- def forwardingMethod(name: Name, targetName: Name): Tree = {
- val target = getMember(ScalaRunTimeModule, targetName)
- val paramtypes = target.tpe.paramTypes drop 1
- val method = syntheticMethod(name, 0, makeTypeConstructor(paramtypes, target.tpe.resultType))
-
- typer typed {
- DEF(method) === {
- Apply(REF(target), This(clazz) :: (method ARGNAMES))
- }
- }
+ /** that match { case _: this.C => true ; case _ => false }
+ * where `that` is the given method's first parameter.
+ *
+ * An isInstanceOf test is insufficient because it has weaker
+ * requirements than a pattern match. Given an inner class Foo and
+ * two different instantiations of the container, an x.Foo and and a y.Foo
+ * are both .isInstanceOf[Foo], but the one does not match as the other.
+ */
+ def thatTest(eqmeth: Symbol): Tree = {
+ Match(
+ Ident(eqmeth.firstParam),
+ List(
+ CaseDef(Typed(Ident(nme.WILDCARD), TypeTree(clazz.tpe)), EmptyTree, TRUE),
+ CaseDef(WILD.empty, EmptyTree, FALSE)
+ )
+ )
}
- /** The equality method for case modules:
- * def equals(that: Any) = this eq that
+ /** (that.asInstanceOf[this.C])
+ * where that is the given methods first parameter.
*/
- def equalsModuleMethod: Tree = {
- val method = makeEqualityMethod(nme.equals_)
- val that = method ARG 0
-
- localTyper typed {
- DEF(method) === (This(clazz) ANY_EQ that)
- }
+ def thatCast(eqmeth: Symbol): Tree =
+ gen.mkCast(Ident(eqmeth.firstParam), clazz.tpe)
+
+ /** The equality method core for case classes and inline clases.
+ * 1+ args:
+ * (that.isInstanceOf[this.C]) && {
+ * val x$1 = that.asInstanceOf[this.C]
+ * (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this)
+ * }
+ * Drop canBuildFrom part if class is final and canBuildFrom is synthesized
+ */
+ def equalsCore(eqmeth: Symbol, accessors: List[Symbol]) = {
+ val otherName = context.unit.freshTermName(clazz.name + "$")
+ val otherSym = eqmeth.newValue(otherName, eqmeth.pos, SYNTHETIC) setInfo clazz.tpe
+ val pairwise = accessors map (acc => fn(Select(mkThis, acc), acc.tpe member nme.EQ, Select(Ident(otherSym), acc)))
+ val canEq = gen.mkMethodCall(otherSym, nme.canEqual_, Nil, List(mkThis))
+ val tests = if (clazz.isDerivedValueClass || clazz.isFinal && syntheticCanEqual) pairwise else pairwise :+ canEq
+
+ thatTest(eqmeth) AND Block(
+ ValDef(otherSym, thatCast(eqmeth)),
+ AND(tests: _*)
+ )
}
- /** The canEqual method for case classes. Note that if we spot
- * a user-supplied equals implementation, we simply return true
- * so as not to interfere.
+ /** The equality method for case classes.
+ * 0 args:
+ * def equals(that: Any) = that.isInstanceOf[this.C] && that.asInstanceOf[this.C].canEqual(this)
+ * 1+ args:
+ * def equals(that: Any) = (this eq that.asInstanceOf[AnyRef]) || {
+ * (that.isInstanceOf[this.C]) && {
+ * val x$1 = that.asInstanceOf[this.C]
+ * (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this)
+ * }
+ * }
*/
- def canEqualMethod: Tree = {
- val method = makeEqualityMethod(nme.canEqual_)
- val that = method ARG 0
+ def equalsCaseClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m =>
+ if (accessors.isEmpty)
+ if (clazz.isFinal) thatTest(m)
+ else thatTest(m) AND ((thatCast(m) DOT nme.canEqual_)(mkThis))
+ else
+ (mkThis ANY_EQ Ident(m.firstParam)) OR equalsCore(m, accessors)
+ }
- typer typed (DEF(method) === (that IS_OBJ clazz.tpe))
+ /** The equality method for value classes
+ * def equals(that: Any) = (this.asInstanceOf[AnyRef]) eq that.asInstanceOf[AnyRef]) || {
+ * (that.isInstanceOf[this.C]) && {
+ * val x$1 = that.asInstanceOf[this.C]
+ * (this.underlying == that.underlying
+ */
+ def equalsDerivedValueClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m =>
+ equalsCore(m, List(clazz.derivedValueClassUnbox))
}
- /** The equality method for case classes. The argument is an Any,
- * but because of boxing it will always be an Object, so a check
- * is neither necessary nor useful before the cast.
- *
- * def equals(that: Any) =
- * (this eq that.asInstanceOf[AnyRef]) ||
- * (that match {
- * case x @ this.C(this.arg_1, ..., this.arg_n) => x canEqual this
- * case _ => false
- * })
+ /** The hashcode method for value classes
+ * def hashCode(): Int = this.underlying.hashCode
*/
- def equalsClassMethod: Tree = {
- val method = makeEqualityMethod(nme.equals_)
- val that = method ARG 0
- val constrParamTypes = clazz.primaryConstructor.tpe.paramTypes
-
- // returns (Apply, Bind)
- def makeTrees(acc: Symbol, cpt: Type): (Tree, Bind) = {
- val varName = context.unit.freshTermName(acc.name + "$")
- val isRepeated = isRepeatedParamType(cpt)
- val binding = if (isRepeated) Star(WILD()) else WILD()
- val eqMethod: Tree =
- if (isRepeated) gen.mkRuntimeCall(nme.sameElements, List(Ident(varName), Ident(acc)))
- else (Ident(varName) DOT nme.EQ)(Ident(acc))
-
- (eqMethod, Bind(varName, binding))
- }
+ def hashCodeDerivedValueClassMethod: Tree = createMethod(nme.hashCode_, Nil, IntClass.tpe) { m =>
+ Select(mkThisSelect(clazz.derivedValueClassUnbox), nme.hashCode_)
+ }
- // Creates list of parameters and a guard for each
- val (guards, params) = (clazz.caseFieldAccessors, constrParamTypes).zipped map makeTrees unzip
+ /** The _1, _2, etc. methods to implement ProductN, disabled
+ * until we figure out how to introduce ProductN without cycles.
+ */
+ def productNMethods = {
+ val accs = accessors.toIndexedSeq
+ 1 to arity map (num => productProj(arity, num) -> (() => projectionMethod(accs(num - 1), num)))
+ }
- // Verify with canEqual method before returning true.
- def canEqualCheck() = {
- val that: Tree = (method ARG 0) AS clazz.tpe
- val canEqualOther: Symbol = clazz.info nonPrivateMember nme.canEqual_
+ // methods for both classes and objects
+ def productMethods = {
+ List(
+ Product_productPrefix -> (() => constantNullary(nme.productPrefix, clazz.name.decode)),
+ Product_productArity -> (() => constantNullary(nme.productArity, arity)),
+ Product_productElement -> (() => perElementMethod(nme.productElement, accessorLub)(mkThisSelect)),
+ Product_iterator -> (() => productIteratorMethod),
+ Product_canEqual -> (() => canEqualMethod)
+ // This is disabled pending a reimplementation which doesn't add any
+ // weight to case classes (i.e. inspects the bytecode.)
+ // Product_productElementName -> (() => productElementNameMethod(accessors)),
+ )
+ }
- typer typed {
- (that DOT canEqualOther)(This(clazz))
- }
+ def hashcodeImplementation(sym: Symbol): Tree = {
+ sym.tpe.finalResultType.typeSymbol match {
+ case UnitClass | NullClass => Literal(Constant(0))
+ case BooleanClass => If(Ident(sym), Literal(Constant(1231)), Literal(Constant(1237)))
+ case IntClass | ShortClass | ByteClass | CharClass => Ident(sym)
+ case LongClass => callStaticsMethod("longHash")(Ident(sym))
+ case DoubleClass => callStaticsMethod("doubleHash")(Ident(sym))
+ case FloatClass => callStaticsMethod("floatHash")(Ident(sym))
+ case _ => callStaticsMethod("anyHash")(Ident(sym))
}
+ }
- // Pattern is classname applied to parameters, and guards are all logical and-ed
- val (guard, pat) = (AND(guards: _*), Ident(clazz.name.toTermName) APPLY params)
+ def specializedHashcode = {
+ createMethod(nme.hashCode_, Nil, IntClass.tpe) { m =>
+ val accumulator = m.newVariable(newTermName("acc"), m.pos, SYNTHETIC) setInfo IntClass.tpe
+ val valdef = ValDef(accumulator, Literal(Constant(0xcafebabe)))
+ val mixes = accessors map (acc =>
+ Assign(
+ Ident(accumulator),
+ callStaticsMethod("mix")(Ident(accumulator), hashcodeImplementation(acc))
+ )
+ )
+ val finish = callStaticsMethod("finalizeHash")(Ident(accumulator), Literal(Constant(arity)))
- localTyper typed {
- DEF(method) === {
- (This(clazz) ANY_EQ that) OR (that MATCH(
- (CASE(pat) IF guard) ==> canEqualCheck() ,
- DEFAULT ==> FALSE
- ))
- }
+ Block(valdef :: mixes, finish)
}
}
-
- def newAccessorMethod(tree: Tree): Tree = tree match {
- case DefDef(_, _, _, _, _, rhs) =>
- var newAcc = tree.symbol.cloneSymbol
- newAcc.name = context.unit.freshTermName(tree.symbol.name + "$")
- newAcc setFlag SYNTHETIC resetFlag (ACCESSOR | PARAMACCESSOR | PRIVATE | PROTECTED)
- newAcc.privateWithin = NoSymbol
- newAcc = newAcc.owner.info.decls enter newAcc
- val result = typer typed { DEF(newAcc) === rhs.duplicate }
- log("new accessor method " + result)
- result
+ def chooseHashcode = {
+ if (accessors exists (x => isPrimitiveValueType(x.tpe.finalResultType)))
+ specializedHashcode
+ else
+ forwardToRuntime(Object_hashCode)
}
- def needsReadResolve = (
- // only nested objects inside objects should get readResolve automatically
- // otherwise after de-serialization we get null references for lazy accessors (nested object -> lazy val + class def)
- // since the bitmap gets serialized but the moduleVar not
- clazz.isSerializable && (clazz.owner.isPackageClass || clazz.owner.isModuleClass)
+ def valueClassMethods = List(
+ Any_hashCode -> (() => hashCodeDerivedValueClassMethod),
+ Any_equals -> (() => equalsDerivedValueClassMethod)
)
- // A buffer collecting additional methods for the template body
- val ts = new ListBuffer[Tree]
-
- if (!phase.erasedTypes) try {
- if (clazz.isCase) {
- val isTop = clazz.ancestors forall (x => !x.isCase)
-
- if (isTop) {
- // If this case class has fields with less than public visibility, their getter at this
- // point also has those permissions. In that case we create a new, public accessor method
- // with a new name and remove the CASEACCESSOR flag from the existing getter. This complicates
- // the retrieval of the case field accessors (see def caseFieldAccessors in Symbols.)
- def needsService(s: Symbol) = s.isMethod && s.isCaseAccessor && !s.isPublic
- for (stat <- templ.body ; if stat.isDef && needsService(stat.symbol)) {
- ts += newAccessorMethod(stat)
- stat.symbol resetFlag CASEACCESSOR
- }
- }
+ def caseClassMethods = productMethods ++ /*productNMethods ++*/ Seq(
+ Object_hashCode -> (() => chooseHashcode),
+ Object_toString -> (() => forwardToRuntime(Object_toString)),
+ Object_equals -> (() => equalsCaseClassMethod)
+ )
- // methods for case classes only
- def classMethods = List(
- Object_hashCode -> (() => forwardingMethod(nme.hashCode_, "_" + nme.hashCode_)),
- Object_toString -> (() => forwardingMethod(nme.toString_, "_" + nme.toString_)),
- Object_equals -> (() => equalsClassMethod)
- )
- // methods for case objects only
- def objectMethods = List(
- Object_hashCode -> (() => moduleHashCodeMethod),
- Object_toString -> (() => moduleToStringMethod)
- )
- // methods for both classes and objects
- def everywhereMethods = {
- val accessors = clazz.caseFieldAccessors
- List(
- Product_productPrefix -> (() => productPrefixMethod),
- Product_productArity -> (() => productArityMethod(accessors.length)),
- Product_productElement -> (() => productElementMethod(accessors)),
- // This is disabled pending a reimplementation which doesn't add any
- // weight to case classes (i.e. inspects the bytecode.)
- // Product_productElementName -> (() => productElementNameMethod(accessors)),
- Product_canEqual -> (() => canEqualMethod)
- )
- }
+ def valueCaseClassMethods = productMethods ++ /*productNMethods ++*/ valueClassMethods ++ Seq(
+ Any_toString -> (() => forwardToRuntime(Object_toString))
+ )
- if (clazz.isModuleClass) {
- // if there's a synthetic method in a parent case class, override its equality
- // with eq (see #883)
- val otherEquals = clazz.info.nonPrivateMember(Object_equals.name)
- if (otherEquals.owner != clazz && createdMethodSymbols(otherEquals)) ts += equalsModuleMethod
- }
+ def caseObjectMethods = productMethods ++ Seq(
+ Object_hashCode -> (() => constantMethod(nme.hashCode_, clazz.name.decode.hashCode)),
+ Object_toString -> (() => constantMethod(nme.toString_, clazz.name.decode))
+ // Not needed, as reference equality is the default.
+ // Object_equals -> (() => createMethod(Object_equals)(m => This(clazz) ANY_EQ Ident(m.firstParam)))
+ )
- val methods = (if (clazz.isModuleClass) objectMethods else classMethods) ++ everywhereMethods
- for ((m, impl) <- methods ; if !hasOverridingImplementation(m))
- ts += impl()
- }
+ /** If you serialize a singleton and then deserialize it twice,
+ * you will have two instances of your singleton unless you implement
+ * readResolve. Here it is implemented for all objects which have
+ * no implementation and which are marked serializable (which is true
+ * for all case objects.)
+ */
+ def needsReadResolve = (
+ clazz.isModuleClass
+ && clazz.isSerializable
+ && !hasConcreteImpl(nme.readResolve)
+ )
- if (clazz.isModuleClass) {
- def hasReadResolve = {
- val sym = clazz.info member nme.readResolve // any member, including private
- sym.isTerm && !sym.isDeferred
- }
+ def synthesize(): List[Tree] = {
+ val methods = (
+ if (clazz.isCase)
+ if (clazz.isDerivedValueClass) valueCaseClassMethods
+ else if (clazz.isModuleClass) caseObjectMethods
+ else caseClassMethods
+ else if (clazz.isDerivedValueClass) valueClassMethods
+ else Nil
+ )
- /** If you serialize a singleton and then deserialize it twice,
- * you will have two instances of your singleton, unless you implement
- * the readResolve() method (see http://www.javaworld.com/javaworld/
- * jw-04-2003/jw-0425-designpatterns_p.html)
- */
- if (!hasReadResolve && needsReadResolve){
- // PP: To this day I really can't figure out what this next comment is getting at:
- // the !!! normally means there is something broken, but if so, what is it?
- //
- // !!! the synthetic method "readResolve" should be private, but then it is renamed !!!
- val method = newSyntheticMethod(nme.readResolve, PROTECTED, makeNoArgConstructor(ObjectClass.tpe))
- ts += typer typed (DEF(method) === REF(clazz.sourceModule))
+ /** Always generate overrides for equals and hashCode in value classes,
+ * so they can appear in universal traits without breaking value semantics.
+ */
+ def impls = {
+ def shouldGenerate(m: Symbol) = {
+ !hasOverridingImplementation(m) || {
+ clazz.isDerivedValueClass && (m == Any_hashCode || m == Any_equals) && {
+ if (settings.lint.value) {
+ (clazz.info nonPrivateMember m.name) filter (m => (m.owner != AnyClass) && (m.owner != clazz) && !m.isDeferred) andAlso { m =>
+ currentUnit.warning(clazz.pos, s"Implementation of ${m.name} inherited from ${m.owner} overridden in $clazz to enforce value class semantics")
+ }
+ }
+ true
+ }
+ }
}
+ for ((m, impl) <- methods ; if shouldGenerate(m)) yield impl()
}
- } catch {
- case ex: TypeError =>
- if (!reporter.hasErrors) throw ex
+ def extras = (
+ if (needsReadResolve) {
+ // Aha, I finally decoded the original comment.
+ // This method should be generated as private, but apparently if it is, then
+ // it is name mangled afterward. (Wonder why that is.) So it's only protected.
+ // For sure special methods like "readResolve" should not be mangled.
+ List(createMethod(nme.readResolve, Nil, ObjectClass.tpe)(m => { m setFlag PRIVATE ; REF(clazz.sourceModule) }))
+ }
+ else Nil
+ )
+
+ try impls ++ extras
+ catch { case _: TypeError if reporter.hasErrors => Nil }
}
- if (phase.id <= currentRun.typerPhase.id) {
- treeCopy.Template(templ, templ.parents, templ.self,
- if (ts.isEmpty) templ.body else templ.body ++ ts // avoid copying templ.body if empty
- )
+ /** If this case class has any less than public accessors,
+ * adds new accessors at the correct locations to preserve ordering.
+ * Note that this must be done before the other method synthesis
+ * because synthesized methods need refer to the new symbols.
+ * Care must also be taken to preserve the case accessor order.
+ */
+ def caseTemplateBody(): List[Tree] = {
+ val lb = ListBuffer[Tree]()
+ def isRewrite(sym: Symbol) = sym.isCaseAccessorMethod && !sym.isPublic
+
+ for (ddef @ DefDef(_, _, _, _, _, _) <- templ.body ; if isRewrite(ddef.symbol)) {
+ val original = ddef.symbol
+ val newAcc = deriveMethod(ddef.symbol, name => context.unit.freshTermName(name + "$")) { newAcc =>
+ newAcc.makePublic
+ newAcc resetFlag (ACCESSOR | PARAMACCESSOR)
+ ddef.rhs.duplicate
+ }
+ // TODO: shouldn't the next line be: `original resetFlag CASEACCESSOR`?
+ ddef.symbol resetFlag CASEACCESSOR
+ lb += logResult("case accessor new")(newAcc)
+ val renamedInClassMap = renamedCaseAccessors.getOrElseUpdate(clazz, mutable.Map() withDefault(x => x))
+ renamedInClassMap(original.name.toTermName) = newAcc.symbol.name.toTermName
+ }
+
+ (lb ++= templ.body ++= synthesize()).toList
}
- else templ
+
+ deriveTemplate(templ)(body =>
+ if (clazz.isCase) caseTemplateBody()
+ else synthesize() match {
+ case Nil => body // avoiding unnecessary copy
+ case ms => body ++ ms
+ }
+ )
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
new file mode 100644
index 0000000..d82fbd7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
@@ -0,0 +1,72 @@
+package scala.tools.nsc
+package typechecker
+
+trait Tags {
+ self: Analyzer =>
+
+ import global._
+ import definitions._
+
+ trait Tag {
+ self: Typer =>
+
+ private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = beforeTyper {
+ def wrapper (tree: => Tree): Tree = if (allowMaterialization) (context.withMacrosEnabled[Tree](tree)) else (context.withMacrosDisabled[Tree](tree))
+ wrapper(inferImplicit(
+ EmptyTree,
+ taggedTp,
+ /*reportAmbiguous =*/ true,
+ /*isView =*/ false,
+ /*context =*/ context,
+ /*saveAmbiguousDivergent =*/ true,
+ /*pos =*/ pos
+ ).tree)
+ }
+
+ /** Finds in scope or materializes a ClassTag.
+ * Should be used instead of ClassManifest every time compiler needs to persist an erasure.
+ *
+ * Once upon a time, we had an `ErasureTag` which was to `ClassTag` the same that `WeakTypeTag` is for `TypeTag`.
+ * However we found out that we don't really need this concept, so it got removed.
+ *
+ * @param pos Position for error reporting. Please, provide meaningful value.
+ * @param tp Type we're looking a ClassTag for, e.g. resolveClassTag(pos, IntClass.tpe) will look for ClassTag[Int].
+ * @param allowMaterialization If true (default) then the resolver is allowed to launch materialization macros when there's no class tag in scope.
+ * If false then materialization macros are prohibited from running.
+ *
+ * @returns Tree that represents an `scala.reflect.ClassTag` for `tp` if everything is okay.
+ * EmptyTree if the result contains unresolved (i.e. not spliced) type parameters and abstract type members.
+ * EmptyTree if `allowMaterialization` is false, and there is no class tag in scope.
+ */
+ def resolveClassTag(pos: Position, tp: Type, allowMaterialization: Boolean = true): Tree = {
+ val taggedTp = appliedType(ClassTagClass.typeConstructor, List(tp))
+ resolveTag(pos, taggedTp, allowMaterialization)
+ }
+
+ /** Finds in scope or materializes an WeakTypeTag (if `concrete` is false) or a TypeTag (if `concrete` is true).
+ *
+ * @param pos Position for error reporting. Please, provide meaningful value.
+ * @param pre Prefix that represents a universe this type tag will be bound to.
+ * If `pre` is set to `NoType`, then any type tag in scope will do, regardless of its affiliation.
+ * If `pre` is set to `NoType`, and tag resolution involves materialization, then `mkRuntimeUniverseRef` will be used.
+ * @param tp Type we're looking a TypeTag for, e.g. resolveTypeTag(pos, mkRuntimeUniverseRef, IntClass.tpe, false) will look for scala.reflect.runtime.universe.TypeTag[Int].
+ * @param concrete If true then the result must not contain unresolved (i.e. not spliced) type parameters and abstract type members.
+ * If false then the function will always succeed (abstract types will be reified as free types).
+ * @param allowMaterialization If true (default) then the resolver is allowed to launch materialization macros when there's no type tag in scope.
+ * If false then materialization macros are prohibited from running.
+ *
+ * @returns Tree that represents a `scala.reflect.TypeTag` for `tp` if everything is okay.
+ * EmptyTree if `concrete` is true and the result contains unresolved (i.e. not spliced) type parameters and abstract type members.
+ * EmptyTree if `allowMaterialization` is false, and there is no array tag in scope.
+ */
+ def resolveTypeTag(pos: Position, pre: Type, tp: Type, concrete: Boolean, allowMaterialization: Boolean = true): Tree =
+ // if someone requests a type tag, but scala-reflect.jar isn't on the library classpath, then bail
+ if (pre == NoType && ApiUniverseClass == NoSymbol) EmptyTree
+ else {
+ val tagSym = if (concrete) TypeTagClass else WeakTypeTagClass
+ val tagTp = if (pre == NoType) TypeRef(ApiUniverseClass.toTypeConstructor, tagSym, List(tp)) else singleType(pre, pre member tagSym.name)
+ val taggedTp = appliedType(tagTp, List(tp))
+ resolveTag(pos, taggedTp, allowMaterialization)
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 75483f7..88d10f1 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -8,13 +8,13 @@ package typechecker
import scala.tools.nsc.symtab.Flags._
import scala.collection.mutable
-import mutable.{ HashMap, HashSet, ListBuffer }
+import mutable.ListBuffer
import util.returning
abstract class TreeCheckers extends Analyzer {
import global._
- private def classstr(x: AnyRef) = x.getClass.getName split """\\.|\\$""" last;
+ private def classstr(x: AnyRef) = (x.getClass.getName split """\\.|\\$""").last
private def typestr(x: Type) = " (tpe = " + x + ")"
private def treestr(t: Tree) = t + " [" + classstr(t) + "]" + typestr(t.tpe)
private def ownerstr(s: Symbol) = "'" + s + "'" + s.locationString
@@ -33,12 +33,13 @@ abstract class TreeCheckers extends Analyzer {
/** This is a work in progress, don't take it too seriously.
*/
object SymbolTracker extends Traverser {
- type PhaseMap = HashMap[Symbol, List[Tree]]
- val maps: ListBuffer[(Phase, PhaseMap)] = ListBuffer()
+ type PhaseMap = mutable.HashMap[Symbol, List[Tree]]
+
+ val maps = ListBuffer[(Phase, PhaseMap)]()
def prev = maps.init.last._2
def latest = maps.last._2
- val defSyms = new HashMap[Symbol, List[DefTree]]
- val newSyms = new HashSet[Symbol]
+ val defSyms = mutable.HashMap[Symbol, List[DefTree]]()
+ val newSyms = mutable.HashSet[Symbol]()
val movedMsgs = new ListBuffer[String]
def sortedNewSyms = newSyms.toList.distinct sortBy (_.name.toString)
@@ -68,12 +69,12 @@ abstract class TreeCheckers extends Analyzer {
def reportChanges(): Unit = {
// new symbols
if (newSyms.nonEmpty) {
- val str =
- if (settings.debug.value) "New symbols: " + (sortedNewSyms mkString " ")
- else newSyms.size + " new symbols."
+ informFn(newSyms.size + " new symbols.")
+ val toPrint = if (settings.debug.value) sortedNewSyms mkString " " else ""
newSyms.clear()
- errorFn(str)
+ if (toPrint != "")
+ informFn(toPrint)
}
// moved symbols
@@ -110,23 +111,37 @@ abstract class TreeCheckers extends Analyzer {
}
}
- lazy val tpeOfTree = new HashMap[Tree, Type]
+ lazy val tpeOfTree = mutable.HashMap[Tree, Type]()
def posstr(p: Position) =
try p.source.path + ":" + p.line
catch { case _: UnsupportedOperationException => p.toString }
- def errorFn(msg: Any): Unit = println("[check: %s] %s".format(phase.prev, msg))
+ private var hasError: Boolean = false
+ def errorFn(msg: Any): Unit = {hasError = true; println("[check: %s] %s".format(phase.prev, msg))}
def errorFn(pos: Position, msg: Any): Unit = errorFn(posstr(pos) + ": " + msg)
+ def informFn(msg: Any) {
+ if (settings.verbose.value || settings.debug.value)
+ println("[check: %s] %s".format(phase.prev, msg))
+ }
def assertFn(cond: Boolean, msg: => Any) =
if (!cond) errorFn(msg)
+ private def wrap[T](msg: => Any)(body: => Unit) {
+ try body
+ catch { case x: Throwable =>
+ Console.println("Caught " + x)
+ Console.println(msg)
+ x.printStackTrace
+ }
+ }
+
def checkTrees() {
if (settings.verbose.value)
Console.println("[consistency check at the beginning of phase " + phase + "]")
- currentRun.units foreach check
+ currentRun.units foreach (x => wrap(x)(check(x)))
}
def printingTypings[T](body: => T): T = {
@@ -137,11 +152,12 @@ abstract class TreeCheckers extends Analyzer {
result
}
def runWithUnit[T](unit: CompilationUnit)(body: => Unit): Unit = {
- val unit0 = currentRun.currentUnit
+ hasError = false
+ val unit0 = currentUnit
currentRun.currentUnit = unit
body
currentRun.advanceUnit
- assertFn(currentRun.currentUnit == unit, "currentUnit is " + currentRun.currentUnit + ", but unit is " + unit)
+ assertFn(currentUnit == unit, "currentUnit is " + currentUnit + ", but unit is " + unit)
currentRun.currentUnit = unit0
}
def check(unit: CompilationUnit) {
@@ -155,13 +171,14 @@ abstract class TreeCheckers extends Analyzer {
checker.precheck.traverse(unit.body)
checker.typed(unit.body)
checker.postcheck.traverse(unit.body)
+ if (hasError) unit.warning(NoPosition, "TreeCheckers detected non-compliant trees in " + unit)
}
}
override def newTyper(context: Context): Typer = new TreeChecker(context)
class TreeChecker(context0: Context) extends Typer(context0) {
- override protected def typerAddSyntheticMethods(templ: Template, clazz: Symbol, context: Context): Template = {
+ override protected def finishMethodSynthesis(templ: Template, clazz: Symbol, context: Context): Template = {
// If we don't intercept this all the synthetics get added at every phase,
// with predictably unfortunate results.
templ
@@ -195,16 +212,19 @@ abstract class TreeCheckers extends Analyzer {
tree.tpe = null
saved
})
- super.typed(tree, mode, pt) match {
+ wrap(tree)(super.typed(tree, mode, pt) match {
case _: Literal => ()
case x if x ne tree => treesDiffer(tree, x)
case _ => ()
- }
+ })
case _ => ()
}
- object precheck extends Traverser {
+ object precheck extends TreeStackTraverser {
override def traverse(tree: Tree) {
+ checkSymbolRefsRespectScope(tree)
+ checkReturnReferencesDirectlyEnclosingDef(tree)
+
val sym = tree.symbol
def accessed = sym.accessed
def fail(msg: String) = errorFn(tree.pos, msg + classstr(tree) + " / " + tree)
@@ -228,7 +248,7 @@ abstract class TreeCheckers extends Analyzer {
}
}
case ValDef(_, _, _, _) =>
- if (sym.hasGetter && !sym.isOuterField) {
+ if (sym.hasGetter && !sym.isOuterField && !sym.isOuterAccessor) {
assertFn(sym.getter(sym.owner) != NoSymbol, ownerstr(sym) + " has getter but cannot be found. " + sym.ownerChain)
}
case Apply(fn, args) =>
@@ -258,20 +278,61 @@ abstract class TreeCheckers extends Analyzer {
tree match {
case x: PackageDef =>
- if (sym.ownerChain contains currentOwner) ()
- else fail(sym + " owner chain does not contain currentOwner " + currentOwner)
+ if ((sym.ownerChain contains currentOwner) || currentOwner.isEmptyPackageClass) ()
+ else fail(sym + " owner chain does not contain currentOwner " + currentOwner + sym.ownerChain)
case _ =>
def cond(s: Symbol) = !s.isTerm || s.isMethod || s == sym.owner
if (sym.owner != currentOwner) {
val expected = currentOwner.ownerChain find (x => cond(x)) getOrElse fail("DefTree can't find owner: ")
if (sym.owner != expected)
- fail("Expected owner %s (out of %s), found %s: ".format(expected, currentOwner.ownerChain, sym.owner))
+ fail(sm"""|
+ | currentOwner chain: ${currentOwner.ownerChain take 3 mkString " -> "}
+ | symbol chain: ${sym.ownerChain mkString " -> "}"""
+ )
}
}
}
super.traverse(tree)
}
+
+ private def checkSymbolRefsRespectScope(tree: Tree) {
+ def symbolOf(t: Tree): Symbol = Option(tree.symbol).getOrElse(NoSymbol)
+ def definedSymbolOf(t: Tree): Symbol = if (t.isDef) symbolOf(t) else NoSymbol
+ val info = Option(symbolOf(tree).info).getOrElse(NoType)
+ val referencedSymbols: List[Symbol] = {
+ val directRef = tree match {
+ case _: RefTree => symbolOf(tree).toOption
+ case _ => None
+ }
+ def referencedSyms(tp: Type) = (tp collect {
+ case TypeRef(_, sym, _) => sym
+ }).toList
+ val indirectRefs = referencedSyms(info)
+ (indirectRefs ++ directRef).distinct
+ }
+ for {
+ sym <- referencedSymbols
+ // Accessors are known to steal the type of the underlying field without cloning existential symbols at the new owner.
+ // This happens in Namer#accessorTypeCompleter. We just look the other way here.
+ if !tree.symbol.isAccessor
+ if (sym.isTypeParameter || sym.isLocal) && !(tree.symbol hasTransOwner sym.owner)
+ } errorFn(s"The symbol, tpe or info of tree `(${tree}) : ${info}` refers to a out-of-scope symbol, ${sym.fullLocationString}. tree.symbol.ownerChain: ${tree.symbol.ownerChain.mkString(", ")}")
+ }
+
+ private def checkReturnReferencesDirectlyEnclosingDef(tree: Tree) {
+ tree match {
+ case _: Return =>
+ path.collectFirst {
+ case dd: DefDef => dd
+ } match {
+ case None => errorFn(s"Return node ($tree) must be enclosed in a DefDef")
+ case Some(dd) =>
+ if (tree.symbol != dd.symbol) errorFn(s"Return symbol (${tree.symbol}} does not reference directly enclosing DefDef (${dd.symbol})")
+ }
+ case _ =>
+ }
+ }
}
object postcheck extends Traverser {
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 81caf38..2270e81 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -37,15 +37,7 @@ trait TypeDiagnostics {
import global._
import definitions._
- import global.typer.infer
-
- private def currentUnit = currentRun.currentUnit
-
- /** It can be quite difficult to know which of the many functions called "error"
- * is being called at any given point in the compiler. To alleviate this I am
- * renaming such functions inside this trait based on where it originated.
- */
- def inferError(pos: Position, msg: String) = infer.error(pos, msg)
+ import global.typer.{ infer, context }
/** The common situation of making sure nothing is erroneous could be
* nicer if Symbols, Types, and Trees all implemented some common interface
@@ -67,6 +59,19 @@ trait TypeDiagnostics {
* the map, the addendum should also be printed.
*/
private var addendums = perRunCaches.newMap[Position, () => String]()
+ private var isTyperInPattern = false
+
+ /** Devising new ways of communicating error info out of
+ * desperation to work on error messages. This is used
+ * by typedPattern to wrap its business so we can generate
+ * a sensible error message when things go south.
+ */
+ def typingInPattern[T](body: => T): T = {
+ val saved = isTyperInPattern
+ isTyperInPattern = true
+ try body
+ finally isTyperInPattern = saved
+ }
def setAddendum(pos: Position, msg: () => String) =
if (pos != NoPosition)
@@ -85,50 +90,30 @@ trait TypeDiagnostics {
/** Does the positioned line assigned to t1 precede that of t2?
*/
- def linePrecedes(t1: Tree, t2: Tree) = t1.pos.isDefined && t1.pos.isDefined && t1.pos.line < t2.pos.line
-
- def notAMember(sel: Tree, qual: Tree, name: Name) = {
- val owner = qual.tpe.typeSymbol
- val target = qual.tpe.widen
- def targetKindString = if (owner.isTypeParameterOrSkolem) "type parameter " else ""
- def nameString = decodeWithKind(name, owner)
- /** Illuminating some common situations and errors a bit further. */
- def addendum = {
- val companion = {
- if (name.isTermName && owner.isPackageClass) {
- target.member(name.toTypeName) match {
- case NoSymbol => ""
- case sym => "\nNote: %s exists, but it has no companion object.".format(sym)
- }
- }
- else ""
- }
- val semicolon = (
- if (linePrecedes(qual, sel))
- "\npossible cause: maybe a semicolon is missing before `"+nameString+"'?"
- else
- ""
- )
- companion + semicolon
+ def posPrecedes(p1: Position, p2: Position) = p1.isDefined && p2.isDefined && p1.line < p2.line
+ def linePrecedes(t1: Tree, t2: Tree) = posPrecedes(t1.pos, t2.pos)
+
+ private object DealiasedType extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ // Avoid "explaining" that String is really java.lang.String,
+ // while still dealiasing types from non-default namespaces.
+ case TypeRef(pre, sym, args) if sym.isAliasType && !sym.isInDefaultNamespace =>
+ mapOver(tp.dealias)
+ case _ =>
+ mapOver(tp)
}
-
- inferError(
- sel.pos,
- withAddendum(qual.pos)(
- if (name == nme.CONSTRUCTOR) target + " does not have a constructor"
- else nameString + " is not a member of " + targetKindString + target + addendum
- )
- )
}
- /** Only prints the parameter names if they're not synthetic,
- * since "x$1: Int" does not offer any more information than "Int".
- */
+ /** An explanatory note to be added to error messages
+ * when there's a problem with abstract var defs */
+ def abstractVarMessage(sym: Symbol): String =
+ if (underlyingSymbol(sym).isVariable)
+ "\n(Note that variables need to be initialized to be defined)"
+ else ""
+
private def methodTypeErrorString(tp: Type) = tp match {
case mt @ MethodType(params, resultType) =>
- def forString =
- if (params exists (_.isSynthetic)) params map (_.tpe)
- else params map (_.defString)
+ def forString = params map (_.defString)
forString.mkString("(", ",", ")") + resultType
case x => x.toString
@@ -141,74 +126,50 @@ trait TypeDiagnostics {
def alternativesString(tree: Tree) =
alternatives(tree) map (x => " " + methodTypeErrorString(x)) mkString ("", " <and>\n", "\n")
- def missingParameterTypeMsg(fun: Tree, vparam: ValDef, pt: Type) = {
- def anonMessage = (
- "\nThe argument types of an anonymous function must be fully known. (SLS 8.5)" +
- "\nExpected type was: " + pt.toLongString
- )
- val suffix =
- if (!vparam.mods.isSynthetic) ""
- else " for expanded function" + (fun match {
- case Function(_, Match(_, _)) => anonMessage
- case _ => " " + fun
- })
-
- "missing parameter type" + suffix
- }
+ /** The symbol which the given accessor represents (possibly in part).
+ * This is used for error messages, where we want to speak in terms
+ * of the actual declaration or definition, not in terms of the generated setters
+ * and getters.
+ */
+ def underlyingSymbol(member: Symbol): Symbol =
+ if (!member.hasAccessorFlag) member
+ else if (!member.isDeferred) member.accessed
+ else {
+ val getter = if (member.isSetter) member.getter(member.owner) else member
+ val flags = if (getter.setter(member.owner) != NoSymbol) DEFERRED | MUTABLE else DEFERRED
+
+ getter.owner.newValue(getter.name.toTermName, getter.pos, flags) setInfo getter.tpe.resultType
+ }
def treeSymTypeMsg(tree: Tree): String = {
val sym = tree.symbol
def hasParams = tree.tpe.paramSectionCount > 0
def preResultString = if (hasParams) ": " else " of type "
- def nullMessage = "expression of type " + tree.tpe
- def overloadedMessage = "overloaded method " + sym + " with alternatives:\n" + alternativesString(tree)
+ def patternMessage = "pattern " + tree.tpe.finalResultType + valueParamsString(tree.tpe)
+ def exprMessage = "expression of type " + tree.tpe
+ def overloadedMessage = s"overloaded method $sym with alternatives:\n" + alternativesString(tree)
def moduleMessage = "" + sym
def defaultMessage = moduleMessage + preResultString + tree.tpe
def applyMessage = defaultMessage + tree.symbol.locationString
- if (sym == null) nullMessage
+ if ((sym eq null) || (sym eq NoSymbol)) {
+ if (isTyperInPattern) patternMessage
+ else exprMessage
+ }
else if (sym.isOverloaded) overloadedMessage
else if (sym.isModule) moduleMessage
else if (sym.name == nme.apply) applyMessage
else defaultMessage
}
- def notEnoughArgumentsMsg(fun: Tree, missing: List[Symbol]): String = {
- val suffix = {
- if (missing.isEmpty) ""
- else {
- val keep = missing take 3 map (_.name)
- ".\nUnspecified value parameter%s %s".format(
- if (missing.tail.isEmpty) "" else "s",
- if (missing drop 3 nonEmpty) (keep :+ "...").mkString(", ")
- else keep.mkString("", ", ", ".")
- )
- }
- }
-
- "not enough arguments for " + treeSymTypeMsg(fun) + suffix
- }
-
- def applyErrorMsg(tree: Tree, msg: String, argtpes: List[Type], pt: Type) = {
- def asParams(xs: List[Any]) = xs.mkString("(", ", ", ")")
-
- def resType = if (pt isWildcard) "" else " with expected result type " + pt
- def allTypes = (alternatives(tree) flatMap (_.paramTypes)) ++ argtpes :+ pt
- def locals = alternatives(tree) flatMap (_.typeParams)
-
- withDisambiguation(locals, allTypes: _*) {
- treeSymTypeMsg(tree) + msg + asParams(argtpes) + resType
- }
- }
-
def disambiguate(ss: List[String]) = ss match {
case Nil => Nil
case s :: ss => s :: (ss map { case `s` => "(some other)"+s ; case x => x })
}
// todo: use also for other error messages
- def existentialContext(tp: Type) = tp.existentialSkolems match {
+ def existentialContext(tp: Type) = tp.skolemsExceptMethodTypeParams match {
case Nil => ""
case xs => " where " + (disambiguate(xs map (_.existentialToString)) mkString ", ")
}
@@ -218,6 +179,18 @@ trait TypeDiagnostics {
else if (sym.variance == -1) "contravariant"
else "invariant"
+ def explainAlias(tp: Type) = {
+ // Don't automatically normalize standard aliases; they still will be
+ // expanded if necessary to disambiguate simple identifiers.
+ if ((tp eq tp.normalize) || tp.typeSymbolDirect.isInDefaultNamespace) ""
+ else {
+ // A sanity check against expansion being identical to original.
+ val s = "" + DealiasedType(tp)
+ if (s == "" + tp) ""
+ else "\n (which expands to) " + s
+ }
+ }
+
/** Look through the base types of the found type for any which
* might have been valid subtypes if given conformant type arguments.
* Examine those for situations where the type error would have been
@@ -292,11 +265,43 @@ trait TypeDiagnostics {
"" // no elaborable variance situation found
}
+ // For found/required errors where AnyRef would have sufficed:
+ // explain in greater detail.
+ def explainAnyVsAnyRef(found: Type, req: Type): String = {
+ if (AnyRefClass.tpe <:< req) notAnyRefMessage(found) else ""
+ }
+
+ // TODO - figure out how to avoid doing any work at all
+ // when the message will never be seen. I though context.reportErrors
+ // being false would do that, but if I return "<suppressed>" under
+ // that condition, I see it.
def foundReqMsg(found: Type, req: Type): String = {
- (withDisambiguation(List(), found, req) {
- ";\n found : " + found.toLongString + existentialContext(found) +
- "\n required: " + req + existentialContext(req)
- }) + explainVariance(found, req)
+ def baseMessage = (
+ ";\n found : " + found.toLongString + existentialContext(found) + explainAlias(found) +
+ "\n required: " + req + existentialContext(req) + explainAlias(req)
+ )
+ ( withDisambiguation(Nil, found, req)(baseMessage)
+ + explainVariance(found, req)
+ + explainAnyVsAnyRef(found, req)
+ )
+ }
+
+ def typePatternAdvice(sym: Symbol, ptSym: Symbol) = {
+ val clazz = if (sym.isModuleClass) sym.companionClass else sym
+ val caseString =
+ if (clazz.isCaseClass && (clazz isSubClass ptSym))
+ ( clazz.caseFieldAccessors
+ map (_ => "_") // could use the actual param names here
+ mkString (s"`case ${clazz.name}(", ",", ")`")
+ )
+ else
+ "`case _: " + (clazz.typeParams match {
+ case Nil => "" + clazz.name
+ case xs => xs map (_ => "_") mkString (clazz.name + "[", ",", "]")
+ })+ "`"
+
+ "\nNote: if you intended to match against the class, try "+ caseString
+
}
case class TypeDiag(tp: Type, sym: Symbol) extends Ordered[TypeDiag] {
@@ -305,11 +310,13 @@ trait TypeDiagnostics {
private val savedName = sym.name
def restoreName() = sym.name = savedName
def isAltered = sym.name != savedName
- def modifyName(f: String => String) =
- sym.name = newTypeName(f(sym.name.toString))
+ def modifyName(f: String => String) = sym setName newTypeName(f(sym.name.toString))
- def scalaQualify() = {
- val intersect = Set(trueOwner, aliasOwner) intersect Set(ScalaPackageClass, PredefModuleClass)
+ /** Prepend java.lang, scala., or Predef. if this type originated
+ * in one of those.
+ */
+ def qualifyDefaultNamespaces() = {
+ val intersect = Set(trueOwner, aliasOwner) intersect UnqualifiedOwners
if (intersect.nonEmpty) preQualify()
}
@@ -319,8 +326,8 @@ trait TypeDiagnostics {
def typeQualify() = if (sym.isTypeParameterOrSkolem) postQualify()
def nameQualify() = if (trueOwner.isPackageClass) preQualify() else postQualify()
- def trueOwner = tp.typeSymbol.owner.skipPackageObject
- def aliasOwner = tp.typeSymbolDirect.owner.skipPackageObject
+ def trueOwner = tp.typeSymbol.effectiveOwner
+ def aliasOwner = tp.typeSymbolDirect.effectiveOwner
def sym_==(other: TypeDiag) = tp.typeSymbol == other.tp.typeSymbol
def owner_==(other: TypeDiag) = trueOwner == other.trueOwner
@@ -344,16 +351,40 @@ trait TypeDiagnostics {
)
}
}
- private def typeDiags(locals: List[Symbol], types: Type*): List[TypeDiag] = {
- object SymExtractor {
- def unapply(x: Any) = x match {
- case t @ ConstantType(_) => Some(t -> t.underlying.typeSymbol)
- case t @ TypeRef(_, sym, _) => if (locals contains sym) None else Some(t -> sym)
- case _ => None
+ /** This is tricky stuff - we need to traverse types deeply to
+ * explain name ambiguities, which may occur anywhere. However
+ * when lub explosions come through it knocks us into an n^2
+ * disaster, see SI-5580. This is trying to perform the initial
+ * filtering of possibly ambiguous types in a sufficiently
+ * aggressive way that the state space won't explode.
+ */
+ private def typeDiags(locals: List[Symbol], types0: Type*): List[TypeDiag] = {
+ val types = types0.toList
+ // If two different type diag instances are seen for a given
+ // key (either the string representation of a type, or the simple
+ // name of a symbol) then keep them for disambiguation.
+ val strings = mutable.Map[String, Set[TypeDiag]]() withDefaultValue Set()
+ val names = mutable.Map[Name, Set[TypeDiag]]() withDefaultValue Set()
+
+ val localsSet = locals.toSet
+
+ def record(t: Type, sym: Symbol) = {
+ if (!localsSet(sym)) {
+ val diag = TypeDiag(t, sym)
+ strings("" + t) += diag
+ names(sym.name) += diag
+ }
+ }
+ for (tpe <- types ; t <- tpe) {
+ t match {
+ case ConstantType(_) => record(t, t.underlying.typeSymbol)
+ case TypeRef(_, sym, _) => record(t, sym)
+ case _ => ()
}
}
- for (tp <- types.toList; SymExtractor(t, sym) <- tp) yield TypeDiag(t, sym)
+ val collisions = strings.values ++ names.values filter (_.size > 1)
+ collisions.flatten.toList
}
/** The distinct pairs from an ordered list. */
@@ -384,7 +415,7 @@ trait TypeDiagnostics {
// scala package or predef, qualify with scala so it is not confusing why
// e.g. java.util.Iterator and Iterator are different types.
if (td1 name_== td2)
- tds foreach (_.scalaQualify())
+ tds foreach (_.qualifyDefaultNamespaces())
// If they still print identically:
// a) If they are type parameters with different owners, append (in <owner>)
@@ -404,30 +435,39 @@ trait TypeDiagnostics {
trait TyperDiagnostics {
self: Typer =>
- private def contextError(pos: Position, msg: String) = context.error(pos, msg)
- private def contextError(pos: Position, err: Throwable) = context.error(pos, err)
+ private def contextError(context0: Analyzer#Context, pos: Position, msg: String) = context0.error(pos, msg)
+ private def contextError(context0: Analyzer#Context, pos: Position, err: Throwable) = context0.error(pos, err)
+ private def contextWarning(pos: Position, msg: String) = context.unit.warning(pos, msg)
+
+ def permanentlyHiddenWarning(pos: Position, hidden: Name, defn: Symbol) =
+ contextWarning(pos, "imported `%s' is permanently hidden by definition of %s".format(hidden, defn.fullLocationString))
object checkDead {
- private var expr: Symbol = NoSymbol
- private def exprOK = expr != Object_synchronized
- private def treeOK(tree: Tree) = tree.tpe != null && tree.tpe.typeSymbol == NothingClass
+ private val exprStack: mutable.Stack[Symbol] = mutable.Stack(NoSymbol)
+ // The method being applied to `tree` when `apply` is called.
+ private def expr = exprStack.top
+
+ private def exprOK =
+ (expr != Object_synchronized) &&
+ !(expr.isLabel && treeInfo.isSynthCaseSymbol(expr)) // it's okay to jump to matchEnd (or another case) with an argument of type nothing
- def updateExpr(fn: Tree) = {
- if (fn.symbol != null && fn.symbol.isMethod && !fn.symbol.isConstructor)
- checkDead.expr = fn.symbol
+ private def treeOK(tree: Tree) = {
+ val isLabelDef = tree match { case _: LabelDef => true; case _ => false}
+ tree.tpe != null && tree.tpe.typeSymbol == NothingClass && !isLabelDef
+ }
+
+ @inline def updateExpr[A](fn: Tree)(f: => A) = {
+ if (fn.symbol != null && fn.symbol.isMethod && !fn.symbol.isConstructor) {
+ exprStack push fn.symbol
+ try f finally exprStack.pop()
+ } else f
}
def apply(tree: Tree): Tree = {
// Error suppression will squash some of these warnings unless we circumvent it.
// It is presumed if you are using a -Y option you would really like to hear
// the warnings you've requested.
- if (settings.warnDeadCode.value && context.unit != null && treeOK(tree) && exprOK) {
- val saved = context.reportGeneralErrors
- try {
- context.reportGeneralErrors = true
- context.warning(tree.pos, "dead code following this construct")
- }
- finally context.reportGeneralErrors = saved
- }
+ if (settings.warnDeadCode.value && context.unit.exists && treeOK(tree) && exprOK)
+ context.warning(tree.pos, "dead code following this construct", true)
tree
}
@@ -439,8 +479,8 @@ trait TypeDiagnostics {
}
}
- def symWasOverloaded(sym: Symbol) = sym.owner.isClass && sym.owner.info.member(sym.name).isOverloaded
- def cyclicAdjective(sym: Symbol) = if (symWasOverloaded(sym)) "overloaded" else "recursive"
+ private def symWasOverloaded(sym: Symbol) = sym.owner.isClass && sym.owner.info.member(sym.name).isOverloaded
+ private def cyclicAdjective(sym: Symbol) = if (symWasOverloaded(sym)) "overloaded" else "recursive"
/** Returns Some(msg) if the given tree is untyped apparently due
* to a cyclic reference, and None otherwise.
@@ -448,6 +488,11 @@ trait TypeDiagnostics {
def cyclicReferenceMessage(sym: Symbol, tree: Tree) = condOpt(tree) {
case ValDef(_, _, tpt, _) if tpt.tpe == null => "recursive "+sym+" needs type"
case DefDef(_, _, _, _, tpt, _) if tpt.tpe == null => List(cyclicAdjective(sym), sym, "needs result type") mkString " "
+ case Import(expr, selectors) =>
+ ( "encountered unrecoverable cycle resolving import." +
+ "\nNote: this is often due in part to a class depending on a definition nested within its companion." +
+ "\nIf applicable, you may wish to try moving some members into another object."
+ )
}
/** Report a type error.
@@ -455,19 +500,31 @@ trait TypeDiagnostics {
* @param pos0 The position where to report the error
* @param ex The exception that caused the error
*/
- def reportTypeError(pos: Position, ex: TypeError) {
+ def reportTypeError(context0: Context, pos: Position, ex: TypeError) {
if (ex.pos == NoPosition) ex.pos = pos
- if (!context.reportGeneralErrors) throw ex
+ // TODO: should be replaced by throwErrors
+ // but it seems that throwErrors excludes some of the errors that should actually be
+ // buffered, causing TypeErrors to fly around again. This needs some more investigation.
+ if (!context0.reportErrors) throw ex
if (settings.debug.value) ex.printStackTrace()
ex match {
case CyclicReference(sym, info: TypeCompleter) =>
- contextError(ex.pos, cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage())
-
- if (sym == ObjectClass)
- throw new FatalError("cannot redefine root "+sym)
+ if (context0.owner.isTermMacro) {
+ // see comments to TypeSigError for an explanation of this special case
+ throw ex
+ } else {
+ val pos = info.tree match {
+ case Import(expr, _) => expr.pos
+ case _ => ex.pos
+ }
+ contextError(context0, pos, cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage())
+
+ if (sym == ObjectClass)
+ throw new FatalError("cannot redefine root "+sym)
+ }
case _ =>
- contextError(ex.pos, ex)
+ contextError(context0, ex.pos, ex)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index b1b5601..e09a509 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -12,13 +12,10 @@
package scala.tools.nsc
package typechecker
-import scala.collection.{ mutable, immutable }
-import scala.tools.nsc.util.BatchSourceFile
+import scala.collection.mutable
+import scala.reflect.internal.util.{ BatchSourceFile, Statistics }
import mutable.ListBuffer
import symtab.Flags._
-import util.Statistics
-import util.Statistics._
-import scala.tools.util.StringOps.{ countAsString, countElementsAsString }
// Suggestion check whether we can do without priming scopes with symbols of outer scopes,
// like the IDE does.
@@ -27,11 +24,12 @@ import scala.tools.util.StringOps.{ countAsString, countElementsAsString }
* @author Martin Odersky
* @version 1.0
*/
-trait Typers extends Modes {
+trait Typers extends Modes with Adaptations with Tags {
self: Analyzer =>
import global._
import definitions._
+ import TypersStats._
final def forArgMode(fun: Tree, mode: Int) =
if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode
@@ -47,10 +45,9 @@ trait Typers extends Modes {
def resetTyper() {
//println("resetTyper called")
resetContexts()
- resetNamer()
resetImplicits()
transformed.clear()
- resetSynthetics()
+ clearDocComments()
}
object UnTyper extends Traverser {
@@ -60,7 +57,7 @@ trait Typers extends Modes {
super.traverse(tree)
}
}
-/* needed for experimental version where eraly types can be type arguments
+/* needed for experimental version where early types can be type arguments
class EarlyMap(clazz: Symbol) extends TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(NoPrefix, sym, List()) if (sym hasFlag PRESUPER) =>
@@ -71,6 +68,10 @@ trait Typers extends Modes {
}
*/
+ sealed abstract class SilentResult[+T]
+ case class SilentTypeError(err: AbsTypeError) extends SilentResult[Nothing] { }
+ case class SilentResultValue[+T](value: T) extends SilentResult[T] { }
+
def newTyper(context: Context): Typer = new NormalTyper(context)
private class NormalTyper(context : Context) extends Typer(context)
@@ -78,16 +79,32 @@ trait Typers extends Modes {
// that are turned private by typedBlock
private final val SYNTHETIC_PRIVATE = TRANS_FLAG
- abstract class Typer(context0: Context) extends TyperDiagnostics {
+ private def isPastTyper = phase.id > currentRun.typerPhase.id
+
+ // To enable decent error messages when the typer crashes.
+ // TODO - this only catches trees which go through def typed,
+ // but there are all kinds of back ways - typedClassDef, etc. etc.
+ // Funnel everything through one doorway.
+ var lastTreeToTyper: Tree = EmptyTree
+
+ // when true:
+ // - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
+ // - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
+ // this is disabled by: -Xoldpatmat or interactive compilation (we run it for scaladoc due to SI-5933)
+ private def newPatternMatching = opt.virtPatmat && !forInteractive //&& !forScaladoc && (phase.id < currentRun.uncurryPhase.id)
+
+ abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors {
import context0.unit
import typeDebug.{ ptTree, ptBlock, ptLine }
+ import TyperErrorGen._
val infer = new Inferencer(context0) {
override def isCoercible(tp: Type, pt: Type): Boolean = undoLog undo { // #3281
tp.isError || pt.isError ||
context0.implicitsEnabled && // this condition prevents chains of views
inferView(EmptyTree, tp, pt, false) != EmptyTree
- }}
+ }
+ }
/** Find implicit arguments and pass them to given tree.
*/
@@ -95,20 +112,14 @@ trait Typers extends Modes {
case MethodType(params, _) =>
val argResultsBuff = new ListBuffer[SearchResult]()
val argBuff = new ListBuffer[Tree]()
+ // paramFailed cannot be initialized with params.exists(_.tpe.isError) because that would
+ // hide some valid errors for params preceding the erroneous one.
+ var paramFailed = false
def mkPositionalArg(argTree: Tree, paramName: Name) = argTree
def mkNamedArg(argTree: Tree, paramName: Name) = atPos(argTree.pos)(new AssignOrNamedArg(Ident(paramName), (argTree)))
var mkArg: (Tree, Name) => Tree = mkPositionalArg
- def errorMessage(paramName: Name, paramTp: Type) =
- paramTp.typeSymbol match {
- case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp)
- case _ =>
- "could not find implicit value for "+
- (if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX) "evidence parameter of type "
- else "parameter "+paramName+": ")+paramTp
- }
-
// DEPMETTODO: instantiate type vars that depend on earlier implicit args (see adapt (4.1))
//
// apply the substitutions (undet type param -> type) that were determined
@@ -118,15 +129,32 @@ trait Typers extends Modes {
for(ar <- argResultsBuff)
paramTp = paramTp.subst(ar.subst.from, ar.subst.to)
- val res = inferImplicit(fun, paramTp, true, false, context)
+ val res = if (paramFailed || (paramTp.isError && {paramFailed = true; true})) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, false, context)
argResultsBuff += res
- if (res != SearchFailure) {
+ if (res.isSuccess) {
argBuff += mkArg(res.tree, param.name)
} else {
mkArg = mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args
- if (!param.hasDefault)
- context.error(fun.pos, errorMessage(param.name, param.tpe))
+ if (!param.hasDefault && !paramFailed) {
+ context.errBuffer.find(_.kind == ErrorKinds.Divergent) match {
+ case Some(divergentImplicit) if !settings.Xdivergence211.value =>
+ // DivergentImplicit error has higher priority than "no implicit found"
+ // no need to issue the problem again if we are still in silent mode
+ if (context.reportErrors) {
+ context.issue(divergentImplicit)
+ context.condBufferFlush(_.kind == ErrorKinds.Divergent)
+ }
+ case Some(divergentImplicit: DivergentImplicitTypeError) if settings.Xdivergence211.value =>
+ if (context.reportErrors) {
+ context.issue(divergentImplicit.withPt(paramTp))
+ context.condBufferFlush(_.kind == ErrorKinds.Divergent)
+ }
+ case None =>
+ NoImplicitFoundError(fun, param)
+ }
+ paramFailed = true
+ }
/* else {
TODO: alternative (to expose implicit search failure more) -->
resolve argument, do type inference, keep emitting positional args, infer type params based on default value for arg
@@ -148,6 +176,9 @@ trait Typers extends Modes {
fun
}
+ def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree =
+ inferView(tree, from, to, reportAmbiguous, true)
+
/** Infer an implicit conversion (``view'') between two types.
* @param tree The tree which needs to be converted.
* @param from The source type of the conversion
@@ -155,23 +186,27 @@ trait Typers extends Modes {
* @param reportAmbiguous Should ambiguous implicit errors be reported?
* False iff we search for a view to find out
* whether one type is coercible to another.
+ * @param saveErrors Should ambiguous and divergent implicit errors that were buffered
+ * during the inference of a view be put into the original buffer.
+ * False iff we don't care about them.
*/
- def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree = {
- if (settings.debug.value) log("infer view from "+from+" to "+to)//debug
- if (phase.id > currentRun.typerPhase.id) EmptyTree
+ def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
+ debuglog("infer view from "+from+" to "+to)//debug
+ if (isPastTyper) EmptyTree
else from match {
case MethodType(_, _) => EmptyTree
case OverloadedType(_, _) => EmptyTree
case PolyType(_, _) => EmptyTree
case _ =>
def wrapImplicit(from: Type): Tree = {
- val result = inferImplicit(tree, functionType(List(from), to), reportAmbiguous, true, context)
- if (result.subst != EmptyTreeTypeSubstituter) result.subst traverse tree
+ val result = inferImplicit(tree, functionType(from :: Nil, to), reportAmbiguous, true, context, saveErrors)
+ if (result.subst != EmptyTreeTypeSubstituter) {
+ result.subst traverse tree
+ notifyUndetparamsInferred(result.subst.from, result.subst.to)
+ }
result.tree
}
- val result = wrapImplicit(from)
- if (result != EmptyTree) result
- else wrapImplicit(appliedType(ByNameParamClass.typeConstructor, List(from)))
+ wrapImplicit(from) orElse wrapImplicit(byNameType(from))
}
}
@@ -187,53 +222,76 @@ trait Typers extends Modes {
var context = context0
def context1 = context
+ def dropExistential(tp: Type): Type = tp match {
+ case ExistentialType(tparams, tpe) =>
+ new SubstWildcardMap(tparams).apply(tp)
+ case TypeRef(_, sym, _) if sym.isAliasType =>
+ val tp0 = tp.dealias
+ if (tp eq tp0) {
+ debugwarn(s"dropExistential did not progress dealiasing $tp, see SI-7126")
+ tp
+ } else {
+ val tp1 = dropExistential(tp0)
+ if (tp1 eq tp0) tp else tp1
+ }
+ case _ => tp
+ }
+
/** Check that <code>tree</code> is a stable expression.
*
* @param tree ...
* @return ...
*/
- def checkStable(tree: Tree): Tree =
- if (treeInfo.isPureExpr(tree)) tree
- else errorTree(
- tree,
- "stable identifier required, but "+tree+" found."+
- (if (isStableExceptVolatile(tree)) {
- val tpe = tree.symbol.tpe match {
- case PolyType(_, rtpe) => rtpe
- case t => t
- }
- "\n Note that "+tree.symbol+" is not stable because its type, "+tree.tpe+", is volatile."
- } else ""))
+ def checkStable(tree: Tree): Tree = (
+ if (treeInfo.isExprSafeToInline(tree)) tree
+ else if (tree.isErrorTyped) tree
+ else UnstableTreeError(tree)
+ )
/** Would tree be a stable (i.e. a pure expression) if the type
* of its symbol was not volatile?
*/
- private def isStableExceptVolatile(tree: Tree) = {
+ protected def isStableExceptVolatile(tree: Tree) = {
tree.hasSymbol && tree.symbol != NoSymbol && tree.tpe.isVolatile &&
{ val savedTpe = tree.symbol.info
val savedSTABLE = tree.symbol getFlag STABLE
tree.symbol setInfo AnyRefClass.tpe
tree.symbol setFlag STABLE
- val result = treeInfo.isPureExpr(tree)
+ val result = treeInfo.isExprSafeToInline(tree)
tree.symbol setInfo savedTpe
tree.symbol setFlag savedSTABLE
result
}
}
+ private def errorNotClass(tpt: Tree, found: Type) = { ClassTypeRequiredError(tpt, found); false }
+ private def errorNotStable(tpt: Tree, found: Type) = { TypeNotAStablePrefixError(tpt, found); false }
+
+ /** Check that `tpt` refers to a non-refinement class type */
+ def checkClassType(tpt: Tree): Boolean = {
+ val tpe = unwrapToClass(tpt.tpe)
+ isNonRefinementClassType(tpe) || errorNotClass(tpt, tpe)
+ }
- /** Check that `tpt' refers to a non-refinement class type */
- def checkClassType(tpt: Tree, existentialOK: Boolean, stablePrefix: Boolean) {
- def check(tpe: Type): Unit = tpe.normalize match {
- case TypeRef(pre, sym, _) if sym.isClass && !sym.isRefinementClass =>
- if (stablePrefix && phase.id <= currentRun.typerPhase.id && !pre.isStable)
- error(tpt.pos, "type "+pre+" is not a stable prefix")
- case ErrorType => ;
- case PolyType(_, restpe) => check(restpe)
- case ExistentialType(_, restpe) if existentialOK => check(restpe)
- case AnnotatedType(_, underlying, _) => check(underlying)
- case t => error(tpt.pos, "class type required but "+t+" found")
- }
- check(tpt.tpe)
+ /** Check that `tpt` refers to a class type with a stable prefix. */
+ def checkStablePrefixClassType(tpt: Tree): Boolean = {
+ val tpe = unwrapToStableClass(tpt.tpe)
+ def prefixIsStable = {
+ def checkPre = tpe match {
+ case TypeRef(pre, _, _) => pre.isStable || errorNotStable(tpt, pre)
+ case _ => false
+ }
+ // A type projection like X#Y can get by the stable check if the
+ // prefix is singleton-bounded, so peek at the tree too.
+ def checkTree = tpt match {
+ case SelectFromTypeTree(qual, _) => isSingleType(qual.tpe) || errorNotClass(tpt, tpe)
+ case _ => true
+ }
+ checkPre && checkTree
+ }
+
+ ( (isNonRefinementClassType(tpe) || errorNotClass(tpt, tpe))
+ && (isPastTyper || prefixIsStable)
+ )
}
/** Check that type <code>tp</code> is not a subtype of itself.
@@ -243,17 +301,17 @@ trait Typers extends Modes {
* @return <code>true</code> if <code>tp</code> is not a subtype of itself.
*/
def checkNonCyclic(pos: Position, tp: Type): Boolean = {
- def checkNotLocked(sym: Symbol): Boolean = {
+ def checkNotLocked(sym: Symbol) = {
sym.initialize
- sym.lockOK || {error(pos, "cyclic aliasing or subtyping involving "+sym); false}
+ sym.lockOK || { CyclicAliasingOrSubtypingError(pos, sym); false }
}
tp match {
case TypeRef(pre, sym, args) =>
- (checkNotLocked(sym)) && (
- !sym.isNonClassType ||
- checkNonCyclic(pos, appliedType(pre.memberInfo(sym), args), sym) // @M! info for a type ref to a type parameter now returns a polytype
- // @M was: checkNonCyclic(pos, pre.memberInfo(sym).subst(sym.typeParams, args), sym)
- )
+ checkNotLocked(sym) &&
+ ((!sym.isNonClassType) || checkNonCyclic(pos, appliedType(pre.memberInfo(sym), args), sym))
+ // @M! info for a type ref to a type parameter now returns a polytype
+ // @M was: checkNonCyclic(pos, pre.memberInfo(sym).subst(sym.typeParams, args), sym)
+
case SingleType(pre, sym) =>
checkNotLocked(sym)
/*
@@ -272,10 +330,8 @@ trait Typers extends Modes {
}
def checkNonCyclic(pos: Position, tp: Type, lockedSym: Symbol): Boolean = try {
- lockedSym.lock {
- throw new TypeError("illegal cyclic reference involving " + lockedSym)
- }
- checkNonCyclic(pos, tp)
+ if (!lockedSym.lock(CyclicReferenceError(pos, lockedSym))) false
+ else checkNonCyclic(pos, tp)
} finally {
lockedSym.unlock()
}
@@ -291,26 +347,24 @@ trait Typers extends Modes {
}
}
- def checkParamsConvertible(pos: Position, tpe: Type) {
- tpe match {
- case MethodType(formals, restpe) =>
- /*
- if (formals.exists(_.typeSymbol == ByNameParamClass) && formals.length != 1)
- error(pos, "methods with `=>'-parameter can be converted to function values only if they take no other parameters")
- if (formals exists (isRepeatedParamType(_)))
- error(pos, "methods with `*'-parameters cannot be converted to function values");
- */
- if (restpe.isDependent)
- error(pos, "method with dependent type "+tpe+" cannot be converted to function value")
- checkParamsConvertible(pos, restpe)
- case _ =>
- }
+ def checkParamsConvertible(tree: Tree, tpe0: Type) {
+ def checkParamsConvertible0(tpe: Type) =
+ tpe match {
+ case MethodType(formals, restpe) =>
+ /*
+ if (formals.exists(_.typeSymbol == ByNameParamClass) && formals.length != 1)
+ error(pos, "methods with `=>`-parameter can be converted to function values only if they take no other parameters")
+ if (formals exists (isRepeatedParamType(_)))
+ error(pos, "methods with `*`-parameters cannot be converted to function values");
+ */
+ if (tpe.isDependentMethodType)
+ DependentMethodTpeConversionToFunctionError(tree, tpe)
+ checkParamsConvertible(tree, restpe)
+ case _ =>
+ }
+ checkParamsConvertible0(tpe0)
}
- def checkStarPatOK(pos: Position, mode: Int) =
- if ((mode & STARmode) == 0 && phase.id <= currentRun.typerPhase.id)
- error(pos, "star patterns must correspond with varargs parameters")
-
/** Check that type of given tree does not contain local or private
* components.
*/
@@ -341,24 +395,21 @@ trait Typers extends Modes {
def locals[T <: Tree](scope: Scope, pt: Type, tree: T): T =
check(NoSymbol, scope, pt, tree)
- def check[T <: Tree](owner: Symbol, scope: Scope, pt: Type, tree: T): T = {
+ private def check[T <: Tree](owner: Symbol, scope: Scope, pt: Type, tree: T): T = {
this.owner = owner
this.scope = scope
hiddenSymbols = List()
val tp1 = apply(tree.tpe)
if (hiddenSymbols.isEmpty) tree setType tp1
- else if (hiddenSymbols exists (_.isErroneous)) setError(tree)
- else if (isFullyDefined(pt)) tree setType pt //todo: eliminate
- else if (tp1.typeSymbol.isAnonymousClass) // todo: eliminate
+ else if (hiddenSymbols exists (_.isErroneous)) HiddenSymbolWithError(tree)
+ else if (isFullyDefined(pt)) tree setType pt
+ else if (tp1.typeSymbol.isAnonymousClass)
check(owner, scope, pt, tree setType tp1.typeSymbol.classBound)
else if (owner == NoSymbol)
tree setType packSymbols(hiddenSymbols.reverse, tp1)
else if (!phase.erasedTypes) { // privates
val badSymbol = hiddenSymbols.head
- error(tree.pos,
- (if (badSymbol.isPrivate) "private " else "") + badSymbol +
- " escapes its defining scope as part of type "+tree.tpe)
- setError(tree)
+ SymbolEscapesScopeError(tree, badSymbol)
} else tree
}
@@ -395,7 +446,7 @@ trait Typers extends Modes {
if (!hiddenSymbols.isEmpty && hiddenSymbols.head == sym &&
sym.isAliasType && sameLength(sym.typeParams, args)) {
hiddenSymbols = hiddenSymbols.tail
- t.normalize
+ t.dealias
} else t
case SingleType(_, sym) =>
checkNoEscape(sym)
@@ -409,39 +460,69 @@ trait Typers extends Modes {
def reenterValueParams(vparamss: List[List[ValDef]]) {
for (vparams <- vparamss)
for (vparam <- vparams)
- vparam.symbol = context.scope enter vparam.symbol
+ context.scope enter vparam.symbol
}
def reenterTypeParams(tparams: List[TypeDef]): List[Symbol] =
for (tparam <- tparams) yield {
- tparam.symbol = context.scope enter tparam.symbol
+ context.scope enter tparam.symbol
tparam.symbol.deSkolemize
}
/** The qualifying class
* of a this or super with prefix <code>qual</code>.
+ * packageOk is equal false when qualifying class symbol
*/
- def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean): Symbol =
+ def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean) =
context.enclClass.owner.ownerChain.find(o => qual.isEmpty || o.isClass && o.name == qual) match {
- case Some(c) if packageOK || !c.isPackageClass =>
- c
- case _ =>
- error(
- tree.pos,
- if (qual.isEmpty) tree+" can be used only in a class, object, or template"
- else qual+" is not an enclosing class")
- NoSymbol
+ case Some(c) if packageOK || !c.isPackageClass => c
+ case _ => QualifyingClassError(tree, qual) ; NoSymbol
}
/** The typer for an expression, depending on where we are. If we are before a superclass
* call, this is a typer over a constructor context; otherwise it is the current typer.
*/
- def constrTyperIf(inConstr: Boolean): Typer =
+ final def constrTyperIf(inConstr: Boolean): Typer =
if (inConstr) {
- assert(context.undetparams.isEmpty)
+ assert(context.undetparams.isEmpty, context.undetparams)
newTyper(context.makeConstructorContext)
} else this
+ @inline
+ final def withCondConstrTyper[T](inConstr: Boolean)(f: Typer => T): T =
+ if (inConstr) {
+ assert(context.undetparams.isEmpty, context.undetparams)
+ val c = context.makeConstructorContext
+ typerWithLocalContext(c)(f)
+ } else {
+ f(this)
+ }
+
+ @inline
+ final def typerWithCondLocalContext[T](c: => Context)(cond: Boolean)(f: Typer => T): T =
+ if (cond) typerWithLocalContext(c)(f) else f(this)
+
+ @inline
+ final def typerWithLocalContext[T](c: Context)(f: Typer => T): T = {
+ val res = f(newTyper(c))
+ if (c.hasErrors)
+ context.updateBuffer(c.flushAndReturnBuffer())
+ res
+ }
+
+ @inline
+ final def typerReportAnyContextErrors[T](c: Context)(f: Typer => T): T = {
+ f(newTyper(c))
+ }
+
+ @inline
+ final def withSavedContext[T](c: Context)(f: => T) = {
+ val savedErrors = c.flushAndReturnBuffer()
+ val res = f
+ c.updateBuffer(savedErrors)
+ res
+ }
+
/** The typer for a label definition. If this is part of a template we
* first have to enter the label definition.
*/
@@ -493,19 +574,19 @@ trait Typers extends Modes {
private def makeAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): (Tree, Type) =
if (isInPackageObject(sym, pre.typeSymbol)) {
if (pre.typeSymbol == ScalaPackageClass && sym.isTerm) {
- // short cut some aliases. It seems that without that pattern matching
- // fails to notice exhaustiveness and to generate good code when
+ // short cut some aliases. It seems pattern matching needs this
+ // to notice exhaustiveness and to generate good code when
// List extractors are mixed with :: patterns. See Test5 in lists.scala.
def dealias(sym: Symbol) =
- ({ val t = gen.mkAttributedRef(sym) ; t.setPos(tree.pos) ; t }, sym.owner.thisType)
+ (atPos(tree.pos.makeTransparent) {gen.mkAttributedRef(sym)} setPos tree.pos, sym.owner.thisType)
sym.name match {
case nme.List => return dealias(ListModule)
- case nme.Seq => return dealias(SeqModule)
- case nme.Nil => return dealias(NilModule)
+ case nme.Seq => return dealias(SeqModule)
+ case nme.Nil => return dealias(NilModule)
case _ =>
}
}
- val qual = typedQualifier { atPos(tree.pos.focusStart) {
+ val qual = typedQualifier { atPos(tree.pos.makeTransparent) {
tree match {
case Ident(_) => Ident(nme.PACKAGEkw)
case Select(qual, _) => Select(qual, nme.PACKAGEkw)
@@ -526,53 +607,69 @@ trait Typers extends Modes {
/** Is `sym` defined in package object of package `pkg`?
*/
- private def isInPackageObject(sym: Symbol, pkg: Symbol) =
- pkg.isPackageClass && {
- sym.alternatives forall { sym =>
- !sym.owner.isPackage && {
- sym.owner.isPackageObjectClass &&
+ private def isInPackageObject(sym: Symbol, pkg: Symbol) = {
+ def isInPkgObj(sym: Symbol) =
+ !sym.owner.isPackage && {
+ sym.owner.isPackageObjectClass &&
sym.owner.owner == pkg ||
pkg.isInitialized && {
// need to be careful here to not get a cyclic reference during bootstrap
val pkgobj = pkg.info.member(nme.PACKAGEkw)
pkgobj.isInitialized &&
- (pkgobj.info.member(sym.name).alternatives contains sym)
+ (pkgobj.info.member(sym.name).alternatives contains sym)
}
- }
}
+ pkg.isPackageClass && {
+ if (sym.isOverloaded) sym.alternatives forall isInPkgObj
+ else isInPkgObj(sym)
}
+ }
/** Post-process an identifier or selection node, performing the following:
* 1. Check that non-function pattern expressions are stable
* 2. Check that packages and static modules are not used as values
* 3. Turn tree type into stable type if possible and required by context.
+ * 4. Give getClass calls a more precise type based on the type of the target of the call.
*/
private def stabilize(tree: Tree, pre: Type, mode: Int, pt: Type): Tree = {
if (tree.symbol.isOverloaded && !inFunMode(mode))
inferExprAlternative(tree, pt)
+
val sym = tree.symbol
+ def fail() = NotAValueError(tree, sym)
- if (tree.tpe.isError) tree
+ if (tree.isErrorTyped) tree
else if ((mode & (PATTERNmode | FUNmode)) == PATTERNmode && tree.isTerm) { // (1)
- if (sym.isValue) checkStable(tree)
- else errorTree(tree, sym+" is not a value")
+ if (sym.isValue) {
+ val tree1 = checkStable(tree)
+ // A module reference in a pattern has type Foo.type, not "object Foo"
+ if (sym.isModule && !sym.isMethod) tree1 setType singleType(pre, sym)
+ else tree1
+ }
+ else fail()
} else if ((mode & (EXPRmode | QUALmode)) == EXPRmode && !sym.isValue && !phase.erasedTypes) { // (2)
- errorTree(tree, sym+" is not a value")
+ fail()
} else {
if (sym.isStable && pre.isStable && !isByNameParamType(tree.tpe) &&
(isStableContext(tree, mode, pt) || sym.isModule && !sym.isMethod))
tree.setType(singleType(pre, sym))
- else tree
+ // To fully benefit from special casing the return type of
+ // getClass, we have to catch it immediately so expressions
+ // like x.getClass().newInstance() are typed with the type of x.
+ else if ( isGetClass(tree.symbol)
+ // TODO: If the type of the qualifier is inaccessible, we can cause private types
+ // to escape scope here, e.g. pos/t1107. I'm not sure how to properly handle this
+ // so for now it requires the type symbol be public.
+ && pre.typeSymbol.isPublic)
+ tree setType MethodType(Nil, getClassReturnType(pre))
+ else
+ tree
}
}
- private def isNarrowable(tpe: Type): Boolean = tpe match {
+ private def isNarrowable(tpe: Type): Boolean = unwrapWrapperTypes(tpe) match {
case TypeRef(_, _, _) | RefinedType(_, _) => true
- case ExistentialType(_, tpe1) => isNarrowable(tpe1)
- case AnnotatedType(_, tpe1, _) => isNarrowable(tpe1)
- case PolyType(_, tpe1) => isNarrowable(tpe1)
- case NullaryMethodType(tpe1) => isNarrowable(tpe1)
- case _ => !phase.erasedTypes
+ case _ => !phase.erasedTypes
}
/**
@@ -589,7 +686,7 @@ trait Typers extends Modes {
}
if (tree.tpe.isInstanceOf[MethodType] && pre.isStable && sym.tpe.params.isEmpty &&
(isStableContext(tree, mode, pt) || sym.isModule))
- tree.setType(MethodType(List(), singleType(pre, sym)))
+ tree.setType(MethodType(List(), singleType(pre, sym))) // TODO: should this be a NullaryMethodType?
else tree
}
@@ -606,15 +703,21 @@ trait Typers extends Modes {
}
def silent[T](op: Typer => T,
- reportAmbiguousErrors: Boolean = context.reportAmbiguousErrors,
- newtree: Tree = context.tree): Any /* in fact, TypeError or T */ = {
- val rawTypeStart = startCounter(rawTypeFailed)
- val findMemberStart = startCounter(findMemberFailed)
- val subtypeStart = startCounter(subtypeFailed)
- val failedSilentStart = startTimer(failedSilentNanos)
+ reportAmbiguousErrors: Boolean = context.ambiguousErrors,
+ newtree: Tree = context.tree): SilentResult[T] = {
+ val rawTypeStart = if (Statistics.canEnable) Statistics.startCounter(rawTypeFailed) else null
+ val findMemberStart = if (Statistics.canEnable) Statistics.startCounter(findMemberFailed) else null
+ val subtypeStart = if (Statistics.canEnable) Statistics.startCounter(subtypeFailed) else null
+ val failedSilentStart = if (Statistics.canEnable) Statistics.startTimer(failedSilentNanos) else null
+ def stopStats() = {
+ if (Statistics.canEnable) Statistics.stopCounter(rawTypeFailed, rawTypeStart)
+ if (Statistics.canEnable) Statistics.stopCounter(findMemberFailed, findMemberStart)
+ if (Statistics.canEnable) Statistics.stopCounter(subtypeFailed, subtypeStart)
+ if (Statistics.canEnable) Statistics.stopTimer(failedSilentNanos, failedSilentStart)
+ }
try {
- if (context.reportGeneralErrors ||
- reportAmbiguousErrors != context.reportAmbiguousErrors ||
+ if (context.reportErrors ||
+ reportAmbiguousErrors != context.ambiguousErrors ||
newtree != context.tree) {
val context1 = context.makeSilent(reportAmbiguousErrors, newtree)
context1.undetparams = context.undetparams
@@ -625,40 +728,93 @@ trait Typers extends Modes {
context.undetparams = context1.undetparams
context.savedTypeBounds = context1.savedTypeBounds
context.namedApplyBlockInfo = context1.namedApplyBlockInfo
- result
+ if (context1.hasErrors) {
+ stopStats()
+ SilentTypeError(context1.errBuffer.head)
+ } else {
+ // If we have a successful result, emit any warnings it created.
+ if (context1.hasWarnings) {
+ context1.flushAndReturnWarningsBuffer() foreach {
+ case (pos, msg) => unit.warning(pos, msg)
+ }
+ }
+ SilentResultValue(result)
+ }
} else {
- op(this)
+ assert(context.bufferErrors || isPastTyper, "silent mode is not available past typer")
+ withSavedContext(context){
+ val res = op(this)
+ val errorsToReport = context.flushAndReturnBuffer()
+ if (errorsToReport.isEmpty) SilentResultValue(res) else SilentTypeError(errorsToReport.head)
+ }
}
} catch {
case ex: CyclicReference => throw ex
case ex: TypeError =>
- stopCounter(rawTypeFailed, rawTypeStart)
- stopCounter(findMemberFailed, findMemberStart)
- stopCounter(subtypeFailed, subtypeStart)
- stopTimer(failedSilentNanos, failedSilentStart)
- ex
+ // fallback in case TypeError is still thrown
+ // @H this happens for example in cps annotation checker
+ stopStats()
+ SilentTypeError(TypeErrorWrapper(ex))
}
}
- /** Utility method: Try op1 on tree. If that gives an error try op2 instead.
+ /** Check whether feature given by `featureTrait` is enabled.
+ * If it is not, issue an error or a warning depending on whether the feature is required.
+ * @param construct A string expression that is substituted for "#" in the feature description string
+ * @param immediate When set, feature check is run immediately, otherwise it is run
+ * at the end of the typechecking run for the enclosing unit. This
+ * is done to avoid potential cyclic reference errors by implicits
+ * that are forced too early.
+ * @return if feature check is run immediately: true if feature is enabled, false otherwise
+ * if feature check is delayed or suppressed because we are past typer: true
*/
- def tryBoth(tree: Tree)(op1: (Typer, Tree) => Tree)(op2: (Typer, Tree) => Tree): Tree =
- silent(op1(_, tree)) match {
- case result1: Tree =>
- result1
- case ex1: TypeError =>
- silent(op2(_, resetAllAttrs(tree))) match {
- case result2: Tree =>
-// println("snd succeeded: "+result2)
- result2
- case ex2: TypeError =>
- reportTypeError(tree.pos, ex1)
- setError(tree)
+ def checkFeature(pos: Position, featureTrait: Symbol, construct: => String = "", immediate: Boolean = false): Boolean =
+ if (isPastTyper) true
+ else {
+ val nestedOwners =
+ featureTrait.owner.ownerChain.takeWhile(_ != languageFeatureModule.moduleClass).reverse
+ val featureName = (nestedOwners map (_.name + ".")).mkString + featureTrait.name
+ def action(): Boolean = {
+ def hasImport = inferImplicit(EmptyTree: Tree, featureTrait.tpe, true, false, context).isSuccess
+ def hasOption = settings.language.value exists (s => s == featureName || s == "_")
+ val OK = hasImport || hasOption
+ if (!OK) {
+ val Some(AnnotationInfo(_, List(Literal(Constant(featureDesc: String)), Literal(Constant(required: Boolean))), _)) =
+ featureTrait getAnnotation LanguageFeatureAnnot
+ val req = if (required) "needs to" else "should"
+ val fqname = "scala.language." + featureName
+ val explain = (
+ if (currentRun.reportedFeature contains featureTrait) "" else
+ s"""|
+ |This can be achieved by adding the import clause 'import $fqname'
+ |or by setting the compiler option -language:$featureName.
+ |See the Scala docs for value $fqname for a discussion
+ |why the feature $req be explicitly enabled.""".stripMargin
+ )
+ currentRun.reportedFeature += featureTrait
+
+ val msg = s"$featureDesc $req be enabled\nby making the implicit value $fqname visible.$explain" replace ("#", construct)
+ if (required) unit.error(pos, msg)
+ else currentRun.featureWarnings.warn(pos, msg)
}
+ OK
+ }
+ if (immediate) {
+ action()
+ } else {
+ unit.toCheck += action
+ true
+ }
}
- /** Perform the following adaptations of expression, pattern or type `tree' wrt to
- * given mode `mode' and given prototype `pt':
+ def checkExistentialsFeature(pos: Position, tpe: Type, prefix: String) = tpe match {
+ case extp: ExistentialType if !extp.isRepresentableWithWildcards =>
+ checkFeature(pos, ExistentialsFeature, prefix+" "+tpe)
+ case _ =>
+ }
+
+ /** Perform the following adaptations of expression, pattern or type `tree` wrt to
+ * given mode `mode` and given prototype `pt`:
* (-1) For expressions with annotated types, let AnnotationCheckers decide what to do
* (0) Convert expressions with constant types to literals (unless in interactive/scaladoc mode)
* (1) Resolve overloading, unless mode contains FUNmode
@@ -668,9 +824,9 @@ trait Typers extends Modes {
* unless followed by explicit type application.
* (4) Do the following to unapplied methods used as values:
* (4.1) If the method has only implicit parameters pass implicit arguments
- * (4.2) otherwise, if `pt' is a function type and method is not a constructor,
+ * (4.2) otherwise, if `pt` is a function type and method is not a constructor,
* convert to function by eta-expansion,
- * (4.3) otherwise, if the method is nullary with a result type compatible to `pt'
+ * (4.3) otherwise, if the method is nullary with a result type compatible to `pt`
* and it is not a constructor, apply it to ()
* otherwise issue an error
* (5) Convert constructors in a pattern as follows:
@@ -693,88 +849,59 @@ trait Typers extends Modes {
* (14) When in mode EXPRmode, apply a view
* If all this fails, error
*/
- protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree = tree.tpe match {
- case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (-1)
- adaptAnnotations(tree, mode, pt)
- case ct @ ConstantType(value) if inNoModes(mode, TYPEmode | FUNmode) && (ct <:< pt) && !forScaladoc && !forInteractive => // (0)
- val sym = tree.symbol
- if (sym != null && sym.isDeprecated) {
- val msg = sym.toString + sym.locationString +" is deprecated: "+ sym.deprecationMessage.getOrElse("")
- unit.deprecationWarning(tree.pos, msg)
- }
- treeCopy.Literal(tree, value)
- case OverloadedType(pre, alts) if !inFunMode(mode) => // (1)
- inferExprAlternative(tree, pt)
- adapt(tree, mode, pt, original)
- case NullaryMethodType(restpe) => // (2)
- adapt(tree setType restpe, mode, pt, original)
- case TypeRef(_, ByNameParamClass, List(arg))
- if ((mode & EXPRmode) != 0) => // (2)
- adapt(tree setType arg, mode, pt, original)
- case tr @ TypeRef(_, sym, _)
- if sym.isAliasType && tr.normalize.isInstanceOf[ExistentialType] &&
- ((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
- adapt(tree setType tr.normalize.skolemizeExistential(context.owner, tree), mode, pt, original)
- case et @ ExistentialType(_, _) if ((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
- adapt(tree setType et.skolemizeExistential(context.owner, tree), mode, pt, original)
- case PolyType(tparams, restpe) if inNoModes(mode, TAPPmode | PATTERNmode | HKmode) => // (3)
- // assert((mode & HKmode) == 0) //@M a PolyType in HKmode represents an anonymous type function,
- // we're in HKmode since a higher-kinded type is expected --> hence, don't implicitly apply it to type params!
- // ticket #2197 triggered turning the assert into a guard
- // I guess this assert wasn't violated before because type aliases weren't expanded as eagerly
- // (the only way to get a PolyType for an anonymous type function is by normalisation, which applies eta-expansion)
- // -- are we sure we want to expand aliases this early?
- // -- what caused this change in behaviour??
- val tparams1 = cloneSymbols(tparams)
- val tree1 = if (tree.isType) tree
- else TypeApply(tree, tparams1 map (tparam =>
- TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos //@M/tcpolyinfer: changed tparam.tpe to tparam.tpeHK
- context.undetparams ++= tparams1
- adapt(tree1 setType restpe.substSym(tparams, tparams1), mode, pt, original)
- case mt: MethodType if mt.isImplicit && ((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) => // (4.1)
- if (context.undetparams nonEmpty) { // (9) -- should revisit dropped condition `(mode & POLYmode) == 0`
- // dropped so that type args of implicit method are inferred even if polymorphic expressions are allowed
- // needed for implicits in 2.8 collection library -- maybe once #3346 is fixed, we can reinstate the condition?
- context.undetparams =
- inferExprInstance(tree, context.extractUndetparams(), pt,
- // approximate types that depend on arguments since dependency on implicit argument is like dependency on type parameter
- if(settings.YdepMethTpes.value) mt.approximate else mt,
- // if we are looking for a manifest, instantiate type to Nothing anyway,
- // as we would get ambiguity errors otherwise. Example
- // Looking for a manifest of Nil: This has many potential types,
- // so we need to instantiate to minimal type List[Nothing].
- keepNothings = false, // retract Nothing's that indicate failure, ambiguities in manifests are dealt with in manifestOfType
- useWeaklyCompatible = true) // #3808
- }
-
- val typer1 = constrTyperIf(treeInfo.isSelfOrSuperConstrCall(tree))
- if (original != EmptyTree && pt != WildcardType)
- typer1.silent(tpr => tpr.typed(tpr.applyImplicitArgs(tree), mode, pt)) match {
- case result: Tree => result
- case ex: TypeError =>
- if (settings.debug.value) log("fallback on implicits: "+tree+"/"+resetAllAttrs(original))
- val tree1 = typed(resetAllAttrs(original), mode, WildcardType)
- tree1.tpe = addAnnotations(tree1, tree1.tpe)
- if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
- }
- else
- typer1.typed(typer1.applyImplicitArgs(tree), mode, pt)
- case mt: MethodType
- if (((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) &&
- (context.undetparams.isEmpty || inPolyMode(mode))) =>
+ protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree = {
+
+ def adaptToImplicitMethod(mt: MethodType): Tree = {
+ if (context.undetparams.nonEmpty) { // (9) -- should revisit dropped condition `(mode & POLYmode) == 0`
+ // dropped so that type args of implicit method are inferred even if polymorphic expressions are allowed
+ // needed for implicits in 2.8 collection library -- maybe once #3346 is fixed, we can reinstate the condition?
+ context.undetparams = inferExprInstance(tree, context.extractUndetparams(), pt,
+ // approximate types that depend on arguments since dependency on implicit argument is like dependency on type parameter
+ mt.approximate,
+ keepNothings = false,
+ useWeaklyCompatible = true) // #3808
+ }
+
+ // avoid throwing spurious DivergentImplicit errors
+ if (context.hasErrors)
+ return setError(tree)
+
+ withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree)){ typer1 =>
+ if (original != EmptyTree && pt != WildcardType)
+ typer1.silent(tpr => {
+ val withImplicitArgs = tpr.applyImplicitArgs(tree)
+ if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway
+ else tpr.typed(withImplicitArgs, mode, pt)
+ }) match {
+ case SilentResultValue(result) =>
+ result
+ case _ =>
+ val resetTree = resetLocalAttrs(original)
+ debuglog(s"fallback on implicits: ${tree}/$resetTree")
+ val tree1 = typed(resetTree, mode, WildcardType)
+ // Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
+ // we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin.
+ tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, pt)
+ if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
+ }
+ else
+ typer1.typed(typer1.applyImplicitArgs(tree), mode, pt)
+ }
+ }
+ def instantiateToMethodType(mt: MethodType): Tree = {
val meth = tree match {
// a partial named application is a block (see comment in EtaExpansion)
case Block(_, tree1) => tree1.symbol
- case _ => tree.symbol
+ case _ => tree.symbol
}
- if (!meth.isConstructor && isFunctionType(pt)) { // (4.2)
- if (settings.debug.value) log("eta-expanding "+tree+":"+tree.tpe+" to "+pt)
- checkParamsConvertible(tree.pos, tree.tpe)
- val tree0 = etaExpand(context.unit, tree)
+ if (!meth.isConstructor && !meth.isTermMacro && isFunctionType(pt)) { // (4.2)
+ debuglog("eta-expanding " + tree + ":" + tree.tpe + " to " + pt)
+ checkParamsConvertible(tree, tree.tpe)
+ val tree0 = etaExpand(context.unit, tree, this)
// println("eta "+tree+" ---> "+tree0+":"+tree0.tpe+" undet: "+context.undetparams+ " mode: "+Integer.toHexString(mode))
- if (meth.typeParams.nonEmpty) {
+ if (context.undetparams.nonEmpty) {
// #2624: need to infer type arguments for eta expansion of a polymorphic method
// context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand)
// need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null
@@ -787,271 +914,498 @@ trait Typers extends Modes {
} else if (!meth.isConstructor && mt.params.isEmpty) { // (4.3)
adapt(typed(Apply(tree, List()) setPos tree.pos), mode, pt, original)
} else if (context.implicitsEnabled) {
- errorTree(tree, "missing arguments for "+meth+meth.locationString+
- (if (meth.isConstructor) ""
- else ";\nfollow this method with `_' if you want to treat it as a partially applied function"))
+ MissingArgsForMethodTpeError(tree, meth)
} else {
setError(tree)
}
- case _ =>
- def applyPossible = {
- def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
- if ((mode & TAPPmode) != 0)
- tree.tpe.typeParams.isEmpty && applyMeth.filter(! _.tpe.typeParams.isEmpty) != NoSymbol
- else
- applyMeth.filter(_.tpe.paramSectionCount > 0) != NoSymbol
+ }
+
+ def adaptType(): Tree = {
+ if (inFunMode(mode)) {
+ // todo. the commented line below makes sense for typechecking, say, TypeApply(Ident(`some abstract type symbol`), List(...))
+ // because otherwise Ident will have its tpe set to a TypeRef, not to a PolyType, and `typedTypeApply` will fail
+ // but this needs additional investigation, because it crashes t5228, gadts1 and maybe something else
+ // tree setType tree.tpe.normalize
+ tree
+ } else if (tree.hasSymbol && !tree.symbol.typeParams.isEmpty && !inHKMode(mode) &&
+ !(tree.symbol.isJavaDefined && context.unit.isJava)) { // (7)
+ // @M When not typing a higher-kinded type ((mode & HKmode) == 0)
+ // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
+ // and thus parameterized types must be applied to their type arguments
+ // @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
+ MissingTypeParametersError(tree)
+ } else if ( // (7.1) @M: check kind-arity
+ // @M: removed check for tree.hasSymbol and replace tree.symbol by tree.tpe.symbol (TypeTree's must also be checked here, and they don't directly have a symbol)
+ (inHKMode(mode)) &&
+ // @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
+ // (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
+ !sameLength(tree.tpe.typeParams, pt.typeParams) &&
+ !(tree.tpe.typeSymbol == AnyClass ||
+ tree.tpe.typeSymbol == NothingClass ||
+ pt == WildcardType)) {
+ // Check that the actual kind arity (tree.symbol.typeParams.length) conforms to the expected
+ // kind-arity (pt.typeParams.length). Full checks are done in checkKindBounds in Infer.
+ // Note that we treat Any and Nothing as kind-polymorphic.
+ // We can't perform this check when typing type arguments to an overloaded method before the overload is resolved
+ // (or in the case of an error type) -- this is indicated by pt == WildcardType (see case TypeApply in typed1).
+ KindArityMismatchError(tree, pt)
+ } else tree match { // (6)
+ case TypeTree() => tree
+ case _ => TypeTree(tree.tpe) setOriginal tree
}
- if (tree.isType) {
- if (inFunMode(mode)) {
- tree
- } else if (tree.hasSymbol && !tree.symbol.typeParams.isEmpty && !inHKMode(mode) &&
- !(tree.symbol.isJavaDefined && context.unit.isJava)) { // (7)
- // @M When not typing a higher-kinded type ((mode & HKmode) == 0)
- // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
- // and thus parameterized types must be applied to their type arguments
- // @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
- errorTree(tree, tree.symbol+" takes type parameters")
- tree setType tree.tpe
- } else if ( // (7.1) @M: check kind-arity
- // @M: removed check for tree.hasSymbol and replace tree.symbol by tree.tpe.symbol (TypeTree's must also be checked here, and they don't directly have a symbol)
- (inHKMode(mode)) &&
- // @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
- // (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
- !sameLength(tree.tpe.typeParams, pt.typeParams) &&
- !(tree.tpe.typeSymbol==AnyClass ||
- tree.tpe.typeSymbol==NothingClass ||
- pt == WildcardType )) {
- // Check that the actual kind arity (tree.symbol.typeParams.length) conforms to the expected
- // kind-arity (pt.typeParams.length). Full checks are done in checkKindBounds in Infer.
- // Note that we treat Any and Nothing as kind-polymorphic.
- // We can't perform this check when typing type arguments to an overloaded method before the overload is resolved
- // (or in the case of an error type) -- this is indicated by pt == WildcardType (see case TypeApply in typed1).
- errorTree(tree, tree.tpe+" takes "+countElementsAsString(tree.tpe.typeParams.length, "type parameter")+
- ", expected: "+countAsString(pt.typeParams.length))
- tree setType tree.tpe
- } else tree match { // (6)
- case TypeTree() => tree
- case _ => TypeTree(tree.tpe) setOriginal(tree)
+ }
+
+ /**
+ * To deal with the type slack between actual (run-time) types and statically known types, for each abstract type T,
+ * reflect its variance as a skolem that is upper-bounded by T (covariant position), or lower-bounded by T (contravariant).
+ *
+ * Consider the following example:
+ *
+ * class AbsWrapperCov[+A]
+ * case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
+ *
+ * def unwrap[T](x: AbsWrapperCov[T]): Wrapped[T] = x match {
+ * case Wrapper(wrapped) => // Wrapper's type parameter must not be assumed to be equal to T, it's *upper-bounded* by it
+ * wrapped // : Wrapped[_ <: T]
+ * }
+ *
+ * this method should type check if and only if Wrapped is covariant in its type parameter
+ *
+ * when inferring Wrapper's type parameter B from x's type AbsWrapperCov[T],
+ * we must take into account that x's actual type is AbsWrapperCov[Tactual] forSome {type Tactual <: T}
+ * as AbsWrapperCov is covariant in A -- in other words, we must not assume we know T exactly, all we know is its upper bound
+ *
+ * since method application is the only way to generate this slack between run-time and compile-time types (TODO: right!?),
+ * we can simply replace skolems that represent method type parameters as seen from the method's body
+ * by other skolems that are (upper/lower)-bounded by that type-parameter skolem
+ * (depending on the variance position of the skolem in the statically assumed type of the scrutinee, pt)
+ *
+ * see test/files/../t5189*.scala
+ */
+ def adaptConstrPattern(): Tree = { // (5)
+ def hasUnapplyMember(tp: Type) = reallyExists(unapplyMember(tp))
+ val overloadedExtractorOfObject = tree.symbol filter (sym => hasUnapplyMember(sym.tpe))
+ // if the tree's symbol's type does not define an extractor, maybe the tree's type does
+ // this is the case when we encounter an arbitrary tree as the target of an unapply call (rather than something that looks like a constructor call)
+ // (for now, this only happens due to wrapClassTagUnapply, but when we support parameterized extractors, it will become more common place)
+ val extractor = overloadedExtractorOfObject orElse unapplyMember(tree.tpe)
+ if (extractor != NoSymbol) {
+ // if we did some ad-hoc overloading resolution, update the tree's symbol
+ // do not update the symbol if the tree's symbol's type does not define an unapply member
+ // (e.g. since it's some method that returns an object with an unapply member)
+ if (overloadedExtractorOfObject != NoSymbol)
+ tree setSymbol overloadedExtractorOfObject
+
+ tree.tpe match {
+ case OverloadedType(pre, alts) => tree.tpe = overloadedType(pre, alts filter (alt => hasUnapplyMember(alt.tpe)))
+ case _ =>
}
- } else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode)) { // (5)
- val extractor = tree.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe)))
- if (extractor != NoSymbol) {
- tree setSymbol extractor
- val unapply = unapplyMember(extractor.tpe)
- val clazz = unapplyParameterType(unapply)
-
- if (unapply.isCase && clazz.isCase && !(clazz.ancestors exists (_.isCase))) {
- // convert synthetic unapply of case class to case class constructor
- val prefix = tree.tpe.prefix
- val tree1 = TypeTree(clazz.primaryConstructor.tpe.asSeenFrom(prefix, clazz.owner))
- .setOriginal(tree)
-
- inferConstructorInstance(tree1, clazz.typeParams, pt)
- tree1
- } else {
- tree
+ val unapply = unapplyMember(extractor.tpe)
+ val clazz = unapplyParameterType(unapply)
+
+ if (unapply.isCase && clazz.isCase && !(clazz.ancestors exists (_.isCase))) {
+ // convert synthetic unapply of case class to case class constructor
+ val prefix = tree.tpe.prefix
+ val tree1 = TypeTree(clazz.primaryConstructor.tpe.asSeenFrom(prefix, clazz.owner))
+ .setOriginal(tree)
+
+ val skolems = new mutable.ListBuffer[TypeSymbol]
+ object variantToSkolem extends VariantTypeMap {
+ def apply(tp: Type) = mapOver(tp) match {
+ case TypeRef(NoPrefix, tpSym, Nil) if variance != 0 && tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
+ // must initialize or tpSym.tpe might see random type params!!
+ // without this, we'll get very weird types inferred in test/scaladoc/run/SI-5933.scala
+ // TODO: why is that??
+ tpSym.initialize
+ val bounds = if (variance == 1) TypeBounds.upper(tpSym.tpe) else TypeBounds.lower(tpSym.tpe)
+ // origin must be the type param so we can deskolemize
+ val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
+ // println("mapping "+ tpSym +" to "+ skolem + " : "+ bounds +" -- pt= "+ pt +" in "+ context.owner +" at "+ context.tree )
+ skolems += skolem
+ skolem.tpe
+ case tp1 => tp1
+ }
}
+
+ // have to open up the existential and put the skolems in scope
+ // can't simply package up pt in an ExistentialType, because that takes us back to square one (List[_ <: T] == List[T] due to covariance)
+ val ptSafe = variantToSkolem(pt) // TODO: pt.skolemizeExistential(context.owner, tree) ?
+ val freeVars = skolems.toList
+
+ // use "tree" for the context, not context.tree: don't make another CaseDef context,
+ // as instantiateTypeVar's bounds would end up there
+ val ctorContext = context.makeNewScope(tree, context.owner)
+ freeVars foreach ctorContext.scope.enter
+ newTyper(ctorContext).infer.inferConstructorInstance(tree1, clazz.typeParams, ptSafe)
+
+ // simplify types without losing safety,
+ // so that we get rid of unnecessary type slack, and so that error messages don't unnecessarily refer to skolems
+ val extrapolate = new ExistentialExtrapolation(freeVars) extrapolate (_: Type)
+ val extrapolated = tree1.tpe match {
+ case MethodType(ctorArgs, res) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
+ ctorArgs foreach (p => p.info = extrapolate(p.info)) // no need to clone, this is OUR method type
+ copyMethodType(tree1.tpe, ctorArgs, extrapolate(res))
+ case tp => tp
+ }
+
+ // once the containing CaseDef has been type checked (see typedCase),
+ // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems)
+ tree1 setType extrapolated
} else {
- errorTree(tree, tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method")
- }
- } else if (inAllModes(mode, EXPRmode | FUNmode) &&
- !tree.tpe.isInstanceOf[MethodType] &&
- !tree.tpe.isInstanceOf[OverloadedType] &&
- applyPossible) {
- assert(!inHKMode(mode)) //@M
- val qual = adaptToName(tree, nme.apply) match {
- case id @ Ident(_) =>
- val pre = if (id.symbol.owner.isPackageClass) id.symbol.owner.thisType
- else if (id.symbol.owner.isClass)
- context.enclosingSubClassContext(id.symbol.owner).prefix
- else NoPrefix
- stabilize(id, pre, EXPRmode | QUALmode, WildcardType)
- case sel @ Select(qualqual, _) =>
- stabilize(sel, qualqual.tpe, EXPRmode | QUALmode, WildcardType)
- case other =>
- other
+ tree
}
- typed(atPos(tree.pos)(Select(qual, nme.apply)), mode, pt)
- } else if (!context.undetparams.isEmpty && !inPolyMode(mode)) { // (9)
- assert(!inHKMode(mode)) //@M
- instantiate(tree, mode, pt)
- } else if (tree.tpe <:< pt) {
- tree
} else {
- if (inPatternMode(mode)) {
- if ((tree.symbol ne null) && tree.symbol.isModule)
- inferModulePattern(tree, pt)
- if (isPopulated(tree.tpe, approximateAbstracts(pt)))
- return tree
+ CaseClassConstructorError(tree)
+ }
+ }
+
+ def insertApply(): Tree = {
+ assert(!inHKMode(mode), modeString(mode)) //@M
+ val adapted = adaptToName(tree, nme.apply)
+ def stabilize0(pre: Type): Tree = stabilize(adapted, pre, EXPRmode | QUALmode, WildcardType)
+ // TODO reconcile the overlap between Typers#stablize and TreeGen.stabilize
+ val qual = adapted match {
+ case This(_) =>
+ gen.stabilize(adapted)
+ case Ident(_) =>
+ val owner = adapted.symbol.owner
+ val pre =
+ if (owner.isPackageClass) owner.thisType
+ else if (owner.isClass) context.enclosingSubClassContext(owner).prefix
+ else NoPrefix
+ stabilize0(pre)
+ case Select(qualqual, _) =>
+ stabilize0(qualqual.tpe)
+ case other =>
+ other
+ }
+ typedPos(tree.pos, mode, pt) {
+ Select(qual setPos tree.pos.makeTransparent, nme.apply)
+ }
+ }
+
+ // begin adapt
+ tree.tpe match {
+ case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1)
+ adaptAnnotations(tree, this, mode, pt)
+ case ct @ ConstantType(value) if inNoModes(mode, TYPEmode | FUNmode) && (ct <:< pt) && !forScaladoc && !forInteractive => // (0)
+ val sym = tree.symbol
+ if (sym != null && sym.isDeprecated) {
+ val msg = sym.toString + sym.locationString + " is deprecated: " + sym.deprecationMessage.getOrElse("")
+ unit.deprecationWarning(tree.pos, msg)
}
- val tree1 = constfold(tree, pt) // (10) (11)
- if (tree1.tpe <:< pt) adapt(tree1, mode, pt, original)
- else {
- if ((mode & (EXPRmode | FUNmode)) == EXPRmode) {
- pt.normalize match {
- case TypeRef(_, sym, _) =>
- // note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially
- // infinite expansion if pt is constant type ()
- if (sym == UnitClass && tree.tpe <:< AnyClass.tpe) { // (12)
- if (settings.warnValueDiscard.value)
- context.unit.warning(tree.pos, "discarded non-Unit value")
- return typed(atPos(tree.pos)(Block(List(tree), Literal(()))), mode, pt)
+ treeCopy.Literal(tree, value)
+ case OverloadedType(pre, alts) if !inFunMode(mode) => // (1)
+ inferExprAlternative(tree, pt)
+ adapt(tree, mode, pt, original)
+ case NullaryMethodType(restpe) => // (2)
+ adapt(tree setType restpe, mode, pt, original)
+ case TypeRef(_, ByNameParamClass, List(arg)) if ((mode & EXPRmode) != 0) => // (2)
+ adapt(tree setType arg, mode, pt, original)
+ case tr @ TypeRef(_, sym, _) if sym.isAliasType && tr.dealias.isInstanceOf[ExistentialType] &&
+ ((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
+ adapt(tree setType tr.dealias.skolemizeExistential(context.owner, tree), mode, pt, original)
+ case et @ ExistentialType(_, _) if ((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
+ adapt(tree setType et.skolemizeExistential(context.owner, tree), mode, pt, original)
+ case PolyType(tparams, restpe) if inNoModes(mode, TAPPmode | PATTERNmode | HKmode) => // (3)
+ // assert((mode & HKmode) == 0) //@M a PolyType in HKmode represents an anonymous type function,
+ // we're in HKmode since a higher-kinded type is expected --> hence, don't implicitly apply it to type params!
+ // ticket #2197 triggered turning the assert into a guard
+ // I guess this assert wasn't violated before because type aliases weren't expanded as eagerly
+ // (the only way to get a PolyType for an anonymous type function is by normalisation, which applies eta-expansion)
+ // -- are we sure we want to expand aliases this early?
+ // -- what caused this change in behaviour??
+ val tparams1 = cloneSymbols(tparams)
+ val tree1 = if (tree.isType) tree
+ else TypeApply(tree, tparams1 map (tparam =>
+ TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos
+ context.undetparams ++= tparams1
+ notifyUndetparamsAdded(tparams1)
+ adapt(tree1 setType restpe.substSym(tparams, tparams1), mode, pt, original)
+ case mt: MethodType if mt.isImplicit && ((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) => // (4.1)
+ adaptToImplicitMethod(mt)
+
+ case mt: MethodType if (((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) &&
+ (context.undetparams.isEmpty || inPolyMode(mode))) && !(tree.symbol != null && tree.symbol.isTermMacro) =>
+ instantiateToMethodType(mt)
+
+ case _ =>
+ def shouldInsertApply(tree: Tree) = inAllModes(mode, EXPRmode | FUNmode) && (tree.tpe match {
+ case _: MethodType | _: OverloadedType | _: PolyType => false
+ case _ => applyPossible
+ })
+ def applyPossible = {
+ def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
+ dyna.acceptsApplyDynamic(tree.tpe) || (
+ if ((mode & TAPPmode) != 0)
+ tree.tpe.typeParams.isEmpty && applyMeth.filter(!_.tpe.typeParams.isEmpty) != NoSymbol
+ else
+ applyMeth.filter(_.tpe.paramSectionCount > 0) != NoSymbol
+ )
+ }
+ if (tree.isType)
+ adaptType()
+ else if (
+ inExprModeButNot(mode, FUNmode) && !tree.isDef && // typechecking application
+ tree.symbol != null && tree.symbol.isTermMacro && // of a macro
+ !isMacroExpansionSuppressed(tree))
+ macroExpand(this, tree, mode, pt)
+ else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode))
+ adaptConstrPattern()
+ else if (shouldInsertApply(tree))
+ insertApply()
+ else if (context.undetparams.nonEmpty && !inPolyMode(mode)) { // (9)
+ assert(!inHKMode(mode), modeString(mode)) //@M
+ instantiatePossiblyExpectingUnit(tree, mode, pt)
+ } else if (tree.tpe <:< pt) {
+ tree
+ } else {
+ def fallBack: Tree = {
+ if (inPatternMode(mode)) {
+ if ((tree.symbol ne null) && tree.symbol.isModule)
+ inferModulePattern(tree, pt)
+ if (isPopulated(tree.tpe, approximateAbstracts(pt)))
+ return tree
+ }
+ val tree1 = constfold(tree, pt) // (10) (11)
+ if (tree1.tpe <:< pt) adapt(tree1, mode, pt, original)
+ else {
+ if (inExprModeButNot(mode, FUNmode)) {
+ pt.dealias match {
+ case TypeRef(_, sym, _) =>
+ // note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially
+ // infinite expansion if pt is constant type ()
+ if (sym == UnitClass && tree.tpe <:< AnyClass.tpe) { // (12)
+ if (settings.warnValueDiscard.value)
+ context.unit.warning(tree.pos, "discarded non-Unit value")
+ return typedPos(tree.pos, mode, pt) {
+ Block(List(tree), Literal(Constant()))
+ }
+ } else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt)) {
+ if (settings.warnNumericWiden.value)
+ context.unit.warning(tree.pos, "implicit numeric widening")
+ return typedPos(tree.pos, mode, pt) {
+ Select(tree, "to" + sym.name)
+ }
+ }
+ case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (13)
+ return typed(adaptAnnotations(tree, this, mode, pt), mode, pt)
+ case _ =>
}
- else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt)) {
- if (settings.warnNumericWiden.value)
- context.unit.warning(tree.pos, "implicit numeric widening")
- return typed(atPos(tree.pos)(Select(tree, "to"+sym.name)), mode, pt)
+ if (!context.undetparams.isEmpty) {
+ return instantiate(tree, mode, pt)
+ }
+ if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) {
+ // (14); the condition prevents chains of views
+ debuglog("inferring view from " + tree.tpe + " to " + pt)
+ val coercion = inferView(tree, tree.tpe, pt, true)
+ // convert forward views of delegate types into closures wrapped around
+ // the delegate's apply method (the "Invoke" method, which was translated into apply)
+ if (forMSIL && coercion != null && isCorrespondingDelegate(tree.tpe, pt)) {
+ val meth: Symbol = tree.tpe.member(nme.apply)
+ debuglog("replacing forward delegate view with: " + meth + ":" + meth.tpe)
+ return typed(Select(tree, meth), mode, pt)
+ }
+ if (coercion != EmptyTree) {
+ def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
+ if (settings.logImplicitConv.value)
+ unit.echo(tree.pos, msg)
+
+ debuglog(msg)
+ val silentContext = context.makeImplicit(context.ambiguousErrors)
+ val res = newTyper(silentContext).typed(
+ new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
+ if (silentContext.hasErrors) context.issue(silentContext.errBuffer.head) else return res
+ }
}
- case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (13)
- return typed(adaptAnnotations(tree, mode, pt), mode, pt)
- case _ =>
- }
- if (!context.undetparams.isEmpty) {
- return instantiate(tree, mode, pt)
- }
- if (context.implicitsEnabled && !tree.tpe.isError && !pt.isError) {
- // (14); the condition prevents chains of views
- if (settings.debug.value) log("inferring view from "+tree.tpe+" to "+pt)
- val coercion = inferView(tree, tree.tpe, pt, true)
- // convert forward views of delegate types into closures wrapped around
- // the delegate's apply method (the "Invoke" method, which was translated into apply)
- if (forMSIL && coercion != null && isCorrespondingDelegate(tree.tpe, pt)) {
- val meth: Symbol = tree.tpe.member(nme.apply)
- if(settings.debug.value)
- log("replacing forward delegate view with: " + meth + ":" + meth.tpe)
- return typed(Select(tree, meth), mode, pt)
}
- if (coercion != EmptyTree) {
- if (settings.debug.value) log("inferred view from "+tree.tpe+" to "+pt+" = "+coercion+":"+coercion.tpe)
- return newTyper(context.makeImplicit(context.reportAmbiguousErrors)).typed(
- new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
+ if (settings.debug.value) {
+ log("error tree = " + tree)
+ if (settings.explaintypes.value) explainTypes(tree.tpe, pt)
+ }
+
+ val found = tree.tpe
+ if (!found.isErroneous && !pt.isErroneous) {
+ if ((!context.reportErrors && isPastTyper) || tree.attachments.get[MacroExpansionAttachment].isDefined) {
+ val (bound, req) = pt match {
+ case ExistentialType(qs, tpe) => (qs, tpe)
+ case _ => (Nil, pt)
+ }
+ val boundOrSkolems = bound ++ pt.skolemsExceptMethodTypeParams
+ if (boundOrSkolems.nonEmpty) {
+ // Ignore type errors raised in later phases that are due to mismatching types with existential skolems
+ // We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
+ // Here's my hypothsis why this happens. The pattern matcher defines a variable of type
+ //
+ // val x: T = expr
+ //
+ // where T is the type of expr, but T contains existential skolems ts.
+ // In that case, this value definition does not typecheck.
+ // The value definition
+ //
+ // val x: T forSome { ts } = expr
+ //
+ // would typecheck. Or one can simply leave out the type of the `val`:
+ //
+ // val x = expr
+ //
+ // SI-6029 shows another case where we also fail (in uncurry), but this time the expected
+ // type is an existential type.
+ //
+ // The reason for both failures have to do with the way we (don't) transform
+ // skolem types along with the trees that contain them. We'd need a
+ // radically different approach to do it. But before investing a lot of time to
+ // to do this (I have already sunk 3 full days with in the end futile attempts
+ // to consistently transform skolems and fix 6029), I'd like to
+ // investigate ways to avoid skolems completely.
+ //
+ // upd. The same problem happens when we try to typecheck the result of macro expansion against its expected type
+ // (which is the return type of the macro definition instantiated in the context of expandee):
+ //
+ // Test.scala:2: error: type mismatch;
+ // found : $u.Expr[Class[_ <: Object]]
+ // required: reflect.runtime.universe.Expr[Class[?0(in value <local Test>)]] where type ?0(in value <local Test>) <: Object
+ // scala.reflect.runtime.universe.reify(new Object().getClass)
+ // ^
+ // Therefore following Martin's advice I use this logic to recover from skolem errors after macro expansions
+ // (by adding the ` || tree.attachments.get[MacroExpansionAttachment].isDefined` clause to the conditional above).
+ //
+ log("recovering from existential or skolem type error in tree \n" + tree + "\nwith type " + tree.tpe + "\n expected type = " + pt + "\n context = " + context.tree)
+ return adapt(tree, mode, deriveTypeWithWildcards(boundOrSkolems)(pt))
+ }
+ }
+ // create an actual error
+ AdaptTypeError(tree, found, pt)
}
+ setError(tree)
}
}
- if (settings.debug.value) {
- log("error tree = "+tree)
- if (settings.explaintypes.value) explainTypes(tree.tpe, pt)
- }
- try {
- typeErrorTree(tree, tree.tpe, pt)
- } catch {
- case ex: TypeError =>
- if (phase.id > currentRun.typerPhase.id &&
- pt.existentialSkolems.nonEmpty) {
- // Ignore type errors raised in later phases that are due to mismatching types with existential skolems
- // We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
- // Here's my hypothsis why this happens. The pattern matcher defines a variable of type
- //
- // val x: T = expr
- //
- // where T is the type of expr, but T contains existential skolems ts.
- // In that case, this value definition does not typecheck.
- // The value definition
- //
- // val x: T forSome { ts } = expr
- //
- // would typecheck. Or one can simply leave out the type of the `val`:
- //
- // val x = expr
- context.unit.warning(tree.pos, "recovering from existential Skolem type error in tree \n"+tree+"\nwith type "+tree.tpe+"\n expected type = "+pt+"\n context = "+context.tree)
- adapt(tree, mode, pt.subst(pt.existentialSkolems, pt.existentialSkolems map (_ => WildcardType)))
- } else
- throw ex
- }
+ fallBack
}
- }
+ }
}
- /**
- * @param tree ...
- * @param mode ...
- * @param pt ...
- * @return ...
- */
def instantiate(tree: Tree, mode: Int, pt: Type): Tree = {
inferExprInstance(tree, context.extractUndetparams(), pt)
adapt(tree, mode, pt)
}
+ /** If the expected type is Unit: try instantiating type arguments
+ * with expected type Unit, but if that fails, try again with pt = WildcardType
+ * and discard the expression.
+ */
+ def instantiateExpectingUnit(tree: Tree, mode: Int): Tree = {
+ val savedUndetparams = context.undetparams
+ silent(_.instantiate(tree, mode, UnitClass.tpe)) match {
+ case SilentResultValue(t) => t
+ case _ =>
+ context.undetparams = savedUndetparams
+ val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant())))
+ typed(valueDiscard, mode, UnitClass.tpe)
+ }
+ }
- def adaptToMember(qual: Tree, searchTemplate: Type): Tree = {
- var qtpe = qual.tpe.widen
- if (qual.isTerm &&
- ((qual.symbol eq null) || !qual.symbol.isTerm || qual.symbol.isValue) &&
- phase.id <= currentRun.typerPhase.id && !qtpe.isError &&
- qtpe.typeSymbol != NullClass && qtpe.typeSymbol != NothingClass && qtpe != WildcardType &&
- !qual.isInstanceOf[ApplyImplicitView] && // don't chain views
- context.implicitsEnabled) { // don't try to adapt a top-level type that's the subject of an implicit search
- // this happens because, if isView, typedImplicit tries to apply the "current" implicit value to
- // a value that needs to be coerced, so we check whether the implicit value has an `apply` method
- // (if we allow this, we get divergence, e.g., starting at `conforms` during ant quick.bin)
- // note: implicit arguments are still inferred (this kind of "chaining" is allowed)
- if (qtpe.normalize.isInstanceOf[ExistentialType]) {
- qtpe = qtpe.normalize.skolemizeExistential(context.owner, qual) // open the existential
- qual setType qtpe
- }
- val coercion = inferView(qual, qual.tpe, searchTemplate, true)
- if (coercion != EmptyTree)
- typedQualifier(atPos(qual.pos)(new ApplyImplicitView(coercion, List(qual))))
- else
- qual
- } else {
- qual
+ def instantiatePossiblyExpectingUnit(tree: Tree, mode: Int, pt: Type): Tree = {
+ if (inExprModeButNot(mode, FUNmode) && pt.typeSymbol == UnitClass)
+ instantiateExpectingUnit(tree, mode)
+ else
+ instantiate(tree, mode, pt)
+ }
+
+ private def isAdaptableWithView(qual: Tree) = {
+ val qtpe = qual.tpe.widen
+ ( !isPastTyper
+ && qual.isTerm
+ && !qual.isInstanceOf[Super]
+ && ((qual.symbol eq null) || !qual.symbol.isTerm || qual.symbol.isValue)
+ && !qtpe.isError
+ && !qtpe.typeSymbol.isBottomClass
+ && qtpe != WildcardType
+ && !qual.isInstanceOf[ApplyImplicitView] // don't chain views
+ && (context.implicitsEnabled || context.enrichmentEnabled)
+ // Elaborating `context.implicitsEnabled`:
+ // don't try to adapt a top-level type that's the subject of an implicit search
+ // this happens because, if isView, typedImplicit tries to apply the "current" implicit value to
+ // a value that needs to be coerced, so we check whether the implicit value has an `apply` method.
+ // (If we allow this, we get divergence, e.g., starting at `conforms` during ant quick.bin)
+ // Note: implicit arguments are still inferred (this kind of "chaining" is allowed)
+ )
+ }
+
+ def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree = {
+ if (isAdaptableWithView(qual)) {
+ qual.tpe.dealiasWiden match {
+ case et: ExistentialType =>
+ qual setType et.skolemizeExistential(context.owner, qual) // open the existential
+ case _ =>
+ }
+ inferView(qual, qual.tpe, searchTemplate, reportAmbiguous, saveErrors) match {
+ case EmptyTree => qual
+ case coercion =>
+ if (settings.logImplicitConv.value)
+ unit.echo(qual.pos,
+ "applied implicit conversion from %s to %s = %s".format(
+ qual.tpe, searchTemplate, coercion.symbol.defString))
+
+ typedQualifier(atPos(qual.pos)(new ApplyImplicitView(coercion, List(qual))))
+ }
}
+ else qual
}
- /** Try to apply an implicit conversion to `qual' to that it contains
- * a method `name` which can be applied to arguments `args' with expected type `pt'.
- * If `pt' is defined, there is a fallback to try again with pt = ?.
+ /** Try to apply an implicit conversion to `qual` to that it contains
+ * a method `name` which can be applied to arguments `args` with expected type `pt`.
+ * If `pt` is defined, there is a fallback to try again with pt = ?.
* This helps avoiding propagating result information too far and solves
* #1756.
- * If no conversion is found, return `qual' unchanged.
+ * If no conversion is found, return `qual` unchanged.
*
*/
- def adaptToArguments(qual: Tree, name: Name, args: List[Tree], pt: Type): Tree = {
+ def adaptToArguments(qual: Tree, name: Name, args: List[Tree], pt: Type, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
def doAdapt(restpe: Type) =
//util.trace("adaptToArgs "+qual+", name = "+name+", argtpes = "+(args map (_.tpe))+", pt = "+pt+" = ")
- adaptToMember(qual, HasMethodMatching(name, args map (_.tpe), restpe))
+ adaptToMember(qual, HasMethodMatching(name, args map (_.tpe), restpe), reportAmbiguous, saveErrors)
if (pt != WildcardType) {
silent(_ => doAdapt(pt)) match {
- case result: Tree if result != qual =>
+ case SilentResultValue(result) if result != qual =>
result
case _ =>
- if (settings.debug.value) log("fallback on implicits in adaptToArguments: "+qual+" . "+name)
+ debuglog("fallback on implicits in adaptToArguments: "+qual+" . "+name)
doAdapt(WildcardType)
}
} else
doAdapt(pt)
}
- /** Try o apply an implicit conversion to `qual' to that it contains
- * a method `name`. If that's ambiguous try taking arguments into account using `adaptToArguments`.
+ /** Try to apply an implicit conversion to `qual` so that it contains
+ * a method `name`. If that's ambiguous try taking arguments into
+ * account using `adaptToArguments`.
*/
- def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Int): Tree = {
- try {
- adaptToMember(qual, HasMember(name))
- } catch {
- case ex: TypeError =>
- // this happens if implicits are ambiguous; try again with more context info.
- // println("last ditch effort: "+qual+" . "+name)
+ def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Int, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
+ def onError(reportError: => Tree): Tree = {
context.tree match {
- case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty => // try handling the arguments
- // println("typing args: "+args)
- silent(_.typedArgs(args, mode)) match {
- case args: List[_] =>
- adaptToArguments(qual, name, args.asInstanceOf[List[Tree]], WildcardType)
- case _ =>
- throw ex
+ case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty =>
+ silent(_.typedArgs(args.map(_.duplicate), mode)) match {
+ case SilentResultValue(args) =>
+ if (args exists (_.isErrorTyped))
+ reportError
+ else
+ adaptToArguments(qual, name, args, WildcardType, reportAmbiguous, saveErrors)
+ case _ =>
+ reportError
}
case _ =>
- // println("not in an apply: "+context.tree+"/"+tree)
- throw ex
+ reportError
}
}
+ silent(_.adaptToMember(qual, HasMember(name), false)) match {
+ case SilentResultValue(res) => res
+ case SilentTypeError(err) => onError({if (reportAmbiguous) { context.issue(err) }; setError(tree)})
+ }
}
- /** Try to apply an implicit conversion to `qual' to that it contains a
+ /** Try to apply an implicit conversion to `qual` to that it contains a
* member `name` of arbitrary type.
- * If no conversion is found, return `qual' unchanged.
+ * If no conversion is found, return `qual` unchanged.
*/
def adaptToName(qual: Tree, name: Name) =
if (member(qual, name) != NoSymbol) qual
@@ -1060,27 +1414,115 @@ trait Typers extends Modes {
private def typePrimaryConstrBody(clazz : Symbol, cbody: Tree, tparams: List[Symbol], enclTparams: List[Symbol], vparamss: List[List[ValDef]]): Tree = {
// XXX: see about using the class's symbol....
enclTparams foreach (sym => context.scope.enter(sym))
- namer.enterValueParams(context.owner, vparamss)
+ namer.enterValueParams(vparamss)
typed(cbody)
}
private def validateNoCaseAncestor(clazz: Symbol) = {
if (!phase.erasedTypes) {
for (ancestor <- clazz.ancestors find (_.isCase)) {
- unit.deprecationWarning(clazz.pos, (
- "case class `%s' has case ancestor `%s'. Case-to-case inheritance has potentially "+
- "dangerous bugs which are unlikely to be fixed. You are strongly encouraged to "+
- "instead use extractors to pattern match on non-leaf nodes."
- ).format(clazz, ancestor))
+ unit.error(clazz.pos, (
+ "case %s has case ancestor %s, but case-to-case inheritance is prohibited."+
+ " To overcome this limitation, use extractors to pattern match on non-leaf nodes."
+ ).format(clazz, ancestor.fullName))
+ }
+ }
+ }
+
+ private def checkEphemeral(clazz: Symbol, body: List[Tree]) = {
+ // NOTE: Code appears to be messy in this method for good reason: it clearly
+ // communicates the fact that it implements rather ad-hoc, arbitrary and
+ // non-regular set of rules that identify features that interact badly with
+ // value classes. This code can be cleaned up a lot once implementation
+ // restrictions are addressed.
+ val isValueClass = !clazz.isTrait
+ def where = if (isValueClass) "value class" else "universal trait extending from class Any"
+ def implRestriction(tree: Tree, what: String) =
+ unit.error(tree.pos, s"implementation restriction: $what is not allowed in $where" +
+ "\nThis restriction is planned to be removed in subsequent releases.")
+ /**
+ * Deeply traverses the tree in search of constructs that are not allowed
+ * in value classes (at any nesting level).
+ *
+ * All restrictions this object imposes are probably not fundamental but require
+ * fair amount of work and testing. We are conservative for now when it comes
+ * to allowing language features to interact with value classes.
+ * */
+ object checkEphemeralDeep extends Traverser {
+ override def traverse(tree: Tree): Unit = if (isValueClass) {
+ tree match {
+ case _: ModuleDef =>
+ //see https://issues.scala-lang.org/browse/SI-6359
+ implRestriction(tree, "nested object")
+ //see https://issues.scala-lang.org/browse/SI-6444
+ //see https://issues.scala-lang.org/browse/SI-6463
+ case cd: ClassDef if !cd.symbol.isAnonymousClass => // Don't warn about partial functions, etc. SI-7571
+ implRestriction(tree, "nested class") // avoiding Type Tests that might check the $outer pointer.
+ case Select(sup @ Super(qual, mix), selector) if selector != nme.CONSTRUCTOR && qual.symbol == clazz && mix != tpnme.EMPTY =>
+ //see https://issues.scala-lang.org/browse/SI-6483
+ implRestriction(sup, "qualified super reference")
+ case _ =>
+ }
+ super.traverse(tree)
+ }
+ }
+ for (stat <- body) {
+ def notAllowed(what: String) = unit.error(stat.pos, s"$what is not allowed in $where")
+ stat match {
+ // see https://issues.scala-lang.org/browse/SI-6444
+ // see https://issues.scala-lang.org/browse/SI-6463
+ case ClassDef(mods, _, _, _) if isValueClass =>
+ implRestriction(stat, s"nested ${ if (mods.isTrait) "trait" else "class" }")
+ case _: Import | _: ClassDef | _: TypeDef | EmptyTree => // OK
+ case DefDef(_, name, _, _, _, rhs) =>
+ if (stat.symbol.isAuxiliaryConstructor)
+ notAllowed("secondary constructor")
+ else if (isValueClass && (name == nme.equals_ || name == nme.hashCode_) && !stat.symbol.isSynthetic)
+ notAllowed(s"redefinition of $name method. See SIP-15, criterion 4.")
+ else if (stat.symbol != null && stat.symbol.isParamAccessor)
+ notAllowed("additional parameter")
+ checkEphemeralDeep.traverse(rhs)
+ case _: ValDef =>
+ notAllowed("field definition")
+ case _: ModuleDef =>
+ //see https://issues.scala-lang.org/browse/SI-6359
+ implRestriction(stat, "nested object")
+ case _ =>
+ notAllowed("this statement")
+ }
+ }
+ }
+
+ private def validateDerivedValueClass(clazz: Symbol, body: List[Tree]) = {
+ if (clazz.isTrait)
+ unit.error(clazz.pos, "only classes (not traits) are allowed to extend AnyVal")
+ if (!clazz.isStatic)
+ unit.error(clazz.pos, "value class may not be a "+
+ (if (clazz.owner.isTerm) "local class" else "member of another class"))
+ if (!clazz.isPrimitiveValueClass) {
+ clazz.info.decls.toList.filter(acc => acc.isMethod && acc.isParamAccessor) match {
+ case List(acc) =>
+ def isUnderlyingAcc(sym: Symbol) =
+ sym == acc || acc.hasAccessorFlag && sym == acc.accessed
+ if (acc.accessBoundary(clazz) != rootMirror.RootClass)
+ unit.error(acc.pos, "value class needs to have a publicly accessible val parameter")
+ else if (acc.tpe.typeSymbol.isDerivedValueClass)
+ unit.error(acc.pos, "value class may not wrap another user-defined value class")
+ checkEphemeral(clazz, body filterNot (stat => isUnderlyingAcc(stat.symbol)))
+ case x =>
+ unit.error(clazz.pos, "value class needs to have exactly one public val parameter")
}
}
+
+ for (tparam <- clazz.typeParams)
+ if (tparam hasAnnotation definitions.SpecializedClass)
+ unit.error(tparam.pos, "type parameter of value class may not be specialized")
}
def parentTypes(templ: Template): List[Tree] =
- if (templ.parents.isEmpty) List()
+ if (templ.parents.isEmpty) List(atPos(templ.pos)(TypeTree(AnyRefClass.tpe)))
else try {
val clazz = context.owner
-
// Normalize supertype and mixins so that supertype is always a class, not a trait.
var supertpt = typedTypeConstructor(templ.parents.head)
val firstParent = supertpt.tpe.typeSymbol
@@ -1088,11 +1530,13 @@ trait Typers extends Modes {
// If first parent is a trait, make it first mixin and add its superclass as first parent
while ((supertpt.tpe.typeSymbol ne null) && supertpt.tpe.typeSymbol.initialize.isTrait) {
val supertpt1 = typedType(supertpt)
- if (!supertpt1.tpe.isError) {
+ if (!supertpt1.isErrorTyped) {
mixins = supertpt1 :: mixins
- supertpt = TypeTree(supertpt1.tpe.parents.head) setPos supertpt.pos.focus
+ supertpt = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
}
}
+ if (supertpt.tpe.typeSymbol == AnyClass && firstParent.isTrait)
+ supertpt.tpe = AnyRefClass.tpe
// Determine
// - supertparams: Missing type parameters from supertype
@@ -1100,7 +1544,7 @@ trait Typers extends Modes {
val supertparams = if (supertpt.hasSymbol) supertpt.symbol.typeParams else List()
var supertpe = supertpt.tpe
if (!supertparams.isEmpty)
- supertpe = PolyType(supertparams, appliedType(supertpe, supertparams map (_.tpe)))
+ supertpe = PolyType(supertparams, appliedType(supertpe, supertparams map (_.tpeHK)))
// A method to replace a super reference by a New in a supercall
def transformSuperCall(scall: Tree): Tree = (scall: @unchecked) match {
@@ -1116,42 +1560,45 @@ trait Typers extends Modes {
treeInfo.firstConstructor(templ.body) match {
case constr @ DefDef(_, _, _, vparamss, _, cbody @ Block(cstats, cunit)) =>
// Convert constructor body to block in environment and typecheck it
- val (preSuperStats, rest) = cstats span (!treeInfo.isSuperConstrCall(_))
- val (scall, upToSuperStats) =
- if (rest.isEmpty) (EmptyTree, preSuperStats)
- else (rest.head, preSuperStats :+ rest.head)
- val cstats1: List[Tree] = upToSuperStats map (_.duplicate)
- val cbody1 = scall match {
- case Apply(_, _) =>
- treeCopy.Block(cbody, cstats1.init,
- if (supertparams.isEmpty) cunit.duplicate
- else transformSuperCall(scall))
- case _ =>
- treeCopy.Block(cbody, cstats1, cunit.duplicate)
+ val (preSuperStats, superCall) = {
+ val (stats, rest) = cstats span (x => !treeInfo.isSuperConstrCall(x))
+ (stats map (_.duplicate), if (rest.isEmpty) EmptyTree else rest.head.duplicate)
}
-
+ val cstats1 = if (superCall == EmptyTree) preSuperStats else preSuperStats :+ superCall
+ val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall match {
+ case Apply(_, _) if supertparams.nonEmpty => transformSuperCall(superCall)
+ case _ => cunit.duplicate
+ })
val outercontext = context.outer
- assert(clazz != NoSymbol)
+
+ assert(clazz != NoSymbol, templ)
val cscope = outercontext.makeNewScope(constr, outercontext.owner)
val cbody2 = newTyper(cscope) // called both during completion AND typing.
.typePrimaryConstrBody(clazz,
cbody1, supertparams, clazz.unsafeTypeParams, vparamss map (_.map(_.duplicate)))
- scall match {
+ superCall match {
case Apply(_, _) =>
- val sarg = treeInfo.firstArgument(scall)
+ val treeInfo.Applied(_, _, argss) = superCall
+ val sarg = argss.flatten.headOption.getOrElse(EmptyTree)
if (sarg != EmptyTree && supertpe.typeSymbol != firstParent)
- error(sarg.pos, firstParent+" is a trait; does not take constructor arguments")
- if (!supertparams.isEmpty) supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos.focus
+ ConstrArgsInTraitParentTpeError(sarg, firstParent)
+ if (!supertparams.isEmpty)
+ supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos
case _ =>
- if (!supertparams.isEmpty) error(supertpt.pos, "missing type arguments")
+ if (!supertparams.isEmpty)
+ MissingTypeArgumentsParentTpeError(supertpt)
}
- (cstats1, treeInfo.preSuperFields(templ.body)).zipped map {
- (ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe
- }
+ val preSuperVals = treeInfo.preSuperFields(templ.body)
+ if (preSuperVals.isEmpty && preSuperStats.nonEmpty)
+ debugwarn("Wanted to zip empty presuper val list with " + preSuperStats)
+ else
+ map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe)
+
case _ =>
- if (!supertparams.isEmpty) error(supertpt.pos, "missing type arguments")
+ if (!supertparams.isEmpty)
+ MissingTypeArgumentsParentTpeError(supertpt)
}
/* experimental: early types as type arguments
val hasEarlyTypes = templ.body exists (treeInfo.isEarlyTypeDef)
@@ -1163,11 +1610,33 @@ trait Typers extends Modes {
*/
//Console.println("parents("+clazz") = "+supertpt :: mixins);//DEBUG
- supertpt :: mixins mapConserve (tpt => checkNoEscaping.privates(clazz, tpt))
- } catch {
+
+ // Certain parents are added in the parser before it is known whether
+ // that class also declared them as parents. For instance, this is an
+ // error unless we take corrective action here:
+ //
+ // case class Foo() extends Serializable
+ //
+ // So we strip the duplicates before typer.
+ def fixDuplicates(remaining: List[Tree]): List[Tree] = remaining match {
+ case Nil => Nil
+ case x :: xs =>
+ val sym = x.symbol
+ x :: fixDuplicates(
+ if (isPossibleSyntheticParent(sym)) xs filterNot (_.symbol == sym)
+ else xs
+ )
+ }
+
+ fixDuplicates(supertpt :: mixins) mapConserve (tpt => checkNoEscaping.privates(clazz, tpt))
+ }
+ catch {
case ex: TypeError =>
- templ.tpe = null
- reportTypeError(templ.pos, ex)
+ // fallback in case of cyclic errors
+ // @H none of the tests enter here but I couldn't rule it out
+ log("Type error calculating parents in template " + templ)
+ log("Error: " + ex)
+ ParentTypesError(templ, ex)
List(TypeTree(AnyRefClass.tpe))
}
@@ -1186,32 +1655,41 @@ trait Typers extends Modes {
* </ul>
*/
def validateParentClasses(parents: List[Tree], selfType: Type) {
+ val pending = ListBuffer[AbsTypeError]()
+ def validateDynamicParent(parent: Symbol, parentPos: Position) =
+ if (parent == DynamicClass) checkFeature(parentPos, DynamicsFeature)
- def validateParentClass(parent: Tree, superclazz: Symbol) {
- if (!parent.tpe.isError) {
+ def validateParentClass(parent: Tree, superclazz: Symbol) =
+ if (!parent.isErrorTyped) {
val psym = parent.tpe.typeSymbol.initialize
- checkClassType(parent, false, true)
+
+ checkStablePrefixClassType(parent)
+
if (psym != superclazz) {
if (psym.isTrait) {
val ps = psym.info.parents
if (!ps.isEmpty && !superclazz.isSubClass(ps.head.typeSymbol))
- error(parent.pos, "illegal inheritance; super"+superclazz+
- "\n is not a subclass of the super"+ps.head.typeSymbol+
- "\n of the mixin " + psym)
+ pending += ParentSuperSubclassError(parent, superclazz, ps.head.typeSymbol, psym)
} else {
- error(parent.pos, psym+" needs to be a trait to be mixed in")
+ pending += ParentNotATraitMixinError(parent, psym)
}
}
- if (psym.isFinal) {
- error(parent.pos, "illegal inheritance from final "+psym)
+
+ if (psym.isFinal)
+ pending += ParentFinalInheritanceError(parent, psym)
+
+ if (psym.hasDeprecatedInheritanceAnnotation) {
+ val suffix = psym.deprecatedInheritanceMessage map (": " + _) getOrElse ""
+ val msg = s"inheritance from ${psym.fullLocationString} is deprecated$suffix"
+ unit.deprecationWarning(parent.pos, msg)
}
- if (psym.isSealed && !phase.erasedTypes) {
- // AnyVal is sealed, but we have to let the value classes through manually
- if (context.unit.source.file == psym.sourceFile || isValueClass(context.owner))
+
+ if (psym.isSealed && !phase.erasedTypes)
+ if (context.unit.source.file == psym.sourceFile)
psym addChild context.owner
else
- error(parent.pos, "illegal inheritance from sealed "+psym)
- }
+ pending += ParentSealedInheritanceError(parent, psym)
+
if (!(selfType <:< parent.tpe.typeOfThis) &&
!phase.erasedTypes &&
!context.owner.isSynthetic && // don't check synthetic concrete classes for virtuals (part of DEVIRTUALIZE)
@@ -1222,18 +1700,20 @@ trait Typers extends Modes {
//Console.println(context.owner);//DEBUG
//Console.println(context.owner.unsafeTypeParams);//DEBUG
//Console.println(List.fromArray(context.owner.info.closure));//DEBUG
- error(parent.pos, "illegal inheritance;\n self-type "+
- selfType+" does not conform to "+parent +
- "'s selftype "+parent.tpe.typeOfThis)
+ pending += ParentSelfTypeConformanceError(parent, selfType)
if (settings.explaintypes.value) explainTypes(selfType, parent.tpe.typeOfThis)
}
+
if (parents exists (p => p != parent && p.tpe.typeSymbol == psym && !psym.isError))
- error(parent.pos, psym+" is inherited twice")
+ pending += ParentInheritedTwiceError(parent, psym)
+
+ validateDynamicParent(psym, parent.pos)
}
- }
- if (!parents.isEmpty && !parents.head.tpe.isError)
- for (p <- parents) validateParentClass(p, parents.head.tpe.typeSymbol)
+ if (!parents.isEmpty && parents.forall(!_.isErrorTyped)) {
+ val superclazz = parents.head.tpe.typeSymbol
+ for (p <- parents) validateParentClass(p, superclazz)
+ }
/*
if (settings.Xshowcls.value != "" &&
@@ -1242,13 +1722,14 @@ trait Typers extends Modes {
", baseclasses = "+(context.owner.info.baseClasses map (_.fullName))+
", lin = "+(context.owner.info.baseClasses map (context.owner.thisType.baseType)))
*/
+ pending.foreach(ErrorUtils.issueTypeError)
}
def checkFinitary(classinfo: ClassInfoType) {
val clazz = classinfo.typeSymbol
+
for (tparam <- clazz.typeParams) {
if (classinfo.expansiveRefs(tparam) contains tparam) {
- error(tparam.pos, "class graph is not finitary because type parameter "+tparam.name+" is expansively recursive")
val newinfo = ClassInfoType(
classinfo.parents map (_.instantiateTypeParams(List(tparam), List(AnyRefClass.tpe))),
classinfo.decls,
@@ -1259,6 +1740,7 @@ trait Typers extends Modes {
case _ => newinfo
}
}
+ FinitaryError(tparam)
}
}
}
@@ -1270,22 +1752,23 @@ trait Typers extends Modes {
def typedClassDef(cdef: ClassDef): Tree = {
// attributes(cdef)
val clazz = cdef.symbol
- val typedMods = removeAnnotations(cdef.mods)
- assert(clazz != NoSymbol)
+ val typedMods = typedModifiers(cdef.mods)
+ assert(clazz != NoSymbol, cdef)
reenterTypeParams(cdef.tparams)
val tparams1 = cdef.tparams mapConserve (typedTypeDef)
- val impl1 = newTyper(context.make(cdef.impl, clazz, new Scope))
- .typedTemplate(cdef.impl, parentTypes(cdef.impl))
- val impl2 = typerAddSyntheticMethods(impl1, clazz, context)
+ val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, parentTypes(cdef.impl))
+ val impl2 = finishMethodSynthesis(impl1, clazz, context)
+ if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass)
+ checkEphemeral(clazz, impl2.body)
if ((clazz != ClassfileAnnotationClass) &&
(clazz isNonBottomSubClass ClassfileAnnotationClass))
restrictionWarning(cdef.pos, unit,
"subclassing Classfile does not\n"+
"make your annotation visible at runtime. If that is what\n"+
"you want, you must write the annotation class in Java.")
- if (phase.id <= currentRun.typerPhase.id) {
+ if (!isPastTyper) {
for (ann <- clazz.getAnnotation(DeprecatedAttr)) {
- val m = companionModuleOf(clazz, context)
+ val m = companionSymbolOf(clazz, context)
if (m != NoSymbol)
m.moduleClass.addAnnotation(AnnotationInfo(ann.atp, ann.args, List()))
}
@@ -1299,169 +1782,88 @@ trait Typers extends Modes {
* @return ...
*/
def typedModuleDef(mdef: ModuleDef): Tree = {
- //Console.println("sourcefile of " + mdef.symbol + "=" + mdef.symbol.sourceFile)
-// attributes(mdef)
// initialize all constructors of the linked class: the type completer (Namer.methodSig)
// might add default getters to this object. example: "object T; class T(x: Int = 1)"
- val linkedClass = companionClassOf(mdef.symbol, context)
+ val linkedClass = companionSymbolOf(mdef.symbol, context)
if (linkedClass != NoSymbol)
- for (c <- linkedClass.info.decl(nme.CONSTRUCTOR).alternatives)
- c.initialize
- val clazz = mdef.symbol.moduleClass
- val maybeAddSerializable = (l: List[Tree]) =>
- if (linkedClass == NoSymbol || !linkedClass.isSerializable || clazz.isSerializable) l
- else {
- clazz.makeSerializable()
- l :+ TypeTree(SerializableClass.tpe)
+ linkedClass.info.decl(nme.CONSTRUCTOR).alternatives foreach (_.initialize)
+
+ val clazz = mdef.symbol.moduleClass
+ val typedMods = typedModifiers(mdef.mods)
+ assert(clazz != NoSymbol, mdef)
+ val noSerializable = (
+ (linkedClass eq NoSymbol)
+ || linkedClass.isErroneous
+ || !linkedClass.isSerializable
+ || clazz.isSerializable
+ )
+ val impl1 = newTyper(context.make(mdef.impl, clazz, newScope)).typedTemplate(mdef.impl, {
+ parentTypes(mdef.impl) ++ (
+ if (noSerializable) Nil
+ else {
+ clazz.makeSerializable()
+ List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus)
+ }
+ )
+ })
+
+ val impl2 = finishMethodSynthesis(impl1, clazz, context)
+
+ // SI-5954. On second compile of a companion class contained in a package object we end up
+ // with some confusion of names which leads to having two symbols with the same name in the
+ // same owner. Until that can be straightened out we will warn on companion objects in package
+ // objects. But this code also tries to be friendly by distinguishing between case classes and
+ // user written companion pairs
+ def warnPackageObjectMembers(mdef : ModuleDef) = for (m <- mdef.symbol.info.members) {
+ // ignore synthetic objects, because the "companion" object to a case class is synthetic and
+ // we only want one error per case class
+ if (!m.isSynthetic) {
+ // can't handle case classes in package objects
+ if (m.isCaseClass) pkgObjectWarning(m, mdef, "case")
+ // can't handle companion class/object pairs in package objects
+ else if ((m.isClass && m.companionModule != NoSymbol && !m.companionModule.isSynthetic) ||
+ (m.isModule && m.companionClass != NoSymbol && !m.companionClass.isSynthetic))
+ pkgObjectWarning(m, mdef, "companion")
+ }
+
+ def pkgObjectWarning(m : Symbol, mdef : ModuleDef, restricted : String) = {
+ val pkgName = mdef.symbol.ownerChain find (_.isPackage) map (_.decodedName) getOrElse mdef.symbol.toString
+ val pos = if (m.pos.isDefined) m.pos else mdef.pos
+ debugwarn(s"${m} should be placed directly in package ${pkgName} instead of package object ${pkgName}. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.")
+ debugwarn(pos.lineContent + (if (pos.isDefined) " " * (pos.column - 1) + "^" else ""))
}
- val typedMods = removeAnnotations(mdef.mods)
- assert(clazz != NoSymbol)
- val impl1 = newTyper(context.make(mdef.impl, clazz, new Scope))
- .typedTemplate(mdef.impl, maybeAddSerializable(parentTypes(mdef.impl)))
- val impl2 = typerAddSyntheticMethods(impl1, clazz, context)
+ }
+
+ if (mdef.symbol.isPackageObject)
+ warnPackageObjectMembers(mdef)
treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType
}
/** In order to override this in the TreeCheckers Typer so synthetics aren't re-added
* all the time, it is exposed here the module/class typing methods go through it.
+ * ...but it turns out it's also the ideal spot for namer/typer coordination for
+ * the tricky method synthesis scenarios, so we'll make it that.
*/
- protected def typerAddSyntheticMethods(templ: Template, clazz: Symbol, context: Context): Template = {
+ protected def finishMethodSynthesis(templ: Template, clazz: Symbol, context: Context): Template = {
addSyntheticMethods(templ, clazz, context)
}
-
- /**
- * @param stat ...
- * @return ...
+ /** For flatMapping a list of trees when you want the DocDefs and Annotated
+ * to be transparent.
*/
- def addGetterSetter(stat: Tree): List[Tree] = stat match {
- case ValDef(mods, name, tpt, rhs)
- // PRIVATE | LOCAL are fields generated for primary constructor arguments
- if !mods.isPrivateLocal && !stat.symbol.isModuleVar =>
- val isDeferred = mods.isDeferred
- val value = stat.symbol
- val allAnnots = value.annotations
- if (!isDeferred)
- // keepClean: by default annotations go to the field, except if the field is
- // generated for a class parameter (PARAMACCESSOR).
- value.setAnnotations(memberAnnots(allAnnots, FieldTargetClass, keepClean = !mods.isParamAccessor))
-
- val getter = if (isDeferred) value else value.getter(value.owner)
- assert(getter != NoSymbol, stat)
- if (getter.isOverloaded)
- error(getter.pos, getter+" is defined twice")
-
- getter.setAnnotations(memberAnnots(allAnnots, GetterTargetClass))
-
- if (value.isLazy) List(stat)
- else {
- val vdef = treeCopy.ValDef(stat, mods | PRIVATE | LOCAL, nme.getterToLocal(name), tpt, rhs)
- val getterDef: DefDef = atPos(vdef.pos.focus) {
- if (isDeferred) {
- val r = DefDef(getter, EmptyTree)
- r.tpt.asInstanceOf[TypeTree].setOriginal(tpt) // keep type tree of original abstract field
- r
- } else {
- val rhs = gen.mkCheckInit(Select(This(value.owner), value))
- val r = typed {
- atPos(getter.pos.focus) {
- DefDef(getter, rhs)
- }
- }.asInstanceOf[DefDef]
- r.tpt.setPos(tpt.pos.focus)
- r
- }
- }
- checkNoEscaping.privates(getter, getterDef.tpt)
- def setterDef(setter: Symbol, isBean: Boolean = false): DefDef = {
- setter setAnnotations memberAnnots(allAnnots, if (isBean) BeanSetterTargetClass else SetterTargetClass)
- val defTree =
- if ((mods hasFlag DEFERRED) || (setter hasFlag OVERLOADED)) EmptyTree
- else Assign(Select(This(value.owner), value), Ident(setter.paramss.head.head))
+ def rewrappingWrapperTrees(f: Tree => List[Tree]): Tree => List[Tree] = {
+ case dd @ DocDef(comment, defn) => f(defn) map (stat => DocDef(comment, stat) setPos dd.pos)
+ case Annotated(annot, defn) => f(defn) map (stat => Annotated(annot, stat))
+ case tree => f(tree)
+ }
+ protected def enterSyms(txt: Context, trees: List[Tree]) = {
+ var txt0 = txt
+ for (tree <- trees) txt0 = enterSym(txt0, tree)
+ }
- typedPos(vdef.pos.focus)(DefDef(setter, defTree)).asInstanceOf[DefDef]
- }
-
- val gs = new ListBuffer[DefDef]
- gs.append(getterDef)
- if (mods.isMutable) {
- val setter = getter.setter(value.owner)
- gs.append(setterDef(setter))
- }
- if (!forMSIL && (value.hasAnnotation(BeanPropertyAttr) ||
- value.hasAnnotation(BooleanBeanPropertyAttr))) {
- val nameSuffix = name.toString().capitalize
- val beanGetterName =
- (if (value.hasAnnotation(BooleanBeanPropertyAttr)) "is" else "get") +
- nameSuffix
- val beanGetter = value.owner.info.decl(beanGetterName)
- if (beanGetter == NoSymbol) {
- // the namer decides wether to generate these symbols or not. at that point, we don't
- // have symbolic information yet, so we only look for annotations named "BeanProperty".
- unit.error(stat.pos, "implementation limitation: the BeanProperty annotation cannot be used in a type alias or renamed import")
- }
- beanGetter.setAnnotations(memberAnnots(allAnnots, BeanGetterTargetClass))
- if (mods.isMutable && beanGetter != NoSymbol) {
- val beanSetterName = "set" + nameSuffix
- val beanSetter = value.owner.info.decl(beanSetterName)
- // unlike for the beanGetter, the beanSetter body is generated here. see comment in Namers.
- gs.append(setterDef(beanSetter, isBean = true))
- }
- }
- if (mods.isDeferred) gs.toList else vdef :: gs.toList
- }
- case dd @ DocDef(comment, defn) =>
- addGetterSetter(defn) map (stat => DocDef(comment, stat) setPos dd.pos)
-
- case Annotated(annot, defn) =>
- addGetterSetter(defn) map (stat => Annotated(annot, stat))
-
- case _ =>
- List(stat)
- }
-
- /**
- * The annotations amongst `annots` that should go on a member of class
- * `memberClass` (field, getter, setter, beanGetter, beanSetter, param)
- * If 'keepClean' is true, annotations without any meta-annotation are kept
- */
- protected def memberAnnots(annots: List[AnnotationInfo], memberClass: Symbol, keepClean: Boolean = false) = {
-
- def hasMatching(metaAnnots: List[AnnotationInfo], orElse: => Boolean) = {
- // either one of the meta-annotations matches the `memberClass`
- metaAnnots.exists(_.atp.typeSymbol == memberClass) ||
- // else, if there is no `target` meta-annotation at all, use the default case
- (metaAnnots.forall(ann => {
- val annClass = ann.atp.typeSymbol
- annClass != FieldTargetClass && annClass != GetterTargetClass &&
- annClass != SetterTargetClass && annClass != BeanGetterTargetClass &&
- annClass != BeanSetterTargetClass && annClass != ParamTargetClass
- }) && orElse)
- }
-
- // there was no meta-annotation on `ann`. Look if the class annotations of
- // `ann` has a `target` annotation, otherwise put `ann` only on fields.
- def noMetaAnnot(ann: AnnotationInfo) = {
- hasMatching(ann.atp.typeSymbol.annotations, keepClean)
- }
-
- annots.filter(ann => ann.atp match {
- // the annotation type has meta-annotations, e.g. @(foo @getter)
- case AnnotatedType(metaAnnots, _, _) =>
- hasMatching(metaAnnots, noMetaAnnot(ann))
- // there are no meta-annotations, e.g. @foo
- case _ => noMetaAnnot(ann)
- })
- }
-
- protected def enterSyms(txt: Context, trees: List[Tree]) = {
- var txt0 = txt
- for (tree <- trees) txt0 = enterSym(txt0, tree)
- }
-
- protected def enterSym(txt: Context, tree: Tree): Context =
- if (txt eq context) namer.enterSym(tree)
- else newNamer(txt).enterSym(tree)
+ protected def enterSym(txt: Context, tree: Tree): Context =
+ if (txt eq context) namer.enterSym(tree)
+ else newNamer(txt).enterSym(tree)
/**
* @param templ ...
@@ -1473,29 +1875,32 @@ trait Typers extends Modes {
*/
def typedTemplate(templ: Template, parents1: List[Tree]): Template = {
val clazz = context.owner
- // complete lazy annotations
- val annots = clazz.annotations
+ clazz.annotations.map(_.completeInfo)
if (templ.symbol == NoSymbol)
templ setSymbol clazz.newLocalDummy(templ.pos)
val self1 = templ.self match {
- case vd @ ValDef(mods, name, tpt, EmptyTree) =>
- val tpt1 =
- checkNoEscaping.privates(
- clazz.thisSym,
- treeCopy.TypeTree(tpt).setOriginal(tpt) setType vd.symbol.tpe)
- treeCopy.ValDef(vd, mods, name, tpt1, EmptyTree) setType NoType
- }
-// was:
-// val tpt1 = checkNoEscaping.privates(clazz.thisSym, typedType(tpt))
-// treeCopy.ValDef(vd, mods, name, tpt1, EmptyTree) setType NoType
-// but this leads to cycles for existential self types ==> #2545
- if (self1.name != nme.WILDCARD) context.scope enter self1.symbol
- val selfType =
+ case vd @ ValDef(_, _, tpt, EmptyTree) =>
+ val tpt1 = checkNoEscaping.privates(
+ clazz.thisSym,
+ treeCopy.TypeTree(tpt).setOriginal(tpt) setType vd.symbol.tpe
+ )
+ copyValDef(vd)(tpt = tpt1, rhs = EmptyTree) setType NoType
+ }
+ // was:
+ // val tpt1 = checkNoEscaping.privates(clazz.thisSym, typedType(tpt))
+ // treeCopy.ValDef(vd, mods, name, tpt1, EmptyTree) setType NoType
+ // but this leads to cycles for existential self types ==> #2545
+ if (self1.name != nme.WILDCARD)
+ context.scope enter self1.symbol
+
+ val selfType = (
if (clazz.isAnonymousClass && !phase.erasedTypes)
intersectionType(clazz.info.parents, clazz.owner)
- else clazz.typeOfThis
+ else
+ clazz.typeOfThis
+ )
// the following is necessary for templates generated later
- assert(clazz.info.decls != EmptyScope)
+ assert(clazz.info.decls != EmptyScope, clazz)
enterSyms(context.outer.make(templ, clazz, clazz.info.decls), templ.body)
validateParentClasses(parents1, selfType)
if (clazz.isCase)
@@ -1503,55 +1908,80 @@ trait Typers extends Modes {
if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.owner.isPackageClass)
unit.error(clazz.pos, "inner classes cannot be classfile annotations")
+
if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members
checkFinitary(clazz.info.resultType.asInstanceOf[ClassInfoType])
+
val body =
- if (phase.id <= currentRun.typerPhase.id && !reporter.hasErrors)
- templ.body flatMap addGetterSetter
- else templ.body
+ if (isPastTyper || reporter.hasErrors) templ.body
+ else templ.body flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _))
+
val body1 = typedStats(body, templ.symbol)
+
+ if (clazz.info.firstParent.typeSymbol == AnyValClass)
+ validateDerivedValueClass(clazz, body1)
+
+ if (clazz.isTrait) {
+ for (decl <- clazz.info.decls if decl.isTerm && decl.isEarlyInitialized) {
+ unit.warning(decl.pos, "Implementation restriction: early definitions in traits are not initialized before the super class is initialized.")
+ }
+ }
+
treeCopy.Template(templ, parents1, self1, body1) setType clazz.tpe
}
/** Remove definition annotations from modifiers (they have been saved
* into the symbol's ``annotations'' in the type completer / namer)
+ *
+ * However reification does need annotation definitions to proceed.
+ * Unfortunately, AnnotationInfo doesn't provide enough info to reify it in general case.
+ * The biggest problem is with the "atp: Type" field, which cannot be reified in some situations
+ * that involve locally defined annotations. See more about that in Reifiers.scala.
+ *
+ * That's why the original tree gets saved into ``original'' field of AnnotationInfo (happens elsewhere).
+ * The field doesn't get pickled/unpickled and exists only during a single compilation run.
+ * This simultaneously allows us to reify annotations and to preserve backward compatibility.
*/
- def removeAnnotations(mods: Modifiers): Modifiers =
- mods.copy(annotations = Nil)
+ def typedModifiers(mods: Modifiers): Modifiers =
+ mods.copy(annotations = Nil) setPositions mods.positions
/**
* @param vdef ...
* @return ...
*/
def typedValDef(vdef: ValDef): ValDef = {
-// attributes(vdef)
val sym = vdef.symbol
- val typer1 = constrTyperIf(sym.isParameter && sym.owner.isConstructor)
- val typedMods = removeAnnotations(vdef.mods)
+ val valDefTyper = {
+ val maybeConstrCtx =
+ if ((sym.isParameter || sym.isEarlyInitialized) && sym.owner.isConstructor) context.makeConstructorContext
+ else context
+ newTyper(maybeConstrCtx.makeNewScope(vdef, sym))
+ }
+ valDefTyper.typedValDefImpl(vdef)
+ }
- // complete lazy annotations
- val annots = sym.annotations
+ // use typedValDef instead. this version is called after creating a new context for the ValDef
+ private def typedValDefImpl(vdef: ValDef) = {
+ val sym = vdef.symbol.initialize
+ val typedMods = typedModifiers(vdef.mods)
- var tpt1 = checkNoEscaping.privates(sym, typer1.typedType(vdef.tpt))
+ sym.annotations.map(_.completeInfo)
+ val tpt1 = checkNoEscaping.privates(sym, typedType(vdef.tpt))
checkNonCyclic(vdef, tpt1)
- if (sym.hasAnnotation(definitions.VolatileAttr)) {
- if (!sym.isMutable)
- error(vdef.pos, "values cannot be volatile")
- else if (sym.isFinal)
- error(vdef.pos, "final vars cannot be volatile")
- }
+
+ if (sym.hasAnnotation(definitions.VolatileAttr) && !sym.isMutable)
+ VolatileValueError(vdef)
+
val rhs1 =
if (vdef.rhs.isEmpty) {
- if (sym.isVariable && sym.owner.isTerm && phase.id <= currentRun.typerPhase.id)
- error(vdef.pos, "local variables must be initialized")
+ if (sym.isVariable && sym.owner.isTerm && !sym.isLazy && !isPastTyper)
+ LocalVarUninitializedError(vdef)
vdef.rhs
} else {
val tpt2 = if (sym.hasDefault) {
// When typechecking default parameter, replace all type parameters in the expected type by Wildcard.
// This allows defining "def foo[T](a: T = 1)"
- val tparams =
- if (sym.owner.isConstructor) sym.owner.owner.info.typeParams
- else sym.owner.tpe.typeParams
+ val tparams = sym.owner.skipConstructor.info.typeParams
val subst = new SubstTypeMap(tparams, tparams map (_ => WildcardType)) {
override def matches(sym: Symbol, sym1: Symbol) =
if (sym.isSkolem) matches(sym.deSkolemize, sym1)
@@ -1564,7 +1994,7 @@ trait Typers extends Modes {
else subst(tpt1.tpe.typeArgs(0))
else subst(tpt1.tpe)
} else tpt1.tpe
- newTyper(typer1.context.make(vdef, sym)).transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
+ transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
}
treeCopy.ValDef(vdef, typedMods, vdef.name, tpt1, checkDead(rhs1)) setType NoType
}
@@ -1576,97 +2006,167 @@ trait Typers extends Modes {
* @param rhs ...
*/
def computeParamAliases(clazz: Symbol, vparamss: List[List[ValDef]], rhs: Tree) {
- if (settings.debug.value) log("computing param aliases for "+clazz+":"+clazz.primaryConstructor.tpe+":"+rhs)//debug
+ debuglog(s"computing param aliases for $clazz:${clazz.primaryConstructor.tpe}:$rhs")
+ val pending = ListBuffer[AbsTypeError]()
+
+ // !!! This method is redundant with other, less buggy ones.
def decompose(call: Tree): (Tree, List[Tree]) = call match {
+ case _ if call.isErrorTyped => // e.g. SI-7636
+ (call, Nil)
case Apply(fn, args) =>
- val (superConstr, args1) = decompose(fn)
+ // an object cannot be allowed to pass a reference to itself to a superconstructor
+ // because of initialization issues; SI-473, SI-3913, SI-6928.
+ foreachSubTreeBoundTo(args, clazz) { tree =>
+ if (tree.symbol.isModule)
+ pending += SuperConstrReferenceError(tree)
+ tree match {
+ case This(qual) =>
+ pending += SuperConstrArgsThisReferenceError(tree)
+ case _ => ()
+ }
+ }
+ val (superConstr, preArgs) = decompose(fn)
val params = fn.tpe.params
- val args2 = if (params.isEmpty || !isRepeatedParamType(params.last.tpe)) args
- else args.take(params.length - 1) :+ EmptyTree
- assert(sameLength(args2, params), "mismatch " + clazz + " " + (params map (_.tpe)) + " " + args2)//debug
- (superConstr, args1 ::: args2)
- case Block(stats, expr) if !stats.isEmpty =>
- decompose(stats.last)
+ // appending a dummy tree to represent Nil for an empty varargs (is this really necessary?)
+ val applyArgs = if (args.length < params.length) args :+ EmptyTree else args take params.length
+
+ assert(sameLength(applyArgs, params) || call.isErrorTyped,
+ s"arity mismatch but call is not error typed: $clazz (params=$params, args=$applyArgs)")
+
+ (superConstr, preArgs ::: applyArgs)
+ case Block(_ :+ superCall, _) =>
+ decompose(superCall)
case _ =>
- (call, List())
+ (call, Nil)
}
val (superConstr, superArgs) = decompose(rhs)
- assert(superConstr.symbol ne null)//debug
-
- // an object cannot be allowed to pass a reference to itself to a superconstructor
- // because of initialization issues; bug #473
- for (arg <- superArgs ; tree <- arg) {
- val sym = tree.symbol
- if (sym != null && (sym.info.baseClasses contains clazz)) {
- if (sym.isModule)
- error(tree.pos, "super constructor cannot be passed a self reference unless parameter is declared by-name")
- tree match {
- case This(qual) =>
- error(tree.pos, "super constructor arguments cannot reference unconstructed `this`")
- case _ => ()
- }
- }
- }
-
- if (superConstr.symbol.isPrimaryConstructor) {
- val superClazz = superConstr.symbol.owner
- if (!superClazz.isJavaDefined) {
- val superParamAccessors = superClazz.constrParamAccessors
- if (sameLength(superParamAccessors, superArgs)) {
- (superParamAccessors, superArgs).zipped map { (superAcc, superArg) =>
- superArg match {
- case Ident(name) =>
- if (vparamss.exists(_.exists(_.symbol == superArg.symbol))) {
- var alias = superAcc.initialize.alias
- if (alias == NoSymbol)
- alias = superAcc.getter(superAcc.owner)
- if (alias != NoSymbol &&
- superClazz.info.nonPrivateMember(alias.name) != alias)
- alias = NoSymbol
- if (alias != NoSymbol) {
- var ownAcc = clazz.info.decl(name).suchThat(_.isParamAccessor)
- if ((ownAcc hasFlag ACCESSOR) && !ownAcc.isDeferred)
- ownAcc = ownAcc.accessed
- if (!ownAcc.isVariable && !alias.accessed.isVariable) {
- if (settings.debug.value)
- log("" + ownAcc + " has alias "+alias + alias.locationString) //debug
- ownAcc.asInstanceOf[TermSymbol].setAlias(alias)
- }
- }
- }
+ assert(superConstr.symbol ne null, superConstr)//debug
+ def superClazz = superConstr.symbol.owner
+ def superParamAccessors = superClazz.constrParamAccessors
+
+ // associate superclass paramaccessors with their aliases
+ if (superConstr.symbol.isPrimaryConstructor && !superClazz.isJavaDefined && sameLength(superParamAccessors, superArgs)) {
+ for ((superAcc, superArg @ Ident(name)) <- superParamAccessors zip superArgs) {
+ if (mexists(vparamss)(_.symbol == superArg.symbol)) {
+ val alias = (
+ superAcc.initialize.alias
+ orElse (superAcc getter superAcc.owner)
+ filter (alias => superClazz.info.nonPrivateMember(alias.name) == alias)
+ )
+ if (alias.exists && !alias.accessed.isVariable) {
+ val ownAcc = clazz.info decl name suchThat (_.isParamAccessor) match {
+ case acc if !acc.isDeferred && acc.hasAccessorFlag => acc.accessed
+ case acc => acc
+ }
+ ownAcc match {
+ case acc: TermSymbol if !acc.isVariable =>
+ debuglog(s"$acc has alias ${alias.fullLocationString}")
+ acc setAlias alias
case _ =>
}
- ()
}
}
}
}
+ pending.foreach(ErrorUtils.issueTypeError)
+ }
+
+ // Check for SI-4842.
+ private def checkSelfConstructorArgs(ddef: DefDef, clazz: Symbol) {
+ val pending = ListBuffer[AbsTypeError]()
+ ddef.rhs match {
+ case Block(stats, expr) =>
+ val selfConstructorCall = stats.headOption.getOrElse(expr)
+ foreachSubTreeBoundTo(List(selfConstructorCall), clazz) {
+ case tree @ This(qual) =>
+ pending += SelfConstrArgsThisReferenceError(tree)
+ case _ => ()
+ }
+ case _ =>
+ }
+ pending.foreach(ErrorUtils.issueTypeError)
}
- /** Check if a method is defined in such a way that it can be called.
- * A method cannot be called if it is a non-private member of a structural type
- * and if its parameter's types are not one of
- * - this.type
- * - a type member of the structural type
- * - an abstract type declared outside of the structural type. */
- def checkMethodStructuralCompatible(meth: Symbol): Unit =
- if (meth.owner.isStructuralRefinement && meth.allOverriddenSymbols.isEmpty && !(meth.isPrivate || meth.hasAccessBoundary)) {
- val tp: Type = meth.tpe match {
- case mt: MethodType => mt
- case NullaryMethodType(res) => res
- // TODO_NMT: drop NullaryMethodType from resultType?
- case pt: PolyType => pt.resultType
- case _ => NoType
- }
- for (paramType <- tp.paramTypes) {
- if (paramType.typeSymbol.isAbstractType && !(paramType.typeSymbol.hasTransOwner(meth.owner)))
- unit.error(meth.pos,"Parameter type in structural refinement may not refer to an abstract type defined outside that refinement")
- else if (paramType.typeSymbol.isAbstractType && !(paramType.typeSymbol.hasTransOwner(meth)))
- unit.error(meth.pos,"Parameter type in structural refinement may not refer to a type member of that refinement")
- else if (paramType.isInstanceOf[ThisType] && paramType.typeSymbol == meth.owner)
- unit.error(meth.pos,"Parameter type in structural refinement may not refer to the type of that refinement (self type)")
+ /**
+ * Run the provided function for each sub tree of `trees` that
+ * are bound to a symbol with `clazz` as a base class.
+ *
+ * @param f This function can assume that `tree.symbol` is non null
+ */
+ private def foreachSubTreeBoundTo[A](trees: List[Tree], clazz: Symbol)(f: Tree => Unit): Unit =
+ for {
+ tree <- trees
+ subTree <- tree
+ } {
+ val sym = subTree.symbol
+ if (sym != null && sym.info.baseClasses.contains(clazz))
+ f(subTree)
+ }
+
+ /** Check if a structurally defined method violates implementation restrictions.
+ * A method cannot be called if it is a non-private member of a refinement type
+ * and if its parameter's types are any of:
+ * - the self-type of the refinement
+ * - a type member of the refinement
+ * - an abstract type declared outside of the refinement.
+ * - an instance of a value class
+ * Furthermore, the result type may not be a value class either
+ */
+ def checkMethodStructuralCompatible(ddef: DefDef): Unit = {
+ val meth = ddef.symbol
+ def parentString = meth.owner.parentSymbols filterNot (_ == ObjectClass) match {
+ case Nil => ""
+ case xs => xs.map(_.nameString).mkString(" (of ", " with ", ")")
+ }
+ def fail(pos: Position, msg: String): Boolean = {
+ unit.error(pos, msg)
+ false
+ }
+ /** Have to examine all parameters in all lists.
+ */
+ def paramssTypes(tp: Type): List[List[Type]] = tp match {
+ case mt @ MethodType(_, restpe) => mt.paramTypes :: paramssTypes(restpe)
+ case PolyType(_, restpe) => paramssTypes(restpe)
+ case _ => Nil
+ }
+ def resultType = meth.tpe.finalResultType
+ def nthParamPos(n1: Int, n2: Int) =
+ try ddef.vparamss(n1)(n2).pos catch { case _: IndexOutOfBoundsException => meth.pos }
+
+ def failStruct(pos: Position, what: String, where: String = "Parameter type") =
+ fail(pos, s"$where in structural refinement may not refer to $what")
+
+ foreachWithIndex(paramssTypes(meth.tpe)) { (paramList, listIdx) =>
+ foreachWithIndex(paramList) { (paramType, paramIdx) =>
+ val sym = paramType.typeSymbol
+ def paramPos = nthParamPos(listIdx, paramIdx)
+
+ /** Not enough to look for abstract types; have to recursively check the bounds
+ * of each abstract type for more abstract types. Almost certainly there are other
+ * exploitable type soundness bugs which can be seen by bounding a type parameter
+ * by an abstract type which itself is bounded by an abstract type.
+ */
+ def checkAbstract(tp0: Type, what: String): Boolean = {
+ def check(sym: Symbol): Boolean = !sym.isAbstractType || {
+ log(s"""checking $tp0 in refinement$parentString at ${meth.owner.owner.fullLocationString}""")
+ ( (!sym.hasTransOwner(meth.owner) && failStruct(paramPos, "an abstract type defined outside that refinement", what))
+ || (!sym.hasTransOwner(meth) && failStruct(paramPos, "a type member of that refinement", what))
+ || checkAbstract(sym.info.bounds.hi, "Type bound")
+ )
+ }
+ tp0.dealiasWidenChain forall (t => check(t.typeSymbol))
+ }
+ checkAbstract(paramType, "Parameter type")
+
+ if (sym.isDerivedValueClass)
+ failStruct(paramPos, "a user-defined value class")
+ if (paramType.isInstanceOf[ThisType] && sym == meth.owner)
+ failStruct(paramPos, "the type of that refinement (self type)")
}
}
+ if (resultType.typeSymbol.isDerivedValueClass)
+ failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result type")
+ }
def typedUseCase(useCase: UseCase) {
def stringParser(str: String): syntaxAnalyzer.Parser = {
@@ -1685,10 +2185,11 @@ trait Typers extends Modes {
lookupVariable(name.toString.substring(1), enclClass) match {
case Some(repl) =>
silent(_.typedTypeConstructor(stringParser(repl).typ())) match {
- case tpt: Tree =>
- val alias = enclClass.newAliasType(useCase.pos, name.toTypeName)
- val tparams = cloneSymbols(tpt.tpe.typeSymbol.typeParams, alias)
- alias setInfo typeFun(tparams, appliedType(tpt.tpe, tparams map (_.tpe)))
+ case SilentResultValue(tpt) =>
+ val alias = enclClass.newAliasType(name.toTypeName, useCase.pos)
+ val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias)
+ val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe)))
+ alias setInfo newInfo
context.scope.enter(alias)
case _ =>
}
@@ -1713,17 +2214,16 @@ trait Typers extends Modes {
* @return ...
*/
def typedDefDef(ddef: DefDef): DefDef = {
- val meth = ddef.symbol
+ val meth = ddef.symbol.initialize
reenterTypeParams(ddef.tparams)
reenterValueParams(ddef.vparamss)
// for `val` and `var` parameter, look at `target` meta-annotation
- if (phase.id <= currentRun.typerPhase.id && meth.isPrimaryConstructor) {
+ if (!isPastTyper && meth.isPrimaryConstructor) {
for (vparams <- ddef.vparamss; vd <- vparams) {
if (vd.mods.isParamAccessor) {
- val sym = vd.symbol
- sym.setAnnotations(memberAnnots(sym.annotations, ParamTargetClass, keepClean = true))
+ namer.validateParam(vd)
}
}
}
@@ -1731,77 +2231,81 @@ trait Typers extends Modes {
val tparams1 = ddef.tparams mapConserve typedTypeDef
val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef)
- // complete lazy annotations
- val annots = meth.annotations
+ meth.annotations.map(_.completeInfo)
for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1)
if (isRepeatedParamType(vparam1.symbol.tpe))
- error(vparam1.pos, "*-parameter must come last")
+ StarParamNotLastError(vparam1)
var tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt))
- if (!settings.YdepMethTpes.value) {
- for (vparams <- vparamss1; vparam <- vparams) {
- checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); ()
- }
- checkNoEscaping.locals(context.scope, WildcardType, tpt1)
- }
checkNonCyclic(ddef, tpt1)
ddef.tpt.setType(tpt1.tpe)
- val typedMods = removeAnnotations(ddef.mods)
+ val typedMods = typedModifiers(ddef.mods)
var rhs1 =
if (ddef.name == nme.CONSTRUCTOR && !ddef.symbol.hasStaticFlag) { // need this to make it possible to generate static ctors
if (!meth.isPrimaryConstructor &&
(!meth.owner.isClass ||
meth.owner.isModuleClass ||
meth.owner.isAnonOrRefinementClass))
- error(ddef.pos, "constructor definition not allowed here")
+ InvalidConstructorDefError(ddef)
typed(ddef.rhs)
+ } else if (meth.isTermMacro) {
+ // typechecking macro bodies is sort of unconventional
+ // that's why we employ our custom typing scheme orchestrated outside of the typer
+ transformedOr(ddef.rhs, typedMacroBody(this, ddef))
} else {
transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe)
}
- if (meth.isPrimaryConstructor && meth.isClassConstructor &&
- phase.id <= currentRun.typerPhase.id && !reporter.hasErrors)
- computeParamAliases(meth.owner, vparamss1, rhs1)
+ if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass)) {
+ // At this point in AnyVal there is no supercall, which will blow up
+ // in computeParamAliases; there's nothing to be computed for Anyval anyway.
+ if (meth.isPrimaryConstructor)
+ computeParamAliases(meth.owner, vparamss1, rhs1)
+ else
+ checkSelfConstructorArgs(ddef, meth.owner)
+ }
+
if (tpt1.tpe.typeSymbol != NothingClass && !context.returnsSeen && rhs1.tpe.typeSymbol != NothingClass)
rhs1 = checkDead(rhs1)
- if (phase.id <= currentRun.typerPhase.id && meth.owner.isClass &&
- meth.paramss.exists(ps => ps.exists(_.hasDefaultFlag) && isRepeatedParamType(ps.last.tpe)))
- error(meth.pos, "a parameter section with a `*'-parameter is not allowed to have default arguments")
+ if (!isPastTyper && meth.owner.isClass &&
+ meth.paramss.exists(ps => ps.exists(_.hasDefault) && isRepeatedParamType(ps.last.tpe)))
+ StarWithDefaultError(meth)
- if (phase.id <= currentRun.typerPhase.id) {
+ if (!isPastTyper) {
val allParams = meth.paramss.flatten
for (p <- allParams) {
- deprecatedName(p).foreach(n => {
- if (allParams.exists(p1 => p1.name == n || (p != p1 && deprecatedName(p1) == Some(n))))
- error(p.pos, "deprecated parameter name "+ n +" has to be distinct from any other parameter name (deprecated or not).")
- })
+ for (n <- p.deprecatedParamName) {
+ if (allParams.exists(p1 => p1.name == n || (p != p1 && p1.deprecatedParamName.exists(_ == n))))
+ DeprecatedParamNameError(p, n)
+ }
}
}
+ if (meth.isStructuralRefinementMember)
+ checkMethodStructuralCompatible(ddef)
- checkMethodStructuralCompatible(meth)
+ if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match {
+ case List(param) :: _ if !param.isImplicit =>
+ checkFeature(ddef.pos, ImplicitConversionsFeature, meth.toString)
+ case _ =>
+ }
treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType
}
- def typedTypeDef(tdef: TypeDef): TypeDef = {
- def typeDefTyper = {
- if(tdef.tparams isEmpty) Typer.this
- else newTyper(context.makeNewScope(tdef, tdef.symbol))
+ def typedTypeDef(tdef: TypeDef): TypeDef =
+ typerWithCondLocalContext(context.makeNewScope(tdef, tdef.symbol))(tdef.tparams.nonEmpty) {
+ _.typedTypeDefImpl(tdef)
}
- typeDefTyper.typedTypeDef0(tdef)
- }
- // call typedTypeDef instead
- // a TypeDef with type parameters must always be type checked in a new scope
- private def typedTypeDef0(tdef: TypeDef): TypeDef = {
+ // use typedTypeDef instead. this version is called after creating a new context for the TypeDef
+ private def typedTypeDefImpl(tdef: TypeDef): TypeDef = {
tdef.symbol.initialize
reenterTypeParams(tdef.tparams)
- val tparams1 = tdef.tparams mapConserve {typedTypeDef(_)}
- val typedMods = removeAnnotations(tdef.mods)
- // complete lazy annotations
- val annots = tdef.symbol.annotations
+ val tparams1 = tdef.tparams mapConserve typedTypeDef
+ val typedMods = typedModifiers(tdef.mods)
+ tdef.symbol.annotations.map(_.completeInfo)
// @specialized should not be pickled when compiling with -no-specialize
if (settings.nospecialization.value && currentRun.compiles(tdef.symbol)) {
@@ -1813,11 +2317,13 @@ trait Typers extends Modes {
checkNonCyclic(tdef.symbol)
if (tdef.symbol.owner.isType)
rhs1.tpe match {
- case TypeBounds(lo1, hi1) =>
- if (!(lo1 <:< hi1))
- error(tdef.pos, "lower bound "+lo1+" does not conform to upper bound "+hi1)
- case _ =>
+ case TypeBounds(lo1, hi1) if (!(lo1 <:< hi1)) => LowerBoundError(tdef, lo1, hi1)
+ case _ => ()
}
+
+ if (tdef.symbol.isDeferred && tdef.symbol.info.isHigherKinded)
+ checkFeature(tdef.pos, HigherKindsFeature)
+
treeCopy.TypeDef(tdef, typedMods, tdef.name, tparams1, rhs1) setType NoType
}
@@ -1826,18 +2332,19 @@ trait Typers extends Modes {
case ldef @ LabelDef(_, _, _) =>
if (ldef.symbol == NoSymbol)
ldef.symbol = namer.enterInScope(
- context.owner.newLabel(ldef.pos, ldef.name) setInfo MethodType(List(), UnitClass.tpe))
+ context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), UnitClass.tpe))
case _ =>
}
}
def typedLabelDef(ldef: LabelDef): LabelDef = {
- if (!nme.isLoopHeaderLabel(ldef.symbol.name) || phase.id > currentRun.typerPhase.id) {
+ if (!nme.isLoopHeaderLabel(ldef.symbol.name) || isPastTyper) {
val restpe = ldef.symbol.tpe.resultType
val rhs1 = typed(ldef.rhs, restpe)
ldef.params foreach (param => param.tpe = param.symbol.tpe)
- treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs1) setType restpe
- } else {
+ deriveLabelDef(ldef)(_ => rhs1) setType restpe
+ }
+ else {
val initpe = ldef.symbol.tpe.resultType
val rhs1 = typed(ldef.rhs)
val restpe = rhs1.tpe
@@ -1847,10 +2354,10 @@ trait Typers extends Modes {
} else {
context.scope.unlink(ldef.symbol)
val sym2 = namer.enterInScope(
- context.owner.newLabel(ldef.pos, ldef.name) setInfo MethodType(List(), restpe))
+ context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), restpe))
val rhs2 = typed(resetAllAttrs(ldef.rhs), restpe)
ldef.params foreach (param => param.tpe = param.symbol.tpe)
- treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs2) setSymbol sym2 setType restpe
+ deriveLabelDef(ldef)(_ => rhs2) setSymbol sym2 setType restpe
}
}
}
@@ -1868,62 +2375,68 @@ trait Typers extends Modes {
for (stat <- block.stats) enterLabelDef(stat)
if (phaseId(currentPeriod) <= currentRun.typerPhase.id) {
- // This is very tricky stuff, because we are navigating
- // the Skylla and Charybdis of anonymous classes and what to return
- // from them here. On the one hand, we cannot admit
- // every non-private member of an anonymous class as a part of
- // the structural type of the enclosing block. This runs afoul of
- // the restriction that a structural type may not refer to an enclosing
- // type parameter or abstract types (which in turn is necessitated
- // by what can be done in Java reflection. On the other hand,
- // making every term member private conflicts with private escape checking
- // see ticket #3174 for an example.
- // The cleanest way forward is if we would find a way to suppress
- // structural type checking for these members and maybe defer
- // type errors to the places where members are called. But that would
- // be a big refactoring and also a big departure from existing code.
- // The probably safest fix for 2.8 is to keep members of an anonymous
- // class that are not mentioned in a parent type private (as before)
- // but to disable escape checking for code that's in the same anonymous class.
- // That's what's done here.
- // We really should go back and think hard whether we find a better
- // way to address the problem of escaping idents on the one hand and well-formed
- // structural types on the other.
+ // This is very tricky stuff, because we are navigating the Skylla and Charybdis of
+ // anonymous classes and what to return from them here. On the one hand, we cannot admit
+ // every non-private member of an anonymous class as a part of the structural type of the
+ // enclosing block. This runs afoul of the restriction that a structural type may not
+ // refer to an enclosing type parameter or abstract types (which in turn is necessitated
+ // by what can be done in Java reflection). On the other hand, making every term member
+ // private conflicts with private escape checking - see ticket #3174 for an example.
+ //
+ // The cleanest way forward is if we would find a way to suppress structural type checking
+ // for these members and maybe defer type errors to the places where members are called.
+ // But that would be a big refactoring and also a big departure from existing code. The
+ // probably safest fix for 2.8 is to keep members of an anonymous class that are not
+ // mentioned in a parent type private (as before) but to disable escape checking for code
+ // that's in the same anonymous class. That's what's done here.
+ //
+ // We really should go back and think hard whether we find a better way to address the
+ // problem of escaping idents on the one hand and well-formed structural types on the
+ // other.
block match {
- case block @ Block(List(classDef @ ClassDef(_, _, _, _)), newInst @ Apply(Select(New(_), _), _)) =>
- // The block is an anonymous class definitions/instantiation pair
- // -> members that are hidden by the type of the block are made private
+ case Block(List(classDef @ ClassDef(_, _, _, _)), Apply(Select(New(_), _), _)) =>
+ val classDecls = classDef.symbol.info.decls
val visibleMembers = pt match {
- case WildcardType => classDef.symbol.info.decls.toList
- case BoundedWildcardType(TypeBounds(lo, hi)) => lo.members
- case _ => pt.members
+ case WildcardType => classDecls.toList
+ case BoundedWildcardType(TypeBounds(lo, _)) => lo.members
+ case _ => pt.members
}
- for (member <- classDef.symbol.info.decls.toList
- if member.isTerm && !member.isConstructor &&
- member.allOverriddenSymbols.isEmpty &&
- (!member.isPrivate && !member.hasAccessBoundary) &&
- !(visibleMembers exists { visible =>
- visible.name == member.name &&
- member.tpe <:< visible.tpe.substThis(visible.owner, ThisType(classDef.symbol))
- })
- ) {
- member.resetFlag(PROTECTED)
- member.resetFlag(LOCAL)
- member.setFlag(PRIVATE | SYNTHETIC_PRIVATE)
- syntheticPrivates += member
- member.privateWithin = NoSymbol
+ def matchesVisibleMember(member: Symbol) = visibleMembers exists { vis =>
+ (member.name == vis.name) &&
+ (member.tpe <:< vis.tpe.substThis(vis.owner, classDef.symbol))
}
+ // The block is an anonymous class definitions/instantiation pair
+ // -> members that are hidden by the type of the block are made private
+ val toHide = (
+ classDecls filter (member =>
+ member.isTerm
+ && member.isPossibleInRefinement
+ && member.isPublic
+ && !matchesVisibleMember(member)
+ ) map (member => member
+ resetFlag (PROTECTED | LOCAL)
+ setFlag (PRIVATE | SYNTHETIC_PRIVATE)
+ setPrivateWithin NoSymbol
+ )
+ )
+ syntheticPrivates ++= toHide
case _ =>
}
}
- val stats1 = typedStats(block.stats, context.owner)
+ val stats1 = if (isPastTyper) block.stats else
+ block.stats.flatMap(stat => stat match {
+ case vd at ValDef(_, _, _, _) if vd.symbol.isLazy =>
+ namer.addDerivedTrees(Typer.this, vd)
+ case _ => stat::Nil
+ })
+ val stats2 = typedStats(stats1, context.owner)
val expr1 = typed(block.expr, mode & ~(FUNmode | QUALmode), pt)
- treeCopy.Block(block, stats1, expr1)
- .setType(if (treeInfo.isPureExpr(block)) expr1.tpe else expr1.tpe.deconst)
+ treeCopy.Block(block, stats2, expr1)
+ .setType(if (treeInfo.isExprSafeToInline(block)) expr1.tpe else expr1.tpe.deconst)
} finally {
// enable escaping privates checking from the outside and recycle
// transient flag
- for (sym <- syntheticPrivates) sym resetFlag SYNTHETIC_PRIVATE
+ syntheticPrivates foreach (_ resetFlag SYNTHETIC_PRIVATE)
}
}
@@ -1936,9 +2449,12 @@ trait Typers extends Modes {
def typedCase(cdef: CaseDef, pattpe: Type, pt: Type): CaseDef = {
// verify no _* except in last position
for (Apply(_, xs) <- cdef.pat ; x <- xs dropRight 1 ; if treeInfo isStar x)
- error(x.pos, "_* may only come last")
+ StarPositionInPatternError(x)
- val pat1: Tree = typedPattern(cdef.pat, pattpe)
+ // withoutAnnotations - see continuations-run/z1673.scala
+ // This adjustment is awfully specific to continuations, but AFAICS the
+ // whole AnnotationChecker framework is.
+ val pat1 = typedPattern(cdef.pat, pattpe.withoutAnnotations)
// When case classes have more than two parameter lists, the pattern ends
// up typed as a method. We only pattern match on the first parameter
// list, so substitute the final result type of the method, i.e. the type
@@ -1955,66 +2471,345 @@ trait Typers extends Modes {
val guard1: Tree = if (cdef.guard == EmptyTree) EmptyTree
else typed(cdef.guard, BooleanClass.tpe)
var body1: Tree = typed(cdef.body, pt)
- if (!context.savedTypeBounds.isEmpty) {
- body1.tpe = context.restoreTypeBounds(body1.tpe)
- if (isFullyDefined(pt) && !(body1.tpe <:< pt)) {
- body1 =
- typed {
- atPos(body1.pos) {
- TypeApply(Select(body1, Any_asInstanceOf), List(TypeTree(pt))) // @M no need for pt.normalize here, is done in erasure
- }
- }
- }
+
+ val contextWithTypeBounds = context.nextEnclosing(_.tree.isInstanceOf[CaseDef])
+ if (contextWithTypeBounds.savedTypeBounds.nonEmpty) {
+ body1.tpe = contextWithTypeBounds restoreTypeBounds body1.tpe
+
+ // insert a cast if something typechecked under the GADT constraints,
+ // but not in real life (i.e., now that's we've reset the method's type skolems'
+ // infos back to their pre-GADT-constraint state)
+ if (isFullyDefined(pt) && !(body1.tpe <:< pt))
+ body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.normalize))
+
}
+
// body1 = checkNoEscaping.locals(context.scope, pt, body1)
treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe
}
- def typedCases(tree: Tree, cases: List[CaseDef], pattp: Type, pt: Type): List[CaseDef] =
+ // undo adaptConstrPattern's evil deeds, as they confuse the old pattern matcher
+ // the flags are used to avoid accidentally deskolemizing unrelated skolems of skolems
+ object deskolemizeGADTSkolems extends TypeMap {
+ def apply(tp: Type): Type = mapOver(tp) match {
+ case TypeRef(pre, sym, args) if sym.isGADTSkolem =>
+ typeRef(NoPrefix, sym.deSkolemize, args)
+ case tp1 => tp1
+ }
+ }
+
+ def typedCases(cases: List[CaseDef], pattp: Type, pt: Type): List[CaseDef] =
cases mapConserve { cdef =>
newTyper(context.makeNewScope(cdef, context.owner)).typedCase(cdef, pattp, pt)
}
+ def adaptCase(cdef: CaseDef, mode: Int, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe))
+
+ def ptOrLub(tps: List[Type], pt: Type ) = if (isFullyDefined(pt)) (pt, false) else weakLub(tps map (_.deconst))
+ def ptOrLubPacked(trees: List[Tree], pt: Type) = if (isFullyDefined(pt)) (pt, false) else weakLub(trees map (c => packedType(c, context.owner).deconst))
+
+ // takes untyped sub-trees of a match and type checks them
+ def typedMatch(selector: Tree, cases: List[CaseDef], mode: Int, pt: Type, tree: Tree = EmptyTree): Match = {
+ val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
+ val selectorTp = packCaptured(selector1.tpe.widen).skolemizeExistential(context.owner, selector)
+ val casesTyped = typedCases(cases, selectorTp, pt)
+
+ val (resTp, needAdapt) =
+ if (opt.virtPatmat) ptOrLubPacked(casesTyped, pt)
+ else ptOrLub(casesTyped map (_.tpe), pt)
+
+ val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, resTp))
+
+ val matchTyped = treeCopy.Match(tree, selector1, casesAdapted) setType resTp
+ if (!newPatternMatching) // TODO: remove this in 2.11 -- only needed for old pattern matcher
+ new TypeMapTreeSubstituter(deskolemizeGADTSkolems).traverse(matchTyped)
+ matchTyped
+ }
+
+ // match has been typed -- virtualize it if we're feeling experimental
+ // (virtualized matches are expanded during type checking so they have the full context available)
+ // otherwise, do nothing: matches are translated during phase `patmat` (unless -Xoldpatmat)
+ def virtualizedMatch(match_ : Match, mode: Int, pt: Type) = {
+ import patmat.{vpmName, PureMatchTranslator, OptimizingMatchTranslator}
+
+ // TODO: add fallback __match sentinel to predef
+ val matchStrategy: Tree =
+ if (!(newPatternMatching && opt.experimental && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen
+ else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
+ case SilentResultValue(ms) => ms
+ case _ => null
+ }
+
+ if (matchStrategy ne null) // virtualize
+ typed((new PureMatchTranslator(this.asInstanceOf[patmat.global.analyzer.Typer] /*TODO*/, matchStrategy)).translateMatch(match_), mode, pt)
+ else
+ match_ // will be translated in phase `patmat`
+ }
+
+ /** synthesize and type check a PartialFunction implementation based on the match in `tree`
+ *
+ * `param => sel match { cases }` becomes:
+ *
+ * new AbstractPartialFunction[$argTp, $matchResTp] {
+ * def applyOrElse[A1 <: $argTp, B1 >: $matchResTp]($param: A1, default: A1 => B1): B1 =
+ * $selector match { $cases }
+ * def isDefinedAt(x: $argTp): Boolean =
+ * $selector match { $casesTrue }
+ * }
+ *
+ * TODO: it would be nicer to generate the tree specified above at once and type it as a whole,
+ * there are two gotchas:
+ * - matchResTp may not be known until we've typed the match (can only use resTp when it's fully defined),
+ * - if we typed the match in isolation first, you'd know its result type, but would have to re-jig the owner structure
+ * - could we use a type variable for matchResTp and backpatch it?
+ * - occurrences of `this` in `cases` or `sel` must resolve to the this of the class originally enclosing the match,
+ * not of the anonymous partial function subclass
+ *
+ * an alternative TODO: add partial function AST node or equivalent and get rid of this synthesis --> do everything in uncurry (or later)
+ * however, note that pattern matching codegen is designed to run *before* uncurry
+ */
+ def synthesizePartialFunction(paramName: TermName, paramPos: Position, tree: Tree, mode: Int, pt0: Type): Tree = {
+ assert(pt0.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt0.")
+
+ val pt = deskolemizeGADTSkolems(pt0)
+ val targs = pt.normalize.typeArgs
+
+ // if targs.head isn't fully defined, we can translate --> error
+ targs match {
+ case argTp :: _ if isFullyDefined(argTp) => // ok
+ case _ => // uh-oh
+ MissingParameterTypeAnonMatchError(tree, pt)
+ return setError(tree)
+ }
+
+ // NOTE: resTp still might not be fully defined
+ val argTp :: resTp :: Nil = targs
+
+ // targs must conform to Any for us to synthesize an applyOrElse (fallback to apply otherwise -- typically for @cps annotated targs)
+ val targsValidParams = targs forall (_ <:< AnyClass.tpe)
+
+ val anonClass = (context.owner
+ newAnonymousFunctionClass tree.pos
+ addAnnotation AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List()))
+
+ import CODE._
+
+ val Match(sel, cases) = tree
+
+ // need to duplicate the cases before typing them to generate the apply method, or the symbols will be all messed up
+ val casesTrue = cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE_typed)).duplicate.asInstanceOf[CaseDef])
+
+ // must generate a new tree every time
+ def selector: Tree = gen.mkUnchecked(
+ if (sel != EmptyTree) sel.duplicate
+ else atPos(tree.pos.focusStart)(
+ // SI-6925: subsume type of the selector to `argTp`
+ // we don't want/need the match to see the `A1` type that we must use for variance reasons in the method signature
+ //
+ // this failed: replace `selector` by `Typed(selector, TypeTree(argTp))` -- as it's an upcast, this should never fail,
+ // `(x: A1): A` doesn't always type check, even though `A1 <: A`, due to singleton types (test/files/pos/t4269.scala)
+ // hence the cast, which will be erased in posterasure
+ // (the cast originally caused extremely weird types to show up
+ // in test/scaladoc/run/SI-5933.scala because `variantToSkolem` was missing `tpSym.initialize`)
+ gen.mkCastPreservingAnnotations(Ident(paramName), argTp)
+ ))
+
+ def mkParam(methodSym: Symbol, tp: Type = argTp) =
+ methodSym.newValueParameter(paramName, paramPos.focus, SYNTHETIC) setInfo tp
+
+ def mkDefaultCase(body: Tree) =
+ atPos(tree.pos.makeTransparent) {
+ CaseDef(Bind(nme.DEFAULT_CASE, Ident(nme.WILDCARD)), body)
+ }
+
+ // `def applyOrElse[A1 <: $argTp, B1 >: $matchResTp](x: A1, default: A1 => B1): B1 =
+ // ${`$selector match { $cases; case default$ => default(x) }`
+ def applyOrElseMethodDef = {
+ val methodSym = anonClass.newMethod(nme.applyOrElse, tree.pos, FINAL | OVERRIDE)
+
+ // create the parameter that corresponds to the function's parameter
+ val A1 = methodSym newTypeParameter (newTypeName("A1")) setInfo TypeBounds.upper(argTp)
+ val x = mkParam(methodSym, A1.tpe)
+
+ // applyOrElse's default parameter:
+ val B1 = methodSym newTypeParameter (newTypeName("B1")) setInfo TypeBounds.empty
+ val default = methodSym newValueParameter (newTermName("default"), tree.pos.focus, SYNTHETIC) setInfo functionType(List(A1.tpe), B1.tpe)
+
+ val paramSyms = List(x, default)
+ methodSym setInfo polyType(List(A1, B1), MethodType(paramSyms, B1.tpe))
+
+ val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym))
+ // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
+ paramSyms foreach (methodBodyTyper.context.scope enter _)
+
+ // First, type without the default case; only the cases provided
+ // by the user are typed. The LUB of these becomes `B`, the lower
+ // bound of `B1`, which in turn is the result type of the default
+ // case
+ val match0 = methodBodyTyper.typedMatch(selector, cases, mode, resTp)
+ val matchResTp = match0.tpe
+
+ B1 setInfo TypeBounds.lower(matchResTp) // patch info
+
+ // the default uses applyOrElse's first parameter since the scrut's type has been widened
+ val match_ = {
+ val defaultCase = methodBodyTyper.typedCase(
+ mkDefaultCase(methodBodyTyper.typed1(REF(default) APPLY (REF(x)), mode, B1.tpe).setType(B1.tpe)), argTp, B1.tpe)
+ treeCopy.Match(match0, match0.selector, match0.cases :+ defaultCase)
+ }
+ match_ setType B1.tpe
+
+ // SI-6187 Do you really want to know? Okay, here's what's going on here.
+ //
+ // Well behaved trees satisfy the property:
+ //
+ // typed(tree) == typed(resetLocalAttrs(typed(tree))
+ //
+ // Trees constructed without low-level symbol manipulation get this for free;
+ // references to local symbols are cleared by `ResetAttrs`, but bind to the
+ // corresponding symbol in the re-typechecked tree. But PartialFunction synthesis
+ // doesn't play by these rules.
+ //
+ // During typechecking of method bodies, references to method type parameter from
+ // the declared types of the value parameters should bind to a fresh set of skolems,
+ // which have been entered into scope by `Namer#methodSig`. A comment therein:
+ //
+ // "since the skolemized tparams are in scope, the TypeRefs in vparamSymss refer to skolemized tparams"
+ //
+ // But, if we retypecheck the reset `applyOrElse`, the TypeTree of the `default`
+ // parameter contains no type. Somehow (where?!) it recovers a type that is _almost_ okay:
+ // `A1 => B1`. But it should really be `A1&0 => B1&0`. In the test, run/t6187.scala, this
+ // difference results in a type error, as `default.apply(x)` types as `B1`, which doesn't
+ // conform to the required `B1&0`
+ //
+ // I see three courses of action.
+ //
+ // 1) synthesize a `asInstanceOf[B1]` below (I tried this first. But... ewwww.)
+ // 2) install an 'original' TypeTree that will used after ResetAttrs (the solution below)
+ // 3) Figure out how the almost-correct type is recovered on re-typechecking, and
+ // substitute in the skolems.
+ //
+ // For 2.11, we'll probably shift this transformation back a phase or two, so macros
+ // won't be affected. But in any case, we should satisfy retypecheckability.
+ //
+ val originals: Map[Symbol, Tree] = {
+ def typedIdent(sym: Symbol) = methodBodyTyper.typedType(Ident(sym), mode)
+ val A1Tpt = typedIdent(A1)
+ val B1Tpt = typedIdent(B1)
+ Map(
+ x -> A1Tpt,
+ default -> gen.scalaFunctionConstr(List(A1Tpt), B1Tpt)
+ )
+ }
+ val rhs = methodBodyTyper.virtualizedMatch(match_, mode, B1.tpe)
+ val defdef = DefDef(methodSym, Modifiers(methodSym.flags), originals, rhs)
+
+ (defdef, matchResTp)
+ }
+
+ // `def isDefinedAt(x: $argTp): Boolean = ${`$selector match { $casesTrue; case default$ => false } }`
+ def isDefinedAtMethod = {
+ val methodSym = anonClass.newMethod(nme.isDefinedAt, tree.pos.makeTransparent, FINAL)
+ val paramSym = mkParam(methodSym)
+
+ val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
+ methodBodyTyper.context.scope enter paramSym
+ methodSym setInfo MethodType(List(paramSym), BooleanClass.tpe)
+
+ val defaultCase = mkDefaultCase(FALSE_typed)
+ val match_ = methodBodyTyper.typedMatch(selector, casesTrue :+ defaultCase, mode, BooleanClass.tpe)
+
+ DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanClass.tpe))
+ }
+
+ // only used for @cps annotated partial functions
+ // `def apply(x: $argTp): $matchResTp = $selector match { $cases }`
+ def applyMethod = {
+ val methodSym = anonClass.newMethod(nme.apply, tree.pos, FINAL | OVERRIDE)
+ val paramSym = mkParam(methodSym)
+
+ methodSym setInfo MethodType(List(paramSym), AnyClass.tpe)
+
+ val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym))
+ // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
+ methodBodyTyper.context.scope enter paramSym
+
+ val match_ = methodBodyTyper.typedMatch(selector, cases, mode, resTp)
+
+ val matchResTp = match_.tpe
+ methodSym setInfo MethodType(List(paramSym), matchResTp) // patch info
+
+ (DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, matchResTp)), matchResTp)
+ }
+
+ def parents(resTp: Type) = addSerializable(appliedType(AbstractPartialFunctionClass.typeConstructor, List(argTp, resTp)))
+
+ val members = {
+ val (applyMeth, matchResTp) = {
+ // rig the show so we can get started typing the method body -- later we'll correct the infos...
+ // targs were type arguments for PartialFunction, so we know they will work for AbstractPartialFunction as well
+ anonClass setInfo ClassInfoType(parents(resTp), newScope, anonClass)
+
+ // somehow @cps annotations upset the typer when looking at applyOrElse's signature, but not apply's
+ // TODO: figure out the details (T @cps[U] is not a subtype of Any, but then why does it work for the apply method?)
+ if (targsValidParams) applyOrElseMethodDef
+ else applyMethod
+ }
+
+ // patch info to the class's definitive info
+ anonClass setInfo ClassInfoType(parents(matchResTp), newScope, anonClass)
+ List(applyMeth, isDefinedAtMethod)
+ }
+
+ members foreach (m => anonClass.info.decls enter m.symbol)
+
+ val typedBlock = typedPos(tree.pos, mode, pt) {
+ Block(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(
+ Apply(Select(New(Ident(anonClass.name).setSymbol(anonClass)), nme.CONSTRUCTOR), List())
+ ))
+ }
+
+ if (typedBlock.isErrorTyped) typedBlock
+ else // Don't leak implementation details into the type, see SI-6575
+ typedPos(tree.pos, mode, pt) {
+ Typed(typedBlock, TypeTree(typedBlock.tpe baseType PartialFunctionClass))
+ }
+ }
+
+
/**
* @param fun ...
* @param mode ...
* @param pt ...
* @return ...
*/
- def typedFunction(fun: Function, mode: Int, pt: Type): Tree = {
+ private def typedFunction(fun: Function, mode: Int, pt: Type): Tree = {
val numVparams = fun.vparams.length
- val codeExpected = !forMSIL && (pt.typeSymbol isNonBottomSubClass CodeClass)
-
if (numVparams > definitions.MaxFunctionArity)
- return errorTree(fun, "implementation restricts functions to " + definitions.MaxFunctionArity + " parameters")
+ return MaxFunctionArityError(fun)
def decompose(pt: Type): (Symbol, List[Type], Type) =
- if ((isFunctionType(pt)
- ||
- pt.typeSymbol == PartialFunctionClass &&
- numVparams == 1 && fun.body.isInstanceOf[Match])
- && // see bug901 for a reason why next conditions are needed
- (pt.normalize.typeArgs.length - 1 == numVparams
- ||
- fun.vparams.exists(_.tpt.isEmpty)))
+ if ((isFunctionType(pt) || (pt.typeSymbol == PartialFunctionClass && numVparams == 1 && fun.body.isInstanceOf[Match])) && // see bug901 for a reason why next conditions are needed
+ ( pt.normalize.typeArgs.length - 1 == numVparams
+ || fun.vparams.exists(_.tpt.isEmpty)
+ ))
(pt.typeSymbol, pt.normalize.typeArgs.init, pt.normalize.typeArgs.last)
else
(FunctionClass(numVparams), fun.vparams map (x => NoType), WildcardType)
- val (clazz, argpts, respt) = decompose(if (codeExpected) pt.normalize.typeArgs.head else pt)
-
+ val (clazz, argpts, respt) = decompose(pt)
if (argpts.lengthCompare(numVparams) != 0)
- errorTree(fun, "wrong number of parameters; expected = " + argpts.length)
+ WrongNumberOfParametersError(fun, argpts)
else {
- val vparamSyms = (fun.vparams, argpts).zipped map { (vparam, argpt) =>
+ foreach2(fun.vparams, argpts) { (vparam, argpt) =>
if (vparam.tpt.isEmpty) {
vparam.tpt.tpe =
if (isFullyDefined(argpt)) argpt
else {
fun match {
- case etaExpansion(vparams, fn, args) if !codeExpected =>
+ case etaExpansion(vparams, fn, args) =>
silent(_.typed(fn, forFunMode(mode), pt)) match {
- case fn1: Tree if context.undetparams.isEmpty =>
+ case SilentResultValue(fn1) if context.undetparams.isEmpty =>
// if context,undetparams is not empty, the function was polymorphic,
// so we need the missing arguments to infer its type. See #871
//println("typing eta "+fun+":"+fn1.tpe+"/"+context.undetparams)
@@ -2025,41 +2820,58 @@ trait Typers extends Modes {
}
case _ =>
}
- error(vparam.pos, missingParameterTypeMsg(fun, vparam, pt))
+ MissingParameterTypeError(fun, vparam, pt)
ErrorType
}
if (!vparam.tpt.pos.isDefined) vparam.tpt setPos vparam.pos.focus
}
- enterSym(context, vparam)
- if (context.retyping) context.scope enter vparam.symbol
- vparam.symbol
- }
-
- val vparams = fun.vparams mapConserve (typedValDef)
-// for (vparam <- vparams) {
-// checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); ()
-// }
- var body = typed(fun.body, respt)
- val formals = vparamSyms map (_.tpe)
- val restpe = packedType(body, fun.symbol).deconst
- val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe)
-// body = checkNoEscaping.locals(context.scope, restpe, body)
- val fun1 = treeCopy.Function(fun, vparams, body).setType(funtpe)
- if (codeExpected) {
- val liftPoint = Apply(Select(Ident(CodeModule), nme.lift_), List(fun1))
- typed(atPos(fun.pos)(liftPoint))
- } else fun1
+ }
+
+ fun.body match {
+ // translate `x => x match { <cases> }` : PartialFunction to
+ // `new PartialFunction { def applyOrElse(x, default) = x match { <cases> } def isDefinedAt(x) = ... }`
+ case Match(sel, cases) if (sel ne EmptyTree) && newPatternMatching && (pt.typeSymbol == PartialFunctionClass) =>
+ // go to outer context -- must discard the context that was created for the Function since we're discarding the function
+ // thus, its symbol, which serves as the current context.owner, is not the right owner
+ // you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner)
+ val outerTyper = newTyper(context.outer)
+ val p = fun.vparams.head
+ if (p.tpt.tpe == null) p.tpt setType outerTyper.typedType(p.tpt).tpe
+
+ outerTyper.synthesizePartialFunction(p.name, p.pos, fun.body, mode, pt)
+ case _ =>
+ val vparamSyms = fun.vparams map { vparam =>
+ enterSym(context, vparam)
+ if (context.retyping) context.scope enter vparam.symbol
+ vparam.symbol
+ }
+ val vparams = fun.vparams mapConserve (typedValDef)
+ // for (vparam <- vparams) {
+ // checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); ()
+ // }
+ val formals = vparamSyms map (_.tpe)
+ val body1 = typed(fun.body, respt)
+ val restpe = packedType(body1, fun.symbol).deconst.resultType
+ val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe)
+ // body = checkNoEscaping.locals(context.scope, restpe, body)
+ treeCopy.Function(fun, vparams, body1).setType(funtpe)
+ }
}
}
- def typedRefinement(stats: List[Tree]) {
+ def typedRefinement(templ: Template) {
+ val stats = templ.body
namer.enterSyms(stats)
+
// need to delay rest of typedRefinement to avoid cyclic reference errors
unit.toCheck += { () =>
- // go to next outer context which is not silent, see #3614
- var c = context
- while (!c.reportGeneralErrors) c = c.outer
- val stats1 = newTyper(c).typedStats(stats, NoSymbol)
+ val stats1 = typedStats(stats, NoSymbol)
+ // this code kicks in only after typer, so `stats` will never be filled in time
+ // as a result, most of compound type trees with non-empty stats will fail to reify
+ // todo. investigate whether something can be done about this
+ val att = templ.attachments.get[CompoundTypeTreeOriginalAttachment].getOrElse(CompoundTypeTreeOriginalAttachment(Nil, Nil))
+ templ.removeAttachment[CompoundTypeTreeOriginalAttachment]
+ templ updateAttachment att.copy(stats = stats1)
for (stat <- stats1 if stat.isDef) {
val member = stat.symbol
if (!(context.owner.ancestors forall
@@ -2072,37 +2884,59 @@ trait Typers extends Modes {
def typedImport(imp : Import) : Import = (transformed remove imp) match {
case Some(imp1: Import) => imp1
- case None => log("unhandled import: "+imp+" in "+unit); imp
+ case _ => log("unhandled import: "+imp+" in "+unit); imp
+ }
+ private def isWarnablePureExpression(tree: Tree) = tree match {
+ case EmptyTree | Literal(Constant(())) => false
+ case _ =>
+ !tree.isErrorTyped && (treeInfo isExprSafeToInline tree) && {
+ val sym = tree.symbol
+ (sym == null) || !(sym.isModule || sym.isLazy) || {
+ debuglog("'Pure' but side-effecting expression in statement position: " + tree)
+ false
+ }
+ }
}
def typedStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
val inBlock = exprOwner == context.owner
def includesTargetPos(tree: Tree) =
- tree.pos.isRange && context.unit != null && (tree.pos includes context.unit.targetPos)
+ tree.pos.isRange && context.unit.exists && (tree.pos includes context.unit.targetPos)
val localTarget = stats exists includesTargetPos
def typedStat(stat: Tree): Tree = {
- if (context.owner.isRefinementClass && !treeInfo.isDeclaration(stat))
- errorTree(stat, "only declarations allowed here")
+ if (context.owner.isRefinementClass && !treeInfo.isDeclarationOrTypeDef(stat))
+ OnlyDeclarationsError(stat)
else
stat match {
case imp @ Import(_, _) =>
- context = context.makeNewImport(imp)
imp.symbol.initialize
- typedImport(imp)
+ if (!imp.symbol.isError) {
+ context = context.makeNewImport(imp)
+ typedImport(imp)
+ } else EmptyTree
case _ =>
if (localTarget && !includesTargetPos(stat)) {
// skip typechecking of statements in a sequence where some other statement includes
// the targetposition
stat
} else {
- val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) this
- else newTyper(context.make(stat, exprOwner))
+ val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) {
+ this
+ } else newTyper(context.make(stat, exprOwner))
+ // XXX this creates a spurious dead code warning if an exception is thrown
+ // in a constructor, even if it is the only thing in the constructor.
val result = checkDead(localTyper.typed(stat, EXPRmode | BYVALmode, WildcardType))
+
if (treeInfo.isSelfOrSuperConstrCall(result)) {
context.inConstructorSuffix = true
if (treeInfo.isSelfConstrCall(result) && result.symbol.pos.pointOrElse(0) >= exprOwner.enclMethod.pos.pointOrElse(0))
- error(stat.pos, "called constructor's definition must precede calling constructor's definition")
+ ConstructorsOrderError(stat)
}
+
+ if (isWarnablePureExpression(result)) context.warning(stat.pos,
+ "a pure expression does nothing in statement position; " +
+ "you may be omitting necessary parentheses"
+ )
result
}
}
@@ -2112,61 +2946,74 @@ trait Typers extends Modes {
* follow the logic, so I renamed one to something distinct.
*/
def accesses(looker: Symbol, accessed: Symbol) = accessed.hasLocalFlag && (
- accessed.isParamAccessor || (looker.hasAccessorFlag && !accessed.hasAccessorFlag && accessed.isPrivate)
+ (accessed.isParamAccessor)
+ || (looker.hasAccessorFlag && !accessed.hasAccessorFlag && accessed.isPrivate)
)
- def checkNoDoubleDefsAndAddSynthetics(stats: List[Tree]): List[Tree] = {
+ def checkNoDoubleDefs(stats: List[Tree]): Unit = {
val scope = if (inBlock) context.scope else context.owner.info.decls
- var newStats = new ListBuffer[Tree]
- var needsCheck = true
- var moreToAdd = true
- while (moreToAdd) {
- val initSize = scope.size
- var e = scope.elems
- while ((e ne null) && e.owner == scope) {
-
- // check no double def
- if (needsCheck) {
- var e1 = scope.lookupNextEntry(e)
- while ((e1 ne null) && e1.owner == scope) {
- if (!accesses(e.sym, e1.sym) && !accesses(e1.sym, e.sym) &&
- (e.sym.isType || inBlock || (e.sym.tpe matches e1.sym.tpe)))
- // default getters are defined twice when multiple overloads have defaults. an
- // error for this is issued in RefChecks.checkDefaultsInOverloaded
- if (!e.sym.isErroneous && !e1.sym.isErroneous && !e.sym.hasDefaultFlag &&
- !e.sym.hasAnnotation(BridgeClass) && !e1.sym.hasAnnotation(BridgeClass)) {
- error(e.sym.pos, e1.sym+" is defined twice"+
- {if(!settings.debug.value) "" else " in "+unit.toString})
- scope.unlink(e1) // need to unlink to avoid later problems with lub; see #2779
- }
- e1 = scope.lookupNextEntry(e1)
+ var e = scope.elems
+ while ((e ne null) && e.owner == scope) {
+ var e1 = scope.lookupNextEntry(e)
+ while ((e1 ne null) && e1.owner == scope) {
+ if (!accesses(e.sym, e1.sym) && !accesses(e1.sym, e.sym) &&
+ (e.sym.isType || inBlock || (e.sym.tpe matches e1.sym.tpe)))
+ // default getters are defined twice when multiple overloads have defaults. an
+ // error for this is issued in RefChecks.checkDefaultsInOverloaded
+ if (!e.sym.isErroneous && !e1.sym.isErroneous && !e.sym.hasDefaultFlag &&
+ !e.sym.hasAnnotation(BridgeClass) && !e1.sym.hasAnnotation(BridgeClass)) {
+ log("Double definition detected:\n " +
+ ((e.sym.getClass, e.sym.info, e.sym.ownerChain)) + "\n " +
+ ((e1.sym.getClass, e1.sym.info, e1.sym.ownerChain)))
+
+ DefDefinedTwiceError(e.sym, e1.sym)
+ scope.unlink(e1) // need to unlink to avoid later problems with lub; see #2779
}
- }
-
- // add synthetics
- context.unit.synthetics get e.sym foreach { tree =>
- newStats += typedStat(tree) // might add even more synthetics to the scope
- context.unit.synthetics -= e.sym
+ e1 = scope.lookupNextEntry(e1)
}
-
e = e.next
}
- needsCheck = false
- // the type completer of a synthetic might add more synthetics. example: if the
- // factory method of a case class (i.e. the constructor) has a default.
- moreToAdd = initSize != scope.size
+ }
+
+ def addSynthetics(stats: List[Tree]): List[Tree] = {
+ val scope = if (inBlock) context.scope else context.owner.info.decls
+ var newStats = new ListBuffer[Tree]
+ var moreToAdd = true
+ while (moreToAdd) {
+ val initElems = scope.elems
+ // SI-5877 The decls of a package include decls of the package object. But we don't want to add
+ // the corresponding synthetics to the package class, only to the package object class.
+ def shouldAdd(sym: Symbol) =
+ inBlock || !isInPackageObject(sym, context.owner)
+ for (sym <- scope if shouldAdd(sym))
+ for (tree <- context.unit.synthetics get sym) {
+ newStats += typedStat(tree) // might add even more synthetics to the scope
+ context.unit.synthetics -= sym
+ }
+ // the type completer of a synthetic might add more synthetics. example: if the
+ // factory method of a case class (i.e. the constructor) has a default.
+ moreToAdd = scope.elems ne initElems
}
if (newStats.isEmpty) stats
else {
// put default getters next to the method they belong to,
// same for companion objects. fixes #2489 and #4036.
+ // [Martin] This is pretty ugly. I think we could avoid
+ // this code by associating defaults and companion objects
+ // with the original tree instead of the new symbol.
def matches(stat: Tree, synt: Tree) = (stat, synt) match {
+ // synt is default arg for stat
case (DefDef(_, statName, _, _, _, _), DefDef(mods, syntName, _, _, _, _)) =>
mods.hasDefaultFlag && syntName.toString.startsWith(statName.toString)
+ // synt is companion module
case (ClassDef(_, className, _, _), ModuleDef(_, moduleName, _)) =>
className.toTermName == moduleName
+ // synt is implicit def for implicit class (#6278)
+ case (ClassDef(cmods, cname, _, _), DefDef(dmods, dname, _, _, _, _)) =>
+ cmods.isImplicit && dmods.isImplicit && cname.toTermName == dname
+
case _ => false
}
@@ -2181,47 +3028,68 @@ trait Typers extends Modes {
}) ::: newStats.toList
}
}
- val result = stats mapConserve (typedStat)
- if (phase.erasedTypes) result
- else checkNoDoubleDefsAndAddSynthetics(result)
+
+ val stats1 = stats mapConserve typedStat
+ if (phase.erasedTypes) stats1
+ else {
+ checkNoDoubleDefs(stats1)
+ addSynthetics(stats1)
+ }
}
def typedArg(arg: Tree, mode: Int, newmode: Int, pt: Type): Tree = {
val typedMode = onlyStickyModes(mode) | newmode
- val t = constrTyperIf((mode & SCCmode) != 0).typed(arg, typedMode, pt)
+ val t = withCondConstrTyper((mode & SCCmode) != 0)(_.typed(arg, typedMode, pt))
checkDead.inMode(typedMode, t)
}
def typedArgs(args: List[Tree], mode: Int) =
args mapConserve (arg => typedArg(arg, mode, 0, WildcardType))
- def typedArgs(args: List[Tree], mode: Int, originalFormals: List[Type], adaptedFormals: List[Type]) = {
- var newmodes = originalFormals map (tp => if (isByNameParamType(tp)) 0 else BYVALmode)
- if (isVarArgTypes(originalFormals)) // TR check really necessary?
- newmodes = newmodes.init ++ List.fill(args.length - originalFormals.length + 1)(STARmode | BYVALmode)
-
- (args, adaptedFormals, newmodes).zipped map { (arg, formal, m) =>
- typedArg(arg, mode, m, formal)
+ /** Type trees in `args0` against corresponding expected type in `adapted0`.
+ *
+ * The mode in which each argument is typed is derived from `mode` and
+ * whether the arg was originally by-name or var-arg (need `formals0` for that)
+ * the default is by-val, of course.
+ *
+ * (docs reverse-engineered -- AM)
+ */
+ def typedArgs(args0: List[Tree], mode: Int, formals0: List[Type], adapted0: List[Type]): List[Tree] = {
+ val sticky = onlyStickyModes(mode)
+ def loop(args: List[Tree], formals: List[Type], adapted: List[Type]): List[Tree] = {
+ if (args.isEmpty || adapted.isEmpty) Nil
+ else {
+ // No formals left or * indicates varargs.
+ val isVarArgs = formals.isEmpty || formals.tail.isEmpty && isRepeatedParamType(formals.head)
+ val typedMode = sticky | (
+ if (isVarArgs) STARmode | BYVALmode
+ else if (isByNameParamType(formals.head)) 0
+ else BYVALmode
+ )
+ val tree = typedArg(args.head, mode, typedMode, adapted.head)
+ // formals may be empty, so don't call tail
+ tree :: loop(args.tail, formals drop 1, adapted.tail)
+ }
}
+ loop(args0, formals0, adapted0)
}
/** Does function need to be instantiated, because a missing parameter
* in an argument closure overlaps with an uninstantiated formal?
*/
def needsInstantiation(tparams: List[Symbol], formals: List[Type], args: List[Tree]) = {
- def isLowerBounded(tparam: Symbol) = {
- val losym = tparam.info.bounds.lo.typeSymbol
- losym != NothingClass && losym != NullClass
- }
- (formals, args).zipped exists {
+ def isLowerBounded(tparam: Symbol) = !tparam.info.bounds.lo.typeSymbol.isBottomClass
+
+ exists2(formals, args) {
case (formal, Function(vparams, _)) =>
(vparams exists (_.tpt.isEmpty)) &&
vparams.length <= MaxFunctionArity &&
(formal baseType FunctionClass(vparams.length) match {
case TypeRef(_, _, formalargs) =>
- (formalargs, vparams).zipped.exists ((formalarg, vparam) =>
- vparam.tpt.isEmpty && (tparams exists (formalarg contains))) &&
- (tparams forall isLowerBounded)
+ ( exists2(formalargs, vparams)((formal, vparam) =>
+ vparam.tpt.isEmpty && (tparams exists formal.contains))
+ && (tparams forall isLowerBounded)
+ )
case _ =>
false
})
@@ -2230,7 +3098,7 @@ trait Typers extends Modes {
}
}
- /** Is `tree' a block created by a named application?
+ /** Is `tree` a block created by a named application?
*/
def isNamedApplyBlock(tree: Tree) =
context.namedApplyBlockInfo exists (_._1 == tree)
@@ -2241,80 +3109,96 @@ trait Typers extends Modes {
(methCtx != NoContext) && {
val contextFun = methCtx.tree.symbol
contextFun.isPrimaryConstructor && contextFun.owner.isModuleClass &&
- companionModuleOf(calledFun.owner, context).moduleClass == contextFun.owner
+ companionSymbolOf(calledFun.owner, context).moduleClass == contextFun.owner
}
}
}
def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
// TODO_NMT: check the assumption that args nonEmpty
- def errTree = setError(treeCopy.Apply(tree, fun0, args))
- def errorTree(msg: String) = { error(tree.pos, msg); errTree }
-
- var fun = fun0
- if (fun.hasSymbol && fun.symbol.isOverloaded) {
- // remove alternatives with wrong number of parameters without looking at types.
- // less expensive than including them in inferMethodAlternatvie (see below).
- def shapeType(arg: Tree): Type = arg match {
- case Function(vparams, body) =>
- functionType(vparams map (vparam => AnyClass.tpe), shapeType(body))
- case AssignOrNamedArg(Ident(name), rhs) =>
- NamedType(name, shapeType(rhs))
- case _ =>
- NothingClass.tpe
- }
- val argtypes = args map shapeType
- val pre = fun.symbol.tpe.prefix
-
- var sym = fun.symbol filter { alt =>
- // must use pt as expected type, not WildcardType (a tempting quick fix to #2665)
- // now fixed by using isWeaklyCompatible in exprTypeArgs
- // TODO: understand why exactly -- some types were not inferred anymore (`ant clean quick.bin` failed)
- // (I had expected inferMethodAlternative to pick up the slack introduced by using WildcardType here)
- isApplicableSafe(context.undetparams, followApply(pre.memberType(alt)), argtypes, pt)
- }
- if (sym.isOverloaded) {
- val sym1 = sym filter (alt => {
- // eliminate functions that would result from tupling transforms
- // keeps alternatives with repeated params
- hasExactlyNumParams(followApply(alt.tpe), argtypes.length) ||
- // also keep alts which define at least one default
- alt.tpe.paramss.exists(_.exists(_.hasDefault))
- })
- if (sym1 != NoSymbol) sym = sym1
- }
- if (sym != NoSymbol)
- fun = adapt(fun setSymbol sym setType pre.memberType(sym), forFunMode(mode), WildcardType)
+ def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
+ def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
+
+ def preSelectOverloaded(fun: Tree): Tree = {
+ if (fun.hasSymbol && fun.symbol.isOverloaded) {
+ // remove alternatives with wrong number of parameters without looking at types.
+ // less expensive than including them in inferMethodAlternatvie (see below).
+ def shapeType(arg: Tree): Type = arg match {
+ case Function(vparams, body) =>
+ functionType(vparams map (vparam => AnyClass.tpe), shapeType(body))
+ case AssignOrNamedArg(Ident(name), rhs) =>
+ NamedType(name, shapeType(rhs))
+ case _ =>
+ NothingClass.tpe
+ }
+ val argtypes = args map shapeType
+ val pre = fun.symbol.tpe.prefix
+
+ var sym = fun.symbol filter { alt =>
+ // must use pt as expected type, not WildcardType (a tempting quick fix to #2665)
+ // now fixed by using isWeaklyCompatible in exprTypeArgs
+ // TODO: understand why exactly -- some types were not inferred anymore (`ant clean quick.bin` failed)
+ // (I had expected inferMethodAlternative to pick up the slack introduced by using WildcardType here)
+ //
+ // @PP responds: I changed it to pass WildcardType instead of pt and only one line in
+ // trunk (excluding scalacheck, which had another) failed to compile. It was this line in
+ // Types: "refs = Array(Map(), Map())". I determined that inference fails if there are at
+ // least two invariant type parameters. See the test case I checked in to help backstop:
+ // pos/isApplicableSafe.scala.
+ isApplicableSafe(context.undetparams, followApply(pre.memberType(alt)), argtypes, pt)
+ }
+ if (sym.isOverloaded) {
+ val sym1 = sym filter (alt => {
+ // eliminate functions that would result from tupling transforms
+ // keeps alternatives with repeated params
+ hasExactlyNumParams(followApply(alt.tpe), argtypes.length) ||
+ // also keep alts which define at least one default
+ alt.tpe.paramss.exists(_.exists(_.hasDefault))
+ })
+ if (sym1 != NoSymbol) sym = sym1
+ }
+ if (sym == NoSymbol) fun
+ else adapt(fun setSymbol sym setType pre.memberType(sym), forFunMode(mode), WildcardType)
+ } else fun
}
+ val fun = preSelectOverloaded(fun0)
+
fun.tpe match {
case OverloadedType(pre, alts) =>
- val undetparams = context.extractUndetparams()
-
- val argtpes = new ListBuffer[Type]
- val amode = forArgMode(fun, mode)
- val args1 = args map {
- case arg @ AssignOrNamedArg(Ident(name), rhs) =>
- // named args: only type the righthand sides ("unknown identifier" errors otherwise)
- val rhs1 = typedArg(rhs, amode, BYVALmode, WildcardType)
- argtpes += NamedType(name, rhs1.tpe.deconst)
- // the assign is untyped; that's ok because we call doTypedApply
- atPos(arg.pos) { new AssignOrNamedArg(arg.lhs , rhs1) }
- case arg =>
- val arg1 = typedArg(arg, amode, BYVALmode, WildcardType)
- argtpes += arg1.tpe.deconst
- arg1
+ def handleOverloaded = {
+ val undetparams = context.extractUndetparams()
+
+ val argtpes = new ListBuffer[Type]
+ val amode = forArgMode(fun, mode)
+ val args1 = args map {
+ case arg @ AssignOrNamedArg(Ident(name), rhs) =>
+ // named args: only type the righthand sides ("unknown identifier" errors otherwise)
+ val rhs1 = typedArg(rhs, amode, BYVALmode, WildcardType)
+ argtpes += NamedType(name, rhs1.tpe.deconst)
+ // the assign is untyped; that's ok because we call doTypedApply
+ atPos(arg.pos) { new AssignOrNamedArg(arg.lhs, rhs1) }
+ case arg =>
+ val arg1 = typedArg(arg, amode, BYVALmode, WildcardType)
+ argtpes += arg1.tpe.deconst
+ arg1
+ }
+ context.undetparams = undetparams
+ if (context.hasErrors)
+ setError(tree)
+ else {
+ inferMethodAlternative(fun, undetparams, argtpes.toList, pt, varArgsOnly = treeInfo.isWildcardStarArgList(args))
+ doTypedApply(tree, adapt(fun, forFunMode(mode), WildcardType), args1, mode, pt)
+ }
}
- context.undetparams = undetparams
- inferMethodAlternative(fun, undetparams, argtpes.toList, pt, varArgsOnly = treeInfo.isWildcardStarArgList(args))
- doTypedApply(tree, adapt(fun, forFunMode(mode), WildcardType), args1, mode, pt)
+ handleOverloaded
case mt @ MethodType(params, _) =>
val paramTypes = mt.paramTypes
// repeat vararg as often as needed, remove by-name
val formals = formalTypes(paramTypes, args.length)
- /** Try packing all arguments into a Tuple and apply `fun'
+ /** Try packing all arguments into a Tuple and apply `fun`
* to that. This is the last thing which is tried (after
* default arguments)
*/
@@ -2327,10 +3211,15 @@ trait Typers extends Modes {
// the inner "doTypedApply" does "extractUndetparams" => restore when it fails
val savedUndetparams = context.undetparams
silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) match {
- case t: Tree =>
-// println("tuple conversion to "+t+" for "+mt)//DEBUG
- Some(t)
- case ex =>
+ case SilentResultValue(t) =>
+ // Depending on user options, may warn or error here if
+ // a Unit or tuple was inserted.
+ Some(t) filter (tupledTree =>
+ !inExprModeButNot(mode, FUNmode)
+ || tupledTree.symbol == null
+ || checkValidAdaptation(tupledTree, args)
+ )
+ case _ =>
context.undetparams = savedUndetparams
None
}
@@ -2345,26 +3234,33 @@ trait Typers extends Modes {
def tryNamesDefaults: Tree = {
val lencmp = compareLengths(args, formals)
- if (mt.isErroneous) errTree
- else if (inPatternMode(mode))
+ def checkNotMacro() = {
+ if (fun.symbol != null && fun.symbol.filter(sym => sym != null && sym.isTermMacro && !sym.isErroneous) != NoSymbol)
+ tryTupleApply getOrElse duplErrorTree(NamedAndDefaultArgumentsNotSupportedForMacros(tree, fun))
+ }
+
+ if (mt.isErroneous) duplErrTree
+ else if (inPatternMode(mode)) {
// #2064
- errorTree("wrong number of arguments for "+ treeSymTypeMsg(fun))
- else if (lencmp > 0) {
- tryTupleApply getOrElse errorTree("too many arguments for "+treeSymTypeMsg(fun))
+ duplErrorTree(WrongNumberOfArgsError(tree, fun))
+ } else if (lencmp > 0) {
+ tryTupleApply getOrElse duplErrorTree(TooManyArgsNamesDefaultsError(tree, fun))
} else if (lencmp == 0) {
// we don't need defaults. names were used, so this application is transformed
// into a block (@see transformNamedApplication in NamesDefaults)
val (namelessArgs, argPos) = removeNames(Typer.this)(args, params)
if (namelessArgs exists (_.isErroneous)) {
- errTree
+ duplErrTree
} else if (!isIdentity(argPos) && !sameLength(formals, params))
// !isIdentity indicates that named arguments are used to re-order arguments
- errorTree("when using named arguments, the vararg parameter has to be specified exactly once")
+ duplErrorTree(MultipleVarargError(tree))
else if (isIdentity(argPos) && !isNamedApplyBlock(fun)) {
// if there's no re-ordering, and fun is not transformed, no need to transform
// more than an optimization, e.g. important in "synchronized { x = update-x }"
+ checkNotMacro()
doTypedApply(tree, fun, namelessArgs, mode, pt)
} else {
+ checkNotMacro()
transformNamedApplication(Typer.this, mode, pt)(
treeCopy.Apply(tree, fun, namelessArgs), argPos)
}
@@ -2372,8 +3268,31 @@ trait Typers extends Modes {
// defaults are needed. they are added to the argument list in named style as
// calls to the default getters. Example:
// foo[Int](a)() ==> foo[Int](a)(b = foo$qual.foo$default$2[Int](a))
+ checkNotMacro()
+
+ // SI-8111 transformNamedApplication eagerly shuffles around the application to preserve
+ // evaluation order. During this process, it calls `changeOwner` on symbols that
+ // are transplanted underneath synthetic temporary vals.
+ //
+ // Here, we keep track of the symbols owned by `context.owner` to enable us to
+ // rollback, so that we don't end up with "orphaned" symbols.
+ //
+ // TODO: Find a better way!
+ //
+ // Note that duplicating trees would not be enough to fix this problem, we would also need to
+ // clone local symbols in the duplicated tree to truly isolate things (in the spirit of BodyDuplicator),
+ // or, better yet, disentangle the logic in `transformNamedApplication` so that we could
+ // determine whether names/defaults is viable *before* transforming trees.
+ def ownerOf(sym: Symbol) = if (sym == null || sym == NoSymbol) NoSymbol else sym.owner
+ val symsOwnedByContextOwner = tree.collect {
+ case t @ (_: DefTree | _: Function) if ownerOf(t.symbol) == context.owner => t.symbol
+ }
+ def rollbackNamesDefaultsOwnerChanges() {
+ symsOwnedByContextOwner foreach (_.owner = context.owner)
+ }
+
val fun1 = transformNamedApplication(Typer.this, mode, pt)(fun, x => x)
- if (fun1.isErroneous) errTree
+ if (fun1.isErroneous) duplErrTree
else {
assert(isNamedApplyBlock(fun1), fun1)
val NamedApplyInfo(qual, targs, previousArgss, _) = context.namedApplyBlockInfo.get._2
@@ -2390,17 +3309,18 @@ trait Typers extends Modes {
val lencmp2 = compareLengths(allArgs, formals)
if (!sameLength(allArgs, args) && callToCompanionConstr(context, funSym)) {
- errorTree("module extending its companion class cannot use default constructor arguments")
+ duplErrorTree(ModuleUsingCompanionClassDefaultArgsErrror(tree))
} else if (lencmp2 > 0) {
removeNames(Typer.this)(allArgs, params) // #3818
- errTree
+ duplErrTree
} else if (lencmp2 == 0) {
// useful when a default doesn't match parameter type, e.g. def f[T](x:T="a"); f[Int]()
val note = "Error occurred in an application involving default arguments."
if (!(context.diagnostic contains note)) context.diagnostic = note :: context.diagnostic
doTypedApply(tree, if (blockIsEmpty) fun else fun1, allArgs, mode, pt)
} else {
- tryTupleApply getOrElse errorTree(notEnoughArgumentsMsg(fun, missing))
+ rollbackNamesDefaultsOwnerChanges()
+ tryTupleApply getOrElse duplErrorTree(NotEnoughArgsError(tree, fun, missing))
}
}
}
@@ -2410,91 +3330,112 @@ trait Typers extends Modes {
(args exists isNamed) || // uses a named argument
isNamedApplyBlock(fun)) { // fun was transformed to a named apply block =>
// integrate this application into the block
- tryNamesDefaults
+ if (dyna.isApplyDynamicNamed(fun)) dyna.typedNamedApply(tree, fun, args, mode, pt)
+ else tryNamesDefaults
} else {
val tparams = context.extractUndetparams()
if (tparams.isEmpty) { // all type params are defined
- // In order for checkDead not to be misled by the unfortunate special
- // case of AnyRef#synchronized (which is implemented with signature T => T
- // but behaves as if it were (=> T) => T) we need to know what is the actual
- // target of a call. Since this information is no longer available from
- // typedArg, it is recorded here.
- checkDead.updateExpr(fun)
- val args1 = typedArgs(args, forArgMode(fun, mode), paramTypes, formals)
- // instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
- // val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
- // precise(foo) : foo.type => foo.type
- val restpe = mt.resultType(args1 map (arg => gen.stableTypeFor(arg) getOrElse arg.tpe))
- def ifPatternSkipFormals(tp: Type) = tp match {
- case MethodType(_, rtp) if (inPatternMode(mode)) => rtp
- case _ => tp
- }
-
- // Replace the Delegate-Chainer methods += and -= with corresponding
- // + and - calls, which are translated in the code generator into
- // Combine and Remove
- if (forMSIL) {
- fun match {
- case Select(qual, name) =>
- if (isSubType(qual.tpe, DelegateClass.tpe)
- && (name == encode("+=") || name == encode("-=")))
- {
- val n = if (name == encode("+=")) nme.PLUS else nme.MINUS
- val f = Select(qual, n)
- // the compiler thinks, the PLUS method takes only one argument,
- // but he thinks it's an instance method -> still two ref's on the stack
- // -> translated by backend
- val rhs = treeCopy.Apply(tree, f, args)
- return typed(Assign(qual, rhs))
- }
- case _ => ()
+ def handleMonomorphicCall: Tree = {
+ // In order for checkDead not to be misled by the unfortunate special
+ // case of AnyRef#synchronized (which is implemented with signature T => T
+ // but behaves as if it were (=> T) => T) we need to know what is the actual
+ // target of a call. Since this information is no longer available from
+ // typedArg, it is recorded here.
+ val args1 =
+ // no expected type when jumping to a match label -- anything goes (this is ok since we're typing the translation of well-typed code)
+ // ... except during erasure: we must take the expected type into account as it drives the insertion of casts!
+ // I've exhausted all other semi-clean approaches I could think of in balancing GADT magic, SI-6145, CPS type-driven transforms and other existential trickiness
+ // (the right thing to do -- packing existential types -- runs into limitations in subtyping existential types,
+ // casting breaks SI-6145,
+ // not casting breaks GADT typing as it requires sneaking ill-typed trees past typer)
+ if (!phase.erasedTypes && fun.symbol.isLabel && treeInfo.isSynthCaseSymbol(fun.symbol))
+ typedArgs(args, forArgMode(fun, mode))
+ else
+ typedArgs(args, forArgMode(fun, mode), paramTypes, formals)
+
+ // instantiate dependent method types, must preserve singleton types where possible (stableTypeFor) -- example use case:
+ // val foo = "foo"; def precise(x: String)(y: x.type): x.type = {...}; val bar : foo.type = precise(foo)(foo)
+ // precise(foo) : foo.type => foo.type
+ val restpe = mt.resultType(args1 map (arg => gen.stableTypeFor(arg) getOrElse arg.tpe))
+ def ifPatternSkipFormals(tp: Type) = tp match {
+ case MethodType(_, rtp) if (inPatternMode(mode)) => rtp
+ case _ => tp
}
- }
- /** This is translating uses of List() into Nil. This is less
- * than ideal from a consistency standpoint, but it shouldn't be
- * altered without due caution.
- */
- if (fun.symbol == List_apply && args.isEmpty && !forInteractive)
- atPos(tree.pos)(gen.mkNil setType restpe)
- else
- constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe))
+ // Replace the Delegate-Chainer methods += and -= with corresponding
+ // + and - calls, which are translated in the code generator into
+ // Combine and Remove
+ if (forMSIL) {
+ fun match {
+ case Select(qual, name) =>
+ if (isSubType(qual.tpe, DelegateClass.tpe)
+ && (name == encode("+=") || name == encode("-="))) {
+ val n = if (name == encode("+=")) nme.PLUS else nme.MINUS
+ val f = Select(qual, n)
+ // the compiler thinks, the PLUS method takes only one argument,
+ // but he thinks it's an instance method -> still two ref's on the stack
+ // -> translated by backend
+ val rhs = treeCopy.Apply(tree, f, args)
+ return typed(Assign(qual, rhs))
+ }
+ case _ => ()
+ }
+ }
+ /**
+ * This is translating uses of List() into Nil. This is less
+ * than ideal from a consistency standpoint, but it shouldn't be
+ * altered without due caution.
+ * ... this also causes bootstrapping cycles if List_apply is
+ * forced during kind-arity checking, so it is guarded by additional
+ * tests to ensure we're sufficiently far along.
+ */
+ if (args.isEmpty && !forInteractive && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply))
+ atPos(tree.pos)(gen.mkNil setType restpe)
+ else
+ constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe))
+ }
+ checkDead.updateExpr(fun) {
+ handleMonomorphicCall
+ }
} else if (needsInstantiation(tparams, formals, args)) {
//println("needs inst "+fun+" "+tparams+"/"+(tparams map (_.info)))
inferExprInstance(fun, tparams)
doTypedApply(tree, fun, args, mode, pt)
} else {
- assert(!inPatternMode(mode)) // this case cannot arise for patterns
- val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt)
- val strictTargs = (lenientTargs, tparams).zipped map ((targ, tparam) =>
- if (targ == WildcardType) tparam.tpe else targ) //@M TODO: should probably be .tpeHK
- var remainingParams = paramTypes
- def typedArgToPoly(arg: Tree, formal: Type): Tree = { //TR TODO: cleanup
- val lenientPt = formal.instantiateTypeParams(tparams, lenientTargs)
- val newmode =
- if (isByNameParamType(remainingParams.head)) POLYmode
- else POLYmode | BYVALmode
- if (remainingParams.tail.nonEmpty) remainingParams = remainingParams.tail
- val arg1 = typedArg(arg, forArgMode(fun, mode), newmode, lenientPt)
- val argtparams = context.extractUndetparams()
- if (!argtparams.isEmpty) {
- val strictPt = formal.instantiateTypeParams(tparams, strictTargs)
- inferArgumentInstance(arg1, argtparams, strictPt, lenientPt)
+ def handlePolymorphicCall = {
+ assert(!inPatternMode(mode), modeString(mode)) // this case cannot arise for patterns
+ val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt)
+ val strictTargs = map2(lenientTargs, tparams)((targ, tparam) =>
+ if (targ == WildcardType) tparam.tpeHK else targ)
+ var remainingParams = paramTypes
+ def typedArgToPoly(arg: Tree, formal: Type): Tree = { //TR TODO: cleanup
+ val lenientPt = formal.instantiateTypeParams(tparams, lenientTargs)
+ val newmode =
+ if (isByNameParamType(remainingParams.head)) POLYmode
+ else POLYmode | BYVALmode
+ if (remainingParams.tail.nonEmpty) remainingParams = remainingParams.tail
+ val arg1 = typedArg(arg, forArgMode(fun, mode), newmode, lenientPt)
+ val argtparams = context.extractUndetparams()
+ if (!argtparams.isEmpty) {
+ val strictPt = formal.instantiateTypeParams(tparams, strictTargs)
+ inferArgumentInstance(arg1, argtparams, strictPt, lenientPt)
+ arg1
+ } else arg1
+ }
+ val args1 = map2(args, formals)(typedArgToPoly)
+ if (args1 exists { _.isErrorTyped }) duplErrTree
+ else {
+ debuglog("infer method inst " + fun + ", tparams = " + tparams + ", args = " + args1.map(_.tpe) + ", pt = " + pt + ", lobounds = " + tparams.map(_.tpe.bounds.lo) + ", parambounds = " + tparams.map(_.info)) //debug
+ // define the undetparams which have been fixed by this param list, replace the corresponding symbols in "fun"
+ // returns those undetparams which have not been instantiated.
+ val undetparams = inferMethodInstance(fun, tparams, args1, pt)
+ val result = doTypedApply(tree, fun, args1, mode, pt)
+ context.undetparams = undetparams
+ result
}
- arg1
- }
- val args1 = (args, formals).zipped map typedArgToPoly
- if (args1 exists (_.tpe.isError)) errTree
- else {
- if (settings.debug.value) log("infer method inst "+fun+", tparams = "+tparams+", args = "+args1.map(_.tpe)+", pt = "+pt+", lobounds = "+tparams.map(_.tpe.bounds.lo)+", parambounds = "+tparams.map(_.info)) //debug
- // define the undetparams which have been fixed by this param list, replace the corresponding symbols in "fun"
- // returns those undetparams which have not been instantiated.
- val undetparams = inferMethodInstance(fun, tparams, args1, pt)
- val result = doTypedApply(tree, fun, args1, mode, pt)
- context.undetparams = undetparams
- result
}
+ handlePolymorphicCall
}
}
@@ -2502,78 +3443,139 @@ trait Typers extends Modes {
doTypedApply(tree, fun setType fun.tpe.widen, args, mode, pt)
case ErrorType =>
- setError(treeCopy.Apply(tree, fun, args))
- /* --- begin unapply --- */
+ if (!tree.isErrorTyped) setError(tree) else tree
+ // @H change to setError(treeCopy.Apply(tree, fun, args))
case otpe if inPatternMode(mode) && unapplyMember(otpe).exists =>
- if (args.length > MaxTupleArity)
- error(fun.pos, "too many arguments for unapply pattern, maximum = "+MaxTupleArity)
-
- def freshArgType(tp: Type): (Type, List[Symbol]) = (tp: @unchecked) match {
- case MethodType(param :: _, _) =>
- (param.tpe, Nil)
- case PolyType(tparams, restype) =>
- val tparams1 = cloneSymbols(tparams)
- (freshArgType(restype)._1.substSym(tparams, tparams1), tparams1)
- case OverloadedType(_, _) =>
- error(fun.pos, "cannot resolve overloaded unapply")
- (ErrorType, Nil)
- }
+ doTypedUnapply(tree, fun0, fun, args, mode, pt)
- val unapp = unapplyMember(otpe)
- val unappType = otpe.memberType(unapp)
- val argDummy = context.owner.newValue(fun.pos, nme.SELECTOR_DUMMY) setFlag SYNTHETIC setInfo pt
- val arg = Ident(argDummy) setType pt
-
- if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
- //Console.println("UNAPP: need to typetest, arg.tpe = "+arg.tpe+", unappType = "+unappType)
- val (unappFormal, freeVars) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
- val context1 = context.makeNewScope(context.tree, context.owner)
- freeVars foreach context1.scope.enter
-
- val typer1 = newTyper(context1)
- val pattp = typer1.infer.inferTypedPattern(tree.pos, unappFormal, arg.tpe)
-
- // turn any unresolved type variables in freevars into existential skolems
- val skolems = freeVars map { fv =>
- val skolem = new TypeSkolem(context1.owner, fun.pos, fv.name.toTypeName, fv)
- skolem.setInfo(fv.info.cloneInfo(skolem))
- .setFlag(fv.flags | EXISTENTIAL).resetFlag(PARAM)
- skolem
- }
- arg.tpe = pattp.substSym(freeVars, skolems)
- argDummy setInfo arg.tpe
- }
+ case _ =>
+ duplErrorTree(ApplyWithoutArgsError(tree, fun))
+ }
+ }
- // setType null is necessary so that ref will be stabilized; see bug 881
- val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg)))
+ def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
+ def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
+ def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
+
+ val otpe = fun.tpe
+
+ if (args.length > MaxTupleArity)
+ return duplErrorTree(TooManyArgsPatternError(fun))
+
+ //
+ def freshArgType(tp: Type): (List[Symbol], Type) = tp match {
+ case MethodType(param :: _, _) =>
+ (Nil, param.tpe)
+ case PolyType(tparams, restpe) =>
+ createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t)))
+ // No longer used, see test case neg/t960.scala (#960 has nothing to do with it)
+ case OverloadedType(_, _) =>
+ OverloadedUnapplyError(fun)
+ (Nil, ErrorType)
+ case _ =>
+ UnapplyWithSingleArgError(fun)
+ (Nil, ErrorType)
+ }
- if (fun1.tpe.isErroneous) errTree
- else {
- val formals0 = unapplyTypeList(fun1.symbol, fun1.tpe)
- val formals1 = formalTypes(formals0, args.length)
- if (sameLength(formals1, args)) {
- val args1 = typedArgs(args, mode, formals0, formals1)
- // This used to be the following (failing) assert:
- // assert(isFullyDefined(pt), tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt)
- // I modified as follows. See SI-1048.
- val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
-
- val itype = glb(List(pt1, arg.tpe))
- arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
- UnApply(fun1, args1) setPos tree.pos setType itype
- }
- else {
- errorTree("wrong number of arguments for "+treeSymTypeMsg(fun))
- }
- }
+ val unapp = unapplyMember(otpe)
+ val unappType = otpe.memberType(unapp)
+ val argDummy = context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, SYNTHETIC) setInfo pt
+ val arg = Ident(argDummy) setType pt
+
+ val uncheckedTypeExtractor =
+ if (unappType.paramTypes.nonEmpty)
+ extractorForUncheckedType(tree.pos, unappType.paramTypes.head)
+ else None
+
+ if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
+ //Console.println("UNAPP: need to typetest, arg.tpe = "+arg.tpe+", unappType = "+unappType)
+ val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
+ val unapplyContext = context.makeNewScope(context.tree, context.owner)
+ freeVars foreach unapplyContext.scope.enter
+
+ val typer1 = newTyper(unapplyContext)
+ val pattp = typer1.infer.inferTypedPattern(tree, unappFormal, arg.tpe, canRemedy = uncheckedTypeExtractor.nonEmpty)
+
+ // turn any unresolved type variables in freevars into existential skolems
+ val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
+ arg.tpe = pattp.substSym(freeVars, skolems)
+ argDummy setInfo arg.tpe
+ }
-/* --- end unapply --- */
- case _ =>
- errorTree(fun.tpe+" does not take parameters")
+ // setType null is necessary so that ref will be stabilized; see bug 881
+ val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg)))
+
+ if (fun1.tpe.isErroneous) duplErrTree
+ else {
+ val resTp = fun1.tpe.finalResultType.normalize
+ val nbSubPats = args.length
+ val (formals, formalsExpanded) =
+ extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol, treeInfo.effectivePatternArity(args))
+ if (formals == null) duplErrorTree(WrongNumberOfArgsError(tree, fun))
+ else {
+ val args1 = typedArgs(args, mode, formals, formalsExpanded)
+ // This used to be the following (failing) assert:
+ // assert(isFullyDefined(pt), tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt)
+ // I modified as follows. See SI-1048.
+ val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
+
+ val itype = glb(List(pt1, arg.tpe))
+ arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
+ val unapply = UnApply(fun1, args1) setPos tree.pos setType itype
+
+ // if the type that the unapply method expects for its argument is uncheckable, wrap in classtag extractor
+ // skip if the unapply's type is not a method type with (at least, but really it should be exactly) one argument
+ // also skip if we already wrapped a classtag extractor (so we don't keep doing that forever)
+ if (uncheckedTypeExtractor.isEmpty || fun1.symbol.owner.isNonBottomSubClass(ClassTagClass)) unapply
+ else wrapClassTagUnapply(unapply, uncheckedTypeExtractor.get, unappType.paramTypes.head)
+ }
}
}
+ def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = {
+ // TODO: disable when in unchecked match
+ // we don't create a new Context for a Match, so find the CaseDef, then go out one level and navigate back to the match that has this case
+ // val thisCase = context.nextEnclosing(_.tree.isInstanceOf[CaseDef])
+ // val unchecked = thisCase.outer.tree.collect{case Match(selector, cases) if cases contains thisCase => selector} match {
+ // case List(Typed(_, tpt)) if tpt.tpe hasAnnotation UncheckedClass => true
+ // case t => println("outer tree: "+ (t, thisCase, thisCase.outer.tree)); false
+ // }
+ // println("wrapClassTagUnapply"+ (!isPastTyper && infer.containsUnchecked(pt), pt, uncheckedPattern))
+ // println("wrapClassTagUnapply: "+ extractor)
+ // println(util.Position.formatMessage(uncheckedPattern.pos, "made unchecked type test into a checked one", true))
+
+ val args = List(uncheckedPattern)
+ val app = atPos(uncheckedPattern.pos)(Apply(classTagExtractor, args))
+ // must call doTypedUnapply directly, as otherwise we get undesirable rewrites
+ // and re-typechecks of the target of the unapply call in PATTERNmode,
+ // this breaks down when the classTagExtractor (which defineds the unapply member) is not a simple reference to an object,
+ // but an arbitrary tree as is the case here
+ doTypedUnapply(app, classTagExtractor, classTagExtractor, args, PATTERNmode, pt)
+ }
+
+ // if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test
+ // return the corresponding extractor (an instance of ClassTag[`pt`])
+ def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (!opt.virtPatmat || isPastTyper) None else {
+ // only look at top-level type, can't (reliably) do anything about unchecked type args (in general)
+ pt.normalize.typeConstructor match {
+ // if at least one of the types in an intersection is checkable, use the checkable ones
+ // this avoids problems as in run/matchonseq.scala, where the expected type is `Coll with scala.collection.SeqLike`
+ // Coll is an abstract type, but SeqLike of course is not
+ case RefinedType(parents, _) if (parents.length >= 2) && (parents.exists(tp => !infer.containsUnchecked(tp))) =>
+ None
+
+ case ptCheckable if infer.containsUnchecked(ptCheckable) =>
+ val classTagExtractor = resolveClassTag(pos, ptCheckable)
+
+ if (classTagExtractor != EmptyTree && unapplyMember(classTagExtractor.tpe) != NoSymbol)
+ Some(classTagExtractor)
+ else None
+
+ case _ => None
+ }
+ }
+
/**
* Convert an annotation constructor call into an AnnotationInfo.
*
@@ -2582,8 +3584,10 @@ trait Typers extends Modes {
def typedAnnotation(ann: Tree, mode: Int = EXPRmode, selfsym: Symbol = NoSymbol, annClass: Symbol = AnnotationClass, requireJava: Boolean = false): AnnotationInfo = {
lazy val annotationError = AnnotationInfo(ErrorType, Nil, Nil)
var hasError: Boolean = false
- def error(pos: Position, msg: String) = {
- context.error(pos, msg)
+ val pending = ListBuffer[AbsTypeError]()
+
+ def reportAnnotationError(err: AbsTypeError) = {
+ pending += err
hasError = true
annotationError
}
@@ -2599,13 +3603,12 @@ trait Typers extends Modes {
case tpe => null
}
}
- def fail(msg: String) = { error(tr.pos, msg) ; None }
- if (const == null)
- fail("annotation argument needs to be a constant; found: " + tr)
- else if (const.value == null)
- fail("annotation argument cannot be null")
- else
+ if (const == null) {
+ reportAnnotationError(AnnotationNotAConstantError(tr)); None
+ } else if (const.value == null) {
+ reportAnnotationError(AnnotationArgNullError(tr)); None
+ } else
Some(LiteralAnnotArg(const))
}
@@ -2614,18 +3617,14 @@ trait Typers extends Modes {
*/
def tree2ConstArg(tree: Tree, pt: Type): Option[ClassfileAnnotArg] = tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) if (pt.typeSymbol == ArrayClass) =>
- error(tree.pos, "Array constants have to be specified using the `Array(...)' factory method")
- None
+ reportAnnotationError(ArrayConstantsError(tree)); None
case ann @ Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
val annInfo = typedAnnotation(ann, mode, NoSymbol, pt.typeSymbol, true)
- if (annInfo.atp.isErroneous) {
- // recursive typedAnnotation call already printed an error, so don't call "error"
- hasError = true
- None
- } else Some(NestedAnnotArg(annInfo))
+ if (annInfo.atp.isErroneous) { hasError = true; None }
+ else Some(NestedAnnotArg(annInfo))
- // use of Array.apply[T: ClassManifest](xs: T*): Array[T]
+ // use of Array.apply[T: ClassTag](xs: T*): Array[T]
// and Array.apply(x: Int, xs: Int*): Array[Int] (and similar)
case Apply(fun, args) =>
val typedFun = typed(fun, forFunMode(mode), WildcardType)
@@ -2638,13 +3637,13 @@ trait Typers extends Modes {
// BT = Int, .., String, Class[_], JavaAnnotClass
// T = BT | Array[BT]
// So an array literal as argument can only be valid if pt is Array[_]
- error(tree.pos, "found array constant, expected argument of type "+ pt)
+ reportAnnotationError(ArrayConstantsTypeMismatchError(tree, pt))
None
}
- else
- tryConst(tree, pt)
+ else tryConst(tree, pt)
- case Typed(t, _) => tree2ConstArg(t, pt)
+ case Typed(t, _) =>
+ tree2ConstArg(t, pt)
case tree =>
tryConst(tree, pt)
@@ -2664,13 +3663,13 @@ trait Typers extends Modes {
case Select(New(tpt), nme.CONSTRUCTOR) =>
(fun, outerArgss)
case _ =>
- error(fun.pos, "unexpected tree in annotation: "+ fun)
+ reportAnnotationError(UnexpectedTreeAnnotation(fun))
(setError(fun), outerArgss)
}
extract(ann, List())
}
- if (fun.isErroneous) annotationError
+ val res = if (fun.isErroneous) annotationError
else {
val typedFun @ Select(New(tpt), _) = typed(fun, forFunMode(mode), WildcardType)
val annType = tpt.tpe
@@ -2680,28 +3679,31 @@ trait Typers extends Modes {
// annotation to be saved as java classfile annotation
val isJava = typedFun.symbol.owner.isJavaDefined
if (!annType.typeSymbol.isNonBottomSubClass(annClass)) {
- error(tpt.pos, "expected annotation of type "+ annClass.tpe +", found "+ annType)
+ reportAnnotationError(AnnotationTypeMismatchError(tpt, annClass.tpe, annType))
} else if (argss.length > 1) {
- error(ann.pos, "multiple argument lists on classfile annotation")
+ reportAnnotationError(MultipleArgumentListForAnnotationError(ann))
} else {
- val args =
- if (argss.head.length == 1 && !isNamed(argss.head.head))
- List(new AssignOrNamedArg(Ident(nme.value), argss.head.head))
- else argss.head
val annScope = annType.decls
.filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined)
- val names = new collection.mutable.HashSet[Symbol]
+ val names = new scala.collection.mutable.HashSet[Symbol]
+ def hasValue = names exists (_.name == nme.value)
names ++= (if (isJava) annScope.iterator
else typedFun.tpe.params.iterator)
+ val args = argss match {
+ case List(List(arg)) if !isNamed(arg) && hasValue =>
+ List(new AssignOrNamedArg(Ident(nme.value), arg))
+ case as :: _ => as
+ }
+
val nvPairs = args map {
case arg @ AssignOrNamedArg(Ident(name), rhs) =>
val sym = if (isJava) annScope.lookup(name)
else typedFun.tpe.params.find(p => p.name == name).getOrElse(NoSymbol)
if (sym == NoSymbol) {
- error(arg.pos, "unknown annotation argument name: " + name)
+ reportAnnotationError(UnknownAnnotationNameError(arg, name))
(nme.ERROR, None)
} else if (!names.contains(sym)) {
- error(arg.pos, "duplicate value for annotation argument " + name)
+ reportAnnotationError(DuplicateValueAnnotationError(arg, name))
(nme.ERROR, None)
} else {
names -= sym
@@ -2710,24 +3712,26 @@ trait Typers extends Modes {
(sym.name, annArg)
}
case arg =>
- error(arg.pos, "classfile annotation arguments have to be supplied as named arguments")
+ reportAnnotationError(ClassfileAnnotationsAsNamedArgsError(arg))
(nme.ERROR, None)
}
-
- for (name <- names) {
- if (!name.annotations.contains(AnnotationInfo(AnnotationDefaultAttr.tpe, List(), List())) &&
- !name.hasDefaultFlag)
- error(ann.pos, "annotation " + annType.typeSymbol.fullName + " is missing argument " + name.name)
+ for (sym <- names) {
+ // make sure the flags are up to date before erroring (jvm/t3415 fails otherwise)
+ sym.initialize
+ if (!sym.hasAnnotation(AnnotationDefaultAttr) && !sym.hasDefault)
+ reportAnnotationError(AnnotationMissingArgError(ann, annType, sym))
}
if (hasError) annotationError
- else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setPos(ann.pos)
+ else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setOriginal(Apply(typedFun, args).setPos(ann.pos))
}
} else if (requireJava) {
- error(ann.pos, "nested classfile annotations must be defined in java; found: "+ annType)
+ reportAnnotationError(NestedAnnotationError(ann, annType))
} else {
val typedAnn = if (selfsym == NoSymbol) {
- typed(ann, mode, annClass.tpe)
+ // local dummy fixes SI-5544
+ val localTyper = newTyper(context.make(ann, context.owner.newLocalDummy(ann.pos)))
+ localTyper.typed(ann, mode, annClass.tpe)
} else {
// Since a selfsym is supplied, the annotation should have
// an extra "self" identifier in scope for type checking.
@@ -2761,7 +3765,7 @@ trait Typers extends Modes {
def annInfo(t: Tree): AnnotationInfo = t match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
- AnnotationInfo(annType, args, List()).setPos(t.pos)
+ AnnotationInfo(annType, args, List()).setOriginal(typedAnn).setPos(t.pos)
case Block(stats, expr) =>
context.warning(t.pos, "Usage of named or default arguments transformed this annotation\n"+
@@ -2776,7 +3780,7 @@ trait Typers extends Modes {
annInfo(fun)
case _ =>
- error(t.pos, "unexpected tree after typing annotation: "+ typedAnn)
+ reportAnnotationError(UnexpectedTreeAnnotationError(t, typedAnn))
}
if (annType.typeSymbol == DeprecatedAttr && argss.flatten.size < 2)
@@ -2786,64 +3790,42 @@ trait Typers extends Modes {
else annInfo(typedAnn)
}
}
- }
-
- def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
- sym.isTypeParameter && sym.owner.isJavaDefined
- /** Given a set `rawSyms` of term- and type-symbols, and a type
- * `tp`, produce a set of fresh type parameters and a type so that
- * it can be abstracted to an existential type. Every type symbol
- * `T` in `rawSyms` is mapped to a clone. Every term symbol `x` of
- * type `T` in `rawSyms` is given an associated type symbol of the
- * following form:
- *
- * type x.type <: T with Singleton
- *
- * The name of the type parameter is `x.type`, to produce nice
- * diagnostics. The Singleton parent ensures that the type
- * parameter is still seen as a stable type. Type symbols in
- * rawSyms are fully replaced by the new symbols. Term symbols are
- * also replaced, except for term symbols of an Ident tree, where
- * only the type of the Ident is changed.
- */
- protected def existentialTransform(rawSyms: List[Symbol], tp: Type) = {
- val typeParams: List[Symbol] = rawSyms map { sym =>
- val name = sym.name match {
- case x: TypeName => x
- case x => newTypeName(x + ".type")
- }
- val bound = sym.existentialBound
- val sowner = if (isRawParameter(sym)) context.owner else sym.owner
- val quantified = sowner.newExistential(sym.pos, name)
-
- quantified setInfo bound.cloneInfo(quantified)
- }
- // Higher-kinded existentials are not yet supported, but this is
- // tpeHK for when they are: "if a type constructor is expected/allowed,
- // tpeHK must be called instead of tpe."
- val typeParamTypes = typeParams map (_.tpeHK)
- (
- typeParams map (tparam => tparam setInfo tparam.info.subst(rawSyms, typeParamTypes)),
- tp.subst(rawSyms, typeParamTypes)
- )
+ if (hasError) {
+ pending.foreach(ErrorUtils.issueTypeError)
+ annotationError
+ } else res
}
- /** Compute an existential type from raw hidden symbols `syms' and type `tp'
+ /** Compute an existential type from raw hidden symbols `syms` and type `tp`
*/
- def packSymbols(hidden: List[Symbol], tp: Type): Type =
- if (hidden.isEmpty) tp
- else {
-// Console.println("original type: "+tp)
-// Console.println("hidden symbols: "+hidden)
- val (tparams, tp1) = existentialTransform(hidden, tp)
-// Console.println("tparams: "+tparams+", result: "+tp1)
- val res = existentialAbstraction(tparams, tp1)
-// Console.println("final result: "+res)
- res
+ def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, Some(context0.owner))
+
+ def isReferencedFrom(ctx: Context, sym: Symbol): Boolean =
+ ctx.owner.isTerm &&
+ (ctx.scope.exists { dcl => dcl.isInitialized && (dcl.info contains sym) }) ||
+ {
+ var ctx1 = ctx.outer
+ while ((ctx1 != NoContext) && (ctx1.scope eq ctx.scope)) ctx1 = ctx1.outer
+ (ctx1 != NoContext) && isReferencedFrom(ctx1, sym)
}
- /** convert skolems to existentials */
+ def isCapturedExistential(sym: Symbol) =
+ (sym hasAllFlags (EXISTENTIAL | CAPTURED)) && {
+ val start = if (Statistics.canEnable) Statistics.startTimer(isReferencedNanos) else null
+ try !isReferencedFrom(context, sym)
+ finally if (Statistics.canEnable) Statistics.stopTimer(isReferencedNanos, start)
+ }
+
+ def packCaptured(tpe: Type): Type = {
+ val captured = mutable.Set[Symbol]()
+ for (tp <- tpe)
+ if (isCapturedExistential(tp.typeSymbol))
+ captured += tp.typeSymbol
+ existentialAbstraction(captured.toList, tpe)
+ }
+
+ /** convert local symbols and skolems to existentials */
def packedType(tree: Tree, owner: Symbol): Type = {
def defines(tree: Tree, sym: Symbol) =
sym.isExistentialSkolem && sym.unpackLocation == tree ||
@@ -2856,28 +3838,28 @@ trait Typers extends Modes {
while (o != owner && o != NoSymbol && !o.hasPackageFlag) o = o.owner
o == owner && !isVisibleParameter(sym)
}
- var localSyms = collection.immutable.Set[Symbol]()
- var boundSyms = collection.immutable.Set[Symbol]()
+ var localSyms = scala.collection.immutable.Set[Symbol]()
+ var boundSyms = scala.collection.immutable.Set[Symbol]()
def isLocal(sym: Symbol): Boolean =
if (sym == NoSymbol || sym.isRefinementClass || sym.isLocalDummy) false
else if (owner == NoSymbol) tree exists (defines(_, sym))
- else containsDef(owner, sym) || isRawParameter(sym)
+ else containsDef(owner, sym) || isRawParameter(sym) || isCapturedExistential(sym)
def containsLocal(tp: Type): Boolean =
tp exists (t => isLocal(t.typeSymbol) || isLocal(t.termSymbol))
val normalizeLocals = new TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) =>
- if (sym.isAliasType && containsLocal(tp)) apply(tp.normalize)
+ if (sym.isAliasType && containsLocal(tp) && (tp.dealias ne tp)) apply(tp.dealias)
else {
if (pre.isVolatile)
- context.error(tree.pos, "Inferred type "+tree.tpe+" contains type selection from volatile type "+pre)
+ InferTypeWithVolatileTypeSelectionError(tree, pre)
mapOver(tp)
}
case _ =>
mapOver(tp)
}
}
- // add all local symbols of `tp' to `localSyms'
+ // add all local symbols of `tp` to `localSyms`
// TODO: expand higher-kinded types into individual copies for each instance.
def addLocals(tp: Type) {
val remainingSyms = new ListBuffer[Symbol]
@@ -2887,8 +3869,7 @@ trait Typers extends Modes {
localSyms += sym
remainingSyms += sym
} else {
- unit.error(tree.pos,
- "can't existentially abstract over parameterized type " + tp)
+ AbstractExistentiallyOverParamerizedTpeError(tree, tp)
}
}
}
@@ -2923,6 +3904,10 @@ trait Typers extends Modes {
packSymbols(localSyms.toList, normalizedTpe)
}
+ def typedClassOf(tree: Tree, tpt: Tree, noGen: Boolean = false) =
+ if (!checkClassType(tpt) && noGen) tpt
+ else atPos(tree.pos)(gen.mkClassOf(tpt.tpe))
+
protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Int): Tree = {
for (wc <- tree.whereClauses)
if (wc.symbol == NoSymbol) { namer.enterSym(wc); wc.symbol setFlag EXISTENTIAL }
@@ -2930,11 +3915,19 @@ trait Typers extends Modes {
val whereClauses1 = typedStats(tree.whereClauses, context.owner)
for (vd @ ValDef(_, _, _, _) <- tree.whereClauses)
if (vd.symbol.tpe.isVolatile)
- error(vd.pos, "illegal abstraction from value with volatile type "+vd.symbol.tpe)
+ AbstractionFromVolatileTypeError(vd)
val tpt1 = typedType(tree.tpt, mode)
- val (typeParams, tpe) = existentialTransform(tree.whereClauses map (_.symbol), tpt1.tpe)
- //println(tpe + ": " + tpe.getClass )
- TypeTree(ExistentialType(typeParams, tpe)) setOriginal tree
+ existentialTransform(tree.whereClauses map (_.symbol), tpt1.tpe)((tparams, tp) => {
+ val original = tpt1 match {
+ case tpt : TypeTree => atPos(tree.pos)(ExistentialTypeTree(tpt.original, tree.whereClauses))
+ case _ => {
+ debuglog(s"cannot reconstruct the original for $tree, because $tpt1 is not a TypeTree")
+ tree
+ }
+ }
+ TypeTree(newExistentialType(tparams, tp)) setOriginal original
+ }
+ )
}
// lifted out of typed1 because it's needed in typedImplicit0
@@ -2947,13 +3940,13 @@ trait Typers extends Modes {
// as we don't know which alternative to choose... here we do
map2Conserve(args, tparams) {
//@M! the polytype denotes the expected kind
- (arg, tparam) => typedHigherKindedType(arg, mode, polyType(tparam.typeParams, AnyClass.tpe))
+ (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
}
} else // @M: there's probably something wrong when args.length != tparams.length... (triggered by bug #320)
// Martin, I'm using fake trees, because, if you use args or arg.map(typedType),
// inferPolyAlternatives loops... -- I have no idea why :-(
// ...actually this was looping anyway, see bug #278.
- return errorTree(fun, "wrong number of type parameters for "+treeSymTypeMsg(fun))
+ return TypedApplyWrongNumberOfTpeParametersError(fun, fun)
typedTypeApply(tree, mode, fun, args1)
case SingleType(_, _) =>
@@ -2961,14 +3954,17 @@ trait Typers extends Modes {
case PolyType(tparams, restpe) if tparams.nonEmpty =>
if (sameLength(tparams, args)) {
val targs = args map (_.tpe)
- checkBounds(tree.pos, NoPrefix, NoSymbol, tparams, targs, "")
- if (fun.symbol == Predef_classOf) {
- checkClassType(args.head, true, false)
- atPos(tree.pos) { gen.mkClassOf(targs.head) }
- } else {
- if (phase.id <= currentRun.typerPhase.id &&
- fun.symbol == Any_isInstanceOf && !targs.isEmpty)
- checkCheckable(tree.pos, targs.head, "")
+ checkBounds(tree, NoPrefix, NoSymbol, tparams, targs, "")
+ if (fun.symbol == Predef_classOf)
+ typedClassOf(tree, args.head, true)
+ else {
+ if (!isPastTyper && fun.symbol == Any_isInstanceOf && targs.nonEmpty) {
+ val scrutineeType = fun match {
+ case Select(qual, _) => qual.tpe
+ case _ => AnyClass.tpe
+ }
+ checkCheckable(tree, targs.head, scrutineeType, inPattern = false)
+ }
val resultpe = restpe.instantiateTypeParams(tparams, targs)
//@M substitution in instantiateParams needs to be careful!
//@M example: class Foo[a] { def foo[m[x]]: m[a] = error("") } (new Foo[Int]).foo[List] : List[Int]
@@ -2978,17 +3974,141 @@ trait Typers extends Modes {
//println("instantiating type params "+restpe+" "+tparams+" "+targs+" = "+resultpe)
treeCopy.TypeApply(tree, fun, args) setType resultpe
}
- } else {
- errorTree(tree, "wrong number of type parameters for "+treeSymTypeMsg(fun))
+ }
+ else {
+ TypedApplyWrongNumberOfTpeParametersError(tree, fun)
}
case ErrorType =>
setError(treeCopy.TypeApply(tree, fun, args))
case _ =>
- errorTree(tree, treeSymTypeMsg(fun)+" does not take type parameters.")
+ fun match {
+ // drop the application for an applyDynamic or selectDynamic call since it has been pushed down
+ case treeInfo.DynamicApplication(_, _) => fun
+ case _ => TypedApplyDoesNotTakeTpeParametersError(tree, fun)
+ }
}
- @inline final def deindentTyping() = context.typingIndentLevel -= 2
- @inline final def indentTyping() = context.typingIndentLevel += 2
+ object dyna {
+ import treeInfo.{isApplyDynamicName, DynamicUpdate, DynamicApplicationNamed}
+
+ def acceptsApplyDynamic(tp: Type) = tp.typeSymbol isNonBottomSubClass DynamicClass
+
+ /** Returns `Some(t)` if `name` can be selected dynamically on `qual`, `None` if not.
+ * `t` specifies the type to be passed to the applyDynamic/selectDynamic call (unless it is NoType)
+ * NOTE: currently either returns None or Some(NoType) (scala-virtualized extends this to Some(t) for selections on staged Structs)
+ */
+ def acceptsApplyDynamicWithType(qual: Tree, name: Name): Option[Type] =
+ // don't selectDynamic selectDynamic, do select dynamic at unknown type,
+ // in scala-virtualized, we may return a Some(tp) where tp ne NoType
+ if (!isApplyDynamicName(name) && acceptsApplyDynamic(qual.tpe.widen)) Some(NoType)
+ else None
+
+ def isDynamicallyUpdatable(tree: Tree) = tree match {
+ case DynamicUpdate(qual, name) =>
+ // if the qualifier is a Dynamic, that's all we need to know
+ acceptsApplyDynamic(qual.tpe)
+ case _ => false
+ }
+
+ def isApplyDynamicNamed(fun: Tree): Boolean = fun match {
+ case DynamicApplicationNamed(qual, _) if acceptsApplyDynamic(qual.tpe.widen) => true
+ case _ => false
+ // look deeper?
+ // val treeInfo.Applied(methPart, _, _) = fun
+ // println("methPart of "+ fun +" is "+ methPart)
+ // if (methPart ne fun) isApplyDynamicNamed(methPart)
+ // else false
+ }
+
+ def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
+ def argToBinding(arg: Tree): Tree = arg match {
+ case AssignOrNamedArg(i @ Ident(name), rhs) =>
+ atPos(i.pos.withEnd(rhs.pos.endOrPoint)) {
+ gen.mkTuple(List(atPos(i.pos)(CODE.LIT(name.toString)), rhs))
+ }
+ case _ =>
+ gen.mkTuple(List(CODE.LIT(""), arg))
+ }
+
+ val t = treeCopy.Apply(orig, fun, args map argToBinding)
+ wrapErrors(t, _.typed(t, mode, pt))
+ }
+
+ /** Translate selection that does not typecheck according to the normal rules into a selectDynamic/applyDynamic.
+ *
+ * foo.method("blah") ~~> foo.applyDynamic("method")("blah")
+ * foo.method(x = "blah") ~~> foo.applyDynamicNamed("method")(("x", "blah"))
+ * foo.varia = 10 ~~> foo.updateDynamic("varia")(10)
+ * foo.field ~~> foo.selectDynamic("field")
+ * foo.arr(10) = 13 ~~> foo.selectDynamic("arr").update(10, 13)
+ *
+ * what if we want foo.field == foo.selectDynamic("field") == 1, but `foo.field = 10` == `foo.selectDynamic("field").update(10)` == ()
+ * what would the signature for selectDynamic be? (hint: it needs to depend on whether an update call is coming or not)
+ *
+ * need to distinguish selectDynamic and applyDynamic somehow: the former must return the selected value, the latter must accept an apply or an update
+ * - could have only selectDynamic and pass it a boolean whether more is to come,
+ * so that it can either return the bare value or something that can handle the apply/update
+ * HOWEVER that makes it hard to return unrelated values for the two cases
+ * --> selectDynamic's return type is now dependent on the boolean flag whether more is to come
+ * - simplest solution: have two method calls
+ *
+ */
+ def mkInvoke(cxTree: Tree, tree: Tree, qual: Tree, name: Name): Option[Tree] = {
+ log(s"dyna.mkInvoke($cxTree, $tree, $qual, $name)")
+ val treeInfo.Applied(treeSelection, _, _) = tree
+ def isDesugaredApply = treeSelection match {
+ case Select(`qual`, nme.apply) => true
+ case _ => false
+ }
+ acceptsApplyDynamicWithType(qual, name) map { tp =>
+ // If tp == NoType, pass only explicit type arguments to applyXXX. Not used at all
+ // here - it is for scala-virtualized, where tp will be passed as an argument (for
+ // selection on a staged Struct)
+ def hasNamed(args: List[Tree]): Boolean = args exists (_.isInstanceOf[AssignOrNamedArg])
+ // not supported: foo.bar(a1,..., an: _*)
+ def hasStar(args: List[Tree]) = treeInfo.isWildcardStarArgList(args)
+ def applyOp(args: List[Tree]) = if (hasNamed(args)) nme.applyDynamicNamed else nme.applyDynamic
+ def matches(t: Tree) = isDesugaredApply || treeInfo.dissectApplied(t).core == treeSelection
+
+ /** Note that the trees which arrive here are potentially some distance from
+ * the trees of direct interest. `cxTree` is some enclosing expression which
+ * may apparently be arbitrarily larger than `tree`; and `tree` itself is
+ * too small, having at least in some cases lost its explicit type parameters.
+ * This logic is designed to use `tree` to pinpoint the immediately surrounding
+ * Apply/TypeApply/Select node, and only then creates the dynamic call.
+ * See SI-6731 among others.
+ */
+ def findSelection(t: Tree): Option[(TermName, Tree)] = t match {
+ case Apply(fn, args) if hasStar(args) => DynamicVarArgUnsupported(tree, applyOp(args)) ; None
+ case Apply(fn, args) if matches(fn) => Some((applyOp(args), fn))
+ case Assign(lhs, _) if matches(lhs) => Some((nme.updateDynamic, lhs))
+ case _ if matches(t) => Some((nme.selectDynamic, t))
+ case _ => t.children flatMap findSelection headOption
+ }
+ findSelection(cxTree) match {
+ case Some((opName, treeInfo.Applied(_, targs, _))) =>
+ val fun = gen.mkTypeApply(Select(qual, opName), targs)
+ if (opName == nme.updateDynamic) suppressMacroExpansion(fun) // SI-7617
+ val nameStringLit = atPos(treeSelection.pos.withStart(treeSelection.pos.point).makeTransparent) {
+ Literal(Constant(name.decode))
+ }
+ atPos(qual.pos)(Apply(fun, List(nameStringLit)))
+ case _ =>
+ setError(tree)
+ }
+ }
+ }
+
+ def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = {
+ silent(typeTree) match {
+ case SilentResultValue(r) => r
+ case SilentTypeError(err) => DynamicRewriteError(tree, err)
+ }
+ }
+ }
+
+ final def deindentTyping() = context.typingIndentLevel -= 2
+ final def indentTyping() = context.typingIndentLevel += 2
@inline final def printTyping(s: => String) = {
if (printTypings)
println(context.typingIndent + s.replaceAll("\n", "\n" + context.typingIndent))
@@ -2998,12 +4118,10 @@ trait Typers extends Modes {
println(s)
}
- protected def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
+ def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
def isPatternMode = inPatternMode(mode)
//Console.println("typed1("+tree.getClass()+","+Integer.toHexString(mode)+","+pt+")")
- def ptOrLub(tps: List[Type]) = if (isFullyDefined(pt)) (pt, false) else weakLub(tps map (_.deconst))
-
//@M! get the type of the qualifier in a Select tree, otherwise: NoType
def prefixType(fun: Tree): Type = fun match {
case Select(qualifier, _) => qualifier.tpe
@@ -3011,10 +4129,24 @@ trait Typers extends Modes {
case _ => NoType
}
- def typedAnnotated(ann: Tree, arg1: Tree): Tree = {
+ def typedAnnotated(atd: Annotated): Tree = {
+ val ann = atd.annot
+ val arg1 = typed(atd.arg, mode, pt)
/** mode for typing the annotation itself */
val annotMode = mode & ~TYPEmode | EXPRmode
+ def resultingTypeTree(tpe: Type) = {
+ // we need symbol-ful originals for reification
+ // hence we go the extra mile to hand-craft tis guy
+ val original = arg1 match {
+ case tt @ TypeTree() if tt.original != null => Annotated(ann, tt.original)
+ // this clause is needed to correctly compile stuff like "new C @D" or "@(inline @getter)"
+ case _ => Annotated(ann, arg1)
+ }
+ original setType ann.tpe
+ TypeTree(tpe) setOriginal original setPos tree.pos.focus
+ }
+
if (arg1.isType) {
// make sure the annotation is only typechecked once
if (ann.tpe == null) {
@@ -3023,26 +4155,24 @@ trait Typers extends Modes {
if (!settings.selfInAnnots.value)
NoSymbol
else
- arg1.tpe.selfsym match {
- case NoSymbol =>
- /* Implementation limitation: Currently this
- * can cause cyclical reference errors even
- * when the self symbol is not referenced at all.
- * Surely at least some of these cases can be
- * fixed by proper use of LazyType's. Lex tinkered
- * on this but did not succeed, so is leaving
- * it alone for now. Example code with the problem:
- * class peer extends Annotation
- * class NPE[T <: NPE[T] @peer]
- *
- * (Note: -Yself-in-annots must be on to see the problem)
- * */
- val sym =
- context.owner.newLocalDummy(ann.pos)
- .newValue(ann.pos, nme.self)
- sym.setInfo(arg1.tpe.withoutAnnotations)
- sym
- case sym => sym
+ arg1.tpe.selfsym orElse {
+ /* Implementation limitation: Currently this
+ * can cause cyclical reference errors even
+ * when the self symbol is not referenced at all.
+ * Surely at least some of these cases can be
+ * fixed by proper use of LazyType's. Lex tinkered
+ * on this but did not succeed, so is leaving
+ * it alone for now. Example code with the problem:
+ * class peer extends Annotation
+ * class NPE[T <: NPE[T] @peer]
+ *
+ * (Note: -Yself-in-annots must be on to see the problem)
+ * */
+ ( context.owner
+ newLocalDummy (ann.pos)
+ newValue (nme.self, ann.pos)
+ setInfo (arg1.tpe.withoutAnnotations)
+ )
}
val ainfo = typedAnnotation(ann, annotMode, selfsym)
@@ -3055,88 +4185,101 @@ trait Typers extends Modes {
// this annotation did not need it
if (ainfo.isErroneous)
+ // Erroneous annotations were already reported in typedAnnotation
arg1 // simply drop erroneous annotations
else {
ann.tpe = atype
- TypeTree(atype) setOriginal tree
+ resultingTypeTree(atype)
}
} else {
// the annotation was typechecked before
- TypeTree(ann.tpe) setOriginal tree
+ resultingTypeTree(ann.tpe)
}
- } else {
+ }
+ else {
if (ann.tpe == null) {
val annotInfo = typedAnnotation(ann, annotMode)
ann.tpe = arg1.tpe.withAnnotation(annotInfo)
}
val atype = ann.tpe
- Typed(arg1, TypeTree(atype) setOriginal tree setPos tree.pos.focus) setPos tree.pos setType atype
+ Typed(arg1, resultingTypeTree(atype)) setPos tree.pos setType atype
}
}
- def typedBind(name: Name, body: Tree) = {
- var vble = tree.symbol
- def typedBindType(name: TypeName) = {
- assert(body == EmptyTree, context.unit + " typedBind: " + name.debugString + " " + body + " " + body.getClass)
- if (vble == NoSymbol)
- vble =
- if (isFullyDefined(pt))
- context.owner.newAliasType(tree.pos, name) setInfo pt
- else
- context.owner.newAbstractType(tree.pos, name) setInfo TypeBounds.empty
- val rawInfo = vble.rawInfo
- vble = if (vble.name == tpnme.WILDCARD) context.scope.enter(vble)
- else namer.enterInScope(vble)
- tree setSymbol vble setType vble.tpe
- }
- def typedBindTerm(name: TermName) = {
- if (vble == NoSymbol)
- vble = context.owner.newValue(tree.pos, name)
- if (vble.name.toTermName != nme.WILDCARD) {
- if ((mode & ALTmode) != 0)
- error(tree.pos, "illegal variable in pattern alternative")
- vble = namer.enterInScope(vble)
- }
- val body1 = typed(body, mode, pt)
- vble.setInfo(
- if (treeInfo.isSequenceValued(body)) seqType(body1.tpe)
- else body1.tpe)
- treeCopy.Bind(tree, name, body1) setSymbol vble setType body1.tpe // burak, was: pt
- }
+ def typedBind(tree: Bind) = {
+ val name = tree.name
+ val body = tree.body
name match {
- case x: TypeName => typedBindType(x)
- case x: TermName => typedBindTerm(x)
+ case name: TypeName => assert(body == EmptyTree, context.unit + " typedBind: " + name.debugString + " " + body + " " + body.getClass)
+ val sym =
+ if (tree.symbol != NoSymbol) tree.symbol
+ else {
+ if (isFullyDefined(pt))
+ context.owner.newAliasType(name, tree.pos) setInfo pt
+ else
+ context.owner.newAbstractType(name, tree.pos) setInfo TypeBounds.empty
+ }
+
+ if (name != tpnme.WILDCARD) namer.enterInScope(sym)
+ else context.scope.enter(sym)
+
+ tree setSymbol sym setType sym.tpe
+
+ case name: TermName =>
+ val sym =
+ if (tree.symbol != NoSymbol) tree.symbol
+ else context.owner.newValue(name, tree.pos)
+
+ if (name != nme.WILDCARD) {
+ if ((mode & ALTmode) != 0) VariableInPatternAlternativeError(tree)
+ namer.enterInScope(sym)
+ }
+
+ val body1 = typed(body, mode, pt)
+ val symTp =
+ if (treeInfo.isSequenceValued(body)) seqType(body1.tpe)
+ else body1.tpe
+ sym setInfo symTp
+
+ // have to imperatively set the symbol for this bind to keep it in sync with the symbols used in the body of a case
+ // when type checking a case we imperatively update the symbols in the body of the case
+ // those symbols are bound by the symbols in the Binds in the pattern of the case,
+ // so, if we set the symbols in the case body, but not in the patterns,
+ // then re-type check the casedef (for a second try in typedApply for example -- SI-1832),
+ // we are no longer in sync: the body has symbols set that do not appear in the patterns
+ // since body1 is not necessarily equal to body, we must return a copied tree,
+ // but we must still mutate the original bind
+ tree setSymbol sym
+ treeCopy.Bind(tree, name, body1) setSymbol sym setType body1.tpe
}
}
- def typedArrayValue(elemtpt: Tree, elems: List[Tree]) = {
- val elemtpt1 = typedType(elemtpt, mode)
- val elems1 = elems mapConserve (elem => typed(elem, mode, elemtpt1.tpe))
+ def typedArrayValue(tree: ArrayValue) = {
+ val elemtpt1 = typedType(tree.elemtpt, mode)
+ val elems1 = tree.elems mapConserve (elem => typed(elem, mode, elemtpt1.tpe))
treeCopy.ArrayValue(tree, elemtpt1, elems1)
.setType(
(if (isFullyDefined(pt) && !phase.erasedTypes) pt
- else appliedType(ArrayClass.typeConstructor, List(elemtpt1.tpe))).notNull)
+ else arrayType(elemtpt1.tpe)).notNull)
}
def typedAssign(lhs: Tree, rhs: Tree): Tree = {
- val lhs1 = typed(lhs, EXPRmode | LHSmode, WildcardType)
+ // see SI-7617 for an explanation of why macro expansion is suppressed
+ def typedLhs(lhs: Tree) = typed(lhs, EXPRmode | LHSmode, WildcardType)
+ val lhs1 = unsuppressMacroExpansion(typedLhs(suppressMacroExpansion(lhs)))
val varsym = lhs1.symbol
- def failMsg =
- if (varsym != null && varsym.isValue) "reassignment to val"
- else "assignment to non variable"
- def fail = {
- if (!lhs1.tpe.isError)
- error(tree.pos, failMsg)
+ // see #2494 for double error message example
+ def fail() =
+ if (lhs1.isErrorTyped) lhs1
+ else AssignmentError(tree, varsym)
- setError(tree)
- }
if (varsym == null)
- return fail
+ return fail()
if (treeInfo.mayBeVarGetter(varsym)) {
- treeInfo.methPart(lhs1) match {
- case Select(qual, name) =>
+ lhs1 match {
+ case treeInfo.Applied(Select(qual, name), _, _) =>
val sel = Select(qual, nme.getterToSetter(name.toTermName)) setPos lhs.pos
val app = Apply(sel, List(rhs)) setPos tree.pos
return typed(app, mode, pt)
@@ -3144,22 +4287,49 @@ trait Typers extends Modes {
case _ =>
}
}
+// if (varsym.isVariable ||
+// // setter-rewrite has been done above, so rule out methods here, but, wait a minute, why are we assigning to non-variables after erasure?!
+// (phase.erasedTypes && varsym.isValue && !varsym.isMethod)) {
if (varsym.isVariable || varsym.isValue && phase.erasedTypes) {
val rhs1 = typed(rhs, EXPRmode | BYVALmode, lhs1.tpe)
treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitClass.tpe
}
- else fail
+ else if(dyna.isDynamicallyUpdatable(lhs1)) {
+ val rhs1 = typed(rhs, EXPRmode | BYVALmode, WildcardType)
+ val t = atPos(lhs1.pos.withEnd(rhs1.pos.endOrPoint)) {
+ Apply(lhs1, List(rhs1))
+ }
+ dyna.wrapErrors(t, _.typed1(t, mode, pt))
+ }
+ else fail()
}
- def typedIf(cond: Tree, thenp: Tree, elsep: Tree) = {
- val cond1 = checkDead(typed(cond, EXPRmode | BYVALmode, BooleanClass.tpe))
+ def typedIf(tree: If) = {
+ val cond1 = checkDead(typed(tree.cond, EXPRmode | BYVALmode, BooleanClass.tpe))
+ val thenp = tree.thenp
+ val elsep = tree.elsep
if (elsep.isEmpty) { // in the future, should be unnecessary
val thenp1 = typed(thenp, UnitClass.tpe)
treeCopy.If(tree, cond1, thenp1, elsep) setType thenp1.tpe
} else {
var thenp1 = typed(thenp, pt)
var elsep1 = typed(elsep, pt)
- val (owntype, needAdapt) = ptOrLub(List(thenp1.tpe, elsep1.tpe))
+ def thenTp = packedType(thenp1, context.owner)
+ def elseTp = packedType(elsep1, context.owner)
+
+ // println("typedIf: "+(thenp1.tpe, elsep1.tpe, ptOrLub(List(thenp1.tpe, elsep1.tpe)),"\n", thenTp, elseTp, thenTp =:= elseTp))
+ val (owntype, needAdapt) =
+ // in principle we should pack the types of each branch before lubbing, but lub doesn't really work for existentials anyway
+ // in the special (though common) case where the types are equal, it pays to pack before comparing
+ // especially virtpatmat needs more aggressive unification of skolemized types
+ // this breaks src/library/scala/collection/immutable/TrieIterator.scala
+ if ( opt.virtPatmat && !isPastTyper
+ && thenp1.tpe.annotations.isEmpty && elsep1.tpe.annotations.isEmpty // annotated types need to be lubbed regardless (at least, continations break if you by pass them like this)
+ && thenTp =:= elseTp
+ ) (thenp1.tpe.deconst, false) // use unpacked type. Important to deconst, as is done in ptOrLub, otherwise `if (???) 0 else 0` evaluates to 0 (SI-6331)
+ // TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
+ else ptOrLub(thenp1.tpe :: elsep1.tpe :: Nil, pt)
+
if (needAdapt) { //isNumericValueType(owntype)) {
thenp1 = adapt(thenp1, mode, owntype)
elsep1 = adapt(elsep1, mode, owntype)
@@ -3168,20 +4338,46 @@ trait Typers extends Modes {
}
}
- def typedReturn(expr: Tree) = {
+ // under -Xexperimental (and not -Xoldpatmat), and when there's a suitable __match in scope, virtualize the pattern match
+ // otherwise, type the Match and leave it until phase `patmat` (immediately after typer)
+ // empty-selector matches are transformed into synthetic PartialFunction implementations when the expected type demands it
+ def typedVirtualizedMatch(tree: Match): Tree = {
+ val selector = tree.selector
+ val cases = tree.cases
+ if (selector == EmptyTree) {
+ if (newPatternMatching && (pt.typeSymbol == PartialFunctionClass))
+ synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, tree, mode, pt)
+ else {
+ val arity = if (isFunctionType(pt)) pt.normalize.typeArgs.length - 1 else 1
+ val params = for (i <- List.range(0, arity)) yield
+ atPos(tree.pos.focusStart) {
+ ValDef(Modifiers(PARAM | SYNTHETIC),
+ unit.freshTermName("x" + i + "$"), TypeTree(), EmptyTree)
+ }
+ val ids = for (p <- params) yield Ident(p.name)
+ val selector1 = atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) }
+ val body = treeCopy.Match(tree, selector1, cases)
+ typed1(atPos(tree.pos) { Function(params, body) }, mode, pt)
+ }
+ } else
+ virtualizedMatch(typedMatch(selector, cases, mode, pt, tree), mode, pt)
+ }
+
+ def typedReturn(tree: Return) = {
+ val expr = tree.expr
val enclMethod = context.enclMethod
if (enclMethod == NoContext ||
enclMethod.owner.isConstructor ||
context.enclClass.enclMethod == enclMethod // i.e., we are in a constructor of a local class
) {
- errorTree(tree, "return outside method definition")
+ ReturnOutsideOfDefError(tree)
} else {
val DefDef(_, name, _, _, restpt, _) = enclMethod.tree
- if (restpt.tpe eq null)
- errorTree(tree, enclMethod.owner + " has return statement; needs result type")
- else {
+ if (restpt.tpe eq null) {
+ ReturnWithoutTypeError(tree, enclMethod.owner)
+ } else {
context.enclMethod.returnsSeen = true
- val expr1: Tree = typed(expr, EXPRmode | BYVALmode, restpt.tpe)
+ val expr1: Tree = typed(expr, EXPRmode | BYVALmode | RETmode, restpt.tpe)
// Warn about returning a value if no value can be returned.
if (restpt.tpe.typeSymbol == UnitClass) {
// The typing in expr1 says expr is Unit (it has already been coerced if
@@ -3190,24 +4386,35 @@ trait Typers extends Modes {
if (typed(expr).tpe.typeSymbol != UnitClass)
unit.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded")
}
- treeCopy.Return(tree, checkDead(expr1)) setSymbol enclMethod.owner setType NothingClass.tpe
+ val res = treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
+ val tp = pluginsTypedReturn(NothingClass.tpe, this, res, restpt.tpe)
+ res.setType(tp)
}
}
}
- def typedNew(tpt: Tree) = {
+ def typedNew(tree: New) = {
+ val tpt = tree.tpt
val tpt1 = {
- val tpt0 = typedTypeConstructor(tpt)
- checkClassType(tpt0, false, true)
- if (tpt0.hasSymbol && !tpt0.symbol.typeParams.isEmpty) {
- context.undetparams = cloneSymbols(tpt0.symbol.typeParams)
- TypeTree().setOriginal(tpt0)
- .setType(appliedType(tpt0.tpe, context.undetparams map (_.tpeHK))) // @PP: tpeHK! #3343, #4018, #4347.
- } else tpt0
+ // This way typedNew always returns a dealiased type. This used to happen by accident
+ // for instantiations without type arguments due to ad hoc code in typedTypeConstructor,
+ // and annotations depended on it (to the extent that they worked, which they did
+ // not when given a parameterized type alias which dealiased to an annotation.)
+ // typedTypeConstructor dealiases nothing now, but it makes sense for a "new" to always be
+ // given a dealiased type.
+ val tpt0 = typedTypeConstructor(tpt) modifyType (_.dealias)
+ if (checkStablePrefixClassType(tpt0))
+ if (tpt0.hasSymbol && !tpt0.symbol.typeParams.isEmpty) {
+ context.undetparams = cloneSymbols(tpt0.symbol.typeParams)
+ notifyUndetparamsAdded(context.undetparams)
+ TypeTree().setOriginal(tpt0)
+ .setType(appliedType(tpt0.tpe, context.undetparams map (_.tpeHK))) // @PP: tpeHK! #3343, #4018, #4347.
+ } else tpt0
+ else tpt0
}
/** If current tree <tree> appears in <val x(: T)? = <tree>>
- * return `tp with x.type' else return `tp'.
+ * return `tp with x.type' else return `tp`.
*/
def narrowRhs(tp: Type) = { val sym = context.tree.symbol
context.tree match {
@@ -3220,19 +4427,27 @@ trait Typers extends Modes {
}}
val tp = tpt1.tpe
- val sym = tp.typeSymbol
+ val sym = tp.typeSymbol.initialize
if (sym.isAbstractType || sym.hasAbstractFlag)
- error(tree.pos, sym + " is abstract; cannot be instantiated")
- else if (!( tp == sym.initialize.thisSym.tpe // when there's no explicit self type -- with (#3612) or without self variable
+ IsAbstractError(tree, sym)
+ else if (isPrimitiveValueClass(sym)) {
+ NotAMemberError(tpt, TypeTree(tp), nme.CONSTRUCTOR)
+ setError(tpt)
+ }
+ else if (!( tp == sym.thisSym.tpe // when there's no explicit self type -- with (#3612) or without self variable
// sym.thisSym.tpe == tp.typeOfThis (except for objects)
|| narrowRhs(tp) <:< tp.typeOfThis
|| phase.erasedTypes
)) {
- error(tree.pos, sym +
- " cannot be instantiated because it does not conform to its self-type "+
- tp.typeOfThis)
- }
- treeCopy.New(tree, tpt1).setType(tp)
+ DoesNotConformToSelfTypeError(tree, sym, tp.typeOfThis)
+ } else
+ treeCopy.New(tree, tpt1).setType(tp)
+ }
+
+ def functionTypeWildcard(tree: Tree, arity: Int): Type = {
+ val tp = functionType(List.fill(arity)(WildcardType), WildcardType)
+ if (tp == NoType) MaxFunctionArityError(tree)
+ tp
}
def typedEta(expr1: Tree): Tree = expr1.tpe match {
@@ -3246,7 +4461,7 @@ trait Typers extends Modes {
typed1(expr2, mode, pt)
case PolyType(_, MethodType(formals, _)) =>
if (isFunctionType(pt)) expr1
- else adapt(expr1, mode, functionType(formals map (t => WildcardType), WildcardType))
+ else adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
case MethodType(formals, _) =>
if (isFunctionType(pt)) expr1
else expr1 match {
@@ -3265,27 +4480,33 @@ trait Typers extends Modes {
val rhs = Apply(f, args)
typed(rhs)
case _ =>
- adapt(expr1, mode, functionType(formals map (t => WildcardType), WildcardType))
+ adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
}
case ErrorType =>
expr1
case _ =>
- errorTree(expr1, "_ must follow method; cannot follow " + expr1.tpe)
+ UnderscoreEtaError(expr1)
}
/**
* @param args ...
* @return ...
*/
- def tryTypedArgs(args: List[Tree], mode: Int, other: TypeError): List[Tree] = {
+ def tryTypedArgs(args: List[Tree], mode: Int): Option[List[Tree]] = {
val c = context.makeSilent(false)
c.retyping = true
try {
- newTyper(c).typedArgs(args, mode)
+ val res = newTyper(c).typedArgs(args, mode)
+ if (c.hasErrors) None else Some(res)
} catch {
- case ex: CyclicReference => throw ex
- case ex: TypeError =>
- null
+ case ex: CyclicReference =>
+ throw ex
+ case te: TypeError =>
+ // @H some of typer erros can still leak,
+ // for instance in continuations
+ None
+ } finally {
+ c.flushBuffer()
}
}
@@ -3293,12 +4514,10 @@ trait Typers extends Modes {
* insert an implicit conversion.
*/
def tryTypedApply(fun: Tree, args: List[Tree]): Tree = {
- val start = startTimer(failedApplyNanos)
- silent(_.doTypedApply(tree, fun, args, mode, pt)) match {
- case t: Tree =>
- t
- case ex: TypeError =>
- stopTimer(failedApplyNanos, start)
+ val start = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
+
+ def onError(typeError: AbsTypeError): Tree = {
+ if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, start)
// If the problem is with raw types, copnvert to existentials and try again.
// See #4712 for a case where this situation arises,
@@ -3320,58 +4539,93 @@ trait Typers extends Modes {
case Typed(r, Function(Nil, EmptyTree)) => treesInResult(r)
case _ => Nil
})
- def errorInResult(tree: Tree) = treesInResult(tree) exists (_.pos == ex.pos)
- val retry = fun :: tree :: args exists errorInResult
+ def errorInResult(tree: Tree) = treesInResult(tree) exists (_.pos == typeError.errPos)
+
+ val retry = (typeError.errPos != null) && (fun :: tree :: args exists errorInResult)
printTyping {
val funStr = ptTree(fun) + " and " + (args map ptTree mkString ", ")
if (retry) "second try: " + funStr
- else "no second try: " + funStr + " because error not in result: " + ex.pos+"!="+tree.pos
+ else "no second try: " + funStr + " because error not in result: " + typeError.errPos+"!="+tree.pos
}
if (retry) {
val Select(qual, name) = fun
- val args1 = tryTypedArgs(args, forArgMode(fun, mode), ex)
- val qual1 =
- if ((args1 ne null) && !pt.isError) adaptToArguments(qual, name, args1, pt)
- else qual
- if (qual1 ne qual) {
- val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
- return typed1(tree1, mode | SNDTRYmode, pt)
+ tryTypedArgs(args, forArgMode(fun, mode)) match {
+ case Some(args1) =>
+ val qual1 =
+ if (!pt.isError) adaptToArguments(qual, name, args1, pt, true, true)
+ else qual
+ if (qual1 ne qual) {
+ val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
+ return typed1(tree1, mode | SNDTRYmode, pt)
+ }
+ case _ => ()
}
}
- reportTypeError(tree.pos, ex)
+ issue(typeError)
setError(treeCopy.Apply(tree, fun, args))
}
+
+ silent(_.doTypedApply(tree, fun, args, mode, pt)) match {
+ case SilentResultValue(t) =>
+ t
+ case SilentTypeError(err) =>
+ onError(err)
+ }
}
- def typedApply(fun: Tree, args: List[Tree]) = {
+ def normalTypedApply(tree: Tree, fun: Tree, args: List[Tree]) = {
val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable
- if (stableApplication && isPatternMode) {
+ if (args.isEmpty && stableApplication && isPatternMode) {
// treat stable function applications f() as expressions.
+ //
+ // [JZ] According to Martin, this is related to the old pattern matcher, which
+ // needs to typecheck after a the translation of `x.f` to `x.f()` in a prior
+ // compilation phase. As part of SI-7377, this has been tightened with `args.isEmpty`,
+ // but we should remove it altogether in Scala 2.11.
typed1(tree, mode & ~PATTERNmode | EXPRmode, pt)
} else {
val funpt = if (isPatternMode) pt else WildcardType
- val appStart = startTimer(failedApplyNanos)
- val opeqStart = startTimer(failedOpEqNanos)
+ val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
+ val opeqStart = if (Statistics.canEnable) Statistics.startTimer(failedOpEqNanos) else null
+
+ def onError(reportError: => Tree): Tree = {
+ fun match {
+ case Select(qual, name)
+ if !isPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
+ val qual1 = typedQualifier(qual)
+ if (treeInfo.isVariableOrGetter(qual1)) {
+ if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart)
+ convertToAssignment(fun, qual1, name, args)
+ } else {
+ if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
+ reportError
+ }
+ case _ =>
+ if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
+ reportError
+ }
+ }
silent(_.typed(fun, forFunMode(mode), funpt),
- if ((mode & EXPRmode) != 0) false else context.reportAmbiguousErrors,
+ if ((mode & EXPRmode) != 0) false else context.ambiguousErrors,
if ((mode & EXPRmode) != 0) tree else context.tree) match {
- case fun1: Tree =>
+ case SilentResultValue(fun1) =>
val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
- incCounter(typedApplyCount)
+ if (Statistics.canEnable) Statistics.incCounter(typedApplyCount)
def isImplicitMethod(tpe: Type) = tpe match {
case mt: MethodType => mt.isImplicit
case _ => false
}
+ val useTry = (
+ !isPastTyper
+ && fun2.isInstanceOf[Select]
+ && !isImplicitMethod(fun2.tpe)
+ && ((fun2.symbol eq null) || !fun2.symbol.isConstructor)
+ && (mode & (EXPRmode | SNDTRYmode)) == EXPRmode
+ )
val res =
- if (phase.id <= currentRun.typerPhase.id &&
- fun2.isInstanceOf[Select] &&
- !isImplicitMethod(fun2.tpe) &&
- ((fun2.symbol eq null) || !fun2.symbol.isConstructor) &&
- (mode & (EXPRmode | SNDTRYmode)) == EXPRmode) {
- tryTypedApply(fun2, args)
- } else {
- doTypedApply(tree, fun2, args, mode, pt)
- }
+ if (useTry) tryTypedApply(fun2, args)
+ else doTypedApply(tree, fun2, args, mode, pt)
+
/*
if (fun2.hasSymbol && fun2.symbol.isConstructor && (mode & EXPRmode) != 0) {
res.tpe = res.tpe.notNull
@@ -3381,40 +4635,53 @@ trait Typers extends Modes {
//if (fun2.hasSymbol && fun2.symbol.name == nme.apply && fun2.symbol.owner == ArrayClass) {
// But this causes cyclic reference for Array class in Cleanup. It is easy to overcome this
// by calling ArrayClass.info here (or some other place before specialize).
- if (fun2.symbol == Array_apply) {
+ if (fun2.symbol == Array_apply && !res.isErrorTyped) {
val checked = gen.mkCheckInit(res)
// this check is needed to avoid infinite recursion in Duplicators
// (calling typed1 more than once for the same tree)
if (checked ne res) typed { atPos(tree.pos)(checked) }
else res
- } else res
- case ex: TypeError =>
- fun match {
- case Select(qual, name)
- if !isPatternMode && nme.isOpAssignmentName(name.decode) =>
- val qual1 = typedQualifier(qual)
- if (treeInfo.isVariableOrGetter(qual1)) {
- stopTimer(failedOpEqNanos, opeqStart)
- convertToAssignment(fun, qual1, name, args, ex)
- } else {
- stopTimer(failedApplyNanos, appStart)
- if ((qual1.symbol ne null) && qual1.symbol.isValue)
- error(tree.pos, "reassignment to val")
- else
- reportTypeError(fun.pos, ex)
- setError(tree)
- }
- case _ =>
- stopTimer(failedApplyNanos, appStart)
- reportTypeError(fun.pos, ex)
- setError(tree)
- }
+ } else
+ res
+ case SilentTypeError(err) =>
+ onError({issue(err); setError(tree)})
}
}
}
- def convertToAssignment(fun: Tree, qual: Tree, name: Name, args: List[Tree], ex: TypeError): Tree = {
- val prefix = name.subName(0, name.length - nme.EQL.length)
+ def typedApply(tree: Apply) = {
+ val fun = tree.fun
+ val args = tree.args
+ fun match {
+ case Block(stats, expr) =>
+ typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt)
+ case _ =>
+ normalTypedApply(tree, fun, args) match {
+ case Apply(Select(New(tpt), name), args)
+ if (tpt.tpe != null &&
+ tpt.tpe.typeSymbol == ArrayClass &&
+ args.length == 1 &&
+ erasure.GenericArray.unapply(tpt.tpe).isDefined) => // !!! todo simplify by using extractor
+ // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len)
+ // convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len), where Array HK gets applied (N-1) times
+ // [Eugene] no more MaxArrayDims. ClassTags are flexible enough to allow creation of arrays of arbitrary dimensionality (w.r.t JVM restrictions)
+ val Some((level, componentType)) = erasure.GenericArray.unapply(tpt.tpe)
+ val tagType = List.iterate(componentType, level)(tpe => appliedType(ArrayClass.toTypeConstructor, List(tpe))).last
+ atPos(tree.pos) {
+ val tag = resolveClassTag(tree.pos, tagType)
+ if (tag.isEmpty) MissingClassTagError(tree, tagType)
+ else typed(new ApplyToImplicitArgs(Select(tag, nme.newArray), args))
+ }
+ case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => //SI-5696
+ TooManyArgumentListsForConstructor(tree)
+ case tree1 =>
+ tree1
+ }
+ }
+ }
+
+ def convertToAssignment(fun: Tree, qual: Tree, name: Name, args: List[Tree]): Tree = {
+ val prefix = name.toTermName stripSuffix nme.EQL
def mkAssign(vble: Tree): Tree =
Assign(
vble,
@@ -3423,22 +4690,20 @@ trait Typers extends Modes {
) setPos tree.pos
def mkUpdate(table: Tree, indices: List[Tree]) = {
- gen.evalOnceAll(table :: indices, context.owner, context.unit) { ts =>
- val tab = ts.head
- val is = ts.tail
- Apply(
- Select(tab(), nme.update) setPos table.pos,
- ((is map (i => i())) ::: List(
- Apply(
- Select(
- Apply(
- Select(tab(), nme.apply) setPos table.pos,
- is map (i => i())) setPos qual.pos,
- prefix) setPos fun.pos,
- args) setPos tree.pos)
- )
- ) setPos tree.pos
- }
+ gen.evalOnceAll(table :: indices, context.owner, context.unit) {
+ case tab :: is =>
+ def mkCall(name: Name, extraArgs: Tree*) = (
+ Apply(
+ Select(tab(), name) setPos table.pos,
+ is.map(i => i()) ++ extraArgs
+ ) setPos tree.pos
+ )
+ mkCall(
+ nme.update,
+ Apply(Select(mkCall(nme.apply), prefix) setPos fun.pos, args) setPos tree.pos
+ )
+ case _ => EmptyTree
+ }
}
val tree1 = qual match {
@@ -3452,29 +4717,17 @@ trait Typers extends Modes {
}
case Apply(fn, indices) =>
- treeInfo.methPart(fn) match {
- case Select(table, nme.apply) => mkUpdate(table, indices)
- case _ => errorTree(qual, "Unexpected tree during assignment conversion.")
+ fn match {
+ case treeInfo.Applied(Select(table, nme.apply), _, _) => mkUpdate(table, indices)
+ case _ => UnexpectedTreeAssignmentConversionError(qual)
}
}
typed1(tree1, mode, pt)
-/*
- if (settings.debug.value) log("retry assign: "+tree1)
- silent(_.typed1(tree1, mode, pt)) match {
- case t: Tree =>
- t
- case _ =>
- reportTypeError(tree.pos, ex)
- setError(tree)
- }
-*/
}
- def qualifyingClassSym(qual: Name): Symbol =
- if (tree.symbol != NoSymbol) tree.symbol else qualifyingClass(tree, qual, false)
-
- def typedSuper(qual: Tree, mix: TypeName) = {
- val qual1 = typed(qual)
+ def typedSuper(tree: Super) = {
+ val mix = tree.mix
+ val qual1 = typed(tree.qual)
val clazz = qual1 match {
case This(_) => qual1.symbol
@@ -3487,8 +4740,7 @@ trait Typers extends Modes {
if (ps.isEmpty)
ps = site.parents filter (_.typeSymbol.toInterface.name == mix)
if (ps.isEmpty) {
- if (settings.debug.value)
- Console.println(site.parents map (_.typeSymbol.name))//debug
+ debuglog("Fatal: couldn't find site " + site + " in " + site.parents.map(_.typeSymbol.name))
if (phase.erasedTypes && context.enclClass.owner.isImplClass) {
// println(qual1)
// println(clazz)
@@ -3497,40 +4749,34 @@ trait Typers extends Modes {
// println(mix)
// the reference to super class got lost during erasure
restrictionError(tree.pos, unit, "traits may not select fields or methods from super[C] where C is a class")
+ ErrorType
} else {
- error(tree.pos, mix+" does not name a parent class of "+clazz)
+ MixinMissingParentClassNameError(tree, mix, clazz)
+ ErrorType
}
- ErrorType
} else if (!ps.tail.isEmpty) {
- error(tree.pos, "ambiguous parent class qualifier")
+ AmbiguousParentClassError(tree)
ErrorType
} else {
ps.head
}
}
- val owntype =
- if (mix.isEmpty) {
- if ((mode & SUPERCONSTRmode) != 0)
- if (clazz.info.parents.isEmpty) AnyRefClass.tpe // can happen due to cyclic references ==> #1036
- else clazz.info.parents.head
- else intersectionType(clazz.info.parents)
- } else {
- findMixinSuper(clazz.tpe)
- }
-
- treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype)
- }
+ val owntype = (
+ if (!mix.isEmpty) findMixinSuper(clazz.tpe)
+ else if ((mode & SUPERCONSTRmode) != 0) clazz.info.firstParent
+ else intersectionType(clazz.info.parents)
+ )
+ treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype)
+ }
- def typedThis(qual: Name) = {
- val clazz = qualifyingClassSym(qual)
- if (clazz == NoSymbol) setError(tree)
- else {
- tree setSymbol clazz setType clazz.thisType.underlying
- if (isStableContext(tree, mode, pt)) tree setType clazz.thisType
- tree
+ def typedThis(tree: This) =
+ tree.symbol orElse qualifyingClass(tree, tree.qual, packageOK = false) match {
+ case NoSymbol => tree
+ case clazz =>
+ tree setSymbol clazz setType clazz.thisType.underlying
+ if (isStableContext(tree, mode, pt)) tree setType clazz.thisType else tree
}
- }
/** Attribute a selection where <code>tree</code> is <code>qual.name</code>.
* <code>qual</code> is already attributed.
@@ -3539,84 +4785,83 @@ trait Typers extends Modes {
* @param name ...
* @return ...
*/
- def typedSelect(qual: Tree, name: Name): Tree = {
- val sym =
- if (tree.symbol != NoSymbol) {
- if (phase.erasedTypes && qual.isInstanceOf[Super])
- qual.tpe = tree.symbol.owner.tpe
- if (false && settings.debug.value) { // todo: replace by settings.check.value?
- val alts = qual.tpe.member(tree.symbol.name).alternatives
- if (!(alts exists (alt =>
- alt == tree.symbol || alt.isTerm && (alt.tpe matches tree.symbol.tpe))))
- assert(false, "symbol "+tree.symbol+tree.symbol.locationString+" not in "+alts+" of "+qual.tpe+
- "\n members = "+qual.tpe.members+
- "\n type history = "+qual.tpe.termSymbol.infosString+
- "\n phase = "+phase)
- }
- tree.symbol
- } else {
- member(qual, name)
- }
- if (sym == NoSymbol && name != nme.CONSTRUCTOR && (mode & EXPRmode) != 0) {
- val qual1 =
- if (member(qual, name) != NoSymbol) qual
- else adaptToMemberWithArgs(tree, qual, name, mode)
- if (qual1 ne qual) return typed(treeCopy.Select(tree, qual1, name), mode, pt)
+ def typedSelect(tree: Tree, qual: Tree, name: Name): Tree = {
+ def asDynamicCall = dyna.mkInvoke(context.tree, tree, qual, name) map { t =>
+ dyna.wrapErrors(t, (_.typed1(t, mode, pt)))
}
- if (!reallyExists(sym)) {
- if (context.owner.toplevelClass.isJavaDefined && name.isTypeName) {
- val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) }
- if (tree1 != EmptyTree) return typed1(tree1, mode, pt)
+ val sym = tree.symbol orElse member(qual, name) orElse {
+ // symbol not found? --> try to convert implicitly to a type that does have the required
+ // member. Added `| PATTERNmode` to allow enrichment in patterns (so we can add e.g., an
+ // xml member to StringContext, which in turn has an unapply[Seq] method)
+ if (name != nme.CONSTRUCTOR && inExprModeOr(mode, PATTERNmode)) {
+ val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, true, true)
+ if ((qual1 ne qual) && !qual1.isErrorTyped)
+ return typed(treeCopy.Select(tree, qual1, name), mode, pt)
}
+ NoSymbol
+ }
+ if (phase.erasedTypes && qual.isInstanceOf[Super] && tree.symbol != NoSymbol)
+ qual.tpe = tree.symbol.owner.tpe
- // try to expand according to Dynamic rules.
-
- if (settings.Xexperimental.value && (qual.tpe.widen.typeSymbol isNonBottomSubClass DynamicClass)) {
- var dynInvoke = Apply(Select(qual, nme.applyDynamic), List(Literal(Constant(name.decode))))
- context.tree match {
- case Apply(tree1, args) if tree1 eq tree =>
- ;
- case _ =>
- dynInvoke = Apply(dynInvoke, List())
+ if (!reallyExists(sym)) {
+ def handleMissing: Tree = {
+ if (context.unit.isJava && name.isTypeName) {
+ // SI-3120 Java uses the same syntax, A.B, to express selection from the
+ // value A and from the type A. We have to try both.
+ val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) }
+ if (tree1 != EmptyTree) return typed1(tree1, mode, pt)
}
- return typed1(util.trace("dynatype: ")(dynInvoke), mode, pt)
- }
- if (settings.debug.value) {
- log(
- "qual = "+qual+":"+qual.tpe+
- "\nSymbol="+qual.tpe.termSymbol+"\nsymbol-info = "+qual.tpe.termSymbol.info+
- "\nscope-id = "+qual.tpe.termSymbol.info.decls.hashCode()+"\nmembers = "+qual.tpe.members+
- "\nname = "+name+"\nfound = "+sym+"\nowner = "+context.enclClass.owner
- )
- }
+ // try to expand according to Dynamic rules.
+ asDynamicCall foreach (x => return x)
+
+ debuglog(
+ "qual = " + qual + ":" + qual.tpe +
+ "\nSymbol=" + qual.tpe.termSymbol + "\nsymbol-info = " + qual.tpe.termSymbol.info +
+ "\nscope-id = " + qual.tpe.termSymbol.info.decls.hashCode() + "\nmembers = " + qual.tpe.members +
+ "\nname = " + name + "\nfound = " + sym + "\nowner = " + context.enclClass.owner)
- def makeErrorTree = {
- val tree1 = tree match {
- case Select(_, _) => treeCopy.Select(tree, qual, name)
- case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
+ def makeInteractiveErrorTree = {
+ val tree1 = tree match {
+ case Select(_, _) => treeCopy.Select(tree, qual, name)
+ case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
+ }
+ setError(tree1)
}
- setError(tree1)
- }
- if (name == nme.ERROR && forInteractive)
- return makeErrorTree
+ if (name == nme.ERROR && forInteractive)
+ return makeInteractiveErrorTree
- if (!qual.tpe.widen.isErroneous)
- notAMember(tree, qual, name)
+ if (!qual.tpe.widen.isErroneous) {
+ if ((mode & QUALmode) != 0) {
+ val lastTry = rootMirror.missingHook(qual.tpe.typeSymbol, name)
+ if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt)
+ }
+ NotAMemberError(tree, qual, name)
+ }
- if (forInteractive) makeErrorTree else setError(tree)
+ if (forInteractive) makeInteractiveErrorTree else setError(tree)
+ }
+ handleMissing
} else {
val tree1 = tree match {
case Select(_, _) => treeCopy.Select(tree, qual, name)
case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
}
- val (tree2, pre2) = makeAccessible(tree1, sym, qual.tpe, qual)
- val result = stabilize(tree2, pre2, mode, pt)
+ val (result, accessibleError) = silent(_.makeAccessible(tree1, sym, qual.tpe, qual)) match {
+ case SilentTypeError(err) =>
+ if (err.kind != ErrorKinds.Access) {
+ context issue err
+ return setError(tree)
+ }
+ else (tree1, Some(err))
+ case SilentResultValue(treeAndPre) =>
+ (stabilize(treeAndPre._1, treeAndPre._2, mode, pt), None)
+ }
def isPotentialNullDeference() = {
- phase.id <= currentRun.typerPhase.id &&
+ !isPastTyper &&
!sym.isConstructor &&
!(qual.tpe <:< NotNullClass.tpe) && !qual.tpe.isNotNull &&
!(List(Any_isInstanceOf, Any_asInstanceOf) contains result.symbol) // null.is/as is not a dereference
@@ -3625,41 +4870,75 @@ trait Typers extends Modes {
if (settings.warnSelectNullable.value && isPotentialNullDeference && unit != null)
unit.warning(tree.pos, "potential null pointer dereference: "+tree)
- val selection = result match {
+ result match {
// could checkAccessible (called by makeAccessible) potentially have skipped checking a type application in qual?
- case SelectFromTypeTree(qual at TypeTree(), name) if qual.tpe.typeArgs nonEmpty => // TODO: somehow the new qual is not checked in refchecks
+ case SelectFromTypeTree(qual at TypeTree(), name) if qual.tpe.typeArgs.nonEmpty => // TODO: somehow the new qual is not checked in refchecks
treeCopy.SelectFromTypeTree(
result,
(TypeTreeWithDeferredRefCheck(){ () => val tp = qual.tpe; val sym = tp.typeSymbolDirect
// will execute during refchecks -- TODO: make private checkTypeRef in refchecks public and call that one?
- checkBounds(qual.pos, tp.prefix, sym.owner, sym.typeParams, tp.typeArgs, "")
+ checkBounds(qual, tp.prefix, sym.owner, sym.typeParams, tp.typeArgs, "")
qual // you only get to see the wrapped tree after running this check :-p
- }) setType qual.tpe,
+ }) setType qual.tpe setPos qual.pos,
name)
- case accErr: Inferencer#AccessError =>
- val qual1 =
- try adaptToMemberWithArgs(tree, qual, name, mode)
- catch { case _: TypeError => qual }
- if (qual1 ne qual) typed(Select(qual1, name) setPos tree.pos, mode, pt)
- else accErr.emit()
+ case _ if accessibleError.isDefined =>
+ // don't adapt constructor, SI-6074
+ val qual1 = if (name == nme.CONSTRUCTOR) qual
+ else adaptToMemberWithArgs(tree, qual, name, mode, false, false)
+ if (!qual1.isErrorTyped && (qual1 ne qual))
+ typed(Select(qual1, name) setPos tree.pos, mode, pt)
+ else
+ // before failing due to access, try a dynamic call.
+ asDynamicCall getOrElse {
+ issue(accessibleError.get)
+ setError(tree)
+ }
case _ =>
result
}
- // To fully benefit from special casing the return type of
- // getClass, we have to catch it immediately so expressions
- // like x.getClass().newInstance() are typed with the type of x.
- val isRefinableGetClass = (
- selection.symbol.name == nme.getClass_
- && selection.tpe.params.isEmpty
- // TODO: If the type of the qualifier is inaccessible, we can cause private types
- // to escape scope here, e.g. pos/t1107. I'm not sure how to properly handle this
- // so for now it requires the type symbol be public.
- && qual.tpe.typeSymbol.isPublic
- )
- if (isRefinableGetClass)
- selection setType MethodType(Nil, erasure.getClassReturnType(qual.tpe))
- else
- selection
+ }
+ }
+
+ def typedSelectOrSuperCall(tree: Select) = {
+ val qual = tree.qualifier
+ val name = tree.name
+ qual match {
+ case _: Super if name == nme.CONSTRUCTOR =>
+ val qual1 =
+ typed(qual, EXPRmode | QUALmode | POLYmode | SUPERCONSTRmode, WildcardType)
+ // the qualifier type of a supercall constructor is its first parent class
+ typedSelect(tree, qual1, nme.CONSTRUCTOR)
+ case _ =>
+ if (Statistics.canEnable) Statistics.incCounter(typedSelectCount)
+ var qual1 = checkDead(typedQualifier(qual, mode))
+ if (name.isTypeName) qual1 = checkStable(qual1)
+
+ val tree1 = // temporarily use `filter` and an alternative for `withFilter`
+ if (name == nme.withFilter)
+ silent(_ => typedSelect(tree, qual1, name)) match {
+ case SilentResultValue(result) =>
+ result
+ case _ =>
+ silent(_ => typed1(Select(qual1, nme.filter) setPos tree.pos, mode, pt)) match {
+ case SilentResultValue(result2) =>
+ unit.deprecationWarning(
+ tree.pos, "`withFilter' method does not yet exist on " + qual1.tpe.widen +
+ ", using `filter' method instead")
+ result2
+ case SilentTypeError(err) =>
+ WithFilterError(tree, err)
+ }
+ }
+ else
+ typedSelect(tree, qual1, name)
+
+ if (tree.isInstanceOf[PostfixSelect])
+ checkFeature(tree.pos, PostfixOpsFeature, name.decode)
+ if (tree1.symbol != null && tree1.symbol.isOnlyRefinementMember)
+ checkFeature(tree1.pos, ReflectiveCallsFeature, tree1.symbol.toString)
+
+ if (qual1.hasSymbolWhich(_.isRootPackage)) treeCopy.Ident(tree1, name)
+ else tree1
}
}
@@ -3670,9 +4949,16 @@ trait Typers extends Modes {
* Transformations: (1) Prefix class members with this.
* (2) Change imported symbols to selections
*/
- def typedIdent(name: Name): Tree = {
- def ambiguousError(msg: String) =
- error(tree.pos, "reference to " + name + " is ambiguous;\n" + msg)
+ def typedIdent(tree: Tree, name: Name): Tree = {
+ var errorContainer: AbsTypeError = null
+ def ambiguousError(msg: String) = {
+ assert(errorContainer == null, "Cannot set ambiguous error twice for identifier")
+ errorContainer = AmbiguousIdentError(tree, name, msg)
+ }
+ def identError(tree: AbsTypeError) = {
+ assert(errorContainer == null, "Cannot set ambiguous error twice for identifier")
+ errorContainer = tree
+ }
var defSym: Symbol = tree.symbol // the directly found symbol
var pre: Type = NoPrefix // the prefix type of defSym, if a class member
@@ -3685,10 +4971,14 @@ trait Typers extends Modes {
// last ditch effort before failing. This method sets defSym and returns
// true if a member of the given name exists.
def checkEmptyPackage(): Boolean = {
- defSym = EmptyPackageClass.tpe.nonPrivateMember(name)
+ defSym = rootMirror.EmptyPackageClass.tpe.nonPrivateMember(name)
defSym != NoSymbol
}
-
+ def startingIdentContext = (
+ // ignore current variable scope in patterns to enforce linearity
+ if ((mode & (PATTERNmode | TYPEPATmode)) == 0) context
+ else context.outer
+ )
// A symbol qualifies if it exists and is not stale. Stale symbols
// are made to disappear here. In addition,
// if we are in a constructor of a pattern, we ignore all definitions
@@ -3696,6 +4986,7 @@ trait Typers extends Modes {
// case x :: xs in class List would return the :: method)
// unless they are stable or are accessors (the latter exception is for better error messages).
def qualifies(sym: Symbol): Boolean = {
+ sym.hasRawInfo && // this condition avoids crashing on self-referential pattern variables
reallyExists(sym) &&
((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod || sym.hasFlag(ACCESSOR))
}
@@ -3703,19 +4994,34 @@ trait Typers extends Modes {
if (defSym == NoSymbol) {
var defEntry: ScopeEntry = null // the scope entry of defSym, if defined in a local scope
- var cx = context
- if ((mode & (PATTERNmode | TYPEPATmode)) != 0) {
- // println("ignoring scope: "+name+" "+cx.scope+" "+cx.outer.scope)
- // ignore current variable scope in patterns to enforce linearity
- cx = cx.outer
- }
-
- while (defSym == NoSymbol && cx != NoContext) {
- currentRun.compileSourceFor(context.asInstanceOf[analyzer.Context], name)
+ var cx = startingIdentContext
+ while (defSym == NoSymbol && cx != NoContext && (cx.scope ne null)) { // cx.scope eq null arises during FixInvalidSyms in Duplicators
pre = cx.enclClass.prefix
defEntry = cx.scope.lookupEntry(name)
if ((defEntry ne null) && qualifies(defEntry.sym)) {
- defSym = defEntry.sym
+ // Right here is where SI-1987, overloading in package objects, can be
+ // seen to go wrong. There is an overloaded symbol, but when referring
+ // to the unqualified identifier from elsewhere in the package, only
+ // the last definition is visible. So overloading mis-resolves and is
+ // definition-order dependent, bad things. See run/t1987.scala.
+ //
+ // I assume the actual problem involves how/where these symbols are entered
+ // into the scope. But since I didn't figure out how to fix it that way, I
+ // catch it here by looking up package-object-defined symbols in the prefix.
+ if (isInPackageObject(defEntry.sym, pre.typeSymbol)) {
+ defSym = pre.member(defEntry.sym.name)
+ if (defSym ne defEntry.sym) {
+ qual = gen.mkAttributedQualifier(pre)
+ log(sm"""
+ | !!! Overloaded package object member resolved incorrectly.
+ | prefix: $pre
+ | Discarded: ${defEntry.sym.defString}
+ | Using: ${defSym.defString}
+ """)
+ }
+ }
+ else
+ defSym = defEntry.sym
}
else {
cx = cx.enclClass
@@ -3735,22 +5041,40 @@ trait Typers extends Modes {
else cx.depth - (cx.scope.nestingLevel - defEntry.owner.nestingLevel)
var impSym: Symbol = NoSymbol // the imported symbol
var imports = context.imports // impSym != NoSymbol => it is imported from imports.head
- while (!reallyExists(impSym) && !imports.isEmpty && imports.head.depth > symDepth) {
+
+ // Java: A single-type-import declaration d in a compilation unit c of package p
+ // that imports a type named n shadows, throughout c, the declarations of:
+ //
+ // 1) any top level type named n declared in another compilation unit of p
+ //
+ // A type-import-on-demand declaration never causes any other declaration to be shadowed.
+ //
+ // Scala: Bindings of different kinds have a precedence defined on them:
+ //
+ // 1) Definitions and declarations that are local, inherited, or made available by a
+ // package clause in the same compilation unit where the definition occurs have
+ // highest precedence.
+ // 2) Explicit imports have next highest precedence.
+ def depthOk(imp: ImportInfo) = (
+ imp.depth > symDepth
+ || (unit.isJava && imp.isExplicitImport(name) && imp.depth == symDepth)
+ )
+ while (!reallyExists(impSym) && !imports.isEmpty && depthOk(imports.head)) {
impSym = imports.head.importedSymbol(name)
if (!impSym.exists) imports = imports.tail
}
// detect ambiguous definition/import,
- // update `defSym' to be the final resolved symbol,
- // update `pre' to be `sym's prefix type in case it is an imported member,
+ // update `defSym` to be the final resolved symbol,
+ // update `pre` to be `sym`s prefix type in case it is an imported member,
// and compute value of:
if (defSym.exists && impSym.exists) {
// imported symbols take precedence over package-owned symbols in different
// compilation units. Defined symbols take precedence over erroneous imports.
- if (defSym.definedInPackage &&
+ if (defSym.isDefinedInPackage &&
(!currentRun.compiles(defSym) ||
- (context.unit ne null) && defSym.sourceFile != context.unit.source.file))
+ context.unit.exists && defSym.sourceFile != context.unit.source.file))
defSym = NoSymbol
else if (impSym.isError || impSym.name == nme.CONSTRUCTOR)
impSym = NoSymbol
@@ -3766,17 +5090,53 @@ trait Typers extends Modes {
qual = atPos(tree.pos.focusStart)(gen.mkAttributedQualifier(pre))
} else {
if (impSym.exists) {
- var impSym1 = NoSymbol
+ var impSym1: Symbol = NoSymbol
var imports1 = imports.tail
+
+ /** It's possible that seemingly conflicting identifiers are
+ * identifiably the same after type normalization. In such cases,
+ * allow compilation to proceed. A typical example is:
+ * package object foo { type InputStream = java.io.InputStream }
+ * import foo._, java.io._
+ */
def ambiguousImport() = {
- if (!(imports.head.qual.tpe =:= imports1.head.qual.tpe))
- ambiguousError(
- "it is imported twice in the same scope by\n"+imports.head + "\nand "+imports1.head)
+ // The types of the qualifiers from which the ambiguous imports come.
+ // If the ambiguous name is a value, these must be the same.
+ def t1 = imports.head.qual.tpe
+ def t2 = imports1.head.qual.tpe
+ // The types of the ambiguous symbols, seen as members of their qualifiers.
+ // If the ambiguous name is a monomorphic type, we can relax this far.
+ def mt1 = t1 memberType impSym
+ def mt2 = t2 memberType impSym1
+ def characterize = List(
+ s"types: $t1 =:= $t2 ${t1 =:= t2} members: ${mt1 =:= mt2}",
+ s"member type 1: $mt1",
+ s"member type 2: $mt2",
+ s"$impSym == $impSym1 ${impSym == impSym1}",
+ s"${impSym.debugLocationString} ${impSym.getClass}",
+ s"${impSym1.debugLocationString} ${impSym1.getClass}"
+ ).mkString("\n ")
+
+ // The symbol names are checked rather than the symbols themselves because
+ // each time an overloaded member is looked up it receives a new symbol.
+ // So foo.member("x") != foo.member("x") if x is overloaded. This seems
+ // likely to be the cause of other bugs too...
+ if (t1 =:= t2 && impSym.name == impSym1.name)
+ log(s"Suppressing ambiguous import: $t1 =:= $t2 && $impSym == $impSym1")
+ // Monomorphism restriction on types is in part because type aliases could have the
+ // same target type but attach different variance to the parameters. Maybe it can be
+ // relaxed, but doesn't seem worth it at present.
+ else if (mt1 =:= mt2 && name.isTypeName && impSym.isMonomorphicType && impSym1.isMonomorphicType)
+ log(s"Suppressing ambiguous import: $mt1 =:= $mt2 && $impSym and $impSym1 are equivalent")
+ else {
+ log(s"Import is genuinely ambiguous:\n " + characterize)
+ ambiguousError(s"it is imported twice in the same scope by\n${imports.head}\nand ${imports1.head}")
+ }
}
- while (!imports1.isEmpty &&
+ while (errorContainer == null && !imports1.isEmpty &&
(!imports.head.isExplicitImport(name) ||
imports1.head.depth == imports.head.depth)) {
- var impSym1 = imports1.head.importedSymbol(name)
+ impSym1 = imports1.head.importedSymbol(name)
if (reallyExists(impSym1)) {
if (imports1.head.isExplicitImport(name)) {
if (imports.head.isExplicitImport(name) ||
@@ -3797,70 +5157,110 @@ trait Typers extends Modes {
else if (settings.exposeEmptyPackage.value && checkEmptyPackage())
log("Allowing empty package member " + name + " due to settings.")
else {
+ if ((mode & QUALmode) != 0) {
+ val lastTry = rootMirror.missingHook(rootMirror.RootClass, name)
+ if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt)
+ }
if (settings.debug.value) {
log(context.imports)//debug
}
if (inaccessibleSym eq NoSymbol) {
- error(tree.pos, "not found: "+decodeWithKind(name, context.owner))
- }
- else new AccessError(
- tree, inaccessibleSym, context.enclClass.owner.thisType,
- inaccessibleExplanation
- ).emit()
+ // Avoiding some spurious error messages: see SI-2388.
+ if (reporter.hasErrors && (name startsWith tpnme.ANON_CLASS_NAME)) ()
+ else identError(SymbolNotFoundError(tree, name, context.owner, startingIdentContext))
+ } else
+ identError(InferErrorGen.AccessError(
+ tree, inaccessibleSym, context.enclClass.owner.thisType, context.enclClass.owner,
+ inaccessibleExplanation
+ ))
defSym = context.owner.newErrorSymbol(name)
}
}
}
- if (defSym.owner.isPackageClass) pre = defSym.owner.thisType
- if (defSym.isThisSym) {
- typed1(This(defSym.owner) setPos tree.pos, mode, pt)
+ if (errorContainer != null) {
+ ErrorUtils.issueTypeError(errorContainer)
+ setError(tree)
} else {
- val tree1 = if (qual == EmptyTree) tree
- else atPos(tree.pos)(Select(qual, name))
- // atPos necessary because qualifier might come from startContext
- val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual)
- // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right?
- stabilize(tree2, pre2, mode, pt) match {
- case accErr: Inferencer#AccessError => accErr.emit()
- case result => result
+ if (defSym.owner.isPackageClass)
+ pre = defSym.owner.thisType
+
+ // Inferring classOf type parameter from expected type.
+ if (defSym.isThisSym) {
+ typed1(This(defSym.owner) setPos tree.pos, mode, pt)
+ }
+ // Inferring classOf type parameter from expected type. Otherwise an
+ // actual call to the stubbed classOf method is generated, returning null.
+ else if (isPredefMemberNamed(defSym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty)
+ typedClassOf(tree, TypeTree(pt.typeArgs.head))
+ else {
+ val tree1 = (
+ if (qual == EmptyTree) tree
+ // atPos necessary because qualifier might come from startContext
+ else atPos(tree.pos)(Select(qual, name) setAttachments tree.attachments)
+ )
+ val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual)
+ // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right?
+ val tree3 = stabilize(tree2, pre2, mode, pt)
+ // SI-5967 Important to replace param type A* with Seq[A] when seen from from a reference, to avoid
+ // inference errors in pattern matching.
+ tree3 setType dropRepeatedParamType(tree3.tpe)
}
}
}
- def typedCompoundTypeTree(templ: Template) = {
+ def typedIdentOrWildcard(tree: Ident) = {
+ val name = tree.name
+ if (Statistics.canEnable) Statistics.incCounter(typedIdentCount)
+ if ((name == nme.WILDCARD && (mode & (PATTERNmode | FUNmode)) == PATTERNmode) ||
+ (name == tpnme.WILDCARD && (mode & TYPEmode) != 0))
+ tree setType makeFullyDefined(pt)
+ else
+ typedIdent(tree, name)
+ }
+
+ def typedCompoundTypeTree(tree: CompoundTypeTree) = {
+ val templ = tree.templ
val parents1 = templ.parents mapConserve (typedType(_, mode))
- if (parents1 exists (_.tpe.isError)) tree setType ErrorType
+
+ // This is also checked later in typedStats, but that is too late for SI-5361, so
+ // we eagerly check this here.
+ for (stat <- templ.body if !treeInfo.isDeclarationOrTypeDef(stat))
+ OnlyDeclarationsError(stat)
+
+ if ((parents1 ++ templ.body) exists (_.isErrorTyped)) tree setType ErrorType
else {
- val decls = new Scope
+ val decls = newScope
//Console.println("Owner: " + context.enclClass.owner + " " + context.enclClass.owner.id)
val self = refinedType(parents1 map (_.tpe), context.enclClass.owner, decls, templ.pos)
- newTyper(context.make(templ, self.typeSymbol, decls)).typedRefinement(templ.body)
- tree setType self
+ newTyper(context.make(templ, self.typeSymbol, decls)).typedRefinement(templ)
+ templ updateAttachment CompoundTypeTreeOriginalAttachment(parents1, Nil) // stats are set elsewhere
+ tree setType (if (templ.exists(_.isErroneous)) ErrorType else self) // Being conservative to avoid SI-5361
}
}
- def typedAppliedTypeTree(tpt: Tree, args: List[Tree]) = {
+ def typedAppliedTypeTree(tree: AppliedTypeTree) = {
+ val tpt = tree.tpt
+ val args = tree.args
val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType)
- if (tpt1.tpe.isError) {
- setError(tree)
+ if (tpt1.isErrorTyped) {
+ tpt1
} else if (!tpt1.hasSymbol) {
- errorTree(tree, tpt1.tpe+" does not take type parameters")
+ AppliedTypeNoParametersError(tree, tpt1.tpe)
} else {
val tparams = tpt1.symbol.typeParams
if (sameLength(tparams, args)) {
// @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
val args1 =
- if(!tpt1.symbol.rawInfo.isComplete)
+ if (!tpt1.symbol.rawInfo.isComplete)
args mapConserve (typedHigherKindedType(_, mode))
// if symbol hasn't been fully loaded, can't check kind-arity
- else map2Conserve(args, tparams) {
- (arg, tparam) =>
- typedHigherKindedType(arg, mode, polyType(tparam.typeParams, AnyClass.tpe))
- //@M! the polytype denotes the expected kind
+ else map2Conserve(args, tparams) { (arg, tparam) =>
+ //@M! the polytype denotes the expected kind
+ typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
}
val argtypes = args1 map (_.tpe)
- (args, tparams).zipped foreach { (arg, tparam) => arg match {
+ foreach2(args, tparams)((arg, tparam) => arg match {
// note: can't use args1 in selector, because Bind's got replaced
case Bind(_, _) =>
if (arg.symbol.isAbstractType)
@@ -3869,357 +5269,340 @@ trait Typers extends Modes {
lub(List(arg.symbol.info.bounds.lo, tparam.info.bounds.lo.subst(tparams, argtypes))),
glb(List(arg.symbol.info.bounds.hi, tparam.info.bounds.hi.subst(tparams, argtypes))))
case _ =>
- }}
+ })
val original = treeCopy.AppliedTypeTree(tree, tpt1, args1)
- val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal original
+ val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal original
if(tpt1.tpe.isInstanceOf[PolyType]) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
- (TypeTreeWithDeferredRefCheck(){ () =>
+ TypeTreeWithDeferredRefCheck(){ () =>
// wrap the tree and include the bounds check -- refchecks will perform this check (that the beta reduction was indeed allowed) and unwrap
// we can't simply use original in refchecks because it does not contains types
// (and the only typed trees we have have been mangled so they're not quite the original tree anymore)
- checkBounds(result.pos, tpt1.tpe.prefix, tpt1.symbol.owner, tpt1.symbol.typeParams, argtypes, "")
+ checkBounds(result, tpt1.tpe.prefix, tpt1.symbol.owner, tpt1.symbol.typeParams, argtypes, "")
result // you only get to see the wrapped tree after running this check :-p
- }).setType(result.tpe)
+ } setType (result.tpe) setPos(result.pos)
else result
} else if (tparams.isEmpty) {
- errorTree(tree, tpt1.tpe+" does not take type parameters")
+ AppliedTypeNoParametersError(tree, tpt1.tpe)
} else {
//Console.println("\{tpt1}:\{tpt1.symbol}:\{tpt1.symbol.info}")
if (settings.debug.value) Console.println(tpt1+":"+tpt1.symbol+":"+tpt1.symbol.info)//debug
- errorTree(tree, "wrong number of type arguments for "+tpt1.tpe+", should be "+tparams.length)
+ AppliedTypeWrongNumberOfArgsError(tree, tpt1, tparams)
}
}
}
- def adaptCase(cdef: CaseDef, tpe: Type): CaseDef =
- treeCopy.CaseDef(cdef, cdef.pat, cdef.guard, adapt(cdef.body, mode, tpe))
-
- // begin typed1
val sym: Symbol = tree.symbol
if ((sym ne null) && (sym ne NoSymbol)) sym.initialize
- //if (settings.debug.value && tree.isDef) log("typing definition of "+sym);//DEBUG
- tree match {
- case PackageDef(pid, stats) =>
- val pid1 = typedQualifier(pid).asInstanceOf[RefTree]
- assert(sym.moduleClass ne NoSymbol, sym)
- // complete lazy annotations
- val annots = sym.annotations
- val stats1 = newTyper(context.make(tree, sym.moduleClass, sym.info.decls))
- .typedStats(stats, NoSymbol)
- treeCopy.PackageDef(tree, pid1, stats1) setType NoType
-
- case tree @ ClassDef(_, _, _, _) =>
- newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
- case tree @ ModuleDef(_, _, _) =>
- newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
-
- case vdef @ ValDef(_, _, _, _) =>
- typedValDef(vdef)
-
- case ddef @ DefDef(_, _, _, _, _, _) =>
- newTyper(context.makeNewScope(tree, sym)).typedDefDef(ddef)
-
- case tdef @ TypeDef(_, _, _, _) =>
- typedTypeDef(tdef)
+ def typedPackageDef(pdef: PackageDef) = {
+ val pid1 = typedQualifier(pdef.pid).asInstanceOf[RefTree]
+ assert(sym.moduleClass ne NoSymbol, sym)
+ val stats1 = newTyper(context.make(tree, sym.moduleClass, sym.info.decls))
+ .typedStats(pdef.stats, NoSymbol)
+ treeCopy.PackageDef(tree, pid1, stats1) setType NoType
+ }
- case ldef @ LabelDef(_, _, _) =>
- labelTyper(ldef).typedLabelDef(ldef)
-
- case ddef @ DocDef(comment, defn) =>
- if (forScaladoc && (sym ne null) && (sym ne NoSymbol)) {
- docComments(sym) = comment
- comment.defineVariables(sym)
- val typer1 = newTyper(context.makeNewScope(tree, context.owner))
- for (useCase <- comment.useCases) {
- typer1.silent(_.typedUseCase(useCase)) match {
- case ex: TypeError =>
- unit.warning(useCase.pos, ex.msg)
- case _ =>
- }
- for (useCaseSym <- useCase.defined) {
- if (sym.name != useCaseSym.name)
- unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
- }
+ def typedDocDef(docdef: DocDef) = {
+ if (forScaladoc && (sym ne null) && (sym ne NoSymbol)) {
+ val comment = docdef.comment
+ fillDocComment(sym, comment)
+ val typer1 = newTyper(context.makeNewScope(tree, context.owner))
+ for (useCase <- comment.useCases) {
+ typer1.silent(_.typedUseCase(useCase)) match {
+ case SilentTypeError(err) =>
+ unit.warning(useCase.pos, err.errMsg)
+ case _ =>
+ }
+ for (useCaseSym <- useCase.defined) {
+ if (sym.name != useCaseSym.name)
+ unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
}
}
- typed(defn, mode, pt)
-
- case Annotated(constr, arg) =>
- typedAnnotated(constr, typed(arg, mode, pt))
+ }
+ typed(docdef.definition, mode, pt)
+ }
- case tree @ Block(_, _) =>
- newTyper(context.makeNewScope(tree, context.owner))
- .typedBlock(tree, mode, pt)
+ /**
+ * The typer with the correct context for a method definition. If the method is a default getter for
+ * a constructor default, the resulting typer has a constructor context (fixes SI-5543).
+ */
+ def defDefTyper(ddef: DefDef) = {
+ val isConstrDefaultGetter = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
+ nme.defaultGetterToMethod(sym.name) == nme.CONSTRUCTOR
+ newTyper(context.makeNewScope(ddef, sym)).constrTyperIf(isConstrDefaultGetter)
+ }
- case Alternative(alts) =>
- val alts1 = alts mapConserve (alt => typed(alt, mode | ALTmode, pt))
- treeCopy.Alternative(tree, alts1) setType pt
+ def typedAlternative(alt: Alternative) = {
+ val alts1 = alt.trees mapConserve (alt => typed(alt, mode | ALTmode, pt))
+ treeCopy.Alternative(tree, alts1) setType pt
+ }
- case Star(elem) =>
- checkStarPatOK(tree.pos, mode)
- val elem1 = typed(elem, mode, pt)
- treeCopy.Star(tree, elem1) setType makeFullyDefined(pt)
+ def typedStar(tree: Star) = {
+ if ((mode & STARmode) == 0 && !isPastTyper)
+ StarPatternWithVarargParametersError(tree)
+ treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt)
+ }
- case Bind(name, body) =>
- typedBind(name, body)
+ def typedUnApply(tree: UnApply) = {
+ val fun1 = typed(tree.fun)
+ val tpes = formalTypes(unapplyTypeList(tree.fun.pos, tree.fun.symbol, fun1.tpe, tree.args), tree.args.length)
+ val args1 = map2(tree.args, tpes)(typedPattern)
+ treeCopy.UnApply(tree, fun1, args1) setType pt
+ }
- case UnApply(fun, args) =>
- val fun1 = typed(fun)
- val tpes = formalTypes(unapplyTypeList(fun.symbol, fun1.tpe), args.length)
- val args1 = (args, tpes).zipped map typedPattern
- treeCopy.UnApply(tree, fun1, args1) setType pt
+ def typedTry(tree: Try) = {
+ var block1 = typed(tree.block, pt)
+ var catches1 = typedCases(tree.catches, ThrowableClass.tpe, pt)
- case ArrayValue(elemtpt, elems) =>
- typedArrayValue(elemtpt, elems)
+ for (cdef <- catches1 if !isPastTyper && cdef.guard.isEmpty) {
+ def warn(name: Name) = context.warning(cdef.pat.pos, s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning.")
+ def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol
+ cdef.pat match {
+ case Bind(name, i @ Ident(_)) if unbound(i) => warn(name)
+ case i @ Ident(name) if unbound(i) => warn(name)
+ case _ =>
+ }
+ }
- case tree @ Function(_, _) =>
- if (tree.symbol == NoSymbol)
- tree.symbol = context.owner.newValue(tree.pos, nme.ANON_FUN_NAME)
- .setFlag(SYNTHETIC).setInfo(NoType)
- newTyper(context.makeNewScope(tree, tree.symbol)).typedFunction(tree, mode, pt)
+ val finalizer1 =
+ if (tree.finalizer.isEmpty) tree.finalizer
+ else typed(tree.finalizer, UnitClass.tpe)
+ val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)), pt)
+ if (needAdapt) {
+ block1 = adapt(block1, mode, owntype)
+ catches1 = catches1 map (adaptCase(_, mode, owntype))
+ }
- case Assign(lhs, rhs) =>
- typedAssign(lhs, rhs)
+ treeCopy.Try(tree, block1, catches1, finalizer1) setType owntype
+ }
- case AssignOrNamedArg(lhs, rhs) => // called by NamesDefaults in silent typecheck
- typedAssign(lhs, rhs)
+ def typedThrow(tree: Throw) = {
+ val expr1 = typed(tree.expr, EXPRmode | BYVALmode, ThrowableClass.tpe)
+ treeCopy.Throw(tree, expr1) setType NothingClass.tpe
+ }
- case If(cond, thenp, elsep) =>
- typedIf(cond, thenp, elsep)
+ def typedTyped(tree: Typed) = {
+ val expr = tree.expr
+ val tpt = tree.tpt
+ tpt match {
+ case Function(List(), EmptyTree) =>
+ // find out whether the programmer is trying to eta-expand a macro def
+ // to do that we need to typecheck the tree first (we need a symbol of the eta-expandee)
+ // that typecheck must not trigger macro expansions, so we explicitly prohibit them
+ // however we cannot do `context.withMacrosDisabled`
+ // because `expr` might contain nested macro calls (see SI-6673)
+ val exprTyped = typed1(suppressMacroExpansion(expr), mode, pt)
+ exprTyped match {
+ case macroDef if macroDef.symbol != null && macroDef.symbol.isTermMacro && !macroDef.symbol.isErroneous =>
+ MacroEtaError(exprTyped)
+ case _ =>
+ typedEta(checkDead(exprTyped))
+ }
- case tree @ Match(selector, cases) =>
- if (selector == EmptyTree) {
- val arity = if (isFunctionType(pt)) pt.normalize.typeArgs.length - 1 else 1
- val params = for (i <- List.range(0, arity)) yield
- atPos(tree.pos.focusStart) {
- ValDef(Modifiers(PARAM | SYNTHETIC),
- unit.freshTermName("x" + i + "$"), TypeTree(), EmptyTree)
+ case Ident(tpnme.WILDCARD_STAR) =>
+ val exprTyped = typed(expr, onlyStickyModes(mode), WildcardType)
+ def subArrayType(pt: Type) =
+ if (isPrimitiveValueClass(pt.typeSymbol) || !isFullyDefined(pt)) arrayType(pt)
+ else {
+ val tparam = context.owner freshExistential "" setInfo TypeBounds.upper(pt)
+ newExistentialType(List(tparam), arrayType(tparam.tpe))
}
- val ids = for (p <- params) yield Ident(p.name)
- val selector1 = atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) }
- val body = treeCopy.Match(tree, selector1, cases)
- typed1(atPos(tree.pos) { Function(params, body) }, mode, pt)
- } else {
- val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
- var cases1 = typedCases(tree, cases, selector1.tpe.widen, pt)
- val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe))
- if (needAdapt) {
- cases1 = cases1 map (adaptCase(_, owntype))
+
+ val (exprAdapted, baseClass) = exprTyped.tpe.typeSymbol match {
+ case ArrayClass => (adapt(exprTyped, onlyStickyModes(mode), subArrayType(pt)), ArrayClass)
+ case _ => (adapt(exprTyped, onlyStickyModes(mode), seqType(pt)), SeqClass)
+ }
+ exprAdapted.tpe.baseType(baseClass) match {
+ case TypeRef(_, _, List(elemtp)) =>
+ treeCopy.Typed(tree, exprAdapted, tpt setType elemtp) setType elemtp
+ case _ =>
+ setError(tree)
}
- treeCopy.Match(tree, selector1, cases1) setType owntype
- }
- case Return(expr) =>
- typedReturn(expr)
-
- case Try(block, catches, finalizer) =>
- var block1 = typed(block, pt)
- var catches1 = typedCases(tree, catches, ThrowableClass.tpe, pt)
- val finalizer1 = if (finalizer.isEmpty) finalizer
- else typed(finalizer, UnitClass.tpe)
- val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)))
- if (needAdapt) {
- block1 = adapt(block1, mode, owntype)
- catches1 = catches1 map (adaptCase(_, owntype))
- }
- treeCopy.Try(tree, block1, catches1, finalizer1) setType owntype
+ case _ =>
+ val tptTyped = typedType(tpt, mode)
+ val exprTyped = typed(expr, onlyStickyModes(mode), tptTyped.tpe.deconst)
+ val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped)
- case Throw(expr) =>
- val expr1 = typed(expr, EXPRmode | BYVALmode, ThrowableClass.tpe)
- treeCopy.Throw(tree, expr1) setType NothingClass.tpe
+ if (isPatternMode) {
+ val uncheckedTypeExtractor = extractorForUncheckedType(tpt.pos, tptTyped.tpe)
- case New(tpt: Tree) =>
- typedNew(tpt)
+ // make fully defined to avoid bounded wildcard types that may be in pt from calling dropExistential (SI-2038)
+ val ptDefined = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
+ val ownType = inferTypedPattern(tptTyped, tptTyped.tpe, ptDefined, canRemedy = uncheckedTypeExtractor.nonEmpty)
+ treeTyped setType ownType
- case Typed(expr, Function(List(), EmptyTree)) =>
- typedEta(checkDead(typed1(expr, mode, pt)))
+ uncheckedTypeExtractor match {
+ case None => treeTyped
+ case Some(extractor) => wrapClassTagUnapply(treeTyped, extractor, tptTyped.tpe)
+ }
+ } else
+ treeTyped setType tptTyped.tpe
+ }
+ }
- case Typed(expr, tpt @ Ident(tpnme.WILDCARD_STAR)) =>
- val expr0 = typed(expr, onlyStickyModes(mode), WildcardType)
- def subArrayType(pt: Type) =
- if (isValueClass(pt.typeSymbol) || !isFullyDefined(pt)) arrayType(pt)
- else {
- val tparam = context.owner freshExistential "" setInfo TypeBounds.upper(pt)
- ExistentialType(List(tparam), arrayType(tparam.tpe))
- }
- val (expr1, baseClass) = expr0.tpe.typeSymbol match {
- case ArrayClass => (adapt(expr0, onlyStickyModes(mode), subArrayType(pt)), ArrayClass)
- case _ => (adapt(expr0, onlyStickyModes(mode), seqType(pt)), SeqClass)
- }
- expr1.tpe.baseType(baseClass) match {
- case TypeRef(_, _, List(elemtp)) =>
- treeCopy.Typed(tree, expr1, tpt setType elemtp) setType elemtp
- case _ =>
- setError(tree)
- }
+ def typedTypeApply(tree: TypeApply) = {
+ val fun = tree.fun
+ val args = tree.args
+ // @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
+ //@M! we must type fun in order to type the args, as that requires the kinds of fun's type parameters.
+ // However, args should apparently be done first, to save context.undetparams. Unfortunately, the args
+ // *really* have to be typed *after* fun. We escape from this classic Catch-22 by simply saving&restoring undetparams.
- case Typed(expr, tpt) =>
- val tpt1 = typedType(tpt, mode)
- val expr1 = typed(expr, onlyStickyModes(mode), tpt1.tpe.deconst)
- val owntype =
- if (isPatternMode) inferTypedPattern(tpt1.pos, tpt1.tpe, pt)
- else tpt1.tpe
- //Console.println(typed pattern: "+tree+":"+", tp = "+tpt1.tpe+", pt = "+pt+" ==> "+owntype)//DEBUG
- treeCopy.Typed(tree, expr1, tpt1) setType owntype
-
- case TypeApply(fun, args) =>
- // @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
- //@M! we must type fun in order to type the args, as that requires the kinds of fun's type parameters.
- // However, args should apparently be done first, to save context.undetparams. Unfortunately, the args
- // *really* have to be typed *after* fun. We escape from this classic Catch-22 by simply saving&restoring undetparams.
-
- // @M TODO: the compiler still bootstraps&all tests pass when this is commented out..
- //val undets = context.undetparams
-
- // @M: fun is typed in TAPPmode because it is being applied to its actual type parameters
- val fun1 = typed(fun, forFunMode(mode) | TAPPmode, WildcardType)
- val tparams = fun1.symbol.typeParams
-
- //@M TODO: val undets_fun = context.undetparams ?
- // "do args first" (by restoring the context.undetparams) in order to maintain context.undetparams on the function side.
-
- // @M TODO: the compiler still bootstraps when this is commented out.. TODO: run tests
- //context.undetparams = undets
-
- // @M maybe the well-kindedness check should be done when checking the type arguments conform to the type parameters' bounds?
- val args1 = if (sameLength(args, tparams)) map2Conserve(args, tparams) {
- //@M! the polytype denotes the expected kind
- (arg, tparam) => typedHigherKindedType(arg, mode, polyType(tparam.typeParams, AnyClass.tpe))
- } else {
- //@M this branch is correctly hit for an overloaded polymorphic type. It also has to handle erroneous cases.
- // Until the right alternative for an overloaded method is known, be very liberal,
- // typedTypeApply will find the right alternative and then do the same check as
- // in the then-branch above. (see pos/tcpoly_overloaded.scala)
- // this assert is too strict: be tolerant for errors like trait A { def foo[m[x], g]=error(""); def x[g] = foo[g/*ERR: missing argument type*/] }
- //assert(fun1.symbol.info.isInstanceOf[OverloadedType] || fun1.symbol.isError) //, (fun1.symbol,fun1.symbol.info,fun1.symbol.info.getClass,args,tparams))
- args mapConserve (typedHigherKindedType(_, mode))
- }
+ // @M TODO: the compiler still bootstraps&all tests pass when this is commented out..
+ //val undets = context.undetparams
- //@M TODO: context.undetparams = undets_fun ?
- typedTypeApply(tree, mode, fun1, args1)
+ // @M: fun is typed in TAPPmode because it is being applied to its actual type parameters
+ val fun1 = typed(fun, forFunMode(mode) | TAPPmode, WildcardType)
+ val tparams = fun1.symbol.typeParams
- case Apply(Block(stats, expr), args) =>
- typed1(atPos(tree.pos)(Block(stats, Apply(expr, args))), mode, pt)
+ //@M TODO: val undets_fun = context.undetparams ?
+ // "do args first" (by restoring the context.undetparams) in order to maintain context.undetparams on the function side.
- case Apply(fun, args) =>
- typedApply(fun, args) match {
- case Apply(Select(New(tpt), name), args)
- if (tpt.tpe != null &&
- tpt.tpe.typeSymbol == ArrayClass &&
- args.length == 1 &&
- erasure.GenericArray.unapply(tpt.tpe).isDefined) => // !!! todo simplify by using extractor
- // convert new Array[T](len) to evidence[ClassManifest[T]].newArray(len)
- // convert new Array^N[T](len) for N > 1 to evidence[ClassManifest[T]].newArrayN(len)
- val Some((level, manifType)) = erasure.GenericArray.unapply(tpt.tpe)
- if (level > MaxArrayDims)
- error(tree.pos, "cannot create a generic multi-dimensional array of more than "+MaxArrayDims+" dimensions")
- val newArrayApp = atPos(tree.pos) {
- val manif = getManifestTree(tree.pos, manifType, false)
- new ApplyToImplicitArgs(Select(manif, if (level == 1) "newArray" else "newArray"+level), args)
- }
- typed(newArrayApp, mode, pt)
- case tree1 =>
- tree1
- }
+ // @M TODO: the compiler still bootstraps when this is commented out.. TODO: run tests
+ //context.undetparams = undets
- case ApplyDynamic(qual, args) =>
- val reflectiveCalls = !(settings.refinementMethodDispatch.value == "invoke-dynamic")
- val qual1 = typed(qual, AnyRefClass.tpe)
- val args1 = args mapConserve (arg => if (reflectiveCalls) typed(arg, AnyRefClass.tpe) else typed(arg))
- treeCopy.ApplyDynamic(tree, qual1, args1) setType (if (reflectiveCalls) AnyRefClass.tpe else tree.symbol.info.resultType)
-
- case Super(qual, mix) =>
- typedSuper(qual, mix)
-
- case This(qual) =>
- typedThis(qual)
-
- case Select(qual @ Super(_, _), nme.CONSTRUCTOR) =>
- val qual1 =
- typed(qual, EXPRmode | QUALmode | POLYmode | SUPERCONSTRmode, WildcardType)
- // the qualifier type of a supercall constructor is its first parent class
- typedSelect(qual1, nme.CONSTRUCTOR)
-
- case Select(qual, name) =>
- incCounter(typedSelectCount)
- var qual1 = checkDead(typedQualifier(qual, mode))
- if (name.isTypeName) qual1 = checkStable(qual1)
-
- val tree1 = // temporarily use `filter' and an alternative for `withFilter'
- if (name == nme.withFilter)
- silent(_ => typedSelect(qual1, name)) match {
- case result1: Tree =>
- result1
- case ex1: TypeError =>
- silent(_ => typed1(Select(qual1, nme.filter) setPos tree.pos, mode, pt)) match {
- case result2: Tree =>
- unit.deprecationWarning(
- tree.pos, "`withFilter' method does not yet exist on "+qual1.tpe.widen+
- ", using `filter' method instead")
- result2
- case ex2: TypeError =>
- reportTypeError(tree.pos, ex1)
- setError(tree)
- }
- }
- else
- typedSelect(qual1, name)
+ // @M maybe the well-kindedness check should be done when checking the type arguments conform to the type parameters' bounds?
+ val args1 = if (sameLength(args, tparams)) map2Conserve(args, tparams) {
+ //@M! the polytype denotes the expected kind
+ (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
+ }
+ else {
+ //@M this branch is correctly hit for an overloaded polymorphic type. It also has to handle erroneous cases.
+ // Until the right alternative for an overloaded method is known, be very liberal,
+ // typedTypeApply will find the right alternative and then do the same check as
+ // in the then-branch above. (see pos/tcpoly_overloaded.scala)
+ // this assert is too strict: be tolerant for errors like trait A { def foo[m[x], g]=error(""); def x[g] = foo[g/*ERR: missing argument type*/] }
+ //assert(fun1.symbol.info.isInstanceOf[OverloadedType] || fun1.symbol.isError) //, (fun1.symbol,fun1.symbol.info,fun1.symbol.info.getClass,args,tparams))
+ args mapConserve (typedHigherKindedType(_, mode))
+ }
- if (qual1.symbol == RootPackage) treeCopy.Ident(tree1, name)
- else tree1
+ //@M TODO: context.undetparams = undets_fun ?
+ Typer.this.typedTypeApply(tree, mode, fun1, args1)
+ }
- case Ident(name) =>
- incCounter(typedIdentCount)
- if ((name == nme.WILDCARD && (mode & (PATTERNmode | FUNmode)) == PATTERNmode) ||
- (name == tpnme.WILDCARD && (mode & TYPEmode) != 0))
- tree setType makeFullyDefined(pt)
- else
- typedIdent(name)
+ def typedApplyDynamic(tree: ApplyDynamic) = {
+ assert(phase.erasedTypes)
+ val reflectiveCalls = !(settings.refinementMethodDispatch.value == "invoke-dynamic")
+ val qual1 = typed(tree.qual, AnyRefClass.tpe)
+ val args1 = tree.args mapConserve (arg => if (reflectiveCalls) typed(arg, AnyRefClass.tpe) else typed(arg))
+ treeCopy.ApplyDynamic(tree, qual1, args1) setType (if (reflectiveCalls) AnyRefClass.tpe else tree.symbol.info.resultType)
+ }
+
+ def typedReferenceToBoxed(tree: ReferenceToBoxed) = {
+ val id = tree.ident
+ val id1 = typed1(id, mode, pt) match { case id: Ident => id }
+ // [Eugene] am I doing it right?
+ val erasedTypes = phaseId(currentPeriod) >= currentRun.erasurePhase.id
+ val tpe = capturedVariableType(id.symbol, erasedTypes = erasedTypes)
+ treeCopy.ReferenceToBoxed(tree, id1) setType tpe
+ }
- case Literal(value) =>
- tree setType (
- if (value.tag == UnitTag) UnitClass.tpe
- else ConstantType(value))
+ def typedLiteral(tree: Literal) = {
+ val value = tree.value
+ tree setType (
+ if (value.tag == UnitTag) UnitClass.tpe
+ else ConstantType(value))
+ }
- case SingletonTypeTree(ref) =>
- val ref1 = checkStable(
- typed(ref, EXPRmode | QUALmode | (mode & TYPEPATmode), AnyRefClass.tpe))
- tree setType ref1.tpe.resultType
+ def typedSingletonTypeTree(tree: SingletonTypeTree) = {
+ val ref1 = checkStable(
+ context.withImplicitsDisabled(
+ typed(tree.ref, EXPRmode | QUALmode | (mode & TYPEPATmode), AnyRefClass.tpe)
+ )
+ )
+ tree setType ref1.tpe.resultType
+ }
- case SelectFromTypeTree(qual, selector) =>
- val qual1 = typedType(qual, mode)
- if (qual1.tpe.isVolatile) error(tree.pos, "illegal type selection from volatile type "+qual.tpe)
- typedSelect(qual1, selector)
+ def typedSelectFromTypeTree(tree: SelectFromTypeTree) = {
+ val qual1 = typedType(tree.qualifier, mode)
+ if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual1)
+ else typedSelect(tree, qual1, tree.name)
+ }
- case CompoundTypeTree(templ) =>
- typedCompoundTypeTree(templ)
+ def typedTypeBoundsTree(tree: TypeBoundsTree) = {
+ val lo1 = typedType(tree.lo, mode)
+ val hi1 = typedType(tree.hi, mode)
+ treeCopy.TypeBoundsTree(tree, lo1, hi1) setType TypeBounds(lo1.tpe, hi1.tpe)
+ }
- case AppliedTypeTree(tpt, args) =>
- typedAppliedTypeTree(tpt, args)
+ def typedExistentialTypeTree(tree: ExistentialTypeTree) = {
+ val tree1 = typerWithLocalContext(context.makeNewScope(tree, context.owner)){
+ _.typedExistentialTypeTree(tree, mode)
+ }
+ checkExistentialsFeature(tree1.pos, tree1.tpe, "the existential type")
+ tree1
+ }
- case TypeBoundsTree(lo, hi) =>
- val lo1 = typedType(lo, mode)
- val hi1 = typedType(hi, mode)
- treeCopy.TypeBoundsTree(tree, lo1, hi1) setType TypeBounds(lo1.tpe, hi1.tpe)
+ def typedTypeTree(tree: TypeTree) = {
+ if (tree.original != null) {
+ val newTpt = typedType(tree.original, mode)
+ tree setType newTpt.tpe
+ newTpt match {
+ case tt @ TypeTree() => tree setOriginal tt.original
+ case _ => tree
+ }
+ }
+ else
+ // we should get here only when something before failed
+ // and we try again (@see tryTypedApply). In that case we can assign
+ // whatever type to tree; we just have to survive until a real error message is issued.
+ tree setType AnyClass.tpe
+ }
+ def typedFunction(fun: Function) = {
+ if (fun.symbol == NoSymbol)
+ fun.symbol = context.owner.newAnonymousFunctionValue(fun.pos)
- case etpt @ ExistentialTypeTree(_, _) =>
- newTyper(context.makeNewScope(tree, context.owner)).typedExistentialTypeTree(etpt, mode)
+ typerWithLocalContext(context.makeNewScope(fun, fun.symbol))(_.typedFunction(fun, mode, pt))
+ }
- case dc at TypeTreeWithDeferredRefCheck() => dc // TODO: should we re-type the wrapped tree? then we need to change TypeTreeWithDeferredRefCheck's representation to include the wrapped tree explicitly (instead of in its closure)
- case tpt @ TypeTree() =>
- if (tpt.original != null)
- tree setType typedType(tpt.original, mode).tpe
- else
- // we should get here only when something before failed
- // and we try again (@see tryTypedApply). In that case we can assign
- // whatever type to tree; we just have to survive until a real error message is issued.
- tree setType AnyClass.tpe
- case Import(expr, selectors) =>
- assert(forInteractive) // should not happen in normal circumstances.
- tree setType tree.symbol.tpe
- case _ =>
- abort("unexpected tree: " + tree.getClass + "\n" + tree)//debug
+ // begin typed1
+ //if (settings.debug.value && tree.isDef) log("typing definition of "+sym);//DEBUG
+ tree match {
+ case tree: Ident => typedIdentOrWildcard(tree)
+ case tree: Select => typedSelectOrSuperCall(tree)
+ case tree: Apply => typedApply(tree)
+ case tree: TypeTree => typedTypeTree(tree)
+ case tree: Literal => typedLiteral(tree)
+ case tree: This => typedThis(tree)
+ case tree: ValDef => typedValDef(tree)
+ case tree: DefDef => defDefTyper(tree).typedDefDef(tree)
+ case tree: Block => typerWithLocalContext(context.makeNewScope(tree, context.owner))(_.typedBlock(tree, mode, pt))
+ case tree: If => typedIf(tree)
+ case tree: TypeApply => typedTypeApply(tree)
+ case tree: AppliedTypeTree => typedAppliedTypeTree(tree)
+ case tree: Bind => typedBind(tree)
+ case tree: Function => typedFunction(tree)
+ case tree: Match => typedVirtualizedMatch(tree)
+ case tree: New => typedNew(tree)
+ case tree: Assign => typedAssign(tree.lhs, tree.rhs)
+ case tree: AssignOrNamedArg => typedAssign(tree.lhs, tree.rhs) // called by NamesDefaults in silent typecheck
+ case tree: Super => typedSuper(tree)
+ case tree: TypeBoundsTree => typedTypeBoundsTree(tree)
+ case tree: Typed => typedTyped(tree)
+ case tree: ClassDef => newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree)
+ case tree: ModuleDef => newTyper(context.makeNewScope(tree, sym.moduleClass)).typedModuleDef(tree)
+ case tree: TypeDef => typedTypeDef(tree)
+ case tree: LabelDef => labelTyper(tree).typedLabelDef(tree)
+ case tree: PackageDef => typedPackageDef(tree)
+ case tree: DocDef => typedDocDef(tree)
+ case tree: Annotated => typedAnnotated(tree)
+ case tree: SingletonTypeTree => typedSingletonTypeTree(tree)
+ case tree: SelectFromTypeTree => typedSelectFromTypeTree(tree)
+ case tree: CompoundTypeTree => typedCompoundTypeTree(tree)
+ case tree: ExistentialTypeTree => typedExistentialTypeTree(tree)
+ case tree: Return => typedReturn(tree)
+ case tree: Try => typedTry(tree)
+ case tree: Throw => typedThrow(tree)
+ case tree: Alternative => typedAlternative(tree)
+ case tree: Star => typedStar(tree)
+ case tree: UnApply => typedUnApply(tree)
+ case tree: ArrayValue => typedArrayValue(tree)
+ case tree: ApplyDynamic => typedApplyDynamic(tree)
+ case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
+ case tree: TypeTreeWithDeferredRefCheck => tree // TODO: retype the wrapped tree? TTWDRC would have to change to hold the wrapped tree (not a closure)
+ case tree: Import => assert(forInteractive, "!forInteractive") ; tree setType tree.symbol.tpe // should not happen in normal circumstances.
+ case _ => abort(s"unexpected tree: ${tree.getClass}\n$tree")
}
}
@@ -4230,91 +5613,79 @@ trait Typers extends Modes {
* @return ...
*/
def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+ lastTreeToTyper = tree
indentTyping()
- def dropExistential(tp: Type): Type = tp match {
- case ExistentialType(tparams, tpe) =>
- if (settings.debug.value)
- log("Dropping existential: " + tree + " " + tp)
- new SubstWildcardMap(tparams).apply(tp)
- case TypeRef(_, sym, _) if sym.isAliasType =>
- val tp0 = tp.normalize
- val tp1 = dropExistential(tp0)
- if (tp1 eq tp0) tp else tp1
- case _ => tp
- }
+ val ptPlugins = pluginsPt(pt, this, tree, mode)
- var alreadyTyped = false
+ val startByType = if (Statistics.canEnable) Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null
+ if (Statistics.canEnable) Statistics.incCounter(visitsByType, tree.getClass)
try {
- if (Statistics.enabled) {
- val t = currentTime()
- if (pendingTreeTypes.nonEmpty) {
- microsByType(pendingTreeTypes.head) += ((t - typerTime) / 1000).toInt
- }
- typerTime = t
- pendingTreeTypes = tree.getClass :: pendingTreeTypes
- }
if (context.retyping &&
- (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< pt))) {
+ (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins))) {
tree.tpe = null
if (tree.hasSymbol) tree.symbol = NoSymbol
}
- alreadyTyped = tree.tpe ne null
+ val alreadyTyped = tree.tpe ne null
var tree1: Tree = if (alreadyTyped) tree else {
printTyping(
- ptLine("typing %s: pt = %s".format(ptTree(tree), pt),
+ ptLine("typing %s: pt = %s".format(ptTree(tree), ptPlugins),
"undetparams" -> context.undetparams,
"implicitsEnabled" -> context.implicitsEnabled,
- "silent" -> !context.reportGeneralErrors,
+ "enrichmentEnabled" -> context.enrichmentEnabled,
+ "mode" -> modeString(mode),
+ "silent" -> context.bufferErrors,
"context.owner" -> context.owner
)
)
- val tree1 = typed1(tree, mode, dropExistential(pt))
+ typed1(tree, mode, dropExistential(ptPlugins))
+ }
+ // Can happen during erroneous compilation - error(s) have been
+ // reported, but we need to avoid causing an NPE with this tree
+ if (tree1.tpe eq null)
+ return setError(tree)
+
+ if (!alreadyTyped) {
printTyping("typed %s: %s%s".format(
ptTree(tree1), tree1.tpe,
if (isSingleType(tree1.tpe)) " with underlying "+tree1.tpe.widen else "")
)
- tree1
}
- tree1.tpe = addAnnotations(tree1, tree1.tpe)
- val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, tree)
+ tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, ptPlugins)
+ val result =
+ if (tree1.isEmpty) tree1
+ else {
+ val result = adapt(tree1, mode, ptPlugins, tree)
+ if (hasPendingMacroExpansions) macroExpandAll(this, result) else result
+ }
if (!alreadyTyped) {
printTyping("adapted %s: %s to %s, %s".format(
- tree1, tree1.tpe.widen, pt, context.undetparamsString)
+ tree1, tree1.tpe.widen, ptPlugins, context.undetparamsString)
) //DEBUG
}
-
-// for (t <- tree1.tpe) assert(t != WildcardType)
-// if ((mode & TYPEmode) != 0) println("type: "+tree1+" has type "+tree1.tpe)
- if (phase.id <= currentRun.typerPhase.id) signalDone(context.asInstanceOf[analyzer.Context], tree, result)
+ if (!isPastTyper) signalDone(context.asInstanceOf[analyzer.Context], tree, result)
result
} catch {
case ex: TypeError =>
tree.tpe = null
+ // The only problematic case are (recoverable) cyclic reference errors which can pop up almost anywhere.
printTyping("caught %s: while typing %s".format(ex, tree)) //DEBUG
- reportTypeError(tree.pos, ex)
+
+ reportTypeError(context, tree.pos, ex)
setError(tree)
case ex: Exception =>
if (settings.debug.value) // @M causes cyclic reference error
- Console.println("exception when typing "+tree+", pt = "+pt)
- if ((context ne null) && (context.unit ne null) &&
- (context.unit.source ne null) && (tree ne null))
+ Console.println("exception when typing "+tree+", pt = "+ptPlugins)
+ if (context != null && context.unit.exists && tree != null)
logError("AT: " + (tree.pos).dbgString, ex)
throw ex
}
finally {
deindentTyping()
-
- if (Statistics.enabled) {
- val t = currentTime()
- microsByType(pendingTreeTypes.head) += ((t - typerTime) / 1000).toInt
- visitsByType(pendingTreeTypes.head) += 1
- typerTime = t
- pendingTreeTypes = pendingTreeTypes.tail
- }
+ if (Statistics.canEnable) Statistics.popTimer(byTypeStack, startByType)
}
}
@@ -4334,7 +5705,11 @@ trait Typers extends Modes {
ret
}
+ def typedPos(pos: Position, mode: Int, pt: Type)(tree: Tree) = typed(atPos(pos)(tree), mode, pt)
def typedPos(pos: Position)(tree: Tree) = typed(atPos(pos)(tree))
+ // TODO: see if this formulation would impose any penalty, since
+ // it makes for a lot less casting.
+ // def typedPos[T <: Tree](pos: Position)(tree: T): T = typed(atPos(pos)(tree)).asInstanceOf[T]
/** Types expression <code>tree</code> with given prototype <code>pt</code>.
*
@@ -4368,7 +5743,25 @@ trait Typers extends Modes {
// We disable implicits because otherwise some constructs will
// type check which should not. The pattern matcher does not
// perform implicit conversions in an attempt to consummate a match.
- context.withImplicitsDisabled(typed(tree, PATTERNmode, pt))
+
+ // on the one hand,
+ // "abc" match { case Seq('a', 'b', 'c') => true }
+ // should be ruled out statically, otherwise this is a runtime
+ // error both because there is an implicit from String to Seq
+ // (even though such implicits are not used by the matcher) and
+ // because the typer is fine with concluding that "abc" might
+ // be of type "String with Seq[T]" and thus eligible for a call
+ // to unapplySeq.
+
+ // on the other hand, we want to be able to use implicits to add members retro-actively (e.g., add xml to StringContext)
+
+ // as a compromise, context.enrichmentEnabled tells adaptToMember to go ahead and enrich,
+ // but arbitrary conversions (in adapt) are disabled
+ // TODO: can we achieve the pattern matching bit of the string interpolation SIP without this?
+ typingInPattern(context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt))) match {
+ case tpt if tpt.isType => PatternMustBeValue(tpt, pt); tpt
+ case pat => pat
+ }
}
/** Types a (fully parameterized) type tree */
@@ -4392,60 +5785,66 @@ trait Typers extends Modes {
def typedTypeConstructor(tree: Tree, mode: Int): Tree = {
val result = typed(tree, forTypeMode(mode) | FUNmode, WildcardType)
- val restpe = result.tpe.normalize // normalize to get rid of type aliases for the following check (#1241)
- if (!phase.erasedTypes && restpe.isInstanceOf[TypeRef] && !restpe.prefix.isStable && !context.unit.isJava) {
- // The isJava exception if OK only because the only type constructors scalac gets
- // to see are those in the signatures. These do not need a unique object as a prefix.
- // The situation is different for new's and super's, but scalac does not look deep
- // enough to see those. See #3938
- error(tree.pos, restpe.prefix+" is not a legal prefix for a constructor")
- }
-
- //@M fix for #2208
- // if there are no type arguments, normalization does not bypass any checks, so perform it to get rid of AnyRef
- if(result.tpe.typeArgs.isEmpty) {
- // minimal check: if(result.tpe.typeSymbolDirect eq AnyRefClass) {
- // must expand the fake AnyRef type alias, because bootstrapping (init in Definitions) is not
- // designed to deal with the cycles in the scala package (ScalaObject extends
- // AnyRef, but the AnyRef type alias is entered after the scala package is
- // loaded and completed, so that ScalaObject is unpickled while AnyRef is not
- // yet defined )
- result setType(restpe)
- } else { // must not normalize: type application must be (bounds-)checked (during RefChecks), see #2208
- // during uncurry (after refchecks), all types are normalized
- result
+ // get rid of type aliases for the following check (#1241)
+ result.tpe.dealias match {
+ case restpe @ TypeRef(pre, _, _) if !phase.erasedTypes && !pre.isStable && !context.unit.isJava =>
+ // The isJava exception if OK only because the only type constructors scalac gets
+ // to see are those in the signatures. These do not need a unique object as a prefix.
+ // The situation is different for new's and super's, but scalac does not look deep
+ // enough to see those. See #3938
+ ConstructorPrefixError(tree, restpe)
+ case _ =>
+ // must not normalize: type application must be (bounds-)checked (during RefChecks), see #2208
+ // during uncurry (after refchecks), all types are normalized
+ result
}
}
def typedTypeConstructor(tree: Tree): Tree = typedTypeConstructor(tree, NOmode)
def computeType(tree: Tree, pt: Type): Type = {
+ // macros employ different logic of `computeType`
+ assert(!context.owner.isTermMacro, context.owner)
val tree1 = typed(tree, pt)
transformed(tree) = tree1
- packedType(tree1, context.owner)
+ val tpe = packedType(tree1, context.owner)
+ checkExistentialsFeature(tree.pos, tpe, "inferred existential type")
+ tpe
}
- def transformedOrTyped(tree: Tree, mode: Int, pt: Type): Tree = transformed.get(tree) match {
- case Some(tree1) => transformed -= tree; tree1
- case None => typed(tree, mode, pt)
+ def computeMacroDefType(tree: Tree, pt: Type): Type = {
+ assert(context.owner.isTermMacro, context.owner)
+ assert(tree.symbol.isTermMacro, tree.symbol)
+ assert(tree.isInstanceOf[DefDef], tree.getClass)
+ val ddef = tree.asInstanceOf[DefDef]
+
+ val tree1 =
+ if (transformed contains ddef.rhs) {
+ // macro defs are typechecked in `methodSig` (by calling this method) in order to establish their link to macro implementation asap
+ // if a macro def doesn't have explicitly specified return type, this method will be called again by `assignTypeToTree`
+ // here we guard against this case
+ transformed(ddef.rhs)
+ } else {
+ val tree1 = typedMacroBody(this, ddef)
+ transformed(ddef.rhs) = tree1
+ tree1
+ }
+
+ val isMacroBodyOkay = !tree.symbol.isErroneous && !(tree1 exists (_.isErroneous)) && tree1 != EmptyTree
+ val shouldInheritMacroImplReturnType = ddef.tpt.isEmpty
+ if (isMacroBodyOkay && shouldInheritMacroImplReturnType) computeMacroDefTypeFromMacroImpl(ddef, tree1.symbol) else AnyClass.tpe
}
- def findManifest(tp: Type, full: Boolean) = atPhase(currentRun.typerPhase) {
- inferImplicit(
- EmptyTree,
- appliedType((if (full) FullManifestClass else PartialManifestClass).typeConstructor, List(tp)),
- true, false, context)
+ def transformedOr(tree: Tree, op: => Tree): Tree = transformed.get(tree) match {
+ case Some(tree1) => transformed -= tree; tree1
+ case None => op
}
- def getManifestTree(pos: Position, tp: Type, full: Boolean): Tree = {
- val manifestOpt = findManifest(tp, full)
- if (manifestOpt.tree.isEmpty) {
- error(pos, "cannot find "+(if (full) "" else "class ")+"manifest for element type "+tp)
- Literal(Constant(null))
- } else {
- manifestOpt.tree
- }
+ def transformedOrTyped(tree: Tree, mode: Int, pt: Type): Tree = transformed.get(tree) match {
+ case Some(tree1) => transformed -= tree; tree1
+ case None => typed(tree, mode, pt)
}
+
/*
def convertToTypeTree(tree: Tree): Tree = tree match {
case TypeTree() => tree
@@ -4455,3 +5854,23 @@ trait Typers extends Modes {
}
}
+object TypersStats {
+ import scala.reflect.internal.TypesStats._
+ import scala.reflect.internal.BaseTypeSeqsStats._
+ val typedIdentCount = Statistics.newCounter("#typechecked identifiers")
+ val typedSelectCount = Statistics.newCounter("#typechecked selections")
+ val typedApplyCount = Statistics.newCounter("#typechecked applications")
+ val rawTypeFailed = Statistics.newSubCounter (" of which in failed", rawTypeCount)
+ val subtypeFailed = Statistics.newSubCounter(" of which in failed", subtypeCount)
+ val findMemberFailed = Statistics.newSubCounter(" of which in failed", findMemberCount)
+ val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
+ val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
+ val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
+ val failedSilentNanos = Statistics.newSubTimer("time spent in failed", typerNanos)
+ val failedApplyNanos = Statistics.newSubTimer(" failed apply", typerNanos)
+ val failedOpEqNanos = Statistics.newSubTimer(" failed op=", typerNanos)
+ val isReferencedNanos = Statistics.newSubTimer("time spent ref scanning", typerNanos)
+ val visitsByType = Statistics.newByClass("#visits by tree node", "typer")(Statistics.newCounter(""))
+ val byTypeNanos = Statistics.newByClass("time spent by tree node", "typer")(Statistics.newStackableTimer("", typerNanos))
+ val byTypeStack = Statistics.newTimerStack()
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index baf790e..31c5a61 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -21,54 +21,29 @@ trait Unapplies extends ast.TreeDSL
import CODE.{ CASE => _, _ }
import treeInfo.{ isRepeatedParamType, isByNameParamType }
- /** returns type list for return type of the extraction */
- def unapplyTypeList(ufn: Symbol, ufntpe: Type) = {
- assert(ufn.isMethod)
- //Console.println("utl "+ufntpe+" "+ufntpe.typeSymbol)
- ufn.name match {
- case nme.unapply => unapplyTypeListFromReturnType(ufntpe)
- case nme.unapplySeq => unapplyTypeListFromReturnTypeSeq(ufntpe)
- case _ => throw new TypeError(ufn+" is not an unapply or unapplySeq")
- }
- }
- /** (the inverse of unapplyReturnTypeSeq)
- * for type Boolean, returns Nil
- * for type Option[T] or Some[T]:
- * - returns T0...Tn if n>0 and T <: Product[T0...Tn]]
- * - returns T otherwise
- */
- def unapplyTypeListFromReturnType(tp1: Type): List[Type] = {
- val tp = unapplyUnwrap(tp1)
- tp.typeSymbol match { // unapplySeqResultToMethodSig
- case BooleanClass => Nil
- case OptionClass | SomeClass =>
- val prod = tp.typeArgs.head
- getProductArgs(prod) match {
- case Some(xs) if xs.size > 1 => xs // n > 1
- case _ => List(prod) // special n == 0 || n == 1
- }
- case _ =>
- throw new TypeError("result type "+tp+" of unapply not in {Boolean, Option[_], Some[_]}")
- }
- }
+ private val unapplyParamName = nme.x_0
+
+
+ // In the typeCompleter (templateSig) of a case class (resp it's module),
+ // synthetic `copy` (reps `apply`, `unapply`) methods are added. To compute
+ // their signatures, the corresponding ClassDef is needed. During naming (in
+ // `enterClassDef`), the case class ClassDef is added as an attachment to the
+ // moduleClass symbol of the companion module.
+ class ClassForCaseCompanionAttachment(val caseClass: ClassDef)
- /** let type be the result type of the (possibly polymorphic) unapply method
- * for type Option[T] or Some[T]
- * -returns T0...Tn-1,Tn* if n>0 and T <: Product[T0...Tn-1,Seq[Tn]]],
- * -returns R* if T = Seq[R]
+ /** returns type list for return type of the extraction
+ * @see extractorFormalTypes
*/
- def unapplyTypeListFromReturnTypeSeq(tp1: Type): List[Type] = {
- val tp = unapplyUnwrap(tp1)
- tp.typeSymbol match {
- case OptionClass | SomeClass =>
- val ts = unapplyTypeListFromReturnType(tp1)
- val last1 = (ts.last baseType SeqClass) match {
- case TypeRef(pre, SeqClass, args) => typeRef(pre, RepeatedParamClass, args)
- case _ => throw new TypeError("last not seq")
- }
- ts.init :+ last1
- case _ =>
- throw new TypeError("result type "+tp+" of unapply not in {Option[_], Some[_]}")
+ def unapplyTypeList(pos: Position, ufn: Symbol, ufntpe: Type, args: List[Tree]) = {
+ assert(ufn.isMethod, ufn)
+ val nbSubPats = args.length
+ //Console.println("utl "+ufntpe+" "+ufntpe.typeSymbol)
+ ufn.name match {
+ case nme.unapply | nme.unapplySeq =>
+ val (formals, _) = extractorFormalTypes(pos, unapplyUnwrap(ufntpe), nbSubPats, ufn, treeInfo.effectivePatternArity(args))
+ if (formals == null) throw new TypeError(s"$ufn of type $ufntpe cannot extract $nbSubPats sub-patterns")
+ else formals
+ case _ => throw new TypeError(ufn+" is not an unapply or unapplySeq")
}
}
@@ -106,25 +81,47 @@ trait Unapplies extends ast.TreeDSL
private def toIdent(x: DefTree) = Ident(x.name) setPos x.pos.focus
private def classType(cdef: ClassDef, tparams: List[TypeDef]): Tree = {
- val tycon = REF(cdef.symbol)
+ // SI-7033 Unattributed to avoid forcing `cdef.symbol.info`.
+ val tycon = Ident(cdef.symbol)
if (tparams.isEmpty) tycon else AppliedTypeTree(tycon, tparams map toIdent)
}
private def constrParamss(cdef: ClassDef): List[List[ValDef]] = {
val DefDef(_, _, _, vparamss, _, _) = treeInfo firstConstructor cdef.impl.body
- vparamss map (_ map copyUntyped[ValDef])
+ mmap(vparamss)(copyUntyped[ValDef])
}
/** The return value of an unapply method of a case class C[Ts]
* @param param The name of the parameter of the unapply method, assumed to be of type C[Ts]
* @param caseclazz The case class C[Ts]
*/
- private def caseClassUnapplyReturnValue(param: Name, caseclazz: Symbol) = {
- def caseFieldAccessorValue(selector: Symbol): Tree = Ident(param) DOT selector
+ private def caseClassUnapplyReturnValue(param: Name, caseclazz: ClassDef) = {
+ def caseFieldAccessorValue(selector: ValDef): Tree = {
+ val accessorName = selector.name
+ val privateLocalParamAccessor = caseclazz.impl.body.collectFirst {
+ case dd: ValOrDefDef if dd.name == accessorName && dd.mods.isPrivateLocal => dd.symbol
+ }
+ privateLocalParamAccessor match {
+ case None =>
+ // Selecting by name seems to be the most straight forward way here to
+ // avoid forcing the symbol of the case class in order to list the accessors.
+ val maybeRenamedAccessorName = caseAccessorName(caseclazz.symbol, accessorName)
+ Ident(param) DOT maybeRenamedAccessorName
+ case Some(sym) =>
+ // But, that gives a misleading error message in neg/t1422.scala, where a case
+ // class has an illegal private[this] parameter. We can detect this by checking
+ // the modifiers on the param accessors.
+ //
+ // We just generate a call to that param accessor here, which gives us an inaccessible
+ // symbol error, as before.
+ Ident(param) DOT sym
+ }
+ }
- caseclazz.caseFieldAccessors match {
- case Nil => TRUE
- case xs => SOME(xs map caseFieldAccessorValue: _*)
+ // Working with trees, rather than symbols, to avoid cycles like SI-5082
+ constrParamss(caseclazz).take(1).flatten match {
+ case Nil => TRUE
+ case xs => SOME(xs map caseFieldAccessorValue: _*)
}
}
@@ -138,10 +135,10 @@ trait Unapplies extends ast.TreeDSL
def createFun = gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true)
def parents = if (inheritFromFun) List(createFun) else Nil
def toString = DefDef(
- Modifiers(OVERRIDE | FINAL),
+ Modifiers(OVERRIDE | FINAL | SYNTHETIC),
nme.toString_,
Nil,
- List(Nil),
+ ListOfNil,
TypeTree(),
Literal(Constant(cdef.name.decode)))
@@ -149,65 +146,102 @@ trait Unapplies extends ast.TreeDSL
}
def companionModuleDef(cdef: ClassDef, parents: List[Tree] = Nil, body: List[Tree] = Nil): ModuleDef = atPos(cdef.pos.focus) {
- val allParents = parents :+ gen.scalaScalaObjectConstr
ModuleDef(
Modifiers(cdef.mods.flags & AccessFlags | SYNTHETIC, cdef.mods.privateWithin),
cdef.name.toTermName,
- Template(allParents, emptyValDef, NoMods, Nil, List(Nil), body, cdef.impl.pos.focus))
+ Template(parents, emptyValDef, NoMods, Nil, ListOfNil, body, cdef.impl.pos.focus))
}
private val caseMods = Modifiers(SYNTHETIC | CASE)
/** The apply method corresponding to a case class
*/
- def caseModuleApplyMeth(cdef: ClassDef): DefDef = {
+ def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef): DefDef = {
val tparams = cdef.tparams map copyUntypedInvariant
val cparamss = constrParamss(cdef)
+ def classtpe = classType(cdef, tparams)
atPos(cdef.pos.focus)(
- DefDef(caseMods, nme.apply, tparams, cparamss, classType(cdef, tparams),
- New(classType(cdef, tparams), cparamss map (_ map gen.paramToArg)))
+ DefDef(mods, name, tparams, cparamss, classtpe,
+ New(classtpe, mmap(cparamss)(gen.paramToArg)))
)
}
+ /** The apply method corresponding to a case class
+ */
+ def caseModuleApplyMeth(cdef: ClassDef): DefDef = factoryMeth(caseMods, nme.apply, cdef)
+
/** The unapply method corresponding to a case class
*/
def caseModuleUnapplyMeth(cdef: ClassDef): DefDef = {
val tparams = cdef.tparams map copyUntypedInvariant
- val paramName = newTermName("x$0")
val method = constrParamss(cdef) match {
case xs :: _ if xs.nonEmpty && isRepeatedParamType(xs.last.tpt) => nme.unapplySeq
case _ => nme.unapply
}
- val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), paramName, classType(cdef, tparams), EmptyTree))
+ val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree))
val ifNull = if (constrParamss(cdef).head.isEmpty) FALSE else REF(NoneModule)
- val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef.symbol) }, ifNull)(Ident(paramName))
+ val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef) }, ifNull)(Ident(unapplyParamName))
atPos(cdef.pos.focus)(
DefDef(caseMods, method, tparams, List(cparams), TypeTree(), body)
)
}
+ /**
+ * Generates copy methods for case classes. Copy only has defaults on the first
+ * parameter list, as of SI-5009.
+ *
+ * The parameter types of the copy method need to be exactly the same as the parameter
+ * types of the primary constructor. Just copying the TypeTree is not enough: a type `C`
+ * might refer to something else *inside* the class (i.e. as parameter type of `copy`)
+ * than *outside* the class (i.e. in the class parameter list).
+ *
+ * One such example is t0054.scala:
+ * class A {
+ * case class B(x: C) extends A { def copy(x: C = x) = ... }
+ * class C {} ^ ^
+ * } (1) (2)
+ *
+ * The reference (1) to C is `A.this.C`. The reference (2) is `B.this.C` - not the same.
+ *
+ * This is fixed with a hack currently. `Unapplies.caseClassCopyMeth`, which creates the
+ * copy method, uses empty `TypeTree()` nodes for parameter types.
+ *
+ * In `Namers.enterDefDef`, the copy method gets a special type completer (`enterCopyMethod`).
+ * Before computing the body type of `copy`, the class parameter types are assigned the copy
+ * method parameters.
+ *
+ * This attachment class stores the copy method parameter ValDefs as an attachment in the
+ * ClassDef of the case class.
+ */
def caseClassCopyMeth(cdef: ClassDef): Option[DefDef] = {
def isDisallowed(vd: ValDef) = isRepeatedParamType(vd.tpt) || isByNameParamType(vd.tpt)
- val cparamss = constrParamss(cdef)
- val flat = cparamss flatten
+ val classParamss = constrParamss(cdef)
- if (flat.isEmpty || cdef.symbol.hasAbstractFlag || (flat exists isDisallowed)) None
+ if (cdef.symbol.hasAbstractFlag || mexists(classParamss)(isDisallowed)) None
else {
+ def makeCopyParam(vd: ValDef, putDefault: Boolean) = {
+ val rhs = if (putDefault) toIdent(vd) else EmptyTree
+ val flags = PARAM | (vd.mods.flags & IMPLICIT) | (if (putDefault) DEFAULTPARAM else 0)
+ // empty tpt: see comment above
+ val tpt = atPos(vd.pos.focus)(TypeTree() setOriginal vd.tpt)
+ treeCopy.ValDef(vd, Modifiers(flags), vd.name, tpt, rhs)
+ }
+
val tparams = cdef.tparams map copyUntypedInvariant
- // the parameter types have to be exactly the same as the constructor's parameter types; so it's
- // not good enough to just duplicated the (untyped) tpt tree; the parameter types are removed here
- // and re-added in ``finishWith'' in the namer.
- def paramWithDefault(vd: ValDef) =
- treeCopy.ValDef(vd, vd.mods | DEFAULTPARAM, vd.name, atPos(vd.pos.focus)(TypeTree() setOriginal vd.tpt), toIdent(vd))
-
- val paramss = cparamss map (_ map paramWithDefault)
- val classTpe = classType(cdef, tparams)
-
- Some(atPos(cdef.pos.focus)(
- DefDef(Modifiers(SYNTHETIC), nme.copy, tparams, paramss, classTpe,
- New(classTpe, paramss map (_ map toIdent)))
- ))
+ val paramss = classParamss match {
+ case Nil => Nil
+ case ps :: pss =>
+ ps.map(makeCopyParam(_, putDefault = true)) :: mmap(pss)(makeCopyParam(_, putDefault = false))
+ }
+
+ val classTpe = classType(cdef, tparams)
+ val argss = mmap(paramss)(toIdent)
+ val body: Tree = New(classTpe, argss)
+ val copyDefDef = atPos(cdef.pos.focus)(
+ DefDef(Modifiers(SYNTHETIC), nme.copy, tparams, paramss, TypeTree(), body)
+ )
+ Some(copyDefDef)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Variances.scala b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
index 0391188..ea436a7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Variances.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
@@ -1,5 +1,5 @@
/* NSC -- new scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -26,21 +26,21 @@ trait Variances {
private def cut(v: Int): Int =
if (v == VARIANCES) v else 0
- /** Compute variance of type parameter `tparam' in types of all symbols `sym'. */
+ /** Compute variance of type parameter `tparam` in types of all symbols `sym`. */
def varianceInSyms(syms: List[Symbol])(tparam: Symbol): Int =
(VARIANCES /: syms) ((v, sym) => v & varianceInSym(sym)(tparam))
- /** Compute variance of type parameter `tparam' in type of symbol `sym'. */
+ /** Compute variance of type parameter `tparam` in type of symbol `sym`. */
def varianceInSym(sym: Symbol)(tparam: Symbol): Int =
if (sym.isAliasType) cut(varianceInType(sym.info)(tparam))
else varianceInType(sym.info)(tparam)
- /** Compute variance of type parameter `tparam' in all types `tps'. */
+ /** Compute variance of type parameter `tparam` in all types `tps`. */
def varianceInTypes(tps: List[Type])(tparam: Symbol): Int =
(VARIANCES /: tps) ((v, tp) => v & varianceInType(tp)(tparam))
- /** Compute variance of type parameter `tparam' in all type arguments
- * <code>tps</code> which correspond to formal type parameters `tparams1'.
+ /** Compute variance of type parameter `tparam` in all type arguments
+ * <code>tps</code> which correspond to formal type parameters `tparams1`.
*/
def varianceInArgs(tps: List[Type], tparams1: List[Symbol])(tparam: Symbol): Int = {
var v: Int = VARIANCES;
@@ -53,12 +53,12 @@ trait Variances {
v
}
- /** Compute variance of type parameter `tparam' in all type annotations `annots'. */
+ /** Compute variance of type parameter `tparam` in all type annotations `annots`. */
def varianceInAttribs(annots: List[AnnotationInfo])(tparam: Symbol): Int = {
(VARIANCES /: annots) ((v, annot) => v & varianceInAttrib(annot)(tparam))
}
- /** Compute variance of type parameter `tparam' in type annotation `annot'. */
+ /** Compute variance of type parameter `tparam` in type annotation `annot`. */
def varianceInAttrib(annot: AnnotationInfo)(tparam: Symbol): Int = {
varianceInType(annot.atp)(tparam)
}
@@ -67,10 +67,14 @@ trait Variances {
def varianceInType(tp: Type)(tparam: Symbol): Int = tp match {
case ErrorType | WildcardType | NoType | NoPrefix | ThisType(_) | ConstantType(_) =>
VARIANCES
+ case BoundedWildcardType(bounds) =>
+ varianceInType(bounds)(tparam)
case SingleType(pre, sym) =>
varianceInType(pre)(tparam)
case TypeRef(pre, sym, args) =>
if (sym == tparam) COVARIANT
+ // tparam cannot occur in tp's args if tp is a type constructor (those don't have args)
+ else if (tp.isHigherKinded) varianceInType(pre)(tparam)
else varianceInType(pre)(tparam) & varianceInArgs(args, sym.typeParams)(tparam)
case TypeBounds(lo, hi) =>
flip(varianceInType(lo)(tparam)) & varianceInType(hi)(tparam)
diff --git a/src/compiler/scala/tools/nsc/util/BitSet.scala b/src/compiler/scala/tools/nsc/util/BitSet.scala
deleted file mode 100644
index a8d54c2..0000000
--- a/src/compiler/scala/tools/nsc/util/BitSet.scala
+++ /dev/null
@@ -1,164 +0,0 @@
-package scala.tools.nsc
-package util
-
-import BitSet._
-
-abstract class BitSet {
-
- protected def nwords: Int
- protected def word(idx: Int): Long
- protected def updateWord(idx: Int, w: Long): BitSet
-
- def + (elem: Int): BitSet = {
- require(elem >= 0)
- if (contains(elem)) this
- else {
- val idx = elem >> LogWL
- updateWord(idx, word(idx) | (1L << elem))
- }
- }
-
- def - (elem: Int): BitSet = {
- require(elem >= 0)
- if (contains(elem)) {
- val idx = elem >> LogWL
- updateWord(idx, word(idx) & ~(1L << elem))
- } else this
- }
-
- def | (other: BitSet): BitSet = {
- val len = this.nwords max other.nwords
- val words = new Array[Long](len)
- for (idx <- 0 until len)
- words(idx) = this.word(idx) | other.word(idx)
- fromArray(words)
- }
-
- def & (other: BitSet): BitSet = {
- val len = this.nwords min other.nwords
- val words = new Array[Long](len)
- for (idx <- 0 until len)
- words(idx) = this.word(idx) & other.word(idx)
- fromArray(words)
- }
-
- def &~ (other: BitSet): BitSet = {
- val len = this.nwords
- val words = new Array[Long](len)
- for (idx <- 0 until len)
- words(idx) = this.word(idx) & ~other.word(idx)
- fromArray(words)
- }
-
- def ^ (other: BitSet): BitSet = {
- val len = this.nwords max other.nwords
- val words = new Array[Long](len)
- for (idx <- 0 until len)
- words(idx) = this.word(idx) ^ other.word(idx)
- fromArray(words)
- }
-
- def contains(elem: Int): Boolean =
- 0 <= elem && (word(elem >> LogWL) & (1L << elem)) != 0L
-
- def subSet(other: BitSet): Boolean =
- (0 until nwords) forall (idx => (this.word(idx) & ~ other.word(idx)) == 0L)
-
- override def equals(other: Any) = other match {
- case that: BitSet =>
- (0 until (this.nwords max that.nwords)) forall (idx => this.word(idx) == that.word(idx))
- case _ =>
- false
- }
-
- override def hashCode: Int = {
- import scala.util.MurmurHash._
- var h = startHash(hashSeed)
- var c = startMagicA
- var k = startMagicB
- for (idx <- 0 until nwords) {
- val w = word(idx)
- h = extendHash(h, (w>>>32).toInt, c, k)
- c = nextMagicA(c)
- k = nextMagicB(k)
- h = extendHash(h, w.toInt, c, k)
- c = nextMagicA(c)
- k = nextMagicB(k)
- }
- finalizeHash(h)
- }
-
- def addString(sb: StringBuilder, start: String, sep: String, end: String) {
- sb append start
- var pre = ""
- for (i <- 0 until nwords * WordLength)
- if (contains(i)) {
- sb append pre append i
- pre = sep
- }
- sb append end
- }
-
- def mkString(start: String, sep: String, end: String) = {
- val sb = new StringBuilder
- addString(sb, start, sep, end)
- sb.toString
- }
-
- override def toString = mkString("BitSet(", ", ", ")")
-}
-
-object BitSet {
-
- private final val WordLength = 64
- private final val LogWL = 6
- private val hashSeed = "BitSet".hashCode
-
- val empty: BitSet = new BitSet1(0L)
-
- def apply(elems: Int*) = (empty /: elems) (_ + _)
-
- def fromArray(elems: Array[Long]) = {
- val len = elems.length
- if (len == 0) empty
- else if (len == 1) new BitSet1(elems(0))
- else if (len == 2) new BitSet2(elems(0), elems(1))
- else new BitSetN(elems)
- }
-
- private def updateArray(elems: Array[Long], idx: Int, w: Long): BitSet = {
- var len = elems.length
- while (len > 0 && (elems(len - 1) == 0L || w == 0L && idx == len - 1)) len -= 1
- var newlen = len
- if (idx >= newlen && w != 0L) newlen = idx + 1
- val newelems = new Array[Long](newlen)
- Array.copy(elems, 0, newelems, 0, len)
- if (idx < newlen) newelems(idx) = w
- else assert(w == 0L)
- fromArray(newelems)
- }
-
- class BitSet1(val elems: Long) extends BitSet {
- protected def nwords = 1
- protected def word(idx: Int) = if (idx == 0) elems else 0L
- protected def updateWord(idx: Int, w: Long): BitSet =
- if (idx == 0) new BitSet1(w)
- else if (idx == 1) new BitSet2(elems, w)
- else updateArray(Array(elems), idx, w)
- }
-
- class BitSet2(val elems0: Long, elems1: Long) extends BitSet {
- protected def nwords = 2
- protected def word(idx: Int) = if (idx == 0) elems0 else if (idx == 1) elems1 else 0L
- protected def updateWord(idx: Int, w: Long): BitSet =
- if (idx == 0) new BitSet2(w, elems1)
- else if (idx == 1) new BitSet2(elems0, w)
- else updateArray(Array(elems0, elems1), idx, w)
- }
-
- class BitSetN(val elems: Array[Long]) extends BitSet {
- protected def nwords = elems.length
- protected def word(idx: Int) = if (idx < nwords) elems(idx) else 0L
- protected def updateWord(idx: Int, w: Long): BitSet = updateArray(elems, idx, w)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
index 6f929a0..5c6f525 100644
--- a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
@@ -1,12 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package util
-import Chars._
+import scala.reflect.internal.Chars._
abstract class CharArrayReader { self =>
diff --git a/src/compiler/scala/tools/nsc/util/Chars.scala b/src/compiler/scala/tools/nsc/util/Chars.scala
deleted file mode 100644
index 297afb8..0000000
--- a/src/compiler/scala/tools/nsc/util/Chars.scala
+++ /dev/null
@@ -1,94 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2006-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package util
-
-import annotation.{ tailrec, switch }
-import java.lang.{ Character => JCharacter }
-
-/** Contains constants and classifier methods for characters */
-trait Chars {
- // Be very careful touching these.
- // Apparently trivial changes to the way you write these constants
- // will cause Scanners.scala to go from a nice efficient switch to
- // a ghastly nested if statement which will bring the type checker
- // to its knees. See ticket #1456
- // Martin: (this should be verified now that the pattern rules have been redesigned).
- final val LF = '\u000A'
- final val FF = '\u000C'
- final val CR = '\u000D'
- final val SU = '\u001A'
-
- /** Convert a character digit to an Int according to given base,
- * -1 if no success */
- def digit2int(ch: Char, base: Int): Int = {
- if ('0' <= ch && ch <= '9' && ch < '0' + base)
- ch - '0'
- else if ('A' <= ch && ch < 'A' + base - 10)
- ch - 'A' + 10
- else if ('a' <= ch && ch < 'a' + base - 10)
- ch - 'a' + 10
- else
- -1
- }
-
- /** Convert a character to a backslash-u escape */
- def char2uescape(c: Char): String = {
- var rest = c.toInt
- val buf = new StringBuilder
- for (i <- 1 to 4) {
- buf ++= (rest % 16).toHexString
- rest = rest / 16
- }
- "\\u" + buf.toString.reverse
- }
-
- /** Is character a line break? */
- @inline def isLineBreakChar(c: Char) = (c: @switch) match {
- case LF|FF|CR|SU => true
- case _ => false
- }
-
- /** Is character a whitespace character (but not a new line)? */
- def isWhitespace(c: Char) =
- c == ' ' || c == '\t' || c == CR
-
- /** Can character form part of a doc comment variable $xxx? */
- def isVarPart(c: Char) =
- '0' <= c && c <= '9' || 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z'
-
- /** Can character start an alphanumeric Scala identifier? */
- def isIdentifierStart(c: Char): Boolean =
- (c == '_') || (c == '$') || Character.isUnicodeIdentifierStart(c)
-
- /** Can character form part of an alphanumeric Scala identifier? */
- def isIdentifierPart(c: Char) =
- (c == '$') || Character.isUnicodeIdentifierPart(c)
-
- /** Is character a math or other symbol in Unicode? */
- def isSpecial(c: Char) = {
- val chtp = Character.getType(c)
- chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt
- }
-
- private final val otherLetters = Set[Char]('\u0024', '\u005F') // '$' and '_'
- private final val letterGroups = {
- import JCharacter._
- Set[Byte](LOWERCASE_LETTER, UPPERCASE_LETTER, OTHER_LETTER, TITLECASE_LETTER, LETTER_NUMBER)
- }
- def isScalaLetter(ch: Char) = letterGroups(JCharacter.getType(ch).toByte) || otherLetters(ch)
-
- /** Can character form part of a Scala operator name? */
- def isOperatorPart(c : Char) : Boolean = (c: @switch) match {
- case '~' | '!' | '@' | '#' | '%' |
- '^' | '*' | '+' | '-' | '<' |
- '>' | '?' | ':' | '=' | '&' |
- '|' | '/' | '\\' => true
- case c => isSpecial(c)
- }
-}
-
-object Chars extends Chars { }
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index 23b53fb..a62c87e 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2006-2011 LAMP/EPFL
+ * Copyright 2006-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -9,10 +9,14 @@ package util
import java.net.URL
import scala.collection.{ mutable, immutable }
-import io.{ File, Directory, Path, Jar, AbstractFile, ClassAndJarInfo }
-import scala.tools.util.StringOps.splitWhere
+import io.{ File, Directory, Path, Jar, AbstractFile }
+import scala.reflect.internal.util.StringOps.splitWhere
+import scala.reflect.ClassTag
import Jar.isJarOrZip
import File.pathSeparator
+import java.net.MalformedURLException
+import java.util.regex.PatternSyntaxException
+import scala.reflect.runtime.ReflectionUtils
/** <p>
* This module provides star expansion of '-classpath' option arguments, behaves the same as
@@ -22,14 +26,6 @@ import File.pathSeparator
* @author Stepan Koltsov
*/
object ClassPath {
- def scalaLibrary = locate[ScalaObject]
- def scalaCompiler = locate[Global]
-
- def info[T: ClassManifest] = new ClassAndJarInfo[T]
- def locate[T: ClassManifest] = info[T] rootClasspath
- def locateJar[T: ClassManifest] = info[T].rootPossibles find (x => isJarOrZip(x)) map (x => File(x))
- def locateDir[T: ClassManifest] = info[T].rootPossibles find (_.isDirectory) map (_.toDirectory)
-
/** Expand single path entry */
private def expandS(pattern: String): List[String] = {
val wildSuffix = File.separator + "*"
@@ -45,32 +41,15 @@ object ClassPath {
if (pattern == "*") lsDir(Directory("."))
else if (pattern endsWith wildSuffix) lsDir(Directory(pattern dropRight 2))
else if (pattern contains '*') {
- val regexp = ("^%s$" format pattern.replaceAll("""\*""", """.*""")).r
- lsDir(Directory(pattern).parent, regexp findFirstIn _ isDefined)
+ try {
+ val regexp = ("^" + pattern.replaceAllLiterally("""\*""", """.*""") + "$").r
+ lsDir(Directory(pattern).parent, regexp findFirstIn _ isDefined)
+ }
+ catch { case _: PatternSyntaxException => List(pattern) }
}
else List(pattern)
}
- /** Return duplicated classpath entries as
- * (name, list of origins)
- * in the order they occur on the path.
- */
- // def findDuplicates(cp: ClassPath[_]) = {
- // def toFullName(x: (String, _, cp.AnyClassRep)) = x._1 + "." + x._3.name
- // def toOriginString(x: ClassPath[_]) = x.origin getOrElse x.name
- //
- // /** Flatten everything into tuples, recombine grouped by name, filter down to 2+ entries. */
- // val flattened = (
- // for ((pkgName, pkg) <- cp.allPackagesWithNames ; clazz <- pkg.classes) yield
- // (pkgName, pkg, clazz)
- // )
- // val multipleAppearingEntries = flattened groupBy toFullName filter (_._2.size > 1)
- //
- // /** Extract results. */
- // for (name <- flattened map toFullName distinct ; dups <- multipleAppearingEntries get name) yield
- // (name, dups map { case (_, cp, _) => toOriginString(cp) })
- // }
-
/** Split classpath using platform-dependent path separator */
def split(path: String): List[String] = (path split pathSeparator).toList filterNot (_ == "") distinct
@@ -108,11 +87,22 @@ object ClassPath {
case dir => dir filter (_.isClassContainer) map (x => new java.io.File(dir.file, x.name) getPath) toList
}
}
+ /** Expand manifest jar classpath entries: these are either urls, or paths
+ * relative to the location of the jar.
+ */
+ def expandManifestPath(jarPath: String): List[URL] = {
+ val file = File(jarPath)
+ if (!file.isFile) return Nil
+
+ val baseDir = file.parent
+ new Jar(file).classPathElements map (elem =>
+ specToURL(elem) getOrElse (baseDir / elem).toURL
+ )
+ }
/** A useful name filter. */
- def isTraitImplementation(name: String) = name endsWith "$class.class"
+ def isTraitImplementation(name: String) = ReflectionUtils.isTraitImplementation(name)
- import java.net.MalformedURLException
def specToURL(spec: String): Option[URL] =
try Some(new URL(spec))
catch { case _: MalformedURLException => None }
@@ -172,12 +162,12 @@ object ClassPath {
}
object DefaultJavaContext extends JavaContext {
- override def isValidName(name: String) = !isTraitImplementation(name)
+ override def isValidName(name: String) = !ReflectionUtils.scalacShouldntLoadClassfile(name)
}
- @inline private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class"
- @inline private def endsScala(s: String) = s.length > 6 && s.substring(s.length - 6) == ".scala"
- @inline private def endsJava(s: String) = s.length > 5 && s.substring(s.length - 5) == ".java"
+ private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class"
+ private def endsScala(s: String) = s.length > 6 && s.substring(s.length - 6) == ".scala"
+ private def endsJava(s: String) = s.length > 5 && s.substring(s.length - 5) == ".java"
/** From the source file to its identifier.
*/
@@ -226,28 +216,8 @@ abstract class ClassPath[T] {
def packages: IndexedSeq[ClassPath[T]]
def sourcepaths: IndexedSeq[AbstractFile]
- /** Information which entails walking the tree. This is probably only
- * necessary for tracking down problems - it's normally not used.
- */
- // def allPackages: List[ClassPath[T]] = packages ::: (packages flatMap (_.allPackages))
- // def allPackageNames: List[String] = {
- // def subpackages(prefix: String, cp: ClassPath[T]): List[String] = (
- // (cp.packages map (prefix + _.name)) :::
- // (cp.packages flatMap (x => subpackages(prefix + x.name + ".", x)))
- // )
- // subpackages("", this)
- // }
- // def allPackagesWithNames: List[(String, ClassPath[T])] = {
- // val root = packages map (p => p.name -> p)
- // val subs =
- // for ((prefix, p) <- root ; (k, v) <- p.allPackagesWithNames) yield
- // (prefix + "." + k, v)
- //
- // root ::: subs
- // }
-
/**
- * Represents classes which can be loaded with a ClassfileLoader/MSILTypeLoader
+ * Represents classes which can be loaded with a ClassfileLoader/MsilFileLoader
* and / or a SourcefileLoader.
*/
case class ClassRep(binary: Option[T], source: Option[AbstractFile]) {
@@ -348,6 +318,13 @@ class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[Ab
override def toString() = "directory classpath: "+ origin.getOrElse("?")
}
+class DeltaClassPath[T](original: MergedClassPath[T], subst: Map[ClassPath[T], ClassPath[T]])
+extends MergedClassPath[T](original.entries map (e => subst getOrElse (e, e)), original.context) {
+ // not sure we should require that here. Commented out for now.
+ // require(subst.keySet subsetOf original.entries.toSet)
+ // We might add specialized operations for computing classes packages here. Not sure it's worth it.
+}
+
/**
* A classpath unifying multiple class- and sourcepath entries.
*/
@@ -359,7 +336,7 @@ extends ClassPath[T] {
this(entries.toIndexedSeq, context)
def name = entries.head.name
- def asURLs = entries flatMap (_.asURLs) toList
+ def asURLs = (entries flatMap (_.asURLs)).toList
lazy val sourcepaths: IndexedSeq[AbstractFile] = entries flatMap (_.sourcepaths)
override def origin = Some(entries map (x => x.origin getOrElse x.name) mkString ("Merged(", ", ", ")"))
@@ -417,41 +394,10 @@ extends ClassPath[T] {
}
new MergedClassPath[T](newEntries, context)
}
- //
- // override def allPackages: List[ClassPath[T]] = entries flatMap (_.allPackages)
- // override def allPackageNames = entries flatMap (_.allPackageNames)
- // override def allPackagesWithNames = entries flatMap (_.allPackagesWithNames)
- //
- // def duplicatedClasses = {
- // def toFullName(x: (String, _, AnyClassRep)) = x._1 + "." + x._3.name
- //
- // /** Flatten everything into tuples, recombine grouped by name, filter down to 2+ entries. */
- // val flattened = (
- // for ((pkgName, pkg) <- allPackagesWithNames ; clazz <- pkg.classes) yield
- // (pkgName, pkg, clazz)
- // )
- // val multipleAppearingEntries = flattened groupBy toFullName filter (_._2.size > 1)
- //
- // /** Using original name list as reference point, return duplicated entries as
- // * (name, list of origins)
- // * in the order they occur on the path.
- // */
- // for (name <- flattened map toFullName distinct ; dups <- multipleAppearingEntries get name) yield
- // (name, dups map {
- // case (_, cp, _) if cp.origin.isDefined => cp.origin.get
- // case (_, cp, _) => cp.asURLs.mkString
- // })
- // }
- //
def show() {
println("ClassPath %s has %d entries and results in:\n".format(name, entries.size))
asClasspathString split ':' foreach (x => println(" " + x))
}
- // def showDuplicates() =
- // ClassPath findDuplicates this foreach {
- // case (name, xs) => println(xs.mkString(name + ":\n ", "\n ", "\n"))
- // }
- //
override def toString() = "merged classpath "+ entries.mkString("(", "\n", ")")
}
@@ -462,5 +408,16 @@ extends ClassPath[T] {
class JavaClassPath(
containers: IndexedSeq[ClassPath[AbstractFile]],
context: JavaContext)
-extends MergedClassPath[AbstractFile](containers, context) {
+extends MergedClassPath[AbstractFile](containers, context) { }
+
+object JavaClassPath {
+ def fromURLs(urls: Seq[URL], context: JavaContext): JavaClassPath = {
+ val containers = {
+ for (url <- urls ; f = AbstractFile getURL url ; if f != null) yield
+ new DirectoryClassPath(f, context)
+ }
+ new JavaClassPath(containers.toIndexedSeq, context)
+ }
+ def fromURLs(urls: Seq[URL]): JavaClassPath =
+ fromURLs(urls, ClassPath.DefaultJavaContext)
}
diff --git a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
index 00fe49d..9cf2c53 100644
--- a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
+++ b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -20,11 +20,10 @@ import scala.collection.mutable.ListBuffer
*/
trait ParserUtil extends Parsers {
- class ParserPlus[+T](underlying: Parser[T]) {
+ protected implicit class ParserPlus[+T](underlying: Parser[T]) {
def !~>[U](p: => Parser[U]): Parser[U] = (underlying ~! p) ^^ { case a~b => b }
def <~: Parser[T] = (underlying ~! p) ^^ { case a~b => a }
}
- protected implicit def parser2parserPlus[T](p: Parser[T]): ParserPlus[T] = new ParserPlus(p)
}
case class CommandLine(
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
old mode 100644
new mode 100755
index ecac4f7..dde53dc
--- a/src/compiler/scala/tools/nsc/util/DocStrings.scala
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -1,14 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2006-2011 LAMP/EPFL
+ * Copyright 2006-2013 LAMP/EPFL
* @author Martin Odersky
*/
-
package scala.tools.nsc
package util
-import Chars._
-import scala.collection.mutable.{HashMap, ListBuffer, StringBuilder}
+import scala.reflect.internal.Chars._
/** Utilitity methods for doc comment strings
*/
@@ -28,9 +26,17 @@ object DocStrings {
if (start < str.length && isIdentifierPart(str charAt start)) skipIdent(str, start + 1)
else start
+ /** Returns index of string `str` following `start` skipping
+ * sequence of identifier characters.
+ */
+ def skipTag(str: String, start: Int): Int =
+ if (start < str.length && (str charAt start) == '@') skipIdent(str, start + 1)
+ else start
+
+
/** Returns index of string `str` after `start` skipping longest
* sequence of space and tab characters, possibly also containing
- * a single `*' character or the `/``**` sequence.
+ * a single `*` character or the `/``**` sequence.
* @pre start == str.length || str(start) == `\n'
*/
def skipLineLead(str: String, start: Int): Int =
@@ -51,7 +57,7 @@ object DocStrings {
else start
/** Returns first index following `start` and starting a line (i.e. after skipLineLead) or starting the comment
- * which satisfies predicate `p'.
+ * which satisfies predicate `p`.
*/
def findNext(str: String, start: Int)(p: Int => Boolean): Int = {
val idx = skipLineLead(str, skipToEol(str, start))
@@ -60,7 +66,7 @@ object DocStrings {
}
/** Return first index following `start` and starting a line (i.e. after skipLineLead)
- * which satisfies predicate `p'.
+ * which satisfies predicate `p`.
*/
def findAll(str: String, start: Int)(p: Int => Boolean): List[Int] = {
val idx = findNext(str, start)(p)
@@ -70,15 +76,45 @@ object DocStrings {
/** Produces a string index, which is a list of ``sections'', i.e
* pairs of start/end positions of all tagged sections in the string.
- * Every section starts with a `@' and extends to the next `@', or
- * to the end of the comment string, but excluding the final two
+ * Every section starts with an at sign and extends to the next at sign,
+ * or to the end of the comment string, but excluding the final two
* characters which terminate the comment.
+ *
+ * Also take usecases into account - they need to expand until the next
+ * usecase or the end of the string, as they might include other sections
+ * of their own
*/
- def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] =
- findAll(str, 0) (idx => str(idx) == '@' && p(idx)) match {
+ def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] = {
+ var indices = findAll(str, 0) (idx => str(idx) == '@' && p(idx))
+ indices = mergeUsecaseSections(str, indices)
+ indices = mergeInheritdocSections(str, indices)
+
+ indices match {
case List() => List()
- case idxs => idxs zip (idxs.tail ::: List(str.length - 2))
+ case idxs => idxs zip (idxs.tail ::: List(str.length - 2))
}
+ }
+
+ /**
+ * Merge sections following an usecase into the usecase comment, so they
+ * can override the parent symbol's sections
+ */
+ def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = {
+ idxs.indexWhere(str.startsWith("@usecase", _)) match {
+ case firstUCIndex if firstUCIndex != -1 =>
+ val commentSections = idxs.take(firstUCIndex)
+ val usecaseSections = idxs.drop(firstUCIndex).filter(str.startsWith("@usecase", _))
+ commentSections ::: usecaseSections
+ case _ =>
+ idxs
+ }
+ }
+
+ /**
+ * Merge the inheritdoc sections, as they never make sense on their own
+ */
+ def mergeInheritdocSections(str: String, idxs: List[Int]): List[Int] =
+ idxs.filterNot(str.startsWith("@inheritdoc", _))
/** Does interval `iv` start with given `tag`?
*/
@@ -88,12 +124,11 @@ object DocStrings {
def startsWithTag(str: String, start: Int, tag: String): Boolean =
str.startsWith(tag, start) && !isIdentifierPart(str charAt (start + tag.length))
-
/** The first start tag of a list of tag intervals,
* or the end of the whole comment string - 2 if list is empty
*/
def startTag(str: String, sections: List[(Int, Int)]) = sections match {
- case List() => str.length - 2
+ case Nil => str.length - 2
case (start, _) :: _ => start
}
@@ -109,6 +144,12 @@ object DocStrings {
}
/** Optionally start and end index of return section in `str`, or `None`
+ * if `str` does not have a @group. */
+ def groupDoc(str: String, sections: List[(Int, Int)]): Option[(Int, Int)] =
+ sections find (startsWithTag(str, _, "@group"))
+
+
+ /** Optionally start and end index of return section in `str`, or `None`
* if `str` does not have a @return.
*/
def returnDoc(str: String, sections: List[(Int, Int)]): Option[(Int, Int)] =
@@ -135,4 +176,48 @@ object DocStrings {
idx
}
}
+
+ /** A map from the section tag to section parameters */
+ def sectionTagMap(str: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] =
+ Map() ++ {
+ for (section <- sections) yield
+ extractSectionTag(str, section) -> section
+ }
+
+ /** Extract the section tag, treating the section tag as an indentifier */
+ def extractSectionTag(str: String, section: (Int, Int)): String =
+ str.substring(section._1, skipTag(str, section._1))
+
+ /** Extract the section parameter */
+ def extractSectionParam(str: String, section: (Int, Int)): String = {
+ val (beg, _) = section
+ assert(str.startsWith("@param", beg) ||
+ str.startsWith("@tparam", beg) ||
+ str.startsWith("@throws", beg))
+
+ val start = skipWhitespace(str, skipTag(str, beg))
+ val finish = skipIdent(str, start)
+
+ str.substring(start, finish)
+ }
+
+ /** Extract the section text, except for the tag and comment newlines */
+ def extractSectionText(str: String, section: (Int, Int)): (Int, Int) = {
+ val (beg, end) = section
+ if (str.startsWith("@param", beg) ||
+ str.startsWith("@tparam", beg) ||
+ str.startsWith("@throws", beg))
+ (skipWhitespace(str, skipIdent(str, skipWhitespace(str, skipTag(str, beg)))), end)
+ else
+ (skipWhitespace(str, skipTag(str, beg)), end)
+ }
+
+ /** Cleanup section text */
+ def cleanupSectionText(str: String) = {
+ var result = str.trim.replaceAll("\n\\s+\\*\\s+", " \n")
+ while (result.endsWith("\n"))
+ result = result.substring(0, str.length - 1)
+ result
+ }
+
}
diff --git a/src/compiler/scala/tools/nsc/util/Exceptional.scala b/src/compiler/scala/tools/nsc/util/Exceptional.scala
index 459881b..3434426 100644
--- a/src/compiler/scala/tools/nsc/util/Exceptional.scala
+++ b/src/compiler/scala/tools/nsc/util/Exceptional.scala
@@ -3,140 +3,10 @@ package util
import java.util.concurrent.ExecutionException
import java.lang.reflect.{ InvocationTargetException, UndeclaredThrowableException }
-import io.{ Sources, Fileish }
-import scala.tools.util.StringOps._
-
-/** A simple throwable wrapper so it looks more like a parade of
- * glittering frame-shaped beauties than the other thing.
- */
-class Exceptional(val ex: Throwable)(implicit prefs: ScalaPrefs) {
- val formatter = prefs.exceptionFormatter(ex)
- val unwrapped = Exceptional.unwrap(ex)
- val table = formatter.newTable(unwrapped)
- def rawTrace() = unwrapped.printStackTrace()
- def isScanDone = prefs.codeSources.isDone()
-
- /** Block until the scanning is complete. */
- def force(): this.type = {
- prefs.codeSources.force()
- this
- }
-
- /** Stack frame contexts are only shown as long as this is true. */
- def spanFn(frame: JavaStackFrame): Boolean = true
-
- /** The result of this will be printed before a context trace. */
- def contextPrelude: String =
- if (isScanDone) ""
- else "/* Still scanning source path: there may be more momentarily. */\n"
-
- /** Frames with surrounding context. */
- private def contextFrames = toList takeWhile spanFn
- def contextHead(): String = contextElems.headOption getOrElse ""
- def contextElems() = contextFrames map formatter.inContext
- def context(): String = context(length)
- def context(num: Int): String = contextPrelude + ojoinOr(contextFrames take num map formatter.inContext, "\n", "No stack trace.")
-
- /** Exceptional doesn't extend Seq because it turns out to be super
- * annoying in the repl: tab-completion sees all the Seq methods.
- */
- def length = toList.length
- def toList = table.toList
- def iterator = table.iterator
- def apply(index: Int) = table(index)
-
- def causes = Exceptional.causes(ex)
- def summary = unwrapped.toString + "\n at " + apply(0).shortNameString
-
- private def println(msg: Any) = {
- Console println msg
- Console.flush()
- }
-
- def show(): Unit = println(context())
- def show(num: Int): Unit = println(context(num))
- def showCauses() = println((ex :: causes).mkString("", "\n caused by -> ", ""))
- def showTable() = println(table)
- def showSummary() = println(summary)
-
- override def toString = summary
-}
-
+import scala.reflect.internal.util.StringOps._
+import scala.language.implicitConversions
object Exceptional {
- type Catcher[+T] = PartialFunction[Throwable, T]
-
- /** Creates an exception handler which will only ever catch the given
- * number of exceptions (if the given pf is defined there) and after
- * that will disable itself.
- */
- def expiringHandler[T](numCatches: Int)(pf: Catcher[T]): Catcher[T] = {
- var remaining = numCatches;
- { case ex: Throwable if remaining > 0 && pf.isDefinedAt(ex) =>
- remaining -= 1
- pf(ex)
- }
- }
-
- /** The Throwable => Exceptional implicit plus the associated factory. */
- implicit def throwableToExceptional(ex: Throwable)(implicit prefs: ScalaPrefs): Exceptional = apply(ex)(prefs)
- def apply(ex: Throwable)(implicit prefs: ScalaPrefs) = new Exceptional(ex)(prefs)
-
- /** Some handy functions. */
- def stack() = JavaStackFrame frames ((new Throwable).getStackTrace dropWhile isLocal)
- def showme() = apply(new Throwable).show()
- def showstack() = apply(new Throwable).showTable()
-
- /** A frame formatter with more refined aesthetics than the default.
- * Come, let us be civilized.
- */
- object ScalaFormat extends TableDef[JavaStackFrame] {
- >> ("file" -> (_.fileName)) >+ ":"
- << ("line" -> (_.line))
- >> ("class" -> (_.shortestName)) >+ "."
- << ("method" -> (_.methodName))
- }
-
- trait Calibrated {
- def newTable(ex: Throwable): TableDef[JavaStackFrame]#Table
- def inContext(frame: JavaStackFrame): String
- }
- trait Formatter extends (Throwable => Calibrated) {
- def apply(ex: Throwable): Calibrated
- }
- object Formatter {
- def apply(implicit prefs: ScalaPrefs): Formatter = new Formatter {
- def apply(ex: Throwable) = new Calibrated {
- def newTable(ex: Throwable) = new ScalaFormat.Table(JavaStackFrame frames ex)
- def inContext(frame: JavaStackFrame) = new FrameContext(frame, prefs.codeSources) toString
- }
- }
- }
-
- /** Java stack traces have the interesting property of using only the name
- * of the file, no paths. This makes it a bit of a gamble to try to associate
- * a stack frame with a specific file. Witness the heuristic.
- */
- def locateSources(sources: Sources, frame: JavaStackFrame): List[Fileish] = {
- // if only one has a matching path, that's fairly sure to be the one
- val matches = sources(frame.fileName) filter (_.pkgFromPath endsWith frame.pkgName)
- if (matches.isEmpty || matches.tail.isEmpty)
- return matches
-
- // otherwise we'll drink them in and look for a package name
- matches filter (_.pkgFromSource endsWith frame.pkgName)
- }
-
- /** Right now this punts if more than one match and it accepts the first at random.
- */
- def locateSource(sources: Sources, frame: JavaStackFrame): Option[Fileish] =
- locateSources(sources, frame).headOption
-
- def isLocal(ste: StackTraceElement) = ste.getClassName startsWith this.getClass.getName
- def causes(x: Throwable): List[Throwable] = x.getCause match {
- case null => Nil
- case ex => x :: causes(ex)
- }
def unwrap(x: Throwable): Throwable = x match {
case _: InvocationTargetException |
_: ExceptionInInitializerError |
diff --git a/src/compiler/scala/tools/nsc/util/FlagsUtil.scala b/src/compiler/scala/tools/nsc/util/FlagsUtil.scala
deleted file mode 100644
index eb3e8ea..0000000
--- a/src/compiler/scala/tools/nsc/util/FlagsUtil.scala
+++ /dev/null
@@ -1,236 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package util
-
-// Overloading invariants: these are "pseudoinvariants" because many of the
-// methods do not exist on Modifiers, only on Symbol, not to mention it is only
-// speculative that they are mutually exclusive: but is here to provide a
-// starting point for further refinement.
-//
-// 16: BYNAMEPARAM CAPTURED COVARIANT
-// x.isParameter ==> BYNAMEPARAM
-// x.isMutable ==> CAPTURED
-// x.isType ==> COVARIANT
-//
-// 17: CONTRAVARIANT INCONSTRUCTOR LABEL
-// x.isType ==> CONTRAVARIANT
-// x.isClass ==> INCONSTRUCTOR
-// x.isMethod ==> LABEL
-//
-// 25: DEFAULTPARAM TRAIT
-// x.isParameter ==> DEFAULTPARAM
-// x.isClass ==> TRAIT
-//
-// 35: EXISTENTIAL MIXEDIN
-// x.isType ==> EXISTENTIAL
-// x.isTerm ==> MIXEDIN
-//
-// 37: IMPLCLASS PRESUPER
-// x.isClass ==> IMPLCLASS
-// x.isTerm ==> PRESUPER
-
-import scala.collection.{ mutable, immutable }
-import symtab.Flags.ExplicitFlags
-
-class TransFlagManager[T <: Global](val global: T) {
- import global._
- import definitions._
-
- private var trackerStack: List[FlagTracker] = Nil
- private def trackerString = trackerStack.mkString(" ")
-
- class FlagTracker(val name: String) {
- private val mask = symtab.Flags.TRANS_FLAG
- private val seen = new mutable.HashSet[Symbol]
-
- private def debug(msg: String) = if (settings.debug.value) log(msg)
- private def trace(msg: String) = if (settings.debug.value && settings.verbose.value) log(msg)
- private def isDebug = settings.debug.value
- private def doWeOwnFlag = trackerStack.headOption exists (_ eq this)
- private def isOK = trackerStack.isEmpty || (trackerStack.head eq this)
-
- def apply(sym: Symbol) = {
- if (!isOK)
- log("Testing " + sym.name + " for " + name + " flag, but not at top of stack: " + trackerString)
-
- sym hasFlag mask
- }
- def set(sym: Symbol) = {
- if (!isOK)
- log("Tried to set " + name + " but not at top of stack: " + trackerString)
-
- seen += sym
- sym setFlag mask
- }
- def reset(sym: Symbol) = {
- if (!isOK)
- log("Tried to clear " + name + " but not at top of stack: " + trackerString)
-
- seen -= sym
- sym resetFlag mask
- }
- def clear() {
- if (!doWeOwnFlag && seen.nonEmpty)
- log("Clearing " + seen.size + " " + name + " flags even though the stack is: " + trackerString)
-
- seen foreach (_ resetFlag mask)
- seen.clear()
- }
- }
-
- def forceClear() = {
- if (trackerStack.nonEmpty) {
- log("Warning: force clearing the stack at " + phase + ": " + trackerString)
- trackerStack foreach (_.clear())
- trackerStack = Nil
- }
- }
-
- def claimTransFlag(label: String): FlagTracker = {
- if (trackerStack.isEmpty || trackerStack.head.name != label)
- trackerStack ::= new FlagTracker(label)
-
- trackerStack.head
- }
- def releaseTransFlag(label: String): Boolean = {
- trackerStack.isEmpty || {
- if (trackerStack.head.name == label) {
- trackerStack.head.clear()
- trackerStack = trackerStack.tail
- true
- }
- else {
- log("Warning: trying to release " + label + " flag but the stack is: " + trackerStack.mkString(" "))
- false
- }
- }
- }
- def holdingTransFlag[U](label: String)(f: FlagTracker => U): U = {
- try {
- val t = claimTransFlag(label)
- f(t)
- }
- finally {
- releaseTransFlag(label)
- }
- }
-}
-
-
-/** Some functions for generating comments and methods involving flags,
- * with the output determined by reflection so we can have a little more
- * assurance that documentation and debugging output match up with reality.
- *
- * For the compiler, the output can be generated with:
- * scala scala.tools.nsc.util.FlagsUtilCompiler
- */
-class FlagsUtil(flagsObject: AnyRef) {
- /** Override to tweak flag strings before output. */
- def addFlagMetadata(name: String) = name
-
- /** Runs the generative methods in this class. */
- def reflectiveAnalyzeFlags() = {
- mkFlagsTable()
- println("")
- mkFlagToStringMethod()
- }
- /** A list of the flag names found at each bit position.
- */
- def reflectiveFlagNames: List[List[String]] = {
- val pairs = flagMethods map { m =>
- m.getName -> ((m invoke flagsObject) match {
- case x: java.lang.Integer => x.intValue: Long
- case x: java.lang.Long => x.longValue
- })
- }
- (0 to 63).toList map { idx =>
- pairs collect { case (name, value) if value == (1L << idx) => name }
- }
- }
- /** Prints a comment table identifying all the flags (as seen
- * via reflection) and at what bit each is located.
- */
- def mkFlagsTable() = {
- val markedFlagNames = reflectiveFlagNames map (_ map addFlagMetadata)
-
- val widths = 0 to 2 map { column =>
- markedFlagNames collect { case xs if xs.length > column =>
- xs(column).length
- } max
- }
- val fmt = "// %2d: " + (widths map ("%" + _ + "s") mkString " ")
- def padded(xs: List[String]) = xs match {
- case Nil => List("", "", "")
- case x :: Nil => List(x, "", "")
- case x1 :: x2 :: Nil => List(x1, x2, "")
- case _ => xs take 3
- }
- println("// Generated by mkFlagsTable() at " + now + "\n//")
- // prints the grid showing which flags are at each index
- for ((names, idx) <- markedFlagNames.zipWithIndex) {
- println(fmt.format(idx :: padded(names) : _*))
- }
- }
- /** Prints an implementation of flagToString based on the reflectively
- * determined contents of the flags class.
- */
- def mkFlagToStringMethod() = {
- def key(xs: List[String], flag: Long) = xs match {
- case Nil => "%19s".format("0x" + "%x".format(flag) + "L")
- case x :: _ =>
- if (x.head.isLower) "`" + x + "`"
- else x
- }
- def value(xs: List[String], flag: Long) = "\"" + (xs match {
- case Nil => ""
- case x :: Nil if (flag & ExplicitFlags) != 0 => x.toLowerCase
- case xs => xs.map(_.toLowerCase).mkString("<", "/", ">")
- }) + "\""
- val pairs: List[(String, String)] = reflectiveFlagNames.zipWithIndex map {
- case (xs, idx) => (key(xs, 1L << idx), value(xs, 1L << idx))
- }
- val keyWidth = pairs map (_._1.length) max
- val bodyWidth = pairs map (_._2.length) max
- val fmt = " case %" + keyWidth + "s => %-" + bodyWidth + "s // (1L << %d)"
-
- println("// Generated by mkFlagToStringMethod() at " + now)
- println("@annotation.switch override def flagToString(flag: Long): String = flag match {")
- for (((key, body), idx) <- pairs.zipWithIndex) {
- print(fmt.format(key, body, idx))
- println("")
- }
- println(" case _ => \"\"")
- println("}")
- }
-
- def isFlagName(s: String) = s stripPrefix "late" stripPrefix "not" forall (x => x.isUpper || x == '_')
- def flagMethods = flagsObject.getClass.getMethods.toList filter (x => isFlagName(x.getName)) sortBy (_.getName)
- private def now = new java.util.Date toString
-}
-
-object FlagsUtil {
- import reflect.generic.ModifierFlags
-
- trait MarkModifiers extends FlagsUtil {
- lazy val isModifiersFlag = classOf[ModifierFlags].getMethods map (_.getName) filter isFlagName toSet
- override def addFlagMetadata(name: String) = {
- if (isModifiersFlag(name)) name + "/M"
- else name
- }
- }
-}
-
-/** Convenience standalone programs.
- */
-object FlagsUtilCompiler extends FlagsUtil(symtab.Flags) with FlagsUtil.MarkModifiers {
- def main(args: Array[String]): Unit = reflectiveAnalyzeFlags()
-}
-
-object FlagsUtilLibrary extends FlagsUtil(reflect.generic.Flags) with FlagsUtil.MarkModifiers {
- def main(args: Array[String]): Unit = reflectiveAnalyzeFlags()
-}
-
diff --git a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala b/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
index 998e169..5421843 100644
--- a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
+++ b/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
@@ -1,12 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package util
-import scala.collection.mutable.HashMap
+import scala.collection.mutable
trait FreshNameCreator {
/** Do not call before after type checking ends.
@@ -16,15 +16,15 @@ trait FreshNameCreator {
def newName(prefix: String): String
@deprecated("use newName(prefix)", "2.9.0")
- def newName(pos: util.Position, prefix: String): String = newName(prefix)
+ def newName(pos: scala.reflect.internal.util.Position, prefix: String): String = newName(prefix)
@deprecated("use newName()", "2.9.0")
- def newName(pos: util.Position): String = newName()
+ def newName(pos: scala.reflect.internal.util.Position): String = newName()
}
object FreshNameCreator {
class Default extends FreshNameCreator {
protected var counter = 0
- protected val counters = new HashMap[String, Int] withDefaultValue 0
+ protected val counters = mutable.HashMap[String, Int]() withDefaultValue 0
/**
* Create a fresh name with the given prefix. It is guaranteed
diff --git a/src/compiler/scala/tools/nsc/util/HashSet.scala b/src/compiler/scala/tools/nsc/util/HashSet.scala
deleted file mode 100644
index a173216..0000000
--- a/src/compiler/scala/tools/nsc/util/HashSet.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package util
-
-object HashSet {
- def apply[T >: Null <: AnyRef](): HashSet[T] = this(16)
- def apply[T >: Null <: AnyRef](label: String): HashSet[T] = this(label, 16)
- def apply[T >: Null <: AnyRef](initialCapacity: Int): HashSet[T] = this("No Label", initialCapacity)
- def apply[T >: Null <: AnyRef](label: String, initialCapacity: Int): HashSet[T] =
- new HashSet[T](label, initialCapacity)
-}
-
-class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) extends Set[T] {
- private var used = 0
- private var table = new Array[AnyRef](initialCapacity)
- private def index(x: Int): Int = math.abs(x % table.length)
-
- def size: Int = used
- def clear() {
- used = 0
- table = new Array[AnyRef](initialCapacity)
- }
-
- def findEntryOrUpdate(x: T): T = {
- var h = index(x.##)
- var entry = table(h)
- while (entry ne null) {
- if (x == entry)
- return entry.asInstanceOf[T]
-
- h = index(h + 1)
- entry = table(h)
- }
- table(h) = x
- used += 1
- if (used > (table.length >> 2)) growTable()
- x
- }
-
- def findEntry(x: T): T = {
- var h = index(x.##)
- var entry = table(h)
- while ((entry ne null) && x != entry) {
- h = index(h + 1)
- entry = table(h)
- }
- entry.asInstanceOf[T]
- }
-
- def addEntry(x: T) {
- var h = index(x.##)
- var entry = table(h)
- while (entry ne null) {
- if (x == entry) return
- h = index(h + 1)
- entry = table(h)
- }
- table(h) = x
- used += 1
- if (used > (table.length >> 2)) growTable()
- }
- def addEntries(xs: TraversableOnce[T]) {
- xs foreach addEntry
- }
-
- def iterator = new Iterator[T] {
- private var i = 0
- def hasNext: Boolean = {
- while (i < table.length && (table(i) eq null)) i += 1
- i < table.length
- }
- def next: T =
- if (hasNext) { i += 1; table(i - 1).asInstanceOf[T] }
- else null
- }
-
- private def addOldEntry(x: T) {
- var h = index(x.##)
- var entry = table(h)
- while (entry ne null) {
- h = index(h + 1)
- entry = table(h)
- }
- table(h) = x
- }
-
- private def growTable() {
- val oldtable = table
- val growthFactor =
- if (table.length <= initialCapacity) 8
- else if (table.length <= (initialCapacity * 8)) 4
- else 2
-
- table = new Array[AnyRef](table.length * growthFactor)
- var i = 0
- while (i < oldtable.length) {
- val entry = oldtable(i)
- if (entry ne null) addOldEntry(entry.asInstanceOf[T])
- i += 1
- }
- }
- override def toString() = "HashSet %s(%d / %d)".format(label, used, table.length)
-}
diff --git a/src/compiler/scala/tools/nsc/util/InterruptReq.scala b/src/compiler/scala/tools/nsc/util/InterruptReq.scala
index 2857823..b1b81d0 100644
--- a/src/compiler/scala/tools/nsc/util/InterruptReq.scala
+++ b/src/compiler/scala/tools/nsc/util/InterruptReq.scala
@@ -2,6 +2,7 @@ package scala.tools.nsc
package util
/** A class of work items to be used in interrupt requests.
+ * Todo: we should replace the Eithers by Futures or Try's.
*/
abstract class InterruptReq {
/** The result type of the operation
@@ -11,17 +12,24 @@ abstract class InterruptReq {
/** The operation to be performed */
protected val todo: () => R
+ type Continuation = Either[R, Throwable] => Unit
+
/** The result provided */
private var result: Option[Either[R, Throwable]] = None
+ /** The continuations waiting asynchronously on a provided result */
+ private var waiting: List[Continuation] = Nil
+
/** To be called from interrupted server to execute demanded task */
def execute(): Unit = synchronized {
try {
result = Some(Left(todo()))
} catch {
- case t => result = Some(Right(t))
+ case t: Throwable => result = Some(Right(t))
+ } finally {
+ notify()
+ for (k <- waiting.reverse) k(result.get)
}
- notify()
}
/** To be called from interrupting client to get result for interrupt */
@@ -37,6 +45,13 @@ abstract class InterruptReq {
case Right(t) => throw new FailedInterrupt(t)
}
}
+
+ def onComplete(k: Continuation) = synchronized {
+ if (result.isDefined)
+ k(result.get)
+ else
+ waiting = k :: waiting
+ }
}
class FailedInterrupt(cause: Throwable) extends Exception("Compiler exception during call to 'ask'", cause)
diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
index f27d25d..b7ed790 100644
--- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
@@ -1,12 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala.tools.nsc
package util
-import Chars._
+import scala.reflect.internal.Chars._
class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, startcol: int, */
decodeUni: Boolean, error: String => Unit) extends Iterator[Char] with Cloneable {
@@ -58,7 +58,7 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int,
def last: Char = if (bp > start + 2) buf(bp - 2) else ' ' // XML literals
- def next: Char = {
+ def next(): Char = {
//cline = nextline
//ccol = nextcol
val buf = this.buf.asInstanceOf[collection.mutable.WrappedArray[Char]].array
diff --git a/src/compiler/scala/tools/nsc/util/JavaStackFrame.scala b/src/compiler/scala/tools/nsc/util/JavaStackFrame.scala
deleted file mode 100644
index d25698e..0000000
--- a/src/compiler/scala/tools/nsc/util/JavaStackFrame.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package util
-
-import io.{ Fileish, Sources }
-import Exceptional._
-
-class FrameContext(frame: JavaStackFrame, codeSources: Sources) {
- val sourceFile = locateSource(codeSources, frame)
- import frame._
-
- def windowWidth = 3
- def windowSize = windowWidth * 2 + 1
-
- lazy val context = sourceFile collect {
- case f if line > 0 =>
- val start = math.max(0, line - windowWidth)
- f.lines().toList.slice(start, start + windowSize)
- } getOrElse Nil
-
- protected def fallbackContext = "%s (%s:%s)".format(tag, fileName, line)
-
- private def linestr(index: Int) = {
- val current = line - windowWidth + index
- val marker = if (current == line) "*" else " "
- marker + current
- }
- private def contextLines = context.zipWithIndex map {
- case (l, i) => linestr(i) + ": " + l + "\n"
- }
- override def toString =
- if (context.isEmpty) fallbackContext
- else contextLines.mkString(tag + "\n", "", "")
-}
-
-object FrameContext {
- def apply(elem: StackTraceElement): FrameContext = apply(new JavaStackFrame(elem))
- def apply(frame: JavaStackFrame): FrameContext = new FrameContext(frame, Sources())
-}
-
-class JavaStackFrame(val elem: StackTraceElement) {
- def className: String = elem.getClassName()
- def methodName: String = elem.getMethodName()
- def fileName: String = elem.getFileName()
- def line: Int = elem.getLineNumber()
-
- private def segs = className takeWhile (ch => ch != '$' && ch != '(') split '.' toList ;
- lazy val pkgName = segs.init mkString "."
- lazy val shortName = segs.last
- lazy val shortestName = if (fileName startsWith (shortName + ".")) "<--" else shortName
-
- private def standardString(which: String) =
- "%s.%s(%s:%s)".format(which, methodName, fileName, line)
-
- def locationString = fileName + ":" + line
- def javaString = standardString(className)
- def shortNameString = standardString(shortName)
- def tag = "[%s.%s]".format(shortName, methodName)
-
- override def toString = shortNameString
-}
-
-object JavaStackFrame {
- def apply(elem: StackTraceElement) = new JavaStackFrame(elem)
- def frames(xs: Array[StackTraceElement]): Array[JavaStackFrame] = xs map (x => new JavaStackFrame(x))
- def frames(t: Throwable): Array[JavaStackFrame] = frames(Exceptional.unwrap(t).getStackTrace)
-}
diff --git a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
index 13fb318..77a19d3 100644
--- a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2006-2011 LAMP/EPFL
+ * Copyright 2006-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -12,12 +12,11 @@ import java.io.File
import java.net.URL
import java.util.StringTokenizer
import scala.util.Sorting
-
-import scala.collection.mutable.{ ListBuffer, HashSet => MutHashSet }
-import scala.tools.nsc.io.AbstractFile
-
+import scala.collection.mutable
+import scala.tools.nsc.io.{ AbstractFile, MsilFile }
import ch.epfl.lamp.compiler.msil.{ Type => MSILType, Assembly }
-import ClassPath.{ ClassPathContext, isTraitImplementation }
+import ClassPath.ClassPathContext
+import scala.reflect.runtime.ReflectionUtils.isTraitImplementation
/** Keeping the MSIL classpath code in its own file is important to make sure
* we don't accidentally introduce a dependency on msil.jar in the jvm.
@@ -47,15 +46,15 @@ object MsilClassPath {
new MsilClassPath(assemextdirs.value, assemrefs.value, sourcepath.value, context)
}
- class MsilContext extends ClassPathContext[MSILType] {
- def toBinaryName(rep: MSILType) = rep.Name
+ class MsilContext extends ClassPathContext[MsilFile] {
+ def toBinaryName(rep: MsilFile) = rep.msilType.Name
def newClassPath(assemFile: AbstractFile) = new AssemblyClassPath(MsilClassPath collectTypes assemFile, "", this)
}
- private def assembleEntries(ext: String, user: String, source: String, context: MsilContext): List[ClassPath[MSILType]] = {
+ private def assembleEntries(ext: String, user: String, source: String, context: MsilContext): List[ClassPath[MsilFile]] = {
import ClassPath._
- val etr = new ListBuffer[ClassPath[MSILType]]
- val names = new MutHashSet[String]
+ val etr = new mutable.ListBuffer[ClassPath[MsilFile]]
+ val names = new mutable.HashSet[String]
// 1. Assemblies from -Xassem-extdirs
for (dirName <- expandPath(ext, expandStar = false)) {
@@ -94,7 +93,7 @@ object MsilClassPath {
// 3. Source path
for (dirName <- expandPath(source, expandStar = false)) {
val file = AbstractFile.getDirectory(dirName)
- if (file ne null) etr += new SourcePath[MSILType](file, context)
+ if (file ne null) etr += new SourcePath[MsilFile](file, context)
}
etr.toList
@@ -105,7 +104,7 @@ import MsilClassPath._
/**
* A assembly file (dll / exe) containing classes and namespaces
*/
-class AssemblyClassPath(types: Array[MSILType], namespace: String, val context: MsilContext) extends ClassPath[MSILType] {
+class AssemblyClassPath(types: Array[MSILType], namespace: String, val context: MsilContext) extends ClassPath[MsilFile] {
def name = {
val i = namespace.lastIndexOf('.')
if (i < 0) namespace
@@ -127,19 +126,19 @@ class AssemblyClassPath(types: Array[MSILType], namespace: String, val context:
}
lazy val classes = {
- val cls = new ListBuffer[ClassRep]
+ val cls = new mutable.ListBuffer[ClassRep]
var i = first
while (i < types.length && types(i).Namespace.startsWith(namespace)) {
// CLRTypes used to exclude java.lang.Object and java.lang.String (no idea why..)
if (types(i).Namespace == namespace)
- cls += ClassRep(Some(types(i)), None)
+ cls += ClassRep(Some(new MsilFile(types(i))), None)
i += 1
}
cls.toIndexedSeq
}
lazy val packages = {
- val nsSet = new MutHashSet[String]
+ val nsSet = new mutable.HashSet[String]
var i = first
while (i < types.length && types(i).Namespace.startsWith(namespace)) {
val subns = types(i).Namespace
@@ -168,4 +167,4 @@ class AssemblyClassPath(types: Array[MSILType], namespace: String, val context:
* MSILType values.
*/
class MsilClassPath(ext: String, user: String, source: String, context: MsilContext)
-extends MergedClassPath[MSILType](MsilClassPath.assembleEntries(ext, user, source, context), context) { }
\ No newline at end of file
+extends MergedClassPath[MsilFile](MsilClassPath.assembleEntries(ext, user, source, context), context) { }
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/util/MultiHashMap.scala b/src/compiler/scala/tools/nsc/util/MultiHashMap.scala
index 719d18c..67987c6 100644
--- a/src/compiler/scala/tools/nsc/util/MultiHashMap.scala
+++ b/src/compiler/scala/tools/nsc/util/MultiHashMap.scala
@@ -1,10 +1,9 @@
package scala.tools.nsc.util
-import collection.mutable.HashMap
-import collection.immutable
+import scala.collection.{ mutable, immutable }
/** A hashmap with set-valued values, and an empty set as default value
*/
-class MultiHashMap[K, V] extends HashMap[K, immutable.Set[V]] {
+class MultiHashMap[K, V] extends mutable.HashMap[K, immutable.Set[V]] {
override def default(key: K): immutable.Set[V] = Set()
}
diff --git a/src/compiler/scala/tools/nsc/util/Origins.scala b/src/compiler/scala/tools/nsc/util/Origins.scala
deleted file mode 100644
index 847a9b6..0000000
--- a/src/compiler/scala/tools/nsc/util/Origins.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package util
-
-/** A debugging class for logging from whence a method is being called.
- * Say you wanted to discover who was calling phase_= in SymbolTable.
- * You could do this:
- *
- * {{{
- * private lazy val origins = Origins[SymbolTable]("phase_=")
- * // Commented out original enclosed for contrast
- * // final def phase_=(p: Phase): Unit = {
- * final def phase_=(p: Phase): Unit = origins {
- * }}}
- *
- * And that's it. When the JVM exits it would issue a report something like this:
- {{{
- >> Origins scala.tools.nsc.symtab.SymbolTable.phase_= logged 145585 calls from 51 distinguished sources.
-
- 71114 scala.tools.nsc.symtab.Symbols$Symbol.unsafeTypeParams(Symbols.scala:862)
- 16584 scala.tools.nsc.symtab.Symbols$Symbol.rawInfo(Symbols.scala:757)
- 15411 scala.tools.nsc.symtab.Symbols$Symbol.unsafeTypeParams(Symbols.scala:869)
- 11507 scala.tools.nsc.symtab.Symbols$Symbol.rawInfo(Symbols.scala:770)
- 10285 scala.tools.nsc.symtab.Symbols$Symbol.unsafeTypeParams(Symbols.scala:864)
- 6860 scala.tools.nsc.transform.SpecializeTypes.specializedTypeVars(SpecializeTypes.scala:304)
- ...
- }}}
- *
- */
-
-import scala.collection.{ mutable, immutable }
-import Origins._
-
-abstract class Origins {
- type Rep
- def newRep(xs: StackSlice): Rep
- def repString(rep: Rep): String
- def originClass: String
-
- private var _tag: String = null
- def tag: String = _tag
- def setTag(tag: String): this.type = {
- _tag = tag
- this
- }
-
- private val origins = new mutable.HashMap[Rep, Int] withDefaultValue 0
- private def add(xs: Rep) = origins(xs) += 1
- private def total = origins.values.foldLeft(0L)(_ + _)
-
- // We find the right line by dropping any from around here and any
- // from the method's origin class.
- private def dropStackElement(cn: String) =
- (cn startsWith OriginsName) || (cn startsWith originClass)
-
- // Create a stack and whittle it down to the interesting part.
- private def readStack(): Array[StackTraceElement] =
- (new Throwable).getStackTrace dropWhile (el => dropStackElement(el.getClassName))
-
- def apply[T](body: => T): T = {
- add(newRep(readStack()))
- body
- }
- def clear() = origins.clear()
- def show() = {
- println("\n>> Origins %s.%s logged %s calls from %s distinguished sources.\n".format(originClass, tag, total, origins.keys.size))
- origins.toList sortBy (-_._2) foreach {
- case (k, v) => println("%7s %s".format(v, repString(k)))
- }
- }
- def purge() = {
- show()
- clear()
- }
-}
-
-object Origins {
- private type StackSlice = Array[StackTraceElement]
- private val OriginsName = classOf[Origins].getName
- private val counters = new mutable.HashSet[Origins]
-
- {
- // Console.println("\nOrigins loaded: registering shutdown hook to display results.")
- sys.addShutdownHook(counters foreach (_.purge()))
- }
-
- def apply[T: Manifest](tag: String): Origins = apply(tag, manifest[T].erasure)
- def apply(tag: String, clazz: Class[_]): Origins = apply(tag, new OneLine(clazz))
- def apply(tag: String, orElse: => Origins): Origins = {
- counters find (_.tag == tag) getOrElse {
- returning(orElse setTag tag)(counters += _)
- }
- }
-
- class OneLine(clazz: Class[_]) extends Origins {
- type Rep = StackTraceElement
- val originClass = clazz.getName stripSuffix "$"
- def newRep(xs: StackSlice): Rep = xs(0)
- def repString(rep: Rep) = " " + rep
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/Position.scala b/src/compiler/scala/tools/nsc/util/Position.scala
deleted file mode 100644
index 49e9c12..0000000
--- a/src/compiler/scala/tools/nsc/util/Position.scala
+++ /dev/null
@@ -1,285 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- *
- */
-
-package scala.tools.nsc
-package util
-
-object Position {
- val tabInc = 8
-}
-/** The Position class and its subclasses represent positions of ASTs and symbols.
- * Except for NoPosition and FakePos, every position refers to a SourceFile
- * and to an offset in the sourcefile (its `point'). For batch compilation,
- * that's all. For interactive IDE's there are also RangePositions
- * and TransparentPositions. A RangePosition indicates a start and an end
- * in addition to its point. TransparentPositions are a subclass of RangePositions.
- * Range positions that are not transparent are called opaque.
- * Trees with RangePositions need to satisfy the following invariants.
- *
- * INV1: A tree with an offset position never contains a child
- * with a range position
- * INV2: If the child of a tree with a range position also has a range position,
- * then the child's range is contained in the parent's range.
- * INV3: Opaque range positions of children of the same node are non-overlapping
- * (this means their overlap is at most a single point).
- *
- * The following tests are useful on positions:
- *
- * pos.isDefined true if position is not a NoPosition nor a FakePosition
- * pos.isRange true if position is a range
- * pos.isOpaqueRange true if position is an opaque range
- *
- * The following accessor methods are provided:
- *
- * pos.source The source file of the position, which must be defined
- * pos.point The offset of the position's point, which must be defined
- * pos.start The start of the position, which must be a range
- * pos.end The end of the position, which must be a range
- *
- * There are also convenience methods, such as
- *
- * pos.startOrPoint
- * pos.endOrPoint
- * pos.pointOrElse(default)
- *
- * These are less strict about the kind of position on which they can be applied.
- *
- * The following conversion methods are often used:
- *
- * pos.focus converts a range position to an offset position, keeping its point;
- * returns all other positions unchanged.
- * pos.makeTransparent converts an opaque range position into a transparent one.
- * returns all other positions unchanged.
- */
-trait Position {
-
- /** An optional value containing the source file referred to by this position, or
- * None if not defined.
- */
- def source: SourceFile = throw new UnsupportedOperationException("Position.source")
-
- /** Is this position neither a NoPosition nor a FakePosition?
- * If isDefined is true, offset and source are both defined.
- */
- def isDefined: Boolean = false
-
- /** Is this position a transparent position? */
- def isTransparent: Boolean = false
-
- /** Is this position a range position? */
- def isRange: Boolean = false
-
- /** Is this position a non-transparent range position? */
- def isOpaqueRange: Boolean = false
-
- /** if opaque range, make this position transparent */
- def makeTransparent: Position = this
-
- /** The start of the position's range, error if not a range position */
- def start: Int = throw new UnsupportedOperationException("Position.start")
-
- /** The start of the position's range, or point if not a range position */
- def startOrPoint: Int = point
-
- /** The point (where the ^ is) of the position */
- def point: Int = throw new UnsupportedOperationException("Position.point")
-
- /** The point (where the ^ is) of the position, or else `default' if undefined */
- def pointOrElse(default: Int): Int = default
-
- /** The end of the position's range, error if not a range position */
- def end: Int = throw new UnsupportedOperationException("Position.end")
-
- /** The end of the position's range, or point if not a range position */
- def endOrPoint: Int = point
-
- @deprecated("use point instead", "2.9.0")
- def offset: Option[Int] = if (isDefined) Some(point) else None
-
- /** The same position with a different start value (if a range) */
- def withStart(off: Int) = this
-
- /** The same position with a different end value (if a range) */
- def withEnd(off: Int) = this
-
- /** The same position with a different point value (if a range or offset) */
- def withPoint(off: Int) = this
-
- /** The same position with a different source value, and its values shifted by given offset */
- def withSource(source: SourceFile, shift: Int) = this
-
- /** If this is a range, the union with the other range, with the point of this position.
- * Otherwise, this position
- */
- def union(pos: Position) = this
-
- /** If this is a range position, the offset position of its start.
- * Otherwise the position itself
- */
- def focusStart = this
-
- /** If this is a range position, the offset position of its point.
- * Otherwise the position itself
- */
- def focus = this
-
- /** If this is a range position, the offset position of its end.
- * Otherwise the position itself
- */
- def focusEnd = this
-
- /** Does this position include the given position `pos`.
- * This holds if `this` is a range position and its range [start..end]
- * is the same or covers the range of the given position, which may or may not be a range position.
- */
- def includes(pos: Position) = false
-
- /** Does this position properly include the given position `pos` ("properly" meaning their
- * ranges are not the same)?
- */
- def properlyIncludes(pos: Position) =
- includes(pos) && (start < pos.startOrPoint || pos.endOrPoint < end)
-
- /** Does this position precede that position?
- * This holds if both positions are defined and the end point of this position
- * is not larger than the start point of the given position.
- */
- def precedes(pos: Position) =
- isDefined && pos.isDefined && endOrPoint <= pos.startOrPoint
-
- /** Does this position properly precede the given position `pos` ("properly" meaning their ranges
- * do not share a common point).
- */
- def properlyPrecedes(pos: Position) =
- isDefined && pos.isDefined && endOrPoint < pos.startOrPoint
-
- /** Does this position overlap with that position?
- * This holds if both positions are ranges and there is an interval of
- * non-zero length that is shared by both position ranges.
- */
- def overlaps(pos: Position) =
- isRange && pos.isRange &&
- ((pos.start < end && start < pos.end) || (start < pos.end && pos.start < end))
-
- /** Does this position cover the same range as that position?
- * Holds only if both position are ranges
- */
- def sameRange(pos: Position) =
- isRange && pos.isRange && start == pos.start && end == pos.end
-
- def line: Int = throw new UnsupportedOperationException("Position.line")
-
- def column: Int = throw new UnsupportedOperationException("Position.column")
-
- /** Convert this to a position around `point` that spans a single source line */
- def toSingleLine: Position = this
-
- def lineContent: String =
- if (isDefined) source.lineToString(line - 1)
- else "NO_LINE"
-
- /** Map this position to a position in an original source
- * file. If the SourceFile is a normal SourceFile, simply
- * return this.
- */
- def inUltimateSource(source : SourceFile) =
- if (source == null) this else source.positionInUltimateSource(this)
-
- def dbgString = toString
-
- def show: String = "["+toString+"]"
-}
-
-case object NoPosition extends Position {
- override def dbgString = toString
-}
-
-case class FakePos(msg: String) extends Position {
- override def toString = msg
-}
-
-class OffsetPosition(override val source: SourceFile, override val point: Int) extends Position {
- override def isDefined = true
- override def pointOrElse(default: Int): Int = point
- override def withPoint(off: Int) = new OffsetPosition(source, off)
- override def withSource(source: SourceFile, shift: Int) = new OffsetPosition(source, point + shift)
-
- override def line: Int = source.offsetToLine(point) + 1
-
- override def column: Int = {
- var idx = source.lineToOffset(source.offsetToLine(point))
- var col = 0
- while (idx != point) {
- col += (if (source.content(idx) == '\t') Position.tabInc - col % Position.tabInc else 1)
- idx += 1
- }
- col + 1
- }
-
- override def union(pos: Position) =
- if (pos.isRange) pos else this
-
- override def equals(that : Any) = that match {
- case that : OffsetPosition => point == that.point && source.file == that.source.file
- case that => false
- }
- override def hashCode = point * 37 + source.file.hashCode
-
- override def toString = {
- val pointmsg = if (point > source.length) "out-of-bounds-" else "offset="
- "source-%s,line-%s,%s%s".format(source.path, line, pointmsg, point)
- }
- override def show = "["+point+"]"
-}
-
-/** new for position ranges */
-class RangePosition(source: SourceFile, override val start: Int, point: Int, override val end: Int)
-extends OffsetPosition(source, point) {
- if (start > end) assert(false, "bad position: "+show)
- override def isRange: Boolean = true
- override def isOpaqueRange: Boolean = true
- override def startOrPoint: Int = start
- override def endOrPoint: Int = end
- override def withStart(off: Int) = new RangePosition(source, off, point, end)
- override def withEnd(off: Int) = new RangePosition(source, start, point, off)
- override def withPoint(off: Int) = new RangePosition(source, start, off, end)
- override def withSource(source: SourceFile, shift: Int) = new RangePosition(source, start + shift, point + shift, end + shift)
- override def focusStart = new OffsetPosition(source, start)
- override def focus = {
- if (focusCache eq NoPosition) focusCache = new OffsetPosition(source, point)
- focusCache
- }
- override def focusEnd = new OffsetPosition(source, end)
- override def makeTransparent = new TransparentPosition(source, start, point, end)
- override def includes(pos: Position) = pos.isDefined && start <= pos.startOrPoint && pos.endOrPoint <= end
- override def union(pos: Position) =
- if (pos.isRange) new RangePosition(source, start min pos.start, point, end max pos.end) else this
-
- override def toSingleLine: Position = source match {
- case bs: BatchSourceFile
- if end > 0 && bs.offsetToLine(start) < bs.offsetToLine(end - 1) =>
- val pointLine = bs.offsetToLine(point)
- new RangePosition(source, bs.lineToOffset(pointLine), point, bs.lineToOffset(pointLine + 1))
- case _ => this
- }
-
- override def toString = "RangePosition("+source+", "+start+", "+point+", "+end+")"
- override def show = "["+start+":"+end+"]"
- private var focusCache: Position = NoPosition
-}
-
-class TransparentPosition(source: SourceFile, start: Int, point: Int, end: Int) extends RangePosition(source, start, point, end) {
- override def isOpaqueRange: Boolean = false
- override def isTransparent = true
- override def makeTransparent = this
- override def show = "<"+start+":"+end+">"
-}
-
-
-
-
-
-
diff --git a/src/compiler/scala/tools/nsc/util/RegexCache.scala b/src/compiler/scala/tools/nsc/util/RegexCache.scala
deleted file mode 100644
index 896c1a5..0000000
--- a/src/compiler/scala/tools/nsc/util/RegexCache.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Lex Spoon
- */
-
-package scala.tools.nsc
-package util
-import java.util.regex.Pattern
-import scala.collection.mutable
-
-object RegexCache {
- /** Maps patterns to compiled regexes */
- private val regexMap = mutable.Map.empty[String, Pattern]
-
- /** Lists the regexes that have been recorded in order */
- private val regexList = new mutable.Queue[String]
-
- private val regexesToCache = 1000
-
- /** Compile a regex and add it to the cache */
- private def compileAndAdd(regex: String): Pattern = {
- val pattern = Pattern.compile(regex)
-
- regexMap += (regex -> pattern)
- regexList += regex
-
- if (regexMap.size > regexesToCache)
- regexMap -= regexList.dequeue()
-
- pattern
- }
-
-
- /** Compile a regex, caching */
- def apply(regex: String): Pattern =
- regexMap.get(regex) match {
- case Some(pattern) => pattern
- case None => compileAndAdd(regex)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
index d6d19ee..1f6fa68 100644
--- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
+++ b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
@@ -1,31 +1,37 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools.nsc
package util
-import java.lang.{ ClassLoader => JavaClassLoader }
+import java.lang.{ ClassLoader => JClassLoader }
import java.lang.reflect.{ Constructor, Modifier, Method }
+import java.io.{ File => JFile }
+import java.net.{ URLClassLoader => JURLClassLoader }
import java.net.URL
+import scala.reflect.runtime.ReflectionUtils.unwrapHandler
import ScalaClassLoader._
import scala.util.control.Exception.{ catching }
+import scala.language.implicitConversions
+import scala.reflect.{ ClassTag, classTag }
-trait ScalaClassLoader extends JavaClassLoader {
- /** Override to see classloader activity traced */
- protected def trace: Boolean = false
+trait HasClassPath {
+ def classPathURLs: Seq[URL]
+}
+/** A wrapper around java.lang.ClassLoader to lower the annoyance
+ * of java reflection.
+ */
+trait ScalaClassLoader extends JClassLoader {
/** Executing an action with this classloader as context classloader */
def asContext[T](action: => T): T = {
- val oldLoader = getContextLoader
- try {
- setContextLoader(this)
- action
- }
- finally setContextLoader(oldLoader)
+ val saved = contextLoader
+ try { setContext(this) ; action }
+ finally setContext(saved)
}
- def setAsContext() { setContextLoader(this) }
+ def setAsContext() { setContext(this) }
/** Load and link a class with this classloader */
def tryToLoadClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, false)
@@ -40,20 +46,8 @@ trait ScalaClassLoader extends JavaClassLoader {
def create(path: String): AnyRef =
tryToInitializeClass[AnyRef](path) map (_.newInstance()) orNull
- override def findClass(name: String) = {
- val result = super.findClass(name)
- if (trace) println("findClass(%s) = %s".format(name, result))
- result
- }
-
- override def loadClass(name: String, resolve: Boolean) = {
- val result = super.loadClass(name, resolve)
- if (trace) println("loadClass(%s, %s) = %s".format(name, resolve, result))
- result
- }
-
- def constructorsOf[T <: AnyRef : Manifest]: List[Constructor[T]] =
- manifest[T].erasure.getConstructors.toList map (_.asInstanceOf[Constructor[T]])
+ def constructorsOf[T <: AnyRef : ClassTag]: List[Constructor[T]] =
+ classTag[T].runtimeClass.getConstructors.toList map (_.asInstanceOf[Constructor[T]])
/** The actual bytes for a class file, or an empty array if it can't be found. */
def classBytes(className: String): Array[Byte] = classAsStream(className) match {
@@ -70,71 +64,105 @@ trait ScalaClassLoader extends JavaClassLoader {
val clsToRun = tryToInitializeClass(objectName) getOrElse (
throw new ClassNotFoundException(objectName)
)
-
val method = clsToRun.getMethod("main", classOf[Array[String]])
if (!Modifier.isStatic(method.getModifiers))
throw new NoSuchMethodException(objectName + ".main is not static")
- asContext(method.invoke(null, Array(arguments.toArray: AnyRef): _*)) // !!! : AnyRef shouldn't be necessary
+ try asContext(method.invoke(null, Array(arguments.toArray: AnyRef): _*)) // !!! : AnyRef shouldn't be necessary
+ catch unwrapHandler({ case ex => throw ex })
}
+
+ /** A list comprised of this classloader followed by all its
+ * (non-null) parent classloaders, if any.
+ */
+ def loaderChain: List[ScalaClassLoader] = this :: (getParent match {
+ case null => Nil
+ case p => p.loaderChain
+ })
}
+/** Methods for obtaining various classloaders.
+ * appLoader: the application classloader. (Also called the java system classloader.)
+ * extLoader: the extension classloader.
+ * bootLoader: the boot classloader.
+ * contextLoader: the context classloader.
+ */
object ScalaClassLoader {
- implicit def apply(cl: JavaClassLoader): ScalaClassLoader = {
- val loader = if (cl == null) JavaClassLoader.getSystemClassLoader() else cl
- new JavaClassLoader(loader) with ScalaClassLoader
+ /** Returns loaders which are already ScalaClassLoaders unaltered,
+ * and translates java.net.URLClassLoaders into scala URLClassLoaders.
+ * Otherwise creates a new wrapper.
+ */
+ implicit def apply(cl: JClassLoader): ScalaClassLoader = cl match {
+ case cl: ScalaClassLoader => cl
+ case cl: JURLClassLoader => new URLClassLoader(cl.getURLs.toSeq, cl.getParent)
+ case _ => new JClassLoader(cl) with ScalaClassLoader
+ }
+ def contextLoader = apply(Thread.currentThread.getContextClassLoader)
+ def appLoader = apply(JClassLoader.getSystemClassLoader)
+ def extLoader = apply(appLoader.getParent)
+ def bootLoader = apply(null)
+ def contextChain = loaderChain(contextLoader)
+
+ def pathToErasure[T: ClassTag] = pathToClass(classTag[T].runtimeClass)
+ def pathToClass(clazz: Class[_]) = clazz.getName.replace('.', JFile.separatorChar) + ".class"
+ def locate[T: ClassTag] = contextLoader getResource pathToErasure[T]
+
+ /** Tries to guess the classpath by type matching the context classloader
+ * and its parents, looking for any classloaders which will reveal their
+ * classpath elements as urls. It it can't find any, creates a classpath
+ * from the supplied string.
+ */
+ def guessClassPathString(default: String = ""): String = {
+ val classpathURLs = contextChain flatMap {
+ case x: HasClassPath => x.classPathURLs
+ case x: JURLClassLoader => x.getURLs.toSeq
+ case _ => Nil
+ }
+ if (classpathURLs.isEmpty) default
+ else JavaClassPath.fromURLs(classpathURLs).asClasspathString
+ }
+
+ def loaderChain(head: JClassLoader) = {
+ def loop(cl: JClassLoader): List[JClassLoader] =
+ if (cl == null) Nil else cl :: loop(cl.getParent)
+
+ loop(head)
+ }
+ def setContext(cl: JClassLoader) =
+ Thread.currentThread.setContextClassLoader(cl)
+ def savingContextLoader[T](body: => T): T = {
+ val saved = contextLoader
+ try body
+ finally setContext(saved)
}
- class URLClassLoader(urls: Seq[URL], parent: JavaClassLoader)
- extends java.net.URLClassLoader(urls.toArray, parent)
- with ScalaClassLoader {
+ class URLClassLoader(urls: Seq[URL], parent: JClassLoader)
+ extends JURLClassLoader(urls.toArray, parent)
+ with ScalaClassLoader
+ with HasClassPath {
- private var classloaderURLs = urls.toList
+ private var classloaderURLs: Seq[URL] = urls
private def classpathString = ClassPath.fromURLs(urls: _*)
+ def classPathURLs: Seq[URL] = classloaderURLs
+ def classPath: ClassPath[_] = JavaClassPath fromURLs classPathURLs
/** Override to widen to public */
override def addURL(url: URL) = {
- classloaderURLs +:= url
+ classloaderURLs :+= url
super.addURL(url)
}
- override def run(objectName: String, arguments: Seq[String]) {
- try super.run(objectName, arguments)
- catch { case x: ClassNotFoundException =>
- throw new ClassNotFoundException(objectName +
- " (args = %s, classpath = %s)".format(arguments mkString ", ", classpathString))
- }
- }
- override def toString = urls.mkString("URLClassLoader(\n ", "\n ", "\n)\n")
+ def toLongString = urls.mkString("URLClassLoader(\n ", "\n ", "\n)\n")
}
- def setContextLoader(cl: JavaClassLoader) = Thread.currentThread.setContextClassLoader(cl)
- def getContextLoader() = Thread.currentThread.getContextClassLoader()
- def getSystemLoader(): ScalaClassLoader = ScalaClassLoader(null)
- def defaultParentClassLoader() = findExtClassLoader()
-
- def fromURLs(urls: Seq[URL], parent: ClassLoader = defaultParentClassLoader()): URLClassLoader =
- new URLClassLoader(urls.toList, parent)
+ def fromURLs(urls: Seq[URL], parent: ClassLoader = null): URLClassLoader =
+ new URLClassLoader(urls, parent)
/** True if supplied class exists in supplied path */
def classExists(urls: Seq[URL], name: String): Boolean =
- (fromURLs(urls) tryToLoadClass name).isDefined
-
- // we cannot use the app classloader here or we get what looks to
- // be classloader deadlock, but if we pass null we bypass the extension
- // classloader and our extensions, so we search the hierarchy to find
- // the classloader whose parent is null. Resolves bug #857.
- def findExtClassLoader(): JavaClassLoader = {
- def search(cl: JavaClassLoader): JavaClassLoader = {
- if (cl == null) null
- else if (cl.getParent == null) cl
- else search(cl.getParent)
- }
-
- search(getContextLoader())
- }
+ fromURLs(urls) tryToLoadClass name isDefined
/** Finding what jar a clazz or instance came from */
- def origin(x: Any): Option[URL] = originOfClass(x.asInstanceOf[AnyRef].getClass)
+ def origin(x: Any): Option[URL] = originOfClass(x.getClass)
def originOfClass(x: Class[_]): Option[URL] =
Option(x.getProtectionDomain.getCodeSource) flatMap (x => Option(x.getLocation))
}
diff --git a/src/compiler/scala/tools/nsc/util/ScalaPrefs.scala b/src/compiler/scala/tools/nsc/util/ScalaPrefs.scala
deleted file mode 100644
index 03e0f54..0000000
--- a/src/compiler/scala/tools/nsc/util/ScalaPrefs.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package util
-
-import io.Sources
-
-trait ScalaPrefs {
- def codeSources: Sources
- def exceptionFormatter: Exceptional.Formatter
-}
-
-trait LowPriorityScalaPrefs {
- implicit object DefaultScalaPrefs extends ScalaPrefs {
- def codeSources = Sources.defaultSources
- def exceptionFormatter = Exceptional.Formatter(this)
- }
-}
-
-object ScalaPrefs extends LowPriorityScalaPrefs {
- def apply(implicit prefs: ScalaPrefs): ScalaPrefs = prefs
-}
diff --git a/src/compiler/scala/tools/nsc/util/Set.scala b/src/compiler/scala/tools/nsc/util/Set.scala
deleted file mode 100644
index 2bc2526..0000000
--- a/src/compiler/scala/tools/nsc/util/Set.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package util
-
-/** A common class for lightweight sets.
- */
-abstract class Set[T <: AnyRef] {
-
- def findEntry(x: T): T
-
- def addEntry(x: T): Unit
-
- def iterator: Iterator[T]
-
- def foreach[U](f: T => U): Unit = iterator foreach f
-
- def apply(x: T): Boolean = contains(x)
-
- @deprecated("use `iterator' instead", "2.9.0") def elements = iterator
-
- def contains(x: T): Boolean =
- findEntry(x) ne null
-
- def toList = iterator.toList
-
-}
diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
index a8499fc..2b87280 100644
--- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala
+++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -7,14 +7,12 @@ package scala.tools
package nsc
package util
-import java.io.{File, FileInputStream, PrintStream, IOException}
+import java.io.{File, FileInputStream, PrintStream}
import java.lang.Long.toHexString
import java.lang.Float.intBitsToFloat
import java.lang.Double.longBitsToDouble
-
-import cmd.program.Simple
-import symtab.{ Flags, Names }
-import scala.reflect.generic.{ PickleBuffer, PickleFormat }
+import scala.reflect.internal.{Flags, Names}
+import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
import interpreter.ByteCode.scalaSigBytesForPath
object ShowPickled extends Names {
@@ -119,21 +117,18 @@ object ShowPickled extends Names {
result.toInt
}
- def printFile(buf: PickleBuffer, out: PrintStream): Unit = printFile(buf, out, false)
- def printFile(buf: PickleBuffer, out: PrintStream, bare: Boolean) {
+ def printFile(buf: PickleBuffer, out: PrintStream) {
out.println("Version " + buf.readNat() + "." + buf.readNat())
val index = buf.createIndex
val entryList = makeEntryList(buf, index)
buf.readIndex = 0
- /** A print wrapper which discards everything if bare is true.
- */
- def p(s: String) = if (!bare) out print s
+ def p(s: String) = out print s
def printNameRef() {
val idx = buf.readNat()
val name = entryList nameAt idx
- val toPrint = if (bare) " " + name else " %s(%s)".format(idx, name)
+ val toPrint = " %s(%s)".format(idx, name)
out print toPrint
}
@@ -156,7 +151,7 @@ object ShowPickled extends Names {
val accessBoundary = (
for (idx <- privateWithin) yield {
val s = entryList nameAt idx
- if (bare) s else idx + "(" + s + ")"
+ idx + "(" + s + ")"
}
)
val flagString = {
@@ -197,7 +192,7 @@ object ShowPickled extends Names {
tag match {
case TERMname =>
out.print(" ")
- out.print(newTermName(buf.bytes, buf.readIndex, len).toString())
+ out.print(newTermName(buf.bytes, buf.readIndex, len).toString)
buf.readIndex = end
case TYPEname =>
out.print(" ")
@@ -283,31 +278,18 @@ object ShowPickled extends Names {
try Some(new PickleBuffer(data, 0, data.length))
catch { case _: Exception => None }
- def show(what: String, pickle: PickleBuffer, bare: Boolean) = {
+ def show(what: String, pickle: PickleBuffer) = {
Console.println(what)
val saved = pickle.readIndex
pickle.readIndex = 0
- printFile(pickle, Console.out, bare)
+ printFile(pickle, Console.out)
pickle.readIndex = saved
}
- private lazy val ShowPickledSpec =
- Simple(
- Simple.scalaProgramInfo("showPickled", "Usage: showPickled [--bare] <classname>"),
- List("--bare" -> "suppress numbers in output"),
- Nil,
- null
- )
-
- /** Option --bare suppresses numbers so the output can be diffed.
- */
def main(args: Array[String]) {
- val runner = ShowPickledSpec instance args
- import runner._
-
- residualArgs foreach { arg =>
+ args foreach { arg =>
(fromFile(arg) orElse fromName(arg)) match {
- case Some(pb) => show(arg + ":", pb, parsed isSet "--bare")
+ case Some(pb) => show(arg + ":", pb)
case _ => Console.println("Cannot read " + arg)
}
}
diff --git a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
new file mode 100644
index 0000000..2601798
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
@@ -0,0 +1,19 @@
+package scala.tools.nsc
+package util
+
+// todo: We should unify this with Tracer. I'd do it but Tracer is
+// too complicated for me to understand quickly.
+import java.io.PrintStream
+
+/** A simple tracer
+ * @param out: The print stream where trace info shoul be sent
+ * @param enabled: A condition that must be true for trace info to be produced.
+ */
+class SimpleTracer(out: PrintStream, enabled: Boolean = true) {
+ def apply[T](msg: => String)(value: T): T = {
+ if (enabled) out.println(msg+value)
+ value
+ }
+ def withOutput(out: PrintStream) = new SimpleTracer(out, enabled)
+ def when(enabled: Boolean): SimpleTracer = new SimpleTracer(out, enabled)
+}
diff --git a/src/compiler/scala/tools/nsc/util/SourceFile.scala b/src/compiler/scala/tools/nsc/util/SourceFile.scala
deleted file mode 100644
index bc0f706..0000000
--- a/src/compiler/scala/tools/nsc/util/SourceFile.scala
+++ /dev/null
@@ -1,145 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-
-package scala.tools.nsc
-package util
-
-import io.{ AbstractFile, VirtualFile }
-import scala.collection.mutable.ArrayBuffer
-import annotation.tailrec
-import java.util.regex.Pattern
-import java.io.IOException
-import Chars._
-
-/** abstract base class of a source file used in the compiler */
-abstract class SourceFile {
- def content : Array[Char] // normalized, must end in SU
- def file : AbstractFile
- def isLineBreak(idx : Int) : Boolean
- def isSelfContained: Boolean
- def length : Int
- def position(offset: Int) : Position = {
- assert(offset < length, file + ": " + offset + " >= " + length)
- new OffsetPosition(this, offset)
- }
- def position(line: Int, column: Int) : Position = new OffsetPosition(this, lineToOffset(line) + column)
- def offsetToLine(offset: Int): Int
- def lineToOffset(index : Int): Int
- /** Map a position to a position in the underlying source file.
- * For regular source files, simply return the argument.
- */
- def positionInUltimateSource(position: Position) = position
- override def toString(): String = file.name /* + ":" + content.length */
- def dbg(offset: Int) = (new OffsetPosition(this, offset)).dbgString
- def path = file.path
-
- def beginsWith(offset: Int, text: String): Boolean =
- (content drop offset) startsWith text
-
- def lineToString(index: Int): String =
- content drop lineToOffset(index) takeWhile (c => !isLineBreakChar(c.toChar)) mkString
-
- @tailrec
- final def skipWhitespace(offset: Int): Int =
- if (content(offset).isWhitespace) skipWhitespace(offset + 1) else offset
-
- def identifier(pos: Position): Option[String] = None
-}
-
-object ScriptSourceFile {
- /** Length of the script header from the given content, if there is one.
- * The header begins with "#!" or "::#!" and ends with a line starting
- * with "!#" or "::!#".
- */
- def headerLength(cs: Array[Char]): Int = {
- val headerPattern = Pattern.compile("""^(::)?!#.*(\r|\n|\r\n)""", Pattern.MULTILINE)
- val headerStarts = List("#!", "::#!")
-
- if (headerStarts exists (cs startsWith _)) {
- val matcher = headerPattern matcher cs.mkString
- if (matcher.find) matcher.end
- else throw new IOException("script file does not close its header with !# or ::!#")
- }
- else 0
- }
- def stripHeader(cs: Array[Char]): Array[Char] = cs drop headerLength(cs)
-
- def apply(file: AbstractFile, content: Array[Char]) = {
- val underlying = new BatchSourceFile(file, content)
- val headerLen = headerLength(content)
- val stripped = new ScriptSourceFile(underlying, content drop headerLen, headerLen)
-
- stripped
- }
-}
-import ScriptSourceFile._
-
-class ScriptSourceFile(underlying: BatchSourceFile, content: Array[Char], override val start: Int) extends BatchSourceFile(underlying.file, content) {
- override def isSelfContained = false
-
- override def positionInUltimateSource(pos: Position) =
- if (!pos.isDefined) super.positionInUltimateSource(pos)
- else new OffsetPosition(underlying, pos.point + start)
-}
-
-/** a file whose contents do not change over time */
-class BatchSourceFile(val file : AbstractFile, val content: Array[Char]) extends SourceFile {
-
- def this(_file: AbstractFile) = this(_file, _file.toCharArray)
- def this(sourceName: String, cs: Seq[Char]) = this(new VirtualFile(sourceName), cs.toArray)
- def this(file: AbstractFile, cs: Seq[Char]) = this(file, cs.toArray)
-
- override def equals(that : Any) = that match {
- case that : BatchSourceFile => file.path == that.file.path && start == that.start
- case _ => false
- }
- override def hashCode = file.path.## + start.##
- val length = content.length
- def start = 0
- def isSelfContained = true
-
- override def identifier(pos: Position) =
- if (pos.isDefined && pos.source == this && pos.point != -1) {
- def isOK(c: Char) = isIdentifierPart(c) || isOperatorPart(c)
- Some(new String(content drop pos.point takeWhile isOK))
- } else {
- super.identifier(pos)
- }
-
- def isLineBreak(idx: Int) =
- if (idx >= length) false else {
- val ch = content(idx)
- // don't identify the CR in CR LF as a line break, since LF will do.
- if (ch == CR) (idx + 1 == length) || (content(idx + 1) != LF)
- else isLineBreakChar(ch)
- }
-
- def calculateLineIndices(cs: Array[Char]) = {
- val buf = new ArrayBuffer[Int]
- buf += 0
- for (i <- 0 until cs.length) if (isLineBreak(i)) buf += i + 1
- buf += cs.length // sentinel, so that findLine below works smoother
- buf.toArray
- }
- private lazy val lineIndices: Array[Int] = calculateLineIndices(content)
-
- def lineToOffset(index : Int): Int = lineIndices(index)
-
- private var lastLine = 0
-
- /** Convert offset to line in this source file
- * Lines are numbered from 0
- */
- def offsetToLine(offset: Int): Int = {
- val lines = lineIndices
- def findLine(lo: Int, hi: Int, mid: Int): Int =
- if (offset < lines(mid)) findLine(lo, mid - 1, (lo + mid - 1) / 2)
- else if (offset >= lines(mid + 1)) findLine(mid + 1, hi, (mid + 1 + hi) / 2)
- else mid
- lastLine = findLine(0, lines.length, lastLine)
- lastLine
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/Statistics.scala b/src/compiler/scala/tools/nsc/util/Statistics.scala
deleted file mode 100644
index b6e61a4..0000000
--- a/src/compiler/scala/tools/nsc/util/Statistics.scala
+++ /dev/null
@@ -1,280 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package util
-
-object Statistics {
-
- private var _enabled = false
-
- def enabled = _enabled
- def enabled_=(cond: Boolean) = {
- if (cond && !_enabled) {
- val test = new Timer()
- val start = System.nanoTime()
- var total = 0L
- for (i <- 1 to 10000) {
- val time = System.nanoTime()
- total += System.nanoTime() - time
- }
- val total2 = System.nanoTime() - start
- println("Enabling statistics, measuring overhead = "+
- total/10000.0+"ns to "+total2/10000.0+"ns per timer")
- _enabled = true
- }
- }
-
- var phasesShown = List("parser", "typer", "erasure", "cleanup")
-
- def currentTime() =
- if (_enabled) System.nanoTime() else 0L
-
- private def showPercent(x: Double, base: Double) =
- if (base == 0) "" else " ("+"%2.1f".format(x / base * 100)+"%)"
-
- def incCounter(c: Counter) {
- if (_enabled) c.value += 1
- }
-
- def incCounter(c: Counter, delta: Int) {
- if (_enabled) c.value += delta
- }
-
- def startCounter(sc: SubCounter): IntPair =
- if (_enabled) sc.start() else null
-
- def stopCounter(sc: SubCounter, start: IntPair) {
- if (_enabled) sc.stop(start)
- }
-
- def startTimer(tm: Timer): LongPair =
- if (_enabled) tm.start() else null
-
- def stopTimer(tm: Timer, start: LongPair) {
- if (_enabled) tm.stop(start)
- }
-
- case class IntPair(x: Int, y: Int)
- case class LongPair(x: Long, y: Long)
-
- class Counter {
- var value: Int = 0
- override def toString = value.toString
- }
-
- class SubCounter(c: Counter) {
- var value: Int = 0
- def start(): IntPair =
- if (_enabled) IntPair(value, c.value) else null
- def stop(prev: IntPair) {
- if (_enabled) {
- val IntPair(value0, cvalue0) = prev
- value = value0 + c.value - cvalue0
- }
- }
- override def toString =
- value+showPercent(value, c.value)
- }
-
- class Timer {
- var nanos: Long = 0
- var timings = 0
- def start(): LongPair =
- if (_enabled) {
- timings += 1
- LongPair(nanos, System.nanoTime())
- } else null
- def stop(prev: LongPair) {
- if (_enabled) {
- val LongPair(nanos0, start) = prev
- nanos = nanos0 + System.nanoTime() - start
- timings += 1
- }
- }
- override def toString = (timings/2)+" spans, "+nanos.toString+"ns"
- }
-
- class ClassCounts extends scala.collection.mutable.HashMap[Class[_], Int] {
- override def default(key: Class[_]) = 0
- }
-
- var nodeByType = new ClassCounts
-
- var microsByType = new ClassCounts
- var visitsByType = new ClassCounts
- var pendingTreeTypes: List[Class[_]] = List()
- var typerTime: Long = 0L
-
- val singletonBaseTypeSeqCount = new Counter
- val compoundBaseTypeSeqCount = new Counter
- val typerefBaseTypeSeqCount = new Counter
- val findMemberCount = new Counter
- val noMemberCount = new Counter
- val multMemberCount = new Counter
- val findMemberNanos = new Timer
- val asSeenFromCount = new Counter
- val asSeenFromNanos = new Timer
- val subtypeCount = new Counter
- val subtypeNanos = new Timer
- val sametypeCount = new Counter
- val rawTypeCount = new Counter
- val rawTypeFailed = new SubCounter(rawTypeCount)
- val findMemberFailed = new SubCounter(findMemberCount)
- val subtypeFailed = new SubCounter(subtypeCount)
- val rawTypeImpl = new SubCounter(rawTypeCount)
- val findMemberImpl = new SubCounter(findMemberCount)
- val subtypeImpl = new SubCounter(subtypeCount)
- val baseTypeSeqCount = new Counter
- val baseTypeSeqLenTotal = new Counter
- val typeSymbolCount = new Counter
- val classSymbolCount = new Counter
- val typedApplyCount = new Counter
- val typedIdentCount = new Counter
- val typedSelectCount = new Counter
- val typerNanos = new Timer
- val classReadNanos = new Timer
-
- val failedApplyNanos = new Timer
- val failedOpEqNanos = new Timer
- val failedSilentNanos = new Timer
-
- val implicitSearchCount = new Counter
- val implicitNanos = new Timer
- val oftypeImplicitHits = new Counter
- val inscopeImplicitHits = new Counter
-
- val triedImplicits = new Counter
- val plausiblyCompatibleImplicits = new Counter
- val matchingImplicits = new Counter
- val typedImplicits = new Counter
- val foundImplicits = new Counter
-
- val inscopeSucceedNanos = new Timer
- val inscopeFailNanos = new Timer
- val oftypeSucceedNanos = new Timer
- val oftypeFailNanos = new Timer
- val implicitCacheHits = new Counter
- val implicitCacheMisses = new Counter
- val improvesCount = new Counter
- val improvesCachedCount = new Counter
- val subtypeAppInfos = new SubCounter(subtypeCount)
- val subtypeImprovCount = new SubCounter(subtypeCount)
- val subtypeETNanos = new Timer
- val matchesPtNanos = new Timer
- val ctr1 = new Counter
- val ctr2 = new Counter
- val ctr3 = new Counter
- val ctr4 = new Counter
- val counter1: SubCounter = new SubCounter(subtypeCount)
- val counter2: SubCounter = new SubCounter(subtypeCount)
- val timer1: Timer = new Timer
- val timer2: Timer = new Timer
-}
-
-abstract class Statistics {
-
- import Statistics._
-
- val global: Global
- import global._
-
- def countNodes(tree: Tree, counts: ClassCounts) {
- for (t <- tree) counts(t.getClass) += 1
- counts
- }
-
- def showRelative(base: Long)(value: Long) =
- value+showPercent(value, base)
-
- def showRelTyper(timer: Timer) =
- timer+showPercent(timer.nanos, typerNanos.nanos)
-
- def showCounts(counts: ClassCounts) =
- counts.toSeq.sortWith(_._2 > _._2).map {
- case (cls, cnt) =>
- cls.toString.substring(cls.toString.lastIndexOf("$") + 1)+": "+cnt
- }
-
- def print(phase: Phase) = if (phasesShown contains phase.name) {
- inform("*** Cumulative statistics at phase " + phase)
- inform("#created tree nodes : " + nodeCount)
- inform("#created tree nodes by type: "+showCounts(nodeByType))
- if (phase.name != "parser") {
- val counts = new ClassCounts
- for (u <- currentRun.units; t <- u.body) counts(t.getClass) += 1
- inform("#retained nodes : " + counts.values.sum)
- inform("#retained nodes by type : " + showCounts(counts))
- inform("#typechecked identifiers : " + typedIdentCount)
- inform("#typechecked selections : " + typedSelectCount)
- inform("#typechecked applications: " + typedApplyCount)
- inform("#raw type creations : " + rawTypeCount)
- inform(" of which in failed : " + rawTypeFailed)
- inform(" of which in implicits : " + rawTypeImpl)
- inform("#unique types : " + uniqueTypeCount)
- inform("#symbols : " + symbolCount)
- inform(" of which type symbols : " + typeSymbolCount)
- inform(" of which class symbols : " + classSymbolCount)
- inform("#base type seqs : " + baseTypeSeqCount)
- inform("avg base type seq length : " + baseTypeSeqLenTotal.value.toFloat / baseTypeSeqCount.value)
- inform("#singleton base type seqs: " + singletonBaseTypeSeqCount)
- inform("#compound base type seqs : " + compoundBaseTypeSeqCount)
- inform("#typeref base type seqs : " + typerefBaseTypeSeqCount)
- inform("#findMember ops : " + findMemberCount)
- inform(" of which in failed : " + findMemberFailed)
- inform(" of which in implicits : " + findMemberImpl)
- inform("#notfound member : " + noMemberCount)
- inform("#multiple member : " + multMemberCount)
- inform("#asSeenFrom ops : " + asSeenFromCount)
- inform("#subtype : " + subtypeCount)
- inform(" of which in failed : " + subtypeFailed)
- inform(" of which in implicits : " + subtypeImpl)
- inform(" of which in app impl : " + subtypeAppInfos)
- inform(" of which in improv : " + subtypeImprovCount)
- inform("#sametype : " + sametypeCount)
- inform("ms type-flow-analysis: " + analysis.timer.millis)
-
- if (phase.name == "typer") {
- inform("time spent typechecking : "+showRelTyper(typerNanos))
- inform("time classfilereading : "+showRelTyper(classReadNanos))
- inform("time spent in implicits : "+showRelTyper(implicitNanos))
- inform(" successful in scope : "+showRelTyper(inscopeSucceedNanos))
- inform(" failed in scope : "+showRelTyper(inscopeFailNanos))
- inform(" successful of type : "+showRelTyper(oftypeSucceedNanos))
- inform(" failed of type : "+showRelTyper(oftypeFailNanos))
- inform(" assembling parts : "+showRelTyper(subtypeETNanos))
- inform(" matchesPT : "+showRelTyper(matchesPtNanos))
- inform("implicit cache hits : "+showRelative(implicitCacheHits.value + implicitCacheMisses.value)(implicitCacheHits.value))
- inform("time spent in failed : "+showRelTyper(failedSilentNanos))
- inform(" failed apply : "+showRelTyper(failedApplyNanos))
- inform(" failed op= : "+showRelTyper(failedOpEqNanos))
- inform("micros by tree node : "+showCounts(microsByType))
- inform("#visits by tree node : "+showCounts(visitsByType))
- val average = new ClassCounts
- for (c <- microsByType.keysIterator) average(c) = microsByType(c)/visitsByType(c)
- inform("avg micros by tree node : "+showCounts(average))
- inform("time spent in <:< : "+showRelTyper(subtypeNanos))
- inform("time spent in findmember : "+showRelTyper(findMemberNanos))
- inform("time spent in asSeenFrom : "+showRelTyper(asSeenFromNanos))
- inform("#implicit searches : " + implicitSearchCount)
- inform("#tried, plausible, matching, typed, found implicits: "+triedImplicits+", "+plausiblyCompatibleImplicits+", "+matchingImplicits+", "+typedImplicits+", "+foundImplicits)
- inform("#implicit improves tests : " + improvesCount)
- inform("#implicit improves cached: " + improvesCachedCount)
- inform("#implicit inscope hits : " + inscopeImplicitHits)
- inform("#implicit oftype hits : " + oftypeImplicitHits)
- }
-
- if (ctr1 != null) inform("#ctr1 : " + ctr1)
- if (ctr2 != null) inform("#ctr2 : " + ctr2)
- if (ctr3 != null) inform("#ctr3 : " + ctr3)
- if (ctr4 != null) inform("#ctr4 : " + ctr4)
- if (counter1 != null) inform("#counter1 : " + counter1)
- if (counter2 != null) inform("#counter2 : " + counter2)
- if (timer1 != null) inform("#timer1 : " + timer1)
- if (timer2 != null) inform("#timer2 : " + timer2)
- //for (t <- uniques.iterator) println("unique: "+t)
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
new file mode 100644
index 0000000..225f6ca
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala
@@ -0,0 +1,38 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package util
+
+import scala.reflect.internal.util.Statistics
+
+abstract class StatisticsInfo {
+
+ val global: Global
+ import global._
+ import scala.reflect.internal.TreesStats.nodeByType
+
+ val phasesShown = List("parser", "typer", "patmat", "erasure", "cleanup")
+
+ val retainedCount = Statistics.newCounter("#retained tree nodes")
+ val retainedByType = Statistics.newByClass("#retained tree nodes by type")(Statistics.newCounter(""))
+
+ def print(phase: Phase) = if (phasesShown contains phase.name) {
+ inform("*** Cumulative statistics at phase " + phase)
+ retainedCount.value = 0
+ for (c <- retainedByType.keys)
+ retainedByType(c).value = 0
+ for (u <- currentRun.units; t <- u.body) {
+ retainedCount.value += 1
+ retainedByType(t.getClass).value += 1
+ }
+
+ val quants =
+ if (phase.name == "parser") Seq(treeNodeCount, nodeByType, retainedCount, retainedByType)
+ else Statistics.allQuantities
+
+ for (q <- quants if q.showAt(phase.name)) inform(q.line)
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/util/TableDef.scala b/src/compiler/scala/tools/nsc/util/TableDef.scala
deleted file mode 100644
index 10c63ee..0000000
--- a/src/compiler/scala/tools/nsc/util/TableDef.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-package scala.tools.nsc
-package util
-
-import TableDef._
-
-/** A class for representing tabular data in a way that preserves
- * its inner beauty. See Exceptional for an example usage.
- * One creates an instance of TableDef by defining the columns of
- * the table, then uses that to create an instance of Table by
- * passing in a sequence of rows.
- */
-class TableDef[T](_cols: Column[T]*) {
- /** These operators are about all there is to it.
- *
- * ~ appends a column to the table
- * >> creates a right-justified column and appends it
- * << creates a left-justified column and appends it
- * >+ specifies a string to separate the previous column from the next.
- * if none is specified, a space is used.
- */
- def ~(next: Column[T]) = retThis(cols :+= next)
- def >>(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, false)
- def <<(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, true)
- def >+(sep: String) = retThis(separators += ((cols.size - 1, sep)))
-
- /** Below this point should all be considered private/internal.
- */
- private var cols: List[Column[T]] = _cols.toList
- private var separators: Map[Int, String] = Map()
-
- def defaultSep(index: Int) = if (index > (cols.size - 2)) "" else " "
- def sepAfter(i: Int): String = separators.getOrElse(i, defaultSep(i))
- def sepWidths = cols.indices map (i => sepAfter(i).length)
-
- def columns = cols
- def colNames = cols map (_.name)
- def colFunctions = cols map (_.f)
- def colApply(el: T) = colFunctions map (f => f(el))
- def retThis(body: => Unit): this.type = { body ; this }
-
- class Table(val rows: Seq[T]) extends Seq[T] {
- def iterator = rows.iterator
- def apply(index: Int) = rows(index)
- def length = rows.length
-
- def maxColWidth(col: Column[T]) = col.name +: (rows map col.f) map (_.toString.length) max
- def specs = cols map (_ formatSpec rows)
-
- val colWidths = cols map maxColWidth
- val rowFormat = mkFormatString(sepAfter)
- val headFormat = mkFormatString(i => " " * sepWidths(i))
- val argLists = rows map colApply
-
- val headers = List(
- headFormat.format(colNames: _*),
- (colWidths, sepWidths).zipped map ((w1, w2) => "-" * w1 + " " * w2) mkString
- )
-
- def mkFormatString(sepf: Int => String): String =
- specs.zipWithIndex map { case (c, i) => c + sepf(i) } mkString
-
- def pp(): Unit = allToSeq foreach println
-
- def toFormattedSeq = argLists map (xs => rowFormat.format(xs: _*))
- def allToSeq = headers ++ toFormattedSeq
-
- override def toString = allToSeq mkString "\n"
- }
-
- def formatterFor(rows: Seq[T]): T => String = {
- val formatStr = new Table(rows).rowFormat
-
- x => formatStr.format(colApply(x) : _*)
- }
-
- def table(rows: Seq[T]) = new Table(rows)
-
- override def toString = cols.mkString("TableDef(", ", ", ")")
-}
-
-object TableDef {
- case class Column[-T](name: String, f: T => Any, left: Boolean) {
- def maxWidth(elems: Seq[T]): Int = name +: (elems map f) map (_.toString.length) max
- def formatSpec(elems: Seq[T]): String = {
- val justify = if (left) "-" else ""
- "%" + justify + maxWidth(elems) + "s"
- }
- override def toString = {
- val justify = if (left) "<<" else ">>"
- justify + "(" + name + ")"
- }
- }
-
- def apply[T](cols: Column[T]*) = new TableDef[T](cols: _*)
-}
diff --git a/src/compiler/scala/tools/nsc/util/Tracer.scala b/src/compiler/scala/tools/nsc/util/Tracer.scala
deleted file mode 100644
index 1dd21b2..0000000
--- a/src/compiler/scala/tools/nsc/util/Tracer.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package util
-
-import java.io.PrintStream
-
-class Tracer(enabled: () => Boolean) {
- def out: PrintStream = System.out
- def intoString(x: Any): String = "" + x
- def stringify(x: Any): String = x match {
- case null => "null"
- case x: TraversableOnce[_] => x map stringify mkString ", "
- case x: Product => stringify(x.productIterator)
- case x: AnyRef => intoString(x)
- }
-
- private val LBRACE = "{"
- private val RBRACE = "}"
- private var indentLevel = 0
- private def ind(s: String) = (" " * (indentLevel * 2)) + s
- private def indented[T](body: => T): T = {
- indentLevel += 1
- try body
- finally indentLevel -= 1
- }
- private def p(s: String) = {
- out.print(s)
- out.flush()
- }
- private def pin[T](x: T): T = {
- p(ind("" + x))
- x
- }
- def apply[T](name: String, args: => Any)(body: => T): T = {
- val result = body
- if (enabled()) {
- // concise output optimization
- val boolResult = result match {
- case x: Boolean => Some(x)
- case _ => None
- }
- p(ind("%s(%s) = %s\n".format(
- name,
- stringify(args),
- boolResult getOrElse LBRACE))
- )
- if (boolResult.isEmpty) {
- indented(pin(result))
- p("\n" + ind(RBRACE))
- }
- result
- }
- else result
- }
-}
-
-object Tracer {
- def apply(enabled: => Boolean): Tracer = new Tracer(() => enabled)
-}
diff --git a/src/compiler/scala/tools/nsc/util/TreeSet.scala b/src/compiler/scala/tools/nsc/util/TreeSet.scala
index c816d1e..d2e9238 100644
--- a/src/compiler/scala/tools/nsc/util/TreeSet.scala
+++ b/src/compiler/scala/tools/nsc/util/TreeSet.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
@@ -40,12 +40,22 @@ class TreeSet[T >: Null <: AnyRef](less: (T, T) => Boolean) extends Set[T] {
tree = add(tree)
}
- def iterator = {
- def elems(t: Tree): Iterator[T] = {
- if (t eq null) Iterator.empty
- else elems(t.l) ++ (Iterator single t.elem) ++ elems(t.r)
+ def iterator = toList.iterator
+
+ override def foreach[U](f: T => U) {
+ def loop(t: Tree) {
+ if (t ne null) {
+ loop(t.l)
+ f(t.elem)
+ loop(t.r)
+ }
}
- elems(tree)
+ loop(tree)
+ }
+ override def toList = {
+ val xs = scala.collection.mutable.ListBuffer[T]()
+ foreach(xs += _)
+ xs.toList
}
override def toString(): String = {
diff --git a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
index aa1bb73..b1f4696 100644
--- a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
+++ b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
@@ -1,15 +1,15 @@
package scala.tools.nsc
package util
-import scala.collection.mutable.Queue
+import scala.collection.mutable
class WorkScheduler {
type Action = () => Unit
- private var todo = new Queue[Action]
- private var throwables = new Queue[Throwable]
- private var interruptReqs = new Queue[InterruptReq]
+ private var todo = new mutable.Queue[Action]
+ private var throwables = new mutable.Queue[Throwable]
+ private var interruptReqs = new mutable.Queue[InterruptReq]
/** Called from server: block until one of todo list, throwables or interruptReqs is nonempty */
def waitForMoreWork() = synchronized {
@@ -30,6 +30,10 @@ class WorkScheduler {
todo.dequeueAll(a => f(a).isDefined).map(a => f(a).get)
}
+ def dequeueAllInterrupts(f: InterruptReq => Unit): Unit = synchronized {
+ interruptReqs.dequeueAll { iq => f(iq); true }
+ }
+
/** Called from server: return optional exception posted by client
* Reset to no exception.
*/
@@ -50,6 +54,11 @@ class WorkScheduler {
/** Called from client: have interrupt executed by server and return result */
def doQuickly[A](op: () => A): A = {
+ val ir = askDoQuickly(op)
+ ir.getResult()
+ }
+
+ def askDoQuickly[A](op: () => A): InterruptReq { type R = A } = {
val ir = new InterruptReq {
type R = A
val todo = op
@@ -58,7 +67,7 @@ class WorkScheduler {
interruptReqs enqueue ir
notify()
}
- ir.getResult()
+ ir
}
/** Called from client: have action executed by server */
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index facac56..d34d4ee 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -8,6 +8,16 @@ package scala.tools.nsc
import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter, StringWriter }
package object util {
+
+ implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
+
+ // forwarder for old code that builds against 2.9 and 2.10
+ val Chars = scala.reflect.internal.Chars
+
+ type Set[T <: AnyRef] = scala.reflect.internal.util.Set[T]
+ type HashSet[T >: Null <: AnyRef] = scala.reflect.internal.util.HashSet[T]
+ val HashSet = scala.reflect.internal.util.HashSet
+
def onull[T](value: T, orElse: => T): T = if (value == null) orElse else value
/** Apply a function and return the passed value */
@@ -18,19 +28,32 @@ package object util {
def freqrank[T](xs: Traversable[(T, Int)]): List[(Int, T)] = xs.toList map (_.swap) sortBy (-_._1)
- /** Execute code and then wait for all Threads created during its
- * execution to complete.
+ /** Execute code and then wait for all non-daemon Threads
+ * created and begun during its execution to complete.
*/
def waitingForThreads[T](body: => T) = {
- val ts1 = sys.allThreads()
- val result = body
- val ts2 = sys.allThreads()
- val newThreads = ts2.toSet -- ts1 filterNot (_.isDaemon())
+ val (result, created) = trackingThreads(body)
+ val threads = created filterNot (_.isDaemon)
+
+ // As long as there are non-daemon, live threads (the latter
+ // condition should exclude shutdown hooks) we will wait.
+ while (threads exists (_.isAlive))
+ threads filter (_.isAlive) foreach (_.join())
- newThreads foreach (_.join())
result
}
+ /** Executes the code and returns the result and any threads
+ * which were created during its execution.
+ */
+ def trackingThreads[T](body: => T): (T, Seq[Thread]) = {
+ val ts1 = sys.allThreads()
+ val result = body
+ val ts2 = sys.allThreads()
+
+ (result, ts2 filterNot (ts1 contains _))
+ }
+
/** Given a function and a block of code, evaluates code block,
* calls function with milliseconds elapsed, and returns block result.
*/
@@ -59,4 +82,61 @@ package object util {
bs.toString()
}
def stackTraceString(ex: Throwable): String = stringFromWriter(ex printStackTrace _)
+
+ lazy val trace = new SimpleTracer(System.out)
+ lazy val errtrace = new SimpleTracer(System.err)
+
+ @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
+ val StringOps = scala.reflect.internal.util.StringOps
+
+ @deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
+ type StringOps = scala.reflect.internal.util.StringOps
+
+ @deprecated("Moved to scala.reflect.internal.util.TableDef", "2.10.0")
+ val TableDef = scala.reflect.internal.util.TableDef
+
+ @deprecated("Moved to scala.reflect.internal.util.TableDef", "2.10.0")
+ type TableDef[T] = scala.reflect.internal.util.TableDef[T]
+
+ @deprecated("scala.reflect.internal.util.WeakHashSet", "2.10.0")
+ type WeakHashSet[T <: AnyRef] = scala.reflect.internal.util.WeakHashSet[T]
+
+ @deprecated("Moved to scala.reflect.internal.util.Position", "2.10.0")
+ val Position = scala.reflect.internal.util.Position
+
+ @deprecated("Moved to scala.reflect.internal.util.Position", "2.10.0")
+ type Position = scala.reflect.internal.util.Position
+
+ @deprecated("Moved to scala.reflect.internal.util.NoPosition", "2.10.0")
+ val NoPosition = scala.reflect.internal.util.NoPosition
+
+ @deprecated("Moved to scala.reflect.internal.util.FakePos", "2.10.0")
+ val FakePos = scala.reflect.internal.util.FakePos
+
+ @deprecated("Moved to scala.reflect.internal.util.FakePos", "2.10.0")
+ type FakePos = scala.reflect.internal.util.FakePos
+
+ @deprecated("Moved to scala.reflect.internal.util.OffsetPosition", "2.10.0")
+ type OffsetPosition = scala.reflect.internal.util.OffsetPosition
+
+ @deprecated("Moved to scala.reflect.internal.util.RangePosition", "2.10.0")
+ type RangePosition = scala.reflect.internal.util.RangePosition
+
+ @deprecated("Moved to scala.reflect.internal.util.SourceFile", "2.10.0")
+ type SourceFile = scala.reflect.internal.util.SourceFile
+
+ @deprecated("Moved to scala.reflect.internal.util.NoSourceFile", "2.10.0")
+ val NoSourceFile = scala.reflect.internal.util.NoSourceFile
+
+ @deprecated("Moved to scala.reflect.internal.util.NoFile", "2.10.0")
+ val NoFile = scala.reflect.internal.util.NoFile
+
+ @deprecated("Moved to scala.reflect.internal.util.ScriptSourceFile", "2.10.0")
+ val ScriptSourceFile = scala.reflect.internal.util.ScriptSourceFile
+
+ @deprecated("Moved to scala.reflect.internal.util.ScriptSourceFile", "2.10.0")
+ type ScriptSourceFile = scala.reflect.internal.util.ScriptSourceFile
+
+ @deprecated("Moved to scala.reflect.internal.util.BatchSourceFile", "2.10.0")
+ type BatchSourceFile = scala.reflect.internal.util.BatchSourceFile
}
diff --git a/src/compiler/scala/tools/nsc/util/trace.scala b/src/compiler/scala/tools/nsc/util/trace.scala
deleted file mode 100644
index 97b3123..0000000
--- a/src/compiler/scala/tools/nsc/util/trace.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-package scala.tools.nsc
-package util
-
-object trace {
- def apply[T](msg: String)(value: T): T = {
- println(msg+value)
- value
- }
- def withFun[T, U](msg: String)(value: T)(fun: T => U): T = {
- println(msg+fun(value))
- value
- }
-}
diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala
new file mode 100644
index 0000000..d35ac43
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/FastTrack.scala
@@ -0,0 +1,41 @@
+package scala.tools
+package reflect
+
+import scala.reflect.reify.Taggers
+import scala.tools.nsc.typechecker.{Analyzer, Macros}
+
+/** Optimizes system macro expansions by hardwiring them directly to their implementations
+ * bypassing standard reflective load and invoke to avoid the overhead of Java/Scala reflection.
+ */
+trait FastTrack {
+ self: Macros with Analyzer =>
+
+ import global._
+ import definitions._
+
+ import scala.language.implicitConversions
+ private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } = new { val c: c0.type = c0 } with Taggers
+ private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } = new { val c: c0.type = c0 } with MacroImplementations
+
+ implicit def fastTrackEntry2MacroRuntime(entry: FastTrackEntry): MacroRuntime = args => entry.run(args.c)
+ type FastTrackExpander = PartialFunction[(MacroContext, Tree), Tree]
+ case class FastTrackEntry(sym: Symbol, expander: FastTrackExpander) {
+ def validate(c: MacroContext): Boolean = expander.isDefinedAt((c, c.expandee))
+ def run(c: MacroContext): Any = {
+ val result = expander((c, c.expandee))
+ c.Expr[Nothing](result)(c.WeakTypeTag.Nothing)
+ }
+ }
+
+ lazy val fastTrack: Map[Symbol, FastTrackEntry] = {
+ var registry = Map[Symbol, FastTrackEntry]()
+ implicit class BindTo(sym: Symbol) { def bindTo(expander: FastTrackExpander): Unit = if (sym != NoSymbol) registry += sym -> FastTrackEntry(sym, expander) }
+ materializeClassTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List())) => c.materializeClassTag(tt.tpe) }
+ materializeWeakTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = false) }
+ materializeTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = true) }
+ ApiUniverseReify bindTo { case (c, Apply(TypeApply(_, List(tt)), List(expr))) => c.materializeExpr(c.prefix.tree, EmptyTree, expr) }
+ ReflectRuntimeCurrentMirror bindTo { case (c, _) => scala.reflect.runtime.Macros.currentMirror(c).tree }
+ StringContext_f bindTo { case (c, app at Apply(Select(Apply(_, parts), _), args)) => c.macro_StringInterpolation_f(parts, args, app.pos) }
+ registry
+ }
+}
diff --git a/src/compiler/scala/tools/reflect/FrontEnd.scala b/src/compiler/scala/tools/reflect/FrontEnd.scala
new file mode 100644
index 0000000..f0d3d59
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/FrontEnd.scala
@@ -0,0 +1,50 @@
+package scala.tools
+package reflect
+
+import scala.reflect.internal.util.Position
+
+trait FrontEnd {
+ object severity extends Enumeration
+ class Severity(val id: Int) extends severity.Value {
+ var count: Int = 0
+ override def toString() = this match {
+ case INFO => "INFO"
+ case WARNING => "WARNING"
+ case ERROR => "ERROR"
+ case _ => "<unknown>"
+ }
+ }
+ val INFO = new Severity(0)
+ val WARNING = new Severity(1)
+ val ERROR = new Severity(2)
+
+ def hasErrors = ERROR.count > 0
+ def hasWarnings = WARNING.count > 0
+
+ case class Info(val pos: Position, val msg: String, val severity: Severity)
+ val infos = new scala.collection.mutable.LinkedHashSet[Info]
+
+ /** Handles incoming info */
+ def log(pos: Position, msg: String, severity: Severity) {
+ infos += new Info(pos, msg, severity)
+ severity.count += 1
+ display(infos.last)
+ }
+
+ /** Displays incoming info */
+ def display(info: Info): Unit
+
+ /** Services a request to drop into interactive mode */
+ def interactive(): Unit
+
+ /** Refreshes the UI */
+ def flush(): Unit = {}
+
+ /** Resets the reporter */
+ def reset(): Unit = {
+ INFO.count = 0
+ WARNING.count = 0
+ ERROR.count = 0
+ infos.clear()
+ }
+}
diff --git a/src/compiler/scala/tools/reflect/Invoked.scala b/src/compiler/scala/tools/reflect/Invoked.scala
deleted file mode 100644
index fbc528e..0000000
--- a/src/compiler/scala/tools/reflect/Invoked.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package reflect
-
-import java.lang.reflect.{ Method, Proxy }
-
-/** A class representing a single method call. It is primarily for use
- * in tandem with Mock. If the invocation did not target an InvocationHandler,
- * proxy will be null.
- */
-class Invoked private (val proxy: AnyRef, val m: Method, val args: List[AnyRef]) {
- def name = m.getName
- def arity = m.getParameterTypes.size
- def returnType = m.getReturnType
- def returns[T: Manifest] = returnType == manifest[T].erasure
-
- def invokeOn(target: AnyRef) = m.invoke(target, args: _*)
- def isObjectMethod = Set("toString", "equals", "hashCode") contains name
-
- override def toString = "Invoked: %s called with %s".format(
- m.getName,
- if (args.isEmpty) "no args" else "args '%s'".format(args mkString ", ")
- )
-}
-
-object Invoked {
- def apply(m: Method, args: Seq[Any]): Invoked = apply(null, m, args)
- def apply(proxy: AnyRef, m: Method, args: Seq[Any]): Invoked = {
- val fixedArgs = if (args == null) Nil else args.toList map (_.asInstanceOf[AnyRef])
- new Invoked(proxy, m, fixedArgs)
- }
- def unapply(x: Any) = x match {
- case x: Invoked => Some(x.proxy, x.m, x.args)
- case _ => None
- }
- object NameAndArgs {
- def unapply(x: Any) = x match {
- case x: Invoked => Some(x.name, x.args)
- case _ => None
- }
- }
- object NameAndArity {
- def unapply(x: Any) = x match {
- case x: Invoked => Some(x.name, x.arity)
- case _ => None
- }
- }
-}
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
new file mode 100644
index 0000000..f4f385f
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala
@@ -0,0 +1,171 @@
+package scala.tools.reflect
+
+import scala.reflect.macros.{ReificationException, UnexpectedReificationException}
+import scala.reflect.macros.runtime.Context
+import scala.collection.mutable.ListBuffer
+import scala.collection.mutable.Stack
+import scala.reflect.internal.util.OffsetPosition
+
+abstract class MacroImplementations {
+ val c: Context
+
+ import c.universe._
+ import definitions._
+
+ def macro_StringInterpolation_f(parts: List[Tree], args: List[Tree], origApplyPos: c.universe.Position): Tree = {
+ // the parts all have the same position information (as the expression is generated by the compiler)
+ // the args have correct position information
+
+ // the following conditions can only be violated if invoked directly
+ if (parts.length != args.length + 1) {
+ if(parts.length == 0)
+ c.abort(c.prefix.tree.pos, "too few parts")
+ else if(args.length + 1 < parts.length)
+ c.abort(if(args.length==0) c.enclosingPosition else args.last.pos,
+ "too few arguments for interpolated string")
+ else
+ c.abort(args(parts.length-1).pos,
+ "too many arguments for interpolated string")
+ }
+
+ val pi = parts.iterator
+ val bldr = new java.lang.StringBuilder
+ val evals = ListBuffer[ValDef]()
+ val ids = ListBuffer[Ident]()
+ val argStack = Stack(args : _*)
+
+ def defval(value: Tree, tpe: Type): Unit = {
+ val freshName = newTermName(c.fresh("arg$"))
+ evals += ValDef(Modifiers(), freshName, TypeTree(tpe) setPos value.pos.focus, value) setPos value.pos
+ ids += Ident(freshName)
+ }
+
+ def isFlag(ch: Char): Boolean = {
+ ch match {
+ case '-' | '#' | '+' | ' ' | '0' | ',' | '(' => true
+ case _ => false
+ }
+ }
+
+ def checkType(arg: Tree, variants: Type*): Option[Type] = {
+ variants.find(arg.tpe <:< _).orElse(
+ variants.find(c.inferImplicitView(arg, arg.tpe, _) != EmptyTree).orElse(
+ Some(variants(0))
+ )
+ )
+ }
+
+ val stdContextTags = new { val tc: c.type = c } with StdContextTags
+ import stdContextTags._
+
+ def conversionType(ch: Char, arg: Tree): Option[Type] = {
+ ch match {
+ case 'b' | 'B' =>
+ if(arg.tpe <:< NullTpe) Some(NullTpe) else Some(BooleanTpe)
+ case 'h' | 'H' =>
+ Some(AnyTpe)
+ case 's' | 'S' =>
+ Some(AnyTpe)
+ case 'c' | 'C' =>
+ checkType(arg, CharTpe, ByteTpe, ShortTpe, IntTpe)
+ case 'd' | 'o' | 'x' | 'X' =>
+ checkType(arg, IntTpe, LongTpe, ByteTpe, ShortTpe, tagOfBigInt.tpe)
+ case 'e' | 'E' | 'g' | 'G' | 'f' | 'a' | 'A' =>
+ checkType(arg, DoubleTpe, FloatTpe, tagOfBigDecimal.tpe)
+ case 't' | 'T' =>
+ checkType(arg, LongTpe, tagOfCalendar.tpe, tagOfDate.tpe)
+ case _ => None
+ }
+ }
+
+ def copyString(first: Boolean): Unit = {
+ val strTree = pi.next()
+ val rawStr = strTree match {
+ case Literal(Constant(str: String)) => str
+ case _ => throw new IllegalArgumentException("internal error: argument parts must be a list of string literals")
+ }
+ val str = StringContext.treatEscapes(rawStr)
+ val strLen = str.length
+ val strIsEmpty = strLen == 0
+ def charAtIndexIs(idx: Int, ch: Char) = idx < strLen && str(idx) == ch
+ def isPercent(idx: Int) = charAtIndexIs(idx, '%')
+ def isConversion(idx: Int) = isPercent(idx) && !charAtIndexIs(idx + 1, 'n') && !charAtIndexIs(idx + 1, '%')
+ var idx = 0
+
+ def errorAtIndex(idx: Int, msg: String) = c.error(new OffsetPosition(strTree.pos.source, strTree.pos.point + idx), msg)
+ def wrongConversionString(idx: Int) = errorAtIndex(idx, "wrong conversion string")
+ def illegalConversionCharacter(idx: Int) = errorAtIndex(idx, "illegal conversion character")
+ def nonEscapedPercent(idx: Int) = errorAtIndex(idx, "percent signs not directly following splicees must be escaped")
+
+ // STEP 1: handle argument conversion
+ // 1) "...${smth}" => okay, equivalent to "...${smth}%s"
+ // 2) "...${smth}blahblah" => okay, equivalent to "...${smth}%sblahblah"
+ // 3) "...${smth}%" => error
+ // 4) "...${smth}%n" => okay, equivalent to "...${smth}%s%n"
+ // 5) "...${smth}%%" => okay, equivalent to "...${smth}%s%%"
+ // 6) "...${smth}[%legalJavaConversion]" => okay, according to http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html
+ // 7) "...${smth}[%illegalJavaConversion]" => error
+ if (!first) {
+ val arg = argStack.pop
+ if (isConversion(0)) {
+ // PRE str is not empty and str(0) == '%'
+ // argument index parameter is not allowed, thus parse
+ // [flags][width][.precision]conversion
+ var pos = 1
+ while (pos < strLen && isFlag(str charAt pos)) pos += 1
+ while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1
+ if (pos < strLen && str.charAt(pos) == '.') {
+ pos += 1
+ while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1
+ }
+ if (pos < strLen) {
+ conversionType(str charAt pos, arg) match {
+ case Some(tpe) => defval(arg, tpe)
+ case None => illegalConversionCharacter(pos)
+ }
+ } else {
+ wrongConversionString(pos - 1)
+ }
+ idx = 1
+ } else {
+ bldr append "%s"
+ defval(arg, AnyTpe)
+ }
+ }
+
+ // STEP 2: handle the rest of the text
+ // 1) %n tokens are left as is
+ // 2) %% tokens are left as is
+ // 3) other usages of percents are reported as errors
+ if (!strIsEmpty) {
+ while (idx < strLen) {
+ if (isPercent(idx)) {
+ if (isConversion(idx)) nonEscapedPercent(idx)
+ else idx += 1 // skip n and % in %n and %%
+ }
+ idx += 1
+ }
+ bldr append (str take idx)
+ }
+ }
+
+ copyString(first = true)
+ while (pi.hasNext) {
+ copyString(first = false)
+ }
+
+ val fstring = bldr.toString
+// val expr = c.reify(fstring.format((ids.map(id => Expr(id).eval)) : _*))
+// https://issues.scala-lang.org/browse/SI-5824, therefore
+ val expr =
+ Apply(
+ Select(
+ Literal(Constant(fstring)),
+ newTermName("format")),
+ List(ids: _* )
+ );
+
+ Block(evals.toList, atPos(origApplyPos.focus)(expr)) setPos origApplyPos.makeTransparent
+ }
+
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/reflect/Mock.scala b/src/compiler/scala/tools/reflect/Mock.scala
deleted file mode 100644
index 5301816..0000000
--- a/src/compiler/scala/tools/reflect/Mock.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package reflect
-
-import java.lang.reflect.{ Method, Proxy, InvocationHandler }
-
-/** A wrapper around java dynamic proxies to make it easy to pose
- * as an interface. See SignalManager for an example usage.
- */
-trait Mock extends (Invoked => AnyRef) {
- mock =>
-
- def interfaces: List[Class[_]]
- def classLoader: ClassLoader
- def apply(invoked: Invoked): AnyRef
-
- def newProxyInstance(handler: InvocationHandler): AnyRef =
- Proxy.newProxyInstance(classLoader, interfaces.toArray, handler)
- def newProxyInstance(): AnyRef =
- newProxyInstance(newInvocationHandler())
-
- def newInvocationHandler() = new InvocationHandler {
- def invoke(proxy: AnyRef, method: Method, args: Array[AnyRef]) =
- mock(Invoked(proxy, method, args))
- }
-}
-
-/** The methods in Mock create the actual proxy instance which can be used
- * in place of the associated interface(s).
- */
-object Mock {
- /** The default implementation calls the partial function if defined, and
- * routes Object methods to the proxy: otherwise it throws an exception.
- */
- def fromInterfaces(clazz: Class[_], clazzes: Class[_]*)(pf: PartialFunction[Invoked, AnyRef]): AnyRef = {
- val ints = clazz :: clazzes.toList
- require(ints forall (_.isInterface), "All class objects must represent interfaces")
-
- val mock = new Mock {
- val interfaces = ints
- def classLoader = clazz.getClassLoader
- def apply(invoked: Invoked) =
- if (pf.isDefinedAt(invoked)) pf(invoked)
- else if (invoked.isObjectMethod) invoked invokeOn this
- else throw new NoSuchMethodException("" + invoked)
- }
- mock.newProxyInstance()
- }
- /** Tries to implement all the class's interfaces.
- */
- def fromClass(clazz: Class[_])(pf: PartialFunction[Invoked, AnyRef]): AnyRef = allInterfaces(clazz) match {
- case Nil => sys.error(clazz + " implements no interfaces.")
- case x :: xs => fromInterfaces(x, xs: _*)(pf)
- }
-}
diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala
new file mode 100644
index 0000000..f8ded56
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala
@@ -0,0 +1,40 @@
+package scala.tools
+package reflect
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.reporters.Reporter
+import scala.tools.nsc.Settings
+
+/** A version of Global that uses reflection to get class
+ * infos, instead of reading class or source files.
+ */
+class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val rootClassLoader: ClassLoader)
+ extends Global(currentSettings, reporter) with scala.tools.reflect.ReflectSetup with scala.reflect.runtime.SymbolTable {
+
+ override def transformedType(sym: Symbol) =
+ erasure.transformInfo(sym,
+ uncurry.transformInfo(sym,
+ refChecks.transformInfo(sym, sym.info)))
+
+ override def isCompilerUniverse = true
+
+ // Typically `runtimeMirror` creates a new mirror for every new classloader
+ // and shares symbols between the created mirrors.
+ //
+ // However we can't do that for the compiler.
+ // The problem is that symbol sharing violates owner chain assumptions that the compiler has.
+ //
+ // For example, we can easily end up with a situation when:
+ //
+ // Predef defined in package scala loaded by the classloader that has scala-library.jar
+ //
+ // cannot be accessed in:
+ //
+ // package scala for the rootMirror of ReflectGlobal that might correspond to a different classloader
+ //
+ // This happens because, despite the fact that `Predef` is shared between multiple `scala` packages (i.e. multiple scopes)
+ // (each mirror has its own set package symbols, because of the peculiarities of symbol loading in scala),
+ // that `Predef` symbol only has a single owner, and this messes up visibility, which is calculated based on owners, not scopes.
+ override def runtimeMirror(cl: ClassLoader): Mirror = rootMirror
+}
+
diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala
new file mode 100644
index 0000000..116ae24
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/ReflectMain.scala
@@ -0,0 +1,19 @@
+package scala.tools
+package reflect
+
+import scala.tools.nsc.Driver
+import scala.tools.nsc.Global
+import scala.tools.nsc.Settings
+import scala.tools.nsc.util.ClassPath.DefaultJavaContext
+import scala.tools.nsc.util.ScalaClassLoader
+import scala.tools.util.PathResolver
+
+object ReflectMain extends Driver {
+
+ private def classloaderFromSettings(settings: Settings) = {
+ val classpath = new PathResolver(settings).result
+ ScalaClassLoader.fromURLs(classpath.asURLs, getClass.getClassLoader)
+ }
+
+ override def newCompiler(): Global = new ReflectGlobal(settings, reporter, classloaderFromSettings(settings))
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/reflect/ReflectSetup.scala b/src/compiler/scala/tools/reflect/ReflectSetup.scala
new file mode 100644
index 0000000..f18c114
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/ReflectSetup.scala
@@ -0,0 +1,10 @@
+package scala.tools
+package reflect
+
+import scala.tools.nsc.Global
+
+/** A helper trait to initialize things that need to be set before JavaMirrors and other
+ * reflect specific traits are initialized */
+private[reflect] trait ReflectSetup { this: Global =>
+ phase = new Run().typerPhase
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/reflect/Shield.scala b/src/compiler/scala/tools/reflect/Shield.scala
deleted file mode 100644
index 1973079..0000000
--- a/src/compiler/scala/tools/reflect/Shield.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package reflect
-
-import java.lang.reflect.Constructor
-import nsc.util.ScalaClassLoader
-
-/** A support class for simplifying the otherwise disbelief-inspiring
- * process of working with classes completely reflectively. This is
- * the case with e.g. sun.misc.Signal* due to environments which are
- * antagonistic to their use. See SignalManager for an example.
- *
- * The name "Shield" is a reference to shielding the JVM from knowledge
- * of what we're doing.
- */
-trait Shield {
- def className: String
- def classLoader: ScalaClassLoader
-
- // Override this if you are more ambitious about logging or throwing.
- def onError[T >: Null](msg: String): T = null
-
- /** This is handy because all reflective calls want back an AnyRef but
- * we will often be generating Units.
- */
- protected implicit def boxedUnit(x: Unit): AnyRef = scala.runtime.BoxedUnit.UNIT
-
- lazy val clazz: Class[_] = classLoader.tryToLoadClass(className) getOrElse onError("Failed to load " + className)
- lazy val methods = clazz.getMethods.toList
-
- def constructor(paramTypes: Class[_]*) = clazz.getConstructor(paramTypes: _*).asInstanceOf[Constructor[AnyRef]]
- def method(name: String, arity: Int) = uniqueMethod(name, arity)
- def field(name: String) = clazz getField name
-
- def matchingMethods(name: String, arity: Int) = methods filter (m => nameAndArity(m) == (name, arity))
- def uniqueMethod(name: String, arity: Int) = matchingMethods(name, arity) match {
- case List(x) => x
- case _ => onError("No unique match for " + name)
- }
-}
diff --git a/src/compiler/scala/tools/reflect/SigParser.scala b/src/compiler/scala/tools/reflect/SigParser.scala
deleted file mode 100644
index 5d85778..0000000
--- a/src/compiler/scala/tools/reflect/SigParser.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package reflect
-
-import java.lang.reflect.{ GenericSignatureFormatError, Method }
-
-/** The usual reflection song and dance to avoid referencing
- * any sun.* classes.
- */
-class SigParser {
- val SunSignatureParser = "sun.reflect.generics.parser.SignatureParser"
- private lazy val makeMethod: Method =
- try Class.forName(SunSignatureParser) getMethod "make"
- catch { case t => null }
-
- def make() = makeMethod.invoke(null).asInstanceOf[SignatureParserInterface]
-
- private def wrap(op: => Any) =
- try { op ; true }
- catch { case _: GenericSignatureFormatError => false }
-
- def isParserAvailable = makeMethod != null
- def verifyClass(s: String) = isParserAvailable && wrap(make() parseClassSig s)
- def verifyMethod(s: String) = isParserAvailable && wrap(make() parseMethodSig s)
- def verifyType(s: String) = isParserAvailable && wrap(make() parseTypeSig s)
-
- type ClassSignature <: AnyRef
- type MethodTypeSignature <: AnyRef
- type TypeSignature <: AnyRef
-
- type SignatureParserInterface = {
- def isParserAvailable: Boolean
- def parseClassSig(s: String): ClassSignature
- def parseMethodSig(s: String): MethodTypeSignature
- def parseTypeSig(s: String): TypeSignature
- }
-}
-object SigParser extends SigParser { }
diff --git a/src/compiler/scala/tools/reflect/StdTags.scala b/src/compiler/scala/tools/reflect/StdTags.scala
new file mode 100644
index 0000000..a3bc9b9
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/StdTags.scala
@@ -0,0 +1,58 @@
+package scala.tools
+package reflect
+
+import java.lang.{Class => jClass}
+import scala.reflect.{ClassTag, classTag}
+import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
+
+// [Eugene++] Before 2.10 is released, I suggest we don't rely on automated type tag generation
+// sure, it's convenient, but then refactoring reflection / reification becomes a pain
+// `ClassTag` tags are fine, because they don't need a reifier to be generated
+
+trait StdTags {
+ val u: ApiUniverse with Singleton
+ val m: Mirror[u.type]
+
+ lazy val tagOfListOfString: u.TypeTag[List[String]] =
+ u.TypeTag[List[String]](
+ m,
+ new TypeCreator {
+ def apply[U <: ApiUniverse with Singleton](m: Mirror[U]): U # Type = {
+ val u = m.universe
+ val pre = u.ThisType(m.staticPackage("scala.collection.immutable").moduleClass.asInstanceOf[u.Symbol])
+ u.TypeRef(pre, u.definitions.ListClass, List(u.definitions.StringClass.toTypeConstructor))
+ }
+ })
+
+ private def tagOfStaticClass[T: ClassTag]: u.TypeTag[T] =
+ u.TypeTag[T](
+ m,
+ new TypeCreator {
+ def apply[U <: ApiUniverse with Singleton](m: Mirror[U]): U # Type =
+ m.staticClass(classTag[T].runtimeClass.getName).toTypeConstructor.asInstanceOf[U # Type]
+ })
+ lazy val tagOfInt = u.TypeTag.Int
+ lazy val tagOfString = tagOfStaticClass[String]
+ lazy val tagOfFile = tagOfStaticClass[scala.tools.nsc.io.File]
+ lazy val tagOfDirectory = tagOfStaticClass[scala.tools.nsc.io.Directory]
+ lazy val tagOfStdReplVals = tagOfStaticClass[scala.tools.nsc.interpreter.StdReplVals]
+ lazy val tagOfIMain = tagOfStaticClass[scala.tools.nsc.interpreter.IMain]
+ lazy val tagOfThrowable = tagOfStaticClass[java.lang.Throwable]
+ lazy val tagOfClassLoader = tagOfStaticClass[java.lang.ClassLoader]
+ lazy val tagOfBigInt = tagOfStaticClass[BigInt]
+ lazy val tagOfBigDecimal = tagOfStaticClass[BigDecimal]
+ lazy val tagOfCalendar = tagOfStaticClass[java.util.Calendar]
+ lazy val tagOfDate = tagOfStaticClass[java.util.Date]
+}
+
+object StdRuntimeTags extends StdTags {
+ val u: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe
+ val m = u.runtimeMirror(getClass.getClassLoader)
+ // we need getClass.getClassLoader to support the stuff from scala-compiler.jar
+}
+
+abstract class StdContextTags extends StdTags {
+ val tc: scala.reflect.macros.Context
+ val u: tc.universe.type = tc.universe
+ val m = tc.mirror
+}
diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala
new file mode 100644
index 0000000..ab814b6
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/ToolBox.scala
@@ -0,0 +1,104 @@
+package scala.tools
+package reflect
+
+trait ToolBox[U <: scala.reflect.api.Universe] {
+
+ /** Underlying universe of a ToolBox
+ */
+ val u: U
+
+ /** Underlying mirror of a ToolBox
+ */
+ val mirror: u.Mirror
+
+ /** Front end of the toolbox.
+ *
+ * Accumulates and displays warnings and errors, can drop to interactive mode (if supported).
+ * The latter can be useful to study the typechecker or to debug complex macros.
+ *
+ * [[scala.tools.reflect]] provides two predefined front ends that can be created using
+ * [[scala.tools.reflect.mkSilentFrontEnd]] and [[scala.tools.reflect.mkConsoleFrontEnd]].
+ */
+ def frontEnd: FrontEnd
+
+ /** Typechecks a tree using this ToolBox.
+ * This populates symbols and types of the tree and possibly transforms it to reflect certain desugarings.
+ *
+ * If the tree has unresolved type variables (represented as instances of `FreeTypeSymbol` symbols),
+ * then they all have to be resolved first using `Tree.substituteTypes`, or an error occurs.
+ *
+ * If `silent` is false, `TypeError` will be thrown in case of a typecheck error.
+ * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs.
+ * Such errors don't vanish and can be inspected by turning on -Ydebug.
+ *
+ * Typechecking can be steered with the following optional parameters:
+ * `withImplicitViewsDisabled` recursively prohibits implicit views (though, implicit vals will still be looked up and filled in), default value is false
+ * `withMacrosDisabled` recursively prohibits macro expansions and macro-based implicits, default value is false
+ */
+ def typeCheck(tree: u.Tree, pt: u.Type = u.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree
+
+ /** Infers an implicit value of the expected type `pt` in top-level context.
+ * Optional `pos` parameter provides a position that will be associated with the implicit search.
+ *
+ * As mentioned in https://groups.google.com/forum/#!topic/scala-internals/ta-vbUT6JE8
+ * this API won't take into account the lexical context of the callsite, because
+ * currently it's impossible to reify it.
+ *
+ * If `silent` is false, `TypeError` will be thrown in case of an inference error.
+ * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs.
+ * Such errors don't vanish and can be inspected by turning on -Xlog-implicits.
+ * Unlike in `typeCheck`, `silent` is true by default.
+ */
+ def inferImplicitValue(pt: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree
+
+ /** Infers an implicit view from the provided tree `tree` from the type `from` to the type `to` in the toplevel context.
+ * Optional `pos` parameter provides a position that will be associated with the implicit search.
+ *
+ * As mentioned in https://groups.google.com/forum/#!topic/scala-internals/ta-vbUT6JE8
+ * this API won't take into account the lexical context of the callsite, because
+ * currently it's impossible to reify it.
+ *
+ * If `silent` is false, `TypeError` will be thrown in case of an inference error.
+ * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs.
+ * Such errors don't vanish and can be inspected by turning on -Xlog-implicits.
+ * Unlike in `typeCheck`, `silent` is true by default.
+ */
+ def inferImplicitView(tree: u.Tree, from: u.Type, to: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree
+
+ /** Recursively resets symbols and types in a given tree.
+ *
+ * Note that this does not revert the tree to its pre-typer shape.
+ * For more info, read up https://issues.scala-lang.org/browse/SI-5464.
+ */
+ def resetAllAttrs(tree: u.Tree): u.Tree
+
+ /** Recursively resets locally defined symbols and types in a given tree.
+ *
+ * Note that this does not revert the tree to its pre-typer shape.
+ * For more info, read up https://issues.scala-lang.org/browse/SI-5464.
+ */
+ def resetLocalAttrs(tree: u.Tree): u.Tree
+
+ /** .. */
+ def parse(code: String): u.Tree
+
+ /** Compiles a tree using this ToolBox.
+ *
+ * If the tree has unresolved type variables (represented as instances of `FreeTypeSymbol` symbols),
+ * then they all have to be resolved first using `Tree.substituteTypes`, or an error occurs.
+ *
+ * This spawns the compiler at the Namer phase, and pipelines the tree through that compiler.
+ * Currently `compile` does not accept trees that already typechecked, because typechecking isn't idempotent.
+ * For more info, take a look at https://issues.scala-lang.org/browse/SI-5464.
+ */
+ def compile(tree: u.Tree): () => Any
+
+ /** Compiles and runs a tree using this ToolBox.
+ * Is equivalent to `compile(tree)()`.
+ */
+ def eval(tree: u.Tree): Any
+}
+
+/** Represents an error during toolboxing
+ */
+case class ToolBoxError(val message: String, val cause: Throwable = null) extends Throwable(message, cause)
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
new file mode 100644
index 0000000..8803980
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -0,0 +1,421 @@
+package scala.tools
+package reflect
+
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.CompilerCommand
+import scala.tools.nsc.Global
+import scala.tools.nsc.typechecker.Modes
+import scala.tools.nsc.io.VirtualDirectory
+import scala.tools.nsc.interpreter.AbstractFileClassLoader
+import scala.tools.nsc.util.FreshNameCreator
+import scala.tools.nsc.ast.parser.Tokens.EOF
+import scala.reflect.internal.Flags._
+import scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, NoFile}
+import java.lang.{Class => jClass}
+import scala.compat.Platform.EOL
+import scala.reflect.NameTransformer
+import scala.reflect.api.JavaUniverse
+import scala.reflect.io.NoAbstractFile
+import scala.tools.nsc.interactive.RangePositions
+
+abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
+
+ val mirror: u.Mirror
+
+ def mkToolBox(frontEnd: FrontEnd = mkSilentFrontEnd(), options: String = ""): ToolBox[U] =
+ new ToolBoxImpl(frontEnd, options)
+
+ private class ToolBoxImpl(val frontEnd: FrontEnd, val options: String) extends ToolBox[U] { toolBoxSelf =>
+
+ val u: factorySelf.u.type = factorySelf.u
+
+ lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, factorySelf.mirror.classLoader)
+ lazy val mirror: u.Mirror = u.runtimeMirror(classLoader)
+
+ class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter: Reporter)
+ extends ReflectGlobal(settings, reporter, toolBoxSelf.classLoader) {
+ import definitions._
+
+ private val trace = scala.tools.nsc.util.trace when settings.debug.value
+
+ private var wrapCount = 0
+
+ private final val wrapperMethodName = "wrapper"
+
+ private def nextWrapperModuleName() = {
+ wrapCount += 1
+ // we need to use UUIDs here, because our toolbox might be spawned by another toolbox
+ // that already has, say, __wrapper$1 in its virtual directory, which will shadow our codegen
+ newTermName("__wrapper$" + wrapCount + "$" + java.util.UUID.randomUUID.toString.replace("-", ""))
+ }
+
+ // should be called after every use of ToolBoxGlobal in order to prevent leaks
+ // there's the `withCleanupCaches` method defined below, which provides a convenient interface for that
+ def cleanupCaches(): Unit = {
+ perRunCaches.clearAll()
+ undoLog.clear()
+ analyzer.lastTreeToTyper = EmptyTree
+ lastSeenSourceFile = NoSourceFile
+ lastSeenContext = null
+ }
+
+ def withCleanupCaches[T](body: => T): T =
+ try body
+ finally cleanupCaches()
+
+ def verify(expr: Tree): Unit = {
+ // Previously toolboxes used to typecheck their inputs before compiling.
+ // Actually, the initial demo by Martin first typechecked the reified tree,
+ // then ran it, which typechecked it again, and only then launched the
+ // reflective compiler.
+ //
+ // However, as observed in https://issues.scala-lang.org/browse/SI-5464
+ // current implementation typechecking is not always idempotent.
+ // That's why we cannot allow inputs of toolboxes to be typechecked,
+ // at least not until the aforementioned issue is closed.
+ val typed = expr filter (t => t.tpe != null && t.tpe != NoType && !t.isInstanceOf[TypeTree])
+ if (!typed.isEmpty) throw ToolBoxError("reflective toolbox has failed: cannot operate on trees that are already typed")
+
+ val freeTypes = expr.freeTypes
+ if (freeTypes.length > 0) {
+ var msg = "reflective toolbox has failed:" + EOL
+ msg += "unresolved free type variables (namely: " + (freeTypes map (ft => "%s %s".format(ft.name, ft.origin)) mkString ", ") + "). "
+ msg += "have you forgot to use TypeTag annotations for type parameters external to a reifee? "
+ msg += "if you have troubles tracking free type variables, consider using -Xlog-free-types"
+ throw ToolBoxError(msg)
+ }
+ }
+
+ def wrapIntoTerm(tree: Tree): Tree =
+ if (!tree.isTerm) Block(List(tree), Literal(Constant(()))) else tree
+
+ def unwrapFromTerm(tree: Tree): Tree = tree match {
+ case Block(List(tree), Literal(Constant(()))) => tree
+ case tree => tree
+ }
+
+ def extractFreeTerms(expr0: Tree, wrapFreeTermRefs: Boolean): (Tree, scala.collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]) = {
+ val freeTerms = expr0.freeTerms
+ val freeTermNames = scala.collection.mutable.LinkedHashMap[FreeTermSymbol, TermName]()
+ freeTerms foreach (ft => {
+ var name = ft.name.toString
+ val namesakes = freeTerms takeWhile (_ != ft) filter (ft2 => ft != ft2 && ft.name == ft2.name)
+ if (namesakes.length > 0) name += ("$" + (namesakes.length + 1))
+ freeTermNames += (ft -> newTermName(name + nme.REIFY_FREE_VALUE_SUFFIX))
+ })
+ var expr = new Transformer {
+ override def transform(tree: Tree): Tree =
+ if (tree.hasSymbol && tree.symbol.isFreeTerm) {
+ tree match {
+ case Ident(_) =>
+ val freeTermRef = Ident(freeTermNames(tree.symbol.asFreeTerm))
+ if (wrapFreeTermRefs) Apply(freeTermRef, List()) else freeTermRef
+ case _ =>
+ throw new Error("internal error: %s (%s, %s) is not supported".format(tree, tree.productPrefix, tree.getClass))
+ }
+ } else {
+ super.transform(tree)
+ }
+ }.transform(expr0)
+ (expr, freeTermNames)
+ }
+
+ def transformDuringTyper(expr0: Tree, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean)(transform: (analyzer.Typer, Tree) => Tree): Tree = {
+ verify(expr0)
+
+ // need to wrap the expr, because otherwise you won't be able to typecheck macros against something that contains free vars
+ var (expr, freeTerms) = extractFreeTerms(expr0, wrapFreeTermRefs = false)
+ val dummies = freeTerms.map{ case (freeTerm, name) => ValDef(NoMods, name, TypeTree(freeTerm.info), Select(Ident(PredefModule), newTermName("$qmark$qmark$qmark"))) }.toList
+ expr = Block(dummies, wrapIntoTerm(expr))
+
+ // [Eugene] how can we implement that?
+ // !!! Why is this is in the empty package? If it's only to make
+ // it inaccessible then please put it somewhere designed for that
+ // rather than polluting the empty package with synthetics.
+ val ownerClass = rootMirror.EmptyPackageClass.newClassSymbol(newTypeName("<expression-owner>"))
+ build.setTypeSignature(ownerClass, ClassInfoType(List(ObjectClass.tpe), newScope, ownerClass))
+ val owner = ownerClass.newLocalDummy(expr.pos)
+ var currentTyper = analyzer.newTyper(analyzer.rootContext(NoCompilationUnit, EmptyTree).make(expr, owner))
+ val wrapper1 = if (!withImplicitViewsDisabled) (currentTyper.context.withImplicitsEnabled[Tree] _) else (currentTyper.context.withImplicitsDisabled[Tree] _)
+ val wrapper2 = if (!withMacrosDisabled) (currentTyper.context.withMacrosEnabled[Tree] _) else (currentTyper.context.withMacrosDisabled[Tree] _)
+ def wrapper (tree: => Tree) = wrapper1(wrapper2(tree))
+
+ val run = new Run
+ run.symSource(ownerClass) = NoAbstractFile // need to set file to something different from null, so that currentRun.defines works
+ phase = run.typerPhase // need to set a phase to something <= typerPhase, otherwise implicits in typedSelect will be disabled
+ currentTyper.context.setReportErrors() // need to manually set context mode, otherwise typer.silent will throw exceptions
+ reporter.reset()
+
+ val expr1 = wrapper(transform(currentTyper, expr))
+ var (dummies1, unwrapped) = expr1 match {
+ case Block(dummies, unwrapped) => (dummies, unwrapped)
+ case unwrapped => (Nil, unwrapped)
+ }
+ var invertedIndex = freeTerms map (_.swap)
+ // todo. also fixup singleton types
+ unwrapped = new Transformer {
+ override def transform(tree: Tree): Tree =
+ tree match {
+ case Ident(name) if invertedIndex contains name =>
+ Ident(invertedIndex(name)) setType tree.tpe
+ case _ =>
+ super.transform(tree)
+ }
+ }.transform(unwrapped)
+ new TreeTypeSubstituter(dummies1 map (_.symbol), dummies1 map (dummy => SingleType(NoPrefix, invertedIndex(dummy.symbol.name)))).traverse(unwrapped)
+ unwrapped = if (expr0.isTerm) unwrapped else unwrapFromTerm(unwrapped)
+ unwrapped
+ }
+
+ def typeCheck(expr: Tree, pt: Type, silent: Boolean, withImplicitViewsDisabled: Boolean, withMacrosDisabled: Boolean): Tree =
+ transformDuringTyper(expr, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)(
+ (currentTyper, expr) => {
+ trace("typing (implicit views = %s, macros = %s): ".format(!withImplicitViewsDisabled, !withMacrosDisabled))(showAttributed(expr, true, true, settings.Yshowsymkinds.value))
+ currentTyper.silent(_.typed(expr, analyzer.EXPRmode, pt), reportAmbiguousErrors = false) match {
+ case analyzer.SilentResultValue(result) =>
+ trace("success: ")(showAttributed(result, true, true, settings.Yshowsymkinds.value))
+ result
+ case error @ analyzer.SilentTypeError(_) =>
+ trace("failed: ")(error.err.errMsg)
+ if (!silent) throw ToolBoxError("reflective typecheck has failed: %s".format(error.err.errMsg))
+ EmptyTree
+ }
+ })
+
+ def inferImplicit(tree: Tree, pt: Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: Position): Tree =
+ transformDuringTyper(tree, withImplicitViewsDisabled = false, withMacrosDisabled = withMacrosDisabled)(
+ (currentTyper, tree) => {
+ trace("inferring implicit %s (macros = %s): ".format(if (isView) "view" else "value", !withMacrosDisabled))(showAttributed(pt, true, true, settings.Yshowsymkinds.value))
+ analyzer.inferImplicit(tree, pt, isView, currentTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw ToolBoxError(msg))
+ })
+
+ def compile(expr0: Tree): () => Any = {
+ val expr = wrapIntoTerm(expr0)
+
+ val freeTerms = expr.freeTerms // need to calculate them here, because later on they will be erased
+ val thunks = freeTerms map (fte => () => fte.value) // need to be lazy in order not to distort evaluation order
+ verify(expr)
+
+ def wrap(expr0: Tree): ModuleDef = {
+ val (expr, freeTerms) = extractFreeTerms(expr0, wrapFreeTermRefs = true)
+
+ val (obj, mclazz) = rootMirror.EmptyPackageClass.newModuleAndClassSymbol(
+ nextWrapperModuleName())
+
+ val minfo = ClassInfoType(List(ObjectClass.tpe), newScope, obj.moduleClass)
+ obj.moduleClass setInfo minfo
+ obj setInfo obj.moduleClass.tpe
+
+ val meth = obj.moduleClass.newMethod(newTermName(wrapperMethodName))
+ def makeParam(schema: (FreeTermSymbol, TermName)) = {
+ // see a detailed explanation of the STABLE trick in `GenSymbols.reifyFreeTerm`
+ val (fv, name) = schema
+ meth.newValueParameter(name, newFlags = if (fv.hasStableFlag) STABLE else 0) setInfo appliedType(definitions.FunctionClass(0).tpe, List(fv.tpe.resultType))
+ }
+ meth setInfo MethodType(freeTerms.map(makeParam).toList, AnyClass.tpe)
+ minfo.decls enter meth
+ def defOwner(tree: Tree): Symbol = tree find (_.isDef) map (_.symbol) match {
+ case Some(sym) if sym != null && sym != NoSymbol => sym.owner
+ case _ => NoSymbol
+ }
+ trace("wrapping ")(defOwner(expr) -> meth)
+ val methdef = DefDef(meth, expr changeOwner (defOwner(expr) -> meth))
+
+ val moduledef = ModuleDef(
+ obj,
+ Template(
+ List(TypeTree(ObjectClass.tpe)),
+ emptyValDef,
+ NoMods,
+ List(),
+ List(List()),
+ List(methdef),
+ NoPosition))
+ trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymkinds.value))
+
+ var cleanedUp = resetLocalAttrs(moduledef)
+ trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymkinds.value))
+ cleanedUp.asInstanceOf[ModuleDef]
+ }
+
+ val mdef = wrap(expr)
+ val pdef = PackageDef(Ident(mdef.name), List(mdef))
+ val unit = new CompilationUnit(NoSourceFile)
+ unit.body = pdef
+
+ val run = new Run
+ reporter.reset()
+ run.compileUnits(List(unit), run.namerPhase)
+ throwIfErrors()
+
+ val className = mdef.symbol.fullName
+ if (settings.debug.value) println("generated: "+className)
+ def moduleFileName(className: String) = className + "$"
+ val jclazz = jClass.forName(moduleFileName(className), true, classLoader)
+ val jmeth = jclazz.getDeclaredMethods.find(_.getName == wrapperMethodName).get
+ val jfield = jclazz.getDeclaredFields.find(_.getName == NameTransformer.MODULE_INSTANCE_NAME).get
+ val singleton = jfield.get(null)
+
+ // @odersky writes: Not sure we will be able to drop this. I forgot the reason why we dereference () functions,
+ // but there must have been one. So I propose to leave old version in comments to be resurrected if the problem resurfaces.
+ // @Eugene writes: this dates back to the days when one could only reify functions
+ // hence, blocks were translated into nullary functions, so
+ // presumably, it was useful to immediately evaluate them to get the result of a block
+ // @Eugene writes: anyways, I'll stash the old sources here in comments in case anyone wants to revive them
+ // val result = jmeth.invoke(singleton, freeTerms map (sym => sym.asInstanceOf[FreeTermVar].value.asInstanceOf[AnyRef]): _*)
+ // if (etpe.typeSymbol != FunctionClass(0)) result
+ // else {
+ // val applyMeth = result.getClass.getMethod("apply")
+ // applyMeth.invoke(result)
+ // }
+ () => {
+ val result = jmeth.invoke(singleton, thunks map (_.asInstanceOf[AnyRef]): _*)
+ if (jmeth.getReturnType == java.lang.Void.TYPE) ()
+ else result
+ }
+ }
+
+ def parse(code: String): Tree = {
+ val run = new Run
+ reporter.reset()
+ val file = new BatchSourceFile("<toolbox>", code)
+ val unit = new CompilationUnit(file)
+ phase = run.parserPhase
+ val parser = new syntaxAnalyzer.UnitParser(unit)
+ val parsed = parser.templateStats()
+ parser.accept(EOF)
+ throwIfErrors()
+ parsed match {
+ case expr :: Nil => expr
+ case stats :+ expr => Block(stats, expr)
+ }
+ }
+
+ def showAttributed(artifact: Any, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = {
+ val saved1 = settings.printtypes.value
+ val saved2 = settings.uniqid.value
+ val saved3 = settings.Yshowsymkinds.value
+ try {
+ settings.printtypes.value = printTypes
+ settings.uniqid.value = printIds
+ settings.Yshowsymkinds.value = printKinds
+ artifact.toString
+ } finally {
+ settings.printtypes.value = saved1
+ settings.uniqid.value = saved2
+ settings.Yshowsymkinds.value = saved3
+ }
+ }
+
+ // reporter doesn't accumulate errors, but the front-end does
+ def throwIfErrors() = {
+ if (frontEnd.hasErrors) {
+ var msg = "reflective compilation has failed: " + EOL + EOL
+ msg += frontEnd.infos map (_.msg) mkString EOL
+ throw ToolBoxError(msg)
+ }
+ }
+ }
+
+ // todo. is not going to work with quoted arguments with embedded whitespaces
+ lazy val arguments = options.split(" ")
+
+ lazy val virtualDirectory =
+ (arguments zip arguments.tail).collect{ case ("-d", dir) => dir }.lastOption match {
+ case Some(outDir) => scala.tools.nsc.io.AbstractFile.getDirectory(outDir)
+ case None => new VirtualDirectory("(memory)", None)
+ }
+
+ lazy val compiler: ToolBoxGlobal = {
+ try {
+ val errorFn: String => Unit = msg => frontEnd.log(scala.reflect.internal.util.NoPosition, msg, frontEnd.ERROR)
+ val command = new CompilerCommand(arguments.toList, errorFn)
+ val settings = command.settings
+ settings.outputDirs setSingleOutput virtualDirectory
+ val reporter = frontEndToReporter(frontEnd, command.settings)
+ val instance =
+ if (settings.Yrangepos.value) new ToolBoxGlobal(settings, reporter) with RangePositions
+ else new ToolBoxGlobal(settings, reporter)
+ if (frontEnd.hasErrors) {
+ var msg = "reflective compilation has failed: cannot initialize the compiler: " + EOL + EOL
+ msg += frontEnd.infos map (_.msg) mkString EOL
+ throw ToolBoxError(msg)
+ }
+ instance
+ } catch {
+ case ex: Throwable =>
+ var msg = "reflective compilation has failed: cannot initialize the compiler due to %s".format(ex.toString)
+ throw ToolBoxError(msg, ex)
+ }
+ }
+
+ lazy val importer = compiler.mkImporter(u)
+ lazy val exporter = importer.reverse
+
+ def typeCheck(tree: u.Tree, expectedType: u.Type, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree = compiler.withCleanupCaches {
+ if (compiler.settings.verbose.value) println("importing "+tree+", expectedType = "+expectedType)
+ var ctree: compiler.Tree = importer.importTree(tree)
+ var cexpectedType: compiler.Type = importer.importType(expectedType)
+
+ if (compiler.settings.verbose.value) println("typing "+ctree+", expectedType = "+expectedType)
+ val ttree: compiler.Tree = compiler.typeCheck(ctree, cexpectedType, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)
+ val uttree = exporter.importTree(ttree)
+ uttree
+ }
+
+ def inferImplicitValue(pt: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree = {
+ inferImplicit(u.EmptyTree, pt, isView = false, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = pos)
+ }
+
+ def inferImplicitView(tree: u.Tree, from: u.Type, to: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree = {
+ val viewTpe = u.appliedType(u.definitions.FunctionClass(1).toTypeConstructor, List(from, to))
+ inferImplicit(tree, viewTpe, isView = true, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = pos)
+ }
+
+ private def inferImplicit(tree: u.Tree, pt: u.Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: u.Position): u.Tree = compiler.withCleanupCaches {
+ if (compiler.settings.verbose.value) println("importing "+pt, ", tree = "+tree+", pos = "+pos)
+ var ctree: compiler.Tree = importer.importTree(tree)
+ var cpt: compiler.Type = importer.importType(pt)
+ var cpos: compiler.Position = importer.importPosition(pos)
+
+ if (compiler.settings.verbose.value) println("inferring implicit %s of type %s, macros = %s".format(if (isView) "view" else "value", pt, !withMacrosDisabled))
+ val itree: compiler.Tree = compiler.inferImplicit(ctree, cpt, isView = isView, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = cpos)
+ val uitree = exporter.importTree(itree)
+ uitree
+ }
+
+ def resetAllAttrs(tree: u.Tree): u.Tree = {
+ val ctree: compiler.Tree = importer.importTree(tree)
+ val ttree: compiler.Tree = compiler.resetAllAttrs(ctree)
+ val uttree = exporter.importTree(ttree)
+ uttree
+ }
+
+ def resetLocalAttrs(tree: u.Tree): u.Tree = {
+ val ctree: compiler.Tree = importer.importTree(tree)
+ val ttree: compiler.Tree = compiler.resetLocalAttrs(ctree)
+ val uttree = exporter.importTree(ttree)
+ uttree
+ }
+
+ def showAttributed(tree: u.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String =
+ compiler.showAttributed(importer.importTree(tree), printTypes, printIds, printKinds)
+
+ def parse(code: String): u.Tree = {
+ if (compiler.settings.verbose.value) println("parsing "+code)
+ val ctree: compiler.Tree = compiler.parse(code)
+ val utree = exporter.importTree(ctree)
+ utree
+ }
+
+ def compile(tree: u.Tree): () => Any = {
+ if (compiler.settings.verbose.value) println("importing "+tree)
+ val ctree: compiler.Tree = importer.importTree(tree)
+
+ if (compiler.settings.verbose.value) println("compiling "+ctree)
+ compiler.compile(ctree)
+ }
+
+ def eval(tree: u.Tree): Any = compile(tree)()
+ }
+}
diff --git a/src/compiler/scala/tools/reflect/UniversalFn.scala b/src/compiler/scala/tools/reflect/UniversalFn.scala
deleted file mode 100644
index 9ccd580..0000000
--- a/src/compiler/scala/tools/reflect/UniversalFn.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package reflect
-
-import java.lang.reflect.{ Method, InvocationTargetException }
-import java.{ lang => jl }
-
-/** For certain reflection tasks it is convenient to treat all methods
- * as having the same signature: (Seq[AnyRef]) => AnyRef
- *
- * That is the "universal signature" and UniversalFn exists to provide
- * it without abandoning the information we had before we needed it.
- * One place this is used: closures can pose as arbitrary interfaces,
- * and this is how we route the arguments from the actual method
- * invocation (which targets a proxy object) to the original closure.
- */
-class UniversalFn private (val closure: AnyRef, val method: Method) extends (Seq[AnyRef] => AnyRef) {
- universal =>
-
- /** Given an interface type argument, creates a proxy object of the
- * type of the interface which implements all its methods by routing
- * them to this universal function. Will throw an exception in the
- * face of any bad data.
- */
- def as[T: Manifest] : T = {
- val clazz = manifest[T].erasure
- require(clazz.isInterface, "Type argument must be an interface.")
-
- val interfaceMethods = clazz.getDeclaredMethods.toSet
- val proxy = Mock.fromInterfaces(clazz) {
- case Invoked(_, m, args) if interfaceMethods(m) => universal(args)
- }
- proxy.asInstanceOf[T]
- }
-
- def apply(xs: Seq[AnyRef]): AnyRef =
- try method.invoke(closure, xs: _*)
- catch { case x: InvocationTargetException => throw x.getCause() }
-}
-
-object UniversalFn {
- /** We use a private constructor so we can enforce some rules: we don't want
- * universal functions to stack up, and right now we will only allow objects
- * which appear to be closures (there's no reason not to eventually lift
- * this restriction, but it should be harder to shoot your foot first.)
- */
- def apply(closure: AnyRef): UniversalFn = closure match {
- case x: UniversalFn => x
- case _ =>
- val m = uniqueApply(closure) getOrElse {
- throw new IllegalArgumentException("Argument must have exactly one non-bridge apply method.")
- }
- new UniversalFn(closure, m)
- }
-}
diff --git a/src/compiler/scala/tools/reflect/WrappedProperties.scala b/src/compiler/scala/tools/reflect/WrappedProperties.scala
index e70cf32..7ce0171 100644
--- a/src/compiler/scala/tools/reflect/WrappedProperties.scala
+++ b/src/compiler/scala/tools/reflect/WrappedProperties.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2006-2011 LAMP/EPFL
+ * Copyright 2006-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -21,14 +21,18 @@ trait WrappedProperties extends PropertiesTrait {
override def propIsSet(name: String) = wrap(super.propIsSet(name)) exists (x => x)
override def propOrElse(name: String, alt: String) = wrap(super.propOrElse(name, alt)) getOrElse alt
- override def setProp(name: String, value: String) = wrap(super.setProp(name, value)) orNull
- override def clearProp(name: String) = wrap(super.clearProp(name)) orNull
+ override def setProp(name: String, value: String) = wrap(super.setProp(name, value)).orNull
+ override def clearProp(name: String) = wrap(super.clearProp(name)).orNull
override def envOrElse(name: String, alt: String) = wrap(super.envOrElse(name, alt)) getOrElse alt
- override def envOrNone(name: String) = wrap(super.envOrNone(name)) match { case Some(x) => x ; case _ => None }
+ override def envOrNone(name: String) = wrap(super.envOrNone(name)).flatten
- def systemProperties: Iterator[(String, String)] = {
+ def systemProperties: List[(String, String)] = {
import scala.collection.JavaConverters._
- wrap(System.getProperties.asScala.iterator) getOrElse Iterator.empty
+ wrap {
+ val props = System.getProperties
+ // SI-7269 Be careful to avoid `ConcurrentModificationException` if another thread modifies the properties map
+ props.stringPropertyNames().asScala.toList.map(k => (k, props.get(k).asInstanceOf[String]))
+ } getOrElse Nil
}
}
diff --git a/src/compiler/scala/tools/reflect/package.scala b/src/compiler/scala/tools/reflect/package.scala
index 8f5e099..3f880bf 100644
--- a/src/compiler/scala/tools/reflect/package.scala
+++ b/src/compiler/scala/tools/reflect/package.scala
@@ -1,43 +1,119 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
package scala.tools
-import java.lang.reflect.Method
-import java.{ lang => jl }
+import scala.reflect.api.JavaUniverse
+import scala.reflect.internal.util.Position
+import scala.language.implicitConversions
+import scala.tools.nsc.reporters._
+import scala.tools.nsc.Settings
package object reflect {
- def nameAndArity(m: Method) = (m.getName, m.getParameterTypes.size)
- def allInterfaces(cl: Class[_]): List[Class[_]] =
- if (cl == null) Nil
- else cl.getInterfaces.toList ++ allInterfaces(cl.getSuperclass) distinct
+ // [todo: can we generalize this?
+ import scala.reflect.runtime.{universe => ru}
+ implicit def ToolBox(mirror0: ru.Mirror): ToolBoxFactory[ru.type] =
+ new ToolBoxFactory[ru.type](mirror0.universe) {
+ lazy val mirror = mirror0
+ }
+
+ // todo. replace this with an implicit class, once the pesky warning is gone
+ // we don't provide `Eval` for trees, because it's unclear where to get an evaluation mirror from
+ implicit def Eval[T](expr: JavaUniverse # Expr[T]): Eval[T] = new Eval[T](expr)
- def methodsNamed(target: AnyRef, name: String): List[Method] =
- target.getClass.getMethods filter (x => x.getName == name) toList
+ /** Creates a UI-less reporter that simply accumulates all the messages
+ */
+ def mkSilentFrontEnd(): FrontEnd = new FrontEnd {
+ def display(info: Info) {}
+ def interactive() {}
+ }
- /** If there is a single non-bridge apply method in the given instance,
- * return it: otherwise None.
+ /** Creates a reporter that prints messages to the console according to the settings.
+ *
+ * ``minSeverity'' determines minimum severity of the messages to be printed.
+ * 0 stands for INFO, 1 stands for WARNING and 2 stands for ERROR.
*/
- def uniqueApply(target: AnyRef) = {
- methodsNamed(target, "apply") filterNot (_.isBridge) match {
- case List(x) => Some(x)
- case _ => None
+ // todo. untangle warningsAsErrors from Reporters. I don't feel like moving this flag here!
+ def mkConsoleFrontEnd(minSeverity: Int = 1): FrontEnd = {
+ val settings = new Settings()
+ if (minSeverity <= 0) settings.verbose.value = true
+ if (minSeverity > 1) settings.nowarn.value = true
+ reporterToFrontEnd(new ConsoleReporter(settings))
+ }
+
+ private[reflect] def reporterToFrontEnd(reporter: Reporter): FrontEnd = new FrontEnd {
+ val API_INFO = INFO
+ val API_WARNING = WARNING
+ val API_ERROR = ERROR
+
+ override def hasErrors = reporter.hasErrors
+ override def hasWarnings = reporter.hasWarnings
+
+ def display(info: Info): Unit = info.severity match {
+ case API_INFO => reporter.info(info.pos, info.msg, false)
+ case API_WARNING => reporter.warning(info.pos, info.msg)
+ case API_ERROR => reporter.error(info.pos, info.msg)
+ }
+
+ def interactive(): Unit = reporter match {
+ case reporter: AbstractReporter => reporter.displayPrompt()
+ case _ => // do nothing
+ }
+
+ override def flush(): Unit = {
+ super.flush()
+ reporter.flush()
+ }
+
+ override def reset(): Unit = {
+ super.reset()
+ reporter.reset()
}
}
- def zeroOfClass(clazz: Class[_]) = zeroOf(Manifest.classType(clazz))
- def zeroOf[T](implicit m: Manifest[T]): AnyRef = {
- if (m == manifest[Boolean] || m == manifest[jl.Boolean]) false: jl.Boolean
- else if (m == manifest[Unit] || m == manifest[jl.Void] || m == manifest[scala.runtime.BoxedUnit]) scala.runtime.BoxedUnit.UNIT
- else if (m == manifest[Char] || m == manifest[jl.Character]) 0.toChar: jl.Character
- else if (m == manifest[Byte] || m == manifest[jl.Byte]) 0.toByte: jl.Byte
- else if (m == manifest[Short] || m == manifest[jl.Short]) 0.toShort: jl.Short
- else if (m == manifest[Int] || m == manifest[jl.Integer]) 0: jl.Integer
- else if (m == manifest[Long] || m == manifest[jl.Long]) 0l: jl.Long
- else if (m == manifest[Float] || m == manifest[jl.Float]) 0f: jl.Float
- else if (m == manifest[Double] || m == manifest[jl.Double]) 0d: jl.Double
- else null
+ private[reflect] def frontEndToReporter(frontEnd: FrontEnd, settings0: Settings): Reporter = new AbstractReporter {
+ val settings = settings0
+
+ import frontEnd.{Severity => ApiSeverity}
+ val API_INFO = frontEnd.INFO
+ val API_WARNING = frontEnd.WARNING
+ val API_ERROR = frontEnd.ERROR
+
+ type NscSeverity = Severity
+ val NSC_INFO = INFO
+ val NSC_WARNING = WARNING
+ val NSC_ERROR = ERROR
+
+ def display(pos: Position, msg: String, nscSeverity: NscSeverity): Unit =
+ frontEnd.log(pos, msg, nscSeverity match {
+ case NSC_INFO => API_INFO
+ case NSC_WARNING => API_WARNING
+ case NSC_ERROR => API_ERROR
+ })
+
+ def displayPrompt(): Unit =
+ frontEnd.interactive()
+
+ override def flush(): Unit = {
+ super.flush()
+ frontEnd.flush()
+ }
+
+ override def reset(): Unit = {
+ super.reset()
+ frontEnd.reset()
+ }
+ }
+}
+
+package reflect {
+ class Eval[T](expr: JavaUniverse # Expr[T]) {
+ def eval: T = {
+ val factory = new ToolBoxFactory[JavaUniverse](expr.mirror.universe) { val mirror = expr.mirror.asInstanceOf[this.u.Mirror] }
+ val toolBox = factory.mkToolBox()
+ toolBox.eval(expr.tree.asInstanceOf[toolBox.u.Tree]).asInstanceOf[T]
+ }
}
}
diff --git a/src/compiler/scala/tools/util/AbstractTimer.scala b/src/compiler/scala/tools/util/AbstractTimer.scala
deleted file mode 100644
index 219b5d2..0000000
--- a/src/compiler/scala/tools/util/AbstractTimer.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools.util
-
-import compat.Platform.currentTime
-import scala.collection.mutable.Stack
-
-/**
- * This abstract class implements the collection of timings. How the
- * collected timings are issued has to be implemented in subclasses.
- *
- * @author Philippe Altherr
- * @version 1.0
- */
-abstract class AbstractTimer {
-
- //########################################################################
- // Private Fields
-
- /** A stack for maintaining start times */
- private val starts = new Stack[Long]()
-
- //########################################################################
- // Public Methods
-
- /** Issues a timing information (duration in milliseconds). */
- def issue(message: String, duration: Long): Unit
-
- /** Starts a new timer. */
- def start() {
- starts push currentTime
- }
-
- /** Ends the current timer. */
- def stop(message: String) {
- val stop = currentTime
- issue(message, stop - starts.pop)
- }
-
- /** Drops the current timer. */
- def drop() {
- starts.pop
- }
-
- //########################################################################
-}
diff --git a/src/compiler/scala/tools/util/ClassPathSettings.scala b/src/compiler/scala/tools/util/ClassPathSettings.scala
deleted file mode 100644
index d202279..0000000
--- a/src/compiler/scala/tools/util/ClassPathSettings.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2006-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package util
-
-trait ClassPathSettings {
- def javabootclasspath: String // -javabootclasspath
- def javaextdirs: String // -javaextdirs
- def bootclasspath: String // -bootclasspath
- def extdirs: String // -extdirs
- def classpath: String // -classpath
- def sourcepath: String // -sourcepath
-}
-
-// val debugLogger = {
-// val f = File("/tmp/path-resolve-log.txt")
-// if (f.exists) f.truncate()
-// else f.createFile()
-//
-// val res = f.bufferedWriter()
-// res write ("Started debug log: %s\n".format(new java.util.Date))
-// res
-// }
-// def log(msg: Any) = {
-// Console println msg
-// debugLogger.write(msg.toString + "\n")
-// debugLogger flush
-// }
-
diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala
index 581cc9d..21137ac 100644
--- a/src/compiler/scala/tools/util/Javap.scala
+++ b/src/compiler/scala/tools/util/Javap.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -10,46 +10,63 @@ import java.lang.reflect.{ GenericSignatureFormatError, Method, Constructor }
import java.lang.{ ClassLoader => JavaClassLoader }
import scala.tools.nsc.util.ScalaClassLoader
import java.io.{ InputStream, PrintWriter, ByteArrayInputStream, FileNotFoundException }
-import scala.tools.nsc.io.{ File, NullPrintStream }
+import scala.tools.nsc.io.File
import Javap._
+import scala.language.reflectiveCalls
+
+trait Javap {
+ def loader: ScalaClassLoader
+ def printWriter: PrintWriter
+ def apply(args: Seq[String]): List[JpResult]
+ def tryFile(path: String): Option[Array[Byte]]
+ def tryClass(path: String): Array[Byte]
+}
+
+object NoJavap extends Javap {
+ def loader: ScalaClassLoader = getClass.getClassLoader
+ def printWriter: PrintWriter = new PrintWriter(System.err, true)
+ def apply(args: Seq[String]): List[JpResult] = Nil
+ def tryFile(path: String): Option[Array[Byte]] = None
+ def tryClass(path: String): Array[Byte] = Array()
+}
-class Javap(
- val loader: ScalaClassLoader = ScalaClassLoader.getSystemLoader(),
+class JavapClass(
+ val loader: ScalaClassLoader = ScalaClassLoader.appLoader,
val printWriter: PrintWriter = new PrintWriter(System.out, true)
-) {
+) extends Javap {
lazy val parser = new JpOptions
+ val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
+ val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull
+ private def failed = (EnvClass eq null) || (PrinterClass eq null)
+
+ val PrinterCtr = (
+ if (failed) null
+ else PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass)
+ )
+
def findBytes(path: String): Array[Byte] =
tryFile(path) getOrElse tryClass(path)
def apply(args: Seq[String]): List[JpResult] = {
- args.toList filterNot (_ startsWith "-") map { path =>
+ if (failed) List(new JpError("Could not load javap tool. Check that JAVA_HOME is correct."))
+ else args.toList filterNot (_ startsWith "-") map { path =>
val bytes = findBytes(path)
if (bytes.isEmpty) new JpError("Could not find class bytes for '%s'".format(path))
else new JpSuccess(newPrinter(new ByteArrayInputStream(bytes), newEnv(args)))
}
}
- // "documentation"
- type FakeEnvironment = AnyRef
- type FakePrinter = AnyRef
-
- val Env = "sun.tools.javap.JavapEnvironment"
- val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
- val EnvCtr = EnvClass.getConstructor(List[Class[_]](): _*)
-
- val Printer = "sun.tools.javap.JavapPrinter"
- val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull
- val PrinterCtr = PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass)
-
def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter =
- PrinterCtr.newInstance(in, printWriter, env)
+ if (failed) null
+ else PrinterCtr.newInstance(in, printWriter, env)
def newEnv(opts: Seq[String]): FakeEnvironment = {
- val env: FakeEnvironment = EnvClass.newInstance()
+ lazy val env: FakeEnvironment = EnvClass.newInstance()
- parser(opts) foreach { case (name, value) =>
+ if (failed) null
+ else parser(opts) foreach { case (name, value) =>
val field = EnvClass getDeclaredField name
field setAccessible true
field.set(env, value.asInstanceOf[AnyRef])
@@ -82,8 +99,18 @@ class Javap(
}
object Javap {
+ val Env = "sun.tools.javap.JavapEnvironment"
+ val Printer = "sun.tools.javap.JavapPrinter"
+
+ def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) =
+ cl.tryToInitializeClass[AnyRef](Env).isDefined
+
+ // "documentation"
+ type FakeEnvironment = AnyRef
+ type FakePrinter = AnyRef
+
def apply(path: String): Unit = apply(Seq(path))
- def apply(args: Seq[String]): Unit = new Javap() apply args foreach (_.show())
+ def apply(args: Seq[String]): Unit = new JavapClass() apply args foreach (_.show())
sealed trait JpResult {
type ResultType
@@ -117,15 +144,15 @@ object Javap {
}
private val envActionMap: Map[String, (String, Any)] = {
val map = Map(
- "-l" -> ("showLineAndLocal", true),
- "-c" -> ("showDisassembled", true),
- "-s" -> ("showInternalSigs", true),
- "-verbose" -> ("showVerbose", true),
- "-private" -> ("showAccess", Access.PRIVATE),
- "-package" -> ("showAccess", Access.PACKAGE),
- "-protected" -> ("showAccess", Access.PROTECTED),
- "-public" -> ("showAccess", Access.PUBLIC),
- "-all" -> ("showallAttr", true)
+ "-l" -> (("showLineAndLocal", true)),
+ "-c" -> (("showDisassembled", true)),
+ "-s" -> (("showInternalSigs", true)),
+ "-verbose" -> (("showVerbose", true)),
+ "-private" -> (("showAccess", Access.PRIVATE)),
+ "-package" -> (("showAccess", Access.PACKAGE)),
+ "-protected" -> (("showAccess", Access.PROTECTED)),
+ "-public" -> (("showAccess", Access.PUBLIC)),
+ "-all" -> (("showallAttr", true))
)
map ++ List(
"-v" -> map("-verbose"),
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index f003e94..0af1011 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2006-2011 LAMP/EPFL
+ * Copyright 2006-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -13,9 +13,10 @@ import nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader }
import nsc.io.{ File, Directory, Path, AbstractFile }
import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
import PartialFunction.condOpt
+import scala.language.postfixOps
// Loosely based on the draft specification at:
-// https://lampsvn.epfl.ch/trac/scala/wiki/Classpath
+// https://wiki.scala-lang.org/display/SW/Classpath
object PathResolver {
// Imports property/environment functions which suppress
@@ -77,13 +78,7 @@ object PathResolver {
* to the path resolution specification.
*/
object Defaults {
- /* Against my better judgment, giving in to martin here and allowing
- * CLASSPATH as the default if no -cp is given. Only if there is no
- * command line option or environment variable is "." used.
- */
- def scalaUserClassPath = firstNonEmpty(Environment.classPathEnv, ".")
- def scalaSourcePath = Environment.sourcePathEnv
-
+ def scalaSourcePath = Environment.sourcePathEnv
def javaBootClassPath = Environment.javaBootClassPath
def javaUserClassPath = Environment.javaUserClassPath
def javaExtDirs = Environment.javaExtDirs
@@ -198,8 +193,29 @@ class PathResolver(settings: Settings, context: JavaContext) {
def javaUserClassPath = if (useJavaClassPath) Defaults.javaUserClassPath else ""
def scalaBootClassPath = cmdLineOrElse("bootclasspath", Defaults.scalaBootClassPath)
def scalaExtDirs = cmdLineOrElse("extdirs", Defaults.scalaExtDirs)
- def userClassPath = cmdLineOrElse("classpath", Defaults.scalaUserClassPath)
- def sourcePath = cmdLineOrElse("sourcepath", Defaults.scalaSourcePath)
+ /** Scaladoc doesn't need any bootstrapping, otherwise will create errors such as:
+ * [scaladoc] ../scala-trunk/src/reflect/scala/reflect/macros/Reifiers.scala:89: error: object api is not a member of package reflect
+ * [scaladoc] case class ReificationException(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg)
+ * [scaladoc] ^
+ * because the bootstrapping will look at the sourcepath and create package "reflect" in "<root>"
+ * and then when typing relative names, instead of picking <root>.scala.relect, typedIdentifier will pick up the
+ * <root>.reflect package created by the bootstrapping. Thus, no bootstrapping for scaladoc!
+ * TODO: we should refactor this as a separate -bootstrap option to have a clean implementation, no? */
+ def sourcePath = if (!settings.isScaladoc) cmdLineOrElse("sourcepath", Defaults.scalaSourcePath) else ""
+
+ /** Against my better judgment, giving in to martin here and allowing
+ * CLASSPATH to be used automatically. So for the user-specified part
+ * of the classpath:
+ *
+ * - If -classpath or -cp is given, it is that
+ * - Otherwise, if CLASSPATH is set, it is that
+ * - If neither of those, then "." is used.
+ */
+ def userClassPath = (
+ if (!settings.classpath.isDefault)
+ settings.classpath.value
+ else sys.env.getOrElse("CLASSPATH", ".")
+ )
import context._
diff --git a/src/compiler/scala/tools/util/Profiling.scala b/src/compiler/scala/tools/util/Profiling.scala
deleted file mode 100644
index 44393ee..0000000
--- a/src/compiler/scala/tools/util/Profiling.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package util
-
-/** This is a (very) minimal stub for profiling, the purpose
- * of which is making it possible to integrate profiling hooks in
- * the compiler without creating a dependency on any particular
- * profiler. You can specify a profiler class (which must be an
- * instance of this class) like so:
- *
- * // or -Yprofile:phase to profile individual phases
- * scalac -Yprofile-class your.profiler.Class -Yprofile:all <files>
- *
- */
-abstract class Profiling {
- def isActive: Boolean
- def startProfiling(): Unit
- def stopProfiling(): Unit
- def captureSnapshot(): Unit
-
- def allocationFreq: Option[Int] // record every Nth allocation
- def startRecordingAllocations(): Unit
- def stopRecordingAllocations(): Unit
-
- def profile[T](body: => T): T = profileCPU(body)
-
- def profileCPU[T](body: => T): T = {
- startProfiling()
- val result = body
- stopProfiling()
- captureSnapshot()
- result
- }
-
- def profileMem[T](body: => T): T = {
- startRecordingAllocations()
- val result = body
- stopRecordingAllocations()
- result
- }
-
- /** Advance the current object generation.
- *
- * Each object on the heap is associated to a generation number. Generations
- * start at 1, and are automatically advanced on each snapshot capture.
- */
- def advanceGeneration(desc: String = ""): Unit
-}
diff --git a/src/compiler/scala/tools/util/SignalManager.scala b/src/compiler/scala/tools/util/SignalManager.scala
deleted file mode 100644
index 19b5348..0000000
--- a/src/compiler/scala/tools/util/SignalManager.scala
+++ /dev/null
@@ -1,275 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package util
-
-import java.lang.reflect.{ Method, Constructor }
-import scala.tools.reflect._
-import scala.collection.{ mutable, immutable }
-import nsc.io.timer
-import nsc.util.{ ScalaClassLoader, Exceptional }
-import Exceptional.unwrap
-import scala.util.Random
-
-/** Signal handling code. 100% clean of any references to sun.misc:
- * it's all reflection and proxies and invocation handlers and lasers,
- * so even the choosiest runtimes will be cool with it.
- *
- * Sun/Oracle says sun.misc.* is unsupported and therefore so is all
- * of this. Simple examples:
- * {{{
- val manager = scala.tools.util.SignalManager // or you could make your own
- // Assignment clears any old handlers; += chains them.
- manager("HUP") = println("HUP 1!")
- manager("HUP") += println("HUP 2!")
- // Use raise() to raise a signal: this will print both lines
- manager("HUP").raise()
- // See a report on every signal's current handler
- manager.dump()
- * }}}
- */
-class SignalManager(classLoader: ScalaClassLoader) {
- def this() = this(ScalaClassLoader.getSystemLoader)
- private val illegalArgHandler: PartialFunction[Throwable, Boolean] = {
- case x if unwrap(x).isInstanceOf[IllegalArgumentException] => false
- }
- private def fail(msg: String) = new SignalError(msg)
-
- object rSignalHandler extends Shield {
- val className = "sun.misc.SignalHandler"
- val classLoader = SignalManager.this.classLoader
-
- lazy val SIG_DFL = field("SIG_DFL") get null
- lazy val SIG_IGN = field("SIG_IGN") get null
-
- /** Create a new signal handler based on the function.
- */
- def apply(action: Invoked => Unit) = Mock.fromInterfaces(clazz) {
- case inv @ Invoked.NameAndArgs("handle", _ :: Nil) => action(inv)
- }
- def empty = rSignalHandler(_ => ())
- }
- import rSignalHandler.{ SIG_DFL, SIG_IGN }
-
- object rSignal extends Shield {
- val className = "sun.misc.Signal"
- val classLoader = SignalManager.this.classLoader
-
- lazy val handleMethod = method("handle", 2)
- lazy val raiseMethod = method("raise", 1)
- lazy val numberMethod = method("getNumber", 0)
-
- /** Create a new Signal with the given name.
- */
- def apply(name: String) = constructor(classOf[String]) newInstance name
- def handle(signal: AnyRef, current: AnyRef) = {
- if (signal == null || current == null) fail("Signals cannot be null")
- else handleMethod.invoke(null, signal, current)
- }
- def raise(signal: AnyRef) = {
- if (signal == null) fail("Signals cannot be null")
- else raiseMethod.invoke(null, signal)
- }
- def number(signal: AnyRef): Int = numberMethod.invoke(signal).asInstanceOf[Int]
-
- class WSignal(val name: String) {
- lazy val signal = rSignal apply name
- def number = rSignal number signal
- def raise() = rSignal raise signal
- def handle(handler: AnyRef) = rSignal.handle(signal, handler)
-
- def isError = false
- def setTo(body: => Unit) = register(name, false, body)
- def +=(body: => Unit) = register(name, true, body)
-
- /** It's hard to believe there's no way to get a signal's current
- * handler without replacing it, but if there is I couldn't find
- * it, so we have this swapping code.
- */
- def withCurrentHandler[T](f: AnyRef => T): T = {
- val swap = handle(rSignalHandler.empty)
-
- try f(swap)
- finally handle(swap)
- }
- def isDefault = try withCurrentHandler {
- case SIG_DFL => true
- case _ => false
- } catch illegalArgHandler
- def isIgnored = try withCurrentHandler {
- case SIG_IGN => true
- case _ => false
- } catch illegalArgHandler
- def isSetTo(ref: AnyRef) =
- try withCurrentHandler { _ eq ref }
- catch illegalArgHandler
-
- def handlerString() = withCurrentHandler {
- case SIG_DFL => "Default"
- case SIG_IGN => "Ignore"
- case x => "" + x
- }
-
- override def toString = "%10s %s".format("SIG" + name,
- try handlerString()
- catch { case x: Exception => "VM threw " + unwrap(x) }
- )
- override def equals(other: Any) = other match {
- case x: WSignal => name == x.name
- case _ => false
- }
- override def hashCode = name.##
- }
- }
- type WSignal = rSignal.WSignal
-
- /** Adds a handler for the named signal. If shouldChain is true,
- * the installed handler will call the previous handler after the
- * new one has executed. If false, the old handler is dropped.
- */
- private def register(name: String, shouldChain: Boolean, body: => Unit) = {
- val signal = rSignal(name)
- val current = rSignalHandler(_ => body)
- val prev = rSignal.handle(signal, current)
-
- if (shouldChain) {
- val chainer = rSignalHandler { inv =>
- val signal = inv.args.head
-
- inv invokeOn current
- prev match {
- case SIG_IGN | SIG_DFL => ()
- case _ => inv invokeOn prev
- }
- }
- rSignal.handle(signal, chainer)
- chainer
- }
- else current
- }
-
- /** Use apply and update to get and set handlers.
- */
- def apply(name: String): WSignal =
- try { new WSignal(name) }
- catch { case x: IllegalArgumentException => new SignalError(x.getMessage) }
-
- def update(name: String, body: => Unit): Unit = apply(name) setTo body
-
- class SignalError(message: String) extends WSignal("") {
- override def isError = true
- override def toString = message
- }
-
- def public(name: String, description: String)(body: => Unit): Unit = {
- try {
- val wsig = apply(name)
- if (wsig.isError)
- return
-
- wsig setTo body
- registerInfoHandler()
- addPublicHandler(wsig, description)
- }
- catch {
- case x: Exception => () // ignore failure
- }
- }
- /** Makes sure the info handler is registered if we see activity. */
- private def registerInfoHandler() = {
- val INFO = apply("INFO")
- if (publicHandlers.isEmpty && INFO.isDefault) {
- INFO setTo Console.println(info())
- addPublicHandler(INFO, "Print signal handler registry on console.")
- }
- }
- private def addPublicHandler(wsig: WSignal, description: String) = {
- if (publicHandlers contains wsig) ()
- else publicHandlers = publicHandlers.updated(wsig, description)
- }
- private var publicHandlers: Map[WSignal, String] = Map()
- def info(): String = {
- registerInfoHandler()
- val xs = publicHandlers.toList sortBy (_._1.name) map {
- case (wsig, descr) => " %2d %5s %s".format(wsig.number, wsig.name, descr)
- }
-
- xs.mkString("\nSignal handler registry:\n", "\n", "")
- }
-}
-
-object SignalManager extends SignalManager {
- private implicit def mkWSignal(name: String): WSignal = this(name)
- private lazy val signalNumberMap = all map (x => x.number -> x) toMap
-
- def all = List(
- HUP, INT, QUIT, ILL, TRAP, ABRT, EMT, FPE, // 1-8
- KILL, BUS, SEGV, SYS, PIPE, ALRM, TERM, URG, // 9-15
- STOP, TSTP, CONT, CHLD, TTIN, TTOU, IO, XCPU, // 16-23
- XFSZ, VTALRM, PROF, WINCH, INFO, USR1, USR2 // 24-31
- )
- /** Signals which are either inaccessible or which seem like
- * particularly bad choices when looking for an open one.
- */
- def reserved = Set(QUIT, TRAP, ABRT, KILL, BUS, SEGV, ALRM, STOP, INT)
- def unreserved = all filterNot reserved
- def defaultSignals() = unreserved filter (_.isDefault)
- def ignoredSignals() = unreserved filter (_.isIgnored)
- def findOpenSignal() = Random.shuffle(defaultSignals()).head
-
- def dump() = all foreach (x => println("%2s %s".format(x.number, x)))
-
- def apply(sigNumber: Int): WSignal = signalNumberMap(sigNumber)
-
- def HUP: WSignal = "HUP"
- def INT: WSignal = "INT"
- def QUIT: WSignal = "QUIT"
- def ILL: WSignal = "ILL"
- def TRAP: WSignal = "TRAP"
- def ABRT: WSignal = "ABRT"
- def EMT: WSignal = "EMT"
- def FPE: WSignal = "FPE"
- def KILL: WSignal = "KILL"
- def BUS: WSignal = "BUS"
- def SEGV: WSignal = "SEGV"
- def SYS: WSignal = "SYS"
- def PIPE: WSignal = "PIPE"
- def ALRM: WSignal = "ALRM"
- def TERM: WSignal = "TERM"
- def URG: WSignal = "URG"
- def STOP: WSignal = "STOP"
- def TSTP: WSignal = "TSTP"
- def CONT: WSignal = "CONT"
- def CHLD: WSignal = "CHLD"
- def TTIN: WSignal = "TTIN"
- def TTOU: WSignal = "TTOU"
- def IO: WSignal = "IO"
- def XCPU: WSignal = "XCPU"
- def XFSZ: WSignal = "XFSZ"
- def VTALRM: WSignal = "VTALRM"
- def PROF: WSignal = "PROF"
- def WINCH: WSignal = "WINCH"
- def INFO: WSignal = "INFO"
- def USR1: WSignal = "USR1"
- def USR2: WSignal = "USR2"
-
- /** Given a number of seconds, a signal, and a function: sets up a handler which upon
- * receiving the signal once, calls the function with argument true, and if the
- * signal is received again within the allowed time, calls it with argument false.
- * (Otherwise it calls it with true and starts the timer over again.)
- */
- def requireInterval(seconds: Int, wrapper: WSignal)(fn: Boolean => Unit) = {
- var received = false
- wrapper setTo {
- if (received) fn(false)
- else {
- received = true
- fn(true)
- timer(seconds)(received = false)
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/util/Signallable.scala b/src/compiler/scala/tools/util/Signallable.scala
deleted file mode 100644
index af98bfa..0000000
--- a/src/compiler/scala/tools/util/Signallable.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package util
-
-import java.security.AccessControlException
-
-/** A class for things which are signallable.
- */
-abstract class Signallable[T] private (val signal: String, val description: String) {
- private var last: Option[T] = None
- private def lastString = last match {
- case Some(()) => ""
- case Some(x) => "" + x
- case _ => ""
- }
-
- /** The most recent result from the signal handler. */
- def lastResult: Option[T] = last
-
- /** Method to be executed when the associated signal is received. */
- def onSignal(): T
-
- // todo:
- // def unregister(): Boolean
-
- override def toString = " SIG(%s) => %s%s".format(
- signal, description, if (lastString == "") "" else " (" + lastString + ")"
- )
-}
-
-object Signallable {
- /** Same as the other apply, but an open signal is found for you.
- */
- def apply[T](description: String)(body: => T): Signallable[T] = wrap {
- apply(SignalManager.findOpenSignal().name, description)(body)
- }
-
- /** Given a signal name, a description, and a handler body, this
- * registers a signal handler and returns the Signallable instance.
- * The signal handler registry is thereafter available by calling
- * SignalManager.info(), or sending SIGINFO to the manager will
- * dump it to console.
- */
- def apply[T](signal: String, description: String)(body: => T): Signallable[T] = wrap {
- val result = create[T](signal, description, body)
- SignalManager.public(signal, description)(result.onSignal())
- result
- }
-
- private def wrap[T](body: => Signallable[T]): Signallable[T] =
- try body catch { case _: AccessControlException => null }
-
- private def create[T](signal: String, description: String, body: => T): Signallable[T] =
- new Signallable[T](signal, description) {
- def onSignal = {
- val result = body
- last = Some(result)
- result
- }
- }
-}
diff --git a/src/compiler/scala/tools/util/SocketConnection.scala b/src/compiler/scala/tools/util/SocketConnection.scala
deleted file mode 100644
index 6b56be5..0000000
--- a/src/compiler/scala/tools/util/SocketConnection.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools.util
-
-import java.io.{PrintWriter, InputStreamReader, BufferedReader}
-import java.io.IOException
-import java.net.{Socket, InetAddress}
-import java.net.UnknownHostException
-
-/** This class implements the connection to the server.
- *
- * @author Martin Odersky
- * @version 1.0
- */
-class SocketConnection(hostname: String, port: Int) {
-
- def this(port: Int) = this(InetAddress.getLocalHost().getHostName(), port)
-
- private var socket: Socket = _
- var out: PrintWriter = _
- var in: BufferedReader = _
- var errorMessage: String = _
-
- def open(): Boolean = {
- try {
- socket = new Socket(hostname, port)
- out = new PrintWriter(socket.getOutputStream(), true)
- in = new BufferedReader(new InputStreamReader(socket.getInputStream()))
- true
- } catch {
- case e: UnknownHostException =>
- errorMessage = "Don't know about host: " + hostname + "."
- false
- case e: IOException =>
- errorMessage = "Couldn't get I/O for the connection to: " + hostname + "."
- false
- }
- }
-
- def close() {
- in.close()
- out.close()
- socket.close()
- }
-}
diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala
index fba3d4c..1b06ce2 100644
--- a/src/compiler/scala/tools/util/SocketServer.scala
+++ b/src/compiler/scala/tools/util/SocketServer.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -16,8 +16,8 @@ trait CompileOutputCommon {
def verbose: Boolean
def info(msg: String) = if (verbose) echo(msg)
- def echo(msg: String) = Console println msg
- def warn(msg: String) = System.err println msg
+ def echo(msg: String) = {Console println msg; Console.flush}
+ def warn(msg: String) = {Console.err println msg; Console.flush}
def fatal(msg: String) = { warn(msg) ; sys.exit(1) }
}
diff --git a/src/compiler/scala/tools/util/StringOps.scala b/src/compiler/scala/tools/util/StringOps.scala
deleted file mode 100644
index 65ff582..0000000
--- a/src/compiler/scala/tools/util/StringOps.scala
+++ /dev/null
@@ -1,89 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools
-package util
-
-/** This object provides utility methods to extract elements
- * from Strings.
- *
- * @author Martin Odersky
- * @version 1.0
- */
-trait StringOps {
- def onull(s: String) = if (s == null) "" else s
- def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
- def ojoin(xs: Seq[String], sep: String) = oempty(xs: _*) mkString sep
- def ojoinOr(xs: Seq[String], sep: String, orElse: String) = {
- val ys = oempty(xs: _*)
- if (ys.isEmpty) orElse else ys mkString sep
- }
-
- def decompose(str: String, sep: Char): List[String] = {
- def ws(start: Int): List[String] =
- if (start == str.length) List()
- else if (str.charAt(start) == sep) ws(start + 1)
- else {
- val end = str.indexOf(sep, start)
- if (end < 0) List(str.substring(start))
- else str.substring(start, end) :: ws(end + 1)
- }
- ws(0)
- }
-
- def words(str: String): List[String] = decompose(str, ' ')
-
- def stripPrefixOpt(str: String, prefix: String): Option[String] =
- if (str startsWith prefix) Some(str drop prefix.length)
- else None
-
- def stripSuffixOpt(str: String, suffix: String): Option[String] =
- if (str endsWith suffix) Some(str dropRight suffix.length)
- else None
-
- def splitWhere(str: String, f: Char => Boolean, doDropIndex: Boolean = false): Option[(String, String)] =
- splitAt(str, str indexWhere f, doDropIndex)
-
- def splitAt(str: String, idx: Int, doDropIndex: Boolean = false): Option[(String, String)] =
- if (idx == -1) None
- else Some(str take idx, str drop (if (doDropIndex) idx + 1 else idx))
-
- /** Returns a string meaning "n elements".
- *
- * @param n ...
- * @param elements ...
- * @return ...
- */
- def countElementsAsString(n: Int, elements: String): String =
- n match {
- case 0 => "no " + elements + "s"
- case 1 => "one " + elements
- case 2 => "two " + elements + "s"
- case 3 => "three " + elements + "s"
- case 4 => "four " + elements + "s"
- case _ => "" + n + " " + elements + "s"
- }
-
- /** Turns a count into a friendly English description if n<=4.
- *
- * @param n ...
- * @return ...
- */
- def countAsString(n: Int): String =
- n match {
- case 0 => "none"
- case 1 => "one"
- case 2 => "two"
- case 3 => "three"
- case 4 => "four"
- case _ => "" + n
- }
-}
-
-object StringOps extends StringOps { }
diff --git a/src/compiler/scala/tools/util/VerifyClass.scala b/src/compiler/scala/tools/util/VerifyClass.scala
new file mode 100644
index 0000000..d208a9f
--- /dev/null
+++ b/src/compiler/scala/tools/util/VerifyClass.scala
@@ -0,0 +1,53 @@
+package scala.tools.util
+
+import scala.tools.nsc.io._
+import java.net.URLClassLoader
+import scala.collection.JavaConverters._
+
+
+object VerifyClass {
+
+ // Returns the error if there's a failure
+ private def checkClass(name : String, cl: ClassLoader) : (String, Option[String]) = {
+ try {
+ Class.forName(name, true, cl)
+ (name, None)
+ } catch {
+ case x: Throwable => // TODO: only catch VerifyError (and related) + ExceptionInInitializationError (for static objects that bomb on classload)
+ (name, Some(x.toString))
+ }
+ }
+
+ def checkClassesInJar(name: String, cl: ClassLoader) = new Jar(File(name)) filter (_.getName.endsWith(".class")) map { x =>
+ checkClass(x.getName.stripSuffix(".class").replace('/', '.'), cl)
+ } toMap
+
+ def checkClassesInDir(name: String, cl: ClassLoader) = (for {
+ file <- Path(name).walk
+ if file.name endsWith ".class"
+ } yield checkClass(name, cl)) toMap
+
+ def checkClasses(name: String, cl: ClassLoader) =
+ if (name endsWith ".jar") checkClassesInJar(name, cl)
+ else checkClassesInDir(name, cl)
+
+ /** Attempts to load all classes on the classpath defined in the args string array. This method is meant to be used via reflection from tools like SBT or Ant. */
+ def run(args: Array[String]): java.util.Map[String, String] = {
+ val urls = args.map(Path.apply).map(_.toFile.toURI.toURL).toArray
+ println("As urls: " + urls.mkString(","))
+ val cl = URLClassLoader.newInstance(urls, null)
+ val results = args.flatMap(n => checkClasses(n, cl)).toMap
+ (for { (name, result) <- results } yield (name, result.getOrElse(null))).asJava
+ }
+
+
+ def main(args: Array[String]): Unit = {
+ val results = run(args).asScala
+ println("Processed " + results.size + " classes.")
+ val errors = results.filter(_._2 != null)
+ for( (name, result) <- results; if result != null) {
+ println(name + " had error: " + result)
+ }
+ System.exit(if(errors.size > 0) 1 else 0)
+ }
+}
diff --git a/src/compiler/scala/tools/util/Which.scala b/src/compiler/scala/tools/util/Which.scala
deleted file mode 100644
index 1cafe15..0000000
--- a/src/compiler/scala/tools/util/Which.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package util
-
-import scala.tools.nsc._
-
-/** A tool for identifying which classfile is being used.
- * under the given conditions.
- */
-object Which {
- def main(args: Array[String]): Unit = {
- val settings = new Settings()
- val names = settings.processArguments(args.toList, true)._2
- val global = new Global(settings)
- val cp = global.classPath
-
- import cp._
-
- for (name <- names) {
- def fail() = println("Could not find: %s".format(name))
- (cp findClass name) match {
- case Some(classRep) => classRep.binary match {
- case Some(f) => println("%s is %s".format(name, f))
- case _ => fail
- }
- case _ => fail
- }
- }
- }
-}
-
-
-
-
diff --git a/src/continuations/library/scala/util/continuations/ControlContext.scala b/src/continuations/library/scala/util/continuations/ControlContext.scala
index f183f99..44a5b53 100644
--- a/src/continuations/library/scala/util/continuations/ControlContext.scala
+++ b/src/continuations/library/scala/util/continuations/ControlContext.scala
@@ -1,8 +1,26 @@
-// $Id$
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
package scala.util.continuations
-import annotation.{ Annotation, StaticAnnotation, TypeConstraint }
+import scala.annotation.{ Annotation, StaticAnnotation, TypeConstraint }
+
+/** This annotation is used to mark a parameter as part of a continuation
+ * context.
+ *
+ * The type `A @cpsParam[B,C]` is desugared to `ControlContext[A,B,C]` at compile
+ * time.
+ *
+ * @tparam B The type of computation state after computation has executed, and
+ * before control is returned to the shift.
+ * @tparam C The eventual return type of this delimited compuation.
+ * @see scala.util.continuations.ControlContext
+ */
class cpsParam[-B,+C] extends StaticAnnotation with TypeConstraint
private class cpsSym[B] extends Annotation // implementation detail
@@ -13,7 +31,55 @@ private class cpsPlus extends StaticAnnotation with TypeConstraint // implementa
private class cpsMinus extends Annotation // implementation detail
-
+/**
+ * This class represent a portion of computation that has a 'hole' in it. The
+ * class has the ability to compute state up until a certain point where the
+ * state has the `A` type. If this context is given a function of type
+ * `A => B` to move the state to the `B` type, then the entire computation can
+ * be completed resulting in a value of type `C`.
+ *
+ * An Example: {{{
+ * val cc = new ControlContext[String, String, String](
+ * fun = { (f: String=>String, err: Exception => String) =>
+ * val updatedState =
+ * try f("State")
+ * catch {
+ * case e: Exception => err(e)
+ * }
+ * updatedState + "-Complete!"
+ * },
+ * x = null.asIntanceOf[String]
+ * }
+ * cc.foreach(_ + "-Continued") // Results in "State-Continued-Complete!"
+ * }}}
+ *
+ * This class is used to transform calls to `shift` in the `continuations`
+ * package. Direct use and instantiation is possible, but usually reserved
+ * for advanced cases.
+ *
+ *
+ * A context may either be ''trivial'' or ''non-trivial''. A ''trivial''
+ * context '''just''' has a state of type `A`. When completing the computation,
+ * it's only necessary to use the function of type `A => B` directly against
+ * the trivial value. A ''non-trivial'' value stores a computation '''around'''
+ * the state transformation of type `A => B` and cannot be short-circuited.
+ *
+ * @param fun The captured computation so far. The type
+ * `(A => B, Exception => B) => C` is a function where:
+ * - The first parameter `A=>B` represents the computation defined against
+ * the current state held in the ControlContext.
+ * - The second parameter `Exception => B` represents a computation to
+ * perform if an exception is thrown from the first parameter's computation.
+ * - The return value is the result of the entire computation contained in this
+ * `ControlContext`.
+ * @param x The current state stored in this context. Allowed to be null if
+ * the context is non-trivial.
+ * @tparam A The type of the state currently held in the context.
+ * @tparam B The type of the transformed state needed to complete this computation.
+ * @tparam C The return type of the entire computation stored in this context.
+ * @note `fun` and `x` are allowed to be `null`.
+ * @see scala.util.continutations.shiftR
+ */
final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val x: A) extends Serializable {
/*
@@ -26,11 +92,16 @@ final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val
}
*/
-
+ /**
+ * Modifies the currently captured state in this `ControlContext`.
+ * @tparam A1 The new type of state in this context.
+ * @param f A transformation function on the current state of the `ControlContext`.
+ * @return The new `ControlContext`.
+ */
@noinline final def map[A1](f: A => A1): ControlContext[A1,B,C] = {
if (fun eq null)
try {
- new ControlContext(null, f(x)) // TODO: only alloc if f(x) != x
+ new ControlContext[A1,B,C](null, f(x)) // TODO: only alloc if f(x) != x
} catch {
case ex: Exception =>
new ControlContext((k: A1 => B, thr: Exception => B) => thr(ex).asInstanceOf[C], null.asInstanceOf[A1])
@@ -55,6 +126,17 @@ final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val
// it would be nice if @inline would turn the trivial path into a tail call.
// unfortunately it doesn't, so we do it ourselves in SelectiveCPSTransform
+ /**
+ * Maps and flattens this `ControlContext` with another `ControlContext` generated from the current state.
+ * @note The resulting computation is still the type `C`.
+ * @tparam A1 The new type of the contained state.
+ * @tparam B1 The new type of the state after the stored continuation has executed.
+ * @tparam C1 The result type of the nested `ControlContext`. Because the nested `ControlContext` is executed within
+ * the outer `ControlContext`, this type must be `>: B` so that the resulting nested computation can be fed through
+ * the current continuation.
+ * @param f A transformation function from the current state to a nested `ControlContext`.
+ * @return The transformed `ControlContext`.
+ */
@noinline final def flatMap[A1,B1,C1<:B](f: (A => ControlContext[A1,B1,C1])): ControlContext[A1,B1,C] = {
if (fun eq null)
try {
@@ -80,6 +162,11 @@ final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val
}, null.asInstanceOf[A1])
}
+ /**
+ * Runs the computation against the state stored in this `ControlContext`.
+ * @param f the computation that modifies the current state of the context.
+ * @note This method could throw exceptions from the computations.
+ */
final def foreach(f: A => B) = foreachFull(f, throw _)
def foreachFull(f: A => B, g: Exception => B): C = {
@@ -89,8 +176,9 @@ final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val
fun(f, g)
}
-
+ /** @return true if this context only stores a state value and not any deferred computation. */
final def isTrivial = fun eq null
+ /** @return The current state value. */
final def getTrivialValue = x.asInstanceOf[A]
// need filter or other functions?
diff --git a/src/continuations/library/scala/util/continuations/package.scala b/src/continuations/library/scala/util/continuations/package.scala
index aa4681a..90bab56 100644
--- a/src/continuations/library/scala/util/continuations/package.scala
+++ b/src/continuations/library/scala/util/continuations/package.scala
@@ -1,21 +1,134 @@
-// $Id$
-
-
-// TODO: scaladoc
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
package scala.util
+/* TODO: better documentation of return-type modification.
+ * (Especially what "Illegal answer type modification: ... andThen ..." means)
+ */
+
+/**
+ * Delimited continuations are a feature for modifying the usual control flow
+ * of a program. To use continuations, provide the option `-P:continuations:enable`
+ * to the Scala compiler or REPL to activate the compiler plugin.
+ *
+ * Below is an example of using continuations to suspend execution while awaiting
+ * user input. Similar facilities are used in so-called continuation-based web frameworks.
+ *
+ * {{{
+ * def go =
+ * reset {
+ * println("Welcome!")
+ * val first = ask("Please give me a number")
+ * val second = ask("Please enter another number")
+ * printf("The sum of your numbers is: %d\n", first + second)
+ * }
+ * }}}
+ *
+ * The `reset` is provided by this package and delimits the extent of the
+ * transformation. The `ask` is a function that will be defined below. Its
+ * effect is to issue a prompt and then suspend execution awaiting user input.
+ * Once the user provides an input value, execution of the suspended block
+ * resumes.
+ *
+ * {{{
+ * val sessions = new HashMap[UUID, Int=>Unit]
+ * def ask(prompt: String): Int @cps[Unit] =
+ * shift {
+ * k: (Int => Unit) => {
+ * val id = uuidGen
+ * printf("%s\nrespond with: submit(0x%x, ...)\n", prompt, id)
+ * sessions += id -> k
+ * }
+ * }
+ * }}}
+ *
+ * The type of `ask` includes a `@cps` annotation which drives the transformation.
+ * The type signature `Int @cps[Unit]` means that `ask` should be used in a
+ * context requiring an `Int`, but actually it will suspend and return `Unit`.
+ *
+ * The computation leading up to the first `ask` is executed normally. The
+ * remainder of the reset block is wrapped into a closure that is passed as
+ * the parameter `k` to the `shift` function, which can then decide whether
+ * and how to execute the continuation. In this example, the continuation is
+ * stored in a sessions map for later execution. This continuation includes a
+ * second call to `ask`, which is treated likewise once the execution resumes.
+ *
+ * <h2>CPS Annotation</h2>
+ *
+ * The aforementioned `@cps[A]` annotation is an alias for the more general
+ * `@cpsParam[B,C]` where `B=C`. The type `A @cpsParam[B,C]` describes a term
+ * which yields a value of type `A` within an evaluation context producing a
+ * value of type `B`. After the CPS transformation, this return type is
+ * modified to `C`.
+ *
+ * The `@cpsParam` annotations are introduced by `shift` blocks, and propagate
+ * via the return types to the dynamically enclosing context. The propagation
+ * stops upon reaching a `reset` block.
+ */
+
package object continuations {
+ /** An annotation that denotes a type is part of a continuation context.
+ * `@cps[A]` is shorthand for `cpsParam[A,A]`.
+ * @tparam A The return type of the continuation context.
+ */
type cps[A] = cpsParam[A,A]
+ /** An annotation that denotes a type is part of a side effecting continuation context.
+ * `@suspendable` is shorthand notation for `@cpsParam[Unit,Unit]` or `@cps[Unit]`.
+ */
type suspendable = cps[Unit]
-
+ /**
+ * The `shift` function captures the remaining computation in a `reset` block
+ * and passes it to a closure provided by the user.
+ *
+ * For example:
+ * {{{
+ * reset {
+ * shift { (k: Int => Int) => k(5) } + 1
+ * }
+ * }}}
+ *
+ * In this example, `shift` is used in the expression `shift ... + 1`.
+ * The compiler will alter this expression so that the call
+ * to `shift` becomes a parameter to a function, creating something like:
+ * {{{
+ * { (k: Int => Int) => k(5) } apply { _ + 1 }
+ * }}}
+ * The result of this expression is 6.
+ *
+ * There can be more than one `shift` call in a `reset` block. Each call
+ * to `shift` can alter the return type of expression within the reset block,
+ * but will not change the return type of the entire `reset { block }`
+ * expression.
+ *
+ * @param fun A function where
+ * - The parameter is the remainder of computation within the current
+ * `reset` block. This is passed as a function `A => B`.
+ * - The return is the return value of the `ControlContext` which is
+ * generated from this inversion.
+ * @note Must be invoked in the context of a call to `reset`. This context
+ * may not be far up the stack, but a call to reset is needed to
+ * eventually remove the `@cps` annotations from types.
+ */
def shift[A,B,C](fun: (A => B) => C): A @cpsParam[B,C] = {
throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled")
}
-
+ /** Creates a context for continuations captured within the argument closure
+ * of this `reset` call and returns the result of the entire transformed
+ * computation. Within an expression of the form `reset { block }`,
+ * the closure expression (`block`) will be modified such that at each
+ * call to `shift` the remainder of the expression is transformed into a
+ * function to be passed into the shift.
+ * @return The result of a block of code that uses `shift` to capture continuations.
+ */
def reset[A,C](ctx: =>(A @cpsParam[A,C])): C = {
val ctxR = reify[A,A,C](ctx)
if (ctxR.isTrivial)
@@ -46,6 +159,9 @@ package object continuations {
throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled")
}
+ /** This method converts from the sugared `A @cpsParam[B,C]` type to the desugared
+ * `ControlContext[A,B,C]` type. The underlying data is not changed.
+ */
def reify[A,B,C](ctx: =>(A @cpsParam[B,C])): ControlContext[A,B,C] = {
throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled")
}
@@ -54,6 +170,12 @@ package object continuations {
new ControlContext(null, x)
}
+ /**
+ * Captures a computation into a `ControlContext`.
+ * @param fun The function which accepts the inverted computation and returns
+ * a final result.
+ * @see shift
+ */
def shiftR[A,B,C](fun: (A => B) => C): ControlContext[A,B,C] = {
new ControlContext((f:A=>B,g:Exception=>B) => fun(f), null.asInstanceOf[A])
}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index 27c9f9a..00c72cf 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -3,14 +3,13 @@
package scala.tools.selectivecps
import scala.tools.nsc.Global
+import scala.tools.nsc.typechecker.Modes
+import scala.tools.nsc.MissingRequirementError
-import scala.collection.mutable.{Map, HashMap}
-
-import java.io.{StringWriter, PrintWriter}
-
-abstract class CPSAnnotationChecker extends CPSUtils {
+abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val global: Global
import global._
+ import analyzer.{AnalyzerPlugin, Typer}
import definitions._
//override val verbose = true
@@ -20,6 +19,13 @@ abstract class CPSAnnotationChecker extends CPSUtils {
* Checks whether @cps annotations conform
*/
object checker extends AnnotationChecker {
+ private[CPSAnnotationChecker] def addPlusMarker(tp: Type) = tp withAnnotation newPlusMarker()
+ private[CPSAnnotationChecker] def addMinusMarker(tp: Type) = tp withAnnotation newMinusMarker()
+
+ private[CPSAnnotationChecker] def cleanPlus(tp: Type) =
+ removeAttribs(tp, MarkerCPSAdaptPlus, MarkerCPSTypes)
+ private[CPSAnnotationChecker] def cleanPlusWith(tp: Type)(newAnnots: AnnotationInfo*) =
+ cleanPlus(tp) withAnnotations newAnnots.toList
/** Check annotations to decide whether tpe1 <:< tpe2 */
def annotationsConform(tpe1: Type, tpe2: Type): Boolean = {
@@ -31,95 +37,98 @@ abstract class CPSAnnotationChecker extends CPSUtils {
if (tpe1.typeSymbol eq NothingClass)
return true
- val annots1 = filterAttribs(tpe1,MarkerCPSTypes)
- val annots2 = filterAttribs(tpe2,MarkerCPSTypes)
+ val annots1 = cpsParamAnnotation(tpe1)
+ val annots2 = cpsParamAnnotation(tpe2)
// @plus and @minus should only occur at the left, and never together
// TODO: insert check
- val adaptPlusAnnots1 = filterAttribs(tpe1,MarkerCPSAdaptPlus)
- val adaptMinusAnnots1 = filterAttribs(tpe1,MarkerCPSAdaptMinus)
// @minus @cps is the same as no annotations
- if (!adaptMinusAnnots1.isEmpty)
+ if (hasMinusMarker(tpe1))
return annots2.isEmpty
// to handle answer type modification, we must make @plus <:< @cps
- if (!adaptPlusAnnots1.isEmpty && annots1.isEmpty)
+ if (hasPlusMarker(tpe1) && annots1.isEmpty)
return true
// @plus @cps will fall through and compare the @cps type args
-
// @cps parameters must match exactly
if ((annots1 corresponds annots2)(_.atp <:< _.atp))
return true
- // Need to handle uninstantiated type vars specially:
-
+ // Need to handle uninstantiated type vars specially:
+
// g map (x => x) with expected type List[Int] @cps
// results in comparison ?That <:< List[Int] @cps
-
+
// Instantiating ?That to an annotated type would fail during
// transformation.
-
+
// Instead we force-compare tpe1 <:< tpe2.withoutAnnotations
// to trigger instantiation of the TypeVar to the base type
-
+
// This is a bit unorthodox (we're only supposed to look at
// annotations here) but seems to work.
-
+
if (!annots2.isEmpty && !tpe1.isGround)
return tpe1 <:< tpe2.withoutAnnotations
-
+
false
}
-
/** Refine the computed least upper bound of a list of types.
* All this should do is add annotations. */
override def annotationsLub(tpe: Type, ts: List[Type]): Type = {
if (!cpsEnabled) return tpe
- val annots1 = filterAttribs(tpe, MarkerCPSTypes)
- val annots2 = ts flatMap (filterAttribs(_, MarkerCPSTypes))
+ val annots1 = cpsParamAnnotation(tpe)
+ val annots2 = ts flatMap cpsParamAnnotation
if (annots2.nonEmpty) {
- val cpsLub = AnnotationInfo(global.lub(annots1:::annots2 map (_.atp)), Nil, Nil)
+ val cpsLub = newMarker(global.lub(annots1:::annots2 map (_.atp)))
val tpe1 = if (annots1.nonEmpty) removeAttribs(tpe, MarkerCPSTypes) else tpe
tpe1.withAnnotation(cpsLub)
- } else tpe
+ }
+ else tpe
}
/** Refine the bounds on type parameters to the given type arguments. */
override def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = {
if (!cpsEnabled) return bounds
- val anyAtCPS = AnnotationInfo(appliedType(MarkerCPSTypes.tpe, List(NothingClass.tpe, AnyClass.tpe)), Nil, Nil)
- if (isFunctionType(tparams.head.owner.tpe) || tparams.head.owner == PartialFunctionClass) {
+ val anyAtCPS = newCpsParamsMarker(NothingClass.tpe, AnyClass.tpe)
+ if (isFunctionType(tparams.head.owner.tpe) || isPartialFunctionType(tparams.head.owner.tpe)) {
vprintln("function bound: " + tparams.head.owner.tpe + "/"+bounds+"/"+targs)
- if (targs.last.hasAnnotation(MarkerCPSTypes))
+ if (hasCpsParamTypes(targs.last))
bounds.reverse match {
- case res::b if !res.hi.hasAnnotation(MarkerCPSTypes) =>
+ case res::b if !hasCpsParamTypes(res.hi) =>
(TypeBounds(res.lo, res.hi.withAnnotation(anyAtCPS))::b).reverse
case _ => bounds
}
else
bounds
- } else if (tparams.head.owner == ByNameParamClass) {
+ }
+ else if (tparams.head.owner == ByNameParamClass) {
vprintln("byname bound: " + tparams.head.owner.tpe + "/"+bounds+"/"+targs)
- if (targs.head.hasAnnotation(MarkerCPSTypes) && !bounds.head.hi.hasAnnotation(MarkerCPSTypes))
- TypeBounds(bounds.head.lo, bounds.head.hi.withAnnotation(anyAtCPS))::Nil
+ val TypeBounds(lo, hi) = bounds.head
+ if (hasCpsParamTypes(targs.head) && !hasCpsParamTypes(hi))
+ TypeBounds(lo, hi withAnnotation anyAtCPS) :: Nil
else bounds
} else
bounds
}
+ }
+
+ object plugin extends AnalyzerPlugin {
+ import checker._
- override def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = {
+ override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
if (!cpsEnabled) return false
vprintln("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
- val annots1 = filterAttribs(tree.tpe,MarkerCPSTypes)
- val annots2 = filterAttribs(pt,MarkerCPSTypes)
+ val annots1 = cpsParamAnnotation(tree.tpe)
+ val annots2 = cpsParamAnnotation(pt)
if ((mode & global.analyzer.PATTERNmode) != 0) {
//println("can adapt pattern annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
@@ -145,15 +154,12 @@ abstract class CPSAnnotationChecker extends CPSUtils {
}
*/
if ((mode & global.analyzer.EXPRmode) != 0) {
- if ((annots1 corresponds annots2) { case (a1,a2) => a1.atp <:< a2.atp }) {
+ if ((annots1 corresponds annots2)(_.atp <:< _.atp)) {
vprintln("already same, can't adapt further")
- return false
- }
-
- if (annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.BYVALmode) == 0)) {
+ false
+ } else if (annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.BYVALmode) == 0)) {
//println("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
- val adapt = AnnotationInfo(MarkerCPSAdaptPlus.tpe, Nil, Nil)
- if (!tree.tpe.annotations.contains(adapt)) {
+ if (!hasPlusMarker(tree.tpe)) {
// val base = tree.tpe <:< removeAllCPSAnnotations(pt)
// val known = global.analyzer.isFullyDefined(pt)
// println(same + "/" + base + "/" + known)
@@ -161,81 +167,95 @@ abstract class CPSAnnotationChecker extends CPSUtils {
// TBD: use same or not?
//if (same) {
vprintln("yes we can!! (unit)")
- return true
+ true
//}
- }
- } else if (!annots1.isEmpty && ((mode & global.analyzer.BYVALmode) != 0)) {
- if (!tree.tpe.hasAnnotation(MarkerCPSAdaptMinus)) {
+ } else false
+ } else if (!hasPlusMarker(tree.tpe) && annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.RETmode) != 0)) {
+ vprintln("checking enclosing method's result type without annotations")
+ tree.tpe <:< pt.withoutAnnotations
+ } else if (!hasMinusMarker(tree.tpe) && !annots1.isEmpty && ((mode & global.analyzer.BYVALmode) != 0)) {
+ val optCpsTypes: Option[(Type, Type)] = cpsParamTypes(tree.tpe)
+ val optExpectedCpsTypes: Option[(Type, Type)] = cpsParamTypes(pt)
+ if (optCpsTypes.isEmpty || optExpectedCpsTypes.isEmpty) {
vprintln("yes we can!! (byval)")
- return true
+ true
+ } else { // check cps param types
+ val cpsTpes = optCpsTypes.get
+ val cpsPts = optExpectedCpsTypes.get
+ // class cpsParam[-B,+C], therefore:
+ cpsPts._1 <:< cpsTpes._1 && cpsTpes._2 <:< cpsPts._2
}
- }
- }
- false
+ } else false
+ } else false
}
-
- override def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = {
+ override def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
if (!cpsEnabled) return tree
- vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+ vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + modeString(mode) + " / " + pt)
- val annots1 = filterAttribs(tree.tpe,MarkerCPSTypes)
- val annots2 = filterAttribs(pt,MarkerCPSTypes)
+ val patMode = (mode & global.analyzer.PATTERNmode) != 0
+ val exprMode = (mode & global.analyzer.EXPRmode) != 0
+ val byValMode = (mode & global.analyzer.BYVALmode) != 0
+ val retMode = (mode & global.analyzer.RETmode) != 0
- if ((mode & global.analyzer.PATTERNmode) != 0) {
- if (!annots1.isEmpty) {
- return tree.setType(removeAllCPSAnnotations(tree.tpe))
- }
- }
+ val annotsTree = cpsParamAnnotation(tree.tpe)
+ val annotsExpected = cpsParamAnnotation(pt)
-/*
+ // not sure I rephrased this comment correctly:
+ // replacing `patMode` in the condition below by `patMode || ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode))`
// doesn't work correctly -- still relying on addAnnotations to remove things from ValDef symbols
- if ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode) != 0) {
- if (!annots1.isEmpty) {
- println("removing annotation from " + tree + "/" + tree.tpe)
- val s = tree.setType(removeAllCPSAnnotations(tree.tpe))
- println(s)
- s
- }
- }
-*/
-
- if ((mode & global.analyzer.EXPRmode) != 0) {
- if (annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.BYVALmode) == 0)) { // shiftUnit
- // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
- // tree will look like having any possible annotation
- //println("adapt annotations " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
-
- val adapt = AnnotationInfo(MarkerCPSAdaptPlus.tpe, Nil, Nil)
- //val same = annots2 forall { case AnnotationInfo(atp: TypeRef, _, _) => atp.typeArgs(0) =:= atp.typeArgs(1) }
- // TBD: use same or not? see infer0.scala/infer1.scala
-
- // CAVEAT:
- // for monomorphic answer types we want to have @plus @cps (for better checking)
- // for answer type modification we want to have only @plus (because actual answer type may differ from pt)
-
- //val known = global.analyzer.isFullyDefined(pt)
-
- if (/*same &&*/ !tree.tpe.annotations.contains(adapt)) {
- //if (known)
- return tree.setType(tree.tpe.withAnnotations(adapt::annots2)) // needed for #1807
- //else
- // return tree.setType(tree.tpe.withAnnotations(adapt::Nil))
- }
- tree
- } else if (!annots1.isEmpty && ((mode & global.analyzer.BYVALmode) != 0)) { // dropping annotation
- // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
- // tree will look like having no annotation
- if (!tree.tpe.hasAnnotation(MarkerCPSAdaptMinus)) {
- val adapt = AnnotationInfo(MarkerCPSAdaptMinus.tpe, Nil, Nil)
- return tree.setType(tree.tpe.withAnnotations(adapt::Nil))
- }
- }
- }
- tree
+ if (patMode && !annotsTree.isEmpty) tree modifyType removeAllCPSAnnotations
+ else if (exprMode && !byValMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) { // shiftUnit
+ // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
+ // tree will look like having any possible annotation
+ //println("adapt annotations " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+
+ // CAVEAT:
+ // for monomorphic answer types we want to have @plus @cps (for better checking)
+ // for answer type modification we want to have only @plus (because actual answer type may differ from pt)
+
+ val res = tree modifyType (_ withAnnotations newPlusMarker() :: annotsExpected) // needed for #1807
+ vprintln("adapted annotations (not by val) of " + tree + " to " + res.tpe)
+ res
+ } else if (exprMode && byValMode && !hasMinusMarker(tree.tpe) && annotsTree.nonEmpty) { // dropping annotation
+ // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
+ // tree will look like having no annotation
+ val res = tree modifyType addMinusMarker
+ vprintln("adapted annotations (by val) of " + tree + " to " + res.tpe)
+ res
+ } else if (retMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) {
+ // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
+ // tree will look like having any possible annotation
+
+ // note 1: we are only adding a plus marker if the method's result type is a cps type
+ // (annotsExpected.nonEmpty == cpsParamAnnotation(pt).nonEmpty)
+ // note 2: we are not adding the expected cps annotations, since they will be added
+ // by adaptTypeOfReturn (see below).
+ val res = tree modifyType (_ withAnnotations List(newPlusMarker()))
+ vprintln("adapted annotations (return) of " + tree + " to " + res.tpe)
+ res
+ } else tree
}
+ /** Returns an adapted type for a return expression if the method's result type (pt) is a CPS type.
+ * Otherwise, it returns the `default` type (`typedReturn` passes `NothingClass.tpe`).
+ *
+ * A return expression in a method that has a CPS result type is an error unless the return
+ * is in tail position. Therefore, we are making sure that only the types of return expressions
+ * are adapted which will either be removed, or lead to an error.
+ */
+ override def pluginsTypedReturn(default: Type, typer: Typer, tree: Return, pt: Type): Type = {
+ val expr = tree.expr
+ // only adapt if method's result type (pt) is cps type
+ val annots = cpsParamAnnotation(pt)
+ if (annots.nonEmpty) {
+ // return type of `expr` without plus marker, but only if it doesn't have other cps annots
+ if (hasPlusMarker(expr.tpe) && !hasCpsParamTypes(expr.tpe))
+ expr.setType(removeAttribs(expr.tpe, MarkerCPSAdaptPlus))
+ expr.tpe
+ } else default
+ }
def updateAttributesFromChildren(tpe: Type, childAnnots: List[AnnotationInfo], byName: List[Tree]): Type = {
tpe match {
@@ -250,7 +270,7 @@ abstract class CPSAnnotationChecker extends CPSUtils {
case MethodType(params, restpe) => tpe
case PolyType(params, restpe) => tpe
case _ =>
- assert(childAnnots forall (_.atp.typeSymbol == MarkerCPSTypes), childAnnots)
+ assert(childAnnots forall (_ matches MarkerCPSTypes), childAnnots)
/*
[] + [] = []
plus + [] = plus
@@ -267,56 +287,50 @@ abstract class CPSAnnotationChecker extends CPSUtils {
synth cps + cps = synth cps! <- unify
*/
- val plus = tpe.hasAnnotation(MarkerCPSAdaptPlus) || (tpe.hasAnnotation(MarkerCPSTypes) &&
- byName.nonEmpty && byName.forall(_.tpe.hasAnnotation(MarkerCPSAdaptPlus)))
+ val plus = hasPlusMarker(tpe) || (
+ hasCpsParamTypes(tpe)
+ && byName.nonEmpty
+ && (byName forall (t => hasPlusMarker(t.tpe)))
+ )
// move @plus annotations outward from by-name children
- if (childAnnots.isEmpty) {
+ if (childAnnots.isEmpty) return {
if (plus) { // @plus or @plus @cps
- for (t <- byName) {
- //println("removeAnnotation " + t + " / " + t.tpe)
- t.setType(removeAttribs(t.tpe, MarkerCPSAdaptPlus, MarkerCPSTypes))
- }
- return tpe.withAnnotation(AnnotationInfo(MarkerCPSAdaptPlus.tpe, Nil, Nil))
- } else
- return tpe
+ byName foreach (_ modifyType cleanPlus)
+ addPlusMarker(tpe)
+ }
+ else tpe
}
- val annots1 = filterAttribs(tpe, MarkerCPSTypes)
+ val annots1 = cpsParamAnnotation(tpe)
if (annots1.isEmpty) { // nothing or @plus
- val synth = MarkerCPSSynth.tpe
- val annots2 = List(linearize(childAnnots))
- removeAttribs(tpe,MarkerCPSAdaptPlus).withAnnotations(AnnotationInfo(synth, Nil, Nil)::annots2)
- } else {
+ cleanPlusWith(tpe)(newSynthMarker(), linearize(childAnnots))
+ }
+ else {
val annot1 = single(annots1)
if (plus) { // @plus @cps
- val synth = AnnotationInfo(MarkerCPSSynth.tpe, Nil, Nil)
val annot2 = linearize(childAnnots)
- if (!(annot2.atp <:< annot1.atp))
- throw new TypeError(annot2 + " is not a subtype of " + annot1)
- val res = removeAttribs(tpe, MarkerCPSAdaptPlus, MarkerCPSTypes).withAnnotations(List(synth, annot2))
- for (t <- byName) {
- //println("removeAnnotation " + t + " / " + t.tpe)
- t.setType(removeAttribs(t.tpe, MarkerCPSAdaptPlus, MarkerCPSTypes))
+
+ if (annot2.atp <:< annot1.atp) {
+ try cleanPlusWith(tpe)(newSynthMarker(), annot2)
+ finally byName foreach (_ modifyType cleanPlus)
}
- res
- } else if (tpe.hasAnnotation(MarkerCPSSynth)) { // @synth @cps
+ else throw new TypeError(annot2 + " is not a subtype of " + annot1)
+ }
+ else if (hasSynthMarker(tpe)) { // @synth @cps
val annot2 = linearize(childAnnots)
- if (!(annot2.atp <:< annot1.atp))
+ if (annot2.atp <:< annot1.atp)
+ cleanPlusWith(tpe)(annot2)
+ else
throw new TypeError(annot2 + " is not a subtype of " + annot1)
- removeAttribs(tpe, MarkerCPSTypes).withAnnotation(annot2)
- } else { // @cps
- removeAttribs(tpe, MarkerCPSTypes).withAnnotation(linearize(childAnnots:::annots1))
}
+ else // @cps
+ cleanPlusWith(tpe)(linearize(childAnnots:::annots1))
}
}
}
-
-
-
-
def transArgList(fun: Tree, args: List[Tree]): List[List[Tree]] = {
val formals = fun.tpe.paramTypes
val overshoot = args.length - formals.length
@@ -367,8 +381,8 @@ abstract class CPSAnnotationChecker extends CPSUtils {
}
case _ => Nil
}
-
- val types = filterAttribs(t.tpe, MarkerCPSTypes)
+
+ val types = cpsParamAnnotation(t.tpe)
// TODO: check that it has been adapted and if so correctly
extra ++ emptyOrSingleList(types)
}
@@ -386,9 +400,10 @@ abstract class CPSAnnotationChecker extends CPSUtils {
/** Modify the type that has thus far been inferred
* for a tree. All this should do is add annotations. */
- override def addAnnotations(tree: Tree, tpe: Type): Type = {
+ override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+ import scala.util.control._
if (!cpsEnabled) {
- if (tpe.annotations.nonEmpty && tpe.hasAnnotation(MarkerCPSTypes))
+ if (Exception.failAsValue(classOf[MissingRequirementError])(false)(hasCpsParamTypes(tpe)))
global.reporter.error(tree.pos, "this code must be compiled with the Scala continuations plugin enabled")
return tpe
}
@@ -443,7 +458,7 @@ abstract class CPSAnnotationChecker extends CPSUtils {
// we cannot safely annotate these. so we just ignore these cases and
// clean up later in the Apply/TypeApply trees.
- if (qual.tpe.hasAnnotation(MarkerCPSTypes)) {
+ if (hasCpsParamTypes(qual.tpe)) {
// however there is one special case:
// if it's a method without parameters, just apply it. normally done in adapt, but
// we have to do it here so we don't lose the cps information (wouldn't trigger our
@@ -465,16 +480,15 @@ abstract class CPSAnnotationChecker extends CPSUtils {
transChildrenInOrder(tree, tpe, List(cond), List(thenp, elsep))
case Match(select, cases) =>
- // TODO: can there be cases that are not CaseDefs?? check collect vs map!
- transChildrenInOrder(tree, tpe, List(select), cases:::(cases collect { case CaseDef(_, _, body) => body }))
+ transChildrenInOrder(tree, tpe, List(select), cases:::(cases map { case CaseDef(_, _, body) => body }))
case Try(block, catches, finalizer) =>
- val tpe1 = transChildrenInOrder(tree, tpe, Nil, block::catches:::(catches collect { case CaseDef(_, _, body) => body }))
+ val tpe1 = transChildrenInOrder(tree, tpe, Nil, block::catches:::(catches map { case CaseDef(_, _, body) => body }))
- val annots = filterAttribs(tpe1, MarkerCPSTypes)
+ val annots = cpsParamAnnotation(tpe1)
if (annots.nonEmpty) {
val ann = single(annots)
- val atp0::atp1::Nil = ann.atp.normalize.typeArgs
+ val (atp0, atp1) = annTypes(ann)
if (!(atp0 =:= atp1))
throw new TypeError("only simple cps types allowed in try/catch blocks (found: " + tpe1 + ")")
if (!finalizer.isEmpty) // no finalizers allowed. see explanation in SelectiveCPSTransform
@@ -489,10 +503,14 @@ abstract class CPSAnnotationChecker extends CPSUtils {
case ValDef(mods, name, tpt, rhs) =>
vprintln("[checker] checking valdef " + name + "/"+tpe+"/"+tpt+"/"+tree.symbol.tpe)
// ValDef symbols must *not* have annotations!
- if (hasAnswerTypeAnn(tree.symbol.info)) { // is it okay to modify sym here?
+ // lazy vals are currently not supported
+ // but if we erase here all annotations, compiler will complain only
+ // when generating bytecode.
+ // This way lazy vals will be reported as unsupported feature later rather than weird type error.
+ if (hasAnswerTypeAnn(tree.symbol.info) && !mods.isLazy) { // is it okay to modify sym here?
vprintln("removing annotation from sym " + tree.symbol + "/" + tree.symbol.tpe + "/" + tpt)
- tpt.setType(removeAllCPSAnnotations(tpt.tpe))
- tree.symbol.setInfo(removeAllCPSAnnotations(tree.symbol.info))
+ tpt modifyType removeAllCPSAnnotations
+ tree.symbol modifyInfo removeAllCPSAnnotations
}
tpe
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
index d1a35df..46c644b 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -13,96 +13,107 @@ trait CPSUtils {
val verbose: Boolean = System.getProperty("cpsVerbose", "false") == "true"
def vprintln(x: =>Any): Unit = if (verbose) println(x)
- lazy val MarkerCPSSym = definitions.getClass("scala.util.continuations.cpsSym")
- lazy val MarkerCPSTypes = definitions.getClass("scala.util.continuations.cpsParam")
- lazy val MarkerCPSSynth = definitions.getClass("scala.util.continuations.cpsSynth")
-
- lazy val MarkerCPSAdaptPlus = definitions.getClass("scala.util.continuations.cpsPlus")
- lazy val MarkerCPSAdaptMinus = definitions.getClass("scala.util.continuations.cpsMinus")
-
+ object cpsNames {
+ val catches = newTermName("$catches")
+ val ex = newTermName("$ex")
+ val flatMapCatch = newTermName("flatMapCatch")
+ val getTrivialValue = newTermName("getTrivialValue")
+ val isTrivial = newTermName("isTrivial")
+ val reify = newTermName("reify")
+ val reifyR = newTermName("reifyR")
+ val shift = newTermName("shift")
+ val shiftR = newTermName("shiftR")
+ val shiftSuffix = newTermName("$shift")
+ val shiftUnit0 = newTermName("shiftUnit0")
+ val shiftUnit = newTermName("shiftUnit")
+ val shiftUnitR = newTermName("shiftUnitR")
+ }
- lazy val Context = definitions.getClass("scala.util.continuations.ControlContext")
+ lazy val MarkerCPSSym = rootMirror.getRequiredClass("scala.util.continuations.cpsSym")
+ lazy val MarkerCPSTypes = rootMirror.getRequiredClass("scala.util.continuations.cpsParam")
+ lazy val MarkerCPSSynth = rootMirror.getRequiredClass("scala.util.continuations.cpsSynth")
+ lazy val MarkerCPSAdaptPlus = rootMirror.getRequiredClass("scala.util.continuations.cpsPlus")
+ lazy val MarkerCPSAdaptMinus = rootMirror.getRequiredClass("scala.util.continuations.cpsMinus")
- lazy val ModCPS = definitions.getModule("scala.util.continuations")
- lazy val MethShiftUnit = definitions.getMember(ModCPS, "shiftUnit")
- lazy val MethShiftUnitR = definitions.getMember(ModCPS, "shiftUnitR")
- lazy val MethShift = definitions.getMember(ModCPS, "shift")
- lazy val MethShiftR = definitions.getMember(ModCPS, "shiftR")
- lazy val MethReify = definitions.getMember(ModCPS, "reify")
- lazy val MethReifyR = definitions.getMember(ModCPS, "reifyR")
+ lazy val Context = rootMirror.getRequiredClass("scala.util.continuations.ControlContext")
+ lazy val ModCPS = rootMirror.getRequiredPackage("scala.util.continuations")
+ lazy val MethShiftUnit = definitions.getMember(ModCPS, cpsNames.shiftUnit)
+ lazy val MethShiftUnit0 = definitions.getMember(ModCPS, cpsNames.shiftUnit0)
+ lazy val MethShiftUnitR = definitions.getMember(ModCPS, cpsNames.shiftUnitR)
+ lazy val MethShift = definitions.getMember(ModCPS, cpsNames.shift)
+ lazy val MethShiftR = definitions.getMember(ModCPS, cpsNames.shiftR)
+ lazy val MethReify = definitions.getMember(ModCPS, cpsNames.reify)
+ lazy val MethReifyR = definitions.getMember(ModCPS, cpsNames.reifyR)
lazy val allCPSAnnotations = List(MarkerCPSSym, MarkerCPSTypes, MarkerCPSSynth,
MarkerCPSAdaptPlus, MarkerCPSAdaptMinus)
+ // TODO - needed? Can these all use the same annotation info?
+ protected def newSynthMarker() = newMarker(MarkerCPSSynth)
+ protected def newPlusMarker() = newMarker(MarkerCPSAdaptPlus)
+ protected def newMinusMarker() = newMarker(MarkerCPSAdaptMinus)
+ protected def newMarker(tpe: Type): AnnotationInfo = AnnotationInfo marker tpe
+ protected def newMarker(sym: Symbol): AnnotationInfo = AnnotationInfo marker sym.tpe
+
+ protected def newCpsParamsMarker(tp1: Type, tp2: Type) =
+ newMarker(appliedType(MarkerCPSTypes.tpe, List(tp1, tp2)))
+
// annotation checker
+ protected def annTypes(ann: AnnotationInfo): (Type, Type) = {
+ val tp0 :: tp1 :: Nil = ann.atp.normalize.typeArgs
+ ((tp0, tp1))
+ }
+ protected def hasMinusMarker(tpe: Type) = tpe hasAnnotation MarkerCPSAdaptMinus
+ protected def hasPlusMarker(tpe: Type) = tpe hasAnnotation MarkerCPSAdaptPlus
+ protected def hasSynthMarker(tpe: Type) = tpe hasAnnotation MarkerCPSSynth
+ protected def hasCpsParamTypes(tpe: Type) = tpe hasAnnotation MarkerCPSTypes
+ protected def cpsParamTypes(tpe: Type) = tpe getAnnotation MarkerCPSTypes map annTypes
+
def filterAttribs(tpe:Type, cls:Symbol) =
- tpe.annotations.filter(_.atp.typeSymbol == cls)
+ tpe.annotations filter (_ matches cls)
- def removeAttribs(tpe:Type, cls:Symbol*) =
- tpe.withoutAnnotations.withAnnotations(tpe.annotations.filterNot(cls contains _.atp.typeSymbol))
+ def removeAttribs(tpe: Type, classes: Symbol*) =
+ tpe filterAnnotations (ann => !(classes exists (ann matches _)))
def removeAllCPSAnnotations(tpe: Type) = removeAttribs(tpe, allCPSAnnotations:_*)
+ def cpsParamAnnotation(tpe: Type) = filterAttribs(tpe, MarkerCPSTypes)
+
def linearize(ann: List[AnnotationInfo]): AnnotationInfo = {
- ann.reduceLeft { (a, b) =>
- val atp0::atp1::Nil = a.atp.normalize.typeArgs
- val btp0::btp1::Nil = b.atp.normalize.typeArgs
- val (u0,v0) = (atp0, atp1)
- val (u1,v1) = (btp0, btp1)
-/*
- val (u0,v0) = (a.atp.typeArgs(0), a.atp.typeArgs(1))
- val (u1,v1) = (b.atp.typeArgs(0), b.atp.typeArgs(1))
- vprintln("check lin " + a + " andThen " + b)
-*/
- vprintln("check lin " + a + " andThen " + b)
- if (!(v1 <:< u0))
+ ann reduceLeft { (a, b) =>
+ val (u0,v0) = annTypes(a)
+ val (u1,v1) = annTypes(b)
+ // vprintln("check lin " + a + " andThen " + b)
+
+ if (v1 <:< u0)
+ newCpsParamsMarker(u1, v0)
+ else
throw new TypeError("illegal answer type modification: " + a + " andThen " + b)
- // TODO: improve error message (but it is not very common)
- AnnotationInfo(appliedType(MarkerCPSTypes.tpe, List(u1,v0)),Nil,Nil)
}
}
// anf transform
def getExternalAnswerTypeAnn(tp: Type) = {
- tp.annotations.find(a => a.atp.typeSymbol == MarkerCPSTypes) match {
- case Some(AnnotationInfo(atp, _, _)) =>
- val atp0::atp1::Nil = atp.normalize.typeArgs
- Some((atp0, atp1))
- case None =>
- if (tp.hasAnnotation(MarkerCPSAdaptPlus))
- global.warning("trying to instantiate type " + tp + " to unknown cps type")
- None
+ cpsParamTypes(tp) orElse {
+ if (hasPlusMarker(tp))
+ global.warning("trying to instantiate type " + tp + " to unknown cps type")
+ None
}
}
- def getAnswerTypeAnn(tp: Type) = {
- tp.annotations.find(a => a.atp.typeSymbol == MarkerCPSTypes) match {
- case Some(AnnotationInfo(atp, _, _)) =>
- if (!tp.hasAnnotation(MarkerCPSAdaptPlus)) {//&& !tp.hasAnnotation(MarkerCPSAdaptMinus))
- val atp0::atp1::Nil = atp.normalize.typeArgs
- Some((atp0, atp1))
- } else
- None
- case None => None
- }
- }
+ def getAnswerTypeAnn(tp: Type): Option[(Type, Type)] =
+ cpsParamTypes(tp) filterNot (_ => hasPlusMarker(tp))
- def hasAnswerTypeAnn(tp: Type) = {
- tp.hasAnnotation(MarkerCPSTypes) && !tp.hasAnnotation(MarkerCPSAdaptPlus) /*&&
- !tp.hasAnnotation(MarkerCPSAdaptMinus)*/
- }
-
- def hasSynthAnn(tp: Type) = {
- tp.annotations.exists(a => a.atp.typeSymbol == MarkerCPSSynth)
- }
+ def hasAnswerTypeAnn(tp: Type) =
+ hasCpsParamTypes(tp) && !hasPlusMarker(tp)
def updateSynthFlag(tree: Tree) = { // remove annotations if *we* added them (@synth present)
- if (hasSynthAnn(tree.tpe)) {
+ if (hasSynthMarker(tree.tpe)) {
log("removing annotation from " + tree)
- tree.setType(removeAllCPSAnnotations(tree.tpe))
+ tree modifyType removeAllCPSAnnotations
} else
tree
}
@@ -124,7 +135,4 @@ trait CPSUtils {
case _ => None
}
}
-
- // cps transform
-
-}
\ No newline at end of file
+}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
index 107f7ad..8b39bf3 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
@@ -14,7 +14,7 @@ import scala.tools.nsc.ast._
*/
abstract class SelectiveANFTransform extends PluginComponent with Transform with
TypingTransformers with CPSUtils {
- // inherits abstract value `global' and class `Phase' from Transform
+ // inherits abstract value `global` and class `Phase` from Transform
import global._ // the global environment
import definitions._ // standard classes and methods
@@ -32,6 +32,55 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
implicit val _unit = unit // allow code in CPSUtils.scala to report errors
var cpsAllowed: Boolean = false // detect cps code in places we do not handle (yet)
+ object RemoveTailReturnsTransformer extends Transformer {
+ override def transform(tree: Tree): Tree = tree match {
+ case Block(stms, r @ Return(expr)) =>
+ treeCopy.Block(tree, stms, expr)
+
+ case Block(stms, expr) =>
+ treeCopy.Block(tree, stms, transform(expr))
+
+ case If(cond, r1 @ Return(thenExpr), r2 @ Return(elseExpr)) =>
+ treeCopy.If(tree, cond, transform(thenExpr), transform(elseExpr))
+
+ case If(cond, r1 @ Return(thenExpr), elseExpr) =>
+ treeCopy.If(tree, cond, transform(thenExpr), transform(elseExpr))
+
+ case If(cond, thenExpr, r2 @ Return(elseExpr)) =>
+ treeCopy.If(tree, cond, transform(thenExpr), transform(elseExpr))
+
+ case If(cond, thenExpr, elseExpr) =>
+ treeCopy.If(tree, cond, transform(thenExpr), transform(elseExpr))
+
+ case Try(block, catches, finalizer) =>
+ treeCopy.Try(tree,
+ transform(block),
+ (catches map (t => transform(t))).asInstanceOf[List[CaseDef]],
+ transform(finalizer))
+
+ case CaseDef(pat, guard, r @ Return(expr)) =>
+ treeCopy.CaseDef(tree, pat, guard, expr)
+
+ case CaseDef(pat, guard, body) =>
+ treeCopy.CaseDef(tree, pat, guard, transform(body))
+
+ case Return(_) =>
+ unit.error(tree.pos, "return expressions in CPS code must be in tail position")
+ tree
+
+ case _ =>
+ super.transform(tree)
+ }
+ }
+
+ def removeTailReturns(body: Tree): Tree = {
+ // support body with single return expression
+ body match {
+ case Return(expr) => expr
+ case _ => RemoveTailReturnsTransformer.transform(body)
+ }
+ }
+
override def transform(tree: Tree): Tree = {
if (!cpsEnabled) return tree
@@ -46,21 +95,24 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
// this would cause infinite recursion. But we could remove the
// ValDef case here.
- case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- log("transforming " + dd.symbol)
+ case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs0) =>
+ debuglog("transforming " + dd.symbol)
atOwner(dd.symbol) {
- val rhs1 = transExpr(rhs, None, getExternalAnswerTypeAnn(tpt.tpe))
+ val rhs =
+ if (cpsParamTypes(tpt.tpe).nonEmpty) removeTailReturns(rhs0)
+ else rhs0
+ val rhs1 = transExpr(rhs, None, getExternalAnswerTypeAnn(tpt.tpe))(getExternalAnswerTypeAnn(tpt.tpe).isDefined)
- log("result "+rhs1)
- log("result is of type "+rhs1.tpe)
+ debuglog("result "+rhs1)
+ debuglog("result is of type "+rhs1.tpe)
treeCopy.DefDef(dd, mods, name, transformTypeDefs(tparams), transformValDefss(vparamss),
transform(tpt), rhs1)
}
case ff @ Function(vparams, body) =>
- log("transforming anon function " + ff.symbol)
+ debuglog("transforming anon function " + ff.symbol)
atOwner(ff.symbol) {
@@ -71,31 +123,53 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
// { x => x match { case A => ... }} to
// { x => shiftUnit(x match { case A => ... })}
// which Uncurry cannot handle (see function6.scala)
+ // thus, we push down the shiftUnit to each of the case bodies
val ext = getExternalAnswerTypeAnn(body.tpe)
+ val pureBody = getAnswerTypeAnn(body.tpe).isEmpty
+ implicit val isParentImpure = ext.isDefined
+
+ def transformPureMatch(tree: Tree, selector: Tree, cases: List[CaseDef]) = {
+ val caseVals = cases map { case cd @ CaseDef(pat, guard, body) =>
+ // if (!hasPlusMarker(body.tpe)) body.tpe = body.tpe withAnnotation newPlusMarker() // TODO: to avoid warning
+ val bodyVal = transExpr(body, None, ext) // ??? triggers "cps-transformed unexpectedly" warning in transTailValue
+ treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
+ }
+ treeCopy.Match(tree, transform(selector), caseVals)
+ }
+
+ def transformPureVirtMatch(body: Block, selDef: ValDef, cases: List[Tree], matchEnd: Tree) = {
+ val stats = transform(selDef) :: (cases map (transExpr(_, None, ext)))
+ treeCopy.Block(body, stats, transExpr(matchEnd, None, ext))
+ }
val body1 = body match {
- case Match(selector, cases) if (ext.isDefined && getAnswerTypeAnn(body.tpe).isEmpty) =>
- val cases1 = for {
- cd @ CaseDef(pat, guard, caseBody) <- cases
- val caseBody1 = transExpr(body, None, ext)
- } yield {
- treeCopy.CaseDef(cd, transform(pat), transform(guard), caseBody1)
- }
- treeCopy.Match(tree, transform(selector), cases1)
+ case Match(selector, cases) if ext.isDefined && pureBody =>
+ transformPureMatch(body, selector, cases)
+
+ // virtpatmat switch
+ case Block(List(selDef: ValDef), mat at Match(selector, cases)) if ext.isDefined && pureBody =>
+ treeCopy.Block(body, List(transform(selDef)), transformPureMatch(mat, selector, cases))
+
+ // virtpatmat
+ case b at Block(matchStats@((selDef: ValDef) :: cases), matchEnd) if ext.isDefined && pureBody && (matchStats forall treeInfo.hasSynthCaseSymbol) =>
+ transformPureVirtMatch(b, selDef, cases, matchEnd)
+
+ // virtpatmat that stores the scrut separately -- TODO: can we eliminate this case??
+ case Block(List(selDef0: ValDef), mat at Block(matchStats@((selDef: ValDef) :: cases), matchEnd)) if ext.isDefined && pureBody && (matchStats forall treeInfo.hasSynthCaseSymbol)=>
+ treeCopy.Block(body, List(transform(selDef0)), transformPureVirtMatch(mat, selDef, cases, matchEnd))
case _ =>
transExpr(body, None, ext)
}
- log("result "+body1)
- log("result is of type "+body1.tpe)
+ debuglog("anf result "+body1+"\nresult is of type "+body1.tpe)
treeCopy.Function(ff, transformValDefs(vparams), body1)
}
case vd @ ValDef(mods, name, tpt, rhs) => // object-level valdefs
- log("transforming valdef " + vd.symbol)
+ debuglog("transforming valdef " + vd.symbol)
if (getExternalAnswerTypeAnn(tpt.tpe).isEmpty) {
@@ -120,11 +194,13 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
transExpr(tree, None, None)
case _ =>
-
if (hasAnswerTypeAnn(tree.tpe)) {
- if (!cpsAllowed)
- unit.error(tree.pos, "cps code not allowed here / " + tree.getClass + " / " + tree)
-
+ if (!cpsAllowed) {
+ if (tree.symbol.isLazy)
+ unit.error(tree.pos, "implementation restriction: cps annotations not allowed on lazy value definitions")
+ else
+ unit.error(tree.pos, "cps code not allowed here / " + tree.getClass + " / " + tree)
+ }
log(tree)
}
@@ -134,8 +210,8 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
}
- def transExpr(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo): Tree = {
- transTailValue(tree, cpsA, cpsR) match {
+ def transExpr(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo)(implicit isAnyParentImpure: Boolean = false): Tree = {
+ transTailValue(tree, cpsA, cpsR)(cpsR.isDefined || isAnyParentImpure) match {
case (Nil, b) => b
case (a, b) =>
treeCopy.Block(tree, a,b)
@@ -143,7 +219,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
}
- def transArgList(fun: Tree, args: List[Tree], cpsA: CPSInfo): (List[List[Tree]], List[Tree], CPSInfo) = {
+ def transArgList(fun: Tree, args: List[Tree], cpsA: CPSInfo)(implicit isAnyParentImpure: Boolean): (List[List[Tree]], List[Tree], CPSInfo) = {
val formals = fun.tpe.paramTypes
val overshoot = args.length - formals.length
@@ -152,7 +228,8 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
val (stm,expr) = (for ((a,tp) <- args.zip(formals ::: List.fill(overshoot)(NoType))) yield {
tp match {
case TypeRef(_, ByNameParamClass, List(elemtp)) =>
- (Nil, transExpr(a, None, getAnswerTypeAnn(elemtp)))
+ // note that we're not passing just isAnyParentImpure
+ (Nil, transExpr(a, None, getAnswerTypeAnn(elemtp))(getAnswerTypeAnn(elemtp).isDefined || isAnyParentImpure))
case _ =>
val (valStm, valExpr, valSpc) = transInlineValue(a, spc)
spc = valSpc
@@ -164,73 +241,81 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
}
- def transValue(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo): (List[Tree], Tree, CPSInfo) = {
+ // precondition: cpsR.isDefined "implies" isAnyParentImpure
+ def transValue(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo)(implicit isAnyParentImpure: Boolean): (List[Tree], Tree, CPSInfo) = {
// return value: (stms, expr, spc), where spc is CPSInfo after stms but *before* expr
implicit val pos = tree.pos
tree match {
case Block(stms, expr) =>
val (cpsA2, cpsR2) = (cpsA, linearize(cpsA, getAnswerTypeAnn(tree.tpe))) // tbd
-// val (cpsA2, cpsR2) = (None, getAnswerTypeAnn(tree.tpe))
- val (a, b) = transBlock(stms, expr, cpsA2, cpsR2)
+ // val (cpsA2, cpsR2) = (None, getAnswerTypeAnn(tree.tpe))
- val tree1 = (treeCopy.Block(tree, a, b)) // no updateSynthFlag here!!!
+ val (a, b) = transBlock(stms, expr, cpsA2, cpsR2)(cpsR2.isDefined || isAnyParentImpure)
+ val tree1 = (treeCopy.Block(tree, a, b)) // no updateSynthFlag here!!!
(Nil, tree1, cpsA)
- case If(cond, thenp, elsep) =>
- /* possible situations:
- cps before (cpsA)
- cps in condition (spc) <-- synth flag set if *only* here!
- cps in (one or both) branches */
- val (condStats, condVal, spc) = transInlineValue(cond, cpsA)
- val (cpsA2, cpsR2) = if (tree.tpe hasAnnotation MarkerCPSSynth)
- (spc, linearize(spc, getAnswerTypeAnn(tree.tpe))) else
- (None, getAnswerTypeAnn(tree.tpe)) // if no cps in condition, branches must conform to tree.tpe directly
- val thenVal = transExpr(thenp, cpsA2, cpsR2)
- val elseVal = transExpr(elsep, cpsA2, cpsR2)
-
- // check that then and else parts agree (not necessary any more, but left as sanity check)
- if (cpsR.isDefined) {
- if (elsep == EmptyTree)
- unit.error(tree.pos, "always need else part in cps code")
- }
- if (hasAnswerTypeAnn(thenVal.tpe) != hasAnswerTypeAnn(elseVal.tpe)) {
- unit.error(tree.pos, "then and else parts must both be cps code or neither of them")
- }
-
- (condStats, updateSynthFlag(treeCopy.If(tree, condVal, thenVal, elseVal)), spc)
+ case If(cond, thenp, elsep) =>
+ /* possible situations:
+ cps before (cpsA)
+ cps in condition (spc) <-- synth flag set if *only* here!
+ cps in (one or both) branches */
+ val (condStats, condVal, spc) = transInlineValue(cond, cpsA)
+ val (cpsA2, cpsR2) = if (hasSynthMarker(tree.tpe))
+ (spc, linearize(spc, getAnswerTypeAnn(tree.tpe))) else
+ (None, getAnswerTypeAnn(tree.tpe)) // if no cps in condition, branches must conform to tree.tpe directly
+ val thenVal = transExpr(thenp, cpsA2, cpsR2)(cpsR2.isDefined || isAnyParentImpure)
+ val elseVal = transExpr(elsep, cpsA2, cpsR2)(cpsR2.isDefined || isAnyParentImpure)
+
+ // check that then and else parts agree (not necessary any more, but left as sanity check)
+ if (cpsR.isDefined) {
+ if (elsep == EmptyTree)
+ unit.error(tree.pos, "always need else part in cps code")
+ }
+ if (hasAnswerTypeAnn(thenVal.tpe) != hasAnswerTypeAnn(elseVal.tpe)) {
+ unit.error(tree.pos, "then and else parts must both be cps code or neither of them")
+ }
- case Match(selector, cases) =>
+ (condStats, updateSynthFlag(treeCopy.If(tree, condVal, thenVal, elseVal)), spc)
- val (selStats, selVal, spc) = transInlineValue(selector, cpsA)
- val (cpsA2, cpsR2) = if (tree.tpe hasAnnotation MarkerCPSSynth)
- (spc, linearize(spc, getAnswerTypeAnn(tree.tpe))) else
- (None, getAnswerTypeAnn(tree.tpe))
+ case Match(selector, cases) =>
+ val (selStats, selVal, spc) = transInlineValue(selector, cpsA)
+ val (cpsA2, cpsR2) =
+ if (hasSynthMarker(tree.tpe)) (spc, linearize(spc, getAnswerTypeAnn(tree.tpe)))
+ else (None, getAnswerTypeAnn(tree.tpe))
- val caseVals = for {
- cd @ CaseDef(pat, guard, body) <- cases
- val bodyVal = transExpr(body, cpsA2, cpsR2)
- } yield {
- treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
- }
+ val caseVals = cases map { case cd @ CaseDef(pat, guard, body) =>
+ val bodyVal = transExpr(body, cpsA2, cpsR2)(cpsR2.isDefined || isAnyParentImpure)
+ treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
+ }
- (selStats, updateSynthFlag(treeCopy.Match(tree, selVal, caseVals)), spc)
+ (selStats, updateSynthFlag(treeCopy.Match(tree, selVal, caseVals)), spc)
+ // this is utterly broken: LabelDefs need to be considered together when transforming them to DefDefs:
+ // suppose a Block {L1; ... ; LN}
+ // this should become {D1def ; ... ; DNdef ; D1()}
+ // where D$idef = def L$i(..) = {L$i.body; L${i+1}(..)}
case ldef @ LabelDef(name, params, rhs) =>
+ // println("trans LABELDEF "+(name, params, tree.tpe, hasAnswerTypeAnn(tree.tpe)))
+ // TODO why does the labeldef's type have a cpsMinus annotation, whereas the rhs does not? (BYVALmode missing/too much somewhere?)
if (hasAnswerTypeAnn(tree.tpe)) {
- val sym = currentOwner.newMethod(tree.pos, name)
- .setInfo(ldef.symbol.info)
- .setFlag(Flags.SYNTHETIC)
-
- val rhs1 = new TreeSymSubstituter(List(ldef.symbol), List(sym)).transform(rhs)
- val rhsVal = transExpr(rhs1, None, getAnswerTypeAnn(tree.tpe))
- new ChangeOwnerTraverser(currentOwner, sym) traverse rhsVal
+ // currentOwner.newMethod(name, tree.pos, Flags.SYNTHETIC) setInfo ldef.symbol.info
+ val sym = ldef.symbol resetFlag Flags.LABEL
+ val rhs1 = rhs //new TreeSymSubstituter(List(ldef.symbol), List(sym)).transform(rhs)
+ val rhsVal = transExpr(rhs1, None, getAnswerTypeAnn(tree.tpe))(getAnswerTypeAnn(tree.tpe).isDefined || isAnyParentImpure) changeOwner (currentOwner -> sym)
val stm1 = localTyper.typed(DefDef(sym, rhsVal))
- val expr = localTyper.typed(Apply(Ident(sym), List()))
-
- (List(stm1), expr, cpsA)
+ // since virtpatmat does not rely on fall-through, don't call the labels it emits
+ // transBlock will take care of calling the first label
+ // calling each labeldef is wrong, since some labels may be jumped over
+ // we can get away with this for now since the only other labels we emit are for tailcalls/while loops,
+ // which do not have consecutive labeldefs (and thus fall-through is irrelevant)
+ if (treeInfo.hasSynthCaseSymbol(ldef)) (List(stm1), localTyper.typed{Literal(Constant(()))}, cpsA)
+ else {
+ assert(params.isEmpty, "problem in ANF transforming label with non-empty params "+ ldef)
+ (List(stm1), localTyper.typed{Apply(Ident(sym), List())}, cpsA)
+ }
} else {
val rhsVal = transExpr(rhs, None, None)
(Nil, updateSynthFlag(treeCopy.LabelDef(tree, name, params, rhsVal)), cpsA)
@@ -242,7 +327,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
val catchVals = for {
cd @ CaseDef(pat, guard, body) <- catches
- val bodyVal = transExpr(body, cpsA, cpsR)
+ bodyVal = transExpr(body, cpsA, cpsR)
} yield {
treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
}
@@ -257,6 +342,8 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
(stms, updateSynthFlag(treeCopy.Assign(tree, transform(lhs), expr)), spc)
case Return(expr0) =>
+ if (isAnyParentImpure)
+ unit.error(tree.pos, "return expression not allowed, since method calls CPS method")
val (stms, expr, spc) = transInlineValue(expr0, cpsA)
(stms, updateSynthFlag(treeCopy.Return(tree, expr)), spc)
@@ -294,7 +381,8 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
}
}
- def transTailValue(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo): (List[Tree], Tree) = {
+ // precondition: cpsR.isDefined "implies" isAnyParentImpure
+ def transTailValue(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo)(implicit isAnyParentImpure: Boolean): (List[Tree], Tree) = {
val (stms, expr, spc) = transValue(tree, cpsA, cpsR)
@@ -306,23 +394,44 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
if (!expr.isEmpty && (expr.tpe.typeSymbol ne NothingClass)) {
// must convert!
- log("cps type conversion (has: " + cpsA + "/" + spc + "/" + expr.tpe + ")")
- log("cps type conversion (expected: " + cpsR.get + "): " + expr)
+ debuglog("cps type conversion (has: " + cpsA + "/" + spc + "/" + expr.tpe + ")")
+ debuglog("cps type conversion (expected: " + cpsR.get + "): " + expr)
- if (!expr.tpe.hasAnnotation(MarkerCPSAdaptPlus))
+ if (!hasPlusMarker(expr.tpe))
unit.warning(tree.pos, "expression " + tree + " is cps-transformed unexpectedly")
try {
val Some((a, b)) = cpsR
-
- val res = localTyper.typed(atPos(tree.pos) {
- Apply(TypeApply(gen.mkAttributedRef(MethShiftUnit),
- List(TypeTree(plainTpe), TypeTree(a), TypeTree(b))),
- List(expr))
- })
- return (stms, res)
-
- } catch {
+ /** Since shiftUnit is bounded [A,B,C>:B] this may not typecheck
+ * if C is overly specific. So if !(B <:< C), call shiftUnit0
+ * instead, which takes only two type arguments.
+ */
+ val conforms = a <:< b
+ val call = localTyper.typedPos(tree.pos)(
+ Apply(
+ TypeApply(
+ gen.mkAttributedRef( if (conforms) MethShiftUnit else MethShiftUnit0 ),
+ List(TypeTree(plainTpe), TypeTree(a)) ++ ( if (conforms) List(TypeTree(b)) else Nil )
+ ),
+ List(expr)
+ )
+ )
+ // This is today's sick/meaningless heuristic for spotting breakdown so
+ // we don't proceed until stack traces start draping themselves over everything.
+ // If there are wildcard types in the tree and B == Nothing, something went wrong.
+ // (I thought WildcardTypes would be enough, but nope. 'reset0 { 0 }' has them.)
+ //
+ // Code as simple as reset((_: String).length)
+ // will crash meaninglessly without this check. See SI-3718.
+ //
+ // TODO - obviously this should be done earlier, differently, or with
+ // a more skilled hand. Most likely, all three.
+ if ((b.typeSymbol eq NothingClass) && call.tpe.exists(_ eq WildcardType))
+ unit.error(tree.pos, "cannot cps-transform malformed (possibly in shift/reset placement) expression")
+ else
+ return ((stms, call))
+ }
+ catch {
case ex:TypeError =>
unit.error(ex.pos, "cannot cps-transform expression " + tree + ": " + ex.msg)
}
@@ -330,7 +439,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
} else if (!cpsR.isDefined && bot.isDefined) {
// error!
- log("cps type error: " + expr)
+ debuglog("cps type error: " + expr)
//println("cps type error: " + expr + "/" + expr.tpe + "/" + getAnswerTypeAnn(expr.tpe))
//println(cpsR + "/" + spc + "/" + bot)
@@ -339,9 +448,9 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
} else {
// all is well
- if (expr.tpe.hasAnnotation(MarkerCPSAdaptPlus)) {
+ if (hasPlusMarker(expr.tpe)) {
unit.warning(tree.pos, "expression " + expr + " of type " + expr.tpe + " is not expected to have a cps type")
- expr.setType(removeAllCPSAnnotations(expr.tpe))
+ expr modifyType removeAllCPSAnnotations
}
// TODO: sanity check that types agree
@@ -350,7 +459,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
(stms, expr)
}
- def transInlineValue(tree: Tree, cpsA: CPSInfo): (List[Tree], Tree, CPSInfo) = {
+ def transInlineValue(tree: Tree, cpsA: CPSInfo)(implicit isAnyParentImpure: Boolean): (List[Tree], Tree, CPSInfo) = {
val (stms, expr, spc) = transValue(tree, cpsA, None) // never required to be cps
@@ -359,12 +468,12 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
val valueTpe = removeAllCPSAnnotations(expr.tpe)
- val sym = currentOwner.newValue(tree.pos, unit.fresh.newName("tmp"))
- .setInfo(valueTpe)
- .setFlag(Flags.SYNTHETIC)
- .setAnnotations(List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil)))
-
- new ChangeOwnerTraverser(currentOwner, sym) traverse expr
+ val sym: Symbol = (
+ currentOwner.newValue(newTermName(unit.fresh.newName("tmp")), tree.pos, Flags.SYNTHETIC)
+ setInfo valueTpe
+ setAnnotations List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil))
+ )
+ expr.changeOwner(currentOwner -> sym)
(stms ::: List(ValDef(sym, expr) setType(NoType)),
Ident(sym) setType(valueTpe) setPos(tree.pos), linearize(spc, spcVal)(unit, tree.pos))
@@ -377,7 +486,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
- def transInlineStm(stm: Tree, cpsA: CPSInfo): (List[Tree], CPSInfo) = {
+ def transInlineStm(stm: Tree, cpsA: CPSInfo)(implicit isAnyParentImpure: Boolean): (List[Tree], CPSInfo) = {
stm match {
// TODO: what about DefDefs?
@@ -407,19 +516,34 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
}
}
- def transBlock(stms: List[Tree], expr: Tree, cpsA: CPSInfo, cpsR: CPSInfo): (List[Tree], Tree) = {
- stms match {
- case Nil =>
- transTailValue(expr, cpsA, cpsR)
-
- case stm::rest =>
- var (rest2, expr2) = (rest, expr)
- val (headStms, headSpc) = transInlineStm(stm, cpsA)
- val (restStms, restExpr) = transBlock(rest2, expr2, headSpc, cpsR)
- (headStms:::restStms, restExpr)
- }
+ // precondition: cpsR.isDefined "implies" isAnyParentImpure
+ def transBlock(stms: List[Tree], expr: Tree, cpsA: CPSInfo, cpsR: CPSInfo)(implicit isAnyParentImpure: Boolean): (List[Tree], Tree) = {
+ def rec(currStats: List[Tree], currAns: CPSInfo, accum: List[Tree]): (List[Tree], Tree) =
+ currStats match {
+ case Nil =>
+ val (anfStats, anfExpr) = transTailValue(expr, currAns, cpsR)
+ (accum ++ anfStats, anfExpr)
+
+ case stat :: rest =>
+ val (stats, nextAns) = transInlineStm(stat, currAns)
+ rec(rest, nextAns, accum ++ stats)
+ }
+
+ val (anfStats, anfExpr) = rec(stms, cpsA, List())
+ // println("\nanf-block:\n"+ ((stms :+ expr) mkString ("{", "\n", "}")) +"\nBECAME\n"+ ((anfStats :+ anfExpr) mkString ("{", "\n", "}")))
+ // println("synth case? "+ (anfStats map (t => (t, t.isDef, treeInfo.hasSynthCaseSymbol(t)))))
+ // SUPER UGLY HACK: handle virtpatmat-style matches, whose labels have already been turned into DefDefs
+ if (anfStats.nonEmpty && (anfStats forall (t => !t.isDef || treeInfo.hasSynthCaseSymbol(t)))) {
+ val (prologue, rest) = (anfStats :+ anfExpr) span (s => !s.isInstanceOf[DefDef]) // find first case
+ // println("rest: "+ rest)
+ // val (defs, calls) = rest partition (_.isInstanceOf[DefDef])
+ if (rest.nonEmpty) {
+ // the filter drops the ()'s emitted when transValue encountered a LabelDef
+ val stats = prologue ++ (rest filter (_.isInstanceOf[DefDef])).reverse // ++ calls
+ // println("REVERSED "+ (stats mkString ("{", "\n", "}")))
+ (stats, localTyper.typed{Apply(Ident(rest.head.symbol), List())}) // call first label to kick-start the match
+ } else (anfStats, anfExpr)
+ } else (anfStats, anfExpr)
}
-
-
}
}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
index a16e9b9..2371597 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
@@ -23,6 +23,7 @@ class SelectiveCPSPlugin(val global: Global) extends Plugin {
val cpsPhase = new SelectiveCPSTransform() {
val global = SelectiveCPSPlugin.this.global
val runsAfter = List("selectiveanf")
+ override val runsBefore = List("uncurry")
}
@@ -32,6 +33,7 @@ class SelectiveCPSPlugin(val global: Global) extends Plugin {
val global: SelectiveCPSPlugin.this.global.type = SelectiveCPSPlugin.this.global
}
global.addAnnotationChecker(checker.checker)
+ global.analyzer.addAnalyzerPlugin(checker.plugin)
global.log("instantiated cps plugin: " + this)
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
index 78cc8f7..4482bf2 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
@@ -15,8 +15,8 @@ import scala.tools.nsc.ast._
* In methods marked @cps, CPS-transform assignments introduced by ANF-transform phase.
*/
abstract class SelectiveCPSTransform extends PluginComponent with
- InfoTransform with TypingTransformers with CPSUtils {
- // inherits abstract value `global' and class `Phase' from Transform
+ InfoTransform with TypingTransformers with CPSUtils with TreeDSL {
+ // inherits abstract value `global` and class `Phase` from Transform
import global._ // the global environment
import definitions._ // standard classes and methods
@@ -39,10 +39,10 @@ abstract class SelectiveCPSTransform extends PluginComponent with
val newtp = transformCPSType(tp)
if (newtp != tp)
- log("transformInfo changed type for " + sym + " to " + newtp);
+ debuglog("transformInfo changed type for " + sym + " to " + newtp);
if (sym == MethReifyR)
- log("transformInfo (not)changed type for " + sym + " to " + newtp);
+ debuglog("transformInfo (not)changed type for " + sym + " to " + newtp);
newtp
}
@@ -65,6 +65,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
class CPSTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ private val patmatTransformer = patmat.newTransformer(unit)
override def transform(tree: Tree): Tree = {
if (!cpsEnabled) return tree
@@ -83,13 +84,13 @@ abstract class SelectiveCPSTransform extends PluginComponent with
case Apply(TypeApply(fun, targs), args)
if (fun.symbol == MethShift) =>
- log("found shift: " + tree)
+ debuglog("found shift: " + tree)
atPos(tree.pos) {
val funR = gen.mkAttributedRef(MethShiftR) // TODO: correct?
//gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedIdent(ScalaPackage),
//ScalaPackage.tpe.member("util")), ScalaPackage.tpe.member("util").tpe.member("continuations")), MethShiftR)
//gen.mkAttributedRef(ModCPS.tpe, MethShiftR) // TODO: correct?
- log(funR.tpe)
+ debuglog("funR.tpe = " + funR.tpe)
Apply(
TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))),
args.map(transform(_))
@@ -98,10 +99,10 @@ abstract class SelectiveCPSTransform extends PluginComponent with
case Apply(TypeApply(fun, targs), args)
if (fun.symbol == MethShiftUnit) =>
- log("found shiftUnit: " + tree)
+ debuglog("found shiftUnit: " + tree)
atPos(tree.pos) {
val funR = gen.mkAttributedRef(MethShiftUnitR) // TODO: correct?
- log(funR.tpe)
+ debuglog("funR.tpe = " + funR.tpe)
Apply(
TypeApply(funR, List(targs(0), targs(1))).setType(appliedType(funR.tpe,
List(targs(0).tpe, targs(1).tpe))),
@@ -114,7 +115,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
log("found reify: " + tree)
atPos(tree.pos) {
val funR = gen.mkAttributedRef(MethReifyR) // TODO: correct?
- log(funR.tpe)
+ debuglog("funR.tpe = " + funR.tpe)
Apply(
TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))),
args.map(transform(_))
@@ -190,29 +191,29 @@ abstract class SelectiveCPSTransform extends PluginComponent with
val targettp = transformCPSType(tree.tpe)
-// val expr2 = if (catches.nonEmpty) {
- val pos = catches.head.pos
- val argSym = currentOwner.newValueParameter(pos, "$ex").setInfo(ThrowableClass.tpe)
- val rhs = Match(Ident(argSym), catches1)
- val fun = Function(List(ValDef(argSym)), rhs)
- val funSym = currentOwner.newValueParameter(pos, "$catches").setInfo(appliedType(PartialFunctionClass.tpe, List(ThrowableClass.tpe, targettp)))
- val funDef = localTyper.typed(atPos(pos) { ValDef(funSym, fun) })
- val expr2 = localTyper.typed(atPos(pos) { Apply(Select(expr1, expr1.tpe.member("flatMapCatch")), List(Ident(funSym))) })
+ val pos = catches.head.pos
+ val funSym = currentOwner.newValueParameter(cpsNames.catches, pos).setInfo(appliedType(PartialFunctionClass.tpe, List(ThrowableClass.tpe, targettp)))
+ val funDef = localTyper.typedPos(pos) {
+ ValDef(funSym, Match(EmptyTree, catches1))
+ }
+ val expr2 = localTyper.typedPos(pos) {
+ Apply(Select(expr1, expr1.tpe.member(cpsNames.flatMapCatch)), List(Ident(funSym)))
+ }
- argSym.owner = fun.symbol
- val chown = new ChangeOwnerTraverser(currentOwner, fun.symbol)
- chown.traverse(rhs)
+ val exSym = currentOwner.newValueParameter(cpsNames.ex, pos).setInfo(ThrowableClass.tpe)
- val exSym = currentOwner.newValueParameter(pos, "$ex").setInfo(ThrowableClass.tpe)
- val catch2 = { localTyper.typedCases(tree, List(
- CaseDef(Bind(exSym, Typed(Ident("_"), TypeTree(ThrowableClass.tpe))),
- Apply(Select(Ident(funSym), "isDefinedAt"), List(Ident(exSym))),
- Apply(Ident(funSym), List(Ident(exSym))))
- ), ThrowableClass.tpe, targettp) }
+ import CODE._
+ // generate a case that is supported directly by the back-end
+ val catchIfDefined = CaseDef(
+ Bind(exSym, Ident(nme.WILDCARD)),
+ EmptyTree,
+ IF ((REF(funSym) DOT nme.isDefinedAt)(REF(exSym))) THEN (REF(funSym) APPLY (REF(exSym))) ELSE Throw(REF(exSym))
+ )
- //typedCases(tree, catches, ThrowableClass.tpe, pt)
+ val catch2 = localTyper.typedCases(List(catchIfDefined), ThrowableClass.tpe, targettp)
+ //typedCases(tree, catches, ThrowableClass.tpe, pt)
- localTyper.typed(Block(List(funDef), treeCopy.Try(tree, treeCopy.Block(block1, stms, expr2), catch2, finalizer1)))
+ patmatTransformer.transform(localTyper.typed(Block(List(funDef), treeCopy.Try(tree, treeCopy.Block(block1, stms, expr2), catch2, finalizer1))))
/*
@@ -222,7 +223,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
val pos = finalizer.pos
val finalizer2 = duplicateTree(finalizer1)
val fun = Function(List(), finalizer2)
- val expr3 = localTyper.typed(atPos(pos) { Apply(Select(expr2, expr2.tpe.member("mapFinally")), List(fun)) })
+ val expr3 = localTyper.typedPos(pos) { Apply(Select(expr2, expr2.tpe.member("mapFinally")), List(fun)) }
val chown = new ChangeOwnerTraverser(currentOwner, fun.symbol)
chown.traverse(finalizer2)
@@ -259,18 +260,17 @@ abstract class SelectiveCPSTransform extends PluginComponent with
case vd @ ValDef(mods, name, tpt, rhs)
if (vd.symbol.hasAnnotation(MarkerCPSSym)) =>
- log("found marked ValDef "+name+" of type " + vd.symbol.tpe)
+ debuglog("found marked ValDef "+name+" of type " + vd.symbol.tpe)
val tpe = vd.symbol.tpe
val rhs1 = atOwner(vd.symbol) { transform(rhs) }
+ rhs1.changeOwner(vd.symbol -> currentOwner) // TODO: don't traverse twice
- new ChangeOwnerTraverser(vd.symbol, currentOwner).traverse(rhs1) // TODO: don't traverse twice
+ debuglog("valdef symbol " + vd.symbol + " has type " + tpe)
+ debuglog("right hand side " + rhs1 + " has type " + rhs1.tpe)
- log("valdef symbol " + vd.symbol + " has type " + tpe)
- log("right hand side " + rhs1 + " has type " + rhs1.tpe)
-
- log("currentOwner: " + currentOwner)
- log("currentMethod: " + currentMethod)
+ debuglog("currentOwner: " + currentOwner)
+ debuglog("currentMethod: " + currentMethod)
val (bodyStms, bodyExpr) = transBlock(rest, expr)
// FIXME: result will later be traversed again by TreeSymSubstituter and
@@ -290,7 +290,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
val body1 = (new TreeSymSubstituter(List(vd.symbol), List(ctxValSym)))(body)
- val body2 = localTyper.typed(atPos(vd.symbol.pos) { body1 })
+ val body2 = localTyper.typedPos(vd.symbol.pos) { body1 }
// in theory it would be nicer to look for an @cps annotation instead
// of testing for Context
@@ -302,42 +302,42 @@ abstract class SelectiveCPSTransform extends PluginComponent with
}
def applyCombinatorFun(ctxR: Tree, body: Tree) = {
- val arg = currentOwner.newValueParameter(ctxR.pos, name).setInfo(tpe)
+ val arg = currentOwner.newValueParameter(name, ctxR.pos).setInfo(tpe)
val body1 = (new TreeSymSubstituter(List(vd.symbol), List(arg)))(body)
- val fun = localTyper.typed(atPos(vd.symbol.pos) { Function(List(ValDef(arg)), body1) }) // types body as well
+ val fun = localTyper.typedPos(vd.symbol.pos) { Function(List(ValDef(arg)), body1) } // types body as well
arg.owner = fun.symbol
- new ChangeOwnerTraverser(currentOwner, fun.symbol).traverse(body1)
+ body1.changeOwner(currentOwner -> fun.symbol)
// see note about multiple traversals above
- log("fun.symbol: "+fun.symbol)
- log("fun.symbol.owner: "+fun.symbol.owner)
- log("arg.owner: "+arg.owner)
+ debuglog("fun.symbol: "+fun.symbol)
+ debuglog("fun.symbol.owner: "+fun.symbol.owner)
+ debuglog("arg.owner: "+arg.owner)
- log("fun.tpe:"+fun.tpe)
- log("return type of fun:"+body1.tpe)
+ debuglog("fun.tpe:"+fun.tpe)
+ debuglog("return type of fun:"+body1.tpe)
- var methodName = "map"
+ var methodName = nme.map
if (body1.tpe != null) {
if (body1.tpe.typeSymbol == Context)
- methodName = "flatMap"
+ methodName = nme.flatMap
}
else
unit.error(rhs.pos, "cannot compute type for CPS-transformed function result")
- log("will use method:"+methodName)
+ debuglog("will use method:"+methodName)
- localTyper.typed(atPos(vd.symbol.pos) {
+ localTyper.typedPos(vd.symbol.pos) {
Apply(Select(ctxR, ctxR.tpe.member(methodName)), List(fun))
- })
+ }
}
def mkBlock(stms: List[Tree], expr: Tree) = if (stms.nonEmpty) Block(stms, expr) else expr
try {
if (specialCaseTrivial) {
- log("will optimize possible tail call: " + bodyExpr)
+ debuglog("will optimize possible tail call: " + bodyExpr)
// FIXME: flatMap impl has become more complicated due to
// exceptions. do we need to put a try/catch in the then part??
@@ -347,17 +347,17 @@ abstract class SelectiveCPSTransform extends PluginComponent with
// val <lhs> = ctx.getTrivialValue; ... <--- TODO: try/catch ??? don't bother for the moment...
// else
// ctx.flatMap { <lhs> => ... }
- val ctxSym = currentOwner.newValue(vd.symbol.name + "$shift").setInfo(rhs1.tpe)
+ val ctxSym = currentOwner.newValue(newTermName("" + vd.symbol.name + cpsNames.shiftSuffix)).setInfo(rhs1.tpe)
val ctxDef = localTyper.typed(ValDef(ctxSym, rhs1))
def ctxRef = localTyper.typed(Ident(ctxSym))
val argSym = currentOwner.newValue(vd.symbol.name).setInfo(tpe)
- val argDef = localTyper.typed(ValDef(argSym, Select(ctxRef, ctxRef.tpe.member("getTrivialValue"))))
- val switchExpr = localTyper.typed(atPos(vd.symbol.pos) {
- val body2 = duplicateTree(mkBlock(bodyStms, bodyExpr)) // dup before typing!
- If(Select(ctxRef, ctxSym.tpe.member("isTrivial")),
+ val argDef = localTyper.typed(ValDef(argSym, Select(ctxRef, ctxRef.tpe.member(cpsNames.getTrivialValue))))
+ val switchExpr = localTyper.typedPos(vd.symbol.pos) {
+ val body2 = mkBlock(bodyStms, bodyExpr).duplicate // dup before typing!
+ If(Select(ctxRef, ctxSym.tpe.member(cpsNames.isTrivial)),
applyTrivial(argSym, mkBlock(argDef::bodyStms, bodyExpr)),
applyCombinatorFun(ctxRef, body2))
- })
+ }
(List(ctxDef), switchExpr)
} else {
// ctx.flatMap { <lhs> => ... }
diff --git a/src/dbc/scala/dbc/DataType.scala b/src/dbc/scala/dbc/DataType.scala
deleted file mode 100644
index 677621e..0000000
--- a/src/dbc/scala/dbc/DataType.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc;
-
-
-/** An ISO-9075:2003 (SQL) data type. Mappings between SQL types and
- * database specific types should be provided by the database driver.
- */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class DataType {
-
- /** Tests whether this datatype is equivalent to another. Usually, two
- * types are defined as equivalent if they are equal. Two types can be
- * equivalent without being equal if values of those types will be
- * encoded in the same native Scala type.
- */
- def isEquivalent(datatype: DataType): Boolean;
-
- /** Tests whether this datatype is equivalent or a subtype of another
- * datatype. Type <code>A</code> is said to be subtype of type
- * <code>B</code> if any value of type <code>A</code> can be
- * represented as a value of type <code>B</code>.
- */
- def isSubtypeOf(datatype: DataType): Boolean;
-
- /** The native Scala type in which values of this SQL type will be
- * encoded.
- */
- type NativeType <: Any;
-
- /** The native Scala type in which values of this SQL type will be
- * encoded. This must point to the same type as <code>NativeType</code>.
- */
- def nativeTypeId: DataType.Id;
-
- /** Whether the value can take the null value, None when this property is
- * unknown.
- */
- def nullable: Option[Boolean] = None;
-
- /** The SQL name of the type */
- def sqlString: String = "UNDEFINED DATA TYPE"
-
-}
-
- at deprecated(DbcIsDeprecated, "2.9.0") object DataType {
-
- type Id = Int;
-
- val OBJECT : Id = 10;
- val BOOLEAN : Id = 20;
- val BYTE : Id = 30;
- val SHORT : Id = 31;
- val INT : Id = 32;
- val LONG : Id = 33;
- val BIG_INTEGER: Id = 34;
- val BIG_DECIMAL: Id = 35;
- val FLOAT : Id = 40;
- val DOUBLE : Id = 41;
- val STRING : Id = 50;
-
-}
diff --git a/src/dbc/scala/dbc/Database.scala b/src/dbc/scala/dbc/Database.scala
deleted file mode 100644
index 60e1636..0000000
--- a/src/dbc/scala/dbc/Database.scala
+++ /dev/null
@@ -1,187 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-
-
-import java.sql._
-
-/** A link to a database. The <code>Database</code> abstract class must
- * be specialised for every different DBMS.
- *
- * @author Gilles Dubochet
- */
- at deprecated(DbcIsDeprecated, "2.9.0") case class Database(dbms: Vendor) {
-
- class Closed extends Exception {}
-
- /** A lock used for operations that need to be atomic for this database
- * instance. */
- private val lock: scala.concurrent.Lock = new scala.concurrent.Lock()
-
- /** The vendor of the DBMS that contains this database. */
- private val vendor: Vendor = dbms
-
- /** The Database connections available to use. */
- private var availableConnections: List[Connection] = Nil
-
- /** The connections that are currently in use. */
- private var usedConnections: List[Connection] = Nil
-
- /** Whether the database no longer accepts new connections. */
- private var closing: Boolean = false;
-
- /** Retrieves a connection from the available connection pool or creates
- * a new one.
- *
- * @return A connection that can be used to access the database.
- */
- private def getConnection: Connection = {
- if (closing) {
- throw new Closed;
- } else {
- availableConnections match {
- case Nil => {
- lock.acquire;
- val connection = vendor.getConnection;
- usedConnections = connection :: usedConnections;
- lock.release;
- connection
- }
- case connection :: cs => {
- lock.acquire;
- availableConnections = cs;
- usedConnections = connection :: usedConnections;
- lock.release;
- connection;
- }
- }
- }
- }
-
- /** Closes a connection to this database. A closed connection might
- * also return to the available connection pool if the latter is depleted.
- *
- * @param connection The connection that should be closed.
- */
- private def closeConnection(connection: Connection): Unit = {
- if (closing) {
- connection.close()
- } else {
- lock.acquire
- usedConnections = usedConnections.filterNot(e => (e.equals(connection)));
- if (availableConnections.length < vendor.retainedConnections)
- availableConnections = connection :: availableConnections
- else
- connection.close()
- lock.release
- }
- }
-
- /** ..
- */
- def close() {
- closing = true
- for (conn <- availableConnections) conn.close()
- }
-
- /** Executes a statement that returns a relation on this database.
- *
- * @param relationStatement The statement to execute.
- * @return The relation returned by the database for this statement.
- */
- def executeStatement(relationStatement: statement.Relation): result.Relation =
- executeStatement(relationStatement, false);
-
- /** Executes a statement that returns a relation on this database.
- *
- * @param relationStatement The statement to execute.
- * @param debug Whether debugging information should be printed on the console.
- * @return The relation returned by the database for this statement.
- */
- def executeStatement(relationStatement: statement.Relation,
- debug: Boolean): result.Relation =
- new scala.dbc.result.Relation {
- val statement = relationStatement
- if (debug) Console.println("## " + statement.sqlString)
- private val connection = getConnection
- val sqlResult = connection.createStatement().executeQuery(statement.sqlString)
- closeConnection(connection)
- statement.typeCheck(this)
- }
-
- /** Executes a statement that updates the state of the database.
- * @param statusStatement The statement to execute.
- * @return The status of the database after the statement has been executed. */
- def executeStatement(statusStatement: statement.Status): result.Status[Unit] =
- executeStatement(statusStatement, false);
-
- /** Executes a statement that updates the state of the database.
- *
- * @param statusStatement The statement to execute.
- * @param debug Whether debugging information should be printed on the console.
- * @return The status of the database after the statement has been executed.
- */
- def executeStatement(statusStatement: statement.Status,
- debug: Boolean): result.Status[Unit] =
- new scala.dbc.result.Status[Unit] {
- val statement = statusStatement;
- if (debug) Console.println("## " + statement.sqlString);
- def result = ()
- private val connection = getConnection;
- val jdbcStatement: java.sql.Statement = connection.createStatement();
- jdbcStatement.execute(statement.sqlString);
- val touchedCount = Some(jdbcStatement.getUpdateCount());
- closeConnection(connection);
- }
-
- /** Executes a list of statements or other operations inside a transaction.
- * Only statements are protected in a transaction, other Scala code is not.
- *
- * @param transactionStatement The transaction to execute as a closure.
- * @return The status of the database after the transaction has been executed.
- */
- def executeStatement[ResultType](transactionStatement: statement.Transaction[ResultType]): result.Status[ResultType] =
- executeStatement(transactionStatement, false);
-
- /** Executes a list of statements or other operations inside a transaction.
- * Only statements are protected in a transaction, other Scala code is not.
- *
- * @param transactionStatement The transaction to execute as a closure.
- * @param debug Whether debugging information should be printed on the console.
- * @return The status of the database after the transaction has been executed.
- */
- def executeStatement[ResultType](transactionStatement: statement.Transaction[ResultType], debug: Boolean): result.Status[ResultType] = {
- new scala.dbc.result.Status[ResultType] {
- val touchedCount = None
- val statement = transactionStatement
- private val connection = getConnection
- connection.setAutoCommit(false)
- val jdbcStatement: java.sql.Statement = connection.createStatement();
- if (debug) Console.println("## " + transactionStatement.sqlStartString);
- jdbcStatement.execute(transactionStatement.sqlStartString);
- val result: ResultType = try {
- val buffer = transactionStatement.transactionBody(Database.this);
- if (debug) Console.println("## " + transactionStatement.sqlCommitString);
- jdbcStatement.execute(transactionStatement.sqlCommitString);
- buffer
- } catch {
- case e: Throwable => {
- if (debug) Console.println("## " + transactionStatement.sqlAbortString);
- jdbcStatement.execute(transactionStatement.sqlAbortString);
- throw e
- }
- }
- connection.setAutoCommit(true)
- closeConnection(connection)
- }
- }
-
-}
diff --git a/src/dbc/scala/dbc/Syntax.scala b/src/dbc/scala/dbc/Syntax.scala
deleted file mode 100644
index 85cd1c1..0000000
--- a/src/dbc/scala/dbc/Syntax.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc;
-
-
-import java.math.{BigDecimal, BigInteger};
-
-
-/** This class ..
- *
- */
- at deprecated(DbcIsDeprecated, "2.9.0") object Syntax {
-
- import syntax.DataTypeUtil;
-
- /* Data types */
- def boolean = DataTypeUtil.boolean;
- def tinyint = DataTypeUtil.tinyint;
- def smallint = DataTypeUtil.smallint;
- def integer = DataTypeUtil.integer;
- def bigint = DataTypeUtil.bigint;
- def real = DataTypeUtil.real;
-
- def numeric(precision: Int) = DataTypeUtil.numeric(precision);
- def numeric(precision: Int, scale: Int) = DataTypeUtil.numeric(precision, scale);
-
- def doublePrecision = DataTypeUtil.doublePrecision;
- def character(length: Int) = DataTypeUtil.character(length);
- def characterVarying(length: Int) = DataTypeUtil.characterVarying(length);
- def characterLargeObject = DataTypeUtil.characterLargeObject;
-
- /* Statements */
- //def select
-
- /* Other stuff */
- def database (server: String, username: String, password: String): dbc.Database =
- syntax.Database.database(server, username, password);
-
-}
diff --git a/src/dbc/scala/dbc/Utilities.scala b/src/dbc/scala/dbc/Utilities.scala
deleted file mode 100644
index c37b1bb..0000000
--- a/src/dbc/scala/dbc/Utilities.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc;
-
-
-/** An object offering transformation methods (views) on various values.
- * This object's members must be visible in an expression to use value
- * auto-conversion.
- */
- at deprecated(DbcIsDeprecated, "2.9.0") object Utilities {
-
- implicit def constantToValue (obj: statement.expression.Constant): Value =
- obj.constantValue;
-
- implicit def valueToConstant (obj: Value): statement.expression.Constant =
- new statement.expression.Constant {
- val constantValue = obj;
- }
-
-}
diff --git a/src/dbc/scala/dbc/Value.scala b/src/dbc/scala/dbc/Value.scala
deleted file mode 100644
index a502f51..0000000
--- a/src/dbc/scala/dbc/Value.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc;
-
-
-/** A SQL-99 value of any type. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Value {
-
- /** The SQL-99 type of the value. */
- val dataType: DataType;
-
- type NativeType = dataType.type#NativeType;
-
- val nativeValue: NativeType;
-
- /** A SQL-99 compliant string representation of the value. */
- def sqlString: String;
-
-}
diff --git a/src/dbc/scala/dbc/Vendor.scala b/src/dbc/scala/dbc/Vendor.scala
deleted file mode 100644
index 68f6102..0000000
--- a/src/dbc/scala/dbc/Vendor.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc;
-
-
-import java.sql.{Connection, Driver};
-
-
-/** This class ..
- */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Vendor {
-
- def nativeDriverClass: Class[_];
- def uri: java.net.URI;
- def user: String;
- def pass: String;
- def nativeProperties: java.util.Properties = {
- val properties = new java.util.Properties();
- properties.setProperty("user", user);
- properties.setProperty("password", pass);
- properties
- }
-
- def retainedConnections: Int;
-
- def getConnection: Connection = {
- val driver = nativeDriverClass.newInstance().asInstanceOf[Driver];
- driver.connect(uri.toString(),nativeProperties)
- }
-
- def urlProtocolString: String;
-
-}
diff --git a/src/dbc/scala/dbc/datatype/ApproximateNumeric.scala b/src/dbc/scala/dbc/datatype/ApproximateNumeric.scala
deleted file mode 100644
index 31752e1..0000000
--- a/src/dbc/scala/dbc/datatype/ApproximateNumeric.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A type category for all SQL types that store varying-precision
- * numbers.
- */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class ApproximateNumeric[Type] (
- override val nativeTypeId: DataType.Id
-) extends datatype.Numeric[Type](nativeTypeId) {
-
- def isEquivalent(datatype: DataType) = datatype match {
- case dt: ApproximateNumeric[_] =>
- (nativeTypeId == dt.nativeTypeId &&
- precisionRadix == dt.precisionRadix &&
- precision == dt.precision &&
- signed == dt.signed)
- case _ =>
- false
- }
-
- def isSubtypeOf (datatype:DataType) = datatype match {
- case dt:ApproximateNumeric[_] =>
- (nativeTypeId == dt.nativeTypeId &&
- precisionRadix == dt.precisionRadix &&
- precision <= dt.precision &&
- signed == dt.signed)
- case _ =>
- false
- }
-
- /** A SQL-99 compliant string representation of the type.
- * <h3>Compatibility notice</h3> This method assumes that a real
- * uses 32 bits and a double 64. This is not defined in the
- * standard but is usually the case.
- */
- override def sqlString: java.lang.String = Tuple2(precisionRadix,precision) match {
- case Tuple2(2,64) => "REAL"
- case Tuple2(2,128) => "DOUBLE PRECISION"
- case Tuple2(2,p) =>
- throw exception.UnsupportedFeature("SQL-99 does not support an approximate numeric type with a binary defined precision other than 16, 32 and 64 bits");
- case Tuple2(10,p) => "FLOAT (" + p.toString() + ")"
- case Tuple2(pr,_) =>
- throw exception.UnsupportedFeature("SQL-99 does not support the precision of an approximate numeric type to be defined in a radix other than 2 or 10");
- }
-
-}
diff --git a/src/dbc/scala/dbc/datatype/Boolean.scala b/src/dbc/scala/dbc/datatype/Boolean.scala
deleted file mode 100644
index 4a32ce9..0000000
--- a/src/dbc/scala/dbc/datatype/Boolean.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** The SQL type for a truth value. */
- at deprecated(DbcIsDeprecated, "2.9.0") class Boolean extends DataType {
-
- def isEquivalent (datatype:DataType) = datatype match {
- case dt:Boolean => true
- case _ => false
- }
-
- def isSubtypeOf (datatype:DataType) = isEquivalent(datatype);
-
- type NativeType = scala.Boolean;
- val nativeTypeId = DataType.BOOLEAN;
-
- /** A SQL-99 compliant string representation of the type. */
- override def sqlString: java.lang.String = "BOOLEAN";
-
-}
diff --git a/src/dbc/scala/dbc/datatype/Character.scala b/src/dbc/scala/dbc/datatype/Character.scala
deleted file mode 100644
index 02dec06..0000000
--- a/src/dbc/scala/dbc/datatype/Character.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A SQL type for a string of characters of arbitrary length with
- * arbitrary character set.
- */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Character extends CharacterString {
-
- def isEquivalent(datatype: DataType) = datatype match {
- case dt: Character =>
- length == dt.length && encoding == dt.encoding
- case _ =>
- false
- }
-
- def isSubtypeOf(datatype: DataType) = datatype match {
- case dt: Character =>
- length >= dt.length && encoding == dt.encoding
- case _ =>
- false
- }
-
- /** The length of the string defined in characters. */
- def length: Int;
-
- /** A SQL-99 compliant string representation of the type. */
- override def sqlString: java.lang.String = "CHARACTER (" + length.toString() + ")";
-
-}
diff --git a/src/dbc/scala/dbc/datatype/CharacterLargeObject.scala b/src/dbc/scala/dbc/datatype/CharacterLargeObject.scala
deleted file mode 100644
index 2c7ef64..0000000
--- a/src/dbc/scala/dbc/datatype/CharacterLargeObject.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A SQL type for an unbounded length string of characters with arbitrary
- * character set. */
- at deprecated(DbcIsDeprecated, "2.9.0") class CharacterLargeObject extends CharacterString {
-
- def isEquivalent (datatype:DataType) = datatype match {
- case dt:CharacterLargeObject => {
- encoding == dt.encoding
- }
- case _ => false
- }
-
- def isSubtypeOf (datatype:DataType) = isEquivalent(datatype);
-
- /** A SQL-99 compliant string representation of the type. */
- override def sqlString: java.lang.String = "CHARACTER LARGE OBJECT";
-
-}
diff --git a/src/dbc/scala/dbc/datatype/CharacterString.scala b/src/dbc/scala/dbc/datatype/CharacterString.scala
deleted file mode 100644
index 54d6e01..0000000
--- a/src/dbc/scala/dbc/datatype/CharacterString.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A type category for all SQL types that store strings of characters. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class CharacterString extends String {
-
- type NativeType = java.lang.String;
- val nativeTypeId = DataType.STRING;
-
- /** The name of the character set in which the string is encoded. */
- def encoding: Option[java.lang.String] = None;
-
-}
diff --git a/src/dbc/scala/dbc/datatype/CharacterVarying.scala b/src/dbc/scala/dbc/datatype/CharacterVarying.scala
deleted file mode 100644
index 9df4875..0000000
--- a/src/dbc/scala/dbc/datatype/CharacterVarying.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A SQL type for a varying length string of characters with arbitrary
- * maximal length and arbitrary character set.
- */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class CharacterVarying extends CharacterString {
-
- def isEquivalent(datatype: DataType) = datatype match {
- case dt: CharacterVarying =>
- length == dt.length && encoding == dt.encoding
- case _ =>
- false
- }
-
- def isSubtypeOf(datatype: DataType) = datatype match {
- case dt: CharacterVarying =>
- length >= dt.length && encoding == dt.encoding
- case _ =>
- false
- }
-
- /** The maximal length of the string defined in characters. */
- def length: Int;
-
- /** A SQL-99 compliant string representation of the type. */
- override def sqlString: java.lang.String =
- "CHARACTER VARYING (" + length.toString() + ")";
-
-}
diff --git a/src/dbc/scala/dbc/datatype/ExactNumeric.scala b/src/dbc/scala/dbc/datatype/ExactNumeric.scala
deleted file mode 100644
index a578846..0000000
--- a/src/dbc/scala/dbc/datatype/ExactNumeric.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A type category for all SQL types that store constant-precision
- * numbers.
- */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class ExactNumeric[Type](
- override val nativeTypeId: DataType.Id
-) extends datatype.Numeric[Type](nativeTypeId) {
-
- def isEquivalent(datatype: DataType) = datatype match {
- case dt: ExactNumeric[_] =>
- (nativeTypeId == dt.nativeTypeId &&
- precisionRadix == dt.precisionRadix &&
- precision == dt.precision &&
- scale == dt.scale &&
- signed == dt.signed)
- case _ =>
- false
- }
-
- def isSubtypeOf(datatype: DataType) = datatype match {
- case dt: ExactNumeric[_] =>
- (nativeTypeId == dt.nativeTypeId &&
- precisionRadix == dt.precisionRadix &&
- precision <= dt.precision &&
- scale <= dt.scale &&
- signed == dt.signed)
- case _ =>
- false
- }
-
- /** The number of digits used after the decimal point. */
- def scale: Int;
-
- /** A SQL-99 compliant string representation of the type.
- * <h3>Compatibility notice</h3> This method assumes that an integer
- * uses 32 bits, a small 16 and a big 64. This is not defined in the
- * standard but is usually the case.
- */
- override def sqlString: java.lang.String = Tuple3(precisionRadix,precision,scale) match {
- case Tuple3(2,16,0) => "SMALLINT"
- case Tuple3(2,32,0) => "INTEGER"
- case Tuple3(2,64,0) => "BIGINT"
- case Tuple3(2,java.lang.Integer.MAX_VALUE,0) => "BIGINT"
- case Tuple3(2,p,s) =>
- throw exception.UnsupportedFeature("SQL-99 does not support an exact numeric type with a binary defined precision other than 16, 32 and 64 bits");
- case Tuple3(10,p,0) => "NUMERIC (" + p.toString() + ")"
- case Tuple3(10,p,s) => "NUMERIC (" + p.toString() + ", " + s.toString() + ")"
- case Tuple3(pr,_,_) =>
- throw exception.UnsupportedFeature("SQL-99 does not support the precision of an exact numeric type to be defined in a radix other than 2 or 10");
- }
-
-}
diff --git a/src/dbc/scala/dbc/datatype/Factory.scala b/src/dbc/scala/dbc/datatype/Factory.scala
deleted file mode 100644
index bb9b3f5..0000000
--- a/src/dbc/scala/dbc/datatype/Factory.scala
+++ /dev/null
@@ -1,250 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-import java.sql.Types._;
-import java.math.BigInteger;
-import java.math.BigDecimal;
-
- at deprecated(DbcIsDeprecated, "2.9.0") object Factory {
-
- final val java_lang_Integer_SIZE = 32;
- final val java_lang_Long_SIZE = 64;
-
- /** Returns a nullable property formatted as a boolean option */
- def isNullable (metadata:java.sql.ResultSetMetaData, index:Int): Option[scala.Boolean] =
- metadata.isNullable(index) match {
- case java.sql.ResultSetMetaData.columnNoNulls => Some(false);
- case java.sql.ResultSetMetaData.columnNullable => Some(true);
- case java.sql.ResultSetMetaData.columnNullableUnknown => None;
- }
-
- /** Returns the binary precision for an integer field. This should only be
- * used to find precision for integer numbers. It assumes that
- * bytes cannot be used partially (result % 8 = 0). */
- def bytePrecision (precision:Int, signed:scala.Boolean, safe:scala.Boolean): Int = {
- val decimalPrecision = precision + (if (safe) 1 else 0);
- Pair(signed,decimalPrecision) match {
- case Pair(_,0) => java.lang.Integer.MAX_VALUE // That's a bit of a hack.
- case Pair(_,dp) if (dp <= 3) => 8
- case Pair(_,dp) if (dp <= 5) => 16
- case Pair(true,dp) if (dp <= 7) => 24
- case Pair(false,dp) if (dp <= 8) => 24
- case Pair(_,dp) if (dp <= 10) => 32
- case Pair(true,dp) if (dp <= 12) => 40
- case Pair(false,dp) if (dp <= 13) => 40
- case Pair(_,dp) if (dp <= 15) => 48
- case Pair(_,dp) if (dp <= 17) => 56
- case Pair(true,dp) if (dp <= 19) => 64
- case Pair(false,dp) if (dp <= 20) => 64
- case Pair(_,dp) if (dp <= 22) => 72
- case Pair(true,dp) if (dp <= 24) => 80
- case Pair(false,dp) if (dp <= 25) => 80
- case Pair(_,dp) if (dp <= 27) => 88
- case Pair(_,dp) if (dp <= 29) => 96
- case Pair(_,dp) if (dp <= 32) => 104
- case Pair(_,dp) if (dp <= 34) => 112
- case Pair(true,dp) if (dp <= 36) => 120
- case Pair(false,dp) if (dp <= 37) => 120
- case Pair(_,dp) if (dp <= 39) => 128
- case _ => java.lang.Integer.MAX_VALUE
- }
- }
-
- def create (metadata:java.sql.ResultSetMetaData, index:Int): DataType = {
- metadata.getColumnType(index) match {
- /* Boolean data types. */
- case BOOLEAN => new datatype.Boolean {
- override val nullable = isNullable(metadata,index);
- }
- case BIT => new datatype.Boolean {
- override val nullable = isNullable(metadata,index);
- }
- /* Fixed precision numeric data types. */
- case DECIMAL => {
- Pair(bytePrecision(metadata.getPrecision(index),metadata.isSigned(index),true),metadata.getScale(index) == 0) match {
- case Pair(bp,true) if (bp <= java_lang_Integer_SIZE) =>
- new datatype.ExactNumeric[Int](DataType.INT) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- case Pair(bp,true) if (bp <= java_lang_Long_SIZE) =>
- new datatype.ExactNumeric[Long](DataType.LONG) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- case Pair(_,true) =>
- new datatype.ExactNumeric[BigInteger](DataType.BIG_INTEGER) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- case Pair(_,false) =>
- new datatype.ExactNumeric[BigDecimal](DataType.BIG_DECIMAL) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- }
- }
- case NUMERIC => {
- Pair(bytePrecision(metadata.getPrecision(index),metadata.isSigned(index),true),metadata.getScale(index) == 0) match {
- case Pair(bp,true) if (bp <= java_lang_Integer_SIZE) =>
- new datatype.ExactNumeric[Int](DataType.INT) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- case Pair(bp,true) if (bp <= java_lang_Long_SIZE) =>
- new datatype.ExactNumeric[Long](DataType.LONG) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- case Pair(_,true) =>
- new datatype.ExactNumeric[BigInteger](DataType.BIG_INTEGER) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- case Pair(_,false) =>
- new datatype.ExactNumeric[BigDecimal](DataType.BIG_DECIMAL) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 10;
- val precision = metadata.getPrecision(index);
- val signed = metadata.isSigned(index);
- val scale = metadata.getScale(index);
- }
- }
- }
- /* Fixed precision integer data types. */
- case BIGINT =>
- new datatype.ExactNumeric[Long](DataType.LONG) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 2;
- val precision = 64;
- val signed = metadata.isSigned(index);
- val scale = 0;
- }
- case INTEGER =>
- new datatype.ExactNumeric[Int](DataType.INT) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 2;
- val precision = 32;
- val signed = metadata.isSigned(index);
- val scale = 0;
- }
- case SMALLINT =>
- new datatype.ExactNumeric[Short](DataType.SHORT) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 2;
- val precision = 16;
- val signed = metadata.isSigned(index);
- val scale = 0;
- }
- case TINYINT =>
- new datatype.ExactNumeric[Byte](DataType.BYTE) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 2;
- val precision = 8;
- val signed = metadata.isSigned(index);
- val scale = 0;
- }
- /* Floating point numeric data types. */
- case REAL =>
- new datatype.ApproximateNumeric[Float](DataType.FLOAT) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 2;
- val precision = 64;
- val signed = metadata.isSigned(index);
- }
- case DOUBLE =>
- new datatype.ApproximateNumeric[Double](DataType.DOUBLE) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 2;
- val precision = 128;
- val signed = metadata.isSigned(index);
- }
- case FLOAT =>
- new datatype.ApproximateNumeric[Double](DataType.DOUBLE) {
- override val nullable = isNullable(metadata,index);
- val precisionRadix = 2;
- val precision = 128;
- val signed = metadata.isSigned(index);
- }
- /* Character string data types. */
- case CHAR => new datatype.Character {
- override val nullable = isNullable(metadata,index);
- val length = metadata.getColumnDisplaySize(index);
- }
- case CLOB => new datatype.CharacterLargeObject {
- override val nullable = isNullable(metadata,index);
- }
- case LONGVARCHAR => {
- if (metadata.getColumnDisplaySize(index) >= 0)
- new datatype.CharacterVarying {
- override val nullable = isNullable(metadata,index);
- def length = metadata.getColumnDisplaySize(index);
- }
- else // A PostgreSQL Hack
- new datatype.CharacterLargeObject {
- override val nullable = isNullable(metadata,index);
- }
- }
- case VARCHAR => {
- if (metadata.getColumnDisplaySize(index) >= 0)
- new datatype.CharacterVarying {
- override val nullable = isNullable(metadata,index);
- def length = metadata.getColumnDisplaySize(index);
- }
- else // A PostgreSQL Hack
- new datatype.CharacterLargeObject {
- override val nullable = isNullable(metadata,index);
- }
- }
- /* Undefined cases. */
- case OTHER => new datatype.Unknown {
- override val nullable = isNullable(metadata, index);
- }
- /* Unsupported data types. */
- case REF | ARRAY | STRUCT =>
- sys.error ("I don't support composite data types yet.");
- case DATALINK | DISTINCT | JAVA_OBJECT | NULL =>
- sys.error ("I won't support strange data types.");
- /* Unsupported binary string data types. */
- case BINARY | BLOB | LONGVARBINARY | VARBINARY =>
- sys.error ("I don't support binary string data types yet.");
- /* Unsupported date and time data types. */
- case DATE | TIME | TIMESTAMP =>
- sys.error ("I don't support date and time data types yet.");
- /* Default case */
- case x => sys.error ("I don't know about this ("+metadata.getColumnTypeName(index)+") JDBC type.")
- }
- }
-}
diff --git a/src/dbc/scala/dbc/datatype/Numeric.scala b/src/dbc/scala/dbc/datatype/Numeric.scala
deleted file mode 100644
index c13f454..0000000
--- a/src/dbc/scala/dbc/datatype/Numeric.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A type category for all SQL types that store numbers. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Numeric[Type](_nativeTypeId: DataType.Id) extends DataType {
-
- type NativeType = Type;
- val nativeTypeId = _nativeTypeId;
-
- /** The radix in which the precision (and scale when appliable) is defined.
- * ISO-9075 only allows 2 and 10 for this value.
- */
- def precisionRadix: Int;
-
- /** The number of significant digits for that number. */
- def precision: Int;
-
- /** Whether the number is signed or not. */
- def signed: scala.Boolean;
-
-}
diff --git a/src/dbc/scala/dbc/datatype/String.scala b/src/dbc/scala/dbc/datatype/String.scala
deleted file mode 100644
index 291504f..0000000
--- a/src/dbc/scala/dbc/datatype/String.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** A type category for all SQL types that store strings of elements.
- */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class String extends DataType {
-
- /** The maximal possible length of the string defined in characters.
- * This is an implementation-specific value.
- */
- def maxLength: Option[Int] = None;
-
-}
diff --git a/src/dbc/scala/dbc/datatype/Unknown.scala b/src/dbc/scala/dbc/datatype/Unknown.scala
deleted file mode 100644
index 14a33c6..0000000
--- a/src/dbc/scala/dbc/datatype/Unknown.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package datatype;
-
-
-/** The SQL type for a truth value. */
- at deprecated(DbcIsDeprecated, "2.9.0") class Unknown extends DataType {
-
- def isEquivalent(datatype: DataType) = datatype match {
- case dt: Unknown =>
- nativeTypeId == dt.nativeTypeId
- case _ =>
- false
- }
-
- def isSubtypeOf(datatype: DataType) = true;
-
- type NativeType = AnyRef;
- val nativeTypeId = DataType.OBJECT;
-
- /** A SQL-99 compliant string representation of the type. */
- override def sqlString: java.lang.String =
- sys.error("The 'UNKNOWN' data type cannot be represented.");
-
-}
diff --git a/src/dbc/scala/dbc/exception/IncompatibleSchema.scala b/src/dbc/scala/dbc/exception/IncompatibleSchema.scala
deleted file mode 100644
index c8d53bb..0000000
--- a/src/dbc/scala/dbc/exception/IncompatibleSchema.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package exception
-
-
-/** A type category for all SQL types that store constant-precision numbers. */
- at deprecated(DbcIsDeprecated, "2.9.0") case class IncompatibleSchema (
- expectedSchema: List[DataType],
- foundSchema: List[DataType]
-) extends Exception;
diff --git a/src/dbc/scala/dbc/exception/UnsupportedFeature.scala b/src/dbc/scala/dbc/exception/UnsupportedFeature.scala
deleted file mode 100644
index dd6f904..0000000
--- a/src/dbc/scala/dbc/exception/UnsupportedFeature.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package exception
-
-
-/** A type category for all SQL types that store constant-precision numbers. */
- at deprecated(DbcIsDeprecated, "2.9.0") case class UnsupportedFeature (msg: String) extends Exception;
diff --git a/src/dbc/scala/dbc/package.scala b/src/dbc/scala/dbc/package.scala
deleted file mode 100644
index b1552e1..0000000
--- a/src/dbc/scala/dbc/package.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package scala
-
-package object dbc {
- final val DbcIsDeprecated =
- "scala.dbc will be removed after version 2.9. Use an active sql library such as scalaquery instead."
-}
\ No newline at end of file
diff --git a/src/dbc/scala/dbc/result/Field.scala b/src/dbc/scala/dbc/result/Field.scala
deleted file mode 100644
index cd3309b..0000000
--- a/src/dbc/scala/dbc/result/Field.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package result
-
-
-import scala.dbc.datatype._
-import scala.dbc.value._
-
-/** An ISO-9075:2003 (SQL) table field. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Field {
-
- /** The content (value) of the field. The type of this value is undefined,
- * transformation into a useful type will be done by an automatic view
- * function defined in the field object.
- */
- def content: Value
-
- final def value[Type <: Value]: Type =
- content.asInstanceOf[Type]
-
- final def exactNumericValue[NativeType] =
- content.asInstanceOf[dbc.value.ExactNumeric[NativeType]]
-
- final def approximateNumericValue[NativeType] =
- content.asInstanceOf[dbc.value.ApproximateNumeric[NativeType]]
-
- final def booleanValue =
- content.asInstanceOf[dbc.value.Boolean]
-
- final def characterValue =
- content.asInstanceOf[dbc.value.Character]
-
- final def characterLargeObjectValue =
- content.asInstanceOf[dbc.value.CharacterLargeObject]
-
- final def characterVaryingValue =
- content.asInstanceOf[dbc.value.CharacterVarying]
-
- final def unknownValue =
- content.asInstanceOf[dbc.value.Unknown]
-
- /** The tuple that contains this field. */
- def originatingTuple: Tuple
-
- /** The field metadata attached to this field. */
- def metadata: FieldMetadata
-
-}
-
- at deprecated(DbcIsDeprecated, "2.9.0") object Field {
-
- implicit def fieldToValue (field: Field): Value = field.content
-
-}
diff --git a/src/dbc/scala/dbc/result/FieldMetadata.scala b/src/dbc/scala/dbc/result/FieldMetadata.scala
deleted file mode 100644
index 3c2de29..0000000
--- a/src/dbc/scala/dbc/result/FieldMetadata.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package result
-
-
-/** The class <code>FieldMetadata</cocde> provides informations attached to
- * a field about its content and its relationship to the originating database.
- */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class FieldMetadata {
-
- /** The name of the field. */
- def name: String
-
- /** The index of the field in the tuple. */
- def index: Int
-
- /** The expected type of the field. This information is used for automatic
- * transformation of the field value into a usable type.
- */
- def datatype: DataType
-
- /** The name of the catalog in the database from which the field originates */
- def catalog: String
-
- /** The name of the schema in the database from which the field originates */
- def schema: String
-
- /** The name of the table in the database from which the field originates */
- def table: String
-
-}
diff --git a/src/dbc/scala/dbc/result/Relation.scala b/src/dbc/scala/dbc/result/Relation.scala
deleted file mode 100644
index 4897cd1..0000000
--- a/src/dbc/scala/dbc/result/Relation.scala
+++ /dev/null
@@ -1,82 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package result
-
-
-/** An ISO-9075:2003 (SQL) table. This is equivalent to a relation in the
- * relational model. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Relation extends AnyRef with Iterable[Tuple] {
-
- /** The statement that generated this relation. */
- def statement: scala.dbc.statement.Relation
-
- /** A JDBC result containing this relation. */
- protected def sqlResult: java.sql.ResultSet
-
- /** A JDBC metadata object attached to the relation. */
- protected def sqlMetadata: java.sql.ResultSetMetaData = sqlResult.getMetaData()
-
- /** Metadata about all fields in a tuple of the relation. */
- def metadata: List[FieldMetadata] =
- for (count <- List.range(1, sqlMetadata.getColumnCount()+1)) yield
- new FieldMetadata {
- val name: String = sqlMetadata.getColumnName(count)
- val index: Int = count
- val datatype: DataType = dbc.datatype.Factory.create(sqlMetadata,count)
- val catalog: String = sqlMetadata.getCatalogName(count)
- val schema: String = sqlMetadata.getSchemaName(count)
- val table: String = sqlMetadata.getTableName(count)
- }
-
- /** Metadata about the field at the given index. If there is no such
- * field <code>None</code> is returned instead. */
- def metadataFor (index:Int): Option[FieldMetadata] = {
- val meta = metadata
- if (meta.length > index)
- Some(meta(index))
- else
- None
- }
-
- /** Metadata about the field with the given column name. If there is no
- * such field, <code>None</code> is returned instead. */
- def metadataFor (name:String): Option[FieldMetadata] =
- metadata.find(f=>(f.name==name));
-
- /** An iterator on the tuples of the relation.
- * <h3>Caution</h3> A Relation only has one single iterator, due to limitations
- * in DBMS. This means that if this method is called multiple times, all returned
- * iterators will share the same state. */
- def iterator: Iterator[Tuple] = new Iterator[Tuple] {
- protected val result: java.sql.ResultSet = Relation.this.sqlResult
- def hasNext: Boolean = resultNext
- private var resultNext = result.next()
- def next: Tuple = {
- if (resultNext) {
- val newTuple = new Tuple {
- val me = this
- val originatingRelation = Relation.this
- val fields: List[Field] = for (fieldMetadata <- metadata) yield
- new Field {
- val metadata = fieldMetadata
- val content = dbc.value.Factory.create(result,metadata.index,metadata.datatype)
- val originatingTuple = me
- }
- }
- resultNext = result.next()
- newTuple
- }
- else sys.error("next on empty iterator")
- }
- }
-
-}
diff --git a/src/dbc/scala/dbc/result/Status.scala b/src/dbc/scala/dbc/result/Status.scala
deleted file mode 100644
index d3152a5..0000000
--- a/src/dbc/scala/dbc/result/Status.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package result;
-
-
-import scala.dbc.datatype._;
-
-/** An object containing the status of a query */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Status[ResultType] {
-
- /** The statement that generated this status result. */
- def statement: scala.dbc.statement.Statement;
-
- /** The number of elements modified or added by this statement. */
- def touchedCount: Option[Int];
-
- def result: ResultType;
-
-}
diff --git a/src/dbc/scala/dbc/result/Tuple.scala b/src/dbc/scala/dbc/result/Tuple.scala
deleted file mode 100644
index 80ab5c2..0000000
--- a/src/dbc/scala/dbc/result/Tuple.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package result;
-
-
-/** An ISO-9075:2003 (SQL) table row. This is equivalent to a tuple in the relational model. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Tuple {
-
- /** All the fields contained in the tuple. */
- def fields: List[Field];
-
- /** The relation that contains the tuple. */
- def originatingRelation: Relation;
-
- /** The field at the given index. If there is no such field (that is the index is out of bounds), <code>None</code> is returned instead. */
- def apply (index:Int): Field =
- try {
- fields(index)
- } catch {
- case e =>
- throw new java.lang.IndexOutOfBoundsException("Field at index "+index+" does not exist in relation");
- }
-
- /** The field with the given column name. If there is no such field, <code>None</code> is returned instead. */
- def apply (name:String): Field = {
- def findField (fields: List[Field], name:String): Field = fields match {
- case Nil => throw new java.lang.IndexOutOfBoundsException("Field '"+name+"' does not exist in relation")
- case field :: _ if (field.metadata.name == name) => field
- case field :: fields => findField (fields, name)
- }
- findField (fields, name);
- }
-}
diff --git a/src/dbc/scala/dbc/statement/AccessMode.scala b/src/dbc/scala/dbc/statement/AccessMode.scala
deleted file mode 100644
index 885e001..0000000
--- a/src/dbc/scala/dbc/statement/AccessMode.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class AccessMode {
- def sqlString: String
-}
-
- at deprecated(DbcIsDeprecated, "2.9.0") object AccessMode {
- case object ReadOnly extends AccessMode {
- def sqlString = "READ ONLY"
- }
- case object ReadWrite extends AccessMode {
- def sqlString = "READ WRITE"
- }
-}
diff --git a/src/dbc/scala/dbc/statement/DerivedColumn.scala b/src/dbc/scala/dbc/statement/DerivedColumn.scala
deleted file mode 100644
index ae05df9..0000000
--- a/src/dbc/scala/dbc/statement/DerivedColumn.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class DerivedColumn {
-
- /** The value for the column. This value can be of any type but must be
- * calculated from fields that appear in a relation that takes part
- * in the query.
- */
- def valueExpression: Expression
-
- /** A new name for this field. This name must be unique for the query in
- * which the column takes part.
- */
- def asClause: Option[String]
-
- /** A SQL-99 compliant string representation of the derived column
- * sub-statement. This only has a meaning inside a select statement.
- */
- def sqlString: String =
- valueExpression.sqlInnerString +
- (asClause match {
- case None => ""
- case Some(ac) => " AS " + ac
- })
-
-}
diff --git a/src/dbc/scala/dbc/statement/Expression.scala b/src/dbc/scala/dbc/statement/Expression.scala
deleted file mode 100644
index c2da91e..0000000
--- a/src/dbc/scala/dbc/statement/Expression.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-/** An expression that calculates some value from fields. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Expression extends Relation {
-
- def fieldTypes: List[DataType] = Nil
-
- /** A SQL-99 compliant string representation of the expression. */
- def sqlString: String = "SELECT " + sqlInnerString
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement.
- */
- def sqlInnerString: String
-
-}
diff --git a/src/dbc/scala/dbc/statement/Insert.scala b/src/dbc/scala/dbc/statement/Insert.scala
deleted file mode 100644
index 189ccec..0000000
--- a/src/dbc/scala/dbc/statement/Insert.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-import scala.dbc.statement.expression._
-
-/** An insertion of values into a table. */
- at deprecated(DbcIsDeprecated, "2.9.0") case class Insert(insertionTarget: String, insertionData: InsertionData)
- extends Status {
-
- /** A SQL-99 compliant string representation of the select statement. */
- def sqlString: String =
- "INSERT INTO " + insertionTarget + " " + insertionData.sqlString
-
- /** The name of the table where the data should be added. */
- //def insertionTarget: String
-
- /** The data that will be added tot he table. */
- //def insertionData: InsertionData
-
-}
diff --git a/src/dbc/scala/dbc/statement/InsertionData.scala b/src/dbc/scala/dbc/statement/InsertionData.scala
deleted file mode 100644
index e91ad7e..0000000
--- a/src/dbc/scala/dbc/statement/InsertionData.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-import scala.dbc.statement.expression._
-
-/** Data to be inserted into a table in an <code>Insert</code>. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class InsertionData {
- def sqlString: String
-}
-
- at deprecated(DbcIsDeprecated, "2.9.0") object InsertionData {
- /** Insertion of data resulting from a query on the database. */
- @deprecated(DbcIsDeprecated, "2.9.0") case class Subquery(query: Relation) extends InsertionData {
- def sqlString = query.sqlString
- }
- /** Insertion of data as explicitly defined values. */
- @deprecated(DbcIsDeprecated, "2.9.0") case class Constructor(
- columnNames: Option[List[String]],
- columnValues: List[Expression]
- ) extends InsertionData {
- def sqlString =
- (columnNames match {
- case None => ""
- case Some(cn) => cn.mkString(" (",", ",")")
- }) +
- " VALUES" +
- columnValues.map(e => e.sqlInnerString).mkString(" (",", ",")")
- }
-}
diff --git a/src/dbc/scala/dbc/statement/IsolationLevel.scala b/src/dbc/scala/dbc/statement/IsolationLevel.scala
deleted file mode 100644
index b31614c..0000000
--- a/src/dbc/scala/dbc/statement/IsolationLevel.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class IsolationLevel {
- def sqlString: String
-}
-
- at deprecated(DbcIsDeprecated, "2.9.0") object IsolationLevel {
- case object ReadUncommitted extends IsolationLevel {
- def sqlString = "ISOLATION LEVEL READ UNCOMMITTED"
- }
- case object ReadCommitted extends IsolationLevel {
- def sqlString = "ISOLATION LEVEL READ COMMITTED"
- }
- case object RepeatableRead extends IsolationLevel {
- def sqlString = "ISOLATION LEVEL REPEATABLE READ"
- }
- case object Serializable extends IsolationLevel {
- def sqlString = "ISOLATION LEVEL SERIALIZABLE"
- }
-}
diff --git a/src/dbc/scala/dbc/statement/JoinType.scala b/src/dbc/scala/dbc/statement/JoinType.scala
deleted file mode 100644
index 698612b..0000000
--- a/src/dbc/scala/dbc/statement/JoinType.scala
+++ /dev/null
@@ -1,56 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-/** A join behaviour in a <code>Jointure</code>. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class JoinType {
- /** A SQL-99 string representation of the join behaviour. */
- def sqlString: String
-}
-
- at deprecated(DbcIsDeprecated, "2.9.0") object JoinType {
-
- /** A join behaviour where a joined tuple is created only when a
- * corresponding tuple exists in both original relations.
- */
- case object Inner extends JoinType {
- val sqlString = "INNER JOIN"
- }
-
- /** A join behaviour family where a joined tuple is created even when a
- * tuple has no corresponding tuple in the other relation. The fields
- * populated by values of the other tuple will receive the NULL value.
- */
- abstract class Outer extends JoinType
-
- object Outer {
- /** An outer join behaviour where there will be at least on tuple for
- * every tuple in the left relation.
- */
- case object Left extends Outer {
- val sqlString = "LEFT OUTER JOIN"
- }
- /** An outer join behaviour where there will be at least on tuple for
- * every tuple in the right relation.
- */
- case object Right extends Outer {
- val sqlString = "RIGHT OUTER JOIN"
- }
- /** An outer join behaviour where there will be at least on tuple for
- * every tuple in both right and left relations.
- */
- case object Full extends Outer {
- val sqlString = "FULL OUTER JOIN"
- }
- }
-}
diff --git a/src/dbc/scala/dbc/statement/Jointure.scala b/src/dbc/scala/dbc/statement/Jointure.scala
deleted file mode 100644
index 74c871c..0000000
--- a/src/dbc/scala/dbc/statement/Jointure.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-/** A jointure between two relations. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Jointure extends Relation {
-
- /** The relation on the left part of the join. */
- def leftRelation: Relation
-
- /** The relation on the right part of the join. */
- def rightRelation: Relation
-
- /** The type of the jointure. */
- def joinType: JoinType
-
- /** The condition on which the jointure needs be done. */
- def joinCondition: Option[Expression]
-
- /** A SQL-99 compliant string representation of the relation statement. */
- def sqlString: String = "SELECT * FROM " + sqlInnerString
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside a query.
- */
- def sqlInnerString: String =
- leftRelation.sqlInnerString + " " +
- joinType.sqlString + " " +
- rightRelation.sqlInnerString +
- (joinCondition match {
- case Some(jc) => jc.sqlString
- case None => ""
- })
-
-}
diff --git a/src/dbc/scala/dbc/statement/Relation.scala b/src/dbc/scala/dbc/statement/Relation.scala
deleted file mode 100644
index 787707e..0000000
--- a/src/dbc/scala/dbc/statement/Relation.scala
+++ /dev/null
@@ -1,55 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement;
-
-
-/** A statement that returns a relation. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Relation extends Statement {
-
- def isCompatibleType: (DataType,DataType)=>Boolean =
- ((dt,wdt)=>dt.isSubtypeOf(wdt));
-
- def typeCheck (relation: result.Relation): Unit = {
- val sameType: Boolean = (
- relation.metadata.length == fieldTypes.length &&
- (relation.metadata.zip(fieldTypes).forall({case Pair(field,expectedType) =>
- isCompatibleType(field.datatype, expectedType)}))
- );
- if (!sameType)
- throw new exception.IncompatibleSchema(fieldTypes,relation.metadata.map(field=>field.datatype));
- }
-
- def fieldTypes: List[DataType];
-
- def sqlTypeString: String =
- if (fieldTypes.isEmpty)
- "UNTYPED"
- else
- fieldTypes.map(dt=>dt.sqlString).mkString("RELATION (",", ",")");
-
- /** A SQL-99 compliant string representation of the statement. */
- def sqlString: String;
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String;
-
- /** Executes the statement on the given database. */
- def execute (database: scala.dbc.Database): scala.dbc.result.Relation = {
- database.executeStatement(this);
- }
-
- def execute (database:scala.dbc.Database, debug:Boolean): scala.dbc.result.Relation = {
- database.executeStatement(this,debug);
- }
-
-}
diff --git a/src/dbc/scala/dbc/statement/Select.scala b/src/dbc/scala/dbc/statement/Select.scala
deleted file mode 100644
index a9ca021..0000000
--- a/src/dbc/scala/dbc/statement/Select.scala
+++ /dev/null
@@ -1,99 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-/** A statement that when executed on a database will return a relation.
- * The returned relation will be a subset of a table in the database or
- * a jointure between such subsets. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Select extends Relation {
-
- /** Defines if duplicated tuples should be removed from the returned
- * relation. <h3>Compatibility notice</h3> Some DBMS (PostgreSQL) allow
- * uniqueness constrains on an arbitrary field instead of the entire
- * tuple. */
- def setQuantifier: Option[SetQuantifier]
-
- /** Defines the output fields that a tuple in the returned relation will
- * contain, and their content with respect to the tables in the
- * database. If the fields are not specified (that is the list is
- * empty), all possible input fields will be returned. <h3>Compatibility
- * notice</h3> SQL's qualified asterisk select sublist is not
- * available. */
- def selectList: List[DerivedColumn]
-
- /** Defines the relations from which the query will obtain its data.*/
- def fromClause: List[Relation]
-
- /** Defines condition that must be true in the returned relation's tuples.
- * This value expression must return a boolean or boolean-compatible
- * value. This condition is applied before any GROUP BY clause.
- */
- def whereClause: Option[Expression]
-
- /** Defines the grouping of the returned relation's tuples. One tuple is
- * returned for every group. The value of <code>selectList</code> must
- * use aggregate functions for calculation.
- */
- def groupByClause: Option[List[Expression]]
-
- /** Defines conditions that must be true in the returned relation's tuples.
- * The value expression must return a boolean can only refer to fields
- * that are grouped or to any field from inside an aggregate function.
- */
- def havingClause: Option[Expression]
-
- /* def windowClause: Option[_]; */
-
- /** A SQL-99 compliant string representation of the select statement. */
- def sqlString: String = (
- "SELECT" +
- (setQuantifier match {
- case None => ""
- case Some(sq) => " " + sq.sqlString
- }) +
- (selectList match {
- case Nil => " *"
- case _ => (" " + selectList.tail.foldLeft(selectList.head.sqlString)
- ((name:String, dc:DerivedColumn) => name + ", " + dc.sqlString))
- }) +
- (fromClause match {
- case Nil => sys.error("Empty from clause is not allowed")
- case _ => (" FROM " + fromClause.tail.foldLeft(fromClause.head.sqlInnerString)
- ((name:String, rel:Relation) => name + ", " + rel.sqlInnerString))
- }) +
- (whereClause match {
- case None => ""
- case Some(expr) => " WHERE " + expr.sqlInnerString
- }) +
- (groupByClause match {
- case None => ""
- case Some(gbl) => gbl match {
- case Nil => sys.error("Empty group by clause is not allowed")
- case _ =>
- (" GROUP BY " +
- gbl.tail.foldLeft(gbl.head.sqlInnerString)
- ((name:String, gb) => name + ", " + gb.sqlInnerString))
- }
- }) +
- (havingClause match {
- case None => ""
- case Some(expr) => " HAVING " + expr.sqlString
- })
- );
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside a query.
- */
- def sqlInnerString: String = "("+sqlString+")"
-
-}
diff --git a/src/dbc/scala/dbc/statement/SetClause.scala b/src/dbc/scala/dbc/statement/SetClause.scala
deleted file mode 100644
index 3af509c..0000000
--- a/src/dbc/scala/dbc/statement/SetClause.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-import scala.dbc.statement.expression._
-
-/** Data to be inserted into a table in an <code>Insert</code>. */
- at deprecated(DbcIsDeprecated, "2.9.0") case class SetClause(name: String, expr: Expression) {
- val value: Pair[String,Expression] = (name, expr)
- def sqlString: String = value._1 + " = " + value._2.sqlInnerString
-}
diff --git a/src/dbc/scala/dbc/statement/SetQuantifier.scala b/src/dbc/scala/dbc/statement/SetQuantifier.scala
deleted file mode 100644
index 77a4b79..0000000
--- a/src/dbc/scala/dbc/statement/SetQuantifier.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-/** A set quantifier that defines the collection type of a relation. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class SetQuantifier {
- /** A SQL-99 compliant string representation of the set quantifier. */
- def sqlString: String
-}
-
- at deprecated(DbcIsDeprecated, "2.9.0") object SetQuantifier {
-
- /** A set quantifier that defines a relation as being a bag. That means
- * that duplicates are allowed.
- */
- case object AllTuples extends SetQuantifier {
- /** A SQL-99 compliant string representation of the set quantifier. */
- def sqlString: String = "ALL"
- }
-
- /** A set quantifier that defines a relation as being a set. That means
- * that duplicates are not allowed and will be pruned.
- */
- case object DistinctTuples extends SetQuantifier {
- /** A SQL-99 compliant string representation of the set quantifier. */
- def sqlString: String = "DISTINCT"
- }
-}
diff --git a/src/dbc/scala/dbc/statement/Statement.scala b/src/dbc/scala/dbc/statement/Statement.scala
deleted file mode 100644
index fc53742..0000000
--- a/src/dbc/scala/dbc/statement/Statement.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-/** An ISO-9075:2003 (SQL) statement. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Statement
diff --git a/src/dbc/scala/dbc/statement/Status.scala b/src/dbc/scala/dbc/statement/Status.scala
deleted file mode 100644
index 0ce64b9..0000000
--- a/src/dbc/scala/dbc/statement/Status.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-import scala.dbc.Database
-import scala.dbc.result
-
-/** A statement that changes the status of the database. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Status extends Statement {
-
- /** A SQL-99 compliant string representation of the statement. */
- def sqlString: String
-
- /** Executes the statement on the given database. */
- def execute(database: Database): result.Status[Unit] = {
- database.executeStatement(this)
- }
-
- def execute(database: Database, debug: Boolean): result.Status[Unit] = {
- database.executeStatement(this, debug)
- }
-
-}
diff --git a/src/dbc/scala/dbc/statement/Table.scala b/src/dbc/scala/dbc/statement/Table.scala
deleted file mode 100644
index e729f80..0000000
--- a/src/dbc/scala/dbc/statement/Table.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-
-
-/** A reference to a table in the database.
- * @author Gilles Dubochet
- * @version 1.0 */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Table extends Relation {
-
- /** The name of the table in the database. */
- def tableName: String
-
- /** The name that the table will be called in the enclosing statement. */
- def tableRename: Option[String]
-
- /** A SQL-99 compliant string representation of the relation statement. */
- def sqlString: String = "SELECT * FROM " + tableName
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside a query. */
- def sqlInnerString: String =
- tableName +
- (tableRename match {
- case None => ""
- case Some(rename) => " AS " + rename
- })
-
-}
diff --git a/src/dbc/scala/dbc/statement/Transaction.scala b/src/dbc/scala/dbc/statement/Transaction.scala
deleted file mode 100644
index 1740dae..0000000
--- a/src/dbc/scala/dbc/statement/Transaction.scala
+++ /dev/null
@@ -1,55 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement;
-
-
-/** A statement that changes the status of the database. */
- at deprecated(DbcIsDeprecated, "2.9.0") case class Transaction [ResultType] (
- transactionBody: (scala.dbc.Database=>ResultType),
- accessMode: Option[AccessMode],
- isolationLevel: Option[IsolationLevel]
-) extends Statement {
-
- /** A SQL-99 compliant string representation of the statement. */
- def sqlStartString: String = (
- "START TRANSACTION" +
- (Pair(accessMode,isolationLevel) match {
- case Pair(None,None) => ""
- case Pair(Some(am),None) => " " + am.sqlString
- case Pair(None,Some(il)) => " " + il.sqlString
- case Pair(Some(am),Some(il)) => " " + am.sqlString + ", " + il.sqlString
- })
- );
-
- def sqlCommitString: String = {
- "COMMIT"
- }
-
- def sqlAbortString: String = {
- "ROLLBACK"
- }
-
- //def transactionBody: (()=>Unit);
-
- //def accessMode: Option[AccessMode];
-
- //def isolationLevel: Option[IsolationLevel];
-
- def execute (database: scala.dbc.Database): scala.dbc.result.Status[ResultType] = {
- database.executeStatement(this);
- }
-
- def execute (database: scala.dbc.Database, debug: Boolean): scala.dbc.result.Status[ResultType] = {
- database.executeStatement(this,debug);
- }
-
-}
diff --git a/src/dbc/scala/dbc/statement/Update.scala b/src/dbc/scala/dbc/statement/Update.scala
deleted file mode 100644
index 836549a..0000000
--- a/src/dbc/scala/dbc/statement/Update.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement;
-
-
-import scala.dbc.statement.expression._;
-
-/** An update of the state of a table. */
- at deprecated(DbcIsDeprecated, "2.9.0") case class Update (
- updateTarget: String,
- setClauses: List[SetClause],
- whereClause: Option[Expression]
-) extends Status {
-
-
- /** A SQL-99 compliant string representation of the select statement. */
- def sqlString: String = (
- "UPDATE " +
- updateTarget +
- " SET " + setClauses.map(sc=>sc.sqlString).mkString("",", ","") +
- (whereClause match {
- case None => ""
- case Some(expr) => " WHERE " + expr.sqlString
- })
- );
-
- /** The name of the table that should be updated. */
- //def updateTarget: String;
-
- /** The data that will be added tot he table. */
- //def setClauses: List[SetClause];
-
- /** Defines condition that must be true in the tuples that will be updated.
- * This value expression must return a boolean or boolean-compatible
- * value. */
- //def whereClause: Option[scala.dbc.statement.expression.Expression];
-
-}
diff --git a/src/dbc/scala/dbc/statement/expression/Aggregate.scala b/src/dbc/scala/dbc/statement/expression/Aggregate.scala
deleted file mode 100644
index c42bffe..0000000
--- a/src/dbc/scala/dbc/statement/expression/Aggregate.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression
-
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Aggregate extends Expression {
-
- def aggregateName: String;
-
- def setFunction: SetFunction;
-
- def filterClause: Option[Expression];
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = (
- aggregateName +
- "(" + setFunction.sqlString + ")" +
- (filterClause match {
- case None => ""
- case Some(fc) => " FILTER (WHERE " + fc.sqlString + ")"
- })
- )
-
-}
diff --git a/src/dbc/scala/dbc/statement/expression/BinaryOperator.scala b/src/dbc/scala/dbc/statement/expression/BinaryOperator.scala
deleted file mode 100644
index 32f016d..0000000
--- a/src/dbc/scala/dbc/statement/expression/BinaryOperator.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class BinaryOperator extends Expression {
-
- /** The name of the operator. */
- def operator: String;
-
- /** The expression applied on the left of the operator. */
- def leftOperand: Expression;
-
- /** The expression applied on the right of the operator. */
- def rightOperand: Expression;
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = {
- leftOperand.sqlInnerString + " " + operator + " " + rightOperand.sqlInnerString
- }
-
-}
diff --git a/src/dbc/scala/dbc/statement/expression/Constant.scala b/src/dbc/scala/dbc/statement/expression/Constant.scala
deleted file mode 100644
index 70ec781..0000000
--- a/src/dbc/scala/dbc/statement/expression/Constant.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Constant extends Expression {
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = constantValue.sqlString;
-
- /** The value of the constant. */
- def constantValue: Value;
-}
diff --git a/src/dbc/scala/dbc/statement/expression/Default.scala b/src/dbc/scala/dbc/statement/expression/Default.scala
deleted file mode 100644
index 78204d0..0000000
--- a/src/dbc/scala/dbc/statement/expression/Default.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
-case object Default extends Expression {
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = "DEFAULT";
-
-}
diff --git a/src/dbc/scala/dbc/statement/expression/Field.scala b/src/dbc/scala/dbc/statement/expression/Field.scala
deleted file mode 100644
index 9a90903..0000000
--- a/src/dbc/scala/dbc/statement/expression/Field.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Field extends Expression {
-
- /** The name of the schema in the database where the field is located. */
- def schemaName: Option[String] = None;
-
- /** The name of the table in the database where the field is located. */
- def tableName: Option[String];
-
- /** The name of the field in the database. */
- def fieldName: String;
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = (
- (schemaName match {
- case None => ""
- case Some(sn) => sn + "."
- }) +
- (tableName match {
- case None => ""
- case Some(tn) => tn + "."
- }) + fieldName
- )
-
-}
diff --git a/src/dbc/scala/dbc/statement/expression/FunctionCall.scala b/src/dbc/scala/dbc/statement/expression/FunctionCall.scala
deleted file mode 100644
index 962cf20..0000000
--- a/src/dbc/scala/dbc/statement/expression/FunctionCall.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
- at deprecated(DbcIsDeprecated, "2.9.0") case class FunctionCall (
- functionName: String,
- arguments: List[Expression]
-) extends Expression {
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = {
- functionName + "(" + arguments.mkString("",", ","") + ")"
- }
-
- /** The name of the function to call. */
- //def functionName: String;
-
- /** A list of all argument expressions to pass to the function, in order. */
- //def arguments: List[Expression];
-
-}
diff --git a/src/dbc/scala/dbc/statement/expression/Select.scala b/src/dbc/scala/dbc/statement/expression/Select.scala
deleted file mode 100644
index 7a6a4a2..0000000
--- a/src/dbc/scala/dbc/statement/expression/Select.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Select extends Expression {
-
- /** The actual select statement */
- def selectStatement: statement.Select;
-
- /** A SQL-99 compliant string representation of the expression. */
- override def sqlString: String = selectStatement.sqlString;
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = "("+selectStatement.sqlString+")";
-
-}
diff --git a/src/dbc/scala/dbc/statement/expression/SetFunction.scala b/src/dbc/scala/dbc/statement/expression/SetFunction.scala
deleted file mode 100644
index 060b223..0000000
--- a/src/dbc/scala/dbc/statement/expression/SetFunction.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class SetFunction {
- /** A SQL-99 compliant string representation of the set quantifier. */
- def sqlString: String;
-}
-
- at deprecated(DbcIsDeprecated, "2.9.0") object SetFunction {
- abstract class Asterisk extends SetFunction {
- def sqlString = "(*)";
- }
- abstract class General extends SetFunction {
- def setQuantifier: Option[SetQuantifier];
- def valueExpression: Expression;
- def sqlString = (
- "(" +
- (setQuantifier match {
- case None => ""
- case Some(sq) => sq.sqlString + " "
- }) +
- valueExpression.sqlString + ")"
- );
- }
- abstract class Binary extends SetFunction {
- def sqlString = sys.error("Binary set function is not supported yet.");
- }
-}
diff --git a/src/dbc/scala/dbc/statement/expression/TypeCast.scala b/src/dbc/scala/dbc/statement/expression/TypeCast.scala
deleted file mode 100644
index dbb8dc1..0000000
--- a/src/dbc/scala/dbc/statement/expression/TypeCast.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
- at deprecated(DbcIsDeprecated, "2.9.0") case class TypeCast (
- expression: Expression,
- castType: DataType
-) extends Expression {
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = {
- "CAST (" + expression.sqlInnerString + " AS " + castType.sqlString + ")";
- }
-
- /** The expression that will be casted. */
- //def expression: Expression;
-
- /** The type to which to cast. */
- //def castType: scala.dbc.datatype.DataType;
-}
diff --git a/src/dbc/scala/dbc/statement/expression/UnaryOperator.scala b/src/dbc/scala/dbc/statement/expression/UnaryOperator.scala
deleted file mode 100644
index 4172c45..0000000
--- a/src/dbc/scala/dbc/statement/expression/UnaryOperator.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package statement
-package expression;
-
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class UnaryOperator extends Expression {
-
- /** The name of the operator */
- def operator: String;
-
- /** Whether the operator comes before the operand or not. */
- def operatorIsLeft: Boolean;
-
- /** The operand applied to the operator. */
- def operand: Expression;
-
- /** A SQL-99 compliant string representation of the relation sub-
- * statement. This only has a meaning inside another statement. */
- def sqlInnerString: String = operatorIsLeft match {
- case true => operator + " " + operand.sqlInnerString;
- case false => operand.sqlInnerString + " " + operator;
- }
-}
diff --git a/src/dbc/scala/dbc/syntax/DataTypeUtil.scala b/src/dbc/scala/dbc/syntax/DataTypeUtil.scala
deleted file mode 100644
index a0ebd17..0000000
--- a/src/dbc/scala/dbc/syntax/DataTypeUtil.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package syntax;
-
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-
- at deprecated(DbcIsDeprecated, "2.9.0") object DataTypeUtil {
-
- final val java_lang_Integer_SIZE = 32;
- final val java_lang_Long_SIZE = 64;
-
- def boolean = new datatype.Boolean;
- def tinyint = new datatype.ExactNumeric[Byte](dbc.DataType.BYTE) {
- val precisionRadix = 2;
- val precision = 8;
- val signed = true;
- val scale = 0;
- }
- def smallint = new datatype.ExactNumeric[Short](dbc.DataType.SHORT) {
- val precisionRadix = 2;
- val precision = 16;
- val signed = true;
- val scale = 0;
- }
- def integer = new datatype.ExactNumeric[Int](dbc.DataType.INT) {
- val precisionRadix = 2;
- val precision = 32;
- val signed = true;
- val scale = 0;
- }
- def bigint = new datatype.ExactNumeric[Long](dbc.DataType.LONG) {
- val precisionRadix = 2;
- val precision = 64;
- val signed = true;
- val scale = 0;
- }
- def numeric (_precision:Int): DataType = numeric(_precision,0);
- def numeric (_precision:Int, _scale:Int): DataType =
- Pair(datatype.Factory.bytePrecision(_precision,true,true),_scale == 0) match {
- case Pair(bp,true) if (bp <= java_lang_Integer_SIZE) =>
- new datatype.ExactNumeric[Int](DataType.INT) {
- val precisionRadix = 10;
- val precision = _precision;
- val signed = true;
- val scale = 0;
- }
- case Pair(bp,true) if (bp <= java_lang_Long_SIZE) =>
- new datatype.ExactNumeric[Long](DataType.LONG) {
- val precisionRadix = 10;
- val precision = _precision;
- val signed = true;
- val scale = 0;
- }
- case Pair(_,true) =>
- new datatype.ExactNumeric[BigInteger](DataType.BIG_INTEGER) {
- val precisionRadix = 10;
- val precision = _precision;
- val signed = true;
- val scale = 0;
- }
- case Pair(_,false) =>
- new datatype.ExactNumeric[BigDecimal](DataType.BIG_DECIMAL) {
- val precisionRadix = 10;
- val precision = _precision;
- val signed = true;
- val scale = _scale;
- }
- }
- def real = new datatype.ApproximateNumeric[Float](DataType.FLOAT) {
- val precisionRadix = 2;
- val precision = 64;
- val signed = true;
- }
- def doublePrecision = new datatype.ApproximateNumeric[Double](DataType.DOUBLE) {
- val precisionRadix = 2;
- val precision = 128;
- val signed = true;
- }
- def character (_length: Int) = new datatype.Character {
- val length = _length;
- }
- def characterVarying (_length: Int) = new datatype.CharacterVarying {
- def length = _length;
- }
- def characterLargeObject = new datatype.CharacterLargeObject;
-
-}
diff --git a/src/dbc/scala/dbc/syntax/Database.scala b/src/dbc/scala/dbc/syntax/Database.scala
deleted file mode 100644
index 4357fb7..0000000
--- a/src/dbc/scala/dbc/syntax/Database.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package syntax;
-
-
-import java.net.URI;
-
- at deprecated(DbcIsDeprecated, "2.9.0") object Database {
-
- def database (server:String, username:String, password:String): dbc.Database = {
- val uri = new URI(server);
- // Java 1.5 if (uri.toString().contains("postgres")) {
- if (uri.toString().indexOf("postgres") != -1) {
- new dbc.Database(new vendor.PostgreSQL {
- val uri = new URI(server);
- val user = username;
- val pass = password;
- })
- } else {
- throw new Exception("No DBMS vendor support could be found for the given URI");
- }
- }
-
-}
diff --git a/src/dbc/scala/dbc/syntax/Statement.scala b/src/dbc/scala/dbc/syntax/Statement.scala
deleted file mode 100644
index baccbfa..0000000
--- a/src/dbc/scala/dbc/syntax/Statement.scala
+++ /dev/null
@@ -1,274 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package syntax;
-
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-
-import StatementExpression._;
-
-/*
-
-ASSUMPTIONS:
-
-IMPROVABLE:
-For type safety, all types must be defined. If one is missing, none is taken into account.
-It is possible to redefine many types or renamings for a field, in that case,
- only the last one is taken into account ("a" as "b" as "c" of boolean as "e" of integer
- is equivalent to "a" as "e" of integer).
-
-FIXED:
-
-*/
-
- at deprecated(DbcIsDeprecated, "2.9.0") object Statement {
-
- // SELECT ZYGOTE ...
-
- def select: SelectZygote = new SelectZygote {
- val setQuantifier = None;
- }
- def selectBag: SelectZygote = new SelectZygote {
- val setQuantifier = Some(statement.SetQuantifier.AllTuples);
- }
- def selectSet: SelectZygote = new SelectZygote {
- val setQuantifier = Some(statement.SetQuantifier.DistinctTuples);
- }
-
- abstract class SelectZygote {
- def setQuantifier: Option[statement.SetQuantifier];
- def fields (sdc:SelectDerivedColumns): SelectOf = new SelectOf {
- val setQuantifier = SelectZygote.this.setQuantifier;
- val selectList = sdc.selectList;
- val selectTypes = sdc.selectTypes;
- }
- }
-
- abstract class SelectDerivedField {
- def fieldValue: StatementField;
- def fieldRename: Option[String] = {val x = None; x}
- def fieldType: Option[dbc.DataType] = {val x = None; x}
- def as (rename:String): SelectDerivedField = new SelectDerivedField {
- val fieldValue = SelectDerivedField.this.fieldValue;
- override val fieldRename = Some(rename);
- override val fieldType = SelectDerivedField.this.fieldType;
- }
- def of (datatype:dbc.DataType): SelectDerivedField = new SelectDerivedField {
- val fieldValue = SelectDerivedField.this.fieldValue;
- override val fieldRename = SelectDerivedField.this.fieldRename;
- override val fieldType = Some(datatype);
- }
- }
-
- implicit def statementFieldToSelectDerivedField (fv:StatementField): SelectDerivedField = new SelectDerivedField {
- val fieldValue = fv;
- }
-
- implicit def stringToSelectDerivedField (fv:String): SelectDerivedField = new SelectDerivedField {
- val fieldValue: StatementField = StatementExpression.stringToStatementField(fv);
- }
-
- abstract class SelectDerivedColumns {
- def selectList: List[statement.DerivedColumn];
- def selectTypes: List[DataType];
- def and (sdc:SelectDerivedColumns): SelectDerivedColumns = new SelectDerivedColumns {
- val selectList = SelectDerivedColumns.this.selectList ::: sdc.selectList;
- val selectTypes =
- if (SelectDerivedColumns.this.selectTypes.isEmpty | sdc.selectTypes.isEmpty)
- Nil
- else
- SelectDerivedColumns.this.selectTypes ::: sdc.selectTypes;
- }
- }
-
- implicit def selectDerivedFieldToSelectDerivedColumns (sdf:SelectDerivedField): SelectDerivedColumns = new SelectDerivedColumns {
- val selectList = List(new statement.DerivedColumn {
- val valueExpression = sdf.fieldValue.toStatement;
- val asClause = sdf.fieldRename;
- });
- val selectTypes = if (sdf.fieldType.isEmpty) Nil else List(sdf.fieldType.get);
- }
-
- implicit def stringToSelectDerivedColumns (sdfs:String): SelectDerivedColumns = {
- val sdf: SelectDerivedField = sdfs;
- selectDerivedFieldToSelectDerivedColumns(sdf);
- }
-
- // SELECT OF ...
-
- abstract class SelectOf {
- def setQuantifier: Option[statement.SetQuantifier];
- def selectList: List[statement.DerivedColumn];
- def selectTypes: List[DataType];
- def from (sst:SelectSourceTables): SelectBeyond = new SelectBeyond {
- val setQuantifier = SelectOf.this.setQuantifier;
- val selectList = SelectOf.this.selectList;
- val selectTypes = SelectOf.this.selectTypes;
- val fromClause = sst.fromClause;
- val whereClause = None;
- val groupByClause = None;
- val havingClause = None;
- }
- }
-
- abstract class SelectSourceTable {
- def fromRelation: statement.Relation;
- def innerJoin (sst: SelectSourceTable): SelectSourceTable = new SelectSourceTable {
- val fromRelation = new statement.Jointure {
- val leftRelation = SelectSourceTable.this.fromRelation;
- val rightRelation = sst.fromRelation;
- val joinType = statement.JoinType.Inner;
- val joinCondition = None;
- val fieldTypes = leftRelation.fieldTypes ::: rightRelation.fieldTypes;
- }
- }
- def leftOuterJoin (sst: SelectSourceTable): SelectSourceTable = new SelectSourceTable {
- val fromRelation = new statement.Jointure {
- val leftRelation = SelectSourceTable.this.fromRelation;
- val rightRelation = sst.fromRelation;
- val joinType = statement.JoinType.Outer.Left;
- val joinCondition = None;
- val fieldTypes = leftRelation.fieldTypes ::: rightRelation.fieldTypes;
- }
- }
- def rightOuterJoin (sst: SelectSourceTable): SelectSourceTable = new SelectSourceTable {
- val fromRelation = new statement.Jointure {
- val leftRelation = SelectSourceTable.this.fromRelation;
- val rightRelation = sst.fromRelation;
- val joinType = statement.JoinType.Outer.Right;
- val joinCondition = None;
- val fieldTypes = leftRelation.fieldTypes ::: rightRelation.fieldTypes;
- }
- }
- def fullOuterJoin (sst: SelectSourceTable): SelectSourceTable = new SelectSourceTable {
- val fromRelation = new statement.Jointure {
- val leftRelation = SelectSourceTable.this.fromRelation;
- val rightRelation = sst.fromRelation;
- val joinType = statement.JoinType.Outer.Full;
- val joinCondition = None;
- val fieldTypes = leftRelation.fieldTypes ::: rightRelation.fieldTypes;
- }
- }
- }
-
- implicit def stringToSelectSourceTable (sct:String): SelectSourceTable = new SelectSourceTable {
- val fromRelation = new statement.Table {
- val tableName = sct;
- val tableRename = None;
- val fieldTypes = Nil;
- }
- }
-
- implicit def selectToSelectSourceTable (sct:statement.Select): SelectSourceTable = new SelectSourceTable {
- val fromRelation = sct;
- }
-
- abstract class SelectSourceTables {
- def fromClause: List[statement.Relation];
- def join (sct:SelectSourceTable): SelectSourceTables = new SelectSourceTables {
- val fromClause = SelectSourceTables.this.fromClause ::: List(sct.fromRelation);
- }
- }
-
- implicit def stringToSelectSourceTables (sct:String): SelectSourceTables = new SelectSourceTables {
- val fromClause = List(new statement.Table {
- val tableName = sct;
- val tableRename = None;
- val fieldTypes = Nil;
- });
- }
-
- implicit def selectToSelectSourceTables (sct:statement.Select): SelectSourceTables = new SelectSourceTables {
- val fromClause = List(sct);
- }
-
- implicit def selectSourceTableToSelectSourceTables (sct:SelectSourceTable): SelectSourceTables = new SelectSourceTables {
- val fromClause = List(sct.fromRelation);
- }
-
- // SELECT BEYOND ...
-
- abstract class SelectBeyond {
- def setQuantifier: Option[statement.SetQuantifier];
- def selectList: List[statement.DerivedColumn];
- def selectTypes: List[DataType];
- def fromClause: List[statement.Relation];
- def whereClause: Option[statement.Expression];
- def groupByClause: Option[List[statement.Expression]];
- def havingClause: Option[statement.Expression];
- def where (se:StatementExpression): SelectBeyond = new SelectBeyond {
- val setQuantifier = SelectBeyond.this.setQuantifier;
- val selectList = SelectBeyond.this.selectList;
- val selectTypes = SelectBeyond.this.selectTypes;
- val fromClause = SelectBeyond.this.fromClause;
- val whereClause = Some(se.toStatement);
- val groupByClause = SelectBeyond.this.groupByClause;
- val havingClause = SelectBeyond.this.havingClause;
- }
- def groupBy (sgb:SelectGroupBy): SelectBeyond = new SelectBeyond {
- val setQuantifier = SelectBeyond.this.setQuantifier;
- val selectList = SelectBeyond.this.selectList;
- val selectTypes = SelectBeyond.this.selectTypes;
- val fromClause = SelectBeyond.this.fromClause;
- val whereClause = SelectBeyond.this.whereClause;
- val groupByClause = Some(sgb.groupByClause);
- val havingClause = SelectBeyond.this.havingClause;
- }
- def having (se:StatementExpression): SelectBeyond = new SelectBeyond {
- val setQuantifier = SelectBeyond.this.setQuantifier;
- val selectList = SelectBeyond.this.selectList;
- val selectTypes = SelectBeyond.this.selectTypes;
- val fromClause = SelectBeyond.this.fromClause;
- val whereClause = SelectBeyond.this.whereClause;
- val groupByClause = SelectBeyond.this.groupByClause;
- val havingClause = Some(se.toStatement);
- }
- }
-
- implicit def selectBeyondToStatementSelect (sb:SelectBeyond): statement.Select = new statement.Select {
- val setQuantifier = sb.setQuantifier;
- val selectList = sb.selectList;
- val fromClause = sb.fromClause;
- val whereClause = sb.whereClause;
- val groupByClause = sb.groupByClause;
- val havingClause = sb.havingClause;
- val fieldTypes = sb.selectTypes;
- }
-
- abstract class SelectGroupBy {
- def groupByClause: List[statement.Expression];
- def then (se:StatementExpression): SelectGroupBy = new SelectGroupBy {
- val groupByClause =
- SelectGroupBy.this.groupByClause ::: List(se.toStatement);
- }
- def then (se:String): SelectGroupBy = new SelectGroupBy {
- val groupByClause =
- SelectGroupBy.this.groupByClause ::: List(new statement.expression.Field {
- val tableName = None;
- val fieldName = se;
- });
- }
- }
-
- implicit def statementExpressionToSelectGroupBy (se:StatementExpression): SelectGroupBy = new SelectGroupBy {
- val groupByClause = List(se.toStatement);
- }
-
- implicit def stringToSelectGroupBy (se:String): SelectGroupBy = new SelectGroupBy {
- val groupByClause = List(new statement.expression.Field {
- val tableName = None;
- val fieldName = se;
- });
- }
-
-}
diff --git a/src/dbc/scala/dbc/syntax/StatementExpression.scala b/src/dbc/scala/dbc/syntax/StatementExpression.scala
deleted file mode 100644
index 65bb094..0000000
--- a/src/dbc/scala/dbc/syntax/StatementExpression.scala
+++ /dev/null
@@ -1,221 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package syntax;
-
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class StatementExpression {
-
- def toStatement: statement.Expression;
-
- def and (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "AND";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def or (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "OR";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def == (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "=";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def < (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "<";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def > (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = ">";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def <= (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "<=";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def >= (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = ">=";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def <> (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "<>";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def isNull: StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.UnaryOperator {
- val operator = "IS NULL";
- val operatorIsLeft = false;
- val operand = StatementExpression.this.toStatement;
- }
- }
- def isNotNull: StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.UnaryOperator {
- val operator = "IS NOT NULL";
- val operatorIsLeft = false;
- val operand = StatementExpression.this.toStatement;
- }
- }
- def + (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "+";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def - (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "-";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def * (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "*";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def / (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "/";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def % (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "%";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def ^ (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "^";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def not : StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.UnaryOperator {
- val operator = "!";
- val operatorIsLeft = false;
- val operand = StatementExpression.this.toStatement;
- }
- }
- def || (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "||";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def like (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "LIKE";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def similar (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "SIMILAR";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = se.toStatement;
- }
- }
- def in (se:statement.Select): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.BinaryOperator {
- val operator = "IN";
- val leftOperand = StatementExpression.this.toStatement;
- val rightOperand = new statement.expression.Select {
- val selectStatement = se;
- };
- }
- }
-
-}
-
- at deprecated(DbcIsDeprecated, "2.9.0") object StatementExpression {
-
- def not (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.UnaryOperator {
- val operator = "NOT";
- val operatorIsLeft = true;
- val operand = se.toStatement;
- }
- }
- def abs (se:StatementExpression): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.UnaryOperator {
- val operator = "@";
- val operatorIsLeft = true;
- val operand = se.toStatement;
- }
- }
- def exists (se:statement.Select): StatementExpression = new StatementExpression {
- val toStatement = new statement.expression.UnaryOperator {
- val operator = "EXISTS";
- val operatorIsLeft = true;
- val operand = new statement.expression.Select {
- val selectStatement = se;
- };
- }
- }
-
- abstract class StatementField extends StatementExpression {
- def fieldName: String;
- def tableName: Option[String] = None;
- def in (tn:String): StatementField = new StatementField {
- val fieldName = StatementField.this.fieldName;
- override val tableName = Some(tn);
- }
- def toStatement: statement.expression.Field = new statement.expression.Field {
- override val schemaName = None;
- val tableName = StatementField.this.tableName;
- val fieldName = StatementField.this.fieldName;
- }
- }
-
- implicit def stringToStatementField (ef:String): StatementField = new StatementField {
- val fieldName = ef;
- }
-
-
-
-
-}
diff --git a/src/dbc/scala/dbc/value/ApproximateNumeric.scala b/src/dbc/scala/dbc/value/ApproximateNumeric.scala
deleted file mode 100644
index fa47d88..0000000
--- a/src/dbc/scala/dbc/value/ApproximateNumeric.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class ApproximateNumeric [Type] extends Value {
-
- val dataType: datatype.ApproximateNumeric[Type];
-
- def sqlString = nativeValue.toString();
-
- }
-
- at deprecated(DbcIsDeprecated, "2.9.0") object ApproximateNumeric {
-
- implicit def approximateNumericToFloar (obj:value.ApproximateNumeric[Float]): Float = obj.nativeValue;
- implicit def approximateNumericToDouble (obj:value.ApproximateNumeric[Double]): Double = obj.nativeValue;
-
-}
diff --git a/src/dbc/scala/dbc/value/Boolean.scala b/src/dbc/scala/dbc/value/Boolean.scala
deleted file mode 100644
index 5221ce2..0000000
--- a/src/dbc/scala/dbc/value/Boolean.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Boolean extends Value {
-
- val dataType: datatype.Boolean;
-
- def sqlString = if (nativeValue) "TRUE" else "FALSE";
-
-}
-
- at deprecated(DbcIsDeprecated, "2.9.0") object Boolean {
-
- implicit def booleanToBoolean (obj:value.Boolean): scala.Boolean = obj.nativeValue;
-
-}
diff --git a/src/dbc/scala/dbc/value/Character.scala b/src/dbc/scala/dbc/value/Character.scala
deleted file mode 100644
index 4ff983c..0000000
--- a/src/dbc/scala/dbc/value/Character.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
-/** A SQL-99 value of type character string. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Character extends Value {
-
- override val dataType: datatype.Character;
-
- /** An SQL-99 compliant string representation of the value. */
- def sqlString: String = {
- "'" + nativeValue + "'"
- }
-
-}
-
-/** An object offering transformation methods (views) on the value.
- * This object must be visible in an expression to use value auto-
- * conversion. */
- at deprecated(DbcIsDeprecated, "2.9.0") object Character {
-
- /** A character string value as a native string. */
- implicit def characterToString (obj:value.Character): String = obj.nativeValue;
-
-}
diff --git a/src/dbc/scala/dbc/value/CharacterLargeObject.scala b/src/dbc/scala/dbc/value/CharacterLargeObject.scala
deleted file mode 100644
index b9e81eb..0000000
--- a/src/dbc/scala/dbc/value/CharacterLargeObject.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
-/** A SQL-99 value of type character large object. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class CharacterLargeObject extends Value {
-
- override val dataType: datatype.CharacterLargeObject;
-
- /** An SQL-99 compliant string representation of the value. */
- def sqlString: String = {
- "'" + nativeValue + "'"
- }
-
-}
-
-/** An object offering transformation methods (views) on the value.
- * This object must be visible in an expression to use value auto-
- * conversion. */
- at deprecated(DbcIsDeprecated, "2.9.0") object CharacterLargeObject {
-
- /** A character large object value as a native string. */
- implicit def characterLargeObjectToString (obj:value.CharacterLargeObject): String = obj.nativeValue;
-
-}
diff --git a/src/dbc/scala/dbc/value/CharacterVarying.scala b/src/dbc/scala/dbc/value/CharacterVarying.scala
deleted file mode 100644
index 72e7d06..0000000
--- a/src/dbc/scala/dbc/value/CharacterVarying.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
-/** A SQL-99 value of type character varying string. */
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class CharacterVarying extends Value {
-
- override val dataType: datatype.CharacterVarying;
-
- /** An SQL-99 compliant string representation of the value. */
- def sqlString: String = {
- "'" + nativeValue + "'"
- }
-
-}
-
-/** An object offering transformation methods (views) on the value.
- * This object must be visible in an expression to use value auto-
- * conversion. */
- at deprecated(DbcIsDeprecated, "2.9.0") object CharacterVarying {
-
- /** A character varying string value as a native string. */
- implicit def characterVaryingToString (obj:value.CharacterVarying): String = obj.nativeValue;
-
-}
diff --git a/src/dbc/scala/dbc/value/Conversion.scala b/src/dbc/scala/dbc/value/Conversion.scala
deleted file mode 100644
index c9297e3..0000000
--- a/src/dbc/scala/dbc/value/Conversion.scala
+++ /dev/null
@@ -1,156 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-import java.math._;
-
- at deprecated(DbcIsDeprecated, "2.9.0") object Conversion {
-
- class Illegal (msg:String) extends Exception(msg);
-
- implicit def view1 (value:Value): Byte = {
- if (value.dataType.nativeTypeId == DataType.BYTE) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Byte]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to byte: "+value)
- }
- }
-
- implicit def view2 (value:Value): Short = {
- if (value.dataType.nativeTypeId == DataType.BYTE) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Byte]];
- v.nativeValue.toShort
- } else if (value.dataType.nativeTypeId == DataType.SHORT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Short]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to short: "+value)
- }
- }
-
- implicit def view3 (value:Value): Int = {
- if (value.dataType.nativeTypeId == DataType.BYTE) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Byte]];
- v.nativeValue.toInt
- } else if (value.dataType.nativeTypeId == DataType.SHORT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Short]];
- v.nativeValue.toInt
- } else if (value.dataType.nativeTypeId == DataType.INT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Int]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to int: "+value)
- }
- }
-
- implicit def view4 (value:Value): Long = {
- if (value.dataType.nativeTypeId == DataType.BYTE) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Byte]];
- v.nativeValue.toLong
- } else if (value.dataType.nativeTypeId == DataType.SHORT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Short]];
- v.nativeValue.toLong
- } else if (value.dataType.nativeTypeId == DataType.INT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Int]];
- v.nativeValue.toLong
- } else if (value.dataType.nativeTypeId == DataType.LONG) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Long]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to long: "+value)
- }
- }
-
- implicit def view5 (value:Value): BigInteger = {
- if (value.dataType.nativeTypeId == DataType.BYTE) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Byte]];
- new BigInteger(v.nativeValue.toString(),10)
- } else if (value.dataType.nativeTypeId == DataType.SHORT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Short]];
- new BigInteger(v.nativeValue.toString(),10)
- } else if (value.dataType.nativeTypeId == DataType.INT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Int]];
- new BigInteger(v.nativeValue.toString(),10)
- } else if (value.dataType.nativeTypeId == DataType.LONG) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Long]];
- new BigInteger(v.nativeValue.toString(),10)
- } else if (value.dataType.nativeTypeId == DataType.BIG_INTEGER) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[BigInteger]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to big integer: "+value)
- }
- }
-
- implicit def view6 (value:Value): BigDecimal = {
- if (value.dataType.nativeTypeId == DataType.BYTE) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Byte]];
- new BigDecimal(v.nativeValue.toString())
- } else if (value.dataType.nativeTypeId == DataType.SHORT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Short]];
- new BigDecimal(v.nativeValue.toString())
- } else if (value.dataType.nativeTypeId == DataType.INT) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Int]];
- new BigDecimal(v.nativeValue.toString())
- } else if (value.dataType.nativeTypeId == DataType.LONG) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[Long]];
- new BigDecimal(v.nativeValue.toString())
- } else if (value.dataType.nativeTypeId == DataType.BIG_INTEGER) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[BigInteger]];
- new BigDecimal(v.nativeValue)
- } else if (value.dataType.nativeTypeId == DataType.BIG_DECIMAL) {
- val v = value.asInstanceOf[dbc.value.ExactNumeric[BigDecimal]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to big decimal: "+value)
- }
- }
-
- implicit def view7 (value:Value): Float = {
- if (value.dataType.nativeTypeId == DataType.FLOAT) {
- val v = value.asInstanceOf[dbc.value.ApproximateNumeric[Float]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to float: "+value)
- }
- }
-
- implicit def view8 (value:Value): Double = {
- if (value.dataType.nativeTypeId == DataType.FLOAT) {
- val v = value.asInstanceOf[dbc.value.ApproximateNumeric[Float]];
- v.nativeValue.toFloat
- } else if (value.dataType.nativeTypeId == DataType.DOUBLE) {
- val v = value.asInstanceOf[dbc.value.ApproximateNumeric[Double]];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to double: "+value)
- }
- }
-
- implicit def view9 (value:Value): scala.Boolean = {
- if (value.dataType.nativeTypeId == DataType.BOOLEAN) {
- val v = value.asInstanceOf[dbc.value.Boolean];
- v.nativeValue
- } else {
- throw new Illegal("Cannot convert value to boolean: "+value)
- }
- }
-
- implicit def view10 (value:Value): String = value match {
- case v:dbc.value.Character => v.nativeValue;
- case v:dbc.value.CharacterLargeObject => v.nativeValue;
- case v:dbc.value.CharacterVarying => v.nativeValue;
- case _ => throw new Illegal("Cannot convert value to string")
- }
-
-}
diff --git a/src/dbc/scala/dbc/value/ExactNumeric.scala b/src/dbc/scala/dbc/value/ExactNumeric.scala
deleted file mode 100644
index 7cd8b89..0000000
--- a/src/dbc/scala/dbc/value/ExactNumeric.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
-import java.math.BigInteger;
-import java.math.BigDecimal;
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class ExactNumeric [Type] extends Value {
-
- val dataType: datatype.ExactNumeric[Type];
-
- def sqlString = nativeValue.toString();
-
-}
-
- at deprecated(DbcIsDeprecated, "2.9.0") object ExactNumeric {
-
- implicit def exactNumericToByte (obj:value.ExactNumeric[Byte]): Byte = obj.nativeValue;
- implicit def exactNumericToShort (obj:value.ExactNumeric[Short]): Short = obj.nativeValue;
- implicit def exactNumericToInt (obj:value.ExactNumeric[Int]): Int = obj.nativeValue;
- implicit def exactNumericToLong (obj:value.ExactNumeric[Long]): Long = obj.nativeValue;
- implicit def exactNumericToBigInteger (obj:value.ExactNumeric[BigInteger]): BigInteger = obj.nativeValue;
- implicit def exactNumericToBigDecimal (obj:value.ExactNumeric[BigDecimal]): BigDecimal = obj.nativeValue;
-
-}
diff --git a/src/dbc/scala/dbc/value/Factory.scala b/src/dbc/scala/dbc/value/Factory.scala
deleted file mode 100644
index 2d6101f..0000000
--- a/src/dbc/scala/dbc/value/Factory.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
-import java.math.BigInteger;
-import java.math.BigDecimal;
-
- at deprecated(DbcIsDeprecated, "2.9.0") object Factory {
-
- def create (result: java.sql.ResultSet, index: Int, expectedDataType: DataType): Value = {
- expectedDataType.nativeTypeId match {
- case DataType.OBJECT =>
- new value.Unknown {
- val dataType = expectedDataType.asInstanceOf[datatype.Unknown];
- val nativeValue: AnyRef = result.getObject(index);
- }
- case DataType.STRING => {
- expectedDataType match {
- case t:datatype.Character =>
- new value.Character {
- val dataType = t;
- val nativeValue: String = result.getString(index);
- }
- case t:datatype.CharacterVarying =>
- new value.CharacterVarying {
- val dataType = t;
- val nativeValue: String = result.getString(index);
- }
- case t:datatype.CharacterLargeObject =>
- new value.CharacterLargeObject {
- val dataType = t;
- val nativeValue: String = result.getString(index);
- }
- }
- }
- case DataType.BOOLEAN =>
- new value.Boolean {
- val dataType = expectedDataType.asInstanceOf[datatype.Boolean];
- val nativeValue: scala.Boolean = result.getBoolean(index);
- }
- case DataType.FLOAT =>
- new value.ApproximateNumeric[Float] {
- val dataType = expectedDataType.asInstanceOf[datatype.ApproximateNumeric[Float]];
- val nativeValue: Float = result.getFloat(index);
- }
- case DataType.DOUBLE =>
- new value.ApproximateNumeric[Double] {
- val dataType = expectedDataType.asInstanceOf[datatype.ApproximateNumeric[Double]];
- val nativeValue: Double = result.getDouble(index);
- }
- case DataType.BYTE =>
- new value.ExactNumeric[Byte] {
- val dataType = expectedDataType.asInstanceOf[datatype.ExactNumeric[Byte]];
- val nativeValue: Byte = result.getByte(index);
- }
- case DataType.SHORT =>
- new value.ExactNumeric[Short] {
- val dataType = expectedDataType.asInstanceOf[datatype.ExactNumeric[Short]];
- val nativeValue: Short = result.getShort(index);
- }
- case DataType.INT =>
- new value.ExactNumeric[Int] {
- val dataType = expectedDataType.asInstanceOf[datatype.ExactNumeric[Int]];
- val nativeValue: Int = result.getInt(index);
- }
- case DataType.LONG =>
- new value.ExactNumeric[Long] {
- val dataType = expectedDataType.asInstanceOf[datatype.ExactNumeric[Long]];
- val nativeValue:Long = result.getLong(index);
- }
- case DataType.BIG_INTEGER =>
- new value.ExactNumeric[BigInteger] {
- val dataType = expectedDataType.asInstanceOf[datatype.ExactNumeric[BigInteger]];
- val nativeValue: BigInteger = result.getBigDecimal(index).unscaledValue();
- }
- case DataType.BIG_DECIMAL =>
- new value.ExactNumeric[BigDecimal] {
- val dataType = expectedDataType.asInstanceOf[datatype.ExactNumeric[BigDecimal]];
- val nativeValue: BigDecimal = result.getBigDecimal(index);
- }
-
- }
- }
-
-}
diff --git a/src/dbc/scala/dbc/value/Unknown.scala b/src/dbc/scala/dbc/value/Unknown.scala
deleted file mode 100644
index 89764a5..0000000
--- a/src/dbc/scala/dbc/value/Unknown.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package value;
-
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class Unknown extends Value {
-
- val dataType: datatype.Unknown;
-
- def sqlString = sys.error("An 'ANY' value cannot be represented.");
-
-}
-
- at deprecated(DbcIsDeprecated, "2.9.0") object UnknownType {
-
- def view (obj:value.Unknown): AnyRef = obj.nativeValue;
-
-}
diff --git a/src/dbc/scala/dbc/vendor/PostgreSQL.scala b/src/dbc/scala/dbc/vendor/PostgreSQL.scala
deleted file mode 100644
index ac528d5..0000000
--- a/src/dbc/scala/dbc/vendor/PostgreSQL.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.dbc
-package vendor;
-
-
-import compat.Platform
-
- at deprecated(DbcIsDeprecated, "2.9.0") abstract class PostgreSQL extends Vendor {
-
- def uri:java.net.URI;
- def user:String;
- def pass:String;
-
- val retainedConnections = 5;
-
- val nativeDriverClass = Platform.getClassForName("org.postgresql.Driver");
-
- val urlProtocolString = "jdbc:postgresql:"
-
-}
diff --git a/src/detach/library/scala/remoting/Channel.scala b/src/detach/library/scala/remoting/Channel.scala
index 541e45a..e60d16c 100644
--- a/src/detach/library/scala/remoting/Channel.scala
+++ b/src/detach/library/scala/remoting/Channel.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -116,20 +116,20 @@ class Channel protected (socket: Socket) {
* the expected type.
*/
@throws(classOf[ChannelException])
- def receive[T](implicit expected: reflect.Manifest[T]): T = {
- val found = in.readObject().asInstanceOf[reflect.Manifest[_]]
+ def receive[T](implicit expected: scala.reflect.ClassTag[T]): T = {
+ val found = in.readObject().asInstanceOf[reflect.ClassTag[_]]
info("receive: found="+found+", expected="+expected)
- import scala.reflect.Manifest
+ import scala.reflect.ClassTag
val x = found match {
- case Manifest.Unit => ()
- case Manifest.Boolean => in.readBoolean()
- case Manifest.Byte => in.readByte()
- case Manifest.Char => in.readChar()
- case Manifest.Short => in.readShort()
- case Manifest.Int => in.readInt()
- case Manifest.Long => in.readLong()
- case Manifest.Float => in.readFloat()
- case Manifest.Double => in.readDouble()
+ case ClassTag.Unit => ()
+ case ClassTag.Boolean => in.readBoolean()
+ case ClassTag.Byte => in.readByte()
+ case ClassTag.Char => in.readChar()
+ case ClassTag.Short => in.readShort()
+ case ClassTag.Int => in.readInt()
+ case ClassTag.Long => in.readLong()
+ case ClassTag.Float => in.readFloat()
+ case ClassTag.Double => in.readDouble()
case _ => in.readObject()
}
val res = if (found <:< expected)
@@ -144,12 +144,12 @@ class Channel protected (socket: Socket) {
/** <code>?</code> method may throw either an
* <code>ClassNotFoundException</code> or an <code>IOException</code>.
*/
- def ?[T](implicit m: reflect.Manifest[T]): T = receive[T](m)
+ def ?[T](implicit t: scala.reflect.ClassTag[T]): T = receive[T](t)
/** <code>send</code> method may throw an <code>IOException</code>.
*/
- def send[T](x: T)(implicit m: reflect.Manifest[T]) {
- out writeObject m
+ def send[T](x: T)(implicit t: scala.reflect.ClassTag[T]) {
+ out writeObject t
x match {
case x: Unit => // nop
case x: Boolean => out writeBoolean x
@@ -168,7 +168,7 @@ class Channel protected (socket: Socket) {
/** <code>!</code> method may throw an <code>IOException</code>.
*/
- def (implicit m: reflect.Manifest[T]) { send(x)(m) }
+ def (implicit m: scala.reflect.ClassTag[T]) { send(x)(m) }
def close() {
try { socket.close() }
diff --git a/src/detach/library/scala/remoting/Debug.scala b/src/detach/library/scala/remoting/Debug.scala
index bb5aa38..79f2bce 100644
--- a/src/detach/library/scala/remoting/Debug.scala
+++ b/src/detach/library/scala/remoting/Debug.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/detach/library/scala/remoting/ServerChannel.scala b/src/detach/library/scala/remoting/ServerChannel.scala
index 125f46a..7828f85 100644
--- a/src/detach/library/scala/remoting/ServerChannel.scala
+++ b/src/detach/library/scala/remoting/ServerChannel.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/detach/library/scala/remoting/detach.scala b/src/detach/library/scala/remoting/detach.scala
index 668dd98..51a3ac5 100644
--- a/src/detach/library/scala/remoting/detach.scala
+++ b/src/detach/library/scala/remoting/detach.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/detach/library/scala/runtime/RemoteRef.scala b/src/detach/library/scala/runtime/RemoteRef.scala
index d8444e5..e65b22c 100644
--- a/src/detach/library/scala/runtime/RemoteRef.scala
+++ b/src/detach/library/scala/runtime/RemoteRef.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/detach/library/scala/runtime/remoting/Debug.scala b/src/detach/library/scala/runtime/remoting/Debug.scala
index 7249107..06cdc67 100644
--- a/src/detach/library/scala/runtime/remoting/Debug.scala
+++ b/src/detach/library/scala/runtime/remoting/Debug.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala b/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala
index 814d50e..1105832 100644
--- a/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala
+++ b/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala b/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala
index 47f7e0c..ff6c8f6 100644
--- a/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala
+++ b/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala b/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala
index 42ca1e4..335f0d9 100644
--- a/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala
+++ b/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala b/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala
index 70c2e18..e0f48eb 100644
--- a/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala
+++ b/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala b/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala
index d952b61..2e13195 100644
--- a/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala
+++ b/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala b/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala
index 2e096d1..f4e61ea 100644
--- a/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala
+++ b/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/detach/library/scala/runtime/remoting/RemoteGC.scala b/src/detach/library/scala/runtime/remoting/RemoteGC.scala
index 30a9b6b..393c031 100644
--- a/src/detach/library/scala/runtime/remoting/RemoteGC.scala
+++ b/src/detach/library/scala/runtime/remoting/RemoteGC.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,8 +13,7 @@ package scala.runtime.remoting
import java.lang.ref.{Reference, WeakReference, ReferenceQueue}
import java.rmi.{NoSuchObjectException, Remote}
import java.rmi.server.UnicastRemoteObject
-
-import scala.collection.mutable.HashSet
+import scala.collection.mutable
/**
*
@@ -25,7 +24,7 @@ import scala.collection.mutable.HashSet
private [runtime] class RemoteGC {
private val refQueue = new ReferenceQueue[Remote]
- private val refSet = new HashSet[Reference[T] forSome { type T <: Remote }]
+ private val refSet = new mutable.HashSet[Reference[T] forSome { type T <: Remote }]
private var liveRefs = 0
diff --git a/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala b/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala
index 75a4eed..b14403f 100644
--- a/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala
+++ b/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala b/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala
index 8aaffcc..da83491 100644
--- a/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala
+++ b/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala b/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala
index fcbf5c8..9f27b26 100644
--- a/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala
+++ b/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala b/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala
index adb92c7..2ced9db 100644
--- a/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala
+++ b/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/detach/plugin/scala/tools/detach/Detach.scala b/src/detach/plugin/scala/tools/detach/Detach.scala
index 56433cd..f9a3d80 100644
--- a/src/detach/plugin/scala/tools/detach/Detach.scala
+++ b/src/detach/plugin/scala/tools/detach/Detach.scala
@@ -1,13 +1,12 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Stephane Micheloud
*/
package scala.tools.detach
-import scala.collection.immutable
-import scala.collection.mutable.{HashMap, HashSet, ListBuffer}
-
+import scala.collection.{ mutable, immutable }
+import scala.collection.mutable.ListBuffer
import scala.tools.nsc._
import scala.tools.nsc.plugins.PluginComponent
import scala.tools.nsc.symtab.Flags._
@@ -116,23 +115,23 @@ abstract class Detach extends PluginComponent
private val remoteRefClass = immutable.HashMap(elems(""): _*)
private val remoteRefImpl = immutable.HashMap(elems("Impl"): _*)
- private val proxyInterfaceDefs = new HashMap[Symbol/*owner*/, ListBuffer[Tree]]
- private val detachedClosureApply = new HashMap[Tree, Apply]
+ private val proxyInterfaceDefs = new mutable.HashMap[Symbol/*owner*/, ListBuffer[Tree]]
+ private val detachedClosureApply = new mutable.HashMap[Tree, Apply]
- private type SymSet = HashSet[Symbol]
- private val capturedObjects = new HashMap[Symbol/*clazz*/, SymSet]
- private val capturedFuncs = new HashMap[Symbol/*clazz*/, SymSet]
- private val capturedCallers = new HashMap[Symbol/*clazz*/, SymSet]
- private val capturedThisClass = new HashMap[Symbol, Symbol]
+ private type SymSet = mutable.HashSet[Symbol]
+ private val capturedObjects = new mutable.HashMap[Symbol/*clazz*/, SymSet]
+ private val capturedFuncs = new mutable.HashMap[Symbol/*clazz*/, SymSet]
+ private val capturedCallers = new mutable.HashMap[Symbol/*clazz*/, SymSet]
+ private val capturedThisClass = new mutable.HashMap[Symbol, Symbol]
- private val proxies = new HashMap[
+ private val proxies = new mutable.HashMap[
Symbol, //clazz
- (Symbol, Symbol, HashMap[Symbol, Symbol]) //iface, impl, accessor map
+ (Symbol, Symbol, mutable.HashMap[Symbol, Symbol]) //iface, impl, accessor map
]
def toInterface(clazz: Symbol) = proxies(clazz)._1
- private val classdefs = new HashMap[Symbol/*clazz*/, ClassDef]
+ private val classdefs = new mutable.HashMap[Symbol/*clazz*/, ClassDef]
// detachedClosure gathers class definitions containing a "detach" apply
- private val detachedClosure = new HashMap[Symbol/*clazz*/, ClassDef]
+ private val detachedClosure = new mutable.HashMap[Symbol/*clazz*/, ClassDef]
/** <p>
* The method <code>freeObjTraverser.traverse</code> is invoked
@@ -146,9 +145,9 @@ abstract class Detach extends PluginComponent
* </p>
*/
private val freeObjTraverser = new Traverser {
- def symSet(f: HashMap[Symbol, SymSet], sym: Symbol): SymSet = f.get(sym) match {
+ def symSet(f: mutable.HashMap[Symbol, SymSet], sym: Symbol): SymSet = f.get(sym) match {
case Some(ss) => ss
- case None => val ss = new HashSet[Symbol]; f(sym) = ss; ss
+ case None => val ss = new mutable.HashSet[Symbol]; f(sym) = ss; ss
}
def getClosureApply(tree: Tree): Apply = tree match {
case Block(_, expr) => getClosureApply(expr)
@@ -255,7 +254,7 @@ abstract class Detach extends PluginComponent
println("\nTreeOuterSubstituter:"+
"\n\tfrom="+from.mkString(",")+
"\n\tto="+to.mkString(","))
- val substMap = new HashMap[Symbol, Symbol]
+ val substMap = new mutable.HashMap[Symbol, Symbol]
override def traverse(tree: Tree) {
def subst(from: List[Symbol], to: List[Symbol]) {
if (!from.isEmpty)
@@ -328,7 +327,7 @@ abstract class Detach extends PluginComponent
}
subst(sym.tpe)
}
- val map = new HashMap[Symbol, Symbol]
+ val map = new mutable.HashMap[Symbol, Symbol]
override def traverse(tree: Tree) {
if (tree.hasSymbol && tree.symbol != NoSymbol) {
val sym = tree.symbol
@@ -735,8 +734,8 @@ abstract class Detach extends PluginComponent
proxyOwner.newClass(clazz.pos, encode(clazz.name.decode + PROXY_SUFFIX))
iface.sourceFile = clazz.sourceFile
iface setFlag (ABSTRACT | TRAIT | INTERFACE) // Java interface
- val iparents = List(ObjectClass.tpe, RemoteClass.tpe, ScalaObjectClass.tpe)
- iface setInfo ClassInfoType(iparents, new Scope, iface)
+ val iparents = List(ObjectClass.tpe, RemoteClass.tpe)
+ iface setInfo ClassInfoType(iparents, newScope, iface)
// methods must throw RemoteException
iface addAnnotation remoteAnnotationInfo
@@ -745,13 +744,11 @@ abstract class Detach extends PluginComponent
iclaz.sourceFile = clazz.sourceFile
iclaz setFlag (SYNTHETIC | FINAL)
// Variant 1: rebind/unbind
- val cparents = List(UnicastRemoteObjectClass.tpe, iface.tpe,
- UnreferencedClass.tpe, ScalaObjectClass.tpe)
+ val cparents = List(UnicastRemoteObjectClass.tpe, iface.tpe, UnreferencedClass.tpe)
// Variant 2: un-/exportObject
- //val cparents = List(ObjectClass.tpe, iface.tpe,
- // UnreferencedClass.tpe, ScalaObjectClass.tpe)
- iclaz setInfo ClassInfoType(cparents, new Scope, iclaz)
- val proxy = (iface, iclaz, new HashMap[Symbol, Symbol])
+ //val cparents = List(ObjectClass.tpe, iface.tpe, UnreferencedClass.tpe)
+ iclaz setInfo ClassInfoType(cparents, newScope, iclaz)
+ val proxy = (iface, iclaz, new mutable.HashMap[Symbol, Symbol])
proxies(clazz) = proxy
proxy
}
diff --git a/src/detach/plugin/scala/tools/detach/DetachPlugin.scala b/src/detach/plugin/scala/tools/detach/DetachPlugin.scala
index e4e9e6b..c6e18b7 100644
--- a/src/detach/plugin/scala/tools/detach/DetachPlugin.scala
+++ b/src/detach/plugin/scala/tools/detach/DetachPlugin.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Stephane Micheloud
*/
diff --git a/src/eclipse/README.md b/src/eclipse/README.md
new file mode 100644
index 0000000..44dd3d8
--- /dev/null
+++ b/src/eclipse/README.md
@@ -0,0 +1,55 @@
+Eclipse project files
+=====================
+
+Import all projects inside Eclipse by choosing File/Import Existing Projects
+and navigate to src/eclipse. Check all projects and click ok.
+
+IMPORTANT
+=========
+
+1. You need to define a `path variable` inside Eclipse. Define SCALA_BASEDIR in
+Preferences/General/Workspace/Linked Resources. The value should be the absolute
+path to your scala checkout. All paths in project files are relative to this one,
+so nothing will work before you do so.
+Additionally, we start using Maven dependencies (e.g. junit) so you need to define
+`classpath variable` inside Eclipse. Define `M2_REPO` in Java/Build Path/Classpath Variables
+to point to your local Maven repository (e.g. $HOME/.m2/repository).
+
+2. The Eclipse Java compiler does not allow certain calls to restricted APIs in the
+JDK. The Scala library uses such APIs, so you'd see this error:
+
+ Access restriction: The method compareAndSwapObject(Object, long, Object, Object)
+ from the type Unsafe is not accessible due to restriction on required library.
+You can *fix* it by allowing calls to restricted APIs in `Java=>Compiler=>Errors/Warnings=>Deprecated and Restricted API`
+settings.
+
+3. The IDE guesses the Scala library version by looking for `library.properties` inside
+the library jar. The `scala-library` project does not have such a file, so you will see
+an error about incompatible libraries. You can work around it by adding a `library.properties`
+inside `src/library` with the following contents:
+
+ #Mon, 04 Jun 2012 02:08:56 +0200
+ version.number=2.10.0-20120603-141530-b34313db72
+ maven.version.number=2.10.0-SNAPSHOT
+ osgi.version.number=2.10.0.v20120603-141530-b34313db72
+ copyright.string=Copyright 2002-2012 LAMP/EPFL
+
+4. Project files are tracked by Git, so adding them to `.gitignore` won't prevent them
+from being shown as dirty in `git status`. You can still ignore them by telling Git to
+consider them unchanged:
+
+ git update-index --assume-unchanged `find src/eclipse -iname .classpath -or -iname .project`
+
+If you want to go back to normal (for instance, to commit your changes to project files), run:
+
+ git update-index --no-assume-unchanged `find src/eclipse -iname .classpath -or -iname .project`
+
+DETAILS
+=======
+
+The compiler project depends on the library, reflect, asm and fjbg projects. The
+builder will take care of the correct ordering, and changes in one project will
+be picked up by the dependent projects.
+
+The output directory is set to be build/quick, so the runner scripts in quick
+work as they are (run an ant build to have them generated once)
\ No newline at end of file
diff --git a/src/eclipse/asm/.classpath b/src/eclipse/asm/.classpath
new file mode 100644
index 0000000..03d9e97
--- /dev/null
+++ b/src/eclipse/asm/.classpath
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="src"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="output" path="asm-quick-bin"/>
+</classpath>
diff --git a/src/eclipse/asm/.project b/src/eclipse/asm/.project
new file mode 100644
index 0000000..c905138
--- /dev/null
+++ b/src/eclipse/asm/.project
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>asm</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.jdt.core.javabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>src</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/asm</locationURI>
+ </link>
+ <link>
+ <name>asm-quick-bin</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/asm/classes</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/continuations-library/.classpath b/src/eclipse/continuations-library/.classpath
new file mode 100644
index 0000000..b3ca4ee
--- /dev/null
+++ b/src/eclipse/continuations-library/.classpath
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="output" path="build-quick-continuations-library"/>
+</classpath>
diff --git a/src/eclipse/continuations-library/.project b/src/eclipse/continuations-library/.project
new file mode 100644
index 0000000..f3a53a3
--- /dev/null
+++ b/src/eclipse/continuations-library/.project
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>continuations-library</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-continuations-library</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/continuations/library</locationURI>
+ </link>
+ <link>
+ <name>library</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/continuations/library</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/continuations-library/.settings/org.scala-ide.sdt.core.prefs b/src/eclipse/continuations-library/.settings/org.scala-ide.sdt.core.prefs
new file mode 100644
index 0000000..63e1df2
--- /dev/null
+++ b/src/eclipse/continuations-library/.settings/org.scala-ide.sdt.core.prefs
@@ -0,0 +1,2 @@
+P=continuations\:enable
+scala.compiler.useProjectSettings=true
diff --git a/src/eclipse/fjbg/.classpath b/src/eclipse/fjbg/.classpath
new file mode 100644
index 0000000..3e2f55f
--- /dev/null
+++ b/src/eclipse/fjbg/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="fjbg"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="output" path="libs-classes-fjbg"/>
+</classpath>
diff --git a/src/eclipse/fjbg/.project b/src/eclipse/fjbg/.project
new file mode 100644
index 0000000..8acea9f
--- /dev/null
+++ b/src/eclipse/fjbg/.project
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>fjbg</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>fjbg</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/fjbg</locationURI>
+ </link>
+ <link>
+ <name>libs-classes-fjbg</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/libs/classes/fjbg</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
new file mode 100644
index 0000000..7936d4d
--- /dev/null
+++ b/src/eclipse/partest/.classpath
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="partest"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scalap"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="lib" path="lib/ant/ant.jar"/>
+ <classpathentry kind="lib" path="lib/jline.jar"/>
+ <classpathentry kind="lib" path="lib/msil.jar"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
+ <classpathentry kind="output" path="build-quick-partest"/>
+</classpath>
diff --git a/src/eclipse/partest/.project b/src/eclipse/partest/.project
new file mode 100644
index 0000000..45c2433
--- /dev/null
+++ b/src/eclipse/partest/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>partest</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-partest</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/partest</locationURI>
+ </link>
+ <link>
+ <name>lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ <link>
+ <name>partest</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/partest</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/reflect/.classpath b/src/eclipse/reflect/.classpath
new file mode 100644
index 0000000..36e6b6a
--- /dev/null
+++ b/src/eclipse/reflect/.classpath
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
+ <classpathentry kind="output" path="build-quick-reflect"/>
+</classpath>
diff --git a/src/eclipse/reflect/.project b/src/eclipse/reflect/.project
new file mode 100644
index 0000000..1e5cbb4
--- /dev/null
+++ b/src/eclipse/reflect/.project
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>reflect</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-reflect</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/reflect</locationURI>
+ </link>
+ <link>
+ <name>reflect</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/reflect</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath
new file mode 100644
index 0000000..d438d3e
--- /dev/null
+++ b/src/eclipse/scala-compiler/.classpath
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/fjbg"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="lib" path="lib/ant/ant.jar"/>
+ <classpathentry kind="lib" path="lib/jline.jar"/>
+ <classpathentry kind="lib" path="lib/msil.jar"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
+ <classpathentry kind="output" path="build-quick-compiler"/>
+</classpath>
diff --git a/src/eclipse/scala-compiler/.project b/src/eclipse/scala-compiler/.project
new file mode 100644
index 0000000..cf8a68c
--- /dev/null
+++ b/src/eclipse/scala-compiler/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>scala-compiler</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-compiler</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/compiler</locationURI>
+ </link>
+ <link>
+ <name>compiler</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/compiler</locationURI>
+ </link>
+ <link>
+ <name>lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/scala-library/.classpath b/src/eclipse/scala-library/.classpath
new file mode 100644
index 0000000..a3a4933
--- /dev/null
+++ b/src/eclipse/scala-library/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="library"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="output" path="build-quick-lib"/>
+</classpath>
diff --git a/src/eclipse/scala-library/.project b/src/eclipse/scala-library/.project
new file mode 100644
index 0000000..049cf75
--- /dev/null
+++ b/src/eclipse/scala-library/.project
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>scala-library</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/library</locationURI>
+ </link>
+ <link>
+ <name>library</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/library</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/scalap/.classpath b/src/eclipse/scalap/.classpath
new file mode 100644
index 0000000..16737bd
--- /dev/null
+++ b/src/eclipse/scalap/.classpath
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="scalap"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="lib" path="lib/ant/ant.jar"/>
+ <classpathentry kind="lib" path="lib/jline.jar"/>
+ <classpathentry kind="lib" path="lib/msil.jar"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
+ <classpathentry kind="output" path="build-quick-scalap"/>
+</classpath>
diff --git a/src/eclipse/scalap/.project b/src/eclipse/scalap/.project
new file mode 100644
index 0000000..3599168
--- /dev/null
+++ b/src/eclipse/scalap/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>scalap</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-scalap</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/scalap</locationURI>
+ </link>
+ <link>
+ <name>lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ <link>
+ <name>scalap</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/scalap</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/test-junit/.classpath b/src/eclipse/test-junit/.classpath
new file mode 100644
index 0000000..718f7b6
--- /dev/null
+++ b/src/eclipse/test-junit/.classpath
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="test-junit"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="lib" path="lib/ant/ant.jar"/>
+ <classpathentry kind="lib" path="lib/jline.jar"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry kind="var" path="M2_REPO/junit/junit/4.10/junit-4.10.jar"/>
+ <classpathentry kind="output" path="build-test-junit"/>
+</classpath>
diff --git a/src/eclipse/test-junit/.project b/src/eclipse/test-junit/.project
new file mode 100644
index 0000000..052b6c1
--- /dev/null
+++ b/src/eclipse/test-junit/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>test-junit</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-test-junit</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/junit/classes</locationURI>
+ </link>
+ <link>
+ <name>lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ <link>
+ <name>test-junit</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/test/junit</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/ensime/.ensime.SAMPLE b/src/ensime/.ensime.SAMPLE
new file mode 100644
index 0000000..1080181
--- /dev/null
+++ b/src/ensime/.ensime.SAMPLE
@@ -0,0 +1,17 @@
+(
+ :disable-source-load-on-startup t
+ :disable-scala-jars-on-classpath t
+ :root-dir "c:/Projects/Kepler"
+ :sources (
+ "c:/Projects/Kepler/src/library"
+ "c:/Projects/Kepler/src/reflect"
+ "c:/Projects/Kepler/src/compiler"
+ )
+ :compile-deps (
+ "c:/Projects/Kepler/build/asm/classes"
+ "c:/Projects/Kepler/build/locker/classes/library"
+ "c:/Projects/Kepler/build/locker/classes/reflect"
+ "c:/Projects/Kepler/build/locker/classes/compiler"
+ )
+ :target "c:/Projects/Kepler/build/classes"
+)
\ No newline at end of file
diff --git a/src/ensime/README.md b/src/ensime/README.md
new file mode 100644
index 0000000..302d47b
--- /dev/null
+++ b/src/ensime/README.md
@@ -0,0 +1,11 @@
+Ensime project files
+=====================
+
+Rename .ensime.SAMPLE to .ensime and replace sample paths with real paths to your sources and build results.
+After that you're good to go with one of the ENSIME-enabled text editors.
+
+Editors that know how to talk to ENSIME servers:
+1) Emacs via https://github.com/aemoncannon/ensime
+2) jEdit via https://github.com/djspiewak/ensime-sidekick
+3) TextMate via https://github.com/mads379/ensime.tmbundle
+4) Sublime Text 2 via https://github.com/sublimescala/sublime-ensime
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java b/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java
index 4c5bc27..9856dc7 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java b/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java
index 0a48fc1..01d8cc9 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java b/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java
index 85f9408..61a0452 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java
index 3b9e5f3..6a825be 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java b/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java
index 212058a..33cdce2 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
@@ -48,7 +48,6 @@ public class JAttributeFactory {
Constructor defaultConstructor) {
this.context = context;
this.defaultConstructor = defaultConstructor;
- registerClass("BootstrapInvokeDynamic", JBootstrapInvokeDynamic.class);
registerClass("Code", JCodeAttribute.class);
registerClass("ConstantValue", JConstantValueAttribute.class);
registerClass("EnclosingMethod", JEnclosingMethodAttribute.class);
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JBootstrapInvokeDynamic.java b/src/fjbg/ch/epfl/lamp/fjbg/JBootstrapInvokeDynamic.java
deleted file mode 100644
index 91c4a46..0000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JBootstrapInvokeDynamic.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.Iterator;
-
-/**
- * BootstrapInvokeDynamic entry, as described by JSR 292 (invoke dynamic)
- *
- * @author Iulian Dragos
- * @version 1.0
- *
- */
-public class JBootstrapInvokeDynamic extends JAttribute {
- /** Constant pool of the current classfile. */
- private JConstantPool pool;
-
- protected final int classIdx;
-
- public JBootstrapInvokeDynamic(FJBGContext context,
- JClass clazz,
- String className) {
- super(context, clazz);
- this.pool = clazz.pool;
-
- this.classIdx = pool.addClass(className);
- }
-
- public JBootstrapInvokeDynamic(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
-
- this.classIdx = stream.readShort();
-
- assert name.equals(getName());
- }
-
- public String getName() { return "BootstrapInvokeDynamic"; }
-
- // Follows javap output format for BootstrapInvokeDynamic attribute.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" BootstrapInvokeDynamic:");
- buf.append("\n #");
- buf.append(classIdx);
- buf.append("; // class ");
- buf.append(pool.lookupClass(classIdx));
- buf.append("\n");
- return buf.toString();
- }
-
- protected int getSize() {
- return 2; // Short.SIZE
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(classIdx);
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JClass.java b/src/fjbg/ch/epfl/lamp/fjbg/JClass.java
index 31e3072..bb1538e 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JClass.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JClass.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
@@ -26,8 +26,6 @@ public class JClass extends JMember {
protected final String sourceFileName;
protected final JConstantPool pool;
- protected JBootstrapInvokeDynamic bootstrapClassAttr = null;
-
public final static String[] NO_INTERFACES = new String[0];
protected final LinkedList/*<JMethod>*/ methods = new LinkedList();
@@ -307,12 +305,6 @@ public class JClass extends JMember {
fStream.close();
}
- public void setBootstrapClass(String bootstrapClass) {
- assert bootstrapClassAttr == null;
- bootstrapClassAttr = new JBootstrapInvokeDynamic(context, this, bootstrapClass);
- addAttribute(bootstrapClassAttr);
- }
-
/**
* Writes the contents of the class to a data stream.
* @param stream The data stream in which the class must be written.
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCode.java b/src/fjbg/ch/epfl/lamp/fjbg/JCode.java
index 332cc7c..ab6934a 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JCode.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JCode.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
@@ -22,6 +22,8 @@ import ch.epfl.lamp.util.ByteArray;
public class JCode {
protected boolean frozen = false;
+ public static int MAX_CODE_SIZE = 65535;
+
protected final FJBGContext context;
protected final JMethod owner;
@@ -57,8 +59,8 @@ public class JCode {
this.owner = owner;
owner.setCode(this);
int size = stream.readInt();
- if (size >= 65536) // section 4.10
- throw new Error("code size must be less than 65536: " + size);
+ if (size > MAX_CODE_SIZE) // section 4.10
+ throw new Error("code size must be less than " + MAX_CODE_SIZE + ": " + size);
this.codeArray = new ByteArray(stream, size);
}
@@ -97,8 +99,19 @@ public class JCode {
// Freezing
//////////////////////////////////////////////////////////////////////
+ public static class CodeSizeTooBigException extends OffsetTooBigException {
+ public int codeSize;
+
+ public CodeSizeTooBigException(int size) {
+ codeSize = size;
+ }
+ }
+
public void freeze() throws OffsetTooBigException {
assert !frozen;
+
+ if (getSize() > MAX_CODE_SIZE) throw new CodeSizeTooBigException(getSize());
+
patchAllOffset();
codeArray.freeze();
frozen = true;
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java
index 153f156..9f3fcf8 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java b/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java
index 989c313..d09dfd1 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java b/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java
index ca2985c..9867e01 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java
index 6efd3d7..6ee05e4 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java
index 5536a3b..f663f00 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java
index aee34de..b91d0f2 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java b/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
index 8b0338e..d82db82 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
@@ -596,6 +596,16 @@ public class JExtendedCode extends JCode {
double minDensity) {
assert keys.length == branches.length;
+ //The special case for empty keys. It makes sense to allow
+ //empty keys and generate LOOKUPSWITCH with defaultBranch
+ //only. This is exactly what javac does for switch statement
+ //that has only a default case.
+ if (keys.length == 0) {
+ emitLOOKUPSWITCH(keys, branches, defaultBranch);
+ return;
+ }
+ //the rest of the code assumes that keys.length > 0
+
// sorting the tables
// FIXME use quicksort
for (int i = 1; i < keys.length; i++) {
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JField.java b/src/fjbg/ch/epfl/lamp/fjbg/JField.java
index 1456157..29d826b 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JField.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JField.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java b/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java
index 3d2bf87..794c0f1 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java
index 200bf17..1c1ced5 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java b/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java
index 77148c8..96f3b4e 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java
index af71459..f8c09b8 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java b/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java
index ab2c8f2..af79806 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java
index a091898..b277cc7 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMember.java b/src/fjbg/ch/epfl/lamp/fjbg/JMember.java
index 70d6189..6356cc8 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JMember.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JMember.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java b/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java
index ad35c76..01d58a4 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java b/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java
index a0197de..cd3d71f 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java b/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java
index 50ccae7..06db5b1 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java b/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java
index 4c971a1..cc68681 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java
index dcee9c2..50aa9d3 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java b/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java
index 646b0f8..73d1026 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java
index 46090b7..3a17cb2 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java
index 52881a2..72a5484 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JType.java b/src/fjbg/ch/epfl/lamp/fjbg/JType.java
index 2519fc0..298a2b0 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JType.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JType.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/Main.java b/src/fjbg/ch/epfl/lamp/fjbg/Main.java
index d8fcdac..810ee7c 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/Main.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/Main.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/fjbg/ch/epfl/lamp/util/ByteArray.java b/src/fjbg/ch/epfl/lamp/util/ByteArray.java
index 0605ae7..b852e1a 100644
--- a/src/fjbg/ch/epfl/lamp/util/ByteArray.java
+++ b/src/fjbg/ch/epfl/lamp/util/ByteArray.java
@@ -1,5 +1,5 @@
/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2011 LAMP/EPFL
+ * Copyright 2002-2013 LAMP/EPFL
* @author Michel Schinz
*/
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
index 3fad92c..6578504 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinPool.java
@@ -1,669 +1,3063 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
-import java.util.*;
-import java.util.concurrent.*;
-import java.util.concurrent.locks.*;
-import java.util.concurrent.atomic.*;
-import sun.misc.Unsafe;
-import java.lang.reflect.*;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.AbstractExecutorService;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.RunnableFuture;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * @since 1.8
+ * @author Doug Lea
+ */
+/*public*/ abstract class CountedCompleter<T> extends ForkJoinTask<T> {
+ private static final long serialVersionUID = 5232453752276485070L;
+
+ /** This task's completer, or null if none */
+ final CountedCompleter<?> completer;
+ /** The number of pending tasks until completion */
+ volatile int pending;
+
+ /**
+ * Creates a new CountedCompleter with the given completer
+ * and initial pending count.
+ *
+ * @param completer this task's completer, or {@code null} if none
+ * @param initialPendingCount the initial pending count
+ */
+ protected CountedCompleter(CountedCompleter<?> completer,
+ int initialPendingCount) {
+ this.completer = completer;
+ this.pending = initialPendingCount;
+ }
+
+ /**
+ * Creates a new CountedCompleter with the given completer
+ * and an initial pending count of zero.
+ *
+ * @param completer this task's completer, or {@code null} if none
+ */
+ protected CountedCompleter(CountedCompleter<?> completer) {
+ this.completer = completer;
+ }
+
+ /**
+ * Creates a new CountedCompleter with no completer
+ * and an initial pending count of zero.
+ */
+ protected CountedCompleter() {
+ this.completer = null;
+ }
+
+ /**
+ * The main computation performed by this task.
+ */
+ public abstract void compute();
+
+ /**
+ * Performs an action when method {@link #tryComplete} is invoked
+ * and the pending count is zero, or when the unconditional
+ * method {@link #complete} is invoked. By default, this method
+ * does nothing. You can distinguish cases by checking the
+ * identity of the given caller argument. If not equal to {@code
+ * this}, then it is typically a subtask that may contain results
+ * (and/or links to other results) to combine.
+ *
+ * @param caller the task invoking this method (which may
+ * be this task itself)
+ */
+ public void onCompletion(CountedCompleter<?> caller) {
+ }
+
+ /**
+ * Performs an action when method {@link #completeExceptionally}
+ * is invoked or method {@link #compute} throws an exception, and
+ * this task has not otherwise already completed normally. On
+ * entry to this method, this task {@link
+ * ForkJoinTask#isCompletedAbnormally}. The return value of this
+ * method controls further propagation: If {@code true} and this
+ * task has a completer, then this completer is also completed
+ * exceptionally. The default implementation of this method does
+ * nothing except return {@code true}.
+ *
+ * @param ex the exception
+ * @param caller the task invoking this method (which may
+ * be this task itself)
+ * @return true if this exception should be propagated to this
+ * task's completer, if one exists
+ */
+ public boolean onExceptionalCompletion(Throwable ex, CountedCompleter<?> caller) {
+ return true;
+ }
+
+ /**
+ * Returns the completer established in this task's constructor,
+ * or {@code null} if none.
+ *
+ * @return the completer
+ */
+ public final CountedCompleter<?> getCompleter() {
+ return completer;
+ }
+
+ /**
+ * Returns the current pending count.
+ *
+ * @return the current pending count
+ */
+ public final int getPendingCount() {
+ return pending;
+ }
+
+ /**
+ * Sets the pending count to the given value.
+ *
+ * @param count the count
+ */
+ public final void setPendingCount(int count) {
+ pending = count;
+ }
+
+ /**
+ * Adds (atomically) the given value to the pending count.
+ *
+ * @param delta the value to add
+ */
+ public final void addToPendingCount(int delta) {
+ int c; // note: can replace with intrinsic in jdk8
+ do {} while (!U.compareAndSwapInt(this, PENDING, c = pending, c+delta));
+ }
+
+ /**
+ * Sets (atomically) the pending count to the given count only if
+ * it currently holds the given expected value.
+ *
+ * @param expected the expected value
+ * @param count the new value
+ * @return true if successful
+ */
+ public final boolean compareAndSetPendingCount(int expected, int count) {
+ return U.compareAndSwapInt(this, PENDING, expected, count);
+ }
+
+ /**
+ * If the pending count is nonzero, (atomically) decrements it.
+ *
+ * @return the initial (undecremented) pending count holding on entry
+ * to this method
+ */
+ public final int decrementPendingCountUnlessZero() {
+ int c;
+ do {} while ((c = pending) != 0 &&
+ !U.compareAndSwapInt(this, PENDING, c, c - 1));
+ return c;
+ }
+
+ /**
+ * Returns the root of the current computation; i.e., this
+ * task if it has no completer, else its completer's root.
+ *
+ * @return the root of the current computation
+ */
+ public final CountedCompleter<?> getRoot() {
+ CountedCompleter<?> a = this, p;
+ while ((p = a.completer) != null)
+ a = p;
+ return a;
+ }
+
+ /**
+ * If the pending count is nonzero, decrements the count;
+ * otherwise invokes {@link #onCompletion} and then similarly
+ * tries to complete this task's completer, if one exists,
+ * else marks this task as complete.
+ */
+ public final void tryComplete() {
+ CountedCompleter<?> a = this, s = a;
+ for (int c;;) {
+ if ((c = a.pending) == 0) {
+ a.onCompletion(s);
+ if ((a = (s = a).completer) == null) {
+ s.quietlyComplete();
+ return;
+ }
+ }
+ else if (U.compareAndSwapInt(a, PENDING, c, c - 1))
+ return;
+ }
+ }
+
+ /**
+ * Equivalent to {@link #tryComplete} but does not invoke {@link
+ * #onCompletion} along the completion path: If the pending count
+ * is nonzero, decrements the count; otherwise, similarly tries to
+ * complete this task's completer, if one exists, else marks this
+ * task as complete. This method may be useful in cases where
+ * {@code onCompletion} should not, or need not, be invoked for
+ * each completer in a computation.
+ */
+ public final void propagateCompletion() {
+ CountedCompleter<?> a = this, s = a;
+ for (int c;;) {
+ if ((c = a.pending) == 0) {
+ if ((a = (s = a).completer) == null) {
+ s.quietlyComplete();
+ return;
+ }
+ }
+ else if (U.compareAndSwapInt(a, PENDING, c, c - 1))
+ return;
+ }
+ }
+
+ /**
+ * Regardless of pending count, invokes {@link #onCompletion},
+ * marks this task as complete and further triggers {@link
+ * #tryComplete} on this task's completer, if one exists. The
+ * given rawResult is used as an argument to {@link #setRawResult}
+ * before invoking {@link #onCompletion} or marking this task as
+ * complete; its value is meaningful only for classes overriding
+ * {@code setRawResult}.
+ *
+ * <p>This method may be useful when forcing completion as soon as
+ * any one (versus all) of several subtask results are obtained.
+ * However, in the common (and recommended) case in which {@code
+ * setRawResult} is not overridden, this effect can be obtained
+ * more simply using {@code quietlyCompleteRoot();}.
+ *
+ * @param rawResult the raw result
+ */
+ public void complete(T rawResult) {
+ CountedCompleter<?> p;
+ setRawResult(rawResult);
+ onCompletion(this);
+ quietlyComplete();
+ if ((p = completer) != null)
+ p.tryComplete();
+ }
+
+
+ /**
+ * If this task's pending count is zero, returns this task;
+ * otherwise decrements its pending count and returns {@code
+ * null}. This method is designed to be used with {@link
+ * #nextComplete} in completion traversal loops.
+ *
+ * @return this task, if pending count was zero, else {@code null}
+ */
+ public final CountedCompleter<?> firstComplete() {
+ for (int c;;) {
+ if ((c = pending) == 0)
+ return this;
+ else if (U.compareAndSwapInt(this, PENDING, c, c - 1))
+ return null;
+ }
+ }
+
+ /**
+ * If this task does not have a completer, invokes {@link
+ * ForkJoinTask#quietlyComplete} and returns {@code null}. Or, if
+ * this task's pending count is non-zero, decrements its pending
+ * count and returns {@code null}. Otherwise, returns the
+ * completer. This method can be used as part of a completion
+ * traversal loop for homogeneous task hierarchies:
+ *
+ * <pre> {@code
+ * for (CountedCompleter<?> c = firstComplete();
+ * c != null;
+ * c = c.nextComplete()) {
+ * // ... process c ...
+ * }}</pre>
+ *
+ * @return the completer, or {@code null} if none
+ */
+ public final CountedCompleter<?> nextComplete() {
+ CountedCompleter<?> p;
+ if ((p = completer) != null)
+ return p.firstComplete();
+ else {
+ quietlyComplete();
+ return null;
+ }
+ }
+
+ /**
+ * Equivalent to {@code getRoot().quietlyComplete()}.
+ */
+ public final void quietlyCompleteRoot() {
+ for (CountedCompleter<?> a = this, p;;) {
+ if ((p = a.completer) == null) {
+ a.quietlyComplete();
+ return;
+ }
+ a = p;
+ }
+ }
+
+ /**
+ * Supports ForkJoinTask exception propagation.
+ */
+ void internalPropagateException(Throwable ex) {
+ CountedCompleter<?> a = this, s = a;
+ while (a.onExceptionalCompletion(ex, s) &&
+ (a = (s = a).completer) != null && a.status >= 0)
+ a.recordExceptionalCompletion(ex);
+ }
+
+ /**
+ * Implements execution conventions for CountedCompleters.
+ */
+ protected final boolean exec() {
+ compute();
+ return false;
+ }
+
+ /**
+ * Returns the result of the computation. By default
+ * returns {@code null}, which is appropriate for {@code Void}
+ * actions, but in other cases should be overridden, almost
+ * always to return a field or function of a field that
+ * holds the result upon completion.
+ *
+ * @return the result of the computation
+ */
+ public T getRawResult() { return null; }
+
+ /**
+ * A method that result-bearing CountedCompleters may optionally
+ * use to help maintain result data. By default, does nothing.
+ * Overrides are not recommended. However, if this method is
+ * overridden to update existing objects or fields, then it must
+ * in general be defined to be thread-safe.
+ */
+ protected void setRawResult(T t) { }
+
+ // Unsafe mechanics
+ private static final sun.misc.Unsafe U;
+ private static final long PENDING;
+ static {
+ try {
+ U = getUnsafe();
+ PENDING = U.objectFieldOffset
+ (CountedCompleter.class.getDeclaredField("pending"));
+ } catch (Exception e) {
+ throw new Error(e);
+ }
+ }
+
+ /**
+ * Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
+ * Replace with a simple call to Unsafe.getUnsafe when integrating
+ * into a jdk.
+ *
+ * @return a sun.misc.Unsafe
+ */
+ private static sun.misc.Unsafe getUnsafe() {
+ return scala.concurrent.util.Unsafe.instance;
+ }
+}
/**
- * An {@link ExecutorService} for running {@link ForkJoinTask}s. A
- * ForkJoinPool provides the entry point for submissions from
- * non-ForkJoinTasks, as well as management and monitoring operations.
- * Normally a single ForkJoinPool is used for a large number of
- * submitted tasks. Otherwise, use would not usually outweigh the
- * construction and bookkeeping overhead of creating a large set of
- * threads.
+ * An {@link ExecutorService} for running {@link ForkJoinTask}s.
+ * A {@code ForkJoinPool} provides the entry point for submissions
+ * from non-{@code ForkJoinTask} clients, as well as management and
+ * monitoring operations.
+ *
+ * <p>A {@code ForkJoinPool} differs from other kinds of {@link
+ * ExecutorService} mainly by virtue of employing
+ * <em>work-stealing</em>: all threads in the pool attempt to find and
+ * execute tasks submitted to the pool and/or created by other active
+ * tasks (eventually blocking waiting for work if none exist). This
+ * enables efficient processing when most tasks spawn other subtasks
+ * (as do most {@code ForkJoinTask}s), as well as when many small
+ * tasks are submitted to the pool from external clients. Especially
+ * when setting <em>asyncMode</em> to true in constructors, {@code
+ * ForkJoinPool}s may also be appropriate for use with event-style
+ * tasks that are never joined.
*
- * <p>ForkJoinPools differ from other kinds of Executors mainly in
- * that they provide <em>work-stealing</em>: all threads in the pool
- * attempt to find and execute subtasks created by other active tasks
- * (eventually blocking if none exist). This makes them efficient when
- * most tasks spawn other subtasks (as do most ForkJoinTasks), as well
- * as the mixed execution of some plain Runnable- or Callable- based
- * activities along with ForkJoinTasks. When setting
- * <tt>setAsyncMode</tt>, a ForkJoinPools may also be appropriate for
- * use with fine-grained tasks that are never joined. Otherwise, other
- * ExecutorService implementations are typically more appropriate
- * choices.
+ * <p>A static {@link #commonPool()} is available and appropriate for
+ * most applications. The common pool is used by any ForkJoinTask that
+ * is not explicitly submitted to a specified pool. Using the common
+ * pool normally reduces resource usage (its threads are slowly
+ * reclaimed during periods of non-use, and reinstated upon subsequent
+ * use).
*
- * <p>A ForkJoinPool may be constructed with a given parallelism level
- * (target pool size), which it attempts to maintain by dynamically
- * adding, suspending, or resuming threads, even if some tasks are
- * waiting to join others. However, no such adjustments are performed
- * in the face of blocked IO or other unmanaged synchronization. The
- * nested <code>ManagedBlocker</code> interface enables extension of
- * the kinds of synchronization accommodated. The target parallelism
- * level may also be changed dynamically (<code>setParallelism</code>)
- * and thread construction can be limited using methods
- * <code>setMaximumPoolSize</code> and/or
- * <code>setMaintainsParallelism</code>.
+ * <p>For applications that require separate or custom pools, a {@code
+ * ForkJoinPool} may be constructed with a given target parallelism
+ * level; by default, equal to the number of available processors. The
+ * pool attempts to maintain enough active (or available) threads by
+ * dynamically adding, suspending, or resuming internal worker
+ * threads, even if some tasks are stalled waiting to join
+ * others. However, no such adjustments are guaranteed in the face of
+ * blocked I/O or other unmanaged synchronization. The nested {@link
+ * ManagedBlocker} interface enables extension of the kinds of
+ * synchronization accommodated.
*
* <p>In addition to execution and lifecycle control methods, this
* class provides status check methods (for example
- * <code>getStealCount</code>) that are intended to aid in developing,
+ * {@link #getStealCount}) that are intended to aid in developing,
* tuning, and monitoring fork/join applications. Also, method
- * <code>toString</code> returns indications of pool state in a
+ * {@link #toString} returns indications of pool state in a
* convenient form for informal monitoring.
*
+ * <p>As is the case with other ExecutorServices, there are three
+ * main task execution methods summarized in the following table.
+ * These are designed to be used primarily by clients not already
+ * engaged in fork/join computations in the current pool. The main
+ * forms of these methods accept instances of {@code ForkJoinTask},
+ * but overloaded forms also allow mixed execution of plain {@code
+ * Runnable}- or {@code Callable}- based activities as well. However,
+ * tasks that are already executing in a pool should normally instead
+ * use the within-computation forms listed in the table unless using
+ * async event-style tasks that are not usually joined, in which case
+ * there is little difference among choice of methods.
+ *
+ * <table BORDER CELLPADDING=3 CELLSPACING=1>
+ * <tr>
+ * <td></td>
+ * <td ALIGN=CENTER> <b>Call from non-fork/join clients</b></td>
+ * <td ALIGN=CENTER> <b>Call from within fork/join computations</b></td>
+ * </tr>
+ * <tr>
+ * <td> <b>Arrange async execution</td>
+ * <td> {@link #execute(ForkJoinTask)}</td>
+ * <td> {@link ForkJoinTask#fork}</td>
+ * </tr>
+ * <tr>
+ * <td> <b>Await and obtain result</td>
+ * <td> {@link #invoke(ForkJoinTask)}</td>
+ * <td> {@link ForkJoinTask#invoke}</td>
+ * </tr>
+ * <tr>
+ * <td> <b>Arrange exec and obtain Future</td>
+ * <td> {@link #submit(ForkJoinTask)}</td>
+ * <td> {@link ForkJoinTask#fork} (ForkJoinTasks <em>are</em> Futures)</td>
+ * </tr>
+ * </table>
+ *
+ * <p>The common pool is by default constructed with default
+ * parameters, but these may be controlled by setting three {@link
+ * System#getProperty system properties} with prefix {@code
+ * java.util.concurrent.ForkJoinPool.common}: {@code parallelism} --
+ * an integer greater than zero, {@code threadFactory} -- the class
+ * name of a {@link ForkJoinWorkerThreadFactory}, and {@code
+ * exceptionHandler} -- the class name of a {@link
+ * java.lang.Thread.UncaughtExceptionHandler
+ * Thread.UncaughtExceptionHandler}. Upon any error in establishing
+ * these settings, default parameters are used.
+ *
* <p><b>Implementation notes</b>: This implementation restricts the
* maximum number of running threads to 32767. Attempts to create
- * pools with greater than the maximum result in
- * IllegalArgumentExceptions.
+ * pools with greater than the maximum number result in
+ * {@code IllegalArgumentException}.
+ *
+ * <p>This implementation rejects submitted tasks (that is, by throwing
+ * {@link RejectedExecutionException}) only when the pool is shut down
+ * or internal resources have been exhausted.
+ *
+ * @since 1.7
+ * @author Doug Lea
*/
-public class ForkJoinPool /*extends AbstractExecutorService*/ {
+public class ForkJoinPool extends AbstractExecutorService {
+
+ /*
+ * Implementation Overview
+ *
+ * This class and its nested classes provide the main
+ * functionality and control for a set of worker threads:
+ * Submissions from non-FJ threads enter into submission queues.
+ * Workers take these tasks and typically split them into subtasks
+ * that may be stolen by other workers. Preference rules give
+ * first priority to processing tasks from their own queues (LIFO
+ * or FIFO, depending on mode), then to randomized FIFO steals of
+ * tasks in other queues.
+ *
+ * WorkQueues
+ * ==========
+ *
+ * Most operations occur within work-stealing queues (in nested
+ * class WorkQueue). These are special forms of Deques that
+ * support only three of the four possible end-operations -- push,
+ * pop, and poll (aka steal), under the further constraints that
+ * push and pop are called only from the owning thread (or, as
+ * extended here, under a lock), while poll may be called from
+ * other threads. (If you are unfamiliar with them, you probably
+ * want to read Herlihy and Shavit's book "The Art of
+ * Multiprocessor programming", chapter 16 describing these in
+ * more detail before proceeding.) The main work-stealing queue
+ * design is roughly similar to those in the papers "Dynamic
+ * Circular Work-Stealing Deque" by Chase and Lev, SPAA 2005
+ * (http://research.sun.com/scalable/pubs/index.html) and
+ * "Idempotent work stealing" by Michael, Saraswat, and Vechev,
+ * PPoPP 2009 (http://portal.acm.org/citation.cfm?id=1504186).
+ * The main differences ultimately stem from GC requirements that
+ * we null out taken slots as soon as we can, to maintain as small
+ * a footprint as possible even in programs generating huge
+ * numbers of tasks. To accomplish this, we shift the CAS
+ * arbitrating pop vs poll (steal) from being on the indices
+ * ("base" and "top") to the slots themselves. So, both a
+ * successful pop and poll mainly entail a CAS of a slot from
+ * non-null to null. Because we rely on CASes of references, we
+ * do not need tag bits on base or top. They are simple ints as
+ * used in any circular array-based queue (see for example
+ * ArrayDeque). Updates to the indices must still be ordered in a
+ * way that guarantees that top == base means the queue is empty,
+ * but otherwise may err on the side of possibly making the queue
+ * appear nonempty when a push, pop, or poll have not fully
+ * committed. Note that this means that the poll operation,
+ * considered individually, is not wait-free. One thief cannot
+ * successfully continue until another in-progress one (or, if
+ * previously empty, a push) completes. However, in the
+ * aggregate, we ensure at least probabilistic non-blockingness.
+ * If an attempted steal fails, a thief always chooses a different
+ * random victim target to try next. So, in order for one thief to
+ * progress, it suffices for any in-progress poll or new push on
+ * any empty queue to complete. (This is why we normally use
+ * method pollAt and its variants that try once at the apparent
+ * base index, else consider alternative actions, rather than
+ * method poll.)
+ *
+ * This approach also enables support of a user mode in which local
+ * task processing is in FIFO, not LIFO order, simply by using
+ * poll rather than pop. This can be useful in message-passing
+ * frameworks in which tasks are never joined. However neither
+ * mode considers affinities, loads, cache localities, etc, so
+ * rarely provide the best possible performance on a given
+ * machine, but portably provide good throughput by averaging over
+ * these factors. (Further, even if we did try to use such
+ * information, we do not usually have a basis for exploiting it.
+ * For example, some sets of tasks profit from cache affinities,
+ * but others are harmed by cache pollution effects.)
+ *
+ * WorkQueues are also used in a similar way for tasks submitted
+ * to the pool. We cannot mix these tasks in the same queues used
+ * for work-stealing (this would contaminate lifo/fifo
+ * processing). Instead, we randomly associate submission queues
+ * with submitting threads, using a form of hashing. The
+ * ThreadLocal Submitter class contains a value initially used as
+ * a hash code for choosing existing queues, but may be randomly
+ * repositioned upon contention with other submitters. In
+ * essence, submitters act like workers except that they are
+ * restricted to executing local tasks that they submitted (or in
+ * the case of CountedCompleters, others with the same root task).
+ * However, because most shared/external queue operations are more
+ * expensive than internal, and because, at steady state, external
+ * submitters will compete for CPU with workers, ForkJoinTask.join
+ * and related methods disable them from repeatedly helping to
+ * process tasks if all workers are active. Insertion of tasks in
+ * shared mode requires a lock (mainly to protect in the case of
+ * resizing) but we use only a simple spinlock (using bits in
+ * field qlock), because submitters encountering a busy queue move
+ * on to try or create other queues -- they block only when
+ * creating and registering new queues.
+ *
+ * Management
+ * ==========
+ *
+ * The main throughput advantages of work-stealing stem from
+ * decentralized control -- workers mostly take tasks from
+ * themselves or each other. We cannot negate this in the
+ * implementation of other management responsibilities. The main
+ * tactic for avoiding bottlenecks is packing nearly all
+ * essentially atomic control state into two volatile variables
+ * that are by far most often read (not written) as status and
+ * consistency checks.
+ *
+ * Field "ctl" contains 64 bits holding all the information needed
+ * to atomically decide to add, inactivate, enqueue (on an event
+ * queue), dequeue, and/or re-activate workers. To enable this
+ * packing, we restrict maximum parallelism to (1<<15)-1 (which is
+ * far in excess of normal operating range) to allow ids, counts,
+ * and their negations (used for thresholding) to fit into 16bit
+ * fields.
+ *
+ * Field "plock" is a form of sequence lock with a saturating
+ * shutdown bit (similarly for per-queue "qlocks"), mainly
+ * protecting updates to the workQueues array, as well as to
+ * enable shutdown. When used as a lock, it is normally only very
+ * briefly held, so is nearly always available after at most a
+ * brief spin, but we use a monitor-based backup strategy to
+ * block when needed.
+ *
+ * Recording WorkQueues. WorkQueues are recorded in the
+ * "workQueues" array that is created upon first use and expanded
+ * if necessary. Updates to the array while recording new workers
+ * and unrecording terminated ones are protected from each other
+ * by a lock but the array is otherwise concurrently readable, and
+ * accessed directly. To simplify index-based operations, the
+ * array size is always a power of two, and all readers must
+ * tolerate null slots. Worker queues are at odd indices. Shared
+ * (submission) queues are at even indices, up to a maximum of 64
+ * slots, to limit growth even if array needs to expand to add
+ * more workers. Grouping them together in this way simplifies and
+ * speeds up task scanning.
+ *
+ * All worker thread creation is on-demand, triggered by task
+ * submissions, replacement of terminated workers, and/or
+ * compensation for blocked workers. However, all other support
+ * code is set up to work with other policies. To ensure that we
+ * do not hold on to worker references that would prevent GC, ALL
+ * accesses to workQueues are via indices into the workQueues
+ * array (which is one source of some of the messy code
+ * constructions here). In essence, the workQueues array serves as
+ * a weak reference mechanism. Thus for example the wait queue
+ * field of ctl stores indices, not references. Access to the
+ * workQueues in associated methods (for example signalWork) must
+ * both index-check and null-check the IDs. All such accesses
+ * ignore bad IDs by returning out early from what they are doing,
+ * since this can only be associated with termination, in which
+ * case it is OK to give up. All uses of the workQueues array
+ * also check that it is non-null (even if previously
+ * non-null). This allows nulling during termination, which is
+ * currently not necessary, but remains an option for
+ * resource-revocation-based shutdown schemes. It also helps
+ * reduce JIT issuance of uncommon-trap code, which tends to
+ * unnecessarily complicate control flow in some methods.
+ *
+ * Event Queuing. Unlike HPC work-stealing frameworks, we cannot
+ * let workers spin indefinitely scanning for tasks when none can
+ * be found immediately, and we cannot start/resume workers unless
+ * there appear to be tasks available. On the other hand, we must
+ * quickly prod them into action when new tasks are submitted or
+ * generated. In many usages, ramp-up time to activate workers is
+ * the main limiting factor in overall performance (this is
+ * compounded at program start-up by JIT compilation and
+ * allocation). So we try to streamline this as much as possible.
+ * We park/unpark workers after placing in an event wait queue
+ * when they cannot find work. This "queue" is actually a simple
+ * Treiber stack, headed by the "id" field of ctl, plus a 15bit
+ * counter value (that reflects the number of times a worker has
+ * been inactivated) to avoid ABA effects (we need only as many
+ * version numbers as worker threads). Successors are held in
+ * field WorkQueue.nextWait. Queuing deals with several intrinsic
+ * races, mainly that a task-producing thread can miss seeing (and
+ * signalling) another thread that gave up looking for work but
+ * has not yet entered the wait queue. We solve this by requiring
+ * a full sweep of all workers (via repeated calls to method
+ * scan()) both before and after a newly waiting worker is added
+ * to the wait queue. During a rescan, the worker might release
+ * some other queued worker rather than itself, which has the same
+ * net effect. Because enqueued workers may actually be rescanning
+ * rather than waiting, we set and clear the "parker" field of
+ * WorkQueues to reduce unnecessary calls to unpark. (This
+ * requires a secondary recheck to avoid missed signals.) Note
+ * the unusual conventions about Thread.interrupts surrounding
+ * parking and other blocking: Because interrupts are used solely
+ * to alert threads to check termination, which is checked anyway
+ * upon blocking, we clear status (using Thread.interrupted)
+ * before any call to park, so that park does not immediately
+ * return due to status being set via some other unrelated call to
+ * interrupt in user code.
+ *
+ * Signalling. We create or wake up workers only when there
+ * appears to be at least one task they might be able to find and
+ * execute. However, many other threads may notice the same task
+ * and each signal to wake up a thread that might take it. So in
+ * general, pools will be over-signalled. When a submission is
+ * added or another worker adds a task to a queue that has fewer
+ * than two tasks, they signal waiting workers (or trigger
+ * creation of new ones if fewer than the given parallelism level
+ * -- signalWork), and may leave a hint to the unparked worker to
+ * help signal others upon wakeup). These primary signals are
+ * buttressed by others (see method helpSignal) whenever other
+ * threads scan for work or do not have a task to process. On
+ * most platforms, signalling (unpark) overhead time is noticeably
+ * long, and the time between signalling a thread and it actually
+ * making progress can be very noticeably long, so it is worth
+ * offloading these delays from critical paths as much as
+ * possible.
+ *
+ * Trimming workers. To release resources after periods of lack of
+ * use, a worker starting to wait when the pool is quiescent will
+ * time out and terminate if the pool has remained quiescent for a
+ * given period -- a short period if there are more threads than
+ * parallelism, longer as the number of threads decreases. This
+ * will slowly propagate, eventually terminating all workers after
+ * periods of non-use.
+ *
+ * Shutdown and Termination. A call to shutdownNow atomically sets
+ * a plock bit and then (non-atomically) sets each worker's
+ * qlock status, cancels all unprocessed tasks, and wakes up
+ * all waiting workers. Detecting whether termination should
+ * commence after a non-abrupt shutdown() call requires more work
+ * and bookkeeping. We need consensus about quiescence (i.e., that
+ * there is no more work). The active count provides a primary
+ * indication but non-abrupt shutdown still requires a rechecking
+ * scan for any workers that are inactive but not queued.
+ *
+ * Joining Tasks
+ * =============
+ *
+ * Any of several actions may be taken when one worker is waiting
+ * to join a task stolen (or always held) by another. Because we
+ * are multiplexing many tasks on to a pool of workers, we can't
+ * just let them block (as in Thread.join). We also cannot just
+ * reassign the joiner's run-time stack with another and replace
+ * it later, which would be a form of "continuation", that even if
+ * possible is not necessarily a good idea since we sometimes need
+ * both an unblocked task and its continuation to progress.
+ * Instead we combine two tactics:
+ *
+ * Helping: Arranging for the joiner to execute some task that it
+ * would be running if the steal had not occurred.
+ *
+ * Compensating: Unless there are already enough live threads,
+ * method tryCompensate() may create or re-activate a spare
+ * thread to compensate for blocked joiners until they unblock.
+ *
+ * A third form (implemented in tryRemoveAndExec) amounts to
+ * helping a hypothetical compensator: If we can readily tell that
+ * a possible action of a compensator is to steal and execute the
+ * task being joined, the joining thread can do so directly,
+ * without the need for a compensation thread (although at the
+ * expense of larger run-time stacks, but the tradeoff is
+ * typically worthwhile).
+ *
+ * The ManagedBlocker extension API can't use helping so relies
+ * only on compensation in method awaitBlocker.
+ *
+ * The algorithm in tryHelpStealer entails a form of "linear"
+ * helping: Each worker records (in field currentSteal) the most
+ * recent task it stole from some other worker. Plus, it records
+ * (in field currentJoin) the task it is currently actively
+ * joining. Method tryHelpStealer uses these markers to try to
+ * find a worker to help (i.e., steal back a task from and execute
+ * it) that could hasten completion of the actively joined task.
+ * In essence, the joiner executes a task that would be on its own
+ * local deque had the to-be-joined task not been stolen. This may
+ * be seen as a conservative variant of the approach in Wagner &
+ * Calder "Leapfrogging: a portable technique for implementing
+ * efficient futures" SIGPLAN Notices, 1993
+ * (http://portal.acm.org/citation.cfm?id=155354). It differs in
+ * that: (1) We only maintain dependency links across workers upon
+ * steals, rather than use per-task bookkeeping. This sometimes
+ * requires a linear scan of workQueues array to locate stealers,
+ * but often doesn't because stealers leave hints (that may become
+ * stale/wrong) of where to locate them. It is only a hint
+ * because a worker might have had multiple steals and the hint
+ * records only one of them (usually the most current). Hinting
+ * isolates cost to when it is needed, rather than adding to
+ * per-task overhead. (2) It is "shallow", ignoring nesting and
+ * potentially cyclic mutual steals. (3) It is intentionally
+ * racy: field currentJoin is updated only while actively joining,
+ * which means that we miss links in the chain during long-lived
+ * tasks, GC stalls etc (which is OK since blocking in such cases
+ * is usually a good idea). (4) We bound the number of attempts
+ * to find work (see MAX_HELP) and fall back to suspending the
+ * worker and if necessary replacing it with another.
+ *
+ * Helping actions for CountedCompleters are much simpler: Method
+ * helpComplete can take and execute any task with the same root
+ * as the task being waited on. However, this still entails some
+ * traversal of completer chains, so is less efficient than using
+ * CountedCompleters without explicit joins.
+ *
+ * It is impossible to keep exactly the target parallelism number
+ * of threads running at any given time. Determining the
+ * existence of conservatively safe helping targets, the
+ * availability of already-created spares, and the apparent need
+ * to create new spares are all racy, so we rely on multiple
+ * retries of each. Compensation in the apparent absence of
+ * helping opportunities is challenging to control on JVMs, where
+ * GC and other activities can stall progress of tasks that in
+ * turn stall out many other dependent tasks, without us being
+ * able to determine whether they will ever require compensation.
+ * Even though work-stealing otherwise encounters little
+ * degradation in the presence of more threads than cores,
+ * aggressively adding new threads in such cases entails risk of
+ * unwanted positive feedback control loops in which more threads
+ * cause more dependent stalls (as well as delayed progress of
+ * unblocked threads to the point that we know they are available)
+ * leading to more situations requiring more threads, and so
+ * on. This aspect of control can be seen as an (analytically
+ * intractable) game with an opponent that may choose the worst
+ * (for us) active thread to stall at any time. We take several
+ * precautions to bound losses (and thus bound gains), mainly in
+ * methods tryCompensate and awaitJoin.
+ *
+ * Common Pool
+ * ===========
+ *
+ * The static common Pool always exists after static
+ * initialization. Since it (or any other created pool) need
+ * never be used, we minimize initial construction overhead and
+ * footprint to the setup of about a dozen fields, with no nested
+ * allocation. Most bootstrapping occurs within method
+ * fullExternalPush during the first submission to the pool.
+ *
+ * When external threads submit to the common pool, they can
+ * perform some subtask processing (see externalHelpJoin and
+ * related methods). We do not need to record whether these
+ * submissions are to the common pool -- if not, externalHelpJoin
+ * returns quickly (at the most helping to signal some common pool
+ * workers). These submitters would otherwise be blocked waiting
+ * for completion, so the extra effort (with liberally sprinkled
+ * task status checks) in inapplicable cases amounts to an odd
+ * form of limited spin-wait before blocking in ForkJoinTask.join.
+ *
+ * Style notes
+ * ===========
+ *
+ * There is a lot of representation-level coupling among classes
+ * ForkJoinPool, ForkJoinWorkerThread, and ForkJoinTask. The
+ * fields of WorkQueue maintain data structures managed by
+ * ForkJoinPool, so are directly accessed. There is little point
+ * trying to reduce this, since any associated future changes in
+ * representations will need to be accompanied by algorithmic
+ * changes anyway. Several methods intrinsically sprawl because
+ * they must accumulate sets of consistent reads of volatiles held
+ * in local variables. Methods signalWork() and scan() are the
+ * main bottlenecks, so are especially heavily
+ * micro-optimized/mangled. There are lots of inline assignments
+ * (of form "while ((local = field) != 0)") which are usually the
+ * simplest way to ensure the required read orderings (which are
+ * sometimes critical). This leads to a "C"-like style of listing
+ * declarations of these locals at the heads of methods or blocks.
+ * There are several occurrences of the unusual "do {} while
+ * (!cas...)" which is the simplest way to force an update of a
+ * CAS'ed variable. There are also other coding oddities (including
+ * several unnecessary-looking hoisted null checks) that help
+ * some methods perform reasonably even when interpreted (not
+ * compiled).
+ *
+ * The order of declarations in this file is:
+ * (1) Static utility functions
+ * (2) Nested (static) classes
+ * (3) Static fields
+ * (4) Fields, along with constants used when unpacking some of them
+ * (5) Internal control methods
+ * (6) Callbacks and other support for ForkJoinTask methods
+ * (7) Exported methods
+ * (8) Static block initializing statics in minimally dependent order
+ */
+
+ // Static utilities
+
+ /**
+ * If there is a security manager, makes sure caller has
+ * permission to modify threads.
+ */
+ private static void checkPermission() {
+ SecurityManager security = System.getSecurityManager();
+ if (security != null)
+ security.checkPermission(modifyThreadPermission);
+ }
+
+ // Nested classes
+
+ /**
+ * Factory for creating new {@link ForkJoinWorkerThread}s.
+ * A {@code ForkJoinWorkerThreadFactory} must be defined and used
+ * for {@code ForkJoinWorkerThread} subclasses that extend base
+ * functionality or initialize threads with different contexts.
+ */
+ public static interface ForkJoinWorkerThreadFactory {
+ /**
+ * Returns a new worker thread operating in the given pool.
+ *
+ * @param pool the pool this thread works in
+ * @throws NullPointerException if the pool is null
+ */
+ public ForkJoinWorkerThread newThread(ForkJoinPool pool);
+ }
+
+ /**
+ * Default ForkJoinWorkerThreadFactory implementation; creates a
+ * new ForkJoinWorkerThread.
+ */
+ static final class DefaultForkJoinWorkerThreadFactory
+ implements ForkJoinWorkerThreadFactory {
+ public final ForkJoinWorkerThread newThread(ForkJoinPool pool) {
+ return new ForkJoinWorkerThread(pool);
+ }
+ }
+
+ /**
+ * Per-thread records for threads that submit to pools. Currently
+ * holds only pseudo-random seed / index that is used to choose
+ * submission queues in method externalPush. In the future, this may
+ * also incorporate a means to implement different task rejection
+ * and resubmission policies.
+ *
+ * Seeds for submitters and workers/workQueues work in basically
+ * the same way but are initialized and updated using slightly
+ * different mechanics. Both are initialized using the same
+ * approach as in class ThreadLocal, where successive values are
+ * unlikely to collide with previous values. Seeds are then
+ * randomly modified upon collisions using xorshifts, which
+ * requires a non-zero seed.
+ */
+ static final class Submitter {
+ int seed;
+ Submitter(int s) { seed = s; }
+ }
+
+ /**
+ * Class for artificial tasks that are used to replace the target
+ * of local joins if they are removed from an interior queue slot
+ * in WorkQueue.tryRemoveAndExec. We don't need the proxy to
+ * actually do anything beyond having a unique identity.
+ */
+ static final class EmptyTask extends ForkJoinTask<Void> {
+ private static final long serialVersionUID = -7721805057305804111L;
+ EmptyTask() { status = ForkJoinTask.NORMAL; } // force done
+ public final Void getRawResult() { return null; }
+ public final void setRawResult(Void x) {}
+ public final boolean exec() { return true; }
+ }
+
+ /**
+ * Queues supporting work-stealing as well as external task
+ * submission. See above for main rationale and algorithms.
+ * Implementation relies heavily on "Unsafe" intrinsics
+ * and selective use of "volatile":
+ *
+ * Field "base" is the index (mod array.length) of the least valid
+ * queue slot, which is always the next position to steal (poll)
+ * from if nonempty. Reads and writes require volatile orderings
+ * but not CAS, because updates are only performed after slot
+ * CASes.
+ *
+ * Field "top" is the index (mod array.length) of the next queue
+ * slot to push to or pop from. It is written only by owner thread
+ * for push, or under lock for external/shared push, and accessed
+ * by other threads only after reading (volatile) base. Both top
+ * and base are allowed to wrap around on overflow, but (top -
+ * base) (or more commonly -(base - top) to force volatile read of
+ * base before top) still estimates size. The lock ("qlock") is
+ * forced to -1 on termination, causing all further lock attempts
+ * to fail. (Note: we don't need CAS for termination state because
+ * upon pool shutdown, all shared-queues will stop being used
+ * anyway.) Nearly all lock bodies are set up so that exceptions
+ * within lock bodies are "impossible" (modulo JVM errors that
+ * would cause failure anyway.)
+ *
+ * The array slots are read and written using the emulation of
+ * volatiles/atomics provided by Unsafe. Insertions must in
+ * general use putOrderedObject as a form of releasing store to
+ * ensure that all writes to the task object are ordered before
+ * its publication in the queue. All removals entail a CAS to
+ * null. The array is always a power of two. To ensure safety of
+ * Unsafe array operations, all accesses perform explicit null
+ * checks and implicit bounds checks via power-of-two masking.
+ *
+ * In addition to basic queuing support, this class contains
+ * fields described elsewhere to control execution. It turns out
+ * to work better memory-layout-wise to include them in this class
+ * rather than a separate class.
+ *
+ * Performance on most platforms is very sensitive to placement of
+ * instances of both WorkQueues and their arrays -- we absolutely
+ * do not want multiple WorkQueue instances or multiple queue
+ * arrays sharing cache lines. (It would be best for queue objects
+ * and their arrays to share, but there is nothing available to
+ * help arrange that). Unfortunately, because they are recorded
+ * in a common array, WorkQueue instances are often moved to be
+ * adjacent by garbage collectors. To reduce impact, we use field
+ * padding that works OK on common platforms; this effectively
+ * trades off slightly slower average field access for the sake of
+ * avoiding really bad worst-case access. (Until better JVM
+ * support is in place, this padding is dependent on transient
+ * properties of JVM field layout rules.) We also take care in
+ * allocating, sizing and resizing the array. Non-shared queue
+ * arrays are initialized by workers before use. Others are
+ * allocated on first use.
+ */
+ static final class WorkQueue {
+ /**
+ * Capacity of work-stealing queue array upon initialization.
+ * Must be a power of two; at least 4, but should be larger to
+ * reduce or eliminate cacheline sharing among queues.
+ * Currently, it is much larger, as a partial workaround for
+ * the fact that JVMs often place arrays in locations that
+ * share GC bookkeeping (especially cardmarks) such that
+ * per-write accesses encounter serious memory contention.
+ */
+ static final int INITIAL_QUEUE_CAPACITY = 1 << 13;
+
+ /**
+ * Maximum size for queue arrays. Must be a power of two less
+ * than or equal to 1 << (31 - width of array entry) to ensure
+ * lack of wraparound of index calculations, but defined to a
+ * value a bit less than this to help users trap runaway
+ * programs before saturating systems.
+ */
+ static final int MAXIMUM_QUEUE_CAPACITY = 1 << 26; // 64M
+
+ // Heuristic padding to ameliorate unfortunate memory placements
+ volatile long pad00, pad01, pad02, pad03, pad04, pad05, pad06;
+
+ int seed; // for random scanning; initialize nonzero
+ volatile int eventCount; // encoded inactivation count; < 0 if inactive
+ int nextWait; // encoded record of next event waiter
+ int hint; // steal or signal hint (index)
+ int poolIndex; // index of this queue in pool (or 0)
+ final int mode; // 0: lifo, > 0: fifo, < 0: shared
+ int nsteals; // number of steals
+ volatile int qlock; // 1: locked, -1: terminate; else 0
+ volatile int base; // index of next slot for poll
+ int top; // index of next slot for push
+ ForkJoinTask<?>[] array; // the elements (initially unallocated)
+ final ForkJoinPool pool; // the containing pool (may be null)
+ final ForkJoinWorkerThread owner; // owning thread or null if shared
+ volatile Thread parker; // == owner during call to park; else null
+ volatile ForkJoinTask<?> currentJoin; // task being joined in awaitJoin
+ ForkJoinTask<?> currentSteal; // current non-local task being executed
+
+ volatile Object pad10, pad11, pad12, pad13, pad14, pad15, pad16, pad17;
+ volatile Object pad18, pad19, pad1a, pad1b, pad1c, pad1d;
+
+ WorkQueue(ForkJoinPool pool, ForkJoinWorkerThread owner, int mode,
+ int seed) {
+ this.pool = pool;
+ this.owner = owner;
+ this.mode = mode;
+ this.seed = seed;
+ // Place indices in the center of array (that is not yet allocated)
+ base = top = INITIAL_QUEUE_CAPACITY >>> 1;
+ }
+
+ /**
+ * Returns the approximate number of tasks in the queue.
+ */
+ final int queueSize() {
+ int n = base - top; // non-owner callers must read base first
+ return (n >= 0) ? 0 : -n; // ignore transient negative
+ }
+
+ /**
+ * Provides a more accurate estimate of whether this queue has
+ * any tasks than does queueSize, by checking whether a
+ * near-empty queue has at least one unclaimed task.
+ */
+ final boolean isEmpty() {
+ ForkJoinTask<?>[] a; int m, s;
+ int n = base - (s = top);
+ return (n >= 0 ||
+ (n == -1 &&
+ ((a = array) == null ||
+ (m = a.length - 1) < 0 ||
+ U.getObject
+ (a, (long)((m & (s - 1)) << ASHIFT) + ABASE) == null)));
+ }
+
+ /**
+ * Pushes a task. Call only by owner in unshared queues. (The
+ * shared-queue version is embedded in method externalPush.)
+ *
+ * @param task the task. Caller must ensure non-null.
+ * @throws RejectedExecutionException if array cannot be resized
+ */
+ final void push(ForkJoinTask<?> task) {
+ ForkJoinTask<?>[] a; ForkJoinPool p;
+ int s = top, m, n;
+ if ((a = array) != null) { // ignore if queue removed
+ int j = (((m = a.length - 1) & s) << ASHIFT) + ABASE;
+ U.putOrderedObject(a, j, task);
+ if ((n = (top = s + 1) - base) <= 2) {
+ if ((p = pool) != null)
+ p.signalWork(this);
+ }
+ else if (n >= m)
+ growArray();
+ }
+ }
+
+ /**
+ * Initializes or doubles the capacity of array. Call either
+ * by owner or with lock held -- it is OK for base, but not
+ * top, to move while resizings are in progress.
+ */
+ final ForkJoinTask<?>[] growArray() {
+ ForkJoinTask<?>[] oldA = array;
+ int size = oldA != null ? oldA.length << 1 : INITIAL_QUEUE_CAPACITY;
+ if (size > MAXIMUM_QUEUE_CAPACITY)
+ throw new RejectedExecutionException("Queue capacity exceeded");
+ int oldMask, t, b;
+ ForkJoinTask<?>[] a = array = new ForkJoinTask<?>[size];
+ if (oldA != null && (oldMask = oldA.length - 1) >= 0 &&
+ (t = top) - (b = base) > 0) {
+ int mask = size - 1;
+ do {
+ ForkJoinTask<?> x;
+ int oldj = ((b & oldMask) << ASHIFT) + ABASE;
+ int j = ((b & mask) << ASHIFT) + ABASE;
+ x = (ForkJoinTask<?>)U.getObjectVolatile(oldA, oldj);
+ if (x != null &&
+ U.compareAndSwapObject(oldA, oldj, x, null))
+ U.putObjectVolatile(a, j, x);
+ } while (++b != t);
+ }
+ return a;
+ }
+
+ /**
+ * Takes next task, if one exists, in LIFO order. Call only
+ * by owner in unshared queues.
+ */
+ final ForkJoinTask<?> pop() {
+ ForkJoinTask<?>[] a; ForkJoinTask<?> t; int m;
+ if ((a = array) != null && (m = a.length - 1) >= 0) {
+ for (int s; (s = top - 1) - base >= 0;) {
+ long j = ((m & s) << ASHIFT) + ABASE;
+ if ((t = (ForkJoinTask<?>)U.getObject(a, j)) == null)
+ break;
+ if (U.compareAndSwapObject(a, j, t, null)) {
+ top = s;
+ return t;
+ }
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Takes a task in FIFO order if b is base of queue and a task
+ * can be claimed without contention. Specialized versions
+ * appear in ForkJoinPool methods scan and tryHelpStealer.
+ */
+ final ForkJoinTask<?> pollAt(int b) {
+ ForkJoinTask<?> t; ForkJoinTask<?>[] a;
+ if ((a = array) != null) {
+ int j = (((a.length - 1) & b) << ASHIFT) + ABASE;
+ if ((t = (ForkJoinTask<?>)U.getObjectVolatile(a, j)) != null &&
+ base == b &&
+ U.compareAndSwapObject(a, j, t, null)) {
+ base = b + 1;
+ return t;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Takes next task, if one exists, in FIFO order.
+ */
+ final ForkJoinTask<?> poll() {
+ ForkJoinTask<?>[] a; int b; ForkJoinTask<?> t;
+ while ((b = base) - top < 0 && (a = array) != null) {
+ int j = (((a.length - 1) & b) << ASHIFT) + ABASE;
+ t = (ForkJoinTask<?>)U.getObjectVolatile(a, j);
+ if (t != null) {
+ if (base == b &&
+ U.compareAndSwapObject(a, j, t, null)) {
+ base = b + 1;
+ return t;
+ }
+ }
+ else if (base == b) {
+ if (b + 1 == top)
+ break;
+ Thread.yield(); // wait for lagging update (very rare)
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Takes next task, if one exists, in order specified by mode.
+ */
+ final ForkJoinTask<?> nextLocalTask() {
+ return mode == 0 ? pop() : poll();
+ }
+
+ /**
+ * Returns next task, if one exists, in order specified by mode.
+ */
+ final ForkJoinTask<?> peek() {
+ ForkJoinTask<?>[] a = array; int m;
+ if (a == null || (m = a.length - 1) < 0)
+ return null;
+ int i = mode == 0 ? top - 1 : base;
+ int j = ((i & m) << ASHIFT) + ABASE;
+ return (ForkJoinTask<?>)U.getObjectVolatile(a, j);
+ }
+
+ /**
+ * Pops the given task only if it is at the current top.
+ * (A shared version is available only via FJP.tryExternalUnpush)
+ */
+ final boolean tryUnpush(ForkJoinTask<?> t) {
+ ForkJoinTask<?>[] a; int s;
+ if ((a = array) != null && (s = top) != base &&
+ U.compareAndSwapObject
+ (a, (((a.length - 1) & --s) << ASHIFT) + ABASE, t, null)) {
+ top = s;
+ return true;
+ }
+ return false;
+ }
- /*
- * See the extended comments interspersed below for design,
- * rationale, and walkthroughs.
- */
+ /**
+ * Removes and cancels all known tasks, ignoring any exceptions.
+ */
+ final void cancelAll() {
+ ForkJoinTask.cancelIgnoringExceptions(currentJoin);
+ ForkJoinTask.cancelIgnoringExceptions(currentSteal);
+ for (ForkJoinTask<?> t; (t = poll()) != null; )
+ ForkJoinTask.cancelIgnoringExceptions(t);
+ }
- /** Mask for packing and unpacking shorts */
- private static final int shortMask = 0xffff;
+ /**
+ * Computes next value for random probes. Scans don't require
+ * a very high quality generator, but also not a crummy one.
+ * Marsaglia xor-shift is cheap and works well enough. Note:
+ * This is manually inlined in its usages in ForkJoinPool to
+ * avoid writes inside busy scan loops.
+ */
+ final int nextSeed() {
+ int r = seed;
+ r ^= r << 13;
+ r ^= r >>> 17;
+ return seed = r ^= r << 5;
+ }
- /** Max pool size -- must be a power of two minus 1 */
- private static final int MAX_THREADS = 0x7FFF;
+ // Specialized execution methods
- // placeholder for java.util.concurrent.RunnableFuture
- interface RunnableFuture<T> extends Runnable {
- }
+ /**
+ * Pops and runs tasks until empty.
+ */
+ private void popAndExecAll() {
+ // A bit faster than repeated pop calls
+ ForkJoinTask<?>[] a; int m, s; long j; ForkJoinTask<?> t;
+ while ((a = array) != null && (m = a.length - 1) >= 0 &&
+ (s = top - 1) - base >= 0 &&
+ (t = ((ForkJoinTask<?>)
+ U.getObject(a, j = ((m & s) << ASHIFT) + ABASE)))
+ != null) {
+ if (U.compareAndSwapObject(a, j, t, null)) {
+ top = s;
+ t.doExec();
+ }
+ }
+ }
- /**
- * Factory for creating new ForkJoinWorkerThreads. A
- * ForkJoinWorkerThreadFactory must be defined and used for
- * ForkJoinWorkerThread subclasses that extend base functionality
- * or initialize threads with different contexts.
- */
- public static interface ForkJoinWorkerThreadFactory {
/**
- * Returns a new worker thread operating in the given pool.
+ * Polls and runs tasks until empty.
+ */
+ private void pollAndExecAll() {
+ for (ForkJoinTask<?> t; (t = poll()) != null;)
+ t.doExec();
+ }
+
+ /**
+ * If present, removes from queue and executes the given task,
+ * or any other cancelled task. Returns (true) on any CAS
+ * or consistency check failure so caller can retry.
*
- * @param pool the pool this thread works in
- * @throws NullPointerException if pool is null;
+ * @return false if no progress can be made, else true
*/
- public ForkJoinWorkerThread newThread(ForkJoinPool pool);
- }
+ final boolean tryRemoveAndExec(ForkJoinTask<?> task) {
+ boolean stat = true, removed = false, empty = true;
+ ForkJoinTask<?>[] a; int m, s, b, n;
+ if ((a = array) != null && (m = a.length - 1) >= 0 &&
+ (n = (s = top) - (b = base)) > 0) {
+ for (ForkJoinTask<?> t;;) { // traverse from s to b
+ int j = ((--s & m) << ASHIFT) + ABASE;
+ t = (ForkJoinTask<?>)U.getObjectVolatile(a, j);
+ if (t == null) // inconsistent length
+ break;
+ else if (t == task) {
+ if (s + 1 == top) { // pop
+ if (!U.compareAndSwapObject(a, j, task, null))
+ break;
+ top = s;
+ removed = true;
+ }
+ else if (base == b) // replace with proxy
+ removed = U.compareAndSwapObject(a, j, task,
+ new EmptyTask());
+ break;
+ }
+ else if (t.status >= 0)
+ empty = false;
+ else if (s + 1 == top) { // pop and throw away
+ if (U.compareAndSwapObject(a, j, t, null))
+ top = s;
+ break;
+ }
+ if (--n == 0) {
+ if (!empty && base == b)
+ stat = false;
+ break;
+ }
+ }
+ }
+ if (removed)
+ task.doExec();
+ return stat;
+ }
- /**
- * Default ForkJoinWorkerThreadFactory implementation, creates a
- * new ForkJoinWorkerThread.
- */
- static class DefaultForkJoinWorkerThreadFactory
- implements ForkJoinWorkerThreadFactory {
- public ForkJoinWorkerThread newThread(ForkJoinPool pool) {
+ /**
+ * Polls for and executes the given task or any other task in
+ * its CountedCompleter computation.
+ */
+ final boolean pollAndExecCC(ForkJoinTask<?> root) {
+ ForkJoinTask<?>[] a; int b; Object o;
+ outer: while ((b = base) - top < 0 && (a = array) != null) {
+ long j = (((a.length - 1) & b) << ASHIFT) + ABASE;
+ if ((o = U.getObject(a, j)) == null ||
+ !(o instanceof CountedCompleter))
+ break;
+ for (CountedCompleter<?> t = (CountedCompleter<?>)o, r = t;;) {
+ if (r == root) {
+ if (base == b &&
+ U.compareAndSwapObject(a, j, t, null)) {
+ base = b + 1;
+ t.doExec();
+ return true;
+ }
+ else
+ break; // restart
+ }
+ if ((r = r.completer) == null)
+ break outer; // not part of root computation
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Executes a top-level task and any local tasks remaining
+ * after execution.
+ */
+ final void runTask(ForkJoinTask<?> t) {
+ if (t != null) {
+ (currentSteal = t).doExec();
+ currentSteal = null;
+ ++nsteals;
+ if (base - top < 0) { // process remaining local tasks
+ if (mode == 0)
+ popAndExecAll();
+ else
+ pollAndExecAll();
+ }
+ }
+ }
+
+ /**
+ * Executes a non-top-level (stolen) task.
+ */
+ final void runSubtask(ForkJoinTask<?> t) {
+ if (t != null) {
+ ForkJoinTask<?> ps = currentSteal;
+ (currentSteal = t).doExec();
+ currentSteal = ps;
+ }
+ }
+
+ /**
+ * Returns true if owned and not known to be blocked.
+ */
+ final boolean isApparentlyUnblocked() {
+ Thread wt; Thread.State s;
+ return (eventCount >= 0 &&
+ (wt = owner) != null &&
+ (s = wt.getState()) != Thread.State.BLOCKED &&
+ s != Thread.State.WAITING &&
+ s != Thread.State.TIMED_WAITING);
+ }
+
+ // Unsafe mechanics
+ private static final sun.misc.Unsafe U;
+ private static final long QLOCK;
+ private static final int ABASE;
+ private static final int ASHIFT;
+ static {
try {
- return new ForkJoinWorkerThread(pool);
- } catch (OutOfMemoryError oom) {
- return null;
+ U = getUnsafe();
+ Class<?> k = WorkQueue.class;
+ Class<?> ak = ForkJoinTask[].class;
+ QLOCK = U.objectFieldOffset
+ (k.getDeclaredField("qlock"));
+ ABASE = U.arrayBaseOffset(ak);
+ int scale = U.arrayIndexScale(ak);
+ if ((scale & (scale - 1)) != 0)
+ throw new Error("data type scale not a power of two");
+ ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
+ } catch (Exception e) {
+ throw new Error(e);
}
}
}
+ // static fields (initialized in static initializer below)
+
/**
* Creates a new ForkJoinWorkerThread. This factory is used unless
* overridden in ForkJoinPool constructors.
*/
public static final ForkJoinWorkerThreadFactory
- defaultForkJoinWorkerThreadFactory =
- new DefaultForkJoinWorkerThreadFactory();
+ defaultForkJoinWorkerThreadFactory;
+
+ /**
+ * Per-thread submission bookkeeping. Shared across all pools
+ * to reduce ThreadLocal pollution and because random motion
+ * to avoid contention in one pool is likely to hold for others.
+ * Lazily initialized on first submission (but null-checked
+ * in other contexts to avoid unnecessary initialization).
+ */
+ static final ThreadLocal<Submitter> submitters;
/**
* Permission required for callers of methods that may start or
* kill threads.
*/
- private static final RuntimePermission modifyThreadPermission =
- new RuntimePermission("modifyThread");
+ private static final RuntimePermission modifyThreadPermission;
/**
- * If there is a security manager, makes sure caller has
- * permission to modify threads.
+ * Common (static) pool. Non-null for public use unless a static
+ * construction exception, but internal usages null-check on use
+ * to paranoically avoid potential initialization circularities
+ * as well as to simplify generated code.
*/
- private static void checkPermission() {
- SecurityManager security = System.getSecurityManager();
- if (security != null)
- security.checkPermission(modifyThreadPermission);
- }
+ static final ForkJoinPool common;
/**
- * Generator for assigning sequence numbers as pool names.
+ * Common pool parallelism. Must equal common.parallelism.
*/
- private static final AtomicInteger poolNumberGenerator =
- new AtomicInteger();
+ static final int commonParallelism;
/**
- * Array holding all worker threads in the pool. Initialized upon
- * first use. Array size must be a power of two. Updates and
- * replacements are protected by workerLock, but it is always kept
- * in a consistent enough state to be randomly accessed without
- * locking by workers performing work-stealing.
+ * Sequence number for creating workerNamePrefix.
*/
- public volatile ForkJoinWorkerThread[] workers;
+ private static int poolNumberSequence;
/**
- * Lock protecting access to workers.
+ * Returns the next sequence number. We don't expect this to
+ * ever contend, so use simple builtin sync.
*/
- private final ReentrantLock workerLock;
+ private static final synchronized int nextPoolId() {
+ return ++poolNumberSequence;
+ }
+
+ // static constants
/**
- * Condition for awaitTermination.
+ * Initial timeout value (in nanoseconds) for the thread
+ * triggering quiescence to park waiting for new work. On timeout,
+ * the thread will instead try to shrink the number of
+ * workers. The value should be large enough to avoid overly
+ * aggressive shrinkage during most transient stalls (long GCs
+ * etc).
*/
- private final Condition termination;
+ private static final long IDLE_TIMEOUT = 2000L * 1000L * 1000L; // 2sec
/**
- * The uncaught exception handler used when any worker
- * abrupty terminates
+ * Timeout value when there are more threads than parallelism level
*/
- private Thread.UncaughtExceptionHandler ueh;
+ private static final long FAST_IDLE_TIMEOUT = 200L * 1000L * 1000L;
/**
- * Creation factory for worker threads.
+ * Tolerance for idle timeouts, to cope with timer undershoots
*/
- private final ForkJoinWorkerThreadFactory factory;
+ private static final long TIMEOUT_SLOP = 2000000L;
/**
- * Head of stack of threads that were created to maintain
- * parallelism when other threads blocked, but have since
- * suspended when the parallelism level rose.
+ * The maximum stolen->joining link depth allowed in method
+ * tryHelpStealer. Must be a power of two. Depths for legitimate
+ * chains are unbounded, but we use a fixed constant to avoid
+ * (otherwise unchecked) cycles and to bound staleness of
+ * traversal parameters at the expense of sometimes blocking when
+ * we could be helping.
*/
- private volatile WaitQueueNode spareStack;
+ private static final int MAX_HELP = 64;
/**
- * Sum of per-thread steal counts, updated only when threads are
- * idle or terminating.
+ * Increment for seed generators. See class ThreadLocal for
+ * explanation.
*/
- private final AtomicLong stealCount;
+ private static final int SEED_INCREMENT = 0x61c88647;
+
+ /*
+ * Bits and masks for control variables
+ *
+ * Field ctl is a long packed with:
+ * AC: Number of active running workers minus target parallelism (16 bits)
+ * TC: Number of total workers minus target parallelism (16 bits)
+ * ST: true if pool is terminating (1 bit)
+ * EC: the wait count of top waiting thread (15 bits)
+ * ID: poolIndex of top of Treiber stack of waiters (16 bits)
+ *
+ * When convenient, we can extract the upper 32 bits of counts and
+ * the lower 32 bits of queue state, u = (int)(ctl >>> 32) and e =
+ * (int)ctl. The ec field is never accessed alone, but always
+ * together with id and st. The offsets of counts by the target
+ * parallelism and the positionings of fields makes it possible to
+ * perform the most common checks via sign tests of fields: When
+ * ac is negative, there are not enough active workers, when tc is
+ * negative, there are not enough total workers, and when e is
+ * negative, the pool is terminating. To deal with these possibly
+ * negative fields, we use casts in and out of "short" and/or
+ * signed shifts to maintain signedness.
+ *
+ * When a thread is queued (inactivated), its eventCount field is
+ * set negative, which is the only way to tell if a worker is
+ * prevented from executing tasks, even though it must continue to
+ * scan for them to avoid queuing races. Note however that
+ * eventCount updates lag releases so usage requires care.
+ *
+ * Field plock is an int packed with:
+ * SHUTDOWN: true if shutdown is enabled (1 bit)
+ * SEQ: a sequence lock, with PL_LOCK bit set if locked (30 bits)
+ * SIGNAL: set when threads may be waiting on the lock (1 bit)
+ *
+ * The sequence number enables simple consistency checks:
+ * Staleness of read-only operations on the workQueues array can
+ * be checked by comparing plock before vs after the reads.
+ */
+
+ // bit positions/shifts for fields
+ private static final int AC_SHIFT = 48;
+ private static final int TC_SHIFT = 32;
+ private static final int ST_SHIFT = 31;
+ private static final int EC_SHIFT = 16;
+
+ // bounds
+ private static final int SMASK = 0xffff; // short bits
+ private static final int MAX_CAP = 0x7fff; // max #workers - 1
+ private static final int EVENMASK = 0xfffe; // even short bits
+ private static final int SQMASK = 0x007e; // max 64 (even) slots
+ private static final int SHORT_SIGN = 1 << 15;
+ private static final int INT_SIGN = 1 << 31;
+
+ // masks
+ private static final long STOP_BIT = 0x0001L << ST_SHIFT;
+ private static final long AC_MASK = ((long)SMASK) << AC_SHIFT;
+ private static final long TC_MASK = ((long)SMASK) << TC_SHIFT;
+
+ // units for incrementing and decrementing
+ private static final long TC_UNIT = 1L << TC_SHIFT;
+ private static final long AC_UNIT = 1L << AC_SHIFT;
+
+ // masks and units for dealing with u = (int)(ctl >>> 32)
+ private static final int UAC_SHIFT = AC_SHIFT - 32;
+ private static final int UTC_SHIFT = TC_SHIFT - 32;
+ private static final int UAC_MASK = SMASK << UAC_SHIFT;
+ private static final int UTC_MASK = SMASK << UTC_SHIFT;
+ private static final int UAC_UNIT = 1 << UAC_SHIFT;
+ private static final int UTC_UNIT = 1 << UTC_SHIFT;
+
+ // masks and units for dealing with e = (int)ctl
+ private static final int E_MASK = 0x7fffffff; // no STOP_BIT
+ private static final int E_SEQ = 1 << EC_SHIFT;
+
+ // plock bits
+ private static final int SHUTDOWN = 1 << 31;
+ private static final int PL_LOCK = 2;
+ private static final int PL_SIGNAL = 1;
+ private static final int PL_SPINS = 1 << 8;
+
+ // access mode for WorkQueue
+ static final int LIFO_QUEUE = 0;
+ static final int FIFO_QUEUE = 1;
+ static final int SHARED_QUEUE = -1;
+
+ // bounds for #steps in scan loop -- must be power 2 minus 1
+ private static final int MIN_SCAN = 0x1ff; // cover estimation slop
+ private static final int MAX_SCAN = 0x1ffff; // 4 * max workers
+
+ // Instance fields
+
+ /*
+ * Field layout of this class tends to matter more than one would
+ * like. Runtime layout order is only loosely related to
+ * declaration order and may differ across JVMs, but the following
+ * empirically works OK on current JVMs.
+ */
+
+ // Heuristic padding to ameliorate unfortunate memory placements
+ volatile long pad00, pad01, pad02, pad03, pad04, pad05, pad06;
+
+ volatile long stealCount; // collects worker counts
+ volatile long ctl; // main pool control
+ volatile int plock; // shutdown status and seqLock
+ volatile int indexSeed; // worker/submitter index seed
+ final int config; // mode and parallelism level
+ WorkQueue[] workQueues; // main registry
+ final ForkJoinWorkerThreadFactory factory;
+ final Thread.UncaughtExceptionHandler ueh; // per-worker UEH
+ final String workerNamePrefix; // to create worker name string
+
+ volatile Object pad10, pad11, pad12, pad13, pad14, pad15, pad16, pad17;
+ volatile Object pad18, pad19, pad1a, pad1b;
+
+ /**
+ * Acquires the plock lock to protect worker array and related
+ * updates. This method is called only if an initial CAS on plock
+ * fails. This acts as a spinlock for normal cases, but falls back
+ * to builtin monitor to block when (rarely) needed. This would be
+ * a terrible idea for a highly contended lock, but works fine as
+ * a more conservative alternative to a pure spinlock.
+ */
+ private int acquirePlock() {
+ int spins = PL_SPINS, r = 0, ps, nps;
+ for (;;) {
+ if (((ps = plock) & PL_LOCK) == 0 &&
+ U.compareAndSwapInt(this, PLOCK, ps, nps = ps + PL_LOCK))
+ return nps;
+ else if (r == 0) { // randomize spins if possible
+ Thread t = Thread.currentThread(); WorkQueue w; Submitter z;
+ if ((t instanceof ForkJoinWorkerThread) &&
+ (w = ((ForkJoinWorkerThread)t).workQueue) != null)
+ r = w.seed;
+ else if ((z = submitters.get()) != null)
+ r = z.seed;
+ else
+ r = 1;
+ }
+ else if (spins >= 0) {
+ r ^= r << 1; r ^= r >>> 3; r ^= r << 10; // xorshift
+ if (r >= 0)
+ --spins;
+ }
+ else if (U.compareAndSwapInt(this, PLOCK, ps, ps | PL_SIGNAL)) {
+ synchronized (this) {
+ if ((plock & PL_SIGNAL) != 0) {
+ try {
+ wait();
+ } catch (InterruptedException ie) {
+ try {
+ Thread.currentThread().interrupt();
+ } catch (SecurityException ignore) {
+ }
+ }
+ }
+ else
+ notifyAll();
+ }
+ }
+ }
+ }
/**
- * Queue for external submissions.
+ * Unlocks and signals any thread waiting for plock. Called only
+ * when CAS of seq value for unlock fails.
*/
- private final LinkedTransferQueue<ForkJoinTask<?>> submissionQueue;
+ private void releasePlock(int ps) {
+ plock = ps;
+ synchronized (this) { notifyAll(); }
+ }
/**
- * Head of Treiber stack for barrier sync. See below for explanation
+ * Tries to create and start one worker if fewer than target
+ * parallelism level exist. Adjusts counts etc on failure.
*/
- private volatile WaitQueueNode syncStack;
+ private void tryAddWorker() {
+ long c; int u;
+ while ((u = (int)((c = ctl) >>> 32)) < 0 &&
+ (u & SHORT_SIGN) != 0 && (int)c == 0) {
+ long nc = (long)(((u + UTC_UNIT) & UTC_MASK) |
+ ((u + UAC_UNIT) & UAC_MASK)) << 32;
+ if (U.compareAndSwapLong(this, CTL, c, nc)) {
+ ForkJoinWorkerThreadFactory fac;
+ Throwable ex = null;
+ ForkJoinWorkerThread wt = null;
+ try {
+ if ((fac = factory) != null &&
+ (wt = fac.newThread(this)) != null) {
+ wt.start();
+ break;
+ }
+ } catch (Throwable e) {
+ ex = e;
+ }
+ deregisterWorker(wt, ex);
+ break;
+ }
+ }
+ }
+
+ // Registering and deregistering workers
/**
- * The count for event barrier
- */
- private volatile long eventCount;
+ * Callback from ForkJoinWorkerThread to establish and record its
+ * WorkQueue. To avoid scanning bias due to packing entries in
+ * front of the workQueues array, we treat the array as a simple
+ * power-of-two hash table using per-thread seed as hash,
+ * expanding as needed.
+ *
+ * @param wt the worker thread
+ * @return the worker's queue
+ */
+ final WorkQueue registerWorker(ForkJoinWorkerThread wt) {
+ Thread.UncaughtExceptionHandler handler; WorkQueue[] ws; int s, ps;
+ wt.setDaemon(true);
+ if ((handler = ueh) != null)
+ wt.setUncaughtExceptionHandler(handler);
+ do {} while (!U.compareAndSwapInt(this, INDEXSEED, s = indexSeed,
+ s += SEED_INCREMENT) ||
+ s == 0); // skip 0
+ WorkQueue w = new WorkQueue(this, wt, config >>> 16, s);
+ if (((ps = plock) & PL_LOCK) != 0 ||
+ !U.compareAndSwapInt(this, PLOCK, ps, ps += PL_LOCK))
+ ps = acquirePlock();
+ int nps = (ps & SHUTDOWN) | ((ps + PL_LOCK) & ~SHUTDOWN);
+ try {
+ if ((ws = workQueues) != null) { // skip if shutting down
+ int n = ws.length, m = n - 1;
+ int r = (s << 1) | 1; // use odd-numbered indices
+ if (ws[r &= m] != null) { // collision
+ int probes = 0; // step by approx half size
+ int step = (n <= 4) ? 2 : ((n >>> 1) & EVENMASK) + 2;
+ while (ws[r = (r + step) & m] != null) {
+ if (++probes >= n) {
+ workQueues = ws = Arrays.copyOf(ws, n <<= 1);
+ m = n - 1;
+ probes = 0;
+ }
+ }
+ }
+ w.eventCount = w.poolIndex = r; // volatile write orders
+ ws[r] = w;
+ }
+ } finally {
+ if (!U.compareAndSwapInt(this, PLOCK, ps, nps))
+ releasePlock(nps);
+ }
+ wt.setName(workerNamePrefix.concat(Integer.toString(w.poolIndex)));
+ return w;
+ }
+
+ /**
+ * Final callback from terminating worker, as well as upon failure
+ * to construct or start a worker. Removes record of worker from
+ * array, and adjusts counts. If pool is shutting down, tries to
+ * complete termination.
+ *
+ * @param wt the worker thread or null if construction failed
+ * @param ex the exception causing failure, or null if none
+ */
+ final void deregisterWorker(ForkJoinWorkerThread wt, Throwable ex) {
+ WorkQueue w = null;
+ if (wt != null && (w = wt.workQueue) != null) {
+ int ps;
+ w.qlock = -1; // ensure set
+ long ns = w.nsteals, sc; // collect steal count
+ do {} while (!U.compareAndSwapLong(this, STEALCOUNT,
+ sc = stealCount, sc + ns));
+ if (((ps = plock) & PL_LOCK) != 0 ||
+ !U.compareAndSwapInt(this, PLOCK, ps, ps += PL_LOCK))
+ ps = acquirePlock();
+ int nps = (ps & SHUTDOWN) | ((ps + PL_LOCK) & ~SHUTDOWN);
+ try {
+ int idx = w.poolIndex;
+ WorkQueue[] ws = workQueues;
+ if (ws != null && idx >= 0 && idx < ws.length && ws[idx] == w)
+ ws[idx] = null;
+ } finally {
+ if (!U.compareAndSwapInt(this, PLOCK, ps, nps))
+ releasePlock(nps);
+ }
+ }
+
+ long c; // adjust ctl counts
+ do {} while (!U.compareAndSwapLong
+ (this, CTL, c = ctl, (((c - AC_UNIT) & AC_MASK) |
+ ((c - TC_UNIT) & TC_MASK) |
+ (c & ~(AC_MASK|TC_MASK)))));
+
+ if (!tryTerminate(false, false) && w != null && w.array != null) {
+ w.cancelAll(); // cancel remaining tasks
+ WorkQueue[] ws; WorkQueue v; Thread p; int u, i, e;
+ while ((u = (int)((c = ctl) >>> 32)) < 0 && (e = (int)c) >= 0) {
+ if (e > 0) { // activate or create replacement
+ if ((ws = workQueues) == null ||
+ (i = e & SMASK) >= ws.length ||
+ (v = ws[i]) == null)
+ break;
+ long nc = (((long)(v.nextWait & E_MASK)) |
+ ((long)(u + UAC_UNIT) << 32));
+ if (v.eventCount != (e | INT_SIGN))
+ break;
+ if (U.compareAndSwapLong(this, CTL, c, nc)) {
+ v.eventCount = (e + E_SEQ) & E_MASK;
+ if ((p = v.parker) != null)
+ U.unpark(p);
+ break;
+ }
+ }
+ else {
+ if ((short)u < 0)
+ tryAddWorker();
+ break;
+ }
+ }
+ }
+ if (ex == null) // help clean refs on way out
+ ForkJoinTask.helpExpungeStaleExceptions();
+ else // rethrow
+ ForkJoinTask.rethrow(ex);
+ }
+
+ // Submissions
+
+ /**
+ * Unless shutting down, adds the given task to a submission queue
+ * at submitter's current queue index (modulo submission
+ * range). Only the most common path is directly handled in this
+ * method. All others are relayed to fullExternalPush.
+ *
+ * @param task the task. Caller must ensure non-null.
+ */
+ final void externalPush(ForkJoinTask<?> task) {
+ WorkQueue[] ws; WorkQueue q; Submitter z; int m; ForkJoinTask<?>[] a;
+ if ((z = submitters.get()) != null && plock > 0 &&
+ (ws = workQueues) != null && (m = (ws.length - 1)) >= 0 &&
+ (q = ws[m & z.seed & SQMASK]) != null &&
+ U.compareAndSwapInt(q, QLOCK, 0, 1)) { // lock
+ int b = q.base, s = q.top, n, an;
+ if ((a = q.array) != null && (an = a.length) > (n = s + 1 - b)) {
+ int j = (((an - 1) & s) << ASHIFT) + ABASE;
+ U.putOrderedObject(a, j, task);
+ q.top = s + 1; // push on to deque
+ q.qlock = 0;
+ if (n <= 2)
+ signalWork(q);
+ return;
+ }
+ q.qlock = 0;
+ }
+ fullExternalPush(task);
+ }
+
+ /**
+ * Full version of externalPush. This method is called, among
+ * other times, upon the first submission of the first task to the
+ * pool, so must perform secondary initialization. It also
+ * detects first submission by an external thread by looking up
+ * its ThreadLocal, and creates a new shared queue if the one at
+ * index if empty or contended. The plock lock body must be
+ * exception-free (so no try/finally) so we optimistically
+ * allocate new queues outside the lock and throw them away if
+ * (very rarely) not needed.
+ *
+ * Secondary initialization occurs when plock is zero, to create
+ * workQueue array and set plock to a valid value. This lock body
+ * must also be exception-free. Because the plock seq value can
+ * eventually wrap around zero, this method harmlessly fails to
+ * reinitialize if workQueues exists, while still advancing plock.
+ */
+ private void fullExternalPush(ForkJoinTask<?> task) {
+ int r = 0; // random index seed
+ for (Submitter z = submitters.get();;) {
+ WorkQueue[] ws; WorkQueue q; int ps, m, k;
+ if (z == null) {
+ if (U.compareAndSwapInt(this, INDEXSEED, r = indexSeed,
+ r += SEED_INCREMENT) && r != 0)
+ submitters.set(z = new Submitter(r));
+ }
+ else if (r == 0) { // move to a different index
+ r = z.seed;
+ r ^= r << 13; // same xorshift as WorkQueues
+ r ^= r >>> 17;
+ z.seed = r ^ (r << 5);
+ }
+ else if ((ps = plock) < 0)
+ throw new RejectedExecutionException();
+ else if (ps == 0 || (ws = workQueues) == null ||
+ (m = ws.length - 1) < 0) { // initialize workQueues
+ int p = config & SMASK; // find power of two table size
+ int n = (p > 1) ? p - 1 : 1; // ensure at least 2 slots
+ n |= n >>> 1; n |= n >>> 2; n |= n >>> 4;
+ n |= n >>> 8; n |= n >>> 16; n = (n + 1) << 1;
+ WorkQueue[] nws = ((ws = workQueues) == null || ws.length == 0 ?
+ new WorkQueue[n] : null);
+ if (((ps = plock) & PL_LOCK) != 0 ||
+ !U.compareAndSwapInt(this, PLOCK, ps, ps += PL_LOCK))
+ ps = acquirePlock();
+ if (((ws = workQueues) == null || ws.length == 0) && nws != null)
+ workQueues = nws;
+ int nps = (ps & SHUTDOWN) | ((ps + PL_LOCK) & ~SHUTDOWN);
+ if (!U.compareAndSwapInt(this, PLOCK, ps, nps))
+ releasePlock(nps);
+ }
+ else if ((q = ws[k = r & m & SQMASK]) != null) {
+ if (q.qlock == 0 && U.compareAndSwapInt(q, QLOCK, 0, 1)) {
+ ForkJoinTask<?>[] a = q.array;
+ int s = q.top;
+ boolean submitted = false;
+ try { // locked version of push
+ if ((a != null && a.length > s + 1 - q.base) ||
+ (a = q.growArray()) != null) { // must presize
+ int j = (((a.length - 1) & s) << ASHIFT) + ABASE;
+ U.putOrderedObject(a, j, task);
+ q.top = s + 1;
+ submitted = true;
+ }
+ } finally {
+ q.qlock = 0; // unlock
+ }
+ if (submitted) {
+ signalWork(q);
+ return;
+ }
+ }
+ r = 0; // move on failure
+ }
+ else if (((ps = plock) & PL_LOCK) == 0) { // create new queue
+ q = new WorkQueue(this, null, SHARED_QUEUE, r);
+ if (((ps = plock) & PL_LOCK) != 0 ||
+ !U.compareAndSwapInt(this, PLOCK, ps, ps += PL_LOCK))
+ ps = acquirePlock();
+ if ((ws = workQueues) != null && k < ws.length && ws[k] == null)
+ ws[k] = q;
+ int nps = (ps & SHUTDOWN) | ((ps + PL_LOCK) & ~SHUTDOWN);
+ if (!U.compareAndSwapInt(this, PLOCK, ps, nps))
+ releasePlock(nps);
+ }
+ else
+ r = 0; // try elsewhere while lock held
+ }
+ }
+
+ // Maintaining ctl counts
/**
- * Pool number, just for assigning useful names to worker threads
+ * Increments active count; mainly called upon return from blocking.
*/
- private final int poolNumber;
+ final void incrementActiveCount() {
+ long c;
+ do {} while (!U.compareAndSwapLong(this, CTL, c = ctl, c + AC_UNIT));
+ }
+
+ /**
+ * Tries to create or activate a worker if too few are active.
+ *
+ * @param q the (non-null) queue holding tasks to be signalled
+ */
+ final void signalWork(WorkQueue q) {
+ int hint = q.poolIndex;
+ long c; int e, u, i, n; WorkQueue[] ws; WorkQueue w; Thread p;
+ while ((u = (int)((c = ctl) >>> 32)) < 0) {
+ if ((e = (int)c) > 0) {
+ if ((ws = workQueues) != null && ws.length > (i = e & SMASK) &&
+ (w = ws[i]) != null && w.eventCount == (e | INT_SIGN)) {
+ long nc = (((long)(w.nextWait & E_MASK)) |
+ ((long)(u + UAC_UNIT) << 32));
+ if (U.compareAndSwapLong(this, CTL, c, nc)) {
+ w.hint = hint;
+ w.eventCount = (e + E_SEQ) & E_MASK;
+ if ((p = w.parker) != null)
+ U.unpark(p);
+ break;
+ }
+ if (q.top - q.base <= 0)
+ break;
+ }
+ else
+ break;
+ }
+ else {
+ if ((short)u < 0)
+ tryAddWorker();
+ break;
+ }
+ }
+ }
+
+ // Scanning for tasks
/**
- * The maximum allowed pool size
+ * Top-level runloop for workers, called by ForkJoinWorkerThread.run.
*/
- private volatile int maxPoolSize;
+ final void runWorker(WorkQueue w) {
+ w.growArray(); // allocate queue
+ do { w.runTask(scan(w)); } while (w.qlock >= 0);
+ }
+
+ /**
+ * Scans for and, if found, returns one task, else possibly
+ * inactivates the worker. This method operates on single reads of
+ * volatile state and is designed to be re-invoked continuously,
+ * in part because it returns upon detecting inconsistencies,
+ * contention, or state changes that indicate possible success on
+ * re-invocation.
+ *
+ * The scan searches for tasks across queues (starting at a random
+ * index, and relying on registerWorker to irregularly scatter
+ * them within array to avoid bias), checking each at least twice.
+ * The scan terminates upon either finding a non-empty queue, or
+ * completing the sweep. If the worker is not inactivated, it
+ * takes and returns a task from this queue. Otherwise, if not
+ * activated, it signals workers (that may include itself) and
+ * returns so caller can retry. Also returns for true if the
+ * worker array may have changed during an empty scan. On failure
+ * to find a task, we take one of the following actions, after
+ * which the caller will retry calling this method unless
+ * terminated.
+ *
+ * * If pool is terminating, terminate the worker.
+ *
+ * * If not already enqueued, try to inactivate and enqueue the
+ * worker on wait queue. Or, if inactivating has caused the pool
+ * to be quiescent, relay to idleAwaitWork to possibly shrink
+ * pool.
+ *
+ * * If already enqueued and none of the above apply, possibly
+ * park awaiting signal, else lingering to help scan and signal.
+ *
+ * * If a non-empty queue discovered or left as a hint,
+ * help wake up other workers before return.
+ *
+ * @param w the worker (via its WorkQueue)
+ * @return a task or null if none found
+ */
+ private final ForkJoinTask<?> scan(WorkQueue w) {
+ WorkQueue[] ws; int m;
+ int ps = plock; // read plock before ws
+ if (w != null && (ws = workQueues) != null && (m = ws.length - 1) >= 0) {
+ int ec = w.eventCount; // ec is negative if inactive
+ int r = w.seed; r ^= r << 13; r ^= r >>> 17; w.seed = r ^= r << 5;
+ w.hint = -1; // update seed and clear hint
+ int j = ((m + m + 1) | MIN_SCAN) & MAX_SCAN;
+ do {
+ WorkQueue q; ForkJoinTask<?>[] a; int b;
+ if ((q = ws[(r + j) & m]) != null && (b = q.base) - q.top < 0 &&
+ (a = q.array) != null) { // probably nonempty
+ int i = (((a.length - 1) & b) << ASHIFT) + ABASE;
+ ForkJoinTask<?> t = (ForkJoinTask<?>)
+ U.getObjectVolatile(a, i);
+ if (q.base == b && ec >= 0 && t != null &&
+ U.compareAndSwapObject(a, i, t, null)) {
+ if ((q.base = b + 1) - q.top < 0)
+ signalWork(q);
+ return t; // taken
+ }
+ else if ((ec < 0 || j < m) && (int)(ctl >> AC_SHIFT) <= 0) {
+ w.hint = (r + j) & m; // help signal below
+ break; // cannot take
+ }
+ }
+ } while (--j >= 0);
+
+ int h, e, ns; long c, sc; WorkQueue q;
+ if ((ns = w.nsteals) != 0) {
+ if (U.compareAndSwapLong(this, STEALCOUNT,
+ sc = stealCount, sc + ns))
+ w.nsteals = 0; // collect steals and rescan
+ }
+ else if (plock != ps) // consistency check
+ ; // skip
+ else if ((e = (int)(c = ctl)) < 0)
+ w.qlock = -1; // pool is terminating
+ else {
+ if ((h = w.hint) < 0) {
+ if (ec >= 0) { // try to enqueue/inactivate
+ long nc = (((long)ec |
+ ((c - AC_UNIT) & (AC_MASK|TC_MASK))));
+ w.nextWait = e; // link and mark inactive
+ w.eventCount = ec | INT_SIGN;
+ if (ctl != c || !U.compareAndSwapLong(this, CTL, c, nc))
+ w.eventCount = ec; // unmark on CAS failure
+ else if ((int)(c >> AC_SHIFT) == 1 - (config & SMASK))
+ idleAwaitWork(w, nc, c);
+ }
+ else if (w.eventCount < 0 && ctl == c) {
+ Thread wt = Thread.currentThread();
+ Thread.interrupted(); // clear status
+ U.putObject(wt, PARKBLOCKER, this);
+ w.parker = wt; // emulate LockSupport.park
+ if (w.eventCount < 0) // recheck
+ U.park(false, 0L); // block
+ w.parker = null;
+ U.putObject(wt, PARKBLOCKER, null);
+ }
+ }
+ if ((h >= 0 || (h = w.hint) >= 0) &&
+ (ws = workQueues) != null && h < ws.length &&
+ (q = ws[h]) != null) { // signal others before retry
+ WorkQueue v; Thread p; int u, i, s;
+ for (int n = (config & SMASK) - 1;;) {
+ int idleCount = (w.eventCount < 0) ? 0 : -1;
+ if (((s = idleCount - q.base + q.top) <= n &&
+ (n = s) <= 0) ||
+ (u = (int)((c = ctl) >>> 32)) >= 0 ||
+ (e = (int)c) <= 0 || m < (i = e & SMASK) ||
+ (v = ws[i]) == null)
+ break;
+ long nc = (((long)(v.nextWait & E_MASK)) |
+ ((long)(u + UAC_UNIT) << 32));
+ if (v.eventCount != (e | INT_SIGN) ||
+ !U.compareAndSwapLong(this, CTL, c, nc))
+ break;
+ v.hint = h;
+ v.eventCount = (e + E_SEQ) & E_MASK;
+ if ((p = v.parker) != null)
+ U.unpark(p);
+ if (--n <= 0)
+ break;
+ }
+ }
+ }
+ }
+ return null;
+ }
/**
- * The desired parallelism level, updated only under workerLock.
+ * If inactivating worker w has caused the pool to become
+ * quiescent, checks for pool termination, and, so long as this is
+ * not the only worker, waits for event for up to a given
+ * duration. On timeout, if ctl has not changed, terminates the
+ * worker, which will in turn wake up another worker to possibly
+ * repeat this process.
+ *
+ * @param w the calling worker
+ * @param currentCtl the ctl value triggering possible quiescence
+ * @param prevCtl the ctl value to restore if thread is terminated
+ */
+ private void idleAwaitWork(WorkQueue w, long currentCtl, long prevCtl) {
+ if (w != null && w.eventCount < 0 &&
+ !tryTerminate(false, false) && (int)prevCtl != 0 &&
+ ctl == currentCtl) {
+ int dc = -(short)(currentCtl >>> TC_SHIFT);
+ long parkTime = dc < 0 ? FAST_IDLE_TIMEOUT: (dc + 1) * IDLE_TIMEOUT;
+ long deadline = System.nanoTime() + parkTime - TIMEOUT_SLOP;
+ Thread wt = Thread.currentThread();
+ while (ctl == currentCtl) {
+ Thread.interrupted(); // timed variant of version in scan()
+ U.putObject(wt, PARKBLOCKER, this);
+ w.parker = wt;
+ if (ctl == currentCtl)
+ U.park(false, parkTime);
+ w.parker = null;
+ U.putObject(wt, PARKBLOCKER, null);
+ if (ctl != currentCtl)
+ break;
+ if (deadline - System.nanoTime() <= 0L &&
+ U.compareAndSwapLong(this, CTL, currentCtl, prevCtl)) {
+ w.eventCount = (w.eventCount + E_SEQ) | E_MASK;
+ w.hint = -1;
+ w.qlock = -1; // shrink
+ break;
+ }
+ }
+ }
+ }
+
+ /**
+ * Scans through queues looking for work while joining a task; if
+ * any present, signals. May return early if more signalling is
+ * detectably unneeded.
+ *
+ * @param task return early if done
+ * @param origin an index to start scan
+ */
+ private void helpSignal(ForkJoinTask<?> task, int origin) {
+ WorkQueue[] ws; WorkQueue w; Thread p; long c; int m, u, e, i, s;
+ if (task != null && task.status >= 0 &&
+ (u = (int)(ctl >>> 32)) < 0 && (u >> UAC_SHIFT) < 0 &&
+ (ws = workQueues) != null && (m = ws.length - 1) >= 0) {
+ outer: for (int k = origin, j = m; j >= 0; --j) {
+ WorkQueue q = ws[k++ & m];
+ for (int n = m;;) { // limit to at most m signals
+ if (task.status < 0)
+ break outer;
+ if (q == null ||
+ ((s = -q.base + q.top) <= n && (n = s) <= 0))
+ break;
+ if ((u = (int)((c = ctl) >>> 32)) >= 0 ||
+ (e = (int)c) <= 0 || m < (i = e & SMASK) ||
+ (w = ws[i]) == null)
+ break outer;
+ long nc = (((long)(w.nextWait & E_MASK)) |
+ ((long)(u + UAC_UNIT) << 32));
+ if (w.eventCount != (e | INT_SIGN))
+ break outer;
+ if (U.compareAndSwapLong(this, CTL, c, nc)) {
+ w.eventCount = (e + E_SEQ) & E_MASK;
+ if ((p = w.parker) != null)
+ U.unpark(p);
+ if (--n <= 0)
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Tries to locate and execute tasks for a stealer of the given
+ * task, or in turn one of its stealers, Traces currentSteal ->
+ * currentJoin links looking for a thread working on a descendant
+ * of the given task and with a non-empty queue to steal back and
+ * execute tasks from. The first call to this method upon a
+ * waiting join will often entail scanning/search, (which is OK
+ * because the joiner has nothing better to do), but this method
+ * leaves hints in workers to speed up subsequent calls. The
+ * implementation is very branchy to cope with potential
+ * inconsistencies or loops encountering chains that are stale,
+ * unknown, or so long that they are likely cyclic.
+ *
+ * @param joiner the joining worker
+ * @param task the task to join
+ * @return 0 if no progress can be made, negative if task
+ * known complete, else positive
+ */
+ private int tryHelpStealer(WorkQueue joiner, ForkJoinTask<?> task) {
+ int stat = 0, steps = 0; // bound to avoid cycles
+ if (joiner != null && task != null) { // hoist null checks
+ restart: for (;;) {
+ ForkJoinTask<?> subtask = task; // current target
+ for (WorkQueue j = joiner, v;;) { // v is stealer of subtask
+ WorkQueue[] ws; int m, s, h;
+ if ((s = task.status) < 0) {
+ stat = s;
+ break restart;
+ }
+ if ((ws = workQueues) == null || (m = ws.length - 1) <= 0)
+ break restart; // shutting down
+ if ((v = ws[h = (j.hint | 1) & m]) == null ||
+ v.currentSteal != subtask) {
+ for (int origin = h;;) { // find stealer
+ if (((h = (h + 2) & m) & 15) == 1 &&
+ (subtask.status < 0 || j.currentJoin != subtask))
+ continue restart; // occasional staleness check
+ if ((v = ws[h]) != null &&
+ v.currentSteal == subtask) {
+ j.hint = h; // save hint
+ break;
+ }
+ if (h == origin)
+ break restart; // cannot find stealer
+ }
+ }
+ for (;;) { // help stealer or descend to its stealer
+ ForkJoinTask[] a; int b;
+ if (subtask.status < 0) // surround probes with
+ continue restart; // consistency checks
+ if ((b = v.base) - v.top < 0 && (a = v.array) != null) {
+ int i = (((a.length - 1) & b) << ASHIFT) + ABASE;
+ ForkJoinTask<?> t =
+ (ForkJoinTask<?>)U.getObjectVolatile(a, i);
+ if (subtask.status < 0 || j.currentJoin != subtask ||
+ v.currentSteal != subtask)
+ continue restart; // stale
+ stat = 1; // apparent progress
+ if (t != null && v.base == b &&
+ U.compareAndSwapObject(a, i, t, null)) {
+ v.base = b + 1; // help stealer
+ joiner.runSubtask(t);
+ }
+ else if (v.base == b && ++steps == MAX_HELP)
+ break restart; // v apparently stalled
+ }
+ else { // empty -- try to descend
+ ForkJoinTask<?> next = v.currentJoin;
+ if (subtask.status < 0 || j.currentJoin != subtask ||
+ v.currentSteal != subtask)
+ continue restart; // stale
+ else if (next == null || ++steps == MAX_HELP)
+ break restart; // dead-end or maybe cyclic
+ else {
+ subtask = next;
+ j = v;
+ break;
+ }
+ }
+ }
+ }
+ }
+ }
+ return stat;
+ }
+
+ /**
+ * Analog of tryHelpStealer for CountedCompleters. Tries to steal
+ * and run tasks within the target's computation.
+ *
+ * @param task the task to join
+ * @param mode if shared, exit upon completing any task
+ * if all workers are active
+ */
+ private int helpComplete(ForkJoinTask<?> task, int mode) {
+ WorkQueue[] ws; WorkQueue q; int m, n, s, u;
+ if (task != null && (ws = workQueues) != null &&
+ (m = ws.length - 1) >= 0) {
+ for (int j = 1, origin = j;;) {
+ if ((s = task.status) < 0)
+ return s;
+ if ((q = ws[j & m]) != null && q.pollAndExecCC(task)) {
+ origin = j;
+ if (mode == SHARED_QUEUE &&
+ ((u = (int)(ctl >>> 32)) >= 0 || (u >> UAC_SHIFT) >= 0))
+ break;
+ }
+ else if ((j = (j + 2) & m) == origin)
+ break;
+ }
+ }
+ return 0;
+ }
+
+ /**
+ * Tries to decrement active count (sometimes implicitly) and
+ * possibly release or create a compensating worker in preparation
+ * for blocking. Fails on contention or termination. Otherwise,
+ * adds a new thread if no idle workers are available and pool
+ * may become starved.
+ */
+ final boolean tryCompensate() {
+ int pc = config & SMASK, e, i, tc; long c;
+ WorkQueue[] ws; WorkQueue w; Thread p;
+ if ((ws = workQueues) != null && (e = (int)(c = ctl)) >= 0) {
+ if (e != 0 && (i = e & SMASK) < ws.length &&
+ (w = ws[i]) != null && w.eventCount == (e | INT_SIGN)) {
+ long nc = ((long)(w.nextWait & E_MASK) |
+ (c & (AC_MASK|TC_MASK)));
+ if (U.compareAndSwapLong(this, CTL, c, nc)) {
+ w.eventCount = (e + E_SEQ) & E_MASK;
+ if ((p = w.parker) != null)
+ U.unpark(p);
+ return true; // replace with idle worker
+ }
+ }
+ else if ((tc = (short)(c >>> TC_SHIFT)) >= 0 &&
+ (int)(c >> AC_SHIFT) + pc > 1) {
+ long nc = ((c - AC_UNIT) & AC_MASK) | (c & ~AC_MASK);
+ if (U.compareAndSwapLong(this, CTL, c, nc))
+ return true; // no compensation
+ }
+ else if (tc + pc < MAX_CAP) {
+ long nc = ((c + TC_UNIT) & TC_MASK) | (c & ~TC_MASK);
+ if (U.compareAndSwapLong(this, CTL, c, nc)) {
+ ForkJoinWorkerThreadFactory fac;
+ Throwable ex = null;
+ ForkJoinWorkerThread wt = null;
+ try {
+ if ((fac = factory) != null &&
+ (wt = fac.newThread(this)) != null) {
+ wt.start();
+ return true;
+ }
+ } catch (Throwable rex) {
+ ex = rex;
+ }
+ deregisterWorker(wt, ex); // clean up and return false
+ }
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Helps and/or blocks until the given task is done.
+ *
+ * @param joiner the joining worker
+ * @param task the task
+ * @return task status on exit
+ */
+ final int awaitJoin(WorkQueue joiner, ForkJoinTask<?> task) {
+ int s = 0;
+ if (joiner != null && task != null && (s = task.status) >= 0) {
+ ForkJoinTask<?> prevJoin = joiner.currentJoin;
+ joiner.currentJoin = task;
+ do {} while ((s = task.status) >= 0 && !joiner.isEmpty() &&
+ joiner.tryRemoveAndExec(task)); // process local tasks
+ if (s >= 0 && (s = task.status) >= 0) {
+ helpSignal(task, joiner.poolIndex);
+ if ((s = task.status) >= 0 &&
+ (task instanceof CountedCompleter))
+ s = helpComplete(task, LIFO_QUEUE);
+ }
+ while (s >= 0 && (s = task.status) >= 0) {
+ if ((!joiner.isEmpty() || // try helping
+ (s = tryHelpStealer(joiner, task)) == 0) &&
+ (s = task.status) >= 0) {
+ helpSignal(task, joiner.poolIndex);
+ if ((s = task.status) >= 0 && tryCompensate()) {
+ if (task.trySetSignal() && (s = task.status) >= 0) {
+ synchronized (task) {
+ if (task.status >= 0) {
+ try { // see ForkJoinTask
+ task.wait(); // for explanation
+ } catch (InterruptedException ie) {
+ }
+ }
+ else
+ task.notifyAll();
+ }
+ }
+ long c; // re-activate
+ do {} while (!U.compareAndSwapLong
+ (this, CTL, c = ctl, c + AC_UNIT));
+ }
+ }
+ }
+ joiner.currentJoin = prevJoin;
+ }
+ return s;
+ }
+
+ /**
+ * Stripped-down variant of awaitJoin used by timed joins. Tries
+ * to help join only while there is continuous progress. (Caller
+ * will then enter a timed wait.)
+ *
+ * @param joiner the joining worker
+ * @param task the task
*/
- private volatile int parallelism;
+ final void helpJoinOnce(WorkQueue joiner, ForkJoinTask<?> task) {
+ int s;
+ if (joiner != null && task != null && (s = task.status) >= 0) {
+ ForkJoinTask<?> prevJoin = joiner.currentJoin;
+ joiner.currentJoin = task;
+ do {} while ((s = task.status) >= 0 && !joiner.isEmpty() &&
+ joiner.tryRemoveAndExec(task));
+ if (s >= 0 && (s = task.status) >= 0) {
+ helpSignal(task, joiner.poolIndex);
+ if ((s = task.status) >= 0 &&
+ (task instanceof CountedCompleter))
+ s = helpComplete(task, LIFO_QUEUE);
+ }
+ if (s >= 0 && joiner.isEmpty()) {
+ do {} while (task.status >= 0 &&
+ tryHelpStealer(joiner, task) > 0);
+ }
+ joiner.currentJoin = prevJoin;
+ }
+ }
/**
- * True if use local fifo, not default lifo, for local polling
+ * Returns a (probably) non-empty steal queue, if one is found
+ * during a scan, else null. This method must be retried by
+ * caller if, by the time it tries to use the queue, it is empty.
+ * @param r a (random) seed for scanning
*/
- private volatile boolean locallyFifo;
+ private WorkQueue findNonEmptyStealQueue(int r) {
+ for (;;) {
+ int ps = plock, m; WorkQueue[] ws; WorkQueue q;
+ if ((ws = workQueues) != null && (m = ws.length - 1) >= 0) {
+ for (int j = (m + 1) << 2; j >= 0; --j) {
+ if ((q = ws[(((r + j) << 1) | 1) & m]) != null &&
+ q.base - q.top < 0)
+ return q;
+ }
+ }
+ if (plock == ps)
+ return null;
+ }
+ }
/**
- * Holds number of total (i.e., created and not yet terminated)
- * and running (i.e., not blocked on joins or other managed sync)
- * threads, packed into one int to ensure consistent snapshot when
- * making decisions about creating and suspending spare
- * threads. Updated only by CAS. Note: CASes in
- * updateRunningCount and preJoin running active count is in low
- * word, so need to be modified if this changes
+ * Runs tasks until {@code isQuiescent()}. We piggyback on
+ * active count ctl maintenance, but rather than blocking
+ * when tasks cannot be found, we rescan until all others cannot
+ * find tasks either.
*/
- private volatile int workerCounts;
-
- private static int totalCountOf(int s) { return s >>> 16; }
- private static int runningCountOf(int s) { return s & shortMask; }
- private static int workerCountsFor(int t, int r) { return (t << 16) + r; }
+ final void helpQuiescePool(WorkQueue w) {
+ for (boolean active = true;;) {
+ long c; WorkQueue q; ForkJoinTask<?> t; int b;
+ while ((t = w.nextLocalTask()) != null) {
+ if (w.base - w.top < 0)
+ signalWork(w);
+ t.doExec();
+ }
+ if ((q = findNonEmptyStealQueue(w.nextSeed())) != null) {
+ if (!active) { // re-establish active count
+ active = true;
+ do {} while (!U.compareAndSwapLong
+ (this, CTL, c = ctl, c + AC_UNIT));
+ }
+ if ((b = q.base) - q.top < 0 && (t = q.pollAt(b)) != null) {
+ if (q.base - q.top < 0)
+ signalWork(q);
+ w.runSubtask(t);
+ }
+ }
+ else if (active) { // decrement active count without queuing
+ long nc = (c = ctl) - AC_UNIT;
+ if ((int)(nc >> AC_SHIFT) + (config & SMASK) == 0)
+ return; // bypass decrement-then-increment
+ if (U.compareAndSwapLong(this, CTL, c, nc))
+ active = false;
+ }
+ else if ((int)((c = ctl) >> AC_SHIFT) + (config & SMASK) == 0 &&
+ U.compareAndSwapLong(this, CTL, c, c + AC_UNIT))
+ return;
+ }
+ }
/**
- * Add delta (which may be negative) to running count. This must
- * be called before (with negative arg) and after (with positive)
- * any managed synchronization (i.e., mainly, joins)
- * @param delta the number to add
- */
- final void updateRunningCount(int delta) {
- int s;
- do;while (!casWorkerCounts(s = workerCounts, s + delta));
+ * Gets and removes a local or stolen task for the given worker.
+ *
+ * @return a task, if available
+ */
+ final ForkJoinTask<?> nextTaskFor(WorkQueue w) {
+ for (ForkJoinTask<?> t;;) {
+ WorkQueue q; int b;
+ if ((t = w.nextLocalTask()) != null)
+ return t;
+ if ((q = findNonEmptyStealQueue(w.nextSeed())) == null)
+ return null;
+ if ((b = q.base) - q.top < 0 && (t = q.pollAt(b)) != null) {
+ if (q.base - q.top < 0)
+ signalWork(q);
+ return t;
+ }
+ }
}
/**
- * Add delta (which may be negative) to both total and running
- * count. This must be called upon creation and termination of
- * worker threads.
- * @param delta the number to add
- */
- private void updateWorkerCount(int delta) {
- int d = delta + (delta << 16); // add to both lo and hi parts
- int s;
- do;while (!casWorkerCounts(s = workerCounts, s + d));
+ * Returns a cheap heuristic guide for task partitioning when
+ * programmers, frameworks, tools, or languages have little or no
+ * idea about task granularity. In essence by offering this
+ * method, we ask users only about tradeoffs in overhead vs
+ * expected throughput and its variance, rather than how finely to
+ * partition tasks.
+ *
+ * In a steady state strict (tree-structured) computation, each
+ * thread makes available for stealing enough tasks for other
+ * threads to remain active. Inductively, if all threads play by
+ * the same rules, each thread should make available only a
+ * constant number of tasks.
+ *
+ * The minimum useful constant is just 1. But using a value of 1
+ * would require immediate replenishment upon each steal to
+ * maintain enough tasks, which is infeasible. Further,
+ * partitionings/granularities of offered tasks should minimize
+ * steal rates, which in general means that threads nearer the top
+ * of computation tree should generate more than those nearer the
+ * bottom. In perfect steady state, each thread is at
+ * approximately the same level of computation tree. However,
+ * producing extra tasks amortizes the uncertainty of progress and
+ * diffusion assumptions.
+ *
+ * So, users will want to use values larger (but not much larger)
+ * than 1 to both smooth over transient shortages and hedge
+ * against uneven progress; as traded off against the cost of
+ * extra task overhead. We leave the user to pick a threshold
+ * value to compare with the results of this call to guide
+ * decisions, but recommend values such as 3.
+ *
+ * When all threads are active, it is on average OK to estimate
+ * surplus strictly locally. In steady-state, if one thread is
+ * maintaining say 2 surplus tasks, then so are others. So we can
+ * just use estimated queue length. However, this strategy alone
+ * leads to serious mis-estimates in some non-steady-state
+ * conditions (ramp-up, ramp-down, other stalls). We can detect
+ * many of these by further considering the number of "idle"
+ * threads, that are known to have zero queued tasks, so
+ * compensate by a factor of (#idle/#active) threads.
+ *
+ * Note: The approximation of #busy workers as #active workers is
+ * not very good under current signalling scheme, and should be
+ * improved.
+ */
+ static int getSurplusQueuedTaskCount() {
+ Thread t; ForkJoinWorkerThread wt; ForkJoinPool pool; WorkQueue q;
+ if (((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)) {
+ int p = (pool = (wt = (ForkJoinWorkerThread)t).pool).config & SMASK;
+ int n = (q = wt.workQueue).top - q.base;
+ int a = (int)(pool.ctl >> AC_SHIFT) + p;
+ return n - (a > (p >>>= 1) ? 0 :
+ a > (p >>>= 1) ? 1 :
+ a > (p >>>= 1) ? 2 :
+ a > (p >>>= 1) ? 4 :
+ 8);
+ }
+ return 0;
}
- /**
- * Lifecycle control. High word contains runState, low word
- * contains the number of workers that are (probably) executing
- * tasks. This value is atomically incremented before a worker
- * gets a task to run, and decremented when worker has no tasks
- * and cannot find any. These two fields are bundled together to
- * support correct termination triggering. Note: activeCount
- * CAS'es cheat by assuming active count is in low word, so need
- * to be modified if this changes
- */
- private volatile int runControl;
+ // Termination
- // RunState values. Order among values matters
- private static final int RUNNING = 0;
- private static final int SHUTDOWN = 1;
- private static final int TERMINATING = 2;
- private static final int TERMINATED = 3;
+ /**
+ * Possibly initiates and/or completes termination. The caller
+ * triggering termination runs three passes through workQueues:
+ * (0) Setting termination status, followed by wakeups of queued
+ * workers; (1) cancelling all tasks; (2) interrupting lagging
+ * threads (likely in external tasks, but possibly also blocked in
+ * joins). Each pass repeats previous steps because of potential
+ * lagging thread creation.
+ *
+ * @param now if true, unconditionally terminate, else only
+ * if no work and no active workers
+ * @param enable if true, enable shutdown when next possible
+ * @return true if now terminating or terminated
+ */
+ private boolean tryTerminate(boolean now, boolean enable) {
+ int ps;
+ if (this == common) // cannot shut down
+ return false;
+ if ((ps = plock) >= 0) { // enable by setting plock
+ if (!enable)
+ return false;
+ if ((ps & PL_LOCK) != 0 ||
+ !U.compareAndSwapInt(this, PLOCK, ps, ps += PL_LOCK))
+ ps = acquirePlock();
+ int nps = ((ps + PL_LOCK) & ~SHUTDOWN) | SHUTDOWN;
+ if (!U.compareAndSwapInt(this, PLOCK, ps, nps))
+ releasePlock(nps);
+ }
+ for (long c;;) {
+ if (((c = ctl) & STOP_BIT) != 0) { // already terminating
+ if ((short)(c >>> TC_SHIFT) == -(config & SMASK)) {
+ synchronized (this) {
+ notifyAll(); // signal when 0 workers
+ }
+ }
+ return true;
+ }
+ if (!now) { // check if idle & no tasks
+ WorkQueue[] ws; WorkQueue w;
+ if ((int)(c >> AC_SHIFT) != -(config & SMASK))
+ return false;
+ if ((ws = workQueues) != null) {
+ for (int i = 0; i < ws.length; ++i) {
+ if ((w = ws[i]) != null) {
+ if (!w.isEmpty()) { // signal unprocessed tasks
+ signalWork(w);
+ return false;
+ }
+ if ((i & 1) != 0 && w.eventCount >= 0)
+ return false; // unqueued inactive worker
+ }
+ }
+ }
+ }
+ if (U.compareAndSwapLong(this, CTL, c, c | STOP_BIT)) {
+ for (int pass = 0; pass < 3; ++pass) {
+ WorkQueue[] ws; WorkQueue w; Thread wt;
+ if ((ws = workQueues) != null) {
+ int n = ws.length;
+ for (int i = 0; i < n; ++i) {
+ if ((w = ws[i]) != null) {
+ w.qlock = -1;
+ if (pass > 0) {
+ w.cancelAll();
+ if (pass > 1 && (wt = w.owner) != null) {
+ if (!wt.isInterrupted()) {
+ try {
+ wt.interrupt();
+ } catch (Throwable ignore) {
+ }
+ }
+ U.unpark(wt);
+ }
+ }
+ }
+ }
+ // Wake up workers parked on event queue
+ int i, e; long cc; Thread p;
+ while ((e = (int)(cc = ctl) & E_MASK) != 0 &&
+ (i = e & SMASK) < n && i >= 0 &&
+ (w = ws[i]) != null) {
+ long nc = ((long)(w.nextWait & E_MASK) |
+ ((cc + AC_UNIT) & AC_MASK) |
+ (cc & (TC_MASK|STOP_BIT)));
+ if (w.eventCount == (e | INT_SIGN) &&
+ U.compareAndSwapLong(this, CTL, cc, nc)) {
+ w.eventCount = (e + E_SEQ) & E_MASK;
+ w.qlock = -1;
+ if ((p = w.parker) != null)
+ U.unpark(p);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
- private static int runStateOf(int c) { return c >>> 16; }
- private static int activeCountOf(int c) { return c & shortMask; }
- private static int runControlFor(int r, int a) { return (r << 16) + a; }
+ // external operations on common pool
/**
- * Try incrementing active count; fail on contention. Called by
- * workers before/during executing tasks.
- * @return true on success;
+ * Returns common pool queue for a thread that has submitted at
+ * least one task.
*/
- final boolean tryIncrementActiveCount() {
- int c = runControl;
- return casRunControl(c, c+1);
+ static WorkQueue commonSubmitterQueue() {
+ ForkJoinPool p; WorkQueue[] ws; int m; Submitter z;
+ return ((z = submitters.get()) != null &&
+ (p = common) != null &&
+ (ws = p.workQueues) != null &&
+ (m = ws.length - 1) >= 0) ?
+ ws[m & z.seed & SQMASK] : null;
}
/**
- * Try decrementing active count; fail on contention.
- * Possibly trigger termination on success
- * Called by workers when they can't find tasks.
- * @return true on success
+ * Tries to pop the given task from submitter's queue in common pool.
*/
- final boolean tryDecrementActiveCount() {
- int c = runControl;
- int nextc = c - 1;
- if (!casRunControl(c, nextc))
- return false;
- if (canTerminateOnShutdown(nextc))
- terminateOnShutdown();
- return true;
+ static boolean tryExternalUnpush(ForkJoinTask<?> t) {
+ ForkJoinPool p; WorkQueue[] ws; WorkQueue q; Submitter z;
+ ForkJoinTask<?>[] a; int m, s;
+ if (t != null &&
+ (z = submitters.get()) != null &&
+ (p = common) != null &&
+ (ws = p.workQueues) != null &&
+ (m = ws.length - 1) >= 0 &&
+ (q = ws[m & z.seed & SQMASK]) != null &&
+ (s = q.top) != q.base &&
+ (a = q.array) != null) {
+ long j = (((a.length - 1) & (s - 1)) << ASHIFT) + ABASE;
+ if (U.getObject(a, j) == t &&
+ U.compareAndSwapInt(q, QLOCK, 0, 1)) {
+ if (q.array == a && q.top == s && // recheck
+ U.compareAndSwapObject(a, j, t, null)) {
+ q.top = s - 1;
+ q.qlock = 0;
+ return true;
+ }
+ q.qlock = 0;
+ }
+ }
+ return false;
}
/**
- * Return true if argument represents zero active count and
- * nonzero runstate, which is the triggering condition for
- * terminating on shutdown.
+ * Tries to pop and run local tasks within the same computation
+ * as the given root. On failure, tries to help complete from
+ * other queues via helpComplete.
*/
- private static boolean canTerminateOnShutdown(int c) {
- return ((c & -c) >>> 16) != 0; // i.e. least bit is nonzero runState bit
+ private void externalHelpComplete(WorkQueue q, ForkJoinTask<?> root) {
+ ForkJoinTask<?>[] a; int m;
+ if (q != null && (a = q.array) != null && (m = (a.length - 1)) >= 0 &&
+ root != null && root.status >= 0) {
+ for (;;) {
+ int s, u; Object o; CountedCompleter<?> task = null;
+ if ((s = q.top) - q.base > 0) {
+ long j = ((m & (s - 1)) << ASHIFT) + ABASE;
+ if ((o = U.getObject(a, j)) != null &&
+ (o instanceof CountedCompleter)) {
+ CountedCompleter<?> t = (CountedCompleter<?>)o, r = t;
+ do {
+ if (r == root) {
+ if (U.compareAndSwapInt(q, QLOCK, 0, 1)) {
+ if (q.array == a && q.top == s &&
+ U.compareAndSwapObject(a, j, t, null)) {
+ q.top = s - 1;
+ task = t;
+ }
+ q.qlock = 0;
+ }
+ break;
+ }
+ } while ((r = r.completer) != null);
+ }
+ }
+ if (task != null)
+ task.doExec();
+ if (root.status < 0 ||
+ (u = (int)(ctl >>> 32)) >= 0 || (u >> UAC_SHIFT) >= 0)
+ break;
+ if (task == null) {
+ helpSignal(root, q.poolIndex);
+ if (root.status >= 0)
+ helpComplete(root, SHARED_QUEUE);
+ break;
+ }
+ }
+ }
}
/**
- * Transition run state to at least the given state. Return true
- * if not already at least given state.
- */
- private boolean transitionRunStateTo(int state) {
- for (;;) {
- int c = runControl;
- if (runStateOf(c) >= state)
- return false;
- if (casRunControl(c, runControlFor(state, activeCountOf(c))))
- return true;
+ * Tries to help execute or signal availability of the given task
+ * from submitter's queue in common pool.
+ */
+ static void externalHelpJoin(ForkJoinTask<?> t) {
+ // Some hard-to-avoid overlap with tryExternalUnpush
+ ForkJoinPool p; WorkQueue[] ws; WorkQueue q, w; Submitter z;
+ ForkJoinTask<?>[] a; int m, s, n;
+ if (t != null &&
+ (z = submitters.get()) != null &&
+ (p = common) != null &&
+ (ws = p.workQueues) != null &&
+ (m = ws.length - 1) >= 0 &&
+ (q = ws[m & z.seed & SQMASK]) != null &&
+ (a = q.array) != null) {
+ int am = a.length - 1;
+ if ((s = q.top) != q.base) {
+ long j = ((am & (s - 1)) << ASHIFT) + ABASE;
+ if (U.getObject(a, j) == t &&
+ U.compareAndSwapInt(q, QLOCK, 0, 1)) {
+ if (q.array == a && q.top == s &&
+ U.compareAndSwapObject(a, j, t, null)) {
+ q.top = s - 1;
+ q.qlock = 0;
+ t.doExec();
+ }
+ else
+ q.qlock = 0;
+ }
+ }
+ if (t.status >= 0) {
+ if (t instanceof CountedCompleter)
+ p.externalHelpComplete(q, t);
+ else
+ p.helpSignal(t, q.poolIndex);
+ }
}
}
- /**
- * Controls whether to add spares to maintain parallelism
- */
- private volatile boolean maintainsParallelism;
+ // Exported methods
// Constructors
/**
- * Creates a ForkJoinPool with a pool size equal to the number of
- * processors available on the system and using the default
- * ForkJoinWorkerThreadFactory,
+ * Creates a {@code ForkJoinPool} with parallelism equal to {@link
+ * java.lang.Runtime#availableProcessors}, using the {@linkplain
+ * #defaultForkJoinWorkerThreadFactory default thread factory},
+ * no UncaughtExceptionHandler, and non-async LIFO processing mode.
+ *
* @throws SecurityException if a security manager exists and
* the caller is not permitted to modify threads
* because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
+ * java.lang.RuntimePermission}{@code ("modifyThread")}
*/
public ForkJoinPool() {
- this(Runtime.getRuntime().availableProcessors(),
- defaultForkJoinWorkerThreadFactory);
+ this(Math.min(MAX_CAP, Runtime.getRuntime().availableProcessors()),
+ defaultForkJoinWorkerThreadFactory, null, false);
}
/**
- * Creates a ForkJoinPool with the indicated parellelism level
- * threads, and using the default ForkJoinWorkerThreadFactory,
- * @param parallelism the number of worker threads
+ * Creates a {@code ForkJoinPool} with the indicated parallelism
+ * level, the {@linkplain
+ * #defaultForkJoinWorkerThreadFactory default thread factory},
+ * no UncaughtExceptionHandler, and non-async LIFO processing mode.
+ *
+ * @param parallelism the parallelism level
* @throws IllegalArgumentException if parallelism less than or
- * equal to zero
+ * equal to zero, or greater than implementation limit
* @throws SecurityException if a security manager exists and
* the caller is not permitted to modify threads
* because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
+ * java.lang.RuntimePermission}{@code ("modifyThread")}
*/
public ForkJoinPool(int parallelism) {
- this(parallelism, defaultForkJoinWorkerThreadFactory);
+ this(parallelism, defaultForkJoinWorkerThreadFactory, null, false);
}
/**
- * Creates a ForkJoinPool with parallelism equal to the number of
- * processors available on the system and using the given
- * ForkJoinWorkerThreadFactory,
- * @param factory the factory for creating new threads
- * @throws NullPointerException if factory is null
- * @throws SecurityException if a security manager exists and
- * the caller is not permitted to modify threads
- * because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
- */
- public ForkJoinPool(ForkJoinWorkerThreadFactory factory) {
- this(Runtime.getRuntime().availableProcessors(), factory);
- }
-
- /**
- * Creates a ForkJoinPool with the given parallelism and factory.
+ * Creates a {@code ForkJoinPool} with the given parameters.
*
- * @param parallelism the targeted number of worker threads
- * @param factory the factory for creating new threads
+ * @param parallelism the parallelism level. For default value,
+ * use {@link java.lang.Runtime#availableProcessors}.
+ * @param factory the factory for creating new threads. For default value,
+ * use {@link #defaultForkJoinWorkerThreadFactory}.
+ * @param handler the handler for internal worker threads that
+ * terminate due to unrecoverable errors encountered while executing
+ * tasks. For default value, use {@code null}.
+ * @param asyncMode if true,
+ * establishes local first-in-first-out scheduling mode for forked
+ * tasks that are never joined. This mode may be more appropriate
+ * than default locally stack-based mode in applications in which
+ * worker threads only process event-style asynchronous tasks.
+ * For default value, use {@code false}.
* @throws IllegalArgumentException if parallelism less than or
- * equal to zero, or greater than implementation limit.
- * @throws NullPointerException if factory is null
+ * equal to zero, or greater than implementation limit
+ * @throws NullPointerException if the factory is null
* @throws SecurityException if a security manager exists and
* the caller is not permitted to modify threads
* because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
+ * java.lang.RuntimePermission}{@code ("modifyThread")}
*/
- public ForkJoinPool(int parallelism, ForkJoinWorkerThreadFactory factory) {
- if (parallelism <= 0 || parallelism > MAX_THREADS)
- throw new IllegalArgumentException();
+ public ForkJoinPool(int parallelism,
+ ForkJoinWorkerThreadFactory factory,
+ Thread.UncaughtExceptionHandler handler,
+ boolean asyncMode) {
+ checkPermission();
if (factory == null)
throw new NullPointerException();
- checkPermission();
+ if (parallelism <= 0 || parallelism > MAX_CAP)
+ throw new IllegalArgumentException();
this.factory = factory;
- this.parallelism = parallelism;
- this.maxPoolSize = MAX_THREADS;
- this.maintainsParallelism = true;
- this.poolNumber = poolNumberGenerator.incrementAndGet();
- this.workerLock = new ReentrantLock();
- this.termination = workerLock.newCondition();
- this.stealCount = new AtomicLong();
- this.submissionQueue = new LinkedTransferQueue<ForkJoinTask<?>>();
- // worker array and workers are lazily constructed
- }
-
- /**
- * Create new worker using factory.
- * @param index the index to assign worker
- * @return new worker, or null of factory failed
- */
- private ForkJoinWorkerThread createWorker(int index) {
- Thread.UncaughtExceptionHandler h = ueh;
- ForkJoinWorkerThread w = factory.newThread(this);
- if (w != null) {
- w.poolIndex = index;
- w.setDaemon(true);
- w.setAsyncMode(locallyFifo);
- w.setName("ForkJoinPool-" + poolNumber + "-worker-" + index);
- if (h != null)
- w.setUncaughtExceptionHandler(h);
- }
- return w;
- }
-
- /**
- * Return a good size for worker array given pool size.
- * Currently requires size to be a power of two.
- */
- private static int arraySizeFor(int ps) {
- return ps <= 1? 1 : (1 << (32 - Integer.numberOfLeadingZeros(ps-1)));
- }
-
- public static ForkJoinWorkerThread[] copyOfWorkers(ForkJoinWorkerThread[] original, int newLength) {
- ForkJoinWorkerThread[] copy = new ForkJoinWorkerThread[newLength];
- System.arraycopy(original, 0, copy, 0, Math.min(newLength, original.length));
- return copy;
- }
-
- /**
- * Create or resize array if necessary to hold newLength.
- * Call only under exlusion or lock
- * @return the array
- */
- private ForkJoinWorkerThread[] ensureWorkerArrayCapacity(int newLength) {
- ForkJoinWorkerThread[] ws = workers;
- if (ws == null)
- return workers = new ForkJoinWorkerThread[arraySizeFor(newLength)];
- else if (newLength > ws.length)
- return workers = copyOfWorkers(ws, arraySizeFor(newLength));
- else
- return ws;
- }
-
- /**
- * Try to shrink workers into smaller array after one or more terminate
- */
- private void tryShrinkWorkerArray() {
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- int len = ws.length;
- int last = len - 1;
- while (last >= 0 && ws[last] == null)
- --last;
- int newLength = arraySizeFor(last+1);
- if (newLength < len)
- workers = copyOfWorkers(ws, newLength);
- }
- }
-
- /**
- * Initialize workers if necessary
- */
- final void ensureWorkerInitialization() {
- ForkJoinWorkerThread[] ws = workers;
- if (ws == null) {
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- ws = workers;
- if (ws == null) {
- int ps = parallelism;
- ws = ensureWorkerArrayCapacity(ps);
- for (int i = 0; i < ps; ++i) {
- ForkJoinWorkerThread w = createWorker(i);
- if (w != null) {
- ws[i] = w;
- w.start();
- updateWorkerCount(1);
- }
- }
- }
- } finally {
- lock.unlock();
- }
- }
+ this.ueh = handler;
+ this.config = parallelism | (asyncMode ? (FIFO_QUEUE << 16) : 0);
+ long np = (long)(-parallelism); // offset ctl counts
+ this.ctl = ((np << AC_SHIFT) & AC_MASK) | ((np << TC_SHIFT) & TC_MASK);
+ int pn = nextPoolId();
+ StringBuilder sb = new StringBuilder("ForkJoinPool-");
+ sb.append(Integer.toString(pn));
+ sb.append("-worker-");
+ this.workerNamePrefix = sb.toString();
+ }
+
+ /**
+ * Constructor for common pool, suitable only for static initialization.
+ * Basically the same as above, but uses smallest possible initial footprint.
+ */
+ ForkJoinPool(int parallelism, long ctl,
+ ForkJoinWorkerThreadFactory factory,
+ Thread.UncaughtExceptionHandler handler) {
+ this.config = parallelism;
+ this.ctl = ctl;
+ this.factory = factory;
+ this.ueh = handler;
+ this.workerNamePrefix = "ForkJoinPool.commonPool-worker-";
}
/**
- * Worker creation and startup for threads added via setParallelism.
+ * Returns the common pool instance. This pool is statically
+ * constructed; its run state is unaffected by attempts to {@link
+ * #shutdown} or {@link #shutdownNow}. However this pool and any
+ * ongoing processing are automatically terminated upon program
+ * {@link System#exit}. Any program that relies on asynchronous
+ * task processing to complete before program termination should
+ * invoke {@code commonPool().}{@link #awaitQuiescence}, before
+ * exit.
+ *
+ * @return the common pool instance
+ * @since 1.8
*/
- private void createAndStartAddedWorkers() {
- resumeAllSpares(); // Allow spares to convert to nonspare
- int ps = parallelism;
- ForkJoinWorkerThread[] ws = ensureWorkerArrayCapacity(ps);
- int len = ws.length;
- // Sweep through slots, to keep lowest indices most populated
- int k = 0;
- while (k < len) {
- if (ws[k] != null) {
- ++k;
- continue;
- }
- int s = workerCounts;
- int tc = totalCountOf(s);
- int rc = runningCountOf(s);
- if (rc >= ps || tc >= ps)
- break;
- if (casWorkerCounts (s, workerCountsFor(tc+1, rc+1))) {
- ForkJoinWorkerThread w = createWorker(k);
- if (w != null) {
- ws[k++] = w;
- w.start();
- }
- else {
- updateWorkerCount(-1); // back out on failed creation
- break;
- }
- }
- }
+ public static ForkJoinPool commonPool() {
+ // assert common != null : "static init error";
+ return common;
}
// Execution methods
/**
- * Common code for execute, invoke and submit
- */
- private <T> void doSubmit(ForkJoinTask<T> task) {
- if (isShutdown())
- throw new RejectedExecutionException();
- if (workers == null)
- ensureWorkerInitialization();
- submissionQueue.offer(task);
- signalIdleWorkers();
- }
-
- /**
- * Performs the given task; returning its result upon completion
+ * Performs the given task, returning its result upon completion.
+ * If the computation encounters an unchecked Exception or Error,
+ * it is rethrown as the outcome of this invocation. Rethrown
+ * exceptions behave in the same way as regular exceptions, but,
+ * when possible, contain stack traces (as displayed for example
+ * using {@code ex.printStackTrace()}) of both the current thread
+ * as well as the thread actually encountering the exception;
+ * minimally only the latter.
+ *
* @param task the task
* @return the task's result
- * @throws NullPointerException if task is null
- * @throws RejectedExecutionException if pool is shut down
+ * @throws NullPointerException if the task is null
+ * @throws RejectedExecutionException if the task cannot be
+ * scheduled for execution
*/
public <T> T invoke(ForkJoinTask<T> task) {
- doSubmit(task);
+ if (task == null)
+ throw new NullPointerException();
+ externalPush(task);
return task.join();
}
/**
* Arranges for (asynchronous) execution of the given task.
+ *
* @param task the task
- * @throws NullPointerException if task is null
- * @throws RejectedExecutionException if pool is shut down
+ * @throws NullPointerException if the task is null
+ * @throws RejectedExecutionException if the task cannot be
+ * scheduled for execution
*/
- public <T> void execute(ForkJoinTask<T> task) {
- doSubmit(task);
+ public void execute(ForkJoinTask<?> task) {
+ if (task == null)
+ throw new NullPointerException();
+ externalPush(task);
}
// AbstractExecutorService methods
+ /**
+ * @throws NullPointerException if the task is null
+ * @throws RejectedExecutionException if the task cannot be
+ * scheduled for execution
+ */
public void execute(Runnable task) {
- doSubmit(new AdaptedRunnable<Void>(task, null));
+ if (task == null)
+ throw new NullPointerException();
+ ForkJoinTask<?> job;
+ if (task instanceof ForkJoinTask<?>) // avoid re-wrap
+ job = (ForkJoinTask<?>) task;
+ else
+ job = new ForkJoinTask.AdaptedRunnableAction(task);
+ externalPush(job);
}
- public <T> ForkJoinTask<T> submit(Callable<T> task) {
- ForkJoinTask<T> job = new AdaptedCallable<T>(task);
- doSubmit(job);
- return job;
+ /**
+ * Submits a ForkJoinTask for execution.
+ *
+ * @param task the task to submit
+ * @return the task
+ * @throws NullPointerException if the task is null
+ * @throws RejectedExecutionException if the task cannot be
+ * scheduled for execution
+ */
+ public <T> ForkJoinTask<T> submit(ForkJoinTask<T> task) {
+ if (task == null)
+ throw new NullPointerException();
+ externalPush(task);
+ return task;
}
- public <T> ForkJoinTask<T> submit(Runnable task, T result) {
- ForkJoinTask<T> job = new AdaptedRunnable<T>(task, result);
- doSubmit(job);
+ /**
+ * @throws NullPointerException if the task is null
+ * @throws RejectedExecutionException if the task cannot be
+ * scheduled for execution
+ */
+ public <T> ForkJoinTask<T> submit(Callable<T> task) {
+ ForkJoinTask<T> job = new ForkJoinTask.AdaptedCallable<T>(task);
+ externalPush(job);
return job;
}
- public ForkJoinTask<?> submit(Runnable task) {
- ForkJoinTask<Void> job = new AdaptedRunnable<Void>(task, null);
- doSubmit(job);
+ /**
+ * @throws NullPointerException if the task is null
+ * @throws RejectedExecutionException if the task cannot be
+ * scheduled for execution
+ */
+ public <T> ForkJoinTask<T> submit(Runnable task, T result) {
+ ForkJoinTask<T> job = new ForkJoinTask.AdaptedRunnable<T>(task, result);
+ externalPush(job);
return job;
}
/**
- * Adaptor for Runnables. This implements RunnableFuture
- * to be compliant with AbstractExecutorService constraints
+ * @throws NullPointerException if the task is null
+ * @throws RejectedExecutionException if the task cannot be
+ * scheduled for execution
*/
- static final class AdaptedRunnable<T> extends ForkJoinTask<T>
- implements RunnableFuture<T> {
- final Runnable runnable;
- final T resultOnCompletion;
- T result;
- AdaptedRunnable(Runnable runnable, T result) {
- if (runnable == null) throw new NullPointerException();
- this.runnable = runnable;
- this.resultOnCompletion = result;
- }
- public T getRawResult() { return result; }
- public void setRawResult(T v) { result = v; }
- public boolean exec() {
- runnable.run();
- result = resultOnCompletion;
- return true;
- }
- public void run() { invoke(); }
+ public ForkJoinTask<?> submit(Runnable task) {
+ if (task == null)
+ throw new NullPointerException();
+ ForkJoinTask<?> job;
+ if (task instanceof ForkJoinTask<?>) // avoid re-wrap
+ job = (ForkJoinTask<?>) task;
+ else
+ job = new ForkJoinTask.AdaptedRunnableAction(task);
+ externalPush(job);
+ return job;
}
/**
- * Adaptor for Callables
+ * @throws NullPointerException {@inheritDoc}
+ * @throws RejectedExecutionException {@inheritDoc}
*/
- static final class AdaptedCallable<T> extends ForkJoinTask<T>
- implements RunnableFuture<T> {
- final Callable<T> callable;
- T result;
- AdaptedCallable(Callable<T> callable) {
- if (callable == null) throw new NullPointerException();
- this.callable = callable;
- }
- public T getRawResult() { return result; }
- public void setRawResult(T v) { result = v; }
- public boolean exec() {
- try {
- result = callable.call();
- return true;
- } catch (Error err) {
- throw err;
- } catch (RuntimeException rex) {
- throw rex;
- } catch (Exception ex) {
- throw new RuntimeException(ex);
- }
- }
- public void run() { invoke(); }
- }
-
public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks) {
- ArrayList<ForkJoinTask<T>> ts =
- new ArrayList<ForkJoinTask<T>>(tasks.size());
- for (Callable<T> c : tasks)
- ts.add(new AdaptedCallable<T>(c));
- invoke(new InvokeAll<T>(ts));
- return (List<Future<T>>)(List)ts;
- }
+ // In previous versions of this class, this method constructed
+ // a task to run ForkJoinTask.invokeAll, but now external
+ // invocation of multiple tasks is at least as efficient.
+ ArrayList<Future<T>> futures = new ArrayList<Future<T>>(tasks.size());
- static final class InvokeAll<T> extends RecursiveAction {
- final ArrayList<ForkJoinTask<T>> tasks;
- InvokeAll(ArrayList<ForkJoinTask<T>> tasks) { this.tasks = tasks; }
- public void compute() {
- try { invokeAll(tasks); } catch(Exception ignore) {}
+ boolean done = false;
+ try {
+ for (Callable<T> t : tasks) {
+ ForkJoinTask<T> f = new ForkJoinTask.AdaptedCallable<T>(t);
+ futures.add(f);
+ externalPush(f);
+ }
+ for (int i = 0, size = futures.size(); i < size; i++)
+ ((ForkJoinTask<?>)futures.get(i)).quietlyJoin();
+ done = true;
+ return futures;
+ } finally {
+ if (!done)
+ for (int i = 0, size = futures.size(); i < size; i++)
+ futures.get(i).cancel(false);
}
}
- // Configuration and status settings and queries
-
/**
- * Returns the factory used for constructing new workers
+ * Returns the factory used for constructing new workers.
*
* @return the factory used for constructing new workers
*/
@@ -674,234 +3068,99 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
/**
* Returns the handler for internal worker threads that terminate
* due to unrecoverable errors encountered while executing tasks.
- * @return the handler, or null if none
+ *
+ * @return the handler, or {@code null} if none
*/
public Thread.UncaughtExceptionHandler getUncaughtExceptionHandler() {
- Thread.UncaughtExceptionHandler h;
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- h = ueh;
- } finally {
- lock.unlock();
- }
- return h;
+ return ueh;
}
/**
- * Sets the handler for internal worker threads that terminate due
- * to unrecoverable errors encountered while executing tasks.
- * Unless set, the current default or ThreadGroup handler is used
- * as handler.
+ * Returns the targeted parallelism level of this pool.
*
- * @param h the new handler
- * @return the old handler, or null if none
- * @throws SecurityException if a security manager exists and
- * the caller is not permitted to modify threads
- * because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
- */
- public Thread.UncaughtExceptionHandler
- setUncaughtExceptionHandler(Thread.UncaughtExceptionHandler h) {
- checkPermission();
- Thread.UncaughtExceptionHandler old = null;
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- old = ueh;
- ueh = h;
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- for (int i = 0; i < ws.length; ++i) {
- ForkJoinWorkerThread w = ws[i];
- if (w != null)
- w.setUncaughtExceptionHandler(h);
- }
- }
- } finally {
- lock.unlock();
- }
- return old;
- }
-
-
- /**
- * Sets the target paralleism level of this pool.
- * @param parallelism the target parallelism
- * @throws IllegalArgumentException if parallelism less than or
- * equal to zero or greater than maximum size bounds.
- * @throws SecurityException if a security manager exists and
- * the caller is not permitted to modify threads
- * because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
+ * @return the targeted parallelism level of this pool
*/
- public void setParallelism(int parallelism) {
- checkPermission();
- if (parallelism <= 0 || parallelism > maxPoolSize)
- throw new IllegalArgumentException();
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- if (!isTerminating()) {
- int p = this.parallelism;
- this.parallelism = parallelism;
- if (parallelism > p)
- createAndStartAddedWorkers();
- else
- trimSpares();
- }
- } finally {
- lock.unlock();
- }
- signalIdleWorkers();
+ public int getParallelism() {
+ return config & SMASK;
}
/**
- * Returns the targeted number of worker threads in this pool.
+ * Returns the targeted parallelism level of the common pool.
*
- * @return the targeted number of worker threads in this pool
+ * @return the targeted parallelism level of the common pool
+ * @since 1.8
*/
- public int getParallelism() {
- return parallelism;
+ public static int getCommonPoolParallelism() {
+ return commonParallelism;
}
/**
* Returns the number of worker threads that have started but not
- * yet terminated. This result returned by this method may differ
- * from <code>getParallelism</code> when threads are created to
+ * yet terminated. The result returned by this method may differ
+ * from {@link #getParallelism} when threads are created to
* maintain parallelism when others are cooperatively blocked.
*
* @return the number of worker threads
*/
public int getPoolSize() {
- return totalCountOf(workerCounts);
- }
-
- /**
- * Returns the maximum number of threads allowed to exist in the
- * pool, even if there are insufficient unblocked running threads.
- * @return the maximum
- */
- public int getMaximumPoolSize() {
- return maxPoolSize;
- }
-
- /**
- * Sets the maximum number of threads allowed to exist in the
- * pool, even if there are insufficient unblocked running threads.
- * Setting this value has no effect on current pool size. It
- * controls construction of new threads.
- * @throws IllegalArgumentException if negative or greater then
- * internal implementation limit.
- */
- public void setMaximumPoolSize(int newMax) {
- if (newMax < 0 || newMax > MAX_THREADS)
- throw new IllegalArgumentException();
- maxPoolSize = newMax;
- }
-
-
- /**
- * Returns true if this pool dynamically maintains its target
- * parallelism level. If false, new threads are added only to
- * avoid possible starvation.
- * This setting is by default true;
- * @return true if maintains parallelism
- */
- public boolean getMaintainsParallelism() {
- return maintainsParallelism;
- }
-
- /**
- * Sets whether this pool dynamically maintains its target
- * parallelism level. If false, new threads are added only to
- * avoid possible starvation.
- * @param enable true to maintains parallelism
- */
- public void setMaintainsParallelism(boolean enable) {
- maintainsParallelism = enable;
- }
-
- /**
- * Establishes local first-in-first-out scheduling mode for forked
- * tasks that are never joined. This mode may be more appropriate
- * than default locally stack-based mode in applications in which
- * worker threads only process asynchronous tasks. This method is
- * designed to be invoked only when pool is quiescent, and
- * typically only before any tasks are submitted. The effects of
- * invocations at ather times may be unpredictable.
- *
- * @param async if true, use locally FIFO scheduling
- * @return the previous mode.
- */
- public boolean setAsyncMode(boolean async) {
- boolean oldMode = locallyFifo;
- locallyFifo = async;
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- for (int i = 0; i < ws.length; ++i) {
- ForkJoinWorkerThread t = ws[i];
- if (t != null)
- t.setAsyncMode(async);
- }
- }
- return oldMode;
+ return (config & SMASK) + (short)(ctl >>> TC_SHIFT);
}
/**
- * Returns true if this pool uses local first-in-first-out
+ * Returns {@code true} if this pool uses local first-in-first-out
* scheduling mode for forked tasks that are never joined.
*
- * @return true if this pool uses async mode.
+ * @return {@code true} if this pool uses async mode
*/
public boolean getAsyncMode() {
- return locallyFifo;
+ return (config >>> 16) == FIFO_QUEUE;
}
/**
* Returns an estimate of the number of worker threads that are
* not blocked waiting to join tasks or for other managed
- * synchronization.
+ * synchronization. This method may overestimate the
+ * number of running threads.
*
* @return the number of worker threads
*/
public int getRunningThreadCount() {
- return runningCountOf(workerCounts);
+ int rc = 0;
+ WorkQueue[] ws; WorkQueue w;
+ if ((ws = workQueues) != null) {
+ for (int i = 1; i < ws.length; i += 2) {
+ if ((w = ws[i]) != null && w.isApparentlyUnblocked())
+ ++rc;
+ }
+ }
+ return rc;
}
/**
* Returns an estimate of the number of threads that are currently
* stealing or executing tasks. This method may overestimate the
* number of active threads.
- * @return the number of active threads.
+ *
+ * @return the number of active threads
*/
public int getActiveThreadCount() {
- return activeCountOf(runControl);
- }
-
- /**
- * Returns an estimate of the number of threads that are currently
- * idle waiting for tasks. This method may underestimate the
- * number of idle threads.
- * @return the number of idle threads.
- */
- final int getIdleThreadCount() {
- int c = runningCountOf(workerCounts) - activeCountOf(runControl);
- return (c <= 0)? 0 : c;
+ int r = (config & SMASK) + (int)(ctl >> AC_SHIFT);
+ return (r <= 0) ? 0 : r; // suppress momentarily negative values
}
/**
- * Returns true if all worker threads are currently idle. An idle
- * worker is one that cannot obtain a task to execute because none
- * are available to steal from other threads, and there are no
- * pending submissions to the pool. This method is conservative:
- * It might not return true immediately upon idleness of all
- * threads, but will eventually become true if threads remain
- * inactive.
- * @return true if all threads are currently idle
+ * Returns {@code true} if all worker threads are currently idle.
+ * An idle worker is one that cannot obtain a task to execute
+ * because none are available to steal from other threads, and
+ * there are no pending submissions to the pool. This method is
+ * conservative; it might not return {@code true} immediately upon
+ * idleness of all threads, but will eventually become true if
+ * threads remain inactive.
+ *
+ * @return {@code true} if all threads are currently idle
*/
public boolean isQuiescent() {
- return activeCountOf(runControl) == 0;
+ return (int)(ctl >> AC_SHIFT) + (config & SMASK) == 0;
}
/**
@@ -909,23 +3168,22 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
* one thread's work queue by another. The reported value
* underestimates the actual total number of steals when the pool
* is not quiescent. This value may be useful for monitoring and
- * tuning fork/join programs: In general, steal counts should be
+ * tuning fork/join programs: in general, steal counts should be
* high enough to keep threads busy, but low enough to avoid
* overhead and contention across threads.
- * @return the number of steals.
+ *
+ * @return the number of steals
*/
public long getStealCount() {
- return stealCount.get();
- }
-
- /**
- * Accumulate steal count from a worker. Call only
- * when worker known to be idle.
- */
- private void updateStealCount(ForkJoinWorkerThread w) {
- int sc = w.getAndClearStealCount();
- if (sc != 0)
- stealCount.addAndGet(sc);
+ long count = stealCount;
+ WorkQueue[] ws; WorkQueue w;
+ if ((ws = workQueues) != null) {
+ for (int i = 1; i < ws.length; i += 2) {
+ if ((w = ws[i]) != null)
+ count += w.nsteals;
+ }
+ }
+ return count;
}
/**
@@ -935,77 +3193,106 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
* an approximation, obtained by iterating across all threads in
* the pool. This method may be useful for tuning task
* granularities.
- * @return the number of queued tasks.
+ *
+ * @return the number of queued tasks
*/
public long getQueuedTaskCount() {
long count = 0;
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- for (int i = 0; i < ws.length; ++i) {
- ForkJoinWorkerThread t = ws[i];
- if (t != null)
- count += t.getQueueSize();
+ WorkQueue[] ws; WorkQueue w;
+ if ((ws = workQueues) != null) {
+ for (int i = 1; i < ws.length; i += 2) {
+ if ((w = ws[i]) != null)
+ count += w.queueSize();
}
}
return count;
}
/**
- * Returns an estimate of the number tasks submitted to this pool
- * that have not yet begun executing. This method takes time
- * proportional to the number of submissions.
- * @return the number of queued submissions.
+ * Returns an estimate of the number of tasks submitted to this
+ * pool that have not yet begun executing. This method may take
+ * time proportional to the number of submissions.
+ *
+ * @return the number of queued submissions
*/
public int getQueuedSubmissionCount() {
- return submissionQueue.size();
+ int count = 0;
+ WorkQueue[] ws; WorkQueue w;
+ if ((ws = workQueues) != null) {
+ for (int i = 0; i < ws.length; i += 2) {
+ if ((w = ws[i]) != null)
+ count += w.queueSize();
+ }
+ }
+ return count;
}
/**
- * Returns true if there are any tasks submitted to this pool
- * that have not yet begun executing.
- * @return <code>true</code> if there are any queued submissions.
+ * Returns {@code true} if there are any tasks submitted to this
+ * pool that have not yet begun executing.
+ *
+ * @return {@code true} if there are any queued submissions
*/
public boolean hasQueuedSubmissions() {
- return !submissionQueue.isEmpty();
+ WorkQueue[] ws; WorkQueue w;
+ if ((ws = workQueues) != null) {
+ for (int i = 0; i < ws.length; i += 2) {
+ if ((w = ws[i]) != null && !w.isEmpty())
+ return true;
+ }
+ }
+ return false;
}
/**
* Removes and returns the next unexecuted submission if one is
* available. This method may be useful in extensions to this
* class that re-assign work in systems with multiple pools.
- * @return the next submission, or null if none
+ *
+ * @return the next submission, or {@code null} if none
*/
protected ForkJoinTask<?> pollSubmission() {
- return submissionQueue.poll();
+ WorkQueue[] ws; WorkQueue w; ForkJoinTask<?> t;
+ if ((ws = workQueues) != null) {
+ for (int i = 0; i < ws.length; i += 2) {
+ if ((w = ws[i]) != null && (t = w.poll()) != null)
+ return t;
+ }
+ }
+ return null;
}
/**
* Removes all available unexecuted submitted and forked tasks
* from scheduling queues and adds them to the given collection,
* without altering their execution status. These may include
- * artifically generated or wrapped tasks. This method id designed
- * to be invoked only when the pool is known to be
+ * artificially generated or wrapped tasks. This method is
+ * designed to be invoked only when the pool is known to be
* quiescent. Invocations at other times may not remove all
* tasks. A failure encountered while attempting to add elements
- * to collection <tt>c</tt> may result in elements being in
+ * to collection {@code c} may result in elements being in
* neither, either or both collections when the associated
* exception is thrown. The behavior of this operation is
* undefined if the specified collection is modified while the
* operation is in progress.
+ *
* @param c the collection to transfer elements into
* @return the number of elements transferred
*/
- protected int drainTasksTo(Collection<ForkJoinTask<?>> c) {
- int n = submissionQueue.drainTo(c);
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
+ protected int drainTasksTo(Collection<? super ForkJoinTask<?>> c) {
+ int count = 0;
+ WorkQueue[] ws; WorkQueue w; ForkJoinTask<?> t;
+ if ((ws = workQueues) != null) {
for (int i = 0; i < ws.length; ++i) {
- ForkJoinWorkerThread w = ws[i];
- if (w != null)
- n += w.drainTasksTo(c);
+ if ((w = ws[i]) != null) {
+ while ((t = w.poll()) != null) {
+ c.add(t);
+ ++count;
+ }
+ }
}
}
- return n;
+ return count;
}
/**
@@ -1016,855 +3303,457 @@ public class ForkJoinPool /*extends AbstractExecutorService*/ {
* @return a string identifying this pool, as well as its state
*/
public String toString() {
- int ps = parallelism;
- int wc = workerCounts;
- int rc = runControl;
- long st = getStealCount();
- long qt = getQueuedTaskCount();
- long qs = getQueuedSubmissionCount();
+ // Use a single pass through workQueues to collect counts
+ long qt = 0L, qs = 0L; int rc = 0;
+ long st = stealCount;
+ long c = ctl;
+ WorkQueue[] ws; WorkQueue w;
+ if ((ws = workQueues) != null) {
+ for (int i = 0; i < ws.length; ++i) {
+ if ((w = ws[i]) != null) {
+ int size = w.queueSize();
+ if ((i & 1) == 0)
+ qs += size;
+ else {
+ qt += size;
+ st += w.nsteals;
+ if (w.isApparentlyUnblocked())
+ ++rc;
+ }
+ }
+ }
+ }
+ int pc = (config & SMASK);
+ int tc = pc + (short)(c >>> TC_SHIFT);
+ int ac = pc + (int)(c >> AC_SHIFT);
+ if (ac < 0) // ignore transient negative
+ ac = 0;
+ String level;
+ if ((c & STOP_BIT) != 0)
+ level = (tc == 0) ? "Terminated" : "Terminating";
+ else
+ level = plock < 0 ? "Shutting down" : "Running";
return super.toString() +
- "[" + runStateToString(runStateOf(rc)) +
- ", parallelism = " + ps +
- ", size = " + totalCountOf(wc) +
- ", active = " + activeCountOf(rc) +
- ", running = " + runningCountOf(wc) +
+ "[" + level +
+ ", parallelism = " + pc +
+ ", size = " + tc +
+ ", active = " + ac +
+ ", running = " + rc +
", steals = " + st +
", tasks = " + qt +
", submissions = " + qs +
"]";
}
- private static String runStateToString(int rs) {
- switch(rs) {
- case RUNNING: return "Running";
- case SHUTDOWN: return "Shutting down";
- case TERMINATING: return "Terminating";
- case TERMINATED: return "Terminated";
- default: throw new Error("Unknown run state");
- }
- }
-
- // lifecycle control
-
/**
- * Initiates an orderly shutdown in which previously submitted
- * tasks are executed, but no new tasks will be accepted.
- * Invocation has no additional effect if already shut down.
- * Tasks that are in the process of being submitted concurrently
- * during the course of this method may or may not be rejected.
+ * Possibly initiates an orderly shutdown in which previously
+ * submitted tasks are executed, but no new tasks will be
+ * accepted. Invocation has no effect on execution state if this
+ * is the {@link #commonPool()}, and no additional effect if
+ * already shut down. Tasks that are in the process of being
+ * submitted concurrently during the course of this method may or
+ * may not be rejected.
+ *
* @throws SecurityException if a security manager exists and
* the caller is not permitted to modify threads
* because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
+ * java.lang.RuntimePermission}{@code ("modifyThread")}
*/
public void shutdown() {
checkPermission();
- transitionRunStateTo(SHUTDOWN);
- if (canTerminateOnShutdown(runControl))
- terminateOnShutdown();
+ tryTerminate(false, true);
}
/**
- * Attempts to stop all actively executing tasks, and cancels all
- * waiting tasks. Tasks that are in the process of being
- * submitted or executed concurrently during the course of this
- * method may or may not be rejected. Unlike some other executors,
- * this method cancels rather than collects non-executed tasks
- * upon termination, so always returns an empty list. However, you
- * can use method <code>drainTasksTo</code> before invoking this
- * method to transfer unexecuted tasks to another collection.
+ * Possibly attempts to cancel and/or stop all tasks, and reject
+ * all subsequently submitted tasks. Invocation has no effect on
+ * execution state if this is the {@link #commonPool()}, and no
+ * additional effect if already shut down. Otherwise, tasks that
+ * are in the process of being submitted or executed concurrently
+ * during the course of this method may or may not be
+ * rejected. This method cancels both existing and unexecuted
+ * tasks, in order to permit termination in the presence of task
+ * dependencies. So the method always returns an empty list
+ * (unlike the case for some other Executors).
+ *
* @return an empty list
* @throws SecurityException if a security manager exists and
* the caller is not permitted to modify threads
* because it does not hold {@link
- * java.lang.RuntimePermission}<code>("modifyThread")</code>,
+ * java.lang.RuntimePermission}{@code ("modifyThread")}
*/
public List<Runnable> shutdownNow() {
checkPermission();
- terminate();
+ tryTerminate(true, true);
return Collections.emptyList();
}
/**
- * Returns <code>true</code> if all tasks have completed following shut down.
+ * Returns {@code true} if all tasks have completed following shut down.
*
- * @return <code>true</code> if all tasks have completed following shut down
+ * @return {@code true} if all tasks have completed following shut down
*/
public boolean isTerminated() {
- return runStateOf(runControl) == TERMINATED;
+ long c = ctl;
+ return ((c & STOP_BIT) != 0L &&
+ (short)(c >>> TC_SHIFT) == -(config & SMASK));
}
/**
- * Returns <code>true</code> if the process of termination has
- * commenced but possibly not yet completed.
+ * Returns {@code true} if the process of termination has
+ * commenced but not yet completed. This method may be useful for
+ * debugging. A return of {@code true} reported a sufficient
+ * period after shutdown may indicate that submitted tasks have
+ * ignored or suppressed interruption, or are waiting for I/O,
+ * causing this executor not to properly terminate. (See the
+ * advisory notes for class {@link ForkJoinTask} stating that
+ * tasks should not normally entail blocking operations. But if
+ * they do, they must abort them on interrupt.)
*
- * @return <code>true</code> if terminating
+ * @return {@code true} if terminating but not yet terminated
*/
public boolean isTerminating() {
- return runStateOf(runControl) >= TERMINATING;
+ long c = ctl;
+ return ((c & STOP_BIT) != 0L &&
+ (short)(c >>> TC_SHIFT) != -(config & SMASK));
}
/**
- * Returns <code>true</code> if this pool has been shut down.
+ * Returns {@code true} if this pool has been shut down.
*
- * @return <code>true</code> if this pool has been shut down
+ * @return {@code true} if this pool has been shut down
*/
public boolean isShutdown() {
- return runStateOf(runControl) >= SHUTDOWN;
+ return plock < 0;
}
/**
- * Blocks until all tasks have completed execution after a shutdown
- * request, or the timeout occurs, or the current thread is
- * interrupted, whichever happens first.
+ * Blocks until all tasks have completed execution after a
+ * shutdown request, or the timeout occurs, or the current thread
+ * is interrupted, whichever happens first. Because the {@link
+ * #commonPool()} never terminates until program shutdown, when
+ * applied to the common pool, this method is equivalent to {@link
+ * #awaitQuiescence} but always returns {@code false}.
*
* @param timeout the maximum time to wait
* @param unit the time unit of the timeout argument
- * @return <code>true</code> if this executor terminated and
- * <code>false</code> if the timeout elapsed before termination
+ * @return {@code true} if this executor terminated and
+ * {@code false} if the timeout elapsed before termination
* @throws InterruptedException if interrupted while waiting
*/
public boolean awaitTermination(long timeout, TimeUnit unit)
throws InterruptedException {
- long nanos = unit.toNanos(timeout);
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- for (;;) {
- if (isTerminated())
- return true;
- if (nanos <= 0)
- return false;
- nanos = termination.awaitNanos(nanos);
- }
- } finally {
- lock.unlock();
- }
- }
-
- // Shutdown and termination support
-
- /**
- * Callback from terminating worker. Null out the corresponding
- * workers slot, and if terminating, try to terminate, else try to
- * shrink workers array.
- * @param w the worker
- */
- final void workerTerminated(ForkJoinWorkerThread w) {
- updateStealCount(w);
- updateWorkerCount(-1);
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- int idx = w.poolIndex;
- if (idx >= 0 && idx < ws.length && ws[idx] == w)
- ws[idx] = null;
- if (totalCountOf(workerCounts) == 0) {
- terminate(); // no-op if already terminating
- transitionRunStateTo(TERMINATED);
- termination.signalAll();
- }
- else if (!isTerminating()) {
- tryShrinkWorkerArray();
- tryResumeSpare(true); // allow replacement
- }
- }
- } finally {
- lock.unlock();
- }
- signalIdleWorkers();
- }
-
- /**
- * Initiate termination.
- */
- private void terminate() {
- if (transitionRunStateTo(TERMINATING)) {
- stopAllWorkers();
- resumeAllSpares();
- signalIdleWorkers();
- cancelQueuedSubmissions();
- cancelQueuedWorkerTasks();
- interruptUnterminatedWorkers();
- signalIdleWorkers(); // resignal after interrupt
- }
- }
-
- /**
- * Possibly terminate when on shutdown state
- */
- private void terminateOnShutdown() {
- if (!hasQueuedSubmissions() && canTerminateOnShutdown(runControl))
- terminate();
- }
-
- /**
- * Clear out and cancel submissions
- */
- private void cancelQueuedSubmissions() {
- ForkJoinTask<?> task;
- while ((task = pollSubmission()) != null)
- task.cancel(false);
- }
-
- /**
- * Clean out worker queues.
- */
- private void cancelQueuedWorkerTasks() {
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- for (int i = 0; i < ws.length; ++i) {
- ForkJoinWorkerThread t = ws[i];
- if (t != null)
- t.cancelTasks();
- }
- }
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Set each worker's status to terminating. Requires lock to avoid
- * conflicts with add/remove
- */
- private void stopAllWorkers() {
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- for (int i = 0; i < ws.length; ++i) {
- ForkJoinWorkerThread t = ws[i];
- if (t != null)
- t.shutdownNow();
- }
- }
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Interrupt all unterminated workers. This is not required for
- * sake of internal control, but may help unstick user code during
- * shutdown.
- */
- private void interruptUnterminatedWorkers() {
- final ReentrantLock lock = this.workerLock;
- lock.lock();
- try {
- ForkJoinWorkerThread[] ws = workers;
- if (ws != null) {
- for (int i = 0; i < ws.length; ++i) {
- ForkJoinWorkerThread t = ws[i];
- if (t != null && !t.isTerminated()) {
- try {
- t.interrupt();
- } catch (SecurityException ignore) {
- }
- }
- }
- }
- } finally {
- lock.unlock();
- }
- }
-
-
- /*
- * Nodes for event barrier to manage idle threads. Queue nodes
- * are basic Treiber stack nodes, also used for spare stack.
- *
- * The event barrier has an event count and a wait queue (actually
- * a Treiber stack). Workers are enabled to look for work when
- * the eventCount is incremented. If they fail to find work, they
- * may wait for next count. Upon release, threads help others wake
- * up.
- *
- * Synchronization events occur only in enough contexts to
- * maintain overall liveness:
- *
- * - Submission of a new task to the pool
- * - Resizes or other changes to the workers array
- * - pool termination
- * - A worker pushing a task on an empty queue
- *
- * The case of pushing a task occurs often enough, and is heavy
- * enough compared to simple stack pushes, to require special
- * handling: Method signalWork returns without advancing count if
- * the queue appears to be empty. This would ordinarily result in
- * races causing some queued waiters not to be woken up. To avoid
- * this, the first worker enqueued in method sync (see
- * syncIsReleasable) rescans for tasks after being enqueued, and
- * helps signal if any are found. This works well because the
- * worker has nothing better to do, and so might as well help
- * alleviate the overhead and contention on the threads actually
- * doing work. Also, since event counts increments on task
- * availability exist to maintain liveness (rather than to force
- * refreshes etc), it is OK for callers to exit early if
- * contending with another signaller.
- */
- static final class WaitQueueNode {
- WaitQueueNode next; // only written before enqueued
- volatile ForkJoinWorkerThread thread; // nulled to cancel wait
- final long count; // unused for spare stack
-
- WaitQueueNode(long c, ForkJoinWorkerThread w) {
- count = c;
- thread = w;
+ if (Thread.interrupted())
+ throw new InterruptedException();
+ if (this == common) {
+ awaitQuiescence(timeout, unit);
+ return false;
}
-
- /**
- * Wake up waiter, returning false if known to already
- */
- boolean signal() {
- ForkJoinWorkerThread t = thread;
- if (t == null)
- return false;
- thread = null;
- LockSupport.unpark(t);
+ long nanos = unit.toNanos(timeout);
+ if (isTerminated())
return true;
- }
-
- /**
- * Await release on sync
- */
- void awaitSyncRelease(ForkJoinPool p) {
- while (thread != null && !p.syncIsReleasable(this))
- LockSupport.park(this);
- }
-
- /**
- * Await resumption as spare
- */
- void awaitSpareRelease() {
- while (thread != null) {
- if (!Thread.interrupted())
- LockSupport.park(this);
- }
- }
- }
-
- /**
- * Ensures that no thread is waiting for count to advance from the
- * current value of eventCount read on entry to this method, by
- * releasing waiting threads if necessary.
- * @return the count
- */
- final long ensureSync() {
- long c = eventCount;
- WaitQueueNode q;
- while ((q = syncStack) != null && q.count < c) {
- if (casBarrierStack(q, null)) {
- do {
- q.signal();
- } while ((q = q.next) != null);
- break;
- }
- }
- return c;
- }
-
- /**
- * Increments event count and releases waiting threads.
- */
- private void signalIdleWorkers() {
- long c;
- do;while (!casEventCount(c = eventCount, c+1));
- ensureSync();
- }
-
- /**
- * Signal threads waiting to poll a task. Because method sync
- * rechecks availability, it is OK to only proceed if queue
- * appears to be non-empty, and OK to skip under contention to
- * increment count (since some other thread succeeded).
- */
- final void signalWork() {
- long c;
- WaitQueueNode q;
- if (syncStack != null &&
- casEventCount(c = eventCount, c+1) &&
- (((q = syncStack) != null && q.count <= c) &&
- (!casBarrierStack(q, q.next) || !q.signal())))
- ensureSync();
- }
-
- /**
- * Waits until event count advances from last value held by
- * caller, or if excess threads, caller is resumed as spare, or
- * caller or pool is terminating. Updates caller's event on exit.
- * @param w the calling worker thread
- */
- final void sync(ForkJoinWorkerThread w) {
- updateStealCount(w); // Transfer w's count while it is idle
-
- while (!w.isShutdown() && !isTerminating() && !suspendIfSpare(w)) {
- long prev = w.lastEventCount;
- WaitQueueNode node = null;
- WaitQueueNode h;
- while (eventCount == prev &&
- ((h = syncStack) == null || h.count == prev)) {
- if (node == null)
- node = new WaitQueueNode(prev, w);
- if (casBarrierStack(node.next = h, node)) {
- node.awaitSyncRelease(this);
+ long startTime = System.nanoTime();
+ boolean terminated = false;
+ synchronized (this) {
+ for (long waitTime = nanos, millis = 0L;;) {
+ if (terminated = isTerminated() ||
+ waitTime <= 0L ||
+ (millis = unit.toMillis(waitTime)) <= 0L)
break;
- }
+ wait(millis);
+ waitTime = nanos - (System.nanoTime() - startTime);
}
- long ec = ensureSync();
- if (ec != prev) {
- w.lastEventCount = ec;
- break;
- }
- }
- }
-
- /**
- * Returns true if worker waiting on sync can proceed:
- * - on signal (thread == null)
- * - on event count advance (winning race to notify vs signaller)
- * - on Interrupt
- * - if the first queued node, we find work available
- * If node was not signalled and event count not advanced on exit,
- * then we also help advance event count.
- * @return true if node can be released
- */
- final boolean syncIsReleasable(WaitQueueNode node) {
- long prev = node.count;
- if (!Thread.interrupted() && node.thread != null &&
- (node.next != null ||
- !ForkJoinWorkerThread.hasQueuedTasks(workers)) &&
- eventCount == prev)
- return false;
- if (node.thread != null) {
- node.thread = null;
- long ec = eventCount;
- if (prev <= ec) // help signal
- casEventCount(ec, ec+1);
}
- return true;
+ return terminated;
}
/**
- * Returns true if a new sync event occurred since last call to
- * sync or this method, if so, updating caller's count.
+ * If called by a ForkJoinTask operating in this pool, equivalent
+ * in effect to {@link ForkJoinTask#helpQuiesce}. Otherwise,
+ * waits and/or attempts to assist performing tasks until this
+ * pool {@link #isQuiescent} or the indicated timeout elapses.
+ *
+ * @param timeout the maximum time to wait
+ * @param unit the time unit of the timeout argument
+ * @return {@code true} if quiescent; {@code false} if the
+ * timeout elapsed.
*/
- final boolean hasNewSyncEvent(ForkJoinWorkerThread w) {
- long lc = w.lastEventCount;
- long ec = ensureSync();
- if (ec == lc)
- return false;
- w.lastEventCount = ec;
- return true;
- }
-
- // Parallelism maintenance
-
- /**
- * Decrement running count; if too low, add spare.
- *
- * Conceptually, all we need to do here is add or resume a
- * spare thread when one is about to block (and remove or
- * suspend it later when unblocked -- see suspendIfSpare).
- * However, implementing this idea requires coping with
- * several problems: We have imperfect information about the
- * states of threads. Some count updates can and usually do
- * lag run state changes, despite arrangements to keep them
- * accurate (for example, when possible, updating counts
- * before signalling or resuming), especially when running on
- * dynamic JVMs that don't optimize the infrequent paths that
- * update counts. Generating too many threads can make these
- * problems become worse, because excess threads are more
- * likely to be context-switched with others, slowing them all
- * down, especially if there is no work available, so all are
- * busy scanning or idling. Also, excess spare threads can
- * only be suspended or removed when they are idle, not
- * immediately when they aren't needed. So adding threads will
- * raise parallelism level for longer than necessary. Also,
- * FJ applications often enounter highly transient peaks when
- * many threads are blocked joining, but for less time than it
- * takes to create or resume spares.
- *
- * @param joinMe if non-null, return early if done
- * @param maintainParallelism if true, try to stay within
- * target counts, else create only to avoid starvation
- * @return true if joinMe known to be done
- */
- final boolean preJoin(ForkJoinTask<?> joinMe, boolean maintainParallelism) {
- maintainParallelism &= maintainsParallelism; // overrride
- boolean dec = false; // true when running count decremented
- while (spareStack == null || !tryResumeSpare(dec)) {
- int counts = workerCounts;
- if (dec || (dec = casWorkerCounts(counts, --counts))) { // CAS cheat
- if (!needSpare(counts, maintainParallelism))
- break;
- if (joinMe.status < 0)
- return true;
- if (tryAddSpare(counts))
- break;
- }
+ public boolean awaitQuiescence(long timeout, TimeUnit unit) {
+ long nanos = unit.toNanos(timeout);
+ ForkJoinWorkerThread wt;
+ Thread thread = Thread.currentThread();
+ if ((thread instanceof ForkJoinWorkerThread) &&
+ (wt = (ForkJoinWorkerThread)thread).pool == this) {
+ helpQuiescePool(wt.workQueue);
+ return true;
}
- return false;
- }
-
- /**
- * Same idea as preJoin
- */
- final boolean preBlock(ManagedBlocker blocker, boolean maintainParallelism){
- maintainParallelism &= maintainsParallelism;
- boolean dec = false;
- while (spareStack == null || !tryResumeSpare(dec)) {
- int counts = workerCounts;
- if (dec || (dec = casWorkerCounts(counts, --counts))) {
- if (!needSpare(counts, maintainParallelism))
- break;
- if (blocker.isReleasable())
- return true;
- if (tryAddSpare(counts))
- break;
+ long startTime = System.nanoTime();
+ WorkQueue[] ws;
+ int r = 0, m;
+ boolean found = true;
+ while (!isQuiescent() && (ws = workQueues) != null &&
+ (m = ws.length - 1) >= 0) {
+ if (!found) {
+ if ((System.nanoTime() - startTime) > nanos)
+ return false;
+ Thread.yield(); // cannot block
}
- }
- return false;
- }
-
- /**
- * Returns true if a spare thread appears to be needed. If
- * maintaining parallelism, returns true when the deficit in
- * running threads is more than the surplus of total threads, and
- * there is apparently some work to do. This self-limiting rule
- * means that the more threads that have already been added, the
- * less parallelism we will tolerate before adding another.
- * @param counts current worker counts
- * @param maintainParallelism try to maintain parallelism
- */
- private boolean needSpare(int counts, boolean maintainParallelism) {
- int ps = parallelism;
- int rc = runningCountOf(counts);
- int tc = totalCountOf(counts);
- int runningDeficit = ps - rc;
- int totalSurplus = tc - ps;
- return (tc < maxPoolSize &&
- (rc == 0 || totalSurplus < 0 ||
- (maintainParallelism &&
- runningDeficit > totalSurplus &&
- ForkJoinWorkerThread.hasQueuedTasks(workers))));
- }
-
- /**
- * Add a spare worker if lock available and no more than the
- * expected numbers of threads exist
- * @return true if successful
- */
- private boolean tryAddSpare(int expectedCounts) {
- final ReentrantLock lock = this.workerLock;
- int expectedRunning = runningCountOf(expectedCounts);
- int expectedTotal = totalCountOf(expectedCounts);
- boolean success = false;
- boolean locked = false;
- // confirm counts while locking; CAS after obtaining lock
- try {
- for (;;) {
- int s = workerCounts;
- int tc = totalCountOf(s);
- int rc = runningCountOf(s);
- if (rc > expectedRunning || tc > expectedTotal)
- break;
- if (!locked && !(locked = lock.tryLock()))
- break;
- if (casWorkerCounts(s, workerCountsFor(tc+1, rc+1))) {
- createAndStartSpare(tc);
- success = true;
+ found = false;
+ for (int j = (m + 1) << 2; j >= 0; --j) {
+ ForkJoinTask<?> t; WorkQueue q; int b;
+ if ((q = ws[r++ & m]) != null && (b = q.base) - q.top < 0) {
+ found = true;
+ if ((t = q.pollAt(b)) != null) {
+ if (q.base - q.top < 0)
+ signalWork(q);
+ t.doExec();
+ }
break;
}
}
- } finally {
- if (locked)
- lock.unlock();
- }
- return success;
- }
-
- /**
- * Add the kth spare worker. On entry, pool coounts are already
- * adjusted to reflect addition.
- */
- private void createAndStartSpare(int k) {
- ForkJoinWorkerThread w = null;
- ForkJoinWorkerThread[] ws = ensureWorkerArrayCapacity(k + 1);
- int len = ws.length;
- // Probably, we can place at slot k. If not, find empty slot
- if (k < len && ws[k] != null) {
- for (k = 0; k < len && ws[k] != null; ++k)
- ;
- }
- if (k < len && !isTerminating() && (w = createWorker(k)) != null) {
- ws[k] = w;
- w.start();
- }
- else
- updateWorkerCount(-1); // adjust on failure
- signalIdleWorkers();
- }
-
- /**
- * Suspend calling thread w if there are excess threads. Called
- * only from sync. Spares are enqueued in a Treiber stack
- * using the same WaitQueueNodes as barriers. They are resumed
- * mainly in preJoin, but are also woken on pool events that
- * require all threads to check run state.
- * @param w the caller
- */
- private boolean suspendIfSpare(ForkJoinWorkerThread w) {
- WaitQueueNode node = null;
- int s;
- while (parallelism < runningCountOf(s = workerCounts)) {
- if (node == null)
- node = new WaitQueueNode(0, w);
- if (casWorkerCounts(s, s-1)) { // representation-dependent
- // push onto stack
- do;while (!casSpareStack(node.next = spareStack, node));
- // block until released by resumeSpare
- node.awaitSpareRelease();
- return true;
- }
- }
- return false;
- }
-
- /**
- * Try to pop and resume a spare thread.
- * @param updateCount if true, increment running count on success
- * @return true if successful
- */
- private boolean tryResumeSpare(boolean updateCount) {
- WaitQueueNode q;
- while ((q = spareStack) != null) {
- if (casSpareStack(q, q.next)) {
- if (updateCount)
- updateRunningCount(1);
- q.signal();
- return true;
- }
- }
- return false;
- }
-
- /**
- * Pop and resume all spare threads. Same idea as ensureSync.
- * @return true if any spares released
- */
- private boolean resumeAllSpares() {
- WaitQueueNode q;
- while ( (q = spareStack) != null) {
- if (casSpareStack(q, null)) {
- do {
- updateRunningCount(1);
- q.signal();
- } while ((q = q.next) != null);
- return true;
- }
}
- return false;
+ return true;
}
/**
- * Pop and shutdown excessive spare threads. Call only while
- * holding lock. This is not guaranteed to eliminate all excess
- * threads, only those suspended as spares, which are the ones
- * unlikely to be needed in the future.
+ * Waits and/or attempts to assist performing tasks indefinitely
+ * until the {@link #commonPool()} {@link #isQuiescent}.
*/
- private void trimSpares() {
- int surplus = totalCountOf(workerCounts) - parallelism;
- WaitQueueNode q;
- while (surplus > 0 && (q = spareStack) != null) {
- if (casSpareStack(q, null)) {
- do {
- updateRunningCount(1);
- ForkJoinWorkerThread w = q.thread;
- if (w != null && surplus > 0 &&
- runningCountOf(workerCounts) > 0 && w.shutdown())
- --surplus;
- q.signal();
- } while ((q = q.next) != null);
- }
- }
+ static void quiesceCommonPool() {
+ common.awaitQuiescence(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
}
/**
* Interface for extending managed parallelism for tasks running
- * in ForkJoinPools. A ManagedBlocker provides two methods.
- * Method <code>isReleasable</code> must return true if blocking is not
- * necessary. Method <code>block</code> blocks the current thread
- * if necessary (perhaps internally invoking isReleasable before
- * actually blocking.).
+ * in {@link ForkJoinPool}s.
+ *
+ * <p>A {@code ManagedBlocker} provides two methods. Method
+ * {@code isReleasable} must return {@code true} if blocking is
+ * not necessary. Method {@code block} blocks the current thread
+ * if necessary (perhaps internally invoking {@code isReleasable}
+ * before actually blocking). These actions are performed by any
+ * thread invoking {@link ForkJoinPool#managedBlock}. The
+ * unusual methods in this API accommodate synchronizers that may,
+ * but don't usually, block for long periods. Similarly, they
+ * allow more efficient internal handling of cases in which
+ * additional workers may be, but usually are not, needed to
+ * ensure sufficient parallelism. Toward this end,
+ * implementations of method {@code isReleasable} must be amenable
+ * to repeated invocation.
+ *
* <p>For example, here is a ManagedBlocker based on a
* ReentrantLock:
- * <pre>
- * class ManagedLocker implements ManagedBlocker {
- * final ReentrantLock lock;
- * boolean hasLock = false;
- * ManagedLocker(ReentrantLock lock) { this.lock = lock; }
- * public boolean block() {
- * if (!hasLock)
- * lock.lock();
- * return true;
- * }
- * public boolean isReleasable() {
- * return hasLock || (hasLock = lock.tryLock());
- * }
+ * <pre> {@code
+ * class ManagedLocker implements ManagedBlocker {
+ * final ReentrantLock lock;
+ * boolean hasLock = false;
+ * ManagedLocker(ReentrantLock lock) { this.lock = lock; }
+ * public boolean block() {
+ * if (!hasLock)
+ * lock.lock();
+ * return true;
* }
- * </pre>
+ * public boolean isReleasable() {
+ * return hasLock || (hasLock = lock.tryLock());
+ * }
+ * }}</pre>
+ *
+ * <p>Here is a class that possibly blocks waiting for an
+ * item on a given queue:
+ * <pre> {@code
+ * class QueueTaker<E> implements ManagedBlocker {
+ * final BlockingQueue<E> queue;
+ * volatile E item = null;
+ * QueueTaker(BlockingQueue<E> q) { this.queue = q; }
+ * public boolean block() throws InterruptedException {
+ * if (item == null)
+ * item = queue.take();
+ * return true;
+ * }
+ * public boolean isReleasable() {
+ * return item != null || (item = queue.poll()) != null;
+ * }
+ * public E getItem() { // call after pool.managedBlock completes
+ * return item;
+ * }
+ * }}</pre>
*/
public static interface ManagedBlocker {
/**
* Possibly blocks the current thread, for example waiting for
* a lock or condition.
- * @return true if no additional blocking is necessary (i.e.,
- * if isReleasable would return true).
+ *
+ * @return {@code true} if no additional blocking is necessary
+ * (i.e., if isReleasable would return true)
* @throws InterruptedException if interrupted while waiting
- * (the method is not required to do so, but is allowe to).
+ * (the method is not required to do so, but is allowed to)
*/
boolean block() throws InterruptedException;
/**
- * Returns true if blocking is unnecessary.
+ * Returns {@code true} if blocking is unnecessary.
*/
boolean isReleasable();
}
/**
* Blocks in accord with the given blocker. If the current thread
- * is a ForkJoinWorkerThread, this method possibly arranges for a
- * spare thread to be activated if necessary to ensure parallelism
- * while the current thread is blocked. If
- * <code>maintainParallelism</code> is true and the pool supports
- * it ({@link #getMaintainsParallelism}), this method attempts to
- * maintain the pool's nominal parallelism. Otherwise if activates
- * a thread only if necessary to avoid complete starvation. This
- * option may be preferable when blockages use timeouts, or are
- * almost always brief.
- *
- * <p> If the caller is not a ForkJoinTask, this method is behaviorally
- * equivalent to
- * <pre>
- * while (!blocker.isReleasable())
- * if (blocker.block())
- * return;
- * </pre>
- * If the caller is a ForkJoinTask, then the pool may first
- * be expanded to ensure parallelism, and later adjusted.
+ * is a {@link ForkJoinWorkerThread}, this method possibly
+ * arranges for a spare thread to be activated if necessary to
+ * ensure sufficient parallelism while the current thread is blocked.
+ *
+ * <p>If the caller is not a {@link ForkJoinTask}, this method is
+ * behaviorally equivalent to
+ * <pre> {@code
+ * while (!blocker.isReleasable())
+ * if (blocker.block())
+ * return;
+ * }</pre>
+ *
+ * If the caller is a {@code ForkJoinTask}, then the pool may
+ * first be expanded to ensure parallelism, and later adjusted.
*
* @param blocker the blocker
- * @param maintainParallelism if true and supported by this pool,
- * attempt to maintain the pool's nominal parallelism; otherwise
- * activate a thread only if necessary to avoid complete
- * starvation.
- * @throws InterruptedException if blocker.block did so.
- */
- public static void managedBlock(ManagedBlocker blocker,
- boolean maintainParallelism)
+ * @throws InterruptedException if blocker.block did so
+ */
+ public static void managedBlock(ManagedBlocker blocker)
throws InterruptedException {
Thread t = Thread.currentThread();
- ForkJoinPool pool = (t instanceof ForkJoinWorkerThread?
- ((ForkJoinWorkerThread)t).pool : null);
- if (!blocker.isReleasable()) {
- try {
- if (pool == null ||
- !pool.preBlock(blocker, maintainParallelism))
- awaitBlocker(blocker);
- } finally {
- if (pool != null)
- pool.updateRunningCount(1);
+ if (t instanceof ForkJoinWorkerThread) {
+ ForkJoinPool p = ((ForkJoinWorkerThread)t).pool;
+ while (!blocker.isReleasable()) { // variant of helpSignal
+ WorkQueue[] ws; WorkQueue q; int m, u;
+ if ((ws = p.workQueues) != null && (m = ws.length - 1) >= 0) {
+ for (int i = 0; i <= m; ++i) {
+ if (blocker.isReleasable())
+ return;
+ if ((q = ws[i]) != null && q.base - q.top < 0) {
+ p.signalWork(q);
+ if ((u = (int)(p.ctl >>> 32)) >= 0 ||
+ (u >> UAC_SHIFT) >= 0)
+ break;
+ }
+ }
+ }
+ if (p.tryCompensate()) {
+ try {
+ do {} while (!blocker.isReleasable() &&
+ !blocker.block());
+ } finally {
+ p.incrementActiveCount();
+ }
+ break;
+ }
}
}
+ else {
+ do {} while (!blocker.isReleasable() &&
+ !blocker.block());
+ }
}
- private static void awaitBlocker(ManagedBlocker blocker)
- throws InterruptedException {
- do;while (!blocker.isReleasable() && !blocker.block());
- }
-
- // AbstractExecutorService overrides
+ // AbstractExecutorService overrides. These rely on undocumented
+ // fact that ForkJoinTask.adapt returns ForkJoinTasks that also
+ // implement RunnableFuture.
protected <T> RunnableFuture<T> newTaskFor(Runnable runnable, T value) {
- return new AdaptedRunnable(runnable, value);
+ return new ForkJoinTask.AdaptedRunnable<T>(runnable, value);
}
protected <T> RunnableFuture<T> newTaskFor(Callable<T> callable) {
- return new AdaptedCallable(callable);
+ return new ForkJoinTask.AdaptedCallable<T>(callable);
}
+ // Unsafe mechanics
+ private static final sun.misc.Unsafe U;
+ private static final long CTL;
+ private static final long PARKBLOCKER;
+ private static final int ABASE;
+ private static final int ASHIFT;
+ private static final long STEALCOUNT;
+ private static final long PLOCK;
+ private static final long INDEXSEED;
+ private static final long QLOCK;
- // Temporary Unsafe mechanics for preliminary release
- private static Unsafe getUnsafe() throws Throwable {
+ static {
+ // initialize field offsets for CAS etc
try {
- return Unsafe.getUnsafe();
- } catch (SecurityException se) {
- try {
- return java.security.AccessController.doPrivileged
- (new java.security.PrivilegedExceptionAction<Unsafe>() {
- public Unsafe run() throws Exception {
- return getUnsafePrivileged();
- }});
- } catch (java.security.PrivilegedActionException e) {
- throw e.getCause();
- }
+ U = getUnsafe();
+ Class<?> k = ForkJoinPool.class;
+ CTL = U.objectFieldOffset
+ (k.getDeclaredField("ctl"));
+ STEALCOUNT = U.objectFieldOffset
+ (k.getDeclaredField("stealCount"));
+ PLOCK = U.objectFieldOffset
+ (k.getDeclaredField("plock"));
+ INDEXSEED = U.objectFieldOffset
+ (k.getDeclaredField("indexSeed"));
+ Class<?> tk = Thread.class;
+ PARKBLOCKER = U.objectFieldOffset
+ (tk.getDeclaredField("parkBlocker"));
+ Class<?> wk = WorkQueue.class;
+ QLOCK = U.objectFieldOffset
+ (wk.getDeclaredField("qlock"));
+ Class<?> ak = ForkJoinTask[].class;
+ ABASE = U.arrayBaseOffset(ak);
+ int scale = U.arrayIndexScale(ak);
+ if ((scale & (scale - 1)) != 0)
+ throw new Error("data type scale not a power of two");
+ ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
+ } catch (Exception e) {
+ throw new Error(e);
}
- }
-
- private static Unsafe getUnsafePrivileged()
- throws NoSuchFieldException, IllegalAccessException {
- Field f = Unsafe.class.getDeclaredField("theUnsafe");
- f.setAccessible(true);
- return (Unsafe) f.get(null);
- }
- private static long fieldOffset(String fieldName)
- throws NoSuchFieldException {
- return _unsafe.objectFieldOffset
- (ForkJoinPool.class.getDeclaredField(fieldName));
- }
+ submitters = new ThreadLocal<Submitter>();
+ ForkJoinWorkerThreadFactory fac = defaultForkJoinWorkerThreadFactory =
+ new DefaultForkJoinWorkerThreadFactory();
+ modifyThreadPermission = new RuntimePermission("modifyThread");
- static final Unsafe _unsafe;
- static final long eventCountOffset;
- static final long workerCountsOffset;
- static final long runControlOffset;
- static final long syncStackOffset;
- static final long spareStackOffset;
+ /*
+ * Establish common pool parameters. For extra caution,
+ * computations to set up common pool state are here; the
+ * constructor just assigns these values to fields.
+ */
- static {
- try {
- _unsafe = getUnsafe();
- eventCountOffset = fieldOffset("eventCount");
- workerCountsOffset = fieldOffset("workerCounts");
- runControlOffset = fieldOffset("runControl");
- syncStackOffset = fieldOffset("syncStack");
- spareStackOffset = fieldOffset("spareStack");
- } catch (Throwable e) {
- throw new RuntimeException("Could not initialize intrinsics", e);
+ int par = 0;
+ Thread.UncaughtExceptionHandler handler = null;
+ try { // TBD: limit or report ignored exceptions?
+ String pp = System.getProperty
+ ("java.util.concurrent.ForkJoinPool.common.parallelism");
+ String hp = System.getProperty
+ ("java.util.concurrent.ForkJoinPool.common.exceptionHandler");
+ String fp = System.getProperty
+ ("java.util.concurrent.ForkJoinPool.common.threadFactory");
+ if (fp != null)
+ fac = ((ForkJoinWorkerThreadFactory)ClassLoader.
+ getSystemClassLoader().loadClass(fp).newInstance());
+ if (hp != null)
+ handler = ((Thread.UncaughtExceptionHandler)ClassLoader.
+ getSystemClassLoader().loadClass(hp).newInstance());
+ if (pp != null)
+ par = Integer.parseInt(pp);
+ } catch (Exception ignore) {
}
- }
- private boolean casEventCount(long cmp, long val) {
- return _unsafe.compareAndSwapLong(this, eventCountOffset, cmp, val);
- }
- private boolean casWorkerCounts(int cmp, int val) {
- return _unsafe.compareAndSwapInt(this, workerCountsOffset, cmp, val);
- }
- private boolean casRunControl(int cmp, int val) {
- return _unsafe.compareAndSwapInt(this, runControlOffset, cmp, val);
- }
- private boolean casSpareStack(WaitQueueNode cmp, WaitQueueNode val) {
- return _unsafe.compareAndSwapObject(this, spareStackOffset, cmp, val);
+ if (par <= 0)
+ par = Runtime.getRuntime().availableProcessors();
+ if (par > MAX_CAP)
+ par = MAX_CAP;
+ commonParallelism = par;
+ long np = (long)(-par); // precompute initial ctl value
+ long ct = ((np << AC_SHIFT) & AC_MASK) | ((np << TC_SHIFT) & TC_MASK);
+
+ common = new ForkJoinPool(par, ct, fac, handler);
}
- private boolean casBarrierStack(WaitQueueNode cmp, WaitQueueNode val) {
- return _unsafe.compareAndSwapObject(this, syncStackOffset, cmp, val);
+
+ /**
+ * Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
+ * Replace with a simple call to Unsafe.getUnsafe when integrating
+ * into a jdk.
+ *
+ * @return a sun.misc.Unsafe
+ */
+ private static sun.misc.Unsafe getUnsafe() {
+ return scala.concurrent.util.Unsafe.instance;
}
}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
index dc1a6bc..fd1e132 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinTask.java
@@ -1,541 +1,735 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
+
import java.io.Serializable;
-import java.util.*;
-import java.util.concurrent.*;
-import java.util.concurrent.atomic.*;
-import sun.misc.Unsafe;
-import java.lang.reflect.*;
+import java.util.Collection;
+import java.util.List;
+import java.util.RandomAccess;
+import java.lang.ref.WeakReference;
+import java.lang.ref.ReferenceQueue;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CancellationException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.RunnableFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.locks.ReentrantLock;
+import java.lang.reflect.Constructor;
/**
- * Abstract base class for tasks that run within a {@link
- * ForkJoinPool}. A ForkJoinTask is a thread-like entity that is much
+ * Abstract base class for tasks that run within a {@link ForkJoinPool}.
+ * A {@code ForkJoinTask} is a thread-like entity that is much
* lighter weight than a normal thread. Huge numbers of tasks and
* subtasks may be hosted by a small number of actual threads in a
* ForkJoinPool, at the price of some usage limitations.
*
- * <p> A "main" ForkJoinTask begins execution when submitted to a
- * {@link ForkJoinPool}. Once started, it will usually in turn start
- * other subtasks. As indicated by the name of this class, many
- * programs using ForkJoinTasks employ only methods <code>fork</code>
- * and <code>join</code>, or derivatives such as
- * <code>invokeAll</code>. However, this class also provides a number
- * of other methods that can come into play in advanced usages, as
- * well as extension mechanics that allow support of new forms of
- * fork/join processing.
+ * <p>A "main" {@code ForkJoinTask} begins execution when it is
+ * explicitly submitted to a {@link ForkJoinPool}, or, if not already
+ * engaged in a ForkJoin computation, commenced in the {@link
+ * ForkJoinPool#commonPool()} via {@link #fork}, {@link #invoke}, or
+ * related methods. Once started, it will usually in turn start other
+ * subtasks. As indicated by the name of this class, many programs
+ * using {@code ForkJoinTask} employ only methods {@link #fork} and
+ * {@link #join}, or derivatives such as {@link
+ * #invokeAll(ForkJoinTask...) invokeAll}. However, this class also
+ * provides a number of other methods that can come into play in
+ * advanced usages, as well as extension mechanics that allow support
+ * of new forms of fork/join processing.
+ *
+ * <p>A {@code ForkJoinTask} is a lightweight form of {@link Future}.
+ * The efficiency of {@code ForkJoinTask}s stems from a set of
+ * restrictions (that are only partially statically enforceable)
+ * reflecting their main use as computational tasks calculating pure
+ * functions or operating on purely isolated objects. The primary
+ * coordination mechanisms are {@link #fork}, that arranges
+ * asynchronous execution, and {@link #join}, that doesn't proceed
+ * until the task's result has been computed. Computations should
+ * ideally avoid {@code synchronized} methods or blocks, and should
+ * minimize other blocking synchronization apart from joining other
+ * tasks or using synchronizers such as Phasers that are advertised to
+ * cooperate with fork/join scheduling. Subdividable tasks should also
+ * not perform blocking I/O, and should ideally access variables that
+ * are completely independent of those accessed by other running
+ * tasks. These guidelines are loosely enforced by not permitting
+ * checked exceptions such as {@code IOExceptions} to be
+ * thrown. However, computations may still encounter unchecked
+ * exceptions, that are rethrown to callers attempting to join
+ * them. These exceptions may additionally include {@link
+ * RejectedExecutionException} stemming from internal resource
+ * exhaustion, such as failure to allocate internal task
+ * queues. Rethrown exceptions behave in the same way as regular
+ * exceptions, but, when possible, contain stack traces (as displayed
+ * for example using {@code ex.printStackTrace()}) of both the thread
+ * that initiated the computation as well as the thread actually
+ * encountering the exception; minimally only the latter.
*
- * <p>A ForkJoinTask is a lightweight form of {@link Future}. The
- * efficiency of ForkJoinTasks stems from a set of restrictions (that
- * are only partially statically enforceable) reflecting their
- * intended use as computational tasks calculating pure functions or
- * operating on purely isolated objects. The primary coordination
- * mechanisms are {@link #fork}, that arranges asynchronous execution,
- * and {@link #join}, that doesn't proceed until the task's result has
- * been computed. Computations should avoid <code>synchronized</code>
- * methods or blocks, and should minimize other blocking
- * synchronization apart from joining other tasks or using
- * synchronizers such as Phasers that are advertised to cooperate with
- * fork/join scheduling. Tasks should also not perform blocking IO,
- * and should ideally access variables that are completely independent
- * of those accessed by other running tasks. Minor breaches of these
- * restrictions, for example using shared output streams, may be
- * tolerable in practice, but frequent use may result in poor
- * performance, and the potential to indefinitely stall if the number
- * of threads not waiting for IO or other external synchronization
- * becomes exhausted. This usage restriction is in part enforced by
- * not permitting checked exceptions such as <code>IOExceptions</code>
- * to be thrown. However, computations may still encounter unchecked
- * exceptions, that are rethrown to callers attempting join
- * them. These exceptions may additionally include
- * RejectedExecutionExceptions stemming from internal resource
- * exhaustion such as failure to allocate internal task queues.
+ * <p>It is possible to define and use ForkJoinTasks that may block,
+ * but doing do requires three further considerations: (1) Completion
+ * of few if any <em>other</em> tasks should be dependent on a task
+ * that blocks on external synchronization or I/O. Event-style async
+ * tasks that are never joined (for example, those subclassing {@link
+ * CountedCompleter}) often fall into this category. (2) To minimize
+ * resource impact, tasks should be small; ideally performing only the
+ * (possibly) blocking action. (3) Unless the {@link
+ * ForkJoinPool.ManagedBlocker} API is used, or the number of possibly
+ * blocked tasks is known to be less than the pool's {@link
+ * ForkJoinPool#getParallelism} level, the pool cannot guarantee that
+ * enough threads will be available to ensure progress or good
+ * performance.
*
* <p>The primary method for awaiting completion and extracting
* results of a task is {@link #join}, but there are several variants:
* The {@link Future#get} methods support interruptible and/or timed
- * waits for completion and report results using <code>Future</code>
- * conventions. Method {@link #helpJoin} enables callers to actively
- * execute other tasks while awaiting joins, which is sometimes more
- * efficient but only applies when all subtasks are known to be
- * strictly tree-structured. Method {@link #invoke} is semantically
- * equivalent to <code>fork(); join()</code> but always attempts to
- * begin execution in the current thread. The "<em>quiet</em>" forms
- * of these methods do not extract results or report exceptions. These
+ * waits for completion and report results using {@code Future}
+ * conventions. Method {@link #invoke} is semantically
+ * equivalent to {@code fork(); join()} but always attempts to begin
+ * execution in the current thread. The "<em>quiet</em>" forms of
+ * these methods do not extract results or report exceptions. These
* may be useful when a set of tasks are being executed, and you need
* to delay processing of results or exceptions until all complete.
- * Method <code>invokeAll</code> (available in multiple versions)
+ * Method {@code invokeAll} (available in multiple versions)
* performs the most common form of parallel invocation: forking a set
* of tasks and joining them all.
*
- * <p> The ForkJoinTask class is not usually directly subclassed.
+ * <p>In the most typical usages, a fork-join pair act like a call
+ * (fork) and return (join) from a parallel recursive function. As is
+ * the case with other forms of recursive calls, returns (joins)
+ * should be performed innermost-first. For example, {@code a.fork();
+ * b.fork(); b.join(); a.join();} is likely to be substantially more
+ * efficient than joining {@code a} before {@code b}.
+ *
+ * <p>The execution status of tasks may be queried at several levels
+ * of detail: {@link #isDone} is true if a task completed in any way
+ * (including the case where a task was cancelled without executing);
+ * {@link #isCompletedNormally} is true if a task completed without
+ * cancellation or encountering an exception; {@link #isCancelled} is
+ * true if the task was cancelled (in which case {@link #getException}
+ * returns a {@link java.util.concurrent.CancellationException}); and
+ * {@link #isCompletedAbnormally} is true if a task was either
+ * cancelled or encountered an exception, in which case {@link
+ * #getException} will return either the encountered exception or
+ * {@link java.util.concurrent.CancellationException}.
+ *
+ * <p>The ForkJoinTask class is not usually directly subclassed.
* Instead, you subclass one of the abstract classes that support a
- * particular style of fork/join processing. Normally, a concrete
- * ForkJoinTask subclass declares fields comprising its parameters,
- * established in a constructor, and then defines a <code>compute</code>
- * method that somehow uses the control methods supplied by this base
- * class. While these methods have <code>public</code> access (to allow
- * instances of different task subclasses to call each others
- * methods), some of them may only be called from within other
- * ForkJoinTasks. Attempts to invoke them in other contexts result in
- * exceptions or errors possibly including ClassCastException.
+ * particular style of fork/join processing, typically {@link
+ * RecursiveAction} for most computations that do not return results,
+ * {@link RecursiveTask} for those that do, and {@link
+ * CountedCompleter} for those in which completed actions trigger
+ * other actions. Normally, a concrete ForkJoinTask subclass declares
+ * fields comprising its parameters, established in a constructor, and
+ * then defines a {@code compute} method that somehow uses the control
+ * methods supplied by this base class.
+ *
+ * <p>Method {@link #join} and its variants are appropriate for use
+ * only when completion dependencies are acyclic; that is, the
+ * parallel computation can be described as a directed acyclic graph
+ * (DAG). Otherwise, executions may encounter a form of deadlock as
+ * tasks cyclically wait for each other. However, this framework
+ * supports other methods and techniques (for example the use of
+ * {@link Phaser}, {@link #helpQuiesce}, and {@link #complete}) that
+ * may be of use in constructing custom subclasses for problems that
+ * are not statically structured as DAGs. To support such usages a
+ * ForkJoinTask may be atomically <em>tagged</em> with a {@code short}
+ * value using {@link #setForkJoinTaskTag} or {@link
+ * #compareAndSetForkJoinTaskTag} and checked using {@link
+ * #getForkJoinTaskTag}. The ForkJoinTask implementation does not use
+ * these {@code protected} methods or tags for any purpose, but they
+ * may be of use in the construction of specialized subclasses. For
+ * example, parallel graph traversals can use the supplied methods to
+ * avoid revisiting nodes/tasks that have already been processed.
+ * (Method names for tagging are bulky in part to encourage definition
+ * of methods that reflect their usage patterns.)
*
- * <p>Most base support methods are <code>final</code> because their
- * implementations are intrinsically tied to the underlying
- * lightweight task scheduling framework, and so cannot be overridden.
- * Developers creating new basic styles of fork/join processing should
- * minimally implement <code>protected</code> methods
- * <code>exec</code>, <code>setRawResult</code>, and
- * <code>getRawResult</code>, while also introducing an abstract
- * computational method that can be implemented in its subclasses,
- * possibly relying on other <code>protected</code> methods provided
- * by this class.
+ * <p>Most base support methods are {@code final}, to prevent
+ * overriding of implementations that are intrinsically tied to the
+ * underlying lightweight task scheduling framework. Developers
+ * creating new basic styles of fork/join processing should minimally
+ * implement {@code protected} methods {@link #exec}, {@link
+ * #setRawResult}, and {@link #getRawResult}, while also introducing
+ * an abstract computational method that can be implemented in its
+ * subclasses, possibly relying on other {@code protected} methods
+ * provided by this class.
*
* <p>ForkJoinTasks should perform relatively small amounts of
- * computations, othewise splitting into smaller tasks. As a very
- * rough rule of thumb, a task should perform more than 100 and less
- * than 10000 basic computational steps. If tasks are too big, then
- * parellelism cannot improve throughput. If too small, then memory
- * and internal task maintenance overhead may overwhelm processing.
+ * computation. Large tasks should be split into smaller subtasks,
+ * usually via recursive decomposition. As a very rough rule of thumb,
+ * a task should perform more than 100 and less than 10000 basic
+ * computational steps, and should avoid indefinite looping. If tasks
+ * are too big, then parallelism cannot improve throughput. If too
+ * small, then memory and internal task maintenance overhead may
+ * overwhelm processing.
*
- * <p>ForkJoinTasks are <code>Serializable</code>, which enables them
- * to be used in extensions such as remote execution frameworks. It is
- * in general sensible to serialize tasks only before or after, but
- * not during execution. Serialization is not relied on during
- * execution itself.
+ * <p>This class provides {@code adapt} methods for {@link Runnable}
+ * and {@link Callable}, that may be of use when mixing execution of
+ * {@code ForkJoinTasks} with other kinds of tasks. When all tasks are
+ * of this form, consider using a pool constructed in <em>asyncMode</em>.
+ *
+ * <p>ForkJoinTasks are {@code Serializable}, which enables them to be
+ * used in extensions such as remote execution frameworks. It is
+ * sensible to serialize tasks only before or after, but not during,
+ * execution. Serialization is not relied on during execution itself.
+ *
+ * @since 1.7
+ * @author Doug Lea
*/
public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
- /**
- * Run control status bits packed into a single int to minimize
- * footprint and to ensure atomicity (via CAS). Status is
- * initially zero, and takes on nonnegative values until
- * completed, upon which status holds COMPLETED. CANCELLED, or
- * EXCEPTIONAL, which use the top 3 bits. Tasks undergoing
- * blocking waits by other threads have SIGNAL_MASK bits set --
- * bit 15 for external (nonFJ) waits, and the rest a count of
- * waiting FJ threads. (This representation relies on
- * ForkJoinPool max thread limits). Completion of a stolen task
- * with SIGNAL_MASK bits set awakens waiter via notifyAll. Even
- * though suboptimal for some purposes, we use basic builtin
- * wait/notify to take advantage of "monitor inflation" in JVMs
- * that we would otherwise need to emulate to avoid adding further
- * per-task bookkeeping overhead. Note that bits 16-28 are
- * currently unused. Also value 0x80000000 is available as spare
- * completion value.
- */
- volatile int status; // accessed directy by pool and workers
-
- static final int COMPLETION_MASK = 0xe0000000;
- static final int NORMAL = 0xe0000000; // == mask
- static final int CANCELLED = 0xc0000000;
- static final int EXCEPTIONAL = 0xa0000000;
- static final int SIGNAL_MASK = 0x0000ffff;
- static final int INTERNAL_SIGNAL_MASK = 0x00007fff;
- static final int EXTERNAL_SIGNAL = 0x00008000; // top bit of low word
+ /*
+ * See the internal documentation of class ForkJoinPool for a
+ * general implementation overview. ForkJoinTasks are mainly
+ * responsible for maintaining their "status" field amidst relays
+ * to methods in ForkJoinWorkerThread and ForkJoinPool.
+ *
+ * The methods of this class are more-or-less layered into
+ * (1) basic status maintenance
+ * (2) execution and awaiting completion
+ * (3) user-level methods that additionally report results.
+ * This is sometimes hard to see because this file orders exported
+ * methods in a way that flows well in javadocs.
+ */
- /**
- * Table of exceptions thrown by tasks, to enable reporting by
- * callers. Because exceptions are rare, we don't directly keep
- * them with task objects, but instead us a weak ref table. Note
- * that cancellation exceptions don't appear in the table, but are
- * instead recorded as status values.
- * Todo: Use ConcurrentReferenceHashMap
+ /*
+ * The status field holds run control status bits packed into a
+ * single int to minimize footprint and to ensure atomicity (via
+ * CAS). Status is initially zero, and takes on nonnegative
+ * values until completed, upon which status (anded with
+ * DONE_MASK) holds value NORMAL, CANCELLED, or EXCEPTIONAL. Tasks
+ * undergoing blocking waits by other threads have the SIGNAL bit
+ * set. Completion of a stolen task with SIGNAL set awakens any
+ * waiters via notifyAll. Even though suboptimal for some
+ * purposes, we use basic builtin wait/notify to take advantage of
+ * "monitor inflation" in JVMs that we would otherwise need to
+ * emulate to avoid adding further per-task bookkeeping overhead.
+ * We want these monitors to be "fat", i.e., not use biasing or
+ * thin-lock techniques, so use some odd coding idioms that tend
+ * to avoid them, mainly by arranging that every synchronized
+ * block performs a wait, notifyAll or both.
+ *
+ * These control bits occupy only (some of) the upper half (16
+ * bits) of status field. The lower bits are used for user-defined
+ * tags.
*/
- static final Map<ForkJoinTask<?>, Throwable> exceptionMap =
- Collections.synchronizedMap
- (new WeakHashMap<ForkJoinTask<?>, Throwable>());
- // within-package utilities
+ /** The run status of this task */
+ volatile int status; // accessed directly by pool and workers
+ static final int DONE_MASK = 0xf0000000; // mask out non-completion bits
+ static final int NORMAL = 0xf0000000; // must be negative
+ static final int CANCELLED = 0xc0000000; // must be < NORMAL
+ static final int EXCEPTIONAL = 0x80000000; // must be < CANCELLED
+ static final int SIGNAL = 0x00010000; // must be >= 1 << 16
+ static final int SMASK = 0x0000ffff; // short bits for tags
/**
- * Get current worker thread, or null if not a worker thread
+ * Marks completion and wakes up threads waiting to join this
+ * task.
+ *
+ * @param completion one of NORMAL, CANCELLED, EXCEPTIONAL
+ * @return completion status on exit
*/
- static ForkJoinWorkerThread getWorker() {
- Thread t = Thread.currentThread();
- return ((t instanceof ForkJoinWorkerThread)?
- (ForkJoinWorkerThread)t : null);
+ private int setCompletion(int completion) {
+ for (int s;;) {
+ if ((s = status) < 0)
+ return s;
+ if (U.compareAndSwapInt(this, STATUS, s, s | completion)) {
+ if ((s >>> 16) != 0)
+ synchronized (this) { notifyAll(); }
+ return completion;
+ }
+ }
}
- final boolean casStatus(int cmp, int val) {
- return _unsafe.compareAndSwapInt(this, statusOffset, cmp, val);
+ /**
+ * Primary execution method for stolen tasks. Unless done, calls
+ * exec and records status if completed, but doesn't wait for
+ * completion otherwise.
+ *
+ * @return status on exit from this method
+ */
+ final int doExec() {
+ int s; boolean completed;
+ if ((s = status) >= 0) {
+ try {
+ completed = exec();
+ } catch (Throwable rex) {
+ return setExceptionalCompletion(rex);
+ }
+ if (completed)
+ s = setCompletion(NORMAL);
+ }
+ return s;
}
/**
- * Workaround for not being able to rethrow unchecked exceptions.
+ * Tries to set SIGNAL status unless already completed. Used by
+ * ForkJoinPool. Other variants are directly incorporated into
+ * externalAwaitDone etc.
+ *
+ * @return true if successful
*/
- static void rethrowException(Throwable ex) {
- if (ex != null)
- _unsafe.throwException(ex);
+ final boolean trySetSignal() {
+ int s = status;
+ return s >= 0 && U.compareAndSwapInt(this, STATUS, s, s | SIGNAL);
}
- // Setting completion status
-
/**
- * Mark completion and wake up threads waiting to join this task.
- * @param completion one of NORMAL, CANCELLED, EXCEPTIONAL
+ * Blocks a non-worker-thread until completion.
+ * @return status upon completion
*/
- final void setCompletion(int completion) {
- ForkJoinPool pool = getPool();
- if (pool != null) {
- int s; // Clear signal bits while setting completion status
- do;while ((s = status) >= 0 && !casStatus(s, completion));
-
- if ((s & SIGNAL_MASK) != 0) {
- if ((s &= INTERNAL_SIGNAL_MASK) != 0)
- pool.updateRunningCount(s);
- synchronized(this) { notifyAll(); }
+ private int externalAwaitDone() {
+ int s;
+ ForkJoinPool.externalHelpJoin(this);
+ boolean interrupted = false;
+ while ((s = status) >= 0) {
+ if (U.compareAndSwapInt(this, STATUS, s, s | SIGNAL)) {
+ synchronized (this) {
+ if (status >= 0) {
+ try {
+ wait();
+ } catch (InterruptedException ie) {
+ interrupted = true;
+ }
+ }
+ else
+ notifyAll();
+ }
}
}
- else
- externallySetCompletion(completion);
+ if (interrupted)
+ Thread.currentThread().interrupt();
+ return s;
}
/**
- * Version of setCompletion for non-FJ threads. Leaves signal
- * bits for unblocked threads to adjust, and always notifies.
+ * Blocks a non-worker-thread until completion or interruption.
*/
- private void externallySetCompletion(int completion) {
+ private int externalInterruptibleAwaitDone() throws InterruptedException {
int s;
- do;while ((s = status) >= 0 &&
- !casStatus(s, (s & SIGNAL_MASK) | completion));
- synchronized(this) { notifyAll(); }
+ if (Thread.interrupted())
+ throw new InterruptedException();
+ ForkJoinPool.externalHelpJoin(this);
+ while ((s = status) >= 0) {
+ if (U.compareAndSwapInt(this, STATUS, s, s | SIGNAL)) {
+ synchronized (this) {
+ if (status >= 0)
+ wait();
+ else
+ notifyAll();
+ }
+ }
+ }
+ return s;
}
+
/**
- * Sets status to indicate normal completion
+ * Implementation for join, get, quietlyJoin. Directly handles
+ * only cases of already-completed, external wait, and
+ * unfork+exec. Others are relayed to ForkJoinPool.awaitJoin.
+ *
+ * @return status upon completion
*/
- final void setNormalCompletion() {
- // Try typical fast case -- single CAS, no signal, not already done.
- // Manually expand casStatus to improve chances of inlining it
- if (!_unsafe.compareAndSwapInt(this, statusOffset, 0, NORMAL))
- setCompletion(NORMAL);
+ private int doJoin() {
+ int s; Thread t; ForkJoinWorkerThread wt; ForkJoinPool.WorkQueue w;
+ return (s = status) < 0 ? s :
+ ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
+ (w = (wt = (ForkJoinWorkerThread)t).workQueue).
+ tryUnpush(this) && (s = doExec()) < 0 ? s :
+ wt.pool.awaitJoin(w, this) :
+ externalAwaitDone();
}
- // internal waiting and notification
-
/**
- * Performs the actual monitor wait for awaitDone
+ * Implementation for invoke, quietlyInvoke.
+ *
+ * @return status upon completion
*/
- private void doAwaitDone() {
- // Minimize lock bias and in/de-flation effects by maximizing
- // chances of waiting inside sync
- try {
- while (status >= 0)
- synchronized(this) { if (status >= 0) wait(); }
- } catch (InterruptedException ie) {
- onInterruptedWait();
- }
+ private int doInvoke() {
+ int s; Thread t; ForkJoinWorkerThread wt;
+ return (s = doExec()) < 0 ? s :
+ ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
+ (wt = (ForkJoinWorkerThread)t).pool.awaitJoin(wt.workQueue, this) :
+ externalAwaitDone();
}
+ // Exception table support
+
/**
- * Performs the actual monitor wait for awaitDone
+ * Table of exceptions thrown by tasks, to enable reporting by
+ * callers. Because exceptions are rare, we don't directly keep
+ * them with task objects, but instead use a weak ref table. Note
+ * that cancellation exceptions don't appear in the table, but are
+ * instead recorded as status values.
+ *
+ * Note: These statics are initialized below in static block.
*/
- private void doAwaitDone(long startTime, long nanos) {
- synchronized(this) {
- try {
- while (status >= 0) {
- long nt = nanos - System.nanoTime() - startTime;
- if (nt <= 0)
- break;
- wait(nt / 1000000, (int)(nt % 1000000));
- }
- } catch (InterruptedException ie) {
- onInterruptedWait();
- }
- }
- }
+ private static final ExceptionNode[] exceptionTable;
+ private static final ReentrantLock exceptionTableLock;
+ private static final ReferenceQueue<Object> exceptionTableRefQueue;
- // Awaiting completion
+ /**
+ * Fixed capacity for exceptionTable.
+ */
+ private static final int EXCEPTION_MAP_CAPACITY = 32;
/**
- * Sets status to indicate there is joiner, then waits for join,
- * surrounded with pool notifications.
- * @return status upon exit
+ * Key-value nodes for exception table. The chained hash table
+ * uses identity comparisons, full locking, and weak references
+ * for keys. The table has a fixed capacity because it only
+ * maintains task exceptions long enough for joiners to access
+ * them, so should never become very large for sustained
+ * periods. However, since we do not know when the last joiner
+ * completes, we must use weak references and expunge them. We do
+ * so on each operation (hence full locking). Also, some thread in
+ * any ForkJoinPool will call helpExpungeStaleExceptions when its
+ * pool becomes isQuiescent.
*/
- private int awaitDone(ForkJoinWorkerThread w, boolean maintainParallelism) {
- ForkJoinPool pool = w == null? null : w.pool;
- int s;
- while ((s = status) >= 0) {
- if (casStatus(s, pool == null? s|EXTERNAL_SIGNAL : s+1)) {
- if (pool == null || !pool.preJoin(this, maintainParallelism))
- doAwaitDone();
- if (((s = status) & INTERNAL_SIGNAL_MASK) != 0)
- adjustPoolCountsOnUnblock(pool);
- break;
- }
+ static final class ExceptionNode extends WeakReference<ForkJoinTask<?>> {
+ final Throwable ex;
+ ExceptionNode next;
+ final long thrower; // use id not ref to avoid weak cycles
+ ExceptionNode(ForkJoinTask<?> task, Throwable ex, ExceptionNode next) {
+ super(task, exceptionTableRefQueue);
+ this.ex = ex;
+ this.next = next;
+ this.thrower = Thread.currentThread().getId();
}
- return s;
}
/**
- * Timed version of awaitDone
- * @return status upon exit
+ * Records exception and sets status.
+ *
+ * @return status on exit
*/
- private int awaitDone(ForkJoinWorkerThread w, long nanos) {
- ForkJoinPool pool = w == null? null : w.pool;
+ final int recordExceptionalCompletion(Throwable ex) {
int s;
- while ((s = status) >= 0) {
- if (casStatus(s, pool == null? s|EXTERNAL_SIGNAL : s+1)) {
- long startTime = System.nanoTime();
- if (pool == null || !pool.preJoin(this, false))
- doAwaitDone(startTime, nanos);
- if ((s = status) >= 0) {
- adjustPoolCountsOnCancelledWait(pool);
- s = status;
+ if ((s = status) >= 0) {
+ int h = System.identityHashCode(this);
+ final ReentrantLock lock = exceptionTableLock;
+ lock.lock();
+ try {
+ expungeStaleExceptions();
+ ExceptionNode[] t = exceptionTable;
+ int i = h & (t.length - 1);
+ for (ExceptionNode e = t[i]; ; e = e.next) {
+ if (e == null) {
+ t[i] = new ExceptionNode(this, ex, t[i]);
+ break;
+ }
+ if (e.get() == this) // already present
+ break;
}
- if (s < 0 && (s & INTERNAL_SIGNAL_MASK) != 0)
- adjustPoolCountsOnUnblock(pool);
- break;
+ } finally {
+ lock.unlock();
}
+ s = setCompletion(EXCEPTIONAL);
}
return s;
}
/**
- * Notify pool that thread is unblocked. Called by signalled
- * threads when woken by non-FJ threads (which is atypical).
+ * Records exception and possibly propagates.
+ *
+ * @return status on exit
*/
- private void adjustPoolCountsOnUnblock(ForkJoinPool pool) {
- int s;
- do;while ((s = status) < 0 && !casStatus(s, s & COMPLETION_MASK));
- if (pool != null && (s &= INTERNAL_SIGNAL_MASK) != 0)
- pool.updateRunningCount(s);
- }
-
- /**
- * Notify pool to adjust counts on cancelled or timed out wait
- */
- private void adjustPoolCountsOnCancelledWait(ForkJoinPool pool) {
- if (pool != null) {
- int s;
- while ((s = status) >= 0 && (s & INTERNAL_SIGNAL_MASK) != 0) {
- if (casStatus(s, s - 1)) {
- pool.updateRunningCount(1);
- break;
- }
- }
- }
+ private int setExceptionalCompletion(Throwable ex) {
+ int s = recordExceptionalCompletion(ex);
+ if ((s & DONE_MASK) == EXCEPTIONAL)
+ internalPropagateException(ex);
+ return s;
}
/**
- * Handle interruptions during waits.
- */
- private void onInterruptedWait() {
- ForkJoinWorkerThread w = getWorker();
- if (w == null)
- Thread.currentThread().interrupt(); // re-interrupt
- else if (w.isTerminating())
- cancelIgnoringExceptions();
- // else if FJworker, ignore interrupt
- }
-
- // Recording and reporting exceptions
-
- private void setDoneExceptionally(Throwable rex) {
- exceptionMap.put(this, rex);
- setCompletion(EXCEPTIONAL);
+ * Hook for exception propagation support for tasks with completers.
+ */
+ void internalPropagateException(Throwable ex) {
}
/**
- * Throws the exception associated with status s;
- * @throws the exception
+ * Cancels, ignoring any exceptions thrown by cancel. Used during
+ * worker and pool shutdown. Cancel is spec'ed not to throw any
+ * exceptions, but if it does anyway, we have no recourse during
+ * shutdown, so guard against this case.
*/
- private void reportException(int s) {
- if ((s &= COMPLETION_MASK) < NORMAL) {
- if (s == CANCELLED)
- throw new CancellationException();
- else
- rethrowException(exceptionMap.get(this));
+ static final void cancelIgnoringExceptions(ForkJoinTask<?> t) {
+ if (t != null && t.status >= 0) {
+ try {
+ t.cancel(false);
+ } catch (Throwable ignore) {
+ }
}
}
/**
- * Returns result or throws exception using j.u.c.Future conventions
- * Only call when isDone known to be true.
+ * Removes exception node and clears status.
*/
- private V reportFutureResult()
- throws ExecutionException, InterruptedException {
- int s = status & COMPLETION_MASK;
- if (s < NORMAL) {
- Throwable ex;
- if (s == CANCELLED)
- throw new CancellationException();
- if (s == EXCEPTIONAL && (ex = exceptionMap.get(this)) != null)
- throw new ExecutionException(ex);
- if (Thread.interrupted())
- throw new InterruptedException();
+ private void clearExceptionalCompletion() {
+ int h = System.identityHashCode(this);
+ final ReentrantLock lock = exceptionTableLock;
+ lock.lock();
+ try {
+ ExceptionNode[] t = exceptionTable;
+ int i = h & (t.length - 1);
+ ExceptionNode e = t[i];
+ ExceptionNode pred = null;
+ while (e != null) {
+ ExceptionNode next = e.next;
+ if (e.get() == this) {
+ if (pred == null)
+ t[i] = next;
+ else
+ pred.next = next;
+ break;
+ }
+ pred = e;
+ e = next;
+ }
+ expungeStaleExceptions();
+ status = 0;
+ } finally {
+ lock.unlock();
}
- return getRawResult();
}
/**
- * Returns result or throws exception using j.u.c.Future conventions
- * with timeouts
+ * Returns a rethrowable exception for the given task, if
+ * available. To provide accurate stack traces, if the exception
+ * was not thrown by the current thread, we try to create a new
+ * exception of the same type as the one thrown, but with the
+ * recorded exception as its cause. If there is no such
+ * constructor, we instead try to use a no-arg constructor,
+ * followed by initCause, to the same effect. If none of these
+ * apply, or any fail due to other exceptions, we return the
+ * recorded exception, which is still correct, although it may
+ * contain a misleading stack trace.
+ *
+ * @return the exception, or null if none
*/
- private V reportTimedFutureResult()
- throws InterruptedException, ExecutionException, TimeoutException {
+ private Throwable getThrowableException() {
+ if ((status & DONE_MASK) != EXCEPTIONAL)
+ return null;
+ int h = System.identityHashCode(this);
+ ExceptionNode e;
+ final ReentrantLock lock = exceptionTableLock;
+ lock.lock();
+ try {
+ expungeStaleExceptions();
+ ExceptionNode[] t = exceptionTable;
+ e = t[h & (t.length - 1)];
+ while (e != null && e.get() != this)
+ e = e.next;
+ } finally {
+ lock.unlock();
+ }
Throwable ex;
- int s = status & COMPLETION_MASK;
- if (s == NORMAL)
- return getRawResult();
- if (s == CANCELLED)
- throw new CancellationException();
- if (s == EXCEPTIONAL && (ex = exceptionMap.get(this)) != null)
- throw new ExecutionException(ex);
- if (Thread.interrupted())
- throw new InterruptedException();
- throw new TimeoutException();
+ if (e == null || (ex = e.ex) == null)
+ return null;
+ if (false && e.thrower != Thread.currentThread().getId()) {
+ Class<? extends Throwable> ec = ex.getClass();
+ try {
+ Constructor<?> noArgCtor = null;
+ Constructor<?>[] cs = ec.getConstructors();// public ctors only
+ for (int i = 0; i < cs.length; ++i) {
+ Constructor<?> c = cs[i];
+ Class<?>[] ps = c.getParameterTypes();
+ if (ps.length == 0)
+ noArgCtor = c;
+ else if (ps.length == 1 && ps[0] == Throwable.class)
+ return (Throwable)(c.newInstance(ex));
+ }
+ if (noArgCtor != null) {
+ Throwable wx = (Throwable)(noArgCtor.newInstance());
+ wx.initCause(ex);
+ return wx;
+ }
+ } catch (Exception ignore) {
+ }
+ }
+ return ex;
}
- // internal execution methods
-
/**
- * Calls exec, recording completion, and rethrowing exception if
- * encountered. Caller should normally check status before calling
- * @return true if completed normally
+ * Poll stale refs and remove them. Call only while holding lock.
*/
- private boolean tryExec() {
- try { // try block must contain only call to exec
- if (!exec())
- return false;
- } catch (Throwable rex) {
- setDoneExceptionally(rex);
- rethrowException(rex);
- return false; // not reached
+ private static void expungeStaleExceptions() {
+ for (Object x; (x = exceptionTableRefQueue.poll()) != null;) {
+ if (x instanceof ExceptionNode) {
+ ForkJoinTask<?> key = ((ExceptionNode)x).get();
+ ExceptionNode[] t = exceptionTable;
+ int i = System.identityHashCode(key) & (t.length - 1);
+ ExceptionNode e = t[i];
+ ExceptionNode pred = null;
+ while (e != null) {
+ ExceptionNode next = e.next;
+ if (e == x) {
+ if (pred == null)
+ t[i] = next;
+ else
+ pred.next = next;
+ break;
+ }
+ pred = e;
+ e = next;
+ }
+ }
}
- setNormalCompletion();
- return true;
}
/**
- * Main execution method used by worker threads. Invokes
- * base computation unless already complete
+ * If lock is available, poll stale refs and remove them.
+ * Called from ForkJoinPool when pools become quiescent.
*/
- final void quietlyExec() {
- if (status >= 0) {
+ static final void helpExpungeStaleExceptions() {
+ final ReentrantLock lock = exceptionTableLock;
+ if (lock.tryLock()) {
try {
- if (!exec())
- return;
- } catch(Throwable rex) {
- setDoneExceptionally(rex);
- return;
+ expungeStaleExceptions();
+ } finally {
+ lock.unlock();
}
- setNormalCompletion();
}
}
/**
- * Calls exec, recording but not rethrowing exception
- * Caller should normally check status before calling
- * @return true if completed normally
+ * A version of "sneaky throw" to relay exceptions
*/
- private boolean tryQuietlyInvoke() {
- try {
- if (!exec())
- return false;
- } catch (Throwable rex) {
- setDoneExceptionally(rex);
- return false;
+ static void rethrow(final Throwable ex) {
+ if (ex != null) {
+ if (ex instanceof Error)
+ throw (Error)ex;
+ if (ex instanceof RuntimeException)
+ throw (RuntimeException)ex;
+ ForkJoinTask.<RuntimeException>uncheckedThrow(ex);
}
- setNormalCompletion();
- return true;
}
/**
- * Cancel, ignoring any exceptions it throws
+ * The sneaky part of sneaky throw, relying on generics
+ * limitations to evade compiler complaints about rethrowing
+ * unchecked exceptions
*/
- final void cancelIgnoringExceptions() {
- try {
- cancel(false);
- } catch(Throwable ignore) {
- }
+ @SuppressWarnings("unchecked") static <T extends Throwable>
+ void uncheckedThrow(Throwable t) throws T {
+ if (t != null)
+ throw (T)t; // rely on vacuous cast
}
/**
- * Main implementation of helpJoin
+ * Throws exception, if any, associated with the given status.
*/
- private int busyJoin(ForkJoinWorkerThread w) {
- int s;
- ForkJoinTask<?> t;
- while ((s = status) >= 0 && (t = w.scanWhileJoining(this)) != null)
- t.quietlyExec();
- return (s >= 0)? awaitDone(w, false) : s; // block if no work
+ private void reportException(int s) {
+ if (s == CANCELLED)
+ throw new CancellationException();
+ if (s == EXCEPTIONAL)
+ rethrow(getThrowableException());
}
// public methods
/**
- * Arranges to asynchronously execute this task. While it is not
- * necessarily enforced, it is a usage error to fork a task more
- * than once unless it has completed and been reinitialized. This
- * method may be invoked only from within ForkJoinTask
- * computations. Attempts to invoke in other contexts result in
- * exceptions or errors possibly including ClassCastException.
- */
- public final void fork() {
- ((ForkJoinWorkerThread)(Thread.currentThread())).pushTask(this);
+ * Arranges to asynchronously execute this task in the pool the
+ * current task is running in, if applicable, or using the {@link
+ * ForkJoinPool#commonPool()} if not {@link #inForkJoinPool}. While
+ * it is not necessarily enforced, it is a usage error to fork a
+ * task more than once unless it has completed and been
+ * reinitialized. Subsequent modifications to the state of this
+ * task or any data it operates on are not necessarily
+ * consistently observable by any thread other than the one
+ * executing it unless preceded by a call to {@link #join} or
+ * related methods, or a call to {@link #isDone} returning {@code
+ * true}.
+ *
+ * @return {@code this}, to simplify usage
+ */
+ public final ForkJoinTask<V> fork() {
+ Thread t;
+ if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)
+ ((ForkJoinWorkerThread)t).workQueue.push(this);
+ else
+ ForkJoinPool.common.externalPush(this);
+ return this;
}
/**
- * Returns the result of the computation when it is ready.
- * This method differs from <code>get</code> in that abnormal
- * completion results in RuntimeExceptions or Errors, not
- * ExecutionExceptions.
+ * Returns the result of the computation when it {@link #isDone is
+ * done}. This method differs from {@link #get()} in that
+ * abnormal completion results in {@code RuntimeException} or
+ * {@code Error}, not {@code ExecutionException}, and that
+ * interrupts of the calling thread do <em>not</em> cause the
+ * method to abruptly return by throwing {@code
+ * InterruptedException}.
*
* @return the computed result
*/
public final V join() {
- ForkJoinWorkerThread w = getWorker();
- if (w == null || status < 0 || !w.unpushTask(this) || !tryExec())
- reportException(awaitDone(w, true));
+ int s;
+ if ((s = doJoin() & DONE_MASK) != NORMAL)
+ reportException(s);
return getRawResult();
}
/**
* Commences performing this task, awaits its completion if
- * necessary, and return its result.
- * @throws Throwable (a RuntimeException, Error, or unchecked
- * exception) if the underlying computation did so.
+ * necessary, and returns its result, or throws an (unchecked)
+ * {@code RuntimeException} or {@code Error} if the underlying
+ * computation did so.
+ *
* @return the computed result
*/
public final V invoke() {
- if (status >= 0 && tryExec())
- return getRawResult();
- else
- return join();
+ int s;
+ if ((s = doInvoke() & DONE_MASK) != NORMAL)
+ reportException(s);
+ return getRawResult();
}
/**
- * Forks both tasks, returning when <code>isDone</code> holds for
- * both of them or an exception is encountered. This method may be
- * invoked only from within ForkJoinTask computations. Attempts to
- * invoke in other contexts result in exceptions or errors
- * possibly including ClassCastException.
- * @param t1 one task
- * @param t2 the other task
- * @throws NullPointerException if t1 or t2 are null
- * @throws RuntimeException or Error if either task did so.
- */
- public static void invokeAll(ForkJoinTask<?>t1, ForkJoinTask<?> t2) {
+ * Forks the given tasks, returning when {@code isDone} holds for
+ * each task or an (unchecked) exception is encountered, in which
+ * case the exception is rethrown. If more than one task
+ * encounters an exception, then this method throws any one of
+ * these exceptions. If any task encounters an exception, the
+ * other may be cancelled. However, the execution status of
+ * individual tasks is not guaranteed upon exceptional return. The
+ * status of each task may be obtained using {@link
+ * #getException()} and related methods to check if they have been
+ * cancelled, completed normally or exceptionally, or left
+ * unprocessed.
+ *
+ * @param t1 the first task
+ * @param t2 the second task
+ * @throws NullPointerException if any task is null
+ */
+ public static void invokeAll(ForkJoinTask<?> t1, ForkJoinTask<?> t2) {
+ int s1, s2;
t2.fork();
- t1.invoke();
- t2.join();
+ if ((s1 = t1.doInvoke() & DONE_MASK) != NORMAL)
+ t1.reportException(s1);
+ if ((s2 = t2.doJoin() & DONE_MASK) != NORMAL)
+ t2.reportException(s2);
}
/**
- * Forks the given tasks, returning when <code>isDone</code> holds
- * for all of them. If any task encounters an exception, others
- * may be cancelled. This method may be invoked only from within
- * ForkJoinTask computations. Attempts to invoke in other contexts
- * result in exceptions or errors possibly including ClassCastException.
- * @param tasks the array of tasks
- * @throws NullPointerException if tasks or any element are null.
- * @throws RuntimeException or Error if any task did so.
+ * Forks the given tasks, returning when {@code isDone} holds for
+ * each task or an (unchecked) exception is encountered, in which
+ * case the exception is rethrown. If more than one task
+ * encounters an exception, then this method throws any one of
+ * these exceptions. If any task encounters an exception, others
+ * may be cancelled. However, the execution status of individual
+ * tasks is not guaranteed upon exceptional return. The status of
+ * each task may be obtained using {@link #getException()} and
+ * related methods to check if they have been cancelled, completed
+ * normally or exceptionally, or left unprocessed.
+ *
+ * @param tasks the tasks
+ * @throws NullPointerException if any task is null
*/
public static void invokeAll(ForkJoinTask<?>... tasks) {
Throwable ex = null;
@@ -548,46 +742,47 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
else if (i != 0)
t.fork();
- else {
- t.quietlyInvoke();
- if (ex == null)
- ex = t.getException();
- }
+ else if (t.doInvoke() < NORMAL && ex == null)
+ ex = t.getException();
}
for (int i = 1; i <= last; ++i) {
ForkJoinTask<?> t = tasks[i];
if (t != null) {
if (ex != null)
t.cancel(false);
- else {
- t.quietlyJoin();
- if (ex == null)
- ex = t.getException();
- }
+ else if (t.doJoin() < NORMAL)
+ ex = t.getException();
}
}
if (ex != null)
- rethrowException(ex);
+ rethrow(ex);
}
/**
- * Forks all tasks in the collection, returning when
- * <code>isDone</code> holds for all of them. If any task
- * encounters an exception, others may be cancelled. This method
- * may be invoked only from within ForkJoinTask
- * computations. Attempts to invoke in other contexts resul!t in
- * exceptions or errors possibly including ClassCastException.
+ * Forks all tasks in the specified collection, returning when
+ * {@code isDone} holds for each task or an (unchecked) exception
+ * is encountered, in which case the exception is rethrown. If
+ * more than one task encounters an exception, then this method
+ * throws any one of these exceptions. If any task encounters an
+ * exception, others may be cancelled. However, the execution
+ * status of individual tasks is not guaranteed upon exceptional
+ * return. The status of each task may be obtained using {@link
+ * #getException()} and related methods to check if they have been
+ * cancelled, completed normally or exceptionally, or left
+ * unprocessed.
+ *
* @param tasks the collection of tasks
- * @throws NullPointerException if tasks or any element are null.
- * @throws RuntimeException or Error if any task did so.
+ * @return the tasks argument, to simplify usage
+ * @throws NullPointerException if tasks or any element are null
*/
- public static void invokeAll(Collection<? extends ForkJoinTask<?>> tasks) {
- if (!(tasks instanceof List)) {
- invokeAll(tasks.toArray(new ForkJoinTask[tasks.size()]));
- return;
+ public static <T extends ForkJoinTask<?>> Collection<T> invokeAll(Collection<T> tasks) {
+ if (!(tasks instanceof RandomAccess) || !(tasks instanceof List<?>)) {
+ invokeAll(tasks.toArray(new ForkJoinTask<?>[tasks.size()]));
+ return tasks;
}
+ @SuppressWarnings("unchecked")
List<? extends ForkJoinTask<?>> ts =
- (List<? extends ForkJoinTask<?>>)tasks;
+ (List<? extends ForkJoinTask<?>>) tasks;
Throwable ex = null;
int last = ts.size() - 1;
for (int i = last; i >= 0; --i) {
@@ -598,268 +793,356 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
else if (i != 0)
t.fork();
- else {
- t.quietlyInvoke();
- if (ex == null)
- ex = t.getException();
- }
+ else if (t.doInvoke() < NORMAL && ex == null)
+ ex = t.getException();
}
for (int i = 1; i <= last; ++i) {
ForkJoinTask<?> t = ts.get(i);
if (t != null) {
if (ex != null)
t.cancel(false);
- else {
- t.quietlyJoin();
- if (ex == null)
- ex = t.getException();
- }
+ else if (t.doJoin() < NORMAL)
+ ex = t.getException();
}
}
if (ex != null)
- rethrowException(ex);
+ rethrow(ex);
+ return tasks;
}
/**
- * Returns true if the computation performed by this task has
- * completed (or has been cancelled).
- * @return true if this computation has completed
+ * Attempts to cancel execution of this task. This attempt will
+ * fail if the task has already completed or could not be
+ * cancelled for some other reason. If successful, and this task
+ * has not started when {@code cancel} is called, execution of
+ * this task is suppressed. After this method returns
+ * successfully, unless there is an intervening call to {@link
+ * #reinitialize}, subsequent calls to {@link #isCancelled},
+ * {@link #isDone}, and {@code cancel} will return {@code true}
+ * and calls to {@link #join} and related methods will result in
+ * {@code CancellationException}.
+ *
+ * <p>This method may be overridden in subclasses, but if so, must
+ * still ensure that these properties hold. In particular, the
+ * {@code cancel} method itself must not throw exceptions.
+ *
+ * <p>This method is designed to be invoked by <em>other</em>
+ * tasks. To terminate the current task, you can just return or
+ * throw an unchecked exception from its computation method, or
+ * invoke {@link #completeExceptionally}.
+ *
+ * @param mayInterruptIfRunning this value has no effect in the
+ * default implementation because interrupts are not used to
+ * control cancellation.
+ *
+ * @return {@code true} if this task is now cancelled
*/
+ public boolean cancel(boolean mayInterruptIfRunning) {
+ return (setCompletion(CANCELLED) & DONE_MASK) == CANCELLED;
+ }
+
public final boolean isDone() {
return status < 0;
}
- /**
- * Returns true if this task was cancelled.
- * @return true if this task was cancelled
- */
public final boolean isCancelled() {
- return (status & COMPLETION_MASK) == CANCELLED;
+ return (status & DONE_MASK) == CANCELLED;
}
/**
- * Asserts that the results of this task's computation will not be
- * used. If a cancellation occurs before atempting to execute this
- * task, then execution will be suppressed, <code>isCancelled</code>
- * will report true, and <code>join</code> will result in a
- * <code>CancellationException</code> being thrown. Otherwise, when
- * cancellation races with completion, there are no guarantees
- * about whether <code>isCancelled</code> will report true, whether
- * <code>join</code> will return normally or via an exception, or
- * whether these behaviors will remain consistent upon repeated
- * invocation.
- *
- * <p>This method may be overridden in subclasses, but if so, must
- * still ensure that these minimal properties hold. In particular,
- * the cancel method itself must not throw exceptions.
+ * Returns {@code true} if this task threw an exception or was cancelled.
*
- * <p> This method is designed to be invoked by <em>other</em>
- * tasks. To terminate the current task, you can just return or
- * throw an unchecked exception from its computation method, or
- * invoke <code>completeExceptionally</code>.
- *
- * @param mayInterruptIfRunning this value is ignored in the
- * default implementation because tasks are not in general
- * cancelled via interruption.
- *
- * @return true if this task is now cancelled
+ * @return {@code true} if this task threw an exception or was cancelled
*/
- public boolean cancel(boolean mayInterruptIfRunning) {
- setCompletion(CANCELLED);
- return (status & COMPLETION_MASK) == CANCELLED;
+ public final boolean isCompletedAbnormally() {
+ return status < NORMAL;
}
/**
- * Returns true if this task threw an exception or was cancelled
- * @return true if this task threw an exception or was cancelled
+ * Returns {@code true} if this task completed without throwing an
+ * exception and was not cancelled.
+ *
+ * @return {@code true} if this task completed without throwing an
+ * exception and was not cancelled
*/
- public final boolean isCompletedAbnormally() {
- return (status & COMPLETION_MASK) < NORMAL;
+ public final boolean isCompletedNormally() {
+ return (status & DONE_MASK) == NORMAL;
}
/**
* Returns the exception thrown by the base computation, or a
- * CancellationException if cancelled, or null if none or if the
- * method has not yet completed.
- * @return the exception, or null if none
+ * {@code CancellationException} if cancelled, or {@code null} if
+ * none or if the method has not yet completed.
+ *
+ * @return the exception, or {@code null} if none
*/
public final Throwable getException() {
- int s = status & COMPLETION_MASK;
- if (s >= NORMAL)
- return null;
- if (s == CANCELLED)
- return new CancellationException();
- return exceptionMap.get(this);
+ int s = status & DONE_MASK;
+ return ((s >= NORMAL) ? null :
+ (s == CANCELLED) ? new CancellationException() :
+ getThrowableException());
}
/**
* Completes this task abnormally, and if not already aborted or
* cancelled, causes it to throw the given exception upon
- * <code>join</code> and related operations. This method may be used
+ * {@code join} and related operations. This method may be used
* to induce exceptions in asynchronous tasks, or to force
* completion of tasks that would not otherwise complete. Its use
- * in other situations is likely to be wrong. This method is
- * overridable, but overridden versions must invoke <code>super</code>
+ * in other situations is discouraged. This method is
+ * overridable, but overridden versions must invoke {@code super}
* implementation to maintain guarantees.
*
- * @param ex the exception to throw. If this exception is
- * not a RuntimeException or Error, the actual exception thrown
- * will be a RuntimeException with cause ex.
+ * @param ex the exception to throw. If this exception is not a
+ * {@code RuntimeException} or {@code Error}, the actual exception
+ * thrown will be a {@code RuntimeException} with cause {@code ex}.
*/
public void completeExceptionally(Throwable ex) {
- setDoneExceptionally((ex instanceof RuntimeException) ||
- (ex instanceof Error)? ex :
- new RuntimeException(ex));
+ setExceptionalCompletion((ex instanceof RuntimeException) ||
+ (ex instanceof Error) ? ex :
+ new RuntimeException(ex));
}
/**
* Completes this task, and if not already aborted or cancelled,
- * returning a <code>null</code> result upon <code>join</code> and related
- * operations. This method may be used to provide results for
- * asynchronous tasks, or to provide alternative handling for
- * tasks that would not otherwise complete normally. Its use in
- * other situations is likely to be wrong. This method is
- * overridable, but overridden versions must invoke <code>super</code>
- * implementation to maintain guarantees.
+ * returning the given value as the result of subsequent
+ * invocations of {@code join} and related operations. This method
+ * may be used to provide results for asynchronous tasks, or to
+ * provide alternative handling for tasks that would not otherwise
+ * complete normally. Its use in other situations is
+ * discouraged. This method is overridable, but overridden
+ * versions must invoke {@code super} implementation to maintain
+ * guarantees.
*
- * @param value the result value for this task.
+ * @param value the result value for this task
*/
public void complete(V value) {
try {
setRawResult(value);
- } catch(Throwable rex) {
- setDoneExceptionally(rex);
+ } catch (Throwable rex) {
+ setExceptionalCompletion(rex);
return;
}
- setNormalCompletion();
- }
-
- public final V get() throws InterruptedException, ExecutionException {
- ForkJoinWorkerThread w = getWorker();
- if (w == null || status < 0 || !w.unpushTask(this) || !tryQuietlyInvoke())
- awaitDone(w, true);
- return reportFutureResult();
+ setCompletion(NORMAL);
}
- public final V get(long timeout, TimeUnit unit)
- throws InterruptedException, ExecutionException, TimeoutException {
- ForkJoinWorkerThread w = getWorker();
- if (w == null || status < 0 || !w.unpushTask(this) || !tryQuietlyInvoke())
- awaitDone(w, unit.toNanos(timeout));
- return reportTimedFutureResult();
+ /**
+ * Completes this task normally without setting a value. The most
+ * recent value established by {@link #setRawResult} (or {@code
+ * null} by default) will be returned as the result of subsequent
+ * invocations of {@code join} and related operations.
+ *
+ * @since 1.8
+ */
+ public final void quietlyComplete() {
+ setCompletion(NORMAL);
}
/**
- * Possibly executes other tasks until this task is ready, then
- * returns the result of the computation. This method may be more
- * efficient than <code>join</code>, but is only applicable when
- * there are no potemtial dependencies between continuation of the
- * current task and that of any other task that might be executed
- * while helping. (This usually holds for pure divide-and-conquer
- * tasks). This method may be invoked only from within
- * ForkJoinTask computations. Attempts to invoke in other contexts
- * resul!t in exceptions or errors possibly including ClassCastException.
+ * Waits if necessary for the computation to complete, and then
+ * retrieves its result.
+ *
* @return the computed result
+ * @throws CancellationException if the computation was cancelled
+ * @throws ExecutionException if the computation threw an
+ * exception
+ * @throws InterruptedException if the current thread is not a
+ * member of a ForkJoinPool and was interrupted while waiting
*/
- public final V helpJoin() {
- ForkJoinWorkerThread w = (ForkJoinWorkerThread)(Thread.currentThread());
- if (status < 0 || !w.unpushTask(this) || !tryExec())
- reportException(busyJoin(w));
+ public final V get() throws InterruptedException, ExecutionException {
+ int s = (Thread.currentThread() instanceof ForkJoinWorkerThread) ?
+ doJoin() : externalInterruptibleAwaitDone();
+ Throwable ex;
+ if ((s &= DONE_MASK) == CANCELLED)
+ throw new CancellationException();
+ if (s == EXCEPTIONAL && (ex = getThrowableException()) != null)
+ throw new ExecutionException(ex);
return getRawResult();
}
/**
- * Possibly executes other tasks until this task is ready. This
- * method may be invoked only from within ForkJoinTask
- * computations. Attempts to invoke in other contexts resul!t in
- * exceptions or errors possibly including ClassCastException.
- */
- public final void quietlyHelpJoin() {
- if (status >= 0) {
- ForkJoinWorkerThread w =
- (ForkJoinWorkerThread)(Thread.currentThread());
- if (!w.unpushTask(this) || !tryQuietlyInvoke())
- busyJoin(w);
+ * Waits if necessary for at most the given time for the computation
+ * to complete, and then retrieves its result, if available.
+ *
+ * @param timeout the maximum time to wait
+ * @param unit the time unit of the timeout argument
+ * @return the computed result
+ * @throws CancellationException if the computation was cancelled
+ * @throws ExecutionException if the computation threw an
+ * exception
+ * @throws InterruptedException if the current thread is not a
+ * member of a ForkJoinPool and was interrupted while waiting
+ * @throws TimeoutException if the wait timed out
+ */
+ public final V get(long timeout, TimeUnit unit)
+ throws InterruptedException, ExecutionException, TimeoutException {
+ if (Thread.interrupted())
+ throw new InterruptedException();
+ // Messy in part because we measure in nanosecs, but wait in millisecs
+ int s; long ms;
+ long ns = unit.toNanos(timeout);
+ if ((s = status) >= 0 && ns > 0L) {
+ long deadline = System.nanoTime() + ns;
+ ForkJoinPool p = null;
+ ForkJoinPool.WorkQueue w = null;
+ Thread t = Thread.currentThread();
+ if (t instanceof ForkJoinWorkerThread) {
+ ForkJoinWorkerThread wt = (ForkJoinWorkerThread)t;
+ p = wt.pool;
+ w = wt.workQueue;
+ p.helpJoinOnce(w, this); // no retries on failure
+ }
+ else
+ ForkJoinPool.externalHelpJoin(this);
+ boolean canBlock = false;
+ boolean interrupted = false;
+ try {
+ while ((s = status) >= 0) {
+ if (w != null && w.qlock < 0)
+ cancelIgnoringExceptions(this);
+ else if (!canBlock) {
+ if (p == null || p.tryCompensate())
+ canBlock = true;
+ }
+ else {
+ if ((ms = TimeUnit.NANOSECONDS.toMillis(ns)) > 0L &&
+ U.compareAndSwapInt(this, STATUS, s, s | SIGNAL)) {
+ synchronized (this) {
+ if (status >= 0) {
+ try {
+ wait(ms);
+ } catch (InterruptedException ie) {
+ if (p == null)
+ interrupted = true;
+ }
+ }
+ else
+ notifyAll();
+ }
+ }
+ if ((s = status) < 0 || interrupted ||
+ (ns = deadline - System.nanoTime()) <= 0L)
+ break;
+ }
+ }
+ } finally {
+ if (p != null && canBlock)
+ p.incrementActiveCount();
+ }
+ if (interrupted)
+ throw new InterruptedException();
}
+ if ((s &= DONE_MASK) != NORMAL) {
+ Throwable ex;
+ if (s == CANCELLED)
+ throw new CancellationException();
+ if (s != EXCEPTIONAL)
+ throw new TimeoutException();
+ if ((ex = getThrowableException()) != null)
+ throw new ExecutionException(ex);
+ }
+ return getRawResult();
}
/**
- * Joins this task, without returning its result or throwing an
+ * Joins this task, without returning its result or throwing its
* exception. This method may be useful when processing
* collections of tasks when some have been cancelled or otherwise
* known to have aborted.
*/
public final void quietlyJoin() {
- if (status >= 0) {
- ForkJoinWorkerThread w = getWorker();
- if (w == null || !w.unpushTask(this) || !tryQuietlyInvoke())
- awaitDone(w, true);
- }
+ doJoin();
}
/**
* Commences performing this task and awaits its completion if
- * necessary, without returning its result or throwing an
- * exception. This method may be useful when processing
- * collections of tasks when some have been cancelled or otherwise
- * known to have aborted.
+ * necessary, without returning its result or throwing its
+ * exception.
*/
public final void quietlyInvoke() {
- if (status >= 0 && !tryQuietlyInvoke())
- quietlyJoin();
+ doInvoke();
}
/**
* Possibly executes tasks until the pool hosting the current task
- * {@link ForkJoinPool#isQuiescent}. This method may be of use in
- * designs in which many tasks are forked, but none are explicitly
- * joined, instead executing them until all are processed.
+ * {@link ForkJoinPool#isQuiescent is quiescent}. This method may
+ * be of use in designs in which many tasks are forked, but none
+ * are explicitly joined, instead executing them until all are
+ * processed.
*/
public static void helpQuiesce() {
- ((ForkJoinWorkerThread)(Thread.currentThread())).
- helpQuiescePool();
+ Thread t;
+ if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) {
+ ForkJoinWorkerThread wt = (ForkJoinWorkerThread)t;
+ wt.pool.helpQuiescePool(wt.workQueue);
+ }
+ else
+ ForkJoinPool.quiesceCommonPool();
}
/**
* Resets the internal bookkeeping state of this task, allowing a
- * subsequent <code>fork</code>. This method allows repeated reuse of
+ * subsequent {@code fork}. This method allows repeated reuse of
* this task, but only if reuse occurs when this task has either
* never been forked, or has been forked, then completed and all
* outstanding joins of this task have also completed. Effects
- * under any other usage conditions are not guaranteed, and are
- * almost surely wrong. This method may be useful when executing
+ * under any other usage conditions are not guaranteed.
+ * This method may be useful when executing
* pre-constructed trees of subtasks in loops.
+ *
+ * <p>Upon completion of this method, {@code isDone()} reports
+ * {@code false}, and {@code getException()} reports {@code
+ * null}. However, the value returned by {@code getRawResult} is
+ * unaffected. To clear this value, you can invoke {@code
+ * setRawResult(null)}.
*/
public void reinitialize() {
- if ((status & COMPLETION_MASK) == EXCEPTIONAL)
- exceptionMap.remove(this);
- status = 0;
+ if ((status & DONE_MASK) == EXCEPTIONAL)
+ clearExceptionalCompletion();
+ else
+ status = 0;
}
/**
* Returns the pool hosting the current task execution, or null
- * if this task is executing outside of any pool.
- * @return the pool, or null if none.
+ * if this task is executing outside of any ForkJoinPool.
+ *
+ * @see #inForkJoinPool
+ * @return the pool, or {@code null} if none
*/
public static ForkJoinPool getPool() {
Thread t = Thread.currentThread();
- return ((t instanceof ForkJoinWorkerThread)?
- ((ForkJoinWorkerThread)t).pool : null);
+ return (t instanceof ForkJoinWorkerThread) ?
+ ((ForkJoinWorkerThread) t).pool : null;
+ }
+
+ /**
+ * Returns {@code true} if the current thread is a {@link
+ * ForkJoinWorkerThread} executing as a ForkJoinPool computation.
+ *
+ * @return {@code true} if the current thread is a {@link
+ * ForkJoinWorkerThread} executing as a ForkJoinPool computation,
+ * or {@code false} otherwise
+ */
+ public static boolean inForkJoinPool() {
+ return Thread.currentThread() instanceof ForkJoinWorkerThread;
}
/**
* Tries to unschedule this task for execution. This method will
- * typically succeed if this task is the most recently forked task
- * by the current thread, and has not commenced executing in
- * another thread. This method may be useful when arranging
- * alternative local processing of tasks that could have been, but
- * were not, stolen. This method may be invoked only from within
- * ForkJoinTask computations. Attempts to invoke in other contexts
- * result in exceptions or errors possibly including ClassCastException.
- * @return true if unforked
+ * typically (but is not guaranteed to) succeed if this task is
+ * the most recently forked task by the current thread, and has
+ * not commenced executing in another thread. This method may be
+ * useful when arranging alternative local processing of tasks
+ * that could have been, but were not, stolen.
+ *
+ * @return {@code true} if unforked
*/
public boolean tryUnfork() {
- return ((ForkJoinWorkerThread)(Thread.currentThread())).unpushTask(this);
+ Thread t;
+ return (((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
+ ((ForkJoinWorkerThread)t).workQueue.tryUnpush(this) :
+ ForkJoinPool.tryExternalUnpush(this));
}
/**
@@ -867,39 +1150,45 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
* forked by the current worker thread but not yet executed. This
* value may be useful for heuristic decisions about whether to
* fork other tasks.
+ *
* @return the number of tasks
*/
public static int getQueuedTaskCount() {
- return ((ForkJoinWorkerThread)(Thread.currentThread())).
- getQueueSize();
+ Thread t; ForkJoinPool.WorkQueue q;
+ if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)
+ q = ((ForkJoinWorkerThread)t).workQueue;
+ else
+ q = ForkJoinPool.commonSubmitterQueue();
+ return (q == null) ? 0 : q.queueSize();
}
/**
- * Returns a estimate of how many more locally queued tasks are
+ * Returns an estimate of how many more locally queued tasks are
* held by the current worker thread than there are other worker
- * threads that might steal them. This value may be useful for
+ * threads that might steal them, or zero if this thread is not
+ * operating in a ForkJoinPool. This value may be useful for
* heuristic decisions about whether to fork other tasks. In many
* usages of ForkJoinTasks, at steady state, each worker should
* aim to maintain a small constant surplus (for example, 3) of
* tasks, and to process computations locally if this threshold is
* exceeded.
+ *
* @return the surplus number of tasks, which may be negative
*/
public static int getSurplusQueuedTaskCount() {
- return ((ForkJoinWorkerThread)(Thread.currentThread()))
- .getEstimatedSurplusTaskCount();
+ return ForkJoinPool.getSurplusQueuedTaskCount();
}
// Extension methods
/**
- * Returns the result that would be returned by <code>join</code>,
- * even if this task completed abnormally, or null if this task is
- * not known to have been completed. This method is designed to
- * aid debugging, as well as to support extensions. Its use in any
- * other context is discouraged.
+ * Returns the result that would be returned by {@link #join}, even
+ * if this task completed abnormally, or {@code null} if this task
+ * is not known to have been completed. This method is designed
+ * to aid debugging, as well as to support extensions. Its use in
+ * any other context is discouraged.
*
- * @return the result, or null if not completed.
+ * @return the result, or {@code null} if not completed
*/
public abstract V getRawResult();
@@ -913,67 +1202,232 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
protected abstract void setRawResult(V value);
/**
- * Immediately performs the base action of this task. This method
- * is designed to support extensions, and should not in general be
- * called otherwise. The return value controls whether this task
- * is considered to be done normally. It may return false in
+ * Immediately performs the base action of this task and returns
+ * true if, upon return from this method, this task is guaranteed
+ * to have completed normally. This method may return false
+ * otherwise, to indicate that this task is not necessarily
+ * complete (or is not known to be complete), for example in
* asynchronous actions that require explicit invocations of
- * <code>complete</code> to become joinable. It may throw exceptions
- * to indicate abnormal exit.
- * @return true if completed normally
- * @throws Error or RuntimeException if encountered during computation
+ * completion methods. This method may also throw an (unchecked)
+ * exception to indicate abnormal exit. This method is designed to
+ * support extensions, and should not in general be called
+ * otherwise.
+ *
+ * @return {@code true} if this task is known to have completed normally
*/
protected abstract boolean exec();
/**
- * Returns, but does not unschedule or execute, the task queued by
- * the current thread but not yet executed, if one is
+ * Returns, but does not unschedule or execute, a task queued by
+ * the current thread but not yet executed, if one is immediately
* available. There is no guarantee that this task will actually
- * be polled or executed next. This method is designed primarily
- * to support extensions, and is unlikely to be useful otherwise.
- * This method may be invoked only from within ForkJoinTask
- * computations. Attempts to invoke in other contexts result in
- * exceptions or errors possibly including ClassCastException.
+ * be polled or executed next. Conversely, this method may return
+ * null even if a task exists but cannot be accessed without
+ * contention with other threads. This method is designed
+ * primarily to support extensions, and is unlikely to be useful
+ * otherwise.
*
- * @return the next task, or null if none are available
+ * @return the next task, or {@code null} if none are available
*/
protected static ForkJoinTask<?> peekNextLocalTask() {
- return ((ForkJoinWorkerThread)(Thread.currentThread())).peekTask();
+ Thread t; ForkJoinPool.WorkQueue q;
+ if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)
+ q = ((ForkJoinWorkerThread)t).workQueue;
+ else
+ q = ForkJoinPool.commonSubmitterQueue();
+ return (q == null) ? null : q.peek();
}
/**
* Unschedules and returns, without executing, the next task
- * queued by the current thread but not yet executed. This method
- * is designed primarily to support extensions, and is unlikely to
- * be useful otherwise. This method may be invoked only from
- * within ForkJoinTask computations. Attempts to invoke in other
- * contexts result in exceptions or errors possibly including
- * ClassCastException.
+ * queued by the current thread but not yet executed, if the
+ * current thread is operating in a ForkJoinPool. This method is
+ * designed primarily to support extensions, and is unlikely to be
+ * useful otherwise.
*
- * @return the next task, or null if none are available
+ * @return the next task, or {@code null} if none are available
*/
protected static ForkJoinTask<?> pollNextLocalTask() {
- return ((ForkJoinWorkerThread)(Thread.currentThread())).pollLocalTask();
+ Thread t;
+ return ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
+ ((ForkJoinWorkerThread)t).workQueue.nextLocalTask() :
+ null;
}
/**
- * Unschedules and returns, without executing, the next task
+ * If the current thread is operating in a ForkJoinPool,
+ * unschedules and returns, without executing, the next task
* queued by the current thread but not yet executed, if one is
* available, or if not available, a task that was forked by some
* other thread, if available. Availability may be transient, so a
- * <code>null</code> result does not necessarily imply quiecence
- * of the pool this task is operating in. This method is designed
+ * {@code null} result does not necessarily imply quiescence of
+ * the pool this task is operating in. This method is designed
* primarily to support extensions, and is unlikely to be useful
- * otherwise. This method may be invoked only from within
- * ForkJoinTask computations. Attempts to invoke in other contexts
- * result in exceptions or errors possibly including
- * ClassCastException.
+ * otherwise.
*
- * @return a task, or null if none are available
+ * @return a task, or {@code null} if none are available
*/
protected static ForkJoinTask<?> pollTask() {
- return ((ForkJoinWorkerThread)(Thread.currentThread())).
- pollTask();
+ Thread t; ForkJoinWorkerThread wt;
+ return ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
+ (wt = (ForkJoinWorkerThread)t).pool.nextTaskFor(wt.workQueue) :
+ null;
+ }
+
+ // tag operations
+
+ /**
+ * Returns the tag for this task.
+ *
+ * @return the tag for this task
+ * @since 1.8
+ */
+ public final short getForkJoinTaskTag() {
+ return (short)status;
+ }
+
+ /**
+ * Atomically sets the tag value for this task.
+ *
+ * @param tag the tag value
+ * @return the previous value of the tag
+ * @since 1.8
+ */
+ public final short setForkJoinTaskTag(short tag) {
+ for (int s;;) {
+ if (U.compareAndSwapInt(this, STATUS, s = status,
+ (s & ~SMASK) | (tag & SMASK)))
+ return (short)s;
+ }
+ }
+
+ /**
+ * Atomically conditionally sets the tag value for this task.
+ * Among other applications, tags can be used as visit markers
+ * in tasks operating on graphs, as in methods that check: {@code
+ * if (task.compareAndSetForkJoinTaskTag((short)0, (short)1))}
+ * before processing, otherwise exiting because the node has
+ * already been visited.
+ *
+ * @param e the expected tag value
+ * @param tag the new tag value
+ * @return true if successful; i.e., the current value was
+ * equal to e and is now tag.
+ * @since 1.8
+ */
+ public final boolean compareAndSetForkJoinTaskTag(short e, short tag) {
+ for (int s;;) {
+ if ((short)(s = status) != e)
+ return false;
+ if (U.compareAndSwapInt(this, STATUS, s,
+ (s & ~SMASK) | (tag & SMASK)))
+ return true;
+ }
+ }
+
+ /**
+ * Adaptor for Runnables. This implements RunnableFuture
+ * to be compliant with AbstractExecutorService constraints
+ * when used in ForkJoinPool.
+ */
+ static final class AdaptedRunnable<T> extends ForkJoinTask<T>
+ implements RunnableFuture<T> {
+ final Runnable runnable;
+ T result;
+ AdaptedRunnable(Runnable runnable, T result) {
+ if (runnable == null) throw new NullPointerException();
+ this.runnable = runnable;
+ this.result = result; // OK to set this even before completion
+ }
+ public final T getRawResult() { return result; }
+ public final void setRawResult(T v) { result = v; }
+ public final boolean exec() { runnable.run(); return true; }
+ public final void run() { invoke(); }
+ private static final long serialVersionUID = 5232453952276885070L;
+ }
+
+ /**
+ * Adaptor for Runnables without results
+ */
+ static final class AdaptedRunnableAction extends ForkJoinTask<Void>
+ implements RunnableFuture<Void> {
+ final Runnable runnable;
+ AdaptedRunnableAction(Runnable runnable) {
+ if (runnable == null) throw new NullPointerException();
+ this.runnable = runnable;
+ }
+ public final Void getRawResult() { return null; }
+ public final void setRawResult(Void v) { }
+ public final boolean exec() { runnable.run(); return true; }
+ public final void run() { invoke(); }
+ private static final long serialVersionUID = 5232453952276885070L;
+ }
+
+ /**
+ * Adaptor for Callables
+ */
+ static final class AdaptedCallable<T> extends ForkJoinTask<T>
+ implements RunnableFuture<T> {
+ final Callable<? extends T> callable;
+ T result;
+ AdaptedCallable(Callable<? extends T> callable) {
+ if (callable == null) throw new NullPointerException();
+ this.callable = callable;
+ }
+ public final T getRawResult() { return result; }
+ public final void setRawResult(T v) { result = v; }
+ public final boolean exec() {
+ try {
+ result = callable.call();
+ return true;
+ } catch (Error err) {
+ throw err;
+ } catch (RuntimeException rex) {
+ throw rex;
+ } catch (Exception ex) {
+ throw new RuntimeException(ex);
+ }
+ }
+ public final void run() { invoke(); }
+ private static final long serialVersionUID = 2838392045355241008L;
+ }
+
+ /**
+ * Returns a new {@code ForkJoinTask} that performs the {@code run}
+ * method of the given {@code Runnable} as its action, and returns
+ * a null result upon {@link #join}.
+ *
+ * @param runnable the runnable action
+ * @return the task
+ */
+ public static ForkJoinTask<?> adapt(Runnable runnable) {
+ return new AdaptedRunnableAction(runnable);
+ }
+
+ /**
+ * Returns a new {@code ForkJoinTask} that performs the {@code run}
+ * method of the given {@code Runnable} as its action, and returns
+ * the given result upon {@link #join}.
+ *
+ * @param runnable the runnable action
+ * @param result the result upon completion
+ * @return the task
+ */
+ public static <T> ForkJoinTask<T> adapt(Runnable runnable, T result) {
+ return new AdaptedRunnable<T>(runnable, result);
+ }
+
+ /**
+ * Returns a new {@code ForkJoinTask} that performs the {@code call}
+ * method of the given {@code Callable} as its action, and returns
+ * its result upon {@link #join}, translating any checked exceptions
+ * encountered into {@code RuntimeException}.
+ *
+ * @param callable the callable action
+ * @return the task
+ */
+ public static <T> ForkJoinTask<T> adapt(Callable<? extends T> callable) {
+ return new AdaptedCallable<T>(callable);
}
// Serialization support
@@ -981,11 +1435,10 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
private static final long serialVersionUID = -7721805057305804111L;
/**
- * Save the state to a stream.
+ * Saves this task to a stream (that is, serializes it).
*
* @serialData the current run status and the exception thrown
- * during execution, or null if none.
- * @param s the stream
+ * during execution, or {@code null} if none
*/
private void writeObject(java.io.ObjectOutputStream s)
throws java.io.IOException {
@@ -994,70 +1447,42 @@ public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
}
/**
- * Reconstitute the instance from a stream.
- * @param s the stream
+ * Reconstitutes this task from a stream (that is, deserializes it).
*/
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
- status &= ~INTERNAL_SIGNAL_MASK; // clear internal signal counts
- status |= EXTERNAL_SIGNAL; // conservatively set external signal
Object ex = s.readObject();
if (ex != null)
- setDoneExceptionally((Throwable)ex);
+ setExceptionalCompletion((Throwable)ex);
}
- // Temporary Unsafe mechanics for preliminary release
- private static Unsafe getUnsafe() throws Throwable {
- try {
- return Unsafe.getUnsafe();
- } catch (SecurityException se) {
- try {
- return java.security.AccessController.doPrivileged
- (new java.security.PrivilegedExceptionAction<Unsafe>() {
- public Unsafe run() throws Exception {
- return getUnsafePrivileged();
- }});
- } catch (java.security.PrivilegedActionException e) {
- throw e.getCause();
- }
- }
- }
-
- private static Unsafe getUnsafePrivileged()
- throws NoSuchFieldException, IllegalAccessException {
- Field f = Unsafe.class.getDeclaredField("theUnsafe");
- f.setAccessible(true);
- return (Unsafe) f.get(null);
- }
-
- private static long fieldOffset(String fieldName, Unsafe unsafe)
- throws NoSuchFieldException {
- // do not use _unsafe to avoid NPE
- return unsafe.objectFieldOffset
- (ForkJoinTask.class.getDeclaredField(fieldName));
- }
-
- static final Unsafe _unsafe;
- static final long statusOffset;
+ // Unsafe mechanics
+ private static final sun.misc.Unsafe U;
+ private static final long STATUS;
static {
- Unsafe tmpUnsafe = null;
- long tmpStatusOffset = 0;
+ exceptionTableLock = new ReentrantLock();
+ exceptionTableRefQueue = new ReferenceQueue<Object>();
+ exceptionTable = new ExceptionNode[EXCEPTION_MAP_CAPACITY];
try {
- tmpUnsafe = getUnsafe();
- tmpStatusOffset = fieldOffset("status", tmpUnsafe);
- } catch (Throwable e) {
- // Ignore the failure to load sun.misc.Unsafe on Android so
- // that platform can use the actor library without the
- // fork/join scheduler.
- String vmVendor = System.getProperty("java.vm.vendor");
- if (!vmVendor.contains("Android")) {
- throw new RuntimeException("Could not initialize intrinsics", e);
- }
+ U = getUnsafe();
+ Class<?> k = ForkJoinTask.class;
+ STATUS = U.objectFieldOffset
+ (k.getDeclaredField("status"));
+ } catch (Exception e) {
+ throw new Error(e);
}
- _unsafe = tmpUnsafe;
- statusOffset = tmpStatusOffset;
}
+ /**
+ * Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
+ * Replace with a simple call to Unsafe.getUnsafe when integrating
+ * into a jdk.
+ *
+ * @return a sun.misc.Unsafe
+ */
+ private static sun.misc.Unsafe getUnsafe() {
+ return scala.concurrent.util.Unsafe.instance;
+ }
}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
index b4d8897..e62fc6e 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ForkJoinWorkerThread.java
@@ -1,224 +1,58 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
-import java.util.*;
-import java.util.concurrent.*;
-import java.util.concurrent.atomic.*;
-import java.util.concurrent.locks.*;
-import sun.misc.Unsafe;
-import java.lang.reflect.*;
/**
- * A thread managed by a {@link ForkJoinPool}. This class is
- * subclassable solely for the sake of adding functionality -- there
- * are no overridable methods dealing with scheduling or
- * execution. However, you can override initialization and termination
- * methods surrounding the main task processing loop. If you do
- * create such a subclass, you will also need to supply a custom
- * ForkJoinWorkerThreadFactory to use it in a ForkJoinPool.
+ * A thread managed by a {@link ForkJoinPool}, which executes
+ * {@link ForkJoinTask}s.
+ * This class is subclassable solely for the sake of adding
+ * functionality -- there are no overridable methods dealing with
+ * scheduling or execution. However, you can override initialization
+ * and termination methods surrounding the main task processing loop.
+ * If you do create such a subclass, you will also need to supply a
+ * custom {@link ForkJoinPool.ForkJoinWorkerThreadFactory} to use it
+ * in a {@code ForkJoinPool}.
*
+ * @since 1.7
+ * @author Doug Lea
*/
public class ForkJoinWorkerThread extends Thread {
/*
- * Algorithm overview:
+ * ForkJoinWorkerThreads are managed by ForkJoinPools and perform
+ * ForkJoinTasks. For explanation, see the internal documentation
+ * of class ForkJoinPool.
*
- * 1. Work-Stealing: Work-stealing queues are special forms of
- * Deques that support only three of the four possible
- * end-operations -- push, pop, and deq (aka steal), and only do
- * so under the constraints that push and pop are called only from
- * the owning thread, while deq may be called from other threads.
- * (If you are unfamiliar with them, you probably want to read
- * Herlihy and Shavit's book "The Art of Multiprocessor
- * programming", chapter 16 describing these in more detail before
- * proceeding.) The main work-stealing queue design is roughly
- * similar to "Dynamic Circular Work-Stealing Deque" by David
- * Chase and Yossi Lev, SPAA 2005
- * (http://research.sun.com/scalable/pubs/index.html). The main
- * difference ultimately stems from gc requirements that we null
- * out taken slots as soon as we can, to maintain as small a
- * footprint as possible even in programs generating huge numbers
- * of tasks. To accomplish this, we shift the CAS arbitrating pop
- * vs deq (steal) from being on the indices ("base" and "sp") to
- * the slots themselves (mainly via method "casSlotNull()"). So,
- * both a successful pop and deq mainly entail CAS'ing a nonnull
- * slot to null. Because we rely on CASes of references, we do
- * not need tag bits on base or sp. They are simple ints as used
- * in any circular array-based queue (see for example ArrayDeque).
- * Updates to the indices must still be ordered in a way that
- * guarantees that (sp - base) > 0 means the queue is empty, but
- * otherwise may err on the side of possibly making the queue
- * appear nonempty when a push, pop, or deq have not fully
- * committed. Note that this means that the deq operation,
- * considered individually, is not wait-free. One thief cannot
- * successfully continue until another in-progress one (or, if
- * previously empty, a push) completes. However, in the
- * aggregate, we ensure at least probablistic non-blockingness. If
- * an attempted steal fails, a thief always chooses a different
- * random victim target to try next. So, in order for one thief to
- * progress, it suffices for any in-progress deq or new push on
- * any empty queue to complete. One reason this works well here is
- * that apparently-nonempty often means soon-to-be-stealable,
- * which gives threads a chance to activate if necessary before
- * stealing (see below).
- *
- * Efficient implementation of this approach currently relies on
- * an uncomfortable amount of "Unsafe" mechanics. To maintain
- * correct orderings, reads and writes of variable base require
- * volatile ordering. Variable sp does not require volatile write
- * but needs cheaper store-ordering on writes. Because they are
- * protected by volatile base reads, reads of the queue array and
- * its slots do not need volatile load semantics, but writes (in
- * push) require store order and CASes (in pop and deq) require
- * (volatile) CAS semantics. Since these combinations aren't
- * supported using ordinary volatiles, the only way to accomplish
- * these effciently is to use direct Unsafe calls. (Using external
- * AtomicIntegers and AtomicReferenceArrays for the indices and
- * array is significantly slower because of memory locality and
- * indirection effects.) Further, performance on most platforms is
- * very sensitive to placement and sizing of the (resizable) queue
- * array. Even though these queues don't usually become all that
- * big, the initial size must be large enough to counteract cache
- * contention effects across multiple queues (especially in the
- * presence of GC cardmarking). Also, to improve thread-locality,
- * queues are currently initialized immediately after the thread
- * gets the initial signal to start processing tasks. However,
- * all queue-related methods except pushTask are written in a way
- * that allows them to instead be lazily allocated and/or disposed
- * of when empty. All together, these low-level implementation
- * choices produce as much as a factor of 4 performance
- * improvement compared to naive implementations, and enable the
- * processing of billions of tasks per second, sometimes at the
- * expense of ugliness.
- *
- * 2. Run control: The primary run control is based on a global
- * counter (activeCount) held by the pool. It uses an algorithm
- * similar to that in Herlihy and Shavit section 17.6 to cause
- * threads to eventually block when all threads declare they are
- * inactive. (See variable "scans".) For this to work, threads
- * must be declared active when executing tasks, and before
- * stealing a task. They must be inactive before blocking on the
- * Pool Barrier (awaiting a new submission or other Pool
- * event). In between, there is some free play which we take
- * advantage of to avoid contention and rapid flickering of the
- * global activeCount: If inactive, we activate only if a victim
- * queue appears to be nonempty (see above). Similarly, a thread
- * tries to inactivate only after a full scan of other threads.
- * The net effect is that contention on activeCount is rarely a
- * measurable performance issue. (There are also a few other cases
- * where we scan for work rather than retry/block upon
- * contention.)
- *
- * 3. Selection control. We maintain policy of always choosing to
- * run local tasks rather than stealing, and always trying to
- * steal tasks before trying to run a new submission. All steals
- * are currently performed in randomly-chosen deq-order. It may be
- * worthwhile to bias these with locality / anti-locality
- * information, but doing this well probably requires more
- * lower-level information from JVMs than currently provided.
- */
-
- /**
- * Capacity of work-stealing queue array upon initialization.
- * Must be a power of two. Initial size must be at least 2, but is
- * padded to minimize cache effects.
- */
- private static final int INITIAL_QUEUE_CAPACITY = 1 << 13;
-
- /**
- * Maximum work-stealing queue array size. Must be less than or
- * equal to 1 << 28 to ensure lack of index wraparound. (This
- * is less than usual bounds, because we need leftshift by 3
- * to be in int range).
- */
- private static final int MAXIMUM_QUEUE_CAPACITY = 1 << 28;
-
- /**
- * The pool this thread works in. Accessed directly by ForkJoinTask
- */
- final ForkJoinPool pool;
-
- /**
- * The work-stealing queue array. Size must be a power of two.
- * Initialized when thread starts, to improve memory locality.
- */
- private ForkJoinTask<?>[] queue;
-
- /**
- * Index (mod queue.length) of next queue slot to push to or pop
- * from. It is written only by owner thread, via ordered store.
- * Both sp and base are allowed to wrap around on overflow, but
- * (sp - base) still estimates size.
- */
- private volatile int sp;
-
- /**
- * Index (mod queue.length) of least valid queue slot, which is
- * always the next position to steal from if nonempty.
- */
- private volatile int base;
-
- /**
- * Activity status. When true, this worker is considered active.
- * Must be false upon construction. It must be true when executing
- * tasks, and BEFORE stealing a task. It must be false before
- * calling pool.sync
- */
- private boolean active;
-
- /**
- * Run state of this worker. Supports simple versions of the usual
- * shutdown/shutdownNow control.
+ * This class just maintains links to its pool and WorkQueue. The
+ * pool field is set immediately upon construction, but the
+ * workQueue field is not set until a call to registerWorker
+ * completes. This leads to a visibility race, that is tolerated
+ * by requiring that the workQueue field is only accessed by the
+ * owning thread.
*/
- private volatile int runState;
- /**
- * Seed for random number generator for choosing steal victims.
- * Uses Marsaglia xorshift. Must be nonzero upon initialization.
- */
- private int seed;
-
- /**
- * Number of steals, transferred to pool when idle
- */
- private int stealCount;
-
- /**
- * Index of this worker in pool array. Set once by pool before
- * running, and accessed directly by pool during cleanup etc
- */
- int poolIndex;
-
- /**
- * The last barrier event waited for. Accessed in pool callback
- * methods, but only by current thread.
- */
- long lastEventCount;
-
- /**
- * True if use local fifo, not default lifo, for local polling
- */
- private boolean locallyFifo;
+ final ForkJoinPool pool; // the pool this thread works in
+ final ForkJoinPool.WorkQueue workQueue; // work-stealing mechanics
/**
* Creates a ForkJoinWorkerThread operating in the given pool.
+ *
* @param pool the pool this thread works in
* @throws NullPointerException if pool is null
*/
protected ForkJoinWorkerThread(ForkJoinPool pool) {
- if (pool == null) throw new NullPointerException();
+ // Use a placeholder until a useful name can be set in registerWorker
+ super("aForkJoinWorkerThread");
this.pool = pool;
- // Note: poolIndex is set by pool during construction
- // Remaining initialization is deferred to onStart
+ this.workQueue = pool.registerWorker(this);
}
- // Public access methods
-
/**
- * Returns the pool hosting this thread
+ * Returns the pool hosting this thread.
+ *
* @return the pool
*/
public ForkJoinPool getPool() {
@@ -231,543 +65,57 @@ public class ForkJoinWorkerThread extends Thread {
* threads (minus one) that have ever been created in the pool.
* This method may be useful for applications that track status or
* collect results per-worker rather than per-task.
- * @return the index number.
+ *
+ * @return the index number
*/
public int getPoolIndex() {
- return poolIndex;
- }
-
- /**
- * Establishes local first-in-first-out scheduling mode for forked
- * tasks that are never joined.
- * @param async if true, use locally FIFO scheduling
- */
- void setAsyncMode(boolean async) {
- locallyFifo = async;
- }
-
- // Runstate management
-
- // Runstate values. Order matters
- private static final int RUNNING = 0;
- private static final int SHUTDOWN = 1;
- private static final int TERMINATING = 2;
- private static final int TERMINATED = 3;
-
- final boolean isShutdown() { return runState >= SHUTDOWN; }
- final boolean isTerminating() { return runState >= TERMINATING; }
- final boolean isTerminated() { return runState == TERMINATED; }
- final boolean shutdown() { return transitionRunStateTo(SHUTDOWN); }
- final boolean shutdownNow() { return transitionRunStateTo(TERMINATING); }
-
- /**
- * Transition to at least the given state. Return true if not
- * already at least given state.
- */
- private boolean transitionRunStateTo(int state) {
- for (;;) {
- int s = runState;
- if (s >= state)
- return false;
- if (_unsafe.compareAndSwapInt(this, runStateOffset, s, state))
- return true;
- }
- }
-
- /**
- * Try to set status to active; fail on contention
- */
- private boolean tryActivate() {
- if (!active) {
- if (!pool.tryIncrementActiveCount())
- return false;
- active = true;
- }
- return true;
- }
-
- /**
- * Try to set status to active; fail on contention
- */
- private boolean tryInactivate() {
- if (active) {
- if (!pool.tryDecrementActiveCount())
- return false;
- active = false;
- }
- return true;
- }
-
- /**
- * Computes next value for random victim probe. Scans don't
- * require a very high quality generator, but also not a crummy
- * one. Marsaglia xor-shift is cheap and works well.
- */
- private static int xorShift(int r) {
- r ^= r << 1;
- r ^= r >>> 3;
- r ^= r << 10;
- return r;
- }
-
- // Lifecycle methods
-
- /**
- * This method is required to be public, but should never be
- * called explicitly. It performs the main run loop to execute
- * ForkJoinTasks.
- */
- public void run() {
- Throwable exception = null;
- try {
- onStart();
- pool.sync(this); // await first pool event
- mainLoop();
- } catch (Throwable ex) {
- exception = ex;
- } finally {
- onTermination(exception);
- }
- }
-
- /**
- * Execute tasks until shut down.
- */
- private void mainLoop() {
- while (!isShutdown()) {
- ForkJoinTask<?> t = pollTask();
- if (t != null || (t = pollSubmission()) != null)
- t.quietlyExec();
- else if (tryInactivate())
- pool.sync(this);
- }
+ return workQueue.poolIndex;
}
/**
* Initializes internal state after construction but before
* processing any tasks. If you override this method, you must
- * invoke super.onStart() at the beginning of the method.
+ * invoke {@code super.onStart()} at the beginning of the method.
* Initialization requires care: Most fields must have legal
* default values, to ensure that attempted accesses from other
* threads work correctly even before this thread starts
* processing tasks.
*/
protected void onStart() {
- // Allocate while starting to improve chances of thread-local
- // isolation
- queue = new ForkJoinTask<?>[INITIAL_QUEUE_CAPACITY];
- // Initial value of seed need not be especially random but
- // should differ across workers and must be nonzero
- int p = poolIndex + 1;
- seed = p + (p << 8) + (p << 16) + (p << 24); // spread bits
}
/**
- * Perform cleanup associated with termination of this worker
+ * Performs cleanup associated with termination of this worker
* thread. If you override this method, you must invoke
- * super.onTermination at the end of the overridden method.
+ * {@code super.onTermination} at the end of the overridden method.
*
* @param exception the exception causing this thread to abort due
- * to an unrecoverable error, or null if completed normally.
+ * to an unrecoverable error, or {@code null} if completed normally
*/
protected void onTermination(Throwable exception) {
- // Execute remaining local tasks unless aborting or terminating
- while (exception == null && !pool.isTerminating() && base != sp) {
- try {
- ForkJoinTask<?> t = popTask();
- if (t != null)
- t.quietlyExec();
- } catch(Throwable ex) {
- exception = ex;
- }
- }
- // Cancel other tasks, transition status, notify pool, and
- // propagate exception to uncaught exception handler
- try {
- do;while (!tryInactivate()); // ensure inactive
- cancelTasks();
- runState = TERMINATED;
- pool.workerTerminated(this);
- } catch (Throwable ex) { // Shouldn't ever happen
- if (exception == null) // but if so, at least rethrown
- exception = ex;
- } finally {
- if (exception != null)
- ForkJoinTask.rethrowException(exception);
- }
}
- // Intrinsics-based support for queue operations.
-
/**
- * Add in store-order the given task at given slot of q to
- * null. Caller must ensure q is nonnull and index is in range.
- */
- private static void setSlot(ForkJoinTask<?>[] q, int i,
- ForkJoinTask<?> t){
- _unsafe.putOrderedObject(q, (i << qShift) + qBase, t);
- }
-
- /**
- * CAS given slot of q to null. Caller must ensure q is nonnull
- * and index is in range.
- */
- private static boolean casSlotNull(ForkJoinTask<?>[] q, int i,
- ForkJoinTask<?> t) {
- return _unsafe.compareAndSwapObject(q, (i << qShift) + qBase, t, null);
- }
-
- /**
- * Sets sp in store-order.
- */
- private void storeSp(int s) {
- _unsafe.putOrderedInt(this, spOffset, s);
- }
-
- // Main queue methods
-
- /**
- * Pushes a task. Called only by current thread.
- * @param t the task. Caller must ensure nonnull
- */
- final void pushTask(ForkJoinTask<?> t) {
- ForkJoinTask<?>[] q = queue;
- int mask = q.length - 1;
- int s = sp;
- setSlot(q, s & mask, t);
- storeSp(++s);
- if ((s -= base) == 1)
- pool.signalWork();
- else if (s >= mask)
- growQueue();
- }
-
- /**
- * Tries to take a task from the base of the queue, failing if
- * either empty or contended.
- * @return a task, or null if none or contended.
- */
- final ForkJoinTask<?> deqTask() {
- ForkJoinTask<?> t;
- ForkJoinTask<?>[] q;
- int i;
- int b;
- if (sp != (b = base) &&
- (q = queue) != null && // must read q after b
- (t = q[i = (q.length - 1) & b]) != null &&
- casSlotNull(q, i, t)) {
- base = b + 1;
- return t;
- }
- return null;
- }
-
- /**
- * Returns a popped task, or null if empty. Ensures active status
- * if nonnull. Called only by current thread.
- */
- final ForkJoinTask<?> popTask() {
- int s = sp;
- while (s != base) {
- if (tryActivate()) {
- ForkJoinTask<?>[] q = queue;
- int mask = q.length - 1;
- int i = (s - 1) & mask;
- ForkJoinTask<?> t = q[i];
- if (t == null || !casSlotNull(q, i, t))
- break;
- storeSp(s - 1);
- return t;
- }
- }
- return null;
- }
-
- /**
- * Specialized version of popTask to pop only if
- * topmost element is the given task. Called only
- * by current thread while active.
- * @param t the task. Caller must ensure nonnull
- */
- final boolean unpushTask(ForkJoinTask<?> t) {
- ForkJoinTask<?>[] q = queue;
- int mask = q.length - 1;
- int s = sp - 1;
- if (casSlotNull(q, s & mask, t)) {
- storeSp(s);
- return true;
- }
- return false;
- }
-
- /**
- * Returns next task.
- */
- final ForkJoinTask<?> peekTask() {
- ForkJoinTask<?>[] q = queue;
- if (q == null)
- return null;
- int mask = q.length - 1;
- int i = locallyFifo? base : (sp - 1);
- return q[i & mask];
- }
-
- /**
- * Doubles queue array size. Transfers elements by emulating
- * steals (deqs) from old array and placing, oldest first, into
- * new array.
- */
- private void growQueue() {
- ForkJoinTask<?>[] oldQ = queue;
- int oldSize = oldQ.length;
- int newSize = oldSize << 1;
- if (newSize > MAXIMUM_QUEUE_CAPACITY)
- throw new RejectedExecutionException("Queue capacity exceeded");
- ForkJoinTask<?>[] newQ = queue = new ForkJoinTask<?>[newSize];
-
- int b = base;
- int bf = b + oldSize;
- int oldMask = oldSize - 1;
- int newMask = newSize - 1;
- do {
- int oldIndex = b & oldMask;
- ForkJoinTask<?> t = oldQ[oldIndex];
- if (t != null && !casSlotNull(oldQ, oldIndex, t))
- t = null;
- setSlot(newQ, b & newMask, t);
- } while (++b != bf);
- pool.signalWork();
- }
-
- /**
- * Tries to steal a task from another worker. Starts at a random
- * index of workers array, and probes workers until finding one
- * with non-empty queue or finding that all are empty. It
- * randomly selects the first n probes. If these are empty, it
- * resorts to a full circular traversal, which is necessary to
- * accurately set active status by caller. Also restarts if pool
- * events occurred since last scan, which forces refresh of
- * workers array, in case barrier was associated with resize.
- *
- * This method must be both fast and quiet -- usually avoiding
- * memory accesses that could disrupt cache sharing etc other than
- * those needed to check for and take tasks. This accounts for,
- * among other things, updating random seed in place without
- * storing it until exit.
- *
- * @return a task, or null if none found
- */
- private ForkJoinTask<?> scan() {
- ForkJoinTask<?> t = null;
- int r = seed; // extract once to keep scan quiet
- ForkJoinWorkerThread[] ws; // refreshed on outer loop
- int mask; // must be power 2 minus 1 and > 0
- outer:do {
- if ((ws = pool.workers) != null && (mask = ws.length - 1) > 0) {
- int idx = r;
- int probes = ~mask; // use random index while negative
- for (;;) {
- r = xorShift(r); // update random seed
- ForkJoinWorkerThread v = ws[mask & idx];
- if (v == null || v.sp == v.base) {
- if (probes <= mask)
- idx = (probes++ < 0)? r : (idx + 1);
- else
- break;
- }
- else if (!tryActivate() || (t = v.deqTask()) == null)
- continue outer; // restart on contention
- else
- break outer;
- }
- }
- } while (pool.hasNewSyncEvent(this)); // retry on pool events
- seed = r;
- return t;
- }
-
- /**
- * gets and removes a local or stolen a task
- * @return a task, if available
- */
- final ForkJoinTask<?> pollTask() {
- ForkJoinTask<?> t = locallyFifo? deqTask() : popTask();
- if (t == null && (t = scan()) != null)
- ++stealCount;
- return t;
- }
-
- /**
- * gets a local task
- * @return a task, if available
- */
- final ForkJoinTask<?> pollLocalTask() {
- return locallyFifo? deqTask() : popTask();
- }
-
- /**
- * Returns a pool submission, if one exists, activating first.
- * @return a submission, if available
- */
- private ForkJoinTask<?> pollSubmission() {
- ForkJoinPool p = pool;
- while (p.hasQueuedSubmissions()) {
- ForkJoinTask<?> t;
- if (tryActivate() && (t = p.pollSubmission()) != null)
- return t;
- }
- return null;
- }
-
- // Methods accessed only by Pool
-
- /**
- * Removes and cancels all tasks in queue. Can be called from any
- * thread.
- */
- final void cancelTasks() {
- ForkJoinTask<?> t;
- while (base != sp && (t = deqTask()) != null)
- t.cancelIgnoringExceptions();
- }
-
- /**
- * Drains tasks to given collection c
- * @return the number of tasks drained
- */
- final int drainTasksTo(Collection<ForkJoinTask<?>> c) {
- int n = 0;
- ForkJoinTask<?> t;
- while (base != sp && (t = deqTask()) != null) {
- c.add(t);
- ++n;
- }
- return n;
- }
-
- /**
- * Get and clear steal count for accumulation by pool. Called
- * only when known to be idle (in pool.sync and termination).
- */
- final int getAndClearStealCount() {
- int sc = stealCount;
- stealCount = 0;
- return sc;
- }
-
- /**
- * Returns true if at least one worker in the given array appears
- * to have at least one queued task.
- * @param ws array of workers
- */
- static boolean hasQueuedTasks(ForkJoinWorkerThread[] ws) {
- if (ws != null) {
- int len = ws.length;
- for (int j = 0; j < 2; ++j) { // need two passes for clean sweep
- for (int i = 0; i < len; ++i) {
- ForkJoinWorkerThread w = ws[i];
- if (w != null && w.sp != w.base)
- return true;
- }
- }
- }
- return false;
- }
-
- // Support methods for ForkJoinTask
-
- /**
- * Returns an estimate of the number of tasks in the queue.
- */
- final int getQueueSize() {
- int n = sp - base;
- return n < 0? 0 : n; // suppress momentarily negative values
- }
-
- /**
- * Returns an estimate of the number of tasks, offset by a
- * function of number of idle workers.
- */
- final int getEstimatedSurplusTaskCount() {
- // The halving approximates weighting idle vs non-idle workers
- return (sp - base) - (pool.getIdleThreadCount() >>> 1);
- }
-
- /**
- * Scan, returning early if joinMe done
- */
- final ForkJoinTask<?> scanWhileJoining(ForkJoinTask<?> joinMe) {
- ForkJoinTask<?> t = pollTask();
- if (t != null && joinMe.status < 0 && sp == base) {
- pushTask(t); // unsteal if done and this task would be stealable
- t = null;
- }
- return t;
- }
-
- /**
- * Runs tasks until pool isQuiescent
+ * This method is required to be public, but should never be
+ * called explicitly. It performs the main run loop to execute
+ * {@link ForkJoinTask}s.
*/
- final void helpQuiescePool() {
- for (;;) {
- ForkJoinTask<?> t = pollTask();
- if (t != null)
- t.quietlyExec();
- else if (tryInactivate() && pool.isQuiescent())
- break;
- }
- do;while (!tryActivate()); // re-activate on exit
- }
-
- // Temporary Unsafe mechanics for preliminary release
- private static Unsafe getUnsafe() throws Throwable {
+ public void run() {
+ Throwable exception = null;
try {
- return Unsafe.getUnsafe();
- } catch (SecurityException se) {
+ onStart();
+ pool.runWorker(workQueue);
+ } catch (Throwable ex) {
+ exception = ex;
+ } finally {
try {
- return java.security.AccessController.doPrivileged
- (new java.security.PrivilegedExceptionAction<Unsafe>() {
- public Unsafe run() throws Exception {
- return getUnsafePrivileged();
- }});
- } catch (java.security.PrivilegedActionException e) {
- throw e.getCause();
+ onTermination(exception);
+ } catch (Throwable ex) {
+ if (exception == null)
+ exception = ex;
+ } finally {
+ pool.deregisterWorker(this, exception);
}
}
}
-
- private static Unsafe getUnsafePrivileged()
- throws NoSuchFieldException, IllegalAccessException {
- Field f = Unsafe.class.getDeclaredField("theUnsafe");
- f.setAccessible(true);
- return (Unsafe) f.get(null);
- }
-
- private static long fieldOffset(String fieldName)
- throws NoSuchFieldException {
- return _unsafe.objectFieldOffset
- (ForkJoinWorkerThread.class.getDeclaredField(fieldName));
- }
-
- static final Unsafe _unsafe;
- static final long baseOffset;
- static final long spOffset;
- static final long runStateOffset;
- static final long qBase;
- static final int qShift;
- static {
- try {
- _unsafe = getUnsafe();
- baseOffset = fieldOffset("base");
- spOffset = fieldOffset("sp");
- runStateOffset = fieldOffset("runState");
- qBase = _unsafe.arrayBaseOffset(ForkJoinTask[].class);
- int s = _unsafe.arrayIndexScale(ForkJoinTask[].class);
- if ((s & (s-1)) != 0)
- throw new Error("data type scale not a power of two");
- qShift = 31 - Integer.numberOfLeadingZeros(s);
- } catch (Throwable e) {
- throw new RuntimeException("Could not initialize intrinsics", e);
- }
- }
}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java b/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
index 3b46c17..07e81b3 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/LinkedTransferQueue.java
@@ -1,30 +1,38 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
-import java.util.concurrent.*;
-import java.util.concurrent.locks.*;
-import java.util.concurrent.atomic.*;
-import java.util.*;
-import java.io.*;
-import sun.misc.Unsafe;
-import java.lang.reflect.*;
+
+import java.util.AbstractQueue;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+import java.util.Queue;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.LockSupport;
/**
- * An unbounded {@linkplain TransferQueue} based on linked nodes.
+ * An unbounded {@link TransferQueue} based on linked nodes.
* This queue orders elements FIFO (first-in-first-out) with respect
* to any given producer. The <em>head</em> of the queue is that
* element that has been on the queue the longest time for some
* producer. The <em>tail</em> of the queue is that element that has
* been on the queue the shortest time for some producer.
*
- * <p>Beware that, unlike in most collections, the {@code size}
- * method is <em>NOT</em> a constant-time operation. Because of the
+ * <p>Beware that, unlike in most collections, the {@code size} method
+ * is <em>NOT</em> a constant-time operation. Because of the
* asynchronous nature of these queues, determining the current number
- * of elements requires a traversal of the elements.
+ * of elements requires a traversal of the elements, and so may report
+ * inaccurate results if this collection is modified during traversal.
+ * Additionally, the bulk operations {@code addAll},
+ * {@code removeAll}, {@code retainAll}, {@code containsAll},
+ * {@code equals}, and {@code toArray} are <em>not</em> guaranteed
+ * to be performed atomically. For example, an iterator operating
+ * concurrently with an {@code addAll} operation might view only some
+ * of the added elements.
*
* <p>This class and its iterator implement all of the
* <em>optional</em> methods of the {@link Collection} and {@link
@@ -44,381 +52,938 @@ import java.lang.reflect.*;
* @since 1.7
* @author Doug Lea
* @param <E> the type of elements held in this collection
- *
*/
public class LinkedTransferQueue<E> extends AbstractQueue<E>
implements TransferQueue<E>, java.io.Serializable {
private static final long serialVersionUID = -3223113410248163686L;
/*
- * This class extends the approach used in FIFO-mode
- * SynchronousQueues. See the internal documentation, as well as
- * the PPoPP 2006 paper "Scalable Synchronous Queues" by Scherer,
- * Lea & Scott
- * (http://www.cs.rice.edu/~wns1/papers/2006-PPoPP-SQ.pdf)
+ * *** Overview of Dual Queues with Slack ***
+ *
+ * Dual Queues, introduced by Scherer and Scott
+ * (http://www.cs.rice.edu/~wns1/papers/2004-DISC-DDS.pdf) are
+ * (linked) queues in which nodes may represent either data or
+ * requests. When a thread tries to enqueue a data node, but
+ * encounters a request node, it instead "matches" and removes it;
+ * and vice versa for enqueuing requests. Blocking Dual Queues
+ * arrange that threads enqueuing unmatched requests block until
+ * other threads provide the match. Dual Synchronous Queues (see
+ * Scherer, Lea, & Scott
+ * http://www.cs.rochester.edu/u/scott/papers/2009_Scherer_CACM_SSQ.pdf)
+ * additionally arrange that threads enqueuing unmatched data also
+ * block. Dual Transfer Queues support all of these modes, as
+ * dictated by callers.
+ *
+ * A FIFO dual queue may be implemented using a variation of the
+ * Michael & Scott (M&S) lock-free queue algorithm
+ * (http://www.cs.rochester.edu/u/scott/papers/1996_PODC_queues.pdf).
+ * It maintains two pointer fields, "head", pointing to a
+ * (matched) node that in turn points to the first actual
+ * (unmatched) queue node (or null if empty); and "tail" that
+ * points to the last node on the queue (or again null if
+ * empty). For example, here is a possible queue with four data
+ * elements:
+ *
+ * head tail
+ * | |
+ * v v
+ * M -> U -> U -> U -> U
+ *
+ * The M&S queue algorithm is known to be prone to scalability and
+ * overhead limitations when maintaining (via CAS) these head and
+ * tail pointers. This has led to the development of
+ * contention-reducing variants such as elimination arrays (see
+ * Moir et al http://portal.acm.org/citation.cfm?id=1074013) and
+ * optimistic back pointers (see Ladan-Mozes & Shavit
+ * http://people.csail.mit.edu/edya/publications/OptimisticFIFOQueue-journal.pdf).
+ * However, the nature of dual queues enables a simpler tactic for
+ * improving M&S-style implementations when dual-ness is needed.
+ *
+ * In a dual queue, each node must atomically maintain its match
+ * status. While there are other possible variants, we implement
+ * this here as: for a data-mode node, matching entails CASing an
+ * "item" field from a non-null data value to null upon match, and
+ * vice-versa for request nodes, CASing from null to a data
+ * value. (Note that the linearization properties of this style of
+ * queue are easy to verify -- elements are made available by
+ * linking, and unavailable by matching.) Compared to plain M&S
+ * queues, this property of dual queues requires one additional
+ * successful atomic operation per enq/deq pair. But it also
+ * enables lower cost variants of queue maintenance mechanics. (A
+ * variation of this idea applies even for non-dual queues that
+ * support deletion of interior elements, such as
+ * j.u.c.ConcurrentLinkedQueue.)
+ *
+ * Once a node is matched, its match status can never again
+ * change. We may thus arrange that the linked list of them
+ * contain a prefix of zero or more matched nodes, followed by a
+ * suffix of zero or more unmatched nodes. (Note that we allow
+ * both the prefix and suffix to be zero length, which in turn
+ * means that we do not use a dummy header.) If we were not
+ * concerned with either time or space efficiency, we could
+ * correctly perform enqueue and dequeue operations by traversing
+ * from a pointer to the initial node; CASing the item of the
+ * first unmatched node on match and CASing the next field of the
+ * trailing node on appends. (Plus some special-casing when
+ * initially empty). While this would be a terrible idea in
+ * itself, it does have the benefit of not requiring ANY atomic
+ * updates on head/tail fields.
+ *
+ * We introduce here an approach that lies between the extremes of
+ * never versus always updating queue (head and tail) pointers.
+ * This offers a tradeoff between sometimes requiring extra
+ * traversal steps to locate the first and/or last unmatched
+ * nodes, versus the reduced overhead and contention of fewer
+ * updates to queue pointers. For example, a possible snapshot of
+ * a queue is:
+ *
+ * head tail
+ * | |
+ * v v
+ * M -> M -> U -> U -> U -> U
+ *
+ * The best value for this "slack" (the targeted maximum distance
+ * between the value of "head" and the first unmatched node, and
+ * similarly for "tail") is an empirical matter. We have found
+ * that using very small constants in the range of 1-3 work best
+ * over a range of platforms. Larger values introduce increasing
+ * costs of cache misses and risks of long traversal chains, while
+ * smaller values increase CAS contention and overhead.
+ *
+ * Dual queues with slack differ from plain M&S dual queues by
+ * virtue of only sometimes updating head or tail pointers when
+ * matching, appending, or even traversing nodes; in order to
+ * maintain a targeted slack. The idea of "sometimes" may be
+ * operationalized in several ways. The simplest is to use a
+ * per-operation counter incremented on each traversal step, and
+ * to try (via CAS) to update the associated queue pointer
+ * whenever the count exceeds a threshold. Another, that requires
+ * more overhead, is to use random number generators to update
+ * with a given probability per traversal step.
+ *
+ * In any strategy along these lines, because CASes updating
+ * fields may fail, the actual slack may exceed targeted
+ * slack. However, they may be retried at any time to maintain
+ * targets. Even when using very small slack values, this
+ * approach works well for dual queues because it allows all
+ * operations up to the point of matching or appending an item
+ * (hence potentially allowing progress by another thread) to be
+ * read-only, thus not introducing any further contention. As
+ * described below, we implement this by performing slack
+ * maintenance retries only after these points.
+ *
+ * As an accompaniment to such techniques, traversal overhead can
+ * be further reduced without increasing contention of head
+ * pointer updates: Threads may sometimes shortcut the "next" link
+ * path from the current "head" node to be closer to the currently
+ * known first unmatched node, and similarly for tail. Again, this
+ * may be triggered with using thresholds or randomization.
+ *
+ * These ideas must be further extended to avoid unbounded amounts
+ * of costly-to-reclaim garbage caused by the sequential "next"
+ * links of nodes starting at old forgotten head nodes: As first
+ * described in detail by Boehm
+ * (http://portal.acm.org/citation.cfm?doid=503272.503282) if a GC
+ * delays noticing that any arbitrarily old node has become
+ * garbage, all newer dead nodes will also be unreclaimed.
+ * (Similar issues arise in non-GC environments.) To cope with
+ * this in our implementation, upon CASing to advance the head
+ * pointer, we set the "next" link of the previous head to point
+ * only to itself; thus limiting the length of connected dead lists.
+ * (We also take similar care to wipe out possibly garbage
+ * retaining values held in other Node fields.) However, doing so
+ * adds some further complexity to traversal: If any "next"
+ * pointer links to itself, it indicates that the current thread
+ * has lagged behind a head-update, and so the traversal must
+ * continue from the "head". Traversals trying to find the
+ * current tail starting from "tail" may also encounter
+ * self-links, in which case they also continue at "head".
+ *
+ * It is tempting in slack-based scheme to not even use CAS for
+ * updates (similarly to Ladan-Mozes & Shavit). However, this
+ * cannot be done for head updates under the above link-forgetting
+ * mechanics because an update may leave head at a detached node.
+ * And while direct writes are possible for tail updates, they
+ * increase the risk of long retraversals, and hence long garbage
+ * chains, which can be much more costly than is worthwhile
+ * considering that the cost difference of performing a CAS vs
+ * write is smaller when they are not triggered on each operation
+ * (especially considering that writes and CASes equally require
+ * additional GC bookkeeping ("write barriers") that are sometimes
+ * more costly than the writes themselves because of contention).
+ *
+ * *** Overview of implementation ***
+ *
+ * We use a threshold-based approach to updates, with a slack
+ * threshold of two -- that is, we update head/tail when the
+ * current pointer appears to be two or more steps away from the
+ * first/last node. The slack value is hard-wired: a path greater
+ * than one is naturally implemented by checking equality of
+ * traversal pointers except when the list has only one element,
+ * in which case we keep slack threshold at one. Avoiding tracking
+ * explicit counts across method calls slightly simplifies an
+ * already-messy implementation. Using randomization would
+ * probably work better if there were a low-quality dirt-cheap
+ * per-thread one available, but even ThreadLocalRandom is too
+ * heavy for these purposes.
+ *
+ * With such a small slack threshold value, it is not worthwhile
+ * to augment this with path short-circuiting (i.e., unsplicing
+ * interior nodes) except in the case of cancellation/removal (see
+ * below).
+ *
+ * We allow both the head and tail fields to be null before any
+ * nodes are enqueued; initializing upon first append. This
+ * simplifies some other logic, as well as providing more
+ * efficient explicit control paths instead of letting JVMs insert
+ * implicit NullPointerExceptions when they are null. While not
+ * currently fully implemented, we also leave open the possibility
+ * of re-nulling these fields when empty (which is complicated to
+ * arrange, for little benefit.)
+ *
+ * All enqueue/dequeue operations are handled by the single method
+ * "xfer" with parameters indicating whether to act as some form
+ * of offer, put, poll, take, or transfer (each possibly with
+ * timeout). The relative complexity of using one monolithic
+ * method outweighs the code bulk and maintenance problems of
+ * using separate methods for each case.
*
- * The main extension is to provide different Wait modes for the
- * main "xfer" method that puts or takes items. These don't
- * impact the basic dual-queue logic, but instead control whether
- * or how threads block upon insertion of request or data nodes
- * into the dual queue. It also uses slightly different
- * conventions for tracking whether nodes are off-list or
- * cancelled.
+ * Operation consists of up to three phases. The first is
+ * implemented within method xfer, the second in tryAppend, and
+ * the third in method awaitMatch.
+ *
+ * 1. Try to match an existing node
+ *
+ * Starting at head, skip already-matched nodes until finding
+ * an unmatched node of opposite mode, if one exists, in which
+ * case matching it and returning, also if necessary updating
+ * head to one past the matched node (or the node itself if the
+ * list has no other unmatched nodes). If the CAS misses, then
+ * a loop retries advancing head by two steps until either
+ * success or the slack is at most two. By requiring that each
+ * attempt advances head by two (if applicable), we ensure that
+ * the slack does not grow without bound. Traversals also check
+ * if the initial head is now off-list, in which case they
+ * start at the new head.
+ *
+ * If no candidates are found and the call was untimed
+ * poll/offer, (argument "how" is NOW) return.
+ *
+ * 2. Try to append a new node (method tryAppend)
+ *
+ * Starting at current tail pointer, find the actual last node
+ * and try to append a new node (or if head was null, establish
+ * the first node). Nodes can be appended only if their
+ * predecessors are either already matched or are of the same
+ * mode. If we detect otherwise, then a new node with opposite
+ * mode must have been appended during traversal, so we must
+ * restart at phase 1. The traversal and update steps are
+ * otherwise similar to phase 1: Retrying upon CAS misses and
+ * checking for staleness. In particular, if a self-link is
+ * encountered, then we can safely jump to a node on the list
+ * by continuing the traversal at current head.
+ *
+ * On successful append, if the call was ASYNC, return.
+ *
+ * 3. Await match or cancellation (method awaitMatch)
+ *
+ * Wait for another thread to match node; instead cancelling if
+ * the current thread was interrupted or the wait timed out. On
+ * multiprocessors, we use front-of-queue spinning: If a node
+ * appears to be the first unmatched node in the queue, it
+ * spins a bit before blocking. In either case, before blocking
+ * it tries to unsplice any nodes between the current "head"
+ * and the first unmatched node.
+ *
+ * Front-of-queue spinning vastly improves performance of
+ * heavily contended queues. And so long as it is relatively
+ * brief and "quiet", spinning does not much impact performance
+ * of less-contended queues. During spins threads check their
+ * interrupt status and generate a thread-local random number
+ * to decide to occasionally perform a Thread.yield. While
+ * yield has underdefined specs, we assume that it might help,
+ * and will not hurt, in limiting impact of spinning on busy
+ * systems. We also use smaller (1/2) spins for nodes that are
+ * not known to be front but whose predecessors have not
+ * blocked -- these "chained" spins avoid artifacts of
+ * front-of-queue rules which otherwise lead to alternating
+ * nodes spinning vs blocking. Further, front threads that
+ * represent phase changes (from data to request node or vice
+ * versa) compared to their predecessors receive additional
+ * chained spins, reflecting longer paths typically required to
+ * unblock threads during phase changes.
+ *
+ *
+ * ** Unlinking removed interior nodes **
+ *
+ * In addition to minimizing garbage retention via self-linking
+ * described above, we also unlink removed interior nodes. These
+ * may arise due to timed out or interrupted waits, or calls to
+ * remove(x) or Iterator.remove. Normally, given a node that was
+ * at one time known to be the predecessor of some node s that is
+ * to be removed, we can unsplice s by CASing the next field of
+ * its predecessor if it still points to s (otherwise s must
+ * already have been removed or is now offlist). But there are two
+ * situations in which we cannot guarantee to make node s
+ * unreachable in this way: (1) If s is the trailing node of list
+ * (i.e., with null next), then it is pinned as the target node
+ * for appends, so can only be removed later after other nodes are
+ * appended. (2) We cannot necessarily unlink s given a
+ * predecessor node that is matched (including the case of being
+ * cancelled): the predecessor may already be unspliced, in which
+ * case some previous reachable node may still point to s.
+ * (For further explanation see Herlihy & Shavit "The Art of
+ * Multiprocessor Programming" chapter 9). Although, in both
+ * cases, we can rule out the need for further action if either s
+ * or its predecessor are (or can be made to be) at, or fall off
+ * from, the head of list.
+ *
+ * Without taking these into account, it would be possible for an
+ * unbounded number of supposedly removed nodes to remain
+ * reachable. Situations leading to such buildup are uncommon but
+ * can occur in practice; for example when a series of short timed
+ * calls to poll repeatedly time out but never otherwise fall off
+ * the list because of an untimed call to take at the front of the
+ * queue.
+ *
+ * When these cases arise, rather than always retraversing the
+ * entire list to find an actual predecessor to unlink (which
+ * won't help for case (1) anyway), we record a conservative
+ * estimate of possible unsplice failures (in "sweepVotes").
+ * We trigger a full sweep when the estimate exceeds a threshold
+ * ("SWEEP_THRESHOLD") indicating the maximum number of estimated
+ * removal failures to tolerate before sweeping through, unlinking
+ * cancelled nodes that were not unlinked upon initial removal.
+ * We perform sweeps by the thread hitting threshold (rather than
+ * background threads or by spreading work to other threads)
+ * because in the main contexts in which removal occurs, the
+ * caller is already timed-out, cancelled, or performing a
+ * potentially O(n) operation (e.g. remove(x)), none of which are
+ * time-critical enough to warrant the overhead that alternatives
+ * would impose on other threads.
+ *
+ * Because the sweepVotes estimate is conservative, and because
+ * nodes become unlinked "naturally" as they fall off the head of
+ * the queue, and because we allow votes to accumulate even while
+ * sweeps are in progress, there are typically significantly fewer
+ * such nodes than estimated. Choice of a threshold value
+ * balances the likelihood of wasted effort and contention, versus
+ * providing a worst-case bound on retention of interior nodes in
+ * quiescent queues. The value defined below was chosen
+ * empirically to balance these under various timeout scenarios.
+ *
+ * Note that we cannot self-link unlinked interior nodes during
+ * sweeps. However, the associated garbage chains terminate when
+ * some successor ultimately falls off the head of the list and is
+ * self-linked.
*/
- // Wait modes for xfer method
- static final int NOWAIT = 0;
- static final int TIMEOUT = 1;
- static final int WAIT = 2;
-
- /** The number of CPUs, for spin control */
- static final int NCPUS = Runtime.getRuntime().availableProcessors();
+ /** True if on multiprocessor */
+ private static final boolean MP =
+ Runtime.getRuntime().availableProcessors() > 1;
/**
- * The number of times to spin before blocking in timed waits.
- * The value is empirically derived -- it works well across a
- * variety of processors and OSes. Empirically, the best value
- * seems not to vary with number of CPUs (beyond 2) so is just
- * a constant.
+ * The number of times to spin (with randomly interspersed calls
+ * to Thread.yield) on multiprocessor before blocking when a node
+ * is apparently the first waiter in the queue. See above for
+ * explanation. Must be a power of two. The value is empirically
+ * derived -- it works pretty well across a variety of processors,
+ * numbers of CPUs, and OSes.
*/
- static final int maxTimedSpins = (NCPUS < 2)? 0 : 32;
+ private static final int FRONT_SPINS = 1 << 7;
/**
- * The number of times to spin before blocking in untimed waits.
- * This is greater than timed value because untimed waits spin
- * faster since they don't need to check times on each spin.
+ * The number of times to spin before blocking when a node is
+ * preceded by another node that is apparently spinning. Also
+ * serves as an increment to FRONT_SPINS on phase changes, and as
+ * base average frequency for yielding during spins. Must be a
+ * power of two.
*/
- static final int maxUntimedSpins = maxTimedSpins * 16;
+ private static final int CHAINED_SPINS = FRONT_SPINS >>> 1;
/**
- * The number of nanoseconds for which it is faster to spin
- * rather than to use timed park. A rough estimate suffices.
+ * The maximum number of estimated removal failures (sweepVotes)
+ * to tolerate before sweeping through the queue unlinking
+ * cancelled nodes that were not unlinked upon initial
+ * removal. See above for explanation. The value must be at least
+ * two to avoid useless sweeps when removing trailing nodes.
*/
- static final long spinForTimeoutThreshold = 1000L;
+ static final int SWEEP_THRESHOLD = 32;
/**
- * Node class for LinkedTransferQueue. Opportunistically
- * subclasses from AtomicReference to represent item. Uses Object,
- * not E, to allow setting item to "this" after use, to avoid
- * garbage retention. Similarly, setting the next field to this is
- * used as sentinel that node is off list.
+ * Queue nodes. Uses Object, not E, for items to allow forgetting
+ * them after use. Relies heavily on Unsafe mechanics to minimize
+ * unnecessary ordering constraints: Writes that are intrinsically
+ * ordered wrt other accesses or CASes use simple relaxed forms.
*/
- static final class QNode extends AtomicReference<Object> {
- volatile QNode next;
- volatile Thread waiter; // to control park/unpark
- final boolean isData;
- QNode(Object item, boolean isData) {
- super(item);
+ static final class Node {
+ final boolean isData; // false if this is a request node
+ volatile Object item; // initially non-null if isData; CASed to match
+ volatile Node next;
+ volatile Thread waiter; // null until waiting
+
+ // CAS methods for fields
+ final boolean casNext(Node cmp, Node val) {
+ return UNSAFE.compareAndSwapObject(this, nextOffset, cmp, val);
+ }
+
+ final boolean casItem(Object cmp, Object val) {
+ // assert cmp == null || cmp.getClass() != Node.class;
+ return UNSAFE.compareAndSwapObject(this, itemOffset, cmp, val);
+ }
+
+ /**
+ * Constructs a new node. Uses relaxed write because item can
+ * only be seen after publication via casNext.
+ */
+ Node(Object item, boolean isData) {
+ UNSAFE.putObject(this, itemOffset, item); // relaxed write
this.isData = isData;
}
- static final AtomicReferenceFieldUpdater<QNode, QNode>
- nextUpdater = AtomicReferenceFieldUpdater.newUpdater
- (QNode.class, QNode.class, "next");
+ /**
+ * Links node to itself to avoid garbage retention. Called
+ * only after CASing head field, so uses relaxed write.
+ */
+ final void forgetNext() {
+ UNSAFE.putObject(this, nextOffset, this);
+ }
- final boolean casNext(QNode cmp, QNode val) {
- return nextUpdater.compareAndSet(this, cmp, val);
+ /**
+ * Sets item to self and waiter to null, to avoid garbage
+ * retention after matching or cancelling. Uses relaxed writes
+ * because order is already constrained in the only calling
+ * contexts: item is forgotten only after volatile/atomic
+ * mechanics that extract items. Similarly, clearing waiter
+ * follows either CAS or return from park (if ever parked;
+ * else we don't care).
+ */
+ final void forgetContents() {
+ UNSAFE.putObject(this, itemOffset, this);
+ UNSAFE.putObject(this, waiterOffset, null);
}
- final void clearNext() {
- nextUpdater.lazySet(this, this);
+ /**
+ * Returns true if this node has been matched, including the
+ * case of artificial matches due to cancellation.
+ */
+ final boolean isMatched() {
+ Object x = item;
+ return (x == this) || ((x == null) == isData);
}
- }
+ /**
+ * Returns true if this is an unmatched request node.
+ */
+ final boolean isUnmatchedRequest() {
+ return !isData && item == null;
+ }
- /**
- * Padded version of AtomicReference used for head, tail and
- * cleanMe, to alleviate contention across threads CASing one vs
- * the other.
- */
- static final class PaddedAtomicReference<T> extends AtomicReference<T> {
- // enough padding for 64bytes with 4byte refs
- Object p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, pa, pb, pc, pd, pe;
- PaddedAtomicReference(T r) { super(r); }
+ /**
+ * Returns true if a node with the given mode cannot be
+ * appended to this node because this node is unmatched and
+ * has opposite data mode.
+ */
+ final boolean cannotPrecede(boolean haveData) {
+ boolean d = isData;
+ Object x;
+ return d != haveData && (x = item) != this && (x != null) == d;
+ }
+
+ /**
+ * Tries to artificially match a data node -- used by remove.
+ */
+ final boolean tryMatchData() {
+ // assert isData;
+ Object x = item;
+ if (x != null && x != this && casItem(x, null)) {
+ LockSupport.unpark(waiter);
+ return true;
+ }
+ return false;
+ }
+
+ private static final long serialVersionUID = -3375979862319811754L;
+
+ // Unsafe mechanics
+ private static final sun.misc.Unsafe UNSAFE;
+ private static final long itemOffset;
+ private static final long nextOffset;
+ private static final long waiterOffset;
+ static {
+ try {
+ UNSAFE = getUnsafe();
+ Class<?> k = Node.class;
+ itemOffset = UNSAFE.objectFieldOffset
+ (k.getDeclaredField("item"));
+ nextOffset = UNSAFE.objectFieldOffset
+ (k.getDeclaredField("next"));
+ waiterOffset = UNSAFE.objectFieldOffset
+ (k.getDeclaredField("waiter"));
+ } catch (Exception e) {
+ throw new Error(e);
+ }
+ }
}
+ /** head of the queue; null until first enqueue */
+ transient volatile Node head;
- /** head of the queue */
- private transient final PaddedAtomicReference<QNode> head;
- /** tail of the queue */
- private transient final PaddedAtomicReference<QNode> tail;
+ /** tail of the queue; null until first append */
+ private transient volatile Node tail;
- /**
- * Reference to a cancelled node that might not yet have been
- * unlinked from queue because it was the last inserted node
- * when it cancelled.
- */
- private transient final PaddedAtomicReference<QNode> cleanMe;
+ /** The number of apparent failures to unsplice removed nodes */
+ private transient volatile int sweepVotes;
- /**
- * Tries to cas nh as new head; if successful, unlink
- * old head's next node to avoid garbage retention.
+ // CAS methods for fields
+ private boolean casTail(Node cmp, Node val) {
+ return UNSAFE.compareAndSwapObject(this, tailOffset, cmp, val);
+ }
+
+ private boolean casHead(Node cmp, Node val) {
+ return UNSAFE.compareAndSwapObject(this, headOffset, cmp, val);
+ }
+
+ private boolean casSweepVotes(int cmp, int val) {
+ return UNSAFE.compareAndSwapInt(this, sweepVotesOffset, cmp, val);
+ }
+
+ /*
+ * Possible values for "how" argument in xfer method.
*/
- private boolean advanceHead(QNode h, QNode nh) {
- if (h == head.get() && head.compareAndSet(h, nh)) {
- h.clearNext(); // forget old next
- return true;
- }
- return false;
+ private static final int NOW = 0; // for untimed poll, tryTransfer
+ private static final int ASYNC = 1; // for offer, put, add
+ private static final int SYNC = 2; // for transfer, take
+ private static final int TIMED = 3; // for timed poll, tryTransfer
+
+ @SuppressWarnings("unchecked")
+ static <E> E cast(Object item) {
+ // assert item == null || item.getClass() != Node.class;
+ return (E) item;
}
/**
- * Puts or takes an item. Used for most queue operations (except
- * poll() and tryTransfer()). See the similar code in
- * SynchronousQueue for detailed explanation.
+ * Implements all queuing methods. See above for explanation.
*
- * @param e the item or if null, signifies that this is a take
- * @param mode the wait mode: NOWAIT, TIMEOUT, WAIT
- * @param nanos timeout in nanosecs, used only if mode is TIMEOUT
- * @return an item, or null on failure
+ * @param e the item or null for take
+ * @param haveData true if this is a put, else a take
+ * @param how NOW, ASYNC, SYNC, or TIMED
+ * @param nanos timeout in nanosecs, used only if mode is TIMED
+ * @return an item if matched, else e
+ * @throws NullPointerException if haveData mode but e is null
*/
- private Object xfer(Object e, int mode, long nanos) {
- boolean isData = (e != null);
- QNode s = null;
- final PaddedAtomicReference<QNode> head = this.head;
- final PaddedAtomicReference<QNode> tail = this.tail;
-
- for (;;) {
- QNode t = tail.get();
- QNode h = head.get();
-
- if (t != null && (t == h || t.isData == isData)) {
- if (s == null)
- s = new QNode(e, isData);
- QNode last = t.next;
- if (last != null) {
- if (t == tail.get())
- tail.compareAndSet(t, last);
- }
- else if (t.casNext(null, s)) {
- tail.compareAndSet(t, s);
- return awaitFulfill(t, s, e, mode, nanos);
+ private E xfer(E e, boolean haveData, int how, long nanos) {
+ if (haveData && (e == null))
+ throw new NullPointerException();
+ Node s = null; // the node to append, if needed
+
+ retry:
+ for (;;) { // restart on append race
+
+ for (Node h = head, p = h; p != null;) { // find & match first node
+ boolean isData = p.isData;
+ Object item = p.item;
+ if (item != p && (item != null) == isData) { // unmatched
+ if (isData == haveData) // can't match
+ break;
+ if (p.casItem(item, e)) { // match
+ for (Node q = p; q != h;) {
+ Node n = q.next; // update by 2 unless singleton
+ if (head == h && casHead(h, n == null ? q : n)) {
+ h.forgetNext();
+ break;
+ } // advance and retry
+ if ((h = head) == null ||
+ (q = h.next) == null || !q.isMatched())
+ break; // unless slack < 2
+ }
+ LockSupport.unpark(p.waiter);
+ return LinkedTransferQueue.<E>cast(item);
+ }
}
+ Node n = p.next;
+ p = (p != n) ? n : (h = head); // Use head if p offlist
}
- else if (h != null) {
- QNode first = h.next;
- if (t == tail.get() && first != null &&
- advanceHead(h, first)) {
- Object x = first.get();
- if (x != first && first.compareAndSet(x, e)) {
- LockSupport.unpark(first.waiter);
- return isData? e : x;
- }
- }
+ if (how != NOW) { // No matches available
+ if (s == null)
+ s = new Node(e, haveData);
+ Node pred = tryAppend(s, haveData);
+ if (pred == null)
+ continue retry; // lost race vs opposite mode
+ if (how != ASYNC)
+ return awaitMatch(s, pred, e, (how == TIMED), nanos);
}
+ return e; // not waiting
}
}
-
/**
- * Version of xfer for poll() and tryTransfer, which
- * simplifies control paths both here and in xfer.
+ * Tries to append node s as tail.
+ *
+ * @param s the node to append
+ * @param haveData true if appending in data mode
+ * @return null on failure due to losing race with append in
+ * different mode, else s's predecessor, or s itself if no
+ * predecessor
*/
- private Object fulfill(Object e) {
- boolean isData = (e != null);
- final PaddedAtomicReference<QNode> head = this.head;
- final PaddedAtomicReference<QNode> tail = this.tail;
-
- for (;;) {
- QNode t = tail.get();
- QNode h = head.get();
-
- if (t != null && (t == h || t.isData == isData)) {
- QNode last = t.next;
- if (t == tail.get()) {
- if (last != null)
- tail.compareAndSet(t, last);
- else
- return null;
- }
+ private Node tryAppend(Node s, boolean haveData) {
+ for (Node t = tail, p = t;;) { // move p to last node and append
+ Node n, u; // temps for reads of next & tail
+ if (p == null && (p = head) == null) {
+ if (casHead(null, s))
+ return s; // initialize
}
- else if (h != null) {
- QNode first = h.next;
- if (t == tail.get() &&
- first != null &&
- advanceHead(h, first)) {
- Object x = first.get();
- if (x != first && first.compareAndSet(x, e)) {
- LockSupport.unpark(first.waiter);
- return isData? e : x;
- }
+ else if (p.cannotPrecede(haveData))
+ return null; // lost race vs opposite mode
+ else if ((n = p.next) != null) // not last; keep traversing
+ p = p != t && t != (u = tail) ? (t = u) : // stale tail
+ (p != n) ? n : null; // restart if off list
+ else if (!p.casNext(null, s))
+ p = p.next; // re-read on CAS failure
+ else {
+ if (p != t) { // update if slack now >= 2
+ while ((tail != t || !casTail(t, s)) &&
+ (t = tail) != null &&
+ (s = t.next) != null && // advance and retry
+ (s = s.next) != null && s != t);
}
+ return p;
}
}
}
/**
- * Spins/blocks until node s is fulfilled or caller gives up,
- * depending on wait mode.
+ * Spins/yields/blocks until node s is matched or caller gives up.
*
- * @param pred the predecessor of waiting node
* @param s the waiting node
+ * @param pred the predecessor of s, or s itself if it has no
+ * predecessor, or null if unknown (the null case does not occur
+ * in any current calls but may in possible future extensions)
* @param e the comparison value for checking match
- * @param mode mode
- * @param nanos timeout value
- * @return matched item, or s if cancelled
+ * @param timed if true, wait only until timeout elapses
+ * @param nanos timeout in nanosecs, used only if timed is true
+ * @return matched item, or e if unmatched on interrupt or timeout
*/
- private Object awaitFulfill(QNode pred, QNode s, Object e,
- int mode, long nanos) {
- if (mode == NOWAIT)
- return null;
-
- long lastTime = (mode == TIMEOUT)? System.nanoTime() : 0;
+ private E awaitMatch(Node s, Node pred, E e, boolean timed, long nanos) {
+ long lastTime = timed ? System.nanoTime() : 0L;
Thread w = Thread.currentThread();
- int spins = -1; // set to desired spin count below
+ int spins = -1; // initialized after first item and cancel checks
+ ThreadLocalRandom randomYields = null; // bound if needed
+
for (;;) {
- if (w.isInterrupted())
- s.compareAndSet(e, s);
- Object x = s.get();
- if (x != e) { // Node was matched or cancelled
- advanceHead(pred, s); // unlink if head
- if (x == s) { // was cancelled
- clean(pred, s);
- return null;
- }
- else if (x != null) {
- s.set(s); // avoid garbage retention
- return x;
- }
- else
- return e;
+ Object item = s.item;
+ if (item != e) { // matched
+ // assert item != s;
+ s.forgetContents(); // avoid garbage
+ return LinkedTransferQueue.<E>cast(item);
}
- if (mode == TIMEOUT) {
- long now = System.nanoTime();
- nanos -= now - lastTime;
- lastTime = now;
- if (nanos <= 0) {
- s.compareAndSet(e, s); // try to cancel
- continue;
- }
+ if ((w.isInterrupted() || (timed && nanos <= 0)) &&
+ s.casItem(e, s)) { // cancel
+ unsplice(pred, s);
+ return e;
}
- if (spins < 0) {
- QNode h = head.get(); // only spin if at head
- spins = ((h != null && h.next == s) ?
- (mode == TIMEOUT?
- maxTimedSpins : maxUntimedSpins) : 0);
+
+ if (spins < 0) { // establish spins at/near front
+ if ((spins = spinsFor(pred, s.isData)) > 0)
+ randomYields = ThreadLocalRandom.current();
}
- if (spins > 0)
+ else if (spins > 0) { // spin
--spins;
- else if (s.waiter == null)
- s.waiter = w;
- else if (mode != TIMEOUT) {
- LockSupport.park(this);
- s.waiter = null;
- spins = -1;
+ if (randomYields.nextInt(CHAINED_SPINS) == 0)
+ Thread.yield(); // occasionally yield
+ }
+ else if (s.waiter == null) {
+ s.waiter = w; // request unpark then recheck
}
- else if (nanos > spinForTimeoutThreshold) {
- LockSupport.parkNanos(this, nanos);
- s.waiter = null;
- spins = -1;
+ else if (timed) {
+ long now = System.nanoTime();
+ if ((nanos -= now - lastTime) > 0)
+ LockSupport.parkNanos(this, nanos);
+ lastTime = now;
+ }
+ else {
+ LockSupport.park(this);
}
}
}
/**
- * Returns validated tail for use in cleaning methods.
+ * Returns spin/yield value for a node with given predecessor and
+ * data mode. See above for explanation.
*/
- private QNode getValidatedTail() {
- for (;;) {
- QNode h = head.get();
- QNode first = h.next;
- if (first != null && first.next == first) { // help advance
- advanceHead(h, first);
- continue;
- }
- QNode t = tail.get();
- QNode last = t.next;
- if (t == tail.get()) {
- if (last != null)
- tail.compareAndSet(t, last); // help advance
- else
- return t;
+ private static int spinsFor(Node pred, boolean haveData) {
+ if (MP && pred != null) {
+ if (pred.isData != haveData) // phase change
+ return FRONT_SPINS + CHAINED_SPINS;
+ if (pred.isMatched()) // probably at front
+ return FRONT_SPINS;
+ if (pred.waiter == null) // pred apparently spinning
+ return CHAINED_SPINS;
+ }
+ return 0;
+ }
+
+ /* -------------- Traversal methods -------------- */
+
+ /**
+ * Returns the successor of p, or the head node if p.next has been
+ * linked to self, which will only be true if traversing with a
+ * stale pointer that is now off the list.
+ */
+ final Node succ(Node p) {
+ Node next = p.next;
+ return (p == next) ? head : next;
+ }
+
+ /**
+ * Returns the first unmatched node of the given mode, or null if
+ * none. Used by methods isEmpty, hasWaitingConsumer.
+ */
+ private Node firstOfMode(boolean isData) {
+ for (Node p = head; p != null; p = succ(p)) {
+ if (!p.isMatched())
+ return (p.isData == isData) ? p : null;
+ }
+ return null;
+ }
+
+ /**
+ * Returns the item in the first unmatched node with isData; or
+ * null if none. Used by peek.
+ */
+ private E firstDataItem() {
+ for (Node p = head; p != null; p = succ(p)) {
+ Object item = p.item;
+ if (p.isData) {
+ if (item != null && item != p)
+ return LinkedTransferQueue.<E>cast(item);
}
+ else if (item == null)
+ return null;
}
+ return null;
}
/**
- * Gets rid of cancelled node s with original predecessor pred.
- *
- * @param pred predecessor of cancelled node
- * @param s the cancelled node
+ * Traverses and counts unmatched nodes of the given mode.
+ * Used by methods size and getWaitingConsumerCount.
*/
- private void clean(QNode pred, QNode s) {
- Thread w = s.waiter;
- if (w != null) { // Wake up thread
- s.waiter = null;
- if (w != Thread.currentThread())
- LockSupport.unpark(w);
+ private int countOfMode(boolean data) {
+ int count = 0;
+ for (Node p = head; p != null; ) {
+ if (!p.isMatched()) {
+ if (p.isData != data)
+ return 0;
+ if (++count == Integer.MAX_VALUE) // saturated
+ break;
+ }
+ Node n = p.next;
+ if (n != p)
+ p = n;
+ else {
+ count = 0;
+ p = head;
+ }
}
+ return count;
+ }
- if (pred == null)
- return;
+ final class Itr implements Iterator<E> {
+ private Node nextNode; // next node to return item for
+ private E nextItem; // the corresponding item
+ private Node lastRet; // last returned node, to support remove
+ private Node lastPred; // predecessor to unlink lastRet
- /*
- * At any given time, exactly one node on list cannot be
- * deleted -- the last inserted node. To accommodate this, if
- * we cannot delete s, we save its predecessor as "cleanMe",
- * processing the previously saved version first. At least one
- * of node s or the node previously saved can always be
- * processed, so this always terminates.
+ /**
+ * Moves to next node after prev, or first node if prev null.
*/
- while (pred.next == s) {
- QNode oldpred = reclean(); // First, help get rid of cleanMe
- QNode t = getValidatedTail();
- if (s != t) { // If not tail, try to unsplice
- QNode sn = s.next; // s.next == s means s already off list
- if (sn == s || pred.casNext(s, sn))
+ private void advance(Node prev) {
+ /*
+ * To track and avoid buildup of deleted nodes in the face
+ * of calls to both Queue.remove and Itr.remove, we must
+ * include variants of unsplice and sweep upon each
+ * advance: Upon Itr.remove, we may need to catch up links
+ * from lastPred, and upon other removes, we might need to
+ * skip ahead from stale nodes and unsplice deleted ones
+ * found while advancing.
+ */
+
+ Node r, b; // reset lastPred upon possible deletion of lastRet
+ if ((r = lastRet) != null && !r.isMatched())
+ lastPred = r; // next lastPred is old lastRet
+ else if ((b = lastPred) == null || b.isMatched())
+ lastPred = null; // at start of list
+ else {
+ Node s, n; // help with removal of lastPred.next
+ while ((s = b.next) != null &&
+ s != b && s.isMatched() &&
+ (n = s.next) != null && n != s)
+ b.casNext(s, n);
+ }
+
+ this.lastRet = prev;
+
+ for (Node p = prev, s, n;;) {
+ s = (p == null) ? head : p.next;
+ if (s == null)
+ break;
+ else if (s == p) {
+ p = null;
+ continue;
+ }
+ Object item = s.item;
+ if (s.isData) {
+ if (item != null && item != s) {
+ nextItem = LinkedTransferQueue.<E>cast(item);
+ nextNode = s;
+ return;
+ }
+ }
+ else if (item == null)
+ break;
+ // assert s.isMatched();
+ if (p == null)
+ p = s;
+ else if ((n = s.next) == null)
break;
+ else if (s == n)
+ p = null;
+ else
+ p.casNext(s, n);
}
- else if (oldpred == pred || // Already saved
- (oldpred == null && cleanMe.compareAndSet(null, pred)))
- break; // Postpone cleaning
+ nextNode = null;
+ nextItem = null;
+ }
+
+ Itr() {
+ advance(null);
+ }
+
+ public final boolean hasNext() {
+ return nextNode != null;
+ }
+
+ public final E next() {
+ Node p = nextNode;
+ if (p == null) throw new NoSuchElementException();
+ E e = nextItem;
+ advance(p);
+ return e;
+ }
+
+ public final void remove() {
+ final Node lastRet = this.lastRet;
+ if (lastRet == null)
+ throw new IllegalStateException();
+ this.lastRet = null;
+ if (lastRet.tryMatchData())
+ unsplice(lastPred, lastRet);
}
}
+ /* -------------- Removal methods -------------- */
+
/**
- * Tries to unsplice the cancelled node held in cleanMe that was
- * previously uncleanable because it was at tail.
+ * Unsplices (now or later) the given deleted/cancelled node with
+ * the given predecessor.
*
- * @return current cleanMe node (or null)
+ * @param pred a node that was at one time known to be the
+ * predecessor of s, or null or s itself if s is/was at head
+ * @param s the node to be unspliced
*/
- private QNode reclean() {
+ final void unsplice(Node pred, Node s) {
+ s.forgetContents(); // forget unneeded fields
/*
- * cleanMe is, or at one time was, predecessor of cancelled
- * node s that was the tail so could not be unspliced. If s
- * is no longer the tail, try to unsplice if necessary and
- * make cleanMe slot available. This differs from similar
- * code in clean() because we must check that pred still
- * points to a cancelled node that must be unspliced -- if
- * not, we can (must) clear cleanMe without unsplicing.
- * This can loop only due to contention on casNext or
- * clearing cleanMe.
+ * See above for rationale. Briefly: if pred still points to
+ * s, try to unlink s. If s cannot be unlinked, because it is
+ * trailing node or pred might be unlinked, and neither pred
+ * nor s are head or offlist, add to sweepVotes, and if enough
+ * votes have accumulated, sweep.
*/
- QNode pred;
- while ((pred = cleanMe.get()) != null) {
- QNode t = getValidatedTail();
- QNode s = pred.next;
- if (s != t) {
- QNode sn;
- if (s == null || s == pred || s.get() != s ||
- (sn = s.next) == s || pred.casNext(s, sn))
- cleanMe.compareAndSet(pred, null);
+ if (pred != null && pred != s && pred.next == s) {
+ Node n = s.next;
+ if (n == null ||
+ (n != s && pred.casNext(s, n) && pred.isMatched())) {
+ for (;;) { // check if at, or could be, head
+ Node h = head;
+ if (h == pred || h == s || h == null)
+ return; // at head or list empty
+ if (!h.isMatched())
+ break;
+ Node hn = h.next;
+ if (hn == null)
+ return; // now empty
+ if (hn != h && casHead(h, hn))
+ h.forgetNext(); // advance head
+ }
+ if (pred.next != pred && s.next != s) { // recheck if offlist
+ for (;;) { // sweep now if enough votes
+ int v = sweepVotes;
+ if (v < SWEEP_THRESHOLD) {
+ if (casSweepVotes(v, v + 1))
+ break;
+ }
+ else if (casSweepVotes(v, 0)) {
+ sweep();
+ break;
+ }
+ }
+ }
}
- else // s is still tail; cannot clean
+ }
+ }
+
+ /**
+ * Unlinks matched (typically cancelled) nodes encountered in a
+ * traversal from head.
+ */
+ private void sweep() {
+ for (Node p = head, s, n; p != null && (s = p.next) != null; ) {
+ if (!s.isMatched())
+ // Unmatched nodes are never self-linked
+ p = s;
+ else if ((n = s.next) == null) // trailing node is pinned
break;
+ else if (s == n) // stale
+ // No need to also check for p == s, since that implies s == n
+ p = head;
+ else
+ p.casNext(s, n);
}
- return pred;
}
/**
+ * Main implementation of remove(Object)
+ */
+ private boolean findAndRemove(Object e) {
+ if (e != null) {
+ for (Node pred = null, p = head; p != null; ) {
+ Object item = p.item;
+ if (p.isData) {
+ if (item != null && item != p && e.equals(item) &&
+ p.tryMatchData()) {
+ unsplice(pred, p);
+ return true;
+ }
+ }
+ else if (item == null)
+ break;
+ pred = p;
+ if ((p = p.next) == pred) { // stale
+ pred = null;
+ p = head;
+ }
+ }
+ }
+ return false;
+ }
+
+
+ /**
* Creates an initially empty {@code LinkedTransferQueue}.
*/
public LinkedTransferQueue() {
- QNode dummy = new QNode(null, false);
- head = new PaddedAtomicReference<QNode>(dummy);
- tail = new PaddedAtomicReference<QNode>(dummy);
- cleanMe = new PaddedAtomicReference<QNode>(null);
}
/**
@@ -435,252 +1000,200 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
addAll(c);
}
- public void put(E e) throws InterruptedException {
- if (e == null) throw new NullPointerException();
- if (Thread.interrupted()) throw new InterruptedException();
- xfer(e, NOWAIT, 0);
+ /**
+ * Inserts the specified element at the tail of this queue.
+ * As the queue is unbounded, this method will never block.
+ *
+ * @throws NullPointerException if the specified element is null
+ */
+ public void put(E e) {
+ xfer(e, true, ASYNC, 0);
}
- public boolean offer(E e, long timeout, TimeUnit unit)
- throws InterruptedException {
- if (e == null) throw new NullPointerException();
- if (Thread.interrupted()) throw new InterruptedException();
- xfer(e, NOWAIT, 0);
+ /**
+ * Inserts the specified element at the tail of this queue.
+ * As the queue is unbounded, this method will never block or
+ * return {@code false}.
+ *
+ * @return {@code true} (as specified by
+ * {@link java.util.concurrent.BlockingQueue#offer(Object,long,TimeUnit)
+ * BlockingQueue.offer})
+ * @throws NullPointerException if the specified element is null
+ */
+ public boolean offer(E e, long timeout, TimeUnit unit) {
+ xfer(e, true, ASYNC, 0);
return true;
}
+ /**
+ * Inserts the specified element at the tail of this queue.
+ * As the queue is unbounded, this method will never return {@code false}.
+ *
+ * @return {@code true} (as specified by {@link Queue#offer})
+ * @throws NullPointerException if the specified element is null
+ */
public boolean offer(E e) {
- if (e == null) throw new NullPointerException();
- xfer(e, NOWAIT, 0);
+ xfer(e, true, ASYNC, 0);
return true;
}
+ /**
+ * Inserts the specified element at the tail of this queue.
+ * As the queue is unbounded, this method will never throw
+ * {@link IllegalStateException} or return {@code false}.
+ *
+ * @return {@code true} (as specified by {@link Collection#add})
+ * @throws NullPointerException if the specified element is null
+ */
public boolean add(E e) {
- if (e == null) throw new NullPointerException();
- xfer(e, NOWAIT, 0);
+ xfer(e, true, ASYNC, 0);
return true;
}
+ /**
+ * Transfers the element to a waiting consumer immediately, if possible.
+ *
+ * <p>More precisely, transfers the specified element immediately
+ * if there exists a consumer already waiting to receive it (in
+ * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
+ * otherwise returning {@code false} without enqueuing the element.
+ *
+ * @throws NullPointerException if the specified element is null
+ */
+ public boolean tryTransfer(E e) {
+ return xfer(e, true, NOW, 0) == null;
+ }
+
+ /**
+ * Transfers the element to a consumer, waiting if necessary to do so.
+ *
+ * <p>More precisely, transfers the specified element immediately
+ * if there exists a consumer already waiting to receive it (in
+ * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
+ * else inserts the specified element at the tail of this queue
+ * and waits until the element is received by a consumer.
+ *
+ * @throws NullPointerException if the specified element is null
+ */
public void transfer(E e) throws InterruptedException {
- if (e == null) throw new NullPointerException();
- if (xfer(e, WAIT, 0) == null) {
- Thread.interrupted();
+ if (xfer(e, true, SYNC, 0) != null) {
+ Thread.interrupted(); // failure possible only due to interrupt
throw new InterruptedException();
}
}
+ /**
+ * Transfers the element to a consumer if it is possible to do so
+ * before the timeout elapses.
+ *
+ * <p>More precisely, transfers the specified element immediately
+ * if there exists a consumer already waiting to receive it (in
+ * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
+ * else inserts the specified element at the tail of this queue
+ * and waits until the element is received by a consumer,
+ * returning {@code false} if the specified wait time elapses
+ * before the element can be transferred.
+ *
+ * @throws NullPointerException if the specified element is null
+ */
public boolean tryTransfer(E e, long timeout, TimeUnit unit)
throws InterruptedException {
- if (e == null) throw new NullPointerException();
- if (xfer(e, TIMEOUT, unit.toNanos(timeout)) != null)
+ if (xfer(e, true, TIMED, unit.toNanos(timeout)) == null)
return true;
if (!Thread.interrupted())
return false;
throw new InterruptedException();
}
- public boolean tryTransfer(E e) {
- if (e == null) throw new NullPointerException();
- return fulfill(e) != null;
- }
-
public E take() throws InterruptedException {
- Object e = xfer(null, WAIT, 0);
+ E e = xfer(null, false, SYNC, 0);
if (e != null)
- return (E)e;
+ return e;
Thread.interrupted();
throw new InterruptedException();
}
public E poll(long timeout, TimeUnit unit) throws InterruptedException {
- Object e = xfer(null, TIMEOUT, unit.toNanos(timeout));
+ E e = xfer(null, false, TIMED, unit.toNanos(timeout));
if (e != null || !Thread.interrupted())
- return (E)e;
+ return e;
throw new InterruptedException();
}
public E poll() {
- return (E)fulfill(null);
+ return xfer(null, false, NOW, 0);
}
+ /**
+ * @throws NullPointerException {@inheritDoc}
+ * @throws IllegalArgumentException {@inheritDoc}
+ */
public int drainTo(Collection<? super E> c) {
if (c == null)
throw new NullPointerException();
if (c == this)
throw new IllegalArgumentException();
int n = 0;
- E e;
- while ( (e = poll()) != null) {
+ for (E e; (e = poll()) != null;) {
c.add(e);
++n;
}
return n;
}
+ /**
+ * @throws NullPointerException {@inheritDoc}
+ * @throws IllegalArgumentException {@inheritDoc}
+ */
public int drainTo(Collection<? super E> c, int maxElements) {
if (c == null)
throw new NullPointerException();
if (c == this)
throw new IllegalArgumentException();
int n = 0;
- E e;
- while (n < maxElements && (e = poll()) != null) {
+ for (E e; n < maxElements && (e = poll()) != null;) {
c.add(e);
++n;
}
return n;
}
- // Traversal-based methods
-
/**
- * Returns head after performing any outstanding helping steps.
+ * Returns an iterator over the elements in this queue in proper sequence.
+ * The elements will be returned in order from first (head) to last (tail).
+ *
+ * <p>The returned iterator is a "weakly consistent" iterator that
+ * will never throw {@link java.util.ConcurrentModificationException
+ * ConcurrentModificationException}, and guarantees to traverse
+ * elements as they existed upon construction of the iterator, and
+ * may (but is not guaranteed to) reflect any modifications
+ * subsequent to construction.
+ *
+ * @return an iterator over the elements in this queue in proper sequence
*/
- private QNode traversalHead() {
- for (;;) {
- QNode t = tail.get();
- QNode h = head.get();
- if (h != null && t != null) {
- QNode last = t.next;
- QNode first = h.next;
- if (t == tail.get()) {
- if (last != null)
- tail.compareAndSet(t, last);
- else if (first != null) {
- Object x = first.get();
- if (x == first)
- advanceHead(h, first);
- else
- return h;
- }
- else
- return h;
- }
- }
- reclean();
- }
- }
-
-
public Iterator<E> iterator() {
return new Itr();
}
- /**
- * Iterators. Basic strategy is to traverse list, treating
- * non-data (i.e., request) nodes as terminating list.
- * Once a valid data node is found, the item is cached
- * so that the next call to next() will return it even
- * if subsequently removed.
- */
- class Itr implements Iterator<E> {
- QNode next; // node to return next
- QNode pnext; // predecessor of next
- QNode snext; // successor of next
- QNode curr; // last returned node, for remove()
- QNode pcurr; // predecessor of curr, for remove()
- E nextItem; // Cache of next item, once commited to in next
-
- Itr() {
- findNext();
- }
-
- /**
- * Ensures next points to next valid node, or null if none.
- */
- void findNext() {
- for (;;) {
- QNode pred = pnext;
- QNode q = next;
- if (pred == null || pred == q) {
- pred = traversalHead();
- q = pred.next;
- }
- if (q == null || !q.isData) {
- next = null;
- return;
- }
- Object x = q.get();
- QNode s = q.next;
- if (x != null && q != x && q != s) {
- nextItem = (E)x;
- snext = s;
- pnext = pred;
- next = q;
- return;
- }
- pnext = q;
- next = s;
- }
- }
-
- public boolean hasNext() {
- return next != null;
- }
-
- public E next() {
- if (next == null) throw new NoSuchElementException();
- pcurr = pnext;
- curr = next;
- pnext = next;
- next = snext;
- E x = nextItem;
- findNext();
- return x;
- }
-
- public void remove() {
- QNode p = curr;
- if (p == null)
- throw new IllegalStateException();
- Object x = p.get();
- if (x != null && x != p && p.compareAndSet(x, p))
- clean(pcurr, p);
- }
- }
-
public E peek() {
- for (;;) {
- QNode h = traversalHead();
- QNode p = h.next;
- if (p == null)
- return null;
- Object x = p.get();
- if (p != x) {
- if (!p.isData)
- return null;
- if (x != null)
- return (E)x;
- }
- }
+ return firstDataItem();
}
+ /**
+ * Returns {@code true} if this queue contains no elements.
+ *
+ * @return {@code true} if this queue contains no elements
+ */
public boolean isEmpty() {
- for (;;) {
- QNode h = traversalHead();
- QNode p = h.next;
- if (p == null)
- return true;
- Object x = p.get();
- if (p != x) {
- if (!p.isData)
- return true;
- if (x != null)
- return false;
- }
+ for (Node p = head; p != null; p = succ(p)) {
+ if (!p.isMatched())
+ return !p.isData;
}
+ return true;
}
public boolean hasWaitingConsumer() {
- for (;;) {
- QNode h = traversalHead();
- QNode p = h.next;
- if (p == null)
- return false;
- Object x = p.get();
- if (p != x)
- return !p.isData;
- }
+ return firstOfMode(false) != null;
}
/**
@@ -696,58 +1209,64 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
* @return the number of elements in this queue
*/
public int size() {
- int count = 0;
- QNode h = traversalHead();
- for (QNode p = h.next; p != null && p.isData; p = p.next) {
- Object x = p.get();
- if (x != null && x != p) {
- if (++count == Integer.MAX_VALUE) // saturated
- break;
- }
- }
- return count;
+ return countOfMode(true);
}
public int getWaitingConsumerCount() {
- int count = 0;
- QNode h = traversalHead();
- for (QNode p = h.next; p != null && !p.isData; p = p.next) {
- if (p.get() == null) {
- if (++count == Integer.MAX_VALUE)
- break;
- }
- }
- return count;
+ return countOfMode(false);
}
- public int remainingCapacity() {
- return Integer.MAX_VALUE;
+ /**
+ * Removes a single instance of the specified element from this queue,
+ * if it is present. More formally, removes an element {@code e} such
+ * that {@code o.equals(e)}, if this queue contains one or more such
+ * elements.
+ * Returns {@code true} if this queue contained the specified element
+ * (or equivalently, if this queue changed as a result of the call).
+ *
+ * @param o element to be removed from this queue, if present
+ * @return {@code true} if this queue changed as a result of the call
+ */
+ public boolean remove(Object o) {
+ return findAndRemove(o);
}
- public boolean remove(Object o) {
- if (o == null)
- return false;
- for (;;) {
- QNode pred = traversalHead();
- for (;;) {
- QNode q = pred.next;
- if (q == null || !q.isData)
- return false;
- if (q == pred) // restart
- break;
- Object x = q.get();
- if (x != null && x != q && o.equals(x) &&
- q.compareAndSet(x, q)) {
- clean(pred, q);
+ /**
+ * Returns {@code true} if this queue contains the specified element.
+ * More formally, returns {@code true} if and only if this queue contains
+ * at least one element {@code e} such that {@code o.equals(e)}.
+ *
+ * @param o object to be checked for containment in this queue
+ * @return {@code true} if this queue contains the specified element
+ */
+ public boolean contains(Object o) {
+ if (o == null) return false;
+ for (Node p = head; p != null; p = succ(p)) {
+ Object item = p.item;
+ if (p.isData) {
+ if (item != null && item != p && o.equals(item))
return true;
- }
- pred = q;
}
+ else if (item == null)
+ break;
}
+ return false;
+ }
+
+ /**
+ * Always returns {@code Integer.MAX_VALUE} because a
+ * {@code LinkedTransferQueue} is not capacity constrained.
+ *
+ * @return {@code Integer.MAX_VALUE} (as specified by
+ * {@link java.util.concurrent.BlockingQueue#remainingCapacity()
+ * BlockingQueue.remainingCapacity})
+ */
+ public int remainingCapacity() {
+ return Integer.MAX_VALUE;
}
/**
- * Save the state to a stream (that is, serialize it).
+ * Saves the state to a stream (that is, serializes it).
*
* @serialData All of the elements (each an {@code E}) in
* the proper order, followed by a null
@@ -763,16 +1282,17 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
}
/**
- * Reconstitute the Queue instance from a stream (that is,
- * deserialize it).
+ * Reconstitutes the Queue instance from a stream (that is,
+ * deserializes it).
+ *
* @param s the stream
*/
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
- resetHeadAndTail();
for (;;) {
- E item = (E)s.readObject();
+ @SuppressWarnings("unchecked")
+ E item = (E) s.readObject();
if (item == null)
break;
else
@@ -780,61 +1300,36 @@ public class LinkedTransferQueue<E> extends AbstractQueue<E>
}
}
+ // Unsafe mechanics
- // Support for resetting head/tail while deserializing
- private void resetHeadAndTail() {
- QNode dummy = new QNode(null, false);
- _unsafe.putObjectVolatile(this, headOffset,
- new PaddedAtomicReference<QNode>(dummy));
- _unsafe.putObjectVolatile(this, tailOffset,
- new PaddedAtomicReference<QNode>(dummy));
- _unsafe.putObjectVolatile(this, cleanMeOffset,
- new PaddedAtomicReference<QNode>(null));
- }
-
- // Temporary Unsafe mechanics for preliminary release
- private static Unsafe getUnsafe() throws Throwable {
- try {
- return Unsafe.getUnsafe();
- } catch (SecurityException se) {
- try {
- return java.security.AccessController.doPrivileged
- (new java.security.PrivilegedExceptionAction<Unsafe>() {
- public Unsafe run() throws Exception {
- return getUnsafePrivileged();
- }});
- } catch (java.security.PrivilegedActionException e) {
- throw e.getCause();
- }
- }
- }
-
- private static Unsafe getUnsafePrivileged()
- throws NoSuchFieldException, IllegalAccessException {
- Field f = Unsafe.class.getDeclaredField("theUnsafe");
- f.setAccessible(true);
- return (Unsafe) f.get(null);
- }
-
- private static long fieldOffset(String fieldName)
- throws NoSuchFieldException {
- return _unsafe.objectFieldOffset
- (LinkedTransferQueue.class.getDeclaredField(fieldName));
- }
-
- private static final Unsafe _unsafe;
+ private static final sun.misc.Unsafe UNSAFE;
private static final long headOffset;
private static final long tailOffset;
- private static final long cleanMeOffset;
+ private static final long sweepVotesOffset;
static {
try {
- _unsafe = getUnsafe();
- headOffset = fieldOffset("head");
- tailOffset = fieldOffset("tail");
- cleanMeOffset = fieldOffset("cleanMe");
- } catch (Throwable e) {
- throw new RuntimeException("Could not initialize intrinsics", e);
+ UNSAFE = getUnsafe();
+ Class<?> k = LinkedTransferQueue.class;
+ headOffset = UNSAFE.objectFieldOffset
+ (k.getDeclaredField("head"));
+ tailOffset = UNSAFE.objectFieldOffset
+ (k.getDeclaredField("tail"));
+ sweepVotesOffset = UNSAFE.objectFieldOffset
+ (k.getDeclaredField("sweepVotes"));
+ } catch (Exception e) {
+ throw new Error(e);
}
}
+ /**
+ * Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
+ * Replace with a simple call to Unsafe.getUnsafe when integrating
+ * into a jdk.
+ *
+ * @return a sun.misc.Unsafe
+ */
+ static sun.misc.Unsafe getUnsafe() {
+ return scala.concurrent.util.Unsafe.instance;
+ }
+
}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java b/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
index 2d36f7e..1e7cdd9 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/RecursiveAction.java
@@ -1,64 +1,73 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
/**
- * Recursive resultless ForkJoinTasks. This class establishes
- * conventions to parameterize resultless actions as <tt>Void</tt>
- * ForkJoinTasks. Because <tt>null</tt> is the only valid value of
- * <tt>Void</tt>, methods such as join always return <tt>null</tt>
- * upon completion.
+ * A recursive resultless {@link ForkJoinTask}. This class
+ * establishes conventions to parameterize resultless actions as
+ * {@code Void} {@code ForkJoinTask}s. Because {@code null} is the
+ * only valid value of type {@code Void}, methods such as {@code join}
+ * always return {@code null} upon completion.
*
- * <p><b>Sample Usages.</b> Here is a sketch of a ForkJoin sort that
- * sorts a given <tt>long[]</tt> array:
+ * <p><b>Sample Usages.</b> Here is a simple but complete ForkJoin
+ * sort that sorts a given {@code long[]} array:
*
- * <pre>
- * class SortTask extends RecursiveAction {
- * final long[] array; final int lo; final int hi;
+ * <pre> {@code
+ * static class SortTask extends RecursiveAction {
+ * final long[] array; final int lo, hi;
* SortTask(long[] array, int lo, int hi) {
* this.array = array; this.lo = lo; this.hi = hi;
* }
+ * SortTask(long[] array) { this(array, 0, array.length); }
* protected void compute() {
- * if (hi - lo < THRESHOLD)
- * sequentiallySort(array, lo, hi);
+ * if (hi - lo < THRESHOLD)
+ * sortSequentially(lo, hi);
* else {
- * int mid = (lo + hi) >>> 1;
+ * int mid = (lo + hi) >>> 1;
* invokeAll(new SortTask(array, lo, mid),
* new SortTask(array, mid, hi));
- * merge(array, lo, hi);
+ * merge(lo, mid, hi);
* }
* }
- * }
- * </pre>
+ * // implementation details follow:
+ * final static int THRESHOLD = 1000;
+ * void sortSequentially(int lo, int hi) {
+ * Arrays.sort(array, lo, hi);
+ * }
+ * void merge(int lo, int mid, int hi) {
+ * long[] buf = Arrays.copyOfRange(array, lo, mid);
+ * for (int i = 0, j = lo, k = mid; i < buf.length; j++)
+ * array[j] = (k == hi || buf[i] < array[k]) ?
+ * buf[i++] : array[k++];
+ * }
+ * }}</pre>
*
- * You could then sort anArray by creating <tt>new SortTask(anArray, 0,
- * anArray.length-1) </tt> and invoking it in a ForkJoinPool.
- * As a more concrete simple example, the following task increments
- * each element of an array:
- * <pre>
+ * You could then sort {@code anArray} by creating {@code new
+ * SortTask(anArray)} and invoking it in a ForkJoinPool. As a more
+ * concrete simple example, the following task increments each element
+ * of an array:
+ * <pre> {@code
* class IncrementTask extends RecursiveAction {
- * final long[] array; final int lo; final int hi;
+ * final long[] array; final int lo, hi;
* IncrementTask(long[] array, int lo, int hi) {
* this.array = array; this.lo = lo; this.hi = hi;
* }
* protected void compute() {
- * if (hi - lo < THRESHOLD) {
- * for (int i = lo; i < hi; ++i)
+ * if (hi - lo < THRESHOLD) {
+ * for (int i = lo; i < hi; ++i)
* array[i]++;
* }
* else {
- * int mid = (lo + hi) >>> 1;
+ * int mid = (lo + hi) >>> 1;
* invokeAll(new IncrementTask(array, lo, mid),
* new IncrementTask(array, mid, hi));
* }
* }
- * }
- * </pre>
- *
+ * }}</pre>
*
* <p>The following example illustrates some refinements and idioms
* that may lead to better performance: RecursiveActions need not be
@@ -66,33 +75,33 @@ package scala.concurrent.forkjoin;
* divide-and-conquer approach. Here is a class that sums the squares
* of each element of a double array, by subdividing out only the
* right-hand-sides of repeated divisions by two, and keeping track of
- * them with a chain of <tt>next</tt> references. It uses a dynamic
- * threshold based on method <tt>surplus</tt>, but counterbalances
- * potential excess partitioning by directly performing leaf actions
- * on unstolen tasks rather than further subdividing.
+ * them with a chain of {@code next} references. It uses a dynamic
+ * threshold based on method {@code getSurplusQueuedTaskCount}, but
+ * counterbalances potential excess partitioning by directly
+ * performing leaf actions on unstolen tasks rather than further
+ * subdividing.
*
- * <pre>
+ * <pre> {@code
* double sumOfSquares(ForkJoinPool pool, double[] array) {
* int n = array.length;
- * int seqSize = 1 + n / (8 * pool.getParallelism());
- * Applyer a = new Applyer(array, 0, n, seqSize, null);
+ * Applyer a = new Applyer(array, 0, n, null);
* pool.invoke(a);
* return a.result;
* }
*
* class Applyer extends RecursiveAction {
* final double[] array;
- * final int lo, hi, seqSize;
+ * final int lo, hi;
* double result;
* Applyer next; // keeps track of right-hand-side tasks
- * Applyer(double[] array, int lo, int hi, int seqSize, Applyer next) {
+ * Applyer(double[] array, int lo, int hi, Applyer next) {
* this.array = array; this.lo = lo; this.hi = hi;
- * this.seqSize = seqSize; this.next = next;
+ * this.next = next;
* }
*
- * double atLeaf(int l, int r) {
+ * double atLeaf(int l, int h) {
* double sum = 0;
- * for (int i = l; i < h; ++i) // perform leftmost base step
+ * for (int i = l; i < h; ++i) // perform leftmost base step
* sum += array[i] * array[i];
* return sum;
* }
@@ -101,10 +110,9 @@ package scala.concurrent.forkjoin;
* int l = lo;
* int h = hi;
* Applyer right = null;
- * while (h - l > 1 &&
- * ForkJoinWorkerThread.getEstimatedSurplusTaskCount() <= 3) {
- * int mid = (l + h) >>> 1;
- * right = new Applyer(array, mid, h, seqSize, right);
+ * while (h - l > 1 && getSurplusQueuedTaskCount() <= 3) {
+ * int mid = (l + h) >>> 1;
+ * right = new Applyer(array, mid, h, right);
* right.fork();
* h = mid;
* }
@@ -113,17 +121,20 @@ package scala.concurrent.forkjoin;
* if (right.tryUnfork()) // directly calculate if not stolen
* sum += right.atLeaf(right.lo, right.hi);
* else {
- * right.helpJoin();
+ * right.join();
* sum += right.result;
* }
* right = right.next;
* }
* result = sum;
* }
- * }
- * </pre>
+ * }}</pre>
+ *
+ * @since 1.7
+ * @author Doug Lea
*/
public abstract class RecursiveAction extends ForkJoinTask<Void> {
+ private static final long serialVersionUID = 5232453952276485070L;
/**
* The main computation performed by this task.
@@ -131,7 +142,9 @@ public abstract class RecursiveAction extends ForkJoinTask<Void> {
protected abstract void compute();
/**
- * Always returns null
+ * Always returns {@code null}.
+ *
+ * @return {@code null} always
*/
public final Void getRawResult() { return null; }
@@ -141,7 +154,7 @@ public abstract class RecursiveAction extends ForkJoinTask<Void> {
protected final void setRawResult(Void mustBeNull) { }
/**
- * Implements execution conventions for RecursiveActions
+ * Implements execution conventions for RecursiveActions.
*/
protected final boolean exec() {
compute();
diff --git a/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java b/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
index a526f75..d1e1547 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
@@ -1,29 +1,29 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
/**
- * Recursive result-bearing ForkJoinTasks.
- * <p> For a classic example, here is a task computing Fibonacci numbers:
+ * A recursive result-bearing {@link ForkJoinTask}.
*
- * <pre>
- * class Fibonacci extends RecursiveTask<Integer> {
+ * <p>For a classic example, here is a task computing Fibonacci numbers:
+ *
+ * <pre> {@code
+ * class Fibonacci extends RecursiveTask<Integer> {
* final int n;
- * Fibonnaci(int n) { this.n = n; }
+ * Fibonacci(int n) { this.n = n; }
* Integer compute() {
- * if (n <= 1)
+ * if (n <= 1)
* return n;
* Fibonacci f1 = new Fibonacci(n - 1);
* f1.fork();
* Fibonacci f2 = new Fibonacci(n - 2);
* return f2.compute() + f1.join();
* }
- * }
- * </pre>
+ * }}</pre>
*
* However, besides being a dumb way to compute Fibonacci functions
* (there is a simple fast linear algorithm that you'd use in
@@ -33,17 +33,14 @@ package scala.concurrent.forkjoin;
* minimum granularity size (for example 10 here) for which you always
* sequentially solve rather than subdividing.
*
+ * @since 1.7
+ * @author Doug Lea
*/
public abstract class RecursiveTask<V> extends ForkJoinTask<V> {
+ private static final long serialVersionUID = 5232453952276485270L;
/**
- * Empty constructor for use by subclasses.
- */
- protected RecursiveTask() {
- }
-
- /**
- * The result returned by compute method.
+ * The result of the computation.
*/
V result;
@@ -61,7 +58,7 @@ public abstract class RecursiveTask<V> extends ForkJoinTask<V> {
}
/**
- * Implements execution conventions for RecursiveTask
+ * Implements execution conventions for RecursiveTask.
*/
protected final boolean exec() {
result = compute();
diff --git a/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java b/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
index 34e2e37..19237c9 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/ThreadLocalRandom.java
@@ -1,49 +1,53 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
-import java.util.*;
+
+import java.util.Random;
/**
- * A random number generator with the same properties as class {@link
- * Random} but isolated to the current Thread. Like the global
- * generator used by the {@link java.lang.Math} class, a
- * ThreadLocalRandom is initialized with an internally generated seed
- * that may not otherwise be modified. When applicable, use of
- * ThreadLocalRandom rather than shared Random objects in concurrent
- * programs will typically encounter much less overhead and
- * contention. ThreadLocalRandoms are particularly appropriate when
- * multiple tasks (for example, each a {@link ForkJoinTask}), use
- * random numbers in parallel in thread pools.
+ * A random number generator isolated to the current thread. Like the
+ * global {@link java.util.Random} generator used by the {@link
+ * java.lang.Math} class, a {@code ThreadLocalRandom} is initialized
+ * with an internally generated seed that may not otherwise be
+ * modified. When applicable, use of {@code ThreadLocalRandom} rather
+ * than shared {@code Random} objects in concurrent programs will
+ * typically encounter much less overhead and contention. Use of
+ * {@code ThreadLocalRandom} is particularly appropriate when multiple
+ * tasks (for example, each a {@link ForkJoinTask}) use random numbers
+ * in parallel in thread pools.
*
* <p>Usages of this class should typically be of the form:
- * <code>ThreadLocalRandom.current().nextX(...)</code> (where
- * <code>X</code> is <code>Int</code>, <code>Long</code>, etc).
+ * {@code ThreadLocalRandom.current().nextX(...)} (where
+ * {@code X} is {@code Int}, {@code Long}, etc).
* When all usages are of this form, it is never possible to
- * accidently share ThreadLocalRandoms across multiple threads.
+ * accidently share a {@code ThreadLocalRandom} across multiple threads.
*
* <p>This class also provides additional commonly used bounded random
* generation methods.
+ *
+ * @since 1.7
+ * @author Doug Lea
*/
public class ThreadLocalRandom extends Random {
// same constants as Random, but must be redeclared because private
- private final static long multiplier = 0x5DEECE66DL;
- private final static long addend = 0xBL;
- private final static long mask = (1L << 48) - 1;
+ private static final long multiplier = 0x5DEECE66DL;
+ private static final long addend = 0xBL;
+ private static final long mask = (1L << 48) - 1;
/**
- * The random seed. We can't use super.seed
+ * The random seed. We can't use super.seed.
*/
private long rnd;
/**
- * Initialization flag to permit the first and only allowed call
- * to setSeed (inside Random constructor) to succeed. We can't
- * allow others since it would cause setting seed in one part of a
- * program to unintentionally impact other usages by the thread.
+ * Initialization flag to permit calls to setSeed to succeed only
+ * while executing the Random constructor. We can't allow others
+ * since it would cause setting seed in one part of a program to
+ * unintentionally impact other usages by the thread.
*/
boolean initialized;
@@ -65,40 +69,42 @@ public class ThreadLocalRandom extends Random {
/**
* Constructor called only by localRandom.initialValue.
- * We rely on the fact that the superclass no-arg constructor
- * invokes setSeed exactly once to initialize.
*/
ThreadLocalRandom() {
super();
+ initialized = true;
}
/**
- * Returns the current Thread's ThreadLocalRandom
- * @return the current Thread's ThreadLocalRandom
+ * Returns the current thread's {@code ThreadLocalRandom}.
+ *
+ * @return the current thread's {@code ThreadLocalRandom}
*/
public static ThreadLocalRandom current() {
return localRandom.get();
}
/**
- * Throws UnsupportedOperationException. Setting seeds in this
- * generator is unsupported.
+ * Throws {@code UnsupportedOperationException}. Setting seeds in
+ * this generator is not supported.
+ *
* @throws UnsupportedOperationException always
*/
public void setSeed(long seed) {
if (initialized)
throw new UnsupportedOperationException();
- initialized = true;
rnd = (seed ^ multiplier) & mask;
}
protected int next(int bits) {
- return (int)((rnd = (rnd * multiplier + addend) & mask) >>> (48-bits));
+ rnd = (rnd * multiplier + addend) & mask;
+ return (int) (rnd >>> (48-bits));
}
/**
* Returns a pseudorandom, uniformly distributed value between the
* given least value (inclusive) and bound (exclusive).
+ *
* @param least the least value returned
* @param bound the upper bound (exclusive)
* @throws IllegalArgumentException if least greater than or equal
@@ -113,7 +119,8 @@ public class ThreadLocalRandom extends Random {
/**
* Returns a pseudorandom, uniformly distributed value
- * between 0 (inclusive) and the specified value (exclusive)
+ * between 0 (inclusive) and the specified value (exclusive).
+ *
* @param n the bound on the random number to be returned. Must be
* positive.
* @return the next value
@@ -131,17 +138,18 @@ public class ThreadLocalRandom extends Random {
while (n >= Integer.MAX_VALUE) {
int bits = next(2);
long half = n >>> 1;
- long nextn = ((bits & 2) == 0)? half : n - half;
+ long nextn = ((bits & 2) == 0) ? half : n - half;
if ((bits & 1) == 0)
offset += n - nextn;
n = nextn;
}
- return offset + nextInt((int)n);
+ return offset + nextInt((int) n);
}
/**
* Returns a pseudorandom, uniformly distributed value between the
* given least value (inclusive) and bound (exclusive).
+ *
* @param least the least value returned
* @param bound the upper bound (exclusive)
* @return the next value
@@ -156,7 +164,8 @@ public class ThreadLocalRandom extends Random {
/**
* Returns a pseudorandom, uniformly distributed {@code double} value
- * between 0 (inclusive) and the specified value (exclusive)
+ * between 0 (inclusive) and the specified value (exclusive).
+ *
* @param n the bound on the random number to be returned. Must be
* positive.
* @return the next value
@@ -171,6 +180,7 @@ public class ThreadLocalRandom extends Random {
/**
* Returns a pseudorandom, uniformly distributed value between the
* given least value (inclusive) and bound (exclusive).
+ *
* @param least the least value returned
* @param bound the upper bound (exclusive)
* @return the next value
@@ -183,4 +193,5 @@ public class ThreadLocalRandom extends Random {
return nextDouble() * (bound - least) + least;
}
+ private static final long serialVersionUID = -5851777807851030925L;
}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java b/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
index 9c7b228..7d149c7 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/TransferQueue.java
@@ -1,7 +1,7 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
package scala.concurrent.forkjoin;
@@ -11,21 +11,23 @@ import java.util.concurrent.*;
* A {@link BlockingQueue} in which producers may wait for consumers
* to receive elements. A {@code TransferQueue} may be useful for
* example in message passing applications in which producers
- * sometimes (using method {@code transfer}) await receipt of
- * elements by consumers invoking {@code take} or {@code poll},
- * while at other times enqueue elements (via method {@code put})
- * without waiting for receipt. Non-blocking and time-out versions of
- * {@code tryTransfer} are also available. A TransferQueue may also
- * be queried via {@code hasWaitingConsumer} whether there are any
- * threads waiting for items, which is a converse analogy to a
- * {@code peek} operation.
+ * sometimes (using method {@link #transfer}) await receipt of
+ * elements by consumers invoking {@code take} or {@code poll}, while
+ * at other times enqueue elements (via method {@code put}) without
+ * waiting for receipt.
+ * {@linkplain #tryTransfer(Object) Non-blocking} and
+ * {@linkplain #tryTransfer(Object,long,TimeUnit) time-out} versions of
+ * {@code tryTransfer} are also available.
+ * A {@code TransferQueue} may also be queried, via {@link
+ * #hasWaitingConsumer}, whether there are any threads waiting for
+ * items, which is a converse analogy to a {@code peek} operation.
*
- * <p>Like any {@code BlockingQueue}, a {@code TransferQueue} may be
- * capacity bounded. If so, an attempted {@code transfer} operation
- * may initially block waiting for available space, and/or
- * subsequently block waiting for reception by a consumer. Note that
- * in a queue with zero capacity, such as {@link SynchronousQueue},
- * {@code put} and {@code transfer} are effectively synonymous.
+ * <p>Like other blocking queues, a {@code TransferQueue} may be
+ * capacity bounded. If so, an attempted transfer operation may
+ * initially block waiting for available space, and/or subsequently
+ * block waiting for reception by a consumer. Note that in a queue
+ * with zero capacity, such as {@link SynchronousQueue}, {@code put}
+ * and {@code transfer} are effectively synonymous.
*
* <p>This interface is a member of the
* <a href="{@docRoot}/../technotes/guides/collections/index.html">
@@ -37,9 +39,12 @@ import java.util.concurrent.*;
*/
public interface TransferQueue<E> extends BlockingQueue<E> {
/**
- * Transfers the specified element if there exists a consumer
- * already waiting to receive it, otherwise returning {@code false}
- * without enqueuing the element.
+ * Transfers the element to a waiting consumer immediately, if possible.
+ *
+ * <p>More precisely, transfers the specified element immediately
+ * if there exists a consumer already waiting to receive it (in
+ * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
+ * otherwise returning {@code false} without enqueuing the element.
*
* @param e the element to transfer
* @return {@code true} if the element was transferred, else
@@ -53,13 +58,16 @@ public interface TransferQueue<E> extends BlockingQueue<E> {
boolean tryTransfer(E e);
/**
- * Inserts the specified element into this queue, waiting if
- * necessary for space to become available and the element to be
- * dequeued by a consumer invoking {@code take} or {@code poll}.
+ * Transfers the element to a consumer, waiting if necessary to do so.
+ *
+ * <p>More precisely, transfers the specified element immediately
+ * if there exists a consumer already waiting to receive it (in
+ * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
+ * else waits until the element is received by a consumer.
*
* @param e the element to transfer
* @throws InterruptedException if interrupted while waiting,
- * in which case the element is not enqueued.
+ * in which case the element is not left enqueued
* @throws ClassCastException if the class of the specified element
* prevents it from being added to this queue
* @throws NullPointerException if the specified element is null
@@ -69,10 +77,15 @@ public interface TransferQueue<E> extends BlockingQueue<E> {
void transfer(E e) throws InterruptedException;
/**
- * Inserts the specified element into this queue, waiting up to
- * the specified wait time if necessary for space to become
- * available and the element to be dequeued by a consumer invoking
- * {@code take} or {@code poll}.
+ * Transfers the element to a consumer if it is possible to do so
+ * before the timeout elapses.
+ *
+ * <p>More precisely, transfers the specified element immediately
+ * if there exists a consumer already waiting to receive it (in
+ * {@link #take} or timed {@link #poll(long,TimeUnit) poll}),
+ * else waits until the element is received by a consumer,
+ * returning {@code false} if the specified wait time elapses
+ * before the element can be transferred.
*
* @param e the element to transfer
* @param timeout how long to wait before giving up, in units of
@@ -81,9 +94,9 @@ public interface TransferQueue<E> extends BlockingQueue<E> {
* {@code timeout} parameter
* @return {@code true} if successful, or {@code false} if
* the specified waiting time elapses before completion,
- * in which case the element is not enqueued.
+ * in which case the element is not left enqueued
* @throws InterruptedException if interrupted while waiting,
- * in which case the element is not enqueued.
+ * in which case the element is not left enqueued
* @throws ClassCastException if the class of the specified element
* prevents it from being added to this queue
* @throws NullPointerException if the specified element is null
@@ -95,7 +108,8 @@ public interface TransferQueue<E> extends BlockingQueue<E> {
/**
* Returns {@code true} if there is at least one consumer waiting
- * to dequeue an element via {@code take} or {@code poll}.
+ * to receive an element via {@link #take} or
+ * timed {@link #poll(long,TimeUnit) poll}.
* The return value represents a momentary state of affairs.
*
* @return {@code true} if there is at least one waiting consumer
@@ -104,15 +118,16 @@ public interface TransferQueue<E> extends BlockingQueue<E> {
/**
* Returns an estimate of the number of consumers waiting to
- * dequeue elements via {@code take} or {@code poll}. The return
- * value is an approximation of a momentary state of affairs, that
- * may be inaccurate if consumers have completed or given up
- * waiting. The value may be useful for monitoring and heuristics,
- * but not for synchronization control. Implementations of this
+ * receive elements via {@link #take} or timed
+ * {@link #poll(long,TimeUnit) poll}. The return value is an
+ * approximation of a momentary state of affairs, that may be
+ * inaccurate if consumers have completed or given up waiting.
+ * The value may be useful for monitoring and heuristics, but
+ * not for synchronization control. Implementations of this
* method are likely to be noticeably slower than those for
* {@link #hasWaitingConsumer}.
*
- * @return the number of consumers waiting to dequeue elements
+ * @return the number of consumers waiting to receive elements
*/
int getWaitingConsumerCount();
}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/package-info.java b/src/forkjoin/scala/concurrent/forkjoin/package-info.java
index b8fa0fa..3561b9b 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/package-info.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/package-info.java
@@ -1,7 +1,7 @@
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
- * http://creativecommons.org/licenses/publicdomain
+ * http://creativecommons.org/publicdomain/zero/1.0/
*/
@@ -15,7 +15,7 @@
* Threads. However, when applicable, they typically provide
* significantly greater performance on multiprocessor platforms.
*
- * <p> Candidates for fork/join processing mainly include those that
+ * <p>Candidates for fork/join processing mainly include those that
* can be expressed using parallel divide-and-conquer techniques: To
* solve a problem, break it in two (or more) parts, and then solve
* those parts in parallel, continuing on in this way until the
@@ -24,6 +24,5 @@
* available to other threads (normally one per CPU), that help
* complete the tasks. In general, the most efficient ForkJoinTasks
* are those that directly implement this algorithmic design pattern.
- *
*/
package scala.concurrent.forkjoin;
diff --git a/src/forkjoin/scala/concurrent/util/Unsafe.java b/src/forkjoin/scala/concurrent/util/Unsafe.java
new file mode 100644
index 0000000..ef893c9
--- /dev/null
+++ b/src/forkjoin/scala/concurrent/util/Unsafe.java
@@ -0,0 +1,35 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.util;
+
+
+
+import java.lang.reflect.Field;
+
+
+
+public final class Unsafe {
+ public final static sun.misc.Unsafe instance;
+ static {
+ try {
+ sun.misc.Unsafe found = null;
+ for(Field field : sun.misc.Unsafe.class.getDeclaredFields()) {
+ if (field.getType() == sun.misc.Unsafe.class) {
+ field.setAccessible(true);
+ found = (sun.misc.Unsafe) field.get(null);
+ break;
+ }
+ }
+ if (found == null) throw new IllegalStateException("Can't find instance of sun.misc.Unsafe");
+ else instance = found;
+ } catch(Throwable t) {
+ throw new ExceptionInInitializerError(t);
+ }
+ }
+}
diff --git a/src/intellij/README b/src/intellij/README
index f24d700..9ef612b 100644
--- a/src/intellij/README
+++ b/src/intellij/README
@@ -1,12 +1,13 @@
-Use IntelliJ IDEA X EAP (http://confluence.jetbrains.net/display/IDEADEV/IDEA+X+EAP)
-a Scala Plugin nightly build (http://confluence.jetbrains.net/display/SCA/Scala+Plugin+Nightly+Builds+for+IDEA+X)
+Use the latest IntelliJ IDEA release and install the Scala plugin from within the IDE.
The following steps are required to use IntelliJ IDEA on Scala trunk
+ - compile "locker" using "ant locker.done"
- Copy the *.iml.SAMPLE / *.ipr.SAMPLE files to *.iml / *.ipr
- In IDEA, create a global library named "ant" which contains "ant.jar"
- - In the Scala Facet of the "library" module, update the path in the command-line
- argument for "-sourcepath"
+ - Also create an SDK entry named "1.6" containing the java 1.6 SDK
+ - In the Scala Facet of the "library" and "reflect" modules, update the path in the
+ command-line argument for "-sourcepath"
- In the Project Settings, update the "Version Control" to match your checkout
Known problems
- - Currently, it's not possible to build the "actors" module in IDEA
+ - Due to SI-4365, the "library" module has to be built using "-Yno-generic-signatures"
diff --git a/src/intellij/actors.iml.SAMPLE b/src/intellij/actors.iml.SAMPLE
index b095d29..896c496 100644
--- a/src/intellij/actors.iml.SAMPLE
+++ b/src/intellij/actors.iml.SAMPLE
@@ -6,7 +6,7 @@
<option name="compilerLibraryLevel" value="Project" />
<option name="compilerLibraryName" value="compiler-locker" />
<option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=128M -d32 -server -XX:+UseParallelGC" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
</configuration>
</facet>
</component>
@@ -18,7 +18,7 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module" module-name="library" />
- <orderEntry type="library" name="lib" level="project" />
+ <orderEntry type="module" module-name="forkjoin" />
</component>
</module>
diff --git a/src/intellij/asm.iml.SAMPLE b/src/intellij/asm.iml.SAMPLE
new file mode 100644
index 0000000..ba9e7e8
--- /dev/null
+++ b/src/intellij/asm.iml.SAMPLE
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../asm">
+ <sourceFolder url="file://$MODULE_DIR$/../asm" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ </component>
+</module>
+
diff --git a/src/intellij/compiler.iml.SAMPLE b/src/intellij/compiler.iml.SAMPLE
index 75c7653..696c347 100644
--- a/src/intellij/compiler.iml.SAMPLE
+++ b/src/intellij/compiler.iml.SAMPLE
@@ -6,7 +6,7 @@
<option name="compilerLibraryLevel" value="Project" />
<option name="compilerLibraryName" value="compiler-locker" />
<option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=128M -d32 -server -XX:+UseParallelGC" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
</configuration>
</facet>
</component>
@@ -18,8 +18,12 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module" module-name="library" />
- <orderEntry type="library" name="lib" level="project" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="asm" />
+ <orderEntry type="module" module-name="fjbg" />
+ <orderEntry type="module" module-name="msil" />
<orderEntry type="library" name="ant" level="application" />
+ <orderEntry type="library" name="jline" level="project" />
</component>
</module>
diff --git a/src/intellij/dbc.iml.SAMPLE b/src/intellij/dbc.iml.SAMPLE
deleted file mode 100644
index 5a6df4c..0000000
--- a/src/intellij/dbc.iml.SAMPLE
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module type="JAVA_MODULE" version="4">
- <component name="FacetManager">
- <facet type="scala" name="Scala">
- <configuration>
- <option name="compilerLibraryLevel" value="Project" />
- <option name="compilerLibraryName" value="compiler-locker" />
- <option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=128M -d32 -server -XX:+UseParallelGC" />
- </configuration>
- </facet>
- </component>
- <component name="NewModuleRootManager" inherit-compiler-output="true">
- <exclude-output />
- <content url="file://$MODULE_DIR$/../dbc">
- <sourceFolder url="file://$MODULE_DIR$/../dbc" isTestSource="false" />
- </content>
- <orderEntry type="inheritedJdk" />
- <orderEntry type="sourceFolder" forTests="false" />
- <orderEntry type="module" module-name="library" />
- </component>
-</module>
-
diff --git a/src/intellij/fjbg.iml.SAMPLE b/src/intellij/fjbg.iml.SAMPLE
new file mode 100644
index 0000000..03eca69
--- /dev/null
+++ b/src/intellij/fjbg.iml.SAMPLE
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../fjbg">
+ <sourceFolder url="file://$MODULE_DIR$/../fjbg" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ </component>
+</module>
+
diff --git a/src/intellij/forkjoin.iml.SAMPLE b/src/intellij/forkjoin.iml.SAMPLE
new file mode 100644
index 0000000..be807cc
--- /dev/null
+++ b/src/intellij/forkjoin.iml.SAMPLE
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../forkjoin">
+ <sourceFolder url="file://$MODULE_DIR$/../forkjoin" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ </component>
+</module>
+
diff --git a/src/intellij/library.iml.SAMPLE b/src/intellij/library.iml.SAMPLE
index a86c0a0..9c1b7ec 100644
--- a/src/intellij/library.iml.SAMPLE
+++ b/src/intellij/library.iml.SAMPLE
@@ -5,9 +5,9 @@
<configuration>
<option name="compilerLibraryLevel" value="Project" />
<option name="compilerLibraryName" value="compiler-locker" />
- <option name="compilerOptions" value="-sourcepath /Users/luc/scala/git/src/library" />
+ <option name="compilerOptions" value="-sourcepath /Users/luc/scala/scala/src/library -Yno-generic-signatures" />
<option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=128M -d32 -server -XX:+UseParallelGC" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
</configuration>
</facet>
</component>
@@ -18,7 +18,7 @@
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
- <orderEntry type="library" name="lib" level="project" />
+ <orderEntry type="module" module-name="forkjoin" />
</component>
</module>
diff --git a/src/intellij/manual.iml.SAMPLE b/src/intellij/manual.iml.SAMPLE
index 10de797..62810e0 100644
--- a/src/intellij/manual.iml.SAMPLE
+++ b/src/intellij/manual.iml.SAMPLE
@@ -6,7 +6,7 @@
<option name="compilerLibraryLevel" value="Project" />
<option name="compilerLibraryName" value="compiler-locker" />
<option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=128M -d32 -server -XX:+UseParallelGC" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
</configuration>
</facet>
</component>
diff --git a/src/intellij/msil.iml.SAMPLE b/src/intellij/msil.iml.SAMPLE
new file mode 100644
index 0000000..56f7947
--- /dev/null
+++ b/src/intellij/msil.iml.SAMPLE
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="FacetManager">
+ <facet type="scala" name="Scala">
+ <configuration>
+ <option name="compilerLibraryLevel" value="Project" />
+ <option name="compilerLibraryName" value="compiler-locker" />
+ <option name="maximumHeapSize" value="1536" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
+ </configuration>
+ </facet>
+ </component>
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../msil">
+ <sourceFolder url="file://$MODULE_DIR$/../msil" isTestSource="false" />
+ <excludeFolder url="file://$MODULE_DIR$/../msil/ch/epfl/lamp/compiler/msil/tests" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ </component>
+</module>
+
diff --git a/src/intellij/partest.iml.SAMPLE b/src/intellij/partest.iml.SAMPLE
index 7dcd868..ab4a32a 100644
--- a/src/intellij/partest.iml.SAMPLE
+++ b/src/intellij/partest.iml.SAMPLE
@@ -6,7 +6,7 @@
<option name="compilerLibraryLevel" value="Project" />
<option name="compilerLibraryName" value="compiler-locker" />
<option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=128M -d32 -server -XX:+UseParallelGC" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
</configuration>
</facet>
</component>
@@ -17,11 +17,11 @@
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
- <orderEntry type="module" module-name="compiler" />
<orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
<orderEntry type="module" module-name="actors" />
<orderEntry type="module" module-name="scalap" />
- <orderEntry type="library" name="lib" level="project" />
+ <orderEntry type="module" module-name="compiler" />
<orderEntry type="library" name="ant" level="application" />
</component>
</module>
diff --git a/src/intellij/reflect.iml.SAMPLE b/src/intellij/reflect.iml.SAMPLE
new file mode 100644
index 0000000..10973c5
--- /dev/null
+++ b/src/intellij/reflect.iml.SAMPLE
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="FacetManager">
+ <facet type="scala" name="Scala">
+ <configuration>
+ <option name="compilerLibraryLevel" value="Project" />
+ <option name="compilerLibraryName" value="compiler-locker" />
+ <option name="compilerOptions" value="-sourcepath /Users/luc/scala/scala/src/reflect" />
+ <option name="maximumHeapSize" value="1536" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
+ </configuration>
+ </facet>
+ </component>
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../reflect">
+ <sourceFolder url="file://$MODULE_DIR$/../reflect" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="library" name="jline" level="project" />
+ </component>
+</module>
+
diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE
index 4d32f0e..37307c2 100644
--- a/src/intellij/scala-lang.ipr.SAMPLE
+++ b/src/intellij/scala-lang.ipr.SAMPLE
@@ -196,13 +196,19 @@
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/actors.iml" filepath="$PROJECT_DIR$/actors.iml" />
+ <module fileurl="file://$PROJECT_DIR$/asm.iml" filepath="$PROJECT_DIR$/asm.iml" />
<module fileurl="file://$PROJECT_DIR$/compiler.iml" filepath="$PROJECT_DIR$/compiler.iml" />
- <module fileurl="file://$PROJECT_DIR$/dbc.iml" filepath="$PROJECT_DIR$/dbc.iml" />
+ <module fileurl="file://$PROJECT_DIR$/fjbg.iml" filepath="$PROJECT_DIR$/fjbg.iml" />
+ <module fileurl="file://$PROJECT_DIR$/forkjoin.iml" filepath="$PROJECT_DIR$/forkjoin.iml" />
<module fileurl="file://$PROJECT_DIR$/library.iml" filepath="$PROJECT_DIR$/library.iml" />
<module fileurl="file://$PROJECT_DIR$/manual.iml" filepath="$PROJECT_DIR$/manual.iml" />
+ <module fileurl="file://$PROJECT_DIR$/msil.iml" filepath="$PROJECT_DIR$/msil.iml" />
<module fileurl="file://$PROJECT_DIR$/partest.iml" filepath="$PROJECT_DIR$/partest.iml" />
+ <module fileurl="file://$PROJECT_DIR$/reflect.iml" filepath="$PROJECT_DIR$/reflect.iml" />
+ <module fileurl="file://$PROJECT_DIR$/scala.iml" filepath="$PROJECT_DIR$/scala.iml" />
<module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
<module fileurl="file://$PROJECT_DIR$/swing.iml" filepath="$PROJECT_DIR$/swing.iml" />
+ <module fileurl="file://$PROJECT_DIR$/test.iml" filepath="$PROJECT_DIR$/test.iml" />
</modules>
</component>
<component name="ProjectResources">
@@ -211,6 +217,10 @@
<component name="ProjectRootManager" version="2" languageLevel="JDK_1_6" assert-keyword="true" jdk-15="true" project-jdk-name="1.6" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/../../out" />
</component>
+ <component name="ScalacSettings">
+ <option name="COMPILER_LIBRARY_NAME" value="compiler-locker" />
+ <option name="COMPILER_LIBRARY_LEVEL" value="Project" />
+ </component>
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/../.." vcs="Git" />
</component>
@@ -219,32 +229,20 @@
<CLASSES>
<root url="file://$PROJECT_DIR$/../../build/locker/classes/library" />
<root url="file://$PROJECT_DIR$/../../build/locker/classes/compiler" />
- <root url="jar://$PROJECT_DIR$/../../lib/forkjoin.jar!/" />
- <root url="jar://$PROJECT_DIR$/../../lib/fjbg.jar!/" />
- <root url="jar://$PROJECT_DIR$/../../lib/msil.jar!/" />
+ <root url="file://$PROJECT_DIR$/../../build/locker/classes/reflect" />
+ <root url="file://$PROJECT_DIR$/../../build/libs/classes/fjbg" />
+ <root url="file://$PROJECT_DIR$/../../build/asm/classes" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
- <library name="lib">
+ <library name="jline">
<CLASSES>
<root url="jar://$PROJECT_DIR$/../../lib/jline.jar!/" />
- <root url="jar://$PROJECT_DIR$/../../lib/forkjoin.jar!/" />
- <root url="jar://$PROJECT_DIR$/../../lib/fjbg.jar!/" />
- <root url="jar://$PROJECT_DIR$/../../lib/msil.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
- <library name="quicklib">
- <CLASSES>
- <root url="file://$PROJECT_DIR$/../../build/quick/classes/library" />
- </CLASSES>
- <JAVADOC />
- <SOURCES>
- <root url="file://$PROJECT_DIR$/../library" />
- </SOURCES>
- </library>
</component>
</project>
diff --git a/src/intellij/scala.iml.SAMPLE b/src/intellij/scala.iml.SAMPLE
new file mode 100644
index 0000000..8ea9d0d
--- /dev/null
+++ b/src/intellij/scala.iml.SAMPLE
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../.." />
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ </component>
+</module>
+
diff --git a/src/intellij/scalap.iml.SAMPLE b/src/intellij/scalap.iml.SAMPLE
index 9473864..77eea7c 100644
--- a/src/intellij/scalap.iml.SAMPLE
+++ b/src/intellij/scalap.iml.SAMPLE
@@ -6,7 +6,7 @@
<option name="compilerLibraryLevel" value="Project" />
<option name="compilerLibraryName" value="compiler-locker" />
<option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=128M -d32 -server -XX:+UseParallelGC" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
</configuration>
</facet>
</component>
@@ -18,6 +18,7 @@
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
<orderEntry type="module" module-name="compiler" />
</component>
</module>
diff --git a/src/intellij/swing.iml.SAMPLE b/src/intellij/swing.iml.SAMPLE
index e9c4d13..c97bfdf 100644
--- a/src/intellij/swing.iml.SAMPLE
+++ b/src/intellij/swing.iml.SAMPLE
@@ -6,7 +6,7 @@
<option name="compilerLibraryLevel" value="Project" />
<option name="compilerLibraryName" value="compiler-locker" />
<option name="maximumHeapSize" value="1536" />
- <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=128M -d32 -server -XX:+UseParallelGC" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
</configuration>
</facet>
</component>
diff --git a/src/intellij/test.iml.SAMPLE b/src/intellij/test.iml.SAMPLE
new file mode 100644
index 0000000..112fec4
--- /dev/null
+++ b/src/intellij/test.iml.SAMPLE
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../../test" />
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="actors" />
+ <orderEntry type="module" module-name="swing" />
+ <orderEntry type="module" module-name="partest" />
+ <orderEntry type="module" module-name="asm" />
+ <orderEntry type="module" module-name="fjbg" />
+ <orderEntry type="module" module-name="forkjoin" />
+ <orderEntry type="module" module-name="msil" />
+ </component>
+</module>
+
diff --git a/src/jline/TEST-NOTE.txt b/src/jline/TEST-NOTE.txt
deleted file mode 100644
index 04f5de8..0000000
--- a/src/jline/TEST-NOTE.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Apparently the jline bundled with sbt interferes with testing some changes: for instance after changing the keybindings I kept seeing failures until I realized what was happening, and bypassed sbt, e.g.
-
-% java -cp ./lib_managed/scala_2.9.0/compile/jansi-1.4.jar:./lib_managed/scala_2.9.0/test/'*':./target/scala_2.9.0/classes:./target/scala_2.9.0/test-classes:./target/scala_2.9.0/resources org.junit.runner.JUnitCore scala.tools.jline.console.EditLineTest
-
diff --git a/src/jline/build.sbt b/src/jline/build.sbt
new file mode 100644
index 0000000..4fc3bab
--- /dev/null
+++ b/src/jline/build.sbt
@@ -0,0 +1,49 @@
+seq(ProguardPlugin.proguardSettings :_*)
+
+name := "jline"
+
+organization := "org.scala-lang"
+
+version := "2.10.0-SNAPSHOT"
+
+scalaVersion := "2.9.0-1"
+
+// Only need these because of weird testing jline issues.
+retrieveManaged := true
+
+parallelExecution in Test := false
+
+libraryDependencies ++= Seq(
+ "org.fusesource.jansi" % "jansi" % "1.4",
+ "com.novocode" % "junit-interface" % "0.7" % "test->default"
+)
+
+javacOptions ++= Seq("-target", "1.5")
+
+proguardOptions ++= Seq(
+ "-dontshrink",
+ "-keep class *",
+ "-keepdirectories"
+)
+
+proguardInJars := Nil
+
+makeInJarFilter ~= { prevFilter =>
+ val jansiFilter = List(
+ "!META-INF/MANIFEST.MF",
+ "org/fusesource/hawtjni/runtime",
+ "org/fusesource/hawtjni/runtime/Callback.class",
+ "org/fusesource/hawtjni/runtime/Library.class",
+ "!org/fusesource/hawtjni/**",
+ "!META-INF/maven/org.fusesource.hawtjni",
+ "!META-INF/maven/org.fusesource.jansi",
+ "!META-INF/maven/org.fusesource.hawtjni/**",
+ "!META-INF/maven/org.fusesource.jansi/**"
+ ).mkString(",")
+ // In sbt 0.9.8 the scala-library.jar line was not necessary,
+ // but in 0.9.9 it started showing up here. Who knows.
+ file =>
+ if (file startsWith "jansi-") jansiFilter
+ else if (file == "scala-library.jar") "!**"
+ else prevFilter(file)
+}
diff --git a/src/jline/manual-test.sh b/src/jline/manual-test.sh
new file mode 100755
index 0000000..aa5131c
--- /dev/null
+++ b/src/jline/manual-test.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+#
+# Apparently the jline bundled with sbt interferes with testing some
+# changes: for instance after changing the keybindings I kept seeing
+# failures until I realized what was happening and bypassed sbt, like this.
+
+java -cp lib_managed/jar/com.novocode/junit-interface/junit-interface-0.5.jar:lib_managed/jar/junit/junit/junit-4.8.1.jar:lib_managed/jar/org.fusesource.jansi/jansi/jansi-1.4.jar:lib_managed/jar/org.scala-tools.testing/test-interface/test-interface-0.5.jar:target/scala-2.9.0.1/test-classes:target/scala-2.9.0.1/jline_2.9.0-1-2.10.0-SNAPSHOT.jar \
+org.junit.runner.JUnitCore scala.tools.jline.console.EditLineTest
diff --git a/src/jline/project/build.properties b/src/jline/project/build.properties
deleted file mode 100644
index 0c2795b..0000000
--- a/src/jline/project/build.properties
+++ /dev/null
@@ -1,8 +0,0 @@
-#Project properties
-#Wed Mar 23 21:05:24 PDT 2011
-project.organization=org.improving
-project.name=jline
-sbt.version=0.7.7
-project.version=0.99-SNAPSHOT
-build.scala.versions=2.9.0
-project.initialize=false
diff --git a/src/jline/project/build/JlineProject.scala b/src/jline/project/build/JlineProject.scala
deleted file mode 100644
index ef1b30c..0000000
--- a/src/jline/project/build/JlineProject.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-import sbt._
-
-/** I'm sure much of this is done the hard way, but it's done!
- */
-class JlineProject(info: ProjectInfo) extends DefaultProject(info) with ProguardProject {
- val snapShots = "Snapshots" at "http://scala-tools.org/repo-snapshots/"
- val jansi = "org.fusesource.jansi" % "jansi" % "1.4"
- val junitInterface = "com.novocode" % "junit-interface" % "0.5" % "test->default"
-
- // val junit = "junit" % "junit" % "4.8.1" % "test"
- // lazy val jansiPath = (managedDependencyPath / "compile" ** "jansi*").get.toList.head.absolutePath
-
- override def javaCompileOptions = super.javaCompileOptions ++ javaCompileOptions("-target", "1.5")
-
- override def makeInJarFilter(file: String) = {
- if (!file.startsWith("jansi")) super.makeInJarFilter(file)
- else List(
- "!META-INF/MANIFEST.MF",
- "org/fusesource/hawtjni/runtime",
- "org/fusesource/hawtjni/runtime/Callback.class",
- "org/fusesource/hawtjni/runtime/Library.class",
- "!org/fusesource/hawtjni/**",
- "!META-INF/maven/org.fusesource.hawtjni",
- "!META-INF/maven/org.fusesource.jansi",
- "!META-INF/maven/org.fusesource.hawtjni/**",
- "!META-INF/maven/org.fusesource.jansi/**"
- ) mkString ", "
- }
-
- override def proguardOptions = List(
- "-dontshrink",
- "-keep class *",
- "-keepdirectories"
- )
-}
diff --git a/src/jline/project/plugins/Plugins.scala b/src/jline/project/plugins/Plugins.scala
deleted file mode 100644
index 8c336f0..0000000
--- a/src/jline/project/plugins/Plugins.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-import sbt._
-
-class Plugins(info: ProjectInfo) extends PluginDefinition(info) {
- val proguard = "org.scala-tools.sbt" % "sbt-proguard-plugin" % "0.0.5"
-}
diff --git a/src/jline/project/plugins/build.sbt b/src/jline/project/plugins/build.sbt
new file mode 100644
index 0000000..0e0f27b
--- /dev/null
+++ b/src/jline/project/plugins/build.sbt
@@ -0,0 +1,5 @@
+resolvers += "Proguard plugin repo" at "http://siasia.github.com/maven2"
+
+libraryDependencies <<= (libraryDependencies, appConfiguration) { (deps, app) =>
+ deps :+ "com.github.siasia" %% "xsbt-proguard-plugin" % app.provider.id.version
+}
diff --git a/src/jline/project/plugins/project/build.properties b/src/jline/project/plugins/project/build.properties
deleted file mode 100644
index 7a06683..0000000
--- a/src/jline/project/plugins/project/build.properties
+++ /dev/null
@@ -1,3 +0,0 @@
-#Project properties
-#Wed May 25 15:08:22 PDT 2011
-plugin.uptodate=true
diff --git a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java b/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
index 7882fcc..9df4270 100644
--- a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
+++ b/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
@@ -1120,6 +1120,15 @@ public class ConsoleReader
return getKeyForAction(op.code);
}
+ public void printBindings() {
+ System.out.println("printBindings(): keyBindings.length = " + keyBindings.length);
+ for (int i = 0; i < keyBindings.length; i++) {
+ if (keyBindings[i] != Operation.UNKNOWN.code) {
+ System.out.println("keyBindings[" + i + "] = " + keyBindings[i]);
+ }
+ }
+ }
+
/**
* Reads the console input and returns an array of the form [raw, key binding].
*/
diff --git a/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTestSupport.java b/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTestSupport.java
index 04e98e5..c19099f 100644
--- a/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTestSupport.java
+++ b/src/jline/src/test/java/scala/tools/jline/console/ConsoleReaderTestSupport.java
@@ -65,6 +65,7 @@ public abstract class ConsoleReaderTestSupport
int action = console.getKeyForAction(logicalAction);
if (action == -1) {
+ console.printBindings();
fail("Keystroke for logical action " + logicalAction + " was not bound in the console");
}
diff --git a/src/library-aux/scala/Any.scala b/src/library-aux/scala/Any.scala
index a22a823..07a9ffa 100644
--- a/src/library-aux/scala/Any.scala
+++ b/src/library-aux/scala/Any.scala
@@ -10,11 +10,30 @@ package scala
/** Class `Any` is the root of the Scala class hierarchy. Every class in a Scala
* execution environment inherits directly or indirectly from this class.
+ *
+ * Starting with Scala 2.10 it is possible to directly extend `Any` using ''universal traits''.
+ * A ''universal trait'' is a trait that extends `Any`, only has `def`s as members, and does no initialization.
+ *
+ * The main use case for universal traits is to allow basic inheritance of methods for [[scala.AnyVal value classes]].
+ * For example,
+ *
+ * {{{
+ * trait Printable extends Any {
+ * def print(): Unit = println(this)
+ * }
+ * class Wrapper(val underlying: Int) extends AnyVal with Printable
+ *
+ * val w = new Wrapper(3)
+ * w.print()
+ * }}}
+ *
+ * See the [[http://docs.scala-lang.org/sips/pending/value-classes.html value classes guide]] for more
+ * details on the interplay of universal traits and value classes.
*/
abstract class Any {
/** Compares the receiver object (`this`) with the argument object (`that`) for equivalence.
*
- * The default implementations of this method is an [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]]:
+ * Any implementation of this method should be an [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]]:
*
* - It is reflexive: for any instance `x` of type `Any`, `x.equals(x)` should return `true`.
* - It is symmetric: for any instances `x` and `y` of type `Any`, `x.equals(y)` should return `true` if and
@@ -44,7 +63,7 @@ abstract class Any {
*
* @return the hash code value for this object.
*/
- def hashCode: Int
+ def hashCode(): Int
/** Returns a string representation of the object.
*
@@ -52,15 +71,16 @@ abstract class Any {
*
* @return a string representation of the object.
*/
- def toString: String
+ def toString(): String
/** Returns the runtime class representation of the object.
*
- * @return a class object corresponding to the static type of the receiver
+ * @return a class object corresponding to the runtime type of the receiver.
*/
def getClass(): Class[_]
/** Test two objects for equality.
+ * The expression `x == that` is equivalent to `if (x eq null) that eq null else x.equals(that)`.
*
* @param that the object to compare against this object for equality.
* @return `true` if the receiver object is equivalent to the argument; `false` otherwise.
@@ -74,15 +94,17 @@ abstract class Any {
*/
final def != (that: Any): Boolean = !(this == that)
- /** Equivalent to `x.hashCode` except for boxed numeric types.
+ /** Equivalent to `x.hashCode` except for boxed numeric types and `null`.
* For numerics, it returns a hash value which is consistent
* with value equality: if two value type instances compare
* as true, then ## will produce the same hash value for each
* of them.
+ * For `null` returns a hashcode where `null.hashCode` throws a
+ * `NullPointerException`.
*
* @return a hash value consistent with ==
*/
- final def ## : Int = sys.error("##")
+ final def ##(): Int = sys.error("##")
/** Test whether the dynamic type of the receiver object is `T0`.
*
diff --git a/src/library-aux/scala/AnyRef.scala b/src/library-aux/scala/AnyRef.scala
index 6792ba6..7d8b9f9 100644
--- a/src/library-aux/scala/AnyRef.scala
+++ b/src/library-aux/scala/AnyRef.scala
@@ -10,10 +10,13 @@ package scala
/** Class `AnyRef` is the root class of all ''reference types''.
* All types except the value types descend from this class.
+ * @template
*/
trait AnyRef extends Any {
- /** The equality method for reference types. See equals in [[scala.Any]].
+ /** The equality method for reference types. Default implementation delegates to `eq`.
+ *
+ * See also `equals` in [[scala.Any]].
*
* @param that the object to compare against this object for equality.
* @return `true` if the receiver object is equivalent to the argument; `false` otherwise.
diff --git a/src/library/rootdoc.txt b/src/library/rootdoc.txt
new file mode 100644
index 0000000..0722d80
--- /dev/null
+++ b/src/library/rootdoc.txt
@@ -0,0 +1,28 @@
+This is the documentation for the Scala standard library.
+
+== Package structure ==
+
+The [[scala]] package contains core types.
+
+[[scala.collection `scala.collection`]] and its subpackages contain a collections framework with higher-order functions for manipulation. Both [[scala.collection.immutable `scala.collection.immutable`]] and [[scala.collection.mutable `scala.collection.mutable`]] data structures are available, with immutable as the default. The [[scala.collection.parallel `scala.collection.parallel`]] collections provide automatic parallel operation.
+
+Other important packages include:
+
+ - [[scala.actors `scala.actors`]] - Concurrency framework inspired by Erlang.
+ - [[scala.io `scala.io`]] - Input and output.
+ - [[scala.math `scala.math`]] - Basic math functions and additional numeric types.
+ - [[scala.sys `scala.sys`]] - Interaction with other processes and the operating system.
+ - [[scala.util.matching `scala.util.matching`]] - Pattern matching in text using regular expressions.
+ - [[scala.util.parsing.combinator `scala.util.parsing.combinator`]] - Composable combinators for parsing.
+ - [[scala.xml `scala.xml`]] - XML parsing, manipulation, and serialization.
+
+Many other packages exist. See the complete list on the left.
+
+== Automatic imports ==
+
+Identifiers in the scala package and the [[scala.Predef `scala.Predef`]] object are always in scope by default.
+
+Some of these identifiers are type aliases provided as shortcuts to commonly used classes. For example, `List` is an alias for
+[[scala.collection.immutable.List `scala.collection.immutable.List`]].
+
+Other aliases refer to classes provided by the underlying platform. For example, on the JVM, `String` is an alias for `java.lang.String`.
diff --git a/src/library/scala/AnyVal.scala b/src/library/scala/AnyVal.scala
index 8500427..0d6ba24 100644
--- a/src/library/scala/AnyVal.scala
+++ b/src/library/scala/AnyVal.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,8 +9,8 @@
package scala
/** `AnyVal` is the root class of all ''value types'', which describe values
- * not implemented as objects in the underlying host system. The value classes
- * are specified in SLS 12.2.
+ * not implemented as objects in the underlying host system. Value classes
+ * are specified in Scala Language Specification, section 12.2.
*
* The standard implementation includes nine `AnyVal` subtypes:
*
@@ -21,8 +21,37 @@ package scala
*
* Other groupings:
*
- * The ''subrange types'' are [[scala.Byte]], [[scala.Short]], and [[scala.Char]].
- * The ''integer types'' include the subrange types as well as [[scala.Int]] and [[scala.Long]].
- * The ''floating point types'' are [[scala.Float]] and [[scala.Double]].
+ * - The ''subrange types'' are [[scala.Byte]], [[scala.Short]], and [[scala.Char]].
+ * - The ''integer types'' include the subrange types as well as [[scala.Int]] and [[scala.Long]].
+ * - The ''floating point types'' are [[scala.Float]] and [[scala.Double]].
+ *
+ * Prior to Scala 2.10, `AnyVal` was a sealed trait. Beginning with Scala 2.10,
+ * however, it is possible to define a subclass of `AnyVal` called a ''user-defined value class''
+ * which is treated specially by the compiler. Properly-defined user value classes provide a way
+ * to improve performance on user-defined types by avoiding object allocation at runtime, and by
+ * replacing virtual method invocations with static method invocations.
+ *
+ * User-defined value classes which avoid object allocation...
+ *
+ * - must have a single, public `val` parameter that is the underlying runtime representation.
+ * - can define `def`s, but no `val`s, `var`s, or nested `traits`s, `class`es or `object`s.
+ * - typically extend no other trait apart from `AnyVal`.
+ * - cannot be used in type tests or pattern matching.
+ * - may not override `equals` or `hashCode` methods.
+ *
+ * A minimal example:
+ * {{{
+ * class Wrapper(val underlying: Int) extends AnyVal {
+ * def foo: Wrapper = new Wrapper(underlying * 19)
+ * }
+ * }}}
+ *
+ * It's important to note that user-defined value classes are limited, and in some circumstances,
+ * still must allocate a value class instance at runtime. These limitations and circumstances are
+ * explained in greater detail in the [[http://docs.scala-lang.org/overviews/core/value-classes.html Value Classes Guide]]
+ * as well as in [[http://docs.scala-lang.org/sips/pending/value-classes.html SIP-15: Value Classes]],
+ * the Scala Improvement Proposal.
*/
-sealed trait AnyVal
+abstract class AnyVal extends Any with NotNull {
+ def getClass(): Class[_ <: AnyVal] = null
+}
diff --git a/src/library/scala/AnyValCompanion.scala b/src/library/scala/AnyValCompanion.scala
index d6cb498..302cafe 100644
--- a/src/library/scala/AnyValCompanion.scala
+++ b/src/library/scala/AnyValCompanion.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -18,4 +18,4 @@ package scala
* }}}
*
*/
-private[scala] trait AnyValCompanion extends SpecializableCompanion { }
+private[scala] trait AnyValCompanion extends Specializable { }
diff --git a/src/library/scala/App.scala b/src/library/scala/App.scala
index f42dc7d..90a8977 100644
--- a/src/library/scala/App.scala
+++ b/src/library/scala/App.scala
@@ -1,10 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
+
package scala
import scala.compat.Platform.currentTime
@@ -13,14 +14,24 @@ import scala.collection.mutable.ListBuffer
/** The `App` trait can be used to quickly turn objects
* into executable programs. Here is an example:
* {{{
- * object Main extends App {
- * Console.println("Hello World: " + (args mkString ", "))
- * }
+ * object Main extends App {
+ * Console.println("Hello World: " + (args mkString ", "))
+ * }
* }}}
* Here, object `Main` inherits the `main` method of `App`.
*
* `args` returns the current command line arguments as an array.
*
+ * ==Caveats==
+ *
+ * '''''It should be noted that this trait is implemented using the [[DelayedInit]]
+ * functionality, which means that fields of the object will not have been initialized
+ * before the main method has been executed.'''''
+ *
+ * It should also be noted that the `main` method will not normally need to be overridden:
+ * the purpose is to turn the whole class body into the “main method”. You should only
+ * chose to override it if you know what you are doing.
+ *
* @author Martin Odersky
* @version 2.1, 15/02/2011
*/
diff --git a/src/library/scala/Application.scala b/src/library/scala/Application.scala
index d3c024b..e7db0d2 100644
--- a/src/library/scala/Application.scala
+++ b/src/library/scala/Application.scala
@@ -1,68 +1,60 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
+
package scala
import scala.compat.Platform.currentTime
-/** <p>
- * The <code>Application</code> trait can be used to quickly turn objects
- * into executable programs, but is <em>not recommended</em>.
- * Here is an example:
- * </p><pre>
- * <b>object</b> Main <b>extends</b> Application {
+/** The `Application` trait can be used to quickly turn objects
+ * into executable programs, but is ''not recommended''.
+ * Here is an example:
+ * {{{
+ * object Main extends Application {
* Console.println("Hello World!")
* }
- * </pre>
- * <p>
- * Here, object <code>Main</code> inherits the <code>main</code> method
- * of <code>Application</code>. The body of the <code>Main</code> object
- * defines the main program. This technique does not work if the main
- * program depends on command-line arguments (which are not accessible
- * with the technique presented here).
- * </p>
- * <p>
- * It is possible to time the execution of objects that inherit from class
- * <code>Application</code> by setting the global <code>scala.time</code>
- * property. Here is an example for benchmarking object <code>Main</code>:
- * </p><pre>
+ * }}}
+ * Here, object `Main` inherits the `main` method of `Application`.
+ * The body of the `Main` object defines the main program. This technique
+ * does not work if the main program depends on command-line arguments
+ * (which are not accessible with the technique presented here).
+ *
+ * It is possible to time the execution of objects that inherit from class
+ * `Application` by setting the global `scala.time`
+ * property. Here is an example for benchmarking object `Main`:
+ * {{{
* java -Dscala.time Main
- * </pre>
- * <p>
- * In practice the <code>Application</code> trait has a number of serious
- * pitfalls:
- * </p>
- * <ul>
- * <li> Threaded code that references the object will block until static
- * initialization is complete. However, because the entire execution of an
- * <code>object</code> extending <code>Application</code> takes place during
- * static initialization, concurrent code will <em>always</em> deadlock if
- * it must synchronize with the enclosing object.</li>
- * <li>As described above, there is no way to obtain the
- * command-line arguments because all code in body of an <code>object</code>
- * extending <code>Application</code> is run as part of the static initialization
- * which occurs before <code>Application</code>'s <code>main</code> method
- * even begins execution.</li>
- * <li>Static initializers are run only once during program execution, and
+ * }}}
+ * In practice the `Application` trait has a number of serious pitfalls:
+ *
+ * - Threaded code that references the object will block until static
+ * initialization is complete. However, because the entire execution
+ * of an `object` extending `Application` takes place during
+ * static initialization, concurrent code will ''always'' deadlock if
+ * it must synchronize with the enclosing object.
+ * - As described above, there is no way to obtain the
+ * command-line arguments because all code in body of an `object`
+ * extending `Application` is run as part of the static initialization
+ * which occurs before `Application`'s `main` method
+ * even begins execution.
+ * - Static initializers are run only once during program execution, and
* JVM authors usually assume their execution to be relatively short.
- * Therefore, certain JVM configurations may become confused, or simply fail to
- * optimize or JIT the code in the body of an <code>object</code> extending
- * <code>Application</code>. This can lead to a significant
- * performance degradation.</li>
- * </ul>
+ * Therefore, certain JVM configurations may become confused, or simply
+ * fail to optimize or JIT the code in the body of an `object` extending
+ * `Application`. This can lead to a significant performance degradation.
*
- * It is recommended to use the `App` trait instead.
- * <pre>
- * <b>object</b> Main {
- * <b>def</b> main(args: Array[String]) {
+ * It is recommended to use the [[scala.App]] trait instead.
+ * {{{
+ * object Main {
+ * def main(args: Array[String]) {
* //..
* }
* }
- * </pre>
+ * }}}
*
* @author Matthias Zenger
* @version 1.0, 10/09/2003
@@ -70,8 +62,8 @@ import scala.compat.Platform.currentTime
@deprecated("use App instead", "2.9.0")
trait Application {
- /** The time when the execution of this program started, in milliseconds since 1
- * January 1970 UTC. */
+ /** The time when the execution of this program started,
+ * in milliseconds since 1 January 1970 UTC. */
val executionStart: Long = currentTime
/** The default main method.
@@ -79,7 +71,7 @@ trait Application {
* @param args the arguments passed to the main method
*/
def main(args: Array[String]) {
- if (util.Properties.propIsSet("scala.time")) {
+ if (util.Properties propIsSet "scala.time") {
val total = currentTime - executionStart
Console.println("[total " + total + "ms]")
}
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index ba7bef0..b9f5180 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,21 +11,21 @@ package scala
import scala.collection.generic._
import scala.collection.{ mutable, immutable }
import mutable.{ ArrayBuilder, ArraySeq }
-import compat.Platform.arraycopy
-import scala.reflect.ClassManifest
+import scala.compat.Platform.arraycopy
+import scala.reflect.ClassTag
import scala.runtime.ScalaRunTime.{ array_apply, array_update }
/** Contains a fallback builder for arrays when the element type
- * does not have a class manifest. In that case a generic array is built.
+ * does not have a class tag. In that case a generic array is built.
*/
class FallbackArrayBuilding {
/** A builder factory that generates a generic array.
- * Called instead of Array.newBuilder if the element type of an array
- * does not have a class manifest. Note that fallbackBuilder factory
- * needs an implicit parameter (otherwise it would not be dominated in implicit search
- * by Array.canBuildFrom). We make sure that that implicit search is always
- * successful.
+ * Called instead of `Array.newBuilder` if the element type of an array
+ * does not have a class tag. Note that fallbackBuilder factory
+ * needs an implicit parameter (otherwise it would not be dominated in
+ * implicit search by `Array.canBuildFrom`). We make sure that
+ * implicit search is always successful.
*/
implicit def fallbackCanBuildFrom[T](implicit m: DummyImplicit): CanBuildFrom[Array[_], T, ArraySeq[T]] =
new CanBuildFrom[Array[_], T, ArraySeq[T]] {
@@ -35,21 +35,39 @@ class FallbackArrayBuilding {
}
/** Utility methods for operating on arrays.
+ * For example:
+ * {{{
+ * val a = Array(1, 2)
+ * val b = Array.ofDim[Int](2)
+ * val c = Array.concat(a, b)
+ * }}}
+ * where the array objects `a`, `b` and `c` have respectively the values
+ * `Array(1, 2)`, `Array(0, 0)` and `Array(1, 2, 0, 0)`.
*
* @author Martin Odersky
* @version 1.0
*/
object Array extends FallbackArrayBuilding {
- implicit def canBuildFrom[T](implicit m: ClassManifest[T]): CanBuildFrom[Array[_], T, Array[T]] =
+ val emptyBooleanArray = new Array[Boolean](0)
+ val emptyByteArray = new Array[Byte](0)
+ val emptyCharArray = new Array[Char](0)
+ val emptyDoubleArray = new Array[Double](0)
+ val emptyFloatArray = new Array[Float](0)
+ val emptyIntArray = new Array[Int](0)
+ val emptyLongArray = new Array[Long](0)
+ val emptyShortArray = new Array[Short](0)
+ val emptyObjectArray = new Array[Object](0)
+
+ implicit def canBuildFrom[T](implicit t: ClassTag[T]): CanBuildFrom[Array[_], T, Array[T]] =
new CanBuildFrom[Array[_], T, Array[T]] {
- def apply(from: Array[_]) = ArrayBuilder.make[T]()(m)
- def apply() = ArrayBuilder.make[T]()(m)
+ def apply(from: Array[_]) = ArrayBuilder.make[T]()(t)
+ def apply() = ArrayBuilder.make[T]()(t)
}
/**
* Returns a new [[scala.collection.mutable.ArrayBuilder]].
*/
- def newBuilder[T](implicit m: ClassManifest[T]): ArrayBuilder[T] = ArrayBuilder.make[T]()(m)
+ def newBuilder[T](implicit t: ClassTag[T]): ArrayBuilder[T] = ArrayBuilder.make[T]()(t)
private def slowcopy(src : AnyRef,
srcPos : Int,
@@ -90,14 +108,16 @@ object Array extends FallbackArrayBuilding {
}
/** Returns an array of length 0 */
- def empty[T: ClassManifest]: Array[T] = new Array[T](0)
+ def empty[T: ClassTag]: Array[T] = new Array[T](0)
/** Creates an array with given elements.
*
* @param xs the elements to put in the array
* @return an array containing all elements from xs.
*/
- def apply[T: ClassManifest](xs: T*): Array[T] = {
+ // Subject to a compiler optimization in Cleanup.
+ // Array(e0, ..., en) is translated to { val a = new Array(3); a(i) = ei; a }
+ def apply[T: ClassTag](xs: T*): Array[T] = {
val array = new Array[T](xs.length)
var i = 0
for (x <- xs.iterator) { array(i) = x; i += 1 }
@@ -105,6 +125,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Boolean` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Boolean, xs: Boolean*): Array[Boolean] = {
val array = new Array[Boolean](xs.length + 1)
array(0) = x
@@ -114,6 +135,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Byte` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Byte, xs: Byte*): Array[Byte] = {
val array = new Array[Byte](xs.length + 1)
array(0) = x
@@ -123,6 +145,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Short` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Short, xs: Short*): Array[Short] = {
val array = new Array[Short](xs.length + 1)
array(0) = x
@@ -132,6 +155,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Char` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Char, xs: Char*): Array[Char] = {
val array = new Array[Char](xs.length + 1)
array(0) = x
@@ -141,6 +165,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Int` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Int, xs: Int*): Array[Int] = {
val array = new Array[Int](xs.length + 1)
array(0) = x
@@ -150,6 +175,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Long` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Long, xs: Long*): Array[Long] = {
val array = new Array[Long](xs.length + 1)
array(0) = x
@@ -159,6 +185,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Float` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Float, xs: Float*): Array[Float] = {
val array = new Array[Float](xs.length + 1)
array(0) = x
@@ -168,6 +195,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Double` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Double, xs: Double*): Array[Double] = {
val array = new Array[Double](xs.length + 1)
array(0) = x
@@ -186,23 +214,23 @@ object Array extends FallbackArrayBuilding {
}
/** Creates array with given dimensions */
- def ofDim[T: ClassManifest](n1: Int): Array[T] =
+ def ofDim[T: ClassTag](n1: Int): Array[T] =
new Array[T](n1)
/** Creates a 2-dimensional array */
- def ofDim[T: ClassManifest](n1: Int, n2: Int): Array[Array[T]] = {
+ def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = {
val arr: Array[Array[T]] = (new Array[Array[T]](n1): Array[Array[T]])
for (i <- 0 until n1) arr(i) = new Array[T](n2)
arr
// tabulate(n1)(_ => ofDim[T](n2))
}
/** Creates a 3-dimensional array */
- def ofDim[T: ClassManifest](n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] =
+ def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] =
tabulate(n1)(_ => ofDim[T](n2, n3))
/** Creates a 4-dimensional array */
- def ofDim[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] =
+ def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] =
tabulate(n1)(_ => ofDim[T](n2, n3, n4))
/** Creates a 5-dimensional array */
- def ofDim[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] =
+ def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] =
tabulate(n1)(_ => ofDim[T](n2, n3, n4, n5))
/** Concatenates all arrays into a single array.
@@ -210,7 +238,7 @@ object Array extends FallbackArrayBuilding {
* @param xss the given arrays
* @return the array created from concatenating `xss`
*/
- def concat[T: ClassManifest](xss: Array[T]*): Array[T] = {
+ def concat[T: ClassTag](xss: Array[T]*): Array[T] = {
val b = newBuilder[T]
b.sizeHint(xss.map(_.size).sum)
for (xs <- xss) b ++= xs
@@ -222,7 +250,7 @@ object Array extends FallbackArrayBuilding {
*
* Note that this means that `elem` is computed a total of n times:
* {{{
- * scala> Array.fill(3){ java.lang.Math.random }
+ * scala> Array.fill(3){ math.random }
* res3: Array[Double] = Array(0.365461167592537, 1.550395944913685E-4, 0.7907242137333306)
* }}}
*
@@ -231,7 +259,7 @@ object Array extends FallbackArrayBuilding {
* @return an Array of size n, where each element contains the result of computing
* `elem`.
*/
- def fill[T: ClassManifest](n: Int)(elem: => T): Array[T] = {
+ def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = {
val b = newBuilder[T]
b.sizeHint(n)
var i = 0
@@ -249,7 +277,7 @@ object Array extends FallbackArrayBuilding {
* @param n2 the number of elements in the 2nd dimension
* @param elem the element computation
*/
- def fill[T: ClassManifest](n1: Int, n2: Int)(elem: => T): Array[Array[T]] =
+ def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] =
tabulate(n1)(_ => fill(n2)(elem))
/** Returns a three-dimensional array that contains the results of some element
@@ -260,7 +288,7 @@ object Array extends FallbackArrayBuilding {
* @param n3 the number of elements in the 3nd dimension
* @param elem the element computation
*/
- def fill[T: ClassManifest](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] =
+ def fill[T: ClassTag](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] =
tabulate(n1)(_ => fill(n2, n3)(elem))
/** Returns a four-dimensional array that contains the results of some element
@@ -272,7 +300,7 @@ object Array extends FallbackArrayBuilding {
* @param n4 the number of elements in the 4th dimension
* @param elem the element computation
*/
- def fill[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => T): Array[Array[Array[Array[T]]]] =
+ def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => T): Array[Array[Array[Array[T]]]] =
tabulate(n1)(_ => fill(n2, n3, n4)(elem))
/** Returns a five-dimensional array that contains the results of some element
@@ -285,7 +313,7 @@ object Array extends FallbackArrayBuilding {
* @param n5 the number of elements in the 5th dimension
* @param elem the element computation
*/
- def fill[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => T): Array[Array[Array[Array[Array[T]]]]] =
+ def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => T): Array[Array[Array[Array[Array[T]]]]] =
tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem))
/** Returns an array containing values of a given function over a range of integer
@@ -295,7 +323,7 @@ object Array extends FallbackArrayBuilding {
* @param f The function computing element values
* @return A traversable consisting of elements `f(0),f(1), ..., f(n - 1)`
*/
- def tabulate[T: ClassManifest](n: Int)(f: Int => T): Array[T] = {
+ def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = {
val b = newBuilder[T]
b.sizeHint(n)
var i = 0
@@ -306,56 +334,56 @@ object Array extends FallbackArrayBuilding {
b.result
}
- /** Returns a two-dimensional array containing values of a given function over
- * ranges of integer values starting from 0.
+ /** Returns a two-dimensional array containing values of a given function
+ * over ranges of integer values starting from `0`.
*
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
* @param f The function computing element values
*/
- def tabulate[T: ClassManifest](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] =
+ def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] =
tabulate(n1)(i1 => tabulate(n2)(f(i1, _)))
- /** Returns a three-dimensional array containing values of a given function over
- * ranges of integer values starting from 0.
+ /** Returns a three-dimensional array containing values of a given function
+ * over ranges of integer values starting from `0`.
*
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
- * @param n3 the number of elements in the 3nd dimension
+ * @param n3 the number of elements in the 3rd dimension
* @param f The function computing element values
*/
- def tabulate[T: ClassManifest](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => T): Array[Array[Array[T]]] =
+ def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => T): Array[Array[Array[T]]] =
tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _)))
- /** Returns a four-dimensional array containing values of a given function over
- * ranges of integer values starting from 0.
+ /** Returns a four-dimensional array containing values of a given function
+ * over ranges of integer values starting from `0`.
*
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
- * @param n3 the number of elements in the 3nd dimension
+ * @param n3 the number of elements in the 3rd dimension
* @param n4 the number of elements in the 4th dimension
* @param f The function computing element values
*/
- def tabulate[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => T): Array[Array[Array[Array[T]]]] =
+ def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => T): Array[Array[Array[Array[T]]]] =
tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _)))
- /** Returns a five-dimensional array containing values of a given function over
- * ranges of integer values starting from 0.
+ /** Returns a five-dimensional array containing values of a given function
+ * over ranges of integer values starting from `0`.
*
* @param n1 the number of elements in the 1st dimension
* @param n2 the number of elements in the 2nd dimension
- * @param n3 the number of elements in the 3nd dimension
+ * @param n3 the number of elements in the 3rd dimension
* @param n4 the number of elements in the 4th dimension
* @param n5 the number of elements in the 5th dimension
* @param f The function computing element values
*/
- def tabulate[T: ClassManifest](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => T): Array[Array[Array[Array[Array[T]]]]] =
+ def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => T): Array[Array[Array[Array[Array[T]]]]] =
tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _)))
/** Returns an array containing a sequence of increasing integers in a range.
*
- * @param from the start value of the array
- * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned)
+ * @param start the start value of the array
+ * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned)
* @return the array with values in range `start, start + 1, ..., end - 1`
* up to, but excluding, `end`.
*/
@@ -388,7 +416,7 @@ object Array extends FallbackArrayBuilding {
* @param f the function that is repeatedly applied
* @return the array returning `len` values in the sequence `start, f(start), f(f(start)), ...`
*/
- def iterate[T: ClassManifest](start: T, len: Int)(f: T => T): Array[T] = {
+ def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = {
val b = newBuilder[T]
if (len > 0) {
@@ -409,71 +437,12 @@ object Array extends FallbackArrayBuilding {
/** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`.
*
* @param x the selector value
- * @return sequence wrapped in a [[scala.Some]], if x is a Seq, otherwise `None`
+ * @return sequence wrapped in a [[scala.Some]], if `x` is a Seq, otherwise `None`
*/
def unapplySeq[T](x: Array[T]): Option[IndexedSeq[T]] =
if (x == null) None else Some(x.toIndexedSeq)
// !!! the null check should to be necessary, but without it 2241 fails. Seems to be a bug
// in pattern matcher. @PP: I noted in #4364 I think the behavior is correct.
-
- /** Creates an array containing several copies of an element.
- *
- * @param n the length of the resulting array
- * @param elem the element composing the resulting array
- * @return an array composed of n elements all equal to elem
- */
- @deprecated("use `Array.fill' instead", "2.8.0")
- def make[T: ClassManifest](n: Int, elem: T): Array[T] = {
- val a = new Array[T](n)
- var i = 0
- while (i < n) {
- a(i) = elem
- i += 1
- }
- a
- }
-
- /** Creates an array containing the values of a given function `f`
- * over given range `[0..n)`
- */
- @deprecated("use `Array.tabulate' instead", "2.8.0")
- def fromFunction[T: ClassManifest](f: Int => T)(n: Int): Array[T] = {
- val a = new Array[T](n)
- var i = 0
- while (i < n) {
- a(i) = f(i)
- i += 1
- }
- a
- }
-
- /** Creates an array containing the values of a given function `f`
- * over given range `[0..n1, 0..n2)`
- */
- @deprecated("use `Array.tabulate' instead", "2.8.0")
- def fromFunction[T: ClassManifest](f: (Int, Int) => T)(n1: Int, n2: Int): Array[Array[T]] =
- fromFunction(i => fromFunction(f(i, _))(n2))(n1)
-
- /** Creates an array containing the values of a given function `f`
- * over given range `[0..n1, 0..n2, 0..n3)`
- */
- @deprecated("use `Array.tabulate' instead", "2.8.0")
- def fromFunction[T: ClassManifest](f: (Int, Int, Int) => T)(n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] =
- fromFunction(i => fromFunction(f(i, _, _))(n2, n3))(n1)
-
- /** Creates an array containing the values of a given function `f`
- * over given range `[0..n1, 0..n2, 0..n3, 0..n4)`
- */
- @deprecated("use `Array.tabulate' instead", "2.8.0")
- def fromFunction[T: ClassManifest](f: (Int, Int, Int, Int) => T)(n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] =
- fromFunction(i => fromFunction(f(i, _, _, _))(n2, n3, n4))(n1)
-
- /** Creates an array containing the values of a given function `f`
- * over given range `[0..n1, 0..n2, 0..n3, 0..n4, 0..n5)`
- */
- @deprecated("use `Array.tabulate' instead", "2.8.0")
- def fromFunction[T: ClassManifest](f: (Int, Int, Int, Int, Int) => T)(n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] =
- fromFunction(i => fromFunction(f(i, _, _, _, _))(n2, n3, n4, n5))(n1)
}
/** Arrays are mutable, indexed collections of values. `Array[T]` is Scala's representation
@@ -489,13 +458,11 @@ object Array extends FallbackArrayBuilding {
* Arrays make use of two common pieces of Scala syntactic sugar, shown on lines 2 and 3 of the above
* example code.
* Line 2 is translated into a call to `apply(Int)`, while line 3 is translated into a call to
- * `update(Int, T)`. For more information on these transformations, see the
- * [[http://www.scala-lang.org/docu/files/ScalaReference.pdf Scala Language Specification v2.8]], Sections
- * 6.6 and 6.15 respectively.
+ * `update(Int, T)`.
*
* Two implicit conversions exist in [[scala.Predef]] that are frequently applied to arrays: a conversion
* to [[scala.collection.mutable.ArrayOps]] (shown on line 4 of the example above) and a conversion
- * to [[scala.collection.mutable.WrappedArray]] (a subtype of [[scala.collections.Seq]]).
+ * to [[scala.collection.mutable.WrappedArray]] (a subtype of [[scala.collection.Seq]]).
* Both types make available many of the standard operations found in the Scala collections API.
* The conversion to `ArrayOps` is temporary, as all operations defined on `ArrayOps` return an `Array`,
* while the conversion to `WrappedArray` is permanent as all operations return a `WrappedArray`.
@@ -516,67 +483,25 @@ object Array extends FallbackArrayBuilding {
*
* @author Martin Odersky
* @version 1.0
- * @see [[http://www.scala-lang.org/docu/files/collections-api/collections_38.html#anchor "The Scala 2.8 Collections' API"]]
- * section on `Array` by Martin Odersky for more information.
+ * @see [[http://www.scala-lang.org/docu/files/ScalaReference.pdf Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.)
+ * @see [[http://docs.scala-lang.org/sips/completed/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8.
+ * @see [[http://docs.scala-lang.org/overviews/collections/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information.
+ * @define coll array
+ * @define Coll `Array`
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define collectExample
+ * @define undefinedorder
+ * @define thatinfo the class of the returned collection. In the standard library configuration,
+ * `That` is either `Array[B]` if an ClassTag is available for B or `ArraySeq[B]` otherwise.
+ * @define zipthatinfo $thatinfo
+ * @define bfinfo an implicit value of class `CanBuildFrom` which determines the result class `That` from the current
+ * representation type `Repr` and the new element type `B`.
*/
final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable {
- /** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
- def this(dim1: Int, dim2: Int) = {
- this(dim1)
- throw new Error()
- }
-
- /** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
- def this(dim1: Int, dim2: Int, dim3: Int) = {
- this(dim1)
- throw new Error()
- }
-
- /** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
- def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int) = {
- this(dim1)
- throw new Error()
- }
-
- /** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
- def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int, dim5: Int) = {
- this(dim1);
- throw new Error()
- }
-
- /** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
- def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int, dim5: Int, dim6: Int) = {
- this(dim1)
- throw new Error()
- }
-
- /** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
- def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int, dim5: Int, dim6: Int, dim7: Int) = {
- this(dim1)
- throw new Error()
- }
-
- /** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
- def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int, dim5: Int, dim6: Int, dim7: Int, dim8: Int) = {
- this(dim1)
- throw new Error()
- }
-
- /** Multidimensional array creation */
- @deprecated("use `Array.ofDim' instead", "2.8.0")
- def this(dim1: Int, dim2: Int, dim3: Int, dim4: Int, dim5: Int, dim6: Int, dim7: Int, dim8: Int, dim9: Int) = {
- this(dim1)
- throw new Error()
- }
-
/** The length of the array */
def length: Int = throw new Error()
@@ -606,5 +531,5 @@ final class Array[T](_length: Int) extends java.io.Serializable with java.lang.C
*
* @return A clone of the Array.
*/
- override def clone: Array[T] = throw new Error()
+ override def clone(): Array[T] = throw new Error()
}
diff --git a/src/library/scala/Boolean.scala b/src/library/scala/Boolean.scala
index f77bdd2..440e546 100644
--- a/src/library/scala/Boolean.scala
+++ b/src/library/scala/Boolean.scala
@@ -10,27 +10,106 @@
package scala
-/** `Boolean` is a member of the value classes, those whose instances are
- * not represented as objects by the underlying host system.
+import scala.language.implicitConversions
+
+/** `Boolean` (equivalent to Java's `boolean` primitive type) is a
+ * subtype of [[scala.AnyVal]]. Instances of `Boolean` are not
+ * represented by an object in the underlying runtime system.
*
* There is an implicit conversion from [[scala.Boolean]] => [[scala.runtime.RichBoolean]]
* which provides useful non-primitive operations.
*/
-final class Boolean extends AnyVal {
- def unary_! : Boolean = sys.error("stub")
+final abstract class Boolean private extends AnyVal {
+ /**
+ * Negates a Boolean expression.
+ *
+ * - `!a` results in `false` if and only if `a` evaluates to `true` and
+ * - `!a` results in `true` if and only if `a` evaluates to `false`.
+ *
+ * @return the negated expression
+ */
+ def unary_! : Boolean
+
+ /**
+ * Compares two Boolean expressions and returns `true` if they evaluate to the same value.
+ *
+ * `a == b` returns `true` if and only if
+ * - `a` and `b` are `true` or
+ * - `a` and `b` are `false`.
+ */
+ def ==(x: Boolean): Boolean
+
+ /**
+ * Compares two Boolean expressions and returns `true` if they evaluate to a different value.
+ *
+ * `a != b` returns `true` if and only if
+ * - `a` is `true` and `b` is `false` or
+ * - `a` is `false` and `b` is `true`.
+ */
+ def !=(x: Boolean): Boolean
+
+ /**
+ * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+ *
+ * `a || b` returns `true` if and only if
+ * - `a` is `true` or
+ * - `b` is `true` or
+ * - `a` and `b` are `true`.
+ *
+ * @note This method uses 'short-circuit' evaluation and
+ * behaves as if it was declared as `def ||(x: => Boolean): Boolean`.
+ * If `a` evaluates to `true`, `true` is returned without evaluating `b`.
+ */
+ def ||(x: Boolean): Boolean
+
+ /**
+ * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+ *
+ * `a && b` returns `true` if and only if
+ * - `a` and `b` are `true`.
+ *
+ * @note This method uses 'short-circuit' evaluation and
+ * behaves as if it was declared as `def &&(x: => Boolean): Boolean`.
+ * If `a` evaluates to `false`, `false` is returned without evaluating `b`.
+ */
+ def &&(x: Boolean): Boolean
- def ==(x: Boolean): Boolean = sys.error("stub")
- def !=(x: Boolean): Boolean = sys.error("stub")
- def ||(x: Boolean): Boolean = sys.error("stub")
- def &&(x: Boolean): Boolean = sys.error("stub")
// Compiler won't build with these seemingly more accurate signatures
- // def ||(x: => Boolean): Boolean = sys.error("stub")
- // def &&(x: => Boolean): Boolean = sys.error("stub")
- def |(x: Boolean): Boolean = sys.error("stub")
- def &(x: Boolean): Boolean = sys.error("stub")
- def ^(x: Boolean): Boolean = sys.error("stub")
+ // def ||(x: => Boolean): Boolean
+ // def &&(x: => Boolean): Boolean
+
+ /**
+ * Compares two Boolean expressions and returns `true` if one or both of them evaluate to true.
+ *
+ * `a | b` returns `true` if and only if
+ * - `a` is `true` or
+ * - `b` is `true` or
+ * - `a` and `b` are `true`.
+ *
+ * @note This method evaluates both `a` and `b`, even if the result is already determined after evaluating `a`.
+ */
+ def |(x: Boolean): Boolean
- def getClass(): Class[Boolean] = sys.error("stub")
+ /**
+ * Compares two Boolean expressions and returns `true` if both of them evaluate to true.
+ *
+ * `a & b` returns `true` if and only if
+ * - `a` and `b` are `true`.
+ *
+ * @note This method evaluates both `a` and `b`, even if the result is already determined after evaluating `a`.
+ */
+ def &(x: Boolean): Boolean
+
+ /**
+ * Compares two Boolean expressions and returns `true` if they evaluate to a different value.
+ *
+ * `a ^ b` returns `true` if and only if
+ * - `a` is `true` and `b` is `false` or
+ * - `a` is `false` and `b` is `true`.
+ */
+ def ^(x: Boolean): Boolean
+
+ override def getClass(): Class[Boolean] = null
}
object Boolean extends AnyValCompanion {
@@ -55,5 +134,6 @@ object Boolean extends AnyValCompanion {
/** The String representation of the scala.Boolean companion object.
*/
override def toString = "object scala.Boolean"
+
}
diff --git a/src/library/scala/Byte.scala b/src/library/scala/Byte.scala
index 8c598e0..df0d2c7 100644
--- a/src/library/scala/Byte.scala
+++ b/src/library/scala/Byte.scala
@@ -10,141 +10,589 @@
package scala
-/** `Byte` is a member of the value classes, those whose instances are
- * not represented as objects by the underlying host system.
+import scala.language.implicitConversions
+
+/** `Byte`, a 8-bit signed integer (equivalent to Java's `byte` primitive type) is a
+ * subtype of [[scala.AnyVal]]. Instances of `Byte` are not
+ * represented by an object in the underlying runtime system.
*
* There is an implicit conversion from [[scala.Byte]] => [[scala.runtime.RichByte]]
* which provides useful non-primitive operations.
*/
-final class Byte extends AnyVal {
- def toByte: Byte = sys.error("stub")
- def toShort: Short = sys.error("stub")
- def toChar: Char = sys.error("stub")
- def toInt: Int = sys.error("stub")
- def toLong: Long = sys.error("stub")
- def toFloat: Float = sys.error("stub")
- def toDouble: Double = sys.error("stub")
-
- def unary_+ : Int = sys.error("stub")
- def unary_- : Int = sys.error("stub")
- def unary_~ : Int = sys.error("stub")
-
- def +(x: String): String = sys.error("stub")
-
- def <<(x: Int): Int = sys.error("stub")
- def <<(x: Long): Int = sys.error("stub")
- def >>>(x: Int): Int = sys.error("stub")
- def >>>(x: Long): Int = sys.error("stub")
- def >>(x: Int): Int = sys.error("stub")
- def >>(x: Long): Int = sys.error("stub")
-
- def ==(x: Byte): Boolean = sys.error("stub")
- def ==(x: Short): Boolean = sys.error("stub")
- def ==(x: Char): Boolean = sys.error("stub")
- def ==(x: Int): Boolean = sys.error("stub")
- def ==(x: Long): Boolean = sys.error("stub")
- def ==(x: Float): Boolean = sys.error("stub")
- def ==(x: Double): Boolean = sys.error("stub")
-
- def !=(x: Byte): Boolean = sys.error("stub")
- def !=(x: Short): Boolean = sys.error("stub")
- def !=(x: Char): Boolean = sys.error("stub")
- def !=(x: Int): Boolean = sys.error("stub")
- def !=(x: Long): Boolean = sys.error("stub")
- def !=(x: Float): Boolean = sys.error("stub")
- def !=(x: Double): Boolean = sys.error("stub")
-
- def <(x: Byte): Boolean = sys.error("stub")
- def <(x: Short): Boolean = sys.error("stub")
- def <(x: Char): Boolean = sys.error("stub")
- def <(x: Int): Boolean = sys.error("stub")
- def <(x: Long): Boolean = sys.error("stub")
- def <(x: Float): Boolean = sys.error("stub")
- def <(x: Double): Boolean = sys.error("stub")
-
- def <=(x: Byte): Boolean = sys.error("stub")
- def <=(x: Short): Boolean = sys.error("stub")
- def <=(x: Char): Boolean = sys.error("stub")
- def <=(x: Int): Boolean = sys.error("stub")
- def <=(x: Long): Boolean = sys.error("stub")
- def <=(x: Float): Boolean = sys.error("stub")
- def <=(x: Double): Boolean = sys.error("stub")
-
- def >(x: Byte): Boolean = sys.error("stub")
- def >(x: Short): Boolean = sys.error("stub")
- def >(x: Char): Boolean = sys.error("stub")
- def >(x: Int): Boolean = sys.error("stub")
- def >(x: Long): Boolean = sys.error("stub")
- def >(x: Float): Boolean = sys.error("stub")
- def >(x: Double): Boolean = sys.error("stub")
-
- def >=(x: Byte): Boolean = sys.error("stub")
- def >=(x: Short): Boolean = sys.error("stub")
- def >=(x: Char): Boolean = sys.error("stub")
- def >=(x: Int): Boolean = sys.error("stub")
- def >=(x: Long): Boolean = sys.error("stub")
- def >=(x: Float): Boolean = sys.error("stub")
- def >=(x: Double): Boolean = sys.error("stub")
-
- def |(x: Byte): Int = sys.error("stub")
- def |(x: Short): Int = sys.error("stub")
- def |(x: Char): Int = sys.error("stub")
- def |(x: Int): Int = sys.error("stub")
- def |(x: Long): Long = sys.error("stub")
-
- def &(x: Byte): Int = sys.error("stub")
- def &(x: Short): Int = sys.error("stub")
- def &(x: Char): Int = sys.error("stub")
- def &(x: Int): Int = sys.error("stub")
- def &(x: Long): Long = sys.error("stub")
-
- def ^(x: Byte): Int = sys.error("stub")
- def ^(x: Short): Int = sys.error("stub")
- def ^(x: Char): Int = sys.error("stub")
- def ^(x: Int): Int = sys.error("stub")
- def ^(x: Long): Long = sys.error("stub")
-
- def +(x: Byte): Int = sys.error("stub")
- def +(x: Short): Int = sys.error("stub")
- def +(x: Char): Int = sys.error("stub")
- def +(x: Int): Int = sys.error("stub")
- def +(x: Long): Long = sys.error("stub")
- def +(x: Float): Float = sys.error("stub")
- def +(x: Double): Double = sys.error("stub")
-
- def -(x: Byte): Int = sys.error("stub")
- def -(x: Short): Int = sys.error("stub")
- def -(x: Char): Int = sys.error("stub")
- def -(x: Int): Int = sys.error("stub")
- def -(x: Long): Long = sys.error("stub")
- def -(x: Float): Float = sys.error("stub")
- def -(x: Double): Double = sys.error("stub")
-
- def *(x: Byte): Int = sys.error("stub")
- def *(x: Short): Int = sys.error("stub")
- def *(x: Char): Int = sys.error("stub")
- def *(x: Int): Int = sys.error("stub")
- def *(x: Long): Long = sys.error("stub")
- def *(x: Float): Float = sys.error("stub")
- def *(x: Double): Double = sys.error("stub")
-
- def /(x: Byte): Int = sys.error("stub")
- def /(x: Short): Int = sys.error("stub")
- def /(x: Char): Int = sys.error("stub")
- def /(x: Int): Int = sys.error("stub")
- def /(x: Long): Long = sys.error("stub")
- def /(x: Float): Float = sys.error("stub")
- def /(x: Double): Double = sys.error("stub")
-
- def %(x: Byte): Int = sys.error("stub")
- def %(x: Short): Int = sys.error("stub")
- def %(x: Char): Int = sys.error("stub")
- def %(x: Int): Int = sys.error("stub")
- def %(x: Long): Long = sys.error("stub")
- def %(x: Float): Float = sys.error("stub")
- def %(x: Double): Double = sys.error("stub")
-
- def getClass(): Class[Byte] = sys.error("stub")
+final abstract class Byte private extends AnyVal {
+ def toByte: Byte
+ def toShort: Short
+ def toChar: Char
+ def toInt: Int
+ def toLong: Long
+ def toFloat: Float
+ def toDouble: Double
+
+ /**
+ * Returns the bitwise negation of this value.
+ * @example {{{
+ * ~5 == -6
+ * // in binary: ~00000101 ==
+ * // 11111010
+ * }}}
+ */
+ def unary_~ : Int
+ /**
+ * Returns this value, unmodified.
+ */
+ def unary_+ : Int
+ /**
+ * Returns the negation of this value.
+ */
+ def unary_- : Int
+
+ def +(x: String): String
+
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the new right bits with zeroes.
+ * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
+ */
+ def <<(x: Int): Int
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the new right bits with zeroes.
+ * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
+ */
+ def <<(x: Long): Int
+ /**
+ * Returns this value bit-shifted right by the specified number of bits,
+ * filling the new left bits with zeroes.
+ * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
+ * @example {{{
+ * -21 >>> 3 == 536870909
+ * // in binary: 11111111 11111111 11111111 11101011 >>> 3 ==
+ * // 00011111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>>(x: Int): Int
+ /**
+ * Returns this value bit-shifted right by the specified number of bits,
+ * filling the new left bits with zeroes.
+ * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
+ * @example {{{
+ * -21 >>> 3 == 536870909
+ * // in binary: 11111111 11111111 11111111 11101011 >>> 3 ==
+ * // 00011111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>>(x: Long): Int
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the right bits with the same value as the left-most bit of this.
+ * The effect of this is to retain the sign of the value.
+ * @example {{{
+ * -21 >> 3 == -3
+ * // in binary: 11111111 11111111 11111111 11101011 >> 3 ==
+ * // 11111111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>(x: Int): Int
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the right bits with the same value as the left-most bit of this.
+ * The effect of this is to retain the sign of the value.
+ * @example {{{
+ * -21 >> 3 == -3
+ * // in binary: 11111111 11111111 11111111 11101011 >> 3 ==
+ * // 11111111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>(x: Long): Int
+
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Short): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Char): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Int): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Long): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Float): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Short): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Char): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Int): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Long): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Float): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Short): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Char): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Int): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Long): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Float): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Double): Boolean
+
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Byte): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Short): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Char): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Int): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Long): Long
+
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Byte): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Short): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Char): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Int): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Long): Long
+
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Byte): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Short): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Char): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Int): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Long): Long
+
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Byte): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Short): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Char): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Int): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Long): Long
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Float): Float
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Double): Double
+
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Byte): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Short): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Char): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Int): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Long): Long
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Float): Float
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Double): Double
+
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Byte): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Short): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Char): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Int): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Long): Long
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Float): Float
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Double): Double
+
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Byte): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Short): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Char): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Int): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Long): Long
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Float): Float
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Double): Double
+
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Byte): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Short): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Char): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Int): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Long): Long
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Float): Float
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Double): Double
+
+ override def getClass(): Class[Byte] = null
}
object Byte extends AnyValCompanion {
@@ -176,5 +624,13 @@ object Byte extends AnyValCompanion {
/** The String representation of the scala.Byte companion object.
*/
override def toString = "object scala.Byte"
+
+ /** Language mandated coercions from Byte to "wider" types.
+ */
+ implicit def byte2short(x: Byte): Short = x.toShort
+ implicit def byte2int(x: Byte): Int = x.toInt
+ implicit def byte2long(x: Byte): Long = x.toLong
+ implicit def byte2float(x: Byte): Float = x.toFloat
+ implicit def byte2double(x: Byte): Double = x.toDouble
}
diff --git a/src/library/scala/Cell.scala b/src/library/scala/Cell.scala
deleted file mode 100644
index f4fc3e3..0000000
--- a/src/library/scala/Cell.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-
-/** A <code>Cell</code> is a generic wrapper which completely
- * hides the functionality of the wrapped object. The wrapped
- * object is accessible via the <code>elem</code> accessor method.
- *
- * @author Martin Odersky
- * @version 1.0, 08/08/2003
- */
- at deprecated("use `scala.Option` or `scala.Some` instead", "2.9.0")
-case class Cell[+T](elem: T)
diff --git a/src/library/scala/Char.scala b/src/library/scala/Char.scala
index a8f1512..1fa0c0d 100644
--- a/src/library/scala/Char.scala
+++ b/src/library/scala/Char.scala
@@ -10,141 +10,589 @@
package scala
-/** `Char` is a member of the value classes, those whose instances are
- * not represented as objects by the underlying host system.
+import scala.language.implicitConversions
+
+/** `Char`, a 16-bit unsigned integer (equivalent to Java's `char` primitive type) is a
+ * subtype of [[scala.AnyVal]]. Instances of `Char` are not
+ * represented by an object in the underlying runtime system.
*
* There is an implicit conversion from [[scala.Char]] => [[scala.runtime.RichChar]]
* which provides useful non-primitive operations.
*/
-final class Char extends AnyVal {
- def toByte: Byte = sys.error("stub")
- def toShort: Short = sys.error("stub")
- def toChar: Char = sys.error("stub")
- def toInt: Int = sys.error("stub")
- def toLong: Long = sys.error("stub")
- def toFloat: Float = sys.error("stub")
- def toDouble: Double = sys.error("stub")
-
- def unary_+ : Int = sys.error("stub")
- def unary_- : Int = sys.error("stub")
- def unary_~ : Int = sys.error("stub")
-
- def +(x: String): String = sys.error("stub")
-
- def <<(x: Int): Int = sys.error("stub")
- def <<(x: Long): Int = sys.error("stub")
- def >>>(x: Int): Int = sys.error("stub")
- def >>>(x: Long): Int = sys.error("stub")
- def >>(x: Int): Int = sys.error("stub")
- def >>(x: Long): Int = sys.error("stub")
-
- def ==(x: Byte): Boolean = sys.error("stub")
- def ==(x: Short): Boolean = sys.error("stub")
- def ==(x: Char): Boolean = sys.error("stub")
- def ==(x: Int): Boolean = sys.error("stub")
- def ==(x: Long): Boolean = sys.error("stub")
- def ==(x: Float): Boolean = sys.error("stub")
- def ==(x: Double): Boolean = sys.error("stub")
-
- def !=(x: Byte): Boolean = sys.error("stub")
- def !=(x: Short): Boolean = sys.error("stub")
- def !=(x: Char): Boolean = sys.error("stub")
- def !=(x: Int): Boolean = sys.error("stub")
- def !=(x: Long): Boolean = sys.error("stub")
- def !=(x: Float): Boolean = sys.error("stub")
- def !=(x: Double): Boolean = sys.error("stub")
-
- def <(x: Byte): Boolean = sys.error("stub")
- def <(x: Short): Boolean = sys.error("stub")
- def <(x: Char): Boolean = sys.error("stub")
- def <(x: Int): Boolean = sys.error("stub")
- def <(x: Long): Boolean = sys.error("stub")
- def <(x: Float): Boolean = sys.error("stub")
- def <(x: Double): Boolean = sys.error("stub")
-
- def <=(x: Byte): Boolean = sys.error("stub")
- def <=(x: Short): Boolean = sys.error("stub")
- def <=(x: Char): Boolean = sys.error("stub")
- def <=(x: Int): Boolean = sys.error("stub")
- def <=(x: Long): Boolean = sys.error("stub")
- def <=(x: Float): Boolean = sys.error("stub")
- def <=(x: Double): Boolean = sys.error("stub")
-
- def >(x: Byte): Boolean = sys.error("stub")
- def >(x: Short): Boolean = sys.error("stub")
- def >(x: Char): Boolean = sys.error("stub")
- def >(x: Int): Boolean = sys.error("stub")
- def >(x: Long): Boolean = sys.error("stub")
- def >(x: Float): Boolean = sys.error("stub")
- def >(x: Double): Boolean = sys.error("stub")
-
- def >=(x: Byte): Boolean = sys.error("stub")
- def >=(x: Short): Boolean = sys.error("stub")
- def >=(x: Char): Boolean = sys.error("stub")
- def >=(x: Int): Boolean = sys.error("stub")
- def >=(x: Long): Boolean = sys.error("stub")
- def >=(x: Float): Boolean = sys.error("stub")
- def >=(x: Double): Boolean = sys.error("stub")
-
- def |(x: Byte): Int = sys.error("stub")
- def |(x: Short): Int = sys.error("stub")
- def |(x: Char): Int = sys.error("stub")
- def |(x: Int): Int = sys.error("stub")
- def |(x: Long): Long = sys.error("stub")
-
- def &(x: Byte): Int = sys.error("stub")
- def &(x: Short): Int = sys.error("stub")
- def &(x: Char): Int = sys.error("stub")
- def &(x: Int): Int = sys.error("stub")
- def &(x: Long): Long = sys.error("stub")
-
- def ^(x: Byte): Int = sys.error("stub")
- def ^(x: Short): Int = sys.error("stub")
- def ^(x: Char): Int = sys.error("stub")
- def ^(x: Int): Int = sys.error("stub")
- def ^(x: Long): Long = sys.error("stub")
-
- def +(x: Byte): Int = sys.error("stub")
- def +(x: Short): Int = sys.error("stub")
- def +(x: Char): Int = sys.error("stub")
- def +(x: Int): Int = sys.error("stub")
- def +(x: Long): Long = sys.error("stub")
- def +(x: Float): Float = sys.error("stub")
- def +(x: Double): Double = sys.error("stub")
-
- def -(x: Byte): Int = sys.error("stub")
- def -(x: Short): Int = sys.error("stub")
- def -(x: Char): Int = sys.error("stub")
- def -(x: Int): Int = sys.error("stub")
- def -(x: Long): Long = sys.error("stub")
- def -(x: Float): Float = sys.error("stub")
- def -(x: Double): Double = sys.error("stub")
-
- def *(x: Byte): Int = sys.error("stub")
- def *(x: Short): Int = sys.error("stub")
- def *(x: Char): Int = sys.error("stub")
- def *(x: Int): Int = sys.error("stub")
- def *(x: Long): Long = sys.error("stub")
- def *(x: Float): Float = sys.error("stub")
- def *(x: Double): Double = sys.error("stub")
-
- def /(x: Byte): Int = sys.error("stub")
- def /(x: Short): Int = sys.error("stub")
- def /(x: Char): Int = sys.error("stub")
- def /(x: Int): Int = sys.error("stub")
- def /(x: Long): Long = sys.error("stub")
- def /(x: Float): Float = sys.error("stub")
- def /(x: Double): Double = sys.error("stub")
-
- def %(x: Byte): Int = sys.error("stub")
- def %(x: Short): Int = sys.error("stub")
- def %(x: Char): Int = sys.error("stub")
- def %(x: Int): Int = sys.error("stub")
- def %(x: Long): Long = sys.error("stub")
- def %(x: Float): Float = sys.error("stub")
- def %(x: Double): Double = sys.error("stub")
-
- def getClass(): Class[Char] = sys.error("stub")
+final abstract class Char private extends AnyVal {
+ def toByte: Byte
+ def toShort: Short
+ def toChar: Char
+ def toInt: Int
+ def toLong: Long
+ def toFloat: Float
+ def toDouble: Double
+
+ /**
+ * Returns the bitwise negation of this value.
+ * @example {{{
+ * ~5 == -6
+ * // in binary: ~00000101 ==
+ * // 11111010
+ * }}}
+ */
+ def unary_~ : Int
+ /**
+ * Returns this value, unmodified.
+ */
+ def unary_+ : Int
+ /**
+ * Returns the negation of this value.
+ */
+ def unary_- : Int
+
+ def +(x: String): String
+
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the new right bits with zeroes.
+ * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
+ */
+ def <<(x: Int): Int
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the new right bits with zeroes.
+ * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
+ */
+ def <<(x: Long): Int
+ /**
+ * Returns this value bit-shifted right by the specified number of bits,
+ * filling the new left bits with zeroes.
+ * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
+ * @example {{{
+ * -21 >>> 3 == 536870909
+ * // in binary: 11111111 11111111 11111111 11101011 >>> 3 ==
+ * // 00011111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>>(x: Int): Int
+ /**
+ * Returns this value bit-shifted right by the specified number of bits,
+ * filling the new left bits with zeroes.
+ * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
+ * @example {{{
+ * -21 >>> 3 == 536870909
+ * // in binary: 11111111 11111111 11111111 11101011 >>> 3 ==
+ * // 00011111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>>(x: Long): Int
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the right bits with the same value as the left-most bit of this.
+ * The effect of this is to retain the sign of the value.
+ * @example {{{
+ * -21 >> 3 == -3
+ * // in binary: 11111111 11111111 11111111 11101011 >> 3 ==
+ * // 11111111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>(x: Int): Int
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the right bits with the same value as the left-most bit of this.
+ * The effect of this is to retain the sign of the value.
+ * @example {{{
+ * -21 >> 3 == -3
+ * // in binary: 11111111 11111111 11111111 11101011 >> 3 ==
+ * // 11111111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>(x: Long): Int
+
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Short): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Char): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Int): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Long): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Float): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Short): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Char): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Int): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Long): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Float): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Short): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Char): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Int): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Long): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Float): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Double): Boolean
+
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Byte): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Short): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Char): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Int): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Long): Long
+
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Byte): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Short): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Char): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Int): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Long): Long
+
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Byte): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Short): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Char): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Int): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Long): Long
+
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Byte): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Short): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Char): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Int): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Long): Long
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Float): Float
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Double): Double
+
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Byte): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Short): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Char): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Int): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Long): Long
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Float): Float
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Double): Double
+
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Byte): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Short): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Char): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Int): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Long): Long
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Float): Float
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Double): Double
+
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Byte): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Short): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Char): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Int): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Long): Long
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Float): Float
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Double): Double
+
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Byte): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Short): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Char): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Int): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Long): Long
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Float): Float
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Double): Double
+
+ override def getClass(): Class[Char] = null
}
object Char extends AnyValCompanion {
@@ -176,5 +624,12 @@ object Char extends AnyValCompanion {
/** The String representation of the scala.Char companion object.
*/
override def toString = "object scala.Char"
+
+ /** Language mandated coercions from Char to "wider" types.
+ */
+ implicit def char2int(x: Char): Int = x.toInt
+ implicit def char2long(x: Char): Long = x.toLong
+ implicit def char2float(x: Char): Float = x.toFloat
+ implicit def char2double(x: Char): Double = x.toDouble
}
diff --git a/src/library/scala/Cloneable.scala b/src/library/scala/Cloneable.scala
new file mode 100644
index 0000000..2810e3c
--- /dev/null
+++ b/src/library/scala/Cloneable.scala
@@ -0,0 +1,14 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/**
+ * Classes extending this trait are cloneable across platforms (Java, .NET).
+ */
+trait Cloneable extends java.lang.Cloneable
diff --git a/src/library/scala/Console.scala b/src/library/scala/Console.scala
index f27393a..5b01550 100644
--- a/src/library/scala/Console.scala
+++ b/src/library/scala/Console.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -18,8 +18,7 @@ import scala.util.DynamicVariable
/** Implements functionality for
* printing Scala values on the terminal as well as reading specific values.
- * Also defines
- * constants for marking up text on ANSI terminals.
+ * Also defines constants for marking up text on ANSI terminals.
*
* @author Matthias Zenger
* @version 1.0, 03/09/2003
@@ -117,7 +116,6 @@ object Console {
/** Sets the default output stream for the duration
* of execution of one thunk.
*
- *
* @param out the new output stream.
* @param thunk the code to execute with
* the new output stream active
@@ -188,7 +186,6 @@ object Console {
* }
* }}}
*
- * @param in the new input stream.
* @param thunk the code to execute with
* the new input stream active
*
@@ -198,7 +195,6 @@ object Console {
def withIn[T](reader: Reader)(thunk: =>T): T =
inVar.withValue(new BufferedReader(reader))(thunk)
-
/** Sets the default input stream.
*
* @param in the new input stream.
@@ -243,13 +239,12 @@ object Console {
*/
def println(x: Any) { out.println(x) }
- /**
- * Prints its arguments as a formatted string to the default output, based on a string
- * pattern (in a fashion similar to printf in C).
+ /** Prints its arguments as a formatted string to the default output,
+ * based on a string pattern (in a fashion similar to printf in C).
*
- * The interpretation of the formatting patterns is described in
- * <a href="" target="contentFrame" class="java/util/Formatter">
- * <code>java.util.Formatter</code></a>.
+ * The interpretation of the formatting patterns is described in
+ * <a href="" target="contentFrame" class="java/util/Formatter">
+ * `java.util.Formatter`</a>.
*
* @param text the pattern for formatting the arguments.
* @param args the arguments used to instantiating the pattern.
@@ -257,7 +252,7 @@ object Console {
*/
def printf(text: String, args: Any*) { out.print(text format (args : _*)) }
- /** Read a full line from the default input. Returns <code>null</code> if the end of the
+ /** Read a full line from the default input. Returns `null` if the end of the
* input stream has been reached.
*
* @return the string read from the terminal or null if the end of stream was reached.
@@ -265,7 +260,7 @@ object Console {
def readLine(): String = in.readLine()
/** Print formatted text to the default output and read a full line from the default input.
- * Returns null if the end of the input stream has been reached.
+ * Returns `null` if the end of the input stream has been reached.
*
* @param text the format of the text to print out, as in `printf`.
* @param args the parameters used to instantiate the format, as in `printf`.
@@ -277,7 +272,7 @@ object Console {
}
/** Reads a boolean value from an entire line of the default input.
- * Has a fairly liberal interpretation of the input.
+ * Has a fairly liberal interpretation of the input.
*
* @return the boolean value read, or false if it couldn't be converted to a boolean
* @throws java.io.EOFException if the end of the input stream has been reached.
@@ -401,14 +396,14 @@ object Console {
s.toDouble
}
- /** Reads in some structured input (from the default input), specified by a format specifier.
- * See class <code>java.text.MessageFormat</code> for details of
+ /** Reads in some structured input (from the default input), specified by
+ * a format specifier. See class `java.text.MessageFormat` for details of
* the format specification.
*
* @param format the format of the input.
* @return a list of all extracted values.
- * @throws java.io.EOFException if the end of the
- * input stream has been reached.
+ * @throws java.io.EOFException if the end of the input stream has been
+ * reached.
*/
def readf(format: String): List[Any] = {
val s = readLine()
@@ -418,16 +413,18 @@ object Console {
textComponents(new MessageFormat(format).parse(s))
}
- /** Reads in some structured input (from the default input), specified by a format specifier, returning
- * only the first value extracted, according to the format specification.
+ /** Reads in some structured input (from the default input), specified by
+ * a format specifier, returning only the first value extracted, according
+ * to the format specification.
*
* @param format format string, as accepted by `readf`.
* @return The first value that was extracted from the input
*/
def readf1(format: String): Any = readf(format).head
- /** Reads in some structured input (from the default input), specified by a format specifier, returning
- * only the first two values extracted, according to the format specification.
+ /** Reads in some structured input (from the default input), specified
+ * by a format specifier, returning only the first two values extracted,
+ * according to the format specification.
*
* @param format format string, as accepted by `readf`.
* @return A [[scala.Tuple2]] containing the first two values extracted
@@ -437,8 +434,9 @@ object Console {
(res.head, res.tail.head)
}
- /** Reads in some structured input (from the default input), specified by a format specifier, returning
- * only the first three values extracted, according to the format specification.
+ /** Reads in some structured input (from the default input), specified
+ * by a format specifier, returning only the first three values extracted,
+ * according to the format specification.
*
* @param format format string, as accepted by `readf`.
* @return A [[scala.Tuple3]] containing the first three values extracted
diff --git a/src/library/scala/CountedIterator.scala b/src/library/scala/CountedIterator.scala
deleted file mode 100644
index 6f2c597..0000000
--- a/src/library/scala/CountedIterator.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-
-/** Counted iterators keep track of the number of elements seen so far
- *
- * @since 2.0
- */
-@deprecated("use iterator.zipWithIndex instead", "2.8.0")
-trait CountedIterator[+A] extends Iterator[A] {
- /** counts the elements in this iterator; counts start at 0
- */
- def count: Int
-
- override def counted : this.type = this
-}
diff --git a/src/library/scala/DelayedInit.scala b/src/library/scala/DelayedInit.scala
index ab9741f..12793e6 100644
--- a/src/library/scala/DelayedInit.scala
+++ b/src/library/scala/DelayedInit.scala
@@ -1,12 +1,48 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala
-/** Classes and traits inheriting the `DelayedInit` marker trait
- * will have their initialization code rewritten as follows.
- * <code> becomes delayedInit(<code>)
+/** Classes and objects (but note, not traits) inheriting the `DelayedInit`
+ * marker trait will have their initialization code rewritten as follows:
+ * `code` becomes `delayedInit(code)`.
+ *
* Initialization code comprises all statements and all value definitions
* that are executed during initialization.
+ *
+ * Example:
+ * {{{
+ * trait Helper extends DelayedInit {
+ * def delayedInit(body: => Unit) = {
+ * println("dummy text, printed before initialization of C")
+ * body // evaluates the initialization code of C
+ * }
+ * }
+ *
+ * class C extends Helper {
+ * println("this is the initialization code of C")
+ * }
+ *
+ * object Test extends App {
+ * val c = new C
+ * }
+ * }}}
+ *
+ * Should result in the following being printed:
+ * {{{
+ * dummy text, printed before initialization of C
+ * this is the initialization code of C
+ * }}}
+ *
+ * @see "Delayed Initialization" subsection of the Scala Language Specification (section 5.1)
+ *
+ * @author Martin Odersky
*/
trait DelayedInit {
def delayedInit(x: => Unit): Unit
-}
-
+}
\ No newline at end of file
diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala
index 108c620..f058d7c 100644
--- a/src/library/scala/Double.scala
+++ b/src/library/scala/Double.scala
@@ -10,115 +10,355 @@
package scala
-/** `Double` is a member of the value classes, those whose instances are
- * not represented as objects by the underlying host system.
+import scala.language.implicitConversions
+
+/** `Double`, a 64-bit IEEE-754 floating point number (equivalent to Java's `double` primitive type) is a
+ * subtype of [[scala.AnyVal]]. Instances of `Double` are not
+ * represented by an object in the underlying runtime system.
*
* There is an implicit conversion from [[scala.Double]] => [[scala.runtime.RichDouble]]
* which provides useful non-primitive operations.
*/
-final class Double extends AnyVal {
- def toByte: Byte = sys.error("stub")
- def toShort: Short = sys.error("stub")
- def toChar: Char = sys.error("stub")
- def toInt: Int = sys.error("stub")
- def toLong: Long = sys.error("stub")
- def toFloat: Float = sys.error("stub")
- def toDouble: Double = sys.error("stub")
-
- def unary_+ : Double = sys.error("stub")
- def unary_- : Double = sys.error("stub")
-
- def +(x: String): String = sys.error("stub")
-
- def ==(x: Byte): Boolean = sys.error("stub")
- def ==(x: Short): Boolean = sys.error("stub")
- def ==(x: Char): Boolean = sys.error("stub")
- def ==(x: Int): Boolean = sys.error("stub")
- def ==(x: Long): Boolean = sys.error("stub")
- def ==(x: Float): Boolean = sys.error("stub")
- def ==(x: Double): Boolean = sys.error("stub")
-
- def !=(x: Byte): Boolean = sys.error("stub")
- def !=(x: Short): Boolean = sys.error("stub")
- def !=(x: Char): Boolean = sys.error("stub")
- def !=(x: Int): Boolean = sys.error("stub")
- def !=(x: Long): Boolean = sys.error("stub")
- def !=(x: Float): Boolean = sys.error("stub")
- def !=(x: Double): Boolean = sys.error("stub")
-
- def <(x: Byte): Boolean = sys.error("stub")
- def <(x: Short): Boolean = sys.error("stub")
- def <(x: Char): Boolean = sys.error("stub")
- def <(x: Int): Boolean = sys.error("stub")
- def <(x: Long): Boolean = sys.error("stub")
- def <(x: Float): Boolean = sys.error("stub")
- def <(x: Double): Boolean = sys.error("stub")
-
- def <=(x: Byte): Boolean = sys.error("stub")
- def <=(x: Short): Boolean = sys.error("stub")
- def <=(x: Char): Boolean = sys.error("stub")
- def <=(x: Int): Boolean = sys.error("stub")
- def <=(x: Long): Boolean = sys.error("stub")
- def <=(x: Float): Boolean = sys.error("stub")
- def <=(x: Double): Boolean = sys.error("stub")
-
- def >(x: Byte): Boolean = sys.error("stub")
- def >(x: Short): Boolean = sys.error("stub")
- def >(x: Char): Boolean = sys.error("stub")
- def >(x: Int): Boolean = sys.error("stub")
- def >(x: Long): Boolean = sys.error("stub")
- def >(x: Float): Boolean = sys.error("stub")
- def >(x: Double): Boolean = sys.error("stub")
-
- def >=(x: Byte): Boolean = sys.error("stub")
- def >=(x: Short): Boolean = sys.error("stub")
- def >=(x: Char): Boolean = sys.error("stub")
- def >=(x: Int): Boolean = sys.error("stub")
- def >=(x: Long): Boolean = sys.error("stub")
- def >=(x: Float): Boolean = sys.error("stub")
- def >=(x: Double): Boolean = sys.error("stub")
-
- def +(x: Byte): Double = sys.error("stub")
- def +(x: Short): Double = sys.error("stub")
- def +(x: Char): Double = sys.error("stub")
- def +(x: Int): Double = sys.error("stub")
- def +(x: Long): Double = sys.error("stub")
- def +(x: Float): Double = sys.error("stub")
- def +(x: Double): Double = sys.error("stub")
-
- def -(x: Byte): Double = sys.error("stub")
- def -(x: Short): Double = sys.error("stub")
- def -(x: Char): Double = sys.error("stub")
- def -(x: Int): Double = sys.error("stub")
- def -(x: Long): Double = sys.error("stub")
- def -(x: Float): Double = sys.error("stub")
- def -(x: Double): Double = sys.error("stub")
-
- def *(x: Byte): Double = sys.error("stub")
- def *(x: Short): Double = sys.error("stub")
- def *(x: Char): Double = sys.error("stub")
- def *(x: Int): Double = sys.error("stub")
- def *(x: Long): Double = sys.error("stub")
- def *(x: Float): Double = sys.error("stub")
- def *(x: Double): Double = sys.error("stub")
-
- def /(x: Byte): Double = sys.error("stub")
- def /(x: Short): Double = sys.error("stub")
- def /(x: Char): Double = sys.error("stub")
- def /(x: Int): Double = sys.error("stub")
- def /(x: Long): Double = sys.error("stub")
- def /(x: Float): Double = sys.error("stub")
- def /(x: Double): Double = sys.error("stub")
-
- def %(x: Byte): Double = sys.error("stub")
- def %(x: Short): Double = sys.error("stub")
- def %(x: Char): Double = sys.error("stub")
- def %(x: Int): Double = sys.error("stub")
- def %(x: Long): Double = sys.error("stub")
- def %(x: Float): Double = sys.error("stub")
- def %(x: Double): Double = sys.error("stub")
-
- def getClass(): Class[Double] = sys.error("stub")
+final abstract class Double private extends AnyVal {
+ def toByte: Byte
+ def toShort: Short
+ def toChar: Char
+ def toInt: Int
+ def toLong: Long
+ def toFloat: Float
+ def toDouble: Double
+
+ /**
+ * Returns this value, unmodified.
+ */
+ def unary_+ : Double
+ /**
+ * Returns the negation of this value.
+ */
+ def unary_- : Double
+
+ def +(x: String): String
+
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Short): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Char): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Int): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Long): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Float): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Short): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Char): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Int): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Long): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Float): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Short): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Char): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Int): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Long): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Float): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Double): Boolean
+
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Byte): Double
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Short): Double
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Char): Double
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Int): Double
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Long): Double
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Float): Double
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Double): Double
+
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Byte): Double
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Short): Double
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Char): Double
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Int): Double
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Long): Double
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Float): Double
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Double): Double
+
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Byte): Double
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Short): Double
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Char): Double
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Int): Double
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Long): Double
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Float): Double
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Double): Double
+
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Byte): Double
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Short): Double
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Char): Double
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Int): Double
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Long): Double
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Float): Double
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Double): Double
+
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Byte): Double
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Short): Double
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Char): Double
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Int): Double
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Long): Double
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Float): Double
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Double): Double
+
+ override def getClass(): Class[Double] = null
}
object Double extends AnyValCompanion {
@@ -130,9 +370,6 @@ object Double extends AnyValCompanion {
final val PositiveInfinity = java.lang.Double.POSITIVE_INFINITY
final val NegativeInfinity = java.lang.Double.NEGATIVE_INFINITY
- @deprecated("use Double.MinPositiveValue instead", "2.9.0")
- final val Epsilon = MinPositiveValue
-
/** The negative number with the greatest (finite) absolute value which is representable
* by a Double. Note that it differs from [[java.lang.Double.MIN_VALUE]], which
* is the smallest positive value representable by a Double. In Scala that number
diff --git a/src/library/scala/Dynamic.scala b/src/library/scala/Dynamic.scala
index 2f99518..56eb4cf 100644
--- a/src/library/scala/Dynamic.scala
+++ b/src/library/scala/Dynamic.scala
@@ -1,13 +1,34 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala
-/** A marker trait that enables dynamic invocations. Instances `x` of this trait
- * allow calls `x.meth(args)` for arbitrary method names `meth` and argument lists
- * `args`. If a call is not natively supported by `x`, it is rewritten to
- * `x.applyDynamic("meth", args)`.
+/** A marker trait that enables dynamic invocations. Instances `x` of this
+ * trait allow method invocations `x.meth(args)` for arbitrary method
+ * names `meth` and argument lists `args` as well as field accesses
+ * `x.field` for arbitrary field names `field`.
+ *
+ * If a call is not natively supported by `x` (i.e. if type checking
+ * fails), it is rewritten according to the following rules:
+ *
+ * {{{
+ * foo.method("blah") ~~> foo.applyDynamic("method")("blah")
+ * foo.method(x = "blah") ~~> foo.applyDynamicNamed("method")(("x", "blah"))
+ * foo.method(x = 1, 2) ~~> foo.applyDynamicNamed("method")(("x", 1), ("", 2))
+ * foo.field ~~> foo.selectDynamic("field")
+ * foo.varia = 10 ~~> foo.updateDynamic("varia")(10)
+ * foo.arr(10) = 13 ~~> foo.selectDynamic("arr").update(10, 13)
+ * foo.arr(10) ~~> foo.applyDynamic("arr")(10)
+ * }}}
*
- * As of scala 2.9, scalac must receive the -Xexperimental optional for Dynamic
- * to receive this treatment.
+ * As of Scala 2.10, defining direct or indirect subclasses of this trait
+ * is only possible if the language feature `dynamics` is enabled.
*/
-trait Dynamic
+trait Dynamic extends Any
diff --git a/src/library/scala/Either.scala b/src/library/scala/Either.scala
deleted file mode 100644
index 61d43b6..0000000
--- a/src/library/scala/Either.scala
+++ /dev/null
@@ -1,609 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-
-/** Represents a value of one of two possible types (a disjoint union.)
- * Instances of Either are either an instance of [[scala.Left]] or [[scala.Right]].
- *
- * A common use of Either is as an alternative to [[scala.Option]] for dealing
- * with possible missing values. In this usage, [[scala.None]] is replaced
- * with a [[scala.Left]] which can contain useful information.
- * [[scala.Right]] takes the place of [[scala.Some]]. Convention dictates
- * that Left is used for failure and Right is used for success.
- *
- * For example, you could use ``Either[String, Int]`` to detect whether a
- * received input is a String or an Int.
- *
- * {{{
- * val in = Console.readLine("Type Either a string or an Int: ")
- * val result: Either[String,Int] = try {
- * Right(in.toInt)
- * } catch {
- * case e: Exception =>
- * Left(in)
- * }
- *
- * println( result match {
- * case Right(x) => "You passed me the Int: " + x + ", which I will increment. " + x + " + 1 = " + (x+1)
- * case Left(x) => "You passed me the String: " + x
- * })
- * }}}
- *
- * A ''projection'' can be used to selectively operate on a value of type Either,
- * depending on whether it is of type Left or Right. For example, to transform an
- * Either using a function, in the case where it's a Left, one can first apply
- * the `left` projection and invoke `map` on that projected Either. If a `right`
- * projection is applied to that Left, the original Left is returned, unmodified.
- *
- * {{{
- * val l: Either[String, Int] = Left("flower")
- * val r: Either[String, Int] = Right(12)
- * l.left.map(_.size): Either[Int, Int] // Left(6)
- * r.left.map(_.size): Either[Int, Int] // Right(12)
- * l.right.map(_.toDouble): Either[String, Double] // Left("flower")
- * r.right.map(_.toDouble): Either[String, Double] // Right(12.0)
- * }}}
- *
- * Like with other types which define a `map` method, the same can be achieved
- * using a for-comprehension:
- * {{{
- * for (s <- l.left) yield s.size // Left(6)
- * }}}
- *
- * To support multiple projections as generators in for-comprehensions, the Either
- * type also defines a `flatMap` method.
- *
- * @author <a href="mailto:research at workingmouse.com">Tony Morris</a>, Workingmouse
- * @version 1.0, 11/10/2008
- * @since 2.7
- */
-sealed abstract class Either[+A, +B] {
- /**
- * Projects this `Either` as a `Left`.
- */
- def left = Either.LeftProjection(this)
-
- /**
- * Projects this `Either` as a `Right`.
- */
- def right = Either.RightProjection(this)
-
- /**
- * Applies `fa` if this is a `Left` or `fb` if this is a `Right`.
- *
- * @example {{{
- * val result: Either[Exception, Value] = possiblyFailingOperation()
- * log(result.fold(
- * ex => "Operation failed with " + ex,
- * v => "Operation produced value: " + v
- * ))
- * }}}
- *
- * @param fa the function to apply if this is a `Left`
- * @param fb the function to apply if this is a `Right`
- * @return the results of applying the function
- */
- def fold[X](fa: A => X, fb: B => X) = this match {
- case Left(a) => fa(a)
- case Right(b) => fb(b)
- }
-
- /**
- * If this is a `Left`, then return the left value in `Right` or vice versa.
- *
- * @example {{{
- * val l: Either[String, Int] = Left("left")
- * val r: Either[Int, String] = l.swap // Result: Right("left")
- * }}}
- */
- def swap = this match {
- case Left(a) => Right(a)
- case Right(b) => Left(b)
- }
-
- /**
- * Joins an `Either` through `Right`.
- *
- * This method requires that the right side of this Either is itself an
- * Either type. That is, this must be some type like: {{{
- * Either[A, Either[A, C]]
- * }}} (which respects the type parameter bounds, shown below.)
- *
- * If this instance is a Right[Either[A, C]] then the contained Either[A, C]
- * will be returned, otherwise this value will be returned unmodified.
- *
- * @example {{{
- * Right[String, Either[String, Int]](Right(12)).joinRight // Result: Right(12)
- * Right[String, Either[String, Int]](Left("flower")).joinRight // Result: Left("flower")
- * Left[String, Either[String, Int]]("flower").joinRight // Result: Left("flower")
- * }}}
- *
- * This method, and `joinLeft`, are analogous to `Option#flatten`
- */
- def joinRight[A1 >: A, B1 >: B, C](implicit ev: B1 <:< Either[A1, C]): Either[A1, C] = this match {
- case Left(a) => Left(a)
- case Right(b) => b
- }
-
- /**
- * Joins an `Either` through `Left`.
- *
- * This method requires that the left side of this Either is itself an
- * Either type. That is, this must be some type like: {{{
- * Either[Either[C, B], B]
- * }}} (which respects the type parameter bounds, shown below.)
- *
- * If this instance is a Left[Either[C, B]] then the contained Either[C, B]
- * will be returned, otherwise this value will be returned unmodified.
- *
- * {{{
- * Left[Either[Int, String], String](Right("flower")).joinLeft // Result: Right("flower")
- * Left[Either[Int, String], String](Left(12)).joinLeft // Result: Left(12)
- * Right[Either[Int, String], String]("daisy").joinLeft // Result: Right("daisy")
- * }}}
- *
- * This method, and `joinRight`, are analogous to `Option#flatten`
- */
- def joinLeft[A1 >: A, B1 >: B, C](implicit ev: A1 <:< Either[C, B1]): Either[C, B1] = this match {
- case Left(a) => a
- case Right(b) => Right(b)
- }
-
- /**
- * Returns `true` if this is a `Left`, `false` otherwise.
- *
- * {{{
- * Left("tulip").isLeft // true
- * Right("venus fly-trap").isLeft // false
- * }}}
- */
- def isLeft: Boolean
-
- /**
- * Returns `true` if this is a `Right`, `false` otherwise.
- *
- * {{{
- * Left("tulip").isRight // false
- * Right("venus fly-trap").isRight // true
- * }}}
- */
- def isRight: Boolean
-}
-
-/**
- * The left side of the disjoint union, as opposed to the [[scala.Right]] side.
- *
- * @author <a href="mailto:research at workingmouse.com">Tony Morris</a>, Workingmouse
- * @version 1.0, 11/10/2008
- */
-final case class Left[+A, +B](a: A) extends Either[A, B] {
- def isLeft = true
- def isRight = false
-}
-
-/**
- * The right side of the disjoint union, as opposed to the [[scala.Left]] side.
- *
- * @author <a href="mailto:research at workingmouse.com">Tony Morris</a>, Workingmouse
- * @version 1.0, 11/10/2008
- */
-final case class Right[+A, +B](b: B) extends Either[A, B] {
- def isLeft = false
- def isRight = true
-}
-
-object Either {
- class MergeableEither[A](x: Either[A, A]) {
- def merge: A = x match {
- case Left(a) => a
- case Right(a) => a
- }
- }
-
- /**
- * Allows use of a ``merge`` method to extract values from Either instances
- * regardless of whether they are Left or Right.
- *
- * {{{
- * val l = Left(List(1)): Either[List[Int], Vector[Int]]
- * val r = Right(Vector(1)): Either[List[Int], Vector[Int]]
- * l.merge: Seq[Int] // List(1)
- * r.merge: Seq[Int] // Vector(1)
- * }}}
- */
- implicit def either2mergeable[A](x: Either[A, A]): MergeableEither[A] = new MergeableEither(x)
-
- /**
- * Projects an `Either` into a `Left`.
- *
- * This allows for-comprehensions over Either instances - for example {{{
- * for (s <- Left("flower").left) yield s.length // Left(6)
- * }}}
- *
- * Continuing the analogy with [[scala.Option]], a `LeftProjection` declares
- * that `Left` should be analogous to `Some` in some code.
- *
- * {{{
- * // using Option:
- * def interactWithDB(x: Query): Option[Result] =
- * try {
- * Some(getResultFromDatabase(x))
- * } catch {
- * case ex => None
- * }
- *
- * // this will only be executed if interactWithDB returns a Some
- * val report =
- * for (r <- interactWithDB(someQuery)) yield generateReport(r)
- * if (report.isDefined)
- * send(report)
- * else
- * log("report not generated, not sure why...")
- * }}}
- *
- * {{{
- * // using Either
- * def interactWithDB(x: Query): Either[Exception, Result] =
- * try {
- * Right(getResultFromDatabase(x))
- * } catch {
- * case ex => Left(ex)
- * }
- *
- * // this will only be executed if interactWithDB returns a Some
- * val report =
- * for (r <- interactWithDB(someQuery).right) yield generateReport(r)
- * if (report.isRight)
- * send(report)
- * else
- * log("report not generated, reason was " + report.left.get)
- * }}}
- *
- * @author <a href="mailto:research at workingmouse.com">Tony Morris</a>, Workingmouse
- * @version 1.0, 11/10/2008
- */
- final case class LeftProjection[+A, +B](e: Either[A, B]) {
- /**
- * Returns the value from this `Left` or throws `Predef.NoSuchElementException`
- * if this is a `Right`.
- *
- * {{{
- * Left(12).left.get // 12
- * Right(12).left.get // NoSuchElementException
- * }}}
- *
- * @throws Predef.NoSuchElementException if the projection is [[scala.Right]]
- */
- def get = e match {
- case Left(a) => a
- case Right(_) => throw new NoSuchElementException("Either.left.value on Right")
- }
-
- /**
- * Executes the given side-effecting function if this is a `Left`.
- *
- * {{{
- * Left(12).left.foreach(x => println(x)) // prints "12"
- * Right(12).left.foreach(x => println(x)) // doesn't print
- * }}}
- * @param e The side-effecting function to execute.
- */
- def foreach[U](f: A => U) = e match {
- case Left(a) => f(a)
- case Right(_) => {}
- }
-
- /**
- * Returns the value from this `Left` or the given argument if this is a
- * `Right`.
- *
- * {{{
- * Left(12).left.getOrElse(17) // 12
- * Right(12).left.getOrElse(17) // 17
- * }}}
- *
- */
- def getOrElse[AA >: A](or: => AA) = e match {
- case Left(a) => a
- case Right(_) => or
- }
-
- /**
- * Returns `true` if `Right` or returns the result of the application of
- * the given function to the `Left` value.
- *
- * {{{
- * Left(12).left.forall(_ > 10) // true
- * Left(7).left.forall(_ > 10) // false
- * Right(12).left.forall(_ > 10) // true
- * }}}
- *
- */
- def forall(f: A => Boolean) = e match {
- case Left(a) => f(a)
- case Right(_) => true
- }
-
- /**
- * Returns `false` if `Right` or returns the result of the application of
- * the given function to the `Left` value.
- *
- * {{{
- * Left(12).left.exists(_ > 10) // true
- * Left(7).left.exists(_ > 10) // false
- * Right(12).left.exists(_ > 10) // false
- * }}}
- *
- */
- def exists(f: A => Boolean) = e match {
- case Left(a) => f(a)
- case Right(_) => false
- }
-
- /**
- * Binds the given function across `Left`.
- *
- * {{{
- * Left(12).left.flatMap(x => Left("scala")) // Left("scala")
- * Right(12).left.flatMap(x => Left("scala") // Right(12)
- * }}}
- * @param The function to bind across `Left`.
- */
- def flatMap[BB >: B, X](f: A => Either[X, BB]) = e match {
- case Left(a) => f(a)
- case Right(b) => Right(b)
- }
-
- /**
- * Maps the function argument through `Left`.
- *
- * {{{
- * Left(12).left.map(_ + 2) // Left(14)
- * Right[Int, Int](12).left.map(_ + 2) // Right(12)
- * }}}
- */
- def map[X](f: A => X) = e match {
- case Left(a) => Left(f(a))
- case Right(b) => Right(b)
- }
-
- /**
- * Returns `None` if this is a `Right` or if the given predicate
- * `p` does not hold for the left value, otherwise, returns a `Left`.
- *
- * {{{
- * Left(12).left.filter(_ > 10) // Some(Left(12))
- * Left(7).left.filter(_ > 10) // None
- * Right(12).left.filter(_ > 10) // None
- * }}}
- */
- def filter[Y](p: A => Boolean): Option[Either[A, Y]] = e match {
- case Left(a) => if(p(a)) Some(Left(a)) else None
- case Right(b) => None
- }
-
- /**
- * Returns a `Seq` containing the `Left` value if it exists or an empty
- * `Seq` if this is a `Right`.
- *
- * {{{
- * Left(12).left.toSeq // Seq(12)
- * Right(12).left.toSeq // Seq()
- * }}}
- */
- def toSeq = e match {
- case Left(a) => Seq(a)
- case Right(_) => Seq.empty
- }
-
- /**
- * Returns a `Some` containing the `Left` value if it exists or a
- * `None` if this is a `Right`.
- *
- * {{{
- * Left(12).left.toOption // Some(12)
- * Right(12).left.toOption // None
- * }}}
- */
- def toOption = e match {
- case Left(a) => Some(a)
- case Right(_) => None
- }
- }
-
- /**
- * Projects an `Either` into a `Right`.
- *
- * This allows for-comprehensions over Either instances - for example {{{
- * for (s <- Right("flower").right) yield s.length // Right(6)
- * }}}
- *
- * Continuing the analogy with [[scala.Option]], a `RightProjection` declares
- * that `Right` should be analogous to `Some` in some code.
- *
- * Analogous to `LeftProjection`, see example usage in its documentation above.
- *
- * @author <a href="mailto:research at workingmouse.com">Tony Morris</a>, Workingmouse
- * @version 1.0, 11/10/2008
- */
- final case class RightProjection[+A, +B](e: Either[A, B]) {
-
- /**
- * Returns the value from this `Right` or throws
- * `Predef.NoSuchElementException` if this is a `Left`.
- *
- * {{{
- * Right(12).right.get // 12
- * Left(12).right.get // NoSuchElementException
- * }}}
- *
- * @throws Predef.NoSuchElementException if the projection is `Left`.
- */
- def get = e match {
- case Left(_) => throw new NoSuchElementException("Either.right.value on Left")
- case Right(a) => a
- }
-
- /**
- * Executes the given side-effecting function if this is a `Right`.
- *
- * {{{
- * Right(12).right.foreach(x => println(x)) // prints "12"
- * Left(12).right.foreach(x => println(x)) // doesn't print
- * }}}
- * @param e The side-effecting function to execute.
- */
- def foreach[U](f: B => U) = e match {
- case Left(_) => {}
- case Right(b) => f(b)
- }
-
- /**
- * Returns the value from this `Right` or the given argument if this is a
- * `Left`.
- *
- * {{{
- * Right(12).right.getOrElse(17) // 12
- * Left(12).right.getOrElse(17) // 17
- * }}}
- */
- def getOrElse[BB >: B](or: => BB) = e match {
- case Left(_) => or
- case Right(b) => b
- }
-
- /**
- * Returns `true` if `Left` or returns the result of the application of
- * the given function to the `Right` value.
- *
- * {{{
- * Right(12).right.forall(_ > 10) // true
- * Right(7).right.forall(_ > 10) // false
- * Left(12).right.forall(_ > 10) // true
- * }}}
- */
- def forall(f: B => Boolean) = e match {
- case Left(_) => true
- case Right(b) => f(b)
- }
-
- /**
- * Returns `false` if `Left` or returns the result of the application of
- * the given function to the `Right` value.
- *
- * {{{
- * Right(12).right.exists(_ > 10) // true
- * Right(7).right.exists(_ > 10) // false
- * Left(12).right.exists(_ > 10) // false
- * }}}
- */
- def exists(f: B => Boolean) = e match {
- case Left(_) => false
- case Right(b) => f(b)
- }
-
- /**
- * Binds the given function across `Right`.
- *
- * @param The function to bind across `Right`.
- */
- def flatMap[AA >: A, Y](f: B => Either[AA, Y]) = e match {
- case Left(a) => Left(a)
- case Right(b) => f(b)
- }
-
- /**
- * The given function is applied if this is a `Right`.
- *
- * {{{
- * Right(12).right.map(x => "flower") // Result: Right("flower")
- * Left(12).right.map(x => "flower") // Result: Left(12)
- * }}}
- */
- def map[Y](f: B => Y) = e match {
- case Left(a) => Left(a)
- case Right(b) => Right(f(b))
- }
-
- /** Returns `None` if this is a `Left` or if the
- * given predicate `p` does not hold for the right value,
- * otherwise, returns a `Right`.
- *
- * {{{
- * Right(12).right.filter(_ > 10) // Some(Right(12))
- * Right(7).right.filter(_ > 10) // None
- * Left(12).right.filter(_ > 10) // None
- * }}}
- */
- def filter[X](p: B => Boolean): Option[Either[X, B]] = e match {
- case Left(_) => None
- case Right(b) => if(p(b)) Some(Right(b)) else None
- }
-
- /** Returns a `Seq` containing the `Right` value if
- * it exists or an empty `Seq` if this is a `Left`.
- *
- * {{{
- * Right(12).right.toSeq // Seq(12)
- * Left(12).right.toSeq // Seq()
- * }}}
- */
- def toSeq = e match {
- case Left(_) => Seq.empty
- case Right(b) => Seq(b)
- }
-
- /** Returns a `Some` containing the `Right` value
- * if it exists or a `None` if this is a `Left`.
- *
- * {{{
- * Right(12).right.toOption // Some(12)
- * Left(12).right.toOption // None
- * }}}
- */
- def toOption = e match {
- case Left(_) => None
- case Right(b) => Some(b)
- }
- }
-
- @deprecated("use `x.joinLeft'", "2.8.0")
- def joinLeft[A, B](es: Either[Either[A, B], B]) =
- es.left.flatMap(x => x)
-
- @deprecated("use `x.joinRight'", "2.8.0")
- def joinRight[A, B](es: Either[A, Either[A, B]]) =
- es.right.flatMap(x => x)
-
- /**
- * Takes an `Either` to its contained value within `Left` or
- * `Right`.
- */
- @deprecated("use `x.merge'", "2.8.0")
- def merge[T](e: Either[T, T]) = e match {
- case Left(t) => t
- case Right(t) => t
- }
-
- /** If the condition is satisfied, return the given `B` in `Right`,
- * otherwise, return the given `A` in `Left`.
- *
- * {{{
- * val userInput: String = ...
- * Either.cond(
- * userInput.forall(_.isDigit) && userInput.size == 10,
- * PhoneNumber(userInput),
- * "The input (%s) does not look like a phone number".format(userInput)
- * }}}
- */
- def cond[A, B](test: Boolean, right: => B, left: => A): Either[A, B] =
- if (test) Right(right) else Left(left)
-}
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index 3c87aa5..47d7840 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,22 +8,22 @@
package scala
-import scala.collection.SetLike
-import scala.collection.{ mutable, immutable, generic }
+import scala.collection.{ mutable, immutable, generic, SortedSetLike, AbstractSet }
import java.lang.reflect.{ Modifier, Method => JMethod, Field => JField }
+import scala.reflect.NameTransformer._
+import java.util.regex.Pattern
-/**
- * Defines a finite set of values specific to the enumeration. Typically
- * these values enumerate all possible forms something can take and provide a
- * lightweight alternative to case classes.
+/** Defines a finite set of values specific to the enumeration. Typically
+ * these values enumerate all possible forms something can take and provide
+ * a lightweight alternative to case classes.
*
- * Each call to a `Value` method adds a new unique value to the
- * enumeration. To be accessible, these values are usually defined as
- * `val` members of the evaluation.
+ * Each call to a `Value` method adds a new unique value to the enumeration.
+ * To be accessible, these values are usually defined as `val` members of
+ * the evaluation.
*
- * All values in an enumeration share a common, unique type defined as the
- * `Value` type member of the enumeration (`Value`
- * selected on the stable identifier path of the enumeration instance).
+ * All values in an enumeration share a common, unique type defined as the
+ * `Value` type member of the enumeration (`Value` selected on the stable
+ * identifier path of the enumeration instance).
*
* @example {{{
* object Main extends App {
@@ -48,24 +48,31 @@ import java.lang.reflect.{ Modifier, Method => JMethod, Field => JField }
*
* @param initial The initial value from which to count the integers that
* identifies values at run-time.
- * @param names The sequence of names to give to this enumeration's values.
- *
* @author Matthias Zenger
*/
@SerialVersionUID(8476000850333817230L)
-abstract class Enumeration(initial: Int, names: String*) extends Serializable {
+abstract class Enumeration (initial: Int) extends Serializable {
thisenum =>
def this() = this(0)
+
+ @deprecated("Names should be specified individually or discovered via reflection", "2.10.0")
+ def this(initial: Int, names: String*) = {
+ this(initial)
+ this.nextName = names.iterator
+ }
+ @deprecated("Names should be specified individually or discovered via reflection", "2.10.0")
def this(names: String*) = this(0, names: _*)
/* Note that `readResolve` cannot be private, since otherwise
the JVM does not invoke it when deserializing subclasses. */
- protected def readResolve(): AnyRef = thisenum.getClass.getField("MODULE$").get()
+ protected def readResolve(): AnyRef = thisenum.getClass.getField(MODULE_INSTANCE_NAME).get(null)
/** The name of this enumeration.
*/
- override def toString = (getClass.getName stripSuffix "$" split '.' last) split '$' last
+ override def toString =
+ ((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split
+ Pattern.quote(NAME_JOIN_STRING)).last
/** The mapping from the integer used to identify values to the actual
* values. */
@@ -73,7 +80,7 @@ abstract class Enumeration(initial: Int, names: String*) extends Serializable {
/** The cache listing all values of this enumeration. */
@transient private var vset: ValueSet = null
- @transient private var vsetDefined = false
+ @transient @volatile private var vsetDefined = false
/** The mapping from the integer used to identify values to their
* names. */
@@ -83,55 +90,52 @@ abstract class Enumeration(initial: Int, names: String*) extends Serializable {
*/
def values: ValueSet = {
if (!vsetDefined) {
- vset = new ValueSet(immutable.SortedSet.empty[Int] ++ (vmap.values map (_.id)))
+ vset = (ValueSet.newBuilder ++= vmap.values).result()
vsetDefined = true
}
vset
}
/** The integer to use to identify the next created value. */
- protected var nextId = initial
+ protected var nextId: Int = initial
/** The string to use to name the next created value. */
- protected var nextName = names.iterator
+ protected var nextName: Iterator[String] = _
+
private def nextNameOrNull =
- if (nextName.hasNext) nextName.next else null
+ if (nextName != null && nextName.hasNext) nextName.next else null
/** The highest integer amongst those used to identify values in this
* enumeration. */
private var topId = initial
- /** The highest integer amongst those used to identify values in this
- * enumeration. */
+ /** The lowest integer amongst those used to identify values in this
+ * enumeration, but no higher than 0. */
+ private var bottomId = if(initial < 0) initial else 0
+
+ /** The one higher than the highest integer amongst those used to identify
+ * values in this enumeration. */
final def maxId = topId
/** The value of this enumeration with given id `x`
*/
final def apply(x: Int): Value = vmap(x)
- /** Returns a Value from this Enumeration whose name matches
- * the argument <var>s</var>.
- *
- * You can pass a String* set of names to the constructor, or
- * initialize each Enumeration with Value(String). Otherwise, the
- * names are determined automatically through reflection.
+ /** Return a `Value` from this `Enumeration` whose name matches
+ * the argument `s`. The names are determined automatically via reflection.
*
- * Note the change here wrt 2.7 is intentional. You should know whether
- * a name is in an Enumeration beforehand. If not, just use find on
- * values.
- *
- * @param s an Enumeration name
- * @return the Value of this Enumeration if its name matches <var>s</var>
- * @throws java.util.NoSuchElementException if no Value with a matching
- * name is in this Enumeration
+ * @param s an `Enumeration` name
+ * @return the `Value` of this `Enumeration` if its name matches `s`
+ * @throws java.util.NoSuchElementException if no `Value` with a matching
+ * name is in this `Enumeration`
*/
final def withName(s: String): Value = values.find(_.toString == s).get
/** Creates a fresh value, part of this enumeration. */
protected final def Value: Value = Value(nextId)
- /** Creates a fresh value, part of this enumeration, identified by the integer
- * `i`.
+ /** Creates a fresh value, part of this enumeration, identified by the
+ * integer `i`.
*
* @param i An integer that identifies this value at run-time. It must be
* unique amongst all values of the enumeration.
@@ -190,30 +194,36 @@ abstract class Enumeration(initial: Int, names: String*) extends Serializable {
/** a marker so we can tell whose values belong to whom come reflective-naming time */
private[Enumeration] val outerEnum = thisenum
- override def compare(that: Value): Int = this.id - that.id
+ override def compare(that: Value): Int =
+ if (this.id < that.id) -1
+ else if (this.id == that.id) 0
+ else 1
override def equals(other: Any) = other match {
case that: Enumeration#Value => (outerEnum eq that.outerEnum) && (id == that.id)
case _ => false
}
override def hashCode: Int = id.##
+
+ /** Create a ValueSet which contains this value and another one */
+ def + (v: Value) = ValueSet(this, v)
}
- /** A class implementing the <a href="Enumeration.Value.html"
- * target="contentFrame">`Value`</a> type. This class can be
- * overridden to change the enumeration's naming and integer identification
- * behaviour.
+ /** A class implementing the [[scala.Enumeration.Value]] type. This class
+ * can be overridden to change the enumeration's naming and integer
+ * identification behaviour.
*/
@SerialVersionUID(0 - 3501153230598116017L)
protected class Val(i: Int, name: String) extends Value with Serializable {
- def this(i: Int) = this(i, nextNameOrNull)
- def this(name: String) = this(nextId, name)
- def this() = this(nextId)
+ def this(i: Int) = this(i, nextNameOrNull)
+ def this(name: String) = this(nextId, name)
+ def this() = this(nextId)
assert(!vmap.isDefinedAt(i), "Duplicate id: " + i)
vmap(i) = this
vsetDefined = false
nextId = i + 1
if (nextId > topId) topId = nextId
+ if (i < bottomId) bottomId = i
def id = i
override def toString() =
if (name != null) name
@@ -227,30 +237,56 @@ abstract class Enumeration(initial: Int, names: String*) extends Serializable {
}
}
- /** A class for sets of values
+ /** An ordering by id for values of this set */
+ object ValueOrdering extends Ordering[Value] {
+ def compare(x: Value, y: Value): Int = x compare y
+ }
+
+ /** A class for sets of values.
* Iterating through this set will yield values in increasing order of their ids.
- * @param ids The set of ids of values, organized as a SortedSet.
+ *
+ * @param nnIds The set of ids of values (adjusted so that the lowest value does
+ * not fall below zero), organized as a `BitSet`.
*/
- class ValueSet private[Enumeration] (val ids: immutable.SortedSet[Int]) extends Set[Value] with SetLike[Value, ValueSet] {
+ class ValueSet private[ValueSet] (private[this] var nnIds: immutable.BitSet)
+ extends AbstractSet[Value]
+ with immutable.SortedSet[Value]
+ with SortedSetLike[Value, ValueSet]
+ with Serializable {
+
+ implicit def ordering: Ordering[Value] = ValueOrdering
+ def rangeImpl(from: Option[Value], until: Option[Value]): ValueSet =
+ new ValueSet(nnIds.rangeImpl(from.map(_.id - bottomId), until.map(_.id - bottomId)))
+
override def empty = ValueSet.empty
- def contains(v: Value) = ids contains (v.id)
- def + (value: Value) = new ValueSet(ids + value.id)
- def - (value: Value) = new ValueSet(ids - value.id)
- def iterator = ids.iterator map thisenum.apply
+ def contains(v: Value) = nnIds contains (v.id - bottomId)
+ def + (value: Value) = new ValueSet(nnIds + (value.id - bottomId))
+ def - (value: Value) = new ValueSet(nnIds - (value.id - bottomId))
+ def iterator = nnIds.iterator map (id => thisenum.apply(id + bottomId))
override def stringPrefix = thisenum + ".ValueSet"
+ /** Creates a bit mask for the zero-adjusted ids in this set as a
+ * new array of longs */
+ def toBitMask: Array[Long] = nnIds.toBitMask
}
/** A factory object for value sets */
object ValueSet {
- import mutable.{ Builder, SetBuilder }
import generic.CanBuildFrom
/** The empty value set */
- val empty = new ValueSet(immutable.SortedSet.empty)
+ val empty = new ValueSet(immutable.BitSet.empty)
/** A value set consisting of given elements */
- def apply(elems: Value*): ValueSet = empty ++ elems
+ def apply(elems: Value*): ValueSet = (newBuilder ++= elems).result()
+ /** A value set containing all the values for the zero-adjusted ids
+ * corresponding to the bits in an array */
+ def fromBitMask(elems: Array[Long]): ValueSet = new ValueSet(immutable.BitSet.fromBitMask(elems))
/** A builder object for value sets */
- def newBuilder: Builder[Value, ValueSet] = new SetBuilder(empty)
+ def newBuilder: mutable.Builder[Value, ValueSet] = new mutable.Builder[Value, ValueSet] {
+ private[this] val b = new mutable.BitSet
+ def += (x: Value) = { b += (x.id - bottomId); this }
+ def clear() = b.clear
+ def result() = new ValueSet(b.toImmutable)
+ }
/** The implicit builder for value sets */
implicit def canBuildFrom: CanBuildFrom[ValueSet, Value, ValueSet] =
new CanBuildFrom[ValueSet, Value, ValueSet] {
diff --git a/src/library/scala/Equals.scala b/src/library/scala/Equals.scala
index 8aff7af..f2f9ead 100644
--- a/src/library/scala/Equals.scala
+++ b/src/library/scala/Equals.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,7 +11,7 @@ package scala
/** An interface containing operations for equality.
* The only method not already present in class `AnyRef` is `canEqual`.
*/
-trait Equals {
+trait Equals extends Any {
/** A method that should be called from every well-designed equals method
* that is open to be overridden in a subclass. See Programming in Scala,
* Chapter 28 for discussion and design.
diff --git a/src/library/scala/Float.scala b/src/library/scala/Float.scala
index 9ef7181..d942ace 100644
--- a/src/library/scala/Float.scala
+++ b/src/library/scala/Float.scala
@@ -10,115 +10,355 @@
package scala
-/** `Float` is a member of the value classes, those whose instances are
- * not represented as objects by the underlying host system.
+import scala.language.implicitConversions
+
+/** `Float`, a 32-bit IEEE-754 floating point number (equivalent to Java's `float` primitive type) is a
+ * subtype of [[scala.AnyVal]]. Instances of `Float` are not
+ * represented by an object in the underlying runtime system.
*
* There is an implicit conversion from [[scala.Float]] => [[scala.runtime.RichFloat]]
* which provides useful non-primitive operations.
*/
-final class Float extends AnyVal {
- def toByte: Byte = sys.error("stub")
- def toShort: Short = sys.error("stub")
- def toChar: Char = sys.error("stub")
- def toInt: Int = sys.error("stub")
- def toLong: Long = sys.error("stub")
- def toFloat: Float = sys.error("stub")
- def toDouble: Double = sys.error("stub")
-
- def unary_+ : Float = sys.error("stub")
- def unary_- : Float = sys.error("stub")
-
- def +(x: String): String = sys.error("stub")
-
- def ==(x: Byte): Boolean = sys.error("stub")
- def ==(x: Short): Boolean = sys.error("stub")
- def ==(x: Char): Boolean = sys.error("stub")
- def ==(x: Int): Boolean = sys.error("stub")
- def ==(x: Long): Boolean = sys.error("stub")
- def ==(x: Float): Boolean = sys.error("stub")
- def ==(x: Double): Boolean = sys.error("stub")
-
- def !=(x: Byte): Boolean = sys.error("stub")
- def !=(x: Short): Boolean = sys.error("stub")
- def !=(x: Char): Boolean = sys.error("stub")
- def !=(x: Int): Boolean = sys.error("stub")
- def !=(x: Long): Boolean = sys.error("stub")
- def !=(x: Float): Boolean = sys.error("stub")
- def !=(x: Double): Boolean = sys.error("stub")
-
- def <(x: Byte): Boolean = sys.error("stub")
- def <(x: Short): Boolean = sys.error("stub")
- def <(x: Char): Boolean = sys.error("stub")
- def <(x: Int): Boolean = sys.error("stub")
- def <(x: Long): Boolean = sys.error("stub")
- def <(x: Float): Boolean = sys.error("stub")
- def <(x: Double): Boolean = sys.error("stub")
-
- def <=(x: Byte): Boolean = sys.error("stub")
- def <=(x: Short): Boolean = sys.error("stub")
- def <=(x: Char): Boolean = sys.error("stub")
- def <=(x: Int): Boolean = sys.error("stub")
- def <=(x: Long): Boolean = sys.error("stub")
- def <=(x: Float): Boolean = sys.error("stub")
- def <=(x: Double): Boolean = sys.error("stub")
-
- def >(x: Byte): Boolean = sys.error("stub")
- def >(x: Short): Boolean = sys.error("stub")
- def >(x: Char): Boolean = sys.error("stub")
- def >(x: Int): Boolean = sys.error("stub")
- def >(x: Long): Boolean = sys.error("stub")
- def >(x: Float): Boolean = sys.error("stub")
- def >(x: Double): Boolean = sys.error("stub")
-
- def >=(x: Byte): Boolean = sys.error("stub")
- def >=(x: Short): Boolean = sys.error("stub")
- def >=(x: Char): Boolean = sys.error("stub")
- def >=(x: Int): Boolean = sys.error("stub")
- def >=(x: Long): Boolean = sys.error("stub")
- def >=(x: Float): Boolean = sys.error("stub")
- def >=(x: Double): Boolean = sys.error("stub")
-
- def +(x: Byte): Float = sys.error("stub")
- def +(x: Short): Float = sys.error("stub")
- def +(x: Char): Float = sys.error("stub")
- def +(x: Int): Float = sys.error("stub")
- def +(x: Long): Float = sys.error("stub")
- def +(x: Float): Float = sys.error("stub")
- def +(x: Double): Double = sys.error("stub")
-
- def -(x: Byte): Float = sys.error("stub")
- def -(x: Short): Float = sys.error("stub")
- def -(x: Char): Float = sys.error("stub")
- def -(x: Int): Float = sys.error("stub")
- def -(x: Long): Float = sys.error("stub")
- def -(x: Float): Float = sys.error("stub")
- def -(x: Double): Double = sys.error("stub")
-
- def *(x: Byte): Float = sys.error("stub")
- def *(x: Short): Float = sys.error("stub")
- def *(x: Char): Float = sys.error("stub")
- def *(x: Int): Float = sys.error("stub")
- def *(x: Long): Float = sys.error("stub")
- def *(x: Float): Float = sys.error("stub")
- def *(x: Double): Double = sys.error("stub")
-
- def /(x: Byte): Float = sys.error("stub")
- def /(x: Short): Float = sys.error("stub")
- def /(x: Char): Float = sys.error("stub")
- def /(x: Int): Float = sys.error("stub")
- def /(x: Long): Float = sys.error("stub")
- def /(x: Float): Float = sys.error("stub")
- def /(x: Double): Double = sys.error("stub")
-
- def %(x: Byte): Float = sys.error("stub")
- def %(x: Short): Float = sys.error("stub")
- def %(x: Char): Float = sys.error("stub")
- def %(x: Int): Float = sys.error("stub")
- def %(x: Long): Float = sys.error("stub")
- def %(x: Float): Float = sys.error("stub")
- def %(x: Double): Double = sys.error("stub")
-
- def getClass(): Class[Float] = sys.error("stub")
+final abstract class Float private extends AnyVal {
+ def toByte: Byte
+ def toShort: Short
+ def toChar: Char
+ def toInt: Int
+ def toLong: Long
+ def toFloat: Float
+ def toDouble: Double
+
+ /**
+ * Returns this value, unmodified.
+ */
+ def unary_+ : Float
+ /**
+ * Returns the negation of this value.
+ */
+ def unary_- : Float
+
+ def +(x: String): String
+
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Short): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Char): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Int): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Long): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Float): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Short): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Char): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Int): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Long): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Float): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Short): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Char): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Int): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Long): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Float): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Double): Boolean
+
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Byte): Float
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Short): Float
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Char): Float
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Int): Float
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Long): Float
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Float): Float
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Double): Double
+
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Byte): Float
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Short): Float
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Char): Float
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Int): Float
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Long): Float
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Float): Float
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Double): Double
+
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Byte): Float
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Short): Float
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Char): Float
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Int): Float
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Long): Float
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Float): Float
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Double): Double
+
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Byte): Float
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Short): Float
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Char): Float
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Int): Float
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Long): Float
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Float): Float
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Double): Double
+
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Byte): Float
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Short): Float
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Char): Float
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Int): Float
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Long): Float
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Float): Float
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Double): Double
+
+ override def getClass(): Class[Float] = null
}
object Float extends AnyValCompanion {
@@ -130,9 +370,6 @@ object Float extends AnyValCompanion {
final val PositiveInfinity = java.lang.Float.POSITIVE_INFINITY
final val NegativeInfinity = java.lang.Float.NEGATIVE_INFINITY
- @deprecated("use Float.MinPositiveValue instead", "2.9.0")
- final val Epsilon = MinPositiveValue
-
/** The negative number with the greatest (finite) absolute value which is representable
* by a Float. Note that it differs from [[java.lang.Float.MIN_VALUE]], which
* is the smallest positive value representable by a Float. In Scala that number
@@ -163,5 +400,9 @@ object Float extends AnyValCompanion {
/** The String representation of the scala.Float companion object.
*/
override def toString = "object scala.Float"
+
+ /** Language mandated coercions from Float to "wider" types.
+ */
+ implicit def float2double(x: Float): Double = x.toDouble
}
diff --git a/src/library/scala/Function.scala b/src/library/scala/Function.scala
index a6e2a2d..7bd12a2 100644
--- a/src/library/scala/Function.scala
+++ b/src/library/scala/Function.scala
@@ -1,7 +1,7 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
@@ -16,68 +16,30 @@ package scala
* @version 1.0, 29/11/2006
*/
object Function {
- /** Given a sequence of functions <code>f<sub>1</sub></code>, ...,
- * <code>f<sub>n</sub></code>, return the function <code>f<sub>1</sub>
- * andThen ... andThen f<sub>n</sub></code>.
+ /** Given a sequence of functions `f,,1,,`, ..., `f,,n,,`, return the
+ * function `f,,1,, andThen ... andThen f,,n,,`.
*
* @param fs The given sequence of functions
- * @return ...
*/
def chain[a](fs: Seq[a => a]): a => a = { x => (x /: fs) ((x, f) => f(x)) }
/** The constant function */
def const[T, U](x: T)(y: U): T = x
- /** Turns a function `A => Option[B]` into a `PartialFunction[A, B]`. Important note:
- * this transformation implies the original function will be called 2 or more
- * times on each logical invocation, because the only way to supply an implementation
- * of isDefinedAt is to call the function and examine the return value.
+ /** Turns a function `A => Option[B]` into a `PartialFunction[A, B]`.
*
- * @param f a function T => Option[R]
- * @return a partial function defined for those inputs where
- * f returns Some(_) and undefined where f returns None.
- * @see PartialFunction#lift
- */
- def unlift[T, R](f: T => Option[R]): PartialFunction[T, R] = new PartialFunction[T, R] {
- def apply(x: T): R = f(x).get
- def isDefinedAt(x: T): Boolean = f(x).isDefined
- override def lift: T => Option[R] = f
- }
-
- /** Currying for functions of arity 2. This transforms a function
- * of arity 2 into a a unary function returning another unary function.
+ * '''Important note''': this transformation implies the original function
+ * may be called 2 or more times on each logical invocation, because the
+ * only way to supply an implementation of `isDefinedAt` is to call the
+ * function and examine the return value.
+ * See also [[scala.PartialFunction]], method `applyOrElse`.
*
- * @param f ...
- * @return ...
- */
- @deprecated("Use `f.curried` instead", "2.8.0")
- def curried[a1, a2, b](f: (a1, a2) => b): a1 => a2 => b = {
- x1 => x2 => f(x1, x2)
- }
-
- /** Currying for functions of arity 3.
- *
- * @param f ...
- * @return ...
- */
- @deprecated("Use `f.curried` instead", "2.8.0")
- def curried[a1, a2, a3, b](f: (a1, a2, a3) => b): a1 => a2 => a3 => b = {
- x1 => x2 => x3 => f(x1, x2, x3)
- }
-
- /** Currying for functions of arity 4.
- */
- @deprecated("Use `f.curried` instead", "2.8.0")
- def curried[a1, a2, a3, a4, b](f: (a1, a2, a3, a4) => b): a1 => a2 => a3 => a4 => b = {
- x1 => x2 => x3 => x4 => f(x1, x2, x3, x4)
- }
-
- /** Currying for functions of arity 5.
+ * @param f a function `T => Option[R]`
+ * @return a partial function defined for those inputs where
+ * f returns `Some(_)` and undefined where `f` returns `None`.
+ * @see [[scala.PartialFunction]], method `lift`.
*/
- @deprecated("Use `f.curried` instead", "2.8.0")
- def curried[a1, a2, a3, a4, a5, b](f: (a1, a2, a3, a4, a5) => b): a1 => a2 => a3 => a4 => a5 => b = {
- x1 => x2 => x3 => x4 => x5 => f(x1, x2, x3, x4, x5)
- }
+ def unlift[T, R](f: T => Option[R]): PartialFunction[T, R] = PartialFunction.unlifted(f)
/** Uncurrying for functions of arity 2. This transforms a unary function
* returning another unary function into a function of arity 2.
@@ -109,9 +71,6 @@ object Function {
*
* @note These functions are slotted for deprecation, but it is on
* hold pending superior type inference for tupling anonymous functions.
- *
- * @param f ...
- * @return ...
*/
// @deprecated("Use `f.tupled` instead")
def tupled[a1, a2, b](f: (a1, a2) => b): Tuple2[a1, a2] => b = {
diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala
index a3220f9..5f87b38 100644
--- a/src/library/scala/Function0.scala
+++ b/src/library/scala/Function0.scala
@@ -6,28 +6,34 @@
** |/ **
\* */
// GENERATED CODE: DO NOT EDIT.
-// genprod generated these sources at: Thu Apr 14 13:08:25 PDT 2011
+// genprod generated these sources at: Tue Aug 07 11:54:44 CEST 2012
package scala
/** A function of 0 parameters.
- *
+ *
* In the following example, the definition of javaVersion is a
* shorthand for the anonymous class definition anonfun0:
*
* {{{
- * object Main extends Application {
+ * object Main extends App {
* val javaVersion = () => sys.props("java.version")
*
* val anonfun0 = new Function0[String] {
* def apply(): String = sys.props("java.version")
* }
* assert(javaVersion() == anonfun0())
- * }
+ * }
* }}}
+ *
+ * Note that `Function1` does not define a total function, as might
+ * be suggested by the existence of [[scala.PartialFunction]]. The only
+ * distinction between `Function1` and `PartialFunction` is that the
+ * latter can specify inputs which it will not handle.
+
*/
-trait Function0[@specialized +R] extends AnyRef { self =>
+trait Function0[@specialized(Specializable.Primitives) +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
* @return the result of function application.
*/
diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala
index 7517e66..22393c6 100644
--- a/src/library/scala/Function1.scala
+++ b/src/library/scala/Function1.scala
@@ -11,31 +11,29 @@ package scala
/** A function of 1 parameter.
- *
+ *
* In the following example, the definition of succ is a
* shorthand for the anonymous class definition anonfun1:
*
* {{{
- * object Main extends Application {
+ * object Main extends App {
* val succ = (x: Int) => x + 1
* val anonfun1 = new Function1[Int, Int] {
* def apply(x: Int): Int = x + 1
* }
* assert(succ(0) == anonfun1(0))
- * }
+ * }
* }}}
*
* Note that `Function1` does not define a total function, as might
* be suggested by the existence of [[scala.PartialFunction]]. The only
* distinction between `Function1` and `PartialFunction` is that the
* latter can specify inputs which it will not handle.
- *
+
*/
@annotation.implicitNotFound(msg = "No implicit view available from ${T1} => ${R}.")
-trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
- /** Apply the body of this function to the argument. It may throw an
- * exception.
- *
+trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double/*, scala.AnyRef*/) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double/*, scala.AnyRef*/) +R] extends AnyRef { self =>
+ /** Apply the body of this function to the argument.
* @return the result of function application.
*/
def apply(v1: T1): R
@@ -46,7 +44,7 @@ trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -
* @param g a function A => T1
* @return a new function `f` such that `f(x) == apply(g(x))`
*/
- def compose[A](g: A => T1): A => R = { x => apply(g(x)) }
+ @annotation.unspecialized def compose[A](g: A => T1): A => R = { x => apply(g(x)) }
/** Composes two instances of Function1 in a new Function1, with this function applied first.
*
@@ -54,7 +52,7 @@ trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -
* @param g a function R => A
* @return a new function `f` such that `f(x) == g(apply(x))`
*/
- def andThen[A](g: R => A): T1 => A = { x => g(apply(x)) }
+ @annotation.unspecialized def andThen[A](g: R => A): T1 => A = { x => g(apply(x)) }
override def toString() = "<function1>"
}
diff --git a/src/library/scala/Function10.scala b/src/library/scala/Function10.scala
index 632883f..f7e5d41 100644
--- a/src/library/scala/Function10.scala
+++ b/src/library/scala/Function10.scala
@@ -18,23 +18,20 @@ trait Function10[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, +R] extends
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 10 arguments,
* it accepts a single [[scala.Tuple10]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)) == f(Tuple10(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)`
*/
- def tupled: Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10] => R = {
+
+ @annotation.unspecialized def tupled: Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10] => R = {
case Tuple10(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)
}
override def toString() = "<function10>"
diff --git a/src/library/scala/Function11.scala b/src/library/scala/Function11.scala
index 1c27ac7..53742bf 100644
--- a/src/library/scala/Function11.scala
+++ b/src/library/scala/Function11.scala
@@ -18,23 +18,20 @@ trait Function11[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, +R] ex
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 11 arguments,
* it accepts a single [[scala.Tuple11]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)) == f(Tuple11(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)`
*/
- def tupled: Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11] => R = {
+
+ @annotation.unspecialized def tupled: Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11] => R = {
case Tuple11(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)
}
override def toString() = "<function11>"
diff --git a/src/library/scala/Function12.scala b/src/library/scala/Function12.scala
index 7f53d87..e349d90 100644
--- a/src/library/scala/Function12.scala
+++ b/src/library/scala/Function12.scala
@@ -18,23 +18,20 @@ trait Function12[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 12 arguments,
* it accepts a single [[scala.Tuple12]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)) == f(Tuple12(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)`
*/
- def tupled: Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12] => R = {
+
+ @annotation.unspecialized def tupled: Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12] => R = {
case Tuple12(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)
}
override def toString() = "<function12>"
diff --git a/src/library/scala/Function13.scala b/src/library/scala/Function13.scala
index 9e05c6c..10ec64b 100644
--- a/src/library/scala/Function13.scala
+++ b/src/library/scala/Function13.scala
@@ -18,23 +18,20 @@ trait Function13[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 13 arguments,
* it accepts a single [[scala.Tuple13]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)) == f(Tuple13(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)`
*/
- def tupled: Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13] => R = {
+
+ @annotation.unspecialized def tupled: Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13] => R = {
case Tuple13(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)
}
override def toString() = "<function13>"
diff --git a/src/library/scala/Function14.scala b/src/library/scala/Function14.scala
index 5dae911..82dd409 100644
--- a/src/library/scala/Function14.scala
+++ b/src/library/scala/Function14.scala
@@ -18,23 +18,20 @@ trait Function14[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 14 arguments,
* it accepts a single [[scala.Tuple14]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)) == f(Tuple14(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)`
*/
- def tupled: Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14] => R = {
+
+ @annotation.unspecialized def tupled: Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14] => R = {
case Tuple14(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)
}
override def toString() = "<function14>"
diff --git a/src/library/scala/Function15.scala b/src/library/scala/Function15.scala
index b50974a..be5fbee 100644
--- a/src/library/scala/Function15.scala
+++ b/src/library/scala/Function15.scala
@@ -18,23 +18,20 @@ trait Function15[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 15 arguments,
* it accepts a single [[scala.Tuple15]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)) == f(Tuple15(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)`
*/
- def tupled: Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15] => R = {
+
+ @annotation.unspecialized def tupled: Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15] => R = {
case Tuple15(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)
}
override def toString() = "<function15>"
diff --git a/src/library/scala/Function16.scala b/src/library/scala/Function16.scala
index 2dc7261..7a185b3 100644
--- a/src/library/scala/Function16.scala
+++ b/src/library/scala/Function16.scala
@@ -18,23 +18,20 @@ trait Function16[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 16 arguments,
* it accepts a single [[scala.Tuple16]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)) == f(Tuple16(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)`
*/
- def tupled: Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16] => R = {
+
+ @annotation.unspecialized def tupled: Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16] => R = {
case Tuple16(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)
}
override def toString() = "<function16>"
diff --git a/src/library/scala/Function17.scala b/src/library/scala/Function17.scala
index 520c9d1..94e0000 100644
--- a/src/library/scala/Function17.scala
+++ b/src/library/scala/Function17.scala
@@ -18,23 +18,20 @@ trait Function17[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 17 arguments,
* it accepts a single [[scala.Tuple17]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)) == f(Tuple17(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)`
*/
- def tupled: Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17] => R = {
+
+ @annotation.unspecialized def tupled: Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17] => R = {
case Tuple17(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)
}
override def toString() = "<function17>"
diff --git a/src/library/scala/Function18.scala b/src/library/scala/Function18.scala
index 8d72a3f..a3ee677 100644
--- a/src/library/scala/Function18.scala
+++ b/src/library/scala/Function18.scala
@@ -18,23 +18,20 @@ trait Function18[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 18 arguments,
* it accepts a single [[scala.Tuple18]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)) == f(Tuple18(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)`
*/
- def tupled: Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18] => R = {
+
+ @annotation.unspecialized def tupled: Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18] => R = {
case Tuple18(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)
}
override def toString() = "<function18>"
diff --git a/src/library/scala/Function19.scala b/src/library/scala/Function19.scala
index 3990bc6..038dcbb 100644
--- a/src/library/scala/Function19.scala
+++ b/src/library/scala/Function19.scala
@@ -18,23 +18,20 @@ trait Function19[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 19 arguments,
* it accepts a single [[scala.Tuple19]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)) == f(Tuple19(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)`
*/
- def tupled: Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19] => R = {
+
+ @annotation.unspecialized def tupled: Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19] => R = {
case Tuple19(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)
}
override def toString() = "<function19>"
diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala
index 7d0b9a5..0794a40 100644
--- a/src/library/scala/Function2.scala
+++ b/src/library/scala/Function2.scala
@@ -11,43 +11,46 @@ package scala
/** A function of 2 parameters.
- *
+ *
* In the following example, the definition of max is a
* shorthand for the anonymous class definition anonfun2:
*
* {{{
- * object Main extends Application {
+ * object Main extends App {
* val max = (x: Int, y: Int) => if (x < y) y else x
*
* val anonfun2 = new Function2[Int, Int, Int] {
* def apply(x: Int, y: Int): Int = if (x < y) y else x
* }
* assert(max(0, 1) == anonfun2(0, 1))
- * }
+ * }
* }}}
+ *
+ * Note that `Function1` does not define a total function, as might
+ * be suggested by the existence of [[scala.PartialFunction]]. The only
+ * distinction between `Function1` and `PartialFunction` is that the
+ * latter can specify inputs which it will not handle.
+
*/
trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
* @return the result of function application.
*/
def apply(v1: T1, v2: T2): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2) == apply(x1, x2)`
*/
- def curried: T1 => T2 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => R = {
(x1: T1) => (x2: T2) => apply(x1, x2)
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 2 arguments,
* it accepts a single [[scala.Tuple2]] argument.
*
* @return a function `f` such that `f((x1, x2)) == f(Tuple2(x1, x2)) == apply(x1, x2)`
*/
- def tupled: Tuple2[T1, T2] => R = {
+
+ @annotation.unspecialized def tupled: Tuple2[T1, T2] => R = {
case Tuple2(x1, x2) => apply(x1, x2)
}
override def toString() = "<function2>"
diff --git a/src/library/scala/Function20.scala b/src/library/scala/Function20.scala
index 94fa1cf..727684d 100644
--- a/src/library/scala/Function20.scala
+++ b/src/library/scala/Function20.scala
@@ -18,23 +18,20 @@ trait Function20[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 20 arguments,
* it accepts a single [[scala.Tuple20]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)) == f(Tuple20(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)`
*/
- def tupled: Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20] => R = {
+
+ @annotation.unspecialized def tupled: Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20] => R = {
case Tuple20(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)
}
override def toString() = "<function20>"
diff --git a/src/library/scala/Function21.scala b/src/library/scala/Function21.scala
index f41d889..2441278 100644
--- a/src/library/scala/Function21.scala
+++ b/src/library/scala/Function21.scala
@@ -18,23 +18,20 @@ trait Function21[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 21 arguments,
* it accepts a single [[scala.Tuple21]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)) == f(Tuple21(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)`
*/
- def tupled: Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21] => R = {
+
+ @annotation.unspecialized def tupled: Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21] => R = {
case Tuple21(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)
}
override def toString() = "<function21>"
diff --git a/src/library/scala/Function22.scala b/src/library/scala/Function22.scala
index c1b290f..1f70b19 100644
--- a/src/library/scala/Function22.scala
+++ b/src/library/scala/Function22.scala
@@ -18,23 +18,20 @@ trait Function22[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12,
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21, v22: T22): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21)(x22) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => T22 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => T22 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21, x22: T22) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 22 arguments,
* it accepts a single [[scala.Tuple22]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)) == f(Tuple22(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)`
*/
- def tupled: Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22] => R = {
+
+ @annotation.unspecialized def tupled: Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22] => R = {
case Tuple22(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)
}
override def toString() = "<function22>"
diff --git a/src/library/scala/Function3.scala b/src/library/scala/Function3.scala
index 1b592f5..bbbde82 100644
--- a/src/library/scala/Function3.scala
+++ b/src/library/scala/Function3.scala
@@ -18,23 +18,20 @@ trait Function3[-T1, -T2, -T3, +R] extends AnyRef { self =>
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3) == apply(x1, x2, x3)`
*/
- def curried: T1 => T2 => T3 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => R = {
(x1: T1) => (x2: T2) => (x3: T3) => apply(x1, x2, x3)
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 3 arguments,
* it accepts a single [[scala.Tuple3]] argument.
*
* @return a function `f` such that `f((x1, x2, x3)) == f(Tuple3(x1, x2, x3)) == apply(x1, x2, x3)`
*/
- def tupled: Tuple3[T1, T2, T3] => R = {
+
+ @annotation.unspecialized def tupled: Tuple3[T1, T2, T3] => R = {
case Tuple3(x1, x2, x3) => apply(x1, x2, x3)
}
override def toString() = "<function3>"
diff --git a/src/library/scala/Function4.scala b/src/library/scala/Function4.scala
index bd11232..f100860 100644
--- a/src/library/scala/Function4.scala
+++ b/src/library/scala/Function4.scala
@@ -18,23 +18,20 @@ trait Function4[-T1, -T2, -T3, -T4, +R] extends AnyRef { self =>
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4) == apply(x1, x2, x3, x4)`
*/
- def curried: T1 => T2 => T3 => T4 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => R = {
(x1: T1) => (x2: T2) => (x3: T3) => (x4: T4) => apply(x1, x2, x3, x4)
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 4 arguments,
* it accepts a single [[scala.Tuple4]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4)) == f(Tuple4(x1, x2, x3, x4)) == apply(x1, x2, x3, x4)`
*/
- def tupled: Tuple4[T1, T2, T3, T4] => R = {
+
+ @annotation.unspecialized def tupled: Tuple4[T1, T2, T3, T4] => R = {
case Tuple4(x1, x2, x3, x4) => apply(x1, x2, x3, x4)
}
override def toString() = "<function4>"
diff --git a/src/library/scala/Function5.scala b/src/library/scala/Function5.scala
index af83dcf..cba9b6c 100644
--- a/src/library/scala/Function5.scala
+++ b/src/library/scala/Function5.scala
@@ -18,23 +18,20 @@ trait Function5[-T1, -T2, -T3, -T4, -T5, +R] extends AnyRef { self =>
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5) == apply(x1, x2, x3, x4, x5)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5) => self.apply(x1, x2, x3, x4, x5)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 5 arguments,
* it accepts a single [[scala.Tuple5]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5)) == f(Tuple5(x1, x2, x3, x4, x5)) == apply(x1, x2, x3, x4, x5)`
*/
- def tupled: Tuple5[T1, T2, T3, T4, T5] => R = {
+
+ @annotation.unspecialized def tupled: Tuple5[T1, T2, T3, T4, T5] => R = {
case Tuple5(x1, x2, x3, x4, x5) => apply(x1, x2, x3, x4, x5)
}
override def toString() = "<function5>"
diff --git a/src/library/scala/Function6.scala b/src/library/scala/Function6.scala
index caceb55..0b8addf 100644
--- a/src/library/scala/Function6.scala
+++ b/src/library/scala/Function6.scala
@@ -18,23 +18,20 @@ trait Function6[-T1, -T2, -T3, -T4, -T5, -T6, +R] extends AnyRef { self =>
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6) == apply(x1, x2, x3, x4, x5, x6)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6) => self.apply(x1, x2, x3, x4, x5, x6)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 6 arguments,
* it accepts a single [[scala.Tuple6]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6)) == f(Tuple6(x1, x2, x3, x4, x5, x6)) == apply(x1, x2, x3, x4, x5, x6)`
*/
- def tupled: Tuple6[T1, T2, T3, T4, T5, T6] => R = {
+
+ @annotation.unspecialized def tupled: Tuple6[T1, T2, T3, T4, T5, T6] => R = {
case Tuple6(x1, x2, x3, x4, x5, x6) => apply(x1, x2, x3, x4, x5, x6)
}
override def toString() = "<function6>"
diff --git a/src/library/scala/Function7.scala b/src/library/scala/Function7.scala
index 16aa9a9..2098658 100644
--- a/src/library/scala/Function7.scala
+++ b/src/library/scala/Function7.scala
@@ -18,23 +18,20 @@ trait Function7[-T1, -T2, -T3, -T4, -T5, -T6, -T7, +R] extends AnyRef { self =>
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7) == apply(x1, x2, x3, x4, x5, x6, x7)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7) => self.apply(x1, x2, x3, x4, x5, x6, x7)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 7 arguments,
* it accepts a single [[scala.Tuple7]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7)) == f(Tuple7(x1, x2, x3, x4, x5, x6, x7)) == apply(x1, x2, x3, x4, x5, x6, x7)`
*/
- def tupled: Tuple7[T1, T2, T3, T4, T5, T6, T7] => R = {
+
+ @annotation.unspecialized def tupled: Tuple7[T1, T2, T3, T4, T5, T6, T7] => R = {
case Tuple7(x1, x2, x3, x4, x5, x6, x7) => apply(x1, x2, x3, x4, x5, x6, x7)
}
override def toString() = "<function7>"
diff --git a/src/library/scala/Function8.scala b/src/library/scala/Function8.scala
index 6f97cc1..08a480d 100644
--- a/src/library/scala/Function8.scala
+++ b/src/library/scala/Function8.scala
@@ -18,23 +18,20 @@ trait Function8[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, +R] extends AnyRef { sel
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8) == apply(x1, x2, x3, x4, x5, x6, x7, x8)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 8 arguments,
* it accepts a single [[scala.Tuple8]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8)) == f(Tuple8(x1, x2, x3, x4, x5, x6, x7, x8)) == apply(x1, x2, x3, x4, x5, x6, x7, x8)`
*/
- def tupled: Tuple8[T1, T2, T3, T4, T5, T6, T7, T8] => R = {
+
+ @annotation.unspecialized def tupled: Tuple8[T1, T2, T3, T4, T5, T6, T7, T8] => R = {
case Tuple8(x1, x2, x3, x4, x5, x6, x7, x8) => apply(x1, x2, x3, x4, x5, x6, x7, x8)
}
override def toString() = "<function8>"
diff --git a/src/library/scala/Function9.scala b/src/library/scala/Function9.scala
index 3c39cb4..2e35f79 100644
--- a/src/library/scala/Function9.scala
+++ b/src/library/scala/Function9.scala
@@ -18,23 +18,20 @@ trait Function9[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, +R] extends AnyRef
* @return the result of function application.
*/
def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9): R
-
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)`
*/
- def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => R = {
+ @annotation.unspecialized def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => R = {
(x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)).curried
}
- @deprecated("Use 'curried' instead", "2.8.0")
- def curry = curried
-
/** Creates a tupled version of this function: instead of 9 arguments,
* it accepts a single [[scala.Tuple9]] argument.
*
* @return a function `f` such that `f((x1, x2, x3, x4, x5, x6, x7, x8, x9)) == f(Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9)) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)`
*/
- def tupled: Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9] => R = {
+
+ @annotation.unspecialized def tupled: Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9] => R = {
case Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9) => apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)
}
override def toString() = "<function9>"
diff --git a/src/library/scala/Immutable.scala b/src/library/scala/Immutable.scala
index 336877a..fead590 100644
--- a/src/library/scala/Immutable.scala
+++ b/src/library/scala/Immutable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Int.scala b/src/library/scala/Int.scala
index 4546934..ae36413 100644
--- a/src/library/scala/Int.scala
+++ b/src/library/scala/Int.scala
@@ -10,141 +10,589 @@
package scala
-/** `Int` is a member of the value classes, those whose instances are
- * not represented as objects by the underlying host system.
+import scala.language.implicitConversions
+
+/** `Int`, a 32-bit signed integer (equivalent to Java's `int` primitive type) is a
+ * subtype of [[scala.AnyVal]]. Instances of `Int` are not
+ * represented by an object in the underlying runtime system.
*
* There is an implicit conversion from [[scala.Int]] => [[scala.runtime.RichInt]]
* which provides useful non-primitive operations.
*/
-final class Int extends AnyVal {
- def toByte: Byte = sys.error("stub")
- def toShort: Short = sys.error("stub")
- def toChar: Char = sys.error("stub")
- def toInt: Int = sys.error("stub")
- def toLong: Long = sys.error("stub")
- def toFloat: Float = sys.error("stub")
- def toDouble: Double = sys.error("stub")
-
- def unary_+ : Int = sys.error("stub")
- def unary_- : Int = sys.error("stub")
- def unary_~ : Int = sys.error("stub")
-
- def +(x: String): String = sys.error("stub")
-
- def <<(x: Int): Int = sys.error("stub")
- def <<(x: Long): Int = sys.error("stub")
- def >>>(x: Int): Int = sys.error("stub")
- def >>>(x: Long): Int = sys.error("stub")
- def >>(x: Int): Int = sys.error("stub")
- def >>(x: Long): Int = sys.error("stub")
-
- def ==(x: Byte): Boolean = sys.error("stub")
- def ==(x: Short): Boolean = sys.error("stub")
- def ==(x: Char): Boolean = sys.error("stub")
- def ==(x: Int): Boolean = sys.error("stub")
- def ==(x: Long): Boolean = sys.error("stub")
- def ==(x: Float): Boolean = sys.error("stub")
- def ==(x: Double): Boolean = sys.error("stub")
-
- def !=(x: Byte): Boolean = sys.error("stub")
- def !=(x: Short): Boolean = sys.error("stub")
- def !=(x: Char): Boolean = sys.error("stub")
- def !=(x: Int): Boolean = sys.error("stub")
- def !=(x: Long): Boolean = sys.error("stub")
- def !=(x: Float): Boolean = sys.error("stub")
- def !=(x: Double): Boolean = sys.error("stub")
-
- def <(x: Byte): Boolean = sys.error("stub")
- def <(x: Short): Boolean = sys.error("stub")
- def <(x: Char): Boolean = sys.error("stub")
- def <(x: Int): Boolean = sys.error("stub")
- def <(x: Long): Boolean = sys.error("stub")
- def <(x: Float): Boolean = sys.error("stub")
- def <(x: Double): Boolean = sys.error("stub")
-
- def <=(x: Byte): Boolean = sys.error("stub")
- def <=(x: Short): Boolean = sys.error("stub")
- def <=(x: Char): Boolean = sys.error("stub")
- def <=(x: Int): Boolean = sys.error("stub")
- def <=(x: Long): Boolean = sys.error("stub")
- def <=(x: Float): Boolean = sys.error("stub")
- def <=(x: Double): Boolean = sys.error("stub")
-
- def >(x: Byte): Boolean = sys.error("stub")
- def >(x: Short): Boolean = sys.error("stub")
- def >(x: Char): Boolean = sys.error("stub")
- def >(x: Int): Boolean = sys.error("stub")
- def >(x: Long): Boolean = sys.error("stub")
- def >(x: Float): Boolean = sys.error("stub")
- def >(x: Double): Boolean = sys.error("stub")
-
- def >=(x: Byte): Boolean = sys.error("stub")
- def >=(x: Short): Boolean = sys.error("stub")
- def >=(x: Char): Boolean = sys.error("stub")
- def >=(x: Int): Boolean = sys.error("stub")
- def >=(x: Long): Boolean = sys.error("stub")
- def >=(x: Float): Boolean = sys.error("stub")
- def >=(x: Double): Boolean = sys.error("stub")
-
- def |(x: Byte): Int = sys.error("stub")
- def |(x: Short): Int = sys.error("stub")
- def |(x: Char): Int = sys.error("stub")
- def |(x: Int): Int = sys.error("stub")
- def |(x: Long): Long = sys.error("stub")
-
- def &(x: Byte): Int = sys.error("stub")
- def &(x: Short): Int = sys.error("stub")
- def &(x: Char): Int = sys.error("stub")
- def &(x: Int): Int = sys.error("stub")
- def &(x: Long): Long = sys.error("stub")
-
- def ^(x: Byte): Int = sys.error("stub")
- def ^(x: Short): Int = sys.error("stub")
- def ^(x: Char): Int = sys.error("stub")
- def ^(x: Int): Int = sys.error("stub")
- def ^(x: Long): Long = sys.error("stub")
-
- def +(x: Byte): Int = sys.error("stub")
- def +(x: Short): Int = sys.error("stub")
- def +(x: Char): Int = sys.error("stub")
- def +(x: Int): Int = sys.error("stub")
- def +(x: Long): Long = sys.error("stub")
- def +(x: Float): Float = sys.error("stub")
- def +(x: Double): Double = sys.error("stub")
-
- def -(x: Byte): Int = sys.error("stub")
- def -(x: Short): Int = sys.error("stub")
- def -(x: Char): Int = sys.error("stub")
- def -(x: Int): Int = sys.error("stub")
- def -(x: Long): Long = sys.error("stub")
- def -(x: Float): Float = sys.error("stub")
- def -(x: Double): Double = sys.error("stub")
-
- def *(x: Byte): Int = sys.error("stub")
- def *(x: Short): Int = sys.error("stub")
- def *(x: Char): Int = sys.error("stub")
- def *(x: Int): Int = sys.error("stub")
- def *(x: Long): Long = sys.error("stub")
- def *(x: Float): Float = sys.error("stub")
- def *(x: Double): Double = sys.error("stub")
-
- def /(x: Byte): Int = sys.error("stub")
- def /(x: Short): Int = sys.error("stub")
- def /(x: Char): Int = sys.error("stub")
- def /(x: Int): Int = sys.error("stub")
- def /(x: Long): Long = sys.error("stub")
- def /(x: Float): Float = sys.error("stub")
- def /(x: Double): Double = sys.error("stub")
-
- def %(x: Byte): Int = sys.error("stub")
- def %(x: Short): Int = sys.error("stub")
- def %(x: Char): Int = sys.error("stub")
- def %(x: Int): Int = sys.error("stub")
- def %(x: Long): Long = sys.error("stub")
- def %(x: Float): Float = sys.error("stub")
- def %(x: Double): Double = sys.error("stub")
-
- def getClass(): Class[Int] = sys.error("stub")
+final abstract class Int private extends AnyVal {
+ def toByte: Byte
+ def toShort: Short
+ def toChar: Char
+ def toInt: Int
+ def toLong: Long
+ def toFloat: Float
+ def toDouble: Double
+
+ /**
+ * Returns the bitwise negation of this value.
+ * @example {{{
+ * ~5 == -6
+ * // in binary: ~00000101 ==
+ * // 11111010
+ * }}}
+ */
+ def unary_~ : Int
+ /**
+ * Returns this value, unmodified.
+ */
+ def unary_+ : Int
+ /**
+ * Returns the negation of this value.
+ */
+ def unary_- : Int
+
+ def +(x: String): String
+
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the new right bits with zeroes.
+ * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
+ */
+ def <<(x: Int): Int
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the new right bits with zeroes.
+ * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
+ */
+ def <<(x: Long): Int
+ /**
+ * Returns this value bit-shifted right by the specified number of bits,
+ * filling the new left bits with zeroes.
+ * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
+ * @example {{{
+ * -21 >>> 3 == 536870909
+ * // in binary: 11111111 11111111 11111111 11101011 >>> 3 ==
+ * // 00011111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>>(x: Int): Int
+ /**
+ * Returns this value bit-shifted right by the specified number of bits,
+ * filling the new left bits with zeroes.
+ * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
+ * @example {{{
+ * -21 >>> 3 == 536870909
+ * // in binary: 11111111 11111111 11111111 11101011 >>> 3 ==
+ * // 00011111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>>(x: Long): Int
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the right bits with the same value as the left-most bit of this.
+ * The effect of this is to retain the sign of the value.
+ * @example {{{
+ * -21 >> 3 == -3
+ * // in binary: 11111111 11111111 11111111 11101011 >> 3 ==
+ * // 11111111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>(x: Int): Int
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the right bits with the same value as the left-most bit of this.
+ * The effect of this is to retain the sign of the value.
+ * @example {{{
+ * -21 >> 3 == -3
+ * // in binary: 11111111 11111111 11111111 11101011 >> 3 ==
+ * // 11111111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>(x: Long): Int
+
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Short): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Char): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Int): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Long): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Float): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Short): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Char): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Int): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Long): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Float): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Short): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Char): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Int): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Long): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Float): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Double): Boolean
+
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Byte): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Short): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Char): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Int): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Long): Long
+
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Byte): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Short): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Char): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Int): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Long): Long
+
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Byte): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Short): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Char): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Int): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Long): Long
+
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Byte): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Short): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Char): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Int): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Long): Long
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Float): Float
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Double): Double
+
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Byte): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Short): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Char): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Int): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Long): Long
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Float): Float
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Double): Double
+
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Byte): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Short): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Char): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Int): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Long): Long
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Float): Float
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Double): Double
+
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Byte): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Short): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Char): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Int): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Long): Long
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Float): Float
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Double): Double
+
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Byte): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Short): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Char): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Int): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Long): Long
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Float): Float
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Double): Double
+
+ override def getClass(): Class[Int] = null
}
object Int extends AnyValCompanion {
@@ -176,5 +624,11 @@ object Int extends AnyValCompanion {
/** The String representation of the scala.Int companion object.
*/
override def toString = "object scala.Int"
+
+ /** Language mandated coercions from Int to "wider" types.
+ */
+ implicit def int2long(x: Int): Long = x.toLong
+ implicit def int2float(x: Int): Float = x.toFloat
+ implicit def int2double(x: Int): Double = x.toDouble
}
diff --git a/src/library/scala/Long.scala b/src/library/scala/Long.scala
index 12b8a25..4ee9383 100644
--- a/src/library/scala/Long.scala
+++ b/src/library/scala/Long.scala
@@ -10,141 +10,589 @@
package scala
-/** `Long` is a member of the value classes, those whose instances are
- * not represented as objects by the underlying host system.
+import scala.language.implicitConversions
+
+/** `Long`, a 64-bit signed integer (equivalent to Java's `long` primitive type) is a
+ * subtype of [[scala.AnyVal]]. Instances of `Long` are not
+ * represented by an object in the underlying runtime system.
*
* There is an implicit conversion from [[scala.Long]] => [[scala.runtime.RichLong]]
* which provides useful non-primitive operations.
*/
-final class Long extends AnyVal {
- def toByte: Byte = sys.error("stub")
- def toShort: Short = sys.error("stub")
- def toChar: Char = sys.error("stub")
- def toInt: Int = sys.error("stub")
- def toLong: Long = sys.error("stub")
- def toFloat: Float = sys.error("stub")
- def toDouble: Double = sys.error("stub")
-
- def unary_+ : Long = sys.error("stub")
- def unary_- : Long = sys.error("stub")
- def unary_~ : Long = sys.error("stub")
-
- def +(x: String): String = sys.error("stub")
-
- def <<(x: Int): Long = sys.error("stub")
- def <<(x: Long): Long = sys.error("stub")
- def >>>(x: Int): Long = sys.error("stub")
- def >>>(x: Long): Long = sys.error("stub")
- def >>(x: Int): Long = sys.error("stub")
- def >>(x: Long): Long = sys.error("stub")
-
- def ==(x: Byte): Boolean = sys.error("stub")
- def ==(x: Short): Boolean = sys.error("stub")
- def ==(x: Char): Boolean = sys.error("stub")
- def ==(x: Int): Boolean = sys.error("stub")
- def ==(x: Long): Boolean = sys.error("stub")
- def ==(x: Float): Boolean = sys.error("stub")
- def ==(x: Double): Boolean = sys.error("stub")
-
- def !=(x: Byte): Boolean = sys.error("stub")
- def !=(x: Short): Boolean = sys.error("stub")
- def !=(x: Char): Boolean = sys.error("stub")
- def !=(x: Int): Boolean = sys.error("stub")
- def !=(x: Long): Boolean = sys.error("stub")
- def !=(x: Float): Boolean = sys.error("stub")
- def !=(x: Double): Boolean = sys.error("stub")
-
- def <(x: Byte): Boolean = sys.error("stub")
- def <(x: Short): Boolean = sys.error("stub")
- def <(x: Char): Boolean = sys.error("stub")
- def <(x: Int): Boolean = sys.error("stub")
- def <(x: Long): Boolean = sys.error("stub")
- def <(x: Float): Boolean = sys.error("stub")
- def <(x: Double): Boolean = sys.error("stub")
-
- def <=(x: Byte): Boolean = sys.error("stub")
- def <=(x: Short): Boolean = sys.error("stub")
- def <=(x: Char): Boolean = sys.error("stub")
- def <=(x: Int): Boolean = sys.error("stub")
- def <=(x: Long): Boolean = sys.error("stub")
- def <=(x: Float): Boolean = sys.error("stub")
- def <=(x: Double): Boolean = sys.error("stub")
-
- def >(x: Byte): Boolean = sys.error("stub")
- def >(x: Short): Boolean = sys.error("stub")
- def >(x: Char): Boolean = sys.error("stub")
- def >(x: Int): Boolean = sys.error("stub")
- def >(x: Long): Boolean = sys.error("stub")
- def >(x: Float): Boolean = sys.error("stub")
- def >(x: Double): Boolean = sys.error("stub")
-
- def >=(x: Byte): Boolean = sys.error("stub")
- def >=(x: Short): Boolean = sys.error("stub")
- def >=(x: Char): Boolean = sys.error("stub")
- def >=(x: Int): Boolean = sys.error("stub")
- def >=(x: Long): Boolean = sys.error("stub")
- def >=(x: Float): Boolean = sys.error("stub")
- def >=(x: Double): Boolean = sys.error("stub")
-
- def |(x: Byte): Long = sys.error("stub")
- def |(x: Short): Long = sys.error("stub")
- def |(x: Char): Long = sys.error("stub")
- def |(x: Int): Long = sys.error("stub")
- def |(x: Long): Long = sys.error("stub")
-
- def &(x: Byte): Long = sys.error("stub")
- def &(x: Short): Long = sys.error("stub")
- def &(x: Char): Long = sys.error("stub")
- def &(x: Int): Long = sys.error("stub")
- def &(x: Long): Long = sys.error("stub")
-
- def ^(x: Byte): Long = sys.error("stub")
- def ^(x: Short): Long = sys.error("stub")
- def ^(x: Char): Long = sys.error("stub")
- def ^(x: Int): Long = sys.error("stub")
- def ^(x: Long): Long = sys.error("stub")
-
- def +(x: Byte): Long = sys.error("stub")
- def +(x: Short): Long = sys.error("stub")
- def +(x: Char): Long = sys.error("stub")
- def +(x: Int): Long = sys.error("stub")
- def +(x: Long): Long = sys.error("stub")
- def +(x: Float): Float = sys.error("stub")
- def +(x: Double): Double = sys.error("stub")
-
- def -(x: Byte): Long = sys.error("stub")
- def -(x: Short): Long = sys.error("stub")
- def -(x: Char): Long = sys.error("stub")
- def -(x: Int): Long = sys.error("stub")
- def -(x: Long): Long = sys.error("stub")
- def -(x: Float): Float = sys.error("stub")
- def -(x: Double): Double = sys.error("stub")
-
- def *(x: Byte): Long = sys.error("stub")
- def *(x: Short): Long = sys.error("stub")
- def *(x: Char): Long = sys.error("stub")
- def *(x: Int): Long = sys.error("stub")
- def *(x: Long): Long = sys.error("stub")
- def *(x: Float): Float = sys.error("stub")
- def *(x: Double): Double = sys.error("stub")
-
- def /(x: Byte): Long = sys.error("stub")
- def /(x: Short): Long = sys.error("stub")
- def /(x: Char): Long = sys.error("stub")
- def /(x: Int): Long = sys.error("stub")
- def /(x: Long): Long = sys.error("stub")
- def /(x: Float): Float = sys.error("stub")
- def /(x: Double): Double = sys.error("stub")
-
- def %(x: Byte): Long = sys.error("stub")
- def %(x: Short): Long = sys.error("stub")
- def %(x: Char): Long = sys.error("stub")
- def %(x: Int): Long = sys.error("stub")
- def %(x: Long): Long = sys.error("stub")
- def %(x: Float): Float = sys.error("stub")
- def %(x: Double): Double = sys.error("stub")
-
- def getClass(): Class[Long] = sys.error("stub")
+final abstract class Long private extends AnyVal {
+ def toByte: Byte
+ def toShort: Short
+ def toChar: Char
+ def toInt: Int
+ def toLong: Long
+ def toFloat: Float
+ def toDouble: Double
+
+ /**
+ * Returns the bitwise negation of this value.
+ * @example {{{
+ * ~5 == -6
+ * // in binary: ~00000101 ==
+ * // 11111010
+ * }}}
+ */
+ def unary_~ : Long
+ /**
+ * Returns this value, unmodified.
+ */
+ def unary_+ : Long
+ /**
+ * Returns the negation of this value.
+ */
+ def unary_- : Long
+
+ def +(x: String): String
+
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the new right bits with zeroes.
+ * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
+ */
+ def <<(x: Int): Long
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the new right bits with zeroes.
+ * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
+ */
+ def <<(x: Long): Long
+ /**
+ * Returns this value bit-shifted right by the specified number of bits,
+ * filling the new left bits with zeroes.
+ * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
+ * @example {{{
+ * -21 >>> 3 == 536870909
+ * // in binary: 11111111 11111111 11111111 11101011 >>> 3 ==
+ * // 00011111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>>(x: Int): Long
+ /**
+ * Returns this value bit-shifted right by the specified number of bits,
+ * filling the new left bits with zeroes.
+ * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
+ * @example {{{
+ * -21 >>> 3 == 536870909
+ * // in binary: 11111111 11111111 11111111 11101011 >>> 3 ==
+ * // 00011111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>>(x: Long): Long
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the right bits with the same value as the left-most bit of this.
+ * The effect of this is to retain the sign of the value.
+ * @example {{{
+ * -21 >> 3 == -3
+ * // in binary: 11111111 11111111 11111111 11101011 >> 3 ==
+ * // 11111111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>(x: Int): Long
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the right bits with the same value as the left-most bit of this.
+ * The effect of this is to retain the sign of the value.
+ * @example {{{
+ * -21 >> 3 == -3
+ * // in binary: 11111111 11111111 11111111 11101011 >> 3 ==
+ * // 11111111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>(x: Long): Long
+
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Short): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Char): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Int): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Long): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Float): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Short): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Char): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Int): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Long): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Float): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Short): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Char): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Int): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Long): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Float): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Double): Boolean
+
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Byte): Long
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Short): Long
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Char): Long
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Int): Long
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Long): Long
+
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Byte): Long
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Short): Long
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Char): Long
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Int): Long
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Long): Long
+
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Byte): Long
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Short): Long
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Char): Long
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Int): Long
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Long): Long
+
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Byte): Long
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Short): Long
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Char): Long
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Int): Long
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Long): Long
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Float): Float
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Double): Double
+
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Byte): Long
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Short): Long
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Char): Long
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Int): Long
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Long): Long
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Float): Float
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Double): Double
+
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Byte): Long
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Short): Long
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Char): Long
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Int): Long
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Long): Long
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Float): Float
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Double): Double
+
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Byte): Long
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Short): Long
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Char): Long
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Int): Long
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Long): Long
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Float): Float
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Double): Double
+
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Byte): Long
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Short): Long
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Char): Long
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Int): Long
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Long): Long
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Float): Float
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Double): Double
+
+ override def getClass(): Class[Long] = null
}
object Long extends AnyValCompanion {
@@ -176,5 +624,10 @@ object Long extends AnyValCompanion {
/** The String representation of the scala.Long companion object.
*/
override def toString = "object scala.Long"
+
+ /** Language mandated coercions from Long to "wider" types.
+ */
+ implicit def long2float(x: Long): Float = x.toFloat
+ implicit def long2double(x: Long): Double = x.toDouble
}
diff --git a/src/library/scala/LowPriorityImplicits.scala b/src/library/scala/LowPriorityImplicits.scala
index 447a3c3..bf6e494 100644
--- a/src/library/scala/LowPriorityImplicits.scala
+++ b/src/library/scala/LowPriorityImplicits.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,6 +12,7 @@ import scala.collection.{ mutable, immutable, generic }
import mutable.WrappedArray
import immutable.WrappedString
import generic.CanBuildFrom
+import scala.language.implicitConversions
/** The `LowPriorityImplicits` class provides implicit values that
* are valid in all Scala compilation units without explicit qualification,
@@ -26,15 +27,20 @@ class LowPriorityImplicits {
* any potential conflicts. Conflicts do exist because the wrappers
* need to implement ScalaNumber in order to have a symmetric equals
* method, but that implies implementing java.lang.Number as well.
+ *
+ * Note - these are inlined because they are value classes, but
+ * the call to xxxWrapper is not eliminated even though it does nothing.
+ * Even inlined, every call site does a no-op retrieval of Predef's MODULE$
+ * because maybe loading Predef has side effects!
*/
- implicit def byteWrapper(x: Byte) = new runtime.RichByte(x)
- implicit def shortWrapper(x: Short) = new runtime.RichShort(x)
- implicit def intWrapper(x: Int) = new runtime.RichInt(x)
- implicit def charWrapper(c: Char) = new runtime.RichChar(c)
- implicit def longWrapper(x: Long) = new runtime.RichLong(x)
- implicit def floatWrapper(x: Float) = new runtime.RichFloat(x)
- implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x)
- implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x)
+ @inline implicit def byteWrapper(x: Byte) = new runtime.RichByte(x)
+ @inline implicit def shortWrapper(x: Short) = new runtime.RichShort(x)
+ @inline implicit def intWrapper(x: Int) = new runtime.RichInt(x)
+ @inline implicit def charWrapper(c: Char) = new runtime.RichChar(c)
+ @inline implicit def longWrapper(x: Long) = new runtime.RichLong(x)
+ @inline implicit def floatWrapper(x: Float) = new runtime.RichFloat(x)
+ @inline implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x)
+ @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x)
// These eight implicits exist solely to exclude Null from the domain of
// the boxed types, so that e.g. "var x: Int = null" is a compile time
diff --git a/src/library/scala/MatchError.scala b/src/library/scala/MatchError.scala
index 4e19ed1..9965bb1 100644
--- a/src/library/scala/MatchError.scala
+++ b/src/library/scala/MatchError.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -23,9 +23,15 @@ final class MatchError(obj: Any) extends RuntimeException {
/** There's no reason we need to call toString eagerly,
* so defer it until getMessage is called.
*/
- private lazy val objString =
+ private lazy val objString = {
+ def ofClass = "of class " + obj.getClass.getName
if (obj == null) "null"
- else obj.toString() + " (of class " + obj.asInstanceOf[AnyRef].getClass.getName + ")"
+ else try {
+ obj.toString() + " (" + ofClass + ")"
+ } catch {
+ case _: Throwable => "an instance " + ofClass
+ }
+ }
override def getMessage() = objString
}
diff --git a/src/library/scala/Math.scala b/src/library/scala/Math.scala
deleted file mode 100644
index b849318..0000000
--- a/src/library/scala/Math.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-
-/** The object <code>Math</code> contains methods for performing basic numeric
- * operations such as the elementary exponential, logarithm, square root, and
- * trigonometric functions.
- */
- at deprecated("use the scala.math package object instead.\n(Example package object usage: scala.math.Pi )", "2.8.0")
-object Math extends MathCommon {
- @deprecated("Use scala.Byte.MinValue instead", "2.8.0")
- val MIN_BYTE = java.lang.Byte.MIN_VALUE
-
- @deprecated("Use scala.Byte.MaxValue instead", "2.8.0")
- val MAX_BYTE = java.lang.Byte.MAX_VALUE
-
- @deprecated("Use scala.Short.MinValue instead", "2.8.0")
- val MIN_SHORT = java.lang.Short.MIN_VALUE
-
- @deprecated("Use scala.Short.MaxValue instead", "2.8.0")
- val MAX_SHORT = java.lang.Short.MAX_VALUE
-
- @deprecated("Use scala.Char.MinValue instead", "2.8.0")
- val MIN_CHAR = java.lang.Character.MIN_VALUE
-
- @deprecated("Use scala.Char.MaxValue instead", "2.8.0")
- val MAX_CHAR = java.lang.Character.MAX_VALUE
-
- @deprecated("Use scala.Int.MinValue instead", "2.8.0")
- val MIN_INT = java.lang.Integer.MIN_VALUE
-
- @deprecated("Use scala.Int.MaxValue instead", "2.8.0")
- val MAX_INT = java.lang.Integer.MAX_VALUE
-
- @deprecated("Use scala.Long.MinValue instead", "2.8.0")
- val MIN_LONG = java.lang.Long.MIN_VALUE
-
- @deprecated("Use scala.Long.MaxValue instead", "2.8.0")
- val MAX_LONG = java.lang.Long.MAX_VALUE
-
- /** The smallest possible value for <a href="Float.html" target="_self">scala.Float</a>. */
- @deprecated("Use scala.Float.MinValue instead", "2.8.0")
- val MIN_FLOAT = -java.lang.Float.MAX_VALUE
-
- /** The smallest difference between two values of <a href="Float.html" target="_self">scala.Float</a>. */
- @deprecated("Use scala.Float.MinPositiveValue instead", "2.8.0")
- val EPS_FLOAT = java.lang.Float.MIN_VALUE
-
- /** The greatest possible value for <a href="Float.html" target="_self">scala.Float</a>. */
- @deprecated("Use scala.Float.MaxValue instead", "2.8.0")
- val MAX_FLOAT = java.lang.Float.MAX_VALUE
-
- /** A value of type <a href="Float.html" target="_self">scala.Float</a> that represents no number. */
- @deprecated("Use scala.Float.NaN instead", "2.8.0")
- val NaN_FLOAT = java.lang.Float.NaN
-
- /** Negative infinity of type <a href="Float.html" target="_self">scala.Float</a>. */
- @deprecated("Use scala.Float.NegativeInfinity instead", "2.8.0")
- val NEG_INF_FLOAT = java.lang.Float.NEGATIVE_INFINITY
-
- /** Positive infinity of type <a href="Float.html" target="_self">scala.Float</a>. */
- @deprecated("Use scala.Float.PositiveInfinity instead", "2.8.0")
- val POS_INF_FLOAT = java.lang.Float.POSITIVE_INFINITY
-
- /** The smallest possible value for <a href="Double.html" target="_self">scala.Double</a>. */
- @deprecated("Use scala.Double.MinValue instead", "2.8.0")
- val MIN_DOUBLE = -java.lang.Double.MAX_VALUE
-
- /** The smallest difference between two values of <a href="Double.html" target="_self">scala.Double</a>. */
- @deprecated("Use scala.Double.MinPositiveValue instead", "2.8.0")
- val EPS_DOUBLE = java.lang.Double.MIN_VALUE
-
- /** The greatest possible value for <a href="Double.html" target="_self">scala.Double</a>. */
- @deprecated("Use scala.Double.MaxValue instead", "2.8.0")
- val MAX_DOUBLE = java.lang.Double.MAX_VALUE
-
- /** A value of type <a href="Double.html" target="_self">scala.Double</a> that represents no number. */
- @deprecated("Use scala.Double.NaN instead", "2.8.0")
- val NaN_DOUBLE = java.lang.Double.NaN
-
- /** Negative infinity of type <a href="Double.html" target="_self">scala.Double</a>. */
- @deprecated("Use scala.Double.NegativeInfinity instead", "2.8.0")
- val NEG_INF_DOUBLE = java.lang.Double.NEGATIVE_INFINITY
-
- /** Positive infinity of type <a href="Double.html" target="_self">scala.Double</a>. */
- @deprecated("Use scala.Double.PositiveInfinity instead", "2.8.0")
- val POS_INF_DOUBLE = java.lang.Double.POSITIVE_INFINITY
-}
\ No newline at end of file
diff --git a/src/library/scala/MathCommon.scala b/src/library/scala/MathCommon.scala
deleted file mode 100644
index a2450fd..0000000
--- a/src/library/scala/MathCommon.scala
+++ /dev/null
@@ -1,143 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-
-/** Common code between the deprecated scala.Math object and
- * the scala.math package object.
- */
-private[scala] class MathCommon {
- /** The <code>double</code> value that is closer than any other to
- * <code>e</code>, the base of the natural logarithms.
- */
- val E = java.lang.Math.E
-
- /** The <code>double</code> value that is closer than any other to
- * <code>pi</code>, the ratio of the circumference of a circle to its
- * diameter.
- */
- val Pi = java.lang.Math.PI
-
- /** Returns a <code>double</code> value with a positive sign, greater than
- * or equal to <code>0.0</code> and less than <code>1.0</code>.
- */
- def random: Double = java.lang.Math.random()
-
- def sin(x: Double): Double = java.lang.Math.sin(x)
- def cos(x: Double): Double = java.lang.Math.cos(x)
- def tan(x: Double): Double = java.lang.Math.tan(x)
- def asin(x: Double): Double = java.lang.Math.asin(x)
- def acos(x: Double): Double = java.lang.Math.acos(x)
- def atan(x: Double): Double = java.lang.Math.atan(x)
-
- /** Converts an angle measured in degrees to an approximately equivalent
- * angle measured in radians.
- *
- * @param x an angle, in degrees
- * @return the measurement of the angle <code>x</code> in radians.
- */
- def toRadians(x: Double): Double = java.lang.Math.toRadians(x)
-
- /** Converts an angle measured in radians to an approximately equivalent
- * angle measured in degrees.
- *
- * @param x angle, in radians
- * @return the measurement of the angle <code>x</code> in degrees.
- */
- def toDegrees(x: Double): Double = java.lang.Math.toDegrees(x)
-
- /** Returns Euler's number <code>e</code> raised to the power of a
- * <code>double</code> value.
- *
- * @param x the exponent to raise <code>e</code> to.
- * @return the value <code>e<sup>a</sup></code>, where <code>e</code>
- * is the base of the natural logarithms.
- */
- def exp(x: Double): Double = java.lang.Math.exp(x)
- def log(x: Double): Double = java.lang.Math.log(x)
- def sqrt(x: Double): Double = java.lang.Math.sqrt(x)
- def IEEEremainder(x: Double, y: Double): Double = java.lang.Math.IEEEremainder(x, y)
-
- def ceil(x: Double): Double = java.lang.Math.ceil(x)
- def floor(x: Double): Double = java.lang.Math.floor(x)
-
- /** Returns the <code>double</code> value that is closest in value to the
- * argument and is equal to a mathematical integer.
- *
- * @param x a <code>double</code> value
- * @return the closest floating-point value to a that is equal to a
- * mathematical integer.
- */
- def rint(x: Double): Double = java.lang.Math.rint(x)
-
- /** Converts rectangular coordinates <code>(x, y)</code> to polar
- * <code>(r, theta)</code>.
- *
- * @param x the ordinate coordinate
- * @param y the abscissa coordinate
- * @return the <em>theta</em> component of the point <code>(r, theta)</code>
- * in polar coordinates that corresponds to the point
- * <code>(x, y)</code> in Cartesian coordinates.
- */
- def atan2(y: Double, x: Double): Double = java.lang.Math.atan2(y, x)
-
- /** Returns the value of the first argument raised to the power of the
- * second argument.
- *
- * @param x the base.
- * @param y the exponent.
- * @return the value <code>x<sup>y</sup></code>.
- */
- def pow(x: Double, y: Double): Double = java.lang.Math.pow(x, y)
-
- /** Returns the closest <code>long</code> to the argument.
- *
- * @param x a floating-point value to be rounded to a <code>long</code>.
- * @return the value of the argument rounded to the nearest
- * <code>long</code> value.
- */
- def round(x: Float): Int = java.lang.Math.round(x)
- def round(x: Double): Long = java.lang.Math.round(x)
- def abs(x: Int): Int = java.lang.Math.abs(x)
- def abs(x: Long): Long = java.lang.Math.abs(x)
- def abs(x: Float): Float = java.lang.Math.abs(x)
- def abs(x: Double): Double = java.lang.Math.abs(x)
-
- def max(x: Int, y: Int): Int = java.lang.Math.max(x, y)
- def max(x: Long, y: Long): Long = java.lang.Math.max(x, y)
- def max(x: Float, y: Float): Float = java.lang.Math.max(x, y)
- def max(x: Double, y: Double): Double = java.lang.Math.max(x, y)
-
- def min(x: Int, y: Int): Int = java.lang.Math.min(x, y)
- def min(x: Long, y: Long): Long = java.lang.Math.min(x, y)
- def min(x: Float, y: Float): Float = java.lang.Math.min(x, y)
- def min(x: Double, y: Double): Double = java.lang.Math.min(x, y)
-
- def signum(x: Double): Double =
- if (x == 0d) 0d
- else if (x < 0) -1.0
- else if (x > 0) 1.0
- else x // NaN
-
- def signum(x: Float): Float =
- if (x == 0f) 0f
- else if (x < 0) -1.0f
- else if (x > 0) 1.0f
- else x // NaN
-
- def signum(x: Long): Long =
- if (x == 0l) 0l
- else if (x < 0) -1l
- else 1l
-
- def signum(x: Int): Int =
- if (x == 0) 0
- else if (x < 0) -1
- else 1
-}
diff --git a/src/library/scala/Mutable.scala b/src/library/scala/Mutable.scala
index f25b06f..8ef0424 100644
--- a/src/library/scala/Mutable.scala
+++ b/src/library/scala/Mutable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/NotDefinedError.scala b/src/library/scala/NotDefinedError.scala
deleted file mode 100644
index 1fd0e7e..0000000
--- a/src/library/scala/NotDefinedError.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-
-/**
- * @since 2.0
- */
- at deprecated("Use a custom Error class instead", "2.8.0")
-final class NotDefinedError(msg: String) extends Error("not defined: " + msg)
diff --git a/src/library/scala/NotImplementedError.scala b/src/library/scala/NotImplementedError.scala
new file mode 100644
index 0000000..464a9a6
--- /dev/null
+++ b/src/library/scala/NotImplementedError.scala
@@ -0,0 +1,19 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala
+
+/** Throwing this exception can be a temporary replacement for a method
+ * body that remains to be implemented. For instance, the exception is thrown by
+ * `Predef.???`.
+ */
+final class NotImplementedError(msg: String) extends Error(msg) {
+ def this() = this("an implementation is missing")
+}
diff --git a/src/library/scala/NotNull.scala b/src/library/scala/NotNull.scala
index d47d47a..f87416b 100644
--- a/src/library/scala/NotNull.scala
+++ b/src/library/scala/NotNull.scala
@@ -1,17 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala
/**
* A marker trait for things that are not allowed to be null
* @since 2.5
*/
-trait NotNull {}
+trait NotNull extends Any {}
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index 8e938aa..3873df9 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,6 +9,9 @@
package scala
object Option {
+
+ import scala.language.implicitConversions
+
/** An implicit conversion that converts an option to an iterable value
*/
implicit def option2Iterable[A](xo: Option[A]): Iterable[A] = xo.toList
@@ -35,18 +38,18 @@ object Option {
* `foreach`:
*
* {{{
- * val name:Option[String] = request.getParameter("name")
+ * val name: Option[String] = request getParameter "name"
* val upper = name map { _.trim } filter { _.length != 0 } map { _.toUpperCase }
- * println(upper.getOrElse(""))
+ * println(upper getOrElse "")
* }}}
*
* Note that this is equivalent to {{{
* val upper = for {
- * name <- request.getParameter("name")
+ * name <- request getParameter "name"
* trimmed <- Some(name.trim)
* upper <- Some(trimmed.toUpperCase) if trimmed.length != 0
* } yield upper
- * println(upper.getOrElse(""))
+ * println(upper getOrElse "")
* }}}
*
* Because of how for comprehension works, if $none is returned
@@ -57,14 +60,12 @@ object Option {
* having to check for the existence of a value.
*
* A less-idiomatic way to use $option values is via pattern matching: {{{
- * val nameMaybe = request.getParameter("name")
+ * val nameMaybe = request getParameter "name"
* nameMaybe match {
- * case Some(name) => {
+ * case Some(name) =>
* println(name.trim.toUppercase)
- * }
- * case None => {
+ * case None =>
* println("No name value")
- * }
* }
* }}}
*
@@ -81,6 +82,17 @@ object Option {
* @define option [[scala.Option]]
* @define p `p`
* @define f `f`
+ * @define coll option
+ * @define Coll `Option`
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define collectExample
+ * @define undefinedorder
+ * @define thatinfo the class of the returned collection. In the standard library configuration, `That` is `Iterable[B]`
+ * @define bfinfo an implicit value of class `CanBuildFrom` which determines the result class `That` from the current
+ * representation type `Repr` and the new element type `B`.
*/
sealed abstract class Option[+A] extends Product with Serializable {
self =>
@@ -132,6 +144,18 @@ sealed abstract class Option[+A] extends Product with Serializable {
@inline final def map[B](f: A => B): Option[B] =
if (isEmpty) None else Some(f(this.get))
+ /** Returns the result of applying $f to this $option's
+ * value if the $option is nonempty. Otherwise, evaluates
+ * expression `ifEmpty`.
+ *
+ * @note This is equivalent to `$option map f getOrElse ifEmpty`.
+ *
+ * @param ifEmpty the expression to evaluate if empty.
+ * @param f the function to apply if nonempty.
+ */
+ @inline final def fold[B](ifEmpty: => B)(f: A => B): B =
+ if (isEmpty) ifEmpty else f(this.get)
+
/** Returns the result of applying $f to this $option's value if
* this $option is nonempty.
* Returns $none if this $option is empty.
@@ -145,6 +169,9 @@ sealed abstract class Option[+A] extends Product with Serializable {
@inline final def flatMap[B](f: A => Option[B]): Option[B] =
if (isEmpty) None else f(this.get)
+ def flatten[B](implicit ev: A <:< Option[B]): Option[B] =
+ if (isEmpty) None else ev(this.get)
+
/** Returns this $option if it is nonempty '''and''' applying the predicate $p to
* this $option's value returns true. Otherwise, return $none.
*
@@ -161,10 +188,15 @@ sealed abstract class Option[+A] extends Product with Serializable {
@inline final def filterNot(p: A => Boolean): Option[A] =
if (isEmpty || !p(this.get)) this else None
+ /** Returns false if the option is $none, true otherwise.
+ * @note Implemented here to avoid the implicit conversion to Iterable.
+ */
+ final def nonEmpty = isDefined
+
/** Necessary to keep $option from being implicitly converted to
* [[scala.collection.Iterable]] in `for` comprehensions.
*/
- def withFilter(p: A => Boolean): WithFilter = new WithFilter(p)
+ @inline final def withFilter(p: A => Boolean): WithFilter = new WithFilter(p)
/** We need a whole WithFilter class to honor the "doesn't create a new
* collection" contract even though it seems unlikely to matter much in a
@@ -186,6 +218,13 @@ sealed abstract class Option[+A] extends Product with Serializable {
@inline final def exists(p: A => Boolean): Boolean =
!isEmpty && p(this.get)
+ /** Returns true if this option is empty '''or''' the predicate
+ * $p returns true when applied to this $option's value.
+ *
+ * @param p the predicate to test
+ */
+ @inline final def forall(p: A => Boolean): Boolean = isEmpty || p(this.get)
+
/** Apply the given procedure $f to the option's value,
* if it is nonempty. Otherwise, do nothing.
*
@@ -207,7 +246,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* @return the result of applying `pf` to this $option's
* value (if possible), or $none.
*/
- def collect[B](pf: PartialFunction[A, B]): Option[B] =
+ @inline final def collect[B](pf: PartialFunction[A, B]): Option[B] =
if (!isEmpty && pf.isDefinedAt(this.get)) Some(pf(this.get)) else None
/** Returns this $option if it is nonempty,
@@ -227,11 +266,11 @@ sealed abstract class Option[+A] extends Product with Serializable {
* if it is nonempty, or the empty list if the $option is empty.
*/
def toList: List[A] =
- if (isEmpty) List() else List(this.get)
+ if (isEmpty) List() else new ::(this.get, Nil)
- /** Returns a [[scala.Left]] containing the given
+ /** Returns a [[scala.util.Left]] containing the given
* argument `left` if this $option is empty, or
- * a [[scala.Right]] containing this $option's value if
+ * a [[scala.util.Right]] containing this $option's value if
* this is nonempty.
*
* @param left the expression to evaluate and return if this is empty
@@ -240,9 +279,9 @@ sealed abstract class Option[+A] extends Product with Serializable {
@inline final def toRight[X](left: => X) =
if (isEmpty) Left(left) else Right(this.get)
- /** Returns a [[scala.Right]] containing the given
+ /** Returns a [[scala.util.Right]] containing the given
* argument `right` if this is empty, or
- * a [[scala.Left]] containing this $option's value
+ * a [[scala.util.Left]] containing this $option's value
* if this $option is nonempty.
*
* @param right the expression to evaluate and return if this is empty
diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala
index bb04d9e..9ff648a 100644
--- a/src/library/scala/PartialFunction.scala
+++ b/src/library/scala/PartialFunction.scala
@@ -1,19 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala
-/** A partial function of type `PartialFunction[A, B]` is a
- * unary function where the domain does not necessarily include all values of type
- * `A`. The function `isDefinedAt` allows to
- * test dynamically if a value is in the domain of the function.
+
+/** A partial function of type `PartialFunction[A, B]` is a unary function
+ * where the domain does not necessarily include all values of type `A`.
+ * The function `isDefinedAt` allows to test dynamically if a value is in
+ * the domain of the function.
*
* Even if `isDefinedAt` returns true for an `a: A`, calling `apply(a)` may
* still throw an exception, so the following code is legal:
@@ -28,37 +27,38 @@ package scala
*
* {{{
* val sample = 1 to 10
- * val isEven: PartialFunction[Int, String] = {
- * case x if x % 2 == 0 => x+" is even"
+ * val isEven: PartialFunction[Int, String] = {
+ * case x if x % 2 == 0 => x+" is even"
* }
*
* // the method collect can use isDefinedAt to select which members to collect
* val evenNumbers = sample collect isEven
*
- * val isOdd: PartialFunction[Int, String] = {
- * case x if x % 2 == 1 => x+" is odd"
+ * val isOdd: PartialFunction[Int, String] = {
+ * case x if x % 2 == 1 => x+" is odd"
* }
*
- * // the method orElse allows chaining another partial function to handle
+ * // the method orElse allows chaining another partial function to handle
* // input outside the declared domain
* val numbers = sample map (isEven orElse isOdd)
* }}}
*
*
- * @author Martin Odersky
+ * @author Martin Odersky, Pavel Pavlov, Adriaan Moors
* @version 1.0, 16/07/2003
*/
-trait PartialFunction[-A, +B] extends (A => B) {
+trait PartialFunction[-A, +B] extends (A => B) { self =>
+ import PartialFunction._
/** Checks if a value is contained in the function's domain.
*
* @param x the value to test
- * @return `true`, iff `x` is in the domain of this function, `false` otherwise.
+ * @return `'''true'''`, iff `x` is in the domain of this function, `'''false'''` otherwise.
*/
def isDefinedAt(x: A): Boolean
- /** Composes this partial function with a fallback partial function which gets applied where this partial function
- * is not defined.
+ /** Composes this partial function with a fallback partial function which
+ * gets applied where this partial function is not defined.
*
* @param that the fallback function
* @tparam A1 the argument type of the fallback function
@@ -67,41 +67,80 @@ trait PartialFunction[-A, +B] extends (A => B) {
* of this partial function and `that`. The resulting partial function
* takes `x` to `this(x)` where `this` is defined, and to `that(x)` where it is not.
*/
- def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]) : PartialFunction[A1, B1] =
- new PartialFunction[A1, B1] {
- def isDefinedAt(x: A1): Boolean =
- PartialFunction.this.isDefinedAt(x) || that.isDefinedAt(x)
- def apply(x: A1): B1 =
- if (PartialFunction.this.isDefinedAt(x)) PartialFunction.this.apply(x)
- else that.apply(x)
- }
+ def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]): PartialFunction[A1, B1] =
+ new OrElse[A1, B1] (this, that)
+ //TODO: why not overload it with orElse(that: F1): F1?
- /** Composes this partial function with a transformation function that gets applied
- * to results of this partial function.
+ /** Composes this partial function with a transformation function that
+ * gets applied to results of this partial function.
* @param k the transformation function
* @tparam C the result type of the transformation function.
* @return a partial function with the same domain as this partial function, which maps
* arguments `x` to `k(this(x))`.
*/
- override def andThen[C](k: B => C) : PartialFunction[A, C] = new PartialFunction[A, C] {
- def isDefinedAt(x: A): Boolean = PartialFunction.this.isDefinedAt(x)
- def apply(x: A): C = k(PartialFunction.this.apply(x))
- }
+ override def andThen[C](k: B => C): PartialFunction[A, C] =
+ new AndThen[A, B, C] (this, k)
- /** Turns this partial function into an plain function returning an `Option` result.
+ /** Turns this partial function into a plain function returning an `Option` result.
* @see Function.unlift
* @return a function that takes an argument `x` to `Some(this(x))` if `this`
* is defined for `x`, and to `None` otherwise.
*/
- def lift: A => Option[B] = new (A => Option[B]) {
- def apply(x: A): Option[B] = if (isDefinedAt(x)) Some(PartialFunction.this.apply(x)) else None
+ def lift: A => Option[B] = new Lifted(this)
+
+ /** Applies this partial function to the given argument when it is contained in the function domain.
+ * Applies fallback function where this partial function is not defined.
+ *
+ * Note that expression `pf.applyOrElse(x, default)` is equivalent to
+ * {{{ if(pf isDefinedAt x) pf(x) else default(x) }}}
+ * except that `applyOrElse` method can be implemented more efficiently.
+ * For all partial function literals compiler generates `applyOrElse` implementation which
+ * avoids double evaluation of pattern matchers and guards.
+ * This makes `applyOrElse` the basis for the efficient implementation for many operations and scenarios, such as:
+ *
+ * - combining partial functions into `orElse`/`andThen` chains does not lead to
+ * excessive `apply`/`isDefinedAt` evaluation
+ * - `lift` and `unlift` do not evaluate source functions twice on each invocation
+ * - `runWith` allows efficient imperative-style combining of partial functions
+ * with conditionally applied actions
+ *
+ * For non-literal partial function classes with nontrivial `isDefinedAt` method
+ * it is recommended to override `applyOrElse` with custom implementation that avoids
+ * double `isDefinedAt` evaluation. This may result in better performance
+ * and more predictable behavior w.r.t. side effects.
+ *
+ * @param x the function argument
+ * @param default the fallback function
+ * @return the result of this function or fallback function application.
+ * @since 2.10
+ */
+ def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
+ if (isDefinedAt(x)) apply(x) else default(x)
+
+ /** Composes this partial function with an action function which
+ * gets applied to results of this partial function.
+ * The action function is invoked only for its side effects; its result is ignored.
+ *
+ * Note that expression `pf.runWith(action)(x)` is equivalent to
+ * {{{ if(pf isDefinedAt x) { action(pf(x)); true } else false }}}
+ * except that `runWith` is implemented via `applyOrElse` and thus potentially more efficient.
+ * Using `runWith` avoids double evaluation of pattern matchers and guards for partial function literals.
+ * @see `applyOrElse`.
+ *
+ * @param action the action function
+ * @return a function which maps arguments `x` to `isDefinedAt(x)`. The resulting function
+ * runs `action(this(x))` where `this` is defined.
+ * @since 2.10
+ */
+ def runWith[U](action: B => U): A => Boolean = { x =>
+ val z = applyOrElse(x, checkFallback[B])
+ if (!fallbackOccurred(z)) { action(z); true } else false
}
}
/** A few handy operations which leverage the extra bit of information
* available in partial functions. Examples:
- *
- * <pre>
+ * {{{
* import PartialFunction._
*
* def strangeConditional(other: Any): Boolean = cond(other) {
@@ -109,13 +148,116 @@ trait PartialFunction[-A, +B] extends (A => B) {
* case x: Int => true
* }
* def onlyInt(v: Any): Option[Int] = condOpt(v) { case x: Int => x }
- * </pre>
+ * }}}
*
* @author Paul Phillips
* @since 2.8
*/
-object PartialFunction
-{
+object PartialFunction {
+ /** Composite function produced by `PartialFunction#orElse` method
+ */
+ private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) extends PartialFunction[A, B] {
+ def isDefinedAt(x: A) = f1.isDefinedAt(x) || f2.isDefinedAt(x)
+
+ def apply(x: A): B = f1.applyOrElse(x, f2)
+
+ override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = {
+ val z = f1.applyOrElse(x, checkFallback[B])
+ if (!fallbackOccurred(z)) z else f2.applyOrElse(x, default)
+ }
+
+ override def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]) =
+ new OrElse[A1, B1] (f1, f2 orElse that)
+
+ override def andThen[C](k: B => C) =
+ new OrElse[A, C] (f1 andThen k, f2 andThen k)
+ }
+
+ /** Composite function produced by `PartialFunction#andThen` method
+ */
+ private class AndThen[-A, B, +C] (pf: PartialFunction[A, B], k: B => C) extends PartialFunction[A, C] {
+ def isDefinedAt(x: A) = pf.isDefinedAt(x)
+
+ def apply(x: A): C = k(pf(x))
+
+ override def applyOrElse[A1 <: A, C1 >: C](x: A1, default: A1 => C1): C1 = {
+ val z = pf.applyOrElse(x, checkFallback[B])
+ if (!fallbackOccurred(z)) k(z) else default(x)
+ }
+ }
+
+ /** To implement patterns like {{{ if(pf isDefinedAt x) f1(pf(x)) else f2(x) }}} efficiently
+ * the following trick is used:
+ *
+ * To avoid double evaluation of pattern matchers & guards `applyOrElse` method is used here
+ * instead of `isDefinedAt`/`apply` pair.
+ *
+ * After call to `applyOrElse` we need both the function result it returned and
+ * the fact if the function's argument was contained in its domain. The only degree of freedom we have here
+ * to achieve this goal is tweaking with the continuation argument (`default`) of `applyOrElse` method.
+ * The obvious way is to throw an exception from `default` function and to catch it after
+ * calling `applyOrElse` but I consider this somewhat inefficient.
+ *
+ * I know only one way how you can do this task efficiently: `default` function should return unique marker object
+ * which never may be returned by any other (regular/partial) function. This way after calling `applyOrElse` you need
+ * just one reference comparison to distinguish if `pf isDefined x` or not.
+ *
+ * This correctly interacts with specialization as return type of `applyOrElse`
+ * (which is parameterized upper bound) can never be specialized.
+ *
+ * Here `fallback_pf` is used as both unique marker object and special fallback function that returns it.
+ */
+ private[this] val fallback_pf: PartialFunction[Any, Any] = { case _ => fallback_pf }
+ private def checkFallback[B] = fallback_pf.asInstanceOf[PartialFunction[Any, B]]
+ private def fallbackOccurred[B](x: B) = (fallback_pf eq x.asInstanceOf[AnyRef])
+
+ private class Lifted[-A, +B] (val pf: PartialFunction[A, B])
+ extends scala.runtime.AbstractFunction1[A, Option[B]] {
+
+ def apply(x: A): Option[B] = {
+ val z = pf.applyOrElse(x, checkFallback[B])
+ if (!fallbackOccurred(z)) Some(z) else None
+ }
+ }
+
+ private class Unlifted[A, B] (f: A => Option[B]) extends scala.runtime.AbstractPartialFunction[A, B] {
+ def isDefinedAt(x: A): Boolean = f(x).isDefined
+
+ override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = {
+ val z = f(x)
+ if (!z.isEmpty) z.get else default(x)
+ }
+
+ override def lift = f
+ }
+
+ private[scala] def unlifted[A, B](f: A => Option[B]): PartialFunction[A, B] = f match {
+ case lf: Lifted[A, B] => lf.pf
+ case ff => new Unlifted(ff)
+ }
+
+ /** Converts ordinary function to partial one
+ * @since 2.10
+ */
+ def apply[A, B](f: A => B): PartialFunction[A, B] = { case x => f(x) }
+
+ private[this] val constFalse: Any => Boolean = { _ => false}
+
+ private[this] val empty_pf: PartialFunction[Any, Nothing] = new PartialFunction[Any, Nothing] {
+ def isDefinedAt(x: Any) = false
+ def apply(x: Any) = throw new MatchError(x)
+ override def orElse[A1, B1](that: PartialFunction[A1, B1]) = that
+ override def andThen[C](k: Nothing => C) = this
+ override val lift = (x: Any) => None
+ override def runWith[U](action: Nothing => U) = constFalse
+ }
+
+ /** The partial function with empty domain.
+ * Any attempt to invoke empty partial function leads to throwing [[scala.MatchError]] exception.
+ * @since 2.10
+ */
+ def empty[A, B] : PartialFunction[A, B] = empty_pf
+
/** Creates a Boolean test based on a value and a partial function.
* It behaves like a 'match' statement with an implied 'case _ => false'
* following the supplied cases.
@@ -124,18 +266,17 @@ object PartialFunction
* @param pf the partial function
* @return true, iff `x` is in the domain of `pf` and `pf(x) == true`.
*/
- def cond[T](x: T)(pf: PartialFunction[T, Boolean]): Boolean =
- (pf isDefinedAt x) && pf(x)
+ def cond[T](x: T)(pf: PartialFunction[T, Boolean]): Boolean = pf.applyOrElse(x, constFalse)
/** Transforms a PartialFunction[T, U] `pf` into Function1[T, Option[U]] `f`
- * whose result is Some(x) if the argument is in pf's domain and None otherwise,
- * and applies it to the value `x`. In effect, it is a `match` statement
- * which wraps all case results in Some(_) and adds `case _ => None` to the end.
+ * whose result is `Some(x)` if the argument is in `pf`'s domain and `None`
+ * otherwise, and applies it to the value `x`. In effect, it is a
+ * `'''match'''` statement which wraps all case results in `Some(_)` and
+ * adds `'''case''' _ => None` to the end.
*
* @param x the value to test
* @param pf the PartialFunction[T, U]
* @return `Some(pf(x))` if `pf isDefinedAt x`, `None` otherwise.
*/
- def condOpt[T,U](x: T)(pf: PartialFunction[T, U]): Option[U] =
- if (pf isDefinedAt x) Some(pf(x)) else None
+ def condOpt[T,U](x: T)(pf: PartialFunction[T, U]): Option[U] = pf.lift(x)
}
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 54111dc..9bb5787 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,8 +12,9 @@ import scala.collection.{ mutable, immutable, generic }
import immutable.StringOps
import mutable.ArrayOps
import generic.CanBuildFrom
-import annotation.{ elidable, implicitNotFound }
-import annotation.elidable.ASSERTION
+import scala.annotation.{ elidable, implicitNotFound }
+import scala.annotation.elidable.ASSERTION
+import scala.language.{implicitConversions, existentials}
/** The `Predef` object provides definitions that are accessible in all Scala
* compilation units without explicit qualification.
@@ -95,23 +96,41 @@ object Predef extends LowPriorityImplicits {
type Set[A] = immutable.Set[A]
val Map = immutable.Map
val Set = immutable.Set
- val AnyRef = new SpecializableCompanion {} // a dummy used by the specialization annotation
+ // @deprecated("Use scala.AnyRef instead", "2.10.0")
+ // def AnyRef = scala.AnyRef
// Manifest types, companions, and incantations for summoning
+ @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.")
+ @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
type ClassManifest[T] = scala.reflect.ClassManifest[T]
- type Manifest[T] = scala.reflect.Manifest[T]
+ // TODO undeprecated until Scala reflection becomes non-experimental
+ // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
type OptManifest[T] = scala.reflect.OptManifest[T]
+ @annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
+ // TODO undeprecated until Scala reflection becomes non-experimental
+ // @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
+ type Manifest[T] = scala.reflect.Manifest[T]
+ @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
val ClassManifest = scala.reflect.ClassManifest
+ // TODO undeprecated until Scala reflection becomes non-experimental
+ // @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
val Manifest = scala.reflect.Manifest
+ // TODO undeprecated until Scala reflection becomes non-experimental
+ // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
val NoManifest = scala.reflect.NoManifest
+ // TODO undeprecated until Scala reflection becomes non-experimental
+ // @deprecated("Use scala.reflect.classTag[T] and scala.reflect.runtime.universe.typeTag[T] instead", "2.10.0")
def manifest[T](implicit m: Manifest[T]) = m
+ @deprecated("Use scala.reflect.classTag[T] instead", "2.10.0")
def classManifest[T](implicit m: ClassManifest[T]) = m
+ // TODO undeprecated until Scala reflection becomes non-experimental
+ // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
def optManifest[T](implicit m: OptManifest[T]) = m
// Minor variations on identity functions
def identity[A](x: A): A = x // @see `conforms` for the implicit version
- def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world
+ @inline def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world -- TODO: when dependent method types are on by default, give this result type `e.type`, so that inliner has better chance of knowing which method to inline in calls like `implicitly[MatchingStrategy[Option]].zero`
@inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements
// Apparently needed for the xml library
@@ -119,16 +138,16 @@ object Predef extends LowPriorityImplicits {
// Deprecated
- @deprecated("Use sys.error(message) instead", "2.9.0")
+ @deprecated("Use `sys.error(message)` instead", "2.9.0")
def error(message: String): Nothing = sys.error(message)
- @deprecated("Use sys.exit() instead", "2.9.0")
+ @deprecated("Use `sys.exit()` instead", "2.9.0")
def exit(): Nothing = sys.exit()
- @deprecated("Use sys.exit(status) instead", "2.9.0")
+ @deprecated("Use `sys.exit(status)` instead", "2.9.0")
def exit(status: Int): Nothing = sys.exit(status)
- @deprecated("Use formatString.format(args: _*) or arg.formatted(formatString) instead", "2.9.0")
+ @deprecated("Use `formatString.format(args: _*)` or `arg.formatted(formatString)` instead", "2.9.0")
def format(text: String, xs: Any*) = augmentString(text).format(xs: _*)
// errors and asserts -------------------------------------------------
@@ -138,7 +157,7 @@ object Predef extends LowPriorityImplicits {
* is at least `ASSERTION`.
*
* @see elidable
- * @param p the expression to test
+ * @param assertion the expression to test
*/
@elidable(ASSERTION)
def assert(assertion: Boolean) {
@@ -151,8 +170,8 @@ object Predef extends LowPriorityImplicits {
* is at least `ASSERTION`.
*
* @see elidable
- * @param p the expression to test
- * @param msg a String to include in the failure message
+ * @param assertion the expression to test
+ * @param message a String to include in the failure message
*/
@elidable(ASSERTION) @inline
final def assert(assertion: Boolean, message: => Any) {
@@ -167,7 +186,7 @@ object Predef extends LowPriorityImplicits {
* will not be generated if `-Xelide-below` is at least `ASSERTION`.
*
* @see elidable
- * @param p the expression to test
+ * @param assumption the expression to test
*/
@elidable(ASSERTION)
def assume(assumption: Boolean) {
@@ -182,8 +201,8 @@ object Predef extends LowPriorityImplicits {
* will not be generated if `-Xelide-below` is at least `ASSERTION`.
*
* @see elidable
- * @param p the expression to test
- * @param msg a String to include in the failure message
+ * @param assumption the expression to test
+ * @param message a String to include in the failure message
*/
@elidable(ASSERTION) @inline
final def assume(assumption: Boolean, message: => Any) {
@@ -195,7 +214,7 @@ object Predef extends LowPriorityImplicits {
* This method is similar to `assert`, but blames the caller of the method
* for violating the condition.
*
- * @param p the expression to test
+ * @param requirement the expression to test
*/
def require(requirement: Boolean) {
if (!requirement)
@@ -206,21 +225,31 @@ object Predef extends LowPriorityImplicits {
* This method is similar to `assert`, but blames the caller of the method
* for violating the condition.
*
- * @param p the expression to test
- * @param msg a String to include in the failure message
+ * @param requirement the expression to test
+ * @param message a String to include in the failure message
*/
@inline final def require(requirement: Boolean, message: => Any) {
if (!requirement)
throw new IllegalArgumentException("requirement failed: "+ message)
}
- final class Ensuring[A](val x: A) {
- def ensuring(cond: Boolean): A = { assert(cond); x }
- def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); x }
- def ensuring(cond: A => Boolean): A = { assert(cond(x)); x }
- def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(x), msg); x }
+ final class Ensuring[A](val __resultOfEnsuring: A) extends AnyVal {
+ // `__resultOfEnsuring` must be a public val to allow inlining.
+ // See comments in ArrowAssoc for more.
+ @deprecated("Use `__resultOfEnsuring` instead", "2.10.0")
+ def x = __resultOfEnsuring
+
+ def ensuring(cond: Boolean): A = { assert(cond); __resultOfEnsuring }
+ def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); __resultOfEnsuring }
+ def ensuring(cond: A => Boolean): A = { assert(cond(__resultOfEnsuring)); __resultOfEnsuring }
+ def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(__resultOfEnsuring), msg); __resultOfEnsuring }
}
- implicit def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x)
+ @inline implicit def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x)
+
+ /** `???` can be used for marking methods that remain to be implemented.
+ * @throws A `NotImplementedError`
+ */
+ def ??? : Nothing = throw new NotImplementedError
// tupling ------------------------------------------------------------
@@ -236,11 +265,20 @@ object Predef extends LowPriorityImplicits {
def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x)
}
- final class ArrowAssoc[A](val x: A) {
- @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(x, y)
+ final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal {
+ // `__leftOfArrow` must be a public val to allow inlining. The val
+ // used to be called `x`, but now goes by `__leftOfArrow`, as that
+ // reduces the chances of a user's writing `foo.__leftOfArrow` and
+ // being confused why they get an ambiguous implicit conversion
+ // error. (`foo.x` used to produce this error since both
+ // any2Ensuring and any2ArrowAssoc pimped an `x` onto everything)
+ @deprecated("Use `__leftOfArrow` instead", "2.10.0")
+ def x = __leftOfArrow
+
+ @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
def →[B](y: B): Tuple2[A, B] = ->(y)
}
- implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x)
+ @inline implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x)
// printing and reading -----------------------------------------------
@@ -250,7 +288,7 @@ object Predef extends LowPriorityImplicits {
def printf(text: String, xs: Any*) = Console.print(text.format(xs: _*))
def readLine(): String = Console.readLine()
- def readLine(text: String, args: Any*) = Console.readLine(text, args)
+ def readLine(text: String, args: Any*) = Console.readLine(text, args: _*)
def readBoolean() = Console.readBoolean()
def readByte() = Console.readByte()
def readShort() = Console.readShort()
@@ -266,69 +304,63 @@ object Predef extends LowPriorityImplicits {
// views --------------------------------------------------------------
- implicit def exceptionWrapper(exc: Throwable) = new runtime.RichException(exc)
-
- implicit def zipped2ToTraversable[El1, El2](zz: Tuple2[_, _]#Zipped[_, El1, _, El2]): Traversable[(El1, El2)] =
- new Traversable[(El1, El2)] {
- def foreach[U](f: ((El1, El2)) => U): Unit = zz foreach Function.untupled(f)
- }
-
- implicit def zipped3ToTraversable[El1, El2, El3](zz: Tuple3[_, _, _]#Zipped[_, El1, _, El2, _, El3]): Traversable[(El1, El2, El3)] =
- new Traversable[(El1, El2, El3)] {
- def foreach[U](f: ((El1, El2, El3)) => U): Unit = zz foreach Function.untupled(f)
- }
-
- implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = xs match {
- case x: Array[AnyRef] => refArrayOps[AnyRef](x).asInstanceOf[ArrayOps[T]]
- case x: Array[Int] => intArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Double] => doubleArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Long] => longArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Float] => floatArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Char] => charArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Byte] => byteArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Short] => shortArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Boolean] => booleanArrayOps(x).asInstanceOf[ArrayOps[T]]
- case x: Array[Unit] => unitArrayOps(x).asInstanceOf[ArrayOps[T]]
+ implicit def exceptionWrapper(exc: Throwable) = new runtime.RichException(exc)
+ implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2)) = new runtime.Tuple2Zipped.Ops(x)
+ implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3)) = new runtime.Tuple3Zipped.Ops(x)
+ implicit def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new runtime.SeqCharSequence(xs)
+ implicit def arrayToCharSequence(xs: Array[Char]): CharSequence = new runtime.ArrayCharSequence(xs, 0, xs.length)
+
+ implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match {
+ case x: Array[AnyRef] => refArrayOps[AnyRef](x)
+ case x: Array[Boolean] => booleanArrayOps(x)
+ case x: Array[Byte] => byteArrayOps(x)
+ case x: Array[Char] => charArrayOps(x)
+ case x: Array[Double] => doubleArrayOps(x)
+ case x: Array[Float] => floatArrayOps(x)
+ case x: Array[Int] => intArrayOps(x)
+ case x: Array[Long] => longArrayOps(x)
+ case x: Array[Short] => shortArrayOps(x)
+ case x: Array[Unit] => unitArrayOps(x)
case null => null
- }
+ }).asInstanceOf[ArrayOps[T]]
- implicit def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps[T] = new ArrayOps.ofRef[T](xs)
- implicit def intArrayOps(xs: Array[Int]): ArrayOps[Int] = new ArrayOps.ofInt(xs)
- implicit def doubleArrayOps(xs: Array[Double]): ArrayOps[Double] = new ArrayOps.ofDouble(xs)
- implicit def longArrayOps(xs: Array[Long]): ArrayOps[Long] = new ArrayOps.ofLong(xs)
- implicit def floatArrayOps(xs: Array[Float]): ArrayOps[Float] = new ArrayOps.ofFloat(xs)
- implicit def charArrayOps(xs: Array[Char]): ArrayOps[Char] = new ArrayOps.ofChar(xs)
- implicit def byteArrayOps(xs: Array[Byte]): ArrayOps[Byte] = new ArrayOps.ofByte(xs)
- implicit def shortArrayOps(xs: Array[Short]): ArrayOps[Short] = new ArrayOps.ofShort(xs)
implicit def booleanArrayOps(xs: Array[Boolean]): ArrayOps[Boolean] = new ArrayOps.ofBoolean(xs)
- implicit def unitArrayOps(xs: Array[Unit]): ArrayOps[Unit] = new ArrayOps.ofUnit(xs)
+ implicit def byteArrayOps(xs: Array[Byte]): ArrayOps[Byte] = new ArrayOps.ofByte(xs)
+ implicit def charArrayOps(xs: Array[Char]): ArrayOps[Char] = new ArrayOps.ofChar(xs)
+ implicit def doubleArrayOps(xs: Array[Double]): ArrayOps[Double] = new ArrayOps.ofDouble(xs)
+ implicit def floatArrayOps(xs: Array[Float]): ArrayOps[Float] = new ArrayOps.ofFloat(xs)
+ implicit def intArrayOps(xs: Array[Int]): ArrayOps[Int] = new ArrayOps.ofInt(xs)
+ implicit def longArrayOps(xs: Array[Long]): ArrayOps[Long] = new ArrayOps.ofLong(xs)
+ implicit def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps[T] = new ArrayOps.ofRef[T](xs)
+ implicit def shortArrayOps(xs: Array[Short]): ArrayOps[Short] = new ArrayOps.ofShort(xs)
+ implicit def unitArrayOps(xs: Array[Unit]): ArrayOps[Unit] = new ArrayOps.ofUnit(xs)
// Primitive Widenings --------------------------------------------------------------
- implicit def byte2short(x: Byte): Short = x.toShort
- implicit def byte2int(x: Byte): Int = x.toInt
- implicit def byte2long(x: Byte): Long = x.toLong
- implicit def byte2float(x: Byte): Float = x.toFloat
- implicit def byte2double(x: Byte): Double = x.toDouble
+ @deprecated("Use `.toShort` for explicit conversion and `Byte.byte2short` for implicit conversion", "2.10.0") def byte2short(x: Byte): Short = x.toShort
+ @deprecated("Use `.toInt` for explicit conversion and `Byte.byte2int` for implicit conversion", "2.10.0") def byte2int(x: Byte): Int = x.toInt
+ @deprecated("Use `.toLong` for explicit conversion and `Byte.byte2long for implicit conversion", "2.10.0") def byte2long(x: Byte): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Byte.byte2float` for implicit conversion", "2.10.0") def byte2float(x: Byte): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Byte.byte2double` for implicit conversion", "2.10.0") def byte2double(x: Byte): Double = x.toDouble
- implicit def short2int(x: Short): Int = x.toInt
- implicit def short2long(x: Short): Long = x.toLong
- implicit def short2float(x: Short): Float = x.toFloat
- implicit def short2double(x: Short): Double = x.toDouble
+ @deprecated("Use `.toInt` for explicit conversion and `Short.short2int` for implicit conversion", "2.10.0") def short2int(x: Short): Int = x.toInt
+ @deprecated("Use `.toLong` for explicit conversion and `Short.short2long` for implicit conversion", "2.10.0") def short2long(x: Short): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Short.short2float` for implicit conversion", "2.10.0") def short2float(x: Short): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Short.short2double` for implicit conversion", "2.10.0") def short2double(x: Short): Double = x.toDouble
- implicit def char2int(x: Char): Int = x.toInt
- implicit def char2long(x: Char): Long = x.toLong
- implicit def char2float(x: Char): Float = x.toFloat
- implicit def char2double(x: Char): Double = x.toDouble
+ @deprecated("Use `.toInt` for explicit conversion and `Char.char2int` for implicit conversion", "2.10.0") def char2int(x: Char): Int = x.toInt
+ @deprecated("Use `.toLong` for explicit conversion and `Char.char2long` for implicit conversion", "2.10.0") def char2long(x: Char): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Char.char2float` for implicit conversion", "2.10.0") def char2float(x: Char): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Char.char2double` for implicit conversion", "2.10.0") def char2double(x: Char): Double = x.toDouble
- implicit def int2long(x: Int): Long = x.toLong
- implicit def int2float(x: Int): Float = x.toFloat
- implicit def int2double(x: Int): Double = x.toDouble
+ @deprecated("Use `.toLong` for explicit conversion and `Int.int2long` for implicit conversion", "2.10.0") def int2long(x: Int): Long = x.toLong
+ @deprecated("Use `.toFloat` for explicit conversion and `Int.int2float` for implicit conversion", "2.10.0") def int2float(x: Int): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Int.int2double` for implicit conversion", "2.10.0") def int2double(x: Int): Double = x.toDouble
- implicit def long2float(x: Long): Float = x.toFloat
- implicit def long2double(x: Long): Double = x.toDouble
+ @deprecated("Use `.toFloat` for explicit conversion and `Long.long2float` for implicit conversion", "2.10.0") def long2float(x: Long): Float = x.toFloat
+ @deprecated("Use `.toDouble` for explicit conversion and `Long.long2double` for implicit conversion", "2.10.0") def long2double(x: Long): Double = x.toDouble
- implicit def float2double(x: Float): Double = x.toDouble
+ @deprecated("Use `.toDouble` for explicit conversion and `Float.float2double` for implicit conversion", "2.10.0") def float2double(x: Float): Double = x.toDouble
// "Autoboxing" and "Autounboxing" ---------------------------------------------------
@@ -365,28 +397,17 @@ object Predef extends LowPriorityImplicits {
// Strings and CharSequences --------------------------------------------------------------
+ @inline implicit def any2stringfmt(x: Any) = new runtime.StringFormat(x)
+ @inline implicit def augmentString(x: String): StringOps = new StringOps(x)
implicit def any2stringadd(x: Any) = new runtime.StringAdd(x)
- implicit def augmentString(x: String): StringOps = new StringOps(x)
implicit def unaugmentString(x: StringOps): String = x.repr
- implicit def stringCanBuildFrom: CanBuildFrom[String, Char, String] =
- new CanBuildFrom[String, Char, String] {
- def apply(from: String) = apply()
- def apply() = mutable.StringBuilder.newBuilder
- }
-
- implicit def seqToCharSequence(xs: collection.IndexedSeq[Char]): CharSequence = new CharSequence {
- def length: Int = xs.length
- def charAt(index: Int): Char = xs(index)
- def subSequence(start: Int, end: Int): CharSequence = seqToCharSequence(xs.slice(start, end))
- override def toString: String = xs.mkString("")
- }
+ @deprecated("Use `StringCanBuildFrom`", "2.10.0")
+ def stringCanBuildFrom: CanBuildFrom[String, Char, String] = StringCanBuildFrom
- implicit def arrayToCharSequence(xs: Array[Char]): CharSequence = new CharSequence {
- def length: Int = xs.length
- def charAt(index: Int): Char = xs(index)
- def subSequence(start: Int, end: Int): CharSequence = arrayToCharSequence(xs.slice(start, end))
- override def toString: String = xs.mkString("")
+ implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] {
+ def apply(from: String) = apply()
+ def apply() = mutable.StringBuilder.newBuilder
}
// Type Constraints --------------------------------------------------------------
@@ -426,22 +447,15 @@ object Predef extends LowPriorityImplicits {
implicit def tpEquals[A]: A =:= A = singleton_=:=.asInstanceOf[A =:= A]
}
- // less useful due to #2781
- @deprecated("Use From => To instead", "2.9.0")
- sealed abstract class <%<[-From, +To] extends (From => To) with Serializable
- object <%< {
- implicit def conformsOrViewsAs[A <% B, B]: A <%< B = new (A <%< B) {def apply(x: A) = x}
- }
-
/** A type for which there is always an implicit value.
- * @see fallbackCanBuildFrom in Array.scala
+ * @see [[scala.Array$]], method `fallbackCanBuildFrom`
*/
class DummyImplicit
object DummyImplicit {
/** An implicit value yielding a `DummyImplicit`.
- * @see fallbackCanBuildFrom in Array.scala
+ * @see [[scala.Array$]], method `fallbackCanBuildFrom`
*/
implicit def dummyImplicit: DummyImplicit = new DummyImplicit
}
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index 1dbf46b..2c6838f 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -8,46 +8,43 @@
package scala
-/** Base trait for all products, which in the standard library include at least
- * [[scala.Product1]] through [[scala.Product22]] and therefore also their
- * subclasses [[scala.Tuple1]] through [[scala.Tuple22]]. In addition, all case
- * classes implement Product with synthetically generated methods.
+/** Base trait for all products, which in the standard library include at
+ * least [[scala.Product1]] through [[scala.Product22]] and therefore also
+ * their subclasses [[scala.Tuple1]] through [[scala.Tuple22]]. In addition,
+ * all case classes implement `Product` with synthetically generated methods.
*
* @author Burak Emir
* @version 1.0
* @since 2.3
*/
-trait Product extends Equals {
- /** The nth element of this product, 0-based. In other words, for a
- * product `A(x_1, ..., x_k)`, returns x_(n+1) where 0 < n < k.
+trait Product extends Any with Equals {
+ /** The n^th^ element of this product, 0-based. In other words, for a
+ * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 < n < k`.
*
* @param n the index of the element to return
- * @throws IndexOutOfBoundsException
+ * @throws `IndexOutOfBoundsException`
* @return the element `n` elements after the first element
*/
def productElement(n: Int): Any
/** The size of this product.
- * @return for a product `A(x_1, ..., x_k)`, returns `k`
+ * @return for a product `A(x,,1,,, ..., x,,k,,)`, returns `k`
*/
def productArity: Int
/** An iterator over all the elements of this product.
- * @return in the default implementation, an Iterator[Any]
+ * @return in the default implementation, an `Iterator[Any]`
*/
- def productIterator: Iterator[Any] = new Iterator[Any] {
+ def productIterator: Iterator[Any] = new scala.collection.AbstractIterator[Any] {
private var c: Int = 0
private val cmax = productArity
def hasNext = c < cmax
def next() = { val result = productElement(c); c += 1; result }
}
- @deprecated("use productIterator instead", "2.8.0")
- def productElements: Iterator[Any] = productIterator
-
/** A string used in the `toString` methods of derived classes.
* Implementations may override this method to prepend a string prefix
- * to the result of toString methods.
+ * to the result of `toString` methods.
*
* @return in the default implementation, the empty string
*/
diff --git a/src/library/scala/Product1.scala b/src/library/scala/Product1.scala
index ab8b0a4..edd095c 100644
--- a/src/library/scala/Product1.scala
+++ b/src/library/scala/Product1.scala
@@ -17,23 +17,23 @@ object Product1 {
/** Product1 is a cartesian product of 1 component.
* @since 2.3
*/
-trait Product1[@specialized(Int, Long, Double) +T1] extends Product {
+trait Product1[@specialized(Int, Long, Double) +T1] extends Any with Product {
/** The arity of this product.
* @return 1
*/
override def productArity = 1
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case _ => throw new IndexOutOfBoundsException(n.toString())
}
diff --git a/src/library/scala/Product10.scala b/src/library/scala/Product10.scala
index 536fb2f..8daefde 100644
--- a/src/library/scala/Product10.scala
+++ b/src/library/scala/Product10.scala
@@ -17,23 +17,23 @@ object Product10 {
/** Product10 is a cartesian product of 10 components.
* @since 2.3
*/
-trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Product {
+trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Any with Product {
/** The arity of this product.
* @return 10
*/
override def productArity = 10
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product11.scala b/src/library/scala/Product11.scala
index 7d49ecc..90b4e80 100644
--- a/src/library/scala/Product11.scala
+++ b/src/library/scala/Product11.scala
@@ -17,23 +17,23 @@ object Product11 {
/** Product11 is a cartesian product of 11 components.
* @since 2.3
*/
-trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends Product {
+trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends Any with Product {
/** The arity of this product.
* @return 11
*/
override def productArity = 11
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product12.scala b/src/library/scala/Product12.scala
index 0e9c4a0..d5997ea 100644
--- a/src/library/scala/Product12.scala
+++ b/src/library/scala/Product12.scala
@@ -17,23 +17,23 @@ object Product12 {
/** Product12 is a cartesian product of 12 components.
* @since 2.3
*/
-trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] extends Product {
+trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] extends Any with Product {
/** The arity of this product.
* @return 12
*/
override def productArity = 12
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product13.scala b/src/library/scala/Product13.scala
index a062920..db8e0f3 100644
--- a/src/library/scala/Product13.scala
+++ b/src/library/scala/Product13.scala
@@ -17,23 +17,23 @@ object Product13 {
/** Product13 is a cartesian product of 13 components.
* @since 2.3
*/
-trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13] extends Product {
+trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13] extends Any with Product {
/** The arity of this product.
* @return 13
*/
override def productArity = 13
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product14.scala b/src/library/scala/Product14.scala
index 32dda81..113c07e 100644
--- a/src/library/scala/Product14.scala
+++ b/src/library/scala/Product14.scala
@@ -17,23 +17,23 @@ object Product14 {
/** Product14 is a cartesian product of 14 components.
* @since 2.3
*/
-trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14] extends Product {
+trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14] extends Any with Product {
/** The arity of this product.
* @return 14
*/
override def productArity = 14
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product15.scala b/src/library/scala/Product15.scala
index 57851f9..a6ad9c7 100644
--- a/src/library/scala/Product15.scala
+++ b/src/library/scala/Product15.scala
@@ -17,23 +17,23 @@ object Product15 {
/** Product15 is a cartesian product of 15 components.
* @since 2.3
*/
-trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15] extends Product {
+trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15] extends Any with Product {
/** The arity of this product.
* @return 15
*/
override def productArity = 15
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product16.scala b/src/library/scala/Product16.scala
index 75076f3..cbf47ec 100644
--- a/src/library/scala/Product16.scala
+++ b/src/library/scala/Product16.scala
@@ -17,23 +17,23 @@ object Product16 {
/** Product16 is a cartesian product of 16 components.
* @since 2.3
*/
-trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16] extends Product {
+trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16] extends Any with Product {
/** The arity of this product.
* @return 16
*/
override def productArity = 16
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product17.scala b/src/library/scala/Product17.scala
index 9ee6072..f56836b 100644
--- a/src/library/scala/Product17.scala
+++ b/src/library/scala/Product17.scala
@@ -17,23 +17,23 @@ object Product17 {
/** Product17 is a cartesian product of 17 components.
* @since 2.3
*/
-trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17] extends Product {
+trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17] extends Any with Product {
/** The arity of this product.
* @return 17
*/
override def productArity = 17
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product18.scala b/src/library/scala/Product18.scala
index 25d0839..5b86bcf 100644
--- a/src/library/scala/Product18.scala
+++ b/src/library/scala/Product18.scala
@@ -17,23 +17,23 @@ object Product18 {
/** Product18 is a cartesian product of 18 components.
* @since 2.3
*/
-trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18] extends Product {
+trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18] extends Any with Product {
/** The arity of this product.
* @return 18
*/
override def productArity = 18
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product19.scala b/src/library/scala/Product19.scala
index 5464de7..ed4bf36 100644
--- a/src/library/scala/Product19.scala
+++ b/src/library/scala/Product19.scala
@@ -17,23 +17,23 @@ object Product19 {
/** Product19 is a cartesian product of 19 components.
* @since 2.3
*/
-trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19] extends Product {
+trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19] extends Any with Product {
/** The arity of this product.
* @return 19
*/
override def productArity = 19
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product2.scala b/src/library/scala/Product2.scala
index 8097245..e27e54e 100644
--- a/src/library/scala/Product2.scala
+++ b/src/library/scala/Product2.scala
@@ -17,23 +17,23 @@ object Product2 {
/** Product2 is a cartesian product of 2 components.
* @since 2.3
*/
-trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Double) +T2] extends Product {
+trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Double) +T2] extends Any with Product {
/** The arity of this product.
* @return 2
*/
override def productArity = 2
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case _ => throw new IndexOutOfBoundsException(n.toString())
diff --git a/src/library/scala/Product20.scala b/src/library/scala/Product20.scala
index b094e09..47437a2 100644
--- a/src/library/scala/Product20.scala
+++ b/src/library/scala/Product20.scala
@@ -17,23 +17,23 @@ object Product20 {
/** Product20 is a cartesian product of 20 components.
* @since 2.3
*/
-trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20] extends Product {
+trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20] extends Any with Product {
/** The arity of this product.
* @return 20
*/
override def productArity = 20
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product21.scala b/src/library/scala/Product21.scala
index fa06cfb..319d272 100644
--- a/src/library/scala/Product21.scala
+++ b/src/library/scala/Product21.scala
@@ -17,23 +17,23 @@ object Product21 {
/** Product21 is a cartesian product of 21 components.
* @since 2.3
*/
-trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21] extends Product {
+trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21] extends Any with Product {
/** The arity of this product.
* @return 21
*/
override def productArity = 21
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product22.scala b/src/library/scala/Product22.scala
index 46038bf..6ab3737 100644
--- a/src/library/scala/Product22.scala
+++ b/src/library/scala/Product22.scala
@@ -17,23 +17,23 @@ object Product22 {
/** Product22 is a cartesian product of 22 components.
* @since 2.3
*/
-trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21, +T22] extends Product {
+trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21, +T22] extends Any with Product {
/** The arity of this product.
* @return 22
*/
override def productArity = 22
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product3.scala b/src/library/scala/Product3.scala
index 3a4cd8f..1cfbd79 100644
--- a/src/library/scala/Product3.scala
+++ b/src/library/scala/Product3.scala
@@ -17,23 +17,23 @@ object Product3 {
/** Product3 is a cartesian product of 3 components.
* @since 2.3
*/
-trait Product3[+T1, +T2, +T3] extends Product {
+trait Product3[+T1, +T2, +T3] extends Any with Product {
/** The arity of this product.
* @return 3
*/
override def productArity = 3
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product4.scala b/src/library/scala/Product4.scala
index a4d4745..843571f 100644
--- a/src/library/scala/Product4.scala
+++ b/src/library/scala/Product4.scala
@@ -17,23 +17,23 @@ object Product4 {
/** Product4 is a cartesian product of 4 components.
* @since 2.3
*/
-trait Product4[+T1, +T2, +T3, +T4] extends Product {
+trait Product4[+T1, +T2, +T3, +T4] extends Any with Product {
/** The arity of this product.
* @return 4
*/
override def productArity = 4
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product5.scala b/src/library/scala/Product5.scala
index 9f25e70..df73bba 100644
--- a/src/library/scala/Product5.scala
+++ b/src/library/scala/Product5.scala
@@ -17,23 +17,23 @@ object Product5 {
/** Product5 is a cartesian product of 5 components.
* @since 2.3
*/
-trait Product5[+T1, +T2, +T3, +T4, +T5] extends Product {
+trait Product5[+T1, +T2, +T3, +T4, +T5] extends Any with Product {
/** The arity of this product.
* @return 5
*/
override def productArity = 5
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product6.scala b/src/library/scala/Product6.scala
index 87fd318..36906ca 100644
--- a/src/library/scala/Product6.scala
+++ b/src/library/scala/Product6.scala
@@ -17,23 +17,23 @@ object Product6 {
/** Product6 is a cartesian product of 6 components.
* @since 2.3
*/
-trait Product6[+T1, +T2, +T3, +T4, +T5, +T6] extends Product {
+trait Product6[+T1, +T2, +T3, +T4, +T5, +T6] extends Any with Product {
/** The arity of this product.
* @return 6
*/
override def productArity = 6
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product7.scala b/src/library/scala/Product7.scala
index d074503..e7b2c13 100644
--- a/src/library/scala/Product7.scala
+++ b/src/library/scala/Product7.scala
@@ -17,23 +17,23 @@ object Product7 {
/** Product7 is a cartesian product of 7 components.
* @since 2.3
*/
-trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Product {
+trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Any with Product {
/** The arity of this product.
* @return 7
*/
override def productArity = 7
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product8.scala b/src/library/scala/Product8.scala
index bd6150c..916e57e 100644
--- a/src/library/scala/Product8.scala
+++ b/src/library/scala/Product8.scala
@@ -17,23 +17,23 @@ object Product8 {
/** Product8 is a cartesian product of 8 components.
* @since 2.3
*/
-trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Product {
+trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Any with Product {
/** The arity of this product.
* @return 8
*/
override def productArity = 8
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Product9.scala b/src/library/scala/Product9.scala
index 1f04294..d5e72ed 100644
--- a/src/library/scala/Product9.scala
+++ b/src/library/scala/Product9.scala
@@ -17,23 +17,23 @@ object Product9 {
/** Product9 is a cartesian product of 9 components.
* @since 2.3
*/
-trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Product {
+trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Any with Product {
/** The arity of this product.
* @return 9
*/
override def productArity = 9
-
+
/** Returns the n-th projection of this product if 0 < n <= productArity,
* otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
- * @return same as `._(n+1)`, for example `productElement(1)` is the same as `._1`.
+ * @return same as `._(n+1)`, for example `productElement(0)` is the same as `._1`.
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
- override def productElement(n: Int) = n match {
+ override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
diff --git a/src/library/scala/Proxy.scala b/src/library/scala/Proxy.scala
index c9573fb..07fa6e2 100644
--- a/src/library/scala/Proxy.scala
+++ b/src/library/scala/Proxy.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,35 +9,36 @@
package scala
/** This class implements a simple proxy that forwards all calls to
- * the public, non-final methods defined in class "Any" to another
+ * the public, non-final methods defined in class `Any` to another
* object self. Those methods are:
- *
+ * {{{
* def hashCode(): Int
* def equals(other: Any): Boolean
* def toString(): String
- *
- * Note: forwarding methods in this way will most likely create
+ * }}}
+ * '''Note:''' forwarding methods in this way will most likely create
* an asymmetric equals method, which is not generally recommended.
*
* @author Matthias Zenger
* @version 1.0, 26/04/2004
*/
-trait Proxy {
+trait Proxy extends Any {
def self: Any
override def hashCode: Int = self.hashCode
override def equals(that: Any): Boolean = that match {
- case null => false
- case x: Equals => (x canEqual self) && (x equals self)
- case x => (x equals self)
+ case null => false
+ case _ =>
+ val x = that.asInstanceOf[AnyRef]
+ (x eq this.asInstanceOf[AnyRef]) || (x eq self.asInstanceOf[AnyRef]) || (x equals self)
}
- override def toString: String = self.toString
+ override def toString = "" + self
}
object Proxy {
/** A proxy which exposes the type it is proxying for via a type parameter.
*/
- trait Typed[T] extends Proxy {
+ trait Typed[T] extends Any with Proxy {
def self: T
}
}
diff --git a/src/library/scala/Responder.scala b/src/library/scala/Responder.scala
index 2c6556d..0a42ddb 100644
--- a/src/library/scala/Responder.scala
+++ b/src/library/scala/Responder.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -20,25 +20,18 @@ package scala
*/
object Responder {
- /** Creates a responder that answer continuations with the constant
- * <code>a</code>.
- *
- * @param x ...
- * @return ...
+ /** Creates a responder that answer continuations with the constant `a`.
*/
def constant[A](x: A) = new Responder[A] {
def respond(k: A => Unit) = k(x)
}
- /** Executes <code>x</code> and returns <code>true</code>, useful
- * as syntactic convenience in for comprehensions.
- *
- * @param x ...
- * @return ...
+ /** Executes `x` and returns `'''true'''`, useful as syntactic
+ * convenience in for comprehensions.
*/
def exec[A](x: => Unit): Boolean = { x; true }
- /** runs a responder, returning an optional result
+ /** Runs a responder, returning an optional result.
*/
def run[A](r: Responder[A]): Option[A] = {
var result: Option[A] = None
@@ -47,12 +40,11 @@ object Responder {
}
def loop[A](r: Responder[Unit]): Responder[Nothing] =
- for (_ <- r; val y <- loop(r)) yield y
+ for (_ <- r; y <- loop(r)) yield y
def loopWhile[A](cond: => Boolean)(r: Responder[Unit]): Responder[Unit] =
- if (cond) for (_ <- r; val y <- loopWhile(cond)(r)) yield y
+ if (cond) for (_ <- r; y <- loopWhile(cond)(r)) yield y
else constant(())
-
}
/** Instances of responder are the building blocks of small programs
@@ -92,4 +84,3 @@ abstract class Responder[+A] extends Serializable {
override def toString = "Responder"
}
-
diff --git a/src/library/scala/ScalaObject.scala b/src/library/scala/ScalaObject.scala
index 8da0ab2..f67dc3a 100644
--- a/src/library/scala/ScalaObject.scala
+++ b/src/library/scala/ScalaObject.scala
@@ -1,13 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala
-trait ScalaObject extends java.lang.Object
+/** Until scala 2.10.0 this marker trait was added to
+ * scala-compiled classes. Now it only exists for backward
+ * compatibility.
+ */
+ at deprecated("ScalaObject will be removed", "2.10.0")
+trait ScalaObject
diff --git a/src/library/scala/SerialVersionUID.scala b/src/library/scala/SerialVersionUID.scala
index 5e1b07b..1f7d047 100644
--- a/src/library/scala/SerialVersionUID.scala
+++ b/src/library/scala/SerialVersionUID.scala
@@ -1,18 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
*/
-
-
package scala
-
/**
- * Annotation for specifying the <code>static SerialVersionUID</code> field
+ * Annotation for specifying the `static SerialVersionUID` field
* of a serializable class.
*/
-class SerialVersionUID(uid: Long) extends annotation.StaticAnnotation
+class SerialVersionUID(uid: Long) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/Serializable.scala b/src/library/scala/Serializable.scala
index 9be258b..596ee98 100644
--- a/src/library/scala/Serializable.scala
+++ b/src/library/scala/Serializable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,4 +11,4 @@ package scala
/**
* Classes extending this trait are serializable across platforms (Java, .NET).
*/
-trait Serializable extends java.io.Serializable
+trait Serializable extends Any with java.io.Serializable
diff --git a/src/library/scala/Short.scala b/src/library/scala/Short.scala
index 8195350..35c5fe3 100644
--- a/src/library/scala/Short.scala
+++ b/src/library/scala/Short.scala
@@ -10,141 +10,589 @@
package scala
-/** `Short` is a member of the value classes, those whose instances are
- * not represented as objects by the underlying host system.
+import scala.language.implicitConversions
+
+/** `Short`, a 16-bit signed integer (equivalent to Java's `short` primitive type) is a
+ * subtype of [[scala.AnyVal]]. Instances of `Short` are not
+ * represented by an object in the underlying runtime system.
*
* There is an implicit conversion from [[scala.Short]] => [[scala.runtime.RichShort]]
* which provides useful non-primitive operations.
*/
-final class Short extends AnyVal {
- def toByte: Byte = sys.error("stub")
- def toShort: Short = sys.error("stub")
- def toChar: Char = sys.error("stub")
- def toInt: Int = sys.error("stub")
- def toLong: Long = sys.error("stub")
- def toFloat: Float = sys.error("stub")
- def toDouble: Double = sys.error("stub")
-
- def unary_+ : Int = sys.error("stub")
- def unary_- : Int = sys.error("stub")
- def unary_~ : Int = sys.error("stub")
-
- def +(x: String): String = sys.error("stub")
-
- def <<(x: Int): Int = sys.error("stub")
- def <<(x: Long): Int = sys.error("stub")
- def >>>(x: Int): Int = sys.error("stub")
- def >>>(x: Long): Int = sys.error("stub")
- def >>(x: Int): Int = sys.error("stub")
- def >>(x: Long): Int = sys.error("stub")
-
- def ==(x: Byte): Boolean = sys.error("stub")
- def ==(x: Short): Boolean = sys.error("stub")
- def ==(x: Char): Boolean = sys.error("stub")
- def ==(x: Int): Boolean = sys.error("stub")
- def ==(x: Long): Boolean = sys.error("stub")
- def ==(x: Float): Boolean = sys.error("stub")
- def ==(x: Double): Boolean = sys.error("stub")
-
- def !=(x: Byte): Boolean = sys.error("stub")
- def !=(x: Short): Boolean = sys.error("stub")
- def !=(x: Char): Boolean = sys.error("stub")
- def !=(x: Int): Boolean = sys.error("stub")
- def !=(x: Long): Boolean = sys.error("stub")
- def !=(x: Float): Boolean = sys.error("stub")
- def !=(x: Double): Boolean = sys.error("stub")
-
- def <(x: Byte): Boolean = sys.error("stub")
- def <(x: Short): Boolean = sys.error("stub")
- def <(x: Char): Boolean = sys.error("stub")
- def <(x: Int): Boolean = sys.error("stub")
- def <(x: Long): Boolean = sys.error("stub")
- def <(x: Float): Boolean = sys.error("stub")
- def <(x: Double): Boolean = sys.error("stub")
-
- def <=(x: Byte): Boolean = sys.error("stub")
- def <=(x: Short): Boolean = sys.error("stub")
- def <=(x: Char): Boolean = sys.error("stub")
- def <=(x: Int): Boolean = sys.error("stub")
- def <=(x: Long): Boolean = sys.error("stub")
- def <=(x: Float): Boolean = sys.error("stub")
- def <=(x: Double): Boolean = sys.error("stub")
-
- def >(x: Byte): Boolean = sys.error("stub")
- def >(x: Short): Boolean = sys.error("stub")
- def >(x: Char): Boolean = sys.error("stub")
- def >(x: Int): Boolean = sys.error("stub")
- def >(x: Long): Boolean = sys.error("stub")
- def >(x: Float): Boolean = sys.error("stub")
- def >(x: Double): Boolean = sys.error("stub")
-
- def >=(x: Byte): Boolean = sys.error("stub")
- def >=(x: Short): Boolean = sys.error("stub")
- def >=(x: Char): Boolean = sys.error("stub")
- def >=(x: Int): Boolean = sys.error("stub")
- def >=(x: Long): Boolean = sys.error("stub")
- def >=(x: Float): Boolean = sys.error("stub")
- def >=(x: Double): Boolean = sys.error("stub")
-
- def |(x: Byte): Int = sys.error("stub")
- def |(x: Short): Int = sys.error("stub")
- def |(x: Char): Int = sys.error("stub")
- def |(x: Int): Int = sys.error("stub")
- def |(x: Long): Long = sys.error("stub")
-
- def &(x: Byte): Int = sys.error("stub")
- def &(x: Short): Int = sys.error("stub")
- def &(x: Char): Int = sys.error("stub")
- def &(x: Int): Int = sys.error("stub")
- def &(x: Long): Long = sys.error("stub")
-
- def ^(x: Byte): Int = sys.error("stub")
- def ^(x: Short): Int = sys.error("stub")
- def ^(x: Char): Int = sys.error("stub")
- def ^(x: Int): Int = sys.error("stub")
- def ^(x: Long): Long = sys.error("stub")
-
- def +(x: Byte): Int = sys.error("stub")
- def +(x: Short): Int = sys.error("stub")
- def +(x: Char): Int = sys.error("stub")
- def +(x: Int): Int = sys.error("stub")
- def +(x: Long): Long = sys.error("stub")
- def +(x: Float): Float = sys.error("stub")
- def +(x: Double): Double = sys.error("stub")
-
- def -(x: Byte): Int = sys.error("stub")
- def -(x: Short): Int = sys.error("stub")
- def -(x: Char): Int = sys.error("stub")
- def -(x: Int): Int = sys.error("stub")
- def -(x: Long): Long = sys.error("stub")
- def -(x: Float): Float = sys.error("stub")
- def -(x: Double): Double = sys.error("stub")
-
- def *(x: Byte): Int = sys.error("stub")
- def *(x: Short): Int = sys.error("stub")
- def *(x: Char): Int = sys.error("stub")
- def *(x: Int): Int = sys.error("stub")
- def *(x: Long): Long = sys.error("stub")
- def *(x: Float): Float = sys.error("stub")
- def *(x: Double): Double = sys.error("stub")
-
- def /(x: Byte): Int = sys.error("stub")
- def /(x: Short): Int = sys.error("stub")
- def /(x: Char): Int = sys.error("stub")
- def /(x: Int): Int = sys.error("stub")
- def /(x: Long): Long = sys.error("stub")
- def /(x: Float): Float = sys.error("stub")
- def /(x: Double): Double = sys.error("stub")
-
- def %(x: Byte): Int = sys.error("stub")
- def %(x: Short): Int = sys.error("stub")
- def %(x: Char): Int = sys.error("stub")
- def %(x: Int): Int = sys.error("stub")
- def %(x: Long): Long = sys.error("stub")
- def %(x: Float): Float = sys.error("stub")
- def %(x: Double): Double = sys.error("stub")
-
- def getClass(): Class[Short] = sys.error("stub")
+final abstract class Short private extends AnyVal {
+ def toByte: Byte
+ def toShort: Short
+ def toChar: Char
+ def toInt: Int
+ def toLong: Long
+ def toFloat: Float
+ def toDouble: Double
+
+ /**
+ * Returns the bitwise negation of this value.
+ * @example {{{
+ * ~5 == -6
+ * // in binary: ~00000101 ==
+ * // 11111010
+ * }}}
+ */
+ def unary_~ : Int
+ /**
+ * Returns this value, unmodified.
+ */
+ def unary_+ : Int
+ /**
+ * Returns the negation of this value.
+ */
+ def unary_- : Int
+
+ def +(x: String): String
+
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the new right bits with zeroes.
+ * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
+ */
+ def <<(x: Int): Int
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the new right bits with zeroes.
+ * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}}
+ */
+ def <<(x: Long): Int
+ /**
+ * Returns this value bit-shifted right by the specified number of bits,
+ * filling the new left bits with zeroes.
+ * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
+ * @example {{{
+ * -21 >>> 3 == 536870909
+ * // in binary: 11111111 11111111 11111111 11101011 >>> 3 ==
+ * // 00011111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>>(x: Int): Int
+ /**
+ * Returns this value bit-shifted right by the specified number of bits,
+ * filling the new left bits with zeroes.
+ * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}
+ * @example {{{
+ * -21 >>> 3 == 536870909
+ * // in binary: 11111111 11111111 11111111 11101011 >>> 3 ==
+ * // 00011111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>>(x: Long): Int
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the right bits with the same value as the left-most bit of this.
+ * The effect of this is to retain the sign of the value.
+ * @example {{{
+ * -21 >> 3 == -3
+ * // in binary: 11111111 11111111 11111111 11101011 >> 3 ==
+ * // 11111111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>(x: Int): Int
+ /**
+ * Returns this value bit-shifted left by the specified number of bits,
+ * filling in the right bits with the same value as the left-most bit of this.
+ * The effect of this is to retain the sign of the value.
+ * @example {{{
+ * -21 >> 3 == -3
+ * // in binary: 11111111 11111111 11111111 11101011 >> 3 ==
+ * // 11111111 11111111 11111111 11111101
+ * }}}
+ */
+ def >>(x: Long): Int
+
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Short): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Char): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Int): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Long): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Float): Boolean
+ /**
+ * Returns `true` if this value is equal to x, `false` otherwise.
+ */
+ def ==(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is not equal to x, `false` otherwise.
+ */
+ def !=(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Short): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Char): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Int): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Long): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Float): Boolean
+ /**
+ * Returns `true` if this value is less than x, `false` otherwise.
+ */
+ def <(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is less than or equal to x, `false` otherwise.
+ */
+ def <=(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Short): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Char): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Int): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Long): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Float): Boolean
+ /**
+ * Returns `true` if this value is greater than x, `false` otherwise.
+ */
+ def >(x: Double): Boolean
+
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Byte): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Short): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Char): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Int): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Long): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Float): Boolean
+ /**
+ * Returns `true` if this value is greater than or equal to x, `false` otherwise.
+ */
+ def >=(x: Double): Boolean
+
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Byte): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Short): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Char): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Int): Int
+ /**
+ * Returns the bitwise OR of this value and `x`.
+ * @example {{{
+ * (0xf0 | 0xaa) == 0xfa
+ * // in binary: 11110000
+ * // | 10101010
+ * // --------
+ * // 11111010
+ * }}}
+ */
+ def |(x: Long): Long
+
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Byte): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Short): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Char): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Int): Int
+ /**
+ * Returns the bitwise AND of this value and `x`.
+ * @example {{{
+ * (0xf0 & 0xaa) == 0xa0
+ * // in binary: 11110000
+ * // & 10101010
+ * // --------
+ * // 10100000
+ * }}}
+ */
+ def &(x: Long): Long
+
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Byte): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Short): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Char): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Int): Int
+ /**
+ * Returns the bitwise XOR of this value and `x`.
+ * @example {{{
+ * (0xf0 ^ 0xaa) == 0x5a
+ * // in binary: 11110000
+ * // ^ 10101010
+ * // --------
+ * // 01011010
+ * }}}
+ */
+ def ^(x: Long): Long
+
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Byte): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Short): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Char): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Int): Int
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Long): Long
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Float): Float
+ /**
+ * Returns the sum of this value and `x`.
+ */
+ def +(x: Double): Double
+
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Byte): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Short): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Char): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Int): Int
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Long): Long
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Float): Float
+ /**
+ * Returns the difference of this value and `x`.
+ */
+ def -(x: Double): Double
+
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Byte): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Short): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Char): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Int): Int
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Long): Long
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Float): Float
+ /**
+ * Returns the product of this value and `x`.
+ */
+ def *(x: Double): Double
+
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Byte): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Short): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Char): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Int): Int
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Long): Long
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Float): Float
+ /**
+ * Returns the quotient of this value and `x`.
+ */
+ def /(x: Double): Double
+
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Byte): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Short): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Char): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Int): Int
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Long): Long
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Float): Float
+ /**
+ * Returns the remainder of the division of this value by `x`.
+ */
+ def %(x: Double): Double
+
+ override def getClass(): Class[Short] = null
}
object Short extends AnyValCompanion {
@@ -176,5 +624,12 @@ object Short extends AnyValCompanion {
/** The String representation of the scala.Short companion object.
*/
override def toString = "object scala.Short"
+
+ /** Language mandated coercions from Short to "wider" types.
+ */
+ implicit def short2int(x: Short): Int = x.toInt
+ implicit def short2long(x: Short): Long = x.toLong
+ implicit def short2float(x: Short): Float = x.toFloat
+ implicit def short2double(x: Short): Double = x.toDouble
}
diff --git a/src/library/scala/Specializable.scala b/src/library/scala/Specializable.scala
new file mode 100644
index 0000000..c7a6091
--- /dev/null
+++ b/src/library/scala/Specializable.scala
@@ -0,0 +1,29 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** A common supertype for companions of specializable types.
+ * Should not be extended in user code.
+ */
+trait Specializable extends SpecializableCompanion
+
+object Specializable {
+ // No type parameter in @specialized annotation.
+ trait SpecializedGroup { }
+
+ // Smuggle a list of types by way of a tuple upon which Group is parameterized.
+ class Group[T >: Null](value: T) extends SpecializedGroup { }
+
+ final val Primitives = new Group((Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit))
+ final val Everything = new Group((Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit, AnyRef))
+ final val Bits32AndUp = new Group((Int, Long, Float, Double))
+ final val Integral = new Group((Byte, Short, Int, Long, Char))
+ final val AllNumeric = new Group((Byte, Short, Int, Long, Char, Float, Double))
+ final val BestOfBreed = new Group((Int, Double, Boolean, Unit, AnyRef))
+}
diff --git a/src/library/scala/SpecializableCompanion.scala b/src/library/scala/SpecializableCompanion.scala
index fbdf42f..1a9ce71 100644
--- a/src/library/scala/SpecializableCompanion.scala
+++ b/src/library/scala/SpecializableCompanion.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,4 +10,5 @@ package scala
/** A common supertype for companion classes which specialization takes into account.
*/
+ at deprecated("Use Specializable instead", "2.10.0")
private[scala] trait SpecializableCompanion
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
new file mode 100644
index 0000000..1b5fd6c
--- /dev/null
+++ b/src/library/scala/StringContext.scala
@@ -0,0 +1,240 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** This class provides the basic mechanism to do String Interpolation.
+ * String Interpolation allows users
+ * to embed variable references directly in *processed* string literals.
+ * Here's an example:
+ * {{{
+ * val name = "James"
+ * println(s"Hello, $name") // Hello, James
+ * }}}
+ *
+ * Any processed string literal is rewritten as an instantiation and
+ * method call against this class. For example:
+ * {{{
+ * s"Hello, $name"
+ * }}}
+ *
+ * is rewritten to be:
+ *
+ * {{{
+ * StringContext("Hello, ", "").s(name)
+ * }}}
+ *
+ * By default, this class provides the `raw`, `s` and `f` methods as
+ * available interpolators.
+ *
+ * To provide your own string interpolator, create an implicit class
+ * which adds a method to `StringContext`. Here's an example:
+ * {{{
+ * implicit class JsonHelper(val sc: StringContext) extends AnyVal {
+ * def json(args: Any*): JSONObject = ...
+ * }
+ * val x: JSONObject = json"{ a: $a }"
+ * }}}
+ *
+ * Here the `JsonHelper` extenion class implicitly adds the `json` method to
+ * `StringContext` which can be used for `json` string literals.
+ *
+ * @since 2.10.0
+ * @param parts The parts that make up the interpolated string,
+ * without the expressions that get inserted by interpolation.
+ */
+case class StringContext(parts: String*) {
+
+ import StringContext._
+
+ /** Checks that the length of the given argument `args` is one less than the number
+ * of `parts` supplied to the enclosing `StringContext`.
+ * @param `args` The arguments to be checked.
+ * @throws An `IllegalArgumentException` if this is not the case.
+ */
+ def checkLengths(args: Seq[Any]): Unit =
+ if (parts.length != args.length + 1)
+ throw new IllegalArgumentException("wrong number of arguments for interpolated string")
+
+
+ /** The simple string interpolator.
+ *
+ * It inserts its arguments between corresponding parts of the string context.
+ * It also treats standard escape sequences as defined in the Scala specification.
+ * Here's an example of usage:
+ * {{{
+ * val name = "James"
+ * println(s"Hello, $name") // Hello, James
+ * }}}
+ * In this example, the expression $name is replaced with the `toString` of the
+ * variable `name`.
+ * The `s` interpolator can take the `toString` of any arbitrary expression within
+ * a `${}` block, for example:
+ * {{{
+ * println(s"1 + 1 = ${1 + 1}")
+ * }}}
+ * will print the string `1 + 1 = 2`.
+ *
+ * @param `args` The arguments to be inserted into the resulting string.
+ * @throws An `IllegalArgumentException`
+ * if the number of `parts` in the enclosing `StringContext` does not exceed
+ * the number of arguments `arg` by exactly 1.
+ * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
+ * that does not start a valid escape sequence.
+ */
+ def s(args: Any*): String = standardInterpolator(treatEscapes, args)
+
+ /** The raw string interpolator.
+ *
+ * It inserts its arguments between corresponding parts of the string context.
+ * As opposed to the simple string interpolator `s`, this one does not treat
+ * standard escape sequences as defined in the Scala specification.
+ *
+ * For example, the raw processed string `raw"a\nb"` is equal to the scala string `"a\\nb"`.
+ *
+ * ''Note:'' Even when using the raw interpolator, Scala will preprocess unicode escapes.
+ * For example:
+ * {{{
+ * scala> raw"\u005cu0025"
+ * res0: String = #
+ * }}}
+ *
+ * @param `args` The arguments to be inserted into the resulting string.
+ * @throws An `IllegalArgumentException`
+ * if the number of `parts` in the enclosing `StringContext` does not exceed
+ * the number of arguments `arg` by exactly 1.
+ * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
+ * that does not start a valid escape sequence.
+ */
+ def raw(args: Any*): String = standardInterpolator(identity, args)
+
+ def standardInterpolator(process: String => String, args: Seq[Any]): String = {
+ checkLengths(args)
+ val pi = parts.iterator
+ val ai = args.iterator
+ val bldr = new java.lang.StringBuilder(process(pi.next()))
+ while (ai.hasNext) {
+ bldr append ai.next
+ bldr append process(pi.next())
+ }
+ bldr.toString
+ }
+
+ /** The formatted string interpolator.
+ *
+ * It inserts its arguments between corresponding parts of the string context.
+ * It also treats standard escape sequences as defined in the Scala specification.
+ * Finally, if an interpolated expression is followed by a `parts` string
+ * that starts with a formatting specifier, the expression is formatted according to that
+ * specifier. All specifiers allowed in Java format strings are handled, and in the same
+ * way they are treated in Java.
+ *
+ * For example:
+ * {{{
+ * val height = 1.9d
+ * val name = "James"
+ * println(f"$name%s is $height%2.2f meters tall") // James is 1.90 meters tall
+ * }}}
+ *
+ * @param `args` The arguments to be inserted into the resulting string.
+ * @throws An `IllegalArgumentException`
+ * if the number of `parts` in the enclosing `StringContext` does not exceed
+ * the number of arguments `arg` by exactly 1.
+ * @throws A `StringContext.InvalidEscapeException` if a `parts` string contains a backslash (`\`) character
+ * that does not start a valid escape sequence.
+ *
+ * Note: The `f` method works by assembling a format string from all the `parts` strings and using
+ * `java.lang.String.format` to format all arguments with that format string. The format string is
+ * obtained by concatenating all `parts` strings, and performing two transformations:
+ *
+ * 1. Let a _formatting position_ be a start of any `parts` string except the first one.
+ * If a formatting position does not refer to a `%` character (which is assumed to
+ * start a format specifier), then the string format specifier `%s` is inserted.
+ *
+ * 2. Any `%` characters not in formatting positions are left in the resulting
+ * string literally. This is achieved by replacing each such occurrence by the
+ * format specifier `%%`.
+ */
+ // The implementation is hardwired to `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f`
+ // Using the mechanism implemented in `scala.tools.reflect.FastTrack`
+ def f(args: Any*): String = ??? // macro
+}
+
+object StringContext {
+
+ /** An exception that is thrown if a string contains a backslash (`\`) character
+ * that does not start a valid escape sequence.
+ * @param str The offending string
+ * @param idx The index of the offending backslash character in `str`.
+ */
+ class InvalidEscapeException(str: String, idx: Int)
+ extends IllegalArgumentException("invalid escape character at index "+idx+" in \""+str+"\"")
+
+ /** Expands standard Scala escape sequences in a string.
+ * Escape sequences are:
+ * control: `\b`, `\t`, `\n`, `\f`, `\r`
+ * escape: `\\`, `\"`, `\'`
+ * octal: `\d` `\dd` `\ddd` where `d` is an octal digit between `0` and `7`.
+ *
+ * @param str A string that may contain escape sequences
+ * @return The string with all escape sequences expanded.
+ */
+ def treatEscapes(str: String): String = {
+ lazy val bldr = new java.lang.StringBuilder
+ val len = str.length
+ var start = 0
+ var cur = 0
+ var idx = 0
+ def output(ch: Char) = {
+ bldr.append(str, start, cur)
+ bldr append ch
+ start = idx
+ }
+ while (idx < len) {
+ cur = idx
+ if (str(idx) == '\\') {
+ idx += 1
+ if (idx >= len) throw new InvalidEscapeException(str, cur)
+ if ('0' <= str(idx) && str(idx) <= '7') {
+ val leadch = str(idx)
+ var oct = leadch - '0'
+ idx += 1
+ if (idx < len && '0' <= str(idx) && str(idx) <= '7') {
+ oct = oct * 8 + str(idx) - '0'
+ idx += 1
+ if (idx < len && leadch <= '3' && '0' <= str(idx) && str(idx) <= '7') {
+ oct = oct * 8 + str(idx) - '0'
+ idx += 1
+ }
+ }
+ output(oct.toChar)
+ } else {
+ val ch = str(idx)
+ idx += 1
+ output {
+ ch match {
+ case 'b' => '\b'
+ case 't' => '\t'
+ case 'n' => '\n'
+ case 'f' => '\f'
+ case 'r' => '\r'
+ case '\"' => '\"'
+ case '\'' => '\''
+ case '\\' => '\\'
+ case _ => throw new InvalidEscapeException(str, cur)
+ }
+ }
+ }
+ } else {
+ idx += 1
+ }
+ }
+ if (start == 0) str
+ else bldr.append(str, start, idx).toString
+ }
+}
diff --git a/src/library/scala/Symbol.scala b/src/library/scala/Symbol.scala
index 79581f0..4fead7a 100644
--- a/src/library/scala/Symbol.scala
+++ b/src/library/scala/Symbol.scala
@@ -1,28 +1,21 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala
-/** <p>
- * This class provides a simple way to get unique objects for
- * equal strings. Since symbols are interned, they can be compared using
- * reference equality. Instances of
- * <code>Symbol</code> can be created easily with Scala's built-in
-* quote mechanism.
- * </p>
- * <p>
- * For instance, the <a href="http://scala-lang.org/" target="_top">Scala</a>
- * term <code>'mysym</code> will invoke the constructor of the
- * <code>Symbol</code> class in the following way:
- * <code>Symbol("mysym")</code>.
- * </p>
+/** This class provides a simple way to get unique objects for equal strings.
+ * Since symbols are interned, they can be compared using reference equality.
+ * Instances of `Symbol` can be created easily with Scala's built-in quote
+ * mechanism.
+ *
+ * For instance, the [[http://scala-lang.org/#_top Scala]] term `'mysym` will
+ * invoke the constructor of the `Symbol` class in the following way:
+ * `Symbol("mysym")`.
*
* @author Martin Odersky, Iulian Dragos
* @version 1.8
@@ -34,10 +27,12 @@ final class Symbol private (val name: String) extends Serializable {
@throws(classOf[java.io.ObjectStreamException])
private def readResolve(): Any = Symbol.apply(name)
+ override def hashCode = name.hashCode()
+ override def equals(other: Any) = this eq other.asInstanceOf[AnyRef]
}
-object Symbol extends UniquenessCache[String, Symbol]
-{
+object Symbol extends UniquenessCache[String, Symbol] {
+ override def apply(name: String): Symbol = super.apply(name)
protected def valueFromKey(name: String): Symbol = new Symbol(name)
protected def keyFromValue(sym: Symbol): Option[String] = Some(sym.name)
}
@@ -74,6 +69,11 @@ private[scala] abstract class UniquenessCache[K, V >: Null]
val res = cached()
if (res != null) res
else {
+ // If we don't remove the old String key from the map, we can
+ // wind up with one String as the key and a different String as
+ // as the name field in the Symbol, which can lead to surprising
+ // GC behavior and duplicate Symbols. See SI-6706.
+ map remove name
val sym = valueFromKey(name)
map.put(name, new WeakReference(sym))
sym
diff --git a/src/library/scala/Tuple1.scala b/src/library/scala/Tuple1.scala
index 6d31d35..02fdd0c 100644
--- a/src/library/scala/Tuple1.scala
+++ b/src/library/scala/Tuple1.scala
@@ -19,5 +19,5 @@ case class Tuple1[@specialized(Int, Long, Double) +T1](_1: T1)
extends Product1[T1]
{
override def toString() = "(" + _1 + ")"
-
+
}
diff --git a/src/library/scala/Tuple10.scala b/src/library/scala/Tuple10.scala
index 10d554d..ba2a02a 100644
--- a/src/library/scala/Tuple10.scala
+++ b/src/library/scala/Tuple10.scala
@@ -28,5 +28,5 @@ case class Tuple10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10](_1: T1, _2
extends Product10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + ")"
-
+
}
diff --git a/src/library/scala/Tuple11.scala b/src/library/scala/Tuple11.scala
index 2065e4f..7f51d17 100644
--- a/src/library/scala/Tuple11.scala
+++ b/src/library/scala/Tuple11.scala
@@ -29,5 +29,5 @@ case class Tuple11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11](_1:
extends Product11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + ")"
-
+
}
diff --git a/src/library/scala/Tuple12.scala b/src/library/scala/Tuple12.scala
index a463986..4bbc6a0 100644
--- a/src/library/scala/Tuple12.scala
+++ b/src/library/scala/Tuple12.scala
@@ -31,5 +31,5 @@ case class Tuple12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 +
"," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + ")"
-
+
}
diff --git a/src/library/scala/Tuple13.scala b/src/library/scala/Tuple13.scala
index 2bee0d6..77bd59b 100644
--- a/src/library/scala/Tuple13.scala
+++ b/src/library/scala/Tuple13.scala
@@ -32,5 +32,5 @@ case class Tuple13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 +
"," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + ")"
-
+
}
diff --git a/src/library/scala/Tuple14.scala b/src/library/scala/Tuple14.scala
index 60f7c51..bf7a4ce 100644
--- a/src/library/scala/Tuple14.scala
+++ b/src/library/scala/Tuple14.scala
@@ -33,5 +33,5 @@ case class Tuple14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 +
"," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + ")"
-
+
}
diff --git a/src/library/scala/Tuple15.scala b/src/library/scala/Tuple15.scala
index fc8e305..582c359 100644
--- a/src/library/scala/Tuple15.scala
+++ b/src/library/scala/Tuple15.scala
@@ -34,5 +34,5 @@ case class Tuple15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 +
"," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + ")"
-
+
}
diff --git a/src/library/scala/Tuple16.scala b/src/library/scala/Tuple16.scala
index 80181f6..a1e9a79 100644
--- a/src/library/scala/Tuple16.scala
+++ b/src/library/scala/Tuple16.scala
@@ -35,5 +35,5 @@ case class Tuple16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 +
"," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + ")"
-
+
}
diff --git a/src/library/scala/Tuple17.scala b/src/library/scala/Tuple17.scala
index 6236122..f531766 100644
--- a/src/library/scala/Tuple17.scala
+++ b/src/library/scala/Tuple17.scala
@@ -36,5 +36,5 @@ case class Tuple17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 +
"," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + ")"
-
+
}
diff --git a/src/library/scala/Tuple18.scala b/src/library/scala/Tuple18.scala
index dd6a819..a96db25 100644
--- a/src/library/scala/Tuple18.scala
+++ b/src/library/scala/Tuple18.scala
@@ -37,5 +37,5 @@ case class Tuple18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 +
"," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + ")"
-
+
}
diff --git a/src/library/scala/Tuple19.scala b/src/library/scala/Tuple19.scala
index 65f0fd2..718280d 100644
--- a/src/library/scala/Tuple19.scala
+++ b/src/library/scala/Tuple19.scala
@@ -38,5 +38,5 @@ case class Tuple19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 +
"," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + ")"
-
+
}
diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala
index dd6ac0c..35d5a44 100644
--- a/src/library/scala/Tuple2.scala
+++ b/src/library/scala/Tuple2.scala
@@ -9,9 +9,6 @@
package scala
-import scala.collection.{ TraversableLike => TLike, IterableLike => ILike }
-import scala.collection.generic.{ CanBuildFrom => CBF }
-
/** A tuple of 2 elements; the canonical representation of a [[scala.Product2]].
*
@@ -19,116 +16,15 @@ import scala.collection.generic.{ CanBuildFrom => CBF }
* @param _1 Element 1 of this Tuple2
* @param _2 Element 2 of this Tuple2
*/
-case class Tuple2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Double) +T2](_1: T1, _2: T2)
+case class Tuple2[@specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T1, @specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T2](_1: T1, _2: T2)
extends Product2[T1, T2]
{
override def toString() = "(" + _1 + "," + _2 + ")"
-
+
/** Swaps the elements of this `Tuple`.
* @return a new Tuple where the first element is the second element of this Tuple and the
* second element is the first element of this Tuple.
*/
def swap: Tuple2[T2,T1] = Tuple2(_2, _1)
- @deprecated("Use `zipped` instead.", "2.9.0")
- def zip[Repr1, El1, El2, To](implicit w1: T1 => TLike[El1, Repr1],
- w2: T2 => Iterable[El2],
- cbf1: CBF[Repr1, (El1, El2), To]): To = {
- zipped map ((x, y) => ((x, y)))
- }
-
- /** Wraps a tuple in a `Zipped`, which supports 2-ary generalisations of `map`, `flatMap`, `filter`, etc.
- * Note that there must be an implicit value to convert this tuple's types into a [[scala.collection.TraversableLike]]
- * or [[scala.collection.IterableLike]].
- * {{{
- * scala> val tuple = (List(1,2,3),List('a','b','c'))
- * tuple: (List[Int], List[Char]) = (List(1, 2, 3),List(a, b, c))
- *
- * scala> tuple.zipped map { (x,y) => x + ":" + y }
- * res6: List[java.lang.String] = List(1:a, 2:b, 3:c)
- * }}}
- *
- * @see Zipped
- * Note: will not terminate for infinite-sized collections.
- */
- def zipped[Repr1, El1, Repr2, El2](implicit w1: T1 => TLike[El1, Repr1], w2: T2 => ILike[El2, Repr2]): Zipped[Repr1, El1, Repr2, El2]
- = new Zipped[Repr1, El1, Repr2, El2](_1, _2)
-
- class Zipped[+Repr1, +El1, +Repr2, +El2](coll1: TLike[El1, Repr1], coll2: ILike[El2, Repr2]) { // coll2: ILike for filter
- def map[B, To](f: (El1, El2) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- b.sizeHint(coll1)
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext)
- b += f(el1, elems2.next)
- else
- return b.result
- }
-
- b.result
- }
-
- def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext)
- b ++= f(el1, elems2.next)
- else
- return b.result
- }
-
- b.result
- }
-
- def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = {
- val b1 = cbf1(coll1.repr)
- val b2 = cbf2(coll2.repr)
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext) {
- val el2 = elems2.next
- if (f(el1, el2)) {
- b1 += el1
- b2 += el2
- }
- }
- else return (b1.result, b2.result)
- }
-
- (b1.result, b2.result)
- }
-
- def exists(f: (El1, El2) => Boolean): Boolean = {
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext) {
- if (f(el1, elems2.next))
- return true
- }
- else return false
- }
- false
- }
-
- def forall(f: (El1, El2) => Boolean): Boolean =
- !exists((x, y) => !f(x, y))
-
- def foreach[U](f: (El1, El2) => U): Unit = {
- val elems2 = coll2.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext)
- f(el1, elems2.next)
- else
- return
- }
- }
- }
-
}
diff --git a/src/library/scala/Tuple20.scala b/src/library/scala/Tuple20.scala
index cf36269..4a44c0b 100644
--- a/src/library/scala/Tuple20.scala
+++ b/src/library/scala/Tuple20.scala
@@ -39,5 +39,5 @@ case class Tuple20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 +
"," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + ")"
-
+
}
diff --git a/src/library/scala/Tuple21.scala b/src/library/scala/Tuple21.scala
index 78b9c58..580a169 100644
--- a/src/library/scala/Tuple21.scala
+++ b/src/library/scala/Tuple21.scala
@@ -40,5 +40,5 @@ case class Tuple21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 +
"," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + "," + _21 + ")"
-
+
}
diff --git a/src/library/scala/Tuple22.scala b/src/library/scala/Tuple22.scala
index 0993dfb..fd3392d 100644
--- a/src/library/scala/Tuple22.scala
+++ b/src/library/scala/Tuple22.scala
@@ -41,5 +41,5 @@ case class Tuple22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 +
"," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + "," + _21 + "," + _22 + ")"
-
+
}
diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala
index dfa0c96..5ed1360 100644
--- a/src/library/scala/Tuple3.scala
+++ b/src/library/scala/Tuple3.scala
@@ -9,9 +9,6 @@
package scala
-import scala.collection.{ TraversableLike => TLike, IterableLike => ILike }
-import scala.collection.generic.{ CanBuildFrom => CBF }
-
/** A tuple of 3 elements; the canonical representation of a [[scala.Product3]].
*
@@ -24,122 +21,5 @@ case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3)
extends Product3[T1, T2, T3]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + ")"
-
-
- @deprecated("Use `zipped` instead.", "2.9.0")
- def zip[Repr1, El1, El2, El3, To](implicit w1: T1 => TLike[El1, Repr1],
- w2: T2 => Iterable[El2],
- w3: T3 => Iterable[El3],
- cbf1: CBF[Repr1, (El1, El2, El3), To]): To = {
- zipped map ((x, y, z) => ((x, y, z)))
- }
-
- /** Wraps a tuple in a `Zipped`, which supports 3-ary generalisations of `map`, `flatMap`, `filter`, etc.
- * Note that there must be an implicit value to convert this tuple's types into a [[scala.collection.TraversableLike]]
- * or [[scala.collection.IterableLike]].
- * {{{
- * scala> val tuple = (List(1,2,3),List('a','b','c'),List("x","y","z"))
- * tuple: (List[Int], List[Char], List[java.lang.String]) = (List(1, 2, 3),List(a, b, c),List(x, y, z))
- *
- * scala> tuple.zipped map { (x,y,z) => x + ":" + y + ":" + z}
- * res8: List[java.lang.String] = List(1:a:x, 2:b:y, 3:c:z)
- * }}}
- *
- * @see Zipped
- * Note: will not terminate for infinite-sized collections.
- */
- def zipped[Repr1, El1, Repr2, El2, Repr3, El3](implicit w1: T1 => TLike[El1, Repr1],
- w2: T2 => ILike[El2, Repr2],
- w3: T3 => ILike[El3, Repr3]): Zipped[Repr1, El1, Repr2, El2, Repr3, El3]
- = new Zipped[Repr1, El1, Repr2, El2, Repr3, El3](_1, _2, _3)
-
- class Zipped[+Repr1, +El1, +Repr2, +El2, +Repr3, +El3](coll1: TLike[El1, Repr1],
- coll2: ILike[El2, Repr2],
- coll3: ILike[El3, Repr3]) {
- def map[B, To](f: (El1, El2, El3) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext)
- b += f(el1, elems2.next, elems3.next)
- else
- return b.result
- }
- b.result
- }
-
- def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
- val b = cbf(coll1.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext)
- b ++= f(el1, elems2.next, elems3.next)
- else
- return b.result
- }
- b.result
- }
-
- def filter[To1, To2, To3](f: (El1, El2, El3) => Boolean)(
- implicit cbf1: CBF[Repr1, El1, To1],
- cbf2: CBF[Repr2, El2, To2],
- cbf3: CBF[Repr3, El3, To3]): (To1, To2, To3) = {
- val b1 = cbf1(coll1.repr)
- val b2 = cbf2(coll2.repr)
- val b3 = cbf3(coll3.repr)
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
- def result = (b1.result, b2.result, b3.result)
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext) {
- val el2 = elems2.next
- val el3 = elems3.next
-
- if (f(el1, el2, el3)) {
- b1 += el1
- b2 += el2
- b3 += el3
- }
- }
- else return result
- }
-
- result
- }
-
- def exists(f: (El1, El2, El3) => Boolean): Boolean = {
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext) {
- if (f(el1, elems2.next, elems3.next))
- return true
- }
- else return false
- }
- false
- }
-
- def forall(f: (El1, El2, El3) => Boolean): Boolean =
- !exists((x, y, z) => !f(x, y, z))
-
- def foreach[U](f: (El1, El2, El3) => U): Unit = {
- val elems2 = coll2.iterator
- val elems3 = coll3.iterator
-
- for (el1 <- coll1) {
- if (elems2.hasNext && elems3.hasNext)
- f(el1, elems2.next, elems3.next)
- else
- return
- }
- }
- }
-
+
}
diff --git a/src/library/scala/Tuple4.scala b/src/library/scala/Tuple4.scala
index a919072..a859078 100644
--- a/src/library/scala/Tuple4.scala
+++ b/src/library/scala/Tuple4.scala
@@ -22,5 +22,5 @@ case class Tuple4[+T1, +T2, +T3, +T4](_1: T1, _2: T2, _3: T3, _4: T4)
extends Product4[T1, T2, T3, T4]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + ")"
-
+
}
diff --git a/src/library/scala/Tuple5.scala b/src/library/scala/Tuple5.scala
index 6a94f48..1edfb67 100644
--- a/src/library/scala/Tuple5.scala
+++ b/src/library/scala/Tuple5.scala
@@ -23,5 +23,5 @@ case class Tuple5[+T1, +T2, +T3, +T4, +T5](_1: T1, _2: T2, _3: T3, _4: T4, _5: T
extends Product5[T1, T2, T3, T4, T5]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + ")"
-
+
}
diff --git a/src/library/scala/Tuple6.scala b/src/library/scala/Tuple6.scala
index 34f8224..5b74937 100644
--- a/src/library/scala/Tuple6.scala
+++ b/src/library/scala/Tuple6.scala
@@ -24,5 +24,5 @@ case class Tuple6[+T1, +T2, +T3, +T4, +T5, +T6](_1: T1, _2: T2, _3: T3, _4: T4,
extends Product6[T1, T2, T3, T4, T5, T6]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + ")"
-
+
}
diff --git a/src/library/scala/Tuple7.scala b/src/library/scala/Tuple7.scala
index 6fc3477..a7f572e 100644
--- a/src/library/scala/Tuple7.scala
+++ b/src/library/scala/Tuple7.scala
@@ -25,5 +25,5 @@ case class Tuple7[+T1, +T2, +T3, +T4, +T5, +T6, +T7](_1: T1, _2: T2, _3: T3, _4:
extends Product7[T1, T2, T3, T4, T5, T6, T7]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + ")"
-
+
}
diff --git a/src/library/scala/Tuple8.scala b/src/library/scala/Tuple8.scala
index 1e21b68..9bb427d 100644
--- a/src/library/scala/Tuple8.scala
+++ b/src/library/scala/Tuple8.scala
@@ -26,5 +26,5 @@ case class Tuple8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8](_1: T1, _2: T2, _3: T3
extends Product8[T1, T2, T3, T4, T5, T6, T7, T8]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + ")"
-
+
}
diff --git a/src/library/scala/Tuple9.scala b/src/library/scala/Tuple9.scala
index 453cea3..4d50539 100644
--- a/src/library/scala/Tuple9.scala
+++ b/src/library/scala/Tuple9.scala
@@ -27,5 +27,5 @@ case class Tuple9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9](_1: T1, _2: T2, _
extends Product9[T1, T2, T3, T4, T5, T6, T7, T8, T9]
{
override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + ")"
-
+
}
diff --git a/src/library/scala/UninitializedError.scala b/src/library/scala/UninitializedError.scala
index eae5e36..0641a66 100644
--- a/src/library/scala/UninitializedError.scala
+++ b/src/library/scala/UninitializedError.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/UninitializedFieldError.scala b/src/library/scala/UninitializedFieldError.scala
index 476485d..10c6ccc 100644
--- a/src/library/scala/UninitializedFieldError.scala
+++ b/src/library/scala/UninitializedFieldError.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,11 +13,8 @@ package scala
/** This class implements errors which are thrown whenever a
* field is used before it has been initialized.
*
- * Such runtime checks are not emitted by default. See the
- * compiler documentation for knowing how to turn them on.
- *
- * Note: This check requires the initialization order
- * first implemented in scala 2.8.
+ * Such runtime checks are not emitted by default.
+ * They can be enabled by the `-Xcheckinit` compiler option.
*
* @since 2.7
*/
diff --git a/src/library/scala/Unit.scala b/src/library/scala/Unit.scala
index c5d12af..dc67e60 100644
--- a/src/library/scala/Unit.scala
+++ b/src/library/scala/Unit.scala
@@ -10,13 +10,16 @@
package scala
+import scala.language.implicitConversions
-/** Unit is a member of the value classes, those whose instances are
- * not represented as objects by the underlying host system. There is
- * only one value of type Unit: `()`.
+
+/** `Unit` is a subtype of [[scala.AnyVal]]. There is only one value of type
+ * `Unit`, `()`, and it is not represented by any object in the underlying
+ * runtime system. A method with return type `Unit` is analogous to a Java
+ * method which is declared `void`.
*/
-final class Unit extends AnyVal {
- def getClass(): Class[Unit] = sys.error("stub")
+final abstract class Unit private extends AnyVal {
+ override def getClass(): Class[Unit] = null
}
object Unit extends AnyValCompanion {
diff --git a/src/library/scala/annotation/Annotation.scala b/src/library/scala/annotation/Annotation.scala
index 05ed5af..c821344 100644
--- a/src/library/scala/annotation/Annotation.scala
+++ b/src/library/scala/annotation/Annotation.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,14 +8,11 @@
package scala.annotation
-/** <p>
- * A base class for annotations. Annotations extending this class directly
- * are not preserved for the Scala type checker and are also not stored
- * as Java annotations in classfiles. To enable either or both of these,
- * one needs to inherit from
- * <a href="StaticAnnotation.html"><code>StaticAnnotation</code></a> or/and
- * <a href="ClassfileAnnotation.html"><code>ClassfileAnnotation</code></a>.
- * </p>
+/** A base class for annotations. Annotations extending this class directly
+ * are not preserved for the Scala type checker and are also not stored as
+ * Java annotations in classfiles. To enable either or both of these, one
+ * needs to inherit from [[scala.annotation.StaticAnnotation]] or/and
+ * [[scala.annotation.ClassfileAnnotation]].
*
* @author Martin Odersky
* @version 1.1, 2/02/2007
diff --git a/src/library/scala/annotation/ClassfileAnnotation.scala b/src/library/scala/annotation/ClassfileAnnotation.scala
index 1019601..e32b93a 100644
--- a/src/library/scala/annotation/ClassfileAnnotation.scala
+++ b/src/library/scala/annotation/ClassfileAnnotation.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,11 +8,9 @@
package scala.annotation
-/** <p>
- * A base class for classfile annotations. These are stored as
- * <a href="http://java.sun.com/j2se/1.5.0/docs/guide/language/annotations.html"
- * target="_top">Java annotations</a> in classfiles.
- * </p>
+/** A base class for classfile annotations. These are stored as
+ * [[http://docs.oracle.com/javase/7/docs/technotes/guides/language/annotations.html#_top Java annotations]]]
+ * in classfiles.
*
* @author Martin Odersky
* @version 1.1, 2/02/2007
diff --git a/src/library/scala/annotation/StaticAnnotation.scala b/src/library/scala/annotation/StaticAnnotation.scala
index fad875b..3e7e7f2 100644
--- a/src/library/scala/annotation/StaticAnnotation.scala
+++ b/src/library/scala/annotation/StaticAnnotation.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,10 +8,8 @@
package scala.annotation
-/** <p>
- * A base class for static annotations. These are available
- * to the Scala type checker, even across different compilation units.
- * </p>
+/** A base class for static annotations. These are available
+ * to the Scala type checker, even across different compilation units.
*
* @author Martin Odersky
* @version 1.1, 2/02/2007
diff --git a/src/library/scala/annotation/TypeConstraint.scala b/src/library/scala/annotation/TypeConstraint.scala
index d13b0ac..d80569b 100644
--- a/src/library/scala/annotation/TypeConstraint.scala
+++ b/src/library/scala/annotation/TypeConstraint.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,18 +8,16 @@
package scala.annotation
-/** <p>
- * A marker for annotations that, when applied to a type,
- * should be treated as a constraint on the annotated type.
- * A proper constraint should restrict the type based only
- * on information mentioned within the type. A Scala compiler
- * can use this assumption to rewrite the contents of the
- * constraint as necessary. To contrast, a type annotation
- * whose meaning depends on the context where it is written
- * down is not a proper constrained type, and this marker
- * should not be applied. A Scala compiler will drop such
- * annotations in cases where it would rewrite a type constraint.
- * </p>
+/** A marker for annotations that, when applied to a type, should be treated
+ * as a constraint on the annotated type.
+ *
+ * A proper constraint should restrict the type based only on information
+ * mentioned within the type. A Scala compiler can use this assumption to
+ * rewrite the contents of the constraint as necessary. To contrast, a type
+ * annotation whose meaning depends on the context where it is written
+ * down is not a proper constrained type, and this marker should not be
+ * applied. A Scala compiler will drop such annotations in cases where it
+ * would rewrite a type constraint.
*
* @author Lex Spoon
* @version 1.1, 2007-11-5
diff --git a/src/library/scala/annotation/bridge.scala b/src/library/scala/annotation/bridge.scala
index 6903708..9f25e2b 100644
--- a/src/library/scala/annotation/bridge.scala
+++ b/src/library/scala/annotation/bridge.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,4 +10,5 @@ package scala.annotation
/** If this annotation is present on a method, it will be treated as a bridge method.
*/
-private[scala] class bridge extends annotation.StaticAnnotation
+ at deprecated("Reconsider whether using this annotation will accomplish anything", "2.10.0")
+private[scala] class bridge extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/cloneable.scala b/src/library/scala/annotation/cloneable.scala
new file mode 100644
index 0000000..4fb62b6
--- /dev/null
+++ b/src/library/scala/annotation/cloneable.scala
@@ -0,0 +1,15 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.annotation
+
+/**
+ * An annotation that designates the class to which it is applied as cloneable
+ */
+ at deprecated("instead of `@cloneable class C`, use `class C extends Cloneable`", "2.10.0")
+class cloneable extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/elidable.scala b/src/library/scala/annotation/elidable.scala
index 8239deb..f9c5e8a 100644
--- a/src/library/scala/annotation/elidable.scala
+++ b/src/library/scala/annotation/elidable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,43 +10,81 @@ package scala.annotation
import java.util.logging.Level
-/** An annotation for methods for which invocations might
- * be removed in the generated code.
+/** An annotation for methods whose bodies may be excluded
+ * from compiler-generated bytecode.
+ *
+ * Behavior is influenced by passing `-Xelide-below <arg>` to `scalac`.
+ * Calls to methods marked elidable (as well as the method body) will
+ * be omitted from generated code if the priority given the annotation
+ * is lower than that given on the command line.
*
- * Behavior is influenced by passing -Xelide-below <arg>
- * to scalac. Methods marked elidable will be omitted from
- * generated code if the priority given the annotation is lower
- * than to the command line argument. Examples:
* {{{
- * import annotation.elidable._
+ * @elidable(123) // annotation priority
+ * scalac -Xelide-below 456 // command line priority
+ * }}}
*
- * @elidable(WARNING) def foo = log("foo")
- * @elidable(FINE) def bar = log("bar")
+ * The method call will be replaced with an expression which depends on
+ * the type of the elided expression. In decreasing order of precedence:
*
- * scalac -Xelide-below=1000
+ * {{{
+ * Unit ()
+ * Boolean false
+ * T <: AnyVal 0
+ * T >: Null null
+ * T >: Nothing Predef.???
* }}}
- * @since 2.8
+ *
+ * Complete example:
+ {{{
+ import scala.annotation._, elidable._
+ object Test extends App {
+ def expensiveComputation(): Int = { Thread.sleep(1000) ; 172 }
+
+ @elidable(WARNING) def warning(msg: String) = println(msg)
+ @elidable(FINE) def debug(msg: String) = println(msg)
+ @elidable(FINE) def computedValue = expensiveComputation()
+
+ warning("Warning! Danger! Warning!")
+ debug("Debug! Danger! Debug!")
+ println("I computed a value: " + computedValue)
+ }
+ % scalac example.scala && scala Test
+ Warning! Danger! Warning!
+ Debug! Danger! Debug!
+ I computed a value: 172
+
+ // INFO lies between WARNING and FINE
+ % scalac -Xelide-below INFO example.scala && scala Test
+ Warning! Danger! Warning!
+ I computed a value: 0
+ }}}
+ *
+ * @author Paul Phillips
+ * @since 2.8
*/
-final class elidable(final val level: Int) extends annotation.StaticAnnotation {}
+final class elidable(final val level: Int) extends scala.annotation.StaticAnnotation {}
/** This useless appearing code was necessary to allow people to use
* named constants for the elidable annotation. This is what it takes
* to convince the compiler to fold the constants: otherwise when it's
* time to check an elision level it's staring at a tree like
- * (Select(Level, Select(FINEST, Apply(intValue, Nil))))
- * instead of the number 300.
+ * {{{
+ * (Select(Level, Select(FINEST, Apply(intValue, Nil))))
+ * }}}
+ * instead of the number `300`.
*
* @since 2.8
*/
object elidable {
- /** The levels ALL and OFF are confusing in this context because the
- * sentiment being expressed when using the annotation is at cross purposes
- * with the one being expressed via -Xelide-below. This confusion reaches
- * its zenith at level OFF, where the annotation means "never elide this method"
- * but -Xelide-below OFF is how you would say "elide everything possible."
+ /** The levels `ALL` and `OFF` are confusing in this context because
+ * the sentiment being expressed when using the annotation is at cross
+ * purposes with the one being expressed via `-Xelide-below`. This
+ * confusion reaches its zenith at level `OFF`, where the annotation means
+ * ''never elide this method'' but `-Xelide-below OFF` is how you would
+ * say ''elide everything possible''.
*
* With no simple remedy at hand, the issue is now at least documented,
- * and aliases MAXIMUM and MINIMUM are offered.
+ * and aliases `MAXIMUM` and `MINIMUM` are offered.
*/
final val ALL = Int.MinValue // Level.ALL.intValue()
final val FINEST = 300 // Level.FINEST.intValue()
diff --git a/src/library/scala/annotation/implicitNotFound.scala b/src/library/scala/annotation/implicitNotFound.scala
index 0c6a5d6..bbde90c 100644
--- a/src/library/scala/annotation/implicitNotFound.scala
+++ b/src/library/scala/annotation/implicitNotFound.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -15,4 +15,4 @@ package scala.annotation
* @author Adriaan Moors
* @since 2.8.1
*/
-final class implicitNotFound(msg: String) extends annotation.StaticAnnotation {}
\ No newline at end of file
+final class implicitNotFound(msg: String) extends scala.annotation.StaticAnnotation {}
diff --git a/src/library/scala/annotation/meta/beanGetter.scala b/src/library/scala/annotation/meta/beanGetter.scala
new file mode 100644
index 0000000..ce4207e
--- /dev/null
+++ b/src/library/scala/annotation/meta/beanGetter.scala
@@ -0,0 +1,13 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.annotation.meta
+
+/**
+ * Consult the documentation in package [[scala.annotation.meta]].
+ */
+final class beanGetter extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/beanSetter.scala b/src/library/scala/annotation/meta/beanSetter.scala
new file mode 100644
index 0000000..ad30932
--- /dev/null
+++ b/src/library/scala/annotation/meta/beanSetter.scala
@@ -0,0 +1,13 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.annotation.meta
+
+/**
+ * Consult the documentation in package [[scala.annotation.meta]].
+ */
+final class beanSetter extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/companionClass.scala b/src/library/scala/annotation/meta/companionClass.scala
new file mode 100644
index 0000000..a0be63e
--- /dev/null
+++ b/src/library/scala/annotation/meta/companionClass.scala
@@ -0,0 +1,17 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.annotation.meta
+
+/**
+ * When defining an implicit class, the Scala compiler creates an implicit
+ * conversion method for it. Annotations `@companionClass` and `@companionMethod`
+ * control where an annotation on the implicit class will go. By default, annotations
+ * on an implicit class end up only on the class.
+ *
+ */
+final class companionClass extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/companionMethod.scala b/src/library/scala/annotation/meta/companionMethod.scala
new file mode 100644
index 0000000..74d6240
--- /dev/null
+++ b/src/library/scala/annotation/meta/companionMethod.scala
@@ -0,0 +1,17 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.annotation.meta
+
+/**
+ * When defining an implicit class, the Scala compiler creates an implicit
+ * conversion method for it. Annotations `@companionClass` and `@companionMethod`
+ * control where an annotation on the implicit class will go. By default, annotations
+ * on an implicit class end up only on the class.
+ *
+ */
+final class companionMethod extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/companionObject.scala b/src/library/scala/annotation/meta/companionObject.scala
new file mode 100644
index 0000000..8822993
--- /dev/null
+++ b/src/library/scala/annotation/meta/companionObject.scala
@@ -0,0 +1,14 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.annotation.meta
+
+/**
+ * Currently unused; intended as an annotation target for classes such as case classes
+ * that automatically generate a companion object
+ */
+final class companionObject extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/field.scala b/src/library/scala/annotation/meta/field.scala
new file mode 100644
index 0000000..84e7fc8
--- /dev/null
+++ b/src/library/scala/annotation/meta/field.scala
@@ -0,0 +1,13 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.annotation.meta
+
+/**
+ * Consult the documentation in package [[scala.annotation.meta]].
+ */
+final class field extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/getter.scala b/src/library/scala/annotation/meta/getter.scala
new file mode 100644
index 0000000..3190aef
--- /dev/null
+++ b/src/library/scala/annotation/meta/getter.scala
@@ -0,0 +1,13 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.annotation.meta
+
+/**
+ * Consult the documentation in package [[scala.annotation.meta]].
+ */
+final class getter extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/languageFeature.scala b/src/library/scala/annotation/meta/languageFeature.scala
new file mode 100644
index 0000000..5b40712
--- /dev/null
+++ b/src/library/scala/annotation/meta/languageFeature.scala
@@ -0,0 +1,13 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.annotation.meta
+
+/**
+ * An annotation giving particulars for a language feature in object `scala.language`.
+ */
+final class languageFeature(feature: String, enableRequired: Boolean) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/package.scala b/src/library/scala/annotation/meta/package.scala
new file mode 100644
index 0000000..2d18ae5
--- /dev/null
+++ b/src/library/scala/annotation/meta/package.scala
@@ -0,0 +1,68 @@
+package scala.annotation
+
+/**
+ * When defining a field, the Scala compiler creates up to four accessors
+ * for it: a getter, a setter, and if the field is annotated with
+ * `@BeanProperty`, a bean getter and a bean setter.
+ *
+ * For instance in the following class definition
+ *
+ * {{{
+ * class C(@myAnnot @BeanProperty var c: Int)
+ * }}}
+ *
+ * there are six entities which can carry the annotation `@myAnnot`: the
+ * constructor parameter, the generated field and the four accessors.
+ *
+ * By default, annotations on (`val`-, `var`- or plain) constructor parameters
+ * end up on the parameter, not on any other entity. Annotations on fields
+ * by default only end up on the field.
+ *
+ * The meta-annotations in package `scala.annotation.meta` are used
+ * to control where annotations on fields and class parameters are copied.
+ * This is done by annotating either the annotation type or the annotation
+ * class with one or several of the meta-annotations in this package.
+ *
+ * ==Annotating the annotation type==
+ *
+ * The target meta-annotations can be put on the annotation type when
+ * instantiating the annotation. In the following example, the annotation
+ * `@Id` will be added only to the bean getter `getX`.
+ *
+ * {{{
+ * import javax.persistence.Id
+ * class A {
+ * @(Id @beanGetter) @BeanProperty val x = 0
+ * }
+ * }}}
+ *
+ * In order to annotate the field as well, the meta-annotation `@field`
+ * would need to be added.
+ *
+ * The syntax can be improved using a type alias:
+ *
+ * {{{
+ * object ScalaJPA {
+ * type Id = javax.persistence.Id @beanGetter
+ * }
+ * import ScalaJPA.Id
+ * class A {
+ * @Id @BeanProperty val x = 0
+ * }
+ * }}}
+ *
+ * ==Annotating the annotation class==
+ *
+ * For annotations defined in Scala, a default target can be specified
+ * in the annotation class itself, for example
+ *
+ * {{{
+ * @getter
+ * class myAnnotation extends Annotation
+ * }}}
+ *
+ * This only changes the default target for the annotation `myAnnotation`.
+ * When instantiating the annotation, the target can still be specified
+ * as described in the last section.
+ */
+package object meta
diff --git a/src/library/scala/annotation/meta/param.scala b/src/library/scala/annotation/meta/param.scala
new file mode 100644
index 0000000..1b28e8d
--- /dev/null
+++ b/src/library/scala/annotation/meta/param.scala
@@ -0,0 +1,13 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.annotation.meta
+
+/**
+ * Consult the documentation in package [[scala.annotation.meta]].
+ */
+final class param extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/meta/setter.scala b/src/library/scala/annotation/meta/setter.scala
new file mode 100644
index 0000000..33be4f0
--- /dev/null
+++ b/src/library/scala/annotation/meta/setter.scala
@@ -0,0 +1,13 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.annotation.meta
+
+/**
+ * Consult the documentation in package [[scala.annotation.meta]].
+ */
+final class setter extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala
index 8ab12a7..adb6de6 100644
--- a/src/library/scala/annotation/migration.scala
+++ b/src/library/scala/annotation/migration.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -17,14 +17,15 @@ package scala.annotation
* order between Scala 2.7 and 2.8.
*
* @param message A message describing the change, which is emitted
- * by the compiler if the flag `-Xmigration` is set.
+ * by the compiler if the flag `-Xmigration` indicates a version
+ * prior to the changedIn version.
*
* @param changedIn The version, in which the behaviour change was
* introduced.
*
* @since 2.8
*/
- private[scala] final class migration(message: String, changedIn: String) extends annotation.StaticAnnotation {
- @deprecated("Use the constructor taking two Strings instead.", "2.10")
+ private[scala] final class migration(message: String, changedIn: String) extends scala.annotation.StaticAnnotation {
+ @deprecated("Use the constructor taking two Strings instead.", "2.10.0")
def this(majorVersion: Int, minorVersion: Int, message: String) = this(message, majorVersion + "." + minorVersion)
- }
\ No newline at end of file
+ }
diff --git a/src/library/scala/annotation/serializable.scala b/src/library/scala/annotation/serializable.scala
index 5a0d126..1e1aff1 100644
--- a/src/library/scala/annotation/serializable.scala
+++ b/src/library/scala/annotation/serializable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,4 +12,4 @@ package scala.annotation
* An annotation that designates the class to which it is applied as serializable
*/
@deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0")
-class serializable extends annotation.StaticAnnotation
+class serializable extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/strictfp.scala b/src/library/scala/annotation/strictfp.scala
index e4efa6e..dd8659a 100644
--- a/src/library/scala/annotation/strictfp.scala
+++ b/src/library/scala/annotation/strictfp.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -15,4 +15,4 @@ package scala.annotation
* @version 2.9
* @since 2.9
*/
-class strictfp extends annotation.StaticAnnotation
+class strictfp extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/switch.scala b/src/library/scala/annotation/switch.scala
index 3734686..23e3923 100644
--- a/src/library/scala/annotation/switch.scala
+++ b/src/library/scala/annotation/switch.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,8 +9,7 @@ package scala.annotation
/** An annotation to be applied to a match expression. If present,
* the compiler will verify that the match has been compiled to a
- * [[http://java.sun.com/docs/books/jvms/second_edition/html/Instructions2.doc14.html tableswitch]]
- * or [[http://java.sun.com/docs/books/jvms/second_edition/html/Instructions2.doc8.html#lookupswitch lookupswitch]]
+ * [[http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-3.html#jvms-3.10 tableswitch or lookupswitch]]
* and issue an error if it instead compiles into a series of conditional expressions.
* Example usage:
{{{
@@ -26,4 +25,4 @@ package scala.annotation
* @author Paul Phillips
* @since 2.8
*/
-final class switch extends annotation.StaticAnnotation
+final class switch extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/tailrec.scala b/src/library/scala/annotation/tailrec.scala
index a2c1791..03c2b6a 100644
--- a/src/library/scala/annotation/tailrec.scala
+++ b/src/library/scala/annotation/tailrec.scala
@@ -1,18 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
+
package scala.annotation
-/** <p>
- * A method annotation which verifies that the method will be compiled
- * with tail call optimization. If it is present, the compiler will
- * issue an error if the method cannot be optimized into a loop.
- * </p>
+/** A method annotation which verifies that the method will be compiled
+ * with tail call optimization.
+ *
+ * If it is present, the compiler will issue an error if the method cannot
+ * be optimized into a loop.
*
* @since 2.8
*/
-final class tailrec extends annotation.StaticAnnotation
+final class tailrec extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/target/beanGetter.scala b/src/library/scala/annotation/target/beanGetter.scala
deleted file mode 100644
index 1707a9d..0000000
--- a/src/library/scala/annotation/target/beanGetter.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-package scala.annotation.target
-
-/**
- * Consult the documentation in package [[scala.annotation.target]].
- */
-final class beanGetter extends annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/target/beanSetter.scala b/src/library/scala/annotation/target/beanSetter.scala
deleted file mode 100644
index 11e95db..0000000
--- a/src/library/scala/annotation/target/beanSetter.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-package scala.annotation.target
-
-/**
- * Consult the documentation in package [[scala.annotation.target]].
- */
-final class beanSetter extends annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/target/field.scala b/src/library/scala/annotation/target/field.scala
deleted file mode 100644
index cd0e5a5..0000000
--- a/src/library/scala/annotation/target/field.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-package scala.annotation.target
-
-/**
- * Consult the documentation in package [[scala.annotation.target]].
- */
-final class field extends annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/target/getter.scala b/src/library/scala/annotation/target/getter.scala
deleted file mode 100644
index 9363401..0000000
--- a/src/library/scala/annotation/target/getter.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-package scala.annotation.target
-
-/**
- * Consult the documentation in package [[scala.annotation.target]].
- */
-final class getter extends annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/target/package.scala b/src/library/scala/annotation/target/package.scala
index 454ce46..ac2836c 100644
--- a/src/library/scala/annotation/target/package.scala
+++ b/src/library/scala/annotation/target/package.scala
@@ -1,68 +1,29 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.annotation
-/**
- * When defining a field, the Scala compiler creates up to four accessors
- * for it: a getter, a setter, and if the field is annotated with
- * `@BeanProperty`, a bean getter and a bean setter.
- *
- * For instance in the following class definition
- *
- * {{{
- * class C(@myAnnot @BeanProperty var c: Int)
- * }}}
- *
- * there are six entities which can carry the annotation `@myAnnot`: the
- * constructor parameter, the generated field and the four accessors.
- *
- * By default, annotations on (`val`-, `var`- or plain) constructor parameters
- * end up on the parameter, not on any other entity. Annotations on fields
- * by default only end up on the field.
- *
- * The meta-annotations in package `scala.annotation.target` are used
- * to control where annotations on fields and class parameters are copied.
- * This is done by annotating either the annotation type or the annotation
- * class with one or several of the meta-annotations in this package.
- *
- * ==Annotating the annotation type==
- *
- * The target meta-annotations can be put on the annotation type when
- * instantiating the annotation. In the following example, the annotation
- * `@Id` will be added only to the bean getter `getX`.
- *
- * {{{
- * import javax.persistence.Id
- * class A {
- * @(Id @beanGetter) @BeanProperty val x = 0
- * }
- * }}}
- *
- * In order to annotate the field as well, the meta-annotation `@field`
- * would need to be added.
- *
- * The syntax can be improved using a type alias:
- *
- * {{{
- * object ScalaJPA {
- * type Id = javax.persistence.Id @beanGetter
- * }
- * import ScalaJPA.Id
- * class A {
- * @Id @BeanProperty val x = 0
- * }
- * }}}
- *
- * ==Annotating the annotation class==
- *
- * For annotations defined in Scala, a default target can be specified
- * in the annotation class itself, for example
- *
- * {{{
- * @getter
- * class myAnnotation extends Annotation
- * }}}
- *
- * This only changes the default target for the annotation `myAnnotation`.
- * When instantiating the annotation, the target can still be specified
- * as described in the last section.
- */
-package object target
+package object target {
+ @deprecated("Use `@scala.annotation.meta.beanGetter` instead", "2.10.0")
+ type beanGetter = scala.annotation.meta.beanGetter
+
+ @deprecated("Use `@scala.annotation.meta.beanSetter` instead", "2.10.0")
+ type beanSetter = scala.annotation.meta.beanSetter
+
+ @deprecated("Use `@scala.annotation.meta.field` instead", "2.10.0")
+ type field = scala.annotation.meta.field
+
+ @deprecated("Use `@scala.annotation.meta.getter` instead", "2.10.0")
+ type getter = scala.annotation.meta.getter
+
+ @deprecated("Use `@scala.annotation.meta.param` instead", "2.10.0")
+ type param = scala.annotation.meta.param
+
+ @deprecated("Use `@scala.annotation.meta.setter` instead", "2.10.0")
+ type setter = scala.annotation.meta.setter
+}
diff --git a/src/library/scala/annotation/target/param.scala b/src/library/scala/annotation/target/param.scala
deleted file mode 100644
index 5b917b8..0000000
--- a/src/library/scala/annotation/target/param.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-package scala.annotation.target
-
-/**
- * Consult the documentation in package [[scala.annotation.target]].
- */
-final class param extends annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/target/setter.scala b/src/library/scala/annotation/target/setter.scala
deleted file mode 100644
index 1c13a79..0000000
--- a/src/library/scala/annotation/target/setter.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-package scala.annotation.target
-
-/**
- * Consult the documentation in package [[scala.annotation.target]].
- */
-final class setter extends annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/unchecked/uncheckedStable.scala b/src/library/scala/annotation/unchecked/uncheckedStable.scala
index 13b500f..d1414df 100644
--- a/src/library/scala/annotation/unchecked/uncheckedStable.scala
+++ b/src/library/scala/annotation/unchecked/uncheckedStable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,4 +12,4 @@ package scala.annotation.unchecked
*
* @since 2.7
*/
-final class uncheckedStable extends annotation.StaticAnnotation {}
+final class uncheckedStable extends scala.annotation.StaticAnnotation {}
diff --git a/src/library/scala/annotation/unchecked/uncheckedVariance.scala b/src/library/scala/annotation/unchecked/uncheckedVariance.scala
index 51433be..0cd6aac 100644
--- a/src/library/scala/annotation/unchecked/uncheckedVariance.scala
+++ b/src/library/scala/annotation/unchecked/uncheckedVariance.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,4 +12,4 @@ package scala.annotation.unchecked
*
* @since 2.7
*/
-final class uncheckedVariance extends annotation.StaticAnnotation {}
+final class uncheckedVariance extends scala.annotation.StaticAnnotation {}
diff --git a/src/library/scala/annotation/unspecialized.scala b/src/library/scala/annotation/unspecialized.scala
new file mode 100644
index 0000000..6e77e3a
--- /dev/null
+++ b/src/library/scala/annotation/unspecialized.scala
@@ -0,0 +1,17 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.annotation
+
+/** A method annotation which suppresses the creation of
+ * additional specialized forms based on enclosing specialized
+ * type parameters.
+ *
+ * @since 2.10
+ */
+class unspecialized extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/varargs.scala b/src/library/scala/annotation/varargs.scala
index e29f7ba..46fc790 100644
--- a/src/library/scala/annotation/varargs.scala
+++ b/src/library/scala/annotation/varargs.scala
@@ -1,18 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
+
package scala.annotation
-/** <p>
- * A method annotation which instructs the compiler to generate a
- * Java varargs-style forwarder method for interop. This annotation can
- * only be applied to methods with repeated parameters.
- * </p>
+/** A method annotation which instructs the compiler to generate a
+ * Java varargs-style forwarder method for interop. This annotation can
+ * only be applied to methods with repeated parameters.
*
* @since 2.9
*/
-final class varargs extends annotation.StaticAnnotation
+final class varargs extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/beans/BeanDescription.scala b/src/library/scala/beans/BeanDescription.scala
new file mode 100644
index 0000000..a9c748d
--- /dev/null
+++ b/src/library/scala/beans/BeanDescription.scala
@@ -0,0 +1,19 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+package scala.beans
+
+/** Provides a short description that will be included when generating
+ * bean information. This annotation can be attached to the bean itself,
+ * or to any member.
+ *
+ * @author Ross Judson (rjudson at managedobjects.com)
+ */
+class BeanDescription(val description: String) extends scala.annotation.Annotation
+
diff --git a/src/library/scala/beans/BeanDisplayName.scala b/src/library/scala/beans/BeanDisplayName.scala
new file mode 100644
index 0000000..5937c65
--- /dev/null
+++ b/src/library/scala/beans/BeanDisplayName.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+package scala.beans
+
+/** Provides a display name when generating bean information. This
+ * annotation can be attached to the bean itself, or to any member.
+ *
+ * @author Ross Judson (rjudson at managedobjects.com)
+ */
+class BeanDisplayName(val name: String) extends scala.annotation.Annotation
+
diff --git a/src/library/scala/beans/BeanInfo.scala b/src/library/scala/beans/BeanInfo.scala
new file mode 100644
index 0000000..799e93e
--- /dev/null
+++ b/src/library/scala/beans/BeanInfo.scala
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.beans
+
+/** This annotation indicates that a JavaBean-compliant `BeanInfo` class
+ * should be generated for this annotated Scala class.
+ *
+ * - A `'''val'''` becomes a read-only property.
+ * - A `'''var'''` becomes a read-write property.
+ * - A `'''def'''` becomes a method.
+ *
+ * @author Ross Judson (rjudson at managedobjects.com)
+ */
+class BeanInfo extends scala.annotation.Annotation
diff --git a/src/library/scala/beans/BeanInfoSkip.scala b/src/library/scala/beans/BeanInfoSkip.scala
new file mode 100644
index 0000000..ccbb193
--- /dev/null
+++ b/src/library/scala/beans/BeanInfoSkip.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+package scala.beans
+
+/** This annotation indicates that bean information should
+ * <strong>not</strong> be generated for the val, var, or def that it is
+ * attached to.
+ *
+ * @author Ross Judson (rjudson at managedobjects.com)
+ */
+class BeanInfoSkip extends scala.annotation.Annotation
diff --git a/src/library/scala/beans/BeanProperty.scala b/src/library/scala/beans/BeanProperty.scala
new file mode 100644
index 0000000..fec469d
--- /dev/null
+++ b/src/library/scala/beans/BeanProperty.scala
@@ -0,0 +1,26 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.beans
+
+/** When attached to a field, this annotation adds a setter and a getter
+ * method following the Java Bean convention. For example:
+ * {{{
+ * @BeanProperty
+ * var status = ""
+ * }}}
+ * adds the following methods to the class:
+ * {{{
+ * def setStatus(s: String) { this.status = s }
+ * def getStatus: String = this.status
+ * }}}
+ * For fields of type `Boolean`, if you need a getter named `isStatus`,
+ * use the `scala.beans.BooleanBeanProperty` annotation instead.
+ */
+ at scala.annotation.meta.field
+class BeanProperty extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/beans/BooleanBeanProperty.scala b/src/library/scala/beans/BooleanBeanProperty.scala
new file mode 100644
index 0000000..775e1ac
--- /dev/null
+++ b/src/library/scala/beans/BooleanBeanProperty.scala
@@ -0,0 +1,16 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.beans
+
+/** This annotation has the same functionality as
+ * `scala.beans.BeanProperty`, but the generated Bean getter will be
+ * named `isFieldName` instead of `getFieldName`.
+ */
+ at scala.annotation.meta.field
+class BooleanBeanProperty extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/beans/ScalaBeanInfo.scala b/src/library/scala/beans/ScalaBeanInfo.scala
new file mode 100644
index 0000000..3a95335
--- /dev/null
+++ b/src/library/scala/beans/ScalaBeanInfo.scala
@@ -0,0 +1,46 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+package scala.beans
+
+/** Provides some simple runtime processing necessary to create
+ * JavaBean descriptors for Scala entities. The compiler creates
+ * subclasses of this class automatically when the BeanInfo annotation is
+ * attached to a class.
+ *
+ * @author Ross Judson (rjudson at managedobjects.com)
+ */
+abstract class ScalaBeanInfo(clazz: java.lang.Class[_],
+ props: Array[String],
+ methods: Array[String]) extends java.beans.SimpleBeanInfo {
+
+ import java.beans._
+
+ private val pd = new Array[PropertyDescriptor](props.length / 3)
+ private val md =
+ for (m <- clazz.getMethods if methods.exists(_ == m.getName))
+ yield new MethodDescriptor(m)
+
+ init
+
+ override def getPropertyDescriptors() = pd
+ override def getMethodDescriptors() = md
+
+ // override def getAdditionalBeanInfo() = Array(Introspector getBeanInfo clazz.getSuperclass)
+
+ private def init() {
+ var i = 0;
+ while (i < props.length) {
+ pd(i/3) = new PropertyDescriptor(props(i), clazz, props(i+1), props(i+2))
+ i = i + 3;
+ }
+ }
+
+}
+
diff --git a/src/library/scala/cloneable.scala b/src/library/scala/cloneable.scala
deleted file mode 100644
index 32a1ea6..0000000
--- a/src/library/scala/cloneable.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-
-/**
- * An annotation that designates the class to which it is applied as cloneable
- */
-class cloneable extends annotation.StaticAnnotation
diff --git a/src/library/scala/collection/BitSet.scala b/src/library/scala/collection/BitSet.scala
index 2f411f9..6985563 100644
--- a/src/library/scala/collection/BitSet.scala
+++ b/src/library/scala/collection/BitSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -15,14 +15,14 @@ import generic._
/** A common base class for mutable and immutable bitsets.
* $bitsetinfo
*/
-trait BitSet extends Set[Int]
+trait BitSet extends SortedSet[Int]
with BitSetLike[BitSet] {
override def empty: BitSet = BitSet.empty
}
/** $factoryInfo
* @define coll bitset
- * @define Coll BitSet
+ * @define Coll `BitSet`
*/
object BitSet extends BitSetFactory[BitSet] {
val empty: BitSet = immutable.BitSet.empty
diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala
index 447af94..4a1c0be 100644
--- a/src/library/scala/collection/BitSetLike.scala
+++ b/src/library/scala/collection/BitSetLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -30,9 +30,9 @@ import mutable.StringBuilder
* @version 2.8
* @since 2.8
* @define coll bitset
- * @define Coll BitSet
+ * @define Coll `BitSet`
*/
-trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, This] { self =>
+trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSetLike[Int, This] { self =>
def empty: This
@@ -46,19 +46,60 @@ trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, T
/** Creates a new set of this kind from an array of longs
*/
- protected def fromArray(elems: Array[Long]): This
+ protected def fromBitMaskNoCopy(elems: Array[Long]): This
+
+ /** Creates a bit mask for this set as a new array of longs
+ */
+ def toBitMask: Array[Long] = {
+ val a = new Array[Long](nwords)
+ var i = a.length
+ while(i > 0) {
+ i -= 1
+ a(i) = word(i)
+ }
+ a
+ }
override def size: Int = {
var s = 0
var i = nwords
while (i > 0) {
i -= 1
- s += popCount(word(i))
+ s += java.lang.Long.bitCount(word(i))
}
s
}
- def iterator = new Iterator[Int] {
+ implicit def ordering: Ordering[Int] = Ordering.Int
+
+ def rangeImpl(from: Option[Int], until: Option[Int]): This = {
+ val a = toBitMask
+ val len = a.length
+ if(from.isDefined) {
+ var f = from.get
+ var pos = 0
+ while(f >= 64 && pos < len) {
+ f -= 64
+ a(pos) = 0
+ pos += 1
+ }
+ if(f > 0 && pos < len) a(pos) &= ~((1L << f)-1)
+ }
+ if(until.isDefined) {
+ val u = until.get
+ val w = u / 64
+ val b = u % 64
+ var clearw = w+1
+ while(clearw < len) {
+ a(clearw) = 0
+ clearw += 1
+ }
+ if(w < len) a(w) &= (1L << b)-1
+ }
+ fromBitMaskNoCopy(a)
+ }
+
+ def iterator: Iterator[Int] = new AbstractIterator[Int] {
private var current = 0
private val end = nwords * WordLength
def hasNext: Boolean = {
@@ -91,12 +132,12 @@ trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, T
val words = new Array[Long](len)
for (idx <- 0 until len)
words(idx) = this.word(idx) | other.word(idx)
- fromArray(words)
+ fromBitMaskNoCopy(words)
}
/** Computes the intersection between this bitset and another bitset by performing
* a bitwise "and".
- * @param that the bitset to intersect with.
+ * @param other the bitset to intersect with.
* @return a new bitset consisting of all elements that are both in this
* bitset and in the given bitset `other`.
*/
@@ -105,13 +146,13 @@ trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, T
val words = new Array[Long](len)
for (idx <- 0 until len)
words(idx) = this.word(idx) & other.word(idx)
- fromArray(words)
+ fromBitMaskNoCopy(words)
}
/** Computes the difference of this bitset and another bitset by performing
* a bitwise "and-not".
*
- * @param that the set of bits to exclude.
+ * @param other the set of bits to exclude.
* @return a bitset containing those bits of this
* bitset that are not also contained in the given bitset `other`.
*/
@@ -120,13 +161,13 @@ trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, T
val words = new Array[Long](len)
for (idx <- 0 until len)
words(idx) = this.word(idx) & ~other.word(idx)
- fromArray(words)
+ fromBitMaskNoCopy(words)
}
/** Computes the symmetric difference of this bitset and another bitset by performing
* a bitwise "exclusive-or".
*
- * @param that the other bitset to take part in the symmetric difference.
+ * @param other the other bitset to take part in the symmetric difference.
* @return a bitset containing those bits of this
* bitset or the other bitset that are not contained in both bitsets.
*/
@@ -135,7 +176,7 @@ trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, T
val words = new Array[Long](len)
for (idx <- 0 until len)
words(idx) = this.word(idx) ^ other.word(idx)
- fromArray(words)
+ fromBitMaskNoCopy(words)
}
def contains(elem: Int): Boolean =
@@ -143,7 +184,7 @@ trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, T
/** Tests whether this bitset is a subset of another bitset.
*
- * @param that the bitset to test.
+ * @param other the bitset to test.
* @return `true` if this bitset is a subset of `other`, i.e. if
* every bit of this set is also an element in `other`.
*/
@@ -180,15 +221,4 @@ object BitSetLike {
else assert(w == 0L)
newelems
}
-
- private val pc1: Array[Int] = {
- def countBits(x: Int): Int = if (x == 0) 0 else x % 2 + countBits(x >>> 1)
- Array.tabulate(256)(countBits _)
- }
-
- private def popCount(w: Long): Int = {
- def pc2(w: Int) = if (w == 0) 0 else pc1(w & 0xff) + pc1(w >>> 8)
- def pc4(w: Int) = if (w == 0) 0 else pc2(w & 0xffff) + pc2(w >>> 16)
- if (w == 0L) 0 else pc4(w.toInt) + pc4((w >>> 32).toInt)
- }
}
diff --git a/src/library/scala/collection/BufferedIterator.scala b/src/library/scala/collection/BufferedIterator.scala
index 2ca9187..741bca4 100644
--- a/src/library/scala/collection/BufferedIterator.scala
+++ b/src/library/scala/collection/BufferedIterator.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/CustomParallelizable.scala b/src/library/scala/collection/CustomParallelizable.scala
index dc634c6..53fe32b 100644
--- a/src/library/scala/collection/CustomParallelizable.scala
+++ b/src/library/scala/collection/CustomParallelizable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,7 @@ package scala.collection
import parallel.Combiner
-trait CustomParallelizable[+A, +ParRepr <: Parallel] extends Parallelizable[A, ParRepr] {
+trait CustomParallelizable[+A, +ParRepr <: Parallel] extends Any with Parallelizable[A, ParRepr] {
override def par: ParRepr
override protected[this] def parCombiner: Combiner[A, ParRepr] = throw new UnsupportedOperationException("")
}
diff --git a/src/library/scala/collection/DefaultMap.scala b/src/library/scala/collection/DefaultMap.scala
index cd6d7f8..5c91183 100644
--- a/src/library/scala/collection/DefaultMap.scala
+++ b/src/library/scala/collection/DefaultMap.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,26 +12,22 @@ package scala.collection
import generic._
-/** <p>
- * A default map which implements the <code>+</code> and <code>-</code>
- * methods of maps.<br/>
- * Instances that inherit from <code>DefaultMap[A, B]</code> still have to
- * define:
- * </p><pre>
- * <b>def</b> get(key: A): Option[B]
- * <b>def</b> iterator: Iterator[(A, B)]</pre>
- * <p>
- * It refers back to the original map.
- * </p>
- * <p>
- * It might also be advisable to override <code>foreach</code> or
- * <code>size</code> if efficient implementations can be found.
- * </p>
+/** A default map which implements the `+` and `-` methods of maps.
+ *
+ * Instances that inherit from `DefaultMap[A, B]` still have to define:
+ * {{{
+ * def get(key: A): Option[B]
+ * def iterator: Iterator[(A, B)]
+ * }}}
+ * It refers back to the original map.
+ *
+ * It might also be advisable to override `foreach` or `size` if efficient
+ * implementations can be found.
*
* @since 2.8
*/
trait DefaultMap[A, +B] extends Map[A, B] { self =>
-
+
/** A default implementation which creates a new immutable map.
*/
override def +[B1 >: B](kv: (A, B1)): Map[A, B1] = {
@@ -45,7 +41,7 @@ trait DefaultMap[A, +B] extends Map[A, B] { self =>
*/
override def - (key: A): Map[A, B] = {
val b = newBuilder
- b ++= this filter (key !=)
+ b ++= this filter (key != _._1)
b.result
}
}
diff --git a/src/library/scala/collection/GenIterable.scala b/src/library/scala/collection/GenIterable.scala
index 9d25f21..b4e7a14 100644
--- a/src/library/scala/collection/GenIterable.scala
+++ b/src/library/scala/collection/GenIterable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -30,7 +30,7 @@ extends GenIterableLike[A, GenIterable[A]]
object GenIterable extends GenTraversableFactory[GenIterable] {
- implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A] = Iterable.newBuilder
}
diff --git a/src/library/scala/collection/GenIterableLike.scala b/src/library/scala/collection/GenIterableLike.scala
index 18132f0..2ba9a72 100644
--- a/src/library/scala/collection/GenIterableLike.scala
+++ b/src/library/scala/collection/GenIterableLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -16,7 +16,7 @@ import generic.{ CanBuildFrom => CBF, _ }
* This trait contains abstract methods and methods that can be implemented
* directly in terms of other methods.
*
- * @define Coll GenIterable
+ * @define Coll `GenIterable`
* @define coll general iterable collection
*
* @author Martin Odersky
@@ -34,23 +34,24 @@ import generic.{ CanBuildFrom => CBF, _ }
* This is a base trait for all Scala collections that define an `iterator`
* method to step through one-by-one the collection's elements.
*/
-trait GenIterableLike[+A, +Repr] extends GenTraversableLike[A, Repr] {
+trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
def iterator: Iterator[A]
/** Checks if the other iterable collection contains the same elements in the same order as this $coll.
*
- * $orderDependent
- * $willNotTerminateInf
- *
* @param that the collection to compare with.
- * @tparam B the type of the elements of collection `that`.
+ * @tparam A1 the type of the elements of collection `that`.
* @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
*
* @usecase def sameElements(that: GenIterable[A]): Boolean
+ * @inheritdoc
*
- * @param that the collection to compare with.
- * @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
+ * $orderDependent
+ * $willNotTerminateInf
+ *
+ * @param that the collection to compare with.
+ * @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
*/
def sameElements[A1 >: A](that: GenIterable[A1]): Boolean
@@ -58,8 +59,6 @@ trait GenIterableLike[+A, +Repr] extends GenTraversableLike[A, Repr] {
* by combining corresponding elements in pairs.
* If one of the two collections is longer than the other, its remaining elements are ignored.
*
- * $orderDependent
- *
* @param that The iterable providing the second half of each result pair
* @tparam A1 the type of the first half of the returned pairs (this is always a supertype
* of the collection's element type `A`).
@@ -71,38 +70,42 @@ trait GenIterableLike[+A, +Repr] extends GenTraversableLike[A, Repr] {
* of the returned collection is the minimum of the lengths of this $coll and `that`.
*
* @usecase def zip[B](that: GenIterable[B]): $Coll[(A, B)]
+ * @inheritdoc
*
- * @param that The iterable providing the second half of each result pair
- * @tparam B the type of the second half of the returned pairs
- * @return a new $coll containing pairs consisting of
- * corresponding elements of this $coll and `that`. The length
- * of the returned collection is the minimum of the lengths of this $coll and `that`.
+ * $orderDependent
+ *
+ * @param that The iterable providing the second half of each result pair
+ * @tparam B the type of the second half of the returned pairs
+ * @return a new $coll containing pairs consisting of
+ * corresponding elements of this $coll and `that`. The length
+ * of the returned collection is the minimum of the lengths of this $coll and `that`.
*/
def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CBF[Repr, (A1, B), That]): That
/** Zips this $coll with its indices.
*
- * $orderDependent
- *
* @tparam A1 the type of the first half of the returned pairs (this is always a supertype
* of the collection's element type `A`).
* @tparam That the class of the returned collection. Where possible, `That` is
- * the same class as the current collection class `Repr`, but this
- * depends on the element type `(A1, Int)` being admissible for that class,
- * which means that an implicit instance of type `CanBuildFrom[Repr, (A1, Int), That]`.
- * is found.
- * @tparam bf an implicit value of class `CanBuildFrom` which determines the
- * result class `That` from the current representation type `Repr`
- * and the new element type `(A1, Int)`.
+ * the same class as the current collection class `Repr`, but this
+ * depends on the element type `(A1, Int)` being admissible for that class,
+ * which means that an implicit instance of type `CanBuildFrom[Repr, (A1, Int), That]`.
+ * is found.
+ * @param bf an implicit value of class `CanBuildFrom` which determines the
+ * result class `That` from the current representation type `Repr`
+ * and the new element type `(A1, Int)`.
* @return A new collection of type `That` containing pairs consisting of all elements of this
* $coll paired with their index. Indices start at `0`.
*
* @usecase def zipWithIndex: $Coll[(A, Int)]
+ * @inheritdoc
*
- * @return A new $coll containing pairs consisting of all elements of this
- * $coll paired with their index. Indices start at `0`.
- * @example
- * `List("a", "b", "c").zipWithIndex = List(("a", 0), ("b", 1), ("c", 2))`
+ * $orderDependent
+ *
+ * @return A new $coll containing pairs consisting of all elements of this
+ * $coll paired with their index. Indices start at `0`.
+ * @example
+ * `List("a", "b", "c").zipWithIndex = List(("a", 0), ("b", 1), ("c", 2))`
*
*/
def zipWithIndex[A1 >: A, That](implicit bf: CBF[Repr, (A1, Int), That]): That
@@ -112,8 +115,6 @@ trait GenIterableLike[+A, +Repr] extends GenTraversableLike[A, Repr] {
* If one of the two collections is shorter than the other,
* placeholder elements are used to extend the shorter collection to the length of the longer.
*
- * $orderDependent
- *
* @param that the iterable providing the second half of each result pair
* @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`.
* @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll.
@@ -124,20 +125,20 @@ trait GenIterableLike[+A, +Repr] extends GenTraversableLike[A, Repr] {
* If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
*
* @usecase def zipAll[B](that: Iterable[B], thisElem: A, thatElem: B): $Coll[(A, B)]
- *
- * @param that The iterable providing the second half of each result pair
- * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`.
- * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll.
- * @tparam B the type of the second half of the returned pairs
- * @return a new $coll containing pairs consisting of
- * corresponding elements of this $coll and `that`. The length
- * of the returned collection is the maximum of the lengths of this $coll and `that`.
- * If this $coll is shorter than `that`, `thisElem` values are used to pad the result.
- * If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
+ * @inheritdoc
+ *
+ * $orderDependent
+ *
+ * @param that The iterable providing the second half of each result pair
+ * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`.
+ * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll.
+ * @tparam B the type of the second half of the returned pairs
+ * @return a new $coll containing pairs consisting of
+ * corresponding elements of this $coll and `that`. The length
+ * of the returned collection is the maximum of the lengths of this $coll and `that`.
+ * If this $coll is shorter than `that`, `thisElem` values are used to pad the result.
+ * If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
*/
def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CBF[Repr, (A1, B), That]): That
- def isEmpty = iterator.isEmpty
-
- def head = iterator.next
}
diff --git a/src/library/scala/collection/GenIterableView.scala b/src/library/scala/collection/GenIterableView.scala
index 2ae964b..ca0332e 100644
--- a/src/library/scala/collection/GenIterableView.scala
+++ b/src/library/scala/collection/GenIterableView.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/GenIterableViewLike.scala b/src/library/scala/collection/GenIterableViewLike.scala
index 9e3927e..4e4ceb4 100644
--- a/src/library/scala/collection/GenIterableViewLike.scala
+++ b/src/library/scala/collection/GenIterableViewLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -25,6 +25,7 @@ self =>
def iterator: Iterator[B]
override def foreach[U](f: B => U): Unit = iterator foreach f
override def toString = viewToString
+ override def isEmpty = !iterator.hasNext
}
trait EmptyView extends Transformed[Nothing] with super.EmptyView {
diff --git a/src/library/scala/collection/GenMap.scala b/src/library/scala/collection/GenMap.scala
index 0556069..f7b2ae4 100644
--- a/src/library/scala/collection/GenMap.scala
+++ b/src/library/scala/collection/GenMap.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -23,6 +23,8 @@ extends GenMapLike[A, B, GenMap[A, B]]
with GenIterable[(A, B)]
{
def seq: Map[A, B]
+
+ def updated [B1 >: B](key: A, value: B1): GenMap[A, B1]
}
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index 2bbcc8f..367377a 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,7 +11,7 @@ package scala.collection
/** A trait for all maps upon which operations may be
* implemented in parallel.
*
- * @define Coll GenMap
+ * @define Coll `GenMap`
* @define coll general map
* @author Martin Odersky
* @author Aleksandar Prokopec
@@ -31,7 +31,75 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals
// This hash code must be symmetric in the contents but ought not
// collide trivially.
- override def hashCode() = util.MurmurHash.symmetricHash(seq, Map.hashSeed)
+ override def hashCode()= scala.util.hashing.MurmurHash3.mapHash(seq)
+
+ /** Returns the value associated with a key, or a default value if the key is not contained in the map.
+ * @param key the key.
+ * @param default a computation that yields a default value in case no binding for `key` is
+ * found in the map.
+ * @tparam B1 the result type of the default computation.
+ * @return the value associated with `key` if it exists,
+ * otherwise the result of the `default` computation.
+ * @usecase def getOrElse(key: A, default: => B): B
+ * @inheritdoc
+ */
+ def getOrElse[B1 >: B](key: A, default: => B1): B1
+
+ /** Tests whether this map contains a binding for a key.
+ *
+ * @param key the key
+ * @return `true` if there is a binding for `key` in this map, `false` otherwise.
+ */
+ def contains(key: A): Boolean
+
+ /** Tests whether this map contains a binding for a key. This method,
+ * which implements an abstract method of trait `PartialFunction`,
+ * is equivalent to `contains`.
+ *
+ * @param key the key
+ * @return `true` if there is a binding for `key` in this map, `false` otherwise.
+ */
+ def isDefinedAt(key: A): Boolean
+
+ def keySet: GenSet[A]
+
+ /** Collects all keys of this map in an iterable collection.
+ *
+ * @return the keys of this map as an iterable.
+ */
+ def keys: GenIterable[A]
+
+ /** Collects all values of this map in an iterable collection.
+ *
+ * @return the values of this map as an iterable.
+ */
+ def values: GenIterable[B]
+
+ /** Creates an iterator for all keys.
+ *
+ * @return an iterator over all keys.
+ */
+ def keysIterator: Iterator[A]
+
+ /** Creates an iterator for all values in this map.
+ *
+ * @return an iterator over all values that are associated with some key in this map.
+ */
+ def valuesIterator: Iterator[B]
+
+ /** Filters this map by retaining only keys satisfying a predicate.
+ * @param p the predicate used to test keys
+ * @return an immutable map consisting only of those key value pairs of this map where the key satisfies
+ * the predicate `p`. The resulting map wraps the original map without copying any elements.
+ */
+ def filterKeys(p: A => Boolean): GenMap[A, B]
+
+ /** Transforms this map by applying a function to every retrieved value.
+ * @param f the function used to transform values of this map.
+ * @return a map view which maps every key of this map
+ * to `f(this(key))`. The resulting map wraps the original map without copying any elements.
+ */
+ def mapValues[C](f: B => C): GenMap[A, C]
/** Compares two maps structurally; i.e. checks if all mappings
* contained in this map are also contained in the other map,
diff --git a/src/library/scala/collection/GenSeq.scala b/src/library/scala/collection/GenSeq.scala
index 9a7ee98..4c5488d 100644
--- a/src/library/scala/collection/GenSeq.scala
+++ b/src/library/scala/collection/GenSeq.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -31,6 +31,6 @@ extends GenSeqLike[A, GenSeq[A]]
object GenSeq extends GenTraversableFactory[GenSeq] {
- implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A] = Seq.newBuilder
}
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index e78a1fe..78d6334 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,7 +9,6 @@
package scala.collection
import generic._
-import annotation.bridge
/** A template trait for all sequences which may be traversed
* in parallel.
@@ -30,7 +29,8 @@ import annotation.bridge
* Sequences are special cases of iterable collections of class `Iterable`.
* Unlike iterables, sequences always have a defined order of elements.
*/
-trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Parallelizable[A, parallel.ParSeq[A]] {
+trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equals with Parallelizable[A, parallel.ParSeq[A]] {
+ def seq: Seq[A]
/** Selects an element by its index in the $coll.
*
@@ -114,55 +114,62 @@ trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Pa
/** Finds index of first occurrence of some value in this $coll.
*
- * $mayNotTerminateInf
- *
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
- * @return the index of the first element of this $coll that is equal (wrt `==`)
+ * @return the index of the first element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def indexOf(elem: A): Int
+ * @inheritdoc
+ *
+ * $mayNotTerminateInf
+ *
*/
def indexOf[B >: A](elem: B): Int = indexOf(elem, 0)
/** Finds index of first occurrence of some value in this $coll after or at some start index.
*
- * $mayNotTerminateInf
- *
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
* @param from the start index
- * @return the index `>= from` of the first element of this $coll that is equal (wrt `==`)
+ * @return the index `>= from` of the first element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def indexOf(elem: A, from: Int): Int
+ * @inheritdoc
+ *
+ * $mayNotTerminateInf
+ *
*/
- def indexOf[B >: A](elem: B, from: Int): Int = indexWhere(elem ==, from)
+ def indexOf[B >: A](elem: B, from: Int): Int = indexWhere(elem == _, from)
/** Finds index of last occurrence of some value in this $coll.
*
- * $willNotTerminateInf
- *
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
- * @return the index of the last element of this $coll that is equal (wrt `==`)
+ * @return the index of the last element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def lastIndexOf(elem: A): Int
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ *
*/
- def lastIndexOf[B >: A](elem: B): Int = lastIndexWhere(elem ==)
+ def lastIndexOf[B >: A](elem: B): Int = lastIndexWhere(elem == _)
/** Finds index of last occurrence of some value in this $coll before or at a given end index.
*
* @param elem the element value to search for.
* @param end the end index.
* @tparam B the type of the element `elem`.
- * @return the index `<= end` of the last element of this $coll that is equal (wrt `==`)
+ * @return the index `<= end` of the last element of this $coll that is equal (as determined by `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def lastIndexOf(elem: A, end: Int): Int
+ * @inheritdoc
*/
- def lastIndexOf[B >: A](elem: B, end: Int): Int = lastIndexWhere(elem ==, end)
+ def lastIndexOf[B >: A](elem: B, end: Int): Int = lastIndexWhere(elem == _, end)
/** Finds index of last element satisfying some predicate.
*
@@ -194,10 +201,6 @@ trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Pa
* Builds a new collection by applying a function to all elements of this $coll and
* collecting the results in reversed order.
*
- * $willNotTerminateInf
- *
- * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient.
- *
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
* @tparam That $thatinfo
@@ -206,10 +209,14 @@ trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Pa
* `f` to each element of this $coll and collecting the results in reversed order.
*
* @usecase def reverseMap[B](f: A => B): $Coll[B]
+ * @inheritdoc
*
- * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient.
- * @return a new $coll resulting from applying the given function
- * `f` to each element of this $coll and collecting the results in reversed order.
+ * $willNotTerminateInf
+ *
+ * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient.
+ *
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results in reversed order.
*/
def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
@@ -220,19 +227,15 @@ trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Pa
*/
def startsWith[B](that: GenSeq[B]): Boolean = startsWith(that, 0)
- @bridge
- def startsWith[B](that: Seq[B]): Boolean = startsWith(that: GenSeq[B])
-
/** Tests whether this $coll contains the given sequence at a given index.
*
- * If the both the receiver object, <code>this</code> and
- * the argument, <code>that</code> are infinite sequences
- * this method may not terminate.
+ * '''Note''': If the both the receiver object `this` and the argument
+ * `that` are infinite sequences this method may not terminate.
*
* @param that the sequence to test
* @param offset the index where the sequence is searched.
- * @return `true` if the sequence `that` is contained in this $coll at index `offset`,
- * otherwise `false`.
+ * @return `true` if the sequence `that` is contained in this $coll at
+ * index `offset`, otherwise `false`.
*/
def startsWith[B](that: GenSeq[B], offset: Int): Boolean
@@ -254,10 +257,13 @@ trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Pa
* @return a new $coll consisting of all elements of this $coll
* except that `replaced` elements starting from `from` are replaced
* by `patch`.
+ *
* @usecase def patch(from: Int, that: GenSeq[A], replaced: Int): $Coll[A]
- * @return a new $coll consisting of all elements of this $coll
- * except that `replaced` elements starting from `from` are replaced
- * by `patch`.
+ * @inheritdoc
+ *
+ * @return a new $coll consisting of all elements of this $coll
+ * except that `replaced` elements starting from `from` are replaced
+ * by `patch`.
*/
def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That
@@ -268,18 +274,33 @@ trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Pa
* @tparam That $thatinfo
* @param bf $bfinfo
* @return a new $coll` which is a copy of this $coll with the element at position `index` replaced by `elem`.
+ *
* @usecase def updated(index: Int, elem: A): $Coll[A]
- * @return a copy of this $coll with the element at position `index` replaced by `elem`.
+ * @inheritdoc
+ *
+ * @return a copy of this $coll with the element at position `index` replaced by `elem`.
*/
def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
/** A copy of the $coll with an element prepended.
*
- * Note that :-ending operators are right associative (see example).
- * Also, the original $coll is not modified, so you will want to capture the result.
+ * @param elem the prepended element
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` consisting of `elem` followed
+ * by all elements of this $coll.
*
- * Example:
- * {{{
+ * @usecase def +:(elem: A): $Coll[A]
+ * @inheritdoc
+ *
+ * Note that :-ending operators are right associative (see example).
+ * A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side.
+ *
+ * Also, the original $coll is not modified, so you will want to capture the result.
+ *
+ * Example:
+ * {{{
* scala> val x = LinkedList(1)
* x: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
*
@@ -288,34 +309,31 @@ trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Pa
*
* scala> println(x)
* LinkedList(1)
- * }}}
+ * }}}
*
- * @param elem the prepended element
- * @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
- * @return a new collection of type `That` consisting of `elem` followed
- * by all elements of this $coll.
- * @usecase def +:(elem: A): $Coll[A]
- * @return a new $coll consisting of `elem` followed
- * by all elements of this $coll.
+ * @return a new $coll consisting of `elem` followed
+ * by all elements of this $coll.
*/
def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
/** A copy of this $coll with an element appended.
*
- * $willNotTerminateInf
+ * A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side.
+ *
* @param elem the appended element
* @tparam B the element type of the returned $coll.
* @tparam That $thatinfo
* @param bf $bfinfo
* @return a new collection of type `That` consisting of
* all elements of this $coll followed by `elem`.
+ *
* @usecase def :+(elem: A): $Coll[A]
- * @return a new $coll consisting of
- * all elements of this $coll followed by `elem`.
- * @example
- * {{{
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ *
+ * Example:
+ * {{{
* scala> import scala.collection.mutable.LinkedList
* import scala.collection.mutable.LinkedList
*
@@ -327,7 +345,10 @@ trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Pa
*
* scala> println(a)
* LinkedList(1)
- * }}}
+ * }}}
+ *
+ * @return a new $coll consisting of
+ * all elements of this $coll followed by `elem`.
*/
def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
@@ -342,9 +363,11 @@ trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Pa
* all elements of this $coll followed by the minimal number of occurrences of `elem` so
* that the resulting collection has a length of at least `len`.
* @usecase def padTo(len: Int, elem: A): $Coll[A]
- * @return a new $coll consisting of
- * all elements of this $coll followed by the minimal number of occurrences of `elem` so
- * that the resulting $coll has a length of at least `len`.
+ * @inheritdoc
+ *
+ * @return a new $coll consisting of
+ * all elements of this $coll followed by the minimal number of occurrences of `elem` so
+ * that the resulting $coll has a length of at least `len`.
*/
def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
@@ -364,13 +387,6 @@ trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Pa
/** Produces a new sequence which contains all elements of this $coll and also all elements of
* a given sequence. `xs union ys` is equivalent to `xs ++ ys`.
- * $willNotTerminateInf
- *
- * Another way to express this
- * is that `xs union ys` computes the order-presevring multi-set union of `xs` and `ys`.
- * `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets.
- *
- * $willNotTerminateInf
*
* @param that the sequence to add.
* @tparam B the element type of the returned $coll.
@@ -378,55 +394,64 @@ trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Pa
* @param bf $bfinfo
* @return a new collection of type `That` which contains all elements of this $coll
* followed by all elements of `that`.
+ *
* @usecase def union(that: GenSeq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
+ * @inheritdoc
+ *
+ * Another way to express this
+ * is that `xs union ys` computes the order-presevring multi-set union of `xs` and `ys`.
+ * `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets.
+ *
+ * $willNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
*/
def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = this ++ that
- @bridge
- def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That =
- union(that: GenSeq[B])(bf)
-
/** Computes the multiset difference between this $coll and another sequence.
- * $willNotTerminateInf
*
* @param that the sequence of elements to remove
* @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
* @return a new collection of type `That` which contains all elements of this $coll
* except some of occurrences of elements that also appear in `that`.
* If an element value `x` appears
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
* part of the result, but any following occurrences will.
+ *
* @usecase def diff(that: GenSeq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * except some of occurrences of elements that also appear in `that`.
- * If an element value `x` appears
- * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
- * part of the result, but any following occurrences will.
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * except some of occurrences of elements that also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
+ * part of the result, but any following occurrences will.
*/
def diff[B >: A](that: GenSeq[B]): Repr
/** Computes the multiset intersection between this $coll and another sequence.
- * $mayNotTerminateInf
*
* @param that the sequence of elements to intersect with.
* @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
* @return a new collection of type `That` which contains all elements of this $coll
* which also appear in `that`.
* If an element value `x` appears
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
* in the result, but any following occurrences will be omitted.
+ *
* @usecase def intersect(that: GenSeq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * which also appear in `that`.
- * If an element value `x` appears
- * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
- * in the result, but any following occurrences will be omitted.
+ * @inheritdoc
+ *
+ * $mayNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
*/
def intersect[B >: A](that: GenSeq[B]): Repr
@@ -440,11 +465,7 @@ trait GenSeqLike[+A, +Repr] extends GenIterableLike[A, Repr] with Equals with Pa
/** Hashcodes for $Coll produce a value from the hashcodes of all the
* elements of the $coll.
*/
- override def hashCode() = {
- val h = new util.MurmurHash[A](Seq.hashSeed)
- seq.foreach(h)
- h.hash
- }
+ override def hashCode()= scala.util.hashing.MurmurHash3.seqHash(seq)
/** The equals method for arbitrary sequences. Compares this sequence to
* some other object.
diff --git a/src/library/scala/collection/GenSeqView.scala b/src/library/scala/collection/GenSeqView.scala
index c18c656..92c8b77 100644
--- a/src/library/scala/collection/GenSeqView.scala
+++ b/src/library/scala/collection/GenSeqView.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/GenSeqViewLike.scala b/src/library/scala/collection/GenSeqViewLike.scala
index 2f06a52..5160021 100644
--- a/src/library/scala/collection/GenSeqViewLike.scala
+++ b/src/library/scala/collection/GenSeqViewLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/GenSet.scala b/src/library/scala/collection/GenSet.scala
index fdf6d1e..832177b 100644
--- a/src/library/scala/collection/GenSet.scala
+++ b/src/library/scala/collection/GenSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -31,7 +31,7 @@ extends GenSetLike[A, GenSet[A]]
object GenSet extends GenTraversableFactory[GenSet] {
- implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A] = Set.newBuilder
}
diff --git a/src/library/scala/collection/GenSetLike.scala b/src/library/scala/collection/GenSetLike.scala
index 2fc94c2..f22a7c8 100644
--- a/src/library/scala/collection/GenSetLike.scala
+++ b/src/library/scala/collection/GenSetLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,7 +8,6 @@
package scala.collection
-import annotation.bridge
/** A template trait for sets which may possibly
* have their operations implemented in parallel.
@@ -51,9 +50,6 @@ extends GenIterableLike[A, Repr]
*/
def intersect(that: GenSet[A]): Repr = this filter that
- @bridge
- def intersect(that: Set[A]): Repr = intersect(that: GenSet[A])
-
/** Computes the intersection between this set and another set.
*
* '''Note:''' Same as `intersect`.
@@ -63,9 +59,6 @@ extends GenIterableLike[A, Repr]
*/
def &(that: GenSet[A]): Repr = this intersect that
- @bridge
- def &(that: Set[A]): Repr = &(that: GenSet[A])
-
/** Computes the union between of set and another set.
*
* @param that the set to form the union with.
@@ -83,9 +76,6 @@ extends GenIterableLike[A, Repr]
*/
def | (that: GenSet[A]): Repr = this union that
- @bridge
- def | (that: Set[A]): Repr = | (that: GenSet[A])
-
/** Computes the difference of this set and another set.
*
* @param that the set of elements to exclude.
@@ -103,9 +93,6 @@ extends GenIterableLike[A, Repr]
*/
def &~(that: GenSet[A]): Repr = this diff that
- @bridge
- def &~(that: Set[A]): Repr = &~(that: GenSet[A])
-
/** Tests whether this set is a subset of another set.
*
* @param that the set to test.
@@ -114,9 +101,6 @@ extends GenIterableLike[A, Repr]
*/
def subsetOf(that: GenSet[A]): Boolean = this forall that
- @bridge
- def subsetOf(that: Set[A]): Boolean = subsetOf(that: GenSet[A])
-
/** Compares this set with another object for equality.
*
* '''Note:''' This operation contains an unchecked cast: if `that`
@@ -143,6 +127,5 @@ extends GenIterableLike[A, Repr]
// Calling map on a set drops duplicates: any hashcode collisions would
// then be dropped before they can be added.
// Hash should be symmetric in set entries, but without trivial collisions.
- override def hashCode() = util.MurmurHash.symmetricHash(seq, Set.hashSeed)
-
+ override def hashCode()= scala.util.hashing.MurmurHash3.setHash(seq)
}
diff --git a/src/library/scala/collection/GenTraversable.scala b/src/library/scala/collection/GenTraversable.scala
index 8e6ab50..3db2dd7 100644
--- a/src/library/scala/collection/GenTraversable.scala
+++ b/src/library/scala/collection/GenTraversable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -32,7 +32,7 @@ extends GenTraversableLike[A, GenTraversable[A]]
object GenTraversable extends GenTraversableFactory[GenTraversable] {
- implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A] = Traversable.newBuilder
}
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index cf7214d..46134c9 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,7 @@ package scala.collection
import generic._
-import annotation.migration
+import scala.annotation.migration
/** A template trait for all traversable collections upon which operations
@@ -26,10 +26,12 @@ import annotation.migration
* and the new element type `B`.
* @define orderDependent
*
- * Note: might return different results for different runs, unless the underlying collection type is ordered.
+ * Note: might return different results for different runs, unless the
+ * underlying collection type is ordered.
* @define orderDependentFold
*
- * Note: might return different results for different runs, unless the underlying collection type is ordered.
+ * Note: might return different results for different runs, unless the
+ * underlying collection type is ordered.
* or the operator is associative and commutative.
* @define mayNotTerminateInf
*
@@ -41,7 +43,7 @@ import annotation.migration
* @define traversableInfo
* This is a base trait of all kinds of Scala collections.
*
- * @define Coll GenTraversable
+ * @define Coll `GenTraversable`
* @define coll general collection
* @define collectExample
* @tparam A the collection element type.
@@ -51,18 +53,30 @@ import annotation.migration
* @author Aleksandar Prokopec
* @since 2.9
*/
-trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Parallelizable[A, parallel.ParIterable[A]] {
+trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with Parallelizable[A, parallel.ParIterable[A]] {
def repr: Repr
def size: Int
+ /** Selects the first element of this $coll.
+ * $orderDependent
+ * @return the first element of this $coll.
+ * @throws `NoSuchElementException` if the $coll is empty.
+ */
def head: A
+ /** Optionally selects the first element.
+ * $orderDependent
+ * @return the first element of this $coll if it is nonempty,
+ * `None` if it is empty.
+ */
+ def headOption: Option[A]
+
/** Tests whether this $coll can be repeatedly traversed.
* @return `true`
*/
- final def isTraversableAgain = true
+ def isTraversableAgain: Boolean
/** Selects all elements except the first.
* $orderDependent
@@ -70,10 +84,29 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
* except the first one.
* @throws `UnsupportedOperationException` if the $coll is empty.
*/
- def tail: Repr = {
- if (isEmpty) throw new UnsupportedOperationException("empty.tail")
- drop(1)
- }
+ def tail: Repr
+
+ /** Selects the last element.
+ * $orderDependent
+ * @return The last element of this $coll.
+ * @throws NoSuchElementException If the $coll is empty.
+ */
+ def last: A
+
+ /** Optionally selects the last element.
+ * $orderDependent
+ * @return the last element of this $coll$ if it is nonempty,
+ * `None` if it is empty.
+ */
+ def lastOption: Option[A]
+
+ /** Selects all elements except the last.
+ * $orderDependent
+ * @return a $coll consisting of all elements of this $coll
+ * except the last one.
+ * @throws `UnsupportedOperationException` if the $coll is empty.
+ */
+ def init: Repr
/** Computes a prefix scan of the elements of the collection.
*
@@ -89,7 +122,7 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
*/
def scan[B >: A, That](z: B)(op: (B, B) => B)(implicit cbf: CanBuildFrom[Repr, B, That]): That
- /** Produces a collection containing cummulative results of applying the
+ /** Produces a collection containing cumulative results of applying the
* operator going left to right.
*
* $willNotTerminateInf
@@ -104,8 +137,8 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
*/
def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
- /** Produces a collection containing cummulative results of applying the operator going right to left.
- * The head of the collection is the last cummulative result.
+ /** Produces a collection containing cumulative results of applying the operator going right to left.
+ * The head of the collection is the last cumulative result.
* $willNotTerminateInf
* $orderDependent
*
@@ -134,6 +167,7 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
* but this is not necessary.
*
* @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
*/
def foreach[U](f: A => U): Unit
@@ -147,17 +181,15 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
* `f` to each element of this $coll and collecting the results.
*
* @usecase def map[B](f: A => B): $Coll[B]
- *
- * @return a new $coll resulting from applying the given function
- * `f` to each element of this $coll and collecting the results.
+ * @inheritdoc
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results.
*/
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
/** Builds a new collection by applying a partial function to all elements of this $coll
* on which the function is defined.
*
- * $collectExample
- *
* @param pf the partial function which filters and maps the $coll.
* @tparam B the element type of the returned collection.
* @tparam That $thatinfo
@@ -167,36 +199,18 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
* The order of the elements is preserved.
*
* @usecase def collect[B](pf: PartialFunction[A, B]): $Coll[B]
+ * @inheritdoc
*
- * @return a new $coll resulting from applying the given partial function
- * `pf` to each element on which it is defined and collecting the results.
- * The order of the elements is preserved.
+ * $collectExample
+ *
+ * @return a new $coll resulting from applying the given partial function
+ * `pf` to each element on which it is defined and collecting the results.
+ * The order of the elements is preserved.
*/
def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That
/** Builds a new collection by applying a function to all elements of this $coll
- * and using the elements of the resulting collections. For example:
- *
- * {{{
- * def getWords(lines: Seq[String]): Seq[String] = lines flatMap (line => line split "\\W+")
- * }}}
- *
- * The type of the resulting collection is guided by the static type of $coll. This might
- * cause unexpected results sometimes. For example:
- *
- * {{{
- * // lettersOf will return a Seq[Char] of likely repeated letters, instead of a Set
- * def lettersOf(words: Seq[String]) = words flatMap (word => word.toSet)
- *
- * // lettersOf will return a Set[Char], not a Seq
- * def lettersOf(words: Seq[String]) = words.toSet flatMap (word => word.toSeq)
- *
- * // xs will be a an Iterable[Int]
- * val xs = Map("a" -> List(11,111), "b" -> List(22,222)).flatMap(_._2)
- *
- * // ys will be a Map[Int, Int]
- * val ys = Map("a" -> List(1 -> 11,1 -> 111), "b" -> List(2 -> 22,2 -> 222)).flatMap(_._2)
- * }}}
+ * and using the elements of the resulting collections.
*
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
@@ -206,33 +220,39 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
* `f` to each element of this $coll and concatenating the results.
*
* @usecase def flatMap[B](f: A => GenTraversableOnce[B]): $Coll[B]
+ * @inheritdoc
*
- * @return a new $coll resulting from applying the given collection-valued function
- * `f` to each element of this $coll and concatenating the results.
- */
- def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
-
- /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the
- * right hand operand. The element type of the $coll is the most specific superclass encompassing
- * the element types of the two operands (see example).
+ * For example:
*
- * Example:
- * {{{
- * scala> val a = LinkedList(1)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
+ * {{{
+ * def getWords(lines: Seq[String]): Seq[String] = lines flatMap (line => line split "\\W+")
+ * }}}
*
- * scala> val b = LinkedList(2)
- * b: scala.collection.mutable.LinkedList[Int] = LinkedList(2)
+ * The type of the resulting collection is guided by the static type of $coll. This might
+ * cause unexpected results sometimes. For example:
*
- * scala> val c = a ++ b
- * c: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
+ * {{{
+ * // lettersOf will return a Seq[Char] of likely repeated letters, instead of a Set
+ * def lettersOf(words: Seq[String]) = words flatMap (word => word.toSet)
*
- * scala> val d = LinkedList('a')
- * d: scala.collection.mutable.LinkedList[Char] = LinkedList(a)
+ * // lettersOf will return a Set[Char], not a Seq
+ * def lettersOf(words: Seq[String]) = words.toSet flatMap (word => word.toSeq)
*
- * scala> val e = c ++ d
- * e: scala.collection.mutable.LinkedList[AnyVal] = LinkedList(1, 2, a)
- * }}}
+ * // xs will be a an Iterable[Int]
+ * val xs = Map("a" -> List(11,111), "b" -> List(22,222)).flatMap(_._2)
+ *
+ * // ys will be a Map[Int, Int]
+ * val ys = Map("a" -> List(1 -> 11,1 -> 111), "b" -> List(2 -> 22,2 -> 222)).flatMap(_._2)
+ * }}}
+ *
+ * @return a new $coll resulting from applying the given collection-valued function
+ * `f` to each element of this $coll and concatenating the results.
+ */
+ def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
+
+ /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the
+ * right hand operand. The element type of the $coll is the most specific superclass encompassing
+ * the element types of the two operands.
*
* @param that the traversable to append.
* @tparam B the element type of the returned collection.
@@ -242,15 +262,34 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
* of this $coll followed by all elements of `that`.
*
* @usecase def ++[B](that: GenTraversableOnce[B]): $Coll[B]
+ * @inheritdoc
+ *
+ * Example:
+ * {{{
+ * scala> val a = LinkedList(1)
+ * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
+ *
+ * scala> val b = LinkedList(2)
+ * b: scala.collection.mutable.LinkedList[Int] = LinkedList(2)
+ *
+ * scala> val c = a ++ b
+ * c: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
+ *
+ * scala> val d = LinkedList('a')
+ * d: scala.collection.mutable.LinkedList[Char] = LinkedList(a)
+ *
+ * scala> val e = c ++ d
+ * e: scala.collection.mutable.LinkedList[AnyVal] = LinkedList(1, 2, a)
+ * }}}
*
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
*/
def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
/** Selects all elements of this $coll which satisfy a predicate.
*
- * @param p the predicate used to test elements.
+ * @param pred the predicate used to test elements.
* @return a new $coll consisting of all elements of this $coll that satisfy the given
* predicate `p`. Their order may not be preserved.
*/
@@ -258,7 +297,7 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
/** Selects all elements of this $coll which do not satisfy a predicate.
*
- * @param p the predicate used to test elements.
+ * @param pred the predicate used to test elements.
* @return a new $coll consisting of all elements of this $coll that do not satisfy the given
* predicate `p`. Their order may not be preserved.
*/
@@ -266,11 +305,11 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
/** Partitions this $coll in two ${coll}s according to a predicate.
*
- * @param p the predicate on which to partition.
- * @return a pair of ${coll}s: the first $coll consists of all elements that
- * satisfy the predicate `p` and the second $coll consists of all elements
- * that don't. The relative order of the elements in the resulting ${coll}s
- * may not be preserved.
+ * @param pred the predicate on which to partition.
+ * @return a pair of ${coll}s: the first $coll consists of all elements that
+ * satisfy the predicate `p` and the second $coll consists of all elements
+ * that don't. The relative order of the elements in the resulting ${coll}s
+ * may not be preserved.
*/
def partition(pred: A => Boolean): (Repr, Repr)
@@ -294,7 +333,7 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
/** Selects first ''n'' elements.
* $orderDependent
- * @param n Tt number of elements to take from this $coll.
+ * @param n the number of elements to take from this $coll.
* @return a $coll consisting only of the first `n` elements of this $coll,
* or else the whole $coll, if it has less than `n` elements.
*/
@@ -315,8 +354,8 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
* }}}
* $orderDependent
*
- * @param from the lowest index to include from this $coll.
- * @param until the highest index to EXCLUDE from this $coll.
+ * @param unc_from the lowest index to include from this $coll.
+ * @param unc_until the lowest index to EXCLUDE from this $coll.
* @return a $coll containing the elements greater than or equal to
* index `from` extending up to (but not including) index `until`
* of this $coll.
@@ -336,7 +375,7 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
/** Takes longest prefix of elements that satisfy a predicate.
* $orderDependent
- * @param p The predicate used to test elements.
+ * @param pred The predicate used to test elements.
* @return the longest prefix of this $coll whose elements all satisfy
* the predicate `p`.
*/
@@ -349,7 +388,7 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
* predicate `p` does not cause any side-effects.
* $orderDependent
*
- * @param p the test predicate
+ * @param pred the test predicate
* @return a pair consisting of the longest prefix of this $coll whose
* elements all satisfy `p`, and the rest of this $coll.
*/
@@ -357,7 +396,7 @@ trait GenTraversableLike[+A, +Repr] extends GenTraversableOnce[A] with Paralleli
/** Drops longest prefix of elements that satisfy a predicate.
* $orderDependent
- * @param p The predicate used to test elements.
+ * @param pred The predicate used to test elements.
* @return the longest suffix of this $coll whose first element
* does not satisfy the predicate `p`.
*/
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index 3e42e78..093db2a 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,13 +8,18 @@
package scala.collection
+import scala.reflect.ClassTag
+import scala.collection.generic.CanBuildFrom
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
+import scala.language.higherKinds
+
/** A template trait for all traversable-once objects which may be
* traversed in parallel.
*
* Methods in this trait are either abstract or can be implemented in terms
* of other methods.
*
- * @define Coll GenTraversableOnce
+ * @define Coll `GenTraversableOnce`
* @define coll collection or iterator
* @define possiblyparinfo
* This trait may possibly have operations implemented in parallel.
@@ -41,7 +46,7 @@ package scala.collection
* @author Aleksandar Prokopec
* @since 2.9
*/
-trait GenTraversableOnce[+A] {
+trait GenTraversableOnce[+A] extends Any {
def foreach[U](f: A => U): Unit
@@ -124,6 +129,7 @@ trait GenTraversableOnce[+A] {
* scala> val b = (a /:\ 5)(_+_)
* b: Int = 15
* }}}*/
+ @deprecated("use fold instead", "2.10.0")
def /:\[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = fold(z)(op)
/** Applies a binary operator to a start value and all elements of this $coll,
@@ -155,7 +161,7 @@ trait GenTraversableOnce[+A] {
* @return the result of inserting `op` between consecutive elements of this $coll,
* going left to right with the start value `z` on the left:
* {{{
- * op(...op(op(z, x1), x2), ..., xn)
+ * op(...op(op(z, x_1), x_2), ..., x_n)
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
*/
@@ -190,7 +196,7 @@ trait GenTraversableOnce[+A] {
* @return the result of inserting `op` between consecutive elements of this $coll,
* going right to left with the start value `z` on the right:
* {{{
- * op(x1, op(x2, ... op(xn, z)...))
+ * op(x_1, op(x_2, ... op(x_n, z)...))
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
*/
@@ -208,7 +214,7 @@ trait GenTraversableOnce[+A] {
* @return the result of inserting `op` between consecutive elements of this $coll,
* going left to right with the start value `z` on the left:
* {{{
- * op(...op(z, x1), x2, ..., xn)
+ * op(...op(z, x_1), x_2, ..., x_n)
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
*/
@@ -225,7 +231,7 @@ trait GenTraversableOnce[+A] {
* @return the result of inserting `op` between consecutive elements of this $coll,
* going right to left with the start value `z` on the right:
* {{{
- * op(x1, op(x2, ... op(xn, z)...))
+ * op(x_1, op(x_2, ... op(x_n, z)...))
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
*/
@@ -270,7 +276,7 @@ trait GenTraversableOnce[+A] {
* @return the result of inserting `op` between consecutive elements of this $coll,
* going right to left:
* {{{
- * op(x,,1,,, op(x,,2,,, ..., op(x,,n-1,,, x,,n,,)...))
+ * op(x_1, op(x_2, ..., op(x_{n-1}, x_n)...))
* }}}
* where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
* @throws `UnsupportedOperationException` if this $coll is empty.
@@ -311,15 +317,16 @@ trait GenTraversableOnce[+A] {
*
* @param num an implicit parameter defining a set of numeric operations
* which includes the `+` operator to be used in forming the sum.
- * @tparam B the result type of the `+` operator.
+ * @tparam A1 the result type of the `+` operator.
* @return the sum of all elements of this $coll with respect to the `+` operator in `num`.
*
* @usecase def sum: A
+ * @inheritdoc
*
- * @return the sum of all elements in this $coll of numbers of type `Int`.
- * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
- * can be used as element type of the $coll and as result type of `sum`.
- * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
+ * @return the sum of all elements in this $coll of numbers of type `Int`.
+ * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
+ * can be used as element type of the $coll and as result type of `sum`.
+ * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
*
*/
def sum[A1 >: A](implicit num: Numeric[A1]): A1
@@ -328,37 +335,42 @@ trait GenTraversableOnce[+A] {
*
* @param num an implicit parameter defining a set of numeric operations
* which includes the `*` operator to be used in forming the product.
- * @tparam B the result type of the `*` operator.
+ * @tparam A1 the result type of the `*` operator.
* @return the product of all elements of this $coll with respect to the `*` operator in `num`.
*
* @usecase def product: A
+ * @inheritdoc
*
- * @return the product of all elements in this $coll of numbers of type `Int`.
- * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
- * can be used as element type of the $coll and as result type of `product`.
- * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
+ * @return the product of all elements in this $coll of numbers of type `Int`.
+ * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
+ * can be used as element type of the $coll and as result type of `product`.
+ * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
*/
def product[A1 >: A](implicit num: Numeric[A1]): A1
/** Finds the smallest element.
*
- * @param cmp An ordering to be used for comparing elements.
- * @tparam B The type over which the ordering is defined.
+ * @param ord An ordering to be used for comparing elements.
+ * @tparam A1 The type over which the ordering is defined.
* @return the smallest element of this $coll with respect to the ordering `cmp`.
*
* @usecase def min: A
- * @return the smallest element of this $coll
+ * @inheritdoc
+ *
+ * @return the smallest element of this $coll
*/
def min[A1 >: A](implicit ord: Ordering[A1]): A
/** Finds the largest element.
*
- * @param cmp An ordering to be used for comparing elements.
- * @tparam B The type over which the ordering is defined.
+ * @param ord An ordering to be used for comparing elements.
+ * @tparam A1 The type over which the ordering is defined.
* @return the largest element of this $coll with respect to the ordering `cmp`.
*
* @usecase def max: A
- * @return the largest element of this $coll.
+ * @inheritdoc
+ *
+ * @return the largest element of this $coll.
*/
def max[A1 >: A](implicit ord: Ordering[A1]): A
@@ -375,9 +387,9 @@ trait GenTraversableOnce[+A] {
* $mayNotTerminateInf
* $orderDependent
*
- * @param p the predicate used to test elements.
- * @return an option value containing the first element in the $coll
- * that satisfies `p`, or `None` if none exists.
+ * @param pred the predicate used to test elements.
+ * @return an option value containing the first element in the $coll
+ * that satisfies `p`, or `None` if none exists.
*/
def find(pred: A => Boolean): Option[A]
@@ -386,12 +398,13 @@ trait GenTraversableOnce[+A] {
* Copying will stop once either the end of the current $coll is reached,
* or the end of the array is reached.
*
- * $willNotTerminateInf
- *
* @param xs the array to fill.
* @tparam B the type of the elements of the array.
*
* @usecase def copyToArray(xs: Array[A]): Unit
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B]): Unit
@@ -400,13 +413,14 @@ trait GenTraversableOnce[+A] {
* Copying will stop once either the end of the current $coll is reached,
* or the end of the array is reached.
*
- * $willNotTerminateInf
- *
* @param xs the array to fill.
* @param start the starting index.
* @tparam B the type of the elements of the array.
*
* @usecase def copyToArray(xs: Array[A], start: Int): Unit
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B], start: Int): Unit
@@ -449,17 +463,20 @@ trait GenTraversableOnce[+A] {
def mkString: String
/** Converts this $coll to an array.
- * $willNotTerminateInf
*
- * @tparam B the type of the elements of the array. A `ClassManifest` for
- * this type must be available.
- * @return an array containing all elements of this $coll.
+ * @tparam A1 the type of the elements of the array. An `ClassTag` for
+ * this type must be available.
+ * @return an array containing all elements of this $coll.
*
* @usecase def toArray: Array[A]
- * @return an array containing all elements of this $coll.
- * A `ClassManifest` must be available for the element type of this $coll.
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ *
+ * @return an array containing all elements of this $coll.
+ * An `ClassTag` must be available for the element type of this $coll.
*/
- def toArray[A1 >: A: ClassManifest]: Array[A1]
+ def toArray[A1 >: A: ClassTag]: Array[A1]
/** Converts this $coll to a list.
* $willNotTerminateInf
@@ -471,7 +488,7 @@ trait GenTraversableOnce[+A] {
* $willNotTerminateInf
* @return an indexed sequence containing all elements of this $coll.
*/
- def toIndexedSeq[A1 >: A]: immutable.IndexedSeq[A1]
+ def toIndexedSeq: immutable.IndexedSeq[A]
/** Converts this $coll to a stream.
* $willNotTerminateInf
@@ -490,7 +507,7 @@ trait GenTraversableOnce[+A] {
* $willNotTerminateInf
* @return a buffer containing all elements of this $coll.
*/
- def toBuffer[A1 >: A]: collection.mutable.Buffer[A1]
+ def toBuffer[A1 >: A]: scala.collection.mutable.Buffer[A1]
/** Converts this $coll to an unspecified Traversable. Will return
* the same collection if this instance is already Traversable.
@@ -529,11 +546,30 @@ trait GenTraversableOnce[+A] {
* pair in the map. Duplicate keys will be overwritten by later keys:
* if this is an unordered collection, which key is in the resulting map
* is undefined.
- * $willNotTerminateInf
* @return a map containing all elements of this $coll.
+ *
* @usecase def toMap[T, U]: Map[T, U]
- * @return a map of type `immutable.Map[T, U]`
- * containing all key/value pairs of type `(T, U)` of this $coll.
+ * @inheritdoc
+ * $willNotTerminateInf
+ * @return a map of type `immutable.Map[T, U]`
+ * containing all key/value pairs of type `(T, U)` of this $coll.
*/
def toMap[K, V](implicit ev: A <:< (K, V)): GenMap[K, V]
+
+ /** Converts this $coll to a Vector.
+ * $willNotTerminateInf
+ * @return a vector containing all elements of this $coll.
+ */
+ def toVector: Vector[A]
+
+ /** Converts this $coll into another by copying all elements.
+ * @tparam Col The collection type to build.
+ * @return a new collection containing all elements of this $coll.
+ *
+ * @usecase def to[Col[_]]: Col[A]
+ * @inheritdoc
+ * $willNotTerminateInf
+ * @return a new collection containing all elements of this $coll.
+ */
+ def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV]
}
diff --git a/src/library/scala/collection/GenTraversableView.scala b/src/library/scala/collection/GenTraversableView.scala
index e295955..cceb068 100644
--- a/src/library/scala/collection/GenTraversableView.scala
+++ b/src/library/scala/collection/GenTraversableView.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/GenTraversableViewLike.scala b/src/library/scala/collection/GenTraversableViewLike.scala
index 78e0773..77fe080 100644
--- a/src/library/scala/collection/GenTraversableViewLike.scala
+++ b/src/library/scala/collection/GenTraversableViewLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala
index d7a68d3..2de0043 100644
--- a/src/library/scala/collection/IndexedSeq.scala
+++ b/src/library/scala/collection/IndexedSeq.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -20,15 +20,22 @@ trait IndexedSeq[+A] extends Seq[A]
with GenericTraversableTemplate[A, IndexedSeq]
with IndexedSeqLike[A, IndexedSeq[A]] {
override def companion: GenericCompanion[IndexedSeq] = IndexedSeq
+ override def seq: IndexedSeq[A] = this
}
/** $factoryInfo
* The current default implementation of a $Coll is a `Vector`.
* @define coll indexed sequence
- * @define Coll IndexedSeq
+ * @define Coll `IndexedSeq`
*/
object IndexedSeq extends SeqFactory[IndexedSeq] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = new GenericCanBuildFrom[A]
+ // A single CBF which can be checked against to identify
+ // an indexed collection type.
+ override lazy val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
+ override def apply() = newBuilder[Nothing]
+ }
def newBuilder[A]: Builder[A, IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] =
+ ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
}
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index 7e94820..9d0e9cb 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
import generic._
@@ -28,7 +26,7 @@ import scala.annotation.tailrec
* access and length computation. They are defined in terms of abstract methods
* `apply` for indexing and `length`.
*
- * Indexed sequences do not add any new methods wrt `Seq`, but promise
+ * Indexed sequences do not add any new methods to `Seq`, but promise
* efficient implementations of random access patterns.
*
* @tparam A the element type of the $coll
@@ -39,9 +37,12 @@ import scala.annotation.tailrec
* @define willNotTerminateInf
* @define mayNotTerminateInf
*/
-trait IndexedSeqLike[+A, +Repr] extends SeqLike[A, Repr] {
+trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
self =>
+ def seq: IndexedSeq[A]
+ override def hashCode()= scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "indexedSeqHash" ?
+
override protected[this] def thisCollection: IndexedSeq[A] = this.asInstanceOf[IndexedSeq[A]]
override protected[this] def toCollection(repr: Repr): IndexedSeq[A] = repr.asInstanceOf[IndexedSeq[A]]
@@ -51,14 +52,14 @@ trait IndexedSeqLike[+A, +Repr] extends SeqLike[A, Repr] {
*/
// pre: start >= 0, end <= self.length
@SerialVersionUID(1756321872811029277L)
- protected class Elements(start: Int, end: Int) extends BufferedIterator[A] with Serializable {
+ protected class Elements(start: Int, end: Int) extends AbstractIterator[A] with BufferedIterator[A] with Serializable {
private def initialSize = if (end <= start) 0 else end - start
private var index = start
private def available = (end - index) max 0
def hasNext: Boolean = index < end
- def next: A = {
+ def next(): A = {
if (index >= end)
Iterator.empty.next
@@ -89,11 +90,10 @@ trait IndexedSeqLike[+A, +Repr] extends SeqLike[A, Repr] {
override /*IterableLike*/
def iterator: Iterator[A] = new Elements(0, length)
- /** Overridden for efficiency */
+ /* Overridden for efficiency */
override def toBuffer[A1 >: A]: mutable.Buffer[A1] = {
val result = new mutable.ArrayBuffer[A1](size)
copyToBuffer(result)
result
}
}
-
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
old mode 100644
new mode 100755
index b9a60ae..09c4b14
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -1,14 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
import generic._
import mutable.ArrayBuffer
@@ -22,7 +21,7 @@ import scala.annotation.tailrec
* @define willNotTerminateInf
* @define mayNotTerminateInf
*/
-trait IndexedSeqOptimized[+A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
+trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { self =>
override /*IterableLike*/
def isEmpty: Boolean = { length == 0 }
@@ -104,7 +103,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
override /*IterableLike*/
def slice(from: Int, until: Int): Repr = {
val lo = math.max(from, 0)
- val hi = math.min(until, length)
+ val hi = math.min(math.max(until, 0), length)
val elems = math.max(hi - lo, 0)
val b = newBuilder
b.sizeHint(elems)
@@ -219,10 +218,10 @@ trait IndexedSeqOptimized[+A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
}
override /*SeqLike*/
- def reverseIterator: Iterator[A] = new Iterator[A] {
+ def reverseIterator: Iterator[A] = new AbstractIterator[A] {
private var i = self.length
def hasNext: Boolean = 0 < i
- def next: A =
+ def next(): A =
if (0 < i) {
i -= 1
self(i)
diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala
index 03a8302..5b73d72 100644
--- a/src/library/scala/collection/Iterable.scala
+++ b/src/library/scala/collection/Iterable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -40,23 +40,15 @@ trait Iterable[+A] extends Traversable[A]
/** $factoryInfo
* The current default implementation of a $Coll is a `Vector`.
* @define coll iterable collection
- * @define Coll Iterable
+ * @define Coll `Iterable`
*/
object Iterable extends TraversableFactory[Iterable] {
/** $genericCanBuildFromInfo */
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, Iterable[A]] = immutable.Iterable.newBuilder[A]
-
- /** The minimum element of a non-empty sequence of ordered elements */
- @deprecated("use <seq>.min instead, where <seq> is the sequence for which you want to compute the minimum", "2.8.0")
- def min[A](seq: Iterable[A])(implicit ord: Ordering[A]): A = seq.min
-
- /** The maximum element of a non-empty sequence of ordered elements */
- @deprecated("use <seq>.max instead, where <seq> is the sequence for which you want to compute the maximum", "2.8.0")
- def max[A](seq: Iterable[A])(implicit ord: Ordering[A]): A = seq.max
-
- @deprecated("use View instead", "2.8.0")
- type Projection[A] = IterableView[A, Coll]
}
+
+/** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */
+private[scala] abstract class AbstractIterable[+A] extends AbstractTraversable[A] with Iterable[A]
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 55c12b8..540bd84 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -1,18 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.collection
-
+package scala
+package collection
import generic._
import immutable.{ List, Stream }
-import annotation.unchecked.uncheckedVariance
-import annotation.bridge
+import scala.annotation.unchecked.uncheckedVariance
/** A template trait for iterable collections of type `Iterable[A]`.
* $iterableInfo
@@ -49,14 +48,13 @@ import annotation.bridge
* @define Coll Iterable
* @define coll iterable collection
*/
-trait IterableLike[+A, +Repr] extends Equals with TraversableLike[A, Repr] with GenIterableLike[A, Repr] {
+trait IterableLike[+A, +Repr] extends Any with Equals with TraversableLike[A, Repr] with GenIterableLike[A, Repr] {
self =>
override protected[this] def thisCollection: Iterable[A] = this.asInstanceOf[Iterable[A]]
override protected[this] def toCollection(repr: Repr): Iterable[A] = repr.asInstanceOf[Iterable[A]]
- /** Creates a new iterator over all elements contained in this
- * iterable object.
+ /** Creates a new iterator over all elements contained in this iterable object.
*
* @return the new iterator
*/
@@ -68,6 +66,7 @@ self =>
* Subclasses should re-implement this method if a more efficient implementation exists.
*
* @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
*/
def foreach[U](f: A => U): Unit =
iterator.foreach(f)
@@ -86,6 +85,8 @@ self =>
iterator.reduceRight(op)
override /*TraversableLike*/ def toIterable: Iterable[A] =
thisCollection
+ override /*TraversableLike*/ def toIterator: Iterator[A] =
+ iterator
override /*TraversableLike*/ def head: A =
iterator.next
@@ -132,7 +133,7 @@ self =>
it.next
i += 1
}
- b ++= it result
+ (b ++= it).result
}
override /*TraversableLike*/ def takeWhile(p: A => Boolean): Repr = {
@@ -147,7 +148,7 @@ self =>
}
/** Partitions elements in fixed size ${coll}s.
- * @see Iterator#grouped
+ * @see [[scala.collection.Iterator]], method `grouped`
*
* @param size the number of elements per group
* @return An iterator producing ${coll}s of size `size`, except the
@@ -162,7 +163,18 @@ self =>
/** Groups elements in fixed size blocks by passing a "sliding window"
* over them (as opposed to partitioning them, as is done in grouped.)
- * @see Iterator#sliding
+ * @see [[scala.collection.Iterator]], method `sliding`
+ *
+ * @param size the number of elements per group
+ * @return An iterator producing ${coll}s of size `size`, except the
+ * last and the only element will be truncated if there are
+ * fewer elements than size.
+ */
+ def sliding(size: Int): Iterator[Repr] = sliding(size, 1)
+
+ /** Groups elements in fixed size blocks by passing a "sliding window"
+ * over them (as opposed to partitioning them, as is done in grouped.)
+ * @see [[scala.collection.Iterator]], method `sliding`
*
* @param size the number of elements per group
* @param step the distance between the first elements of successive
@@ -171,8 +183,7 @@ self =>
* last and the only element will be truncated if there are
* fewer elements than size.
*/
- def sliding[B >: A](size: Int): Iterator[Repr] = sliding(size, 1)
- def sliding[B >: A](size: Int, step: Int): Iterator[Repr] =
+ def sliding(size: Int, step: Int): Iterator[Repr] =
for (xs <- iterator.sliding(size, step)) yield {
val b = newBuilder
b ++= xs
@@ -237,10 +248,6 @@ self =>
b.result
}
- @bridge
- def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That =
- zip(that: GenIterable[B])(bf)
-
def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = {
val b = bf(repr)
val these = this.iterator
@@ -254,10 +261,6 @@ self =>
b.result
}
- @bridge
- def zipAll[B, A1 >: A, That](that: Iterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That =
- zipAll(that: GenIterable[B], thisElem, thatElem)(bf)
-
def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = {
val b = bf(repr)
var i = 0
@@ -278,9 +281,6 @@ self =>
!these.hasNext && !those.hasNext
}
- @bridge
- def sameElements[B >: A](that: Iterable[B]): Boolean = sameElements(that: GenIterable[B])
-
override /*TraversableLike*/ def toStream: Stream[A] = iterator.toStream
/** Method called from equality methods, so that user-defined subclasses can
@@ -297,21 +297,4 @@ self =>
}
override /*TraversableLike*/ def view(from: Int, until: Int) = view.slice(from, until)
-
- @deprecated("use `iterator' instead", "2.8.0")
- def elements = iterator
-
- @deprecated("use `head' instead", "2.8.0") def first: A = head
-
- /** `None` if iterable is empty.
- */
- @deprecated("use `headOption' instead", "2.8.0") def firstOption: Option[A] = headOption
-
- /**
- * returns a projection that can be used to call non-strict `filter`,
- * `map`, and `flatMap` methods that build projections
- * of the collection.
- */
- @deprecated("use `view' instead", "2.8.0")
- def projection = view
}
diff --git a/src/library/scala/collection/IterableProxy.scala b/src/library/scala/collection/IterableProxy.scala
index a05a3cf..2d04192 100644
--- a/src/library/scala/collection/IterableProxy.scala
+++ b/src/library/scala/collection/IterableProxy.scala
@@ -1,21 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
import generic._
-/** <p>
- * This trait implements a proxy for iterable objects. It forwards
- * all calls to a different iterable object.
- * </p>
+/** This trait implements a proxy for iterable objects. It forwards all calls
+ * to a different iterable object.
*
* @author Martin Odersky
* @version 2.8
diff --git a/src/library/scala/collection/IterableProxyLike.scala b/src/library/scala/collection/IterableProxyLike.scala
index 0fe501a..6968a54 100644
--- a/src/library/scala/collection/IterableProxyLike.scala
+++ b/src/library/scala/collection/IterableProxyLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -27,8 +27,8 @@ trait IterableProxyLike[+A, +Repr <: IterableLike[A, Repr] with Iterable[A]]
with TraversableProxyLike[A, Repr] {
override def iterator: Iterator[A] = self.iterator
override def grouped(size: Int): Iterator[Repr] = self.grouped(size)
- override def sliding[B >: A](size: Int): Iterator[Repr] = self.sliding(size)
- override def sliding[B >: A](size: Int, step: Int): Iterator[Repr] = self.sliding(size, step)
+ override def sliding(size: Int): Iterator[Repr] = self.sliding(size)
+ override def sliding(size: Int, step: Int): Iterator[Repr] = self.sliding(size, step)
override def takeRight(n: Int): Repr = self.takeRight(n)
override def dropRight(n: Int): Repr = self.dropRight(n)
override def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zip[A1, B, That](that)(bf)
diff --git a/src/library/scala/collection/IterableView.scala b/src/library/scala/collection/IterableView.scala
index 9ff6762..985556e 100644
--- a/src/library/scala/collection/IterableView.scala
+++ b/src/library/scala/collection/IterableView.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index 70f4519..3a81a34 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,6 +11,7 @@ package scala.collection
import generic._
import TraversableView.NoBuilder
import immutable.Stream
+import scala.language.implicitConversions
/** A template trait for non-strict views of iterable collections.
* $iterableViewInfo
@@ -42,6 +43,9 @@ trait IterableViewLike[+A,
override def toString = viewToString
}
+ /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
+ private[collection] abstract class AbstractTransformed[+B] extends Iterable[B] with super[TraversableViewLike].Transformed[B] with Transformed[B]
+
trait EmptyView extends Transformed[Nothing] with super[TraversableViewLike].EmptyView with super[GenIterableViewLike].EmptyView
trait Forced[B] extends super[TraversableViewLike].Forced[B] with super[GenIterableViewLike].Forced[B] with Transformed[B]
@@ -69,20 +73,20 @@ trait IterableViewLike[+A,
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
- protected def newZipped[B](that: GenIterable[B]): Transformed[(A, B)] = new { val other = that } with Zipped[B]
+ protected def newZipped[B](that: GenIterable[B]): Transformed[(A, B)] = new { val other = that } with AbstractTransformed[(A, B)] with Zipped[B]
protected def newZippedAll[A1 >: A, B](that: GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new {
val other: GenIterable[B] = that
val thisElem = _thisElem
val thatElem = _thatElem
- } with ZippedAll[A1, B]
- protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
- protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
- protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with Mapped[B]
- protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
- protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with Filtered
- protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with Sliced
- protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with DroppedWhile
- protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with TakenWhile
+ } with AbstractTransformed[(A1, B)] with ZippedAll[A1, B]
+ protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
+ protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
+ protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
+ protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
+ protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with AbstractTransformed[A] with Sliced
+ protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with DroppedWhile
+ protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile
// After adding take and drop overrides to IterableLike, these overrides (which do nothing
// but duplicate the implementation in TraversableViewLike) had to be added to prevent the
@@ -110,7 +114,7 @@ trait IterableViewLike[+A,
override def grouped(size: Int): Iterator[This] =
self.iterator grouped size map (x => newForced(x).asInstanceOf[This])
- override def sliding[B >: A](size: Int, step: Int): Iterator[This] =
+ override def sliding(size: Int, step: Int): Iterator[This] =
self.iterator.sliding(size, step) map (x => newForced(x).asInstanceOf[This])
override def stringPrefix = "IterableView"
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 9763974..2bb5bd1 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -1,16 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import mutable.ArrayBuffer
-import annotation.migration
+import scala.annotation.migration
import immutable.Stream
+import scala.collection.generic.CanBuildFrom
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
/** The `Iterator` object provides various functions for creating specialized iterators.
*
@@ -21,8 +24,17 @@ import immutable.Stream
*/
object Iterator {
+ /** With the advent of `TraversableOnce` and `Iterator`, it can be useful to have a builder which
+ * operates on `Iterator`s so they can be treated uniformly along with the collections.
+ * See `scala.util.Random.shuffle` for an example.
+ */
+ implicit def IteratorCanBuildFrom[A] = new TraversableOnce.BufferedCanBuildFrom[A, Iterator] {
+ def bufferToColl[B](coll: ArrayBuffer[B]) = coll.iterator
+ def traversableToColl[B](t: GenTraversable[B]) = t.toIterator
+ }
+
/** The iterator which produces no values. */
- val empty = new Iterator[Nothing] {
+ val empty: Iterator[Nothing] = new AbstractIterator[Nothing] {
def hasNext: Boolean = false
def next(): Nothing = throw new NoSuchElementException("next on empty iterator")
}
@@ -34,7 +46,7 @@ object Iterator {
* @return An iterator which produces `elem` on the first call to `next`,
* and which has no further elements.
*/
- def single[A](elem: A) = new Iterator[A] {
+ def single[A](elem: A): Iterator[A] = new AbstractIterator[A] {
private var hasnext = true
def hasNext: Boolean = hasnext
def next(): A =
@@ -56,7 +68,7 @@ object Iterator {
* @param elem the element computation
* @return An iterator that produces the results of `n` evaluations of `elem`.
*/
- def fill[A](len: Int)(elem: => A) = new Iterator[A] {
+ def fill[A](len: Int)(elem: => A): Iterator[A] = new AbstractIterator[A] {
private var i = 0
def hasNext: Boolean = i < len
def next(): A =
@@ -70,7 +82,7 @@ object Iterator {
* @param f The function computing element values
* @return An iterator that produces the values `f(0), ..., f(n -1)`.
*/
- def tabulate[A](end: Int)(f: Int => A) = new Iterator[A] {
+ def tabulate[A](end: Int)(f: Int => A): Iterator[A] = new AbstractIterator[A] {
private var i = 0
def hasNext: Boolean = i < end
def next(): A =
@@ -93,7 +105,7 @@ object Iterator {
* @param step the increment value of the iterator (must be positive or negative)
* @return the iterator producing values `start, start + step, ...` up to, but excluding `end`
*/
- def range(start: Int, end: Int, step: Int) = new Iterator[Int] {
+ def range(start: Int, end: Int, step: Int): Iterator[Int] = new AbstractIterator[Int] {
if (step == 0) throw new IllegalArgumentException("zero step")
private var i = start
def hasNext: Boolean = (step <= 0 || i < end) && (step >= 0 || i > end)
@@ -108,7 +120,7 @@ object Iterator {
* @param f the function that's repeatedly applied
* @return the iterator producing the infinite sequence of values `start, f(start), f(f(start)), ...`
*/
- def iterate[T](start: T)(f: T => T): Iterator[T] = new Iterator[T] {
+ def iterate[T](start: T)(f: T => T): Iterator[T] = new AbstractIterator[T] {
private[this] var first = true
private[this] var acc = start
def hasNext: Boolean = true
@@ -133,7 +145,7 @@ object Iterator {
* @param step the increment between successive values
* @return the iterator producing the infinite sequence of values `start, start + 1 * step, start + 2 * step, ...`
*/
- def from(start: Int, step: Int): Iterator[Int] = new Iterator[Int] {
+ def from(start: Int, step: Int): Iterator[Int] = new AbstractIterator[Int] {
private var i = start
def hasNext: Boolean = true
def next(): Int = { val result = i; i += step; result }
@@ -145,95 +157,10 @@ object Iterator {
* @param elem the element computation.
* @return the iterator containing an infinite number of results of evaluating `elem`.
*/
- def continually[A](elem: => A): Iterator[A] = new Iterator[A] {
+ def continually[A](elem: => A): Iterator[A] = new AbstractIterator[A] {
def hasNext = true
def next = elem
}
-
- @deprecated("use `xs.iterator' or `Iterator(xs)' instead", "2.8.0")
- def fromValues[a](xs: a*) = xs.iterator
-
- /** @param xs the array of elements
- * @see also: IndexedSeq.iterator and slice
- */
- @deprecated("use `xs.iterator' instead", "2.8.0")
- def fromArray[a](xs: Array[a]): Iterator[a] =
- fromArray(xs, 0, xs.length)
-
- /**
- * @param xs the array of elements
- * @param start the start index
- * @param length the length
- * @see also: IndexedSeq.iterator and slice
- */
- @deprecated("use `xs.slice(start, start + length).iterator' instead", "2.8.0")
- def fromArray[a](xs: Array[a], start: Int, length: Int): Iterator[a] =
- xs.slice(start, start + length).iterator
-
- /**
- * @param n the product arity
- * @return the iterator on `Product<n>`.
- */
- @deprecated("use product.productIterator instead", "2.8.0")
- def fromProduct(n: Product): Iterator[Any] = new Iterator[Any] {
- private var c: Int = 0
- private val cmax = n.productArity
- def hasNext = c < cmax
- def next() = { val a = n productElement c; c += 1; a }
- }
-
- /** Create an iterator with elements
- * `e<sub>n+1</sub> = step(e<sub>n</sub>)`
- * where `e<sub>0</sub> = start`
- * and elements are in the range between `start` (inclusive)
- * and `end` (exclusive)
- *
- * @param start the start value of the iterator
- * @param end the end value of the iterator
- * @param step the increment function of the iterator, must be monotonically increasing or decreasing
- * @return the iterator with values in range `[start;end)`.
- */
- @deprecated("use Iterator.iterate(start, end - start)(step) instead", "2.8.0")
- def range(start: Int, end: Int, step: Int => Int) = new Iterator[Int] {
- private val up = step(start) > start
- private val down = step(start) < start
- private var i = start
- def hasNext: Boolean = (!up || i < end) && (!down || i > end)
- def next(): Int =
- if (hasNext) { val j = i; i = step(i); j }
- else empty.next()
- }
-
- /** Create an iterator with elements
- * `e<sub>n+1</sub> = step(e<sub>n</sub>)`
- * where `e<sub>0</sub> = start`.
- *
- * @param start the start value of the iterator
- * @param step the increment function of the iterator
- * @return the iterator starting at value `start`.
- */
- @deprecated("use iterate(start)(step) instead", "2.8.0")
- def from(start: Int, step: Int => Int): Iterator[Int] = new Iterator[Int] {
- private var i = start
- override def hasNext: Boolean = true
- def next(): Int = { val j = i; i = step(i); j }
- }
-
- /** Create an iterator that is the concatenation of all iterators
- * returned by a given iterator of iterators.
- * @param its The iterator which returns on each call to next
- * a new iterator whose elements are to be concatenated to the result.
- */
- @deprecated("use its.flatten instead", "2.8.0")
- def flatten[T](its: Iterator[Iterator[T]]): Iterator[T] = new Iterator[T] {
- private var cur = its.next
- def hasNext: Boolean = {
- while (!cur.hasNext && its.hasNext) cur = its.next
- cur.hasNext
- }
- def next(): T =
- (if (hasNext) cur else empty).next()
- }
}
import Iterator.empty
@@ -376,7 +303,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
toDrop -= 1
}
- new Iterator[A] {
+ new AbstractIterator[A] {
private var remaining = until - lo
def hasNext = remaining > 0 && self.hasNext
def next(): A =
@@ -396,7 +323,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* iterator by applying the function `f` to it.
* @note Reuse: $consumesAndProducesIterator
*/
- def map[B](f: A => B): Iterator[B] = new Iterator[B] {
+ def map[B](f: A => B): Iterator[B] = new AbstractIterator[B] {
def hasNext = self.hasNext
def next() = f(self.next())
}
@@ -407,19 +334,23 @@ trait Iterator[+A] extends TraversableOnce[A] {
* @return a new iterator that first yields the values produced by this
* iterator followed by the values produced by iterator `that`.
* @note Reuse: $consumesTwoAndProducesOneIterator
+ *
* @usecase def ++(that: => Iterator[A]): Iterator[A]
+ * @inheritdoc
*/
- def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = new Iterator[B] {
+ def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = new AbstractIterator[B] {
// optimize a little bit to prevent n log n behavior.
private var cur : Iterator[B] = self
+ private var selfExhausted : Boolean = false
// since that is by-name, make sure it's only referenced once -
// if "val it = that" is inside the block, then hasNext on an empty
// iterator will continually reevaluate it. (ticket #3269)
lazy val it = that.toIterator
// the eq check is to avoid an infinite loop on "x ++ x"
- def hasNext = cur.hasNext || ((cur eq self) && {
+ def hasNext = cur.hasNext || (!selfExhausted && {
it.hasNext && {
cur = it
+ selfExhausted = true
true
}
})
@@ -434,7 +365,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* `f` to each value produced by this iterator and concatenating the results.
* @note Reuse: $consumesAndProducesIterator
*/
- def flatMap[B](f: A => GenTraversableOnce[B]): Iterator[B] = new Iterator[B] {
+ def flatMap[B](f: A => GenTraversableOnce[B]): Iterator[B] = new AbstractIterator[B] {
private var cur: Iterator[B] = empty
def hasNext: Boolean =
cur.hasNext || self.hasNext && { cur = f(self.next).toIterator; hasNext }
@@ -448,7 +379,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* @return an iterator which produces those values of this iterator which satisfy the predicate `p`.
* @note Reuse: $consumesAndProducesIterator
*/
- def filter(p: A => Boolean): Iterator[A] = new Iterator[A] {
+ def filter(p: A => Boolean): Iterator[A] = new AbstractIterator[A] {
private var hd: A = _
private var hdDefined: Boolean = false
@@ -464,6 +395,24 @@ trait Iterator[+A] extends TraversableOnce[A] {
def next() = if (hasNext) { hdDefined = false; hd } else empty.next()
}
+ /** Tests whether every element of this iterator relates to the
+ * corresponding element of another collection by satisfying a test predicate.
+ *
+ * @param that the other collection
+ * @param p the test predicate, which relates elements from both collections
+ * @tparam B the type of the elements of `that`
+ * @return `true` if both collections have the same length and
+ * `p(x, y)` is `true` for all corresponding elements `x` of this iterator
+ * and `y` of `that`, otherwise `false`
+ */
+ def corresponds[B](that: GenTraversableOnce[B])(p: (A, B) => Boolean): Boolean = {
+ val that0 = that.toIterator
+ while (hasNext && that0.hasNext)
+ if (!p(next, that0.next)) return false
+
+ hasNext == that0.hasNext
+ }
+
/** Creates an iterator over all the elements of this iterator that
* satisfy the predicate `p`. The order of the elements
* is preserved.
@@ -498,7 +447,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
@migration("`collect` has changed. The previous behavior can be reproduced with `toSeq`.", "2.8.0")
def collect[B](pf: PartialFunction[A, B]): Iterator[B] = {
val self = buffered
- new Iterator[B] {
+ new AbstractIterator[B] {
private def skip() = while (self.hasNext && !pf.isDefinedAt(self.head)) self.next()
def hasNext = { skip(); self.hasNext }
def next() = { skip(); pf(self.next()) }
@@ -517,7 +466,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* @return iterator with intermediate results
* @note Reuse: $consumesAndProducesIterator
*/
- def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B] = new Iterator[B] {
+ def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B] = new AbstractIterator[B] {
var hasNext = true
var elem = z
def next() = if (hasNext) {
@@ -553,7 +502,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* the predicate `p`.
* @note Reuse: $consumesAndProducesIterator
*/
- def takeWhile(p: A => Boolean): Iterator[A] = new Iterator[A] {
+ def takeWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] {
private var hd: A = _
private var hdDefined: Boolean = false
private var tail: Iterator[A] = self
@@ -578,7 +527,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
*/
def partition(p: A => Boolean): (Iterator[A], Iterator[A]) = {
val self = buffered
- class PartitionIterator(p: A => Boolean) extends Iterator[A] {
+ class PartitionIterator(p: A => Boolean) extends AbstractIterator[A] {
var other: PartitionIterator = _
val lookahead = new mutable.Queue[A]
def skip() =
@@ -612,7 +561,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* iterator is referring (the finish() method) and thus triggering
* handling of structural calls. It's not what's intended here.
*/
- class Leading extends Iterator[A] {
+ class Leading extends AbstractIterator[A] {
private var isDone = false
val lookahead = new mutable.Queue[A]
def advance() = {
@@ -634,7 +583,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
}
val leading = new Leading
- val trailing = new Iterator[A] {
+ val trailing = new AbstractIterator[A] {
private lazy val it = {
leading.finish()
self
@@ -656,7 +605,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
*/
def dropWhile(p: A => Boolean): Iterator[A] = {
val self = buffered
- new Iterator[A] {
+ new AbstractIterator[A] {
var dropped = false
private def skip() =
if (!dropped) {
@@ -681,7 +630,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* iterator and `that`.
* @note Reuse: $consumesTwoAndProducesOneIterator
*/
- def zip[B](that: Iterator[B]) = new Iterator[(A, B)] {
+ def zip[B](that: Iterator[B]): Iterator[(A, B)] = new AbstractIterator[(A, B)] {
def hasNext = self.hasNext && that.hasNext
def next = (self.next, that.next)
}
@@ -694,9 +643,11 @@ trait Iterator[+A] extends TraversableOnce[A] {
* followed by the minimal number of occurrences of `elem` so
* that the number of produced values is at least `len`.
* @note Reuse: $consumesAndProducesIterator
+ *
* @usecase def padTo(len: Int, elem: A): Iterator[A]
+ * @inheritdoc
*/
- def padTo[A1 >: A](len: Int, elem: A1) = new Iterator[A1] {
+ def padTo[A1 >: A](len: Int, elem: A1): Iterator[A1] = new AbstractIterator[A1] {
private var count = 0
def hasNext = self.hasNext || count < len
def next = {
@@ -714,7 +665,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* corresponding elements of this iterator and their indices.
* @note Reuse: $consumesAndProducesIterator
*/
- def zipWithIndex = new Iterator[(A, Int)] {
+ def zipWithIndex: Iterator[(A, Int)] = new AbstractIterator[(A, Int)] {
var idx = 0
def hasNext = self.hasNext
def next = {
@@ -743,9 +694,11 @@ trait Iterator[+A] extends TraversableOnce[A] {
* If this iterator is shorter than `that`, `thisElem` values are used to pad the result.
* If `that` is shorter than this iterator, `thatElem` values are used to pad the result.
* @note Reuse: $consumesTwoAndProducesOneIterator
+ *
* @usecase def zipAll[B](that: Iterator[B], thisElem: A, thatElem: B): Iterator[(A, B)]
+ * @inheritdoc
*/
- def zipAll[B, A1 >: A, B1 >: B](that: Iterator[B], thisElem: A1, thatElem: B1) = new Iterator[(A1, B1)] {
+ def zipAll[B, A1 >: A, B1 >: B](that: Iterator[B], thisElem: A1, thatElem: B1): Iterator[(A1, B1)] = new AbstractIterator[(A1, B1)] {
def hasNext = self.hasNext || that.hasNext
def next(): (A1, B1) =
if (self.hasNext) {
@@ -767,7 +720,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
* but this is not necessary.
*
* @note Reuse: $consumesIterator
+ *
* @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
*/
def foreach[U](f: A => U) { while (hasNext) f(next()) }
@@ -804,7 +759,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
*
* @param elem the element to test.
* @return `true` if this iterator produces some value that is
- * is equal (wrt `==`) to `elem`, `false` otherwise.
+ * is equal (as determined by `==`) to `elem`, `false` otherwise.
* @note Reuse: $consumesIterator
*/
def contains(elem: Any): Boolean = exists(_ == elem)
@@ -872,11 +827,11 @@ trait Iterator[+A] extends TraversableOnce[A] {
/** Creates a buffered iterator from this iterator.
*
- * @see BufferedIterator
+ * @see [[scala.collection.BufferedIterator]]
* @return a buffered iterator producing the same values as this iterator.
* @note Reuse: $consumesAndProducesIterator
*/
- def buffered = new BufferedIterator[A] {
+ def buffered: BufferedIterator[A] = new AbstractIterator[A] with BufferedIterator[A] {
private var hd: A = _
private var hdDefined: Boolean = false
@@ -904,7 +859,10 @@ trait Iterator[+A] extends TraversableOnce[A] {
*
* Typical uses can be achieved via methods `grouped` and `sliding`.
*/
- class GroupedIterator[B >: A](self: Iterator[A], size: Int, step: Int) extends Iterator[Seq[B]] {
+ class GroupedIterator[B >: A](self: Iterator[A], size: Int, step: Int)
+ extends AbstractIterator[Seq[B]]
+ with Iterator[Seq[B]] {
+
require(size >= 1 && step >= 1, "size=%d and step=%d, but both must be positive".format(size, step))
private[this] var buffer: ArrayBuffer[B] = ArrayBuffer() // the buffer
@@ -1086,7 +1044,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def duplicate: (Iterator[A], Iterator[A]) = {
val gap = new scala.collection.mutable.Queue[A]
var ahead: Iterator[A] = null
- class Partner extends Iterator[A] {
+ class Partner extends AbstractIterator[A] {
def hasNext: Boolean = self.synchronized {
(this ne ahead) && !gap.isEmpty || self.hasNext
}
@@ -1117,18 +1075,19 @@ trait Iterator[+A] extends TraversableOnce[A] {
* @param replaced The number of values in the original iterator that are replaced by the patch.
* @note Reuse: $consumesTwoAndProducesOneIterator
*/
- def patch[B >: A](from: Int, patchElems: Iterator[B], replaced: Int) = new Iterator[B] {
+ def patch[B >: A](from: Int, patchElems: Iterator[B], replaced: Int): Iterator[B] = new AbstractIterator[B] {
private var origElems = self
private var i = 0
def hasNext: Boolean =
if (i < from) origElems.hasNext
else patchElems.hasNext || origElems.hasNext
def next(): B = {
+ // We have to do this *first* just in case from = 0.
+ if (i == from) origElems = origElems drop replaced
val result: B =
if (i < from || !patchElems.hasNext) origElems.next()
else patchElems.next()
i += 1
- if (i == from) origElems = origElems drop replaced
result
}
}
@@ -1139,20 +1098,23 @@ trait Iterator[+A] extends TraversableOnce[A] {
* Copying will stop once either the end of the current iterator is reached,
* or the end of the array is reached, or `len` elements have been copied.
*
- * $willNotTerminateInf
- *
* @param xs the array to fill.
* @param start the starting index.
* @param len the maximal number of elements to copy.
* @tparam B the type of the elements of the array.
*
* @note Reuse: $consumesIterator
+ *
* @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = {
+ require(start >= 0 && (start < xs.length || xs.length == 0), s"start $start out of range ${xs.length}")
var i = start
- val end = start + math.min(len, xs.length)
- while (hasNext && i < end) {
+ val end = start + math.min(len, xs.length - start)
+ while (i < end && hasNext) {
xs(i) = next()
i += 1
}
@@ -1181,6 +1143,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
if (self.hasNext) Stream.cons(self.next, self.toStream)
else Stream.empty[A]
+
/** Converts this iterator to a string.
*
* @return `"empty iterator"` or `"non-empty iterator"`, depending on
@@ -1188,52 +1151,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* @note Reuse: $preservesIterator
*/
override def toString = (if (hasNext) "non-empty" else "empty")+" iterator"
-
- /** Returns a new iterator that first yields the elements of this
- * iterator followed by the elements provided by iterator `that`.
- */
- @deprecated("use `++`", "2.3.2")
- def append[B >: A](that: Iterator[B]) = self ++ that
-
- /** Returns index of the first element satisfying a predicate, or -1. */
- @deprecated("use `indexWhere` instead", "2.8.0")
- def findIndexOf(p: A => Boolean): Int = indexWhere(p)
-
- /** Returns a counted iterator from this iterator.
- */
- @deprecated("use zipWithIndex in Iterator", "2.8.0")
- def counted = new CountedIterator[A] {
- private var cnt = 0
- def count = cnt
- def hasNext: Boolean = self.hasNext
- def next(): A = { cnt += 1; self.next }
- }
-
- /** Fills the given array `xs` with the elements of
- * this sequence starting at position `start`. Like `copyToArray`,
- * but designed to accomodate IO stream operations.
- *
- * '''Note:''' the array must be large enough to hold `sz` elements.
- * @param xs the array to fill.
- * @param start the starting index.
- * @param sz the maximum number of elements to be read.
- */
- @deprecated("use copyToArray instead", "2.8.0")
- def readInto[B >: A](xs: Array[B], start: Int, sz: Int) {
- var i = start
- while (hasNext && i - start < sz) {
- xs(i) = next
- i += 1
- }
- }
-
- @deprecated("use copyToArray instead", "2.8.0")
- def readInto[B >: A](xs: Array[B], start: Int) {
- readInto(xs, start, xs.length - start)
- }
-
- @deprecated("use copyToArray instead", "2.8.0")
- def readInto[B >: A](xs: Array[B]) {
- readInto(xs, 0, xs.length)
- }
}
+
+/** Explicit instantiation of the `Iterator` trait to reduce class file size in subclasses. */
+private[scala] abstract class AbstractIterator[+A] extends Iterator[A]
diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala
index 8b98f10..59d4259 100644
--- a/src/library/scala/collection/JavaConversions.scala
+++ b/src/library/scala/collection/JavaConversions.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,6 +8,9 @@
package scala.collection
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import convert._
+
/** A collection of implicit conversions supporting interoperability between
* Scala and Java collections.
*
@@ -19,7 +22,8 @@ package scala.collection
* scala.collection.mutable.Buffer <=> java.util.List
* scala.collection.mutable.Set <=> java.util.Set
* scala.collection.mutable.Map <=> java.util.{ Map, Dictionary }
- * scala.collection.mutable.ConcurrentMap <=> java.util.concurrent.ConcurrentMap
+ * scala.collection.mutable.ConcurrentMap (deprecated since 2.10) <=> java.util.concurrent.ConcurrentMap
+ * scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap
*}}}
* In all cases, converting from a source type to a target type and back
* again will return the original source object, eg.
@@ -46,902 +50,83 @@ package scala.collection
* @author Martin Odersky
* @since 2.8
*/
-object JavaConversions {
+object JavaConversions extends WrapAsScala with WrapAsJava {
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type ConcurrentMapWrapper[A, B] = Wrappers.ConcurrentMapWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type DictionaryWrapper[A, B] = Wrappers.DictionaryWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type IterableWrapper[A] = Wrappers.IterableWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type IteratorWrapper[A] = Wrappers.IteratorWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JCollectionWrapper[A] = Wrappers.JCollectionWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JConcurrentMapWrapper[A, B] = Wrappers.JConcurrentMapWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JDictionaryWrapper[A, B] = Wrappers.JDictionaryWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JEnumerationWrapper[A] = Wrappers.JEnumerationWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JIterableWrapper[A] = Wrappers.JIterableWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JIteratorWrapper[A] = Wrappers.JIteratorWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JListWrapper[A] = Wrappers.JListWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JMapWrapper[A, B] = Wrappers.JMapWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JPropertiesWrapper = Wrappers.JPropertiesWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JSetWrapper[A] = Wrappers.JSetWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MapWrapper[A, B] = Wrappers.MapWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableBufferWrapper[A] = Wrappers.MutableBufferWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableMapWrapper[A, B] = Wrappers.MutableMapWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableSeqWrapper[A] = Wrappers.MutableSeqWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableSetWrapper[A] = Wrappers.MutableSetWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type SeqWrapper[A] = Wrappers.SeqWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type SetWrapper[A] = Wrappers.SetWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type ToIteratorWrapper[A] = Wrappers.ToIteratorWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val DictionaryWrapper = Wrappers.DictionaryWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val IterableWrapper = Wrappers.IterableWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val IteratorWrapper = Wrappers.IteratorWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JCollectionWrapper = Wrappers.JCollectionWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JConcurrentMapWrapper = Wrappers.JConcurrentMapWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JDictionaryWrapper = Wrappers.JDictionaryWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JEnumerationWrapper = Wrappers.JEnumerationWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JIterableWrapper = Wrappers.JIterableWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JIteratorWrapper = Wrappers.JIteratorWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JListWrapper = Wrappers.JListWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JMapWrapper = Wrappers.JMapWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JPropertiesWrapper = Wrappers.JPropertiesWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JSetWrapper = Wrappers.JSetWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableBufferWrapper = Wrappers.MutableBufferWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableMapWrapper = Wrappers.MutableMapWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSeqWrapper = Wrappers.MutableSeqWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSetWrapper = Wrappers.MutableSetWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val SeqWrapper = Wrappers.SeqWrapper
+
// Note to implementors: the cavalcade of deprecated methods herein should
// serve as a warning to any who follow: don't overload implicit methods.
- import java.{ lang => jl, util => ju }
- import java.util.{ concurrent => juc }
-
- // Scala => Java
-
- /**
- * Implicitly converts a Scala Iterator to a Java Iterator.
- * The returned Java Iterator is backed by the provided Scala
- * Iterator and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Iterator was previously obtained from an implicit or
- * explicit call of asIterator(java.util.Iterator) then the original
- * Java Iterator will be returned.
- *
- * @param i The Iterator to be converted.
- * @return A Java Iterator view of the argument.
- */
- implicit def asJavaIterator[A](i : Iterator[A]): ju.Iterator[A] = i match {
- case JIteratorWrapper(wrapped) => wrapped
- case _ => IteratorWrapper(i)
- }
-
- @deprecated("use asJavaIterator instead", "2.8.1")
- def asIterator[A](i : Iterator[A]): ju.Iterator[A] = asJavaIterator[A](i)
-
- /**
- * Implicitly converts a Scala Iterator to a Java Enumeration.
- * The returned Java Enumeration is backed by the provided Scala
- * Iterator and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Iterator was previously obtained from an implicit or
- * explicit call of asIterator(java.util.Enumeration) then the
- * original Java Enumeration will be returned.
- *
- * @param i The Iterator to be converted.
- * @return A Java Enumeration view of the argument.
- */
- implicit def asJavaEnumeration[A](i : Iterator[A]): ju.Enumeration[A] = i match {
- case JEnumerationWrapper(wrapped) => wrapped
- case _ => IteratorWrapper(i)
- }
-
- @deprecated("use asJavaEnmeration instead", "2.8.1")
- def asEnumeration[A](i : Iterator[A]): ju.Enumeration[A] = asJavaEnumeration[A](i)
-
- /**
- * Implicitly converts a Scala Iterable to a Java Iterable.
- * The returned Java Iterable is backed by the provided Scala
- * Iterable and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Iterable was previously obtained from an implicit or
- * explicit call of asIterable(java.lang.Iterable) then the original
- * Java Iterable will be returned.
- *
- * @param i The Iterable to be converted.
- * @return A Java Iterable view of the argument.
- */
- implicit def asJavaIterable[A](i : Iterable[A]): jl.Iterable[A] = i match {
- case JIterableWrapper(wrapped) => wrapped
- case _ => IterableWrapper(i)
- }
-
- @deprecated("use asJavaIterable instead", "2.8.1")
- def asIterable[A](i : Iterable[A]): jl.Iterable[A] = asJavaIterable[A](i)
-
- /**
- * Implicitly converts a Scala Iterable to an immutable Java
- * Collection.
- *
- * If the Scala Iterable was previously obtained from an implicit or
- * explicit call of asSizedIterable(java.util.Collection) then the original
- * Java Collection will be returned.
- *
- * @param i The SizedIterable to be converted.
- * @return A Java Collection view of the argument.
- */
- implicit def asJavaCollection[A](i : Iterable[A]): ju.Collection[A] = i match {
- case JCollectionWrapper(wrapped) => wrapped
- case _ => new IterableWrapper(i)
- }
-
- @deprecated("use asJavaCollection instead", "2.8.1")
- def asCollection[A](i : Iterable[A]): ju.Collection[A] = asJavaCollection[A](i)
-
- /**
- * Implicitly converts a Scala mutable Buffer to a Java List.
- * The returned Java List is backed by the provided Scala
- * Buffer and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Buffer was previously obtained from an implicit or
- * explicit call of asBuffer(java.util.List) then the original
- * Java List will be returned.
- *
- * @param b The Buffer to be converted.
- * @return A Java List view of the argument.
- */
- implicit def bufferAsJavaList[A](b : mutable.Buffer[A]): ju.List[A] = b match {
- case JListWrapper(wrapped) => wrapped
- case _ => new MutableBufferWrapper(b)
- }
@deprecated("use bufferAsJavaList instead", "2.9.0")
def asJavaList[A](b : mutable.Buffer[A]): ju.List[A] = bufferAsJavaList[A](b)
- @deprecated("use bufferAsJavaList instead", "2.8.1")
- def asList[A](b : mutable.Buffer[A]): ju.List[A] = bufferAsJavaList[A](b)
- /**
- * Implicitly converts a Scala mutable Seq to a Java List.
- * The returned Java List is backed by the provided Scala
- * Seq and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Seq was previously obtained from an implicit or
- * explicit call of asSeq(java.util.List) then the original
- * Java List will be returned.
- *
- * @param b The Seq to be converted.
- * @return A Java List view of the argument.
- */
- implicit def mutableSeqAsJavaList[A](b : mutable.Seq[A]): ju.List[A] = b match {
- case JListWrapper(wrapped) => wrapped
- case _ => new MutableSeqWrapper(b)
- }
@deprecated("use mutableSeqAsJavaList instead", "2.9.0")
def asJavaList[A](b : mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList[A](b)
- @deprecated("use mutableSeqAsJavaList instead", "2.8.1")
- def asList[A](b : mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList[A](b)
-
- /**
- * Implicitly converts a Scala Seq to a Java List.
- * The returned Java List is backed by the provided Scala
- * Seq and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Seq was previously obtained from an implicit or
- * explicit call of asSeq(java.util.List) then the original
- * Java List will be returned.
- *
- * @param b The Seq to be converted.
- * @return A Java List view of the argument.
- */
- implicit def seqAsJavaList[A](b : Seq[A]): ju.List[A] = b match {
- case JListWrapper(wrapped) => wrapped
- case _ => new SeqWrapper(b)
- }
@deprecated("use seqAsJavaList instead", "2.9.0")
def asJavaList[A](b : Seq[A]): ju.List[A] = seqAsJavaList[A](b)
- @deprecated("use seqAsJavaList instead", "2.8.1")
- def asList[A](b : Seq[A]): ju.List[A] = seqAsJavaList[A](b)
-
- /**
- * Implicitly converts a Scala mutable Set to a Java Set.
- * The returned Java Set is backed by the provided Scala
- * Set and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Set was previously obtained from an implicit or
- * explicit call of asSet(java.util.Set) then the original
- * Java Set will be returned.
- *
- * @param s The Set to be converted.
- * @return A Java Set view of the argument.
- */
- implicit def mutableSetAsJavaSet[A](s : mutable.Set[A]): ju.Set[A] = s match {
- case JSetWrapper(wrapped) => wrapped
- case _ => new MutableSetWrapper(s)
- }
@deprecated("use mutableSetAsJavaSet instead", "2.9.0")
def asJavaSet[A](s : mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet[A](s)
- @deprecated("use mutableSetAsJavaSet instead", "2.8.1")
- def asSet[A](s : mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet[A](s)
-
- /**
- * Implicitly converts a Scala Set to a Java Set.
- * The returned Java Set is backed by the provided Scala
- * Set and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Set was previously obtained from an implicit or
- * explicit call of asSet(java.util.Set) then the original
- * Java Set will be returned.
- *
- * @param s The Set to be converted.
- * @return A Java Set view of the argument.
- */
- implicit def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match {
- case JSetWrapper(wrapped) => wrapped
- case _ => new SetWrapper(s)
- }
@deprecated("use setAsJavaSet instead", "2.9.0")
def asJavaSet[A](s: Set[A]): ju.Set[A] = setAsJavaSet[A](s)
- @deprecated("use setAsJavaSet instead", "2.8.1")
- def asSet[A](s : Set[A]): ju.Set[A] = setAsJavaSet[A](s)
-
- /**
- * Implicitly converts a Scala mutable Map to a Java Map.
- * The returned Java Map is backed by the provided Scala
- * Map and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Map was previously obtained from an implicit or
- * explicit call of asMap(java.util.Map) then the original
- * Java Map will be returned.
- *
- * @param m The Map to be converted.
- * @return A Java Map view of the argument.
- */
- implicit def mutableMapAsJavaMap[A, B](m : mutable.Map[A, B]): ju.Map[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
- case JMapWrapper(wrapped) => wrapped
- case _ => new MutableMapWrapper(m)
- }
@deprecated("use mutableMapAsJavaMap instead", "2.9.0")
def asJavaMap[A, B](m : mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap[A, B](m)
- @deprecated("use mutableMapAsJavaMap instead", "2.8.1")
- def asMap[A, B](m : mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap[A, B](m)
-
- /**
- * Implicitly converts a Scala mutable Map to a Java Dictionary.
- * The returned Java Dictionary is backed by the provided Scala
- * Dictionary and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Dictionary was previously obtained from an implicit or
- * explicit call of asMap(java.util.Dictionary) then the original
- * Java Dictionary will be returned.
- *
- * @param m The Map to be converted.
- * @return A Java Dictionary view of the argument.
- */
- implicit def asJavaDictionary[A, B](m : mutable.Map[A, B]): ju.Dictionary[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
- case JDictionaryWrapper(wrapped) => wrapped
- case _ => new DictionaryWrapper(m)
- }
-
- @deprecated("use asJavaDictionary instead", "2.8.1")
- def asDictionary[A, B](m : mutable.Map[A, B]): ju.Dictionary[A, B] = asJavaDictionary[A, B](m)
-
- /**
- * Implicitly converts a Scala Map to a Java Map.
- * The returned Java Map is backed by the provided Scala
- * Map and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Map was previously obtained from an implicit or
- * explicit call of asMap(java.util.Map) then the original
- * Java Map will be returned.
- *
- * @param m The Map to be converted.
- * @return A Java Map view of the argument.
- */
- implicit def mapAsJavaMap[A, B](m : Map[A, B]): ju.Map[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
- case JMapWrapper(wrapped) => wrapped
- case _ => new MapWrapper(m)
- }
@deprecated("use mapAsJavaMap instead", "2.9.0")
def asJavaMap[A, B](m : Map[A, B]): ju.Map[A, B] = mapAsJavaMap[A, B](m)
- @deprecated("use mapAsJavaMap instead", "2.8.1")
- def asMap[A, B](m : Map[A, B]): ju.Map[A, B] = mapAsJavaMap[A, B](m)
-
- /**
- * Implicitly converts a Scala mutable `ConcurrentMap` to a Java `ConcurrentMap`.
- * The returned Java `ConcurrentMap` is backed by the provided Scala `ConcurrentMap`
- * and any side-effects of using it via the Java interface will be visible
- * via the Scala interface and vice versa.
- *
- * If the Scala ConcurrentMap was previously obtained from an implicit or
- * explicit call of asConcurrentMap(java.util.concurrect.ConcurrentMap) then the original
- * Java ConcurrentMap will be returned.
- *
- * @param m The ConcurrentMap to be converted.
- * @return A Java ConcurrentMap view of the argument.
- */
- implicit def asJavaConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = m match {
- case JConcurrentMapWrapper(wrapped) => wrapped
- case _ => new ConcurrentMapWrapper(m)
- }
-
- @deprecated("use asJavaConcurrentMap instead", "2.8.1")
- def asConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = asJavaConcurrentMap[A, B](m)
-
- // Java => Scala
-
- /**
- * Implicitly converts a Java Iterator to a Scala Iterator.
- * The returned Scala Iterator is backed by the provided Java
- * Iterator and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java Iterator was previously obtained from an implicit or
- * explicit call of asIterator(scala.collection.Iterator) then the original
- * Scala Iterator will be returned.
- *
- * @param i The Iterator to be converted.
- * @return A Scala Iterator view of the argument.
- */
- implicit def asScalaIterator[A](i : ju.Iterator[A]): Iterator[A] = i match {
- case IteratorWrapper(wrapped) => wrapped
- case _ => JIteratorWrapper(i)
- }
-
- @deprecated("use asScalaIterator instead", "2.8.1")
- def asIterator[A](i : ju.Iterator[A]): Iterator[A] = asScalaIterator[A](i)
-
- /**
- * Implicitly converts a Java Enumeration to a Scala Iterator.
- * The returned Scala Iterator is backed by the provided Java
- * Enumeration and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java Enumeration was previously obtained from an implicit or
- * explicit call of enumerationAsScalaIterator(scala.collection.Iterator) then the
- * original Scala Iterator will be returned.
- *
- * @param i The Enumeration to be converted.
- * @return A Scala Iterator view of the argument.
- */
- implicit def enumerationAsScalaIterator[A](i : ju.Enumeration[A]): Iterator[A] = i match {
- case IteratorWrapper(wrapped) => wrapped
- case _ => JEnumerationWrapper(i)
- }
-
- @deprecated("use enumerationAsScalaIterator instead", "2.8.1")
- def asIterator[A](i : ju.Enumeration[A]): Iterator[A] = enumerationAsScalaIterator[A](i)
-
- /**
- * Implicitly converts a Java Iterable to a Scala Iterable.
- * The returned Scala Iterable is backed by the provided Java
- * Iterable and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java Iterable was previously obtained from an implicit or
- * explicit call of iterableAsScalaIterable(scala.collection.Iterable) then the original
- * Scala Iterable will be returned.
- *
- * @param i The Iterable to be converted.
- * @return A Scala Iterable view of the argument.
- */
- implicit def iterableAsScalaIterable[A](i : jl.Iterable[A]): Iterable[A] = i match {
- case IterableWrapper(wrapped) => wrapped
- case _ => JIterableWrapper(i)
- }
@deprecated("use iterableAsScalaIterable instead", "2.9.0")
def asScalaIterable[A](i : jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable[A](i)
- @deprecated("use iterableAsScalaIterable instead", "2.8.1")
- def asIterable[A](i : jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable[A](i)
- /**
- * Implicitly converts a Java Collection to an Scala Iterable.
- *
- * If the Java Collection was previously obtained from an implicit or
- * explicit call of collectionAsScalaIterable(scala.collection.SizedIterable) then
- * the original Scala Iterable will be returned.
- *
- * @param i The Collection to be converted.
- * @return A Scala Iterable view of the argument.
- */
- implicit def collectionAsScalaIterable[A](i : ju.Collection[A]): Iterable[A] = i match {
- case IterableWrapper(wrapped) => wrapped
- case _ => JCollectionWrapper(i)
- }
@deprecated("use collectionAsScalaIterable instead", "2.9.0")
def asScalaIterable[A](i : ju.Collection[A]): Iterable[A] = collectionAsScalaIterable[A](i)
- @deprecated("use collectionAsScalaIterable instead", "2.8.1")
- def asIterable[A](i : ju.Collection[A]): Iterable[A] = collectionAsScalaIterable[A](i)
-
- /**
- * Implicitly converts a Java List to a Scala mutable Buffer.
- * The returned Scala Buffer is backed by the provided Java
- * List and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java List was previously obtained from an implicit or
- * explicit call of asScalaBuffer(scala.collection.mutable.Buffer) then the original
- * Scala Buffer will be returned.
- *
- * @param l The List to be converted.
- * @return A Scala mutable Buffer view of the argument.
- */
- implicit def asScalaBuffer[A](l : ju.List[A]): mutable.Buffer[A] = l match {
- case MutableBufferWrapper(wrapped) => wrapped
- case _ =>new JListWrapper(l)
- }
-
- @deprecated("use asScalaBuffer instead", "2.8.1")
- def asBuffer[A](l : ju.List[A]): mutable.Buffer[A] = asScalaBuffer[A](l)
-
- /**
- * Implicitly converts a Java Set to a Scala mutable Set.
- * The returned Scala Set is backed by the provided Java
- * Set and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java Set was previously obtained from an implicit or
- * explicit call of asScalaSet(scala.collection.mutable.Set) then the original
- * Scala Set will be returned.
- *
- * @param s The Set to be converted.
- * @return A Scala mutable Set view of the argument.
- */
- implicit def asScalaSet[A](s : ju.Set[A]): mutable.Set[A] = s match {
- case MutableSetWrapper(wrapped) => wrapped
- case _ =>new JSetWrapper(s)
- }
-
- @deprecated("use asScalaSet instead", "2.8.1")
- def asSet[A](s : ju.Set[A]): mutable.Set[A] = asScalaSet[A](s)
-
- /**
- * Implicitly converts a Java Map to a Scala mutable Map.
- * The returned Scala Map is backed by the provided Java
- * Map and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java Map was previously obtained from an implicit or
- * explicit call of mapAsScalaMap(scala.collection.mutable.Map) then the original
- * Scala Map will be returned.
- *
- * @param m The Map to be converted.
- * @return A Scala mutable Map view of the argument.
- */
- implicit def mapAsScalaMap[A, B](m : ju.Map[A, B]): mutable.Map[A, B] = m match {
- //case ConcurrentMapWrapper(wrapped) => wrapped
- case MutableMapWrapper(wrapped) => wrapped
- case _ => new JMapWrapper(m)
- }
@deprecated("use mapAsScalaMap instead", "2.9.0")
- def asScalaMap[A, B](m : ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap[A, B](m)
- @deprecated("use mapAsScalaMap instead", "2.8.1")
- def asMap[A, B](m : ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap[A, B](m)
-
- /**
- * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap.
- * The returned Scala ConcurrentMap is backed by the provided Java
- * ConcurrentMap and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java ConcurrentMap was previously obtained from an implicit or
- * explicit call of asConcurrentMap(scala.collection.mutable.ConcurrentMap) then the original
- * Scala ConcurrentMap will be returned.
- *
- * @param m The ConcurrentMap to be converted.
- * @return A Scala mutable ConcurrentMap view of the argument.
- */
- implicit def asScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = m match {
- case cmw: ConcurrentMapWrapper[a, b] => cmw.underlying
- case _ => new JConcurrentMapWrapper(m)
- }
-
- @deprecated("use asScalaConcurrentMap instead", "2.8.1")
- def asConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = asScalaConcurrentMap[A, B](m)
-
- /**
- * Implicitly converts a Java Dictionary to a Scala mutable Map[String, String].
- * The returned Scala Map[String, String] is backed by the provided Java
- * Dictionary and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * @param m The Dictionary to be converted.
- * @return A Scala mutable Map[String, String] view of the argument.
- */
- implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match {
- case DictionaryWrapper(wrapped) => wrapped
- case _ => new JDictionaryWrapper(p)
- }
-
- @deprecated("use dictionaryAsScalaMap instead", "2.8.1")
- def asMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = dictionaryAsScalaMap[A, B](p)
-
- /**
- * Implicitly converts a Java Properties to a Scala mutable Map[String, String].
- * The returned Scala Map[String, String] is backed by the provided Java
- * Properties and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * @param m The Properties to be converted.
- * @return A Scala mutable Map[String, String] view of the argument.
- */
- implicit def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match {
- case _ => new JPropertiesWrapper(p)
- }
+ def asScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap[A, B](m)
@deprecated("use propertiesAsScalaMap instead", "2.9.0")
def asScalaMap(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p)
- @deprecated("use propertiesAsScalaMap instead", "2.8.1")
- def asMap(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p)
-
- // Private implementations (shared by JavaConverters) ...
-
- trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] {
- val underlying: Iterable[A]
- def size = underlying.size
- override def iterator = IteratorWrapper(underlying.iterator)
- override def isEmpty = underlying.isEmpty
- }
-
- case class IteratorWrapper[A](underlying : Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] {
- def hasNext = underlying.hasNext
- def next() = underlying.next
- def hasMoreElements = underlying.hasNext
- def nextElement() = underlying.next
- def remove() = throw new UnsupportedOperationException
- }
-
- class ToIteratorWrapper[A](underlying : Iterator[A]) {
- def asJava = new IteratorWrapper(underlying)
- }
-
- case class JIteratorWrapper[A](underlying : ju.Iterator[A]) extends Iterator[A] {
- def hasNext = underlying.hasNext
- def next() = underlying.next
- }
-
- case class JEnumerationWrapper[A](underlying : ju.Enumeration[A]) extends Iterator[A] {
- def hasNext = underlying.hasMoreElements
- def next() = underlying.nextElement
- }
-
- case class IterableWrapper[A](underlying : Iterable[A])
- extends ju.AbstractCollection[A]
- with IterableWrapperTrait[A] { }
-
- case class JIterableWrapper[A](underlying : jl.Iterable[A]) extends Iterable[A] {
- def iterator = underlying.iterator
- def newBuilder[B] = new mutable.ArrayBuffer[B]
- }
-
- case class JCollectionWrapper[A](underlying : ju.Collection[A]) extends Iterable[A] {
- def iterator = underlying.iterator
- override def size = underlying.size
- override def isEmpty = underlying.isEmpty
- def newBuilder[B] = new mutable.ArrayBuffer[B]
- }
-
- case class SeqWrapper[A](underlying : Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
- def get(i : Int) = underlying(i)
- }
-
- case class MutableSeqWrapper[A](underlying : mutable.Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
- def get(i : Int) = underlying(i)
- override def set(i : Int, elem: A) = { val p = underlying(i) ; underlying(i) = elem ; p }
- }
-
- case class MutableBufferWrapper[A](underlying : mutable.Buffer[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
- def get(i : Int) = underlying(i)
- override def set(i : Int, elem: A) = { val p = underlying(i) ; underlying(i) = elem ; p }
- override def add(elem : A) = { underlying.append(elem) ; true }
- override def remove(i : Int) = underlying.remove(i)
- }
-
- case class JListWrapper[A](val underlying : ju.List[A]) extends mutable.Buffer[A] {
- def length = underlying.size
- override def isEmpty = underlying.isEmpty
- override def iterator : Iterator[A] = underlying.iterator
- def apply(i : Int) = underlying.get(i)
- def update(i : Int, elem : A) = underlying.set(i, elem)
- def +=:(elem : A) = { underlying.subList(0, 0).add(elem) ; this }
- def +=(elem : A): this.type = { underlying.add(elem); this }
- def insertAll(i : Int, elems : Traversable[A]) = { val ins = underlying.subList(0, i) ; elems.seq.foreach(ins.add(_)) }
- def remove(i : Int) = underlying.remove(i)
- def clear = underlying.clear
- def result = this
- }
-
- class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] {
- self =>
- def size = underlying.size
- def iterator = new ju.Iterator[A] {
- val ui = underlying.iterator
- var prev : Option[A] = None
- def hasNext = ui.hasNext
- def next = { val e = ui.next ; prev = Some(e) ; e }
- def remove = prev match {
- case Some(e) =>
- underlying match {
- case ms: mutable.Set[a] =>
- ms.remove(e.asInstanceOf[a])
- prev = None
- case _ =>
- throw new UnsupportedOperationException("remove")
- }
- case _ => throw new IllegalStateException("next must be called at least once before remove")
- }
- }
- }
-
- case class MutableSetWrapper[A](underlying : mutable.Set[A]) extends SetWrapper[A](underlying) {
- override def add(elem: A) = { val sz = underlying.size ; underlying += elem ; sz < underlying.size }
- override def remove(elem : AnyRef) = try {
- underlying.remove(elem.asInstanceOf[A])
- } catch {
- case ex: ClassCastException => false
- }
- override def clear() = underlying.clear()
- }
-
- case class JSetWrapper[A](underlying : ju.Set[A]) extends mutable.Set[A] with mutable.SetLike[A, JSetWrapper[A]] {
- override def size = underlying.size
-
- def iterator = underlying.iterator
-
- def contains(elem: A): Boolean = underlying.contains(elem)
-
- def +=(elem: A): this.type = { underlying.add(elem); this }
- def -=(elem: A): this.type = { underlying.remove(elem); this }
-
- override def add(elem: A): Boolean = underlying.add(elem)
- override def remove(elem: A): Boolean = underlying.remove(elem)
- override def clear() = underlying.clear()
-
- override def empty = JSetWrapper(new ju.HashSet[A])
- }
-
- class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] { self =>
- override def size = underlying.size
-
- override def get(key: AnyRef): B = try {
- underlying get key.asInstanceOf[A] match {
- case None => null.asInstanceOf[B]
- case Some(v) => v
- }
- } catch {
- case ex: ClassCastException => null.asInstanceOf[B]
- }
-
- override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
- def size = self.size
-
- def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
- val ui = underlying.iterator
- var prev : Option[A] = None
-
- def hasNext = ui.hasNext
-
- def next() = {
- val (k, v) = ui.next
- prev = Some(k)
- new ju.Map.Entry[A, B] {
- def getKey = k
- def getValue = v
- def setValue(v1 : B) = self.put(k, v1)
- override def hashCode = k.hashCode + v.hashCode
- override def equals(other: Any) = other match {
- case e : ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
- case _ => false
- }
- }
- }
-
- def remove() = prev match {
- case Some(k) =>
- underlying match {
- case mm: mutable.Map[a, _] =>
- val v = mm.remove(k.asInstanceOf[a])
- prev = None
- v
- case _ =>
- throw new UnsupportedOperationException("remove")
- }
- case _ =>
- throw new IllegalStateException("next must be called at least once before remove")
- }
- }
- }
- }
-
- case class MutableMapWrapper[A, B](underlying: mutable.Map[A, B])
- extends MapWrapper[A, B](underlying) {
- override def put(k : A, v : B) = underlying.put(k, v) match {
- case Some(v1) => v1
- case None => null.asInstanceOf[B]
- }
-
- override def remove(k : AnyRef): B = try {
- underlying.remove(k.asInstanceOf[A]) match {
- case None => null.asInstanceOf[B]
- case Some(v) => v
- }
- } catch {
- case ex: ClassCastException => null.asInstanceOf[B]
- }
-
- override def clear() = underlying.clear()
- }
-
- trait JMapWrapperLike[A, B, +Repr <: mutable.MapLike[A, B, Repr] with mutable.Map[A, B]]
- extends mutable.Map[A, B] with mutable.MapLike[A, B, Repr] {
- def underlying: ju.Map[A, B]
-
- override def size = underlying.size
-
- def get(k : A) = {
- val v = underlying.get(k)
- if (v != null)
- Some(v)
- else if(underlying.containsKey(k))
- Some(null.asInstanceOf[B])
- else
- None
- }
-
- def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
- def -=(key: A): this.type = { underlying.remove(key); this }
-
- override def put(k : A, v : B): Option[B] = {
- val r = underlying.put(k, v)
- if (r != null) Some(r) else None
- }
-
- override def update(k : A, v : B) { underlying.put(k, v) }
-
- override def remove(k : A): Option[B] = {
- val r = underlying.remove(k)
- if (r != null) Some(r) else None
- }
-
- def iterator = new Iterator[(A, B)] {
- val ui = underlying.entrySet.iterator
- def hasNext = ui.hasNext
- def next() = { val e = ui.next ; (e.getKey, e.getValue) }
- }
-
- override def clear() = underlying.clear()
-
- override def empty: Repr = null.asInstanceOf[Repr]
- }
-
- case class JMapWrapper[A, B](val underlying : ju.Map[A, B])
- extends JMapWrapperLike[A, B, JMapWrapper[A, B]] {
- override def empty = JMapWrapper(new ju.HashMap[A, B])
- }
-
- class ConcurrentMapWrapper[A, B](override val underlying: mutable.ConcurrentMap[A, B])
- extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
-
- def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
- case Some(v) => v
- case None => null.asInstanceOf[B]
- }
-
- def remove(k: AnyRef, v: AnyRef) = try {
- underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B])
- } catch {
- case ex: ClassCastException =>
- false
- }
-
- def replace(k: A, v: B): B = underlying.replace(k, v) match {
- case Some(v) => v
- case None => null.asInstanceOf[B]
- }
-
- def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
- }
-
- case class JConcurrentMapWrapper[A, B](val underlying: juc.ConcurrentMap[A, B])
- extends JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with mutable.ConcurrentMap[A, B] {
- override def get(k: A) = {
- val v = underlying.get(k)
- if (v != null) Some(v)
- else None
- }
-
- override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[A, B])
-
- def putIfAbsent(k: A, v: B): Option[B] = {
- val r = underlying.putIfAbsent(k, v)
- if (r != null) Some(r) else None
- }
-
- def remove(k: A, v: B): Boolean = underlying.remove(k, v)
-
- def replace(k: A, v: B): Option[B] = {
- val prev = underlying.replace(k, v)
- if (prev != null) Some(prev) else None
- }
-
- def replace(k: A, oldvalue: B, newvalue: B): Boolean = underlying.replace(k, oldvalue, newvalue)
- }
-
- case class DictionaryWrapper[A, B](underlying: mutable.Map[A, B])
- extends ju.Dictionary[A, B] {
- def size: Int = underlying.size
- def isEmpty: Boolean = underlying.isEmpty
- def keys: ju.Enumeration[A] = asJavaEnumeration(underlying.keysIterator)
- def elements: ju.Enumeration[B] = asJavaEnumeration(underlying.valuesIterator)
- def get(key: AnyRef) = try {
- underlying.get(key.asInstanceOf[A]) match {
- case None => null.asInstanceOf[B]
- case Some(v) => v
- }
- } catch {
- case ex: ClassCastException => null.asInstanceOf[B]
- }
- def put(key: A, value: B): B = underlying.put(key, value) match {
- case Some(v) => v
- case None => null.asInstanceOf[B]
- }
- override def remove(key: AnyRef) = try {
- underlying.remove(key.asInstanceOf[A]) match {
- case None => null.asInstanceOf[B]
- case Some(v) => v
- }
- } catch {
- case ex: ClassCastException => null.asInstanceOf[B]
- }
- }
-
- case class JDictionaryWrapper[A, B](underlying: ju.Dictionary[A, B])
- extends mutable.Map[A, B] {
-
- override def size: Int = underlying.size
-
- def get(k : A) = {
- val v = underlying.get(k)
- if (v != null) Some(v) else None
- }
-
- def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
- def -=(key: A): this.type = { underlying.remove(key); this }
-
- override def put(k : A, v : B): Option[B] = {
- val r = underlying.put(k, v)
- if (r != null) Some(r) else None
- }
-
- override def update(k : A, v : B) { underlying.put(k, v) }
-
- override def remove(k : A): Option[B] = {
- val r = underlying.remove(k)
- if (r != null) Some(r) else None
- }
-
- def iterator = enumerationAsScalaIterator(underlying.keys) map (k => (k, underlying get k))
-
- override def clear() = underlying.clear()
- }
-
- case class JPropertiesWrapper(underlying: ju.Properties)
- extends mutable.Map[String, String] with mutable.MapLike[String, String, JPropertiesWrapper] {
- override def size = underlying.size
-
- def get(k : String) = {
- val v = underlying.get(k)
- if (v != null)
- Some(v.asInstanceOf[String])
- else
- None
- }
-
- def +=(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this }
- def -=(key: String): this.type = { underlying.remove(key); this }
-
- override def put(k : String, v : String): Option[String] = {
- val r = underlying.put(k, v)
- if (r != null) Some(r.asInstanceOf[String]) else None
- }
-
- override def update(k : String, v : String) { underlying.put(k, v) }
-
- override def remove(k : String): Option[String] = {
- val r = underlying.remove(k)
- if (r != null) Some(r.asInstanceOf[String]) else None
- }
-
- def iterator = new Iterator[(String, String)] {
- val ui = underlying.entrySet.iterator
- def hasNext = ui.hasNext
- def next() = { val e = ui.next ; (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String]) }
- }
-
- override def clear() = underlying.clear()
-
- override def empty = JPropertiesWrapper(new ju.Properties)
-
- def getProperty(key: String) = underlying.getProperty(key)
-
- def getProperty(key: String, defaultValue: String) = underlying.getProperty(key, defaultValue)
-
- def setProperty(key: String, value: String) = underlying.setProperty(key, value)
- }
}
+
diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala
old mode 100644
new mode 100755
index b0abf09..ab3ac89
--- a/src/library/scala/collection/JavaConverters.scala
+++ b/src/library/scala/collection/JavaConverters.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,483 +8,96 @@
package scala.collection
-/** <p>
- * A collection of decorators that allow to convert between
- * Scala and Java collections using `asScala` and `asJava` methods.
- * </p>
- * <p>
- * The following conversions are supported via `asJava`, `asScala`
- * </p>
- * <ul>
- * <li><code>scala.collection.Iterable</code> <=> <code>java.lang.Iterable</code></li>
- * <li><code>scala.collection.Iterator</code> <=> <code>java.util.Iterator</code></li>
- * <li><code>scala.collection.mutable.Buffer</code> <=> <code>java.util.List</code></li>
- * <li><code>scala.collection.mutable.Set</code> <=> <code>java.util.Set</code></li>
- * <li><code>scala.collection.mutable.Map</code> <=> <code>java.util.Map</code></li>
- * <li><code>scala.collection.mutable.ConcurrentMap</code> <=> <code>java.util.concurrent.ConcurrentMap</code></li>
- * </ul>
- * <p>
- * In all cases, converting from a source type to a target type and back
- * again will return the original source object, e.g.
- * </p>
- * <pre>
- * <b>import</b> scala.collection.JavaConverters._
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import convert._
+
+// TODO: I cleaned all this documentation up in JavaConversions, but the
+// documentation in here is basically the pre-cleaned-up version with minor
+// additions. Would be nice to have in one place.
+
+
+/** A collection of decorators that allow converting between
+ * Scala and Java collections using `asScala` and `asJava` methods.
+ *
+ * The following conversions are supported via `asJava`, `asScala`
+ *
+ * - `scala.collection.Iterable` <=> `java.lang.Iterable`
+ * - `scala.collection.Iterator` <=> `java.util.Iterator`
+ * - `scala.collection.mutable.Buffer` <=> `java.util.List`
+ * - `scala.collection.mutable.Set` <=> `java.util.Set`
+ * - `scala.collection.mutable.Map` <=> `java.util.Map`
+ * - `scala.collection.mutable.ConcurrentMap` <=> `java.util.concurrent.ConcurrentMap`
+ *
+ * In all cases, converting from a source type to a target type and back
+ * again will return the original source object, e.g.
+ * {{{
+ * import scala.collection.JavaConverters._
+ *
+ * val sl = new scala.collection.mutable.ListBuffer[Int]
+ * val jl : java.util.List[Int] = sl.asJava
+ * val sl2 : scala.collection.mutable.Buffer[Int] = jl.asScala
+ * assert(sl eq sl2)
+ * }}}
+ * The following conversions also are supported, but the
+ * direction Scala to Java is done my a more specifically named method:
+ * `asJavaCollection`, `asJavaEnumeration`, `asJavaDictionary`.
+ *
+ * - `scala.collection.Iterable` <=> `java.util.Collection`
+ * - `scala.collection.Iterator` <=> `java.util.Enumeration`
+ * - `scala.collection.mutable.Map` <=> `java.util.Dictionary`
*
- * <b>val</b> sl = <b>new</b> scala.collection.mutable.ListBuffer[Int]
- * <b>val</b> jl : java.util.List[Int] = sl.asJava
- * <b>val</b> sl2 : scala.collection.mutable.Buffer[Int] = jl.asScala
- * assert(sl eq sl2)g</pre>
- * <p>
- * <p>
- * The following conversions also are supported, but the
- * direction Scala to Java is done my a more specifically named method:
- * `asJavaCollection`, `asJavaEnumeration`, `asJavaDictionary`.
- * </p>
- * <ul>
- * <li><code>scala.collection.Iterable</code> <=> <code>java.util.Collection</code></li>
- * <li><code>scala.collection.Iterator</code> <=> <code>java.util.Enumeration</code></li>
- * <li><code>scala.collection.mutable.Map</code> <=> <code>java.util.Dictionary</code></li>
- * </ul>
* In addition, the following one way conversions are provided via `asJava`:
- * </p>
- * <ul>
- * <li><code>scala.collection.Seq</code> => <code>java.util.List</code></li>
- * <li><code>scala.collection.mutable.Seq</code> => <code>java.util.List</code></li>
- * <li><code>scala.collection.Set</code> => <code>java.util.Set</code></li>
- * <li><code>scala.collection.Map</code> => <code>java.util.Map</code></li>
- * </ul>
+ *
+ * - `scala.collection.Seq` => `java.util.List`
+ * - `scala.collection.mutable.Seq` => `java.util.List`
+ * - `scala.collection.Set` => `java.util.Set`
+ * - `scala.collection.Map` => `java.util.Map`
*
* @author Martin Odersky
* @since 2.8.1
*/
-object JavaConverters {
- import java.{ lang => jl, util => ju }
- import java.util.{ concurrent => juc }
- import JavaConversions._
-
- // TODO: I cleaned all this documentation up in JavaConversions, but the
- // documentation in here is basically the pre-cleaned-up version with minor
- // additions. Would be nice to have in one place.
-
- // Conversion decorator classes
-
- /** Generic class containing the `asJava` converter method */
- class AsJava[C](op: => C) {
- /** Converts a Scala collection to the corresponding Java collection */
- def asJava: C = op
- }
-
- /** Generic class containing the `asScala` converter method */
- class AsScala[C](op: => C) {
- /** Converts a Java collection to the corresponding Scala collection */
- def asScala: C = op
- }
-
- /** Generic class containing the `asJavaCollection` converter method */
- class AsJavaCollection[A](i: Iterable[A]) {
- /** Converts a Scala `Iterable` to a Java `Collection` */
- def asJavaCollection: ju.Collection[A] = JavaConversions.asJavaCollection(i)
- }
-
- /** Generic class containing the `asJavaEnumeration` converter method */
- class AsJavaEnumeration[A](i: Iterator[A]) {
- /** Converts a Scala `Iterator` to a Java `Enumeration` */
- def asJavaEnumeration: ju.Enumeration[A] = JavaConversions.asJavaEnumeration(i)
- }
-
- /** Generic class containing the `asJavaDictionary` converter method */
- class AsJavaDictionary[A, B](m : mutable.Map[A, B]) {
- /** Converts a Scala `Map` to a Java `Dictionary` */
- def asJavaDictionary: ju.Dictionary[A, B] = JavaConversions.asJavaDictionary(m)
- }
-
- // Scala => Java
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala <code>Iterator</code> to a Java <code>Iterator</code>.
- * The returned Java <code>Iterator</code> is backed by the provided Scala
- * <code>Iterator</code> and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- * <p>
- * If the Scala <code>Iterator</code> was previously obtained from an implicit or
- * explicit call of <code>asIterator(java.util.Iterator)</code> then the original
- * Java <code>Iterator</code> will be returned by the `asJava` method.
- *
- * @param i The <code>Iterator</code> to be converted.
- * @return An object with an `asJava` method that returns a Java <code>Iterator</code> view of the argument.
- */
- implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] =
- new AsJava(asJavaIterator(i))
-
- /**
- * Adds an `asJavaEnumeration` method that implicitly converts a Scala <code>Iterator</code> to a Java <code>Enumeration</code>.
- * The returned Java <code>Enumeration</code> is backed by the provided Scala
- * <code>Iterator</code> and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- * <p>
- * If the Scala <code>Iterator</code> was previously obtained from an implicit or
- * explicit call of <code>asIterator(java.util.Enumeration)</code> then the
- * original Java <code>Enumeration</code> will be returned.
- *
- * @param i The <code>Iterator</code> to be converted.
- * @return An object with an `asJavaEnumeration` method that returns a Java <code>Enumeration</code> view of the argument.
- */
- implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] =
- new AsJavaEnumeration(i)
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala <code>Iterable</code> to a Java <code>Iterable</code>.
- * The returned Java <code>Iterable</code> is backed by the provided Scala
- * <code>Iterable</code> and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- * <p>
- * If the Scala <code>Iterable</code> was previously obtained from an implicit or
- * explicit call of <code>asIterable(java.lang.Iterable)</code> then the original
- * Java <code>Iterable</code> will be returned.
- *
- * @param i The <code>Iterable</code> to be converted.
- * @return An object with an `asJavaCollection` method that returns a Java <code>Iterable</code> view of the argument.
- */
- implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] =
- new AsJava(asJavaIterable(i))
-
- /**
- * Adds an `asJavaCollection` method that implicitly converts a Scala <code>Iterable</code> to an immutable Java
- * <code>Collection</code>.
- * <p>
- * If the Scala <code>Iterable</code> was previously obtained from an implicit or
- * explicit call of <code>asSizedIterable(java.util.Collection)</code> then the original
- * Java <code>Collection</code> will be returned.
- *
- * @param i The <code>SizedIterable</code> to be converted.
- * @return An object with an `asJava` method that returns a Java <code>Collection</code> view of the argument.
- */
- implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] =
- new AsJavaCollection(i)
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable <code>Buffer</code> to a Java <code>List</code>.
- * The returned Java <code>List</code> is backed by the provided Scala
- * <code>Buffer</code> and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- * <p>
- * If the Scala <code>Buffer</code> was previously obtained from an implicit or
- * explicit call of <code>asBuffer(java.util.List)</code> then the original
- * Java <code>List</code> will be returned.
- *
- * @param b The <code>Buffer</code> to be converted.
- * @return An object with an `asJava` method that returns a Java <code>List</code> view of the argument.
- */
- implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] =
- new AsJava(bufferAsJavaList(b))
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable <code>Seq</code> to a Java <code>List</code>.
- * The returned Java <code>List</code> is backed by the provided Scala
- * <code>Seq</code> and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- * <p>
- * If the Scala <code>Seq</code> was previously obtained from an implicit or
- * explicit call of <code>asSeq(java.util.List)</code> then the original
- * Java <code>List</code> will be returned.
- *
- * @param b The <code>Seq</code> to be converted.
- * @return An object with an `asJava` method that returns a Java <code>List</code> view of the argument.
- */
- implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] =
- new AsJava(mutableSeqAsJavaList(b))
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala <code>Seq</code> to a Java <code>List</code>.
- * The returned Java <code>List</code> is backed by the provided Scala
- * <code>Seq</code> and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- * <p>
- * If the Scala <code>Seq</code> was previously obtained from an implicit or
- * explicit call of <code>asSeq(java.util.List)</code> then the original
- * Java <code>List</code> will be returned.
- *
- * @param b The <code>Seq</code> to be converted.
- * @return An object with an `asJava` method that returns a Java <code>List</code> view of the argument.
- */
- implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] =
- new AsJava(seqAsJavaList(b))
+object JavaConverters extends DecorateAsJava with DecorateAsScala {
+ @deprecated("Don't access these decorators directly.", "2.10.0")
+ type AsJava[A] = Decorators.AsJava[A]
+ @deprecated("Don't access these decorators directly.", "2.10.0")
+ type AsScala[A] = Decorators.AsScala[A]
+ @deprecated("Don't access these decorators directly.", "2.10.0")
+ type AsJavaCollection[A] = Decorators.AsJavaCollection[A]
+ @deprecated("Don't access these decorators directly.", "2.10.0")
+ type AsJavaEnumeration[A] = Decorators.AsJavaEnumeration[A]
+ @deprecated("Don't access these decorators directly.", "2.10.0")
+ type AsJavaDictionary[A, B] = Decorators.AsJavaDictionary[A, B]
@deprecated("Use bufferAsJavaListConverter instead", "2.9.0")
def asJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = bufferAsJavaListConverter(b)
+
@deprecated("Use mutableSeqAsJavaListConverter instead", "2.9.0")
def asJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = mutableSeqAsJavaListConverter(b)
+
@deprecated("Use seqAsJavaListConverter instead", "2.9.0")
def asJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = seqAsJavaListConverter(b)
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable <code>Set</code> to a Java <code>Set</code>.
- * The returned Java <code>Set</code> is backed by the provided Scala
- * <code>Set</code> and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- * <p>
- * If the Scala <code>Set</code> was previously obtained from an implicit or
- * explicit call of <code>asSet(java.util.Set)</code> then the original
- * Java <code>Set</code> will be returned.
- *
- * @param s The <code>Set</code> to be converted.
- * @return An object with an `asJava` method that returns a Java <code>Set</code> view of the argument.
- */
- implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] =
- new AsJava(mutableSetAsJavaSet(s))
-
@deprecated("Use mutableSetAsJavaSetConverter instead", "2.9.0")
def asJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = mutableSetAsJavaSetConverter(s)
- /**
- * Adds an `asJava` method that implicitly converts a Scala <code>Set</code> to a Java <code>Set</code>.
- * The returned Java <code>Set</code> is backed by the provided Scala
- * <code>Set</code> and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- * <p>
- * If the Scala <code>Set</code> was previously obtained from an implicit or
- * explicit call of <code>asSet(java.util.Set)</code> then the original
- * Java <code>Set</code> will be returned.
- *
- * @param s The <code>Set</code> to be converted.
- * @return An object with an `asJava` method that returns a Java <code>Set</code> view of the argument.
- */
- implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] =
- new AsJava(setAsJavaSet(s))
-
@deprecated("Use setAsJavaSetConverter instead", "2.9.0")
def asJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = setAsJavaSetConverter(s)
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable <code>Map</code> to a Java <code>Map</code>.
- * The returned Java <code>Map</code> is backed by the provided Scala
- * <code>Map</code> and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- * <p>
- * If the Scala <code>Map</code> was previously obtained from an implicit or
- * explicit call of <code>asMap(java.util.Map)</code> then the original
- * Java <code>Map</code> will be returned.
- *
- * @param m The <code>Map</code> to be converted.
- * @return An object with an `asJava` method that returns a Java <code>Map</code> view of the argument.
- */
- implicit def mutableMapAsJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] =
- new AsJava(mutableMapAsJavaMap(m))
-
@deprecated("use mutableMapAsJavaMapConverter instead", "2.9.0")
def asJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] = mutableMapAsJavaMapConverter(m)
- /**
- * Adds an `asJavaDictionary` method that implicitly converts a Scala mutable <code>Map</code> to a Java <code>Dictionary</code>.
- * The returned Java <code>Dictionary</code> is backed by the provided Scala
- * <code>Dictionary</code> and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- * <p>
- * If the Scala <code>Dictionary</code> was previously obtained from an implicit or
- * explicit call of <code>asMap(java.util.Dictionary)</code> then the original
- * Java <code>Dictionary</code> will be returned.
- *
- * @param m The <code>Map</code> to be converted.
- * @return An object with an `asJavaDictionary` method that returns a Java <code>Dictionary</code> view of the argument.
- */
- implicit def asJavaDictionaryConverter[A, B](m : mutable.Map[A, B]): AsJavaDictionary[A, B] =
- new AsJavaDictionary(m)
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala <code>Map</code> to a Java <code>Map</code>.
- * The returned Java <code>Map</code> is backed by the provided Scala
- * <code>Map</code> and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- * <p>
- * If the Scala <code>Map</code> was previously obtained from an implicit or
- * explicit call of <code>asMap(java.util.Map)</code> then the original
- * Java <code>Map</code> will be returned.
- *
- * @param m The <code>Map</code> to be converted.
- * @return An object with an `asJava` method that returns a Java <code>Map</code> view of the argument.
- */
- implicit def mapAsJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] =
- new AsJava(mapAsJavaMap(m))
-
@deprecated("Use mapAsJavaMapConverter instead", "2.9.0")
def asJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] = mapAsJavaMapConverter(m)
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable `ConcurrentMap` to a Java `ConcurrentMap`.
- * The returned Java `ConcurrentMap` is backed by the provided Scala `ConcurrentMap`
- * and any side-effects of using it via the Java interface will be visible
- * via the Scala interface and vice versa.
- * <p>
- * If the Scala <code>ConcurrentMap</code> was previously obtained from an implicit or
- * explicit call of <code>asConcurrentMap(java.util.concurrect.ConcurrentMap)</code> then the original
- * Java <code>ConcurrentMap</code> will be returned.
- *
- * @param m The <code>ConcurrentMap</code> to be converted.
- * @return An object with an `asJava` method that returns a Java <code>ConcurrentMap</code> view of the argument.
- */
- implicit def asJavaConcurrentMapConverter[A, B](m: mutable.ConcurrentMap[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
- new AsJava(asJavaConcurrentMap(m))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java <code>Iterator</code> to a Scala <code>Iterator</code>.
- * The returned Scala <code>Iterator</code> is backed by the provided Java
- * <code>Iterator</code> and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- * <p>
- * If the Java <code>Iterator</code> was previously obtained from an implicit or
- * explicit call of <code>asIterator(scala.collection.Iterator)</code> then the original
- * Scala <code>Iterator</code> will be returned.
- *
- * @param i The <code>Iterator</code> to be converted.
- * @return An object with an `asScala` method that returns a Scala <code>Iterator</code> view of the argument.
- */
- implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] =
- new AsScala(asScalaIterator(i))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java <code>Enumeration</code> to a Scala <code>Iterator</code>.
- * The returned Scala <code>Iterator</code> is backed by the provided Java
- * <code>Enumeration</code> and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- * <p>
- * If the Java <code>Enumeration</code> was previously obtained from an implicit or
- * explicit call of <code>asEnumeration(scala.collection.Iterator)</code> then the
- * original Scala <code>Iterator</code> will be returned.
- *
- * @param i The <code>Enumeration</code> to be converted.
- * @return An object with an `asScala` method that returns a Scala <code>Iterator</code> view of the argument.
- */
- implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] =
- new AsScala(enumerationAsScalaIterator(i))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java <code>Iterable</code> to a Scala <code>Iterable</code>.
- * The returned Scala <code>Iterable</code> is backed by the provided Java
- * <code>Iterable</code> and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- * <p>
- * If the Java <code>Iterable</code> was previously obtained from an implicit or
- * explicit call of <code>asIterable(scala.collection.Iterable)</code> then the original
- * Scala <code>Iterable</code> will be returned.
- *
- * @param i The <code>Iterable</code> to be converted.
- * @return An object with an `asScala` method that returns a Scala <code>Iterable</code> view of the argument.
- */
- implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] =
- new AsScala(iterableAsScalaIterable(i))
-
@deprecated("Use iterableAsScalaIterableConverter instead", "2.9.0")
def asScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = iterableAsScalaIterableConverter(i)
- /**
- * Adds an `asScala` method that implicitly converts a Java <code>Collection</code> to an Scala <code>Iterable</code>.
- * <p>
- * If the Java <code>Collection</code> was previously obtained from an implicit or
- * explicit call of <code>asCollection(scala.collection.SizedIterable)</code> then
- * the original Scala <code>SizedIterable</code> will be returned.
- *
- * @param i The <code>Collection</code> to be converted.
- * @return An object with an `asScala` method that returns a Scala <code>SizedIterable</code> view of the argument.
- */
- implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] =
- new AsScala(collectionAsScalaIterable(i))
-
@deprecated("Use collectionAsScalaIterableConverter instead", "2.9.0")
def asScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = collectionAsScalaIterableConverter(i)
- /**
- * Adds an `asScala` method that implicitly converts a Java <code>List</code> to a Scala mutable <code>Buffer</code>.
- * The returned Scala <code>Buffer</code> is backed by the provided Java
- * <code>List</code> and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- * <p>
- * If the Java <code>List</code> was previously obtained from an implicit or
- * explicit call of <code>asList(scala.collection.mutable.Buffer)</code> then the original
- * Scala <code>Buffer</code> will be returned.
- *
- * @param l The <code>List</code> to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable <code>Buffer</code> view of the argument.
- */
- implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] =
- new AsScala(asScalaBuffer(l))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java <code>Set</code> to a Scala mutable <code>Set</code>.
- * The returned Scala <code>Set</code> is backed by the provided Java
- * <code>Set</code> and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- * <p>
- * If the Java <code>Set</code> was previously obtained from an implicit or
- * explicit call of <code>asSet(scala.collection.mutable.Set)</code> then the original
- * Scala <code>Set</code> will be returned.
- *
- * @param s The <code>Set</code> to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable <code>Set</code> view of the argument.
- */
- implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] =
- new AsScala(asScalaSet(s))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java <code>Map</code> to a Scala mutable <code>Map</code>.
- * The returned Scala <code>Map</code> is backed by the provided Java
- * <code>Map</code> and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- * <p>
- * If the Java <code>Map</code> was previously obtained from an implicit or
- * explicit call of <code>asMap(scala.collection.mutable.Map)</code> then the original
- * Scala <code>Map</code> will be returned.
- *
- * @param m The <code>Map</code> to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable <code>Map</code> view of the argument.
- */
- implicit def mapAsScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] =
- new AsScala(mapAsScalaMap(m))
-
@deprecated("Use mapAsScalaMapConverter instead", "2.9.0")
def asScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] = mapAsScalaMapConverter(m)
- /**
- * Adds an `asScala` method that implicitly converts a Java <code>ConcurrentMap</code> to a Scala mutable <code>ConcurrentMap</code>.
- * The returned Scala <code>ConcurrentMap</code> is backed by the provided Java
- * <code>ConcurrentMap</code> and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- * <p>
- * If the Java <code>ConcurrentMap</code> was previously obtained from an implicit or
- * explicit call of <code>asConcurrentMap(scala.collection.mutable.ConcurrentMap)</code> then the original
- * Scala <code>ConcurrentMap</code> will be returned.
- *
- * @param m The <code>ConcurrentMap</code> to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable <code>ConcurrentMap</code> view of the argument.
- */
- implicit def asScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[mutable.ConcurrentMap[A, B]] =
- new AsScala(asScalaConcurrentMap(m))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java <code>Dictionary</code> to a Scala mutable <code>Map[String, String]</code>.
- * The returned Scala <code>Map[String, String]</code> is backed by the provided Java
- * <code>Dictionary</code> and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * @param m The <code>Dictionary</code> to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable <code>Map[String, String]</code> view of the argument.
- */
- implicit def dictionaryAsScalaMapConverter[A, B](p: ju.Dictionary[A, B]): AsScala[mutable.Map[A, B]] =
- new AsScala(dictionaryAsScalaMap(p))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java <code>Properties</code> to a Scala mutable <code>Map[String, String]</code>.
- * The returned Scala <code>Map[String, String]</code> is backed by the provided Java
- * <code>Properties</code> and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * @param m The <code>Properties</code> to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable <code>Map[String, String]</code> view of the argument.
- */
- implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] =
- new AsScala(propertiesAsScalaMap(p))
-
@deprecated("Use propertiesAsScalaMapConverter instead", "2.9.0")
- def asScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] =
- propertiesAsScalaMapConverter(p)
-
+ def asScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = propertiesAsScalaMapConverter(p)
}
diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala
index f1fa919..e52a193 100644
--- a/src/library/scala/collection/LinearSeq.scala
+++ b/src/library/scala/collection/LinearSeq.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -20,14 +20,15 @@ trait LinearSeq[+A] extends Seq[A]
with GenericTraversableTemplate[A, LinearSeq]
with LinearSeqLike[A, LinearSeq[A]] {
override def companion: GenericCompanion[LinearSeq] = LinearSeq
+ override def seq: LinearSeq[A] = this
}
/** $factoryInfo
* The current default implementation of a $Coll is a `Vector`.
* @define coll linear sequence
- * @define Coll LinearSeq
+ * @define Coll `LinearSeq`
*/
object LinearSeq extends SeqFactory[LinearSeq] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, LinearSeq[A]] = immutable.LinearSeq.newBuilder[A]
}
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index 068c634..78108a9 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,6 +13,7 @@ import generic._
import mutable.ListBuffer
import immutable.List
import scala.util.control.Breaks._
+import scala.annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]`.
*
@@ -41,16 +42,21 @@ import scala.util.control.Breaks._
* @tparam A the element type of the $coll
* @tparam Repr the type of the actual $coll containing the elements.
*/
-trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr] { self: Repr =>
+trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr] {
+ self: Repr =>
override protected[this] def thisCollection: LinearSeq[A] = this.asInstanceOf[LinearSeq[A]]
override protected[this] def toCollection(repr: Repr): LinearSeq[A] = repr.asInstanceOf[LinearSeq[A]]
+ def seq: LinearSeq[A]
+
+ override def hashCode()= scala.util.hashing.MurmurHash3.seqHash(seq) // TODO - can we get faster via "linearSeqHash" ?
+
override /*IterableLike*/
- def iterator: Iterator[A] = new Iterator[A] {
+ def iterator: Iterator[A] = new AbstractIterator[A] {
var these = self
def hasNext: Boolean = !these.isEmpty
- def next: A =
+ def next(): A =
if (hasNext) {
val result = these.head; these = these.tail; result
} else Iterator.empty.next
@@ -64,4 +70,9 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
xs
}
}
+
+ @tailrec override final def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = {
+ if (this.isEmpty) that.isEmpty
+ else that.nonEmpty && p(head, that.head) && (tail corresponds that.tail)(p)
+ }
}
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
old mode 100644
new mode 100755
index 5e0bd01..81cccea
--- a/src/library/scala/collection/LinearSeqOptimized.scala
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,6 +12,7 @@ import generic._
import mutable.ListBuffer
import immutable.List
import scala.util.control.Breaks._
+import scala.annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes
* the implementation of several methods under the assumption of fast linear access.
@@ -82,17 +83,16 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
false
}
- override /*TraversableLike*/
- def count(p: A => Boolean): Int = {
+ override /*SeqLike*/
+ def contains(elem: Any): Boolean = {
var these = this
- var cnt = 0
while (!these.isEmpty) {
- if (p(these.head)) cnt += 1
+ if (these.head == elem) return true
these = these.tail
}
- cnt
+ false
}
-
+
override /*IterableLike*/
def find(p: A => Boolean): Option[A] = {
var these = this
@@ -113,7 +113,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
}
acc
}
-
+
override /*IterableLike*/
def foldRight[B](z: B)(f: (A, B) => B): B =
if (this.isEmpty) z
@@ -248,14 +248,17 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
}
override /*SeqLike*/
- def lengthCompare(len: Int): Int = {
- var i = 0
- var these = self
- while (!these.isEmpty && i <= len) {
- i += 1
- these = these.tail
+ def lengthCompare(len: Int): Int = {
+ @tailrec def loop(i: Int, xs: Repr): Int = {
+ if (i == len)
+ if (xs.isEmpty) 0 else 1
+ else if (xs.isEmpty)
+ -1
+ else
+ loop(i + 1, xs.tail)
}
- i - len
+ if (len < 0) 1
+ else loop(0, this)
}
override /*SeqLike*/
diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala
index 71b2e7f..18ad20a 100644
--- a/src/library/scala/collection/Map.scala
+++ b/src/library/scala/collection/Map.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -33,13 +33,10 @@ trait Map[A, +B] extends Iterable[(A, B)] with GenMap[A, B] with MapLike[A, B, M
}
/** $factoryInfo
- * @define Coll Map
+ * @define Coll `Map`
* @define coll map
*/
object Map extends MapFactory[Map] {
-
- private[collection] val hashSeed = "Map".hashCode
-
def empty[A, B]: immutable.Map[A, B] = immutable.Map.empty
/** $mapCanBuildFromInfo */
@@ -48,10 +45,14 @@ object Map extends MapFactory[Map] {
/** An abstract shell used by { mutable, immutable }.Map but not by collection.Map
* because of variance issues.
*/
- abstract class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends Map[A, B] {
+ abstract class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = underlying.size
def get(key: A) = underlying.get(key) // removed in 2.9: orElse Some(default(key))
def iterator = underlying.iterator
override def default(key: A): B = d(key)
}
+
}
+
+/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
+private[scala] abstract class AbstractMap[A, +B] extends AbstractIterable[(A, B)] with Map[A, B]
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 5c2c96c..93d02a4 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,7 +11,7 @@ package scala.collection
import generic._
import mutable.{ Builder, MapBuilder }
-import annotation.{migration, bridge}
+import scala.annotation.{migration, bridge}
import parallel.ParMap
/** A template trait for maps, which associate keys with values.
@@ -91,14 +91,18 @@ self =>
* @param kv the key/value pair
* @tparam B1 the type of the value in the key/value pair.
* @return a new map with the new binding added to this map
+ *
* @usecase def + (kv: (A, B)): Map[A, B]
+ * @inheritdoc
*/
def + [B1 >: B] (kv: (A, B1)): Map[A, B1]
/** Removes a key from this map, returning a new map.
* @param key the key to be removed
* @return a new map without a binding for `key`
+ *
* @usecase def - (key: A): Map[A, B]
+ * @inheritdoc
*/
def - (key: A): This
@@ -115,7 +119,9 @@ self =>
* @tparam B1 the result type of the default computation.
* @return the value associated with `key` if it exists,
* otherwise the result of the `default` computation.
+ *
* @usecase def getOrElse(key: A, default: => B): B
+ * @inheritdoc
*/
def getOrElse[B1 >: B](key: A, default: => B1): B1 = get(key) match {
case Some(v) => v
@@ -141,10 +147,7 @@ self =>
* @param key the key
* @return `true` if there is a binding for `key` in this map, `false` otherwise.
*/
- def contains(key: A): Boolean = get(key) match {
- case None => false
- case Some(_) => true
- }
+ def contains(key: A): Boolean = get(key).isDefined
/** Tests whether this map contains a binding for a key. This method,
* which implements an abstract method of trait `PartialFunction`,
@@ -162,20 +165,20 @@ self =>
/** The implementation class of the set returned by `keySet`.
*/
- protected class DefaultKeySet extends Set[A] {
+ protected class DefaultKeySet extends AbstractSet[A] with Set[A] with Serializable {
def contains(key : A) = self.contains(key)
def iterator = keysIterator
def + (elem: A): Set[A] = (Set[A]() ++ this + elem).asInstanceOf[Set[A]] // !!! concrete overrides abstract problem
def - (elem: A): Set[A] = (Set[A]() ++ this - elem).asInstanceOf[Set[A]] // !!! concrete overrides abstract problem
override def size = self.size
- override def foreach[C](f: A => C) = for ((k, v) <- self) f(k)
+ override def foreach[C](f: A => C) = self.keysIterator foreach f
}
/** Creates an iterator for all keys.
*
* @return an iterator over all keys.
*/
- def keysIterator: Iterator[A] = new Iterator[A] {
+ def keysIterator: Iterator[A] = new AbstractIterator[A] {
val iter = self.iterator
def hasNext = iter.hasNext
def next() = iter.next._1
@@ -197,17 +200,17 @@ self =>
/** The implementation class of the iterable returned by `values`.
*/
- protected class DefaultValuesIterable extends Iterable[B] {
+ protected class DefaultValuesIterable extends AbstractIterable[B] with Iterable[B] with Serializable {
def iterator = valuesIterator
override def size = self.size
- override def foreach[C](f: B => C) = for ((k, v) <- self) f(v)
+ override def foreach[C](f: B => C) = self.valuesIterator foreach f
}
/** Creates an iterator for all values in this map.
*
* @return an iterator over all values that are associated with some key in this map.
*/
- def valuesIterator: Iterator[B] = new Iterator[B] {
+ def valuesIterator: Iterator[B] = new AbstractIterator[B] {
val iter = self.iterator
def hasNext = iter.hasNext
def next() = iter.next._2
@@ -224,24 +227,21 @@ self =>
def default(key: A): B =
throw new NoSuchElementException("key not found: " + key)
- /** Filters this map by retaining only keys satisfying a predicate.
- * @param p the predicate used to test keys
- * @return an immutable map consisting only of those key value pairs of this map where the key satisfies
- * the predicate `p`. The resulting map wraps the original map without copying any elements.
- */
- def filterKeys(p: A => Boolean): Map[A, B] = new DefaultMap[A, B] {
+ protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] {
override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def iterator = self.iterator.filter(kv => p(kv._1))
override def contains(key: A) = self.contains(key) && p(key)
def get(key: A) = if (!p(key)) None else self.get(key)
}
- /** Transforms this map by applying a function to every retrieved value.
- * @param f the function used to transform values of this map.
- * @return a map view which maps every key of this map
- * to `f(this(key))`. The resulting map wraps the original map without copying any elements.
+ /** Filters this map by retaining only keys satisfying a predicate.
+ * @param p the predicate used to test keys
+ * @return an immutable map consisting only of those key value pairs of this map where the key satisfies
+ * the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
- def mapValues[C](f: B => C): Map[A, C] = new DefaultMap[A, C] {
+ def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p)
+
+ protected class MappedValues[C](f: B => C) extends AbstractMap[A, C] with DefaultMap[A, C] {
override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
override def size = self.size
@@ -249,8 +249,12 @@ self =>
def get(key: A) = self.get(key).map(f)
}
- @deprecated("use `mapValues' instead", "2.8.0")
- def mapElements[C](f: B => C) = mapValues(f)
+ /** Transforms this map by applying a function to every retrieved value.
+ * @param f the function used to transform values of this map.
+ * @return a map view which maps every key of this map
+ * to `f(this(key))`. The resulting map wraps the original map without copying any elements.
+ */
+ def mapValues[C](f: B => C): Map[A, C] = new MappedValues(f)
// The following 5 operations (updated, two times +, two times ++) should really be
// generic, returning This[B]. We need better covariance support to express that though.
@@ -261,7 +265,9 @@ self =>
* @param value the value
* @tparam B1 the type of the added value
* @return A new map with the new key/value mapping added to this map.
+ *
* @usecase def updated(key: A, value: B): Map[A, B]
+ * @inheritdoc
*/
def updated [B1 >: B](key: A, value: B1): Map[A, B1] = this + ((key, value))
@@ -275,30 +281,31 @@ self =>
* @param kvs the remaining key/value pairs
* @tparam B1 the type of the added values
* @return a new map with the given bindings added to this map
+ *
* @usecase def + (kvs: (A, B)*): Map[A, B]
- * @param the key/value pairs
+ * @inheritdoc
+ * @param kvs the key/value pairs
*/
def + [B1 >: B] (kv1: (A, B1), kv2: (A, B1), kvs: (A, B1) *): Map[A, B1] =
this + kv1 + kv2 ++ kvs
/** Adds all key/value pairs in a traversable collection to this map, returning a new map.
*
- * @param kvs the collection containing the added key/value pairs
+ * @param xs the collection containing the added key/value pairs
* @tparam B1 the type of the added values
* @return a new map with the given bindings added to this map
+ *
* @usecase def ++ (xs: Traversable[(A, B)]): Map[A, B]
+ * @inheritdoc
*/
def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] =
((repr: Map[A, B1]) /: xs.seq) (_ + _)
- @bridge
- def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1] = ++(xs: GenTraversableOnce[(A, B1)])
-
- /** Returns a new map with all key/value pairs for which the predicate
+ /** Returns a new map obtained by removing all key/value pairs for which the predicate
* `p` returns `true`.
*
- * '''Note:''' This method works by successively removing elements fro which the
- * predicate is false from this set.
+ * '''Note:''' This method works by successively removing elements for which the
+ * predicate is true from this set.
* If removal is slow, or you expect that most elements of the set
* will be removed, you might consider using `filter`
* with a negated predicate instead.
@@ -312,7 +319,7 @@ self =>
res
}
- /** Overridden for efficiency. */
+ /* Overridden for efficiency. */
override def toSeq: Seq[(A, B)] = toBuffer[(A, B)]
override def toBuffer[C >: (A, B)]: mutable.Buffer[C] = {
val result = new mutable.ArrayBuffer[C](size)
diff --git a/src/library/scala/collection/MapProxy.scala b/src/library/scala/collection/MapProxy.scala
index 9b755d6..e85d306 100644
--- a/src/library/scala/collection/MapProxy.scala
+++ b/src/library/scala/collection/MapProxy.scala
@@ -1,17 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
-/** This is a simple wrapper class for <a href="Map.html"
- * target="contentFrame"><code>scala.collection.Map</code></a>.
+/** This is a simple wrapper class for [[scala.collection.Map]].
* It is most useful for assembling customized map abstractions
* dynamically using object composition and forwarding.
*
diff --git a/src/library/scala/collection/MapProxyLike.scala b/src/library/scala/collection/MapProxyLike.scala
index 7352c01..44b39f6 100644
--- a/src/library/scala/collection/MapProxyLike.scala
+++ b/src/library/scala/collection/MapProxyLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/Parallel.scala b/src/library/scala/collection/Parallel.scala
index 930c339..6731f74 100644
--- a/src/library/scala/collection/Parallel.scala
+++ b/src/library/scala/collection/Parallel.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/Parallelizable.scala b/src/library/scala/collection/Parallelizable.scala
index 59b37af..d97c44a 100644
--- a/src/library/scala/collection/Parallelizable.scala
+++ b/src/library/scala/collection/Parallelizable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -17,7 +17,7 @@ import parallel.Combiner
* @tparam A the type of the elements in the collection
* @tparam ParRepr the actual type of the collection, which has to be parallel
*/
-trait Parallelizable[+A, +ParRepr <: Parallel] {
+trait Parallelizable[+A, +ParRepr <: Parallel] extends Any {
def seq: TraversableOnce[A]
diff --git a/src/library/scala/collection/Seq.scala b/src/library/scala/collection/Seq.scala
index a4582e9..33e66c0 100644
--- a/src/library/scala/collection/Seq.scala
+++ b/src/library/scala/collection/Seq.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
import generic._
@@ -29,21 +27,14 @@ trait Seq[+A] extends PartialFunction[Int, A]
/** $factoryInfo
* The current default implementation of a $Coll is a `List`.
* @define coll sequence
- * @define Coll Seq
+ * @define Coll `Seq`
*/
object Seq extends SeqFactory[Seq] {
-
- private[collection] val hashSeed = "Seq".hashCode
-
/** $genericCanBuildFromInfo */
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, Seq[A]] = immutable.Seq.newBuilder[A]
-
- @deprecated("use View instead", "2.8.0")
- type Projection[A] = SeqView[A, Coll]
-
- @deprecated("use Seq(value) instead", "2.8.0")
- def singleton[A](value: A) = Seq(value)
}
+/** Explicit instantiation of the `Seq` trait to reduce class file size in subclasses. */
+private[scala] abstract class AbstractSeq[+A] extends AbstractIterable[A] with Seq[A]
diff --git a/src/library/scala/collection/SeqExtractors.scala b/src/library/scala/collection/SeqExtractors.scala
new file mode 100644
index 0000000..20ea7f5
--- /dev/null
+++ b/src/library/scala/collection/SeqExtractors.scala
@@ -0,0 +1,23 @@
+package scala.collection
+
+/** An extractor used to head/tail deconstruct sequences. */
+object +: {
+ def unapply[T,Coll <: SeqLike[T, Coll]](
+ t: Coll with SeqLike[T, Coll]): Option[(T, Coll)] =
+ if(t.isEmpty) None
+ else Some(t.head -> t.tail)
+}
+
+/** An extractor used to init/last deconstruct sequences. */
+object :+ {
+ /** Splits a sequence into init :+ tail.
+ * @return Some((init, tail)) if sequence is non-empty. None otherwise.
+ */
+ def unapply[T,Coll <: SeqLike[T, Coll]](
+ t: Coll with SeqLike[T, Coll]): Option[(Coll, T)] =
+ if(t.isEmpty) None
+ else Some(t.init -> t.last)
+}
+
+// Dummy to fool ant
+private abstract class SeqExtractors
\ No newline at end of file
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 37caf82..1be0dba 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -1,19 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import mutable.{ ListBuffer, ArraySeq }
import immutable.{ List, Range }
import generic._
import parallel.ParSeq
-import annotation.bridge
-import scala.math.Ordering
+import scala.math.{ min, max, Ordering }
/** A template trait for sequences of type `Seq[A]`
* $seqInfo
@@ -21,8 +21,8 @@ import scala.math.Ordering
* @define seqInfo
* Sequences are special cases of iterable collections of class `Iterable`.
* Unlike iterables, sequences always have a defined order of elements.
- * Sequences provide a method `apply` for indexing. Indices range from `0` up the the `length` of
- * a sequence. Sequences support a number to find occurrences of elements or subsequences, including
+ * Sequences provide a method `apply` for indexing. Indices range from `0` up to the `length` of
+ * a sequence. Sequences support a number of methods to find occurrences of elements or subsequences, including
* `segmentLength`, `prefixLength`, `indexWhere`, `indexOf`, `lastIndexWhere`, `lastIndexOf`,
* `startsWith`, `endsWith`, `indexOfSlice`.
*
@@ -46,7 +46,7 @@ import scala.math.Ordering
* @version 1.0, 16/07/2003
* @since 2.8
*
- * @define Coll Seq
+ * @define Coll `Seq`
* @define coll sequence
* @define thatinfo the class of the returned collection. Where possible, `That` is
* the same class as the current collection class `Repr`, but this
@@ -59,7 +59,7 @@ import scala.math.Ordering
* @define orderDependent
* @define orderDependentFold
*/
-trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr] with Parallelizable[A, ParSeq[A]] { self =>
+trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[A, Repr] with Parallelizable[A, ParSeq[A]] { self =>
override protected[this] def thisCollection: Seq[A] = this.asInstanceOf[Seq[A]]
override protected[this] def toCollection(repr: Repr): Seq[A] = repr.asInstanceOf[Seq[A]]
@@ -84,15 +84,21 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
* if computing `length` is cheap.
*/
def lengthCompare(len: Int): Int = {
- var i = 0
- val it = iterator
- while (it.hasNext && i <= len) {
- it.next()
- i += 1
+ if (len < 0) 1
+ else {
+ var i = 0
+ val it = iterator
+ while (it.hasNext) {
+ if (i == len) return if (it.hasNext) 1 else 0
+ it.next()
+ i += 1
+ }
+ i - len
}
- i - len
}
+ override /*IterableLike*/ def isEmpty: Boolean = lengthCompare(0) == 0
+
/** The size of this $coll, equivalent to `length`.
*
* $willNotTerminateInf
@@ -118,11 +124,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
-1
}
- /** Returns index of the first element satisfying a predicate, or `-1`.
- */
- @deprecated("Use indexWhere(p) instead.", "2.8.0")
- def findIndexOf(p: A => Boolean): Int = indexWhere(p)
-
def lastIndexWhere(p: A => Boolean, end: Int): Int = {
var i = length - 1
val it = reverseIterator
@@ -148,16 +149,17 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
if (n < 0 || n > size) Iterator.empty
else new CombinationsItr(n)
- private class PermutationsItr extends Iterator[Repr] {
+ private class PermutationsItr extends AbstractIterator[Repr] {
private[this] val (elms, idxs) = init()
private var _hasNext = true
def hasNext = _hasNext
- def next: Repr = {
+ def next(): Repr = {
if (!hasNext)
Iterator.empty.next
- val result = (self.newBuilder ++= elms).result
+ val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms
+ val result = (self.newBuilder ++= forcedElms).result
var i = idxs.length - 2
while(i >= 0 && idxs(i) >= idxs(i+1))
i -= 1
@@ -189,13 +191,13 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
private[this] def init() = {
val m = mutable.HashMap[A, Int]()
- val (es, is) = thisCollection map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2) unzip
+ val (es, is) = (thisCollection map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip
(es.toBuffer, is.toArray)
}
}
- private class CombinationsItr(n: Int) extends Iterator[Repr] {
+ private class CombinationsItr(n: Int) extends AbstractIterator[Repr] {
// generating all nums such that:
// (1) nums(0) + .. + nums(length-1) = n
// (2) 0 <= nums(i) <= cnts(i), where 0 <= i <= cnts.length-1
@@ -204,7 +206,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
private var _hasNext = true
def hasNext = _hasNext
- def next: Repr = {
+ def next(): Repr = {
if (!hasNext)
Iterator.empty.next
@@ -244,7 +246,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
val m = mutable.HashMap[A, Int]()
// e => (e, weight(e))
- val (es, is) = thisCollection map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2) unzip
+ val (es, is) = (thisCollection map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip
val cs = new Array[Int](m.size)
is foreach (i => cs(i) += 1)
val ns = new Array[Int](cs.length)
@@ -290,9 +292,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
*/
def reverseIterator: Iterator[A] = toCollection(reverse).iterator
- @deprecated("use `reverseIterator' instead", "2.8.0")
- def reversedElements = reverseIterator
-
def startsWith[B](that: GenSeq[B], offset: Int): Boolean = {
val i = this.iterator drop offset
val j = that.iterator
@@ -303,9 +302,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
!j.hasNext
}
- @bridge
- def startsWith[B](that: Seq[B], offset: Int): Boolean = startsWith(that: GenSeq[B], offset)
-
def endsWith[B](that: GenSeq[B]): Boolean = {
val i = this.iterator.drop(length - that.length)
val j = that.iterator
@@ -316,10 +312,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
!j.hasNext
}
- @bridge
- def endsWith[B](that: Seq[B]): Boolean = endsWith(that: GenSeq[B])
-
-
/** Finds first index where this $coll contains a given sequence as a slice.
* $mayNotTerminateInf
* @param that the sequence to test
@@ -328,9 +320,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
*/
def indexOfSlice[B >: A](that: GenSeq[B]): Int = indexOfSlice(that, 0)
- @bridge
- def indexOfSlice[B >: A](that: Seq[B]): Int = indexOfSlice(that: GenSeq[B])
-
/** Finds first index after or at a start index where this $coll contains a given sequence as a slice.
* $mayNotTerminateInf
* @param that the sequence to test
@@ -339,8 +328,15 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
* match the elements of sequence `that`, or `-1` of no such subsequence exists.
*/
def indexOfSlice[B >: A](that: GenSeq[B], from: Int): Int =
- if (this.hasDefiniteSize && that.hasDefiniteSize)
- SeqLike.indexOf(thisCollection, 0, length, that.seq, 0, that.length, from)
+ if (this.hasDefiniteSize && that.hasDefiniteSize) {
+ val l = length
+ val tl = that.length
+ val clippedFrom = math.max(0, from)
+ if (from > l) -1
+ else if (tl < 1) clippedFrom
+ else if (l < tl) -1
+ else SeqLike.kmpSearch(thisCollection, clippedFrom, l, that.seq, 0, tl, true)
+ }
else {
var i = from
var s: Seq[A] = thisCollection drop i
@@ -354,9 +350,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
-1
}
- @bridge
- def indexOfSlice[B >: A](that: Seq[B], from: Int): Int = indexOfSlice(that: GenSeq[B], from)
-
/** Finds last index where this $coll contains a given sequence as a slice.
* $willNotTerminateInf
* @param that the sequence to test
@@ -365,20 +358,22 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
*/
def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = lastIndexOfSlice(that, length)
- @bridge
- def lastIndexOfSlice[B >: A](that: Seq[B]): Int = lastIndexOfSlice(that: GenSeq[B])
-
/** Finds last index before or at a given end index where this $coll contains a given sequence as a slice.
* @param that the sequence to test
* @param end the end index
* @return the last index `<= end` such that the elements of this $coll starting at this index
* match the elements of sequence `that`, or `-1` of no such subsequence exists.
*/
- def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int =
- SeqLike.lastIndexOf(thisCollection, 0, length, that.seq, 0, that.length, end)
-
- @bridge
- def lastIndexOfSlice[B >: A](that: Seq[B], end: Int): Int = lastIndexOfSlice(that: GenSeq[B], end)
+ def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = {
+ val l = length
+ val tl = that.length
+ val clippedL = math.min(l-tl, end)
+
+ if (end < 0) -1
+ else if (tl < 1) clippedL
+ else if (l < tl) -1
+ else SeqLike.kmpSearch(thisCollection, 0, clippedL+tl, that.seq, 0, tl, false)
+ }
/** Tests whether this $coll contains a given sequence as a slice.
* $mayNotTerminateInf
@@ -388,27 +383,17 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
*/
def containsSlice[B](that: GenSeq[B]): Boolean = indexOfSlice(that) != -1
- @bridge
- def containsSlice[B](that: Seq[B]): Boolean = containsSlice(that: GenSeq[B])
-
/** Tests whether this $coll contains a given value as an element.
* $mayNotTerminateInf
*
* @param elem the element to test.
- * @return `true` if this $coll has an element that is
- * is equal (wrt `==`) to `elem`, `false` otherwise.
+ * @return `true` if this $coll has an element that is equal (as
+ * determined by `==`) to `elem`, `false` otherwise.
*/
def contains(elem: Any): Boolean = exists (_ == elem)
/** Produces a new sequence which contains all elements of this $coll and also all elements of
* a given sequence. `xs union ys` is equivalent to `xs ++ ys`.
- * $willNotTerminateInf
- *
- * Another way to express this
- * is that `xs union ys` computes the order-presevring multi-set union of `xs` and `ys`.
- * `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets.
- *
- * $willNotTerminateInf
*
* @param that the sequence to add.
* @tparam B the element type of the returned $coll.
@@ -417,30 +402,39 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
* @return a new collection of type `That` which contains all elements of this $coll
* followed by all elements of `that`.
* @usecase def union(that: Seq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
+ * @inheritdoc
+ *
+ * Another way to express this
+ * is that `xs union ys` computes the order-presevring multi-set union of `xs` and `ys`.
+ * `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets.
+ *
+ * $willNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
*/
override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That =
this ++ that
/** Computes the multiset difference between this $coll and another sequence.
- * $willNotTerminateInf
*
* @param that the sequence of elements to remove
* @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
* @return a new collection of type `That` which contains all elements of this $coll
* except some of occurrences of elements that also appear in `that`.
* If an element value `x` appears
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
* part of the result, but any following occurrences will.
* @usecase def diff(that: Seq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * except some of occurrences of elements that also appear in `that`.
- * If an element value `x` appears
- * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
- * part of the result, but any following occurrences will.
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * except some of occurrences of elements that also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
+ * part of the result, but any following occurrences will.
*/
def diff[B >: A](that: GenSeq[B]): Repr = {
val occ = occCounts(that.seq)
@@ -451,27 +445,25 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
b.result
}
- @bridge
- def diff[B >: A](that: Seq[B]): Repr = diff(that: GenSeq[B])
-
/** Computes the multiset intersection between this $coll and another sequence.
- * $mayNotTerminateInf
*
* @param that the sequence of elements to intersect with.
* @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
* @return a new collection of type `That` which contains all elements of this $coll
* which also appear in `that`.
* If an element value `x` appears
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
* in the result, but any following occurrences will be omitted.
* @usecase def intersect(that: Seq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * which also appear in `that`.
- * If an element value `x` appears
- * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
- * in the result, but any following occurrences will be omitted.
+ * @inheritdoc
+ *
+ * $mayNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
*/
def intersect[B >: A](that: GenSeq[B]): Repr = {
val occ = occCounts(that.seq)
@@ -484,9 +476,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
b.result
}
- @bridge
- def intersect[B >: A](that: Seq[B]): Repr = intersect(that: GenSeq[B])
-
private def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = {
val occ = new mutable.HashMap[B, Int] { override def default(k: B) = 0 }
for (y <- sq.seq) occ(y) += 1
@@ -519,10 +508,6 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
b.result
}
- @bridge
- def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That =
- this.patch(from, patch: GenSeq[B], replaced)(bf)
-
def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
val (prefix, rest) = this.splitAt(index)
@@ -568,15 +553,11 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
!i.hasNext && !j.hasNext
}
- @bridge
- def corresponds[B](that: Seq[B])(p: (A,B) => Boolean): Boolean =
- corresponds(that: GenSeq[B])(p)
-
/** Sorts this $coll according to a comparison function.
* $willNotTerminateInf
*
- * The sort is stable. That is, elements that are equal wrt `lt` appear in the
- * same order in the sorted sequence as in the original.
+ * The sort is stable. That is, elements that are equal (as determined by
+ * `lt`) appear in the same order in the sorted sequence as in the original.
*
* @param lt the comparison function which tests whether
* its first argument precedes its second argument in
@@ -592,7 +573,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
/** Sorts this $Coll according to the Ordering which results from transforming
* an implicitly given Ordering with a transformation function.
- * @see scala.math.Ordering
+ * @see [[scala.math.Ordering]]
* $willNotTerminateInf
* @param f the transformation function mapping elements
* to some other domain `B`.
@@ -614,10 +595,10 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
/** Sorts this $coll according to an Ordering.
*
- * The sort is stable. That is, elements that are equal wrt `lt` appear in the
- * same order in the sorted sequence as in the original.
+ * The sort is stable. That is, elements that are equal (as determined by
+ * `lt`) appear in the same order in the sorted sequence as in the original.
*
- * @see scala.math.Ordering
+ * @see [[scala.math.Ordering]]
*
* @param ord the ordering to be used to compare elements.
* @return a $coll consisting of the elements of this $coll
@@ -663,89 +644,172 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr]
/* Need to override string, so that it's not the Function1's string that gets mixed in.
*/
override def toString = super[IterableLike].toString
-
- /** Returns index of the last element satisfying a predicate, or -1.
- */
- @deprecated("use `lastIndexWhere` instead", "2.8.0")
- def findLastIndexOf(p: A => Boolean): Int = lastIndexWhere(p)
-
- /** Tests whether every element of this $coll relates to the
- * corresponding element of another sequence by satisfying a test predicate.
- *
- * @param that the other sequence
- * @param p the test predicate, which relates elements from both sequences
- * @tparam B the type of the elements of `that`
- * @return `true` if both sequences have the same length and
- * `p(x, y)` is `true` for all corresponding elements `x` of this $coll
- * and `y` of `that`, otherwise `false`.
- */
- @deprecated("use `corresponds` instead", "2.8.0")
- def equalsWith[B](that: Seq[B])(f: (A,B) => Boolean): Boolean = corresponds(that)(f)
-
- /**
- * returns a projection that can be used to call non-strict <code>filter</code>,
- * <code>map</code>, and <code>flatMap</code> methods that build projections
- * of the collection.
- */
- @deprecated("use `view' instead", "2.8.0")
- override def projection = view
}
/** The companion object for trait `SeqLike`.
*/
object SeqLike {
- /** A KMP implementation, based on the undoubtedly reliable wikipedia entry.
+ // KMP search utilities
+
+ /** Make sure a target sequence has fast, correctly-ordered indexing for KMP.
*
- * @author paulp
- * @since 2.8
+ * @author Rex Kerr
+ * @since 2.10
+ * @param W The target sequence
+ * @param n0 The first element in the target sequence that we should use
+ * @param n1 The far end of the target sequence that we should use (exclusive)
+ * @return Target packed in an IndexedSeq (taken from iterator unless W already is an IndexedSeq)
*/
- private def KMP[B](S: Seq[B], W: Seq[B]): Option[Int] = {
- // trivial cases
- if (W.isEmpty) return Some(0)
- else if (W drop 1 isEmpty) return (S indexOf W(0)) match {
- case -1 => None
- case x => Some(x)
- }
-
- val T: Array[Int] = {
- val arr = new Array[Int](W.length)
- var pos = 2
- var cnd = 0
- arr(0) = -1
- arr(1) = 0
- while (pos < W.length) {
- if (W(pos - 1) == W(cnd)) {
- arr(pos) = cnd + 1
- pos += 1
- cnd += 1
- }
- else if (cnd > 0) {
- cnd = arr(cnd)
- }
- else {
- arr(pos) = 0
- pos += 1
- }
+ private def kmpOptimizeWord[B](W: Seq[B], n0: Int, n1: Int, forward: Boolean) = W match {
+ case iso: IndexedSeq[_] =>
+ // Already optimized for indexing--use original (or custom view of original)
+ if (forward && n0==0 && n1==W.length) iso.asInstanceOf[IndexedSeq[B]]
+ else if (forward) new AbstractSeq[B] with IndexedSeq[B] {
+ val length = n1 - n0
+ def apply(x: Int) = iso(n0 + x).asInstanceOf[B]
}
- arr
- }
+ else new AbstractSeq[B] with IndexedSeq[B] {
+ def length = n1 - n0
+ def apply(x: Int) = iso(n1 - 1 - x).asInstanceOf[B]
+ }
+ case _ =>
+ // W is probably bad at indexing. Pack in array (in correct orientation)
+ // Would be marginally faster to special-case each direction
+ new AbstractSeq[B] with IndexedSeq[B] {
+ private[this] val Warr = new Array[AnyRef](n1-n0)
+ private[this] val delta = if (forward) 1 else -1
+ private[this] val done = if (forward) n1-n0 else -1
+ val wit = W.iterator.drop(n0)
+ var i = if (forward) 0 else (n1-n0-1)
+ while (i != done) {
+ Warr(i) = wit.next.asInstanceOf[AnyRef]
+ i += delta
+ }
- var m, i = 0
- def mi = m + i
+ val length = n1 - n0
+ def apply(x: Int) = Warr(x).asInstanceOf[B]
+ }
+ }
- while (mi < S.length) {
- if (W(i) == S(mi)) {
- i += 1
- if (i == W.length)
- return Some(m)
+ /** Make a jump table for KMP search.
+ *
+ * @author paulp, Rex Kerr
+ * @since 2.10
+ * @param Wopt The target sequence, as at least an IndexedSeq
+ * @param wlen Just in case we're only IndexedSeq and not IndexedSeqOptimized
+ * @return KMP jump table for target sequence
+ */
+ private def kmpJumpTable[B](Wopt: IndexedSeq[B], wlen: Int) = {
+ val arr = new Array[Int](wlen)
+ var pos = 2
+ var cnd = 0
+ arr(0) = -1
+ arr(1) = 0
+ while (pos < wlen) {
+ if (Wopt(pos-1) == Wopt(cnd)) {
+ arr(pos) = cnd + 1
+ pos += 1
+ cnd += 1
+ }
+ else if (cnd > 0) {
+ cnd = arr(cnd)
}
else {
- m = mi - T(i)
- if (i > 0)
- i = T(i)
+ arr(pos) = 0
+ pos += 1
}
}
- None
+ arr
+ }
+
+ /** A KMP implementation, based on the undoubtedly reliable wikipedia entry.
+ * Note: I made this private to keep it from entering the API. That can be reviewed.
+ *
+ * @author paulp, Rex Kerr
+ * @since 2.10
+ * @param S Sequence that may contain target
+ * @param m0 First index of S to consider
+ * @param m1 Last index of S to consider (exclusive)
+ * @param W Target sequence
+ * @param n0 First index of W to match
+ * @param n1 Last index of W to match (exclusive)
+ * @param forward Direction of search (from beginning==true, from end==false)
+ * @return Index of start of sequence if found, -1 if not (relative to beginning of S, not m0).
+ */
+ private def kmpSearch[B](S: Seq[B], m0: Int, m1: Int, W: Seq[B], n0: Int, n1: Int, forward: Boolean): Int = {
+ // Check for redundant case when target has single valid element
+ def clipR(x: Int, y: Int) = if (x < y) x else -1
+ def clipL(x: Int, y: Int) = if (x > y) x else -1
+
+ if (n1 == n0+1) {
+ if (forward)
+ clipR(S.indexOf(W(n0), m0), m1)
+ else
+ clipL(S.lastIndexOf(W(n0), m1-1), m0-1)
+ }
+
+ // Check for redundant case when both sequences are same size
+ else if (m1-m0 == n1-n0) {
+ // Accepting a little slowness for the uncommon case.
+ if (S.view.slice(m0, m1) == W.view.slice(n0, n1)) m0
+ else -1
+ }
+ // Now we know we actually need KMP search, so do it
+ else S match {
+ case xs: IndexedSeq[_] =>
+ // We can index into S directly; it should be adequately fast
+ val Wopt = kmpOptimizeWord(W, n0, n1, forward)
+ val T = kmpJumpTable(Wopt, n1-n0)
+ var i, m = 0
+ val zero = if (forward) m0 else m1-1
+ val delta = if (forward) 1 else -1
+ while (i+m < m1-m0) {
+ if (Wopt(i) == S(zero+delta*(i+m))) {
+ i += 1
+ if (i == n1-n0) return (if (forward) m+m0 else m1-m-i)
+ }
+ else {
+ val ti = T(i)
+ m += i - ti
+ if (i > 0) i = ti
+ }
+ }
+ -1
+ case _ =>
+ // We had better not index into S directly!
+ val iter = S.iterator.drop(m0)
+ val Wopt = kmpOptimizeWord(W, n0, n1, true)
+ val T = kmpJumpTable(Wopt, n1-n0)
+ var cache = new Array[AnyRef](n1-n0) // Ring buffer--need a quick way to do a look-behind
+ var largest = 0
+ var i, m = 0
+ var answer = -1
+ while (m+m0+n1-n0 <= m1) {
+ while (i+m >= largest) {
+ cache(largest%(n1-n0)) = iter.next.asInstanceOf[AnyRef]
+ largest += 1
+ }
+ if (Wopt(i) == cache((i+m)%(n1-n0))) {
+ i += 1
+ if (i == n1-n0) {
+ if (forward) return m+m0
+ else {
+ i -= 1
+ answer = m+m0
+ val ti = T(i)
+ m += i - ti
+ if (i > 0) i = ti
+ }
+ }
+ }
+ else {
+ val ti = T(i)
+ m += i - ti
+ if (i > 0) i = ti
+ }
+ }
+ answer
+ }
}
/** Finds a particular index at which one sequence occurs in another sequence.
@@ -769,37 +833,58 @@ object SeqLike {
def indexOf[B](
source: Seq[B], sourceOffset: Int, sourceCount: Int,
target: Seq[B], targetOffset: Int, targetCount: Int,
- fromIndex: Int): Int = {
- val toDrop = fromIndex max 0
- val src = source.slice(sourceOffset, sourceCount) drop toDrop
- val tgt = target.slice(targetOffset, targetCount)
-
- KMP(src, tgt) match {
- case None => -1
- case Some(x) => x + toDrop
- }
+ fromIndex: Int
+ ): Int = {
+ // Fiddle with variables to match previous behavior and use kmpSearch
+ // Doing LOTS of max/min, both clearer and faster to use math._
+ val slen = source.length
+ val clippedFrom = math.max(0, fromIndex)
+ val s0 = math.min(slen, sourceOffset + clippedFrom)
+ val s1 = math.min(slen, s0 + sourceCount)
+ val tlen = target.length
+ val t0 = math.min(tlen, targetOffset)
+ val t1 = math.min(tlen, t0 + targetCount)
+
+ // Error checking
+ if (clippedFrom > slen-sourceOffset) -1 // Cannot return an index in range
+ else if (t1 - t0 < 1) s0 // Empty, matches first available position
+ else if (s1 - s0 < t1 - t0) -1 // Source is too short to find target
+ else {
+ // Nontrivial search
+ val ans = kmpSearch(source, s0, s1, target, t0, t1, true)
+ if (ans < 0) ans else ans - math.min(slen, sourceOffset)
+ }
}
/** Finds a particular index at which one sequence occurs in another sequence.
- * Like indexOf, but finds the latest occurrence rather than earliest.
+ * Like `indexOf`, but finds the latest occurrence rather than earliest.
*
- * @see SeqLike#indexOf
+ * @see [[scala.collection.SeqLike]], method `indexOf`
*/
def lastIndexOf[B](
source: Seq[B], sourceOffset: Int, sourceCount: Int,
target: Seq[B], targetOffset: Int, targetCount: Int,
- fromIndex: Int): Int = {
- if (fromIndex < 0) return -1
- val toTake = (fromIndex + targetCount) min sourceCount
- // Given seq 1234567 looking for abc, we need to take an extra
- // abc.length chars to examine beyond what is dictated by fromIndex.
- val src = source.slice(sourceOffset, sourceCount) take toTake reverse
- val tgt = target.slice(targetOffset, targetCount).reverse
-
- // then we reverse the adjustment here on success.
- KMP(src, tgt) match {
- case None => -1
- case Some(x) => src.length - x - targetCount
- }
+ fromIndex: Int
+ ): Int = {
+ // Fiddle with variables to match previous behavior and use kmpSearch
+ // Doing LOTS of max/min, both clearer and faster to use math._
+ val slen = source.length
+ val tlen = target.length
+ val s0 = math.min(slen, sourceOffset)
+ val s1 = math.min(slen, s0 + sourceCount)
+ val clippedFrom = math.min(s1 - s0, fromIndex)
+ val t0 = math.min(tlen, targetOffset)
+ val t1 = math.min(tlen, t0 + targetCount)
+ val fixed_s1 = math.min(s1, s0 + clippedFrom + (t1 - t0) - 1)
+
+ // Error checking
+ if (clippedFrom < 0) -1 // Cannot return an index in range
+ else if (t1 - t0 < 1) s0+clippedFrom // Empty, matches last available position
+ else if (fixed_s1 - s0 < t1 - t0) -1 // Source is too short to find target
+ else {
+ // Nontrivial search
+ val ans = kmpSearch(source, s0, fixed_s1, target, t0, t1, false)
+ if (ans < 0) ans else ans - s0
}
+ }
}
diff --git a/src/library/scala/collection/SeqProxy.scala b/src/library/scala/collection/SeqProxy.scala
index d80c507..1f8dc4a 100644
--- a/src/library/scala/collection/SeqProxy.scala
+++ b/src/library/scala/collection/SeqProxy.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala
index 565bd9f..5e8030d 100644
--- a/src/library/scala/collection/SeqProxyLike.scala
+++ b/src/library/scala/collection/SeqProxyLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -33,11 +33,10 @@ trait SeqProxyLike[+A, +Repr <: SeqLike[A, Repr] with Seq[A]] extends SeqLike[A,
override def prefixLength(p: A => Boolean) = self.prefixLength(p)
override def indexWhere(p: A => Boolean): Int = self.indexWhere(p)
override def indexWhere(p: A => Boolean, from: Int): Int = self.indexWhere(p, from)
- override def findIndexOf(p: A => Boolean): Int = self.indexWhere(p)
override def indexOf[B >: A](elem: B): Int = self.indexOf(elem)
override def indexOf[B >: A](elem: B, from: Int): Int = self.indexOf(elem, from)
override def lastIndexOf[B >: A](elem: B): Int = self.lastIndexOf(elem)
- override def lastIndexOf[B >: A](elem: B, end: Int): Int = self.lastIndexWhere(elem ==, end)
+ override def lastIndexOf[B >: A](elem: B, end: Int): Int = self.lastIndexWhere(elem == _, end)
override def lastIndexWhere(p: A => Boolean): Int = self.lastIndexWhere(p, length - 1)
override def lastIndexWhere(p: A => Boolean, end: Int): Int = self.lastIndexWhere(p)
override def reverse: Repr = self.reverse
diff --git a/src/library/scala/collection/SeqView.scala b/src/library/scala/collection/SeqView.scala
index 9f936e5..c26124c 100644
--- a/src/library/scala/collection/SeqView.scala
+++ b/src/library/scala/collection/SeqView.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index 37c6680..5f2bf90 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -1,26 +1,24 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
import generic._
import Seq.fill
import TraversableView.NoBuilder
-import annotation.bridge
/** A template trait for non-strict views of sequences.
* $seqViewInfo
*
* @define seqViewInfo
* $viewInfo
- * All views for sequences are defined by re-interpreting the `length` and `apply` methods.
+ * All views for sequences are defined by re-interpreting the `length` and
+ * `apply` methods.
*
* @author Martin Odersky
* @version 2.8
@@ -41,6 +39,9 @@ trait SeqViewLike[+A,
override def toString = viewToString
}
+ /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
+ private[collection] abstract class AbstractTransformed[+B] extends Seq[B] with super[IterableViewLike].Transformed[B] with Transformed[B]
+
trait EmptyView extends Transformed[Nothing] with super[IterableViewLike].EmptyView with super[GenSeqViewLike].EmptyView
trait Forced[B] extends super[IterableViewLike].Forced[B] with super[GenSeqViewLike].Forced[B] with Transformed[B]
@@ -72,27 +73,27 @@ trait SeqViewLike[+A,
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
- protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
- protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
- protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with Mapped[B]
- protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
- protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with Filtered
- protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with Sliced
- protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with DroppedWhile
- protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with TakenWhile
- protected override def newZipped[B](that: GenIterable[B]): Transformed[(A, B)] = new { val other = that } with Zipped[B]
+ protected override def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
+ protected override def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
+ protected override def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
+ protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
+ protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with AbstractTransformed[A] with Sliced
+ protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with DroppedWhile
+ protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile
+ protected override def newZipped[B](that: GenIterable[B]): Transformed[(A, B)] = new { val other = that } with AbstractTransformed[(A, B)] with Zipped[B]
protected override def newZippedAll[A1 >: A, B](that: GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new {
val other = that
val thisElem = _thisElem
val thatElem = _thatElem
- } with ZippedAll[A1, B]
- protected def newReversed: Transformed[A] = new Reversed { }
+ } with AbstractTransformed[(A1, B)] with ZippedAll[A1, B]
+ protected def newReversed: Transformed[A] = new AbstractTransformed[A] with Reversed
protected def newPatched[B >: A](_from: Int, _patch: GenSeq[B], _replaced: Int): Transformed[B] = new {
val from = _from
val patch = _patch
val replaced = _replaced
- } with Patched[B]
- protected def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with Prepended[B]
+ } with AbstractTransformed[B] with Patched[B]
+ protected def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B]
// see comment in IterableViewLike.
protected override def newTaken(n: Int): Transformed[A] = newSliced(SliceInterval(0, n))
@@ -130,17 +131,11 @@ trait SeqViewLike[+A,
override def diff[B >: A](that: GenSeq[B]): This =
newForced(thisSeq diff that).asInstanceOf[This]
- @bridge def diff[B >: A](that: Seq[B]): This = diff(that: GenSeq[B])
-
override def intersect[B >: A](that: GenSeq[B]): This =
newForced(thisSeq intersect that).asInstanceOf[This]
- @bridge def intersect[B >: A](that: Seq[B]): This = intersect(that: GenSeq[B])
-
override def sorted[B >: A](implicit ord: Ordering[B]): This =
newForced(thisSeq sorted ord).asInstanceOf[This]
override def stringPrefix = "SeqView"
}
-
-
diff --git a/src/library/scala/collection/Set.scala b/src/library/scala/collection/Set.scala
index 7241d7f..c304323 100644
--- a/src/library/scala/collection/Set.scala
+++ b/src/library/scala/collection/Set.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -35,12 +35,13 @@ trait Set[A] extends (A => Boolean)
* The current default implementation of a $Coll is one of `EmptySet`, `Set1`, `Set2`, `Set3`, `Set4` in
* class `immutable.Set` for sets of sizes up to 4, and a `immutable.HashSet` for sets of larger sizes.
* @define coll set
- * @define Coll Set
+ * @define Coll `Set`
*/
object Set extends SetFactory[Set] {
- private[collection] val hashSeed = "Set".hashCode
-
def newBuilder[A] = immutable.Set.newBuilder[A]
override def empty[A]: Set[A] = immutable.Set.empty[A]
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = setCanBuildFrom[A]
}
+
+/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */
+private[scala] abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A]
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index 5754b54..a6ebcc0 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,7 +11,7 @@ package scala.collection
import generic._
import mutable.{ Builder, SetBuilder }
-import annotation.{migration, bridge}
+import scala.annotation.{migration, bridge}
import parallel.ParSet
/** A template trait for sets.
@@ -78,7 +78,7 @@ self =>
protected[this] override def parCombiner = ParSet.newCombiner[A]
- /** Overridden for efficiency. */
+ /* Overridden for efficiency. */
override def toSeq: Seq[A] = toBuffer[A]
override def toBuffer[A1 >: A]: mutable.Buffer[A1] = {
val result = new mutable.ArrayBuffer[A1](size)
@@ -127,9 +127,6 @@ self =>
*/
def ++ (elems: GenTraversableOnce[A]): This = (repr /: elems.seq)(_ + _)
- @bridge
- def ++ (elems: TraversableOnce[A]): This = ++ (elems: GenTraversableOnce[A])
-
/** Creates a new set with a given element removed from this set.
*
* @param elem the element to be removed
@@ -144,15 +141,6 @@ self =>
*/
override def isEmpty: Boolean = size == 0
- /** This method is an alias for `intersect`.
- * It computes an intersection with set `that`.
- * It removes all the elements that are not present in `that`.
- *
- * @param that the set to intersect with
- */
- @deprecated("use & instead", "2.8.0")
- def ** (that: Set[A]): This = &(that)
-
/** Computes the union between of set and another set.
*
* @param that the set to form the union with.
@@ -161,9 +149,6 @@ self =>
*/
def union(that: GenSet[A]): This = this ++ that
- @bridge
- def union(that: Set[A]): This = union(that: GenSet[A])
-
/** Computes the difference of this set and another set.
*
* @param that the set of elements to exclude.
@@ -172,9 +157,6 @@ self =>
*/
def diff(that: GenSet[A]): This = this -- that
- @bridge
- def diff(that: Set[A]): This = diff(that: GenSet[A])
-
/** An iterator over all subsets of this set of the given size.
* If the requested size is impossible, an empty iterator is returned.
*
@@ -190,7 +172,7 @@ self =>
*
* @return the iterator.
*/
- def subsets: Iterator[This] = new Iterator[This] {
+ def subsets: Iterator[This] = new AbstractIterator[This] {
private val elms = self.toIndexedSeq
private var len = 0
private var itr: Iterator[This] = Iterator.empty
@@ -216,13 +198,13 @@ self =>
* @author Eastsun
* @date 2010.12.6
*/
- private class SubsetsItr(elms: IndexedSeq[A], len: Int) extends Iterator[This] {
+ private class SubsetsItr(elms: IndexedSeq[A], len: Int) extends AbstractIterator[This] {
private val idxs = Array.range(0, len+1)
private var _hasNext = true
idxs(len) = elms.size
def hasNext = _hasNext
- def next: This = {
+ def next(): This = {
if (!hasNext) Iterator.empty.next
val buf = self.newBuilder
diff --git a/src/library/scala/collection/SetProxy.scala b/src/library/scala/collection/SetProxy.scala
index 5260a0c..08075a7 100644
--- a/src/library/scala/collection/SetProxy.scala
+++ b/src/library/scala/collection/SetProxy.scala
@@ -1,17 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
-/** This is a simple wrapper class for <a href="Set.html"
- * target="contentFrame"><code>scala.collection.Set</code></a>.
+/** This is a simple wrapper class for [[scala.collection.Set]].
* It is most useful for assembling customized set abstractions
* dynamically using object composition and forwarding.
*
@@ -19,5 +16,4 @@ package scala.collection
* @author Martin Odersky
* @version 2.0, 01/01/2007
*/
-
trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]]
diff --git a/src/library/scala/collection/SetProxyLike.scala b/src/library/scala/collection/SetProxyLike.scala
index ab31bf3..5196f39 100644
--- a/src/library/scala/collection/SetProxyLike.scala
+++ b/src/library/scala/collection/SetProxyLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala
index 3f92908..c81c16e 100644
--- a/src/library/scala/collection/SortedMap.scala
+++ b/src/library/scala/collection/SortedMap.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -30,9 +30,26 @@ trait SortedMap[A, +B] extends Map[A, B] with SortedMapLike[A, B, SortedMap[A, B
* @since 2.8
*/
object SortedMap extends SortedMapFactory[SortedMap] {
- def empty[A, B](implicit ord: Ordering[A]): immutable.SortedMap[A, B] = immutable.SortedMap.empty[A, B](ord)
+ def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = immutable.SortedMap.empty[A, B](ord)
implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B]
+
+ private[collection] trait Default[A, +B] extends SortedMap[A, B] {
+ self =>
+ override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = {
+ val b = SortedMap.newBuilder[A, B1]
+ b ++= this
+ b += ((kv._1, kv._2))
+ b.result
+ }
+
+ override def - (key: A): SortedMap[A, B] = {
+ val b = newBuilder
+ for (kv <- this; if kv._1 != key) b += kv
+ b.result
+ }
+ }
+
}
diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala
index 4dc0820..57ad349 100644
--- a/src/library/scala/collection/SortedMapLike.scala
+++ b/src/library/scala/collection/SortedMapLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -72,4 +72,27 @@ self =>
for (e <- elems) m = m + e
m
}
+
+ override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
+ implicit def ordering: Ordering[A] = self.ordering
+ override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
+ }
+
+ override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
+ implicit def ordering: Ordering[A] = self.ordering
+ override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
+ }
+
+ /** Adds a number of elements provided by a traversable object
+ * and returns a new collection with the added elements.
+ *
+ * @param xs the traversable object.
+ */
+ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
+ ((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
+
}
+
+
+
+
diff --git a/src/library/scala/collection/SortedSet.scala b/src/library/scala/collection/SortedSet.scala
index a1d1cd3..2d5d4fb 100644
--- a/src/library/scala/collection/SortedSet.scala
+++ b/src/library/scala/collection/SortedSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -27,5 +27,7 @@ trait SortedSet[A] extends Set[A] with SortedSetLike[A, SortedSet[A]] {
*/
object SortedSet extends SortedSetFactory[SortedSet] {
def empty[A](implicit ord: Ordering[A]): immutable.SortedSet[A] = immutable.SortedSet.empty[A](ord)
- implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = new SortedSetCanBuildFrom[A]
+ def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = newCanBuildFrom[A]
+ // Force a declaration here so that BitSet's (which does not inherit from SortedSetFactory) can be more specific
+ override implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, SortedSet[A]] = super.newCanBuildFrom
}
diff --git a/src/library/scala/collection/SortedSetLike.scala b/src/library/scala/collection/SortedSetLike.scala
index 7554140..71b45c7 100644
--- a/src/library/scala/collection/SortedSetLike.scala
+++ b/src/library/scala/collection/SortedSetLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala
index 71c5839..36ef230 100644
--- a/src/library/scala/collection/Traversable.scala
+++ b/src/library/scala/collection/Traversable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,7 +13,6 @@ package scala.collection
import generic._
import mutable.{Builder, Buffer, ArrayBuffer, ListBuffer}
import scala.util.control.Breaks
-import annotation.bridge
/** A trait for traversable collections.
* All operations are guaranteed to be performed in a single-threaded manner.
@@ -28,12 +27,6 @@ trait Traversable[+A] extends TraversableLike[A, Traversable[A]]
override def seq: Traversable[A] = this
- @bridge
- def flatten[B](implicit asTraversable: A => /*<:<!!!*/ TraversableOnce[B]): Traversable[B] = super.flatten(asTraversable)
-
- @bridge
- def transpose[B](implicit asTraversable: A => /*<:<!!!*/ TraversableOnce[B]): Traversable[Traversable[B]] = super.transpose(asTraversable)
-
/* The following methods are inherited from TraversableLike
*
override def isEmpty: Boolean
@@ -75,7 +68,7 @@ trait Traversable[+A] extends TraversableLike[A, Traversable[A]]
override def copyToBuffer[B >: A](dest: Buffer[B])
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int)
override def copyToArray[B >: A](xs: Array[B], start: Int)
- override def toArray[B >: A : ClassManifest]: Array[B]
+ override def toArray[B >: A : ClassTag]: Array[B]
override def toList: List[A]
override def toIterable: Iterable[A]
override def toSeq: Seq[A]
@@ -103,8 +96,10 @@ object Traversable extends TraversableFactory[Traversable] { self =>
private[collection] val breaks: Breaks = new Breaks
/** $genericCanBuildFromInfo */
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, Traversable[A]] = immutable.Traversable.newBuilder[A]
}
+/** Explicit instantiation of the `Traversable` trait to reduce class file size in subclasses. */
+private[scala] abstract class AbstractTraversable[+A] extends Traversable[A]
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 0fc219b..8b430ca 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,10 +9,11 @@
package scala.collection
import generic._
-import mutable.{ Builder, ListBuffer }
-import annotation.{tailrec, migration, bridge}
-import annotation.unchecked.{ uncheckedVariance => uV }
+import mutable.{ Builder }
+import scala.annotation.{tailrec, migration, bridge}
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
import parallel.ParIterable
+import scala.language.higherKinds
/** A template trait for traversable collections of type `Traversable[A]`.
*
@@ -39,7 +40,7 @@ import parallel.ParIterable
* a non-strict collection class may defer computation of some of their
* elements until after the instance is available as a value.
* A typical example of a non-strict collection class is a
- * [[scala.collection.immutable/Stream]].
+ * [[scala.collection.immutable.Stream]].
* A more general class of examples are `TraversableViews`.
*
* If a collection is an instance of an ordered collection class, traversing
@@ -64,7 +65,8 @@ import parallel.ParIterable
* @define Coll Traversable
* @define coll traversable collection
*/
-trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
+trait TraversableLike[+A, +Repr] extends Any
+ with HasNewBuilder[A, Repr]
with FilterMonadic[A, Repr]
with TraversableOnce[A]
with GenTraversableLike[A, Repr]
@@ -75,7 +77,7 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
import Traversable.breaks._
/** The type implementing this traversable */
- protected type Self = Repr
+ protected[this] type Self = Repr
/** The collection of type $coll underlying this `TraversableLike` object.
* By default this is implemented as the `TraversableLike` object itself,
@@ -83,6 +85,8 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
*/
def repr: Repr = this.asInstanceOf[Repr]
+ final def isTraversableAgain: Boolean = true
+
/** The underlying collection seen as an instance of `$Coll`.
* By default this is implemented as the current collection object itself,
* but this can be overridden.
@@ -102,10 +106,6 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
/** Applies a function `f` to all elements of this $coll.
*
- * Note: this method underlies the implementation of most other bulk operations.
- * It's important to implement this method in an efficient way.
- *
- *
* @param f the function that is applied for its side-effect to every element.
* The result of function `f` is discarded.
*
@@ -114,6 +114,11 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* but this is not necessary.
*
* @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
+ *
+ * Note: this method underlies the implementation of most other bulk operations.
+ * It's important to implement this method in an efficient way.
+ *
*/
def foreach[U](f: A => U): Unit
@@ -133,13 +138,15 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
}
/** Tests whether this $coll is known to have a finite size.
- * All strict collections are known to have finite size. For a non-strict collection
- * such as `Stream`, the predicate returns `true` if all elements have been computed.
- * It returns `false` if the stream is not yet evaluated to the end.
+ * All strict collections are known to have finite size. For a non-strict
+ * collection such as `Stream`, the predicate returns `'''true'''` if all
+ * elements have been computed. It returns `'''false'''` if the stream is
+ * not yet evaluated to the end.
*
* Note: many collection methods will not work on collections of infinite sizes.
*
- * @return `true` if this collection is known to have finite size, `false` otherwise.
+ * @return `'''true'''` if this collection is known to have finite size,
+ * `'''false'''` otherwise.
*/
def hasDefiniteSize = true
@@ -151,25 +158,13 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
b.result
}
- @bridge
- def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That =
- ++(that: GenTraversableOnce[B])(bf)
-
- /** Concatenates this $coll with the elements of a traversable collection.
- * It differs from ++ in that the right operand determines the type of the
- * resulting collection rather than the left one.
- *
- * Example:
- * {{{
- * scala> val x = List(1)
- * x: List[Int] = List(1)
+ /** As with `++`, returns a new collection containing the elements from the left operand followed by the
+ * elements from the right operand.
*
- * scala> val y = LinkedList(2)
- * y: scala.collection.mutable.LinkedList[Int] = LinkedList(2)
+ * It differs from `++` in that the right operand determines the type of
+ * the resulting collection rather than the left one.
+ * Mnemonic: the COLon is on the side of the new COLlection type.
*
- * scala> val z = x ++: y
- * z: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
- * }}}
* @param that the traversable to append.
* @tparam B the element type of the returned collection.
* @tparam That $thatinfo
@@ -178,9 +173,22 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* of this $coll followed by all elements of `that`.
*
* @usecase def ++:[B](that: TraversableOnce[B]): $Coll[B]
- *
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
+ * @inheritdoc
+ *
+ * Example:
+ * {{{
+ * scala> val x = List(1)
+ * x: List[Int] = List(1)
+ *
+ * scala> val y = LinkedList(2)
+ * y: scala.collection.mutable.LinkedList[Int] = LinkedList(2)
+ *
+ * scala> val z = x ++: y
+ * z: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
+ * }}}
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
*/
def ++:[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -190,10 +198,12 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
b.result
}
- /** As with `++`, returns a new collection containing the elements from the left operand followed by the
- * elements from the right operand.
+ /** As with `++`, returns a new collection containing the elements from the
+ * left operand followed by the elements from the right operand.
+ *
* It differs from `++` in that the right operand determines the type of
* the resulting collection rather than the left one.
+ * Mnemonic: the COLon is on the side of the new COLlection type.
*
* Example:
* {{{
@@ -220,22 +230,24 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* @param bf $bfinfo
* @return a new collection of type `That` which contains all elements
* of this $coll followed by all elements of `that`.
- *
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
*/
def ++:[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Repr, B, That]): That =
(that ++ seq)(breakOut)
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
- val b = bf(repr)
- b.sizeHint(this)
+ def builder = { // extracted to keep method size under 35 bytes, so that it can be JIT-inlined
+ val b = bf(repr)
+ b.sizeHint(this)
+ b
+ }
+ val b = builder
for (x <- this) b += f(x)
b.result
}
def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
- val b = bf(repr)
+ def builder = bf(repr) // extracted to keep method size under 35 bytes, so that it can be JIT-inlined
+ val b = builder
for (x <- this) b ++= f(x).seq
b.result
}
@@ -279,11 +291,12 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* The order of the elements is preserved.
*
* @usecase def filterMap[B](f: A => Option[B]): $Coll[B]
+ * @inheritdoc
*
- * @param pf the partial function which filters and maps the $coll.
- * @return a new $coll resulting from applying the given option-valued function
- * `f` to each element and collecting all defined results.
- * The order of the elements is preserved.
+ * @param pf the partial function which filters and maps the $coll.
+ * @return a new $coll resulting from applying the given option-valued function
+ * `f` to each element and collecting all defined results.
+ * The order of the elements is preserved.
def filterMap[B, That](f: A => Option[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
for (x <- this)
@@ -417,7 +430,8 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
/** Optionally selects the first element.
* $orderDependent
- * @return the first element of this $coll if it is nonempty, `None` if it is empty.
+ * @return the first element of this $coll if it is nonempty,
+ * `None` if it is empty.
*/
def headOption: Option[A] = if (isEmpty) None else Some(head)
@@ -446,7 +460,8 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
/** Optionally selects the last element.
* $orderDependent
- * @return the last element of this $coll$ if it is nonempty, `None` if it is empty.
+ * @return the last element of this $coll$ if it is nonempty,
+ * `None` if it is empty.
*/
def lastOption: Option[A] = if (isEmpty) None else Some(last)
@@ -476,11 +491,12 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
if (n <= 0) {
val b = newBuilder
b.sizeHint(this)
- b ++= thisCollection result
+ (b ++= thisCollection).result
}
else sliceWithKnownDelta(n, Int.MaxValue, -n)
- def slice(from: Int, until: Int): Repr = sliceWithKnownBound(math.max(from, 0), until)
+ def slice(from: Int, until: Int): Repr =
+ sliceWithKnownBound(scala.math.max(from, 0), until)
// Precondition: from >= 0, until > 0, builder already configured for building.
private[this] def sliceInternal(from: Int, until: Int, b: Builder[A, Repr]): Repr = {
@@ -528,7 +544,7 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
val b = newBuilder
var go = false
for (x <- this) {
- if (!p(x)) go = true
+ if (!go && !p(x)) go = true
if (go) b += x
}
b.result
@@ -580,8 +596,6 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* Copying will stop once either the end of the current $coll is reached,
* or the end of the array is reached, or `len` elements have been copied.
*
- * $willNotTerminateInf
- *
* @param xs the array to fill.
* @param start the starting index.
* @param len the maximal number of elements to copy.
@@ -589,6 +603,9 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
*
*
* @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
var i = start
@@ -605,12 +622,19 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
def toTraversable: Traversable[A] = thisCollection
def toIterator: Iterator[A] = toStream.iterator
def toStream: Stream[A] = toBuffer.toStream
+ // Override to provide size hint.
+ override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
+ val b = cbf()
+ b.sizeHint(this)
+ b ++= thisCollection
+ b.result
+ }
/** Converts this $coll to a string.
*
* @return a string representation of this collection. By default this
- * string consists of the `stringPrefix` of this $coll,
- * followed by all elements separated by commas and enclosed in parentheses.
+ * string consists of the `stringPrefix` of this $coll, followed
+ * by all elements separated by commas and enclosed in parentheses.
*/
override def toString = mkString(stringPrefix + "(", ", ", ")")
@@ -621,7 +645,7 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* simple name of the collection class $coll.
*/
def stringPrefix : String = {
- var string = repr.asInstanceOf[AnyRef].getClass.getName
+ var string = repr.getClass.getName
val idx1 = string.lastIndexOf('.' : Int)
if (idx1 != -1) string = string.substring(idx1 + 1)
val idx2 = string.indexOf('$')
@@ -664,8 +688,8 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* @param p the predicate used to test elements.
* @return an object of class `WithFilter`, which supports
* `map`, `flatMap`, `foreach`, and `withFilter` operations.
- * All these operations apply to those elements of this $coll which
- * satisfy the predicate `p`.
+ * All these operations apply to those elements of this $coll
+ * which satisfy the predicate `p`.
*/
def withFilter(p: A => Boolean): FilterMonadic[A, Repr] = new WithFilter(p)
@@ -686,10 +710,11 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* that satisfies predicate `p` and collecting the results.
*
* @usecase def map[B](f: A => B): $Coll[B]
+ * @inheritdoc
*
- * @return a new $coll resulting from applying the given function
- * `f` to each element of the outer $coll that satisfies
- * predicate `p` and collecting the results.
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of the outer $coll that satisfies
+ * predicate `p` and collecting the results.
*/
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -702,9 +727,6 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* outer $coll containing this `WithFilter` instance that satisfy
* predicate `p` and concatenating the results.
*
- * The type of the resulting collection will be guided by the static type
- * of the outer $coll.
- *
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
* @tparam That $thatinfo
@@ -715,9 +737,15 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* concatenating the results.
*
* @usecase def flatMap[B](f: A => TraversableOnce[B]): $Coll[B]
+ * @inheritdoc
+ *
+ * The type of the resulting collection will be guided by the static type
+ * of the outer $coll.
*
- * @return a new $coll resulting from applying the given collection-valued function
- * `f` to each element of the outer $coll that satisfies predicate `p` and concatenating the results.
+ * @return a new $coll resulting from applying the given
+ * collection-valued function `f` to each element of the
+ * outer $coll that satisfies predicate `p` and concatenating
+ * the results.
*/
def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -737,6 +765,7 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
* but this is not necessary.
*
* @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
*/
def foreach[U](f: A => U): Unit =
for (x <- self)
@@ -757,6 +786,6 @@ trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr]
// A helper for tails and inits.
private def iterateUntilEmpty(f: Traversable[A @uV] => Traversable[A @uV]): Iterator[Repr] = {
val it = Iterator.iterate(thisCollection)(f) takeWhile (x => !x.isEmpty)
- it ++ Iterator(Nil) map (newBuilder ++= _ result)
+ it ++ Iterator(Nil) map (x => (newBuilder ++= x).result)
}
}
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 5269eac..a448ac2 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,15 +8,16 @@
package scala.collection
-import mutable.{ Buffer, ListBuffer, ArrayBuffer }
-import annotation.unchecked.{ uncheckedVariance => uV }
+import mutable.{ Buffer, Builder, ListBuffer, ArrayBuffer }
+import generic.CanBuildFrom
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
+import scala.language.{implicitConversions, higherKinds}
+import scala.reflect.ClassTag
/** A template trait for collections which can be traversed either once only
* or one or more times.
* $traversableonceinfo
*
- * @tparam A the element type of the collection
- *
* @author Martin Odersky
* @author Paul Phillips
* @version 2.8
@@ -56,7 +57,7 @@ import annotation.unchecked.{ uncheckedVariance => uV }
*
* Note: will not terminate for infinite-sized collections.
*/
-trait TraversableOnce[+A] extends GenTraversableOnce[A] {
+trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
self =>
/** Self-documenting abstract methods. */
@@ -124,7 +125,7 @@ trait TraversableOnce[+A] extends GenTraversableOnce[A] {
* @param pf the partial function
* @return an option value containing pf applied to the first
* value for which it is defined, or `None` if none exists.
- * @example `Seq("a", 1, 5L).collectFirst({ case x: Int => x*10 }) = Some(10)`
+ * @example `Seq("a", 1, 5L).collectFirst({ case x: Int => x*10 }) = Some(10)`
*/
def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = {
for (x <- self.toIterator) { // make sure to use an iterator or `seq`
@@ -147,6 +148,20 @@ trait TraversableOnce[+A] extends GenTraversableOnce[A] {
def foldRight[B](z: B)(op: (A, B) => B): B =
reversed.foldLeft(z)((x, y) => op(y, x))
+ /** Applies a binary operator to all elements of this $coll,
+ * going left to right.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll,
+ * going left to right:
+ * {{{
+ * op( op( ... op(x_1, x_2) ..., x_{n-1}), x_n)
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ * @throws `UnsupportedOperationException` if this $coll is empty. */
def reduceLeft[B >: A](op: (B, A) => B): B = {
if (isEmpty)
throw new UnsupportedOperationException("empty.reduceLeft")
@@ -228,7 +243,7 @@ trait TraversableOnce[+A] extends GenTraversableOnce[A] {
def copyToArray[B >: A](xs: Array[B]): Unit =
copyToArray(xs, 0, xs.length)
- def toArray[B >: A : ClassManifest]: Array[B] = {
+ def toArray[B >: A : ClassTag]: Array[B] = {
if (isTraversableAgain) {
val result = new Array[B](size)
copyToArray(result, 0)
@@ -239,17 +254,25 @@ trait TraversableOnce[+A] extends GenTraversableOnce[A] {
def toTraversable: Traversable[A]
- def toList: List[A] = new ListBuffer[A] ++= seq toList
+ def toList: List[A] = to[List]
def toIterable: Iterable[A] = toStream
def toSeq: Seq[A] = toStream
- def toIndexedSeq[B >: A]: immutable.IndexedSeq[B] = immutable.IndexedSeq() ++ seq
+ def toIndexedSeq: immutable.IndexedSeq[A] = to[immutable.IndexedSeq]
+
+ def toBuffer[B >: A]: mutable.Buffer[B] = to[ArrayBuffer].asInstanceOf[mutable.Buffer[B]]
- def toBuffer[B >: A]: mutable.Buffer[B] = new ArrayBuffer[B] ++= seq
+ def toSet[B >: A]: immutable.Set[B] = to[immutable.Set].asInstanceOf[immutable.Set[B]]
- def toSet[B >: A]: immutable.Set[B] = immutable.Set() ++ seq
+ def toVector: Vector[A] = to[Vector]
+
+ def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
+ val b = cbf()
+ b ++= seq
+ b.result
+ }
def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = {
val b = immutable.Map.newBuilder[T, U]
@@ -266,10 +289,9 @@ trait TraversableOnce[+A] extends GenTraversableOnce[A] {
def mkString: String = mkString("")
- /** Appends all elements of this $coll to a string builder using start, end,
- * and separator strings.
- * The written text begins with the string `start` and ends with the string
- * `end`. Inside, the string representations (w.r.t. the method `toString`)
+ /** Appends all elements of this $coll to a string builder using start, end, and separator strings.
+ * The written text begins with the string `start` and ends with the string `end`.
+ * Inside, the string representations (w.r.t. the method `toString`)
* of all elements of this $coll are separated by the string `sep`.
*
* Example:
@@ -310,10 +332,9 @@ trait TraversableOnce[+A] extends GenTraversableOnce[A] {
b
}
- /** Appends all elements of this $coll to a string builder using a separator
- * string. The written text consists of the string representations (w.r.t.
- * the method `toString`) of all elements of this $coll, separated by the
- * string `sep`.
+ /** Appends all elements of this $coll to a string builder using a separator string.
+ * The written text consists of the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll, separated by the string `sep`.
*
* Example:
*
@@ -358,25 +379,33 @@ trait TraversableOnce[+A] extends GenTraversableOnce[A] {
}
-
object TraversableOnce {
- implicit def traversableOnceCanBuildFrom[T] = new OnceCanBuildFrom[T]
- implicit def wrapTraversableOnce[A](trav: TraversableOnce[A]) = new MonadOps(trav)
+ @deprecated("use OnceCanBuildFrom instead", "2.10.0")
+ def traversableOnceCanBuildFrom[T] = new OnceCanBuildFrom[T]
+ @deprecated("use MonadOps instead", "2.10.0")
+ def wrapTraversableOnce[A](trav: TraversableOnce[A]) = new MonadOps(trav)
+
+ implicit def alternateImplicit[A](trav: TraversableOnce[A]) = new ForceImplicitAmbiguity
implicit def flattenTraversableOnce[A, CC[_]](travs: TraversableOnce[CC[A]])(implicit ev: CC[A] => TraversableOnce[A]) =
new FlattenOps[A](travs map ev)
- /** With the advent of TraversableOnce, it can be useful to have a builder which
- * operates on Iterators so they can be treated uniformly along with the collections.
- * See scala.util.Random.shuffle for an example.
- */
- class OnceCanBuildFrom[A] extends generic.CanBuildFrom[TraversableOnce[A], A, TraversableOnce[A]] {
- def newIterator = new ArrayBuffer[A] mapResult (_.iterator)
+ /* Functionality reused in Iterator.CanBuildFrom */
+ private[collection] abstract class BufferedCanBuildFrom[A, Coll[X] <: TraversableOnce[X]] extends generic.CanBuildFrom[Coll[_], A, Coll[A]] {
+ def bufferToColl[B](buff: ArrayBuffer[B]): Coll[B]
+ def traversableToColl[B](t: GenTraversable[B]): Coll[B]
+
+ def newIterator: Builder[A, Coll[A]] = new ArrayBuffer[A] mapResult bufferToColl
/** Creates a new builder on request of a collection.
* @param from the collection requesting the builder to be created.
* @return the result of invoking the `genericBuilder` method on `from`.
*/
- def apply(from: TraversableOnce[A]) = newIterator
+ def apply(from: Coll[_]): Builder[A, Coll[A]] = from match {
+ case xs: generic.GenericTraversableTemplate[_, _] => xs.genericBuilder.asInstanceOf[Builder[A, Traversable[A]]] mapResult {
+ case res => traversableToColl(res.asInstanceOf[GenTraversable[A]])
+ }
+ case _ => newIterator
+ }
/** Creates a new builder from scratch
* @return the result of invoking the `newBuilder` method of this factory.
@@ -384,8 +413,20 @@ object TraversableOnce {
def apply() = newIterator
}
+ /** With the advent of `TraversableOnce`, it can be useful to have a builder which
+ * operates on `Iterator`s so they can be treated uniformly along with the collections.
+ * See `scala.util.Random.shuffle` or `scala.concurrent.Future.sequence` for an example.
+ */
+ class OnceCanBuildFrom[A] extends BufferedCanBuildFrom[A, TraversableOnce] {
+ def bufferToColl[B](buff: ArrayBuffer[B]) = buff.iterator
+ def traversableToColl[B](t: GenTraversable[B]) = t.seq
+ }
+
+ /** Evidence for building collections from `TraversableOnce` collections */
+ implicit def OnceCanBuildFrom[A] = new OnceCanBuildFrom[A]
+
class FlattenOps[A](travs: TraversableOnce[TraversableOnce[A]]) {
- def flatten: Iterator[A] = new Iterator[A] {
+ def flatten: Iterator[A] = new AbstractIterator[A] {
val its = travs.toIterator
private var it: Iterator[A] = Iterator.empty
def hasNext: Boolean = it.hasNext || its.hasNext && { it = its.next.toIterator; hasNext }
@@ -393,7 +434,9 @@ object TraversableOnce {
}
}
- class MonadOps[+A](trav: TraversableOnce[A]) {
+ class ForceImplicitAmbiguity
+
+ implicit class MonadOps[+A](trav: TraversableOnce[A]) {
def map[B](f: A => B): TraversableOnce[B] = trav.toIterator map f
def flatMap[B](f: A => GenTraversableOnce[B]): TraversableOnce[B] = trav.toIterator flatMap f
def withFilter(p: A => Boolean) = trav.toIterator filter p
diff --git a/src/library/scala/collection/TraversableProxy.scala b/src/library/scala/collection/TraversableProxy.scala
index 215cf08..568298a 100644
--- a/src/library/scala/collection/TraversableProxy.scala
+++ b/src/library/scala/collection/TraversableProxy.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala
index 15565e5..8896cd1 100644
--- a/src/library/scala/collection/TraversableProxyLike.scala
+++ b/src/library/scala/collection/TraversableProxyLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,6 +12,7 @@ package scala.collection
import generic._
import mutable.{Buffer, StringBuilder}
+import scala.reflect.ClassTag
// Methods could be printed by cat TraversableLike.scala | egrep '^ (override )?def'
@@ -73,11 +74,11 @@ trait TraversableProxyLike[+A, +Repr <: TraversableLike[A, Repr] with Traversabl
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) = self.copyToArray(xs, start, len)
override def copyToArray[B >: A](xs: Array[B], start: Int) = self.copyToArray(xs, start)
override def copyToArray[B >: A](xs: Array[B]) = self.copyToArray(xs)
- override def toArray[B >: A: ClassManifest]: Array[B] = self.toArray
+ override def toArray[B >: A: ClassTag]: Array[B] = self.toArray
override def toList: List[A] = self.toList
override def toIterable: Iterable[A] = self.toIterable
override def toSeq: Seq[A] = self.toSeq
- override def toIndexedSeq[B >: A] = self.toIndexedSeq
+ override def toIndexedSeq: immutable.IndexedSeq[A] = self.toIndexedSeq
override def toBuffer[B >: A] = self.toBuffer
override def toStream: Stream[A] = self.toStream
override def toSet[B >: A]: immutable.Set[B] = self.toSet
diff --git a/src/library/scala/collection/TraversableView.scala b/src/library/scala/collection/TraversableView.scala
index 3fad7d4..cce6b72 100644
--- a/src/library/scala/collection/TraversableView.scala
+++ b/src/library/scala/collection/TraversableView.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -24,8 +24,6 @@ object TraversableView {
class NoBuilder[A] extends Builder[A, Nothing] {
def +=(elem: A): this.type = this
def iterator: Iterator[A] = Iterator.empty
- @deprecated("use `iterator' instead", "2.8.0")
- def elements = iterator
def result() = throw new UnsupportedOperationException("TraversableView.Builder.result")
def clear() {}
}
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 3608a6a..14f865c 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,14 +11,15 @@ package scala.collection
import generic._
import mutable.{ Builder, ArrayBuffer }
import TraversableView.NoBuilder
-import annotation.migration
+import scala.annotation.migration
+import scala.language.implicitConversions
trait ViewMkString[+A] {
self: Traversable[A] =>
// It is necessary to use thisSeq rather than toSeq to avoid cycles in the
// eager evaluation of vals in transformed view subclasses, see #4558.
- protected[this] def thisSeq: Seq[A] = new ArrayBuffer[A] ++= self result
+ protected[this] def thisSeq: Seq[A] = (new ArrayBuffer[A] ++= self).result
// Have to overload all three to work around #4299. The overload
// is because mkString should force a view but toString should not.
@@ -27,8 +28,16 @@ trait ViewMkString[+A] {
override def mkString(start: String, sep: String, end: String): String = {
thisSeq.addString(new StringBuilder(), start, sep, end).toString
}
- override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder =
- b append start append "..." append end
+ override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = {
+ var first = true
+ b append start
+ for (x <- self) {
+ if (first) first = false else b append sep
+ b append x
+ }
+ b append end
+ b
+ }
}
/** A template trait for non-strict views of traversable collections.
@@ -40,17 +49,17 @@ trait ViewMkString[+A] {
* that takes a `View` as its `From` type parameter must yield the same view (or a generic
* superclass of it) as its result parameter. If that assumption is broken, cast errors might result.
*
- * @define viewInfo
+ * @define viewInfo
* A view is a lazy version of some collection. Collection transformers such as
* `map` or `filter` or `++` do not traverse any elements when applied on a view.
* Instead they create a new view which simply records that fact that the operation
* needs to be applied. The collection elements are accessed, and the view operations are applied,
* when a non-view result is needed, or when the `force` method is called on a view.
- * @define traversableViewInfo
+ * @define traversableViewInfo
* $viewInfo
*
* All views for traversable collections are defined by creating a new `foreach` method.
-
+ *
* @author Martin Odersky
* @version 2.8
* @since 2.8
@@ -107,6 +116,9 @@ trait TraversableViewLike[+A,
override def toString = viewToString
}
+ /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
+ private[collection] abstract class AbstractTransformed[+B] extends Traversable[B] with Transformed[B]
+
trait EmptyView extends Transformed[Nothing] with super.EmptyView
/** A fall back which forces everything into a vector and then applies an operation
@@ -150,19 +162,21 @@ trait TraversableViewLike[+A,
// if (b.isInstanceOf[NoBuilder[_]]) newFlatMapped(f).asInstanceOf[That]
// else super.flatMap[B, That](f)(bf)
}
+ override def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]) =
+ newFlatMapped(asTraversable)
private[this] implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This]
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
- protected def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
- protected def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
- protected def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with Mapped[B]
- protected def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
- protected def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with Filtered
- protected def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with Sliced
- protected def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with DroppedWhile
- protected def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with TakenWhile
+ protected def newForced[B](xs: => GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
+ protected def newAppended[B >: A](that: GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
+ protected def newFlatMapped[B](f: A => GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
+ protected def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
+ protected def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with AbstractTransformed[A] with Sliced
+ protected def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with DroppedWhile
+ protected def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile
protected def newTaken(n: Int): Transformed[A] = newSliced(SliceInterval(0, n))
protected def newDropped(n: Int): Transformed[A] = newSliced(SliceInterval(n, Int.MaxValue))
@@ -189,6 +203,12 @@ trait TraversableViewLike[+A,
override def groupBy[K](f: A => K): immutable.Map[K, This] =
thisSeq groupBy f mapValues (xs => newForced(xs))
+ override def unzip[A1, A2](implicit asPair: A => (A1, A2)) =
+ (newMapped(x => asPair(x)._1), newMapped(x => asPair(x)._2)) // TODO - Performance improvements.
+
+ override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)) =
+ (newMapped(x => asTriple(x)._1), newMapped(x => asTriple(x)._2), newMapped(x => asTriple(x)._3)) // TODO - Performance improvements.
+
override def toString = viewToString
}
diff --git a/src/library/scala/collection/concurrent/BasicNode.java b/src/library/scala/collection/concurrent/BasicNode.java
new file mode 100644
index 0000000..a65d84b
--- /dev/null
+++ b/src/library/scala/collection/concurrent/BasicNode.java
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.concurrent;
+
+
+
+
+
+
+public abstract class BasicNode {
+
+ public abstract String string(int lev);
+
+}
\ No newline at end of file
diff --git a/src/library/scala/collection/concurrent/CNodeBase.java b/src/library/scala/collection/concurrent/CNodeBase.java
new file mode 100644
index 0000000..d6eb29c
--- /dev/null
+++ b/src/library/scala/collection/concurrent/CNodeBase.java
@@ -0,0 +1,35 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.concurrent;
+
+
+
+import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
+
+
+
+abstract class CNodeBase<K, V> extends MainNode<K, V> {
+
+ public static final AtomicIntegerFieldUpdater<CNodeBase> updater = AtomicIntegerFieldUpdater.newUpdater(CNodeBase.class, "csize");
+
+ public volatile int csize = -1;
+
+ public boolean CAS_SIZE(int oldval, int nval) {
+ return updater.compareAndSet(this, oldval, nval);
+ }
+
+ public void WRITE_SIZE(int nval) {
+ updater.set(this, nval);
+ }
+
+ public int READ_SIZE() {
+ return updater.get(this);
+ }
+
+}
\ No newline at end of file
diff --git a/src/library/scala/collection/concurrent/Gen.java b/src/library/scala/collection/concurrent/Gen.java
new file mode 100644
index 0000000..331eeca
--- /dev/null
+++ b/src/library/scala/collection/concurrent/Gen.java
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.concurrent;
+
+
+
+
+
+
+final class Gen {
+}
+
diff --git a/src/library/scala/collection/concurrent/INodeBase.java b/src/library/scala/collection/concurrent/INodeBase.java
new file mode 100644
index 0000000..cbe404e
--- /dev/null
+++ b/src/library/scala/collection/concurrent/INodeBase.java
@@ -0,0 +1,35 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.concurrent;
+
+
+
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+
+
+
+abstract class INodeBase<K, V> extends BasicNode {
+
+ public static final AtomicReferenceFieldUpdater<INodeBase, MainNode> updater = AtomicReferenceFieldUpdater.newUpdater(INodeBase.class, MainNode.class, "mainnode");
+
+ public static final Object RESTART = new Object();
+
+ public volatile MainNode<K, V> mainnode = null;
+
+ public final Gen gen;
+
+ public INodeBase(Gen generation) {
+ gen = generation;
+ }
+
+ public BasicNode prev() {
+ return null;
+ }
+
+}
\ No newline at end of file
diff --git a/src/library/scala/collection/concurrent/MainNode.java b/src/library/scala/collection/concurrent/MainNode.java
new file mode 100644
index 0000000..ffe5357
--- /dev/null
+++ b/src/library/scala/collection/concurrent/MainNode.java
@@ -0,0 +1,40 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.concurrent;
+
+
+
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+
+
+
+abstract class MainNode<K, V> extends BasicNode {
+
+ public static final AtomicReferenceFieldUpdater<MainNode, MainNode> updater = AtomicReferenceFieldUpdater.newUpdater(MainNode.class, MainNode.class, "prev");
+
+ public volatile MainNode<K, V> prev = null;
+
+ public abstract int cachedSize(Object ct);
+
+ public boolean CAS_PREV(MainNode<K, V> oldval, MainNode<K, V> nval) {
+ return updater.compareAndSet(this, oldval, nval);
+ }
+
+ public void WRITE_PREV(MainNode<K, V> nval) {
+ updater.set(this, nval);
+ }
+
+ // do we need this? unclear in the javadocs...
+ // apparently not - volatile reads are supposed to be safe
+ // irregardless of whether there are concurrent ARFU updates
+ public MainNode<K, V> READ_PREV() {
+ return updater.get(this);
+ }
+
+}
\ No newline at end of file
diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala
new file mode 100644
index 0000000..b2276ce
--- /dev/null
+++ b/src/library/scala/collection/concurrent/Map.scala
@@ -0,0 +1,88 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.concurrent
+
+/** A template trait for mutable maps that allow concurrent access.
+ *
+ * $concurrentmapinfo
+ *
+ * @since 2.8
+ * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#concurrent_maps "Scala's Collection Library overview"]]
+ * section on `Concurrent Maps` for more information.
+ *
+ * @tparam A the key type of the map
+ * @tparam B the value type of the map
+ *
+ * @define Coll `ConcurrentMap`
+ * @define coll concurrent map
+ * @define concurrentmapinfo
+ * This is a base trait for all Scala concurrent map implementations. It
+ * provides all of the methods a `Map` does, with the difference that all the
+ * changes are atomic. It also describes methods specific to concurrent maps.
+ *
+ * '''Note''': The concurrent maps do not accept `'''null'''` for keys or values.
+ *
+ * @define atomicop
+ * This is an atomic operation.
+ */
+trait Map[A, B] extends scala.collection.mutable.Map[A, B] {
+
+ /**
+ * Associates the given key with a given value, unless the key was already
+ * associated with some other value.
+ *
+ * $atomicop
+ *
+ * @param k key with which the specified value is to be associated with
+ * @param v value to be associated with the specified key
+ * @return `Some(oldvalue)` if there was a value `oldvalue` previously
+ * associated with the specified key, or `None` if there was no
+ * mapping for the specified key
+ */
+ def putIfAbsent(k: A, v: B): Option[B]
+
+ /**
+ * Removes the entry for the specified key if its currently mapped to the
+ * specified value.
+ *
+ * $atomicop
+ *
+ * @param k key for which the entry should be removed
+ * @param v value expected to be associated with the specified key if
+ * the removal is to take place
+ * @return `true` if the removal took place, `false` otherwise
+ */
+ def remove(k: A, v: B): Boolean
+
+ /**
+ * Replaces the entry for the given key only if it was previously mapped to
+ * a given value.
+ *
+ * $atomicop
+ *
+ * @param k key for which the entry should be replaced
+ * @param oldvalue value expected to be associated with the specified key
+ * if replacing is to happen
+ * @param newvalue value to be associated with the specified key
+ * @return `true` if the entry was replaced, `false` otherwise
+ */
+ def replace(k: A, oldvalue: B, newvalue: B): Boolean
+
+ /**
+ * Replaces the entry for the given key only if it was previously mapped
+ * to some value.
+ *
+ * $atomicop
+ *
+ * @param k key for which the entry should be replaced
+ * @param v value to be associated with the specified key
+ * @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise
+ */
+ def replace(k: A, v: B): Option[B]
+}
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
new file mode 100644
index 0000000..714260f
--- /dev/null
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -0,0 +1,1082 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package concurrent
+
+import java.util.concurrent.atomic._
+import scala.collection.immutable.{ ListMap => ImmutableListMap }
+import scala.collection.parallel.mutable.ParTrieMap
+import scala.util.hashing.Hashing
+import scala.util.control.ControlThrowable
+import generic._
+import scala.annotation.tailrec
+import scala.annotation.switch
+
+private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) {
+ import INodeBase._
+
+ WRITE(bn)
+
+ def this(g: Gen) = this(null, g)
+
+ def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval)
+
+ def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n)
+
+ def gcasRead(ct: TrieMap[K, V]): MainNode[K, V] = GCAS_READ(ct)
+
+ def GCAS_READ(ct: TrieMap[K, V]): MainNode[K, V] = {
+ val m = /*READ*/mainnode
+ val prevval = /*READ*/m.prev
+ if (prevval eq null) m
+ else GCAS_Complete(m, ct)
+ }
+
+ @tailrec private def GCAS_Complete(m: MainNode[K, V], ct: TrieMap[K, V]): MainNode[K, V] = if (m eq null) null else {
+ // complete the GCAS
+ val prev = /*READ*/m.prev
+ val ctr = ct.readRoot(true)
+
+ prev match {
+ case null =>
+ m
+ case fn: FailedNode[_, _] => // try to commit to previous value
+ if (CAS(m, fn.prev)) fn.prev
+ else GCAS_Complete(/*READ*/mainnode, ct)
+ case vn: MainNode[_, _] =>
+ // Assume that you've read the root from the generation G.
+ // Assume that the snapshot algorithm is correct.
+ // ==> you can only reach nodes in generations <= G.
+ // ==> `gen` is <= G.
+ // We know that `ctr.gen` is >= G.
+ // ==> if `ctr.gen` = `gen` then they are both equal to G.
+ // ==> otherwise, we know that either `ctr.gen` > G, `gen` < G,
+ // or both
+ if ((ctr.gen eq gen) && ct.nonReadOnly) {
+ // try to commit
+ if (m.CAS_PREV(prev, null)) m
+ else GCAS_Complete(m, ct)
+ } else {
+ // try to abort
+ m.CAS_PREV(prev, new FailedNode(prev))
+ GCAS_Complete(/*READ*/mainnode, ct)
+ }
+ }
+ }
+
+ def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: TrieMap[K, V]): Boolean = {
+ n.WRITE_PREV(old)
+ if (CAS(old, n)) {
+ GCAS_Complete(n, ct)
+ /*READ*/n.prev eq null
+ } else false
+ }
+
+ private def equal(k1: K, k2: K, ct: TrieMap[K, V]) = ct.equality.equiv(k1, k2)
+
+ private def inode(cn: MainNode[K, V]) = {
+ val nin = new INode[K, V](gen)
+ nin.WRITE(cn)
+ nin
+ }
+
+ def copyToGen(ngen: Gen, ct: TrieMap[K, V]) = {
+ val nin = new INode[K, V](ngen)
+ val main = GCAS_READ(ct)
+ nin.WRITE(main)
+ nin
+ }
+
+ /** Inserts a key value pair, overwriting the old pair if the keys match.
+ *
+ * @return true if successful, false otherwise
+ */
+ @tailrec def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Boolean = {
+ val m = GCAS_READ(ct) // use -Yinline!
+
+ m match {
+ case cn: CNode[K, V] => // 1) a multiway node
+ val idx = (hc >>> lev) & 0x1f
+ val flag = 1 << idx
+ val bmp = cn.bitmap
+ val mask = flag - 1
+ val pos = Integer.bitCount(bmp & mask)
+ if ((bmp & flag) != 0) {
+ // 1a) insert below
+ cn.array(pos) match {
+ case in: INode[K, V] =>
+ if (startgen eq in.gen) in.rec_insert(k, v, hc, lev + 5, this, startgen, ct)
+ else {
+ if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insert(k, v, hc, lev, parent, startgen, ct)
+ else false
+ }
+ case sn: SNode[K, V] =>
+ if (sn.hc == hc && equal(sn.k, k, ct)) GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)
+ else {
+ val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
+ val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen)
+ GCAS(cn, nn, ct)
+ }
+ }
+ } else {
+ val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
+ val ncnode = rn.insertedAt(pos, flag, new SNode(k, v, hc), gen)
+ GCAS(cn, ncnode, ct)
+ }
+ case tn: TNode[K, V] =>
+ clean(parent, ct, lev - 5)
+ false
+ case ln: LNode[K, V] => // 3) an l-node
+ val nn = ln.inserted(k, v)
+ GCAS(ln, nn, ct)
+ }
+ }
+
+ /** Inserts a new key value pair, given that a specific condition is met.
+ *
+ * @param cond null - don't care if the key was there; KEY_ABSENT - key wasn't there; KEY_PRESENT - key was there; other value `v` - key must be bound to `v`
+ * @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key)
+ */
+ @tailrec def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = {
+ val m = GCAS_READ(ct) // use -Yinline!
+
+ m match {
+ case cn: CNode[K, V] => // 1) a multiway node
+ val idx = (hc >>> lev) & 0x1f
+ val flag = 1 << idx
+ val bmp = cn.bitmap
+ val mask = flag - 1
+ val pos = Integer.bitCount(bmp & mask)
+ if ((bmp & flag) != 0) {
+ // 1a) insert below
+ cn.array(pos) match {
+ case in: INode[K, V] =>
+ if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, lev + 5, this, startgen, ct)
+ else {
+ if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, lev, parent, startgen, ct)
+ else null
+ }
+ case sn: SNode[K, V] => cond match {
+ case null =>
+ if (sn.hc == hc && equal(sn.k, k, ct)) {
+ if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null
+ } else {
+ val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
+ val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen)
+ if (GCAS(cn, nn, ct)) None
+ else null
+ }
+ case INode.KEY_ABSENT =>
+ if (sn.hc == hc && equal(sn.k, k, ct)) Some(sn.v)
+ else {
+ val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
+ val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen)
+ if (GCAS(cn, nn, ct)) None
+ else null
+ }
+ case INode.KEY_PRESENT =>
+ if (sn.hc == hc && equal(sn.k, k, ct)) {
+ if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null
+ } else None
+ case otherv =>
+ if (sn.hc == hc && equal(sn.k, k, ct) && sn.v == otherv) {
+ if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null
+ } else None
+ }
+ }
+ } else cond match {
+ case null | INode.KEY_ABSENT =>
+ val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct)
+ val ncnode = rn.insertedAt(pos, flag, new SNode(k, v, hc), gen)
+ if (GCAS(cn, ncnode, ct)) None else null
+ case INode.KEY_PRESENT => None
+ case otherv => None
+ }
+ case sn: TNode[K, V] =>
+ clean(parent, ct, lev - 5)
+ null
+ case ln: LNode[K, V] => // 3) an l-node
+ def insertln() = {
+ val nn = ln.inserted(k, v)
+ GCAS(ln, nn, ct)
+ }
+ cond match {
+ case null =>
+ val optv = ln.get(k)
+ if (insertln()) optv else null
+ case INode.KEY_ABSENT =>
+ ln.get(k) match {
+ case None => if (insertln()) None else null
+ case optv => optv
+ }
+ case INode.KEY_PRESENT =>
+ ln.get(k) match {
+ case Some(v0) => if (insertln()) Some(v0) else null
+ case None => None
+ }
+ case otherv =>
+ ln.get(k) match {
+ case Some(v0) if v0 == otherv => if (insertln()) Some(otherv.asInstanceOf[V]) else null
+ case _ => None
+ }
+ }
+ }
+ }
+
+ /** Looks up the value associated with the key.
+ *
+ * @return null if no value has been found, RESTART if the operation wasn't successful, or any other value otherwise
+ */
+ @tailrec def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): AnyRef = {
+ val m = GCAS_READ(ct) // use -Yinline!
+
+ m match {
+ case cn: CNode[K, V] => // 1) a multinode
+ val idx = (hc >>> lev) & 0x1f
+ val flag = 1 << idx
+ val bmp = cn.bitmap
+ if ((bmp & flag) == 0) null // 1a) bitmap shows no binding
+ else { // 1b) bitmap contains a value - descend
+ val pos = if (bmp == 0xffffffff) idx else Integer.bitCount(bmp & (flag - 1))
+ val sub = cn.array(pos)
+ sub match {
+ case in: INode[K, V] =>
+ if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct)
+ else {
+ if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct)
+ else return RESTART // used to be throw RestartException
+ }
+ case sn: SNode[K, V] => // 2) singleton node
+ if (sn.hc == hc && equal(sn.k, k, ct)) sn.v.asInstanceOf[AnyRef]
+ else null
+ }
+ }
+ case tn: TNode[K, V] => // 3) non-live node
+ def cleanReadOnly(tn: TNode[K, V]) = if (ct.nonReadOnly) {
+ clean(parent, ct, lev - 5)
+ RESTART // used to be throw RestartException
+ } else {
+ if (tn.hc == hc && tn.k == k) tn.v.asInstanceOf[AnyRef]
+ else null
+ }
+ cleanReadOnly(tn)
+ case ln: LNode[K, V] => // 5) an l-node
+ ln.get(k).asInstanceOf[Option[AnyRef]].orNull
+ }
+ }
+
+ /** Removes the key associated with the given value.
+ *
+ * @param v if null, will remove the key irregardless of the value; otherwise removes only if binding contains that exact key and value
+ * @return null if not successful, an Option[V] indicating the previous value otherwise
+ */
+ def rec_remove(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = {
+ val m = GCAS_READ(ct) // use -Yinline!
+
+ m match {
+ case cn: CNode[K, V] =>
+ val idx = (hc >>> lev) & 0x1f
+ val bmp = cn.bitmap
+ val flag = 1 << idx
+ if ((bmp & flag) == 0) None
+ else {
+ val pos = Integer.bitCount(bmp & (flag - 1))
+ val sub = cn.array(pos)
+ val res = sub match {
+ case in: INode[K, V] =>
+ if (startgen eq in.gen) in.rec_remove(k, v, hc, lev + 5, this, startgen, ct)
+ else {
+ if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, hc, lev, parent, startgen, ct)
+ else null
+ }
+ case sn: SNode[K, V] =>
+ if (sn.hc == hc && equal(sn.k, k, ct) && (v == null || sn.v == v)) {
+ val ncn = cn.removedAt(pos, flag, gen).toContracted(lev)
+ if (GCAS(cn, ncn, ct)) Some(sn.v) else null
+ } else None
+ }
+
+ if (res == None || (res eq null)) res
+ else {
+ @tailrec def cleanParent(nonlive: AnyRef) {
+ val pm = parent.GCAS_READ(ct)
+ pm match {
+ case cn: CNode[K, V] =>
+ val idx = (hc >>> (lev - 5)) & 0x1f
+ val bmp = cn.bitmap
+ val flag = 1 << idx
+ if ((bmp & flag) == 0) {} // somebody already removed this i-node, we're done
+ else {
+ val pos = Integer.bitCount(bmp & (flag - 1))
+ val sub = cn.array(pos)
+ if (sub eq this) nonlive match {
+ case tn: TNode[K, V] =>
+ val ncn = cn.updatedAt(pos, tn.copyUntombed, gen).toContracted(lev - 5)
+ if (!parent.GCAS(cn, ncn, ct))
+ if (ct.readRoot().gen == startgen) cleanParent(nonlive)
+ }
+ }
+ case _ => // parent is no longer a cnode, we're done
+ }
+ }
+
+ if (parent ne null) { // never tomb at root
+ val n = GCAS_READ(ct)
+ if (n.isInstanceOf[TNode[_, _]])
+ cleanParent(n)
+ }
+
+ res
+ }
+ }
+ case tn: TNode[K, V] =>
+ clean(parent, ct, lev - 5)
+ null
+ case ln: LNode[K, V] =>
+ if (v == null) {
+ val optv = ln.get(k)
+ val nn = ln.removed(k, ct)
+ if (GCAS(ln, nn, ct)) optv else null
+ } else ln.get(k) match {
+ case optv @ Some(v0) if v0 == v =>
+ val nn = ln.removed(k, ct)
+ if (GCAS(ln, nn, ct)) optv else null
+ case _ => None
+ }
+ }
+ }
+
+ private def clean(nd: INode[K, V], ct: TrieMap[K, V], lev: Int) {
+ val m = nd.GCAS_READ(ct)
+ m match {
+ case cn: CNode[K, V] => nd.GCAS(cn, cn.toCompressed(ct, lev, gen), ct)
+ case _ =>
+ }
+ }
+
+ def isNullInode(ct: TrieMap[K, V]) = GCAS_READ(ct) eq null
+
+ def cachedSize(ct: TrieMap[K, V]): Int = {
+ val m = GCAS_READ(ct)
+ m.cachedSize(ct)
+ }
+
+ /* this is a quiescent method! */
+ def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match {
+ case null => "<null>"
+ case tn: TNode[_, _] => "TNode(%s, %s, %d, !)".format(tn.k, tn.v, tn.hc)
+ case cn: CNode[_, _] => cn.string(lev)
+ case ln: LNode[_, _] => ln.string(lev)
+ case x => "<elem: %s>".format(x)
+ })
+
+}
+
+
+private[concurrent] object INode {
+ val KEY_PRESENT = new AnyRef
+ val KEY_ABSENT = new AnyRef
+
+ def newRootNode[K, V] = {
+ val gen = new Gen
+ val cn = new CNode[K, V](0, new Array(0), gen)
+ new INode[K, V](cn, gen)
+ }
+}
+
+
+private[concurrent] final class FailedNode[K, V](p: MainNode[K, V]) extends MainNode[K, V] {
+ WRITE_PREV(p)
+
+ def string(lev: Int) = throw new UnsupportedOperationException
+
+ def cachedSize(ct: AnyRef): Int = throw new UnsupportedOperationException
+
+ override def toString = "FailedNode(%s)".format(p)
+}
+
+
+private[concurrent] trait KVNode[K, V] {
+ def kvPair: (K, V)
+}
+
+
+private[collection] final class SNode[K, V](final val k: K, final val v: V, final val hc: Int)
+extends BasicNode with KVNode[K, V] {
+ final def copy = new SNode(k, v, hc)
+ final def copyTombed = new TNode(k, v, hc)
+ final def copyUntombed = new SNode(k, v, hc)
+ final def kvPair = (k, v)
+ final def string(lev: Int) = (" " * lev) + "SNode(%s, %s, %x)".format(k, v, hc)
+}
+
+
+private[collection] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int)
+extends MainNode[K, V] with KVNode[K, V] {
+ final def copy = new TNode(k, v, hc)
+ final def copyTombed = new TNode(k, v, hc)
+ final def copyUntombed = new SNode(k, v, hc)
+ final def kvPair = (k, v)
+ final def cachedSize(ct: AnyRef): Int = 1
+ final def string(lev: Int) = (" " * lev) + "TNode(%s, %s, %x, !)".format(k, v, hc)
+}
+
+
+private[collection] final class LNode[K, V](final val listmap: ImmutableListMap[K, V])
+extends MainNode[K, V] {
+ def this(k: K, v: V) = this(ImmutableListMap(k -> v))
+ def this(k1: K, v1: V, k2: K, v2: V) = this(ImmutableListMap(k1 -> v1, k2 -> v2))
+ def inserted(k: K, v: V) = new LNode(listmap + ((k, v)))
+ def removed(k: K, ct: TrieMap[K, V]): MainNode[K, V] = {
+ val updmap = listmap - k
+ if (updmap.size > 1) new LNode(updmap)
+ else {
+ val (k, v) = updmap.iterator.next
+ new TNode(k, v, ct.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses
+ }
+ }
+ def get(k: K) = listmap.get(k)
+ def cachedSize(ct: AnyRef): Int = listmap.size
+ def string(lev: Int) = (" " * lev) + "LNode(%s)".format(listmap.mkString(", "))
+}
+
+
+private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[BasicNode], val gen: Gen) extends CNodeBase[K, V] {
+ // this should only be called from within read-only snapshots
+ def cachedSize(ct: AnyRef) = {
+ val currsz = READ_SIZE()
+ if (currsz != -1) currsz
+ else {
+ val sz = computeSize(ct.asInstanceOf[TrieMap[K, V]])
+ while (READ_SIZE() == -1) CAS_SIZE(-1, sz)
+ READ_SIZE()
+ }
+ }
+
+ // lends itself towards being parallelizable by choosing
+ // a random starting offset in the array
+ // => if there are concurrent size computations, they start
+ // at different positions, so they are more likely to
+ // to be independent
+ private def computeSize(ct: TrieMap[K, V]): Int = {
+ var i = 0
+ var sz = 0
+ val offset =
+ if (array.length > 0)
+ //util.Random.nextInt(array.length) /* <-- benchmarks show that this causes observable contention */
+ scala.concurrent.forkjoin.ThreadLocalRandom.current.nextInt(0, array.length)
+ else 0
+ while (i < array.length) {
+ val pos = (i + offset) % array.length
+ array(pos) match {
+ case sn: SNode[_, _] => sz += 1
+ case in: INode[K, V] => sz += in.cachedSize(ct)
+ }
+ i += 1
+ }
+ sz
+ }
+
+ def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = {
+ val len = array.length
+ val narr = new Array[BasicNode](len)
+ Array.copy(array, 0, narr, 0, len)
+ narr(pos) = nn
+ new CNode[K, V](bitmap, narr, gen)
+ }
+
+ def removedAt(pos: Int, flag: Int, gen: Gen) = {
+ val arr = array
+ val len = arr.length
+ val narr = new Array[BasicNode](len - 1)
+ Array.copy(arr, 0, narr, 0, pos)
+ Array.copy(arr, pos + 1, narr, pos, len - pos - 1)
+ new CNode[K, V](bitmap ^ flag, narr, gen)
+ }
+
+ def insertedAt(pos: Int, flag: Int, nn: BasicNode, gen: Gen) = {
+ val len = array.length
+ val bmp = bitmap
+ val narr = new Array[BasicNode](len + 1)
+ Array.copy(array, 0, narr, 0, pos)
+ narr(pos) = nn
+ Array.copy(array, pos, narr, pos + 1, len - pos)
+ new CNode[K, V](bmp | flag, narr, gen)
+ }
+
+ /** Returns a copy of this cnode such that all the i-nodes below it are copied
+ * to the specified generation `ngen`.
+ */
+ def renewed(ngen: Gen, ct: TrieMap[K, V]) = {
+ var i = 0
+ val arr = array
+ val len = arr.length
+ val narr = new Array[BasicNode](len)
+ while (i < len) {
+ arr(i) match {
+ case in: INode[K, V] => narr(i) = in.copyToGen(ngen, ct)
+ case bn: BasicNode => narr(i) = bn
+ }
+ i += 1
+ }
+ new CNode[K, V](bitmap, narr, ngen)
+ }
+
+ private def resurrect(inode: INode[K, V], inodemain: AnyRef): BasicNode = inodemain match {
+ case tn: TNode[_, _] => tn.copyUntombed
+ case _ => inode
+ }
+
+ def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match {
+ case sn: SNode[K, V] => sn.copyTombed
+ case _ => this
+ } else this
+
+ // - if the branching factor is 1 for this CNode, and the child
+ // is a tombed SNode, returns its tombed version
+ // - otherwise, if there is at least one non-null node below,
+ // returns the version of this node with at least some null-inodes
+ // removed (those existing when the op began)
+ // - if there are only null-i-nodes below, returns null
+ def toCompressed(ct: TrieMap[K, V], lev: Int, gen: Gen) = {
+ var bmp = bitmap
+ var i = 0
+ val arr = array
+ val tmparray = new Array[BasicNode](arr.length)
+ while (i < arr.length) { // construct new bitmap
+ val sub = arr(i)
+ sub match {
+ case in: INode[K, V] =>
+ val inodemain = in.gcasRead(ct)
+ assert(inodemain ne null)
+ tmparray(i) = resurrect(in, inodemain)
+ case sn: SNode[K, V] =>
+ tmparray(i) = sn
+ }
+ i += 1
+ }
+
+ new CNode[K, V](bmp, tmparray, gen).toContracted(lev)
+ }
+
+ private[concurrent] def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n"))
+
+ /* quiescently consistent - don't call concurrently to anything involving a GCAS!! */
+ private def collectElems: Seq[(K, V)] = array flatMap {
+ case sn: SNode[K, V] => Some(sn.kvPair)
+ case in: INode[K, V] => in.mainnode match {
+ case tn: TNode[K, V] => Some(tn.kvPair)
+ case ln: LNode[K, V] => ln.listmap.toList
+ case cn: CNode[K, V] => cn.collectElems
+ }
+ }
+
+ private def collectLocalElems: Seq[String] = array flatMap {
+ case sn: SNode[K, V] => Some(sn.kvPair._2.toString)
+ case in: INode[K, V] => Some(in.toString.drop(14) + "(" + in.gen + ")")
+ }
+
+ override def toString = {
+ val elems = collectLocalElems
+ "CNode(sz: %d; %s)".format(elems.size, elems.sorted.mkString(", "))
+ }
+}
+
+
+private[concurrent] object CNode {
+
+ def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen): MainNode[K, V] = if (lev < 35) {
+ val xidx = (xhc >>> lev) & 0x1f
+ val yidx = (yhc >>> lev) & 0x1f
+ val bmp = (1 << xidx) | (1 << yidx)
+ if (xidx == yidx) {
+ val subinode = new INode[K, V](gen)//(TrieMap.inodeupdater)
+ subinode.mainnode = dual(x, xhc, y, yhc, lev + 5, gen)
+ new CNode(bmp, Array(subinode), gen)
+ } else {
+ if (xidx < yidx) new CNode(bmp, Array(x, y), gen)
+ else new CNode(bmp, Array(y, x), gen)
+ }
+ } else {
+ new LNode(x.k, x.v, y.k, y.v)
+ }
+
+}
+
+
+private[concurrent] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]) {
+ @volatile var committed = false
+}
+
+
+/** A concurrent hash-trie or TrieMap is a concurrent thread-safe lock-free
+ * implementation of a hash array mapped trie. It is used to implement the
+ * concurrent map abstraction. It has particularly scalable concurrent insert
+ * and remove operations and is memory-efficient. It supports O(1), atomic,
+ * lock-free snapshots which are used to implement linearizable lock-free size,
+ * iterator and clear operations. The cost of evaluating the (lazy) snapshot is
+ * distributed across subsequent updates, thus making snapshot evaluation horizontally scalable.
+ *
+ * For details, see: http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf
+ *
+ * @author Aleksandar Prokopec
+ * @since 2.10
+ */
+ at SerialVersionUID(0L - 6402774413839597105L)
+final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[TrieMap[K, V], AnyRef], hashf: Hashing[K], ef: Equiv[K])
+extends scala.collection.concurrent.Map[K, V]
+ with scala.collection.mutable.MapLike[K, V, TrieMap[K, V]]
+ with CustomParallelizable[(K, V), ParTrieMap[K, V]]
+ with Serializable
+{
+ private var hashingobj = if (hashf.isInstanceOf[Hashing.Default[_]]) new TrieMap.MangledHashing[K] else hashf
+ private var equalityobj = ef
+ private var rootupdater = rtupd
+ def hashing = hashingobj
+ def equality = equalityobj
+ @volatile var root = r
+
+ def this(hashf: Hashing[K], ef: Equiv[K]) = this(
+ INode.newRootNode,
+ AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root"),
+ hashf,
+ ef
+ )
+
+ def this() = this(Hashing.default, Equiv.universal)
+
+ /* internal methods */
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ out.writeObject(hashf)
+ out.writeObject(ef)
+
+ val it = iterator
+ while (it.hasNext) {
+ val (k, v) = it.next()
+ out.writeObject(k)
+ out.writeObject(v)
+ }
+ out.writeObject(TrieMapSerializationEnd)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ root = INode.newRootNode
+ rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root")
+
+ hashingobj = in.readObject().asInstanceOf[Hashing[K]]
+ equalityobj = in.readObject().asInstanceOf[Equiv[K]]
+
+ var obj: AnyRef = null
+ do {
+ obj = in.readObject()
+ if (obj != TrieMapSerializationEnd) {
+ val k = obj.asInstanceOf[K]
+ val v = in.readObject().asInstanceOf[V]
+ update(k, v)
+ }
+ } while (obj != TrieMapSerializationEnd)
+ }
+
+ def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv)
+
+ def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort)
+
+ def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = {
+ val r = /*READ*/root
+ r match {
+ case in: INode[K, V] => in
+ case desc: RDCSS_Descriptor[K, V] => RDCSS_Complete(abort)
+ }
+ }
+
+ @tailrec private def RDCSS_Complete(abort: Boolean): INode[K, V] = {
+ val v = /*READ*/root
+ v match {
+ case in: INode[K, V] => in
+ case desc: RDCSS_Descriptor[K, V] =>
+ val RDCSS_Descriptor(ov, exp, nv) = desc
+ if (abort) {
+ if (CAS_ROOT(desc, ov)) ov
+ else RDCSS_Complete(abort)
+ } else {
+ val oldmain = ov.gcasRead(this)
+ if (oldmain eq exp) {
+ if (CAS_ROOT(desc, nv)) {
+ desc.committed = true
+ nv
+ } else RDCSS_Complete(abort)
+ } else {
+ if (CAS_ROOT(desc, ov)) ov
+ else RDCSS_Complete(abort)
+ }
+ }
+ }
+ }
+
+ private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = {
+ val desc = RDCSS_Descriptor(ov, expectedmain, nv)
+ if (CAS_ROOT(ov, desc)) {
+ RDCSS_Complete(false)
+ /*READ*/desc.committed
+ } else false
+ }
+
+ @tailrec private def inserthc(k: K, hc: Int, v: V) {
+ val r = RDCSS_READ_ROOT()
+ if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v)
+ }
+
+ @tailrec private def insertifhc(k: K, hc: Int, v: V, cond: AnyRef): Option[V] = {
+ val r = RDCSS_READ_ROOT()
+
+ val ret = r.rec_insertif(k, v, hc, cond, 0, null, r.gen, this)
+ if (ret eq null) insertifhc(k, hc, v, cond)
+ else ret
+ }
+
+ @tailrec private def lookuphc(k: K, hc: Int): AnyRef = {
+ val r = RDCSS_READ_ROOT()
+ val res = r.rec_lookup(k, hc, 0, null, r.gen, this)
+ if (res eq INodeBase.RESTART) lookuphc(k, hc)
+ else res
+ }
+
+ /* slower:
+ //@tailrec
+ private def lookuphc(k: K, hc: Int): AnyRef = {
+ val r = RDCSS_READ_ROOT()
+ try {
+ r.rec_lookup(k, hc, 0, null, r.gen, this)
+ } catch {
+ case RestartException =>
+ lookuphc(k, hc)
+ }
+ }
+ */
+
+ @tailrec private def removehc(k: K, v: V, hc: Int): Option[V] = {
+ val r = RDCSS_READ_ROOT()
+ val res = r.rec_remove(k, v, hc, 0, null, r.gen, this)
+ if (res ne null) res
+ else removehc(k, v, hc)
+ }
+
+ def string = RDCSS_READ_ROOT().string(0)
+
+ /* public methods */
+
+ override def seq = this
+
+ override def par = new ParTrieMap(this)
+
+ override def empty: TrieMap[K, V] = new TrieMap[K, V]
+
+ def isReadOnly = rootupdater eq null
+
+ def nonReadOnly = rootupdater ne null
+
+ /** Returns a snapshot of this TrieMap.
+ * This operation is lock-free and linearizable.
+ *
+ * The snapshot is lazily updated - the first time some branch
+ * in the snapshot or this TrieMap are accessed, they are rewritten.
+ * This means that the work of rebuilding both the snapshot and this
+ * TrieMap is distributed across all the threads doing updates or accesses
+ * subsequent to the snapshot creation.
+ */
+ @tailrec def snapshot(): TrieMap[K, V] = {
+ val r = RDCSS_READ_ROOT()
+ val expmain = r.gcasRead(this)
+ if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r.copyToGen(new Gen, this), rootupdater, hashing, equality)
+ else snapshot()
+ }
+
+ /** Returns a read-only snapshot of this TrieMap.
+ * This operation is lock-free and linearizable.
+ *
+ * The snapshot is lazily updated - the first time some branch
+ * of this TrieMap are accessed, it is rewritten. The work of creating
+ * the snapshot is thus distributed across subsequent updates
+ * and accesses on this TrieMap by all threads.
+ * Note that the snapshot itself is never rewritten unlike when calling
+ * the `snapshot` method, but the obtained snapshot cannot be modified.
+ *
+ * This method is used by other methods such as `size` and `iterator`.
+ */
+ @tailrec def readOnlySnapshot(): scala.collection.Map[K, V] = {
+ val r = RDCSS_READ_ROOT()
+ val expmain = r.gcasRead(this)
+ if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r, null, hashing, equality)
+ else readOnlySnapshot()
+ }
+
+ @tailrec override def clear() {
+ val r = RDCSS_READ_ROOT()
+ if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V])) clear()
+ }
+
+
+ def computeHash(k: K) = hashingobj.hash(k)
+
+ def lookup(k: K): V = {
+ val hc = computeHash(k)
+ lookuphc(k, hc).asInstanceOf[V]
+ }
+
+ override def apply(k: K): V = {
+ val hc = computeHash(k)
+ val res = lookuphc(k, hc)
+ if (res eq null) throw new NoSuchElementException
+ else res.asInstanceOf[V]
+ }
+
+ def get(k: K): Option[V] = {
+ val hc = computeHash(k)
+ Option(lookuphc(k, hc)).asInstanceOf[Option[V]]
+ }
+
+ override def put(key: K, value: V): Option[V] = {
+ val hc = computeHash(key)
+ insertifhc(key, hc, value, null)
+ }
+
+ override def update(k: K, v: V) {
+ val hc = computeHash(k)
+ inserthc(k, hc, v)
+ }
+
+ def +=(kv: (K, V)) = {
+ update(kv._1, kv._2)
+ this
+ }
+
+ override def remove(k: K): Option[V] = {
+ val hc = computeHash(k)
+ removehc(k, null.asInstanceOf[V], hc)
+ }
+
+ def -=(k: K) = {
+ remove(k)
+ this
+ }
+
+ def putIfAbsent(k: K, v: V): Option[V] = {
+ val hc = computeHash(k)
+ insertifhc(k, hc, v, INode.KEY_ABSENT)
+ }
+
+ def remove(k: K, v: V): Boolean = {
+ val hc = computeHash(k)
+ removehc(k, v, hc).nonEmpty
+ }
+
+ def replace(k: K, oldvalue: V, newvalue: V): Boolean = {
+ val hc = computeHash(k)
+ insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef]).nonEmpty
+ }
+
+ def replace(k: K, v: V): Option[V] = {
+ val hc = computeHash(k)
+ insertifhc(k, hc, v, INode.KEY_PRESENT)
+ }
+
+ def iterator: Iterator[(K, V)] =
+ if (nonReadOnly) readOnlySnapshot().iterator
+ else new TrieMapIterator(0, this)
+
+ private def cachedSize() = {
+ val r = RDCSS_READ_ROOT()
+ r.cachedSize(this)
+ }
+
+ override def size: Int =
+ if (nonReadOnly) readOnlySnapshot().size
+ else cachedSize()
+
+ override def stringPrefix = "TrieMap"
+
+}
+
+
+object TrieMap extends MutableMapFactory[TrieMap] {
+ val inodeupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode")
+
+ implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), TrieMap[K, V]] = new MapCanBuildFrom[K, V]
+
+ def empty[K, V]: TrieMap[K, V] = new TrieMap[K, V]
+
+ class MangledHashing[K] extends Hashing[K] {
+ def hash(k: K)= scala.util.hashing.byteswap32(k.##)
+ }
+
+}
+
+
+private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] {
+ private var stack = new Array[Array[BasicNode]](7)
+ private var stackpos = new Array[Int](7)
+ private var depth = -1
+ private var subiter: Iterator[(K, V)] = null
+ private var current: KVNode[K, V] = null
+
+ if (mustInit) initialize()
+
+ def hasNext = (current ne null) || (subiter ne null)
+
+ def next() = if (hasNext) {
+ var r: (K, V) = null
+ if (subiter ne null) {
+ r = subiter.next()
+ checkSubiter()
+ } else {
+ r = current.kvPair
+ advance()
+ }
+ r
+ } else Iterator.empty.next()
+
+ private def readin(in: INode[K, V]) = in.gcasRead(ct) match {
+ case cn: CNode[K, V] =>
+ depth += 1
+ stack(depth) = cn.array
+ stackpos(depth) = -1
+ advance()
+ case tn: TNode[K, V] =>
+ current = tn
+ case ln: LNode[K, V] =>
+ subiter = ln.listmap.iterator
+ checkSubiter()
+ case null =>
+ current = null
+ }
+
+ private def checkSubiter() = if (!subiter.hasNext) {
+ subiter = null
+ advance()
+ }
+
+ private def initialize() {
+ assert(ct.isReadOnly)
+
+ val r = ct.RDCSS_READ_ROOT()
+ readin(r)
+ }
+
+ def advance(): Unit = if (depth >= 0) {
+ val npos = stackpos(depth) + 1
+ if (npos < stack(depth).length) {
+ stackpos(depth) = npos
+ stack(depth)(npos) match {
+ case sn: SNode[K, V] =>
+ current = sn
+ case in: INode[K, V] =>
+ readin(in)
+ }
+ } else {
+ depth -= 1
+ advance()
+ }
+ } else current = null
+
+ protected def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean) = new TrieMapIterator[K, V](_lev, _ct, _mustInit)
+
+ protected def dupTo(it: TrieMapIterator[K, V]) = {
+ it.level = this.level
+ it.ct = this.ct
+ it.depth = this.depth
+ it.current = this.current
+
+ // these need a deep copy
+ Array.copy(this.stack, 0, it.stack, 0, 7)
+ Array.copy(this.stackpos, 0, it.stackpos, 0, 7)
+
+ // this one needs to be evaluated
+ if (this.subiter == null) it.subiter = null
+ else {
+ val lst = this.subiter.toList
+ this.subiter = lst.iterator
+ it.subiter = lst.iterator
+ }
+ }
+
+ /** Returns a sequence of iterators over subsets of this iterator.
+ * It's used to ease the implementation of splitters for a parallel version of the TrieMap.
+ */
+ protected def subdivide(): Seq[Iterator[(K, V)]] = if (subiter ne null) {
+ // the case where an LNode is being iterated
+ val it = newIterator(level + 1, ct, _mustInit = false)
+ it.depth = -1
+ it.subiter = this.subiter
+ it.current = null
+ this.subiter = null
+ advance()
+ this.level += 1
+ Seq(it, this)
+ } else if (depth == -1) {
+ this.level += 1
+ Seq(this)
+ } else {
+ var d = 0
+ while (d <= depth) {
+ val rem = stack(d).length - 1 - stackpos(d)
+ if (rem > 0) {
+ val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2)
+ stack(d) = arr1
+ stackpos(d) = -1
+ val it = newIterator(level + 1, ct, false)
+ it.stack(0) = arr2
+ it.stackpos(0) = -1
+ it.depth = 0
+ it.advance() // <-- fix it
+ this.level += 1
+ return Seq(this, it)
+ }
+ d += 1
+ }
+ this.level += 1
+ Seq(this)
+ }
+
+ def printDebug() {
+ println("ctrie iterator")
+ println(stackpos.mkString(","))
+ println("depth: " + depth)
+ println("curr.: " + current)
+ println(stack.mkString("\n"))
+ }
+
+}
+
+
+private[concurrent] object RestartException extends ControlThrowable
+
+
+/** Only used for ctrie serialization. */
+@SerialVersionUID(0L - 7237891413820527142L)
+private[concurrent] case object TrieMapSerializationEnd
+
+
+private[concurrent] object Debug {
+ import scala.collection._
+
+ lazy val logbuffer = new java.util.concurrent.ConcurrentLinkedQueue[AnyRef]
+
+ def log(s: AnyRef) = logbuffer.add(s)
+
+ def flush() {
+ for (s <- JavaConversions.asScalaIterator(logbuffer.iterator())) Console.out.println(s.toString)
+ logbuffer.clear()
+ }
+
+ def clear() {
+ logbuffer.clear()
+ }
+
+}
diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala
new file mode 100644
index 0000000..87bcae3
--- /dev/null
+++ b/src/library/scala/collection/convert/DecorateAsJava.scala
@@ -0,0 +1,318 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import Decorators._
+import WrapAsJava._
+import scala.language.implicitConversions
+
+
+/** A collection of decorators that allow to convert between
+ * Scala and Java collections using `asScala` and `asJava` methods.
+ *
+ * The following conversions are supported via `asJava`, `asScala`
+ *
+ * - `scala.collection.Iterable` <=> `java.lang.Iterable`
+ * - `scala.collection.Iterator` <=> `java.util.Iterator`
+ * - `scala.collection.mutable.Buffer` <=> `java.util.List`
+ * - `scala.collection.mutable.Set` <=> `java.util.Set`
+ * - `scala.collection.mutable.Map` <=> `java.util.Map`
+ * - `scala.collection.mutable.ConcurrentMap` <=> `java.util.concurrent.ConcurrentMap`
+ *
+ * In all cases, converting from a source type to a target type and back
+ * again will return the original source object, e.g.
+ * {{{
+ * import scala.collection.JavaConverters._
+ *
+ * val sl = new scala.collection.mutable.ListBuffer[Int]
+ * val jl : java.util.List[Int] = sl.asJava
+ * val sl2 : scala.collection.mutable.Buffer[Int] = jl.asScala
+ * assert(sl eq sl2)
+ * }}}
+ * The following conversions also are supported, but the
+ * direction Scala to Java is done by a more specifically named method:
+ * `asJavaCollection`, `asJavaEnumeration`, `asJavaDictionary`.
+ *
+ * - `scala.collection.Iterable` <=> `java.util.Collection`
+ * - `scala.collection.Iterator` <=> `java.util.Enumeration`
+ * - `scala.collection.mutable.Map` <=> `java.util.Dictionary`
+ *
+ * In addition, the following one way conversions are provided via `asJava`:
+ *
+ * - `scala.collection.Seq` => `java.util.List`
+ * - `scala.collection.mutable.Seq` => `java.util.List`
+ * - `scala.collection.Set` => `java.util.Set`
+ * - `scala.collection.Map` => `java.util.Map`
+ *
+ * @author Martin Odersky
+ * @since 2.8.1
+ */
+
+trait DecorateAsJava {
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a
+ * Java `Iterator`. The returned Java `Iterator` is backed by the provided Scala
+ * `Iterator` and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Iterator` was previously obtained from an implicit or explicit
+ * call of `asIterator(java.util.Iterator)` then the original Java `Iterator`
+ * will be returned by the `asJava` method.
+ *
+ * @param i The `Iterator` to be converted.
+ * @return An object with an `asJava` method that returns a Java `Iterator` view of the argument.
+ */
+ implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] =
+ new AsJava(asJavaIterator(i))
+
+ /**
+ * Adds an `asJavaEnumeration` method that implicitly converts a Scala
+ * `Iterator` to a Java `Enumeration`. The returned Java `Enumeration` is
+ * backed by the provided Scala `Iterator` and any side-effects of using
+ * it via the Java interface will be visible via the Scala interface and
+ * vice versa.
+ *
+ * If the Scala `Iterator` was previously obtained from an implicit or
+ * explicit call of `asIterator(java.util.Enumeration)` then the
+ * original Java `Enumeration` will be returned.
+ *
+ * @param i The `Iterator` to be converted.
+ * @return An object with an `asJavaEnumeration` method that returns a Java
+ * `Enumeration` view of the argument.
+ */
+ implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] =
+ new AsJavaEnumeration(i)
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Iterable` to
+ * a Java `Iterable`.
+ *
+ * The returned Java `Iterable` is backed by the provided Scala `Iterable`
+ * and any side-effects of using it via the Java interface will be visible
+ * via the Scala interface and vice versa.
+ *
+ * If the Scala `Iterable` was previously obtained from an implicit or
+ * explicit call of `asIterable(java.lang.Iterable)` then the original
+ * Java `Iterable` will be returned.
+ *
+ * @param i The `Iterable` to be converted.
+ * @return An object with an `asJavaCollection` method that returns a Java
+ * `Iterable` view of the argument.
+ */
+ implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] =
+ new AsJava(asJavaIterable(i))
+
+ /**
+ * Adds an `asJavaCollection` method that implicitly converts a Scala
+ * `Iterable` to an immutable Java `Collection`.
+ *
+ * If the Scala `Iterable` was previously obtained from an implicit or
+ * explicit call of `asSizedIterable(java.util.Collection)` then the
+ * original Java `Collection` will be returned.
+ *
+ * @param i The `SizedIterable` to be converted.
+ * @return An object with an `asJava` method that returns a Java
+ * `Collection` view of the argument.
+ */
+ implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] =
+ new AsJavaCollection(i)
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer`
+ * to a Java `List`.
+ *
+ * The returned Java `List` is backed by the provided Scala `Buffer` and any
+ * side-effects of using it via the Java interface will be visible via the
+ * Scala interface and vice versa.
+ *
+ * If the Scala `Buffer` was previously obtained from an implicit or explicit
+ * call of `asBuffer(java.util.List)` then the original Java `List` will be
+ * returned.
+ *
+ * @param b The `Buffer` to be converted.
+ * @return An object with an `asJava` method that returns a Java `List` view
+ * of the argument.
+ */
+ implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] =
+ new AsJava(bufferAsJavaList(b))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Seq`
+ * to a Java `List`.
+ *
+ * The returned Java `List` is backed by the provided Scala `Seq` and any
+ * side-effects of using it via the Java interface will be visible via the
+ * Scala interface and vice versa.
+ *
+ * If the Scala `Seq` was previously obtained from an implicit or explicit
+ * call of `asSeq(java.util.List)` then the original Java `List` will be
+ * returned.
+ *
+ * @param b The `Seq` to be converted.
+ * @return An object with an `asJava` method that returns a Java `List`
+ * view of the argument.
+ */
+ implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] =
+ new AsJava(mutableSeqAsJavaList(b))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Seq` to a
+ * Java `List`.
+ *
+ * The returned Java `List` is backed by the provided Scala `Seq` and any
+ * side-effects of using it via the Java interface will be visible via the
+ * Scala interface and vice versa.
+ *
+ * If the Scala `Seq` was previously obtained from an implicit or explicit
+ * call of `asSeq(java.util.List)` then the original Java `List` will be
+ * returned.
+ *
+ * @param b The `Seq` to be converted.
+ * @return An object with an `asJava` method that returns a Java `List`
+ * view of the argument.
+ */
+ implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] =
+ new AsJava(seqAsJavaList(b))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Set`>
+ * to a Java `Set`.
+ *
+ * The returned Java `Set` is backed by the provided Scala `Set` and any
+ * side-effects of using it via the Java interface will be visible via
+ * the Scala interface and vice versa.
+ *
+ * If the Scala `Set` was previously obtained from an implicit or explicit
+ * call of `asSet(java.util.Set)` then the original Java `Set` will be
+ * returned.
+ *
+ * @param s The `Set` to be converted.
+ * @return An object with an `asJava` method that returns a Java `Set` view
+ * of the argument.
+ */
+ implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] =
+ new AsJava(mutableSetAsJavaSet(s))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Set` to a
+ * Java `Set`.
+ *
+ * The returned Java `Set` is backed by the provided Scala `Set` and any
+ * side-effects of using it via the Java interface will be visible via
+ * the Scala interface and vice versa.
+ *
+ * If the Scala `Set` was previously obtained from an implicit or explicit
+ * call of `asSet(java.util.Set)` then the original Java `Set` will be
+ * returned.
+ *
+ * @param s The `Set` to be converted.
+ * @return An object with an `asJava` method that returns a Java `Set` view
+ * of the argument.
+ */
+ implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] =
+ new AsJava(setAsJavaSet(s))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Map`
+ * to a Java `Map`.
+ *
+ * The returned Java `Map` is backed by the provided Scala `Map` and any
+ * side-effects of using it via the Java interface will be visible via the
+ * Scala interface and vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or explicit
+ * call of `asMap(java.util.Map)` then the original Java `Map` will be
+ * returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return An object with an `asJava` method that returns a Java `Map` view
+ * of the argument.
+ */
+ implicit def mutableMapAsJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] =
+ new AsJava(mutableMapAsJavaMap(m))
+
+ /**
+ * Adds an `asJavaDictionary` method that implicitly converts a Scala
+ * mutable `Map` to a Java `Dictionary`.
+ *
+ * The returned Java `Dictionary` is backed by the provided Scala
+ * `Dictionary` and any side-effects of using it via the Java interface
+ * will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Dictionary` was previously obtained from an implicit or
+ * explicit call of `asMap(java.util.Dictionary)` then the original
+ * Java `Dictionary` will be returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return An object with an `asJavaDictionary` method that returns a
+ * Java `Dictionary` view of the argument.
+ */
+ implicit def asJavaDictionaryConverter[A, B](m : mutable.Map[A, B]): AsJavaDictionary[A, B] =
+ new AsJavaDictionary(m)
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Map` to
+ * a Java `Map`.
+ *
+ * The returned Java `Map` is backed by the provided Scala `Map` and any
+ * side-effects of using it via the Java interface will be visible via
+ * the Scala interface and vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or explicit
+ * call of `asMap(java.util.Map)` then the original Java `Map` will be
+ * returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return An object with an `asJava` method that returns a Java `Map` view
+ * of the argument.
+ */
+ implicit def mapAsJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] =
+ new AsJava(mapAsJavaMap(m))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable
+ * `ConcurrentMap` to a Java `ConcurrentMap`.
+ *
+ * The returned Java `ConcurrentMap` is backed by the provided Scala
+ * `ConcurrentMap` and any side-effects of using it via the Java interface
+ * will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `ConcurrentMap` was previously obtained from an implicit or
+ * explicit call of `asConcurrentMap(java.util.concurrent.ConcurrentMap)`
+ * then the original Java `ConcurrentMap` will be returned.
+ *
+ * @param m The `ConcurrentMap` to be converted.
+ * @return An object with an `asJava` method that returns a Java
+ * `ConcurrentMap` view of the argument.
+ */
+ @deprecated("Use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
+ implicit def asJavaConcurrentMapConverter[A, B](m: mutable.ConcurrentMap[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
+ new AsJava(asJavaConcurrentMap(m))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable
+ * `concurrent.Map` to a Java `ConcurrentMap`.
+ *
+ * The returned Java `ConcurrentMap` is backed by the provided Scala
+ * `concurrent.Map` and any side-effects of using it via the Java interface
+ * will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `concurrent.Map` was previously obtained from an implicit or
+ * explicit call of `asConcurrentMap(java.util.concurrent.ConcurrentMap)`
+ * then the original Java `ConcurrentMap` will be returned.
+ *
+ * @param m The Scala `concurrent.Map` to be converted.
+ * @return An object with an `asJava` method that returns a Java
+ * `ConcurrentMap` view of the argument.
+ */
+ implicit def mapAsJavaConcurrentMapConverter[A, B](m: concurrent.Map[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
+ new AsJava(mapAsJavaConcurrentMap(m))
+}
diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala
new file mode 100644
index 0000000..94847a7
--- /dev/null
+++ b/src/library/scala/collection/convert/DecorateAsScala.scala
@@ -0,0 +1,209 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import Decorators._
+import WrapAsScala._
+import scala.language.implicitConversions
+
+trait DecorateAsScala {
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Iterator` to
+ * a Scala `Iterator`.
+ *
+ * The returned Scala `Iterator` is backed by the provided Java `Iterator`
+ * and any side-effects of using it via the Scala interface will be visible
+ * via the Java interface and vice versa.
+ *
+ * If the Java `Iterator` was previously obtained from an implicit or
+ * explicit call of `asIterator(scala.collection.Iterator)` then the
+ * original Scala `Iterator` will be returned.
+ *
+ * @param i The `Iterator` to be converted.
+ * @return An object with an `asScala` method that returns a Scala
+ * `Iterator` view of the argument.
+ */
+ implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] =
+ new AsScala(asScalaIterator(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Enumeration`
+ * to a Scala `Iterator`.
+ *
+ * The returned Scala `Iterator` is backed by the provided Java
+ * `Enumeration` and any side-effects of using it via the Scala interface
+ * will be visible via the Java interface and vice versa.
+ *
+ * If the Java `Enumeration` was previously obtained from an implicit or
+ * explicit call of `asEnumeration(scala.collection.Iterator)` then the
+ * original Scala `Iterator` will be returned.
+ *
+ * @param i The `Enumeration` to be converted.
+ * @return An object with an `asScala` method that returns a Scala
+ * `Iterator` view of the argument.
+ */
+ implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] =
+ new AsScala(enumerationAsScalaIterator(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Iterable` to
+ * a Scala `Iterable`.
+ *
+ * The returned Scala `Iterable` is backed by the provided Java `Iterable`
+ * and any side-effects of using it via the Scala interface will be visible
+ * via the Java interface and vice versa.
+ *
+ * If the Java `Iterable` was previously obtained from an implicit or
+ * explicit call of `asIterable(scala.collection.Iterable)` then the original
+ * Scala `Iterable` will be returned.
+ *
+ * @param i The `Iterable` to be converted.
+ * @return An object with an `asScala` method that returns a Scala `Iterable`
+ * view of the argument.
+ */
+ implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] =
+ new AsScala(iterableAsScalaIterable(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Collection` to
+ * a Scala `Iterable`.
+ *
+ * If the Java `Collection` was previously obtained from an implicit or
+ * explicit call of `asCollection(scala.collection.SizedIterable)` then
+ * the original Scala `SizedIterable` will be returned.
+ *
+ * @param i The `Collection` to be converted.
+ * @return An object with an `asScala` method that returns a Scala
+ * `SizedIterable` view of the argument.
+ */
+ implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] =
+ new AsScala(collectionAsScalaIterable(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `List` to a
+ * Scala mutable `Buffer`.
+ *
+ * The returned Scala `Buffer` is backed by the provided Java `List` and
+ * any side-effects of using it via the Scala interface will be visible via
+ * the Java interface and vice versa.
+ *
+ * If the Java `List` was previously obtained from an implicit or explicit
+ * call of `asList(scala.collection.mutable.Buffer)` then the original
+ * Scala `Buffer` will be returned.
+ *
+ * @param l The `List` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Buffer` view of the argument.
+ */
+ implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] =
+ new AsScala(asScalaBuffer(l))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Set` to a
+ * Scala mutable `Set`.
+ *
+ * The returned Scala `Set` is backed by the provided Java `Set` and any
+ * side-effects of using it via the Scala interface will be visible via
+ * the Java interface and vice versa.
+ *
+ * If the Java `Set` was previously obtained from an implicit or explicit
+ * call of `asSet(scala.collection.mutable.Set)` then the original
+ * Scala `Set` will be returned.
+ *
+ * @param s The `Set` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Set` view of the argument.
+ */
+ implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] =
+ new AsScala(asScalaSet(s))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala
+ * mutable `Map`. The returned Scala `Map` is backed by the provided Java
+ * `Map` and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java `Map` was previously obtained from an implicit or explicit
+ * call of `asMap(scala.collection.mutable.Map)` then the original
+ * Scala `Map` will be returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Map` view of the argument.
+ */
+ implicit def mapAsScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] =
+ new AsScala(mapAsScalaMap(m))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap`
+ * to a Scala mutable `ConcurrentMap`. The returned Scala `ConcurrentMap` is
+ * backed by the provided Java `ConcurrentMap` and any side-effects of using
+ * it via the Scala interface will be visible via the Java interface and
+ * vice versa.
+ *
+ * If the Java `ConcurrentMap` was previously obtained from an implicit or
+ * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
+ * then the original Scala `ConcurrentMap` will be returned.
+ *
+ * @param m The `ConcurrentMap` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `ConcurrentMap` view of the argument.
+ */
+ @deprecated("Use `mapAsScalaConcurrentMapConverter` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
+ def asScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[mutable.ConcurrentMap[A, B]] =
+ new AsScala(asScalaConcurrentMap(m))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap`
+ * to a Scala mutable `concurrent.Map`. The returned Scala `concurrent.Map` is
+ * backed by the provided Java `ConcurrentMap` and any side-effects of using
+ * it via the Scala interface will be visible via the Java interface and
+ * vice versa.
+ *
+ * If the Java `ConcurrentMap` was previously obtained from an implicit or
+ * explicit call of `mapAsScalaConcurrentMap(scala.collection.mutable.ConcurrentMap)`
+ * then the original Scala `concurrent.Map` will be returned.
+ *
+ * @param m The `ConcurrentMap` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `concurrent.Map` view of the argument.
+ */
+ implicit def mapAsScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[concurrent.Map[A, B]] =
+ new AsScala(mapAsScalaConcurrentMap(m))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Dictionary`
+ * to a Scala mutable `Map[String, String]`. The returned Scala
+ * `Map[String, String]` is backed by the provided Java `Dictionary` and
+ * any side-effects of using it via the Scala interface will be visible via
+ * the Java interface and vice versa.
+ *
+ * @param p The `Dictionary` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Map[String, String]` view of the argument.
+ */
+ implicit def dictionaryAsScalaMapConverter[A, B](p: ju.Dictionary[A, B]): AsScala[mutable.Map[A, B]] =
+ new AsScala(dictionaryAsScalaMap(p))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Properties`
+ * to a Scala mutable `Map[String, String]`. The returned Scala
+ * `Map[String, String]` is backed by the provided Java `Properties` and
+ * any side-effects of using it via the Scala interface will be visible via
+ * the Java interface and vice versa.
+ *
+ * @param p The `Properties` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Map[String, String]` view of the argument.
+ */
+ implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] =
+ new AsScala(propertiesAsScalaMap(p))
+}
diff --git a/src/library/scala/collection/convert/Decorators.scala b/src/library/scala/collection/convert/Decorators.scala
new file mode 100644
index 0000000..e2c46c1
--- /dev/null
+++ b/src/library/scala/collection/convert/Decorators.scala
@@ -0,0 +1,46 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+
+private[collection] trait Decorators {
+ /** Generic class containing the `asJava` converter method */
+ class AsJava[A](op: => A) {
+ /** Converts a Scala collection to the corresponding Java collection */
+ def asJava: A = op
+ }
+
+ /** Generic class containing the `asScala` converter method */
+ class AsScala[A](op: => A) {
+ /** Converts a Java collection to the corresponding Scala collection */
+ def asScala: A = op
+ }
+
+ /** Generic class containing the `asJavaCollection` converter method */
+ class AsJavaCollection[A](i: Iterable[A]) {
+ /** Converts a Scala `Iterable` to a Java `Collection` */
+ def asJavaCollection: ju.Collection[A] = JavaConversions.asJavaCollection(i)
+ }
+
+ /** Generic class containing the `asJavaEnumeration` converter method */
+ class AsJavaEnumeration[A](i: Iterator[A]) {
+ /** Converts a Scala `Iterator` to a Java `Enumeration` */
+ def asJavaEnumeration: ju.Enumeration[A] = JavaConversions.asJavaEnumeration(i)
+ }
+
+ /** Generic class containing the `asJavaDictionary` converter method */
+ class AsJavaDictionary[A, B](m : mutable.Map[A, B]) {
+ /** Converts a Scala `Map` to a Java `Dictionary` */
+ def asJavaDictionary: ju.Dictionary[A, B] = JavaConversions.asJavaDictionary(m)
+ }
+}
+
+private[collection] object Decorators extends Decorators
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
new file mode 100644
index 0000000..5e6126a
--- /dev/null
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -0,0 +1,287 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import scala.language.implicitConversions
+
+trait WrapAsJava {
+ import Wrappers._
+
+ /**
+ * Implicitly converts a Scala Iterator to a Java Iterator.
+ * The returned Java Iterator is backed by the provided Scala
+ * Iterator and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Iterator was previously obtained from an implicit or
+ * explicit call of `asIterator(java.util.Iterator)` then the original
+ * Java Iterator will be returned.
+ *
+ * @param it The Iterator to be converted.
+ * @return A Java Iterator view of the argument.
+ */
+ implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match {
+ case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
+ case _ => IteratorWrapper(it)
+ }
+
+ /**
+ * Implicitly converts a Scala Iterator to a Java Enumeration.
+ * The returned Java Enumeration is backed by the provided Scala
+ * Iterator and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Iterator was previously obtained from an implicit or
+ * explicit call of `asIterator(java.util.Enumeration)` then the
+ * original Java Enumeration will be returned.
+ *
+ * @param it The Iterator to be converted.
+ * @return A Java Enumeration view of the argument.
+ */
+ implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match {
+ case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]]
+ case _ => IteratorWrapper(it)
+ }
+
+ /**
+ * Implicitly converts a Scala Iterable to a Java Iterable.
+ * The returned Java Iterable is backed by the provided Scala
+ * Iterable and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Iterable was previously obtained from an implicit or
+ * explicit call of `asIterable(java.lang.Iterable)` then the original
+ * Java Iterable will be returned.
+ *
+ * @param i The Iterable to be converted.
+ * @return A Java Iterable view of the argument.
+ */
+ implicit def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match {
+ case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
+ case _ => IterableWrapper(i)
+ }
+
+ /**
+ * Implicitly converts a Scala Iterable to an immutable Java
+ * Collection.
+ *
+ * If the Scala Iterable was previously obtained from an implicit or
+ * explicit call of `asSizedIterable(java.util.Collection)` then the original
+ * Java Collection will be returned.
+ *
+ * @param it The SizedIterable to be converted.
+ * @return A Java Collection view of the argument.
+ */
+ implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match {
+ case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
+ case _ => new IterableWrapper(it)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable Buffer to a Java List.
+ * The returned Java List is backed by the provided Scala
+ * Buffer and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Buffer was previously obtained from an implicit or
+ * explicit call of `asBuffer(java.util.List)` then the original
+ * Java List will be returned.
+ *
+ * @param b The Buffer to be converted.
+ * @return A Java List view of the argument.
+ */
+ implicit def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match {
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableBufferWrapper(b)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable Seq to a Java List.
+ * The returned Java List is backed by the provided Scala
+ * Seq and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Seq was previously obtained from an implicit or
+ * explicit call of `asSeq(java.util.List)` then the original
+ * Java List will be returned.
+ *
+ * @param seq The Seq to be converted.
+ * @return A Java List view of the argument.
+ */
+ implicit def mutableSeqAsJavaList[A](seq: mutable.Seq[A]): ju.List[A] = seq match {
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableSeqWrapper(seq)
+ }
+
+ /**
+ * Implicitly converts a Scala Seq to a Java List.
+ * The returned Java List is backed by the provided Scala
+ * Seq and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Seq was previously obtained from an implicit or
+ * explicit call of `asSeq(java.util.List)` then the original
+ * Java List will be returned.
+ *
+ * @param seq The Seq to be converted.
+ * @return A Java List view of the argument.
+ */
+ implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match {
+ case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
+ case _ => new SeqWrapper(seq)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable Set to a Java Set.
+ * The returned Java Set is backed by the provided Scala
+ * Set and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Set was previously obtained from an implicit or
+ * explicit call of `asSet(java.util.Set)` then the original
+ * Java Set will be returned.
+ *
+ * @param s The Set to be converted.
+ * @return A Java Set view of the argument.
+ */
+ implicit def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match {
+ case JSetWrapper(wrapped) => wrapped
+ case _ => new MutableSetWrapper(s)
+ }
+
+ /**
+ * Implicitly converts a Scala Set to a Java Set.
+ * The returned Java Set is backed by the provided Scala
+ * Set and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Set was previously obtained from an implicit or
+ * explicit call of asSet(java.util.Set) then the original
+ * Java Set will be returned.
+ *
+ * @param s The Set to be converted.
+ * @return A Java Set view of the argument.
+ */
+ implicit def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match {
+ case JSetWrapper(wrapped) => wrapped
+ case _ => new SetWrapper(s)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable Map to a Java Map.
+ * The returned Java Map is backed by the provided Scala
+ * Map and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Map was previously obtained from an implicit or
+ * explicit call of `asMap(java.util.Map)` then the original
+ * Java Map will be returned.
+ *
+ * @param m The Map to be converted.
+ * @return A Java Map view of the argument.
+ */
+ implicit def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match {
+ //case JConcurrentMapWrapper(wrapped) => wrapped
+ case JMapWrapper(wrapped) => wrapped
+ case _ => new MutableMapWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable `Map` to a Java `Dictionary`.
+ *
+ * The returned Java `Dictionary` is backed by the provided Scala
+ * `Dictionary` and any side-effects of using it via the Java interface
+ * will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Dictionary` was previously obtained from an implicit or
+ * explicit call of `asMap(java.util.Dictionary)` then the original
+ * Java Dictionary will be returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return A Java `Dictionary` view of the argument.
+ */
+ implicit def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match {
+ //case JConcurrentMapWrapper(wrapped) => wrapped
+ case JDictionaryWrapper(wrapped) => wrapped
+ case _ => new DictionaryWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Scala `Map` to a Java `Map`.
+ *
+ * The returned Java `Map` is backed by the provided Scala `Map` and
+ * any side-effects of using it via the Java interface will be visible
+ * via the Scala interface and vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or
+ * explicit call of `asMap(java.util.Map)` then the original
+ * Java `Map` will be returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return A Java `Map` view of the argument.
+ */
+ implicit def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match {
+ //case JConcurrentMapWrapper(wrapped) => wrapped
+ case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]]
+ case _ => new MapWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable `ConcurrentMap` to a Java
+ * `ConcurrentMap`.
+ *
+ * The returned Java `ConcurrentMap` is backed by the provided Scala
+ * `ConcurrentMap` and any side-effects of using it via the Java interface
+ * will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `ConcurrentMap` was previously obtained from an implicit or
+ * explicit call of `asScalaConcurrentMap(java.util.concurrent.ConcurrentMap)`
+ * then the original Java ConcurrentMap will be returned.
+ *
+ * @param m The `ConcurrentMap` to be converted.
+ * @return A Java `ConcurrentMap` view of the argument.
+ */
+ @deprecated("Use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
+ implicit def asJavaConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = m match {
+ case JConcurrentMapDeprecatedWrapper(wrapped) => wrapped
+ case _ => new ConcurrentMapDeprecatedWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable `concurrent.Map` to a Java
+ * `ConcurrentMap`.
+ *
+ * The returned Java `ConcurrentMap` is backed by the provided Scala
+ * `concurrent.Map` and any side-effects of using it via the Java interface
+ * will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `concurrent.Map` was previously obtained from an implicit or
+ * explicit call of `mapAsScalaConcurrentMap(java.util.concurrent.ConcurrentMap)`
+ * then the original Java ConcurrentMap will be returned.
+ *
+ * @param m The Scala `concurrent.Map` to be converted.
+ * @return A Java `ConcurrentMap` view of the argument.
+ */
+ implicit def mapAsJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match {
+ case JConcurrentMapWrapper(wrapped) => wrapped
+ case _ => new ConcurrentMapWrapper(m)
+ }
+}
+
+object WrapAsJava extends WrapAsJava { }
+
+
+
+
+
+
+
+
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
new file mode 100644
index 0000000..ffcca62
--- /dev/null
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -0,0 +1,236 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import scala.language.implicitConversions
+
+trait LowPriorityWrapAsScala {
+ this: WrapAsScala =>
+
+ import Wrappers._
+
+ /**
+ * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap.
+ * The returned Scala ConcurrentMap is backed by the provided Java
+ * ConcurrentMap and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java ConcurrentMap was previously obtained from an implicit or
+ * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
+ * then the original Scala ConcurrentMap will be returned.
+ *
+ * @param m The ConcurrentMap to be converted.
+ * @return A Scala mutable ConcurrentMap view of the argument.
+ */
+ @deprecated("Use `mapAsScalaConcurrentMap` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
+ implicit def mapAsScalaDeprecatedConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] =
+ asScalaConcurrentMap(m)
+}
+
+trait WrapAsScala extends LowPriorityWrapAsScala {
+ import Wrappers._
+ /**
+ * Implicitly converts a Java `Iterator` to a Scala `Iterator`.
+ *
+ * The returned Scala `Iterator` is backed by the provided Java `Iterator`
+ * and any side-effects of using it via the Scala interface will be visible
+ * via the Java interface and vice versa.
+ *
+ * If the Java `Iterator` was previously obtained from an implicit or
+ * explicit call of `asIterator(scala.collection.Iterator)` then the
+ * original Scala `Iterator` will be returned.
+ *
+ * @param it The `Iterator` to be converted.
+ * @return A Scala `Iterator` view of the argument.
+ */
+ implicit def asScalaIterator[A](it: ju.Iterator[A]): Iterator[A] = it match {
+ case IteratorWrapper(wrapped) => wrapped
+ case _ => JIteratorWrapper(it)
+ }
+
+ /**
+ * Implicitly converts a Java Enumeration to a Scala Iterator.
+ * The returned Scala Iterator is backed by the provided Java
+ * Enumeration and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java Enumeration was previously obtained from an implicit or
+ * explicit call of `enumerationAsScalaIterator(scala.collection.Iterator)`
+ * then the original Scala Iterator will be returned.
+ *
+ * @param i The Enumeration to be converted.
+ * @return A Scala Iterator view of the argument.
+ */
+ implicit def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match {
+ case IteratorWrapper(wrapped) => wrapped
+ case _ => JEnumerationWrapper(i)
+ }
+
+ /**
+ * Implicitly converts a Java `Iterable` to a Scala `Iterable`.
+ *
+ * The returned Scala `Iterable` is backed by the provided Java `Iterable`
+ * and any side-effects of using it via the Scala interface will be visible
+ * via the Java interface and vice versa.
+ *
+ * If the Java `Iterable` was previously obtained from an implicit or
+ * explicit call of `iterableAsScalaIterable(scala.collection.Iterable)`
+ * then the original Scala Iterable will be returned.
+ *
+ * @param i The Iterable to be converted.
+ * @return A Scala Iterable view of the argument.
+ */
+ implicit def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match {
+ case IterableWrapper(wrapped) => wrapped
+ case _ => JIterableWrapper(i)
+ }
+
+ /**
+ * Implicitly converts a Java `Collection` to a Scala `Iterable`.
+ *
+ * If the Java `Collection` was previously obtained from an implicit or
+ * explicit call of `collectionAsScalaIterable(scala.collection.SizedIterable)`
+ * then the original Scala `Iterable` will be returned.
+ *
+ * @param i The Collection to be converted.
+ * @return A Scala Iterable view of the argument.
+ */
+ implicit def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match {
+ case IterableWrapper(wrapped) => wrapped
+ case _ => JCollectionWrapper(i)
+ }
+
+ /**
+ * Implicitly converts a Java `List` to a Scala mutable `Buffer`.
+ *
+ * The returned Scala `Buffer` is backed by the provided Java `List`
+ * and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java `List` was previously obtained from an implicit or
+ * explicit call of `asScalaBuffer(scala.collection.mutable.Buffer)`
+ * then the original Scala `Buffer` will be returned.
+ *
+ * @param l The `List` to be converted.
+ * @return A Scala mutable `Buffer` view of the argument.
+ */
+ implicit def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match {
+ case MutableBufferWrapper(wrapped) => wrapped
+ case _ =>new JListWrapper(l)
+ }
+
+ /**
+ * Implicitly converts a Java Set to a Scala mutable Set.
+ * The returned Scala Set is backed by the provided Java
+ * Set and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java Set was previously obtained from an implicit or
+ * explicit call of `asScalaSet(scala.collection.mutable.Set)` then
+ * the original Scala Set will be returned.
+ *
+ * @param s The Set to be converted.
+ * @return A Scala mutable Set view of the argument.
+ */
+ implicit def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match {
+ case MutableSetWrapper(wrapped) => wrapped
+ case _ =>new JSetWrapper(s)
+ }
+
+ /**
+ * Implicitly converts a Java `Map` to a Scala mutable `Map`.
+ *
+ * The returned Scala `Map` is backed by the provided Java `Map` and any
+ * side-effects of using it via the Scala interface will be visible via
+ * the Java interface and vice versa.
+ *
+ * If the Java `Map` was previously obtained from an implicit or
+ * explicit call of `mapAsScalaMap(scala.collection.mutable.Map)` then
+ * the original Scala Map will be returned.
+ *
+ * @param m The Map to be converted.
+ * @return A Scala mutable Map view of the argument.
+ */
+ implicit def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match {
+ //case ConcurrentMapWrapper(wrapped) => wrapped
+ case MutableMapWrapper(wrapped) => wrapped
+ case _ => new JMapWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap.
+ * The returned Scala ConcurrentMap is backed by the provided Java
+ * ConcurrentMap and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java ConcurrentMap was previously obtained from an implicit or
+ * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
+ * then the original Scala ConcurrentMap will be returned.
+ *
+ * @param m The ConcurrentMap to be converted.
+ * @return A Scala mutable ConcurrentMap view of the argument.
+ */
+ @deprecated("Use `mapAsScalaConcurrentMap` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
+ def asScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = m match {
+ case cmw: ConcurrentMapDeprecatedWrapper[a, b] => cmw.underlying
+ case _ => new JConcurrentMapDeprecatedWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap.
+ * The returned Scala ConcurrentMap is backed by the provided Java
+ * ConcurrentMap and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java ConcurrentMap was previously obtained from an implicit or
+ * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
+ * then the original Scala ConcurrentMap will be returned.
+ *
+ * @param m The ConcurrentMap to be converted.
+ * @return A Scala mutable ConcurrentMap view of the argument.
+ */
+ implicit def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = m match {
+ case cmw: ConcurrentMapWrapper[a, b] => cmw.underlying
+ case _ => new JConcurrentMapWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Java `Dictionary` to a Scala mutable
+ * `Map[String, String]`.
+ *
+ * The returned Scala `Map[String, String]` is backed by the provided Java
+ * `Dictionary` and any side-effects of using it via the Scala interface
+ * will be visible via the Java interface and vice versa.
+ *
+ * @param p The Dictionary to be converted.
+ * @return A Scala mutable Map[String, String] view of the argument.
+ */
+ implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match {
+ case DictionaryWrapper(wrapped) => wrapped
+ case _ => new JDictionaryWrapper(p)
+ }
+
+ /**
+ * Implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`.
+ *
+ * The returned Scala `Map[String, String]` is backed by the provided Java
+ * `Properties` and any side-effects of using it via the Scala interface
+ * will be visible via the Java interface and vice versa.
+ *
+ * @param p The Properties to be converted.
+ * @return A Scala mutable Map[String, String] view of the argument.
+ */
+ implicit def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match {
+ case _ => new JPropertiesWrapper(p)
+ }
+}
+
+object WrapAsScala extends WrapAsScala { }
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
new file mode 100644
index 0000000..20add33
--- /dev/null
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -0,0 +1,478 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import WrapAsScala._
+import WrapAsJava._
+
+/** Don't put the implementations in the same scope as the implicits
+ * which utilize them, or they will stow away into every scope which
+ * extends one of those implementations. See SI-5580.
+ */
+private[collection] trait Wrappers {
+ trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] {
+ val underlying: Iterable[A]
+ def size = underlying.size
+ override def iterator = IteratorWrapper(underlying.iterator)
+ override def isEmpty = underlying.isEmpty
+ }
+
+ case class IteratorWrapper[A](underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] {
+ def hasNext = underlying.hasNext
+ def next() = underlying.next
+ def hasMoreElements = underlying.hasNext
+ def nextElement() = underlying.next
+ def remove() = throw new UnsupportedOperationException
+ }
+
+ class ToIteratorWrapper[A](underlying : Iterator[A]) {
+ def asJava = new IteratorWrapper(underlying)
+ }
+
+ case class JIteratorWrapper[A](underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] {
+ def hasNext = underlying.hasNext
+ def next() = underlying.next
+ }
+
+ case class JEnumerationWrapper[A](underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] {
+ def hasNext = underlying.hasMoreElements
+ def next() = underlying.nextElement
+ }
+
+ case class IterableWrapper[A](underlying: Iterable[A]) extends ju.AbstractCollection[A] with IterableWrapperTrait[A] { }
+
+ case class JIterableWrapper[A](underlying: jl.Iterable[A]) extends AbstractIterable[A] with Iterable[A] {
+ def iterator = underlying.iterator
+ def newBuilder[B] = new mutable.ArrayBuffer[B]
+ }
+
+ case class JCollectionWrapper[A](underlying: ju.Collection[A]) extends AbstractIterable[A] with Iterable[A] {
+ def iterator = underlying.iterator
+ override def size = underlying.size
+ override def isEmpty = underlying.isEmpty
+ def newBuilder[B] = new mutable.ArrayBuffer[B]
+ }
+
+ case class SeqWrapper[A](underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
+ def get(i: Int) = underlying(i)
+ }
+
+ case class MutableSeqWrapper[A](underlying: mutable.Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
+ def get(i: Int) = underlying(i)
+ override def set(i: Int, elem: A) = {
+ val p = underlying(i)
+ underlying(i) = elem
+ p
+ }
+ }
+
+ case class MutableBufferWrapper[A](underlying: mutable.Buffer[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
+ def get(i: Int) = underlying(i)
+ override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p }
+ override def add(elem: A) = { underlying append elem; true }
+ override def remove(i: Int) = underlying remove i
+ }
+
+ case class JListWrapper[A](val underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] {
+ def length = underlying.size
+ override def isEmpty = underlying.isEmpty
+ override def iterator: Iterator[A] = underlying.iterator
+ def apply(i: Int) = underlying.get(i)
+ def update(i: Int, elem: A) = underlying.set(i, elem)
+ def +=:(elem: A) = { underlying.subList(0, 0) add elem; this }
+ def +=(elem: A): this.type = { underlying add elem; this }
+ def insertAll(i: Int, elems: Traversable[A]) = {
+ val ins = underlying.subList(0, i)
+ elems.seq.foreach(ins.add(_))
+ }
+ def remove(i: Int) = underlying.remove(i)
+ def clear() = underlying.clear()
+ def result = this
+ // Note: Clone cannot just call underlying.clone because in Java, only specific collections
+ // expose clone methods. Generically, they're protected.
+ override def clone(): JListWrapper[A] = JListWrapper(new ju.ArrayList[A](underlying))
+ }
+
+ class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] {
+ self =>
+ def size = underlying.size
+ def iterator = new ju.Iterator[A] {
+ val ui = underlying.iterator
+ var prev: Option[A] = None
+ def hasNext = ui.hasNext
+ def next = { val e = ui.next; prev = Some(e); e }
+ def remove = prev match {
+ case Some(e) =>
+ underlying match {
+ case ms: mutable.Set[a] =>
+ ms remove e
+ prev = None
+ case _ =>
+ throw new UnsupportedOperationException("remove")
+ }
+ case _ =>
+ throw new IllegalStateException("next must be called at least once before remove")
+ }
+ }
+ }
+
+ case class MutableSetWrapper[A](underlying: mutable.Set[A]) extends SetWrapper[A](underlying) {
+ override def add(elem: A) = {
+ val sz = underlying.size
+ underlying += elem
+ sz < underlying.size
+ }
+ override def remove(elem: AnyRef) =
+ try underlying remove elem.asInstanceOf[A]
+ catch { case ex: ClassCastException => false }
+ override def clear() = underlying.clear()
+ }
+
+ case class JSetWrapper[A](underlying: ju.Set[A]) extends mutable.AbstractSet[A] with mutable.Set[A] with mutable.SetLike[A, JSetWrapper[A]] {
+
+ override def size = underlying.size
+
+ def iterator = underlying.iterator
+
+ def contains(elem: A): Boolean = underlying.contains(elem)
+
+ def +=(elem: A): this.type = { underlying add elem; this }
+ def -=(elem: A): this.type = { underlying remove elem; this }
+
+ override def add(elem: A): Boolean = underlying add elem
+ override def remove(elem: A): Boolean = underlying remove elem
+ override def clear() = underlying.clear()
+
+ override def empty = JSetWrapper(new ju.HashSet[A])
+ // Note: Clone cannot just call underlying.clone because in Java, only specific collections
+ // expose clone methods. Generically, they're protected.
+ override def clone() =
+ new JSetWrapper[A](new ju.LinkedHashSet[A](underlying))
+ }
+
+ class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] { self =>
+ override def size = underlying.size
+
+ override def get(key: AnyRef): B = try {
+ underlying get key.asInstanceOf[A] match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
+ }
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
+ }
+
+ override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
+ def size = self.size
+
+ def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
+ val ui = underlying.iterator
+ var prev : Option[A] = None
+
+ def hasNext = ui.hasNext
+
+ def next() = {
+ val (k, v) = ui.next
+ prev = Some(k)
+ new ju.Map.Entry[A, B] {
+ import scala.util.hashing.byteswap32
+ def getKey = k
+ def getValue = v
+ def setValue(v1 : B) = self.put(k, v1)
+ override def hashCode = byteswap32(k.hashCode) + (byteswap32(v.hashCode) << 16)
+ override def equals(other: Any) = other match {
+ case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
+ case _ => false
+ }
+ }
+ }
+
+ def remove() {
+ prev match {
+ case Some(k) =>
+ underlying match {
+ case mm: mutable.Map[a, _] =>
+ mm remove k
+ prev = None
+ case _ =>
+ throw new UnsupportedOperationException("remove")
+ }
+ case _ =>
+ throw new IllegalStateException("next must be called at least once before remove")
+ }
+ }
+ }
+ }
+ }
+
+ case class MutableMapWrapper[A, B](underlying: mutable.Map[A, B]) extends MapWrapper[A, B](underlying) {
+ override def put(k: A, v: B) = underlying.put(k, v) match {
+ case Some(v1) => v1
+ case None => null.asInstanceOf[B]
+ }
+
+ override def remove(k: AnyRef): B = try {
+ underlying remove k.asInstanceOf[A] match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
+ }
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
+ }
+
+ override def clear() = underlying.clear()
+ }
+
+ trait JMapWrapperLike[A, B, +Repr <: mutable.MapLike[A, B, Repr] with mutable.Map[A, B]] extends mutable.Map[A, B] with mutable.MapLike[A, B, Repr] {
+ def underlying: ju.Map[A, B]
+
+ override def size = underlying.size
+
+ def get(k: A) = {
+ val v = underlying get k
+ if (v != null)
+ Some(v)
+ else if (underlying containsKey k)
+ Some(null.asInstanceOf[B])
+ else
+ None
+ }
+
+ def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
+ def -=(key: A): this.type = { underlying remove key; this }
+
+ override def put(k: A, v: B): Option[B] = {
+ val r = underlying.put(k, v)
+ if (r != null) Some(r) else None
+ }
+
+ override def update(k: A, v: B) { underlying.put(k, v) }
+
+ override def remove(k: A): Option[B] = {
+ val r = underlying remove k
+ if (r != null) Some(r) else None
+ }
+
+ def iterator: Iterator[(A, B)] = new AbstractIterator[(A, B)] {
+ val ui = underlying.entrySet.iterator
+ def hasNext = ui.hasNext
+ def next() = { val e = ui.next(); (e.getKey, e.getValue) }
+ }
+
+ override def clear() = underlying.clear()
+
+ override def empty: Repr = null.asInstanceOf[Repr]
+ }
+
+ case class JMapWrapper[A, B](val underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] {
+ override def empty = JMapWrapper(new ju.HashMap[A, B])
+ }
+
+ class ConcurrentMapDeprecatedWrapper[A, B](override val underlying: mutable.ConcurrentMap[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
+
+ def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
+ case Some(v) => v
+ case None => null.asInstanceOf[B]
+ }
+
+ def remove(k: AnyRef, v: AnyRef) = try {
+ underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B])
+ } catch {
+ case ex: ClassCastException =>
+ false
+ }
+
+ def replace(k: A, v: B): B = underlying.replace(k, v) match {
+ case Some(v) => v
+ case None => null.asInstanceOf[B]
+ }
+
+ def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
+ }
+
+ class ConcurrentMapWrapper[A, B](override val underlying: concurrent.Map[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
+
+ def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
+ case Some(v) => v
+ case None => null.asInstanceOf[B]
+ }
+
+ def remove(k: AnyRef, v: AnyRef) = try {
+ underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B])
+ } catch {
+ case ex: ClassCastException =>
+ false
+ }
+
+ def replace(k: A, v: B): B = underlying.replace(k, v) match {
+ case Some(v) => v
+ case None => null.asInstanceOf[B]
+ }
+
+ def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
+ }
+
+ case class JConcurrentMapDeprecatedWrapper[A, B](val underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapDeprecatedWrapper[A, B]] with mutable.ConcurrentMap[A, B] {
+ override def get(k: A) = {
+ val v = underlying get k
+ if (v != null) Some(v)
+ else None
+ }
+
+ override def empty = new JConcurrentMapDeprecatedWrapper(new juc.ConcurrentHashMap[A, B])
+
+ def putIfAbsent(k: A, v: B): Option[B] = {
+ val r = underlying.putIfAbsent(k, v)
+ if (r != null) Some(r) else None
+ }
+
+ def remove(k: A, v: B): Boolean = underlying.remove(k, v)
+
+ def replace(k: A, v: B): Option[B] = {
+ val prev = underlying.replace(k, v)
+ if (prev != null) Some(prev) else None
+ }
+
+ def replace(k: A, oldvalue: B, newvalue: B): Boolean =
+ underlying.replace(k, oldvalue, newvalue)
+ }
+
+ case class JConcurrentMapWrapper[A, B](val underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] {
+ override def get(k: A) = {
+ val v = underlying get k
+ if (v != null) Some(v)
+ else None
+ }
+
+ override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[A, B])
+
+ def putIfAbsent(k: A, v: B): Option[B] = {
+ val r = underlying.putIfAbsent(k, v)
+ if (r != null) Some(r) else None
+ }
+
+ def remove(k: A, v: B): Boolean = underlying.remove(k, v)
+
+ def replace(k: A, v: B): Option[B] = {
+ val prev = underlying.replace(k, v)
+ if (prev != null) Some(prev) else None
+ }
+
+ def replace(k: A, oldvalue: B, newvalue: B): Boolean =
+ underlying.replace(k, oldvalue, newvalue)
+ }
+
+ case class DictionaryWrapper[A, B](underlying: mutable.Map[A, B]) extends ju.Dictionary[A, B] {
+ def size: Int = underlying.size
+ def isEmpty: Boolean = underlying.isEmpty
+ def keys: ju.Enumeration[A] = asJavaEnumeration(underlying.keysIterator)
+ def elements: ju.Enumeration[B] = asJavaEnumeration(underlying.valuesIterator)
+ def get(key: AnyRef) = try {
+ underlying get key.asInstanceOf[A] match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
+ }
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
+ }
+ def put(key: A, value: B): B = underlying.put(key, value) match {
+ case Some(v) => v
+ case None => null.asInstanceOf[B]
+ }
+ override def remove(key: AnyRef) = try {
+ underlying remove key.asInstanceOf[A] match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
+ }
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
+ }
+ }
+
+ case class JDictionaryWrapper[A, B](underlying: ju.Dictionary[A, B]) extends mutable.AbstractMap[A, B] with mutable.Map[A, B] {
+ override def size: Int = underlying.size
+
+ def get(k: A) = {
+ val v = underlying get k
+ if (v != null) Some(v) else None
+ }
+
+ def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
+ def -=(key: A): this.type = { underlying remove key; this }
+
+ override def put(k: A, v: B): Option[B] = {
+ val r = underlying.put(k, v)
+ if (r != null) Some(r) else None
+ }
+
+ override def update(k: A, v: B) { underlying.put(k, v) }
+
+ override def remove(k: A): Option[B] = {
+ val r = underlying remove k
+ if (r != null) Some(r) else None
+ }
+
+ def iterator = enumerationAsScalaIterator(underlying.keys) map (k => (k, underlying get k))
+
+ override def clear() = underlying.clear()
+ }
+
+ case class JPropertiesWrapper(underlying: ju.Properties) extends mutable.AbstractMap[String, String]
+ with mutable.Map[String, String]
+ with mutable.MapLike[String, String, JPropertiesWrapper] {
+
+ override def size = underlying.size
+
+ def get(k: String) = {
+ val v = underlying get k
+ if (v != null) Some(v.asInstanceOf[String]) else None
+ }
+
+ def +=(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this }
+ def -=(key: String): this.type = { underlying remove key; this }
+
+ override def put(k: String, v: String): Option[String] = {
+ val r = underlying.put(k, v)
+ if (r != null) Some(r.asInstanceOf[String]) else None
+ }
+
+ override def update(k: String, v: String) { underlying.put(k, v) }
+
+ override def remove(k: String): Option[String] = {
+ val r = underlying remove k
+ if (r != null) Some(r.asInstanceOf[String]) else None
+ }
+
+ def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] {
+ val ui = underlying.entrySet.iterator
+ def hasNext = ui.hasNext
+ def next() = {
+ val e = ui.next()
+ (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String])
+ }
+ }
+
+ override def clear() = underlying.clear()
+
+ override def empty = JPropertiesWrapper(new ju.Properties)
+
+ def getProperty(key: String) = underlying.getProperty(key)
+
+ def getProperty(key: String, defaultValue: String) =
+ underlying.getProperty(key, defaultValue)
+
+ def setProperty(key: String, value: String) =
+ underlying.setProperty(key, value)
+ }
+}
+
+ at SerialVersionUID(0 - 5857859809262781311L)
+object Wrappers extends Wrappers with Serializable
diff --git a/src/library/scala/collection/convert/package.scala b/src/library/scala/collection/convert/package.scala
new file mode 100644
index 0000000..ea66101
--- /dev/null
+++ b/src/library/scala/collection/convert/package.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+package object convert {
+ val decorateAsJava = new DecorateAsJava { }
+ val decorateAsScala = new DecorateAsScala { }
+ val decorateAll = new DecorateAsJava with DecorateAsScala { }
+ val wrapAsJava = new WrapAsJava { }
+ val wrapAsScala = new WrapAsScala { }
+ val wrapAll = new WrapAsJava with WrapAsScala { }
+}
diff --git a/src/library/scala/collection/generic/Addable.scala b/src/library/scala/collection/generic/Addable.scala
deleted file mode 100644
index e2a34c1..0000000
--- a/src/library/scala/collection/generic/Addable.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.collection
-package generic
-
-import annotation.bridge
-
-/** This trait represents collection-like objects that can be added to
- * using a '+' operator. It defines variants of `+` and `++`
- * as convenience methods in terms of single-element addition `+`.
- * @tparam A the type of the elements of the $coll
- * @tparam Repr the type of the $coll itself
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
- * @define coll collection
- * @define Coll Addable
- */
- at deprecated("Will be removed after scala 2.9", "2.8.0")
-trait Addable[A, +Repr <: Addable[A, Repr]] { self =>
-
- /** The representation object of type `Repr` which contains the collection's elements
- */
- protected def repr: Repr
-
- /** Creates a new $coll with an additional element, unless the element is already present.
- * @param elem the element to add
- * @return a fresh collection with `elem` added.
- */
- def +(elem: A): Repr
-
- /** Creates a new $coll with additional elements.
- *
- * This method takes two or more elements to be added. Another overloaded
- * variant of this method handles the case where a single element is
- * added.
- * @param elem1 the first element to add.
- * @param elem2 the second element to add.
- * @param elems the remaining elements to add.
- * @return a new $coll with the given elements added.
- */
- def + (elem1: A, elem2: A, elems: A*): Repr =
- this + elem1 + elem2 ++ elems
-
- /** Creates a new $coll by adding all elements contained in another collection to this $coll.
- *
- * @param elems the collection containing the added elements.
- * @return a new $coll with the given elements added.
- */
- def ++ (xs: GenTraversableOnce[A]): Repr = (repr /: xs.seq) (_ + _)
-
- @bridge
- def ++ (xs: TraversableOnce[A]): Repr = ++ (xs: GenTraversableOnce[A])
-}
diff --git a/src/library/scala/collection/generic/BitOperations.scala b/src/library/scala/collection/generic/BitOperations.scala
index 8094062..c45ebcf 100644
--- a/src/library/scala/collection/generic/BitOperations.scala
+++ b/src/library/scala/collection/generic/BitOperations.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/generic/BitSetFactory.scala b/src/library/scala/collection/generic/BitSetFactory.scala
index 796b12b..46e2d29 100644
--- a/src/library/scala/collection/generic/BitSetFactory.scala
+++ b/src/library/scala/collection/generic/BitSetFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -15,7 +15,7 @@ import scala.collection._
import mutable.Builder
/** @define coll collection
- * @define Coll Traversable
+ * @define Coll `Traversable`
* @define factoryInfo
* This object provides a set of operations to create `$Coll` values.
* @author Martin Odersky
diff --git a/src/library/scala/collection/generic/CanBuildFrom.scala b/src/library/scala/collection/generic/CanBuildFrom.scala
index 3a335f3..73fd4fc 100644
--- a/src/library/scala/collection/generic/CanBuildFrom.scala
+++ b/src/library/scala/collection/generic/CanBuildFrom.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -20,7 +20,7 @@ import scala.annotation.implicitNotFound
* @tparam Elem the element type of the collection to be created.
* @tparam To the type of the collection to be created.
*
- * @see Builder
+ * @see [[scala.collection.mutable.Builder]]
* @author Martin Odersky
* @author Adriaan Moors
* @since 2.8
diff --git a/src/library/scala/collection/generic/CanCombineFrom.scala b/src/library/scala/collection/generic/CanCombineFrom.scala
index ad2381a..9ca3332 100644
--- a/src/library/scala/collection/generic/CanCombineFrom.scala
+++ b/src/library/scala/collection/generic/CanCombineFrom.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,22 +11,16 @@ package generic
import scala.collection.parallel._
-/**
- * A base trait for parallel builder factories.
+/** A base trait for parallel builder factories.
*
- * @tparam From the type of the underlying collection that requests a builder to be created
- * @tparam Elem the element type of the collection to be created
- * @tparam To the type of the collection to be created
+ * @tparam From the type of the underlying collection that requests a
+ * builder to be created.
+ * @tparam Elem the element type of the collection to be created.
+ * @tparam To the type of the collection to be created.
+ * @since 2.8
*/
trait CanCombineFrom[-From, -Elem, +To] extends CanBuildFrom[From, Elem, To] with Parallel {
def apply(from: From): Combiner[Elem, To]
def apply(): Combiner[Elem, To]
}
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/generic/ClassManifestTraversableFactory.scala b/src/library/scala/collection/generic/ClassManifestTraversableFactory.scala
deleted file mode 100644
index 82270d5..0000000
--- a/src/library/scala/collection/generic/ClassManifestTraversableFactory.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package generic
-
-abstract class ClassManifestTraversableFactory[CC[X] <: Traversable[X] with GenericClassManifestTraversableTemplate[X, CC]]
- extends GenericClassManifestCompanion[CC] {
-
- class GenericCanBuildFrom[A](implicit manif: ClassManifest[A]) extends CanBuildFrom[CC[_], A, CC[A]] {
- def apply(from: CC[_]) = from.genericClassManifestBuilder[A]
- def apply = newBuilder[A]
- }
-}
diff --git a/src/library/scala/collection/generic/ClassTagTraversableFactory.scala b/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
new file mode 100644
index 0000000..85cdbd7
--- /dev/null
+++ b/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
@@ -0,0 +1,32 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package generic
+
+import scala.language.higherKinds
+import scala.reflect.ClassTag
+
+/** A template for companion objects of `ClassTagTraversable` and
+ * subclasses thereof.
+ *
+ * @define coll collection
+ * @define Coll `Traversable`
+ * @define genericCanBuildFromInfo
+ * The standard `CanBuildFrom` instance for $Coll objects.
+ * @author Aleksandar Prokopec
+ * @since 2.8
+ */
+abstract class ClassTagTraversableFactory[CC[X] <: Traversable[X] with GenericClassTagTraversableTemplate[X, CC]]
+ extends GenericClassTagCompanion[CC] {
+
+ class GenericCanBuildFrom[A](implicit tag: ClassTag[A]) extends CanBuildFrom[CC[_], A, CC[A]] {
+ def apply(from: CC[_]) = from.genericClassTagBuilder[A]
+ def apply = newBuilder[A]
+ }
+}
diff --git a/src/library/scala/collection/generic/Clearable.scala b/src/library/scala/collection/generic/Clearable.scala
new file mode 100644
index 0000000..a04ecb2
--- /dev/null
+++ b/src/library/scala/collection/generic/Clearable.scala
@@ -0,0 +1,26 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package generic
+
+/** This trait forms part of collections that can be cleared
+ * with a clear() call.
+ *
+ * @author Paul Phillips
+ * @version 2.10
+ * @since 2.10
+ * @define coll clearable collection
+ * @define Coll `Clearable`
+ */
+trait Clearable {
+ /** Clears the $coll's contents. After this operation, the
+ * $coll is empty.
+ */
+ def clear(): Unit
+}
diff --git a/src/library/scala/collection/generic/FilterMonadic.scala b/src/library/scala/collection/generic/FilterMonadic.scala
old mode 100644
new mode 100755
index 4d6d9ec..e21f0be
--- a/src/library/scala/collection/generic/FilterMonadic.scala
+++ b/src/library/scala/collection/generic/FilterMonadic.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,9 +12,9 @@ package scala.collection.generic
/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods
* of trait `TraversableLike`.
*/
-trait FilterMonadic[+A, +Repr] {
+trait FilterMonadic[+A, +Repr] extends Any {
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
- def flatMap[B, That](f: A => collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
+ def flatMap[B, That](f: A => scala.collection.GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
def foreach[U](f: A => U): Unit
def withFilter(p: A => Boolean): FilterMonadic[A, Repr]
}
diff --git a/src/library/scala/collection/generic/GenMapFactory.scala b/src/library/scala/collection/generic/GenMapFactory.scala
index f3537e8..e869bba 100644
--- a/src/library/scala/collection/generic/GenMapFactory.scala
+++ b/src/library/scala/collection/generic/GenMapFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,14 +9,13 @@
package scala.collection
package generic
-
import mutable.{Builder, MapBuilder}
-
+import scala.language.higherKinds
/** A template for companion objects of `Map` and subclasses thereof.
*
* @define coll map
- * @define Coll Map
+ * @define Coll `Map`
* @define factoryInfo
* This object provides a set of operations needed to create `$Coll` values.
* @author Martin Odersky
diff --git a/src/library/scala/collection/generic/GenSeqFactory.scala b/src/library/scala/collection/generic/GenSeqFactory.scala
index 6fcecc4..dd375c5 100644
--- a/src/library/scala/collection/generic/GenSeqFactory.scala
+++ b/src/library/scala/collection/generic/GenSeqFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,16 +11,11 @@
package scala.collection
package generic
+import scala.language.higherKinds
+
/** A template for companion objects of Seq and subclasses thereof.
*
* @since 2.8
*/
-abstract class GenSeqFactory[CC[X] <: GenSeq[X] with GenericTraversableTemplate[X, CC]] extends GenTraversableFactory[CC] {
-
- /** This method is called in a pattern match { case Seq(...) => }.
- *
- * @param x the selector value
- * @return sequence wrapped in an option, if this is a Seq, otherwise none
- */
- def unapplySeq[A](x: CC[A]): Some[CC[A]] = Some(x)
-}
+abstract class GenSeqFactory[CC[X] <: GenSeq[X] with GenericTraversableTemplate[X, CC]]
+extends GenTraversableFactory[CC]
diff --git a/src/library/scala/collection/generic/GenSetFactory.scala b/src/library/scala/collection/generic/GenSetFactory.scala
index d83f248..9774805 100644
--- a/src/library/scala/collection/generic/GenSetFactory.scala
+++ b/src/library/scala/collection/generic/GenSetFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,11 +12,12 @@ package scala.collection
package generic
import mutable.Builder
+import scala.language.higherKinds
/** A template for companion objects of `Set` and subclasses thereof.
*
* @define coll set
- * @define Coll Set
+ * @define Coll `Set`
* @define factoryInfo
* This object provides a set of operations needed to create `$Coll` values.
* @author Martin Odersky
diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala
index c2718e5..2d3f7e6 100644
--- a/src/library/scala/collection/generic/GenTraversableFactory.scala
+++ b/src/library/scala/collection/generic/GenTraversableFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,6 +10,8 @@
package scala.collection
package generic
+import scala.language.higherKinds
+
/** A template for companion objects of `Traversable` and subclasses thereof.
* This class provides a set of operations to create `$Coll` objects.
* It is typically inherited by companion objects of subclasses of `Traversable`.
@@ -17,7 +19,7 @@ package generic
* @since 2.8
*
* @define coll collection
- * @define Coll Traversable
+ * @define Coll `Traversable`
* @define factoryInfo
* This object provides a set of operations to create `$Coll` values.
* @author Martin Odersky
@@ -34,7 +36,14 @@ package generic
* @see GenericCanBuildFrom
*/
abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTraversableTemplate[X, CC]]
- extends GenericCompanion[CC] {
+extends GenericCompanion[CC] {
+
+ // A default implementation of GenericCanBuildFrom which can be cast
+ // to whatever is desired.
+ private class ReusableCBF extends GenericCanBuildFrom[Nothing] {
+ override def apply() = newBuilder[Nothing]
+ }
+ lazy val ReusableCBF: GenericCanBuildFrom[Nothing] = new ReusableCBF
/** A generic implementation of the `CanBuildFrom` trait, which forwards
* all calls to `apply(from)` to the `genericBuilder` method of
@@ -63,7 +72,7 @@ abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTrav
val b = newBuilder[A]
// At present we're using IndexedSeq as a proxy for "has a cheap size method".
if (xss forall (_.isInstanceOf[IndexedSeq[_]]))
- b.sizeHint(xss map (_.size) sum)
+ b.sizeHint(xss.map(_.size).sum)
for (xs <- xss.seq) b ++= xs
b.result
@@ -191,8 +200,8 @@ abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTrav
/** Produces a $coll containing a sequence of increasing of integers.
*
- * @param from the first element of the $coll
- * @param end the end value of the $coll (the first value NOT contained)
+ * @param start the first element of the $coll
+ * @param end the end value of the $coll (the first value NOT contained)
* @return a $coll with values `start, start + 1, ..., end - 1`
*/
def range[T: Integral](start: T, end: T): CC[T] = range(start, end, implicitly[Integral[T]].one)
diff --git a/src/library/scala/collection/generic/GenericClassManifestCompanion.scala b/src/library/scala/collection/generic/GenericClassManifestCompanion.scala
deleted file mode 100644
index 546e82f..0000000
--- a/src/library/scala/collection/generic/GenericClassManifestCompanion.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package generic
-
-import mutable.Builder
-
-/** This class represents companions of classes which require ClassManifests
- * for their element types.
- *
- * @author Aleksandar Prokopec
- */
-abstract class GenericClassManifestCompanion[+CC[X] <: Traversable[X]] {
- type Coll = CC[_]
-
- def newBuilder[A](implicit ord: ClassManifest[A]): Builder[A, CC[A]]
-
- def empty[A: ClassManifest]: CC[A] = newBuilder[A].result
-
- def apply[A](elems: A*)(implicit ord: ClassManifest[A]): CC[A] = {
- val b = newBuilder[A]
- b ++= elems
- b.result
- }
-}
diff --git a/src/library/scala/collection/generic/GenericClassManifestTraversableTemplate.scala b/src/library/scala/collection/generic/GenericClassManifestTraversableTemplate.scala
deleted file mode 100644
index 03d4269..0000000
--- a/src/library/scala/collection/generic/GenericClassManifestTraversableTemplate.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package generic
-
-import mutable.Builder
-import annotation.unchecked.uncheckedVariance
-
-/** This trait represents collections classes which require class
- * manifests for their element types.
- *
- * @author Aleksandar Prokopec
- */
-trait GenericClassManifestTraversableTemplate[+A, +CC[X] <: Traversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] {
- implicit protected[this] val manifest: ClassManifest[A]
- def classManifestCompanion: GenericClassManifestCompanion[CC]
- def genericClassManifestBuilder[B](implicit man: ClassManifest[B]): Builder[B, CC[B]] = classManifestCompanion.newBuilder[B]
-}
diff --git a/src/library/scala/collection/generic/GenericClassTagCompanion.scala b/src/library/scala/collection/generic/GenericClassTagCompanion.scala
new file mode 100644
index 0000000..76c12d1
--- /dev/null
+++ b/src/library/scala/collection/generic/GenericClassTagCompanion.scala
@@ -0,0 +1,33 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package generic
+
+import mutable.Builder
+import scala.language.higherKinds
+import scala.reflect.ClassTag
+
+/** This class represents companions of classes which require ClassTags
+ * for their element types.
+ *
+ * @author Aleksandar Prokopec
+ */
+abstract class GenericClassTagCompanion[+CC[X] <: Traversable[X]] {
+ protected[this] type Coll = CC[_]
+
+ def newBuilder[A](implicit ord: ClassTag[A]): Builder[A, CC[A]]
+
+ def empty[A: ClassTag]: CC[A] = newBuilder[A].result
+
+ def apply[A](elems: A*)(implicit ord: ClassTag[A]): CC[A] = {
+ val b = newBuilder[A]
+ b ++= elems
+ b.result
+ }
+}
diff --git a/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala b/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
new file mode 100644
index 0000000..f327710
--- /dev/null
+++ b/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
@@ -0,0 +1,31 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package generic
+
+import mutable.Builder
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.higherKinds
+import scala.reflect.ClassTag
+
+/** This trait represents collections classes which require class
+ * tags for their element types.
+ *
+ * @author Aleksandar Prokopec
+ * @since 2.8
+ */
+trait GenericClassTagTraversableTemplate[+A, +CC[X] <: Traversable[X]] extends HasNewBuilder[A, CC[A] @uncheckedVariance] {
+ implicit protected[this] val tag: ClassTag[A]
+ def classTagCompanion: GenericClassTagCompanion[CC]
+ def genericClassTagBuilder[B](implicit tag: ClassTag[B]): Builder[B, CC[B]] = classTagCompanion.newBuilder[B]
+ @deprecated("use classTagCompanion instead", "2.10.0")
+ def classManifestCompanion: GenericClassManifestCompanion[CC] = classTagCompanion
+ @deprecated("use genericClassTagBuilder instead", "2.10.0")
+ def genericClassManifestBuilder[B](implicit manifest: ClassManifest[B]): Builder[B, CC[B]] = genericClassTagBuilder[B](manifest)
+}
diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala
index 353ab99..b966ce5 100644
--- a/src/library/scala/collection/generic/GenericCompanion.scala
+++ b/src/library/scala/collection/generic/GenericCompanion.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,20 +10,21 @@ package scala.collection
package generic
import mutable.Builder
+import scala.language.higherKinds
-/** A template class for companion objects of ``regular'' collection classes
+/** A template class for companion objects of "regular" collection classes
* represent an unconstrained higher-kinded type. Typically
* such classes inherit from trait `GenericTraversableTemplate`.
* @tparam CC The type constructor representing the collection class.
- * @see GenericTraversableTemplate
+ * @see [[scala.collection.generic.GenericTraversableTemplate]]
* @author Martin Odersky
* @since 2.8
* @define coll collection
- * @define Coll CC
+ * @define Coll `CC`
*/
abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] {
/** The underlying collection type with unknown element type */
- type Coll = CC[_]
+ protected[this] type Coll = CC[_]
/** The default builder for `$Coll` objects.
* @tparam A the type of the ${coll}'s elements
diff --git a/src/library/scala/collection/generic/GenericOrderedCompanion.scala b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
index c0656dd..094912c 100644
--- a/src/library/scala/collection/generic/GenericOrderedCompanion.scala
+++ b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
@@ -1,29 +1,25 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package generic
import mutable.Builder
-
-
-
-
+import scala.language.higherKinds
/** This class represents companions of classes which require the ordered trait
* for their element types.
*
* @author Aleksandar Prokopec
+ * @since 2.8
*/
abstract class GenericOrderedCompanion[+CC[X] <: Traversable[X]] {
- type Coll = CC[_]
+ protected[this] type Coll = CC[_]
def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]]
diff --git a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
index 5cfc466..a624e8c 100644
--- a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,9 +12,8 @@ package scala.collection
package generic
import mutable.Builder
-import annotation.unchecked.uncheckedVariance
-
-
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.higherKinds
/** This trait represents collections classes which require
* ordered element types.
diff --git a/src/library/scala/collection/generic/GenericParCompanion.scala b/src/library/scala/collection/generic/GenericParCompanion.scala
index 783a487..bb39461 100644
--- a/src/library/scala/collection/generic/GenericParCompanion.scala
+++ b/src/library/scala/collection/generic/GenericParCompanion.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,11 +11,12 @@ package scala.collection.generic
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParIterable
import scala.collection.parallel.ParMap
+import scala.language.higherKinds
/** A template class for companion objects of parallel collection classes.
* They should be mixed in together with `GenericCompanion` type.
*
- * @define Coll ParIterable
+ * @define Coll `ParIterable`
* @tparam CC the type constructor representing the collection class
* @since 2.8
*/
@@ -33,5 +34,3 @@ trait GenericParMapCompanion[+CC[P, Q] <: ParMap[P, Q]] {
def newCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]]
}
-
-
diff --git a/src/library/scala/collection/generic/GenericParTemplate.scala b/src/library/scala/collection/generic/GenericParTemplate.scala
index 1e3f7b5..94c7663 100644
--- a/src/library/scala/collection/generic/GenericParTemplate.scala
+++ b/src/library/scala/collection/generic/GenericParTemplate.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,27 +8,20 @@
package scala.collection.generic
-
-
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParIterable
import scala.collection.parallel.ParMap
import scala.collection.parallel.TaskSupport
-
-import annotation.unchecked.uncheckedVariance
-
-
-
-
-
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.higherKinds
/** A template trait for collections having a companion.
*
* @tparam A the element type of the collection
* @tparam CC the type constructor representing the collection class
+ * @author Aleksandar Prokopec
* @since 2.8
- * @author prokopec
*/
trait GenericParTemplate[+A, +CC[X] <: ParIterable[X]]
extends GenericTraversableTemplate[A, CC]
@@ -36,7 +29,7 @@ extends GenericTraversableTemplate[A, CC]
{
def companion: GenericCompanion[CC] with GenericParCompanion[CC]
- protected[this] override def newBuilder: collection.mutable.Builder[A, CC[A]] = newCombiner
+ protected[this] override def newBuilder: scala.collection.mutable.Builder[A, CC[A]] = newCombiner
protected[this] override def newCombiner: Combiner[A, CC[A]] = {
val cb = companion.newCombiner[A]
@@ -68,8 +61,3 @@ trait GenericParMapTemplate[K, +V, +CC[X, Y] <: ParMap[X, Y]] extends GenericPar
}
}
-
-
-
-
-
diff --git a/src/library/scala/collection/generic/GenericSeqCompanion.scala b/src/library/scala/collection/generic/GenericSeqCompanion.scala
index 41e8d6d..8b2f8a0 100644
--- a/src/library/scala/collection/generic/GenericSeqCompanion.scala
+++ b/src/library/scala/collection/generic/GenericSeqCompanion.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,15 +10,7 @@
package scala.collection
package generic
-import annotation.bridge
+import scala.language.higherKinds
trait GenericSeqCompanion[CC[X] <: Traversable[X]]
- extends GenericCompanion[CC] {
-
- @bridge
- override def empty[A]: CC[A] = super.empty[A]
-
- @bridge
- override def apply[A](elems: A*): CC[A] = super.apply(elems: _*)
-
-}
+ extends GenericCompanion[CC]
diff --git a/src/library/scala/collection/generic/GenericSetTemplate.scala b/src/library/scala/collection/generic/GenericSetTemplate.scala
index 9e1a041..ecfdcff 100644
--- a/src/library/scala/collection/generic/GenericSetTemplate.scala
+++ b/src/library/scala/collection/generic/GenericSetTemplate.scala
@@ -1,16 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package generic
-
+import scala.language.higherKinds
/**
* @since 2.8
*/
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index 2a7922f..f7a8a9a 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,8 +12,9 @@ package scala.collection
package generic
import mutable.Builder
-import annotation.migration
-import annotation.unchecked.uncheckedVariance
+import scala.annotation.migration
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.higherKinds
/** A template class for companion objects of ``regular`` collection classes
* that represent an unconstrained higher-kinded type.
@@ -72,8 +73,8 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
/** Converts this $coll of pairs into two collections of the first and second
* half of each pair.
*
- * @param A1 the type of the first half of the element pairs
- * @param A2 the type of the second half of the element pairs
+ * @tparam A1 the type of the first half of the element pairs
+ * @tparam A2 the type of the second half of the element pairs
* @param asPair an implicit conversion which asserts that the element type
* of this $coll is a pair.
* @return a pair ${coll}s, containing the first, respectively second
@@ -93,9 +94,9 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
/** Converts this $coll of triples into three collections of the first, second,
* and third element of each triple.
*
- * @param A1 the type of the first member of the element triples
- * @param A2 the type of the second member of the element triples
- * @param A3 the type of the third member of the element triples
+ * @tparam A1 the type of the first member of the element triples
+ * @tparam A2 the type of the second member of the element triples
+ * @tparam A3 the type of the third member of the element triples
* @param asTriple an implicit conversion which asserts that the element type
* of this $coll is a triple.
* @return a triple ${coll}s, containing the first, second, respectively
@@ -119,27 +120,30 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
* a $coll formed by the elements of these traversable
* collections.
*
- * The resulting collection's type will be guided by the
- * static type of $coll. For example:
- *
- * {{{
- * val xs = List(Set(1, 2, 3), Set(1, 2, 3))
- * // xs == List(1, 2, 3, 1, 2, 3)
- *
- * val ys = Set(List(1, 2, 3), List(3, 2, 1))
- * // ys == Set(1, 2, 3)
- * }}}
- *
* @tparam B the type of the elements of each traversable collection.
* @param asTraversable an implicit conversion which asserts that the element
- * type of this $coll is a `Traversable`.
+ * type of this $coll is a `GenTraversable`.
* @return a new $coll resulting from concatenating all element ${coll}s.
+ *
* @usecase def flatten[B]: $Coll[B]
+ *
+ * @inheritdoc
+ *
+ * The resulting collection's type will be guided by the
+ * static type of $coll. For example:
+ *
+ * {{{
+ * val xs = List(Set(1, 2, 3), Set(1, 2, 3))
+ * // xs == List(1, 2, 3, 1, 2, 3)
+ *
+ * val ys = Set(List(1, 2, 3), List(3, 2, 1))
+ * // ys == Set(1, 2, 3)
+ * }}}
*/
- def flatten[B](implicit asTraversable: A => /*<:<!!!*/ TraversableOnce[B]): CC[B] = {
+ def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]): CC[B] = {
val b = genericBuilder[B]
for (xs <- sequential)
- b ++= asTraversable(xs)
+ b ++= asTraversable(xs).seq
b.result
}
diff --git a/src/library/scala/collection/generic/Growable.scala b/src/library/scala/collection/generic/Growable.scala
index f0a70c2..cb75212 100644
--- a/src/library/scala/collection/generic/Growable.scala
+++ b/src/library/scala/collection/generic/Growable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -18,11 +18,11 @@ package generic
* @version 2.8
* @since 2.8
* @define coll growable collection
- * @define Coll Growable
+ * @define Coll `Growable`
* @define add add
* @define Add add
*/
-trait Growable[-A] {
+trait Growable[-A] extends Clearable {
/** ${Add}s a single element to this $coll.
*
@@ -42,7 +42,7 @@ trait Growable[-A] {
/** ${Add}s all elements produced by a TraversableOnce to this $coll.
*
- * @param iter the TraversableOnce producing the elements to $add.
+ * @param xs the TraversableOnce producing the elements to $add.
* @return the $coll itself.
*/
def ++=(xs: TraversableOnce[A]): this.type = { xs.seq foreach += ; this }
@@ -50,5 +50,5 @@ trait Growable[-A] {
/** Clears the $coll's contents. After this operation, the
* $coll is empty.
*/
- def clear()
+ def clear(): Unit
}
diff --git a/src/library/scala/collection/generic/HasNewBuilder.scala b/src/library/scala/collection/generic/HasNewBuilder.scala
old mode 100644
new mode 100755
index 6154a56..1a981b4
--- a/src/library/scala/collection/generic/HasNewBuilder.scala
+++ b/src/library/scala/collection/generic/HasNewBuilder.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,7 @@ package generic
import mutable.Builder
-trait HasNewBuilder[+A, +Repr] {
+trait HasNewBuilder[+A, +Repr] extends Any {
/** The builder that builds instances of Repr */
protected[this] def newBuilder: Builder[A, Repr]
}
diff --git a/src/library/scala/collection/generic/HasNewCombiner.scala b/src/library/scala/collection/generic/HasNewCombiner.scala
index cc69faa..1ecfba1 100644
--- a/src/library/scala/collection/generic/HasNewCombiner.scala
+++ b/src/library/scala/collection/generic/HasNewCombiner.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,28 +8,12 @@
package scala.collection.generic
-
-
import scala.collection.parallel.Combiner
-
-
+/**
+ * @since 2.8
+ */
trait HasNewCombiner[+T, +Repr] {
protected[this] def newCombiner: Combiner[T, Repr]
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/generic/ImmutableMapFactory.scala b/src/library/scala/collection/generic/ImmutableMapFactory.scala
index bdb657f..4ce50a3 100644
--- a/src/library/scala/collection/generic/ImmutableMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableMapFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,6 +10,8 @@
package scala.collection
package generic
+import scala.language.higherKinds
+
/** A template for companion objects of `immutable.Map` and subclasses thereof.
* @author Martin Odersky
* @version 2.8
diff --git a/src/library/scala/collection/generic/ImmutableSetFactory.scala b/src/library/scala/collection/generic/ImmutableSetFactory.scala
index e128be7..2e960e6 100644
--- a/src/library/scala/collection/generic/ImmutableSetFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSetFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,6 +10,7 @@ package scala.collection
package generic
import mutable.{ Builder, SetBuilder }
+import scala.language.higherKinds
abstract class ImmutableSetFactory[CC[X] <: immutable.Set[X] with SetLike[X, CC[X]]]
extends SetFactory[CC] {
diff --git a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
index 89e19ee..7743fc2 100644
--- a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,10 +11,12 @@
package scala.collection
package generic
+import scala.language.higherKinds
+
/** A template for companion objects of `SortedMap` and subclasses thereof.
*
* @since 2.8
- * @define Coll SortedMap
+ * @define Coll `SortedMap`
* @define coll sorted map
* @define factoryInfo
* This object provides a set of operations needed to create sorted maps of type `$Coll`.
diff --git a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
index fe807d9..9914557 100644
--- a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,11 +11,13 @@
package scala.collection
package generic
+import scala.language.higherKinds
+
/** A template for companion objects of `SortedSet` and subclasses thereof.
*
* @since 2.8
- * @define Coll immutable.SortedSet
- * @define coll immutable sorted
+ * @define Coll `immutable.SortedSet`
+ * @define coll immutable sorted set
* @define factoryInfo
* This object provides a set of operations needed to create sorted sets of type `$Coll`.
* @author Martin Odersky
@@ -23,4 +25,4 @@ package generic
* @define sortedSetCanBuildFromInfo
* The standard `CanBuildFrom` instance for sorted sets
*/
-abstract class ImmutableSortedSetFactory[CC[A] <: immutable.SortedSet[A] with SortedSetLike[A, CC[A]]] extends SortedSetFactory[CC]
\ No newline at end of file
+abstract class ImmutableSortedSetFactory[CC[A] <: immutable.SortedSet[A] with SortedSetLike[A, CC[A]]] extends SortedSetFactory[CC]
diff --git a/src/library/scala/collection/generic/IsTraversableLike.scala b/src/library/scala/collection/generic/IsTraversableLike.scala
new file mode 100644
index 0000000..c70772d
--- /dev/null
+++ b/src/library/scala/collection/generic/IsTraversableLike.scala
@@ -0,0 +1,129 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package generic
+
+/** A trait which can be used to avoid code duplication when defining extension
+ * methods that should be applicable both to existing Scala collections (i.e.,
+ * types extending `GenTraversableLike`) as well as other (potentially user-defined)
+ * types that could be converted to a Scala collection type. This trait
+ * makes it possible to treat Scala collections and types that can be implicitly
+ * converted to a collection type uniformly. For example, one can provide
+ * extension methods that work both on collection types and on `String`s (`String`s
+ * do not extend `GenTraversableLike`, but can be converted to `GenTraversableLike`)
+ *
+ * `IsTraversable` provides two members:
+ *
+ * 1. type member `A`, which represents the element type of the target `GenTraversableLike[A, Repr]`
+ * 1. value member `conversion`, which provides a way to convert between the type we wish to add extension methods to, `Repr`, and `GenTraversableLike[A, Repr]`.
+ *
+ * ===Usage===
+ *
+ * One must provide `IsTraversableLike` as an implicit parameter type of an implicit
+ * conversion. Its usage is shown below. Our objective in the following example
+ * is to provide a generic extension method `mapReduce` to any type that extends
+ * or can be converted to `GenTraversableLike`. In our example, this includes
+ * `String`.
+ *
+ * {{{
+ * import scala.collection.GenTraversableLike
+ * import scala.collection.generic.IsTraversableLike
+ *
+ * class ExtensionMethods[A, Repr](coll: GenTraversableLike[A, Repr]) {
+ * def mapReduce[B](mapper: A => B)(reducer: (B, B) => B): B = {
+ * val iter = coll.toIterator
+ * var res = mapper(iter.next())
+ * while (iter.hasNext)
+ * res = reducer(res, mapper(iter.next()))
+ * res
+ * }
+ * }
+ *
+ * implicit def withExtensions[Repr](coll: Repr)(implicit traversable: IsTraversableLike[Repr]) =
+ * new ExtensionMethods(traversable.conversion(coll))
+ *
+ * // See it in action!
+ * List(1, 2, 3).mapReduce(_ * 2)(_ + _) // res0: Int = 12
+ * "Yeah, well, you know, that's just, like, your opinion, man.".mapReduce(x => 1)(_ + _) // res1: Int = 59
+ *}}}
+ *
+ * Here, we begin by creating a class `ExtensionMethods` which contains our
+ * `mapReduce` extension method. Note that `ExtensionMethods` takes a constructor
+ * argument `coll` of type `GenTraversableLike[A, Repr]`, where `A` represents the
+ * element type and `Repr` represents (typically) the collection type. The
+ * implementation of `mapReduce` itself is straightforward.
+ *
+ * The interesting bit is the implicit conversion `withExtensions`, which
+ * returns an instance of `ExtensionMethods`. This implicit conversion can
+ * only be applied if there is an implicit value `traversable` of type
+ * `IsTraversableLike[Repr]` in scope. Since `IsTraversableLike` provides
+ * value member `conversion`, which gives us a way to convert between whatever
+ * type we wish to add an extension method to (in this case, `Repr`) and
+ * `GenTraversableLike[A, Repr]`, we can now convert `coll` from type `Repr`
+ * to `GenTraversableLike[A, Repr]`. This allows us to create an instance of
+ * the `ExtensionMethods` class, which we pass our new
+ * `GenTraversableLike[A, Repr]` to.
+ *
+ * When the `mapReduce` method is called on some type of which it is not
+ * a member, implicit search is triggered. Because implicit conversion
+ * `withExtensions` is generic, it will be applied as long as an implicit
+ * value of type `IsTraversableLike[Repr]` can be found. Given that
+ * `IsTraversableLike` contains implicit members that return values of type
+ * `IsTraversableLike`, this requirement is typically satisfied, and the chain
+ * of interactions described in the previous paragraph is set into action.
+ * (See the `IsTraversableLike` companion object, which contains a precise
+ * specification of the available implicits.)
+ *
+ * ''Note'': Currently, it's not possible to combine the implicit conversion and
+ * the class with the extension methods into an implicit class due to
+ * limitations of type inference.
+ *
+ * ===Implementing `IsTraversableLike` for New Types===
+ *
+ * One must simply provide an implicit value of type `IsTraversableLike`
+ * specific to the new type, or an implicit conversion which returns an
+ * instance of `IsTraversableLike` specific to the new type.
+ *
+ * Below is an example of an implementation of the `IsTraversableLike` trait
+ * where the `Repr` type is `String`.
+ *
+ *{{{
+ * implicit val stringRepr: IsTraversableLike[String] { type A = Char } =
+ * new IsTraversableLike[String] {
+ * type A = Char
+ * val conversion = implicitly[String => GenTraversableLike[Char, String]]
+ * }
+ *}}}
+ *
+ * @author Miles Sabin
+ * @author J. Suereth
+ * @since 2.10
+ */
+trait IsTraversableLike[Repr] {
+ /** The type of elements we can traverse over. */
+ type A
+ /** A conversion from the representation type `Repr` to a `GenTraversableLike[A,Repr]`. */
+ val conversion: Repr => GenTraversableLike[A, Repr]
+}
+
+object IsTraversableLike {
+ import scala.language.higherKinds
+
+ implicit val stringRepr: IsTraversableLike[String] { type A = Char } =
+ new IsTraversableLike[String] {
+ type A = Char
+ val conversion = implicitly[String => GenTraversableLike[Char, String]]
+ }
+
+ implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableLike[A0,C[A0]]): IsTraversableLike[C[A0]] { type A = A0 } =
+ new IsTraversableLike[C[A0]] {
+ type A = A0
+ val conversion = conv
+ }
+}
diff --git a/src/library/scala/collection/generic/IsTraversableOnce.scala b/src/library/scala/collection/generic/IsTraversableOnce.scala
new file mode 100644
index 0000000..bb5404c
--- /dev/null
+++ b/src/library/scala/collection/generic/IsTraversableOnce.scala
@@ -0,0 +1,62 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package generic
+
+/** Type class witnessing that a collection representation type `Repr` has
+ * elements of type `A` and has a conversion to `GenTraversableOnce[A]`.
+ *
+ * This type enables simple enrichment of `GenTraversableOnce`s with extension
+ * methods which can make full use of the mechanics of the Scala collections
+ * framework in their implementation.
+ *
+ * Example usage,
+ * {{{
+ * class FilterMapImpl[A, Repr](val r: GenTraversableOnce[A]) {
+ * final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = {
+ * val b = cbf()
+ * for(e <- r.seq) f(e) foreach (b +=)
+ * b.result
+ * }
+ * }
+ * implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): FilterMapImpl[fr.A,Repr] =
+ * new FilterMapImpl[fr.A, Repr](fr.conversion(r))
+ *
+ * val l = List(1, 2, 3, 4, 5)
+ * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None)
+ * // == List(2, 4)
+ * }}}
+ *
+ * @author Miles Sabin
+ * @author J. Suereth
+ * @since 2.10
+ */
+trait IsTraversableOnce[Repr] {
+ /** The type of elements we can traverse over. */
+ type A
+ /** A conversion from the representation type `Repr` to a `GenTraversableOnce[A]`. */
+ val conversion: Repr => GenTraversableOnce[A]
+}
+
+object IsTraversableOnce {
+ import scala.language.higherKinds
+
+ implicit val stringRepr: IsTraversableOnce[String] { type A = Char } =
+ new IsTraversableOnce[String] {
+ type A = Char
+ val conversion = implicitly[String => GenTraversableOnce[Char]]
+ }
+
+ implicit def genTraversableLikeRepr[C[_], A0](implicit conv: C[A0] => GenTraversableOnce[A0]): IsTraversableOnce[C[A0]] { type A = A0 } =
+ new IsTraversableOnce[C[A0]] {
+ type A = A0
+ val conversion = conv
+ }
+}
+
diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala
index 8f9c4c9..90ebcac 100644
--- a/src/library/scala/collection/generic/IterableForwarder.scala
+++ b/src/library/scala/collection/generic/IterableForwarder.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,22 +11,17 @@
package scala.collection.generic
import scala.collection._
-import collection.mutable.Buffer
-
-/** <p>
- * This trait implements a forwarder for iterable objects. It forwards
- * all calls to a different iterable object, except for
- * </p>
- * <ul>
- * <li><code>toString</code>, <code>hashCode</code>, <code>equals</code>,
- * <code>stringPrefix</code></li>
- * <li><code>newBuilder</code>, <code>view</code></li>
- * <li>all calls creating a new iterable object of the same kind</li>
- * </ul>
- * <p>
- * The above methods are forwarded by subclass <a href="../IterableProxy.html"
- * target="ContentFrame"><code>IterableProxy</code></a>.
- * </p>
+import scala.collection.mutable.Buffer
+
+/** This trait implements a forwarder for iterable objects. It forwards
+ * all calls to a different iterable object, except for
+ *
+ * - `toString`, `hashCode`, `equals`, `stringPrefix`
+ * - `newBuilder`, `view`
+ * - all calls creating a new iterable object of the same kind
+ *
+ * The above methods are forwarded by subclass <a href="../IterableProxy.html"
+ * target="ContentFrame">`IterableProxy`</a>.
*
* @author Martin Odersky
* @version 2.8
diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala
index a60e303..565850b 100644
--- a/src/library/scala/collection/generic/MapFactory.scala
+++ b/src/library/scala/collection/generic/MapFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,7 +11,7 @@ package generic
import mutable.{Builder, MapBuilder}
-import annotation.bridge
+import scala.language.higherKinds
/** A template for companion objects of `Map` and subclasses thereof.
*
@@ -35,6 +35,4 @@ abstract class MapFactory[CC[A, B] <: Map[A, B] with MapLike[A, B, CC[A, B]]] ex
def empty[A, B]: CC[A, B]
- @bridge
- override def apply[A, B](elems: (A, B)*): CC[A, B] = super.apply(elems: _*)
}
diff --git a/src/library/scala/collection/generic/MutableMapFactory.scala b/src/library/scala/collection/generic/MutableMapFactory.scala
index 076e41c..ac139cc 100644
--- a/src/library/scala/collection/generic/MutableMapFactory.scala
+++ b/src/library/scala/collection/generic/MutableMapFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,6 +12,7 @@ package scala.collection
package generic
import mutable.Builder
+import scala.language.higherKinds
/** A template for companion objects of `mutable.Map` and subclasses thereof.
* @author Martin Odersky
diff --git a/src/library/scala/collection/generic/MutableSetFactory.scala b/src/library/scala/collection/generic/MutableSetFactory.scala
index 6130ef2..9c69d53 100644
--- a/src/library/scala/collection/generic/MutableSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSetFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,6 +10,7 @@ package scala.collection
package generic
import mutable.{ Builder, GrowingBuilder }
+import scala.language.higherKinds
abstract class MutableSetFactory[CC[X] <: mutable.Set[X] with mutable.SetLike[X, CC[X]]]
extends SetFactory[CC] {
diff --git a/src/library/scala/collection/generic/MutableSortedSetFactory.scala b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
new file mode 100644
index 0000000..b9be83c
--- /dev/null
+++ b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
@@ -0,0 +1,34 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package generic
+
+import scala.collection.mutable.{ Builder, GrowingBuilder }
+import scala.language.higherKinds
+
+/**
+ * @define Coll `mutable.SortedSet`
+ * @define coll mutable sorted set
+ *
+ * @author Lucien Pereira
+ *
+ */
+abstract class MutableSortedSetFactory[CC[A] <: mutable.SortedSet[A] with SortedSetLike[A, CC[A]] with mutable.Set[A] with mutable.SetLike[A, CC[A]]] extends SortedSetFactory[CC] {
+
+ /**
+ * mutable.SetBuilder uses '+' which is not a primitive for anything extending mutable.SetLike,
+ * this causes serious perfomances issues since each time 'elems = elems + x'
+ * is evaluated elems is cloned (which is O(n)).
+ *
+ * Fortunately GrowingBuilder comes to rescue.
+ *
+ */
+ override def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] = new GrowingBuilder[A, CC[A]](empty)
+
+}
diff --git a/src/library/scala/collection/generic/OrderedTraversableFactory.scala b/src/library/scala/collection/generic/OrderedTraversableFactory.scala
index 259e412..a2de108 100644
--- a/src/library/scala/collection/generic/OrderedTraversableFactory.scala
+++ b/src/library/scala/collection/generic/OrderedTraversableFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,9 +10,7 @@
package scala.collection
package generic
-
-
-
+import scala.language.higherKinds
abstract class OrderedTraversableFactory[CC[X] <: Traversable[X] with GenericOrderedTraversableTemplate[X, CC]]
extends GenericOrderedCompanion[CC] {
diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala
index 637a1dc..bb88d26 100644
--- a/src/library/scala/collection/generic/ParFactory.scala
+++ b/src/library/scala/collection/generic/ParFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,28 +8,27 @@
package scala.collection.generic
-
import scala.collection.parallel.ParIterable
import scala.collection.parallel.Combiner
+import scala.language.higherKinds
-
-
-/** A template class for companion objects of `ParIterable` and subclasses thereof.
- * This class extends `TraversableFactory` and provides a set of operations to create `$Coll` objects.
+/** A template class for companion objects of `ParIterable` and subclasses
+ * thereof. This class extends `TraversableFactory` and provides a set of
+ * operations to create `$Coll` objects.
*
* @define coll parallel collection
- * @define Coll ParIterable
+ * @define Coll `ParIterable`
+ * @since 2.8
*/
abstract class ParFactory[CC[X] <: ParIterable[X] with GenericParTemplate[X, CC]]
extends GenTraversableFactory[CC]
with GenericParCompanion[CC] {
- //type EPC[T, C] = collection.parallel.EnvironmentPassingCombiner[T, C]
+ //type EPC[T, C] = scala.collection.parallel.EnvironmentPassingCombiner[T, C]
- /**
- * A generic implementation of the `CanCombineFrom` trait, which forwards all calls to
- * `apply(from)` to the `genericParBuilder` method of the $coll `from`, and calls to `apply()`
- * to this factory.
+ /** A generic implementation of the `CanCombineFrom` trait, which forwards
+ * all calls to `apply(from)` to the `genericParBuilder` method of the $coll
+ * `from`, and calls to `apply()` to this factory.
*/
class GenericCanCombineFrom[A] extends GenericCanBuildFrom[A] with CanCombineFrom[CC[_], A, CC[A]] {
override def apply(from: Coll) = from.genericCombiner
@@ -41,10 +40,3 @@ extends GenTraversableFactory[CC]
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/generic/ParMapFactory.scala b/src/library/scala/collection/generic/ParMapFactory.scala
index 06105c9..0a6b08a 100644
--- a/src/library/scala/collection/generic/ParMapFactory.scala
+++ b/src/library/scala/collection/generic/ParMapFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,12 +12,16 @@ import scala.collection.parallel.ParMap
import scala.collection.parallel.ParMapLike
import scala.collection.parallel.Combiner
import scala.collection.mutable.Builder
+import scala.language.higherKinds
/** A template class for companion objects of `ParMap` and subclasses thereof.
- * This class extends `TraversableFactory` and provides a set of operations to create `$Coll` objects.
+ * This class extends `TraversableFactory` and provides a set of operations
+ * to create `$Coll` objects.
*
* @define coll parallel map
- * @define Coll ParMap
+ * @define Coll `ParMap`
+ * @author Aleksandar Prokopec
+ * @since 2.8
*/
abstract class ParMapFactory[CC[X, Y] <: ParMap[X, Y] with ParMapLike[X, Y, CC[X, Y], _]]
extends GenMapFactory[CC]
diff --git a/src/library/scala/collection/generic/ParSetFactory.scala b/src/library/scala/collection/generic/ParSetFactory.scala
index 221e893..3727ab8 100644
--- a/src/library/scala/collection/generic/ParSetFactory.scala
+++ b/src/library/scala/collection/generic/ParSetFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,20 +8,16 @@
package scala.collection.generic
+import scala.collection.mutable.Builder
+import scala.collection.parallel.Combiner
+import scala.collection.parallel.ParSet
+import scala.collection.parallel.ParSetLike
+import scala.language.higherKinds
-
-
-
-import collection.mutable.Builder
-import collection.parallel.Combiner
-import collection.parallel.ParSet
-import collection.parallel.ParSetLike
-
-
-
-
-
-
+/**
+ * @author Aleksandar Prokopec
+ * @since 2.8
+ */
abstract class ParSetFactory[CC[X] <: ParSet[X] with ParSetLike[X, CC[X], _] with GenericParTemplate[X, CC]]
extends GenSetFactory[CC]
with GenericParCompanion[CC]
@@ -36,7 +32,3 @@ abstract class ParSetFactory[CC[X] <: ParSet[X] with ParSetLike[X, CC[X], _] wit
}
}
-
-
-
-
diff --git a/src/library/scala/collection/generic/SeqFactory.scala b/src/library/scala/collection/generic/SeqFactory.scala
index 544d8f4..a660747 100644
--- a/src/library/scala/collection/generic/SeqFactory.scala
+++ b/src/library/scala/collection/generic/SeqFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,13 +10,21 @@
package scala.collection
package generic
-
-import annotation.bridge
+import scala.language.higherKinds
/** A template for companion objects of Seq and subclasses thereof.
*
* @since 2.8
*/
abstract class SeqFactory[CC[X] <: Seq[X] with GenericTraversableTemplate[X, CC]]
- extends GenSeqFactory[CC] with TraversableFactory[CC]
+extends GenSeqFactory[CC] with TraversableFactory[CC] {
+
+ /** This method is called in a pattern match { case Seq(...) => }.
+ *
+ * @param x the selector value
+ * @return sequence wrapped in an option, if this is a Seq, otherwise none
+ */
+ def unapplySeq[A](x: CC[A]): Some[CC[A]] = Some(x)
+
+}
diff --git a/src/library/scala/collection/generic/SeqForwarder.scala b/src/library/scala/collection/generic/SeqForwarder.scala
index 2624e63..e8b15ec 100644
--- a/src/library/scala/collection/generic/SeqForwarder.scala
+++ b/src/library/scala/collection/generic/SeqForwarder.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection.generic
import scala.collection._
import scala.collection.immutable.Range
@@ -15,11 +13,11 @@ import scala.collection.immutable.Range
/** This class implements a forwarder for sequences. It forwards
* all calls to a different sequence object except for
*
- * - toString, hashCode, equals, stringPrefix
- * - newBuilder, view, toSeq
+ * - `toString`, `hashCode`, `equals`, `stringPrefix`
+ * - `newBuilder`, `view`, `toSeq`
* - all calls creating a new sequence of the same kind
*
- * The above methods are forwarded by subclass SeqProxy
+ * The above methods are forwarded by subclass `SeqProxy`.
*
* @author Martin Odersky
* @version 2.8
@@ -31,29 +29,28 @@ trait SeqForwarder[+A] extends Seq[A] with IterableForwarder[A] {
override def length: Int = underlying.length
override def apply(idx: Int): A = underlying.apply(idx)
- override def lengthCompare(len: Int): Int = underlying.lengthCompare(len)
- override def isDefinedAt(x: Int): Boolean = underlying.isDefinedAt(x)
+ override def lengthCompare(len: Int): Int = underlying lengthCompare len
+ override def isDefinedAt(x: Int): Boolean = underlying isDefinedAt x
override def segmentLength(p: A => Boolean, from: Int): Int = underlying.segmentLength(p, from)
- override def prefixLength(p: A => Boolean) = underlying.prefixLength(p)
- override def indexWhere(p: A => Boolean): Int = underlying.indexWhere(p)
+ override def prefixLength(p: A => Boolean) = underlying prefixLength p
+ override def indexWhere(p: A => Boolean): Int = underlying indexWhere p
override def indexWhere(p: A => Boolean, from: Int): Int = underlying.indexWhere(p, from)
- override def findIndexOf(p: A => Boolean): Int = underlying.indexWhere(p)
- override def indexOf[B >: A](elem: B): Int = underlying.indexOf(elem)
+ override def indexOf[B >: A](elem: B): Int = underlying indexOf elem
override def indexOf[B >: A](elem: B, from: Int): Int = underlying.indexOf(elem, from)
- override def lastIndexOf[B >: A](elem: B): Int = underlying.lastIndexOf(elem)
+ override def lastIndexOf[B >: A](elem: B): Int = underlying lastIndexOf elem
override def lastIndexOf[B >: A](elem: B, end: Int): Int = underlying.lastIndexOf(elem, end)
- override def lastIndexWhere(p: A => Boolean): Int = underlying.lastIndexWhere(p)
+ override def lastIndexWhere(p: A => Boolean): Int = underlying lastIndexWhere p
override def lastIndexWhere(p: A => Boolean, end: Int): Int = underlying.lastIndexWhere(p, end)
override def reverseIterator: Iterator[A] = underlying.reverseIterator
override def startsWith[B](that: GenSeq[B], offset: Int): Boolean = underlying.startsWith(that, offset)
- override def startsWith[B](that: GenSeq[B]): Boolean = underlying.startsWith(that)
- override def endsWith[B](that: GenSeq[B]): Boolean = underlying.endsWith(that)
- override def indexOfSlice[B >: A](that: GenSeq[B]): Int = underlying.indexOfSlice(that)
+ override def startsWith[B](that: GenSeq[B]): Boolean = underlying startsWith that
+ override def endsWith[B](that: GenSeq[B]): Boolean = underlying endsWith that
+ override def indexOfSlice[B >: A](that: GenSeq[B]): Int = underlying indexOfSlice that
override def indexOfSlice[B >: A](that: GenSeq[B], from: Int): Int = underlying.indexOfSlice(that, from)
- override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = underlying.lastIndexOfSlice(that)
+ override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = underlying lastIndexOfSlice that
override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = underlying.lastIndexOfSlice(that, end)
- override def containsSlice[B](that: GenSeq[B]): Boolean = underlying.containsSlice(that)
- override def contains(elem: Any): Boolean = underlying.contains(elem)
+ override def containsSlice[B](that: GenSeq[B]): Boolean = underlying containsSlice that
+ override def contains(elem: Any): Boolean = underlying contains elem
override def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = underlying.corresponds(that)(p)
override def indices: Range = underlying.indices
}
diff --git a/src/library/scala/collection/generic/SetFactory.scala b/src/library/scala/collection/generic/SetFactory.scala
index 348743a..e9bbde9 100644
--- a/src/library/scala/collection/generic/SetFactory.scala
+++ b/src/library/scala/collection/generic/SetFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,14 +12,7 @@ package scala.collection
package generic
import mutable.Builder
-import annotation.bridge
+import scala.language.higherKinds
abstract class SetFactory[CC[X] <: Set[X] with SetLike[X, CC[X]]]
- extends GenSetFactory[CC] with GenericSeqCompanion[CC] {
-
- @bridge
- override def empty[A]: CC[A] = super.empty[A]
-
- @bridge
- override def apply[A](elems: A*): CC[A] = super.apply(elems: _*)
-}
+ extends GenSetFactory[CC] with GenericSeqCompanion[CC]
diff --git a/src/library/scala/collection/generic/Shrinkable.scala b/src/library/scala/collection/generic/Shrinkable.scala
index 88c7ce3..b00048f 100644
--- a/src/library/scala/collection/generic/Shrinkable.scala
+++ b/src/library/scala/collection/generic/Shrinkable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -17,7 +17,7 @@ package generic
* @version 2.8
* @since 2.8
* @define coll shrinkable collection
- * @define Coll Shrinkable
+ * @define Coll `Shrinkable`
*/
trait Shrinkable[-A] {
@@ -43,7 +43,7 @@ trait Shrinkable[-A] {
/** Removes all elements produced by an iterator from this $coll.
*
- * @param iter the iterator producing the elements to remove.
+ * @param xs the iterator producing the elements to remove.
* @return the $coll itself
*/
def --=(xs: TraversableOnce[A]): this.type = { xs.seq foreach -= ; this }
diff --git a/src/library/scala/collection/generic/Signalling.scala b/src/library/scala/collection/generic/Signalling.scala
index cab3802..498db7f 100644
--- a/src/library/scala/collection/generic/Signalling.scala
+++ b/src/library/scala/collection/generic/Signalling.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/generic/Sizing.scala b/src/library/scala/collection/generic/Sizing.scala
index 17181dc..1191259 100644
--- a/src/library/scala/collection/generic/Sizing.scala
+++ b/src/library/scala/collection/generic/Sizing.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.generic
/** A trait for objects which have a size.
diff --git a/src/library/scala/collection/generic/SliceInterval.scala b/src/library/scala/collection/generic/SliceInterval.scala
index 56033ca..244e960 100644
--- a/src/library/scala/collection/generic/SliceInterval.scala
+++ b/src/library/scala/collection/generic/SliceInterval.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -32,7 +32,7 @@ private[collection] class SliceInterval private (val from: Int, val until: Int)
*/
def recalculate(_from: Int, _until: Int): SliceInterval = {
val lo = _from max 0
- val elems = math.min(_until - lo, width)
+ val elems = scala.math.min(_until - lo, width)
val start = from + lo
if (elems <= 0) new SliceInterval(from, from)
diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala
index 99eb680..f962b26 100644
--- a/src/library/scala/collection/generic/Sorted.scala
+++ b/src/library/scala/collection/generic/Sorted.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.collection
package generic
@@ -31,17 +30,19 @@ trait Sorted[K, +This <: Sorted[K, This]] {
def lastKey: K
/** Comparison function that orders keys. */
- def compare(k0: K, k1: K): Int = ordering.compare(k0, k1);
+ def compare(k0: K, k1: K): Int = ordering.compare(k0, k1)
/** Creates a ranged projection of this collection. Any mutations in the
- * ranged projection will update this collection and vice versa. Note: keys
- * are not garuanteed to be consistent between this collection and the projection.
- * This is the case for buffers where indexing is relative to the projection.
+ * ranged projection will update this collection and vice versa.
+ *
+ * Note: keys are not garuanteed to be consistent between this collection
+ * and the projection. This is the case for buffers where indexing is
+ * relative to the projection.
*
* @param from The lower-bound (inclusive) of the ranged projection.
- * <code>None</code> if there is no lower bound.
+ * `None` if there is no lower bound.
* @param until The upper-bound (exclusive) of the ranged projection.
- * <code>None</code> if there is no upper bound.
+ * `None` if there is no upper bound.
*/
def rangeImpl(from: Option[K], until: Option[K]): This
@@ -61,8 +62,6 @@ trait Sorted[K, +This <: Sorted[K, This]] {
* and an upper-bound.
*
* @param from The upper-bound (exclusive) of the ranged projection.
- * @param until ...
- * @return ...
*/
def range(from: K, until: K): This = rangeImpl(Some(from), Some(until))
@@ -70,11 +69,10 @@ trait Sorted[K, +This <: Sorted[K, This]] {
* @param to The upper-bound (inclusive) of the ranged projection.
*/
def to(to: K): This = {
- // tough!
val i = keySet.from(to).iterator
if (i.isEmpty) return repr
val next = i.next
- if (next == to)
+ if (compare(next, to) == 0)
if (i.isEmpty) repr
else until(i.next)
else
diff --git a/src/library/scala/collection/generic/SortedMapFactory.scala b/src/library/scala/collection/generic/SortedMapFactory.scala
index 962a945..17201b0 100644
--- a/src/library/scala/collection/generic/SortedMapFactory.scala
+++ b/src/library/scala/collection/generic/SortedMapFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,6 +12,7 @@ package scala.collection
package generic
import mutable.{Builder, MapBuilder}
+import scala.language.higherKinds
/** A template for companion objects of mutable.Map and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/SortedSetFactory.scala b/src/library/scala/collection/generic/SortedSetFactory.scala
index 45340cf..08bca04 100644
--- a/src/library/scala/collection/generic/SortedSetFactory.scala
+++ b/src/library/scala/collection/generic/SortedSetFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,6 +12,7 @@ package scala.collection
package generic
import mutable.{Builder, SetBuilder}
+import scala.language.higherKinds
/** A template for companion objects of Set and subclasses thereof.
*
diff --git a/src/library/scala/collection/generic/Subtractable.scala b/src/library/scala/collection/generic/Subtractable.scala
index 1ca9d70..e0fe07a 100644
--- a/src/library/scala/collection/generic/Subtractable.scala
+++ b/src/library/scala/collection/generic/Subtractable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,6 @@
package scala.collection
package generic
-import annotation.bridge
/** This trait represents collection-like objects that can be reduced
* using a '+' operator. It defines variants of `-` and `--`
@@ -53,12 +52,9 @@ trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { self =>
/** Creates a new $coll from this $coll by removing all elements of another
* collection.
*
- * @param elems the collection containing the removed elements.
+ * @param xs the collection containing the removed elements.
* @return a new $coll that contains all elements of the current $coll
* except one less occurrence of each of the elements of `elems`.
*/
def --(xs: GenTraversableOnce[A]): Repr = (repr /: xs.seq) (_ - _)
-
- @bridge
- def --(xs: TraversableOnce[A]): Repr = --(xs: GenTraversableOnce[A])
}
diff --git a/src/library/scala/collection/generic/TraversableFactory.scala b/src/library/scala/collection/generic/TraversableFactory.scala
index e71de12..5d1c9d1 100644
--- a/src/library/scala/collection/generic/TraversableFactory.scala
+++ b/src/library/scala/collection/generic/TraversableFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,7 @@
package scala.collection
package generic
-import annotation.bridge
+import scala.language.higherKinds
/** A template for companion objects of `Traversable` and subclasses thereof.
* This class provides a set of operations to create `$Coll` objects.
@@ -36,48 +36,5 @@ import annotation.bridge
* @see GenericCanBuildFrom
*/
trait TraversableFactory[CC[X] <: Traversable[X] with GenericTraversableTemplate[X, CC]]
- extends GenTraversableFactory[CC] with GenericSeqCompanion[CC] {
-
- @bridge
- override def concat[A](xss: Traversable[A]*): CC[A] = super.concat(xss: _*)
-
- @bridge
- override def fill[A](n: Int)(elem: => A): CC[A] = super.fill(n)(elem)
-
- @bridge
- override def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A]] = super.fill(n1, n2)(elem)
-
- @bridge
- override def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]]] = super.fill(n1, n2, n3)(elem)
-
- @bridge
- override def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]]] = super.fill(n1, n2, n3, n4)(elem)
-
- @bridge
- override def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]]] = super.fill(n1, n2, n3, n4, n5)(elem)
-
- @bridge
- override def tabulate[A](n: Int)(f: Int => A): CC[A] = super.tabulate(n)(f)
-
- @bridge
- override def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A]] = super.tabulate(n1, n2)(f)
-
- @bridge
- override def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]]] = super.tabulate(n1, n2, n3)(f)
-
- @bridge
- override def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]]] = super.tabulate(n1, n2, n3, n4)(f)
-
- @bridge
- override def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]]] = super.tabulate(n1, n2, n3, n4, n5)(f)
-
- @bridge
- override def range[T: Integral](start: T, end: T): CC[T] = super.range(start, end)
-
- @bridge
- override def range[T: Integral](start: T, end: T, step: T): CC[T] = super.range(start, end, step)
-
- @bridge
- override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = super.iterate(start, len)(f)
-}
+ extends GenTraversableFactory[CC] with GenericSeqCompanion[CC]
diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala
index 0049d23..2662018 100644
--- a/src/library/scala/collection/generic/TraversableForwarder.scala
+++ b/src/library/scala/collection/generic/TraversableForwarder.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,12 +11,14 @@ package scala.collection.generic
import scala.collection._
import mutable.{ Buffer, StringBuilder }
import immutable.{ List, Stream }
+import scala.reflect.ClassTag
/** This trait implements a forwarder for traversable objects. It forwards
* all calls to a different traversable, except for:
- {{{
- toString, hashCode, equals, stringPrefix, newBuilder, view
- }}}
+ *
+ * - `toString`, `hashCode`, `equals`, `stringPrefix`
+ * - `newBuilder`, `view`
+ *
* All calls creating a new traversable of the same kind.
*
* @author Martin Odersky
@@ -27,15 +29,15 @@ trait TraversableForwarder[+A] extends Traversable[A] {
/** The traversable object to which calls are forwarded. */
protected def underlying: Traversable[A]
- override def foreach[B](f: A => B): Unit = underlying.foreach(f)
+ override def foreach[B](f: A => B): Unit = underlying foreach f
override def isEmpty: Boolean = underlying.isEmpty
override def nonEmpty: Boolean = underlying.nonEmpty
override def size: Int = underlying.size
override def hasDefiniteSize = underlying.hasDefiniteSize
- override def forall(p: A => Boolean): Boolean = underlying.forall(p)
- override def exists(p: A => Boolean): Boolean = underlying.exists(p)
- override def count(p: A => Boolean): Int = underlying.count(p)
- override def find(p: A => Boolean): Option[A] = underlying.find(p)
+ override def forall(p: A => Boolean): Boolean = underlying forall p
+ override def exists(p: A => Boolean): Boolean = underlying exists p
+ override def count(p: A => Boolean): Int = underlying count p
+ override def find(p: A => Boolean): Option[A] = underlying find p
override def foldLeft[B](z: B)(op: (B, A) => B): B = underlying.foldLeft(z)(op)
override def /: [B](z: B)(op: (B, A) => B): B = underlying./:(z)(op)
override def foldRight[B](z: B)(op: (A, B) => B): B = underlying.foldRight(z)(op)
@@ -44,10 +46,10 @@ trait TraversableForwarder[+A] extends Traversable[A] {
override def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = underlying.reduceLeftOption(op)
override def reduceRight[B >: A](op: (A, B) => B): B = underlying.reduceRight(op)
override def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = underlying.reduceRightOption(op)
- override def sum[B >: A](implicit num: Numeric[B]): B = underlying.sum(num)
- override def product[B >: A](implicit num: Numeric[B]): B = underlying.product(num)
- override def min[B >: A](implicit cmp: Ordering[B]): A = underlying.min(cmp)
- override def max[B >: A](implicit cmp: Ordering[B]): A = underlying.max(cmp)
+ override def sum[B >: A](implicit num: Numeric[B]): B = underlying sum num
+ override def product[B >: A](implicit num: Numeric[B]): B = underlying product num
+ override def min[B >: A](implicit cmp: Ordering[B]): A = underlying min cmp
+ override def max[B >: A](implicit cmp: Ordering[B]): A = underlying max cmp
override def head: A = underlying.head
override def headOption: Option[A] = underlying.headOption
override def last: A = underlying.last
@@ -56,11 +58,11 @@ trait TraversableForwarder[+A] extends Traversable[A] {
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) = underlying.copyToArray(xs, start, len)
override def copyToArray[B >: A](xs: Array[B], start: Int) = underlying.copyToArray(xs, start)
override def copyToArray[B >: A](xs: Array[B]) = underlying.copyToArray(xs)
- override def toArray[B >: A: ClassManifest]: Array[B] = underlying.toArray
+ override def toArray[B >: A: ClassTag]: Array[B] = underlying.toArray
override def toList: List[A] = underlying.toList
override def toIterable: Iterable[A] = underlying.toIterable
override def toSeq: Seq[A] = underlying.toSeq
- override def toIndexedSeq[B >: A] = underlying.toIndexedSeq
+ override def toIndexedSeq = underlying.toIndexedSeq
override def toBuffer[B >: A] = underlying.toBuffer
override def toStream: Stream[A] = underlying.toStream
override def toSet[B >: A]: immutable.Set[B] = underlying.toSet
diff --git a/src/library/scala/collection/generic/package.scala b/src/library/scala/collection/generic/package.scala
index 0457fef..dd47b7a 100644
--- a/src/library/scala/collection/generic/package.scala
+++ b/src/library/scala/collection/generic/package.scala
@@ -1,6 +1,17 @@
package scala.collection
import generic.CanBuildFrom
+import scala.language.higherKinds
+
package object generic {
type CanBuild[-Elem, +To] = CanBuildFrom[Nothing, Elem, To]
-}
\ No newline at end of file
+
+ @deprecated("use ClassTagTraversableFactory instead", "2.10.0")
+ type ClassManifestTraversableFactory[CC[X] <: Traversable[X] with GenericClassManifestTraversableTemplate[X, CC]] = ClassTagTraversableFactory[CC]
+
+ @deprecated("use GenericClassTagCompanion instead", "2.10.0")
+ type GenericClassManifestCompanion[+CC[X] <: Traversable[X]] = GenericClassTagCompanion[CC]
+
+ @deprecated("use GenericClassTagTraversableTemplate instead", "2.10.0")
+ type GenericClassManifestTraversableTemplate[+A, +CC[X] <: Traversable[X]] = GenericClassTagTraversableTemplate[A, CC]
+}
diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala
index 301e744..ed3630e 100644
--- a/src/library/scala/collection/immutable/BitSet.scala
+++ b/src/library/scala/collection/immutable/BitSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -20,20 +20,23 @@ import mutable.{ Builder, SetBuilder }
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable_bitsets "Scala's Collection Library overview"]]
* section on `Immutable BitSets` for more information.
*
- * @define Coll immutable.BitSet
+ * @define Coll `immutable.BitSet`
* @define coll immutable bitset
*/
@SerialVersionUID(1611436763290191562L)
-abstract class BitSet extends Set[Int]
+abstract class BitSet extends scala.collection.AbstractSet[Int]
+ with SortedSet[Int]
with scala.collection.BitSet
with BitSetLike[BitSet]
with Serializable {
override def empty = BitSet.empty
- def fromArray(elems: Array[Long]): BitSet = BitSet.fromArray(elems)
+ @deprecated("Use BitSet.fromBitMask[NoCopy] instead of fromArray", "2.10.0")
+ def fromArray(elems: Array[Long]): BitSet = fromBitMaskNoCopy(elems)
- /** Update word at index <code>idx</code>; enlarge set if <code>idx</code>
- * outside range of set.
+ protected def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems)
+
+ /** Update word at index `idx`; enlarge set if `idx` outside range of set.
*/
protected def updateWord(idx: Int, w: Long): BitSet
@@ -60,21 +63,45 @@ abstract class BitSet extends Set[Int]
}
/** $factoryInfo
- * @define Coll immutable.BitSet
+ * @define Coll `immutable.BitSet`
* @define coll immutable bitset
*/
object BitSet extends BitSetFactory[BitSet] {
/** The empty bitset */
val empty: BitSet = new BitSet1(0L)
- /** An adding builder for immutable Sets. */
- def newBuilder: Builder[Int, BitSet] = new SetBuilder[Int, BitSet](empty)
+ /** A builder that takes advantage of mutable BitSets. */
+ def newBuilder: Builder[Int, BitSet] = new Builder[Int, BitSet] {
+ private[this] val b = new mutable.BitSet
+ def += (x: Int) = { b += x; this }
+ def clear() = b.clear
+ def result() = b.toImmutable
+ }
/** $bitsetCanBuildFrom */
implicit def canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom
/** A bitset containing all the bits in an array */
- def fromArray(elems: Array[Long]): BitSet = {
+ @deprecated("Use fromBitMask[NoCopy] instead of fromArray", "2.10.0")
+ def fromArray(elems: Array[Long]): BitSet = fromBitMaskNoCopy(elems)
+
+ /** A bitset containing all the bits in an array */
+ def fromBitMask(elems: Array[Long]): BitSet = {
+ val len = elems.length
+ if (len == 0) empty
+ else if (len == 1) new BitSet1(elems(0))
+ else if (len == 2) new BitSet2(elems(0), elems(1))
+ else {
+ val a = new Array[Long](len)
+ Array.copy(elems, 0, a, 0, len)
+ new BitSetN(a)
+ }
+ }
+
+ /** A bitset containing all the bits in an array, wrapping the existing
+ * array without copying.
+ */
+ def fromBitMaskNoCopy(elems: Array[Long]): BitSet = {
val len = elems.length
if (len == 0) empty
else if (len == 1) new BitSet1(elems(0))
@@ -88,7 +115,7 @@ object BitSet extends BitSetFactory[BitSet] {
protected def updateWord(idx: Int, w: Long): BitSet =
if (idx == 0) new BitSet1(w)
else if (idx == 1) new BitSet2(elems, w)
- else fromArray(updateArray(Array(elems), idx, w))
+ else fromBitMaskNoCopy(updateArray(Array(elems), idx, w))
}
class BitSet2(val elems0: Long, elems1: Long) extends BitSet {
@@ -97,13 +124,18 @@ object BitSet extends BitSetFactory[BitSet] {
protected def updateWord(idx: Int, w: Long): BitSet =
if (idx == 0) new BitSet2(w, elems1)
else if (idx == 1) new BitSet2(elems0, w)
- else fromArray(updateArray(Array(elems0, elems1), idx, w))
+ else fromBitMaskNoCopy(updateArray(Array(elems0, elems1), idx, w))
}
+ /** The implementing class for bit sets with elements >= 128 (exceeding
+ * the capacity of two long values). The constructor wraps an existing
+ * bit mask without copying, thus exposing a mutable part of the internal
+ * implementation. Care needs to be taken not to modify the exposed
+ * array.
+ */
class BitSetN(val elems: Array[Long]) extends BitSet {
protected def nwords = elems.length
protected def word(idx: Int) = if (idx < nwords) elems(idx) else 0L
- protected def updateWord(idx: Int, w: Long): BitSet = fromArray(updateArray(elems, idx, w))
+ protected def updateWord(idx: Int, w: Long): BitSet = fromBitMaskNoCopy(updateArray(elems, idx, w))
}
}
-
diff --git a/src/library/scala/collection/immutable/DefaultMap.scala b/src/library/scala/collection/immutable/DefaultMap.scala
old mode 100644
new mode 100755
index 155da0f..4a0503a
--- a/src/library/scala/collection/immutable/DefaultMap.scala
+++ b/src/library/scala/collection/immutable/DefaultMap.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/immutable/GenIterable.scala.disabled b/src/library/scala/collection/immutable/GenIterable.scala.disabled
index 252c721..d34f7fd 100644
--- a/src/library/scala/collection/immutable/GenIterable.scala.disabled
+++ b/src/library/scala/collection/immutable/GenIterable.scala.disabled
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -31,7 +31,7 @@ trait GenIterable[+A] extends GenTraversable[A]
// object GenIterable extends TraversableFactory[GenIterable] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = new GenericCanBuildFrom[A]
+// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
// def newBuilder[A]: Builder[A, GenIterable[A]] = Iterable.newBuilder
// }
diff --git a/src/library/scala/collection/immutable/GenMap.scala.disabled b/src/library/scala/collection/immutable/GenMap.scala.disabled
index eb7ef29..73557a4 100644
--- a/src/library/scala/collection/immutable/GenMap.scala.disabled
+++ b/src/library/scala/collection/immutable/GenMap.scala.disabled
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/immutable/GenSeq.scala.disabled b/src/library/scala/collection/immutable/GenSeq.scala.disabled
index 36aff8f..713529f 100644
--- a/src/library/scala/collection/immutable/GenSeq.scala.disabled
+++ b/src/library/scala/collection/immutable/GenSeq.scala.disabled
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -25,7 +25,7 @@ import mutable.Builder
*
* The class adds an `update` method to `collection.Seq`.
*
- * @define Coll mutable.Seq
+ * @define Coll `mutable.Seq`
* @define coll mutable sequence
*/
trait GenSeq[+A] extends GenIterable[A]
@@ -39,7 +39,7 @@ trait GenSeq[+A] extends GenIterable[A]
// object GenSeq extends SeqFactory[GenSeq] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = new GenericCanBuildFrom[A]
+// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
// def newBuilder[A]: Builder[A, GenSeq[A]] = Seq.newBuilder
// }
diff --git a/src/library/scala/collection/immutable/GenSet.scala.disabled b/src/library/scala/collection/immutable/GenSet.scala.disabled
index 3cca6ba..56bd273 100644
--- a/src/library/scala/collection/immutable/GenSet.scala.disabled
+++ b/src/library/scala/collection/immutable/GenSet.scala.disabled
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -24,7 +24,7 @@ import mutable.Builder
*
* @since 1.0
* @author Matthias Zenger
- * @define Coll mutable.Set
+ * @define Coll `mutable.Set`
* @define coll mutable set
*/
trait GenSet[A] extends GenIterable[A]
@@ -38,6 +38,6 @@ trait GenSet[A] extends GenIterable[A]
// object GenSet extends TraversableFactory[GenSet] {
-// implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+// implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
// def newBuilder[A] = Set.newBuilder
// }
diff --git a/src/library/scala/collection/immutable/GenTraversable.scala.disabled b/src/library/scala/collection/immutable/GenTraversable.scala.disabled
index 2ee9bd9..e5b609f 100644
--- a/src/library/scala/collection/immutable/GenTraversable.scala.disabled
+++ b/src/library/scala/collection/immutable/GenTraversable.scala.disabled
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -34,7 +34,7 @@ trait GenTraversable[+A] extends scala.collection.GenTraversable[A]
// object GenTraversable extends TraversableFactory[GenTraversable] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenTraversable[A]] = new GenericCanBuildFrom[A]
+// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenTraversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
// def newBuilder[A]: Builder[A, GenTraversable[A]] = Traversable.newBuilder
// }
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 82be6ca..84416a6 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -1,24 +1,22 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package immutable
import generic._
-import annotation.unchecked.{ uncheckedVariance=> uV }
+import scala.annotation.unchecked.{ uncheckedVariance=> uV }
import parallel.immutable.ParHashMap
/** This class implements immutable maps using a hash trie.
*
- * '''Note:''' the builder of a hash map returns specialized representations EmptyMap,Map1,..., Map4
- * for maps of size <= 4.
+ * '''Note:''' The builder of this hash map may return specialized representations for small maps.
*
* @tparam A the type of the keys contained in this hash map.
* @tparam B the type of the values associated with the keys.
@@ -29,14 +27,18 @@ import parallel.immutable.ParHashMap
* @since 2.3
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#hash_tries "Scala's Collection Library overview"]]
* section on `Hash Tries` for more information.
- * @define Coll immutable.HashMap
+ * @define Coll `immutable.HashMap`
* @define coll immutable hash map
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
@SerialVersionUID(2L)
-class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] with Serializable with CustomParallelizable[(A, B), ParHashMap[A, B]] {
-
+class HashMap[A, +B] extends AbstractMap[A, B]
+ with Map[A, B]
+ with MapLike[A, B, HashMap[A, B]]
+ with Serializable
+ with CustomParallelizable[(A, B), ParHashMap[A, B]]
+{
override def size: Int = 0
override def empty = HashMap.empty[A, B]
@@ -72,11 +74,11 @@ class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] with Ser
private[collection] def computeHash(key: A) = improve(elemHashCode(key))
- protected type Merger[B1] = ((A, B1), (A, B1)) => (A, B1)
+ import HashMap.{Merger, MergeFunction, liftMerger}
private[collection] def get0(key: A, hash: Int, level: Int): Option[B] = None
- private[collection] def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[B1]): HashMap[A, B1] =
+ private[collection] def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] =
new HashMap.HashMap1(key, hash, value, kv)
protected def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = this
@@ -85,33 +87,92 @@ class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] with Ser
def split: Seq[HashMap[A, B]] = Seq(this)
- def merge[B1 >: B](that: HashMap[A, B1], merger: Merger[B1] = null): HashMap[A, B1] = merge0(that, 0, merger)
-
- protected def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[B1]): HashMap[A, B1] = that
+ @deprecated("Use the `merged` method instead.", "2.10.0")
+ def merge[B1 >: B](that: HashMap[A, B1], mergef: MergeFunction[A, B1] = null): HashMap[A, B1] = merge0(that, 0, liftMerger(mergef))
+
+ /** Creates a new map which is the merge of this and the argument hash map.
+ *
+ * Uses the specified collision resolution function if two keys are the same.
+ * The collision resolution function will always take the first argument from
+ * `this` hash map and the second from `that`.
+ *
+ * The `merged` method is on average more performant than doing a traversal and reconstructing a
+ * new immutable hash map from scratch, or `++`.
+ *
+ * @tparam B1 the value type of the other hash map
+ * @param that the other hash map
+ * @param mergef the merge function or null if the first key-value pair is to be picked
+ */
+ def merged[B1 >: B](that: HashMap[A, B1])(mergef: MergeFunction[A, B1]): HashMap[A, B1] = merge0(that, 0, liftMerger(mergef))
+
+ protected def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = that
override def par = ParHashMap.fromTrie(this)
}
/** $factoryInfo
- * @define Coll immutable.HashMap
+ * @define Coll `immutable.HashMap`
* @define coll immutable hash map
*
* @author Tiark Rompf
* @since 2.3
*/
object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
+
+ private[collection] abstract class Merger[A, B] {
+ def apply(kv1: (A, B), kv2: (A, B)): (A, B)
+ def invert: Merger[A, B]
+ }
+
+ private type MergeFunction[A1, B1] = ((A1, B1), (A1, B1)) => (A1, B1)
+
+ private def liftMerger[A1, B1](mergef: MergeFunction[A1, B1]): Merger[A1, B1] =
+ if (mergef == null) defaultMerger.asInstanceOf[Merger[A1, B1]] else liftMerger0(mergef)
+
+ private[this] val defaultMerger : Merger[Any, Any] = liftMerger0((a,b) => a)
+
+ private[this] def liftMerger0[A1, B1](mergef: MergeFunction[A1, B1]): Merger[A1, B1] = new Merger[A1, B1] {
+ self =>
+ def apply(kv1: (A1, B1), kv2: (A1, B1)): (A1, B1) = mergef(kv1, kv2)
+ val invert: Merger[A1, B1] = new Merger[A1, B1] {
+ def apply(kv1: (A1, B1), kv2: (A1, B1)): (A1, B1) = mergef(kv2, kv1)
+ def invert: Merger[A1, B1] = self
+ }
+ }
+
/** $mapCanBuildFromInfo */
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), HashMap[A, B]] = new MapCanBuildFrom[A, B]
def empty[A, B]: HashMap[A, B] = EmptyHashMap.asInstanceOf[HashMap[A, B]]
- private object EmptyHashMap extends HashMap[Any,Nothing] {
-
+ private object EmptyHashMap extends HashMap[Any, Nothing] { }
+
+ // utility method to create a HashTrieMap from two leaf HashMaps (HashMap1 or HashMapCollision1) with non-colliding hash code)
+ private def makeHashTrieMap[A, B](hash0:Int, elem0:HashMap[A, B], hash1:Int, elem1:HashMap[A, B], level:Int, size:Int) : HashTrieMap[A, B] = {
+ val index0 = (hash0 >>> level) & 0x1f
+ val index1 = (hash1 >>> level) & 0x1f
+ if(index0 != index1) {
+ val bitmap = (1 << index0) | (1 << index1)
+ val elems = new Array[HashMap[A,B]](2)
+ if(index0 < index1) {
+ elems(0) = elem0
+ elems(1) = elem1
+ } else {
+ elems(0) = elem1
+ elems(1) = elem0
+ }
+ new HashTrieMap[A, B](bitmap, elems, size)
+ } else {
+ val elems = new Array[HashMap[A,B]](1)
+ val bitmap = (1 << index0)
+ elems(0) = makeHashTrieMap(hash0, elem0, hash1, elem1, level + 5, size)
+ new HashTrieMap[A, B](bitmap, elems, size)
+ }
}
// TODO: add HashMap2, HashMap3, ...
- class HashMap1[A,+B](private[HashMap] var key: A, private[HashMap] var hash: Int, private[collection] var value: (B @uV), private[collection] var kv: (A,B @uV)) extends HashMap[A,B] {
+ class HashMap1[A,+B](private[collection] val key: A, private[collection] val hash: Int, private[collection] val value: (B @uV), private[collection] var kv: (A,B @uV)) extends HashMap[A,B] {
override def size = 1
private[collection] def getKey = key
@@ -136,35 +197,20 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
// }
// }
- override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[B1]): HashMap[A, B1] =
+ private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] =
if (hash == this.hash && key == this.key ) {
- if (merger eq null) new HashMap1(key, hash, value, kv)
- else new HashMap1(key, hash, value, merger(this.kv, kv))
+ if (merger eq null) {
+ if (this.value.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this
+ else new HashMap1(key, hash, value, kv)
+ } else {
+ val nkv = merger(this.kv, kv)
+ new HashMap1(nkv._1, hash, nkv._2, nkv)
+ }
} else {
- var thatindex = (hash >>> level) & 0x1f
- var thisindex = (this.hash >>> level) & 0x1f
if (hash != this.hash) {
// they have different hashes, but may collide at this level - find a level at which they don't
- var lvl = level
- var top: HashTrieMap[A, B1] = null
- var prev: HashTrieMap[A, B1] = null
- while (thisindex == thatindex) {
- val newlevel = new HashTrieMap[A, B1](1 << thisindex, new Array[HashMap[A, B1]](1), 2)
- if (prev ne null) prev.elems(0) = newlevel else top = newlevel
- prev = newlevel
- lvl += 5
- thatindex = (hash >>> lvl) & 0x1f
- thisindex = (this.hash >>> lvl) & 0x1f
- }
- val bottelems = new Array[HashMap[A,B1]](2)
- val ind = if (thisindex < thatindex) 1 else 0
- bottelems(1 - ind) = this
- bottelems(ind) = new HashMap1[A, B1](key, hash, value, kv)
- val bottom = new HashTrieMap[A,B1]((1 << thisindex) | (1 << thatindex), bottelems, 2)
- if (prev ne null) {
- prev.elems(0) = bottom
- top
- } else bottom
+ val that = new HashMap1[A, B1](key, hash, value, kv)
+ makeHashTrieMap[A,B1](this.hash, this, hash, that, level, 2)
} else {
// 32-bit hash collision (rare, but not impossible)
new HashMapCollision1(hash, ListMap.empty.updated(this.key,this.value).updated(key,value))
@@ -176,39 +222,41 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
override def iterator: Iterator[(A,B)] = Iterator(ensurePair)
override def foreach[U](f: ((A, B)) => U): Unit = f(ensurePair)
+ // this method may be called multiple times in a multithreaded environment, but that's ok
private[HashMap] def ensurePair: (A,B) = if (kv ne null) kv else { kv = (key, value); kv }
- protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[B1]): HashMap[A, B1] = {
- that.updated0(key, hash, level, value, kv, merger)
+ protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = {
+ that.updated0(key, hash, level, value, kv, merger.invert)
}
}
- private[collection] class HashMapCollision1[A, +B](private[HashMap] var hash: Int, var kvs: ListMap[A, B @uV])
+ private[collection] class HashMapCollision1[A, +B](private[collection] val hash: Int, val kvs: ListMap[A, B @uV])
extends HashMap[A, B @uV] {
+ // assert(kvs.size > 1)
override def size = kvs.size
override def get0(key: A, hash: Int, level: Int): Option[B] =
if (hash == this.hash) kvs.get(key) else None
- override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[B1]): HashMap[A, B1] =
+ private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] =
if (hash == this.hash) {
if ((merger eq null) || !kvs.contains(key)) new HashMapCollision1(hash, kvs.updated(key, value))
else new HashMapCollision1(hash, kvs + merger((key, kvs(key)), kv))
} else {
- var m: HashMap[A,B1] = new HashTrieMap[A,B1](0,new Array[HashMap[A,B1]](0),0)
- // might be able to save some ops here, but it doesn't seem to be worth it
- for ((k,v) <- kvs)
- m = m.updated0(k, this.hash, level, v, null, merger)
- m.updated0(key, hash, level, value, kv, merger)
+ val that = new HashMap1(key, hash, value, kv)
+ makeHashTrieMap(this.hash, this, hash, that, level, size + 1)
}
override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] =
if (hash == this.hash) {
val kvs1 = kvs - key
- if (!kvs1.isEmpty)
- new HashMapCollision1(hash, kvs1)
- else
+ if (kvs1.isEmpty)
HashMap.empty[A,B]
+ else if(kvs1.tail.isEmpty) {
+ val kv = kvs1.head
+ new HashMap1[A,B](kv._1,hash,kv._2,kv)
+ } else
+ new HashMapCollision1(hash, kvs1)
} else this
override def iterator: Iterator[(A,B)] = kvs.iterator
@@ -218,7 +266,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
def newhm(lm: ListMap[A, B @uV]) = new HashMapCollision1(hash, lm)
List(newhm(x), newhm(y))
}
- protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[B1]): HashMap[A, B1] = {
+ protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = {
// this can be made more efficient by passing the entire ListMap at once
var m = that
for (p <- kvs) m = m.updated0(p._1, this.hash, level, p._2, p, merger)
@@ -227,11 +275,14 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
}
class HashTrieMap[A, +B](
- private[HashMap] var bitmap: Int,
- private[collection] var elems: Array[HashMap[A, B @uV]],
- private[HashMap] var size0: Int
+ private[collection] val bitmap: Int,
+ private[collection] val elems: Array[HashMap[A, B @uV]],
+ private[collection] val size0: Int
) extends HashMap[A, B @uV] {
+ // assert(Integer.bitCount(bitmap) == elems.length)
+ // assert(elems.length > 1 || (elems.length == 1 && elems(0).isInstanceOf[HashTrieMap[_,_]]))
+
/*
def this (level: Int, m1: HashMap1[A,B], m2: HashMap1[A,B]) = {
this(((m1.hash >>> level) & 0x1f) | ((m2.hash >>> level) & 0x1f), {
@@ -265,18 +316,20 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
None
}
- override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[B1]): HashMap[A, B1] = {
+ private[collection] override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[A, B1]): HashMap[A, B1] = {
val index = (hash >>> level) & 0x1f
val mask = (1 << index)
val offset = Integer.bitCount(bitmap & (mask-1))
if ((bitmap & mask) != 0) {
- val elemsNew = new Array[HashMap[A,B1]](elems.length)
- Array.copy(elems, 0, elemsNew, 0, elems.length)
val sub = elems(offset)
// TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
val subNew = sub.updated0(key, hash, level + 5, value, kv, merger)
- elemsNew(offset) = subNew
- new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size))
+ if(subNew eq sub) this else {
+ val elemsNew = new Array[HashMap[A,B1]](elems.length)
+ Array.copy(elems, 0, elemsNew, 0, elems.length)
+ elemsNew(offset) = subNew
+ new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size))
+ }
} else {
val elemsNew = new Array[HashMap[A,B1]](elems.length + 1)
Array.copy(elems, 0, elemsNew, 0, offset)
@@ -294,16 +347,22 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
val sub = elems(offset)
// TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
val subNew = sub.removed0(key, hash, level + 5)
- if (subNew.isEmpty) {
+ if (subNew eq sub) this
+ else if (subNew.isEmpty) {
val bitmapNew = bitmap ^ mask
if (bitmapNew != 0) {
val elemsNew = new Array[HashMap[A,B]](elems.length - 1)
Array.copy(elems, 0, elemsNew, 0, offset)
Array.copy(elems, offset + 1, elemsNew, offset, elems.length - offset - 1)
val sizeNew = size - sub.size
- new HashTrieMap(bitmapNew, elemsNew, sizeNew)
+ if (elemsNew.length == 1 && !elemsNew(0).isInstanceOf[HashTrieMap[_,_]])
+ elemsNew(0)
+ else
+ new HashTrieMap(bitmapNew, elemsNew, sizeNew)
} else
HashMap.empty[A,B]
+ } else if(elems.length == 1 && !subNew.isInstanceOf[HashTrieMap[_,_]]) {
+ subNew
} else {
val elemsNew = new Array[HashMap[A,B]](elems.length)
Array.copy(elems, 0, elemsNew, 0, elems.length)
@@ -321,7 +380,6 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
}
/*
-
def time(block: =>Unit) = { val t0 = System.nanoTime; block; println("elapsed: " + (System.nanoTime - t0)/1000000.0) }
var mOld = OldHashMap.empty[Int,Int]
var mNew = HashMap.empty[Int,Int]
@@ -337,10 +395,8 @@ time { mOld.iterator.foreach( p => ()) }
time { mNew.iterator.foreach( p => ()) }
time { mNew.iterator.foreach( p => ()) }
time { mNew.iterator.foreach( p => ()) }
-
*/
-
override def foreach[U](f: ((A, B)) => U): Unit = {
var i = 0;
while (i < elems.length) {
@@ -349,18 +405,14 @@ time { mNew.iterator.foreach( p => ()) }
}
}
- private def printBitmap(bm: Int) {
- println(bitString(bm, " "))
- }
-
private def posOf(n: Int, bm: Int) = {
var left = n
var i = -1
var b = bm
while (left >= 0) {
- i += 1
- if ((b & 1) != 0) left -= 1
- b = b >>> 1
+ i += 1
+ if ((b & 1) != 0) left -= 1
+ b = b >>> 1
}
i
}
@@ -368,20 +420,12 @@ time { mNew.iterator.foreach( p => ()) }
override def split: Seq[HashMap[A, B]] = if (size == 1) Seq(this) else {
val nodesize = Integer.bitCount(bitmap)
if (nodesize > 1) {
- // printBitmap(bitmap)
- // println(elems.toList)
-
- // println("subtrees: " + nodesize)
- // println("will split at: " + (nodesize / 2))
val splitpoint = nodesize / 2
val bitsplitpoint = posOf(nodesize / 2, bitmap)
val bm1 = bitmap & (-1 << bitsplitpoint)
val bm2 = bitmap & (-1 >>> (32 - bitsplitpoint))
- // printBitmap(bm1)
- // printBitmap(bm2)
+
val (e1, e2) = elems.splitAt(splitpoint)
- // println(e1.toList)
- // println(e2.toList)
val hm1 = new HashTrieMap(bm1, e1, e1.foldLeft(0)(_ + _.size))
val hm2 = new HashTrieMap(bm2, e2, e2.foldLeft(0)(_ + _.size))
@@ -389,12 +433,10 @@ time { mNew.iterator.foreach( p => ()) }
} else elems(0).split
}
- protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[B1]): HashMap[A, B1] = that match {
+ protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = that match {
case hm: HashMap1[_, _] =>
- // onetrie += 1
this.updated0(hm.key, hm.hash, level, hm.value.asInstanceOf[B1], hm.kv, merger)
case hm: HashTrieMap[_, _] =>
- // bothtries += 1
val that = hm.asInstanceOf[HashTrieMap[A, B1]]
val thiselems = this.elems
val thatelems = that.elems
@@ -402,12 +444,12 @@ time { mNew.iterator.foreach( p => ()) }
var thatbm = that.bitmap
// determine the necessary size for the array
- val subcount = Integer.bitCount(thisbm | thatbm)
+ val subcount = Integer.bitCount(thisbm | thatbm)
// construct a new array of appropriate size
val merged = new Array[HashMap[A, B1]](subcount)
- // run through both bitmaps and add elements to it
+ // run through both bitmaps and add elements to it
var i = 0
var thisi = 0
var thati = 0
@@ -415,13 +457,9 @@ time { mNew.iterator.foreach( p => ()) }
while (i < subcount) {
val thislsb = thisbm ^ (thisbm & (thisbm - 1))
val thatlsb = thatbm ^ (thatbm & (thatbm - 1))
- // if (this.bitmap == -1660585213) { TODO remove
- // printBitmap(thislsb)
- // printBitmap(thatlsb)
- // println("------------------")
- // }
+
+ // collision
if (thislsb == thatlsb) {
- // println("a collision")
val m = thiselems(thisi).merge0(thatelems(thati), level + 5, merger)
totalelems += m.size
merged(i) = m
@@ -437,14 +475,13 @@ time { mNew.iterator.foreach( p => ()) }
val b = thatlsb - 1
if (unsignedCompare(thislsb - 1, thatlsb - 1)) {
- // println("an element from this trie")
val m = thiselems(thisi)
totalelems += m.size
merged(i) = m
thisbm = thisbm & ~thislsb
thisi += 1
- } else {
- // println("an element from that trie")
+ }
+ else {
val m = thatelems(thati)
totalelems += m.size
merged(i) = m
@@ -456,31 +493,12 @@ time { mNew.iterator.foreach( p => ()) }
}
new HashTrieMap[A, B1](this.bitmap | that.bitmap, merged, totalelems)
- case hm: HashMapCollision1[_, _] => that.merge0(this, level, merger)
+ case hm: HashMapCollision1[_, _] => that.merge0(this, level, merger.invert)
case hm: HashMap[_, _] => this
case _ => sys.error("section supposed to be unreachable.")
}
}
- private def check[K](x: HashMap[K, _], y: HashMap[K, _], xy: HashMap[K, _]) = { // TODO remove this debugging helper
- var xs = Set[K]()
- for (elem <- x) xs += elem._1
- var ys = Set[K]()
- for (elem <- y) ys += elem._1
- var union = Set[K]()
- for (elem <- xy) union += elem._1
- if ((xs ++ ys) != union) {
- println("Error.")
- println(x.getClass)
- println(y.getClass)
- println(xs)
- println(ys)
- println(xs ++ ys)
- println(union)
- false
- } else true
- }
-
@SerialVersionUID(2L)
private class SerializationProxy[A,B](@transient private var orig: HashMap[A, B]) extends Serializable {
private def writeObject(out: java.io.ObjectOutputStream) {
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index c4b0c65..87995f7 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,17 +8,17 @@
-package scala.collection
+package scala
+package collection
package immutable
-import annotation.unchecked.{ uncheckedVariance => uV }
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
import generic._
-import collection.parallel.immutable.ParHashSet
+import scala.collection.parallel.immutable.ParHashSet
/** This class implements immutable sets using a hash trie.
*
- * '''Note:''' the builder of a hash set returns specialized representations `EmptySet`,`Set1`,..., `Set4`
- * for sets of `size <= 4`.
+ * '''Note:''' The builder of this hash set may return specialized representations for small sets.
*
* @tparam A the type of the elements contained in this hash set.
*
@@ -26,11 +26,12 @@ import collection.parallel.immutable.ParHashSet
* @author Tiark Rompf
* @version 2.8
* @since 2.3
- * @define Coll immutable.HashSet
+ * @define Coll `immutable.HashSet`
* @define coll immutable hash set
*/
@SerialVersionUID(2L)
-class HashSet[A] extends Set[A]
+class HashSet[A] extends AbstractSet[A]
+ with Set[A]
with GenericSetTemplate[A, HashSet]
with SetLike[A, HashSet[A]]
with CustomParallelizable[A, ParHashSet[A]]
@@ -84,12 +85,12 @@ class HashSet[A] extends Set[A]
}
/** $factoryInfo
- * @define Coll immutable.HashSet
+ * @define Coll `immutable.HashSet`
* @define coll immutable hash set
*
* @author Tiark Rompf
* @since 2.3
- * @define Coll immutable.HashSet
+ * @define Coll `immutable.HashSet`
* @define coll immutable hash set
* @define mayNotTerminateInf
* @define willNotTerminateInf
@@ -102,9 +103,33 @@ object HashSet extends ImmutableSetFactory[HashSet] {
private object EmptyHashSet extends HashSet[Any] { }
+ // utility method to create a HashTrieSet from two leaf HashSets (HashSet1 or HashSetCollision1) with non-colliding hash code)
+ private def makeHashTrieSet[A](hash0:Int, elem0:HashSet[A], hash1:Int, elem1:HashSet[A], level:Int) : HashTrieSet[A] = {
+ val index0 = (hash0 >>> level) & 0x1f
+ val index1 = (hash1 >>> level) & 0x1f
+ if(index0 != index1) {
+ val bitmap = (1 << index0) | (1 << index1)
+ val elems = new Array[HashSet[A]](2)
+ if(index0 < index1) {
+ elems(0) = elem0
+ elems(1) = elem1
+ } else {
+ elems(0) = elem1
+ elems(1) = elem0
+ }
+ new HashTrieSet[A](bitmap, elems, elem0.size + elem1.size)
+ } else {
+ val elems = new Array[HashSet[A]](1)
+ val bitmap = (1 << index0)
+ val child = makeHashTrieSet(hash0, elem0, hash1, elem1, level + 5)
+ elems(0) = child
+ new HashTrieSet[A](bitmap, elems, child.size)
+ }
+ }
+
// TODO: add HashSet2, HashSet3, ...
- class HashSet1[A](private[HashSet] var key: A, private[HashSet] var hash: Int) extends HashSet[A] {
+ class HashSet1[A](private[HashSet] val key: A, private[HashSet] val hash: Int) extends HashSet[A] {
override def size = 1
override def get0(key: A, hash: Int, level: Int): Boolean =
@@ -114,9 +139,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
if (hash == this.hash && key == this.key) this
else {
if (hash != this.hash) {
- //new HashTrieSet[A](level+5, this, new HashSet1(key, hash))
- val m = new HashTrieSet[A](0,new Array[HashSet[A]](0),0) // TODO: could save array alloc
- m.updated0(this.key, this.hash, level).updated0(key, hash, level)
+ makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level)
} else {
// 32-bit hash collision (rare, but not impossible)
new HashSetCollision1(hash, ListSet.empty + this.key + key)
@@ -130,7 +153,7 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override def foreach[U](f: A => U): Unit = f(key)
}
- private[immutable] class HashSetCollision1[A](private[HashSet] var hash: Int, var ks: ListSet[A])
+ private[immutable] class HashSetCollision1[A](private[HashSet] val hash: Int, val ks: ListSet[A])
extends HashSet[A] {
override def size = ks.size
@@ -140,21 +163,17 @@ object HashSet extends ImmutableSetFactory[HashSet] {
override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash) new HashSetCollision1(hash, ks + key)
- else {
- var m: HashSet[A] = new HashTrieSet[A](0,new Array[HashSet[A]](0),0)
- // might be able to save some ops here, but it doesn't seem to be worth it
- for (k <- ks)
- m = m.updated0(k, this.hash, level)
- m.updated0(key, hash, level)
- }
+ else makeHashTrieSet(this.hash, this, hash, new HashSet1(key, hash), level)
override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
if (hash == this.hash) {
val ks1 = ks - key
- if (!ks1.isEmpty)
- new HashSetCollision1(hash, ks1)
- else
+ if(ks1.isEmpty)
HashSet.empty[A]
+ else if(ks1.tail.isEmpty)
+ new HashSet1(ks1.head, hash)
+ else
+ new HashSetCollision1(hash, ks1)
} else this
override def iterator: Iterator[A] = ks.iterator
@@ -177,8 +196,11 @@ object HashSet extends ImmutableSetFactory[HashSet] {
}
- class HashTrieSet[A](private var bitmap: Int, private[collection] var elems: Array[HashSet[A]], private var size0: Int)
+ class HashTrieSet[A](private val bitmap: Int, private[collection] val elems: Array[HashSet[A]], private val size0: Int)
extends HashSet[A] {
+ assert(Integer.bitCount(bitmap) == elems.length)
+ // assertion has to remain disabled until SI-6197 is solved
+ // assert(elems.length > 1 || (elems.length == 1 && elems(0).isInstanceOf[HashTrieSet[_]]))
override def size = size0
@@ -200,13 +222,16 @@ object HashSet extends ImmutableSetFactory[HashSet] {
val mask = (1 << index)
val offset = Integer.bitCount(bitmap & (mask-1))
if ((bitmap & mask) != 0) {
- val elemsNew = new Array[HashSet[A]](elems.length)
- Array.copy(elems, 0, elemsNew, 0, elems.length)
- val sub = elems(offset)
// TODO: might be worth checking if sub is HashTrieSet (-> monomorphic call site)
+ val sub = elems(offset)
val subNew = sub.updated0(key, hash, level + 5)
- elemsNew(offset) = subNew
- new HashTrieSet(bitmap, elemsNew, size + (subNew.size - sub.size))
+ if (sub eq subNew) this
+ else {
+ val elemsNew = new Array[HashSet[A]](elems.length)
+ Array.copy(elems, 0, elemsNew, 0, elems.length)
+ elemsNew(offset) = subNew
+ new HashTrieSet(bitmap, elemsNew, size + (subNew.size - sub.size))
+ }
} else {
val elemsNew = new Array[HashSet[A]](elems.length + 1)
Array.copy(elems, 0, elemsNew, 0, offset)
@@ -225,14 +250,20 @@ object HashSet extends ImmutableSetFactory[HashSet] {
val sub = elems(offset)
// TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
val subNew = sub.removed0(key, hash, level + 5)
- if (subNew.isEmpty) {
+ if (sub eq subNew) this
+ else if (subNew.isEmpty) {
val bitmapNew = bitmap ^ mask
if (bitmapNew != 0) {
val elemsNew = new Array[HashSet[A]](elems.length - 1)
Array.copy(elems, 0, elemsNew, 0, offset)
Array.copy(elems, offset + 1, elemsNew, offset, elems.length - offset - 1)
val sizeNew = size - sub.size
- new HashTrieSet(bitmapNew, elemsNew, sizeNew)
+ // if we have only one child, which is not a HashTrieSet but a self-contained set like
+ // HashSet1 or HashSetCollision1, return the child instead
+ if (elemsNew.length == 1 && !elemsNew(0).isInstanceOf[HashTrieSet[_]])
+ elemsNew(0)
+ else
+ new HashTrieSet(bitmapNew, elemsNew, sizeNew)
} else
HashSet.empty[A]
} else {
diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala
index 9945f6f..96414c0 100644
--- a/src/library/scala/collection/immutable/IndexedSeq.scala
+++ b/src/library/scala/collection/immutable/IndexedSeq.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -22,19 +22,23 @@ trait IndexedSeq[+A] extends Seq[A]
with GenericTraversableTemplate[A, IndexedSeq]
with IndexedSeqLike[A, IndexedSeq[A]] {
override def companion: GenericCompanion[IndexedSeq] = IndexedSeq
- override def toIndexedSeq[B >: A]: IndexedSeq[B] = this
+ override def toIndexedSeq: IndexedSeq[A] = this
+ override def seq: IndexedSeq[A] = this
}
/** $factoryInfo
* The current default implementation of a $Coll is a `Vector`.
* @define coll indexed sequence
- * @define Coll IndexedSeq
+ * @define Coll `IndexedSeq`
*/
object IndexedSeq extends SeqFactory[IndexedSeq] {
- class Impl[A](buf: ArrayBuffer[A]) extends IndexedSeq[A] with Serializable {
+ override lazy val ReusableCBF =
+ scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
+ class Impl[A](buf: ArrayBuffer[A]) extends AbstractSeq[A] with IndexedSeq[A] with Serializable {
def length = buf.length
def apply(idx: Int) = buf.apply(idx)
}
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = new GenericCanBuildFrom[A]
def newBuilder[A]: Builder[A, IndexedSeq[A]] = Vector.newBuilder[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] =
+ ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
}
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index ea68b53..ab1faf3 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -1,12 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import scala.collection.generic.{ CanBuildFrom, BitOperations }
@@ -18,17 +19,17 @@ import scala.collection.mutable.{ Builder, MapBuilder }
private[immutable] object IntMapUtils extends BitOperations.Int {
def branchMask(i: Int, j: Int) = highestOneBit(i ^ j)
- def join[T](p1 : Int, t1 : IntMap[T], p2 : Int, t2 : IntMap[T]) : IntMap[T] = {
- val m = branchMask(p1, p2);
- val p = mask(p1, m);
+ def join[T](p1: Int, t1: IntMap[T], p2: Int, t2: IntMap[T]): IntMap[T] = {
+ val m = branchMask(p1, p2)
+ val p = mask(p1, m)
if (zero(p1, m)) IntMap.Bin(p, m, t1, t2)
- else IntMap.Bin(p, m, t2, t1);
+ else IntMap.Bin(p, m, t2, t1)
}
- def bin[T](prefix : Int, mask : Int, left : IntMap[T], right : IntMap[T]) : IntMap[T] = (left, right) match {
- case (left, IntMap.Nil) => left;
- case (IntMap.Nil, right) => right;
- case (left, right) => IntMap.Bin(prefix, mask, left, right);
+ def bin[T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]): IntMap[T] = (left, right) match {
+ case (left, IntMap.Nil) => left
+ case (IntMap.Nil, right) => right
+ case (left, right) => IntMap.Bin(prefix, mask, left, right)
}
}
@@ -36,7 +37,7 @@ import IntMapUtils._
/** A companion object for integer maps.
*
- * @define Coll IntMap
+ * @define Coll `IntMap`
* @define mapCanBuildFromInfo
* The standard `CanBuildFrom` instance for `$Coll` objects.
* The created value is an instance of class `MapCanBuildFrom`.
@@ -50,9 +51,9 @@ object IntMap {
}
def empty[T] : IntMap[T] = IntMap.Nil;
- def singleton[T](key : Int, value : T) : IntMap[T] = IntMap.Tip(key, value);
- def apply[T](elems : (Int, T)*) : IntMap[T] =
- elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2));
+ def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value);
+ def apply[T](elems: (Int, T)*): IntMap[T] =
+ elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2))
private[immutable] case object Nil extends IntMap[Nothing] {
// Important! Without this equals method in place, an infinite
@@ -66,15 +67,15 @@ object IntMap {
}
}
- private[immutable] case class Tip[+T](key : Int, value : T) extends IntMap[T]{
+ private[immutable] case class Tip[+T](key: Int, value: T) extends IntMap[T]{
def withValue[S](s: S) =
- if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]];
- else IntMap.Tip(key, s);
+ if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]]
+ else IntMap.Tip(key, s)
}
- private[immutable] case class Bin[+T](prefix : Int, mask : Int, left : IntMap[T], right : IntMap[T]) extends IntMap[T]{
- def bin[S](left : IntMap[S], right : IntMap[S]) : IntMap[S] = {
- if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]];
- else IntMap.Bin[S](prefix, mask, left, right);
+ private[immutable] case class Bin[+T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]) extends IntMap[T] {
+ def bin[S](left: IntMap[S], right: IntMap[S]): IntMap[S] = {
+ if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]]
+ else IntMap.Bin[S](prefix, mask, left, right)
}
}
@@ -83,60 +84,60 @@ object IntMap {
import IntMap._
// Iterator over a non-empty IntMap.
-private[immutable] abstract class IntMapIterator[V, T](it : IntMap[V]) extends Iterator[T]{
+private[immutable] abstract class IntMapIterator[V, T](it: IntMap[V]) extends AbstractIterator[T] {
// Basically this uses a simple stack to emulate conversion over the tree. However
// because we know that Ints are at least 32 bits we can have at most 32 IntMap.Bins and
// one IntMap.Tip sitting on the tree at any point. Therefore we know the maximum stack
// depth is 33 and
- var index = 0;
- var buffer = new Array[AnyRef](33);
+ var index = 0
+ var buffer = new Array[AnyRef](33)
def pop = {
- index -= 1;
- buffer(index).asInstanceOf[IntMap[V]];
+ index -= 1
+ buffer(index).asInstanceOf[IntMap[V]]
}
- def push(x : IntMap[V]) {
- buffer(index) = x.asInstanceOf[AnyRef];
- index += 1;
+ def push(x: IntMap[V]) {
+ buffer(index) = x.asInstanceOf[AnyRef]
+ index += 1
}
- push(it);
+ push(it)
/**
* What value do we assign to a tip?
*/
- def valueOf(tip : IntMap.Tip[V]) : T;
+ def valueOf(tip: IntMap.Tip[V]): T
- def hasNext = index != 0;
- final def next : T =
+ def hasNext = index != 0
+ final def next: T =
pop match {
case IntMap.Bin(_,_, t at IntMap.Tip(_, _), right) => {
- push(right);
- valueOf(t);
+ push(right)
+ valueOf(t)
}
case IntMap.Bin(_, _, left, right) => {
- push(right);
- push(left);
- next;
+ push(right)
+ push(left)
+ next
}
- case t at IntMap.Tip(_, _) => valueOf(t);
+ case t at IntMap.Tip(_, _) => valueOf(t)
// This should never happen. We don't allow IntMap.Nil in subtrees of the IntMap
// and don't return an IntMapIterator for IntMap.Nil.
- case IntMap.Nil => sys.error("Empty maps not allowed as subtrees");
+ case IntMap.Nil => sys.error("Empty maps not allowed as subtrees")
}
}
-private[immutable] class IntMapEntryIterator[V](it : IntMap[V]) extends IntMapIterator[V, (Int, V)](it){
- def valueOf(tip : IntMap.Tip[V]) = (tip.key, tip.value);
+private[immutable] class IntMapEntryIterator[V](it: IntMap[V]) extends IntMapIterator[V, (Int, V)](it) {
+ def valueOf(tip: IntMap.Tip[V]) = (tip.key, tip.value)
}
-private[immutable] class IntMapValueIterator[V](it : IntMap[V]) extends IntMapIterator[V, V](it){
- def valueOf(tip : IntMap.Tip[V]) = tip.value
+private[immutable] class IntMapValueIterator[V](it: IntMap[V]) extends IntMapIterator[V, V](it) {
+ def valueOf(tip: IntMap.Tip[V]) = tip.value
}
-private[immutable] class IntMapKeyIterator[V](it : IntMap[V]) extends IntMapIterator[V, Int](it){
- def valueOf(tip : IntMap.Tip[V]) = tip.key
+private[immutable] class IntMapKeyIterator[V](it: IntMap[V]) extends IntMapIterator[V, Int](it) {
+ def valueOf(tip: IntMap.Tip[V]) = tip.key
}
import IntMap._
@@ -145,23 +146,26 @@ import IntMap._
* <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Integer Maps</a>
* by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
*
- * Note: This class is as of 2.8 largely superseded by HashMap.
+ * '''Note:''' This class is as of 2.8 largely superseded by HashMap.
*
* @tparam T type of the values associated with integer keys.
*
* @since 2.7
- * @define Coll immutable.IntMap
+ * @define Coll `immutable.IntMap`
* @define coll immutable integer map
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap[T]] {
- override def empty: IntMap[T] = IntMap.Nil;
+sealed abstract class IntMap[+T] extends AbstractMap[Int, T]
+ with Map[Int, T]
+ with MapLike[Int, T, IntMap[T]] {
+
+ override def empty: IntMap[T] = IntMap.Nil
override def toList = {
- val buffer = new scala.collection.mutable.ListBuffer[(Int, T)];
- foreach(buffer += _);
- buffer.toList;
+ val buffer = new scala.collection.mutable.ListBuffer[(Int, T)]
+ foreach(buffer += _)
+ buffer.toList
}
/**
@@ -169,109 +173,112 @@ sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap
*
* @return an iterator over pairs of integer keys and corresponding values.
*/
- def iterator : Iterator[(Int, T)] = this match {
- case IntMap.Nil => Iterator.empty;
- case _ => new IntMapEntryIterator(this);
+ def iterator: Iterator[(Int, T)] = this match {
+ case IntMap.Nil => Iterator.empty
+ case _ => new IntMapEntryIterator(this)
}
/**
* Loops over the key, value pairs of the map in unsigned order of the keys.
*/
- override final def foreach[U](f : ((Int, T)) => U) : Unit = this match {
- case IntMap.Bin(_, _, left, right) => {left.foreach(f); right.foreach(f); }
- case IntMap.Tip(key, value) => f((key, value));
- case IntMap.Nil => {};
+ override final def foreach[U](f: ((Int, T)) => U): Unit = this match {
+ case IntMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
+ case IntMap.Tip(key, value) => f((key, value))
+ case IntMap.Nil =>
}
- override def keysIterator : Iterator[Int] = this match {
- case IntMap.Nil => Iterator.empty;
- case _ => new IntMapKeyIterator(this);
+ override def keysIterator: Iterator[Int] = this match {
+ case IntMap.Nil => Iterator.empty
+ case _ => new IntMapKeyIterator(this)
}
/**
- * Loop over the keys of the map. The same as keys.foreach(f), but may
+ * Loop over the keys of the map. The same as `keys.foreach(f)`, but may
* be more efficient.
*
* @param f The loop body
*/
- final def foreachKey(f : Int => Unit) : Unit = this match {
- case IntMap.Bin(_, _, left, right) => {left.foreachKey(f); right.foreachKey(f); }
- case IntMap.Tip(key, _) => f(key);
- case IntMap.Nil => {}
+ final def foreachKey(f: Int => Unit): Unit = this match {
+ case IntMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) }
+ case IntMap.Tip(key, _) => f(key)
+ case IntMap.Nil =>
}
- override def valuesIterator : Iterator[T] = this match {
- case IntMap.Nil => Iterator.empty;
- case _ => new IntMapValueIterator(this);
+ override def valuesIterator: Iterator[T] = this match {
+ case IntMap.Nil => Iterator.empty
+ case _ => new IntMapValueIterator(this)
}
/**
- * Loop over the keys of the map. The same as keys.foreach(f), but may
+ * Loop over the keys of the map. The same as `keys.foreach(f)`, but may
* be more efficient.
*
* @param f The loop body
*/
- final def foreachValue(f : T => Unit) : Unit = this match {
- case IntMap.Bin(_, _, left, right) => {left.foreachValue(f); right.foreachValue(f); }
- case IntMap.Tip(_, value) => f(value);
- case IntMap.Nil => {};
+ final def foreachValue(f: T => Unit): Unit = this match {
+ case IntMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) }
+ case IntMap.Tip(_, value) => f(value)
+ case IntMap.Nil =>
}
override def stringPrefix = "IntMap"
- override def isEmpty = this == IntMap.Nil;
+ override def isEmpty = this == IntMap.Nil
- override def filter(f : ((Int, T)) => Boolean) : IntMap[T] = this match {
+ override def filter(f: ((Int, T)) => Boolean): IntMap[T] = this match {
case IntMap.Bin(prefix, mask, left, right) => {
- val (newleft, newright) = (left.filter(f), right.filter(f));
- if ((left eq newleft) && (right eq newright)) this;
- else bin(prefix, mask, newleft, newright);
+ val (newleft, newright) = (left.filter(f), right.filter(f))
+ if ((left eq newleft) && (right eq newright)) this
+ else bin(prefix, mask, newleft, newright)
}
case IntMap.Tip(key, value) =>
if (f((key, value))) this
- else IntMap.Nil;
- case IntMap.Nil => IntMap.Nil;
+ else IntMap.Nil
+ case IntMap.Nil => IntMap.Nil
}
- def transform[S](f : (Int, T) => S) : IntMap[S] = this match {
- case b at IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f));
- case t at IntMap.Tip(key, value) => t.withValue(f(key, value));
- case IntMap.Nil => IntMap.Nil;
+ def transform[S](f: (Int, T) => S): IntMap[S] = this match {
+ case b at IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f))
+ case t at IntMap.Tip(key, value) => t.withValue(f(key, value))
+ case IntMap.Nil => IntMap.Nil
}
- final override def size : Int = this match {
- case IntMap.Nil => 0;
- case IntMap.Tip(_, _) => 1;
- case IntMap.Bin(_, _, left, right) => left.size + right.size;
+ final override def size: Int = this match {
+ case IntMap.Nil => 0
+ case IntMap.Tip(_, _) => 1
+ case IntMap.Bin(_, _, left, right) => left.size + right.size
}
- final def get(key : Int) : Option[T] = this match {
- case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key);
- case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None;
- case IntMap.Nil => None;
+ final def get(key: Int): Option[T] = this match {
+ case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key)
+ case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None
+ case IntMap.Nil => None
}
- final override def getOrElse[S >: T](key : Int, default : =>S) : S = this match {
- case IntMap.Nil => default;
- case IntMap.Tip(key2, value) => if (key == key2) value else default;
- case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default);
+ final override def getOrElse[S >: T](key: Int, default: => S): S = this match {
+ case IntMap.Nil => default
+ case IntMap.Tip(key2, value) => if (key == key2) value else default
+ case IntMap.Bin(prefix, mask, left, right) =>
+ if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default)
}
- final override def apply(key : Int) : T = this match {
- case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key);
- case IntMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found");
- case IntMap.Nil => sys.error("key not found");
+ final override def apply(key: Int): T = this match {
+ case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key)
+ case IntMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found")
+ case IntMap.Nil => sys.error("key not found")
}
def + [S >: T] (kv: (Int, S)): IntMap[S] = updated(kv._1, kv._2)
- override def updated[S >: T](key : Int, value : S) : IntMap[S] = this match {
- case IntMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this);
- else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updated(key, value), right)
- else IntMap.Bin(prefix, mask, left, right.updated(key, value));
- case IntMap.Tip(key2, value2) => if (key == key2) IntMap.Tip(key, value);
- else join(key, IntMap.Tip(key, value), key2, this);
- case IntMap.Nil => IntMap.Tip(key, value);
+ override def updated[S >: T](key: Int, value: S): IntMap[S] = this match {
+ case IntMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updated(key, value), right)
+ else IntMap.Bin(prefix, mask, left, right.updated(key, value))
+ case IntMap.Tip(key2, value2) =>
+ if (key == key2) IntMap.Tip(key, value)
+ else join(key, IntMap.Tip(key, value), key2, this)
+ case IntMap.Nil => IntMap.Tip(key, value)
}
/**
@@ -280,7 +287,7 @@ sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap
* Equivalent to:
* {{{
* this.get(key) match {
- * case None => this.update(key, value);
+ * case None => this.update(key, value)
* case Some(oldvalue) => this.update(key, f(oldvalue, value)
* }
* }}}
@@ -291,52 +298,55 @@ sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap
* @param f The function used to resolve conflicts.
* @return The updated map.
*/
- def updateWith[S >: T](key : Int, value : S, f : (T, S) => S) : IntMap[S] = this match {
- case IntMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this);
- else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
- else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f));
- case IntMap.Tip(key2, value2) => if (key == key2) IntMap.Tip(key, f(value2, value));
- else join(key, IntMap.Tip(key, value), key2, this);
- case IntMap.Nil => IntMap.Tip(key, value);
+ def updateWith[S >: T](key: Int, value: S, f: (T, S) => S): IntMap[S] = this match {
+ case IntMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
+ else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
+ case IntMap.Tip(key2, value2) =>
+ if (key == key2) IntMap.Tip(key, f(value2, value))
+ else join(key, IntMap.Tip(key, value), key2, this)
+ case IntMap.Nil => IntMap.Tip(key, value)
}
- def - (key : Int) : IntMap[T] = this match {
+ def - (key: Int): IntMap[T] = this match {
case IntMap.Bin(prefix, mask, left, right) =>
- if (!hasMatch(key, prefix, mask)) this;
- else if (zero(key, mask)) bin(prefix, mask, left - key, right);
- else bin(prefix, mask, left, right - key);
+ if (!hasMatch(key, prefix, mask)) this
+ else if (zero(key, mask)) bin(prefix, mask, left - key, right)
+ else bin(prefix, mask, left, right - key)
case IntMap.Tip(key2, _) =>
- if (key == key2) IntMap.Nil;
- else this;
- case IntMap.Nil => IntMap.Nil;
+ if (key == key2) IntMap.Nil
+ else this
+ case IntMap.Nil => IntMap.Nil
}
/**
- * A combined transform and filter function. Returns an IntMap such that for each (key, value) mapping
- * in this map, if f(key, value) == None the map contains no mapping for key, and if <code>f(key, value)
+ * A combined transform and filter function. Returns an `IntMap` such that
+ * for each `(key, value)` mapping in this map, if `f(key, value) == None`
+ * the map contains no mapping for key, and if `f(key, value)`.
*
* @tparam S The type of the values in the resulting `LongMap`.
* @param f The transforming function.
* @return The modified map.
*/
- def modifyOrRemove[S](f : (Int, T) => Option[S]) : IntMap[S] = this match {
- case IntMap.Bin(prefix, mask, left, right) => {
- val newleft = left.modifyOrRemove(f);
- val newright = right.modifyOrRemove(f);
- if ((left eq newleft) && (right eq newright)) this.asInstanceOf[IntMap[S]];
- else bin(prefix, mask, newleft, newright)
- }
+ def modifyOrRemove[S](f: (Int, T) => Option[S]): IntMap[S] = this match {
+ case IntMap.Bin(prefix, mask, left, right) =>
+ val newleft = left.modifyOrRemove(f)
+ val newright = right.modifyOrRemove(f)
+ if ((left eq newleft) && (right eq newright)) this.asInstanceOf[IntMap[S]]
+ else bin(prefix, mask, newleft, newright)
case IntMap.Tip(key, value) => f(key, value) match {
- case None => IntMap.Nil;
+ case None =>
+ IntMap.Nil
case Some(value2) =>
//hack to preserve sharing
if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap[S]]
- else IntMap.Tip(key, value2);
+ else IntMap.Tip(key, value2)
}
- case IntMap.Nil => IntMap.Nil;
+ case IntMap.Nil =>
+ IntMap.Nil
}
-
/**
* Forms a union map with that map, using the combining function to resolve conflicts.
*
@@ -345,31 +355,31 @@ sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap
* @param f The function used to resolve conflicts between two mappings.
* @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
*/
- def unionWith[S >: T](that : IntMap[S], f : (Int, S, S) => S) : IntMap[S] = (this, that) match{
+ def unionWith[S >: T](that: IntMap[S], f: (Int, S, S) => S): IntMap[S] = (this, that) match{
case (IntMap.Bin(p1, m1, l1, r1), that@(IntMap.Bin(p2, m2, l2, r2))) =>
if (shorter(m1, m2)) {
- if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that);
- else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1);
- else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f));
+ if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
+ else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1)
+ else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f))
} else if (shorter(m2, m1)){
- if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that);
- else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2);
- else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f));
+ if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
+ else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2)
+ else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f))
}
else {
- if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f));
- else join(p1, this, p2, that);
+ if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f))
+ else join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
}
- case (IntMap.Tip(key, value), x) => x.updateWith(key, value, (x, y) => f(key, y, x));
- case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y));
- case (IntMap.Nil, x) => x;
- case (x, IntMap.Nil) => x;
+ case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x))
+ case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y))
+ case (IntMap.Nil, x) => x
+ case (x, IntMap.Nil) => x
}
/**
- * Forms the intersection of these two maps with a combining function. The resulting map is
- * a map that has only keys present in both maps and has values produced from the original mappings
- * by combining them with f.
+ * Forms the intersection of these two maps with a combining function. The
+ * resulting map is a map that has only keys present in both maps and has
+ * values produced from the original mappings by combining them with `f`.
*
* @tparam S The type of values in `that`.
* @tparam R The type of values in the resulting `LongMap`.
@@ -377,57 +387,58 @@ sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap
* @param f The combining function.
* @return Intersection of `this` and `that`, with values for identical keys produced by function `f`.
*/
- def intersectionWith[S, R](that : IntMap[S], f : (Int, T, S) => R) : IntMap[R] = (this, that) match {
+ def intersectionWith[S, R](that: IntMap[S], f: (Int, T, S) => R): IntMap[R] = (this, that) match {
case (IntMap.Bin(p1, m1, l1, r1), that at IntMap.Bin(p2, m2, l2, r2)) =>
if (shorter(m1, m2)) {
- if (!hasMatch(p2, p1, m1)) IntMap.Nil;
- else if (zero(p2, m1)) l1.intersectionWith(that, f);
- else r1.intersectionWith(that, f);
- } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f));
+ if (!hasMatch(p2, p1, m1)) IntMap.Nil
+ else if (zero(p2, m1)) l1.intersectionWith(that, f)
+ else r1.intersectionWith(that, f)
+ } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f))
else {
- if (!hasMatch(p1, p2, m2)) IntMap.Nil;
- else if (zero(p1, m2)) this.intersectionWith(l2, f);
- else this.intersectionWith(r2, f);
+ if (!hasMatch(p1, p2, m2)) IntMap.Nil
+ else if (zero(p1, m2)) this.intersectionWith(l2, f)
+ else this.intersectionWith(r2, f)
}
case (IntMap.Tip(key, value), that) => that.get(key) match {
- case None => IntMap.Nil;
- case Some(value2) => IntMap.Tip(key, f(key, value, value2));
+ case None => IntMap.Nil
+ case Some(value2) => IntMap.Tip(key, f(key, value, value2))
}
case (_, IntMap.Tip(key, value)) => this.get(key) match {
- case None => IntMap.Nil;
- case Some(value2) => IntMap.Tip(key, f(key, value2, value));
+ case None => IntMap.Nil
+ case Some(value2) => IntMap.Tip(key, f(key, value2, value))
}
- case (_, _) => IntMap.Nil;
+ case (_, _) => IntMap.Nil
}
/**
- * Left biased intersection. Returns the map that has all the same mappings as this but only for keys
- * which are present in the other map.
+ * Left biased intersection. Returns the map that has all the same mappings
+ * as this but only for keys which are present in the other map.
*
* @tparam R The type of values in `that`.
* @param that The map to intersect with.
* @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`.
*/
- def intersection[R](that : IntMap[R]) : IntMap[T] = this.intersectionWith(that, (key : Int, value : T, value2 : R) => value);
+ def intersection[R](that: IntMap[R]): IntMap[T] =
+ this.intersectionWith(that, (key: Int, value: T, value2: R) => value)
- def ++[S >: T](that : IntMap[S]) =
+ def ++[S >: T](that: IntMap[S]) =
this.unionWith[S](that, (key, x, y) => y)
/**
* The entry with the lowest key value considered in unsigned order.
*/
- final def firstKey : Int = this match {
- case Bin(_, _, l, r) => l.firstKey;
- case Tip(k, v) => k;
+ final def firstKey: Int = this match {
+ case Bin(_, _, l, r) => l.firstKey
+ case Tip(k, v) => k
case IntMap.Nil => sys.error("Empty set")
}
/**
* The entry with the highest key value considered in unsigned order.
*/
- final def lastKey : Int = this match {
- case Bin(_, _, l, r) => r.lastKey;
- case Tip(k, v) => k;
+ final def lastKey: Int = this match {
+ case Bin(_, _, l, r) => r.lastKey
+ case Tip(k, v) => k
case IntMap.Nil => sys.error("Empty set")
}
}
diff --git a/src/library/scala/collection/immutable/Iterable.scala b/src/library/scala/collection/immutable/Iterable.scala
index 81579ef..cc64d8f 100644
--- a/src/library/scala/collection/immutable/Iterable.scala
+++ b/src/library/scala/collection/immutable/Iterable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -18,7 +18,7 @@ import parallel.immutable.ParIterable
/** A base trait for iterable collections that are guaranteed immutable.
* $iterableInfo
*
- * @define Coll immutable.Iterable
+ * @define Coll `immutable.Iterable`
* @define coll immutable iterable collection
*/
trait Iterable[+A] extends Traversable[A]
@@ -34,10 +34,10 @@ trait Iterable[+A] extends Traversable[A]
}
/** $factoryInfo
- * @define Coll immutable.Iterable
+ * @define Coll `immutable.Iterable`
* @define coll immutable iterable collection
*/
object Iterable extends TraversableFactory[Iterable] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, Iterable[A]] = new mutable.ListBuffer
}
diff --git a/src/library/scala/collection/immutable/LinearSeq.scala b/src/library/scala/collection/immutable/LinearSeq.scala
index ab8b632..5ede6d9 100644
--- a/src/library/scala/collection/immutable/LinearSeq.scala
+++ b/src/library/scala/collection/immutable/LinearSeq.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -23,14 +23,15 @@ trait LinearSeq[+A] extends Seq[A]
with GenericTraversableTemplate[A, LinearSeq]
with LinearSeqLike[A, LinearSeq[A]] {
override def companion: GenericCompanion[LinearSeq] = LinearSeq
+ override def seq: LinearSeq[A] = this
}
/** $factoryInfo
* The current default implementation of a $Coll is a `List`.
* @define coll immutable linear sequence
- * @define Coll immutable.LinearSeq
+ * @define Coll `immutable.LinearSeq`
*/
object LinearSeq extends SeqFactory[LinearSeq] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, LinearSeq[A]] = new mutable.ListBuffer
}
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 684cee6..2d6952f 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,7 +13,8 @@ package immutable
import generic._
import mutable.{Builder, ListBuffer}
-import annotation.tailrec
+import scala.annotation.tailrec
+import java.io._
/** A class for immutable linked lists representing ordered collections
* of elements of type.
@@ -54,13 +55,20 @@ import annotation.tailrec
* val shorter = mainList.tail // costs nothing as it uses the same 2::1::Nil instances as mainList
* }}}
*
+ * @note The functional list is characterized by persistence and structural sharing, thus offering considerable
+ * performance and space consumption benefits in some scenarios if used correctly.
+ * However, note that objects having multiple references into the same functional list (that is,
+ * objects that rely on structural sharing), will be serialized and deserialized with multiple lists, one for
+ * each reference to it. I.e. structural sharing is lost after serialization/deserialization.
+ *
* @author Martin Odersky and others
* @version 2.8
* @since 1.0
- * @see [["http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#lists" "Scala's Collection Library overview"]]
+ * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#lists "Scala's Collection Library overview"]]
* section on `Lists` for more information.
*
* @define coll list
+ * @define Coll `List`
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `List[B]` because an implicit of type `CanBuildFrom[List, B, That]`
* is defined in object `List`.
@@ -73,7 +81,8 @@ import annotation.tailrec
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-sealed abstract class List[+A] extends LinearSeq[A]
+sealed abstract class List[+A] extends AbstractSeq[A]
+ with LinearSeq[A]
with Product
with GenericTraversableTemplate[A, List]
with LinearSeqOptimized[A, List[A]] {
@@ -91,8 +100,12 @@ sealed abstract class List[+A] extends LinearSeq[A]
* @param x the element to prepend.
* @return a list which contains `x` as first element and
* which continues with this list.
- * @example `1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)`
+ *
* @usecase def ::(x: A): List[A]
+ * @inheritdoc
+ *
+ * Example:
+ * {{{1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)}}}
*/
def ::[B >: A] (x: B): List[B] =
new scala.collection.immutable.::(x, this)
@@ -101,11 +114,16 @@ sealed abstract class List[+A] extends LinearSeq[A]
* @param prefix The list elements to prepend.
* @return a list resulting from the concatenation of the given
* list `prefix` and this list.
- * @example `List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)`
+ *
* @usecase def :::(prefix: List[A]): List[A]
+ * @inheritdoc
+ *
+ * Example:
+ * {{{List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)}}}
*/
def :::[B >: A](prefix: List[B]): List[B] =
if (isEmpty) prefix
+ else if (prefix.isEmpty) this
else (new ListBuffer[B] ++= prefix).prependToList(this)
/** Adds the elements of a given list in reverse order in front of this list.
@@ -114,7 +132,9 @@ sealed abstract class List[+A] extends LinearSeq[A]
*
* @param prefix the prefix to reverse and then prepend
* @return the concatenation of the reversed prefix and the current list.
+ *
* @usecase def reverse_:::(prefix: List[A]): List[A]
+ * @inheritdoc
*/
def reverse_:::[B >: A](prefix: List[B]): List[B] = {
var these: List[B] = this
@@ -128,13 +148,15 @@ sealed abstract class List[+A] extends LinearSeq[A]
/** Builds a new list by applying a function to all elements of this list.
* Like `xs map f`, but returns `xs` unchanged if function
- * `f` maps all elements to themselves (wrt eq).
+ * `f` maps all elements to themselves (as determined by `eq`).
*
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
* @return a list resulting from applying the given function
* `f` to each element of this list and collecting the results.
+ *
* @usecase def mapConserve(f: A => A): List[A]
+ * @inheritdoc
*/
def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = {
@tailrec
@@ -202,8 +224,18 @@ sealed abstract class List[+A] extends LinearSeq[A]
these
}
+ /**
+ * @example {{{
+ * // Given a list
+ * val letters = List('a','b','c','d','e')
+ *
+ * // `slice` returns all elements beginning at index `from` and afterwards,
+ * // up until index `until` (excluding index `until`.)
+ * letters.slice(1,3) // Returns List('b','c')
+ * }}}
+ */
override def slice(from: Int, until: Int): List[A] = {
- val lo = math.max(from, 0)
+ val lo = scala.math.max(from, 0)
if (until <= lo || isEmpty) Nil
else this drop lo take (until - lo)
}
@@ -269,6 +301,9 @@ sealed abstract class List[+A] extends LinearSeq[A]
}
result
}
+
+ override def foldRight[B](z: B)(op: (A, B) => B): B =
+ reverse.foldLeft(z)((right, left) => op(left, right))
override def stringPrefix = "List"
@@ -276,117 +311,17 @@ sealed abstract class List[+A] extends LinearSeq[A]
if (isEmpty) Stream.Empty
else new Stream.Cons(head, tail.toStream)
- /** Like <code>span</code> but with the predicate inverted.
- */
- @deprecated("use `span { x => !p(x) }` instead", "2.8.0")
- def break(p: A => Boolean): (List[A], List[A]) = span { x => !p(x) }
-
- @deprecated("use `filterNot' instead", "2.8.0")
- def remove(p: A => Boolean): List[A] = filterNot(p)
-
- /** Computes the difference between this list and the given list
- * `that`.
- *
- * @param that the list of elements to remove from this list.
- * @return this list without the elements of the given list
- * `that`.
- */
- @deprecated("use `list1 filterNot (list2 contains)` instead", "2.8.0")
- def -- [B >: A](that: List[B]): List[B] = {
- val b = new ListBuffer[B]
- var these = this
- while (!these.isEmpty) {
- if (!that.contains(these.head)) b += these.head
- these = these.tail
- }
- b.toList
- }
-
- /** Computes the difference between this list and the given object
- * `x`.
- *
- * @param x the object to remove from this list.
- * @return this list without occurrences of the given object
- * `x`.
- */
- @deprecated("use `filterNot (_ == x)` instead", "2.8.0")
- def - [B >: A](x: B): List[B] = {
- val b = new ListBuffer[B]
+ @inline override final
+ def foreach[B](f: A => B) {
var these = this
while (!these.isEmpty) {
- if (these.head != x) b += these.head
+ f(these.head)
these = these.tail
}
- b.toList
}
- @deprecated("use `distinct' instead", "2.8.0")
+ @deprecated("use `distinct` instead", "2.8.0")
def removeDuplicates: List[A] = distinct
-
- @deprecated("use `sortWith' instead", "2.8.0")
- def sort(lt : (A,A) => Boolean): List[A] = {
- /** Merge two already-sorted lists */
- def merge(l1: List[A], l2: List[A]): List[A] = {
- val res = new ListBuffer[A]
- var left1 = l1
- var left2 = l2
-
- while (!left1.isEmpty && !left2.isEmpty) {
- if(lt(left1.head, left2.head)) {
- res += left1.head
- left1 = left1.tail
- } else {
- res += left2.head
- left2 = left2.tail
- }
- }
-
- res ++= left1
- res ++= left2
-
- res.toList
- }
-
- /** Split a list into two lists of about the same size */
- def split(lst: List[A]) = {
- val res1 = new ListBuffer[A]
- val res2 = new ListBuffer[A]
- var left = lst
-
- while (!left.isEmpty) {
- res1 += left.head
- left = left.tail
- if (!left.isEmpty) {
- res2 += left.head
- left = left.tail
- }
- }
-
- (res1.toList, res2.toList)
- }
-
-
- /** Merge-sort the specified list */
- def ms(lst: List[A]): List[A] =
- lst match {
- case Nil => lst
- case x :: Nil => lst
- case x :: y :: Nil =>
- if (lt(x,y))
- lst
- else
- y :: x :: Nil
-
- case lst =>
- val (l1, l2) = split(lst)
- val l1s = ms(l1)
- val l2s = ms(l2)
- merge(l1s, l2s)
- }
-
- ms(this)
- }
-
}
/** The empty list.
@@ -404,7 +339,7 @@ case object Nil extends List[Nothing] {
throw new UnsupportedOperationException("tail of empty list")
// Removal of equals method here might lead to an infinite recursion similar to IntMap.equals.
override def equals(that: Any) = that match {
- case that1: collection.Seq[_] => that1.isEmpty
+ case that1: scala.collection.GenSeq[_] => that1.isEmpty
case _ => false
}
}
@@ -423,40 +358,40 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
override def tail : List[B] = tl
override def isEmpty: Boolean = false
- import java.io._
-
- private def writeObject(out: ObjectOutputStream) {
- var xs: List[B] = this
- while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
- out.writeObject(ListSerializeEnd)
- }
-
private def readObject(in: ObjectInputStream) {
- hd = in.readObject.asInstanceOf[B]
+ val firstObject = in.readObject()
+ hd = firstObject.asInstanceOf[B]
assert(hd != ListSerializeEnd)
var current: ::[B] = this
while (true) in.readObject match {
case ListSerializeEnd =>
current.tl = Nil
return
- case a : Any =>
+ case a =>
val list : ::[B] = new ::(a.asInstanceOf[B], Nil)
current.tl = list
current = list
}
}
+
+ private def writeObject(out: ObjectOutputStream) {
+ var xs: List[B] = this
+ while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
+ out.writeObject(ListSerializeEnd)
+ }
}
/** $factoryInfo
* @define coll list
- * @define Coll List
+ * @define Coll `List`
*/
object List extends SeqFactory[List] {
import scala.collection.{Iterable, Seq, IndexedSeq}
/** $genericCanBuildFromInfo */
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, List[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, List[A]] =
+ ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, List[A]] = new ListBuffer[A]
@@ -464,20 +399,18 @@ object List extends SeqFactory[List] {
override def apply[A](xs: A*): List[A] = xs.toList
- /** Create a sorted list with element values
- * `v<sub>n+1</sub> = step(v<sub>n</sub>)`
- * where `v<sub>0</sub> = start`
- * and elements are in the range between `start` (inclusive)
- * and `end` (exclusive)
+ /** Create a sorted list with element values `v,,>n+1,, = step(v,,n,,)`
+ * where `v,,0,, = start` and elements are in the range between `start`
+ * (inclusive) and `end` (exclusive).
*
* @param start the start value of the list
* @param end the end value of the list
- * @param step the increment function of the list, which given `v<sub>n</sub>`,
- * computes `v<sub>n+1</sub>`. Must be monotonically increasing
+ * @param step the increment function of the list, which given `v,,n,,`,
+ * computes `v,,n+1,,`. Must be monotonically increasing
* or decreasing.
- * @return the sorted list of all integers in range [start;end).
+ * @return the sorted list of all integers in range `[start;end)`.
*/
- @deprecated("use `iterate' instead", "2.8.0")
+ @deprecated("use `iterate` instead", "2.8.0")
def range(start: Int, end: Int, step: Int => Int): List[Int] = {
val up = step(start) > start
val down = step(start) < start
@@ -497,9 +430,9 @@ object List extends SeqFactory[List] {
*
* @param n the length of the resulting list
* @param elem the element composing the resulting list
- * @return a list composed of n elements all equal to elem
+ * @return a list composed of `n` elements all equal to `elem`
*/
- @deprecated("use `fill' instead", "2.8.0")
+ @deprecated("use `fill` instead", "2.8.0")
def make[A](n: Int, elem: A): List[A] = {
val b = new ListBuffer[A]
var i = 0
@@ -515,7 +448,7 @@ object List extends SeqFactory[List] {
* @param xss the list of lists that are to be concatenated
* @return the concatenation of all the lists
*/
- @deprecated("use `xss.flatten' instead of `List.flatten(xss)'", "2.8.0")
+ @deprecated("use `xss.flatten` instead of `List.flatten(xss)`", "2.8.0")
def flatten[A](xss: List[List[A]]): List[A] = {
val b = new ListBuffer[A]
for (xs <- xss) {
@@ -533,7 +466,7 @@ object List extends SeqFactory[List] {
* @param xs the list of pairs to unzip
* @return a pair of lists.
*/
- @deprecated("use `xs.unzip' instead of `List.unzip(xs)'", "2.8.0")
+ @deprecated("use `xs.unzip` instead of `List.unzip(xs)`", "2.8.0")
def unzip[A,B](xs: List[(A,B)]): (List[A], List[B]) = {
val b1 = new ListBuffer[A]
val b2 = new ListBuffer[B]
@@ -551,17 +484,16 @@ object List extends SeqFactory[List] {
* @param xs the iterable of pairs to unzip
* @return a pair of lists.
*/
- @deprecated("use `xs.unzip' instead of `List.unzip(xs)'", "2.8.0")
+ @deprecated("use `xs.unzip` instead of `List.unzip(xs)`", "2.8.0")
def unzip[A,B](xs: Iterable[(A,B)]): (List[A], List[B]) =
xs.foldRight[(List[A], List[B])]((Nil, Nil)) {
case ((x, y), (xs, ys)) => (x :: xs, y :: ys)
}
/**
- * Returns the `Left` values in the given `Iterable`
- * of `Either`s.
+ * Returns the `Left` values in the given `Iterable` of `Either`s.
*/
- @deprecated("use `xs collect { case Left(x: A) => x }' instead of `List.lefts(xs)'", "2.8.0")
+ @deprecated("use `xs collect { case Left(x: A) => x }` instead of `List.lefts(xs)`", "2.8.0")
def lefts[A, B](es: Iterable[Either[A, B]]) =
es.foldRight[List[A]](Nil)((e, as) => e match {
case Left(a) => a :: as
@@ -569,9 +501,9 @@ object List extends SeqFactory[List] {
})
/**
- * Returns the `Right` values in the given`Iterable` of `Either`s.
+ * Returns the `Right` values in the given `Iterable` of `Either`s.
*/
- @deprecated("use `xs collect { case Right(x: B) => x }' instead of `List.rights(xs)'", "2.8.0")
+ @deprecated("use `xs collect { case Right(x: B) => x }` instead of `List.rights(xs)`", "2.8.0")
def rights[A, B](es: Iterable[Either[A, B]]) =
es.foldRight[List[B]](Nil)((e, bs) => e match {
case Left(_) => bs
@@ -580,7 +512,7 @@ object List extends SeqFactory[List] {
/** Transforms an Iterable of Eithers into a pair of lists.
*
- * @param xs the iterable of Eithers to separate
+ * @param es the iterable of Eithers to separate
* @return a pair of lists.
*/
@deprecated("use `(for (Left(x) <- es) yield x, for (Right(x) <- es) yield x)` instead", "2.8.0")
@@ -596,7 +528,7 @@ object List extends SeqFactory[List] {
* @return a list that contains the elements returned by successive
* calls to `it.next`
*/
- @deprecated("use `it.toList' instead of `List.toList(it)'", "2.8.0")
+ @deprecated("use `it.toList` instead of `List.toList(it)`", "2.8.0")
def fromIterator[A](it: Iterator[A]): List[A] = it.toList
/** Converts an array into a list.
@@ -605,7 +537,7 @@ object List extends SeqFactory[List] {
* @return a list that contains the same elements than `arr`
* in the same order
*/
- @deprecated("use `array.toList' instead of `List.fromArray(array)'", "2.8.0")
+ @deprecated("use `array.toList` instead of `List.fromArray(array)`", "2.8.0")
def fromArray[A](arr: Array[A]): List[A] = fromArray(arr, 0, arr.length)
/** Converts a range of an array into a list.
@@ -616,7 +548,7 @@ object List extends SeqFactory[List] {
* @return a list that contains the same elements than `arr`
* in the same order
*/
- @deprecated("use `array.view(start, end).toList' instead of `List.fromArray(array, start, end)'", "2.8.0")
+ @deprecated("use `array.view(start, end).toList` instead of `List.fromArray(array, start, end)`", "2.8.0")
def fromArray[A](arr: Array[A], start: Int, len: Int): List[A] = {
var res: List[A] = Nil
var i = start + len
@@ -627,80 +559,15 @@ object List extends SeqFactory[List] {
res
}
- /** Parses a string which contains substrings separated by a
- * separator character and returns a list of all substrings.
- *
- * @param str the string to parse
- * @param separator the separator character
- * @return the list of substrings
- */
- @deprecated("use `str.split(separator).toList' instead of `List.fromString(str, separator)'", "2.8.0")
- def fromString(str: String, separator: Char): List[String] = {
- var words: List[String] = Nil
- var pos = str.length()
- while (pos > 0) {
- val pos1 = str.lastIndexOf(separator, pos - 1)
- if (pos1 + 1 < pos)
- words = str.substring(pos1 + 1, pos) :: words
- pos = pos1
- }
- words
- }
-
- /** Returns the given list of characters as a string.
- *
- * @param xs the list to convert.
- * @return the list in form of a string.
- */
- @deprecated("use `xs.mkString' instead of `List.toString(xs)'", "2.8.0")
- def toString(xs: List[Char]): String = {
- val sb = new StringBuilder()
- var xc = xs
- while (!xc.isEmpty) {
- sb.append(xc.head)
- xc = xc.tail
- }
- sb.toString()
- }
-
- /** Like xs map f, but returns `xs` unchanged if function
- * `f` maps all elements to themselves.
- */
- @deprecated("use `xs.mapConserve(f)' instead of `List.mapConserve(xs, f)'", "2.8.0")
- def mapConserve[A <: AnyRef](xs: List[A])(f: A => A): List[A] = {
- def loop(ys: List[A]): List[A] =
- if (ys.isEmpty) xs
- else {
- val head0 = ys.head
- val head1 = f(head0)
- if (head1 eq head0) {
- loop(ys.tail)
- } else {
- val ys1 = head1 :: mapConserve(ys.tail)(f)
- if (xs eq ys) ys1
- else {
- val b = new ListBuffer[A]
- var xc = xs
- while (xc ne ys) {
- b += xc.head
- xc = xc.tail
- }
- b.prependToList(ys1)
- }
- }
- }
- loop(xs)
- }
-
/** Returns the list resulting from applying the given function `f`
* to corresponding elements of the argument lists.
*
* @param f function to apply to each pair of elements.
- * @return `[f(a0,b0), ..., f(an,bn)]` if the lists are
- * `[a0, ..., ak]`, `[b0, ..., bl]` and
+ * @return `[f(a,,0,,,b,,0,,), ..., f(a,,n,,,b,,n,,)]` if the lists are
+ * `[a,,0,,, ..., a,,k,,]`, `[b,,0,,, ..., b,,l,,]` and
* `n = min(k,l)`
*/
- @deprecated("use `(xs, ys).zipped.map(f)' instead of `List.map2(xs, ys)(f)'", "2.8.0")
+ @deprecated("use `(xs, ys).zipped.map(f)` instead of `List.map2(xs, ys)(f)`", "2.8.0")
def map2[A,B,C](xs: List[A], ys: List[B])(f: (A, B) => C): List[C] = {
val b = new ListBuffer[C]
var xc = xs
@@ -713,43 +580,17 @@ object List extends SeqFactory[List] {
b.toList
}
- /** Returns the list resulting from applying the given function
- * `f` to corresponding elements of the argument lists.
- *
- * @param f function to apply to each pair of elements.
- * @return `[f(a<sub>0</sub>,b<sub>0</sub>,c<sub>0</sub>),
- * ..., f(a<sub>n</sub>,b<sub>n</sub>,c<sub>n</sub>)]`
- * if the lists are `[a<sub>0</sub>, ..., a<sub>k</sub>]`,
- * `[b<sub>0</sub>, ..., b<sub>l</sub>]`,
- * `[c<sub>0</sub>, ..., c<sub>m</sub>]` and
- * `n = min(k,l,m)`
- */
- @deprecated("use `(xs, ys, zs).zipped.map(f)' instead of `List.map3(xs, ys, zs)(f)'", "2.8.0")
- def map3[A,B,C,D](xs: List[A], ys: List[B], zs: List[C])(f: (A, B, C) => D): List[D] = {
- val b = new ListBuffer[D]
- var xc = xs
- var yc = ys
- var zc = zs
- while (!xc.isEmpty && !yc.isEmpty && !zc.isEmpty) {
- b += f(xc.head, yc.head, zc.head)
- xc = xc.tail
- yc = yc.tail
- zc = zc.tail
- }
- b.toList
- }
-
/** Tests whether the given predicate `p` holds
* for all corresponding elements of the argument lists.
*
- * @param p function to apply to each pair of elements.
+ * @param f function to apply to each pair of elements.
* @return `(p(a<sub>0</sub>,b<sub>0</sub>) &&
* ... && p(a<sub>n</sub>,b<sub>n</sub>))]`
* if the lists are `[a<sub>0</sub>, ..., a<sub>k</sub>]`;
* `[b<sub>0</sub>, ..., b<sub>l</sub>]`
* and `n = min(k,l)`
*/
- @deprecated("use `(xs, ys).zipped.forall(f)' instead of `List.forall2(xs, ys)(f)'", "2.8.0")
+ @deprecated("use `(xs, ys).zipped.forall(f)` instead of `List.forall2(xs, ys)(f)`", "2.8.0")
def forall2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
var xc = xs
var yc = ys
@@ -764,14 +605,14 @@ object List extends SeqFactory[List] {
/** Tests whether the given predicate `p` holds
* for some corresponding elements of the argument lists.
*
- * @param p function to apply to each pair of elements.
+ * @param f function to apply to each pair of elements.
* @return `n != 0 && (p(a<sub>0</sub>,b<sub>0</sub>) ||
* ... || p(a<sub>n</sub>,b<sub>n</sub>))]` if the lists are
* `[a<sub>0</sub>, ..., a<sub>k</sub>]`,
* `[b<sub>0</sub>, ..., b<sub>l</sub>]` and
* `n = min(k,l)`
*/
- @deprecated("use `(xs, ys).zipped.exists(f)' instead of `List.exists2(xs, ys)(f)'", "2.8.0")
+ @deprecated("use `(xs, ys).zipped.exists(f)` instead of `List.exists2(xs, ys)(f)`", "2.8.0")
def exists2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
var xc = xs
var yc = ys
@@ -789,7 +630,7 @@ object List extends SeqFactory[List] {
* @param xss the list of lists
* @return the transposed list of lists
*/
- @deprecated("use `xss.transpose' instead of `List.transpose(xss)'", "2.8.0")
+ @deprecated("use `xss.transpose` instead of `List.transpose(xss)`", "2.8.0")
def transpose[A](xss: List[List[A]]): List[List[A]] = {
val buf = new ListBuffer[List[A]]
var yss = xss
@@ -802,6 +643,10 @@ object List extends SeqFactory[List] {
}
/** Only used for list serialization */
+ at SerialVersionUID(0L - 8287891243975527522L)
+private[scala] case object ListSerializeStart
+
+/** Only used for list serialization */
@SerialVersionUID(0L - 8476791151975527571L)
private[scala] case object ListSerializeEnd
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index c75e042..7581735 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,7 +12,7 @@ package scala.collection
package immutable
import generic._
-import annotation.{tailrec, bridge}
+import scala.annotation.{tailrec, bridge}
/** $factoryInfo
* @since 1
@@ -48,7 +48,11 @@ object ListMap extends ImmutableMapFactory[ListMap] {
* @define willNotTerminateInf
*/
@SerialVersionUID(301002838095710379L)
-class ListMap[A, +B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] with Serializable {
+class ListMap[A, +B]
+extends AbstractMap[A, B]
+ with Map[A, B]
+ with MapLike[A, B, ListMap[A, B]]
+ with Serializable {
override def empty = ListMap.empty
@@ -66,11 +70,9 @@ class ListMap[A, +B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] with Se
*/
def get(key: A): Option[B] = None
- /** This method allows one to create a new map with an
- * additional mapping from <code>key</code>
- * to <code>value</code>. If the map contains already a
- * mapping for <code>key</code>, it will be overridden by this
- * function.
+ /** This method allows one to create a new map with an additional mapping
+ * from `key` to `value`. If the map contains already a mapping for `key`,
+ * it will be overridden by this function.
*
* @param key the key element of the updated entry.
* @param value the value element of the updated entry.
@@ -103,10 +105,7 @@ class ListMap[A, +B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] with Se
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): ListMap[A, B1] =
((repr: ListMap[A, B1]) /: xs.seq) (_ + _)
- @bridge def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): ListMap[A, B1] =
- ++(xs: GenTraversableOnce[(A, B1)])
-
- /** This creates a new mapping without the given <code>key</code>.
+ /** This creates a new mapping without the given `key`.
* If the map does not contain a mapping for the given key, the
* method returns the same map.
*
@@ -117,17 +116,17 @@ class ListMap[A, +B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] with Se
/** Returns an iterator over key-value pairs.
*/
def iterator: Iterator[(A,B)] =
- new Iterator[(A,B)] {
+ new AbstractIterator[(A,B)] {
var self: ListMap[A,B] = ListMap.this
def hasNext = !self.isEmpty
- def next: (A,B) =
+ def next(): (A,B) =
if (!hasNext) throw new NoSuchElementException("next on empty iterator")
- else { val res = (self.key, self.value); self = self.next; res }
+ else { val res = (self.key, self.value); self = self.tail; res }
}.toList.reverseIterator
protected def key: A = throw new NoSuchElementException("empty map")
protected def value: B = throw new NoSuchElementException("empty map")
- protected def next: ListMap[A, B] = throw new NoSuchElementException("empty map")
+ override def tail: ListMap[A, B] = throw new NoSuchElementException("empty map")
/** This class represents an entry in the `ListMap`.
*/
@@ -141,7 +140,7 @@ class ListMap[A, +B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] with Se
override def size: Int = size0(this, 0)
// to allow tail recursion and prevent stack overflows
- @tailrec private def size0(cur: ListMap[A, B1], acc: Int): Int = if (cur.isEmpty) acc else size0(cur.next, acc + 1)
+ @tailrec private def size0(cur: ListMap[A, B1], acc: Int): Int = if (cur.isEmpty) acc else size0(cur.tail, acc + 1)
/** Is this an empty map?
*
@@ -153,43 +152,41 @@ class ListMap[A, +B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] with Se
* method throws an exception if there is no mapping from the given
* key to a value.
*
- * @param key the key
+ * @param k the key
* @return the value associated with the given key.
*/
override def apply(k: A): B1 = apply0(this, k)
-
- @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 = if (k == cur.key) cur.value else apply0(cur.next, k)
-
- /** Checks if this map maps <code>key</code> to a value and return the
+
+
+ @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 =
+ if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k)
+ else if (k == cur.key) cur.value
+ else apply0(cur.tail, k)
+
+ /** Checks if this map maps `key` to a value and return the
* value if it exists.
*
- * @param key the key of the mapping of interest
+ * @param k the key of the mapping of interest
* @return the value of the mapping, if it exists
*/
override def get(k: A): Option[B1] = get0(this, k)
@tailrec private def get0(cur: ListMap[A, B1], k: A): Option[B1] =
if (k == cur.key) Some(cur.value)
- else if (cur.next.nonEmpty) get0(cur.next, k) else None
+ else if (cur.tail.nonEmpty) get0(cur.tail, k) else None
/** This method allows one to create a new map with an additional mapping
* from `key` to `value`. If the map contains already a mapping for `key`,
* it will be overridden by this function.
- *
- * @param k ...
- * @param v ...
*/
override def updated [B2 >: B1](k: A, v: B2): ListMap[A, B2] = {
val m = if (contains(k)) this - k else this
new m.Node[B2](k, v)
}
- /** Creates a new mapping without the given <code>key</code>.
+ /** Creates a new mapping without the given `key`.
* If the map does not contain a mapping for the given key, the
* method returns the same map.
- *
- * @param k ...
- * @return ...
*/
override def - (k: A): ListMap[A, B1] = {
// This definition used to result in stack overflows
@@ -205,7 +202,7 @@ class ListMap[A, +B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] with Se
var lst: List[(A, B1)] = Nil
while (cur.nonEmpty) {
if (k != cur.key) lst ::= ((cur.key, cur.value))
- cur = cur.next
+ cur = cur.tail
}
var acc = ListMap[A, B1]()
while (lst != Nil) {
@@ -218,6 +215,6 @@ class ListMap[A, +B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] with Se
}
- override protected def next: ListMap[A, B1] = ListMap.this
+ override def tail: ListMap[A, B1] = ListMap.this
}
}
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index a298c62..6cf6c42 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -1,18 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package immutable
import generic._
-import annotation.{tailrec, bridge}
+import scala.annotation.{tailrec, bridge}
import mutable.{ ListBuffer, Builder }
/** $factoryInfo
@@ -35,7 +33,7 @@ object ListSet extends ImmutableSetFactory[ListSet] {
*/
class ListSetBuilder[Elem](initial: ListSet[Elem]) extends Builder[Elem, ListSet[Elem]] {
def this() = this(empty[Elem])
- protected val elems = new mutable.ListBuffer[Elem] ++= initial reverse
+ protected val elems = (new mutable.ListBuffer[Elem] ++= initial).reverse
protected val seen = new mutable.HashSet[Elem] ++= initial
def +=(x: Elem): this.type = {
@@ -65,7 +63,8 @@ object ListSet extends ImmutableSetFactory[ListSet] {
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-class ListSet[A] extends Set[A]
+class ListSet[A] extends AbstractSet[A]
+ with Set[A]
with GenericSetTemplate[A, ListSet]
with SetLike[A, ListSet[A]]
with Serializable{ self =>
@@ -78,10 +77,10 @@ class ListSet[A] extends Set[A]
override def size: Int = 0
override def isEmpty: Boolean = true;
- /** Checks if this set contains element <code>elem</code>.
+ /** Checks if this set contains element `elem`.
*
* @param elem the element to check for membership.
- * @return true, iff <code>elem</code> is contained in this set.
+ * @return `'''true'''`, iff `elem` is contained in this set.
*/
def contains(elem: A): Boolean = false
@@ -101,9 +100,7 @@ class ListSet[A] extends Set[A]
*/
override def ++(xs: GenTraversableOnce[A]): ListSet[A] =
if (xs.isEmpty) this
- else new ListSet.ListSetBuilder(this) ++= xs.seq result
-
- @bridge def ++(xs: TraversableOnce[A]): ListSet[A] = ++(xs: GenTraversableOnce[A]): ListSet[A]
+ else (new ListSet.ListSetBuilder(this) ++= xs.seq).result
private[ListSet] def unchecked_+(e: A): ListSet[A] = new Node(e)
private[ListSet] def unchecked_outer: ListSet[A] =
@@ -114,13 +111,13 @@ class ListSet[A] extends Set[A]
* @throws Predef.NoSuchElementException
* @return the new iterator
*/
- def iterator: Iterator[A] = new Iterator[A] {
+ def iterator: Iterator[A] = new AbstractIterator[A] {
var that: ListSet[A] = self
def hasNext = that.nonEmpty
def next: A =
if (hasNext) {
- val res = that.elem
- that = that.next
+ val res = that.head
+ that = that.tail
res
}
else Iterator.empty.next
@@ -129,18 +126,18 @@ class ListSet[A] extends Set[A]
/**
* @throws Predef.NoSuchElementException
*/
- protected def elem: A = throw new NoSuchElementException("Set has no elements");
+ override def head: A = throw new NoSuchElementException("Set has no elements");
/**
* @throws Predef.NoSuchElementException
*/
- protected def next: ListSet[A] = throw new NoSuchElementException("Next of an empty set");
+ override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set");
override def stringPrefix = "ListSet"
/** Represents an entry in the `ListSet`.
*/
- protected class Node(override protected val elem: A) extends ListSet[A] with Serializable {
+ protected class Node(override val head: A) extends ListSet[A] with Serializable {
override private[ListSet] def unchecked_outer = self
/** Returns the number of elements in this set.
@@ -158,26 +155,25 @@ class ListSet[A] extends Set[A]
*/
override def isEmpty: Boolean = false
- /** Checks if this set contains element <code>elem</code>.
+ /** Checks if this set contains element `elem`.
*
- * @param elem the element to check for membership.
- * @return true, iff <code>elem</code> is contained in this set.
+ * @param e the element to check for membership.
+ * @return `'''true'''`, iff `elem` is contained in this set.
*/
override def contains(e: A) = containsInternal(this, e)
@tailrec private def containsInternal(n: ListSet[A], e: A): Boolean =
- !n.isEmpty && (n.elem == e || containsInternal(n.unchecked_outer, e))
+ !n.isEmpty && (n.head == e || containsInternal(n.unchecked_outer, e))
/** This method creates a new set with an additional element.
*/
override def +(e: A): ListSet[A] = if (contains(e)) this else new Node(e)
- /** <code>-</code> can be used to remove a single element from
- * a set.
+ /** `-` can be used to remove a single element from a set.
*/
- override def -(e: A): ListSet[A] = if (e == elem) self else {
- val tail = self - e; new tail.Node(elem)
+ override def -(e: A): ListSet[A] = if (e == head) self else {
+ val tail = self - e; new tail.Node(head)
}
- override protected def next: ListSet[A] = self
+ override def tail: ListSet[A] = self
}
}
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index 030dd72..2a29104 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -1,12 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import scala.collection.generic.{ CanBuildFrom, BitOperations }
@@ -18,17 +19,17 @@ import scala.collection.mutable.{ Builder, MapBuilder }
private[immutable] object LongMapUtils extends BitOperations.Long {
def branchMask(i: Long, j: Long) = highestOneBit(i ^ j)
- def join[T](p1 : Long, t1 : LongMap[T], p2 : Long, t2 : LongMap[T]) : LongMap[T] = {
- val m = branchMask(p1, p2);
- val p = mask(p1, m);
+ def join[T](p1: Long, t1: LongMap[T], p2: Long, t2: LongMap[T]): LongMap[T] = {
+ val m = branchMask(p1, p2)
+ val p = mask(p1, m)
if (zero(p1, m)) LongMap.Bin(p, m, t1, t2)
- else LongMap.Bin(p, m, t2, t1);
+ else LongMap.Bin(p, m, t2, t1)
}
- def bin[T](prefix : Long, mask : Long, left : LongMap[T], right : LongMap[T]) : LongMap[T] = (left, right) match {
- case (left, LongMap.Nil) => left;
- case (LongMap.Nil, right) => right;
- case (left, right) => LongMap.Bin(prefix, mask, left, right);
+ def bin[T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]): LongMap[T] = (left, right) match {
+ case (left, LongMap.Nil) => left
+ case (LongMap.Nil, right) => right
+ case (left, right) => LongMap.Bin(prefix, mask, left, right)
}
}
@@ -36,7 +37,7 @@ import LongMapUtils._
/** A companion object for long maps.
*
- * @define Coll LongMap
+ * @define Coll `LongMap`
* @define mapCanBuildFromInfo
* The standard `CanBuildFrom` instance for `$Coll` objects.
* The created value is an instance of class `MapCanBuildFrom`.
@@ -49,29 +50,29 @@ object LongMap {
def apply(): Builder[(Long, B), LongMap[B]] = new MapBuilder[Long, B, LongMap[B]](empty[B])
}
- def empty[T] : LongMap[T] = LongMap.Nil;
- def singleton[T](key : Long, value : T) : LongMap[T] = LongMap.Tip(key, value);
- def apply[T](elems : (Long, T)*) : LongMap[T] =
- elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2));
+ def empty[T]: LongMap[T] = LongMap.Nil
+ def singleton[T](key: Long, value: T): LongMap[T] = LongMap.Tip(key, value)
+ def apply[T](elems: (Long, T)*): LongMap[T] =
+ elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2))
private[immutable] case object Nil extends LongMap[Nothing] {
// Important, don't remove this! See IntMap for explanation.
override def equals(that : Any) = that match {
- case (that : AnyRef) if (this eq that) => true;
- case (that : LongMap[_]) => false; // The only empty LongMaps are eq Nil
- case that => super.equals(that);
+ case (that: AnyRef) if (this eq that) => true
+ case (that: LongMap[_]) => false // The only empty LongMaps are eq Nil
+ case that => super.equals(that)
}
- };
+ }
- private[immutable] case class Tip[+T](key : Long, value : T) extends LongMap[T]{
- def withValue[S](s : S) =
- if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]];
- else LongMap.Tip(key, s);
+ private[immutable] case class Tip[+T](key: Long, value: T) extends LongMap[T] {
+ def withValue[S](s: S) =
+ if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]]
+ else LongMap.Tip(key, s)
}
- private[immutable] case class Bin[+T](prefix : Long, mask : Long, left : LongMap[T], right : LongMap[T]) extends LongMap[T]{
- def bin[S](left : LongMap[S], right : LongMap[S]) : LongMap[S] = {
- if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]];
- else LongMap.Bin[S](prefix, mask, left, right);
+ private[immutable] case class Bin[+T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]) extends LongMap[T] {
+ def bin[S](left: LongMap[S], right: LongMap[S]): LongMap[S] = {
+ if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]]
+ else LongMap.Bin[S](prefix, mask, left, right)
}
}
}
@@ -79,64 +80,62 @@ object LongMap {
import LongMap._
// Iterator over a non-empty LongMap.
-private[immutable] abstract class LongMapIterator[V, T](it : LongMap[V]) extends Iterator[T]{
+private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] {
// Basically this uses a simple stack to emulate conversion over the tree. However
// because we know that Longs are only 64 bits we can have at most 64 LongMap.Bins and
// one LongMap.Tip sitting on the tree at any point. Therefore we know the maximum stack
// depth is 65
- var index = 0;
- var buffer = new Array[AnyRef](65);
+ var index = 0
+ var buffer = new Array[AnyRef](65)
def pop() = {
- index -= 1;
- buffer(index).asInstanceOf[LongMap[V]];
+ index -= 1
+ buffer(index).asInstanceOf[LongMap[V]]
}
- def push(x : LongMap[V]) {
- buffer(index) = x.asInstanceOf[AnyRef];
- index += 1;
+ def push(x: LongMap[V]) {
+ buffer(index) = x.asInstanceOf[AnyRef]
+ index += 1
}
push(it);
/**
* What value do we assign to a tip?
*/
- def valueOf(tip : LongMap.Tip[V]) : T;
+ def valueOf(tip: LongMap.Tip[V]): T
- def hasNext = index != 0;
- final def next : T =
+ def hasNext = index != 0
+ final def next: T =
pop() match {
case LongMap.Bin(_,_, t at LongMap.Tip(_, _), right) => {
- push(right);
- valueOf(t);
+ push(right)
+ valueOf(t)
}
case LongMap.Bin(_, _, left, right) => {
- push(right);
- push(left);
- next;
+ push(right)
+ push(left)
+ next
}
- case t at LongMap.Tip(_, _) => valueOf(t);
+ case t at LongMap.Tip(_, _) => valueOf(t)
// This should never happen. We don't allow LongMap.Nil in subtrees of the LongMap
// and don't return an LongMapIterator for LongMap.Nil.
- case LongMap.Nil => sys.error("Empty maps not allowed as subtrees");
+ case LongMap.Nil => sys.error("Empty maps not allowed as subtrees")
}
}
-private[immutable] class LongMapEntryIterator[V](it : LongMap[V]) extends LongMapIterator[V, (Long, V)](it){
- def valueOf(tip : LongMap.Tip[V]) = (tip.key, tip.value);
+private[immutable] class LongMapEntryIterator[V](it: LongMap[V]) extends LongMapIterator[V, (Long, V)](it){
+ def valueOf(tip: LongMap.Tip[V]) = (tip.key, tip.value)
}
-private[immutable] class LongMapValueIterator[V](it : LongMap[V]) extends LongMapIterator[V, V](it){
- def valueOf(tip : LongMap.Tip[V]) = tip.value;
+private[immutable] class LongMapValueIterator[V](it: LongMap[V]) extends LongMapIterator[V, V](it){
+ def valueOf(tip: LongMap.Tip[V]) = tip.value
}
-private[immutable] class LongMapKeyIterator[V](it : LongMap[V]) extends LongMapIterator[V, Long](it){
- def valueOf(tip : LongMap.Tip[V]) = tip.key;
+private[immutable] class LongMapKeyIterator[V](it: LongMap[V]) extends LongMapIterator[V, Long](it){
+ def valueOf(tip: LongMap.Tip[V]) = tip.key
}
-import LongMap._;
-
/**
* Specialised immutable map structure for long keys, based on
* <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Long Maps</a>
@@ -147,18 +146,22 @@ import LongMap._;
* @tparam T type of the values associated with the long keys.
*
* @since 2.7
- * @define Coll immutable.LongMap
+ * @define Coll `immutable.LongMap`
* @define coll immutable long integer map
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, LongMap[T]] {
- override def empty: LongMap[T] = LongMap.Nil;
+sealed abstract class LongMap[+T]
+extends AbstractMap[Long, T]
+ with Map[Long, T]
+ with MapLike[Long, T, LongMap[T]] {
+
+ override def empty: LongMap[T] = LongMap.Nil
override def toList = {
- val buffer = new scala.collection.mutable.ListBuffer[(Long, T)];
- foreach(buffer += _);
- buffer.toList;
+ val buffer = new scala.collection.mutable.ListBuffer[(Long, T)]
+ foreach(buffer += _)
+ buffer.toList
}
/**
@@ -167,22 +170,22 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon
* @return an iterator over pairs of long keys and corresponding values.
*/
def iterator: Iterator[(Long, T)] = this match {
- case LongMap.Nil => Iterator.empty;
- case _ => new LongMapEntryIterator(this);
+ case LongMap.Nil => Iterator.empty
+ case _ => new LongMapEntryIterator(this)
}
/**
* Loops over the key, value pairs of the map in unsigned order of the keys.
*/
- override final def foreach[U](f : ((Long, T)) => U) : Unit = this match {
- case LongMap.Bin(_, _, left, right) => {left.foreach(f); right.foreach(f); }
+ override final def foreach[U](f: ((Long, T)) => U): Unit = this match {
+ case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
case LongMap.Tip(key, value) => f((key, value));
- case LongMap.Nil => {};
+ case LongMap.Nil =>
}
- override def keysIterator : Iterator[Long] = this match {
- case LongMap.Nil => Iterator.empty;
- case _ => new LongMapKeyIterator(this);
+ override def keysIterator: Iterator[Long] = this match {
+ case LongMap.Nil => Iterator.empty
+ case _ => new LongMapKeyIterator(this)
}
/**
@@ -191,15 +194,15 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon
*
* @param f The loop body
*/
- final def foreachKey(f : Long => Unit) : Unit = this match {
- case LongMap.Bin(_, _, left, right) => {left.foreachKey(f); right.foreachKey(f); }
- case LongMap.Tip(key, _) => f(key);
- case LongMap.Nil => {}
+ final def foreachKey(f: Long => Unit): Unit = this match {
+ case LongMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) }
+ case LongMap.Tip(key, _) => f(key)
+ case LongMap.Nil =>
}
- override def valuesIterator : Iterator[T] = this match {
- case LongMap.Nil => Iterator.empty;
- case _ => new LongMapValueIterator(this);
+ override def valuesIterator: Iterator[T] = this match {
+ case LongMap.Nil => Iterator.empty
+ case _ => new LongMapValueIterator(this)
}
/**
@@ -208,67 +211,70 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon
*
* @param f The loop body
*/
- final def foreachValue(f : T => Unit) : Unit = this match {
- case LongMap.Bin(_, _, left, right) => {left.foreachValue(f); right.foreachValue(f); }
- case LongMap.Tip(_, value) => f(value);
- case LongMap.Nil => {};
+ final def foreachValue(f: T => Unit): Unit = this match {
+ case LongMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) }
+ case LongMap.Tip(_, value) => f(value)
+ case LongMap.Nil =>
}
override def stringPrefix = "LongMap"
- override def isEmpty = this == LongMap.Nil;
+ override def isEmpty = this == LongMap.Nil
- override def filter(f : ((Long, T)) => Boolean) : LongMap[T] = this match {
+ override def filter(f: ((Long, T)) => Boolean): LongMap[T] = this match {
case LongMap.Bin(prefix, mask, left, right) => {
- val (newleft, newright) = (left.filter(f), right.filter(f));
- if ((left eq newleft) && (right eq newright)) this;
- else bin(prefix, mask, newleft, newright);
+ val (newleft, newright) = (left.filter(f), right.filter(f))
+ if ((left eq newleft) && (right eq newright)) this
+ else bin(prefix, mask, newleft, newright)
}
case LongMap.Tip(key, value) =>
if (f((key, value))) this
- else LongMap.Nil;
- case LongMap.Nil => LongMap.Nil;
+ else LongMap.Nil
+ case LongMap.Nil => LongMap.Nil
}
- def transform[S](f : (Long, T) => S) : LongMap[S] = this match {
- case b at LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f));
- case t at LongMap.Tip(key, value) => t.withValue(f(key, value));
- case LongMap.Nil => LongMap.Nil;
+ def transform[S](f: (Long, T) => S): LongMap[S] = this match {
+ case b at LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f))
+ case t at LongMap.Tip(key, value) => t.withValue(f(key, value))
+ case LongMap.Nil => LongMap.Nil
}
- final override def size : Int = this match {
- case LongMap.Nil => 0;
- case LongMap.Tip(_, _) => 1;
- case LongMap.Bin(_, _, left, right) => left.size + right.size;
+ final override def size: Int = this match {
+ case LongMap.Nil => 0
+ case LongMap.Tip(_, _) => 1
+ case LongMap.Bin(_, _, left, right) => left.size + right.size
}
- final def get(key : Long) : Option[T] = this match {
- case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key);
- case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None;
- case LongMap.Nil => None;
+ final def get(key: Long): Option[T] = this match {
+ case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key)
+ case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None
+ case LongMap.Nil => None
}
- final override def getOrElse[S >: T](key : Long, default : =>S) : S = this match {
- case LongMap.Nil => default;
- case LongMap.Tip(key2, value) => if (key == key2) value else default;
- case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default);
+ final override def getOrElse[S >: T](key: Long, default: => S): S = this match {
+ case LongMap.Nil => default
+ case LongMap.Tip(key2, value) => if (key == key2) value else default
+ case LongMap.Bin(prefix, mask, left, right) =>
+ if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default)
}
- final override def apply(key : Long) : T = this match {
- case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key);
- case LongMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found");
- case LongMap.Nil => sys.error("key not found");
+ final override def apply(key: Long): T = this match {
+ case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key)
+ case LongMap.Tip(key2, value) => if (key == key2) value else sys.error("Key not found")
+ case LongMap.Nil => sys.error("key not found")
}
def + [S >: T] (kv: (Long, S)): LongMap[S] = updated(kv._1, kv._2)
- override def updated[S >: T](key : Long, value : S) : LongMap[S] = this match {
- case LongMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this);
- else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right)
- else LongMap.Bin(prefix, mask, left, right.updated(key, value));
- case LongMap.Tip(key2, value2) => if (key == key2) LongMap.Tip(key, value);
- else join(key, LongMap.Tip(key, value), key2, this);
- case LongMap.Nil => LongMap.Tip(key, value);
+ override def updated[S >: T](key: Long, value: S): LongMap[S] = this match {
+ case LongMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right)
+ else LongMap.Bin(prefix, mask, left, right.updated(key, value))
+ case LongMap.Tip(key2, value2) =>
+ if (key == key2) LongMap.Tip(key, value)
+ else join(key, LongMap.Tip(key, value), key2, this)
+ case LongMap.Nil => LongMap.Tip(key, value)
}
/**
@@ -277,7 +283,7 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon
* Equivalent to
* {{{
* this.get(key) match {
- * case None => this.update(key, value);
+ * case None => this.update(key, value)
* case Some(oldvalue) => this.update(key, f(oldvalue, value)
* }
* }}}
@@ -288,52 +294,54 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon
* @param f The function used to resolve conflicts.
* @return The updated map.
*/
- def updateWith[S >: T](key : Long, value : S, f : (T, S) => S) : LongMap[S] = this match {
- case LongMap.Bin(prefix, mask, left, right) => if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this);
- else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
- else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f));
- case LongMap.Tip(key2, value2) => if (key == key2) LongMap.Tip(key, f(value2, value));
- else join(key, LongMap.Tip(key, value), key2, this);
- case LongMap.Nil => LongMap.Tip(key, value);
+ def updateWith[S >: T](key: Long, value: S, f: (T, S) => S): LongMap[S] = this match {
+ case LongMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
+ else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
+ case LongMap.Tip(key2, value2) =>
+ if (key == key2) LongMap.Tip(key, f(value2, value))
+ else join(key, LongMap.Tip(key, value), key2, this)
+ case LongMap.Nil => LongMap.Tip(key, value)
}
- def -(key : Long) : LongMap[T] = this match {
+ def -(key: Long): LongMap[T] = this match {
case LongMap.Bin(prefix, mask, left, right) =>
- if (!hasMatch(key, prefix, mask)) this;
- else if (zero(key, mask)) bin(prefix, mask, left - key, right);
- else bin(prefix, mask, left, right - key);
+ if (!hasMatch(key, prefix, mask)) this
+ else if (zero(key, mask)) bin(prefix, mask, left - key, right)
+ else bin(prefix, mask, left, right - key)
case LongMap.Tip(key2, _) =>
- if (key == key2) LongMap.Nil;
- else this;
- case LongMap.Nil => LongMap.Nil;
+ if (key == key2) LongMap.Nil
+ else this
+ case LongMap.Nil => LongMap.Nil
}
/**
- * A combined transform and filter function. Returns an LongMap such that for each (key, value) mapping
- * in this map, if f(key, value) == None the map contains no mapping for key, and if <code>f(key, value)
+ * A combined transform and filter function. Returns an `LongMap` such that
+ * for each `(key, value)` mapping in this map, if `f(key, value) == None`
+ * the map contains no mapping for key, and if `f(key, value)`.
*
* @tparam S The type of the values in the resulting `LongMap`.
* @param f The transforming function.
* @return The modified map.
*/
- def modifyOrRemove[S](f : (Long, T) => Option[S]) : LongMap[S] = this match {
+ def modifyOrRemove[S](f: (Long, T) => Option[S]): LongMap[S] = this match {
case LongMap.Bin(prefix, mask, left, right) => {
- val newleft = left.modifyOrRemove(f);
- val newright = right.modifyOrRemove(f);
- if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]];
+ val newleft = left.modifyOrRemove(f)
+ val newright = right.modifyOrRemove(f)
+ if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]]
else bin(prefix, mask, newleft, newright)
}
case LongMap.Tip(key, value) => f(key, value) match {
- case None => LongMap.Nil;
+ case None => LongMap.Nil
case Some(value2) =>
//hack to preserve sharing
if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap[S]]
- else LongMap.Tip(key, value2);
+ else LongMap.Tip(key, value2)
}
- case LongMap.Nil => LongMap.Nil;
+ case LongMap.Nil => LongMap.Nil
}
-
/**
* Forms a union map with that map, using the combining function to resolve conflicts.
*
@@ -342,31 +350,31 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon
* @param f The function used to resolve conflicts between two mappings.
* @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
*/
- def unionWith[S >: T](that : LongMap[S], f : (Long, S, S) => S) : LongMap[S] = (this, that) match{
+ def unionWith[S >: T](that: LongMap[S], f: (Long, S, S) => S): LongMap[S] = (this, that) match{
case (LongMap.Bin(p1, m1, l1, r1), that@(LongMap.Bin(p2, m2, l2, r2))) =>
if (shorter(m1, m2)) {
- if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that);
- else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1);
- else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f));
+ if (!hasMatch(p2, p1, m1)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
+ else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1)
+ else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f))
} else if (shorter(m2, m1)){
- if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that);
- else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2);
- else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f));
+ if (!hasMatch(p1, p2, m2)) join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
+ else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2)
+ else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f))
}
else {
- if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f));
- else join(p1, this, p2, that);
+ if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f))
+ else join[S](p1, this, p2, that) // TODO: remove [S] when SI-5548 is fixed
}
- case (LongMap.Tip(key, value), x) => x.updateWith(key, value, (x, y) => f(key, y, x));
- case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y));
- case (LongMap.Nil, x) => x;
- case (x, LongMap.Nil) => x;
+ case (LongMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)) // TODO: remove [S] when SI-5548 is fixed
+ case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y))
+ case (LongMap.Nil, x) => x
+ case (x, LongMap.Nil) => x
}
/**
- * Forms the intersection of these two maps with a combining function. The resulting map is
- * a map that has only keys present in both maps and has values produced from the original mappings
- * by combining them with f.
+ * Forms the intersection of these two maps with a combining function. The
+ * resulting map is a map that has only keys present in both maps and has
+ * values produced from the original mappings by combining them with `f`.
*
* @tparam S The type of values in `that`.
* @tparam R The type of values in the resulting `LongMap`.
@@ -374,27 +382,27 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon
* @param f The combining function.
* @return Intersection of `this` and `that`, with values for identical keys produced by function `f`.
*/
- def intersectionWith[S, R](that : LongMap[S], f : (Long, T, S) => R) : LongMap[R] = (this, that) match {
+ def intersectionWith[S, R](that: LongMap[S], f: (Long, T, S) => R): LongMap[R] = (this, that) match {
case (LongMap.Bin(p1, m1, l1, r1), that at LongMap.Bin(p2, m2, l2, r2)) =>
if (shorter(m1, m2)) {
- if (!hasMatch(p2, p1, m1)) LongMap.Nil;
- else if (zero(p2, m1)) l1.intersectionWith(that, f);
- else r1.intersectionWith(that, f);
- } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f));
+ if (!hasMatch(p2, p1, m1)) LongMap.Nil
+ else if (zero(p2, m1)) l1.intersectionWith(that, f)
+ else r1.intersectionWith(that, f)
+ } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f))
else {
- if (!hasMatch(p1, p2, m2)) LongMap.Nil;
- else if (zero(p1, m2)) this.intersectionWith(l2, f);
- else this.intersectionWith(r2, f);
+ if (!hasMatch(p1, p2, m2)) LongMap.Nil
+ else if (zero(p1, m2)) this.intersectionWith(l2, f)
+ else this.intersectionWith(r2, f)
}
case (LongMap.Tip(key, value), that) => that.get(key) match {
- case None => LongMap.Nil;
- case Some(value2) => LongMap.Tip(key, f(key, value, value2));
+ case None => LongMap.Nil
+ case Some(value2) => LongMap.Tip(key, f(key, value, value2))
}
case (_, LongMap.Tip(key, value)) => this.get(key) match {
- case None => LongMap.Nil;
- case Some(value2) => LongMap.Tip(key, f(key, value2, value));
+ case None => LongMap.Nil
+ case Some(value2) => LongMap.Tip(key, f(key, value2, value))
}
- case (_, _) => LongMap.Nil;
+ case (_, _) => LongMap.Nil
}
/**
@@ -405,9 +413,10 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon
* @param that The map to intersect with.
* @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`.
*/
- def intersection[R](that : LongMap[R]) : LongMap[T] = this.intersectionWith(that, (key : Long, value : T, value2 : R) => value);
+ def intersection[R](that: LongMap[R]): LongMap[T] =
+ this.intersectionWith(that, (key: Long, value: T, value2: R) => value)
- def ++[S >: T](that : LongMap[S]) =
+ def ++[S >: T](that: LongMap[S]) =
this.unionWith[S](that, (key, x, y) => y)
}
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index ef27bc9..2ebf503 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -66,7 +66,7 @@ trait Map[A, +B] extends Iterable[(A, B)]
}
/** $factoryInfo
- * @define Coll immutable.Map
+ * @define Coll `immutable.Map`
* @define coll immutable map
*/
object Map extends ImmutableMapFactory[Map] {
@@ -76,7 +76,7 @@ object Map extends ImmutableMapFactory[Map] {
def empty[A, B]: Map[A, B] = EmptyMap.asInstanceOf[Map[A, B]]
- class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends collection.Map.WithDefault[A, B](underlying, d) with Map[A, B] {
+ class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends scala.collection.Map.WithDefault[A, B](underlying, d) with Map[A, B] {
override def empty = new WithDefault(underlying.empty, d)
override def updated[B1 >: B](key: A, value: B1): WithDefault[A, B1] = new WithDefault[A, B1](underlying.updated[B1](key, value), d)
override def + [B1 >: B](kv: (A, B1)): WithDefault[A, B1] = updated(kv._1, kv._2)
@@ -85,7 +85,7 @@ object Map extends ImmutableMapFactory[Map] {
override def withDefaultValue[B1 >: B](d: B1): immutable.Map[A, B1] = new WithDefault[A, B1](underlying, x => d)
}
- private object EmptyMap extends Map[Any, Nothing] with Serializable {
+ private object EmptyMap extends AbstractMap[Any, Nothing] with Map[Any, Nothing] with Serializable {
override def size: Int = 0
def get(key: Any): Option[Nothing] = None
def iterator: Iterator[(Any, Nothing)] = Iterator.empty
@@ -94,17 +94,7 @@ object Map extends ImmutableMapFactory[Map] {
def - (key: Any): Map[Any, Nothing] = this
}
- @deprecated("use `Map.empty' instead", "2.8.0")
- class EmptyMap[A,B] extends Map[A,B] with Serializable {
- override def size: Int = 0
- def get(key: A): Option[B] = None
- def iterator: Iterator[(A, B)] = Iterator.empty
- override def updated [B1] (key: A, value: B1): Map[A, B1] = new Map1(key, value)
- def + [B1](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
- def - (key: A): Map[A, B] = this
- }
-
- class Map1[A, +B](key1: A, value1: B) extends Map[A, B] with Serializable {
+ class Map1[A, +B](key1: A, value1: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 1
def get(key: A): Option[B] =
if (key == key1) Some(value1) else None
@@ -120,7 +110,7 @@ object Map extends ImmutableMapFactory[Map] {
}
}
- class Map2[A, +B](key1: A, value1: B, key2: A, value2: B) extends Map[A, B] with Serializable {
+ class Map2[A, +B](key1: A, value1: B, key2: A, value2: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 2
def get(key: A): Option[B] =
if (key == key1) Some(value1)
@@ -141,7 +131,7 @@ object Map extends ImmutableMapFactory[Map] {
}
}
- class Map3[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B) extends Map[A, B] with Serializable {
+ class Map3[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 3
def get(key: A): Option[B] =
if (key == key1) Some(value1)
@@ -165,7 +155,7 @@ object Map extends ImmutableMapFactory[Map] {
}
}
- class Map4[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B, key4: A, value4: B) extends Map[A, B] with Serializable {
+ class Map4[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B, key4: A, value4: B) extends AbstractMap[A, B] with Map[A, B] with Serializable {
override def size = 4
def get(key: A): Option[B] =
if (key == key1) Some(value1)
@@ -193,3 +183,5 @@ object Map extends ImmutableMapFactory[Map] {
}
}
+/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
+private[scala] abstract class AbstractMap[A, +B] extends scala.collection.AbstractMap[A, B] with Map[A, B]
diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala
index beea72d..7e60f07 100644
--- a/src/library/scala/collection/immutable/MapLike.scala
+++ b/src/library/scala/collection/immutable/MapLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,7 +11,6 @@ package immutable
import generic._
import parallel.immutable.ParMap
-import annotation.bridge
/**
* A generic template for immutable maps from keys of type `A`
@@ -36,9 +35,9 @@ import annotation.bridge
* It is also good idea to override methods `foreach` and
* `size` for efficiency.
*
- * @param A the type of the keys contained in this collection.
- * @param B the type of the values associated with the keys.
- * @param This The type of the actual map implementation.
+ * @tparam A the type of the keys contained in this collection.
+ * @tparam B the type of the values associated with the keys.
+ * @tparam This The type of the actual map implementation.
*
* @author Martin Odersky
* @version 2.8
@@ -49,7 +48,8 @@ import annotation.bridge
trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
extends scala.collection.MapLike[A, B, This]
with Parallelizable[(A, B), ParMap[A, B]]
-{ self =>
+{
+self =>
protected[this] override def parCombiner = ParMap.newCombiner[A, B]
@@ -85,33 +85,20 @@ trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
*/
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): immutable.Map[A, B1] =
((repr: immutable.Map[A, B1]) /: xs.seq) (_ + _)
-
- @bridge def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): immutable.Map[A, B1] = ++(xs: GenTraversableOnce[(A, B1)])
-
+
/** Filters this map by retaining only keys satisfying a predicate.
* @param p the predicate used to test keys
* @return an immutable map consisting only of those key value pairs of this map where the key satisfies
* the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
- override def filterKeys(p: A => Boolean): Map[A, B] = new DefaultMap[A, B] {
- override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
- def iterator = self.iterator.filter(kv => p(kv._1))
- override def contains(key: A) = self.contains(key) && p(key)
- def get(key: A) = if (!p(key)) None else self.get(key)
- }
-
+ override def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p) with DefaultMap[A, B]
+
/** Transforms this map by applying a function to every retrieved value.
* @param f the function used to transform values of this map.
* @return a map view which maps every key of this map
* to `f(this(key))`. The resulting map wraps the original map without copying any elements.
*/
- override def mapValues[C](f: B => C): Map[A, C] = new DefaultMap[A, C] {
- override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
- def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
- override def size = self.size
- override def contains(key: A) = self.contains(key)
- def get(key: A) = self.get(key).map(f)
- }
+ override def mapValues[C](f: B => C): Map[A, C] = new MappedValues(f) with DefaultMap[A, C]
/** Collects all keys of this map in a set.
* @return a set containing all keys of this map.
diff --git a/src/library/scala/collection/immutable/MapProxy.scala b/src/library/scala/collection/immutable/MapProxy.scala
index b08b3c9..f3f04ec 100644
--- a/src/library/scala/collection/immutable/MapProxy.scala
+++ b/src/library/scala/collection/immutable/MapProxy.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index 0e32118..d3be299 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -34,7 +34,7 @@ import generic._
*
* @author Paul Phillips
* @version 2.8
- * @define Coll NumericRange
+ * @define Coll `NumericRange`
* @define coll numeric range
* @define mayNotTerminateInf
* @define willNotTerminateInf
@@ -42,7 +42,7 @@ import generic._
abstract class NumericRange[T]
(val start: T, val end: T, val step: T, val isInclusive: Boolean)
(implicit num: Integral[T])
-extends IndexedSeq[T] with Serializable {
+extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
/** Note that NumericRange must be invariant so that constructs
* such as "1L to 10 by 5" do not infer the range type as AnyVal.
*/
@@ -124,7 +124,21 @@ extends IndexedSeq[T] with Serializable {
if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(idx.toString)
else locationAfterN(idx)
}
-
+
+ import NumericRange.defaultOrdering
+
+ override def min[T1 >: T](implicit ord: Ordering[T1]): T =
+ if (ord eq defaultOrdering(num)) {
+ if (num.signum(step) > 0) start
+ else last
+ } else super.min(ord)
+
+ override def max[T1 >: T](implicit ord: Ordering[T1]): T =
+ if (ord eq defaultOrdering(num)) {
+ if (num.signum(step) > 0) last
+ else start
+ } else super.max(ord)
+
// Motivated by the desire for Double ranges with BigDecimal precision,
// we need some way to map a Range and get another Range. This can't be
// done in any fully general way because Ranges are not arbitrary
@@ -172,6 +186,13 @@ extends IndexedSeq[T] with Serializable {
try containsTyped(x.asInstanceOf[T])
catch { case _: ClassCastException => false }
+ final override def sum[B >: T](implicit num: Numeric[B]): B = {
+ import num.Ops
+ if (isEmpty) this.num fromInt 0
+ else if (numRangeElements == 1) head
+ else ((this.num fromInt numRangeElements) * (head + last) / (this.num fromInt 2))
+ }
+
override lazy val hashCode = super.hashCode()
override def equals(other: Any) = other match {
case x: NumericRange[_] =>
@@ -192,6 +213,7 @@ extends IndexedSeq[T] with Serializable {
/** A companion object for numeric ranges.
*/
object NumericRange {
+
/** Calculates the number of elements in a range given start, end, step, and
* whether or not it is inclusive. Throws an exception if step == 0 or
* the number of elements exceeds the maximum Int.
@@ -207,7 +229,7 @@ object NumericRange {
else {
val diff = num.minus(end, start)
val jumps = num.toLong(num.quot(diff, step))
- val remainder = num.toLong(num.rem(diff, step))
+ val remainder = num.rem(diff, step)
val longCount = jumps + (
if (!isInclusive && zero == remainder) 0 else 1
)
@@ -250,5 +272,18 @@ object NumericRange {
new Exclusive(start, end, step)
def inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]): Inclusive[T] =
new Inclusive(start, end, step)
+
+ private[collection] val defaultOrdering = Map[Numeric[_], Ordering[_]](
+ Numeric.BigIntIsIntegral -> Ordering.BigInt,
+ Numeric.IntIsIntegral -> Ordering.Int,
+ Numeric.ShortIsIntegral -> Ordering.Short,
+ Numeric.ByteIsIntegral -> Ordering.Byte,
+ Numeric.CharIsIntegral -> Ordering.Char,
+ Numeric.LongIsIntegral -> Ordering.Long,
+ Numeric.FloatAsIfIntegral -> Ordering.Float,
+ Numeric.DoubleAsIfIntegral -> Ordering.Double,
+ Numeric.BigDecimalAsIfIntegral -> Ordering.BigDecimal
+ )
+
}
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 8e14d98..952107b 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,6 +13,7 @@ package immutable
import java.io._
import scala.util.matching.Regex
+import scala.reflect.ClassTag
/** The `PagedSeq` object defines a lazy implementations of
* a random access sequence.
@@ -25,7 +26,7 @@ object PagedSeq {
final val UndeterminedEnd = Int.MaxValue
/** Constructs a paged sequence from an iterator */
- def fromIterator[T: ClassManifest](source: Iterator[T]): PagedSeq[T] =
+ def fromIterator[T: ClassTag](source: Iterator[T]): PagedSeq[T] =
new PagedSeq[T]((data: Array[T], start: Int, len: Int) => {
var i = 0
while (i < len && source.hasNext) {
@@ -36,7 +37,7 @@ object PagedSeq {
})
/** Constructs a paged sequence from an iterable */
- def fromIterable[T: ClassManifest](source: Iterable[T]): PagedSeq[T] =
+ def fromIterable[T: ClassTag](source: Iterable[T]): PagedSeq[T] =
fromIterator(source.iterator)
/** Constructs a paged character sequence from a string iterator */
@@ -98,7 +99,7 @@ object PagedSeq {
/** Constructs a paged character sequence from a scala.io.Source value
*/
- def fromSource(source: io.Source) =
+ def fromSource(source: scala.io.Source) =
fromLines(source.getLines())
}
@@ -115,21 +116,22 @@ import PagedSeq._
* It returns the number of elements produced, or -1 if end of logical input stream was reached
* before reading any element.
*
- * @tparam T the type of the elements contained in this paged sequence, with a `ClassManifest` context bound.
+ * @tparam T the type of the elements contained in this paged sequence, with an `ClassTag` context bound.
*
* @author Martin Odersky
* @since 2.7
- * @define Coll PagedSeq
+ * @define Coll `PagedSeq`
* @define coll paged sequence
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-class PagedSeq[T: ClassManifest] protected(
+class PagedSeq[T: ClassTag] protected(
more: (Array[T], Int, Int) => Int,
first1: Page[T],
start: Int,
end: Int)
-extends scala.collection.IndexedSeq[T]
+extends scala.collection.AbstractSeq[T]
+ with scala.collection.IndexedSeq[T]
{
def this(more: (Array[T], Int, Int) => Int) = this(more, new Page[T](0), 0, UndeterminedEnd)
@@ -204,7 +206,7 @@ extends scala.collection.IndexedSeq[T]
/** Page containing up to PageSize characters of the input sequence.
*/
-private class Page[T: ClassManifest](val num: Int) {
+private class Page[T: ClassTag](val num: Int) {
private final val PageSize = 4096
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index 7b0b188..7d2ff95 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -1,19 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package immutable
import generic._
import mutable.{ Builder, ListBuffer }
-import annotation.tailrec
+import scala.annotation.tailrec
/** `Queue` objects implement data structures that allow to
* insert and retrieve elements in a first-in-first-out (FIFO) manner.
@@ -32,7 +30,7 @@ import annotation.tailrec
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable_queues "Scala's Collection Library overview"]]
* section on `Immutable Queues` for more information.
*
- * @define Coll immutable.Queue
+ * @define Coll `immutable.Queue`
* @define coll immutable queue
* @define mayNotTerminateInf
* @define willNotTerminateInf
@@ -40,7 +38,8 @@ import annotation.tailrec
@SerialVersionUID(-7622936493364270175L)
class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
- extends LinearSeq[A]
+ extends AbstractSeq[A]
+ with LinearSeq[A]
with GenericTraversableTemplate[A, Queue]
with LinearSeqLike[A, Queue[A]]
with Serializable {
@@ -48,7 +47,7 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
override def companion: GenericCompanion[Queue] = Queue
/** Returns the `n`-th element of this queue.
- * The first element is at position 0.
+ * The first element is at position `0`.
*
* @param n index of the element to return
* @return the element at position `n` in this queue.
@@ -93,37 +92,18 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
*
* @param elem the element to insert
*/
- @deprecated("Use `enqueue` instead", "2.7.2")
- def +[B >: A](elem: B) = enqueue(elem)
-
- /** Creates a new queue with element added at the end
- * of the old queue.
- *
- * @param elem the element to insert
- */
def enqueue[B >: A](elem: B) = new Queue(elem :: in, out)
- /** Returns a new queue with all all elements provided by
- * an <code>Iterable</code> object added at the end of
- * the queue.
- * The elements are prepended in the order they
- * are given out by the iterator.
+ /** Returns a new queue with all elements provided by an `Iterable` object
+ * added at the end of the queue.
*
- * @param iter an iterable object
- */
- @deprecated("Use `enqueue` instead", "2.7.2")
- def +[B >: A](iter: Iterable[B]) = enqueue(iter)
-
- /** Returns a new queue with all elements provided by
- * an <code>Iterable</code> object added at the end of
- * the queue.
- * The elements are prepended in the order they
- * are given out by the iterator.
+ * The elements are prepended in the order they are given out by the
+ * iterator.
*
* @param iter an iterable object
*/
def enqueue[B >: A](iter: Iterable[B]) =
- new Queue(iter.toList.reverse ::: in, out)
+ new Queue(iter.toList reverse_::: in, out)
/** Returns a tuple with the first element in the queue,
* and a new queue with this element removed.
@@ -151,18 +131,15 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
}
/** $factoryInfo
- * @define Coll immutable.Queue
+ * @define Coll `immutable.Queue`
* @define coll immutable queue
*/
object Queue extends SeqFactory[Queue] {
/** $genericCanBuildFromInfo */
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Queue[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Queue[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x.toList))
override def empty[A]: Queue[A] = EmptyQueue.asInstanceOf[Queue[A]]
override def apply[A](xs: A*): Queue[A] = new Queue[A](Nil, xs.toList)
private object EmptyQueue extends Queue[Nothing](Nil, Nil) { }
-
- @deprecated("Use Queue.empty instead", "2.8.0")
- val Empty: Queue[Nothing] = Queue()
}
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index f5082aa..802e166 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,6 @@
package scala.collection.immutable
import scala.collection.parallel.immutable.ParRange
-import annotation.bridge
/** The `Range` class represents integer values in range
* ''[start;end)'' with non-zero step value `step`.
@@ -34,7 +33,6 @@ import annotation.bridge
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#ranges "Scala's Collection Library overview"]]
* section on `Ranges` for more information.
*
- * @define Coll Range
* @define coll range
* @define mayNotTerminateInf
* @define willNotTerminateInf
@@ -44,20 +42,53 @@ import annotation.bridge
*/
@SerialVersionUID(7618862778670199309L)
class Range(val start: Int, val end: Int, val step: Int)
-extends IndexedSeq[Int]
- with collection.CustomParallelizable[Int, ParRange]
+extends scala.collection.AbstractSeq[Int]
+ with IndexedSeq[Int]
+ with scala.collection.CustomParallelizable[Int, ParRange]
with Serializable
{
override def par = new ParRange(this)
- // This member is designed to enforce conditions:
- // (step != 0) && (length <= Int.MaxValue),
- // but cannot be evaluated eagerly because we have a pattern where ranges
- // are constructed like: "x to y by z"
- // The "x to y" piece should not trigger an exception. So the calculation
- // is delayed, which means it will not fail fast for those cases where failing
- // was correct.
- private lazy val numRangeElements: Int = Range.count(start, end, step, isInclusive)
+ private def gap = end.toLong - start.toLong
+ private def isExact = gap % step == 0
+ private def hasStub = isInclusive || !isExact
+ private def longLength = gap / step + ( if (hasStub) 1 else 0 )
+
+ // Check cannot be evaluated eagerly because we have a pattern where
+ // ranges are constructed like: "x to y by z" The "x to y" piece
+ // should not trigger an exception. So the calculation is delayed,
+ // which means it will not fail fast for those cases where failing was
+ // correct.
+ override final val isEmpty = (
+ (start > end && step > 0)
+ || (start < end && step < 0)
+ || (start == end && !isInclusive)
+ )
+ final val numRangeElements: Int = {
+ if (step == 0) throw new IllegalArgumentException("step cannot be 0.")
+ else if (isEmpty) 0
+ else {
+ val len = longLength
+ if (len > scala.Int.MaxValue) -1
+ else len.toInt
+ }
+ }
+ final val lastElement = start + (numRangeElements - 1) * step
+ final val terminalElement = start + numRangeElements * step
+
+ override def last = if (isEmpty) Nil.last else lastElement
+
+ override def min[A1 >: Int](implicit ord: Ordering[A1]): Int =
+ if (ord eq Ordering.Int) {
+ if (step > 0) start
+ else last
+ } else super.min(ord)
+
+ override def max[A1 >: Int](implicit ord: Ordering[A1]): Int =
+ if (ord eq Ordering.Int) {
+ if (step > 0) last
+ else start
+ } else super.max(ord)
protected def copy(start: Int, end: Int, step: Int): Range = new Range(start, end, step)
@@ -70,31 +101,49 @@ extends IndexedSeq[Int]
def isInclusive = false
+ override def size = length
+ override def length = if (numRangeElements < 0) fail() else numRangeElements
+
+ private def description = "%d %s %d by %s".format(start, if (isInclusive) "to" else "until", end, step)
+ private def fail() = throw new IllegalArgumentException(description + ": seqs cannot contain more than Int.MaxValue elements.")
+ private def validateMaxLength() {
+ if (numRangeElements < 0)
+ fail()
+ }
+
+ def validateRangeBoundaries(f: Int => Any): Boolean = {
+ validateMaxLength()
+
+ start != Int.MinValue || end != Int.MinValue || {
+ var count = 0
+ var num = start
+ while (count < numRangeElements) {
+ f(num)
+ count += 1
+ num += step
+ }
+ false
+ }
+ }
+
+ final def apply(idx: Int): Int = {
+ validateMaxLength()
+ if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(idx.toString)
+ else start + (step * idx)
+ }
+
@inline final override def foreach[@specialized(Unit) U](f: Int => U) {
- if (length > 0) {
- val last = this.last
+ if (validateRangeBoundaries(f)) {
var i = start
- while (i != last) {
+ val terminal = terminalElement
+ val step = this.step
+ while (i != terminal) {
f(i)
i += step
}
- f(i)
}
}
- override def length: Int = numRangeElements
- override lazy val last: Int =
- if (length == 0) Nil.last
- else locationAfterN(length - 1)
-
- final override def isEmpty = length == 0
-
- @inline
- final def apply(idx: Int): Int = {
- if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(idx.toString)
- locationAfterN(idx)
- }
-
/** Creates a new range containing the first `n` elements of this range.
*
* $doesNotUseBuilders
@@ -103,8 +152,8 @@ extends IndexedSeq[Int]
* @return a new range consisting of `n` first elements.
*/
final override def take(n: Int): Range = (
- if (n <= 0 || length == 0) newEmptyRange(start)
- else if (n >= length) this
+ if (n <= 0 || isEmpty) newEmptyRange(start)
+ else if (n >= numRangeElements) this
else new Range.Inclusive(start, locationAfterN(n - 1), step)
)
@@ -116,8 +165,8 @@ extends IndexedSeq[Int]
* @return a new range consisting of all the elements of this range except `n` first elements.
*/
final override def drop(n: Int): Range = (
- if (n <= 0 || length == 0) this
- else if (n >= length) newEmptyRange(end)
+ if (n <= 0 || isEmpty) this
+ else if (n >= numRangeElements) newEmptyRange(end)
else copy(locationAfterN(n), end, step)
)
@@ -152,7 +201,7 @@ extends IndexedSeq[Int]
var current = start
var counted = 0
- while (counted < length && p(current)) {
+ while (counted < numRangeElements && p(current)) {
counted += 1
current += step
}
@@ -160,7 +209,7 @@ extends IndexedSeq[Int]
}
// Tests whether a number is within the endpoints, without testing
// whether it is a member of the sequence (i.e. when step > 1.)
- private def isWithinBoundaries(elem: Int) = (length > 0) && (
+ private def isWithinBoundaries(elem: Int) = !isEmpty && (
(step > 0 && start <= elem && elem <= last ) ||
(step < 0 && last <= elem && elem <= start)
)
@@ -189,21 +238,21 @@ extends IndexedSeq[Int]
*
* $doesNotUseBuilders
*/
- final override def takeRight(n: Int): Range = drop(length - n)
+ final override def takeRight(n: Int): Range = drop(numRangeElements - n)
/** Creates a new range consisting of the initial `length - n` elements of the range.
*
* $doesNotUseBuilders
*/
- final override def dropRight(n: Int): Range = take(length - n)
+ final override def dropRight(n: Int): Range = take(numRangeElements - n)
/** Returns the reverse of this range.
*
* $doesNotUseBuilders
*/
final override def reverse: Range =
- if (length > 0) new Range.Inclusive(last, start, -step)
- else this
+ if (isEmpty) this
+ else new Range.Inclusive(last, start, -step)
/** Make range inclusive.
*/
@@ -213,6 +262,12 @@ extends IndexedSeq[Int]
final def contains(x: Int) = isWithinBoundaries(x) && ((x - start) % step == 0)
+ final override def sum[B >: Int](implicit num: Numeric[B]): Int = {
+ if (isEmpty) 0
+ else if (numRangeElements == 1) head
+ else (numRangeElements.toLong * (head + last) / 2).toInt
+ }
+
override def toIterable = this
override def toSeq = this
@@ -220,7 +275,7 @@ extends IndexedSeq[Int]
override def equals(other: Any) = other match {
case x: Range =>
(x canEqual this) && (length == x.length) && (
- (length == 0) || // all empty sequences are equal
+ isEmpty || // all empty sequences are equal
(start == x.start && last == x.last) // same length and same endpoints implies equality
)
case _ =>
@@ -231,7 +286,7 @@ extends IndexedSeq[Int]
*/
override def toString() = {
- val endStr = if (length > Range.MAX_PRINT) ", ... )" else ")"
+ val endStr = if (numRangeElements > Range.MAX_PRINT) ", ... )" else ")"
take(Range.MAX_PRINT).mkString("Range(", ", ", endStr)
}
}
@@ -241,18 +296,36 @@ extends IndexedSeq[Int]
object Range {
private[immutable] val MAX_PRINT = 512 // some arbitrary value
- /** Counts in "Long arithmetic" so we can recognize overflow.
+ /** Counts the number of range elements.
+ * @pre step != 0
+ * If the size of the range exceeds Int.MaxValue, the
+ * result will be negative.
*/
- def count(start: Int, end: Int, step: Int): Int =
- count(start, end, step, false)
-
def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int = {
- // faster path for the common counting range
- if (start >= 0 && end > start && end < scala.Int.MaxValue && step == 1)
- (end - start) + ( if (isInclusive) 1 else 0 )
- else
- NumericRange.count[Long](start, end, step, isInclusive)
+ if (step == 0)
+ throw new IllegalArgumentException("step cannot be 0.")
+
+ val isEmpty = (
+ if (start == end) !isInclusive
+ else if (start < end) step < 0
+ else step > 0
+ )
+ if (isEmpty) 0
+ else {
+ // Counts with Longs so we can recognize too-large ranges.
+ val gap: Long = end.toLong - start.toLong
+ val jumps: Long = gap / step
+ // Whether the size of this range is one larger than the
+ // number of full-sized jumps.
+ val hasStub = isInclusive || (gap % step != 0)
+ val result: Long = jumps + ( if (hasStub) 1 else 0 )
+
+ if (result > scala.Int.MaxValue) -1
+ else result.toInt
+ }
}
+ def count(start: Int, end: Int, step: Int): Int =
+ count(start, end, step, false)
class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) {
// override def par = new ParRange(this)
@@ -265,18 +338,18 @@ object Range {
*/
def apply(start: Int, end: Int, step: Int): Range = new Range(start, end, step)
- /** Make an range from `start` to `end` inclusive with step value 1.
+ /** Make a range from `start` until `end` (exclusive) with step value 1.
*/
def apply(start: Int, end: Int): Range = new Range(start, end, 1)
- /** Make an inclusive range from start to end with given step value.
+ /** Make an inclusive range from `start` to `end` with given step value.
* @note step != 0
*/
- @inline def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Inclusive(start, end, step)
+ def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Inclusive(start, end, step)
- /** Make an inclusive range from start to end with step value 1.
+ /** Make an inclusive range from `start` to `end` with step value 1.
*/
- @inline def inclusive(start: Int, end: Int): Range.Inclusive = new Inclusive(start, end, 1)
+ def inclusive(start: Int, end: Int): Range.Inclusive = new Inclusive(start, end, 1)
// BigInt and Long are straightforward generic ranges.
object BigInt {
@@ -335,10 +408,4 @@ object Range {
def apply(start: Int, end: Int, step: Int) = NumericRange(start, end, step)
def inclusive(start: Int, end: Int, step: Int) = NumericRange.inclusive(start, end, step)
}
-
- @deprecated("use Range instead", "2.9.0")
- trait ByOne extends Range {
-// @bridge override def foreach[@specialized(Unit) U](f: Int => U) =
-// super.foreach(f)
- }
}
diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala
index 4bea8fb..9739e8f 100644
--- a/src/library/scala/collection/immutable/RedBlack.scala
+++ b/src/library/scala/collection/immutable/RedBlack.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,13 +8,17 @@
-package scala.collection
+package scala
+package collection
package immutable
-/** A base class containing the implementations for `TreeMaps` and `TreeSets`.
+/** Old base class that was used by previous implementations of `TreeMaps` and `TreeSets`.
+ *
+ * Deprecated due to various performance bugs (see [[https://issues.scala-lang.org/browse/SI-5331 SI-5331]] for more information).
*
* @since 2.3
*/
+ at deprecated("use `TreeMap` or `TreeSet` instead", "2.10.0")
@SerialVersionUID(8691885935445612921L)
abstract class RedBlack[A] extends Serializable {
@@ -35,12 +39,8 @@ abstract class RedBlack[A] extends Serializable {
def delete(k: A): Tree[B] = blacken(del(k))
def range(from: Option[A], until: Option[A]): Tree[B] = blacken(rng(from, until))
def foreach[U](f: (A, B) => U)
- @deprecated("use `foreach' instead", "2.8.0")
- def visit[T](input: T)(f: (T, A, B) => (Boolean, T)): (Boolean, T)
def toStream: Stream[(A,B)]
def iterator: Iterator[(A, B)]
- @deprecated("use `iterator' instead", "2.8.0")
- def elements = iterator
def upd[B1 >: B](k: A, v: B1): Tree[B1]
def del(k: A): Tree[B]
def smallest: NonEmpty[B]
@@ -165,14 +165,6 @@ abstract class RedBlack[A] extends Serializable {
right foreach f
}
- @deprecated("use `foreach' instead", "2.8.0")
- def visit[T](input: T)(f: (T,A,B) => (Boolean, T)): (Boolean, T) = {
- val left = this.left.visit(input)(f)
- if (!left._1) return left
- val middle = f(left._2, key, value)
- if (!middle._1) return middle
- return this.right.visit(middle._2)(f)
- }
override def rng(from: Option[A], until: Option[A]): Tree[B] = {
if (from == None && until == None) return this
if (from != None && isSmaller(key, from.get)) return right.rng(from, until);
@@ -281,9 +273,6 @@ abstract class RedBlack[A] extends Serializable {
def foreach[U](f: (A, Nothing) => U) {}
- @deprecated("use `foreach' instead", "2.8.0")
- def visit[T](input: T)(f: (T, A, Nothing) => (Boolean, T)) = (true, input)
-
def rng(from: Option[A], until: Option[A]) = this
def first = throw new NoSuchElementException("empty map")
def last = throw new NoSuchElementException("empty map")
@@ -302,5 +291,3 @@ abstract class RedBlack[A] extends Serializable {
def isBlack = true
}
}
-
-
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala
new file mode 100644
index 0000000..0254e9c
--- /dev/null
+++ b/src/library/scala/collection/immutable/RedBlackTree.scala
@@ -0,0 +1,496 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala
+package collection
+package immutable
+
+import scala.annotation.tailrec
+import scala.annotation.meta.getter
+
+/** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`.
+ *
+ * Implementation note: since efficiency is important for data structures this implementation
+ * uses <code>null</code> to represent empty trees. This also means pattern matching cannot
+ * easily be used. The API represented by the RedBlackTree object tries to hide these
+ * optimizations behind a reasonably clean API.
+ *
+ * @since 2.10
+ */
+private[immutable]
+object RedBlackTree {
+
+ def isEmpty(tree: Tree[_, _]): Boolean = tree eq null
+
+ def contains[A](tree: Tree[A, _], x: A)(implicit ordering: Ordering[A]): Boolean = lookup(tree, x) ne null
+ def get[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Option[B] = lookup(tree, x) match {
+ case null => None
+ case tree => Some(tree.value)
+ }
+
+ @tailrec
+ def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else {
+ val cmp = ordering.compare(x, tree.key)
+ if (cmp < 0) lookup(tree.left, x)
+ else if (cmp > 0) lookup(tree.right, x)
+ else tree
+ }
+
+ def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count
+ def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v, overwrite))
+ def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k))
+ def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match {
+ case (Some(from), Some(until)) => this.range(tree, from, until)
+ case (Some(from), None) => this.from(tree, from)
+ case (None, Some(until)) => this.until(tree, until)
+ case (None, None) => tree
+ }
+ def range[A: Ordering, B](tree: Tree[A, B], from: A, until: A): Tree[A, B] = blacken(doRange(tree, from, until))
+ def from[A: Ordering, B](tree: Tree[A, B], from: A): Tree[A, B] = blacken(doFrom(tree, from))
+ def to[A: Ordering, B](tree: Tree[A, B], to: A): Tree[A, B] = blacken(doTo(tree, to))
+ def until[A: Ordering, B](tree: Tree[A, B], key: A): Tree[A, B] = blacken(doUntil(tree, key))
+
+ def drop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doDrop(tree, n))
+ def take[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doTake(tree, n))
+ def slice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = blacken(doSlice(tree, from, until))
+
+ def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = {
+ if (tree eq null) throw new NoSuchElementException("empty map")
+ var result = tree
+ while (result.left ne null) result = result.left
+ result
+ }
+ def greatest[A, B](tree: Tree[A, B]): Tree[A, B] = {
+ if (tree eq null) throw new NoSuchElementException("empty map")
+ var result = tree
+ while (result.right ne null) result = result.right
+ result
+ }
+
+ def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = if (tree ne null) {
+ if (tree.left ne null) foreach(tree.left, f)
+ f((tree.key, tree.value))
+ if (tree.right ne null) foreach(tree.right, f)
+ }
+ def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = if (tree ne null) {
+ if (tree.left ne null) foreachKey(tree.left, f)
+ f(tree.key)
+ if (tree.right ne null) foreachKey(tree.right, f)
+ }
+
+ def iterator[A, B](tree: Tree[A, B]): Iterator[(A, B)] = new EntriesIterator(tree)
+ def keysIterator[A, _](tree: Tree[A, _]): Iterator[A] = new KeysIterator(tree)
+ def valuesIterator[_, B](tree: Tree[_, B]): Iterator[B] = new ValuesIterator(tree)
+
+ @tailrec
+ def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
+ val count = this.count(tree.left)
+ if (n < count) nth(tree.left, n)
+ else if (n > count) nth(tree.right, n - count - 1)
+ else tree
+ }
+
+ def isBlack(tree: Tree[_, _]) = (tree eq null) || isBlackTree(tree)
+
+ private[this] def isRedTree(tree: Tree[_, _]) = tree.isInstanceOf[RedTree[_, _]]
+ private[this] def isBlackTree(tree: Tree[_, _]) = tree.isInstanceOf[BlackTree[_, _]]
+
+ private[this] def blacken[A, B](t: Tree[A, B]): Tree[A, B] = if (t eq null) null else t.black
+
+ private[this] def mkTree[A, B](isBlack: Boolean, k: A, v: B, l: Tree[A, B], r: Tree[A, B]) =
+ if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r)
+
+ private[this] def balanceLeft[A, B, B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[A, B1], d: Tree[A, B1]): Tree[A, B1] = {
+ if (isRedTree(l) && isRedTree(l.left))
+ RedTree(l.key, l.value, BlackTree(l.left.key, l.left.value, l.left.left, l.left.right), BlackTree(z, zv, l.right, d))
+ else if (isRedTree(l) && isRedTree(l.right))
+ RedTree(l.right.key, l.right.value, BlackTree(l.key, l.value, l.left, l.right.left), BlackTree(z, zv, l.right.right, d))
+ else
+ mkTree(isBlack, z, zv, l, d)
+ }
+ private[this] def balanceRight[A, B, B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[A, B1], r: Tree[A, B1]): Tree[A, B1] = {
+ if (isRedTree(r) && isRedTree(r.left))
+ RedTree(r.left.key, r.left.value, BlackTree(x, xv, a, r.left.left), BlackTree(r.key, r.value, r.left.right, r.right))
+ else if (isRedTree(r) && isRedTree(r.right))
+ RedTree(r.key, r.value, BlackTree(x, xv, a, r.left), BlackTree(r.right.key, r.right.value, r.right.left, r.right.right))
+ else
+ mkTree(isBlack, x, xv, a, r)
+ }
+ private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) {
+ RedTree(k, v, null, null)
+ } else {
+ val cmp = ordering.compare(k, tree.key)
+ if (cmp < 0) balanceLeft(isBlackTree(tree), tree.key, tree.value, upd(tree.left, k, v, overwrite), tree.right)
+ else if (cmp > 0) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, upd(tree.right, k, v, overwrite))
+ else if (overwrite || k != tree.key) mkTree(isBlackTree(tree), k, v, tree.left, tree.right)
+ else tree
+ }
+ private[this] def updNth[A, B, B1 >: B](tree: Tree[A, B], idx: Int, k: A, v: B1, overwrite: Boolean): Tree[A, B1] = if (tree eq null) {
+ RedTree(k, v, null, null)
+ } else {
+ val rank = count(tree.left) + 1
+ if (idx < rank) balanceLeft(isBlackTree(tree), tree.key, tree.value, updNth(tree.left, idx, k, v, overwrite), tree.right)
+ else if (idx > rank) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, updNth(tree.right, idx - rank, k, v, overwrite))
+ else if (overwrite) mkTree(isBlackTree(tree), k, v, tree.left, tree.right)
+ else tree
+ }
+
+ /* Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
+ * http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html */
+ private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else {
+ def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) {
+ if (isRedTree(tr)) {
+ RedTree(x, xv, tl.black, tr.black)
+ } else if (isRedTree(tl.left)) {
+ RedTree(tl.key, tl.value, tl.left.black, BlackTree(x, xv, tl.right, tr))
+ } else if (isRedTree(tl.right)) {
+ RedTree(tl.right.key, tl.right.value, BlackTree(tl.key, tl.value, tl.left, tl.right.left), BlackTree(x, xv, tl.right.right, tr))
+ } else {
+ BlackTree(x, xv, tl, tr)
+ }
+ } else if (isRedTree(tr)) {
+ if (isRedTree(tr.right)) {
+ RedTree(tr.key, tr.value, BlackTree(x, xv, tl, tr.left), tr.right.black)
+ } else if (isRedTree(tr.left)) {
+ RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), BlackTree(tr.key, tr.value, tr.left.right, tr.right))
+ } else {
+ BlackTree(x, xv, tl, tr)
+ }
+ } else {
+ BlackTree(x, xv, tl, tr)
+ }
+ def subl(t: Tree[A, B]) =
+ if (t.isInstanceOf[BlackTree[_, _]]) t.red
+ else sys.error("Defect: invariance violation; expected black, got "+t)
+
+ def balLeft(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) {
+ RedTree(x, xv, tl.black, tr)
+ } else if (isBlackTree(tr)) {
+ balance(x, xv, tl, tr.red)
+ } else if (isRedTree(tr) && isBlackTree(tr.left)) {
+ RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), balance(tr.key, tr.value, tr.left.right, subl(tr.right)))
+ } else {
+ sys.error("Defect: invariance violation")
+ }
+ def balRight(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tr)) {
+ RedTree(x, xv, tl, tr.black)
+ } else if (isBlackTree(tl)) {
+ balance(x, xv, tl.red, tr)
+ } else if (isRedTree(tl) && isBlackTree(tl.right)) {
+ RedTree(tl.right.key, tl.right.value, balance(tl.key, tl.value, subl(tl.left), tl.right.left), BlackTree(x, xv, tl.right.right, tr))
+ } else {
+ sys.error("Defect: invariance violation")
+ }
+ def delLeft = if (isBlackTree(tree.left)) balLeft(tree.key, tree.value, del(tree.left, k), tree.right) else RedTree(tree.key, tree.value, del(tree.left, k), tree.right)
+ def delRight = if (isBlackTree(tree.right)) balRight(tree.key, tree.value, tree.left, del(tree.right, k)) else RedTree(tree.key, tree.value, tree.left, del(tree.right, k))
+ def append(tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = if (tl eq null) {
+ tr
+ } else if (tr eq null) {
+ tl
+ } else if (isRedTree(tl) && isRedTree(tr)) {
+ val bc = append(tl.right, tr.left)
+ if (isRedTree(bc)) {
+ RedTree(bc.key, bc.value, RedTree(tl.key, tl.value, tl.left, bc.left), RedTree(tr.key, tr.value, bc.right, tr.right))
+ } else {
+ RedTree(tl.key, tl.value, tl.left, RedTree(tr.key, tr.value, bc, tr.right))
+ }
+ } else if (isBlackTree(tl) && isBlackTree(tr)) {
+ val bc = append(tl.right, tr.left)
+ if (isRedTree(bc)) {
+ RedTree(bc.key, bc.value, BlackTree(tl.key, tl.value, tl.left, bc.left), BlackTree(tr.key, tr.value, bc.right, tr.right))
+ } else {
+ balLeft(tl.key, tl.value, tl.left, BlackTree(tr.key, tr.value, bc, tr.right))
+ }
+ } else if (isRedTree(tr)) {
+ RedTree(tr.key, tr.value, append(tl, tr.left), tr.right)
+ } else if (isRedTree(tl)) {
+ RedTree(tl.key, tl.value, tl.left, append(tl.right, tr))
+ } else {
+ sys.error("unmatched tree on append: " + tl + ", " + tr)
+ }
+
+ val cmp = ordering.compare(k, tree.key)
+ if (cmp < 0) delLeft
+ else if (cmp > 0) delRight
+ else append(tree.left, tree.right)
+ }
+
+ private[this] def doFrom[A, B](tree: Tree[A, B], from: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+ if (tree eq null) return null
+ if (ordering.lt(tree.key, from)) return doFrom(tree.right, from)
+ val newLeft = doFrom(tree.left, from)
+ if (newLeft eq tree.left) tree
+ else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false)
+ else rebalance(tree, newLeft, tree.right)
+ }
+ private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+ if (tree eq null) return null
+ if (ordering.lt(to, tree.key)) return doTo(tree.left, to)
+ val newRight = doTo(tree.right, to)
+ if (newRight eq tree.right) tree
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
+ else rebalance(tree, tree.left, newRight)
+ }
+ private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+ if (tree eq null) return null
+ if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until)
+ val newRight = doUntil(tree.right, until)
+ if (newRight eq tree.right) tree
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
+ else rebalance(tree, tree.left, newRight)
+ }
+ private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+ if (tree eq null) return null
+ if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until);
+ if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until);
+ val newLeft = doFrom(tree.left, from)
+ val newRight = doUntil(tree.right, until)
+ if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
+ else if (newLeft eq null) upd(newRight, tree.key, tree.value, false);
+ else if (newRight eq null) upd(newLeft, tree.key, tree.value, false);
+ else rebalance(tree, newLeft, newRight)
+ }
+
+ private[this] def doDrop[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
+ if (n <= 0) return tree
+ if (n >= this.count(tree)) return null
+ val count = this.count(tree.left)
+ if (n > count) return doDrop(tree.right, n - count - 1)
+ val newLeft = doDrop(tree.left, n)
+ if (newLeft eq tree.left) tree
+ else if (newLeft eq null) updNth(tree.right, n - count - 1, tree.key, tree.value, false)
+ else rebalance(tree, newLeft, tree.right)
+ }
+ private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
+ if (n <= 0) return null
+ if (n >= this.count(tree)) return tree
+ val count = this.count(tree.left)
+ if (n <= count) return doTake(tree.left, n)
+ val newRight = doTake(tree.right, n - count - 1)
+ if (newRight eq tree.right) tree
+ else if (newRight eq null) updNth(tree.left, n, tree.key, tree.value, false)
+ else rebalance(tree, tree.left, newRight)
+ }
+ private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = {
+ if (tree eq null) return null
+ val count = this.count(tree.left)
+ if (from > count) return doSlice(tree.right, from - count - 1, until - count - 1)
+ if (until <= count) return doSlice(tree.left, from, until)
+ val newLeft = doDrop(tree.left, from)
+ val newRight = doTake(tree.right, until - count - 1)
+ if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
+ else if (newLeft eq null) updNth(newRight, from - count - 1, tree.key, tree.value, false)
+ else if (newRight eq null) updNth(newLeft, until, tree.key, tree.value, false)
+ else rebalance(tree, newLeft, newRight)
+ }
+
+ // The zipper returned might have been traversed left-most (always the left child)
+ // or right-most (always the right child). Left trees are traversed right-most,
+ // and right trees are traversed leftmost.
+
+ // Returns the zipper for the side with deepest black nodes depth, a flag
+ // indicating whether the trees were unbalanced at all, and a flag indicating
+ // whether the zipper was traversed left-most or right-most.
+
+ // If the trees were balanced, returns an empty zipper
+ private[this] def compareDepth[A, B](left: Tree[A, B], right: Tree[A, B]): (List[Tree[A, B]], Boolean, Boolean, Int) = {
+ // Once a side is found to be deeper, unzip it to the bottom
+ def unzip(zipper: List[Tree[A, B]], leftMost: Boolean): List[Tree[A, B]] = {
+ val next = if (leftMost) zipper.head.left else zipper.head.right
+ next match {
+ case null => zipper
+ case node => unzip(node :: zipper, leftMost)
+ }
+ }
+
+ // Unzip left tree on the rightmost side and right tree on the leftmost side until one is
+ // found to be deeper, or the bottom is reached
+ def unzipBoth(left: Tree[A, B],
+ right: Tree[A, B],
+ leftZipper: List[Tree[A, B]],
+ rightZipper: List[Tree[A, B]],
+ smallerDepth: Int): (List[Tree[A, B]], Boolean, Boolean, Int) = {
+ if (isBlackTree(left) && isBlackTree(right)) {
+ unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth + 1)
+ } else if (isRedTree(left) && isRedTree(right)) {
+ unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth)
+ } else if (isRedTree(right)) {
+ unzipBoth(left, right.left, leftZipper, right :: rightZipper, smallerDepth)
+ } else if (isRedTree(left)) {
+ unzipBoth(left.right, right, left :: leftZipper, rightZipper, smallerDepth)
+ } else if ((left eq null) && (right eq null)) {
+ (Nil, true, false, smallerDepth)
+ } else if ((left eq null) && isBlackTree(right)) {
+ val leftMost = true
+ (unzip(right :: rightZipper, leftMost), false, leftMost, smallerDepth)
+ } else if (isBlackTree(left) && (right eq null)) {
+ val leftMost = false
+ (unzip(left :: leftZipper, leftMost), false, leftMost, smallerDepth)
+ } else {
+ sys.error("unmatched trees in unzip: " + left + ", " + right)
+ }
+ }
+ unzipBoth(left, right, Nil, Nil, 0)
+ }
+
+ private[this] def rebalance[A, B](tree: Tree[A, B], newLeft: Tree[A, B], newRight: Tree[A, B]) = {
+ // This is like drop(n-1), but only counting black nodes
+ def findDepth(zipper: List[Tree[A, B]], depth: Int): List[Tree[A, B]] = zipper match {
+ case head :: tail if isBlackTree(head) =>
+ if (depth == 1) zipper else findDepth(tail, depth - 1)
+ case _ :: tail => findDepth(tail, depth)
+ case Nil => sys.error("Defect: unexpected empty zipper while computing range")
+ }
+
+ // Blackening the smaller tree avoids balancing problems on union;
+ // this can't be done later, though, or it would change the result of compareDepth
+ val blkNewLeft = blacken(newLeft)
+ val blkNewRight = blacken(newRight)
+ val (zipper, levelled, leftMost, smallerDepth) = compareDepth(blkNewLeft, blkNewRight)
+
+ if (levelled) {
+ BlackTree(tree.key, tree.value, blkNewLeft, blkNewRight)
+ } else {
+ val zipFrom = findDepth(zipper, smallerDepth)
+ val union = if (leftMost) {
+ RedTree(tree.key, tree.value, blkNewLeft, zipFrom.head)
+ } else {
+ RedTree(tree.key, tree.value, zipFrom.head, blkNewRight)
+ }
+ val zippedTree = zipFrom.tail.foldLeft(union: Tree[A, B]) { (tree, node) =>
+ if (leftMost)
+ balanceLeft(isBlackTree(node), node.key, node.value, tree, node.right)
+ else
+ balanceRight(isBlackTree(node), node.key, node.value, node.left, tree)
+ }
+ zippedTree
+ }
+ }
+
+ /*
+ * Forcing direct fields access using the @inline annotation helps speed up
+ * various operations (especially smallest/greatest and update/delete).
+ *
+ * Unfortunately the direct field access is not guaranteed to work (but
+ * works on the current implementation of the Scala compiler).
+ *
+ * An alternative is to implement the these classes using plain old Java code...
+ */
+ sealed abstract class Tree[A, +B](
+ @(inline @getter) final val key: A,
+ @(inline @getter) final val value: B,
+ @(inline @getter) final val left: Tree[A, B],
+ @(inline @getter) final val right: Tree[A, B])
+ extends Serializable {
+ @(inline @getter) final val count: Int = 1 + RedBlackTree.count(left) + RedBlackTree.count(right)
+ def black: Tree[A, B]
+ def red: Tree[A, B]
+ }
+ final class RedTree[A, +B](key: A,
+ value: B,
+ left: Tree[A, B],
+ right: Tree[A, B]) extends Tree[A, B](key, value, left, right) {
+ override def black: Tree[A, B] = BlackTree(key, value, left, right)
+ override def red: Tree[A, B] = this
+ override def toString: String = "RedTree(" + key + ", " + value + ", " + left + ", " + right + ")"
+ }
+ final class BlackTree[A, +B](key: A,
+ value: B,
+ left: Tree[A, B],
+ right: Tree[A, B]) extends Tree[A, B](key, value, left, right) {
+ override def black: Tree[A, B] = this
+ override def red: Tree[A, B] = RedTree(key, value, left, right)
+ override def toString: String = "BlackTree(" + key + ", " + value + ", " + left + ", " + right + ")"
+ }
+
+ object RedTree {
+ @inline def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new RedTree(key, value, left, right)
+ def unapply[A, B](t: RedTree[A, B]) = Some((t.key, t.value, t.left, t.right))
+ }
+ object BlackTree {
+ @inline def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new BlackTree(key, value, left, right)
+ def unapply[A, B](t: BlackTree[A, B]) = Some((t.key, t.value, t.left, t.right))
+ }
+
+ private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B]) extends Iterator[R] {
+ protected[this] def nextResult(tree: Tree[A, B]): R
+
+ override def hasNext: Boolean = next ne null
+
+ override def next: R = next match {
+ case null =>
+ throw new NoSuchElementException("next on empty iterator")
+ case tree =>
+ next = findNext(tree.right)
+ nextResult(tree)
+ }
+
+ @tailrec
+ private[this] def findNext(tree: Tree[A, B]): Tree[A, B] = {
+ if (tree eq null) popPath()
+ else if (tree.left eq null) tree
+ else {
+ pushPath(tree)
+ findNext(tree.left)
+ }
+ }
+
+ private[this] def pushPath(tree: Tree[A, B]) {
+ try {
+ path(index) = tree
+ index += 1
+ } catch {
+ case _: ArrayIndexOutOfBoundsException =>
+ /*
+ * Either the tree became unbalanced or we calculated the maximum height incorrectly.
+ * To avoid crashing the iterator we expand the path array. Obviously this should never
+ * happen...
+ *
+ * An exception handler is used instead of an if-condition to optimize the normal path.
+ * This makes a large difference in iteration speed!
+ */
+ assert(index >= path.length)
+ path :+= null
+ pushPath(tree)
+ }
+ }
+ private[this] def popPath(): Tree[A, B] = if (index == 0) null else {
+ index -= 1
+ path(index)
+ }
+
+ private[this] var path = if (tree eq null) null else {
+ /*
+ * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5]
+ * the maximum height of a red-black tree is 2*log_2(n + 2) - 2.
+ *
+ * According to {@see Integer#numberOfLeadingZeros} ceil(log_2(n)) = (32 - Integer.numberOfLeadingZeros(n - 1))
+ *
+ * We also don't store the deepest nodes in the path so the maximum path length is further reduced by one.
+ */
+ val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(tree.count + 2 - 1)) - 2 - 1
+ new Array[Tree[A, B]](maximumHeight)
+ }
+ private[this] var index = 0
+ private[this] var next: Tree[A, B] = findNext(tree)
+ }
+
+ private[this] class EntriesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, (A, B)](tree) {
+ override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value)
+ }
+
+ private[this] class KeysIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, A](tree) {
+ override def nextResult(tree: Tree[A, B]) = tree.key
+ }
+
+ private[this] class ValuesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, B](tree) {
+ override def nextResult(tree: Tree[A, B]) = tree.value
+ }
+}
diff --git a/src/library/scala/collection/immutable/Seq.scala b/src/library/scala/collection/immutable/Seq.scala
index 11e56df..14610ae 100644
--- a/src/library/scala/collection/immutable/Seq.scala
+++ b/src/library/scala/collection/immutable/Seq.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -19,7 +19,7 @@ import parallel.immutable.ParSeq
* that are guaranteed immutable.
*
* $seqInfo
- * @define Coll immutable.Seq
+ * @define Coll `immutable.Seq`
* @define coll immutable sequence
*/
trait Seq[+A] extends Iterable[A]
@@ -36,11 +36,11 @@ trait Seq[+A] extends Iterable[A]
}
/** $factoryInfo
- * @define Coll immutable.Seq
+ * @define Coll `immutable.Seq`
* @define coll immutable sequence
*/
object Seq extends SeqFactory[Seq] {
/** genericCanBuildFromInfo */
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, Seq[A]] = new mutable.ListBuffer
}
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index ce2b3b1..8433c2b 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -21,7 +21,7 @@ import parallel.immutable.ParSet
* @since 1.0
* @author Matthias Zenger
* @author Martin Odersky
- * @define Coll immutable.Set
+ * @define Coll `immutable.Set`
* @define coll immutable set
*/
trait Set[A] extends Iterable[A]
@@ -38,7 +38,7 @@ trait Set[A] extends Iterable[A]
}
/** $factoryInfo
- * @define Coll immutable.Set
+ * @define Coll `immutable.Set`
* @define coll immutable set
*/
object Set extends ImmutableSetFactory[Set] {
@@ -46,10 +46,8 @@ object Set extends ImmutableSetFactory[Set] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = setCanBuildFrom[A]
override def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]]
- private val hashSeed = "Set".hashCode
-
/** An optimized representation for immutable empty sets */
- private object EmptySet extends Set[Any] with Serializable {
+ private object EmptySet extends AbstractSet[Any] with Set[Any] with Serializable {
override def size: Int = 0
def contains(elem: Any): Boolean = false
def + (elem: Any): Set[Any] = new Set1(elem)
@@ -58,19 +56,9 @@ object Set extends ImmutableSetFactory[Set] {
override def foreach[U](f: Any => U): Unit = {}
}
- @deprecated("use `Set.empty' instead", "2.8.0")
- class EmptySet[A] extends Set[A] with Serializable {
- override def size: Int = 0
- def contains(elem: A): Boolean = false
- def + (elem: A): Set[A] = new Set1(elem)
- def - (elem: A): Set[A] = this
- def iterator: Iterator[A] = Iterator.empty
- override def foreach[U](f: A => U): Unit = {}
- }
-
/** An optimized representation for immutable sets of size 1 */
@SerialVersionUID(1233385750652442003L)
- class Set1[A] private[collection] (elem1: A) extends Set[A] with Serializable {
+ class Set1[A] private[collection] (elem1: A) extends AbstractSet[A] with Set[A] with Serializable {
override def size: Int = 1
def contains(elem: A): Boolean =
elem == elem1
@@ -89,7 +77,7 @@ object Set extends ImmutableSetFactory[Set] {
/** An optimized representation for immutable sets of size 2 */
@SerialVersionUID(-6443011234944830092L)
- class Set2[A] private[collection] (elem1: A, elem2: A) extends Set[A] with Serializable {
+ class Set2[A] private[collection] (elem1: A, elem2: A) extends AbstractSet[A] with Set[A] with Serializable {
override def size: Int = 2
def contains(elem: A): Boolean =
elem == elem1 || elem == elem2
@@ -109,7 +97,7 @@ object Set extends ImmutableSetFactory[Set] {
/** An optimized representation for immutable sets of size 3 */
@SerialVersionUID(-3590273538119220064L)
- class Set3[A] private[collection] (elem1: A, elem2: A, elem3: A) extends Set[A] with Serializable {
+ class Set3[A] private[collection] (elem1: A, elem2: A, elem3: A) extends AbstractSet[A] with Set[A] with Serializable {
override def size: Int = 3
def contains(elem: A): Boolean =
elem == elem1 || elem == elem2 || elem == elem3
@@ -130,7 +118,7 @@ object Set extends ImmutableSetFactory[Set] {
/** An optimized representation for immutable sets of size 4 */
@SerialVersionUID(-3622399588156184395L)
- class Set4[A] private[collection] (elem1: A, elem2: A, elem3: A, elem4: A) extends Set[A] with Serializable {
+ class Set4[A] private[collection] (elem1: A, elem2: A, elem3: A, elem4: A) extends AbstractSet[A] with Set[A] with Serializable {
override def size: Int = 4
def contains(elem: A): Boolean =
elem == elem1 || elem == elem2 || elem == elem3 || elem == elem4
diff --git a/src/library/scala/collection/immutable/SetProxy.scala b/src/library/scala/collection/immutable/SetProxy.scala
index 9c1e2f0..06c6843 100644
--- a/src/library/scala/collection/immutable/SetProxy.scala
+++ b/src/library/scala/collection/immutable/SetProxy.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -24,7 +24,7 @@ package immutable
trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] {
override def repr = this
private def newProxy[B >: A](newSelf: Set[B]): SetProxy[B] =
- new SetProxy[B] { val self = newSelf }
+ new AbstractSet[B] with SetProxy[B] { val self = newSelf }
override def empty = newProxy(self.empty)
override def + (elem: A) = newProxy(self + elem)
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index 902a0f8..eb04231 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,8 +13,7 @@ package immutable
import generic._
import mutable.Builder
-import annotation.unchecked.uncheckedVariance
-import annotation.bridge
+import scala.annotation.unchecked.uncheckedVariance
/** A map whose keys are sorted.
*
@@ -31,7 +30,9 @@ import annotation.bridge
trait SortedMap[A, +B] extends Map[A, B]
with scala.collection.SortedMap[A, B]
with MapLike[A, B, SortedMap[A, B]]
- with SortedMapLike[A, B, SortedMap[A, B]] { self =>
+ with SortedMapLike[A, B, SortedMap[A, B]]
+{
+self =>
override protected[this] def newBuilder : Builder[(A, B), SortedMap[A, B]] =
SortedMap.newBuilder[A, B]
@@ -78,7 +79,16 @@ trait SortedMap[A, +B] extends Map[A, B]
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
- @bridge def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): SortedMap[A, B1] = ++(xs: GenTraversableOnce[(A, B1)])
+ override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
+ implicit def ordering: Ordering[A] = self.ordering
+ override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
+ }
+
+ override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
+ implicit def ordering: Ordering[A] = self.ordering
+ override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
+ }
+
}
/** $factoryInfo
@@ -89,4 +99,20 @@ object SortedMap extends ImmutableSortedMapFactory[SortedMap] {
/** $sortedMapCanBuildFromInfo */
implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), SortedMap[A, B]] = new SortedMapCanBuildFrom[A, B]
def empty[A, B](implicit ord: Ordering[A]): SortedMap[A, B] = TreeMap.empty[A, B]
+
+ private[collection] trait Default[A, +B] extends SortedMap[A, B] with scala.collection.SortedMap.Default[A, B] {
+ self =>
+ override def +[B1 >: B](kv: (A, B1)): SortedMap[A, B1] = {
+ val b = SortedMap.newBuilder[A, B1]
+ b ++= this
+ b += ((kv._1, kv._2))
+ b.result
+ }
+
+ override def - (key: A): SortedMap[A, B] = {
+ val b = newBuilder
+ for (kv <- this; if kv._1 != key) b += kv
+ b.result
+ }
+ }
}
diff --git a/src/library/scala/collection/immutable/SortedSet.scala b/src/library/scala/collection/immutable/SortedSet.scala
index 50cd544..3f75d50 100644
--- a/src/library/scala/collection/immutable/SortedSet.scala
+++ b/src/library/scala/collection/immutable/SortedSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -21,7 +21,7 @@ import mutable.Builder
* @author Martin Odersky
* @version 2.8
* @since 2.4
- * @define Coll immutable.SortedSet
+ * @define Coll `immutable.SortedSet`
* @define coll immutable sorted set
*/
trait SortedSet[A] extends Set[A] with scala.collection.SortedSet[A] with SortedSetLike[A, SortedSet[A]] {
@@ -30,11 +30,13 @@ trait SortedSet[A] extends Set[A] with scala.collection.SortedSet[A] with Sorted
}
/** $factoryInfo
- * @define Coll immutable.SortedSet
+ * @define Coll `immutable.SortedSet`
* @define coll immutable sorted set
*/
object SortedSet extends ImmutableSortedSetFactory[SortedSet] {
/** $sortedSetCanBuildFromInfo */
- implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = new SortedSetCanBuildFrom[A]
+ def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = newCanBuildFrom[A]
def empty[A](implicit ord: Ordering[A]): SortedSet[A] = TreeSet.empty[A]
+ // Force a declaration here so that BitSet's (which does not inherit from SortedSetFactory) can be more specific
+ override implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, SortedSet[A]] = super.newCanBuildFrom
}
diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala
index 7330964..357e9a1 100644
--- a/src/library/scala/collection/immutable/Stack.scala
+++ b/src/library/scala/collection/immutable/Stack.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package immutable
@@ -15,16 +13,13 @@ import generic._
import mutable.{ ArrayBuffer, Builder }
/** $factoryInfo
- * @define Coll immutable.Stack
+ * @define Coll `immutable.Stack`
* @define coll immutable stack
*/
object Stack extends SeqFactory[Stack] {
/** $genericCanBuildFromInfo */
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Stack[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Stack[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, Stack[A]] = new ArrayBuffer[A] mapResult (buf => new Stack(buf.toList))
-
- @deprecated("Use Stack.empty instead", "2.8.0")
- val Empty: Stack[Nothing] = Stack()
}
/** This class implements immutable stacks using a list-based data
@@ -42,7 +37,7 @@ object Stack extends SeqFactory[Stack] {
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable_stacks "Scala's Collection Library overview"]]
* section on `Immutable stacks` for more information.
*
- * @define Coll immutable.Stack
+ * @define Coll `immutable.Stack`
* @define coll immutable stack
* @define orderDependent
* @define orderDependentFold
@@ -51,7 +46,8 @@ object Stack extends SeqFactory[Stack] {
*/
@SerialVersionUID(1976480595012942526L)
class Stack[+A] protected (protected val elems: List[A])
- extends LinearSeq[A]
+ extends AbstractSeq[A]
+ with LinearSeq[A]
with GenericTraversableTemplate[A, Stack]
with LinearSeqOptimized[A, Stack[A]]
with Serializable {
@@ -88,7 +84,7 @@ class Stack[+A] protected (protected val elems: List[A])
* the stack. The last element returned by the traversable object
* will be on top of the new stack.
*
- * @param elems the iterator object.
+ * @param xs the iterator object.
* @return the stack with the new elements on top.
*/
def pushAll[B >: A](xs: TraversableOnce[B]): Stack[B] =
@@ -105,7 +101,7 @@ class Stack[+A] protected (protected val elems: List[A])
else throw new NoSuchElementException("top of empty stack")
/** Removes the top element from the stack.
- * Note: should return <code>(A, Stack[A])</code> as for queues (mics)
+ * Note: should return `(A, Stack[A])` as for queues (mics)
*
* @throws Predef.NoSuchElementException
* @return the new stack without the former top element.
@@ -132,4 +128,3 @@ class Stack[+A] protected (protected val elems: List[A])
*/
override def toString() = elems.mkString("Stack(", ", ", ")")
}
-
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 057cde9..5bb4ef5 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package immutable
@@ -15,6 +13,7 @@ import generic._
import mutable.{Builder, StringBuilder, LazyBuilder, ListBuffer}
import scala.annotation.tailrec
import Stream.cons
+import scala.language.implicitConversions
/** The class `Stream` implements lazy lists where elements
* are only evaluated when they are needed. Here is an example:
@@ -138,7 +137,7 @@ import Stream.cons
* val it3 = new Iterator[Int] {
* var i = -1
* def hasNext = true
- * def next: Int = { i += 1; i }
+ * def next(): Int = { i += 1; i }
* }
* loop("Iterator3: ", it3.next, it3)
* }}}
@@ -178,12 +177,13 @@ import Stream.cons
* section on `Streams` for more information.
* @define naturalsEx def naturalsFrom(i: Int): Stream[Int] = i #:: naturalsFrom(i + 1)
- * @define Coll Stream
+ * @define Coll `Stream`
* @define coll stream
* @define orderDependent
* @define orderDependentFold
*/
-abstract class Stream[+A] extends LinearSeq[A]
+abstract class Stream[+A] extends AbstractSeq[A]
+ with LinearSeq[A]
with GenericTraversableTemplate[A, Stream]
with LinearSeqOptimized[A, Stream[A]] {
self =>
@@ -422,6 +422,9 @@ self =>
* // produces: 10, 10, 11, 10, 11, 11, 10, 11, 11, 12, 10, 11, 11, 12, 13
* }}}
*
+ * ''Note:'' Currently `flatMap` will evaluate as much of the Stream as needed
+ * until it finds a non-empty element for the head, which is non-lazy.
+ *
* @tparam B The element type of the returned collection '''That'''.
* @param f the function to apply on each element.
* @return `f(a,,0,,) ::: ... ::: f(a,,n,,)` if
@@ -479,22 +482,40 @@ self =>
final class StreamWithFilter(p: A => Boolean) extends WithFilter(p) {
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
- def tailMap = asStream[B](tail withFilter p map f)
- if (isStreamBuilder(bf)) asThat(
- if (isEmpty) Stream.Empty
- else if (p(head)) cons(f(head), tailMap)
- else tailMap
- )
+ def tailMap(coll: Stream[A]): Stream[B] = {
+ var head: A = null.asInstanceOf[A]
+ var tail: Stream[A] = coll
+ while (true) {
+ if (tail.isEmpty)
+ return Stream.Empty
+ head = tail.head
+ tail = tail.tail
+ if (p(head))
+ return cons(f(head), tailMap(tail))
+ }
+ throw new RuntimeException()
+ }
+
+ if (isStreamBuilder(bf)) asThat(tailMap(Stream.this))
else super.map(f)(bf)
}
override def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
- def tailFlatMap = asStream[B](tail withFilter p flatMap f)
- if (isStreamBuilder(bf)) asThat(
- if (isEmpty) Stream.Empty
- else if (p(head)) f(head).toStream append tailFlatMap
- else tailFlatMap
- )
+ def tailFlatMap(coll: Stream[A]): Stream[B] = {
+ var head: A = null.asInstanceOf[A]
+ var tail: Stream[A] = coll
+ while (true) {
+ if (tail.isEmpty)
+ return Stream.Empty
+ head = tail.head
+ tail = tail.tail
+ if (p(head))
+ return f(head).toStream append tailFlatMap(tail)
+ }
+ throw new RuntimeException()
+ }
+
+ if (isStreamBuilder(bf)) asThat(tailFlatMap(Stream.this))
else super.flatMap(f)(bf)
}
@@ -604,7 +625,7 @@ self =>
*
* @example {{{
* $naturalsEx
- * naturalsFrom(1) zip naturalsFrom(2) zip take 5 foreach println
+ * naturalsFrom(1) zip naturalsFrom(2) take 5 foreach println
* // prints
* // (1,2)
* // (2,3)
@@ -613,7 +634,7 @@ self =>
* // (5,6)
* }}}
*/
- override final def zip[A1 >: A, B, That](that: collection.GenIterable[B])(implicit bf: CanBuildFrom[Stream[A], (A1, B), That]): That =
+ override final def zip[A1 >: A, B, That](that: scala.collection.GenIterable[B])(implicit bf: CanBuildFrom[Stream[A], (A1, B), That]): That =
// we assume there is no other builder factory on streams and therefore know that That = Stream[(A1, B)]
if (isStreamBuilder(bf)) asThat(
if (this.isEmpty || that.isEmpty) Stream.Empty
@@ -716,8 +737,8 @@ self =>
/** A substream starting at index `from` and extending up to (but not including)
* index `until`. This returns a `Stream` that is lazily evaluated.
*
- * @param start The index of the first element of the returned subsequence
- * @param end The index of the element following the returned subsequence
+ * @param from The index of the first element of the returned subsequence
+ * @param until The index of the element following the returned subsequence
* @return A new string containing the elements requested from `start` until
* `end`.
*
@@ -774,7 +795,7 @@ self =>
* `p`.
*
* @example {{{
- * naturalsFrom(0) takeWhile { _ < 5 } mkString ", "
+ + naturalsFrom(0) takeWhile { _ < 5 } mkString ", "
* produces: "0, 1, 2, 3, 4"
* }}}
*/
@@ -805,9 +826,9 @@ self =>
these
}
- /** Builds a new stream from this stream in which any duplicates (wrt to ==)
- * have been removed. Among duplicate elements, only the first one is
- * retained in the resulting `Stream`.
+ /** Builds a new stream from this stream in which any duplicates (as
+ * determined by `==`) have been removed. Among duplicate elements, only the
+ * first one is retained in the resulting `Stream`.
*
* @return A new `Stream` representing the result of applying distinctness to
* the original `Stream`.
@@ -820,9 +841,16 @@ self =>
* // produces: "1, 2, 3, 4, 5, 6"
* }}}
*/
- override def distinct: Stream[A] =
- if (isEmpty) this
- else cons(head, tail.filter(head !=).distinct)
+ override def distinct: Stream[A] = {
+ // This should use max memory proportional to N, whereas
+ // recursively calling distinct on the tail is N^2.
+ def loop(seen: Set[A], rest: Stream[A]): Stream[A] = {
+ if (rest.isEmpty) rest
+ else if (seen(rest.head)) loop(seen, rest.tail)
+ else cons(rest.head, loop(seen + rest.head, rest.tail))
+ }
+ loop(Set(), this)
+ }
/** Returns a new sequence of given length containing the elements of this
* sequence followed by zero or more occurrences of given elements.
@@ -903,7 +931,7 @@ self =>
* // produces: "0, 0, 0, 0, 0, 0, 0, 0, 0, 0"
* }}}
*/
- override def flatten[B](implicit asTraversable: A => /*<:<!!!*/ TraversableOnce[B]): Stream[B] = {
+ override def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]): Stream[B] = {
def flatten1(t: Traversable[B]): Stream[B] =
if (!t.isEmpty)
cons(t.head, flatten1(t.tail))
@@ -911,7 +939,7 @@ self =>
tail.flatten
if (isEmpty) Stream.empty
- else flatten1(asTraversable(head).toTraversable)
+ else flatten1(asTraversable(head).seq.toTraversable)
}
override def view = new StreamView[A, Stream[A]] {
@@ -930,7 +958,7 @@ self =>
/** A specialized, extra-lazy implementation of a stream iterator, so it can
* iterate as lazily as it traverses the tail.
*/
-final class StreamIterator[+A] private() extends Iterator[A] {
+final class StreamIterator[+A] private() extends AbstractIterator[A] with Iterator[A] {
def this(self: Stream[A]) {
this()
these = new LazyCell(self)
@@ -944,7 +972,7 @@ final class StreamIterator[+A] private() extends Iterator[A] {
private var these: LazyCell = _
def hasNext: Boolean = these.v.nonEmpty
- def next: A =
+ def next(): A =
if (isEmpty) Iterator.empty.next
else {
val cur = these.v
@@ -995,7 +1023,7 @@ object Stream extends SeqFactory[Stream] {
def result: Stream[A] = parts.toStream flatMap (_.toStream)
}
- object Empty extends Stream[Nothing] {
+ object Empty extends Stream[Nothing] with Serializable {
override def isEmpty = true
override def head = throw new NoSuchElementException("head of empty stream")
override def tail = throw new UnsupportedOperationException("tail of empty stream")
@@ -1030,9 +1058,6 @@ object Stream extends SeqFactory[Stream] {
else Some((xs.head, xs.tail))
}
- @deprecated("use #:: instead", "2.8.0")
- lazy val lazy_:: = #::
-
/** An alternative way of building and matching Streams using Stream.cons(hd, tl).
*/
object cons {
@@ -1127,55 +1152,6 @@ object Stream extends SeqFactory[Stream] {
private[immutable] def collectedTail[A, B, That](stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = {
cons(pf(stream.head), stream.tail.collect(pf)(bf).asInstanceOf[Stream[B]])
}
-
- /** A stream containing all elements of a given iterator, in the order they are produced.
- * @param it The iterator producing the stream's elements
- */
- @deprecated("use it.toStream instead", "2.8.0")
- def fromIterator[A](it: Iterator[A]): Stream[A] = it.toStream
-
- /** The concatenation of a sequence of streams
- */
- @deprecated("use xs.flatten instead", "2.8.0")
- def concat[A](xs: Iterable[Stream[A]]): Stream[A] = concat(xs.iterator)
-
- /** The concatenation of all streams returned by an iterator
- */
- @deprecated("use xs.toStream.flatten instead", "2.8.0")
- def concat[A](xs: Iterator[Stream[A]]): Stream[A] = xs.toStream.flatten //(conforms[Stream[A], scala.collection.Traversable[A]])
-
- /**
- * Create a stream with element values
- * <code>v<sub>n+1</sub> = step(v<sub>n</sub>)</code>
- * where <code>v<sub>0</sub> = start</code>
- * and elements are in the range between <code>start</code> (inclusive)
- * and <code>end</code> (exclusive)
- * @param start the start value of the stream
- * @param end the end value of the stream
- * @param step the increment function of the stream, must be monotonically increasing or decreasing
- * @return the stream starting at value <code>start</code>.
- */
- @deprecated("use `iterate' instead.", "2.8.0")
- def range(start: Int, end: Int, step: Int => Int): Stream[Int] =
- iterate(start, end - start)(step)
-
- /**
- * Create an infinite stream containing the given element.
- *
- * @param elem the element composing the resulting stream
- * @return the stream containing an infinite number of elem
- */
- @deprecated("use `continually' instead", "2.8.0")
- def const[A](elem: A): Stream[A] = cons(elem, const(elem))
-
- /** Create a stream containing several copies of an element.
- *
- * @param n the length of the resulting stream
- * @param elem the element composing the resulting stream
- * @return the stream composed of n elements all equal to elem
- */
- @deprecated("use fill(n, elem) instead", "2.8.0")
- def make[A](n: Int, elem: A): Stream[A] = fill(n)(elem)
}
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index 7c44c1e..236308d 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -18,11 +18,14 @@ extends SeqView[A, Coll]
override def toString = viewToString
}
- trait EmptyView extends Transformed[Nothing] with super.EmptyView { }
+ /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
+ private[collection] abstract class AbstractTransformed[+B] extends super.AbstractTransformed[B] with Transformed[B]
- trait Forced[B] extends super.Forced[B] with Transformed[B] { }
+ trait EmptyView extends Transformed[Nothing] with super.EmptyView
- trait Sliced extends super.Sliced with Transformed[A] { }
+ trait Forced[B] extends super.Forced[B] with Transformed[B]
+
+ trait Sliced extends super.Sliced with Transformed[A]
trait Mapped[B] extends super.Mapped[B] with Transformed[B]
@@ -47,23 +50,23 @@ extends SeqView[A, Coll]
trait Prepended[B >: A] extends super.Prepended[B] with Transformed[B]
/** boilerplate */
- protected override def newForced[B](xs: => collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with Forced[B]
- protected override def newAppended[B >: A](that: collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with Appended[B]
- protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with Mapped[B]
- protected override def newFlatMapped[B](f: A => collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with FlatMapped[B]
- protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with Filtered
- protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with Sliced
- protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with DroppedWhile
- protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with TakenWhile
- protected override def newZipped[B](that: collection.GenIterable[B]): Transformed[(A, B)] = new { val other = that } with Zipped[B]
- protected override def newZippedAll[A1 >: A, B](that: collection.GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = {
- new { val other = that; val thisElem = _thisElem; val thatElem = _thatElem } with ZippedAll[A1, B]
+ protected override def newForced[B](xs: => scala.collection.GenSeq[B]): Transformed[B] = new { val forced = xs } with AbstractTransformed[B] with Forced[B]
+ protected override def newAppended[B >: A](that: scala.collection.GenTraversable[B]): Transformed[B] = new { val rest = that } with AbstractTransformed[B] with Appended[B]
+ protected override def newMapped[B](f: A => B): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with Mapped[B]
+ protected override def newFlatMapped[B](f: A => scala.collection.GenTraversableOnce[B]): Transformed[B] = new { val mapping = f } with AbstractTransformed[B] with FlatMapped[B]
+ protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
+ protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with AbstractTransformed[A] with Sliced
+ protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with DroppedWhile
+ protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile
+ protected override def newZipped[B](that: scala.collection.GenIterable[B]): Transformed[(A, B)] = new { val other = that } with AbstractTransformed[(A, B)] with Zipped[B]
+ protected override def newZippedAll[A1 >: A, B](that: scala.collection.GenIterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = {
+ new { val other = that; val thisElem = _thisElem; val thatElem = _thatElem } with AbstractTransformed[(A1, B)] with ZippedAll[A1, B]
}
protected override def newReversed: Transformed[A] = new Reversed { }
- protected override def newPatched[B >: A](_from: Int, _patch: collection.GenSeq[B], _replaced: Int): Transformed[B] = {
- new { val from = _from; val patch = _patch; val replaced = _replaced } with Patched[B]
+ protected override def newPatched[B >: A](_from: Int, _patch: scala.collection.GenSeq[B], _replaced: Int): Transformed[B] = {
+ new { val from = _from; val patch = _patch; val replaced = _replaced } with AbstractTransformed[B] with Patched[B]
}
- protected override def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with Prepended[B]
+ protected override def newPrepended[B >: A](elem: B): Transformed[B] = new { protected[this] val fst = elem } with AbstractTransformed[B] with Prepended[B]
override def stringPrefix = "StreamView"
}
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index 5a3a369..edea89b 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package immutable
@@ -15,6 +13,7 @@ import generic._
import mutable.Builder
import scala.util.matching.Regex
import scala.math.ScalaNumber
+import scala.reflect.ClassTag
/** A companion object for the `StringLike` containing some constants.
* @since 2.8
@@ -35,14 +34,14 @@ import StringLike._
* @tparam Repr The type of the actual collection inheriting `StringLike`.
*
* @since 2.8
- * @define Coll String
+ * @define Coll `String`
* @define coll string
* @define orderDependent
* @define orderDependentFold
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-trait StringLike[+Repr] extends collection.IndexedSeqOptimized[Char, Repr] with Ordered[String] {
+trait StringLike[+Repr] extends Any with scala.collection.IndexedSeqOptimized[Char, Repr] with Ordered[String] {
self =>
/** Creates a string builder buffer as builder for this class */
@@ -62,7 +61,7 @@ self =>
val end = until min length
if (start >= end) newBuilder.result
- else newBuilder ++= toString.substring(start, end) result
+ else (newBuilder ++= toString.substring(start, end)).result
}
/** Return the current string concatenated `n` times.
@@ -81,12 +80,11 @@ self =>
* Strip trailing line end character from this string if it has one.
*
* A line end character is one of
- * <ul style="list-style-type: none;">
- * <li>LF - line feed (0x0A hex)</li>
- * <li>FF - form feed (0x0C hex)</li>
- * </ul>
- * If a line feed character LF is preceded by a carriage return CR
- * (0x0D hex), the CR character is also stripped (Windows convention).
+ * - `LF` - line feed (`0x0A` hex)
+ * - `FF` - form feed (`0x0C` hex)
+ *
+ * If a line feed character `LF` is preceded by a carriage return `CR`
+ * (`0x0D` hex), the `CR` character is also stripped (Windows convention).
*/
def stripLineEnd: String = {
val len = toString.length
@@ -100,20 +98,16 @@ self =>
}
}
- /**
- * Return all lines in this string in an iterator, including trailing
- * line end characters.
- *
- * The number of strings returned is one greater than the number of line
- * end characters in this string. For an empty string, a single empty
- * line is returned. A line end character is one of
+ /** Return all lines in this string in an iterator, including trailing
+ * line end characters.
*
- * <ul style="list-style-type: none;">
- * <li>LF - line feed (0x0A hex)</li>
- * <li>FF - form feed (0x0C hex)</li>
- * </ul>
+ * The number of strings returned is one greater than the number of line
+ * end characters in this string. For an empty string, a single empty
+ * line is returned. A line end character is one of
+ * - `LF` - line feed (`0x0A` hex)
+ * - `FF` - form feed (`0x0C` hex)
*/
- def linesWithSeparators: Iterator[String] = new Iterator[String] {
+ def linesWithSeparators: Iterator[String] = new AbstractIterator[String] {
val str = self.toString
private val len = str.length
private var index = 0
@@ -177,13 +171,10 @@ self =>
toString.replaceAll(arg1, arg2)
}
- /**
- * For every line in this string:
+ /** For every line in this string:
*
- * <blockquote>
- * Strip a leading prefix consisting of blanks or control characters
- * followed by `marginChar` from the line.
- * </blockquote>
+ * Strip a leading prefix consisting of blanks or control characters
+ * followed by `marginChar` from the line.
*/
def stripMargin(marginChar: Char): String = {
val buf = new StringBuilder
@@ -197,13 +188,10 @@ self =>
buf.toString
}
- /**
- * For every line in this string:
+ /** For every line in this string:
*
- * <blockquote>
- * Strip a leading prefix consisting of blanks or control characters
- * followed by `|` from the line.
- * </blockquote>
+ * Strip a leading prefix consisting of blanks or control characters
+ * followed by `|` from the line.
*/
def stripMargin: String = stripMargin('|')
@@ -218,12 +206,22 @@ self =>
toString.split(re)
}
- /** You can follow a string with `.r', turning
- * it into a Regex. E.g.
+ /** You can follow a string with `.r`, turning it into a `Regex`. E.g.
*
- * """A\w*""".r is the regular expression for identifiers starting with `A'.
+ * `"""A\w*""".r` is the regular expression for identifiers starting with `A`.
*/
- def r: Regex = new Regex(toString)
+ def r: Regex = r()
+
+ /** You can follow a string with `.r(g1, ... , gn)`, turning it into a `Regex`,
+ * with group names g1 through gn.
+ *
+ * `"""(\d\d)-(\d\d)-(\d\d\d\d)""".r("month", "day", "year")` matches dates
+ * and provides its subcomponents through groups named "month", "day" and
+ * "year".
+ *
+ * @param groupNames The names of the groups in the pattern, in the order they appear.
+ */
+ def r(groupNames: String*): Regex = new Regex(toString, groupNames: _*)
def toBoolean: Boolean = parseBoolean(toString)
def toByte: Byte = java.lang.Byte.parseByte(toString)
@@ -237,12 +235,12 @@ self =>
if (s != null) s.toLowerCase match {
case "true" => true
case "false" => false
- case _ => throw new NumberFormatException("For input string: \""+s+"\"")
+ case _ => throw new IllegalArgumentException("For input string: \""+s+"\"")
}
else
- throw new NumberFormatException("For input string: \"null\"")
+ throw new IllegalArgumentException("For input string: \"null\"")
- override def toArray[B >: Char : ClassManifest]: Array[B] =
+ override def toArray[B >: Char : ClassTag]: Array[B] =
toString.toCharArray.asInstanceOf[Array[B]]
private def unwrapArg(arg: Any): AnyRef = arg match {
@@ -250,30 +248,26 @@ self =>
case x => x.asInstanceOf[AnyRef]
}
- /**
- * Uses the underlying string as a pattern (in a fashion similar to
+ /** Uses the underlying string as a pattern (in a fashion similar to
* printf in C), and uses the supplied arguments to fill in the
* holes.
*
* The interpretation of the formatting patterns is described in
* <a href="" target="contentFrame" class="java/util/Formatter">
* `java.util.Formatter`</a>, with the addition that
- * classes deriving from `ScalaNumber` (such as `scala.BigInt` and
- * `scala.BigDecimal`) are unwrapped to pass a type which `Formatter`
+ * classes deriving from `ScalaNumber` (such as [[scala.BigInt]] and
+ * [[scala.BigDecimal]]) are unwrapped to pass a type which `Formatter`
* understands.
*
- *
* @param args the arguments used to instantiating the pattern.
- * @throws java.lang.IllegalArgumentException
+ * @throws `java.lang.IllegalArgumentException`
*/
def format(args : Any*): String =
java.lang.String.format(toString, args map unwrapArg: _*)
- /**
- * Like `format(args*)` but takes an initial `Locale` parameter
+ /** Like `format(args*)` but takes an initial `Locale` parameter
* which influences formatting as in `java.lang.String`'s format.
*
- *
* The interpretation of the formatting patterns is described in
* <a href="" target="contentFrame" class="java/util/Formatter">
* `java.util.Formatter`</a>, with the addition that
@@ -281,12 +275,10 @@ self =>
* `scala.BigDecimal`) are unwrapped to pass a type which `Formatter`
* understands.
*
- *
- * @param locale an instance of `java.util.Locale`
+ * @param l an instance of `java.util.Locale`
* @param args the arguments used to instantiating the pattern.
- * @throws java.lang.IllegalArgumentException
+ * @throws `java.lang.IllegalArgumentException`
*/
def formatLocal(l: java.util.Locale, args: Any*): String =
java.lang.String.format(l, toString, args map unwrapArg: _*)
}
-
diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala
index 5fc71c7..a650d98 100644
--- a/src/library/scala/collection/immutable/StringOps.scala
+++ b/src/library/scala/collection/immutable/StringOps.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -25,10 +25,10 @@ import mutable.StringBuilder
* @param repr the actual representation of this string operations object.
*
* @since 2.8
- * @define Coll StringOps
+ * @define Coll `String`
* @define coll string
*/
-final class StringOps(override val repr: String) extends StringLike[String] {
+final class StringOps(override val repr: String) extends AnyVal with StringLike[String] {
override protected[this] def thisCollection: WrappedString = new WrappedString(repr)
override protected[this] def toCollection(repr: String): WrappedString = new WrappedString(repr)
@@ -48,5 +48,5 @@ final class StringOps(override val repr: String) extends StringLike[String] {
override def toString = repr
override def length = repr.length
- def seq = this.iterator
+ def seq = new WrappedString(repr)
}
diff --git a/src/library/scala/collection/immutable/Traversable.scala b/src/library/scala/collection/immutable/Traversable.scala
index 80839db..5188343 100644
--- a/src/library/scala/collection/immutable/Traversable.scala
+++ b/src/library/scala/collection/immutable/Traversable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -30,9 +30,9 @@ trait Traversable[+A] extends scala.collection.Traversable[A]
/** $factoryInfo
* The current default implementation of a $Coll is a `Vector`.
* @define coll immutable traversable collection
- * @define Coll immutable.Traversable
+ * @define Coll `immutable.Traversable`
*/
object Traversable extends TraversableFactory[Traversable] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, Traversable[A]] = new mutable.ListBuffer
}
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index ef0eac3..5b4db26 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,8 +12,8 @@ package scala.collection
package immutable
import generic._
+import immutable.{RedBlackTree => RB}
import mutable.Builder
-import annotation.bridge
/** $factoryInfo
* @define Coll immutable.TreeMap
@@ -23,7 +23,6 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] {
def empty[A, B](implicit ord: Ordering[A]) = new TreeMap[A, B]()(ord)
/** $sortedMapCanBuildFromInfo */
implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), TreeMap[A, B]] = new SortedMapCanBuildFrom[A, B]
- private def make[A, B](s: Int, t: RedBlack[A]#Tree[B])(implicit ord: Ordering[A]) = new TreeMap[A, B](s, t)(ord)
}
/** This class implements immutable maps using a tree.
@@ -46,31 +45,79 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] {
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit val ordering: Ordering[A])
- extends RedBlack[A]
- with SortedMap[A, B]
+class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Ordering[A])
+ extends SortedMap[A, B]
with SortedMapLike[A, B, TreeMap[A, B]]
with MapLike[A, B, TreeMap[A, B]]
with Serializable {
+ @deprecated("use `ordering.lt` instead", "2.10.0")
def isSmaller(x: A, y: A) = ordering.lt(x, y)
override protected[this] def newBuilder : Builder[(A, B), TreeMap[A, B]] =
TreeMap.newBuilder[A, B]
- def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering)
+ override def size = RB.count(tree)
- protected val tree: RedBlack[A]#Tree[B] = if (size == 0) Empty else t
+ def this()(implicit ordering: Ordering[A]) = this(null)(ordering)
- override def rangeImpl(from : Option[A], until : Option[A]): TreeMap[A,B] = {
- val ntree = tree.range(from,until)
- new TreeMap[A,B](ntree.count, ntree)
- }
+ override def rangeImpl(from: Option[A], until: Option[A]): TreeMap[A, B] = new TreeMap[A, B](RB.rangeImpl(tree, from, until))
+ override def range(from: A, until: A): TreeMap[A, B] = new TreeMap[A, B](RB.range(tree, from, until))
+ override def from(from: A): TreeMap[A, B] = new TreeMap[A, B](RB.from(tree, from))
+ override def to(to: A): TreeMap[A, B] = new TreeMap[A, B](RB.to(tree, to))
+ override def until(until: A): TreeMap[A, B] = new TreeMap[A, B](RB.until(tree, until))
- override def firstKey = t.first
- override def lastKey = t.last
+ override def firstKey = RB.smallest(tree).key
+ override def lastKey = RB.greatest(tree).key
override def compare(k0: A, k1: A): Int = ordering.compare(k0, k1)
+ override def head = {
+ val smallest = RB.smallest(tree)
+ (smallest.key, smallest.value)
+ }
+ override def headOption = if (RB.isEmpty(tree)) None else Some(head)
+ override def last = {
+ val greatest = RB.greatest(tree)
+ (greatest.key, greatest.value)
+ }
+ override def lastOption = if (RB.isEmpty(tree)) None else Some(last)
+
+ override def tail = new TreeMap(RB.delete(tree, firstKey))
+ override def init = new TreeMap(RB.delete(tree, lastKey))
+
+ override def drop(n: Int) = {
+ if (n <= 0) this
+ else if (n >= size) empty
+ else new TreeMap(RB.drop(tree, n))
+ }
+
+ override def take(n: Int) = {
+ if (n <= 0) empty
+ else if (n >= size) this
+ else new TreeMap(RB.take(tree, n))
+ }
+
+ override def slice(from: Int, until: Int) = {
+ if (until <= from) empty
+ else if (from <= 0) take(until)
+ else if (until >= size) drop(from)
+ else new TreeMap(RB.slice(tree, from, until))
+ }
+
+ override def dropRight(n: Int) = take(size - n)
+ override def takeRight(n: Int) = drop(size - n)
+ override def splitAt(n: Int) = (take(n), drop(n))
+
+ private[this] def countWhile(p: ((A, B)) => Boolean): Int = {
+ var result = 0
+ val it = iterator
+ while (it.hasNext && p(it.next)) result += 1
+ result
+ }
+ override def dropWhile(p: ((A, B)) => Boolean) = drop(countWhile(p))
+ override def takeWhile(p: ((A, B)) => Boolean) = take(countWhile(p))
+ override def span(p: ((A, B)) => Boolean) = splitAt(countWhile(p))
+
/** A factory to create empty maps of the same type of keys.
*/
override def empty: TreeMap[A, B] = TreeMap.empty[A, B](ordering)
@@ -84,10 +131,7 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va
* @param value the value to be associated with `key`
* @return a new $coll with the updated binding
*/
- override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = {
- val newsize = if (tree.lookup(key).isEmpty) size + 1 else size
- TreeMap.make(newsize, tree.update(key, value))
- }
+ override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value, true))
/** Add a key/value pair to this map.
* @tparam B1 type of the value of the new binding, a supertype of `B`
@@ -117,8 +161,6 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va
override def ++[B1 >: B] (xs: GenTraversableOnce[(A, B1)]): TreeMap[A, B1] =
((repr: TreeMap[A, B1]) /: xs.seq) (_ + _)
- @bridge def ++[B1 >: B] (xs: TraversableOnce[(A, B1)]): TreeMap[A, B1] = ++(xs: GenTraversableOnce[(A, B1)])
-
/** A new TreeMap with the entry added is returned,
* assuming that key is <em>not</em> in the TreeMap.
*
@@ -128,14 +170,13 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va
* @return a new $coll with the inserted binding, if it wasn't present in the map
*/
def insert [B1 >: B](key: A, value: B1): TreeMap[A, B1] = {
- assert(tree.lookup(key).isEmpty)
- TreeMap.make(size + 1, tree.update(key, value))
+ assert(!RB.contains(tree, key))
+ new TreeMap(RB.update(tree, key, value, true))
}
def - (key:A): TreeMap[A, B] =
- if (tree.lookup(key).isEmpty) this
- else if (size == 1) empty
- else TreeMap.make(size - 1, tree.delete(key))
+ if (!RB.contains(tree, key)) this
+ else new TreeMap(RB.delete(tree, key))
/** Check if this map maps `key` to a value and return the
* value if it exists.
@@ -143,21 +184,22 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va
* @param key the key of the mapping of interest
* @return the value of the mapping, if it exists
*/
- override def get(key: A): Option[B] = tree.lookup(key) match {
- case n: NonEmpty[b] => Some(n.value)
- case _ => None
- }
+ override def get(key: A): Option[B] = RB.get(tree, key)
/** Creates a new iterator over all elements contained in this
* object.
*
* @return the new iterator
*/
- def iterator: Iterator[(A, B)] = tree.toStream.iterator
+ override def iterator: Iterator[(A, B)] = RB.iterator(tree)
+
+ override def keysIterator: Iterator[A] = RB.keysIterator(tree)
+ override def valuesIterator: Iterator[B] = RB.valuesIterator(tree)
- override def toStream: Stream[(A, B)] = tree.toStream
+ override def contains(key: A): Boolean = RB.contains(tree, key)
+ override def isDefinedAt(key: A): Boolean = RB.contains(tree, key)
- override def foreach[U](f : ((A,B)) => U) = tree foreach { case (x, y) => f(x, y) }
+ override def foreach[U](f : ((A,B)) => U) = RB.foreach(tree, f)
}
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 8b90ece..4947765 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,10 +12,11 @@ package scala.collection
package immutable
import generic._
+import immutable.{RedBlackTree => RB}
import mutable.{ Builder, SetBuilder }
/** $factoryInfo
- * @define Coll immutable.TreeSet
+ * @define Coll `immutable.TreeSet`
* @define coll immutable tree set
*/
object TreeSet extends ImmutableSortedSetFactory[TreeSet] {
@@ -39,27 +40,68 @@ object TreeSet extends ImmutableSortedSetFactory[TreeSet] {
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#redblack_trees "Scala's Collection Library overview"]]
* section on `Red-Black Trees` for more information.
*
- * @define Coll immutable.TreeSet
+ * @define Coll `immutable.TreeSet`
* @define coll immutable tree set
* @define orderDependent
* @define orderDependentFold
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
- at SerialVersionUID(-234066569443569402L)
-class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit])
- (implicit val ordering: Ordering[A])
- extends RedBlack[A] with SortedSet[A] with SortedSetLike[A, TreeSet[A]] with Serializable {
+ at SerialVersionUID(-5685982407650748405L)
+class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Ordering[A])
+ extends SortedSet[A] with SortedSetLike[A, TreeSet[A]] with Serializable {
override def stringPrefix = "TreeSet"
- def isSmaller(x: A, y: A) = compare(x,y) < 0
+ override def size = RB.count(tree)
+
+ override def head = RB.smallest(tree).key
+ override def headOption = if (RB.isEmpty(tree)) None else Some(head)
+ override def last = RB.greatest(tree).key
+ override def lastOption = if (RB.isEmpty(tree)) None else Some(last)
+
+ override def tail = new TreeSet(RB.delete(tree, firstKey))
+ override def init = new TreeSet(RB.delete(tree, lastKey))
+
+ override def drop(n: Int) = {
+ if (n <= 0) this
+ else if (n >= size) empty
+ else newSet(RB.drop(tree, n))
+ }
- def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering)
+ override def take(n: Int) = {
+ if (n <= 0) empty
+ else if (n >= size) this
+ else newSet(RB.take(tree, n))
+ }
- protected val tree: RedBlack[A]#Tree[Unit] = if (size == 0) Empty else t
+ override def slice(from: Int, until: Int) = {
+ if (until <= from) empty
+ else if (from <= 0) take(until)
+ else if (until >= size) drop(from)
+ else newSet(RB.slice(tree, from, until))
+ }
- private def newSet(s: Int, t: RedBlack[A]#Tree[Unit]) = new TreeSet[A](s, t)
+ override def dropRight(n: Int) = take(size - n)
+ override def takeRight(n: Int) = drop(size - n)
+ override def splitAt(n: Int) = (take(n), drop(n))
+
+ private[this] def countWhile(p: A => Boolean): Int = {
+ var result = 0
+ val it = iterator
+ while (it.hasNext && p(it.next)) result += 1
+ result
+ }
+ override def dropWhile(p: A => Boolean) = drop(countWhile(p))
+ override def takeWhile(p: A => Boolean) = take(countWhile(p))
+ override def span(p: A => Boolean) = splitAt(countWhile(p))
+
+ @deprecated("use `ordering.lt` instead", "2.10.0")
+ def isSmaller(x: A, y: A) = compare(x,y) < 0
+
+ def this()(implicit ordering: Ordering[A]) = this(null)(ordering)
+
+ private def newSet(t: RB.Tree[A, Unit]) = new TreeSet[A](t)
/** A factory to create empty sets of the same type of keys.
*/
@@ -70,10 +112,7 @@ class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit])
* @param elem a new element to add.
* @return a new $coll containing `elem` and all the elements of this $coll.
*/
- def + (elem: A): TreeSet[A] = {
- val newsize = if (tree.lookup(elem).isEmpty) size + 1 else size
- newSet(newsize, tree.update(elem, ()))
- }
+ def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, (), false))
/** A new `TreeSet` with the entry added is returned,
* assuming that elem is <em>not</em> in the TreeSet.
@@ -82,8 +121,8 @@ class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit])
* @return a new $coll containing `elem` and all the elements of this $coll.
*/
def insert(elem: A): TreeSet[A] = {
- assert(tree.lookup(elem).isEmpty)
- newSet(size + 1, tree.update(elem, ()))
+ assert(!RB.contains(tree, elem))
+ newSet(RB.update(tree, elem, (), false))
}
/** Creates a new `TreeSet` with the entry removed.
@@ -92,31 +131,31 @@ class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit])
* @return a new $coll containing all the elements of this $coll except `elem`.
*/
def - (elem:A): TreeSet[A] =
- if (tree.lookup(elem).isEmpty) this
- else newSet(size - 1, tree delete elem)
+ if (!RB.contains(tree, elem)) this
+ else newSet(RB.delete(tree, elem))
/** Checks if this set contains element `elem`.
*
* @param elem the element to check for membership.
* @return true, iff `elem` is contained in this set.
*/
- def contains(elem: A): Boolean = !tree.lookup(elem).isEmpty
+ def contains(elem: A): Boolean = RB.contains(tree, elem)
/** Creates a new iterator over all elements contained in this
* object.
*
* @return the new iterator
*/
- def iterator: Iterator[A] = tree.toStream.iterator map (_._1)
+ def iterator: Iterator[A] = RB.keysIterator(tree)
- override def toStream: Stream[A] = tree.toStream map (_._1)
+ override def foreach[U](f: A => U) = RB.foreachKey(tree, f)
- override def foreach[U](f: A => U) = tree foreach { (x, y) => f(x) }
+ override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSet(RB.rangeImpl(tree, from, until))
+ override def range(from: A, until: A): TreeSet[A] = newSet(RB.range(tree, from, until))
+ override def from(from: A): TreeSet[A] = newSet(RB.from(tree, from))
+ override def to(to: A): TreeSet[A] = newSet(RB.to(tree, to))
+ override def until(until: A): TreeSet[A] = newSet(RB.until(tree, until))
- override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = {
- val tree = this.tree.range(from, until)
- newSet(tree.count, tree)
- }
- override def firstKey = tree.first
- override def lastKey = tree.last
+ override def firstKey = head
+ override def lastKey = last
}
diff --git a/src/library/scala/collection/immutable/TrieIterator.scala b/src/library/scala/collection/immutable/TrieIterator.scala
index c2cd0b2..b0bd253 100644
--- a/src/library/scala/collection/immutable/TrieIterator.scala
+++ b/src/library/scala/collection/immutable/TrieIterator.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,13 +11,13 @@ package immutable
import HashMap.{ HashTrieMap, HashMapCollision1, HashMap1 }
import HashSet.{ HashTrieSet, HashSetCollision1, HashSet1 }
-import annotation.unchecked.{ uncheckedVariance => uV }
+import scala.annotation.unchecked.{ uncheckedVariance => uV }
import scala.annotation.tailrec
/** Abandons any pretense of type safety for speed. You can't say I
* didn't try: see r23934.
*/
-private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) extends Iterator[T] {
+private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) extends AbstractIterator[T] {
outer =>
private[immutable] def getElem(x: AnyRef): T
@@ -46,7 +46,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
case x: HashSetCollision1[_] => x.ks.map(x => HashSet(x)).toArray
}).asInstanceOf[Array[Iterable[T]]]
- private type SplitIterators = ((Iterator[T], Int), Iterator[T])
+ private[this] type SplitIterators = ((Iterator[T], Int), Iterator[T])
private def isTrie(x: AnyRef) = x match {
case _: HashTrieMap[_,_] | _: HashTrieSet[_] => true
@@ -75,7 +75,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
}
private[this] def iteratorWithSize(arr: Array[Iterable[T]]): (Iterator[T], Int) =
- (newIterator(arr), arr map (_.size) sum)
+ (newIterator(arr), arr.map(_.size).sum)
private[this] def arrayToIterators(arr: Array[Iterable[T]]): SplitIterators = {
val (fst, snd) = arr.splitAt(arr.length / 2)
@@ -92,7 +92,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
}
def hasNext = (subIter ne null) || depth >= 0
- def next: T = {
+ def next(): T = {
if (subIter ne null) {
val el = subIter.next
if (!subIter.hasNext)
@@ -216,4 +216,4 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 1639a3d..bcce4a9 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -1,18 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-package scala.collection
+package scala
+package collection
package immutable
import scala.annotation.unchecked.uncheckedVariance
-import compat.Platform
-
+import scala.compat.Platform
import scala.collection.generic._
import scala.collection.mutable.Builder
import scala.collection.parallel.immutable.ParVector
@@ -20,17 +19,22 @@ import scala.collection.parallel.immutable.ParVector
/** Companion object to the Vector class
*/
object Vector extends SeqFactory[Vector] {
- private[immutable] val BF = new GenericCanBuildFrom[Nothing] {
+ // left lying around for binary compatibility check
+ private[collection] class VectorReusableCBF extends GenericCanBuildFrom[Nothing] {
override def apply() = newBuilder[Nothing]
- }
- @inline implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] =
- BF.asInstanceOf[CanBuildFrom[Coll, A, Vector[A]]]
+ }
+ // left lying around for binary compatibility check
+ private val VectorReusableCBF: GenericCanBuildFrom[Nothing] = new VectorReusableCBF
+
+ override lazy val ReusableCBF =
+ scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] =
+ ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
private[immutable] val NIL = new Vector[Nothing](0, 0, 0)
- @inline override def empty[A]: Vector[A] = NIL
+ override def empty[A]: Vector[A] = NIL
}
-
// in principle, most members should be private. however, access privileges must
// be carefully chosen to not prevent method inlining
@@ -46,7 +50,7 @@ object Vector extends SeqFactory[Vector] {
*
* @tparam A the element type
*
- * @define Coll Vector
+ * @define Coll `Vector`
* @define coll vector
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `Vector[B]` because an implicit of type `CanBuildFrom[Vector, B, That]`
@@ -61,7 +65,8 @@ object Vector extends SeqFactory[Vector] {
* @define willNotTerminateInf
*/
final class Vector[+A](private[collection] val startIndex: Int, private[collection] val endIndex: Int, focus: Int)
-extends IndexedSeq[A]
+extends AbstractSeq[A]
+ with IndexedSeq[A]
with GenericTraversableTemplate[A, Vector]
with IndexedSeqLike[A, Vector[A]]
with VectorPointer[A @uncheckedVariance]
@@ -82,6 +87,8 @@ override def companion: GenericCompanion[Vector] = Vector
override def par = new ParVector(this)
+ override def toVector: Vector[A] = this
+
override def lengthCompare(len: Int): Int = length - len
private[collection] final def initIterator[B >: A](s: VectorIterator[B]) {
@@ -90,7 +97,7 @@ override def companion: GenericCompanion[Vector] = Vector
if (s.depth > 1) s.gotoPos(startIndex, startIndex ^ focus)
}
- @inline override def iterator: VectorIterator[A] = {
+ override def iterator: VectorIterator[A] = {
val s = new VectorIterator[A](startIndex, endIndex)
initIterator(s)
s
@@ -99,10 +106,10 @@ override def companion: GenericCompanion[Vector] = Vector
// can still be improved
override /*SeqLike*/
- def reverseIterator: Iterator[A] = new Iterator[A] {
+ def reverseIterator: Iterator[A] = new AbstractIterator[A] {
private var i = self.length
def hasNext: Boolean = 0 < i
- def next: A =
+ def next(): A =
if (0 < i) {
i -= 1
self(i)
@@ -116,16 +123,6 @@ override def companion: GenericCompanion[Vector] = Vector
// In principle, escape analysis could even remove the iterator/builder allocations and do it
// with local variables exclusively. But we're not quite there yet ...
- @deprecated("this method is experimental and will be removed in a future release", "2.8.0")
- @inline def foreachFast[U](f: A => U): Unit = iterator.foreachFast(f)
- @deprecated("this method is experimental and will be removed in a future release", "2.8.0")
- @inline def mapFast[B, That](f: A => B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
- val b = bf(repr)
- foreachFast(x => b += f(x))
- b.result
- }
-
-
def apply(index: Int): A = {
val idx = checkRangeConvert(index)
//println("get elem: "+index + "/"+idx + "(focus:" +focus+" xor:"+(idx^focus)+" depth:"+depth+")")
@@ -143,20 +140,17 @@ override def companion: GenericCompanion[Vector] = Vector
// SeqLike api
- @inline override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
- // just ignore bf
- updateAt(index, elem).asInstanceOf[That]
- }
+ override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
+ if (bf eq IndexedSeq.ReusableCBF) updateAt(index, elem).asInstanceOf[That] // just ignore bf
+ else super.updated(index, elem)(bf)
- @inline override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
- // just ignore bf
- appendFront(elem).asInstanceOf[That]
- }
+ override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
+ if (bf eq IndexedSeq.ReusableCBF) appendFront(elem).asInstanceOf[That] // just ignore bf
+ else super.+:(elem)(bf)
- @inline override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
- // just ignore bf
- appendBack(elem).asInstanceOf[That]
- }
+ override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
+ if (bf eq IndexedSeq.ReusableCBF) appendBack(elem).asInstanceOf[That] // just ignore bf
+ else super.:+(elem)(bf)
override def take(n: Int): Vector[A] = {
if (n <= 0)
@@ -645,7 +639,10 @@ override def companion: GenericCompanion[Vector] = Vector
}
-class VectorIterator[+A](_startIndex: Int, _endIndex: Int) extends Iterator[A] with VectorPointer[A @uncheckedVariance] {
+class VectorIterator[+A](_startIndex: Int, _endIndex: Int)
+extends AbstractIterator[A]
+ with Iterator[A]
+ with VectorPointer[A @uncheckedVariance] {
private var blockIndex: Int = _startIndex & ~31
private var lo: Int = _startIndex & 31
@@ -689,9 +686,6 @@ class VectorIterator[+A](_startIndex: Int, _endIndex: Int) extends Iterator[A] w
v.initFrom(this)
v
}
-
- @deprecated("this method is experimental and will be removed in a future release", "2.8.0")
- @inline def foreachFast[U](f: A => U) { while (hasNext) f(next()) }
}
diff --git a/src/library/scala/collection/immutable/WrappedString.scala b/src/library/scala/collection/immutable/WrappedString.scala
index 512944f..edcab31 100644
--- a/src/library/scala/collection/immutable/WrappedString.scala
+++ b/src/library/scala/collection/immutable/WrappedString.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,7 +13,6 @@ package immutable
import generic._
import mutable.{Builder, StringBuilder}
-import scala.util.matching.Regex
/**
* This class serves as a wrapper augmenting `String`s with all the operations
@@ -26,10 +25,10 @@ import scala.util.matching.Regex
* @param self a string contained within this wrapped string
*
* @since 2.8
- * @define Coll WrappedString
+ * @define Coll `WrappedString`
* @define coll wrapped string
*/
-class WrappedString(val self: String) extends IndexedSeq[Char] with StringLike[WrappedString] {
+class WrappedString(val self: String) extends AbstractSeq[Char] with IndexedSeq[Char] with StringLike[WrappedString] {
override protected[this] def thisCollection: WrappedString = this
override protected[this] def toCollection(repr: WrappedString): WrappedString = repr
diff --git a/src/library/scala/collection/immutable/package.scala b/src/library/scala/collection/immutable/package.scala
index 5ff9fa2..ed0c1b3 100644
--- a/src/library/scala/collection/immutable/package.scala
+++ b/src/library/scala/collection/immutable/package.scala
@@ -1,17 +1,21 @@
-package scala.collection
-
-
-
-
-
-
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.collection
-
-package object immutable {
-
+package immutable {
+ /** It looks like once upon a time this was used by ParRange, but
+ * since December 2010 in r23721 it is not used by anything. We
+ * should not have public API traits with seductive names like
+ * "RangeUtils" which are neither documented nor used.
+ */
+ @deprecated("this class will be removed", "2.10.0")
trait RangeUtils[+Repr <: RangeUtils[Repr]] {
-
def start: Int
def end: Int
def step: Int
@@ -23,16 +27,17 @@ package object immutable {
(size / step.toLong * step.toLong + start.toLong).toInt
}
- final def _last: Int = if (!inclusive) {
- if (step == 1 || step == -1) end - step
- else {
- val inclast = inclusiveLast
- if ((end.toLong - start.toLong) % step == 0) inclast - step else inclast
+ final def _last: Int = (
+ if (!inclusive) {
+ if (step == 1 || step == -1) end - step
+ else {
+ val inclast = inclusiveLast
+ if ((end.toLong - start.toLong) % step == 0) inclast - step else inclast
+ }
}
- } else {
- if (step == 1 || step == -1) end
+ else if (step == 1 || step == -1) end
else inclusiveLast
- }
+ )
final def _foreach[U](f: Int => U) = if (_length > 0) {
var i = start
@@ -43,39 +48,46 @@ package object immutable {
}
}
- final def _length: Int = if (!inclusive) {
- if (end > start == step > 0 && start != end) {
- (_last.toLong - start.toLong) / step.toLong + 1
- } else 0
- }.toInt else {
- if (end > start == step > 0 || start == end) {
- (_last.toLong - start.toLong) / step.toLong + 1
- } else 0
- }.toInt
+ final def _length: Int = (
+ if (!inclusive) {
+ if (end > start == step > 0 && start != end) {
+ (_last.toLong - start.toLong) / step.toLong + 1
+ } else 0
+ }.toInt
+ else {
+ if (end > start == step > 0 || start == end) {
+ (_last.toLong - start.toLong) / step.toLong + 1
+ } else 0
+ }.toInt
+ )
final def _apply(idx: Int): Int = {
if (idx < 0 || idx >= _length) throw new IndexOutOfBoundsException(idx.toString)
start + idx * step
}
- private def locationAfterN(n: Int) = if (n > 0) {
- if (step > 0) ((start.toLong + step.toLong * n.toLong) min _last.toLong).toInt
- else ((start.toLong + step.toLong * n.toLong) max _last.toLong).toInt
- } else start
-
- final def _take(n: Int) = if (n > 0 && _length > 0) {
- create(start, locationAfterN(n), step, true)
- } else create(start, start, step, false)
+ private def locationAfterN(n: Int) = (
+ if (n > 0) {
+ if (step > 0)
+ scala.math.min(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
+ else
+ scala.math.max(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
+ }
+ else start
+ )
- final def _drop(n: Int) = create(locationAfterN(n), end, step, inclusive)
+ final def _take(n: Int) = (
+ if (n > 0 && _length > 0)
+ create(start, locationAfterN(n), step, true)
+ else
+ create(start, start, step, false)
+ )
+ final def _drop(n: Int) = create(locationAfterN(n), end, step, inclusive)
final def _slice(from: Int, until: Int) = _drop(from)._take(until - from)
-
}
-
}
-
-
-
-
+package object immutable {
+ /** Nothing left after I promoted RangeUtils to the package. */
+}
diff --git a/src/library/scala/collection/interfaces/IterableMethods.scala b/src/library/scala/collection/interfaces/IterableMethods.scala
deleted file mode 100644
index f6941c8..0000000
--- a/src/library/scala/collection/interfaces/IterableMethods.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package interfaces
-
-import generic._
-import mutable.Buffer
-import scala.reflect.ClassManifest
-import annotation.unchecked.uncheckedVariance
-
-/**
- * @since 2.8
- */
-trait IterableMethods[+A, +This <: IterableLike[A, This] with Iterable[A]] extends TraversableMethods[A, This] {
- self: Iterable[A] =>
-
- // abstract
- def iterator: Iterator[A]
-
- // concrete
- def dropRight(n: Int): Iterable[A]
- def grouped(size: Int): Iterator[Iterable[A]]
- def sameElements[B >: A](that: GenIterable[B]): Boolean
- def sliding[B >: A](size: Int): Iterator[Iterable[A]]
- def sliding[B >: A](size: Int, step: Int): Iterator[Iterable[A]]
- def takeRight(n: Int): Iterable[A]
- def zipAll[B, A1 >: A, That](that: GenIterable[B], e1: A1, e2: B)(implicit bf: CanBuildFrom[This, (A1, B), That]): That
- def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[This, (A1, Int), That]): That
- def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CanBuildFrom[This, (A1, B), That]): That
-
- override def view: IterableView[A, This]
- override def view(from: Int, until: Int): IterableView[A, This]
-}
diff --git a/src/library/scala/collection/interfaces/MapMethods.scala b/src/library/scala/collection/interfaces/MapMethods.scala
deleted file mode 100644
index bc38ccd..0000000
--- a/src/library/scala/collection/interfaces/MapMethods.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package interfaces
-
-import generic._
-
-/**
- * @since 2.8
- */
-trait MapMethods[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
- extends IterableMethods[(A, B), This]
- with SubtractableMethods[A, This] {
- self: Map[A, B] =>
-
- // abstract
- def empty: This
- def get(key: A): Option[B]
- def iterator: Iterator[(A, B)]
- def + [B1 >: B] (kv: (A, B1)): Map[A, B1]
- def - (key: A): This
-
- // concrete
- def getOrElse[B1 >: B](key: A, default: => B1): B1
- def apply(key: A): B
- def contains(key: A): Boolean
- def isDefinedAt(key: A): Boolean
- def keys: Iterable[A]
- def keysIterator: Iterator[A]
- def keySet: Set[A]
- def values: Iterable[B]
- def valuesIterator: Iterator[B]
- def default(key: A): B
- def filterKeys(p: A => Boolean): Map[A, B]
- def mapValues[C](f: B => C): Map[A, C]
- def updated [B1 >: B](key: A, value: B1): Map[A, B1]
- def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): Map[A, B1]
- def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1]
-}
diff --git a/src/library/scala/collection/interfaces/SeqMethods.scala b/src/library/scala/collection/interfaces/SeqMethods.scala
deleted file mode 100644
index 1f5b08d..0000000
--- a/src/library/scala/collection/interfaces/SeqMethods.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package interfaces
-
-import generic._
-import mutable.Buffer
-import scala.reflect.ClassManifest
-
-/**
- * @since 2.8
- */
-trait SeqMethods[+A, +This <: SeqLike[A, This] with Seq[A]] extends IterableMethods[A, This] {
- self: Seq[A] =>
-
- // abstract
- def apply(idx: Int): A
- def length: Int
-
- // concrete
- def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That
- def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That
- def combinations(n: Int): Iterator[This]
- def contains(elem: Any): Boolean
- def containsSlice[B](that: Seq[B]): Boolean
- def corresponds[B](that: Seq[B])(p: (A,B) => Boolean): Boolean
- def diff[B >: A, That](that: Seq[B]): This
- def distinct: This
- def endsWith[B](that: Seq[B]): Boolean
- def indexOfSlice[B >: A](that: Seq[B]): Int
- def indexOfSlice[B >: A](that: Seq[B], fromIndex: Int): Int
- def indexOf[B >: A](elem: B): Int
- def indexOf[B >: A](elem: B, from: Int): Int
- def indexWhere(p: A => Boolean): Int
- def indexWhere(p: A => Boolean, from: Int): Int
- def indices: Range
- def intersect[B >: A, That](that: Seq[B]): This
- def isDefinedAt(x: Int): Boolean
- def lastIndexOfSlice[B >: A](that: Seq[B]): Int
- def lastIndexOfSlice[B >: A](that: Seq[B], fromIndex: Int): Int
- def lastIndexOf[B >: A](elem: B): Int
- def lastIndexOf[B >: A](elem: B, end: Int): Int
- def lastIndexWhere(p: A => Boolean): Int
- def lastIndexWhere(p: A => Boolean, end: Int): Int
- def lengthCompare(len: Int): Int
- def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That
- def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That
- def permutations: Iterator[This]
- def prefixLength(p: A => Boolean): Int
- def reverse: This
- def reverseIterator: Iterator[A]
- def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That
- def segmentLength(p: A => Boolean, from: Int): Int
- def sortBy[B](f: A => B)(implicit ord: Ordering[B]): This
- def sortWith(lt: (A, A) => Boolean): This
- def sorted[B >: A](implicit ord: Ordering[B]): This
- def startsWith[B](that: Seq[B]): Boolean
- def startsWith[B](that: Seq[B], offset: Int): Boolean
- def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[This, B, That]): That
- def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That
-
- // refinements
- def view: SeqView[A, This]
- def view(from: Int, until: Int): SeqView[A, This]
-}
diff --git a/src/library/scala/collection/interfaces/SetMethods.scala b/src/library/scala/collection/interfaces/SetMethods.scala
deleted file mode 100644
index ffe141e..0000000
--- a/src/library/scala/collection/interfaces/SetMethods.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package interfaces
-
-import generic._
-import mutable.Buffer
-import scala.reflect.ClassManifest
-import annotation.unchecked.uncheckedVariance
-
-/**
- * @since 2.8
- */
-trait SubtractableMethods[A, +This <: Subtractable[A, This]] {
- def -(elem: A): This
- def -(elem1: A, elem2: A, elems: A*): This
- def --(xs: TraversableOnce[A]): This
-}
-
-/**
- * @since 2.8
- */
-trait SetMethods[A, +This <: SetLike[A, This] with Set[A]]
- extends IterableMethods[A, This]
- with SubtractableMethods[A, This] {
- self: Set[A] =>
-
- // abstract
- def empty: This
- def contains(elem: A): Boolean
- def + (elem: A): This
- def - (elem: A): This
-
- // concrete
- def & (that: Set[A]): This
- def &~ (that: Set[A]): This
- def + (elem1: A, elem2: A, elems: A*): This
- def apply(elem: A): Boolean
- def diff(that: Set[A]): This
- def intersect(that: Set[A]): This
- def subsetOf(that: Set[A]): Boolean
- def subsets(len: Int): Iterator[This]
- def subsets: Iterator[This]
- def union(that: Set[A]): This
- def | (that: Set[A]): This
-}
diff --git a/src/library/scala/collection/interfaces/TraversableMethods.scala b/src/library/scala/collection/interfaces/TraversableMethods.scala
deleted file mode 100644
index 8aba390..0000000
--- a/src/library/scala/collection/interfaces/TraversableMethods.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package interfaces
-
-import generic._
-
-/**
- * @since 2.8
- */
-trait TraversableMethods[+A, +This <: TraversableLike[A, This]] extends TraversableOnceMethods[A] {
- self: Traversable[A] =>
-
- // maps/iteration
- def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
- def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That
- def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[This, B, That]): That
- def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[This, B, That]): That
- def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[This, B, That]): That
-
- // new collections
- def ++:[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
- def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
-
- // element retrieval
- def head: A
- def headOption: Option[A]
- def last: A
- def lastOption: Option[A]
-
- // subcollections
- def drop(n: Int): Traversable[A]
- def dropWhile(p: A => Boolean): Traversable[A]
- def filter(p: A => Boolean): Traversable[A]
- def filterNot(p: A => Boolean): Traversable[A]
- def init: Traversable[A]
- def inits: Iterator[This]
- def slice(from: Int, until: Int): Traversable[A]
- def tail: Traversable[A]
- def tails: Iterator[This]
- def take(n: Int): Traversable[A]
- def takeWhile(p: A => Boolean): Traversable[A]
- def withFilter(p: A => Boolean): FilterMonadic[A, Traversable[A]]
-
- // subdivisions
- def groupBy[K](f: A => K): Map[K, Traversable[A]]
- def partition(p: A => Boolean): (Traversable[A], Traversable[A])
- def span(p: A => Boolean): (Traversable[A], Traversable[A])
- def splitAt(n: Int): (Traversable[A], Traversable[A])
-
- // info
- def stringPrefix: String
-
- // views
- def view: TraversableView[A, This]
- def view(from: Int, until: Int): TraversableView[A, This]
-}
diff --git a/src/library/scala/collection/interfaces/TraversableOnceMethods.scala b/src/library/scala/collection/interfaces/TraversableOnceMethods.scala
deleted file mode 100644
index 5e1325f..0000000
--- a/src/library/scala/collection/interfaces/TraversableOnceMethods.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package interfaces
-
-trait TraversableOnceMethods[+A] {
- self: TraversableOnce[A] =>
-
- def foreach[U](f: A => U): Unit
- def size: Int
- protected[this] def reversed: TraversableOnce[A]
-
- // tests
- def hasDefiniteSize: Boolean
- def isEmpty: Boolean
- def isTraversableAgain: Boolean
- def nonEmpty: Boolean
-
- // applying a predicate
- def collectFirst[B](pf: PartialFunction[A, B]): Option[B]
- def count(p: A => Boolean): Int
- def exists(p: A => Boolean): Boolean
- def find(p: A => Boolean): Option[A]
- def forall(p: A => Boolean): Boolean
-
- // folds
- def /:[B](z: B)(op: (B, A) => B): B
- def :\[B](z: B)(op: (A, B) => B): B
- def foldLeft[B](z: B)(op: (B, A) => B): B
- def foldRight[B](z: B)(op: (A, B) => B): B
- def reduceLeftOption[B >: A](op: (B, A) => B): Option[B]
- def reduceLeft[B >: A](op: (B, A) => B): B
- def reduceRightOption[B >: A](op: (A, B) => B): Option[B]
- def reduceRight[B >: A](op: (A, B) => B): B
-
- // copies
- def copyToArray[B >: A](xs: Array[B]): Unit
- def copyToArray[B >: A](xs: Array[B], start: Int): Unit
- def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit
- def copyToBuffer[B >: A](dest: mutable.Buffer[B]): Unit
-
- // conversions
- def toArray[B >: A : ClassManifest]: Array[B]
- def toBuffer[B >: A]: mutable.Buffer[B]
- def toIndexedSeq[B >: A]: immutable.IndexedSeq[B]
- def toIterable: Iterable[A]
- def toIterator: Iterator[A]
- def toList: List[A]
- def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U]
- def toSeq: Seq[A]
- def toSet[B >: A]: immutable.Set[B]
- def toStream: Stream[A]
- def toTraversable: Traversable[A]
-
- // type-constrained folds
- def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A
- def max[B >: A](implicit cmp: Ordering[B]): A
- def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A
- def min[B >: A](implicit cmp: Ordering[B]): A
- def product[B >: A](implicit num: Numeric[B]): B
- def sum[B >: A](implicit num: Numeric[B]): B
-
- // strings
- def mkString(start: String, sep: String, end: String): String
- def mkString(sep: String): String
- def mkString: String
-
- def addString(buf: StringBuilder, start: String, sep: String, end: String): StringBuilder
- def addString(buf: StringBuilder, sep: String): StringBuilder
- def addString(buf: StringBuilder): StringBuilder
-}
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
new file mode 100644
index 0000000..157e5da
--- /dev/null
+++ b/src/library/scala/collection/mutable/AVLTree.scala
@@ -0,0 +1,242 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+package collection
+package mutable
+
+
+/**
+ * An immutable AVL Tree implementation used by mutable.TreeSet
+ *
+ * @author Lucien Pereira
+ *
+ */
+private[mutable] sealed trait AVLTree[+A] extends Serializable {
+ def balance: Int
+
+ def depth: Int
+
+ def iterator[B >: A]: Iterator[B] = Iterator.empty
+
+ def contains[B >: A](value: B, ordering: Ordering[B]): Boolean = false
+
+ /**
+ * Returns a new tree containing the given element.
+ * Thows an IllegalArgumentException if element is already present.
+ *
+ */
+ def insert[B >: A](value: B, ordering: Ordering[B]): AVLTree[B] = Node(value, Leaf, Leaf)
+
+ /**
+ * Return a new tree which not contains given element.
+ *
+ */
+ def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] =
+ throw new NoSuchElementException(String.valueOf(value))
+
+ /**
+ * Return a tuple containing the smallest element of the provided tree
+ * and a new tree from which this element has been extracted.
+ *
+ */
+ def removeMin[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
+
+ /**
+ * Return a tuple containing the biggest element of the provided tree
+ * and a new tree from which this element has been extracted.
+ *
+ */
+ def removeMax[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.")
+
+ def rebalance[B >: A]: AVLTree[B] = this
+
+ def leftRotation[B >: A]: Node[B] = sys.error("Should not happen.")
+
+ def rightRotation[B >: A]: Node[B] = sys.error("Should not happen.")
+
+ def doubleLeftRotation[B >: A]: Node[B] = sys.error("Should not happen.")
+
+ def doubleRightRotation[B >: A]: Node[B] = sys.error("Should not happen.")
+}
+
+private case object Leaf extends AVLTree[Nothing] {
+ override val balance: Int = 0
+
+ override val depth: Int = -1
+}
+
+private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree[A]) extends AVLTree[A] {
+ override val balance: Int = right.depth - left.depth
+
+ override val depth: Int = math.max(left.depth, right.depth) + 1
+
+ override def iterator[B >: A]: Iterator[B] = new AVLIterator(this)
+
+ override def contains[B >: A](value: B, ordering: Ordering[B]) = {
+ val ord = ordering.compare(value, data)
+ if (0 == ord)
+ true
+ else if (ord < 0)
+ left.contains(value, ordering)
+ else
+ right.contains(value, ordering)
+ }
+
+ /**
+ * Returns a new tree containing the given element.
+ * Thows an IllegalArgumentException if element is already present.
+ *
+ */
+ override def insert[B >: A](value: B, ordering: Ordering[B]) = {
+ val ord = ordering.compare(value, data)
+ if (0 == ord)
+ throw new IllegalArgumentException()
+ else if (ord < 0)
+ Node(data, left.insert(value, ordering), right).rebalance
+ else
+ Node(data, left, right.insert(value, ordering)).rebalance
+ }
+
+ /**
+ * Return a new tree which not contains given element.
+ *
+ */
+ override def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] = {
+ val ord = ordering.compare(value, data)
+ if(ord == 0) {
+ if (Leaf == left) {
+ if (Leaf == right) {
+ Leaf
+ } else {
+ val (min, newRight) = right.removeMin
+ Node(min, left, newRight).rebalance
+ }
+ } else {
+ val (max, newLeft) = left.removeMax
+ Node(max, newLeft, right).rebalance
+ }
+ } else if (ord < 0) {
+ Node(data, left.remove(value, ordering), right).rebalance
+ } else {
+ Node(data, left, right.remove(value, ordering)).rebalance
+ }
+ }
+
+ /**
+ * Return a tuple containing the smallest element of the provided tree
+ * and a new tree from which this element has been extracted.
+ *
+ */
+ override def removeMin[B >: A]: (B, AVLTree[B]) = {
+ if (Leaf == left)
+ (data, right)
+ else {
+ val (min, newLeft) = left.removeMin
+ (min, Node(data, newLeft, right).rebalance)
+ }
+ }
+
+ /**
+ * Return a tuple containing the biggest element of the provided tree
+ * and a new tree from which this element has been extracted.
+ *
+ */
+ override def removeMax[B >: A]: (B, AVLTree[B]) = {
+ if (Leaf == right)
+ (data, left)
+ else {
+ val (max, newRight) = right.removeMax
+ (max, Node(data, left, newRight).rebalance)
+ }
+ }
+
+ override def rebalance[B >: A] = {
+ if (-2 == balance) {
+ if (1 == left.balance)
+ doubleRightRotation
+ else
+ rightRotation
+ } else if (2 == balance) {
+ if (-1 == right.balance)
+ doubleLeftRotation
+ else
+ leftRotation
+ } else {
+ this
+ }
+ }
+
+ override def leftRotation[B >: A] = {
+ if (Leaf != right) {
+ val r: Node[A] = right.asInstanceOf[Node[A]]
+ Node(r.data, Node(data, left, r.left), r.right)
+ } else sys.error("Should not happen.")
+ }
+
+ override def rightRotation[B >: A] = {
+ if (Leaf != left) {
+ val l: Node[A] = left.asInstanceOf[Node[A]]
+ Node(l.data, l.left, Node(data, l.right, right))
+ } else sys.error("Should not happen.")
+ }
+
+ override def doubleLeftRotation[B >: A] = {
+ if (Leaf != right) {
+ val r: Node[A] = right.asInstanceOf[Node[A]]
+ // Let's save an instanceOf by 'inlining' the left rotation
+ val rightRotated = r.rightRotation
+ Node(rightRotated.data, Node(data, left, rightRotated.left), rightRotated.right)
+ } else sys.error("Should not happen.")
+ }
+
+ override def doubleRightRotation[B >: A] = {
+ if (Leaf != left) {
+ val l: Node[A] = left.asInstanceOf[Node[A]]
+ // Let's save an instanceOf by 'inlining' the right rotation
+ val leftRotated = l.leftRotation
+ Node(leftRotated.data, leftRotated.left, Node(data, leftRotated.right, right))
+ } else sys.error("Should not happen.")
+ }
+}
+
+private class AVLIterator[A](root: Node[A]) extends Iterator[A] {
+ val stack = mutable.ArrayStack[Node[A]](root)
+ diveLeft()
+
+ private def diveLeft(): Unit = {
+ if (Leaf != stack.head.left) {
+ val left: Node[A] = stack.head.left.asInstanceOf[Node[A]]
+ stack.push(left)
+ diveLeft()
+ }
+ }
+
+ private def engageRight(): Unit = {
+ if (Leaf != stack.head.right) {
+ val right: Node[A] = stack.head.right.asInstanceOf[Node[A]]
+ stack.pop
+ stack.push(right)
+ diveLeft()
+ } else
+ stack.pop
+ }
+
+ override def hasNext: Boolean = !stack.isEmpty
+
+ override def next(): A = {
+ if (stack.isEmpty)
+ throw new NoSuchElementException()
+ else {
+ val result = stack.head.data
+ // Let's maintain stack for the next invocation
+ engageRight()
+ result
+ }
+ }
+}
diff --git a/src/library/scala/collection/mutable/AddingBuilder.scala b/src/library/scala/collection/mutable/AddingBuilder.scala
deleted file mode 100644
index 6f198b8..0000000
--- a/src/library/scala/collection/mutable/AddingBuilder.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.collection
-package mutable
-
-import generic._
-
-/** The canonical builder for collections that are addable, i.e. that support an efficient `+` method
- * which adds an element to the collection.
- *
- * Collections are built from their empty element using this `+` method.
- * @param empty the empty element of the collection.
- * @tparam Elem the type of elements that get added to the builder.
- * @tparam To the type of the built collection.
- *
- * @note "efficient `+`" is not idle talk. Do not use this on mutable collections or any others
- * for which `+` may perform an unshared copy! See GrowingBuilder comments for more.
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
- */
- at deprecated("Will be removed after scala 2.9", "2.8.0")
-class AddingBuilder[Elem, To <: Addable[Elem, To] with collection.Iterable[Elem] with collection.IterableLike[Elem, To]](empty: To)
-extends Builder[Elem, To] {
- protected var elems: To = empty
- def +=(x: Elem): this.type = { elems = elems + x; this }
- def clear() { elems = empty }
- def result: To = elems
-}
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index 91c2751..f1cfd2d 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -29,7 +29,7 @@ import parallel.mutable.ParArray
*
* @tparam A the type of this arraybuffer's elements.
*
- * @define Coll ArrayBuffer
+ * @define Coll `ArrayBuffer`
* @define coll arraybuffer
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `ArrayBuffer[B]` because an implicit of type `CanBuildFrom[ArrayBuffer, B, ArrayBuffer[B]]`
@@ -45,7 +45,8 @@ import parallel.mutable.ParArray
*/
@SerialVersionUID(1529165946227428979L)
class ArrayBuffer[A](override protected val initialSize: Int)
- extends Buffer[A]
+ extends AbstractBuffer[A]
+ with Buffer[A]
with GenericTraversableTemplate[A, ArrayBuffer]
with BufferLike[A, ArrayBuffer[A]]
with IndexedSeqOptimized[A, ArrayBuffer[A]]
@@ -65,7 +66,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
override def sizeHint(len: Int) {
if (len > size && len >= 1) {
val newarray = new Array[AnyRef](len)
- compat.Platform.arraycopy(array, 0, newarray, 0, size0)
+ scala.compat.Platform.arraycopy(array, 0, newarray, 0, size0)
array = newarray
}
}
@@ -92,7 +93,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
* @return the updated buffer.
*/
override def ++=(xs: TraversableOnce[A]): this.type = xs match {
- case v: collection.IndexedSeqLike[_, _] =>
+ case v: scala.collection.IndexedSeqLike[_, _] =>
val n = v.length
ensureSize(size0 + n)
v.copyToArray(array.asInstanceOf[scala.Array[Any]], size0, n)
@@ -106,7 +107,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
* the identity of the buffer. It takes time linear in
* the buffer size.
*
- * @param elem the element to append.
+ * @param elem the element to prepend.
* @return the updated buffer.
*/
def +=:(elem: A): this.type = {
@@ -168,12 +169,6 @@ class ArrayBuffer[A](override protected val initialSize: Int)
result
}
- /** Return a clone of this buffer.
- *
- * @return an `ArrayBuffer` with the same elements.
- */
- override def clone(): ArrayBuffer[A] = new ArrayBuffer[A] ++= this
-
def result: ArrayBuffer[A] = this
/** Defines the prefix of the string representation.
@@ -186,11 +181,11 @@ class ArrayBuffer[A](override protected val initialSize: Int)
*
* $factoryInfo
* @define coll array buffer
- * @define Coll ArrayBuffer
+ * @define Coll `ArrayBuffer`
*/
object ArrayBuffer extends SeqFactory[ArrayBuffer] {
/** $genericCanBuildFromInfo */
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArrayBuffer[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArrayBuffer[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, ArrayBuffer[A]] = new ArrayBuffer[A]
}
diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala
index f72ba78..0ce2cda 100644
--- a/src/library/scala/collection/mutable/ArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/ArrayBuilder.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,7 +12,8 @@ package scala.collection
package mutable
import generic._
-import scala.reflect.ClassManifest
+import scala.reflect.ClassTag
+import scala.runtime.ScalaRunTime
/** A builder class for arrays.
*
@@ -30,17 +31,30 @@ object ArrayBuilder {
/** Creates a new arraybuilder of type `T`.
*
- * @tparam T type of the elements for the array builder, with a `ClassManifest` context bound.
+ * @tparam T type of the elements for the array builder, with a `ClassTag` context bound.
* @return a new empty array builder.
*/
- def make[T: ClassManifest](): ArrayBuilder[T] =
- implicitly[ClassManifest[T]].newArrayBuilder()
+ def make[T: ClassTag](): ArrayBuilder[T] = {
+ val tag = implicitly[ClassTag[T]]
+ tag.runtimeClass match {
+ case java.lang.Byte.TYPE => new ArrayBuilder.ofByte().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Short.TYPE => new ArrayBuilder.ofShort().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Character.TYPE => new ArrayBuilder.ofChar().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Integer.TYPE => new ArrayBuilder.ofInt().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Long.TYPE => new ArrayBuilder.ofLong().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Float.TYPE => new ArrayBuilder.ofFloat().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Double.TYPE => new ArrayBuilder.ofDouble().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Boolean.TYPE => new ArrayBuilder.ofBoolean().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Void.TYPE => new ArrayBuilder.ofUnit().asInstanceOf[ArrayBuilder[T]]
+ case _ => new ArrayBuilder.ofRef[T with AnyRef]()(tag.asInstanceOf[ClassTag[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]]
+ }
+ }
/** A class for array builders for arrays of reference types.
*
- * @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassManifest` context bound.
+ * @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassTag` context bound.
*/
- class ofRef[T <: AnyRef : ClassManifest] extends ArrayBuilder[T] {
+ class ofRef[T <: AnyRef : ClassTag] extends ArrayBuilder[T] {
private var elems: Array[T] = _
private var capacity: Int = 0
@@ -76,7 +90,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: TraversableOnce[T]): this.type = (xs: AnyRef) match {
+ override def ++=(xs: TraversableOnce[T]): this.type = (xs.asInstanceOf[AnyRef]) match {
case xs: WrappedArray.ofRef[_] =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
diff --git a/src/library/scala/collection/mutable/ArrayLike.scala b/src/library/scala/collection/mutable/ArrayLike.scala
index bae6bd1..31f3d2a 100644
--- a/src/library/scala/collection/mutable/ArrayLike.scala
+++ b/src/library/scala/collection/mutable/ArrayLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -18,11 +18,11 @@ import generic._
* @tparam A type of the elements contained in the array like object.
* @tparam Repr the type of the actual collection containing the elements.
*
- * @define Coll ArrayLike
+ * @define Coll `ArrayLike`
* @version 2.8
* @since 2.8
*/
-trait ArrayLike[A, +Repr] extends IndexedSeqOptimized[A, Repr] { self =>
+trait ArrayLike[A, +Repr] extends Any with IndexedSeqOptimized[A, Repr] { self =>
/** Creates a possible nested `IndexedSeq` which consists of all the elements
* of this array. If the elements are arrays themselves, the `deep` transformation
@@ -38,7 +38,7 @@ trait ArrayLike[A, +Repr] extends IndexedSeqOptimized[A, Repr] { self =>
*
* @return An possibly nested indexed sequence of consisting of all the elements of the array.
*/
- def deep: scala.collection.IndexedSeq[Any] = new scala.collection.IndexedSeq[Any] {
+ def deep: scala.collection.IndexedSeq[Any] = new scala.collection.AbstractSeq[Any] with scala.collection.IndexedSeq[Any] {
def length = self.length
def apply(idx: Int): Any = self.apply(idx) match {
case x: AnyRef if x.getClass.isArray => WrappedArray.make(x).deep
@@ -46,22 +46,4 @@ trait ArrayLike[A, +Repr] extends IndexedSeqOptimized[A, Repr] { self =>
}
override def stringPrefix = "Array"
}
-
- @deprecated("use deep.toString instead", "2.8.0")
- final def deepToString() =
- deep.toString
-
- @deprecated("use deep.mkString instead", "2.8.0")
- final def deepMkString(start: String, sep: String, end: String): String =
- deep.mkString(start, sep, end)
-
- @deprecated("use deep.mkString instead", "2.8.0")
- final def deepMkString(sep: String): String =
- deepMkString("", sep, "")
-
- @deprecated("use array1.deep.equals(array2.deep) instead", "2.8.0")
- final def deepEquals(that: Any): Boolean = that match {
- case x: AnyRef if x.getClass.isArray => deep.equals(WrappedArray.make(x).deep)
- case _ => false
- }
}
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 008e7fa..25ba7e4 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -1,22 +1,20 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
-import compat.Platform.arraycopy
-
-import scala.reflect.ClassManifest
+import scala.compat.Platform.arraycopy
+import scala.reflect.ClassTag
+import scala.runtime.ScalaRunTime._
import parallel.mutable.ParArray
-
/** This class serves as a wrapper for `Array`s with all the operations found in
* indexed sequences. Where needed, instances of arrays are implicitly converted
* into this class.
@@ -29,18 +27,16 @@ import parallel.mutable.ParArray
*
* @tparam T type of the elements contained in this array.
*
- * @define Coll ArrayOps
+ * @define Coll `Array`
* @define orderDependent
* @define orderDependentFold
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
-abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParallelizable[T, ParArray[T]] {
+trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParallelizable[T, ParArray[T]] {
- private def rowBuilder[U]: Builder[U, Array[U]] =
- Array.newBuilder(
- ClassManifest.fromClass(
- repr.getClass.getComponentType.getComponentType.asInstanceOf[Predef.Class[U]]))
+ private def elementClass: Class[_] =
+ arrayElementClass(repr.getClass)
override def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) {
var l = math.min(len, repr.length)
@@ -48,11 +44,13 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
Array.copy(repr, 0, xs, start, l)
}
- override def toArray[U >: T : ClassManifest]: Array[U] =
- if (implicitly[ClassManifest[U]].erasure eq repr.getClass.getComponentType)
+ override def toArray[U >: T : ClassTag]: Array[U] = {
+ val thatElementClass = arrayElementClass(implicitly[ClassTag[U]])
+ if (elementClass eq thatElementClass)
repr.asInstanceOf[Array[U]]
else
super.toArray[U]
+ }
override def par = ParArray.handoff(repr)
@@ -60,12 +58,12 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
* into a single array.
*
* @tparam U Type of row elements.
- * @param asArray A function that converts elements of this array to rows - arrays of type `U`.
+ * @param asTrav A function that converts elements of this array to rows - arrays of type `U`.
* @return An array obtained by concatenating rows of this array.
*/
- def flatten[U, To](implicit asTrav: T => collection.Traversable[U], m: ClassManifest[U]): Array[U] = {
+ def flatten[U](implicit asTrav: T => scala.collection.Traversable[U], m: ClassTag[U]): Array[U] = {
val b = Array.newBuilder[U]
- b.sizeHint(map{case is: collection.IndexedSeq[_] => is.size case _ => 0} sum)
+ b.sizeHint(map{case is: scala.collection.IndexedSeq[_] => is.size case _ => 0}.sum)
for (xs <- this)
b ++= asTrav(xs)
b.result
@@ -78,22 +76,24 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
* @return An array obtained by replacing elements of this arrays with rows the represent.
*/
def transpose[U](implicit asArray: T => Array[U]): Array[Array[U]] = {
- val bs = asArray(head) map (_ => rowBuilder[U])
- for (xs <- this) {
- var i = 0
- for (x <- asArray(xs)) {
- bs(i) += x
- i += 1
+ val bb: Builder[Array[U], Array[Array[U]]] = Array.newBuilder(ClassTag[Array[U]](elementClass))
+ if (isEmpty) bb.result()
+ else {
+ def mkRowBuilder() = Array.newBuilder(ClassTag[U](arrayElementClass(elementClass)))
+ val bs = asArray(head) map (_ => mkRowBuilder())
+ for (xs <- this) {
+ var i = 0
+ for (x <- asArray(xs)) {
+ bs(i) += x
+ i += 1
+ }
}
+ for (b <- bs) bb += b.result()
+ bb.result()
}
- val bb: Builder[Array[U], Array[Array[U]]] = Array.newBuilder(
- ClassManifest.fromClass(
- repr.getClass.getComponentType.asInstanceOf[Predef.Class[Array[U]]]))
- for (b <- bs) bb += b.result
- bb.result
}
- def seq = this.iterator
+ def seq = thisCollection
}
@@ -105,12 +105,11 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] with CustomParalleliza
object ArrayOps {
/** A class of `ArrayOps` for arrays containing reference types. */
- class ofRef[T <: AnyRef](override val repr: Array[T]) extends ArrayOps[T] with ArrayLike[T, Array[T]] {
+ final class ofRef[T <: AnyRef](override val repr: Array[T]) extends AnyVal with ArrayOps[T] with ArrayLike[T, Array[T]] {
override protected[this] def thisCollection: WrappedArray[T] = new WrappedArray.ofRef[T](repr)
override protected[this] def toCollection(repr: Array[T]): WrappedArray[T] = new WrappedArray.ofRef[T](repr)
- override protected[this] def newBuilder = new ArrayBuilder.ofRef[T]()(
- ClassManifest.classType[T](repr.getClass.getComponentType))
+ override protected[this] def newBuilder = new ArrayBuilder.ofRef[T]()(ClassTag[T](arrayElementClass(repr.getClass)))
def length: Int = repr.length
def apply(index: Int): T = repr(index)
@@ -118,7 +117,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `byte`s. */
- class ofByte(override val repr: Array[Byte]) extends ArrayOps[Byte] with ArrayLike[Byte, Array[Byte]] {
+final class ofByte(override val repr: Array[Byte]) extends AnyVal with ArrayOps[Byte] with ArrayLike[Byte, Array[Byte]] {
override protected[this] def thisCollection: WrappedArray[Byte] = new WrappedArray.ofByte(repr)
override protected[this] def toCollection(repr: Array[Byte]): WrappedArray[Byte] = new WrappedArray.ofByte(repr)
@@ -130,7 +129,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `short`s. */
- class ofShort(override val repr: Array[Short]) extends ArrayOps[Short] with ArrayLike[Short, Array[Short]] {
+final class ofShort(override val repr: Array[Short]) extends AnyVal with ArrayOps[Short] with ArrayLike[Short, Array[Short]] {
override protected[this] def thisCollection: WrappedArray[Short] = new WrappedArray.ofShort(repr)
override protected[this] def toCollection(repr: Array[Short]): WrappedArray[Short] = new WrappedArray.ofShort(repr)
@@ -142,7 +141,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `char`s. */
- class ofChar(override val repr: Array[Char]) extends ArrayOps[Char] with ArrayLike[Char, Array[Char]] {
+final class ofChar(override val repr: Array[Char]) extends AnyVal with ArrayOps[Char] with ArrayLike[Char, Array[Char]] {
override protected[this] def thisCollection: WrappedArray[Char] = new WrappedArray.ofChar(repr)
override protected[this] def toCollection(repr: Array[Char]): WrappedArray[Char] = new WrappedArray.ofChar(repr)
@@ -154,7 +153,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `int`s. */
- class ofInt(override val repr: Array[Int]) extends ArrayOps[Int] with ArrayLike[Int, Array[Int]] {
+final class ofInt(override val repr: Array[Int]) extends AnyVal with ArrayOps[Int] with ArrayLike[Int, Array[Int]] {
override protected[this] def thisCollection: WrappedArray[Int] = new WrappedArray.ofInt(repr)
override protected[this] def toCollection(repr: Array[Int]): WrappedArray[Int] = new WrappedArray.ofInt(repr)
@@ -166,7 +165,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `long`s. */
- class ofLong(override val repr: Array[Long]) extends ArrayOps[Long] with ArrayLike[Long, Array[Long]] {
+final class ofLong(override val repr: Array[Long]) extends AnyVal with ArrayOps[Long] with ArrayLike[Long, Array[Long]] {
override protected[this] def thisCollection: WrappedArray[Long] = new WrappedArray.ofLong(repr)
override protected[this] def toCollection(repr: Array[Long]): WrappedArray[Long] = new WrappedArray.ofLong(repr)
@@ -178,7 +177,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `float`s. */
- class ofFloat(override val repr: Array[Float]) extends ArrayOps[Float] with ArrayLike[Float, Array[Float]] {
+final class ofFloat(override val repr: Array[Float]) extends AnyVal with ArrayOps[Float] with ArrayLike[Float, Array[Float]] {
override protected[this] def thisCollection: WrappedArray[Float] = new WrappedArray.ofFloat(repr)
override protected[this] def toCollection(repr: Array[Float]): WrappedArray[Float] = new WrappedArray.ofFloat(repr)
@@ -190,7 +189,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `double`s. */
- class ofDouble(override val repr: Array[Double]) extends ArrayOps[Double] with ArrayLike[Double, Array[Double]] {
+final class ofDouble(override val repr: Array[Double]) extends AnyVal with ArrayOps[Double] with ArrayLike[Double, Array[Double]] {
override protected[this] def thisCollection: WrappedArray[Double] = new WrappedArray.ofDouble(repr)
override protected[this] def toCollection(repr: Array[Double]): WrappedArray[Double] = new WrappedArray.ofDouble(repr)
@@ -202,7 +201,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays containing `boolean`s. */
- class ofBoolean(override val repr: Array[Boolean]) extends ArrayOps[Boolean] with ArrayLike[Boolean, Array[Boolean]] {
+final class ofBoolean(override val repr: Array[Boolean]) extends AnyVal with ArrayOps[Boolean] with ArrayLike[Boolean, Array[Boolean]] {
override protected[this] def thisCollection: WrappedArray[Boolean] = new WrappedArray.ofBoolean(repr)
override protected[this] def toCollection(repr: Array[Boolean]): WrappedArray[Boolean] = new WrappedArray.ofBoolean(repr)
@@ -214,7 +213,7 @@ object ArrayOps {
}
/** A class of `ArrayOps` for arrays of `Unit` types. */
- class ofUnit(override val repr: Array[Unit]) extends ArrayOps[Unit] with ArrayLike[Unit, Array[Unit]] {
+final class ofUnit(override val repr: Array[Unit]) extends AnyVal with ArrayOps[Unit] with ArrayLike[Unit, Array[Unit]] {
override protected[this] def thisCollection: WrappedArray[Unit] = new WrappedArray.ofUnit(repr)
override protected[this] def toCollection(repr: Array[Unit]): WrappedArray[Unit] = new WrappedArray.ofUnit(repr)
diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index 7cdb369..33f6949 100644
--- a/src/library/scala/collection/mutable/ArraySeq.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -27,7 +27,7 @@ import parallel.mutable.ParArray
* @tparam A type of the elements contained in this array sequence.
* @param length the length of the underlying array.
*
- * @define Coll ArraySeq
+ * @define Coll `ArraySeq`
* @define coll array sequence
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `ArraySeq[B]` because an implicit of type `CanBuildFrom[ArraySeq, B, ArraySeq[B]]`
@@ -43,7 +43,8 @@ import parallel.mutable.ParArray
*/
@SerialVersionUID(1530165946227428979L)
class ArraySeq[A](override val length: Int)
-extends IndexedSeq[A]
+extends AbstractSeq[A]
+ with IndexedSeq[A]
with GenericTraversableTemplate[A, ArraySeq]
with IndexedSeqOptimized[A, ArraySeq[A]]
with CustomParallelizable[A, ParArray[A]]
@@ -88,15 +89,22 @@ extends IndexedSeq[A]
Array.copy(array, 0, xs, start, len1)
}
+ override def clone(): ArraySeq[A] = {
+ val cloned = array.clone.asInstanceOf[Array[AnyRef]]
+ new ArraySeq[A](length) {
+ override val array = cloned
+ }
+ }
+
}
/** $factoryInfo
* @define coll array sequence
- * @define Coll ArraySeq
+ * @define Coll `ArraySeq`
*/
object ArraySeq extends SeqFactory[ArraySeq] {
/** $genericCanBuildFromInfo */
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArraySeq[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArraySeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, ArraySeq[A]] =
new ArrayBuffer[A] mapResult { buf =>
val result = new ArraySeq[A](buf.length)
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index fa2b084..670558a 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -1,38 +1,36 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
import generic._
-
-
+import scala.reflect.ClassTag
/** Factory object for the `ArrayStack` class.
*
* $factoryInfo
* @define coll array stack
- * @define Coll ArrayStack
+ * @define Coll `ArrayStack`
*/
object ArrayStack extends SeqFactory[ArrayStack] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArrayStack[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArrayStack[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, ArrayStack[A]] = new ArrayStack[A]
def empty: ArrayStack[Nothing] = new ArrayStack()
- def apply[A: ClassManifest](elems: A*): ArrayStack[A] = {
- val els: Array[AnyRef] = elems.reverse.map{_.asInstanceOf[AnyRef]}(breakOut)
+ def apply[A: ClassTag](elems: A*): ArrayStack[A] = {
+ val els: Array[AnyRef] = elems.reverseMap(_.asInstanceOf[AnyRef])(breakOut)
if (els.length == 0) new ArrayStack()
else new ArrayStack[A](els, els.length)
}
private[mutable] def growArray(x: Array[AnyRef]) = {
- val y = new Array[AnyRef](x.length * 2)
+ val y = new Array[AnyRef](math.max(x.length * 2, 1))
Array.copy(x, 0, y, 0, x.length)
y
}
@@ -55,17 +53,18 @@ object ArrayStack extends SeqFactory[ArrayStack] {
*
* @tparam T type of the elements contained in this array stack.
*
- * @define Coll ArrayStack
+ * @define Coll `ArrayStack`
* @define coll array stack
* @define orderDependent
* @define orderDependentFold
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
- at cloneable @SerialVersionUID(8565219180626620510L)
+ at SerialVersionUID(8565219180626620510L)
class ArrayStack[T] private(private var table : Array[AnyRef],
private var index : Int)
-extends Seq[T]
+extends AbstractSeq[T]
+ with Seq[T]
with SeqLike[T, ArrayStack[T]]
with GenericTraversableTemplate[T, ArrayStack]
with Cloneable[ArrayStack[T]]
@@ -90,8 +89,7 @@ extends Seq[T]
override def companion = ArrayStack
- /** Replace element at index <code>n</code> with the new element
- * <code>newelem</code>.
+ /** Replace element at index `n` with the new element `newelem`.
*
* This is a constant time operation.
*
@@ -124,10 +122,6 @@ extends Seq[T]
x
}
- /** View the top element of the stack. */
- @deprecated("use top instead", "2.8.0")
- def peek = top
-
/** View the top element of the stack.
*
* Does not remove the element on the top. If the stack is empty,
@@ -160,7 +154,7 @@ extends Seq[T]
/** Pushes all the provided elements in the traversable object onto the stack.
*
- * @param x The source of elements to push.
+ * @param xs The source of elements to push.
* @return A reference to this stack.
*/
override def ++=(xs: TraversableOnce[T]): this.type = { xs.seq foreach += ; this }
@@ -172,7 +166,22 @@ extends Seq[T]
*/
def +=(x: T): this.type = { push(x); this }
- def result = new ArrayStack[T](table.reverse, index)
+ def result = {
+ reverseTable()
+ this
+ }
+
+ private def reverseTable() {
+ var i = 0
+ val until = index / 2
+ while (i < until) {
+ val revi = index - i - 1
+ val tmp = table(i)
+ table(i) = table(revi)
+ table(revi) = tmp
+ i += 1
+ }
+ }
/** Pop the top two elements off the stack, apply `f` to them and push the result
* back on to the stack.
@@ -215,7 +224,7 @@ extends Seq[T]
/** Creates and iterator over the stack in LIFO order.
* @return an iterator over the elements of the stack.
*/
- def iterator: Iterator[T] = new Iterator[T] {
+ def iterator: Iterator[T] = new AbstractIterator[T] {
var currentIndex = index
def hasNext = currentIndex > 0
def next() = {
diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala
index adda881..2a535a7 100644
--- a/src/library/scala/collection/mutable/BitSet.scala
+++ b/src/library/scala/collection/mutable/BitSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -21,7 +21,7 @@ import BitSetLike.{LogWL, updateArray}
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_bitsets "Scala's Collection Library overview"]]
* section on `Mutable Bitsets` for more information.
*
- * @define Coll BitSet
+ * @define Coll `BitSet`
* @define coll bitset
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `BitSet[B]` because an implicit of type `CanBuildFrom[BitSet, B, BitSet]`
@@ -36,7 +36,8 @@ import BitSetLike.{LogWL, updateArray}
* @define willNotTerminateInf
*/
@SerialVersionUID(8483111450368547763L)
-class BitSet(protected var elems: Array[Long]) extends Set[Int]
+class BitSet(protected var elems: Array[Long]) extends AbstractSet[Int]
+ with SortedSet[Int]
with scala.collection.BitSet
with BitSetLike[BitSet]
with SetLike[Int, BitSet]
@@ -67,7 +68,7 @@ class BitSet(protected var elems: Array[Long]) extends Set[Int]
elems(idx) = w
}
- protected def fromArray(words: Array[Long]): BitSet = new BitSet(words)
+ protected def fromBitMaskNoCopy(words: Array[Long]): BitSet = new BitSet(words)
override def add(elem: Int): Boolean = {
require(elem >= 0)
@@ -102,7 +103,7 @@ class BitSet(protected var elems: Array[Long]) extends Set[Int]
*
* @return an immutable set containing all the elements of this set.
*/
- def toImmutable = immutable.BitSet.fromArray(elems)
+ def toImmutable = immutable.BitSet.fromBitMaskNoCopy(elems)
override def clone(): BitSet = {
val elems1 = new Array[Long](elems.length)
@@ -113,7 +114,7 @@ class BitSet(protected var elems: Array[Long]) extends Set[Int]
/** $factoryInfo
* @define coll bitset
- * @define Coll BitSet
+ * @define Coll `BitSet`
*/
object BitSet extends BitSetFactory[BitSet] {
def empty: BitSet = new BitSet
@@ -123,4 +124,17 @@ object BitSet extends BitSetFactory[BitSet] {
/** $bitsetCanBuildFrom */
implicit def canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom
+
+ /** A bitset containing all the bits in an array */
+ def fromBitMask(elems: Array[Long]): BitSet = {
+ val len = elems.length
+ val a = new Array[Long](len)
+ Array.copy(elems, 0, a, 0, len)
+ new BitSet(a)
+ }
+
+ /** A bitset containing all the bits in an array, wrapping the existing
+ * array without copying.
+ */
+ def fromBitMaskNoCopy(elems: Array[Long]): BitSet = new BitSet(elems)
}
diff --git a/src/library/scala/collection/mutable/Buffer.scala b/src/library/scala/collection/mutable/Buffer.scala
index fa2615e..230799c 100644
--- a/src/library/scala/collection/mutable/Buffer.scala
+++ b/src/library/scala/collection/mutable/Buffer.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -25,22 +25,24 @@ import generic._
*
* @tparam A type of the elements contained in this buffer.
*
- * @define Coll Buffer
+ * @define Coll `Buffer`
* @define coll buffer
*/
- at cloneable
trait Buffer[A] extends Seq[A]
with GenericTraversableTemplate[A, Buffer]
- with BufferLike[A, Buffer[A]] {
+ with BufferLike[A, Buffer[A]]
+ with scala.Cloneable {
override def companion: GenericCompanion[Buffer] = Buffer
}
/** $factoryInfo
* @define coll buffer
- * @define Coll Buffer
+ * @define Coll `Buffer`
*/
object Buffer extends SeqFactory[Buffer] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Buffer[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Buffer[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, Buffer[A]] = new ArrayBuffer
}
+/** Explicit instantiation of the `Buffer` trait to reduce class file size in subclasses. */
+private[scala] abstract class AbstractBuffer[A] extends AbstractSeq[A] with Buffer[A]
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 8e96684..5935a28 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,7 +13,7 @@ package mutable
import generic._
import script._
-import annotation.{migration, bridge}
+import scala.annotation.{migration, bridge}
/** A template trait for buffers of type `Buffer[A]`.
*
@@ -58,13 +58,13 @@ import annotation.{migration, bridge}
* mutates the collection in place, unlike similar but
* undeprecated methods throughout the collections hierarchy.
*/
- at cloneable
trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
extends Growable[A]
with Shrinkable[A]
with Scriptable[A]
with Subtractable[A, This]
with SeqLike[A, This]
+ with scala.Cloneable
{ self : This =>
// Abstract methods from Seq:
@@ -93,7 +93,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
* @throws IndexOutOfBoundsException if the index `n` is not in the valid range
* `0 <= n <= length`.
*/
- def insertAll(n: Int, elems: collection.Traversable[A])
+ def insertAll(n: Int, elems: scala.collection.Traversable[A])
/** Removes the element at a given index from this buffer.
*
@@ -214,51 +214,6 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
*/
def readOnly: scala.collection.Seq[A] = toSeq
- /** Adds a number of elements in an array
- *
- * @param src the array
- * @param start the first element to append
- * @param len the number of elements to append
- */
- @deprecated("replace by: `buf ++= src.view(start, end)`", "2.8.0")
- def ++=(src: Array[A], start: Int, len: Int) {
- var i = start
- val end = i + len
- while (i < end) {
- this += src(i)
- i += 1
- }
- }
-
- /** Adds a single element to this collection and returns
- * the collection itself.
- *
- * $compatMutate
- * You are strongly recommended to use '+=' instead.
- *
- * @param elem the element to add.
- */
- @deprecated("Use += instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() +=' if you intend to create a new collection.", "2.8.0")
- def + (elem: A): This = { +=(elem); repr }
-
- /** Adds two or more elements to this collection and returns
- * the collection itself.
- *
- * $compatMutate
- * You are strongly recommended to use '++=' instead.
- *
- * @param elem1 the first element to add.
- * @param elem2 the second element to add.
- * @param elems the remaining elements to add.
- */
- @deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=' if you intend to create a new collection.", "2.8.0")
- def + (elem1: A, elem2: A, elems: A*): This = {
- this += elem1 += elem2 ++= elems
- repr
- }
-
/** Creates a new collection containing both the elements of this collection and the provided
* traversable object.
*
@@ -268,9 +223,6 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
@migration("`++` creates a new buffer. Use `++=` to add an element from this buffer and return that buffer itself.", "2.8.0")
def ++(xs: GenTraversableOnce[A]): This = clone() ++= xs.seq
- @bridge
- def ++(xs: TraversableOnce[A]): This = ++(xs: GenTraversableOnce[A])
-
/** Creates a new collection with all the elements of this collection except `elem`.
*
* @param elem the element to remove.
@@ -301,5 +253,13 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
@migration("`--` creates a new buffer. Use `--=` to remove an element from this buffer and return that buffer itself.", "2.8.0")
override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
- @bridge def --(xs: TraversableOnce[A]): This = --(xs: GenTraversableOnce[A])
+ /** Return a clone of this buffer.
+ *
+ * @return a `Buffer` with the same elements.
+ */
+ override def clone(): This = {
+ val bf = newBuilder
+ bf ++= this
+ bf.result.asInstanceOf[This]
+ }
}
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index 03102f7..37aa186 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -25,7 +25,7 @@ import script._
*
* @tparam A type of the elements the buffer proxy contains.
*
- * @define Coll BufferProxy
+ * @define Coll `BufferProxy`
* @define coll buffer proxy
*/
trait BufferProxy[A] extends Buffer[A] with Proxy {
@@ -38,16 +38,6 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
def apply(n: Int): A = self.apply(n)
- /** Append a single element to this buffer and return
- * the identity of the buffer.
- *
- * @param elem the element to append.
- * @return the updated buffer.
- */
- @deprecated("Use += instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=' if you intend to create a new collection.", "2.8.0")
- override def +(elem: A): Buffer[A] = self.+(elem)
-
/** Append a single element to this buffer.
*
* @param elem the element to append.
@@ -56,17 +46,6 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
override def readOnly = self.readOnly
- /** Appends a number of elements provided by a traversable object
- * via its <code>foreach</code> method. The identity of the
- * buffer is returned.
- *
- * @param iter the traversable object.
- * @return the updated buffer.
- */
- @deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=` if you intend to create a new collection.", "2.8.0")
- override def ++(xs: GenTraversableOnce[A]): Buffer[A] = self.++(xs)
-
/** Appends a number of elements provided by a traversable object.
*
* @param xs the traversable object.
@@ -98,7 +77,7 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
/** Prepend an element to this list.
*
- * @param elem the element to prepend.
+ * @param elems the elements to prepend.
*/
override def prepend(elems: A*) { self.prependAll(elems) }
diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala
index 44cc1c8..5c0681d 100644
--- a/src/library/scala/collection/mutable/Builder.scala
+++ b/src/library/scala/collection/mutable/Builder.scala
@@ -1,13 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -62,9 +63,27 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
* wrong, i.e. a different number of elements is added.
*
* @param coll the collection which serves as a hint for the result's size.
+ */
+ def sizeHint(coll: TraversableLike[_, _]) {
+ if (coll.isInstanceOf[collection.IndexedSeqLike[_,_]]) {
+ sizeHint(coll.size)
+ }
+ }
+
+ /** Gives a hint that one expects the `result` of this builder
+ * to have the same size as the given collection, plus some delta. This will
+ * provide a hint only if the collection is known to have a cheap
+ * `size` method. Currently this is assumed to be the case if and only if
+ * the collection is of type `IndexedSeqLike`.
+ * Some builder classes
+ * will optimize their representation based on the hint. However,
+ * builder implementations are still required to work correctly even if the hint is
+ * wrong, i.e. a different number of elements is added.
+ *
+ * @param coll the collection which serves as a hint for the result's size.
* @param delta a correction to add to the `coll.size` to produce the size hint.
*/
- def sizeHint(coll: TraversableLike[_, _], delta: Int = 0) {
+ def sizeHint(coll: TraversableLike[_, _], delta: Int) {
if (coll.isInstanceOf[collection.IndexedSeqLike[_,_]]) {
sizeHint(coll.size + delta)
}
@@ -100,6 +119,8 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
def +=(x: Elem): this.type = { self += x; this }
def clear() = self.clear()
override def ++=(xs: TraversableOnce[Elem]): this.type = { self ++= xs; this }
+ override def sizeHint(size: Int) = self.sizeHint(size)
+ override def sizeHintBounded(size: Int, boundColl: TraversableLike[_, _]) = self.sizeHintBounded(size, boundColl)
def result: NewTo = f(self.result)
}
}
diff --git a/src/library/scala/collection/mutable/Cloneable.scala b/src/library/scala/collection/mutable/Cloneable.scala
index e6fbce4..dadcd36 100644
--- a/src/library/scala/collection/mutable/Cloneable.scala
+++ b/src/library/scala/collection/mutable/Cloneable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -17,9 +17,6 @@ package mutable
*
* @tparam A Type of the elements contained in the collection, covariant and with reference types as upperbound.
*/
- at cloneable
-trait Cloneable[+A <: AnyRef] {
- // !!! why doesn't this extend java.lang.Cloneable?
- // because neither did @serializable, then we changed it to Serializable
- override def clone: A = super.clone().asInstanceOf[A]
+trait Cloneable[+A <: AnyRef] extends scala.Cloneable {
+ override def clone(): A = super.clone().asInstanceOf[A]
}
diff --git a/src/library/scala/collection/mutable/ConcurrentMap.scala b/src/library/scala/collection/mutable/ConcurrentMap.scala
index 2ba3148..5b5d738 100644
--- a/src/library/scala/collection/mutable/ConcurrentMap.scala
+++ b/src/library/scala/collection/mutable/ConcurrentMap.scala
@@ -1,7 +1,14 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection
package mutable
-
/** A template trait for mutable maps that allow concurrent access.
*
* $concurrentmapinfo
@@ -13,56 +20,65 @@ package mutable
* @tparam A the key type of the map
* @tparam B the value type of the map
*
- * @define Coll ConcurrentMap
+ * @define Coll `ConcurrentMap`
* @define coll concurrent map
* @define concurrentmapinfo
* This is a base trait for all Scala concurrent map implementations. It
* provides all of the methods a `Map` does, with the difference that all the
* changes are atomic. It also describes methods specific to concurrent maps.
- * Note: The concurrent maps do not accept `null` for keys or values.
+ *
+ * '''Note''': The concurrent maps do not accept `'''null'''` for keys or values.
*
* @define atomicop
* This is an atomic operation.
*/
+ at deprecated("Use `scala.collection.concurrent.Map` instead.", "2.10.0")
trait ConcurrentMap[A, B] extends Map[A, B] {
/**
- * Associates the given key with a given value, unless the key was already associated with some other value.
+ * Associates the given key with a given value, unless the key was already
+ * associated with some other value.
*
* $atomicop
*
* @param k key with which the specified value is to be associated with
* @param v value to be associated with the specified key
- * @return `Some(oldvalue)` if there was a value `oldvalue` previously associated with the
- * specified key, or `None` if there was no mapping for the specified key
+ * @return `Some(oldvalue)` if there was a value `oldvalue` previously
+ * associated with the specified key, or `None` if there was no
+ * mapping for the specified key
*/
def putIfAbsent(k: A, v: B): Option[B]
/**
- * Removes the entry for the specified key if its currently mapped to the specified value.
+ * Removes the entry for the specified key if its currently mapped to the
+ * specified value.
*
* $atomicop
*
* @param k key for which the entry should be removed
- * @param v value expected to be associated with the specified key if the removal is to take place
+ * @param v value expected to be associated with the specified key if
+ * the removal is to take place
* @return `true` if the removal took place, `false` otherwise
*/
def remove(k: A, v: B): Boolean
/**
- * Replaces the entry for the given key only if it was previously mapped to a given value.
+ * Replaces the entry for the given key only if it was previously mapped to
+ * a given value.
*
* $atomicop
*
* @param k key for which the entry should be replaced
- * @param oldvalue value expected to be associated with the specified key if replacing is to happen
+ * @param oldvalue value expected to be associated with the specified key
+ * if replacing is to happen
* @param newvalue value to be associated with the specified key
* @return `true` if the entry was replaced, `false` otherwise
*/
def replace(k: A, oldvalue: B, newvalue: B): Boolean
/**
- * Replaces the entry for the given key only if it was previously mapped to some value.
+ * Replaces the entry for the given key only if it was previously mapped
+ * to some value.
*
* $atomicop
*
diff --git a/src/library/scala/collection/mutable/DefaultEntry.scala b/src/library/scala/collection/mutable/DefaultEntry.scala
index 66ca375..f14cb4a 100644
--- a/src/library/scala/collection/mutable/DefaultEntry.scala
+++ b/src/library/scala/collection/mutable/DefaultEntry.scala
@@ -1,18 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package mutable
-
-
/** Class used internally for default map model.
* @since 2.3
*/
diff --git a/src/library/scala/collection/mutable/DefaultMapModel.scala b/src/library/scala/collection/mutable/DefaultMapModel.scala
index 45a9d87..903f117 100644
--- a/src/library/scala/collection/mutable/DefaultMapModel.scala
+++ b/src/library/scala/collection/mutable/DefaultMapModel.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala
index 58758a3..18a1e23 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedList.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -26,7 +26,7 @@ import generic._
*
* @tparam A the type of the elements contained in this double linked list.
*
- * @define Coll DoubleLinkedList
+ * @define Coll `DoubleLinkedList`
* @define coll double linked list
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `DoubleLinkedList[B]` because an implicit of type `CanBuildFrom[DoubleLinkedList, B, DoubleLinkedList[B]]`
@@ -41,7 +41,8 @@ import generic._
* @define willNotTerminateInf
*/
@SerialVersionUID(-8144992287952814767L)
-class DoubleLinkedList[A]() extends LinearSeq[A]
+class DoubleLinkedList[A]() extends AbstractSeq[A]
+ with LinearSeq[A]
with GenericTraversableTemplate[A, DoubleLinkedList]
with DoubleLinkedListLike[A, DoubleLinkedList[A]]
with Serializable {
@@ -62,15 +63,22 @@ class DoubleLinkedList[A]() extends LinearSeq[A]
}
override def companion: GenericCompanion[DoubleLinkedList] = DoubleLinkedList
+
+ // Accurately clone this collection. See SI-6296
+ override def clone(): DoubleLinkedList[A] = {
+ val builder = newBuilder
+ builder ++= this
+ builder.result
+ }
}
/** $factoryInfo
* @define coll double linked list
- * @define Coll DoubleLinkedList
+ * @define Coll `DoubleLinkedList`
*/
object DoubleLinkedList extends SeqFactory[DoubleLinkedList] {
/** $genericCanBuildFromInfo */
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, DoubleLinkedList[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, DoubleLinkedList[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, DoubleLinkedList[A]] =
new Builder[A, DoubleLinkedList[A]] {
diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
index dfb70be..3f223f3 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,7 +11,7 @@
package scala.collection
package mutable
-import annotation.migration
+import scala.annotation.migration
/** This extensible class may be used as a basis for implementing double
* linked lists. Type variable `A` refers to the element type
@@ -52,7 +52,7 @@ import annotation.migration
* @tparam A type of the elements contained in the double linked list
* @tparam This the type of the actual linked list holding the elements
*
- * @define Coll DoubleLinkedList
+ * @define Coll `DoubleLinkedList`
* @define coll double linked list
*/
trait DoubleLinkedListLike[A, This <: Seq[A] with DoubleLinkedListLike[A, This]] extends SeqLike[A, This] with LinkedListLike[A, This] { self =>
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index 3faa26b..91e95e0 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -24,7 +24,7 @@ package mutable
trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
import FlatHashTable._
- private final val tableDebug = false
+ private final def tableDebug = false
@transient private[collection] var _loadFactor = defaultLoadFactor
@@ -44,10 +44,22 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
*/
@transient protected var sizemap: Array[Int] = null
+ @transient protected var seedvalue: Int = tableSizeSeed
+
import HashTable.powerOfTwo
+
protected def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)
+
+ /** The initial size of the hash table.
+ */
+ def initialSize: Int = 32
+
private def initialCapacity = capacity(initialSize)
+ protected def randomSeed = seedGenerator.get.nextInt()
+
+ protected def tableSizeSeed = Integer.bitCount(table.length - 1)
+
/**
* Initializes the collection from the input stream. `f` will be called for each element
* read from the input stream in the order determined by the stream. This is useful for
@@ -58,22 +70,24 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
private[collection] def init(in: java.io.ObjectInputStream, f: A => Unit) {
in.defaultReadObject
- _loadFactor = in.readInt
+ _loadFactor = in.readInt()
assert(_loadFactor > 0)
- val size = in.readInt
+ val size = in.readInt()
tableSize = 0
assert(size >= 0)
table = new Array(capacity(sizeForThreshold(size, _loadFactor)))
threshold = newThreshold(_loadFactor, table.size)
- val smDefined = in.readBoolean
+ seedvalue = in.readInt()
+
+ val smDefined = in.readBoolean()
if (smDefined) sizeMapInit(table.length) else sizemap = null
var index = 0
while (index < size) {
- val elem = in.readObject.asInstanceOf[A]
+ val elem = in.readObject().asInstanceOf[A]
f(elem)
addEntry(elem)
index += 1
@@ -89,42 +103,43 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
out.defaultWriteObject
out.writeInt(_loadFactor)
out.writeInt(tableSize)
+ out.writeInt(seedvalue)
out.writeBoolean(isSizeMapDefined)
iterator.foreach(out.writeObject)
}
/** Finds an entry in the hash table if such an element exists. */
- def findEntry(elem: A): Option[A] = {
- var h = index(elemHashCode(elem))
- var entry = table(h)
- while (null != entry && entry != elem) {
- h = (h + 1) % table.length
- entry = table(h)
- }
+ protected def findEntry(elem: A): Option[A] = {
+ val entry = findEntryImpl(elem)
if (null == entry) None else Some(entry.asInstanceOf[A])
}
/** Checks whether an element is contained in the hash table. */
- def containsEntry(elem: A): Boolean = {
+ protected def containsEntry(elem: A): Boolean = {
+ null != findEntryImpl(elem)
+ }
+
+ private def findEntryImpl(elem: A): AnyRef = {
var h = index(elemHashCode(elem))
var entry = table(h)
while (null != entry && entry != elem) {
h = (h + 1) % table.length
entry = table(h)
}
- null != entry
+ entry
}
/** Add entry if not yet in table.
* @return Returns `true` if a new entry was added, `false` otherwise.
*/
- def addEntry(elem: A) : Boolean = {
+ protected def addEntry(elem: A) : Boolean = {
var h = index(elemHashCode(elem))
var entry = table(h)
while (null != entry) {
if (entry == elem) return false
h = (h + 1) % table.length
entry = table(h)
+ //Statistics.collisions += 1
}
table(h) = elem.asInstanceOf[AnyRef]
tableSize = tableSize + 1
@@ -134,7 +149,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Removes an entry from the hash table, returning an option value with the element, or `None` if it didn't exist. */
- def removeEntry(elem: A) : Option[A] = {
+ protected def removeEntry(elem: A) : Option[A] = {
if (tableDebug) checkConsistent()
def precedes(i: Int, j: Int) = {
val d = table.length >> 1
@@ -169,7 +184,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
None
}
- def iterator = new Iterator[A] {
+ protected def iterator: Iterator[A] = new AbstractIterator[A] {
private var i = 0
def hasNext: Boolean = {
while (i < table.length && (null == table(i))) i += 1
@@ -185,6 +200,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
table = new Array[AnyRef](table.length * 2)
tableSize = 0
nnSizeMapReset(table.length)
+ seedvalue = tableSizeSeed
threshold = newThreshold(_loadFactor, table.length)
var i = 0
while (i < oldtable.length) {
@@ -267,6 +283,10 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
println(sizemap.mkString("szmap: [", ", ", "]"))
}
+ private[collection] def printContents() {
+ println(table.mkString("[", ", ", "]"))
+ }
+
protected def sizeMapDisable() = sizemap = null
protected def isSizeMapDefined = sizemap ne null
@@ -276,10 +296,24 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
/* End of size map handling code */
protected final def index(hcode: Int) = {
+ // version 1 (no longer used - did not work with parallel hash tables)
// improve(hcode) & (table.length - 1)
- val improved = improve(hcode)
+
+ // version 2 (allows for parallel hash table construction)
+ val improved = improve(hcode, seedvalue)
val ones = table.length - 1
(improved >>> (32 - java.lang.Integer.bitCount(ones))) & ones
+
+ // version 3 (solves SI-5293 in most cases, but such a case would still arise for parallel hash tables)
+ // val hc = improve(hcode)
+ // val bbp = blockbitpos
+ // val ones = table.length - 1
+ // val needed = Integer.bitCount(ones)
+ // val blockbits = ((hc >>> bbp) & 0x1f) << (needed - 5)
+ // val rest = ((hc >>> (bbp + 5)) << bbp) | (((1 << bbp) - 1) & hc)
+ // val restmask = (1 << (needed - 5)) - 1
+ // val improved = blockbits | (rest & restmask)
+ // improved
}
protected def clearTable() {
@@ -294,6 +328,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
table,
tableSize,
threshold,
+ seedvalue,
sizemap
)
@@ -303,6 +338,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
table = c.table
tableSize = c.tableSize
threshold = c.threshold
+ seedvalue = c.seedvalue
sizemap = c.sizemap
}
if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild
@@ -311,21 +347,26 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
-
private[collection] object FlatHashTable {
- /** The load factor for the hash table; must be < 500 (0.5)
+ /** Creates a specific seed to improve hashcode of a hash table instance
+ * and ensure that iteration order vulnerabilities are not 'felt' in other
+ * hash tables.
+ *
+ * See SI-5293.
*/
- private[collection] def defaultLoadFactor: Int = 450
- private[collection] final def loadFactorDenum = 1000
+ final def seedGenerator = new ThreadLocal[scala.util.Random] {
+ override def initialValue = new scala.util.Random
+ }
- /** The initial size of the hash table.
+ /** The load factor for the hash table; must be < 500 (0.5)
*/
- private[collection] def initialSize: Int = 16
+ def defaultLoadFactor: Int = 450
+ final def loadFactorDenum = 1000
- private[collection] def sizeForThreshold(size: Int, _loadFactor: Int) = (size.toLong * loadFactorDenum / _loadFactor).toInt
+ def sizeForThreshold(size: Int, _loadFactor: Int) = scala.math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt)
- private[collection] def newThreshold(_loadFactor: Int, size: Int) = {
+ def newThreshold(_loadFactor: Int, size: Int) = {
val lf = _loadFactor
assert(lf < (loadFactorDenum / 2), "loadFactor too large; must be < 0.5")
(size.toLong * lf / loadFactorDenum ).toInt
@@ -336,6 +377,7 @@ private[collection] object FlatHashTable {
val table: Array[AnyRef],
val tableSize: Int,
val threshold: Int,
+ val seedvalue: Int,
val sizemap: Array[Int]
)
@@ -348,16 +390,22 @@ private[collection] object FlatHashTable {
if (elem == null) throw new IllegalArgumentException("Flat hash tables cannot contain null elements.")
else elem.hashCode()
- protected final def improve(hcode: Int) = {
- // var h: Int = hcode + ~(hcode << 9)
- // h = h ^ (h >>> 14)
- // h = h + (h << 4)
- // h ^ (h >>> 10)
- var i = hcode * 0x9e3775cd
- i = java.lang.Integer.reverseBytes(i)
- i * 0x9e3775cd
+ protected final def improve(hcode: Int, seed: Int) = {
+ //var h: Int = hcode + ~(hcode << 9)
+ //h = h ^ (h >>> 14)
+ //h = h + (h << 4)
+ //h ^ (h >>> 10)
+
+ val improved= scala.util.hashing.byteswap32(hcode)
+
+ // for the remainder, see SI-5293
+ // to ensure that different bits are used for different hash tables, we have to rotate based on the seed
+ val rotation = seed % 32
+ val rotated = (improved >>> rotation) | (improved << (32 - rotation))
+ rotated
}
}
}
+
diff --git a/src/library/scala/collection/mutable/GenIterable.scala.disabled b/src/library/scala/collection/mutable/GenIterable.scala.disabled
index 7fd6a8c..9acfccd 100644
--- a/src/library/scala/collection/mutable/GenIterable.scala.disabled
+++ b/src/library/scala/collection/mutable/GenIterable.scala.disabled
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -30,7 +30,7 @@ trait GenIterable[A] extends GenTraversable[A]
// object GenIterable extends TraversableFactory[GenIterable] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = new GenericCanBuildFrom[A]
+// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
// def newBuilder[A]: Builder[A, GenIterable[A]] = Iterable.newBuilder
// }
diff --git a/src/library/scala/collection/mutable/GenMap.scala.disabled b/src/library/scala/collection/mutable/GenMap.scala.disabled
index eca63b4..e4fd1da 100644
--- a/src/library/scala/collection/mutable/GenMap.scala.disabled
+++ b/src/library/scala/collection/mutable/GenMap.scala.disabled
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/mutable/GenSeq.scala.disabled b/src/library/scala/collection/mutable/GenSeq.scala.disabled
index 376a2ce..ec90472 100644
--- a/src/library/scala/collection/mutable/GenSeq.scala.disabled
+++ b/src/library/scala/collection/mutable/GenSeq.scala.disabled
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -24,7 +24,7 @@ import generic._
*
* The class adds an `update` method to `collection.Seq`.
*
- * @define Coll mutable.Seq
+ * @define Coll `mutable.Seq`
* @define coll mutable sequence
*/
trait GenSeq[A] extends GenIterable[A]
@@ -38,7 +38,7 @@ trait GenSeq[A] extends GenIterable[A]
// object GenSeq extends SeqFactory[GenSeq] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = new GenericCanBuildFrom[A]
+// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
// def newBuilder[A]: Builder[A, GenSeq[A]] = Seq.newBuilder
// }
diff --git a/src/library/scala/collection/mutable/GenSet.scala.disabled b/src/library/scala/collection/mutable/GenSet.scala.disabled
index 7416577..dec20e2 100644
--- a/src/library/scala/collection/mutable/GenSet.scala.disabled
+++ b/src/library/scala/collection/mutable/GenSet.scala.disabled
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -24,7 +24,7 @@ import generic._
*
* @since 1.0
* @author Matthias Zenger
- * @define Coll mutable.Set
+ * @define Coll `mutable.Set`
* @define coll mutable set
*/
trait GenSet[A] extends GenIterable[A]
@@ -39,7 +39,7 @@ trait GenSet[A] extends GenIterable[A]
// object GenSet extends TraversableFactory[GenSet] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSet[A]] = new GenericCanBuildFrom[A]
+// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSet[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
// def newBuilder[A]: Builder[A, GenSet[A]] = Set.newBuilder
// }
diff --git a/src/library/scala/collection/mutable/GenTraversable.scala.disabled b/src/library/scala/collection/mutable/GenTraversable.scala.disabled
index 1ad9bfa..2453e2c 100644
--- a/src/library/scala/collection/mutable/GenTraversable.scala.disabled
+++ b/src/library/scala/collection/mutable/GenTraversable.scala.disabled
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -32,7 +32,7 @@ trait GenTraversable[A] extends scala.collection.GenTraversable[A]
}
// object GenTraversable extends TraversableFactory[GenTraversable] {
-// implicit def canBuildFrom[A] = new GenericCanBuildFrom[A]
+// implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
// def newBuilder[A] = Traversable.newBuilder
// }
diff --git a/src/library/scala/collection/mutable/GrowingBuilder.scala b/src/library/scala/collection/mutable/GrowingBuilder.scala
index 0b73851..ba7ea60 100644
--- a/src/library/scala/collection/mutable/GrowingBuilder.scala
+++ b/src/library/scala/collection/mutable/GrowingBuilder.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -18,7 +18,7 @@ import generic._
* @version 2.8
* @since 2.8
*
- * @define Coll GrowingBuilder
+ * @define Coll `GrowingBuilder`
* @define coll growing builder
*/
class GrowingBuilder[Elem, To <: Growable[Elem]](empty: To) extends Builder[Elem, To] {
diff --git a/src/library/scala/collection/mutable/HashEntry.scala b/src/library/scala/collection/mutable/HashEntry.scala
index 7d3fa69..5cd976e 100644
--- a/src/library/scala/collection/mutable/HashEntry.scala
+++ b/src/library/scala/collection/mutable/HashEntry.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index 61152fd..3cd7f07 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -1,23 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package mutable
import generic._
-
-
import scala.collection.parallel.mutable.ParHashMap
-
-
/** This class implements mutable maps using a hashtable.
*
* @since 1
@@ -27,7 +21,7 @@ import scala.collection.parallel.mutable.ParHashMap
* @tparam A the type of the keys contained in this hash map.
* @tparam B the type of the values assigned to keys in this hash map.
*
- * @define Coll mutable.HashMap
+ * @define Coll `mutable.HashMap`
* @define coll mutable hash map
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `HashMap[A, B]` if the elements contained in the resulting collection are
@@ -43,7 +37,8 @@ import scala.collection.parallel.mutable.ParHashMap
*/
@SerialVersionUID(1L)
class HashMap[A, B] private[collection] (contents: HashTable.Contents[A, DefaultEntry[A, B]])
-extends Map[A, B]
+extends AbstractMap[A, B]
+ with Map[A, B]
with MapLike[A, B, HashMap[A, B]]
with HashTable[A, DefaultEntry[A, B]]
with CustomParallelizable[(A, B), ParHashMap[A, B]]
@@ -54,22 +49,31 @@ extends Map[A, B]
type Entry = DefaultEntry[A, B]
override def empty: HashMap[A, B] = HashMap.empty[A, B]
- override def clear() = clearTable()
+ override def clear() { clearTable() }
override def size: Int = tableSize
def this() = this(null)
override def par = new ParHashMap[A, B](hashTableContents)
+ // contains and apply overridden to avoid option allocations.
+ override def contains(key: A): Boolean = findEntry(key) != null
+
+ override def apply(key: A): B = {
+ val result = findEntry(key)
+ if (result eq null) default(key)
+ else result.value
+ }
+
def get(key: A): Option[B] = {
val e = findEntry(key)
- if (e == null) None
+ if (e eq null) None
else Some(e.value)
}
override def put(key: A, value: B): Option[B] = {
- val e = findEntry(key)
- if (e == null) { addEntry(new Entry(key, value)); None }
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
else { val v = e.value; e.value = value; Some(v) }
}
@@ -82,9 +86,8 @@ extends Map[A, B]
}
def += (kv: (A, B)): this.type = {
- val e = findEntry(kv._1)
- if (e == null) addEntry(new Entry(kv._1, kv._2))
- else e.value = kv._2
+ val e = findOrAddEntry(kv._1, kv._2)
+ if (e ne null) e.value = kv._2
this
}
@@ -95,27 +98,27 @@ extends Map[A, B]
override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f(e.key, e.value))
/* Override to avoid tuple allocation in foreach */
- override def keySet: collection.Set[A] = new DefaultKeySet {
+ override def keySet: scala.collection.Set[A] = new DefaultKeySet {
override def foreach[C](f: A => C) = foreachEntry(e => f(e.key))
}
/* Override to avoid tuple allocation in foreach */
- override def values: collection.Iterable[B] = new DefaultValuesIterable {
+ override def values: scala.collection.Iterable[B] = new DefaultValuesIterable {
override def foreach[C](f: B => C) = foreachEntry(e => f(e.value))
}
/* Override to avoid tuple allocation */
- override def keysIterator: Iterator[A] = new Iterator[A] {
- val iter = entriesIterator
+ override def keysIterator: Iterator[A] = new AbstractIterator[A] {
+ val iter = entriesIterator
def hasNext = iter.hasNext
- def next() = iter.next.key
+ def next() = iter.next.key
}
/* Override to avoid tuple allocation */
- override def valuesIterator: Iterator[B] = new Iterator[B] {
- val iter = entriesIterator
+ override def valuesIterator: Iterator[B] = new AbstractIterator[B] {
+ val iter = entriesIterator
def hasNext = iter.hasNext
- def next() = iter.next.value
+ def next() = iter.next.value
}
/** Toggles whether a size map is used to track hash map statistics.
@@ -124,18 +127,25 @@ extends Map[A, B]
if (!isSizeMapDefined) sizeMapInitAndRebuild
} else sizeMapDisable
+ protected def createNewEntry[B1](key: A, value: B1): Entry = {
+ new Entry(key, value.asInstanceOf[B])
+ }
+
private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
+ serializeTo(out, { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(entry.value)
+ })
}
private def readObject(in: java.io.ObjectInputStream) {
- init[B](in, new Entry(_, _))
+ init(in, createNewEntry(in.readObject().asInstanceOf[A], in.readObject()))
}
}
/** $factoryInfo
- * @define Coll mutable.HashMap
+ * @define Coll `mutable.HashMap`
* @define coll mutable hash map
*/
object HashMap extends MutableMapFactory[HashMap] {
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index 1fa8d5c..c60e363 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,7 +12,7 @@ package scala.collection
package mutable
import generic._
-import collection.parallel.mutable.ParHashSet
+import scala.collection.parallel.mutable.ParHashSet
/** This class implements mutable sets using a hashtable.
*
@@ -25,7 +25,7 @@ import collection.parallel.mutable.ParHashSet
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#hash_tables "Scala's Collection Library overview"]]
* section on `Hash Tables` for more information.
*
- * @define Coll mutable.HashSet
+ * @define Coll `mutable.HashSet`
* @define coll mutable hash set
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `HashSet[B]` because an implicit of type `CanBuildFrom[HashSet, B, HashSet[B]]`
@@ -39,7 +39,8 @@ import collection.parallel.mutable.ParHashSet
*/
@SerialVersionUID(1L)
class HashSet[A] private[collection] (contents: FlatHashTable.Contents[A])
-extends Set[A]
+extends AbstractSet[A]
+ with Set[A]
with GenericSetTemplate[A, HashSet]
with SetLike[A, HashSet[A]]
with FlatHashTable[A]
@@ -52,7 +53,7 @@ extends Set[A]
override def companion: GenericCompanion[HashSet] = HashSet
- override def size = tableSize
+ override def size: Int = tableSize
def contains(elem: A): Boolean = containsEntry(elem)
@@ -66,7 +67,9 @@ extends Set[A]
override def remove(elem: A): Boolean = removeEntry(elem).isDefined
- override def clear() = clearTable()
+ override def clear() { clearTable() }
+
+ override def iterator: Iterator[A] = super[FlatHashTable].iterator
override def foreach[U](f: A => U) {
var i = 0
@@ -85,7 +88,7 @@ extends Set[A]
}
private def readObject(in: java.io.ObjectInputStream) {
- init(in, x => x)
+ init(in, x => ())
}
/** Toggles whether a size map is used to track hash map statistics.
@@ -97,7 +100,7 @@ extends Set[A]
}
/** $factoryInfo
- * @define Coll mutable.HashSet
+ * @define Coll `mutable.HashSet`
* @define coll mutable hash set
*/
object HashSet extends MutableSetFactory[HashSet] {
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 0f6fde0..8fef1be 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -32,6 +32,9 @@ package mutable
* @tparam A type of the elements contained in this hash table.
*/
trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] {
+ // Replacing Entry type parameter by abstract type member here allows to not expose to public
+ // implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry`.
+ // However, I'm afraid it's too late now for such breaking change.
import HashTable._
@transient protected var _loadFactor = defaultLoadFactor
@@ -52,24 +55,45 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
*/
@transient protected var sizemap: Array[Int] = null
- protected def initialSize: Int = HashTable.initialSize
+ @transient protected var seedvalue: Int = tableSizeSeed
+
+ protected def tableSizeSeed = Integer.bitCount(table.length - 1)
+
+ /** The initial size of the hash table.
+ */
+ protected def initialSize: Int = 16
+
+ /** The initial threshold.
+ */
+ private def initialThreshold(_loadFactor: Int): Int = newThreshold(_loadFactor, initialCapacity)
+
+ private def initialCapacity = capacity(initialSize)
+
+ private def lastPopulatedIndex = {
+ var idx = table.length - 1
+ while (table(idx) == null && idx > 0)
+ idx -= 1
+
+ idx
+ }
/**
- * Initializes the collection from the input stream. `f` will be called for each key/value pair
- * read from the input stream in the order determined by the stream. This is useful for
- * structures where iteration order is important (e.g. LinkedHashMap).
+ * Initializes the collection from the input stream. `readEntry` will be called for each
+ * entry to be read from the input stream.
*/
- private[collection] def init[B](in: java.io.ObjectInputStream, f: (A, B) => Entry) {
+ private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry) {
in.defaultReadObject
- _loadFactor = in.readInt
+ _loadFactor = in.readInt()
assert(_loadFactor > 0)
- val size = in.readInt
+ val size = in.readInt()
tableSize = 0
assert(size >= 0)
- val smDefined = in.readBoolean
+ seedvalue = in.readInt()
+
+ val smDefined = in.readBoolean()
table = new Array(capacity(sizeForThreshold(_loadFactor, size)))
threshold = newThreshold(_loadFactor, table.size)
@@ -78,34 +102,34 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
var index = 0
while (index < size) {
- addEntry(f(in.readObject.asInstanceOf[A], in.readObject.asInstanceOf[B]))
+ addEntry(readEntry)
index += 1
}
}
/**
* Serializes the collection to the output stream by saving the load factor, collection
- * size, collection keys and collection values. `value` is responsible for providing a value
- * from an entry.
+ * size and collection entries. `writeEntry` is responsible for writing an entry to the stream.
*
- * `foreach` determines the order in which the key/value pairs are saved to the stream. To
+ * `foreachEntry` determines the order in which the key/value pairs are saved to the stream. To
* deserialize, `init` should be used.
*/
- private[collection] def serializeTo[B](out: java.io.ObjectOutputStream, value: Entry => B) {
+ private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit) {
out.defaultWriteObject
out.writeInt(_loadFactor)
out.writeInt(tableSize)
+ out.writeInt(seedvalue)
out.writeBoolean(isSizeMapDefined)
- foreachEntry { entry =>
- out.writeObject(entry.key)
- out.writeObject(value(entry))
- }
+
+ foreachEntry(writeEntry)
}
/** Find entry with given key in table, null if not found.
*/
- protected def findEntry(key: A): Entry = {
- val h = index(elemHashCode(key))
+ protected def findEntry(key: A): Entry =
+ findEntry0(key, index(elemHashCode(key)))
+
+ private[this] def findEntry0(key: A, h: Int): Entry = {
var e = table(h).asInstanceOf[Entry]
while (e != null && !elemEquals(e.key, key)) e = e.next
e
@@ -115,7 +139,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
* pre: no entry with same key exists
*/
protected def addEntry(e: Entry) {
- val h = index(elemHashCode(e.key))
+ addEntry0(e, index(elemHashCode(e.key)))
+ }
+
+ private[this] def addEntry0(e: Entry, h: Int) {
e.next = table(h).asInstanceOf[Entry]
table(h) = e
tableSize = tableSize + 1
@@ -124,6 +151,24 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
resize(2 * table.length)
}
+ /** Find entry with given key in table, or add new one if not found.
+ * May be somewhat faster then `findEntry`/`addEntry` pair as it
+ * computes entry's hash index only once.
+ * Returns entry found in table or null.
+ * New entries are created by calling `createNewEntry` method.
+ */
+ protected def findOrAddEntry[B](key: A, value: B): Entry = {
+ val h = index(elemHashCode(key))
+ val e = findEntry0(key, h)
+ if (e ne null) e else { addEntry0(createNewEntry(key, value), h); null }
+ }
+
+ /** Creates new entry to be immediately inserted into the hashtable.
+ * This method is guaranteed to be called only once and in case that the entry
+ * will be added. In other words, an implementation may be side-effecting.
+ */
+ protected def createNewEntry[B](key: A, value: B): Entry
+
/** Remove entry from table if present.
*/
protected def removeEntry(key: A) : Entry = {
@@ -154,44 +199,39 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
/** An iterator returning all entries.
*/
- protected def entriesIterator: Iterator[Entry] = new Iterator[Entry] {
+ protected def entriesIterator: Iterator[Entry] = new AbstractIterator[Entry] {
val iterTable = table
- var idx = table.length - 1
- var es = iterTable(idx).asInstanceOf[Entry]
- scan()
+ var idx = lastPopulatedIndex
+ var es = iterTable(idx)
+
def hasNext = es != null
def next() = {
val res = es
es = es.next
- scan()
- res
- }
- def scan() {
while (es == null && idx > 0) {
idx = idx - 1
- es = iterTable(idx).asInstanceOf[Entry]
+ es = iterTable(idx)
}
+ res.asInstanceOf[Entry]
}
}
- /*
- * We should implement this as a primitive operation over the underlying array, but it can
- * cause a behaviour change in edge cases where:
- * - Someone modifies a map during iteration
- * - The insertion point is close to the iteration point.
- *
- * The reason this happens is that the iterator prefetches the following element before
- * returning from next (to simplify the implementation of hasNext) while the natural
- * implementation of foreach does not.
- *
- * It should be mentioned that modifying a map during iteration leads to unpredictable
- * results with either implementation.
- */
- protected final def foreachEntry[C](f: Entry => C) { entriesIterator.foreach(f) }
+ /** Avoid iterator for a 2x faster traversal. */
+ protected def foreachEntry[U](f: Entry => U) {
+ val iterTable = table
+ var idx = lastPopulatedIndex
+ var es = iterTable(idx)
- /** An iterator returning all entries */
- @deprecated("use entriesIterator instead", "2.8.0")
- protected def entries: Iterator[Entry] = entriesIterator
+ while (es != null) {
+ f(es.asInstanceOf[Entry])
+ es = es.next
+
+ while (es == null && idx > 0) {
+ idx -= 1
+ es = iterTable(idx)
+ }
+ }
+ }
/** Remove all entries from table
*/
@@ -311,7 +351,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
// this is of crucial importance when populating the table in parallel
protected final def index(hcode: Int) = {
val ones = table.length - 1
- val improved = improve(hcode)
+ val improved = improve(hcode, seedvalue)
val shifted = (improved >> (32 - java.lang.Integer.bitCount(ones))) & ones
shifted
}
@@ -322,6 +362,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
table = c.table
tableSize = c.tableSize
threshold = c.threshold
+ seedvalue = c.seedvalue
sizemap = c.sizemap
}
if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild
@@ -332,6 +373,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
table,
tableSize,
threshold,
+ seedvalue,
sizemap
)
}
@@ -342,19 +384,9 @@ private[collection] object HashTable {
private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75%
private[collection] final def loadFactorDenum = 1000;
- /** The initial size of the hash table.
- */
- private[collection] final def initialSize: Int = 16
-
- /** The initial threshold.
- */
- private[collection] final def initialThreshold(_loadFactor: Int): Int = newThreshold(_loadFactor, initialCapacity)
-
- private[collection] final def initialCapacity = capacity(initialSize)
-
private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt
- private[collection] final def sizeForThreshold(_loadFactor: Int, thr: Int) = thr * loadFactorDenum / _loadFactor
+ private[collection] final def sizeForThreshold(_loadFactor: Int, thr: Int) = ((thr.toLong * loadFactorDenum) / _loadFactor).toInt
private[collection] final def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)
@@ -365,7 +397,7 @@ private[collection] object HashTable {
protected def elemHashCode(key: KeyType) = key.##
- protected final def improve(hcode: Int) = {
+ protected final def improve(hcode: Int, seed: Int) = {
/* Murmur hash
* m = 0x5bd1e995
* r = 24
@@ -391,12 +423,7 @@ private[collection] object HashTable {
*
* For performance reasons, we avoid this improvement.
* */
- var i = hcode * 0x9e3775cd
- i = java.lang.Integer.reverseBytes(i)
- i * 0x9e3775cd
- // a slower alternative for byte reversal:
- // i = (i << 16) | (i >> 16)
- // i = ((i >> 8) & 0x00ff00ff) | ((i << 8) & 0xff00ff00)
+ val i= scala.util.hashing.byteswap32(hcode)
/* Jenkins hash
* for range 0-10000, output has the msb set to zero */
@@ -417,6 +444,11 @@ private[collection] object HashTable {
// h = h ^ (h >>> 14)
// h = h + (h << 4)
// h ^ (h >>> 10)
+
+ // the rest of the computation is due to SI-5293
+ val rotation = seed % 32
+ val rotated = (i >>> rotation) | (i << (32 - rotation))
+ rotated
}
}
@@ -439,9 +471,10 @@ private[collection] object HashTable {
val table: Array[HashEntry[A, Entry]],
val tableSize: Int,
val threshold: Int,
+ val seedvalue: Int,
val sizemap: Array[Int]
) {
- import collection.DebugUtils._
+ import scala.collection.DebugUtils._
private[collection] def debugInformation = buildString {
append =>
append("Hash table contents")
@@ -449,6 +482,7 @@ private[collection] object HashTable {
append("Table: [" + arrayString(table, 0, table.length) + "]")
append("Table size: " + tableSize)
append("Load factor: " + loadFactor)
+ append("Seedvalue: " + seedvalue)
append("Threshold: " + threshold)
append("Sizemap: [" + arrayString(sizemap, 0, sizemap.length) + "]")
}
diff --git a/src/library/scala/collection/mutable/History.scala b/src/library/scala/collection/mutable/History.scala
index bc61a61..c1d94a9 100644
--- a/src/library/scala/collection/mutable/History.scala
+++ b/src/library/scala/collection/mutable/History.scala
@@ -25,7 +25,11 @@ package mutable
* @tparam Pub Type of publishers.
*/
@SerialVersionUID(5219213543849892588L)
-class History[Evt, Pub] extends Subscriber[Evt, Pub] with Iterable[(Pub, Evt)] with Serializable
+class History[Evt, Pub]
+extends AbstractIterable[(Pub, Evt)]
+ with Subscriber[Evt, Pub]
+ with Iterable[(Pub, Evt)]
+ with Serializable
{
protected val log: Queue[(Pub, Evt)] = new Queue
val maxHistory: Int = 1000
diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
index 876cac3..755eea8 100644
--- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,7 +11,7 @@
package scala.collection
package mutable
-import annotation.migration
+import scala.annotation.migration
/** This class can be used as an adaptor to create mutable maps from
* immutable map implementations. Only method `empty` has
@@ -25,7 +25,9 @@ import annotation.migration
* @since 1
*/
class ImmutableMapAdaptor[A, B](protected var imap: immutable.Map[A, B])
-extends Map[A, B] with Serializable
+extends AbstractMap[A, B]
+ with Map[A, B]
+ with Serializable
{
override def size: Int = imap.size
@@ -40,23 +42,20 @@ extends Map[A, B] with Serializable
override def isDefinedAt(key: A) = imap.isDefinedAt(key)
- override def keySet: collection.Set[A] = imap.keySet
+ override def keySet: scala.collection.Set[A] = imap.keySet
override def keysIterator: Iterator[A] = imap.keysIterator
@migration("`keys` returns Iterable[A] rather than Iterator[A].", "2.8.0")
- override def keys: collection.Iterable[A] = imap.keys
+ override def keys: scala.collection.Iterable[A] = imap.keys
override def valuesIterator: Iterator[B] = imap.valuesIterator
@migration("`values` returns Iterable[B] rather than Iterator[B].", "2.8.0")
- override def values: collection.Iterable[B] = imap.values
+ override def values: scala.collection.Iterable[B] = imap.values
def iterator: Iterator[(A, B)] = imap.iterator
- @deprecated("use `iterator' instead", "2.8.0")
- override def elements = iterator
-
override def toList: List[(A, B)] = imap.toList
override def update(key: A, value: B): Unit = { imap = imap.updated(key, value) }
diff --git a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
index 0975b33..42c757d 100644
--- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -22,7 +22,10 @@ package mutable
* @version 1.0, 21/07/2003
* @since 1
*/
-class ImmutableSetAdaptor[A](protected var set: immutable.Set[A]) extends Set[A] with Serializable {
+class ImmutableSetAdaptor[A](protected var set: immutable.Set[A])
+extends AbstractSet[A]
+ with Set[A]
+ with Serializable {
override def size: Int = set.size
@@ -40,9 +43,6 @@ class ImmutableSetAdaptor[A](protected var set: immutable.Set[A]) extends Set[A]
def iterator: Iterator[A] = set.iterator
- @deprecated("use `iterator' instead", "2.8.0")
- override def elements: Iterator[A] = iterator
-
def +=(elem: A): this.type = { set = set + elem; this }
def -=(elem: A): this.type = { set = set - elem; this }
diff --git a/src/library/scala/collection/mutable/IndexedSeq.scala b/src/library/scala/collection/mutable/IndexedSeq.scala
index 1d7b542..4d094e6 100644
--- a/src/library/scala/collection/mutable/IndexedSeq.scala
+++ b/src/library/scala/collection/mutable/IndexedSeq.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -23,14 +23,15 @@ trait IndexedSeq[A] extends Seq[A]
with GenericTraversableTemplate[A, IndexedSeq]
with IndexedSeqLike[A, IndexedSeq[A]] {
override def companion: GenericCompanion[IndexedSeq] = IndexedSeq
+ override def seq: IndexedSeq[A] = this
}
/** $factoryInfo
* The current default implementation of a $Coll is an `ArrayBuffer`.
* @define coll mutable indexed sequence
- * @define Coll mutable.IndexedSeq
+ * @define Coll `mutable.IndexedSeq`
*/
object IndexedSeq extends SeqFactory[IndexedSeq] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, IndexedSeq[A]] = new ArrayBuffer[A]
}
diff --git a/src/library/scala/collection/mutable/IndexedSeqLike.scala b/src/library/scala/collection/mutable/IndexedSeqLike.scala
index 0c1df17..f0c31ec 100644
--- a/src/library/scala/collection/mutable/IndexedSeqLike.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -27,7 +27,7 @@ import generic._
* @tparam A the element type of the $coll
* @tparam Repr the type of the actual $coll containing the elements.
*
- * @define Coll IndexedSeq
+ * @define Coll `IndexedSeq`
* @define coll mutable indexed sequence
* @define indexedSeqInfo
* @author Martin Odersky
@@ -36,14 +36,14 @@ import generic._
* @define willNotTerminateInf
* @define mayNotTerminateInf
*/
-trait IndexedSeqLike[A, +Repr] extends scala.collection.IndexedSeqLike[A, Repr] { self =>
+trait IndexedSeqLike[A, +Repr] extends Any with scala.collection.IndexedSeqLike[A, Repr] { self =>
override protected[this] def thisCollection: IndexedSeq[A] = this.asInstanceOf[IndexedSeq[A]]
override protected[this] def toCollection(repr: Repr): IndexedSeq[A] = repr.asInstanceOf[IndexedSeq[A]]
/** Replaces element at given index with a new value.
*
- * @param n the index of the element to replace.
+ * @param idx the index of the element to replace.
* @param elem the new value.
* @throws IndexOutOfBoundsException if the index is not valid.
*/
diff --git a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
old mode 100644
new mode 100755
index 819d064..cb7e8ef
--- a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -17,4 +17,4 @@ import generic._
*
* @since 2.8
*/
-trait IndexedSeqOptimized[A, +Repr] extends IndexedSeqLike[A, Repr] with scala.collection.IndexedSeqOptimized[A, Repr]
+trait IndexedSeqOptimized[A, +Repr] extends Any with IndexedSeqLike[A, Repr] with scala.collection.IndexedSeqOptimized[A, Repr]
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index 6af00d0..cf5166e 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -14,12 +14,13 @@ package mutable
import generic._
import TraversableView.NoBuilder
+import scala.language.implicitConversions
/** A non-strict view of a mutable `IndexedSeq`.
* $viewInfo
* Some of the operations of this class will yield again a mutable indexed sequence,
* others will just yield a plain indexed sequence of type `collection.IndexedSeq`.
- * Because this is a leaf class there is no associated `Like' class.
+ * Because this is a leaf class there is no associated `Like` class.
* @author Martin Odersky
* @version 2.8
* @since 2.8
@@ -41,6 +42,9 @@ self =>
override def toString = viewToString
}
+ /** Explicit instantiation of the `Transformed` trait to reduce class file size in subclasses. */
+ private[collection] abstract class AbstractTransformed[B] extends super.AbstractTransformed[B] with Transformed[B]
+
// pre: until <= self.length
trait Sliced extends super.Sliced with Transformed[A] {
override def length = endpoints.width
@@ -72,11 +76,11 @@ self =>
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
- protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with Filtered
- protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with Sliced
- protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with DroppedWhile
- protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with TakenWhile
- protected override def newReversed: Transformed[A] = new Reversed { }
+ protected override def newFiltered(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with Filtered
+ protected override def newSliced(_endpoints: SliceInterval): Transformed[A] = new { val endpoints = _endpoints } with AbstractTransformed[A] with Sliced
+ protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with DroppedWhile
+ protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile
+ protected override def newReversed: Transformed[A] = new AbstractTransformed[A] with Reversed
private implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This]
diff --git a/src/library/scala/collection/mutable/Iterable.scala b/src/library/scala/collection/mutable/Iterable.scala
index 1a5e58b..b79453e 100644
--- a/src/library/scala/collection/mutable/Iterable.scala
+++ b/src/library/scala/collection/mutable/Iterable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -29,10 +29,12 @@ trait Iterable[A] extends Traversable[A]
/** $factoryInfo
* The current default implementation of a $Coll is an `ArrayBuffer`.
* @define coll mutable iterable collection
- * @define Coll mutable.Iterable
+ * @define Coll `mutable.Iterable`
*/
object Iterable extends TraversableFactory[Iterable] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Iterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, Iterable[A]] = new ArrayBuffer
}
+/** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */
+private[scala] abstract class AbstractIterable[A] extends scala.collection.AbstractIterable[A] with Iterable[A]
diff --git a/src/library/scala/collection/mutable/LazyBuilder.scala b/src/library/scala/collection/mutable/LazyBuilder.scala
index c7c43ae..0b56c86 100644
--- a/src/library/scala/collection/mutable/LazyBuilder.scala
+++ b/src/library/scala/collection/mutable/LazyBuilder.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/mutable/LinearSeq.scala b/src/library/scala/collection/mutable/LinearSeq.scala
index 0f84159..f241a2f 100644
--- a/src/library/scala/collection/mutable/LinearSeq.scala
+++ b/src/library/scala/collection/mutable/LinearSeq.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -17,7 +17,7 @@ import generic._
* that can be mutated.
* $linearSeqInfo
*
- * @define Coll LinearSeq
+ * @define Coll `LinearSeq`
* @define coll linear sequence
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_lists "Scala's Collection Library overview"]]
* section on `Mutable Lists` for more information.
@@ -27,14 +27,15 @@ trait LinearSeq[A] extends Seq[A]
with GenericTraversableTemplate[A, LinearSeq]
with LinearSeqLike[A, LinearSeq[A]] {
override def companion: GenericCompanion[LinearSeq] = LinearSeq
+ override def seq: LinearSeq[A] = this
}
/** $factoryInfo
* The current default implementation of a $Coll is a `MutableList`.
* @define coll mutable linear sequence
- * @define Coll mutable.LinearSeq
+ * @define Coll `mutable.LinearSeq`
*/
object LinearSeq extends SeqFactory[LinearSeq] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinearSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, LinearSeq[A]] = new MutableList[A]
}
diff --git a/src/library/scala/collection/mutable/LinkedEntry.scala b/src/library/scala/collection/mutable/LinkedEntry.scala
index 9c2f224..e4e2912 100644
--- a/src/library/scala/collection/mutable/LinkedEntry.scala
+++ b/src/library/scala/collection/mutable/LinkedEntry.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index 31f539c..da2c36a 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -14,7 +14,7 @@ package mutable
import generic._
/** $factoryInfo
- * @define Coll LinkedHashMap
+ * @define Coll `LinkedHashMap`
* @define coll linked hash map
*/
object LinkedHashMap extends MutableMapFactory[LinkedHashMap] {
@@ -28,7 +28,7 @@ object LinkedHashMap extends MutableMapFactory[LinkedHashMap] {
* @tparam A the type of the keys contained in this hash map.
* @tparam B the type of the values assigned to keys in this hash map.
*
- * @define Coll LinkedHashMap
+ * @define Coll `LinkedHashMap`
* @define coll linked hash map
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `LinkedHashMap[A, B]` if the elements contained in the resulting collection are
@@ -45,10 +45,12 @@ object LinkedHashMap extends MutableMapFactory[LinkedHashMap] {
* @define orderDependentFold
*/
@SerialVersionUID(1L)
-class LinkedHashMap[A, B] extends Map[A, B]
+class LinkedHashMap[A, B] extends AbstractMap[A, B]
+ with Map[A, B]
with MapLike[A, B, LinkedHashMap[A, B]]
with HashTable[A, LinkedEntry[A, B]]
- with Serializable {
+ with Serializable
+{
override def empty = LinkedHashMap.empty[A, B]
override def size = tableSize
@@ -65,23 +67,9 @@ class LinkedHashMap[A, B] extends Map[A, B]
}
override def put(key: A, value: B): Option[B] = {
- val e = findEntry(key)
- if (e == null) {
- val e = new Entry(key, value)
- addEntry(e)
- updateLinkedEntries(e)
- None
- } else {
- val v = e.value
- e.value = value
- Some(v)
- }
- }
-
- private def updateLinkedEntries(e: Entry) {
- if (firstEntry == null) firstEntry = e
- else { lastEntry.later = e; e.earlier = lastEntry }
- lastEntry = e
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
+ else { val v = e.value; e.value = value; Some(v) }
}
override def remove(key: A): Option[B] = {
@@ -99,15 +87,33 @@ class LinkedHashMap[A, B] extends Map[A, B]
def += (kv: (A, B)): this.type = { put(kv._1, kv._2); this }
def -=(key: A): this.type = { remove(key); this }
- def iterator: Iterator[(A, B)] = new Iterator[(A, B)] {
+ def iterator: Iterator[(A, B)] = new AbstractIterator[(A, B)] {
private var cur = firstEntry
def hasNext = cur ne null
def next =
if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res }
else Iterator.empty.next
}
+
+ protected class FilteredKeys(p: A => Boolean) extends super.FilteredKeys(p) {
+ override def empty = LinkedHashMap.empty
+ }
+
+ override def filterKeys(p: A => Boolean): scala.collection.Map[A, B] = new FilteredKeys(p)
- override def keysIterator: Iterator[A] = new Iterator[A] {
+ protected class MappedValues[C](f: B => C) extends super.MappedValues[C](f) {
+ override def empty = LinkedHashMap.empty
+ }
+
+ override def mapValues[C](f: B => C): scala.collection.Map[A, C] = new MappedValues(f)
+
+ protected class DefaultKeySet extends super.DefaultKeySet {
+ override def empty = LinkedHashSet.empty
+ }
+
+ override def keySet: scala.collection.Set[A] = new DefaultKeySet
+
+ override def keysIterator: Iterator[A] = new AbstractIterator[A] {
private var cur = firstEntry
def hasNext = cur ne null
def next =
@@ -115,7 +121,7 @@ class LinkedHashMap[A, B] extends Map[A, B]
else Iterator.empty.next
}
- override def valuesIterator: Iterator[B] = new Iterator[B] {
+ override def valuesIterator: Iterator[B] = new AbstractIterator[B] {
private var cur = firstEntry
def hasNext = cur ne null
def next =
@@ -123,7 +129,7 @@ class LinkedHashMap[A, B] extends Map[A, B]
else Iterator.empty.next
}
- override def foreach[U](f: ((A, B)) => U) = {
+ override def foreach[U](f: ((A, B)) => U) {
var cur = firstEntry
while (cur ne null) {
f((cur.key, cur.value))
@@ -131,22 +137,37 @@ class LinkedHashMap[A, B] extends Map[A, B]
}
}
+ protected override def foreachEntry[U](f: Entry => U) {
+ var cur = firstEntry
+ while (cur ne null) {
+ f(cur)
+ cur = cur.later
+ }
+ }
+
+ protected def createNewEntry[B1](key: A, value: B1): Entry = {
+ val e = new Entry(key, value.asInstanceOf[B])
+ if (firstEntry eq null) firstEntry = e
+ else { lastEntry.later = e; e.earlier = lastEntry }
+ lastEntry = e
+ e
+ }
+
override def clear() {
clearTable()
firstEntry = null
}
private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
+ serializeTo(out, { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(entry.value)
+ })
}
private def readObject(in: java.io.ObjectInputStream) {
firstEntry = null
lastEntry = null
- init[B](in, { (key, value) =>
- val entry = new Entry(key, value)
- updateLinkedEntries(entry)
- entry
- })
+ init(in, createNewEntry(in.readObject().asInstanceOf[A], in.readObject()))
}
}
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index 7a1e695..1723258 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -15,16 +15,15 @@ import generic._
/** This class implements mutable sets using a hashtable.
* The iterator and all traversal methods of this class visit elements in the order they were inserted.
*
- * $cannotStoreNull
- *
* @author Matthias Zenger
* @author Martin Odersky
+ * @author Pavel Pavlov
* @version 2.0, 31/12/2006
* @since 1
*
* @tparam A the type of the elements contained in this set.
*
- * @define Coll LinkedHashSet
+ * @define Coll `LinkedHashSet`
* @define coll linked hash set
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `LinkedHashSet[B]` because an implicit of type `CanBuildFrom[LinkedHashSet, B, LinkedHashSet[B]]`
@@ -39,58 +38,103 @@ import generic._
* @define orderDependentFold
*/
@SerialVersionUID(1L)
-class LinkedHashSet[A] extends Set[A]
+class LinkedHashSet[A] extends AbstractSet[A]
+ with Set[A]
with GenericSetTemplate[A, LinkedHashSet]
with SetLike[A, LinkedHashSet[A]]
- with FlatHashTable[A]
+ with HashTable[A, LinkedHashSet.Entry[A]]
with Serializable
{
override def companion: GenericCompanion[LinkedHashSet] = LinkedHashSet
- @transient private[this] var ordered = new ListBuffer[A]
+ type Entry = LinkedHashSet.Entry[A]
+
+ @transient protected var firstEntry: Entry = null
+ @transient protected var lastEntry: Entry = null
- override def size = tableSize
+ override def size: Int = tableSize
- def contains(elem: A): Boolean = containsEntry(elem)
+ def contains(elem: A): Boolean = findEntry(elem) ne null
def += (elem: A): this.type = { add(elem); this }
def -= (elem: A): this.type = { remove(elem); this }
- override def add(elem: A): Boolean =
- if (addEntry(elem)) { ordered += elem; true }
- else false
+ override def add(elem: A): Boolean = findOrAddEntry(elem, null) eq null
+
+ override def remove(elem: A): Boolean = {
+ val e = removeEntry(elem)
+ if (e eq null) false
+ else {
+ if (e.earlier eq null) firstEntry = e.later
+ else e.earlier.later = e.later
+ if (e.later eq null) lastEntry = e.earlier
+ else e.later.earlier = e.earlier
+ true
+ }
+ }
- override def remove(elem: A): Boolean =
- removeEntry(elem) match {
- case None => false
- case _ => ordered -= elem; true
+ def iterator: Iterator[A] = new AbstractIterator[A] {
+ private var cur = firstEntry
+ def hasNext = cur ne null
+ def next =
+ if (hasNext) { val res = cur.key; cur = cur.later; res }
+ else Iterator.empty.next
+ }
+
+ override def foreach[U](f: A => U) {
+ var cur = firstEntry
+ while (cur ne null) {
+ f(cur.key)
+ cur = cur.later
}
+ }
- override def clear() {
- ordered.clear()
- clearTable()
+ protected override def foreachEntry[U](f: Entry => U) {
+ var cur = firstEntry
+ while (cur ne null) {
+ f(cur)
+ cur = cur.later
+ }
}
- override def iterator: Iterator[A] = ordered.iterator
+ protected def createNewEntry[B](key: A, dummy: B): Entry = {
+ val e = new Entry(key)
+ if (firstEntry eq null) firstEntry = e
+ else { lastEntry.later = e; e.earlier = lastEntry }
+ lastEntry = e
+ e
+ }
- override def foreach[U](f: A => U) = ordered foreach f
+ override def clear() {
+ clearTable()
+ firstEntry = null
+ }
- private def writeObject(s: java.io.ObjectOutputStream) {
- serializeTo(s)
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ serializeTo(out, { e => out.writeObject(e.key) })
}
private def readObject(in: java.io.ObjectInputStream) {
- ordered = new ListBuffer[A]
- init(in, ordered += )
+ firstEntry = null
+ lastEntry = null
+ init(in, createNewEntry(in.readObject().asInstanceOf[A], null))
}
}
/** $factoryInfo
- * @define Coll LinkedHashSet
+ * @define Coll `LinkedHashSet`
* @define coll linked hash set
*/
object LinkedHashSet extends MutableSetFactory[LinkedHashSet] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinkedHashSet[A]] = setCanBuildFrom[A]
override def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A]
+
+ /** Class for the linked hash set entry, used internally.
+ * @since 2.10
+ */
+ private[scala] final class Entry[A](val key: A) extends HashEntry[A, Entry[A]] with Serializable {
+ var earlier: Entry[A] = null
+ var later: Entry[A] = null
+ }
}
diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala
index 5077df5..29e6fdd 100644
--- a/src/library/scala/collection/mutable/LinkedList.scala
+++ b/src/library/scala/collection/mutable/LinkedList.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -40,7 +40,7 @@ import generic._
*
* @constructor Creates an "empty" list, defined as a single node with no data element and next pointing to itself.
- * @define Coll LinkedList
+ * @define Coll `LinkedList`
* @define coll linked list
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `LinkedList[B]` because an implicit of type `CanBuildFrom[LinkedList, B, LinkedList[B]]`
@@ -75,7 +75,8 @@ import generic._
* }}}
*/
@SerialVersionUID(-7308240733518833071L)
-class LinkedList[A]() extends LinearSeq[A]
+class LinkedList[A]() extends AbstractSeq[A]
+ with LinearSeq[A]
with GenericTraversableTemplate[A, LinkedList]
with LinkedListLike[A, LinkedList[A]]
with Serializable {
@@ -108,12 +109,12 @@ class LinkedList[A]() extends LinearSeq[A]
}
/** $factoryInfo
- * @define Coll LinkedList
+ * @define Coll `LinkedList`
* @define coll linked list
*/
object LinkedList extends SeqFactory[LinkedList] {
override def empty[A]: LinkedList[A] = new LinkedList[A]
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinkedList[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinkedList[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, LinkedList[A]] =
(new MutableList) mapResult ((l: MutableList[A]) => l.toLinkedList)
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala
index 0f977c0..4f63ede 100644
--- a/src/library/scala/collection/mutable/LinkedListLike.scala
+++ b/src/library/scala/collection/mutable/LinkedListLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,7 +12,7 @@ package scala.collection
package mutable
import generic._
-import annotation.tailrec
+import scala.annotation.tailrec
/** This extensible class may be used as a basis for implementing linked
* list. Type variable `A` refers to the element type of the
@@ -29,7 +29,7 @@ import annotation.tailrec
* @tparam A type of the elements contained in the linked list
* @tparam This the type of the actual linked list holding the elements
*
- * @define Coll LinkedList
+ * @define Coll `LinkedList`
* @define coll linked list
*
* @define singleLinkedListExample
@@ -163,7 +163,7 @@ trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends Seq
else None
}
- override def iterator: Iterator[A] = new Iterator[A] {
+ override def iterator: Iterator[A] = new AbstractIterator[A] {
var elems = self
def hasNext = elems.nonEmpty
def next = {
@@ -180,4 +180,14 @@ trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends Seq
these = these.next
}
}
+
+ /** Return a clone of this list.
+ *
+ * @return a `LinkedList` with the same elements.
+ */
+ override def clone(): This = {
+ val bf = newBuilder
+ bf ++= this
+ bf.result
+ }
}
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index b2c3a6c..67af4a6 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,6 +13,7 @@ package mutable
import generic._
import immutable.{List, Nil, ::}
+import java.io._
/** A `Buffer` implementation back up by a list. It provides constant time
* prepend and append. Most other operations are linear.
@@ -26,7 +27,7 @@ import immutable.{List, Nil, ::}
*
* @tparam A the type of this list buffer's elements.
*
- * @define Coll ListBuffer
+ * @define Coll `ListBuffer`
* @define coll list buffer
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `ListBuffer[B]` because an implicit of type `CanBuildFrom[ListBuffer, B, ListBuffer[B]]`
@@ -40,9 +41,10 @@ import immutable.{List, Nil, ::}
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
- at SerialVersionUID(3419063961353022661L)
+ at SerialVersionUID(3419063961353022662L)
final class ListBuffer[A]
- extends Buffer[A]
+ extends AbstractBuffer[A]
+ with Buffer[A]
with GenericTraversableTemplate[A, ListBuffer]
with BufferLike[A, ListBuffer[A]]
with Builder[A, List[A]]
@@ -52,6 +54,7 @@ final class ListBuffer[A]
override def companion: GenericCompanion[ListBuffer] = ListBuffer
import scala.collection.Traversable
+ import scala.collection.immutable.ListSerializeEnd
private var start: List[A] = Nil
private var last0: ::[A] = _
@@ -60,12 +63,57 @@ final class ListBuffer[A]
protected def underlying: immutable.Seq[A] = start
+ private def writeObject(out: ObjectOutputStream) {
+ // write start
+ var xs: List[A] = start
+ while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
+ out.writeObject(ListSerializeEnd)
+
+ // no need to write last0
+
+ // write if exported
+ out.writeBoolean(exported)
+
+ // write the length
+ out.writeInt(len)
+ }
+
+ private def readObject(in: ObjectInputStream) {
+ // read start, set last0 appropriately
+ var elem: A = in.readObject.asInstanceOf[A]
+ if (elem == ListSerializeEnd) {
+ start = Nil
+ last0 = null
+ } else {
+ var current = new ::(elem, Nil)
+ start = current
+ elem = in.readObject.asInstanceOf[A]
+ while (elem != ListSerializeEnd) {
+ val list = new ::(elem, Nil)
+ current.tl = list
+ current = list
+ elem = in.readObject.asInstanceOf[A]
+ }
+ last0 = current
+ start
+ }
+
+ // read if exported
+ exported = in.readBoolean()
+
+ // read the length
+ len = in.readInt()
+ }
+
/** The current length of the buffer.
*
* This operation takes constant time.
*/
override def length = len
+ // Don't use the inherited size, which forwards to a List and is O(n).
+ override def size = length
+
// Implementations of abstract methods in Buffer
override def apply(n: Int): A =
@@ -81,29 +129,27 @@ final class ListBuffer[A]
* @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
*/
def update(n: Int, x: A) {
- try {
- if (exported) copy()
- if (n == 0) {
- val newElem = new :: (x, start.tail);
- if (last0 eq start) {
- last0 = newElem
- }
- start = newElem
- } else {
- var cursor = start
- var i = 1
- while (i < n) {
- cursor = cursor.tail
- i += 1
- }
- val newElem = new :: (x, cursor.tail.tail)
- if (last0 eq cursor.tail) {
- last0 = newElem
- }
- cursor.asInstanceOf[::[A]].tl = newElem
+ // We check the bounds early, so that we don't trigger copying.
+ if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString)
+ if (exported) copy()
+ if (n == 0) {
+ val newElem = new :: (x, start.tail);
+ if (last0 eq start) {
+ last0 = newElem
+ }
+ start = newElem
+ } else {
+ var cursor = start
+ var i = 1
+ while (i < n) {
+ cursor = cursor.tail
+ i += 1
}
- } catch {
- case ex: Exception => throw new IndexOutOfBoundsException(n.toString())
+ val newElem = new :: (x, cursor.tail.tail)
+ if (last0 eq cursor.tail) {
+ last0 = newElem
+ }
+ cursor.asInstanceOf[::[A]].tl = newElem
}
}
@@ -127,10 +173,10 @@ final class ListBuffer[A]
}
override def ++=(xs: TraversableOnce[A]): this.type =
- if (xs eq this) ++= (this take size) else super.++=(xs)
+ if (xs.asInstanceOf[AnyRef] eq this) ++= (this take size) else super.++=(xs)
override def ++=:(xs: TraversableOnce[A]): this.type =
- if (xs eq this) ++=: (this take size) else super.++=:(xs)
+ if (xs.asInstanceOf[AnyRef] eq this) ++=: (this take size) else super.++=:(xs)
/** Clears the buffer contents.
*/
@@ -160,38 +206,35 @@ final class ListBuffer[A]
* one. Instead, it will insert a new element at index `n`.
*
* @param n the index where a new element will be inserted.
- * @param iter the iterable object providing all elements to insert.
+ * @param seq the iterable object providing all elements to insert.
* @throws Predef.IndexOutOfBoundsException if `n` is out of bounds.
*/
def insertAll(n: Int, seq: Traversable[A]) {
- try {
- if (exported) copy()
- var elems = seq.toList.reverse
- len += elems.length
- if (n == 0) {
- while (!elems.isEmpty) {
- val newElem = new :: (elems.head, start)
- if (start.isEmpty) last0 = newElem
- start = newElem
- elems = elems.tail
- }
- } else {
- var cursor = start
- var i = 1
- while (i < n) {
- cursor = cursor.tail
- i += 1
- }
- while (!elems.isEmpty) {
- val newElem = new :: (elems.head, cursor.tail)
- if (cursor.tail.isEmpty) last0 = newElem
- cursor.asInstanceOf[::[A]].tl = newElem
- elems = elems.tail
- }
+ // We check the bounds early, so that we don't trigger copying.
+ if (n < 0 || n > len) throw new IndexOutOfBoundsException(n.toString)
+ if (exported) copy()
+ var elems = seq.toList.reverse
+ len += elems.length
+ if (n == 0) {
+ while (!elems.isEmpty) {
+ val newElem = new :: (elems.head, start)
+ if (start.isEmpty) last0 = newElem
+ start = newElem
+ elems = elems.tail
+ }
+ } else {
+ var cursor = start
+ var i = 1
+ while (i < n) {
+ cursor = cursor.tail
+ i += 1
+ }
+ while (!elems.isEmpty) {
+ val newElem = new :: (elems.head, cursor.tail)
+ if (cursor.tail.isEmpty) last0 = newElem
+ cursor.asInstanceOf[::[A]].tl = newElem
+ elems = elems.tail
}
- } catch {
- case ex: Exception =>
- throw new IndexOutOfBoundsException(n.toString())
}
}
@@ -201,7 +244,12 @@ final class ListBuffer[A]
* @param n the index which refers to the first element to remove.
* @param count the number of elements to remove.
*/
+ @annotation.migration("Invalid input values will be rejected in future releases.", "2.11")
override def remove(n: Int, count: Int) {
+ if (n >= len)
+ return
+ if (count < 0)
+ throw new IllegalArgumentException(s"removing negative number ($count) of elements")
if (exported) copy()
val n1 = n max 0
val count1 = count min (len - n1)
@@ -290,8 +338,8 @@ final class ListBuffer[A]
/** Remove a single element from this buffer. May take time linear in the
* buffer size.
*
- * @param x the element to remove.
- * @return this $coll.
+ * @param elem the element to remove.
+ * @return this $coll.
*/
override def -= (elem: A): this.type = {
if (exported) copy()
@@ -315,7 +363,7 @@ final class ListBuffer[A]
this
}
- override def iterator: Iterator[A] = new Iterator[A] {
+ override def iterator: Iterator[A] = new AbstractIterator[A] {
// Have to be careful iterating over mutable structures.
// This used to have "(cursor ne last0)" as part of its hasNext
// condition, which means it can return true even when the iterator
@@ -351,7 +399,7 @@ final class ListBuffer[A]
private def copy() {
var cursor = start
val limit = last0.tail
- clear
+ clear()
while (cursor ne limit) {
this += cursor.head
cursor = cursor.tail
@@ -365,7 +413,7 @@ final class ListBuffer[A]
/** Returns a clone of this buffer.
*
- * @return a <code>ListBuffer</code> with the same elements.
+ * @return a `ListBuffer` with the same elements.
*/
override def clone(): ListBuffer[A] = (new ListBuffer[A]) ++= this
@@ -377,10 +425,10 @@ final class ListBuffer[A]
}
/** $factoryInfo
- * @define Coll ListBuffer
+ * @define Coll `ListBuffer`
* @define coll list buffer
*/
object ListBuffer extends SeqFactory[ListBuffer] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListBuffer[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListBuffer[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, ListBuffer[A]] = new GrowingBuilder(new ListBuffer[A])
}
diff --git a/src/library/scala/collection/mutable/ListMap.scala b/src/library/scala/collection/mutable/ListMap.scala
index c02593f..7f05def 100644
--- a/src/library/scala/collection/mutable/ListMap.scala
+++ b/src/library/scala/collection/mutable/ListMap.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,13 +12,14 @@ package scala.collection
package mutable
import generic._
+import annotation.tailrec
/** A simple mutable map backed by a list.
*
* @tparam A the type of the keys contained in this list map.
* @tparam B the type of the values assigned to keys in this list map.
*
- * @define Coll mutable.ListMap
+ * @define Coll `mutable.ListMap`
* @define coll mutable list map
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `ListMap[A, B]` if the elements contained in the resulting collection are
@@ -34,8 +35,11 @@ import generic._
* @define orderDependent
* @define orderDependentFold
*/
-class ListMap[A, B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] with Serializable {
-
+class ListMap[A, B]
+extends AbstractMap[A, B]
+ with Map[A, B]
+ with MapLike[A, B, ListMap[A, B]]
+ with Serializable {
override def empty = ListMap.empty[A, B]
@@ -44,20 +48,24 @@ class ListMap[A, B] extends Map[A, B] with MapLike[A, B, ListMap[A, B]] with Ser
def get(key: A): Option[B] = elems find (_._1 == key) map (_._2)
def iterator: Iterator[(A, B)] = elems.iterator
- def += (kv: (A, B)) = { elems = remove(kv._1, elems); elems = kv :: elems; siz += 1; this }
- def -= (key: A) = { elems = remove(key, elems); this }
- private def remove(key: A, elems: List[(A, B)]): List[(A, B)] =
- if (elems.isEmpty) elems
- else if (elems.head._1 == key) { siz -= 1; elems.tail }
- else elems.head :: remove(key, elems.tail)
+ def += (kv: (A, B)) = { elems = remove(kv._1, elems, List()); elems = kv :: elems; siz += 1; this }
+ def -= (key: A) = { elems = remove(key, elems, List()); this }
+
+ @tailrec
+ private def remove(key: A, elems: List[(A, B)], acc: List[(A, B)]): List[(A, B)] = {
+ if (elems.isEmpty) acc
+ else if (elems.head._1 == key) { siz -= 1; acc ::: elems.tail }
+ else remove(key, elems.tail, elems.head :: acc)
+ }
+
override def clear() = { elems = List(); siz = 0 }
override def size: Int = siz
}
/** $factoryInfo
- * @define Coll mutable.ListMap
+ * @define Coll `mutable.ListMap`
* @define coll mutable list map
*/
object ListMap extends MutableMapFactory[ListMap] {
diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala
index 561dcc9..f72e1fc 100644
--- a/src/library/scala/collection/mutable/Map.scala
+++ b/src/library/scala/collection/mutable/Map.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -63,7 +63,7 @@ trait Map[A, B]
/** $factoryInfo
* The current default implementation of a $Coll is a `HashMap`.
* @define coll mutable map
- * @define Coll mutable.Map
+ * @define Coll `mutable.Map`
*/
object Map extends MutableMapFactory[Map] {
/** $canBuildFromInfo */
@@ -71,7 +71,7 @@ object Map extends MutableMapFactory[Map] {
def empty[A, B]: Map[A, B] = new HashMap[A, B]
- class WithDefault[A, B](underlying: Map[A, B], d: A => B) extends collection.Map.WithDefault(underlying, d) with Map[A, B] {
+ class WithDefault[A, B](underlying: Map[A, B], d: A => B) extends scala.collection.Map.WithDefault(underlying, d) with Map[A, B] {
override def += (kv: (A, B)) = {underlying += kv; this}
def -= (key: A) = {underlying -= key; this}
override def empty = new WithDefault(underlying.empty, d)
@@ -86,3 +86,6 @@ object Map extends MutableMapFactory[Map] {
override def withDefaultValue(d: B): mutable.Map[A, B] = new WithDefault[A, B](underlying, x => d)
}
}
+
+/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
+private[scala] abstract class AbstractMap[A, B] extends scala.collection.AbstractMap[A, B] with Map[A, B]
diff --git a/src/library/scala/collection/mutable/MapBuilder.scala b/src/library/scala/collection/mutable/MapBuilder.scala
index 174c3c6..8468e09 100644
--- a/src/library/scala/collection/mutable/MapBuilder.scala
+++ b/src/library/scala/collection/mutable/MapBuilder.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index 69ae43f..42e5a0a 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,13 +11,35 @@ package scala.collection
package mutable
import generic._
-import annotation.{migration, bridge}
+import scala.annotation.{migration, bridge}
import parallel.mutable.ParMap
/** A template trait for mutable maps.
* $mapNote
* $mapTags
* @since 2.8
+ *
+ * @define mapNote
+ * '''Implementation note:'''
+ * This trait provides most of the operations of a mutable `Map`
+ * independently of its representation. It is typically inherited by
+ * concrete implementations of maps.
+ *
+ * To implement a concrete mutable map, you need to provide
+ * implementations of the following methods:
+ * {{{
+ * def get(key: A): Option[B]
+ * def iterator: Iterator[(A, B)]
+ * def += (kv: (A, B)): This
+ * def -= (key: A): This
+ * }}}
+ * If you wish that methods like `take`, `drop`, `filter` also return the same kind of map
+ * you should also override:
+ * {{{
+ * def empty: This
+ * }}}
+ * It is also good idea to override methods `foreach` and
+ * `size` for efficiency.
*/
trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
extends scala.collection.MapLike[A, B, This]
@@ -119,8 +141,6 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] =
clone().asInstanceOf[Map[A, B1]] ++= xs.seq
- @bridge def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1] = ++(xs: GenTraversableOnce[(A, B1)])
-
/** Removes a key from this map, returning the value associated previously
* with that key as an option.
* @param key the key to be removed
@@ -148,13 +168,6 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
@migration("`-` creates a new map. Use `-=` to remove an element from this map and return that map itself.", "2.8.0")
override def -(key: A): This = clone() -= key
- /** If given key is defined in this map, remove it and return associated value as an Option.
- * If key is not present return None.
- * @param key the key to be removed
- */
- @deprecated("Use `remove' instead", "2.8.0")
- def removeKey(key: A): Option[B] = remove(key)
-
/** Removes all bindings from the map. After this operation has completed,
* the map will be empty.
*/
@@ -196,8 +209,8 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @param p The test predicate
*/
def retain(p: (A, B) => Boolean): this.type = {
- for ((k, v) <- this.seq ; if !p(k, v))
- this -= k
+ for ((k, v) <- this.toList) // SI-7269 toList avoids ConcurrentModificationException
+ if (!p(k, v)) this -= k
this
}
@@ -231,6 +244,4 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
*/
@migration("`--` creates a new map. Use `--=` to remove an element from this map and return that map itself.", "2.8.0")
override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
-
- @bridge def --(xs: TraversableOnce[A]): This = --(xs: GenTraversableOnce[A])
}
diff --git a/src/library/scala/collection/mutable/MapProxy.scala b/src/library/scala/collection/mutable/MapProxy.scala
index d32f155..c730e2b 100644
--- a/src/library/scala/collection/mutable/MapProxy.scala
+++ b/src/library/scala/collection/mutable/MapProxy.scala
@@ -1,19 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package mutable
/**
- * This trait implements a proxy for <a href="Map.html"
- * target="contentFrame"><code>scala.collection.mutable.Map</code></a>.
+ * This trait implements a proxy for [[scala.collection.mutable.Map]].
*
* It is most useful for assembling customized map abstractions
* dynamically using object composition and forwarding.
diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala
index 128ec0f..4635bfb 100644
--- a/src/library/scala/collection/mutable/MultiMap.scala
+++ b/src/library/scala/collection/mutable/MultiMap.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -15,11 +15,39 @@ package mutable
/** A trait for mutable maps with multiple values assigned to a key.
*
* This class is typically used as a mixin. It turns maps which map `A`
- * to `Set[B]` objects into multi maps which map `A` to
- * `B` objects.
+ * to `Set[B]` objects into multimaps that map `A` to `B` objects.
+ *
+ * @example {{{
+ * // first import all necessary types from package `collection.mutable`
+ * import collection.mutable.{ HashMap, MultiMap, Set }
+ *
+ * // to create a `MultiMap` the easiest way is to mixin it into a normal
+ * // `Map` instance
+ * val mm = new HashMap[Int, Set[String]] with MultiMap[Int, String]
+ *
+ * // to add key-value pairs to a multimap it is important to use
+ * // the method `addBinding` because standard methods like `+` will
+ * // overwrite the complete key-value pair instead of adding the
+ * // value to the existing key
+ * mm.addBinding(1, "a")
+ * mm.addBinding(2, "b")
+ * mm.addBinding(1, "c")
+ *
+ * // mm now contains `Map(2 -> Set(b), 1 -> Set(c, a))`
+ *
+ * // to check if the multimap contains a value there is method
+ * // `entryExists`, which allows to traverse the including set
+ * mm.entryExists(1, _ == "a") == true
+ * mm.entryExists(1, _ == "b") == false
+ * mm.entryExists(2, _ == "b") == true
+ *
+ * // to remove a previous added value there is the method `removeBinding`
+ * mm.removeBinding(1, "a")
+ * mm.entryExists(1, _ == "a") == false
+ * }}}
*
* @define coll multimap
- * @define Coll MultiMap
+ * @define Coll `MultiMap`
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.8
@@ -36,9 +64,6 @@ trait MultiMap[A, B] extends Map[A, Set[B]] {
*/
protected def makeSet: Set[B] = new HashSet[B]
- @deprecated("use addBinding instead", "2.8.0")
- def add(key: A, value: B): this.type = addBinding(key, value)
-
/** Assigns the specified `value` to a specified `key`, replacing
* the existing value assigned to that `key` if it is equal to
* the specified value. Otherwise, simply adds another binding to
@@ -60,7 +85,8 @@ trait MultiMap[A, B] extends Map[A, Set[B]] {
this
}
- /** Removes the binding of `value` to `key` if it exists.
+ /** Removes the binding of `value` to `key` if it exists, otherwise this
+ * operation doesn't have any effect.
*
* If this was the last value assigned to the specified key, the
* set assigned to that key will be removed as well.
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index 0b73642..bc6272b 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -28,7 +28,8 @@ import immutable.{List, Nil}
*/
@SerialVersionUID(5938451523372603072L)
class MutableList[A]
-extends LinearSeq[A]
+extends AbstractSeq[A]
+ with LinearSeq[A]
with LinearSeqOptimized[A, MutableList[A]]
with GenericTraversableTemplate[A, MutableList]
with Builder[A, MutableList[A]]
@@ -55,12 +56,17 @@ extends LinearSeq[A]
/** Returns the rest of this list
*/
override def tail: MutableList[A] = {
- require(nonEmpty, "tail of empty list")
val tl = new MutableList[A]
+ tailImpl(tl)
+ tl
+ }
+
+ // this method must be private for binary compatibility
+ private final def tailImpl(tl: MutableList[A]) {
+ require(nonEmpty, "tail of empty list")
tl.first0 = first0.tail
- tl.last0 = last0
tl.len = len - 1
- tl
+ tl.last0 = if (tl.len == 0) tl.first0 else last0
}
/** Prepends a single element to this list. This operation takes constant
@@ -74,17 +80,17 @@ extends LinearSeq[A]
*/
override def length: Int = len
- /** Returns the <code>n</code>th element of this list.
+ /** Returns the `n`-th element of this list.
* @throws IndexOutOfBoundsException if index does not exist.
*/
override def apply(n: Int): A = first0.apply(n)
- /** Updates the <code>n</code>th element of this list to a new value.
+ /** Updates the `n`-th element of this list to a new value.
* @throws IndexOutOfBoundsException if index does not exist.
*/
def update(n: Int, x: A): Unit = first0.update(n, x)
- /** Returns the <code>n</code>th element of this list or <code>None</code>
+ /** Returns the `n`-th element of this list or `None`
* if index does not exist.
*/
def get(n: Int): Option[A] = first0.get(n)
@@ -95,19 +101,17 @@ extends LinearSeq[A]
len = len + 1
}
- protected def appendElem(elem: A): Unit =
+ protected def appendElem(elem: A) {
if (len == 0) {
prependElem(elem)
} else {
last0.next = new LinkedList[A]
last0 = last0.next
last0.elem = elem
- last0.next = new LinkedList[A] // for performance, use sentinel `object' instead?
+ last0.next = new LinkedList[A] // for performance, use sentinel `object` instead?
len = len + 1
}
-
- @deprecated("use clear() instead", "2.8.0")
- def reset() { clear() }
+ }
/** Returns an iterator over all elements of this list.
*/
@@ -118,7 +122,7 @@ extends LinearSeq[A]
last0.elem
}
- /** Returns an instance of <code>scala.List</code> containing the same
+ /** Returns an instance of [[scala.List]] containing the same
* sequence of elements.
*/
override def toList: List[A] = first0.toList
@@ -141,15 +145,19 @@ extends LinearSeq[A]
}
def result = this
+
+ override def clone(): MutableList[A] = {
+ val bf = newBuilder
+ bf ++= seq
+ bf.result
+ }
+
}
object MutableList extends SeqFactory[MutableList] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, MutableList[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, MutableList[A]] =
+ ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, MutableList[A]] = new MutableList[A]
}
-
-
-
-
diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala
index c38bf5f..bcaf977 100644
--- a/src/library/scala/collection/mutable/ObservableBuffer.scala
+++ b/src/library/scala/collection/mutable/ObservableBuffer.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -34,6 +34,11 @@ trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoa
this
}
+ abstract override def ++=(xs: TraversableOnce[A]): this.type = {
+ for (x <- xs) this += x
+ this
+ }
+
abstract override def +=:(element: A): this.type = {
super.+=:(element)
publish(new Include(Start, element) with Undoable {
@@ -65,4 +70,18 @@ trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoa
def undo() { throw new UnsupportedOperationException("cannot undo") }
})
}
+
+ abstract override def insertAll(n: Int, elems: scala.collection.Traversable[A]) {
+ super.insertAll(n, elems)
+ var curr = n - 1
+ val msg = elems.foldLeft(new Script[A]() with Undoable {
+ def undo() { throw new UnsupportedOperationException("cannot undo") }
+ }) {
+ case (msg, elem) =>
+ curr += 1
+ msg += Include(Index(curr), elem)
+ }
+ publish(msg)
+ }
+
}
diff --git a/src/library/scala/collection/mutable/ObservableMap.scala b/src/library/scala/collection/mutable/ObservableMap.scala
index ceb23d2..d81c90b 100644
--- a/src/library/scala/collection/mutable/ObservableMap.scala
+++ b/src/library/scala/collection/mutable/ObservableMap.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/mutable/ObservableSet.scala b/src/library/scala/collection/mutable/ObservableSet.scala
index 1b37580..3e79506 100644
--- a/src/library/scala/collection/mutable/ObservableSet.scala
+++ b/src/library/scala/collection/mutable/ObservableSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index 72e729f..8b3e524 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -1,16 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/**
- * @define Coll OpenHashMap
+ * @define Coll `OpenHashMap`
* @define coll open hash map
*
* @since 2.7
@@ -29,10 +30,11 @@ object OpenHashMap {
private[mutable] def nextPowerOfTwo(i : Int) = highestOneBit(i) << 1;
}
-/** A mutable hash map based on an open hashing scheme. The precise scheme is undefined,
- * but it should make a reasonable effort to ensure that an insert with consecutive hash
- * codes is not unneccessarily penalised. In particular, mappings of consecutive integer
- * keys should work without significant performance loss.
+/** A mutable hash map based on an open hashing scheme. The precise scheme is
+ * undefined, but it should make a reasonable effort to ensure that an insert
+ * with consecutive hash codes is not unneccessarily penalised. In particular,
+ * mappings of consecutive integer keys should work without significant
+ * performance loss.
*
* @tparam Key type of the keys in this map.
* @tparam Value type of the values in this map.
@@ -41,94 +43,94 @@ object OpenHashMap {
* @author David MacIver
* @since 2.7
*
- * @define Coll OpenHashMap
+ * @define Coll `OpenHashMap`
* @define coll open hash map
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
class OpenHashMap[Key, Value](initialSize : Int)
-extends Map[Key, Value]
+extends AbstractMap[Key, Value]
+ with Map[Key, Value]
with MapLike[Key, Value, OpenHashMap[Key, Value]] {
import OpenHashMap.OpenEntry
private type Entry = OpenEntry[Key, Value]
- /**
- * A default constructor creates a hashmap with initial size 8.
+ /** A default constructor creates a hashmap with initial size `8`.
*/
- def this() = this(8);
+ def this() = this(8)
override def empty: OpenHashMap[Key, Value] = OpenHashMap.empty[Key, Value]
- private[this] val actualInitialSize = OpenHashMap.nextPowerOfTwo(initialSize);
+ private[this] val actualInitialSize = OpenHashMap.nextPowerOfTwo(initialSize)
- private var mask = actualInitialSize - 1;;
- private var table : Array[Entry] = new Array[Entry](actualInitialSize);
- private var _size = 0;
- private var deleted = 0;
+ private var mask = actualInitialSize - 1
+ private var table : Array[Entry] = new Array[Entry](actualInitialSize)
+ private var _size = 0
+ private var deleted = 0
// Used for tracking inserts so that iterators can determine in concurrent modification has occurred.
- private[this] var modCount = 0;
+ private[this] var modCount = 0
- override def size = _size;
- private[this] def size_=(s : Int) = _size = s;
+ override def size = _size
+ private[this] def size_=(s : Int) { _size = s }
/** Returns a mangled hash code of the provided key. */
- protected def hashOf(key : Key) = {
+ protected def hashOf(key: Key) = {
var h = key.##
h ^= ((h >>> 20) ^ (h >>> 12));
h ^ (h >>> 7) ^ (h >>> 4);
}
private[this] def growTable() = {
- val oldSize = mask + 1;
- val newSize = 4 * oldSize;
- val oldTable = table;
- table = new Array[Entry](newSize);
- mask = newSize - 1;
+ val oldSize = mask + 1
+ val newSize = 4 * oldSize
+ val oldTable = table
+ table = new Array[Entry](newSize)
+ mask = newSize - 1
oldTable.foreach( entry =>
if (entry != null && entry.value != None) addEntry(entry));
- deleted = 0;
+ deleted = 0
}
- private[this] def findIndex(key : Key) : Int = findIndex(key, hashOf(key));
+ private[this] def findIndex(key: Key) : Int = findIndex(key, hashOf(key))
- private[this] def findIndex(key : Key, hash : Int) : Int = {
- var j = hash;
+ private[this] def findIndex(key: Key, hash: Int): Int = {
+ var j = hash
- var index = hash & mask;
- var perturb = index;
+ var index = hash & mask
+ var perturb = index
while(table(index) != null &&
!(table(index).hash == hash &&
table(index).key == key)){
- j = 5 * j + 1 + perturb;
- perturb >>= 5;
- index = j & mask;
+ j = 5 * j + 1 + perturb
+ perturb >>= 5
+ index = j & mask
}
- index;
+ index
}
- private[this] def addEntry(entry : Entry) =
- if (entry != null) table(findIndex(entry.key, entry.hash)) = entry;
+ private[this] def addEntry(entry: Entry) =
+ if (entry != null) table(findIndex(entry.key, entry.hash)) = entry
- override def update(key : Key, value : Value) {
- put(key, hashOf(key), value);
+ override def update(key: Key, value: Value) {
+ put(key, hashOf(key), value)
}
def += (kv: (Key, Value)): this.type = { put(kv._1, kv._2); this }
def -= (key: Key): this.type = { remove(key); this }
- override def put(key : Key, value : Value): Option[Value] =
+ override def put(key: Key, value: Value): Option[Value] =
put(key, hashOf(key), value)
- private def put(key : Key, hash : Int, value : Value): Option[Value] = {
- if (2 * (size + deleted) > mask) growTable;
- val index = findIndex(key, hash);
- val entry = table(index);
+ private def put(key: Key, hash: Int, value: Value): Option[Value] = {
+ if (2 * (size + deleted) > mask) growTable
+ val index = findIndex(key, hash)
+ val entry = table(index)
if (entry == null) {
table(index) = new OpenEntry(key, hash, Some(value));
- modCount += 1;
- size += 1;
+ modCount += 1
+ size += 1
None
} else {
val res = entry.value
@@ -139,23 +141,23 @@ extends Map[Key, Value]
}
override def remove(key : Key): Option[Value] = {
- val index = findIndex(key);
+ val index = findIndex(key)
if (table(index) != null && table(index).value != None){
val res = table(index).value
- table(index).value = None;
- size -= 1;
- deleted += 1;
+ table(index).value = None
+ size -= 1
+ deleted += 1
res
} else None
}
def get(key : Key) : Option[Value] = {
- val hash = hashOf(key);
+ val hash = hashOf(key)
- var j = hash;
- var index = hash & mask;
- var perturb = index;
- var entry = table(index);
+ var j = hash
+ var index = hash & mask
+ var perturb = index
+ var entry = table(index)
while(entry != null){
if (entry.hash == hash &&
entry.key == key){
@@ -167,49 +169,45 @@ extends Map[Key, Value]
index = j & mask;
entry = table(index);
}
- None;
+ None
}
- /** An iterator over the elements of this map. Use of this iterator follows the same
- * contract for concurrent modification as the foreach method.
+ /** An iterator over the elements of this map. Use of this iterator follows
+ * the same contract for concurrent modification as the foreach method.
*
* @return the iterator
*/
- def iterator = new Iterator[(Key, Value)]{
- var index = 0;
- val initialModCount = modCount;
+ def iterator: Iterator[(Key, Value)] = new AbstractIterator[(Key, Value)] {
+ var index = 0
+ val initialModCount = modCount
private[this] def advance() {
if (initialModCount != modCount) sys.error("Concurrent modification");
while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1;
}
- def hasNext = {advance; index <= mask; }
+ def hasNext = {advance(); index <= mask }
def next = {
- advance;
- val result = table(index);
- index += 1;
- (result.key, result.value.get);
+ advance()
+ val result = table(index)
+ index += 1
+ (result.key, result.value.get)
}
}
- override def clone = {
+ override def clone() = {
val it = new OpenHashMap[Key, Value]
foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get));
it
}
- /**
- * Loop over the key, value mappings of this map.
+ /** Loop over the key, value mappings of this map.
*
* The behaviour of modifying the map during an iteration is as follows:
- *
- * <ul>
- * <li>Deleting a mapping is always permitted.</li>
- * <li>Changing the value of mapping which is already present is permitted.</li>
- * <li>Anything else is not permitted. It will usually, but not always, throw an exception.</li>
- * </ul>
+ * - Deleting a mapping is always permitted.
+ * - Changing the value of mapping which is already present is permitted.
+ * - Anything else is not permitted. It will usually, but not always, throw an exception.
*
* @tparam U The return type of the specified function `f`, return result of which is ignored.
* @param f The function to apply to each key, value mapping.
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 3f5bfb9..84257c6 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,7 +12,6 @@ package scala.collection
package mutable
import generic._
-import annotation.bridge
/** This class implements priority queues using a heap.
* To prioritize elements of type A there must be an implicit
@@ -32,23 +31,24 @@ import annotation.bridge
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
- at cloneable
class PriorityQueue[A](implicit val ord: Ordering[A])
- extends Iterable[A]
+ extends AbstractIterable[A]
+ with Iterable[A]
with GenericOrderedTraversableTemplate[A, PriorityQueue]
with IterableLike[A, PriorityQueue[A]]
with Growable[A]
with Builder[A, PriorityQueue[A]]
with Serializable
+ with scala.Cloneable
{
import ord._
- private final class ResizableArrayAccess[A] extends ResizableArray[A] {
- @inline def p_size0 = size0
- @inline def p_size0_=(s: Int) = size0 = s
- @inline def p_array = array
- @inline def p_ensureSize(n: Int) = super.ensureSize(n)
- @inline def p_swap(a: Int, b: Int) = super.swap(a, b)
+ private class ResizableArrayAccess[A] extends AbstractSeq[A] with ResizableArray[A] {
+ def p_size0 = size0
+ def p_size0_=(s: Int) = size0 = s
+ def p_array = array
+ def p_ensureSize(n: Int) = super.ensureSize(n)
+ def p_swap(a: Int, b: Int) = super.swap(a, b)
}
protected[this] override def newBuilder = new PriorityQueue[A]
@@ -91,23 +91,6 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
}
}
- @deprecated(
- "Use += instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() +=' if you intend to create a new collection.", "2.8.0"
- )
- def +(elem: A): PriorityQueue[A] = { this.clone() += elem }
-
- /** Add two or more elements to this set.
- * @param elem1 the first element.
- * @param kv2 the second element.
- * @param kvs the remaining elements.
- */
- @deprecated(
- "Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=' if you intend to create a new collection.", "2.8.0"
- )
- def +(elem1: A, elem2: A, elems: A*) = { this.clone().+=(elem1, elem2, elems : _*) }
-
/** Inserts a single element into the priority queue.
*
* @param elem the element to insert.
@@ -129,9 +112,6 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
*/
def ++(xs: GenTraversableOnce[A]): PriorityQueue[A] = { this.clone() ++= xs.seq }
- @bridge
- def ++(xs: TraversableOnce[A]): PriorityQueue[A] = ++ (xs: GenTraversableOnce[A])
-
/** Adds all elements to the queue.
*
* @param elems the elements to add.
@@ -181,12 +161,15 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
*/
def clear(): Unit = { resarr.p_size0 = 1 }
- /** Returns an iterator which yields all the elements of the priority
- * queue in descending priority order.
+ /** Returns an iterator which yields all the elements.
*
- * @return an iterator over all elements sorted in descending order.
+ * Note: The order of elements returned is undefined.
+ * If you want to traverse the elements in priority queue
+ * order, use `clone().dequeueAll.iterator`.
+ *
+ * @return an iterator over all the elements.
*/
- override def iterator: Iterator[A] = new Iterator[A] {
+ override def iterator: Iterator[A] = new AbstractIterator[A] {
private var i = 1
def hasNext: Boolean = i < resarr.p_size0
def next(): A = {
@@ -196,7 +179,6 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
}
}
-
/** Returns the reverse of this queue. The priority queue that gets
* returned will have an inversed ordering - if for some elements
* `x` and `y` the original queue's ordering
@@ -211,14 +193,21 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @return A reversed priority queue.
*/
def reverse = {
- val revq = new PriorityQueue[A]()(new math.Ordering[A] {
+ val revq = new PriorityQueue[A]()(new scala.math.Ordering[A] {
def compare(x: A, y: A) = ord.compare(y, x)
})
for (i <- 1 until resarr.length) revq += resarr(i)
revq
}
- def reverseIterator = new Iterator[A] {
+ /** Returns an iterator which yields all the elements in the reverse order
+ * than that returned by the method `iterator`.
+ *
+ * Note: The order of elements returned is undefined.
+ *
+ * @return an iterator over all elements sorted in descending order.
+ */
+ def reverseIterator: Iterator[A] = new AbstractIterator[A] {
private var i = resarr.p_size0 - 1
def hasNext: Boolean = i >= 1
def next(): A = {
@@ -237,6 +226,8 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
throw new UnsupportedOperationException("unsuitable as hash key")
/** Returns a regular queue containing the same elements.
+ *
+ * Note: the order of elements is undefined.
*/
def toQueue: Queue[A] = new Queue[A] ++= this.iterator
@@ -245,6 +236,13 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
* @return the string representation of this queue.
*/
override def toString() = toList.mkString("PriorityQueue(", ", ", ")")
+
+ /** Converts this $coll to a list.
+ *
+ * Note: the order of elements is undefined.
+ *
+ * @return a list containing all elements of this $coll.
+ */
override def toList = this.iterator.toList
/** This method clones the priority queue.
diff --git a/src/library/scala/collection/mutable/PriorityQueueProxy.scala b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
index 70b9e82..3bb5d32 100644
--- a/src/library/scala/collection/mutable/PriorityQueueProxy.scala
+++ b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/mutable/Publisher.scala b/src/library/scala/collection/mutable/Publisher.scala
index 7e06199..e31205b 100644
--- a/src/library/scala/collection/mutable/Publisher.scala
+++ b/src/library/scala/collection/mutable/Publisher.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -32,8 +32,8 @@ trait Publisher[Evt] {
type Sub = Subscriber[Evt, Pub]
type Filter = Evt => Boolean
- /** The publisher itself of type `Pub'. Implemented by a cast from `this' here.
- * Needs to be overridden if the actual publisher is different from `this'.
+ /** The publisher itself of type `Pub`. Implemented by a cast from `this` here.
+ * Needs to be overridden if the actual publisher is different from `this`.
*/
protected val self: Pub = this.asInstanceOf[Pub]
diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala
index f3dc4d0..8ef5f6a 100644
--- a/src/library/scala/collection/mutable/Queue.scala
+++ b/src/library/scala/collection/mutable/Queue.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -23,16 +23,16 @@ import generic._
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_queues "Scala's Collection Library overview"]]
* section on `Queues` for more information.
*
- * @define Coll mutable.Queue
+ * @define Coll `mutable.Queue`
* @define coll mutable queue
* @define orderDependent
* @define orderDependentFold
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
- at cloneable
class Queue[A]
extends MutableList[A]
+ with LinearSeqOptimized[A, Queue[A]]
with GenericTraversableTemplate[A, Queue]
with Cloneable[Queue[A]]
with Serializable
@@ -66,7 +66,7 @@ extends MutableList[A]
else {
val res = first0.elem
first0 = first0.next
- len -= 1
+ decrementLength()
res
}
@@ -82,11 +82,11 @@ extends MutableList[A]
else if (p(first0.elem)) {
val res: Option[A] = Some(first0.elem)
first0 = first0.next
- len -= 1
+ decrementLength()
res
} else {
val optElem = removeFromList(p)
- if (optElem != None) len -= 1
+ if (optElem != None) decrementLength()
optElem
}
@@ -119,7 +119,7 @@ extends MutableList[A]
while ((first0.nonEmpty) && p(first0.elem)) {
res += first0.elem
first0 = first0.next
- len -= 1
+ decrementLength()
}
if (first0.isEmpty) res
else removeAllFromList(p, res)
@@ -130,10 +130,10 @@ extends MutableList[A]
var leftlst = first0
while (leftlst.next.nonEmpty) {
if (p(leftlst.next.elem)) {
- res += leftlst.next.elem
- if (leftlst.next eq last0) last0 = leftlst
- leftlst.next = leftlst.next.next
- len -= 1
+ res += leftlst.next.elem
+ if (leftlst.next eq last0) last0 = leftlst
+ leftlst.next = leftlst.next.next
+ decrementLength()
} else leftlst = leftlst.next
}
res
@@ -154,7 +154,7 @@ extends MutableList[A]
else {
val res: Option[LinkedList[A]] = Some(cell.next)
cell.next = cell.next.next
- len -= 1
+ decrementLength()
res
}
}
@@ -166,11 +166,37 @@ extends MutableList[A]
* @return the first element.
*/
def front: A = head
+
+ // this method (duplicated from MutableList) must be private for binary compatibility
+ private final def tailImpl(tl: Queue[A]) {
+ require(nonEmpty, "tail of empty list")
+ tl.first0 = first0.tail
+ tl.len = len - 1
+ tl.last0 = if (tl.len == 0) tl.first0 else last0
+ }
+
+ // TODO - Don't override this just for new to create appropriate type....
+ override def tail: Queue[A] = {
+ val tl = new Queue[A]
+ tailImpl(tl)
+ tl
+ }
+
+ override def clone(): Queue[A] = {
+ val bf = newBuilder
+ bf ++= seq
+ bf.result
+ }
+
+ private[this] def decrementLength() {
+ len -= 1
+ if (len == 0) last0 = first0
+ }
}
object Queue extends SeqFactory[Queue] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Queue[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Queue[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, Queue[A]] = new MutableList[A] mapResult { _.toQueue }
}
diff --git a/src/library/scala/collection/mutable/QueueProxy.scala b/src/library/scala/collection/mutable/QueueProxy.scala
index be26d41..c286a34 100644
--- a/src/library/scala/collection/mutable/QueueProxy.scala
+++ b/src/library/scala/collection/mutable/QueueProxy.scala
@@ -1,7 +1,7 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
@@ -24,9 +24,9 @@ trait QueueProxy[A] extends Queue[A] with Proxy {
def self: Queue[A]
- /** Access element number <code>n</code>.
+ /** Access element number `n`.
*
- * @return the element at index <code>n</code>.
+ * @return the element at index `n`.
*/
override def apply(n: Int): A = self.apply(n)
@@ -46,11 +46,10 @@ trait QueueProxy[A] extends Queue[A] with Proxy {
*/
override def +=(elem: A): this.type = { self += elem; this }
- /** Adds all elements provided by an iterator
- * at the end of the queue. The elements are prepended in the order they
- * are given out by the iterator.
+ /** Adds all elements provided by an iterator at the end of the queue. The
+ * elements are prepended in the order they are given out by the iterator.
*
- * @param iter an iterator
+ * @param it an iterator
*/
override def ++=(it: TraversableOnce[A]): this.type = {
self ++= it
@@ -61,7 +60,7 @@ trait QueueProxy[A] extends Queue[A] with Proxy {
*
* @param elems the elements to add.
*/
- override def enqueue(elems: A*): Unit = self ++= elems
+ override def enqueue(elems: A*) { self ++= elems }
/** Returns the first element in the queue, and removes this element
* from the queue.
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index d5398f0..4a12f95 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -1,14 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -62,10 +61,12 @@ trait ResizableArray[A] extends IndexedSeq[A]
}
}
- /** Fills the given array <code>xs</code> with at most `len` elements of
- * this traversable starting at position `start`.
- * Copying will stop once either the end of the current traversable is reached or
- * `len` elements have been copied or the end of the array is reached.
+ /** Fills the given array `xs` with at most `len` elements of this
+ * traversable starting at position `start`.
+ *
+ * Copying will stop once either the end of the current traversable is
+ * reached or `len` elements have been copied or the end of the array
+ * is reached.
*
* @param xs the array to fill.
* @param start starting index.
@@ -78,7 +79,7 @@ trait ResizableArray[A] extends IndexedSeq[A]
//##########################################################################
- /** remove elements of this array at indices after <code>sz</code>
+ /** Remove elements of this array at indices after `sz`.
*/
def reduceToSize(sz: Int) {
require(sz <= size0)
@@ -88,7 +89,7 @@ trait ResizableArray[A] extends IndexedSeq[A]
}
}
- /** ensure that the internal array has at n cells */
+ /** Ensure that the internal array has at `n` cells. */
protected def ensureSize(n: Int) {
if (n > array.length) {
var newsize = array.length * 2
@@ -96,7 +97,7 @@ trait ResizableArray[A] extends IndexedSeq[A]
newsize = newsize * 2
val newar: Array[AnyRef] = new Array(newsize)
- compat.Platform.arraycopy(array, 0, newar, 0, size0)
+ scala.compat.Platform.arraycopy(array, 0, newar, 0, size0)
array = newar
}
}
@@ -112,11 +113,13 @@ trait ResizableArray[A] extends IndexedSeq[A]
/** Move parts of the array.
*/
protected def copy(m: Int, n: Int, len: Int) {
- compat.Platform.arraycopy(array, m, array, n, len)
+ scala.compat.Platform.arraycopy(array, m, array, n, len)
}
}
object ResizableArray extends SeqFactory[ResizableArray] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ResizableArray[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ResizableArray[A]] =
+ ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
+
def newBuilder[A]: Builder[A, ResizableArray[A]] = new ArrayBuffer[A]
}
diff --git a/src/library/scala/collection/mutable/RevertibleHistory.scala b/src/library/scala/collection/mutable/RevertibleHistory.scala
index 922824d..5544a21 100644
--- a/src/library/scala/collection/mutable/RevertibleHistory.scala
+++ b/src/library/scala/collection/mutable/RevertibleHistory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/mutable/Seq.scala b/src/library/scala/collection/mutable/Seq.scala
index 42b4ac5..9d9399e 100644
--- a/src/library/scala/collection/mutable/Seq.scala
+++ b/src/library/scala/collection/mutable/Seq.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -21,7 +21,7 @@ import generic._
*
* The class adds an `update` method to `collection.Seq`.
*
- * @define Coll mutable.Seq
+ * @define Coll `mutable.Seq`
* @define coll mutable sequence
*/
trait Seq[A] extends Iterable[A]
@@ -36,9 +36,12 @@ trait Seq[A] extends Iterable[A]
/** $factoryInfo
* The current default implementation of a $Coll is an `ArrayBuffer`.
* @define coll mutable sequence
- * @define Coll mutable.Seq
+ * @define Coll `mutable.Seq`
*/
object Seq extends SeqFactory[Seq] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Seq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, Seq[A]] = new ArrayBuffer
}
+
+/** Explicit instantiation of the `Seq` trait to reduce class file size in subclasses. */
+private[scala] abstract class AbstractSeq[A] extends scala.collection.AbstractSeq[A] with Seq[A]
diff --git a/src/library/scala/collection/mutable/SeqLike.scala b/src/library/scala/collection/mutable/SeqLike.scala
index 7194f15..447100c 100644
--- a/src/library/scala/collection/mutable/SeqLike.scala
+++ b/src/library/scala/collection/mutable/SeqLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -28,8 +28,8 @@ trait SeqLike[A, +This <: SeqLike[A, This] with Seq[A]]
/** Replaces element at given index with a new value.
*
- * @param n the index of the element to replace.
- * @param lem the new value.
+ * @param idx the index of the element to replace.
+ * @param elem the new value.
* @throws IndexOutOfBoundsException if the index is not valid.
*/
def update(idx: Int, elem: A)
diff --git a/src/library/scala/collection/mutable/Set.scala b/src/library/scala/collection/mutable/Set.scala
index 30fc368..023ff63 100644
--- a/src/library/scala/collection/mutable/Set.scala
+++ b/src/library/scala/collection/mutable/Set.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -19,7 +19,7 @@ import generic._
*
* @since 1.0
* @author Matthias Zenger
- * @define Coll mutable.Set
+ * @define Coll `mutable.Set`
* @define coll mutable set
*/
trait Set[A] extends Iterable[A]
@@ -34,10 +34,12 @@ trait Set[A] extends Iterable[A]
/** $factoryInfo
* The current default implementation of a $Coll is a `HashSet`.
* @define coll mutable set
- * @define Coll mutable.Set
+ * @define Coll `mutable.Set`
*/
object Set extends MutableSetFactory[Set] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = setCanBuildFrom[A]
override def empty[A]: Set[A] = HashSet.empty[A]
}
+/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */
+private[scala] abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A]
diff --git a/src/library/scala/collection/mutable/SetBuilder.scala b/src/library/scala/collection/mutable/SetBuilder.scala
index 582ca89..42fd651 100644
--- a/src/library/scala/collection/mutable/SetBuilder.scala
+++ b/src/library/scala/collection/mutable/SetBuilder.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -19,7 +19,7 @@ import generic._
* @param empty The empty element of the collection.
* @since 2.8
*/
-class SetBuilder[A, Coll <: collection.Set[A] with collection.SetLike[A, Coll]](empty: Coll) extends Builder[A, Coll] {
+class SetBuilder[A, Coll <: scala.collection.Set[A] with scala.collection.SetLike[A, Coll]](empty: Coll) extends Builder[A, Coll] {
protected var elems: Coll = empty
def +=(x: A): this.type = { elems = elems + x; this }
def clear() { elems = empty }
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index 883ae4a..71da4c8 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -1,19 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package mutable
import generic._
import script._
-import annotation.{migration, bridge}
+import scala.annotation.{ migration, bridge }
import parallel.mutable.ParSet
/** A template trait for mutable sets of type `mutable.Set[A]`.
@@ -122,7 +120,9 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
* which `p` returns `true` are retained in the set; all others
* are removed.
*/
- def retain(p: A => Boolean): Unit = for (elem <- this.toList) if (!p(elem)) this -= elem
+ def retain(p: A => Boolean): Unit =
+ for (elem <- this.toList) // SI-7269 toList avoids ConcurrentModificationException
+ if (!p(elem)) this -= elem
/** Removes all elements from the set. After this operation is completed,
* the set will be empty.
@@ -172,8 +172,6 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
@migration("`++` creates a new set. Use `++=` to add elements to this set and return that set itself.", "2.8.0")
override def ++(xs: GenTraversableOnce[A]): This = clone() ++= xs.seq
- @bridge def ++(xs: TraversableOnce[A]): This = ++(xs: GenTraversableOnce[A])
-
/** Creates a new set consisting of all the elements of this set except `elem`.
*
* @param elem the element to remove.
@@ -205,8 +203,6 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
@migration("`--` creates a new set. Use `--=` to remove elements from this set and return that set itself.", "2.8.0")
override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq
- @bridge def --(xs: TraversableOnce[A]): This = --(xs: GenTraversableOnce[A])
-
/** Send a message to this scriptable object.
*
* @param cmd the message to send.
diff --git a/src/library/scala/collection/mutable/SetProxy.scala b/src/library/scala/collection/mutable/SetProxy.scala
index 9168c74..c9f2975 100644
--- a/src/library/scala/collection/mutable/SetProxy.scala
+++ b/src/library/scala/collection/mutable/SetProxy.scala
@@ -1,17 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.collection
package mutable
-/** This is a simple wrapper class for <a href="Set.html"
- * target="contentFrame"><code>scala.collection.mutable.Set</code></a>.
+/** This is a simple wrapper class for [[scala.collection.mutable.Set]].
* It is most useful for assembling customized set abstractions
* dynamically using object composition and forwarding.
*
diff --git a/src/library/scala/collection/mutable/SortedSet.scala b/src/library/scala/collection/mutable/SortedSet.scala
new file mode 100644
index 0000000..41f2c6e
--- /dev/null
+++ b/src/library/scala/collection/mutable/SortedSet.scala
@@ -0,0 +1,49 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package mutable
+
+import generic._
+
+/**
+ * Base trait for mutable sorted set.
+ *
+ * @define Coll `mutable.SortedSet`
+ * @define coll mutable sorted set
+ *
+ * @author Lucien Pereira
+ *
+ */
+trait SortedSet[A] extends scala.collection.SortedSet[A] with scala.collection.SortedSetLike[A,SortedSet[A]]
+ with mutable.Set[A] with mutable.SetLike[A, SortedSet[A]] {
+
+ /** Needs to be overridden in subclasses. */
+ override def empty: SortedSet[A] = SortedSet.empty[A]
+
+}
+
+/**
+ * A template for mutable sorted set companion objects.
+ *
+ * @define Coll `mutable.SortedSet`
+ * @define coll mutable sorted set
+ * @define factoryInfo
+ * This object provides a set of operations needed to create sorted sets of type mutable.SortedSet.
+ * @define sortedSetCanBuildFromInfo
+ * Standard `CanBuildFrom` instance for sorted sets.
+ *
+ * @author Lucien Pereira
+ *
+ */
+object SortedSet extends MutableSortedSetFactory[SortedSet] {
+ implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = new SortedSetCanBuildFrom[A]
+
+ def empty[A](implicit ord: Ordering[A]): SortedSet[A] = TreeSet.empty[A]
+
+}
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index 4b297de..6eef250 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,28 +12,25 @@ package scala.collection
package mutable
import generic._
-import collection.immutable.{List, Nil}
-import collection.Iterator
-import annotation.migration
+import scala.collection.immutable.{List, Nil}
+import scala.collection.Iterator
+import scala.annotation.migration
/** Factory object for the `mutable.Stack` class.
*
* $factoryInfo
* @define coll mutable stack
- * @define Coll mutable.Stack
+ * @define Coll `mutable.Stack`
*/
object Stack extends SeqFactory[Stack] {
class StackBuilder[A] extends Builder[A, Stack[A]] {
val lbuff = new ListBuffer[A]
def +=(elem: A) = { lbuff += elem; this }
- def clear = lbuff.clear
- def result = {
- val lst = lbuff.result
- new Stack(lst)
- }
+ def clear() = lbuff.clear()
+ def result = new Stack(lbuff.result)
}
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Stack[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Stack[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, Stack[A]] = new StackBuilder[A]
val empty: Stack[Nothing] = new Stack(Nil)
}
@@ -47,18 +44,18 @@ object Stack extends SeqFactory[Stack] {
* @author Martin Odersky
* @version 2.8
* @since 1
- * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#stacks"Scala's Collection Library overview"]]
+ * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#stacks "Scala's Collection Library overview"]]
* section on `Stacks` for more information.
- * @define Coll Stack
+ * @define Coll `Stack`
* @define coll stack
* @define orderDependent
* @define orderDependentFold
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
- at cloneable
class Stack[A] private (var elems: List[A])
-extends Seq[A]
+extends AbstractSeq[A]
+ with Seq[A]
with SeqLike[A, Stack[A]]
with GenericTraversableTemplate[A, Stack]
with Cloneable[Stack[A]]
@@ -77,7 +74,7 @@ extends Seq[A]
/** The number of elements in the stack */
override def length = elems.length
- /** Retrieve n'th element from stack, where top of stack has index 0.
+ /** Retrieve `n`-th element from stack, where top of stack has index `0`.
*
* This is a linear time operation.
*
@@ -87,8 +84,7 @@ extends Seq[A]
*/
override def apply(index: Int) = elems(index)
- /** Replace element at index <code>n</code> with the new element
- * <code>newelem</code>.
+ /** Replace element at index `n` with the new element `newelem`.
*
* This is a linear time operation.
*
@@ -116,19 +112,14 @@ extends Seq[A]
def push(elem1: A, elem2: A, elems: A*): this.type =
this.push(elem1).push(elem2).pushAll(elems)
- /** Push all elements in the given traversable object onto
- * the stack. The last element in the traversable object
- * will be on top of the new stack.
+ /** Push all elements in the given traversable object onto the stack. The
+ * last element in the traversable object will be on top of the new stack.
*
* @param xs the traversable object.
* @return the stack with the new elements on top.
*/
def pushAll(xs: TraversableOnce[A]): this.type = { xs.seq foreach push ; this }
- @deprecated("use pushAll", "2.8.0")
- @migration(2, 8, "Stack ++= now pushes arguments on the stack from left to right.")
- def ++=(xs: TraversableOnce[A]): this.type = pushAll(xs)
-
/** Returns the top element of the stack. This method will not remove
* the element from the stack. An error is signaled if there is no
* element on the stack.
@@ -159,8 +150,8 @@ extends Seq[A]
/** Returns an iterator over all elements on the stack. This iterator
* is stable with respect to state changes in the stack object; i.e.
* such changes will not be reflected in the iterator. The iterator
- * issues elements in the reversed order they were inserted into the stack
- * (LIFO order).
+ * issues elements in the reversed order they were inserted into the
+ * stack (LIFO order).
*
* @return an iterator over all stack elements.
*/
@@ -183,4 +174,3 @@ extends Seq[A]
*/
override def clone(): Stack[A] = new Stack[A](elems)
}
-
diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala
index 5dd5592..16f13ff 100644
--- a/src/library/scala/collection/mutable/StackProxy.scala
+++ b/src/library/scala/collection/mutable/StackProxy.scala
@@ -1,17 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package mutable
-
/** A stack implements a data structure which allows to store and retrieve
* objects in a last-in-first-out (LIFO) fashion.
*
@@ -25,9 +22,9 @@ trait StackProxy[A] extends Stack[A] with Proxy {
def self: Stack[A]
- /** Access element number <code>n</code>.
+ /** Access element number `n`.
*
- * @return the element at index <code>n</code>.
+ * @return the element at index `n`.
*/
override def apply(n: Int): A = self.apply(n)
@@ -52,21 +49,16 @@ trait StackProxy[A] extends Stack[A] with Proxy {
override def pushAll(xs: TraversableOnce[A]): this.type = { self pushAll xs; this }
- /** Pushes all elements provided by an <code>Iterable</code> object
- * on top of the stack. The elements are pushed in the order they
- * are given out by the iterator.
- *
- * @param iter an iterable object
- */
- @deprecated("use pushAll", "2.8.0")
- override def ++=(xs: TraversableOnce[A]): this.type = { self ++= xs ; this }
-
-
override def push(elem1: A, elem2: A, elems: A*): this.type = {
self.push(elem1).push(elem2).pushAll(elems)
this
}
+ override def push(elem: A): this.type = {
+ self.push(elem)
+ this
+ }
+
/** Returns the top element of the stack. This method will not remove
* the element from the stack. An error is signaled if there is no
* element on the stack.
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index 812e02f..4d269a9 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,12 +10,12 @@ package scala.collection
package mutable
import java.lang.{ StringBuilder => JavaStringBuilder }
-import annotation.migration
+import scala.annotation.migration
import immutable.StringLike
/** A builder for mutable sequence of characters. This class provides an API
- * mostly compatible with java.lang.StringBuilder, except where there are conflicts
- * with the Scala collections API (such as the `reverse` method.)
+ * mostly compatible with `java.lang.StringBuilder`, except where there are
+ * conflicts with the Scala collections API (such as the `reverse` method.)
*
* @author Stephane Micheloud
* @author Martin Odersky
@@ -26,7 +26,8 @@ import immutable.StringLike
*/
@SerialVersionUID(0 - 8525408645367278351L)
final class StringBuilder(private val underlying: JavaStringBuilder)
- extends java.lang.CharSequence
+ extends AbstractSeq[Char]
+ with java.lang.CharSequence
with IndexedSeq[Char]
with StringLike[StringBuilder]
with Builder[Char, String]
@@ -38,8 +39,8 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
/** Creates a string builder buffer as builder for this class */
override protected[this] def newBuilder = new GrowingBuilder(new StringBuilder)
- /** Constructs a string builder initialized with String initValue
- * and with additional Char capacity initCapacity.
+ /** Constructs a string builder initialized with string value `initValue`
+ * and with additional character capacity `initCapacity`.
*/
def this(initCapacity: Int, initValue: String) =
this(new JavaStringBuilder(initValue.length + initCapacity) append initValue)
@@ -50,7 +51,7 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
def this() = this(16, "")
/** Constructs a string builder with no characters in it and an
- * initial capacity specified by the capacity argument.
+ * initial capacity specified by the `capacity` argument.
*
* @param capacity the initial capacity.
* @throws NegativeArraySizeException if capacity < 0.
@@ -91,18 +92,14 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
*/
def capacity: Int = underlying.capacity()
- @deprecated("Use `ensureCapacity' instead. An assignment is misleading because\n"+
- "it can never decrease the capacity.", "2.8.0")
- def capacity_=(n: Int) { ensureCapacity(n) }
-
/** Ensure that the capacity is at least the given argument.
* If the argument is greater than the current capacity, new
* storage will be allocated with size equal to the given
- * argument or to (2 * capacity + 2), whichever is larger.
+ * argument or to `(2 * capacity + 2)`, whichever is larger.
*
* @param newCapacity the minimum desired capacity.
*/
- def ensureCapacity(newCapacity: Int): Unit = underlying ensureCapacity newCapacity
+ def ensureCapacity(newCapacity: Int) { underlying ensureCapacity newCapacity }
/** Returns the Char at the specified index, counting from 0 as in Arrays.
*
@@ -167,20 +164,33 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
/** For implementing CharSequence.
*/
- def subSequence(start: Int, end: Int): java.lang.CharSequence = substring(start, end)
+ def subSequence(start: Int, end: Int): java.lang.CharSequence =
+ substring(start, end)
/** Appends the given Char to the end of the sequence.
*/
def +=(x: Char): this.type = { append(x); this }
+ /** Optimization.
+ */
+ def ++=(s: String): this.type = {
+ underlying append s
+ this
+ }
+
+ def appendAll(xs: String): StringBuilder = {
+ underlying append xs
+ this
+ }
+
/** !!! This should create a new sequence.
*/
def +(x: Char): this.type = { +=(x); this }
/** Appends the string representation of the given argument,
- * which is converted to a String with String.valueOf.
+ * which is converted to a String with `String.valueOf`.
*
- * @param x an <code>Any</code> object.
+ * @param x an `Any` object.
* @return this StringBuilder.
*/
def append(x: Any): StringBuilder = {
@@ -357,31 +367,6 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
def insert(index: Int, x: Double): StringBuilder = insert(index, String.valueOf(x))
def insert(index: Int, x: Char): StringBuilder = insert(index, String.valueOf(x))
- @deprecated("Use appendAll instead. This method is deprecated because of the\n"+
- "possible confusion with `append(Any)'.", "2.8.0")
- def append(x: Seq[Char]): StringBuilder = appendAll(x)
-
- @deprecated("use appendAll instead. This method is deprecated because\n"+
- "of the possible confusion with `append(Any)'.", "2.8.0")
- def append(x: Array[Char]): StringBuilder = appendAll(x)
-
- @deprecated("use appendAll instead. This method is deprecated because\n"+
- "of the possible confusion with `append(Any, Int, Int)'.", "2.8.0")
- def append(x: Array[Char], offset: Int, len: Int): StringBuilder = appendAll(x, offset, len)
-
- @deprecated("use insertAll instead. This method is deprecated because of the\n"+
- "possible confusion with `insert(Int, Any, Int, Int)'.", "2.8.0")
- def insert(index: Int, str: Array[Char], offset: Int, len: Int): StringBuilder =
- insertAll(index, str, offset, len)
-
- @deprecated("use insertAll instead. This method is deprecated because of\n"+
- "the possible confusion with `insert(Int, Any)'.", "2.8.0")
- def insert(at: Int, x: Seq[Char]): StringBuilder = insertAll(at, x)
-
- @deprecated("use insertAll instead. This method is deprecated because of\n"+
- "the possible confusion with `insert(Int, Any)'.", "2.8.0")
- def insert(at: Int, x: Array[Char]): StringBuilder = insertAll(at, x)
-
/** Finds the index of the first occurrence of the specified substring.
*
* @param str the target string to search for
@@ -419,7 +404,7 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
* @return the reversed StringBuilder
*/
@migration("`reverse` returns a new instance. Use `reverseContents` to update in place and return that StringBuilder itself.", "2.8.0")
- override def reverse: StringBuilder = new StringBuilder(new JavaStringBuilder(underlying) reverse)
+ override def reverse: StringBuilder = new StringBuilder(new JavaStringBuilder(underlying).reverse)
override def clone(): StringBuilder = new StringBuilder(new JavaStringBuilder(underlying))
diff --git a/src/library/scala/collection/mutable/Subscriber.scala b/src/library/scala/collection/mutable/Subscriber.scala
index 29474b6..35d31d7 100644
--- a/src/library/scala/collection/mutable/Subscriber.scala
+++ b/src/library/scala/collection/mutable/Subscriber.scala
@@ -1,21 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package mutable
-
-/** <code>Subscriber[A, B]</code> objects may subscribe to events of
- * type <code>A</code> published by an object of type <code>B</code>.
- * <code>B</code> is typically a subtype of <a href="Publisher.html"
- * target="contentFrame"><code>Publisher</code></a>.
+/** `Subscriber[A, B]` objects may subscribe to events of type `A`
+ * published by an object of type `B`. `B` is typically a subtype of
+ * [[scala.collection.mutable.Publisher]].
*
* @author Matthias Zenger
* @author Martin Odersky
diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
index 5b76d94..bf9a70c 100644
--- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala
+++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -21,7 +21,7 @@ import script._
* @author Matthias Zenger
* @version 1.0, 08/07/2003
* @since 1
- * @define Coll SynchronizedBuffer
+ * @define Coll `SynchronizedBuffer`
* @define coll synchronized buffer
*/
trait SynchronizedBuffer[A] extends Buffer[A] {
@@ -40,15 +40,6 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
super.apply(n)
}
- /** Append a single element to this buffer and return
- * the identity of the buffer.
- *
- * @param elem the element to append.
- */
- override def +(elem: A): Self = synchronized {
- super.+(elem)
- }
-
/** Append a single element to this buffer.
*
* @param elem the element to append.
@@ -70,7 +61,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
/** Appends a number of elements provided by a traversable object
* via its `foreach` method.
*
- * @param iter the iterable object.
+ * @param xs the iterable object.
*/
override def ++=(xs: TraversableOnce[A]): this.type = synchronized[this.type] {
super.++=(xs)
@@ -85,7 +76,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
}
/** Appends a number of elements provided by a traversable object
- * via its <code>foreach</code> method.
+ * via its `foreach` method.
*
* @param xs the traversable object.
*/
@@ -103,8 +94,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
}
/** Prepends a number of elements provided by a traversable object
- * via its <code>foreach</code> method. The identity of the
- * buffer is returned.
+ * via its `foreach` method. The identity of the buffer is returned.
*
* @param xs the traversable object.
*/
@@ -112,13 +102,12 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
/** Prepend an element to this list.
*
- * @param elem the element to prepend.
+ * @param elems the elements to prepend.
*/
override def prepend(elems: A*): Unit = prependAll(elems)
/** Prepends a number of elements provided by a traversable object
- * via its <code>foreach</code> method. The identity of the
- * buffer is returned.
+ * via its `foreach` method. The identity of the buffer is returned.
*
* @param xs the traversable object.
*/
@@ -126,9 +115,9 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
super.prependAll(xs)
}
- /** Inserts new elements at the index <code>n</code>. Opposed to method
- * <code>update</code>, this method will not replace an element with a
- * one. Instead, it will insert the new elements at index <code>n</code>.
+ /** Inserts new elements at the index `n`. Opposed to method `update`,
+ * this method will not replace an element with a one.
+ * Instead, it will insert the new elements at index `n`.
*
* @param n the index where a new element will be inserted.
* @param elems the new elements to insert.
@@ -137,9 +126,9 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
super.insertAll(n, elems)
}
- /** Inserts new elements at the index <code>n</code>. Opposed to method
- * <code>update</code>, this method will not replace an element with a
- * one. Instead, it will insert a new element at index <code>n</code>.
+ /** Inserts new elements at the index `n`. Opposed to method `update`,
+ * this method will not replace an element with a one.
+ * Instead, it will insert a new element at index `n`.
*
* @param n the index where a new element will be inserted.
* @param xs the traversable object providing all elements to insert.
@@ -148,8 +137,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
super.insertAll(n, xs)
}
- /** Replace element at index <code>n</code> with the new element
- * <code>newelem</code>.
+ /** Replace element at index `n` with the new element `newelem`.
*
* @param n the index of the element to replace.
* @param newelem the new element.
@@ -178,13 +166,13 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
/** Return a clone of this buffer.
*
- * @return an <code>ArrayBuffer</code> with the same elements.
+ * @return an `ArrayBuffer` with the same elements.
*/
override def clone(): Self = synchronized {
super.clone()
}
- /** The hashCode method always yields an error, since it is not
+ /** The `hashCode` method always yields an error, since it is not
* safe to use buffers as keys in hash tables.
*
* @return never.
diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala
index 6e3ae13..5a3562c 100644
--- a/src/library/scala/collection/mutable/SynchronizedMap.scala
+++ b/src/library/scala/collection/mutable/SynchronizedMap.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,7 +11,7 @@
package scala.collection
package mutable
-import annotation.migration
+import scala.annotation.migration
/** This class should be used as a mixin. It synchronizes the `Map`
* functions of the class into which it is mixed in.
@@ -22,7 +22,7 @@ import annotation.migration
* @author Matthias Zenger, Martin Odersky
* @version 2.0, 31/12/2006
* @since 1
- * @define Coll SynchronizedMap
+ * @define Coll `SynchronizedMap`
* @define coll synchronized map
*/
trait SynchronizedMap[A, B] extends Map[A, B] {
@@ -41,14 +41,14 @@ trait SynchronizedMap[A, B] extends Map[A, B] {
override def transform(f: (A, B) => B): this.type = synchronized[this.type] { super.transform(f) }
override def retain(p: (A, B) => Boolean): this.type = synchronized[this.type] { super.retain(p) }
@migration("`values` returns `Iterable[B]` rather than `Iterator[B]`.", "2.8.0")
- override def values: collection.Iterable[B] = synchronized { super.values }
+ override def values: scala.collection.Iterable[B] = synchronized { super.values }
override def valuesIterator: Iterator[B] = synchronized { super.valuesIterator }
override def clone(): Self = synchronized { super.clone() }
override def foreach[U](f: ((A, B)) => U) = synchronized { super.foreach(f) }
override def apply(key: A): B = synchronized { super.apply(key) }
- override def keySet: collection.Set[A] = synchronized { super.keySet }
+ override def keySet: scala.collection.Set[A] = synchronized { super.keySet }
@migration("`keys` returns `Iterable[A]` rather than `Iterator[A]`.", "2.8.0")
- override def keys: collection.Iterable[A] = synchronized { super.keys }
+ override def keys: scala.collection.Iterable[A] = synchronized { super.keys }
override def keysIterator: Iterator[A] = synchronized { super.keysIterator }
override def isEmpty: Boolean = synchronized { super.isEmpty }
override def contains(key: A): Boolean = synchronized {super.contains(key) }
diff --git a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
index 159b831..8dfc40b 100644
--- a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -20,7 +20,7 @@ package mutable
* @author Matthias Zenger
* @version 1.0, 03/05/2004
* @since 1
- * @define Coll SynchronizedPriorityQueue
+ * @define Coll `SynchronizedPriorityQueue`
* @define coll synchronized priority queue
*/
class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQueue[A] {
diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala
index 1dc8a41..9559d5e 100644
--- a/src/library/scala/collection/mutable/SynchronizedQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedQueue.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -21,7 +21,7 @@ package mutable
* @author Matthias Zenger
* @version 1.0, 03/05/2004
* @since 1
- * @define Coll SynchronizedQueue
+ * @define Coll `SynchronizedQueue`
* @define coll synchronized queue
*/
class SynchronizedQueue[A] extends Queue[A] {
@@ -60,6 +60,23 @@ class SynchronizedQueue[A] extends Queue[A] {
*/
override def dequeue(): A = synchronized { super.dequeue }
+ /** Returns the first element in the queue which satisfies the
+ * given predicate, and removes this element from the queue.
+ *
+ * @param p the predicate used for choosing the first element
+ * @return the first element of the queue for which p yields true
+ */
+ override def dequeueFirst(p: A => Boolean): Option[A] = synchronized { super.dequeueFirst(p) }
+
+ /** Returns all elements in the queue which satisfy the
+ * given predicate, and removes those elements from the queue.
+ *
+ * @param p the predicate used for choosing elements
+ * @return a sequence of all elements in the queue for which
+ * p yields true.
+ */
+ override def dequeueAll(p: A => Boolean): Seq[A] = synchronized { super.dequeueAll(p) }
+
/** Returns the first element in the queue, or throws an error if there
* is no element contained in the queue.
*
diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala
index c945a85..e4a4499 100644
--- a/src/library/scala/collection/mutable/SynchronizedSet.scala
+++ b/src/library/scala/collection/mutable/SynchronizedSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -20,7 +20,7 @@ import script._
* @author Matthias Zenger
* @version 1.0, 08/07/2003
* @since 1
- * @define Coll SynchronizedSet
+ * @define Coll `SynchronizedSet`
* @define coll synchronized set
*/
trait SynchronizedSet[A] extends Set[A] {
diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala
index a09ae21..5d7c9f6 100644
--- a/src/library/scala/collection/mutable/SynchronizedStack.scala
+++ b/src/library/scala/collection/mutable/SynchronizedStack.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -21,7 +21,7 @@ package mutable
* @author Matthias Zenger
* @version 1.0, 03/05/2004
* @since 1
- * @define Coll SynchronizedStack
+ * @define Coll `SynchronizedStack`
* @define coll synchronized stack
*/
class SynchronizedStack[A] extends Stack[A] {
diff --git a/src/library/scala/collection/mutable/Traversable.scala b/src/library/scala/collection/mutable/Traversable.scala
index b711e0b..e36ffc8 100644
--- a/src/library/scala/collection/mutable/Traversable.scala
+++ b/src/library/scala/collection/mutable/Traversable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -29,10 +29,10 @@ trait Traversable[A] extends scala.collection.Traversable[A]
/** $factoryInfo
* The current default implementation of a $Coll is an `ArrayBuffer`.
* @define coll mutable traversable collection
- * @define Coll mutable.Traversable
+ * @define Coll `mutable.Traversable`
*/
object Traversable extends TraversableFactory[Traversable] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = new GenericCanBuildFrom[A]
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Traversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
def newBuilder[A]: Builder[A, Traversable[A]] = new ArrayBuffer
}
diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala
new file mode 100644
index 0000000..5197af1
--- /dev/null
+++ b/src/library/scala/collection/mutable/TreeSet.scala
@@ -0,0 +1,123 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package mutable
+
+import generic._
+
+/**
+ * @define Coll `mutable.TreeSet`
+ * @define coll mutable tree set
+ * @factoryInfo
+ * Companion object of TreeSet providing factory related utilities.
+ *
+ * @author Lucien Pereira
+ *
+ */
+object TreeSet extends MutableSortedSetFactory[TreeSet] {
+ /**
+ * The empty set of this type
+ */
+ def empty[A](implicit ordering: Ordering[A]) = new TreeSet[A]()
+
+}
+
+/**
+ * A mutable SortedSet using an immutable AVL Tree as underlying data structure.
+ *
+ * @author Lucien Pereira
+ *
+ */
+class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with SetLike[A, TreeSet[A]]
+ with SortedSetLike[A, TreeSet[A]] with Set[A] with Serializable {
+
+ // Projection constructor
+ private def this(base: Option[TreeSet[A]], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]) {
+ this();
+ this.base = base
+ this.from = from
+ this.until = until
+ }
+
+ private var base: Option[TreeSet[A]] = None
+
+ private var from: Option[A] = None
+
+ private var until: Option[A] = None
+
+ private var avl: AVLTree[A] = Leaf
+
+ private var cardinality: Int = 0
+
+ def resolve: TreeSet[A] = base.getOrElse(this)
+
+ private def isLeftAcceptable(from: Option[A], ordering: Ordering[A])(a: A): Boolean =
+ from.map(x => ordering.gteq(a, x)).getOrElse(true)
+
+ private def isRightAcceptable(until: Option[A], ordering: Ordering[A])(a: A): Boolean =
+ until.map(x => ordering.lt(a, x)).getOrElse(true)
+
+ /**
+ * Cardinality store the set size, unfortunately a
+ * set view (given by rangeImpl)
+ * cannot take advantage of this optimisation
+ *
+ */
+ override def size: Int = base.map(_ => super.size).getOrElse(cardinality)
+
+ override def stringPrefix = "TreeSet"
+
+ override def empty: TreeSet[A] = TreeSet.empty
+
+ override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSet(Some(this), from, until)
+
+ override def -=(elem: A): this.type = {
+ try {
+ resolve.avl = resolve.avl.remove(elem, ordering)
+ resolve.cardinality = resolve.cardinality - 1
+ } catch {
+ case e: NoSuchElementException => ()
+ }
+ this
+ }
+
+ override def +=(elem: A): this.type = {
+ try {
+ resolve.avl = resolve.avl.insert(elem, ordering)
+ resolve.cardinality = resolve.cardinality + 1
+ } catch {
+ case e: IllegalArgumentException => ()
+ }
+ this
+ }
+
+ /**
+ * Thanks to the immutable nature of the
+ * underlying AVL Tree, we can share it with
+ * the clone. So clone complexity in time is O(1).
+ *
+ */
+ override def clone(): TreeSet[A] = {
+ val clone = new TreeSet[A](base, from, until)
+ clone.avl = resolve.avl
+ clone.cardinality = resolve.cardinality
+ clone
+ }
+
+ override def contains(elem: A): Boolean = {
+ isLeftAcceptable(from, ordering)(elem) &&
+ isRightAcceptable(until, ordering)(elem) &&
+ resolve.avl.contains(elem, ordering)
+ }
+
+ override def iterator: Iterator[A] = resolve.avl.iterator
+ .dropWhile(e => !isLeftAcceptable(from, ordering)(e))
+ .takeWhile(e => isRightAcceptable(until, ordering)(e))
+
+}
diff --git a/src/library/scala/collection/mutable/Undoable.scala b/src/library/scala/collection/mutable/Undoable.scala
index b5cadab..0c0e8fe 100644
--- a/src/library/scala/collection/mutable/Undoable.scala
+++ b/src/library/scala/collection/mutable/Undoable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala
index 9e0b313..9b48c8f 100644
--- a/src/library/scala/collection/mutable/UnrolledBuffer.scala
+++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala
@@ -1,8 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.collection.mutable
-import collection.Iterator
-import collection.generic._
-import annotation.tailrec
+import scala.collection.AbstractIterator
+import scala.collection.Iterator
+import scala.collection.generic._
+import scala.annotation.tailrec
+import scala.reflect.ClassTag
/** A buffer that stores elements in an unrolled linked list.
*
@@ -27,16 +37,17 @@ import annotation.tailrec
* should still be avoided for such a purpose.
*
* @define coll unrolled buffer
- * @define Coll UnrolledBuffer
+ * @define Coll `UnrolledBuffer`
* @author Aleksandar Prokopec
*
*/
@SerialVersionUID(1L)
-class UnrolledBuffer[T](implicit val manifest: ClassManifest[T])
-extends collection.mutable.Buffer[T]
- with collection.mutable.BufferLike[T, UnrolledBuffer[T]]
- with GenericClassManifestTraversableTemplate[T, UnrolledBuffer]
- with collection.mutable.Builder[T, UnrolledBuffer[T]]
+class UnrolledBuffer[T](implicit val tag: ClassTag[T])
+extends scala.collection.mutable.AbstractBuffer[T]
+ with scala.collection.mutable.Buffer[T]
+ with scala.collection.mutable.BufferLike[T, UnrolledBuffer[T]]
+ with GenericClassTagTraversableTemplate[T, UnrolledBuffer]
+ with scala.collection.mutable.Builder[T, UnrolledBuffer[T]]
with Serializable
{
import UnrolledBuffer.Unrolled
@@ -57,7 +68,7 @@ extends collection.mutable.Buffer[T]
private[collection] def calcNextLength(sz: Int) = sz
- def classManifestCompanion = UnrolledBuffer
+ def classTagCompanion = UnrolledBuffer
/** Concatenates the targer unrolled buffer to this unrolled buffer.
*
@@ -94,7 +105,7 @@ extends collection.mutable.Buffer[T]
sz = 0
}
- def iterator = new Iterator[T] {
+ def iterator: Iterator[T] = new AbstractIterator[T] {
var pos: Int = -1
var node: Unrolled[T] = headptr
scan()
@@ -137,12 +148,12 @@ extends collection.mutable.Buffer[T]
} else throw new IndexOutOfBoundsException(idx.toString)
def +=:(elem: T) = {
- headptr = headptr.prepend(elem)
+ headptr = headptr prepend elem
sz += 1
this
}
- def insertAll(idx: Int, elems: collection.Traversable[T]) =
+ def insertAll(idx: Int, elems: scala.collection.Traversable[T]) =
if (idx >= 0 && idx <= sz) {
headptr.insertAll(idx, elems, this)
sz += elems.size
@@ -150,8 +161,8 @@ extends collection.mutable.Buffer[T]
private def writeObject(out: java.io.ObjectOutputStream) {
out.defaultWriteObject
- out.writeInt(sz)
- for (elem <- this) out.writeObject(elem)
+ out writeInt sz
+ for (elem <- this) out writeObject elem
}
private def readObject(in: java.io.ObjectInputStream) {
@@ -169,15 +180,17 @@ extends collection.mutable.Buffer[T]
}
}
+ override def clone(): UnrolledBuffer[T] = new UnrolledBuffer[T] ++= this
+
override def stringPrefix = "UnrolledBuffer"
}
-object UnrolledBuffer extends ClassManifestTraversableFactory[UnrolledBuffer] {
+object UnrolledBuffer extends ClassTagTraversableFactory[UnrolledBuffer] {
/** $genericCanBuildFromInfo */
- implicit def canBuildFrom[T](implicit m: ClassManifest[T]): CanBuildFrom[Coll, T, UnrolledBuffer[T]] =
+ implicit def canBuildFrom[T](implicit t: ClassTag[T]): CanBuildFrom[Coll, T, UnrolledBuffer[T]] =
new GenericCanBuildFrom[T]
- def newBuilder[T](implicit m: ClassManifest[T]): Builder[T, UnrolledBuffer[T]] = new UnrolledBuffer[T]
+ def newBuilder[T](implicit t: ClassTag[T]): Builder[T, UnrolledBuffer[T]] = new UnrolledBuffer[T]
val waterline = 50
val waterlineDelim = 100
@@ -185,7 +198,7 @@ object UnrolledBuffer extends ClassManifestTraversableFactory[UnrolledBuffer] {
/** Unrolled buffer node.
*/
- class Unrolled[T: ClassManifest] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) {
+ class Unrolled[T: ClassTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) {
private[collection] def this() = this(0, new Array[T](unrolledlength), null, null)
private[collection] def this(b: UnrolledBuffer[T]) = this(0, new Array[T](unrolledlength), null, b)
@@ -198,7 +211,7 @@ object UnrolledBuffer extends ClassManifestTraversableFactory[UnrolledBuffer] {
this
} else {
next = new Unrolled[T](0, new Array[T](nextlength), null, buff)
- next.append(elem)
+ next append elem
}
def foreach[U](f: T => U) {
var unrolled = this
@@ -232,7 +245,7 @@ object UnrolledBuffer extends ClassManifestTraversableFactory[UnrolledBuffer] {
// allocate a new node and store element
// then make it point to this
val newhead = new Unrolled[T](buff)
- newhead.append(elem)
+ newhead append elem
newhead.next = this
newhead
}
@@ -272,7 +285,7 @@ object UnrolledBuffer extends ClassManifestTraversableFactory[UnrolledBuffer] {
if (next eq null) true else false // checks if last node was thrown out
} else false
- @tailrec final def insertAll(idx: Int, t: collection.Traversable[T], buffer: UnrolledBuffer[T]): Unit = if (idx < size) {
+ @tailrec final def insertAll(idx: Int, t: scala.collection.Traversable[T], buffer: UnrolledBuffer[T]): Unit = if (idx < size) {
// divide this node at the appropriate position and insert all into head
// update new next
val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff)
@@ -314,4 +327,3 @@ object UnrolledBuffer extends ClassManifestTraversableFactory[UnrolledBuffer] {
}
}
-
diff --git a/src/library/scala/collection/mutable/WeakHashMap.scala b/src/library/scala/collection/mutable/WeakHashMap.scala
index 89d7c7a..70e428c 100644
--- a/src/library/scala/collection/mutable/WeakHashMap.scala
+++ b/src/library/scala/collection/mutable/WeakHashMap.scala
@@ -1,19 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package mutable
-import JavaConversions._
import generic._
-
+import convert.Wrappers._
/** A hash map with references to entries which are weakly reachable. Entries are
* removed from this map when the key is no longer (strongly) referenced. This class wraps
@@ -26,7 +23,7 @@ import generic._
* @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#weak_hash_maps "Scala's Collection Library overview"]]
* section on `Weak Hash Maps` for more information.
*
- * @define Coll WeakHashMap
+ * @define Coll `WeakHashMap`
* @define coll weak hash map
* @define thatinfo the class of the returned collection. In the standard library configuration,
* `That` is always `WeakHashMap[A, B]` if the elements contained in the resulting collection are
@@ -46,7 +43,7 @@ class WeakHashMap[A, B] extends JMapWrapper[A, B](new java.util.WeakHashMap)
}
/** $factoryInfo
- * @define Coll WeakHashMap
+ * @define Coll `WeakHashMap`
* @define coll weak hash map
*/
object WeakHashMap extends MutableMapFactory[WeakHashMap] {
diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala
index 21e6230..b837240 100644
--- a/src/library/scala/collection/mutable/WrappedArray.scala
+++ b/src/library/scala/collection/mutable/WrappedArray.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,7 +11,8 @@
package scala.collection
package mutable
-import scala.reflect.ClassManifest
+import scala.reflect.ClassTag
+import scala.runtime.ScalaRunTime._
import scala.collection.generic._
import scala.collection.parallel.mutable.ParArray
@@ -23,7 +24,7 @@ import scala.collection.parallel.mutable.ParArray
* @author Martin Odersky, Stephane Micheloud
* @version 1.0
* @since 2.8
- * @define Coll WrappedArray
+ * @define Coll `WrappedArray`
* @define coll wrapped array
* @define orderDependent
* @define orderDependentFold
@@ -31,7 +32,8 @@ import scala.collection.parallel.mutable.ParArray
* @define willNotTerminateInf
*/
abstract class WrappedArray[T]
-extends IndexedSeq[T]
+extends AbstractSeq[T]
+ with IndexedSeq[T]
with ArrayLike[T, WrappedArray[T]]
with CustomParallelizable[T, ParArray[T]]
{
@@ -39,8 +41,11 @@ extends IndexedSeq[T]
override protected[this] def thisCollection: WrappedArray[T] = this
override protected[this] def toCollection(repr: WrappedArray[T]): WrappedArray[T] = repr
- /** The manifest of the element type */
- def elemManifest: ClassManifest[T]
+ /** The tag of the element type */
+ def elemTag: ClassTag[T]
+
+ @deprecated("use elemTag instead", "2.10.0")
+ def elemManifest: ClassManifest[T] = ClassManifest.fromClass[T](arrayElementClass(elemTag).asInstanceOf[Class[T]])
/** The length of the array */
def length: Int
@@ -56,21 +61,26 @@ extends IndexedSeq[T]
override def par = ParArray.handoff(array)
- override def toArray[U >: T : ClassManifest]: Array[U] =
- if (implicitly[ClassManifest[U]].erasure eq array.getClass.getComponentType)
+ private def elementClass: Class[_] =
+ arrayElementClass(array.getClass)
+
+ override def toArray[U >: T : ClassTag]: Array[U] = {
+ val thatElementClass = arrayElementClass(implicitly[ClassTag[U]])
+ if (elementClass eq thatElementClass)
array.asInstanceOf[Array[U]]
else
super.toArray[U]
+ }
override def stringPrefix = "WrappedArray"
/** Clones this object, including the underlying Array. */
- override def clone: WrappedArray[T] = WrappedArray make array.clone()
+ override def clone(): WrappedArray[T] = WrappedArray make array.clone()
/** Creates new builder for this collection ==> move to subclasses
*/
override protected[this] def newBuilder: Builder[T, WrappedArray[T]] =
- new WrappedArrayBuilder[T](elemManifest)
+ new WrappedArrayBuilder[T](elemTag)
}
@@ -100,7 +110,7 @@ object WrappedArray {
case x: Array[Unit] => new ofUnit(x)
}).asInstanceOf[WrappedArray[T]]
- implicit def canBuildFrom[T](implicit m: ClassManifest[T]): CanBuildFrom[WrappedArray[_], T, WrappedArray[T]] =
+ implicit def canBuildFrom[T](implicit m: ClassTag[T]): CanBuildFrom[WrappedArray[_], T, WrappedArray[T]] =
new CanBuildFrom[WrappedArray[_], T, WrappedArray[T]] {
def apply(from: WrappedArray[_]): Builder[T, WrappedArray[T]] =
ArrayBuilder.make[T]()(m) mapResult WrappedArray.make[T]
@@ -111,70 +121,70 @@ object WrappedArray {
def newBuilder[A]: Builder[A, IndexedSeq[A]] = new ArrayBuffer
final class ofRef[T <: AnyRef](val array: Array[T]) extends WrappedArray[T] with Serializable {
- lazy val elemManifest = ClassManifest.classType[T](array.getClass.getComponentType)
+ lazy val elemTag = ClassTag[T](arrayElementClass(array.getClass))
def length: Int = array.length
def apply(index: Int): T = array(index).asInstanceOf[T]
def update(index: Int, elem: T) { array(index) = elem }
}
final class ofByte(val array: Array[Byte]) extends WrappedArray[Byte] with Serializable {
- def elemManifest = ClassManifest.Byte
+ def elemTag = ClassTag.Byte
def length: Int = array.length
def apply(index: Int): Byte = array(index)
def update(index: Int, elem: Byte) { array(index) = elem }
}
final class ofShort(val array: Array[Short]) extends WrappedArray[Short] with Serializable {
- def elemManifest = ClassManifest.Short
+ def elemTag = ClassTag.Short
def length: Int = array.length
def apply(index: Int): Short = array(index)
def update(index: Int, elem: Short) { array(index) = elem }
}
final class ofChar(val array: Array[Char]) extends WrappedArray[Char] with Serializable {
- def elemManifest = ClassManifest.Char
+ def elemTag = ClassTag.Char
def length: Int = array.length
def apply(index: Int): Char = array(index)
def update(index: Int, elem: Char) { array(index) = elem }
}
final class ofInt(val array: Array[Int]) extends WrappedArray[Int] with Serializable {
- def elemManifest = ClassManifest.Int
+ def elemTag = ClassTag.Int
def length: Int = array.length
def apply(index: Int): Int = array(index)
def update(index: Int, elem: Int) { array(index) = elem }
}
final class ofLong(val array: Array[Long]) extends WrappedArray[Long] with Serializable {
- def elemManifest = ClassManifest.Long
+ def elemTag = ClassTag.Long
def length: Int = array.length
def apply(index: Int): Long = array(index)
def update(index: Int, elem: Long) { array(index) = elem }
}
final class ofFloat(val array: Array[Float]) extends WrappedArray[Float] with Serializable {
- def elemManifest = ClassManifest.Float
+ def elemTag = ClassTag.Float
def length: Int = array.length
def apply(index: Int): Float = array(index)
def update(index: Int, elem: Float) { array(index) = elem }
}
final class ofDouble(val array: Array[Double]) extends WrappedArray[Double] with Serializable {
- def elemManifest = ClassManifest.Double
+ def elemTag = ClassTag.Double
def length: Int = array.length
def apply(index: Int): Double = array(index)
def update(index: Int, elem: Double) { array(index) = elem }
}
final class ofBoolean(val array: Array[Boolean]) extends WrappedArray[Boolean] with Serializable {
- def elemManifest = ClassManifest.Boolean
+ def elemTag = ClassTag.Boolean
def length: Int = array.length
def apply(index: Int): Boolean = array(index)
def update(index: Int, elem: Boolean) { array(index) = elem }
}
final class ofUnit(val array: Array[Unit]) extends WrappedArray[Unit] with Serializable {
- def elemManifest = ClassManifest.Unit
+ def elemTag = ClassTag.Unit
def length: Int = array.length
def apply(index: Int): Unit = array(index)
def update(index: Int, elem: Unit) { array(index) = elem }
diff --git a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
index 9771a45..7e02103 100644
--- a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,23 +12,39 @@ package scala.collection
package mutable
import generic._
-import scala.reflect.ClassManifest
+import scala.reflect.ClassTag
+import scala.runtime.ScalaRunTime._
/** A builder class for arrays.
*
- * @tparam A type of elements that can be added to this builder.
- * @param manifest class manifest for objects of type `A`.
+ * @tparam A type of elements that can be added to this builder.
+ * @param tag class tag for objects of type `A`.
*
* @since 2.8
*/
-class WrappedArrayBuilder[A](manifest: ClassManifest[A]) extends Builder[A, WrappedArray[A]] {
+class WrappedArrayBuilder[A](tag: ClassTag[A]) extends Builder[A, WrappedArray[A]] {
+
+ @deprecated("use tag instead", "2.10.0")
+ val manifest: ClassTag[A] = tag
private var elems: WrappedArray[A] = _
private var capacity: Int = 0
private var size: Int = 0
private def mkArray(size: Int): WrappedArray[A] = {
- val newelems = manifest.newWrappedArray(size)
+ val runtimeClass = arrayElementClass(tag)
+ val newelems = runtimeClass match {
+ case java.lang.Byte.TYPE => new WrappedArray.ofByte(new Array[Byte](size)).asInstanceOf[WrappedArray[A]]
+ case java.lang.Short.TYPE => new WrappedArray.ofShort(new Array[Short](size)).asInstanceOf[WrappedArray[A]]
+ case java.lang.Character.TYPE => new WrappedArray.ofChar(new Array[Char](size)).asInstanceOf[WrappedArray[A]]
+ case java.lang.Integer.TYPE => new WrappedArray.ofInt(new Array[Int](size)).asInstanceOf[WrappedArray[A]]
+ case java.lang.Long.TYPE => new WrappedArray.ofLong(new Array[Long](size)).asInstanceOf[WrappedArray[A]]
+ case java.lang.Float.TYPE => new WrappedArray.ofFloat(new Array[Float](size)).asInstanceOf[WrappedArray[A]]
+ case java.lang.Double.TYPE => new WrappedArray.ofDouble(new Array[Double](size)).asInstanceOf[WrappedArray[A]]
+ case java.lang.Boolean.TYPE => new WrappedArray.ofBoolean(new Array[Boolean](size)).asInstanceOf[WrappedArray[A]]
+ case java.lang.Void.TYPE => new WrappedArray.ofUnit(new Array[Unit](size)).asInstanceOf[WrappedArray[A]]
+ case _ => new WrappedArray.ofRef[A with AnyRef](tag.newArray(size).asInstanceOf[Array[A with AnyRef]]).asInstanceOf[WrappedArray[A]]
+ }
if (this.size > 0) Array.copy(elems.array, 0, newelems.array, 0, this.size)
newelems
}
diff --git a/src/library/scala/collection/package.scala b/src/library/scala/collection/package.scala
index f0a0c40..26b061b 100644
--- a/src/library/scala/collection/package.scala
+++ b/src/library/scala/collection/package.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala
/**
@@ -6,7 +14,7 @@ package scala
* == Guide ==
*
* A detailed guide for the collections library is available
- * at [[http://www.scala-lang.org/docu/files/collections-api]].
+ * at [[http://docs.scala-lang.org/overviews/collections/introduction.html]].
*
* == Using Collections ==
*
@@ -37,7 +45,7 @@ package scala
*
* The most common way to create a collection is to use the companion objects as factories.
* Of these, the three most common
- * are [[scala.collection.immutable.Seq]], [[scala.collection.immutable.Set]], and [[scala.collection.immutable.Map]]. Their
+ * are [[scala.collection.Seq]], [[scala.collection.immutable.Set]], and [[scala.collection.immutable.Map]]. Their
* companion objects are all available
* as type aliases the either the [[scala]] package or in `scala.Predef`, and can be used
* like so:
@@ -53,13 +61,13 @@ package scala
* }}}
*
* It is also typical to use the [[scala.collection.immutable]] collections over those
- * in [[scala.collection.mutable]]; The types aliased in the [[scala]] package and
+ * in [[scala.collection.mutable]]; The types aliased in
* the `scala.Predef` object are the immutable versions.
*
* Also note that the collections library was carefully designed to include several implementations of
* each of the three basic collection types. These implementations have specific performance
* characteristics which are described
- * in [[http://www.scala-lang.org/docu/files/collections-api the guide]].
+ * in [[http://docs.scala-lang.org/overviews/collections/performance-characteristics.html the guide]].
*
* === Converting between Java Collections ===
*
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala
index d1453c9..00993c0 100644
--- a/src/library/scala/collection/parallel/Combiner.scala
+++ b/src/library/scala/collection/parallel/Combiner.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -33,9 +33,21 @@ import scala.collection.generic.Sizing
* @since 2.9
*/
trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
-//self: EnvironmentPassingCombiner[Elem, To] =>
- private[collection] final val tasksupport = getTaskSupport
-
+
+ @transient
+ @volatile
+ var _combinerTaskSupport = defaultTaskSupport
+
+ def combinerTaskSupport = {
+ val cts = _combinerTaskSupport
+ if (cts eq null) {
+ _combinerTaskSupport = defaultTaskSupport
+ defaultTaskSupport
+ } else cts
+ }
+
+ def combinerTaskSupport_=(cts: TaskSupport) = _combinerTaskSupport = cts
+
/** Combines the contents of the receiver builder and the `other` builder,
* producing a new builder containing both their elements.
*
@@ -63,6 +75,21 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
*/
def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo]
+ /** Returns `true` if this combiner has a thread-safe `+=` and is meant to be shared
+ * across several threads constructing the collection.
+ *
+ * By default, this method returns `false`.
+ */
+ def canBeShared: Boolean = false
+
+ /** Constructs the result and sets the appropriate tasksupport object to the resulting collection
+ * if this is applicable.
+ */
+ def resultWithTaskSupport: To = {
+ val res = result
+ setTaskSupport(res, combinerTaskSupport)
+ }
+
}
diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala
index 0b5faf1..2b24c88 100644
--- a/src/library/scala/collection/parallel/ParIterable.scala
+++ b/src/library/scala/collection/parallel/ParIterable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -24,7 +24,7 @@ import scala.collection.parallel.mutable.ParArray
* @author Aleksandar Prokopec
* @since 2.9
*
- * @define Coll ParIterable
+ * @define Coll `ParIterable`
* @define coll parallel iterable
*/
trait ParIterable[+T]
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 7c176ee..0f06ff3 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -24,11 +24,13 @@ import scala.collection.GenIterable
import scala.collection.GenTraversableOnce
import scala.collection.GenTraversable
import immutable.HashMapCombiner
+import scala.reflect.{ClassTag, classTag}
import java.util.concurrent.atomic.AtomicBoolean
-import annotation.unchecked.uncheckedVariance
-
+import scala.annotation.unchecked.uncheckedVariance
+import scala.annotation.unchecked.uncheckedStable
+import scala.language.{ higherKinds, implicitConversions }
/** A template trait for parallel collections of type `ParIterable[T]`.
@@ -58,7 +60,7 @@ import annotation.unchecked.uncheckedVariance
* }}}
*
* which returns an instance of `IterableSplitter[T]`, which is a subtype of `Splitter[T]`.
- * Parallel iterators have a method `remaining` to check the remaining number of elements,
+ * Splitters have a method `remaining` to check the remaining number of elements,
* and method `split` which is defined by splitters. Method `split` divides the splitters
* iterate over into disjunct subsets:
*
@@ -70,6 +72,10 @@ import annotation.unchecked.uncheckedVariance
* very fast operation which simply creates wrappers around the receiver collection.
* This can be repeated recursively.
*
+ * Tasks are scheduled for execution through a
+ * [[scala.collection.parallel.TaskSupport]] object, which can be changed
+ * through the `tasksupport` setter of the collection.
+ *
* Method `newCombiner` produces a new combiner. Combiners are an extension of builders.
* They provide a method `combine` which combines two combiners and returns a combiner
* containing elements of both combiners.
@@ -96,18 +102,7 @@ import annotation.unchecked.uncheckedVariance
* The combination of methods `toMap`, `toSeq` or `toSet` along with `par` and `seq` is a flexible
* way to change between different collection types.
*
- * The method:
- *
- * {{{
- * def threshold(sz: Int, p: Int): Int
- * }}}
- *
- * provides an estimate on the minimum number of elements the collection has before
- * the splitting stops and depends on the number of elements in the collection. A rule of the
- * thumb is the number of elements divided by 8 times the parallelism level. This method may
- * be overridden in concrete implementations if necessary.
- *
- * Since this trait extends the `Iterable` trait, methods like `size` must also
+ * Since this trait extends the `GenIterable` trait, methods like `size` must also
* be implemented in concrete collections, while `iterator` forwards to `splitter` by
* default.
*
@@ -127,7 +122,7 @@ import annotation.unchecked.uncheckedVariance
* which do not know the number of elements remaining. To do this, the new collection implementation must override
* `isStrictSplitterCollection` to `false`. This will make some operations unavailable.
*
- * To create a new parallel collection, extend the `ParIterable` trait, and implement `size`, `parallelIterator`,
+ * To create a new parallel collection, extend the `ParIterable` trait, and implement `size`, `splitter`,
* `newCombiner` and `seq`. Having an implicit combiner factory requires extending this trait in addition, as
* well as providing a companion object, as with regular collections.
*
@@ -166,52 +161,75 @@ extends GenIterableLike[T, Repr]
{
self: ParIterableLike[T, Repr, Sequential] =>
- import tasksupport._
+ @transient
+ @volatile
+ private var _tasksupport = defaultTaskSupport
- def seq: Sequential
+ protected def initTaskSupport() {
+ _tasksupport = defaultTaskSupport
+ }
- def repr: Repr = this.asInstanceOf[Repr]
+ /** The task support object which is responsible for scheduling and
+ * load-balancing tasks to processors.
+ *
+ * @see [[scala.collection.parallel.TaskSupport]]
+ */
+ def tasksupport = {
+ val ts = _tasksupport
+ if (ts eq null) {
+ _tasksupport = defaultTaskSupport
+ defaultTaskSupport
+ } else ts
+ }
- /** Parallel iterators are split iterators that have additional accessor and
- * transformer methods defined in terms of methods `next` and `hasNext`.
- * When creating a new parallel collection, one might want to override these
- * new methods to make them more efficient.
+ /** Changes the task support object which is responsible for scheduling and
+ * load-balancing tasks to processors.
*
- * Parallel iterators are augmented with signalling capabilities. This means
- * that a signalling object can be assigned to them as needed.
+ * A task support object can be changed in a parallel collection after it
+ * has been created, but only during a quiescent period, i.e. while there
+ * are no concurrent invocations to parallel collection methods.
+ *
+ * Here is a way to change the task support of a parallel collection:
+ *
+ * {{{
+ * import scala.collection.parallel._
+ * val pc = mutable.ParArray(1, 2, 3)
+ * pc.tasksupport = new ForkJoinTaskSupport(
+ * new scala.concurrent.forkjoin.ForkJoinPool(2))
+ * }}}
*
- * The self-type ensures that signal context passing behaviour gets mixed in
- * a concrete object instance.
- */
- trait ParIterator extends IterableSplitter[T] {
- me: SignalContextPassingIterator[ParIterator] =>
- var signalDelegate: Signalling = IdleSignalling
- def repr = self.repr
- def split: Seq[IterableSplitter[T]]
- }
+ * @see [[scala.collection.parallel.TaskSupport]]
+ */
+ def tasksupport_=(ts: TaskSupport) = _tasksupport = ts
- /** A stackable modification that ensures signal contexts get passed along the iterators.
- * A self-type requirement in `ParIterator` ensures that this trait gets mixed into
- * concrete iterators.
- */
- trait SignalContextPassingIterator[+IterRepr <: ParIterator] extends ParIterator {
- // Note: This functionality must be factored out to this inner trait to avoid boilerplate.
- // Also, one could omit the cast below. However, this leads to return type inconsistencies,
- // due to inability to override the return type of _abstract overrides_.
- // Be aware that this stackable modification has to be subclassed, so it shouldn't be rigid
- // on the type of iterators it splits.
- // The alternative is some boilerplate - better to tradeoff some type safety to avoid it here.
- abstract override def split: Seq[IterRepr] = {
- val pits = super.split
- pits foreach { _.signalDelegate = signalDelegate }
- pits.asInstanceOf[Seq[IterRepr]]
- }
- }
+ def seq: Sequential
+
+ def repr: Repr = this.asInstanceOf[Repr]
+
+ final def isTraversableAgain = true
def hasDefiniteSize = true
+ def isEmpty = size == 0
+
def nonEmpty = size != 0
+ def head = iterator.next
+
+ def headOption = if (nonEmpty) Some(head) else None
+
+ def tail = drop(1)
+
+ def last = {
+ var lst = head
+ for (x <- this.seq) lst = x
+ lst
+ }
+
+ def lastOption = if (nonEmpty) Some(last) else None
+
+ def init = take(size - 1)
+
/** Creates a new parallel iterator used to traverse the elements of this parallel collection.
* This iterator is more specific than the iterator of the returned by `iterator`, and augmented
* with additional accessor and transformer methods.
@@ -242,22 +260,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
*/
def isStrictSplitterCollection = true
- /** Some minimal number of elements after which this collection should be handled
- * sequentially by different processors.
- *
- * This method depends on the size of the collection and the parallelism level, which
- * are both specified as arguments.
- *
- * @param sz the size based on which to compute the threshold
- * @param p the parallelism level based on which to compute the threshold
- * @return the maximum number of elements for performing operations sequentially
- */
- def threshold(sz: Int, p: Int): Int = thresholdFromSize(sz, p)
-
/** The `newBuilder` operation returns a parallel builder assigned to this collection's fork/join pool.
* This method forwards the call to `newCombiner`.
*/
- //protected[this] def newBuilder: collection.mutable.Builder[T, Repr] = newCombiner
+ //protected[this] def newBuilder: scala.collection.mutable.Builder[T, Repr] = newCombiner
/** Optionally reuses an existing combiner for better performance. By default it doesn't - subclasses may override this behaviour.
* The provided combiner `oldc` that can potentially be reused will be either some combiner from the previous computational task, or `None` if there
@@ -275,16 +281,19 @@ self: ParIterableLike[T, Repr, Sequential] =>
trait TaskOps[R, Tp] {
def mapResult[R1](mapping: R => R1): ResultMapping[R, Tp, R1]
+ // public method with inaccessible types in parameters
def compose[R3, R2, Tp2](t2: SSCTask[R2, Tp2])(resCombiner: (R, R2) => R3): SeqComposite[R, R2, R3, SSCTask[R, Tp], SSCTask[R2, Tp2]]
def parallel[R3, R2, Tp2](t2: SSCTask[R2, Tp2])(resCombiner: (R, R2) => R3): ParComposite[R, R2, R3, SSCTask[R, Tp], SSCTask[R2, Tp2]]
}
trait BuilderOps[Elem, To] {
trait Otherwise[Cmb] {
- def otherwise(notbody: => Unit)(implicit m: ClassManifest[Cmb]): Unit
+ def otherwise(notbody: => Unit)(implicit t: ClassTag[Cmb]): Unit
}
def ifIs[Cmb](isbody: Cmb => Unit): Otherwise[Cmb]
+ def isCombiner: Boolean
+ def asCombiner: Combiner[Elem, To]
}
trait SignallingOps[PI <: DelegatedSignalling] {
@@ -321,10 +330,12 @@ self: ParIterableLike[T, Repr, Sequential] =>
protected implicit def builder2ops[Elem, To](cb: Builder[Elem, To]) = new BuilderOps[Elem, To] {
def ifIs[Cmb](isbody: Cmb => Unit) = new Otherwise[Cmb] {
- def otherwise(notbody: => Unit)(implicit m: ClassManifest[Cmb]) {
- if (cb.getClass == m.erasure) isbody(cb.asInstanceOf[Cmb]) else notbody
+ def otherwise(notbody: => Unit)(implicit t: ClassTag[Cmb]) {
+ if (cb.getClass == t.runtimeClass) isbody(cb.asInstanceOf[Cmb]) else notbody
}
}
+ def isCombiner = cb.isInstanceOf[Combiner[_, _]]
+ def asCombiner = cb.asInstanceOf[Combiner[Elem, To]]
}
protected[this] def bf2seq[S, That](bf: CanBuildFrom[Repr, S, That]) = new CanBuildFrom[Sequential, S, That] {
@@ -360,7 +371,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* if this $coll is empty.
*/
def reduce[U >: T](op: (U, U) => U): U = {
- executeAndWaitResult(new Reduce(op, splitter) mapResult { _.get })
+ tasksupport.executeAndWaitResult(new Reduce(op, splitter) mapResult { _.get })
}
/** Optionally reduces the elements of this sequence using the specified associative binary operator.
@@ -395,7 +406,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return the result of applying fold operator `op` between all the elements and `z`
*/
def fold[U >: T](z: U)(op: (U, U) => U): U = {
- executeAndWaitResult(new Fold(z, op, splitter))
+ tasksupport.executeAndWaitResult(new Fold(z, op, splitter))
}
/** Aggregates the results of applying an operator to subsequent elements.
@@ -427,13 +438,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @param combop an associative operator used to combine results from different partitions
*/
def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
- executeAndWaitResult(new Aggregate(z, seqop, combop, splitter))
+ tasksupport.executeAndWaitResult(new Aggregate(z, seqop, combop, splitter))
}
- def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op)
-
- def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op)
-
def foldLeft[S](z: S)(op: (S, T) => S): S = seq.foldLeft(z)(op)
def foldRight[S](z: S)(op: (T, S) => S): S = seq.foldRight(z)(op)
@@ -446,47 +453,33 @@ self: ParIterableLike[T, Repr, Sequential] =>
def reduceRightOption[U >: T](op: (T, U) => U): Option[U] = seq.reduceRightOption(op)
- /*
- /** Applies a function `f` to all the elements of $coll. Does so in a nondefined order,
- * and in parallel.
- *
- * $undefinedorder
- *
- * @tparam U the result type of the function applied to each element, which is always discarded
- * @param f function applied to each element
- */
- def pareach[U](f: T => U): Unit = {
- executeAndWaitResult(new Foreach(f, splitter))
- }
- */
-
- /** Applies a function `f` to all the elements of $coll in a sequential order.
+ /** Applies a function `f` to all the elements of $coll in a undefined order.
*
* @tparam U the result type of the function applied to each element, which is always discarded
* @param f function applied to each element
*/
def foreach[U](f: T => U) = {
- executeAndWaitResult(new Foreach(f, splitter))
+ tasksupport.executeAndWaitResult(new Foreach(f, splitter))
}
def count(p: T => Boolean): Int = {
- executeAndWaitResult(new Count(p, splitter))
+ tasksupport.executeAndWaitResult(new Count(p, splitter))
}
def sum[U >: T](implicit num: Numeric[U]): U = {
- executeAndWaitResult(new Sum[U](num, splitter))
+ tasksupport.executeAndWaitResult(new Sum[U](num, splitter))
}
def product[U >: T](implicit num: Numeric[U]): U = {
- executeAndWaitResult(new Product[U](num, splitter))
+ tasksupport.executeAndWaitResult(new Product[U](num, splitter))
}
def min[U >: T](implicit ord: Ordering[U]): T = {
- executeAndWaitResult(new Min(ord, splitter) mapResult { _.get }).asInstanceOf[T]
+ tasksupport.executeAndWaitResult(new Min(ord, splitter) mapResult { _.get }).asInstanceOf[T]
}
def max[U >: T](implicit ord: Ordering[U]): T = {
- executeAndWaitResult(new Max(ord, splitter) mapResult { _.get }).asInstanceOf[T]
+ tasksupport.executeAndWaitResult(new Max(ord, splitter) mapResult { _.get }).asInstanceOf[T]
}
def maxBy[S](f: T => S)(implicit cmp: Ordering[S]): T = {
@@ -501,38 +494,47 @@ self: ParIterableLike[T, Repr, Sequential] =>
reduce((x, y) => if (cmp.lteq(f(x), f(y))) x else y)
}
- def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new Map[S, That](f, pbf, splitter) mapResult { _.result })
- } otherwise seq.map(f)(bf2seq(bf))
+ def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) {
+ tasksupport.executeAndWaitResult(new Map[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport })
+ } else setTaskSupport(seq.map(f)(bf2seq(bf)), tasksupport)
+ /*bf ifParallel { pbf =>
+ tasksupport.executeAndWaitResult(new Map[S, That](f, pbf, splitter) mapResult { _.result })
+ } otherwise seq.map(f)(bf2seq(bf))*/
- def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new Collect[S, That](pf, pbf, splitter) mapResult { _.result })
- } otherwise seq.collect(pf)(bf2seq(bf))
+ def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) {
+ tasksupport.executeAndWaitResult(new Collect[S, That](pf, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport })
+ } else setTaskSupport(seq.collect(pf)(bf2seq(bf)), tasksupport)
+ /*bf ifParallel { pbf =>
+ tasksupport.executeAndWaitResult(new Collect[S, That](pf, pbf, splitter) mapResult { _.result })
+ } otherwise seq.collect(pf)(bf2seq(bf))*/
- def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new FlatMap[S, That](f, pbf, splitter) mapResult { _.result })
- } otherwise seq.flatMap(f)(bf2seq(bf))
+ def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) {
+ tasksupport.executeAndWaitResult(new FlatMap[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.resultWithTaskSupport })
+ } else setTaskSupport(seq.flatMap(f)(bf2seq(bf)), tasksupport)
+ /*bf ifParallel { pbf =>
+ tasksupport.executeAndWaitResult(new FlatMap[S, That](f, pbf, splitter) mapResult { _.result })
+ } otherwise seq.flatMap(f)(bf2seq(bf))*/
/** Tests whether a predicate holds for all elements of this $coll.
*
* $abortsignalling
*
- * @param p a predicate used to test elements
- * @return true if `p` holds for all elements, false otherwise
+ * @param pred a predicate used to test elements
+ * @return true if `p` holds for all elements, false otherwise
*/
def forall(pred: T => Boolean): Boolean = {
- executeAndWaitResult(new Forall(pred, splitter assign new DefaultSignalling with VolatileAbort))
+ tasksupport.executeAndWaitResult(new Forall(pred, splitter assign new DefaultSignalling with VolatileAbort))
}
/** Tests whether a predicate holds for some element of this $coll.
*
* $abortsignalling
*
- * @param p a predicate used to test elements
- * @return true if `p` holds for some element, false otherwise
+ * @param pred a predicate used to test elements
+ * @return true if `p` holds for some element, false otherwise
*/
def exists(pred: T => Boolean): Boolean = {
- executeAndWaitResult(new Exists(pred, splitter assign new DefaultSignalling with VolatileAbort))
+ tasksupport.executeAndWaitResult(new Exists(pred, splitter assign new DefaultSignalling with VolatileAbort))
}
/** Finds some element in the collection for which the predicate holds, if such
@@ -543,23 +545,56 @@ self: ParIterableLike[T, Repr, Sequential] =>
*
* $abortsignalling
*
- * @param p predicate used to test the elements
- * @return an option value with the element if such an element exists, or `None` otherwise
+ * @param pred predicate used to test the elements
+ * @return an option value with the element if such an element exists, or `None` otherwise
*/
def find(pred: T => Boolean): Option[T] = {
- executeAndWaitResult(new Find(pred, splitter assign new DefaultSignalling with VolatileAbort))
+ tasksupport.executeAndWaitResult(new Find(pred, splitter assign new DefaultSignalling with VolatileAbort))
}
- protected[this] def cbfactory ={
- () => newCombiner
+ /** Creates a combiner factory. Each combiner factory instance is used
+ * once per invocation of a parallel transformer method for a single
+ * collection.
+ *
+ * The default combiner factory creates a new combiner every time it
+ * is requested, unless the combiner is thread-safe as indicated by its
+ * `canBeShared` method. In this case, the method returns a factory which
+ * returns the same combiner each time. This is typically done for
+ * concurrent parallel collections, the combiners of which allow
+ * thread safe access.
+ */
+ protected[this] def combinerFactory = {
+ val combiner = newCombiner
+ combiner.combinerTaskSupport = tasksupport
+ if (combiner.canBeShared) new CombinerFactory[T, Repr] {
+ val shared = combiner
+ def apply() = shared
+ def doesShareCombiners = true
+ } else new CombinerFactory[T, Repr] {
+ def apply() = newCombiner
+ def doesShareCombiners = false
+ }
+ }
+
+ protected[this] def combinerFactory[S, That](cbf: () => Combiner[S, That]) = {
+ val combiner = cbf()
+ combiner.combinerTaskSupport = tasksupport
+ if (combiner.canBeShared) new CombinerFactory[S, That] {
+ val shared = combiner
+ def apply() = shared
+ def doesShareCombiners = true
+ } else new CombinerFactory[S, That] {
+ def apply() = cbf()
+ def doesShareCombiners = false
+ }
}
def filter(pred: T => Boolean): Repr = {
- executeAndWaitResult(new Filter(pred, cbfactory, splitter) mapResult { _.result })
+ tasksupport.executeAndWaitResult(new Filter(pred, combinerFactory, splitter) mapResult { _.resultWithTaskSupport })
}
def filterNot(pred: T => Boolean): Repr = {
- executeAndWaitResult(new FilterNot(pred, cbfactory, splitter) mapResult { _.result })
+ tasksupport.executeAndWaitResult(new FilterNot(pred, combinerFactory, splitter) mapResult { _.resultWithTaskSupport })
}
def ++[U >: T, That](that: GenTraversableOnce[U])(implicit bf: CanBuildFrom[Repr, U, That]): That = {
@@ -567,49 +602,54 @@ self: ParIterableLike[T, Repr, Sequential] =>
// println("case both are parallel")
val other = that.asParIterable
val pbf = bf.asParallel
- val copythis = new Copy(() => pbf(repr), splitter)
+ val cfactory = combinerFactory(() => pbf(repr))
+ val copythis = new Copy(cfactory, splitter)
val copythat = wrap {
- val othtask = new other.Copy(() => pbf(self.repr), other.splitter)
+ val othtask = new other.Copy(cfactory, other.splitter)
tasksupport.executeAndWaitResult(othtask)
}
val task = (copythis parallel copythat) { _ combine _ } mapResult {
- _.result
+ _.resultWithTaskSupport
}
- executeAndWaitResult(task)
- } else if (bf.isParallel) {
+ tasksupport.executeAndWaitResult(task)
+ } else if (bf(repr).isCombiner) {
// println("case parallel builder, `that` not parallel")
- val pbf = bf.asParallel
- val copythis = new Copy(() => pbf(repr), splitter)
+ val copythis = new Copy(combinerFactory(() => bf(repr).asCombiner), splitter)
val copythat = wrap {
- val cb = pbf(repr)
+ val cb = bf(repr).asCombiner
for (elem <- that.seq) cb += elem
cb
}
- executeAndWaitResult((copythis parallel copythat) { _ combine _ } mapResult { _.result })
+ tasksupport.executeAndWaitResult((copythis parallel copythat) { _ combine _ } mapResult { _.resultWithTaskSupport })
} else {
// println("case not a parallel builder")
val b = bf(repr)
this.splitter.copy2builder[U, That, Builder[U, That]](b)
for (elem <- that.seq) b += elem
- b.result
+ setTaskSupport(b.result, tasksupport)
}
}
def partition(pred: T => Boolean): (Repr, Repr) = {
- executeAndWaitResult(new Partition(pred, cbfactory, splitter) mapResult { p => (p._1.result, p._2.result) })
+ tasksupport.executeAndWaitResult(
+ new Partition(pred, combinerFactory, combinerFactory, splitter) mapResult {
+ p => (p._1.resultWithTaskSupport, p._2.resultWithTaskSupport)
+ }
+ )
}
def groupBy[K](f: T => K): immutable.ParMap[K, Repr] = {
- executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], splitter) mapResult {
- rcb => rcb.groupByKey(cbfactory)
+ val r = tasksupport.executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], splitter) mapResult {
+ rcb => rcb.groupByKey(() => combinerFactory())
})
+ setTaskSupport(r, tasksupport)
}
def take(n: Int): Repr = {
val actualn = if (size > n) n else size
if (actualn < MIN_FOR_COPY) take_sequential(actualn)
- else executeAndWaitResult(new Take(actualn, cbfactory, splitter) mapResult {
- _.result
+ else tasksupport.executeAndWaitResult(new Take(actualn, combinerFactory, splitter) mapResult {
+ _.resultWithTaskSupport
})
}
@@ -622,13 +662,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
cb += it.next
left -= 1
}
- cb.result
+ cb.resultWithTaskSupport
}
def drop(n: Int): Repr = {
val actualn = if (size > n) n else size
if ((size - actualn) < MIN_FOR_COPY) drop_sequential(actualn)
- else executeAndWaitResult(new Drop(actualn, cbfactory, splitter) mapResult { _.result })
+ else tasksupport.executeAndWaitResult(new Drop(actualn, combinerFactory, splitter) mapResult { _.resultWithTaskSupport })
}
private def drop_sequential(n: Int) = {
@@ -636,14 +676,14 @@ self: ParIterableLike[T, Repr, Sequential] =>
val cb = newCombiner
cb.sizeHint(size - n)
while (it.hasNext) cb += it.next
- cb.result
+ cb.resultWithTaskSupport
}
override def slice(unc_from: Int, unc_until: Int): Repr = {
val from = unc_from min size max 0
val until = unc_until min size max from
if ((until - from) <= MIN_FOR_COPY) slice_sequential(from, until)
- else executeAndWaitResult(new Slice(from, until, cbfactory, splitter) mapResult { _.result })
+ else tasksupport.executeAndWaitResult(new Slice(from, until, combinerFactory, splitter) mapResult { _.resultWithTaskSupport })
}
private def slice_sequential(from: Int, until: Int): Repr = {
@@ -654,11 +694,15 @@ self: ParIterableLike[T, Repr, Sequential] =>
cb += it.next
left -= 1
}
- cb.result
+ cb.resultWithTaskSupport
}
def splitAt(n: Int): (Repr, Repr) = {
- executeAndWaitResult(new SplitAt(n, cbfactory, splitter) mapResult { p => (p._1.result, p._2.result) })
+ tasksupport.executeAndWaitResult(
+ new SplitAt(n, combinerFactory, combinerFactory, splitter) mapResult {
+ p => (p._1.resultWithTaskSupport, p._2.resultWithTaskSupport)
+ }
+ )
}
/** Computes a prefix scan of the elements of the collection.
@@ -669,27 +713,27 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @tparam That type of the resulting collection
* @param z neutral element for the operator `op`
* @param op the associative operator for the scan
- * @param cbf combiner factory which provides a combiner
+ * @param bf $bfinfo
* @return a collection containing the prefix scan of the elements in the original collection
*
* @usecase def scan(z: T)(op: (T, T) => T): $Coll[T]
+ * @inheritdoc
*
- * @return a new $coll containing the prefix scan of the elements in this $coll
+ * @return a new $coll containing the prefix scan of the elements in this $coll
*/
- def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf.isParallel) {
- val cbf = bf.asParallel
- if (parallelismLevel > 1) {
- if (size > 0) executeAndWaitResult(new CreateScanTree(0, size, z, op, splitter) mapResult {
- tree => executeAndWaitResult(new FromScanTree(tree, z, op, cbf) mapResult {
- cb => cb.result
+ def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) {
+ if (tasksupport.parallelismLevel > 1) {
+ if (size > 0) tasksupport.executeAndWaitResult(new CreateScanTree(0, size, z, op, splitter) mapResult {
+ tree => tasksupport.executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => bf(repr).asCombiner)) mapResult {
+ cb => cb.resultWithTaskSupport
})
- }) else (cbf(self.repr) += z).result
- } else seq.scan(z)(op)(bf2seq(bf))
- } else seq.scan(z)(op)(bf2seq(bf))
+ }) else setTaskSupport((bf(repr) += z).result, tasksupport)
+ } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport)
+ } else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport)
- def scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = seq.scanLeft(z)(op)(bf2seq(bf))
+ def scanLeft[S, That](z: S)(op: (S, T) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = setTaskSupport(seq.scanLeft(z)(op)(bf2seq(bf)), tasksupport)
- def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = seq.scanRight(z)(op)(bf2seq(bf))
+ def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[Repr, S, That]) = setTaskSupport(seq.scanRight(z)(op)(bf2seq(bf)), tasksupport)
/** Takes the longest prefix of elements that satisfy the predicate.
*
@@ -700,9 +744,19 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return the longest prefix of this $coll of elements that satisy the predicate `pred`
*/
def takeWhile(pred: T => Boolean): Repr = {
- val cntx = new DefaultSignalling with AtomicIndexFlag
- cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new TakeWhile(0, pred, cbfactory, splitter assign cntx) mapResult { _._1.result })
+ val cbf = combinerFactory
+ if (cbf.doesShareCombiners) {
+ val parseqspan = toSeq.takeWhile(pred)
+ tasksupport.executeAndWaitResult(new Copy(combinerFactory, parseqspan.splitter) mapResult {
+ _.resultWithTaskSupport
+ })
+ } else {
+ val cntx = new DefaultSignalling with AtomicIndexFlag
+ cntx.setIndexFlag(Int.MaxValue)
+ tasksupport.executeAndWaitResult(new TakeWhile(0, pred, combinerFactory, splitter assign cntx) mapResult {
+ _._1.resultWithTaskSupport
+ })
+ }
}
/** Splits this $coll into a prefix/suffix pair according to a predicate.
@@ -715,11 +769,22 @@ self: ParIterableLike[T, Repr, Sequential] =>
* the elements satisfy `pred`, and the rest of the collection
*/
def span(pred: T => Boolean): (Repr, Repr) = {
- val cntx = new DefaultSignalling with AtomicIndexFlag
- cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new Span(0, pred, cbfactory, splitter assign cntx) mapResult {
- p => (p._1.result, p._2.result)
- })
+ val cbf = combinerFactory
+ if (cbf.doesShareCombiners) {
+ val (xs, ys) = toSeq.span(pred)
+ val copyxs = new Copy(combinerFactory, xs.splitter) mapResult { _.resultWithTaskSupport }
+ val copyys = new Copy(combinerFactory, ys.splitter) mapResult { _.resultWithTaskSupport }
+ val copyall = (copyxs parallel copyys) {
+ (xr, yr) => (xr, yr)
+ }
+ tasksupport.executeAndWaitResult(copyall)
+ } else {
+ val cntx = new DefaultSignalling with AtomicIndexFlag
+ cntx.setIndexFlag(Int.MaxValue)
+ tasksupport.executeAndWaitResult(new Span(0, pred, combinerFactory, combinerFactory, splitter assign cntx) mapResult {
+ p => (p._1.resultWithTaskSupport, p._2.resultWithTaskSupport)
+ })
+ }
}
/** Drops all elements in the longest prefix of elements that satisfy the predicate,
@@ -735,7 +800,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
def dropWhile(pred: T => Boolean): Repr = {
val cntx = new DefaultSignalling with AtomicIndexFlag
cntx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new Span(0, pred, cbfactory, splitter assign cntx) mapResult { _._2.result })
+ tasksupport.executeAndWaitResult(
+ new Span(0, pred, combinerFactory, combinerFactory, splitter assign cntx) mapResult {
+ _._2.resultWithTaskSupport
+ }
+ )
}
def copyToArray[U >: T](xs: Array[U]) = copyToArray(xs, 0)
@@ -743,31 +812,33 @@ self: ParIterableLike[T, Repr, Sequential] =>
def copyToArray[U >: T](xs: Array[U], start: Int) = copyToArray(xs, start, xs.length - start)
def copyToArray[U >: T](xs: Array[U], start: Int, len: Int) = if (len > 0) {
- executeAndWaitResult(new CopyToArray(start, len, xs, splitter))
+ tasksupport.executeAndWaitResult(new CopyToArray(start, len, xs, splitter))
}
def sameElements[U >: T](that: GenIterable[U]) = seq.sameElements(that)
- def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
- val pbf = bf.asParallel
+ def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
val thatseq = that.asParSeq
- executeAndWaitResult(new Zip(pbf, splitter, thatseq.splitter) mapResult { _.result });
- } else seq.zip(that)(bf2seq(bf))
+ tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport });
+ } else setTaskSupport(seq.zip(that)(bf2seq(bf)), tasksupport)
def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, false)
- def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
- val pbf = bf.asParallel
+ def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
val thatseq = that.asParSeq
- executeAndWaitResult(new ZipAll(size max thatseq.length, thisElem, thatElem, pbf, splitter, thatseq.splitter) mapResult { _.result });
- } else seq.zipAll(that, thisElem, thatElem)(bf2seq(bf))
+ tasksupport.executeAndWaitResult(
+ new ZipAll(size max thatseq.length, thisElem, thatElem, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult {
+ _.resultWithTaskSupport
+ }
+ );
+ } else setTaskSupport(seq.zipAll(that, thisElem, thatElem)(bf2seq(bf)), tasksupport)
protected def toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = {
- executeAndWaitResult(new ToParCollection(cbf, splitter) mapResult { _.result });
+ tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport });
}
protected def toParMap[K, V, That](cbf: () => Combiner[(K, V), That])(implicit ev: T <:< (K, V)): That = {
- executeAndWaitResult(new ToParMap(cbf, splitter)(ev) mapResult { _.result })
+ tasksupport.executeAndWaitResult(new ToParMap(combinerFactory(cbf), splitter)(ev) mapResult { _.resultWithTaskSupport })
}
def view = new ParIterableView[T, Repr, Sequential] {
@@ -779,7 +850,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def size = splitter.remaining
}
- override def toArray[U >: T: ClassManifest]: Array[U] = {
+ override def toArray[U >: T: ClassTag]: Array[U] = {
val arr = new Array[U](size)
copyToArray(arr)
arr
@@ -787,7 +858,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toList: List[T] = seq.toList
- override def toIndexedSeq[U >: T]: collection.immutable.IndexedSeq[U] = seq.toIndexedSeq[U]
+ override def toIndexedSeq: scala.collection.immutable.IndexedSeq[T] = seq.toIndexedSeq
override def toStream: Stream[T] = seq.toStream
@@ -795,9 +866,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
// the methods below are overridden
- override def toBuffer[U >: T]: collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers?
+ override def toBuffer[U >: T]: scala.collection.mutable.Buffer[U] = seq.toBuffer // have additional, parallel buffers?
- override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]] // TODO add ParTraversable[T]
+ override def toTraversable: GenTraversable[T] = this.asInstanceOf[GenTraversable[T]]
override def toIterable: ParIterable[T] = this.asInstanceOf[ParIterable[T]]
@@ -806,7 +877,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toSet[U >: T]: immutable.ParSet[U] = toParCollection[U, immutable.ParSet[U]](() => immutable.ParSet.newCombiner[U])
override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V])
+
+ override def toVector: Vector[T] = to[Vector]
+ override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = if (cbf().isCombiner) {
+ toParCollection[T, Col[T]](() => cbf().asCombiner)
+ } else seq.to(cbf)
+
/* tasks */
protected trait StrictSplitterCheckTask[R, Tp] extends Task[R, Tp] {
@@ -824,8 +901,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
extends StrictSplitterCheckTask[R, Tp] {
protected[this] val pit: IterableSplitter[T]
protected[this] def newSubtask(p: IterableSplitter[T]): Accessor[R, Tp]
- def shouldSplitFurther = pit.remaining > threshold(size, parallelismLevel)
- def split = pit.split.map(newSubtask(_)) // default split procedure
+ def shouldSplitFurther = pit.shouldSplitFurther(self.repr, tasksupport.parallelismLevel)
+ def split = pit.splitWithSignalling.map(newSubtask(_)) // default split procedure
private[parallel] override def signalAbort = pit.abort
override def toString = this.getClass.getSimpleName + "(" + pit.toString + ")(" + result + ")(supername: " + super.toString + ")"
}
@@ -855,22 +932,22 @@ self: ParIterableLike[T, Repr, Sequential] =>
/** Sequentially performs one task after another. */
protected[this] abstract class SeqComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]]
- (f: First, s: Second)
+ (f: First, s: Second)
extends Composite[FR, SR, R, First, Second](f, s) {
def leaf(prevr: Option[R]) = {
- executeAndWaitResult(ft)
- executeAndWaitResult(st)
+ tasksupport.executeAndWaitResult(ft)
+ tasksupport.executeAndWaitResult(st)
mergeSubtasks
}
}
/** Performs two tasks in parallel, and waits for both to finish. */
protected[this] abstract class ParComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]]
- (f: First, s: Second)
+ (f: First, s: Second)
extends Composite[FR, SR, R, First, Second](f, s) {
def leaf(prevr: Option[R]) = {
- val ftfuture = execute(ft)
- executeAndWaitResult(st)
+ val ftfuture = tasksupport.execute(ft)
+ tasksupport.executeAndWaitResult(st)
ftfuture()
mergeSubtasks
}
@@ -881,7 +958,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
@volatile var result: R1 = null.asInstanceOf[R1]
def map(r: R): R1
def leaf(prevr: Option[R1]) = {
- result = map(executeAndWaitResult(inner))
+ val initialResult = tasksupport.executeAndWaitResult(inner)
+ result = map(initialResult)
}
private[parallel] override def signalAbort() {
inner.signalAbort
@@ -891,13 +969,15 @@ self: ParIterableLike[T, Repr, Sequential] =>
protected trait Transformer[R, Tp] extends Accessor[R, Tp]
- protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T]) extends Accessor[Unit, Foreach[S]] {
+ protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Unit, Foreach[S]] {
@volatile var result: Unit = ()
def leaf(prevr: Option[Unit]) = pit.foreach(op)
protected[this] def newSubtask(p: IterableSplitter[T]) = new Foreach[S](op, p)
}
- protected[this] class Count(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Int, Count] {
+ protected[this] class Count(pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Int, Count] {
// val pittxt = pit.toString
@volatile var result: Int = 0
def leaf(prevr: Option[Int]) = result = pit.count(pred)
@@ -906,7 +986,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
// override def toString = "CountTask(" + pittxt + ")"
}
- protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Reduce[U]] {
+ protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Option[U], Reduce[U]] {
@volatile var result: Option[U] = None
def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.reduce(op))
protected[this] def newSubtask(p: IterableSplitter[T]) = new Reduce(op, p)
@@ -916,7 +997,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Fold[U]] {
+ protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[U, Fold[U]] {
@volatile var result: U = null.asInstanceOf[U]
def leaf(prevr: Option[U]) = result = pit.fold(z)(op)
protected[this] def newSubtask(p: IterableSplitter[T]) = new Fold(z, op, p)
@@ -931,21 +1013,24 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: Aggregate[S]) = result = combop(result, that.result)
}
- protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Sum[U]] {
+ protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T])
+ extends Accessor[U, Sum[U]] {
@volatile var result: U = null.asInstanceOf[U]
def leaf(prevr: Option[U]) = result = pit.sum(num)
protected[this] def newSubtask(p: IterableSplitter[T]) = new Sum(num, p)
override def merge(that: Sum[U]) = result = num.plus(result, that.result)
}
- protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Product[U]] {
+ protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T])
+ extends Accessor[U, Product[U]] {
@volatile var result: U = null.asInstanceOf[U]
def leaf(prevr: Option[U]) = result = pit.product(num)
protected[this] def newSubtask(p: IterableSplitter[T]) = new Product(num, p)
override def merge(that: Product[U]) = result = num.times(result, that.result)
}
- protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Min[U]] {
+ protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Option[U], Min[U]] {
@volatile var result: Option[U] = None
def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.min(ord))
protected[this] def newSubtask(p: IterableSplitter[T]) = new Min(ord, p)
@@ -955,7 +1040,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Max[U]] {
+ protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Option[U], Max[U]] {
@volatile var result: Option[U] = None
def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.max(ord))
protected[this] def newSubtask(p: IterableSplitter[T]) = new Max(ord, p)
@@ -965,27 +1051,28 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Map[S, That](f: T => S, pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Map[S, That](f: T => S, cbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[S, That], Map[S, That]] {
@volatile var result: Combiner[S, That] = null
- def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, pbf(self.repr)))
- protected[this] def newSubtask(p: IterableSplitter[T]) = new Map(f, pbf, p)
+ def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, cbf()))
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Map(f, cbf, p)
override def merge(that: Map[S, That]) = result = result combine that.result
}
protected[this] class Collect[S, That]
- (pf: PartialFunction[T, S], pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: IterableSplitter[T])
+ (pf: PartialFunction[T, S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[S, That], Collect[S, That]] {
@volatile var result: Combiner[S, That] = null
- def leaf(prev: Option[Combiner[S, That]]) = result = pit.collect2combiner[S, That](pf, pbf(self.repr))
+ def leaf(prev: Option[Combiner[S, That]]) = result = pit.collect2combiner[S, That](pf, pbf())
protected[this] def newSubtask(p: IterableSplitter[T]) = new Collect(pf, pbf, p)
override def merge(that: Collect[S, That]) = result = result combine that.result
}
- protected[this] class FlatMap[S, That](f: T => GenTraversableOnce[S], pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: IterableSplitter[T])
+ protected[this] class FlatMap[S, That]
+ (f: T => GenTraversableOnce[S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[S, That], FlatMap[S, That]] {
@volatile var result: Combiner[S, That] = null
- def leaf(prev: Option[Combiner[S, That]]) = result = pit.flatmap2combiner(f, pbf(self.repr))
+ def leaf(prev: Option[Combiner[S, That]]) = result = pit.flatmap2combiner(f, pbf())
protected[this] def newSubtask(p: IterableSplitter[T]) = new FlatMap(f, pbf, p)
override def merge(that: FlatMap[S, That]) = {
//debuglog("merging " + result + " and " + that.result)
@@ -994,28 +1081,31 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
}
- protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Forall] {
+ protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Boolean, Forall] {
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort }
protected[this] def newSubtask(p: IterableSplitter[T]) = new Forall(pred, p)
override def merge(that: Forall) = result = result && that.result
}
- protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Exists] {
+ protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Boolean, Exists] {
@volatile var result: Boolean = false
def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort }
protected[this] def newSubtask(p: IterableSplitter[T]) = new Exists(pred, p)
override def merge(that: Exists) = result = result || that.result
}
- protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Find[U]] {
+ protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
+ extends Accessor[Option[U], Find[U]] {
@volatile var result: Option[U] = None
def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort }
protected[this] def newSubtask(p: IterableSplitter[T]) = new Find(pred, p)
override def merge(that: Find[U]) = if (this.result == None) result = that.result
}
- protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Filter[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = {
@@ -1025,7 +1115,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: Filter[U, This]) = result = result combine that.result
}
- protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], FilterNot[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = {
@@ -1035,7 +1125,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: FilterNot[U, This]) = result = result combine that.result
}
- protected class Copy[U >: T, That](cfactory: () => Combiner[U, That], protected[this] val pit: IterableSplitter[T])
+ protected class Copy[U >: T, That](cfactory: CombinerFactory[U, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, That], Copy[U, That]] {
@volatile var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cfactory()))
@@ -1043,11 +1133,12 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: Copy[U, That]) = result = result combine that.result
}
- protected[this] class Partition[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Partition[U >: T, This >: Repr]
+ (pred: T => Boolean, cbfTrue: CombinerFactory[U, This], cbfFalse: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Combiner[U, This]), Partition[U, This]] {
@volatile var result: (Combiner[U, This], Combiner[U, This]) = null
- def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
- protected[this] def newSubtask(p: IterableSplitter[T]) = new Partition(pred, cbf, p)
+ def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbfTrue()), reuse(prev.map(_._2), cbfFalse()))
+ protected[this] def newSubtask(p: IterableSplitter[T]) = new Partition(pred, cbfTrue, cbfFalse, p)
override def merge(that: Partition[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2)
}
@@ -1074,7 +1165,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
}
- protected[this] class Take[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Take[U >: T, This >: Repr]
+ (n: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Take[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = {
@@ -1082,7 +1174,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.scanLeft(0)(_ + _.remaining)
for ((p, untilp) <- pits zip sizes; if untilp <= n) yield {
if (untilp + p.remaining < n) new Take(p.remaining, cbf, p)
@@ -1093,13 +1185,14 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Drop[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Drop[U >: T, This >: Repr]
+ (n: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Drop[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = result = pit.drop2combiner(n, reuse(prev, cbf()))
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.scanLeft(0)(_ + _.remaining)
for ((p, withp) <- pits zip sizes.tail; if withp >= n) yield {
if (withp - p.remaining > n) new Drop(0, cbf, p)
@@ -1110,13 +1203,14 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Slice[U >: T, This >: Repr](from: Int, until: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class Slice[U >: T, This >: Repr]
+ (from: Int, until: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, This], Slice[U, This]] {
@volatile var result: Combiner[U, This] = null
def leaf(prev: Option[Combiner[U, This]]) = result = pit.slice2combiner(from, until, reuse(prev, cbf()))
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.scanLeft(0)(_ + _.remaining)
for ((p, untilp) <- pits zip sizes; if untilp + p.remaining >= from || untilp <= until) yield {
val f = (from max untilp) - untilp
@@ -1128,22 +1222,23 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class SplitAt[U >: T, This >: Repr](at: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ protected[this] class SplitAt[U >: T, This >: Repr]
+ (at: Int, cbfBefore: CombinerFactory[U, This], cbfAfter: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Combiner[U, This]), SplitAt[U, This]] {
@volatile var result: (Combiner[U, This], Combiner[U, This]) = null
- def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf()))
+ def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbfBefore()), reuse(prev.map(_._2), cbfAfter()))
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.scanLeft(0)(_ + _.remaining)
- for ((p, untilp) <- pits zip sizes) yield new SplitAt((at max untilp min (untilp + p.remaining)) - untilp, cbf, p)
+ for ((p, untilp) <- pits zip sizes) yield new SplitAt((at max untilp min (untilp + p.remaining)) - untilp, cbfBefore, cbfAfter, p)
}
override def merge(that: SplitAt[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2)
override def requiresStrictSplitters = true
}
protected[this] class TakeWhile[U >: T, This >: Repr]
- (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ (pos: Int, pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Boolean), TakeWhile[U, This]] {
@volatile var result: (Combiner[U, This], Boolean) = null
def leaf(prev: Option[(Combiner[U, This], Boolean)]) = if (pos < pit.indexFlag) {
@@ -1152,7 +1247,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
} else result = (reuse(prev.map(_._1), cbf()), false)
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new TakeWhile(pos + untilp, pred, cbf, p)
}
override def merge(that: TakeWhile[U, This]) = if (result._2) {
@@ -1162,23 +1257,23 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class Span[U >: T, This >: Repr]
- (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T])
+ (pos: Int, pred: T => Boolean, cbfBefore: CombinerFactory[U, This], cbfAfter: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T])
extends Transformer[(Combiner[U, This], Combiner[U, This]), Span[U, This]] {
@volatile var result: (Combiner[U, This], Combiner[U, This]) = null
def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = if (pos < pit.indexFlag) {
// val lst = pit.toList
// val pa = mutable.ParArray(lst: _*)
// val str = "At leaf we will iterate: " + pa.splitter.toList
- result = pit.span2combiners(pred, cbf(), cbf()) // do NOT reuse old combiners here, lest ye be surprised
+ result = pit.span2combiners(pred, cbfBefore(), cbfAfter()) // do NOT reuse old combiners here, lest ye be surprised
// println("\nAt leaf result is: " + result)
if (result._2.size > 0) pit.setIndexFlagIfLesser(pos)
} else {
- result = (reuse(prev.map(_._2), cbf()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbf())))
+ result = (reuse(prev.map(_._2), cbfBefore()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbfAfter())))
}
protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
- for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbf, p)
+ val pits = pit.splitWithSignalling
+ for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbfBefore, cbfAfter, p)
}
override def merge(that: Span[U, This]) = result = if (result._2.size == 0) {
(result._1 combine that.result._1, that.result._2)
@@ -1188,15 +1283,15 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class Zip[U >: T, S, That](pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
+ protected[this] class Zip[U >: T, S, That](pbf: CombinerFactory[(U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] {
@volatile var result: Result = null
- def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf(self.repr))
+ def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf())
protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.map(_.remaining)
- val opits = othpit.psplit(sizes: _*)
+ val opits = othpit.psplitWithSignalling(sizes: _*)
(pits zip opits) map { p => new Zip(pbf, p._1, p._2) }
}
override def merge(that: Zip[U, S, That]) = result = result combine that.result
@@ -1204,18 +1299,18 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class ZipAll[U >: T, S, That]
- (len: Int, thiselem: U, thatelem: S, pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
+ (len: Int, thiselem: U, thatelem: S, pbf: CombinerFactory[(U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S])
extends Transformer[Combiner[(U, S), That], ZipAll[U, S, That]] {
@volatile var result: Result = null
- def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf(self.repr))
+ def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf())
protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
override def split = if (pit.remaining <= len) {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
val sizes = pits.map(_.remaining)
- val opits = othpit.psplit(sizes: _*)
+ val opits = othpit.psplitWithSignalling(sizes: _*)
((pits zip opits) zip sizes) map { t => new ZipAll(t._2, thiselem, thatelem, pbf, t._1._1, t._1._2) }
} else {
- val opits = othpit.psplit(pit.remaining)
+ val opits = othpit.psplitWithSignalling(pit.remaining)
val diff = len - pit.remaining
Seq(
new ZipAll(pit.remaining, thiselem, thatelem, pbf, pit, opits(0)), // nothing wrong will happen with the cast below - elem T is never accessed
@@ -1232,7 +1327,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def leaf(prev: Option[Unit]) = pit.copyToArray(array, from, len)
protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining); if untilp < len) yield {
val plen = p.remaining min (len - untilp)
new CopyToArray[U, This](from + untilp, plen, array, p)
@@ -1241,7 +1336,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def requiresStrictSplitters = true
}
- protected[this] class ToParCollection[U >: T, That](cbf: () => Combiner[U, That], protected[this] val pit: IterableSplitter[T])
+ protected[this] class ToParCollection[U >: T, That](cbf: CombinerFactory[U, That], protected[this] val pit: IterableSplitter[T])
extends Transformer[Combiner[U, That], ToParCollection[U, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Combiner[U, That]]) {
@@ -1252,7 +1347,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: ToParCollection[U, That]) = result = result combine that.result
}
- protected[this] class ToParMap[K, V, That](cbf: () => Combiner[(K, V), That], protected[this] val pit: IterableSplitter[T])(implicit ev: T <:< (K, V))
+ protected[this] class ToParMap[K, V, That](cbf: CombinerFactory[(K, V), That], protected[this] val pit: IterableSplitter[T])(implicit ev: T <:< (K, V))
extends Transformer[Combiner[(K, V), That], ToParMap[K, V, That]] {
@volatile var result: Result = null
def leaf(prev: Option[Combiner[(K, V), That]]) {
@@ -1272,7 +1367,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
val until = from + len
val blocksize = scanBlockSize
while (i < until) {
- trees += scanBlock(i, math.min(blocksize, pit.remaining))
+ trees += scanBlock(i, scala.math.min(blocksize, pit.remaining))
i += blocksize
}
@@ -1289,7 +1384,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
} else trees(from)
protected[this] def newSubtask(pit: IterableSplitter[T]) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield {
new CreateScanTree(untilp, p.remaining, z, op, p)
}
@@ -1301,11 +1396,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
protected[this] class FromScanTree[U >: T, That]
- (tree: ScanTree[U], z: U, op: (U, U) => U, cbf: CanCombineFrom[Repr, U, That])
+ (tree: ScanTree[U], z: U, op: (U, U) => U, cbf: CombinerFactory[U, That])
extends StrictSplitterCheckTask[Combiner[U, That], FromScanTree[U, That]] {
@volatile var result: Combiner[U, That] = null
def leaf(prev: Option[Combiner[U, That]]) {
- val cb = reuse(prev, cbf(self.repr))
+ val cb = reuse(prev, cbf())
iterate(tree, cb)
result = cb
}
@@ -1335,7 +1430,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
/* scan tree */
- protected[this] def scanBlockSize = (threshold(size, parallelismLevel) / 2) max 1
+ protected[this] def scanBlockSize = (thresholdFromSize(size, tasksupport.parallelismLevel) / 2) max 1
protected[this] trait ScanTree[U >: T] {
def beginsAt: Int
@@ -1376,6 +1471,12 @@ self: ParIterableLike[T, Repr, Sequential] =>
def print(depth: Int) = println((" " * depth) + this)
}
+ /* alias methods */
+
+ def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op);
+
+ def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op);
+
/* debug information */
private[parallel] def debugInformation = "Parallel collection: " + this.getClass
@@ -1394,7 +1495,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
debugBuffer += s
}
- import collection.DebugUtils._
+ import scala.collection.DebugUtils._
private[parallel] def printDebugBuffer() = println(buildString {
append =>
for (s <- debugBuffer) {
diff --git a/src/library/scala/collection/parallel/ParIterableView.scala b/src/library/scala/collection/parallel/ParIterableView.scala
index 2b4f241..7644e1b 100644
--- a/src/library/scala/collection/parallel/ParIterableView.scala
+++ b/src/library/scala/collection/parallel/ParIterableView.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala
index 1d76599..0ecd6bd 100644
--- a/src/library/scala/collection/parallel/ParIterableViewLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -18,6 +18,7 @@ import scala.collection.GenSeq
import scala.collection.generic.{ CanBuildFrom, SliceInterval }
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.immutable.ParRange
+import scala.language.implicitConversions
@@ -47,7 +48,6 @@ extends GenIterableView[T, Coll]
with ParIterableLike[T, This, ThisSeq]
{
self =>
- import tasksupport._
override def foreach[U](f: T => U): Unit = super[ParIterableLike].foreach(f)
override protected[this] def newCombiner: Combiner[T, This] = throw new UnsupportedOperationException(this + ".newCombiner");
@@ -135,7 +135,7 @@ self =>
newZippedAllTryParSeq(that, thisElem, thatElem).asInstanceOf[That]
override def force[U >: T, That](implicit bf: CanBuildFrom[Coll, U, That]) = bf ifParallel { pbf =>
- executeAndWaitResult(new Force(pbf, splitter).mapResult(_.result).asInstanceOf[Task[That, ResultMapping[_, Force[U, That], That]]])
+ tasksupport.executeAndWaitResult(new Force(pbf, splitter).mapResult(_.result).asInstanceOf[Task[That, ResultMapping[_, Force[U, That], That]]])
} otherwise {
val b = bf(underlying)
b ++= this.iterator
diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala
index c696099..1f27ae8 100644
--- a/src/library/scala/collection/parallel/ParMap.scala
+++ b/src/library/scala/collection/parallel/ParMap.scala
@@ -1,18 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.collection.parallel
-
-
-
-
import scala.collection.Map
import scala.collection.GenMap
import scala.collection.mutable.Builder
@@ -21,10 +16,6 @@ import scala.collection.generic.GenericParMapTemplate
import scala.collection.generic.GenericParMapCompanion
import scala.collection.generic.CanCombineFrom
-
-
-
-
/** A template trait for parallel maps.
*
* $sideeffects
@@ -50,6 +41,10 @@ self =>
def empty: ParMap[K, V] = new mutable.ParHashMap[K, V]
override def stringPrefix = "ParMap"
+
+ override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value))
+
+ def + [U >: V](kv: (K, U)): ParMap[K, U]
}
@@ -61,32 +56,13 @@ object ParMap extends ParMapFactory[ParMap] {
implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V]
+ /** An abstract shell used by { mutable, immutable }.Map but not by collection.Map
+ * because of variance issues.
+ */
+ abstract class WithDefault[A, +B](underlying: ParMap[A, B], d: A => B) extends ParMap[A, B] {
+ override def size = underlying.size
+ def get(key: A) = underlying.get(key)
+ def splitter = underlying.splitter
+ override def default(key: A): B = d(key)
+ }
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index 5b85546..56594be 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -16,9 +16,9 @@ import scala.collection.MapLike
import scala.collection.GenMapLike
import scala.collection.Map
import scala.collection.mutable.Builder
-
-
-
+import scala.annotation.unchecked.uncheckedVariance
+import scala.collection.generic.IdleSignalling
+import scala.collection.generic.Signalling
@@ -53,6 +53,97 @@ self =>
case None => default(key)
}
+ def getOrElse[U >: V](key: K, default: => U): U = get(key) match {
+ case Some(v) => v
+ case None => default
+ }
+
+ def contains(key: K): Boolean = get(key).isDefined
+
+ def isDefinedAt(key: K): Boolean = contains(key)
+
+ private[this] def keysIterator(s: IterableSplitter[(K, V)] @uncheckedVariance): IterableSplitter[K] =
+ new IterableSplitter[K] {
+ i =>
+ val iter = s
+ def hasNext = iter.hasNext
+ def next() = iter.next._1
+ def split = {
+ val ss = iter.split.map(keysIterator(_))
+ ss.foreach { _.signalDelegate = i.signalDelegate }
+ ss
+ }
+ def remaining = iter.remaining
+ def dup = keysIterator(iter.dup)
+ }
+
+ def keysIterator: IterableSplitter[K] = keysIterator(splitter)
+
+ private[this] def valuesIterator(s: IterableSplitter[(K, V)] @uncheckedVariance): IterableSplitter[V] =
+ new IterableSplitter[V] {
+ i =>
+ val iter = s
+ def hasNext = iter.hasNext
+ def next() = iter.next._2
+ def split = {
+ val ss = iter.split.map(valuesIterator(_))
+ ss.foreach { _.signalDelegate = i.signalDelegate }
+ ss
+ }
+ def remaining = iter.remaining
+ def dup = valuesIterator(iter.dup)
+ }
+
+ def valuesIterator: IterableSplitter[V] = valuesIterator(splitter)
+
+ protected class DefaultKeySet extends ParSet[K] {
+ def contains(key : K) = self.contains(key)
+ def splitter = keysIterator(self.splitter)
+ def + (elem: K): ParSet[K] =
+ (ParSet[K]() ++ this + elem).asInstanceOf[ParSet[K]] // !!! concrete overrides abstract problem
+ def - (elem: K): ParSet[K] =
+ (ParSet[K]() ++ this - elem).asInstanceOf[ParSet[K]] // !!! concrete overrides abstract problem
+ override def size = self.size
+ override def foreach[S](f: K => S) = for ((k, v) <- self) f(k)
+ override def seq = self.seq.keySet
+ }
+
+ protected class DefaultValuesIterable extends ParIterable[V] {
+ def splitter = valuesIterator(self.splitter)
+ override def size = self.size
+ override def foreach[S](f: V => S) = for ((k, v) <- self) f(v)
+ def seq = self.seq.values
+ }
+
+ def keySet: ParSet[K] = new DefaultKeySet
+
+ def keys: ParIterable[K] = keySet
+
+ def values: ParIterable[V] = new DefaultValuesIterable
+
+ def filterKeys(p: K => Boolean): ParMap[K, V] = new ParMap[K, V] {
+ lazy val filtered = self.filter(kv => p(kv._1))
+ override def foreach[S](f: ((K, V)) => S): Unit = for (kv <- self) if (p(kv._1)) f(kv)
+ def splitter = filtered.splitter
+ override def contains(key: K) = self.contains(key) && p(key)
+ def get(key: K) = if (!p(key)) None else self.get(key)
+ def seq = self.seq.filterKeys(p)
+ def size = filtered.size
+ def + [U >: V](kv: (K, U)): ParMap[K, U] = ParMap[K, U]() ++ this + kv
+ def - (key: K): ParMap[K, V] = ParMap[K, V]() ++ this - key
+ }
+
+ def mapValues[S](f: V => S): ParMap[K, S] = new ParMap[K, S] {
+ override def foreach[Q](g: ((K, S)) => Q): Unit = for ((k, v) <- self) g((k, f(v)))
+ def splitter = self.splitter.map(kv => (kv._1, f(kv._2)))
+ override def size = self.size
+ override def contains(key: K) = self.contains(key)
+ def get(key: K) = self.get(key).map(f)
+ def seq = self.seq.mapValues(f)
+ def + [U >: S](kv: (K, U)): ParMap[K, U] = ParMap[K, U]() ++ this + kv
+ def - (key: K): ParMap[K, S] = ParMap[K, S]() ++ this - key
+ }
+
// note - should not override toMap (could be mutable)
}
diff --git a/src/library/scala/collection/parallel/ParSeq.scala b/src/library/scala/collection/parallel/ParSeq.scala
index eefd0a7..b905d1d 100644
--- a/src/library/scala/collection/parallel/ParSeq.scala
+++ b/src/library/scala/collection/parallel/ParSeq.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index 1d4d8a1..da9abfc 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -44,38 +44,8 @@ trait ParSeqLike[+T, +Repr <: ParSeq[T], +Sequential <: Seq[T] with SeqLike[T, S
extends scala.collection.GenSeqLike[T, Repr]
with ParIterableLike[T, Repr, Sequential] {
self =>
- import tasksupport._
-
- type SuperParIterator = IterableSplitter[T]
-
- /** An iterator that can be split into arbitrary subsets of iterators.
- * The self-type requirement ensures that the signal context passing behaviour gets mixed in
- * the concrete iterator instance in some concrete collection.
- *
- * '''Note:''' In concrete collection classes, collection implementers might want to override the iterator
- * `reverse2builder` method to ensure higher efficiency.
- */
- trait ParIterator extends SeqSplitter[T] with super.ParIterator {
- me: SignalContextPassingIterator[ParIterator] =>
- def split: Seq[ParIterator]
- def psplit(sizes: Int*): Seq[ParIterator]
- }
-
- /** A stackable modification that ensures signal contexts get passed along the iterators.
- * A self-type requirement in `ParIterator` ensures that this trait gets mixed into
- * concrete iterators.
- */
- trait SignalContextPassingIterator[+IterRepr <: ParIterator]
- extends ParIterator with super.SignalContextPassingIterator[IterRepr] {
- // Note: See explanation in `ParallelIterableLike.this.SignalContextPassingIterator`
- // to understand why we do the cast here, and have a type parameter.
- // Bottomline: avoiding boilerplate and fighting against inability to override stackable modifications.
- abstract override def psplit(sizes: Int*): Seq[IterRepr] = {
- val pits = super.psplit(sizes: _*)
- pits foreach { _.signalDelegate = signalDelegate }
- pits.asInstanceOf[Seq[IterRepr]]
- }
- }
+
+ protected[this] type SuperParIterator = IterableSplitter[T]
/** A more refined version of the iterator found in the `ParallelIterable` trait,
* this iterator can be split into arbitrary subsets of iterators.
@@ -89,14 +59,12 @@ self =>
override def size = length
/** Used to iterate elements using indices */
- protected abstract class Elements(start: Int, val end: Int) extends ParIterator with BufferedIterator[T] {
- me: SignalContextPassingIterator[ParIterator] =>
-
+ protected abstract class Elements(start: Int, val end: Int) extends SeqSplitter[T] with BufferedIterator[T] {
private var i = start
def hasNext = i < end
- def next: T = if (i < end) {
+ def next(): T = if (i < end) {
val x = self(i)
i += 1
x
@@ -106,14 +74,14 @@ self =>
final def remaining = end - i
- def dup = new Elements(i, end) with SignalContextPassingIterator[ParIterator]
+ def dup = new Elements(i, end) {}
def split = psplit(remaining / 2, remaining - remaining / 2)
def psplit(sizes: Int*) = {
val incr = sizes.scanLeft(0)(_ + _)
for ((from, until) <- incr.init zip incr.tail) yield {
- new Elements(start + from, (start + until) min end) with SignalContextPassingIterator[ParIterator]
+ new Elements(start + from, (start + until) min end) {}
}
}
@@ -138,7 +106,7 @@ self =>
val realfrom = if (from < 0) 0 else from
val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new SegmentLength(p, 0, splitter.psplit(realfrom, length - realfrom)(1) assign ctx))._1
+ tasksupport.executeAndWaitResult(new SegmentLength(p, 0, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx))._1
}
/** Finds the first element satisfying some predicate.
@@ -156,7 +124,7 @@ self =>
val realfrom = if (from < 0) 0 else from
val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MaxValue)
- executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplit(realfrom, length - realfrom)(1) assign ctx))
+ tasksupport.executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx))
}
/** Finds the last element satisfying some predicate.
@@ -174,22 +142,27 @@ self =>
val until = if (end >= length) length else end + 1
val ctx = new DefaultSignalling with AtomicIndexFlag
ctx.setIndexFlag(Int.MinValue)
- executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplit(until, length - until)(0) assign ctx))
+ tasksupport.executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplitWithSignalling(until, length - until)(0) assign ctx))
}
def reverse: Repr = {
- executeAndWaitResult(new Reverse(() => newCombiner, splitter) mapResult { _.result })
+ tasksupport.executeAndWaitResult(new Reverse(() => newCombiner, splitter) mapResult { _.resultWithTaskSupport })
}
- def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new ReverseMap[S, That](f, pbf, splitter) mapResult { _.result })
- } otherwise seq.reverseMap(f)(bf2seq(bf))
+ def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) {
+ tasksupport.executeAndWaitResult(
+ new ReverseMap[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.resultWithTaskSupport }
+ )
+ } else setTaskSupport(seq.reverseMap(f)(bf2seq(bf)), tasksupport)
+ /*bf ifParallel { pbf =>
+ tasksupport.executeAndWaitResult(new ReverseMap[S, That](f, pbf, splitter) mapResult { _.result })
+ } otherwise seq.reverseMap(f)(bf2seq(bf))*/
/** Tests whether this $coll contains the given sequence at a given index.
*
* $abortsignalling
*
- * @tparam U the element type of `that` parallel sequence
+ * @tparam S the element type of `that` parallel sequence
* @param that the parallel sequence this sequence is being searched for
* @param offset the starting offset for the search
* @return `true` if there is a sequence `that` starting at `offset` in this sequence, `false` otherwise
@@ -200,13 +173,15 @@ self =>
else if (pthat.length > length - offset) false
else {
val ctx = new DefaultSignalling with VolatileAbort
- executeAndWaitResult(new SameElements(splitter.psplit(offset, pthat.length)(1) assign ctx, pthat.splitter))
+ tasksupport.executeAndWaitResult(
+ new SameElements(splitter.psplitWithSignalling(offset, pthat.length)(1) assign ctx, pthat.splitter)
+ )
}
} otherwise seq.startsWith(that, offset)
override def sameElements[U >: T](that: GenIterable[U]): Boolean = that ifParSeq { pthat =>
val ctx = new DefaultSignalling with VolatileAbort
- length == pthat.length && executeAndWaitResult(new SameElements(splitter assign ctx, pthat.splitter))
+ length == pthat.length && tasksupport.executeAndWaitResult(new SameElements(splitter assign ctx, pthat.splitter))
} otherwise seq.sameElements(that)
/** Tests whether this $coll ends with the given parallel sequence.
@@ -223,24 +198,24 @@ self =>
else {
val ctx = new DefaultSignalling with VolatileAbort
val tlen = that.length
- executeAndWaitResult(new SameElements(splitter.psplit(length - tlen, tlen)(1) assign ctx, pthat.splitter))
+ tasksupport.executeAndWaitResult(new SameElements(splitter.psplitWithSignalling(length - tlen, tlen)(1) assign ctx, pthat.splitter))
}
} otherwise seq.endsWith(that)
def patch[U >: T, That](from: Int, patch: GenSeq[U], replaced: Int)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
val realreplaced = replaced min (length - from)
- if (patch.isParSeq && bf.isParallel && (size - realreplaced + patch.size) > MIN_FOR_COPY) {
+ if (patch.isParSeq && bf(repr).isCombiner && (size - realreplaced + patch.size) > MIN_FOR_COPY) {
val that = patch.asParSeq
- val pbf = bf.asParallel
- val pits = splitter.psplit(from, replaced, length - from - realreplaced)
- val copystart = new Copy[U, That](() => pbf(repr), pits(0))
+ val pits = splitter.psplitWithSignalling(from, replaced, length - from - realreplaced)
+ val cfactory = combinerFactory(() => bf(repr).asCombiner)
+ val copystart = new Copy[U, That](cfactory, pits(0))
val copymiddle = wrap {
- val tsk = new that.Copy[U, That](() => pbf(repr), that.splitter)
+ val tsk = new that.Copy[U, That](cfactory, that.splitter)
tasksupport.executeAndWaitResult(tsk)
}
- val copyend = new Copy[U, That](() => pbf(repr), pits(2))
- executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult {
- _.result
+ val copyend = new Copy[U, That](cfactory, pits(2))
+ tasksupport.executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult {
+ _.resultWithTaskSupport
})
} else patch_sequential(from, patch.seq, replaced)
}
@@ -249,16 +224,23 @@ self =>
val from = 0 max fromarg
val b = bf(repr)
val repl = (r min (length - from)) max 0
- val pits = splitter.psplit(from, repl, length - from - repl)
+ val pits = splitter.psplitWithSignalling(from, repl, length - from - repl)
b ++= pits(0)
b ++= patch
b ++= pits(2)
- b.result
+ setTaskSupport(b.result, tasksupport)
}
- def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = bf ifParallel { pbf =>
- executeAndWaitResult(new Updated(index, elem, pbf, splitter) mapResult { _.result })
- } otherwise seq.updated(index, elem)(bf2seq(bf))
+ def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) {
+ tasksupport.executeAndWaitResult(
+ new Updated(index, elem, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult {
+ _.resultWithTaskSupport
+ }
+ )
+ } else setTaskSupport(seq.updated(index, elem)(bf2seq(bf)), tasksupport)
+ /*bf ifParallel { pbf =>
+ tasksupport.executeAndWaitResult(new Updated(index, elem, pbf, splitter) mapResult { _.result })
+ } otherwise seq.updated(index, elem)(bf2seq(bf))*/
def +:[U >: T, That](elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = {
patch(0, mutable.ParArray(elem), 0)
@@ -272,10 +254,13 @@ self =>
patch(length, new immutable.Repetition(elem, len - length), 0)
} else patch(length, Nil, 0);
- override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) {
- val pbf = bf.asParallel
+ override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
val thatseq = that.asParSeq
- executeAndWaitResult(new Zip(length min thatseq.length, pbf, splitter, thatseq.splitter) mapResult { _.result });
+ tasksupport.executeAndWaitResult(
+ new Zip(length min thatseq.length, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult {
+ _.resultWithTaskSupport
+ }
+ );
} else super.zip(that)(bf)
/** Tests whether every element of this $coll relates to the
@@ -292,7 +277,7 @@ self =>
*/
def corresponds[S](that: GenSeq[S])(p: (T, S) => Boolean): Boolean = that ifParSeq { pthat =>
val ctx = new DefaultSignalling with VolatileAbort
- length == pthat.length && executeAndWaitResult(new Corresponds(p, splitter assign ctx, pthat.splitter))
+ length == pthat.length && tasksupport.executeAndWaitResult(new Corresponds(p, splitter assign ctx, pthat.splitter))
} otherwise seq.corresponds(that)(p)
def diff[U >: T](that: GenSeq[U]): Repr = sequentially {
@@ -300,23 +285,25 @@ self =>
}
/** Computes the multiset intersection between this $coll and another sequence.
- * $mayNotTerminateInf
*
* @param that the sequence of elements to intersect with.
- * @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
+ * @tparam U the element type of `that` parallel sequence
* @return a new collection of type `That` which contains all elements of this $coll
* which also appear in `that`.
* If an element value `x` appears
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
* in the result, but any following occurrences will be omitted.
+ *
* @usecase def intersect(that: Seq[T]): $Coll[T]
- * @return a new $coll which contains all elements of this $coll
- * which also appear in `that`.
- * If an element value `x` appears
- * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
- * in the result, but any following occurrences will be omitted.
+ * @inheritdoc
+ *
+ * $mayNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
*/
def intersect[U >: T](that: GenSeq[U]) = sequentially {
_ intersect that
@@ -366,7 +353,7 @@ self =>
} else result = (0, false)
protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new SegmentLength(pred, from + untilp, p)
}
override def merge(that: SegmentLength) = if (result._2) result = (result._1 + that.result._1, that.result._2)
@@ -385,7 +372,7 @@ self =>
}
protected[this] def newSubtask(p: SuperParIterator) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield new IndexWhere(pred, untilp, p)
}
override def merge(that: IndexWhere) = result = if (result == -1) that.result else {
@@ -406,7 +393,7 @@ self =>
}
protected[this] def newSubtask(p: SuperParIterator) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(pos)(_ + _.remaining)) yield new LastIndexWhere(pred, untilp, p)
}
override def merge(that: LastIndexWhere) = result = if (result == -1) that.result else {
@@ -423,15 +410,15 @@ self =>
override def merge(that: Reverse[U, This]) = result = that.result combine result
}
- protected[this] class ReverseMap[S, That](f: T => S, pbf: CanCombineFrom[Repr, S, That], protected[this] val pit: SeqSplitter[T])
+ protected[this] class ReverseMap[S, That](f: T => S, pbf: () => Combiner[S, That], protected[this] val pit: SeqSplitter[T])
extends Transformer[Combiner[S, That], ReverseMap[S, That]] {
@volatile var result: Combiner[S, That] = null
- def leaf(prev: Option[Combiner[S, That]]) = result = pit.reverseMap2combiner(f, pbf(self.repr))
+ def leaf(prev: Option[Combiner[S, That]]) = result = pit.reverseMap2combiner(f, pbf())
protected[this] def newSubtask(p: SuperParIterator) = new ReverseMap(f, pbf, down(p))
override def merge(that: ReverseMap[S, That]) = result = that.result combine result
}
- protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: PreciseSplitter[U])
+ protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[U])
extends Accessor[Boolean, SameElements[U]] {
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
@@ -442,44 +429,44 @@ self =>
override def split = {
val fp = pit.remaining / 2
val sp = pit.remaining - fp
- for ((p, op) <- pit.psplit(fp, sp) zip otherpit.psplit(fp, sp)) yield new SameElements(p, op)
+ for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new SameElements(p, op)
}
override def merge(that: SameElements[U]) = result = result && that.result
override def requiresStrictSplitters = true
}
- protected[this] class Updated[U >: T, That](pos: Int, elem: U, pbf: CanCombineFrom[Repr, U, That], protected[this] val pit: SeqSplitter[T])
+ protected[this] class Updated[U >: T, That](pos: Int, elem: U, pbf: CombinerFactory[U, That], protected[this] val pit: SeqSplitter[T])
extends Transformer[Combiner[U, That], Updated[U, That]] {
@volatile var result: Combiner[U, That] = null
- def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf(self.repr))
+ def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf())
protected[this] def newSubtask(p: SuperParIterator) = unsupported
override def split = {
- val pits = pit.split
+ val pits = pit.splitWithSignalling
for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Updated(pos - untilp, elem, pbf, p)
}
override def merge(that: Updated[U, That]) = result = result combine that.result
override def requiresStrictSplitters = true
}
- protected[this] class Zip[U >: T, S, That](len: Int, pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S])
+ protected[this] class Zip[U >: T, S, That](len: Int, cf: CombinerFactory[(U, S), That], protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S])
extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] {
@volatile var result: Result = null
- def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](otherpit, pbf(self.repr))
+ def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](otherpit, cf())
protected[this] def newSubtask(p: SuperParIterator) = unsupported
override def split = {
val fp = len / 2
val sp = len - len / 2
- val pits = pit.psplit(fp, sp)
- val opits = otherpit.psplit(fp, sp)
+ val pits = pit.psplitWithSignalling(fp, sp)
+ val opits = otherpit.psplitWithSignalling(fp, sp)
Seq(
- new Zip(fp, pbf, pits(0), opits(0)),
- new Zip(sp, pbf, pits(1), opits(1))
+ new Zip(fp, cf, pits(0), opits(0)),
+ new Zip(sp, cf, pits(1), opits(1))
)
}
override def merge(that: Zip[U, S, That]) = result = result combine that.result
}
- protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: PreciseSplitter[S])
+ protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S])
extends Accessor[Boolean, Corresponds[S]] {
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
@@ -490,7 +477,7 @@ self =>
override def split = {
val fp = pit.remaining / 2
val sp = pit.remaining - fp
- for ((p, op) <- pit.psplit(fp, sp) zip otherpit.psplit(fp, sp)) yield new Corresponds(corr, p, op)
+ for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new Corresponds(corr, p, op)
}
override def merge(that: Corresponds[S]) = result = result && that.result
override def requiresStrictSplitters = true
diff --git a/src/library/scala/collection/parallel/ParSeqView.scala b/src/library/scala/collection/parallel/ParSeqView.scala
index a08b9a4..3e3c497 100644
--- a/src/library/scala/collection/parallel/ParSeqView.scala
+++ b/src/library/scala/collection/parallel/ParSeqView.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/parallel/ParSeqViewLike.scala b/src/library/scala/collection/parallel/ParSeqViewLike.scala
index 0da6f41..04369d8 100644
--- a/src/library/scala/collection/parallel/ParSeqViewLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqViewLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -38,7 +38,6 @@ extends GenSeqView[T, Coll]
with ParSeqLike[T, This, ThisSeq]
{
self =>
- import tasksupport._
trait Transformed[+S] extends ParSeqView[S, Coll, CollSeq]
with super[ParIterableView].Transformed[S] with super[GenSeqViewLike].Transformed[S] {
@@ -92,10 +91,18 @@ self =>
override def seq = self.seq.patch(from, patch, replaced).asInstanceOf[SeqView[U, CollSeq]]
}
+ // !!!
+ //
+ // What is up with this trait and method, why are they here doing
+ // nothing but throwing exceptions, without even being deprecated?
+ // They're not implementing something abstract; why aren't they
+ // just removed?
+ //
// use Patched instead
trait Prepended[U >: T] extends super.Prepended[U] with Transformed[U] {
unsupported
}
+ protected def newPrepended[U >: T](elem: U): Transformed[U] = unsupported
/* wrapper virtual ctors */
@@ -122,7 +129,6 @@ self =>
val patch = _patch;
val replaced = _replaced
} with Patched[U]
- protected def newPrepended[U >: T](elem: U): Transformed[U] = unsupported
/* operation overrides */
@@ -163,7 +169,7 @@ self =>
override def scanRight[S, That](z: S)(op: (T, S) => S)(implicit bf: CanBuildFrom[This, S, That]): That = newForced(thisParSeq.scanRight(z)(op)).asInstanceOf[That]
override def groupBy[K](f: T => K): immutable.ParMap[K, This] = thisParSeq.groupBy(f).map(kv => (kv._1, newForced(kv._2).asInstanceOf[This]))
override def force[U >: T, That](implicit bf: CanBuildFrom[Coll, U, That]) = bf ifParallel { pbf =>
- executeAndWaitResult(new Force(pbf, splitter).mapResult(_.result).asInstanceOf[Task[That, _]])
+ tasksupport.executeAndWaitResult(new Force(pbf, splitter).mapResult(_.result).asInstanceOf[Task[That, _]])
} otherwise {
val b = bf(underlying)
b ++= this.iterator
diff --git a/src/library/scala/collection/parallel/ParSet.scala b/src/library/scala/collection/parallel/ParSet.scala
index 1514334..6e5e9b4 100644
--- a/src/library/scala/collection/parallel/ParSet.scala
+++ b/src/library/scala/collection/parallel/ParSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala
index 3728158..c80b5de 100644
--- a/src/library/scala/collection/parallel/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/ParSetLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/parallel/PreciseSplitter.scala b/src/library/scala/collection/parallel/PreciseSplitter.scala
index 6a652bb..42563f4 100644
--- a/src/library/scala/collection/parallel/PreciseSplitter.scala
+++ b/src/library/scala/collection/parallel/PreciseSplitter.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index e04e0e9..3150b0d 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -14,6 +14,7 @@ package scala.collection.parallel
import scala.collection.Parallel
import scala.collection.generic.Signalling
import scala.collection.generic.DelegatedSignalling
+import scala.collection.generic.IdleSignalling
import scala.collection.generic.CanCombineFrom
import scala.collection.mutable.Builder
import scala.collection.Iterator.empty
@@ -27,14 +28,18 @@ private[collection] trait RemainsIterator[+T] extends Iterator[T] {
* This method doesn't change the state of the iterator.
*/
def remaining: Int
+
+ /** For most collections, this is a cheap operation.
+ * Exceptions can override this method.
+ */
+ def isRemainingCheap = true
}
/** Augments iterators with additional methods, mostly transformers,
* assuming they iterate an iterable collection.
*
- * @param T type of the elements iterated.
- * @param IterRepr iterator type.
+ * @tparam T type of the elements iterated.
*/
private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[T] {
@@ -111,7 +116,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = pbf(repr)
- cb.sizeHint(remaining)
+ if (isRemainingCheap) cb.sizeHint(remaining)
while (hasNext) cb += f(next)
cb
}
@@ -136,7 +141,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
}
def copy2builder[U >: T, Coll, Bld <: Builder[U, Coll]](b: Bld): Bld = {
- b.sizeHint(remaining)
+ if (isRemainingCheap) b.sizeHint(remaining)
while (hasNext) b += next
b
}
@@ -178,14 +183,14 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def drop2combiner[U >: T, This](n: Int, cb: Combiner[U, This]): Combiner[U, This] = {
drop(n)
- cb.sizeHint(remaining)
+ if (isRemainingCheap) cb.sizeHint(remaining)
while (hasNext) cb += next
cb
}
def slice2combiner[U >: T, This](from: Int, until: Int, cb: Combiner[U, This]): Combiner[U, This] = {
drop(from)
- var left = math.max(until - from, 0)
+ var left = scala.math.max(until - from, 0)
cb.sizeHint(left)
while (left > 0) {
cb += next
@@ -196,7 +201,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def splitAt2combiners[U >: T, This](at: Int, before: Combiner[U, This], after: Combiner[U, This]) = {
before.sizeHint(at)
- after.sizeHint(remaining - at)
+ if (isRemainingCheap) after.sizeHint(remaining - at)
var left = at
while (left > 0) {
before += next
@@ -222,7 +227,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
val curr = next
if (p(curr)) before += curr
else {
- after.sizeHint(remaining + 1)
+ if (isRemainingCheap) after.sizeHint(remaining + 1)
after += curr
isBefore = false
}
@@ -262,7 +267,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
}
def zip2combiner[U >: T, S, That](otherpit: RemainsIterator[S], cb: Combiner[(U, S), That]): Combiner[(U, S), That] = {
- cb.sizeHint(remaining min otherpit.remaining)
+ if (isRemainingCheap && otherpit.isRemainingCheap) cb.sizeHint(remaining min otherpit.remaining)
while (hasNext && otherpit.hasNext) {
cb += ((next, otherpit.next))
}
@@ -270,7 +275,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
}
def zipAll2combiner[U >: T, S, That](that: RemainsIterator[S], thiselem: U, thatelem: S, cb: Combiner[(U, S), That]): Combiner[(U, S), That] = {
- cb.sizeHint(remaining max that.remaining)
+ if (isRemainingCheap && that.isRemainingCheap) cb.sizeHint(remaining max that.remaining)
while (this.hasNext && that.hasNext) cb += ((this.next, that.next))
while (this.hasNext) cb += ((this.next, thatelem))
while (that.hasNext) cb += ((thiselem, that.next))
@@ -329,7 +334,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
/* transformers */
def reverse2combiner[U >: T, This](cb: Combiner[U, This]): Combiner[U, This] = {
- cb.sizeHint(remaining)
+ if (isRemainingCheap) cb.sizeHint(remaining)
var lst = List[T]()
while (hasNext) lst ::= next
while (lst != Nil) {
@@ -341,7 +346,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
def reverseMap2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = cbf(repr)
- cb.sizeHint(remaining)
+ if (isRemainingCheap) cb.sizeHint(remaining)
var lst = List[S]()
while (hasNext) lst ::= f(next)
while (lst != Nil) {
@@ -353,7 +358,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
def updated2combiner[U >: T, That](index: Int, elem: U, cb: Combiner[U, That]): Combiner[U, That] = {
//val cb = cbf(repr)
- cb.sizeHint(remaining)
+ if (isRemainingCheap) cb.sizeHint(remaining)
var j = 0
while (hasNext) {
if (j == index) {
@@ -371,7 +376,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
/** Parallel iterators allow splitting and provide a `remaining` method to
* obtain the number of elements remaining in the iterator.
*
- * @param T type of the elements iterated.
+ * @tparam T type of the elements iterated.
*/
trait IterableSplitter[+T]
extends AugmentedIterableIterator[T]
@@ -381,11 +386,21 @@ extends AugmentedIterableIterator[T]
{
self =>
+ var signalDelegate: Signalling = IdleSignalling
+
/** Creates a copy of this iterator. */
def dup: IterableSplitter[T]
def split: Seq[IterableSplitter[T]]
+ def splitWithSignalling: Seq[IterableSplitter[T]] = {
+ val pits = split
+ pits foreach { _.signalDelegate = signalDelegate }
+ pits
+ }
+
+ def shouldSplitFurther[S](coll: ParIterable[S], parallelismLevel: Int) = remaining > thresholdFromSize(coll.size, parallelismLevel)
+
/** The number of elements this iterator has yet to traverse. This method
* doesn't change the state of the iterator.
*
@@ -421,7 +436,6 @@ self =>
/* iterator transformers */
class Taken(taken: Int) extends IterableSplitter[T] {
- var signalDelegate = self.signalDelegate
var remaining = taken min self.remaining
def hasNext = remaining > 0
def next = { remaining -= 1; self.next }
@@ -450,7 +464,7 @@ self =>
override def slice(from1: Int, until1: Int): IterableSplitter[T] = newSliceInternal(newTaken(until1), from1)
class Mapped[S](f: T => S) extends IterableSplitter[S] {
- var signalDelegate = self.signalDelegate
+ signalDelegate = self.signalDelegate
def hasNext = self.hasNext
def next = f(self.next)
def remaining = self.remaining
@@ -461,7 +475,7 @@ self =>
override def map[S](f: T => S) = new Mapped(f)
class Appended[U >: T, PI <: IterableSplitter[U]](protected val that: PI) extends IterableSplitter[U] {
- var signalDelegate = self.signalDelegate
+ signalDelegate = self.signalDelegate
protected var curr: IterableSplitter[U] = self
def hasNext = if (curr.hasNext) true else if (curr eq self) {
curr = that
@@ -480,7 +494,7 @@ self =>
def appendParIterable[U >: T, PI <: IterableSplitter[U]](that: PI) = new Appended[U, PI](that)
class Zipped[S](protected val that: SeqSplitter[S]) extends IterableSplitter[(T, S)] {
- var signalDelegate = self.signalDelegate
+ signalDelegate = self.signalDelegate
def hasNext = self.hasNext && that.hasNext
def next = (self.next, that.next)
def remaining = self.remaining min that.remaining
@@ -497,7 +511,7 @@ self =>
class ZippedAll[U >: T, S](protected val that: SeqSplitter[S], protected val thiselem: U, protected val thatelem: S)
extends IterableSplitter[(U, S)] {
- var signalDelegate = self.signalDelegate
+ signalDelegate = self.signalDelegate
def hasNext = self.hasNext || that.hasNext
def next = if (self.hasNext) {
if (that.hasNext) (self.next, that.next)
@@ -522,7 +536,7 @@ self =>
/** Parallel sequence iterators allow splitting into arbitrary subsets.
*
- * @param T type of the elements iterated.
+ * @tparam T type of the elements iterated.
*/
trait SeqSplitter[+T]
extends IterableSplitter[T]
@@ -534,6 +548,18 @@ self =>
def split: Seq[SeqSplitter[T]]
def psplit(sizes: Int*): Seq[SeqSplitter[T]]
+ override def splitWithSignalling: Seq[SeqSplitter[T]] = {
+ val pits = split
+ pits foreach { _.signalDelegate = signalDelegate }
+ pits
+ }
+
+ def psplitWithSignalling(sizes: Int*): Seq[SeqSplitter[T]] = {
+ val pits = psplit(sizes: _*)
+ pits foreach { _.signalDelegate = signalDelegate }
+ pits
+ }
+
/** The number of elements this iterator has yet to traverse. This method
* doesn't change the state of the iterator. Unlike the version of this method in the supertrait,
* method `remaining` in `ParSeqLike.this.ParIterator` must return an exact number
@@ -626,13 +652,13 @@ self =>
def reverse: SeqSplitter[T] = {
val pa = mutable.ParArray.fromTraversables(self).reverse
- new pa.ParArrayIterator with pa.SCPI {
+ new pa.ParArrayIterator {
override def reverse = self
}
}
class Patched[U >: T](from: Int, patch: SeqSplitter[U], replaced: Int) extends SeqSplitter[U] {
- var signalDelegate = self.signalDelegate
+ signalDelegate = self.signalDelegate
private[this] val trio = {
val pits = self.psplit(from, replaced, self.remaining - from - replaced)
(pits(0).appendParSeq[U, SeqSplitter[U]](patch)) appendParSeq pits(2)
diff --git a/src/library/scala/collection/parallel/Splitter.scala b/src/library/scala/collection/parallel/Splitter.scala
index ee10ea7..dc49bcf 100644
--- a/src/library/scala/collection/parallel/Splitter.scala
+++ b/src/library/scala/collection/parallel/Splitter.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/collection/parallel/TaskSupport.scala b/src/library/scala/collection/parallel/TaskSupport.scala
index 2080025..9bed5be 100644
--- a/src/library/scala/collection/parallel/TaskSupport.scala
+++ b/src/library/scala/collection/parallel/TaskSupport.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,15 +11,83 @@ package scala.collection.parallel
-
-
-
-
+import java.util.concurrent.ThreadPoolExecutor
+import scala.concurrent.forkjoin.ForkJoinPool
+import scala.concurrent.ExecutionContext
+
+
+
+/** A trait implementing the scheduling of
+ * a parallel collection operation.
+ *
+ * Parallel collections are modular in the way operations are scheduled. Each
+ * parallel collection is parametrized with a task support object which is
+ * responsible for scheduling and load-balancing tasks to processors.
+ *
+ * A task support object can be changed in a parallel collection after it has
+ * been created, but only during a quiescent period, i.e. while there are no
+ * concurrent invocations to parallel collection methods.
+ *
+ * There are currently a few task support implementations available for
+ * parallel collections. The [[scala.collection.parallel.ForkJoinTaskSupport]]
+ * uses a fork-join pool
+ * internally and is used by default on JVM 1.6 or greater. The less efficient
+ * [[scala.collection.parallel.ThreadPoolTaskSupport]] is a fallback for JVM
+ * 1.5 and JVMs that do not support the fork join pools. The
+ * [[scala.collection.parallel.ExecutionContextTaskSupport]] uses the
+ * default execution context implementation found in scala.concurrent, and it
+ * reuses the thread pool used in scala.concurrent (this is either a fork join
+ * pool or a thread pool executor, depending on the JVM version). The
+ * execution context task support is set to each parallel collection by
+ * default, so parallel collections reuse the same fork-join pool as the
+ * future API.
+ *
+ * Here is a way to change the task support of a parallel collection:
+ *
+ * {{{
+ * import scala.collection.parallel._
+ * val pc = mutable.ParArray(1, 2, 3)
+ * pc.tasksupport = new ForkJoinTaskSupport(
+ * new scala.concurrent.forkjoin.ForkJoinPool(2))
+ * }}}
+ *
+ * @see [[http://docs.scala-lang.org/overviews/parallel-collections/configuration.html Configuring Parallel Collections]] section
+ * on the parallel collection's guide for more information.
+ */
trait TaskSupport extends Tasks
-private[collection] class ForkJoinTaskSupport extends TaskSupport with AdaptiveWorkStealingForkJoinTasks
-private[collection] class ThreadPoolTaskSupport extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks
+/** A task support that uses a fork join pool to schedule tasks.
+ *
+ * @see [[scala.collection.parallel.TaskSupport]] for more information.
+ */
+class ForkJoinTaskSupport(val environment: ForkJoinPool = ForkJoinTasks.defaultForkJoinPool)
+extends TaskSupport with AdaptiveWorkStealingForkJoinTasks
+
+/** A task support that uses a thread pool executor to schedule tasks.
+ *
+ * @see [[scala.collection.parallel.TaskSupport]] for more information.
+ */
+class ThreadPoolTaskSupport(val environment: ThreadPoolExecutor = ThreadPoolTasks.defaultThreadPool)
+extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks
+
+
+/** A task support that uses an execution context to schedule tasks.
+ *
+ * It can be used with the default execution context implementation in the
+ * `scala.concurrent` package. It internally forwards the call to either a
+ * forkjoin based task support or a thread pool executor one, depending on
+ * what the execution context uses.
+ *
+ * By default, parallel collections are parametrized with this task support
+ * object, so parallel collections share the same execution context backend
+ * as the rest of the `scala.concurrent` package.
+ *
+ * @see [[scala.collection.parallel.TaskSupport]] for more information.
+ */
+class ExecutionContextTaskSupport(val environment: ExecutionContext = scala.concurrent.ExecutionContext.global)
+extends TaskSupport with ExecutionContextTasks
+
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index 873291f..cec9e29 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -1,114 +1,114 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.collection.parallel
+import java.util.concurrent.ThreadPoolExecutor
import scala.concurrent.forkjoin._
+import scala.concurrent.ExecutionContext
import scala.util.control.Breaks._
-
-import annotation.unchecked.uncheckedVariance
+import scala.annotation.unchecked.uncheckedVariance
+trait Task[R, +Tp] {
+ type Result = R
-/** A trait that declares task execution capabilities used
- * by parallel collections.
- */
-trait Tasks {
-
- private[parallel] val debugMessages = collection.mutable.ArrayBuffer[String]()
+ def repr = this.asInstanceOf[Tp]
- private[parallel] def debuglog(s: String) = synchronized {
- debugMessages += s
- }
-
- trait Task[R, +Tp] {
- type Result = R
-
- def repr = this.asInstanceOf[Tp]
-
- /** Body of the task - non-divisible unit of work done by this task.
- * Optionally is provided with the result from the previous completed task
- * or `None` if there was no previous task (or the previous task is uncompleted or unknown).
- */
- def leaf(result: Option[R])
+ /** Body of the task - non-divisible unit of work done by this task.
+ * Optionally is provided with the result from the previous completed task
+ * or `None` if there was no previous task (or the previous task is uncompleted or unknown).
+ */
+ def leaf(result: Option[R])
- /** A result that can be accessed once the task is completed. */
- var result: R
+ /** A result that can be accessed once the task is completed. */
+ var result: R
- /** Decides whether or not this task should be split further. */
- def shouldSplitFurther: Boolean
+ /** Decides whether or not this task should be split further. */
+ def shouldSplitFurther: Boolean
- /** Splits this task into a list of smaller tasks. */
- private[parallel] def split: Seq[Task[R, Tp]]
+ /** Splits this task into a list of smaller tasks. */
+ private[parallel] def split: Seq[Task[R, Tp]]
- /** Read of results of `that` task and merge them into results of this one. */
- private[parallel] def merge(that: Tp @uncheckedVariance) {}
+ /** Read of results of `that` task and merge them into results of this one. */
+ private[parallel] def merge(that: Tp @uncheckedVariance) {}
- // exception handling mechanism
- @volatile var throwable: Throwable = null
- def forwardThrowable() = if (throwable != null) throw throwable
+ // exception handling mechanism
+ @volatile var throwable: Throwable = null
+ def forwardThrowable() = if (throwable != null) throw throwable
- // tries to do the leaf computation, storing the possible exception
- private[parallel] def tryLeaf(lastres: Option[R]) {
- try {
- tryBreakable {
- leaf(lastres)
- result = result // ensure that effects of `leaf` are visible to readers of `result`
- } catchBreak {
- signalAbort
- }
- } catch {
- case thr: Exception =>
- result = result // ensure that effects of `leaf` are visible
- throwable = thr
- signalAbort
+ // tries to do the leaf computation, storing the possible exception
+ private[parallel] def tryLeaf(lastres: Option[R]) {
+ try {
+ tryBreakable {
+ leaf(lastres)
+ result = result // ensure that effects of `leaf` are visible to readers of `result`
+ } catchBreak {
+ signalAbort
}
+ } catch {
+ case thr: Exception =>
+ result = result // ensure that effects of `leaf` are visible
+ throwable = thr
+ signalAbort
}
+ }
- private[parallel] def tryMerge(t: Tp @uncheckedVariance) {
- val that = t.asInstanceOf[Task[R, Tp]]
- val local = result // ensure that any effects of modifying `result` are detected
- // checkMerge(that)
- if (this.throwable == null && that.throwable == null) merge(t)
- mergeThrowables(that)
- }
+ private[parallel] def tryMerge(t: Tp @uncheckedVariance) {
+ val that = t.asInstanceOf[Task[R, Tp]]
+ val local = result // ensure that any effects of modifying `result` are detected
+ // checkMerge(that)
+ if (this.throwable == null && that.throwable == null) merge(t)
+ mergeThrowables(that)
+ }
- private def checkMerge(that: Task[R, Tp] @uncheckedVariance) {
- if (this.throwable == null && that.throwable == null && (this.result == null || that.result == null)) {
- println("This: " + this + ", thr=" + this.throwable + "; merged with " + that + ", thr=" + that.throwable)
- } else if (this.throwable != null || that.throwable != null) {
- println("merging this thr: " + this.throwable + " with " + that + ", thr=" + that.throwable)
- }
+ private def checkMerge(that: Task[R, Tp] @uncheckedVariance) {
+ if (this.throwable == null && that.throwable == null && (this.result == null || that.result == null)) {
+ println("This: " + this + ", thr=" + this.throwable + "; merged with " + that + ", thr=" + that.throwable)
+ } else if (this.throwable != null || that.throwable != null) {
+ println("merging this: " + this + " with thr: " + this.throwable + " with " + that + ", thr=" + that.throwable)
}
+ }
- private[parallel] def mergeThrowables(that: Task[_, _]) {
- if (this.throwable != null && that.throwable != null) {
- // merge exceptions, since there were multiple exceptions
- this.throwable = this.throwable alongWith that.throwable
- } else if (that.throwable != null) this.throwable = that.throwable
+ private[parallel] def mergeThrowables(that: Task[_, _]) {
+ if (this.throwable != null && that.throwable != null) {
+ // merge exceptions, since there were multiple exceptions
+ this.throwable = this.throwable alongWith that.throwable
+ } else if (that.throwable != null) this.throwable = that.throwable
else this.throwable = this.throwable
- }
+ }
+
+ // override in concrete task implementations to signal abort to other tasks
+ private[parallel] def signalAbort() {}
+}
+
- // override in concrete task implementations to signal abort to other tasks
- private[parallel] def signalAbort() {}
+/** A trait that declares task execution capabilities used
+ * by parallel collections.
+ */
+trait Tasks {
+
+ private[parallel] val debugMessages = scala.collection.mutable.ArrayBuffer[String]()
+
+ private[parallel] def debuglog(s: String) = synchronized {
+ debugMessages += s
}
- trait TaskImpl[R, +Tp] {
+ trait WrappedTask[R, +Tp] {
/** the body of this task - what it executes, how it gets split and how results are merged. */
val body: Task[R, Tp]
- def split: Seq[TaskImpl[R, Tp]]
+ def split: Seq[WrappedTask[R, Tp]]
/** Code that gets called after the task gets started - it may spawn other tasks instead of calling `leaf`. */
def compute()
/** Start task. */
@@ -118,7 +118,7 @@ trait Tasks {
/** Try to cancel the task.
* @return `true` if cancellation is successful.
*/
- def tryCancel: Boolean
+ def tryCancel(): Boolean
/** If the task has been cancelled successfully, those syncing on it may
* automatically be notified, depending on the implementation. If they
* aren't, this release method should be called after processing the
@@ -129,13 +129,10 @@ trait Tasks {
def release() {}
}
- protected def newTaskImpl[R, Tp](b: Task[R, Tp]): TaskImpl[R, Tp]
-
/* task control */
- // safe to assume it will always have the same type,
- // because the `tasksupport` in parallel iterable is final
- var environment: AnyRef
+ /** The type of the environment is more specific in the implementations. */
+ val environment: AnyRef
/** Executes a task and returns a future. Forwards an exception if some task threw it. */
def execute[R, Tp](fjtask: Task[R, Tp]): () => R
@@ -155,18 +152,25 @@ trait Tasks {
*/
trait AdaptiveWorkStealingTasks extends Tasks {
- trait TaskImpl[R, Tp] extends super.TaskImpl[R, Tp] {
- @volatile var next: TaskImpl[R, Tp] = null
+ trait WrappedTask[R, Tp] extends super.WrappedTask[R, Tp] {
+ @volatile var next: WrappedTask[R, Tp] = null
@volatile var shouldWaitFor = true
- def split: Seq[TaskImpl[R, Tp]]
+ def split: Seq[WrappedTask[R, Tp]]
- def compute() = if (body.shouldSplitFurther) internal else body.tryLeaf(None)
+ def compute() = if (body.shouldSplitFurther) {
+ internal()
+ release()
+ } else {
+ body.tryLeaf(None)
+ release()
+ }
def internal() = {
var last = spawnSubtasks()
last.body.tryLeaf(None)
+ last.release()
body.result = last.body.result
body.throwable = last.body.throwable
@@ -174,13 +178,13 @@ trait AdaptiveWorkStealingTasks extends Tasks {
// val lastresult = Option(last.body.result)
val beforelast = last
last = last.next
- if (last.tryCancel) {
+ if (last.tryCancel()) {
// println("Done with " + beforelast.body + ", next direct is " + last.body)
last.body.tryLeaf(Some(body.result))
- last.release
+ last.release()
} else {
// println("Done with " + beforelast.body + ", next sync is " + last.body)
- last.sync
+ last.sync()
}
// println("Merging " + body + " with " + last.body)
body.tryMerge(last.body.repr)
@@ -188,15 +192,15 @@ trait AdaptiveWorkStealingTasks extends Tasks {
}
def spawnSubtasks() = {
- var last: TaskImpl[R, Tp] = null
- var head: TaskImpl[R, Tp] = this
+ var last: WrappedTask[R, Tp] = null
+ var head: WrappedTask[R, Tp] = this
do {
val subtasks = head.split
head = subtasks.head
for (t <- subtasks.tail.reverse) {
t.next = last
last = t
- t.start
+ t.start()
}
} while (head.body.shouldSplitFurther);
head.next = last
@@ -215,7 +219,7 @@ trait AdaptiveWorkStealingTasks extends Tasks {
}
// specialize ctor
- protected def newTaskImpl[R, Tp](b: Task[R, Tp]): TaskImpl[R, Tp]
+ protected def newWrappedTask[R, Tp](b: Task[R, Tp]): WrappedTask[R, Tp]
}
@@ -224,7 +228,7 @@ trait AdaptiveWorkStealingTasks extends Tasks {
trait ThreadPoolTasks extends Tasks {
import java.util.concurrent._
- trait TaskImpl[R, +Tp] extends Runnable with super.TaskImpl[R, Tp] {
+ trait WrappedTask[R, +Tp] extends Runnable with super.WrappedTask[R, Tp] {
// initially, this is null
// once the task is started, this future is set and used for `sync`
// utb: var future: Future[_] = null
@@ -235,7 +239,7 @@ trait ThreadPoolTasks extends Tasks {
// debuglog("Starting " + body)
// utb: future = executor.submit(this)
executor.synchronized {
- incrTasks
+ incrTasks()
executor.submit(this)
}
}
@@ -249,9 +253,9 @@ trait ThreadPoolTasks extends Tasks {
//assert(executor.getCorePoolSize == (coresize + 1))
}
}
- if (!completed) this.wait
+ while (!completed) this.wait
}
- def tryCancel = synchronized {
+ def tryCancel() = synchronized {
// utb: future.cancel(false)
if (!owned) {
// debuglog("Cancelling " + body)
@@ -259,7 +263,7 @@ trait ThreadPoolTasks extends Tasks {
true
} else false
}
- def run = {
+ def run() = {
// utb: compute
var isOkToRun = false
synchronized {
@@ -270,25 +274,25 @@ trait ThreadPoolTasks extends Tasks {
}
if (isOkToRun) {
// debuglog("Running body of " + body)
- compute
- release
+ compute()
} else {
// just skip
// debuglog("skipping body of " + body)
}
}
- override def release = synchronized {
+ override def release() = synchronized {
+ //println("releasing: " + this + ", body: " + this.body)
completed = true
executor.synchronized {
- decrTasks
+ decrTasks()
}
this.notifyAll
}
}
- protected def newTaskImpl[R, Tp](b: Task[R, Tp]): TaskImpl[R, Tp]
+ protected def newWrappedTask[R, Tp](b: Task[R, Tp]): WrappedTask[R, Tp]
- var environment: AnyRef = ThreadPoolTasks.defaultThreadPool
+ val environment: ThreadPoolExecutor
def executor = environment.asInstanceOf[ThreadPoolExecutor]
def queue = executor.getQueue.asInstanceOf[LinkedBlockingQueue[Runnable]]
@volatile var totaltasks = 0
@@ -302,25 +306,25 @@ trait ThreadPoolTasks extends Tasks {
}
def execute[R, Tp](task: Task[R, Tp]): () => R = {
- val t = newTaskImpl(task)
+ val t = newWrappedTask(task)
// debuglog("-----------> Executing without wait: " + task)
- t.start
+ t.start()
() => {
- t.sync
+ t.sync()
t.body.forwardThrowable
t.body.result
}
}
def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = {
- val t = newTaskImpl(task)
+ val t = newWrappedTask(task)
// debuglog("-----------> Executing with wait: " + task)
- t.start
+ t.start()
- t.sync
+ t.sync()
t.body.forwardThrowable
t.body.result
}
@@ -355,10 +359,11 @@ object ThreadPoolTasks {
/** An implementation of tasks objects based on the Java thread pooling API and synchronization using futures. */
+ at deprecated("This implementation is not used.", "2.10.0")
trait FutureThreadPoolTasks extends Tasks {
import java.util.concurrent._
- trait TaskImpl[R, +Tp] extends Runnable with super.TaskImpl[R, Tp] {
+ trait WrappedTask[R, +Tp] extends Runnable with super.WrappedTask[R, Tp] {
@volatile var future: Future[_] = null
def start() = {
@@ -369,17 +374,17 @@ trait FutureThreadPoolTasks extends Tasks {
def sync() = future.get
def tryCancel = false
def run = {
- compute
+ compute()
}
}
- protected def newTaskImpl[R, Tp](b: Task[R, Tp]): TaskImpl[R, Tp]
+ protected def newWrappedTask[R, Tp](b: Task[R, Tp]): WrappedTask[R, Tp]
- var environment: AnyRef = FutureThreadPoolTasks.defaultThreadPool
+ val environment: AnyRef = FutureThreadPoolTasks.defaultThreadPool
def executor = environment.asInstanceOf[ThreadPoolExecutor]
def execute[R, Tp](task: Task[R, Tp]): () => R = {
- val t = newTaskImpl(task)
+ val t = newWrappedTask(task)
// debuglog("-----------> Executing without wait: " + task)
t.start
@@ -392,7 +397,7 @@ trait FutureThreadPoolTasks extends Tasks {
}
def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = {
- val t = newTaskImpl(task)
+ val t = newWrappedTask(task)
// debuglog("-----------> Executing with wait: " + task)
t.start
@@ -434,26 +439,26 @@ trait HavingForkJoinPool {
*/
trait ForkJoinTasks extends Tasks with HavingForkJoinPool {
- trait TaskImpl[R, +Tp] extends RecursiveAction with super.TaskImpl[R, Tp] {
+ trait WrappedTask[R, +Tp] extends RecursiveAction with super.WrappedTask[R, Tp] {
def start() = fork
def sync() = join
def tryCancel = tryUnfork
}
// specialize ctor
- protected def newTaskImpl[R, Tp](b: Task[R, Tp]): TaskImpl[R, Tp]
+ protected def newWrappedTask[R, Tp](b: Task[R, Tp]): WrappedTask[R, Tp]
/** The fork/join pool of this collection.
*/
def forkJoinPool: ForkJoinPool = environment.asInstanceOf[ForkJoinPool]
- var environment: AnyRef = ForkJoinTasks.defaultForkJoinPool
+ val environment: ForkJoinPool
/** Executes a task and does not wait for it to finish - instead returns a future.
*
* $fjdispatch
*/
def execute[R, Tp](task: Task[R, Tp]): () => R = {
- val fjtask = newTaskImpl(task)
+ val fjtask = newWrappedTask(task)
if (Thread.currentThread.isInstanceOf[ForkJoinWorkerThread]) {
fjtask.fork
@@ -476,7 +481,7 @@ trait ForkJoinTasks extends Tasks with HavingForkJoinPool {
* @return the result of the task
*/
def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = {
- val fjtask = newTaskImpl(task)
+ val fjtask = newWrappedTask(task)
if (Thread.currentThread.isInstanceOf[ForkJoinWorkerThread]) {
fjtask.fork
@@ -506,24 +511,49 @@ object ForkJoinTasks {
*/
trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkStealingTasks {
- class TaskImpl[R, Tp](val body: Task[R, Tp])
- extends super[ForkJoinTasks].TaskImpl[R, Tp] with super[AdaptiveWorkStealingTasks].TaskImpl[R, Tp] {
- def split = body.split.map(b => newTaskImpl(b))
+ class WrappedTask[R, Tp](val body: Task[R, Tp])
+ extends super[ForkJoinTasks].WrappedTask[R, Tp] with super[AdaptiveWorkStealingTasks].WrappedTask[R, Tp] {
+ def split = body.split.map(b => newWrappedTask(b))
}
- def newTaskImpl[R, Tp](b: Task[R, Tp]) = new TaskImpl[R, Tp](b)
+ def newWrappedTask[R, Tp](b: Task[R, Tp]) = new WrappedTask[R, Tp](b)
}
trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveWorkStealingTasks {
- class TaskImpl[R, Tp](val body: Task[R, Tp])
- extends super[ThreadPoolTasks].TaskImpl[R, Tp] with super[AdaptiveWorkStealingTasks].TaskImpl[R, Tp] {
- def split = body.split.map(b => newTaskImpl(b))
+ class WrappedTask[R, Tp](val body: Task[R, Tp])
+ extends super[ThreadPoolTasks].WrappedTask[R, Tp] with super[AdaptiveWorkStealingTasks].WrappedTask[R, Tp] {
+ def split = body.split.map(b => newWrappedTask(b))
+ }
+
+ def newWrappedTask[R, Tp](b: Task[R, Tp]) = new WrappedTask[R, Tp](b)
+
+}
+
+
+trait ExecutionContextTasks extends Tasks {
+
+ def executionContext = environment
+
+ val environment: ExecutionContext
+
+ // this part is a hack which allows switching
+ val driver: Tasks = executionContext match {
+ case eci: scala.concurrent.impl.ExecutionContextImpl => eci.executor match {
+ case fjp: ForkJoinPool => new ForkJoinTaskSupport(fjp)
+ case tpe: ThreadPoolExecutor => new ThreadPoolTaskSupport(tpe)
+ case _ => ???
+ }
+ case _ => ???
}
- def newTaskImpl[R, Tp](b: Task[R, Tp]) = new TaskImpl[R, Tp](b)
+ def execute[R, Tp](task: Task[R, Tp]): () => R = driver execute task
+
+ def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = driver executeAndWaitResult task
+
+ def parallelismLevel = driver.parallelismLevel
}
@@ -534,3 +564,6 @@ trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveW
+
+
+
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index 85084a9..b25230b 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,6 +8,8 @@
package scala.collection.parallel.immutable
+
+
import scala.collection.parallel.ParMapLike
import scala.collection.parallel.Combiner
import scala.collection.parallel.IterableSplitter
@@ -18,7 +20,10 @@ import scala.collection.generic.CanCombineFrom
import scala.collection.generic.GenericParMapTemplate
import scala.collection.generic.GenericParMapCompanion
import scala.collection.immutable.{ HashMap, TrieIterator }
-import annotation.unchecked.uncheckedVariance
+import scala.annotation.unchecked.uncheckedVariance
+import scala.collection.parallel.Task
+
+
/** Immutable parallel hash map, based on hash tries.
*
@@ -31,8 +36,10 @@ import annotation.unchecked.uncheckedVariance
*
* @author Aleksandar Prokopec
* @since 2.9
- *
- * @define Coll immutable.ParHashMap
+ * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tries Scala's Parallel Collections Library overview]]
+ * section on Parallel Hash Tries for more information.
+ *
+ * @define Coll `immutable.ParHashMap`
* @define coll immutable parallel hash map
*/
@SerialVersionUID(1L)
@@ -52,7 +59,7 @@ self =>
protected[this] override def newCombiner = HashMapCombiner[K, V]
- def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size) with SCPI
+ def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size)
override def seq = trie
@@ -69,11 +76,8 @@ self =>
case None => newc
}
- type SCPI = SignalContextPassingIterator[ParHashMapIterator]
-
class ParHashMapIterator(var triter: Iterator[(K, V @uncheckedVariance)], val sz: Int)
- extends super.ParIterator {
- self: SignalContextPassingIterator[ParHashMapIterator] =>
+ extends IterableSplitter[(K, V)] {
var i = 0
def dup = triter match {
case t: TrieIterator[_] =>
@@ -84,26 +88,26 @@ self =>
dupFromIterator(buff.iterator)
}
private def dupFromIterator(it: Iterator[(K, V @uncheckedVariance)]) = {
- val phit = new ParHashMapIterator(it, sz) with SCPI
+ val phit = new ParHashMapIterator(it, sz)
phit.i = i
phit
}
- def split: Seq[ParIterator] = if (remaining < 2) Seq(this) else triter match {
+ def split: Seq[IterableSplitter[(K, V)]] = if (remaining < 2) Seq(this) else triter match {
case t: TrieIterator[_] =>
val previousRemaining = remaining
val ((fst, fstlength), snd) = t.split
val sndlength = previousRemaining - fstlength
Seq(
- new ParHashMapIterator(fst, fstlength) with SCPI,
- new ParHashMapIterator(snd, sndlength) with SCPI
+ new ParHashMapIterator(fst, fstlength),
+ new ParHashMapIterator(snd, sndlength)
)
case _ =>
// iterator of the collision map case
val buff = triter.toBuffer
val (fp, sp) = buff.splitAt(buff.length / 2)
- Seq(fp, sp) map { b => new ParHashMapIterator(b.iterator, b.length) with SCPI }
+ Seq(fp, sp) map { b => new ParHashMapIterator(b.iterator, b.length) }
}
- def next: (K, V) = {
+ def next(): (K, V) = {
i += 1
val r = triter.next
r
@@ -115,6 +119,8 @@ self =>
override def toString = "HashTrieIterator(" + sz + ")"
}
+ /* debug */
+
private[parallel] def printDebugInfo() {
println("Parallel hash trie")
println("Top level inner trie type: " + trie.getClass)
@@ -134,7 +140,7 @@ self =>
/** $factoryInfo
- * @define Coll immutable.ParHashMap
+ * @define Coll `immutable.ParHashMap`
* @define coll immutable parallel hash map
*/
object ParHashMap extends ParMapFactory[ParHashMap] {
@@ -153,10 +159,9 @@ object ParHashMap extends ParMapFactory[ParHashMap] {
private[parallel] abstract class HashMapCombiner[K, V]
-extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) {
+extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), HashMapCombiner[K, V]](HashMapCombiner.rootsize) {
//self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
import HashMapCombiner._
- import collection.parallel.tasksupport._
val emptyTrie = HashMap.empty[K, V]
def +=(elem: (K, V)) = {
@@ -176,7 +181,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has
val bucks = buckets.filter(_ != null).map(_.headPtr)
val root = new Array[HashMap[K, V]](bucks.length)
- executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length))
+ combinerTaskSupport.executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length))
var bitmap = 0
var i = 0
@@ -198,7 +203,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has
val bucks = buckets.filter(_ != null).map(_.headPtr)
val root = new Array[HashMap[K, AnyRef]](bucks.length)
- executeAndWaitResult(new CreateGroupedTrie(cbf, bucks, root, 0, bucks.length))
+ combinerTaskSupport.executeAndWaitResult(new CreateGroupedTrie(cbf, bucks, root, 0, bucks.length))
var bitmap = 0
var i = 0
@@ -259,7 +264,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has
val fp = howmany / 2
List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
class CreateGroupedTrie[Repr](cbf: () => Combiner[V, Repr], bucks: Array[Unrolled[(K, V)]], root: Array[HashMap[K, AnyRef]], offset: Int, howmany: Int)
@@ -301,23 +306,29 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has
unrolled = unrolled.next
}
- evaluateCombiners(trie)
- trie.asInstanceOf[HashMap[K, Repr]]
+ evaluateCombiners(trie).asInstanceOf[HashMap[K, Repr]]
}
- private def evaluateCombiners(trie: HashMap[K, Combiner[V, Repr]]): Unit = trie match {
+ private def evaluateCombiners(trie: HashMap[K, Combiner[V, Repr]]): HashMap[K, Repr] = trie match {
case hm1: HashMap.HashMap1[_, _] =>
- hm1.asInstanceOf[HashMap.HashMap1[K, Repr]].value = hm1.value.result
- hm1.kv = null
+ val evaledvalue = hm1.value.result
+ new HashMap.HashMap1[K, Repr](hm1.key, hm1.hash, evaledvalue, null)
case hmc: HashMap.HashMapCollision1[_, _] =>
- hmc.asInstanceOf[HashMap.HashMapCollision1[K, Repr]].kvs = hmc.kvs map { p => (p._1, p._2.result) }
- case htm: HashMap.HashTrieMap[_, _] =>
- for (hm <- htm.elems) evaluateCombiners(hm)
+ val evaledkvs = hmc.kvs map { p => (p._1, p._2.result) }
+ new HashMap.HashMapCollision1[K, Repr](hmc.hash, evaledkvs)
+ case htm: HashMap.HashTrieMap[k, v] =>
+ var i = 0
+ while (i < htm.elems.length) {
+ htm.elems(i) = evaluateCombiners(htm.elems(i)).asInstanceOf[HashMap[k, v]]
+ i += 1
+ }
+ htm.asInstanceOf[HashMap[K, Repr]]
+ case empty => empty.asInstanceOf[HashMap[K, Repr]]
}
def split = {
val fp = howmany / 2
List(new CreateGroupedTrie(cbf, bucks, root, offset, fp), new CreateGroupedTrie(cbf, bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index e3c408e..e7e64eb 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,6 +8,8 @@
package scala.collection.parallel.immutable
+
+
import scala.collection.parallel.ParSetLike
import scala.collection.parallel.Combiner
import scala.collection.parallel.IterableSplitter
@@ -19,6 +21,9 @@ import scala.collection.generic.GenericParTemplate
import scala.collection.generic.GenericParCompanion
import scala.collection.generic.GenericCompanion
import scala.collection.immutable.{ HashSet, TrieIterator }
+import scala.collection.parallel.Task
+
+
/** Immutable parallel hash set, based on hash tries.
*
@@ -30,8 +35,10 @@ import scala.collection.immutable.{ HashSet, TrieIterator }
*
* @author Aleksandar Prokopec
* @since 2.9
+ * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tries Scala's Parallel Collections Library overview]]
+ * section on Parallel Hash Tries for more information.
*
- * @define Coll immutable.ParHashSet
+ * @define Coll `immutable.ParHashSet`
* @define coll immutable parallel hash set
*/
@SerialVersionUID(1L)
@@ -49,7 +56,7 @@ self =>
override def empty: ParHashSet[T] = new ParHashSet[T]
- def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size) with SCPI
+ def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size)
override def seq = trie
@@ -66,11 +73,8 @@ self =>
case None => newc
}
- type SCPI = SignalContextPassingIterator[ParHashSetIterator]
-
class ParHashSetIterator(var triter: Iterator[T], val sz: Int)
- extends super.ParIterator {
- self: SignalContextPassingIterator[ParHashSetIterator] =>
+ extends IterableSplitter[T] {
var i = 0
def dup = triter match {
case t: TrieIterator[_] =>
@@ -81,26 +85,26 @@ self =>
dupFromIterator(buff.iterator)
}
private def dupFromIterator(it: Iterator[T]) = {
- val phit = new ParHashSetIterator(it, sz) with SCPI
+ val phit = new ParHashSetIterator(it, sz)
phit.i = i
phit
}
- def split: Seq[ParIterator] = if (remaining < 2) Seq(this) else triter match {
+ def split: Seq[IterableSplitter[T]] = if (remaining < 2) Seq(this) else triter match {
case t: TrieIterator[_] =>
val previousRemaining = remaining
val ((fst, fstlength), snd) = t.split
val sndlength = previousRemaining - fstlength
Seq(
- new ParHashSetIterator(fst, fstlength) with SCPI,
- new ParHashSetIterator(snd, sndlength) with SCPI
+ new ParHashSetIterator(fst, fstlength),
+ new ParHashSetIterator(snd, sndlength)
)
case _ =>
// iterator of the collision map case
val buff = triter.toBuffer
val (fp, sp) = buff.splitAt(buff.length / 2)
- Seq(fp, sp) map { b => new ParHashSetIterator(b.iterator, b.length) with SCPI }
+ Seq(fp, sp) map { b => new ParHashSetIterator(b.iterator, b.length) }
}
- def next: T = {
+ def next(): T = {
i += 1
triter.next
}
@@ -114,7 +118,7 @@ self =>
/** $factoryInfo
- * @define Coll immutable.ParHashSet
+ * @define Coll `immutable.ParHashSet`
* @define coll immutable parallel hash set
*/
object ParHashSet extends ParSetFactory[ParHashSet] {
@@ -128,10 +132,9 @@ object ParHashSet extends ParSetFactory[ParHashSet] {
private[immutable] abstract class HashSetCombiner[T]
-extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) {
+extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) {
//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
import HashSetCombiner._
- import collection.parallel.tasksupport._
val emptyTrie = HashSet.empty[T]
def +=(elem: T) = {
@@ -151,7 +154,7 @@ extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombine
val bucks = buckets.filter(_ != null).map(_.headPtr)
val root = new Array[HashSet[T]](bucks.length)
- executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length))
+ combinerTaskSupport.executeAndWaitResult(new CreateTrie(bucks, root, 0, bucks.length))
var bitmap = 0
var i = 0
@@ -206,9 +209,8 @@ extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombine
val fp = howmany / 2
List(new CreateTrie(bucks, root, offset, fp), new CreateTrie(bucks, root, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(root.length, parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(root.length, combinerTaskSupport.parallelismLevel)
}
-
}
@@ -218,45 +220,3 @@ object HashSetCombiner {
private[immutable] val rootbits = 5
private[immutable] val rootsize = 1 << 5
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala
index d8c42d7..142f07f 100644
--- a/src/library/scala/collection/parallel/immutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -30,10 +30,11 @@ import scala.collection.GenIterable
* @since 2.9
*/
trait ParIterable[+T]
-extends collection/*.immutable*/.GenIterable[T]
- with collection.parallel.ParIterable[T]
+extends scala.collection/*.immutable*/.GenIterable[T]
+ with scala.collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
- with ParIterableLike[T, ParIterable[T], collection.immutable.Iterable[T]]
+ with ParIterableLike[T, ParIterable[T], scala.collection.immutable.Iterable[T]]
+ with Immutable
{
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala
index a44a8c9..e904a76 100644
--- a/src/library/scala/collection/parallel/immutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParMap.scala
@@ -1,20 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.collection
package parallel.immutable
-
-
-
-
-import scala.collection.immutable.Map
import scala.collection.generic.ParMapFactory
import scala.collection.generic.GenericParMapTemplate
import scala.collection.generic.GenericParMapCompanion
@@ -23,10 +17,6 @@ import scala.collection.parallel.ParMapLike
import scala.collection.parallel.Combiner
import scala.collection.GenMapLike
-
-
-
-
/** A template trait for immutable parallel maps.
*
* $sideeffects
@@ -38,11 +28,11 @@ import scala.collection.GenMapLike
* @since 2.9
*/
trait ParMap[K, +V]
-extends collection/*.immutable*/.GenMap[K, V]
+extends scala.collection/*.immutable*/.GenMap[K, V]
with GenericParMapTemplate[K, V, ParMap]
with parallel.ParMap[K, V]
with ParIterable[(K, V)]
- with ParMapLike[K, V, ParMap[K, V], Map[K, V]]
+ with ParMapLike[K, V, ParMap[K, V], scala.collection.immutable.Map[K, V]]
{
self =>
@@ -53,6 +43,30 @@ self =>
override def stringPrefix = "ParMap"
override def toMap[P, Q](implicit ev: (K, V) <:< (P, Q)): ParMap[P, Q] = this.asInstanceOf[ParMap[P, Q]]
+
+ override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value))
+
+ def + [U >: V](kv: (K, U)): ParMap[K, U]
+
+ /** The same map with a given default function.
+ * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+ *
+ * @param d the function mapping keys to values, used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ def withDefault[U >: V](d: K => U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, d)
+
+ /** The same map with a given default value.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+ *
+ * @param d the function mapping keys to values, used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ def withDefaultValue[U >: V](d: U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, x => d)
+
}
@@ -64,23 +78,15 @@ object ParMap extends ParMapFactory[ParMap] {
implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V]
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ class WithDefault[K, +V](underlying: ParMap[K, V], d: K => V)
+ extends scala.collection.parallel.ParMap.WithDefault[K, V](underlying, d) with ParMap[K, V] {
+ override def empty = new WithDefault(underlying.empty, d)
+ override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d)
+ override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2)
+ override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d)
+ override def withDefault[U >: V](d: K => U): ParMap[K, U] = new WithDefault[K, U](underlying, d)
+ override def withDefaultValue[U >: V](d: U): ParMap[K, U] = new WithDefault[K, U](underlying, x => d)
+ override def seq = underlying.seq.withDefault(d)
+ }
+}
diff --git a/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled b/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled
index fb411ec..5f9c9c3 100644
--- a/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled
+++ b/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -29,7 +29,7 @@ import scala.collection.parallel.ParIterableIterator
* @author Aleksandar Prokopec
* @since 2.9
*
- * @define Coll immutable.ParRange
+ * @define Coll `immutable.ParRange`
* @define coll immutable parallel range
*/
@SerialVersionUID(1L)
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index 2a10458..0c9f82b 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,6 +10,7 @@ package scala.collection.parallel.immutable
import scala.collection.immutable.Range
import scala.collection.parallel.Combiner
+import scala.collection.parallel.SeqSplitter
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.IterableSplitter
import scala.collection.Iterator
@@ -24,8 +25,10 @@ import scala.collection.Iterator
*
* @author Aleksandar Prokopec
* @since 2.9
+ * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_range Scala's Parallel Collections Library overview]]
+ * section on `ParRange` for more information.
*
- * @define Coll immutable.ParRange
+ * @define Coll `immutable.ParRange`
* @define coll immutable parallel range
*/
@SerialVersionUID(1L)
@@ -41,13 +44,10 @@ self =>
@inline final def apply(idx: Int) = range.apply(idx);
- def splitter = new ParRangeIterator with SCPI
-
- type SCPI = SignalContextPassingIterator[ParRangeIterator]
+ def splitter = new ParRangeIterator
class ParRangeIterator(range: Range = self.range)
- extends ParIterator {
- me: SignalContextPassingIterator[ParRangeIterator] =>
+ extends SeqSplitter[Int] {
override def toString = "ParRangeIterator(over: " + range + ")"
private var ind = 0
private val len = range.length
@@ -64,15 +64,15 @@ self =>
private def rangeleft = range.drop(ind)
- def dup = new ParRangeIterator(rangeleft) with SCPI
+ def dup = new ParRangeIterator(rangeleft)
def split = {
val rleft = rangeleft
val elemleft = rleft.length
- if (elemleft < 2) Seq(new ParRangeIterator(rleft) with SCPI)
+ if (elemleft < 2) Seq(new ParRangeIterator(rleft))
else Seq(
- new ParRangeIterator(rleft.take(elemleft / 2)) with SCPI,
- new ParRangeIterator(rleft.drop(elemleft / 2)) with SCPI
+ new ParRangeIterator(rleft.take(elemleft / 2)),
+ new ParRangeIterator(rleft.drop(elemleft / 2))
)
}
@@ -81,14 +81,14 @@ self =>
for (sz <- sizes) yield {
val fronttaken = rleft.take(sz)
rleft = rleft.drop(sz)
- new ParRangeIterator(fronttaken) with SCPI
+ new ParRangeIterator(fronttaken)
}
}
/* accessors */
override def foreach[U](f: Int => U): Unit = {
- rangeleft.foreach(f)
+ rangeleft.foreach(f.asInstanceOf[Int => Unit])
ind = len
}
@@ -107,6 +107,7 @@ self =>
cb
}
}
+
}
object ParRange {
diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala
index bf3d3a5..aa19307 100644
--- a/src/library/scala/collection/parallel/immutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -24,15 +24,15 @@ import scala.collection.GenSeq
/** An immutable variant of `ParSeq`.
*
- * @define Coll mutable.ParSeq
+ * @define Coll `mutable.ParSeq`
* @define coll mutable parallel sequence
*/
trait ParSeq[+T]
-extends collection/*.immutable*/.GenSeq[T]
- with collection.parallel.ParSeq[T]
+extends scala.collection/*.immutable*/.GenSeq[T]
+ with scala.collection.parallel.ParSeq[T]
with ParIterable[T]
with GenericParTemplate[T, ParSeq]
- with ParSeqLike[T, ParSeq[T], collection.immutable.Seq[T]]
+ with ParSeqLike[T, ParSeq[T], scala.collection.immutable.Seq[T]]
{
override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq
override def toSeq: ParSeq[T] = this
@@ -40,7 +40,7 @@ extends collection/*.immutable*/.GenSeq[T]
/** $factoryInfo
- * @define Coll mutable.ParSeq
+ * @define Coll `mutable.ParSeq`
* @define coll mutable parallel sequence
*/
object ParSeq extends ParFactory[ParSeq] {
diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala
index a39607b..3622377 100644
--- a/src/library/scala/collection/parallel/immutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSet.scala
@@ -1,40 +1,30 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.collection
package parallel.immutable
-
-
-
-
-
import scala.collection.GenSet
-import scala.collection.immutable.Set
import scala.collection.generic._
import scala.collection.parallel.ParSetLike
import scala.collection.parallel.Combiner
-
-
-
/** An immutable variant of `ParSet`.
*
- * @define Coll mutable.ParSet
+ * @define Coll `mutable.ParSet`
* @define coll mutable parallel set
*/
trait ParSet[T]
-extends collection/*.immutable*/.GenSet[T]
+extends scala.collection/*.immutable*/.GenSet[T]
with GenericParTemplate[T, ParSet]
with parallel.ParSet[T]
with ParIterable[T]
- with ParSetLike[T, ParSet[T], Set[T]]
+ with ParSetLike[T, ParSet[T], scala.collection.immutable.Set[T]]
{
self =>
override def empty: ParSet[T] = ParHashSet[T]()
@@ -47,10 +37,8 @@ self =>
override def toSet[U >: T]: ParSet[U] = this.asInstanceOf[ParSet[U]]
}
-
-
/** $factoryInfo
- * @define Coll mutable.ParSet
+ * @define Coll `mutable.ParSet`
* @define coll mutable parallel set
*/
object ParSet extends ParSetFactory[ParSet] {
@@ -58,17 +46,3 @@ object ParSet extends ParSetFactory[ParSet] {
implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T]
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index d1cf3d5..1ee7f4a 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -34,8 +34,10 @@ import immutable.VectorIterator
*
* @author Aleksandar Prokopec
* @since 2.9
+ * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_vector Scala's Parallel Collections Library overview]]
+ * section on `ParVector` for more information.
*
- * @define Coll immutable.ParVector
+ * @define Coll `immutable.ParVector`
* @define coll immutable parallel vector
*/
class ParVector[+T](private[this] val vector: Vector[T])
@@ -48,22 +50,21 @@ extends ParSeq[T]
def this() = this(Vector())
- type SCPI = SignalContextPassingIterator[ParVectorIterator]
-
def apply(idx: Int) = vector.apply(idx)
def length = vector.length
def splitter: SeqSplitter[T] = {
- val pit = new ParVectorIterator(vector.startIndex, vector.endIndex) with SCPI
+ val pit = new ParVectorIterator(vector.startIndex, vector.endIndex)
vector.initIterator(pit)
pit
}
override def seq: Vector[T] = vector
- class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with ParIterator {
- self: SCPI =>
+ override def toVector: Vector[T] = vector
+
+ class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with SeqSplitter[T] {
def remaining: Int = remainingElementCount
def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter
def split: Seq[ParVectorIterator] = {
@@ -87,7 +88,7 @@ extends ParSeq[T]
/** $factoryInfo
- * @define Coll immutable.ParVector
+ * @define Coll `immutable.ParVector`
* @define coll immutable parallel vector
*/
object ParVector extends ParFactory[ParVector] {
@@ -114,8 +115,8 @@ private[immutable] class LazyParVectorCombiner[T] extends Combiner[T, ParVector[
this
}
- def clear = {
- vectors.clear
+ def clear() = {
+ vectors.clear()
vectors += new VectorBuilder[T]
sz = 0
}
diff --git a/src/library/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala
index 19f8665..5ca0724 100644
--- a/src/library/scala/collection/parallel/immutable/package.scala
+++ b/src/library/scala/collection/parallel/immutable/package.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,15 +8,7 @@
package scala.collection.parallel
-package object immutable {
-
- /* package level methods */
- def repetition[T](elem: T, len: Int) = new Repetition(elem, len)
-
- /* constants */
-
- /* classes */
-
+package immutable {
/** A (parallel) sequence consisting of `length` elements `elem`. Used in the `padTo` method.
*
* @tparam T type of the elements
@@ -24,27 +16,29 @@ package object immutable {
* @param length the length of the collection
*/
private[parallel] class Repetition[T](elem: T, val length: Int) extends ParSeq[T] {
- self =>
+ self =>
+
def apply(idx: Int) = if (0 <= idx && idx < length) elem else throw new IndexOutOfBoundsException("" + idx)
override def seq = throw new UnsupportedOperationException
def update(idx: Int, elem: T) = throw new UnsupportedOperationException
- type SCPI = SignalContextPassingIterator[ParIterator]
-
- class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends super.ParIterator {
- me: SignalContextPassingIterator[ParIterator] =>
+ class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends SeqSplitter[T] {
def remaining = until - i
def hasNext = i < until
def next = { i += 1; elem }
- def dup = new ParIterator(i, until, elem) with SCPI
+ def dup = new ParIterator(i, until, elem)
def psplit(sizes: Int*) = {
val incr = sizes.scanLeft(0)(_ + _)
- for ((start, end) <- incr.init zip incr.tail) yield new ParIterator(i + start, (i + end) min until, elem) with SCPI
+ for ((start, end) <- incr.init zip incr.tail) yield new ParIterator(i + start, (i + end) min until, elem)
}
def split = psplit(remaining / 2, remaining - remaining / 2)
}
- def splitter = new ParIterator with SCPI
-
+ def splitter = new ParIterator
}
}
+
+package object immutable {
+ /* package level methods */
+ def repetition[T](elem: T, len: Int) = new Repetition(elem, len)
+}
diff --git a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
index def6fa7..12b2bc5 100644
--- a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,17 +8,11 @@
package scala.collection.parallel.mutable
-
-
-
import scala.collection.generic.Growable
import scala.collection.generic.Sizing
import scala.collection.mutable.ArrayBuffer
import scala.collection.parallel.Combiner
-
-
-
/** Implements combining contents of two combiners
* by postponing the operation until `result` method is called. It chains
* the leaf results together instead of evaluating the actual collection.
@@ -27,14 +21,13 @@ import scala.collection.parallel.Combiner
* @tparam To the type of the collection the combiner produces
* @tparam Buff the type of the buffers that contain leaf results and this combiner chains together
*/
-trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combiner[Elem, To]
-{
-//self: collection.parallel.EnvironmentPassingCombiner[Elem, To] =>
+trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combiner[Elem, To] {
+//self: scala.collection.parallel.EnvironmentPassingCombiner[Elem, To] =>
val chain: ArrayBuffer[Buff]
val lastbuff = chain.last
def +=(elem: Elem) = { lastbuff += elem; this }
def result: To = allocateAndCopy
- def clear = { chain.clear }
+ def clear() = { chain.clear() }
def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) {
if (other.isInstanceOf[LazyCombiner[_, _, _]]) {
val that = other.asInstanceOf[LazyCombiner[Elem, To, Buff]]
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index a1eb3be..0a4f301 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -1,13 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
@@ -19,12 +20,14 @@ import scala.collection.generic.CanBuildFrom
import scala.collection.generic.ParFactory
import scala.collection.generic.Sizing
import scala.collection.parallel.Combiner
+import scala.collection.parallel.SeqSplitter
import scala.collection.parallel.ParSeqLike
+import scala.collection.parallel.Task
import scala.collection.parallel.CHECK_RATE
import scala.collection.mutable.ArraySeq
import scala.collection.mutable.Builder
import scala.collection.GenTraversableOnce
-
+import scala.reflect.ClassTag
@@ -42,10 +45,14 @@ import scala.collection.GenTraversableOnce
*
* @tparam T type of the elements in the array
*
- * @define Coll ParArray
+ * @author Aleksandar Prokopec
+ * @since 2.9
+ * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_array Scala's Parallel Collections Library overview]]
+ * section on `ParArray` for more information.
+ *
+ * @define Coll `ParArray`
* @define coll parallel array
*
- * @author Aleksandar Prokopec
*/
@SerialVersionUID(1L)
class ParArray[T] private[mutable] (val arrayseq: ArraySeq[T])
@@ -55,7 +62,6 @@ extends ParSeq[T]
with Serializable
{
self =>
- import collection.parallel.tasksupport._
@transient private var array: Array[Any] = arrayseq.array.asInstanceOf[Array[Any]]
@@ -74,17 +80,13 @@ self =>
override def seq = arrayseq
- type SCPI = SignalContextPassingIterator[ParArrayIterator]
-
protected[parallel] def splitter: ParArrayIterator = {
- val pit = new ParArrayIterator with SCPI
+ val pit = new ParArrayIterator
pit
}
class ParArrayIterator(var i: Int = 0, val until: Int = length, val arr: Array[Any] = array)
- extends super.ParIterator {
- me: SignalContextPassingIterator[ParArrayIterator] =>
-
+ extends SeqSplitter[T] {
def hasNext = i < until
def next = {
@@ -95,9 +97,9 @@ self =>
def remaining = until - i
- def dup = new ParArrayIterator(i, until, arr) with SCPI
+ def dup = new ParArrayIterator(i, until, arr)
- def psplit(sizesIncomplete: Int*): Seq[ParIterator] = {
+ def psplit(sizesIncomplete: Int*): Seq[ParArrayIterator] = {
var traversed = i
val total = sizesIncomplete.reduceLeft(_ + _)
val left = remaining
@@ -106,19 +108,19 @@ self =>
val start = traversed
val end = (traversed + sz) min until
traversed = end
- new ParArrayIterator(start, end, arr) with SCPI
+ new ParArrayIterator(start, end, arr)
} else {
- new ParArrayIterator(traversed, traversed, arr) with SCPI
+ new ParArrayIterator(traversed, traversed, arr)
}
}
- override def split: Seq[ParIterator] = {
+ override def split: Seq[ParArrayIterator] = {
val left = remaining
if (left >= 2) {
val splitpoint = left / 2
val sq = Seq(
- new ParArrayIterator(i, i + splitpoint, arr) with SCPI,
- new ParArrayIterator(i + splitpoint, until, arr) with SCPI)
+ new ParArrayIterator(i, i + splitpoint, arr),
+ new ParArrayIterator(i + splitpoint, until, arr))
i = until
sq
} else {
@@ -467,7 +469,6 @@ self =>
Array.copy(arr, i, targetarr, 0, until - i)
pac.buff.size = pac.buff.size + until - i
pac.buff.lastPtr.size = until - i
- pac
} otherwise {
copy2builder_quick(cb, arr, until, i)
i = until
@@ -529,7 +530,6 @@ self =>
val targetarr: Array[Any] = pac.lastbuff.internalArray.asInstanceOf[Array[Any]]
reverse2combiner_quick(targetarr, arr, 0, i, until)
pac.lastbuff.setInternalSize(sz)
- pac
} otherwise {
cb.ifIs[UnrolledParArrayCombiner[T]] {
pac =>
@@ -540,7 +540,6 @@ self =>
reverse2combiner_quick(targetarr, arr, 0, i, until)
pac.buff.size = pac.buff.size + sz
pac.buff.lastPtr.size = sz
- pac
} otherwise super.reverse2combiner(cb)
}
cb
@@ -587,22 +586,22 @@ self =>
val targetarr = targarrseq.array.asInstanceOf[Array[Any]]
// fill it in parallel
- executeAndWaitResult(new Map[S](f, targetarr, 0, length))
+ tasksupport.executeAndWaitResult(new Map[S](f, targetarr, 0, length))
// wrap it into a parallel array
(new ParArray[S](targarrseq)).asInstanceOf[That]
} else super.map(f)(bf)
override def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit cbf: CanBuildFrom[ParArray[T], U, That]): That =
- if (parallelismLevel > 1 && buildsArray(cbf(repr))) {
+ if (tasksupport.parallelismLevel > 1 && buildsArray(cbf(repr))) {
// reserve an array
val targarrseq = new ArraySeq[U](length + 1)
val targetarr = targarrseq.array.asInstanceOf[Array[Any]]
targetarr(0) = z
// do a parallel prefix scan
- if (length > 0) executeAndWaitResult(new CreateScanTree[U](0, size, z, op, splitter) mapResult {
- tree => executeAndWaitResult(new ScanToArray(tree, z, op, targetarr))
+ if (length > 0) tasksupport.executeAndWaitResult(new CreateScanTree[U](0, size, z, op, splitter) mapResult {
+ tree => tasksupport.executeAndWaitResult(new ScanToArray(tree, z, op, targetarr))
})
// wrap the array into a parallel array
@@ -664,7 +663,7 @@ self =>
val fp = howmany / 2
List(new Map(f, targetarr, offset, fp), new Map(f, targetarr, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(length, parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(length, tasksupport.parallelismLevel)
}
/* serialization */
@@ -684,7 +683,7 @@ self =>
/** $factoryInfo
- * @define Coll mutable.ParArray
+ * @define Coll `mutable.ParArray`
* @define coll parallel array
*/
object ParArray extends ParFactory[ParArray] {
@@ -705,7 +704,7 @@ object ParArray extends ParFactory[ParArray] {
case _ => new ParArray[T](new ExposedArraySeq[T](runtime.ScalaRunTime.toObjectArray(arr), sz))
}
- def createFromCopy[T <: AnyRef : ClassManifest](arr: Array[T]): ParArray[T] = {
+ def createFromCopy[T <: AnyRef : ClassTag](arr: Array[T]): ParArray[T] = {
val newarr = new Array[T](arr.length)
Array.copy(arr, 0, newarr, 0, arr.length)
handoff(newarr)
diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
index 35c7489..8bc108a 100644
--- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,23 +9,23 @@
package scala.collection
package parallel.mutable
-import collection.parallel.IterableSplitter
+import scala.collection.parallel.IterableSplitter
/** Parallel flat hash table.
*
* @tparam T type of the elements in the $coll.
* @define coll table
- * @define Coll flat hash table
+ * @define Coll `ParFlatHashTable`
*
* @author Aleksandar Prokopec
*/
-trait ParFlatHashTable[T] extends collection.mutable.FlatHashTable[T] {
+trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
override def alwaysInitSizeMap = true
abstract class ParFlatHashTableIterator(var idx: Int, val until: Int, val totalsize: Int)
extends IterableSplitter[T] with SizeMapUtils {
- import collection.DebugUtils._
+ import scala.collection.DebugUtils._
private var traversed = 0
private val itertable = table
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 37065e3..11588e5 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,12 +12,12 @@ package mutable
-
-import collection.generic._
-import collection.mutable.DefaultEntry
-import collection.mutable.HashEntry
-import collection.mutable.HashTable
-import collection.mutable.UnrolledBuffer
+import scala.collection.generic._
+import scala.collection.mutable.DefaultEntry
+import scala.collection.mutable.HashEntry
+import scala.collection.mutable.HashTable
+import scala.collection.mutable.UnrolledBuffer
+import scala.collection.parallel.Task
@@ -26,25 +26,28 @@ import collection.mutable.UnrolledBuffer
* `ParHashMap` is a parallel map which internally keeps elements within a hash table.
* It uses chaining to resolve collisions.
*
- * @tparam T type of the elements in the parallel hash map
+ * @tparam K type of the keys in the parallel hash map
+ * @tparam V type of the values in the parallel hash map
*
- * @define Coll ParHashMap
+ * @define Coll `ParHashMap`
* @define coll parallel hash map
*
* @author Aleksandar Prokopec
+ * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tables Scala's Parallel Collections Library overview]]
+ * section on Parallel Hash Tables for more information.
*/
@SerialVersionUID(1L)
class ParHashMap[K, V] private[collection] (contents: HashTable.Contents[K, DefaultEntry[K, V]])
extends ParMap[K, V]
with GenericParMapTemplate[K, V, ParHashMap]
- with ParMapLike[K, V, ParHashMap[K, V], collection.mutable.HashMap[K, V]]
+ with ParMapLike[K, V, ParHashMap[K, V], scala.collection.mutable.HashMap[K, V]]
with ParHashTable[K, DefaultEntry[K, V]]
with Serializable
{
self =>
initWithContents(contents)
- type Entry = collection.mutable.DefaultEntry[K, V]
+ type Entry = scala.collection.mutable.DefaultEntry[K, V]
def this() = this(null)
@@ -54,9 +57,9 @@ self =>
protected[this] override def newCombiner = ParHashMapCombiner[K, V]
- override def seq = new collection.mutable.HashMap[K, V](hashTableContents)
+ override def seq = new scala.collection.mutable.HashMap[K, V](hashTableContents)
- def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) with SCPI
+ def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]])
override def size = tableSize
@@ -64,13 +67,13 @@ self =>
def get(key: K): Option[V] = {
val e = findEntry(key)
- if (e == null) None
+ if (e eq null) None
else Some(e.value)
}
def put(key: K, value: V): Option[V] = {
- val e = findEntry(key)
- if (e == null) { addEntry(new Entry(key, value)); None }
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
else { val v = e.value; e.value = value; Some(v) }
}
@@ -83,9 +86,8 @@ self =>
}
def += (kv: (K, V)): this.type = {
- val e = findEntry(kv._1)
- if (e == null) addEntry(new Entry(kv._1, kv._2))
- else e.value = kv._2
+ val e = findOrAddEntry(kv._1, kv._2)
+ if (e ne null) e.value = kv._2
this
}
@@ -93,22 +95,26 @@ self =>
override def stringPrefix = "ParHashMap"
- type SCPI = SignalContextPassingIterator[ParHashMapIterator]
-
class ParHashMapIterator(start: Int, untilIdx: Int, totalSize: Int, e: DefaultEntry[K, V])
- extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) with ParIterator {
- me: SCPI =>
+ extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) {
def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value);
def newIterator(idxFrom: Int, idxUntil: Int, totalSz: Int, es: DefaultEntry[K, V]) =
- new ParHashMapIterator(idxFrom, idxUntil, totalSz, es) with SCPI
+ new ParHashMapIterator(idxFrom, idxUntil, totalSz, es)
+ }
+
+ protected def createNewEntry[V1](key: K, value: V1): Entry = {
+ new Entry(key, value.asInstanceOf[V])
}
private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
+ serializeTo(out, { entry =>
+ out.writeObject(entry.key)
+ out.writeObject(entry.value)
+ })
}
private def readObject(in: java.io.ObjectInputStream) {
- init[V](in, new Entry(_, _))
+ init(in, createNewEntry(in.readObject().asInstanceOf[K], in.readObject()))
}
private[parallel] override def brokenInvariants = {
@@ -142,7 +148,7 @@ self =>
/** $factoryInfo
- * @define Coll mutable.ParHashMap
+ * @define Coll `mutable.ParHashMap`
* @define coll parallel hash map
*/
object ParHashMap extends ParMapFactory[ParHashMap] {
@@ -157,17 +163,16 @@ object ParHashMap extends ParMapFactory[ParHashMap] {
private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFactor: Int)
-extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks)
- with collection.mutable.HashTable.HashUtils[K]
+extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks)
+ with scala.collection.mutable.HashTable.HashUtils[K]
{
-//self: EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]] =>
- import collection.parallel.tasksupport._
private var mask = ParHashMapCombiner.discriminantmask
private var nonmasklen = ParHashMapCombiner.nonmasklength
+ private var seedvalue = 27
def +=(elem: (K, V)) = {
sz += 1
- val hc = improve(elemHashCode(elem._1))
+ val hc = improve(elemHashCode(elem._1), seedvalue)
val pos = (hc >>> nonmasklen)
if (buckets(pos) eq null) {
// initialize bucket
@@ -180,9 +185,9 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
def result: ParHashMap[K, V] = if (size >= (ParHashMapCombiner.numblocks * sizeMapBucketSize)) { // 1024
// construct table
- val table = new AddingHashTable(size, tableLoadFactor)
+ val table = new AddingHashTable(size, tableLoadFactor, seedvalue)
val bucks = buckets.map(b => if (b ne null) b.headPtr else null)
- val insertcount = executeAndWaitResult(new FillBlocks(bucks, table, 0, bucks.length))
+ val insertcount = combinerTaskSupport.executeAndWaitResult(new FillBlocks(bucks, table, 0, bucks.length))
table.setSize(insertcount)
// TODO compare insertcount and size to see if compression is needed
val c = table.hashTableContents
@@ -190,8 +195,10 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
} else {
// construct a normal table and fill it sequentially
// TODO parallelize by keeping separate sizemaps and merging them
- val table = new HashTable[K, DefaultEntry[K, V]] {
- def insertEntry(e: DefaultEntry[K, V]) = if (super.findEntry(e.key) eq null) super.addEntry(e)
+ object table extends HashTable[K, DefaultEntry[K, V]] {
+ type Entry = DefaultEntry[K, V]
+ def insertEntry(e: Entry) { super.findOrAddEntry(e.key, e) }
+ def createNewEntry[E](key: K, entry: E): Entry = entry.asInstanceOf[Entry]
sizeMapInit(table.length)
}
var i = 0
@@ -201,8 +208,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
}
i += 1
}
- val c = table.hashTableContents
- new ParHashMap(c)
+ new ParHashMap(table.hashTableContents)
}
/* classes */
@@ -215,11 +221,12 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
* and true if the key was successfully inserted. It does not update the number of elements
* in the table.
*/
- private[ParHashMapCombiner] class AddingHashTable(numelems: Int, lf: Int) extends HashTable[K, DefaultEntry[K, V]] {
+ private[ParHashMapCombiner] class AddingHashTable(numelems: Int, lf: Int, _seedvalue: Int) extends HashTable[K, DefaultEntry[K, V]] {
import HashTable._
_loadFactor = lf
table = new Array[HashEntry[K, DefaultEntry[K, V]]](capacity(sizeForThreshold(_loadFactor, numelems)))
tableSize = 0
+ seedvalue = _seedvalue
threshold = newThreshold(_loadFactor, table.length)
sizeMapInit(table.length)
def setSize(sz: Int) = tableSize = sz
@@ -252,6 +259,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
assert(h >= block * blocksize && h < (block + 1) * blocksize)
}
}
+ protected def createNewEntry[X](key: K, x: X) = ???
}
/* tasks */
@@ -290,7 +298,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
insertcount
}
private def assertCorrectBlock(block: Int, k: K) {
- val hc = improve(elemHashCode(k))
+ val hc = improve(elemHashCode(k), seedvalue)
if ((hc >>> nonmasklen) != block) {
println(hc + " goes to " + (hc >>> nonmasklen) + ", while expected block is " + block)
assert((hc >>> nonmasklen) == block)
@@ -303,7 +311,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
override def merge(that: FillBlocks) {
this.result += that.result
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
}
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 3e22e3c..57fab57 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,11 +8,15 @@
package scala.collection.parallel.mutable
-import collection.generic._
-import collection.mutable.HashSet
-import collection.mutable.FlatHashTable
-import collection.parallel.Combiner
-import collection.mutable.UnrolledBuffer
+
+
+import scala.collection.generic._
+import scala.collection.mutable.FlatHashTable
+import scala.collection.parallel.Combiner
+import scala.collection.mutable.UnrolledBuffer
+import scala.collection.parallel.Task
+
+
/** A parallel hash set.
*
@@ -21,16 +25,18 @@ import collection.mutable.UnrolledBuffer
*
* @tparam T type of the elements in the $coll.
*
- * @define Coll ParHashSet
+ * @define Coll `ParHashSet`
* @define coll parallel hash set
*
* @author Aleksandar Prokopec
+ * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tables Scala's Parallel Collections Library overview]]
+ * section on Parallel Hash Tables for more information.
*/
@SerialVersionUID(1L)
class ParHashSet[T] private[collection] (contents: FlatHashTable.Contents[T])
extends ParSet[T]
with GenericParTemplate[T, ParHashSet]
- with ParSetLike[T, ParHashSet[T], collection.mutable.HashSet[T]]
+ with ParSetLike[T, ParHashSet[T], scala.collection.mutable.HashSet[T]]
with ParFlatHashTable[T]
with Serializable
{
@@ -51,7 +57,7 @@ extends ParSet[T]
def clear() = clearTable()
- override def seq = new HashSet(hashTableContents)
+ override def seq = new scala.collection.mutable.HashSet(hashTableContents)
def +=(elem: T) = {
addEntry(elem)
@@ -67,14 +73,11 @@ extends ParSet[T]
def contains(elem: T) = containsEntry(elem)
- def splitter = new ParHashSetIterator(0, table.length, size) with SCPI
-
- type SCPI = SignalContextPassingIterator[ParHashSetIterator]
+ def splitter = new ParHashSetIterator(0, table.length, size)
class ParHashSetIterator(start: Int, iteratesUntil: Int, totalElements: Int)
- extends ParFlatHashTableIterator(start, iteratesUntil, totalElements) with ParIterator {
- me: SCPI =>
- def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total) with SCPI
+ extends ParFlatHashTableIterator(start, iteratesUntil, totalElements) {
+ def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total)
}
private def writeObject(s: java.io.ObjectOutputStream) {
@@ -82,10 +85,10 @@ extends ParSet[T]
}
private def readObject(in: java.io.ObjectInputStream) {
- init(in, x => x)
+ init(in, x => ())
}
- import collection.DebugUtils._
+ import scala.collection.DebugUtils._
override def debugInformation = buildString {
append =>
append("Parallel flat hash table set")
@@ -101,7 +104,7 @@ extends ParSet[T]
/** $factoryInfo
- * @define Coll mutable.ParHashSet
+ * @define Coll `mutable.ParHashSet`
* @define coll parallel hash set
*/
object ParHashSet extends ParSetFactory[ParHashSet] {
@@ -114,16 +117,16 @@ object ParHashSet extends ParSetFactory[ParHashSet] {
private[mutable] abstract class ParHashSetCombiner[T](private val tableLoadFactor: Int)
-extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
-with collection.mutable.FlatHashTable.HashUtils[T] {
+extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
+with scala.collection.mutable.FlatHashTable.HashUtils[T] {
//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
- import collection.parallel.tasksupport._
private var mask = ParHashSetCombiner.discriminantmask
private var nonmasklen = ParHashSetCombiner.nonmasklength
+ private var seedvalue = 27
def +=(elem: T) = {
sz += 1
- val hc = improve(elemHashCode(elem))
+ val hc = improve(elemHashCode(elem), seedvalue)
val pos = hc >>> nonmasklen
if (buckets(pos) eq null) {
// initialize bucket
@@ -141,8 +144,8 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
private def parPopulate: FlatHashTable.Contents[T] = {
// construct it in parallel
- val table = new AddingFlatHashTable(size, tableLoadFactor)
- val (inserted, leftovers) = executeAndWaitResult(new FillBlocks(buckets, table, 0, buckets.length))
+ val table = new AddingFlatHashTable(size, tableLoadFactor, seedvalue)
+ val (inserted, leftovers) = combinerTaskSupport.executeAndWaitResult(new FillBlocks(buckets, table, 0, buckets.length))
var leftinserts = 0
for (elem <- leftovers) leftinserts += table.insertEntry(0, table.tableLength, elem.asInstanceOf[T])
table.setSize(leftinserts + inserted)
@@ -154,12 +157,13 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
// TODO parallelize by keeping separate size maps and merging them
val tbl = new FlatHashTable[T] {
sizeMapInit(table.length)
+ seedvalue = ParHashSetCombiner.this.seedvalue
+ for {
+ buffer <- buckets;
+ if buffer ne null;
+ elem <- buffer
+ } addEntry(elem.asInstanceOf[T])
}
- for {
- buffer <- buckets;
- if buffer ne null;
- elem <- buffer
- } tbl.addEntry(elem.asInstanceOf[T])
tbl.hashTableContents
}
@@ -169,13 +173,13 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
* it has to take and allocates the underlying hash table in advance.
* Elements can only be added to it. The final size has to be adjusted manually.
* It is internal to `ParHashSet` combiners.
- *
*/
- class AddingFlatHashTable(numelems: Int, lf: Int) extends FlatHashTable[T] {
+ class AddingFlatHashTable(numelems: Int, lf: Int, inseedvalue: Int) extends FlatHashTable[T] {
_loadFactor = lf
table = new Array[AnyRef](capacity(FlatHashTable.sizeForThreshold(numelems, _loadFactor)))
tableSize = 0
threshold = FlatHashTable.newThreshold(_loadFactor, table.length)
+ seedvalue = inseedvalue
sizeMapInit(table.length)
override def toString = "AFHT(%s)".format(table.length)
@@ -306,11 +310,12 @@ with collection.mutable.FlatHashTable.HashUtils[T] {
// the total number of successfully inserted elements is adjusted accordingly
result = (this.result._1 + that.result._1 + inserted, remainingLeftovers concat that.result._2)
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(ParHashMapCombiner.numblocks, combinerTaskSupport.parallelismLevel)
}
}
+
private[parallel] object ParHashSetCombiner {
private[mutable] val discriminantbits = 5
private[mutable] val numblocks = 1 << discriminantbits
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index e5c0be3..66ddef6 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,8 +13,8 @@ package parallel.mutable
-import collection.mutable.HashEntry
-import collection.parallel.IterableSplitter
+import scala.collection.mutable.HashEntry
+import scala.collection.parallel.IterableSplitter
@@ -22,14 +22,14 @@ import collection.parallel.IterableSplitter
* enriching the data structure by fulfilling certain requirements
* for their parallel construction and iteration.
*/
-trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.mutable.HashTable[K, Entry] {
+trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collection.mutable.HashTable[K, Entry] {
override def alwaysInitSizeMap = true
/** A parallel iterator returning all the entries.
*/
abstract class EntryIterator[T, +IterRepr <: IterableSplitter[T]]
- (private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry)
+ (private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry)
extends IterableSplitter[T] with SizeMapUtils {
private val itertable = table
private var traversed = 0
@@ -42,7 +42,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.m
es ne null
}
- def next: T = {
+ def next(): T = {
val res = es
es = es.next
scan()
@@ -104,7 +104,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.m
// otherwise, this is the last entry in the table - all what remains is the chain
// so split the rest of the chain
val arr = convertToArrayBuffer(es)
- val arrpit = new collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate)
+ val arrpit = new scala.collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate)
arrpit.split
}
} else Seq(this.asInstanceOf[IterRepr])
diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala
index 700d21d..7090c51 100644
--- a/src/library/scala/collection/parallel/mutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -26,10 +26,11 @@ import scala.collection.GenIterable
* @author Aleksandar Prokopec
* @since 2.9
*/
-trait ParIterable[T] extends collection/*.mutable*/.GenIterable[T]
- with collection.parallel.ParIterable[T]
+trait ParIterable[T] extends scala.collection/*.mutable*/.GenIterable[T]
+ with scala.collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
- with ParIterableLike[T, ParIterable[T], Iterable[T]] {
+ with ParIterableLike[T, ParIterable[T], Iterable[T]]
+ with Mutable {
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
//protected[this] override def newBuilder = ParIterable.newBuilder[T]
@@ -38,7 +39,7 @@ trait ParIterable[T] extends collection/*.mutable*/.GenIterable[T]
override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T])
- def seq: collection.mutable.Iterable[T]
+ def seq: scala.collection.mutable.Iterable[T]
}
/** $factoryInfo
diff --git a/src/library/scala/collection/parallel/mutable/ParMap.scala b/src/library/scala/collection/parallel/mutable/ParMap.scala
index 91c2b3d..2250a38 100644
--- a/src/library/scala/collection/parallel/mutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMap.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,8 +12,8 @@ package scala.collection.parallel.mutable
-import collection.generic._
-import collection.parallel.Combiner
+import scala.collection.generic._
+import scala.collection.parallel.Combiner
@@ -28,11 +28,11 @@ import collection.parallel.Combiner
* @since 2.9
*/
trait ParMap[K, V]
-extends collection/*.mutable*/.GenMap[K, V]
- with collection.parallel.ParMap[K, V]
+extends scala.collection/*.mutable*/.GenMap[K, V]
+ with scala.collection.parallel.ParMap[K, V]
with /* mutable */ ParIterable[(K, V)]
with GenericParMapTemplate[K, V, ParMap]
- with /* mutable */ ParMapLike[K, V, ParMap[K, V], collection.mutable.Map[K, V]]
+ with /* mutable */ ParMapLike[K, V, ParMap[K, V], scala.collection.mutable.Map[K, V]]
{
protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V]
@@ -41,7 +41,28 @@ extends collection/*.mutable*/.GenMap[K, V]
override def empty: ParMap[K, V] = new ParHashMap[K, V]
- def seq: collection.mutable.Map[K, V]
+ def seq: scala.collection.mutable.Map[K, V]
+
+ override def updated [U >: V](key: K, value: U): ParMap[K, U] = this + ((key, value))
+
+ /** The same map with a given default function.
+ * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+ *
+ * @param d the function mapping keys to values, used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ def withDefault(d: K => V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, d)
+
+ /** The same map with a given default value.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+ *
+ * @param d the function mapping keys to values, used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ def withDefaultValue(d: V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d)
}
@@ -54,6 +75,24 @@ object ParMap extends ParMapFactory[ParMap] {
implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParMap[K, V]] = new CanCombineFromMap[K, V]
+ class WithDefault[K, V](underlying: ParMap[K, V], d: K => V)
+ extends scala.collection.parallel.ParMap.WithDefault(underlying, d) with ParMap[K, V] {
+ override def += (kv: (K, V)) = {underlying += kv; this}
+ def -= (key: K) = {underlying -= key; this}
+ override def empty = new WithDefault(underlying.empty, d)
+ override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d)
+ override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2)
+ override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d)
+ override def seq = underlying.seq.withDefault(d)
+ def clear() = underlying.clear()
+ def put(key: K, value: V): Option[V] = underlying.put(key, value)
+
+ /** If these methods aren't overridden to thread through the underlying map,
+ * successive calls to withDefault* have no effect.
+ */
+ override def withDefault(d: K => V): ParMap[K, V] = new WithDefault[K, V](underlying, d)
+ override def withDefaultValue(d: V): ParMap[K, V] = new WithDefault[K, V](underlying, x => d)
+ }
}
diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
index aff590d..cdcfc59 100644
--- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,9 +11,11 @@ package mutable
-import collection.generic._
-import collection.mutable.Builder
-import collection.mutable.Cloneable
+import scala.collection.generic._
+import scala.collection.mutable.Builder
+import scala.collection.mutable.Cloneable
+import scala.collection.generic.Growable
+import scala.collection.generic.Shrinkable
@@ -31,11 +33,13 @@ import collection.mutable.Cloneable
trait ParMapLike[K,
V,
+Repr <: ParMapLike[K, V, Repr, Sequential] with ParMap[K, V],
- +Sequential <: collection.mutable.Map[K, V] with collection.mutable.MapLike[K, V, Sequential]]
-extends collection.GenMapLike[K, V, Repr]
- with collection.parallel.ParMapLike[K, V, Repr, Sequential]
- with Cloneable[Repr] {
-
+ +Sequential <: scala.collection.mutable.Map[K, V] with scala.collection.mutable.MapLike[K, V, Sequential]]
+extends scala.collection.GenMapLike[K, V, Repr]
+ with scala.collection.parallel.ParMapLike[K, V, Repr, Sequential]
+ with Growable[(K, V)]
+ with Shrinkable[K]
+ with Cloneable[Repr]
+{
// note: should not override toMap
def put(key: K, value: V): Option[V]
diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala
index a48ba48..95a4d4a 100644
--- a/src/library/scala/collection/parallel/mutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSeq.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -26,28 +26,28 @@ import scala.collection.GenSeq
/** A mutable variant of `ParSeq`.
*
- * @define Coll mutable.ParSeq
+ * @define Coll `mutable.ParSeq`
* @define coll mutable parallel sequence
*/
-trait ParSeq[T] extends collection/*.mutable*/.GenSeq[T] // was: collection.mutable.Seq[T]
+trait ParSeq[T] extends scala.collection/*.mutable*/.GenSeq[T] // was: scala.collection.mutable.Seq[T]
with ParIterable[T]
- with collection.parallel.ParSeq[T]
+ with scala.collection.parallel.ParSeq[T]
with GenericParTemplate[T, ParSeq]
- with ParSeqLike[T, ParSeq[T], collection.mutable.Seq[T]] {
+ with ParSeqLike[T, ParSeq[T], scala.collection.mutable.Seq[T]] {
self =>
override def companion: GenericCompanion[ParSeq] with GenericParCompanion[ParSeq] = ParSeq
//protected[this] override def newBuilder = ParSeq.newBuilder[T]
def update(i: Int, elem: T): Unit
- def seq: collection.mutable.Seq[T]
+ def seq: scala.collection.mutable.Seq[T]
override def toSeq: ParSeq[T] = this
}
/** $factoryInfo
- * @define Coll mutable.ParSeq
+ * @define Coll `mutable.ParSeq`
* @define coll mutable parallel sequence
*/
object ParSeq extends ParFactory[ParSeq] {
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index 1d295fd..d8f8217 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -21,27 +21,27 @@ import scala.collection.GenSet
/** A mutable variant of `ParSet`.
*
- * @define Coll mutable.ParSet
+ * @define Coll `mutable.ParSet`
* @define coll mutable parallel set
*
* @author Aleksandar Prokopec
*/
trait ParSet[T]
-extends collection/*.mutable*/.GenSet[T]
+extends scala.collection/*.mutable*/.GenSet[T]
with ParIterable[T]
- with collection.parallel.ParSet[T]
+ with scala.collection.parallel.ParSet[T]
with GenericParTemplate[T, ParSet]
- with ParSetLike[T, ParSet[T], collection.mutable.Set[T]]
+ with ParSetLike[T, ParSet[T], scala.collection.mutable.Set[T]]
{
self =>
override def companion: GenericCompanion[ParSet] with GenericParCompanion[ParSet] = ParSet
override def empty: ParSet[T] = ParHashSet()
- def seq: collection.mutable.Set[T]
+ def seq: scala.collection.mutable.Set[T]
}
/** $factoryInfo
- * @define Coll mutable.ParSet
+ * @define Coll `mutable.ParSet`
* @define coll mutable parallel set
*/
object ParSet extends ParSetFactory[ParSet] {
diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
index 7c9767b..609888f 100644
--- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -16,9 +16,8 @@ import scala.collection.mutable.Set
import scala.collection.mutable.Builder
import scala.collection.mutable.Cloneable
import scala.collection.GenSetLike
-
-
-
+import scala.collection.generic.Growable
+import scala.collection.generic.Shrinkable
@@ -36,8 +35,10 @@ trait ParSetLike[T,
+Repr <: ParSetLike[T, Repr, Sequential] with ParSet[T],
+Sequential <: mutable.Set[T] with mutable.SetLike[T, Sequential]]
extends GenSetLike[T, Repr]
- with collection.parallel.ParIterableLike[T, Repr, Sequential]
- with collection.parallel.ParSetLike[T, Repr, Sequential]
+ with scala.collection.parallel.ParIterableLike[T, Repr, Sequential]
+ with scala.collection.parallel.ParSetLike[T, Repr, Sequential]
+ with Growable[T]
+ with Shrinkable[T]
with Cloneable[Repr]
{
self =>
diff --git a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
new file mode 100644
index 0000000..61a50a1
--- /dev/null
+++ b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
@@ -0,0 +1,195 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection.parallel.mutable
+
+
+
+import scala.collection.generic._
+import scala.collection.parallel.Combiner
+import scala.collection.parallel.IterableSplitter
+import scala.collection.parallel.Task
+import scala.collection.concurrent.BasicNode
+import scala.collection.concurrent.TNode
+import scala.collection.concurrent.LNode
+import scala.collection.concurrent.CNode
+import scala.collection.concurrent.SNode
+import scala.collection.concurrent.INode
+import scala.collection.concurrent.TrieMap
+import scala.collection.concurrent.TrieMapIterator
+
+
+
+/** Parallel TrieMap collection.
+ *
+ * It has its bulk operations parallelized, but uses the snapshot operation
+ * to create the splitter. This means that parallel bulk operations can be
+ * called concurrently with the modifications.
+ *
+ * @author Aleksandar Prokopec
+ * @since 2.10
+ * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_concurrent_tries Scala's Parallel Collections Library overview]]
+ * section on `ParTrieMap` for more information.
+ */
+final class ParTrieMap[K, V] private[collection] (private val ctrie: TrieMap[K, V])
+extends ParMap[K, V]
+ with GenericParMapTemplate[K, V, ParTrieMap]
+ with ParMapLike[K, V, ParTrieMap[K, V], TrieMap[K, V]]
+ with ParTrieMapCombiner[K, V]
+ with Serializable
+{
+ def this() = this(new TrieMap)
+
+ override def mapCompanion: GenericParMapCompanion[ParTrieMap] = ParTrieMap
+
+ override def empty: ParTrieMap[K, V] = ParTrieMap.empty
+
+ protected[this] override def newCombiner = ParTrieMap.newCombiner
+
+ override def seq = ctrie
+
+ def splitter = new ParTrieMapSplitter(0, ctrie.readOnlySnapshot().asInstanceOf[TrieMap[K, V]], true)
+
+ override def clear() = ctrie.clear()
+
+ def result = this
+
+ def get(key: K): Option[V] = ctrie.get(key)
+
+ def put(key: K, value: V): Option[V] = ctrie.put(key, value)
+
+ def update(key: K, value: V): Unit = ctrie.update(key, value)
+
+ def remove(key: K): Option[V] = ctrie.remove(key)
+
+ def +=(kv: (K, V)): this.type = {
+ ctrie.+=(kv)
+ this
+ }
+
+ def -=(key: K): this.type = {
+ ctrie.-=(key)
+ this
+ }
+
+ override def size = {
+ val in = ctrie.readRoot()
+ val r = in.gcasRead(ctrie)
+ r match {
+ case tn: TNode[_, _] => tn.cachedSize(ctrie)
+ case ln: LNode[_, _] => ln.cachedSize(ctrie)
+ case cn: CNode[_, _] =>
+ tasksupport.executeAndWaitResult(new Size(0, cn.array.length, cn.array))
+ cn.cachedSize(ctrie)
+ }
+ }
+
+ override def stringPrefix = "ParTrieMap"
+
+ /* tasks */
+
+ /** Computes TrieMap size in parallel. */
+ class Size(offset: Int, howmany: Int, array: Array[BasicNode]) extends Task[Int, Size] {
+ var result = -1
+ def leaf(prev: Option[Int]) = {
+ var sz = 0
+ var i = offset
+ val until = offset + howmany
+ while (i < until) {
+ array(i) match {
+ case sn: SNode[_, _] => sz += 1
+ case in: INode[K, V] => sz += in.cachedSize(ctrie)
+ }
+ i += 1
+ }
+ result = sz
+ }
+ def split = {
+ val fp = howmany / 2
+ Seq(new Size(offset, fp, array), new Size(offset + fp, howmany - fp, array))
+ }
+ def shouldSplitFurther = howmany > 1
+ override def merge(that: Size) = result = result + that.result
+ }
+
+}
+
+
+private[collection] class ParTrieMapSplitter[K, V](lev: Int, ct: TrieMap[K, V], mustInit: Boolean)
+extends TrieMapIterator[K, V](lev, ct, mustInit)
+ with IterableSplitter[(K, V)]
+{
+ // only evaluated if `remaining` is invoked (which is not used by most tasks)
+ lazy val totalsize = ct.par.size
+ var iterated = 0
+
+ protected override def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean) = new ParTrieMapSplitter[K, V](_lev, _ct, _mustInit)
+
+ override def shouldSplitFurther[S](coll: scala.collection.parallel.ParIterable[S], parallelismLevel: Int) = {
+ val maxsplits = 3 + Integer.highestOneBit(parallelismLevel)
+ level < maxsplits
+ }
+
+ def dup = {
+ val it = newIterator(0, ct, false)
+ dupTo(it)
+ it.iterated = this.iterated
+ it
+ }
+
+ override def next() = {
+ iterated += 1
+ super.next()
+ }
+
+ def split: Seq[IterableSplitter[(K, V)]] = subdivide().asInstanceOf[Seq[IterableSplitter[(K, V)]]]
+
+ override def isRemainingCheap = false
+
+ def remaining: Int = totalsize - iterated
+}
+
+
+/** Only used within the `ParTrieMap`. */
+private[mutable] trait ParTrieMapCombiner[K, V] extends Combiner[(K, V), ParTrieMap[K, V]] {
+
+ def combine[N <: (K, V), NewTo >: ParTrieMap[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this eq other) this else {
+ throw new UnsupportedOperationException("This shouldn't have been called in the first place.")
+
+ val thiz = this.asInstanceOf[ParTrieMap[K, V]]
+ val that = other.asInstanceOf[ParTrieMap[K, V]]
+ val result = new ParTrieMap[K, V]
+
+ result ++= thiz.iterator
+ result ++= that.iterator
+
+ result
+ }
+
+ override def canBeShared = true
+
+}
+
+
+object ParTrieMap extends ParMapFactory[ParTrieMap] {
+
+ def empty[K, V]: ParTrieMap[K, V] = new ParTrieMap[K, V]
+
+ def newCombiner[K, V]: Combiner[(K, V), ParTrieMap[K, V]] = new ParTrieMap[K, V]
+
+ implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParTrieMap[K, V]] = new CanCombineFromMap[K, V]
+
+}
+
+
+
+
+
+
+
+
diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
index 8290438..dc31d1b 100644
--- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -14,33 +14,29 @@ import scala.collection.generic.Sizing
import scala.collection.mutable.ArraySeq
import scala.collection.mutable.ArrayBuffer
import scala.collection.parallel.TaskSupport
-//import scala.collection.parallel.EnvironmentPassingCombiner
import scala.collection.parallel.unsupportedop
import scala.collection.parallel.Combiner
+import scala.collection.parallel.Task
/** An array combiner that uses a chain of arraybuffers to store elements. */
-trait ResizableParArrayCombiner[T]
-extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]]
-{
-//self: EnvironmentPassingCombiner[T, ParArray[T]] =>
- import collection.parallel.tasksupport._
+trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]] {
override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz)
+ // public method with private[mutable] type ExposedArrayBuffer in parameter type; cannot be overridden.
def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c)
def allocateAndCopy = if (chain.size > 1) {
val arrayseq = new ArraySeq[T](size)
val array = arrayseq.array.asInstanceOf[Array[Any]]
- executeAndWaitResult(new CopyChainToArray(array, 0, size))
+ combinerTaskSupport.executeAndWaitResult(new CopyChainToArray(array, 0, size))
new ParArray(arrayseq)
} else { // optimisation if there is only 1 array
- val pa = new ParArray(new ExposedArraySeq[T](chain(0).internalArray, size))
- pa
+ new ParArray(new ExposedArraySeq[T](chain(0).internalArray, size))
}
override def toString = "ResizableParArrayCombiner(" + size + "): " //+ chain
@@ -85,27 +81,13 @@ extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]]
val fp = howmany / 2
List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
}
-
}
-
object ResizableParArrayCombiner {
def apply[T](c: ArrayBuffer[ExposedArrayBuffer[T]]): ResizableParArrayCombiner[T] = {
new { val chain = c } with ResizableParArrayCombiner[T] // was: with EnvironmentPassingCombiner[T, ParArray[T]]
}
def apply[T](): ResizableParArrayCombiner[T] = apply(new ArrayBuffer[ExposedArrayBuffer[T]] += new ExposedArrayBuffer[T])
}
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index dc583fb..c3a3794 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,31 +8,23 @@
package scala.collection.parallel.mutable
-
-
-
-
import scala.collection.generic.Sizing
import scala.collection.mutable.ArraySeq
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.UnrolledBuffer
import scala.collection.mutable.UnrolledBuffer.Unrolled
import scala.collection.parallel.TaskSupport
-//import scala.collection.parallel.EnvironmentPassingCombiner
import scala.collection.parallel.unsupportedop
import scala.collection.parallel.Combiner
+import scala.collection.parallel.Task
+import scala.reflect.ClassTag
-
-
-
-
-private[mutable] class DoublingUnrolledBuffer[T](implicit m: ClassManifest[T]) extends UnrolledBuffer[T]()(m) {
+private[mutable] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) {
override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz
protected override def newUnrolled = new Unrolled[T](0, new Array[T](4), null, this)
}
-
/** An array combiner that uses doubling unrolled buffers to store elements. */
trait UnrolledParArrayCombiner[T]
extends Combiner[T, ParArray[T]] {
@@ -40,8 +32,6 @@ extends Combiner[T, ParArray[T]] {
// because size is doubling, random access is O(logn)!
val buff = new DoublingUnrolledBuffer[Any]
- import collection.parallel.tasksupport._
-
def +=(elem: T) = {
buff += elem
this
@@ -51,7 +41,7 @@ extends Combiner[T, ParArray[T]] {
val arrayseq = new ArraySeq[T](size)
val array = arrayseq.array.asInstanceOf[Array[Any]]
- executeAndWaitResult(new CopyUnrolledToArray(array, 0, size))
+ combinerTaskSupport.executeAndWaitResult(new CopyUnrolledToArray(array, 0, size))
new ParArray(arrayseq)
}
@@ -87,7 +77,7 @@ extends Combiner[T, ParArray[T]] {
var pos = startpos
var arroffset = offset
while (totalleft > 0) {
- val lefthere = math.min(totalleft, curr.size - pos)
+ val lefthere = scala.math.min(totalleft, curr.size - pos)
Array.copy(curr.array, pos, array, arroffset, lefthere)
// println("from: " + arroffset + " elems " + lefthere + " - " + pos + ", " + curr + " -> " + array.toList + " by " + this + " !! " + buff.headPtr)
totalleft -= lefthere
@@ -109,13 +99,11 @@ extends Combiner[T, ParArray[T]] {
val fp = howmany / 2
List(new CopyUnrolledToArray(array, offset, fp), new CopyUnrolledToArray(array, offset + fp, howmany - fp))
}
- def shouldSplitFurther = howmany > collection.parallel.thresholdFromSize(size, parallelismLevel)
+ def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel)
override def toString = "CopyUnrolledToArray(" + offset + ", " + howmany + ")"
}
}
-
-
object UnrolledParArrayCombiner {
def apply[T](): UnrolledParArrayCombiner[T] = new UnrolledParArrayCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParArray[T]]
}
diff --git a/src/library/scala/collection/parallel/mutable/package.scala b/src/library/scala/collection/parallel/mutable/package.scala
index 1efe79b..2494d09 100644
--- a/src/library/scala/collection/parallel/mutable/package.scala
+++ b/src/library/scala/collection/parallel/mutable/package.scala
@@ -1,22 +1,25 @@
-package scala.collection.parallel
-
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.collection.parallel
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.ArraySeq
import scala.collection.generic.Sizing
-
-
package object mutable {
-
/* aliases */
-
type ParArrayCombiner[T] = ResizableParArrayCombiner[T]
val ParArrayCombiner = ResizableParArrayCombiner
+}
+package mutable {
/* classes and traits */
-
private[mutable] trait SizeMapUtils {
protected def calcNumElems(from: Int, until: Int, tableLength: Int, sizeMapBucketSize: Int) = {
@@ -53,7 +56,6 @@ package object mutable {
}
/* hack-arounds */
-
private[mutable] class ExposedArrayBuffer[T] extends ArrayBuffer[T] with Sizing {
def internalArray = array
def setInternalSize(s: Int) = size0 = s
@@ -71,5 +73,4 @@ package object mutable {
override val length = sz
override def stringPrefix = "ArraySeq"
}
-
}
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index ec2b583..988886b 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -1,28 +1,24 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-package scala.collection
-
-
-import java.lang.Thread._
+package scala
+package collection
import scala.collection.generic.CanBuildFrom
import scala.collection.generic.CanCombineFrom
import scala.collection.parallel.mutable.ParArray
import scala.collection.mutable.UnrolledBuffer
-import annotation.unchecked.uncheckedVariance
-
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.implicitConversions
/** Package object for parallel collections.
*/
package object parallel {
-
/* constants */
val MIN_FOR_COPY = 512
val CHECK_RATE = 512
@@ -46,26 +42,21 @@ package object parallel {
private[parallel] def outofbounds(idx: Int) = throw new IndexOutOfBoundsException(idx.toString)
private[parallel] def getTaskSupport: TaskSupport =
- if (util.Properties.isJavaAtLeast("1.6")) {
- val vendor = util.Properties.javaVmVendor
- if ((vendor contains "Oracle") || (vendor contains "Sun") || (vendor contains "Apple")) new ForkJoinTaskSupport
- else new ThreadPoolTaskSupport
- } else new ThreadPoolTaskSupport
-
- val tasksupport = getTaskSupport
+ if (scala.util.Properties.isJavaAtLeast("1.6")) new ForkJoinTaskSupport
+ else new ThreadPoolTaskSupport
- /* implicit conversions */
+ val defaultTaskSupport: TaskSupport = getTaskSupport
- trait FactoryOps[From, Elem, To] {
- trait Otherwise[R] {
- def otherwise(notbody: => R): R
+ def setTaskSupport[Coll](c: Coll, t: TaskSupport): Coll = {
+ c match {
+ case pc: ParIterableLike[_, _, _] => pc.tasksupport = t
+ case _ => // do nothing
}
-
- def isParallel: Boolean
- def asParallel: CanCombineFrom[From, Elem, To]
- def ifParallel[R](isbody: CanCombineFrom[From, Elem, To] => R): Otherwise[R]
+ c
}
+ /* implicit conversions */
+
implicit def factory2ops[From, Elem, To](bf: CanBuildFrom[From, Elem, To]) = new FactoryOps[From, Elem, To] {
def isParallel = bf.isInstanceOf[Parallel]
def asParallel = bf.asInstanceOf[CanCombineFrom[From, Elem, To]]
@@ -73,22 +64,7 @@ package object parallel {
def otherwise(notbody: => R) = if (isParallel) isbody(asParallel) else notbody
}
}
-
- trait TraversableOps[T] {
- trait Otherwise[R] {
- def otherwise(notbody: => R): R
- }
-
- def isParallel: Boolean
- def isParIterable: Boolean
- def asParIterable: ParIterable[T]
- def isParSeq: Boolean
- def asParSeq: ParSeq[T]
- def ifParSeq[R](isbody: ParSeq[T] => R): Otherwise[R]
- def toParArray: ParArray[T]
- }
-
- implicit def traversable2ops[T](t: collection.GenTraversableOnce[T]) = new TraversableOps[T] {
+ implicit def traversable2ops[T](t: scala.collection.GenTraversableOnce[T]) = new TraversableOps[T] {
def isParallel = t.isInstanceOf[Parallel]
def isParIterable = t.isInstanceOf[ParIterable[_]]
def asParIterable = t.asInstanceOf[ParIterable[T]]
@@ -104,11 +80,6 @@ package object parallel {
cb.result
}
}
-
- trait ThrowableOps {
- def alongWith(that: Throwable): Throwable
- }
-
implicit def throwable2ops(self: Throwable) = new ThrowableOps {
def alongWith(that: Throwable) = (self, that) match {
case (self: CompositeThrowable, that: CompositeThrowable) => new CompositeThrowable(self.throwables ++ that.throwables)
@@ -117,20 +88,68 @@ package object parallel {
case _ => new CompositeThrowable(Set(self, that))
}
}
+}
+
+
+package parallel {
+ trait FactoryOps[From, Elem, To] {
+ trait Otherwise[R] {
+ def otherwise(notbody: => R): R
+ }
+
+ def isParallel: Boolean
+ def asParallel: CanCombineFrom[From, Elem, To]
+ def ifParallel[R](isbody: CanCombineFrom[From, Elem, To] => R): Otherwise[R]
+ }
+
+ trait TraversableOps[T] {
+ trait Otherwise[R] {
+ def otherwise(notbody: => R): R
+ }
+
+ def isParallel: Boolean
+ def isParIterable: Boolean
+ def asParIterable: ParIterable[T]
+ def isParSeq: Boolean
+ def asParSeq: ParSeq[T]
+ def ifParSeq[R](isbody: ParSeq[T] => R): Otherwise[R]
+ def toParArray: ParArray[T]
+ }
+
+ trait ThrowableOps {
+ def alongWith(that: Throwable): Throwable
+ }
/* classes */
+ trait CombinerFactory[U, Repr] {
+ /** Provides a combiner used to construct a collection. */
+ def apply(): Combiner[U, Repr]
+ /** The call to the `apply` method can create a new combiner each time.
+ * If it does, this method returns `false`.
+ * The same combiner factory may be used each time (typically, this is
+ * the case for concurrent collections, which are thread safe).
+ * If so, the method returns `true`.
+ */
+ def doesShareCombiners: Boolean
+ }
+
/** Composite throwable - thrown when multiple exceptions are thrown at the same time. */
- final class CompositeThrowable(val throwables: Set[Throwable])
- extends Throwable("Multiple exceptions thrown during a parallel computation: " + throwables.map(t => (t, t.getStackTrace.toList)).mkString(", "))
+ final case class CompositeThrowable(
+ val throwables: Set[Throwable]
+ ) extends Exception(
+ "Multiple exceptions thrown during a parallel computation: " +
+ throwables.map(t => t + "\n" + t.getStackTrace.take(10).++("...").mkString("\n")).mkString("\n\n")
+ )
/** A helper iterator for iterating very small array buffers.
* Automatically forwards the signal delegate when splitting.
*/
private[parallel] class BufferSplitter[T]
- (private val buffer: collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, var signalDelegate: collection.generic.Signalling)
+ (private val buffer: scala.collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, _sigdel: scala.collection.generic.Signalling)
extends IterableSplitter[T] {
+ signalDelegate = _sigdel
def hasNext = index < until
def next = {
val r = buffer(index)
@@ -184,7 +203,7 @@ package object parallel {
* the receiver (which will be the return value).
*/
private[parallel] abstract class BucketCombiner[-Elem, +To, Buck, +CombinerType <: BucketCombiner[Elem, To, Buck, CombinerType]]
- (private val bucketnumber: Int)
+ (private val bucketnumber: Int)
extends Combiner[Elem, To] {
//self: EnvironmentPassingCombiner[Elem, To] =>
protected var buckets: Array[UnrolledBuffer[Buck]] @uncheckedVariance = new Array[UnrolledBuffer[Buck]](bucketnumber)
@@ -192,53 +211,37 @@ package object parallel {
def size = sz
- def clear = {
+ def clear() = {
buckets = new Array[UnrolledBuffer[Buck]](bucketnumber)
sz = 0
}
def beforeCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {}
+
def afterCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {}
- def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) {
- if (other.isInstanceOf[BucketCombiner[_, _, _, _]]) {
- beforeCombine(other)
-
- val that = other.asInstanceOf[BucketCombiner[Elem, To, Buck, CombinerType]]
- var i = 0
- while (i < bucketnumber) {
- if (buckets(i) eq null) {
- buckets(i) = that.buckets(i)
- } else {
- if (that.buckets(i) ne null) buckets(i) concat that.buckets(i)
+ def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = {
+ if (this eq other) this
+ else other match {
+ case _: BucketCombiner[_, _, _, _] =>
+ beforeCombine(other)
+ val that = other.asInstanceOf[BucketCombiner[Elem, To, Buck, CombinerType]]
+
+ var i = 0
+ while (i < bucketnumber) {
+ if (buckets(i) eq null)
+ buckets(i) = that.buckets(i)
+ else if (that.buckets(i) ne null)
+ buckets(i) concat that.buckets(i)
+
+ i += 1
}
- i += 1
- }
- sz = sz + that.size
-
- afterCombine(other)
-
- this
- } else sys.error("Unexpected combiner type.")
- } else this
-
+ sz = sz + that.size
+ afterCombine(other)
+ this
+ case _ =>
+ sys.error("Unexpected combiner type.")
+ }
+ }
}
-
-
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/readme-if-you-want-to-add-something.txt b/src/library/scala/collection/readme-if-you-want-to-add-something.txt
old mode 100644
new mode 100755
diff --git a/src/library/scala/collection/script/Location.scala b/src/library/scala/collection/script/Location.scala
index fea7e63..cd64fa2 100644
--- a/src/library/scala/collection/script/Location.scala
+++ b/src/library/scala/collection/script/Location.scala
@@ -1,19 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package script
-
-/** Class <code>Location</code> describes locations in messages implemented by
- * class <a href="Message.html" target="contentFrame"><code>Message</code></a>.
+/** Class `Location` describes locations in messages implemented by
+ * class [[scala.collection.script.Message]].
*
* @author Matthias Zenger
* @version 1.0, 10/05/2004
diff --git a/src/library/scala/collection/script/Message.scala b/src/library/scala/collection/script/Message.scala
index 877d005..2ab7ea7 100644
--- a/src/library/scala/collection/script/Message.scala
+++ b/src/library/scala/collection/script/Message.scala
@@ -1,23 +1,20 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package script
import mutable.ArrayBuffer
-/** Class <code>Message</code> represents messages that are issued by observable
- * collection classes whenever a data structure is changed. Class <code>Message</code>
- * has several subclasses for the various kinds of events: <code>Update</code>
- * <code>Remove</code>, <code>Include</code>, <code>Reset</code>, and
- * <code>Script</code>.
+/** Class `Message` represents messages that are issued by observable
+ * collection classes whenever a data structure is changed. Class `Message`
+ * has several subclasses for the various kinds of events: `Update`
+ * `Remove`, `Include`, `Reset`, and `Script`.
*
* @author Matthias Zenger
* @version 1.0, 08/07/2003
@@ -78,7 +75,7 @@ class Script[A] extends ArrayBuffer[Message[A]] with Message[A] {
if (i > 1)
res = res + ", "
res = res + "[" + i + "] " + it.next
- i = i + 1
+ i += 1
}
res + ")"
}
diff --git a/src/library/scala/collection/script/Scriptable.scala b/src/library/scala/collection/script/Scriptable.scala
index 904088e..ceaf19a 100644
--- a/src/library/scala/collection/script/Scriptable.scala
+++ b/src/library/scala/collection/script/Scriptable.scala
@@ -1,19 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.collection
package script
-
-/** Classes that mix in the <code>Scriptable</code> class allow
- * messages to be sent to objects of that class.
+/** Classes that mix in the `Scriptable` class allow messages to be sent to
+ * objects of that class.
*
* @author Matthias Zenger
* @version 1.0, 09/05/2004
diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala
index f18ce12..88cb150 100644
--- a/src/library/scala/compat/Platform.scala
+++ b/src/library/scala/compat/Platform.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -109,7 +109,7 @@ object Platform {
* `System.getProperty("line.separator")`
* with a default value of "\n".
*/
- val EOL = util.Properties.lineSeparator
+ val EOL = scala.util.Properties.lineSeparator
/** The current time in milliseconds. The time is counted since 1 January 1970
* UTC.
diff --git a/src/library/scala/concurrent/Awaitable.scala b/src/library/scala/concurrent/Awaitable.scala
new file mode 100644
index 0000000..652a234
--- /dev/null
+++ b/src/library/scala/concurrent/Awaitable.scala
@@ -0,0 +1,64 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+
+
+import scala.concurrent.duration.Duration
+
+
+
+/**
+ * An object that may eventually be completed with a result value of type `T` which may be
+ * awaited using blocking methods.
+ *
+ * The [[Await]] object provides methods that allow accessing the result of an `Awaitable`
+ * by blocking the current thread until the `Awaitable` has been completed or a timeout has
+ * occurred.
+ */
+trait Awaitable[+T] {
+
+ /**
+ * Await the "completed" state of this `Awaitable`.
+ *
+ * '''''This method should not be called directly; use [[Await.ready]] instead.'''''
+ *
+ * @param atMost
+ * maximum wait time, which may be negative (no waiting is done),
+ * [[scala.concurrent.duration.Duration.Inf Duration.Inf]] for unbounded waiting, or a finite positive
+ * duration
+ * @return this `Awaitable`
+ * @throws InterruptedException if the current thread is interrupted while waiting
+ * @throws TimeoutException if after waiting for the specified time this `Awaitable` is still not ready
+ * @throws IllegalArgumentException if `atMost` is [[scala.concurrent.duration.Duration.Undefined Duration.Undefined]]
+ */
+ @throws(classOf[TimeoutException])
+ @throws(classOf[InterruptedException])
+ def ready(atMost: Duration)(implicit permit: CanAwait): this.type
+
+ /**
+ * Await and return the result (of type `T`) of this `Awaitable`.
+ *
+ * '''''This method should not be called directly; use [[Await.result]] instead.'''''
+ *
+ * @param atMost
+ * maximum wait time, which may be negative (no waiting is done),
+ * [[scala.concurrent.duration.Duration.Inf Duration.Inf]] for unbounded waiting, or a finite positive
+ * duration
+ * @return the result value if the `Awaitable` is completed within the specific maximum wait time
+ * @throws InterruptedException if the current thread is interrupted while waiting
+ * @throws TimeoutException if after waiting for the specified time this `Awaitable` is still not ready
+ * @throws IllegalArgumentException if `atMost` is [[scala.concurrent.duration.Duration.Undefined Duration.Undefined]]
+ */
+ @throws(classOf[Exception])
+ def result(atMost: Duration)(implicit permit: CanAwait): T
+}
+
+
+
diff --git a/src/library/scala/concurrent/BlockContext.scala b/src/library/scala/concurrent/BlockContext.scala
new file mode 100644
index 0000000..747cc39
--- /dev/null
+++ b/src/library/scala/concurrent/BlockContext.scala
@@ -0,0 +1,77 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+/**
+ * A context to be notified by `scala.concurrent.blocking` when
+ * a thread is about to block. In effect this trait provides
+ * the implementation for `scala.concurrent.Await`.
+ * `scala.concurrent.Await.result()` and `scala.concurrent.Await.ready()`
+ * locates an instance of `BlockContext` by first looking for one
+ * provided through `BlockContext.withBlockContext()` and failing that,
+ * checking whether `Thread.currentThread` is an instance of `BlockContext`.
+ * So a thread pool can have its `java.lang.Thread` instances implement
+ * `BlockContext`. There's a default `BlockContext` used if the thread
+ * doesn't implement `BlockContext`.
+ *
+ * Typically, you'll want to chain to the previous `BlockContext`,
+ * like this:
+ * {{{
+ * val oldContext = BlockContext.current
+ * val myContext = new BlockContext {
+ * override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = {
+ * // you'd have code here doing whatever you need to do
+ * // when the thread is about to block.
+ * // Then you'd chain to the previous context:
+ * oldContext.blockOn(thunk)
+ * }
+ * }
+ * BlockContext.withBlockContext(myContext) {
+ * // then this block runs with myContext as the handler
+ * // for scala.concurrent.blocking
+ * }
+ * }}}
+ */
+trait BlockContext {
+
+ /** Used internally by the framework;
+ * Designates (and eventually executes) a thunk which potentially blocks the calling `Thread`.
+ *
+ * Clients must use `scala.concurrent.blocking` or `scala.concurrent.Await` instead.
+ */
+ def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T
+}
+
+object BlockContext {
+ private object DefaultBlockContext extends BlockContext {
+ override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = thunk
+ }
+
+ private val contextLocal = new ThreadLocal[BlockContext]()
+
+ /** Obtain the current thread's current `BlockContext`. */
+ def current: BlockContext = contextLocal.get match {
+ case null => Thread.currentThread match {
+ case ctx: BlockContext => ctx
+ case _ => DefaultBlockContext
+ }
+ case some => some
+ }
+
+ /** Pushes a current `BlockContext` while executing `body`. */
+ def withBlockContext[T](blockContext: BlockContext)(body: => T): T = {
+ val old = contextLocal.get // can be null
+ try {
+ contextLocal.set(blockContext)
+ body
+ } finally {
+ contextLocal.set(old)
+ }
+ }
+}
diff --git a/src/library/scala/concurrent/Channel.scala b/src/library/scala/concurrent/Channel.scala
index 43d6846..067244b 100644
--- a/src/library/scala/concurrent/Channel.scala
+++ b/src/library/scala/concurrent/Channel.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -25,7 +25,6 @@ class Channel[A] {
private var nreaders = 0
/**
- * @param x ...
*/
def write(x: A) = synchronized {
lastWritten.elem = x
@@ -46,4 +45,5 @@ class Channel[A] {
written = written.next
x
}
+
}
diff --git a/src/library/scala/concurrent/DelayedLazyVal.scala b/src/library/scala/concurrent/DelayedLazyVal.scala
index 2a143ac..595d411 100644
--- a/src/library/scala/concurrent/DelayedLazyVal.scala
+++ b/src/library/scala/concurrent/DelayedLazyVal.scala
@@ -1,23 +1,21 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.concurrent
-import ops.future
-/** A <code>DelayedLazyVal</code> is a wrapper for lengthy
- * computations which have a valid partially computed result.
- * The first argument is a function for obtaining the result
- * at any given point in time, and the second is the lengthy
- * computation. Once the computation is complete, the apply()
- * method will stop recalculating it and return a fixed value
- * from that point forward.
+/** A `DelayedLazyVal` is a wrapper for lengthy computations which have a
+ * valid partially computed result.
+ *
+ * The first argument is a function for obtaining the result at any given
+ * point in time, and the second is the lengthy computation. Once the
+ * computation is complete, the `apply` method will stop recalculating it
+ * and return a fixed value from that point forward.
*
* @param f the function to obtain the current value at any point in time
* @param body the computation to run to completion in another thread
@@ -25,7 +23,7 @@ import ops.future
* @author Paul Phillips
* @version 2.8
*/
-class DelayedLazyVal[T](f: () => T, body: => Unit) {
+class DelayedLazyVal[T](f: () => T, body: => Unit)(implicit exec: ExecutionContext){
@volatile private[this] var _isDone = false
private[this] lazy val complete = f()
@@ -41,8 +39,5 @@ class DelayedLazyVal[T](f: () => T, body: => Unit) {
*/
def apply(): T = if (isDone) complete else f()
- future {
- body
- _isDone = true
- }
+ exec.execute(new Runnable { def run = { body; _isDone = true } })
}
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
new file mode 100644
index 0000000..b4af161
--- /dev/null
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -0,0 +1,89 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+
+import java.util.concurrent.{ ExecutorService, Executor }
+import scala.annotation.implicitNotFound
+import scala.util.Try
+
+/**
+ * An `ExecutionContext` is an abstraction over an entity that can execute program logic.
+ */
+ at implicitNotFound("Cannot find an implicit ExecutionContext, either import scala.concurrent.ExecutionContext.Implicits.global or use a custom one")
+trait ExecutionContext {
+
+ /** Runs a block of code on this execution context.
+ */
+ def execute(runnable: Runnable): Unit
+
+ /** Reports that an asynchronous computation failed.
+ */
+ def reportFailure(t: Throwable): Unit
+
+ /** Prepares for the execution of a task. Returns the prepared
+ * execution context. A valid implementation of `prepare` is one
+ * that simply returns `this`.
+ */
+ def prepare(): ExecutionContext = this
+
+}
+
+/**
+ * Union interface since Java does not support union types
+ */
+trait ExecutionContextExecutor extends ExecutionContext with Executor
+
+/**
+ * Union interface since Java does not support union types
+ */
+trait ExecutionContextExecutorService extends ExecutionContextExecutor with ExecutorService
+
+
+/** Contains factory methods for creating execution contexts.
+ */
+object ExecutionContext {
+ /**
+ * This is the explicit global ExecutionContext,
+ * call this when you want to provide the global ExecutionContext explicitly
+ */
+ def global: ExecutionContextExecutor = Implicits.global
+
+ object Implicits {
+ /**
+ * This is the implicit global ExecutionContext,
+ * import this when you want to provide the global ExecutionContext implicitly
+ */
+ implicit lazy val global: ExecutionContextExecutor = impl.ExecutionContextImpl.fromExecutor(null: Executor)
+ }
+
+ /** Creates an `ExecutionContext` from the given `ExecutorService`.
+ */
+ def fromExecutorService(e: ExecutorService, reporter: Throwable => Unit): ExecutionContextExecutorService =
+ impl.ExecutionContextImpl.fromExecutorService(e, reporter)
+
+ /** Creates an `ExecutionContext` from the given `ExecutorService` with the default Reporter.
+ */
+ def fromExecutorService(e: ExecutorService): ExecutionContextExecutorService = fromExecutorService(e, defaultReporter)
+
+ /** Creates an `ExecutionContext` from the given `Executor`.
+ */
+ def fromExecutor(e: Executor, reporter: Throwable => Unit): ExecutionContextExecutor =
+ impl.ExecutionContextImpl.fromExecutor(e, reporter)
+
+ /** Creates an `ExecutionContext` from the given `Executor` with the default Reporter.
+ */
+ def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter)
+
+ /** The default reporter simply prints the stack trace of the `Throwable` to System.err.
+ */
+ def defaultReporter: Throwable => Unit = (t: Throwable) => t.printStackTrace()
+}
+
+
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
new file mode 100644
index 0000000..b2c09ec
--- /dev/null
+++ b/src/library/scala/concurrent/Future.scala
@@ -0,0 +1,706 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+import scala.language.higherKinds
+
+import java.util.concurrent.{ ConcurrentLinkedQueue, TimeUnit, Callable }
+import java.util.concurrent.TimeUnit.{ NANOSECONDS => NANOS, MILLISECONDS ⇒ MILLIS }
+import java.lang.{ Iterable => JIterable }
+import java.util.{ LinkedList => JLinkedList }
+import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicLong, AtomicBoolean }
+
+import scala.util.control.NonFatal
+import scala.Option
+import scala.util.{Try, Success, Failure}
+
+import scala.annotation.tailrec
+import scala.collection.mutable.Builder
+import scala.collection.generic.CanBuildFrom
+import scala.reflect.ClassTag
+
+
+
+/** The trait that represents futures.
+ *
+ * Asynchronous computations that yield futures are created with the `future` call:
+ *
+ * {{{
+ * val s = "Hello"
+ * val f: Future[String] = future {
+ * s + " future!"
+ * }
+ * f onSuccess {
+ * case msg => println(msg)
+ * }
+ * }}}
+ *
+ * @author Philipp Haller, Heather Miller, Aleksandar Prokopec, Viktor Klang
+ *
+ * @define multipleCallbacks
+ * Multiple callbacks may be registered; there is no guarantee that they will be
+ * executed in a particular order.
+ *
+ * @define caughtThrowables
+ * The future may contain a throwable object and this means that the future failed.
+ * Futures obtained through combinators have the same exception as the future they were obtained from.
+ * The following throwable objects are not contained in the future:
+ * - `Error` - errors are not contained within futures
+ * - `InterruptedException` - not contained within futures
+ * - all `scala.util.control.ControlThrowable` except `NonLocalReturnControl` - not contained within futures
+ *
+ * Instead, the future is completed with a ExecutionException with one of the exceptions above
+ * as the cause.
+ * If a future is failed with a `scala.runtime.NonLocalReturnControl`,
+ * it is completed with a value from that throwable instead.
+ *
+ * @define nonDeterministic
+ * Note: using this method yields nondeterministic dataflow programs.
+ *
+ * @define forComprehensionExamples
+ * Example:
+ *
+ * {{{
+ * val f = future { 5 }
+ * val g = future { 3 }
+ * val h = for {
+ * x: Int <- f // returns Future(5)
+ * y: Int <- g // returns Future(5)
+ * } yield x + y
+ * }}}
+ *
+ * is translated to:
+ *
+ * {{{
+ * f flatMap { (x: Int) => g map { (y: Int) => x + y } }
+ * }}}
+ *
+ * @define callbackInContext
+ * The provided callback always runs in the provided implicit
+ *`ExecutionContext`, though there is no guarantee that the
+ * `execute()` method on the `ExecutionContext` will be called once
+ * per callback or that `execute()` will be called in the current
+ * thread. That is, the implementation may run multiple callbacks
+ * in a batch within a single `execute()` and it may run
+ * `execute()` either immediately or asynchronously.
+ */
+trait Future[+T] extends Awaitable[T] {
+
+ // The executor within the lexical scope
+ // of the Future trait. Note that this will
+ // (modulo bugs) _never_ execute a callback
+ // other than those below in this same file.
+ //
+ // See the documentation on `InternalCallbackExecutor` for more details.
+ private def internalExecutor = Future.InternalCallbackExecutor
+
+ /* Callbacks */
+
+ /** When this future is completed successfully (i.e. with a value),
+ * apply the provided partial function to the value if the partial function
+ * is defined at that value.
+ *
+ * If the future has already been completed with a value,
+ * this will either be applied immediately or be scheduled asynchronously.
+ *
+ * $multipleCallbacks
+ * $callbackInContext
+ */
+ def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = onComplete {
+ case Success(v) =>
+ pf.applyOrElse[T, Any](v, Predef.conforms[T]) // Exploiting the cached function to avoid MatchError
+ case _ =>
+ }
+
+ /** When this future is completed with a failure (i.e. with a throwable),
+ * apply the provided callback to the throwable.
+ *
+ * $caughtThrowables
+ *
+ * If the future has already been completed with a failure,
+ * this will either be applied immediately or be scheduled asynchronously.
+ *
+ * Will not be called in case that the future is completed with a value.
+ *
+ * $multipleCallbacks
+ * $callbackInContext
+ */
+ def onFailure[U](callback: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete {
+ case Failure(t) =>
+ callback.applyOrElse[Throwable, Any](t, Predef.conforms[Throwable]) // Exploiting the cached function to avoid MatchError
+ case _ =>
+ }
+
+ /** When this future is completed, either through an exception, or a value,
+ * apply the provided function.
+ *
+ * If the future has already been completed,
+ * this will either be applied immediately or be scheduled asynchronously.
+ *
+ * $multipleCallbacks
+ * $callbackInContext
+ */
+ def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit
+
+
+ /* Miscellaneous */
+
+ /** Returns whether the future has already been completed with
+ * a value or an exception.
+ *
+ * $nonDeterministic
+ *
+ * @return `true` if the future is already completed, `false` otherwise
+ */
+ def isCompleted: Boolean
+
+ /** The value of this `Future`.
+ *
+ * If the future is not completed the returned value will be `None`.
+ * If the future is completed the value will be `Some(Success(t))`
+ * if it contains a valid result, or `Some(Failure(error))` if it contains
+ * an exception.
+ */
+ def value: Option[Try[T]]
+
+
+ /* Projections */
+
+ /** Returns a failed projection of this future.
+ *
+ * The failed projection is a future holding a value of type `Throwable`.
+ *
+ * It is completed with a value which is the throwable of the original future
+ * in case the original future is failed.
+ *
+ * It is failed with a `NoSuchElementException` if the original future is completed successfully.
+ *
+ * Blocking on this future returns a value if the original future is completed with an exception
+ * and throws a corresponding exception if the original future fails.
+ */
+ def failed: Future[Throwable] = {
+ implicit val ec = internalExecutor
+ val p = Promise[Throwable]()
+ onComplete {
+ case Failure(t) => p success t
+ case Success(v) => p failure (new NoSuchElementException("Future.failed not completed with a throwable."))
+ }
+ p.future
+ }
+
+
+ /* Monadic operations */
+
+ /** Asynchronously processes the value in the future once the value becomes available.
+ *
+ * Will not be called if the future fails.
+ */
+ def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = onComplete { _ foreach f }
+
+ /** Creates a new future by applying the 's' function to the successful result of
+ * this future, or the 'f' function to the failed result. If there is any non-fatal
+ * exception thrown when 's' or 'f' is applied, that exception will be propagated
+ * to the resulting future.
+ *
+ * @param s function that transforms a successful result of the receiver into a
+ * successful result of the returned future
+ * @param f function that transforms a failure of the receiver into a failure of
+ * the returned future
+ * @return a future that will be completed with the transformed value
+ */
+ def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = {
+ val p = Promise[S]()
+ // transform on Try has the wrong shape for us here
+ onComplete {
+ case Success(r) => p complete Try(s(r))
+ case Failure(t) => p complete Try(throw f(t)) // will throw fatal errors!
+ }
+ p.future
+ }
+
+ /** Creates a new future by applying a function to the successful result of
+ * this future. If this future is completed with an exception then the new
+ * future will also contain this exception.
+ *
+ * $forComprehensionExamples
+ */
+ def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = { // transform(f, identity)
+ val p = Promise[S]()
+ onComplete { v => p complete (v map f) }
+ p.future
+ }
+
+ /** Creates a new future by applying a function to the successful result of
+ * this future, and returns the result of the function as the new future.
+ * If this future is completed with an exception then the new future will
+ * also contain this exception.
+ *
+ * $forComprehensionExamples
+ */
+ def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = {
+ import impl.Promise.DefaultPromise
+ val p = new DefaultPromise[S]()
+ onComplete {
+ case f: Failure[_] => p complete f.asInstanceOf[Failure[S]]
+ case Success(v) => try f(v) match {
+ // If possible, link DefaultPromises to avoid space leaks
+ case dp: DefaultPromise[_] => dp.asInstanceOf[DefaultPromise[S]].linkRootOf(p)
+ case fut => fut.onComplete(p.complete)(internalExecutor)
+ } catch { case NonFatal(t) => p failure t }
+ }
+ p.future
+ }
+
+ /** Creates a new future by filtering the value of the current future with a predicate.
+ *
+ * If the current future contains a value which satisfies the predicate, the new future will also hold that value.
+ * Otherwise, the resulting future will fail with a `NoSuchElementException`.
+ *
+ * If the current future fails, then the resulting future also fails.
+ *
+ * Example:
+ * {{{
+ * val f = future { 5 }
+ * val g = f filter { _ % 2 == 1 }
+ * val h = f filter { _ % 2 == 0 }
+ * Await.result(g, Duration.Zero) // evaluates to 5
+ * Await.result(h, Duration.Zero) // throw a NoSuchElementException
+ * }}}
+ */
+ def filter(pred: T => Boolean)(implicit executor: ExecutionContext): Future[T] =
+ map {
+ r => if (pred(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied")
+ }
+
+ /** Used by for-comprehensions.
+ */
+ final def withFilter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = filter(p)(executor)
+
+ /** Creates a new future by mapping the value of the current future, if the given partial function is defined at that value.
+ *
+ * If the current future contains a value for which the partial function is defined, the new future will also hold that value.
+ * Otherwise, the resulting future will fail with a `NoSuchElementException`.
+ *
+ * If the current future fails, then the resulting future also fails.
+ *
+ * Example:
+ * {{{
+ * val f = future { -5 }
+ * val g = f collect {
+ * case x if x < 0 => -x
+ * }
+ * val h = f collect {
+ * case x if x > 0 => x * 2
+ * }
+ * Await.result(g, Duration.Zero) // evaluates to 5
+ * Await.result(h, Duration.Zero) // throw a NoSuchElementException
+ * }}}
+ */
+ def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] =
+ map {
+ r => pf.applyOrElse(r, (t: T) => throw new NoSuchElementException("Future.collect partial function is not defined at: " + t))
+ }
+
+ /** Creates a new future that will handle any matching throwable that this
+ * future might contain. If there is no match, or if this future contains
+ * a valid result then the new future will contain the same.
+ *
+ * Example:
+ *
+ * {{{
+ * future (6 / 0) recover { case e: ArithmeticException => 0 } // result: 0
+ * future (6 / 0) recover { case e: NotFoundException => 0 } // result: exception
+ * future (6 / 2) recover { case e: ArithmeticException => 0 } // result: 3
+ * }}}
+ */
+ def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = {
+ val p = Promise[U]()
+ onComplete { v => p complete (v recover pf) }
+ p.future
+ }
+
+ /** Creates a new future that will handle any matching throwable that this
+ * future might contain by assigning it a value of another future.
+ *
+ * If there is no match, or if this future contains
+ * a valid result then the new future will contain the same result.
+ *
+ * Example:
+ *
+ * {{{
+ * val f = future { Int.MaxValue }
+ * future (6 / 0) recoverWith { case e: ArithmeticException => f } // result: Int.MaxValue
+ * }}}
+ */
+ def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = {
+ val p = Promise[U]()
+ onComplete {
+ case Failure(t) => try pf.applyOrElse(t, (_: Throwable) => this).onComplete(p.complete)(internalExecutor) catch { case NonFatal(t) => p failure t }
+ case other => p complete other
+ }
+ p.future
+ }
+
+ /** Zips the values of `this` and `that` future, and creates
+ * a new future holding the tuple of their results.
+ *
+ * If `this` future fails, the resulting future is failed
+ * with the throwable stored in `this`.
+ * Otherwise, if `that` future fails, the resulting future is failed
+ * with the throwable stored in `that`.
+ */
+ def zip[U](that: Future[U]): Future[(T, U)] = {
+ implicit val ec = internalExecutor
+ val p = Promise[(T, U)]()
+ onComplete {
+ case f: Failure[_] => p complete f.asInstanceOf[Failure[(T, U)]]
+ case Success(s) => that onComplete { c => p.complete(c map { s2 => (s, s2) }) }
+ }
+ p.future
+ }
+
+ /** Creates a new future which holds the result of this future if it was completed successfully, or, if not,
+ * the result of the `that` future if `that` is completed successfully.
+ * If both futures are failed, the resulting future holds the throwable object of the first future.
+ *
+ * Using this method will not cause concurrent programs to become nondeterministic.
+ *
+ * Example:
+ * {{{
+ * val f = future { sys.error("failed") }
+ * val g = future { 5 }
+ * val h = f fallbackTo g
+ * Await.result(h, Duration.Zero) // evaluates to 5
+ * }}}
+ */
+ def fallbackTo[U >: T](that: Future[U]): Future[U] = {
+ implicit val ec = internalExecutor
+ val p = Promise[U]()
+ onComplete {
+ case s @ Success(_) => p complete s
+ case f @ Failure(_) => that onComplete {
+ case s2 @ Success(_) => p complete s2
+ case _ => p complete f // Use the first failure as the failure
+ }
+ }
+ p.future
+ }
+
+ /** Creates a new `Future[S]` which is completed with this `Future`'s result if
+ * that conforms to `S`'s erased type or a `ClassCastException` otherwise.
+ */
+ def mapTo[S](implicit tag: ClassTag[S]): Future[S] = {
+ implicit val ec = internalExecutor
+ val boxedClass = {
+ val c = tag.runtimeClass
+ if (c.isPrimitive) Future.toBoxed(c) else c
+ }
+ require(boxedClass ne null)
+ map(s => boxedClass.cast(s).asInstanceOf[S])
+ }
+
+ /** Applies the side-effecting function to the result of this future, and returns
+ * a new future with the result of this future.
+ *
+ * This method allows one to enforce that the callbacks are executed in a
+ * specified order.
+ *
+ * Note that if one of the chained `andThen` callbacks throws
+ * an exception, that exception is not propagated to the subsequent `andThen`
+ * callbacks. Instead, the subsequent `andThen` callbacks are given the original
+ * value of this future.
+ *
+ * The following example prints out `5`:
+ *
+ * {{{
+ * val f = future { 5 }
+ * f andThen {
+ * case r => sys.error("runtime exception")
+ * } andThen {
+ * case Failure(t) => println(t)
+ * case Success(v) => println(v)
+ * }
+ * }}}
+ */
+ def andThen[U](pf: PartialFunction[Try[T], U])(implicit executor: ExecutionContext): Future[T] = {
+ val p = Promise[T]()
+ onComplete {
+ case r => try pf.applyOrElse[Try[T], Any](r, Predef.conforms[Try[T]]) finally p complete r
+ }
+ p.future
+ }
+
+}
+
+
+
+/** Future companion object.
+ *
+ * @define nonDeterministic
+ * Note: using this method yields nondeterministic dataflow programs.
+ */
+object Future {
+
+ private[concurrent] val toBoxed = Map[Class[_], Class[_]](
+ classOf[Boolean] -> classOf[java.lang.Boolean],
+ classOf[Byte] -> classOf[java.lang.Byte],
+ classOf[Char] -> classOf[java.lang.Character],
+ classOf[Short] -> classOf[java.lang.Short],
+ classOf[Int] -> classOf[java.lang.Integer],
+ classOf[Long] -> classOf[java.lang.Long],
+ classOf[Float] -> classOf[java.lang.Float],
+ classOf[Double] -> classOf[java.lang.Double],
+ classOf[Unit] -> classOf[scala.runtime.BoxedUnit]
+ )
+
+ /** Creates an already completed Future with the specified exception.
+ *
+ * @tparam T the type of the value in the future
+ * @return the newly created `Future` object
+ */
+ def failed[T](exception: Throwable): Future[T] = Promise.failed(exception).future
+
+ /** Creates an already completed Future with the specified result.
+ *
+ * @tparam T the type of the value in the future
+ * @return the newly created `Future` object
+ */
+ def successful[T](result: T): Future[T] = Promise.successful(result).future
+
+ /** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
+ *
+ * The result becomes available once the asynchronous computation is completed.
+ *
+ * @tparam T the type of the result
+ * @param body the asychronous computation
+ * @param execctx the execution context on which the future is run
+ * @return the `Future` holding the result of the computation
+ */
+ def apply[T](body: =>T)(implicit execctx: ExecutionContext): Future[T] = impl.Future(body)
+
+ /** Simple version of `Futures.traverse`. Transforms a `TraversableOnce[Future[A]]` into a `Future[TraversableOnce[A]]`.
+ * Useful for reducing many `Future`s into a single `Future`.
+ */
+ def sequence[A, M[_] <: TraversableOnce[_]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = {
+ in.foldLeft(Promise.successful(cbf(in)).future) {
+ (fr, fa) => for (r <- fr; a <- fa.asInstanceOf[Future[A]]) yield (r += a)
+ } map (_.result)
+ }
+
+ /** Returns a new `Future` to the result of the first future in the list that is completed.
+ */
+ def firstCompletedOf[T](futures: TraversableOnce[Future[T]])(implicit executor: ExecutionContext): Future[T] = {
+ val p = Promise[T]()
+ val completeFirst: Try[T] => Unit = p tryComplete _
+ futures foreach { _ onComplete completeFirst }
+ p.future
+ }
+
+ /** Returns a `Future` that will hold the optional result of the first `Future` with a result that matches the predicate.
+ */
+ def find[T](futurestravonce: TraversableOnce[Future[T]])(predicate: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = {
+ val futures = futurestravonce.toBuffer
+ if (futures.isEmpty) Promise.successful[Option[T]](None).future
+ else {
+ val result = Promise[Option[T]]()
+ val ref = new AtomicInteger(futures.size)
+ val search: Try[T] => Unit = v => try {
+ v match {
+ case Success(r) => if (predicate(r)) result tryComplete Success(Some(r))
+ case _ =>
+ }
+ } finally {
+ if (ref.decrementAndGet == 0) {
+ result tryComplete Success(None)
+ }
+ }
+
+ futures.foreach(_ onComplete search)
+
+ result.future
+ }
+ }
+
+ /** A non-blocking fold over the specified futures, with the start value of the given zero.
+ * The fold is performed on the thread where the last future is completed,
+ * the result will be the first failure of any of the futures, or any failure in the actual fold,
+ * or the result of the fold.
+ *
+ * Example:
+ * {{{
+ * val result = Await.result(Future.fold(futures)(0)(_ + _), 5 seconds)
+ * }}}
+ */
+ def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(foldFun: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
+ if (futures.isEmpty) Future.successful(zero)
+ else sequence(futures).map(_.foldLeft(zero)(foldFun))
+ }
+
+ /** Initiates a fold over the supplied futures where the fold-zero is the result value of the `Future` that's completed first.
+ *
+ * Example:
+ * {{{
+ * val result = Await.result(Future.reduce(futures)(_ + _), 5 seconds)
+ * }}}
+ */
+ def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
+ if (futures.isEmpty) Future.failed(new NoSuchElementException("reduce attempted on empty collection"))
+ else sequence(futures).map(_ reduceLeft op)
+ }
+
+ /** Transforms a `TraversableOnce[A]` into a `Future[TraversableOnce[B]]` using the provided function `A => Future[B]`.
+ * This is useful for performing a parallel map. For example, to apply a function to all items of a list
+ * in parallel:
+ *
+ * {{{
+ * val myFutureList = Future.traverse(myList)(x => Future(myFunc(x)))
+ * }}}
+ */
+ def traverse[A, B, M[_] <: TraversableOnce[_]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] =
+ in.foldLeft(Promise.successful(cbf(in)).future) { (fr, a) =>
+ val fb = fn(a.asInstanceOf[A])
+ for (r <- fr; b <- fb) yield (r += b)
+ }.map(_.result)
+
+ // This is used to run callbacks which are internal
+ // to scala.concurrent; our own callbacks are only
+ // ever used to eventually run another callback,
+ // and that other callback will have its own
+ // executor because all callbacks come with
+ // an executor. Our own callbacks never block
+ // and have no "expected" exceptions.
+ // As a result, this executor can do nothing;
+ // some other executor will always come after
+ // it (and sometimes one will be before it),
+ // and those will be performing the "real"
+ // dispatch to code outside scala.concurrent.
+ // Because this exists, ExecutionContext.defaultExecutionContext
+ // isn't instantiated by Future internals, so
+ // if some code for some reason wants to avoid
+ // ever starting up the default context, it can do so
+ // by just not ever using it itself. scala.concurrent
+ // doesn't need to create defaultExecutionContext as
+ // a side effect.
+ private[concurrent] object InternalCallbackExecutor extends ExecutionContext with java.util.concurrent.Executor {
+ override def reportFailure(t: Throwable): Unit =
+ throw new IllegalStateException("problem in scala.concurrent internal callback", t)
+
+ /**
+ * The BatchingExecutor trait had to be inlined into InternalCallbackExecutor for binary compatibility.
+ *
+ * BatchingExecutor is a trait for an Executor
+ * which groups multiple nested `Runnable.run()` calls
+ * into a single Runnable passed to the original
+ * Executor. This can be a useful optimization
+ * because it bypasses the original context's task
+ * queue and keeps related (nested) code on a single
+ * thread which may improve CPU affinity. However,
+ * if tasks passed to the Executor are blocking
+ * or expensive, this optimization can prevent work-stealing
+ * and make performance worse. Also, some ExecutionContext
+ * may be fast enough natively that this optimization just
+ * adds overhead.
+ * The default ExecutionContext.global is already batching
+ * or fast enough not to benefit from it; while
+ * `fromExecutor` and `fromExecutorService` do NOT add
+ * this optimization since they don't know whether the underlying
+ * executor will benefit from it.
+ * A batching executor can create deadlocks if code does
+ * not use `scala.concurrent.blocking` when it should,
+ * because tasks created within other tasks will block
+ * on the outer task completing.
+ * This executor may run tasks in any order, including LIFO order.
+ * There are no ordering guarantees.
+ *
+ * WARNING: The underlying Executor's execute-method must not execute the submitted Runnable
+ * in the calling thread synchronously. It must enqueue/handoff the Runnable.
+ */
+ // invariant: if "_tasksLocal.get ne null" then we are inside BatchingRunnable.run; if it is null, we are outside
+ private val _tasksLocal = new ThreadLocal[List[Runnable]]()
+
+ private class Batch(val initial: List[Runnable]) extends Runnable with BlockContext {
+ private[this] var parentBlockContext: BlockContext = _
+ // this method runs in the delegate ExecutionContext's thread
+ override def run(): Unit = {
+ require(_tasksLocal.get eq null)
+
+ val prevBlockContext = BlockContext.current
+ BlockContext.withBlockContext(this) {
+ try {
+ parentBlockContext = prevBlockContext
+
+ @tailrec def processBatch(batch: List[Runnable]): Unit = batch match {
+ case Nil => ()
+ case head :: tail =>
+ _tasksLocal set tail
+ try {
+ head.run()
+ } catch {
+ case t: Throwable =>
+ // if one task throws, move the
+ // remaining tasks to another thread
+ // so we can throw the exception
+ // up to the invoking executor
+ val remaining = _tasksLocal.get
+ _tasksLocal set Nil
+ unbatchedExecute(new Batch(remaining)) //TODO what if this submission fails?
+ throw t // rethrow
+ }
+ processBatch(_tasksLocal.get) // since head.run() can add entries, always do _tasksLocal.get here
+ }
+
+ processBatch(initial)
+ } finally {
+ _tasksLocal.remove()
+ parentBlockContext = null
+ }
+ }
+ }
+
+ override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = {
+ // if we know there will be blocking, we don't want to keep tasks queued up because it could deadlock.
+ {
+ val tasks = _tasksLocal.get
+ _tasksLocal set Nil
+ if ((tasks ne null) && tasks.nonEmpty)
+ unbatchedExecute(new Batch(tasks))
+ }
+
+ // now delegate the blocking to the previous BC
+ require(parentBlockContext ne null)
+ parentBlockContext.blockOn(thunk)
+ }
+ }
+
+ override def execute(runnable: Runnable): Unit = runnable match {
+ // If we can batch the runnable
+ case _: OnCompleteRunnable =>
+ _tasksLocal.get match {
+ case null => unbatchedExecute(new Batch(List(runnable))) // If we aren't in batching mode yet, enqueue batch
+ case some => _tasksLocal.set(runnable :: some) // If we are already in batching mode, add to batch
+ }
+
+ // If not batchable, just delegate to underlying
+ case _ =>
+ unbatchedExecute(runnable)
+ }
+
+ private def unbatchedExecute(r: Runnable): Unit = r.run()
+ }
+}
+
+/** A marker indicating that a `java.lang.Runnable` provided to `scala.concurrent.ExecutionContext`
+ * wraps a callback provided to `Future.onComplete`.
+ * All callbacks provided to a `Future` end up going through `onComplete`, so this allows an
+ * `ExecutionContext` to special-case callbacks that were executed by `Future` if desired.
+ */
+trait OnCompleteRunnable {
+ self: Runnable =>
+}
+
diff --git a/src/library/scala/concurrent/FutureTaskRunner.scala b/src/library/scala/concurrent/FutureTaskRunner.scala
index 206155c..eeadadd 100644
--- a/src/library/scala/concurrent/FutureTaskRunner.scala
+++ b/src/library/scala/concurrent/FutureTaskRunner.scala
@@ -1,10 +1,21 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2009-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.concurrent
-/** The <code>FutureTaskRunner</code> trait is a base trait of task runners
+import scala.language.{implicitConversions, higherKinds}
+
+/** The `FutureTaskRunner</code> trait is a base trait of task runners
* that provide some sort of future abstraction.
*
* @author Philipp Haller
*/
+ at deprecated("Use `ExecutionContext` instead.", "2.10.0")
trait FutureTaskRunner extends TaskRunner {
/** The type of the futures that the underlying task runner supports.
@@ -22,6 +33,7 @@ trait FutureTaskRunner extends TaskRunner {
/* Possibly blocks the current thread, for example, waiting for
* a lock or condition.
*/
+ @deprecated("Use `blocking` instead.", "2.10.0")
def managedBlock(blocker: ManagedBlocker): Unit
}
diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala
index 2fa213e..d6a7c1f 100644
--- a/src/library/scala/concurrent/JavaConversions.scala
+++ b/src/library/scala/concurrent/JavaConversions.scala
@@ -1,23 +1,24 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.concurrent
import java.util.concurrent.{ExecutorService, Executor}
+import scala.language.implicitConversions
-/** The <code>JavaConversions</code> object...
+/** The `JavaConversions` object provides implicit converstions supporting
+ * interoperability between Scala and Java concurrency classes.
*
* @author Philipp Haller
*/
object JavaConversions {
+ @deprecated("Use `asExecutionContext` instead.", "2.10.0")
implicit def asTaskRunner(exec: ExecutorService): FutureTaskRunner =
new ThreadPoolRunner {
override protected def executor =
@@ -27,6 +28,7 @@ object JavaConversions {
exec.shutdown()
}
+ @deprecated("Use `asExecutionContext` instead.", "2.10.0")
implicit def asTaskRunner(exec: Executor): TaskRunner =
new TaskRunner {
type Task[T] = Runnable
@@ -47,4 +49,17 @@ object JavaConversions {
// do nothing
}
}
+
+ /**
+ * Creates a new `ExecutionContext` which uses the provided `ExecutorService`.
+ */
+ implicit def asExecutionContext(exec: ExecutorService): ExecutionContextExecutorService =
+ ExecutionContext.fromExecutorService(exec)
+
+ /**
+ * Creates a new `ExecutionContext` which uses the provided `Executor`.
+ */
+ implicit def asExecutionContext(exec: Executor): ExecutionContextExecutor =
+ ExecutionContext.fromExecutor(exec)
+
}
diff --git a/src/library/scala/concurrent/Lock.scala b/src/library/scala/concurrent/Lock.scala
index 08c9f6c..4b81397 100644
--- a/src/library/scala/concurrent/Lock.scala
+++ b/src/library/scala/concurrent/Lock.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/concurrent/MailBox.scala b/src/library/scala/concurrent/MailBox.scala
deleted file mode 100644
index b00ab79..0000000
--- a/src/library/scala/concurrent/MailBox.scala
+++ /dev/null
@@ -1,179 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.concurrent
-
-/** This class ...
- *
- * @author Martin Odersky
- * @version 1.0, 12/03/2003
- */
-//class MailBox with Monitor with LinkedListQueueCreator {
- at deprecated("use actors instead", "2.8.0")
-class MailBox extends AnyRef with ListQueueCreator {
-
- type Message = AnyRef
-
- private abstract class PreReceiver {
- var msg: Message = null
- def isDefinedAt(msg: Message): Boolean
- }
-
- private class Receiver[A](receiver: PartialFunction[Message, A]) extends PreReceiver {
-
- def isDefinedAt(msg: Message) = receiver.isDefinedAt(msg)
-
- def receive(): A = synchronized {
- while (msg eq null) wait()
- receiver(msg)
- }
-
- def receiveWithin(msec: Long): A = synchronized {
- if (msg eq null) wait(msec)
- receiver(if (msg ne null) msg else TIMEOUT)
- }
- }
-
- private val messageQueue = queueCreate[Message]
- private val receiverQueue = queueCreate[PreReceiver]
-
- /** Unconsumed messages. */
- private var sent = messageQueue.make
-
- /** Pending receivers. */
- private var receivers = receiverQueue.make
-
- /**
- * Check whether the receiver can be applied to an unconsumed message.
- * If yes, the message is extracted and associated with the receiver.
- * Otherwise the receiver is appended to the list of pending receivers.
- */
- private def scanSentMsgs[A](receiver: Receiver[A]): Unit = synchronized {
- messageQueue.extractFirst(sent, msg => receiver.isDefinedAt(msg)) match {
- case None =>
- receivers = receiverQueue.append(receivers, receiver)
- case Some((msg, withoutMsg)) =>
- sent = withoutMsg
- receiver.msg = msg
- }
- }
-
- /**
- * First check whether a pending receiver is applicable to the sent
- * message. If yes, the receiver is notified. Otherwise the message
- * is appended to the linked list of sent messages.
- */
- def send(msg: Message): Unit = synchronized {
- receiverQueue.extractFirst(receivers, r => r.isDefinedAt(msg)) match {
- case None =>
- sent = messageQueue.append(sent, msg)
- case Some((receiver, withoutReceiver)) =>
- receivers = withoutReceiver
- receiver.msg = msg
- receiver synchronized { receiver.notify() }
- }
- }
-
- /**
- * Block until there is a message in the mailbox for which the processor
- * <code>f</code> is defined.
- */
- def receive[A](f: PartialFunction[Message, A]): A = {
- val r = new Receiver(f)
- scanSentMsgs(r)
- r.receive()
- }
-
- /**
- * Block until there is a message in the mailbox for which the processor
- * <code>f</code> is defined or the timeout is over.
- */
- def receiveWithin[A](msec: Long)(f: PartialFunction[Message, A]): A = {
- val r = new Receiver(f)
- scanSentMsgs(r)
- r.receiveWithin(msec)
- }
-
-}
-
-
-
-/**
-* Module for dealing with queues.
-*/
- at deprecated("use actors instead", "2.8.0")
-trait QueueModule[A] {
- /** Type of queues. */
- type T
- /** Create an empty queue. */
- def make: T
- /** Append an element to a queue. */
- def append(l: T, x: A): T
- /** Extract an element satisfying a predicate from a queue. */
- def extractFirst(l: T, p: A => Boolean): Option[(A, T)]
-}
-
-/** Inefficient but simple queue module creator. */
- at deprecated("use actors instead", "2.8.0")
-trait ListQueueCreator {
- def queueCreate[A]: QueueModule[A] = new QueueModule[A] {
- type T = List[A]
- def make: T = Nil
- def append(l: T, x: A): T = l ::: x :: Nil
- def extractFirst(l: T, p: A => Boolean): Option[(A, T)] =
- l match {
- case Nil => None
- case head :: tail =>
- if (p(head))
- Some((head, tail))
- else
- extractFirst(tail, p) match {
- case None => None
- case Some((x, without_x)) => Some((x, head :: without_x))
- }
- }
- }
-}
-
-/** Efficient queue module creator based on linked lists. */
- at deprecated("use actors instead", "2.8.0")
-trait LinkedListQueueCreator {
- import scala.collection.mutable.LinkedList
- def queueCreate[A >: Null <: AnyRef]: QueueModule[A] = new QueueModule[A] {
- type T = (LinkedList[A], LinkedList[A]) // fst = the list, snd = last elem
- def make: T = {
- val l = new LinkedList[A](null, null)
- (l, l)
- }
- def append(l: T, x: A): T = {
- val atTail = new LinkedList(x, null)
- l._2 append atTail;
- (l._1, atTail)
- }
- def extractFirst(l: T, p: A => Boolean): Option[(A, T)] = {
- var xs = l._1
- var xs1 = xs.next
- while ((xs1 ne null) && !p(xs1.elem)) {
- xs = xs1
- xs1 = xs1.next
- }
- if (xs1 ne null) {
- xs.next = xs1.next
- if (xs.next eq null)
- Some((xs1.elem, (l._1, xs)))
- else
- Some((xs1.elem, l))
- }
- else
- None
- }
- }
-}
-
diff --git a/src/library/scala/concurrent/ManagedBlocker.scala b/src/library/scala/concurrent/ManagedBlocker.scala
index a0a58aa..7b2966c 100644
--- a/src/library/scala/concurrent/ManagedBlocker.scala
+++ b/src/library/scala/concurrent/ManagedBlocker.scala
@@ -1,33 +1,33 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.concurrent
-/** The <code>ManagedBlocker</code> trait...
+/** The `ManagedBlocker` trait...
*
* @author Philipp Haller
*/
+ at deprecated("Use `blocking` instead.", "2.10.0")
trait ManagedBlocker {
/**
* Possibly blocks the current thread, for example waiting for
* a lock or condition.
+ *
* @return true if no additional blocking is necessary (i.e.,
- * if isReleasable would return true).
+ * if `isReleasable` would return `true`).
* @throws InterruptedException if interrupted while waiting
- * (the method is not required to do so, but is allowed to).
+ * (the method is not required to do so, but is allowed to).
*/
def block(): Boolean
/**
- * Returns true if blocking is unnecessary.
+ * Returns `true` if blocking is unnecessary.
*/
def isReleasable: Boolean
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
new file mode 100644
index 0000000..8355a73
--- /dev/null
+++ b/src/library/scala/concurrent/Promise.scala
@@ -0,0 +1,152 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+import scala.util.{ Try, Success, Failure }
+
+/** Promise is an object which can be completed with a value or failed
+ * with an exception.
+ *
+ * @define promiseCompletion
+ * If the promise has already been fulfilled, failed or has timed out,
+ * calling this method will throw an IllegalStateException.
+ *
+ * @define allowedThrowables
+ * If the throwable used to fail this promise is an error, a control exception
+ * or an interrupted exception, it will be wrapped as a cause within an
+ * `ExecutionException` which will fail the promise.
+ *
+ * @define nonDeterministic
+ * Note: Using this method may result in non-deterministic concurrent programs.
+ */
+trait Promise[T] {
+
+ // used for internal callbacks defined in
+ // the lexical scope of this trait;
+ // _never_ for application callbacks.
+ private implicit def internalExecutor: ExecutionContext = Future.InternalCallbackExecutor
+
+ /** Future containing the value of this promise.
+ */
+ def future: Future[T]
+
+ /** Returns whether the promise has already been completed with
+ * a value or an exception.
+ *
+ * $nonDeterministic
+ *
+ * @return `true` if the promise is already completed, `false` otherwise
+ */
+ def isCompleted: Boolean
+
+ /** Completes the promise with either an exception or a value.
+ *
+ * @param result Either the value or the exception to complete the promise with.
+ *
+ * $promiseCompletion
+ */
+ def complete(result: Try[T]): this.type =
+ if (tryComplete(result)) this else throw new IllegalStateException("Promise already completed.")
+
+ /** Tries to complete the promise with either a value or the exception.
+ *
+ * $nonDeterministic
+ *
+ * @return If the promise has already been completed returns `false`, or `true` otherwise.
+ */
+ def tryComplete(result: Try[T]): Boolean
+
+ /** Completes this promise with the specified future, once that future is completed.
+ *
+ * @return This promise
+ */
+ final def completeWith(other: Future[T]): this.type = {
+ other onComplete { this complete _ }
+ this
+ }
+
+ /** Attempts to complete this promise with the specified future, once that future is completed.
+ *
+ * @return This promise
+ */
+ final def tryCompleteWith(other: Future[T]): this.type = {
+ other onComplete { this tryComplete _ }
+ this
+ }
+
+ /** Completes the promise with a value.
+ *
+ * @param v The value to complete the promise with.
+ *
+ * $promiseCompletion
+ */
+ def success(v: T): this.type = complete(Success(v))
+
+ /** Tries to complete the promise with a value.
+ *
+ * $nonDeterministic
+ *
+ * @return If the promise has already been completed returns `false`, or `true` otherwise.
+ */
+ def trySuccess(value: T): Boolean = tryComplete(Success(value))
+
+ /** Completes the promise with an exception.
+ *
+ * @param t The throwable to complete the promise with.
+ *
+ * $allowedThrowables
+ *
+ * $promiseCompletion
+ */
+ def failure(t: Throwable): this.type = complete(Failure(t))
+
+ /** Tries to complete the promise with an exception.
+ *
+ * $nonDeterministic
+ *
+ * @return If the promise has already been completed returns `false`, or `true` otherwise.
+ */
+ def tryFailure(t: Throwable): Boolean = tryComplete(Failure(t))
+}
+
+
+
+object Promise {
+
+ /** Creates a promise object which can be completed with a value.
+ *
+ * @tparam T the type of the value in the promise
+ * @return the newly created `Promise` object
+ */
+ def apply[T](): Promise[T] = new impl.Promise.DefaultPromise[T]()
+
+ /** Creates an already completed Promise with the specified exception.
+ *
+ * @tparam T the type of the value in the promise
+ * @return the newly created `Promise` object
+ */
+ def failed[T](exception: Throwable): Promise[T] = new impl.Promise.KeptPromise[T](Failure(exception))
+
+ /** Creates an already completed Promise with the specified result.
+ *
+ * @tparam T the type of the value in the promise
+ * @return the newly created `Promise` object
+ */
+ def successful[T](result: T): Promise[T] = new impl.Promise.KeptPromise[T](Success(result))
+
+}
+
+
+
+
+
+
+
+
+
diff --git a/src/library/scala/concurrent/SyncChannel.scala b/src/library/scala/concurrent/SyncChannel.scala
index 1c398f9..ec584b3 100644
--- a/src/library/scala/concurrent/SyncChannel.scala
+++ b/src/library/scala/concurrent/SyncChannel.scala
@@ -1,19 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.concurrent
-/** A <code>SyncChannel</code> allows one to exchange data
- * synchronously between a reader and a writer thread.
- * The writer thread is blocked until the data to be written
- * has been read by a corresponding reader thread.
+/** A `SyncChannel` allows one to exchange data synchronously between
+ * a reader and a writer thread. The writer thread is blocked until the
+ * data to be written has been read by a corresponding reader thread.
*
* @author Philipp Haller
* @version 2.0, 04/17/2008
diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala
index eebdd9b..9ab7bcc 100644
--- a/src/library/scala/concurrent/SyncVar.scala
+++ b/src/library/scala/concurrent/SyncVar.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -16,11 +16,11 @@ package scala.concurrent
*/
class SyncVar[A] {
private var isDefined: Boolean = false
- private var value: A = _
+ private var value: Option[A] = None
- def get = synchronized {
+ def get: A = synchronized {
while (!isDefined) wait()
- value
+ value.get
}
/** Waits `timeout` millis. If `timeout <= 0` just returns 0. If the system clock
@@ -50,33 +50,77 @@ class SyncVar[A] {
val elapsed = waitMeasuringElapsed(rest)
rest -= elapsed
}
- if (isDefined) Some(value)
- else None
+ value
}
- def take() = synchronized {
+ /** Waits for this SyncVar to become defined and returns
+ * the result */
+ def take(): A = synchronized {
try get
- finally unset()
+ finally unsetVal()
}
- def set(x: A) = synchronized {
- value = x
- isDefined = true
- notifyAll()
+ /** Waits for this SyncVar to become defined at least for
+ * `timeout` milliseconds (possibly more), and takes its
+ * value by first reading and then removing the value from
+ * the SyncVar.
+ *
+ * @param timeout the amount of milliseconds to wait, 0 means forever
+ * @return the value or a throws an exception if the timeout occurs
+ * @throws NoSuchElementException on timeout
+ */
+ def take(timeout: Long): A = synchronized {
+ try get(timeout).get
+ finally unsetVal()
}
- def put(x: A) = synchronized {
+ // TODO: this method should be private
+ // [Heather] the reason why: it doesn't take into consideration
+ // whether or not the SyncVar is already defined. So, set has been
+ // deprecated in order to eventually be able to make "setting" private
+ @deprecated("Use `put` instead, as `set` is potentionally error-prone", "2.10.0")
+ def set(x: A): Unit = setVal(x)
+
+ /** Places a value in the SyncVar. If the SyncVar already has a stored value,
+ * it waits until another thread takes it */
+ def put(x: A): Unit = synchronized {
while (isDefined) wait()
- set(x)
+ setVal(x)
}
+ /** Checks whether a value is stored in the synchronized variable */
def isSet: Boolean = synchronized {
isDefined
}
+ // TODO: this method should be private
+ // [Heather] the reason why: it doesn't take into consideration
+ // whether or not the SyncVar is already defined. So, unset has been
+ // deprecated in order to eventually be able to make "unsetting" private
+ @deprecated("Use `take` instead, as `unset` is potentionally error-prone", "2.10.0")
def unset(): Unit = synchronized {
isDefined = false
+ value = None
notifyAll()
}
+
+ // `setVal` exists so as to retroactively deprecate `set` without
+ // deprecation warnings where we use `set` internally. The
+ // implementation of `set` was moved to `setVal` to achieve this
+ private def setVal(x: A): Unit = synchronized {
+ isDefined = true
+ value = Some(x)
+ notifyAll()
+ }
+
+ // `unsetVal` exists so as to retroactively deprecate `unset` without
+ // deprecation warnings where we use `unset` internally. The
+ // implementation of `unset` was moved to `unsetVal` to achieve this
+ private def unsetVal(): Unit = synchronized {
+ isDefined = false
+ value = None
+ notifyAll()
+ }
+
}
diff --git a/src/library/scala/concurrent/TIMEOUT.scala b/src/library/scala/concurrent/TIMEOUT.scala
deleted file mode 100644
index 54d7db8..0000000
--- a/src/library/scala/concurrent/TIMEOUT.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.concurrent
-
-/**
- * The message sent to a message box when the period specified in
- * <code>receiveWithin</code> expires.
- *
- * @author Martin Odersky
- * @version 1.0, 10/03/2003
- */
- at deprecated("use actors instead", "2.8.0")
-case object TIMEOUT
diff --git a/src/library/scala/concurrent/TaskRunner.scala b/src/library/scala/concurrent/TaskRunner.scala
index 7337c73..a939a3f 100644
--- a/src/library/scala/concurrent/TaskRunner.scala
+++ b/src/library/scala/concurrent/TaskRunner.scala
@@ -1,19 +1,20 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.concurrent
-/** The <code>TaskRunner</code> trait...
+import scala.language.{higherKinds, implicitConversions}
+
+/** The `TaskRunner` trait...
*
* @author Philipp Haller
*/
+ at deprecated("Use `ExecutionContext` instead.", "2.10.0")
trait TaskRunner {
type Task[T]
diff --git a/src/library/scala/concurrent/TaskRunners.scala b/src/library/scala/concurrent/TaskRunners.scala
index 9eb036c..e109a8a 100644
--- a/src/library/scala/concurrent/TaskRunners.scala
+++ b/src/library/scala/concurrent/TaskRunners.scala
@@ -1,21 +1,20 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.concurrent
import java.util.concurrent.{ThreadPoolExecutor, LinkedBlockingQueue, TimeUnit}
-/** The <code>TaskRunners</code> object...
+/** The `TaskRunners` object...
*
* @author Philipp Haller
*/
+ at deprecated("Use `ExecutionContext` instead.", "2.10.0")
object TaskRunners {
implicit val threadRunner: FutureTaskRunner =
diff --git a/src/library/scala/concurrent/ThreadPoolRunner.scala b/src/library/scala/concurrent/ThreadPoolRunner.scala
index 5747344..afa14ed 100644
--- a/src/library/scala/concurrent/ThreadPoolRunner.scala
+++ b/src/library/scala/concurrent/ThreadPoolRunner.scala
@@ -1,23 +1,22 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.concurrent
import java.util.concurrent.{ExecutorService, Callable, TimeUnit}
+import scala.language.implicitConversions
-/** The <code>ThreadPoolRunner</code> trait uses
- * a <code>java.util.concurrent.ExecutorService</code>
+/** The `ThreadPoolRunner` trait uses a `java.util.concurrent.ExecutorService`
* to run submitted tasks.
*
* @author Philipp Haller
*/
+ at deprecated("Use `ExecutionContext` instead.", "2.10.0")
trait ThreadPoolRunner extends FutureTaskRunner {
type Task[T] = Callable[T] with Runnable
@@ -44,6 +43,7 @@ trait ThreadPoolRunner extends FutureTaskRunner {
executor execute task
}
+ @deprecated("Use `blocking` instead.", "2.10.0")
def managedBlock(blocker: ManagedBlocker) {
blocker.block()
}
diff --git a/src/library/scala/concurrent/ThreadRunner.scala b/src/library/scala/concurrent/ThreadRunner.scala
index ee6e2b9..cd92db9 100644
--- a/src/library/scala/concurrent/ThreadRunner.scala
+++ b/src/library/scala/concurrent/ThreadRunner.scala
@@ -1,21 +1,21 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.concurrent
import java.lang.Thread
+import scala.language.implicitConversions
-/** The <code>ThreadRunner</code> trait...
+/** The `ThreadRunner` trait...
*
* @author Philipp Haller
*/
+ at deprecated("Use `ExecutionContext` instead.", "2.10.0")
class ThreadRunner extends FutureTaskRunner {
type Task[T] = () => T
@@ -48,6 +48,7 @@ class ThreadRunner extends FutureTaskRunner {
() => result.get.fold[S](throw _, identity _)
}
+ @deprecated("Use `blocking` instead.", "2.10.0")
def managedBlock(blocker: ManagedBlocker) {
blocker.block()
}
diff --git a/src/library/scala/concurrent/duration/Deadline.scala b/src/library/scala/concurrent/duration/Deadline.scala
new file mode 100644
index 0000000..61cbe47
--- /dev/null
+++ b/src/library/scala/concurrent/duration/Deadline.scala
@@ -0,0 +1,81 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.duration
+
+/**
+ * This class stores a deadline, as obtained via `Deadline.now` or the
+ * duration DSL:
+ *
+ * {{{
+ * import scala.concurrent.duration._
+ * 3.seconds.fromNow
+ * }}}
+ *
+ * Its main purpose is to manage repeated attempts to achieve something (like
+ * awaiting a condition) by offering the methods `hasTimeLeft` and `timeLeft`. All
+ * durations are measured according to `System.nanoTime` aka wall-time; this
+ * does not take into account changes to the system clock (such as leap
+ * seconds).
+ */
+case class Deadline private (time: FiniteDuration) extends Ordered[Deadline] {
+ /**
+ * Return a deadline advanced (i.e. moved into the future) by the given duration.
+ */
+ def +(other: FiniteDuration): Deadline = copy(time = time + other)
+ /**
+ * Return a deadline moved backwards (i.e. towards the past) by the given duration.
+ */
+ def -(other: FiniteDuration): Deadline = copy(time = time - other)
+ /**
+ * Calculate time difference between this and the other deadline, where the result is directed (i.e. may be negative).
+ */
+ def -(other: Deadline): FiniteDuration = time - other.time
+ /**
+ * Calculate time difference between this duration and now; the result is negative if the deadline has passed.
+ *
+ * '''''Note that on some systems this operation is costly because it entails a system call.'''''
+ * Check `System.nanoTime` for your platform.
+ */
+ def timeLeft: FiniteDuration = this - Deadline.now
+ /**
+ * Determine whether the deadline still lies in the future at the point where this method is called.
+ *
+ * '''''Note that on some systems this operation is costly because it entails a system call.'''''
+ * Check `System.nanoTime` for your platform.
+ */
+ def hasTimeLeft(): Boolean = !isOverdue()
+ /**
+ * Determine whether the deadline lies in the past at the point where this method is called.
+ *
+ * '''''Note that on some systems this operation is costly because it entails a system call.'''''
+ * Check `System.nanoTime` for your platform.
+ */
+ def isOverdue(): Boolean = (time.toNanos - System.nanoTime()) < 0
+ /**
+ * The natural ordering for deadline is determined by the natural order of the underlying (finite) duration.
+ */
+ def compare(other: Deadline) = time compare other.time
+}
+
+object Deadline {
+ /**
+ * Construct a deadline due exactly at the point where this method is called. Useful for then
+ * advancing it to obtain a future deadline, or for sampling the current time exactly once and
+ * then comparing it to multiple deadlines (using subtraction).
+ */
+ def now: Deadline = Deadline(Duration(System.nanoTime, NANOSECONDS))
+
+ /**
+ * The natural ordering for deadline is determined by the natural order of the underlying (finite) duration.
+ */
+ implicit object DeadlineIsOrdered extends Ordering[Deadline] {
+ def compare(a: Deadline, b: Deadline) = a compare b
+ }
+
+}
diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala
new file mode 100644
index 0000000..0353d61
--- /dev/null
+++ b/src/library/scala/concurrent/duration/Duration.scala
@@ -0,0 +1,698 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.duration
+
+import java.lang.{ Double => JDouble, Long => JLong }
+import scala.language.implicitConversions
+
+object Duration {
+
+ /**
+ * Construct a Duration from the given length and unit. Observe that nanosecond precision may be lost if
+ *
+ * - the unit is NANOSECONDS
+ * - and the length has an absolute value greater than 2^53
+ *
+ * Infinite inputs (and NaN) are converted into [[Duration.Inf]], [[Duration.MinusInf]] and [[Duration.Undefined]], respectively.
+ *
+ * @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]]
+ */
+ def apply(length: Double, unit: TimeUnit): Duration = fromNanos(unit.toNanos(1) * length)
+
+ /**
+ * Construct a finite duration from the given length and time unit. The unit given is retained
+ * throughout calculations as long as possible, so that it can be retrieved later.
+ */
+ def apply(length: Long, unit: TimeUnit): FiniteDuration = new FiniteDuration(length, unit)
+
+ /**
+ * Construct a finite duration from the given length and time unit, where the latter is
+ * looked up in a list of string representation. Valid choices are:
+ *
+ * `d, day, h, hour, min, minute, s, sec, second, ms, milli, millisecond, µs, micro, microsecond, ns, nano, nanosecond`
+ * and their pluralized forms (for every but the first mentioned form of each unit, i.e. no "ds", but "days").
+ */
+ def apply(length: Long, unit: String): FiniteDuration = new FiniteDuration(length, Duration.timeUnit(unit))
+
+ // Double stores 52 bits mantissa, but there is an implied '1' in front, making the limit 2^53
+ private[this] final val maxPreciseDouble = 9007199254740992d
+
+ /**
+ * Parse String into Duration. Format is `"<length><unit>"`, where
+ * whitespace is allowed before, between and after the parts. Infinities are
+ * designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`.
+ *
+ * @throws NumberFormatException if format is not parseable
+ */
+ def apply(s: String): Duration = {
+ val s1: String = s filterNot (_.isWhitespace)
+ s1 match {
+ case "Inf" | "PlusInf" | "+Inf" => Inf
+ case "MinusInf" | "-Inf" => MinusInf
+ case _ =>
+ val unitName = s1.reverse takeWhile (_.isLetter) reverse;
+ timeUnit get unitName match {
+ case Some(unit) =>
+ val valueStr = s1 dropRight unitName.length
+ val valueD = JDouble.parseDouble(valueStr)
+ if (valueD >= -maxPreciseDouble && valueD <= maxPreciseDouble) Duration(valueD, unit)
+ else Duration(JLong.parseLong(valueStr), unit)
+ case _ => throw new NumberFormatException("format error " + s)
+ }
+ }
+ }
+
+ // "ms milli millisecond" -> List("ms", "milli", "millis", "millisecond", "milliseconds")
+ private[this] def words(s: String) = (s.trim split "\\s+").toList
+ private[this] def expandLabels(labels: String): List[String] = {
+ val hd :: rest = words(labels)
+ hd :: rest.flatMap(s => List(s, s + "s"))
+ }
+ private[this] val timeUnitLabels = List(
+ DAYS -> "d day",
+ HOURS -> "h hour",
+ MINUTES -> "min minute",
+ SECONDS -> "s sec second",
+ MILLISECONDS -> "ms milli millisecond",
+ MICROSECONDS -> "µs micro microsecond",
+ NANOSECONDS -> "ns nano nanosecond"
+ )
+
+ // TimeUnit => standard label
+ protected[duration] val timeUnitName: Map[TimeUnit, String] =
+ timeUnitLabels.toMap mapValues (s => words(s).last) toMap
+
+ // Label => TimeUnit
+ protected[duration] val timeUnit: Map[String, TimeUnit] =
+ timeUnitLabels flatMap { case (unit, names) => expandLabels(names) map (_ -> unit) } toMap
+
+ /**
+ * Extract length and time unit out of a string, where the format must match the description for [[Duration$.apply(String):Duration apply(String)]].
+ * The extractor will not match for malformed strings or non-finite durations.
+ */
+ def unapply(s: String): Option[(Long, TimeUnit)] =
+ ( try Some(apply(s)) catch { case _: RuntimeException => None } ) flatMap unapply
+
+ /**
+ * Extract length and time unit out of a duration, if it is finite.
+ */
+ def unapply(d: Duration): Option[(Long, TimeUnit)] =
+ if (d.isFinite) Some((d.length, d.unit)) else None
+
+ /**
+ * Construct a possibly infinite or undefined Duration from the given number of nanoseconds.
+ *
+ * - `Double.PositiveInfinity` is mapped to [[Duration.Inf]]
+ * - `Double.NegativeInfinity` is mapped to [[Duration.MinusInf]]
+ * - `Double.NaN` is mapped to [[Duration.Undefined]]
+ * - `-0d` is mapped to [[Duration.Zero]] (exactly like `0d`)
+ *
+ * The semantics of the resulting Duration objects matches the semantics of their Double
+ * counterparts with respect to arithmetic operations.
+ *
+ * @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]]
+ */
+ def fromNanos(nanos: Double): Duration = {
+ if (nanos.isInfinite)
+ if (nanos > 0) Inf else MinusInf
+ else if (nanos.isNaN)
+ Undefined
+ else if (nanos > Long.MaxValue || nanos < Long.MinValue)
+ throw new IllegalArgumentException("trying to construct too large duration with " + nanos + "ns")
+ else
+ fromNanos((nanos + 0.5).toLong)
+ }
+
+ private[this] final val µs_per_ns = 1000L
+ private[this] final val ms_per_ns = µs_per_ns * 1000
+ private[this] final val s_per_ns = ms_per_ns * 1000
+ private[this] final val min_per_ns = s_per_ns * 60
+ private[this] final val h_per_ns = min_per_ns * 60
+ private[this] final val d_per_ns = h_per_ns * 24
+
+ /**
+ * Construct a finite duration from the given number of nanoseconds. The
+ * result will have the coarsest possible time unit which can exactly express
+ * this duration.
+ *
+ * @throws IllegalArgumentException for `Long.MinValue` since that would lead to inconsistent behavior afterwards (cannot be negated)
+ */
+ def fromNanos(nanos: Long): FiniteDuration = {
+ if (nanos % d_per_ns == 0) Duration(nanos / d_per_ns, DAYS)
+ else if (nanos % h_per_ns == 0) Duration(nanos / h_per_ns, HOURS)
+ else if (nanos % min_per_ns == 0) Duration(nanos / min_per_ns, MINUTES)
+ else if (nanos % s_per_ns == 0) Duration(nanos / s_per_ns, SECONDS)
+ else if (nanos % ms_per_ns == 0) Duration(nanos / ms_per_ns, MILLISECONDS)
+ else if (nanos % µs_per_ns == 0) Duration(nanos / µs_per_ns, MICROSECONDS)
+ else Duration(nanos, NANOSECONDS)
+ }
+
+ /**
+ * Preconstructed value of `0.days`.
+ */
+ // unit as coarse as possible to keep (_ + Zero) sane unit-wise
+ val Zero: FiniteDuration = new FiniteDuration(0, DAYS)
+
+ /**
+ * The Undefined value corresponds closely to Double.NaN:
+ *
+ * - it is the result of otherwise invalid operations
+ * - it does not equal itself (according to `equals()`)
+ * - it compares greater than any other Duration apart from itself (for which `compare` returns 0)
+ *
+ * The particular comparison semantics mirror those of Double.NaN.
+ *
+ * '''''Use `eq` when checking an input of a method against this value.'''''
+ */
+ val Undefined: Infinite = new Infinite {
+ override def toString = "Duration.Undefined"
+ override def equals(other: Any) = false
+ override def +(other: Duration): Duration = this
+ override def -(other: Duration): Duration = this
+ override def *(factor: Double): Duration = this
+ override def /(factor: Double): Duration = this
+ override def /(other: Duration): Double = Double.NaN
+ def compare(other: Duration) = if (other eq this) 0 else 1
+ def unary_- : Duration = this
+ def toUnit(unit: TimeUnit): Double = Double.NaN
+ }
+
+ sealed abstract class Infinite extends Duration {
+ def +(other: Duration): Duration = other match {
+ case x if x eq Undefined => Undefined
+ case x: Infinite if x ne this => Undefined
+ case _ => this
+ }
+ def -(other: Duration): Duration = other match {
+ case x if x eq Undefined => Undefined
+ case x: Infinite if x eq this => Undefined
+ case _ => this
+ }
+
+ def *(factor: Double): Duration =
+ if (factor == 0d || factor.isNaN) Undefined
+ else if (factor < 0d) -this
+ else this
+ def /(divisor: Double): Duration =
+ if (divisor.isNaN || divisor.isInfinite) Undefined
+ else if ((divisor compare 0d) < 0) -this
+ else this
+ def /(divisor: Duration): Double = divisor match {
+ case _: Infinite => Double.NaN
+ case x => Double.PositiveInfinity * (if ((this > Zero) ^ (divisor >= Zero)) -1 else 1)
+ }
+
+ final def isFinite() = false
+
+ private[this] def fail(what: String) = throw new IllegalArgumentException(s"$what not allowed on infinite Durations")
+ final def length: Long = fail("length")
+ final def unit: TimeUnit = fail("unit")
+ final def toNanos: Long = fail("toNanos")
+ final def toMicros: Long = fail("toMicros")
+ final def toMillis: Long = fail("toMillis")
+ final def toSeconds: Long = fail("toSeconds")
+ final def toMinutes: Long = fail("toMinutes")
+ final def toHours: Long = fail("toHours")
+ final def toDays: Long = fail("toDays")
+ }
+
+ /**
+ * Infinite duration: greater than any other (apart from Undefined) and not equal to any other
+ * but itself. This value closely corresponds to Double.PositiveInfinity,
+ * matching its semantics in arithmetic operations.
+ */
+ val Inf: Infinite = new Infinite {
+ override def toString = "Duration.Inf"
+ def compare(other: Duration) = other match {
+ case x if x eq Undefined => -1 // Undefined != Undefined
+ case x if x eq this => 0 // `case Inf` will include null checks in the byte code
+ case _ => 1
+ }
+ def unary_- : Duration = MinusInf
+ def toUnit(unit: TimeUnit): Double = Double.PositiveInfinity
+ }
+
+ /**
+ * Infinite duration: less than any other and not equal to any other
+ * but itself. This value closely corresponds to Double.NegativeInfinity,
+ * matching its semantics in arithmetic operations.
+ */
+ val MinusInf: Infinite = new Infinite {
+ override def toString = "Duration.MinusInf"
+ def compare(other: Duration) = if (other eq this) 0 else -1
+ def unary_- : Duration = Inf
+ def toUnit(unit: TimeUnit): Double = Double.NegativeInfinity
+ }
+
+ // Java Factories
+
+ /**
+ * Construct a finite duration from the given length and time unit. The unit given is retained
+ * throughout calculations as long as possible, so that it can be retrieved later.
+ */
+ def create(length: Long, unit: TimeUnit): FiniteDuration = apply(length, unit)
+ /**
+ * Construct a Duration from the given length and unit. Observe that nanosecond precision may be lost if
+ *
+ * - the unit is NANOSECONDS
+ * - and the length has an absolute value greater than 2^53
+ *
+ * Infinite inputs (and NaN) are converted into [[Duration.Inf]], [[Duration.MinusInf]] and [[Duration.Undefined]], respectively.
+ *
+ * @throws IllegalArgumentException if the length was finite but the resulting duration cannot be expressed as a [[FiniteDuration]]
+ */
+ def create(length: Double, unit: TimeUnit): Duration = apply(length, unit)
+ /**
+ * Construct a finite duration from the given length and time unit, where the latter is
+ * looked up in a list of string representation. Valid choices are:
+ *
+ * `d, day, h, hour, min, minute, s, sec, second, ms, milli, millisecond, µs, micro, microsecond, ns, nano, nanosecond`
+ * and their pluralized forms (for every but the first mentioned form of each unit, i.e. no "ds", but "days").
+ */
+ def create(length: Long, unit: String): FiniteDuration = apply(length, unit)
+ /**
+ * Parse String into Duration. Format is `"<length><unit>"`, where
+ * whitespace is allowed before, between and after the parts. Infinities are
+ * designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`.
+ *
+ * @throws NumberFormatException if format is not parseable
+ */
+ def create(s: String): Duration = apply(s)
+
+ /**
+ * The natural ordering of durations matches the natural ordering for Double, including non-finite values.
+ */
+ implicit object DurationIsOrdered extends Ordering[Duration] {
+ def compare(a: Duration, b: Duration) = a compare b
+ }
+}
+
+/**
+ * <h2>Utility for working with java.util.concurrent.TimeUnit durations.</h2>
+ *
+ * '''''This class is not meant as a general purpose representation of time, it is
+ * optimized for the needs of `scala.concurrent`.'''''
+ *
+ * <h2>Basic Usage</h2>
+ *
+ * <p/>
+ * Examples:
+ * {{{
+ * import scala.concurrent.duration._
+ *
+ * val duration = Duration(100, MILLISECONDS)
+ * val duration = Duration(100, "millis")
+ *
+ * duration.toNanos
+ * duration < 1.second
+ * duration <= Duration.Inf
+ * }}}
+ *
+ * '''''Invoking inexpressible conversions (like calling `toSeconds` on an infinite duration) will throw an IllegalArgumentException.'''''
+ *
+ * <p/>
+ * Implicits are also provided for Int, Long and Double. Example usage:
+ * {{{
+ * import scala.concurrent.duration._
+ *
+ * val duration = 100 millis
+ * }}}
+ *
+ * '''''The DSL provided by the implicit conversions always allows construction of finite durations, even for infinite Double inputs; use Duration.Inf instead.'''''
+ *
+ * Extractors, parsing and arithmetic are also included:
+ * {{{
+ * val d = Duration("1.2 µs")
+ * val Duration(length, unit) = 5 millis
+ * val d2 = d * 2.5
+ * val d3 = d2 + 1.millisecond
+ * }}}
+ *
+ * <h2>Handling of Time Units</h2>
+ *
+ * Calculations performed on finite durations always retain the more precise unit of either operand, no matter
+ * whether a coarser unit would be able to exactly express the same duration. This means that Duration can be
+ * used as a lossless container for a (length, unit) pair if it is constructed using the corresponding methods
+ * and no arithmetic is performed on it; adding/subtracting durations should in that case be done with care.
+ *
+ * <h2>Correspondence to Double Semantics</h2>
+ *
+ * The semantics of arithmetic operations on Duration are two-fold:
+ *
+ * - exact addition/subtraction with nanosecond resolution for finite durations, independent of the summands' magnitude
+ * - isomorphic to `java.lang.Double` when it comes to infinite or undefined values
+ *
+ * The conversion between Duration and Double is done using [[Duration.toUnit]] (with unit NANOSECONDS)
+ * and [[Duration$.fromNanos(Double):Duration Duration.fromNanos(Double)]].
+ *
+ * <h2>Ordering</h2>
+ *
+ * The default ordering is consistent with the ordering of Double numbers, which means that Undefined is
+ * considered greater than all other durations, including [[Duration.Inf]].
+ *
+ * @define exc @throws IllegalArgumentException when invoked on a non-finite duration
+ *
+ * @define ovf @throws IllegalArgumentException in case of a finite overflow: the range of a finite duration is +-(2^63-1)ns, and no conversion to infinite durations takes place.
+ */
+sealed abstract class Duration extends Serializable with Ordered[Duration] {
+ /**
+ * Obtain the length of this Duration measured in the unit obtained by the `unit` method.
+ *
+ * $exc
+ */
+ def length: Long
+ /**
+ * Obtain the time unit in which the length of this duration is measured.
+ *
+ * $exc
+ */
+ def unit: TimeUnit
+ /**
+ * Return the length of this duration measured in whole nanoseconds, rounding towards zero.
+ *
+ * $exc
+ */
+ def toNanos: Long
+ /**
+ * Return the length of this duration measured in whole microseconds, rounding towards zero.
+ *
+ * $exc
+ */
+ def toMicros: Long
+ /**
+ * Return the length of this duration measured in whole milliseconds, rounding towards zero.
+ *
+ * $exc
+ */
+ def toMillis: Long
+ /**
+ * Return the length of this duration measured in whole seconds, rounding towards zero.
+ *
+ * $exc
+ */
+ def toSeconds: Long
+ /**
+ * Return the length of this duration measured in whole minutes, rounding towards zero.
+ *
+ * $exc
+ */
+ def toMinutes: Long
+ /**
+ * Return the length of this duration measured in whole hours, rounding towards zero.
+ *
+ * $exc
+ */
+ def toHours: Long
+ /**
+ * Return the length of this duration measured in whole days, rounding towards zero.
+ *
+ * $exc
+ */
+ def toDays: Long
+ /**
+ * Return the number of nanoseconds as floating point number, scaled down to the given unit.
+ * The result may not precisely represent this duration due to the Double datatype's inherent
+ * limitations (mantissa size effectively 53 bits). Non-finite durations are represented as
+ * - [[Duration.Undefined]] is mapped to Double.NaN
+ * - [[Duration.Inf]] is mapped to Double.PositiveInfinity
+ * - [[Duration.MinusInf]] is mapped to Double.NegativeInfinity
+ */
+ def toUnit(unit: TimeUnit): Double
+
+ /**
+ * Return the sum of that duration and this. When involving non-finite summands the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def +(other: Duration): Duration
+ /**
+ * Return the difference of that duration and this. When involving non-finite summands the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def -(other: Duration): Duration
+ /**
+ * Return this duration multiplied by the scalar factor. When involving non-finite factors the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def *(factor: Double): Duration
+ /**
+ * Return this duration divided by the scalar factor. When involving non-finite factors the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def /(divisor: Double): Duration
+ /**
+ * Return the quotient of this and that duration as floating-point number. The semantics are
+ * determined by Double as if calculating the quotient of the nanosecond lengths of both factors.
+ */
+ def /(divisor: Duration): Double
+ /**
+ * Negate this duration. The only two values which are mapped to themselves are [[Duration.Zero]] and [[Duration.Undefined]].
+ */
+ def unary_- : Duration
+ /**
+ * This method returns whether this duration is finite, which is not the same as
+ * `!isInfinite` for Double because this method also returns `false` for [[Duration.Undefined]].
+ */
+ def isFinite(): Boolean
+ /**
+ * Return the smaller of this and that duration as determined by the natural ordering.
+ */
+ def min(other: Duration): Duration = if (this < other) this else other
+ /**
+ * Return the larger of this and that duration as determined by the natural ordering.
+ */
+ def max(other: Duration): Duration = if (this > other) this else other
+
+ // Java API
+
+ /**
+ * Return this duration divided by the scalar factor. When involving non-finite factors the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def div(divisor: Double) = this / divisor
+ /**
+ * Return the quotient of this and that duration as floating-point number. The semantics are
+ * determined by Double as if calculating the quotient of the nanosecond lengths of both factors.
+ */
+ def div(other: Duration) = this / other
+ def gt(other: Duration) = this > other
+ def gteq(other: Duration) = this >= other
+ def lt(other: Duration) = this < other
+ def lteq(other: Duration) = this <= other
+ /**
+ * Return the difference of that duration and this. When involving non-finite summands the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def minus(other: Duration) = this - other
+ /**
+ * Return this duration multiplied by the scalar factor. When involving non-finite factors the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def mul(factor: Double) = this * factor
+ /**
+ * Negate this duration. The only two values which are mapped to themselves are [[Duration.Zero]] and [[Duration.Undefined]].
+ */
+ def neg() = -this
+ /**
+ * Return the sum of that duration and this. When involving non-finite summands the semantics match those
+ * of Double.
+ *
+ * $ovf
+ */
+ def plus(other: Duration) = this + other
+}
+
+object FiniteDuration {
+
+ implicit object FiniteDurationIsOrdered extends Ordering[FiniteDuration] {
+ def compare(a: FiniteDuration, b: FiniteDuration) = a compare b
+ }
+
+ def apply(length: Long, unit: TimeUnit) = new FiniteDuration(length, unit)
+ def apply(length: Long, unit: String) = new FiniteDuration(length, Duration.timeUnit(unit))
+
+ // limit on abs. value of durations in their units
+ private final val max_ns = Long.MaxValue
+ private final val max_µs = max_ns / 1000
+ private final val max_ms = max_µs / 1000
+ private final val max_s = max_ms / 1000
+ private final val max_min= max_s / 60
+ private final val max_h = max_min / 60
+ private final val max_d = max_h / 24
+}
+
+/**
+ * This class represents a finite duration. Its addition and subtraction operators are overloaded to retain
+ * this guarantee statically. The range of this class is limited to +-(2^63-1)ns, which is roughly 292 years.
+ */
+final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
+ import FiniteDuration._
+ import Duration._
+
+ private[this] def bounded(max: Long) = -max <= length && length <= max
+
+ require(unit match {
+ /*
+ * enforce the 2^63-1 ns limit, must be pos/neg symmetrical because of unary_-
+ */
+ case NANOSECONDS ⇒ bounded(max_ns)
+ case MICROSECONDS ⇒ bounded(max_µs)
+ case MILLISECONDS ⇒ bounded(max_ms)
+ case SECONDS ⇒ bounded(max_s)
+ case MINUTES ⇒ bounded(max_min)
+ case HOURS ⇒ bounded(max_h)
+ case DAYS ⇒ bounded(max_d)
+ case _ ⇒
+ val v = DAYS.convert(length, unit)
+ -max_d <= v && v <= max_d
+ }, "Duration is limited to +-(2^63-1)ns (ca. 292 years)")
+
+ def toNanos = unit.toNanos(length)
+ def toMicros = unit.toMicros(length)
+ def toMillis = unit.toMillis(length)
+ def toSeconds = unit.toSeconds(length)
+ def toMinutes = unit.toMinutes(length)
+ def toHours = unit.toHours(length)
+ def toDays = unit.toDays(length)
+ def toUnit(u: TimeUnit) = toNanos.toDouble / NANOSECONDS.convert(1, u)
+
+ /**
+ * Construct a [[Deadline]] from this duration by adding it to the current instant `Deadline.now`.
+ */
+ def fromNow: Deadline = Deadline.now + this
+
+ private[this] def unitString = timeUnitName(unit) + ( if (length == 1) "" else "s" )
+ override def toString = "" + length + " " + unitString
+
+ def compare(other: Duration) = other match {
+ case x: FiniteDuration => toNanos compare x.toNanos
+ case _ => -(other compare this)
+ }
+
+ // see https://www.securecoding.cert.org/confluence/display/java/NUM00-J.+Detect+or+prevent+integer+overflow
+ private[this] def safeAdd(a: Long, b: Long): Long = {
+ if ((b > 0) && (a > Long.MaxValue - b) ||
+ (b < 0) && (a < Long.MinValue - b)) throw new IllegalArgumentException("integer overflow")
+ a + b
+ }
+ private[this] def add(otherLength: Long, otherUnit: TimeUnit): FiniteDuration = {
+ val commonUnit = if (otherUnit.convert(1, unit) == 0) unit else otherUnit
+ val totalLength = safeAdd(commonUnit.convert(length, unit), commonUnit.convert(otherLength, otherUnit))
+ new FiniteDuration(totalLength, commonUnit)
+ }
+
+ def +(other: Duration) = other match {
+ case x: FiniteDuration => add(x.length, x.unit)
+ case _ => other
+ }
+ def -(other: Duration) = other match {
+ case x: FiniteDuration => add(-x.length, x.unit)
+ case _ => other
+ }
+
+ def *(factor: Double) =
+ if (!factor.isInfinite) fromNanos(toNanos * factor)
+ else if (factor.isNaN) Undefined
+ else if ((factor > 0) ^ (this < Zero)) Inf
+ else MinusInf
+
+ def /(divisor: Double) =
+ if (!divisor.isInfinite) fromNanos(toNanos / divisor)
+ else if (divisor.isNaN) Undefined
+ else Zero
+
+ // if this is made a constant, then scalac will elide the conditional and always return +0.0, SI-6331
+ private[this] def minusZero = -0d
+ def /(divisor: Duration): Double =
+ if (divisor.isFinite) toNanos.toDouble / divisor.toNanos
+ else if (divisor eq Undefined) Double.NaN
+ else if ((length < 0) ^ (divisor > Zero)) 0d
+ else minusZero
+
+ // overloaded methods taking FiniteDurations, so that you can calculate while statically staying finite
+ def +(other: FiniteDuration) = add(other.length, other.unit)
+ def -(other: FiniteDuration) = add(-other.length, other.unit)
+ def plus(other: FiniteDuration) = this + other
+ def minus(other: FiniteDuration) = this - other
+ def min(other: FiniteDuration) = if (this < other) this else other
+ def max(other: FiniteDuration) = if (this > other) this else other
+
+ // overloaded methods taking Long so that you can calculate while statically staying finite
+
+ /**
+ * Return the quotient of this duration and the given integer divisor.
+ *
+ * @throws ArithmeticException if the divisor is 0
+ */
+ def /(divisor: Long) = fromNanos(toNanos / divisor)
+
+ /**
+ * Return the product of this duration and the given integer factor.
+ *
+ * @throws IllegalArgumentException if the result would overflow the range of FiniteDuration
+ */
+ def *(factor: Long) = new FiniteDuration(safeMul(length, factor), unit)
+
+ /*
+ * This method avoids the use of Long division, which saves 95% of the time spent,
+ * by checking that there are enough leading zeros so that the result has a chance
+ * to fit into a Long again; the remaining edge cases are caught by using the sign
+ * of the product for overflow detection.
+ *
+ * This method is not general purpose because it disallows the (otherwise legal)
+ * case of Long.MinValue * 1, but that is okay for use in FiniteDuration, since
+ * Long.MinValue is not a legal `length` anyway.
+ */
+ private def safeMul(_a: Long, _b: Long): Long = {
+ val a = math.abs(_a)
+ val b = math.abs(_b)
+ import java.lang.Long.{ numberOfLeadingZeros => leading }
+ if (leading(a) + leading(b) < 64) throw new IllegalArgumentException("multiplication overflow")
+ val product = a * b
+ if (product < 0) throw new IllegalArgumentException("multiplication overflow")
+ if (a == _a ^ b == _b) -product else product
+ }
+
+ /**
+ * Return the quotient of this duration and the given integer divisor.
+ *
+ * @throws ArithmeticException if the divisor is 0
+ */
+ def div(divisor: Long) = this / divisor
+
+ /**
+ * Return the product of this duration and the given integer factor.
+ *
+ * @throws IllegalArgumentException if the result would overflow the range of FiniteDuration
+ */
+ def mul(factor: Long) = this * factor
+
+ def unary_- = Duration(-length, unit)
+
+ final def isFinite() = true
+
+ override def equals(other: Any) = other match {
+ case x: FiniteDuration => toNanos == x.toNanos
+ case _ => super.equals(other)
+ }
+ override def hashCode = toNanos.toInt
+}
diff --git a/src/library/scala/concurrent/duration/DurationConversions.scala b/src/library/scala/concurrent/duration/DurationConversions.scala
new file mode 100644
index 0000000..74afa0c
--- /dev/null
+++ b/src/library/scala/concurrent/duration/DurationConversions.scala
@@ -0,0 +1,92 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.duration
+
+import DurationConversions._
+
+// Would be nice to limit the visibility of this trait a little bit,
+// but it crashes scalac to do so.
+trait DurationConversions extends Any {
+ protected def durationIn(unit: TimeUnit): FiniteDuration
+
+ def nanoseconds = durationIn(NANOSECONDS)
+ def nanos = nanoseconds
+ def nanosecond = nanoseconds
+ def nano = nanoseconds
+
+ def microseconds = durationIn(MICROSECONDS)
+ def micros = microseconds
+ def microsecond = microseconds
+ def micro = microseconds
+
+ def milliseconds = durationIn(MILLISECONDS)
+ def millis = milliseconds
+ def millisecond = milliseconds
+ def milli = milliseconds
+
+ def seconds = durationIn(SECONDS)
+ def second = seconds
+
+ def minutes = durationIn(MINUTES)
+ def minute = minutes
+
+ def hours = durationIn(HOURS)
+ def hour = hours
+
+ def days = durationIn(DAYS)
+ def day = days
+
+ def nanoseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(nanoseconds)
+ def nanos[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c)
+ def nanosecond[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c)
+ def nano[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c)
+
+ def microseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(microseconds)
+ def micros[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c)
+ def microsecond[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c)
+ def micro[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c)
+
+ def milliseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(milliseconds)
+ def millis[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c)
+ def millisecond[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c)
+ def milli[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c)
+
+ def seconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(seconds)
+ def second[C](c: C)(implicit ev: Classifier[C]): ev.R = seconds(c)
+
+ def minutes[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(minutes)
+ def minute[C](c: C)(implicit ev: Classifier[C]): ev.R = minutes(c)
+
+ def hours[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(hours)
+ def hour[C](c: C)(implicit ev: Classifier[C]): ev.R = hours(c)
+
+ def days[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(days)
+ def day[C](c: C)(implicit ev: Classifier[C]): ev.R = days(c)
+}
+
+/**
+ * This object just holds some cogs which make the DSL machine work, not for direct consumption.
+ */
+object DurationConversions {
+ trait Classifier[C] {
+ type R
+ def convert(d: FiniteDuration): R
+ }
+
+ implicit object spanConvert extends Classifier[span.type] {
+ type R = FiniteDuration
+ def convert(d: FiniteDuration) = d
+ }
+
+ implicit object fromNowConvert extends Classifier[fromNow.type] {
+ type R = Deadline
+ def convert(d: FiniteDuration) = Deadline.now + d
+ }
+
+}
diff --git a/src/library/scala/concurrent/duration/package.scala b/src/library/scala/concurrent/duration/package.scala
new file mode 100644
index 0000000..2fd735f
--- /dev/null
+++ b/src/library/scala/concurrent/duration/package.scala
@@ -0,0 +1,75 @@
+package scala.concurrent
+
+import scala.language.implicitConversions
+
+package object duration {
+ /**
+ * This object can be used as closing token if you prefer dot-less style but do not want
+ * to enable language.postfixOps:
+ *
+ * {{{
+ * import scala.concurrent.duration._
+ *
+ * val duration = 2 seconds span
+ * }}}
+ */
+ object span
+
+ /**
+ * This object can be used as closing token for declaring a deadline at some future point
+ * in time:
+ *
+ * {{{
+ * import scala.concurrent.duration._
+ *
+ * val deadline = 3 seconds fromNow
+ * }}}
+ */
+ object fromNow
+
+ type TimeUnit = java.util.concurrent.TimeUnit
+ final val DAYS = java.util.concurrent.TimeUnit.DAYS
+ final val HOURS = java.util.concurrent.TimeUnit.HOURS
+ final val MICROSECONDS = java.util.concurrent.TimeUnit.MICROSECONDS
+ final val MILLISECONDS = java.util.concurrent.TimeUnit.MILLISECONDS
+ final val MINUTES = java.util.concurrent.TimeUnit.MINUTES
+ final val NANOSECONDS = java.util.concurrent.TimeUnit.NANOSECONDS
+ final val SECONDS = java.util.concurrent.TimeUnit.SECONDS
+
+ implicit def pairIntToDuration(p: (Int, TimeUnit)): Duration = Duration(p._1, p._2)
+ implicit def pairLongToDuration(p: (Long, TimeUnit)): FiniteDuration = Duration(p._1, p._2)
+ implicit def durationToPair(d: Duration): (Long, TimeUnit) = (d.length, d.unit)
+
+ implicit final class DurationInt(val n: Int) extends AnyVal with DurationConversions {
+ override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit)
+ }
+
+ implicit final class DurationLong(val n: Long) extends AnyVal with DurationConversions {
+ override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit)
+ }
+
+ implicit final class DurationDouble(val d: Double) extends AnyVal with DurationConversions {
+ override protected def durationIn(unit: TimeUnit): FiniteDuration =
+ Duration(d, unit) match {
+ case f: FiniteDuration => f
+ case _ => throw new IllegalArgumentException("Duration DSL not applicable to " + d)
+ }
+ }
+
+ /*
+ * Avoid reflection based invocation by using non-duck type
+ */
+ implicit final class IntMult(val i: Int) extends AnyVal {
+ def *(d: Duration) = d * i
+ def *(d: FiniteDuration) = d * i
+ }
+
+ implicit final class LongMult(val i: Long) extends AnyVal {
+ def *(d: Duration) = d * i
+ def *(d: FiniteDuration) = d * i
+ }
+
+ implicit final class DoubleMult(val f: Double) extends AnyVal {
+ def *(d: Duration) = d * f
+ }
+}
diff --git a/src/library/scala/concurrent/impl/AbstractPromise.java b/src/library/scala/concurrent/impl/AbstractPromise.java
new file mode 100644
index 0000000..b8165b6
--- /dev/null
+++ b/src/library/scala/concurrent/impl/AbstractPromise.java
@@ -0,0 +1,40 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.impl;
+
+
+import scala.concurrent.util.Unsafe;
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+
+
+
+abstract class AbstractPromise {
+ private volatile Object _ref;
+
+ final static long _refoffset;
+
+ static {
+ try {
+ _refoffset = Unsafe.instance.objectFieldOffset(AbstractPromise.class.getDeclaredField("_ref"));
+ } catch (Throwable t) {
+ throw new ExceptionInInitializerError(t);
+ }
+ }
+
+ protected final boolean updateState(Object oldState, Object newState) {
+ return Unsafe.instance.compareAndSwapObject(this, _refoffset, oldState, newState);
+ }
+
+ protected final Object getState() {
+ return _ref;
+ }
+
+ protected final static AtomicReferenceFieldUpdater<AbstractPromise, Object> updater =
+ AtomicReferenceFieldUpdater.newUpdater(AbstractPromise.class, Object.class, "_ref");
+}
\ No newline at end of file
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
new file mode 100644
index 0000000..0aa6b37
--- /dev/null
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -0,0 +1,149 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.impl
+
+
+
+import java.util.concurrent.{ LinkedBlockingQueue, Callable, Executor, ExecutorService, Executors, ThreadFactory, TimeUnit, ThreadPoolExecutor }
+import java.util.Collection
+import scala.concurrent.forkjoin._
+import scala.concurrent.{ BlockContext, ExecutionContext, Awaitable, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService }
+import scala.util.control.NonFatal
+
+
+
+private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter: Throwable => Unit) extends ExecutionContextExecutor {
+ // Placed here since the creation of the executor needs to read this val
+ private[this] val uncaughtExceptionHandler: Thread.UncaughtExceptionHandler = new Thread.UncaughtExceptionHandler {
+ def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause)
+ }
+
+ val executor: Executor = es match {
+ case null => createExecutorService
+ case some => some
+ }
+
+ // Implement BlockContext on FJP threads
+ class DefaultThreadFactory(daemonic: Boolean) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory {
+ def wire[T <: Thread](thread: T): T = {
+ thread.setDaemon(daemonic)
+ thread.setUncaughtExceptionHandler(uncaughtExceptionHandler)
+ thread
+ }
+
+ def newThread(runnable: Runnable): Thread = wire(new Thread(runnable))
+
+ def newThread(fjp: ForkJoinPool): ForkJoinWorkerThread = wire(new ForkJoinWorkerThread(fjp) with BlockContext {
+ override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = {
+ var result: T = null.asInstanceOf[T]
+ ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker {
+ @volatile var isdone = false
+ override def block(): Boolean = {
+ result = try thunk finally { isdone = true }
+ true
+ }
+ override def isReleasable = isdone
+ })
+ result
+ }
+ })
+ }
+
+ def createExecutorService: ExecutorService = {
+
+ def getInt(name: String, f: String => Int): Int =
+ try f(System.getProperty(name)) catch { case e: Exception => Runtime.getRuntime.availableProcessors }
+ def range(floor: Int, desired: Int, ceiling: Int): Int =
+ if (ceiling < floor) range(ceiling, desired, floor) else scala.math.min(scala.math.max(desired, floor), ceiling)
+
+ val desiredParallelism = range(
+ getInt("scala.concurrent.context.minThreads", _.toInt),
+ getInt("scala.concurrent.context.numThreads", {
+ case null | "" => Runtime.getRuntime.availableProcessors
+ case s if s.charAt(0) == 'x' => (Runtime.getRuntime.availableProcessors * s.substring(1).toDouble).ceil.toInt
+ case other => other.toInt
+ }),
+ getInt("scala.concurrent.context.maxThreads", _.toInt))
+
+ val threadFactory = new DefaultThreadFactory(daemonic = true)
+
+ try {
+ new ForkJoinPool(
+ desiredParallelism,
+ threadFactory,
+ uncaughtExceptionHandler,
+ true) // Async all the way baby
+ } catch {
+ case NonFatal(t) =>
+ System.err.println("Failed to create ForkJoinPool for the default ExecutionContext, falling back to ThreadPoolExecutor")
+ t.printStackTrace(System.err)
+ val exec = new ThreadPoolExecutor(
+ desiredParallelism,
+ desiredParallelism,
+ 5L,
+ TimeUnit.MINUTES,
+ new LinkedBlockingQueue[Runnable],
+ threadFactory
+ )
+ exec.allowCoreThreadTimeOut(true)
+ exec
+ }
+ }
+
+
+ def execute(runnable: Runnable): Unit = executor match {
+ case fj: ForkJoinPool =>
+ val fjt = runnable match {
+ case t: ForkJoinTask[_] => t
+ case runnable => new ForkJoinTask[Unit] {
+ final override def setRawResult(u: Unit): Unit = ()
+ final override def getRawResult(): Unit = ()
+ final override def exec(): Boolean = try { runnable.run(); true } catch {
+ case anything: Throwable ⇒
+ val t = Thread.currentThread
+ t.getUncaughtExceptionHandler match {
+ case null ⇒
+ case some ⇒ some.uncaughtException(t, anything)
+ }
+ throw anything
+ }
+ }
+ }
+ Thread.currentThread match {
+ case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork()
+ case _ => fj execute fjt
+ }
+ case generic => generic execute runnable
+ }
+
+ def reportFailure(t: Throwable) = reporter(t)
+}
+
+private[concurrent] object ExecutionContextImpl {
+ def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = new ExecutionContextImpl(e, reporter)
+ def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl with ExecutionContextExecutorService =
+ new ExecutionContextImpl(es, reporter) with ExecutionContextExecutorService {
+ final def asExecutorService: ExecutorService = executor.asInstanceOf[ExecutorService]
+ override def execute(command: Runnable) = executor.execute(command)
+ override def shutdown() { asExecutorService.shutdown() }
+ override def shutdownNow() = asExecutorService.shutdownNow()
+ override def isShutdown = asExecutorService.isShutdown
+ override def isTerminated = asExecutorService.isTerminated
+ override def awaitTermination(l: Long, timeUnit: TimeUnit) = asExecutorService.awaitTermination(l, timeUnit)
+ override def submit[T](callable: Callable[T]) = asExecutorService.submit(callable)
+ override def submit[T](runnable: Runnable, t: T) = asExecutorService.submit(runnable, t)
+ override def submit(runnable: Runnable) = asExecutorService.submit(runnable)
+ override def invokeAll[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAll(callables)
+ override def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAll(callables, l, timeUnit)
+ override def invokeAny[T](callables: Collection[_ <: Callable[T]]) = asExecutorService.invokeAny(callables)
+ override def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = asExecutorService.invokeAny(callables, l, timeUnit)
+ }
+}
+
+
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
new file mode 100644
index 0000000..89d10e5
--- /dev/null
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -0,0 +1,34 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.impl
+
+
+
+import scala.concurrent.ExecutionContext
+import scala.util.control.NonFatal
+import scala.util.{Try, Success, Failure}
+
+
+private[concurrent] object Future {
+ class PromiseCompletingRunnable[T](body: => T) extends Runnable {
+ val promise = new Promise.DefaultPromise[T]()
+
+ override def run() = {
+ promise complete {
+ try Success(body) catch { case NonFatal(e) => Failure(e) }
+ }
+ }
+ }
+
+ def apply[T](body: =>T)(implicit executor: ExecutionContext): scala.concurrent.Future[T] = {
+ val runnable = new PromiseCompletingRunnable(body)
+ executor.prepare.execute(runnable)
+ runnable.promise.future
+ }
+}
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
new file mode 100644
index 0000000..c9b2a15
--- /dev/null
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -0,0 +1,341 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent.impl
+
+import scala.concurrent.{ ExecutionContext, CanAwait, OnCompleteRunnable, TimeoutException, ExecutionException, blocking }
+import scala.concurrent.Future.InternalCallbackExecutor
+import scala.concurrent.duration.{ Duration, Deadline, FiniteDuration, NANOSECONDS }
+import scala.annotation.tailrec
+import scala.util.control.NonFatal
+import scala.util.{ Try, Success, Failure }
+import java.io.ObjectInputStream
+import java.util.concurrent.locks.AbstractQueuedSynchronizer
+
+private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with scala.concurrent.Future[T] {
+ def future: this.type = this
+}
+
+/* Precondition: `executor` is prepared, i.e., `executor` has been returned from invocation of `prepare` on some other `ExecutionContext`.
+ */
+private class CallbackRunnable[T](val executor: ExecutionContext, val onComplete: Try[T] => Any) extends Runnable with OnCompleteRunnable {
+ // must be filled in before running it
+ var value: Try[T] = null
+
+ override def run() = {
+ require(value ne null) // must set value to non-null before running!
+ try onComplete(value) catch { case NonFatal(e) => executor reportFailure e }
+ }
+
+ def executeWithValue(v: Try[T]): Unit = {
+ require(value eq null) // can't complete it twice
+ value = v
+ // Note that we cannot prepare the ExecutionContext at this point, since we might
+ // already be running on a different thread!
+ try executor.execute(this) catch { case NonFatal(t) => executor reportFailure t }
+ }
+}
+
+private[concurrent] object Promise {
+
+ private def resolveTry[T](source: Try[T]): Try[T] = source match {
+ case Failure(t) => resolver(t)
+ case _ => source
+ }
+
+ private def resolver[T](throwable: Throwable): Try[T] = throwable match {
+ case t: scala.runtime.NonLocalReturnControl[_] => Success(t.value.asInstanceOf[T])
+ case t: scala.util.control.ControlThrowable => Failure(new ExecutionException("Boxed ControlThrowable", t))
+ case t: InterruptedException => Failure(new ExecutionException("Boxed InterruptedException", t))
+ case e: Error => Failure(new ExecutionException("Boxed Error", e))
+ case t => Failure(t)
+ }
+
+ /**
+ * Latch used to implement waiting on a DefaultPromise's result.
+ *
+ * Inspired by: http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/main/java/util/concurrent/locks/AbstractQueuedSynchronizer.java
+ * Written by Doug Lea with assistance from members of JCP JSR-166
+ * Expert Group and released to the public domain, as explained at
+ * http://creativecommons.org/publicdomain/zero/1.0/
+ */
+ private final class CompletionLatch[T] extends AbstractQueuedSynchronizer with (Try[T] => Unit) {
+ override protected def tryAcquireShared(ignored: Int): Int = if (getState != 0) 1 else -1
+ override protected def tryReleaseShared(ignore: Int): Boolean = {
+ setState(1)
+ true
+ }
+ override def apply(ignored: Try[T]): Unit = releaseShared(1)
+ }
+
+
+ /** Default promise implementation.
+ *
+ * A DefaultPromise has three possible states. It can be:
+ *
+ * 1. Incomplete, with an associated list of callbacks waiting on completion.
+ * 2. Complete, with a result.
+ * 3. Linked to another DefaultPromise.
+ *
+ * If a DefaultPromise is linked to another DefaultPromise then it will
+ * delegate all its operations to that other promise. This means that two
+ * DefaultPromises that are linked will appear, to external callers, to have
+ * exactly the same state and behaviour. E.g. they will both appear to be
+ * either complete or incomplete, and with the same values.
+ *
+ * A DefaultPromise stores its state entirely in the AnyRef cell exposed by
+ * AbstractPromise. The type of object stored in the cell fully describes the
+ * current state of the promise.
+ *
+ * 1. List[CallbackRunnable] - The promise is incomplete and has zero or more callbacks
+ * to call when it is eventually completed.
+ * 2. Try[T] - The promise is complete and now contains its value.
+ * 3. DefaultPromise[T] - The promise is linked to another promise.
+ *
+ * The ability to link DefaultPromises is needed to prevent memory leaks when
+ * using Future.flatMap. The previous implementation of Future.flatMap used
+ * onComplete handlers to propagate the ultimate value of a flatMap operation
+ * to its promise. Recursive calls to flatMap built a chain of onComplete
+ * handlers and promises. Unfortunately none of the handlers or promises in
+ * the chain could be collected until the handlers had been called and
+ * detached, which only happened when the final flatMap future was completed.
+ * (In some situations, such as infinite streams, this would never actually
+ * happen.) Because of the fact that the promise implementation internally
+ * created references between promises, and these references were invisible to
+ * user code, it was easy for user code to accidentally build large chains of
+ * promises and thereby leak memory.
+ *
+ * The problem of leaks is solved by automatically breaking these chains of
+ * promises, so that promises don't refer to each other in a long chain. This
+ * allows each promise to be individually collected. The idea is to "flatten"
+ * the chain of promises, so that instead of each promise pointing to its
+ * neighbour, they instead point directly to the promise at the root of the
+ * chain. This means that only the root promise is referenced, and all the
+ * other promises are available for garbage collection as soon as they're no
+ * longer referenced by user code.
+ *
+ * To make the chains flattenable, the concept of linking promises together
+ * needed to become an explicit feature of the DefaultPromise implementation,
+ * so that the implementation can navigate and rewire links as needed. The idea
+ * of linking promises is based on the [[Twitter promise implementation
+ * https://github.com/twitter/util/blob/master/util-core/src/main/scala/com/twitter/util/Promise.scala]].
+ *
+ * In practice, flattening the chain cannot always be done perfectly. When a
+ * promise is added to the end of the chain, it scans the chain and links
+ * directly to the root promise. This prevents the chain from growing forwards.
+ * But the root promise for a chain can change, causing the chain to grow
+ * backwards, and leaving all previously-linked promises pointing at a promise
+ * which is no longer the root promise.
+ *
+ * To mitigate the problem of the root promise changing, whenever a promise's
+ * methods are called, and it needs a reference to its root promise it calls
+ * the `compressedRoot()` method. This method re-scans the promise chain to
+ * get the root promise, and also compresses its links so that it links
+ * directly to whatever the current root promise is. This ensures that the
+ * chain is flattened whenever `compressedRoot()` is called. And since
+ * `compressedRoot()` is called at every possible opportunity (when getting a
+ * promise's value, when adding an onComplete handler, etc), this will happen
+ * frequently. Unfortunately, even this eager relinking doesn't absolutely
+ * guarantee that the chain will be flattened and that leaks cannot occur.
+ * However eager relinking does greatly reduce the chance that leaks will
+ * occur.
+ *
+ * Future.flatMap links DefaultPromises together by calling the `linkRootOf`
+ * method. This is the only externally visible interface to linked
+ * DefaultPromises, and `linkedRootOf` is currently only designed to be called
+ * by Future.flatMap.
+ */
+ class DefaultPromise[T] extends AbstractPromise with Promise[T] { self =>
+ updateState(null, Nil) // The promise is incomplete and has no callbacks
+
+ /** Get the root promise for this promise, compressing the link chain to that
+ * promise if necessary.
+ *
+ * For promises that are not linked, the result of calling
+ * `compressedRoot()` will be the promise itself. However for linked promises,
+ * this method will traverse each link until it locates the root promise at
+ * the base of the link chain.
+ *
+ * As a side effect of calling this method, the link from this promise back
+ * to the root promise will be updated ("compressed") to point directly to
+ * the root promise. This allows intermediate promises in the link chain to
+ * be garbage collected. Also, subsequent calls to this method should be
+ * faster as the link chain will be shorter.
+ */
+ @tailrec
+ private def compressedRoot(): DefaultPromise[T] = {
+ getState match {
+ case linked: DefaultPromise[_] =>
+ val target = linked.asInstanceOf[DefaultPromise[T]].root
+ if (linked eq target) target else if (updateState(linked, target)) target else compressedRoot()
+ case _ => this
+ }
+ }
+
+ /** Get the promise at the root of the chain of linked promises. Used by `compressedRoot()`.
+ * The `compressedRoot()` method should be called instead of this method, as it is important
+ * to compress the link chain whenever possible.
+ */
+ @tailrec
+ private def root: DefaultPromise[T] = {
+ getState match {
+ case linked: DefaultPromise[_] => linked.asInstanceOf[DefaultPromise[T]].root
+ case _ => this
+ }
+ }
+
+ /** Try waiting for this promise to be completed.
+ */
+ protected final def tryAwait(atMost: Duration): Boolean = if (!isCompleted) {
+ import Duration.Undefined
+ import scala.concurrent.Future.InternalCallbackExecutor
+ atMost match {
+ case e if e eq Undefined => throw new IllegalArgumentException("cannot wait for Undefined period")
+ case Duration.Inf =>
+ val l = new CompletionLatch[T]()
+ onComplete(l)(InternalCallbackExecutor)
+ l.acquireSharedInterruptibly(1)
+ case Duration.MinusInf => // Drop out
+ case f: FiniteDuration =>
+ if (f > Duration.Zero) {
+ val l = new CompletionLatch[T]()
+ onComplete(l)(InternalCallbackExecutor)
+ l.tryAcquireSharedNanos(1, f.toNanos)
+ }
+ }
+
+ isCompleted
+ } else true // Already completed
+
+ @throws(classOf[TimeoutException])
+ @throws(classOf[InterruptedException])
+ def ready(atMost: Duration)(implicit permit: CanAwait): this.type =
+ if (tryAwait(atMost)) this
+ else throw new TimeoutException("Futures timed out after [" + atMost + "]")
+
+ @throws(classOf[Exception])
+ def result(atMost: Duration)(implicit permit: CanAwait): T =
+ ready(atMost).value.get.get // ready throws TimeoutException if timeout so value.get is safe here
+
+ def value: Option[Try[T]] = value0
+
+ @tailrec
+ private def value0: Option[Try[T]] = getState match {
+ case c: Try[_] => Some(c.asInstanceOf[Try[T]])
+ case _: DefaultPromise[_] => compressedRoot().value0
+ case _ => None
+ }
+
+ override def isCompleted: Boolean = isCompleted0
+
+ @tailrec
+ private def isCompleted0: Boolean = getState match {
+ case _: Try[_] => true
+ case _: DefaultPromise[_] => compressedRoot().isCompleted0
+ case _ => false
+ }
+
+ def tryComplete(value: Try[T]): Boolean = {
+ val resolved = resolveTry(value)
+ tryCompleteAndGetListeners(resolved) match {
+ case null => false
+ case rs if rs.isEmpty => true
+ case rs => rs.foreach(r => r.executeWithValue(resolved)); true
+ }
+ }
+
+ /** Called by `tryComplete` to store the resolved value and get the list of
+ * listeners, or `null` if it is already completed.
+ */
+ @tailrec
+ private def tryCompleteAndGetListeners(v: Try[T]): List[CallbackRunnable[T]] = {
+ getState match {
+ case raw: List[_] =>
+ val cur = raw.asInstanceOf[List[CallbackRunnable[T]]]
+ if (updateState(cur, v)) cur else tryCompleteAndGetListeners(v)
+ case _: DefaultPromise[_] =>
+ compressedRoot().tryCompleteAndGetListeners(v)
+ case _ => null
+ }
+ }
+
+ def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = {
+ val preparedEC = executor.prepare
+ val runnable = new CallbackRunnable[T](preparedEC, func)
+ dispatchOrAddCallback(runnable)
+ }
+
+ /** Tries to add the callback, if already completed, it dispatches the callback to be executed.
+ * Used by `onComplete()` to add callbacks to a promise and by `link()` to transfer callbacks
+ * to the root promise when linking two promises together.
+ */
+ @tailrec
+ private def dispatchOrAddCallback(runnable: CallbackRunnable[T]): Unit = {
+ getState match {
+ case r: Try[_] => runnable.executeWithValue(r.asInstanceOf[Try[T]])
+ case _: DefaultPromise[_] => compressedRoot().dispatchOrAddCallback(runnable)
+ case listeners: List[_] => if (updateState(listeners, runnable :: listeners)) () else dispatchOrAddCallback(runnable)
+ }
+ }
+
+ /** Link this promise to the root of another promise using `link()`. Should only be
+ * be called by Future.flatMap.
+ */
+ protected[concurrent] final def linkRootOf(target: DefaultPromise[T]): Unit = link(target.compressedRoot())
+
+ /** Link this promise to another promise so that both promises share the same
+ * externally-visible state. Depending on the current state of this promise, this
+ * may involve different things. For example, any onComplete listeners will need
+ * to be transferred.
+ *
+ * If this promise is already completed, then the same effect as linking -
+ * sharing the same completed value - is achieved by simply sending this
+ * promise's result to the target promise.
+ */
+ @tailrec
+ private def link(target: DefaultPromise[T]): Unit = if (this ne target) {
+ getState match {
+ case r: Try[_] =>
+ if (!target.tryComplete(r.asInstanceOf[Try[T]])) {
+ // Currently linking is done from Future.flatMap, which should ensure only
+ // one promise can be completed. Therefore this situation is unexpected.
+ throw new IllegalStateException("Cannot link completed promises together")
+ }
+ case _: DefaultPromise[_] =>
+ compressedRoot().link(target)
+ case listeners: List[_] => if (updateState(listeners, target)) {
+ if (!listeners.isEmpty) listeners.asInstanceOf[List[CallbackRunnable[T]]].foreach(target.dispatchOrAddCallback(_))
+ } else link(target)
+ }
+ }
+ }
+
+ /** An already completed Future is given its result at creation.
+ *
+ * Useful in Future-composition when a value to contribute is already available.
+ */
+ final class KeptPromise[T](suppliedValue: Try[T]) extends Promise[T] {
+
+ val value = Some(resolveTry(suppliedValue))
+
+ override def isCompleted: Boolean = true
+
+ def tryComplete(value: Try[T]): Boolean = false
+
+ def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = {
+ val completedAs = value.get
+ val preparedEC = executor.prepare
+ (new CallbackRunnable(preparedEC, func)).executeWithValue(completedAs)
+ }
+
+ def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this
+
+ def result(atMost: Duration)(implicit permit: CanAwait): T = value.get.get
+ }
+
+}
diff --git a/src/library/scala/concurrent/ops.scala b/src/library/scala/concurrent/ops.scala
index 9498a62..4c91e78 100644
--- a/src/library/scala/concurrent/ops.scala
+++ b/src/library/scala/concurrent/ops.scala
@@ -1,22 +1,21 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.concurrent
import java.lang.Thread
import scala.util.control.Exception.allCatch
-/** The object <code>ops</code> ...
+/** The object `ops` ...
*
* @author Martin Odersky, Stepan Koltsov, Philipp Haller
*/
+ at deprecated("Use `Future` instead.", "2.10.0")
object ops
{
val defaultRunner: FutureTaskRunner = TaskRunners.threadRunner
@@ -39,8 +38,8 @@ object ops
runner execute runner.functionAsTask(() => p)
}
- /** Evaluates an expression asynchronously, and returns a closure for retrieving
- * the result.
+ /** Evaluates an expression asynchronously, and returns a closure for
+ * retrieving the result.
*
* @param p the expression to evaluate
* @return a closure which returns the result once it has been computed
@@ -49,7 +48,7 @@ object ops
runner.futureAsFunction(runner submit runner.functionAsTask(() => p))
}
- /** Evaluates two expressions in parallel. Invoking `par' blocks the current
+ /** Evaluates two expressions in parallel. Invoking `par` blocks the current
* thread until both expressions have been evaluated.
*
* @param xp the first expression to evaluate
@@ -63,24 +62,6 @@ object ops
(xp, getOrThrow(y.get))
}
- /**
- * @param start ...
- * @param end ...
- * @param p ...
- */
- @deprecated("use `collection.parallel.ParIterable.foreach' instead", "2.9.0")
- def replicate(start: Int, end: Int)(p: Int => Unit)(implicit runner: TaskRunner = defaultRunner) {
- if (start == end)
- ()
- else if (start + 1 == end)
- p(start)
- else {
- val mid = (start + end) / 2
- spawn { replicate(start, mid)(p) }
- replicate(mid, end)(p)
- }
- }
-
/*
def parMap[a,b](f: a => b, xs: Array[a]): Array[b] = {
val results = new Array[b](xs.length);
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
new file mode 100644
index 0000000..3e849f1
--- /dev/null
+++ b/src/library/scala/concurrent/package.scala
@@ -0,0 +1,109 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+import scala.concurrent.duration.Duration
+import scala.annotation.implicitNotFound
+
+/** This package object contains primitives for concurrent and parallel programming.
+ */
+package object concurrent {
+ type ExecutionException = java.util.concurrent.ExecutionException
+ type CancellationException = java.util.concurrent.CancellationException
+ type TimeoutException = java.util.concurrent.TimeoutException
+
+ /** Starts an asynchronous computation and returns a `Future` object with the result of that computation.
+ *
+ * The result becomes available once the asynchronous computation is completed.
+ *
+ * @tparam T the type of the result
+ * @param body the asynchronous computation
+ * @param execctx the execution context on which the future is run
+ * @return the `Future` holding the result of the computation
+ */
+ def future[T](body: =>T)(implicit execctx: ExecutionContext): Future[T] = Future[T](body)
+
+ /** Creates a promise object which can be completed with a value or an exception.
+ *
+ * @tparam T the type of the value in the promise
+ * @return the newly created `Promise` object
+ */
+ def promise[T](): Promise[T] = Promise[T]()
+
+ /** Used to designate a piece of code which potentially blocks, allowing the current [[BlockContext]] to adjust
+ * the runtime's behavior.
+ * Properly marking blocking code may improve performance or avoid deadlocks.
+ *
+ * Blocking on an [[Awaitable]] should be done using [[Await.result]] instead of `blocking`.
+ *
+ * @param body A piece of code which contains potentially blocking or long running calls.
+ * @throws `CancellationException` if the computation was cancelled
+ * @throws `InterruptedException` in the case that a wait within the blocking `body` was interrupted
+ */
+ @throws(classOf[Exception])
+ def blocking[T](body: =>T): T = BlockContext.current.blockOn(body)(scala.concurrent.AwaitPermission)
+}
+
+package concurrent {
+ @implicitNotFound("Don't call `Awaitable` methods directly, use the `Await` object.")
+ sealed trait CanAwait
+
+ /**
+ * Internal usage only, implementation detail.
+ */
+ private[concurrent] object AwaitPermission extends CanAwait
+
+ /**
+ * `Await` is what is used to ensure proper handling of blocking for `Awaitable` instances.
+ */
+ object Await {
+ /**
+ * Await the "completed" state of an `Awaitable`.
+ *
+ * Although this method is blocking, the internal use of [[scala.concurrent.blocking blocking]] ensures that
+ * the underlying [[ExecutionContext]] is prepared to properly manage the blocking.
+ *
+ * @param awaitable
+ * the `Awaitable` to be awaited
+ * @param atMost
+ * maximum wait time, which may be negative (no waiting is done),
+ * [[scala.concurrent.duration.Duration.Inf Duration.Inf]] for unbounded waiting, or a finite positive
+ * duration
+ * @return the `awaitable`
+ * @throws InterruptedException if the current thread is interrupted while waiting
+ * @throws TimeoutException if after waiting for the specified time this `Awaitable` is still not ready
+ * @throws IllegalArgumentException if `atMost` is [[scala.concurrent.duration.Duration.Undefined Duration.Undefined]]
+ */
+ @throws(classOf[TimeoutException])
+ @throws(classOf[InterruptedException])
+ def ready[T](awaitable: Awaitable[T], atMost: Duration): awaitable.type =
+ blocking(awaitable.ready(atMost)(AwaitPermission))
+
+ /**
+ * Await and return the result (of type `T`) of an `Awaitable`.
+ *
+ * Although this method is blocking, the internal use of [[scala.concurrent.blocking blocking]] ensures that
+ * the underlying [[ExecutionContext]] to properly detect blocking and ensure that there are no deadlocks.
+ *
+ * @param awaitable
+ * the `Awaitable` to be awaited
+ * @param atMost
+ * maximum wait time, which may be negative (no waiting is done),
+ * [[scala.concurrent.duration.Duration.Inf Duration.Inf]] for unbounded waiting, or a finite positive
+ * duration
+ * @return the result value if `awaitable` is completed within the specific maximum wait time
+ * @throws InterruptedException if the current thread is interrupted while waiting
+ * @throws TimeoutException if after waiting for the specified time `awaitable` is still not ready
+ * @throws IllegalArgumentException if `atMost` is [[scala.concurrent.duration.Duration.Undefined Duration.Undefined]]
+ */
+ @throws(classOf[Exception])
+ def result[T](awaitable: Awaitable[T], atMost: Duration): T =
+ blocking(awaitable.result(atMost)(AwaitPermission))
+ }
+}
diff --git a/src/library/scala/concurrent/package.scala.disabled b/src/library/scala/concurrent/package.scala.disabled
deleted file mode 100644
index 42b4bf9..0000000
--- a/src/library/scala/concurrent/package.scala.disabled
+++ /dev/null
@@ -1,108 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-
-
-
-
-/** This package object contains primitives for parallel programming.
- */
-package object concurrent {
-
- /** Performs a call which can potentially block execution.
- *
- * Example:
- * {{{
- * val lock = new ReentrantLock
- *
- * // ... do something ...
- *
- * blocking {
- * if (!lock.hasLock) lock.lock()
- * }
- * }}}
- *
- * '''Note:''' calling methods that wait arbitrary amounts of time
- * (e.g. for I/O operations or locks) may severely decrease performance
- * or even result in deadlocks. This does not include waiting for
- * results of futures.
- *
- * @tparam T the result type of the blocking operation
- * @param body the blocking operation
- * @param runner the runner used for parallel computations
- * @return the result of the potentially blocking operation
- */
- def blocking[T](body: =>T)(implicit runner: TaskRunner): T = {
- null.asInstanceOf[T]
- }
-
- /** Invokes a computation asynchronously. Does not wait for the computation
- * to finish.
- *
- * @tparam U the result type of the operation
- * @param p the computation to be invoked asynchronously
- * @param runner the runner used for parallel computations
- */
- def spawn[U](p: =>U)(implicit runner: TaskRunner): Unit = {
- }
-
- /** Starts 2 parallel computations and returns once they are completed.
- *
- * $invokingPar
- *
- * @tparam T1 the type of the result of 1st the parallel computation
- * @tparam T2 the type of the result of 2nd the parallel computation
- * @param b1 the 1st computation to be invoked in parallel
- * @param b2 the 2nd computation to be invoked in parallel
- * @param runner the runner used for parallel computations
- * @return a tuple of results corresponding to parallel computations
- */
- def par[T1, T2](b1: =>T1)(b2: =>T2)(implicit runner: TaskRunner): (T1, T2) = {
- null
- }
-
- /** Starts 3 parallel computations and returns once they are completed.
- *
- * $invokingPar
- *
- * @tparam T1 the type of the result of 1st the parallel computation
- * @tparam T2 the type of the result of 2nd the parallel computation
- * @tparam T3 the type of the result of 3rd the parallel computation
- * @param b1 the 1st computation to be invoked in parallel
- * @param b2 the 2nd computation to be invoked in parallel
- * @param b3 the 3rd computation to be invoked in parallel
- * @param runner the runner used for parallel computations
- * @return a tuple of results corresponding to parallel computations
- */
- def par[T1, T2, T3](b1: =>T1)(b2: =>T2)(b3: =>T3)(implicit runner: TaskRunner): (T1, T2, T3) = {
- null
- }
-
- /** Starts 4 parallel computations and returns once they are completed.
- *
- * $invokingPar
- *
- * @tparam T1 the type of the result of 1st the parallel computation
- * @tparam T2 the type of the result of 2nd the parallel computation
- * @tparam T3 the type of the result of 3rd the parallel computation
- * @tparam T4 the type of the result of 4th the parallel computation
- * @param b1 the 1st computation to be invoked in parallel
- * @param b2 the 2nd computation to be invoked in parallel
- * @param b3 the 3rd computation to be invoked in parallel
- * @param b4 the 4th computation to be invoked in parallel
- * @param runner the runner used for parallel computations
- * @return a tuple of results corresponding to parallel computations
- */
- def par[T1, T2, T3, T4](b1: =>T1)(b2: =>T2)(b3: =>T3)(b4: =>T4)(implicit runner: TaskRunner): (T1, T2, T3, T4) = {
- null
- }
-
-}
diff --git a/src/library/scala/concurrent/pilib.scala b/src/library/scala/concurrent/pilib.scala
deleted file mode 100644
index 7a3758b..0000000
--- a/src/library/scala/concurrent/pilib.scala
+++ /dev/null
@@ -1,208 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.concurrent
-
-/** <p>
- * Library for using Pi-calculus concurrent primitives in
- * <a href="http://scala-lang.org/" target="_top">Scala</a>. As an
- * example, the definition of a two-place buffer using the <code>pilib</code>
- * library looks like:
- * </p><pre>
- * <b>def</b> Buffer[a](put: Chan[a], get: Chan[a]) {
- * <b>def</b> B0 { choice ( put * { x => B1(x) } ) }
- * <b>def</b> B1(x: a) { choice ( get(x) * B0, put * { y => B2(x, y) } ) }
- * <b>def</b> B2(x: a, y: a) { choice ( get(x) * B1(y) ) }
- * B0
- * }
- * </pre>
- *
- * @see <a href="http://scala-lang.org/docu/papers.html" target="_top">
- * PiLib: A Hosted Language for Pi-Calculus Style Concurrency</a>
- * @author Vincent Cremet, Martin Odersky
- * @version 1.0
- */
- at deprecated("use actors instead", "2.8.0")
-object pilib {
-
- import TaskRunners.threadRunner
-
- //////////////////////////////// SPAWN /////////////////////////////////
-
- /**
- * Run several processes in parallel using the following syntax:
- * <code>spawn < p<sub>1</sub> | ... | p<sub>n</sub> ></code>
- */
- abstract class Spawn {
- def <(p: => Unit): Spawn
- def |(p: => Unit): Spawn
- def > (): Unit
- }
- val spawn = new Spawn {
- //object spawn extends Spawn { // BUG !
- def <(p: => Unit): Spawn = { scala.concurrent.ops.spawn(p); this }
- def |(p: => Unit): Spawn = { scala.concurrent.ops.spawn(p); this }
- def > (): Unit = ()
- }
-
- /////////////////////////// GUARDED PROCESSES //////////////////////////
-
- /** Untyped channel. */
- class UChan {
- /** Default log function. */
- var log = (x: Any) => ()
- }
-
- /** An untyped guarded process.
- *
- * @param n channel name
- * @param polarity input (true) or output (false)
- * @param v transmitted value
- * @param c continuation
- */
- case class UGP(n: UChan, polarity: Boolean, v: Any, c: Any => Any)
-
- /** Typed guarded process. */
- class GP[a](n: UChan, polarity: Boolean, v: Any, c: Any => a) {
- val untyped = UGP(n, polarity, v, c)
- }
-
- //////////////////////////////// CHANNELS //////////////////////////////
-
- /**
- * Name on which one can emit, receive or that can be emitted or received
- * during a communication.
- */
- class Chan[A] extends UChan with Function1[A, Product[A]] {
-
- var defaultValue: A = _
-
- /** Creates an input guarded process. */
- def input[B](c: A => B) =
- new GP(this, true, (), x => c(x.asInstanceOf[A]))
-
- /** Creates an input guarded process. */
- def output[B](v: A, c: () => B) =
- new GP(this, false, v, x => c())
-
- /** Blocking read. */
- def read = {
- var res: A = defaultValue
- choice ( input(x => res = x) )
- res
- }
-
- /** Blocking write. */
- def write(x: A) =
- choice ( output(x, () => ()) )
-
- /** Syntactic sugar for input. */
- def *[B](f: A => B) =
- input(f)
-
- /** Syntactic sugar for output. */
- def apply(v: A) =
- new Product(this, v)
-
- /** Attach a function to be evaluated at each communication event
- * on this channel. Replace previous attached function.
- */
- def attach(f: A => Unit) =
- log = x => f(x.asInstanceOf[A])
- }
-
- class Product[A](c: Chan[A], v: A) {
- def *[B](f: => B) = c.output(v, () => f)
- }
-
- /////////////////////// SUM OF GUARDED PROCESSES ///////////////////////
-
- case class Sum(gs: List[UGP]) {
-
- /** Continuation of the sum. */
- var cont: () => Any = _
-
- var initialized = false
-
- /** Block if not initialized otherwise continue with the
- * continuation.
- */
- def continue = synchronized {
- if (!initialized) wait()
- cont()
- }
-
- /** Set the values of parameters and awake the sleeping sum.
- *
- * @param f ...
- */
- def set(f: () => Any) = synchronized {
- cont = f
- initialized = true
- notify()
- }
- }
-
- ///////////////////////////// COMMUNICATION ////////////////////////////
-
- private var sums: List[Sum] = Nil
-
- /** Test if two lists of guarded processes can communicate.
- *
- * @param gs1 ...
- * @param gs2 ...
- * @return ...
- */
- private def matches(gs1: List[UGP], gs2: List[UGP]): Option[(() => Unit, () => Any, () => Any)] =
- (gs1, gs2) match {
- case (Nil, _) => None
- case (_, Nil) => None
- case (UGP(a1, d1, v1, c1) :: rest1, UGP(a2, d2, v2, c2) :: rest2) =>
- if (a1 == a2 && d1 == !d2)
- Some(((() => if (d1) a1.log(v2) else a1.log(v1)), (() => c1(v2)), (() => c2(v1))))
- else matches(gs1, rest2) match {
- case None => matches(rest1, gs2)
- case Some(t) => Some(t)
- }
- }
-
- /** Test if the given sum can react with one of the pending sums.
- * If yes then do the reaction otherwise append the sum at the end
- * of the pending sums.
- *
- * @param s1 ...
- * @param ss ...
- * @return ...
- */
- private def compare(s1: Sum, ss: List[Sum]): List[Sum] =
- ss match {
- case Nil => ss ::: List(s1)
- case s2 :: rest => matches(s1.gs, s2.gs) match {
- case None => s2 :: compare(s1, rest)
- case Some((log, c1, c2)) =>
- log()
- s1.set(c1)
- s2.set(c2)
- rest
- }
- }
-
- /** Pi-calculus non-deterministic choice.
- *
- * @param s ...
- * @return ...
- */
- def choice[A](s: GP[A]*): A = {
- val sum = Sum(s.toList map { _.untyped })
- synchronized { sums = compare(sum, sums) }
- (sum.continue).asInstanceOf[A]
- }
-
-}
diff --git a/src/library/scala/deprecated.scala b/src/library/scala/deprecated.scala
index 53f5c45..e940a4b 100644
--- a/src/library/scala/deprecated.scala
+++ b/src/library/scala/deprecated.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,7 +8,7 @@
package scala
-import annotation.target._
+import scala.annotation.meta._
/** An annotation that designates that a definition is deprecated.
* Access to the member then generates a deprecated warning.
@@ -18,4 +18,4 @@ import annotation.target._
* @since 2.3
*/
@getter @setter @beanGetter @beanSetter
-class deprecated(message: String = "", since: String = "") extends annotation.StaticAnnotation
+class deprecated(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/deprecatedInheritance.scala b/src/library/scala/deprecatedInheritance.scala
new file mode 100644
index 0000000..7006556
--- /dev/null
+++ b/src/library/scala/deprecatedInheritance.scala
@@ -0,0 +1,22 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** An annotation that designates that inheriting from a class is deprecated.
+ *
+ * This is usually done to warn about a non-final class being made final in a future version.
+ * Sub-classing such a class then generates a warning.
+ *
+ * @param message the message to print during compilation if the class was sub-classed
+ * @param since a string identifying the first version in which inheritance was deprecated
+ * @since 2.10
+ * @see [[scala.deprecatedOverriding]]
+ */
+private[scala] // for now, this needs to be generalized to communicate other modifier deltas
+class deprecatedInheritance(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/deprecatedName.scala b/src/library/scala/deprecatedName.scala
index e91ce6e..07c5c89 100644
--- a/src/library/scala/deprecatedName.scala
+++ b/src/library/scala/deprecatedName.scala
@@ -1,13 +1,32 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala
-import annotation.target._
+import scala.annotation.meta._
/**
* An annotation that designates the name of the parameter to which it is
* applied as deprecated. Using that name in a named argument generates
* a deprecation warning.
*
+ * For instance, evaluating the code below in the Scala interpreter
+ * {{{
+ * def inc(x: Int, @deprecatedName('y) n: Int): Int = x + n
+ * inc(1, y = 2)
+ * }}}
+ * will produce the following output:
+ * {{{
+ * warning: there were 1 deprecation warnings; re-run with -deprecation for details
+ * res0: Int = 3
+ * }}}
+ *
* @since 2.8.1
*/
@param
-class deprecatedName(name: Symbol) extends annotation.StaticAnnotation
+class deprecatedName(name: Symbol) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/deprecatedOverriding.scala b/src/library/scala/deprecatedOverriding.scala
new file mode 100644
index 0000000..04bce34
--- /dev/null
+++ b/src/library/scala/deprecatedOverriding.scala
@@ -0,0 +1,21 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** An annotation that designates that overriding a member is deprecated.
+ *
+ * Overriding such a member in a sub-class then generates a warning.
+ *
+ * @param message the message to print during compilation if the member was overridden
+ * @param since a string identifying the first version in which overriding was deprecated
+ * @since 2.10
+ * @see [[scala.deprecatedInheritance]]
+ */
+private[scala] // for the same reasons as deprecatedInheritance
+class deprecatedOverriding(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/inline.scala b/src/library/scala/inline.scala
index a182fdf..a21cced 100644
--- a/src/library/scala/inline.scala
+++ b/src/library/scala/inline.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -17,4 +17,4 @@ package scala
* @author Lex Spoon
* @version 1.0, 2007-5-21
*/
-class inline extends annotation.StaticAnnotation
+class inline extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala
index 845a816..767f06f 100644
--- a/src/library/scala/io/BufferedSource.scala
+++ b/src/library/scala/io/BufferedSource.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,7 @@ package scala.io
import java.io.{ InputStream, BufferedReader, InputStreamReader, PushbackReader }
import Source.DefaultBufSize
-import scala.collection.Iterator
+import scala.collection.{ Iterator, AbstractIterator }
/** This object provides convenience methods to create an iterable
* representation of a source file.
@@ -40,7 +40,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod
map (_.toChar)
)
- class BufferedLineIterator extends Iterator[String] {
+ class BufferedLineIterator extends AbstractIterator[String] with Iterator[String] {
// Don't want to lose a buffered char sitting in iter either. Yes,
// this is ridiculous, but if I can't get rid of Source, and all the
// Iterator bits are designed into Source, and people create Sources
@@ -48,7 +48,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod
// that calls hasNext to find out if they're empty, and that leads
// to chars being buffered, and no, I don't work here, they left a
// door unlocked.
- val bufReader: BufferedReader = {
+ private val lineReader: BufferedReader = {
// To avoid inflicting this silliness indiscriminately, we can
// skip it if the char reader was never created: and almost always
// it will not have been created, since getLines will be called
@@ -64,13 +64,13 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod
override def hasNext = {
if (nextLine == null)
- nextLine = bufReader.readLine
+ nextLine = lineReader.readLine
nextLine != null
}
override def next(): String = {
val result = {
- if (nextLine == null) bufReader.readLine
+ if (nextLine == null) lineReader.readLine
else try nextLine finally nextLine = null
}
if (result == null) Iterator.empty.next
diff --git a/src/library/scala/io/BytePickle.scala b/src/library/scala/io/BytePickle.scala
index 526e4bf..2c4a0bd 100644
--- a/src/library/scala/io/BytePickle.scala
+++ b/src/library/scala/io/BytePickle.scala
@@ -1,16 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.io
-import scala.collection.mutable.{HashMap, ArrayBuffer}
+import scala.collection.mutable
/**
* Pickler combinators.
@@ -21,6 +19,7 @@ import scala.collection.mutable.{HashMap, ArrayBuffer}
* @author Philipp Haller
* @version 1.1
*/
+ at deprecated("This class will be removed.", "2.10.0")
object BytePickle {
abstract class SPU[T] {
def appP(a: T, state: PicklerState): PicklerState
@@ -44,12 +43,12 @@ object BytePickle {
def uunpickle[T](p: PU[T], stream: Array[Byte]): T =
p.appU(stream)._1
- class PicklerEnv extends HashMap[Any, Int] {
+ class PicklerEnv extends mutable.HashMap[Any, Int] {
private var cnt: Int = 64
def nextLoc() = { cnt += 1; cnt }
}
- class UnPicklerEnv extends HashMap[Int, Any] {
+ class UnPicklerEnv extends mutable.HashMap[Int, Any] {
private var cnt: Int = 64
def nextLoc() = { cnt += 1; cnt }
}
@@ -231,7 +230,7 @@ object BytePickle {
Array.concat(a, Array(b.toByte))
def nat2Bytes(x: Int): Array[Byte] = {
- val buf = new ArrayBuffer[Byte]
+ val buf = new mutable.ArrayBuffer[Byte]
def writeNatPrefix(x: Int) {
val y = x >>> 7;
if (y != 0) writeNatPrefix(y);
@@ -271,7 +270,7 @@ object BytePickle {
}
def string: SPU[String] = share(wrap(
- (a: Array[Byte]) => Codec fromUTF8 a mkString,
+ (a: Array[Byte]) => (Codec fromUTF8 a).mkString,
(s: String) => Codec toUTF8 s,
bytearray
))
diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala
index 6d28aed..5d046e4 100644
--- a/src/library/scala/io/Codec.scala
+++ b/src/library/scala/io/Codec.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,8 @@
package scala.io
import java.nio.charset.{ Charset, CharsetDecoder, CharsetEncoder, CharacterCodingException, CodingErrorAction => Action }
-import annotation.migration
+import scala.annotation.migration
+import scala.language.implicitConversions
// Some notes about encodings for use in refining this implementation.
//
@@ -38,6 +39,9 @@ class Codec(val charSet: Charset) {
private[this] var _decodingReplacement: String = null
private[this] var _onCodingException: Handler = e => throw e
+ /** The name of the Codec. */
+ override def toString = name
+
// these methods can be chained to configure the variables above
def onMalformedInput(newAction: Action): this.type = { _onMalformedInput = newAction ; this }
def onUnmappableCharacter(newAction: Action): this.type = { _onUnmappableCharacter = newAction ; this }
@@ -78,8 +82,8 @@ trait LowPriorityCodecImplicits {
}
object Codec extends LowPriorityCodecImplicits {
- final val ISO8859 = Charset forName "ISO-8859-1"
- final val UTF8 = Charset forName "UTF-8"
+ final val ISO8859: Codec = new Codec(Charset forName "ISO-8859-1")
+ final val UTF8: Codec = new Codec(Charset forName "UTF-8")
/** Optimistically these two possible defaults will be the same thing.
* In practice this is not necessarily true, and in fact Sun classifies
@@ -87,7 +91,7 @@ object Codec extends LowPriorityCodecImplicits {
* as an accident, with any anomalies considered "not a bug".
*/
def defaultCharsetCodec = apply(Charset.defaultCharset)
- def fileEncodingCodec = apply(util.Properties.encodingString)
+ def fileEncodingCodec = apply(scala.util.Properties.encodingString)
def default = defaultCharsetCodec
def apply(encoding: String): Codec = new Codec(Charset forName encoding)
@@ -98,10 +102,11 @@ object Codec extends LowPriorityCodecImplicits {
}
@migration("This method was previously misnamed `toUTF8`. Converts from Array[Byte] to Array[Char].", "2.9.0")
- def fromUTF8(bytes: Array[Byte]): Array[Char] = {
- val bbuffer = java.nio.ByteBuffer wrap bytes
- val cbuffer = UTF8 decode bbuffer
- val chars = new Array[Char](cbuffer.remaining())
+ def fromUTF8(bytes: Array[Byte]): Array[Char] = fromUTF8(bytes, 0, bytes.length)
+ def fromUTF8(bytes: Array[Byte], offset: Int, len: Int): Array[Char] = {
+ val bbuffer = java.nio.ByteBuffer.wrap(bytes, offset, len)
+ val cbuffer = UTF8.charSet decode bbuffer
+ val chars = new Array[Char](cbuffer.remaining())
cbuffer get chars
chars
@@ -109,8 +114,16 @@ object Codec extends LowPriorityCodecImplicits {
@migration("This method was previously misnamed `fromUTF8`. Converts from character sequence to Array[Byte].", "2.9.0")
def toUTF8(cs: CharSequence): Array[Byte] = {
- val cbuffer = java.nio.CharBuffer wrap cs
- val bbuffer = UTF8 encode cbuffer
+ val cbuffer = java.nio.CharBuffer.wrap(cs, 0, cs.length)
+ val bbuffer = UTF8.charSet encode cbuffer
+ val bytes = new Array[Byte](bbuffer.remaining())
+ bbuffer get bytes
+
+ bytes
+ }
+ def toUTF8(chars: Array[Char], offset: Int, len: Int): Array[Byte] = {
+ val cbuffer = java.nio.CharBuffer.wrap(chars, offset, len)
+ val bbuffer = UTF8.charSet encode cbuffer
val bytes = new Array[Byte](bbuffer.remaining())
bbuffer get bytes
diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala
index 917d61e..daa4e10 100644
--- a/src/library/scala/io/Position.scala
+++ b/src/library/scala/io/Position.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,26 +10,29 @@ package scala.io
/** The object Position provides convenience methods to encode
* line and column number in one single integer. The encoded line
- * (column) numbers range from 0 to LINE_MASK (COLUMN_MASK),
- * where 0 indicates that the line (column) is undefined and 1
- * represents the first line (column).
+ * (column) numbers range from 0 to `LINE_MASK` (`COLUMN_MASK`),
+ * where `0` indicates that the line (column) is undefined and
+ * `1` represents the first line (column).
*
- * Line (Column) numbers greater than LINE_MASK (COLUMN_MASK) are
- * replaced by LINE_MASK (COLUMN_MASK). Furthermore, if the encoded
- * line number is LINE_MASK, the column number is always set to 0.
+ * Line (Column) numbers greater than `LINE_MASK` (`COLUMN_MASK`) are
+ * replaced by `LINE_MASK` (`COLUMN_MASK`). Furthermore, if the encoded
+ * line number is `LINE_MASK`, the column number is always set to 0.
*
* The following properties hold:
*
- * the undefined position is 0: encode(0,0) == 0
- * encodings are non-negative : encode(line,column) >= 0
+ * the undefined position is 0: `encode(0,0) == 0`
+ * encodings are non-negative : `encode(line,column) >= 0`
* position order is preserved:
- *
+ * {{{
* (line1 <= line2) || (line1 == line2 && column1 <= column2)
- * implies
+ * }}}
+ * implies
+ * {{{
* encode(line1,column1) <= encode(line2,column2)
- *
+ * }}}
* @author Burak Emir (translated from work by Matthias Zenger and others)
*/
+ at deprecated("This class will be removed.", "2.10.0")
abstract class Position {
/** Definable behavior for overflow conditions.
*/
@@ -51,7 +54,7 @@ abstract class Position {
if (line >= LINE_MASK)
LINE_MASK << COLUMN_BITS
else
- (line << COLUMN_BITS) | math.min(COLUMN_MASK, column)
+ (line << COLUMN_BITS) | scala.math.min(COLUMN_MASK, column)
}
/** Returns the line number of the encoded position. */
diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala
index 103be17..b13729a 100644
--- a/src/library/scala/io/Source.scala
+++ b/src/library/scala/io/Source.scala
@@ -1,15 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.io
+import scala.collection.AbstractIterator
import java.io.{ FileInputStream, InputStream, PrintStream, File => JFile }
import java.net.{ URI, URL }
@@ -22,7 +21,7 @@ import java.net.{ URI, URL }
object Source {
val DefaultBufSize = 2048
- /** Creates a <code>Source</code> from System.in.
+ /** Creates a `Source` from System.in.
*/
def stdin = fromInputStream(System.in)
@@ -59,7 +58,7 @@ object Source {
def fromFile(name: String, enc: String): BufferedSource =
fromFile(name)(Codec(enc))
- /** creates <code>Source</code> from file with given file: URI
+ /** creates `Source` from file with given file `URI`.
*/
def fromFile(uri: URI)(implicit codec: Codec): BufferedSource =
fromFile(new JFile(uri))(codec)
@@ -83,9 +82,9 @@ object Source {
def fromFile(file: JFile, enc: String, bufferSize: Int): BufferedSource =
fromFile(file, bufferSize)(Codec(enc))
- /** Creates Source from <code>file</code>, using given character encoding,
- * setting its description to filename. Input is buffered in a buffer of
- * size <code>bufferSize</code>.
+ /** Creates Source from `file`, using given character encoding, setting
+ * its description to filename. Input is buffered in a buffer of size
+ * `bufferSize`.
*/
def fromFile(file: JFile, bufferSize: Int)(implicit codec: Codec): BufferedSource = {
val inputStream = new FileInputStream(file)
@@ -98,12 +97,10 @@ object Source {
)(codec) withDescription ("file:" + file.getAbsolutePath)
}
- /** Create a <code>Source</code> from array of bytes, decoding
+ /** Create a `Source` from array of bytes, decoding
* the bytes according to codec.
*
- * @param bytes ...
- * @param enc ...
- * @return the created <code>Source</code> instance.
+ * @return the created `Source` instance.
*/
def fromBytes(bytes: Array[Byte])(implicit codec: Codec): Source =
fromString(new String(bytes, codec.name))
@@ -111,13 +108,13 @@ object Source {
def fromBytes(bytes: Array[Byte], enc: String): Source =
fromBytes(bytes)(Codec(enc))
- /** Create a <code>Source</code> from array of bytes, assuming
+ /** Create a `Source` from array of bytes, assuming
* one byte per character (ISO-8859-1 encoding.)
*/
def fromRawBytes(bytes: Array[Byte]): Source =
fromString(new String(bytes, Codec.ISO8859.name))
- /** creates <code>Source</code> from file with given file: URI
+ /** creates `Source` from file with given file: URI
*/
def fromURI(uri: URI)(implicit codec: Codec): BufferedSource =
fromFile(new JFile(uri))(codec)
@@ -171,9 +168,9 @@ object Source {
createBufferedSource(is, reset = () => fromInputStream(is)(codec), close = () => is.close())(codec)
}
-/** The class <code>Source</code> implements an iterable representation
- * of source data. Calling method <code>reset</code> returns an identical,
- * resetted source, where possible.
+/** The class `Source` implements an iterable representation of source data.
+ * Calling method `reset` returns an identical, reset source, where
+ * possible.
*
* @author Burak Emir
* @version 1.0
@@ -189,18 +186,9 @@ abstract class Source extends Iterator[Char] {
var nerrors = 0
var nwarnings = 0
- /** Convenience method, returns given line (not including newline)
- * from Source.
- *
- * @param line the line index, first line is 1
- * @return the specified line.
- *
- */
- @deprecated("Use a collections method such as getLines().toIndexedSeq for random access.", "2.8.0")
- def getLine(line: Int): String = lineNum(line)
- private def lineNum(line: Int): String = getLines() drop (line - 1) take 1 mkString
+ private def lineNum(line: Int): String = (getLines() drop (line - 1) take 1).mkString
- class LineIterator() extends Iterator[String] {
+ class LineIterator extends AbstractIterator[String] with Iterator[String] {
private[this] val sb = new StringBuilder
lazy val iter: BufferedIterator[Char] = Source.this.iter.buffered
@@ -233,13 +221,13 @@ abstract class Source extends Iterator[Char] {
*/
def getLines(): Iterator[String] = new LineIterator()
- /** Returns <code>true</code> if this source has more characters.
+ /** Returns `'''true'''` if this source has more characters.
*/
def hasNext = iter.hasNext
/** Returns next character.
*/
- def next: Char = positioner.next
+ def next(): Char = positioner.next
class Positioner(encoder: Position) {
def this() = this(RelaxedPosition)
@@ -256,7 +244,7 @@ abstract class Source extends Iterator[Char] {
/** default col increment for tabs '\t', set to 4 initially */
var tabinc = 4
- def next: Char = {
+ def next(): Char = {
ch = iter.next
pos = encoder.encode(cline, ccol)
ch match {
@@ -279,16 +267,16 @@ abstract class Source extends Iterator[Char] {
}
object RelaxedPositioner extends Positioner(RelaxedPosition) { }
object NoPositioner extends Positioner(Position) {
- override def next: Char = iter.next
+ override def next(): Char = iter.next
}
def ch = positioner.ch
def pos = positioner.pos
- /** Reports an error message to the output stream <code>out</code>.
+ /** Reports an error message to the output stream `out`.
*
* @param pos the source position (line/column)
* @param msg the error message to report
- * @param out PrintStream to use (optional: defaults to <code>Console.err</code>)
+ * @param out PrintStream to use (optional: defaults to `Console.err`)
*/
def reportError(
pos: Int,
@@ -315,7 +303,7 @@ abstract class Source extends Iterator[Char] {
/**
* @param pos the source position (line/column)
* @param msg the warning message to report
- * @param out PrintStream to use (optional: defaults to <code>Console.out</code>)
+ * @param out PrintStream to use (optional: defaults to `Console.out`)
*/
def reportWarning(
pos: Int,
diff --git a/src/library/scala/io/UTF8Codec.scala b/src/library/scala/io/UTF8Codec.scala
index 72a1daf..e4c2145 100644
--- a/src/library/scala/io/UTF8Codec.scala
+++ b/src/library/scala/io/UTF8Codec.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,8 +13,8 @@ package scala.io
* @author Martin Odersky
* @version 1.0, 04/10/2004
*/
-object UTF8Codec
-{
+@deprecated("This class will be removed.", "2.10.0")
+object UTF8Codec {
final val UNI_REPLACEMENT_CHAR: Int = 0x0000FFFD
final val UNI_REPLACEMENT_BYTES = Array[Byte](-17, -65, -67)
@@ -29,36 +29,4 @@ object UTF8Codec
//
// Some useful locations:
// http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt
-
- @deprecated("""Use new String(Array(ch), 0, 1).getBytes("UTF-8") instead""", "2.8.0")
- def encode(ch: Int): Array[Byte] =
- if ((Character getType ch) == Character.SURROGATE.toInt) UNI_REPLACEMENT_BYTES
- else try new String(Array(ch), 0, 1) getBytes "UTF-8" catch {
- case _: IllegalArgumentException => UNI_REPLACEMENT_BYTES
- }
-
- @deprecated("Use Codec.toUTF8 instead", "2.8.0")
- def encode(src: Array[Char], from: Int, dst: Array[Byte], to: Int, len: Int): Int = {
- val bytes = Codec toUTF8 src.slice(from, from + len)
- Array.copy(bytes, 0, dst, to, bytes.length)
- bytes.length
- }
-
- @deprecated("Use Codec.toUTF8 instead", "2.8.0")
- def encode(s: String, dst: Array[Byte], to: Int): Int =
- encode(s.toArray, 0, dst, to, s.length)
-
- @deprecated("Use Codec.toUTF8 instead", "2.8.0")
- def encode(s: String): Array[Byte] = Codec toUTF8 s
-
- @deprecated("Use Codec.fromUTF8 instead", "2.8.0")
- def decode(src: Array[Byte], from: Int, dst: Array[Char], to: Int, len: Int): Int = {
- val chars = Codec fromUTF8 src.slice(from, from + len)
- Array.copy(chars, 0, dst, to, chars.length)
- chars.length
- }
-
- @deprecated("Use Codec.fromUTF8 instead", "2.8.0")
- def decode(src: Array[Byte], from: Int, len: Int): String =
- Codec fromUTF8 src.slice(from, from + len) mkString
}
diff --git a/src/library/scala/language.scala b/src/library/scala/language.scala
new file mode 100644
index 0000000..c638f53
--- /dev/null
+++ b/src/library/scala/language.scala
@@ -0,0 +1,173 @@
+package scala
+
+/**
+ * The `scala.language` object controls the language features available to the programmer, as proposed in the
+ * [[https://docs.google.com/document/d/1nlkvpoIRkx7at1qJEZafJwthZ3GeIklTFhqmXMvTX9Q/edit '''SIP-18 document''']].
+ *
+ * Each of these features has to be explicitly imported into the current scope to become available:
+ * {{{
+ * import language.postfixOps // or language._
+ * List(1, 2, 3) reverse
+ * }}}
+ *
+ * The language features are:
+ * - [[dynamics `dynamics`]] enables defining calls rewriting using the [[scala.Dynamic `Dynamic`]] trait
+ * - [[postfixOps `postfixOps`]] enables postfix operators
+ * - [[reflectiveCalls `reflectiveCalls`]] enables using structural types
+ * - [[implicitConversions `implicitConversions`]] enables defining implicit methods and members
+ * - [[higherKinds `higherKinds`]] enables writing higher-kinded types
+ * - [[existentials `existentials`]] enables writing existential types
+ * - [[experimental `experimental`]] contains newer features that have not yet been tested in production
+ *
+ * @groupname production Language Features
+ * @groupname experimental Experimental Language Features
+ * @groupprio experimental 10
+ */
+object language {
+
+ import languageFeature._
+
+ /** Where enabled, direct or indirect subclasses of trait scala.Dynamic can
+ * be defined. Unless dynamics is enabled, a definition of a class, trait,
+ * or object that has Dynamic as a base trait is rejected. Dynamic member
+ * selection of existing subclasses of trait Dynamic are unaffected;
+ * they can be used anywhere.
+ *
+ * '''Why introduce the feature?''' To enable flexible DSLs and convenient interfacing
+ * with dynamic languages.
+ *
+ * '''Why control it?''' Dynamic member selection can undermine static checkability
+ * of programs. Furthermore, dynamic member selection often relies on reflection,
+ * which is not available on all platforms.
+ *
+ * @group production
+ */
+ implicit lazy val dynamics: dynamics = languageFeature.dynamics
+
+ /** Only where enabled, postfix operator notation `(expr op)` will be allowed.
+ *
+ * '''Why keep the feature?''' Several DSLs written in Scala need the notation.
+ *
+ * '''Why control it?''' Postfix operators interact poorly with semicolon inference.
+ * Most programmers avoid them for this reason.
+ *
+ * @group production
+ */
+ implicit lazy val postfixOps: postfixOps = languageFeature.postfixOps
+
+ /** Only where enabled, accesses to members of structural types that need
+ * reflection are supported. Reminder: A structural type is a type of the form
+ * `Parents { Decls }` where `Decls` contains declarations of new members that do
+ * not override any member in `Parents`. To access one of these members, a
+ * reflective call is needed.
+ *
+ * '''Why keep the feature?''' Structural types provide great flexibility because
+ * they avoid the need to define inheritance hierarchies a priori. Besides,
+ * their definition falls out quite naturally from Scala’s concept of type refinement.
+ *
+ * '''Why control it?''' Reflection is not available on all platforms. Popular tools
+ * such as ProGuard have problems dealing with it. Even where reflection is available,
+ * reflective dispatch can lead to surprising performance degradations.
+ *
+ * @group production
+ */
+ implicit lazy val reflectiveCalls: reflectiveCalls = languageFeature.reflectiveCalls
+
+ /** Only where enabled, definitions of implicit conversions are allowed. An
+ * implicit conversion is an implicit value of unary function type `A => B`,
+ * or an implicit method that has in its first parameter section a single,
+ * non-implicit parameter. Examples:
+ *
+ * {{{
+ * implicit def stringToInt(s: String): Int = s.length
+ * implicit val conv = (s: String) => s.length
+ * implicit def listToX(xs: List[T])(implicit f: T => X): X = ...
+ * }}}
+ *
+ * implicit values of other types are not affected, and neither are implicit
+ * classes.
+ *
+ * '''Why keep the feature?''' Implicit conversions are central to many aspects
+ * of Scala’s core libraries.
+ *
+ * '''Why control it?''' Implicit conversions are known to cause many pitfalls
+ * if over-used. And there is a tendency to over-use them because they look
+ * very powerful and their effects seem to be easy to understand. Also, in
+ * most situations using implicit parameters leads to a better design than
+ * implicit conversions.
+ *
+ * @group production
+ */
+ implicit lazy val implicitConversions: implicitConversions = languageFeature.implicitConversions
+
+ /** Only where this flag is enabled, higher-kinded types can be written.
+ *
+ * '''Why keep the feature?''' Higher-kinded types enable the definition of very general
+ * abstractions such as functor, monad, or arrow. A significant set of advanced
+ * libraries relies on them. Higher-kinded types are also at the core of the
+ * scala-virtualized effort to produce high-performance parallel DSLs through staging.
+ *
+ * '''Why control it?''' Higher kinded types in Scala lead to a Turing-complete
+ * type system, where compiler termination is no longer guaranteed. They tend
+ * to be useful mostly for type-level computation and for highly generic design
+ * patterns. The level of abstraction implied by these design patterns is often
+ * a barrier to understanding for newcomers to a Scala codebase. Some syntactic
+ * aspects of higher-kinded types are hard to understand for the uninitiated and
+ * type inference is less effective for them than for normal types. Because we are
+ * not completely happy with them yet, it is possible that some aspects of
+ * higher-kinded types will change in future versions of Scala. So an explicit
+ * enabling also serves as a warning that code involving higher-kinded types
+ * might have to be slightly revised in the future.
+ *
+ * @group production
+ */
+ implicit lazy val higherKinds: higherKinds = languageFeature.higherKinds
+
+ /** Only where enabled, existential types that cannot be expressed as wildcard
+ * types can be written and are allowed in inferred types of values or return
+ * types of methods. Existential types with wildcard type syntax such as `List[_]`,
+ * or `Map[String, _]` are not affected.
+ *
+ * '''Why keep the feature?''' Existential types are needed to make sense of Java’s wildcard
+ * types and raw types and the erased types of run-time values.
+ *
+ * '''Why control it?''' Having complex existential types in a code base usually makes
+ * application code very brittle, with a tendency to produce type errors with
+ * obscure error messages. Therefore, going overboard with existential types
+ * is generally perceived not to be a good idea. Also, complicated existential types
+ * might be no longer supported in a future simplification of the language.
+ *
+ * @group production
+ */
+ implicit lazy val existentials: existentials = languageFeature.existentials
+
+ /** The experimental object contains features that have been recently added but have not
+ * been thoroughly tested in production yet.
+ *
+ * Experimental features '''may undergo API changes''' in future releases, so production
+ * code should not rely on them.
+ *
+ * Programmers are encouraged to try out experimental features and
+ * [[http://issues.scala-lang.org report any bugs or API inconsistencies]]
+ * they encounter so they can be improved in future releases.
+ *
+ * @group experimental
+ */
+ object experimental {
+
+ import languageFeature.experimental._
+
+ /** Where enabled, macro definitions are allowed. Macro implementations and
+ * macro applications are unaffected; they can be used anywhere.
+ *
+ * '''Why introduce the feature?''' Macros promise to make the language more regular,
+ * replacing ad-hoc language constructs with a general powerful abstraction
+ * capability that can express them. Macros are also a more disciplined and
+ * powerful replacement for compiler plugins.
+ *
+ * '''Why control it?''' For their very power, macros can lead to code that is hard
+ * to debug and understand.
+ */
+ implicit lazy val macros: macros = languageFeature.experimental.macros
+ }
+}
diff --git a/src/library/scala/languageFeature.scala b/src/library/scala/languageFeature.scala
new file mode 100644
index 0000000..1f411c4
--- /dev/null
+++ b/src/library/scala/languageFeature.scala
@@ -0,0 +1,37 @@
+package scala
+
+import scala.annotation.meta
+
+object languageFeature {
+
+ @meta.languageFeature("extension of type scala.Dynamic", enableRequired = true)
+ sealed trait dynamics
+ object dynamics extends dynamics
+
+ @meta.languageFeature("postfix operator #", enableRequired = false)
+ sealed trait postfixOps
+ object postfixOps extends postfixOps
+
+ @meta.languageFeature("reflective access of structural type member #", enableRequired = false)
+ sealed trait reflectiveCalls
+ object reflectiveCalls extends reflectiveCalls
+
+ @meta.languageFeature("implicit conversion #", enableRequired = false)
+ sealed trait implicitConversions
+ object implicitConversions extends implicitConversions
+
+ @meta.languageFeature("higher-kinded type", enableRequired = false)
+ sealed trait higherKinds
+ object higherKinds extends higherKinds
+
+ @meta.languageFeature("#, which cannot be expressed by wildcards, ", enableRequired = false)
+ sealed trait existentials
+ object existentials extends existentials
+
+ object experimental {
+ @meta.languageFeature("macro definition", enableRequired = true)
+ sealed trait macros
+ object macros extends macros
+ }
+}
+
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index 3f7b2a5..62528e1 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,6 +12,7 @@ package scala.math
import java.{ lang => jl }
import java.math.{ MathContext, BigDecimal => BigDec }
import scala.collection.immutable.NumericRange
+import scala.language.implicitConversions
/**
@@ -33,8 +34,10 @@ object BigDecimal {
/** Cache ony for defaultMathContext using BigDecimals in a small range. */
private lazy val cache = new Array[BigDecimal](maxCached - minCached + 1)
- object RoundingMode extends Enumeration(java.math.RoundingMode.values map (_.toString) : _*) with Serializable {
+ object RoundingMode extends Enumeration {
type RoundingMode = Value
+ // These are supposed to be the same as java.math.RoundingMode.values,
+ // though it seems unwise to rely on the correspondence.
val UP, DOWN, CEILING, FLOOR, HALF_UP, HALF_DOWN, HALF_EVEN, UNNECESSARY = Value
}
@@ -156,6 +159,7 @@ object BigDecimal {
* @author Stephane Micheloud
* @version 1.0
*/
+@deprecatedInheritance("This class will be made final.", "2.10.0")
class BigDecimal(
val bigDecimal: BigDec,
val mc: MathContext)
@@ -181,12 +185,34 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
override def equals (that: Any): Boolean = that match {
case that: BigDecimal => this equals that
case that: BigInt => this.toBigIntExact exists (that equals _)
- case _: Float | _: Double => unifiedPrimitiveEquals(that)
- case _ => fitsInLong && unifiedPrimitiveEquals(that)
+ case that: Double => isValidDouble && toDouble == that
+ case that: Float => isValidFloat && toFloat == that
+ case _ => isValidLong && unifiedPrimitiveEquals(that)
+ }
+ override def isValidByte = noArithmeticException(toByteExact)
+ override def isValidShort = noArithmeticException(toShortExact)
+ override def isValidChar = isValidInt && toIntExact >= Char.MinValue && toIntExact <= Char.MaxValue
+ override def isValidInt = noArithmeticException(toIntExact)
+ def isValidLong = noArithmeticException(toLongExact)
+ /** Returns `true` iff this can be represented exactly by [[scala.Float]]; otherwise returns `false`.
+ */
+ def isValidFloat = {
+ val f = toFloat
+ !f.isInfinity && bigDecimal.compareTo(new java.math.BigDecimal(f)) == 0
+ }
+ /** Returns `true` iff this can be represented exactly by [[scala.Double]]; otherwise returns `false`.
+ */
+ def isValidDouble = {
+ val d = toDouble
+ !d.isInfinity && bigDecimal.compareTo(new java.math.BigDecimal(d)) == 0
+ }
+
+ private def noArithmeticException(body: => Unit): Boolean = {
+ try { body ; true }
+ catch { case _: ArithmeticException => false }
}
- private def fitsInLong = isWhole && this <= Long.MaxValue && this >= Long.MinValue
- protected[math] def isWhole = (this remainder 1) == BigDecimal(0)
+ def isWhole() = (this remainder 1) == BigDecimal(0)
def underlying = bigDecimal
/** Compares this BigDecimal with the specified BigDecimal for equality.
@@ -215,11 +241,11 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
/** Addition of BigDecimals
*/
- def + (that: BigDecimal): BigDecimal = this.bigDecimal.add(that.bigDecimal, mc)
+ def + (that: BigDecimal): BigDecimal = this.bigDecimal.add(that.bigDecimal)
/** Subtraction of BigDecimals
*/
- def - (that: BigDecimal): BigDecimal = this.bigDecimal.subtract(that.bigDecimal, mc)
+ def - (that: BigDecimal): BigDecimal = this.bigDecimal.subtract(that.bigDecimal)
/** Multiplication of BigDecimals
*/
@@ -233,14 +259,14 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
* divideToIntegralValue and the remainder.
*/
def /% (that: BigDecimal): (BigDecimal, BigDecimal) =
- this.bigDecimal.divideAndRemainder(that.bigDecimal, mc) match {
+ this.bigDecimal.divideAndRemainder(that.bigDecimal) match {
case Array(q, r) => (q, r)
}
/** Divide to Integral value.
*/
def quot (that: BigDecimal): BigDecimal =
- this.bigDecimal.divideToIntegralValue(that.bigDecimal, mc)
+ this.bigDecimal.divideToIntegralValue(that.bigDecimal)
/** Returns the minimum of this and that
*/
@@ -252,7 +278,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
/** Remainder after dividing this by that.
*/
- def remainder (that: BigDecimal): BigDecimal = this.bigDecimal.remainder(that.bigDecimal, mc)
+ def remainder (that: BigDecimal): BigDecimal = this.bigDecimal.remainder(that.bigDecimal)
/** Remainder after dividing this by that.
*/
@@ -264,11 +290,11 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
/** Returns a BigDecimal whose value is the negation of this BigDecimal
*/
- def unary_- : BigDecimal = this.bigDecimal.negate(mc)
+ def unary_- : BigDecimal = this.bigDecimal.negate()
/** Returns the absolute value of this BigDecimal
*/
- def abs: BigDecimal = this.bigDecimal abs mc
+ def abs: BigDecimal = this.bigDecimal.abs
/** Returns the sign of this BigDecimal, i.e.
* -1 if it is less than 0,
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index f98ef81..58838f1 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -1,16 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.math
import java.math.BigInteger
+import scala.language.implicitConversions
/**
* @author Martin Odersky
@@ -22,6 +21,7 @@ object BigInt {
private val minCached = -1024
private val maxCached = 1024
private val cache = new Array[BigInt](maxCached - minCached + 1)
+ private val minusOne = BigInteger.valueOf(-1)
@deprecated("Use Long.MinValue", "2.9.0")
val MinLong = BigInt(Long.MinValue)
@@ -29,11 +29,11 @@ object BigInt {
@deprecated("Use Long.MaxValue", "2.9.0")
val MaxLong = BigInt(Long.MaxValue)
- /** Constructs a <code>BigInt</code> whose value is equal to that of the
+ /** Constructs a `BigInt` whose value is equal to that of the
* specified integer value.
*
* @param i the specified integer value
- * @return the constructed <code>BigInt</code>
+ * @return the constructed `BigInt`
*/
def apply(i: Int): BigInt =
if (minCached <= i && i <= maxCached) {
@@ -43,11 +43,11 @@ object BigInt {
n
} else new BigInt(BigInteger.valueOf(i))
- /** Constructs a <code>BigInt</code> whose value is equal to that of the
+ /** Constructs a `BigInt` whose value is equal to that of the
* specified long value.
*
* @param l the specified long value
- * @return the constructed <code>BigInt</code>
+ * @return the constructed `BigInt`
*/
def apply(l: Long): BigInt =
if (minCached <= l && l <= maxCached) apply(l.toInt)
@@ -71,11 +71,7 @@ object BigInt {
new BigInt(new BigInteger(bitlength, certainty, rnd.self))
/** Constructs a randomly generated BigInt, uniformly distributed over the
- * range 0 to (2 ^ numBits - 1), inclusive.
- *
- * @param numbits ...
- * @param rnd ...
- * @return ...
+ * range `0` to `(2 ^ numBits - 1)`, inclusive.
*/
def apply(numbits: Int, rnd: scala.util.Random): BigInt =
new BigInt(new BigInteger(numbits, rnd.self))
@@ -85,39 +81,44 @@ object BigInt {
def apply(x: String): BigInt =
new BigInt(new BigInteger(x))
- /** Translates the string representation of a BigInt in the
- * specified <code>radix</code> into a BigInt.
- *
- * @param x ...
- * @param radix ...
- * @return ...
+ /** Translates the string representation of a `BigInt` in the
+ * specified `radix` into a BigInt.
*/
def apply(x: String, radix: Int): BigInt =
new BigInt(new BigInteger(x, radix))
+ /** Translates a `java.math.BigInteger` into a BigInt.
+ */
+ def apply(x: BigInteger): BigInt =
+ new BigInt(x)
+
/** Returns a positive BigInt that is probably prime, with the specified bitLength.
*/
def probablePrime(bitLength: Int, rnd: scala.util.Random): BigInt =
new BigInt(BigInteger.probablePrime(bitLength, rnd.self))
- /** Implicit conversion from <code>int</code> to <code>BigInt</code>.
+ /** Implicit conversion from `Int` to `BigInt`.
*/
implicit def int2bigInt(i: Int): BigInt = apply(i)
- /** Implicit conversion from long to BigInt
+ /** Implicit conversion from `Long` to `BigInt`.
*/
implicit def long2bigInt(l: Long): BigInt = apply(l)
+
+ /** Implicit conversion from `java.math.BigInteger` to `scala.BigInt`.
+ */
+ implicit def javaBigInteger2bigInt(x: BigInteger): BigInt = apply(x)
}
/**
* @author Martin Odersky
* @version 1.0, 15/07/2003
*/
-class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericConversions with Serializable
-{
+@deprecatedInheritance("This class will be made final.", "2.10.0")
+class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericConversions with Serializable {
/** Returns the hash code for this BigInt. */
override def hashCode(): Int =
- if (fitsInLong) unifiedPrimitiveHashcode
+ if (isValidLong) unifiedPrimitiveHashcode
else bigInteger.##
/** Compares this BigInt with the specified value for equality.
@@ -125,11 +126,52 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
override def equals(that: Any): Boolean = that match {
case that: BigInt => this equals that
case that: BigDecimal => that.toBigIntExact exists (this equals _)
- case x => fitsInLong && unifiedPrimitiveEquals(x)
+ case that: Double => isValidDouble && toDouble == that
+ case that: Float => isValidFloat && toFloat == that
+ case x => isValidLong && unifiedPrimitiveEquals(x)
+ }
+ override def isValidByte = this >= Byte.MinValue && this <= Byte.MaxValue
+ override def isValidShort = this >= Short.MinValue && this <= Short.MaxValue
+ override def isValidChar = this >= Char.MinValue && this <= Char.MaxValue
+ override def isValidInt = this >= Int.MinValue && this <= Int.MaxValue
+ def isValidLong = this >= Long.MinValue && this <= Long.MaxValue
+ /** Returns `true` iff this can be represented exactly by [[scala.Float]]; otherwise returns `false`.
+ */
+ def isValidFloat = {
+ val bitLen = bitLength
+ (bitLen <= 24 ||
+ {
+ val lowest = lowestSetBit
+ bitLen <= java.lang.Float.MAX_EXPONENT + 1 && // exclude this < -2^128 && this >= 2^128
+ lowest >= bitLen - 24 &&
+ lowest < java.lang.Float.MAX_EXPONENT + 1 // exclude this == -2^128
+ }
+ ) && !bitLengthOverflow
+ }
+ /** Returns `true` iff this can be represented exactly by [[scala.Double]]; otherwise returns `false`.
+ */
+ def isValidDouble = {
+ val bitLen = bitLength
+ (bitLen <= 53 ||
+ {
+ val lowest = lowestSetBit
+ bitLen <= java.lang.Double.MAX_EXPONENT + 1 && // exclude this < -2^1024 && this >= 2^1024
+ lowest >= bitLen - 53 &&
+ lowest < java.lang.Double.MAX_EXPONENT + 1 // exclude this == -2^1024
+ }
+ ) && !bitLengthOverflow
+ }
+ /** Some implementations of java.math.BigInteger allow huge values with bit length greater than Int.MaxValue .
+ * The BigInteger.bitLength method returns truncated bit length in this case .
+ * This method tests if result of bitLength is valid.
+ * This method will become unnecessary if BigInt constructors reject huge BigIntegers.
+ */
+ private def bitLengthOverflow = {
+ val shifted = bigInteger.shiftRight(Int.MaxValue)
+ (shifted.signum != 0) && !(shifted equals BigInt.minusOne)
}
- private def fitsInLong = this >= Long.MinValue && this <= Long.MaxValue
- protected[math] def isWhole = true
+ def isWhole() = true
def underlying = bigInteger
/** Compares this BigInt with the specified BigInt for equality.
@@ -211,8 +253,8 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
*/
def gcd (that: BigInt): BigInt = new BigInt(this.bigInteger.gcd(that.bigInteger))
- /** Returns a BigInt whose value is (this mod m).
- * This method differs from `%' in that it always returns a non-negative BigInt.
+ /** Returns a BigInt whose value is (this mod that).
+ * This method differs from `%` in that it always returns a non-negative BigInt.
*/
def mod (that: BigInt): BigInt = new BigInt(this.bigInteger.mod(that.bigInteger))
@@ -308,7 +350,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
override def byteValue = intValue.toByte
/** Converts this BigInt to a <tt>short</tt>.
- * If the BigInt is too big to fit in a byte, only the low-order 16 bits are returned.
+ * If the BigInt is too big to fit in a short, only the low-order 16 bits are returned.
* Note that this conversion can lose information about the overall magnitude of the
* BigInt value as well as return a result with the opposite sign.
*/
@@ -322,7 +364,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
def charValue = intValue.toChar
/** Converts this BigInt to an <tt>int</tt>.
- * If the BigInt is too big to fit in a char, only the low-order 32 bits
+ * If the BigInt is too big to fit in an int, only the low-order 32 bits
* are returned. Note that this conversion can lose information about the
* overall magnitude of the BigInt value as well as return a result with
* the opposite sign.
@@ -330,28 +372,28 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
def intValue = this.bigInteger.intValue
/** Converts this BigInt to a <tt>long</tt>.
- * If the BigInt is too big to fit in a char, only the low-order 64 bits
+ * If the BigInt is too big to fit in a long, only the low-order 64 bits
* are returned. Note that this conversion can lose information about the
* overall magnitude of the BigInt value as well as return a result with
* the opposite sign.
*/
def longValue = this.bigInteger.longValue
- /** Converts this BigInt to a <tt>float</tt>.
- * if this BigInt has too great a magnitude to represent as a float,
- * it will be converted to <code>Float.NEGATIVE_INFINITY</code> or
- * <code>Float.POSITIVE_INFINITY</code> as appropriate.
+ /** Converts this `BigInt` to a `float`.
+ * If this `BigInt` has too great a magnitude to represent as a float,
+ * it will be converted to `Float.NEGATIVE_INFINITY` or
+ * `Float.POSITIVE_INFINITY` as appropriate.
*/
def floatValue = this.bigInteger.floatValue
- /** Converts this BigInt to a <tt>double</tt>.
- * if this BigInt has too great a magnitude to represent as a double,
- * it will be converted to <code>Double.NEGATIVE_INFINITY</code> or
- * <code>Double.POSITIVE_INFINITY</code> as appropriate.
+ /** Converts this `BigInt` to a `double`.
+ * if this `BigInt` has too great a magnitude to represent as a double,
+ * it will be converted to `Double.NEGATIVE_INFINITY` or
+ * `Double.POSITIVE_INFINITY` as appropriate.
*/
def doubleValue = this.bigInteger.doubleValue
- /** Create a NumericRange[BigInt] in range <code>[start;end)</code>
+ /** Create a `NumericRange[BigInt]` in range `[start;end)`
* with the specified step, where start is the target BigInt.
*
* @param end the end value of the range (exclusive)
diff --git a/src/library/scala/math/Equiv.scala b/src/library/scala/math/Equiv.scala
index 92a7945..5f5e049 100644
--- a/src/library/scala/math/Equiv.scala
+++ b/src/library/scala/math/Equiv.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -29,7 +29,7 @@ import java.util.Comparator
* @since 2.7
*/
-trait Equiv[T] {
+trait Equiv[T] extends Any with Serializable {
/** Returns `true` iff `x` is equivalent to `y`.
*/
def equiv(x: T, y: T): Boolean
diff --git a/src/library/scala/math/Fractional.scala b/src/library/scala/math/Fractional.scala
index de09b18..ca33675 100644
--- a/src/library/scala/math/Fractional.scala
+++ b/src/library/scala/math/Fractional.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,6 +8,8 @@
package scala.math
+import scala.language.implicitConversions
+
/**
* @since 2.8
*/
@@ -26,4 +28,4 @@ object Fractional {
implicit def infixFractionalOps[T](x: T)(implicit num: Fractional[T]): Fractional[T]#FractionalOps = new num.FractionalOps(x)
}
object Implicits extends ExtraImplicits
-}
\ No newline at end of file
+}
diff --git a/src/library/scala/math/Integral.scala b/src/library/scala/math/Integral.scala
index bb364a7..f3684c4 100644
--- a/src/library/scala/math/Integral.scala
+++ b/src/library/scala/math/Integral.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,6 +10,8 @@
package scala.math
+import scala.language.implicitConversions
+
/**
* @since 2.8
*/
@@ -34,4 +36,4 @@ object Integral {
implicit def infixIntegralOps[T](x: T)(implicit num: Integral[T]): Integral[T]#IntegralOps = new num.IntegralOps(x)
}
object Implicits extends ExtraImplicits
-}
\ No newline at end of file
+}
diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala
index ff88e0f..5a76f4f 100644
--- a/src/library/scala/math/Numeric.scala
+++ b/src/library/scala/math/Numeric.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,6 +8,8 @@
package scala.math
+import scala.language.implicitConversions
+
/**
* @since 2.8
*/
@@ -34,8 +36,8 @@ object Numeric {
def fromInt(x: Int): BigInt = BigInt(x)
def toInt(x: BigInt): Int = x.intValue
def toLong(x: BigInt): Long = x.longValue
- def toFloat(x: BigInt): Float = x.longValue.toFloat
- def toDouble(x: BigInt): Double = x.longValue.toDouble
+ def toFloat(x: BigInt): Float = x.floatValue
+ def toDouble(x: BigInt): Double = x.doubleValue
}
implicit object BigIntIsIntegral extends BigIntIsIntegral with Ordering.BigIntOrdering
diff --git a/src/library/scala/math/Ordered.scala b/src/library/scala/math/Ordered.scala
index 58b5e23..e8be92e 100644
--- a/src/library/scala/math/Ordered.scala
+++ b/src/library/scala/math/Ordered.scala
@@ -1,21 +1,21 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.math
+import scala.language.implicitConversions
+
/** A trait for data that have a single, natural ordering. See
* [[scala.math.Ordering]] before using this trait for
* more information about whether to use [[scala.math.Ordering]] instead.
*
* Classes that implement this trait can be sorted with
- * [[scala.utils.Sorting]] and can be compared with standard comparison operators
+ * [[scala.util.Sorting]] and can be compared with standard comparison operators
* (e.g. > and <).
*
* Ordered should be used for data with a single, natural ordering (like
@@ -27,14 +27,15 @@ package scala.math
*
* [[scala.math.PartiallyOrdered]] is an alternative to this trait for partially ordered data.
*
- * For example, to create a simple class that implements Ordered and then sort it with [[scala.utils.Sorting]]:
+ * For example, create a simple class that implements `Ordered` and then sort it with [[scala.util.Sorting]]:
* {{{
- * class OrderedClass(n:Int) extends Ordered[OrderedClass] {
+ * case class OrderedClass(n:Int) extends Ordered[OrderedClass] {
* def compare(that: OrderedClass) = this.n - that.n
* }
*
- * val x = List(new MyClass(1), new MyClass(5), new MyClass(3))
- * val result = scala.utils.Sorting.quickSort(x)
+ * val x = Array(OrderedClass(1), OrderedClass(5), OrderedClass(3))
+ * scala.util.Sorting.quickSort(x)
+ * x
* }}}
*
* It is important that the `equals` method for an instance of `Ordered[A]` be consistent with the
@@ -52,7 +53,7 @@ package scala.math
* @author Martin Odersky
* @version 1.1, 2006-07-24
*/
-trait Ordered[A] extends java.lang.Comparable[A] {
+trait Ordered[A] extends Any with java.lang.Comparable[A] {
/** Result of comparing `this` with operand `that`.
*
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index d007ae3..11b1205 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -1,14 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.math
+package scala
+package math
import java.util.Comparator
+import scala.language.{implicitConversions, higherKinds}
/** Ordering is a trait whose instances each represent a strategy for sorting
* instances of a type.
@@ -24,10 +26,10 @@ import java.util.Comparator
* val pairs = Array(("a", 5, 2), ("c", 3, 1), ("b", 1, 3))
*
* // sort by 2nd element
- * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2)
+ * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2))
*
* // sort by the 3rd element, then 1st
- * Sorting.quickSort(pairs)(Ordering[(Int, String)].on[(String, Int, Int)]((_._3, _._1))
+ * Sorting.quickSort(pairs)(Ordering[(Int, String)].on(x => (x._3, x._1)))
* }}}
*
* An Ordering[T] is implemented by specifying compare(a:T, b:T), which
@@ -164,7 +166,7 @@ object Ordering extends LowPriorityOrderingImplicits {
/** Not in the standard scope due to the potential for divergence:
* For instance `implicitly[Ordering[Any]]` diverges in its presence.
*/
- implicit def seqDerivedOrdering[CC[X] <: collection.Seq[X], T](implicit ord: Ordering[T]): Ordering[CC[T]] =
+ implicit def seqDerivedOrdering[CC[X] <: scala.collection.Seq[X], T](implicit ord: Ordering[T]): Ordering[CC[T]] =
new Ordering[CC[T]] {
def compare(x: CC[T], y: CC[T]): Int = {
val xe = x.iterator
@@ -262,12 +264,52 @@ object Ordering extends LowPriorityOrderingImplicits {
implicit object Long extends LongOrdering
trait FloatOrdering extends Ordering[Float] {
+ outer =>
+
def compare(x: Float, y: Float) = java.lang.Float.compare(x, y)
+
+ override def lteq(x: Float, y: Float): Boolean = x <= y
+ override def gteq(x: Float, y: Float): Boolean = x >= y
+ override def lt(x: Float, y: Float): Boolean = x < y
+ override def gt(x: Float, y: Float): Boolean = x > y
+ override def equiv(x: Float, y: Float): Boolean = x == y
+ override def max(x: Float, y: Float): Float = math.max(x, y)
+ override def min(x: Float, y: Float): Float = math.min(x, y)
+
+ override def reverse: Ordering[Float] = new FloatOrdering {
+ override def reverse = outer
+ override def compare(x: Float, y: Float) = outer.compare(y, x)
+
+ override def lteq(x: Float, y: Float): Boolean = outer.lteq(y, x)
+ override def gteq(x: Float, y: Float): Boolean = outer.gteq(y, x)
+ override def lt(x: Float, y: Float): Boolean = outer.lt(y, x)
+ override def gt(x: Float, y: Float): Boolean = outer.gt(y, x)
+ }
}
implicit object Float extends FloatOrdering
trait DoubleOrdering extends Ordering[Double] {
+ outer =>
+
def compare(x: Double, y: Double) = java.lang.Double.compare(x, y)
+
+ override def lteq(x: Double, y: Double): Boolean = x <= y
+ override def gteq(x: Double, y: Double): Boolean = x >= y
+ override def lt(x: Double, y: Double): Boolean = x < y
+ override def gt(x: Double, y: Double): Boolean = x > y
+ override def equiv(x: Double, y: Double): Boolean = x == y
+ override def max(x: Double, y: Double): Double = math.max(x, y)
+ override def min(x: Double, y: Double): Double = math.min(x, y)
+
+ override def reverse: Ordering[Double] = new DoubleOrdering {
+ override def reverse = outer
+ override def compare(x: Double, y: Double) = outer.compare(y, x)
+
+ override def lteq(x: Double, y: Double): Boolean = outer.lteq(y, x)
+ override def gteq(x: Double, y: Double): Boolean = outer.gteq(y, x)
+ override def lt(x: Double, y: Double): Boolean = outer.lt(y, x)
+ override def gt(x: Double, y: Double): Boolean = outer.gt(y, x)
+ }
}
implicit object Double extends DoubleOrdering
diff --git a/src/library/scala/math/PartialOrdering.scala b/src/library/scala/math/PartialOrdering.scala
index cd3b132..a9e317d 100644
--- a/src/library/scala/math/PartialOrdering.scala
+++ b/src/library/scala/math/PartialOrdering.scala
@@ -1,39 +1,29 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.math
-/** <p>
- * A trait for representing partial orderings. It is important to
- * distinguish between a type that has a partial order and a representation
- * of partial ordering on some type. This trait is for representing the
- * latter.
- * </p>
- * <p>
- * A <a href="http://en.wikipedia.org/wiki/Partial_order">partial ordering</a>
- * is a binary relation on a type <code>T</code> that is also an equivalence
- * relation on values of type <code>T</code>. This relation is exposed as
- * the <code>lteq</code> method of the <code>PartialOrdering</code> trait.
- * This relation must be:
- * </p>
- * <ul>
- * <li>reflexive: <code>lteq(x, x) == true</code>, for any <code>x</code> of
- * type <code>T</code>.</li>
- * <li>anti-symmetric: <code>lteq(x, y) == true</code> and
- * <code>lteq(y, x) == true</code> then <code>equiv(x, y)</code>, for any
- * <code>x</code> and <code>y</code> of type <code>T</code>.</li>
- * <li>transitive: if <code>lteq(x, y) == true</code> and
- * <code>lteq(y, z) == true</code> then <code>lteq(x, z) == true</code>,
- * for any <code>x</code>, <code>y</code>, and <code>z</code> of type
- * <code>T</code>.</li>
- * </ul>
+/** A trait for representing partial orderings. It is important to
+ * distinguish between a type that has a partial order and a representation
+ * of partial ordering on some type. This trait is for representing the
+ * latter.
+ *
+ * A [[http://en.wikipedia.org/wiki/Partial_order partial ordering]] is a
+ * binary relation on a type `T` that is also an equivalence relation on
+ * values of type `T`. This relation is exposed as the `lteq` method of
+ * the `PartialOrdering` trait. This relation must be:
+ *
+ * - reflexive: `lteq(x, x) == '''true'''`, for any `x` of type `T`.
+ * - anti-symmetric: `lteq(x, y) == '''true'''` and `lteq(y, x) == true`
+ * then `equiv(x, y)`, for any `x` and `y` of type `T`.
+ * - transitive: if `lteq(x, y) == '''true'''` and
+ * `lteq(y, z) == '''true'''` then `lteq(x, z) == '''true'''`,
+ * for any `x`, `y`, and `z` of type `T`.
*
* @author Geoffrey Washburn
* @version 1.0, 2008-04-0-3
@@ -43,37 +33,34 @@ package scala.math
trait PartialOrdering[T] extends Equiv[T] {
outer =>
- /** Result of comparing <code>x</code> with operand <code>y</code>.
- * Returns <code>None</code> if operands are not comparable.
- * If operands are comparable, returns <code>Some(r)</code> where
- * <code>r < 0</code> iff <code>x < y</code>
- * <code>r == 0</code> iff <code>x == y</code>
- * <code>r > 0</code> iff <code>x > y</code>
+ /** Result of comparing `x` with operand `y`.
+ * Returns `None` if operands are not comparable.
+ * If operands are comparable, returns `Some(r)` where
+ * - `r < 0` iff `x < y`
+ * - `r == 0` iff `x == y`
+ * - `r > 0` iff `x > y`
*/
def tryCompare(x: T, y: T): Option[Int]
- /** Returns <code>true</code> iff <code>x</code> comes before
- * <code>y</code> in the ordering.
+ /** Returns `'''true'''` iff `x` comes before `y` in the ordering.
*/
def lteq(x: T, y: T): Boolean
- /** Returns <code>true</code> iff <code>y</code> comes before
- * <code>x</code> in the ordering.
+ /** Returns `'''true'''` iff `y` comes before `x` in the ordering.
*/
def gteq(x: T, y: T): Boolean = lteq(y, x)
- /** Returns <code>true</code> iff <code>x</code> comes before
- * <code>y</code> in the ordering and is not the same as <code>y</code>.
+ /** Returns `'''true'''` iff `x` comes before `y` in the ordering
+ * and is not the same as `y`.
*/
def lt(x: T, y: T): Boolean = lteq(x, y) && !equiv(x, y)
- /** Returns <code>true</code> iff <code>y</code> comes before
- * <code>x</code> in the ordering and is not the same as <code>x</code>.
+ /** Returns `'''true'''` iff `y` comes before `x` in the ordering
+ * and is not the same as `x`.
*/
def gt(x: T, y: T): Boolean = gteq(x, y) && !equiv(x, y)
- /** Returns <code>true</code> iff <code>x</code> is equivalent to
- * <code>y</code> in the ordering.
+ /** Returns `'''true'''` iff `x` is equivalent to `y` in the ordering.
*/
def equiv(x: T, y: T): Boolean = lteq(x,y) && lteq(y,x)
diff --git a/src/library/scala/math/PartiallyOrdered.scala b/src/library/scala/math/PartiallyOrdered.scala
index 6c447d8..7823e5b 100644
--- a/src/library/scala/math/PartiallyOrdered.scala
+++ b/src/library/scala/math/PartiallyOrdered.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -17,12 +17,12 @@ package scala.math
*/
trait PartiallyOrdered[+A] {
- /** Result of comparing <code>this</code> with operand <code>that</code>.
- * Returns <code>None</code> if operands are not comparable.
- * If operands are comparable, returns <code>Some(x)</code> where
- * <code>x < 0</code> iff <code>this < that</code>
- * <code>x == 0</code> iff <code>this == that</code>
- * <code>x > 0</code> iff <code>this > that</code>
+ /** Result of comparing `'''this'''` with operand `that`.
+ * Returns `None` if operands are not comparable.
+ * If operands are comparable, returns `Some(x)` where
+ * - `x < 0` iff `'''this''' < that`
+ * - `x == 0` iff `'''this''' == that`
+ * - `x > 0` iff `'''this''' > that`
*/
def tryCompareTo [B >: A <% PartiallyOrdered[B]](that: B): Option[Int]
diff --git a/src/library/scala/math/ScalaNumber.java b/src/library/scala/math/ScalaNumber.java
index 8379b67..7345147 100644
--- a/src/library/scala/math/ScalaNumber.java
+++ b/src/library/scala/math/ScalaNumber.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/math/ScalaNumericConversions.scala b/src/library/scala/math/ScalaNumericConversions.scala
index 2b7ef74..6ddf48d 100644
--- a/src/library/scala/math/ScalaNumericConversions.scala
+++ b/src/library/scala/math/ScalaNumericConversions.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,10 +10,27 @@ package scala.math
import java.{ lang => jl }
+/** A slightly more specific conversion trait for classes which
+ * extend ScalaNumber (which excludes value classes.)
+ */
+trait ScalaNumericConversions extends ScalaNumber with ScalaNumericAnyConversions {
+ def underlying(): Object
+}
+
/** Conversions which present a consistent conversion interface
- * across all the numeric types.
+ * across all the numeric types, suitable for use in value classes.
*/
-trait ScalaNumericConversions extends ScalaNumber {
+trait ScalaNumericAnyConversions extends Any {
+ def isWhole(): Boolean
+ def underlying(): Any
+
+ def byteValue(): Byte
+ def shortValue(): Short
+ def intValue(): Int
+ def longValue(): Long
+ def floatValue(): Float
+ def doubleValue(): Double
+
/** Returns the value of this as a [[scala.Char]]. This may involve
* rounding or truncation.
*/
diff --git a/src/library/scala/math/package.scala b/src/library/scala/math/package.scala
index 2aadfb8..cb033bd 100644
--- a/src/library/scala/math/package.scala
+++ b/src/library/scala/math/package.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,23 +8,167 @@
package scala
-/** The package object <code>scala.math</code> contains methods for performing basic numeric
- * operations such as the elementary exponential, logarithm, square root, and
- * trigonometric functions.
- */
+/** The package object `scala.math` contains methods for performing basic
+ * numeric operations such as elementary exponential, logarithmic, root and
+ * trigonometric functions.
+ */
+package object math {
+ /** The `double` value that is closer than any other to `e`, the base of
+ * the natural logarithms.
+ */
+ val E = java.lang.Math.E
-package object math extends MathCommon {
- // These are new in 2.8, so they don't belong in the deprecated scala.Math.
+ /** The `double` value that is closer than any other to `pi`, the ratio of
+ * the circumference of a circle to its diameter.
+ */
+ val Pi = java.lang.Math.PI
- def log10(x: Double): Double = java.lang.Math.log10(x)
+ /** Returns a `double` value with a positive sign, greater than or equal
+ * to `0.0` and less than `1.0`.
+ */
+ def random: Double = java.lang.Math.random()
+
+ def sin(x: Double): Double = java.lang.Math.sin(x)
+ def cos(x: Double): Double = java.lang.Math.cos(x)
+ def tan(x: Double): Double = java.lang.Math.tan(x)
+ def asin(x: Double): Double = java.lang.Math.asin(x)
+ def acos(x: Double): Double = java.lang.Math.acos(x)
+ def atan(x: Double): Double = java.lang.Math.atan(x)
+
+ /** Converts an angle measured in degrees to an approximately equivalent
+ * angle measured in radians.
+ *
+ * @param x an angle, in degrees
+ * @return the measurement of the angle `x` in radians.
+ */
+ def toRadians(x: Double): Double = java.lang.Math.toRadians(x)
+
+ /** Converts an angle measured in radians to an approximately equivalent
+ * angle measured in degrees.
+ *
+ * @param x angle, in radians
+ * @return the measurement of the angle `x` in degrees.
+ */
+ def toDegrees(x: Double): Double = java.lang.Math.toDegrees(x)
+
+ /** Returns Euler's number `e` raised to the power of a `double` value.
+ *
+ * @param x the exponent to raise `e` to.
+ * @return the value `e^a^`, where `e` is the base of the natural
+ * logarithms.
+ */
+ def exp(x: Double): Double = java.lang.Math.exp(x)
+ def log(x: Double): Double = java.lang.Math.log(x)
+ def sqrt(x: Double): Double = java.lang.Math.sqrt(x)
+ def IEEEremainder(x: Double, y: Double): Double = java.lang.Math.IEEEremainder(x, y)
+
+ def ceil(x: Double): Double = java.lang.Math.ceil(x)
+ def floor(x: Double): Double = java.lang.Math.floor(x)
+
+ /** Returns the `double` value that is closest in value to the
+ * argument and is equal to a mathematical integer.
+ *
+ * @param x a `double` value
+ * @return the closest floating-point value to a that is equal to a
+ * mathematical integer.
+ */
+ def rint(x: Double): Double = java.lang.Math.rint(x)
+
+ /** Converts rectangular coordinates `(x, y)` to polar `(r, theta)`.
+ *
+ * @param x the ordinate coordinate
+ * @param y the abscissa coordinate
+ * @return the ''theta'' component of the point `(r, theta)` in polar
+ * coordinates that corresponds to the point `(x, y)` in
+ * Cartesian coordinates.
+ */
+ def atan2(y: Double, x: Double): Double = java.lang.Math.atan2(y, x)
+
+ /** Returns the value of the first argument raised to the power of the
+ * second argument.
+ *
+ * @param x the base.
+ * @param y the exponent.
+ * @return the value `x^y^`.
+ */
+ def pow(x: Double, y: Double): Double = java.lang.Math.pow(x, y)
+
+ /** Returns the closest `long` to the argument.
+ *
+ * @param x a floating-point value to be rounded to a `long`.
+ * @return the value of the argument rounded to the nearest`long` value.
+ */
+ def round(x: Float): Int = java.lang.Math.round(x)
+ def round(x: Double): Long = java.lang.Math.round(x)
+ def abs(x: Int): Int = java.lang.Math.abs(x)
+ def abs(x: Long): Long = java.lang.Math.abs(x)
+ def abs(x: Float): Float = java.lang.Math.abs(x)
+ def abs(x: Double): Double = java.lang.Math.abs(x)
+
+ def max(x: Int, y: Int): Int = java.lang.Math.max(x, y)
+ def max(x: Long, y: Long): Long = java.lang.Math.max(x, y)
+ def max(x: Float, y: Float): Float = java.lang.Math.max(x, y)
+ def max(x: Double, y: Double): Double = java.lang.Math.max(x, y)
+
+ def min(x: Int, y: Int): Int = java.lang.Math.min(x, y)
+ def min(x: Long, y: Long): Long = java.lang.Math.min(x, y)
+ def min(x: Float, y: Float): Float = java.lang.Math.min(x, y)
+ def min(x: Double, y: Double): Double = java.lang.Math.min(x, y)
+
+ def signum(x: Int): Int = java.lang.Integer.signum(x)
+ def signum(x: Long): Long = java.lang.Long.signum(x)
+ def signum(x: Float): Float = java.lang.Math.signum(x)
+ def signum(x: Double): Double = java.lang.Math.signum(x)
+
+ // -----------------------------------------------------------------------
+ // root functions
+ // -----------------------------------------------------------------------
+
+ /** Returns the cube root of the given `Double` value. */
def cbrt(x: Double): Double = java.lang.Math.cbrt(x)
- def ulp(x: Double): Double = java.lang.Math.ulp(x)
- def ulp(x: Float): Float = java.lang.Math.ulp(x)
+ // -----------------------------------------------------------------------
+ // exponential functions
+ // -----------------------------------------------------------------------
+
+ /** Returns `exp(x) - 1`. */
+ def expm1(x: Double): Double = java.lang.Math.expm1(x)
+
+ // -----------------------------------------------------------------------
+ // logarithmic functions
+ // -----------------------------------------------------------------------
+
+ /** Returns the natural logarithm of the sum of the given `Double` value and 1. */
+ def log1p(x: Double): Double = java.lang.Math.log1p(x)
+
+ /** Returns the base 10 logarithm of the given `Double` value. */
+ def log10(x: Double): Double = java.lang.Math.log10(x)
+
+ // -----------------------------------------------------------------------
+ // trigonometric functions
+ // -----------------------------------------------------------------------
+
+ /** Returns the hyperbolic sine of the given `Double` value. */
def sinh(x: Double): Double = java.lang.Math.sinh(x)
+
+ /** Returns the hyperbolic cosine of the given `Double` value. */
def cosh(x: Double): Double = java.lang.Math.cosh(x)
+
+ /** Returns the hyperbolic tangent of the given `Double` value. */
def tanh(x: Double):Double = java.lang.Math.tanh(x)
+
+ // -----------------------------------------------------------------------
+ // miscellaneous functions
+ // -----------------------------------------------------------------------
+
+ /** Returns the square root of the sum of the squares of both given `Double`
+ * values without intermediate underflow or overflow.
+ */
def hypot(x: Double, y: Double): Double = java.lang.Math.hypot(x, y)
- def expm1(x: Double): Double = java.lang.Math.expm1(x)
- def log1p(x: Double): Double = java.lang.Math.log1p(x)
-}
\ No newline at end of file
+
+ /** Returns the size of an ulp of the given `Double` value. */
+ def ulp(x: Double): Double = java.lang.Math.ulp(x)
+
+ /** Returns the size of an ulp of the given `Float` value. */
+ def ulp(x: Float): Float = java.lang.Math.ulp(x)
+}
diff --git a/src/library/scala/native.scala b/src/library/scala/native.scala
index 28bb9f7..dbacc78 100644
--- a/src/library/scala/native.scala
+++ b/src/library/scala/native.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -20,4 +20,4 @@ package scala
* but it is type checked when present.
*
* @since 2.6 */
-class native extends annotation.StaticAnnotation {}
+class native extends scala.annotation.StaticAnnotation {}
diff --git a/src/library/scala/noinline.scala b/src/library/scala/noinline.scala
index de650ed..38fd4c3 100644
--- a/src/library/scala/noinline.scala
+++ b/src/library/scala/noinline.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -18,4 +18,4 @@ package scala
* @version 1.0, 2007-5-21
* @since 2.5
*/
-class noinline extends annotation.StaticAnnotation
+class noinline extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index 264fd18..84f6f0b 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -1,15 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-import annotation.bridge
/**
* Core Scala types. They are always available without an explicit import.
+ * @contentDiagram hideNodes "scala.Serializable"
*/
package object scala {
type Throwable = java.lang.Throwable
@@ -27,10 +27,19 @@ package object scala {
type NoSuchElementException = java.util.NoSuchElementException
type NumberFormatException = java.lang.NumberFormatException
type AbstractMethodError = java.lang.AbstractMethodError
+ type InterruptedException = java.lang.InterruptedException
+
+ // A dummy used by the specialization annotation.
+ val AnyRef = new Specializable {
+ override def toString = "object AnyRef"
+ }
@deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0")
type serializable = annotation.serializable
+ @deprecated("instead of `@cloneable class C`, use `class C extends Cloneable`", "2.10.0")
+ type cloneable = annotation.cloneable
+
type TraversableOnce[+A] = scala.collection.TraversableOnce[A]
type Traversable[+A] = scala.collection.Traversable[A]
@@ -58,6 +67,9 @@ package object scala {
type ::[A] = scala.collection.immutable.::[A]
val :: = scala.collection.immutable.::
+ val +: = scala.collection.+:
+ val :+ = scala.collection.:+
+
type Stream[+A] = scala.collection.immutable.Stream[A]
val Stream = scala.collection.immutable.Stream
val #:: = scala.collection.immutable.Stream.#::
@@ -71,15 +83,6 @@ package object scala {
type Range = scala.collection.immutable.Range
val Range = scala.collection.immutable.Range
- // Migrated from Predef
- @deprecated("Use Thread.currentThread instead", "2.9.0")
- def currentThread = java.lang.Thread.currentThread()
-
- // Moved back into Predef to avoid unnecessary indirection by
- // way of the scala package object within the standard library,
- // but bridged for compatibility.
- @bridge def $scope = scala.xml.TopScope
-
// Numeric types which were moved into scala.math.*
type BigDecimal = scala.math.BigDecimal
@@ -106,6 +109,15 @@ package object scala {
type PartialOrdering[T] = scala.math.PartialOrdering[T]
type PartiallyOrdered[T] = scala.math.PartiallyOrdered[T]
+ type Either[+A, +B] = scala.util.Either[A, B]
+ val Either = scala.util.Either
+
+ type Left[+A, +B] = scala.util.Left[A, B]
+ val Left = scala.util.Left
+
+ type Right[+A, +B] = scala.util.Right[A, B]
+ val Right = scala.util.Right
+
// Annotations which we might move to annotation.*
/*
type SerialVersionUID = annotation.SerialVersionUID
@@ -123,77 +135,4 @@ package object scala {
type unchecked = annotation.unchecked.unchecked
type volatile = annotation.volatile
*/
- @deprecated("Use Tuple1(x) to create a 1-tuple.", "2.8.0")
- def Tuple[A1](x1: A1) = Tuple1(x1)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2](x1: A1, x2: A2) = Tuple2(x1, x2)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3](x1: A1, x2: A2, x3: A3) = Tuple3(x1, x2, x3)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4](x1: A1, x2: A2, x3: A3, x4: A4) = Tuple4(x1, x2, x3, x4)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5) = Tuple5(x1, x2, x3, x4, x5)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6) = Tuple6(x1, x2, x3, x4, x5, x6)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7) = Tuple7(x1, x2, x3, x4, x5, x6, x7)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8) = Tuple8(x1, x2, x3, x4, x5, x6, x7, x8)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9) = Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10) = Tuple10(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11) = Tuple11(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12) = Tuple12(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13) = Tuple13(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14) = Tuple14(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15) = Tuple15(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16) = Tuple16(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17) = Tuple17(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18) = Tuple18(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19) = Tuple19(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19, x20: A20) = Tuple20(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19, x20: A20, x21: A21) = Tuple21(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)
- @deprecated("Use ((x1, x2, ...)) syntax to create Tuples", "2.8.0")
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19, x20: A20, x21: A21, x22: A22) = Tuple22(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)
-
- @deprecated("use java.lang.Integer instead", "2.6.0")
- type Integer = java.lang.Integer
- @deprecated("use java.lang.Character instead", "2.6.0")
- type Character = java.lang.Character
-
- @deprecated("use Iterable instead", "2.8.0")
- type Collection[+A] = Iterable[A]
- @deprecated("use Iterable instead", "2.8.0")
- val Collection = Iterable
-
- @deprecated("use Seq instead", "2.8.0")
- type Sequence[+A] = scala.collection.Seq[A]
- @deprecated("use Seq instead", "2.8.0")
- val Sequence = scala.collection.Seq
-
- @deprecated("use IndexedSeq instead", "2.8.0")
- type RandomAccessSeq[+A] = scala.collection.IndexedSeq[A]
- @deprecated("use IndexedSeq instead", "2.8.0")
- val RandomAccessSeq = scala.collection.IndexedSeq
-
- @deprecated("use scala.annotation.Annotation instead", "2.9.0")
- type Annotation = scala.annotation.Annotation
- @deprecated("use scala.annotation.ClassfileAnnotation instead", "2.9.0")
- type ClassfileAnnotation = scala.annotation.ClassfileAnnotation
- @deprecated("use scala.annotation.StaticAnnotation instead", "2.9.0")
- type StaticAnnotation = scala.annotation.StaticAnnotation
- @deprecated("use scala.annotation.TypeConstraint instead", "2.9.0")
- type TypeConstraint = scala.annotation.TypeConstraint
}
diff --git a/src/library/scala/parallel/Future.scala b/src/library/scala/parallel/Future.scala
index 29faa06..e255a57 100644
--- a/src/library/scala/parallel/Future.scala
+++ b/src/library/scala/parallel/Future.scala
@@ -1,24 +1,30 @@
-package scala.parallel
-
-
-
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.parallel
-/** A future is a function without parameters that will block the caller if the
- * parallel computation associated with the function is not completed.
+/** A future is a function without parameters that will block the caller if
+ * the parallel computation associated with the function is not completed.
+ *
+ * @tparam R the type of the result
*
* @since 2.9
*/
+ at deprecated("Use `scala.concurrent.Future` instead.", "2.10.0")
trait Future[@specialized +R] extends (() => R) {
- /** Returns a result once the parallel computation completes. If the computation
- * produced an exception, an exception is forwarded.
+ /** Returns a result once the parallel computation completes. If the
+ * computation produced an exception, an exception is forwarded.
*
- * '''Note:''' creating a circular dependency between futures by calling this method will
- * result in a deadlock.
+ * '''Note:''' creating a circular dependency between futures by calling
+ * this method will result in a deadlock.
*
- * @tparam R the type of the result
* @return the result
* @throws the exception that was thrown during a parallel computation
*/
@@ -31,5 +37,3 @@ trait Future[@specialized +R] extends (() => R) {
def isDone(): Boolean
}
-
-
diff --git a/src/library/scala/ref/PhantomReference.scala b/src/library/scala/ref/PhantomReference.scala
index bf70c21..80e77bd 100644
--- a/src/library/scala/ref/PhantomReference.scala
+++ b/src/library/scala/ref/PhantomReference.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/ref/Reference.scala b/src/library/scala/ref/Reference.scala
index 0906cf7..6377ddd 100644
--- a/src/library/scala/ref/Reference.scala
+++ b/src/library/scala/ref/Reference.scala
@@ -1,22 +1,21 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.ref
/**
- * @see <code>java.lang.ref.Reference</code>
+ * @see `java.lang.ref.Reference`
* @author Sean McDirmid
*/
trait Reference[+T <: AnyRef] extends Function0[T] {
/** return the underlying value */
def apply(): T
- /** return <code>Some</code> underlying if it hasn't been collected, otherwise <code>None</code> */
+ /** return `Some` underlying if it hasn't been collected, otherwise `None` */
def get: Option[T]
override def toString = get.map(_.toString).getOrElse("<deleted>")
def clear(): Unit
diff --git a/src/library/scala/ref/ReferenceQueue.scala b/src/library/scala/ref/ReferenceQueue.scala
index ec20a9a..89215ef 100644
--- a/src/library/scala/ref/ReferenceQueue.scala
+++ b/src/library/scala/ref/ReferenceQueue.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/ref/ReferenceWrapper.scala b/src/library/scala/ref/ReferenceWrapper.scala
index 868d96c..3da1f2e 100644
--- a/src/library/scala/ref/ReferenceWrapper.scala
+++ b/src/library/scala/ref/ReferenceWrapper.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -14,19 +14,15 @@ package scala.ref
*/
trait ReferenceWrapper[+T <: AnyRef] extends Reference[T] with Proxy {
val underlying: java.lang.ref.Reference[_ <: T]
- override def get = {
- val ret = underlying.get
- if (ret eq null) None else Some(ret)
- }
+ override def get = Option(underlying.get)
def apply() = {
val ret = underlying.get
if (ret eq null) throw new NoSuchElementException
ret
}
- def clear = underlying.clear
+ def clear() = underlying.clear()
def enqueue = underlying.enqueue
def isEnqueued = underlying.isEnqueued
-
def self = underlying
}
diff --git a/src/library/scala/ref/SoftReference.scala b/src/library/scala/ref/SoftReference.scala
index 1d4d2b1..b414db6 100644
--- a/src/library/scala/ref/SoftReference.scala
+++ b/src/library/scala/ref/SoftReference.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/ref/WeakReference.scala b/src/library/scala/ref/WeakReference.scala
index 98cfb2c..6eb4899 100644
--- a/src/library/scala/ref/WeakReference.scala
+++ b/src/library/scala/ref/WeakReference.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,6 +10,9 @@
package scala.ref
/**
+ * A wrapper class for java.lag.ref.WeakReference
+ * The new functionality is (1) results are Option values, instead of using null.
+ * (2) There is an extractor that maps the weak reference itself into an option.
* @author Sean McDirmid
*/
class WeakReference[+T <: AnyRef](value: T, queue: ReferenceQueue[T]) extends ReferenceWrapper[T] {
@@ -18,6 +21,19 @@ class WeakReference[+T <: AnyRef](value: T, queue: ReferenceQueue[T]) extends Re
new WeakReferenceWithWrapper[T](value, queue, this)
}
+/** An extractor for weak reference values */
+object WeakReference {
+
+ /** Creates a weak reference pointing to `value` */
+ def apply[T <: AnyRef](value: T) = new WeakReference(value)
+
+ /** Optionally returns the referenced value, or `None` if that value no longer exists */
+ def unapply[T <: AnyRef](wr: WeakReference[T]): Option[T] = {
+ val x = wr.underlying.get
+ if (x != null) Some(x) else None
+ }
+}
+
/**
* @author Philipp Haller
*/
diff --git a/src/library/scala/reflect/BeanDescription.scala b/src/library/scala/reflect/BeanDescription.scala
deleted file mode 100644
index d0069cd..0000000
--- a/src/library/scala/reflect/BeanDescription.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.reflect
-
-/** Provides a short description that will be included when generating
- * bean information. This annotation can be attached to the bean itself,
- * or to any member.
- *
- * @author Ross Judson (rjudson at managedobjects.com)
- */
-class BeanDescription(val description: String) extends annotation.Annotation
-
diff --git a/src/library/scala/reflect/BeanDisplayName.scala b/src/library/scala/reflect/BeanDisplayName.scala
deleted file mode 100644
index 5fecee4..0000000
--- a/src/library/scala/reflect/BeanDisplayName.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.reflect
-
-/** Provides a display name when generating bean information. This
- * annotation can be attached to the bean itself, or to any member.
- *
- * @author Ross Judson (rjudson at managedobjects.com)
- */
-class BeanDisplayName(val name: String) extends annotation.Annotation
-
diff --git a/src/library/scala/reflect/BeanInfo.scala b/src/library/scala/reflect/BeanInfo.scala
deleted file mode 100644
index ba8cc8b..0000000
--- a/src/library/scala/reflect/BeanInfo.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.reflect
-
-/** <p>
- * This annotation indicates that a JavaBean-compliant BeanInfo
- * class should be generated for this annotated Scala class.
- * A val becomes a read-only property. A var becomes a read-write
- * property. A def becomes a method.
- * </p>
- *
- * @author Ross Judson (rjudson at managedobjects.com)
- */
-class BeanInfo extends annotation.Annotation
diff --git a/src/library/scala/reflect/BeanInfoSkip.scala b/src/library/scala/reflect/BeanInfoSkip.scala
deleted file mode 100644
index 4059686..0000000
--- a/src/library/scala/reflect/BeanInfoSkip.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.reflect
-
-/** This annotation indicates that bean information should
- * <strong>not</strong> be generated for the val, var, or def that it is
- * attached to.
- *
- * @author Ross Judson (rjudson at managedobjects.com)
- */
-class BeanInfoSkip extends annotation.Annotation
diff --git a/src/library/scala/reflect/BeanProperty.scala b/src/library/scala/reflect/BeanProperty.scala
deleted file mode 100644
index 84a2429..0000000
--- a/src/library/scala/reflect/BeanProperty.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.reflect
-
-import annotation.target._
-
-/** <p>
- * When attached to a field, this annotation adds a setter and a getter
- * method following the Java Bean convention. For example:
- * </p><pre>
- * @BeanProperty
- * <b>var</b> status = ""</pre>
- * <p>
- * adds the following methods to the class:
- * </p><pre>
- * <b>def</b> setStatus(s: String) { <b>this</b>.status = s }
- * <b>def</b> getStatus: String = <b>this</b>.status
- * </pre>
- * <p>
- * For fields of type <code>Boolean</code>, if you need a getter
- * named <code>isStatus</code>, use the
- * <code>scala.reflect.BooleanBeanProperty</code> annotation instead.
- * </p>
- */
- at field
-class BeanProperty extends annotation.StaticAnnotation
diff --git a/src/library/scala/reflect/BooleanBeanProperty.scala b/src/library/scala/reflect/BooleanBeanProperty.scala
deleted file mode 100644
index fa0bf27..0000000
--- a/src/library/scala/reflect/BooleanBeanProperty.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.reflect
-
-import annotation.target._
-
-/** <p>
- * This annotation has the same functionality as
- * <code>scala.reflect.BeanProperty</code>, but the generated
- * Bean getter will be named <code>isFieldName</code> instead
- * of <code>getFieldName</code>.
- * </p>
- */
- at field
-class BooleanBeanProperty extends annotation.StaticAnnotation
diff --git a/src/library/scala/reflect/ClassManifest.scala b/src/library/scala/reflect/ClassManifest.scala
deleted file mode 100644
index 3f3892e..0000000
--- a/src/library/scala/reflect/ClassManifest.scala
+++ /dev/null
@@ -1,242 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.reflect
-
-import scala.collection.mutable.{ WrappedArray, ArrayBuilder }
-import java.lang.{ Class => JClass }
-
-/** A ClassManifest[T] is an opaque descriptor for type T.
- * It is used by the compiler to preserve information necessary
- * for instantiating Arrays in those cases where the element type
- * is unknown at compile time.
- *
- * The type-relation operators make an effort to present a
- * more accurate picture than can be realized with erased types,
- * but they should not be relied upon to give correct answers.
- * In particular they are likely to be wrong when variance is
- * involved or when a subtype has a different number of type
- * arguments than a supertype.
- */
-trait ClassManifest[T] extends OptManifest[T] with Equals with Serializable {
- /** A class representing the type U to which T would be erased. Note
- * that there is no subtyping relationship between T and U. */
- def erasure: JClass[_]
-
- private def subtype(sub: JClass[_], sup: JClass[_]): Boolean = {
- def loop(left: Set[JClass[_]], seen: Set[JClass[_]]): Boolean = {
- left.nonEmpty && {
- val next = left.head
- val supers = next.getInterfaces.toSet ++ Option(next.getSuperclass)
- supers(sup) || {
- val xs = left ++ supers filterNot seen
- loop(xs - next, seen + next)
- }
- }
- }
- loop(Set(sub), Set())
- }
-
- private def subargs(args1: List[OptManifest[_]], args2: List[OptManifest[_]]) = (args1 corresponds args2) {
- // !!! [Martin] this is wrong, need to take variance into account
- case (x: ClassManifest[_], y: ClassManifest[_]) => x <:< y
- case (x, y) => (x eq NoManifest) && (y eq NoManifest)
- }
-
- /** Tests whether the type represented by this manifest is a subtype
- * of the type represented by `that' manifest, subject to the limitations
- * described in the header.
- */
- def <:<(that: ClassManifest[_]): Boolean = {
- // All types which could conform to these types will override <:<.
- def cannotMatch = {
- import Manifest._
- that.isInstanceOf[AnyValManifest[_]] || (that eq AnyVal) || (that eq Nothing) || (that eq Null)
- }
-
- // This is wrong, and I don't know how it can be made right
- // without more development of Manifests, due to arity-defying
- // relationships like:
- //
- // List[String] <: AnyRef
- // Map[Int, Int] <: Iterable[(Int, Int)]
- //
- // Given the manifest for Map[A, B] how do I determine that a
- // supertype has single type argument (A, B) ? I don't see how we
- // can say whether X <:< Y when type arguments are involved except
- // when the erasure is the same, even before considering variance.
- !cannotMatch && {
- // this part is wrong for not considering variance
- if (this.erasure == that.erasure)
- subargs(this.typeArguments, that.typeArguments)
- // this part is wrong for punting unless the rhs has no type
- // arguments, but it's better than a blindfolded pinata swing.
- else
- that.typeArguments.isEmpty && subtype(this.erasure, that.erasure)
- }
- }
-
- /** Tests whether the type represented by this manifest is a supertype
- * of the type represented by `that' manifest, subject to the limitations
- * described in the header.
- */
- def >:>(that: ClassManifest[_]): Boolean =
- that <:< this
-
- def canEqual(other: Any) = other match {
- case _: ClassManifest[_] => true
- case _ => false
- }
-
- /** Tests whether the type represented by this manifest is equal to
- * the type represented by `that' manifest, subject to the limitations
- * described in the header.
- */
- override def equals(that: Any): Boolean = that match {
- case m: ClassManifest[_] => (m canEqual this) && (this.erasure == m.erasure)
- case _ => false
- }
- override def hashCode = this.erasure.##
-
- protected def arrayClass[T](tp: JClass[_]): JClass[Array[T]] =
- java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[JClass[Array[T]]]
-
- def arrayManifest: ClassManifest[Array[T]] =
- ClassManifest.classType[Array[T]](arrayClass[T](erasure))
-
- def newArray(len: Int): Array[T] =
- java.lang.reflect.Array.newInstance(erasure, len).asInstanceOf[Array[T]]
-
- def newArray2(len: Int): Array[Array[T]] =
- java.lang.reflect.Array.newInstance(arrayClass[T](erasure), len)
- .asInstanceOf[Array[Array[T]]]
-
- def newArray3(len: Int): Array[Array[Array[T]]] =
- java.lang.reflect.Array.newInstance(arrayClass[Array[T]](arrayClass[T](erasure)), len)
- .asInstanceOf[Array[Array[Array[T]]]]
-
- def newArray4(len: Int): Array[Array[Array[Array[T]]]] =
- java.lang.reflect.Array.newInstance(arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure))), len)
- .asInstanceOf[Array[Array[Array[Array[T]]]]]
-
- def newArray5(len: Int): Array[Array[Array[Array[Array[T]]]]] =
- java.lang.reflect.Array.newInstance(arrayClass[Array[Array[Array[T]]]](arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure)))), len)
- .asInstanceOf[Array[Array[Array[Array[Array[T]]]]]]
-
- def newWrappedArray(len: Int): WrappedArray[T] =
- // it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests
- new WrappedArray.ofRef[T with AnyRef](newArray(len).asInstanceOf[Array[T with AnyRef]]).asInstanceOf[WrappedArray[T]]
-
- def newArrayBuilder(): ArrayBuilder[T] =
- // it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests
- new ArrayBuilder.ofRef[T with AnyRef]()(this.asInstanceOf[ClassManifest[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]]
-
- def typeArguments: List[OptManifest[_]] = List()
-
- protected def argString =
- if (typeArguments.nonEmpty) typeArguments.mkString("[", ", ", "]")
- else if (erasure.isArray) "["+ClassManifest.fromClass(erasure.getComponentType)+"]"
- else ""
-}
-
-/** The object ClassManifest defines factory methods for manifests.
- * It is intended for use by the compiler and should not be used
- * in client code.
- */
-object ClassManifest {
- val Byte = Manifest.Byte
- val Short = Manifest.Short
- val Char = Manifest.Char
- val Int = Manifest.Int
- val Long = Manifest.Long
- val Float = Manifest.Float
- val Double = Manifest.Double
- val Boolean = Manifest.Boolean
- val Unit = Manifest.Unit
- val Any = Manifest.Any
- val Object = Manifest.Object
- val AnyVal = Manifest.AnyVal
- val Nothing = Manifest.Nothing
- val Null = Manifest.Null
-
- def fromClass[T](clazz: JClass[T]): ClassManifest[T] = clazz match {
- case java.lang.Byte.TYPE => Byte.asInstanceOf[ClassManifest[T]]
- case java.lang.Short.TYPE => Short.asInstanceOf[ClassManifest[T]]
- case java.lang.Character.TYPE => Char.asInstanceOf[ClassManifest[T]]
- case java.lang.Integer.TYPE => Int.asInstanceOf[ClassManifest[T]]
- case java.lang.Long.TYPE => Long.asInstanceOf[ClassManifest[T]]
- case java.lang.Float.TYPE => Float.asInstanceOf[ClassManifest[T]]
- case java.lang.Double.TYPE => Double.asInstanceOf[ClassManifest[T]]
- case java.lang.Boolean.TYPE => Boolean.asInstanceOf[ClassManifest[T]]
- case java.lang.Void.TYPE => Unit.asInstanceOf[ClassManifest[T]]
- case _ => classType[T with AnyRef](clazz).asInstanceOf[ClassManifest[T]]
- }
-
- def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = Manifest.singleType(value)
-
- /** ClassManifest for the class type `clazz', where `clazz' is
- * a top-level or static class.
- * @note This no-prefix, no-arguments case is separate because we
- * it's called from ScalaRunTime.boxArray itself. If we
- * pass varargs as arrays into this, we get an infinitely recursive call
- * to boxArray. (Besides, having a separate case is more efficient)
- */
- def classType[T <: AnyRef](clazz: JClass[_]): ClassManifest[T] =
- new ClassTypeManifest[T](None, clazz, Nil)
-
- /** ClassManifest for the class type `clazz[args]', where `clazz' is
- * a top-level or static class and `args` are its type arguments */
- def classType[T <: AnyRef](clazz: JClass[_], arg1: OptManifest[_], args: OptManifest[_]*): ClassManifest[T] =
- new ClassTypeManifest[T](None, clazz, arg1 :: args.toList)
-
- /** ClassManifest for the class type `clazz[args]', where `clazz' is
- * a class with non-package prefix type `prefix` and type arguments `args`.
- */
- def classType[T <: AnyRef](prefix: OptManifest[_], clazz: JClass[_], args: OptManifest[_]*): ClassManifest[T] =
- new ClassTypeManifest[T](Some(prefix), clazz, args.toList)
-
- def arrayType[T](arg: OptManifest[_]): ClassManifest[Array[T]] = arg match {
- case NoManifest => Object.asInstanceOf[ClassManifest[Array[T]]]
- case m: ClassManifest[_] => m.asInstanceOf[ClassManifest[T]].arrayManifest
- }
-
- /** ClassManifest for the abstract type `prefix # name'. `upperBound' is not
- * strictly necessary as it could be obtained by reflection. It was
- * added so that erasure can be calculated without reflection. */
- def abstractType[T](prefix: OptManifest[_], name: String, clazz: JClass[_], args: OptManifest[_]*): ClassManifest[T] =
- new ClassManifest[T] {
- def erasure = clazz
- override val typeArguments = args.toList
- override def toString = prefix.toString+"#"+name+argString
- }
-
- /** ClassManifest for the abstract type `prefix # name'. `upperBound' is not
- * strictly necessary as it could be obtained by reflection. It was
- * added so that erasure can be calculated without reflection.
- * todo: remove after next boostrap
- */
- def abstractType[T](prefix: OptManifest[_], name: String, upperbound: ClassManifest[_], args: OptManifest[_]*): ClassManifest[T] =
- new ClassManifest[T] {
- def erasure = upperbound.erasure
- override val typeArguments = args.toList
- override def toString = prefix.toString+"#"+name+argString
- }
-}
-
-/** Manifest for the class type `clazz[args]', where `clazz' is
- * a top-level or static class. */
-private class ClassTypeManifest[T <: AnyRef](
- prefix: Option[OptManifest[_]],
- val erasure: JClass[_],
- override val typeArguments: List[OptManifest[_]]) extends ClassManifest[T]
-{
- override def toString =
- (if (prefix.isEmpty) "" else prefix.get.toString+"#") +
- (if (erasure.isArray) "Array" else erasure.getName) +
- argString
-}
diff --git a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
new file mode 100644
index 0000000..0a3d818
--- /dev/null
+++ b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
@@ -0,0 +1,240 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.reflect
+
+import scala.collection.mutable.{ WrappedArray, ArrayBuilder }
+import java.lang.{ Class => jClass }
+
+ at deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+trait ClassManifestDeprecatedApis[T] extends OptManifest[T] {
+ self: ClassManifest[T] =>
+
+ @deprecated("Use runtimeClass instead", "2.10.0")
+ def erasure: jClass[_] = runtimeClass
+
+ private def subtype(sub: jClass[_], sup: jClass[_]): Boolean = {
+ def loop(left: Set[jClass[_]], seen: Set[jClass[_]]): Boolean = {
+ left.nonEmpty && {
+ val next = left.head
+ val supers = next.getInterfaces.toSet ++ Option(next.getSuperclass)
+ supers(sup) || {
+ val xs = left ++ supers filterNot seen
+ loop(xs - next, seen + next)
+ }
+ }
+ }
+ loop(Set(sub), Set())
+ }
+
+ private def subargs(args1: List[OptManifest[_]], args2: List[OptManifest[_]]) = (args1 corresponds args2) {
+ // !!! [Martin] this is wrong, need to take variance into account
+ case (x: ClassManifest[_], y: ClassManifest[_]) => x <:< y
+ case (x, y) => (x eq NoManifest) && (y eq NoManifest)
+ }
+
+ /** Tests whether the type represented by this manifest is a subtype
+ * of the type represented by `that` manifest, subject to the limitations
+ * described in the header.
+ */
+ @deprecated("Use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0")
+ def <:<(that: ClassManifest[_]): Boolean = {
+ // All types which could conform to these types will override <:<.
+ def cannotMatch = {
+ import Manifest._
+ that.isInstanceOf[AnyValManifest[_]] || (that eq AnyVal) || (that eq Nothing) || (that eq Null)
+ }
+
+ // This is wrong, and I don't know how it can be made right
+ // without more development of Manifests, due to arity-defying
+ // relationships like:
+ //
+ // List[String] <: AnyRef
+ // Map[Int, Int] <: Iterable[(Int, Int)]
+ //
+ // Given the manifest for Map[A, B] how do I determine that a
+ // supertype has single type argument (A, B) ? I don't see how we
+ // can say whether X <:< Y when type arguments are involved except
+ // when the erasure is the same, even before considering variance.
+ !cannotMatch && {
+ // this part is wrong for not considering variance
+ if (this.erasure == that.erasure)
+ subargs(this.typeArguments, that.typeArguments)
+ // this part is wrong for punting unless the rhs has no type
+ // arguments, but it's better than a blindfolded pinata swing.
+ else
+ that.typeArguments.isEmpty && subtype(this.erasure, that.erasure)
+ }
+ }
+
+ /** Tests whether the type represented by this manifest is a supertype
+ * of the type represented by `that` manifest, subject to the limitations
+ * described in the header.
+ */
+ @deprecated("Use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0")
+ def >:>(that: ClassManifest[_]): Boolean =
+ that <:< this
+
+ override def canEqual(other: Any) = other match {
+ case _: ClassManifest[_] => true
+ case _ => false
+ }
+
+ protected def arrayClass[T](tp: jClass[_]): jClass[Array[T]] =
+ java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[jClass[Array[T]]]
+
+ @deprecated("Use wrap instead", "2.10.0")
+ def arrayManifest: ClassManifest[Array[T]] =
+ ClassManifest.classType[Array[T]](arrayClass[T](erasure), this)
+
+ override def newArray(len: Int): Array[T] =
+ java.lang.reflect.Array.newInstance(erasure, len).asInstanceOf[Array[T]]
+
+ @deprecated("Use wrap.newArray instead", "2.10.0")
+ def newArray2(len: Int): Array[Array[T]] =
+ java.lang.reflect.Array.newInstance(arrayClass[T](erasure), len)
+ .asInstanceOf[Array[Array[T]]]
+
+ @deprecated("Use wrap.wrap.newArray instead", "2.10.0")
+ def newArray3(len: Int): Array[Array[Array[T]]] =
+ java.lang.reflect.Array.newInstance(arrayClass[Array[T]](arrayClass[T](erasure)), len)
+ .asInstanceOf[Array[Array[Array[T]]]]
+
+ @deprecated("Use wrap.wrap.wrap.newArray instead", "2.10.0")
+ def newArray4(len: Int): Array[Array[Array[Array[T]]]] =
+ java.lang.reflect.Array.newInstance(arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure))), len)
+ .asInstanceOf[Array[Array[Array[Array[T]]]]]
+
+ @deprecated("Use wrap.wrap.wrap.wrap.newArray instead", "2.10.0")
+ def newArray5(len: Int): Array[Array[Array[Array[Array[T]]]]] =
+ java.lang.reflect.Array.newInstance(arrayClass[Array[Array[Array[T]]]](arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](erasure)))), len)
+ .asInstanceOf[Array[Array[Array[Array[Array[T]]]]]]
+
+ @deprecated("Create WrappedArray directly instead", "2.10.0")
+ def newWrappedArray(len: Int): WrappedArray[T] =
+ // it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests
+ new WrappedArray.ofRef[T with AnyRef](newArray(len).asInstanceOf[Array[T with AnyRef]]).asInstanceOf[WrappedArray[T]]
+
+ @deprecated("Use ArrayBuilder.make(this) instead", "2.10.0")
+ def newArrayBuilder(): ArrayBuilder[T] =
+ // it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests
+ new ArrayBuilder.ofRef[T with AnyRef]()(this.asInstanceOf[ClassManifest[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]]
+
+ @deprecated("Use scala.reflect.runtime.universe.TypeTag to capture type structure instead", "2.10.0")
+ def typeArguments: List[OptManifest[_]] = List()
+
+ protected def argString =
+ if (typeArguments.nonEmpty) typeArguments.mkString("[", ", ", "]")
+ else if (erasure.isArray) "["+ClassManifest.fromClass(erasure.getComponentType)+"]"
+ else ""
+}
+
+/** `ClassManifestFactory` defines factory methods for manifests.
+ * It is intended for use by the compiler and should not be used in client code.
+ *
+ * Unlike `ClassManifest`, this factory isn't annotated with a deprecation warning.
+ * This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests.
+ *
+ * In a perfect world, we would just remove the @deprecated annotation from `ClassManifest` the object
+ * and then delete it in 2.11. After all, that object is explicitly marked as internal, so noone should use it.
+ * However a lot of existing libraries disregarded the scaladoc that comes with `ClassManifest`,
+ * so we need to somehow nudge them into migrating prior to removing stuff out of the blue.
+ * Hence we've introduced this design decision as the lesser of two evils.
+ */
+object ClassManifestFactory {
+ val Byte = ManifestFactory.Byte
+ val Short = ManifestFactory.Short
+ val Char = ManifestFactory.Char
+ val Int = ManifestFactory.Int
+ val Long = ManifestFactory.Long
+ val Float = ManifestFactory.Float
+ val Double = ManifestFactory.Double
+ val Boolean = ManifestFactory.Boolean
+ val Unit = ManifestFactory.Unit
+ val Any = ManifestFactory.Any
+ val Object = ManifestFactory.Object
+ val AnyVal = ManifestFactory.AnyVal
+ val Nothing = ManifestFactory.Nothing
+ val Null = ManifestFactory.Null
+
+ def fromClass[T](clazz: jClass[T]): ClassManifest[T] = clazz match {
+ case java.lang.Byte.TYPE => Byte.asInstanceOf[ClassManifest[T]]
+ case java.lang.Short.TYPE => Short.asInstanceOf[ClassManifest[T]]
+ case java.lang.Character.TYPE => Char.asInstanceOf[ClassManifest[T]]
+ case java.lang.Integer.TYPE => Int.asInstanceOf[ClassManifest[T]]
+ case java.lang.Long.TYPE => Long.asInstanceOf[ClassManifest[T]]
+ case java.lang.Float.TYPE => Float.asInstanceOf[ClassManifest[T]]
+ case java.lang.Double.TYPE => Double.asInstanceOf[ClassManifest[T]]
+ case java.lang.Boolean.TYPE => Boolean.asInstanceOf[ClassManifest[T]]
+ case java.lang.Void.TYPE => Unit.asInstanceOf[ClassManifest[T]]
+ case _ => classType[T with AnyRef](clazz).asInstanceOf[ClassManifest[T]]
+ }
+
+ def singleType[T <: AnyRef](value: AnyRef): Manifest[T] = Manifest.singleType(value)
+
+ /** ClassManifest for the class type `clazz`, where `clazz` is
+ * a top-level or static class.
+ * @note This no-prefix, no-arguments case is separate because we
+ * it's called from ScalaRunTime.boxArray itself. If we
+ * pass varargs as arrays into this, we get an infinitely recursive call
+ * to boxArray. (Besides, having a separate case is more efficient)
+ */
+ def classType[T](clazz: jClass[_]): ClassManifest[T] =
+ new ClassTypeManifest[T](None, clazz, Nil)
+
+ /** ClassManifest for the class type `clazz[args]`, where `clazz` is
+ * a top-level or static class and `args` are its type arguments */
+ def classType[T](clazz: jClass[_], arg1: OptManifest[_], args: OptManifest[_]*): ClassManifest[T] =
+ new ClassTypeManifest[T](None, clazz, arg1 :: args.toList)
+
+ /** ClassManifest for the class type `clazz[args]`, where `clazz` is
+ * a class with non-package prefix type `prefix` and type arguments `args`.
+ */
+ def classType[T](prefix: OptManifest[_], clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] =
+ new ClassTypeManifest[T](Some(prefix), clazz, args.toList)
+
+ def arrayType[T](arg: OptManifest[_]): ClassManifest[Array[T]] = arg match {
+ case NoManifest => Object.asInstanceOf[ClassManifest[Array[T]]]
+ case m: ClassManifest[_] => m.asInstanceOf[ClassManifest[T]].arrayManifest
+ }
+
+ /** ClassManifest for the abstract type `prefix # name`. `upperBound` is not
+ * strictly necessary as it could be obtained by reflection. It was
+ * added so that erasure can be calculated without reflection. */
+ def abstractType[T](prefix: OptManifest[_], name: String, clazz: jClass[_], args: OptManifest[_]*): ClassManifest[T] =
+ new ClassManifest[T] {
+ override def runtimeClass = clazz
+ override val typeArguments = args.toList
+ override def toString = prefix.toString+"#"+name+argString
+ }
+
+ /** ClassManifest for the abstract type `prefix # name`. `upperBound` is not
+ * strictly necessary as it could be obtained by reflection. It was
+ * added so that erasure can be calculated without reflection.
+ * todo: remove after next boostrap
+ */
+ def abstractType[T](prefix: OptManifest[_], name: String, upperbound: ClassManifest[_], args: OptManifest[_]*): ClassManifest[T] =
+ new ClassManifest[T] {
+ override def runtimeClass = upperbound.erasure
+ override val typeArguments = args.toList
+ override def toString = prefix.toString+"#"+name+argString
+ }
+}
+
+/** Manifest for the class type `clazz[args]`, where `clazz` is
+ * a top-level or static class */
+private class ClassTypeManifest[T](
+ prefix: Option[OptManifest[_]],
+ val runtimeClass: jClass[_],
+ override val typeArguments: List[OptManifest[_]]) extends ClassManifest[T]
+{
+ override def toString =
+ (if (prefix.isEmpty) "" else prefix.get.toString+"#") +
+ (if (erasure.isArray) "Array" else erasure.getName) +
+ argString
+}
\ No newline at end of file
diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala
new file mode 100644
index 0000000..d699e34
--- /dev/null
+++ b/src/library/scala/reflect/ClassTag.scala
@@ -0,0 +1,148 @@
+package scala
+package reflect
+
+import java.lang.{ Class => jClass }
+import scala.language.{implicitConversions, existentials}
+import scala.runtime.ScalaRunTime.{ arrayClass, arrayElementClass }
+
+/**
+ *
+ * A `ClassTag[T]` stores the erased class of a given type `T`, accessible via the `runtimeClass`
+ * field. This is particularly useful for instantiating `Array`s whose element types are unknown
+ * at compile time.
+ *
+ * `ClassTag`s are a weaker special case of [[scala.reflect.api.TypeTags#TypeTag]]s, in that they
+ * wrap only the runtime class of a given type, whereas a `TypeTag` contains all static type
+ * information. That is, `ClassTag`s are constructed from knowing only the top-level class of a
+ * type, without necessarily knowing all of its argument types. This runtime information is enough
+ * for runtime `Array` creation.
+ *
+ * For example:
+ * {{{
+ * scala> def mkArray[T : ClassTag](elems: T*) = Array[T](elems: _*)
+ * mkArray: [T](elems: T*)(implicit evidence$1: scala.reflect.ClassTag[T])Array[T]
+ *
+ * scala> mkArray(42, 13)
+ * res0: Array[Int] = Array(42, 13)
+ *
+ * scala> mkArray("Japan","Brazil","Germany")
+ * res1: Array[String] = Array(Japan, Brazil, Germany)
+ * }}}
+ *
+ * See [[scala.reflect.api.TypeTags]] for more examples, or the
+ * [[http://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]]
+ * for more details.
+ *
+ */
+ at scala.annotation.implicitNotFound(msg = "No ClassTag available for ${T}")
+trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serializable {
+ // please, don't add any APIs here, like it was with `newWrappedArray` and `newArrayBuilder`
+ // class tags, and all tags in general, should be as minimalistic as possible
+
+ /** A class representing the type `U` to which `T` would be erased.
+ * Note that there is no subtyping relationship between `T` and `U`.
+ */
+ def runtimeClass: jClass[_]
+
+ /** Produces a `ClassTag` that knows how to instantiate an `Array[Array[T]]` */
+ def wrap: ClassTag[Array[T]] = ClassTag[Array[T]](arrayClass(runtimeClass))
+
+ /** Produces a new array with element type `T` and length `len` */
+ override def newArray(len: Int): Array[T] =
+ runtimeClass match {
+ case java.lang.Byte.TYPE => new Array[Byte](len).asInstanceOf[Array[T]]
+ case java.lang.Short.TYPE => new Array[Short](len).asInstanceOf[Array[T]]
+ case java.lang.Character.TYPE => new Array[Char](len).asInstanceOf[Array[T]]
+ case java.lang.Integer.TYPE => new Array[Int](len).asInstanceOf[Array[T]]
+ case java.lang.Long.TYPE => new Array[Long](len).asInstanceOf[Array[T]]
+ case java.lang.Float.TYPE => new Array[Float](len).asInstanceOf[Array[T]]
+ case java.lang.Double.TYPE => new Array[Double](len).asInstanceOf[Array[T]]
+ case java.lang.Boolean.TYPE => new Array[Boolean](len).asInstanceOf[Array[T]]
+ case java.lang.Void.TYPE => new Array[Unit](len).asInstanceOf[Array[T]]
+ case _ => java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]]
+ }
+
+ /** A ClassTag[T] can serve as an extractor that matches only objects of type T.
+ *
+ * The compiler tries to turn unchecked type tests in pattern matches into checked ones
+ * by wrapping a `(_: T)` type pattern as `ct(_: T)`, where `ct` is the `ClassTag[T]` instance.
+ * Type tests necessary before calling other extractors are treated similarly.
+ * `SomeExtractor(...)` is turned into `ct(SomeExtractor(...))` if `T` in `SomeExtractor.unapply(x: T)`
+ * is uncheckable, but we have an instance of `ClassTag[T]`.
+ */
+ def unapply(x: Any): Option[T] = unapply_impl(x)
+ def unapply(x: Byte): Option[T] = unapply_impl(x)
+ def unapply(x: Short): Option[T] = unapply_impl(x)
+ def unapply(x: Char): Option[T] = unapply_impl(x)
+ def unapply(x: Int): Option[T] = unapply_impl(x)
+ def unapply(x: Long): Option[T] = unapply_impl(x)
+ def unapply(x: Float): Option[T] = unapply_impl(x)
+ def unapply(x: Double): Option[T] = unapply_impl(x)
+ def unapply(x: Boolean): Option[T] = unapply_impl(x)
+ def unapply(x: Unit): Option[T] = unapply_impl(x)
+
+ private def unapply_impl[U: ClassTag](x: U): Option[T] =
+ if (x == null) None
+ else {
+ val staticClass = classTag[U].runtimeClass
+ val dynamicClass = x.getClass
+ val effectiveClass = if (staticClass.isPrimitive) staticClass else dynamicClass
+ val conforms = runtimeClass.isAssignableFrom(effectiveClass)
+ if (conforms) Some(x.asInstanceOf[T]) else None
+ }
+
+ // case class accessories
+ override def canEqual(x: Any) = x.isInstanceOf[ClassTag[_]]
+ override def equals(x: Any) = x.isInstanceOf[ClassTag[_]] && this.runtimeClass == x.asInstanceOf[ClassTag[_]].runtimeClass
+ override def hashCode = scala.runtime.ScalaRunTime.hash(runtimeClass)
+ override def toString = {
+ def prettyprint(clazz: jClass[_]): String =
+ if (clazz.isArray) s"Array[${prettyprint(arrayElementClass(clazz))}]" else
+ clazz.getName
+ prettyprint(runtimeClass)
+ }
+}
+
+/**
+ * Class tags corresponding to primitive types and constructor/extractor for ClassTags.
+ */
+object ClassTag {
+ private val ObjectTYPE = classOf[java.lang.Object]
+ private val NothingTYPE = classOf[scala.runtime.Nothing$]
+ private val NullTYPE = classOf[scala.runtime.Null$]
+
+ val Byte : ClassTag[scala.Byte] = Manifest.Byte
+ val Short : ClassTag[scala.Short] = Manifest.Short
+ val Char : ClassTag[scala.Char] = Manifest.Char
+ val Int : ClassTag[scala.Int] = Manifest.Int
+ val Long : ClassTag[scala.Long] = Manifest.Long
+ val Float : ClassTag[scala.Float] = Manifest.Float
+ val Double : ClassTag[scala.Double] = Manifest.Double
+ val Boolean : ClassTag[scala.Boolean] = Manifest.Boolean
+ val Unit : ClassTag[scala.Unit] = Manifest.Unit
+ val Any : ClassTag[scala.Any] = Manifest.Any
+ val Object : ClassTag[java.lang.Object] = Manifest.Object
+ val AnyVal : ClassTag[scala.AnyVal] = Manifest.AnyVal
+ val AnyRef : ClassTag[scala.AnyRef] = Manifest.AnyRef
+ val Nothing : ClassTag[scala.Nothing] = Manifest.Nothing
+ val Null : ClassTag[scala.Null] = Manifest.Null
+
+ def apply[T](runtimeClass1: jClass[_]): ClassTag[T] =
+ runtimeClass1 match {
+ case java.lang.Byte.TYPE => ClassTag.Byte.asInstanceOf[ClassTag[T]]
+ case java.lang.Short.TYPE => ClassTag.Short.asInstanceOf[ClassTag[T]]
+ case java.lang.Character.TYPE => ClassTag.Char.asInstanceOf[ClassTag[T]]
+ case java.lang.Integer.TYPE => ClassTag.Int.asInstanceOf[ClassTag[T]]
+ case java.lang.Long.TYPE => ClassTag.Long.asInstanceOf[ClassTag[T]]
+ case java.lang.Float.TYPE => ClassTag.Float.asInstanceOf[ClassTag[T]]
+ case java.lang.Double.TYPE => ClassTag.Double.asInstanceOf[ClassTag[T]]
+ case java.lang.Boolean.TYPE => ClassTag.Boolean.asInstanceOf[ClassTag[T]]
+ case java.lang.Void.TYPE => ClassTag.Unit.asInstanceOf[ClassTag[T]]
+ case ObjectTYPE => ClassTag.Object.asInstanceOf[ClassTag[T]]
+ case NothingTYPE => ClassTag.Nothing.asInstanceOf[ClassTag[T]]
+ case NullTYPE => ClassTag.Null.asInstanceOf[ClassTag[T]]
+ case _ => new ClassTag[T]{ def runtimeClass = runtimeClass1 }
+ }
+
+ def unapply[T](ctag: ClassTag[T]): Option[Class[_]] = Some(ctag.runtimeClass)
+}
diff --git a/src/library/scala/reflect/Code.scala b/src/library/scala/reflect/Code.scala
deleted file mode 100644
index 1a5154c..0000000
--- a/src/library/scala/reflect/Code.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.reflect
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-class Code[T](val tree: Tree)
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-object Code {
- def lift[A](tree: A): Code[A] =
- throw new Error("Code was not lifted by compiler")
-}
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index 229fa72..f62d0ec 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,12 +10,12 @@ package scala.reflect
import scala.collection.mutable.{ ArrayBuilder, WrappedArray }
-/** A Manifest[T] is an opaque descriptor for type T. Its
- * supported use is to give access to the erasure of the type
- * as a Class instance, as is necessary for the creation of native
- * Arrays if the class is not known at compile time.
+/** A `Manifest[T]` is an opaque descriptor for type T. Its supported use
+ * is to give access to the erasure of the type as a `Class` instance, as
+ * is necessary for the creation of native `Arrays` if the class is not
+ * known at compile time.
*
- * The type-relation operators <:< and =:= should be considered
+ * The type-relation operators `<:<` and `=:=` should be considered
* approximations only, as there are numerous aspects of type conformance
* which are not yet adequately represented in manifests.
*
@@ -38,12 +38,14 @@ import scala.collection.mutable.{ ArrayBuilder, WrappedArray }
}}}
*
*/
- at annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
+ at scala.annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
+// TODO undeprecated until Scala reflection becomes non-experimental
+// @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
trait Manifest[T] extends ClassManifest[T] with Equals {
override def typeArguments: List[Manifest[_]] = Nil
override def arrayManifest: Manifest[Array[T]] =
- Manifest.classType[Array[T]](arrayClass[T](erasure))
+ Manifest.classType[Array[T]](arrayClass[T](erasure), this)
override def canEqual(that: Any): Boolean = that match {
case _: Manifest[_] => true
@@ -59,7 +61,9 @@ trait Manifest[T] extends ClassManifest[T] with Equals {
override def hashCode = this.erasure.##
}
-trait AnyValManifest[T] extends Manifest[T] with Equals {
+// TODO undeprecated until Scala reflection becomes non-experimental
+// @deprecated("Use type tags and manually check the corresponding class or type instead", "2.10.0")
+abstract class AnyValManifest[T <: AnyVal](override val toString: String) extends Manifest[T] with Equals {
override def <:<(that: ClassManifest[_]): Boolean =
(that eq this) || (that eq Manifest.Any) || (that eq Manifest.AnyVal)
override def canEqual(other: Any) = other match {
@@ -67,148 +71,139 @@ trait AnyValManifest[T] extends Manifest[T] with Equals {
case _ => false
}
override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
+ override val hashCode = System.identityHashCode(this)
}
-/** The object Manifest defines factory methods for manifests.
- * It is intended for use by the compiler and should not be used
- * in client code.
+/** `ManifestFactory` defines factory methods for manifests.
+ * It is intended for use by the compiler and should not be used in client code.
+ *
+ * Unlike `Manifest`, this factory isn't annotated with a deprecation warning.
+ * This is done to prevent avalanches of deprecation warnings in the code that calls methods with manifests.
+ * Why so complicated? Read up the comments for `ClassManifestFactory`.
*/
-object Manifest {
- private def ObjectClass = classOf[java.lang.Object]
+object ManifestFactory {
+ def valueManifests: List[AnyValManifest[_]] =
+ List(Byte, Short, Char, Int, Long, Float, Double, Boolean, Unit)
- val Byte: AnyValManifest[Byte] = new AnyValManifest[scala.Byte] {
- def erasure = java.lang.Byte.TYPE
- override def toString = "Byte"
+ val Byte: AnyValManifest[Byte] = new AnyValManifest[scala.Byte]("Byte") {
+ def runtimeClass = java.lang.Byte.TYPE
override def newArray(len: Int): Array[Byte] = new Array[Byte](len)
override def newWrappedArray(len: Int): WrappedArray[Byte] = new WrappedArray.ofByte(new Array[Byte](len))
override def newArrayBuilder(): ArrayBuilder[Byte] = new ArrayBuilder.ofByte()
private def readResolve(): Any = Manifest.Byte
}
- val Short: AnyValManifest[Short] = new AnyValManifest[scala.Short] {
- def erasure = java.lang.Short.TYPE
- override def toString = "Short"
+ val Short: AnyValManifest[Short] = new AnyValManifest[scala.Short]("Short") {
+ def runtimeClass = java.lang.Short.TYPE
override def newArray(len: Int): Array[Short] = new Array[Short](len)
override def newWrappedArray(len: Int): WrappedArray[Short] = new WrappedArray.ofShort(new Array[Short](len))
override def newArrayBuilder(): ArrayBuilder[Short] = new ArrayBuilder.ofShort()
private def readResolve(): Any = Manifest.Short
}
- val Char: AnyValManifest[Char] = new AnyValManifest[scala.Char] {
- def erasure = java.lang.Character.TYPE
- override def toString = "Char"
+ val Char: AnyValManifest[Char] = new AnyValManifest[scala.Char]("Char") {
+ def runtimeClass = java.lang.Character.TYPE
override def newArray(len: Int): Array[Char] = new Array[Char](len)
override def newWrappedArray(len: Int): WrappedArray[Char] = new WrappedArray.ofChar(new Array[Char](len))
override def newArrayBuilder(): ArrayBuilder[Char] = new ArrayBuilder.ofChar()
private def readResolve(): Any = Manifest.Char
}
- val Int: AnyValManifest[Int] = new AnyValManifest[scala.Int] {
- def erasure = java.lang.Integer.TYPE
- override def toString = "Int"
+ val Int: AnyValManifest[Int] = new AnyValManifest[scala.Int]("Int") {
+ def runtimeClass = java.lang.Integer.TYPE
override def newArray(len: Int): Array[Int] = new Array[Int](len)
override def newWrappedArray(len: Int): WrappedArray[Int] = new WrappedArray.ofInt(new Array[Int](len))
override def newArrayBuilder(): ArrayBuilder[Int] = new ArrayBuilder.ofInt()
private def readResolve(): Any = Manifest.Int
}
- val Long: AnyValManifest[Long] = new AnyValManifest[scala.Long] {
- def erasure = java.lang.Long.TYPE
- override def toString = "Long"
+ val Long: AnyValManifest[Long] = new AnyValManifest[scala.Long]("Long") {
+ def runtimeClass = java.lang.Long.TYPE
override def newArray(len: Int): Array[Long] = new Array[Long](len)
override def newWrappedArray(len: Int): WrappedArray[Long] = new WrappedArray.ofLong(new Array[Long](len))
override def newArrayBuilder(): ArrayBuilder[Long] = new ArrayBuilder.ofLong()
private def readResolve(): Any = Manifest.Long
}
- val Float: AnyValManifest[Float] = new AnyValManifest[scala.Float] {
- def erasure = java.lang.Float.TYPE
- override def toString = "Float"
+ val Float: AnyValManifest[Float] = new AnyValManifest[scala.Float]("Float") {
+ def runtimeClass = java.lang.Float.TYPE
override def newArray(len: Int): Array[Float] = new Array[Float](len)
override def newWrappedArray(len: Int): WrappedArray[Float] = new WrappedArray.ofFloat(new Array[Float](len))
override def newArrayBuilder(): ArrayBuilder[Float] = new ArrayBuilder.ofFloat()
private def readResolve(): Any = Manifest.Float
}
- val Double: AnyValManifest[Double] = new AnyValManifest[scala.Double] {
- def erasure = java.lang.Double.TYPE
- override def toString = "Double"
+ val Double: AnyValManifest[Double] = new AnyValManifest[scala.Double]("Double") {
+ def runtimeClass = java.lang.Double.TYPE
override def newArray(len: Int): Array[Double] = new Array[Double](len)
override def newWrappedArray(len: Int): WrappedArray[Double] = new WrappedArray.ofDouble(new Array[Double](len))
override def newArrayBuilder(): ArrayBuilder[Double] = new ArrayBuilder.ofDouble()
private def readResolve(): Any = Manifest.Double
}
- val Boolean: AnyValManifest[Boolean] = new AnyValManifest[scala.Boolean] {
- def erasure = java.lang.Boolean.TYPE
- override def toString = "Boolean"
+ val Boolean: AnyValManifest[Boolean] = new AnyValManifest[scala.Boolean]("Boolean") {
+ def runtimeClass = java.lang.Boolean.TYPE
override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len)
override def newWrappedArray(len: Int): WrappedArray[Boolean] = new WrappedArray.ofBoolean(new Array[Boolean](len))
override def newArrayBuilder(): ArrayBuilder[Boolean] = new ArrayBuilder.ofBoolean()
private def readResolve(): Any = Manifest.Boolean
}
- val Unit: AnyValManifest[Unit] = new AnyValManifest[scala.Unit] {
- def erasure = java.lang.Void.TYPE
- override def toString = "Unit"
+ val Unit: AnyValManifest[Unit] = new AnyValManifest[scala.Unit]("Unit") {
+ def runtimeClass = java.lang.Void.TYPE
override def newArray(len: Int): Array[Unit] = new Array[Unit](len)
override def newWrappedArray(len: Int): WrappedArray[Unit] = new WrappedArray.ofUnit(new Array[Unit](len))
override def newArrayBuilder(): ArrayBuilder[Unit] = new ArrayBuilder.ofUnit()
private def readResolve(): Any = Manifest.Unit
}
- val Any: Manifest[Any] = new ClassTypeManifest[scala.Any](None, ObjectClass, Nil) {
- override def toString = "Any"
+ private val ObjectTYPE = classOf[java.lang.Object]
+ private val NothingTYPE = classOf[scala.runtime.Nothing$]
+ private val NullTYPE = classOf[scala.runtime.Null$]
+
+ val Any: Manifest[scala.Any] = new PhantomManifest[scala.Any](ObjectTYPE, "Any") {
+ override def newArray(len: Int) = new Array[scala.Any](len)
override def <:<(that: ClassManifest[_]): Boolean = (that eq this)
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Any
}
- val Object: Manifest[Object] = new ClassTypeManifest[java.lang.Object](None, ObjectClass, Nil) {
- override def toString = "Object"
+ val Object: Manifest[java.lang.Object] = new PhantomManifest[java.lang.Object](ObjectTYPE, "Object") {
+ override def newArray(len: Int) = new Array[java.lang.Object](len)
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Object
}
- val AnyVal: Manifest[AnyVal] = new ClassTypeManifest[scala.AnyVal](None, ObjectClass, Nil) {
- override def toString = "AnyVal"
+ val AnyRef: Manifest[scala.AnyRef] = Object.asInstanceOf[Manifest[scala.AnyRef]]
+
+ val AnyVal: Manifest[scala.AnyVal] = new PhantomManifest[scala.AnyVal](ObjectTYPE, "AnyVal") {
+ override def newArray(len: Int) = new Array[scala.AnyVal](len)
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.AnyVal
}
- val Null: Manifest[Null] = new ClassTypeManifest[scala.Null](None, ObjectClass, Nil) {
- override def toString = "Null"
+ val Null: Manifest[scala.Null] = new PhantomManifest[scala.Null](NullTYPE, "Null") {
+ override def newArray(len: Int) = new Array[scala.Null](len)
override def <:<(that: ClassManifest[_]): Boolean =
(that ne null) && (that ne Nothing) && !(that <:< AnyVal)
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Null
}
- val Nothing: Manifest[Nothing] = new ClassTypeManifest[scala.Nothing](None, ObjectClass, Nil) {
- override def toString = "Nothing"
+ val Nothing: Manifest[scala.Nothing] = new PhantomManifest[scala.Nothing](NothingTYPE, "Nothing") {
+ override def newArray(len: Int) = new Array[scala.Nothing](len)
override def <:<(that: ClassManifest[_]): Boolean = (that ne null)
- override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
- override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Nothing
}
private class SingletonTypeManifest[T <: AnyRef](value: AnyRef) extends Manifest[T] {
- lazy val erasure = value.getClass
+ lazy val runtimeClass = value.getClass
override lazy val toString = value.toString + ".type"
}
- /** Manifest for the singleton type `value.type'. */
+ /** Manifest for the singleton type `value.type`. */
def singleType[T <: AnyRef](value: AnyRef): Manifest[T] =
new SingletonTypeManifest[T](value)
- /** Manifest for the class type `clazz[args]', where `clazz' is
+ /** Manifest for the class type `clazz[args]`, where `clazz` is
* a top-level or static class.
* @note This no-prefix, no-arguments case is separate because we
* it's called from ScalaRunTime.boxArray itself. If we
@@ -218,21 +213,27 @@ object Manifest {
def classType[T](clazz: Predef.Class[_]): Manifest[T] =
new ClassTypeManifest[T](None, clazz, Nil)
- /** Manifest for the class type `clazz', where `clazz' is
+ /** Manifest for the class type `clazz`, where `clazz` is
* a top-level or static class and args are its type arguments. */
def classType[T](clazz: Predef.Class[T], arg1: Manifest[_], args: Manifest[_]*): Manifest[T] =
new ClassTypeManifest[T](None, clazz, arg1 :: args.toList)
- /** Manifest for the class type `clazz[args]', where `clazz' is
+ /** Manifest for the class type `clazz[args]`, where `clazz` is
* a class with non-package prefix type `prefix` and type arguments `args`.
*/
def classType[T](prefix: Manifest[_], clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
new ClassTypeManifest[T](Some(prefix), clazz, args.toList)
- /** Manifest for the class type `clazz[args]', where `clazz' is
+ private abstract class PhantomManifest[T](_runtimeClass: Predef.Class[_],
+ override val toString: String) extends ClassTypeManifest[T](None, _runtimeClass, Nil) {
+ override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override val hashCode = System.identityHashCode(this)
+ }
+
+ /** Manifest for the class type `clazz[args]`, where `clazz` is
* a top-level or static class. */
private class ClassTypeManifest[T](prefix: Option[Manifest[_]],
- val erasure: Predef.Class[_],
+ val runtimeClass: Predef.Class[_],
override val typeArguments: List[Manifest[_]]) extends Manifest[T] {
override def toString =
(if (prefix.isEmpty) "" else prefix.get.toString+"#") +
@@ -243,21 +244,21 @@ object Manifest {
def arrayType[T](arg: Manifest[_]): Manifest[Array[T]] =
arg.asInstanceOf[Manifest[T]].arrayManifest
- /** Manifest for the abstract type `prefix # name'. `upperBound' is not
+ /** Manifest for the abstract type `prefix # name'. `upperBound` is not
* strictly necessary as it could be obtained by reflection. It was
* added so that erasure can be calculated without reflection. */
- def abstractType[T](prefix: Manifest[_], name: String, clazz: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
+ def abstractType[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Manifest[_]*): Manifest[T] =
new Manifest[T] {
- def erasure = clazz
+ def runtimeClass = upperBound
override val typeArguments = args.toList
override def toString = prefix.toString+"#"+name+argString
}
- /** Manifest for the unknown type `_ >: L <: U' in an existential.
+ /** Manifest for the unknown type `_ >: L <: U` in an existential.
*/
def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] =
new Manifest[T] {
- def erasure = upperBound.erasure
+ def runtimeClass = upperBound.erasure
override def toString =
"_" +
(if (lowerBound eq Nothing) "" else " >: "+lowerBound) +
@@ -267,7 +268,7 @@ object Manifest {
/** Manifest for the intersection type `parents_0 with ... with parents_n'. */
def intersectionType[T](parents: Manifest[_]*): Manifest[T] =
new Manifest[T] {
- def erasure = parents.head.erasure
+ def runtimeClass = parents.head.erasure
override def toString = parents.mkString(" with ")
}
}
diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala
old mode 100644
new mode 100755
index 7bc5a61..384ebc6
--- a/src/library/scala/reflect/NameTransformer.scala
+++ b/src/library/scala/reflect/NameTransformer.scala
@@ -1,17 +1,24 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.reflect
+package scala
+package reflect
-/**
- * @author Martin Odersky
+/** Provides functions to encode and decode Scala symbolic names.
+ * Also provides some constants.
*/
object NameTransformer {
+ // XXX Short term: providing a way to alter these without having to recompile
+ // the compiler before recompiling the compiler.
+ val MODULE_SUFFIX_STRING = sys.props.getOrElse("SCALA_MODULE_SUFFIX_STRING", "$")
+ val NAME_JOIN_STRING = sys.props.getOrElse("SCALA_NAME_JOIN_STRING", "$")
+ val MODULE_INSTANCE_NAME = "MODULE$"
+
private val nops = 128
private val ncodes = 26 * 26
@@ -130,10 +137,10 @@ object NameTransformer {
unicode = true
} catch {
case _:NumberFormatException =>
- /* <code>hex</code> did not decode to a hexadecimal number, so
+ /* `hex` did not decode to a hexadecimal number, so
* do nothing. */
}
- }
+ }
}
}
/* If we didn't see an opcode or encoded Unicode glyph, and the
diff --git a/src/library/scala/reflect/NoManifest.scala b/src/library/scala/reflect/NoManifest.scala
index f791cd4..61bc5e2 100644
--- a/src/library/scala/reflect/NoManifest.scala
+++ b/src/library/scala/reflect/NoManifest.scala
@@ -1,17 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.reflect
-/** <p> One of the branches of an OptManifest
+/** One of the branches of an [[scala.reflect.OptManifest]].
*/
+// TODO undeprecated until Scala reflection becomes non-experimental
+// @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
object NoManifest extends OptManifest[Nothing] with Serializable {
override def toString = "<?>"
-}
+}
\ No newline at end of file
diff --git a/src/library/scala/reflect/OptManifest.scala b/src/library/scala/reflect/OptManifest.scala
index 8e64da3..5e373c7 100644
--- a/src/library/scala/reflect/OptManifest.scala
+++ b/src/library/scala/reflect/OptManifest.scala
@@ -1,21 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.reflect
-/** <p>
- * A <code>OptManifest[T]</code> is an optional <a href="Manifest.html"
- * target="ContentFrame"><code>Manifest</code></a>.<br/>
- * It is either a <code>Manifest</code> or the value <code>NoManifest</code>.
- * </p>
+/** A `OptManifest[T]` is an optional [[scala.reflect.Manifest]].
+ *
+ * It is either a `Manifest` or the value `NoManifest`.
*
* @author Martin Odersky
*/
-trait OptManifest[+T] extends Serializable
+// TODO undeprecated until Scala reflection becomes non-experimental
+// @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
+trait OptManifest[+T] extends Serializable
\ No newline at end of file
diff --git a/src/library/scala/reflect/Print.scala b/src/library/scala/reflect/Print.scala
deleted file mode 100644
index 2efc791..0000000
--- a/src/library/scala/reflect/Print.scala
+++ /dev/null
@@ -1,113 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.reflect
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-object Print extends Function1[Any, String] {
-
- def apply(any: Any): String = any match {
- case x: Code[_] =>
- apply(x.tree)
- case x: Tree =>
- apply(x)
- case x: Symbol =>
- apply(x)
- case x: Type =>
- apply(x)
- case _ =>
- "UnknownAny"
- }
-
- def apply(tree: Tree): String = tree match {
- case reflect.Ident(sym) =>
- Print(sym)
- case reflect.Select(qual, sym) =>
- Print(qual) + "." + Print(sym)
- case reflect.Literal(value) => value match {
- case s:String => "\"" + s + "\""
- case _ => value.toString
- }
- case reflect.Apply(fun, args) =>
- Print(fun) + args.map(Print).mkString("(", ", ", ")")
- case reflect.TypeApply(fun, args) =>
- Print(fun) + args.map(Print).mkString("[", ", ", "]")
- case reflect.Function(params, body) =>
- params.map(Print).mkString("(", ", ", ")") + " => " + Print(body)
- case reflect.This(sym) =>
- Print(sym)+".this"
- case reflect.Block(stats, expr) =>
- (stats ::: List(expr)).map(Print).mkString("{\n", ";\n", "\n}")
- case reflect.New(tpt) =>
- "new " + Print(tpt)
- case reflect.If(condition, trueCase, falseCase) =>
- "if (" + Print(condition) + ") " + Print(trueCase) + " else " + Print(falseCase)
- case reflect.Assign(destination: Tree, source: Tree) =>
- Print(destination) + " = " + Print(source)
- case reflect.Target(sym, body) =>
- "target " + Print(sym) + " {\n" + Print(body) + "\n}"
- case reflect.Goto(target) =>
- "goto " + Print(target)
- case _ =>
- "???"
- }
-
- def apply(symbol: Symbol): String = symbol match {
- case reflect.Class(name) =>
- name.substring(name.lastIndexOf('.') + 1)
- case reflect.Method(name, datatype) =>
- name.substring(name.lastIndexOf('.') +1)
- case reflect.Field(name, datatype) =>
- name.substring(name.lastIndexOf('.') + 1)
- case reflect.TypeField(name, datatype) =>
- name.substring(name.lastIndexOf('.') + 1)
- case reflect.LocalValue(owner, name, datatype) =>
- name.substring(name.lastIndexOf('.') + 1)
- case reflect.LocalMethod(owner, name, datatype) =>
- name.substring(name.lastIndexOf('.') + 1)
- case reflect.NoSymbol =>
- "NoSymbol"
- case reflect.RootSymbol =>
- "RootSymbol"
- case reflect.LabelSymbol(name) =>
- name
- case _ =>
- "???"
- }
-
- def apply(datatype: Type): String = datatype match {
- case reflect.NoPrefix =>
- "NoPrefix"
- case reflect.NoType =>
- "NoType"
- case reflect.NamedType(name) =>
- "(named: " + name + ")"
- case reflect.PrefixedType(prefix, symbol) =>
- "(" + Print(prefix) + "." + Print(symbol) + ")"
- case reflect.SingleType(prefix, symbol) =>
- "(" + Print(prefix) + "." + Print(symbol) + ")"
- case reflect.ThisType(clazz) =>
- "(" + Print(clazz) + ".this.type)"
- case reflect.AppliedType(datatype, args) =>
- Print(datatype) + args.map(Print).mkString("[", ", ", "]")
- case reflect.TypeBounds(lo, hi) =>
- "[" + Print(lo) + " ... " + Print(hi) + "]"
- case reflect.MethodType(formals, resultType) =>
- formals.map(Print).mkString("(", ", ", ")") + " => " + Print(resultType)
- case reflect.NullaryMethodType(resultType) =>
- " => " + Print(resultType)
- case reflect.PolyType(typeParams, typeBounds, resultType) =>
- val z = (typeParams, typeBounds).zipped map ((tp, tb) => "[" + Print(tb._1) + " :> " + Print(tp) + " :> " + Print(tb._2) + "]")
- z.mkString("[", ", ", "]") + " -> " + Print(resultType)
- case _ =>
- "???"
- }
-
-}
diff --git a/src/library/scala/reflect/ScalaBeanInfo.scala b/src/library/scala/reflect/ScalaBeanInfo.scala
deleted file mode 100644
index bcb76c3..0000000
--- a/src/library/scala/reflect/ScalaBeanInfo.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.reflect
-
-/** Provides some simple runtime processing necessary to create
- * JavaBean descriptors for Scala entities. The compiler creates
- * subclasses of this class automatically when the BeanInfo annotation is
- * attached to a class.
- *
- * @author Ross Judson (rjudson at managedobjects.com)
- */
-abstract class ScalaBeanInfo(clazz: java.lang.Class[_],
- props: Array[String],
- methods: Array[String]) extends java.beans.SimpleBeanInfo {
-
- import java.beans._
-
- private val pd = new Array[PropertyDescriptor](props.length / 3)
- private val md =
- for (m <- clazz.getMethods if methods.exists(_ == m.getName))
- yield new MethodDescriptor(m)
-
- init
-
- override def getPropertyDescriptors() = pd
- override def getMethodDescriptors() = md
-
- // override def getAdditionalBeanInfo() = Array(Introspector getBeanInfo clazz.getSuperclass)
-
- private def init() {
- var i = 0;
- while (i < props.length) {
- pd(i/3) = new PropertyDescriptor(props(i), clazz, props(i+1), props(i+2))
- i = i + 3;
- }
- }
-
-}
-
diff --git a/src/library/scala/reflect/ScalaLongSignature.java b/src/library/scala/reflect/ScalaLongSignature.java
index 1ffd6d2..5b6d78f 100644
--- a/src/library/scala/reflect/ScalaLongSignature.java
+++ b/src/library/scala/reflect/ScalaLongSignature.java
@@ -5,7 +5,6 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
-/** */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface ScalaLongSignature {
diff --git a/src/library/scala/reflect/ScalaSignature.java b/src/library/scala/reflect/ScalaSignature.java
index d1cdbc0..a8af554 100644
--- a/src/library/scala/reflect/ScalaSignature.java
+++ b/src/library/scala/reflect/ScalaSignature.java
@@ -5,7 +5,6 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
-/** */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface ScalaSignature {
diff --git a/src/library/scala/reflect/Symbol.scala b/src/library/scala/reflect/Symbol.scala
deleted file mode 100644
index b2f8cd9..0000000
--- a/src/library/scala/reflect/Symbol.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.reflect
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-abstract class Symbol {
- val owner: Symbol
- val name: String
- val tpe: Type
-}
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-abstract class GlobalSymbol(val fullname: String) extends Symbol {
- private val pointIndex = fullname.lastIndexOf(".")
- val owner: Symbol =
- if (pointIndex < 0) RootSymbol
- else Class(fullname.substring(0, pointIndex))
- val name: String =
- if (pointIndex < 0) fullname
- else fullname.substring(pointIndex+1, fullname.length())
-}
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-abstract class LocalSymbol extends Symbol
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class Class(override val fullname: String) extends GlobalSymbol(fullname) {
- val tpe = NamedType(fullname)
-}
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class Method(override val fullname: String, tpe: Type) extends GlobalSymbol(fullname)
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class Field(override val fullname: String, tpe: Type) extends GlobalSymbol(fullname)
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class TypeField(override val fullname: String, tpe: Type) extends GlobalSymbol(fullname)
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */case class LocalValue(owner: Symbol, name: String, tpe: Type) extends LocalSymbol
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class LocalMethod(owner: Symbol, name: String, tpe: Type) extends LocalSymbol
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case object NoSymbol extends Symbol {
- val owner = null
- val name = null
- val tpe = NoType
-}
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case object RootSymbol extends Symbol {
- val owner = NoSymbol
- val name = "<root>"
- val tpe = NoPrefix
-}
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class LabelSymbol(val name: String) extends Symbol {
- val owner = NoSymbol
- val tpe = NamedType("scala.Unit")
-}
-
-
-/* Standard pattern match:
-
- case reflect.Class(fullname) =>
- case reflect.Method(fullname, tpe) =>
- case reflect.Field(fullname, tpe) =>
- case reflect.TypeField(fullname, tpe) =>
- case reflect.LocalValue(owner, name, tpe) =>
- case reflect.LocalMethod(owner, name, tpe) =>
- case reflect.NoSymbol =>
- case reflect.RootSymbol =>
- case reflect.LabelSymbol(name) =>
-*/
diff --git a/src/library/scala/reflect/Tree.scala b/src/library/scala/reflect/Tree.scala
deleted file mode 100644
index 4f44211..0000000
--- a/src/library/scala/reflect/Tree.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.reflect
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-abstract class Tree
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class Ident(sym: Symbol) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class Select(qual: Tree, sym: Symbol) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class Literal(value: Any) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class Apply(fun: Tree, args: List[Tree]) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class TypeApply(fun: Tree, args: List[Type]) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class Function(params: List[Symbol], body: Tree) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class This(sym: Symbol) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class Block(stats: List[Tree], expr: Tree) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class New(sym: Tree) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class If(condition: Tree, trueCase: Tree, falseCase: Tree) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class Assign(destination: Tree, source: Tree) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class Target(sym: LabelSymbol, body: Tree) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class Goto(target: LabelSymbol) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class ValDef(sym: Symbol, rhs: Tree) extends Tree
-
-//Monomorphic
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class ClassDef(sym: Symbol, tpe: Type, impl: Template) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class DefDef(sym: Symbol, vparamss: List[List[Tree]], ret: Type, rhs: Tree) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class Super(psym: Symbol) extends Tree
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class Template(parents: List[Type], body: List[Tree]) extends Tree
diff --git a/src/library/scala/reflect/Type.scala b/src/library/scala/reflect/Type.scala
deleted file mode 100644
index 5aa92b3..0000000
--- a/src/library/scala/reflect/Type.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.reflect
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-abstract class Type
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case object NoPrefix extends Type
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case object NoType extends Type
-
-/** This type is required by the compiler and <b>should not be used in client code</b>.
- * fullname */
-case class NamedType(fullname: String) extends Type
-
-/** This type is required by the compiler and <b>should not be used in client code</b>.
- * pre # sym */
-case class PrefixedType(pre: Type, sym: Symbol) extends Type
-
-/** This type is required by the compiler and <b>should not be used in client code</b>.
- * pre.type # sym == pre.sym */
-case class SingleType(pre: Type, sym: Symbol) extends Type
-
-/** This type is required by the compiler and <b>should not be used in client code</b>.
- * clazz.this */
-case class ThisType(clazz: Symbol) extends Type
-
-/** This type is required by the compiler and <b>should not be used in client code</b>.
- * clazz.super[superClazz]
- * <code>tpe[args1, ..., argsn]</code> */
-case class AppliedType(tpe: Type, args: List[Type]) extends Type
-
-/** This type is required by the compiler and <b>should not be used in client code</b>.
- * [a <: lo >: hi] */
-case class TypeBounds(lo: Type, hi: Type) extends Type
-
-/** This type is required by the compiler and <b>should not be used in client code</b>.
- * <code>(formals1 ... formalsn) restpe</code> */
-case class MethodType(formals: List[Symbol], restpe: Type) extends Type
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class NullaryMethodType(resultType: Type) extends Type
-
-/** This type is required by the compiler and <b>should not be used in client code</b>. */
-case class PolyType(typeParams: List[Symbol], typeBounds: List[(Type, Type)], resultType: Type) extends Type
-
-/* Standard pattern match:
-
- case reflect.NoPrefix =>
- case reflect.NoType =>
- case reflect.NamedType(fullname) =>
- case reflect.PrefixedType(pre, sym) =>
- case reflect.SingleType(pre, sym) =>
- case reflect.ThisType(clazz) =>
- case reflect.AppliedType(tpe, args) =>
- case reflect.TypeBounds(lo, hi) =>
- case reflect.MethodType(formals, restpe) =>
- case reflect.NullaryMethodType(restpe) =>
- case reflect.PolyType(typeParams, typeBounds, resultType) =>
-*/
diff --git a/src/library/scala/reflect/generic/AnnotationInfos.scala b/src/library/scala/reflect/generic/AnnotationInfos.scala
deleted file mode 100644
index f995a2b..0000000
--- a/src/library/scala/reflect/generic/AnnotationInfos.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-package scala.reflect
-package generic
-
- at deprecated("scala.reflect.generic will be removed", "2.9.1") trait AnnotationInfos { self: Universe =>
-
- type AnnotationInfo <: AnyRef
- val AnnotationInfo: AnnotationInfoExtractor
-
- abstract class AnnotationInfoExtractor {
- def apply(atp: Type, args: List[Tree], assocs: List[(Name, ClassfileAnnotArg)]): AnnotationInfo
- def unapply(info: AnnotationInfo): Option[(Type, List[Tree], List[(Name, ClassfileAnnotArg)])]
- }
-
- type ClassfileAnnotArg <: AnyRef
- implicit def classfileAnnotArgManifest: ClassManifest[ClassfileAnnotArg] // need a precise manifest to pass to UnPickle's toArray call
-
- type LiteralAnnotArg <: ClassfileAnnotArg
- val LiteralAnnotArg: LiteralAnnotArgExtractor
-
- type ArrayAnnotArg <: ClassfileAnnotArg
- val ArrayAnnotArg: ArrayAnnotArgExtractor
-
- type NestedAnnotArg <: ClassfileAnnotArg
- val NestedAnnotArg: NestedAnnotArgExtractor
-
- abstract class LiteralAnnotArgExtractor {
- def apply(const: Constant): LiteralAnnotArg
- def unapply(arg: LiteralAnnotArg): Option[Constant]
- }
-
- abstract class ArrayAnnotArgExtractor {
- def apply(const: Array[ClassfileAnnotArg]): ArrayAnnotArg
- def unapply(arg: ArrayAnnotArg): Option[Array[ClassfileAnnotArg]]
- }
-
- abstract class NestedAnnotArgExtractor {
- def apply(anninfo: AnnotationInfo): NestedAnnotArg
- def unapply(arg: NestedAnnotArg): Option[AnnotationInfo]
- }
-}
-
-
diff --git a/src/library/scala/reflect/generic/ByteCodecs.scala b/src/library/scala/reflect/generic/ByteCodecs.scala
deleted file mode 100644
index 8993e06..0000000
--- a/src/library/scala/reflect/generic/ByteCodecs.scala
+++ /dev/null
@@ -1,216 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-package scala.reflect.generic
-
-object ByteCodecs {
-
- def avoidZero(src: Array[Byte]): Array[Byte] = {
- var i = 0
- val srclen = src.length
- var count = 0
- while (i < srclen) {
- if (src(i) == 0x7f) count += 1
- i += 1
- }
- val dst = new Array[Byte](srclen + count)
- i = 0
- var j = 0
- while (i < srclen) {
- val in = src(i)
- if (in == 0x7f) {
- dst(j) = (0xc0).toByte
- dst(j + 1) = (0x80).toByte
- j += 2
- } else {
- dst(j) = (in + 1).toByte
- j += 1
- }
- i += 1
- }
- dst
- }
-
- def regenerateZero(src: Array[Byte]): Int = {
- var i = 0
- val srclen = src.length
- var j = 0
- while (i < srclen) {
- val in: Int = src(i) & 0xff
- if (in == 0xc0 && (src(i + 1) & 0xff) == 0x80) {
- src(j) = 0x7f
- i += 2
- } else {
- src(j) = (in - 1).toByte
- i += 1
- }
- j += 1
- }
- j
- }
-
- def encode8to7(src: Array[Byte]): Array[Byte] = {
- val srclen = src.length
- val dstlen = (srclen * 8 + 6) / 7
- val dst = new Array[Byte](dstlen)
- var i = 0
- var j = 0
- while (i + 6 < srclen) {
- var in: Int = src(i) & 0xff
- dst(j) = (in & 0x7f).toByte
- var out: Int = in >>> 7
- in = src(i + 1) & 0xff
- dst(j + 1) = (out | (in << 1) & 0x7f).toByte
- out = in >>> 6
- in = src(i + 2) & 0xff
- dst(j + 2) = (out | (in << 2) & 0x7f).toByte
- out = in >>> 5
- in = src(i + 3) & 0xff
- dst(j + 3) = (out | (in << 3) & 0x7f).toByte
- out = in >>> 4
- in = src(i + 4) & 0xff
- dst(j + 4) = (out | (in << 4) & 0x7f).toByte
- out = in >>> 3
- in = src(i + 5) & 0xff
- dst(j + 5) = (out | (in << 5) & 0x7f).toByte
- out = in >>> 2
- in = src(i + 6) & 0xff
- dst(j + 6) = (out | (in << 6) & 0x7f).toByte
- out = in >>> 1
- dst(j + 7) = out.toByte
- i += 7
- j += 8
- }
- if (i < srclen) {
- var in: Int = src(i) & 0xff
- dst(j) = (in & 0x7f).toByte; j += 1
- var out: Int = in >>> 7
- if (i + 1 < srclen) {
- in = src(i + 1) & 0xff
- dst(j) = (out | (in << 1) & 0x7f).toByte; j += 1
- out = in >>> 6
- if (i + 2 < srclen) {
- in = src(i + 2) & 0xff
- dst(j) = (out | (in << 2) & 0x7f).toByte; j += 1
- out = in >>> 5
- if (i + 3 < srclen) {
- in = src(i + 3) & 0xff
- dst(j) = (out | (in << 3) & 0x7f).toByte; j += 1
- out = in >>> 4
- if (i + 4 < srclen) {
- in = src(i + 4) & 0xff
- dst(j) = (out | (in << 4) & 0x7f).toByte; j += 1
- out = in >>> 3
- if (i + 5 < srclen) {
- in = src(i + 5) & 0xff
- dst(j) = (out | (in << 5) & 0x7f).toByte; j += 1
- out = in >>> 2
- }
- }
- }
- }
- }
- if (j < dstlen) dst(j) = out.toByte
- }
- dst
- }
-
- @deprecated("use 2-argument version instead", "2.8.0")
- def decode7to8(src: Array[Byte], srclen: Int, dstlen: Int) { decode7to8(src, srclen) }
-
- def decode7to8(src: Array[Byte], srclen: Int): Int = {
- var i = 0
- var j = 0
- val dstlen = (srclen * 7 + 7) / 8
- while (i + 7 < srclen) {
- var out: Int = src(i)
- var in: Byte = src(i + 1)
- src(j) = (out | (in & 0x01) << 7).toByte
- out = in >>> 1
- in = src(i + 2)
- src(j + 1) = (out | (in & 0x03) << 6).toByte
- out = in >>> 2
- in = src(i + 3)
- src(j + 2) = (out | (in & 0x07) << 5).toByte
- out = in >>> 3
- in = src(i + 4)
- src(j + 3) = (out | (in & 0x0f) << 4).toByte
- out = in >>> 4
- in = src(i + 5)
- src(j + 4) = (out | (in & 0x1f) << 3).toByte
- out = in >>> 5
- in = src(i + 6)
- src(j + 5) = (out | (in & 0x3f) << 2).toByte
- out = in >>> 6
- in = src(i + 7)
- src(j + 6) = (out | in << 1).toByte
- i += 8
- j += 7
- }
- if (i < srclen) {
- var out: Int = src(i)
- if (i + 1 < srclen) {
- var in: Byte = src(i + 1)
- src(j) = (out | (in & 0x01) << 7).toByte; j += 1
- out = in >>> 1
- if (i + 2 < srclen) {
- in = src(i + 2)
- src(j) = (out | (in & 0x03) << 6).toByte; j += 1
- out = in >>> 2
- if (i + 3 < srclen) {
- in = src(i + 3)
- src(j) = (out | (in & 0x07) << 5).toByte; j += 1
- out = in >>> 3
- if (i + 4 < srclen) {
- in = src(i + 4)
- src(j) = (out | (in & 0x0f) << 4).toByte; j += 1
- out = in >>> 4
- if (i + 5 < srclen) {
- in = src(i + 5)
- src(j) = (out | (in & 0x1f) << 3).toByte; j += 1
- out = in >>> 5
- if (i + 6 < srclen) {
- in = src(i + 6)
- src(j) = (out | (in & 0x3f) << 2).toByte; j += 1
- out = in >>> 6
- }
- }
- }
- }
- }
- }
- if (j < dstlen) src(j) = out.toByte
- }
- dstlen
- }
-
- def encode(xs: Array[Byte]): Array[Byte] = avoidZero(encode8to7(xs))
-
- @deprecated("use 1-argument version instead", "2.8.0")
- def decode(xs: Array[Byte], dstlen: Int) { decode(xs) }
-
- /**
- * Destructively decodes array xs and returns the length of the decoded array.
- *
- * Sometimes returns (length+1) of the decoded array. Example:
- *
- * scala> val enc = reflect.generic.ByteCodecs.encode(Array(1,2,3))
- * enc: Array[Byte] = Array(2, 5, 13, 1)
- *
- * scala> reflect.generic.ByteCodecs.decode(enc)
- * res43: Int = 4
- *
- * scala> enc
- * res44: Array[Byte] = Array(1, 2, 3, 0)
- *
- * However, this does not always happen.
- */
- def decode(xs: Array[Byte]): Int = {
- val len = regenerateZero(xs)
- decode7to8(xs, len)
- }
-}
diff --git a/src/library/scala/reflect/generic/Constants.scala b/src/library/scala/reflect/generic/Constants.scala
deleted file mode 100644
index ca04fda..0000000
--- a/src/library/scala/reflect/generic/Constants.scala
+++ /dev/null
@@ -1,238 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.reflect
-package generic
-
-import java.lang.Integer.toOctalString
-import annotation.switch
-
- at deprecated("scala.reflect.generic will be removed", "2.9.1") trait Constants {
- self: Universe =>
-
- import definitions._
-
- final val NoTag = 0
- final val UnitTag = 1
- final val BooleanTag = 2
- final val ByteTag = 3
- final val ShortTag = 4
- final val CharTag = 5
- final val IntTag = 6
- final val LongTag = 7
- final val FloatTag = 8
- final val DoubleTag = 9
- final val StringTag = 10
- final val NullTag = 11
- final val ClassTag = 12
- // For supporting java enumerations inside java annotations (see ClassfileParser)
- final val EnumTag = 13
-
- case class Constant(value: Any) {
- val tag: Int = value match {
- case null => NullTag
- case x: Unit => UnitTag
- case x: Boolean => BooleanTag
- case x: Byte => ByteTag
- case x: Short => ShortTag
- case x: Int => IntTag
- case x: Long => LongTag
- case x: Float => FloatTag
- case x: Double => DoubleTag
- case x: String => StringTag
- case x: Char => CharTag
- case x: AbsType => ClassTag
- case x: AbsSymbol => EnumTag
- case _ => throw new Error("bad constant value: " + value)
- }
-
- def isByteRange: Boolean = isIntRange && Byte.MinValue <= intValue && intValue <= Byte.MaxValue
- def isShortRange: Boolean = isIntRange && Short.MinValue <= intValue && intValue <= Short.MaxValue
- def isCharRange: Boolean = isIntRange && Char.MinValue <= intValue && intValue <= Char.MaxValue
- def isIntRange: Boolean = ByteTag <= tag && tag <= IntTag
- def isLongRange: Boolean = ByteTag <= tag && tag <= LongTag
- def isFloatRange: Boolean = ByteTag <= tag && tag <= FloatTag
- def isNumeric: Boolean = ByteTag <= tag && tag <= DoubleTag
-
- def tpe: Type = tag match {
- case UnitTag => UnitClass.tpe
- case BooleanTag => BooleanClass.tpe
- case ByteTag => ByteClass.tpe
- case ShortTag => ShortClass.tpe
- case CharTag => CharClass.tpe
- case IntTag => IntClass.tpe
- case LongTag => LongClass.tpe
- case FloatTag => FloatClass.tpe
- case DoubleTag => DoubleClass.tpe
- case StringTag => StringClass.tpe
- case NullTag => NullClass.tpe
- case ClassTag => ClassType(value.asInstanceOf[Type])
- case EnumTag =>
- // given (in java): "class A { enum E { VAL1 } }"
- // - symbolValue: the symbol of the actual enumeration value (VAL1)
- // - .owner: the ModuleClasSymbol of the enumeration (object E)
- // - .linkedClassOfClass: the ClassSymbol of the enumeration (class E)
- symbolValue.owner.linkedClassOfClass.tpe
- }
-
- /** We need the equals method to take account of tags as well as values.
- */
- override def equals(other: Any): Boolean = other match {
- case that: Constant =>
- this.tag == that.tag &&
- (this.value == that.value || this.isNaN && that.isNaN)
- case _ => false
- }
-
- def isNaN = value match {
- case f: Float => f.isNaN
- case d: Double => d.isNaN
- case _ => false
- }
-
- def booleanValue: Boolean =
- if (tag == BooleanTag) value.asInstanceOf[Boolean]
- else throw new Error("value " + value + " is not a boolean");
-
- def byteValue: Byte = tag match {
- case ByteTag => value.asInstanceOf[Byte]
- case ShortTag => value.asInstanceOf[Short].toByte
- case CharTag => value.asInstanceOf[Char].toByte
- case IntTag => value.asInstanceOf[Int].toByte
- case LongTag => value.asInstanceOf[Long].toByte
- case FloatTag => value.asInstanceOf[Float].toByte
- case DoubleTag => value.asInstanceOf[Double].toByte
- case _ => throw new Error("value " + value + " is not a Byte")
- }
-
- def shortValue: Short = tag match {
- case ByteTag => value.asInstanceOf[Byte].toShort
- case ShortTag => value.asInstanceOf[Short]
- case CharTag => value.asInstanceOf[Char].toShort
- case IntTag => value.asInstanceOf[Int].toShort
- case LongTag => value.asInstanceOf[Long].toShort
- case FloatTag => value.asInstanceOf[Float].toShort
- case DoubleTag => value.asInstanceOf[Double].toShort
- case _ => throw new Error("value " + value + " is not a Short")
- }
-
- def charValue: Char = tag match {
- case ByteTag => value.asInstanceOf[Byte].toChar
- case ShortTag => value.asInstanceOf[Short].toChar
- case CharTag => value.asInstanceOf[Char]
- case IntTag => value.asInstanceOf[Int].toChar
- case LongTag => value.asInstanceOf[Long].toChar
- case FloatTag => value.asInstanceOf[Float].toChar
- case DoubleTag => value.asInstanceOf[Double].toChar
- case _ => throw new Error("value " + value + " is not a Char")
- }
-
- def intValue: Int = tag match {
- case ByteTag => value.asInstanceOf[Byte].toInt
- case ShortTag => value.asInstanceOf[Short].toInt
- case CharTag => value.asInstanceOf[Char].toInt
- case IntTag => value.asInstanceOf[Int]
- case LongTag => value.asInstanceOf[Long].toInt
- case FloatTag => value.asInstanceOf[Float].toInt
- case DoubleTag => value.asInstanceOf[Double].toInt
- case _ => throw new Error("value " + value + " is not an Int")
- }
-
- def longValue: Long = tag match {
- case ByteTag => value.asInstanceOf[Byte].toLong
- case ShortTag => value.asInstanceOf[Short].toLong
- case CharTag => value.asInstanceOf[Char].toLong
- case IntTag => value.asInstanceOf[Int].toLong
- case LongTag => value.asInstanceOf[Long]
- case FloatTag => value.asInstanceOf[Float].toLong
- case DoubleTag => value.asInstanceOf[Double].toLong
- case _ => throw new Error("value " + value + " is not a Long")
- }
-
- def floatValue: Float = tag match {
- case ByteTag => value.asInstanceOf[Byte].toFloat
- case ShortTag => value.asInstanceOf[Short].toFloat
- case CharTag => value.asInstanceOf[Char].toFloat
- case IntTag => value.asInstanceOf[Int].toFloat
- case LongTag => value.asInstanceOf[Long].toFloat
- case FloatTag => value.asInstanceOf[Float]
- case DoubleTag => value.asInstanceOf[Double].toFloat
- case _ => throw new Error("value " + value + " is not a Float")
- }
-
- def doubleValue: Double = tag match {
- case ByteTag => value.asInstanceOf[Byte].toDouble
- case ShortTag => value.asInstanceOf[Short].toDouble
- case CharTag => value.asInstanceOf[Char].toDouble
- case IntTag => value.asInstanceOf[Int].toDouble
- case LongTag => value.asInstanceOf[Long].toDouble
- case FloatTag => value.asInstanceOf[Float].toDouble
- case DoubleTag => value.asInstanceOf[Double]
- case _ => throw new Error("value " + value + " is not a Double")
- }
-
- /** Convert constant value to conform to given type.
- */
- def convertTo(pt: Type): Constant = {
- val target = pt.typeSymbol
- if (target == tpe.typeSymbol)
- this
- else if (target == ByteClass && isByteRange)
- Constant(byteValue)
- else if (target == ShortClass && isShortRange)
- Constant(shortValue)
- else if (target == CharClass && isCharRange)
- Constant(charValue)
- else if (target == IntClass && isIntRange)
- Constant(intValue)
- else if (target == LongClass && isLongRange)
- Constant(longValue)
- else if (target == FloatClass && isFloatRange)
- Constant(floatValue)
- else if (target == DoubleClass && isNumeric)
- Constant(doubleValue)
- else
- null
- }
-
- def stringValue: String =
- if (value == null) "null"
- else if (tag == ClassTag) signature(typeValue)
- else value.toString()
-
- @switch def escapedChar(ch: Char): String = ch match {
- case '\b' => "\\b"
- case '\t' => "\\t"
- case '\n' => "\\n"
- case '\f' => "\\f"
- case '\r' => "\\r"
- case '"' => "\\\""
- case '\'' => "\\\'"
- case '\\' => "\\\\"
- case _ => String.valueOf(ch)
- }
-
- def escapedStringValue: String = {
- def escape(text: String): String = {
- text map { ch =>
- if (ch.isControl) "\\0" + toOctalString(ch)
- else escapedChar(ch)
- } mkString ""
- }
- tag match {
- case NullTag => "null"
- case StringTag => "\"" + escape(stringValue) + "\""
- case ClassTag => "classOf[" + signature(typeValue) + "]"
- case CharTag => escape("'" + escapedChar(charValue) + "'")
- case LongTag => longValue.toString() + "L"
- case _ => String.valueOf(value)
- }
- }
- def typeValue: Type = value.asInstanceOf[Type]
- def symbolValue: Symbol = value.asInstanceOf[Symbol]
-
- override def hashCode: Int = value.## * 41 + 17
- }
-}
diff --git a/src/library/scala/reflect/generic/Flags.scala b/src/library/scala/reflect/generic/Flags.scala
deleted file mode 100644
index 81e6fbd..0000000
--- a/src/library/scala/reflect/generic/Flags.scala
+++ /dev/null
@@ -1,264 +0,0 @@
-package scala.reflect
-package generic
-
-/** Flags set on Modifiers instances in the parsing stage.
- */
- at deprecated("scala.reflect.generic will be removed", "2.9.1") class ModifierFlags {
- final val IMPLICIT = 0x00000200
- final val FINAL = 0x00000020
- final val PRIVATE = 0x00000004
- final val PROTECTED = 0x00000001
-
- final val SEALED = 0x00000400
- final val OVERRIDE = 0x00000002
- final val CASE = 0x00000800
- final val ABSTRACT = 0x00000008 // abstract class, or used in conjunction with abstract override.
- // Note difference to DEFERRED!
- final val DEFERRED = 0x00000010 // was `abstract' for members | trait is virtual
- final val INTERFACE = 0x00000080 // symbol is an interface (i.e. a trait which defines only abstract methods)
- final val MUTABLE = 0x00001000 // symbol is a mutable variable.
- final val PARAM = 0x00002000 // symbol is a (value or type) parameter to a method
-
- final val COVARIANT = 0x00010000 // symbol is a covariant type variable
- final val BYNAMEPARAM = 0x00010000 // parameter is by name
- final val CONTRAVARIANT = 0x00020000 // symbol is a contravariant type variable
- final val ABSOVERRIDE = 0x00040000 // combination of abstract & override
- final val LOCAL = 0x00080000 // symbol is local to current class (i.e. private[this] or protected[this]
- // pre: PRIVATE or PROTECTED are also set
- final val JAVA = 0x00100000 // symbol was defined by a Java class
- final val STATIC = 0x00800000 // static field, method or class
- final val CASEACCESSOR = 0x01000000 // symbol is a case parameter (or its accessor)
- final val TRAIT = 0x02000000 // symbol is a trait
- final val DEFAULTPARAM = 0x02000000 // the parameter has a default value
- final val PARAMACCESSOR = 0x20000000 // for field definitions generated for primary constructor
- // parameters (no matter if it's a 'val' parameter or not)
- // for parameters of a primary constructor ('val' or not)
- // for the accessor methods generated for 'val' or 'var' parameters
- final val LAZY = 0x80000000L // symbol is a lazy val. can't have MUTABLE unless transformed by typer
- final val PRESUPER = 0x2000000000L // value is evaluated before super call
- final val DEFAULTINIT = 0x20000000000L// symbol is initialized to the default value: used by -Xcheckinit
-
- // Overridden.
- def flagToString(flag: Long): String = ""
-}
-object ModifierFlags extends ModifierFlags
-
- at deprecated("scala.reflect.generic will be removed", "2.9.1") class Flags extends ModifierFlags {
- final val METHOD = 0x00000040 // a method
- final val MODULE = 0x00000100 // symbol is module or class implementing a module
- final val PACKAGE = 0x00004000 // symbol is a java package
-
- final val CAPTURED = 0x00010000 // variable is accessed from nested function. Set by LambdaLift.
- final val LABEL = 0x00020000 // method symbol is a label. Set by TailCall
- final val INCONSTRUCTOR = 0x00020000 // class symbol is defined in this/superclass constructor.
- final val SYNTHETIC = 0x00200000 // symbol is compiler-generated
- final val STABLE = 0x00400000 // functions that are assumed to be stable
- // (typically, access methods for valdefs)
- // or classes that do not contain abstract types.
- final val BRIDGE = 0x04000000 // function is a bridge method. Set by Erasure
- final val ACCESSOR = 0x08000000 // a value or variable accessor (getter or setter)
-
- final val SUPERACCESSOR = 0x10000000 // a super accessor
- final val MODULEVAR = 0x40000000 // for variables: is the variable caching a module value
-
- final val IS_ERROR = 0x100000000L // symbol is an error symbol
- final val OVERLOADED = 0x200000000L // symbol is overloaded
- final val LIFTED = 0x400000000L // class has been lifted out to package level
- // local value has been lifted out to class level
- // todo: make LIFTED = latePRIVATE?
- final val MIXEDIN = 0x800000000L // term member has been mixed in
- final val EXISTENTIAL = 0x800000000L // type is an existential parameter or skolem
- final val EXPANDEDNAME = 0x1000000000L // name has been expanded with class suffix
- final val IMPLCLASS = 0x2000000000L // symbol is an implementation class
- final val TRANS_FLAG = 0x4000000000L // transient flag guaranteed to be reset after each phase.
-
- final val LOCKED = 0x8000000000L // temporary flag to catch cyclic dependencies
- final val SPECIALIZED = 0x10000000000L// symbol is a generated specialized member
- final val VBRIDGE = 0x40000000000L// symbol is a varargs bridge
-
- final val VARARGS = 0x80000000000L// symbol is a Java-style varargs method
- final val TRIEDCOOKING = 0x100000000000L // ``Cooking'' has been tried on this symbol
- // A Java method's type is ``cooked'' by transforming raw types to existentials
-
- // pickling and unpickling of flags
-
- // The flags from 0x001 to 0x800 are different in the raw flags
- // and in the pickled format.
-
- private final val IMPLICIT_PKL = (1 << 0)
- private final val FINAL_PKL = (1 << 1)
- private final val PRIVATE_PKL = (1 << 2)
- private final val PROTECTED_PKL = (1 << 3)
- private final val SEALED_PKL = (1 << 4)
- private final val OVERRIDE_PKL = (1 << 5)
- private final val CASE_PKL = (1 << 6)
- private final val ABSTRACT_PKL = (1 << 7)
- private final val DEFERRED_PKL = (1 << 8)
- private final val METHOD_PKL = (1 << 9)
- private final val MODULE_PKL = (1 << 10)
- private final val INTERFACE_PKL = (1 << 11)
-
- private final val PKL_MASK = 0x00000FFF
-
- final val PickledFlags: Long = 0xFFFFFFFFL
-
- private def rawPickledCorrespondence = Array(
- (IMPLICIT, IMPLICIT_PKL),
- (FINAL, FINAL_PKL),
- (PRIVATE, PRIVATE_PKL),
- (PROTECTED, PROTECTED_PKL),
- (SEALED, SEALED_PKL),
- (OVERRIDE, OVERRIDE_PKL),
- (CASE, CASE_PKL),
- (ABSTRACT, ABSTRACT_PKL),
- (DEFERRED, DEFERRED_PKL),
- (METHOD, METHOD_PKL),
- (MODULE, MODULE_PKL),
- (INTERFACE, INTERFACE_PKL)
- )
- private val rawFlags: Array[Int] = rawPickledCorrespondence map (_._1)
- private val pickledFlags: Array[Int] = rawPickledCorrespondence map (_._2)
-
- // unused in 2.9.1: left to satisfy mima complaint about missing f$1
- private def mkCorrespondenceArray(correspondence: List[(Int, Int)]) = {
- def f(flags: Int): Int = {
- correspondence.foldLeft(0) {
- case (result, (oldFlag, newFlag)) =>
- if ((flags & oldFlag) != 0) result | newFlag
- else result
- }
- }
- 0 to PKL_MASK map f toArray
- }
-
- private def r2p(flags: Int): Int = {
- var result = 0
- var i = 0
- while (i < rawFlags.length) {
- if ((flags & rawFlags(i)) != 0)
- result |= pickledFlags(i)
-
- i += 1
- }
- result
- }
- private def p2r(flags: Int): Int = {
- var result = 0
- var i = 0
- while (i < rawFlags.length) {
- if ((flags & pickledFlags(i)) != 0)
- result |= rawFlags(i)
-
- i += 1
- }
- result
- }
-
- // Generated by mkFlagToStringMethod() at Mon Oct 11 10:07:29 PDT 2010
- @annotation.switch override def flagToString(flag: Long): String = flag match {
- case PROTECTED => "protected" // (1L << 0)
- case OVERRIDE => "override" // (1L << 1)
- case PRIVATE => "private" // (1L << 2)
- case ABSTRACT => "abstract" // (1L << 3)
- case DEFERRED => "<deferred>" // (1L << 4)
- case FINAL => "final" // (1L << 5)
- case METHOD => "<method>" // (1L << 6)
- case INTERFACE => "<interface>" // (1L << 7)
- case MODULE => "<module>" // (1L << 8)
- case IMPLICIT => "implicit" // (1L << 9)
- case SEALED => "sealed" // (1L << 10)
- case CASE => "case" // (1L << 11)
- case MUTABLE => "<mutable>" // (1L << 12)
- case PARAM => "<param>" // (1L << 13)
- case PACKAGE => "<package>" // (1L << 14)
- case 0x8000L => "" // (1L << 15)
- case BYNAMEPARAM => "<bynameparam/captured/covariant>" // (1L << 16)
- case CONTRAVARIANT => "<contravariant/inconstructor/label>" // (1L << 17)
- case ABSOVERRIDE => "absoverride" // (1L << 18)
- case LOCAL => "<local>" // (1L << 19)
- case JAVA => "<java>" // (1L << 20)
- case SYNTHETIC => "<synthetic>" // (1L << 21)
- case STABLE => "<stable>" // (1L << 22)
- case STATIC => "<static>" // (1L << 23)
- case CASEACCESSOR => "<caseaccessor>" // (1L << 24)
- case DEFAULTPARAM => "<defaultparam/trait>" // (1L << 25)
- case BRIDGE => "<bridge>" // (1L << 26)
- case ACCESSOR => "<accessor>" // (1L << 27)
- case SUPERACCESSOR => "<superaccessor>" // (1L << 28)
- case PARAMACCESSOR => "<paramaccessor>" // (1L << 29)
- case MODULEVAR => "<modulevar>" // (1L << 30)
- case LAZY => "lazy" // (1L << 31)
- case IS_ERROR => "<is_error>" // (1L << 32)
- case OVERLOADED => "<overloaded>" // (1L << 33)
- case LIFTED => "<lifted>" // (1L << 34)
- case EXISTENTIAL => "<existential/mixedin>" // (1L << 35)
- case EXPANDEDNAME => "<expandedname>" // (1L << 36)
- case IMPLCLASS => "<implclass/presuper>" // (1L << 37)
- case TRANS_FLAG => "<trans_flag>" // (1L << 38)
- case LOCKED => "<locked>" // (1L << 39)
- case SPECIALIZED => "<specialized>" // (1L << 40)
- case DEFAULTINIT => "<defaultinit>" // (1L << 41)
- case VBRIDGE => "<vbridge>" // (1L << 42)
- case 0x80000000000L => "" // (1L << 43)
- case 0x100000000000L => "" // (1L << 44)
- case 0x200000000000L => "" // (1L << 45)
- case 0x400000000000L => "" // (1L << 46)
- case 0x800000000000L => "" // (1L << 47)
- case 0x1000000000000L => "" // (1L << 48)
- case 0x2000000000000L => "" // (1L << 49)
- case 0x4000000000000L => "" // (1L << 50)
- case 0x8000000000000L => "" // (1L << 51)
- case 0x10000000000000L => "" // (1L << 52)
- case 0x20000000000000L => "" // (1L << 53)
- case 0x40000000000000L => "" // (1L << 54)
- case 0x80000000000000L => "" // (1L << 55)
- case 0x100000000000000L => "" // (1L << 56)
- case 0x200000000000000L => "" // (1L << 57)
- case 0x400000000000000L => "" // (1L << 58)
- case 0x800000000000000L => "" // (1L << 59)
- case 0x1000000000000000L => "" // (1L << 60)
- case 0x2000000000000000L => "" // (1L << 61)
- case 0x4000000000000000L => "" // (1L << 62)
- case 0x8000000000000000L => "" // (1L << 63)
- case _ => ""
- }
- def flagsToString(flags: Long, privateWithin: String): String = {
- var f = flags
- val pw =
- if (privateWithin == "") {
- if ((flags & (PRIVATE | LOCAL)) == (PRIVATE | LOCAL).toLong) {
- f = f & ~(PRIVATE | LOCAL)
- "private[this]"
- } else if ((flags & (PROTECTED | LOCAL)) == (PROTECTED | LOCAL).toLong) {
- f = f & ~(PROTECTED | LOCAL)
- "protected[this]"
- } else {
- ""
- }
- } else if ((f & PROTECTED) != 0L) {
- f = f & ~PROTECTED
- "protected[" + privateWithin + "]"
- } else {
- "private[" + privateWithin + "]"
- }
- List(flagsToString(f), pw) filterNot (_ == "") mkString " "
- }
- def flagsToString(flags: Long): String =
- pickledListOrder map (mask => flagToString(flags & mask)) filterNot (_ == "") mkString " "
-
- def rawFlagsToPickled(flags: Long): Long =
- (flags & ~PKL_MASK) | r2p(flags.toInt & PKL_MASK)
-
- def pickledToRawFlags(pflags: Long): Long =
- (pflags & ~PKL_MASK) | p2r(pflags.toInt & PKL_MASK)
-
- // List of the raw flags, in pickled order
- protected val pickledListOrder: List[Long] = {
- val all = 0 to 62 map (1L << _)
- val front = rawFlags map (_.toLong)
-
- front.toList ++ (all filterNot (front contains _))
- }
-}
-
-object Flags extends Flags
diff --git a/src/library/scala/reflect/generic/HasFlags.scala b/src/library/scala/reflect/generic/HasFlags.scala
deleted file mode 100644
index 3d9d121..0000000
--- a/src/library/scala/reflect/generic/HasFlags.scala
+++ /dev/null
@@ -1,231 +0,0 @@
-package scala.reflect
-package generic
-
-/** ISSUE #1: Flag names vs. Test method names
- *
- * The following methods from Symbol have a name of
- * the form isFoo where FOO is the name of a flag, but where the method
- * body tests for more than whether the flag is set.
- *
- * There are two possibilities with such methods. Either the extra
- * tests are strictly to partition among overloaded flags (which is
- * the case we can live with in the short term, if each such flag's
- * partitioning assumptions are documented) or they aren't.
- *
- * The second case implies that "x hasFlag FOO" and "x.isFoo" have
- * different semantics, and this we can't live with, because even if
- * we're smart enough to avoid being tripped up by that, the next guy isn't.
- *
- * No extreme measures necessary, only renaming isFoo to something
- * which hews more closely to its implementation. (Or renaming the flag.)
- *
- // Defined in the compiler Symbol
- //
- final def isLabel = isMethod && !hasAccessorFlag && hasFlag(LABEL)
- final def isLocal: Boolean = owner.isTerm
- final def isModuleVar: Boolean = isVariable && hasFlag(MODULEVAR)
- final def isStable =
- isTerm &&
- !hasTraitFlag &&
- (!hasFlag(METHOD | BYNAMEPARAM) || hasFlag(STABLE)) &&
- !(tpe.isVolatile && !hasAnnotation(uncheckedStableClass))
- final def isStatic: Boolean =
- hasFlag(STATIC) || isRoot || owner.isStaticOwner
- override final def isTrait: Boolean =
- isClass && hasFlag(TRAIT | notDEFERRED) // A virtual class becomes a trait (part of DEVIRTUALIZE)
-
- // Defined in the library Symbol
- //
- def isTrait: Boolean = isClass && hasFlag(TRAIT) // refined later for virtual classes.
- final def isContravariant = isType && hasFlag(CONTRAVARIANT)
- final def isCovariant = isType && hasFlag(COVARIANT)
- final def isMethod = isTerm && hasFlag(METHOD)
- final def isModule = isTerm && hasFlag(MODULE)
- final def isPackage = isModule && hasFlag(PACKAGE)
- *
- */
-
-/** ISSUE #2: Implicit flag relationships must be made explicit.
- *
- * For instance, every time the MODULE flag is set, the FINAL flag is
- * set along with it:
- *
- .setFlag(FINAL | MODULE | PACKAGE | JAVA)
- .setFlag(FINAL | MODULE | PACKAGE | JAVA).setInfo(rootLoader)
- new ModuleSymbol(this, pos, name).setFlag(MODULE | FINAL)
- new ModuleSymbol(this, pos, name).setFlag(MODULE | FINAL)
- val m = new ModuleSymbol(this, pos, name).setFlag(MODULE | FINAL)
- setFlag(module.getFlag(ModuleToClassFlags) | MODULE | FINAL)
- sourceModule.flags = MODULE | FINAL
-
- * However the same is not true of when the MODULE flag is cleared:
-
- sym.resetFlag(MODULE)
- .setFlag(sym.flags | STABLE).resetFlag(MODULE)
- sym.resetFlag(MODULE | FINAL | CASE)
-
- * It's not relevant whether this example poses any issues: we must
- * not tolerate these uncertainties. If the flags are to move together
- * then both setting and clearing have to be encapsulated. If there
- * is a useful and used distinction between the various permutations
- * of on and off, then it must be documented. It's the only way!
- */
-
-import Flags._
-
-/** Common code utilized by Modifiers (which carry the flags associated
- * with Trees) and Symbol.
- */
- at deprecated("scala.reflect.generic will be removed", "2.9.1") trait HasFlags {
- type FlagsType
- type AccessBoundaryType
- type AnnotationType
-
- /** Though both Symbol and Modifiers widen this method to public, it's
- * defined protected here to give us the option in the future to route
- * flag methods through accessors and disallow raw flag manipulation.
- * And after that, perhaps, on some magical day: a typesafe enumeration.
- */
- protected def flags: FlagsType
-
- /** The printable representation of this entity's flags and access boundary,
- * restricted to flags in the given mask.
- */
- def hasFlagsToString(mask: FlagsType): String
-
- /** Access level encoding: there are three scala flags (PRIVATE, PROTECTED,
- * and LOCAL) which combine with value privateWithin (the "foo" in private[foo])
- * to define from where an entity can be accessed. The meanings are as follows:
- *
- * PRIVATE access restricted to class only.
- * PROTECTED access restricted to class and subclasses only.
- * LOCAL can only be set in conjunction with PRIVATE or PROTECTED.
- * Further restricts access to the same object instance.
- *
- * In addition, privateWithin can be used to set a visibility barrier.
- * When set, everything contained in the named enclosing package or class
- * has access. It is incompatible with PRIVATE or LOCAL, but is additive
- * with PROTECTED (i.e. if either the flags or privateWithin allow access,
- * then it is allowed.)
- *
- * The java access levels translate as follows:
- *
- * java private: hasFlag(PRIVATE) && !hasAccessBoundary
- * java package: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == enclosing package)
- * java protected: hasFlag(PROTECTED) && (privateWithin == enclosing package)
- * java public: !hasFlag(PRIVATE | PROTECTED) && !hasAccessBoundary
- */
- def privateWithin: AccessBoundaryType
-
- /** A list of annotations attached to this entity.
- */
- def annotations: List[AnnotationType]
-
- /** Whether this entity has a "privateWithin" visibility barrier attached.
- */
- def hasAccessBoundary: Boolean
-
- /** Whether this entity has ANY of the flags in the given mask.
- */
- def hasFlag(flag: Long): Boolean
-
- /** Whether this entity has ALL of the flags in the given mask.
- */
- def hasAllFlags(mask: Long): Boolean
-
- /** Whether this entity has NONE of the flags in the given mask.
- */
- def hasNoFlags(mask: Long): Boolean = !hasFlag(mask)
-
- // Tests which come through cleanly: both Symbol and Modifiers use these
- // identically, testing for a single flag.
- def isCase = hasFlag(CASE )
- def isFinal = hasFlag(FINAL )
- def isImplicit = hasFlag(IMPLICIT )
- def isLazy = hasFlag(LAZY )
- def isMutable = hasFlag(MUTABLE ) // in Modifiers, formerly isVariable
- def isOverride = hasFlag(OVERRIDE )
- def isPrivate = hasFlag(PRIVATE )
- def isProtected = hasFlag(PROTECTED)
- def isSynthetic = hasFlag(SYNTHETIC)
- def isInterface = hasFlag(INTERFACE)
-
- // Newly introduced based on having a reasonably obvious clean translation.
- def isPrivateLocal = hasAllFlags(PRIVATE | LOCAL)
- def isProtectedLocal = hasAllFlags(PROTECTED | LOCAL)
- def isParamAccessor = hasFlag(PARAMACCESSOR)
- def isCaseAccessor = hasFlag(CASEACCESSOR)
- def isSuperAccessor = hasFlag(SUPERACCESSOR)
- def isLifted = hasFlag(LIFTED)
-
- // Formerly the Modifiers impl did not include the access boundary check,
- // which must have been a bug.
- def isPublic = hasNoFlags(PRIVATE | PROTECTED) && !hasAccessBoundary
-
- // Renamed the Modifiers impl from isArgument.
- def isParameter = hasFlag(PARAM)
-
- // Removed isClass qualification since the flag isn't overloaded and
- // sym.isClass is enforced in Namers#validate.
- def isSealed = hasFlag(SEALED)
-
- // Removed !isClass qualification since the flag isn't overloaded.
- def isDeferred = hasFlag(DEFERRED )
-
- // Dropped isTerm condition because flag isn't overloaded.
- def isAbstractOverride = hasFlag(ABSOVERRIDE)
-
- def isDefaultInit = hasFlag(DEFAULTINIT)
-
- // Disambiguating: DEFAULTPARAM, TRAIT
- def hasDefault = hasAllFlags(DEFAULTPARAM | PARAM)
- def isTrait = hasFlag(TRAIT) && !hasFlag(PARAM)
- def hasTraitFlag = hasFlag(TRAIT)
- def hasDefaultFlag = hasFlag(DEFAULTPARAM)
-
- // Straightforwardly named accessors already being used differently.
- // These names are most likely temporary.
- def hasAbstractFlag = hasFlag(ABSTRACT)
- def hasAccessorFlag = hasFlag(ACCESSOR)
- def hasLocalFlag = hasFlag(LOCAL)
- def hasModuleFlag = hasFlag(MODULE)
- def hasPackageFlag = hasFlag(PACKAGE)
- def hasPreSuperFlag = hasFlag(PRESUPER)
- def hasStableFlag = hasFlag(STABLE)
- def hasStaticFlag = hasFlag(STATIC)
-
- // Disambiguating: BYNAMEPARAM, CAPTURED, COVARIANT.
- def isByNameParam = hasAllFlags(BYNAMEPARAM | PARAM)
- // Nope, these aren't going to fly:
- // def isCapturedVariable = hasAllFlags(CAPTURED | MUTABLE)
- // def isCovariant = hasFlag(COVARIANT) && hasNoFlags(PARAM | MUTABLE)
-
- // Disambiguating: LABEL, CONTRAVARIANT, INCONSTRUCTOR
- def isLabel = hasAllFlags(LABEL | METHOD) && !hasAccessorFlag
- // Cannot effectively disambiguate the others at this level.
- def hasContravariantFlag = hasFlag(CONTRAVARIANT)
- def hasInConstructorFlag = hasFlag(INCONSTRUCTOR)
-
- // Name
- def isJavaDefined = hasFlag(JAVA)
-
- // Keeping some potentially ambiguous names around so as not to break
- // the rest of the world
- @deprecated("", "2.9.0")
- def isAbstract = hasFlag(ABSTRACT)
- // Problematic:
- // ABSTRACT and DEFERRED too easy to confuse, and
- // ABSTRACT + OVERRIDE ==> ABSOVERRIDE adds to it.
- //
- // final def isAbstractClass = isClass && hasFlag(ABSTRACT)
- // def isAbstractType = false // to be overridden
-
- // Question:
- // Which name? All other flags are isFlag so it's probably a mistake to
- // vary from that, but isAccessor does sound like it includes the other
- // *ACCESSOR flags. Perhaps something like isSimpleAccessor.
- //
- // def isAccessor = hasFlag(ACCESSOR )
- // final def isGetterOrSetter = hasAccessorFlag
-}
-
diff --git a/src/library/scala/reflect/generic/Names.scala b/src/library/scala/reflect/generic/Names.scala
deleted file mode 100644
index 1906a99..0000000
--- a/src/library/scala/reflect/generic/Names.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package scala.reflect
-package generic
-
- at deprecated("scala.reflect.generic will be removed", "2.9.1") trait Names {
- type Name >: Null <: AnyRef
- type TypeName <: Name
- type TermName <: Name
-
- def newTermName(cs: Array[Char], offset: Int, len: Int): TermName
- def newTermName(cs: Array[Byte], offset: Int, len: Int): TermName
- def newTermName(s: String): TermName
- def mkTermName(name: Name): TermName
-
- def newTypeName(cs: Array[Char], offset: Int, len: Int): TypeName
- def newTypeName(cs: Array[Byte], offset: Int, len: Int): TypeName
- def newTypeName(s: String): TypeName
- def mkTypeName(name: Name): TypeName
-
- def isTermName(name: Name): Boolean
- def isTypeName(name: Name): Boolean
-
- implicit def promoteTermNamesAsNecessary(name: Name): TermName = mkTermName(name)
-}
-
diff --git a/src/library/scala/reflect/generic/PickleBuffer.scala b/src/library/scala/reflect/generic/PickleBuffer.scala
deleted file mode 100644
index f52a248..0000000
--- a/src/library/scala/reflect/generic/PickleBuffer.scala
+++ /dev/null
@@ -1,187 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.reflect
-package generic
-
-/** Variable length byte arrays, with methods for basic pickling and unpickling.
- *
- * @param data The initial buffer
- * @param from The first index where defined data are found
- * @param to The first index where new data can be written
- */
- at deprecated("scala.reflect.generic will be removed", "2.9.1") class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
-
- var bytes = data
- var readIndex = from
- var writeIndex = to
-
- /** Double bytes array */
- private def dble() {
- val bytes1 = new Array[Byte](bytes.length * 2)
- Array.copy(bytes, 0, bytes1, 0, writeIndex)
- bytes = bytes1
- }
-
- def ensureCapacity(capacity: Int) =
- while (bytes.length < writeIndex + capacity) dble()
-
- // -- Basic output routines --------------------------------------------
-
- /** Write a byte of data */
- def writeByte(b: Int) {
- if (writeIndex == bytes.length) dble()
- bytes(writeIndex) = b.toByte
- writeIndex += 1
- }
-
- /** Write a natural number in big endian format, base 128.
- * All but the last digits have bit 0x80 set.
- */
- def writeNat(x: Int) =
- writeLongNat(x.toLong & 0x00000000FFFFFFFFL)
-
- /**
- * Like writeNat, but for longs. This is not the same as
- * writeLong, which writes in base 256. Note that the
- * binary representation of LongNat is identical to Nat
- * if the long value is in the range Int.MIN_VALUE to
- * Int.MAX_VALUE.
- */
- def writeLongNat(x: Long) {
- def writeNatPrefix(x: Long) {
- val y = x >>> 7
- if (y != 0L) writeNatPrefix(y)
- writeByte(((x & 0x7f) | 0x80).toInt)
- }
- val y = x >>> 7
- if (y != 0L) writeNatPrefix(y)
- writeByte((x & 0x7f).toInt)
- }
-
- /** Write a natural number <code>x</code> at position <code>pos</code>.
- * If number is more than one byte, shift rest of array to make space.
- *
- * @param pos ...
- * @param x ...
- */
- def patchNat(pos: Int, x: Int) {
- def patchNatPrefix(x: Int) {
- writeByte(0)
- Array.copy(bytes, pos, bytes, pos+1, writeIndex - (pos+1))
- bytes(pos) = ((x & 0x7f) | 0x80).toByte
- val y = x >>> 7
- if (y != 0) patchNatPrefix(y)
- }
- bytes(pos) = (x & 0x7f).toByte
- val y = x >>> 7
- if (y != 0) patchNatPrefix(y)
- }
-
- /** Write a long number <code>x</code> in signed big endian format, base 256.
- *
- * @param x The long number to be written.
- */
- def writeLong(x: Long) {
- val y = x >> 8
- val z = x & 0xff
- if (-y != (z >> 7)) writeLong(y)
- writeByte(z.toInt)
- }
-
- // -- Basic input routines --------------------------------------------
-
- /** Peek at the current byte without moving the read index */
- def peekByte(): Int = bytes(readIndex)
-
- /** Read a byte */
- def readByte(): Int = {
- val x = bytes(readIndex); readIndex += 1; x
- }
-
- /** Read a natural number in big endian format, base 128.
- * All but the last digits have bit 0x80 set.*/
- def readNat(): Int = readLongNat().toInt
-
- def readLongNat(): Long = {
- var b = 0L
- var x = 0L
- do {
- b = readByte()
- x = (x << 7) + (b & 0x7f)
- } while ((b & 0x80) != 0L);
- x
- }
-
- /** Read a long number in signed big endian format, base 256. */
- def readLong(len: Int): Long = {
- var x = 0L
- var i = 0
- while (i < len) {
- x = (x << 8) + (readByte() & 0xff)
- i += 1
- }
- val leading = 64 - (len << 3)
- x << leading >> leading
- }
-
- /** Returns the buffer as a sequence of (Int, Array[Byte]) representing
- * (tag, data) of the individual entries. Saves and restores buffer state.
- */
-
- def toIndexedSeq: IndexedSeq[(Int, Array[Byte])] = {
- val saved = readIndex
- readIndex = 0
- readNat() ; readNat() // discarding version
- val result = new Array[(Int, Array[Byte])](readNat())
-
- result.indices foreach { index =>
- val tag = readNat()
- val len = readNat()
- val bytes = data.slice(readIndex, len + readIndex)
- readIndex += len
-
- result(index) = tag -> bytes
- }
-
- readIndex = saved
- result.toIndexedSeq
- }
-
- /** Perform operation <code>op</code> until the condition
- * <code>readIndex == end</code> is satisfied.
- * Concatenate results into a list.
- *
- * @param end ...
- * @param op ...
- * @return ...
- */
- def until[T](end: Int, op: () => T): List[T] =
- if (readIndex == end) List() else op() :: until(end, op);
-
- /** Perform operation <code>op</code> the number of
- * times specified. Concatenate the results into a list.
- */
- def times[T](n: Int, op: ()=>T): List[T] =
- if (n == 0) List() else op() :: times(n-1, op)
-
- /** Pickle = majorVersion_Nat minorVersion_Nat nbEntries_Nat {Entry}
- * Entry = type_Nat length_Nat [actual entries]
- *
- * Assumes that the ..Version_Nat are already consumed.
- *
- * @return an array mapping entry numbers to locations in
- * the byte array where the entries start.
- */
- def createIndex: Array[Int] = {
- val index = new Array[Int](readNat()) // nbEntries_Nat
- for (i <- 0 until index.length) {
- index(i) = readIndex
- readByte() // skip type_Nat
- readIndex = readNat() + readIndex // read length_Nat, jump to next entry
- }
- index
- }
-}
diff --git a/src/library/scala/reflect/generic/PickleFormat.scala b/src/library/scala/reflect/generic/PickleFormat.scala
deleted file mode 100644
index c6308e7..0000000
--- a/src/library/scala/reflect/generic/PickleFormat.scala
+++ /dev/null
@@ -1,224 +0,0 @@
-package scala.reflect
-package generic
-
-/** This object provides constants for pickling attributes.
- *
- * If you extend the format, be sure to increase the
- * version minor number.
- *
- * @author Martin Odersky
- * @version 1.0
- */
- at deprecated("scala.reflect.generic will be removed", "2.9.1") object PickleFormat {
-
-/***************************************************
- * Symbol table attribute format:
- * Symtab = nentries_Nat {Entry}
- * Entry = 1 TERMNAME len_Nat NameInfo
- * | 2 TYPENAME len_Nat NameInfo
- * | 3 NONEsym len_Nat
- * | 4 TYPEsym len_Nat SymbolInfo
- * | 5 ALIASsym len_Nat SymbolInfo
- * | 6 CLASSsym len_Nat SymbolInfo [thistype_Ref]
- * | 7 MODULEsym len_Nat SymbolInfo
- * | 8 VALsym len_Nat [defaultGetter_Ref /* no longer needed*/] SymbolInfo [alias_Ref]
- * | 9 EXTref len_Nat name_Ref [owner_Ref]
- * | 10 EXTMODCLASSref len_Nat name_Ref [owner_Ref]
- * | 11 NOtpe len_Nat
- * | 12 NOPREFIXtpe len_Nat
- * | 13 THIStpe len_Nat sym_Ref
- * | 14 SINGLEtpe len_Nat type_Ref sym_Ref
- * | 15 CONSTANTtpe len_Nat constant_Ref
- * | 16 TYPEREFtpe len_Nat type_Ref sym_Ref {targ_Ref}
- * | 17 TYPEBOUNDStpe len_Nat tpe_Ref tpe_Ref
- * | 18 REFINEDtpe len_Nat classsym_Ref {tpe_Ref}
- * | 19 CLASSINFOtpe len_Nat classsym_Ref {tpe_Ref}
- * | 20 METHODtpe len_Nat tpe_Ref {sym_Ref}
- * | 21 POLYTtpe len_Nat tpe_Ref {sym_Ref}
- * | 22 IMPLICITMETHODtpe len_Nat tpe_Ref {sym_Ref} /* no longer needed */
- * | 52 SUPERtpe len_Nat tpe_Ref tpe_Ref
- * | 24 LITERALunit len_Nat
- * | 25 LITERALboolean len_Nat value_Long
- * | 26 LITERALbyte len_Nat value_Long
- * | 27 LITERALshort len_Nat value_Long
- * | 28 LITERALchar len_Nat value_Long
- * | 29 LITERALint len_Nat value_Long
- * | 30 LITERALlong len_Nat value_Long
- * | 31 LITERALfloat len_Nat value_Long
- * | 32 LITERALdouble len_Nat value_Long
- * | 33 LITERALstring len_Nat name_Ref
- * | 34 LITERALnull len_Nat
- * | 35 LITERALclass len_Nat tpe_Ref
- * | 36 LITERALenum len_Nat sym_Ref
- * | 40 SYMANNOT len_Nat sym_Ref AnnotInfoBody
- * | 41 CHILDREN len_Nat sym_Ref {sym_Ref}
- * | 42 ANNOTATEDtpe len_Nat [sym_Ref /* no longer needed */] tpe_Ref {annotinfo_Ref}
- * | 43 ANNOTINFO len_Nat AnnotInfoBody
- * | 44 ANNOTARGARRAY len_Nat {constAnnotArg_Ref}
- * | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat
- * | 48 EXISTENTIALtpe len_Nat type_Ref {symbol_Ref}
- * | 49 TREE len_Nat 1 EMPTYtree
- * | 49 TREE len_Nat 2 PACKAGEtree type_Ref sym_Ref mods_Ref name_Ref {tree_Ref}
- * | 49 TREE len_Nat 3 CLASStree type_Ref sym_Ref mods_Ref name_Ref tree_Ref {tree_Ref}
- * | 49 TREE len_Nat 4 MODULEtree type_Ref sym_Ref mods_Ref name_Ref tree_Ref
- * | 49 TREE len_Nat 5 VALDEFtree type_Ref sym_Ref mods_Ref name_Ref tree_Ref tree_Ref
- * | 49 TREE len_Nat 6 DEFDEFtree type_Ref sym_Ref mods_Ref name_Ref numtparams_Nat {tree_Ref} numparamss_Nat {numparams_Nat {tree_Ref}} tree_Ref tree_Ref
- * | 49 TREE len_Nat 7 TYPEDEFtree type_Ref sym_Ref mods_Ref name_Ref tree_Ref {tree_Ref}
- * | 49 TREE len_Nat 8 LABELtree type_Ref sym_Ref tree_Ref {tree_Ref}
- * | 49 TREE len_Nat 9 IMPORTtree type_Ref sym_Ref tree_Ref {name_Ref name_Ref}
- * | 49 TREE len_Nat 11 DOCDEFtree type_Ref sym_Ref string_Ref tree_Ref
- * | 49 TREE len_Nat 12 TEMPLATEtree type_Ref sym_Ref numparents_Nat {tree_Ref} tree_Ref {tree_Ref}
- * | 49 TREE len_Nat 13 BLOCKtree type_Ref tree_Ref {tree_Ref}
- * | 49 TREE len_Nat 14 CASEtree type_Ref tree_Ref tree_Ref tree_Ref
- * | 49 TREE len_Nat 15 SEQUENCEtree type_Ref {tree_Ref}
- * | 49 TREE len_Nat 16 ALTERNATIVEtree type_Ref {tree_Ref}
- * | 49 TREE len_Nat 17 STARtree type_Ref {tree_Ref}
- * | 49 TREE len_Nat 18 BINDtree type_Ref sym_Ref name_Ref tree_Ref
- * | 49 TREE len_Nat 19 UNAPPLYtree type_Ref tree_Ref {tree_Ref}
- * | 49 TREE len_Nat 20 ARRAYVALUEtree type_Ref tree_Ref {tree_Ref}
- * | 49 TREE len_Nat 21 FUNCTIONtree type_Ref sym_Ref tree_Ref {tree_Ref}
- * | 49 TREE len_Nat 22 ASSIGNtree type_Ref tree_Ref tree_Ref
- * | 49 TREE len_Nat 23 IFtree type_Ref tree_Ref tree_Ref tree_Ref
- * | 49 TREE len_Nat 24 MATCHtree type_Ref tree_Ref {tree_Ref}
- * | 49 TREE len_Nat 25 RETURNtree type_Ref sym_Ref tree_Ref
- * | 49 TREE len_Nat 26 TREtree type_Ref tree_Ref tree_Ref {tree_Ref}
- * | 49 TREE len_Nat 27 THROWtree type_Ref tree_Ref
- * | 49 TREE len_Nat 28 NEWtree type_Ref tree_Ref
- * | 49 TREE len_Nat 29 TYPEDtree type_Ref tree_Ref tree_Ref
- * | 49 TREE len_Nat 30 TYPEAPPLYtree type_Ref tree_Ref {tree_Ref}
- * | 49 TREE len_Nat 31 APPLYtree type_Ref tree_Ref {tree_Ref}
- * | 49 TREE len_Nat 32 APPLYDYNAMICtree type_Ref sym_Ref tree_Ref {tree_Ref}
- * | 49 TREE len_Nat 33 SUPERtree type_Ref sym_Ref tree_Ref name_Ref
- * | 49 TREE len_Nat 34 THIStree type_Ref sym_Ref name_Ref
- * | 49 TREE len_Nat 35 SELECTtree type_Ref sym_Ref tree_Ref name_Ref
- * | 49 TREE len_Nat 36 IDENTtree type_Ref sym_Ref name_Ref
- * | 49 TREE len_Nat 37 LITERALtree type_Ref constant_Ref
- * | 49 TREE len_Nat 38 TYPEtree type_Ref
- * | 49 TREE len_Nat 39 ANNOTATEDtree type_Ref tree_Ref tree_Ref
- * | 49 TREE len_Nat 40 SINGLETONTYPEtree type_Ref tree_Ref
- * | 49 TREE len_Nat 41 SELECTFROMTYPEtree type_Ref tree_Ref name_Ref
- * | 49 TREE len_Nat 42 COMPOUNDTYPEtree type_Ref tree_Ref
- * | 49 TREE len_Nat 43 APPLIEDTYPEtree type_Ref tree_Ref {tree_Ref}
- * | 49 TREE len_Nat 44 TYPEBOUNDStree type_Ref tree_Ref tree_Ref
- * | 49 TREE len_Nat 45 EXISTENTIALTYPEtree type_Ref tree_Ref {tree_Ref}
- * | 50 MODIFIERS len_Nat flags_Long privateWithin_Ref
- * SymbolInfo = name_Ref owner_Ref flags_LongNat [privateWithin_Ref] info_Ref
- * NameInfo = <character sequence of length len_Nat in Utf8 format>
- * NumInfo = <len_Nat-byte signed number in big endian format>
- * Ref = Nat
- * AnnotInfoBody = info_Ref {annotArg_Ref} {name_Ref constAnnotArg_Ref}
- * AnnotArg = Tree | Constant
- * ConstAnnotArg = Constant | AnnotInfo | AnnotArgArray
- *
- * len is remaining length after `len'.
- */
- val MajorVersion = 5
- val MinorVersion = 0
- def VersionString = "V" + MajorVersion + "." + MinorVersion
-
- final val TERMname = 1
- final val TYPEname = 2
- final val NONEsym = 3
- final val TYPEsym = 4
- final val ALIASsym = 5
- final val CLASSsym = 6
- final val MODULEsym = 7
- final val VALsym = 8
- final val EXTref = 9
- final val EXTMODCLASSref = 10
- final val NOtpe = 11
- final val NOPREFIXtpe = 12
- final val THIStpe = 13
- final val SINGLEtpe = 14
- final val CONSTANTtpe = 15
- final val TYPEREFtpe = 16
- final val TYPEBOUNDStpe = 17
- final val REFINEDtpe = 18
- final val CLASSINFOtpe = 19
- final val METHODtpe = 20
- final val POLYtpe = 21
- final val IMPLICITMETHODtpe = 22 // no longer generated
-
- final val LITERAL = 23 // base line for literals
- final val LITERALunit = 24
- final val LITERALboolean = 25
- final val LITERALbyte = 26
- final val LITERALshort = 27
- final val LITERALchar = 28
- final val LITERALint = 29
- final val LITERALlong = 30
- final val LITERALfloat = 31
- final val LITERALdouble = 32
- final val LITERALstring = 33
- final val LITERALnull = 34
- final val LITERALclass = 35
- final val LITERALenum = 36
- final val SYMANNOT = 40
- final val CHILDREN = 41
- final val ANNOTATEDtpe = 42
- final val ANNOTINFO = 43
- final val ANNOTARGARRAY = 44
-
- final val SUPERtpe = 46
- final val DEBRUIJNINDEXtpe = 47
- final val EXISTENTIALtpe = 48
-
- final val TREE = 49 // prefix code that means a tree is coming
- final val EMPTYtree = 1
- final val PACKAGEtree = 2
- final val CLASStree = 3
- final val MODULEtree = 4
- final val VALDEFtree = 5
- final val DEFDEFtree = 6
- final val TYPEDEFtree = 7
- final val LABELtree = 8
- final val IMPORTtree = 9
- final val DOCDEFtree = 11
- final val TEMPLATEtree = 12
- final val BLOCKtree = 13
- final val CASEtree = 14
- // This node type has been removed.
- // final val SEQUENCEtree = 15
- final val ALTERNATIVEtree = 16
- final val STARtree = 17
- final val BINDtree = 18
- final val UNAPPLYtree = 19
- final val ARRAYVALUEtree = 20
- final val FUNCTIONtree = 21
- final val ASSIGNtree = 22
- final val IFtree = 23
- final val MATCHtree = 24
- final val RETURNtree = 25
- final val TREtree = 26
- final val THROWtree = 27
- final val NEWtree = 28
- final val TYPEDtree = 29
- final val TYPEAPPLYtree = 30
- final val APPLYtree = 31
- final val APPLYDYNAMICtree = 32
- final val SUPERtree = 33
- final val THIStree = 34
- final val SELECTtree = 35
- final val IDENTtree = 36
- final val LITERALtree = 37
- final val TYPEtree = 38
- final val ANNOTATEDtree = 39
- final val SINGLETONTYPEtree = 40
- final val SELECTFROMTYPEtree = 41
- final val COMPOUNDTYPEtree = 42
- final val APPLIEDTYPEtree = 43
- final val TYPEBOUNDStree = 44
- final val EXISTENTIALTYPEtree = 45
-
- final val MODIFIERS = 50
-
- final val firstSymTag = NONEsym
- final val lastSymTag = VALsym
- final val lastExtSymTag = EXTMODCLASSref
-
-
- //The following two are no longer accurate, because ANNOTATEDtpe,
- //SUPERtpe, ... are not in the same range as the other types
- //final val firstTypeTag = NOtpe
- //final val lastTypeTag = POLYtpe
-}
diff --git a/src/library/scala/reflect/generic/Scopes.scala b/src/library/scala/reflect/generic/Scopes.scala
deleted file mode 100644
index 5ca7c95..0000000
--- a/src/library/scala/reflect/generic/Scopes.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package scala.reflect
-package generic
-
- at deprecated("scala.reflect.generic will be removed", "2.9.1") trait Scopes { self: Universe =>
-
- abstract class AbsScope extends Iterable[Symbol] {
- private[reflect] def enter(sym: Symbol): Symbol
- }
-
- type Scope <: AbsScope
-
- def newScope(): Scope
-}
-
-
diff --git a/src/library/scala/reflect/generic/StandardDefinitions.scala b/src/library/scala/reflect/generic/StandardDefinitions.scala
deleted file mode 100644
index 49ac1d3..0000000
--- a/src/library/scala/reflect/generic/StandardDefinitions.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.reflect
-package generic
-
- at deprecated("scala.reflect.generic will be removed", "2.9.1") trait StandardDefinitions { self: Universe =>
-
- val definitions: AbsDefinitions
-
- abstract class AbsDefinitions {
-
- // outer packages and their classes
- def RootPackage: Symbol
- def RootClass: Symbol
- def EmptyPackage: Symbol
- def EmptyPackageClass: Symbol
-
- def ScalaPackage: Symbol
- def ScalaPackageClass: Symbol
-
- // top types
- def AnyClass : Symbol
- def AnyValClass: Symbol
- def AnyRefClass: Symbol
- def ObjectClass: Symbol
-
- // bottom types
- def NullClass : Symbol
- def NothingClass: Symbol
-
- // the scala value classes
- def UnitClass : Symbol
- def ByteClass : Symbol
- def ShortClass : Symbol
- def CharClass : Symbol
- def IntClass : Symbol
- def LongClass : Symbol
- def FloatClass : Symbol
- def DoubleClass : Symbol
- def BooleanClass: Symbol
-
- // fundamental reference classes
- def SymbolClass : Symbol
- def StringClass : Symbol
- def ClassClass : Symbol
-
- // fundamental modules
- def PredefModule: Symbol
-
- // fundamental type constructions
- def ClassType(arg: Type): Type
-
- /** The string representation used by the given type in the VM.
- */
- def signature(tp: Type): String
-
- /** Is symbol one of the value classes? */
- def isValueClass(sym: Symbol): Boolean
-
- /** Is symbol one of the numeric value classes? */
- def isNumericValueClass(sym: Symbol): Boolean
- }
-}
diff --git a/src/library/scala/reflect/generic/StdNames.scala b/src/library/scala/reflect/generic/StdNames.scala
deleted file mode 100644
index 8f8d695..0000000
--- a/src/library/scala/reflect/generic/StdNames.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package scala.reflect
-package generic
-
-import scala.reflect.NameTransformer
-
- at deprecated("scala.reflect.generic will be removed", "2.9.1") trait StdNames {
- self: Universe =>
-
- val nme: LibraryTermNames
- val tpnme: LibraryTypeNames
-
- def encode(str: String): TermName = newTermName(NameTransformer.encode(str))
-
- implicit def stringToTermName(s: String): TermName = newTermName(s)
-
- trait LibraryCommonNames {
- type NameType <: Name
- implicit def createNameType(name: String): NameType
-
- val EMPTY: NameType = ""
- val ANON_FUN_NAME: NameType = "$anonfun"
- val EMPTY_PACKAGE_NAME: NameType = "<empty>"
- val IMPORT: NameType = "<import>"
- val MODULE_SUFFIX: NameType = "$module"
- val ROOT: NameType = "<root>"
- }
-
- trait LibraryTermNames extends LibraryCommonNames {
- val EXPAND_SEPARATOR_STRING = "$$"
- val LOCAL_SUFFIX_STRING = " "
- val ROOTPKG: NameType = "_root_"
-
- /** The expanded name of `name' relative to this class `base` with given `separator`
- */
- def expandedName(name: TermName, base: Symbol, separator: String = EXPAND_SEPARATOR_STRING): TermName =
- newTermName(base.fullName('$') + separator + name)
-
- def moduleVarName(name: TermName): TermName = newTermName("" + name + MODULE_SUFFIX)
- }
- trait LibraryTypeNames extends LibraryCommonNames {
- val REFINE_CLASS_NAME: NameType = "<refinement>"
- val ANON_CLASS_NAME: NameType = "$anon"
- }
-}
diff --git a/src/library/scala/reflect/generic/Symbols.scala b/src/library/scala/reflect/generic/Symbols.scala
deleted file mode 100644
index a587116..0000000
--- a/src/library/scala/reflect/generic/Symbols.scala
+++ /dev/null
@@ -1,187 +0,0 @@
-package scala.reflect
-package generic
-
-import Flags._
-
- at deprecated("scala.reflect.generic will be removed", "2.9.1") trait Symbols { self: Universe =>
-
- type Symbol >: Null <: AbsSymbol
-
- abstract class AbsSymbol extends HasFlags {
- this: Symbol =>
-
- type FlagsType = Long
- type AccessBoundaryType = Symbol
- type AnnotationType = AnnotationInfo
-
- /** The owner of this symbol.
- */
- def owner: Symbol
-
- /** The flags of this symbol */
- def flags: Long
-
- /** The name of the symbol as a member of the `Name` type.
- */
- def name: Name
-
- /** The name of the symbol before decoding, e.g. `\$eq\$eq` instead of `==`.
- */
- def encodedName: String = name.toString
-
- /** The decoded name of the symbol, e.g. `==` instead of `\$eq\$eq`.
- */
- def decodedName: String = stripLocalSuffix(NameTransformer.decode(encodedName))
-
- /** The encoded full path name of this symbol, where outer names and inner names
- * are separated by `separator` characters.
- * Never translates expansions of operators back to operator symbol.
- * Never adds id.
- */
- final def fullName(separator: Char): String = stripLocalSuffix {
- if (isRoot || isRootPackage || this == NoSymbol) this.toString
- else if (owner.isEffectiveRoot) encodedName
- else owner.enclClass.fullName(separator) + separator + encodedName
- }
-
- private def stripLocalSuffix(s: String) = s stripSuffix nme.LOCAL_SUFFIX_STRING
-
- /** The encoded full path name of this symbol, where outer names and inner names
- * are separated by periods.
- */
- final def fullName: String = fullName('.')
-
- /** Does symbol have ANY flag in `mask` set? */
- final def hasFlag(mask: Long): Boolean = (flags & mask) != 0L
-
- /** Does symbol have ALL the flags in `mask` set? */
- final def hasAllFlags(mask: Long): Boolean = (flags & mask) == mask
-
- /** Set when symbol has a modifier of the form private[X], NoSymbol otherwise.
- */
- def privateWithin: Symbol
-
- final def hasAccessBoundary = (privateWithin != null) && (privateWithin != NoSymbol)
-
- /** The raw info of the type
- */
- def rawInfo: Type
-
- /** The type of the symbol
- */
- def tpe: Type = info
-
- /** The info of the symbol. This is like tpe, except for class symbols where the `info`
- * describes the contents of the class whereas the `tpe` is a reference to the class.
- */
- def info: Type = {
- val tp = rawInfo
- tp.complete(this)
- tp
- }
-
- /** If this symbol is a class or trait, its self type, otherwise the type of the symbol itse;lf
- */
- def typeOfThis: Type
-
- def owner_=(sym: Symbol) { throw new UnsupportedOperationException("owner_= inapplicable for " + this) }
- def flags_=(flags: Long) { throw new UnsupportedOperationException("flags_= inapplicable for " + this) }
- def info_=(tp: Type) { throw new UnsupportedOperationException("info_= inapplicable for " + this) }
- def typeOfThis_=(tp: Type) { throw new UnsupportedOperationException("typeOfThis_= inapplicable for " + this) }
- def privateWithin_=(sym: Symbol) { throw new UnsupportedOperationException("privateWithin_= inapplicable for " + this) }
- def sourceModule_=(sym: Symbol) { throw new UnsupportedOperationException("sourceModule_= inapplicable for " + this) }
- def addChild(sym: Symbol) { throw new UnsupportedOperationException("addChild inapplicable for " + this) }
- def addAnnotation(annot: AnnotationInfo) { throw new UnsupportedOperationException("addAnnotation inapplicable for " + this) }
-
- /** For a module class its linked class, for a plain class
- * the module class of its linked module.
- * For instance
- * object Foo
- * class Foo
- *
- * Then object Foo has a `moduleClass' (invisible to the user, the backend calls it Foo$
- * linkedClassOfClass goes from class Foo$ to class Foo, and back.
- */
- def linkedClassOfClass: Symbol
-
- /** The module corresponding to this module class (note that this
- * is not updated when a module is cloned), or NoSymbol if this is not a ModuleClass
- */
- def sourceModule: Symbol = NoSymbol
-
- /** If symbol is an object definition, it's implied associated class,
- * otherwise NoSymbol
- */
- def moduleClass: Symbol
-
- /**
- * If symbol is a lazy val, it's lazy accessor
- */
- def lazyAccessor: Symbol
-
-// flags and kind tests
-
- def isTerm = false // to be overridden
- def isType = false // to be overridden
- def isClass = false // to be overridden
- def isAliasType = false // to be overridden
- def isAbstractType = false // to be overridden
- private[scala] def isSkolem = false // to be overridden
-
- override def isTrait: Boolean = isClass && hasFlag(TRAIT) // refined later for virtual classes.
- final def isAbstractClass = isClass && hasFlag(ABSTRACT)
- final def isBridge = hasFlag(BRIDGE)
- final def isContravariant = isType && hasFlag(CONTRAVARIANT)
- final def isCovariant = isType && hasFlag(COVARIANT)
- final def isEarlyInitialized: Boolean = isTerm && hasFlag(PRESUPER)
- final def isExistentiallyBound = isType && hasFlag(EXISTENTIAL)
- final def isImplClass = isClass && hasFlag(IMPLCLASS) // Is this symbol an implementation class for a mixin?
- final def isLazyAccessor = isLazy && lazyAccessor != NoSymbol
- final def isMethod = isTerm && hasFlag(METHOD)
- final def isVarargsMethod = isMethod && hasFlag(VARARGS)
- final def isModule = isTerm && hasFlag(MODULE)
- final def isModuleClass = isClass && hasFlag(MODULE)
- final def isOverloaded = hasFlag(OVERLOADED)
- final def isRefinementClass = isClass && name == tpnme.REFINE_CLASS_NAME
- final def isSourceMethod = isMethod && !hasFlag(STABLE) // exclude all accessors!!!
- final def isTypeParameter = isType && isParameter && !isSkolem
-
- /** Package tests */
- final def isEmptyPackage = isPackage && name == nme.EMPTY_PACKAGE_NAME
- final def isEmptyPackageClass = isPackageClass && name == tpnme.EMPTY_PACKAGE_NAME
- final def isPackage = isModule && hasFlag(PACKAGE)
- final def isPackageClass = isClass && hasFlag(PACKAGE)
- final def isRoot = isPackageClass && owner == NoSymbol
- final def isRootPackage = isPackage && owner == NoSymbol
-
- /** Is this symbol an effective root for fullname string?
- */
- def isEffectiveRoot = isRoot || isEmptyPackageClass
-
- /** If this is NoSymbol, evaluate the argument: otherwise, this.
- */
- def orElse[T](alt: => Symbol): Symbol = if (this ne NoSymbol) this else alt
-
- // creators
-
- def newAbstractType(name: TypeName, pos: Position = NoPosition): Symbol
- def newAliasType(name: TypeName, pos: Position = NoPosition): Symbol
- def newClass(name: TypeName, pos: Position = NoPosition): Symbol
- def newMethod(name: TermName, pos: Position = NoPosition): Symbol
- def newModule(name: TermName, clazz: Symbol, pos: Position = NoPosition): Symbol
- def newModuleClass(name: TypeName, pos: Position = NoPosition): Symbol
- def newValue(name: TermName, pos: Position = NoPosition): Symbol
-
- // access to related symbols
-
- /** The next enclosing class */
- def enclClass: Symbol = if (isClass) this else owner.enclClass
-
- /** The next enclosing method */
- def enclMethod: Symbol = if (isSourceMethod) this else owner.enclMethod
- }
-
- val NoSymbol: Symbol
-}
-
-
diff --git a/src/library/scala/reflect/generic/Trees.scala b/src/library/scala/reflect/generic/Trees.scala
deleted file mode 100644
index 8697445..0000000
--- a/src/library/scala/reflect/generic/Trees.scala
+++ /dev/null
@@ -1,730 +0,0 @@
-package scala.reflect
-package generic
-
-import java.io.{ PrintWriter, StringWriter }
-import Flags._
-
- at deprecated("scala.reflect.generic will be removed", "2.9.1") trait Trees { self: Universe =>
-
- abstract class AbsTreePrinter(out: PrintWriter) {
- def print(tree: Tree)
- def flush()
- }
-
- def newTreePrinter(out: PrintWriter): AbsTreePrinter
-
- private[scala] var nodeCount = 0
-
- protected def flagsIntoString(flags: Long, privateWithin: String): String
-
- /** @param privateWithin the qualifier for a private (a type name)
- * or tpnme.EMPTY, if none is given.
- * @param annotations the annotations for the definition.
- * <strong>Note:</strong> the typechecker drops these annotations,
- * use the AnnotationInfo's (Symbol.annotations) in later phases.
- */
- case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position]) extends HasFlags {
- /* Abstract types from HasFlags. */
- type FlagsType = Long
- type AccessBoundaryType = Name
- type AnnotationType = Tree
-
- def hasAccessBoundary = privateWithin != tpnme.EMPTY
- def hasAllFlags(mask: Long): Boolean = (flags & mask) == mask
- def hasFlag(flag: Long) = (flag & flags) != 0L
- def hasFlagsToString(mask: Long): String = flagsToString(
- flags & mask,
- if (hasAccessBoundary) privateWithin.toString else ""
- )
- def & (flag: Long): Modifiers = {
- val flags1 = flags & flag
- if (flags1 == flags) this
- else Modifiers(flags1, privateWithin, annotations, positions)
- }
- def &~ (flag: Long): Modifiers = {
- val flags1 = flags & (~flag)
- if (flags1 == flags) this
- else Modifiers(flags1, privateWithin, annotations, positions)
- }
- def | (flag: Long): Modifiers = {
- val flags1 = flags | flag
- if (flags1 == flags) this
- else Modifiers(flags1, privateWithin, annotations, positions)
- }
- def withAnnotations(annots: List[Tree]) =
- if (annots.isEmpty) this
- else copy(annotations = annotations ::: annots)
- def withPosition(flag: Long, position: Position) =
- copy(positions = positions + (flag -> position))
-
- override def toString = "Modifiers(%s, %s, %s)".format(hasFlagsToString(-1L), annotations mkString ", ", positions)
- }
-
- def Modifiers(flags: Long, privateWithin: Name): Modifiers = Modifiers(flags, privateWithin, List(), Map.empty)
- def Modifiers(flags: Long): Modifiers = Modifiers(flags, tpnme.EMPTY)
-
- lazy val NoMods = Modifiers(0)
-
- abstract class Tree extends Product {
- val id = nodeCount
- nodeCount += 1
-
- private[this] var rawpos: Position = NoPosition
-
- def pos = rawpos
- def pos_=(pos: Position) = rawpos = pos
- def setPos(pos: Position): this.type = { rawpos = pos; this }
-
- private[this] var rawtpe: Type = _
-
- def tpe = rawtpe
- def tpe_=(t: Type) = rawtpe = t
-
- /** Set tpe to give `tp` and return this.
- */
- def setType(tp: Type): this.type = { rawtpe = tp; this }
-
- /** Like `setType`, but if this is a previously empty TypeTree
- * that fact is remembered so that resetType will snap back.
- */
- def defineType(tp: Type): this.type = setType(tp)
-
- def symbol: Symbol = null
- def symbol_=(sym: Symbol) { throw new UnsupportedOperationException("symbol_= inapplicable for " + this) }
- def setSymbol(sym: Symbol): this.type = { symbol = sym; this }
-
- def hasSymbol = false
- def isDef = false
- def isEmpty = false
-
- def hasSymbolWhich(f: Symbol => Boolean) = hasSymbol && f(symbol)
-
- /** The direct child trees of this tree
- * EmptyTrees are always omitted. Lists are collapsed.
- */
- def children: List[Tree] = {
- def subtrees(x: Any): List[Tree] = x match {
- case EmptyTree => Nil
- case t: Tree => List(t)
- case xs: List[_] => xs flatMap subtrees
- case _ => Nil
- }
- productIterator.toList flatMap subtrees
- }
-
- /** In compiler: Make a copy of this tree, keeping all attributes,
- * except that all positions are focussed (so nothing
- * in this tree will be found when searching by position).
- * If not in compiler may also return tree unchanged.
- */
- private[scala] def duplicate: this.type =
- duplicateTree(this).asInstanceOf[this.type]
-
- private[scala] def copyAttrs(tree: Tree): this.type = {
- pos = tree.pos
- tpe = tree.tpe
- if (hasSymbol) symbol = tree.symbol
- this
- }
-
- override def toString(): String = {
- val buffer = new StringWriter()
- val printer = newTreePrinter(new PrintWriter(buffer))
- printer.print(this)
- printer.flush()
- buffer.toString
- }
-
- override def hashCode(): Int = System.identityHashCode(this)
- override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
- }
-
- private[scala] def duplicateTree(tree: Tree): Tree = tree
-
- trait SymTree extends Tree {
- override def hasSymbol = true
- override var symbol: Symbol = NoSymbol
- }
-
- trait RefTree extends SymTree {
- def name: Name
- }
-
- abstract class DefTree extends SymTree {
- def name: Name
- override def isDef = true
- }
-
- trait TermTree extends Tree
-
- /** A tree for a type. Note that not all type trees implement
- * this trait; in particular, Ident's are an exception. */
- trait TypTree extends Tree
-
-// ----- tree node alternatives --------------------------------------
-
- /** The empty tree */
- case object EmptyTree extends TermTree {
- super.tpe_=(NoType)
- override def tpe_=(t: Type) =
- if (t != NoType) throw new UnsupportedOperationException("tpe_=("+t+") inapplicable for <empty>")
- override def isEmpty = true
- }
-
- abstract class MemberDef extends DefTree {
- def mods: Modifiers
- def keyword: String = this match {
- case TypeDef(_, _, _, _) => "type"
- case ClassDef(mods, _, _, _) => if (mods hasFlag TRAIT) "trait" else "class"
- case DefDef(_, _, _, _, _, _) => "def"
- case ModuleDef(_, _, _) => "object"
- case PackageDef(_, _) => "package"
- case ValDef(mods, _, _, _) => if (mods.isMutable) "var" else "val"
- case _ => ""
- }
- // final def hasFlag(mask: Long): Boolean = mods hasFlag mask
- }
-
- /** Package clause
- */
- case class PackageDef(pid: RefTree, stats: List[Tree])
- extends MemberDef {
- def name = pid.name
- def mods = NoMods
- }
-
- abstract class ImplDef extends MemberDef {
- def impl: Template
- }
-
- /** Class definition */
- case class ClassDef(mods: Modifiers, name: TypeName, tparams: List[TypeDef], impl: Template)
- extends ImplDef
-
- /** Singleton object definition
- */
- case class ModuleDef(mods: Modifiers, name: TermName, impl: Template)
- extends ImplDef
-
- abstract class ValOrDefDef extends MemberDef {
- def name: TermName
- def tpt: Tree
- def rhs: Tree
- }
-
- /** Value definition
- */
- case class ValDef(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree) extends ValOrDefDef
-
- /** Method definition
- */
- case class DefDef(mods: Modifiers, name: TermName, tparams: List[TypeDef],
- vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree) extends ValOrDefDef
-
- /** Abstract type, type parameter, or type alias */
- case class TypeDef(mods: Modifiers, name: TypeName, tparams: List[TypeDef], rhs: Tree)
- extends MemberDef
-
- /** <p>
- * Labelled expression - the symbols in the array (must be Idents!)
- * are those the label takes as argument
- * </p>
- * <p>
- * The symbol that is given to the labeldef should have a MethodType
- * (as if it were a nested function)
- * </p>
- * <p>
- * Jumps are apply nodes attributed with label symbol, the arguments
- * will get assigned to the idents.
- * </p>
- * <p>
- * Note: on 2005-06-09 Martin, Iuli, Burak agreed to have forward
- * jumps within a Block.
- * </p>
- */
- case class LabelDef(name: TermName, params: List[Ident], rhs: Tree)
- extends DefTree with TermTree
-
-
- /** Import selector
- *
- * Representation of an imported name its optional rename and their optional positions
- *
- * @param name the imported name
- * @param namePos its position or -1 if undefined
- * @param rename the name the import is renamed to (== name if no renaming)
- * @param renamePos the position of the rename or -1 if undefined
- */
- case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
-
- /** Import clause
- *
- * @param expr
- * @param selectors
- */
- case class Import(expr: Tree, selectors: List[ImportSelector])
- extends SymTree
- // The symbol of an Import is an import symbol @see Symbol.newImport
- // It's used primarily as a marker to check that the import has been typechecked.
-
- /** Instantiation template of a class or trait
- *
- * @param parents
- * @param body
- */
- case class Template(parents: List[Tree], self: ValDef, body: List[Tree])
- extends SymTree {
- // the symbol of a template is a local dummy. @see Symbol.newLocalDummy
- // the owner of the local dummy is the enclosing trait or class.
- // the local dummy is itself the owner of any local blocks
- // For example:
- //
- // class C {
- // def foo // owner is C
- // {
- // def bar // owner is local dummy
- // }
- // System.err.println("TEMPLATE: " + parents)
- }
-
- /** Block of expressions (semicolon separated expressions) */
- case class Block(stats: List[Tree], expr: Tree)
- extends TermTree
-
- /** Case clause in a pattern match, eliminated during explicitouter
- * (except for occurrences in switch statements)
- */
- case class CaseDef(pat: Tree, guard: Tree, body: Tree)
- extends Tree
-
- /** Alternatives of patterns, eliminated by explicitouter, except for
- * occurrences in encoded Switch stmt (=remaining Match(CaseDef(...))
- */
- case class Alternative(trees: List[Tree])
- extends TermTree
-
- /** Repetition of pattern, eliminated by explicitouter */
- case class Star(elem: Tree)
- extends TermTree
-
- /** Bind of a variable to a rhs pattern, eliminated by explicitouter
- *
- * @param name
- * @param body
- */
- case class Bind(name: Name, body: Tree)
- extends DefTree
-
- case class UnApply(fun: Tree, args: List[Tree])
- extends TermTree
-
- /** Array of expressions, needs to be translated in backend,
- */
- case class ArrayValue(elemtpt: Tree, elems: List[Tree])
- extends TermTree
-
- /** Anonymous function, eliminated by analyzer */
- case class Function(vparams: List[ValDef], body: Tree)
- extends TermTree with SymTree
- // The symbol of a Function is a synthetic value of name nme.ANON_FUN_NAME
- // It is the owner of the function's parameters.
-
- /** Assignment */
- case class Assign(lhs: Tree, rhs: Tree)
- extends TermTree
-
- /** Conditional expression */
- case class If(cond: Tree, thenp: Tree, elsep: Tree)
- extends TermTree
-
- /** - Pattern matching expression (before explicitouter)
- * - Switch statements (after explicitouter)
- *
- * After explicitouter, cases will satisfy the following constraints:
- *
- * - all guards are `EmptyTree`,
- * - all patterns will be either `Literal(Constant(x:Int))`
- * or `Alternative(lit|...|lit)`
- * - except for an "otherwise" branch, which has pattern
- * `Ident(nme.WILDCARD)`
- */
- case class Match(selector: Tree, cases: List[CaseDef])
- extends TermTree
-
- /** Return expression */
- case class Return(expr: Tree)
- extends TermTree with SymTree
- // The symbol of a Return node is the enclosing method.
-
- case class Try(block: Tree, catches: List[CaseDef], finalizer: Tree)
- extends TermTree
-
- /** Throw expression */
- case class Throw(expr: Tree)
- extends TermTree
-
- /** Object instantiation
- * One should always use factory method below to build a user level new.
- *
- * @param tpt a class type
- */
- case class New(tpt: Tree) extends TermTree
-
- /** Type annotation, eliminated by explicit outer */
- case class Typed(expr: Tree, tpt: Tree)
- extends TermTree
-
- // Martin to Sean: Should GenericApply/TypeApply/Apply not be SymTree's? After all,
- // ApplyDynamic is a SymTree.
- abstract class GenericApply extends TermTree {
- val fun: Tree
- val args: List[Tree]
- }
-
- /** Type application */
- case class TypeApply(fun: Tree, args: List[Tree])
- extends GenericApply {
- override def symbol: Symbol = fun.symbol
- override def symbol_=(sym: Symbol) { fun.symbol = sym }
- }
-
- /** Value application */
- case class Apply(fun: Tree, args: List[Tree])
- extends GenericApply {
- override def symbol: Symbol = fun.symbol
- override def symbol_=(sym: Symbol) { fun.symbol = sym }
- }
-
- /** Dynamic value application.
- * In a dynamic application q.f(as)
- * - q is stored in qual
- * - as is stored in args
- * - f is stored as the node's symbol field.
- */
- case class ApplyDynamic(qual: Tree, args: List[Tree])
- extends TermTree with SymTree
- // The symbol of an ApplyDynamic is the function symbol of `qual', or NoSymbol, if there is none.
-
- /** Super reference, qual = corresponding this reference */
- case class Super(qual: Tree, mix: TypeName) extends TermTree {
- // The symbol of a Super is the class _from_ which the super reference is made.
- // For instance in C.super(...), it would be C.
- override def symbol: Symbol = qual.symbol
- override def symbol_=(sym: Symbol) { qual.symbol = sym }
- }
-
- /** Self reference */
- case class This(qual: TypeName)
- extends TermTree with SymTree
- // The symbol of a This is the class to which the this refers.
- // For instance in C.this, it would be C.
-
- /** Designator <qualifier> . <name> */
- case class Select(qualifier: Tree, name: Name)
- extends RefTree
-
- /** Identifier <name> */
- case class Ident(name: Name) extends RefTree { }
-
- class BackQuotedIdent(name: Name) extends Ident(name)
-
- /** Literal */
- case class Literal(value: Constant)
- extends TermTree {
- assert(value ne null)
- }
-
- def Literal(value: Any): Literal =
- Literal(Constant(value))
-
- type TypeTree <: AbsTypeTree
- val TypeTree: TypeTreeExtractor
-
- abstract class TypeTreeExtractor {
- def apply(): TypeTree
- def unapply(tree: TypeTree): Boolean
- }
-
- class Traverser {
- protected var currentOwner: Symbol = definitions.RootClass
- def traverse(tree: Tree): Unit = tree match {
- case EmptyTree =>
- ;
- case PackageDef(pid, stats) =>
- traverse(pid)
- atOwner(tree.symbol.moduleClass) {
- traverseTrees(stats)
- }
- case ClassDef(mods, name, tparams, impl) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverseTrees(tparams); traverse(impl)
- }
- case ModuleDef(mods, name, impl) =>
- atOwner(tree.symbol.moduleClass) {
- traverseTrees(mods.annotations); traverse(impl)
- }
- case ValDef(mods, name, tpt, rhs) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverse(tpt); traverse(rhs)
- }
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverseTrees(tparams); traverseTreess(vparamss); traverse(tpt); traverse(rhs)
- }
- case TypeDef(mods, name, tparams, rhs) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverseTrees(tparams); traverse(rhs)
- }
- case LabelDef(name, params, rhs) =>
- traverseTrees(params); traverse(rhs)
- case Import(expr, selectors) =>
- traverse(expr)
- case Annotated(annot, arg) =>
- traverse(annot); traverse(arg)
- case Template(parents, self, body) =>
- traverseTrees(parents)
- if (!self.isEmpty) traverse(self)
- traverseStats(body, tree.symbol)
- case Block(stats, expr) =>
- traverseTrees(stats); traverse(expr)
- case CaseDef(pat, guard, body) =>
- traverse(pat); traverse(guard); traverse(body)
- case Alternative(trees) =>
- traverseTrees(trees)
- case Star(elem) =>
- traverse(elem)
- case Bind(name, body) =>
- traverse(body)
- case UnApply(fun, args) =>
- traverse(fun); traverseTrees(args)
- case ArrayValue(elemtpt, trees) =>
- traverse(elemtpt); traverseTrees(trees)
- case Function(vparams, body) =>
- atOwner(tree.symbol) {
- traverseTrees(vparams); traverse(body)
- }
- case Assign(lhs, rhs) =>
- traverse(lhs); traverse(rhs)
- case If(cond, thenp, elsep) =>
- traverse(cond); traverse(thenp); traverse(elsep)
- case Match(selector, cases) =>
- traverse(selector); traverseTrees(cases)
- case Return(expr) =>
- traverse(expr)
- case Try(block, catches, finalizer) =>
- traverse(block); traverseTrees(catches); traverse(finalizer)
- case Throw(expr) =>
- traverse(expr)
- case New(tpt) =>
- traverse(tpt)
- case Typed(expr, tpt) =>
- traverse(expr); traverse(tpt)
- case TypeApply(fun, args) =>
- traverse(fun); traverseTrees(args)
- case Apply(fun, args) =>
- traverse(fun); traverseTrees(args)
- case ApplyDynamic(qual, args) =>
- traverse(qual); traverseTrees(args)
- case Super(_, _) =>
- ;
- case This(_) =>
- ;
- case Select(qualifier, selector) =>
- traverse(qualifier)
- case Ident(_) =>
- ;
- case Literal(_) =>
- ;
- case TypeTree() =>
- ;
- case SingletonTypeTree(ref) =>
- traverse(ref)
- case SelectFromTypeTree(qualifier, selector) =>
- traverse(qualifier)
- case CompoundTypeTree(templ) =>
- traverse(templ)
- case AppliedTypeTree(tpt, args) =>
- traverse(tpt); traverseTrees(args)
- case TypeBoundsTree(lo, hi) =>
- traverse(lo); traverse(hi)
- case ExistentialTypeTree(tpt, whereClauses) =>
- traverse(tpt); traverseTrees(whereClauses)
- case SelectFromArray(qualifier, selector, erasure) =>
- traverse(qualifier)
- }
-
- def traverseTrees(trees: List[Tree]) {
- trees foreach traverse
- }
- def traverseTreess(treess: List[List[Tree]]) {
- treess foreach traverseTrees
- }
- def traverseStats(stats: List[Tree], exprOwner: Symbol) {
- stats foreach (stat =>
- if (exprOwner != currentOwner) atOwner(exprOwner)(traverse(stat))
- else traverse(stat)
- )
- }
-
- def atOwner(owner: Symbol)(traverse: => Unit) {
- val prevOwner = currentOwner
- currentOwner = owner
- traverse
- currentOwner = prevOwner
- }
- }
-
- /** A synthetic term holding an arbitrary type. Not to be confused with
- * with TypTree, the trait for trees that are only used for type trees.
- * TypeTree's are inserted in several places, but most notably in
- * <code>RefCheck</code>, where the arbitrary type trees are all replaced by
- * TypeTree's. */
- abstract class AbsTypeTree extends TypTree {
- override def symbol = if (tpe == null) null else tpe.typeSymbol
- override def isEmpty = (tpe eq null) || tpe == NoType
- }
-
- /** A tree that has an annotation attached to it. Only used for annotated types and
- * annotation ascriptions, annotations on definitions are stored in the Modifiers.
- * Eliminated by typechecker (typedAnnotated), the annotations are then stored in
- * an AnnotatedType.
- */
- case class Annotated(annot: Tree, arg: Tree) extends Tree
-
- /** Singleton type, eliminated by RefCheck */
- case class SingletonTypeTree(ref: Tree)
- extends TypTree
-
- /** Type selection <qualifier> # <name>, eliminated by RefCheck */
- case class SelectFromTypeTree(qualifier: Tree, name: TypeName)
- extends TypTree with RefTree
-
- /** Intersection type <parent1> with ... with <parentN> { <decls> }, eliminated by RefCheck */
- case class CompoundTypeTree(templ: Template)
- extends TypTree
-
- /** Applied type <tpt> [ <args> ], eliminated by RefCheck */
- case class AppliedTypeTree(tpt: Tree, args: List[Tree])
- extends TypTree {
- override def symbol: Symbol = tpt.symbol
- override def symbol_=(sym: Symbol) { tpt.symbol = sym }
- }
-
- case class TypeBoundsTree(lo: Tree, hi: Tree)
- extends TypTree
-
- case class ExistentialTypeTree(tpt: Tree, whereClauses: List[Tree])
- extends TypTree
-
- /** Array selection <qualifier> . <name> only used during erasure */
- case class SelectFromArray(qualifier: Tree, name: Name, erasure: Type)
- extends TermTree with RefTree { }
-
-/* A standard pattern match
- case EmptyTree =>
- case PackageDef(pid, stats) =>
- // package pid { stats }
- case ClassDef(mods, name, tparams, impl) =>
- // mods class name [tparams] impl where impl = extends parents { defs }
- case ModuleDef(mods, name, impl) => (eliminated by refcheck)
- // mods object name impl where impl = extends parents { defs }
- case ValDef(mods, name, tpt, rhs) =>
- // mods val name: tpt = rhs
- // note missing type information is expressed by tpt = TypeTree()
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- // mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs
- // note missing type information is expressed by tpt = TypeTree()
- case TypeDef(mods, name, tparams, rhs) => (eliminated by erasure)
- // mods type name[tparams] = rhs
- // mods type name[tparams] >: lo <: hi, where lo, hi are in a TypeBoundsTree,
- and DEFERRED is set in mods
- case LabelDef(name, params, rhs) =>
- // used for tailcalls and like
- // while/do are desugared to label defs as follows:
- // while (cond) body ==> LabelDef($L, List(), if (cond) { body; L$() } else ())
- // do body while (cond) ==> LabelDef($L, List(), body; if (cond) L$() else ())
- case Import(expr, selectors) => (eliminated by typecheck)
- // import expr.{selectors}
- // Selectors are a list of pairs of names (from, to).
- // The last (and maybe only name) may be a nme.WILDCARD
- // for instance
- // import qual.{x, y => z, _} would be represented as
- // Import(qual, List(("x", "x"), ("y", "z"), (WILDCARD, null)))
- case Template(parents, self, body) =>
- // extends parents { self => body }
- // if self is missing it is represented as emptyValDef
- case Block(stats, expr) =>
- // { stats; expr }
- case CaseDef(pat, guard, body) => (eliminated by transmatch/explicitouter)
- // case pat if guard => body
- case Alternative(trees) => (eliminated by transmatch/explicitouter)
- // pat1 | ... | patn
- case Star(elem) => (eliminated by transmatch/explicitouter)
- // pat*
- case Bind(name, body) => (eliminated by transmatch/explicitouter)
- // name @ pat
- case UnApply(fun: Tree, args) (introduced by typer, eliminated by transmatch/explicitouter)
- // used for unapply's
- case ArrayValue(elemtpt, trees) => (introduced by uncurry)
- // used to pass arguments to vararg arguments
- // for instance, printf("%s%d", foo, 42) is translated to after uncurry to:
- // Apply(
- // Ident("printf"),
- // Literal("%s%d"),
- // ArrayValue(<Any>, List(Ident("foo"), Literal(42))))
- case Function(vparams, body) => (eliminated by lambdaLift)
- // vparams => body where vparams:List[ValDef]
- case Assign(lhs, rhs) =>
- // lhs = rhs
- case If(cond, thenp, elsep) =>
- // if (cond) thenp else elsep
- case Match(selector, cases) =>
- // selector match { cases }
- case Return(expr) =>
- // return expr
- case Try(block, catches, finalizer) =>
- // try block catch { catches } finally finalizer where catches: List[CaseDef]
- case Throw(expr) =>
- // throw expr
- case New(tpt) =>
- // new tpt always in the context: (new tpt).<init>[targs](args)
- case Typed(expr, tpt) => (eliminated by erasure)
- // expr: tpt
- case TypeApply(fun, args) =>
- // fun[args]
- case Apply(fun, args) =>
- // fun(args)
- // for instance fun[targs](args) is expressed as Apply(TypeApply(fun, targs), args)
- case ApplyDynamic(qual, args) (introduced by erasure, eliminated by cleanup)
- // fun(args)
- case Super(qual, mix) =>
- // qual.super[mix] if qual and/or mix is empty, ther are tpnme.EMPTY
- case This(qual) =>
- // qual.this
- case Select(qualifier, selector) =>
- // qualifier.selector
- case Ident(name) =>
- // name
- // note: type checker converts idents that refer to enclosing fields or methods
- // to selects; name ==> this.name
- case Literal(value) =>
- // value
- case TypeTree() => (introduced by refcheck)
- // a type that's not written out, but given in the tpe attribute
- case Annotated(annot, arg) => (eliminated by typer)
- // arg @annot for types, arg: @annot for exprs
- case SingletonTypeTree(ref) => (eliminated by uncurry)
- // ref.type
- case SelectFromTypeTree(qualifier, selector) => (eliminated by uncurry)
- // qualifier # selector, a path-dependent type p.T is expressed as p.type # T
- case CompoundTypeTree(templ: Template) => (eliminated by uncurry)
- // parent1 with ... with parentN { refinement }
- case AppliedTypeTree(tpt, args) => (eliminated by uncurry)
- // tpt[args]
- case TypeBoundsTree(lo, hi) => (eliminated by uncurry)
- // >: lo <: hi
- case ExistentialTypeTree(tpt, whereClauses) => (eliminated by uncurry)
- // tpt forSome { whereClauses }
-
-*/
-}
diff --git a/src/library/scala/reflect/generic/Types.scala b/src/library/scala/reflect/generic/Types.scala
deleted file mode 100644
index 837f548..0000000
--- a/src/library/scala/reflect/generic/Types.scala
+++ /dev/null
@@ -1,165 +0,0 @@
-package scala.reflect
-package generic
-
- at deprecated("scala.reflect.generic will be removed", "2.9.1") trait Types { self: Universe =>
-
- abstract class AbsType {
- def typeSymbol: Symbol
- def decl(name: Name): Symbol
-
- /** Is this type completed (i.e. not a lazy type)?
- */
- def isComplete: Boolean = true
-
- /** If this is a lazy type, assign a new type to `sym'. */
- def complete(sym: Symbol) {}
-
- /** Convert toString avoiding infinite recursions by cutting off
- * after `maxTostringRecursions` recursion levels. Uses `safeToString`
- * to produce a string on each level.
- */
- override def toString: String =
- if (tostringRecursions >= maxTostringRecursions)
- "..."
- else
- try {
- tostringRecursions += 1
- safeToString
- } finally {
- tostringRecursions -= 1
- }
-
- /** Method to be implemented in subclasses.
- * Converts this type to a string in calling toString for its parts.
- */
- def safeToString: String = super.toString
- }
-
- type Type >: Null <: AbsType
- type SingletonType >: Null <: Type
-
- val NoType: Type
- val NoPrefix: Type
-
- type ThisType <: SingletonType
- val ThisType: ThisTypeExtractor
-
- type TypeRef <: Type
- val TypeRef: TypeRefExtractor
-
- type SingleType <: SingletonType
- val SingleType: SingleTypeExtractor
-
- type SuperType <: SingletonType
- val SuperType: SuperTypeExtractor
-
- type TypeBounds <: Type
- val TypeBounds: TypeBoundsExtractor
-
- type CompoundType <: Type
-
- type RefinedType <: CompoundType
- val RefinedType: RefinedTypeExtractor
-
- type ClassInfoType <: CompoundType
- val ClassInfoType: ClassInfoTypeExtractor
-
- type ConstantType <: Type
- val ConstantType: ConstantTypeExtractor
-
- type MethodType <: Type
- val MethodType: MethodTypeExtractor
-
- type NullaryMethodType <: Type
- val NullaryMethodType: NullaryMethodTypeExtractor
-
- type PolyType <: Type
- val PolyType: PolyTypeExtractor
-
- type ExistentialType <: Type
- val ExistentialType: ExistentialTypeExtractor
-
- type AnnotatedType <: Type
- val AnnotatedType: AnnotatedTypeExtractor
-
- type LazyType <: Type with AbsLazyType
-
- trait AbsLazyType extends AbsType {
- override def isComplete: Boolean = false
- override def complete(sym: Symbol)
- override def safeToString = "<?>"
- }
-
- abstract class ThisTypeExtractor {
- def apply(sym: Symbol): Type
- def unapply(tpe: ThisType): Option[Symbol]
- }
-
- abstract class SingleTypeExtractor {
- def apply(pre: Type, sym: Symbol): Type
- def unapply(tpe: SingleType): Option[(Type, Symbol)]
- }
-
- abstract class SuperTypeExtractor {
- def apply(thistpe: Type, supertpe: Type): Type
- def unapply(tpe: SuperType): Option[(Type, Type)]
- }
-
- abstract class TypeRefExtractor {
- def apply(pre: Type, sym: Symbol, args: List[Type]): Type
- def unapply(tpe: TypeRef): Option[(Type, Symbol, List[Type])]
- }
-
- abstract class TypeBoundsExtractor {
- def apply(lo: Type, hi: Type): TypeBounds
- def unapply(tpe: TypeBounds): Option[(Type, Type)]
- }
-
- abstract class RefinedTypeExtractor {
- def apply(parents: List[Type], decls: Scope): RefinedType
- def apply(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType
- def unapply(tpe: RefinedType): Option[(List[Type], Scope)]
- }
-
- abstract class ClassInfoTypeExtractor {
- def apply(parents: List[Type], decls: Scope, clazz: Symbol): ClassInfoType
- def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)]
- }
-
- abstract class ConstantTypeExtractor {
- def apply(value: Constant): ConstantType
- def unapply(tpe: ConstantType): Option[Constant]
- }
-
- abstract class MethodTypeExtractor {
- def apply(params: List[Symbol], resultType: Type): MethodType
- def unapply(tpe: MethodType): Option[(List[Symbol], Type)]
- }
-
- abstract class NullaryMethodTypeExtractor {
- def apply(resultType: Type): NullaryMethodType
- def unapply(tpe: NullaryMethodType): Option[(Type)]
- }
-
- abstract class PolyTypeExtractor {
- def apply(typeParams: List[Symbol], resultType: Type): PolyType
- def unapply(tpe: PolyType): Option[(List[Symbol], Type)]
- }
-
- abstract class ExistentialTypeExtractor {
- def apply(quantified: List[Symbol], underlying: Type): ExistentialType
- def unapply(tpe: ExistentialType): Option[(List[Symbol], Type)]
- }
-
- abstract class AnnotatedTypeExtractor {
- def apply(annotations: List[AnnotationInfo], underlying: Type, selfsym: Symbol): AnnotatedType
- def unapply(tpe: AnnotatedType): Option[(List[AnnotationInfo], Type, Symbol)]
- }
-
- /** The maximum number of recursions allowed in toString
- */
- final val maxTostringRecursions = 50
-
- private var tostringRecursions = 0
-}
-
diff --git a/src/library/scala/reflect/generic/UnPickler.scala b/src/library/scala/reflect/generic/UnPickler.scala
deleted file mode 100644
index 745dd1c..0000000
--- a/src/library/scala/reflect/generic/UnPickler.scala
+++ /dev/null
@@ -1,862 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.reflect
-package generic
-
-import java.io.IOException
-import java.lang.Float.intBitsToFloat
-import java.lang.Double.longBitsToDouble
-
-import Flags._
-import PickleFormat._
-import collection.mutable.{HashMap, ListBuffer}
-import annotation.switch
-
-/** @author Martin Odersky
- * @version 1.0
- */
- at deprecated("scala.reflect.generic will be removed", "2.9.1")
-abstract class UnPickler {
-
- val global: Universe
- import global._
-
- /** Unpickle symbol table information descending from a class and/or module root
- * from an array of bytes.
- * @param bytes bytearray from which we unpickle
- * @param offset offset from which unpickling starts
- * @param classroot the top-level class which is unpickled, or NoSymbol if inapplicable
- * @param moduleroot the top-level module which is unpickled, or NoSymbol if inapplicable
- * @param filename filename associated with bytearray, only used for error messages
- */
- def unpickle(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) {
- try {
- scan(bytes, offset, classRoot, moduleRoot, filename)
- } catch {
- case ex: IOException =>
- throw ex
- case ex: Throwable =>
- /*if (settings.debug.value)*/ ex.printStackTrace()
- throw new RuntimeException("error reading Scala signature of "+filename+": "+ex.getMessage())
- }
- }
-
- /** To be implemented in subclasses. Like `unpickle` but without the catch-all error handling.
- */
- def scan(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String)
-
- abstract class Scan(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) extends PickleBuffer(bytes, offset, -1) {
- //println("unpickle " + classRoot + " and " + moduleRoot)//debug
-
- protected def debug = false
-
- checkVersion()
-
- /** A map from entry numbers to array offsets */
- private val index = createIndex
-
- /** A map from entry numbers to symbols, types, or annotations */
- private val entries = new Array[AnyRef](index.length)
-
- /** A map from symbols to their associated `decls' scopes */
- private val symScopes = new HashMap[Symbol, Scope]
-
- //println("unpickled " + classRoot + ":" + classRoot.rawInfo + ", " + moduleRoot + ":" + moduleRoot.rawInfo);//debug
-
- // Unused: left in 2.9.1 to satisfy mima.
- private def run$unused() {
- // read children last, fix for #3951
- val queue = new collection.mutable.ListBuffer[() => Unit]()
- def delay(i: Int, action: => Unit) {
- queue += (() => at(i, {() => action; null}))
- }
-
- for (i <- 0 until index.length) {
- if (isSymbolEntry(i))
- at(i, readSymbol)
- else if (isSymbolAnnotationEntry(i))
- delay(i, readSymbolAnnotation())
- else if (isChildrenEntry(i))
- delay(i, readChildren())
- }
-
- for (action <- queue)
- action()
- }
-
- // Laboriously unrolled for performance.
- def run() {
- var i = 0
- while (i < index.length) {
- if (entries(i) == null && isSymbolEntry(i)) {
- val savedIndex = readIndex
- readIndex = index(i)
- entries(i) = readSymbol()
- readIndex = savedIndex
- }
- i += 1
- }
- // read children last, fix for #3951
- i = 0
- while (i < index.length) {
- if (entries(i) == null) {
- if (isSymbolAnnotationEntry(i)) {
- val savedIndex = readIndex
- readIndex = index(i)
- readSymbolAnnotation()
- readIndex = savedIndex
- }
- else if (isChildrenEntry(i)) {
- val savedIndex = readIndex
- readIndex = index(i)
- readChildren()
- readIndex = savedIndex
- }
- }
- i += 1
- }
- }
-
- private def checkVersion() {
- val major = readNat()
- val minor = readNat()
- if (major != MajorVersion || minor > MinorVersion)
- throw new IOException("Scala signature " + classRoot.decodedName +
- " has wrong version\n expected: " +
- MajorVersion + "." + MinorVersion +
- "\n found: " + major + "." + minor +
- " in "+filename)
- }
-
- /** The `decls' scope associated with given symbol */
- protected def symScope(sym: Symbol) = symScopes.getOrElseUpdate(sym, newScope)
-
- /** Does entry represent an (internal) symbol */
- protected def isSymbolEntry(i: Int): Boolean = {
- val tag = bytes(index(i)).toInt
- (firstSymTag <= tag && tag <= lastSymTag &&
- (tag != CLASSsym || !isRefinementSymbolEntry(i)))
- }
-
- /** Does entry represent an (internal or external) symbol */
- protected def isSymbolRef(i: Int): Boolean = {
- val tag = bytes(index(i))
- (firstSymTag <= tag && tag <= lastExtSymTag)
- }
-
- /** Does entry represent a name? */
- protected def isNameEntry(i: Int): Boolean = {
- val tag = bytes(index(i)).toInt
- tag == TERMname || tag == TYPEname
- }
-
- /** Does entry represent a symbol annotation? */
- protected def isSymbolAnnotationEntry(i: Int): Boolean = {
- val tag = bytes(index(i)).toInt
- tag == SYMANNOT
- }
-
- /** Does the entry represent children of a symbol? */
- protected def isChildrenEntry(i: Int): Boolean = {
- val tag = bytes(index(i)).toInt
- tag == CHILDREN
- }
-
- /** Does entry represent a refinement symbol?
- * pre: Entry is a class symbol
- */
- protected def isRefinementSymbolEntry(i: Int): Boolean = {
- val savedIndex = readIndex
- readIndex = index(i)
- val tag = readByte().toInt
- assert(tag == CLASSsym)
-
- readNat(); // read length
- val result = readNameRef() == tpnme.REFINE_CLASS_NAME
- readIndex = savedIndex
- result
- }
-
- /** If entry at <code>i</code> is undefined, define it by performing
- * operation <code>op</code> with <code>readIndex at start of i'th
- * entry. Restore <code>readIndex</code> afterwards.
- */
- protected def at[T <: AnyRef](i: Int, op: () => T): T = {
- var r = entries(i)
- if (r eq null) {
- val savedIndex = readIndex
- readIndex = index(i)
- r = op()
- assert(entries(i) eq null, entries(i))
- entries(i) = r
- readIndex = savedIndex
- }
- r.asInstanceOf[T]
- }
-
- /** Read a name */
- protected def readName(): Name = {
- val tag = readByte()
- val len = readNat()
- tag match {
- case TERMname => newTermName(bytes, readIndex, len)
- case TYPEname => newTypeName(bytes, readIndex, len)
- case _ => errorBadSignature("bad name tag: " + tag)
- }
- }
-
- /** Read a symbol */
- protected def readSymbol(): Symbol = {
- val tag = readByte()
- val end = readNat() + readIndex
- def atEnd = readIndex == end
-
- def readExtSymbol(): Symbol = {
- val name = readNameRef()
- val owner = if (atEnd) definitions.RootClass else readSymbolRef()
-
- def fromName(name: Name) = mkTermName(name) match {
- case nme.ROOT => definitions.RootClass
- case nme.ROOTPKG => definitions.RootPackage
- case _ =>
- val s = owner.info.decl(name)
- if (tag == EXTref) s else s.moduleClass
- }
- def nestedObjectSymbol: Symbol = {
- // If the owner is overloaded (i.e. a method), it's not possible to select the
- // right member, so return NoSymbol. This can only happen when unpickling a tree.
- // the "case Apply" in readTree() takes care of selecting the correct alternative
- // after parsing the arguments.
- if (owner.isOverloaded)
- return NoSymbol
-
- if (tag == EXTMODCLASSref) {
- val moduleVar = owner.info.decl(nme.moduleVarName(name))
- if (moduleVar.isLazyAccessor)
- return moduleVar.lazyAccessor.lazyAccessor
- }
- NoSymbol
- }
-
- // (1) Try name.
- fromName(name) orElse {
- // (2) Try with expanded name. Can happen if references to private
- // symbols are read from outside: for instance when checking the children
- // of a class. See #1722.
- fromName(nme.expandedName(name, owner)) orElse {
- // (3) Try as a nested object symbol.
- nestedObjectSymbol orElse {
- // (4) Otherwise, fail.
- errorMissingRequirement(name, owner)
- }
- }
- }
- }
-
- tag match {
- case NONEsym => return NoSymbol
- case EXTref | EXTMODCLASSref => return readExtSymbol()
- case _ => ()
- }
-
- // symbols that were pickled with Pickler.writeSymInfo
- val nameref = readNat()
- val name = at(nameref, readName)
- val owner = readSymbolRef()
- val flags = pickledToRawFlags(readLongNat())
- var inforef = readNat()
- val privateWithin =
- if (!isSymbolRef(inforef)) NoSymbol
- else {
- val pw = at(inforef, readSymbol)
- inforef = readNat()
- pw
- }
-
- def isModuleFlag = (flags & MODULE) != 0L
- def isMethodFlag = (flags & METHOD) != 0L
- def isClassRoot = (name == classRoot.name) && (owner == classRoot.owner)
- def isModuleRoot = (name == moduleRoot.name) && (owner == moduleRoot.owner)
-
- def finishSym(sym: Symbol): Symbol = {
- sym.flags = flags & PickledFlags
- sym.privateWithin = privateWithin
- sym.info = (
- if (atEnd) {
- assert(!sym.isSuperAccessor, sym)
- newLazyTypeRef(inforef)
- }
- else {
- assert(sym.isSuperAccessor || sym.isParamAccessor, sym)
- newLazyTypeRefAndAlias(inforef, readNat())
- }
- )
- if (sym.owner.isClass && sym != classRoot && sym != moduleRoot &&
- !sym.isModuleClass && !sym.isRefinementClass && !sym.isTypeParameter && !sym.isExistentiallyBound)
- symScope(sym.owner) enter sym
-
- sym
- }
-
- finishSym(tag match {
- case TYPEsym => owner.newAbstractType(mkTypeName(name))
- case ALIASsym => owner.newAliasType(mkTypeName(name))
- case CLASSsym =>
- val sym = (isClassRoot, isModuleFlag) match {
- case (true, true) => moduleRoot.moduleClass
- case (true, false) => classRoot
- case (false, true) => owner.newModuleClass(mkTypeName(name))
- case (false, false) => owner.newClass(mkTypeName(name))
- }
- if (!atEnd)
- sym.typeOfThis = newLazyTypeRef(readNat())
-
- sym
- case MODULEsym =>
- val clazz = at(inforef, () => readType()).typeSymbol // after the NMT_TRANSITION period, we can leave off the () => ... ()
- if (isModuleRoot) moduleRoot
- else {
- val m = owner.newModule(name, clazz)
- clazz.sourceModule = m
- m
- }
- case VALsym =>
- if (isModuleRoot) { assert(false); NoSymbol }
- else if (isMethodFlag) owner.newMethod(name)
- else owner.newValue(name)
-
- case _ =>
- errorBadSignature("bad symbol tag: " + tag)
- })
- }
-
- /** Read a type
- *
- * @param forceProperType is used to ease the transition to NullaryMethodTypes (commentmarker: NMT_TRANSITION)
- * the flag say that a type of kind * is expected, so that PolyType(tps, restpe) can be disambiguated to PolyType(tps, NullaryMethodType(restpe))
- * (if restpe is not a ClassInfoType, a MethodType or a NullaryMethodType, which leaves TypeRef/SingletonType -- the latter would make the polytype a type constructor)
- */
- protected def readType(forceProperType: Boolean = false): Type = {
- val tag = readByte()
- val end = readNat() + readIndex
- (tag: @switch) match {
- case NOtpe =>
- NoType
- case NOPREFIXtpe =>
- NoPrefix
- case THIStpe =>
- ThisType(readSymbolRef())
- case SINGLEtpe =>
- SingleType(readTypeRef(), readSymbolRef()) // !!! was singleType
- case SUPERtpe =>
- val thistpe = readTypeRef()
- val supertpe = readTypeRef()
- SuperType(thistpe, supertpe)
- case CONSTANTtpe =>
- ConstantType(readConstantRef())
- case TYPEREFtpe =>
- val pre = readTypeRef()
- val sym = readSymbolRef()
- var args = until(end, readTypeRef)
- TypeRef(pre, sym, args)
- case TYPEBOUNDStpe =>
- TypeBounds(readTypeRef(), readTypeRef())
- case REFINEDtpe =>
- val clazz = readSymbolRef()
- RefinedType(until(end, readTypeRef), symScope(clazz), clazz)
- case CLASSINFOtpe =>
- val clazz = readSymbolRef()
- ClassInfoType(until(end, readTypeRef), symScope(clazz), clazz)
- case METHODtpe | IMPLICITMETHODtpe =>
- val restpe = readTypeRef()
- val params = until(end, readSymbolRef)
- // if the method is overloaded, the params cannot be determined (see readSymbol) => return NoType.
- // Only happen for trees, "case Apply" in readTree() takes care of selecting the correct
- // alternative after parsing the arguments.
- if (params.contains(NoSymbol) || restpe == NoType) NoType
- else MethodType(params, restpe)
- case POLYtpe =>
- val restpe = readTypeRef()
- val typeParams = until(end, readSymbolRef)
- if(typeParams nonEmpty) {
- // NMT_TRANSITION: old class files denoted a polymorphic nullary method as PolyType(tps, restpe), we now require PolyType(tps, NullaryMethodType(restpe))
- // when a type of kind * is expected (forceProperType is true), we know restpe should be wrapped in a NullaryMethodType (if it wasn't suitably wrapped yet)
- def transitionNMT(restpe: Type) = {
- val resTpeCls = restpe.getClass.toString // what's uglier than isInstanceOf? right! -- isInstanceOf does not work since the concrete types are defined in the compiler (not in scope here)
- if(forceProperType /*&& pickleformat < 2.9 */ && !(resTpeCls.endsWith("MethodType"))) { assert(!resTpeCls.contains("ClassInfoType"))
- NullaryMethodType(restpe) }
- else restpe
- }
- PolyType(typeParams, transitionNMT(restpe))
- }
- else
- NullaryMethodType(restpe)
- case EXISTENTIALtpe =>
- val restpe = readTypeRef()
- val tparams = until(end, readSymbolRef)
- // binary compatibility: in 2.9.x, Symbol doesn't have setFlag
- tparams foreach (x => x.asInstanceOf[{ def setFlag(mask: Long): this.type }] setFlag EXISTENTIAL)
- ExistentialType(tparams, restpe)
- case ANNOTATEDtpe =>
- var typeRef = readNat()
- val selfsym = if (isSymbolRef(typeRef)) {
- val s = at(typeRef, readSymbol)
- typeRef = readNat()
- s
- } else NoSymbol // selfsym can go.
- val tp = at(typeRef, () => readType(forceProperType)) // NMT_TRANSITION
- val annots = until(end, readAnnotationRef)
- if (selfsym == NoSymbol) AnnotatedType(annots, tp, selfsym)
- else tp
- case _ =>
- noSuchTypeTag(tag, end)
- }
- }
-
- def noSuchTypeTag(tag: Int, end: Int): Type =
- errorBadSignature("bad type tag: " + tag)
-
- /** Read a constant */
- protected def readConstant(): Constant = {
- val tag = readByte().toInt
- val len = readNat()
- (tag: @switch) match {
- case LITERALunit => Constant(())
- case LITERALboolean => Constant(readLong(len) != 0L)
- case LITERALbyte => Constant(readLong(len).toByte)
- case LITERALshort => Constant(readLong(len).toShort)
- case LITERALchar => Constant(readLong(len).toChar)
- case LITERALint => Constant(readLong(len).toInt)
- case LITERALlong => Constant(readLong(len))
- case LITERALfloat => Constant(intBitsToFloat(readLong(len).toInt))
- case LITERALdouble => Constant(longBitsToDouble(readLong(len)))
- case LITERALstring => Constant(readNameRef().toString())
- case LITERALnull => Constant(null)
- case LITERALclass => Constant(readTypeRef())
- case LITERALenum => Constant(readSymbolRef())
- case _ => noSuchConstantTag(tag, len)
- }
- }
-
- def noSuchConstantTag(tag: Int, len: Int): Constant =
- errorBadSignature("bad constant tag: " + tag)
-
- /** Read children and store them into the corresponding symbol.
- */
- protected def readChildren() {
- val tag = readByte()
- assert(tag == CHILDREN)
- val end = readNat() + readIndex
- val target = readSymbolRef()
- while (readIndex != end) target addChild readSymbolRef()
- }
-
- /** Read an annotation argument, which is pickled either
- * as a Constant or a Tree.
- */
- protected def readAnnotArg(i: Int): Tree = bytes(index(i)) match {
- case TREE => at(i, readTree)
- case _ =>
- val const = at(i, readConstant)
- Literal(const) setType const.tpe
- }
-
- /** Read a ClassfileAnnotArg (argument to a classfile annotation)
- */
- private def readArrayAnnot() = {
- readByte() // skip the `annotargarray` tag
- val end = readNat() + readIndex
- until(end, () => readClassfileAnnotArg(readNat())).toArray(classfileAnnotArgManifest)
- }
- protected def readClassfileAnnotArg(i: Int): ClassfileAnnotArg = bytes(index(i)) match {
- case ANNOTINFO => NestedAnnotArg(at(i, readAnnotation))
- case ANNOTARGARRAY => at(i, () => ArrayAnnotArg(readArrayAnnot()))
- case _ => LiteralAnnotArg(at(i, readConstant))
- }
-
- /** Read an AnnotationInfo. Not to be called directly, use
- * readAnnotation or readSymbolAnnotation
- */
- protected def readAnnotationInfo(end: Int): AnnotationInfo = {
- val atp = readTypeRef()
- val args = new ListBuffer[Tree]
- val assocs = new ListBuffer[(Name, ClassfileAnnotArg)]
- while (readIndex != end) {
- val argref = readNat()
- if (isNameEntry(argref)) {
- val name = at(argref, readName)
- val arg = readClassfileAnnotArg(readNat())
- assocs += ((name, arg))
- }
- else
- args += readAnnotArg(argref)
- }
- AnnotationInfo(atp, args.toList, assocs.toList)
- }
-
- /** Read an annotation and as a side effect store it into
- * the symbol it requests. Called at top-level, for all
- * (symbol, annotInfo) entries. */
- protected def readSymbolAnnotation() {
- val tag = readByte()
- if (tag != SYMANNOT)
- errorBadSignature("symbol annotation expected ("+ tag +")")
- val end = readNat() + readIndex
- val target = readSymbolRef()
- target.addAnnotation(readAnnotationInfo(end))
- }
-
- /** Read an annotation and return it. Used when unpickling
- * an ANNOTATED(WSELF)tpe or a NestedAnnotArg */
- protected def readAnnotation(): AnnotationInfo = {
- val tag = readByte()
- if (tag != ANNOTINFO)
- errorBadSignature("annotation expected (" + tag + ")")
- val end = readNat() + readIndex
- readAnnotationInfo(end)
- }
-
- /* Read an abstract syntax tree */
- protected def readTree(): Tree = {
- val outerTag = readByte()
- if (outerTag != TREE)
- errorBadSignature("tree expected (" + outerTag + ")")
- val end = readNat() + readIndex
- val tag = readByte()
- val tpe = if (tag == EMPTYtree) NoType else readTypeRef()
-
- // Set by the three functions to follow. If symbol is non-null
- // after the the new tree 't' has been created, t has its Symbol
- // set to symbol; and it always has its Type set to tpe.
- var symbol: Symbol = null
- var mods: Modifiers = null
- var name: Name = null
-
- /** Read a Symbol, Modifiers, and a Name */
- def setSymModsName() {
- symbol = readSymbolRef()
- mods = readModifiersRef()
- name = readNameRef()
- }
- /** Read a Symbol and a Name */
- def setSymName() {
- symbol = readSymbolRef()
- name = readNameRef()
- }
- /** Read a Symbol */
- def setSym() {
- symbol = readSymbolRef()
- }
-
- val t = tag match {
- case EMPTYtree =>
- EmptyTree
-
- case PACKAGEtree =>
- setSym()
- val pid = readTreeRef().asInstanceOf[RefTree]
- val stats = until(end, readTreeRef)
- PackageDef(pid, stats)
-
- case CLASStree =>
- setSymModsName()
- val impl = readTemplateRef()
- val tparams = until(end, readTypeDefRef)
- ClassDef(mods, mkTypeName(name), tparams, impl)
-
- case MODULEtree =>
- setSymModsName()
- ModuleDef(mods, name, readTemplateRef())
-
- case VALDEFtree =>
- setSymModsName()
- val tpt = readTreeRef()
- val rhs = readTreeRef()
- ValDef(mods, name, tpt, rhs)
-
- case DEFDEFtree =>
- setSymModsName()
- val tparams = times(readNat(), readTypeDefRef)
- val vparamss = times(readNat(), () => times(readNat(), readValDefRef))
- val tpt = readTreeRef()
- val rhs = readTreeRef()
- DefDef(mods, name, tparams, vparamss, tpt, rhs)
-
- case TYPEDEFtree =>
- setSymModsName()
- val rhs = readTreeRef()
- val tparams = until(end, readTypeDefRef)
- TypeDef(mods, mkTypeName(name), tparams, rhs)
-
- case LABELtree =>
- setSymName()
- val rhs = readTreeRef()
- val params = until(end, readIdentRef)
- LabelDef(name, params, rhs)
-
- case IMPORTtree =>
- setSym()
- val expr = readTreeRef()
- val selectors = until(end, () => {
- val from = readNameRef()
- val to = readNameRef()
- ImportSelector(from, -1, to, -1)
- })
-
- Import(expr, selectors)
-
- case TEMPLATEtree =>
- setSym()
- val parents = times(readNat(), readTreeRef)
- val self = readValDefRef()
- val body = until(end, readTreeRef)
-
- Template(parents, self, body)
-
- case BLOCKtree =>
- val expr = readTreeRef()
- val stats = until(end, readTreeRef)
- Block(stats, expr)
-
- case CASEtree =>
- val pat = readTreeRef()
- val guard = readTreeRef()
- val body = readTreeRef()
- CaseDef(pat, guard, body)
-
- case ALTERNATIVEtree =>
- Alternative(until(end, readTreeRef))
-
- case STARtree =>
- Star(readTreeRef())
-
- case BINDtree =>
- setSymName()
- Bind(name, readTreeRef())
-
- case UNAPPLYtree =>
- val fun = readTreeRef()
- val args = until(end, readTreeRef)
- UnApply(fun, args)
-
- case ARRAYVALUEtree =>
- val elemtpt = readTreeRef()
- val trees = until(end, readTreeRef)
- ArrayValue(elemtpt, trees)
-
- case FUNCTIONtree =>
- setSym()
- val body = readTreeRef()
- val vparams = until(end, readValDefRef)
- Function(vparams, body)
-
- case ASSIGNtree =>
- val lhs = readTreeRef()
- val rhs = readTreeRef()
- Assign(lhs, rhs)
-
- case IFtree =>
- val cond = readTreeRef()
- val thenp = readTreeRef()
- val elsep = readTreeRef()
- If(cond, thenp, elsep)
-
- case MATCHtree =>
- val selector = readTreeRef()
- val cases = until(end, readCaseDefRef)
- Match(selector, cases)
-
- case RETURNtree =>
- setSym()
- Return(readTreeRef())
-
- case TREtree =>
- val block = readTreeRef()
- val finalizer = readTreeRef()
- val catches = until(end, readCaseDefRef)
- Try(block, catches, finalizer)
-
- case THROWtree =>
- Throw(readTreeRef())
-
- case NEWtree =>
- New(readTreeRef())
-
- case TYPEDtree =>
- val expr = readTreeRef()
- val tpt = readTreeRef()
- Typed(expr, tpt)
-
- case TYPEAPPLYtree =>
- val fun = readTreeRef()
- val args = until(end, readTreeRef)
- TypeApply(fun, args)
-
- case APPLYtree =>
- val fun = readTreeRef()
- val args = until(end, readTreeRef)
- if (fun.symbol.isOverloaded) {
- fun.setType(fun.symbol.info)
- inferMethodAlternative(fun, args map (_.tpe), tpe)
- }
- Apply(fun, args)
-
- case APPLYDYNAMICtree =>
- setSym()
- val qual = readTreeRef()
- val args = until(end, readTreeRef)
- ApplyDynamic(qual, args)
-
- case SUPERtree =>
- setSym()
- val qual = readTreeRef()
- val mix = readTypeNameRef()
- Super(qual, mix)
-
- case THIStree =>
- setSym()
- This(readTypeNameRef())
-
- case SELECTtree =>
- setSym()
- val qualifier = readTreeRef()
- val selector = readNameRef()
- Select(qualifier, selector)
-
- case IDENTtree =>
- setSymName()
- Ident(name)
-
- case LITERALtree =>
- Literal(readConstantRef())
-
- case TYPEtree =>
- TypeTree()
-
- case ANNOTATEDtree =>
- val annot = readTreeRef()
- val arg = readTreeRef()
- Annotated(annot, arg)
-
- case SINGLETONTYPEtree =>
- SingletonTypeTree(readTreeRef())
-
- case SELECTFROMTYPEtree =>
- val qualifier = readTreeRef()
- val selector = readTypeNameRef()
- SelectFromTypeTree(qualifier, selector)
-
- case COMPOUNDTYPEtree =>
- CompoundTypeTree(readTemplateRef())
-
- case APPLIEDTYPEtree =>
- val tpt = readTreeRef()
- val args = until(end, readTreeRef)
- AppliedTypeTree(tpt, args)
-
- case TYPEBOUNDStree =>
- val lo = readTreeRef()
- val hi = readTreeRef()
- TypeBoundsTree(lo, hi)
-
- case EXISTENTIALTYPEtree =>
- val tpt = readTreeRef()
- val whereClauses = until(end, readTreeRef)
- ExistentialTypeTree(tpt, whereClauses)
-
- case _ =>
- noSuchTreeTag(tag, end)
- }
-
- if (symbol == null) t setType tpe
- else t setSymbol symbol setType tpe
- }
-
- def noSuchTreeTag(tag: Int, end: Int) =
- errorBadSignature("unknown tree type (" + tag + ")")
-
- def readModifiers(): Modifiers = {
- val tag = readNat()
- if (tag != MODIFIERS)
- errorBadSignature("expected a modifiers tag (" + tag + ")")
- val end = readNat() + readIndex
- val pflagsHi = readNat()
- val pflagsLo = readNat()
- val pflags = (pflagsHi.toLong << 32) + pflagsLo
- val flags = pickledToRawFlags(pflags)
- val privateWithin = readNameRef()
- Modifiers(flags, privateWithin, Nil, Map.empty)
- }
-
- /* Read a reference to a pickled item */
- protected def readNameRef(): Name = at(readNat(), readName)
- protected def readSymbolRef(): Symbol = at(readNat(), readSymbol)
- protected def readTypeRef(): Type = at(readNat(), () => readType()) // after the NMT_TRANSITION period, we can leave off the () => ... ()
- protected def readConstantRef(): Constant = at(readNat(), readConstant)
- protected def readAnnotationRef(): AnnotationInfo = at(readNat(), readAnnotation)
- protected def readModifiersRef(): Modifiers = at(readNat(), readModifiers)
- protected def readTreeRef(): Tree = at(readNat(), readTree)
-
- protected def readTypeNameRef(): TypeName = mkTypeName(readNameRef())
- protected def readTermNameRef(): TermName = mkTermName(readNameRef())
-
- protected def readTemplateRef(): Template =
- readTreeRef() match {
- case templ:Template => templ
- case other =>
- errorBadSignature("expected a template (" + other + ")")
- }
- protected def readCaseDefRef(): CaseDef =
- readTreeRef() match {
- case tree:CaseDef => tree
- case other =>
- errorBadSignature("expected a case def (" + other + ")")
- }
- protected def readValDefRef(): ValDef =
- readTreeRef() match {
- case tree:ValDef => tree
- case other =>
- errorBadSignature("expected a ValDef (" + other + ")")
- }
- protected def readIdentRef(): Ident =
- readTreeRef() match {
- case tree:Ident => tree
- case other =>
- errorBadSignature("expected an Ident (" + other + ")")
- }
- protected def readTypeDefRef(): TypeDef =
- readTreeRef() match {
- case tree:TypeDef => tree
- case other =>
- errorBadSignature("expected an TypeDef (" + other + ")")
- }
-
- protected def errorBadSignature(msg: String) =
- throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg)
-
- protected def errorMissingRequirement(msg: String): Nothing =
- if (debug) errorBadSignature(msg)
- else throw new IOException("class file needed by "+classRoot.name+" is missing.\n"+msg)
-
- protected def errorMissingRequirement(name: Name, owner: Symbol): Nothing =
- errorMissingRequirement("reference " + NameTransformer.decode(name.toString) + " of " + owner.tpe + " refers to nonexisting symbol.")
-
- /** pre: `fun` points to a symbol with an overloaded type.
- * Selects the overloaded alternative of `fun` which best matches given
- * argument types `argtpes` and result type `restpe`. Stores this alternative as
- * the symbol of `fun`.
- */
- def inferMethodAlternative(fun: Tree, argtpes: List[Type], restpe: Type)
-
- /** Create a lazy type which when completed returns type at index `i`. */
- def newLazyTypeRef(i: Int): LazyType
-
- /** Create a lazy type which when completed returns type at index `i` and sets alias
- * of completed symbol to symbol at index `j`
- */
- def newLazyTypeRefAndAlias(i: Int, j: Int): LazyType
- }
-}
diff --git a/src/library/scala/reflect/generic/Universe.scala b/src/library/scala/reflect/generic/Universe.scala
deleted file mode 100644
index 4bc7004..0000000
--- a/src/library/scala/reflect/generic/Universe.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-package scala.reflect
-package generic
-
- at deprecated("scala.reflect.generic will be removed", "2.9.1")
-abstract class Universe extends Symbols
- with Types
- with Constants
- with Scopes
- with Names
- with StdNames
- with Trees
- with AnnotationInfos
- with StandardDefinitions {
- type Position
- val NoPosition: Position
-}
-
diff --git a/src/library/scala/reflect/macros/internal/macroImpl.scala b/src/library/scala/reflect/macros/internal/macroImpl.scala
new file mode 100644
index 0000000..b281fb7
--- /dev/null
+++ b/src/library/scala/reflect/macros/internal/macroImpl.scala
@@ -0,0 +1,18 @@
+package scala.reflect.macros
+package internal
+
+/** Links macro definitions with their implementation.
+ * This is necessary to preserve macro def -> macro impl links between compilation runs.
+ *
+ * More precisely, after typechecking right-hand side of a macro def
+ * `typedMacroBody` slaps `macroImpl` annotation onto the macro def
+ * with the result of typechecking as a sole parameter.
+ *
+ * As an unfortunate consequence, this annotation must be defined in scala-library.jar,
+ * because anyone (even those programmers who compile their programs with only scala-library on classpath)
+ * must be able to define macros.
+ *
+ * To lessen the weirdness we define this annotation as `private[scala]`.
+ * It will not prevent pickling, but it will prevent application developers (and scaladocs) from seeing the annotation.
+ */
+private[scala] class macroImpl(val referenceToMacroImpl: Any) extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala
new file mode 100644
index 0000000..10e6d7d
--- /dev/null
+++ b/src/library/scala/reflect/package.scala
@@ -0,0 +1,68 @@
+package scala
+
+package object reflect {
+
+ // in the new scheme of things ClassManifests are aliased to ClassTags
+ // this is done because we want `toArray` in collections work with ClassTags
+ // but changing it to use the ClassTag context bound without aliasing ClassManifest
+ // will break everyone who subclasses and overrides `toArray`
+ // luckily for us, aliasing doesn't hamper backward compatibility, so it's ideal in this situation
+ // I wish we could do the same for Manifests and TypeTags though
+
+ // note, by the way, that we don't touch ClassManifest the object
+ // because its Byte, Short and so on factory fields are incompatible with ClassTag's
+
+ /** A `ClassManifest[T]` is an opaque descriptor for type `T`.
+ * It is used by the compiler to preserve information necessary
+ * for instantiating `Arrays` in those cases where the element type
+ * is unknown at compile time.
+ *
+ * The type-relation operators make an effort to present a more accurate
+ * picture than can be realized with erased types, but they should not be
+ * relied upon to give correct answers. In particular they are likely to
+ * be wrong when variance is involved or when a subtype has a different
+ * number of type arguments than a supertype.
+ */
+ @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+ @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.")
+ type ClassManifest[T] = scala.reflect.ClassTag[T]
+
+ /** The object `ClassManifest` defines factory methods for manifests.
+ * It is intended for use by the compiler and should not be used in client code.
+ */
+ @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+ val ClassManifest = ClassManifestFactory
+
+ /** The object `Manifest` defines factory methods for manifests.
+ * It is intended for use by the compiler and should not be used in client code.
+ */
+ // TODO undeprecated until Scala reflection becomes non-experimental
+ // @deprecated("Use scala.reflect.ClassTag (to capture erasures), scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
+ val Manifest = ManifestFactory
+
+ def classTag[T](implicit ctag: ClassTag[T]) = ctag
+
+ // anchor for the class tag materialization macro emitted during tag materialization in Implicits.scala
+ // implementation is hardwired into `scala.reflect.reify.Taggers`
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`
+ // todo. once we have implicit macros for tag generation, we can remove this anchor
+ private[scala] def materializeClassTag[T](): ClassTag[T] = ??? // macro
+
+ @deprecated("Use `@scala.beans.BeanDescription` instead", "2.10.0")
+ type BeanDescription = scala.beans.BeanDescription
+ @deprecated("Use `@scala.beans.BeanDisplayName` instead", "2.10.0")
+ type BeanDisplayName = scala.beans.BeanDisplayName
+ @deprecated("Use `@scala.beans.BeanInfo` instead", "2.10.0")
+ type BeanInfo = scala.beans.BeanInfo
+ @deprecated("Use `@scala.beans.BeanInfoSkip` instead", "2.10.0")
+ type BeanInfoSkip = scala.beans.BeanInfoSkip
+ @deprecated("Use `@scala.beans.BeanProperty` instead", "2.10.0")
+ type BeanProperty = scala.beans.BeanProperty
+ @deprecated("Use `@scala.beans.BooleanBeanProperty` instead", "2.10.0")
+ type BooleanBeanProperty = scala.beans.BooleanBeanProperty
+ @deprecated("Use `@scala.beans.ScalaBeanInfo` instead", "2.10.0")
+ type ScalaBeanInfo = scala.beans.ScalaBeanInfo
+}
+
+/** An exception that indicates an error during Scala reflection */
+case class ScalaReflectionException(msg: String) extends Exception(msg)
diff --git a/src/library/scala/remote.scala b/src/library/scala/remote.scala
index 0180510..4b16651 100644
--- a/src/library/scala/remote.scala
+++ b/src/library/scala/remote.scala
@@ -1,16 +1,27 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala
/**
* An annotation that designates the class to which it is applied as remotable.
+ *
+ * For instance, the Scala code
+ * {{{
+ * @remote trait Hello {
+ * def sayHello(): String
+ * }
+ * }}}
+ * is equivalent to the following Java code:
+ * {{{
+ * public interface Hello extends java.rmi.Remote {
+ * String sayHello() throws java.rmi.RemoteException;
+ * }
+ * }}}
*/
-class remote extends annotation.StaticAnnotation {}
+class remote extends scala.annotation.StaticAnnotation {}
diff --git a/src/library/scala/runtime/AbstractFunction0.scala b/src/library/scala/runtime/AbstractFunction0.scala
index c4ce0eb..1b351c6 100644
--- a/src/library/scala/runtime/AbstractFunction0.scala
+++ b/src/library/scala/runtime/AbstractFunction0.scala
@@ -9,6 +9,6 @@
package scala.runtime
-abstract class AbstractFunction0[@specialized +R] extends Function0[R] {
+abstract class AbstractFunction0[@specialized(Specializable.Primitives) +R] extends Function0[R] {
}
diff --git a/src/library/scala/runtime/AbstractFunction1.scala b/src/library/scala/runtime/AbstractFunction1.scala
index a9e5e90..a68a82e 100644
--- a/src/library/scala/runtime/AbstractFunction1.scala
+++ b/src/library/scala/runtime/AbstractFunction1.scala
@@ -9,6 +9,6 @@
package scala.runtime
-abstract class AbstractFunction1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function1[T1, R] {
+abstract class AbstractFunction1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double/*, scala.AnyRef*/) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double/*, scala.AnyRef*/) +R] extends Function1[T1, R] {
}
diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala
new file mode 100644
index 0000000..57f8e26
--- /dev/null
+++ b/src/library/scala/runtime/AbstractPartialFunction.scala
@@ -0,0 +1,34 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.runtime
+
+/** `AbstractPartialFunction` reformulates all operations of its supertrait `PartialFunction`
+ * in terms of `isDefinedAt` and `applyOrElse`.
+ *
+ * This allows more efficient implementations in many cases:
+ * - optimized `orElse` method supports chained `orElse` in linear time,
+ * and with no slow-down if the `orElse` part is not needed.
+ * - optimized `lift` method helps to avoid double evaluation of pattern matchers & guards
+ * of partial function literals.
+ *
+ * This trait is used as a basis for implementation of all partial function literals.
+ *
+ * @author Pavel Pavlov
+ * @since 2.10
+ */
+abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends Function1[T1, R] with PartialFunction[T1, R] { self =>
+ // this method must be overridden for better performance,
+ // for backwards compatibility, fall back to the one inherited from PartialFunction
+ // this assumes the old-school partial functions override the apply method, though
+ // override def applyOrElse[A1 <: T1, B1 >: R](x: A1, default: A1 => B1): B1 = ???
+
+ // probably okay to make final since classes compiled before have overridden against the old version of AbstractPartialFunction
+ // let's not make it final so as not to confuse anyone
+ /*final*/ def apply(x: T1): R = applyOrElse(x, PartialFunction.empty)
+}
diff --git a/src/library/scala/runtime/AnyValCompanion.scala b/src/library/scala/runtime/AnyValCompanion.scala
deleted file mode 100644
index fdb0e8f..0000000
--- a/src/library/scala/runtime/AnyValCompanion.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.runtime
-
-/** See scala.AnyValCompanion.
- */
- at deprecated("Use scala.AnyValCompanion instead", "2.8.0")
-private[scala] trait AnyValCompanion extends scala.AnyValCompanion { }
\ No newline at end of file
diff --git a/src/library/scala/runtime/ArrayRuntime.java b/src/library/scala/runtime/ArrayRuntime.java
index b382fdf..1a0f748 100644
--- a/src/library/scala/runtime/ArrayRuntime.java
+++ b/src/library/scala/runtime/ArrayRuntime.java
@@ -1,3 +1,13 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
package scala.runtime;
/**
diff --git a/src/library/scala/runtime/BooleanRef.java b/src/library/scala/runtime/BooleanRef.java
index 3ed6aac..889db31 100644
--- a/src/library/scala/runtime/BooleanRef.java
+++ b/src/library/scala/runtime/BooleanRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/Boxed.scala b/src/library/scala/runtime/Boxed.scala
index 4570606..8b53107 100644
--- a/src/library/scala/runtime/Boxed.scala
+++ b/src/library/scala/runtime/Boxed.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/BoxedUnit.java b/src/library/scala/runtime/BoxedUnit.java
index f65284e..f436b7c 100644
--- a/src/library/scala/runtime/BoxedUnit.java
+++ b/src/library/scala/runtime/BoxedUnit.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -17,6 +17,8 @@ public final class BoxedUnit implements java.io.Serializable {
public final static BoxedUnit UNIT = new BoxedUnit();
public final static Class<Void> TYPE = java.lang.Void.TYPE;
+
+ private Object readResolve() { return UNIT; }
private BoxedUnit() { }
diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java
index c726c56..3504c57 100644
--- a/src/library/scala/runtime/BoxesRunTime.java
+++ b/src/library/scala/runtime/BoxesRunTime.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -31,14 +31,16 @@ public final class BoxesRunTime
{
private static final int CHAR = 0, BYTE = 1, SHORT = 2, INT = 3, LONG = 4, FLOAT = 5, DOUBLE = 6, OTHER = 7;
+ /** We don't need to return BYTE and SHORT, as everything which might
+ * care widens to INT.
+ */
private static int typeCode(Object a) {
if (a instanceof java.lang.Integer) return INT;
- if (a instanceof java.lang.Byte) return BYTE;
- if (a instanceof java.lang.Character) return CHAR;
- if (a instanceof java.lang.Long) return LONG;
if (a instanceof java.lang.Double) return DOUBLE;
- if (a instanceof java.lang.Short) return SHORT;
+ if (a instanceof java.lang.Long) return LONG;
+ if (a instanceof java.lang.Character) return CHAR;
if (a instanceof java.lang.Float) return FLOAT;
+ if ((a instanceof java.lang.Byte) || (a instanceof java.lang.Short)) return INT;
return OTHER;
}
@@ -119,15 +121,6 @@ public final class BoxesRunTime
/* COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON */
- private static int eqTypeCode(Number a) {
- if ((a instanceof java.lang.Integer) || (a instanceof java.lang.Byte)) return INT;
- if (a instanceof java.lang.Long) return LONG;
- if (a instanceof java.lang.Double) return DOUBLE;
- if (a instanceof java.lang.Short) return INT;
- if (a instanceof java.lang.Float) return FLOAT;
- return OTHER;
- }
-
public static boolean equals(Object x, Object y) {
if (x == y) return true;
return equals2(x, y);
@@ -159,8 +152,8 @@ public final class BoxesRunTime
}
public static boolean equalsNumNum(java.lang.Number xn, java.lang.Number yn) {
- int xcode = eqTypeCode(xn);
- int ycode = eqTypeCode(yn);
+ int xcode = typeCode(xn);
+ int ycode = typeCode(yn);
switch (ycode > xcode ? ycode : xcode) {
case INT:
return xn.intValue() == yn.intValue();
@@ -192,8 +185,11 @@ public final class BoxesRunTime
}
private static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) {
+ if (yc == null)
+ return xn == null;
+
char ch = yc.charValue();
- switch (eqTypeCode(xn)) {
+ switch (typeCode(xn)) {
case INT:
return xn.intValue() == ch;
case LONG:
@@ -203,9 +199,6 @@ public final class BoxesRunTime
case DOUBLE:
return xn.doubleValue() == ch;
default:
- if (xn == null)
- return yc == null;
-
return xn.equals(yc);
}
}
@@ -235,7 +228,7 @@ public final class BoxesRunTime
* as yet have not.
*
* Note: Among primitives, Float.NaN != Float.NaN, but the boxed
- * verisons are equal. This still needs reconciliation.
+ * versions are equal. This still needs reconciliation.
*/
public static int hashFromLong(java.lang.Long n) {
int iv = n.intValue();
@@ -249,6 +242,9 @@ public final class BoxesRunTime
long lv = n.longValue();
if (lv == dv) return java.lang.Long.valueOf(lv).hashCode();
+
+ float fv = n.floatValue();
+ if (fv == dv) return java.lang.Float.valueOf(fv).hashCode();
else return n.hashCode();
}
public static int hashFromFloat(java.lang.Float n) {
@@ -271,6 +267,31 @@ public final class BoxesRunTime
else return a.hashCode();
}
+ private static int unboxCharOrInt(Object arg1, int code) {
+ if (code == CHAR)
+ return ((java.lang.Character) arg1).charValue();
+ else
+ return ((java.lang.Number) arg1).intValue();
+ }
+ private static long unboxCharOrLong(Object arg1, int code) {
+ if (code == CHAR)
+ return ((java.lang.Character) arg1).charValue();
+ else
+ return ((java.lang.Number) arg1).longValue();
+ }
+ private static float unboxCharOrFloat(Object arg1, int code) {
+ if (code == CHAR)
+ return ((java.lang.Character) arg1).charValue();
+ else
+ return ((java.lang.Number) arg1).floatValue();
+ }
+ private static double unboxCharOrDouble(Object arg1, int code) {
+ if (code == CHAR)
+ return ((java.lang.Character) arg1).charValue();
+ else
+ return ((java.lang.Number) arg1).doubleValue();
+ }
+
/* OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS */
/** arg1 + arg2 */
@@ -279,24 +300,16 @@ public final class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 + val2);
+ return boxToInteger(unboxCharOrInt(arg1, code1) + unboxCharOrInt(arg2, code2));
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 + val2);
+ return boxToLong(unboxCharOrLong(arg1, code1) + unboxCharOrLong(arg2, code2));
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
- return boxToFloat(val1 + val2);
+ return boxToFloat(unboxCharOrFloat(arg1, code1) + unboxCharOrFloat(arg2, code2));
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
- return boxToDouble(val1 + val2);
+ return boxToDouble(unboxCharOrDouble(arg1, code1) + unboxCharOrDouble(arg2, code2));
}
throw new NoSuchMethodException();
}
@@ -307,24 +320,16 @@ public final class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 - val2);
+ return boxToInteger(unboxCharOrInt(arg1, code1) - unboxCharOrInt(arg2, code2));
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 - val2);
+ return boxToLong(unboxCharOrLong(arg1, code1) - unboxCharOrLong(arg2, code2));
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
- return boxToFloat(val1 - val2);
+ return boxToFloat(unboxCharOrFloat(arg1, code1) - unboxCharOrFloat(arg2, code2));
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
- return boxToDouble(val1 - val2);
+ return boxToDouble(unboxCharOrDouble(arg1, code1) - unboxCharOrDouble(arg2, code2));
}
throw new NoSuchMethodException();
}
@@ -335,24 +340,16 @@ public final class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 * val2);
+ return boxToInteger(unboxCharOrInt(arg1, code1) * unboxCharOrInt(arg2, code2));
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 * val2);
+ return boxToLong(unboxCharOrLong(arg1, code1) * unboxCharOrLong(arg2, code2));
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
- return boxToFloat(val1 * val2);
+ return boxToFloat(unboxCharOrFloat(arg1, code1) * unboxCharOrFloat(arg2, code2));
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
- return boxToDouble(val1 * val2);
+ return boxToDouble(unboxCharOrDouble(arg1, code1) * unboxCharOrDouble(arg2, code2));
}
throw new NoSuchMethodException();
}
@@ -362,26 +359,16 @@ public final class BoxesRunTime
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
- if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 / val2);
- }
- if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 / val2);
- }
- if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
- return boxToFloat(val1 / val2);
- }
- if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
- return boxToDouble(val1 / val2);
- }
+
+ if (maxcode <= INT)
+ return boxToInteger(unboxCharOrInt(arg1, code1) / unboxCharOrInt(arg2, code2));
+ if (maxcode <= LONG)
+ return boxToLong(unboxCharOrLong(arg1, code1) / unboxCharOrLong(arg2, code2));
+ if (maxcode <= FLOAT)
+ return boxToFloat(unboxCharOrFloat(arg1, code1) / unboxCharOrFloat(arg2, code2));
+ if (maxcode <= DOUBLE)
+ return boxToDouble(unboxCharOrDouble(arg1, code1) / unboxCharOrDouble(arg2, code2));
+
throw new NoSuchMethodException();
}
@@ -390,26 +377,16 @@ public final class BoxesRunTime
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
- if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 % val2);
- }
- if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 % val2);
- }
- if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
- return boxToFloat(val1 % val2);
- }
- if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
- return boxToDouble(val1 % val2);
- }
+
+ if (maxcode <= INT)
+ return boxToInteger(unboxCharOrInt(arg1, code1) % unboxCharOrInt(arg2, code2));
+ if (maxcode <= LONG)
+ return boxToLong(unboxCharOrLong(arg1, code1) % unboxCharOrLong(arg2, code2));
+ if (maxcode <= FLOAT)
+ return boxToFloat(unboxCharOrFloat(arg1, code1) % unboxCharOrFloat(arg2, code2));
+ if (maxcode <= DOUBLE)
+ return boxToDouble(unboxCharOrDouble(arg1, code1) % unboxCharOrDouble(arg2, code2));
+
throw new NoSuchMethodException();
}
@@ -418,24 +395,24 @@ public final class BoxesRunTime
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
if (code1 <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val1 = unboxCharOrInt(arg1, code1);
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToInteger(val1 >> val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToInteger(val1 >> val2);
}
}
if (code1 <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val1 = unboxCharOrLong(arg1, code1);
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToLong(val1 >> val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToLong(val1 >> val2);
}
}
@@ -447,24 +424,24 @@ public final class BoxesRunTime
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
if (code1 <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val1 = unboxCharOrInt(arg1, code1);
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToInteger(val1 << val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToInteger(val1 << val2);
}
}
if (code1 <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val1 = unboxCharOrLong(arg1, code1);
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToLong(val1 << val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToLong(val1 << val2);
}
}
@@ -476,24 +453,24 @@ public final class BoxesRunTime
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
if (code1 <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val1 = unboxCharOrInt(arg1, code1);
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToInteger(val1 >>> val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToInteger(val1 >>> val2);
}
}
if (code1 <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val1 = unboxCharOrLong(arg1, code1);
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToLong(val1 >>> val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToLong(val1 >>> val2);
}
}
@@ -504,19 +481,19 @@ public final class BoxesRunTime
public static Object negate(Object arg) throws NoSuchMethodException {
int code = typeCode(arg);
if (code <= INT) {
- int val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).intValue();
+ int val = unboxCharOrInt(arg, code);
return boxToInteger(-val);
}
if (code <= LONG) {
- long val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).longValue();
+ long val = unboxCharOrLong(arg, code);
return boxToLong(-val);
}
if (code <= FLOAT) {
- float val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).floatValue();
+ float val = unboxCharOrFloat(arg, code);
return boxToFloat(-val);
}
if (code <= DOUBLE) {
- double val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).doubleValue();
+ double val = unboxCharOrDouble(arg, code);
return boxToDouble(-val);
}
throw new NoSuchMethodException();
@@ -526,20 +503,16 @@ public final class BoxesRunTime
public static Object positive(Object arg) throws NoSuchMethodException {
int code = typeCode(arg);
if (code <= INT) {
- int val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).intValue();
- return boxToInteger(+val);
+ return boxToInteger(+unboxCharOrInt(arg, code));
}
if (code <= LONG) {
- long val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).longValue();
- return boxToLong(+val);
+ return boxToLong(+unboxCharOrLong(arg, code));
}
if (code <= FLOAT) {
- float val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).floatValue();
- return boxToFloat(+val);
+ return boxToFloat(+unboxCharOrFloat(arg, code));
}
if (code <= DOUBLE) {
- double val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).doubleValue();
- return boxToDouble(+val);
+ return boxToDouble(+unboxCharOrDouble(arg, code));
}
throw new NoSuchMethodException();
}
@@ -547,72 +520,60 @@ public final class BoxesRunTime
/** arg1 & arg2 */
public static Object takeAnd(Object arg1, Object arg2) throws NoSuchMethodException {
if ((arg1 instanceof Boolean) || (arg2 instanceof Boolean)) {
- if (!((arg1 instanceof Boolean) && (arg2 instanceof Boolean))) {
+ if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean))
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() & ((java.lang.Boolean) arg2).booleanValue());
+ else
throw new NoSuchMethodException();
- }
- return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() & ((java.lang.Boolean) arg2).booleanValue());
}
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
- if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 & val2);
- }
- if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 & val2);
- }
+
+ if (maxcode <= INT)
+ return boxToInteger(unboxCharOrInt(arg1, code1) & unboxCharOrInt(arg2, code2));
+ if (maxcode <= LONG)
+ return boxToLong(unboxCharOrLong(arg1, code1) & unboxCharOrLong(arg2, code2));
+
throw new NoSuchMethodException();
}
/** arg1 | arg2 */
public static Object takeOr(Object arg1, Object arg2) throws NoSuchMethodException {
if ((arg1 instanceof Boolean) || (arg2 instanceof Boolean)) {
- if (!((arg1 instanceof Boolean) && (arg2 instanceof Boolean))) {
+ if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean))
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() | ((java.lang.Boolean) arg2).booleanValue());
+ else
throw new NoSuchMethodException();
- }
- return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() | ((java.lang.Boolean) arg2).booleanValue());
}
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
- if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 | val2);
- }
- if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 | val2);
- }
+
+ if (maxcode <= INT)
+ return boxToInteger(unboxCharOrInt(arg1, code1) | unboxCharOrInt(arg2, code2));
+ if (maxcode <= LONG)
+ return boxToLong(unboxCharOrLong(arg1, code1) | unboxCharOrLong(arg2, code2));
+
throw new NoSuchMethodException();
}
/** arg1 ^ arg2 */
public static Object takeXor(Object arg1, Object arg2) throws NoSuchMethodException {
if ((arg1 instanceof Boolean) || (arg2 instanceof Boolean)) {
- if (!((arg1 instanceof Boolean) && (arg2 instanceof Boolean))) {
+ if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean))
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() ^ ((java.lang.Boolean) arg2).booleanValue());
+ else
throw new NoSuchMethodException();
- }
- return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() ^ ((java.lang.Boolean) arg2).booleanValue());
}
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
- if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 ^ val2);
- }
- if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 ^ val2);
- }
+
+ if (maxcode <= INT)
+ return boxToInteger(unboxCharOrInt(arg1, code1) ^ unboxCharOrInt(arg2, code2));
+ if (maxcode <= LONG)
+ return boxToLong(unboxCharOrLong(arg1, code1) ^ unboxCharOrLong(arg2, code2));
+
throw new NoSuchMethodException();
}
@@ -636,12 +597,10 @@ public final class BoxesRunTime
public static Object complement(Object arg) throws NoSuchMethodException {
int code = typeCode(arg);
if (code <= INT) {
- int val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).intValue();
- return boxToInteger(~val);
+ return boxToInteger(~unboxCharOrInt(arg, code));
}
if (code <= LONG) {
- long val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).longValue();
- return boxToLong(~val);
+ return boxToLong(~unboxCharOrLong(arg, code));
}
throw new NoSuchMethodException();
}
@@ -667,23 +626,23 @@ public final class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val1 = unboxCharOrInt(arg1, code1);
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToBoolean(val1 < val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val1 = unboxCharOrLong(arg1, code1);
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToBoolean(val1 < val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
+ float val1 = unboxCharOrFloat(arg1, code1);
+ float val2 = unboxCharOrFloat(arg2, code2);
return boxToBoolean(val1 < val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
+ double val1 = unboxCharOrDouble(arg1, code1);
+ double val2 = unboxCharOrDouble(arg2, code2);
return boxToBoolean(val1 < val2);
}
throw new NoSuchMethodException();
@@ -694,23 +653,23 @@ public final class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val1 = unboxCharOrInt(arg1, code1);
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToBoolean(val1 <= val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val1 = unboxCharOrLong(arg1, code1);
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToBoolean(val1 <= val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
+ float val1 = unboxCharOrFloat(arg1, code1);
+ float val2 = unboxCharOrFloat(arg2, code2);
return boxToBoolean(val1 <= val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
+ double val1 = unboxCharOrDouble(arg1, code1);
+ double val2 = unboxCharOrDouble(arg2, code2);
return boxToBoolean(val1 <= val2);
}
throw new NoSuchMethodException();
@@ -721,23 +680,23 @@ public final class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val1 = unboxCharOrInt(arg1, code1);
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToBoolean(val1 >= val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val1 = unboxCharOrLong(arg1, code1);
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToBoolean(val1 >= val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
+ float val1 = unboxCharOrFloat(arg1, code1);
+ float val2 = unboxCharOrFloat(arg2, code2);
return boxToBoolean(val1 >= val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
+ double val1 = unboxCharOrDouble(arg1, code1);
+ double val2 = unboxCharOrDouble(arg2, code2);
return boxToBoolean(val1 >= val2);
}
throw new NoSuchMethodException();
@@ -748,28 +707,43 @@ public final class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val1 = unboxCharOrInt(arg1, code1);
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToBoolean(val1 > val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val1 = unboxCharOrLong(arg1, code1);
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToBoolean(val1 > val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
+ float val1 = unboxCharOrFloat(arg1, code1);
+ float val2 = unboxCharOrFloat(arg2, code2);
return boxToBoolean(val1 > val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
+ double val1 = unboxCharOrDouble(arg1, code1);
+ double val2 = unboxCharOrDouble(arg2, code2);
return boxToBoolean(val1 > val2);
}
throw new NoSuchMethodException();
}
+ public static boolean isBoxedNumberOrBoolean(Object arg) {
+ return (arg instanceof java.lang.Boolean) || isBoxedNumber(arg);
+ }
+ public static boolean isBoxedNumber(Object arg) {
+ return (
+ (arg instanceof java.lang.Integer)
+ || (arg instanceof java.lang.Long)
+ || (arg instanceof java.lang.Double)
+ || (arg instanceof java.lang.Float)
+ || (arg instanceof java.lang.Short)
+ || (arg instanceof java.lang.Character)
+ || (arg instanceof java.lang.Byte)
+ );
+ }
+
/** arg.toChar */
public static java.lang.Character toCharacter(Object arg) throws NoSuchMethodException {
if (arg instanceof java.lang.Integer) return boxToCharacter((char)unboxToInt(arg));
diff --git a/src/library/scala/runtime/ByteRef.java b/src/library/scala/runtime/ByteRef.java
index a9962f2..cc10611 100644
--- a/src/library/scala/runtime/ByteRef.java
+++ b/src/library/scala/runtime/ByteRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/CharRef.java b/src/library/scala/runtime/CharRef.java
index bc3c4d9..03d3337 100644
--- a/src/library/scala/runtime/CharRef.java
+++ b/src/library/scala/runtime/CharRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/DoubleRef.java b/src/library/scala/runtime/DoubleRef.java
index 87ea50f..317198e 100644
--- a/src/library/scala/runtime/DoubleRef.java
+++ b/src/library/scala/runtime/DoubleRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/DynamicDispatch.java-notyet b/src/library/scala/runtime/DynamicDispatch.java-notyet
deleted file mode 100644
index 744ee79..0000000
--- a/src/library/scala/runtime/DynamicDispatch.java-notyet
+++ /dev/null
@@ -1,42 +0,0 @@
-package scala.runtime;
-
-import java.dyn.CallSite;
-import java.dyn.MethodHandle;
-
-/**
- * This class resolves calls through refinement types. The
- * bootstrap method is called when an invokedynamic is found
- * by the Java VM.
- *
- * Note: Requires Java 7 with invoke dynamic support (see JSR 292)
- *
- * @author Iulian Dragos
- * @see JSR292
- */
-public class DynamicDispatch {
-
- /**
- * Resolve an invoke dynamic in Scala code. invokedynamic calls appear
- * when a method defined by a refinement type is called. It is resolved
- * by looking up a method with the same name and types in the receiver
- * object. It is guaranteed by the type checker that such a method
- * exists.
- *
- * The current implementation is not correct, a call site being
- * always bootstrapped to a method handle. A bound call site should be
- * guarded by a test on the receiver type. Such code should either
- * be generated by the compiler, or by this bootstrap method using
- * one of the code combinators provided in java.dyn.*.
- *
- * ATM, they are not yet available in the JVM.
- */
- public static Object bootstrapInvokeDynamic(CallSite cs, Object... args) {
- println(cs);
-
- MethodHandle mh = MethodHandles.findVirtual(cs.callerClass(),
- cs.name(),
- cs.type());
- cs.setTarget(mh);
- return mh(args);
- }
-}
diff --git a/src/library/scala/runtime/FloatRef.java b/src/library/scala/runtime/FloatRef.java
index 8d0916c..e26b89b 100644
--- a/src/library/scala/runtime/FloatRef.java
+++ b/src/library/scala/runtime/FloatRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/IntRef.java b/src/library/scala/runtime/IntRef.java
index 6dd1682..edb6faf 100644
--- a/src/library/scala/runtime/IntRef.java
+++ b/src/library/scala/runtime/IntRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/LongRef.java b/src/library/scala/runtime/LongRef.java
index 5921b55..12004b5 100644
--- a/src/library/scala/runtime/LongRef.java
+++ b/src/library/scala/runtime/LongRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/MethodCache.scala b/src/library/scala/runtime/MethodCache.scala
index b2b3a3b..217b518 100644
--- a/src/library/scala/runtime/MethodCache.scala
+++ b/src/library/scala/runtime/MethodCache.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http:/// **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.runtime
import java.lang.reflect.{ Method => JMethod }
@@ -16,18 +14,18 @@ import java.lang.{ Class => JClass }
import scala.annotation.tailrec
/** An element of a polymorphic object cache.
- * This class is refered to by the CleanUp phase. Each PolyMethodCache chain
- * must only relate to one method as PolyMethodCache does not identify
- * the method name and argument types. In practice, one variable will be
- * generated per call point, and will uniquely relate to the method called
- * at that point, making the method name and argument types irrelevant. **/
+ * This class is refered to by the `CleanUp` phase. Each `PolyMethodCache` chain
+ * must only relate to one method as `PolyMethodCache` does not identify
+ * the method name and argument types. In practice, one variable will be
+ * generated per call point, and will uniquely relate to the method called
+ * at that point, making the method name and argument types irrelevant. */
/* TODO: if performance is acceptable, PolyMethodCache should be made generic on the method type */
sealed abstract class MethodCache {
- /** Searches for a cached method in the MethodCache chain that
- * is compatible with receiver class "forReceiver". If none is cached,
- * "null" is returned. If "null is returned", find's caller should look-
- * up the right method using whichever means it prefers, and add it to
- * the cache for later use. */
+ /** Searches for a cached method in the `MethodCache` chain that
+ * is compatible with receiver class `forReceiver`. If none is cached,
+ * `null` is returned. If `null` is returned, find's caller should look-
+ * up the right method using whichever means it prefers, and add it to
+ * the cache for later use. */
def find(forReceiver: JClass[_]): JMethod
def add(forReceiver: JClass[_], forMethod: JMethod): MethodCache
}
@@ -61,7 +59,7 @@ final class PolyMethodCache(
) extends MethodCache {
/** To achieve tail recursion this must be a separate method
- * from find, because the type of next is not PolyMethodCache.
+ * from `find`, because the type of next is not `PolyMethodCache`.
*/
@tailrec private def findInternal(forReceiver: JClass[_]): JMethod =
if (forReceiver eq receiver) method
diff --git a/src/library/scala/runtime/NonLocalReturnControl.scala b/src/library/scala/runtime/NonLocalReturnControl.scala
index 7c35e0b..b9525ef 100644
--- a/src/library/scala/runtime/NonLocalReturnControl.scala
+++ b/src/library/scala/runtime/NonLocalReturnControl.scala
@@ -1,17 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.runtime
import scala.util.control.ControlThrowable
-class NonLocalReturnControl[T](val key: AnyRef, val value: T) extends ControlThrowable {
- override def fillInStackTrace(): Throwable = this
+class NonLocalReturnControl[@specialized T](val key: AnyRef, val value: T) extends ControlThrowable {
+ final override def fillInStackTrace(): Throwable = this
}
diff --git a/src/library/scala/runtime/Nothing$.scala b/src/library/scala/runtime/Nothing$.scala
index c52783b..04fcc55 100644
--- a/src/library/scala/runtime/Nothing$.scala
+++ b/src/library/scala/runtime/Nothing$.scala
@@ -1,20 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.runtime
-
/**
* Dummy class which exist only to satisfy the JVM. It corresponds
- * to <code>scala.Nothing</code>. If such type appears in method
+ * to `scala.Nothing`. If such type appears in method
* signatures, it is erased to this one.
*/
-
sealed abstract class Nothing$ extends Throwable
diff --git a/src/library/scala/runtime/Null$.scala b/src/library/scala/runtime/Null$.scala
index 3ba1bb3..797b315 100644
--- a/src/library/scala/runtime/Null$.scala
+++ b/src/library/scala/runtime/Null$.scala
@@ -1,20 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.runtime
-
/**
- * Dummy class which exist only to satisfy the JVM. It corresponds
- * to <code>scala.Null</code>. If such type appears in method
- * signatures, it is erased to this one.
+ * Dummy class which exist only to satisfy the JVM. It corresponds to
+ * `scala.Null`. If such type appears in method signatures, it is erased
+ * to this one.
*/
-
sealed abstract class Null$
diff --git a/src/library/scala/runtime/ObjectRef.java b/src/library/scala/runtime/ObjectRef.java
index a1dd3d7..c8298b8 100644
--- a/src/library/scala/runtime/ObjectRef.java
+++ b/src/library/scala/runtime/ObjectRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,10 +11,10 @@
package scala.runtime;
-public class ObjectRef implements java.io.Serializable {
+public class ObjectRef<T> implements java.io.Serializable {
private static final long serialVersionUID = -9055728157600312291L;
- public Object elem;
- public ObjectRef(Object elem) { this.elem = elem; }
+ public T elem;
+ public ObjectRef(T elem) { this.elem = elem; }
public String toString() { return String.valueOf(elem); }
}
diff --git a/src/library/scala/runtime/RichBoolean.scala b/src/library/scala/runtime/RichBoolean.scala
index d448687..97e2b77 100644
--- a/src/library/scala/runtime/RichBoolean.scala
+++ b/src/library/scala/runtime/RichBoolean.scala
@@ -1,13 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
package scala.runtime
-final class RichBoolean(val self: Boolean) extends OrderedProxy[Boolean] {
- protected val ord = math.Ordering[Boolean]
+final class RichBoolean(val self: Boolean) extends AnyVal with OrderedProxy[Boolean] {
+ protected def ord = scala.math.Ordering.Boolean
}
diff --git a/src/library/scala/runtime/RichByte.scala b/src/library/scala/runtime/RichByte.scala
index fa24bd6..ca57862 100644
--- a/src/library/scala/runtime/RichByte.scala
+++ b/src/library/scala/runtime/RichByte.scala
@@ -1,11 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
package scala.runtime
-final class RichByte(val self: Byte) extends ScalaWholeNumberProxy[Byte] { }
+final class RichByte(val self: Byte) extends AnyVal with ScalaWholeNumberProxy[Byte] {
+ protected def num = scala.math.Numeric.ByteIsIntegral
+ protected def ord = scala.math.Ordering.Byte
+}
diff --git a/src/library/scala/runtime/RichChar.scala b/src/library/scala/runtime/RichChar.scala
index 358faf4..5124ca0 100644
--- a/src/library/scala/runtime/RichChar.scala
+++ b/src/library/scala/runtime/RichChar.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,10 @@ package scala.runtime
import java.lang.Character
-final class RichChar(val self: Char) extends IntegralProxy[Char] {
+final class RichChar(val self: Char) extends AnyVal with IntegralProxy[Char] {
+ protected def num = scala.math.Numeric.CharIsIntegral
+ protected def ord = scala.math.Ordering.Char
+
def asDigit: Int = Character.digit(self, Character.MAX_RADIX)
def isControl: Boolean = Character.isISOControl(self)
@@ -45,14 +48,4 @@ final class RichChar(val self: Char) extends IntegralProxy[Char] {
// public static boolean isDefined(char ch)
// public static boolean isJavaIdentifierStart(char ch)
// public static boolean isJavaIdentifierPart(char ch)
-
- @deprecated("Use ch.toLower instead", "2.8.0")
- def toLowerCase: Char = toLower
- @deprecated("Use ch.toUpper instead", "2.8.0")
- def toUpperCase: Char = toUpper
-
- @deprecated("Use ch.isLower instead", "2.8.0")
- def isLowerCase: Boolean = isLower
- @deprecated("Use ch.isUpper instead", "2.8.0")
- def isUpperCase: Boolean = isUpper
}
diff --git a/src/library/scala/runtime/RichDouble.scala b/src/library/scala/runtime/RichDouble.scala
index 1414fb7..2f16a29 100644
--- a/src/library/scala/runtime/RichDouble.scala
+++ b/src/library/scala/runtime/RichDouble.scala
@@ -1,15 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
-final class RichDouble(val self: Double) extends FractionalProxy[Double] {
- protected val integralNum = Numeric.DoubleAsIfIntegral
+final class RichDouble(val self: Double) extends AnyVal with FractionalProxy[Double] {
+ protected def num = scala.math.Numeric.DoubleIsFractional
+ protected def ord = scala.math.Ordering.Double
+ protected def integralNum = scala.math.Numeric.DoubleAsIfIntegral
def round: Long = math.round(self)
def ceil: Double = math.ceil(self)
@@ -18,15 +21,12 @@ final class RichDouble(val self: Double) extends FractionalProxy[Double] {
/** Converts an angle measured in degrees to an approximately equivalent
* angle measured in radians.
*
- * @param x an angle, in degrees
* @return the measurement of the angle x in radians.
*/
def toRadians: Double = math.toRadians(self)
/** Converts an angle measured in radians to an approximately equivalent
- * angle measured in degrees
- *
- * @param x angle, in radians
+ * angle measured in degrees.
* @return the measurement of the angle x in degrees.
*/
def toDegrees: Double = math.toDegrees(self)
@@ -36,4 +36,16 @@ final class RichDouble(val self: Double) extends FractionalProxy[Double] {
def isInfinity: Boolean = java.lang.Double.isInfinite(self)
def isPosInfinity: Boolean = isInfinity && self > 0.0
def isNegInfinity: Boolean = isInfinity && self < 0.0
+
+ override def isValidByte = self.toByte.toDouble == self
+ override def isValidShort = self.toShort.toDouble == self
+ override def isValidChar = self.toChar.toDouble == self
+ override def isValidInt = self.toInt.toDouble == self
+ // override def isValidLong = { val l = self.toLong; l.toDouble == self && l != Long.MaxValue }
+ // override def isValidFloat = self.toFloat.toDouble == self
+ // override def isValidDouble = !java.lang.Double.isNaN(self)
+ override def isWhole = {
+ val l = self.toLong
+ l.toDouble == self || l == Long.MaxValue && self < Double.PositiveInfinity || l == Long.MinValue && self > Double.NegativeInfinity
+ }
}
diff --git a/src/library/scala/runtime/RichException.scala b/src/library/scala/runtime/RichException.scala
index 5dd8953..94c4137 100644
--- a/src/library/scala/runtime/RichException.scala
+++ b/src/library/scala/runtime/RichException.scala
@@ -1,16 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.runtime
-import compat.Platform.EOL
+import scala.compat.Platform.EOL
final class RichException(exc: Throwable) {
def getStackTraceString = exc.getStackTrace().mkString("", EOL, EOL)
diff --git a/src/library/scala/runtime/RichFloat.scala b/src/library/scala/runtime/RichFloat.scala
index 3c385e3..cb0681b 100644
--- a/src/library/scala/runtime/RichFloat.scala
+++ b/src/library/scala/runtime/RichFloat.scala
@@ -1,15 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
-final class RichFloat(val self: Float) extends FractionalProxy[Float] {
- protected val integralNum = Numeric.FloatAsIfIntegral
+final class RichFloat(val self: Float) extends AnyVal with FractionalProxy[Float] {
+ protected def num = scala.math.Numeric.FloatIsFractional
+ protected def ord = scala.math.Ordering.Float
+ protected def integralNum = scala.math.Numeric.FloatAsIfIntegral
def round: Int = math.round(self)
def ceil: Float = math.ceil(self).toFloat
@@ -18,16 +21,14 @@ final class RichFloat(val self: Float) extends FractionalProxy[Float] {
/** Converts an angle measured in degrees to an approximately equivalent
* angle measured in radians.
*
- * @param x an angle, in degrees
- * @return the measurement of the angle <code>x</code> in radians.
+ * @return the measurement of the angle `x` in radians.
*/
def toRadians: Float = math.toRadians(self).toFloat
/** Converts an angle measured in radians to an approximately equivalent
* angle measured in degrees.
*
- * @param x angle, in radians
- * @return the measurement of the angle <code>x</code> in degrees.
+ * @return the measurement of the angle `x` in degrees.
*/
def toDegrees: Float = math.toDegrees(self).toFloat
@@ -36,4 +37,16 @@ final class RichFloat(val self: Float) extends FractionalProxy[Float] {
def isInfinity: Boolean = java.lang.Float.isInfinite(self)
def isPosInfinity: Boolean = isInfinity && self > 0.0f
def isNegInfinity: Boolean = isInfinity && self < 0.0f
+
+ override def isValidByte = self.toByte.toFloat == self
+ override def isValidShort = self.toShort.toFloat == self
+ override def isValidChar = self.toChar.toFloat == self
+ override def isValidInt = { val i = self.toInt; i.toFloat == self && i != Int.MaxValue }
+ // override def isValidLong = { val l = self.toLong; l.toFloat == self && l != Long.MaxValue }
+ // override def isValidFloat = !java.lang.Float.isNaN(self)
+ // override def isValidDouble = !java.lang.Float.isNaN(self)
+ override def isWhole = {
+ val l = self.toLong
+ l.toFloat == self || l == Long.MaxValue && self < Float.PositiveInfinity || l == Long.MinValue && self > Float.NegativeInfinity
+ }
}
diff --git a/src/library/scala/runtime/RichInt.scala b/src/library/scala/runtime/RichInt.scala
index 347e8c8..192f94f 100644
--- a/src/library/scala/runtime/RichInt.scala
+++ b/src/library/scala/runtime/RichInt.scala
@@ -1,72 +1,71 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.runtime
import scala.collection.immutable.Range
-import annotation.bridge
// Note that this does not implement IntegralProxy[Int] so that it can return
// the Int-specific Range class from until/to.
-final class RichInt(val self: Int) extends ScalaNumberProxy[Int] with RangedProxy[Int] {
+final class RichInt(val self: Int) extends AnyVal with ScalaNumberProxy[Int] with RangedProxy[Int] {
+ protected def num = scala.math.Numeric.IntIsIntegral
+ protected def ord = scala.math.Ordering.Int
type ResultWithoutStep = Range
/**
- * @return `true` if this number has no decimal component. Always returns true for `RichInt`.
+ * @return `'''true'''` if this number has no decimal component.
+ * Always returns `'''true'''` for `RichInt`.
*/
def isWhole() = true
/**
* @param end The final bound of the range to make.
- * @return A [[Range]] from `this` up to but not including `end`.
+ * @return A [[scala.collection.immutable.Range]] from `this` up to but
+ * not including `end`.
*/
def until(end: Int): Range = Range(self, end)
/**
* @param end The final bound of the range to make.
* @param step The number to increase by for each step of the range.
- * @return A [[Range]] from `this` up to but not including `end`.
+ * @return A [[scala.collection.immutable.Range]] from `this` up to but
+ * not including `end`.
*/
def until(end: Int, step: Int): Range = Range(self, end, step)
-// @bridge
-// def until(end: Int): Range with Range.ByOne = new Range(self, end, 1) with Range.ByOne
-
/** like `until`, but includes the last index */
/**
* @param end The final bound of the range to make.
- * @return A [[Range]] from `this` up to and including `end`.
+ * @return A [[scala.collection.immutable.Range]] from `'''this'''` up to
+ * and including `end`.
*/
def to(end: Int): Range.Inclusive = Range.inclusive(self, end)
/**
* @param end The final bound of the range to make.
* @param step The number to increase by for each step of the range.
- * @return A [[Range]] from `this` up to and including `end`.
+ * @return A [[scala.collection.immutable.Range]] from `'''this'''` up to
+ * and including `end`.
*/
def to(end: Int, step: Int): Range.Inclusive = Range.inclusive(self, end, step)
-// @bridge
-// def to(end: Int): Range with Range.ByOne = new Range.Inclusive(self, end, 1) with Range.ByOne
-
/**
- * @return `this` if `this < that` or `that` otherwise
+ * @return `'''this'''` if `'''this''' < that` or `that` otherwise
*/
override def min(that: Int): Int = if (self < that) self else that
/**
- * @return `this` if `this > that` or `that` otherwise
+ * @return `'''this'''` if `'''this''' > that` or `that` otherwise
*/
override def max(that: Int): Int = if (self > that) self else that
/**
- * Computes the absolute value of `this`.
+ * Computes the absolute value of `'''this'''`.
*/
override def abs: Int = if (self < 0) -self else self
diff --git a/src/library/scala/runtime/RichLong.scala b/src/library/scala/runtime/RichLong.scala
index f93d945..ce2d1fd 100644
--- a/src/library/scala/runtime/RichLong.scala
+++ b/src/library/scala/runtime/RichLong.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,8 +8,19 @@
package scala.runtime
-final class RichLong(val self: Long) extends IntegralProxy[Long] {
+final class RichLong(val self: Long) extends AnyVal with IntegralProxy[Long] {
+ protected def num = scala.math.Numeric.LongIsIntegral
+ protected def ord = scala.math.Ordering.Long
+
def toBinaryString: String = java.lang.Long.toBinaryString(self)
def toHexString: String = java.lang.Long.toHexString(self)
def toOctalString: String = java.lang.Long.toOctalString(self)
+
+ override def isValidByte = self.toByte.toLong == self
+ override def isValidShort = self.toShort.toLong == self
+ override def isValidChar = self.toChar.toLong == self
+ override def isValidInt = self.toInt.toLong == self
+ // override def isValidLong = true
+ // override def isValidFloat = self.toFloat.toLong == self && self != Long.MaxValue
+ // override def isValidDouble = self.toDouble.toLong == self && self != Long.MaxValue
}
diff --git a/src/library/scala/runtime/RichShort.scala b/src/library/scala/runtime/RichShort.scala
index a174438..aa24dd2 100644
--- a/src/library/scala/runtime/RichShort.scala
+++ b/src/library/scala/runtime/RichShort.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,4 +8,7 @@
package scala.runtime
-final class RichShort(val self: Short) extends ScalaWholeNumberProxy[Short] { }
+final class RichShort(val self: Short) extends AnyVal with ScalaWholeNumberProxy[Short] {
+ protected def num = scala.math.Numeric.ShortIsIntegral
+ protected def ord = scala.math.Ordering.Short
+}
diff --git a/src/library/scala/runtime/ScalaNumberProxy.scala b/src/library/scala/runtime/ScalaNumberProxy.scala
index b036036..76fc38b 100644
--- a/src/library/scala/runtime/ScalaNumberProxy.scala
+++ b/src/library/scala/runtime/ScalaNumberProxy.scala
@@ -1,7 +1,7 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
@@ -9,7 +9,7 @@
package scala.runtime
import scala.collection.{ mutable, immutable }
-import math.ScalaNumericConversions
+import scala.math.{ ScalaNumericConversions, ScalaNumericAnyConversions }
import immutable.NumericRange
import Proxy.Typed
@@ -20,26 +20,27 @@ import Proxy.Typed
* @version 2.9
* @since 2.9
*/
-abstract class ScalaNumberProxy[T: Numeric] extends ScalaNumericConversions with Typed[T] with OrderedProxy[T] {
- private val num = implicitly[Numeric[T]]
- protected val ord: Ordering[T] = num
+trait ScalaNumberProxy[T] extends Any with ScalaNumericAnyConversions with Typed[T] with OrderedProxy[T] {
+ protected implicit def num: Numeric[T]
def underlying() = self.asInstanceOf[AnyRef]
def doubleValue() = num.toDouble(self)
def floatValue() = num.toFloat(self)
def longValue() = num.toLong(self)
def intValue() = num.toInt(self)
+ def byteValue() = intValue.toByte
+ def shortValue() = intValue.toShort
def min(that: T): T = num.min(self, that)
def max(that: T): T = num.max(self, that)
def abs = num.abs(self)
def signum = num.signum(self)
}
-abstract class ScalaWholeNumberProxy[T: Numeric] extends ScalaNumberProxy[T] {
+trait ScalaWholeNumberProxy[T] extends Any with ScalaNumberProxy[T] {
def isWhole() = true
}
-abstract class IntegralProxy[T : Integral] extends ScalaWholeNumberProxy[T] with RangedProxy[T] {
- private lazy val num = implicitly[Integral[T]]
+trait IntegralProxy[T] extends Any with ScalaWholeNumberProxy[T] with RangedProxy[T] {
+ protected implicit def num: Integral[T]
type ResultWithoutStep = NumericRange[T]
def until(end: T): NumericRange.Exclusive[T] = NumericRange(self, end, num.one)
@@ -47,29 +48,29 @@ abstract class IntegralProxy[T : Integral] extends ScalaWholeNumberProxy[T] with
def to(end: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, num.one)
def to(end: T, step: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, step)
}
-abstract class FractionalProxy[T : Fractional] extends ScalaNumberProxy[T] with RangedProxy[T] {
- def isWhole() = false
+trait FractionalProxy[T] extends Any with ScalaNumberProxy[T] with RangedProxy[T] {
+ protected implicit def num: Fractional[T]
+ protected implicit def integralNum: Integral[T]
/** In order to supply predictable ranges, we require an Integral[T] which provides
* us with discrete operations on the (otherwise fractional) T. See Numeric.DoubleAsIfIntegral
* for an example.
*/
- protected implicit def integralNum: Integral[T]
- private lazy val num = implicitly[Fractional[T]]
type ResultWithoutStep = Range.Partial[T, NumericRange[T]]
+ def isWhole() = false
def until(end: T): ResultWithoutStep = new Range.Partial(NumericRange(self, end, _))
def until(end: T, step: T): NumericRange.Exclusive[T] = NumericRange(self, end, step)
def to(end: T): ResultWithoutStep = new Range.Partial(NumericRange.inclusive(self, end, _))
def to(end: T, step: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, step)
}
-trait OrderedProxy[T] extends Typed[T] with Ordered[T] {
+trait OrderedProxy[T] extends Any with Ordered[T] with Typed[T] {
protected def ord: Ordering[T]
def compare(y: T) = ord.compare(self, y)
}
-trait RangedProxy[T] extends Typed[T] {
+trait RangedProxy[T] extends Any with Typed[T] {
type ResultWithoutStep
def until(end: T): ResultWithoutStep
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index 7befac7..dcd3239 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -1,20 +1,23 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
-import scala.reflect.ClassManifest
-import scala.collection.{ Seq, IndexedSeq, TraversableView }
+import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator }
import scala.collection.mutable.WrappedArray
import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: }
import scala.collection.generic.{ Sorted }
-import scala.xml.{ Node, MetaData }
+import scala.reflect.{ ClassTag, classTag }
import scala.util.control.ControlThrowable
+import scala.xml.{ Node, MetaData }
+import java.lang.{ Class => jClass }
+
import java.lang.Double.doubleToLongBits
import java.lang.reflect.{ Modifier, Method => JMethod }
@@ -25,57 +28,76 @@ import java.lang.reflect.{ Modifier, Method => JMethod }
object ScalaRunTime {
def isArray(x: AnyRef): Boolean = isArray(x, 1)
def isArray(x: Any, atLevel: Int): Boolean =
- x != null && isArrayClass(x.asInstanceOf[AnyRef].getClass, atLevel)
+ x != null && isArrayClass(x.getClass, atLevel)
- private def isArrayClass(clazz: Class[_], atLevel: Int): Boolean =
+ private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean =
clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1))
- def isValueClass(clazz: Class[_]) = clazz.isPrimitive()
+ def isValueClass(clazz: jClass[_]) = clazz.isPrimitive()
+
+ // includes specialized subclasses and future proofed against hypothetical TupleN (for N > 22)
+ def isTuple(x: Any) = x != null && x.getClass.getName.startsWith("scala.Tuple")
+ def isAnyVal(x: Any) = x match {
+ case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true
+ case _ => false
+ }
+
+ /** Return the class object representing an array with element class `clazz`.
+ */
+ def arrayClass(clazz: jClass[_]): jClass[_] = {
+ // newInstance throws an exception if the erasure is Void.TYPE. see SI-5680
+ if (clazz == java.lang.Void.TYPE) classOf[Array[Unit]]
+ else java.lang.reflect.Array.newInstance(clazz, 0).getClass
+ }
+
+ /** Return the class object representing elements in arrays described by a given schematic.
+ */
+ def arrayElementClass(schematic: Any): jClass[_] = schematic match {
+ case cls: jClass[_] => cls.getComponentType
+ case tag: ClassTag[_] => tag.runtimeClass
+ case _ =>
+ throw new UnsupportedOperationException(s"unsupported schematic $schematic (${schematic.getClass})")
+ }
/** Return the class object representing an unboxed value type,
* e.g. classOf[int], not classOf[java.lang.Integer]. The compiler
* rewrites expressions like 5.getClass to come here.
*/
- def anyValClass[T <: AnyVal](value: T): Class[T] = (value match {
- case x: Byte => java.lang.Byte.TYPE
- case x: Short => java.lang.Short.TYPE
- case x: Char => java.lang.Character.TYPE
- case x: Int => java.lang.Integer.TYPE
- case x: Long => java.lang.Long.TYPE
- case x: Float => java.lang.Float.TYPE
- case x: Double => java.lang.Double.TYPE
- case x: Boolean => java.lang.Boolean.TYPE
- case x: Unit => java.lang.Void.TYPE
- }).asInstanceOf[Class[T]]
+ def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] =
+ classTag[T].runtimeClass.asInstanceOf[jClass[T]]
/** Retrieve generic array element */
- def array_apply(xs: AnyRef, idx: Int): Any = xs match {
- case x: Array[AnyRef] => x(idx).asInstanceOf[Any]
- case x: Array[Int] => x(idx).asInstanceOf[Any]
- case x: Array[Double] => x(idx).asInstanceOf[Any]
- case x: Array[Long] => x(idx).asInstanceOf[Any]
- case x: Array[Float] => x(idx).asInstanceOf[Any]
- case x: Array[Char] => x(idx).asInstanceOf[Any]
- case x: Array[Byte] => x(idx).asInstanceOf[Any]
- case x: Array[Short] => x(idx).asInstanceOf[Any]
- case x: Array[Boolean] => x(idx).asInstanceOf[Any]
- case x: Array[Unit] => x(idx).asInstanceOf[Any]
- case null => throw new NullPointerException
+ def array_apply(xs: AnyRef, idx: Int): Any = {
+ xs match {
+ case x: Array[AnyRef] => x(idx).asInstanceOf[Any]
+ case x: Array[Int] => x(idx).asInstanceOf[Any]
+ case x: Array[Double] => x(idx).asInstanceOf[Any]
+ case x: Array[Long] => x(idx).asInstanceOf[Any]
+ case x: Array[Float] => x(idx).asInstanceOf[Any]
+ case x: Array[Char] => x(idx).asInstanceOf[Any]
+ case x: Array[Byte] => x(idx).asInstanceOf[Any]
+ case x: Array[Short] => x(idx).asInstanceOf[Any]
+ case x: Array[Boolean] => x(idx).asInstanceOf[Any]
+ case x: Array[Unit] => x(idx).asInstanceOf[Any]
+ case null => throw new NullPointerException
+ }
}
/** update generic array element */
- def array_update(xs: AnyRef, idx: Int, value: Any): Unit = xs match {
- case x: Array[AnyRef] => x(idx) = value.asInstanceOf[AnyRef]
- case x: Array[Int] => x(idx) = value.asInstanceOf[Int]
- case x: Array[Double] => x(idx) = value.asInstanceOf[Double]
- case x: Array[Long] => x(idx) = value.asInstanceOf[Long]
- case x: Array[Float] => x(idx) = value.asInstanceOf[Float]
- case x: Array[Char] => x(idx) = value.asInstanceOf[Char]
- case x: Array[Byte] => x(idx) = value.asInstanceOf[Byte]
- case x: Array[Short] => x(idx) = value.asInstanceOf[Short]
- case x: Array[Boolean] => x(idx) = value.asInstanceOf[Boolean]
- case x: Array[Unit] => x(idx) = value.asInstanceOf[Unit]
- case null => throw new NullPointerException
+ def array_update(xs: AnyRef, idx: Int, value: Any): Unit = {
+ xs match {
+ case x: Array[AnyRef] => x(idx) = value.asInstanceOf[AnyRef]
+ case x: Array[Int] => x(idx) = value.asInstanceOf[Int]
+ case x: Array[Double] => x(idx) = value.asInstanceOf[Double]
+ case x: Array[Long] => x(idx) = value.asInstanceOf[Long]
+ case x: Array[Float] => x(idx) = value.asInstanceOf[Float]
+ case x: Array[Char] => x(idx) = value.asInstanceOf[Char]
+ case x: Array[Byte] => x(idx) = value.asInstanceOf[Byte]
+ case x: Array[Short] => x(idx) = value.asInstanceOf[Short]
+ case x: Array[Boolean] => x(idx) = value.asInstanceOf[Boolean]
+ case x: Array[Unit] => x(idx) = value.asInstanceOf[Unit]
+ case null => throw new NullPointerException
+ }
}
/** Get generic array length */
@@ -107,19 +129,21 @@ object ScalaRunTime {
case null => throw new NullPointerException
}
- /** Convert a numeric value array to an object array.
+ /** Convert an array to an object array.
* Needed to deal with vararg arguments of primitive types that are passed
* to a generic Java vararg parameter T ...
*/
- def toObjectArray(src: AnyRef): Array[Object] = {
- val length = array_length(src)
- val dest = new Array[Object](length)
- for (i <- 0 until length)
- array_update(dest, i, array_apply(src, i))
- dest
+ def toObjectArray(src: AnyRef): Array[Object] = src match {
+ case x: Array[AnyRef] => x
+ case _ =>
+ val length = array_length(src)
+ val dest = new Array[Object](length)
+ for (i <- 0 until length)
+ array_update(dest, i, array_apply(src, i))
+ dest
}
- def toArray[T](xs: collection.Seq[T]) = {
+ def toArray[T](xs: scala.collection.Seq[T]) = {
val arr = new Array[AnyRef](xs.length)
var i = 0
for (x <- xs) {
@@ -142,60 +166,22 @@ object ScalaRunTime {
def checkInitialized[T <: AnyRef](x: T): T =
if (x == null) throw new UninitializedError else x
- abstract class Try[+A] {
- def Catch[B >: A](handler: PartialFunction[Throwable, B]): B
- def Finally(fin: => Unit): A
- }
-
- def Try[A](block: => A): Try[A] = new Try[A] with Runnable {
- private var result: A = _
- private var exception: Throwable =
- try { run() ; null }
- catch {
- case e: ControlThrowable => throw e // don't catch non-local returns etc
- case e: Throwable => e
- }
-
- def run() { result = block }
-
- def Catch[B >: A](handler: PartialFunction[Throwable, B]): B =
- if (exception == null) result
- else if (handler isDefinedAt exception) handler(exception)
- else throw exception
-
- def Finally(fin: => Unit): A = {
- fin
-
- if (exception == null) result
- else throw exception
- }
- }
-
def _toString(x: Product): String =
x.productIterator.mkString(x.productPrefix + "(", ",", ")")
- def _hashCode(x: Product): Int = {
- import scala.util.MurmurHash._
- val arr = x.productArity
- // Case objects have the hashCode inlined directly into the
- // synthetic hashCode method, but this method should still give
- // a correct result if passed a case object.
- if (arr == 0) {
- x.productPrefix.hashCode
- }
- else {
- var h = startHash(arr)
- var c = startMagicA
- var k = startMagicB
- var i = 0
- while (i < arr) {
- val elem = x.productElement(i)
- h = extendHash(h, elem.##, c, k)
- c = nextMagicA(c)
- k = nextMagicB(k)
- i += 1
+ def _hashCode(x: Product): Int = scala.util.hashing.MurmurHash3.productHash(x)
+
+ /** A helper for case classes. */
+ def typedProductIterator[T](x: Product): Iterator[T] = {
+ new AbstractIterator[T] {
+ private var c: Int = 0
+ private val cmax = x.productArity
+ def hasNext = c < cmax
+ def next() = {
+ val result = x.productElement(c)
+ c += 1
+ result.asInstanceOf[T]
}
- finalizeHash(h)
}
}
@@ -218,12 +204,12 @@ object ScalaRunTime {
// Note that these are the implementations called by ##, so they
// must not call ## themselves.
- @inline def hash(x: Any): Int =
+ def hash(x: Any): Int =
if (x == null) 0
else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.hashFromNumber(x.asInstanceOf[java.lang.Number])
else x.hashCode
- @inline def hash(dv: Double): Int = {
+ def hash(dv: Double): Int = {
val iv = dv.toInt
if (iv == dv) return iv
@@ -233,7 +219,7 @@ object ScalaRunTime {
val fv = dv.toFloat
if (fv == dv) fv.hashCode else dv.hashCode
}
- @inline def hash(fv: Float): Int = {
+ def hash(fv: Float): Int = {
val iv = fv.toInt
if (iv == fv) return iv
@@ -241,31 +227,29 @@ object ScalaRunTime {
if (lv == fv) return hash(lv)
else fv.hashCode
}
- @inline def hash(lv: Long): Int = {
+ def hash(lv: Long): Int = {
val low = lv.toInt
val lowSign = low >>> 31
val high = (lv >>> 32).toInt
low ^ (high + lowSign)
}
- @inline def hash(x: Int): Int = x
- @inline def hash(x: Short): Int = x.toInt
- @inline def hash(x: Byte): Int = x.toInt
- @inline def hash(x: Char): Int = x.toInt
- @inline def hash(x: Boolean): Int = if (x) trueHashcode else falseHashcode
- @inline def hash(x: Unit): Int = 0
- @inline def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x)
-
- // These are so these values are constant folded into def hash(Boolean)
- // rather than being recalculated all the time.
- private final val trueHashcode = true.hashCode
- private final val falseHashcode = false.hashCode
+ def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x)
+
+ // The remaining overloads are here for completeness, but the compiler
+ // inlines these definitions directly so they're not generally used.
+ def hash(x: Int): Int = x
+ def hash(x: Short): Int = x.toInt
+ def hash(x: Byte): Int = x.toInt
+ def hash(x: Char): Int = x.toInt
+ def hash(x: Boolean): Int = if (x) true.hashCode else false.hashCode
+ def hash(x: Unit): Int = 0
/** A helper method for constructing case class equality methods,
* because existential types get in the way of a clean outcome and
* it's performing a series of Any/Any equals comparisons anyway.
* See ticket #2867 for specifics.
*/
- def sameElements(xs1: collection.Seq[Any], xs2: collection.Seq[Any]) = xs1 sameElements xs2
+ def sameElements(xs1: scala.collection.Seq[Any], xs2: scala.collection.Seq[Any]) = xs1 sameElements xs2
/** Given any Scala value, convert it to a String.
*
@@ -281,11 +265,12 @@ object ScalaRunTime {
*/
def stringOf(arg: Any): String = stringOf(arg, scala.Int.MaxValue)
def stringOf(arg: Any, maxElements: Int): String = {
- def isScalaClass(x: AnyRef) =
- Option(x.getClass.getPackage) exists (_.getName startsWith "scala.")
-
- def isTuple(x: AnyRef) =
- x.getClass.getName matches """^scala\.Tuple(\d+).*"""
+ def packageOf(x: AnyRef) = x.getClass.getPackage match {
+ case null => ""
+ case p => p.getName
+ }
+ def isScalaClass(x: AnyRef) = packageOf(x) startsWith "scala."
+ def isScalaCompilerClass(x: AnyRef) = packageOf(x) startsWith "scala.tools.nsc."
// When doing our own iteration is dangerous
def useOwnToString(x: Any) = x match {
@@ -301,7 +286,8 @@ object ScalaRunTime {
case _: TraversableView[_, _] => true
// Don't want to a) traverse infinity or b) be overly helpful with peoples' custom
// collections which may have useful toString methods - ticket #3710
- case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x)
+ // or c) print AbstractFiles which are somehow also Iterable[AbstractFile]s.
+ case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x) || isScalaCompilerClass(x)
// Otherwise, nothing could possibly go wrong
case _ => false
}
@@ -311,6 +297,15 @@ object ScalaRunTime {
case (k, v) => inner(k) + " -> " + inner(v)
case _ => inner(arg)
}
+
+ // Special casing Unit arrays, the value class which uses a reference array type.
+ def arrayToString(x: AnyRef) = {
+ if (x.getClass.getComponentType == classOf[BoxedUnit])
+ 0 until (array_length(x) min maxElements) map (_ => "()") mkString ("Array(", ", ", ")")
+ else
+ WrappedArray make x take maxElements map inner mkString ("Array(", ", ", ")")
+ }
+
// The recursively applied attempt to prettify Array printing.
// Note that iterator is used if possible and foreach is used as a
// last resort, because the parallel collections "foreach" in a
@@ -319,14 +314,14 @@ object ScalaRunTime {
case null => "null"
case "" => "\"\""
case x: String => if (x.head.isWhitespace || x.last.isWhitespace) "\"" + x + "\"" else x
- case x if useOwnToString(x) => x toString
- case x: AnyRef if isArray(x) => WrappedArray make x take maxElements map inner mkString ("Array(", ", ", ")")
- case x: collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")")
+ case x if useOwnToString(x) => x.toString
+ case x: AnyRef if isArray(x) => arrayToString(x)
+ case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")")
case x: Iterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma
case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")")
- case x => x toString
+ case x => x.toString
}
// The try/catch is defense against iterables which aren't actually designed
@@ -336,6 +331,7 @@ object ScalaRunTime {
case _: StackOverflowError | _: UnsupportedOperationException | _: AssertionError => "" + arg
}
}
+
/** stringOf formatted for use in a repl result. */
def replStringOf(arg: Any, maxElements: Int): String = {
val s = stringOf(arg, maxElements)
@@ -343,4 +339,18 @@ object ScalaRunTime {
nl + s + "\n"
}
+ private[scala] def checkZip(what: String, coll1: TraversableOnce[_], coll2: TraversableOnce[_]) {
+ if (sys.props contains "scala.debug.zip") {
+ val xs = coll1.toIndexedSeq
+ val ys = coll2.toIndexedSeq
+ if (xs.length != ys.length) {
+ Console.err.println(
+ "Mismatched zip in " + what + ":\n" +
+ " this: " + xs.mkString(", ") + "\n" +
+ " that: " + ys.mkString(", ")
+ )
+ (new Exception).getStackTrace.drop(2).take(10).foreach(println)
+ }
+ }
+ }
}
diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala
new file mode 100644
index 0000000..d2084a6
--- /dev/null
+++ b/src/library/scala/runtime/SeqCharSequence.scala
@@ -0,0 +1,45 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+package runtime
+
+import java.util.Arrays.copyOfRange
+
+final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends CharSequence {
+ def length: Int = xs.length
+ def charAt(index: Int): Char = xs(index)
+ def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(xs.slice(start, end))
+ override def toString = xs.mkString("")
+}
+
+final class ArrayCharSequence(val xs: Array[Char], start: Int, end: Int) extends CharSequence {
+ // yikes
+ // java.lang.VerifyError: (class: scala/runtime/ArrayCharSequence, method: <init> signature: ([C)V)
+ // Constructor must call super() or this()
+ //
+ // def this(xs: Array[Char]) = this(xs, 0, xs.length)
+
+ def length: Int = math.max(0, end - start)
+ def charAt(index: Int): Char = {
+ if (0 <= index && index < length)
+ xs(start + index)
+ else throw new ArrayIndexOutOfBoundsException(index)
+ }
+ def subSequence(start0: Int, end0: Int): CharSequence = {
+ if (start0 < 0) throw new ArrayIndexOutOfBoundsException(start0)
+ else if (end0 > length) throw new ArrayIndexOutOfBoundsException(end0)
+ else if (end0 <= start0) new ArrayCharSequence(xs, 0, 0)
+ else {
+ val newlen = end0 - start0
+ val start1 = start + start0
+ new ArrayCharSequence(xs, start1, start1 + newlen)
+ }
+ }
+ override def toString = xs drop start take length mkString ""
+}
diff --git a/src/library/scala/runtime/ShortRef.java b/src/library/scala/runtime/ShortRef.java
index dc4f014..461b521 100644
--- a/src/library/scala/runtime/ShortRef.java
+++ b/src/library/scala/runtime/ShortRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/Statics.java b/src/library/scala/runtime/Statics.java
new file mode 100644
index 0000000..485511e
--- /dev/null
+++ b/src/library/scala/runtime/Statics.java
@@ -0,0 +1,89 @@
+package scala.runtime;
+
+/** Not for public consumption. Usage by the runtime only.
+ */
+
+public final class Statics {
+ public static int mix(int hash, int data) {
+ int h = mixLast(hash, data);
+ h = Integer.rotateLeft(h, 13);
+ return h * 5 + 0xe6546b64;
+ }
+
+ public static int mixLast(int hash, int data) {
+ int k = data;
+
+ k *= 0xcc9e2d51;
+ k = Integer.rotateLeft(k, 15);
+ k *= 0x1b873593;
+
+ return hash ^ k;
+ }
+
+ public static int finalizeHash(int hash, int length) {
+ return avalanche(hash ^ length);
+ }
+
+ /** Force all bits of the hash to avalanche. Used for finalizing the hash. */
+ public static int avalanche(int h) {
+ h ^= h >>> 16;
+ h *= 0x85ebca6b;
+ h ^= h >>> 13;
+ h *= 0xc2b2ae35;
+ h ^= h >>> 16;
+
+ return h;
+ }
+
+ public static int longHash(long lv) {
+ if ((int)lv == lv)
+ return (int)lv;
+ else
+ return (int)(lv ^ (lv >>> 32));
+ }
+
+ public static int doubleHash(double dv) {
+ int iv = (int)dv;
+ if (iv == dv)
+ return iv;
+
+ float fv = (float)dv;
+ if (fv == dv)
+ return java.lang.Float.floatToIntBits(fv);
+
+ long lv = (long)dv;
+ if (lv == dv)
+ return (int)lv;
+
+ lv = Double.doubleToLongBits(dv);
+ return (int)(lv ^ (lv >>> 32));
+ }
+
+ public static int floatHash(float fv) {
+ int iv = (int)fv;
+ if (iv == fv)
+ return iv;
+
+ long lv = (long)fv;
+ if (lv == fv)
+ return (int)(lv^(lv>>>32));
+
+ return java.lang.Float.floatToIntBits(fv);
+ }
+
+ public static int anyHash(Object x) {
+ if (x == null)
+ return 0;
+
+ if (x instanceof java.lang.Long)
+ return longHash(((java.lang.Long)x).longValue());
+
+ if (x instanceof java.lang.Double)
+ return doubleHash(((java.lang.Double)x).doubleValue());
+
+ if (x instanceof java.lang.Float)
+ return floatHash(((java.lang.Float)x).floatValue());
+
+ return x.hashCode();
+ }
+}
diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala
index 824c4f5..9d848f0 100644
--- a/src/library/scala/runtime/StringAdd.scala
+++ b/src/library/scala/runtime/StringAdd.scala
@@ -1,23 +1,14 @@
/* *\
** ________ ___ __ ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ |_| **
** **
\* */
-
-
package scala.runtime
-
-final class StringAdd(self: Any) {
-
+/** A wrapper class that adds string concatenation `+` to any value */
+final class StringAdd(val self: Any) extends AnyVal {
def +(other: String) = String.valueOf(self) + other
-
- /** Returns string formatted according to given <code>format</code> string.
- * Format strings are as for <code>String.format</code>
- * (@see java.lang.String.format).
- */
- def formatted(fmtstr: String): String = fmtstr format self
}
diff --git a/src/library/scala/runtime/StringFormat.scala b/src/library/scala/runtime/StringFormat.scala
new file mode 100644
index 0000000..983ae2f
--- /dev/null
+++ b/src/library/scala/runtime/StringFormat.scala
@@ -0,0 +1,19 @@
+/* *\
+** ________ ___ __ ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ |_| **
+** **
+\* */
+
+package scala.runtime
+
+/** A wrapper class that adds a `formatted` operation to any value
+ */
+final class StringFormat(val self: Any) extends AnyVal {
+ /** Returns string formatted according to given `format` string.
+ * Format strings are as for `String.format`
+ * (@see java.lang.String.format).
+ */
+ @inline def formatted(fmtstr: String): String = fmtstr format self
+}
diff --git a/src/library/scala/runtime/TraitSetter.java b/src/library/scala/runtime/TraitSetter.java
old mode 100644
new mode 100755
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
new file mode 100644
index 0000000..ef29075
--- /dev/null
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -0,0 +1,130 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.runtime
+
+import scala.collection.{ TraversableLike, IterableLike }
+import scala.collection.generic.{ CanBuildFrom => CBF }
+import scala.language.{ higherKinds, implicitConversions }
+
+/** This interface is intended as a minimal interface, not complicated
+ * by the requirement to resolve type constructors, for implicit search (which only
+ * needs to find an implicit conversion to Traversable for our purposes.)
+ */
+trait ZippedTraversable2[+El1, +El2] extends Any {
+ def foreach[U](f: (El1, El2) => U): Unit
+}
+object ZippedTraversable2 {
+ implicit def zippedTraversable2ToTraversable[El1, El2](zz: ZippedTraversable2[El1, El2]): Traversable[(El1, El2)] = {
+ new scala.collection.AbstractTraversable[(El1, El2)] {
+ def foreach[U](f: ((El1, El2)) => U): Unit = zz foreach Function.untupled(f)
+ }
+ }
+}
+
+final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1, Repr1], IterableLike[El2, Repr2])) extends AnyVal with ZippedTraversable2[El1, El2] {
+ // This would be better as "private def coll1 = colls._1" but
+ // SI-6215 precludes private methods in value classes.
+ def map[B, To](f: (El1, El2) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
+ val b = cbf(colls._1.repr)
+ b.sizeHint(colls._1)
+ val elems2 = colls._2.iterator
+
+ for (el1 <- colls._1) {
+ if (elems2.hasNext)
+ b += f(el1, elems2.next)
+ else
+ return b.result
+ }
+
+ b.result
+ }
+
+ def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
+ val b = cbf(colls._1.repr)
+ val elems2 = colls._2.iterator
+
+ for (el1 <- colls._1) {
+ if (elems2.hasNext)
+ b ++= f(el1, elems2.next)
+ else
+ return b.result
+ }
+
+ b.result
+ }
+
+ def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = {
+ val b1 = cbf1(colls._1.repr)
+ val b2 = cbf2(colls._2.repr)
+ val elems2 = colls._2.iterator
+
+ for (el1 <- colls._1) {
+ if (elems2.hasNext) {
+ val el2 = elems2.next
+ if (f(el1, el2)) {
+ b1 += el1
+ b2 += el2
+ }
+ }
+ else return (b1.result, b2.result)
+ }
+
+ (b1.result, b2.result)
+ }
+
+ def exists(f: (El1, El2) => Boolean): Boolean = {
+ val elems2 = colls._2.iterator
+
+ for (el1 <- colls._1) {
+ if (elems2.hasNext) {
+ if (f(el1, elems2.next))
+ return true
+ }
+ else return false
+ }
+ false
+ }
+
+ def forall(f: (El1, El2) => Boolean): Boolean =
+ !exists((x, y) => !f(x, y))
+
+ def foreach[U](f: (El1, El2) => U): Unit = {
+ val elems2 = colls._2.iterator
+
+ for (el1 <- colls._1) {
+ if (elems2.hasNext)
+ f(el1, elems2.next)
+ else
+ return
+ }
+ }
+}
+
+object Tuple2Zipped {
+ final class Ops[T1, T2](val x: (T1, T2)) extends AnyVal {
+ def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], That]
+ (implicit w1: T1 <:< CC1[El1],
+ w2: T2 <:< CC2[El2],
+ bf: scala.collection.generic.CanBuildFrom[CC1[_], (El1, El2), That]
+ ): That = {
+ val buf = bf(x._1)
+ val it1 = x._1.toIterator
+ val it2 = x._2.toIterator
+ while (it1.hasNext && it2.hasNext)
+ buf += ((it1.next, it2.next))
+
+ buf.result
+ }
+
+ def zipped[El1, Repr1, El2, Repr2]
+ (implicit w1: T1 => TraversableLike[El1, Repr1],
+ w2: T2 => IterableLike[El2, Repr2]
+ ): Tuple2Zipped[El1, Repr1, El2, Repr2] = new Tuple2Zipped((x._1, x._2))
+ }
+}
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
new file mode 100644
index 0000000..3f2afaf
--- /dev/null
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -0,0 +1,140 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.runtime
+
+import scala.collection.{ TraversableLike, IterableLike }
+import scala.collection.generic.{ CanBuildFrom => CBF }
+import scala.language.{ higherKinds, implicitConversions }
+
+/** See comment on ZippedTraversable2. */
+trait ZippedTraversable3[+El1, +El2, +El3] extends Any {
+ def foreach[U](f: (El1, El2, El3) => U): Unit
+}
+object ZippedTraversable3 {
+ implicit def zippedTraversable3ToTraversable[El1, El2, El3](zz: ZippedTraversable3[El1, El2, El3]): Traversable[(El1, El2, El3)] = {
+ new scala.collection.AbstractTraversable[(El1, El2, El3)] {
+ def foreach[U](f: ((El1, El2, El3)) => U): Unit = zz foreach Function.untupled(f)
+ }
+ }
+}
+
+final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (TraversableLike[El1, Repr1], IterableLike[El2, Repr2], IterableLike[El3, Repr3]))
+ extends AnyVal with ZippedTraversable3[El1, El2, El3] {
+
+ def map[B, To](f: (El1, El2, El3) => B)(implicit cbf: CBF[Repr1, B, To]): To = {
+ val b = cbf(colls._1.repr)
+ val elems2 = colls._2.iterator
+ val elems3 = colls._3.iterator
+
+ for (el1 <- colls._1) {
+ if (elems2.hasNext && elems3.hasNext)
+ b += f(el1, elems2.next, elems3.next)
+ else
+ return b.result
+ }
+ b.result
+ }
+
+ def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
+ val b = cbf(colls._1.repr)
+ val elems2 = colls._2.iterator
+ val elems3 = colls._3.iterator
+
+ for (el1 <- colls._1) {
+ if (elems2.hasNext && elems3.hasNext)
+ b ++= f(el1, elems2.next, elems3.next)
+ else
+ return b.result
+ }
+ b.result
+ }
+
+ def filter[To1, To2, To3](f: (El1, El2, El3) => Boolean)(
+ implicit cbf1: CBF[Repr1, El1, To1],
+ cbf2: CBF[Repr2, El2, To2],
+ cbf3: CBF[Repr3, El3, To3]): (To1, To2, To3) = {
+ val b1 = cbf1(colls._1.repr)
+ val b2 = cbf2(colls._2.repr)
+ val b3 = cbf3(colls._3.repr)
+ val elems2 = colls._2.iterator
+ val elems3 = colls._3.iterator
+ def result = (b1.result, b2.result, b3.result)
+
+ for (el1 <- colls._1) {
+ if (elems2.hasNext && elems3.hasNext) {
+ val el2 = elems2.next
+ val el3 = elems3.next
+
+ if (f(el1, el2, el3)) {
+ b1 += el1
+ b2 += el2
+ b3 += el3
+ }
+ }
+ else return result
+ }
+
+ result
+ }
+
+ def exists(f: (El1, El2, El3) => Boolean): Boolean = {
+ val elems2 = colls._2.iterator
+ val elems3 = colls._3.iterator
+
+ for (el1 <- colls._1) {
+ if (elems2.hasNext && elems3.hasNext) {
+ if (f(el1, elems2.next, elems3.next))
+ return true
+ }
+ else return false
+ }
+ false
+ }
+
+ def forall(f: (El1, El2, El3) => Boolean): Boolean =
+ !exists((x, y, z) => !f(x, y, z))
+
+ def foreach[U](f: (El1, El2, El3) => U): Unit = {
+ val elems2 = colls._2.iterator
+ val elems3 = colls._3.iterator
+
+ for (el1 <- colls._1) {
+ if (elems2.hasNext && elems3.hasNext)
+ f(el1, elems2.next, elems3.next)
+ else
+ return
+ }
+ }
+}
+
+object Tuple3Zipped {
+ final class Ops[T1, T2, T3](val x: (T1, T2, T3)) extends AnyVal {
+ def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], El3, CC3[X] <: TraversableOnce[X], That]
+ (implicit w1: T1 <:< CC1[El1],
+ w2: T2 <:< CC2[El2],
+ w3: T3 <:< CC3[El3],
+ bf: scala.collection.generic.CanBuildFrom[CC1[_], (El1, El2, El3), That]
+ ): That = {
+ val buf = bf(x._1)
+ val it1 = x._1.toIterator
+ val it2 = x._2.toIterator
+ val it3 = x._3.toIterator
+ while (it1.hasNext && it2.hasNext && it3.hasNext)
+ buf += ((it1.next, it2.next, it3.next))
+
+ buf.result
+ }
+
+ def zipped[El1, Repr1, El2, Repr2, El3, Repr3]
+ (implicit w1: T1 => TraversableLike[El1, Repr1],
+ w2: T2 => IterableLike[El2, Repr2],
+ w3: T3 => IterableLike[El3, Repr3]
+ ): Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3] = new Tuple3Zipped((x._1, x._2, x._3))
+ }
+}
diff --git a/src/library/scala/runtime/VolatileBooleanRef.java b/src/library/scala/runtime/VolatileBooleanRef.java
old mode 100644
new mode 100755
index 6f265aa..e3bd182
--- a/src/library/scala/runtime/VolatileBooleanRef.java
+++ b/src/library/scala/runtime/VolatileBooleanRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/VolatileByteRef.java b/src/library/scala/runtime/VolatileByteRef.java
old mode 100644
new mode 100755
index e7d3d79..034b003
--- a/src/library/scala/runtime/VolatileByteRef.java
+++ b/src/library/scala/runtime/VolatileByteRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/VolatileCharRef.java b/src/library/scala/runtime/VolatileCharRef.java
old mode 100644
new mode 100755
index e6eb937..f90648c
--- a/src/library/scala/runtime/VolatileCharRef.java
+++ b/src/library/scala/runtime/VolatileCharRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/VolatileDoubleRef.java b/src/library/scala/runtime/VolatileDoubleRef.java
old mode 100644
new mode 100755
index c4a041b..d47c957
--- a/src/library/scala/runtime/VolatileDoubleRef.java
+++ b/src/library/scala/runtime/VolatileDoubleRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/VolatileFloatRef.java b/src/library/scala/runtime/VolatileFloatRef.java
old mode 100644
new mode 100755
index 0eca2c7..97da95f
--- a/src/library/scala/runtime/VolatileFloatRef.java
+++ b/src/library/scala/runtime/VolatileFloatRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/VolatileIntRef.java b/src/library/scala/runtime/VolatileIntRef.java
old mode 100644
new mode 100755
index dd3e751..e8a68a1
--- a/src/library/scala/runtime/VolatileIntRef.java
+++ b/src/library/scala/runtime/VolatileIntRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/VolatileLongRef.java b/src/library/scala/runtime/VolatileLongRef.java
old mode 100644
new mode 100755
index 94309f3..80e627c
--- a/src/library/scala/runtime/VolatileLongRef.java
+++ b/src/library/scala/runtime/VolatileLongRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/VolatileObjectRef.java b/src/library/scala/runtime/VolatileObjectRef.java
old mode 100644
new mode 100755
index 73facba..848b063
--- a/src/library/scala/runtime/VolatileObjectRef.java
+++ b/src/library/scala/runtime/VolatileObjectRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,10 +11,10 @@
package scala.runtime;
-public class VolatileObjectRef implements java.io.Serializable {
+public class VolatileObjectRef<T> implements java.io.Serializable {
private static final long serialVersionUID = -9055728157600312291L;
- volatile public Object elem;
- public VolatileObjectRef(Object elem) { this.elem = elem; }
+ volatile public T elem;
+ public VolatileObjectRef(T elem) { this.elem = elem; }
public String toString() { return String.valueOf(elem); }
}
diff --git a/src/library/scala/runtime/VolatileShortRef.java b/src/library/scala/runtime/VolatileShortRef.java
old mode 100644
new mode 100755
index 10a770c..4e91d0d
--- a/src/library/scala/runtime/VolatileShortRef.java
+++ b/src/library/scala/runtime/VolatileShortRef.java
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/WorksheetSupport.scala b/src/library/scala/runtime/WorksheetSupport.scala
new file mode 100644
index 0000000..016a0d0
--- /dev/null
+++ b/src/library/scala/runtime/WorksheetSupport.scala
@@ -0,0 +1,94 @@
+package scala.runtime
+import java.io.{OutputStream, PrintStream}
+import scala.runtime.ScalaRunTime.stringOf
+
+/** A utility object that's needed by the code that executes a worksheet.
+ */
+ at deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
+object WorksheetSupport {
+
+ /** The offset in the source which should be printed */
+ private var currentOffset = 0
+
+ /** A stream that flushes in regular intervals so that output can be captured
+ * in real time. The flush interval is determined by the field "flushInterval".
+ * By default it is 30ms.
+ */
+ private class FlushedOutputStream(out: OutputStream) extends OutputStream {
+ protected def flushInterval = 30000000L // interval between flushes, by default 30ms
+ protected def width = 80 // output width, by default 80 characters
+ protected def tabInc = 8 // tab increment, by default 8 characters
+ private var lastFlush: Long = 0L
+ private var col = -1
+ override def write(b: Array[Byte], off: Int, len: Int) = {
+ for (idx <- off until (off + len min b.length)) writeOne(b(idx))
+ flush()
+ }
+ override def write(c: Int) {
+ writeOne(c)
+ flush()
+ }
+ override def flush() {
+ val current = System.nanoTime
+ if (current - lastFlush >= flushInterval) {
+ out.flush()
+ lastFlush = current
+ }
+ }
+ def writeOne(c: Int) {
+ if (col < 0) {
+ col = 0
+ write((currentOffset+" ").getBytes)
+ }
+ out.write(c)
+ col =
+ if (c == '\n') -1
+ else if (c == '\t') (col / tabInc) * tabInc + tabInc
+ else col + 1
+ if (col >= width) writeOne('\n')
+ }
+ def ensureNewLine() = if (col > 0) writeOne('\n')
+ }
+
+ private val flushedOut = new FlushedOutputStream(System.out)
+ private val printOut = new PrintStream(flushedOut)
+
+ private def redirected(op: => Unit) = {
+ val oldSysOut = System.out
+ val oldSysErr = System.err
+ val oldConsOut = Console.out
+ val oldConsErr = Console.err
+ System.setOut(printOut)
+ System.setErr(printOut)
+ Console.setOut(printOut)
+ Console.setErr(printOut)
+ try op
+ finally {
+ printOut.close()
+ System.setOut(oldSysOut)
+ System.setErr(oldSysErr)
+ Console.setOut(oldConsOut)
+ Console.setErr(oldConsErr)
+ }
+ }
+
+ def $execute(op: => Unit) = redirected {
+ try op
+ catch {
+ case ex: StopException => ;
+ case ex: Throwable => ex.printStackTrace()
+ }
+ }
+
+ def $skip(n: Int) = {
+ flushedOut.ensureNewLine()
+ currentOffset += n
+ }
+
+ def $stop() = throw new StopException
+
+ def $show(x: Any): String = stringOf(x)
+}
+
+class StopException extends Exception
+
diff --git a/src/library/scala/runtime/package.scala b/src/library/scala/runtime/package.scala
index 9c87baf..e4472b3 100644
--- a/src/library/scala/runtime/package.scala
+++ b/src/library/scala/runtime/package.scala
@@ -1,13 +1,3 @@
package scala
-package object runtime {
- @deprecated("Use `scala.Unit` instead.", "2.9.0") val Unit = scala.Unit
- @deprecated("Use `scala.Boolean` instead.", "2.9.0") val Boolean = scala.Boolean
- @deprecated("Use `scala.Byte` instead.", "2.9.0") val Byte = scala.Byte
- @deprecated("Use `scala.Short` instead.", "2.9.0") val Short = scala.Short
- @deprecated("Use `scala.Char` instead.", "2.9.0") val Char = scala.Char
- @deprecated("Use `scala.Int` instead.", "2.9.0") val Int = scala.Int
- @deprecated("Use `scala.Long` instead.", "2.9.0") val Long = scala.Long
- @deprecated("Use `scala.Float` instead.", "2.9.0") val Float = scala.Float
- @deprecated("Use `scala.Double` instead.", "2.9.0") val Double = scala.Double
-}
+package object runtime { }
diff --git a/src/library/scala/specialized.scala b/src/library/scala/specialized.scala
index 3254a0c..cb77935 100644
--- a/src/library/scala/specialized.scala
+++ b/src/library/scala/specialized.scala
@@ -1,33 +1,32 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala
+import Specializable._
+
/** Annotate type parameters on which code should be automatically
* specialized. For example:
- * <code>
+ * {{{
* class MyList[@specialized T] ...
- * </code>
+ * }}}
*
* Type T can be specialized on a subset of the primitive types by
* specifying a list of primitive types to specialize at:
- *
- * <code>
- * class MyList[@specialized(Int, Double, Boolean) T] ..
- * </code>
+ * {{{
+ * class MyList[@specialized(Int, Double, Boolean) T] ..
+ * }}}
*
* @since 2.8
*/
-class specialized(types: SpecializableCompanion*) extends annotation.StaticAnnotation {
- def this() {
- this(Unit, Boolean, Byte, Short, Char, Int, Long, Float, Double)
- }
-}
+// class tspecialized[T](group: Group[T]) extends scala.annotation.StaticAnnotation {
+class specialized(group: SpecializedGroup) extends scala.annotation.StaticAnnotation {
+ def this(types: Specializable*) = this(new Group(types.toList))
+ def this() = this(Primitives)
+}
diff --git a/src/library/scala/sys/BooleanProp.scala b/src/library/scala/sys/BooleanProp.scala
index e940990..e3c25bb 100644
--- a/src/library/scala/sys/BooleanProp.scala
+++ b/src/library/scala/sys/BooleanProp.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,6 +8,8 @@
package scala.sys
+import scala.language.implicitConversions
+
/** A few additional conveniences for Boolean properties.
*/
trait BooleanProp extends Prop[Boolean] {
diff --git a/src/library/scala/sys/Prop.scala b/src/library/scala/sys/Prop.scala
index 33b88f1..04c7b51 100644
--- a/src/library/scala/sys/Prop.scala
+++ b/src/library/scala/sys/Prop.scala
@@ -1,12 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.sys
+package scala
+package sys
/** A lightweight interface wrapping a property contained in some
* unspecified map. Generally it'll be the system properties but this
@@ -38,7 +39,7 @@ trait Prop[+T] {
/** Sets the property.
*
- * @param the new string value
+ * @param newValue the new string value
* @return the old value, or null if it was unset.
*/
def set(newValue: String): String
diff --git a/src/library/scala/sys/PropImpl.scala b/src/library/scala/sys/PropImpl.scala
index b84553e..b50e0e1 100644
--- a/src/library/scala/sys/PropImpl.scala
+++ b/src/library/scala/sys/PropImpl.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/sys/ShutdownHookThread.scala b/src/library/scala/sys/ShutdownHookThread.scala
index 9a4979c..a8f4871 100644
--- a/src/library/scala/sys/ShutdownHookThread.scala
+++ b/src/library/scala/sys/ShutdownHookThread.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -32,7 +32,6 @@ object ShutdownHookThread {
val t = new ShutdownHookThread(hookName()) {
override def run() = body
}
- t setDaemon true
runtime addShutdownHook t
t
}
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index 25fd6e5..da9adb3 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,6 +11,8 @@ package scala.sys
import scala.collection.{ mutable, Iterator }
import scala.collection.JavaConverters._
import java.security.AccessControlException
+import scala.language.implicitConversions
+
/** A bidirectional map wrapping the java System properties.
* Changes to System properties will be immediately visible in the map,
@@ -23,7 +25,10 @@ import java.security.AccessControlException
* @version 2.9
* @since 2.9
*/
-class SystemProperties extends mutable.Map[String, String] {
+class SystemProperties
+extends mutable.AbstractMap[String, String]
+ with mutable.Map[String, String] {
+
override def empty = new SystemProperties
override def default(key: String): String = null
@@ -73,6 +78,5 @@ object SystemProperties {
lazy val preferIPv4Stack = bool("java.net.preferIPv4Stack", "system should prefer IPv4 sockets")
lazy val preferIPv6Addresses = bool("java.net.preferIPv6Addresses", "system should prefer IPv6 addresses")
lazy val noTraceSupression = bool("scala.control.noTraceSuppression", "scala should not suppress any stack trace creation")
- lazy val traceSourcePath = str("scala.control.sourcepath", "sourcepath for looking up stack trace elements")
}
diff --git a/src/library/scala/sys/package.scala b/src/library/scala/sys/package.scala
index 16faded..386bd84 100644
--- a/src/library/scala/sys/package.scala
+++ b/src/library/scala/sys/package.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,7 +9,7 @@
package scala
import scala.collection.immutable
-import collection.JavaConverters._
+import scala.collection.JavaConverters._
/** The package object `scala.sys` contains methods for reading
* and altering core aspects of the virtual machine as well as the
@@ -50,7 +50,7 @@ package object sys {
/** A bidirectional, mutable Map representing the current system Properties.
*
* @return a SystemProperties.
- * @see `scala.sys.SystemProperties`
+ * @see [[scala.sys.SystemProperties]]
*/
def props: SystemProperties = new SystemProperties
@@ -69,8 +69,8 @@ package object sys {
*
* Note that shutdown hooks are NOT guaranteed to be run.
*
- * @param the body of code to run at shutdown
- * @return the Thread which will run the shutdown hook.
+ * @param body the body of code to run at shutdown
+ * @return the Thread which will run the shutdown hook.
*/
def addShutdownHook(body: => Unit): ShutdownHookThread = ShutdownHookThread(body)
@@ -85,4 +85,4 @@ package object sys {
tarray take got
}
-}
\ No newline at end of file
+}
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index edc60a1..0003df6 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -45,7 +45,7 @@ object BasicIO {
val q = new LinkedBlockingQueue[Either[Int, T]]
def next(): Stream[T] = q.take match {
case Left(0) => Stream.empty
- case Left(code) => if (nonzeroException) sys.error("Nonzero exit code: " + code) else Stream.empty
+ case Left(code) => if (nonzeroException) scala.sys.error("Nonzero exit code: " + code) else Stream.empty
case Right(s) => Stream.cons(s, next)
}
new Streamed((s: T) => q put Right(s), code => q put Left(code), () => next())
@@ -97,7 +97,7 @@ object BasicIO {
*
* @param withIn True if the process input should be attached to stdin.
* @param buffer A `StringBuffer` which will receive the process normal
- * output.
+ * output.
* @param log An optional `ProcessLogger` to which the output should be
* sent. If `None`, output will be sent to stderr.
* @return A `ProcessIO` with the characteristics above.
diff --git a/src/library/scala/sys/process/Process.scala b/src/library/scala/sys/process/Process.scala
index c2a61af..715b364 100644
--- a/src/library/scala/sys/process/Process.scala
+++ b/src/library/scala/sys/process/Process.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,6 +11,7 @@ package process
import processInternal._
import ProcessBuilder._
+import scala.language.implicitConversions
/** Represents a process that is running or has finished running.
* It may be a compound process with several underlying native processes (such as `a #&& b`).
diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala
index dd6a5e7..d0b2ecf 100644
--- a/src/library/scala/sys/process/ProcessBuilder.scala
+++ b/src/library/scala/sys/process/ProcessBuilder.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -149,16 +149,14 @@ trait ProcessBuilder extends Source with Sink {
* the exit code is non-zero, an exception is thrown. The newly started
* process reads from standard input of the current process.
*/
- // def !!< : String
- // Not in 2.9.1, can't be added to the interface
+ def !!< : String
/** Starts the process represented by this builder, blocks until it exits, and
* returns the output as a String. Standard error is sent to the provided
* ProcessLogger. If the exit code is non-zero, an exception is thrown. The
* newly started process reads from standard input of the current process.
*/
- // def !!<(log: ProcessLogger): String
- // Not in 2.9.1, can't be added to the interface
+ def !!<(log: ProcessLogger): String
/** Starts the process represented by this builder. The output is returned as
* a Stream that blocks when lines are not available but the process has not
diff --git a/src/library/scala/sys/process/ProcessBuilderImpl.scala b/src/library/scala/sys/process/ProcessBuilderImpl.scala
index 58f06e1..49fea6f 100644
--- a/src/library/scala/sys/process/ProcessBuilderImpl.scala
+++ b/src/library/scala/sys/process/ProcessBuilderImpl.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -128,7 +128,7 @@ private[process] trait ProcessBuilderImpl {
val code = this ! BasicIO(withIn, buffer, log)
if (code == 0) buffer.toString
- else sys.error("Nonzero exit value: " + code)
+ else scala.sys.error("Nonzero exit value: " + code)
}
private[this] def lines(
@@ -213,4 +213,4 @@ private[process] trait ProcessBuilderImpl {
) extends SequentialBuilder(first, second, "###") {
override def createProcess(io: ProcessIO) = new ProcessSequence(first, second, io)
}
-}
\ No newline at end of file
+}
diff --git a/src/library/scala/sys/process/ProcessIO.scala b/src/library/scala/sys/process/ProcessIO.scala
index fa06746..f5b2668 100644
--- a/src/library/scala/sys/process/ProcessIO.scala
+++ b/src/library/scala/sys/process/ProcessIO.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala
index b7549ee..c21c0da 100644
--- a/src/library/scala/sys/process/ProcessImpl.scala
+++ b/src/library/scala/sys/process/ProcessImpl.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -84,7 +84,7 @@ private[process] trait ProcessImpl {
private[process] abstract class CompoundProcess extends BasicProcess {
def destroy() = destroyer()
- def exitValue() = getExitValue() getOrElse sys.error("No exit code: process destroyed.")
+ def exitValue() = getExitValue() getOrElse scala.sys.error("No exit code: process destroyed.")
def start() = getExitValue
protected lazy val (getExitValue, destroyer) = {
@@ -222,7 +222,10 @@ private[process] trait ProcessImpl {
p.exitValue()
}
override def destroy() = {
- try p.destroy()
+ try{
+ outputThreads foreach (_.stop())
+ p.destroy()
+ }
finally inputThread.interrupt()
}
}
diff --git a/src/library/scala/sys/process/ProcessLogger.scala b/src/library/scala/sys/process/ProcessLogger.scala
index a8241db..a4acb06 100644
--- a/src/library/scala/sys/process/ProcessLogger.scala
+++ b/src/library/scala/sys/process/ProcessLogger.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala
index c1bf470..ed436feb 100644
--- a/src/library/scala/sys/process/package.scala
+++ b/src/library/scala/sys/process/package.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -205,7 +205,7 @@ package scala.sys {
package object process extends ProcessImplicits {
/** The arguments passed to `java` when creating this process */
def javaVmArguments: List[String] = {
- import collection.JavaConversions._
+ import scala.collection.JavaConversions._
java.lang.management.ManagementFactory.getRuntimeMXBean().getInputArguments().toList
}
diff --git a/src/library/scala/testing/Benchmark.scala b/src/library/scala/testing/Benchmark.scala
index fde2259..66d7d44 100644
--- a/src/library/scala/testing/Benchmark.scala
+++ b/src/library/scala/testing/Benchmark.scala
@@ -1,17 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.testing
-
-import compat.Platform
+import scala.compat.Platform
/** `Benchmark` can be used to quickly turn an existing class into a
* benchmark. Here is a short example:
@@ -36,11 +33,13 @@ import compat.Platform
*
* @author Iulian Dragos, Burak Emir
*/
+ at deprecated("This class will be removed.", "2.10.0")
trait Benchmark {
/** this method should be implemented by the concrete benchmark.
* This method is called by the benchmarking code for a number of times.
- * The GC is called before each call to 'run'.
+ * The GC is called between "multiplier" calls to run, right after tear
+ * down.
*
* @see setUp
* @see tearDown
@@ -51,9 +50,6 @@ trait Benchmark {
/** Run the benchmark the specified number of times and return a list with
* the execution times in milliseconds in reverse order of the execution.
- *
- * @param noTimes ...
- * @return ...
*/
def runBenchmark(noTimes: Int): List[Long] =
for (i <- List.range(1, noTimes + 1)) yield {
@@ -74,8 +70,7 @@ trait Benchmark {
* should not be measured. This method is run before each call to the
* benchmark payload, 'run'.
*/
- def setUp() {
- }
+ def setUp() {}
/** Perform cleanup operations after each 'run'. For micro benchmarks,
* think about using the result of 'run' in a way that prevents the JVM
@@ -83,8 +78,7 @@ trait Benchmark {
* write the results to a file. The execution time of this method is not
* measured.
*/
- def tearDown() {
- }
+ def tearDown() {}
/** a string that is written at the beginning of the output line
* that contains the timings. By default, this is the class name.
@@ -118,4 +112,3 @@ trait Benchmark {
}
}
}
-
diff --git a/src/library/scala/testing/SUnit.scala b/src/library/scala/testing/SUnit.scala
deleted file mode 100644
index 9720015..0000000
--- a/src/library/scala/testing/SUnit.scala
+++ /dev/null
@@ -1,272 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.testing
-
-import scala.collection.mutable.ArrayBuffer
-import xml.{ Node, NodeSeq }
-
-/**
- * <p>
- * Unit testing methods in the spirit of
- * <a href="http://www.junit.org/" target="_top">JUnit</a> framework.
- * </p>
- * <p>
- * Use these classes like this:
- * </p>
- * <pre>
- * <b>import</b> scala.testing.SUnit
- * <b>import</b> SUnit._
- *
- * <b>class</b> MyTest(n: String) <b>extends</b> TestCase(n) {
- *
- * <b>override def</b> runTest() = n <b>match</b> {
- * <b>case</b> "myTest1" => assertTrue(<b>true</b>)
- * <b>case</b> "myTest2" => assertTrue("hello", <b>false</b>)
- * }
- * }
- *
- * <b>val</b> r = <b>new</b> TestResult()
- * suite.run(r)
- * <b>for</b> (tf <- r.failures()) {
- * println(tf.toString())
- * }
- * </pre>
- * <p>
- * The trait <code>TestConsoleMain</code> contains this code as
- * a <code>main</code> method, for convenience.
- * </p>
- *
- * @author Burak Emir
- */
- at deprecated("SUnit will be removed in 2.8.0. There are several free and sophisticated testing\n"+
- """frameworks for Scala available, examples are "ScalaTest", "ScalaCheck" or "Specs".""",
- "2.7.2")
-object SUnit {
-
- /** <p>
- * Convenience trait, mix it in a <code>TestMain</code> object and
- * implement "suite" to get this code.
- * </p><pre>
- * <b>val</b> r = <b>new</b> TestResult()
- * suite.run(r)
- * <b>for</b> (<b>val</b> tf <- r.failures()) {
- * println(tf.toString())
- * </pre>
- */
- trait TestConsoleMain {
- def suite: TestSuite
- def main(args: Array[String]) {
- val r = new TestResult()
- suite.run(r)
- for (tf <- r.failures())
- println(tf.toString())
- }
- }
-
- /** a Test can be run with its result being collected */
- trait Test {
- def run(r: TestResult): Unit
- }
-
- /** The class <code>TestCase</code> defines the fixture to run multiple
- * tests.
- *
- * @param name ...
- */
- abstract class TestCase(val name: String) extends Test with Assert {
-
- protected def runTest(): Unit
-
- def run(r: TestResult) {
- try {
- runTest()
- } catch {
- case t:Throwable => r.addFailure(this, t)
- }
- }
-
- def setUp() {}
-
- def tearDown() {}
-
- override def toString() = name
- }
-
- /** The class <code>TestFailure</code> collects a failed test together
- * with the thrown exception.
- */
- class TestFailure(val failedTest: Test, val thrownException: Throwable) {
-
- def this(p: (Test, Throwable)) = this(p._1, p._2)
-
- override def toString() =
- failedTest.toString() + " failed due to " + thrownException.toString()
-
- def trace(): String = thrownException.getStackTraceString
-
- }
-
- /** a TestResult collects the result of executing a test case */
- class TestResult {
- val buf = new ArrayBuffer[(Test, Throwable)]()
-
- def addFailure(test: Test, t: Throwable) {
- buf += ((test, t))
- }
-
- def failureCount() =
- buf.length
-
- def failures() =
- buf.iterator map { x => new TestFailure(x) }
- }
-
- /** The class <code>TestSuite</code> runs a composite of test cases.
- */
- class TestSuite(tests: Test*) extends Test {
-
- def this(names: Seq[String], constr: String => Test) =
- this((names map constr):_*)
-
- val buf = new ArrayBuffer[Test]()
-
- buf ++= tests
-
- def addTest(t: Test) {
- buf += t
- }
-
- def run(r: TestResult) {
- for (t <- buf) t.run(r)
- }
- }
-
- /** an AssertFailed is thrown for a failed assertion */
- case class AssertFailed(msg: String, stackTrace: Boolean) extends RuntimeException {
- private val msg0 =
- if (stackTrace) super.getStackTrace().map(_.toString + "\n").mkString
- else msg
- override def toString() =
- if (msg0 eq null) "failed assertion: " + msg else msg0
- }
-
- /** this class defines useful <code>assert</code> methods */
- trait Assert {
-
- def enableStackTrace: Boolean = true
-
- /** fails if <code>! actual.sameElements(expected)</code> */
- def assertSameElements[A](actual: Seq[A], expected: Seq[A]) {
- if (! actual.sameElements(expected))
- fail("(no message)", actual.toString, expected.toString)
- }
-
- /** fails if expected != actual */
- def assertEquals[A](msg: String, expected: A, actual: A) {
- if (expected != actual) fail(msg, expected, actual)
- }
-
- /** fails if expected != actual */
- def assertEquals[A](expected: A, actual: A) {
- assertEquals("(no message)", expected, actual)
- }
-
- /** succeeds if actual is false */
- def assertFalse(msg: String, actual: Boolean) {
- assertEquals(msg, false, actual)
- }
-
- /** succeeds if actual is false */
- def assertFalse(actual: Boolean) {
- assertFalse("(no message)", actual)
- }
-
- /** fails if null eq actual */
- def assertNotNull(msg: String, actual: AnyRef) {
- if (null eq actual) fail(msg)
- }
-
- /** fails if null eq actual */
- def assertNotNull(actual: AnyRef): Unit =
- assertNotNull("(no message)", actual)
-
- /** fails if <code>expected eq actual</code> */
- def assertNotEq(msg: String, expected: AnyRef, actual: AnyRef) {
- if (expected eq actual) fail(msg)
- }
-
- /** fails if <code>expected eq actual</code> */
- def assertNotEq(expected: AnyRef, actual: AnyRef) {
- assertNotEq("(no message)", expected, actual)
- }
-
- /** fails if actual ne null */
- def assertNull(msg: String, actual: AnyRef) {
- if (null ne actual) fail(msg)
- }
-
- /** fails if actual ne null */
- def assertNull(actual: AnyRef) {
- assertNull("(no message)", actual)
- }
-
- /** fails if <code>expected ne actual</code> */
- def assertEq(msg: String, expected: AnyRef, actual: AnyRef) {
- if (expected ne actual) fail(msg)
- }
-
- /** fails if expected ne actual */
- def assertEq(expected: AnyRef, actual: AnyRef) {
- assertEq("(no message)", expected, actual)
- }
-
- /** succeeds if actual == true */
- def assertTrue(msg: String, actual: Boolean) {
- assertEquals(msg, true, actual)
- }
-
- /** succeeds if actual == true */
- def assertTrue(actual: Boolean) {
- assertTrue("(no message)", actual)
- }
-
- /** Temporary patchwork trying to nurse xml forward. */
- def assertEqualsXML(msg: String, expected: NodeSeq, actual: NodeSeq) {
- if (!expected.xml_==(actual))
- fail(msg, expected, actual)
- }
- def assertEqualsXML(msg: String, expected: Seq[Node], actual: Seq[Node]) {
- assertEqualsXML(msg, expected: NodeSeq, actual: NodeSeq)
- }
-
- def assertEqualsXML(expected: NodeSeq, actual: NodeSeq) {
- assertEqualsXML("(no message)", expected, actual)
- }
-
- def assertSameElementsXML(actual: Seq[Node], expected: Seq[Node]) {
- val res = (actual: NodeSeq) xml_sameElements expected
-
- assert(res, "\nassertSameElementsXML:\n actual = %s\n expected = %s".format(actual, expected))
- }
-
- /** throws <code>AssertFailed</code> with given message <code>msg</code>.
- */
- def fail(msg: String) {
- throw AssertFailed(msg, enableStackTrace)
- }
-
- def fail[A](msg: String, expected: A, actual: A) {
- throw AssertFailed(msg +
- ", expected: " + expected +
- ", actual: " + actual, enableStackTrace)
- }
- }
-}
diff --git a/src/library/scala/testing/Show.scala b/src/library/scala/testing/Show.scala
index f5670ac..9376e26 100644
--- a/src/library/scala/testing/Show.scala
+++ b/src/library/scala/testing/Show.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,35 +10,35 @@
package scala.testing
-/** Classes inheriting trait `Show` can test their member methods
- * using the notattion <code>meth(arg<sub>1</sub>, ..., arg<sub>n</sub>)</code>,
- * where `meth` is the name of the method and
- * <code>arg<sub>1</sub>,...,arg<sub>n</sub></code> are the arguments.
+/** Classes inheriting trait `Show` can test their member methods using the
+ * notation `meth(arg,,1,,, ..., arg,,n,,)`, where `meth` is the name of
+ * the method and `arg,,1,,,...,arg,,n,,` are the arguments.
*
* The only difference to a normal method call is the leading quote
- * character (`'`). A quoted method call like the one above will produces a
- * legible diagnostic to be printed on [[scala.Console]].
+ * character (`'`). A quoted method call like the one above will produces
+ * a legible diagnostic to be printed on [[scala.Console]].
*
* It is of the form
- * <pre>
- * meth(arg<sub>1</sub>, ..., arg<sub>n</sub>) gives <result></pre>
*
- * where <code><result></code> is the result of evaluating the call.
+ * `meth(arg,,1,,, ..., arg,,n,,)` gives `<result>`
+ *
+ * where `<result>` is the result of evaluating the call.
*
*/
+ at deprecated("This class will be removed.", "2.10.0")
trait Show {
- /** The result class of wrapper `symApply`.
+ /** An implicit definition that adds an apply method to Symbol which forwards to `test`.
* Prints out diagnostics of method applications.
*/
- class SymApply(f: Symbol) {
+ implicit class SymApply(f: Symbol) {
def apply[A](args: A*) {
println(test(f, args: _*))
}
}
- /** An implicit definition that adds an apply method to Symbol which forwards to `test`. */
- implicit def symApply(sym: Symbol) = new SymApply(sym)
+ @deprecated("use SymApply instead", "2.10.0")
+ def symApply(sym: Symbol): SymApply = new SymApply(sym)
/** Apply method with name of given symbol `f` to given arguments and return
* a result diagnostics.
diff --git a/src/library/scala/text/Document.scala b/src/library/scala/text/Document.scala
index 9d2b9a1..b74fd15 100644
--- a/src/library/scala/text/Document.scala
+++ b/src/library/scala/text/Document.scala
@@ -1,16 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.text
-
import java.io.Writer
case object DocNil extends Document
@@ -34,11 +31,8 @@ abstract class Document {
def :/:(hd: String): Document = hd :: DocBreak :: this
/**
- * Format this document on <code>writer</code> and try to set line
- * breaks so that the result fits in <code>width</code> columns.
- *
- * @param width ...
- * @param writer ...
+ * Format this document on `writer` and try to set line
+ * breaks so that the result fits in `width` columns.
*/
def format(width: Int, writer: Writer) {
type FmtState = (Int, Boolean, Document)
@@ -94,6 +88,8 @@ abstract class Document {
case (i, b, DocGroup(d)) :: z =>
val fitsFlat = fits(width - k, (i, false, d) :: z)
fmt(k, (i, !fitsFlat, d) :: z)
+ case _ =>
+ ()
}
fmt(0, (0, false, DocGroup(this)) :: Nil)
diff --git a/src/library/scala/throws.scala b/src/library/scala/throws.scala
index 1ff8cdc..159f1f0 100644
--- a/src/library/scala/throws.scala
+++ b/src/library/scala/throws.scala
@@ -1,22 +1,20 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala
-/** <p>
- * Annotation for specifying the exceptions thrown by a method.
- * For example:
+/**
+ * Annotation for specifying the exceptions thrown by a method.
+ * For example:
* {{{
* class Reader(fname: String) {
* private val in = new BufferedReader(new FileReader(fname))
- * @throws(classOf[IOException])
+ * @throws[IOException]("if the file doesn't exist")
* def read() = in.read()
* }
* }}}
@@ -25,4 +23,6 @@ package scala
* @version 1.0, 19/05/2006
* @since 2.1
*/
-class throws(clazz: Class[_]) extends annotation.StaticAnnotation
+class throws[T <: Throwable](cause: String = "") extends scala.annotation.StaticAnnotation {
+ def this(clazz: Class[T]) = this()
+}
diff --git a/src/library/scala/transient.scala b/src/library/scala/transient.scala
index c17fab1..8ff7c58 100644
--- a/src/library/scala/transient.scala
+++ b/src/library/scala/transient.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,7 @@
package scala
-import annotation.target._
+import scala.annotation.meta._
@field
-class transient extends annotation.StaticAnnotation
+class transient extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/unchecked.scala b/src/library/scala/unchecked.scala
index 10d3431..9dff6a9 100644
--- a/src/library/scala/unchecked.scala
+++ b/src/library/scala/unchecked.scala
@@ -1,38 +1,36 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala
-/** An annotation that gets applied to a selector in a match expression.
- * If it is present, exhaustiveness warnings for that expression will be
- * suppressed.
- * For example, compiling the code:
+/** An annotation to designate that the annotated entity
+ * should not be considered for additional compiler checks.
+ * Specific applications include annotating the subject of
+ * a match expression to suppress exhaustiveness warnings, and
+ * annotating a type argument in a match case to suppress
+ * unchecked warnings.
+ *
+ * Such suppression should be used with caution, without which
+ * one may encounter [[scala.MatchError]] or [[java.lang.ClassCastException]]
+ * at runtime. In most cases one can and should address the
+ * warning instead of suppressing it.
+ *
* {{{
- * object test extends App {
- * def f(x: Option[Int]) = x match {
- * case Some(y) => y
- * }
- * f(None)
+ * object Test extends App {
+ * // This would normally warn "match is not exhaustive"
+ * // because `None` is not covered.
+ * def f(x: Option[String]) = (x: @unchecked) match { case Some(y) => y }
+ * // This would normally warn "type pattern is unchecked"
+ * // but here will blindly cast the head element to String.
+ * def g(xs: Any) = xs match { case x: List[String @unchecked] => x.head }
* }
- * }}}
- * will display the following warning:
- * {{{
- * test.scala:2: warning: does not cover case {object None}
- * def f(x: Option[int]) = x match {
- * ^
- * one warning found
- * }}}
- * The above message may be suppressed by substituting the expression `x`
- * with `(x: @unchecked)`. Then the modified code will compile silently,
- * but, in any case, a [[scala.MatchError]] will be raised at runtime.
+ * }}}
*
* @since 2.4
*/
-class unchecked extends annotation.Annotation {}
+class unchecked extends scala.annotation.Annotation {}
diff --git a/src/library/scala/util/DynamicVariable.scala b/src/library/scala/util/DynamicVariable.scala
index fa9d17a..52cba68 100644
--- a/src/library/scala/util/DynamicVariable.scala
+++ b/src/library/scala/util/DynamicVariable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,13 +10,13 @@ package scala.util
import java.lang.InheritableThreadLocal
-/** DynamicVariables provide a binding mechanism where the current
+/** `DynamicVariables` provide a binding mechanism where the current
* value is found through dynamic scope, but where access to the
* variable itself is resolved through static scope.
*
- * The current value can be retrieved with the value method. New values
- * should be pushed using the withValue method. Values pushed via
- * withValue only stay valid while the withValue's second argument, a
+ * The current value can be retrieved with the value method. New values
+ * should be pushed using the `withValue` method. Values pushed via
+ * `withValue` only stay valid while the `withValue`'s second argument, a
* parameterless closure, executes. When the second argument finishes,
* the variable reverts to the previous value.
*
@@ -28,7 +28,7 @@ import java.lang.InheritableThreadLocal
* }}}
*
* Each thread gets its own stack of bindings. When a
- * new thread is created, the DynamicVariable gets a copy
+ * new thread is created, the `DynamicVariable` gets a copy
* of the stack of bindings from the parent thread, and
* from then on the bindings for the new thread
* are independent of those for the original thread.
diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala
new file mode 100644
index 0000000..dba11ed
--- /dev/null
+++ b/src/library/scala/util/Either.scala
@@ -0,0 +1,594 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.util
+
+import scala.language.implicitConversions
+
+/** Represents a value of one of two possible types (a disjoint union.)
+ * Instances of Either are either an instance of [[scala.util.Left]] or [[scala.util.Right]].
+ *
+ * A common use of Either is as an alternative to [[scala.Option]] for dealing
+ * with possible missing values. In this usage, [[scala.None]] is replaced
+ * with a [[scala.util.Left]] which can contain useful information.
+ * [[scala.util.Right]] takes the place of [[scala.Some]]. Convention dictates
+ * that Left is used for failure and Right is used for success.
+ *
+ * For example, you could use ``Either[String, Int]`` to detect whether a
+ * received input is a String or an Int.
+ *
+ * {{{
+ * val in = Console.readLine("Type Either a string or an Int: ")
+ * val result: Either[String,Int] = try {
+ * Right(in.toInt)
+ * } catch {
+ * case e: Exception =>
+ * Left(in)
+ * }
+ *
+ * println( result match {
+ * case Right(x) => "You passed me the Int: " + x + ", which I will increment. " + x + " + 1 = " + (x+1)
+ * case Left(x) => "You passed me the String: " + x
+ * })
+ * }}}
+ *
+ * A ''projection'' can be used to selectively operate on a value of type Either,
+ * depending on whether it is of type Left or Right. For example, to transform an
+ * Either using a function, in the case where it's a Left, one can first apply
+ * the `left` projection and invoke `map` on that projected Either. If a `right`
+ * projection is applied to that Left, the original Left is returned, unmodified.
+ *
+ * {{{
+ * val l: Either[String, Int] = Left("flower")
+ * val r: Either[String, Int] = Right(12)
+ * l.left.map(_.size): Either[Int, Int] // Left(6)
+ * r.left.map(_.size): Either[Int, Int] // Right(12)
+ * l.right.map(_.toDouble): Either[String, Double] // Left("flower")
+ * r.right.map(_.toDouble): Either[String, Double] // Right(12.0)
+ * }}}
+ *
+ * Like with other types which define a `map` method, the same can be achieved
+ * using a for-comprehension:
+ * {{{
+ * for (s <- l.left) yield s.size // Left(6)
+ * }}}
+ *
+ * To support multiple projections as generators in for-comprehensions, the Either
+ * type also defines a `flatMap` method.
+ *
+ * @author <a href="mailto:research at workingmouse.com">Tony Morris</a>, Workingmouse
+ * @version 1.0, 11/10/2008
+ * @since 2.7
+ */
+sealed abstract class Either[+A, +B] {
+ /**
+ * Projects this `Either` as a `Left`.
+ */
+ def left = Either.LeftProjection(this)
+
+ /**
+ * Projects this `Either` as a `Right`.
+ */
+ def right = Either.RightProjection(this)
+
+ /**
+ * Applies `fa` if this is a `Left` or `fb` if this is a `Right`.
+ *
+ * @example {{{
+ * val result: Either[Exception, Value] = possiblyFailingOperation()
+ * log(result.fold(
+ * ex => "Operation failed with " + ex,
+ * v => "Operation produced value: " + v
+ * ))
+ * }}}
+ *
+ * @param fa the function to apply if this is a `Left`
+ * @param fb the function to apply if this is a `Right`
+ * @return the results of applying the function
+ */
+ def fold[X](fa: A => X, fb: B => X) = this match {
+ case Left(a) => fa(a)
+ case Right(b) => fb(b)
+ }
+
+ /**
+ * If this is a `Left`, then return the left value in `Right` or vice versa.
+ *
+ * @example {{{
+ * val l: Either[String, Int] = Left("left")
+ * val r: Either[Int, String] = l.swap // Result: Right("left")
+ * }}}
+ */
+ def swap = this match {
+ case Left(a) => Right(a)
+ case Right(b) => Left(b)
+ }
+
+ /**
+ * Joins an `Either` through `Right`.
+ *
+ * This method requires that the right side of this Either is itself an
+ * Either type. That is, this must be some type like: {{{
+ * Either[A, Either[A, C]]
+ * }}} (which respects the type parameter bounds, shown below.)
+ *
+ * If this instance is a Right[Either[A, C]] then the contained Either[A, C]
+ * will be returned, otherwise this value will be returned unmodified.
+ *
+ * @example {{{
+ * Right[String, Either[String, Int]](Right(12)).joinRight // Result: Right(12)
+ * Right[String, Either[String, Int]](Left("flower")).joinRight // Result: Left("flower")
+ * Left[String, Either[String, Int]]("flower").joinRight // Result: Left("flower")
+ * }}}
+ *
+ * This method, and `joinLeft`, are analogous to `Option#flatten`
+ */
+ def joinRight[A1 >: A, B1 >: B, C](implicit ev: B1 <:< Either[A1, C]): Either[A1, C] = this match {
+ case Left(a) => Left(a)
+ case Right(b) => b
+ }
+
+ /**
+ * Joins an `Either` through `Left`.
+ *
+ * This method requires that the left side of this Either is itself an
+ * Either type. That is, this must be some type like: {{{
+ * Either[Either[C, B], B]
+ * }}} (which respects the type parameter bounds, shown below.)
+ *
+ * If this instance is a Left[Either[C, B]] then the contained Either[C, B]
+ * will be returned, otherwise this value will be returned unmodified.
+ *
+ * {{{
+ * Left[Either[Int, String], String](Right("flower")).joinLeft // Result: Right("flower")
+ * Left[Either[Int, String], String](Left(12)).joinLeft // Result: Left(12)
+ * Right[Either[Int, String], String]("daisy").joinLeft // Result: Right("daisy")
+ * }}}
+ *
+ * This method, and `joinRight`, are analogous to `Option#flatten`
+ */
+ def joinLeft[A1 >: A, B1 >: B, C](implicit ev: A1 <:< Either[C, B1]): Either[C, B1] = this match {
+ case Left(a) => a
+ case Right(b) => Right(b)
+ }
+
+ /**
+ * Returns `true` if this is a `Left`, `false` otherwise.
+ *
+ * {{{
+ * Left("tulip").isLeft // true
+ * Right("venus fly-trap").isLeft // false
+ * }}}
+ */
+ def isLeft: Boolean
+
+ /**
+ * Returns `true` if this is a `Right`, `false` otherwise.
+ *
+ * {{{
+ * Left("tulip").isRight // false
+ * Right("venus fly-trap").isRight // true
+ * }}}
+ */
+ def isRight: Boolean
+}
+
+/**
+ * The left side of the disjoint union, as opposed to the [[scala.util.Right]] side.
+ *
+ * @author <a href="mailto:research at workingmouse.com">Tony Morris</a>, Workingmouse
+ * @version 1.0, 11/10/2008
+ */
+final case class Left[+A, +B](a: A) extends Either[A, B] {
+ def isLeft = true
+ def isRight = false
+}
+
+/**
+ * The right side of the disjoint union, as opposed to the [[scala.util.Left]] side.
+ *
+ * @author <a href="mailto:research at workingmouse.com">Tony Morris</a>, Workingmouse
+ * @version 1.0, 11/10/2008
+ */
+final case class Right[+A, +B](b: B) extends Either[A, B] {
+ def isLeft = false
+ def isRight = true
+}
+
+object Either {
+
+ /**
+ * Allows use of a ``merge`` method to extract values from Either instances
+ * regardless of whether they are Left or Right.
+ *
+ * {{{
+ * val l = Left(List(1)): Either[List[Int], Vector[Int]]
+ * val r = Right(Vector(1)): Either[List[Int], Vector[Int]]
+ * l.merge: Seq[Int] // List(1)
+ * r.merge: Seq[Int] // Vector(1)
+ * }}}
+ */
+ implicit class MergeableEither[A](x: Either[A, A]) {
+ def merge: A = x match {
+ case Left(a) => a
+ case Right(a) => a
+ }
+ }
+ @deprecated("use MergeableEither instead", "2.10.0")
+ def either2mergeable[A](x: Either[A, A]): MergeableEither[A] = new MergeableEither(x)
+
+ /**
+ * Projects an `Either` into a `Left`.
+ *
+ * This allows for-comprehensions over Either instances - for example {{{
+ * for (s <- Left("flower").left) yield s.length // Left(6)
+ * }}}
+ *
+ * Continuing the analogy with [[scala.Option]], a `LeftProjection` declares
+ * that `Left` should be analogous to `Some` in some code.
+ *
+ * {{{
+ * // using Option:
+ * def interactWithDB(x: Query): Option[Result] =
+ * try {
+ * Some(getResultFromDatabase(x))
+ * } catch {
+ * case ex => None
+ * }
+ *
+ * // this will only be executed if interactWithDB returns a Some
+ * val report =
+ * for (r <- interactWithDB(someQuery)) yield generateReport(r)
+ * if (report.isDefined)
+ * send(report)
+ * else
+ * log("report not generated, not sure why...")
+ * }}}
+ *
+ * {{{
+ * // using Either
+ * def interactWithDB(x: Query): Either[Exception, Result] =
+ * try {
+ * Right(getResultFromDatabase(x))
+ * } catch {
+ * case ex => Left(ex)
+ * }
+ *
+ * // this will only be executed if interactWithDB returns a Right
+ * val report =
+ * for (r <- interactWithDB(someQuery).right) yield generateReport(r)
+ * if (report.isRight)
+ * send(report)
+ * else
+ * log("report not generated, reason was " + report.left.get)
+ * }}}
+ *
+ * @author <a href="mailto:research at workingmouse.com">Tony Morris</a>, Workingmouse
+ * @version 1.0, 11/10/2008
+ */
+ final case class LeftProjection[+A, +B](e: Either[A, B]) {
+ /**
+ * Returns the value from this `Left` or throws `Predef.NoSuchElementException`
+ * if this is a `Right`.
+ *
+ * {{{
+ * Left(12).left.get // 12
+ * Right(12).left.get // NoSuchElementException
+ * }}}
+ *
+ * @throws Predef.NoSuchElementException if the projection is [[scala.util.Right]]
+ */
+ def get = e match {
+ case Left(a) => a
+ case Right(_) => throw new NoSuchElementException("Either.left.value on Right")
+ }
+
+ /**
+ * Executes the given side-effecting function if this is a `Left`.
+ *
+ * {{{
+ * Left(12).left.foreach(x => println(x)) // prints "12"
+ * Right(12).left.foreach(x => println(x)) // doesn't print
+ * }}}
+ * @param f The side-effecting function to execute.
+ */
+ def foreach[U](f: A => U) = e match {
+ case Left(a) => f(a)
+ case Right(_) => {}
+ }
+
+ /**
+ * Returns the value from this `Left` or the given argument if this is a
+ * `Right`.
+ *
+ * {{{
+ * Left(12).left.getOrElse(17) // 12
+ * Right(12).left.getOrElse(17) // 17
+ * }}}
+ *
+ */
+ def getOrElse[AA >: A](or: => AA) = e match {
+ case Left(a) => a
+ case Right(_) => or
+ }
+
+ /**
+ * Returns `true` if `Right` or returns the result of the application of
+ * the given function to the `Left` value.
+ *
+ * {{{
+ * Left(12).left.forall(_ > 10) // true
+ * Left(7).left.forall(_ > 10) // false
+ * Right(12).left.forall(_ > 10) // true
+ * }}}
+ *
+ */
+ def forall(f: A => Boolean) = e match {
+ case Left(a) => f(a)
+ case Right(_) => true
+ }
+
+ /**
+ * Returns `false` if `Right` or returns the result of the application of
+ * the given function to the `Left` value.
+ *
+ * {{{
+ * Left(12).left.exists(_ > 10) // true
+ * Left(7).left.exists(_ > 10) // false
+ * Right(12).left.exists(_ > 10) // false
+ * }}}
+ *
+ */
+ def exists(f: A => Boolean) = e match {
+ case Left(a) => f(a)
+ case Right(_) => false
+ }
+
+ /**
+ * Binds the given function across `Left`.
+ *
+ * {{{
+ * Left(12).left.flatMap(x => Left("scala")) // Left("scala")
+ * Right(12).left.flatMap(x => Left("scala")) // Right(12)
+ * }}}
+ * @param f The function to bind across `Left`.
+ */
+ def flatMap[BB >: B, X](f: A => Either[X, BB]) = e match {
+ case Left(a) => f(a)
+ case Right(b) => Right(b)
+ }
+
+ /**
+ * Maps the function argument through `Left`.
+ *
+ * {{{
+ * Left(12).left.map(_ + 2) // Left(14)
+ * Right[Int, Int](12).left.map(_ + 2) // Right(12)
+ * }}}
+ */
+ def map[X](f: A => X) = e match {
+ case Left(a) => Left(f(a))
+ case Right(b) => Right(b)
+ }
+
+ /**
+ * Returns `None` if this is a `Right` or if the given predicate
+ * `p` does not hold for the left value, otherwise, returns a `Left`.
+ *
+ * {{{
+ * Left(12).left.filter(_ > 10) // Some(Left(12))
+ * Left(7).left.filter(_ > 10) // None
+ * Right(12).left.filter(_ > 10) // None
+ * }}}
+ */
+ def filter[Y](p: A => Boolean): Option[Either[A, Y]] = e match {
+ case Left(a) => if(p(a)) Some(Left(a)) else None
+ case Right(b) => None
+ }
+
+ /**
+ * Returns a `Seq` containing the `Left` value if it exists or an empty
+ * `Seq` if this is a `Right`.
+ *
+ * {{{
+ * Left(12).left.toSeq // Seq(12)
+ * Right(12).left.toSeq // Seq()
+ * }}}
+ */
+ def toSeq = e match {
+ case Left(a) => Seq(a)
+ case Right(_) => Seq.empty
+ }
+
+ /**
+ * Returns a `Some` containing the `Left` value if it exists or a
+ * `None` if this is a `Right`.
+ *
+ * {{{
+ * Left(12).left.toOption // Some(12)
+ * Right(12).left.toOption // None
+ * }}}
+ */
+ def toOption = e match {
+ case Left(a) => Some(a)
+ case Right(_) => None
+ }
+ }
+
+ /**
+ * Projects an `Either` into a `Right`.
+ *
+ * This allows for-comprehensions over Either instances - for example {{{
+ * for (s <- Right("flower").right) yield s.length // Right(6)
+ * }}}
+ *
+ * Continuing the analogy with [[scala.Option]], a `RightProjection` declares
+ * that `Right` should be analogous to `Some` in some code.
+ *
+ * Analogous to `LeftProjection`, see example usage in its documentation above.
+ *
+ * @author <a href="mailto:research at workingmouse.com">Tony Morris</a>, Workingmouse
+ * @version 1.0, 11/10/2008
+ */
+ final case class RightProjection[+A, +B](e: Either[A, B]) {
+
+ /**
+ * Returns the value from this `Right` or throws
+ * `Predef.NoSuchElementException` if this is a `Left`.
+ *
+ * {{{
+ * Right(12).right.get // 12
+ * Left(12).right.get // NoSuchElementException
+ * }}}
+ *
+ * @throws Predef.NoSuchElementException if the projection is `Left`.
+ */
+ def get = e match {
+ case Left(_) => throw new NoSuchElementException("Either.right.value on Left")
+ case Right(a) => a
+ }
+
+ /**
+ * Executes the given side-effecting function if this is a `Right`.
+ *
+ * {{{
+ * Right(12).right.foreach(x => println(x)) // prints "12"
+ * Left(12).right.foreach(x => println(x)) // doesn't print
+ * }}}
+ * @param f The side-effecting function to execute.
+ */
+ def foreach[U](f: B => U) = e match {
+ case Left(_) => {}
+ case Right(b) => f(b)
+ }
+
+ /**
+ * Returns the value from this `Right` or the given argument if this is a
+ * `Left`.
+ *
+ * {{{
+ * Right(12).right.getOrElse(17) // 12
+ * Left(12).right.getOrElse(17) // 17
+ * }}}
+ */
+ def getOrElse[BB >: B](or: => BB) = e match {
+ case Left(_) => or
+ case Right(b) => b
+ }
+
+ /**
+ * Returns `true` if `Left` or returns the result of the application of
+ * the given function to the `Right` value.
+ *
+ * {{{
+ * Right(12).right.forall(_ > 10) // true
+ * Right(7).right.forall(_ > 10) // false
+ * Left(12).right.forall(_ > 10) // true
+ * }}}
+ */
+ def forall(f: B => Boolean) = e match {
+ case Left(_) => true
+ case Right(b) => f(b)
+ }
+
+ /**
+ * Returns `false` if `Left` or returns the result of the application of
+ * the given function to the `Right` value.
+ *
+ * {{{
+ * Right(12).right.exists(_ > 10) // true
+ * Right(7).right.exists(_ > 10) // false
+ * Left(12).right.exists(_ > 10) // false
+ * }}}
+ */
+ def exists(f: B => Boolean) = e match {
+ case Left(_) => false
+ case Right(b) => f(b)
+ }
+
+ /**
+ * Binds the given function across `Right`.
+ *
+ * @param f The function to bind across `Right`.
+ */
+ def flatMap[AA >: A, Y](f: B => Either[AA, Y]) = e match {
+ case Left(a) => Left(a)
+ case Right(b) => f(b)
+ }
+
+ /**
+ * The given function is applied if this is a `Right`.
+ *
+ * {{{
+ * Right(12).right.map(x => "flower") // Result: Right("flower")
+ * Left(12).right.map(x => "flower") // Result: Left(12)
+ * }}}
+ */
+ def map[Y](f: B => Y) = e match {
+ case Left(a) => Left(a)
+ case Right(b) => Right(f(b))
+ }
+
+ /** Returns `None` if this is a `Left` or if the
+ * given predicate `p` does not hold for the right value,
+ * otherwise, returns a `Right`.
+ *
+ * {{{
+ * Right(12).right.filter(_ > 10) // Some(Right(12))
+ * Right(7).right.filter(_ > 10) // None
+ * Left(12).right.filter(_ > 10) // None
+ * }}}
+ */
+ def filter[X](p: B => Boolean): Option[Either[X, B]] = e match {
+ case Left(_) => None
+ case Right(b) => if(p(b)) Some(Right(b)) else None
+ }
+
+ /** Returns a `Seq` containing the `Right` value if
+ * it exists or an empty `Seq` if this is a `Left`.
+ *
+ * {{{
+ * Right(12).right.toSeq // Seq(12)
+ * Left(12).right.toSeq // Seq()
+ * }}}
+ */
+ def toSeq = e match {
+ case Left(_) => Seq.empty
+ case Right(b) => Seq(b)
+ }
+
+ /** Returns a `Some` containing the `Right` value
+ * if it exists or a `None` if this is a `Left`.
+ *
+ * {{{
+ * Right(12).right.toOption // Some(12)
+ * Left(12).right.toOption // None
+ * }}}
+ */
+ def toOption = e match {
+ case Left(_) => None
+ case Right(b) => Some(b)
+ }
+ }
+
+ /** If the condition is satisfied, return the given `B` in `Right`,
+ * otherwise, return the given `A` in `Left`.
+ *
+ * {{{
+ * val userInput: String = ...
+ * Either.cond(
+ * userInput.forall(_.isDigit) && userInput.size == 10,
+ * PhoneNumber(userInput),
+ * "The input (%s) does not look like a phone number".format(userInput))
+ * }}}
+ */
+ def cond[A, B](test: Boolean, right: => B, left: => A): Either[A, B] =
+ if (test) Right(right) else Left(left)
+}
diff --git a/src/library/scala/util/Marshal.scala b/src/library/scala/util/Marshal.scala
index daeaf4c..b78ed21 100644
--- a/src/library/scala/util/Marshal.scala
+++ b/src/library/scala/util/Marshal.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2008-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2008-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,19 +11,20 @@
package scala.util
/**
- * Marshalling of Scala objects using Scala manifests.
+ * Marshalling of Scala objects using Scala tags.
*
* @author Stephane Micheloud
* @version 1.0
*/
+ at deprecated("This class will be removed", "2.10.0")
object Marshal {
import java.io._
- import scala.reflect.ClassManifest
+ import scala.reflect.ClassTag
- def dump[A](o: A)(implicit m: ClassManifest[A]): Array[Byte] = {
+ def dump[A](o: A)(implicit t: ClassTag[A]): Array[Byte] = {
val ba = new ByteArrayOutputStream(512)
val out = new ObjectOutputStream(ba)
- out.writeObject(m)
+ out.writeObject(t)
out.writeObject(o)
out.close()
ba.toByteArray()
@@ -32,19 +33,18 @@ object Marshal {
@throws(classOf[IOException])
@throws(classOf[ClassCastException])
@throws(classOf[ClassNotFoundException])
- def load[A](buffer: Array[Byte])(implicit expected: ClassManifest[A]): A = {
+ def load[A](buffer: Array[Byte])(implicit expected: ClassTag[A]): A = {
val in = new ObjectInputStream(new ByteArrayInputStream(buffer))
- val found = in.readObject.asInstanceOf[ClassManifest[_]]
- if (found <:< expected) {
- val o = in.readObject.asInstanceOf[A]
- in.close()
- o
- } else {
- in.close()
- throw new ClassCastException("type mismatch;"+
- "\n found : "+found+
- "\n required: "+expected)
+ val found = in.readObject.asInstanceOf[ClassTag[_]]
+ try {
+ found.runtimeClass.asSubclass(expected.runtimeClass)
+ in.readObject.asInstanceOf[A]
+ } catch {
+ case _: ClassCastException =>
+ in.close()
+ throw new ClassCastException("type mismatch;"+
+ "\n found : "+found+
+ "\n required: "+expected)
}
}
-
}
diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala
index 71db253..a5bc8fa 100644
--- a/src/library/scala/util/MurmurHash.scala
+++ b/src/library/scala/util/MurmurHash.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -27,6 +27,7 @@ import scala.collection.Iterator
* or can take individual hash values with append. Its own hash code is
* set equal to the hash code of whatever it is hashing.
*/
+ at deprecated("Use the object MurmurHash3 instead.", "2.10.0")
class MurmurHash[@specialized(Int,Long,Float,Double) T](seed: Int) extends (T => Unit) {
import MurmurHash._
@@ -79,7 +80,7 @@ class MurmurHash[@specialized(Int,Long,Float,Double) T](seed: Int) extends (T =>
* incorporate a new integer) to update the values. Only one method
* needs to be called to finalize the hash.
*/
-
+ at deprecated("Use the object MurmurHash3 instead.", "2.10.0")
object MurmurHash {
// Magic values used for MurmurHash's 32 bit hash.
// Don't change these without consulting a hashing expert!
@@ -177,7 +178,7 @@ object MurmurHash {
* where the order of appearance of elements does not matter.
* This is useful for hashing sets, for example.
*/
- def symmetricHash[T](xs: collection.TraversableOnce[T], seed: Int) = {
+ def symmetricHash[T](xs: scala.collection.TraversableOnce[T], seed: Int) = {
var a,b,n = 0
var c = 1
xs.seq.foreach(i => {
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 283ecc3..d04e5e4 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,11 +10,16 @@
package scala.util
import java.io.{ IOException, PrintWriter }
+import java.util.jar.Attributes.{ Name => AttributeName }
-/** Loads library.properties from the jar. */
+/** Loads `library.properties` from the jar. */
object Properties extends PropertiesTrait {
protected def propCategory = "library"
- protected def pickJarBasedOn = classOf[ScalaObject]
+ protected def pickJarBasedOn = classOf[Option[_]]
+
+ /** Scala manifest attributes.
+ */
+ val ScalaCompilerVersion = new AttributeName("Scala-Compiler-Version")
}
private[scala] trait PropertiesTrait {
@@ -59,7 +64,7 @@ private[scala] trait PropertiesTrait {
def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "")
def scalaPropOrNone(name: String): Option[String] = Option(scalaProps.getProperty(name))
- /** The numeric portion of the runtime scala version, if this is a final
+ /** The numeric portion of the runtime Scala version, if this is a final
* release. If for instance the versionString says "version 2.9.0.final",
* this would return Some("2.9.0").
*
@@ -67,13 +72,13 @@ private[scala] trait PropertiesTrait {
* it is an RC, Beta, etc. or was built from source, or if the version
* cannot be read.
*/
- val releaseVersion =
+ val releaseVersion =
for {
v <- scalaPropOrNone("maven.version.number")
if !(v endsWith "-SNAPSHOT")
} yield v
- /** The development scala version, if this is not a final release.
+ /** The development Scala version, if this is not a final release.
* The precise contents are not guaranteed, but it aims to provide a
* unique repository identifier (currently the svn revision) in the
* fourth dotted segment if the running version was built from source.
@@ -81,18 +86,23 @@ private[scala] trait PropertiesTrait {
* @return Some(version) if this is a non-final version, None if this
* is a final release or the version cannot be read.
*/
- val developmentVersion =
+ val developmentVersion =
for {
v <- scalaPropOrNone("maven.version.number")
if v endsWith "-SNAPSHOT"
ov <- scalaPropOrNone("version.number")
} yield ov
+ /** Either the development or release version if known, otherwise
+ * the empty string.
+ */
+ def versionNumberString = scalaPropOrEmpty("version.number")
+
/** The version number of the jar this was loaded from plus "version " prefix,
* or "version (unknown)" if it cannot be determined.
*/
val versionString = "version " + scalaPropOrElse("version.number", "(unknown)")
- val copyrightString = scalaPropOrElse("copyright.string", "(c) 2002-2011 LAMP/EPFL")
+ val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2013, LAMP/EPFL")
/** This is the encoding to use reading in source files, overridden with -encoding
* Note that it uses "prop" i.e. looks in the scala jar, not the system properties.
@@ -101,7 +111,7 @@ private[scala] trait PropertiesTrait {
def sourceReader = scalaPropOrElse("source.reader", "scala.tools.nsc.io.SourceReader")
/** This is the default text encoding, overridden (unreliably) with
- * JAVA_OPTS="-Dfile.encoding=Foo"
+ * `JAVA_OPTS="-Dfile.encoding=Foo"`
*/
def encodingString = propOrElse("file.encoding", "UTF-8")
@@ -109,8 +119,7 @@ private[scala] trait PropertiesTrait {
*/
def lineSeparator = propOrElse("line.separator", "\n")
- /** Various well-known properties.
- */
+ /* Various well-known properties. */
def javaClassPath = propOrEmpty("java.class.path")
def javaHome = propOrEmpty("java.home")
def javaVendor = propOrEmpty("java.vendor")
@@ -119,6 +128,10 @@ private[scala] trait PropertiesTrait {
def javaVmName = propOrEmpty("java.vm.name")
def javaVmVendor = propOrEmpty("java.vm.vendor")
def javaVmVersion = propOrEmpty("java.vm.version")
+ // this property must remain less-well-known until 2.11
+ private def javaSpecVersion = propOrEmpty("java.specification.version")
+ //private def javaSpecVendor = propOrEmpty("java.specification.vendor")
+ //private def javaSpecName = propOrEmpty("java.specification.name")
def osName = propOrEmpty("os.name")
def scalaHome = propOrEmpty("scala.home")
def tmpDir = propOrEmpty("java.io.tmpdir")
@@ -126,27 +139,46 @@ private[scala] trait PropertiesTrait {
def userHome = propOrEmpty("user.home")
def userName = propOrEmpty("user.name")
- /** Some derived values.
- */
+ /* Some derived values. */
+ /** Returns `true` iff the underlying operating system is a version of Microsoft Windows. */
def isWin = osName startsWith "Windows"
- def isMac = javaVendor startsWith "Apple"
+ // See http://mail.openjdk.java.net/pipermail/macosx-port-dev/2012-November/005148.html for
+ // the reason why we don't follow developer.apple.com/library/mac/#technotes/tn2002/tn2110.
+ /** Returns `true` iff the underlying operating system is a version of Apple Mac OSX. */
+ def isMac = osName startsWith "Mac OS X"
+
+ // This is looking for javac, tools.jar, etc.
+ // Tries JDK_HOME first, then the more common but likely jre JAVA_HOME,
+ // and finally the system property based javaHome.
+ def jdkHome = envOrElse("JDK_HOME", envOrElse("JAVA_HOME", javaHome))
def versionMsg = "Scala %s %s -- %s".format(propCategory, versionString, copyrightString)
def scalaCmd = if (isWin) "scala.bat" else "scala"
def scalacCmd = if (isWin) "scalac.bat" else "scalac"
- /** Can the java version be determined to be at least as high as the argument?
- * Hard to properly future proof this but at the rate 1.7 is going we can leave
- * the issue for our cyborg grandchildren to solve.
+ /** Compares the given specification version to the specification version of the platform.
+ *
+ * @param version a specification version of the form "major.minor"
+ * @return `true` iff the specification version of the current runtime
+ * is equal to or higher than the version denoted by the given string.
+ * @throws NumberFormatException if the given string is not a version string
+ *
+ * @example {{{
+ * // In this example, the runtime's Java specification is assumed to be at version 1.7.
+ * isJavaAtLeast("1.6") // true
+ * isJavaAtLeast("1.7") // true
+ * isJavaAtLeast("1.8") // false
+ * }}}
*/
- def isJavaAtLeast(version: String) = {
- val okVersions = version match {
- case "1.5" => List("1.5", "1.6", "1.7")
- case "1.6" => List("1.6", "1.7")
- case "1.7" => List("1.7")
- case _ => Nil
+ def isJavaAtLeast(version: String): Boolean = {
+ def parts(x: String) = {
+ val i = x.indexOf('.')
+ if (i < 0) throw new NumberFormatException("Not a version: " + x)
+ (x.substring(0, i), x.substring(i+1, x.length))
}
- okVersions exists (javaVersion startsWith _)
+ val (v, _v) = parts(version)
+ val (s, _s) = parts(javaSpecVersion)
+ s.toInt >= v.toInt && _s.toInt >= _v.toInt
}
// provide a main method so version info can be obtained by running this
diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala
index cfd6154..24c4cd7 100644
--- a/src/library/scala/util/Random.scala
+++ b/src/library/scala/util/Random.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,9 +8,10 @@
package scala.util
-import collection.mutable.ArrayBuffer
-import collection.generic.CanBuildFrom
+import scala.collection.mutable.ArrayBuffer
+import scala.collection.generic.CanBuildFrom
import scala.collection.immutable.{ List, Stream }
+import scala.language.{implicitConversions, higherKinds}
/**
* @author Stephane Micheloud
@@ -100,8 +101,7 @@ class Random(val self: java.util.Random) {
/** Returns a new collection of the same type in a randomly chosen order.
*
- * @param coll the TraversableOnce to shuffle
- * @return the shuffled TraversableOnce
+ * @return the shuffled collection
*/
def shuffle[T, CC[X] <: TraversableOnce[X]](xs: CC[T])(implicit bf: CanBuildFrom[CC[T], T, CC[T]]): CC[T] = {
val buf = new ArrayBuffer[T] ++= xs
@@ -117,18 +117,9 @@ class Random(val self: java.util.Random) {
swap(n - 1, k)
}
- bf(xs) ++= buf result
+ (bf(xs) ++= buf).result
}
-}
-
-/** The object <code>Random</code> offers a default implementation
- * of scala.util.Random and random-related convenience methods.
- *
- * @since 2.8
- */
-object Random extends Random {
-
/** Returns a Stream of pseudorandomly chosen alphanumeric characters,
* equally chosen from A-Z, a-z, and 0-9.
*
@@ -141,3 +132,14 @@ object Random extends Random {
}
}
+
+/** The object `Random` offers a default implementation
+ * of scala.util.Random and random-related convenience methods.
+ *
+ * @since 2.8
+ */
+object Random extends Random {
+
+ implicit def javaRandomToRandom(r: java.util.Random): Random = new Random(r)
+
+}
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index f286670..276e157 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -6,19 +6,20 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
-import scala.reflect.ClassManifest
-import scala.math.Ordering
+import scala.reflect.{ ClassTag, classTag }
+import scala.math.{ Ordering, max, min }
/** The Sorting object provides functions that can sort various kinds of
* objects. You can provide a comparison function, or you can request a sort
- * of items that are viewable as <code>Ordered</code>. Some sorts that
+ * of items that are viewable as [[scala.math.Ordered]]. Some sorts that
* operate directly on a subset of value types are also provided. These
* implementations are derived from those in the Sun JDK.
*
- * Note that stability doesn't matter for value types, so use the quickSort
- * variants for those. <code>stableSort</code> is intended to be used with
+ * Note that stability doesn't matter for value types, so use the `quickSort`
+ * variants for those. `stableSort` is intended to be used with
* objects when the prior ordering should be preserved, where possible.
*
* @author Ross Judson
@@ -39,34 +40,32 @@ object Sorting {
/** Sort an array of K where K is Ordered, preserving the existing order
* where the values are equal. */
- def stableSort[K: ClassManifest: Ordering](a: Array[K]) {
+ def stableSort[K: ClassTag: Ordering](a: Array[K]) {
stableSort(a, 0, a.length-1, new Array[K](a.length), Ordering[K].lt _)
}
- /** Sorts an array of <code>K</code> given an ordering function
- * <code>f</code>. <code>f</code> should return <code>true</code> iff
- * its first parameter is strictly less than its second parameter.
+ /** Sorts an array of `K` given an ordering function `f`.
+ * `f` should return `true` iff its first parameter is strictly less than its second parameter.
*/
- def stableSort[K: ClassManifest](a: Array[K], f: (K, K) => Boolean) {
+ def stableSort[K: ClassTag](a: Array[K], f: (K, K) => Boolean) {
stableSort(a, 0, a.length-1, new Array[K](a.length), f)
}
/** Sorts an arbitrary sequence into an array, given a comparison function
- * that should return <code>true</code> iff parameter one is strictly less
- * than parameter two.
+ * that should return `true` iff parameter one is strictly less than parameter two.
*
* @param a the sequence to be sorted.
* @param f the comparison function.
* @return the sorted sequence of items.
*/
- def stableSort[K: ClassManifest](a: Seq[K], f: (K, K) => Boolean): Array[K] = {
+ def stableSort[K: ClassTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = {
val ret = a.toArray
stableSort(ret, f)
ret
}
/** Sorts an arbitrary sequence of items that are viewable as ordered. */
- def stableSort[K: ClassManifest: Ordering](a: Seq[K]): Array[K] =
+ def stableSort[K: ClassTag: Ordering](a: Seq[K]): Array[K] =
stableSort(a, Ordering[K].lt _)
/** Stably sorts a sequence of items given an extraction function that will
@@ -76,8 +75,8 @@ object Sorting {
* @param f the comparison function.
* @return the sorted sequence of items.
*/
- def stableSort[K: ClassManifest, M: Ordering](a: Seq[K], f: K => M): Array[K] =
- stableSort(a)(implicitly[ClassManifest[K]], Ordering[M] on f)
+ def stableSort[K: ClassTag, M: Ordering](a: Seq[K], f: K => M): Array[K] =
+ stableSort(a)(implicitly[ClassTag[K]], Ordering[M] on f)
private def sort1[K: Ordering](x: Array[K], off: Int, len: Int) {
val ord = Ordering[K]
@@ -125,7 +124,7 @@ object Sorting {
var l = off
var n = off + len - 1
if (len > 40) { // Big arrays, pseudomedian of 9
- var s = len / 8
+ val s = len / 8
l = med3(l, l+s, l+2*s)
m = med3(m-s, m, m+s)
n = med3(n-2*s, n-s, n)
@@ -226,7 +225,7 @@ object Sorting {
var l = off
var n = off + len - 1
if (len > 40) { // Big arrays, pseudomedian of 9
- var s = len / 8
+ val s = len / 8
l = med3(l, l+s, l+2*s)
m = med3(m-s, m, m+s)
n = med3(n-2*s, n-s, n)
@@ -330,7 +329,7 @@ object Sorting {
var l = off
var n = off + len - 1
if (len > 40) { // Big arrays, pseudomedian of 9
- var s = len / 8
+ val s = len / 8
l = med3(l, l+s, l+2*s)
m = med3(m-s, m, m+s)
n = med3(n-2*s, n-s, n)
@@ -438,7 +437,7 @@ object Sorting {
var l = off
var n = off + len - 1
if (len > 40) { // Big arrays, pseudomedian of 9
- var s = len / 8
+ val s = len / 8
l = med3(l, l+s, l+2*s)
m = med3(m-s, m, m+s)
n = med3(n-2*s, n-s, n)
@@ -500,7 +499,7 @@ object Sorting {
sort2(off, len)
}
- private def stableSort[K : ClassManifest](a: Array[K], lo: Int, hi: Int, scratch: Array[K], f: (K,K) => Boolean) {
+ private def stableSort[K : ClassTag](a: Array[K], lo: Int, hi: Int, scratch: Array[K], f: (K,K) => Boolean) {
if (lo < hi) {
val mid = (lo+hi) / 2
stableSort(a, lo, mid, scratch, f)
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
new file mode 100644
index 0000000..7749543
--- /dev/null
+++ b/src/library/scala/util/Try.scala
@@ -0,0 +1,217 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2008-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.util
+
+import scala.collection.Seq
+import scala.util.control.NonFatal
+import scala.language.implicitConversions
+
+/**
+ * The `Try` type represents a computation that may either result in an exception, or return a
+ * successfully computed value. It's similar to, but semantically different from the [[scala.util.Either]] type.
+ *
+ * Instances of `Try[T]`, are either an instance of [[scala.util.Success]][T] or [[scala.util.Failure]][T].
+ *
+ * For example, `Try` can be used to perform division on a user-defined input, without the need to do explicit
+ * exception-handling in all of the places that an exception might occur.
+ *
+ * Example:
+ * {{{
+ * import scala.util.{Try, Success, Failure}
+ *
+ * def divide: Try[Int] = {
+ * val dividend = Try(Console.readLine("Enter an Int that you'd like to divide:\n").toInt)
+ * val divisor = Try(Console.readLine("Enter an Int that you'd like to divide by:\n").toInt)
+ * val problem = dividend.flatMap(x => divisor.map(y => x/y))
+ * problem match {
+ * case Success(v) =>
+ * println("Result of " + dividend.get + "/"+ divisor.get +" is: " + v)
+ * Success(v)
+ * case Failure(e) =>
+ * println("You must've divided by zero or entered something that's not an Int. Try again!")
+ * println("Info from the exception: " + e.getMessage)
+ * divide
+ * }
+ * }
+ *
+ * }}}
+ *
+ * An important property of `Try` shown in the above example is its ability to ''pipeline'', or chain, operations,
+ * catching exceptions along the way. The `flatMap` and `map` combinators in the above example each essentially
+ * pass off either their successfully completed value, wrapped in the `Success` type for it to be further operated
+ * upon by the next combinator in the chain, or the exception wrapped in the `Failure` type usually to be simply
+ * passed on down the chain. Combinators such as `rescue` and `recover` are designed to provide some type of
+ * default behavior in the case of failure.
+ *
+ * ''Note'': only non-fatal exceptions are caught by the combinators on `Try` (see [[scala.util.control.NonFatal]]).
+ * Serious system errors, on the other hand, will be thrown.
+ *
+ * ''Note:'': all Try combinators will catch exceptions and return failure unless otherwise specified in the documentation.
+ *
+ * `Try` comes to the Scala standard library after years of use as an integral part of Twitter's stack.
+ *
+ * @author based on Twitter's original implementation in com.twitter.util.
+ * @since 2.10
+ */
+sealed abstract class Try[+T] {
+
+ /** Returns `true` if the `Try` is a `Failure`, `false` otherwise.
+ */
+ def isFailure: Boolean
+
+ /** Returns `true` if the `Try` is a `Success`, `false` otherwise.
+ */
+ def isSuccess: Boolean
+
+ /** Returns the value from this `Success` or the given `default` argument if this is a `Failure`.
+ *
+ * ''Note:'': This will throw an exception if it is not a success and default throws an exception.
+ */
+ def getOrElse[U >: T](default: => U): U =
+ if (isSuccess) get else default
+
+ /** Returns this `Try` if it's a `Success` or the given `default` argument if this is a `Failure`.
+ */
+ def orElse[U >: T](default: => Try[U]): Try[U] =
+ try if (isSuccess) this else default
+ catch {
+ case NonFatal(e) => Failure(e)
+ }
+
+ /** Returns the value from this `Success` or throws the exception if this is a `Failure`.
+ */
+ def get: T
+
+ /**
+ * Applies the given function `f` if this is a `Success`, otherwise returns `Unit` if this is a `Failure`.
+ *
+ * ''Note:'' If `f` throws, then this method may throw an exception.
+ */
+ def foreach[U](f: T => U): Unit
+
+ /**
+ * Returns the given function applied to the value from this `Success` or returns this if this is a `Failure`.
+ */
+ def flatMap[U](f: T => Try[U]): Try[U]
+
+ /**
+ * Maps the given function to the value from this `Success` or returns this if this is a `Failure`.
+ */
+ def map[U](f: T => U): Try[U]
+
+ /**
+ * Converts this to a `Failure` if the predicate is not satisfied.
+ */
+ def filter(p: T => Boolean): Try[T]
+
+ /**
+ * Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`.
+ * This is like `flatMap` for the exception.
+ */
+ def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U]
+
+ /**
+ * Applies the given function `f` if this is a `Failure`, otherwise returns this if this is a `Success`.
+ * This is like map for the exception.
+ */
+ def recover[U >: T](f: PartialFunction[Throwable, U]): Try[U]
+
+ /**
+ * Returns `None` if this is a `Failure` or a `Some` containing the value if this is a `Success`.
+ */
+ def toOption: Option[T] = if (isSuccess) Some(get) else None
+
+ /**
+ * Transforms a nested `Try`, ie, a `Try` of type `Try[Try[T]]`,
+ * into an un-nested `Try`, ie, a `Try` of type `Try[T]`.
+ */
+ def flatten[U](implicit ev: T <:< Try[U]): Try[U]
+
+ /**
+ * Completes this `Try` with an exception wrapped in a `Success`. The exception is either the exception that the
+ * `Try` failed with (if a `Failure`) or an `UnsupportedOperationException`.
+ */
+ def failed: Try[Throwable]
+
+ /** Completes this `Try` by applying the function `f` to this if this is of type `Failure`, or conversely, by applying
+ * `s` if this is a `Success`.
+ */
+ def transform[U](s: T => Try[U], f: Throwable => Try[U]): Try[U] =
+ try this match {
+ case Success(v) => s(v)
+ case Failure(e) => f(e)
+ } catch {
+ case NonFatal(e) => Failure(e)
+ }
+
+}
+
+object Try {
+ /** Constructs a `Try` using the by-name parameter. This
+ * method will ensure any non-fatal exception is caught and a
+ * `Failure` object is returned.
+ */
+ def apply[T](r: => T): Try[T] =
+ try Success(r) catch {
+ case NonFatal(e) => Failure(e)
+ }
+
+}
+
+final case class Failure[+T](val exception: Throwable) extends Try[T] {
+ def isFailure: Boolean = true
+ def isSuccess: Boolean = false
+ def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] =
+ try {
+ if (f isDefinedAt exception) f(exception) else this
+ } catch {
+ case NonFatal(e) => Failure(e)
+ }
+ def get: T = throw exception
+ def flatMap[U](f: T => Try[U]): Try[U] = this.asInstanceOf[Try[U]]
+ def flatten[U](implicit ev: T <:< Try[U]): Try[U] = this.asInstanceOf[Try[U]]
+ def foreach[U](f: T => U): Unit = ()
+ def map[U](f: T => U): Try[U] = this.asInstanceOf[Try[U]]
+ def filter(p: T => Boolean): Try[T] = this
+ def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] =
+ try {
+ if (rescueException isDefinedAt exception) {
+ Try(rescueException(exception))
+ } else this
+ } catch {
+ case NonFatal(e) => Failure(e)
+ }
+ def failed: Try[Throwable] = Success(exception)
+}
+
+
+final case class Success[+T](value: T) extends Try[T] {
+ def isFailure: Boolean = false
+ def isSuccess: Boolean = true
+ def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] = this
+ def get = value
+ def flatMap[U](f: T => Try[U]): Try[U] =
+ try f(value)
+ catch {
+ case NonFatal(e) => Failure(e)
+ }
+ def flatten[U](implicit ev: T <:< Try[U]): Try[U] = value
+ def foreach[U](f: T => U): Unit = f(value)
+ def map[U](f: T => U): Try[U] = Try[U](f(value))
+ def filter(p: T => Boolean): Try[T] = {
+ try {
+ if (p(value)) this
+ else Failure(new NoSuchElementException("Predicate does not hold for " + value))
+ } catch {
+ case NonFatal(e) => Failure(e)
+ }
+ }
+ def recover[U >: T](rescueException: PartialFunction[Throwable, U]): Try[U] = this
+ def failed: Try[Throwable] = Failure(new UnsupportedOperationException("Success.failed"))
+}
diff --git a/src/library/scala/util/automata/BaseBerrySethi.scala b/src/library/scala/util/automata/BaseBerrySethi.scala
index 4b1003b..3f6f450 100644
--- a/src/library/scala/util/automata/BaseBerrySethi.scala
+++ b/src/library/scala/util/automata/BaseBerrySethi.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.util.automata
import scala.util.regexp.{ Base }
@@ -15,10 +13,11 @@ import scala.collection.{ mutable, immutable }
// todo: replace global variable pos with acc
-/** this turns a regexp over A into a NondetWorkAutom over A using the
- * celebrated position automata construction (also called Berry-Sethi or
- * Glushkov)
- */
+/** This class turns a regular expression over `A` into a
+ * [[scala.util.automata.NondetWordAutom]] over `A` using the celebrated
+ * position automata construction (also called ''Berry-Sethi'' or ''Glushkov'').
+ */
+ at deprecated("This class will be removed", "2.10.0")
abstract class BaseBerrySethi {
val lang: Base
import lang.{ Alt, Eps, Meta, RegExp, Sequ, Star }
@@ -44,21 +43,18 @@ abstract class BaseBerrySethi {
val (l1, l2) = x.rs span (_.isNullable)
((l1 ++ (l2 take 1)) map compFunction).foldLeft(emptySet)(_ ++ _)
case Star(t) => compFunction(t)
- case _ => throw new IllegalArgumentException("unexpected pattern " + r.getClass())
+ case _ => throw new IllegalArgumentException("unexpected pattern " + r.getClass)
}
- /** computes first( r ) for the word regexp r */
+ /** Computes `first(r)` for the word regexp `r`. */
protected def compFirst(r: RegExp): Set[Int] = doComp(r, compFirst)
- /** computes last( r ) for the regexp r */
+ /** Computes `last(r)` for the regexp `r`. */
protected def compLast(r: RegExp): Set[Int] = doComp(r, compLast)
/** Starts from the right-to-left
* precondition: pos is final
* pats are successor patterns of a Sequence node
- *
- * @param r ...
- * @return ...
*/
protected def compFollow(rs: Seq[RegExp]): Set[Int] = {
follow(0) =
@@ -73,15 +69,10 @@ abstract class BaseBerrySethi {
follow(0)
}
- /** returns the first set of an expression, setting the follow set along
- * the way.
- *
- * @param fol1 ...
- * @param r ...
- * @return ...
+ /** Returns the first set of an expression, setting the follow set along the way.
*/
protected def compFollow1(fol1: Set[Int], r: RegExp): Set[Int] = r match {
- case x: Alt => Set(x.rs reverseMap (compFollow1(fol1, _)) flatten: _*)
+ case x: Alt => Set((x.rs reverseMap (compFollow1(fol1, _))).flatten: _*)
case x: Meta => compFollow1(fol1, x.r)
case x: Star => compFollow1(fol1 ++ compFirst(x.r), x.r)
case x: Sequ =>
@@ -91,20 +82,17 @@ abstract class BaseBerrySethi {
if (p.isNullable) fol ++ first
else first
}
- case _ => throw new IllegalArgumentException("unexpected pattern: " + r.getClass())
+ case _ => throw new IllegalArgumentException("unexpected pattern: " + r.getClass)
}
- /** returns "Sethi-length" of a pattern, creating the set of position
- * along the way.
- *
- * @param r ...
+ /** Returns the "Sethi-length" of a pattern, creating the set of position along the way.
*/
protected def traverse(r: RegExp): Unit = r match {
// (is tree automaton stuff, more than Berry-Sethi)
- case x: Alt => x.rs foreach traverse
- case x: Sequ => x.rs foreach traverse
- case x: Meta => traverse(x.r)
- case Star(t) => traverse(t)
- case _ => throw new IllegalArgumentException("unexp pattern " + r.getClass())
+ case x: Alt => x.rs foreach traverse
+ case x: Sequ => x.rs foreach traverse
+ case x: Meta => traverse(x.r)
+ case Star(t) => traverse(t)
+ case _ => throw new IllegalArgumentException("unexp pattern " + r.getClass)
}
}
diff --git a/src/library/scala/util/automata/DetWordAutom.scala b/src/library/scala/util/automata/DetWordAutom.scala
index 16c5d29..5d70910 100644
--- a/src/library/scala/util/automata/DetWordAutom.scala
+++ b/src/library/scala/util/automata/DetWordAutom.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -20,6 +20,7 @@ import scala.collection.{ mutable, immutable }
* @author Burak Emir
* @version 1.0
*/
+ at deprecated("This class will be removed", "2.10.0")
abstract class DetWordAutom[T <: AnyRef] {
val nstates: Int
val finals: Array[Int]
diff --git a/src/library/scala/util/automata/Inclusion.scala b/src/library/scala/util/automata/Inclusion.scala
index c45fca5..91441bd 100644
--- a/src/library/scala/util/automata/Inclusion.scala
+++ b/src/library/scala/util/automata/Inclusion.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,19 +12,17 @@ package scala.util.automata
/** A fast test of language inclusion between minimal automata.
- * inspired by the AMoRE automata library
+ * inspired by the ''AMoRE automata library''.
*
* @author Burak Emir
* @version 1.0
*/
+ at deprecated("This class will be removed", "2.10.0")
trait Inclusion[A <: AnyRef] {
val labels: Seq[A]
- /** Returns true if dfa1 is included in dfa2.
- *
- * @param dfa1 ...
- * @param dfa2 ...
+ /** Returns true if `dfa1` is included in `dfa2`.
*/
def inclusion(dfa1: DetWordAutom[A], dfa2: DetWordAutom[A]) = {
diff --git a/src/library/scala/util/automata/NondetWordAutom.scala b/src/library/scala/util/automata/NondetWordAutom.scala
index 05bb442..24c6612 100644
--- a/src/library/scala/util/automata/NondetWordAutom.scala
+++ b/src/library/scala/util/automata/NondetWordAutom.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.util.automata
import scala.collection.{ immutable, mutable }
@@ -16,45 +15,44 @@ import scala.collection.{ immutable, mutable }
* in the delta function. Default transitions are transitions that
* are taken when no other transitions can be applied.
* All states are reachable. Accepting states are those for which
- * the partial function 'finals' is defined.
+ * the partial function `finals` is defined.
*/
+ at deprecated("This class will be removed", "2.10.0")
abstract class NondetWordAutom[T <: AnyRef] {
- import immutable.BitSet
-
val nstates: Int
val labels: Seq[T]
val finals: Array[Int] // 0 means not final
- val delta: Array[mutable.Map[T, BitSet]]
- val default: Array[BitSet]
+ val delta: Array[mutable.Map[T, immutable.BitSet]]
+ val default: Array[immutable.BitSet]
- /** returns true if the state is final */
+ /** @return true if the state is final */
final def isFinal(state: Int) = finals(state) > 0
- /** returns tag of final state */
+ /** @return tag of final state */
final def finalTag(state: Int) = finals(state)
- /** returns true if the set of states contains at least one final state */
- final def containsFinal(Q: BitSet): Boolean = Q exists isFinal
+ /** @return true if the set of states contains at least one final state */
+ final def containsFinal(Q: immutable.BitSet): Boolean = Q exists isFinal
- /** returns true if there are no accepting states */
+ /** @return true if there are no accepting states */
final def isEmpty = (0 until nstates) forall (x => !isFinal(x))
- /** returns a BitSet with the next states for given state and label */
- def next(q: Int, a: T): BitSet = delta(q).getOrElse(a, default(q))
+ /** @return a immutable.BitSet with the next states for given state and label */
+ def next(q: Int, a: T): immutable.BitSet = delta(q).getOrElse(a, default(q))
- /** returns a BitSet with the next states for given state and label */
- def next(Q: BitSet, a: T): BitSet = next(Q, next(_, a))
- def nextDefault(Q: BitSet): BitSet = next(Q, default)
+ /** @return a immutable.BitSet with the next states for given state and label */
+ def next(Q: immutable.BitSet, a: T): immutable.BitSet = next(Q, next(_, a))
+ def nextDefault(Q: immutable.BitSet): immutable.BitSet = next(Q, default)
- private def next(Q: BitSet, f: (Int) => BitSet): BitSet =
- (Q map f).foldLeft(BitSet.empty)(_ ++ _)
+ private def next(Q: immutable.BitSet, f: (Int) => immutable.BitSet): immutable.BitSet =
+ (Q map f).foldLeft(immutable.BitSet.empty)(_ ++ _)
private def finalStates = 0 until nstates filter isFinal
override def toString = {
val finalString = Map(finalStates map (j => j -> finals(j)) : _*).toString
- val deltaString = (0 until nstates) .
- map (i => " %d->%s\n _>%s\n".format(i, delta(i), default(i))) mkString
+ val deltaString = (0 until nstates)
+ .map(i => " %d->%s\n _>%s\n".format(i, delta(i), default(i))).mkString
"[NondetWordAutom nstates=%d finals=%s delta=\n%s".format(nstates, finalString, deltaString)
}
diff --git a/src/library/scala/util/automata/SubsetConstruction.scala b/src/library/scala/util/automata/SubsetConstruction.scala
index e2afabf..0ee7685 100644
--- a/src/library/scala/util/automata/SubsetConstruction.scala
+++ b/src/library/scala/util/automata/SubsetConstruction.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,36 +10,36 @@ package scala.util.automata
import scala.collection.{ mutable, immutable }
+ at deprecated("This class will be removed", "2.10.0")
class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
import nfa.labels
- import immutable.BitSet
- def selectTag(Q: BitSet, finals: Array[Int]) =
- Q map finals filter (_ > 0) min
+ def selectTag(Q: immutable.BitSet, finals: Array[Int]) =
+ (Q map finals filter (_ > 0)).min
def determinize: DetWordAutom[T] = {
// for assigning numbers to bitsets
- var indexMap = collection.Map[BitSet, Int]()
- var invIndexMap = collection.Map[Int, BitSet]()
+ var indexMap = scala.collection.Map[immutable.BitSet, Int]()
+ var invIndexMap = scala.collection.Map[Int, immutable.BitSet]()
var ix = 0
// we compute the dfa with states = bitsets
- val q0 = BitSet(0) // the set { 0 }
- val sink = BitSet.empty // the set { }
+ val q0 = immutable.BitSet(0) // the set { 0 }
+ val sink = immutable.BitSet.empty // the set { }
var states = Set(q0, sink) // initial set of sets
- val delta = new mutable.HashMap[BitSet, mutable.HashMap[T, BitSet]]
+ val delta = new mutable.HashMap[immutable.BitSet, mutable.HashMap[T, immutable.BitSet]]
var deftrans = mutable.Map(q0 -> sink, sink -> sink) // initial transitions
- var finals: mutable.Map[BitSet, Int] = mutable.Map()
- val rest = new mutable.Stack[BitSet]
+ var finals: mutable.Map[immutable.BitSet, Int] = mutable.Map()
+ val rest = new mutable.Stack[immutable.BitSet]
rest.push(sink, q0)
- def addFinal(q: BitSet) {
+ def addFinal(q: immutable.BitSet) {
if (nfa containsFinal q)
finals = finals.updated(q, selectTag(q, nfa.finals))
}
- def add(Q: BitSet) {
+ def add(Q: immutable.BitSet) {
if (!states(Q)) {
states += Q
rest push Q
@@ -57,7 +57,7 @@ class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
ix += 1
// make transition map
- val Pdelta = new mutable.HashMap[T, BitSet]
+ val Pdelta = new mutable.HashMap[T, immutable.BitSet]
delta.update(P, Pdelta)
labels foreach { label =>
diff --git a/src/library/scala/util/automata/WordBerrySethi.scala b/src/library/scala/util/automata/WordBerrySethi.scala
index ad15dc9..12448f5 100644
--- a/src/library/scala/util/automata/WordBerrySethi.scala
+++ b/src/library/scala/util/automata/WordBerrySethi.scala
@@ -1,52 +1,49 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.util.automata
import scala.collection.{ immutable, mutable }
-import mutable.{ HashSet, HashMap }
import scala.util.regexp.WordExp
-/** This class turns a regexp into a NondetWordAutom using the
- * celebrated position automata construction (also called Berry-Sethi or
- * Glushkov)
- *
- * @author Burak Emir
- * @version 1.0
- */
+/** This class turns a regular expression into a [[scala.util.automata.NondetWordAutom]]
+ * celebrated position automata construction (also called ''Berry-Sethi'' or ''Glushkov'').
+ *
+ * @author Burak Emir
+ * @version 1.0
+ */
+ at deprecated("This class will be removed", "2.10.0")
abstract class WordBerrySethi extends BaseBerrySethi {
override val lang: WordExp
import lang.{ Alt, Eps, Letter, Meta, RegExp, Sequ, Star, _labelT }
- protected var labels: HashSet[_labelT] = _
+ protected var labels: mutable.HashSet[_labelT] = _
// don't let this fool you, only labelAt is a real, surjective mapping
- protected var labelAt: Map[Int, _labelT] = _ // new alphabet "gamma"
- protected var deltaq: Array[HashMap[_labelT, List[Int]]] = _ // delta
- protected var defaultq: Array[List[Int]] = _ // default transitions
- protected var initials: Set[Int] = _
+ protected var labelAt: Map[Int, _labelT] = _ // new alphabet "gamma"
+ protected var deltaq: Array[mutable.HashMap[_labelT, List[Int]]] = _ // delta
+ protected var defaultq: Array[List[Int]] = _ // default transitions
+ protected var initials: Set[Int] = _
- /** Computes <code>first(r)</code> where the word regexp <code>r</code>.
+ /** Computes `first(r)` where the word regexp `r`.
*
* @param r the regular expression
- * @return the computed set <code>first(r)</code>
+ * @return the computed set `first(r)`
*/
protected override def compFirst(r: RegExp): Set[Int] = r match {
case x: Letter => Set(x.pos)
case _ => super.compFirst(r)
}
- /** Computes <code>last(r)</code> where the word regexp <code>r</code>.
+ /** Computes `last(r)` where the word regexp `r`.
*
* @param r the regular expression
- * @return the computed set <code>last(r)</code>
+ * @return the computed set `last(r)`
*/
protected override def compLast(r: RegExp): Set[Int] = r match {
case x: Letter => Set(x.pos)
@@ -56,7 +53,6 @@ abstract class WordBerrySethi extends BaseBerrySethi {
/** Returns the first set of an expression, setting the follow set along
* the way.
*
- * @param fol1 ...
* @param r the regular expression
* @return the computed set
*/
@@ -66,11 +62,11 @@ abstract class WordBerrySethi extends BaseBerrySethi {
case _ => super.compFollow1(fol1, r)
}
- /** returns "Sethi-length" of a pattern, creating the set of position
+ /** Returns "Sethi-length" of a pattern, creating the set of position
* along the way
*/
- /** called at the leaves of the regexp */
+ /** Called at the leaves of the regexp */
protected def seenLabel(r: RegExp, i: Int, label: _labelT) {
labelAt = labelAt.updated(i, label)
this.labels += label
@@ -98,8 +94,8 @@ abstract class WordBerrySethi extends BaseBerrySethi {
protected def initialize(subexpr: Seq[RegExp]): Unit = {
this.labelAt = immutable.Map()
- this.follow = HashMap()
- this.labels = HashSet()
+ this.follow = mutable.HashMap()
+ this.labels = mutable.HashSet()
this.pos = 0
// determine "Sethi-length" of the regexp
@@ -110,17 +106,17 @@ abstract class WordBerrySethi extends BaseBerrySethi {
protected def initializeAutom() {
finals = immutable.Map.empty[Int, Int] // final states
- deltaq = new Array[HashMap[_labelT, List[Int]]](pos) // delta
+ deltaq = new Array[mutable.HashMap[_labelT, List[Int]]](pos) // delta
defaultq = new Array[List[Int]](pos) // default transitions
for (j <- 0 until pos) {
- deltaq(j) = HashMap[_labelT, List[Int]]()
+ deltaq(j) = mutable.HashMap[_labelT, List[Int]]()
defaultq(j) = Nil
}
}
protected def collectTransitions(): Unit = // make transitions
- for (j <- 0 until pos ; val fol = follow(j) ; k <- fol) {
+ for (j <- 0 until pos ; fol = follow(j) ; k <- fol) {
if (pos == k) finals = finals.updated(j, finalTag)
else makeTransition(j, k, labelAt(k))
}
@@ -143,15 +139,15 @@ abstract class WordBerrySethi extends BaseBerrySethi {
finals = finals.updated(0, finalTag)
val delta1 = immutable.Map(deltaq.zipWithIndex map (_.swap): _*)
- val finalsArr = 0 until pos map (k => finals.getOrElse(k, 0)) toArray // 0 == not final
+ val finalsArr = (0 until pos map (k => finals.getOrElse(k, 0))).toArray // 0 == not final
val initialsArr = initials.toArray
val deltaArr: Array[mutable.Map[_labelT, immutable.BitSet]] =
(0 until pos map { x =>
- HashMap(delta1(x).toSeq map { case (k, v) => k -> immutable.BitSet(v: _*) } : _*)
+ mutable.HashMap(delta1(x).toSeq map { case (k, v) => k -> immutable.BitSet(v: _*) } : _*)
}).toArray
- val defaultArr = 0 until pos map (k => immutable.BitSet(defaultq(k): _*)) toArray
+ val defaultArr = (0 until pos map (k => immutable.BitSet(defaultq(k): _*))).toArray
new NondetWordAutom[_labelT] {
val nstates = pos
@@ -165,4 +161,4 @@ abstract class WordBerrySethi extends BaseBerrySethi {
automatonFrom(Sequ(z.asInstanceOf[this.lang._regexpT]), finalTag)
}
}
-}
\ No newline at end of file
+}
diff --git a/src/library/scala/util/control/Breaks.scala b/src/library/scala/util/control/Breaks.scala
index 20c3502..89e1b58 100644
--- a/src/library/scala/util/control/Breaks.scala
+++ b/src/library/scala/util/control/Breaks.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.util.control
/** A class that can be instantiated for the break control abstraction.
@@ -43,8 +41,8 @@ class Breaks {
}
}
- trait TryBlock {
- def catchBreak(onBreak: => Unit): Unit
+ sealed trait TryBlock[T] {
+ def catchBreak(onBreak: =>T): T
}
/**
@@ -59,8 +57,8 @@ class Breaks {
* }
* }}}
*/
- def tryBreakable(op: => Unit) = new TryBlock {
- def catchBreak(onBreak: => Unit) = try {
+ def tryBreakable[T](op: =>T) = new TryBlock[T] {
+ def catchBreak(onBreak: =>T) = try {
op
} catch {
case ex: BreakControl =>
@@ -75,7 +73,7 @@ class Breaks {
*
* @note This might be different than the statically closest enclosing block!
*/
- def break() { throw breakException }
+ def break(): Nothing = { throw breakException }
}
/** An object that can be used for the break control abstraction.
@@ -93,4 +91,3 @@ class Breaks {
object Breaks extends Breaks
private class BreakControl extends ControlThrowable
-
diff --git a/src/library/scala/util/control/ControlThrowable.scala b/src/library/scala/util/control/ControlThrowable.scala
index 122b0c9..33c90c5 100644
--- a/src/library/scala/util/control/ControlThrowable.scala
+++ b/src/library/scala/util/control/ControlThrowable.scala
@@ -1,38 +1,34 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.util.control
-/**
- * A marker trait indicating that the <code>Throwable</code> it is mixed
- * into is intended for flow control.
- *
- * <p>Note that <code>Throwable</code> subclasses which extend this trait
- * may extend any other <code>Throwable</code> subclass (eg.
- * <code>RuntimeException</code>) and are not required to extend
- * <code>Throwable</code> directly.</p>
+/** A marker trait indicating that the `Throwable` it is mixed into is
+ * intended for flow control.
*
- * <p>Instances of <code>Throwable</code> subclasses marked in
- * this way should not normally be caught. Where catch-all behaviour is
- * required <code>ControlThrowable</code>s should be propagated, for
- * example,</p>
+ * Note that `Throwable` subclasses which extend this trait may extend any
+ * other `Throwable` subclass (eg. `RuntimeException`) and are not required
+ * to extend `Throwable` directly.
*
- * <pre>
+ * Instances of `Throwable` subclasses marked in this way should not normally
+ * be caught. Where catch-all behaviour is required `ControlThrowable`
+ * should be propagated, for example:
+ * {{{
* import scala.util.control.ControlThrowable
*
* try {
* // Body might throw arbitrarily
- * } catch {
- * case ce : ControlThrowable => throw ce // propagate
- * case t : Exception => log(t) // log and suppress
- * </pre>
+ * } catch {
+ * case c: ControlThrowable => throw c // propagate
+ * case t: Exception => log(t) // log and suppress
+ * }
+ * }}}
*
- * @author Miles Sabin
+ * @author Miles Sabin
*/
trait ControlThrowable extends Throwable with NoStackTrace
diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala
index ad90b22..b97914c 100644
--- a/src/library/scala/util/control/Exception.scala
+++ b/src/library/scala/util/control/Exception.scala
@@ -1,21 +1,34 @@
-package scala.util.control
-
-import collection.immutable.List
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.util
+package control
+
+import scala.collection.immutable.List
+import scala.reflect.{ ClassTag, classTag }
import java.lang.reflect.InvocationTargetException
+import scala.language.implicitConversions
+
/** Classes representing the components of exception handling.
* Each class is independently composable. Some example usages:
+ * {{{
+ * import scala.util.control.Exception._
+ * import java.net._
*
+ * val s = "http://www.scala-lang.org/"
+ * val x1 = catching(classOf[MalformedURLException]) opt new URL(s)
+ * val x2 = catching(classOf[MalformedURLException], classOf[NullPointerException]) either new URL(s)
+ * }}}
*
- *
- * <pre>
- * <b>import</b> scala.util.control.Exception._
- * <b>import</b> java.net._
- *
- * <b>val</b> s = "http://www.scala-lang.org/"
- * <b>val</b> x1 = catching(classOf[MalformedURLException]) opt new URL(s)
- * <b>val</b> x2 = catching(classOf[MalformedURLException], classOf[NullPointerException]) either new URL(s)
- * </pre>
+ * This class differs from `scala.util.Try` in that it focuses on composing exception handlers rather than
+ * composing behavior. All behavior should be composed first and fed to a `Catch` object using one of the
+ * `opt` or `either` methods.
*
* @author Paul Phillips
*/
@@ -23,17 +36,18 @@ import java.lang.reflect.InvocationTargetException
object Exception {
type Catcher[+T] = PartialFunction[Throwable, T]
- def mkCatcher[Ex <: Throwable: ClassManifest, T](isDef: Ex => Boolean, f: Ex => T) = new Catcher[T] {
+ def mkCatcher[Ex <: Throwable: ClassTag, T](isDef: Ex => Boolean, f: Ex => T) = new Catcher[T] {
private def downcast(x: Throwable): Option[Ex] =
- if (classManifest[Ex].erasure.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[Ex])
+ if (classTag[Ex].runtimeClass.isAssignableFrom(x.getClass)) Some(x.asInstanceOf[Ex])
else None
def isDefinedAt(x: Throwable) = downcast(x) exists isDef
def apply(x: Throwable): T = f(downcast(x).get)
}
+
def mkThrowableCatcher[T](isDef: Throwable => Boolean, f: Throwable => T) = mkCatcher(isDef, f)
- implicit def throwableSubtypeToCatcher[Ex <: Throwable: ClassManifest, T](pf: PartialFunction[Ex, T]) =
+ implicit def throwableSubtypeToCatcher[Ex <: Throwable: ClassTag, T](pf: PartialFunction[Ex, T]) =
mkCatcher(pf.isDefinedAt _, pf.apply _)
/** !!! Not at all sure of every factor which goes into this,
@@ -62,7 +76,7 @@ object Exception {
protected val name = "Finally"
def and(other: => Unit): Finally = new Finally({ body ; other })
- def invoke(): Unit = { body }
+ def invoke() { body }
}
/** A container class for catch/finally logic.
@@ -92,14 +106,14 @@ object Exception {
}
finally fin map (_.invoke())
- /* Create an empty Try container with this Catch and the supplied Finally */
+ /* Create an empty Try container with this Catch and the supplied `Finally`. */
def andFinally(body: => Unit): Catch[T] = fin match {
case None => new Catch(pf, Some(new Finally(body)), rethrow)
case Some(f) => new Catch(pf, Some(f and body), rethrow)
}
/** Apply this catch logic to the supplied body, mapping the result
- * into Option[T] - None if any exception was caught, Some(T) otherwise.
+ * into `Option[T]` - `None` if any exception was caught, `Some(T)` otherwise.
*/
def opt[U >: T](body: => U): Option[U] = toOption(Some(body))
@@ -109,8 +123,13 @@ object Exception {
*/
def either[U >: T](body: => U): Either[Throwable, U] = toEither(Right(body))
- /** Create a new Catch with the same isDefinedAt logic as this one,
- * but with the supplied apply method replacing the current one. */
+ /** Apply this catch logic to the supplied body, mapping the result
+ * into Try[T] - Failure if an exception was caught, Success(T) otherwise.
+ */
+ def withTry[U >: T](body: => U): scala.util.Try[U] = toTry(Success(body))
+
+ /** Create a `Catch` object with the same `isDefinedAt` logic as this one,
+ * but with the supplied `apply` method replacing the current one. */
def withApply[U](f: Throwable => U): Catch[U] = {
val pf2 = new Catcher[U] {
def isDefinedAt(x: Throwable) = pf isDefinedAt x
@@ -122,80 +141,61 @@ object Exception {
/** Convenience methods. */
def toOption: Catch[Option[T]] = withApply(_ => None)
def toEither: Catch[Either[Throwable, T]] = withApply(Left(_))
- }
-
- /** A container class for Try logic */
- class Try[+T] private[Exception](body: => T, val catcher: Catch[T]) {
- /** Execute "body" using catch/finally logic "catcher" */
- def apply(): T = catcher(body)
- def apply[U >: T](other: => U): U = catcher(other)
-
- /** As apply, but map caught exceptions to None and success to Some(T) */
- def opt(): Option[T] = catcher opt body
- def opt[U >: T](other: => U): Option[U] = catcher opt other
-
- /** As apply, but map caught exceptions to Left(ex) and success to Right(x) */
- def either(): Either[Throwable, T] = catcher either body
- def either[U >: T](other: => U): Either[Throwable, U] = catcher either other
-
- /** Create a new Try with the supplied body replacing the current body */
- def tryInstead[U >: T](other: => U) = new Try(other, catcher)
-
- /** Create a new Try with the supplied logic appended to the existing Catch logic. */
- def or[U >: T](pf: Catcher[U]) = new Try(body, catcher or pf)
-
- /** Create a new Try with the supplied code appended to the existing Finally. */
- def andFinally(fin: => Unit) = new Try(body, catcher andFinally fin)
-
- override def toString() = List("Try(<body>)", catcher.toString) mkString " "
+ def toTry: Catch[scala.util.Try[T]] = withApply(x => Failure(x))
}
final val nothingCatcher: Catcher[Nothing] = mkThrowableCatcher(_ => false, throw _)
+ final def nonFatalCatcher[T]: Catcher[T] = mkThrowableCatcher({ case NonFatal(_) => true; case _ => false }, throw _)
final def allCatcher[T]: Catcher[T] = mkThrowableCatcher(_ => true, throw _)
- /** The empty Catch object. */
+ /** The empty `Catch` object. */
final val noCatch: Catch[Nothing] = new Catch(nothingCatcher) withDesc "<nothing>"
- /** A Catch object which catches everything. */
+ /** A `Catch` object which catches everything. */
final def allCatch[T]: Catch[T] = new Catch(allCatcher[T]) withDesc "<everything>"
- /** Creates a Catch object which will catch any of the supplied exceptions.
- * Since the returned Catch object has no specific logic defined and will simply
- * rethrow the exceptions it catches, you will typically want to call "opt" or
- * "either" on the return value, or assign custom logic by calling "withApply".
+ /** A `Catch` object witch catches non-fatal exceptions. */
+ final def nonFatalCatch[T]: Catch[T] = new Catch(nonFatalCatcher[T]) withDesc "<non-fatal>"
+
+ /** Creates a `Catch` object which will catch any of the supplied exceptions.
+ * Since the returned `Catch` object has no specific logic defined and will simply
+ * rethrow the exceptions it catches, you will typically want to call `opt` or
+ * `either` on the return value, or assign custom logic by calling "withApply".
*
- * Note that Catch objects automatically rethrow ControlExceptions and others
+ * Note that `Catch` objects automatically rethrow `ControlExceptions` and others
* which should only be caught in exceptional circumstances. If you really want
- * to catch exactly what you specify, use "catchingPromiscuously" instead.
+ * to catch exactly what you specify, use `catchingPromiscuously` instead.
*/
def catching[T](exceptions: Class[_]*): Catch[T] =
new Catch(pfFromExceptions(exceptions : _*)) withDesc (exceptions map (_.getName) mkString ", ")
def catching[T](c: Catcher[T]): Catch[T] = new Catch(c)
- /** Creates a Catch object which will catch any of the supplied exceptions.
+ /** Creates a `Catch` object which will catch any of the supplied exceptions.
* Unlike "catching" which filters out those in shouldRethrow, this one will
- * catch whatever you ask of it: ControlThrowable, InterruptedException,
- * OutOfMemoryError, you name it.
+ * catch whatever you ask of it: `ControlThrowable`, `InterruptedException`,
+ * `OutOfMemoryError`, you name it.
*/
def catchingPromiscuously[T](exceptions: Class[_]*): Catch[T] = catchingPromiscuously(pfFromExceptions(exceptions : _*))
def catchingPromiscuously[T](c: Catcher[T]): Catch[T] = new Catch(c, None, _ => false)
- /** Creates a Catch object which catches and ignores any of the supplied exceptions. */
+ /** Creates a `Catch` object which catches and ignores any of the supplied exceptions. */
def ignoring(exceptions: Class[_]*): Catch[Unit] =
catching(exceptions: _*) withApply (_ => ())
- /** Creates a Catch object which maps all the supplied exceptions to 'None'. */
+ /** Creates a `Catch` object which maps all the supplied exceptions to `None`. */
def failing[T](exceptions: Class[_]*): Catch[Option[T]] =
catching(exceptions: _*) withApply (_ => None)
- /** Creates a Catch object which maps all the supplied exceptions to the given value. */
+ /** Creates a `Catch` object which maps all the supplied exceptions to the given value. */
def failAsValue[T](exceptions: Class[_]*)(value: => T): Catch[T] =
catching(exceptions: _*) withApply (_ => value)
- /** Returns a partially constructed Catch object, which you must give
- * an exception handler function as an argument to "by". Example:
- * handling(ex1, ex2) by (_.printStackTrace)
+ /** Returns a partially constructed `Catch` object, which you must give
+ * an exception handler function as an argument to `by`. Example:
+ * {{{
+ * handling(ex1, ex2) by (_.printStackTrace)
+ * }}}
*/
class By[T,R](f: T => R) {
def by(x: T): R = f(x)
@@ -205,10 +205,10 @@ object Exception {
new By[Throwable => T, Catch[T]](fun _)
}
- /** Returns a Catch object with no catch logic and the argument as Finally. */
+ /** Returns a `Catch` object with no catch logic and the argument as `Finally`. */
def ultimately[T](body: => Unit): Catch[T] = noCatch andFinally body
- /** Creates a Catch object which unwraps any of the supplied exceptions. */
+ /** Creates a `Catch` object which unwraps any of the supplied exceptions. */
def unwrapping[T](exceptions: Class[_]*): Catch[T] = {
def unwrap(x: Throwable): Throwable =
if (wouldMatch(x, exceptions) && x.getCause != null) unwrap(x.getCause)
@@ -218,12 +218,9 @@ object Exception {
}
/** Private **/
- private def wouldMatch(x: Throwable, classes: collection.Seq[Class[_]]): Boolean =
+ private def wouldMatch(x: Throwable, classes: scala.collection.Seq[Class[_]]): Boolean =
classes exists (_ isAssignableFrom x.getClass)
- private def pfFromExceptions(exceptions: Class[_]*) =
- new PartialFunction[Throwable, Nothing] {
- def apply(x: Throwable) = throw x
- def isDefinedAt(x: Throwable) = wouldMatch(x, exceptions)
- }
+ private def pfFromExceptions(exceptions: Class[_]*): PartialFunction[Throwable, Nothing] =
+ { case x if wouldMatch(x, exceptions) => throw x }
}
diff --git a/src/library/scala/util/control/NoStackTrace.scala b/src/library/scala/util/control/NoStackTrace.scala
index dff29c8..b33b6a1 100644
--- a/src/library/scala/util/control/NoStackTrace.scala
+++ b/src/library/scala/util/control/NoStackTrace.scala
@@ -1,12 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.util.control
+package scala
+package util.control
/** A trait for exceptions which, for efficiency reasons, do not
* fill in the stack trace. Stack trace suppression can be disabled
@@ -23,5 +24,9 @@ trait NoStackTrace extends Throwable {
}
object NoStackTrace {
- final val noSuppression = sys.SystemProperties.noTraceSupression.value
+ final def noSuppression = _noSuppression
+
+ // two-stage init to make checkinit happy, since sys.SystemProperties.noTraceSupression.value calls back into NoStackTrace.noSuppression
+ final private var _noSuppression = false
+ _noSuppression = sys.SystemProperties.noTraceSupression.value
}
diff --git a/src/library/scala/util/control/NonFatal.scala b/src/library/scala/util/control/NonFatal.scala
new file mode 100644
index 0000000..0d8cdfb
--- /dev/null
+++ b/src/library/scala/util/control/NonFatal.scala
@@ -0,0 +1,45 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.util.control
+
+/**
+ * Extractor of non-fatal Throwables. Will not match fatal errors like `VirtualMachineError`
+ * (for example, `OutOfMemoryError`, a subclass of `VirtualMachineError`), `ThreadDeath`,
+ * `LinkageError`, `InterruptedException`, `ControlThrowable`, or `NotImplementedError`.
+ * However, `StackOverflowError` is matched, i.e. considered non-fatal.
+ *
+ * Note that [[scala.util.control.ControlThrowable]], an internal Throwable, is not matched by
+ * `NonFatal` (and would therefore be thrown).
+ *
+ * For example, all harmless Throwables can be caught by:
+ * {{{
+ * try {
+ * // dangerous stuff
+ * } catch {
+ * case NonFatal(e) => log.error(e, "Something not that bad.")
+ * // or
+ * case e if NonFatal(e) => log.error(e, "Something not that bad.")
+ * }
+ * }}}
+ */
+object NonFatal {
+ /**
+ * Returns true if the provided `Throwable` is to be considered non-fatal, or false if it is to be considered fatal
+ */
+ def apply(t: Throwable): Boolean = t match {
+ case _: StackOverflowError => true // StackOverflowError ok even though it is a VirtualMachineError
+ // VirtualMachineError includes OutOfMemoryError and other fatal errors
+ case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError | _: ControlThrowable | _: NotImplementedError => false
+ case _ => true
+ }
+ /**
+ * Returns Some(t) if NonFatal(t) == true, otherwise None
+ */
+ def unapply(t: Throwable): Option[Throwable] = if (apply(t)) Some(t) else None
+}
diff --git a/src/library/scala/util/control/TailCalls.scala b/src/library/scala/util/control/TailCalls.scala
index 59e9618..955cee7 100644
--- a/src/library/scala/util/control/TailCalls.scala
+++ b/src/library/scala/util/control/TailCalls.scala
@@ -1,10 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala.util.control
/** Methods exported by this object implement tail calls via trampolining.
- * Tail calling methods have to return their result using `done` or call the next
- * method using `tailcall`. Both return a `TailRec` object. The result of evaluating
- * a tailcalling function can be retrieved from a `Tailrec` value using method result`.
- * Here's a usage example:
+ * Tail calling methods have to return their result using `done` or call the
+ * next method using `tailcall`. Both return a `TailRec` object. The result
+ * of evaluating a tailcalling function can be retrieved from a `Tailrec`
+ * value using method `result`. Here's a usage example:
* {{{
* import scala.util.control.TailCalls._
*
@@ -19,10 +27,10 @@ package scala.util.control
*/
object TailCalls {
- /** This class represents a tailcalling computation.
+ /** This class represents a tailcalling computation
*/
abstract class TailRec[+A] {
- /** Returns the result of the tailcalling computation
+ /** Returns the result of the tailcalling computation.
*/
def result: A = {
def loop(body: TailRec[A]): A = body match {
@@ -36,7 +44,8 @@ object TailCalls {
/** Internal class representing a tailcall */
protected case class Call[A](rest: () => TailRec[A]) extends TailRec[A]
- /** Internal class representing the final result return from a tailcalling computation */
+ /** Internal class representing the final result returned from a tailcalling
+ * computation */
protected case class Done[A](override val result: A) extends TailRec[A]
/** Performs a tailcall
@@ -47,10 +56,9 @@ object TailCalls {
/** Used to return final result from tailcalling computation
* @param `result` the result value
- * @return a `TailRec` object representing a computation which immediately returns `result`
+ * @return a `TailRec` object representing a computation which immediately
+ * returns `result`
*/
def done[A](result: A): TailRec[A] = new Done(result)
}
-
-
diff --git a/src/library/scala/util/grammar/HedgeRHS.scala b/src/library/scala/util/grammar/HedgeRHS.scala
index 46c474a..d1c11a2 100644
--- a/src/library/scala/util/grammar/HedgeRHS.scala
+++ b/src/library/scala/util/grammar/HedgeRHS.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,13 +10,17 @@
package scala.util.grammar
+ at deprecated("This class will be removed", "2.10.0")
abstract class HedgeRHS
-/** right hand side of a hedge production, deriving a single tree */
+/** Right hand side of a hedge production, deriving a single tree. */
+ at deprecated("This class will be removed", "2.10.0")
case class ConsRHS(tnt: Int, hnt: Int) extends HedgeRHS
-/** right hand side of a hedge production, deriving any hedge */
+/** Right hand side of a hedge production, deriving any hedge. */
+ at deprecated("This class will be removed", "2.10.0")
case object AnyHedgeRHS extends HedgeRHS
-/** right hand side of a hedge production, deriving the empty hedge */
+/** Right hand side of a hedge production, deriving the empty hedge. */
+ at deprecated("This class will be removed", "2.10.0")
case object EmptyHedgeRHS extends HedgeRHS
diff --git a/src/library/scala/util/grammar/TreeRHS.scala b/src/library/scala/util/grammar/TreeRHS.scala
index b6e2b1a..ee72ea9 100644
--- a/src/library/scala/util/grammar/TreeRHS.scala
+++ b/src/library/scala/util/grammar/TreeRHS.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,10 +10,13 @@
package scala.util.grammar
-/** right hand side of a tree production */
+/** Right hand side of a tree production. */
+ at deprecated("This class will be removed", "2.10.0")
abstract class TreeRHS
-/** right hand side of a tree production, labelled with a letter from an alphabet */
+/** Right hand side of a tree production, labelled with a letter from an alphabet. */
+ at deprecated("This class will be removed", "2.10.0")
case class LabelledRHS[A](label: A, hnt: Int) extends TreeRHS
+ at deprecated("This class will be removed", "2.10.0")
case object AnyTreeRHS extends TreeRHS
diff --git a/src/library/scala/util/hashing/ByteswapHashing.scala b/src/library/scala/util/hashing/ByteswapHashing.scala
new file mode 100644
index 0000000..a969457
--- /dev/null
+++ b/src/library/scala/util/hashing/ByteswapHashing.scala
@@ -0,0 +1,35 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.util.hashing
+
+
+
+
+
+
+/** A fast multiplicative hash by Phil Bagwell.
+ */
+final class ByteswapHashing[T] extends Hashing[T] {
+
+ def hash(v: T) = byteswap32(v.##)
+
+}
+
+
+object ByteswapHashing {
+
+ private class Chained[T](h: Hashing[T]) extends Hashing[T] {
+ def hash(v: T) = byteswap32(h.hash(v))
+ }
+
+ /** Composes another `Hashing` with the Byteswap hash.
+ */
+ def chain[T](h: Hashing[T]): Hashing[T] = new Chained(h)
+
+}
diff --git a/src/library/scala/util/hashing/Hashing.scala b/src/library/scala/util/hashing/Hashing.scala
new file mode 100644
index 0000000..b57f858
--- /dev/null
+++ b/src/library/scala/util/hashing/Hashing.scala
@@ -0,0 +1,39 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.util.hashing
+
+import scala.annotation.implicitNotFound
+
+/** `Hashing` is a trait whose instances each represent a strategy for hashing
+ * instances of a type.
+ *
+ * `Hashing`'s companion object defines a default hashing strategy for all
+ * objects - it calls their `##` method.
+ *
+ * Note: when using a custom `Hashing`, make sure to use it with the `Equiv`
+ * such that if any two objects are equal, then their hash codes must be equal.
+ *
+ * @since 2.10
+ */
+ at implicitNotFound(msg = "No implicit Hashing defined for ${T}.")
+trait Hashing[T] extends Serializable {
+ def hash(x: T): Int
+}
+
+object Hashing {
+ final class Default[T] extends Hashing[T] {
+ def hash(x: T) = x.##
+ }
+
+ implicit def default[T] = new Default[T]
+
+ def fromFunction[T](f: T => Int) = new Hashing[T] {
+ def hash(x: T) = f(x)
+ }
+}
diff --git a/src/library/scala/util/hashing/MurmurHash3.scala b/src/library/scala/util/hashing/MurmurHash3.scala
new file mode 100644
index 0000000..0aa7e6f
--- /dev/null
+++ b/src/library/scala/util/hashing/MurmurHash3.scala
@@ -0,0 +1,285 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.util.hashing
+
+import java.lang.Integer.{ rotateLeft => rotl }
+
+private[hashing] class MurmurHash3 {
+ /** Mix in a block of data into an intermediate hash value. */
+ final def mix(hash: Int, data: Int): Int = {
+ var h = mixLast(hash, data)
+ h = rotl(h, 13)
+ h * 5 + 0xe6546b64
+ }
+
+ /** May optionally be used as the last mixing step. Is a little bit faster than mix,
+ * as it does no further mixing of the resulting hash. For the last element this is not
+ * necessary as the hash is thoroughly mixed during finalization anyway. */
+ final def mixLast(hash: Int, data: Int): Int = {
+ var k = data
+
+ k *= 0xcc9e2d51
+ k = rotl(k, 15)
+ k *= 0x1b873593
+
+ hash ^ k
+ }
+
+ /** Finalize a hash to incorporate the length and make sure all bits avalanche. */
+ final def finalizeHash(hash: Int, length: Int): Int = avalanche(hash ^ length)
+
+ /** Force all bits of the hash to avalanche. Used for finalizing the hash. */
+ private final def avalanche(hash: Int): Int = {
+ var h = hash
+
+ h ^= h >>> 16
+ h *= 0x85ebca6b
+ h ^= h >>> 13
+ h *= 0xc2b2ae35
+ h ^= h >>> 16
+
+ h
+ }
+
+ /** Compute the hash of a product */
+ final def productHash(x: Product, seed: Int): Int = {
+ val arr = x.productArity
+ // Case objects have the hashCode inlined directly into the
+ // synthetic hashCode method, but this method should still give
+ // a correct result if passed a case object.
+ if (arr == 0) {
+ x.productPrefix.hashCode
+ }
+ else {
+ var h = seed
+ var i = 0
+ while (i < arr) {
+ h = mix(h, x.productElement(i).##)
+ i += 1
+ }
+ finalizeHash(h, arr)
+ }
+ }
+
+ /** Compute the hash of a string */
+ final def stringHash(str: String, seed: Int): Int = {
+ var h = seed
+ var i = 0
+ while (i + 1 < str.length) {
+ val data = (str.charAt(i) << 16) + str.charAt(i + 1)
+ h = mix(h, data)
+ i += 2
+ }
+ if (i < str.length) h = mixLast(h, str.charAt(i))
+ finalizeHash(h, str.length)
+ }
+
+ /** Compute a hash that is symmetric in its arguments - that is a hash
+ * where the order of appearance of elements does not matter.
+ * This is useful for hashing sets, for example.
+ */
+ final def unorderedHash(xs: TraversableOnce[Any], seed: Int): Int = {
+ var a, b, n = 0
+ var c = 1
+ xs foreach { x =>
+ val h = x.##
+ a += h
+ b ^= h
+ if (h != 0) c *= h
+ n += 1
+ }
+ var h = seed
+ h = mix(h, a)
+ h = mix(h, b)
+ h = mixLast(h, c)
+ finalizeHash(h, n)
+ }
+ /** Compute a hash that depends on the order of its arguments.
+ */
+ final def orderedHash(xs: TraversableOnce[Any], seed: Int): Int = {
+ var n = 0
+ var h = seed
+ xs foreach { x =>
+ h = mix(h, x.##)
+ n += 1
+ }
+ finalizeHash(h, n)
+ }
+
+ /** Compute the hash of an array.
+ */
+ final def arrayHash[@specialized T](a: Array[T], seed: Int): Int = {
+ var h = seed
+ var i = 0
+ while (i < a.length) {
+ h = mix(h, a(i).##)
+ i += 1
+ }
+ finalizeHash(h, a.length)
+ }
+
+ /** Compute the hash of a byte array. Faster than arrayHash, because
+ * it hashes 4 bytes at once.
+ */
+ final def bytesHash(data: Array[Byte], seed: Int): Int = {
+ var len = data.length
+ var h = seed
+
+ // Body
+ var i = 0
+ while(len >= 4) {
+ var k = data(i + 0) & 0xFF
+ k |= (data(i + 1) & 0xFF) << 8
+ k |= (data(i + 2) & 0xFF) << 16
+ k |= (data(i + 3) & 0xFF) << 24
+
+ h = mix(h, k)
+
+ i += 4
+ len -= 4
+ }
+
+ // Tail
+ var k = 0
+ if(len == 3) k ^= (data(i + 2) & 0xFF) << 16
+ if(len >= 2) k ^= (data(i + 1) & 0xFF) << 8
+ if(len >= 1) {
+ k ^= (data(i + 0) & 0xFF)
+ h = mixLast(h, k)
+ }
+
+ // Finalization
+ finalizeHash(h, data.length)
+ }
+
+ final def listHash(xs: scala.collection.immutable.List[_], seed: Int): Int = {
+ var n = 0
+ var h = seed
+ var elems = xs
+ while (!elems.isEmpty) {
+ val head = elems.head
+ val tail = elems.tail
+ h = mix(h, head.##)
+ n += 1
+ elems = tail
+ }
+ finalizeHash(h, n)
+ }
+}
+
+/**
+ * An implementation of Austin Appleby's MurmurHash 3 algorithm
+ * (MurmurHash3_x86_32). This object contains methods that hash
+ * values of various types as well as means to construct `Hashing`
+ * objects.
+ *
+ * This algorithm is designed to generate well-distributed non-cryptographic
+ * hashes. It is designed to hash data in 32 bit chunks (ints).
+ *
+ * The mix method needs to be called at each step to update the intermediate
+ * hash value. For the last chunk to incorporate into the hash mixLast may
+ * be used instead, which is slightly faster. Finally finalizeHash needs to
+ * be called to compute the final hash value.
+ *
+ * This is based on the earlier MurmurHash3 code by Rex Kerr, but the
+ * MurmurHash3 algorithm was since changed by its creator Austin Appleby
+ * to remedy some weaknesses and improve performance. This represents the
+ * latest and supposedly final version of the algortihm (revision 136).
+ *
+ * @see [[http://code.google.com/p/smhasher]]
+ */
+object MurmurHash3 extends MurmurHash3 {
+ final val arraySeed = 0x3c074a61
+ final val stringSeed = 0xf7ca7fd2
+ final val productSeed = 0xcafebabe
+ final val symmetricSeed = 0xb592f7ae
+ final val traversableSeed = 0xe73a8b15
+ final val seqSeed = "Seq".hashCode
+ final val mapSeed = "Map".hashCode
+ final val setSeed = "Set".hashCode
+
+ def arrayHash[@specialized T](a: Array[T]): Int = arrayHash(a, arraySeed)
+ def bytesHash(data: Array[Byte]): Int = bytesHash(data, arraySeed)
+ def orderedHash(xs: TraversableOnce[Any]): Int = orderedHash(xs, symmetricSeed)
+ def productHash(x: Product): Int = productHash(x, productSeed)
+ def stringHash(x: String): Int = stringHash(x, stringSeed)
+ def unorderedHash(xs: TraversableOnce[Any]): Int = unorderedHash(xs, traversableSeed)
+
+ /** To offer some potential for optimization.
+ */
+ def seqHash(xs: scala.collection.Seq[_]): Int = xs match {
+ case xs: List[_] => listHash(xs, seqSeed)
+ case xs => orderedHash(xs, seqSeed)
+ }
+
+ def mapHash(xs: scala.collection.Map[_, _]): Int = unorderedHash(xs, mapSeed)
+ def setHash(xs: scala.collection.Set[_]): Int = unorderedHash(xs, setSeed)
+
+ class ArrayHashing[@specialized T] extends Hashing[Array[T]] {
+ def hash(a: Array[T]) = arrayHash(a)
+ }
+
+ def arrayHashing[@specialized T] = new ArrayHashing[T]
+
+ def bytesHashing = new Hashing[Array[Byte]] {
+ def hash(data: Array[Byte]) = bytesHash(data)
+ }
+
+ def orderedHashing = new Hashing[TraversableOnce[Any]] {
+ def hash(xs: TraversableOnce[Any]) = orderedHash(xs)
+ }
+
+ def productHashing = new Hashing[Product] {
+ def hash(x: Product) = productHash(x)
+ }
+
+ def stringHashing = new Hashing[String] {
+ def hash(x: String) = stringHash(x)
+ }
+
+ def unorderedHashing = new Hashing[TraversableOnce[Any]] {
+ def hash(xs: TraversableOnce[Any]) = unorderedHash(xs)
+ }
+
+ /** All this trouble and foreach still appears faster.
+ * Leaving in place in case someone would like to investigate further.
+ */
+ /**
+ def linearSeqHash(xs: scala.collection.LinearSeq[_], seed: Int): Int = {
+ var n = 0
+ var h = seed
+ var elems = xs
+ while (elems.nonEmpty) {
+ h = mix(h, elems.head.##)
+ n += 1
+ elems = elems.tail
+ }
+ finalizeHash(h, n)
+ }
+
+ def indexedSeqHash(xs: scala.collection.IndexedSeq[_], seed: Int): Int = {
+ var n = 0
+ var h = seed
+ val len = xs.length
+ while (n < len) {
+ h = mix(h, xs(n).##)
+ n += 1
+ }
+ finalizeHash(h, n)
+ }
+ */
+
+ @deprecated("Use unorderedHash", "2.10.0")
+ final def symmetricHash[T](xs: scala.collection.GenTraversableOnce[T], seed: Int = symmetricSeed): Int =
+ unorderedHash(xs.seq, seed)
+
+ @deprecated("Use orderedHash", "2.10.0")
+ final def traversableHash[T](xs: scala.collection.GenTraversableOnce[T], seed: Int = traversableSeed): Int =
+ orderedHash(xs.seq, seed)
+}
diff --git a/src/library/scala/util/hashing/package.scala b/src/library/scala/util/hashing/package.scala
new file mode 100644
index 0000000..7d38f15
--- /dev/null
+++ b/src/library/scala/util/hashing/package.scala
@@ -0,0 +1,35 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.util
+
+
+
+
+
+
+package object hashing {
+
+ /** Fast multiplicative hash with a nice distribution.
+ */
+ def byteswap32(v: Int): Int = {
+ var hc = v * 0x9e3775cd
+ hc = java.lang.Integer.reverseBytes(hc)
+ hc * 0x9e3775cd
+ }
+
+ /** Fast multiplicative hash with a nice distribution
+ * for 64-bit values.
+ */
+ def byteswap64(v: Long): Long = {
+ var hc = v * 0x9e3775cd9e3775cdL
+ hc = java.lang.Long.reverseBytes(hc)
+ hc * 0x9e3775cd9e3775cdL
+ }
+
+}
diff --git a/src/library/scala/util/logging/ConsoleLogger.scala b/src/library/scala/util/logging/ConsoleLogger.scala
index 362a8e9..74f058b 100644
--- a/src/library/scala/util/logging/ConsoleLogger.scala
+++ b/src/library/scala/util/logging/ConsoleLogger.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,15 +11,16 @@
package scala.util.logging
/**
- * The trait <code>ConsoleLogger</code> is mixed into a concrete class who
- * has class <code>Logged</code> among its base classes.
+ * The trait `ConsoleLogger` is mixed into a concrete class who
+ * has class `Logged` among its base classes.
*
* @author Burak Emir
* @version 1.0
*/
+ at deprecated("This class will be removed.", "2.10.0")
trait ConsoleLogger extends Logged {
- /** logs argument to Console using <code>Console.println</code>
+ /** logs argument to Console using [[scala.Console.println]]
*/
override def log(msg: String): Unit = Console.println(msg)
}
diff --git a/src/library/scala/util/logging/Logged.scala b/src/library/scala/util/logging/Logged.scala
index 01757e1..f2661d3 100644
--- a/src/library/scala/util/logging/Logged.scala
+++ b/src/library/scala/util/logging/Logged.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,17 +9,20 @@
package scala.util.logging
/** Mixing in Logged indicates that a class provides support for logging.
- * For instance:
-{{{
- // The developer of the library writes:
- class MyClass extends Logged {
- // do stuff, call log
- }
- // The user of the library instantiates:
- val x = new MyClass() with ConsoleLogger
-}}}
- * and the logging is sent to the [[scala.util.logging.ConsoleLogger]] object.
- */
+ *
+ * For instance:
+ * {{{
+ * // The developer of the library writes:
+ * class MyClass extends Logged {
+ * // do stuff, call log
+ * }
+ *
+ * // The user of the library instantiates:
+ * val x = new MyClass() with ConsoleLogger
+ * }}}
+ * and the logging is sent to the [[scala.util.logging.ConsoleLogger]] object.
+ */
+ at deprecated("This class will be removed.", "2.10.0")
trait Logged {
/** This method should log the message given as argument somewhere
* as a side-effect.
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 76c08a0..716d746 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -30,6 +30,7 @@
*/
package scala.util.matching
+import scala.collection.AbstractIterator
import java.util.regex.{ Pattern, Matcher }
/** This class provides methods for creating and using regular expressions.
@@ -144,6 +145,7 @@ import java.util.regex.{ Pattern, Matcher }
*/
@SerialVersionUID(-2094783597747625537L)
class Regex(regex: String, groupNames: String*) extends Serializable {
+ outer =>
import Regex._
@@ -178,17 +180,16 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return The matches
*/
def unapplySeq(target: Any): Option[List[String]] = target match {
- case s: java.lang.CharSequence =>
- val m = pattern.matcher(s)
- if (m.matches) Some((1 to m.groupCount).toList map m.group)
+ case s: CharSequence =>
+ val m = pattern matcher s
+ if (runMatcher(m)) Some((1 to m.groupCount).toList map m.group)
else None
- case Match(s) =>
- unapplySeq(s)
- case _ =>
- None
+ case m: Match => unapplySeq(m.matched)
+ case _ => None
}
+ protected def runMatcher(m: Matcher) = m.matches()
- /** Return all matches of this regexp in given character sequence as a [[scala.util.mathcing.Regex.MatchIterator]],
+ /** Return all matches of this regexp in given character sequence as a [[scala.util.matching.Regex.MatchIterator]],
* which is a special [[scala.collection.Iterator]] that returns the
* matched strings, but can also be converted into a normal iterator
* that returns objects of type [[scala.util.matching.Regex.Match]]
@@ -196,11 +197,30 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* match, subgroups, etc.
*
* @param source The text to match against.
- * @return A [[scala.util.matching.Reegex.MatchIterator]] of all matches.
+ * @return A [[scala.util.matching.Regex.MatchIterator]] of all matches.
* @example {{{for (words <- """\w+""".r findAllIn "A simple example.") yield words}}}
*/
def findAllIn(source: java.lang.CharSequence) = new Regex.MatchIterator(source, this, groupNames)
+
+ /** Return all matches of this regexp in given character sequence as a
+ * [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]].
+ *
+ * @param source The text to match against.
+ * @return A [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]] for all matches.
+ * @example {{{for (words <- """\w+""".r findAllMatchIn "A simple example.") yield words.start}}}
+ */
+ def findAllMatchIn(source: java.lang.CharSequence): Iterator[Match] = {
+ val matchIterator = findAllIn(source)
+ new Iterator[Match] {
+ def hasNext = matchIterator.hasNext
+ def next: Match = {
+ matchIterator.next;
+ new Match(matchIterator.source, matchIterator.matcher, matchIterator.groupNames).force
+ }
+ }
+ }
+
/** Return optionally first matching string of this regexp in given character sequence,
* or None if it does not exist.
*
@@ -353,10 +373,35 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
def split(toSplit: java.lang.CharSequence): Array[String] =
pattern.split(toSplit)
+ /** Create a new Regex with the same pattern, but no requirement that
+ * the entire String matches in extractor patterns. For instance, the strings
+ * shown below lead to successful matches, where they would not otherwise.
+ *
+ * {{{
+ * val dateP1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r.unanchored
+ *
+ * val dateP1(year, month, day) = "Date 2011-07-15"
+ *
+ * val copyright: String = "Date of this document: 2011-07-15" match {
+ * case dateP1(year, month, day) => "Copyright "+year
+ * case _ => "No copyright"
+ * }
+ * }}}
+ *
+ * @return The new unanchored regex
+ */
+ def unanchored: UnanchoredRegex = new Regex(regex, groupNames: _*) with UnanchoredRegex { override def anchored = outer }
+ def anchored: Regex = this
+
/** The string defining the regular expression */
override def toString = regex
}
+trait UnanchoredRegex extends Regex {
+ override protected def runMatcher(m: Matcher) = m.find()
+ override def unanchored = this
+}
+
/** This object defines inner classes that describe
* regex matches and helper objects. The class hierarchy
* is as follows:
@@ -519,9 +564,9 @@ object Regex {
/** A class to step through a sequence of regex matches
*/
class MatchIterator(val source: java.lang.CharSequence, val regex: Regex, val groupNames: Seq[String])
- extends Iterator[String] with MatchData { self =>
+ extends AbstractIterator[String] with Iterator[String] with MatchData { self =>
- protected val matcher = regex.pattern.matcher(source)
+ protected[Regex] val matcher = regex.pattern.matcher(source)
private var nextSeen = false
/** Is there another match? */
@@ -531,13 +576,13 @@ object Regex {
}
/** The next matched substring of `source` */
- def next: String = {
+ def next(): String = {
if (!hasNext) throw new NoSuchElementException
nextSeen = false
matcher.group
}
- override def toString = super[Iterator].toString
+ override def toString = super[AbstractIterator].toString
/** The index of the first matched character */
def start: Int = matcher.start
@@ -555,13 +600,13 @@ object Regex {
def groupCount = matcher.groupCount
/** Convert to an iterator that yields MatchData elements instead of Strings */
- def matchData = new Iterator[Match] {
+ def matchData: Iterator[Match] = new AbstractIterator[Match] {
def hasNext = self.hasNext
def next = { self.next; new Match(source, matcher, groupNames).force }
}
/** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support */
- private[matching] def replacementData = new Iterator[Match] with Replacement {
+ private[matching] def replacementData = new AbstractIterator[Match] with Replacement {
def matcher = self.matcher
def hasNext = self.hasNext
def next = { self.next; new Match(source, matcher, groupNames).force }
diff --git a/src/library/scala/util/parsing/ast/AbstractSyntax.scala b/src/library/scala/util/parsing/ast/AbstractSyntax.scala
index f0f0ec4..30b20d7 100644
--- a/src/library/scala/util/parsing/ast/AbstractSyntax.scala
+++ b/src/library/scala/util/parsing/ast/AbstractSyntax.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -14,12 +14,13 @@ import scala.util.parsing.input.Positional
*
* @author Adriaan Moors
*/
+ at deprecated("This class will be removed", "2.10.0")
trait AbstractSyntax {
/** The base class for elements of the abstract syntax tree.
*/
trait Element extends Positional
- /** The base class for elements in the AST that represent names {@see Binders}.
+ /** The base class for elements in the AST that represent names [[scala.util.parsing.ast.Binders]].
*/
trait NameElement extends Element {
def name: String
diff --git a/src/library/scala/util/parsing/ast/Binders.scala b/src/library/scala/util/parsing/ast/Binders.scala
index 5a19597..a6ad190 100644
--- a/src/library/scala/util/parsing/ast/Binders.scala
+++ b/src/library/scala/util/parsing/ast/Binders.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,7 +8,9 @@
package scala.util.parsing.ast
-import scala.collection.mutable.Map
+import scala.collection.AbstractIterable
+import scala.collection.mutable
+import scala.language.implicitConversions
//DISCLAIMER: this code is highly experimental!
@@ -22,6 +24,7 @@ import scala.collection.mutable.Map
*
* @author Adriaan Moors
*/
+ at deprecated("This class will be removed", "2.10.0")
trait Mappable {
trait Mapper { def apply[T <% Mappable[T]](x: T): T } /* TODO: having type `Forall T. T => T` is too strict:
sometimes we want to allow `Forall T >: precision. T => T` for some type `precision`, so that,
@@ -83,9 +86,9 @@ trait Binders extends AbstractSyntax with Mappable {
* For example: `[x, y]!1` represents the scope with `id` `1` and binder elements `x` and `y`.
* (`id` is solely used for this textual representation.)
*/
- class Scope[binderType <: NameElement] extends Iterable[binderType]{
- private val substitution: Map[binderType, Element] =
- new scala.collection.mutable.LinkedHashMap[binderType, Element] // a LinkedHashMap is ordered by insertion order -- important!
+ class Scope[binderType <: NameElement] extends AbstractIterable[binderType] with Iterable[binderType] {
+ private val substitution: mutable.Map[binderType, Element] =
+ new mutable.LinkedHashMap[binderType, Element] // a LinkedHashMap is ordered by insertion order -- important!
/** Returns a unique number identifying this Scope (only used for representation purposes). */
val id: Int = _Binder.genId
@@ -127,9 +130,6 @@ trait Binders extends AbstractSyntax with Mappable {
// associated to the UnderBinder, but after that, no changes are allowed, except for substitution)?
/** `canAddElement` indicates whether `b` may be added to this scope.
*
- * TODO: strengthen this condition so that no binders may be added after this scope has been
- * linked to its `UnderBinder' (i.e., while parsing, BoundElements may be added to the Scope
- * associated to the UnderBinder, but after that, no changes are allowed, except for substitution)?
*
* @return true if `b` had not been added yet
*/
@@ -200,8 +200,7 @@ trait Binders extends AbstractSyntax with Mappable {
def alpha_==[t <: NameElement](other: BoundElement[t]): Boolean = scope.indexFor(el) == other.scope.indexFor(other.el)
}
- /** A variable that escaped its scope (i.e., a free variable) -- we don't deal very well with these yet
- */
+ /** A variable that escaped its scope (i.e., a free variable) -- we don't deal very well with these yet. */
class UnboundElement[N <: NameElement](private val el: N) extends NameElement {
def name = el.name+"@??"
}
@@ -240,7 +239,7 @@ trait Binders extends AbstractSyntax with Mappable {
})
}*/
- def cloneElementWithSubst(subst: scala.collection.immutable.Map[NameElement, NameElement]) = element.gmap(new Mapper { def apply[t <% Mappable[t]](x :t): t = x match{
+ def cloneElementWithSubst(subst: Map[NameElement, NameElement]) = element.gmap(new Mapper { def apply[t <% Mappable[t]](x :t): t = x match{
case substable: NameElement if subst.contains(substable) => subst.get(substable).asInstanceOf[t] // TODO: wrong... substitution is not (necessarily) the identity function
//Console.println("substed: "+substable+"-> "+subst.get(substable)+")");
case x => x // Console.println("subst: "+x+"(keys: "+subst.keys+")");x
@@ -253,9 +252,9 @@ trait Binders extends AbstractSyntax with Mappable {
}})
def extract: elementT = cloneElementNoBoundElements
- def extract(subst: scala.collection.immutable.Map[NameElement, NameElement]): elementT = cloneElementWithSubst(subst)
+ def extract(subst: Map[NameElement, NameElement]): elementT = cloneElementWithSubst(subst)
- /** Get a string representation of element, normally we don't allow direct access to element, but just getting a string representation is ok*/
+ /** Get a string representation of element, normally we don't allow direct access to element, but just getting a string representation is ok. */
def elementToString: String = element.toString
}
@@ -327,11 +326,11 @@ trait Binders extends AbstractSyntax with Mappable {
// TODO: move this to some utility object higher in the scala hierarchy?
/** Returns a given result, but executes the supplied closure before returning.
* (The effect of this closure does not influence the returned value.)
- *
- * @param result the result to be returned
- * @param block code to be executed, purely for its side-effects
*/
trait ReturnAndDo[T]{
+ /**
+ * @param block code to be executed, purely for its side-effects
+ */
def andDo(block: => Unit): T
}
diff --git a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
index 7f173da..ad06749 100644
--- a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
+++ b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,19 +9,24 @@
package scala.util.parsing.combinator
-/** This object contains implicit conversions that come in handy when using the `^^' combinator
- * {@see Parsers} to construct an AST from the concrete syntax.
- *<p>
- * The reason for this is that the sequential composition combinator (`~') combines its constituents
- * into a ~. When several `~'s are combined, this results in nested `~'s (to the left).
- * The `flatten*' coercions makes it easy to apply an `n'-argument function to a nested ~ of
- * depth (`n-1')</p>
- *<p>
- * The `headOptionTailToFunList' converts a function that takes a List[A] to a function that
- * accepts a ~[A, Option[List[A]]] (this happens when, e.g., parsing something of the following
- * shape: p ~ opt("." ~ repsep(p, ".")) -- where `p' is a parser that yields an A)</p>
+import scala.language.implicitConversions
+
+/** This object contains implicit conversions that come in handy when using the `^^` combinator.
+ *
+ * Refer to [[scala.util.parsing.combinator.Parsers]] to construct an AST from the concrete syntax.
+ *
+ * The reason for this is that the sequential composition combinator (`~`) combines its constituents
+ * into a ~. When several `~`s are combined, this results in nested `~`s (to the left).
+ * The `flatten*` coercions makes it easy to apply an `n`-argument function to a nested `~` of
+ * depth `n-1`
+ *
+ * The `headOptionTailToFunList` converts a function that takes a `List[A]` to a function that
+ * accepts a `~[A, Option[List[A]]]` (this happens when parsing something of the following
+ * shape: `p ~ opt("." ~ repsep(p, "."))` -- where `p` is a parser that yields an `A`).
*
- * @author Martin Odersky, Iulian Dragos, Adriaan Moors
+ * @author Martin Odersky
+ * @author Iulian Dragos
+ * @author Adriaan Moors
*/
trait ImplicitConversions { self: Parsers =>
implicit def flatten2[A, B, C] (f: (A, B) => C) =
diff --git a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
index bc71391..89832d3 100644
--- a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,6 +9,8 @@
package scala.util.parsing.combinator
+import scala.annotation.migration
+
/** `JavaTokenParsers` differs from [[scala.util.parsing.combinator.RegexParsers]]
* by adding the following definitions:
*
@@ -19,11 +21,12 @@ package scala.util.parsing.combinator
* - `floatingPointNumber`
*/
trait JavaTokenParsers extends RegexParsers {
- /** Anything starting with an ASCII alphabetic character or underscore,
- * followed by zero or more repetitions of regex's `\w`.
+ /** Anything that is a valid Java identifier, according to
+ * <a href="http://docs.oracle.com/javase/specs/jls/se7/html/jls-3.html#jls-3.8">The Java Language Spec</a>.
+ * Generally, this means a letter, followed by zero or more letters or numbers.
*/
def ident: Parser[String] =
- """[a-zA-Z_]\w*""".r
+ """\p{javaJavaIdentifierStart}\p{javaJavaIdentifierPart}*""".r
/** An integer, without sign or with a negative sign. */
def wholeNumber: Parser[String] =
"""-?\d+""".r
@@ -39,12 +42,13 @@ trait JavaTokenParsers extends RegexParsers {
/** Double quotes (`"`) enclosing a sequence of:
*
* - Any character except double quotes, control characters or backslash (`\`)
- * - A backslash followed by a slash, another backslash, or one of the letters
- * `b`, `f`, `n`, `r` or `t`.
+ * - A backslash followed by another backslash, a single or double quote, or one
+ * of the letters `b`, `f`, `n`, `r` or `t`
* - `\` followed by `u` followed by four hexadecimal digits
*/
+ @migration("`stringLiteral` allows escaping single and double quotes, but not forward slashes any longer.", "2.10.0")
def stringLiteral: Parser[String] =
- ("\""+"""([^"\p{Cntrl}\\]|\\[\\/bfnrt]|\\u[a-fA-F0-9]{4})*"""+"\"").r
+ ("\""+"""([^"\p{Cntrl}\\]|\\[\\'"bfnrt]|\\u[a-fA-F0-9]{4})*"""+"\"").r
/** A number following the rules of `decimalNumber`, with the following
* optional additions:
*
diff --git a/src/library/scala/util/parsing/combinator/PackratParsers.scala b/src/library/scala/util/parsing/combinator/PackratParsers.scala
index f53b998..16705d4 100644
--- a/src/library/scala/util/parsing/combinator/PackratParsers.scala
+++ b/src/library/scala/util/parsing/combinator/PackratParsers.scala
@@ -1,57 +1,54 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.util.parsing.combinator
import scala.util.parsing.combinator._
import scala.util.parsing.input.{ Reader, Position }
import scala.collection.mutable
+import scala.language.implicitConversions
/**
- * <p>
- * <code>PackratParsers</code> is a component that extends the parser combinators
- * provided by <a href="Parsers.html"><code>Parsers</code></a> with a memoization facility
- * (``Packrat Parsing'').
- * </p>
- * <p>
- * Packrat Parsing is a technique for implementing backtracking, recursive-descent parsers, with the
- * advantage that it guarantees unlimited lookahead and a linear parse time. Using this technique,
- * left recursive grammars can also be accepted.
- * </p>
- * <p>
- * Using <code>PackratParsers</code> is very similar to using <code>Parsers</code>:
- * <ul>
- * <li> any class/trait that extends <code>Parsers</code> (directly or through a subclass) can
- * mix in <code>PackratParsers</code>. Example:
- * <code>object MyGrammar extends StandardTokenParsers with PackratParsers </code>
- * <li> each grammar production previously declared as a <code>def</code> without formal parameters
- * becomes a <code>lazy val</code>, and its type is changed from <code>Parser[Elem]</code>
- * to <code>PackratParser[Elem]</code>. So, for example, <code>def production: Parser[Int] = {...}</code>
- * becomes <code>lazy val production: PackratParser[Int] = {...}</code>
- * <li> Important: using <code>PackratParser</code>s is not an ``all or nothing'' decision. They
- * can be free mixed with regular <code>Parser</code>s in a single grammar.
- * </ul>
- * </p>
- * <p>
- * Cached parse results are attached to the <i>input</i>, not the grammar.
- * Therefore, <code>PackratsParser</code>s require a <code>PackratReader</code> as input, which
- * adds memoization to an underlying <code>Reader</code>. Programmers can create <code>PackratReader</code>
- * objects either manually, as in <code>production(new PackratReader(new lexical.Scanner("input")))</code>,
- * but the common way should be to rely on the combinator <code>phrase</code> to wrap a given
- * input with a <code>PackratReader</code> if the input is not one itself.
- * </p>
+ * `PackratParsers` is a component that extends the parser combinators
+ * provided by [[scala.util.parsing.combinator.Parsers]] with a memoization
+ * facility (''Packrat Parsing'').
+ *
+ * Packrat Parsing is a technique for implementing backtracking,
+ * recursive-descent parsers, with the advantage that it guarantees
+ * unlimited lookahead and a linear parse time. Using this technique,
+ * left recursive grammars can also be accepted.
+ *
+ * Using `PackratParsers` is very similar to using `Parsers`:
+ * - any class/trait that extends `Parsers` (directly or through a subclass)
+ * can mix in `PackratParsers`.
+ * Example: `'''object''' MyGrammar '''extends''' StandardTokenParsers '''with''' PackratParsers`
+ * - each grammar production previously declared as a `def` without formal
+ * parameters becomes a `lazy val`, and its type is changed from
+ * `Parser[Elem]` to `PackratParser[Elem]`.
+ * So, for example, `'''def''' production: Parser[Int] = {...}`
+ * becomes `'''lazy val''' production: PackratParser[Int] = {...}`
+ * - Important: using `PackratParser`s is not an ''all or nothing'' decision.
+ * They can be free mixed with regular `Parser`s in a single grammar.
+ *
+ * Cached parse results are attached to the ''input'', not the grammar.
+ * Therefore, `PackratsParser`s require a `PackratReader` as input, which
+ * adds memoization to an underlying `Reader`. Programmers can create
+ * `PackratReader` objects either manually, as in
+ * `production('''new''' PackratReader('''new''' lexical.Scanner("input")))`,
+ * but the common way should be to rely on the combinator `phrase` to wrap
+ * a given input with a `PackratReader` if the input is not one itself.
*
* @see Bryan Ford: "Packrat Parsing: Simple, Powerful, Lazy, Linear Time." ICFP'02
* @see Alessandro Warth, James R. Douglass, Todd Millstein: "Packrat Parsers Can Support Left Recursion." PEPM'08
*
* @since 2.8
- * @author Manohar Jonnalagedda, Tiark Rompf
+ * @author Manohar Jonnalagedda
+ * @author Tiark Rompf
*/
trait PackratParsers extends Parsers {
@@ -59,15 +56,14 @@ trait PackratParsers extends Parsers {
//type Input = PackratReader[Elem]
/**
- * A specialized <code>Reader</code> class that wraps an underlying <code>Reader</code>
+ * A specialized `Reader` class that wraps an underlying `Reader`
* and provides memoization of parse results.
*/
- class PackratReader[+T](underlying: Reader[T]) extends Reader[T] { outer =>
+ class PackratReader[+T](underlying: Reader[T]) extends Reader[T] { outer =>
/*
* caching of intermediate parse results and information about recursion
*/
-
private[PackratParsers] val cache = mutable.HashMap.empty[(Parser[_], Position), MemoEntry[_]]
private[PackratParsers] def getFromCache[T](p: Parser[T]): Option[MemoEntry[T]] = {
@@ -100,15 +96,11 @@ trait PackratParsers extends Parsers {
def atEnd: Boolean = underlying.atEnd
}
-
/**
- * <p>
- * A parser generator delimiting whole phrases (i.e. programs).
- * </p>
- * <p>
- * Overridden to make sure any input passed to the argument parser
- * is wrapped in a <code>PackratReader</code>.
- * </p>
+ * A parser generator delimiting whole phrases (i.e. programs).
+ *
+ * Overridden to make sure any input passed to the argument parser
+ * is wrapped in a `PackratReader`.
*/
override def phrase[T](p: Parser[T]) = {
val q = super.phrase(p)
@@ -120,7 +112,6 @@ trait PackratParsers extends Parsers {
}
}
-
private def getPosFromResult(r: ParseResult[_]): Position = r.next.pos
// auxiliary data structures
@@ -148,21 +139,20 @@ trait PackratParsers extends Parsers {
/**
* Implicitly convert a parser to a packrat parser.
* The conversion is triggered by giving the appropriate target type:
- * val myParser: PackratParser[MyResult] = aParser
- */
+ * {{{
+ * val myParser: PackratParser[MyResult] = aParser
+ * }}} */
implicit def parser2packrat[T](p: => super.Parser[T]): PackratParser[T] = {
lazy val q = p
memo(super.Parser {in => q(in)})
}
-
/*
* An unspecified function that is called when a packrat reader is applied.
* It verifies whether we are in the process of growing a parse or not.
* In the former case, it makes sure that rules involved in the recursion are evaluated.
* It also prevents non-involved rules from getting evaluated further
*/
-
private def recall(p: super.Parser[_], in: PackratReader[Elem]): Option[MemoEntry[_]] = {
val cached = in.getFromCache(p)
val head = in.recursionHeads.get(in.pos)
@@ -237,7 +227,7 @@ to update each parser involved in the recursion.
/**
* Explicitly convert a given parser to a memoizing packrat parser.
- * In most cases, client code should avoid calling <code>memo</code> directly
+ * In most cases, client code should avoid calling `memo` directly
* and rely on implicit conversion instead.
*/
def memo[T](p: super.Parser[T]): PackratParser[T] = {
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index 21bc109..ead4446 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -1,18 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.util.parsing.combinator
import scala.util.parsing.input._
import scala.collection.mutable.ListBuffer
import scala.annotation.tailrec
-import annotation.migration
+import scala.annotation.migration
+import scala.language.implicitConversions
+import scala.util.DynamicVariable
// TODO: better error handling (labelling like parsec's <?>)
@@ -109,6 +110,8 @@ trait Parsers {
def flatMapWithNext[U](f: T => Input => ParseResult[U]): ParseResult[U]
+ def filterWithError(p: T => Boolean, error: T => String, position: Input): ParseResult[T]
+
def append[U >: T](a: => ParseResult[U]): ParseResult[U]
def isEmpty = !successful
@@ -138,6 +141,10 @@ trait Parsers {
def flatMapWithNext[U](f: T => Input => ParseResult[U]): ParseResult[U]
= f(result)(next)
+ def filterWithError(p: T => Boolean, error: T => String, position: Input): ParseResult[T] =
+ if (p(result)) this
+ else Failure(error(result), position)
+
def append[U >: T](a: => ParseResult[U]): ParseResult[U] = this
def get: T = result
@@ -148,13 +155,20 @@ trait Parsers {
val successful = true
}
- var lastNoSuccess: NoSuccess = null
+ private lazy val lastNoSuccessVar = new DynamicVariable[Option[NoSuccess]](None)
+
+ @deprecated("lastNoSuccess was not thread-safe and will be removed in 2.11.0", "2.10.0")
+ def lastNoSuccess: NoSuccess = lastNoSuccessVar.value.orNull
+
+ @deprecated("lastNoSuccess was not thread-safe and will be removed in 2.11.0", "2.10.0")
+ def lastNoSuccess_=(x: NoSuccess): Unit = lastNoSuccessVar.value = Option(x)
/** A common super-class for unsuccessful parse results. */
sealed abstract class NoSuccess(val msg: String, override val next: Input) extends ParseResult[Nothing] { // when we don't care about the difference between Failure and Error
val successful = false
- if (!(lastNoSuccess != null && next.pos < lastNoSuccess.next.pos))
- lastNoSuccess = this
+
+ if (lastNoSuccessVar.value forall (v => !(next.pos < v.next.pos)))
+ lastNoSuccessVar.value = Some(this)
def map[U](f: Nothing => U) = this
def mapPartial[U](f: PartialFunction[Nothing, U], error: Nothing => String): ParseResult[U] = this
@@ -162,13 +176,15 @@ trait Parsers {
def flatMapWithNext[U](f: Nothing => Input => ParseResult[U]): ParseResult[U]
= this
- def get: Nothing = sys.error("No result when parsing failed")
+ def filterWithError(p: Nothing => Boolean, error: Nothing => String, position: Input): ParseResult[Nothing] = this
+
+ def get: Nothing = scala.sys.error("No result when parsing failed")
}
/** An extractor so `NoSuccess(msg, next)` can be used in matches. */
object NoSuccess {
def unapply[T](x: ParseResult[T]) = x match {
- case Failure(msg, next) => Some(msg, next)
- case Error(msg, next) => Some(msg, next)
+ case Failure(msg, next) => Some((msg, next))
+ case Error(msg, next) => Some((msg, next))
case _ => None
}
}
@@ -202,7 +218,6 @@ trait Parsers {
def append[U >: Nothing](a: => ParseResult[U]): ParseResult[U] = this
}
-
def Parser[T](f: Input => ParseResult[T]): Parser[T]
= new Parser[T]{ def apply(in: Input) = f(in) }
@@ -226,6 +241,12 @@ trait Parsers {
def map[U](f: T => U): Parser[U] //= flatMap{x => success(f(x))}
= Parser{ in => this(in) map(f)}
+ def filter(p: T => Boolean): Parser[T]
+ = withFilter(p)
+
+ def withFilter(p: T => Boolean): Parser[T]
+ = Parser{ in => this(in) filterWithError(p, "Input doesn't match filter: "+_, in)}
+
// no filter yet, dealing with zero is tricky!
@migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
@@ -233,7 +254,6 @@ trait Parsers {
Parser{ in => this(in) append p(in)}
}
-
// the operator formerly known as +++, ++, &, but now, behold the venerable ~
// it's short, light (looks like whitespace), has few overloaded meaning (thanks to the recent change from ~ to unary_~)
// and we love it! (or do we like `,` better?)
@@ -349,7 +369,6 @@ trait Parsers {
*/
def ^^ [U](f: T => U): Parser[U] = map(f).named(toString+"^^")
-
/** A parser combinator that changes a successful result into the specified value.
*
* `p ^^^ v` succeeds if `p` succeeds; discards its result, and returns `v` instead.
@@ -447,6 +466,62 @@ trait Parsers {
* @return opt(this)
*/
def ? = opt(this)
+
+ /** Changes the failure message produced by a parser.
+ *
+ * This doesn't change the behavior of a parser on neither
+ * success nor error, just on failure. The semantics are
+ * slightly different than those obtained by doing `| failure(msg)`,
+ * in that the message produced by this method will always
+ * replace the message produced, which is not guaranteed
+ * by that idiom.
+ *
+ * For example, parser `p` below will always produce the
+ * designated failure message, while `q` will not produce
+ * it if `sign` is parsed but `number` is not.
+ *
+ * {{{
+ * def p = sign.? ~ number withFailureMessage "Number expected!"
+ * def q = sign.? ~ number | failure("Number expected!")
+ * }}}
+ *
+ * @param msg The message that will replace the default failure message.
+ * @return A parser with the same properties and different failure message.
+ */
+ def withFailureMessage(msg: String) = Parser{ in =>
+ this(in) match {
+ case Failure(_, next) => Failure(msg, next)
+ case other => other
+ }
+ }
+
+ /** Changes the error message produced by a parser.
+ *
+ * This doesn't change the behavior of a parser on neither
+ * success nor failure, just on error. The semantics are
+ * slightly different than those obtained by doing `| error(msg)`,
+ * in that the message produced by this method will always
+ * replace the message produced, which is not guaranteed
+ * by that idiom.
+ *
+ * For example, parser `p` below will always produce the
+ * designated error message, while `q` will not produce
+ * it if `sign` is parsed but `number` is not.
+ *
+ * {{{
+ * def p = sign.? ~ number withErrorMessage "Number expected!"
+ * def q = sign.? ~ number | error("Number expected!")
+ * }}}
+ *
+ * @param msg The message that will replace the default error message.
+ * @return A parser with the same properties and different error message.
+ */
+ def withErrorMessage(msg: String) = Parser{ in =>
+ this(in) match {
+ case Error(_, next) => Error(msg, next)
+ case other => other
+ }
+ }
}
/** Wrap a parser so that its failures become errors (the `|` combinator
@@ -529,7 +604,8 @@ trait Parsers {
* @return A parser for elements satisfying p(e).
*/
def acceptIf(p: Elem => Boolean)(err: Elem => String): Parser[Elem] = Parser { in =>
- if (p(in.first)) Success(in.first, in.rest)
+ if (in.atEnd) Failure("end of input", in)
+ else if (p(in.first)) Success(in.first, in.rest)
else Failure(err(in.first), in)
}
@@ -547,7 +623,8 @@ trait Parsers {
* applying `f` to it to produce the result.
*/
def acceptMatch[U](expected: String, f: PartialFunction[Elem, U]): Parser[U] = Parser{ in =>
- if (f.isDefinedAt(in.first)) Success(f(in.first), in.rest)
+ if (in.atEnd) Failure("end of input", in)
+ else if (f.isDefinedAt(in.first)) Success(f(in.first), in.rest)
else Failure(expected+" expected", in)
}
@@ -558,7 +635,8 @@ trait Parsers {
* @param es the list of expected elements
* @return a Parser that recognizes a specified list of elements
*/
- def acceptSeq[ES <% Iterable[Elem]](es: ES): Parser[List[Elem]] = es.foldRight[Parser[List[Elem]]](success(Nil)){(x, pxs) => accept(x) ~ pxs ^^ mkList}
+ def acceptSeq[ES <% Iterable[Elem]](es: ES): Parser[List[Elem]] =
+ es.foldRight[Parser[List[Elem]]](success(Nil)){(x, pxs) => accept(x) ~ pxs ^^ mkList}
/** A parser that always fails.
*
@@ -648,6 +726,7 @@ trait Parsers {
val p0 = p // avoid repeatedly re-evaluating by-name parser
@tailrec def applyp(in0: Input): ParseResult[List[T]] = p0(in0) match {
case Success(x, rest) => elems += x ; applyp(rest)
+ case e @ Error(_, _) => e // still have to propagate error
case _ => Success(elems.toList, in0)
}
@@ -678,8 +757,8 @@ trait Parsers {
@tailrec def applyp(in0: Input): ParseResult[List[T]] =
if (elems.length == num) Success(elems.toList, in0)
else p0(in0) match {
- case Success(x, rest) => elems += x ; applyp(rest)
- case ns: NoSuccess => return ns
+ case Success(x, rest) => elems += x ; applyp(rest)
+ case ns: NoSuccess => return ns
}
applyp(in)
@@ -726,7 +805,7 @@ trait Parsers {
*/
def chainl1[T, U](first: => Parser[T], p: => Parser[U], q: => Parser[(T, U) => T]): Parser[T]
= first ~ rep(q ~ p) ^^ {
- case x ~ xs => xs.foldLeft(x){(_, _) match {case (a, f ~ b) => f(a, b)}}
+ case x ~ xs => xs.foldLeft(x: T){case (a, f ~ b) => f(a, b)} // x's type annotation is needed to deal with changed type inference due to SI-5189
}
/** A parser generator that generalises the `rep1sep` generator so that `q`,
@@ -744,8 +823,7 @@ trait Parsers {
*/
def chainr1[T, U](p: => Parser[T], q: => Parser[(T, U) => U], combine: (T, U) => U, first: U): Parser[U]
= p ~ rep(q ~ p) ^^ {
- case x ~ xs => (new ~(combine, x) :: xs).
- foldRight(first){(_, _) match {case (f ~ a, b) => f(a, b)}}
+ case x ~ xs => (new ~(combine, x) :: xs).foldRight(first){case (f ~ a, b) => f(a, b)}
}
/** A parser generator for optional sub-phrases.
@@ -809,16 +887,15 @@ trait Parsers {
* if `p` consumed all the input.
*/
def phrase[T](p: Parser[T]) = new Parser[T] {
- lastNoSuccess = null
- def apply(in: Input) = p(in) match {
+ def apply(in: Input) = lastNoSuccessVar.withValue(None) {
+ p(in) match {
case s @ Success(out, in1) =>
if (in1.atEnd)
s
- else if (lastNoSuccess == null || lastNoSuccess.next.pos < in1.pos)
- Failure("end of input expected", in1)
else
- lastNoSuccess
- case _ => lastNoSuccess
+ lastNoSuccessVar.value filterNot { _.next.pos < in1.pos } getOrElse Failure("end of input expected", in1)
+ case ns => lastNoSuccessVar.value.getOrElse(ns)
+ }
}
}
diff --git a/src/library/scala/util/parsing/combinator/RegexParsers.scala b/src/library/scala/util/parsing/combinator/RegexParsers.scala
index a06b9d5..d17d0ca 100644
--- a/src/library/scala/util/parsing/combinator/RegexParsers.scala
+++ b/src/library/scala/util/parsing/combinator/RegexParsers.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,6 +13,7 @@ import java.util.regex.Pattern
import scala.util.matching.Regex
import scala.util.parsing.input._
import scala.collection.immutable.PagedSeq
+import scala.language.implicitConversions
/** The ''most important'' differences between `RegexParsers` and
* [[scala.util.parsing.combinator.Parsers]] are:
@@ -23,7 +24,7 @@ import scala.collection.immutable.PagedSeq
* - There's an implicit conversion from [[scala.util.matching.Regex]] to `Parser[String]`,
* so that regex expressions can be used as parser combinators.
* - The parsing methods call the method `skipWhitespace` (defaults to `true`) and, if true,
- * skip any whitespace before before each parser is called.
+ * skip any whitespace before each parser is called.
* - Protected val `whiteSpace` returns a regex that identifies whitespace.
*
* For example, this creates a very simple calculator receiving `String` input:
diff --git a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala b/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
index 8c6be7f..c25c972 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -15,28 +15,25 @@ package lexical
import token._
import input.CharArrayReader.EofCh
-/** <p>
- * This component complements the <code>Scanners</code> component with
- * common operations for lexical parsers.
- * </p>
- * <p>
- * {@see StdLexical} for a concrete implementation for a simple, Scala-like
- * language.
- * </p>
+/** This component complements the `Scanners` component with
+ * common operations for lexical parsers.
+ *
+ * Refer to [[scala.util.parsing.combinator.lexical.StdLexical]]
+ * for a concrete implementation for a simple, Scala-like language.
*
* @author Martin Odersky, Adriaan Moors
*/
abstract class Lexical extends Scanners with Tokens {
- /** A character-parser that matches a letter (and returns it)*/
+ /** A character-parser that matches a letter (and returns it).*/
def letter = elem("letter", _.isLetter)
- /** A character-parser that matches a digit (and returns it)*/
+ /** A character-parser that matches a digit (and returns it).*/
def digit = elem("digit", _.isDigit)
- /** A character-parser that matches any character except the ones given in `cs' (and returns it)*/
- def chrExcept(cs: Char*) = elem("", ch => (cs forall (ch !=)))
+ /** A character-parser that matches any character except the ones given in `cs` (and returns it).*/
+ def chrExcept(cs: Char*) = elem("", ch => (cs forall (ch != _)))
- /** A character-parser that matches a white-space character (and returns it)*/
+ /** A character-parser that matches a white-space character (and returns it).*/
def whitespaceChar = elem("space char", ch => ch <= ' ' && ch != EofCh)
}
diff --git a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
index 1b1a777..5c23ad7 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -15,13 +15,10 @@ package lexical
import token._
import input._
-/** <p>
- * This component provides core functionality for lexical parsers.
- * </p>
- * <p>
- * See its subclasses {@see Lexical} and -- most interestingly
- * {@see StdLexical}, for more functionality.
- * </p>
+/** This component provides core functionality for lexical parsers.
+ *
+ * See its subclasses [[scala.util.parsing.combinator.lexical.Lexical]] and -- most interestingly
+ * [[scala.util.parsing.combinator.lexical.StdLexical]], for more functionality.
*
* @author Martin Odersky, Adriaan Moors
*/
@@ -29,23 +26,20 @@ trait Scanners extends Parsers {
type Elem = Char
type Token
- /** This token is produced by a scanner {@see Scanner} when scanning failed. */
+ /** This token is produced by a scanner `Scanner` when scanning failed. */
def errorToken(msg: String): Token
- /** a parser that produces a token (from a stream of characters) */
+ /** A parser that produces a token (from a stream of characters). */
def token: Parser[Token]
- /** a parser for white-space -- its result will be discarded */
+ /** A parser for white-space -- its result will be discarded. */
def whitespace: Parser[Any]
- /** <p>
- * <code>Scanner</code> is essentially(*) a parser that produces `Token's
- * from a stream of characters. The tokens it produces are typically
- * passed to parsers in <code>TokenParsers</code>.
- * </p>
- * <p>
- * Note: (*) <code>Scanner</code> is really a `Reader' of `Token's
- * </p>
+ /** `Scanner` is essentially¹ a parser that produces `Token`s
+ * from a stream of characters. The tokens it produces are typically
+ * passed to parsers in `TokenParsers`.
+ *
+ * @note ¹ `Scanner` is really a `Reader` of `Token`s
*/
class Scanner(in: Reader[Char]) extends Reader[Token] {
/** Convenience constructor (makes a character reader out of the given string) */
diff --git a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
index 68636b8..f3491c0 100644
--- a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
@@ -1,31 +1,32 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.util.parsing
package combinator
package lexical
import token._
import input.CharArrayReader.EofCh
-import collection.mutable.HashSet
+import scala.collection.mutable
-/** This component provides a standard lexical parser for a simple, Scala-like language.
- * It parses keywords and identifiers, numeric literals (integers), strings, and delimiters.
+/** This component provides a standard lexical parser for a simple,
+ * [[http://scala-lang.org Scala]]-like language. It parses keywords and
+ * identifiers, numeric literals (integers), strings, and delimiters.
*
- * To distinguish between identifiers and keywords, it uses a set of reserved identifiers:
- * every string contained in `reserved` is returned as a keyword token.
- * (Note that "=>" is hard-coded as a keyword.)
- * Additionally, the kinds of delimiters can be specified by the `delimiters` set.
+ * To distinguish between identifiers and keywords, it uses a set of
+ * reserved identifiers: every string contained in `reserved` is returned
+ * as a keyword token. (Note that `=>` is hard-coded as a keyword.)
+ * Additionally, the kinds of delimiters can be specified by the
+ * `delimiters` set.
*
- * Usually this component is used to break character-based input into bigger tokens,
- * which are then passed to a token-parser {@see TokenParsers}.
+ * Usually this component is used to break character-based input into
+ * bigger tokens, which are then passed to a token-parser (see
+ * [[scala.util.parsing.combinator.syntactical.TokenParsers]].)
*
* @author Martin Odersky
* @author Iulian Dragos
@@ -62,10 +63,10 @@ class StdLexical extends Lexical with StdTokens {
)
/** The set of reserved identifiers: these will be returned as `Keyword`s. */
- val reserved = new HashSet[String]
+ val reserved = new mutable.HashSet[String]
/** The set of delimiters (ordering does not matter). */
- val delimiters = new HashSet[String]
+ val delimiters = new mutable.HashSet[String]
protected def processIdent(name: String) =
if (reserved contains name) Keyword(name) else Identifier(name)
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
index 5db6268..d3ae0ea 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,8 +13,9 @@ package syntactical
import token._
import lexical.StdLexical
+import scala.language.implicitConversions
-/** This component provides primitive parsers for the standard tokens defined in `StdTokens'.
+/** This component provides primitive parsers for the standard tokens defined in `StdTokens`.
*
* @author Martin Odersky, Adriaan Moors
*/
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
index 0938d58..7283b01 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,9 +13,10 @@ package combinator
package syntactical
import token._
-import collection.mutable.HashMap
+import scala.collection.mutable
+import scala.language.implicitConversions
-/** This component provides primitive parsers for the standard tokens defined in `StdTokens'.
+/** This component provides primitive parsers for the standard tokens defined in `StdTokens`.
*
* @author Martin Odersky, Adriaan Moors
*/
@@ -23,12 +24,12 @@ trait StdTokenParsers extends TokenParsers {
type Tokens <: StdTokens
import lexical.{Keyword, NumericLit, StringLit, Identifier}
- protected val keywordCache : HashMap[String, Parser[String]] = HashMap.empty
+ protected val keywordCache = mutable.HashMap[String, Parser[String]]()
/** A parser which matches a single keyword token.
*
* @param chars The character string making up the matched keyword.
- * @return a `Parser' that matches the given string
+ * @return a `Parser` that matches the given string
*/
// implicit def keyword(chars: String): Parser[String] = accept(Keyword(chars)) ^^ (_.chars)
implicit def keyword(chars: String): Parser[String] =
diff --git a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
index 430b01f..1c4b25b 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,13 +13,14 @@ package syntactical
/** This is the core component for token-based parsers.
*
- * @author Martin Odersky, Adriaan Moors
+ * @author Martin Odersky
+ * @author Adriaan Moors
*/
trait TokenParsers extends Parsers {
- /** Tokens is the abstract type of the `Token's consumed by the parsers in this component*/
+ /** `Tokens` is the abstract type of the `Token`s consumed by the parsers in this component. */
type Tokens <: token.Tokens
- /** lexical is the component responsible for consuming some basic kind of
+ /** `lexical` is the component responsible for consuming some basic kind of
* input (usually character-based) and turning it into the tokens
* understood by these parsers.
*/
diff --git a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala b/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
index 2997360..80e9b0d 100644
--- a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
+++ b/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
@@ -3,11 +3,16 @@ package scala.util.parsing.combinator.testing
import scala.util.parsing.combinator._
import scala.util.parsing.input._
+import scala.language.postfixOps
+ at deprecated("This class will be removed", "2.10.0")
case class Ident(s: String)
+ at deprecated("This class will be removed", "2.10.0")
case class Number(n: Int)
+ at deprecated("This class will be removed", "2.10.0")
case class Str(s: String)
+ at deprecated("This class will be removed", "2.10.0")
object RegexTest extends RegexParsers {
val ident: Parser[Any] = """[a-zA-Z_]\w*""".r ^^ (s => Ident(s))
val number: Parser[Any] = """\d\d*""".r ^^ (s => Number(s.toInt))
diff --git a/src/library/scala/util/parsing/combinator/testing/Tester.scala b/src/library/scala/util/parsing/combinator/testing/Tester.scala
index 8b96a2c..95730ee 100644
--- a/src/library/scala/util/parsing/combinator/testing/Tester.scala
+++ b/src/library/scala/util/parsing/combinator/testing/Tester.scala
@@ -1,44 +1,42 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.util.parsing.combinator.testing
import scala.util.parsing.combinator._
import scala.util.parsing.combinator.lexical.Lexical
import scala.util.parsing.combinator.syntactical.TokenParsers
-/** <p>
- * Facilitates testing a given parser on various input strings.
- * </p>
- * <p>
- * Example use:
- * </p><pre>
- * <b>val</b> syntactic = <b>new</b> MyParsers</pre>
- * <p>
- * and
- * </p><pre>
- * <b>val</b> parser = syntactic.term</pre>
- * <p>
- * (if MyParsers extends TokenParsers with a parser called `term')
- * </p>
+/** Facilitates testing a given parser on various input strings.
+ *
+ * Example use:
+ * {{{
+ * val syntactic = new MyParsers
+ * }}}
+ * and
+ * {{{
+ * val parser = syntactic.term
+ * }}}
+ * (If `MyParsers` extends [[scala.util.parsing.combinator.syntactical.TokenParsers]]
+ * with a parser called `term`.)
*
- * @author Martin Odersky, Adriaan Moors
+ * @author Martin Odersky
+ * @author Adriaan Moors
*/
+ at deprecated("This class will be removed", "2.10.0")
abstract class Tester {
val syntactic: TokenParsers { val lexical: Lexical }
val parser: syntactic.Parser[Any]
-
- /** Scans a String (using a `syntactic.lexical.Scanner'), parses it
- * using <code>phrase(parser)</code>, and prints the input and the
- * parsed result to the console.
+ /** Scans a String (using a `syntactic.lexical.Scanner`), parses it using
+ * `phrase(parser)`, and prints the input and the parsed result to the
+ * console.
*/
def test(in: String) {
Console.println("\nin : "+in)
diff --git a/src/library/scala/util/parsing/combinator/token/StdTokens.scala b/src/library/scala/util/parsing/combinator/token/StdTokens.scala
index a6bea0a..605f53b 100644
--- a/src/library/scala/util/parsing/combinator/token/StdTokens.scala
+++ b/src/library/scala/util/parsing/combinator/token/StdTokens.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,9 +10,10 @@ package scala.util.parsing
package combinator
package token
-/** This component provides the standard `Token's for a simple, Scala-like language.
+/** This component provides the standard `Token`s for a simple, Scala-like language.
*
- * @author Martin Odersky, Adriaan Moors
+ * @author Martin Odersky
+ * @author Adriaan Moors
*/
trait StdTokens extends Tokens {
/** The class of keyword tokens */
diff --git a/src/library/scala/util/parsing/combinator/token/Tokens.scala b/src/library/scala/util/parsing/combinator/token/Tokens.scala
index f5bbb95..ff92802 100644
--- a/src/library/scala/util/parsing/combinator/token/Tokens.scala
+++ b/src/library/scala/util/parsing/combinator/token/Tokens.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,14 +10,16 @@ package scala.util.parsing
package combinator
package token
-/** This component provides the notion of `Token', the unit of information that is passed from lexical
- * parsers in the `Lexical' component to the parsers in the `TokenParsers' component.
+/** This component provides the notion of `Token`, the unit of information that is passed from lexical
+ * parsers in the `Lexical` component to the parsers in the `TokenParsers` component.
*
- * @author Martin Odersky, Adriaan Moors
+ * @author Martin Odersky
+ * @author Adriaan Moors
*/
trait Tokens {
- /** Objects of this type are produced by a lexical parser or ``scanner'', and consumed by a parser
- * {@see scala.util.parsing.combinator.syntactical.TokenParsers}.
+ /** Objects of this type are produced by a lexical parser or ``scanner'', and consumed by a parser.
+ *
+ * @see [[scala.util.parsing.combinator.syntactical.TokenParsers]]
*/
abstract class Token {
def chars: String
@@ -35,6 +37,6 @@ trait Tokens {
def chars = "<eof>"
}
- /** This token is produced by a scanner {@see Scanner} when scanning failed. */
+ /** This token is produced by a scanner `Scanner` when scanning failed. */
def errorToken(msg: String): Token = new ErrorToken(msg)
}
diff --git a/src/library/scala/util/parsing/input/CharArrayReader.scala b/src/library/scala/util/parsing/input/CharArrayReader.scala
index 6e11435..3ba69b2 100644
--- a/src/library/scala/util/parsing/input/CharArrayReader.scala
+++ b/src/library/scala/util/parsing/input/CharArrayReader.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -9,9 +9,10 @@
package scala.util.parsing.input
-/** An object encapsulating basic character constants
+/** An object encapsulating basic character constants.
*
- * @author Martin Odersky, Adriaan Moors
+ * @author Martin Odersky
+ * @author Adriaan Moors
*/
object CharArrayReader {
final val EofCh = '\032'
@@ -20,15 +21,13 @@ object CharArrayReader {
/** A character array reader reads a stream of characters (keeping track of their positions)
* from an array.
*
- * @param source an array of characters
- * @param index starting offset into the array; the first element returned will be `source(index)'
- * @param line the line number of the first element (counting from index `0' of `source')
- * @param column the column number of the first element (counting from index `0' of `source')
+ * @param chars an array of characters
+ * @param index starting offset into the array; the first element returned will be `source(index)`
*
- * @author Martin Odersky, Adriaan Moors
+ * @author Martin Odersky
+ * @author Adriaan Moors
*/
-class CharArrayReader(chars: Array[Char], index: Int)
-extends CharSequenceReader(chars, index) {
+class CharArrayReader(chars: Array[Char], index: Int) extends CharSequenceReader(chars, index) {
def this(chars: Array[Char]) = this(chars, 0)
diff --git a/src/library/scala/util/parsing/input/CharSequenceReader.scala b/src/library/scala/util/parsing/input/CharSequenceReader.scala
index a3d5f5f..02aa2ab 100644
--- a/src/library/scala/util/parsing/input/CharSequenceReader.scala
+++ b/src/library/scala/util/parsing/input/CharSequenceReader.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/util/parsing/input/NoPosition.scala b/src/library/scala/util/parsing/input/NoPosition.scala
index 1748501..40584b3 100644
--- a/src/library/scala/util/parsing/input/NoPosition.scala
+++ b/src/library/scala/util/parsing/input/NoPosition.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,9 +10,10 @@
package scala.util.parsing.input
-/** Undefined position
+/** Undefined position.
*
- * @author Martin Odersky, Adriaan Moors
+ * @author Martin Odersky
+ * @author Adriaan Moors
*/
object NoPosition extends Position {
def line = 0
diff --git a/src/library/scala/util/parsing/input/OffsetPosition.scala b/src/library/scala/util/parsing/input/OffsetPosition.scala
index 864d897..01d9ea5 100644
--- a/src/library/scala/util/parsing/input/OffsetPosition.scala
+++ b/src/library/scala/util/parsing/input/OffsetPosition.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,19 +8,19 @@
package scala.util.parsing.input
-import collection.mutable.ArrayBuffer
+import scala.collection.mutable.ArrayBuffer
-/** <p>
- * <code>OffsetPosition</code> is a standard class for positions
- * represented as offsets into a source ``document''.
- * @param source The source document
- * @param offset The offset indicating the position
+/** `OffsetPosition` is a standard class for positions
+ * represented as offsets into a source ``document''.
+ *
+ * @param source The source document
+ * @param offset The offset indicating the position
*
* @author Martin Odersky
*/
case class OffsetPosition(source: java.lang.CharSequence, offset: Int) extends Position {
- /** An index that contains all line starts, including first line, and eof */
+ /** An index that contains all line starts, including first line, and eof. */
private lazy val index: Array[Int] = {
var lineStarts = new ArrayBuffer[Int]
lineStarts += 0
@@ -30,7 +30,7 @@ case class OffsetPosition(source: java.lang.CharSequence, offset: Int) extends P
lineStarts.toArray
}
- /** The line number referred to by the position; line numbers start at 1 */
+ /** The line number referred to by the position; line numbers start at 1. */
def line: Int = {
var lo = 0
var hi = index.length - 1
@@ -42,26 +42,25 @@ case class OffsetPosition(source: java.lang.CharSequence, offset: Int) extends P
lo + 1
}
- /** The column number referred to by the position; column numbers start at 1 */
+ /** The column number referred to by the position; column numbers start at 1. */
def column: Int = offset - index(line - 1) + 1
- /** The contents of the line numbered `lnum' (must not contain a new-line character).
+ /** The contents of the line numbered at the current offset.
*
- * @param lnum a 1-based integer index into the `document'
- * @return the line at `lnum' (not including a newline)
+ * @return the line at `offset` (not including a newline)
*/
def lineContents: String =
source.subSequence(index(line - 1), index(line)).toString
- /** Returns a string representation of the `Position', of the form `line.column' */
+ /** Returns a string representation of the `Position`, of the form `line.column`. */
override def toString = line+"."+column
/** Compare this position to another, by first comparing their line numbers,
* and then -- if necessary -- using the columns to break a tie.
*
- * @param `that' a `Position' to compare to this `Position'
- * @return true if this position's line or (in case of a tie wrt. line numbers)
- * its column is smaller than the corresponding components of `that'
+ * @param that a `Position` to compare to this `Position`
+ * @return true if this position's line number or (in case of equal line numbers)
+ * column is smaller than the corresponding components of `that`
*/
override def <(that: Position) = that match {
case OffsetPosition(_, that_offset) =>
diff --git a/src/library/scala/util/parsing/input/PagedSeqReader.scala b/src/library/scala/util/parsing/input/PagedSeqReader.scala
index 134cf0a..9140bf2 100644
--- a/src/library/scala/util/parsing/input/PagedSeqReader.scala
+++ b/src/library/scala/util/parsing/input/PagedSeqReader.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -23,7 +23,7 @@ object PagedSeqReader {
/** A character array reader reads a stream of characters (keeping track of their positions)
* from an array.
*
- * @param source the source sequence
+ * @param seq the source sequence
* @param offset starting offset.
*
* @author Martin Odersky
diff --git a/src/library/scala/util/parsing/input/Position.scala b/src/library/scala/util/parsing/input/Position.scala
index bb5180c..31715bd 100644
--- a/src/library/scala/util/parsing/input/Position.scala
+++ b/src/library/scala/util/parsing/input/Position.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,59 +8,51 @@
package scala.util.parsing.input
-/** <p>
- * <code>Position</code> is the base trait for objects describing a
- * position in a ``document''.
- * </p>
- * <p>
- * It provides functionality for:
- * </p><ul>
- * <li> generating a visual representation of this position (`longString');
- * <li> comparing two positions (`<').
- * </ul>
- * <p>
- * To use this class for a concrete kind of ``document'', implement the
- * <code>lineContents</code> method.
- * </p>
+/** `Position` is the base trait for objects describing a position in a ``document''.
*
- * @author Martin Odersky, Adriaan Moors
+ * It provides functionality for:
+ * - generating a visual representation of this position (`longString`);
+ * - comparing two positions (`<`).
+ *
+ * To use this class for a concrete kind of ``document'', implement the `lineContents` method.
+ *
+ * @author Martin Odersky
+ * @author Adriaan Moors
*/
trait Position {
- /** The line number referred to by the position; line numbers start at 1 */
+ /** The line number referred to by the position; line numbers start at 1. */
def line: Int
- /** The column number referred to by the position; column numbers start at 1 */
+ /** The column number referred to by the position; column numbers start at 1. */
def column: Int
- /** The contents of the line numbered `lnum' (must not contain a new-line character).
- *
- * @param lnum a 1-based integer index into the `document'
- * @return the line at `lnum' (not including a newline)
+ /** The contents of the line at this position. (must not contain a new-line character).
*/
protected def lineContents: String
- /** Returns a string representation of the `Position', of the form `line.column' */
+ /** Returns a string representation of the `Position`, of the form `line.column`. */
override def toString = ""+line+"."+column
/** Returns a more ``visual'' representation of this position.
- * More precisely, the resulting string consists of two lines: <ol>
- * <li> the line in the document referred to by this position </li>
- * <li>a caret indicating the column</li></ol>
+ * More precisely, the resulting string consists of two lines:
+ * 1. the line in the document referred to by this position
+ * 2. a caret indicating the column
*
* Example:
- *
- *<pre> List(this, is, a, line, from, the, document)
- * ^</pre>
+ * {{{
+ * List(this, is, a, line, from, the, document)
+ * ^
+ * }}}
*/
def longString = lineContents+"\n"+lineContents.take(column-1).map{x => if (x == '\t') x else ' ' } + "^"
/** Compare this position to another, by first comparing their line numbers,
* and then -- if necessary -- using the columns to break a tie.
*
- * @param `that' a `Position' to compare to this `Position'
- * @return true if this position's line or (in case of a tie wrt. line numbers)
- * its column is smaller than the corresponding components of `that'
+ * @param `that` a `Position` to compare to this `Position`
+ * @return true if this position's line number or (in case of equal line numbers)
+ * column is smaller than the corresponding components of `that`
*/
def <(that: Position) = {
this.line < that.line ||
diff --git a/src/library/scala/util/parsing/input/Positional.scala b/src/library/scala/util/parsing/input/Positional.scala
index 5824b5a..87cb16e 100644
--- a/src/library/scala/util/parsing/input/Positional.scala
+++ b/src/library/scala/util/parsing/input/Positional.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -17,7 +17,7 @@ trait Positional {
/** The source position of this object, initially set to undefined. */
var pos: Position = NoPosition
- /** If current source position is undefined, update it with given position `newpos'
+ /** If current source position is undefined, update it with given position `newpos`
* @return the object itself
*/
def setPos(newpos: Position): this.type = {
diff --git a/src/library/scala/util/parsing/input/Reader.scala b/src/library/scala/util/parsing/input/Reader.scala
index d0fddd2..bded57b 100644
--- a/src/library/scala/util/parsing/input/Reader.scala
+++ b/src/library/scala/util/parsing/input/Reader.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,12 +13,15 @@ package scala.util.parsing.input
/** An interface for streams of values that have positions.
*
- * @author Martin Odersky, Adriaan Moors
+ * @author Martin Odersky
+ * @author Adriaan Moors
*/
abstract class Reader[+T] {
- /** If this is a reader over character sequences, the underlying char sequence
- * If not, throws a <code>NoSuchMethodError</code> exception.
+ /** If this is a reader over character sequences, the underlying char sequence.
+ * If not, throws a `NoSuchMethodError` exception.
+ *
+ * @throws [[java.lang.NoSuchMethodError]] if this not a char sequence reader.
*/
def source: java.lang.CharSequence =
throw new NoSuchMethodError("not a char sequence reader")
@@ -32,14 +35,12 @@ abstract class Reader[+T] {
/** Returns an abstract reader consisting of all elements except the first
*
- * @return If <code>atEnd</code> is <code>true</code>, the result will be
- * <code>this'; otherwise, it's a <code>Reader</code> containing
- * more elements.
+ * @return If `atEnd` is `true`, the result will be `this';
+ * otherwise, it's a `Reader` containing more elements.
*/
def rest: Reader[T]
- /** Returns an abstract reader consisting of all elements except the first
- * <code>n</code> elements.
+ /** Returns an abstract reader consisting of all elements except the first `n` elements.
*/
def drop(n: Int): Reader[T] = {
var r: Reader[T] = this
@@ -50,11 +51,11 @@ abstract class Reader[+T] {
r
}
- /** The position of the first element in the reader
+ /** The position of the first element in the reader.
*/
def pos: Position
- /** true iff there are no more elements in this reader
+ /** `true` iff there are no more elements in this reader.
*/
def atEnd: Boolean
}
diff --git a/src/library/scala/util/parsing/input/StreamReader.scala b/src/library/scala/util/parsing/input/StreamReader.scala
index 176e3af..ba7ab65 100644
--- a/src/library/scala/util/parsing/input/StreamReader.scala
+++ b/src/library/scala/util/parsing/input/StreamReader.scala
@@ -1,27 +1,28 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.util.parsing.input
import java.io.BufferedReader
import scala.collection.immutable.PagedSeq
-/** An object to create a StreamReader from a <code>java.io.Reader</code>.
- *
- * @param in the <code>java.io.Reader</code> that provides the underlying
- * stream of characters for this Reader.
+/** An object to create a `StreamReader` from a `java.io.Reader`.
*
* @author Miles Sabin
*/
object StreamReader {
final val EofCh = '\032'
+ /** Create a `StreamReader` from a `java.io.Reader`.
+ *
+ * @param in the `java.io.Reader` that provides the underlying
+ * stream of characters for this Reader.
+ */
def apply(in: java.io.Reader): StreamReader = {
new StreamReader(PagedSeq.fromReader(in), 0, 1)
}
@@ -32,13 +33,13 @@ object StreamReader {
*
* NOTE:
* StreamReaders do not really fulfill the new contract for readers, which
- * requires a `source' CharSequence representing the full input.
+ * requires a `source` CharSequence representing the full input.
* Instead source is treated line by line.
* As a consequence, regex matching cannot extend beyond a single line
* when a StreamReader are used for input.
*
* If you need to match regexes spanning several lines you should consider
- * class <code>PagedSeqReader</code> instead.
+ * class `PagedSeqReader` instead.
*
* @author Miles Sabin
* @author Martin Odersky
diff --git a/src/library/scala/util/parsing/json/JSON.scala b/src/library/scala/util/parsing/json/JSON.scala
index 0369646..2f450ed 100644
--- a/src/library/scala/util/parsing/json/JSON.scala
+++ b/src/library/scala/util/parsing/json/JSON.scala
@@ -1,26 +1,24 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.util.parsing.json
import scala.util.parsing.combinator._
import scala.util.parsing.combinator.syntactical._
import scala.util.parsing.combinator.lexical._
/**
- * This object provides a simple interface to the JSON parser class. The default conversion
- * for numerics is into a double. If you wish to override this behavior at the global level,
- * you can set the globalNumberParser property to your own (String => Any) function. If you only
- * want to override at the per-thread level then you can set the perThreadNumberParser property to your
- * function. For example:
- *
- * <pre>
+ * This object provides a simple interface to the JSON parser class.
+ * The default conversion for numerics is into a double. If you wish to
+ * override this behavior at the global level, you can set the
+ * `globalNumberParser` property to your own `(String => Any)` function.
+ * If you only want to override at the per-thread level then you can set
+ * the `perThreadNumberParser` property to your function. For example:
+ * {{{
* val myConversionFunc = {input : String => BigDecimal(input)}
*
* // Global override
@@ -28,28 +26,14 @@ import scala.util.parsing.combinator.lexical._
*
* // Per-thread override
* JSON.perThreadNumberParser = myConversionFunc
- * </pre>
+ * }}}
*
- * @author Derek Chen-Becker <"java"+ at +"chen-becker"+"."+"org">
+ * @author Derek Chen-Becker <"java"+ at +"chen-becker"+"."+"org">
*/
object JSON extends Parser {
/**
- * Parse the given JSON string and return a list of elements. If the
- * string is a JSON object it will be a list of pairs. If it's a JSON
- * array it will be be a list of individual elements.
- *
- * @param input the given JSON string.
- * @return an optional list of of elements.
- */
- @deprecated("Use parseFull or parseRaw as needed.", "2.8.0")
- def parse(input: String): Option[List[Any]] = parseRaw(input).map(unRaw).flatMap({
- case l : List[_] => Some(l)
- case _ => None
- })
-
- /**
- * This method converts "raw" results back into the original, deprecated
+ * This method converts ''raw'' results back into the original, deprecated
* form.
*/
private def unRaw (in : Any) : Any = in match {
@@ -59,12 +43,12 @@ object JSON extends Parser {
}
/**
- * Parse the given JSON string and return a list of elements. If the
- * string is a JSON object it will be a JSONObject. If it's a JSON
- * array it will be be a JSONArray.
+ * Parse the given `JSON` string and return a list of elements. If the
+ * string is a `JSON` object it will be a `JSONObject`. If it's a `JSON`
+ * array it will be a `JSONArray`.
*
- * @param input the given JSON string.
- * @return an optional JSONType element.
+ * @param input the given `JSON` string.
+ * @return an optional `JSONType` element.
*/
def parseRaw(input : String) : Option[JSONType] =
phrase(root)(new lexical.Scanner(input)) match {
@@ -73,11 +57,11 @@ object JSON extends Parser {
}
/**
- * Parse the given JSON string and return either a <code>List[Any]</code>
- * if the JSON string specifies an <code>Array</code>, or a
- * <code>Map[String,Any]</code> if the JSON string specifies an object.
+ * Parse the given `JSON` string and return either a `List[Any]`
+ * if the `JSON` string specifies an `Array`, or a
+ * `Map[String,Any]` if the `JSON` string specifies an object.
*
- * @param input the given JSON string.
+ * @param input the given `JSON` string.
* @return an optional list or map.
*/
def parseFull(input: String): Option[Any] =
@@ -87,8 +71,8 @@ object JSON extends Parser {
}
/**
- * A utility method to resolve a parsed JSON list into objects or
- * arrays. See the parse method for details.
+ * A utility method to resolve a parsed `JSON` list into objects or
+ * arrays. See the `parse` method for details.
*/
def resolveType(input: Any): Any = input match {
case JSONObject(data) => data.transform {
@@ -105,8 +89,9 @@ object JSON extends Parser {
def globalNumberParser : NumericParser = defaultNumberParser
/**
- * Defines the function used to convert a numeric string literal into a numeric format on a per-thread
- * basis. Use globalNumberParser for a global override
+ * Defines the function used to convert a numeric string literal into a
+ * numeric format on a per-thread basis. Use `globalNumberParser` for a
+ * global override.
*/
def perThreadNumberParser_=(f : NumericParser) { numberParser.set(f) }
def perThreadNumberParser : NumericParser = numberParser.get()
diff --git a/src/library/scala/util/parsing/json/Lexer.scala b/src/library/scala/util/parsing/json/Lexer.scala
index 5f5968c..991b5d5 100644
--- a/src/library/scala/util/parsing/json/Lexer.scala
+++ b/src/library/scala/util/parsing/json/Lexer.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/util/parsing/json/Parser.scala b/src/library/scala/util/parsing/json/Parser.scala
index 202e4da..cb87866 100644
--- a/src/library/scala/util/parsing/json/Parser.scala
+++ b/src/library/scala/util/parsing/json/Parser.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/util/parsing/syntax/package.scala b/src/library/scala/util/parsing/syntax/package.scala
deleted file mode 100644
index 547136c..0000000
--- a/src/library/scala/util/parsing/syntax/package.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.util.parsing
-
-import scala.util.parsing.combinator.token
-
-/** If deprecating the whole package worked, that's what would best
- * be done, but it doesn't (yet) so it isn't.
- */
-package object syntax {
- @deprecated("Moved to scala.util.parsing.combinator.token", "2.8.0")
- type Tokens = token.Tokens
- @deprecated("Moved to scala.util.parsing.combinator.token", "2.8.0")
- type StdTokens = token.StdTokens
-}
diff --git a/src/library/scala/util/regexp/Base.scala b/src/library/scala/util/regexp/Base.scala
index 1404e9f..7dbe60a 100644
--- a/src/library/scala/util/regexp/Base.scala
+++ b/src/library/scala/util/regexp/Base.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -15,8 +15,9 @@ package scala.util.regexp
* @author Burak Emir
* @version 1.0
*/
-abstract class Base
-{
+
+ at deprecated("This class will be removed", "2.10.0")
+abstract class Base {
type _regexpT <: RegExp
abstract class RegExp {
@@ -24,7 +25,7 @@ abstract class Base
}
object Alt {
- /** Alt( R,R,R* ) */
+ /** `Alt( R,R,R* )`. */
def apply(rs: _regexpT*) =
if (rs.size < 2) throw new SyntaxError("need at least 2 branches in Alt")
else new Alt(rs: _*)
@@ -57,7 +58,7 @@ abstract class Base
override def toString() = "Eps"
}
- /** this class can be used to add meta information to regexps */
+ /** this class can be used to add meta information to regexps. */
class Meta(r1: _regexpT) extends RegExp {
final val isNullable = r1.isNullable
def r = r1
diff --git a/src/library/scala/util/regexp/PointedHedgeExp.scala b/src/library/scala/util/regexp/PointedHedgeExp.scala
index ef68e60..5c0379b 100644
--- a/src/library/scala/util/regexp/PointedHedgeExp.scala
+++ b/src/library/scala/util/regexp/PointedHedgeExp.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,12 +10,12 @@
package scala.util.regexp
-/** pointed regular hedge expressions, a useful subclass of
- * regular hedge expressions.
+/** Pointed regular hedge expressions, a useful subclass of regular hedge expressions.
*
* @author Burak Emir
* @version 1.0
*/
+ at deprecated("This class will be removed", "2.10.0")
abstract class PointedHedgeExp extends Base {
type _regexpT <: RegExp
diff --git a/src/library/scala/util/regexp/SyntaxError.scala b/src/library/scala/util/regexp/SyntaxError.scala
index 0f5c2af..1788fdf 100644
--- a/src/library/scala/util/regexp/SyntaxError.scala
+++ b/src/library/scala/util/regexp/SyntaxError.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -16,4 +16,5 @@ package scala.util.regexp
* @author Burak Emir
* @version 1.0
*/
+ at deprecated("This class will be removed", "2.10.0")
class SyntaxError(e: String) extends RuntimeException(e)
diff --git a/src/library/scala/util/regexp/WordExp.scala b/src/library/scala/util/regexp/WordExp.scala
index 1168b1e..3c0c2ec 100644
--- a/src/library/scala/util/regexp/WordExp.scala
+++ b/src/library/scala/util/regexp/WordExp.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,32 +10,35 @@
package scala.util.regexp
-/** <p>
- * The class <code>WordExp</code> provides regular word expressions.
- * Users have to instantiate type member <code>_regexpT <: RegExp</code>
- * (from class <code>Base</code>) and a type member
- * <code>_labelT <: Label</code>. Here is a short example:
- * </p><pre>
- * <b>import</b> scala.util.regexp._
- * <b>import</b> scala.util.automata._
- * <b>object</b> MyLang <b>extends</b> WordExp {
- * <b>type</b> _regexpT = RegExp
- * <b>type</b> _labelT = MyChar
+/**
+ * The class `WordExp` provides regular word expressions.
*
- * <b>case class</b> MyChar(c:Char) <b>extends</b> Label
+ * Users have to instantiate type member `_regexpT <;: RegExp`
+ * (from class `Base`) and a type member `_labelT <;: Label`.
+ *
+ * Here is a short example:
+ * {{{
+ * import scala.util.regexp._
+ * import scala.util.automata._
+ * object MyLang extends WordExp {
+ * type _regexpT = RegExp
+ * type _labelT = MyChar
+ *
+ * case class MyChar(c:Char) extends Label
* }
- * <b>import</b> MyLang._
+ * import MyLang._
* // (a* | b)*
- * <b>val</b> rex = Star(Alt(Star(Letter(MyChar('a'))),Letter(MyChar('b'))))
- * <b>object</b> MyBerriSethi <b>extends</b> WordBerrySethi {
- * <b>override val</b> lang = MyLang
+ * val rex = Star(Alt(Star(Letter(MyChar('a'))),Letter(MyChar('b'))))
+ * object MyBerriSethi extends WordBerrySethi {
+ * override val lang = MyLang
* }
- * <b>val</b> nfa = MyBerriSethi.automatonFrom(Sequ(rex), 1)
- * </pre>
+ * val nfa = MyBerriSethi.automatonFrom(Sequ(rex), 1)
+ * }}}
*
* @author Burak Emir
* @version 1.0
*/
+ at deprecated("This class will be removed", "2.10.0")
abstract class WordExp extends Base {
abstract class Label
@@ -53,4 +56,3 @@ abstract class WordExp extends Base {
var pos = -1
}
}
-
diff --git a/src/library/scala/volatile.scala b/src/library/scala/volatile.scala
index 9b47a52..bea216e 100644
--- a/src/library/scala/volatile.scala
+++ b/src/library/scala/volatile.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,7 +10,7 @@
package scala
-import annotation.target._
+import scala.annotation.meta._
@field
-class volatile extends annotation.StaticAnnotation
+class volatile extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/xml/Atom.scala b/src/library/scala/xml/Atom.scala
index 61329af..cba0b96 100644
--- a/src/library/scala/xml/Atom.scala
+++ b/src/library/scala/xml/Atom.scala
@@ -1,30 +1,30 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
-/** The class <code>Atom</code> provides an XML node for text (PCDATA).
+/** The class `Atom` provides an XML node for text (`PCDATA`).
* It is used in both non-bound and bound XML representations.
*
* @author Burak Emir
- * @param text the text contained in this node, may not be <code>null</code>.
+ * @param data the text contained in this node, may not be `'''null'''`.
*/
-class Atom[+A](val data: A) extends SpecialNode with Serializable
-{
+class Atom[+A](val data: A) extends SpecialNode with Serializable {
if (data == null)
- throw new IllegalArgumentException("cannot construct Atom(null)")
+ throw new IllegalArgumentException("cannot construct "+getClass.getSimpleName+" with null")
+
+ override protected def basisForHashCode: Seq[Any] = Seq(data)
- override def basisForHashCode: Seq[Any] = Seq(data)
override def strict_==(other: Equality) = other match {
case x: Atom[_] => data == x.data
case _ => false
}
+
override def canEqual(other: Any) = other match {
case _: Atom[_] => true
case _ => false
@@ -37,13 +37,10 @@ class Atom[+A](val data: A) extends SpecialNode with Serializable
/** Returns text, with some characters escaped according to the XML
* specification.
- *
- * @param sb ...
- * @return ...
*/
- def buildString(sb: StringBuilder) =
- Utility.escape(data.toString(), sb)
+ def buildString(sb: StringBuilder): StringBuilder =
+ Utility.escape(data.toString, sb)
- override def text: String = data.toString()
+ override def text: String = data.toString
}
diff --git a/src/library/scala/xml/Attribute.scala b/src/library/scala/xml/Attribute.scala
index ec36640..0224913 100644
--- a/src/library/scala/xml/Attribute.scala
+++ b/src/library/scala/xml/Attribute.scala
@@ -1,22 +1,23 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
-/** Attribute defines the interface shared by both
- * PrefixedAttribute and UnprefixedAttribute
+/** This singleton object contains the `apply` and `unapply` methods for
+ * convenient construction and deconstruction.
+ *
+ * @author Burak Emir
+ * @version 1.0
*/
-
object Attribute {
def unapply(x: Attribute) = x match {
- case PrefixedAttribute(_, key, value, next) => Some(key, value, next)
- case UnprefixedAttribute(key, value, next) => Some(key, value, next)
+ case PrefixedAttribute(_, key, value, next) => Some((key, value, next))
+ case UnprefixedAttribute(key, value, next) => Some((key, value, next))
case _ => None
}
@@ -34,11 +35,17 @@ object Attribute {
def apply(pre: Option[String], key: String, value: Seq[Node], next: MetaData): Attribute =
pre match {
- case None => new UnprefixedAttribute(key, value, next)
- case Some(p) => new PrefixedAttribute(p, key, value, next)
+ case None => new UnprefixedAttribute(key, value, next)
+ case Some(p) => new PrefixedAttribute(p, key, value, next)
}
}
+/** The `Attribute` trait defines the interface shared by both
+ * [[scala.xml.PrefixedAttribute]] and [[scala.xml.UnprefixedAttribute]].
+ *
+ * @author Burak Emir
+ * @version 1.0
+ */
abstract trait Attribute extends MetaData {
def pre: String // will be null if unprefixed
val key: String
@@ -54,11 +61,13 @@ abstract trait Attribute extends MetaData {
else copy(next remove key)
def remove(namespace: String, scope: NamespaceBinding, key: String) =
- if (isPrefixed && this.key == key && (scope getURI pre) == namespace) next
- else next.remove(namespace, scope, key)
+ if (this.key == key && (scope getURI pre) == namespace) next
+ else copy(next.remove(namespace, scope, key))
def isPrefixed: Boolean = pre != null
+
def getNamespace(owner: Node): String
+
def wellformed(scope: NamespaceBinding): Boolean = {
val arg = if (isPrefixed) scope getURI pre else null
(next(arg, scope, key) == null) && (next wellformed scope)
@@ -77,7 +86,7 @@ abstract trait Attribute extends MetaData {
/** Appends string representation of only this attribute to stringbuffer.
*/
- def toString1(sb: StringBuilder) {
+ protected def toString1(sb: StringBuilder) {
if (value == null)
return
if (isPrefixed)
@@ -86,6 +95,6 @@ abstract trait Attribute extends MetaData {
sb append key append '='
val sb2 = new StringBuilder()
Utility.sequenceToXML(value, TopScope, sb2, true)
- Utility.appendQuoted(sb2.toString(), sb)
+ Utility.appendQuoted(sb2.toString, sb)
}
}
diff --git a/src/library/scala/xml/Comment.scala b/src/library/scala/xml/Comment.scala
index dc649eb..ff4280d 100644
--- a/src/library/scala/xml/Comment.scala
+++ b/src/library/scala/xml/Comment.scala
@@ -1,23 +1,20 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.xml
-
-/** The class <code>Comment</code> implements an XML node for comments.
+/** The class `Comment` implements an XML node for comments.
*
* @author Burak Emir
- * @param text the text contained in this node, may not contain "--"
+ * @param commentText the text contained in this node, may not contain "--"
*/
-case class Comment(commentText: String) extends SpecialNode
-{
+case class Comment(commentText: String) extends SpecialNode {
+
def label = "#REM"
override def text = ""
final override def doCollectNamespaces = false
@@ -29,5 +26,5 @@ case class Comment(commentText: String) extends SpecialNode
/** Appends "<!-- text -->" to this string buffer.
*/
override def buildString(sb: StringBuilder) =
- sb append ("<!--" + commentText + "-->")
+ sb append "<!--" append commentText append "-->"
}
diff --git a/src/library/scala/xml/Document.scala b/src/library/scala/xml/Document.scala
index df8a926..a064c4d 100644
--- a/src/library/scala/xml/Document.scala
+++ b/src/library/scala/xml/Document.scala
@@ -1,19 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.xml
/** A document information item (according to InfoSet spec). The comments
* are copied from the Infoset spec, only augmented with some information
* on the Scala types for definitions that might have no value.
- * also plays the role of an <code>XMLEvent</code> for pull parsing
+ * Also plays the role of an `XMLEvent` for pull parsing.
*
* @author Burak Emir
* @version 1.0, 26/04/2005
@@ -62,14 +60,14 @@ class Document extends NodeSeq with pull.XMLEvent with Serializable {
/** An indication of the standalone status of the document, either
* true or false. This property is derived from the optional standalone
* document declaration in the XML declaration at the beginning of the
- * document entity, and has no value (<code>None</code>) if there is no
+ * document entity, and has no value (`None`) if there is no
* standalone document declaration.
*/
var standAlone: Option[Boolean] = _
/** A string representing the XML version of the document. This
* property is derived from the XML declaration optionally present at
- * the beginning of the document entity, and has no value (<code>None</code>)
+ * the beginning of the document entity, and has no value (`None`)
* if there is no XML declaration.
*/
var version: Option[String] = _
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
old mode 100644
new mode 100755
index 4ea7e90..b9e665e
--- a/src/library/scala/xml/Elem.scala
+++ b/src/library/scala/xml/Elem.scala
@@ -1,25 +1,34 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
-/** This singleton object contains the apply and unapplySeq methods for convenient construction and
- * deconstruction. It is possible to deconstruct any Node instance (that is not a SpecialNode or
- * a Group) using the syntax
- * <code> case Elem(prefix, label, attribs, scope, child @ _*) => ... </code>
+/** This singleton object contains the `apply` and `unapplySeq` methods for
+ * convenient construction and deconstruction. It is possible to deconstruct
+ * any `Node` instance (that is not a `SpecialNode` or a `Group`) using the
+ * syntax `case Elem(prefix, label, attribs, scope, child @ _*) => ...`
*
- * Copyright 2008 Google Inc. All Rights Reserved.
- * @author Burak Emir <bqe at google.com>
+ * Copyright 2008 Google Inc. All Rights Reserved.
+ * @author Burak Emir <bqe at google.com>
*/
object Elem {
- def apply(prefix: String,label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*) =
- new Elem(prefix,label,attributes,scope,child:_*)
+ /** Build an Elem, setting its minimizeEmpty property to <code>true</code> if it has no children. Note that this
+ * default may not be exactly what you want, as some XML dialects don't permit some elements to be minimized.
+ *
+ * @deprecated This factory method is retained for backward compatibility; please use the other one, with which you
+ * can specify your own preference for minimizeEmpty.
+ */
+ @deprecated("Use the other apply method in this object", "2.10.0")
+ def apply(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*): Elem =
+ apply(prefix, label, attributes, scope, child.isEmpty, child: _*)
+
+ def apply(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, minimizeEmpty: Boolean, child: Node*): Elem =
+ new Elem(prefix, label, attributes, scope, minimizeEmpty, child: _*)
def unapplySeq(n: Node) = n match {
case _: SpecialNode | _: Group => None
@@ -27,43 +36,55 @@ object Elem {
}
}
-/** The case class <code>Elem</code> extends the <code>Node</code> class,
+/** The case class `Elem` extends the `Node` class,
* providing an immutable data object representing an XML element.
*
- * @param prefix namespace prefix (may be null, but not the empty string)
- * @param label the element name
- * @param attribute the attribute map
- * @param scope the scope containing the namespace bindings
- * @param child the children of this node
+ * @param prefix namespace prefix (may be null, but not the empty string)
+ * @param label the element name
+ * @param attributes1 the attribute map
+ * @param scope the scope containing the namespace bindings
+ * @param minimizeEmpty `true` if this element should be serialized as minimized (i.e. "<el/>") when
+ * empty; `false` if it should be written out in long form.
+ * @param child the children of this node
*
- * Copyright 2008 Google Inc. All Rights Reserved.
- * @author Burak Emir <bqe at google.com>
+ * Copyright 2008 Google Inc. All Rights Reserved.
+ * @author Burak Emir <bqe at google.com>
*/
class Elem(
override val prefix: String,
val label: String,
- override val attributes: MetaData,
+ attributes1: MetaData,
override val scope: NamespaceBinding,
+ val minimizeEmpty: Boolean,
val child: Node*)
extends Node with Serializable
{
+ @deprecated("This constructor is retained for backward compatibility. Please use the primary constructor, which lets you specify your own preference for `minimizeEmpty`.", "2.10.0")
+ def this(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*) = {
+ this(prefix, label, attributes, scope, child.isEmpty, child: _*)
+ }
+
final override def doCollectNamespaces = true
final override def doTransform = true
+ override val attributes = MetaData.normalize(attributes1, scope)
+
if (prefix == "")
throw new IllegalArgumentException("prefix of zero length, use null instead")
if (scope == null)
- throw new IllegalArgumentException("scope is null, use xml.TopScope for empty scope")
+ throw new IllegalArgumentException("scope is null, use scala.xml.TopScope for empty scope")
//@todo: copy the children,
// setting namespace scope if necessary
// cleaning adjacent text nodes if necessary
- override def basisForHashCode: Seq[Any] = prefix :: label :: attributes :: child.toList
+ override protected def basisForHashCode: Seq[Any] =
+ prefix :: label :: attributes :: child.toList
- /** Returns a new element with updated attributes, resolving namespace uris from this element's scope.
- * See MetaData.update for details.
+ /** Returns a new element with updated attributes, resolving namespace uris
+ * from this element's scope. See MetaData.update for details.
+ *
* @param updates MetaData with new and updated attributes
* @return a new symbol with updated attributes
*/
@@ -80,11 +101,11 @@ extends Node with Serializable
label: String = this.label,
attributes: MetaData = this.attributes,
scope: NamespaceBinding = this.scope,
+ minimizeEmpty: Boolean = this.minimizeEmpty,
child: Seq[Node] = this.child.toSeq
- ): Elem = Elem(prefix, label, attributes, scope, child: _*)
+ ): Elem = Elem(prefix, label, attributes, scope, minimizeEmpty, child: _*)
- /** Returns concatenation of <code>text(n)</code> for each child
- * <code>n</code>.
+ /** Returns concatenation of `text(n)` for each child `n`.
*/
- override def text = child map (_.text) mkString
+ override def text = (child map (_.text)).mkString
}
diff --git a/src/library/scala/xml/EntityRef.scala b/src/library/scala/xml/EntityRef.scala
index d3f0a3f..a7b9835 100644
--- a/src/library/scala/xml/EntityRef.scala
+++ b/src/library/scala/xml/EntityRef.scala
@@ -1,23 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.xml
-
-
-/** The class <code>EntityRef</code> implements an XML node for entity
- * references.
+/** The class `EntityRef` implements an XML node for entity references.
*
* @author Burak Emir
* @version 1.0
- * @param text the text contained in this node.
+ * @param entityName the name of the entity reference, for example `amp`.
*/
case class EntityRef(entityName: String) extends SpecialNode {
final override def doCollectNamespaces = false
@@ -33,10 +28,10 @@ case class EntityRef(entityName: String) extends SpecialNode {
case _ => Utility.sbToString(buildString)
}
- /** Appends "& entityName;" to this string buffer.
+ /** Appends `"& entityName;"` to this string buffer.
*
* @param sb the string buffer.
- * @return the modified string buffer <code>sb</code>.
+ * @return the modified string buffer `sb`.
*/
override def buildString(sb: StringBuilder) =
sb.append("&").append(entityName).append(";")
diff --git a/src/library/scala/xml/Equality.scala b/src/library/scala/xml/Equality.scala
index 1e7cd48..02db22a 100644
--- a/src/library/scala/xml/Equality.scala
+++ b/src/library/scala/xml/Equality.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,26 +8,23 @@
package scala.xml
-/** In an attempt to contain the damage being inflicted on
- * consistency by the ad hoc equals methods spread around
- * xml, the logic is centralized and all the xml classes
- * go through the xml.Equality trait. There are two forms
- * of xml comparison.
+/** In an attempt to contain the damage being inflicted on consistency by the
+ * ad hoc `equals` methods spread around `xml`, the logic is centralized and
+ * all the `xml` classes go through the `xml.Equality trait`. There are two
+ * forms of `xml` comparison.
*
- * 1) def strict_==(other: xml.Equality)
+ * 1. `'''def''' strict_==(other: scala.xml.Equality)`
*
- * This one tries to honor the little things like symmetry
- * and hashCode contracts. The equals method routes all
- * comparisons through this.
+ * This one tries to honor the little things like symmetry and hashCode
+ * contracts. The `equals` method routes all comparisons through this.
*
- * 2) xml_==(other: Any)
+ * 1. `xml_==(other: Any)`
*
- * This one picks up where strict_== leaves off. It might
- * declare any two things equal.
+ * This one picks up where `strict_==` leaves off. It might declare any two
+ * things equal.
*
- * As things stood, the logic not only made a mockery of
- * the collections equals contract, but also laid waste to
- * that of case classes.
+ * As things stood, the logic not only made a mockery of the collections
+ * equals contract, but also laid waste to that of case classes.
*
* Among the obstacles to sanity are/were:
*
@@ -68,13 +65,13 @@ object Equality {
}
import Equality._
-private[xml]
trait Equality extends scala.Equals {
- def basisForHashCode: Seq[Any]
+ protected def basisForHashCode: Seq[Any]
+
def strict_==(other: Equality): Boolean
def strict_!=(other: Equality) = !strict_==(other)
- /** We insist we're only equal to other xml.Equality implementors,
+ /** We insist we're only equal to other `xml.Equality` implementors,
* which heads off a lot of inconsistency up front.
*/
override def canEqual(other: Any): Boolean = other match {
@@ -107,4 +104,3 @@ trait Equality extends scala.Equals {
strictlyEqual || (blithe && compareBlithely(this, asRef(other)))
}
}
-
diff --git a/src/library/scala/xml/Group.scala b/src/library/scala/xml/Group.scala
index 3018e05..92da2f9 100644
--- a/src/library/scala/xml/Group.scala
+++ b/src/library/scala/xml/Group.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
/** A hack to group XML nodes in one node for output.
@@ -21,11 +20,13 @@ final case class Group(val nodes: Seq[Node]) extends Node {
case x: Group => true
case _ => false
}
+
override def strict_==(other: Equality) = other match {
case Group(xs) => nodes sameElements xs
case _ => false
}
- override def basisForHashCode = nodes
+
+ override protected def basisForHashCode = nodes
/** Since Group is very much a hack it throws an exception if you
* try to do anything with it.
diff --git a/src/library/scala/xml/HasKeyValue.scala b/src/library/scala/xml/HasKeyValue.scala
deleted file mode 100644
index 29c3f58..0000000
--- a/src/library/scala/xml/HasKeyValue.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.xml
-
-/** <p>
- * Use this class to match on (unprefixed) attribute values
- * <p><pre>
- * <b>val</b> hasName = <b>new</b> HasKeyValue("name")
- * node <b>match</b> {
- * <b>case</b> Node("foo", hasName(x), _*) => x // foo had attribute with key "name" and with value x
- * }</pre>
- *
- * @author Burak Emir
- */
- at deprecated("Use UnprefixedAttribute's extractor", "2.8.0")
-class HasKeyValue(key: String) {
- def unapplySeq(x: MetaData): Option[Seq[Node]] = x.get(key)
-}
diff --git a/src/library/scala/xml/MalformedAttributeException.scala b/src/library/scala/xml/MalformedAttributeException.scala
index 212b352..3431cb6 100644
--- a/src/library/scala/xml/MalformedAttributeException.scala
+++ b/src/library/scala/xml/MalformedAttributeException.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/xml/MetaData.scala b/src/library/scala/xml/MetaData.scala
index 8043508..3bf3ebb 100644
--- a/src/library/scala/xml/MetaData.scala
+++ b/src/library/scala/xml/MetaData.scala
@@ -1,32 +1,32 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
import Utility.sbToString
-import annotation.tailrec
-import scala.collection.Iterator
+import scala.annotation.tailrec
+import scala.collection.{ AbstractIterable, Iterator }
/**
* Copyright 2008 Google Inc. All Rights Reserved.
* @author Burak Emir <bqe at google.com>
*/
object MetaData {
-
/**
- * appends all attributes from new_tail to attribs, without attempting to detect
- * or remove duplicates. The method guarantees that all attributes from attribs come before
- * the attributes in new_tail, but does not guarantee to preserve the relative order of attribs.
- * Duplicates can be removed with normalize.
+ * appends all attributes from new_tail to attribs, without attempting to
+ * detect or remove duplicates. The method guarantees that all attributes
+ * from attribs come before the attributes in new_tail, but does not
+ * guarantee to preserve the relative order of attribs.
+ *
+ * Duplicates can be removed with `normalize`.
*/
- @tailrec
- def concatenate(attribs: MetaData, new_tail: MetaData): MetaData =
+ @tailrec // temporarily marked final so it will compile under -Xexperimental
+ final def concatenate(attribs: MetaData, new_tail: MetaData): MetaData =
if (attribs eq Null) new_tail
else concatenate(attribs.next, attribs copy new_tail)
@@ -38,8 +38,8 @@ object MetaData {
def iterate(md: MetaData, normalized_attribs: MetaData, set: Set[String]): MetaData = {
lazy val key = getUniversalKey(md, scope)
if (md eq Null) normalized_attribs
- else if (set(key)) iterate(md.next, normalized_attribs, set)
- else iterate(md.next, md copy normalized_attribs, set + key)
+ else if ((md.value eq null) || set(key)) iterate(md.next, normalized_attribs, set)
+ else md copy iterate(md.next, normalized_attribs, set + key)
}
iterate(attribs, Null, Set())
}
@@ -60,18 +60,24 @@ object MetaData {
}
-/** <p>
- * This class represents an attribute and at the same time a linked list of attributes.
- * Every instance of this class is either an instance of UnprefixedAttribute <code>key,value</code>
- * or an instance of PrefixedAttribute <code>namespace_prefix,key,value</code> or Null, the empty
- * attribute list. Namespace URIs are obtained by using the namespace scope of the element owning
- * this attribute (see <code>getNamespace</code>)
- * </p>
+/** This class represents an attribute and at the same time a linked list of
+ * attributes. Every instance of this class is either
+ * - an instance of `UnprefixedAttribute key,value` or
+ * - an instance of `PrefixedAttribute namespace_prefix,key,value` or
+ * - `Null, the empty attribute list.
*
- * Copyright 2008 Google Inc. All Rights Reserved.
- * @author Burak Emir <bqe at google.com>
+ * Namespace URIs are obtained by using the namespace scope of the element
+ * owning this attribute (see `getNamespace`).
+ *
+ * Copyright 2008 Google Inc. All Rights Reserved.
+ * @author Burak Emir <bqe at google.com>
*/
-abstract class MetaData extends Iterable[MetaData] with Equality with Serializable {
+abstract class MetaData
+extends AbstractIterable[MetaData]
+ with Iterable[MetaData]
+ with Equality
+ with Serializable {
+
/** Updates this MetaData with the MetaData given as argument. All attributes that occur in updates
* are part of the resulting MetaData. If an attribute occurs in both this instance and
* updates, only the one in updates is part of the result (avoiding duplicates). For prefixed
@@ -91,12 +97,11 @@ abstract class MetaData extends Iterable[MetaData] with Equality with Serializab
*/
def apply(key: String): Seq[Node]
- /** convenience method, same as <code>apply(namespace, owner.scope, key)</code>.
+ /** convenience method, same as `apply(namespace, owner.scope, key)`.
*
* @param namespace_uri namespace uri of key
* @param owner the element owning this attribute list
* @param key the attribute key
- * @return ...
*/
final def apply(namespace_uri: String, owner: Node, key: String): Seq[Node] =
apply(namespace_uri, owner.scope, key)
@@ -106,15 +111,12 @@ abstract class MetaData extends Iterable[MetaData] with Equality with Serializab
*
* @param namespace_uri namespace uri of key
* @param scp a namespace scp (usually of the element owning this attribute list)
- * @param key to be looked fore
+ * @param k to be looked for
* @return value as Seq[Node] if key is found, null otherwise
*/
- def apply(namespace_uri:String, scp:NamespaceBinding, k:String): Seq[Node]
+ def apply(namespace_uri: String, scp: NamespaceBinding, k: String): Seq[Node]
/** returns a copy of this MetaData item with next field set to argument.
- *
- * @param next ...
- * @return ...
*/
def copy(next: MetaData): MetaData
@@ -137,7 +139,7 @@ abstract class MetaData extends Iterable[MetaData] with Equality with Serializab
case m: MetaData => this.asAttrMap == m.asAttrMap
case _ => false
}
- def basisForHashCode: Seq[Any] = List(this.asAttrMap)
+ protected def basisForHashCode: Seq[Any] = List(this.asAttrMap)
/** filters this sequence of meta data */
override def filter(f: MetaData => Boolean): MetaData =
@@ -161,7 +163,7 @@ abstract class MetaData extends Iterable[MetaData] with Equality with Serializab
/** Returns a Map containing the attributes stored as key/value pairs.
*/
def asAttrMap: Map[String, String] =
- iterator map (x => (x.prefixedKey, x.value.text)) toMap
+ (iterator map (x => (x.prefixedKey, x.value.text))).toMap
/** returns Null or the next MetaData item */
def next: MetaData
@@ -188,45 +190,27 @@ abstract class MetaData extends Iterable[MetaData] with Equality with Serializab
final def get(uri: String, scope: NamespaceBinding, key: String): Option[Seq[Node]] =
Option(apply(uri, scope, key))
- def toString1(): String = sbToString(toString1)
+ protected def toString1(): String = sbToString(toString1)
// appends string representations of single attribute to StringBuilder
- def toString1(sb: StringBuilder): Unit
+ protected def toString1(sb: StringBuilder): Unit
override def toString(): String = sbToString(buildString)
def buildString(sb: StringBuilder): StringBuilder = {
- sb.append(' ')
+ sb append ' '
toString1(sb)
- next.buildString(sb)
+ next buildString sb
}
/**
- * @param scope ...
- * @return <code>true</code> iff ...
*/
def wellformed(scope: NamespaceBinding): Boolean
- /**
- * @param key ...
- * @return ...
- */
def remove(key: String): MetaData
- /**
- * @param namespace ...
- * @param scope ...
- * @param key ...
- * @return ...
- */
def remove(namespace: String, scope: NamespaceBinding, key: String): MetaData
- /**
- * @param namespace ...
- * @param owner ...
- * @param key ...
- * @return ...
- */
final def remove(namespace: String, owner: Node, key: String): MetaData =
remove(namespace, owner.scope, key)
}
diff --git a/src/library/scala/xml/NamespaceBinding.scala b/src/library/scala/xml/NamespaceBinding.scala
index 7ad2b42..c7cd9e6 100644
--- a/src/library/scala/xml/NamespaceBinding.scala
+++ b/src/library/scala/xml/NamespaceBinding.scala
@@ -1,19 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.xml
import Utility.sbToString
-
-/** The class <code>NamespaceBinding</code> represents namespace bindings
+/** The class `NamespaceBinding` represents namespace bindings
* and scopes. The binding for the default namespace is treated as a null
* prefix. the absent namespace is represented with the null uri. Neither
* prefix nor uri may be empty, which is not checked.
@@ -40,18 +37,22 @@ case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBindin
if (_uri == uri) prefix else parent getPrefix _uri
override def toString(): String = sbToString(buildString(_, TopScope))
+
override def canEqual(other: Any) = other match {
case _: NamespaceBinding => true
case _ => false
}
+
override def strict_==(other: Equality) = other match {
case x: NamespaceBinding => (prefix == x.prefix) && (uri == x.uri) && (parent == x.parent)
case _ => false
}
+
def basisForHashCode: Seq[Any] = List(prefix, uri, parent)
def buildString(stop: NamespaceBinding): String = sbToString(buildString(_, stop))
- def buildString(sb: StringBuilder, stop: NamespaceBinding): Unit = {
+
+ def buildString(sb: StringBuilder, stop: NamespaceBinding) {
if (this eq stop) return // contains?
val s = " xmlns%s=\"%s\"".format(
diff --git a/src/library/scala/xml/Node.scala b/src/library/scala/xml/Node.scala
old mode 100644
new mode 100755
index bf4d81e..6b6c962
--- a/src/library/scala/xml/Node.scala
+++ b/src/library/scala/xml/Node.scala
@@ -1,19 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
-/**
- * This object provides methods ...
+/** This singleton object contains the `unapplySeq` method for
+ * convenient deconstruction.
*
- * @author Burak Emir
- * @version 1.0
+ * @author Burak Emir
+ * @version 1.0
*/
object Node {
/** the constant empty attribute sequence */
@@ -61,8 +60,8 @@ abstract class Node extends NodeSeq {
def namespace = getNamespace(this.prefix)
/**
- * Convenience method, same as <code>scope.getURI(pre)</code> but additionally
- * checks if scope is <code>null</code>.
+ * Convenience method, same as `scope.getURI(pre)` but additionally
+ * checks if scope is `'''null'''`.
*
* @param pre the prefix whose namespace name we would like to obtain
* @return the namespace if <code>scope != null</code> and prefix was
@@ -72,7 +71,7 @@ abstract class Node extends NodeSeq {
/**
* Convenience method, looks up an unprefixed attribute in attributes of this node.
- * Same as <code>attributes.getValue(key)</code>
+ * Same as `attributes.getValue(key)`
*
* @param key of queried attribute.
* @return value of <code>UnprefixedAttribute</code> with given key
@@ -82,20 +81,20 @@ abstract class Node extends NodeSeq {
/**
* Convenience method, looks up a prefixed attribute in attributes of this node.
- * Same as <code>attributes.getValue(uri, this, key)</code>
+ * Same as `attributes.getValue(uri, this, key)`-
*
* @param uri namespace of queried attribute (may not be null).
* @param key of queried attribute.
- * @return value of <code>PrefixedAttribute</code> with given namespace
- * and given key, otherwise <code>null</code>.
+ * @return value of `PrefixedAttribute` with given namespace
+ * and given key, otherwise `'''null'''`.
*/
final def attribute(uri: String, key: String): Option[Seq[Node]] =
attributes.get(uri, this, key)
/**
* Returns attribute meaning all attributes of this node, prefixed and
- * unprefixed, in no particular order. In class <code>Node</code>, this
- * defaults to <code>Null</code> (the empty attribute list).
+ * unprefixed, in no particular order. In class `Node`, this
+ * defaults to `Null` (the empty attribute list).
*
* @return all attributes of this node
*/
@@ -130,7 +129,10 @@ abstract class Node extends NodeSeq {
case x: Node => true
case _ => false
}
- override def basisForHashCode: Seq[Any] = prefix :: label :: attributes :: nonEmptyChildren.toList
+
+ override protected def basisForHashCode: Seq[Any] =
+ prefix :: label :: attributes :: nonEmptyChildren.toList
+
override def strict_==(other: Equality) = other match {
case _: Group => false
case x: Node =>
@@ -153,35 +155,29 @@ abstract class Node extends NodeSeq {
/**
* String representation of this node
*
- * @param stripComment if true, strips comment nodes from result
- * @return ...
+ * @param stripComments if true, strips comment nodes from result
*/
def buildString(stripComments: Boolean): String =
- Utility.toXML(this, stripComments = stripComments).toString
+ Utility.serialize(this, stripComments = stripComments).toString
/**
- * Same as <code>toString(false)</code>.
- *
- * @see <code><a href="#toString">toString(Boolean)</a></code>
+ * Same as `toString('''false''')`.
*/
override def toString(): String = buildString(false)
/**
- * Appends qualified name of this node to <code>StringBuilder</code>.
- *
- * @param sb ...
- * @return ...
+ * Appends qualified name of this node to `StringBuilder`.
*/
def nameToString(sb: StringBuilder): StringBuilder = {
if (null != prefix) {
- sb.append(prefix)
- sb.append(':')
+ sb append prefix
+ sb append ':'
}
- sb.append(label)
+ sb append label
}
/**
- * Returns a type symbol (e.g. DTD, XSD), default <code>null</code>.
+ * Returns a type symbol (e.g. DTD, XSD), default `'''null'''`.
*/
def xmlType(): TypeSymbol = null
diff --git a/src/library/scala/xml/NodeBuffer.scala b/src/library/scala/xml/NodeBuffer.scala
index 2495cb0..2db4338 100644
--- a/src/library/scala/xml/NodeBuffer.scala
+++ b/src/library/scala/xml/NodeBuffer.scala
@@ -1,25 +1,20 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
/**
- * <p>
- * This class acts as a Buffer for nodes. If it is used as a sequence
- * of nodes <code>Seq[Node]</code>, it must be ensured that no updates
- * occur after that point, because <code>scala.xml.Node</code> is assumed
- * to be immutable.
- * </p>
- * <p>
- * Despite this being a sequence, don't use it as key in a hashtable.
- * Calling the hashcode function will result in a runtime error.
- * </p>
+ * This class acts as a Buffer for nodes. If it is used as a sequence of
+ * nodes `Seq[Node]`, it must be ensured that no updates occur after that
+ * point, because `scala.xml.Node` is assumed to be immutable.
+ *
+ * Despite this being a sequence, don't use it as key in a hashtable.
+ * Calling the hashcode function will result in a runtime error.
*
* @author Burak Emir
* @version 1.0
@@ -27,10 +22,12 @@ package scala.xml
class NodeBuffer extends scala.collection.mutable.ArrayBuffer[Node] {
/**
- * Append given object to this buffer, returns reference on this NodeBuffer
- * for convenience. Some rules apply: If o is null, it is ignored. If it is
- * an Iterator or Iterable, its elements will be added. If o is a node, it is
- * added as it is. If it is anything else, it gets wrapped in an Atom.
+ * Append given object to this buffer, returns reference on this
+ * `NodeBuffer` for convenience. Some rules apply:
+ * - If argument `o` is `'''null'''`, it is ignored.
+ * - If it is an `Iterator` or `Iterable`, its elements will be added.
+ * - If `o` is a node, it is added as it is.
+ * - If it is anything else, it gets wrapped in an [[scala.xml.Atom]].
*
* @param o converts to an xml node and adds to this node buffer
* @return this nodebuffer
diff --git a/src/library/scala/xml/NodeSeq.scala b/src/library/scala/xml/NodeSeq.scala
index 1524ab8..decf60d 100644
--- a/src/library/scala/xml/NodeSeq.scala
+++ b/src/library/scala/xml/NodeSeq.scala
@@ -1,18 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.xml
-import collection.{ mutable, immutable, generic, SeqLike }
+import scala.collection.{ mutable, immutable, generic, SeqLike, AbstractSeq }
import mutable.{ Builder, ListBuffer }
import generic.{ CanBuildFrom }
+import scala.language.implicitConversions
/** This object ...
*
@@ -34,13 +33,13 @@ object NodeSeq {
implicit def seqToNodeSeq(s: Seq[Node]): NodeSeq = fromSeq(s)
}
-/** This class implements a wrapper around <code>Seq[Node]</code> that
- * adds XPath and comprehension methods.
+/** This class implements a wrapper around `Seq[Node]` that adds XPath
+ * and comprehension methods.
*
* @author Burak Emir
* @version 1.0
*/
-abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] with Equality {
+abstract class NodeSeq extends AbstractSeq[Node] with immutable.Seq[Node] with SeqLike[Node, NodeSeq] with Equality {
import NodeSeq.seqToNodeSeq // import view magic for NodeSeq wrappers
/** Creates a list buffer as builder for this class */
@@ -62,32 +61,33 @@ abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] w
!these.hasNext && !those.hasNext
}
- def basisForHashCode: Seq[Any] = theSeq
+
+ protected def basisForHashCode: Seq[Any] = theSeq
+
override def canEqual(other: Any) = other match {
case _: NodeSeq => true
case _ => false
}
+
override def strict_==(other: Equality) = other match {
case x: NodeSeq => (length == x.length) && (theSeq sameElements x.theSeq)
case _ => false
}
- /** Projection function, which returns elements of `this` sequence based on the string `that`. Use:
+ /** Projection function, which returns elements of `this` sequence based
+ * on the string `that`. Use:
* - `this \ "foo"` to get a list of all elements that are labelled with `"foo"`;
* - `\ "_"` to get a list of all elements (wildcard);
* - `ns \ "@foo"` to get the unprefixed attribute `"foo"`;
- * - `ns \ "@{uri}foo"` to get the prefixed attribute `"pre:foo"` whose prefix `"pre"` is resolved to the
- * namespace `"uri"`.
+ * - `ns \ "@{uri}foo"` to get the prefixed attribute `"pre:foo"` whose
+ * prefix `"pre"` is resolved to the namespace `"uri"`.
*
- * For attribute projections, the resulting [[scala.xml.NodeSeq]] attribute values are wrapped in a
- * [[scala.xml.Group]].
+ * For attribute projections, the resulting [[scala.xml.NodeSeq]] attribute
+ * values are wrapped in a [[scala.xml.Group]].
*
* There is no support for searching a prefixed attribute by its literal prefix.
*
* The document order is preserved.
- *
- * @param that ...
- * @return ...
*/
def \(that: String): NodeSeq = {
def fail = throw new IllegalArgumentException(that)
@@ -121,23 +121,20 @@ abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] w
}
}
- /** Projection function, which returns elements of `this` sequence and of all its subsequences, based on
- * the string `that`. Use:
+ /** Projection function, which returns elements of `this` sequence and of
+ * all its subsequences, based on the string `that`. Use:
* - `this \\ 'foo` to get a list of all elements that are labelled with `"foo"`;
* - `\\ "_"` to get a list of all elements (wildcard);
* - `ns \\ "@foo"` to get the unprefixed attribute `"foo"`;
- * - `ns \\ "@{uri}foo"` to get each prefixed attribute `"pre:foo"` whose prefix `"pre"` is resolved to the
- * namespace `"uri"`.
+ * - `ns \\ "@{uri}foo"` to get each prefixed attribute `"pre:foo"` whose
+ * prefix `"pre"` is resolved to the namespace `"uri"`.
*
- * For attribute projections, the resulting [[scala.xml.NodeSeq]] attribute values are wrapped in a
- * [[scala.xml.Group]].
+ * For attribute projections, the resulting [[scala.xml.NodeSeq]] attribute
+ * values are wrapped in a [[scala.xml.Group]].
*
* There is no support for searching a prefixed attribute by its literal prefix.
*
* The document order is preserved.
- *
- * @param that ...
- * @return ...
*/
def \\ (that: String): NodeSeq = {
def filt(cond: (Node) => Boolean) = this flatMap (_.descendant_or_self) filter cond
@@ -149,5 +146,6 @@ abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] w
}
override def toString(): String = theSeq.mkString
- def text: String = this map (_.text) mkString
+
+ def text: String = (this map (_.text)).mkString
}
diff --git a/src/library/scala/xml/Null.scala b/src/library/scala/xml/Null.scala
index 3a75aea..b39ef5d 100644
--- a/src/library/scala/xml/Null.scala
+++ b/src/library/scala/xml/Null.scala
@@ -1,21 +1,22 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.xml
-import Utility.{ isNameStart }
+import Utility.isNameStart
import scala.collection.Iterator
/** Essentially, every method in here is a dummy, returning Zero[T].
* It provides a backstop for the unusual collection defined by MetaData,
* sort of a linked list of tails.
+ *
+ * @author Burak Emir
+ * @version 1.0
*/
case object Null extends MetaData {
override def iterator = Iterator.empty
@@ -39,21 +40,20 @@ case object Null extends MetaData {
case x: MetaData => x.length == 0
case _ => false
}
- override def basisForHashCode: Seq[Any] = Nil
+ override protected def basisForHashCode: Seq[Any] = Nil
def apply(namespace: String, scope: NamespaceBinding, key: String) = null
- def apply(key: String) = {
- if (!isNameStart(key.head))
- throw new IllegalArgumentException("not a valid attribute name '"+key+"', so can never match !")
+ def apply(key: String) =
+ if (isNameStart(key.head)) null
+ else throw new IllegalArgumentException("not a valid attribute name '"+key+"', so can never match !")
- null
- }
+ protected def toString1(sb: StringBuilder) = ()
+ override protected def toString1(): String = ""
- def toString1(sb: StringBuilder) = ()
- override def toString1(): String = ""
override def toString(): String = ""
override def buildString(sb: StringBuilder): StringBuilder = sb
+
override def wellformed(scope: NamespaceBinding) = true
def remove(key: String) = this
diff --git a/src/library/scala/xml/PCData.scala b/src/library/scala/xml/PCData.scala
index 330ad89..64818a9 100644
--- a/src/library/scala/xml/PCData.scala
+++ b/src/library/scala/xml/PCData.scala
@@ -1,20 +1,43 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
package scala.xml
-/** This class (which is not used by all XML parsers, but always used by the XHTML one)
- * represents parseable character data, which appeared as CDATA sections in the input
- * and is to be preserved as CDATA section in the output.
+/** This class (which is not used by all XML parsers, but always used by the
+ * XHTML one) represents parseable character data, which appeared as CDATA
+ * sections in the input and is to be preserved as CDATA section in the output.
+ *
+ * @author Burak Emir
+ * @version 1.0
*/
-case class PCData(_data: String) extends Atom[String](_data) {
- if (null == data)
- throw new IllegalArgumentException("tried to construct PCData with null")
+class PCData(data: String) extends Atom[String](data) {
/** Returns text, with some characters escaped according to the XML
* specification.
*
- * @param sb ...
- * @return ...
+ * @param sb the input string buffer associated to some XML element
+ * @return the input string buffer with the formatted CDATA section
*/
- override def buildString(sb: StringBuilder) =
+ override def buildString(sb: StringBuilder): StringBuilder =
sb append "<![CDATA[%s]]>".format(data)
}
+
+/** This singleton object contains the `apply`and `unapply` methods for
+ * convenient construction and deconstruction.
+ *
+ * @author Burak Emir
+ * @version 1.0
+ */
+object PCData {
+ def apply(data: String) = new PCData(data)
+ def unapply(other: Any): Option[String] = other match {
+ case x: PCData => Some(x.data)
+ case _ => None
+ }
+}
+
diff --git a/src/library/scala/xml/PrefixedAttribute.scala b/src/library/scala/xml/PrefixedAttribute.scala
index 01a3e0c..429cd68 100644
--- a/src/library/scala/xml/PrefixedAttribute.scala
+++ b/src/library/scala/xml/PrefixedAttribute.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,24 +11,27 @@ package scala.xml
/** prefixed attributes always have a non-null namespace.
*
- * @param pre ...
- * @param key ...
- * @param value the attribute value, which may not be null
- * @param next ...
+ * @param pre
+ * @param key
+ * @param value the attribute value
+ * @param next1
*/
class PrefixedAttribute(
val pre: String,
val key: String,
val value: Seq[Node],
- val next: MetaData)
+ val next1: MetaData)
extends Attribute
{
- if (value eq null)
- throw new UnsupportedOperationException("value is null")
+ val next = if (value ne null) next1 else next1.remove(key)
- /** same as this(key, Utility.parseAttributeValue(value), next) */
+ /** same as this(pre, key, Text(value), next), or no attribute if value is null */
def this(pre: String, key: String, value: String, next: MetaData) =
- this(pre, key, Text(value), next)
+ this(pre, key, if (value ne null) Text(value) else null: NodeSeq, next)
+
+ /** same as this(pre, key, value.get, next), or no attribute if value is None */
+ def this(pre: String, key: String, value: Option[Seq[Node]], next: MetaData) =
+ this(pre, key, value.orNull, next)
/** Returns a copy of this unprefixed attribute with the given
* next field.
@@ -53,5 +56,5 @@ extends Attribute
}
object PrefixedAttribute {
- def unapply(x: PrefixedAttribute) = Some(x.pre, x.key, x.value, x.next)
+ def unapply(x: PrefixedAttribute) = Some((x.pre, x.key, x.value, x.next))
}
diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala
old mode 100644
new mode 100755
index 89742f9..39ff8c3
--- a/src/library/scala/xml/PrettyPrinter.scala
+++ b/src/library/scala/xml/PrettyPrinter.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
import Utility.sbToString
@@ -43,17 +42,13 @@ class PrettyPrinter(width: Int, step: Int) {
}
/** Try to cut at whitespace.
- *
- * @param s ...
- * @param ind ...
- * @return ...
*/
protected def cut(s: String, ind: Int): List[Item] = {
val tmp = width - cur
if (s.length <= tmp)
return List(Box(ind, s))
val sb = new StringBuilder()
- var i = s.indexOf(' ')
+ var i = s indexOf ' '
if (i > tmp || i == -1) throw new BrokenException() // cannot break
var last: List[Int] = Nil
@@ -75,10 +70,6 @@ class PrettyPrinter(width: Int, step: Int) {
}
/** Try to make indented box, if possible, else para.
- *
- * @param ind ...
- * @param s ...
- * @return ...
*/
protected def makeBox(ind: Int, s: String) =
if (cur + s.length > width) { // fits in this line
@@ -100,10 +91,6 @@ class PrettyPrinter(width: Int, step: Int) {
cur = 0
}
- /**
- * @param n ...
- * @return ...
- */
protected def leafTag(n: Node) = {
def mkLeaf(sb: StringBuilder) {
sb append '<'
@@ -150,21 +137,20 @@ class PrettyPrinter(width: Int, step: Int) {
private def doPreserve(node: Node) =
node.attribute(XML.namespace, XML.space).map(_.toString == XML.preserve) getOrElse false
- /** @param tail: what we'd like to sqeeze in */
protected def traverse(node: Node, pscope: NamespaceBinding, ind: Int): Unit = node match {
case Text(s) if s.trim() == "" =>
;
case _:Atom[_] | _:Comment | _:EntityRef | _:ProcInstr =>
- makeBox( ind, node.toString().trim() )
+ makeBox( ind, node.toString.trim() )
case g @ Group(xs) =>
traverse(xs.iterator, pscope, ind)
case _ =>
val test = {
val sb = new StringBuilder()
- Utility.toXML(node, pscope, sb, false)
+ Utility.serialize(node, pscope, sb, false)
if (doPreserve(node)) sb.toString
- else TextBuffer.fromString(sb.toString()).toText(0).data
+ else TextBuffer.fromString(sb.toString).toText(0).data
}
if (childrenAreLeaves(node) && fits(test)) {
makeBox(ind, test)
@@ -211,13 +197,13 @@ class PrettyPrinter(width: Int, step: Int) {
* given namespace to prefix mapping to the given string buffer.
*
* @param n the node to be serialized
- * @param pmap the namespace to prefix mapping
* @param sb the stringbuffer to append to
*/
- def format(n: Node, sb: StringBuilder ): Unit = // entry point
+ def format(n: Node, sb: StringBuilder) { // entry point
format(n, null, sb)
+ }
- def format(n: Node, pscope: NamespaceBinding, sb: StringBuilder): Unit = { // entry point
+ def format(n: Node, pscope: NamespaceBinding, sb: StringBuilder) { // entry point
var lastwasbreak = false
reset()
traverse(n, pscope, 0)
@@ -227,21 +213,21 @@ class PrettyPrinter(width: Int, step: Int) {
if (!lastwasbreak) sb.append('\n') // on windows: \r\n ?
lastwasbreak = true
cur = 0
-// while( cur < last ) {
-// sb.append(' ');
-// cur = cur + 1;
+// while (cur < last) {
+// sb append ' '
+// cur += 1
// }
case Box(i, s) =>
lastwasbreak = false
while (cur < i) {
- sb.append(' ')
+ sb append ' '
cur += 1
}
sb.append(s)
case Para( s ) =>
lastwasbreak = false
- sb.append(s)
+ sb append s
}
}
@@ -250,9 +236,9 @@ class PrettyPrinter(width: Int, step: Int) {
/** Returns a formatted string containing well-formed XML with
* given namespace to prefix mapping.
*
- * @param n the node to be serialized
- * @param pmap the namespace to prefix mapping
- * @return ...
+ * @param n the node to be serialized
+ * @param pscope the namespace to prefix mapping
+ * @return the formatted string
*/
def format(n: Node, pscope: NamespaceBinding = null): String =
sbToString(format(n, pscope, _))
diff --git a/src/library/scala/xml/ProcInstr.scala b/src/library/scala/xml/ProcInstr.scala
index b3f4ba9..64a9dd5 100644
--- a/src/library/scala/xml/ProcInstr.scala
+++ b/src/library/scala/xml/ProcInstr.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,8 +12,8 @@ package scala.xml
/** an XML node for processing instructions (PI)
*
* @author Burak Emir
- * @param target target name of this PI
- * @param text text contained in this node, may not contain "?>"
+ * @param target target name of this PI
+ * @param proctext text contained in this node, may not contain "?>"
*/
case class ProcInstr(target: String, proctext: String) extends SpecialNode
{
diff --git a/src/library/scala/xml/QNode.scala b/src/library/scala/xml/QNode.scala
index 52fed9a..d4d3872 100644
--- a/src/library/scala/xml/QNode.scala
+++ b/src/library/scala/xml/QNode.scala
@@ -1,20 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.xml
-/**
- * This object provides an extractor method to match a qualified node with its namespace URI
+/** This object provides an extractor method to match a qualified node with
+ * its namespace URI
*
- * @author Burak Emir
- * @version 1.0
+ * @author Burak Emir
+ * @version 1.0
*/
object QNode {
def unapplySeq(n: Node) = Some((n.scope.getURI(n.prefix), n.label, n.attributes, n.child))
diff --git a/src/library/scala/xml/SpecialNode.scala b/src/library/scala/xml/SpecialNode.scala
index d69dbac..4c1b81c 100644
--- a/src/library/scala/xml/SpecialNode.scala
+++ b/src/library/scala/xml/SpecialNode.scala
@@ -1,26 +1,23 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
-/** <p>
- * <code>SpecialNode</code> is a special XML node which
- * represents either text (PCDATA), a comment, a PI, or an entity ref.
- * </p>
- * <p>
- * SpecialNodes also play the role of XMLEvents for pull-parsing.
- * </p>
+/** `SpecialNode` is a special XML node which represents either text
+ * `(PCDATA)`, a comment, a `PI`, or an entity ref.
+ *
+ * `SpecialNode`s also play the role of [[scala.xml.pull.XMLEvent]]s for
+ * pull-parsing.
*
* @author Burak Emir
*/
-abstract class SpecialNode extends Node with pull.XMLEvent
-{
+abstract class SpecialNode extends Node with pull.XMLEvent {
+
/** always empty */
final override def attributes = Null
@@ -30,6 +27,6 @@ abstract class SpecialNode extends Node with pull.XMLEvent
/** always empty */
final def child = Nil
- /** append string representation to the given stringbuffer */
+ /** Append string representation to the given string buffer argument. */
def buildString(sb: StringBuilder): StringBuilder
}
diff --git a/src/library/scala/xml/Text.scala b/src/library/scala/xml/Text.scala
index 734cbe3..782c80f 100644
--- a/src/library/scala/xml/Text.scala
+++ b/src/library/scala/xml/Text.scala
@@ -1,49 +1,38 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
-// XXX This attempt to make Text not a case class revealed a bug in the pattern
-// matcher (see ticket #2883) so I've put the case back. (It was/is desirable that
-// it not be a case class because it is using the antipattern of passing constructor
-// parameters to the superclass where they become vals, but since they will also be
-// vals in the subclass, it acquires an underscore to avoid a name clash.)
-//
-// object Text {
-// def apply(data: String) =
-// if (data != null) new Text(data)
-// else throw new IllegalArgumentException("tried to construct Text with null")
-//
-// def unapply(other: Any): Option[String] = other match {
-// case x: Text => Some(x.data)
-// case _ => None
-// }
-// }
-
-/** The class <code>Text</code> implements an XML node for text (PCDATA).
+/** The class `Text` implements an XML node for text (PCDATA).
* It is used in both non-bound and bound XML representations.
*
* @author Burak Emir
- *
- * @param text the text contained in this node, may not be null.
+ * @param data the text contained in this node, may not be null.
*/
-case class Text(_data: String) extends Atom[String](_data)
-{
- if (_data == null)
- throw new IllegalArgumentException("tried to construct Text with null")
+class Text(data: String) extends Atom[String](data) {
/** Returns text, with some characters escaped according to the XML
* specification.
- *
- * @param sb ...
- * @return ...
*/
- override def buildString(sb: StringBuilder) =
+ override def buildString(sb: StringBuilder): StringBuilder =
Utility.escape(data, sb)
}
+
+/** This singleton object contains the `apply`and `unapply` methods for
+ * convenient construction and deconstruction.
+ *
+ * @author Burak Emir
+ * @version 1.0
+ */
+object Text {
+ def apply(data: String) = new Text(data)
+ def unapply(other: Any): Option[String] = other match {
+ case x: Text => Some(x.data)
+ case _ => None
+ }
+}
diff --git a/src/library/scala/xml/TextBuffer.scala b/src/library/scala/xml/TextBuffer.scala
index 817cf03..0b96379 100644
--- a/src/library/scala/xml/TextBuffer.scala
+++ b/src/library/scala/xml/TextBuffer.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -15,19 +15,16 @@ object TextBuffer {
def fromString(str: String): TextBuffer = new TextBuffer() append str
}
-/** The class <code>TextBuffer</code> is for creating text nodes without
- * surplus whitespace. All occurrences of one or more whitespace in strings
- * appended with the <code>append</code> method will be replaced by a single
- * space character, and leading and trailing space will be removed completely.
+/** The class `TextBuffer` is for creating text nodes without surplus
+ * whitespace. All occurrences of one or more whitespace in strings
+ * appended with the `append` method will be replaced by a single space
+ * character, and leading and trailing space will be removed completely.
*/
class TextBuffer
{
val sb = new StringBuilder()
/** Appends this string to the text buffer, trimming whitespaces as needed.
- *
- * @param cs ...
- * @return ...
*/
def append(cs: Seq[Char]): this.type = {
cs foreach { c =>
diff --git a/src/library/scala/xml/TopScope.scala b/src/library/scala/xml/TopScope.scala
index 3356101..1ed1d50 100644
--- a/src/library/scala/xml/TopScope.scala
+++ b/src/library/scala/xml/TopScope.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,8 +13,8 @@ package scala.xml
* for the "xml" prefix which is bound to
* "http://www.w3.org/XML/1998/namespace"
*/
-object TopScope extends NamespaceBinding(null, null, null)
-{
+object TopScope extends NamespaceBinding(null, null, null) {
+
import XML.{ xml, namespace }
override def getURI(prefix1: String): String =
@@ -24,6 +24,7 @@ object TopScope extends NamespaceBinding(null, null, null)
if (uri1 == namespace) xml else null
override def toString() = ""
+
override def buildString(stop: NamespaceBinding) = ""
override def buildString(sb: StringBuilder, ignore: NamespaceBinding) = {}
}
diff --git a/src/library/scala/xml/TypeSymbol.scala b/src/library/scala/xml/TypeSymbol.scala
index 774e602..f02c026 100644
--- a/src/library/scala/xml/TypeSymbol.scala
+++ b/src/library/scala/xml/TypeSymbol.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/xml/Unparsed.scala b/src/library/scala/xml/Unparsed.scala
index 6186d2b..ef80823 100644
--- a/src/library/scala/xml/Unparsed.scala
+++ b/src/library/scala/xml/Unparsed.scala
@@ -1,30 +1,34 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.xml
/** An XML node for unparsed content. It will be output verbatim, all bets
* are off regarding wellformedness etc.
*
- * @author Burak Emir
- * @param data content in this node, may not be null.
+ * @author Burak Emir
+ * @param data content in this node, may not be null.
*/
-class Unparsed(data: String) extends Atom[String](data)
-{
- if (null == data)
- throw new IllegalArgumentException("tried to construct Unparsed with null")
+class Unparsed(data: String) extends Atom[String](data) {
- /** returns text, with some characters escaped according to XML spec */
- override def buildString(sb: StringBuilder) = sb append data
+ /** Returns text, with some characters escaped according to XML
+ * specification.
+ */
+ override def buildString(sb: StringBuilder): StringBuilder =
+ sb append data
}
+/** This singleton object contains the `apply`and `unapply` methods for
+ * convenient construction and deconstruction.
+ *
+ * @author Burak Emir
+ * @version 1.0
+ */
object Unparsed {
def apply(data: String) = new Unparsed(data)
def unapply(x: Unparsed) = Some(x.data)
diff --git a/src/library/scala/xml/UnprefixedAttribute.scala b/src/library/scala/xml/UnprefixedAttribute.scala
index bf4f99c..2985591 100644
--- a/src/library/scala/xml/UnprefixedAttribute.scala
+++ b/src/library/scala/xml/UnprefixedAttribute.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -22,7 +22,7 @@ extends Attribute
final val pre = null
val next = if (value ne null) next1 else next1.remove(key)
- /** same as this(key, Text(value), next) */
+ /** same as this(key, Text(value), next), or no attribute if value is null */
def this(key: String, value: String, next: MetaData) =
this(key, if (value ne null) Text(value) else null: NodeSeq, next)
@@ -56,5 +56,5 @@ extends Attribute
next(namespace, scope, key)
}
object UnprefixedAttribute {
- def unapply(x: UnprefixedAttribute) = Some(x.key, x.value, x.next)
+ def unapply(x: UnprefixedAttribute) = Some((x.key, x.value, x.next))
}
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
old mode 100644
new mode 100755
index 3f1c5fb..030a897
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -1,29 +1,28 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.xml
-import collection.mutable
-import mutable.{ Set, HashSet }
+import scala.collection.mutable
import parsing.XhtmlEntities
+import scala.language.implicitConversions
/**
- * The <code>Utility</code> object provides utility functions for processing
- * instances of bound and not bound XML classes, as well as escaping text nodes.
+ * The `Utility` object provides utility functions for processing instances
+ * of bound and not bound XML classes, as well as escaping text nodes.
*
* @author Burak Emir
*/
-object Utility extends AnyRef with parsing.TokenTests
-{
+object Utility extends AnyRef with parsing.TokenTests {
final val SU = '\u001A'
+ // [Martin] This looks dubious. We don't convert StringBuilders to
+ // Strings anywhere else, why do it here?
implicit def implicitSbToString(sb: StringBuilder) = sb.toString()
// helper for the extremely oft-repeated sequence of creating a
@@ -35,22 +34,21 @@ object Utility extends AnyRef with parsing.TokenTests
}
private[xml] def isAtomAndNotText(x: Node) = x.isAtom && !x.isInstanceOf[Text]
- /** trims an element - call this method, when you know that it is an
+ /** Trims an element - call this method, when you know that it is an
* element (and not a text node) so you know that it will not be trimmed
- * away. With this assumption, the function can return a <code>Node</code>,
- * rather than a <code>Seq[Node]</code>. If you don't know, call
- * <code>trimProper</code> and account for the fact that you may get back
- * an empty sequence of nodes.
+ * away. With this assumption, the function can return a `Node`, rather
+ * than a `Seq[Node]`. If you don't know, call `trimProper` and account
+ * for the fact that you may get back an empty sequence of nodes.
*
- * precondition: node is not a text node (it might be trimmed)
+ * Precondition: node is not a text node (it might be trimmed)
*/
def trim(x: Node): Node = x match {
case Elem(pre, lab, md, scp, child at _*) =>
Elem(pre, lab, md, scp, (child flatMap trimProper):_*)
}
- /** trim a child of an element. <code>Attribute</code> values and
- * <code>Atom</code> nodes that are not <code>Text</code> nodes are unaffected.
+ /** trim a child of an element. `Attribute` values and `Atom` nodes that
+ * are not `Text` nodes are unaffected.
*/
def trimProper(x:Node): Seq[Node] = x match {
case Elem(pre,lab,md,scp,child at _*) =>
@@ -60,26 +58,25 @@ object Utility extends AnyRef with parsing.TokenTests
case _ =>
x
}
+
/** returns a sorted attribute list */
def sort(md: MetaData): MetaData = if((md eq Null) || (md.next eq Null)) md else {
val key = md.key
val smaller = sort(md.filter { m => m.key < key })
val greater = sort(md.filter { m => m.key > key })
- smaller.append( Null ).append(md.copy ( greater ))
+ smaller.foldRight (md copy greater) ((x, xs) => x copy xs)
}
- /** returns the node with its attribute list sorted alphabetically (prefixes are ignored) */
+ /** Return the node with its attribute list sorted alphabetically
+ * (prefixes are ignored) */
def sort(n:Node): Node = n match {
- case Elem(pre,lab,md,scp,child at _*) =>
- Elem(pre,lab,sort(md),scp, (child map sort):_*)
- case _ => n
+ case Elem(pre,lab,md,scp,child at _*) =>
+ Elem(pre,lab,sort(md),scp, (child map sort):_*)
+ case _ => n
}
/**
* Escapes the characters < > & and " from string.
- *
- * @param text ...
- * @return ...
*/
final def escape(text: String): String = sbToString(escape(text, _))
@@ -101,11 +98,7 @@ object Utility extends AnyRef with parsing.TokenTests
import Escapes.{ escMap, unescMap }
/**
- * Appends escaped string to <code>s</code>.
- *
- * @param text ...
- * @param s ...
- * @return ...
+ * Appends escaped string to `s`.
*/
final def escape(text: String, s: StringBuilder): StringBuilder = {
// Implemented per XML spec:
@@ -132,32 +125,23 @@ object Utility extends AnyRef with parsing.TokenTests
}
/**
- * Appends unescaped string to <code>s</code>, amp becomes &
- * lt becomes < etc..
+ * Appends unescaped string to `s`, `amp` becomes `&`,
+ * `lt` becomes `<` etc..
*
- * @param ref ...
- * @param s ...
- * @return <code>null</code> if <code>ref</code> was not a predefined
- * entity.
+ * @return `'''null'''` if `ref` was not a predefined entity.
*/
final def unescape(ref: String, s: StringBuilder): StringBuilder =
- (unescMap get ref) map (s append _) orNull
+ ((unescMap get ref) map (s append _)).orNull
/**
* Returns a set of all namespaces used in a sequence of nodes
* and all their descendants, including the empty namespaces.
- *
- * @param nodes ...
- * @return ...
*/
def collectNamespaces(nodes: Seq[Node]): mutable.Set[String] =
- nodes.foldLeft(new HashSet[String]) { (set, x) => collectNamespaces(x, set) ; set }
+ nodes.foldLeft(new mutable.HashSet[String]) { (set, x) => collectNamespaces(x, set) ; set }
/**
* Adds all namespaces in node to set.
- *
- * @param n ...
- * @param set ...
*/
def collectNamespaces(n: Node, set: mutable.Set[String]) {
if (n.doCollectNamespaces) {
@@ -185,6 +169,13 @@ object Utility extends AnyRef with parsing.TokenTests
// sb.toString()
// }
+ /**
+ * Serialize the provided Node to the provided StringBuilder.
+ * <p/>
+ * Note that calling this source-compatible method will result in the same old, arguably almost universally unwanted,
+ * behaviour.
+ */
+ @deprecated("Please use `serialize` instead and specify a `minimizeTags` parameter", "2.10.0")
def toXML(
x: Node,
pscope: NamespaceBinding = TopScope,
@@ -194,29 +185,51 @@ object Utility extends AnyRef with parsing.TokenTests
preserveWhitespace: Boolean = false,
minimizeTags: Boolean = false): StringBuilder =
{
+ serialize(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, if (minimizeTags) MinimizeMode.Always else MinimizeMode.Never)
+ }
+
+ /**
+ * Serialize an XML Node to a StringBuilder.
+ *
+ * This is essentially a minor rework of `toXML` that can't have the same name due to an unfortunate
+ * combination of named/default arguments and overloading.
+ *
+ * @todo use a Writer instead
+ */
+ def serialize(
+ x: Node,
+ pscope: NamespaceBinding = TopScope,
+ sb: StringBuilder = new StringBuilder,
+ stripComments: Boolean = false,
+ decodeEntities: Boolean = true,
+ preserveWhitespace: Boolean = false,
+ minimizeTags: MinimizeMode.Value = MinimizeMode.Default): StringBuilder =
+ {
x match {
- case c: Comment => if (!stripComments) c buildString sb else sb
- case x: SpecialNode => x buildString sb
- case g: Group =>
- g.nodes foreach {toXML(_, x.scope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)}
- sb
- case _ =>
+ case c: Comment if !stripComments => c buildString sb
+ case s: SpecialNode => s buildString sb
+ case g: Group => for (c <- g.nodes) serialize(c, g.scope, sb, minimizeTags = minimizeTags) ; sb
+ case el: Elem =>
// print tag with namespace declarations
sb.append('<')
- x.nameToString(sb)
- if (x.attributes ne null) x.attributes.buildString(sb)
- x.scope.buildString(sb, pscope)
- if (x.child.isEmpty && minimizeTags) {
+ el.nameToString(sb)
+ if (el.attributes ne null) el.attributes.buildString(sb)
+ el.scope.buildString(sb, pscope)
+ if (el.child.isEmpty &&
+ (minimizeTags == MinimizeMode.Always ||
+ (minimizeTags == MinimizeMode.Default && el.minimizeEmpty)))
+ {
// no children, so use short form: <xyz .../>
- sb.append(" />")
+ sb.append("/>")
} else {
// children, so use long form: <xyz ...>...</xyz>
sb.append('>')
- sequenceToXML(x.child, x.scope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
+ sequenceToXML(el.child, el.scope, sb, stripComments)
sb.append("</")
- x.nameToString(sb)
+ el.nameToString(sb)
sb.append('>')
}
+ case _ => throw new IllegalArgumentException("Don't know how to serialize a " + x.getClass.getName)
}
}
@@ -227,27 +240,24 @@ object Utility extends AnyRef with parsing.TokenTests
stripComments: Boolean = false,
decodeEntities: Boolean = true,
preserveWhitespace: Boolean = false,
- minimizeTags: Boolean = false): Unit =
+ minimizeTags: MinimizeMode.Value = MinimizeMode.Default): Unit =
{
if (children.isEmpty) return
else if (children forall isAtomAndNotText) { // add space
val it = children.iterator
val f = it.next
- toXML(f, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
+ serialize(f, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
while (it.hasNext) {
val x = it.next
sb.append(' ')
- toXML(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
+ serialize(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
}
}
- else children foreach { toXML(_, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags) }
+ else children foreach { serialize(_, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags) }
}
/**
* Returns prefix of qualified name if any.
- *
- * @param name ...
- * @return ...
*/
final def prefix(name: String): Option[String] = (name indexOf ':') match {
case -1 => None
@@ -256,30 +266,15 @@ object Utility extends AnyRef with parsing.TokenTests
/**
* Returns a hashcode for the given constituents of a node
- *
- * @param uri
- * @param label
- * @param attribHashCode
- * @param children
*/
- def hashCode(pre: String, label: String, attribHashCode: Int, scpeHash: Int, children: Seq[Node]) = {
- val h = new util.MurmurHash[Node](pre.##)
- h.append(label.##)
- h.append(attribHashCode)
- h.append(scpeHash)
- children.foreach(h)
- h.hash
- }
+ def hashCode(pre: String, label: String, attribHashCode: Int, scpeHash: Int, children: Seq[Node]) =
+ scala.util.hashing.MurmurHash3.orderedHash(label +: attribHashCode +: scpeHash +: children, pre.##)
def appendQuoted(s: String): String = sbToString(appendQuoted(s, _))
/**
- * Appends "s" if string <code>s</code> does not contain ",
+ * Appends "s" if string `s` does not contain ",
* 's' otherwise.
- *
- * @param s ...
- * @param sb ...
- * @return ...
*/
def appendQuoted(s: String, sb: StringBuilder) = {
val ch = if (s contains '"') '\'' else '"'
@@ -288,10 +283,6 @@ object Utility extends AnyRef with parsing.TokenTests
/**
* Appends "s" and escapes and " i s with \"
- *
- * @param s ...
- * @param sb ...
- * @return ...
*/
def appendEscapedQuoted(s: String, sb: StringBuilder): StringBuilder = {
sb.append('"')
@@ -302,11 +293,6 @@ object Utility extends AnyRef with parsing.TokenTests
sb.append('"')
}
- /**
- * @param s ...
- * @param index ...
- * @return ...
- */
def getName(s: String, index: Int): String = {
if (index >= s.length) null
else {
@@ -317,11 +303,8 @@ object Utility extends AnyRef with parsing.TokenTests
}
/**
- * Returns <code>null</code> if the value is a correct attribute value,
+ * Returns `'''null'''` if the value is a correct attribute value,
* error message if it isn't.
- *
- * @param value ...
- * @return ...
*/
def checkAttributeValue(value: String): String = {
var i = 0
@@ -343,12 +326,6 @@ object Utility extends AnyRef with parsing.TokenTests
null
}
- /**
- * new
- *
- * @param value ...
- * @return ...
- */
def parseAttributeValue(value: String): Seq[Node] = {
val sb = new StringBuilder
var rfb: StringBuilder = null
@@ -374,14 +351,14 @@ object Utility extends AnyRef with parsing.TokenTests
c = it.next
}
val ref = rfb.toString()
- rfb.setLength(0)
+ rfb.clear()
unescape(ref,sb) match {
case null =>
- if (sb.length > 0) { // flush buffer
+ if (sb.length > 0) { // flush buffer
nb += Text(sb.toString())
- sb.setLength(0)
+ sb.clear()
}
- nb += EntityRef(sb.toString()) // add entityref
+ nb += EntityRef(ref) // add entityref
case _ =>
}
}
@@ -399,18 +376,11 @@ object Utility extends AnyRef with parsing.TokenTests
}
/**
- * <pre>
+ * {{{
* CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
* | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
- * </pre>
- * <p>
- * see [66]
- * <p>
- *
- * @param ch ...
- * @param nextch ...
- * @param reportSyntaxError ...
- * @return ...
+ * }}}
+ * See [66]
*/
def parseCharRef(ch: () => Char, nextch: () => Unit, reportSyntaxError: String => Unit, reportTruncatedError: String => Unit): String = {
val hex = (ch() == 'x') && { nextch(); true }
diff --git a/src/library/scala/xml/XML.scala b/src/library/scala/xml/XML.scala
old mode 100644
new mode 100755
index 04dedfd..d101684
--- a/src/library/scala/xml/XML.scala
+++ b/src/library/scala/xml/XML.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.xml
import parsing.NoBindingFactoryAdapter
@@ -17,8 +15,7 @@ import java.io.{ InputStream, Reader, StringReader, Writer }
import java.nio.channels.Channels
import scala.util.control.Exception.ultimately
-object Source
-{
+object Source {
def fromFile(file: File) = new InputSource(new FileInputStream(file))
def fromFile(fd: FileDescriptor) = new InputSource(new FileInputStream(fd))
def fromFile(name: String) = new InputSource(new FileInputStream(name))
@@ -28,17 +25,36 @@ object Source
def fromSysId(sysID: String) = new InputSource(sysID)
def fromString(string: String) = fromReader(new StringReader(string))
}
+
+/**
+ * Governs how empty elements (i.e. those without child elements) should be serialized.
+ */
+object MinimizeMode extends Enumeration {
+ /** Minimize empty tags if they were originally empty when parsed, or if they were constructed
+ * with [[scala.xml.Elem]]`#minimizeEmpty` == true
+ */
+ val Default = Value
+
+ /** Always minimize empty tags. Note that this may be problematic for XHTML, in which
+ * case [[scala.xml.Xhtml]]`#toXhtml` should be used instead.
+ */
+ val Always = Value
+
+ /** Never minimize empty tags.
+ */
+ val Never = Value
+}
+
import Source._
-/** The object <code>XML</code> provides constants, and functions to load
+/** The object `XML` provides constants, and functions to load
* and save XML elements. Use this when data binding is not desired, i.e.
- * when XML is handled using <code>Symbol</code> nodes.
+ * when XML is handled using `Symbol` nodes.
*
* @author Burak Emir
* @version 1.0, 25/04/2005
*/
-object XML extends XMLLoader[Elem]
-{
+object XML extends XMLLoader[Elem] {
val xml = "xml"
val xmlns = "xmlns"
val namespace = "http://www.w3.org/XML/1998/namespace"
@@ -51,14 +67,6 @@ object XML extends XMLLoader[Elem]
def withSAXParser(p: SAXParser): XMLLoader[Elem] =
new XMLLoader[Elem] { override val parser: SAXParser = p }
- @deprecated("Use save() instead", "2.8.0")
- final def saveFull(filename: String, node: Node, xmlDecl: Boolean, doctype: dtd.DocType): Unit =
- save(filename, node, encoding, xmlDecl, doctype)
-
- @deprecated("Use save() instead", "2.8.0")
- final def saveFull(filename: String, node: Node, enc: String, xmlDecl: Boolean, doctype: dtd.DocType): Unit =
- save(filename, node, enc, xmlDecl, doctype)
-
/** Saves a node to a file with given filename using given encoding
* optionally with xmldecl and doctype declaration.
*
@@ -89,14 +97,14 @@ object XML extends XMLLoader[Elem]
*
* @param w the writer
* @param node the xml node we want to write
- * @param enc the string to be used in <code>xmlDecl</code>
+ * @param enc the string to be used in `xmlDecl`
* @param xmlDecl if true, write xml declaration
* @param doctype if not null, write doctype declaration
*/
- final def write(w: java.io.Writer, node: Node, enc: String, xmlDecl: Boolean, doctype: dtd.DocType) {
+ final def write(w: java.io.Writer, node: Node, enc: String, xmlDecl: Boolean, doctype: dtd.DocType, minimizeTags: MinimizeMode.Value = MinimizeMode.Default) {
/* TODO: optimize by giving writer parameter to toXML*/
if (xmlDecl) w.write("<?xml version='1.0' encoding='" + enc + "'?>\n")
if (doctype ne null) w.write( doctype.toString() + "\n")
- w.write(Utility.toXML(node).toString)
+ w.write(Utility.serialize(node, minimizeTags = minimizeTags).toString)
}
}
diff --git a/src/library/scala/xml/dtd/ContentModel.scala b/src/library/scala/xml/dtd/ContentModel.scala
index cae2236..abc71f5 100644
--- a/src/library/scala/xml/dtd/ContentModel.scala
+++ b/src/library/scala/xml/dtd/ContentModel.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,9 +11,9 @@
package scala.xml
package dtd
-import util.regexp.WordExp
-import util.automata._
-import Utility.sbToString
+import scala.util.regexp.WordExp
+import scala.util.automata._
+import scala.xml.Utility.sbToString
import PartialFunction._
object ContentModel extends WordExp {
@@ -36,8 +36,8 @@ object ContentModel extends WordExp {
def traverse(r: RegExp): Set[String] = r match { // !!! check for match translation problem
case Letter(ElemName(name)) => Set(name)
case Star( x @ _ ) => traverse( x ) // bug if x at _*
- case Sequ( xs @ _* ) => Set(xs map traverse flatten: _*)
- case Alt( xs @ _* ) => Set(xs map traverse flatten: _*)
+ case Sequ( xs @ _* ) => Set(xs flatMap traverse: _*)
+ case Alt( xs @ _* ) => Set(xs flatMap traverse: _*)
}
traverse(r)
@@ -52,7 +52,6 @@ object ContentModel extends WordExp {
sb append sep
buildString(z, sb)
}
- sb
}
def buildString(c: ContentModel, sb: StringBuilder): StringBuilder = c match {
diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala
index 2d87bc0..ace0219 100644
--- a/src/library/scala/xml/dtd/ContentModelParser.scala
+++ b/src/library/scala/xml/dtd/ContentModelParser.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
package dtd
@@ -21,10 +20,10 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
def accept(tok: Int) = {
if (token != tok) {
if ((tok == STAR) && (token == END)) // common mistake
- sys.error("in DTDs, \n"+
+ scala.sys.error("in DTDs, \n"+
"mixed content models must be like (#PCDATA|Name|Name|...)*");
else
- sys.error("expected "+token2string(tok)+
+ scala.sys.error("expected "+token2string(tok)+
", got unexpected token:"+token2string(token));
}
nextToken
@@ -45,7 +44,7 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
case NAME => value match {
case "ANY" => ANY
case "EMPTY" => EMPTY
- case _ => sys.error("expected ANY, EMPTY or '(' instead of " + value );
+ case _ => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value );
}
case LPAREN =>
@@ -65,12 +64,12 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
accept( STAR );
res
case _ =>
- sys.error("unexpected token:" + token2string(token) );
+ scala.sys.error("unexpected token:" + token2string(token) );
}
}
case _ =>
- sys.error("unexpected token:" + token2string(token) );
+ scala.sys.error("unexpected token:" + token2string(token) );
}
// sopt ::= S?
def sOpt() = if( token == S ) nextToken;
@@ -118,12 +117,12 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
def particle = token match {
case LPAREN => nextToken; sOpt; regexp;
case NAME => val a = Letter(ElemName(value)); nextToken; maybeSuffix(a)
- case _ => sys.error("expected '(' or Name, got:"+token2string(token));
+ case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token));
}
// atom ::= name
def atom = token match {
case NAME => val a = Letter(ElemName(value)); nextToken; a
- case _ => sys.error("expected Name, got:"+token2string(token));
+ case _ => scala.sys.error("expected Name, got:"+token2string(token));
}
}
diff --git a/src/library/scala/xml/dtd/DTD.scala b/src/library/scala/xml/dtd/DTD.scala
index aa86e2f..1f8af3b 100644
--- a/src/library/scala/xml/dtd/DTD.scala
+++ b/src/library/scala/xml/dtd/DTD.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,8 +10,7 @@
package scala.xml
package dtd
-import collection.mutable
-import mutable.HashMap
+import scala.collection.mutable
/** A document type declaration.
*
@@ -23,9 +22,9 @@ abstract class DTD {
def notations: Seq[NotationDecl] = Nil
def unparsedEntities: Seq[EntityDecl] = Nil
- var elem: mutable.Map[String, ElemDecl] = new HashMap[String, ElemDecl]()
- var attr: mutable.Map[String, AttListDecl] = new HashMap[String, AttListDecl]()
- var ent: mutable.Map[String, EntityDecl] = new HashMap[String, EntityDecl]()
+ var elem: mutable.Map[String, ElemDecl] = new mutable.HashMap[String, ElemDecl]()
+ var attr: mutable.Map[String, AttListDecl] = new mutable.HashMap[String, AttListDecl]()
+ var ent: mutable.Map[String, EntityDecl] = new mutable.HashMap[String, EntityDecl]()
override def toString() =
"DTD [\n%s%s]".format(
diff --git a/src/library/scala/xml/dtd/Decl.scala b/src/library/scala/xml/dtd/Decl.scala
index ffb3701..dc4cb93 100644
--- a/src/library/scala/xml/dtd/Decl.scala
+++ b/src/library/scala/xml/dtd/Decl.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
- ** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+ ** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
package dtd
@@ -21,27 +20,19 @@ abstract class MarkupDecl extends Decl {
/** an element declaration
*/
case class ElemDecl(name: String, contentModel: ContentModel)
-extends MarkupDecl
-{
+extends MarkupDecl {
override def buildString(sb: StringBuilder): StringBuilder = {
- sb
- .append("<!ELEMENT ")
- .append(name)
- .append(' ');
+ sb append "<!ELEMENT " append name append ' '
- ContentModel.buildString(contentModel, sb);
- sb.append('>');
+ ContentModel.buildString(contentModel, sb)
+ sb append '>'
}
}
case class AttListDecl(name: String, attrs:List[AttrDecl])
extends MarkupDecl {
override def buildString(sb: StringBuilder): StringBuilder = {
- sb
- .append("<!ATTLIST ")
- .append(name)
- .append('\n')
- .append(attrs.mkString("","\n",">"));
+ sb append "<!ATTLIST " append name append '\n' append attrs.mkString("","\n",">")
}
}
@@ -53,8 +44,8 @@ case class AttrDecl(name: String, tpe: String, default: DefaultDecl) {
override def toString(): String = sbToString(buildString)
def buildString(sb: StringBuilder): StringBuilder = {
- sb.append(" ").append(name).append(' ').append(tpe).append(' ');
- default.buildString(sb)
+ sb append " " append name append ' ' append tpe append ' '
+ default buildString sb
}
}
@@ -65,31 +56,31 @@ abstract class EntityDecl extends MarkupDecl
/** a parsed general entity declaration */
case class ParsedEntityDecl(name: String, entdef: EntityDef) extends EntityDecl {
override def buildString(sb: StringBuilder): StringBuilder = {
- sb.append("<!ENTITY ").append( name ).append(' ');
- entdef.buildString(sb).append('>')
+ sb append "<!ENTITY " append name append ' '
+ entdef buildString sb append '>'
}
}
/** a parameter entity declaration */
case class ParameterEntityDecl(name: String, entdef: EntityDef) extends EntityDecl {
override def buildString(sb: StringBuilder): StringBuilder = {
- sb.append("<!ENTITY % ").append( name ).append(' ');
- entdef.buildString(sb).append('>')
+ sb append "<!ENTITY % " append name append ' '
+ entdef buildString sb append '>'
}
}
/** an unparsed entity declaration */
case class UnparsedEntityDecl( name:String, extID:ExternalID, notation:String ) extends EntityDecl {
override def buildString(sb: StringBuilder): StringBuilder = {
- sb.append("<!ENTITY ").append( name ).append(' ');
- extID.buildString(sb).append(" NDATA ").append(notation).append('>');
+ sb append "<!ENTITY " append name append ' '
+ extID buildString sb append " NDATA " append notation append '>'
}
}
/** a notation declaration */
case class NotationDecl( name:String, extID:ExternalID ) extends MarkupDecl {
override def buildString(sb: StringBuilder): StringBuilder = {
- sb.append("<!NOTATION ").append( name ).append(' ');
- extID.buildString(sb)
+ sb append "<!NOTATION " append name append ' '
+ extID buildString sb
}
}
@@ -99,33 +90,33 @@ abstract class EntityDef {
case class IntDef(value:String) extends EntityDef {
private def validateValue() {
- var tmp = value;
- var ix = tmp.indexOf('%');
- while( ix != -1) {
- val iz = tmp.indexOf(';', ix);
+ var tmp = value
+ var ix = tmp indexOf '%'
+ while (ix != -1) {
+ val iz = tmp.indexOf(';', ix)
if(iz == -1 && iz == ix + 1)
- throw new IllegalArgumentException("no % allowed in entity value, except for parameter-entity-references");
+ throw new IllegalArgumentException("no % allowed in entity value, except for parameter-entity-references")
else {
- val n = tmp.substring(ix, iz);
+ val n = tmp.substring(ix, iz)
- if( !Utility.isName( n ))
- throw new IllegalArgumentException("internal entity def: \""+n+"\" must be an XML Name");
+ if (!Utility.isName(n))
+ throw new IllegalArgumentException("internal entity def: \""+n+"\" must be an XML Name")
- tmp = tmp.substring(iz+1, tmp.length());
- ix = tmp.indexOf('%');
+ tmp = tmp.substring(iz+1, tmp.length)
+ ix = tmp indexOf '%'
}
}
}
- validateValue();
+ validateValue()
override def buildString(sb: StringBuilder): StringBuilder =
- Utility.appendQuoted(value, sb);
+ Utility.appendQuoted(value, sb)
}
case class ExtDef(extID:ExternalID) extends EntityDef {
override def buildString(sb: StringBuilder): StringBuilder =
- extID.buildString(sb);
+ extID buildString sb
}
@@ -135,7 +126,7 @@ case class PEReference(ent:String) extends MarkupDecl {
throw new IllegalArgumentException("ent must be an XML Name");
override def buildString(sb: StringBuilder): StringBuilder =
- sb.append('%').append(ent).append(';');
+ sb append '%' append ent append ';'
}
@@ -148,18 +139,18 @@ abstract class DefaultDecl {
case object REQUIRED extends DefaultDecl {
override def toString(): String = "#REQUIRED"
- override def buildString(sb: StringBuilder) = sb.append("#REQUIRED")
+ override def buildString(sb: StringBuilder) = sb append "#REQUIRED"
}
case object IMPLIED extends DefaultDecl {
override def toString(): String = "#IMPLIED"
- override def buildString(sb: StringBuilder) = sb.append("#IMPLIED")
+ override def buildString(sb: StringBuilder) = sb append "#IMPLIED"
}
case class DEFAULT(fixed: Boolean, attValue: String) extends DefaultDecl {
override def toString(): String = sbToString(buildString)
override def buildString(sb: StringBuilder): StringBuilder = {
- if (fixed) sb.append("#FIXED ")
+ if (fixed) sb append "#FIXED "
Utility.appendEscapedQuoted(attValue, sb)
}
}
diff --git a/src/library/scala/xml/dtd/DocType.scala b/src/library/scala/xml/dtd/DocType.scala
index f0ef09b..79f8f9f 100644
--- a/src/library/scala/xml/dtd/DocType.scala
+++ b/src/library/scala/xml/dtd/DocType.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -14,7 +14,7 @@ package dtd
*
* @author Burak Emir
*
- * @param target name of this DOCTYPE
+ * @param name name of this DOCTYPE
* @param extID None, or Some(external ID of this doctype)
* @param intSubset sequence of internal subset declarations
*/
diff --git a/src/library/scala/xml/dtd/ElementValidator.scala b/src/library/scala/xml/dtd/ElementValidator.scala
index 162edc7..bfc85f4 100644
--- a/src/library/scala/xml/dtd/ElementValidator.scala
+++ b/src/library/scala/xml/dtd/ElementValidator.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -14,9 +14,8 @@ package dtd
import PartialFunction._
import ContentModel.ElemName
import MakeValidationException._ // @todo other exceptions
-
import scala.util.automata._
-import scala.collection.mutable.BitSet
+import scala.collection.mutable
/** validate children and/or attributes of an element
* exceptions are created but not thrown.
@@ -62,7 +61,7 @@ class ElementValidator() extends Function1[Node,Boolean] {
*/
def check(md: MetaData): Boolean = {
val len: Int = exc.length
- var ok = new BitSet(adecls.length)
+ var ok = new mutable.BitSet(adecls.length)
for (attr <- md) {
def attrStr = attr.value.toString
@@ -116,6 +115,7 @@ class ElementValidator() extends Function1[Node,Boolean] {
(dfa delta q).getOrElse(e, throw ValidationException("element %s not allowed here" format e))
}
}
+ case _ => false
}
/** applies various validations - accumulates error messages in exc
diff --git a/src/library/scala/xml/dtd/ExternalID.scala b/src/library/scala/xml/dtd/ExternalID.scala
index fdde18a..7a74635 100644
--- a/src/library/scala/xml/dtd/ExternalID.scala
+++ b/src/library/scala/xml/dtd/ExternalID.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -41,7 +41,7 @@ abstract class ExternalID extends parsing.TokenTests
/** a system identifier
*
* @author Burak Emir
- * @param systemLiteral the system identifier literal
+ * @param systemId the system identifier literal
*/
case class SystemID(systemId: String) extends ExternalID {
val publicId = null
@@ -54,8 +54,8 @@ case class SystemID(systemId: String) extends ExternalID {
/** a public identifier (see http://www.w3.org/QA/2002/04/valid-dtd-list.html).
*
* @author Burak Emir
- * @param publicLiteral the public identifier literal
- * @param systemLiteral (can be null for notation pubIDs) the system identifier literal
+ * @param publicId the public identifier literal
+ * @param systemId (can be null for notation pubIDs) the system identifier literal
*/
case class PublicID(publicId: String, systemId: String) extends ExternalID {
if (!checkPubID(publicId))
diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala
index 82a8d1a..9b64cc6 100644
--- a/src/library/scala/xml/dtd/Scanner.scala
+++ b/src/library/scala/xml/dtd/Scanner.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -44,7 +44,7 @@ class Scanner extends Tokens with parsing.TokenTests {
final def next() = if (it.hasNext) c = it.next else c = ENDCH
final def acc(d: Char) {
- if (c == d) next else sys.error("expected '"+d+"' found '"+c+"' !");
+ if (c == d) next else scala.sys.error("expected '"+d+"' found '"+c+"' !");
}
final def accS(ds: Seq[Char]) { ds foreach acc }
@@ -65,7 +65,7 @@ class Scanner extends Tokens with parsing.TokenTests {
case ENDCH => END
case _ =>
if (isNameStart(c)) name; // NAME
- else sys.error("unexpected character:" + c)
+ else scala.sys.error("unexpected character:" + c)
}
final def name = {
diff --git a/src/library/scala/xml/dtd/Tokens.scala b/src/library/scala/xml/dtd/Tokens.scala
index 0c8b557..eaffba9 100644
--- a/src/library/scala/xml/dtd/Tokens.scala
+++ b/src/library/scala/xml/dtd/Tokens.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/xml/dtd/ValidationException.scala b/src/library/scala/xml/dtd/ValidationException.scala
index 8d2da20..243db69 100644
--- a/src/library/scala/xml/dtd/ValidationException.scala
+++ b/src/library/scala/xml/dtd/ValidationException.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/xml/factory/Binder.scala b/src/library/scala/xml/factory/Binder.scala
old mode 100644
new mode 100755
index a8b0ed5..bad4a4e
--- a/src/library/scala/xml/factory/Binder.scala
+++ b/src/library/scala/xml/factory/Binder.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -43,13 +43,13 @@ abstract class Binder(val preserveWS: Boolean) extends ValidatingMarkupHandler {
result &+ text(0, x.data)
case x:EntityRef =>
result &+ entityRef(0, x.entityName)
- case _ =>
- elemStart(0, n.prefix, n.label, n.attributes, n.scope)
+ case x:Elem =>
+ elemStart(0, x.prefix, x.label, x.attributes, x.scope)
val old = result
result = new NodeBuffer()
- for (m <- n.child) traverse(m)
- result = old &+ elem(0, n.prefix, n.label, n.attributes, n.scope, NodeSeq.fromSeq(result)).toList;
- elemEnd(0, n.prefix, n.label)
+ for (m <- x.child) traverse(m)
+ result = old &+ elem(0, x.prefix, x.label, x.attributes, x.scope, x.minimizeEmpty, NodeSeq.fromSeq(result)).toList;
+ elemEnd(0, x.prefix, x.label)
}
final def validate(n: Node): Node = {
diff --git a/src/library/scala/xml/factory/LoggedNodeFactory.scala b/src/library/scala/xml/factory/LoggedNodeFactory.scala
index abf8f97..cac61ac 100644
--- a/src/library/scala/xml/factory/LoggedNodeFactory.scala
+++ b/src/library/scala/xml/factory/LoggedNodeFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,7 +12,7 @@ package factory
/** This class logs what the nodefactory is actually doing.
* If you want to see what happens during loading, use it like this:
{{{
-object testLogged extends Application {
+object testLogged extends App {
val x = new scala.xml.parsing.NoBindingFactoryAdapter
with scala.xml.factory.LoggedNodeFactory[scala.xml.Elem]
with scala.util.logging.ConsoleLogger
diff --git a/src/library/scala/xml/factory/NodeFactory.scala b/src/library/scala/xml/factory/NodeFactory.scala
index 61d4855..28a1b6f 100644
--- a/src/library/scala/xml/factory/NodeFactory.scala
+++ b/src/library/scala/xml/factory/NodeFactory.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -18,7 +18,7 @@ trait NodeFactory[A <: Node] {
val ignoreProcInstr = false
/* default behaviour is to use hash-consing */
- val cache = new collection.mutable.HashMap[Int, List[A]]
+ val cache = new scala.collection.mutable.HashMap[Int, List[A]]
protected def create(pre: String, name: String, attrs: MetaData, scope: NamespaceBinding, children:Seq[Node]): A
diff --git a/src/library/scala/xml/factory/XMLLoader.scala b/src/library/scala/xml/factory/XMLLoader.scala
index 73dfd4e..72e4c51 100644
--- a/src/library/scala/xml/factory/XMLLoader.scala
+++ b/src/library/scala/xml/factory/XMLLoader.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/xml/include/CircularIncludeException.scala b/src/library/scala/xml/include/CircularIncludeException.scala
index 2b2d357..5e74967 100644
--- a/src/library/scala/xml/include/CircularIncludeException.scala
+++ b/src/library/scala/xml/include/CircularIncludeException.scala
@@ -1,28 +1,24 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
package include
/**
- * <p>
- * A <code>CircularIncludeException</code> is thrown when
- * an included document attempts to include itself or
- * one of its ancestor documents.
- * </p>
+ * A `CircularIncludeException` is thrown when an included document attempts
+ * to include itself or one of its ancestor documents.
*/
class CircularIncludeException(message: String) extends XIncludeException {
- /**
- * Constructs a <code>CircularIncludeException</code> with <code>null</code>
- * as its error detail message.
- */
- def this() = this(null);
+ /**
+ * Constructs a `CircularIncludeException` with `'''null'''`.
+ * as its error detail message.
+ */
+ def this() = this(null)
}
diff --git a/src/library/scala/xml/include/UnavailableResourceException.scala b/src/library/scala/xml/include/UnavailableResourceException.scala
index e511561..f00cc58 100644
--- a/src/library/scala/xml/include/UnavailableResourceException.scala
+++ b/src/library/scala/xml/include/UnavailableResourceException.scala
@@ -1,21 +1,17 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
package include
/**
- * <p>
- * An <code>UnavailableResourceException</code> is thrown when
- * an included document cannot be found or loaded.
- * </p>
- *
+ * An `UnavailableResourceException` is thrown when an included document
+ * cannot be found or loaded.
*/
class UnavailableResourceException(message: String)
extends XIncludeException(message) {
diff --git a/src/library/scala/xml/include/XIncludeException.scala b/src/library/scala/xml/include/XIncludeException.scala
index a8ef395..84033f8 100644
--- a/src/library/scala/xml/include/XIncludeException.scala
+++ b/src/library/scala/xml/include/XIncludeException.scala
@@ -1,44 +1,38 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
package include
/**
- * <p>
- * <code>XIncludeException</code> is the generic superclass
- * for all checked exceptions that may be thrown as a result
- * of a violation of XInclude's rules.
- * </p>
- * <p>
- * Constructs an <code>XIncludeException</code> with the specified detail
- * message. The error message string <code>message</code> can later be
- * retrieved by the <code>{@link java.lang.Throwable#getMessage}</code>
- * method of class <code>java.lang.Throwable</code>.
- * </p>
+ * `XIncludeException` is the generic superclass for all checked exceptions
+ * that may be thrown as a result of a violation of XInclude's rules.
+ *
+ * Constructs an `XIncludeException` with the specified detail message.
+ * The error message string `message` can later be retrieved by the
+ * `{@link java.lang.Throwable#getMessage}`
+ * method of class `java.lang.Throwable`.
*
* @param message the detail message.
*/
class XIncludeException(message: String) extends Exception(message) {
/**
- * uses <code>null</code> as its error detail message.
+ * uses `'''null'''` as its error detail message.
*/
def this() = this(null)
private var rootCause: Throwable = null
/**
- * When an <code>IOException</code>, <code>MalformedURLException</code>
- * or other generic exception is thrown while processing an XML document
- * for XIncludes, it is customarily replaced
- * by some form of <code>XIncludeException</code>.
+ * When an `IOException`, `MalformedURLException` or other generic
+ * exception is thrown while processing an XML document for XIncludes,
+ * it is customarily replaced by some form of `XIncludeException`.
* This method allows you to store the original exception.
*
* @param nestedException the underlying exception which
@@ -49,15 +43,14 @@ class XIncludeException(message: String) extends Exception(message) {
}
/**
- * When an <code>IOException</code>, <code>MalformedURLException</code>
- * or other generic exception is thrown while processing an XML document
- * for XIncludes, it is customarily replaced
- * by some form of <code>XIncludeException</code>.
+ * When an `IOException`, `MalformedURLException` or other generic
+ * exception is thrown while processing an XML document for XIncludes,
+ * it is customarily replaced by some form of `XIncludeException`.
* This method allows you to retrieve the original exception.
- * It returns null if no such exception caused this <code>XIncludeException</code>.
+ * It returns null if no such exception caused this `XIncludeException`.
*
* @return Throwable the underlying exception which caused the
- * <code>XIncludeException</code> to be thrown
+ * `XIncludeException` to be thrown
*/
def getRootCause(): Throwable = this.rootCause
diff --git a/src/library/scala/xml/include/sax/EncodingHeuristics.scala b/src/library/scala/xml/include/sax/EncodingHeuristics.scala
index eaf0dff..1340689 100644
--- a/src/library/scala/xml/include/sax/EncodingHeuristics.scala
+++ b/src/library/scala/xml/include/sax/EncodingHeuristics.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/xml/include/sax/Main.scala b/src/library/scala/xml/include/sax/Main.scala
index 6b6f6c1..92d4d6e 100644
--- a/src/library/scala/xml/include/sax/Main.scala
+++ b/src/library/scala/xml/include/sax/Main.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,11 +10,11 @@
package scala.xml
package include.sax
-import scala.xml.include._
import scala.util.control.Exception.{ catching, ignoring }
import org.xml.sax.XMLReader
import org.xml.sax.helpers.XMLReaderFactory
+ at deprecated("Code example will be moved to documentation.", "2.10.0")
object Main {
private val namespacePrefixes = "http://xml.org/sax/features/namespace-prefixes"
private val lexicalHandler = "http://xml.org/sax/properties/lexical-handler"
diff --git a/src/library/scala/xml/include/sax/XIncludeFilter.scala b/src/library/scala/xml/include/sax/XIncludeFilter.scala
index 0e9facc..7297693 100644
--- a/src/library/scala/xml/include/sax/XIncludeFilter.scala
+++ b/src/library/scala/xml/include/sax/XIncludeFilter.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
package include.sax
@@ -19,74 +18,58 @@ import java.io.{ InputStream, BufferedInputStream, InputStreamReader, IOExceptio
import java.util.Stack
import java.net.{ URL, MalformedURLException }
-/**
- * <p>
- * This is a SAX filter which resolves all XInclude include elements
- * before passing them on to the client application. Currently this
- * class has the following known deviation from the XInclude specification:
- * </p>
- * <ol>
- * <li>XPointer is not supported.</li>
- * </ol>
+/** This is a SAX filter which resolves all XInclude include elements before
+ * passing them on to the client application. Currently this class has the
+ * following known deviation from the XInclude specification:
+ *
+ * 1. XPointer is not supported.
+ *
+ * Furthermore, I would definitely use a new instance of this class for each
+ * document you want to process. I doubt it can be used successfully on
+ * multiple documents. Furthermore, I can virtually guarantee that this
+ * class is not thread safe. You have been warned.
+ *
+ * Since this class is not designed to be subclassed, and since I have not
+ * yet considered how that might affect the methods herein or what other
+ * protected methods might be needed to support subclasses, I have declared
+ * this class final. I may remove this restriction later, though the use-case
+ * for subclassing is weak. This class is designed to have its functionality
+ * extended via a horizontal chain of filters, not a vertical hierarchy of
+ * sub and superclasses.
*
- * <p>
- * Furthermore, I would definitely use a new instance of this class
- * for each document you want to process. I doubt it can be used
- * successfully on multiple documents. Furthermore, I can virtually
- * guarantee that this class is not thread safe. You have been
- * warned.
- * </p>
+ * To use this class:
*
- * <p>
- * Since this class is not designed to be subclassed, and since
- * I have not yet considered how that might affect the methods
- * herein or what other protected methods might be needed to support
- * subclasses, I have declared this class final. I may remove this
- * restriction later, though the use-case for subclassing is weak.
- * This class is designed to have its functionality extended via a
- * a horizontal chain of filters, not a
- * vertical hierarchy of sub and superclasses.
- * </p>
+ * - Construct an `XIncludeFilter` object with a known base URL
+ * - Pass the `XMLReader` object from which the raw document will be read to
+ * the `setParent()` method of this object.
+ * - Pass your own `ContentHandler` object to the `setContentHandler()`
+ * method of this object. This is the object which will receive events
+ * from the parsed and included document.
+ * - Optional: if you wish to receive comments, set your own `LexicalHandler`
+ * object as the value of this object's
+ * `http://xml.org/sax/properties/lexical-handler` property.
+ * Also make sure your `LexicalHandler` asks this object for the status of
+ * each comment using `insideIncludeElement` before doing anything with the
+ * comment.
+ * - Pass the URL of the document to read to this object's `parse()` method
*
- * <p>
- * To use this class:
- * </p>
- * <ol>
- * <li>Construct an <code>XIncludeFilter</code> object with a known base URL</li>
- * <li>Pass the <code>XMLReader</code> object from which the raw document will
- * be read to the <code>setParent()</code> method of this object. </li>
- * <li>Pass your own <code>ContentHandler</code> object to the
- * <code>setContentHandler()</code> method of this object. This is the
- * object which will receive events from the parsed and included
- * document.
- * </li>
- * <li>Optional: if you wish to receive comments, set your own
- * <code>LexicalHandler</code> object as the value of this object's
- * http://xml.org/sax/properties/lexical-handler property.
- * Also make sure your <code>LexicalHandler</code> asks this object
- * for the status of each comment using <code>insideIncludeElement</code>
- * before doing anything with the comment.
- * </li>
- * <li>Pass the URL of the document to read to this object's
- * <code>parse()</code> method</li>
- * </ol>
+ * e.g.
+ * {{{
+ * val includer = new XIncludeFilter(base)
+ * includer setParent parser
+ * includer setContentHandler new SAXXIncluder(System.out)
+ * includer parse args(i)
+ * }}}
+ * translated from Elliotte Rusty Harold's Java source.
*
- * <p> e.g.</p>
- * <pre><code>XIncludeFilter includer = new XIncludeFilter(base);
- * includer.setParent(parser);
- * includer.setContentHandler(new SAXXIncluder(System.out));
- * includer.parse(args[i]);</code>
- * </pre>
- * </p>
- * translated from Elliotte Rusty Harold's Java source
* @author Burak Emir
*/
class XIncludeFilter extends XMLFilterImpl {
- final val XINCLUDE_NAMESPACE = "http://www.w3.org/2001/XInclude";
+ final val XINCLUDE_NAMESPACE = "http://www.w3.org/2001/XInclude"
- private val bases = new Stack[URL]();
- private val locators = new Stack[Locator]();
+ private val bases = new Stack[URL]()
+ private val locators = new Stack[Locator]()
/* private EntityResolver resolver;
@@ -103,7 +86,7 @@ class XIncludeFilter extends XMLFilterImpl {
// do I need to check this in startDocument() and push something
// there????
override def setDocumentLocator(locator: Locator) {
- locators.push(locator)
+ locators push locator
val base = locator.getSystemId()
try {
bases.push(new URL(base))
@@ -119,15 +102,12 @@ class XIncludeFilter extends XMLFilterImpl {
// necessary to throw away contents of non-empty XInclude elements
private var level = 0
- /**
- * <p>
- * This utility method returns true if and only if this reader is
- * currently inside a non-empty include element. (This is <strong>
- * not</strong> the same as being inside the node set which replaces
- * the include element.) This is primarily needed for comments
- * inside include elements. It must be checked by the actual
- * LexicalHandler to see whether a comment is passed or not.
- * </p>
+ /** This utility method returns true if and only if this reader is
+ * currently inside a non-empty include element. (This is '''not''' the
+ * same as being inside the node set which replaces the include element.)
+ * This is primarily needed for comments inside include elements.
+ * It must be checked by the actual `LexicalHandler` to see whether
+ * a comment is passed or not.
*
* @return boolean
*/
@@ -152,30 +132,30 @@ class XIncludeFilter extends XMLFilterImpl {
+ currentBase, e)
}
}
- bases.push(currentBase);
+ bases push currentBase
if (uri.equals(XINCLUDE_NAMESPACE) && localName.equals("include")) {
// include external document
val href = atts.getValue("href")
// Verify that there is an href attribute
- if (href==null) {
+ if (href == null) {
throw new SAXException("Missing href attribute")
}
- var parse = atts.getValue("parse")
+ var parse = atts getValue "parse"
if (parse == null) parse = "xml"
- if (parse.equals("text")) {
- val encoding = atts.getValue("encoding");
+ if (parse equals "text") {
+ val encoding = atts getValue "encoding"
includeTextDocument(href, encoding);
}
- else if (parse.equals("xml")) {
+ else if (parse equals "xml") {
includeXMLDocument(href);
}
// Need to check this also in DOM and JDOM????
else {
throw new SAXException(
- "Illegal value for parse attribute: " + parse);
+ "Illegal value for parse attribute: " + parse)
}
level += 1
}
@@ -196,7 +176,7 @@ class XIncludeFilter extends XMLFilterImpl {
override def endElement(uri: String, localName: String, qName: String) {
if (uri.equals(XINCLUDE_NAMESPACE)
&& localName.equals("include")) {
- level -= 1;
+ level -= 1
}
else if (level == 0) {
bases.pop()
@@ -214,7 +194,7 @@ class XIncludeFilter extends XMLFilterImpl {
override def endDocument() {
locators.pop()
- bases.pop(); // pop the URL for the document itself
+ bases.pop() // pop the URL for the document itself
depth -= 1
if (depth == 0) super.endDocument()
}
@@ -265,15 +245,11 @@ class XIncludeFilter extends XMLFilterImpl {
locationString
}
- /**
- * <p>
- * This utility method reads a document at a specified URL
- * and fires off calls to <code>characters()</code>.
- * It's used to include files with <code>parse="text"</code>
- * </p>
+ /** This utility method reads a document at a specified URL and fires off
+ * calls to `characters()`. It's used to include files with `parse="text"`.
*
* @param url URL of the document that will be read
- * @param encoding Encoding of the document; e.g. UTF-8,
+ * @param encoding1 Encoding of the document; e.g. UTF-8,
* ISO-8859-1, etc.
* @return void
* @throws SAXException if the requested document cannot
@@ -291,9 +267,9 @@ class XIncludeFilter extends XMLFilterImpl {
catch {
case e: MalformedURLException =>
val ex = new UnavailableResourceException("Unresolvable URL " + url
- + getLocation());
- ex.setRootCause(e);
- throw new SAXException("Unresolvable URL " + url + getLocation(), ex);
+ + getLocation())
+ ex.setRootCause(e)
+ throw new SAXException("Unresolvable URL " + url + getLocation(), ex)
}
try {
@@ -321,29 +297,26 @@ class XIncludeFilter extends XMLFilterImpl {
val c = new Array[Char](1024)
var charsRead: Int = 0 // bogus init value
do {
- charsRead = reader.read(c, 0, 1024);
- if (charsRead > 0) this.characters(c, 0, charsRead);
- } while (charsRead != -1) ;
+ charsRead = reader.read(c, 0, 1024)
+ if (charsRead > 0) this.characters(c, 0, charsRead)
+ } while (charsRead != -1)
}
catch {
case e: UnsupportedEncodingException =>
throw new SAXException("Unsupported encoding: "
- + encoding + getLocation(), e);
+ + encoding + getLocation(), e)
case e: IOException =>
throw new SAXException("Document not found: "
- + source.toExternalForm() + getLocation(), e);
+ + source.toExternalForm() + getLocation(), e)
}
}
private var atRoot = false
- /**
- * <p>
- * This utility method reads a document at a specified URL
- * and fires off calls to various <code>ContentHandler</code> methods.
- * It's used to include files with <code>parse="xml"</code>
- * </p>
+ /** This utility method reads a document at a specified URL
+ * and fires off calls to various `ContentHandler` methods.
+ * It's used to include files with `parse="xml"`.
*
* @param url URL of the document that will be read
* @return void
diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/library/scala/xml/include/sax/XIncluder.scala
index 979ea16..5064d6b 100644
--- a/src/library/scala/xml/include/sax/XIncluder.scala
+++ b/src/library/scala/xml/include/sax/XIncluder.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,19 +11,15 @@ package scala.xml
package include.sax
import scala.xml.include._
-import collection.mutable.Stack
-
+import scala.collection.mutable
import org.xml.sax.{ ContentHandler, XMLReader, Locator, Attributes }
import org.xml.sax.ext.LexicalHandler
import java.io.{ File, OutputStream, OutputStreamWriter, Writer, IOException }
-/** XIncluder is a SAX <code>ContentHandler</code>
- * that writes its XML document onto an output stream after resolving
- * all <code>xinclude:include</code> elements.
+/** XIncluder is a SAX `ContentHandler` that writes its XML document onto
+ * an output stream after resolving all `xinclude:include` elements.
*
- * <p>
- * based on Eliotte Rusty Harold's SAXXIncluder
- * </p>
+ * Based on Eliotte Rusty Harold's SAXXIncluder.
*/
class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler with LexicalHandler {
@@ -66,7 +62,7 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
val value = atts.getValue(i);
// @todo Need to use character references if the encoding
// can't support the character
- out.write(xml.Utility.escape(value))
+ out.write(scala.xml.Utility.escape(value))
out.write("'");
i += 1
}
@@ -101,7 +97,7 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
// (The end CDATA section delimiter)
else if (c == '>') out.write(">");
else out.write(c);
- i = i+1;
+ i += 1
}
}
catch {
@@ -137,7 +133,7 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
// LexicalHandler methods
private var inDTD: Boolean = false
- private val entities = new Stack[String]()
+ private val entities = new mutable.Stack[String]()
def startDTD(name: String, publicID: String, systemID: String) {
inDTD = true
diff --git a/src/library/scala/xml/package.scala b/src/library/scala/xml/package.scala
index dec05ab..4001cc5 100644
--- a/src/library/scala/xml/package.scala
+++ b/src/library/scala/xml/package.scala
@@ -1,3 +1,11 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
package scala
package object xml {
@@ -8,4 +16,4 @@ package object xml {
type EntityResolver = org.xml.sax.EntityResolver
type InputSource = org.xml.sax.InputSource
type SAXParser = javax.xml.parsers.SAXParser
-}
\ No newline at end of file
+}
diff --git a/src/library/scala/xml/parsing/ConstructingHandler.scala b/src/library/scala/xml/parsing/ConstructingHandler.scala
old mode 100644
new mode 100755
index 60c1913..6fda4da
--- a/src/library/scala/xml/parsing/ConstructingHandler.scala
+++ b/src/library/scala/xml/parsing/ConstructingHandler.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -21,8 +21,8 @@ abstract class ConstructingHandler extends MarkupHandler
val preserveWS: Boolean
def elem(pos: Int, pre: String, label: String, attrs: MetaData,
- pscope: NamespaceBinding, nodes: NodeSeq): NodeSeq =
- Elem(pre, label, attrs, pscope, nodes:_*)
+ pscope: NamespaceBinding, empty: Boolean, nodes: NodeSeq): NodeSeq =
+ Elem(pre, label, attrs, pscope, empty, nodes:_*)
def procInstr(pos: Int, target: String, txt: String) =
ProcInstr(target, txt)
diff --git a/src/library/scala/xml/parsing/ConstructingParser.scala b/src/library/scala/xml/parsing/ConstructingParser.scala
index 78490d4..4044118 100644
--- a/src/library/scala/xml/parsing/ConstructingParser.scala
+++ b/src/library/scala/xml/parsing/ConstructingParser.scala
@@ -1,7 +1,7 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
@@ -16,10 +16,10 @@ import scala.io.Source
object ConstructingParser {
def fromFile(inp: File, preserveWS: Boolean) =
- new ConstructingParser(Source.fromFile(inp), preserveWS) initialize
+ new ConstructingParser(Source.fromFile(inp), preserveWS).initialize
def fromSource(inp: Source, preserveWS: Boolean) =
- new ConstructingParser(inp, preserveWS) initialize
+ new ConstructingParser(inp, preserveWS).initialize
}
/** An xml parser. parses XML and invokes callback methods of a MarkupHandler.
@@ -29,18 +29,18 @@ object ConstructingParser {
*
* {{{
* object parseFromURL {
- * def main(args:Array[String]): Unit = {
- * val url = args(0);
- * val src = scala.io.Source.fromURL(url);
- * val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false); // fromSource initializes automatically
- * val doc = cpa.document();
+ * def main(args: Array[String]) {
+ * val url = args(0)
+ * val src = scala.io.Source.fromURL(url)
+ * val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false) // fromSource initializes automatically
+ * val doc = cpa.document()
*
* // let's see what it is
- * val ppr = new scala.xml.PrettyPrinter(80,5);
- * val ele = doc.docElem;
- * Console.println("finished parsing");
- * val out = ppr.format(ele);
- * Console.println(out);
+ * val ppr = new scala.xml.PrettyPrinter(80, 5)
+ * val ele = doc.docElem
+ * println("finished parsing")
+ * val out = ppr.format(ele)
+ * println(out)
* }
* }
* }}} */
@@ -52,4 +52,3 @@ with MarkupParser {
// default impl. of Logged
override def log(msg: String): Unit = {}
}
-
diff --git a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala b/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
old mode 100644
new mode 100755
index 6cab639..0152e44
--- a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
+++ b/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -12,11 +12,11 @@ package scala.xml
package parsing
-/** default implementation of markup handler always returns NodeSeq.Empty */
+/** Default implementation of markup handler always returns `NodeSeq.Empty` */
abstract class DefaultMarkupHandler extends MarkupHandler {
def elem(pos: Int, pre: String, label: String, attrs: MetaData,
- scope:NamespaceBinding, args: NodeSeq) = NodeSeq.Empty
+ scope:NamespaceBinding, empty: Boolean, args: NodeSeq) = NodeSeq.Empty
def procInstr(pos: Int, target: String, txt: String) = NodeSeq.Empty
diff --git a/src/library/scala/xml/parsing/ExternalSources.scala b/src/library/scala/xml/parsing/ExternalSources.scala
index ca6cea4..aaac588 100644
--- a/src/library/scala/xml/parsing/ExternalSources.scala
+++ b/src/library/scala/xml/parsing/ExternalSources.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -23,11 +23,6 @@ import scala.io.Source
trait ExternalSources {
self: ExternalSources with MarkupParser with MarkupHandler =>
- /** ...
- *
- * @param systemId ...
- * @return ...
- */
def externalSource(systemId: String): Source = {
if (systemId startsWith "http:")
return Source fromURL new URL(systemId)
diff --git a/src/library/scala/xml/parsing/FactoryAdapter.scala b/src/library/scala/xml/parsing/FactoryAdapter.scala
index 67e06ff..5f776f5 100644
--- a/src/library/scala/xml/parsing/FactoryAdapter.scala
+++ b/src/library/scala/xml/parsing/FactoryAdapter.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -49,7 +49,7 @@ abstract class FactoryAdapter extends DefaultHandler with factory.XMLLoader[Node
// abstract methods
/** Tests if an XML element contains text.
- * @return true if element named <code>localName</code> contains text.
+ * @return true if element named `localName` contains text.
*/
def nodeContainsText(localName: String): Boolean // abstract
@@ -158,7 +158,7 @@ abstract class FactoryAdapter extends DefaultHandler with factory.XMLLoader[Node
/** End element.
* @param uri
- * @param localName
+ * @param _localName
* @param qname
* @throws org.xml.sax.SAXException if ..
*/
diff --git a/src/library/scala/xml/parsing/FatalError.scala b/src/library/scala/xml/parsing/FatalError.scala
index d54e187..a8b4f8f 100644
--- a/src/library/scala/xml/parsing/FatalError.scala
+++ b/src/library/scala/xml/parsing/FatalError.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/xml/parsing/MarkupHandler.scala b/src/library/scala/xml/parsing/MarkupHandler.scala
old mode 100644
new mode 100755
index e7f9a68..7028161
--- a/src/library/scala/xml/parsing/MarkupHandler.scala
+++ b/src/library/scala/xml/parsing/MarkupHandler.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,8 +11,7 @@
package scala.xml
package parsing
-import collection.mutable
-import mutable.HashMap
+import scala.collection.mutable
import scala.io.Source
import scala.util.logging.Logged
import scala.xml.dtd._
@@ -32,7 +31,7 @@ abstract class MarkupHandler extends Logged
val isValidating: Boolean = false
var decls: List[Decl] = Nil
- var ent: mutable.Map[String, EntityDecl] = new HashMap[String, EntityDecl]()
+ var ent: mutable.Map[String, EntityDecl] = new mutable.HashMap[String, EntityDecl]()
def lookupElemDecl(Label: String): ElemDecl = {
for (z @ ElemDecl(Label, _) <- decls)
@@ -65,7 +64,6 @@ abstract class MarkupHandler extends Logged
* @param pos the position in the source file
* @param pre the prefix
* @param label the local name
- * @param attrs the attributes (metadata)
*/
def elemEnd(pos: Int, pre: String, label: String): Unit = ()
@@ -76,10 +74,10 @@ abstract class MarkupHandler extends Logged
* @param pre the prefix
* @param label the local name
* @param attrs the attributes (metadata)
+ * @param empty `true` if the element was previously empty; `false` otherwise.
* @param args the children of this element
- * @return ...
*/
- def elem(pos: Int, pre: String, label: String, attrs: MetaData, scope: NamespaceBinding, args: NodeSeq): NodeSeq
+ def elem(pos: Int, pre: String, label: String, attrs: MetaData, scope: NamespaceBinding, empty: Boolean, args: NodeSeq): NodeSeq
/** callback method invoked by MarkupParser after parsing PI.
*/
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
old mode 100644
new mode 100755
index d68b8b6..f9ff54d
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -16,10 +16,9 @@ import Utility.Escapes.{ pairs => unescape }
/**
* An XML parser.
*
- * Parses XML 1.0, invokes callback methods of a MarkupHandler
- * and returns whatever the markup handler returns. Use
- * <code>ConstructingParser</code> if you just want to parse XML to
- * construct instances of <code>scala.xml.Node</code>.
+ * Parses XML 1.0, invokes callback methods of a `MarkupHandler` and returns
+ * whatever the markup handler returns. Use `ConstructingParser` if you just
+ * want to parse XML to construct instances of `scala.xml.Node`.
*
* While XML elements are returned, DTD declarations - if handled - are
* collected using side-effects.
@@ -53,11 +52,11 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
// variables, values
//
- var curInput: Source = input
+ protected var curInput: Source = input
// See ticket #3720 for motivations.
private class WithLookAhead(underlying: Source) extends Source {
- private val queue = collection.mutable.Queue[Char]()
+ private val queue = scala.collection.mutable.Queue[Char]()
def lookahead(): BufferedIterator[Char] = {
val iter = queue.iterator ++ new Iterator[Char] {
def hasNext = underlying.hasNext
@@ -72,7 +71,8 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
def lookahead(): BufferedIterator[Char] = curInput match {
- case curInputWLA:WithLookAhead => curInputWLA.lookahead()
+ case curInputWLA:WithLookAhead =>
+ curInputWLA.lookahead()
case _ =>
val newInput = new WithLookAhead(curInput)
curInput = newInput
@@ -96,7 +96,29 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
var tmppos: Int = _
/** holds the next character */
- var ch: Char = _
+ var nextChNeeded: Boolean = false
+ var reachedEof: Boolean = false
+ var lastChRead: Char = _
+ def ch: Char = {
+ if (nextChNeeded) {
+ if (curInput.hasNext) {
+ lastChRead = curInput.next
+ pos = curInput.pos
+ } else {
+ val ilen = inpStack.length;
+ //Console.println(" ilen = "+ilen+ " extIndex = "+extIndex);
+ if ((ilen != extIndex) && (ilen > 0)) {
+ /** for external source, inpStack == Nil ! need notify of eof! */
+ pop()
+ } else {
+ reachedEof = true
+ lastChRead = 0.asInstanceOf[Char]
+ }
+ }
+ nextChNeeded = false
+ }
+ lastChRead
+ }
/** character buffer, for names */
protected val cbuf = new StringBuilder()
@@ -105,14 +127,15 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
protected var doc: Document = null
- var eof: Boolean = false
+ def eof: Boolean = { ch; reachedEof }
//
// methods
//
- /** <? prolog ::= xml S ... ?>
- */
+ /** {{{
+ * <? prolog ::= xml S ... ?>
+ * }}} */
def xmlProcInstr(): MetaData = {
xToken("xml")
xSpace
@@ -138,14 +161,14 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
xSpaceOpt
m("version") match {
- case null => ;
+ case null =>
case Text("1.0") => info_ver = Some("1.0"); n += 1
case _ => reportSyntaxError("cannot deal with versions != 1.0")
}
m("encoding") match {
- case null => ;
- case Text(enc) =>
+ case null =>
+ case Text(enc) =>
if (!isValidIANAEncoding(enc))
reportSyntaxError("\"" + enc + "\" is not a valid encoding")
else {
@@ -156,7 +179,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
if (isProlog) {
m("standalone") match {
- case null => ;
+ case null =>
case Text("yes") => info_stdl = Some(true); n += 1
case Text("no") => info_stdl = Some(false); n += 1
case _ => reportSyntaxError("either 'yes' or 'no' expected")
@@ -171,9 +194,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
(info_ver, info_enc, info_stdl)
}
- /** <? prolog ::= xml S?
+ /** {{{
+ * <? prolog ::= xml S?
* // this is a bit more lenient than necessary...
- */
+ * }}} */
def prolog(): (Option[String], Option[String], Option[Boolean]) =
prologOrTextDecl(true)
@@ -181,20 +205,19 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def textDecl(): (Option[String], Option[String]) =
prologOrTextDecl(false) match { case (x1, x2, _) => (x1, x2) }
- /**
- *[22] prolog ::= XMLDecl? Misc* (doctypedecl Misc*)?
- *[23] XMLDecl ::= '<?xml' VersionInfo EncodingDecl? SDDecl? S? '?>'
- *[24] VersionInfo ::= S 'version' Eq ("'" VersionNum "'" | '"' VersionNum '"')
- *[25] Eq ::= S? '=' S?
- *[26] VersionNum ::= '1.0'
- *[27] Misc ::= Comment | PI | S
- */
-
+ /** {{{
+ * [22] prolog ::= XMLDecl? Misc* (doctypedecl Misc*)?
+ * [23] XMLDecl ::= '<?xml' VersionInfo EncodingDecl? SDDecl? S? '?>'
+ * [24] VersionInfo ::= S 'version' Eq ("'" VersionNum "'" | '"' VersionNum '"')
+ * [25] Eq ::= S? '=' S?
+ * [26] VersionNum ::= '1.0'
+ * [27] Misc ::= Comment | PI | S
+ * }}} */
def document(): Document = {
doc = new Document()
this.dtd = null
- var info_prolog: Tuple3[Option[String], Option[String], Option[Boolean]] = Tuple3(None, None, None);
+ var info_prolog: (Option[String], Option[String], Option[Boolean]) = (None, None, None)
if ('<' != ch) {
reportSyntaxError("< expected")
return null
@@ -203,7 +226,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
nextch // is prolog ?
var children: NodeSeq = null
if ('?' == ch) {
- nextch;
+ nextch
info_prolog = prolog()
doc.version = info_prolog._1
doc.encoding = info_prolog._2
@@ -212,25 +235,25 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
children = content(TopScope) // DTD handled as side effect
}
else {
- val ts = new NodeBuffer();
- content1(TopScope, ts); // DTD handled as side effect
- ts &+ content(TopScope);
- children = NodeSeq.fromSeq(ts);
- }
- //Console.println("[MarkupParser::document] children now: "+children.toList);
- var elemCount = 0;
- var theNode: Node = null;
+ val ts = new NodeBuffer()
+ content1(TopScope, ts) // DTD handled as side effect
+ ts &+ content(TopScope)
+ children = NodeSeq.fromSeq(ts)
+ }
+ //println("[MarkupParser::document] children now: "+children.toList)
+ var elemCount = 0
+ var theNode: Node = null
for (c <- children) c match {
- case _:ProcInstr => ;
- case _:Comment => ;
+ case _:ProcInstr =>
+ case _:Comment =>
case _:EntityRef => // todo: fix entities, shouldn't be "special"
reportSyntaxError("no entity references allowed here");
case s:SpecialNode =>
- if (s.toString().trim().length > 0) //non-empty text nodes not allowed
- elemCount = elemCount + 2;
+ if (s.toString.trim().length > 0) //non-empty text nodes not allowed
+ elemCount += 2
case m:Node =>
- elemCount = elemCount + 1;
- theNode = m;
+ elemCount += 1
+ theNode = m
}
if (1 != elemCount) {
reportSyntaxError("document must contain exactly one element")
@@ -243,7 +266,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** append Unicode character to name buffer*/
- protected def putChar(c: Char) = cbuf.append(c)
+ protected def putChar(c: Char) = cbuf append c
/** As the current code requires you to call nextch once manually
* after construction, this method formalizes that suboptimal reality.
@@ -253,37 +276,30 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
this
}
- def ch_returning_nextch = { val res = ch ; nextch ; res }
- def mkProcInstr(position: Int, name: String, text: String): NodeSeq =
- handle.procInstr(position, name, text)
+ protected def ch_returning_nextch: Char = { val res = ch; nextch(); res }
- def mkAttributes(name: String, pscope: NamespaceBinding) =
+ def mkAttributes(name: String, pscope: NamespaceBinding): AttributesType =
if (isNameStart (ch)) xAttributes(pscope)
else (Null, pscope)
- /** this method assign the next character to ch and advances in input */
- def nextch = {
- if (curInput.hasNext) {
- ch = curInput.next
- pos = curInput.pos
- } else {
- val ilen = inpStack.length;
- //Console.println(" ilen = "+ilen+ " extIndex = "+extIndex);
- if ((ilen != extIndex) && (ilen > 0)) {
- /** for external source, inpStack == Nil ! need notify of eof! */
- pop()
- } else {
- eof = true
- ch = 0.asInstanceOf[Char]
- }
- }
+ def mkProcInstr(position: Int, name: String, text: String): ElementType =
+ handle.procInstr(position, name, text)
+
+ /** this method tells ch to get the next character when next called */
+ def nextch() {
+ // Read current ch if needed
ch
+
+ // Mark next ch to be required
+ nextChNeeded = true
}
/** parse attribute and create namespace scope, metadata
+ * {{{
* [41] Attributes ::= { S Name Eq AttValue }
+ * }}}
*/
- def xAttributes(pscope:NamespaceBinding): (MetaData,NamespaceBinding) = {
+ def xAttributes(pscope: NamespaceBinding): (MetaData, NamespaceBinding) = {
var scope: NamespaceBinding = pscope
var aMap: MetaData = Null
while (isNameStart(ch)) {
@@ -295,22 +311,22 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
Utility.prefix(qname) match {
case Some("xmlns") =>
- val prefix = qname.substring(6 /*xmlns:*/ , qname.length);
- scope = new NamespaceBinding(prefix, value, scope);
+ val prefix = qname.substring(6 /*xmlns:*/ , qname.length)
+ scope = new NamespaceBinding(prefix, value, scope)
- case Some(prefix) =>
- val key = qname.substring(prefix.length+1, qname.length);
- aMap = new PrefixedAttribute(prefix, key, Text(value), aMap);
+ case Some(prefix) =>
+ val key = qname.substring(prefix.length+1, qname.length)
+ aMap = new PrefixedAttribute(prefix, key, Text(value), aMap)
- case _ =>
+ case _ =>
if( qname == "xmlns" )
- scope = new NamespaceBinding(null, value, scope);
+ scope = new NamespaceBinding(null, value, scope)
else
- aMap = new UnprefixedAttribute(qname, Text(value), aMap);
+ aMap = new UnprefixedAttribute(qname, Text(value), aMap)
}
if ((ch != '/') && (ch != '>') && ('?' != ch))
- xSpace;
+ xSpace
}
if(!aMap.wellformed(scope))
@@ -320,8 +336,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** entity value, terminated by either ' or ". value may not contain <.
+ * {{{
* AttValue ::= `'` { _ } `'`
* | `"` { _ } `"`
+ * }}}
*/
def xEntityValue(): String = {
val endch = ch
@@ -336,10 +354,11 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
str
}
- /** '<! CharData ::= [CDATA[ ( {char} - {char}"]]>"{char} ) ']]>'
+ /** {{{
+ * '<! CharData ::= [CDATA[ ( {char} - {char}"]]>"{char} ) ']]>'
*
- * see [15]
- */
+ * see [15]
+ * }}} */
def xCharData: NodeSeq = {
xToken("[CDATA[")
def mkResult(pos: Int, s: String): NodeSeq = {
@@ -349,10 +368,11 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
xTakeUntil(mkResult, () => pos, "]]>")
}
- /** Comment ::= '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->'
+ /** {{{
+ * Comment ::= '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->'
*
* see [15]
- */
+ * }}} */
def xComment: NodeSeq = {
val sb: StringBuilder = new StringBuilder()
xToken("--")
@@ -378,8 +398,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
}
- /** '<' content1 ::= ... */
- def content1(pscope: NamespaceBinding, ts: NodeBuffer): Unit =
+ /** {{{
+ * '<' content1 ::= ...
+ * }}} */
+ def content1(pscope: NamespaceBinding, ts: NodeBuffer) {
ch match {
case '!' =>
nextch
@@ -395,8 +417,11 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
case _ =>
ts &+ element1(pscope) // child
}
+ }
- /** content1 ::= '<' content1 | '&' charref ... */
+ /** {{{
+ * content1 ::= '<' content1 | '&' charref ...
+ * }}} */
def content(pscope: NamespaceBinding): NodeSeq = {
var ts = new NodeBuffer
var exit = eof
@@ -412,14 +437,14 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
ch match {
case '<' => // another tag
- nextch match {
+ nextch; ch match {
case '/' => exit = true // end tag
case _ => content1(pscope, ts)
}
// postcond: xEmbeddedBlock == false!
case '&' => // EntityRef or CharRef
- nextch match {
+ nextch; ch match {
case '#' => // CharacterRef
nextch
val theChar = handle.text(tmppos, xCharRef(() => ch, () => nextch))
@@ -435,16 +460,16 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
} else push(n)
}
case _ => // text content
- appendText(tmppos, ts, xText);
+ appendText(tmppos, ts, xText)
}
}
done
} // content(NamespaceBinding)
- /** externalID ::= SYSTEM S syslit
+ /** {{{
+ * externalID ::= SYSTEM S syslit
* PUBLIC S pubid S syslit
- */
-
+ * }}} */
def externalID(): ExternalID = ch match {
case 'S' =>
nextch
@@ -464,14 +489,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
/** parses document type declaration and assigns it to instance variable
* dtd.
- *
- * <! parseDTD ::= DOCTYPE name ... >
- */
- def parseDTD(): Unit = { // dirty but fast
- //Console.println("(DEBUG) parseDTD");
+ * {{{
+ * <! parseDTD ::= DOCTYPE name ... >
+ * }}} */
+ def parseDTD() { // dirty but fast
var extID: ExternalID = null
if (this.dtd ne null)
- reportSyntaxError("unexpected character (DOCTYPE already defined");
+ reportSyntaxError("unexpected character (DOCTYPE already defined")
xToken("DOCTYPE")
xSpace
val n = xName
@@ -520,9 +544,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
element1(pscope)
}
- /** '<' element ::= xmlTag1 '>' { xmlExpr | '{' simpleExpr '}' } ETag
- * | xmlTag1 '/' '>'
- */
+ /** {{{
+ * '<' element ::= xmlTag1 '>' { xmlExpr | '{' simpleExpr '}' } ETag
+ * | xmlTag1 '/' '>'
+ * }}} */
def element1(pscope: NamespaceBinding): NodeSeq = {
val pos = this.pos
val (qname, (aMap, scope)) = xTag(pscope)
@@ -544,32 +569,34 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
tmp
}
}
- val res = handle.elem(pos, pre, local, aMap, scope, ts)
+ val res = handle.elem(pos, pre, local, aMap, scope, ts == NodeSeq.Empty, ts)
handle.elemEnd(pos, pre, local)
res
}
- /** parse character data.
- * precondition: xEmbeddedBlock == false (we are not in a scala block)
+ /** Parse character data.
+ *
+ * precondition: `xEmbeddedBlock == false` (we are not in a scala block)
*/
- def xText: String = {
- var exit = false;
+ private def xText: String = {
+ var exit = false
while (! exit) {
- putChar(ch);
- val opos = pos;
- nextch;
+ putChar(ch)
+ val opos = pos
+ nextch
exit = eof || ( ch == '<' ) || ( ch == '&' )
}
- val str = cbuf.toString();
- cbuf.length = 0;
+ val str = cbuf.toString
+ cbuf.length = 0
str
}
/** attribute value, terminated by either ' or ". value may not contain <.
+ * {{{
* AttValue ::= `'` { _ } `'`
* | `"` { _ } `"`
- */
+ * }}} */
def systemLiteral(): String = {
val endch = ch
if (ch != '\'' && ch != '"')
@@ -585,7 +612,9 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
str
}
- /* [12] PubidLiteral ::= '"' PubidChar* '"' | "'" (PubidChar - "'")* "'" */
+ /** {{{
+ * [12] PubidLiteral ::= '"' PubidChar* '"' | "'" (PubidChar - "'")* "'"
+ * }}} */
def pubidLiteral(): String = {
val endch = ch
if (ch!='\'' && ch != '"')
@@ -593,13 +622,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
nextch
while (ch != endch && !eof) {
putChar(ch)
- //Console.println("hello '"+ch+"'"+isPubIDChar(ch));
+ //println("hello '"+ch+"'"+isPubIDChar(ch))
if (!isPubIDChar(ch))
- reportSyntaxError("char '"+ch+"' is not allowed in public id");
+ reportSyntaxError("char '"+ch+"' is not allowed in public id")
nextch
}
nextch
- val str = cbuf.toString()
+ val str = cbuf.toString
cbuf.length = 0
str
}
@@ -609,10 +638,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
//
def extSubset(): Unit = {
- var textdecl:Tuple2[Option[String],Option[String]] = null;
- if (ch=='<') {
+ var textdecl: (Option[String],Option[String]) = null
+ if (ch == '<') {
nextch
- if (ch=='?') {
+ if (ch == '?') {
nextch
textdecl = textDecl()
} else
@@ -627,7 +656,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
xToken('['); while(']' != ch) markupDecl(); nextch // ']'
}
def doIgnore() = {
- xToken('['); while(']' != ch) nextch; nextch; // ']'
+ xToken('['); while(']' != ch) nextch; nextch // ']'
}
if ('?' == ch) {
nextch
@@ -671,8 +700,8 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
stmt match {
// parameter entity
- case "INCLUDE" => doInclude()
- case "IGNORE" => doIgnore()
+ case "INCLUDE" => doInclude()
+ case "IGNORE" => doIgnore()
}
case 'I' =>
nextch
@@ -748,8 +777,9 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
handle.elemDecl(n, cmstr)
}
- /** <! attlist := ATTLIST
- */
+ /** {{{
+ * <! attlist := ATTLIST
+ * }}} */
def attrDecl() = {
xToken("TTLIST")
xSpace
@@ -764,10 +794,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
// could be enumeration (foo,bar) parse this later :-/
while ('"' != ch && '\'' != ch && '#' != ch && '<' != ch) {
if (!isSpace(ch))
- cbuf.append(ch);
- nextch;
+ cbuf.append(ch)
+ nextch
}
- val atpe = cbuf.toString()
+ val atpe = cbuf.toString
cbuf.length = 0
val defdecl: DefaultDecl = ch match {
@@ -793,10 +823,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
handle.attListDecl(n, attList.reverse)
}
- /** <! element := ELEMENT
- */
+ /** {{{
+ * <! element := ELEMENT
+ * }}} */
def entityDecl() = {
- //Console.println("entityDecl()")
var isParameterEntity = false
var entdef: EntityDef = null
xToken("NTITY")
@@ -842,15 +872,16 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
{}
} // entityDecl
- /** 'N' notationDecl ::= "OTATION"
- */
+ /** {{{
+ * 'N' notationDecl ::= "OTATION"
+ * }}} */
def notationDecl() {
xToken("OTATION")
xSpace
val notat = xName
xSpace
val extID = if (ch == 'S') {
- externalID();
+ externalID()
}
else if (ch == 'P') {
/** PublicID (without system, only used in NOTATION) */
@@ -863,24 +894,27 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
systemLiteral()
else
null;
- new PublicID(pubID, sysID);
+ new PublicID(pubID, sysID)
} else {
reportSyntaxError("PUBLIC or SYSTEM expected");
- sys.error("died parsing notationdecl")
+ scala.sys.error("died parsing notationdecl")
}
xSpaceOpt
xToken('>')
handle.notationDecl(notat, extID)
}
- def reportSyntaxError(pos: Int, str: String): Unit = curInput.reportError(pos, str)
- def reportSyntaxError(str: String): Unit = reportSyntaxError(pos, str)
- def reportValidationError(pos: Int, str: String): Unit = reportSyntaxError(pos, str)
+ def reportSyntaxError(pos: Int, str: String) { curInput.reportError(pos, str) }
+ def reportSyntaxError(str: String) { reportSyntaxError(pos, str) }
+ def reportValidationError(pos: Int, str: String) { reportSyntaxError(pos, str) }
def push(entityName: String) {
if (!eof)
inpStack = curInput :: inpStack
+ // can't push before getting next character if needed
+ ch
+
curInput = replacementText(entityName)
nextch
}
@@ -889,6 +923,9 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
if (!eof)
inpStack = curInput :: inpStack
+ // can't push before getting next character if needed
+ ch
+
curInput = externalSource(systemId)
nextch
}
@@ -896,8 +933,9 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def pop() {
curInput = inpStack.head
inpStack = inpStack.tail
- ch = curInput.ch
+ lastChRead = curInput.ch
+ nextChNeeded = false
pos = curInput.pos
- eof = false // must be false, because of places where entity refs occur
+ reachedEof = false // must be false, because of places where entity refs occur
}
}
diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala
index d9729e1..da64048 100644
--- a/src/library/scala/xml/parsing/MarkupParserCommon.scala
+++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -21,7 +21,7 @@ import Utility.SU
* All members should be accessed through those.
*/
private[scala] trait MarkupParserCommon extends TokenTests {
- protected def unreachable = sys.error("Cannot be reached.")
+ protected def unreachable = scala.sys.error("Cannot be reached.")
// type HandleType // MarkupHandler, SymbolicXMLBuilder
type InputType // Source, CharArrayReader
@@ -54,8 +54,8 @@ private[scala] trait MarkupParserCommon extends TokenTests {
xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>")
}
- /** attribute value, terminated by either ' or ". value may not contain <.
- * @param endch either ' or "
+ /** attribute value, terminated by either `'` or `"`. value may not contain `<`.
+ @param endCh either `'` or `"`
*/
def xAttributeValue(endCh: Char): String = {
val buf = new StringBuilder
@@ -82,7 +82,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
case `end` => return buf.toString
case ch => buf append ch
}
- sys.error("Expected '%s'".format(end))
+ scala.sys.error("Expected '%s'".format(end))
}
/** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'
@@ -175,8 +175,8 @@ private[scala] trait MarkupParserCommon extends TokenTests {
* temporarily abstract over the nextchs.
*/
def ch: Char
- def nextch: Char
- def ch_returning_nextch: Char
+ def nextch(): Unit
+ protected def ch_returning_nextch: Char
def eof: Boolean
// def handle: HandleType
@@ -212,7 +212,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
else xHandleError(ch, "whitespace expected")
/** Apply a function and return the passed value */
- def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
+ def returning[T](x: T)(f: T => Unit): T = { f(x); x }
/** Execute body with a variable saved and restored after execution */
def saving[A, B](getter: A, setter: A => Unit)(body: => B): B = {
diff --git a/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala b/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
index 7bbfad9..22dd450 100644
--- a/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
+++ b/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/xml/parsing/TokenTests.scala b/src/library/scala/xml/parsing/TokenTests.scala
index 4cf0c64..c9cafae 100644
--- a/src/library/scala/xml/parsing/TokenTests.scala
+++ b/src/library/scala/xml/parsing/TokenTests.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.xml
package parsing
@@ -16,22 +14,27 @@ package parsing
*/
trait TokenTests {
- /** (#x20 | #x9 | #xD | #xA) */
+ /** {{{
+ * (#x20 | #x9 | #xD | #xA)
+ * }}} */
final def isSpace(ch: Char): Boolean = ch match {
case '\u0009' | '\u000A' | '\u000D' | '\u0020' => true
case _ => false
}
- /** (#x20 | #x9 | #xD | #xA)+ */
+ /** {{{
+ * (#x20 | #x9 | #xD | #xA)+
+ * }}} */
final def isSpace(cs: Seq[Char]): Boolean = cs.nonEmpty && (cs forall isSpace)
/** These are 99% sure to be redundant but refactoring on the safe side. */
def isAlpha(c: Char) = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z')
def isAlphaDigit(c: Char) = isAlpha(c) || (c >= '0' && c <= '9')
- /** NameChar ::= Letter | Digit | '.' | '-' | '_' | ':'
+ /** {{{
+ * NameChar ::= Letter | Digit | '.' | '-' | '_' | ':'
* | CombiningChar | Extender
- *
- * see [4] and Appendix B of XML 1.0 specification
+ * }}}
+ * See [4] and Appendix B of XML 1.0 specification.
*/
def isNameChar(ch: Char) = {
import java.lang.Character._
@@ -40,17 +43,19 @@ trait TokenTests {
isNameStart(ch) || (getType(ch).toByte match {
case COMBINING_SPACING_MARK |
ENCLOSING_MARK | NON_SPACING_MARK |
- MODIFIER_LETTER | DECIMAL_DIGIT_NUMBER => true
- case _ => ".-:" contains ch
+ MODIFIER_LETTER | DECIMAL_DIGIT_NUMBER => true
+ case _ => ".-:" contains ch
})
}
- /** NameStart ::= ( Letter | '_' )
+ /** {{{
+ * NameStart ::= ( Letter | '_' )
+ * }}}
* where Letter means in one of the Unicode general
- * categories { Ll, Lu, Lo, Lt, Nl }
+ * categories `{ Ll, Lu, Lo, Lt, Nl }`.
*
- * We do not allow a name to start with ':'.
- * see [3] and Appendix B of XML 1.0 specification
+ * We do not allow a name to start with `:`.
+ * See [3] and Appendix B of XML 1.0 specification
*/
def isNameStart(ch: Char) = {
import java.lang.Character._
@@ -58,14 +63,15 @@ trait TokenTests {
getType(ch).toByte match {
case LOWERCASE_LETTER |
UPPERCASE_LETTER | OTHER_LETTER |
- TITLECASE_LETTER | LETTER_NUMBER => true
- case _ => ch == '_'
+ TITLECASE_LETTER | LETTER_NUMBER => true
+ case _ => ch == '_'
}
}
- /** Name ::= ( Letter | '_' ) (NameChar)*
- *
- * see [5] of XML 1.0 specification
+ /** {{{
+ * Name ::= ( Letter | '_' ) (NameChar)*
+ * }}}
+ * See [5] of XML 1.0 specification.
*/
def isName(s: String) =
s.nonEmpty && isNameStart(s.head) && (s.tail forall isNameChar)
@@ -75,7 +81,7 @@ trait TokenTests {
("""-\()+,./:=?;!*#@$_%""" contains ch)
/**
- * Returns true if the encoding name is a valid IANA encoding.
+ * Returns `true` if the encoding name is a valid IANA encoding.
* This method does not verify that there is a decoder available
* for this encoding, only that the characters are valid for an
* IANA encoding name.
diff --git a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
index f5f04b4..0edea04 100644
--- a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
+++ b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/xml/parsing/XhtmlEntities.scala b/src/library/scala/xml/parsing/XhtmlEntities.scala
index 8ab229a..1bb8438 100644
--- a/src/library/scala/xml/parsing/XhtmlEntities.scala
+++ b/src/library/scala/xml/parsing/XhtmlEntities.scala
@@ -1,20 +1,19 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
package parsing
import scala.xml.dtd.{ IntDef, ParsedEntityDecl }
-/** <p>
- * (c) David Pollak 2007 WorldWide Conferencing, LLC.
- * </p>
+/**
+ * @author (c) David Pollak 2007 WorldWide Conferencing, LLC.
+ *
*/
object XhtmlEntities {
val entList = List(("quot",34), ("amp",38), ("lt",60), ("gt",62), ("nbsp",160), ("iexcl",161), ("cent",162), ("pound",163), ("curren",164), ("yen",165),
diff --git a/src/library/scala/xml/parsing/XhtmlParser.scala b/src/library/scala/xml/parsing/XhtmlParser.scala
index fb61376..d08cb1f 100644
--- a/src/library/scala/xml/parsing/XhtmlParser.scala
+++ b/src/library/scala/xml/parsing/XhtmlParser.scala
@@ -1,35 +1,29 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
package parsing
import scala.io.Source
-/** <p>
- * An XML Parser that preserves CDATA blocks and knows about HtmlEntities.
- * </p>
- * <p>
- * (c) David Pollak, 2007 WorldWide Conferencing, LLC.
- * </p>
+/** An XML Parser that preserves `CDATA` blocks and knows about
+ * [[scala.xml.parsing.XhtmlEntities]].
+ *
+ * @author (c) David Pollak, 2007 WorldWide Conferencing, LLC.
*/
class XhtmlParser(val input: Source) extends ConstructingHandler with MarkupParser with ExternalSources {
val preserveWS = true
ent ++= XhtmlEntities()
}
-/** <p>
- * Convenience method that instantiates, initializes and runs an XhtmlParser.
- * </p>
- * <p>
- * (c) Burak Emir
- * </p>
+/** Convenience method that instantiates, initializes and runs an `XhtmlParser`.
+ *
+ * @author Burak Emir
*/
object XhtmlParser {
def apply(source: Source): NodeSeq = new XhtmlParser(source).initialize.document
diff --git a/src/library/scala/xml/persistent/CachedFileStorage.scala b/src/library/scala/xml/persistent/CachedFileStorage.scala
index bcdd1a3..916a1a0 100644
--- a/src/library/scala/xml/persistent/CachedFileStorage.scala
+++ b/src/library/scala/xml/persistent/CachedFileStorage.scala
@@ -1,12 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.xml
package persistent
@@ -17,13 +16,12 @@ import java.lang.Thread
import scala.util.logging.Logged
import scala.collection.Iterator
-/** <p>
- * Mutable storage of immutable xml trees. Everything is kept in memory,
- * with a thread periodically checking for changes and writing to file.
- * To ensure atomicity, two files are used, filename1 and '$'+filename1.
- * The implementation switches between the two, deleting the older one
- * after a complete dump of the database has been written.
- * </p>
+/** Mutable storage of immutable xml trees. Everything is kept in memory,
+ * with a thread periodically checking for changes and writing to file.
+ *
+ * To ensure atomicity, two files are used, `filename1` and `'$'+filename1`.
+ * The implementation switches between the two, deleting the older one
+ * after a complete dump of the database has been written.
*
* @author Burak Emir
*/
@@ -31,7 +29,8 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
private val file2 = new File(file1.getParent, file1.getName+"$")
- /** either equals file1 or file2, references the next file in which updates will be stored
+ /** Either equals `file1` or `file2`, references the next file in which
+ * updates will be stored.
*/
private var theFile: File = null
@@ -85,10 +84,10 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
/** saves the XML to file */
private def save() = if (this.dirty) {
- log("[save]\ndeleting "+theFile);
- theFile.delete();
- log("creating new "+theFile);
- theFile.createNewFile();
+ log("[save]\ndeleting "+theFile)
+ theFile.delete()
+ log("creating new "+theFile)
+ theFile.createNewFile()
val fos = new FileOutputStream(theFile)
val c = fos.getChannel()
@@ -97,7 +96,7 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
val w = Channels.newWriter(c, "utf-8")
XML.write(w, storageNode, "utf-8", true, null)
- log("writing to "+theFile);
+ log("writing to "+theFile)
w.close
c.close
@@ -107,19 +106,20 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
log("[save done]")
}
- /** run method of the thread. remember to use start() to start a thread, not run. */
+ /** Run method of the thread. remember to use `start()` to start a thread,
+ * not `run`. */
override def run = {
- log("[run]\nstarting storage thread, checking every "+interval+" ms");
- while(true) {
- Thread.sleep( this.interval );
+ log("[run]\nstarting storage thread, checking every "+interval+" ms")
+ while (true) {
+ Thread.sleep( this.interval )
save
}
}
- /** forces writing of contents to the file, even if there has not been any update. */
+ /** Force writing of contents to the file, even if there has not been any
+ * update. */
def flush() = {
- this.dirty = true;
+ this.dirty = true
save
}
}
-
diff --git a/src/library/scala/xml/persistent/Index.scala b/src/library/scala/xml/persistent/Index.scala
index a0c34af..defaf67 100644
--- a/src/library/scala/xml/persistent/Index.scala
+++ b/src/library/scala/xml/persistent/Index.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/xml/persistent/SetStorage.scala b/src/library/scala/xml/persistent/SetStorage.scala
index 765d2a8..20a5bb6 100644
--- a/src/library/scala/xml/persistent/SetStorage.scala
+++ b/src/library/scala/xml/persistent/SetStorage.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/xml/pull/XMLEvent.scala b/src/library/scala/xml/pull/XMLEvent.scala
index dff81e8..a266380 100644
--- a/src/library/scala/xml/pull/XMLEvent.scala
+++ b/src/library/scala/xml/pull/XMLEvent.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -38,7 +38,7 @@ case class EvElemEnd(pre: String, label: String) extends XMLEvent
case class EvText(text: String) extends XMLEvent
/** An entity reference was encountered.
- * @param the name of the entity, e.g. `gt` when encountering the entity `>`
+ * @param entity the name of the entity, e.g. `gt` when encountering the entity `>`
*/
case class EvEntityRef(entity: String) extends XMLEvent
diff --git a/src/library/scala/xml/pull/XMLEventReader.scala b/src/library/scala/xml/pull/XMLEventReader.scala
old mode 100644
new mode 100755
index 8b7137e..428c305
--- a/src/library/scala/xml/pull/XMLEventReader.scala
+++ b/src/library/scala/xml/pull/XMLEventReader.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -23,7 +23,10 @@ import scala.xml.parsing.{ ExternalSources, MarkupHandler, MarkupParser }
* @author Burak Emir
* @author Paul Phillips
*/
-class XMLEventReader(src: Source) extends ProducerConsumerIterator[XMLEvent] {
+class XMLEventReader(src: Source)
+extends scala.collection.AbstractIterator[XMLEvent]
+ with ProducerConsumerIterator[XMLEvent] {
+
// We implement a pull parser as an iterator, but since we may be operating on
// a stream (e.g. XML over a network) there may be arbitrarily long periods when
// the queue is empty. Fortunately the ProducerConsumerIterator is ideally
@@ -75,9 +78,10 @@ class XMLEventReader(src: Source) extends ProducerConsumerIterator[XMLEvent] {
}
// this is a dummy to satisfy MarkupHandler's API
- // memory usage optimization return one <ignore/> for top level to satisfy MarkupParser.document() otherwise NodeSeq.Empty
+ // memory usage optimization return one <ignore/> for top level to satisfy
+ // MarkupParser.document() otherwise NodeSeq.Empty
private var ignoreWritten = false
- final def elem(pos: Int, pre: String, label: String, attrs: MetaData, pscope: NamespaceBinding, nodes: NodeSeq): NodeSeq =
+ final def elem(pos: Int, pre: String, label: String, attrs: MetaData, pscope: NamespaceBinding, empty: Boolean, nodes: NodeSeq): NodeSeq =
if (level == 1 && !ignoreWritten) {ignoreWritten = true; <ignore/> } else NodeSeq.Empty
def procInstr(pos: Int, target: String, txt: String) = setEvent(EvProcInstr(target, txt))
@@ -111,7 +115,7 @@ trait ProducerConsumerIterator[T >: Null] extends Iterator[T] {
val MaxQueueSize = -1
def interruptibly[T](body: => T): Option[T] = try Some(body) catch {
- case _: InterruptedException => Thread.currentThread.interrupt() ; None
+ case _: InterruptedException => Thread.currentThread.interrupt(); None
case _: ClosedChannelException => None
}
@@ -133,12 +137,14 @@ trait ProducerConsumerIterator[T >: Null] extends Iterator[T] {
// consumer/iterator interface - we need not synchronize access to buffer
// because we required there to be only one consumer.
def hasNext = !eos && (buffer != null || fillBuffer)
+
def next() = {
if (eos) throw new NoSuchElementException("ProducerConsumerIterator")
if (buffer == null) fillBuffer
drainBuffer
}
+
def available() = isElement(buffer) || isElement(queue.peek)
private def drainBuffer() = {
diff --git a/src/library/scala/xml/transform/BasicTransformer.scala b/src/library/scala/xml/transform/BasicTransformer.scala
index 002f86a..1402ccd 100644
--- a/src/library/scala/xml/transform/BasicTransformer.scala
+++ b/src/library/scala/xml/transform/BasicTransformer.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -18,11 +18,6 @@ package transform
*/
abstract class BasicTransformer extends Function1[Node,Node]
{
- /**
- * @param n ...
- * @param ns ...
- * @return ...
- */
protected def unchanged(n: Node, ns: Seq[Node]) =
ns.length == 1 && (ns.head == n)
diff --git a/src/library/scala/xml/transform/RewriteRule.scala b/src/library/scala/xml/transform/RewriteRule.scala
index aea5cab..1dca495 100644
--- a/src/library/scala/xml/transform/RewriteRule.scala
+++ b/src/library/scala/xml/transform/RewriteRule.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/xml/transform/RuleTransformer.scala b/src/library/scala/xml/transform/RuleTransformer.scala
index 86aaa45..85e92e5 100644
--- a/src/library/scala/xml/transform/RuleTransformer.scala
+++ b/src/library/scala/xml/transform/RuleTransformer.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/manual/scala/man1/Command.scala b/src/manual/scala/man1/Command.scala
index cb0fcc8..1cf55cb 100644
--- a/src/manual/scala/man1/Command.scala
+++ b/src/manual/scala/man1/Command.scala
@@ -1,10 +1,14 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Stephane Micheloud
*/
package scala.man1
+/**
+ * @author Stephane Micheloud
+ * @version 1.0
+ */
trait Command {
import _root_.scala.tools.docutil.ManPage._
@@ -23,6 +27,9 @@ trait Command {
protected def CmdOption(opt: String): AbstractText =
Mono(Bold(NDash & opt) & " ")
+ protected def CmdOptionBound(opt: String, params: AbstractText) =
+ Mono(Bold(NDash & opt) & params & " ")
+
protected def CmdOptionLong(opt: String, params: AbstractText) =
Mono(Bold(NDash & NDash & opt) & " " & params & " ")
@@ -46,12 +53,7 @@ trait Command {
def bugs = Section("REPORTING BUGS",
- "Report bugs to " & Mono("http://lampsvn.epfl.ch/trac/scala") & ".")
-
- //private val df = new java.text.SimpleDateFormat("MMM d, yyyy")
- //private val rightNow = new java.util.Date()
-
- def lastModified: String = "April 18, 2007" // df.format(rightNow)
+ "Report bugs to " & Mono("https://issues.scala-lang.org/") & ".")
def manpage: Document
}
diff --git a/src/manual/scala/man1/fsc.scala b/src/manual/scala/man1/fsc.scala
index 03ca391..f2f8feb 100644
--- a/src/manual/scala/man1/fsc.scala
+++ b/src/manual/scala/man1/fsc.scala
@@ -1,15 +1,18 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Stephane Micheloud
*/
package scala.man1
+/**
+ * @author Lex Spoon
+ * @version 1.0
+ */
object fsc extends Command {
import _root_.scala.tools.docutil.ManPage._
protected def cn = new Error().getStackTrace()(0).getClassName()
- override def lastModified = "January 18, 2007"
val name = Section("NAME",
@@ -63,8 +66,8 @@ object fsc extends Command {
"is not needed. Note that the hostname must be for a host that shares " &
"the same filesystem."),
Definition(
- CmdOption("J", Argument("flag")),
- "Pass <flag> directly to the Java VM for the compilation daemon.")
+ CmdOptionBound("J", Argument("flag")),
+ "Pass " & Mono(Argument("flag")) & " directly to the Java VM for the compilation daemon.")
))
val example = Section("EXAMPLE",
@@ -138,7 +141,6 @@ object fsc extends Command {
val seeAlso = Section("SEE ALSO",
- Link(Bold("sbaz") & "(1)", "sbaz.html") & ", " &
Link(Bold("scala") & "(1)", "scala.html") & ", " &
Link(Bold("scalac") & "(1)", "scalac.html") & ", " &
Link(Bold("scaladoc") & "(1)", "scaladoc.html") & ", " &
@@ -146,9 +148,9 @@ object fsc extends Command {
def manpage = new Document {
title = command
- date = lastModified
+ date = "March 2012"
author = "Lex Spoon"
- version = "0.4"
+ version = "0.5"
sections = List(
name,
synopsis,
diff --git a/src/manual/scala/man1/sbaz.scala b/src/manual/scala/man1/sbaz.scala
deleted file mode 100644
index a9c65fa..0000000
--- a/src/manual/scala/man1/sbaz.scala
+++ /dev/null
@@ -1,205 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Stephane Micheloud
- */
-
-package scala.man1
-
-object sbaz extends Command {
- import _root_.scala.tools.docutil.ManPage._
-
- protected val cn = new Error().getStackTrace()(0).getClassName()
-
- val name = Section("NAME",
-
- MBold(command) & " " & NDash & " Scala package sharing tool for the " &
- Link("Scala 2", "http://scala-lang.org/") & " language")
-
- val synopsis = Section("SYNOPSIS",
-
- CmdLine(" [ " & Argument("global_options") & " ] " & Argument("command") &
- " [ " & Argument("command_options") & " ]"))
-
- val parameters = Section("PARAMETERS",
-
- DefinitionList(
- Definition(
- Mono(Argument("global_options")),
- "Command line options. See " & Link(Bold("OPTIONS"), "#options") &
- " below."),
- Definition(
- Mono(Argument("command")),
- "Internal " & MBold(command) & " command."),
- Definition(
- Mono(Argument("command_options")),
- MBold(command) & " command options.")))
-
- val description = Section("DESCRIPTION",
-
- "The " & MBold(command) & " tool is a system used by Scala enthusiasts " &
- "to share computer files with each other. In particular, it makes it " &
- "easy to share libraries and applications.")
-
- val options = Section("OPTIONS",
-
- "The " & MBold(command) & " tool has a set of standard options that are " &
- "supported on the current development environment and will be supported " &
- "in future releases.",
-
- Section("Global Options",
- DefinitionList(
- Definition(
- CmdOption("d", Argument("dir")),
- "Operate on dir as the local managed directory."),
- Definition(
- CmdOption("n") & "| " & CmdOptionLong("dryrun"),
- "Do not actually do anything. Only print out what " +
- "tool would normally do with the following arguments."),
- Definition(
- CmdOption("v") & "| " & CmdOptionLong("verbose"),
- "Output messages about what the " & MBold(command) & " tool is doing"),
- Definition(
- CmdOption("version"),
- "Display the version information"),
-
- Definition(
- CmdOption("-univ") & Argument("name"),
- "Operate on the named remote universe, selected from those " &
- "in the local managed directory's universe. Affects "&
- "the "&MBold("share")&" and "&MBold("retract")&" commands."),
-
- Definition(
- CmdOption("-univ-url") & Argument("url"),
- "Operate on the universe at the specified URL. Affects "&
- "the "&MBold("share")&" and "&MBold("retract")&" commands."))),
-
- Section("Available Commands",
- DefinitionList(
- Definition(
- MBold("available"),
- "List the available packages for installation; only display the " +
- "three most recent versions of each package."),
- Definition(
- MBold("available") & " " & CmdOption("a"),
- "List the available packages for installation; display all shared " +
- "versions of each package."),
- Definition(
- MBold("compact"),
- "Clear the download cache to save space."),
- Definition(
- MBold("help"),
- "Display a help message."),
- Definition(
- MBold("install"),
- "Install a package."),
- Definition(
- MBold("installed"),
- "List the packages that are installed."),
- Definition(
- MBold("keycreate"),
- "Request that a new key be created."),
- Definition(
- MBold("keyforget"),
- "Forget the specified key."),
- Definition(
- MBold("keyknown"),
- "List all known keys."),
- Definition(
- MBold("keyremember"),
- "Remember the specified key for future use."),
- Definition(
- MBold("keyremoteknown"),
- "List all keys known to the bazaar server."),
- Definition(
- MBold("keyrevoke"),
- "Request that a specified key be revoked."),
- Definition(
- MBold("pack") & " " & Argument("name") & " " & Argument("directory") &
- " [ " & Argument("options") & " ]",
- "Create an sbaz package and, if a link base is specified, "&
- "an advertisement file. The package file is named " &
- Mono("name-version.sbp") & ". The advertisement file is named " &
- Mono("name-version.advert") & ". The URL is the advertisement " &
- "file is the URL base with the package filename appended."),
- Definition(
- MBold("remove"),
- "Remove a package."),
- Definition(
- MBold("retract"),
- "Retract a previously shared package."),
- Definition(
- MBold("setuniverse"),
- "Set the universe for a directory."),
- Definition(
- MBold("setup"),
- "Initialize a directory to be managed."),
- Definition(
- MBold("share") & " " & Argument("filename"),
- "Share a package advertisement on a bazaar."),
- Definition(
- MBold("share") & " " & CmdOption("i", Argument("descriptor")),
- "The package advertisement is usually specified in a file, " &
- "but it may also be specified on the command line with the " &
- CmdOption("i") & " option."),
- Definition(
- MBold("share") & " " & CmdOptionLong("template"),
- "If " & CmdOptionLong("template") & " is specified, then instead " &
- "of uploading a description, the command prints out a template " &
- "of a package advertisement."),
- Definition(
- MBold("show"),
- "Show information about one package."),
- Definition(
- MBold("update"),
- "Update the list of available packages."),
- Definition(
- MBold("upgrade"),
- "Upgrade all possible packages."))))
-
- val examples = Section("EXAMPLES",
-
- DefinitionList(
- Definition(
- "Update the list of available packages.",
- CmdLine(MBold("update"))),
- Definition(
- "Upload package description for " & Mono("scala-devel-2.5.1") &
- " to the universe",
- CmdLine(MBold("share") & " scala-devel-2.5.1.advert"))))
-
- val exitStatus = Section("EXIT STATUS",
-
- MBold(command) & " returns a zero exist status if it succeeds to process " &
- "the specified input files. Non zero is returned in case of failure.")
-
- override val authors = Section("AUTHOR",
-
- "Written by Lex Spoon.")
-
- val seeAlso = Section("SEE ALSO",
-
- Link(Bold("fsc") & "(1)", "fsc.html") & ", " &
- Link(Bold("scala") & "(1)", "scala.html") & ", " &
- Link(Bold("scalac") & "(1)", "scalac.html") & ", " &
- Link(Bold("scaladoc") & "(1)", "scaladoc.html") & ", " &
- Link(Bold("scalap") & "(1)", "scalap.html"))
-
- def manpage = new Document {
- title = command
- date = "August 24, 2006"
- author = "Stephane Micheloud"
- version = "0.3"
- sections = List(
- name,
- synopsis,
- parameters,
- description,
- options,
- examples,
- exitStatus,
- authors,
- bugs,
- copyright,
- seeAlso)
- }
-}
diff --git a/src/manual/scala/man1/scala.scala b/src/manual/scala/man1/scala.scala
index e1084e4..dbd4ea5 100644
--- a/src/manual/scala/man1/scala.scala
+++ b/src/manual/scala/man1/scala.scala
@@ -1,10 +1,14 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Stephane Micheloud
*/
package scala.man1
+/**
+ * @author Stephane Micheloud
+ * @version 1.0
+ */
object scala extends Command {
import _root_.scala.tools.docutil.ManPage._
@@ -32,23 +36,23 @@ object scala extends Command {
Link(Bold("scalac") & "(1)", "scalac.html") & "."),
Definition(
- Mono("-howtorun:") & Argument("how"),
+ CmdOptionBound("howtorun:", Argument("how")),
"How to execute " & Argument("torun") & ", if it is present. " &
"Options for " & Argument("how") & " are " & Mono("guess") &
" (the default), " & Mono("script") & ", and " & Mono("object") &
"."),
Definition(
- Mono("-i"),
+ CmdOption("i"),
"Requests that a file be pre-loaded. It is only " &
"meaningful for interactive shells."),
Definition(
- Mono("-e"),
+ CmdOption("e"),
"Requests that its argument be executed as Scala code."),
Definition(
- Mono("-savecompiled"),
+ CmdOption("savecompiled"),
"Save this compiled version of scripts in order to speed up " &
"later executions of the same script. When running a script, " &
"save the compiled version of in a file with the same name as the " &
@@ -57,11 +61,11 @@ object scala extends Command {
"will be used if it is newer than the script file."),
Definition(
- Mono("-nocompdaemon"),
- "Do not use the " & Bold("fsc") & " offline compiler."),
+ CmdOption("nocompdaemon"),
+ "Do not use the " & MBold("fsc") & " offline compiler."),
Definition(
- Mono("-D") & Argument("property=value"),
+ CmdOptionBound("D", "property=value"),
"Set a Java system property. If no value is specified, " &
"then the property is set to the empty string."),
@@ -75,8 +79,8 @@ object scala extends Command {
val description = Section("DESCRIPTION",
- "The "&MBold(command)&" utility runs Scala code using a Java runtime "&
- "environment. The Scala code to run is " &
+ "The " & MBold(command) & " utility runs Scala code using a Java " &
+ "runtime environment. The Scala code to run is " &
"specified in one of three ways:",
NumberedList(
@@ -246,14 +250,13 @@ object scala extends Command {
val seeAlso = Section("SEE ALSO",
Link(Bold("fsc") & "(1)", "fsc.html") & ", " &
- Link(Bold("sbaz") & "(1)", "sbaz.html") & ", " &
Link(Bold("scalac") & "(1)", "scalac.html") & ", " &
Link(Bold("scaladoc") & "(1)", "scaladoc.html") & ", " &
Link(Bold("scalap") & "(1)", "scalap.html"))
def manpage = new Document {
title = command
- date = lastModified
+ date = "April 2007"
author = "Stephane Micheloud"
version = "0.5"
sections = List(
diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala
index f305679..13b1fd5 100644
--- a/src/manual/scala/man1/scalac.scala
+++ b/src/manual/scala/man1/scalac.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Stephane Micheloud
*/
@@ -7,6 +7,7 @@ package scala.man1
/**
* @author Stephane Micheloud
+ * @version 1.0
*/
object scalac extends Command {
import _root_.scala.tools.docutil.ManPage._
@@ -51,41 +52,26 @@ object scalac extends Command {
"current development environment and will be supported in future " &
"releases. An additional set of non-standard options are specific to " &
"the current virtual machine implementation and are subject to change " &
- "in the future. Non-standard options begin with " & Bold("-X") & ".",
+ "in the future. Non-standard options begin with " & MBold("-X") & ".",
Section("Standard Options",
DefinitionList(
Definition(
- CmdOption("g:{none,source,line,vars,notc}"),
- SeqPara(
- Mono("\"none\"") & " generates no debugging info,",
- Mono("\"source\"") & " generates only the source file attribute,",
- Mono("\"line\"") & " generates source and line number information,",
- Mono("\"vars\"") & " generates source, line number and local " &
- "variable information,",
- Mono("\"notc\"") & " generates all of the above and " &
- Italic("will not") & " perform tail call optimization.")),
+ CmdOptionBound("D", "property=value"),
+ "Pass " & CmdOptionBound("D", "property=value") & " directly to the runtime system."),
Definition(
- CmdOption("nowarn"),
- "Generate no warnings"),
+ CmdOptionBound("J", Argument("flag")),
+ "Pass " & Mono(Argument("flag")) & " directly to the runtime system."),
Definition(
- CmdOption("verbose"),
- "Output messages about what the compiler is doing"),
+ CmdOptionBound("P:", Argument("plugin:opt")),
+ "Pass an option to a plugin"),
Definition(
- CmdOption("deprecation"),
- SeqPara(
- "Indicate whether source should be compiled with deprecation " &
- "information; defaults to " & Mono("off") & " (" &
- "accepted values are: " & Mono("on") & ", " & Mono("off") &
- ", " & Mono("yes") & " and " & Mono("no") & ")",
- "Available since Scala version 2.2.1")),
+ CmdOption("X"),
+ "Print a synopsis of advanced options."),
Definition(
- CmdOption("unchecked"),
- SeqPara(
- "Enable detailed unchecked warnings",
- "Non variable type-arguments in type patterns are unchecked " &
- "since they are eliminated by erasure",
- "Available since Scala version 2.3.0")),
+ CmdOption("bootclasspath", Argument("path")),
+ "Override location of bootstrap class files (where to find the " &
+ "standard built-in classes, such as \"" & Mono("scala.List") & "\")."),
Definition(
CmdOption("classpath", Argument("path")),
SeqPara(
@@ -99,19 +85,14 @@ object scalac extends Command {
"include the current directory in the search path, you must " &
"include " & Mono("\".\"") & " in the new settings.")),
Definition(
- CmdOption("sourcepath", Argument("path")),
- "Specify where to find input source files."),
- Definition(
- CmdOption("bootclasspath", Argument("path")),
- "Override location of bootstrap class files (where to find the " &
- "standard built-in classes, such as \"" & Mono("scala.List") & "\")."),
- Definition(
- CmdOption("extdirs", Argument("dirs")),
- "Override location of installed extensions."),
- Definition(
- CmdOption("d", Argument("directory")),
+ CmdOption("d", Argument("directory|jar")),
"Specify where to place generated class files."),
Definition(
+ CmdOption("deprecation"),
+ SeqPara(
+ "Emit warning and location for usages of deprecated APIs.",
+ "Available since Scala version 2.2.1")),
+ Definition(
CmdOption("encoding", Argument("encoding")),
SeqPara(
"Specify character encoding used by source files.",
@@ -122,77 +103,167 @@ object scalac extends Command {
MBold(" scala> ") &
Mono("new java.io.InputStreamReader(System.in).getEncoding"))),
Definition(
- CmdOption("target:", Argument("target")),
+ CmdOption("explaintypes"),
+ "Explain type errors in more detail."),
+ Definition(
+ CmdOption("extdirs", Argument("dirs")),
+ "Override location of installed extensions."),
+ Definition(
+ CmdOptionBound("g:", "{none,source,line,vars,notailcalls}"),
+ SeqPara(
+ Mono("\"none\"") & " generates no debugging info,",
+ Mono("\"source\"") & " generates only the source file attribute,",
+ Mono("\"line\"") & " generates source and line number information,",
+ Mono("\"vars\"") & " generates source, line number and local " &
+ "variable information,",
+ Mono("\"notailcalls\"") & " generates all of the above and " &
+ Italic("will not") & " perform tail call optimization.")),
+ Definition(
+ CmdOption("help"),
+ "Print a synopsis of standard options."),
+ Definition(
+ CmdOption("javabootclasspath", Argument("path")),
+ "Override Java boot classpath."),
+ Definition(
+ CmdOption("javaextdirs", Argument("path")),
+ "Override Java extdirs classpath."),
+ Definition(
+ CmdOption("no-specialization"),
+ "Ignore " & MItalic("@specialize") & " annotations."),
+ Definition(
+ CmdOption("nobootcp"),
+ "Do not use the boot classpath for the Scala jar files."),
+ Definition(
+ CmdOption("nowarn"),
+ "Generate no warnings"),
+ Definition(
+ CmdOption("optimise"),
+ "Generates faster bytecode by applying optimisations to the program."),
+ Definition(
+ CmdOption("print"),
+ "Print program with all Scala-specific features removed."),
+ Definition(
+ CmdOption("sourcepath", Argument("path")),
+ "Specify location(s) of source files."),
+ Definition(
+ CmdOptionBound("target:", Argument("target")),
SeqPara(
"Specify which backend to use (" & Mono("jvm-1.5," &
"msil") & ").",
"The default value is " & Mono("\"jvm-1.5\"") & " (was " &
Mono("\"jvm-1.4\"") & " up to Scala version 2.6.1).")),
Definition(
- CmdOption("print"),
- "Print program with all Scala-specific features removed"
- ),
- Definition(
- CmdOption("optimise"),
- "Generates faster bytecode by applying optimisations to the program"
- ),
+ CmdOption("toolcp", Argument("path")),
+ "Add to the runner classpath."),
Definition(
- CmdOption("explaintypes"),
- "Explain type errors in more detail."),
+ CmdOption("unchecked"),
+ SeqPara(
+ "Enable detailed unchecked (erasure) warnings",
+ "Non variable type-arguments in type patterns are unchecked " &
+ "since they are eliminated by erasure",
+ "Available since Scala version 2.3.0")),
Definition(
CmdOption("uniqid"),
- "Print identifiers with unique names (debugging option)."),
+ "Uniquely tag all identifiers in debugging output."),
+ Definition(
+ CmdOption("verbose"),
+ "Output messages about what the compiler is doing"),
Definition(
CmdOption("version"),
"Print product version and exit."),
Definition(
- /*CmdOption("?") & "| " &*/ CmdOption("help"),
- "Print a synopsis of standard options."))),
+ Mono(Bold("@") & Argument("file")),
+ "A text file containing compiler arguments (options and source files)")
+
+ // TODO - Add macros an dsuch here.
+ )
+ ),
Section("Advanced Options",
DefinitionList(
Definition(
- CmdOption("Xassem", Argument("file")),
- "Name of the output assembly (only relevant with -target:msil)"),
+ CmdOption("Xassem-extdirs", Argument("dirs")),
+ "(Requires " & Mono("-target:msil") &
+ ") List of directories containing assemblies." &
+ " default:" & Mono("lib") & "."),
+ Definition(
+ CmdOption("Xassem-name", Argument("file")),
+ "(Requires " & Mono("-target:msil") &
+ ") Name of the output assembly."),
Definition(
CmdOption("Xassem-path", Argument("path")),
- "List of assemblies referenced by the program (only relevant with -target:msil)"),
+ "(Requires " & Mono("-target:msil") &
+ ") List of assemblies referenced by the program."),
Definition(
CmdOption("Xcheck-null"),
- "Emit warning on selection of nullable reference"),
+ "Warn upon selection of nullable reference"),
+ Definition(
+ CmdOption("Xcheckinit"),
+ "Wrap field accessors to throw an exception on uninitialized access."),
Definition(
CmdOption("Xdisable-assertions"),
"Generate no assertions and assumptions"),
Definition(
+ CmdOption("Xelide-below", Argument("n")),
+ "Calls to " & MItalic("@elidable") &
+ " methods are omitted if method priority is lower than argument."),
+ Definition(
CmdOption("Xexperimental"),
- "enable experimental extensions"),
+ "Enable experimental extensions"),
+ Definition(
+ CmdOption("Xfatal-warnings"),
+ "Fail the compilation if there are any warnings."),
+ Definition(
+ CmdOption("Xfuture"),
+ "Turn on future language features."),
+ Definition(
+ CmdOption("Xgenerate-phase-graph", Argument("file")),
+ "Generate the phase graphs (outputs .dot files) to fileX.dot."),
+ Definition(
+ CmdOption("Xlint"),
+ "Enable recommended additional warnings."),
+ Definition(
+ CmdOption("Xlog-implicits"),
+ "Show more detail on why some implicits are not applicable."),
+ Definition(
+ CmdOption("Xmax-classfile-name", Argument("n")),
+ "Maximum filename length for generated classes."),
+ Definition(
+ CmdOption("Xmigration"),
+ "Warn about constructs whose behavior may have changed between 2.7 and 2.8."),
+ Definition(
+ CmdOption("Xno-forwarders"),
+ "Do not generate static forwarders in mirror classes."),
Definition(
CmdOption("Xno-uescape"),
"Disable handling of " & BSlash & "u unicode escapes"),
Definition(
- CmdOption("Xplug-types"),
- "Parse but ignore annotations in more locations"),
+ CmdOption("Xnojline"),
+ "Do not use JLine for editing."),
Definition(
- CmdOption("Xplugin:", Argument("file")),
+ CmdOptionBound("Xplugin:", Argument("file")),
"Load a plugin from a file"),
Definition(
- CmdOption("Xplugin-disable:", Argument("plugin")),
+ CmdOptionBound("Xplugin-disable:", Argument("plugin")),
"Disable a plugin"),
Definition(
CmdOption("Xplugin-list"),
"Print a synopsis of loaded plugins"),
Definition(
- CmdOption("Xplugin-opt:", Argument("plugin:opt")),
- "Pass an option to a plugin"),
+ CmdOptionBound("Xplugin-require:", Argument("plugin")),
+ "Abort unless the given plugin(s) are available"),
Definition(
- CmdOption("Xplugin-require:", Argument("plugin")),
- "Abort unless a plugin is available"),
+ CmdOption("Xpluginsdir", Argument("path")),
+ "Path to search compiler plugins."),
Definition(
- CmdOption("Xprint:", Argument("phases")),
+ CmdOptionBound("Xprint:", Argument("phases")),
"Print out program after " & Argument("phases") & " (see below)."),
Definition(
+ CmdOption("Xprint-icode"),
+ "Log internal icode to *.icode files."),
+ Definition(
CmdOption("Xprint-pos"),
- "Print tree positions (as offsets)"),
+ "Print tree positions, as offsets."),
Definition(
CmdOption("Xprint-types"),
"Print tree types (debugging option)."),
@@ -204,11 +275,14 @@ object scalac extends Command {
"Compiler stays resident, files to compile are read from standard " &
"input."),
Definition(
+ CmdOption("Xscript", Argument("object")),
+ "Treat the source file as a script and wrap it in a main method."),
+ Definition(
CmdOption("Xshow-class", Argument("class")),
- "Show class info."),
+ "Show internal representation of class."),
Definition(
CmdOption("Xshow-object", Argument("object")),
- "Show object info."),
+ "Show internal representation of object."),
Definition(
CmdOption("Xshow-phases"),
"Print a synopsis of compiler phases."),
@@ -216,8 +290,15 @@ object scalac extends Command {
CmdOption("Xsource-reader", Argument("classname")),
"Specify a custom method for reading source files."),
Definition(
- CmdOption("Xscript", Argument("object")),
- "Compile as a script, wrapping the code into object.main().")
+ CmdOption("Xsourcedir", Argument("path")),
+ "(Requires " & Mono("-target:msil") &
+ ") Mirror source folder structure in output directory.."),
+ Definition(
+ CmdOption("Xverify"),
+ "Verify generic signatures in generated bytecode."),
+ Definition(
+ CmdOption("Y"),
+ "Print a synopsis of private options.")
)
),
@@ -345,20 +426,20 @@ object scalac extends Command {
val seeAlso = Section("SEE ALSO",
Link(Bold("fsc") & "(1)", "fsc.html") & ", " &
- Link(Bold("sbaz") & "(1)", "sbaz.html") & ", " &
Link(Bold("scala") & "(1)", "scala.html") & ", " &
Link(Bold("scaladoc") & "(1)", "scaladoc.html") & ", " &
Link(Bold("scalap") & "(1)", "scalap.html"))
def manpage = new Document {
title = command
- date = lastModified // e.g. "June 8, 2006"
- author = "Stephane Micheloud & LAMP"
- version = "0.4"
+ date = "March 2012"
+ author = "Stephane Micheloud"
+ version = "1.0"
sections = List(
name,
synopsis,
parameters,
+ description,
options,
environment,
examples,
diff --git a/src/manual/scala/man1/scaladoc.scala b/src/manual/scala/man1/scaladoc.scala
index 420bb08..1737c5e 100644
--- a/src/manual/scala/man1/scaladoc.scala
+++ b/src/manual/scala/man1/scaladoc.scala
@@ -1,11 +1,14 @@
/* NSC -- new Scala compiler
- * Copyright LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Stephane Micheloud
- * @author Gilles Dubochet
*/
package scala.man1
+/**
+ * @author Gilles Dubochet
+ * @version 1.0
+ */
object scaladoc extends Command {
import _root_.scala.tools.docutil.ManPage._
@@ -46,7 +49,7 @@ object scaladoc extends Command {
// tags are defined in class "scala.tools.nsc.doc.DocGenerator"
"The recognised format of comments in source is described in the " & Link("online documentation",
- "http://lampsvn.epfl.ch/trac/scala/wiki/Scaladoc"))
+ "https://wiki.scala-lang.org/display/SW/Scaladoc"))
val options = Section("OPTIONS",
@@ -72,7 +75,10 @@ object scaladoc extends Command {
"Define the overall version number of the documentation, typically the version of the library being documented."),
Definition(
CmdOption("doc-source-url", Argument("url")),
- "Define a URL to be concatenated with source locations for link to source files."))),
+ "Define a URL to be concatenated with source locations for link to source files."),
+ Definition(
+ CmdOption("doc-external-doc", Argument("external-doc")),
+ "Define a comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies."))),
Section("Compiler Options",
DefinitionList(
@@ -122,26 +128,25 @@ object scaladoc extends Command {
val exitStatus = Section("EXIT STATUS",
- MBold(command) & " returns a zero exist status if it succeeds to process " &
+ MBold(command) & " returns a zero exit status if it succeeds at processing " &
"the specified input files. Non zero is returned in case of failure.")
override val authors = Section("AUTHORS",
"This version of Scaladoc was written by Gilles Dubochet with contributions by Pedro Furlanetto and Johannes Rudolph. " &
- "It is based on the original Scaladoc (Sean McDirmid, Geoffrey Washburn, Vincent Cremet and Stéphane Michleoud), " &
+ "It is based on the original Scaladoc (Sean McDirmid, Geoffrey Washburn, Vincent Cremet and Stéphane Micheloud), " &
"on vScaladoc (David Bernard), as well as on an unreleased version of Scaladoc 2 (Manohar Jonnalagedda).")
val seeAlso = Section("SEE ALSO",
Link(Bold("fsc") & "(1)", "fsc.html") & ", " &
- Link(Bold("sbaz") & "(1)", "sbaz.html") & ", " &
Link(Bold("scala") & "(1)", "scala.html") & ", " &
Link(Bold("scalac") & "(1)", "scalac.html") & ", " &
Link(Bold("scalap") & "(1)", "scalap.html"))
def manpage = new Document {
title = command
- date = "2 June 2010"
+ date = "June 2010"
author = "Gilles Dubochet"
version = "2.0"
sections = List(
diff --git a/src/manual/scala/man1/scalap.scala b/src/manual/scala/man1/scalap.scala
index a77e49c..472b522 100644
--- a/src/manual/scala/man1/scalap.scala
+++ b/src/manual/scala/man1/scalap.scala
@@ -1,10 +1,14 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Stephane Micheloud
*/
package scala.man1
+/**
+ * @author Stephane Micheloud
+ * @version 1.0
+ */
object scalap extends Command {
import _root_.scala.tools.docutil.ManPage._
@@ -82,16 +86,15 @@ object scalap extends Command {
val seeAlso = Section("SEE ALSO",
Link(Bold("fsc") & "(1)", "fsc.html") & ", " &
- Link(Bold("sbaz") & "(1)", "sbaz.html") & ", " &
Link(Bold("scala") & "(1)", "scala.html") & ", " &
Link(Bold("scalac") & "(1)", "scalac.html") & ", " &
Link(Bold("scaladoc") & "(1)", "scaladoc.html"))
def manpage = new Document {
title = command
- date = "June 8, 2006"
+ date = "June 2006"
author = "Stephane Micheloud"
- version = "0.2"
+ version = "1.0"
sections = List(
name,
synopsis,
diff --git a/src/manual/scala/tools/docutil/EmitHtml.scala b/src/manual/scala/tools/docutil/EmitHtml.scala
index 394e32c..731123c 100644
--- a/src/manual/scala/tools/docutil/EmitHtml.scala
+++ b/src/manual/scala/tools/docutil/EmitHtml.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Stephane Micheloud
* Adapted from Lex Spoon's sbaz manual
*/
@@ -20,110 +20,110 @@ object EmitHtml {
/* */
def emitSection(section: Section, depth: Int) {
def emitPara(text: AbstractText) {
- out.println("<div>")
+ out println "<div>"
emitText(text)
- out.println("\n</div>")
+ out println "\n</div>"
}
def emitText(text: AbstractText) {
text match {
case seq:SeqText =>
- seq.components.foreach(emitText)
+ seq.components foreach emitText
case seq:SeqPara =>
- seq.components.foreach(emitPara)
+ seq.components foreach emitPara
case Text(text) =>
- out.print(escape(text))
+ out print escape(text)
case BSlash =>
- out.print("\\")
+ out print "\\"
case MDash =>
- out.print("—")
+ out print "—"
case NDash =>
- out.print("–")
+ out print "–"
case Bold(text) =>
- out.print("<b>")
+ out print "<b>"
emitText(text)
- out.print("</b>")
+ out print "</b>"
case Italic(text) =>
- out.print("<i>")
+ out print "<i>"
emitText(text)
- out.print("</i>")
+ out print "</i>"
case Emph(text) =>
- out.print("<em>")
+ out print "<em>"
emitText(text)
- out.print("</em>")
+ out print "</em>"
case Mono(text) =>
- out.print("<code>")
+ out print "<code>"
emitText(text)
- out.print("</code>")
+ out print "</code>"
case Quote(text) =>
- out.print("\"")
+ out print "\""
emitText(text)
- out.print("\"")
+ out print "\""
case DefinitionList(definitions @ _*) =>
- out.println("<ins><dl>")
+ out println "<ins><dl>"
for (d <- definitions) {
- out.println("<dt>")
+ out println "<dt>"
emitText(d.term)
- out.println("\n</dt>")
- out.println("<dd>")
+ out println "\n</dt>"
+ out println "<dd>"
emitText(d.description)
- out.println("</dd>")
+ out println "</dd>"
}
- out.println("</dl></ins>")
+ out println "</dl></ins>"
case Link(label, url) =>
out.print("<a href=\"" + url + "\">")
emitText(label)
- out.print("</a>")
+ out print "</a>"
case _ =>
- error("unknown text node: " + text)
+ sys.error("unknown text node: " + text)
}
}
def emitParagraph(para: Paragraph) {
para match {
case TextParagraph(text) =>
- out.println("<p>")
+ out println "<p>"
emitText(text)
- out.println("</p>")
+ out println "</p>"
case BlockQuote(text) =>
- out.println("<blockquote><p>")
+ out println "<blockquote><p>"
emitText(text)
- out.println("</p></blockquote>")
+ out println "</p></blockquote>"
case CodeSample(text) =>
- out.print("<pre>")
- out.print(escape(text))
- out.println("</pre>")
+ out print "<pre>"
+ out print escape(text)
+ out println "</pre>"
case lst:BulletList =>
- out.println("<ul>")
+ out println "<ul>"
for (item <- lst.items) {
- out.print("<li>")
+ out print "<li>"
emitText(item)
- out.println("</li>")
+ out println "</li>"
}
- out.println("</ul>")
+ out println "</ul>"
case lst:NumberedList =>
- out.println("<ol>")
+ out println "<ol>"
for (item <- lst.items) {
- out.print("<li>")
+ out print "<li>"
emitText(item)
}
- out.println("</ol>")
+ out println "</ol>"
case TitledPara(title, text) =>
out.println("<p><strong>" + escape(title) + "</strong></p>")
@@ -133,7 +133,7 @@ object EmitHtml {
emitSection(sect, depth + 1)
case _ =>
- error("unknown paragraph node: " + para)
+ sys.error("unknown paragraph node: " + para)
}
}
@@ -141,33 +141,33 @@ object EmitHtml {
out.println("\n<h" + depth + " id=\"" + name + "\">" +
section.title +
"</h" + depth + ">")
- section.paragraphs.foreach(emitParagraph)
+ section.paragraphs foreach emitParagraph
}
private def emit3columns(col1: String, col2: String, col3: String) {
- out.println("<div style=\"float:left;\">")
- out.println(col1)
- out.println("</div>")
- out.println("<div style=\"float:right;\">")
- out.println(col3)
- out.println("</div>")
- out.println("<div style=\"text-align:center;\">")
- out.println(col2)
- out.println("</div>")
+ out println "<div style=\"float:left;\">"
+ out println col1
+ out println "</div>"
+ out println "<div style=\"float:right;\">"
+ out println col3
+ out println "</div>"
+ out println "<div style=\"text-align:center;\">"
+ out println col2
+ out println "</div>"
}
private def emitHeader(col1: String, col2: String, col3: String) {
- out.println("<!-- header -->")
- out.println("<div style=\"margin: 0 0 2em 0;\">")
+ out println "<!-- header -->"
+ out println "<div style=\"margin: 0 0 2em 0;\">"
emit3columns(col1, col2, col3)
- out.println("</div>")
+ out println "</div>"
}
private def emitFooter(col1: String, col2: String, col3: String) {
- out.println("<!-- footer -->")
- out.println("<div style=\"margin: 2em 0 0 0;\">")
+ out println "<!-- footer -->"
+ out println "<div style=\"margin: 2em 0 0 0;\">"
emit3columns(col1, col2, col3)
- out.println("</div>")
+ out println "</div>"
}
def emitDocument(document: Document) {
@@ -175,190 +175,52 @@ object EmitHtml {
out.println("<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.1//EN\" \"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd\">")
out.println("<html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\">\n")
- out.println("<head>")
+ out println "<head>"
out.println("<title>" + document.title + " man page</title>")
out.println("<meta http-equiv=\"Content-Language\" content=\"en\"/>")
out.println("<meta http-equiv=\"Content-Type\" content=\"text/html; charset=" +
document.encoding + "\"/>")
out.println("<meta name=\"Author\" content=\"" + document.author + "\"/>")
- out.println("<style type=\"text/css\">")
- out.println(" <!--")
- out.println(" blockquote, pre { margin:1em 4em 1em 4em; }")
- out.println(" dt { margin: 0.6em 0 0 0; }")
- out.println(" p { margin:0.6em 2em 0.6em 2em; text-align:justify; }")
- out.println(" //-->")
- out.println("</style>")
- out.println("</head>\n")
-
- out.println("<body>")
+ out println "<style type=\"text/css\">"
+ out println " <!--"
+ out println " blockquote, pre { margin:1em 4em 1em 4em; }"
+ out println " dt { margin: 0.6em 0 0 0; }"
+ out println " p { margin:0.6em 2em 0.6em 2em; text-align:justify; }"
+ out println " //-->"
+ out println "</style>"
+ out println "</head>\n"
+
+ out println "<body>"
val name = document.title + "(" + document.category.id + ")"
emitHeader(name, "" + document.category, name)
- document.sections.foreach(s => emitSection(s, 3))
+ document.sections foreach (s => emitSection(s, 3))
emitFooter("version " + document.version, document.date, name)
- out.println("</body>")
- out.println("</html>")
+ out println "</body>"
+ out println "</html>"
}
-/* */
-/*
- private def group(ns: Iterable[NodeSeq]): NodeSeq = {
- val zs = new NodeBuffer
- for (z <- ns) { zs &+ z }
- zs
- }
-
- def emitSection(section: Section, depth: int): NodeSeq = {
- def emitText(text: AbstractText): NodeSeq = text match {
- case seq:SeqText =>
- group(seq.components.toList.map(item => emitText(item)))
-
- case Text(text) =>
- scala.xml.Text(escape(text))
-
- case MDash =>
- scala.xml.Text("—")
-
- case NDash =>
- scala.xml.Text("–")
-
- case Bold(text) =>
- <b>{emitText(text)}</b>
-
- case Italic(text) =>
- <i>{emitText(text)}</i>
-
- case Emph(text) =>
- <em>{emitText(text)}</em>
-
- case Mono(text) =>
- <code>{emitText(text)}</code>
-
- case Quote(text) =>
- emitText("\"" & text & "\"")
-
- case DefinitionList(definitions @ _*) =>
- <ins><dl>
- {definitions.toList.map(d =>
- <dt>{emitText(d.term)}</dt>
- <dd>{emitText(d.description)}</dd>
- )}
- </dl></ins>
-
- case Link(label, url) =>
- <a href={url}>{emitText(label)}</a>
-
- case _ =>
- error("unknown text node " + text)
- }
-
- def emitParagraph(para: Paragraph): NodeSeq = para match {
- case TextParagraph(text) =>
- <p>{emitText(text)}</p>
- case BlockQuote(text) =>
- <blockquote>{emitText(text)}</blockquote>
-
- case CodeSample(text) =>
- <blockquote><pre>{escape(text)}</pre></blockquote>
-
- case lst:BulletList =>
- <ul>
- {lst.items.toList.map(item => <li>{emitText(item)}</li>)}
- </ul>
-
- case lst:NumberedList =>
- <ol>
- {lst.items.toList.map(item => <li>{emitText(item)}</li>)}
- </ol>
-
- case TitledPara(title, text) =>
- <p><strong>{escape(title)}</strong></p>
- {emitText(text)}
-
- case EmbeddedSection(sect) =>
- {emitSection(sect, depth + 1)}
-
- case _ =>
- error("unknown paragraph node " + para)
- }
-
- val name = section.title.replaceAll("\\p{Space}", "_").toLowerCase()
- <h3 id={name}>{section.title}</h3>.concat(
- group(section.paragraphs.toList.map(p => emitParagraph(p))))
- }
-
- private def emit3columns(col1: String, col2: String, col3: String): NodeSeq =
- <div style="float:left;">{col1}</div>
- <div style="float:right;">{col3}</div>
- <div style="text-align:center;">{col2}</div>
- <div style="clear:both;"></div>
-
- private def emitHeader(col1: String, col2: String, col3: String): NodeSeq =
- <div style="margin: 0 0 2em 0;">
- {emit3columns(col1, col2, col3)}
- </div>
-
- private def emitFooter(col1: String, col2: String, col3: String): NodeSeq = {
- scala.xml.Comment("footer")
- <div style="margin: 2em 0 0 0;">
- {emit3columns(col1, col2, col3)}
- </div>
+ def main(args: Array[String]) = args match{
+ case Array(classname) => emitHtml(classname)
+ case Array(classname, file, _*) => emitHtml(classname, new java.io.FileOutputStream(file))
+ case _ => sys.exit(1)
}
- def emitDocument(document: Document, addDocType: Boolean) = {
- val name = document.title + "(" + document.category.id + ")"
- val doc =
- <html xml:lang="en">
- <head>
- <title>{document.title}</title>
- <meta http-equiv="Content-Language" content="en"/>
- <meta http-equiv="Content-Type" content={"text/html; charset=" + document.encoding}/>
- <meta name="Author" content={document.author}/>
- <style type="text/css">
- {" blockquote, pre { margin:1em 4em 1em 4em; }\n" +
- " p { margin:1em 2em 1em 2em; text-align:justify; }\n"}
- </style>
- </head>
- <body>
- {emitHeader(name, "" + document.category, name)}
- {document.sections.map(s => emitSection(s, 2))}
- {emitFooter("version " + document.version, document.date, name)}
- </body>
- </html>
- out.println(doc)
-/*
- val w = new java.io.StringWriter
- val id = scala.xml.dtd.PublicID("PUBLIC", null)
- val dtd = null //scala.xml.dtd.DEFAULT(true, "")
- val doctype = scala.xml.dtd.DocType("html", id, null) //List(dtd))
- XML.write(w, doc, document.encoding, true/ *xmlDecl* /, doctype)
- out.println(w.toString())
-*/
- }
-*/
- def main(args: Array[String]) {
- if (args.length < 1) {
- System.err.println("usage: EmitHtml <classname>")
- exit(1)
- }
+ def emitHtml(classname: String, outStream: java.io.OutputStream = out.out) {
+ if(outStream != out.out) out setOut outStream
try {
val cl = this.getClass.getClassLoader()
- val clasz = cl.loadClass(args(0))
- val meth = clasz.getDeclaredMethod("manpage")
+ val clasz = cl loadClass classname
+ val meth = clasz getDeclaredMethod "manpage"
val doc = meth.invoke(null).asInstanceOf[Document]
emitDocument(doc)
} catch {
case ex: Exception =>
ex.printStackTrace()
- System.err.println("Error in EmitHtml")
- exit(1)
+ System.err println "Error in EmitManPage"
+ sys.exit(1)
}
}
-
- def emitHtml(classname: String, outStream: java.io.OutputStream) {
- out.setOut(outStream)
- main(Array(classname))
- }
}
diff --git a/src/manual/scala/tools/docutil/EmitManPage.scala b/src/manual/scala/tools/docutil/EmitManPage.scala
index bab6852..c30e847 100644
--- a/src/manual/scala/tools/docutil/EmitManPage.scala
+++ b/src/manual/scala/tools/docutil/EmitManPage.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Stephane Micheloud
* Adapted from Lex Spoon's sbaz manual
*/
@@ -21,34 +21,34 @@ object EmitManPage {
def emitSection(section: Section, depth: Int) {
def emitPara(text: AbstractText) {
emitText(text)
- out.println("\n.IP")
+ out println "\n.IP"
}
def emitText(text: AbstractText) {
text match {
case seq:SeqText =>
- seq.components.foreach(emitText)
+ seq.components foreach emitText
case seq:SeqPara =>
- seq.components.foreach(emitPara)
+ seq.components foreach emitPara
case Text(text) =>
- out.print(escape(text))
+ out print escape(text)
case BSlash =>
- out.print("\\e")
+ out print "\\e"
case NDash | MDash =>
- out.print("\\-")
+ out print "\\-"
case Bold(text) =>
- out.print("\\fB")
+ out print "\\fB"
emitText(text)
- out.print("\\fR")
+ out print "\\fR"
case Italic(text) =>
- out.print("\\fI")
+ out print "\\fI"
emitText(text)
- out.print("\\fR")
+ out print "\\fR"
case Emph(text) =>
out.print("\\fI")
@@ -68,7 +68,7 @@ object EmitManPage {
case DefinitionList(definitions @ _*) =>
var n = definitions.length
for (d <- definitions) {
- out.println(".TP")
+ out println ".TP"
emitText(d.term)
out.println
emitText(d.description)
@@ -79,30 +79,30 @@ object EmitManPage {
emitText(label)
case _ =>
- error("unknown text node: " + text)
+ sys.error("unknown text node: " + text)
}
}
def emitParagraph(para: Paragraph) {
para match {
case TextParagraph(text) =>
- out.println(".PP")
+ out println ".PP"
emitText(text)
out.println
case BlockQuote(text) =>
- out.println(".TP")
+ out println ".TP"
emitText(text)
out.println
case CodeSample(text) =>
- out.println("\n.nf")
+ out println "\n.nf"
out.print(text)
- out.println("\n.fi")
+ out println "\n.fi"
case lst:BulletList =>
for (item <- lst.items) {
- out.println(".IP")
+ out println ".IP"
emitText(item)
out.println
}
@@ -118,68 +118,70 @@ object EmitManPage {
}
case TitledPara(title, text) =>
- out.println(".PP")
- out.print("\\fB")
+ out println ".PP"
+ out print "\\fB"
emitText(title)
- out.print("\\fR")
+ out print "\\fR"
emitText(text)
case EmbeddedSection(sect) =>
emitSection(sect, depth + 1)
case _ =>
- error("unknown paragraph node: " + para)
+ sys.error("unknown paragraph node: " + para)
}
}
- out.println(".\\\"")
+ out println ".\\\""
out.println(".\\\" ############################## " + section.title + " ###############################")
- out.println(".\\\"")
+ out println ".\\\""
val tag = if (depth > 1) ".SS" else ".SH"
val title =
if (section.title.indexOf(" ") > 0) "\"" + section.title + "\""
else section.title
out.println(tag + " " + title)
- section.paragraphs.foreach(emitParagraph)
+ section.paragraphs foreach emitParagraph
}
def emitDocument(doc: Document) {
- out.println(".\\\" ##########################################################################")
- out.println(".\\\" # __ #")
- out.println(".\\\" # ________ ___ / / ___ Scala 2 On-line Manual Pages #")
- out.println(".\\\" # / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL #")
- out.println(".\\\" # __\\ \\/ /__/ __ |/ /__/ __ | #")
- out.println(".\\\" # /____/\\___/_/ |_/____/_/ | | http://scala-lang.org/ #")
- out.println(".\\\" # |/ #")
- out.println(".\\\" ##########################################################################")
- out.println(".\\\"")
- out.println(".\\\" Process this file with nroff -man scala.1")
- out.println(".\\\"")
+ out println ".\\\" ##########################################################################"
+ out println ".\\\" # __ #"
+ out println ".\\\" # ________ ___ / / ___ Scala 2 On-line Manual Pages #"
+ out println ".\\\" # / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL #"
+ out println ".\\\" # __\\ \\/ /__/ __ |/ /__/ __ | #"
+ out println ".\\\" # /____/\\___/_/ |_/____/_/ | | http://scala-lang.org/ #"
+ out println ".\\\" # |/ #"
+ out println ".\\\" ##########################################################################"
+ out println ".\\\""
+ out println ".\\\" Process this file with nroff -man scala.1"
+ out println ".\\\""
out.println(".TH " + doc.title + " " + doc.category.id +
" \"" + doc.date + "\" \"version " + doc.version +
"\" \"" + doc.category + "\"")
- doc.sections.foreach(s => emitSection(s, 1))
+ doc.sections foreach (s => emitSection(s, 1))
+ }
+
+ def main(args: Array[String]) = args match{
+ case Array(classname) => emitManPage(classname)
+ case Array(classname, file, _*) => emitManPage(classname, new java.io.FileOutputStream(file))
+ case _ => sys.exit(1)
}
- def main(args: Array[String]) {
+ def emitManPage(classname: String, outStream: java.io.OutputStream = out.out) {
+ if(outStream != out.out) out setOut outStream
try {
val cl = this.getClass.getClassLoader()
- val clasz = cl.loadClass(args(0))
- val meth = clasz.getDeclaredMethod("manpage")
+ val clasz = cl loadClass classname
+ val meth = clasz getDeclaredMethod "manpage"
val doc = meth.invoke(null).asInstanceOf[Document]
emitDocument(doc)
} catch {
case ex: Exception =>
ex.printStackTrace()
- System.err.println("Error in EmitManPage")
- exit(1)
+ System.err println "Error in EmitManPage"
+ sys.exit(1)
}
}
-
- def emitManPage(classname: String, outStream: java.io.OutputStream) {
- out.setOut(outStream)
- main(Array(classname))
- }
}
diff --git a/src/manual/scala/tools/docutil/ManMaker.scala b/src/manual/scala/tools/docutil/ManMaker.scala
index b947e3d..47b861a 100644
--- a/src/manual/scala/tools/docutil/ManMaker.scala
+++ b/src/manual/scala/tools/docutil/ManMaker.scala
@@ -1,3 +1,9 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Stephane Micheloud
+ * Adapted from Lex Spoon's sbaz manual
+ */
+
package scala.tools.docutil
import org.apache.tools.ant.Task
@@ -32,9 +38,9 @@ class ManMaker extends Task {
}
override def execute() {
- if (command.isEmpty) error("Attribute 'command' is not set.")
- if (htmlout.isEmpty) error("Attribute 'htmlout' is not set.")
- if (manout.isEmpty) error("Attribute 'manout' is not set.")
+ if (command.isEmpty) sys.error("Attribute 'command' is not set.")
+ if (htmlout.isEmpty) sys.error("Attribute 'htmlout' is not set.")
+ if (manout.isEmpty) sys.error("Attribute 'manout' is not set.")
command foreach (cmd => {
val classname = "scala.man1."+ cmd
diff --git a/src/manual/scala/tools/docutil/ManPage.scala b/src/manual/scala/tools/docutil/ManPage.scala
index ff7ac1b..2c5d696 100644
--- a/src/manual/scala/tools/docutil/ManPage.scala
+++ b/src/manual/scala/tools/docutil/ManPage.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Stephane Micheloud
* Adapted from Lex Spoon's sbaz manual
*/
diff --git a/src/manual/scala/tools/docutil/resources/css/style.css b/src/manual/scala/tools/docutil/resources/css/style.css
index 3072be4..6276829 100644
--- a/src/manual/scala/tools/docutil/resources/css/style.css
+++ b/src/manual/scala/tools/docutil/resources/css/style.css
@@ -61,6 +61,19 @@ span.tool {
font-weight: bold;
}
-th {
+table.basic {
+ width: 100%;
+}
+
+table.basic td {
+ margin: 0;
+ padding: 2px;
+}
+
+table.basic th {
+ text-align: left;
+}
+
+table.basic th.links, td.links {
white-space: nowrap;
}
diff --git a/src/manual/scala/tools/docutil/resources/index.html b/src/manual/scala/tools/docutil/resources/index.html
index 67913e7..aaef94d 100644
--- a/src/manual/scala/tools/docutil/resources/index.html
+++ b/src/manual/scala/tools/docutil/resources/index.html
@@ -8,7 +8,7 @@
<meta http-equiv="Content-Style-Type" content="text/css"/>
<meta http-equiv="Content-Language" content="en"/>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1"/>
- <meta name="Copyright" content="(C) 2002-2011 LAMP/EPFL"/>
+ <meta name="Copyright" content="(C) 2002-2012 LAMP/EPFL"/>
<meta name="Language" content="en"/>
<meta name="Description" content="The Scala Programming Language"/>
<meta name="Author" content="Stephane Micheloud"/>
@@ -44,8 +44,8 @@
<ul class="ContentList">
<li>
- <a href="#basic"><b class="SansSerif">Basic Tools</b></a> (<code>sbaz</code>,
- <code>fsc</code>, <code>scala</code>, <code>scalac</code>, <code>scaladoc</code>,
+ <a href="#basic"><b class="SansSerif">Basic Tools</b></a> (<code>fsc</code>,
+ <code>scala</code>, <code>scalac</code>, <code>scaladoc</code>,
<code>scalap</code>)
</li>
</ul>
@@ -114,11 +114,11 @@
use to create and build applications.
</p>
-<table cellspacing="0" cellpadding="2" style="width:100%;">
+<table class="basic">
<tr>
<th>Tool Name</th>
<th>Brief Description</th>
- <th>Links to Reference Pages</th>
+ <th class="links">Links to Reference Pages</th>
</tr>
<tr>
<td width="13%" valign="top">
@@ -127,29 +127,18 @@
<td width="70%" valign="top">
The fast Scala compiler.
</td>
- <td width="17%" valign="top">
+ <td width="17%" valign="top" class="links">
[<a href="fsc.html">Solaris, Linux and Windows</a>]
</td>
</tr>
<tr>
<td width="13%" valign="top">
- <span class="tool">sbaz</span>
- </td>
- <td width="70%" valign="top">
- The Scala sharing tool.
- </td>
- <td width="17%" valign="top">
- [<a href="sbaz.html">Solaris, Linux and Windows</a>]
- </td>
- </tr>
- <tr>
- <td width="13%" valign="top">
<span class="tool">scala</span>
</td>
<td width="70%" valign="top">
Run Scala code.
</td>
- <td width="17%" valign="top">
+ <td width="17%" valign="top" class="links">
[<a href="scala.html">Solaris, Linux and Windows</a>]
</td>
</tr>
@@ -160,7 +149,7 @@
<td width="70%" valign="top">
Compile Scala code ahead of time.
</td>
- <td width="17%" valign="top">
+ <td width="17%" valign="top" class="links">
[<a href="scalac.html">Solaris, Linux and Windows</a>]
</td>
</tr>
@@ -171,7 +160,7 @@
<td width="70%" valign="top">
The API document generator.
</td>
- <td width="17%" valign="top">
+ <td width="17%" valign="top" class="links">
[<a href="scaladoc.html">Solaris, Linux and Windows</a>]
</td>
</tr>
@@ -182,7 +171,7 @@
<td width="70%" valign="top">
The Scala class file decoder.
</td>
- <td width="17%" valign="top">
+ <td width="17%" valign="top" class="links">
[<a href="scalap.html">Solaris, Linux and Windows</a>]
</td>
</tr>
@@ -191,7 +180,7 @@
<hr/>
<div style="font-size:x-small;">
- Copyright (c) 2002-2011 <a href="http://www.epfl.ch/">EPFL</a>,
+ Copyright (c) 2002-2012 <a href="http://www.epfl.ch/">EPFL</a>,
Lausanne, unless specified otherwise.<br/>
All rights reserved.
</div>
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
index 2223a6d..2aa9a99 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
@@ -336,7 +336,6 @@ import ILGenerator._
emitSpecialLabel(Label.Try)
val endExc: Label = new Label.NormalLabel() // new Label(lastLabel) ???
excStack.push(Label.Try, endExc)
- return endExc
}
/** Begins a catch block. */
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
index d3a5719..0ed5e3f 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
@@ -726,7 +726,7 @@ abstract class ILPrinterVisitor extends Visitor {
val ta = ct.typeArgs(i)
val sigOpt = primitive.get(ta)
if (sigOpt.isDefined) print(sigOpt.get)
- else printTypeName(ta); /* should be printSignature, but don't want `class' or `valuetype'
+ else printTypeName(ta); /* should be printSignature, but don't want `class` or `valuetype`
appearing before a type param usage. */
i = i + 1;
if (i < ct.typeArgs.length) {
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala
index 43333ef..55c5210 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala
@@ -43,12 +43,12 @@ final class MultipleFilesILPrinterVisitor(destPath: String, sourceFilesPath: Str
// print each module
var m: Array[Module] = assemblyBuilder.GetModules()
nomembers = true
- for(val i <- 0 until m.length) {
+ for(i <- 0 until m.length) {
print(m(i).asInstanceOf[ModuleBuilder])
}
nomembers = false
- for(val i <- 0 until m.length) {
+ for(i <- 0 until m.length) {
print(m(i).asInstanceOf[ModuleBuilder])
}
ILPrinterVisitor.currAssembly = null
@@ -72,7 +72,7 @@ final class MultipleFilesILPrinterVisitor(destPath: String, sourceFilesPath: Str
// "Types" contain all the classes
var t: Array[Type] = module.GetTypes()
- for(val i <- 0 until t.length) {
+ for(i <- 0 until t.length) {
val tBuilder = t(i).asInstanceOf[TypeBuilder]
val sourceFilename = tBuilder.sourceFilename
val sourceFilepath = new File(tBuilder.sourceFilepath).getCanonicalPath
@@ -124,7 +124,7 @@ final class MultipleFilesILPrinterVisitor(destPath: String, sourceFilesPath: Str
printAttributes(module)
}
- for(val i <- 0 until m.length) {
+ for(i <- 0 until m.length) {
print(m(i).asInstanceOf[MethodBuilder])
}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
index fbcdbf8..b0c2688 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
@@ -887,7 +887,7 @@ opcode(Readonly, CEE_READONLY , "readonly." , 0xFFFFFE1E, POP_NONE, PUSH_NONE
opcode(Bne_Un_S, CEE_BNE_UN_S, "bne.un.s", 0xFFFFFF33, POP_1_1 , PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
/**
- * Transfers control to a target instruction (short form) if if the the first value is greather
+ * Transfers control to a target instruction (short form) if the first value is greather
* than the second value, when comparing unsigned integer values or unordered float values.
*/
final val Bge_Un_S = new OpCode()
@@ -973,7 +973,7 @@ opcode(Readonly, CEE_READONLY , "readonly." , 0xFFFFFE1E, POP_NONE, PUSH_NONE
opcode(Bne_Un, CEE_BNE_UN , "bne.un", 0xFFFFFF40, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
/**
- * Transfers control to a target instruction if the the first value is greather than
+ * Transfers control to a target instruction if the first value is greather than
* the second value, when comparing unsigned integer values or unordered float values.
*/
final val Bge_Un = new OpCode()
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
index d486c31..80e4267 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
@@ -307,7 +307,7 @@ object OpCodes {
final val Bne_Un_S = OpCode.Bne_Un_S
/**
- * Transfers control to a target instruction (short form) if if the the first value is greather
+ * Transfers control to a target instruction (short form) if the first value is greather
* than the second value, when comparing unsigned integer values or unordered float values.
*/
final val Bge_Un_S = OpCode.Bge_Un_S
@@ -380,7 +380,7 @@ object OpCodes {
final val Bne_Un = OpCode.Bne_Un
/**
- * Transfers control to a target instruction if the the first value is greather than
+ * Transfers control to a target instruction if the first value is greather than
* the second value, when comparing unsigned integer values or unordered float values.
*/
final val Bge_Un = OpCode.Bge_Un
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala
index 0f2e7d7..5d59d4d 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala
@@ -50,12 +50,12 @@ final class SingleFileILPrinterVisitor(_fileName: String) extends ILPrinterVisit
// print each module
var m: Array[Module] = assemblyBuilder.GetModules()
nomembers = true
- for(val i <- 0 until m.length) {
+ for(i <- 0 until m.length) {
print(m(i).asInstanceOf[ModuleBuilder])
}
nomembers = false
- for(val i <- 0 until m.length) {
+ for(i <- 0 until m.length) {
print(m(i).asInstanceOf[ModuleBuilder])
}
// close out file
@@ -79,12 +79,12 @@ final class SingleFileILPrinterVisitor(_fileName: String) extends ILPrinterVisit
module.CreateGlobalFunctions()
var m: Array[MethodInfo] = module.GetMethods()
- for(val i <- 0 until m.length) {
+ for(i <- 0 until m.length) {
print(m(i).asInstanceOf[MethodBuilder])
}
var t: Array[Type] = module.GetTypes()
- for(val i <- 0 until t.length) {
+ for(i <- 0 until t.length) {
print(t(i).asInstanceOf[TypeBuilder])
}
currentModule = null
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
index 5126a0c..57dc883 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
@@ -222,7 +222,7 @@ class TypeBuilder (module: Module, attributes: Int, fullName: String, baseType:
object TypeBuilder {
def types2String(types: Array[Type]): String = {
var s = new StringBuffer("(")
- for(val i <- 0 until types.length) {
+ for(i <- 0 until types.length) {
if (i > 0) s.append(", ")
s.append(types(i))
}
@@ -239,7 +239,7 @@ object TypeBuilder {
val p2 = m2.GetParameters()
if (p1.length != p2.length)
return false
- for(val i <- 0 until p1.length)
+ for(i <- 0 until p1.length)
if (p1(i).ParameterType != p2(i).ParameterType)
return false
return true
@@ -252,7 +252,7 @@ object TypeBuilder {
val p2 = c2.GetParameters()
if (p1.length != p2.length)
return false
- for(val i <- 0 until p1.length)
+ for(i <- 0 until p1.length)
if (p1(i).ParameterType != p2(i).ParameterType)
return false
return true
diff --git a/src/partest-alternative/README b/src/partest-alternative/README
deleted file mode 100644
index c7673fe..0000000
--- a/src/partest-alternative/README
+++ /dev/null
@@ -1,50 +0,0 @@
-If you're looking for something to read, I suggest running ../test/partest
-with no arguments, which at this moment prints this:
-
-Usage: partest [<options>] [<test> <test> ...]
- <test>: a path to a test designator, typically a .scala file or a directory.
- Examples: files/pos/test1.scala, files/res/bug785
-
- Test categories:
- --all run all tests (default, unless no options given)
- --pos Compile files that are expected to build
- --neg Compile files that are expected to fail
- --run Test JVM backend
- --jvm Test JVM backend
- --res Run resident compiler scenarii
- --buildmanager Run Build Manager scenarii
- --scalacheck Run Scalacheck tests
- --script Run script files
- --shootout Run shootout tests
- --scalap Run scalap tests
-
- Test "smart" categories:
- --grep run all tests with a source file containing <expr>
- --failed run all tests which failed on the last run
-
- Specifying paths and additional flags, ~ means repository root:
- --rootdir path from ~ to partest (default: test)
- --builddir path from ~ to test build (default: build/pack)
- --srcdir path from --rootdir to sources (default: files)
- --javaopts flags to java on all runs (overrides JAVA_OPTS)
- --scalacopts flags to scalac on all tests (overrides SCALAC_OPTS)
- --pack alias for --builddir build/pack
- --quick alias for --builddir build/quick
-
- Options influencing output:
- --trace show the individual steps taken by each test
- --show-diff show diff between log and check file
- --show-log show log on failures
- --dry-run do not run tests, only show their traces.
- --terse be less verbose (almost silent except for failures)
- --verbose be more verbose (additive with --trace)
- --debug maximum debugging output
- --ansi print output in color
-
- Other options:
- --timeout Timeout in seconds
- --cleanup delete all stale files and dirs before run
- --nocleanup do not delete any logfiles or object dirs
- --stats collect and print statistics about the tests
- --validate examine test filesystem for inconsistencies
- --version print version
diff --git a/src/partest-alternative/scala/tools/partest/Actions.scala b/src/partest-alternative/scala/tools/partest/Actions.scala
deleted file mode 100644
index 9a64ede..0000000
--- a/src/partest-alternative/scala/tools/partest/Actions.scala
+++ /dev/null
@@ -1,189 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools
-package partest
-
-import util._
-import nsc.io._
-import scala.sys.process._
-
-trait Actions {
- partest: Universe =>
-
- class TestSequence(val actions: List[TestStep]) extends AbsTestSequence {
- }
-
- implicit def createSequence(xs: List[TestStep]) = new TestSequence(xs)
-
- trait ExecSupport {
- self: TestEntity =>
-
- def execEnv: Map[String, String] = {
- val map = assembleEnvironment()
- val cwd = execCwd.toList map ("CWD" -> _.path)
-
- map ++ cwd
- }
- def execCwd = if (commandFile.isFile) Some(sourcesDir) else None
-
- def runExec(args: List[String]): Boolean = {
- val cmd = fromArgs(args)
-
- if (isVerbose) {
- trace("runExec: " + execEnv.mkString("ENV(", "\n", "\n)"))
- execCwd foreach (x => trace("CWD(" + x + ")"))
- }
-
- trace("runExec: " + cmd)
- isDryRun || execAndLog(cmd)
- }
-
- /** Exec a process to run a command. Assumes 0 exit value is success.
- * Of necessity, also treats no available exit value as success.
- */
- protected def execAndLog(cmd: String) = (cmd #> logFile.jfile !) == 0
- }
-
- trait ScriptableTest {
- self: TestEntity =>
-
- /** Translates a line from a .cmds file into a teststep.
- */
- def customTestStep(line: String): TestStep = {
- trace("customTestStep: " + line)
- val (cmd, rest) = line span (x => !Character.isWhitespace(x))
- def qualify(name: String) = sourcesDir / name path
- val args = toArgs(rest) map qualify
- def fail: TestStep = (_: TestEntity) => error("Parse error: did not understand '%s'" format line)
-
- val f: TestEntity => Boolean = cmd match {
- case "scalac" => _ scalac args
- case "javac" => _ javac args
- case "scala" => _ runScala args
- case _ => fail
- }
- f
- }
- }
-
- trait CompilableTest extends CompileExecSupport {
- self: TestEntity =>
-
- def sourceFiles = location.walk collect { case f: File if isJavaOrScala(f) => f } toList
- def allSources = sourceFiles map (_.path)
- def scalaSources = sourceFiles filter isScala map (_.path)
- def javaSources = sourceFiles filter isJava map (_.path)
-
- /** If there are mixed java and scala files, the standard compilation
- * sequence is:
- *
- * scalac with all files
- * javac with only java files
- * scalac with only scala files
- *
- * This should be expanded to encompass other strategies so we know how
- * well they're working or not working - notably, it would be very useful
- * to know exactly when and how two-pass compilation fails.
- */
- def compile() = {
- trace("compile: " + sourceFiles)
-
- def compileJava() = javac(javaSources)
- def compileScala() = scalac(scalaSources)
- def compileAll() = scalac(allSources)
- def compileMixed() = compileAll() && compileJava() && compileScala()
-
- if (scalaSources.nonEmpty && javaSources.nonEmpty) compileMixed()
- else compileScala()
- }
- }
-
- trait DiffableTest {
- self: TestEntity =>
-
- def checkFile: File = withExtension("check").toFile
- def checkFileRequired =
- returning(checkFile.isFile)(res => if (!res) warnAndLog("A checkFile at '%s' is mandatory.\n" format checkFile.path))
-
- lazy val sourceFileNames = sourceFiles map (_.name)
-
- /** Given the difficulty of verifying that any selective approach works
- * everywhere, the algorithm now is to look for the name of any known
- * source file for this test, and if seen, remove all the non-whitespace
- * preceding it. (Paths with whitespace don't work anyway.) This should
- * wipe out all slashes, backslashes, C:\, cygwin/windows differences,
- * and whatever else makes a simple diff not simple.
- *
- * The log and check file are both transformed, which I don't think is
- * correct -- only the log should be -- but doing it this way until I
- * can clarify martin's comments in #3283.
- */
- def normalizePaths(s: String) =
- sourceFileNames.foldLeft(s)((res, name) => res.replaceAll("""\S+\Q%s\E""" format name, name))
-
- /** The default cleanup normalizes paths relative to sourcesDir,
- * absorbs line terminator differences by going to lines and back,
- * and trims leading or trailing whitespace.
- */
- def diffCleanup(f: File) = safeLines(f) map normalizePaths mkString "\n" trim
-
- /** diffFiles requires actual Files as arguments but the output we want
- * is the post-processed versions of log/check, so we resort to tempfiles.
- */
- lazy val diffOutput = {
- if (!checkFile.exists) "" else {
- val input = diffCleanup(checkFile)
- val output = diffCleanup(logFile)
- def asFile(s: String) = returning(File.makeTemp("partest-diff"))(_ writeAll s)
-
- if (input == output) ""
- else diffFiles(asFile(input), asFile(output))
- }
- }
- private def checkTraceName = tracePath(checkFile)
- private def logTraceName = tracePath(logFile)
- private def isDiffConfirmed = checkFile.exists && (diffOutput == "")
-
- private def sendTraceMsg() {
- def result =
- if (isDryRun) ""
- else if (isDiffConfirmed) " [passed]"
- else if (checkFile.exists) " [failed]"
- else " [unchecked]"
-
- trace("diff %s %s%s".format(checkTraceName, logTraceName, result))
- }
-
- /** If optional is true, a missing check file is considered
- * a successful diff. Necessary since many categories use
- * checkfiles in an ad hoc manner.
- */
- def runDiff() = {
- sendTraceMsg()
-
- def updateCheck = (
- isUpdateCheck && {
- val formatStr = "** diff %s %s: " + (
- if (checkFile.exists) "failed, updating '%s' and marking as passed."
- else if (diffOutput == "") "not creating checkFile at '%s' as there is no output."
- else "was unchecked, creating '%s' for future tests."
- ) + "\n"
-
- normal(formatStr.format(checkTraceName, logTraceName, checkFile.path))
- if (diffOutput != "") normal(diffOutput)
-
- checkFile.writeAll(diffCleanup(logFile), "\n")
- true
- }
- )
-
- isDryRun || isDiffConfirmed || (updateCheck || !checkFile.exists)
- }
- }
-}
diff --git a/src/partest-alternative/scala/tools/partest/Alarms.scala b/src/partest-alternative/scala/tools/partest/Alarms.scala
deleted file mode 100644
index ef30d13..0000000
--- a/src/partest-alternative/scala/tools/partest/Alarms.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package partest
-
-import java.util.{ Timer, TimerTask }
-
-trait Alarms {
- self: Universe =>
-
- def interruptMeIn[T](debugMsg: String, seconds: Int)(body: => T): Option[T] = {
- val thisThread = currentThread
- val alarm = new SimpleAlarm(seconds * 1000) set thisThread.interrupt()
- debug("interruptMeIn(%d) '%s'".format(seconds, debugMsg))
-
- try { Some(body) }
- catch { case _: InterruptedException => debug("Received interrupted exception.") ; None }
- finally { debug("Cancelling interruptMeIn '%s'" format debugMsg) ; alarm.cancel() ; Thread.interrupted() }
- }
-
- case class AlarmerAction(secs: Int, action: () => Unit) extends Runnable {
- override def run() = action()
- }
-
- /** Set any number of alarms up with tuples of the form:
- * seconds to alarm -> Function0[Unit] to execute
- */
- class Alarmer(alarms: AlarmerAction*) {
- import java.util.concurrent._
-
- val exec = Executors.newSingleThreadScheduledExecutor()
- alarms foreach (x => exec.schedule(x, x.secs, TimeUnit.SECONDS))
- exec.shutdown()
-
- def cancelAll() = exec.shutdownNow()
- }
-
- class SimpleAlarm(timeout: Long) {
- private val alarm = new Timer
-
- /** Start a timer, running the given body if it goes off.
- */
- def set(body: => Unit) = returning(new TimerTask { def run() = body })(alarm.schedule(_, timeout))
-
- /** Cancel the timer.
- */
- def cancel() = alarm.cancel()
- }
-
- trait TestAlarms {
- test: TestEntity =>
-
- private def warning1 = AlarmerAction(testWarning, () => warning(
- """|I've been waiting %s seconds for this to complete:
- | %s
- |It may be stuck, or if not, it should be broken into smaller tests.
- |""".stripMargin.format(testWarning, test))
- )
- private def warning2 = AlarmerAction(testWarning * 2, () => warning(
- """|Now I've been waiting %s seconds for this to complete:
- | %s
- |If partest seems hung it would be a good place to look.
- |""".stripMargin.format(testWarning * 2, test))
- )
-
- def startAlarms(onTimeout: => Unit) =
- if (isNoAlarms) new Alarmer() // for alarm debugging
- else new Alarmer(Seq(warning1, warning2, AlarmerAction(testTimeout, () => onTimeout)): _*)
- }
-
- // Thread.setDefaultUncaughtExceptionHandler(new UncaughtException)
- // class UncaughtException extends Thread.UncaughtExceptionHandler {
- // def uncaughtException(t: Thread, e: Throwable) {
- // Console.println("Uncaught in %s: %s".format(t, e))
- // }
- // }
- //
- // lazy val logger = File("/tmp/partest.log").bufferedWriter()
- // def flog(msg: String) = logger synchronized {
- // logger write (msg + "\n")
- // logger.flush()
- // }
-}
diff --git a/src/partest-alternative/scala/tools/partest/BuildContributors.scala b/src/partest-alternative/scala/tools/partest/BuildContributors.scala
deleted file mode 100644
index 85ca895..0000000
--- a/src/partest-alternative/scala/tools/partest/BuildContributors.scala
+++ /dev/null
@@ -1,102 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-
-import nsc.io._
-import nsc.util.ClassPath
-
-trait BuildContributors {
- universe: Universe =>
-
- /** A trait mixed into types which contribute a portion of the values.
- * The basic mechanism is the TestBuild, TestCategory, and TestEntity
- * can each contribute to each value. They are assembled at the last
- * moment by the ContributorAssembler (presently the TestEntity.)
- */
- trait BuildContributor {
- def javaFlags: List[String]
- def scalacFlags: List[String]
- def classpathPaths: List[Path]
- def buildProperties: List[(String, Any)]
- def buildEnvironment: Map[String, String]
- }
-
- trait ContributorAssembler {
- def contributors: List[BuildContributor]
- def assemble[T](what: BuildContributor => List[T]): List[T] = contributors flatMap what
-
- /** !!! This will need work if we want to achieve real composability,
- * but it can wait for the demand.
- */
- def assembleScalacArgs(args: List[String]) = assemble(_.scalacFlags) ++ args
- def assembleJavaArgs(args: List[String]) = assemble(_.javaFlags) ++ args
- def assembleProperties() = assemble(_.buildProperties)
- def assembleClasspaths(paths: List[Path]) = assemble(_.classpathPaths) ++ paths
- def assembleEnvironment() = assemble(_.buildEnvironment.toList).toMap
-
- def createClasspathString() = ClassPath fromPaths (assembleClasspaths(Nil) : _*)
- def createPropertyString() = assembleProperties() map { case (k, v) => "-D%s=%s".format(k, v.toString) }
- }
-
- trait BuildContribution extends BuildContributor {
- self: TestBuild =>
-
- /** The base classpath and system properties.
- * !!! TODO - this should adjust itself depending on the build
- * being tested, because pack and quick at least need different jars.
- */
- def classpathPaths = List[Path](library, compiler, partest, fjbg) ++ forkJoinPath
- def buildProperties = List(
- "scala.home" -> testBuildDir,
- "partest.lib" -> library, // used in jvm/inner
- "java.awt.headless" -> true,
- "user.language" -> "en",
- "user.country" -> "US",
- "partest.debug" -> isDebug,
- "partest.verbose" -> isVerbose
- // Disabled because there are no natives tests.
- // "java.library.path" -> srcLibDir
- )
- def javaFlags: List[String] = toArgs(javaOpts)
- def scalacFlags: List[String] = toArgs(scalacOpts)
-
- /** We put the build being tested's /bin directory in the front of the
- * path so the scripts and such written to execute "scala" will use this
- * build and not whatever happens to be on their path.
- */
- private def modifiedPath = ClassPath.join(scalaBin.path, Properties.envOrElse("PATH", ""))
- def buildEnvironment = Map("PATH" -> modifiedPath)
- }
-
- trait CategoryContribution extends BuildContributor {
- self: DirBasedCategory =>
-
- /** Category-wide classpath additions placed in <category>/lib. */
- private def libContents = root / "lib" ifDirectory (_.list.toList)
-
- def classpathPaths = libContents getOrElse Nil
- def buildProperties = Nil
- def javaFlags = Nil
- def scalacFlags = Nil
- def buildEnvironment = Map()
- }
-
- trait TestContribution extends BuildContributor with ContributorAssembler {
- self: TestEntity =>
-
- def jarsInTestDir = location.walk collect { case f: File if f hasExtension "jar" => f } toList
-
- def contributors = List(build, category, self)
- def javaFlags = safeArgs(javaOptsFile)
- def scalacFlags = safeArgs(scalaOptsFile)
- def classpathPaths = jarsInTestDir :+ outDir
- def buildProperties = List(
- "partest.output" -> outDir.toAbsolute, // used in jvm/inner
- "partest.cwd" -> outDir.parent.toAbsolute // used in shootout tests
- )
- def buildEnvironment = Map("JAVA_OPTS" -> fromArgs(assembleJavaArgs(Nil)))
- }
-}
\ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/Categories.scala b/src/partest-alternative/scala/tools/partest/Categories.scala
deleted file mode 100644
index c517a3f..0000000
--- a/src/partest-alternative/scala/tools/partest/Categories.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools
-package partest
-
-import nsc.Settings
-import nsc.io._
-import nsc.util.{ ClassPath }
-
-trait Categories {
- self: Universe =>
-
- trait TestCategory extends AbsTestCategory {
- def kind: String
- def startMessage: String = "Executing test group"
- def testSequence: TestSequence
-
- class TestSettings(entity: TestEntity, error: String => Unit) extends Settings(error) {
- def this(entity: TestEntity) = this(entity, Console println _)
-
- deprecation.value = false
- encoding.value = "ISO-8859-1"
- classpath.value = entity.testClasspath
- outdir.value = entity.outDir.path
- }
-
- def createSettings(entity: TestEntity): TestSettings = new TestSettings(entity)
- def createTest(location: Path): TestEntity =
- if (location.isFile) TestFile(this, location.toFile)
- else if (location.isDirectory) TestDirectory(this, location.toDirectory)
- else error("Failed to create test at '%s'" format location)
-
- /** Category test identification.
- */
- def denotesTestFile(p: Path) = p.isFile && (p hasExtension "scala")
- def denotesTestDir(p: Path) = p.isDirectory && !ignorePath(p)
- def denotesTest(p: Path) = denotesTestDir(p) || denotesTestFile(p)
-
- /** This should verify that all necessary files are present.
- * By default it delegates to denotesTest.
- */
- def denotesValidTest(p: Path) = denotesTest(p)
- }
-
- abstract class DirBasedCategory(val kind: String) extends TestCategory with CategoryContribution {
- lazy val root = Directory(src / kind).normalize
- def enumerate = root.list filter denotesTest map createTest toList
-
- /** Standard actions. These can be overridden either on the
- * Category level or by individual tests.
- */
- def compile: TestStep = (_: TestEntity).compile()
- def checkFileRequired: TestStep = (_: TestEntity).checkFileRequired
- def diff: TestStep = (_: TestEntity).diff()
- def run: TestStep = (_: TestEntity).run()
- def exec: TestStep = (_: TestEntity).exec()
-
- /** Combinators.
- */
- def not(f: TestStep): TestStep = !f(_: TestEntity)
-
- override def toString = kind
- }
-}
\ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/Compilable.scala b/src/partest-alternative/scala/tools/partest/Compilable.scala
deleted file mode 100644
index 65b5d5d..0000000
--- a/src/partest-alternative/scala/tools/partest/Compilable.scala
+++ /dev/null
@@ -1,106 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-
-import scala.tools.nsc.io._
-import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError }
-import scala.tools.nsc.util.{ ClassPath }
-import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
-
-trait PartestCompilation {
- self: Universe =>
-
- trait CompileExecSupport extends ExecSupport {
- self: TestEntity =>
-
- def javacpArg = "-classpath " + testClasspath
- def scalacpArg = "-usejavacp"
-
- /** Not used, requires tools.jar.
- */
- // def javacInternal(args: List[String]) = {
- // import com.sun.tools.javac.Main
- // Main.compile(args.toArray, logWriter)
- // }
-
- def javac(args: List[String]): Boolean = {
- val allArgString = fromArgs(javacpArg :: javacOpts :: args)
-
- // javac -d outdir -classpath <basepath> <files>
- val cmd = "%s -d %s %s".format(javacCmd, outDir, allArgString)
- def traceMsg =
- if (isVerbose) cmd
- else "%s -d %s %s".format(tracePath(Path(javacCmd)), tracePath(outDir), fromArgs(args))
-
- trace(traceMsg)
-
- isDryRun || execAndLog(cmd)
- }
-
- def scalac(args: List[String]): Boolean = {
- val allArgs = assembleScalacArgs(args)
- val (global, files) = newGlobal(allArgs)
- def nonFileArgs = if (isVerbose) global.settings.recreateArgs else assembleScalacArgs(Nil)
- def traceArgs = fromArgs(nonFileArgs ++ (files map tracePath))
- def traceMsg = "scalac " + traceArgs
-
- trace(traceMsg)
- isDryRun || global.partestCompile(files, true)
- }
-
- /** Actually running the test, post compilation.
- * Normally args will be List("Test", "jvm"), main class and arg to it.
- */
- def runScala(args: List[String]): Boolean = {
- val scalaRunnerClass = "scala.tools.nsc.MainGenericRunner"
-
- // java $JAVA_OPTS <javaopts> -classpath <cp>
- val javaCmdAndOptions = javaCmd +: assembleJavaArgs(List(javacpArg))
- // MainGenericRunner -usejavacp <scalacopts> Test jvm
- val scalaCmdAndOptions = List(scalaRunnerClass, scalacpArg) ++ assembleScalacArgs(args)
- // Assembled
- val cmd = fromArgs(javaCmdAndOptions ++ createPropertyString() ++ scalaCmdAndOptions)
-
- def traceMsg = if (isVerbose) cmd else fromArgs(javaCmd :: args)
- trace("runScala: " + traceMsg)
-
- isDryRun || execAndLog(cmd)
- }
-
- def newReporter(settings: Settings) = new ConsoleReporter(settings, Console.in, logWriter)
-
- class PartestGlobal(settings: Settings, val creporter: ConsoleReporter) extends Global(settings, creporter) {
- def partestCompile(files: List[String], printSummary: Boolean): Boolean = {
- try { new Run compile files }
- catch {
- case FatalError(msg) => creporter.error(null, "fatal error: " + msg)
- case ae: AssertionError => creporter.error(null, ""+ae)
- case te: TypeError => creporter.error(null, ""+te)
- case ex =>
- creporter.error(null, ""+ex)
- throw ex
- }
-
- if (printSummary)
- creporter.printSummary
-
- creporter.flush()
- !creporter.hasErrors
- }
- }
-
- def newGlobal(args: List[String]): (PartestGlobal, List[String]) = {
- val settings = category createSettings self
- val command = new CompilerCommand(args, settings)
- val reporter = newReporter(settings)
-
- if (!command.ok)
- debug("Error parsing arguments: '%s'".format(args mkString ", "))
-
- (new PartestGlobal(command.settings, reporter), command.files)
- }
- }
-}
diff --git a/src/partest-alternative/scala/tools/partest/Config.scala b/src/partest-alternative/scala/tools/partest/Config.scala
deleted file mode 100644
index ee1852f..0000000
--- a/src/partest-alternative/scala/tools/partest/Config.scala
+++ /dev/null
@@ -1,115 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-
-import io._
-import nsc.io._
-import Properties._
-
-trait Config {
- universe: Universe =>
-
- lazy val src = absolutize(srcDir).toDirectory
- lazy val build = new TestBuild()
-
- def javaHomeEnv = envOrElse("JAVA_HOME", null)
- def javaCmd = envOrElse("JAVACMD", "java")
- def javacCmd = Option(javaHomeEnv) map (x => Path(x) / "bin" / "javac" path) getOrElse "javac"
-
- /** Values related to actors. The timeouts are in seconds. On a dry
- * run we only allocate one worker so the output isn't interspersed.
- */
- def workerTimeout = 3600 // 1 hour, probably overly generous
- def numWorkers = if (isDryRun) 1 else propOrElse("partest.actors", "8").toInt
- def expectedErrors = propOrElse("partest.errors", "0").toInt
- def poolSize = (wrapAccessControl(propOrNone("actors.corePoolSize")) getOrElse "16").toInt
-
- def allScalaFiles = src.deepFiles filter (_ hasExtension "scala")
- def allObjDirs = src.deepDirs filter (_ hasExtension "obj")
- def allLogFiles = src.deepFiles filter (_ hasExtension "log")
- def allClassFiles = src.deepFiles filter (_ hasExtension "class")
-
- class TestBuild() extends BuildContribution {
- import nsc.util.ClassPath
-
- /** Scala core libs.
- */
- val library = pathForComponent("library")
- val compiler = pathForComponent("compiler")
- val partest = pathForComponent("partest")
- val scalap = pathForComponent("scalap", "%s.jar")
-
- /** Scala supplementary libs - these are not all needed for all build targets,
- * and some of them are copied inside other jars in later targets. However quick
- * for instance cannot be run without some of these.
- */
- val fjbg = pathForLibrary("fjbg")
- val msil = pathForLibrary("msil")
- val forkjoin = pathForLibrary("forkjoin")
- val scalacheck = pathForLibrary("scalacheck")
-
- /** Other interesting paths.
- */
- val scalaBin = testBuildDir / "bin"
-
- /** A hack for now to get quick running.
- */
- def needsForkJoin = {
- val loader = nsc.util.ScalaClassLoader.fromURLs(List(library.toURL))
- val fjMarker = "scala.concurrent.forkjoin.ForkJoinTask"
- val clazz = loader.tryToLoadClass(fjMarker)
-
- if (clazz.isDefined) debug("Loaded ForkJoinTask OK, don't need jar.")
- else debug("Could not load ForkJoinTask, putting jar on classpath.")
-
- clazz.isEmpty
- }
- lazy val forkJoinPath: List[Path] = if (needsForkJoin) List(forkjoin) else Nil
-
- /** Internal **/
- private def repo = partestDir.parent.normalize
-
- private def pathForComponent(what: String, jarFormat: String = "scala-%s.jar"): Path = {
- def asDir = testBuildDir / "classes" / what
- def asJar = testBuildDir / "lib" / jarFormat.format(what)
-
- if (asDir.isDirectory) asDir
- else if (asJar.isFile) asJar
- else ""
- }
- private def pathForLibrary(what: String) = File(repo / "lib" / (what + ".jar"))
- }
-
- def printConfigBanner() = {
- debug("Java VM started with arguments: '%s'" format fromArgs(Process.javaVmArguments))
- debug("System Properties:\n" + util.allPropertiesString())
-
- normal(configBanner())
- }
-
- /** Treat an access control failure as None. */
- private def wrapAccessControl[T](body: => Option[T]): Option[T] =
- try body catch { case _: java.security.AccessControlException => None }
-
- private def configBanner() = {
- val javaBin = Path(javaHome) / "bin"
- val javaInfoString = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo)
-
- List(
- "Scala compiler classes in: " + testBuildDir,
- "Scala version is: " + nsc.Properties.versionMsg,
- "Scalac options are: " + universe.scalacOpts,
- "Java binaries in: " + javaBin,
- "Java runtime is: " + javaInfoString,
- "Java runtime options: " + (Process.javaVmArguments mkString " "),
- "Javac options are: " + universe.javacOpts,
- "Java options are: " + universe.javaOpts,
- "Source directory is: " + src,
- "Selected categories: " + (selectedCategories mkString " "),
- ""
- ) mkString "\n"
- }
-}
diff --git a/src/partest-alternative/scala/tools/partest/Dispatcher.scala b/src/partest-alternative/scala/tools/partest/Dispatcher.scala
deleted file mode 100644
index 69efc35..0000000
--- a/src/partest-alternative/scala/tools/partest/Dispatcher.scala
+++ /dev/null
@@ -1,162 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools
-package partest
-
-import scala.tools.nsc.io._
-import scala.actors.{ Actor, TIMEOUT }
-import scala.actors.Actor._
-import scala.collection.immutable
-import scala.util.control.Exception.ultimately
-
-/** The machinery for concurrent execution of tests. Each Worker
- * is given a bundle of tests, which it runs sequentially and then
- * sends a report back to the dispatcher.
- */
-trait Dispatcher {
- partest: Universe =>
-
- /** The public entry point. The given filter narrows down the list of
- * tests to run.
- */
- def runSelection(categories: List[TestCategory], filt: TestEntity => Boolean = _ => true): CombinedTestResults = {
- // Setting scala.home informs tests where to obtain their jars.
- setProp("scala.home", testBuildDir.path)
-
- val allTests = allCategories flatMap (_.enumerate)
- val selected = allTests filter filt
- val groups = selected groupBy (_.category)
- val count = selected.size
-
- if (count == 0) return CombinedTestResults(0, 0, 0, Nil)
- else if (count == allTests.size) verbose("Running all %d tests." format count)
- else verbose("Running %d/%d tests: %s".format(count, allTests.size, toStringTrunc(selected map (_.label) mkString ", ")))
-
- allCategories collect { case x if groups contains x => runCategory(x, groups(x)) } reduceLeft (_ ++ _)
- }
-
- private def parallelizeTests(tests: List[TestEntity]): immutable.Map[TestEntity, TestResult] = {
- // propagate verbosity
- if (isDebug) scala.actors.Debug.level = 3
-
- // "If elected, I guarantee a slice of tests for every worker!"
- val groups = tests grouped ((tests.size / numWorkers) + 1) toList
-
- // "Workers, line up for assignments!"
- val workers =
- for ((slice, workerNum) <- groups.zipWithIndex) yield {
- returning(new Worker(workerNum)) { worker =>
- worker.start()
- worker ! TestsToRun(slice)
- }
- }
-
- normal("Started %d workers with ~%d tests each.\n".format(groups.size, groups.head.size))
-
- /** Listening for news from the proletariat.
- */
- (workers map { w =>
- receiveWithin(workerTimeout * 1000) {
- case ResultsOfRun(resultMap) => resultMap
- case TIMEOUT =>
- warning("Worker %d timed out." format w.workerNum)
- // mark all the worker's tests as having timed out - should be hard to miss
- // immutable.Map[TestEntity, TestResult]()
- groups(w.workerNum) map (x => (x -> new Timeout(x))) toMap
- }
- }) reduceLeft (_ ++ _)
- }
-
- private def runCategory(category: TestCategory, tests: List[TestEntity]): CombinedTestResults = {
- val kind = category.kind
- normal("%s (%s tests in %s)\n".format(category.startMessage, tests.size, category))
-
- val (milliSeconds, resultMap) = timed2(parallelizeTests(tests))
- val (passed, failed) = resultsToStatistics(resultMap mapValues (_.state))
- val failures = resultMap.values filterNot (_.passed) toList
-
- CombinedTestResults(passed, failed, milliSeconds, failures)
- }
-
- /** A Worker is given a bundle of tests and runs them all sequentially.
- */
- class Worker(val workerNum: Int) extends Actor {
- def act() {
- react { case TestsToRun(tests) =>
- val master = sender
- runTests(tests)(results => master ! ResultsOfRun(results))
- }
- }
-
- /** Runs the tests. Passes the result Map to onCompletion when done.
- */
- private def runTests(tests: List[TestEntity])(onCompletion: immutable.Map[TestEntity, TestResult] => Unit) {
- var results = new immutable.HashMap[TestEntity, TestResult] // maps tests to results
- val numberOfTests = tests.size
- val testIterator = tests.iterator
- def processed = results.size
- def isComplete = testIterator.isEmpty
-
- def atThreshold(num: Double) = {
- require(num >= 0 && num <= 1.0)
- ((processed - 1).toDouble / numberOfTests <= num) && (processed.toDouble / numberOfTests >= num)
- }
-
- def extraMessage = {
- // for now quiet for normal people
- if (isVerbose || isTrace || isDebug) {
- if (isComplete) "(#%d 100%%)" format workerNum
- else if (isVerbose) "(#%d %d/%d)".format(workerNum, processed, numberOfTests)
- else if (isTrace && atThreshold(0.5)) "(#%d 50%%)" format workerNum
- else ""
- }
- else ""
- }
-
- def countAndReport(result: TestResult) {
- val TestResult(test, state) = result
- // refuse to count an entity twice
- if (results contains test)
- return warning("Received duplicate result for %s: was %s, now %s".format(test, results(test), state))
-
- // increment the counter for this result state
- results += (test -> result)
-
- // show on screen
- if (isDryRun) normal("\n") // blank line between dry run traces
- else result show extraMessage
-
- // remove log if successful
- if (result.passed)
- test.deleteLog()
-
- // Respond to master if this Worker is complete
- if (isComplete)
- onCompletion(results)
- }
-
- Actor.loopWhile(testIterator.hasNext) {
- val parent = self
- // pick a test and set some alarms
- val test = testIterator.next
- val alarmer = test startAlarms (parent ! new Timeout(test))
-
- actor {
- ultimately(alarmer.cancelAll()) {
- // Calling isSuccess forces the lazy val "process" inside the test, running it.
- val res = test.isSuccess
- // Cancel the alarms and alert the media.
- parent ! TestResult(test, res)
- }
- }
-
- react {
- case x: TestResult => countAndReport(x)
- }
- }
- }
- }
-}
\ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/Entities.scala b/src/partest-alternative/scala/tools/partest/Entities.scala
deleted file mode 100644
index 301deb9..0000000
--- a/src/partest-alternative/scala/tools/partest/Entities.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools
-package partest
-
-import nsc.io._
-
-trait Entities {
- self: Universe =>
-
- abstract class TestEntity extends AbsTestEntity
- with TestContribution
- with TestHousekeeping
- with TestAlarms
- with EntityLogging
- with CompilableTest
- with ScriptableTest
- with DiffableTest {
- def location: Path
- def category: TestCategory
-
- lazy val label = location.stripExtension
- lazy val testClasspath = returning(createClasspathString())(x => vtrace("testClasspath: " + x))
-
- /** Was this test successful? Calling this for the first time forces
- * lazy val "process" which actually runs the test.
- */
- def isSuccess = process
-
- /** Some standard files, which may or may not be present.
- */
- def scalaOptsFile = withExtension("flags").toFile // opts to scalac
- def javaOptsFile = withExtension("javaopts").toFile // opts to java (but not javac)
- def commandFile = withExtension("cmds").toFile // sequence of commands to execute
- def logFile = withExtension("log").toFile // collected output
-
- /** Some standard directories.
- */
- def outDir = withExtension("obj").toDirectory // output dir, e.g. files/pos/t14.obj
- def categoryDir = location.parent.normalize // category dir, e.g. files/pos/
- def sourcesDir = location ifDirectory (_.normalize) getOrElse categoryDir
-
- /** Standard arguments for run, exec, diff.
- */
- def argumentsToRun = List("Test", "jvm")
- def argumentsToExec = List(location.path)
-
- /** Using a .cmds file for a custom test sequence.
- */
- def commandList = safeLines(commandFile)
- def testSequence =
- if (commandFile.isFile && commandList.nonEmpty) commandList map customTestStep
- else category.testSequence
-
- def run() = runScala(argumentsToRun)
- def exec() = runExec(argumentsToExec)
- def diff() = runDiff() // checkFile, logFile
-
- /** The memoized result of the test run.
- */
- private lazy val process = {
- val outcome = runWrappers(testSequence.actions forall (f => f(this)))
-
- // an empty outcome means we've been interrupted and are shutting down.
- outcome getOrElse false
- }
- }
-
- case class TestDirectory(category: TestCategory, location: Directory) extends TestEntity { }
- case class TestFile(category: TestCategory, location: File) extends TestEntity { }
-}
diff --git a/src/partest-alternative/scala/tools/partest/Housekeeping.scala b/src/partest-alternative/scala/tools/partest/Housekeeping.scala
deleted file mode 100644
index cfdecee..0000000
--- a/src/partest-alternative/scala/tools/partest/Housekeeping.scala
+++ /dev/null
@@ -1,187 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-
-import scala.util.control.Exception.catching
-import util._
-import nsc.io._
-import Process.runtime
-import Properties._
-
-/** An agglomeration of code which is low on thrills. Hopefully
- * it operates so quietly in the background that you never have to
- * look at this file.
- */
-trait Housekeeping {
- self: Universe =>
-
- /** Orderly shutdown on ctrl-C. */
- @volatile private var _shuttingDown = false
- protected def setShuttingDown() = {
- /** Whatever we want to do as shutdown begins goes here. */
- if (!_shuttingDown) {
- warning("Received shutdown signal, partest is cleaning up...\n")
- _shuttingDown = true
- }
- }
- def isShuttingDown = _shuttingDown
-
- /** Execute some code with a shutdown hook in place. This is
- * motivated by the desire not to leave the filesystem full of
- * junk when someone ctrl-Cs a test run.
- */
- def withShutdownHook[T](hook: => Unit)(body: => T): Option[T] =
- /** Java doesn't like it if you keep adding and removing shutdown
- * hooks after shutdown has begun, so we trap the failure.
- */
- catching(classOf[IllegalStateException]) opt {
- val t = new Thread() {
- override def run() = {
- setShuttingDown()
- hook
- }
- }
- runtime addShutdownHook t
-
- try body
- finally runtime removeShutdownHook t
- }
-
- /** Search for a directory, possibly given only a name, by starting
- * at the current dir and walking upward looking for it at each level.
- */
- protected def searchForDir(name: String): Directory = {
- val result = Path(name) ifDirectory (x => x.normalize) orElse {
- val cwd = Directory.Current getOrElse error("user.dir property not set")
- val dirs = cwd :: cwd.parents map (_ / name)
-
- Path onlyDirs dirs map (_.normalize) headOption
- }
-
- result getOrElse error("Fatal: could not find directory '%s'" format name)
- }
-
- /** Paths we ignore for most purposes.
- */
- def ignorePath(x: Path) = {
- (x.name startsWith ".") ||
- (x.isDirectory && ((x.name == "lib") || x.hasExtension("obj", "svn")))
- }
- /** Make a possibly relative path absolute using partestDir as the base.
- */
- def absolutize(path: String) = Path(path) toAbsoluteWithRoot partestDir
-
- /** Go on a deleting binge.
- */
- def cleanupAll() {
- if (isNoCleanup)
- return
-
- val (dirCount, fileCount) = (cleanupObjDirs(), cleanupLogs() + cleanupJunk())
- if (dirCount + fileCount > 0)
- normal("Cleaned up %d directories and %d files.\n".format(dirCount, fileCount))
- }
-
- def cleanupObjDirs() = countTrue(allObjDirs collect { case x if x.exists => x.deleteRecursively() })
- def cleanupJunk() = countTrue(allClassFiles collect { case x if x.exists => x.delete() })
- def cleanupLogs() = countTrue(allLogFiles collect { case x if x.exists => x.delete() })
-
- /** Look through every file in the partest directory and ask around
- * to make sure someone knows him. Complain about strangers.
- */
- def validateAll() {
- def denotesTest(p: Path) = allCategories exists (_ denotesTest p)
- def isMSILcheck(p: Path) = p.name endsWith "-msil.check"
-
- def analyzeCategory(cat: DirBasedCategory) = {
- val allTests = cat.enumerate
- val otherPaths = cat.root walkFilter (x => !ignorePath(x)) filterNot (cat denotesTest _) filterNot isMSILcheck toList
- val count = otherPaths.size
-
- println("Validating %d non-test paths in %s.".format(count, cat.kind))
-
- for (path <- otherPaths) {
- (allTests find (_ acknowledges path)) match {
- case Some(test) => if (isVerbose) println(" OK: '%s' is claimed by '%s'".format(path, test.label))
- case _ => println(">> Unknown path '%s'" format path)
- }
- }
- }
-
- allCategories collect { case x: DirBasedCategory => analyzeCategory(x) }
- }
-
- trait TestHousekeeping {
- self: TestEntity =>
-
- /** Calculating derived files. Given a test like
- * files/run/foo.scala or files/run/foo/
- * This creates paths like foo.check, foo.flags, etc.
- */
- def withExtension(extension: String) = categoryDir / "%s.%s".format(label, extension)
-
- /** True for a path if this test acknowledges it belongs to this test.
- * Overridden by some categories.
- */
- def acknowledges(path: Path): Boolean = {
- val loc = location.normalize
- val knownPaths = List(scalaOptsFile, javaOptsFile, commandFile, logFile, checkFile) ++ jarsInTestDir
- def isContainedSource = location.isDirectory && isJavaOrScala(path) && (path.normalize startsWith loc)
-
- (knownPaths exists (_ isSame path)) || isContainedSource
- }
-
- /** This test "responds to" this String. This could mean anything -- it's a
- * way of specifying ad-hoc collections of tests to exercise only a subset of tests.
- * At present it looks for the given String in all the test sources.
- */
- def respondsToString(str: String) = containsString(str)
- def containsString(str: String) = {
- debug("Checking %s for \"%s\"".format(sourceFiles mkString ", ", str))
- sourceFiles map safeSlurp exists (_ contains str)
- }
-
- def possiblyTimed[T](body: => T): T = {
- if (isStats) timed(recordTestTiming(label, _))(body)
- else body
- }
-
- private def prepareForTestRun() = {
- // make sure we have a clean slate
- deleteLog(force = true)
- if (outDir.exists)
- outDir.deleteRecursively()
-
- // recreate object dir
- outDir createDirectory true
- }
- def deleteOutDir() = outDir.deleteRecursively()
- def deleteShutdownHook() = { debug("Shutdown hook deleting " + outDir) ; deleteOutDir() }
-
- protected def runWrappers[T](body: => T): Option[T] = {
- prepareForTestRun()
-
- withShutdownHook(deleteShutdownHook()) {
- loggingOutAndErr {
- val result = possiblyTimed { body }
- if (!isNoCleanup)
- deleteOutDir()
-
- result
- }
- }
- }
-
- override def toString = location.path
- override def equals(other: Any) = other match {
- case x: TestEntity => location.normalize == x.location.normalize
- case _ => false
- }
- override def hashCode = location.normalize.hashCode
- }
-
- private def countTrue(f: => Iterator[Boolean]) = f filter (_ == true) length
-}
\ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/Partest.scala b/src/partest-alternative/scala/tools/partest/Partest.scala
deleted file mode 100644
index 74a3a6a..0000000
--- a/src/partest-alternative/scala/tools/partest/Partest.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-
-import nsc.io._
-import nsc.util._
-import category.AllCategories
-
-/** Global object for a Partest run. It is completely configured by the list
- * of arguments passed to the constructor (although there are a few properties
- * and environment variables which can influence matters.) See PartestSpec.scala
- * for the complete list.
- */
-class Partest(args: List[String]) extends {
- val parsed = PartestSpec(args: _*)
-} with Universe with PartestSpec with cmd.Instance with AllCategories {
-
- if (parsed.propertyArgs.nonEmpty)
- debug("Partest property args: " + fromArgs(parsed.propertyArgs))
-
- debug("Partest created with args: " + fromArgs(args))
-
- def helpMsg = PartestSpec.helpMsg
-
- // The abstract values from Universe.
- lazy val testBuildDir = searchForDir(buildDir)
- lazy val partestDir = searchForDir(rootDir)
- lazy val allCategories = List(Pos, Neg, Run, Jvm, Res, Shootout, Scalap, Scalacheck, BuildManager, Script)
- lazy val selectedCategories = if (isAllImplied) allCategories else specifiedCats
-
- def specifiedTests = parsed.residualArgs map (x => Path(x).normalize)
- def specifiedKinds = testKinds filter (x => isSet(x) || (runSets contains x))
- def specifiedCats = specifiedKinds flatMap (x => allCategories find (_.kind == x))
- def isAllImplied = isAll || (specifiedTests.isEmpty && specifiedKinds.isEmpty)
-
- /** Assembles a filter based on command line options which restrict the test set
- * --grep limits to only matching tests
- * --failed limits to only recently failed tests (log file is present)
- * --<category> limits to only the given tests and categories (but --all overrides)
- * path/to/Test limits to only the given tests and categories
- */
- lazy val filter = {
- def indivFilter(test: TestEntity) = specifiedTests contains test.location.normalize
- def categoryFilter(test: TestEntity) = specifiedCats contains test.category
- def indivOrCat(test: TestEntity) = isAllImplied || indivFilter(test) || categoryFilter(test) // combines previous two
-
- def failedFilter(test: TestEntity) = !isFailed || (test.logFile exists)
- def grepFilter(test: TestEntity) = grepExpr.isEmpty || (test containsString grepExpr.get)
- def combinedFilter(x: TestEntity) = indivOrCat(x) && failedFilter(x) && grepFilter(x) // combines previous three
-
- combinedFilter _
- }
-
- def launchTestSuite() = {
- def onTimeout() = {
- warning("Partest test run timed out after " + timeout + " seconds.\n")
- System.exit(-1)
- }
- val alarm = new Alarmer(AlarmerAction(timeout, () => onTimeout()))
-
- try runSelection(selectedCategories, filter)
- finally alarm.cancelAll()
- }
-}
-
-object Partest {
- def fromBuild(dir: String, args: String*): Partest = apply("--builddir" +: dir +: args: _*)
- def apply(args: String*): Partest = new Partest(args.toList)
-
- // builds without partest jars won't actually work
- def starr() = fromBuild("")
- def locker() = fromBuild("build/locker")
- def quick() = fromBuild("build/quick")
- def pack() = fromBuild("build/pack")
- def strap() = fromBuild("build/strap")
- def dist() = fromBuild("dists/latest")
-}
-
diff --git a/src/partest-alternative/scala/tools/partest/PartestSpec.scala b/src/partest-alternative/scala/tools/partest/PartestSpec.scala
deleted file mode 100644
index 75d94bd..0000000
--- a/src/partest-alternative/scala/tools/partest/PartestSpec.scala
+++ /dev/null
@@ -1,104 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package partest
-
-import nsc.io._
-import cmd._
-
-/** This takes advantage of bits of scala goodness to fully define a command
- * line program with a minimum of duplicated code. When the specification object
- * is created, the vals are evaluated in order and each of them side effects
- * a private accumulator. What emerges is a full list of the valid unary
- * and binary arguments, as well as autogenerated help.
- */
-trait PartestSpec extends Spec with Meta.StdOpts with Interpolation {
- def referenceSpec = PartestSpec
- def programInfo = Spec.Info("partest", "", "scala.tools.partest.Runner")
- private val kind = new Spec.Accumulator[String]()
- protected def testKinds = kind.get
-
- private implicit val tokenizeString = FromString.ArgumentsFromString // String => List[String]
-
- help("""
- |# Pro Tip! Instant bash completion: `partest --bash` (note backticks)
- |Usage: partest [<options>] [<test> <test> ...]
- | <test>: a path to a test designator, typically a .scala file or a directory.
- | Examples: files/pos/test1.scala, files/res/bug785
- |
- | Test categories:""".stripMargin)
-
- val isAll = ("all" / "run all tests (default, unless no options given)" --?)
- (kind("pos") / "Compile files that are expected to build" --?)
- (kind("neg") / "Compile files that are expected to fail" --?)
- (kind("run") / "Test JVM backend" --?)
- (kind("jvm") / "Test JVM backend" --?)
- (kind("res") / "Run resident compiler scenarii" --?)
- (kind("buildmanager") / "Run Build Manager scenarii" --?)
- (kind("scalacheck") / "Run Scalacheck tests" --?)
- (kind("script") / "Run script files" --?)
- (kind("shootout") / "Run shootout tests" --?)
- (kind("scalap") / "Run scalap tests" --?)
-
- heading ("""Test "smart" categories:""")
- val grepExpr = "grep" / "run all tests with a source file containing <expr>" --|
- val isFailed = "failed" / "run all tests which failed on the last run" --?
-
- heading ("Specifying paths and additional flags, ~ means repository root:")
-
- val rootDir = "rootdir" / "path from ~ to partest" defaultTo "test"
- val buildDir = "builddir" / "path from ~ to test build" defaultTo "build/pack"
- val srcDir = "srcdir" / "path from --rootdir to sources" defaultTo "files"
- val javaOpts = "javaopts" / "flags to java on all runs" defaultToEnv "JAVA_OPTS"
- val javacOpts = "javacopts" / "flags to javac on all runs" defaultToEnv "JAVAC_OPTS"
- val scalacOpts = "scalacopts" / "flags to scalac on all tests" defaultToEnv "SCALAC_OPTS"
-
- "pack" / "" expandTo ("--builddir", "build/pack")
- "quick" / "" expandTo ("--builddir", "build/quick")
-
- heading ("Options influencing output:")
- val isTrace = "trace" / "show the individual steps taken by each test" --?
- val isShowDiff = "show-diff" / "show diff between log and check file" --?
- val isShowLog = "show-log" / "show log on failures" --?
- val isDryRun = "dry-run" / "do not run tests, only show their traces." --?
- val isTerse = "terse" / "be less verbose (almost silent except for failures)" --?
- val isVerbose = "verbose" / "be more verbose (additive with --trace)" --?
- val isDebug = "debug" / "maximum debugging output" --?
- val isAnsi = "ansi" / "print output in color" --?
-
- heading ("Other options:")
- val timeout = "timeout" / "Overall timeout in seconds" defaultTo 7200
- val testWarning = "test-warning" / "Test warning in seconds" defaultTo 90
- val testTimeout = "test-timeout" / "Test timeout in seconds" defaultTo 900
- val isCleanup = "cleanup" / "delete all stale files and dirs before run" --?
- val isNoCleanup = "nocleanup" / "do not delete any logfiles or object dirs" --?
- val isStats = "stats" / "collect and print statistics about the tests" --?
- val isValidate = "validate" / "examine test filesystem for inconsistencies" --?
- val isUpdateCheck = "update-check" / "overwrite checkFile if diff fails" --?
-
- "version" / "print version" --> runAndExit(println(Properties.versionMsg))
-
- // no help for anything below this line - secret options
- // mostly intended for property configuration.
- val runSets = ("runsets" --^) getOrElse Nil
- val isNoAlarms = "noalarms" --?
- val isInsideAnt = "is-in-ant" --?
-}
-
-object PartestSpec extends PartestSpec with Property {
- lazy val propMapper = new PropertyMapper(PartestSpec) {
- override def isPassThrough(key: String) = key == "partest.options"
- }
-
- type ThisCommandLine = PartestCommandLine
- class PartestCommandLine(args: List[String]) extends SpecCommandLine(args) {
- override def errorFn(msg: String) = printAndExit("Error: " + msg)
-
- def propertyArgs = PartestSpec.propertyArgs
- }
-
- override def creator(args: List[String]): PartestCommandLine = new PartestCommandLine(args)
-}
diff --git a/src/partest-alternative/scala/tools/partest/Properties.scala b/src/partest-alternative/scala/tools/partest/Properties.scala
deleted file mode 100644
index 2d36f16..0000000
--- a/src/partest-alternative/scala/tools/partest/Properties.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.tools
-package partest
-
-/** Loads partest.properties from the jar. */
-object Properties extends scala.util.PropertiesTrait {
- protected def propCategory = "partest"
- protected def pickJarBasedOn = classOf[Application]
-}
diff --git a/src/partest-alternative/scala/tools/partest/Results.scala b/src/partest-alternative/scala/tools/partest/Results.scala
deleted file mode 100644
index e0fceed..0000000
--- a/src/partest-alternative/scala/tools/partest/Results.scala
+++ /dev/null
@@ -1,121 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-
-import scala.collection.immutable
-
-trait Results {
- self: Universe =>
-
- /** A collection of tests for a Worker.
- */
- case class TestsToRun(entities: List[TestEntity])
-
- /** The response from a Worker who has been given TestsToRun.
- */
- case class ResultsOfRun(results: immutable.Map[TestEntity, TestResult])
-
- /** The result of a single test. (0: OK, 1: FAILED, 2: TIMEOUT)
- */
- sealed abstract class TestResult(val state: Int, val description: String) {
- def entity: TestEntity
-
- def passed = state == 0
- def colorize(s: String): String
- def show(msg: String) =
- if (!isShuttingDown)
- showResult(colorize(description), msg)
-
- private def outputPrefix = if (isInsideAnt) "" else markNormal("partest: ")
- private def name = src relativize entity.location // e.g. "neg/test.scala"
- private def showResult(status: String, extraMsg: String) =
- normal(outputPrefix + "[...]/%-40s [%s] %s\n".format(name, status, extraMsg))
-
- override def equals(other: Any) = other match {
- case x: TestResult => entity == x.entity
- case _ => false
- }
- override def hashCode = entity.hashCode
- override def toString = "%s [%s]".format(entity, description)
- }
-
- class Success(val entity: TestEntity) extends TestResult(0, " OK ") {
- def colorize(s: String) = markSuccess(s)
- override def show(msg: String) = if (!isTerse) super.show(msg)
- }
- class Failure(val entity: TestEntity) extends TestResult(1, " FAILED ") {
- def colorize(s: String) = markFailure(s)
-
- override def show(msg: String) = {
- super.show(msg)
-
- if (isShowDiff || isTrace)
- normal(entity.diffOutput)
-
- if (isShowLog || isTrace)
- normal(toStringTrunc(entity.failureMessage(), 1600))
- }
- override def toString = List(super.toString, toStringTrunc(entity.failureMessage(), 400)) mkString "\n"
- }
- class Timeout(val entity: TestEntity) extends TestResult(2, "TIME OUT") {
- def colorize(s: String) = markFailure(s)
- }
-
- object TestResult {
- def apply(entity: TestEntity, success: Boolean) =
- if (success) new Success(entity)
- else new Failure(entity)
-
- def apply(entity: TestEntity, state: Int) = state match {
- case 0 => new Success(entity)
- case 1 => new Failure(entity)
- case 2 => new Timeout(entity)
- }
- def unapply(x: Any) = x match {
- case x: TestResult => Some((x.entity, x.state))
- case _ => None
- }
- }
-
- /** The combined results of any number of tests.
- */
- case class CombinedTestResults(
- passed: Int,
- failed: Int,
- elapsedMilliseconds: Long,
- failures: List[TestResult]
- ) {
- // housekeeping
- val elapsedSecs = elapsedMilliseconds / 1000
- val elapsedMins = elapsedSecs / 60
- val elapsedHrs = elapsedMins / 60
- val dispMins = elapsedMins - elapsedHrs * 60
- val dispSecs = elapsedSecs - elapsedMins * 60
-
- def total = passed + failed
- def hasFailures = failed > 0
- def exitCode = if (expectedErrors == failed) 0 else 1
-
- def ++(x: CombinedTestResults) = CombinedTestResults(
- passed + x.passed,
- failed + x.failed,
- elapsedMilliseconds + x.elapsedMilliseconds,
- failures ::: x.failures
- )
-
- def elapsedString = "%02d:%02d:%02d".format(elapsedHrs, dispMins, dispSecs)
- def failuresString = {
- if (failures.isEmpty) ""
- else "Summary of failures:" :: failures mkString ("\n", "\n", "")
- }
-
- override def toString =
- if (total == 0) "There were no tests to run."
- else if (isDryRun) "%d tests would be run." format total
- else if (hasFailures) "%d of %d tests failed (elapsed time: %s)".format(failed, total, elapsedString) + failuresString
- else "All %d tests were successful (elapsed time: %s)".format(total, elapsedString)
- }
-}
\ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/Runner.scala b/src/partest-alternative/scala/tools/partest/Runner.scala
deleted file mode 100644
index 7fe2c98..0000000
--- a/src/partest-alternative/scala/tools/partest/Runner.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools
-package partest
-
-import nsc.io._
-
-object Runner {
- def main(args: Array[String]) {
- val runner = Partest(args: _*)
- import runner._
-
- if (args.isEmpty) return println(helpMsg)
- if (isValidate) return validateAll()
-
- printConfigBanner()
-
- if (isCleanup)
- cleanupAll()
-
- val result = launchTestSuite()
- val exitCode = result.exitCode
- val message = "\n" + result + "\n"
-
- if (exitCode == 0) success(message)
- else failure(message)
-
- if (isStats)
- showTestStatistics()
-
- System exit exitCode
- }
-}
diff --git a/src/partest-alternative/scala/tools/partest/Statistics.scala b/src/partest-alternative/scala/tools/partest/Statistics.scala
deleted file mode 100644
index 852963c..0000000
--- a/src/partest-alternative/scala/tools/partest/Statistics.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools
-package partest
-
-import scala.collection.mutable.HashMap
-
-trait Statistics {
- /** Only collected when --stats is given. */
- lazy val testStatistics = new HashMap[String, Long]
-
- /** Given function and block of code, evaluates code block,
- * calls function with milliseconds elapsed, and returns block result.
- */
- def timed[T](f: Long => Unit)(body: => T): T = {
- val start = System.currentTimeMillis
- val result = body
- val end = System.currentTimeMillis
-
- f(end - start)
- result
- }
- /** Times body and returns both values.
- */
- def timed2[T](body: => T): (Long, T) = {
- var milliSeconds = 0L
- val result = timed(x => milliSeconds = x)(body)
-
- (milliSeconds, result)
- }
-
- def resultsToStatistics(results: Iterable[(_, Int)]): (Int, Int) =
- (results partition (_._2 == 0)) match {
- case (winners, losers) => (winners.size, losers.size)
- }
-
- def recordTestTiming(name: String, milliseconds: Long) =
- synchronized { testStatistics(name) = milliseconds }
-
- def showTestStatistics() {
- testStatistics.toList sortBy (-_._2) foreach { case (k, v) => println("%s: %.2f seconds".format(k, (v.toDouble / 1000))) }
- }
-}
diff --git a/src/partest-alternative/scala/tools/partest/Universe.scala b/src/partest-alternative/scala/tools/partest/Universe.scala
deleted file mode 100644
index 3dd79e4..0000000
--- a/src/partest-alternative/scala/tools/partest/Universe.scala
+++ /dev/null
@@ -1,96 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools
-package partest
-
-import nsc.io._
-import category.AllCategories
-import io.Logging
-
-/** The high level view of the partest infrastructure.
- */
-abstract class Universe
- extends Entities
- with BuildContributors
- with Logging
- with Dispatcher
- with Statistics
- with Housekeeping
- with Results
- with PartestCompilation
- with PartestSpec
- with Config
- with Alarms
- with Actions
- with Categories {
-
- /** The abstract values from which all else is derived. */
- def partestDir: Directory
- def testBuildDir: Directory
- def allCategories: List[TestCategory]
- def selectedCategories: List[TestCategory]
-
- /** Some plausibly abstract types. */
- type TestBuild <: BuildContributor // e.g. quick, pack
- type TestCategory <: AbsTestCategory // e.g. pos, neg, run
- type TestEntity <: AbsTestEntity // e.g. files/pos/test25.scala
- type TestSequence <: AbsTestSequence // e.g. compile, run, diff
-
- /** Although TestStep isn't much more than Function1 right now,
- * it exists this way so it can become more capable.
- */
- implicit def f1ToTestStep(f: TestEntity => Boolean): TestStep =
- new TestStep { def apply(test: TestEntity) = f(test) }
-
- abstract class TestStep extends (TestEntity => Boolean) {
- def apply(test: TestEntity): Boolean
- }
-
- /** An umbrella category of tests, such as "pos" or "run".
- */
- trait AbsTestCategory extends BuildContributor {
- type TestSettings
-
- def kind: String
- def testSequence: TestSequence
- def denotesTest(location: Path): Boolean
-
- def createTest(location: Path): TestEntity
- def createSettings(entity: TestEntity): TestSettings
- def enumerate: List[TestEntity]
- }
-
- /** A single test. It may involve multiple files, but only a
- * single path is used to designate it.
- */
- trait AbsTestEntity extends BuildContributor {
- def category: TestCategory
- def location: Path
- def onException(x: Throwable): Unit
- def testClasspath: String
-
- /** Most tests will use the sequence defined by the category,
- * but the test can override and define a custom sequence.
- */
- def testSequence: TestSequence
-
- /** True if this test recognizes the given path as a piece of it.
- * For validation purposes.
- */
- def acknowledges(path: Path): Boolean
- }
-
- /** Every TestEntity is partly characterized by a series of actions
- * which are applied to the TestEntity in the given order. The test
- * passes if all those actions return true, fails otherwise.
- */
- trait AbsTestSequence {
- def actions: List[TestStep]
- }
-}
\ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/ant/JavaTask.scala b/src/partest-alternative/scala/tools/partest/ant/JavaTask.scala
deleted file mode 100644
index f8c0133..0000000
--- a/src/partest-alternative/scala/tools/partest/ant/JavaTask.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools
-package partest
-package ant
-
-import org.apache.tools.ant.Task
-import org.apache.tools.ant.taskdefs.Java
-import org.apache.tools.ant.types.Environment
-
-import scala.tools.nsc.io._
-import scala.tools.nsc.util.ClassPath
-import cmd.Spec._
-
-class JavaTask extends Java {
- override def getTaskName() = "partest"
- private val scalaRunnerClass = "scala.tools.nsc.MainGenericRunner"
- private val partestRunnerClass = "scala.tools.partest.Runner"
- def defaultJvmArgs = "-Xms64M -Xmx768M -Xss768K -XX:MaxPermSize=96M"
-
- protected def rootDir = prop("partest.rootdir") getOrElse (baseDir / "test").path
- protected def partestJVMArgs = prop("partest.jvm.args") getOrElse defaultJvmArgs
- protected def runnerArgs = List("-usejavacp", partestRunnerClass, "--javaopts", partestJVMArgs)
-
- private def baseDir = Directory(getProject.getBaseDir)
- private def prop(s: String) = Option(getProject getProperty s)
- private def jvmline(s: String) = returning(createJvmarg())(_ setLine s)
- private def addArg(s: String) = returning(createArg())(_ setValue s)
-
- private def newKeyValue(key: String, value: String) =
- returning(new Environment.Variable)(x => { x setKey key ; x setValue value })
-
- def setDefaults() {
- setFork(true)
- setFailonerror(true)
- getProject.setSystemProperties()
- setClassname(scalaRunnerClass)
- addSysproperty(newKeyValue("partest.is-in-ant", "true"))
- jvmline(partestJVMArgs)
- runnerArgs foreach addArg
-
- // do we want basedir or rootDir to be the cwd?
- // setDir(Path(rootDir).jfile)
- }
-
- override def init() = {
- super.init()
- setDefaults()
- }
-}
-
diff --git a/src/partest-alternative/scala/tools/partest/antlib.xml b/src/partest-alternative/scala/tools/partest/antlib.xml
deleted file mode 100644
index af36f11..0000000
--- a/src/partest-alternative/scala/tools/partest/antlib.xml
+++ /dev/null
@@ -1,3 +0,0 @@
-<antlib>
- <taskdef name="partest" classname="scala.tools.partest.ant.JavaTask"/>
-</antlib>
diff --git a/src/partest-alternative/scala/tools/partest/category/AllCategories.scala b/src/partest-alternative/scala/tools/partest/category/AllCategories.scala
deleted file mode 100644
index 1c3f4c9..0000000
--- a/src/partest-alternative/scala/tools/partest/category/AllCategories.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools
-package partest
-package category
-
-trait AllCategories extends Compiler with Analysis with Runner {
- self: Universe =>
-
- object Pos extends DirBasedCategory("pos") { lazy val testSequence: TestSequence = List(compile) }
- object Neg extends DirBasedCategory("neg") { lazy val testSequence: TestSequence = List(checkFileRequired, not(compile), diff) }
- object Run extends DirBasedCategory("run") { lazy val testSequence: TestSequence = List(compile, run, diff) }
- object Jvm extends DirBasedCategory("jvm") { lazy val testSequence: TestSequence = List(compile, run, diff) }
-}
diff --git a/src/partest-alternative/scala/tools/partest/category/Analysis.scala b/src/partest-alternative/scala/tools/partest/category/Analysis.scala
deleted file mode 100644
index 944f8c6..0000000
--- a/src/partest-alternative/scala/tools/partest/category/Analysis.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-package category
-
-import java.lang.{ ClassLoader => JavaClassLoader }
-import java.net.URL
-import nsc.util.ScalaClassLoader
-import nsc.io._
-
-class PartestClassLoader(urls: Array[URL], parent: JavaClassLoader) extends ScalaClassLoader.URLClassLoader(urls, parent) {
- def this(urls: Array[URL]) = this(urls, null)
- def bytes(path: String) = findBytesForClassName(path)
- def singleton(path: String) = tryToInitializeClass(path).get getField "MODULE$" get null
-
- /** Calls a method in an object via reflection.
- */
- def apply[T](className: String, methodName: String)(args: Any*): T = {
- def fail = error("Reflection failed on %s.%s".format(className, methodName))
- val clazz = tryToLoadClass(className) getOrElse fail
- val obj = singleton(className)
- val m = clazz.getMethods find (x => x.getName == methodName && x.getParameterTypes.size == args.size) getOrElse fail
-
- m.invoke(obj, args map (_.asInstanceOf[AnyRef]): _*).asInstanceOf[T]
- }
-}
-
-trait Analysis {
- self: Universe =>
-
- object Scalap extends DirBasedCategory("scalap") {
- val testSequence: TestSequence = List(checkFileRequired, compile, run, diff)
- override def denotesTest(p: Path) = p.isDirectory && (p.toDirectory.files exists (_.name == "result.test"))
- override def createTest(location: Path) = new ScalapTest(location)
-
- class ScalapTest(val location: Path) extends TestEntity {
- val category = Scalap
- val scalapMain = "scala.tools.scalap.Main$"
- val scalapMethod = "decompileScala"
-
- override def classpathPaths = super.classpathPaths :+ build.scalap
- override def checkFile = File(location / "result.test")
-
- private def runnerURLs = build.classpathPaths ::: classpathPaths map (_.toURL)
- private def createClassLoader = new PartestClassLoader(runnerURLs.toArray, this.getClass.getClassLoader)
-
- val isPackageObject = containsString("package object")
- val suffix = if (isPackageObject) ".package" else ""
- val className = location.name.capitalize + suffix
-
- override def run() = loggingResult {
- def loader = createClassLoader
- def bytes = loader.bytes(className)
-
- trace("scalap %s".format(className))
- if (isDryRun) ""
- else loader[String](scalapMain, scalapMethod)(bytes, isPackageObject)
- }
- }
- }
-}
diff --git a/src/partest-alternative/scala/tools/partest/category/Compiler.scala b/src/partest-alternative/scala/tools/partest/category/Compiler.scala
deleted file mode 100644
index 6b65072..0000000
--- a/src/partest-alternative/scala/tools/partest/category/Compiler.scala
+++ /dev/null
@@ -1,140 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-package category
-
-import nsc.io._
-import nsc.reporters._
-import nsc.{ Settings, CompilerCommand }
-import scala.tools.nsc.interactive.RefinedBuildManager
-import util.copyPath
-
-trait Compiler {
- self: Universe =>
-
- /** Resident Compiler.
- * $SCALAC -d dir.obj -Xresident -sourcepath . "$@"
- */
- object Res extends DirBasedCategory("res") {
- lazy val testSequence: TestSequence = List(checkFileRequired, compile, diff)
-
- override def denotesTest(p: Path) = p.isDirectory && resFile(p).isFile
- override def createTest(location: Path) = new ResidentTest(location.toDirectory)
-
- override def createSettings(entity: TestEntity): TestSettings =
- returning(super.createSettings(entity)) { settings =>
- settings.resident.value = true
- settings.sourcepath.value = entity.sourcesDir.path
- }
-
- class ResidentTest(val location: Directory) extends TestEntity {
- val category = Res
- override def sourcesDir = categoryDir
-
- override def acknowledges(p: Path) =
- super.acknowledges(p) || (resFile(location) isSame p)
-
- private def residentCompilerCommands = safeLines(resFile(location))
- private def compileResident(global: PartestGlobal, lines: List[String]) = {
- def printPrompt = global inform "nsc> "
- val results =
- lines map { line =>
- printPrompt
- trace("compile " + line)
- isDryRun || global.partestCompile(toArgs(line) map (categoryDir / _ path), false)
- }
-
- printPrompt
-
- /** Note - some res tests are really "neg" style tests, so we can't
- * use the return value of the compile. The diff catches failures.
- */
- true // results forall (_ == true)
- }
-
- override def compile() = compileResident(newGlobal(Nil)._1, residentCompilerCommands)
- }
- private[Res] def resFile(p: Path) = p.toFile addExtension "res"
- }
-
- object BuildManager extends DirBasedCategory("buildmanager") {
- lazy val testSequence: TestSequence = List(checkFileRequired, compile, diff)
- override def denotesTest(p: Path) = p.isDirectory && testFile(p).isFile
- override def createTest(location: Path) = new BuildManagerTest(location.toDirectory)
-
- override def createSettings(entity: TestEntity): TestSettings =
- returning[TestSettings](super.createSettings(entity)) { settings =>
- settings.Ybuildmanagerdebug.value = true
- settings.sourcepath.value = entity.sourcesDir.path
- }
-
- class PartestBuildManager(settings: Settings, val reporter: ConsoleReporter) extends RefinedBuildManager(settings) {
- def errorFn(msg: String) = Console println msg
-
- override protected def newCompiler(newSettings: Settings) =
- new BuilderGlobal(newSettings, reporter)
-
- private def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] =
- fs flatMap (s => Option(AbstractFile getFile (Path(settings.sourcepath.value) / s path))) toSet
-
- def buildManagerCompile(line: String): Boolean = {
- val prompt = "builder > "
- reporter printMessage (prompt + line)
- val command = new CompilerCommand(toArgs(line), settings)
- val files = filesToSet(settings.sourcepath.value, command.files)
-
- update(files, Set.empty)
- true
- }
- }
-
- private[BuildManager] def testFile(p: Path) = (p / p.name addExtension "test").toFile
-
- class BuildManagerTest(val location: Directory) extends TestEntity {
- val category = BuildManager
-
- override def sourcesDir = outDir
- override def sourceFiles = Path onlyFiles (location walkFilter (_ != changesDir) filter isJavaOrScala toList)
- override def checkFile = File(location / location.name addExtension "check")
-
- override def acknowledges(p: Path) = super.acknowledges(p) || (p isSame testFile(location))
-
- def buildManagerCommands = safeLines(testFile(location))
- def changesDir = Directory(location / (location.name + ".changes"))
-
- override def compile() = {
- val settings = createSettings(this)
- val pbm = new PartestBuildManager(settings, newReporter(settings))
-
- // copy files
- for (source <- sourceFiles) {
- val target = outDir / (location.normalize relativize source)
- copyPath(source, target.toFile)
- }
-
- def runUpdate(line: String) = {
- val Array(srcName, replacement) = line split "=>"
- copyPath(File(changesDir / replacement), File(outDir / srcName))
- }
-
- def sendCommand(line: String): Boolean = {
- val compileRegex = """^>>compile (.*)$""".r
- val updateRegex = """^>>update\s+(.*)""".r
- trace("send: " + (line drop 2))
-
- isDryRun || (line match {
- case compileRegex(xs) => pbm.buildManagerCompile(xs)
- case updateRegex(line) => runUpdate(line)
- })
- }
-
- // send each line to the build manager
- buildManagerCommands forall sendCommand
- }
- }
- }
-}
-
diff --git a/src/partest-alternative/scala/tools/partest/category/Runner.scala b/src/partest-alternative/scala/tools/partest/category/Runner.scala
deleted file mode 100644
index add1c55..0000000
--- a/src/partest-alternative/scala/tools/partest/category/Runner.scala
+++ /dev/null
@@ -1,108 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.tools
-package partest
-package category
-
-import nsc.io._
-
-trait Runner {
- self: Universe =>
-
- /** Shootout.
- */
- object Shootout extends DirBasedCategory("shootout") {
- lazy val testSequence: TestSequence = List(compile, run, diff)
-
- override def denotesTest(p: Path) = isScala(p) && runner(p).isFile
- override def createTest(location: Path) = new ShootoutTest(location.toFile)
-
- class ShootoutTest(val location: File) extends TestEntity {
- val category = Shootout
- // The files in shootout are very free form, so acknowledge anything close.
- override def acknowledges(p: Path) =
- (p.parent.normalize isSame Shootout.root) && (p.name startsWith label)
-
- private def generated = File(outDir / "test.scala")
- private def runnerFile = runner(location)
- override def sourceFiles = List(generated)
-
- override def compile() = {
- trace("generate %s from %s, %s".format(tracePath(generated), tracePath(location), tracePath(runnerFile)))
- // generate source file (even on dry run, we need the path)
- generated.writeAll(location.slurp(), runnerFile.slurp())
-
- // compile generated file
- super.compile()
- }
- }
-
- private[Shootout] def runner(p: Path) = p addExtension "runner" toFile
- }
-
- object Scalacheck extends DirBasedCategory("scalacheck") {
- lazy val testSequence: TestSequence = List(compile, run)
- override def createTest(location: Path) = new ScalacheckTest(location)
-
- class ScalacheckTest(val location: Path) extends TestEntity {
- val category = Scalacheck
-
- import build.{ scalacheck, forkjoin }
- import org.scalacheck.Properties
- import org.scalacheck.Test.{ checkProperties, defaultParams, Result }
-
- override def classpathPaths = super.classpathPaths ::: List(scalacheck, forkjoin)
- private def arrayURLs = Array(scalacheck, outDir) map (_.toURL)
-
- /** For reasons I'm not entirely clear on, I've written all this
- * to avoid a source dependency on scalacheck.
- */
- class ScalacheckClassLoader extends PartestClassLoader(arrayURLs, this.getClass.getClassLoader) {
- type ScalacheckResult = { def passed: Boolean }
-
- def propCallback(name: String, passed: Int, discarded: Int): Unit = ()
- def testCallback(name: String, result: AnyRef): Unit = ()
-
- val test = singleton("Test$")
- val params = apply[AnyRef]("org.scalacheck.Test$", "defaultParams")()
- val result = apply[Seq[(String, AnyRef)]]("org.scalacheck.Test$", "checkProperties")(test, params, propCallback _, testCallback _)
-
- def allResults() =
- for ((prop, res) <- result) yield {
- ScalacheckTest.this.trace("%s: %s".format(prop, res))
- res.asInstanceOf[ScalacheckResult].passed
- }
-
- def check() = allResults forall (_ == true)
- }
-
- override def run() = {
- trace("scalacheck runs via classloader with: %s".format(arrayURLs mkString ", "))
- isDryRun || (new ScalacheckClassLoader check)
- }
- }
- }
-
- object Script extends DirBasedCategory("script") {
- val testSequence: TestSequence = List(exec, diff)
- override def createTest(location: Path) = new ScriptTest(location)
-
- class ScriptTest(val location: Path) extends TestEntity {
- val category = Script
- val scriptFile = if (location.isDirectory) location / (label + ".scala") else location
- val argsFile = withExtension("args").toFile
- def batFile = scriptFile changeExtension "bat"
- def script = if (Properties.isWin) batFile else scriptFile
-
- override def acknowledges(p: Path) = super.acknowledges(p) || (List(argsFile, batFile) exists (_ isSame p))
- override def execCwd = Some(sourcesDir)
- override def argumentsToExec = script.path :: safeArgs(argsFile)
- }
- }
-}
\ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/io/ANSIWriter.scala b/src/partest-alternative/scala/tools/partest/io/ANSIWriter.scala
deleted file mode 100644
index 59216cf..0000000
--- a/src/partest-alternative/scala/tools/partest/io/ANSIWriter.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools
-package partest
-package io
-
-import java.io.{ Writer, PrintWriter, OutputStream, OutputStreamWriter }
-
-object ANSIWriter {
- val NONE = 0
- val SOME = 1
- val MANY = 2
-
- def apply(isAnsi: Boolean) = if (isAnsi) MANY else NONE
-}
-import ANSIWriter._
-
-class ANSIWriter(writer: Writer) extends PrintWriter(writer, true) {
- def this(out: OutputStream) = this(new OutputStreamWriter(out))
- def colorful: Int = NONE
-
- protected val manyColors = List(
- Console.BOLD + Console.BLACK,
- Console.BOLD + Console.GREEN,
- Console.BOLD + Console.RED,
- Console.BOLD + Console.YELLOW,
- Console.RESET
- )
- protected val someColors = List(
- Console.BOLD + Console.BLACK,
- Console.RESET,
- Console.BOLD + Console.BLACK,
- Console.BOLD + Console.BLACK,
- Console.RESET
- )
- protected val noColors = List("", "", "", "", "")
-
- lazy val List(_outline, _success, _failure, _warning, _default) = colorful match {
- case NONE => noColors
- case SOME => someColors
- case MANY => manyColors
- case _ => noColors
- }
-
- private def wrprint(msg: String): Unit = synchronized {
- print(msg)
- flush()
- }
-
- def outline(msg: String) = wrprint(_outline + msg + _default)
- def success(msg: String) = wrprint(_success + msg + _default)
- def failure(msg: String) = wrprint(_failure + msg + _default)
- def warning(msg: String) = wrprint(_warning + msg + _default)
- def normal(msg: String) = wrprint(_default + msg)
-}
diff --git a/src/partest-alternative/scala/tools/partest/io/Diff.java b/src/partest-alternative/scala/tools/partest/io/Diff.java
deleted file mode 100644
index 69428d7..0000000
--- a/src/partest-alternative/scala/tools/partest/io/Diff.java
+++ /dev/null
@@ -1,873 +0,0 @@
-
-package scala.tools.partest.io;
-
-import java.util.Hashtable;
-
-/** A class to compare IndexedSeqs of objects. The result of comparison
- is a list of <code>change</code> objects which form an
- edit script. The objects compared are traditionally lines
- of text from two files. Comparison options such as "ignore
- whitespace" are implemented by modifying the <code>equals</code>
- and <code>hashcode</code> methods for the objects compared.
-<p>
- The basic algorithm is described in: </br>
- "An O(ND) Difference Algorithm and its Variations", Eugene Myers,
- Algorithmica Vol. 1 No. 2, 1986, p 251.
-<p>
- This class outputs different results from GNU diff 1.15 on some
- inputs. Our results are actually better (smaller change list, smaller
- total size of changes), but it would be nice to know why. Perhaps
- there is a memory overwrite bug in GNU diff 1.15.
-
- @author Stuart D. Gathman, translated from GNU diff 1.15
- Copyright (C) 2000 Business Management Systems, Inc.
-<p>
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 1, or (at your option)
- any later version.
-<p>
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-<p>
- You should have received a copy of the <a href=COPYING.txt>
- GNU General Public License</a>
- along with this program; if not, write to the Free Software
- Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
-
- */
-
-public class Diff {
-
- /** Prepare to find differences between two arrays. Each element of
- the arrays is translated to an "equivalence number" based on
- the result of <code>equals</code>. The original Object arrays
- are no longer needed for computing the differences. They will
- be needed again later to print the results of the comparison as
- an edit script, if desired.
- */
- public Diff(Object[] a,Object[] b) {
- Hashtable h = new Hashtable(a.length + b.length);
- filevec[0] = new file_data(a,h);
- filevec[1] = new file_data(b,h);
- }
-
- /** 1 more than the maximum equivalence value used for this or its
- sibling file. */
- private int equiv_max = 1;
-
- /** When set to true, the comparison uses a heuristic to speed it up.
- With this heuristic, for files with a constant small density
- of changes, the algorithm is linear in the file size. */
- public boolean heuristic = false;
-
- /** When set to true, the algorithm returns a guarranteed minimal
- set of changes. This makes things slower, sometimes much slower. */
- public boolean no_discards = false;
-
- private int[] xvec, yvec; /* IndexedSeqs being compared. */
- private int[] fdiag; /* IndexedSeq, indexed by diagonal, containing
- the X coordinate of the point furthest
- along the given diagonal in the forward
- search of the edit matrix. */
- private int[] bdiag; /* IndexedSeq, indexed by diagonal, containing
- the X coordinate of the point furthest
- along the given diagonal in the backward
- search of the edit matrix. */
- private int fdiagoff, bdiagoff;
- private final file_data[] filevec = new file_data[2];
- private int cost;
-
- /** Find the midpoint of the shortest edit script for a specified
- portion of the two files.
-
- We scan from the beginnings of the files, and simultaneously from the ends,
- doing a breadth-first search through the space of edit-sequence.
- When the two searches meet, we have found the midpoint of the shortest
- edit sequence.
-
- The value returned is the number of the diagonal on which the midpoint lies.
- The diagonal number equals the number of inserted lines minus the number
- of deleted lines (counting only lines before the midpoint).
- The edit cost is stored into COST; this is the total number of
- lines inserted or deleted (counting only lines before the midpoint).
-
- This function assumes that the first lines of the specified portions
- of the two files do not match, and likewise that the last lines do not
- match. The caller must trim matching lines from the beginning and end
- of the portions it is going to specify.
-
- Note that if we return the "wrong" diagonal value, or if
- the value of bdiag at that diagonal is "wrong",
- the worst this can do is cause suboptimal diff output.
- It cannot cause incorrect diff output. */
-
- private int diag (int xoff, int xlim, int yoff, int ylim) {
- final int[] fd = fdiag; // Give the compiler a chance.
- final int[] bd = bdiag; // Additional help for the compiler.
- final int[] xv = xvec; // Still more help for the compiler.
- final int[] yv = yvec; // And more and more . . .
- final int dmin = xoff - ylim; // Minimum valid diagonal.
- final int dmax = xlim - yoff; // Maximum valid diagonal.
- final int fmid = xoff - yoff; // Center diagonal of top-down search.
- final int bmid = xlim - ylim; // Center diagonal of bottom-up search.
- int fmin = fmid, fmax = fmid; // Limits of top-down search.
- int bmin = bmid, bmax = bmid; // Limits of bottom-up search.
- /* True if southeast corner is on an odd
- diagonal with respect to the northwest. */
- final boolean odd = (fmid - bmid & 1) != 0;
-
- fd[fdiagoff + fmid] = xoff;
- bd[bdiagoff + bmid] = xlim;
-
- for (int c = 1;; ++c)
- {
- int d; /* Active diagonal. */
- boolean big_snake = false;
-
- /* Extend the top-down search by an edit step in each diagonal. */
- if (fmin > dmin)
- fd[fdiagoff + --fmin - 1] = -1;
- else
- ++fmin;
- if (fmax < dmax)
- fd[fdiagoff + ++fmax + 1] = -1;
- else
- --fmax;
- for (d = fmax; d >= fmin; d -= 2)
- {
- int x, y, oldx, tlo = fd[fdiagoff + d - 1], thi = fd[fdiagoff + d + 1];
-
- if (tlo >= thi)
- x = tlo + 1;
- else
- x = thi;
- oldx = x;
- y = x - d;
- while (x < xlim && y < ylim && xv[x] == yv[y]) {
- ++x; ++y;
- }
- if (x - oldx > 20)
- big_snake = true;
- fd[fdiagoff + d] = x;
- if (odd && bmin <= d && d <= bmax && bd[bdiagoff + d] <= fd[fdiagoff + d])
- {
- cost = 2 * c - 1;
- return d;
- }
- }
-
- /* Similar extend the bottom-up search. */
- if (bmin > dmin)
- bd[bdiagoff + --bmin - 1] = Integer.MAX_VALUE;
- else
- ++bmin;
- if (bmax < dmax)
- bd[bdiagoff + ++bmax + 1] = Integer.MAX_VALUE;
- else
- --bmax;
- for (d = bmax; d >= bmin; d -= 2)
- {
- int x, y, oldx, tlo = bd[bdiagoff + d - 1], thi = bd[bdiagoff + d + 1];
-
- if (tlo < thi)
- x = tlo;
- else
- x = thi - 1;
- oldx = x;
- y = x - d;
- while (x > xoff && y > yoff && xv[x - 1] == yv[y - 1]) {
- --x; --y;
- }
- if (oldx - x > 20)
- big_snake = true;
- bd[bdiagoff + d] = x;
- if (!odd && fmin <= d && d <= fmax && bd[bdiagoff + d] <= fd[fdiagoff + d])
- {
- cost = 2 * c;
- return d;
- }
- }
-
- /* Heuristic: check occasionally for a diagonal that has made
- lots of progress compared with the edit distance.
- If we have any such, find the one that has made the most
- progress and return it as if it had succeeded.
-
- With this heuristic, for files with a constant small density
- of changes, the algorithm is linear in the file size. */
-
- if (c > 200 && big_snake && heuristic)
- {
- int best = 0;
- int bestpos = -1;
-
- for (d = fmax; d >= fmin; d -= 2)
- {
- int dd = d - fmid;
- if ((fd[fdiagoff + d] - xoff)*2 - dd > 12 * (c + (dd > 0 ? dd : -dd)))
- {
- if (fd[fdiagoff + d] * 2 - dd > best
- && fd[fdiagoff + d] - xoff > 20
- && fd[fdiagoff + d] - d - yoff > 20)
- {
- int k;
- int x = fd[fdiagoff + d];
-
- /* We have a good enough best diagonal;
- now insist that it end with a significant snake. */
- for (k = 1; k <= 20; k++)
- if (xvec[x - k] != yvec[x - d - k])
- break;
-
- if (k == 21)
- {
- best = fd[fdiagoff + d] * 2 - dd;
- bestpos = d;
- }
- }
- }
- }
- if (best > 0)
- {
- cost = 2 * c - 1;
- return bestpos;
- }
-
- best = 0;
- for (d = bmax; d >= bmin; d -= 2)
- {
- int dd = d - bmid;
- if ((xlim - bd[bdiagoff + d])*2 + dd > 12 * (c + (dd > 0 ? dd : -dd)))
- {
- if ((xlim - bd[bdiagoff + d]) * 2 + dd > best
- && xlim - bd[bdiagoff + d] > 20
- && ylim - (bd[bdiagoff + d] - d) > 20)
- {
- /* We have a good enough best diagonal;
- now insist that it end with a significant snake. */
- int k;
- int x = bd[bdiagoff + d];
-
- for (k = 0; k < 20; k++)
- if (xvec[x + k] != yvec[x - d + k])
- break;
- if (k == 20)
- {
- best = (xlim - bd[bdiagoff + d]) * 2 + dd;
- bestpos = d;
- }
- }
- }
- }
- if (best > 0)
- {
- cost = 2 * c - 1;
- return bestpos;
- }
- }
- }
- }
-
- /** Compare in detail contiguous subsequences of the two files
- which are known, as a whole, to match each other.
-
- The results are recorded in the IndexedSeqs filevec[N].changed_flag, by
- storing a 1 in the element for each line that is an insertion or deletion.
-
- The subsequence of file 0 is [XOFF, XLIM) and likewise for file 1.
-
- Note that XLIM, YLIM are exclusive bounds.
- All line numbers are origin-0 and discarded lines are not counted. */
-
- private void compareseq (int xoff, int xlim, int yoff, int ylim) {
- /* Slide down the bottom initial diagonal. */
- while (xoff < xlim && yoff < ylim && xvec[xoff] == yvec[yoff]) {
- ++xoff; ++yoff;
- }
- /* Slide up the top initial diagonal. */
- while (xlim > xoff && ylim > yoff && xvec[xlim - 1] == yvec[ylim - 1]) {
- --xlim; --ylim;
- }
-
- /* Handle simple cases. */
- if (xoff == xlim)
- while (yoff < ylim)
- filevec[1].changed_flag[1+filevec[1].realindexes[yoff++]] = true;
- else if (yoff == ylim)
- while (xoff < xlim)
- filevec[0].changed_flag[1+filevec[0].realindexes[xoff++]] = true;
- else
- {
- /* Find a point of correspondence in the middle of the files. */
-
- int d = diag (xoff, xlim, yoff, ylim);
- int c = cost;
- int f = fdiag[fdiagoff + d];
- int b = bdiag[bdiagoff + d];
-
- if (c == 1)
- {
- /* This should be impossible, because it implies that
- one of the two subsequences is empty,
- and that case was handled above without calling `diag'.
- Let's verify that this is true. */
- throw new IllegalArgumentException("Empty subsequence");
- }
- else
- {
- /* Use that point to split this problem into two subproblems. */
- compareseq (xoff, b, yoff, b - d);
- /* This used to use f instead of b,
- but that is incorrect!
- It is not necessarily the case that diagonal d
- has a snake from b to f. */
- compareseq (b, xlim, b - d, ylim);
- }
- }
- }
-
- /** Discard lines from one file that have no matches in the other file.
- */
-
- private void discard_confusing_lines() {
- filevec[0].discard_confusing_lines(filevec[1]);
- filevec[1].discard_confusing_lines(filevec[0]);
- }
-
- private boolean inhibit = false;
-
- /** Adjust inserts/deletes of blank lines to join changes
- as much as possible.
- */
-
- private void shift_boundaries() {
- if (inhibit)
- return;
- filevec[0].shift_boundaries(filevec[1]);
- filevec[1].shift_boundaries(filevec[0]);
- }
-
- public interface ScriptBuilder {
- /** Scan the tables of which lines are inserted and deleted,
- producing an edit script.
- @param changed0 true for lines in first file which do not match 2nd
- @param len0 number of lines in first file
- @param changed1 true for lines in 2nd file which do not match 1st
- @param len1 number of lines in 2nd file
- @return a linked list of changes - or null
- */
- public change build_script(
- boolean[] changed0,int len0,
- boolean[] changed1,int len1
- );
- }
-
- /** Scan the tables of which lines are inserted and deleted,
- producing an edit script in reverse order. */
-
- static class ReverseScript implements ScriptBuilder {
- public change build_script(
- final boolean[] changed0,int len0,
- final boolean[] changed1,int len1)
- {
- change script = null;
- int i0 = 0, i1 = 0;
- while (i0 < len0 || i1 < len1) {
- if (changed0[1+i0] || changed1[1+i1]) {
- int line0 = i0, line1 = i1;
-
- /* Find # lines changed here in each file. */
- while (changed0[1+i0]) ++i0;
- while (changed1[1+i1]) ++i1;
-
- /* Record this change. */
- script = new change(line0, line1, i0 - line0, i1 - line1, script);
- }
-
- /* We have reached lines in the two files that match each other. */
- i0++; i1++;
- }
-
- return script;
- }
- }
-
- static class ForwardScript implements ScriptBuilder {
- /** Scan the tables of which lines are inserted and deleted,
- producing an edit script in forward order. */
- public change build_script(
- final boolean[] changed0,int len0,
- final boolean[] changed1,int len1)
- {
- change script = null;
- int i0 = len0, i1 = len1;
-
- while (i0 >= 0 || i1 >= 0)
- {
- if (changed0[i0] || changed1[i1])
- {
- int line0 = i0, line1 = i1;
-
- /* Find # lines changed here in each file. */
- while (changed0[i0]) --i0;
- while (changed1[i1]) --i1;
-
- /* Record this change. */
- script = new change(i0, i1, line0 - i0, line1 - i1, script);
- }
-
- /* We have reached lines in the two files that match each other. */
- i0--; i1--;
- }
-
- return script;
- }
- }
-
- /** Standard ScriptBuilders. */
- public final static ScriptBuilder
- forwardScript = new ForwardScript(),
- reverseScript = new ReverseScript();
-
- /* Report the differences of two files. DEPTH is the current directory
- depth. */
- public final change diff_2(final boolean reverse) {
- return diff(reverse ? reverseScript : forwardScript);
- }
-
- /** Get the results of comparison as an edit script. The script
- is described by a list of changes. The standard ScriptBuilder
- implementations provide for forward and reverse edit scripts.
- Alternate implementations could, for instance, list common elements
- instead of differences.
- @param bld an object to build the script from change flags
- @return the head of a list of changes
- */
- public change diff(final ScriptBuilder bld) {
-
- /* Some lines are obviously insertions or deletions
- because they don't match anything. Detect them now,
- and avoid even thinking about them in the main comparison algorithm. */
-
- discard_confusing_lines ();
-
- /* Now do the main comparison algorithm, considering just the
- undiscarded lines. */
-
- xvec = filevec[0].undiscarded;
- yvec = filevec[1].undiscarded;
-
- int diags =
- filevec[0].nondiscarded_lines + filevec[1].nondiscarded_lines + 3;
- fdiag = new int[diags];
- fdiagoff = filevec[1].nondiscarded_lines + 1;
- bdiag = new int[diags];
- bdiagoff = filevec[1].nondiscarded_lines + 1;
-
- compareseq (0, filevec[0].nondiscarded_lines,
- 0, filevec[1].nondiscarded_lines);
- fdiag = null;
- bdiag = null;
-
- /* Modify the results slightly to make them prettier
- in cases where that can validly be done. */
-
- shift_boundaries ();
-
- /* Get the results of comparison in the form of a chain
- of `struct change's -- an edit script. */
- return bld.build_script(
- filevec[0].changed_flag,
- filevec[0].buffered_lines,
- filevec[1].changed_flag,
- filevec[1].buffered_lines
- );
-
- }
-
- /** The result of comparison is an "edit script": a chain of change objects.
- Each change represents one place where some lines are deleted
- and some are inserted.
-
- LINE0 and LINE1 are the first affected lines in the two files (origin 0).
- DELETED is the number of lines deleted here from file 0.
- INSERTED is the number of lines inserted here in file 1.
-
- If DELETED is 0 then LINE0 is the number of the line before
- which the insertion was done; vice versa for INSERTED and LINE1. */
-
- public static class change {
- /** Previous or next edit command. */
- public change link;
- /** # lines of file 1 changed here. */
- public final int inserted;
- /** # lines of file 0 changed here. */
- public final int deleted;
- /** Line number of 1st deleted line. */
- public final int line0;
- /** Line number of 1st inserted line. */
- public final int line1;
-
- /** Cons an additional entry onto the front of an edit script OLD.
- LINE0 and LINE1 are the first affected lines in the two files (origin 0).
- DELETED is the number of lines deleted here from file 0.
- INSERTED is the number of lines inserted here in file 1.
-
- If DELETED is 0 then LINE0 is the number of the line before
- which the insertion was done; vice versa for INSERTED and LINE1. */
- public change(int line0, int line1, int deleted, int inserted, change old) {
- this.line0 = line0;
- this.line1 = line1;
- this.inserted = inserted;
- this.deleted = deleted;
- this.link = old;
- //System.err.println(line0+","+line1+","+inserted+","+deleted);
- }
- }
-
- /** Data on one input file being compared.
- */
-
- class file_data {
-
- /** Allocate changed array for the results of comparison. */
- void clear() {
- /* Allocate a flag for each line of each file, saying whether that line
- is an insertion or deletion.
- Allocate an extra element, always zero, at each end of each IndexedSeq.
- */
- changed_flag = new boolean[buffered_lines + 2];
- }
-
- /** Return equiv_count[I] as the number of lines in this file
- that fall in equivalence class I.
- @return the array of equivalence class counts.
- */
- int[] equivCount() {
- int[] equiv_count = new int[equiv_max];
- for (int i = 0; i < buffered_lines; ++i)
- ++equiv_count[equivs[i]];
- return equiv_count;
- }
-
- /** Discard lines that have no matches in another file.
-
- A line which is discarded will not be considered by the actual
- comparison algorithm; it will be as if that line were not in the file.
- The file's `realindexes' table maps virtual line numbers
- (which don't count the discarded lines) into real line numbers;
- this is how the actual comparison algorithm produces results
- that are comprehensible when the discarded lines are counted.
-<p>
- When we discard a line, we also mark it as a deletion or insertion
- so that it will be printed in the output.
- @param f the other file
- */
- void discard_confusing_lines(file_data f) {
- clear();
- /* Set up table of which lines are going to be discarded. */
- final byte[] discarded = discardable(f.equivCount());
-
- /* Don't really discard the provisional lines except when they occur
- in a run of discardables, with nonprovisionals at the beginning
- and end. */
- filterDiscards(discarded);
-
- /* Actually discard the lines. */
- discard(discarded);
- }
-
- /** Mark to be discarded each line that matches no line of another file.
- If a line matches many lines, mark it as provisionally discardable.
- @see equivCount()
- @param counts The count of each equivalence number for the other file.
- @return 0=nondiscardable, 1=discardable or 2=provisionally discardable
- for each line
- */
-
- private byte[] discardable(final int[] counts) {
- final int end = buffered_lines;
- final byte[] discards = new byte[end];
- final int[] equivs = this.equivs;
- int many = 5;
- int tem = end / 64;
-
- /* Multiply MANY by approximate square root of number of lines.
- That is the threshold for provisionally discardable lines. */
- while ((tem = tem >> 2) > 0)
- many *= 2;
-
- for (int i = 0; i < end; i++)
- {
- int nmatch;
- if (equivs[i] == 0)
- continue;
- nmatch = counts[equivs[i]];
- if (nmatch == 0)
- discards[i] = 1;
- else if (nmatch > many)
- discards[i] = 2;
- }
- return discards;
- }
-
- /** Don't really discard the provisional lines except when they occur
- in a run of discardables, with nonprovisionals at the beginning
- and end. */
-
- private void filterDiscards(final byte[] discards) {
- final int end = buffered_lines;
-
- for (int i = 0; i < end; i++)
- {
- /* Cancel provisional discards not in middle of run of discards. */
- if (discards[i] == 2)
- discards[i] = 0;
- else if (discards[i] != 0)
- {
- /* We have found a nonprovisional discard. */
- int j;
- int length;
- int provisional = 0;
-
- /* Find end of this run of discardable lines.
- Count how many are provisionally discardable. */
- for (j = i; j < end; j++)
- {
- if (discards[j] == 0)
- break;
- if (discards[j] == 2)
- ++provisional;
- }
-
- /* Cancel provisional discards at end, and shrink the run. */
- while (j > i && discards[j - 1] == 2) {
- discards[--j] = 0; --provisional;
- }
-
- /* Now we have the length of a run of discardable lines
- whose first and last are not provisional. */
- length = j - i;
-
- /* If 1/4 of the lines in the run are provisional,
- cancel discarding of all provisional lines in the run. */
- if (provisional * 4 > length)
- {
- while (j > i)
- if (discards[--j] == 2)
- discards[j] = 0;
- }
- else
- {
- int consec;
- int minimum = 1;
- int tem = length / 4;
-
- /* MINIMUM is approximate square root of LENGTH/4.
- A subrun of two or more provisionals can stand
- when LENGTH is at least 16.
- A subrun of 4 or more can stand when LENGTH >= 64. */
- while ((tem = tem >> 2) > 0)
- minimum *= 2;
- minimum++;
-
- /* Cancel any subrun of MINIMUM or more provisionals
- within the larger run. */
- for (j = 0, consec = 0; j < length; j++)
- if (discards[i + j] != 2)
- consec = 0;
- else if (minimum == ++consec)
- /* Back up to start of subrun, to cancel it all. */
- j -= consec;
- else if (minimum < consec)
- discards[i + j] = 0;
-
- /* Scan from beginning of run
- until we find 3 or more nonprovisionals in a row
- or until the first nonprovisional at least 8 lines in.
- Until that point, cancel any provisionals. */
- for (j = 0, consec = 0; j < length; j++)
- {
- if (j >= 8 && discards[i + j] == 1)
- break;
- if (discards[i + j] == 2) {
- consec = 0; discards[i + j] = 0;
- }
- else if (discards[i + j] == 0)
- consec = 0;
- else
- consec++;
- if (consec == 3)
- break;
- }
-
- /* I advances to the last line of the run. */
- i += length - 1;
-
- /* Same thing, from end. */
- for (j = 0, consec = 0; j < length; j++)
- {
- if (j >= 8 && discards[i - j] == 1)
- break;
- if (discards[i - j] == 2) {
- consec = 0; discards[i - j] = 0;
- }
- else if (discards[i - j] == 0)
- consec = 0;
- else
- consec++;
- if (consec == 3)
- break;
- }
- }
- }
- }
- }
-
- /** Actually discard the lines.
- @param discards flags lines to be discarded
- */
- private void discard(final byte[] discards) {
- final int end = buffered_lines;
- int j = 0;
- for (int i = 0; i < end; ++i)
- if (no_discards || discards[i] == 0)
- {
- undiscarded[j] = equivs[i];
- realindexes[j++] = i;
- }
- else
- changed_flag[1+i] = true;
- nondiscarded_lines = j;
- }
-
- file_data(Object[] data,Hashtable h) {
- buffered_lines = data.length;
-
- equivs = new int[buffered_lines];
- undiscarded = new int[buffered_lines];
- realindexes = new int[buffered_lines];
-
- for (int i = 0; i < data.length; ++i) {
- Integer ir = (Integer)h.get(data[i]);
- if (ir == null)
- h.put(data[i],new Integer(equivs[i] = equiv_max++));
- else
- equivs[i] = ir.intValue();
- }
- }
-
- /** Adjust inserts/deletes of blank lines to join changes
- as much as possible.
-
- We do something when a run of changed lines include a blank
- line at one end and have an excluded blank line at the other.
- We are free to choose which blank line is included.
- `compareseq' always chooses the one at the beginning,
- but usually it is cleaner to consider the following blank line
- to be the "change". The only exception is if the preceding blank line
- would join this change to other changes.
- @param f the file being compared against
- */
-
- void shift_boundaries(file_data f) {
- final boolean[] changed = changed_flag;
- final boolean[] other_changed = f.changed_flag;
- int i = 0;
- int j = 0;
- int i_end = buffered_lines;
- int preceding = -1;
- int other_preceding = -1;
-
- for (;;)
- {
- int start, end, other_start;
-
- /* Scan forwards to find beginning of another run of changes.
- Also keep track of the corresponding point in the other file. */
-
- while (i < i_end && !changed[1+i])
- {
- while (other_changed[1+j++])
- /* Non-corresponding lines in the other file
- will count as the preceding batch of changes. */
- other_preceding = j;
- i++;
- }
-
- if (i == i_end)
- break;
-
- start = i;
- other_start = j;
-
- for (;;)
- {
- /* Now find the end of this run of changes. */
-
- while (i < i_end && changed[1+i]) i++;
- end = i;
-
- /* If the first changed line matches the following unchanged one,
- and this run does not follow right after a previous run,
- and there are no lines deleted from the other file here,
- then classify the first changed line as unchanged
- and the following line as changed in its place. */
-
- /* You might ask, how could this run follow right after another?
- Only because the previous run was shifted here. */
-
- if (end != i_end
- && equivs[start] == equivs[end]
- && !other_changed[1+j]
- && end != i_end
- && !((preceding >= 0 && start == preceding)
- || (other_preceding >= 0
- && other_start == other_preceding)))
- {
- changed[1+end++] = true;
- changed[1+start++] = false;
- ++i;
- /* Since one line-that-matches is now before this run
- instead of after, we must advance in the other file
- to keep in synch. */
- ++j;
- }
- else
- break;
- }
-
- preceding = i;
- other_preceding = j;
- }
- }
-
- /** Number of elements (lines) in this file. */
- final int buffered_lines;
-
- /** IndexedSeq, indexed by line number, containing an equivalence code for
- each line. It is this IndexedSeq that is actually compared with that
- of another file to generate differences. */
- private final int[] equivs;
-
- /** IndexedSeq, like the previous one except that
- the elements for discarded lines have been squeezed out. */
- final int[] undiscarded;
-
- /** IndexedSeq mapping virtual line numbers (not counting discarded lines)
- to real ones (counting those lines). Both are origin-0. */
- final int[] realindexes;
-
- /** Total number of nondiscarded lines. */
- int nondiscarded_lines;
-
- /** Array, indexed by real origin-1 line number,
- containing true for a line that is an insertion or a deletion.
- The results of comparison are stored here. */
- boolean[] changed_flag;
-
- }
-}
diff --git a/src/partest-alternative/scala/tools/partest/io/DiffPrint.java b/src/partest-alternative/scala/tools/partest/io/DiffPrint.java
deleted file mode 100644
index 273b6cb..0000000
--- a/src/partest-alternative/scala/tools/partest/io/DiffPrint.java
+++ /dev/null
@@ -1,606 +0,0 @@
-
-package scala.tools.partest.io;
-
-import java.io.*;
-import java.util.Vector;
-import java.util.Date;
-//import com.objectspace.jgl.predicates.UnaryPredicate;
-
-interface UnaryPredicate {
- boolean execute(Object obj);
-}
-
-/** A simple framework for printing change lists produced by <code>Diff</code>.
- @see bmsi.util.Diff
- @author Stuart D. Gathman
- Copyright (C) 2000 Business Management Systems, Inc.
-<p>
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 1, or (at your option)
- any later version.
-<p>
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-<p>
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
- */
-public class DiffPrint {
- /** A Base class for printing edit scripts produced by Diff.
- This class divides the change list into "hunks", and calls
- <code>print_hunk</code> for each hunk. Various utility methods
- are provided as well.
- */
- public static abstract class Base {
- protected Base(Object[] a,Object[] b, Writer w) {
- outfile = new PrintWriter(w);
- file0 = a;
- file1 = b;
- }
- /** Set to ignore certain kinds of lines when printing
- an edit script. For example, ignoring blank lines or comments.
- */
- protected UnaryPredicate ignore = null;
-
- /** Set to the lines of the files being compared.
- */
- protected Object[] file0, file1;
-
- /** Divide SCRIPT into pieces by calling HUNKFUN and
- print each piece with PRINTFUN.
- Both functions take one arg, an edit script.
-
- PRINTFUN takes a subscript which belongs together (with a null
- link at the end) and prints it. */
- public void print_script(Diff.change script) {
- Diff.change next = script;
-
- while (next != null)
- {
- Diff.change t, end;
-
- /* Find a set of changes that belong together. */
- t = next;
- end = hunkfun(next);
-
- /* Disconnect them from the rest of the changes,
- making them a hunk, and remember the rest for next iteration. */
- next = end.link;
- end.link = null;
- //if (DEBUG)
- // debug_script(t);
-
- /* Print this hunk. */
- print_hunk(t);
-
- /* Reconnect the script so it will all be freed properly. */
- end.link = next;
- }
- outfile.flush();
- }
-
- /** Called with the tail of the script
- and returns the last link that belongs together with the start
- of the tail. */
-
- protected Diff.change hunkfun(Diff.change hunk) {
- return hunk;
- }
-
- protected int first0, last0, first1, last1, deletes, inserts;
- protected PrintWriter outfile;
-
- /** Look at a hunk of edit script and report the range of lines in each file
- that it applies to. HUNK is the start of the hunk, which is a chain
- of `struct change'. The first and last line numbers of file 0 are stored
- in *FIRST0 and *LAST0, and likewise for file 1 in *FIRST1 and *LAST1.
- Note that these are internal line numbers that count from 0.
-
- If no lines from file 0 are deleted, then FIRST0 is LAST0+1.
-
- Also set *DELETES nonzero if any lines of file 0 are deleted
- and set *INSERTS nonzero if any lines of file 1 are inserted.
- If only ignorable lines are inserted or deleted, both are
- set to 0. */
-
- protected void analyze_hunk(Diff.change hunk) {
- int f0, l0 = 0, f1, l1 = 0, show_from = 0, show_to = 0;
- int i;
- Diff.change next;
- boolean nontrivial = (ignore == null);
-
- show_from = show_to = 0;
-
- f0 = hunk.line0;
- f1 = hunk.line1;
-
- for (next = hunk; next != null; next = next.link)
- {
- l0 = next.line0 + next.deleted - 1;
- l1 = next.line1 + next.inserted - 1;
- show_from += next.deleted;
- show_to += next.inserted;
- for (i = next.line0; i <= l0 && ! nontrivial; i++)
- if (!ignore.execute(file0[i]))
- nontrivial = true;
- for (i = next.line1; i <= l1 && ! nontrivial; i++)
- if (!ignore.execute(file1[i]))
- nontrivial = true;
- }
-
- first0 = f0;
- last0 = l0;
- first1 = f1;
- last1 = l1;
-
- /* If all inserted or deleted lines are ignorable,
- tell the caller to ignore this hunk. */
-
- if (!nontrivial)
- show_from = show_to = 0;
-
- deletes = show_from;
- inserts = show_to;
- }
-
- /** Print the script header which identifies the files compared. */
- protected void print_header(String filea, String fileb) { }
-
- protected abstract void print_hunk(Diff.change hunk);
-
- protected void print_1_line(String pre,Object linbuf) {
- outfile.println(pre + linbuf.toString());
- }
-
- /** Print a pair of line numbers with SEPCHAR, translated for file FILE.
- If the two numbers are identical, print just one number.
-
- Args A and B are internal line numbers.
- We print the translated (real) line numbers. */
-
- protected void print_number_range (char sepchar, int a, int b) {
- /* Note: we can have B < A in the case of a range of no lines.
- In this case, we should print the line number before the range,
- which is B. */
- if (++b > ++a)
- outfile.print("" + a + sepchar + b);
- else
- outfile.print(b);
- }
-
- public static char change_letter(int inserts, int deletes) {
- if (inserts == 0)
- return 'd';
- else if (deletes == 0)
- return 'a';
- else
- return 'c';
- }
- }
-
- /** Print a change list in the standard diff format.
- */
- public static class NormalPrint extends Base {
-
- public NormalPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- /** Print a hunk of a normal diff.
- This is a contiguous portion of a complete edit script,
- describing changes in consecutive lines. */
-
- protected void print_hunk (Diff.change hunk) {
-
- /* Determine range of line numbers involved in each file. */
- analyze_hunk(hunk);
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Print out the line number header for this hunk */
- print_number_range (',', first0, last0);
- outfile.print(change_letter(inserts, deletes));
- print_number_range (',', first1, last1);
- outfile.println();
-
- /* Print the lines that the first file has. */
- if (deletes != 0)
- for (int i = first0; i <= last0; i++)
- print_1_line ("< ", file0[i]);
-
- if (inserts != 0 && deletes != 0)
- outfile.println("---");
-
- /* Print the lines that the second file has. */
- if (inserts != 0)
- for (int i = first1; i <= last1; i++)
- print_1_line ("> ", file1[i]);
- }
- }
-
- /** Prints an edit script in a format suitable for input to <code>ed</code>.
- The edit script must be generated with the reverse option to
- be useful as actual <code>ed</code> input.
- */
- public static class EdPrint extends Base {
-
- public EdPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- /** Print a hunk of an ed diff */
- protected void print_hunk(Diff.change hunk) {
-
- /* Determine range of line numbers involved in each file. */
- analyze_hunk (hunk);
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Print out the line number header for this hunk */
- print_number_range (',', first0, last0);
- outfile.println(change_letter(inserts, deletes));
-
- /* Print new/changed lines from second file, if needed */
- if (inserts != 0)
- {
- boolean inserting = true;
- for (int i = first1; i <= last1; i++)
- {
- /* Resume the insert, if we stopped. */
- if (! inserting)
- outfile.println(i - first1 + first0 + "a");
- inserting = true;
-
- /* If the file's line is just a dot, it would confuse `ed'.
- So output it with a double dot, and set the flag LEADING_DOT
- so that we will output another ed-command later
- to change the double dot into a single dot. */
-
- if (".".equals(file1[i]))
- {
- outfile.println("..");
- outfile.println(".");
- /* Now change that double dot to the desired single dot. */
- outfile.println(i - first1 + first0 + 1 + "s/^\\.\\././");
- inserting = false;
- }
- else
- /* Line is not `.', so output it unmodified. */
- print_1_line ("", file1[i]);
- }
-
- /* End insert mode, if we are still in it. */
- if (inserting)
- outfile.println(".");
- }
- }
- }
-
- /** Prints an edit script in context diff format. This and its
- 'unified' variation is used for source code patches.
- */
- public static class ContextPrint extends Base {
-
- protected int context = 3;
-
- public ContextPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- protected void print_context_label (String mark, File inf, String label) {
- if (label != null)
- outfile.println(mark + ' ' + label);
- else if (inf.lastModified() > 0)
- // FIXME: use DateFormat to get precise format needed.
- outfile.println(
- mark + ' ' + inf.getPath() + '\t' + new Date(inf.lastModified())
- );
- else
- /* Don't pretend that standard input is ancient. */
- outfile.println(mark + ' ' + inf.getPath());
- }
-
- public void print_header(String filea,String fileb) {
- print_context_label ("***", new File(filea), filea);
- print_context_label ("---", new File(fileb), fileb);
- }
-
- /** If function_regexp defined, search for start of function. */
- private String find_function(Object[] lines, int start) {
- return null;
- }
-
- protected void print_function(Object[] file,int start) {
- String function = find_function (file0, first0);
- if (function != null) {
- outfile.print(" ");
- outfile.print(
- (function.length() < 40) ? function : function.substring(0,40)
- );
- }
- }
-
- protected void print_hunk(Diff.change hunk) {
-
- /* Determine range of line numbers involved in each file. */
-
- analyze_hunk (hunk);
-
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Include a context's width before and after. */
-
- first0 = Math.max(first0 - context, 0);
- first1 = Math.max(first1 - context, 0);
- last0 = Math.min(last0 + context, file0.length - 1);
- last1 = Math.min(last1 + context, file1.length - 1);
-
-
- outfile.print("***************");
-
- /* If we looked for and found a function this is part of,
- include its name in the header of the diff section. */
- print_function (file0, first0);
-
- outfile.println();
- outfile.print("*** ");
- print_number_range (',', first0, last0);
- outfile.println(" ****");
-
- if (deletes != 0) {
- Diff.change next = hunk;
-
- for (int i = first0; i <= last0; i++) {
- /* Skip past changes that apply (in file 0)
- only to lines before line I. */
-
- while (next != null && next.line0 + next.deleted <= i)
- next = next.link;
-
- /* Compute the marking for line I. */
-
- String prefix = " ";
- if (next != null && next.line0 <= i)
- /* The change NEXT covers this line.
- If lines were inserted here in file 1, this is "changed".
- Otherwise it is "deleted". */
- prefix = (next.inserted > 0) ? "!" : "-";
-
- print_1_line (prefix, file0[i]);
- }
- }
-
- outfile.print("--- ");
- print_number_range (',', first1, last1);
- outfile.println(" ----");
-
- if (inserts != 0) {
- Diff.change next = hunk;
-
- for (int i = first1; i <= last1; i++) {
- /* Skip past changes that apply (in file 1)
- only to lines before line I. */
-
- while (next != null && next.line1 + next.inserted <= i)
- next = next.link;
-
- /* Compute the marking for line I. */
-
- String prefix = " ";
- if (next != null && next.line1 <= i)
- /* The change NEXT covers this line.
- If lines were deleted here in file 0, this is "changed".
- Otherwise it is "inserted". */
- prefix = (next.deleted > 0) ? "!" : "+";
-
- print_1_line (prefix, file1[i]);
- }
- }
- }
- }
-
- /** Prints an edit script in context diff format. This and its
- 'unified' variation is used for source code patches.
- */
- public static class UnifiedPrint extends ContextPrint {
-
- public UnifiedPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- public void print_header(String filea,String fileb) {
- print_context_label ("---", new File(filea), filea);
- print_context_label ("+++", new File(fileb), fileb);
- }
-
- private void print_number_range (int a, int b) {
- //translate_range (file, a, b, &trans_a, &trans_b);
-
- /* Note: we can have B < A in the case of a range of no lines.
- In this case, we should print the line number before the range,
- which is B. */
- if (b < a)
- outfile.print(b + ",0");
- else
- super.print_number_range(',',a,b);
- }
-
- protected void print_hunk(Diff.change hunk) {
- /* Determine range of line numbers involved in each file. */
- analyze_hunk (hunk);
-
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Include a context's width before and after. */
-
- first0 = Math.max(first0 - context, 0);
- first1 = Math.max(first1 - context, 0);
- last0 = Math.min(last0 + context, file0.length - 1);
- last1 = Math.min(last1 + context, file1.length - 1);
-
-
-
- outfile.print("@@ -");
- print_number_range (first0, last0);
- outfile.print(" +");
- print_number_range (first1, last1);
- outfile.print(" @@");
-
- /* If we looked for and found a function this is part of,
- include its name in the header of the diff section. */
- print_function(file0,first0);
-
- outfile.println();
-
- Diff.change next = hunk;
- int i = first0;
- int j = first1;
-
- while (i <= last0 || j <= last1) {
-
- /* If the line isn't a difference, output the context from file 0. */
-
- if (next == null || i < next.line0) {
- outfile.print(' ');
- print_1_line ("", file0[i++]);
- j++;
- }
- else {
- /* For each difference, first output the deleted part. */
-
- int k = next.deleted;
- while (k-- > 0) {
- outfile.print('-');
- print_1_line ("", file0[i++]);
- }
-
- /* Then output the inserted part. */
-
- k = next.inserted;
- while (k-- > 0) {
- outfile.print('+');
- print_1_line ("", file1[j++]);
- }
-
- /* We're done with this hunk, so on to the next! */
-
- next = next.link;
- }
- }
- }
- }
-
-
- /** Read a text file into an array of String. This provides basic diff
- functionality. A more advanced diff utility will use specialized
- objects to represent the text lines, with options to, for example,
- convert sequences of whitespace to a single space for comparison
- purposes.
- */
- static String[] slurp(String file) throws IOException {
- BufferedReader rdr = new BufferedReader(new FileReader(file));
- Vector s = new Vector();
- for (;;) {
- String line = rdr.readLine();
- if (line == null) break;
- s.addElement(line);
- }
- String[] a = new String[s.size()];
- s.copyInto(a);
- return a;
- }
-
- public static void main(String[] argv) throws IOException {
- String filea = argv[argv.length - 2];
- String fileb = argv[argv.length - 1];
- String[] a = slurp(filea);
- String[] b = slurp(fileb);
- Diff d = new Diff(a,b);
- char style = 'n';
- for (int i = 0; i < argv.length - 2; ++i) {
- String f = argv[i];
- if (f.startsWith("-")) {
- for (int j = 1; j < f.length(); ++j) {
- switch (f.charAt(j)) {
- case 'e': // Ed style
- style = 'e'; break;
- case 'c': // Context diff
- style = 'c'; break;
- case 'u':
- style = 'u'; break;
- }
- }
- }
- }
- boolean reverse = style == 'e';
- Diff.change script = d.diff_2(reverse);
- if (script == null)
- System.err.println("No differences");
- else {
- Base p;
- Writer w = new OutputStreamWriter(System.out);
- switch (style) {
- case 'e':
- p = new EdPrint(a,b,w); break;
- case 'c':
- p = new ContextPrint(a,b,w); break;
- case 'u':
- p = new UnifiedPrint(a,b,w); break;
- default:
- p = new NormalPrint(a,b,w);
- }
- p.print_header(filea,fileb);
- p.print_script(script);
- }
- }
-
- public static void doDiff(String[] argv, Writer w) throws IOException {
- String filea = argv[argv.length - 2];
- String fileb = argv[argv.length - 1];
- String[] a = slurp(filea);
- String[] b = slurp(fileb);
- Diff d = new Diff(a,b);
- char style = 'n';
- for (int i = 0; i < argv.length - 2; ++i) {
- String f = argv[i];
- if (f.startsWith("-")) {
- for (int j = 1; j < f.length(); ++j) {
- switch (f.charAt(j)) {
- case 'e': // Ed style
- style = 'e'; break;
- case 'c': // Context diff
- style = 'c'; break;
- case 'u':
- style = 'u'; break;
- }
- }
- }
- }
- boolean reverse = style == 'e';
- Diff.change script = d.diff_2(reverse);
- if (script == null)
- w.write("No differences\n");
- else {
- Base p;
- switch (style) {
- case 'e':
- p = new EdPrint(a,b,w); break;
- case 'c':
- p = new ContextPrint(a,b,w); break;
- case 'u':
- p = new UnifiedPrint(a,b,w); break;
- default:
- p = new NormalPrint(a,b,w);
- }
- p.print_header(filea,fileb);
- p.print_script(script);
- }
- }
-
-}
diff --git a/src/partest-alternative/scala/tools/partest/io/JUnitReport.scala b/src/partest-alternative/scala/tools/partest/io/JUnitReport.scala
deleted file mode 100644
index ddb3bc2..0000000
--- a/src/partest-alternative/scala/tools/partest/io/JUnitReport.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-package io
-
-/** This is disabled for the moment but I can fix it up if anyone
- * is using it.
- */
-class JUnitReport {
- // create JUnit Report xml files if directory was specified
- // def junitReport(dir: Directory) = {
- // dir.mkdir()
- // val report = testReport(set.kind, results, succs, fails)
- // XML.save("%s/%s.xml".format(d.toAbsolute.path, set.kind), report)
- // }
-
- // def oneResult(res: (TestEntity, Int)) =
- // <testcase name={res._1.path}>{
- // res._2 match {
- // case 0 => scala.xml.NodeSeq.Empty
- // case 1 => <failure message="Test failed"/>
- // case 2 => <failure message="Test timed out"/>
- // }
- // }</testcase>
- //
- // def testReport(kind: String, results: Iterable[(TestEntity, Int)], succs: Int, fails: Int) = {
- // <testsuite name={kind} tests={(succs + fails).toString} failures={fails.toString}>
- // <properties/>
- // {
- // results.map(oneResult(_))
- // }
- // </testsuite>
- // }
- //
-}
\ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/io/Logging.scala b/src/partest-alternative/scala/tools/partest/io/Logging.scala
deleted file mode 100644
index 52239ff..0000000
--- a/src/partest-alternative/scala/tools/partest/io/Logging.scala
+++ /dev/null
@@ -1,137 +0,0 @@
-package scala.tools
-package partest
-package io
-
-import java.io.{ StringWriter, PrintWriter, Writer }
-import scala.tools.nsc.io._
-import scala.util.control.ControlThrowable
-
-trait Logging {
- universe: Universe =>
-
- class PartestANSIWriter extends ANSIWriter(Console.out) {
- override def colorful: Int = ANSIWriter(universe.isAnsi)
- private def printIf(cond: Boolean, msg: String) =
- if (cond) { outline("debug: ") ; println(msg) }
-
- val verbose = printIf(isVerbose || isDebug, _: String)
- val debug = printIf(isDebug, _: String)
- }
-
- lazy val NestUI = new PartestANSIWriter()
-
- import NestUI.{ _outline, _success, _failure, _warning, _default }
-
- def markOutline(msg: String) = _outline + msg + _default
- def markSuccess(msg: String) = _success + msg + _default
- def markFailure(msg: String) = _failure + msg + _default
- def markWarning(msg: String) = _warning + msg + _default
- def markNormal(msg: String) = _default + msg
-
- def outline(msg: String) = NestUI outline msg
- def success(msg: String) = NestUI success msg
- def failure(msg: String) = NestUI failure msg
- def warning(msg: String) = NestUI warning msg
- def normal(msg: String) = NestUI normal msg
-
- def verbose(msg: String) = NestUI verbose msg
- def debug(msg: String) = NestUI debug msg
-
- trait EntityLogging {
- self: TestEntity =>
-
- lazy val logWriter = new LogWriter(logFile)
-
- /** Redirect stdout and stderr to logFile, run body, return result.
- */
- def loggingOutAndErr[T](body: => T): T = {
- val log = logFile.printStream(append = true)
-
- try Console.withOut(log) {
- Console.withErr(log) {
- body
- }
- }
- finally log.close()
- }
-
- /** What to print in a failure summary.
- */
- def failureMessage() = if (diffOutput != "") diffOutput else safeSlurp(logFile)
-
- /** For tracing. Outputs a line describing the next action. tracePath
- * is a path wrapper which prints name or full path depending on verbosity.
- */
- def trace(msg: String) = if (isTrace || isDryRun) System.err.println(">> [%s] %s".format(label, msg))
-
- def tracePath(path: Path): String = if (isVerbose) path.path else path.name
- def tracePath(path: String): String = tracePath(Path(path))
-
- /** v == verbose.
- */
- def vtrace(msg: String) = if (isVerbose) trace(msg)
-
- /** Run body, writes result to logFile. Any throwable is
- * caught, stringified, and written to the log.
- */
- def loggingResult(body: => String) =
- try returning(true)(_ => logFile writeAll body)
- catch {
- case x: ControlThrowable => throw x
- case x: InterruptedException => debug(this + " received interrupt, failing.\n") ; false
- case x: Throwable => logException(x)
- }
-
- def throwableToString(x: Throwable): String = {
- val w = new StringWriter
- x.printStackTrace(new PrintWriter(w))
- w.toString
- }
-
- def warnAndLog(str: String) = {
- warning(toStringTrunc(str, 800))
- logWriter append str
- }
-
- def warnAndLogException(msg: String, ex: Throwable) =
- warnAndLog(msg + throwableToString(ex))
-
- def deleteLog(force: Boolean = false) =
- if (universe.isNoCleanup && !force) debug("Not cleaning up " + logFile)
- else logFile.deleteIfExists()
-
- def onException(x: Throwable) { logException(x) }
- def logException(x: Throwable) = {
- val msg = throwableToString(x)
- if (!isTerse)
- normal(msg)
-
- logWriter append msg
- false
- }
- }
-
- /** A writer which doesn't create the file until a write comes in.
- */
- class LazilyCreatedWriter(log: File) extends Writer {
- @volatile private var isCreated = false
- private lazy val underlying = {
- isCreated = true
- log.bufferedWriter()
- }
-
- def flush() = if (isCreated) underlying.flush()
- def close() = if (isCreated) underlying.close()
- def write(chars: Array[Char], off: Int, len: Int) = {
- underlying.write(chars, off, len)
- underlying.flush()
- }
- }
-
- class LogWriter(log: File) extends PrintWriter(new LazilyCreatedWriter(log), true) {
- override def print(s: String) = {
- super.print(s)
- flush()
- }
- }
-}
\ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/package.scala b/src/partest-alternative/scala/tools/partest/package.scala
deleted file mode 100644
index 9c515aa..0000000
--- a/src/partest-alternative/scala/tools/partest/package.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-
-import nsc.io.{ File, Path, Process, Directory }
-import java.nio.charset.CharacterCodingException
-
-package object partest {
- /** The CharacterCodingExceptions are thrown at least on windows trying
- * to read a file like script/utf-8.scala
- */
- private[partest] def safeSlurp(f: File) =
- try if (f.exists) f.slurp() else ""
- catch { case _: CharacterCodingException => "" }
-
- private[partest] def safeLines(f: File) = safeSlurp(f) split """\r\n|\r|\n""" toList
- private[partest] def safeArgs(f: File) = toArgs(safeSlurp(f))
- private[partest] def isJava(f: Path) = f.isFile && (f hasExtension "java")
- private[partest] def isScala(f: Path) = f.isFile && (f hasExtension "scala")
- private[partest] def isJavaOrScala(f: Path) = isJava(f) || isScala(f)
-
- private[partest] def toArgs(line: String) = cmd toArgs line
- private[partest] def fromArgs(args: List[String]) = cmd fromArgs args
-
- /** Strings, argument lists, etc. */
-
- private[partest] def fromAnyArgs(args: List[Any]) = args mkString " " // separate to avoid accidents
- private[partest] def toStringTrunc(x: Any, max: Int = 240) = {
- val s = x.toString
- if (s.length < max) s
- else (s take max) + " [...]"
- }
- private[partest] def setProp(k: String, v: String) = scala.util.Properties.setProp(k, v)
-
- /** Pretty self explanatory. */
- def printAndExit(msg: String): Unit = {
- println(msg)
- exit(1)
- }
-
- /** Apply a function and return the passed value */
- def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
-}
\ No newline at end of file
diff --git a/src/partest-alternative/scala/tools/partest/util/package.scala b/src/partest-alternative/scala/tools/partest/util/package.scala
deleted file mode 100644
index c34d641..0000000
--- a/src/partest-alternative/scala/tools/partest/util/package.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- */
-
-package scala.tools
-package partest
-
-import java.util.{ Timer, TimerTask }
-import java.io.StringWriter
-import nsc.io._
-
-/** Misc code still looking for a good home.
- */
-package object util {
-
- def allPropertiesString() = javaHashtableToString(System.getProperties)
-
- private def javaHashtableToString(table: java.util.Hashtable[_,_]) = {
- import collection.JavaConversions._
- (table.toList map { case (k, v) => "%s -> %s\n".format(k, v) }).sorted mkString
- }
-
- def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] =
- fs flatMap (x => Option(AbstractFile getFile (Path(pre) / x).path)) toSet
-
- /** Copies one Path to another Path, trying to be sensible when one or the
- * other is a Directory. Returns true if it believes it succeeded.
- */
- def copyPath(from: Path, to: Path): Boolean = {
- if (!to.parent.isDirectory)
- to.parent.createDirectory(force = true)
-
- def copyDir = {
- val sub = to / from.name createDirectory true
- from.toDirectory.list forall (x => copyPath(x, sub))
- }
- (from.isDirectory, to.isDirectory) match {
- case (true, true) => copyDir
- case (true, false) => false
- case (false, true) => from.toFile copyTo (to / from.name)
- case (false, false) => from.toFile copyTo to
- }
- }
-
- /**
- * Compares two files using a Java implementation of the GNU diff
- * available at http://www.bmsi.com/java/#diff.
- *
- * @param f1 the first file to be compared
- * @param f2 the second file to be compared
- * @return the text difference between the compared files
- */
- def diffFiles(f1: File, f2: File): String = {
- val diffWriter = new StringWriter
- val args = Array(f1.toAbsolute.path, f2.toAbsolute.path)
-
- io.DiffPrint.doDiff(args, diffWriter)
- val result = diffWriter.toString
- if (result == "No differences") "" else result
- }
-}
diff --git a/src/partest/scala/tools/partest/ASMConverters.scala b/src/partest/scala/tools/partest/ASMConverters.scala
new file mode 100644
index 0000000..d618e08
--- /dev/null
+++ b/src/partest/scala/tools/partest/ASMConverters.scala
@@ -0,0 +1,71 @@
+package scala.tools.partest
+
+import scala.collection.JavaConverters._
+import scala.tools.asm
+import asm.tree.{ClassNode, MethodNode, InsnList}
+
+/** Makes using ASM from ByteCodeTests more convenient.
+ *
+ * Wraps ASM instructions in case classes so that equals and toString work
+ * for the purpose of bytecode diffing and pretty printing.
+ */
+trait ASMConverters {
+ // wrap ASM's instructions so we get case class-style `equals` and `toString`
+ object instructions {
+ def fromMethod(meth: MethodNode): List[Instruction] = {
+ val insns = meth.instructions
+ val asmToScala = new AsmToScala{ def labelIndex(l: asm.tree.AbstractInsnNode) = insns.indexOf(l) }
+
+ asmToScala.mapOver(insns.iterator.asScala.toList).asInstanceOf[List[Instruction]]
+ }
+
+ sealed abstract class Instruction { def opcode: String }
+ case class Field (opcode: String, desc: String, name: String, owner: String) extends Instruction
+ case class Incr (opcode: String, incr: Int, `var`: Int) extends Instruction
+ case class Op (opcode: String) extends Instruction
+ case class IntOp (opcode: String, operand: Int) extends Instruction
+ case class Jump (opcode: String, label: Label) extends Instruction
+ case class Ldc (opcode: String, cst: Any) extends Instruction
+ case class LookupSwitch (opcode: String, dflt: Label, keys: List[Integer], labels: List[Label]) extends Instruction
+ case class TableSwitch (opcode: String, dflt: Label, max: Int, min: Int, labels: List[Label]) extends Instruction
+ case class Method (opcode: String, desc: String, name: String, owner: String) extends Instruction
+ case class NewArray (opcode: String, desc: String, dims: Int) extends Instruction
+ case class TypeOp (opcode: String, desc: String) extends Instruction
+ case class VarOp (opcode: String, `var`: Int) extends Instruction
+ case class Label (offset: Int) extends Instruction { def opcode: String = "" }
+ case class FrameEntry (local: List[Any], stack: List[Any]) extends Instruction { def opcode: String = "" }
+ case class LineNumber (line: Int, start: Label) extends Instruction { def opcode: String = "" }
+ }
+
+ abstract class AsmToScala {
+ import instructions._
+
+ def labelIndex(l: asm.tree.AbstractInsnNode): Int
+
+ def mapOver(is: List[Any]): List[Any] = is map {
+ case i: asm.tree.AbstractInsnNode => apply(i)
+ case x => x
+ }
+
+ def op(i: asm.tree.AbstractInsnNode) = if (asm.util.Printer.OPCODES.isDefinedAt(i.getOpcode)) asm.util.Printer.OPCODES(i.getOpcode) else "?"
+ def lst[T](xs: java.util.List[T]): List[T] = if (xs == null) Nil else xs.asScala.toList
+ def apply(l: asm.tree.LabelNode): Label = this(l: asm.tree.AbstractInsnNode).asInstanceOf[Label]
+ def apply(x: asm.tree.AbstractInsnNode): Instruction = x match {
+ case i: asm.tree.FieldInsnNode => Field (op(i), i.desc: String, i.name: String, i.owner: String)
+ case i: asm.tree.IincInsnNode => Incr (op(i), i.incr: Int, i.`var`: Int)
+ case i: asm.tree.InsnNode => Op (op(i))
+ case i: asm.tree.IntInsnNode => IntOp (op(i), i.operand: Int)
+ case i: asm.tree.JumpInsnNode => Jump (op(i), this(i.label))
+ case i: asm.tree.LdcInsnNode => Ldc (op(i), i.cst: Any)
+ case i: asm.tree.LookupSwitchInsnNode => LookupSwitch (op(i), this(i.dflt), lst(i.keys), mapOver(lst(i.labels)).asInstanceOf[List[Label]])
+ case i: asm.tree.TableSwitchInsnNode => TableSwitch (op(i), this(i.dflt), i.max: Int, i.min: Int, mapOver(lst(i.labels)).asInstanceOf[List[Label]])
+ case i: asm.tree.MethodInsnNode => Method (op(i), i.desc: String, i.name: String, i.owner: String)
+ case i: asm.tree.MultiANewArrayInsnNode => NewArray (op(i), i.desc: String, i.dims: Int)
+ case i: asm.tree.TypeInsnNode => TypeOp (op(i), i.desc: String)
+ case i: asm.tree.VarInsnNode => VarOp (op(i), i.`var`: Int)
+ case i: asm.tree.LabelNode => Label (labelIndex(x))
+ case i: asm.tree.FrameNode => FrameEntry (mapOver(lst(i.local)), mapOver(lst(i.stack)))
+ case i: asm.tree.LineNumberNode => LineNumber (i.line: Int, this(i.start): Label)
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/partest/scala/tools/partest/AsmNode.scala b/src/partest/scala/tools/partest/AsmNode.scala
new file mode 100644
index 0000000..d181436
--- /dev/null
+++ b/src/partest/scala/tools/partest/AsmNode.scala
@@ -0,0 +1,60 @@
+package scala.tools.partest
+
+import scala.collection.JavaConverters._
+import scala.tools.asm
+import asm._
+import asm.tree._
+import java.lang.reflect.Modifier
+
+sealed trait AsmNode[+T] {
+ def node: T
+ def access: Int
+ def desc: String
+ def name: String
+ def signature: String
+ def attrs: List[Attribute]
+ def visibleAnnotations: List[AnnotationNode]
+ def invisibleAnnotations: List[AnnotationNode]
+ def characteristics = f"$name%15s $desc%-30s$accessString$sigString"
+
+ private def accessString = if (access == 0) "" else " " + Modifier.toString(access)
+ private def sigString = if (signature == null) "" else " " + signature
+ override def toString = characteristics
+}
+
+object AsmNode {
+ type AsmMethod = AsmNode[MethodNode]
+ type AsmField = AsmNode[FieldNode]
+ type AsmMember = AsmNode[_]
+
+ implicit class ClassNodeOps(val node: ClassNode) {
+ def fieldsAndMethods: List[AsmMember] = {
+ val xs: List[AsmMember] = (
+ node.methods.asScala.toList.map(x => (x: AsmMethod))
+ ++ node.fields.asScala.toList.map(x => (x: AsmField))
+ )
+ xs sortBy (_.characteristics)
+ }
+ }
+ implicit class AsmMethodNode(val node: MethodNode) extends AsmNode[MethodNode] {
+ def access: Int = node.access
+ def desc: String = node.desc
+ def name: String = node.name
+ def signature: String = node.signature
+ def attrs: List[Attribute] = node.attrs.asScala.toList
+ def visibleAnnotations: List[AnnotationNode] = node.visibleAnnotations.asScala.toList
+ def invisibleAnnotations: List[AnnotationNode] = node.invisibleAnnotations.asScala.toList
+ }
+ implicit class AsmFieldNode(val node: FieldNode) extends AsmNode[FieldNode] {
+ def access: Int = node.access
+ def desc: String = node.desc
+ def name: String = node.name
+ def signature: String = node.signature
+ def attrs: List[Attribute] = node.attrs.asScala.toList
+ def visibleAnnotations: List[AnnotationNode] = node.visibleAnnotations.asScala.toList
+ def invisibleAnnotations: List[AnnotationNode] = node.invisibleAnnotations.asScala.toList
+ }
+
+ def apply(node: MethodNode): AsmMethodNode = new AsmMethodNode(node)
+ def apply(node: FieldNode): AsmFieldNode = new AsmFieldNode(node)
+}
diff --git a/src/partest/scala/tools/partest/BytecodeTest.scala b/src/partest/scala/tools/partest/BytecodeTest.scala
new file mode 100644
index 0000000..2699083
--- /dev/null
+++ b/src/partest/scala/tools/partest/BytecodeTest.scala
@@ -0,0 +1,129 @@
+package scala.tools.partest
+
+import scala.tools.nsc.util.JavaClassPath
+import scala.collection.JavaConverters._
+import scala.tools.asm
+import asm.{ ClassReader }
+import asm.tree.{ClassNode, MethodNode, InsnList}
+import java.io.InputStream
+import AsmNode._
+
+/**
+ * Provides utilities for inspecting bytecode using ASM library.
+ *
+ * HOW TO USE
+ * 1. Create subdirectory in test/files/jvm for your test. Let's name it $TESTDIR.
+ * 2. Create $TESTDIR/BytecodeSrc_1.scala that contains Scala source file that you
+ * want to inspect the bytecode for. The '_1' suffix signals to partest that it
+ * should compile this file first.
+ * 3. Create $TESTDIR/Test.scala:
+ * import scala.tools.partest.BytecodeTest
+ * object Test extends BytecodeTest {
+ * def show {
+ * // your code that inspect ASM trees and prints values
+ * }
+ * }
+ * 4. Create corresponding check file.
+ *
+ * EXAMPLE
+ * See test/files/jvm/bytecode-test-example for an example of bytecode test.
+ *
+ */
+abstract class BytecodeTest extends ASMConverters {
+ import instructions._
+
+ /** produce the output to be compared against a checkfile */
+ protected def show(): Unit
+
+ def main(args: Array[String]): Unit = show
+
+ // asserts
+ def sameBytecode(methA: MethodNode, methB: MethodNode) = {
+ val isa = instructions.fromMethod(methA)
+ val isb = instructions.fromMethod(methB)
+ if (isa == isb) println("bytecode identical")
+ else diffInstructions(isa, isb)
+ }
+
+ // Do these classes have all the same methods, with the same names, access,
+ // descriptors and generic signatures? Method bodies are not considered, and
+ // the names of the classes containing the methods are substituted so they do
+ // not appear as differences.
+ def sameMethodAndFieldSignatures(clazzA: ClassNode, clazzB: ClassNode): Boolean = {
+ val ms1 = clazzA.fieldsAndMethods.toIndexedSeq
+ val ms2 = clazzB.fieldsAndMethods.toIndexedSeq
+ val name1 = clazzA.name
+ val name2 = clazzB.name
+
+ if (ms1.length != ms2.length) {
+ println("Different member counts in $name1 and $name2")
+ false
+ }
+ else (ms1, ms2).zipped forall { (m1, m2) =>
+ val c1 = m1.characteristics
+ val c2 = m2.characteristics.replaceAllLiterally(name2, name1)
+ if (c1 == c2)
+ println(s"[ok] $m1")
+ else
+ println(s"[fail]\n in $name1: $c1\n in $name2: $c2")
+
+ c1 == c2
+ }
+ }
+
+ // bytecode is equal modulo local variable numbering
+ def equalsModuloVar(a: Instruction, b: Instruction) = (a, b) match {
+ case _ if a == b => true
+ case (VarOp(op1, _), VarOp(op2, _)) if op1 == op2 => true
+ case _ => false
+ }
+
+ def similarBytecode(methA: MethodNode, methB: MethodNode, similar: (Instruction, Instruction) => Boolean) = {
+ val isa = fromMethod(methA)
+ val isb = fromMethod(methB)
+ if (isa == isb) println("bytecode identical")
+ else if ((isa, isb).zipped.forall { case (a, b) => similar(a, b) }) println("bytecode similar")
+ else diffInstructions(isa, isb)
+ }
+
+ def diffInstructions(isa: List[Instruction], isb: List[Instruction]) = {
+ val len = Math.max(isa.length, isb.length)
+ if (len > 0 ) {
+ val width = isa.map(_.toString.length).max
+ val lineWidth = len.toString.length
+ (1 to len) foreach { line =>
+ val isaPadded = isa.map(_.toString) orElse Stream.continually("")
+ val isbPadded = isb.map(_.toString) orElse Stream.continually("")
+ val a = isaPadded(line-1)
+ val b = isbPadded(line-1)
+
+ println(s"""$line${" " * (lineWidth-line.toString.length)} ${if (a==b) "==" else "<>"} $a${" " * (width-a.length)} | $b""")
+ }
+ }
+ }
+
+// loading
+ protected def getMethod(classNode: ClassNode, name: String): MethodNode =
+ classNode.methods.asScala.find(_.name == name) getOrElse
+ sys.error(s"Didn't find method '$name' in class '${classNode.name}'")
+
+ protected def loadClassNode(name: String, skipDebugInfo: Boolean = true): ClassNode = {
+ val classBytes: InputStream = (for {
+ classRep <- classpath.findClass(name)
+ binary <- classRep.binary
+ } yield binary.input) getOrElse sys.error(s"failed to load class '$name'; classpath = $classpath")
+
+ val cr = new ClassReader(classBytes)
+ val cn = new ClassNode()
+ cr.accept(cn, if (skipDebugInfo) ClassReader.SKIP_DEBUG else 0)
+ cn
+ }
+
+ protected lazy val classpath: JavaClassPath = {
+ import scala.tools.nsc.util.ClassPath.DefaultJavaContext
+ import scala.tools.util.PathResolver.Defaults
+ // logic inspired by scala.tools.util.PathResolver implementation
+ val containers = DefaultJavaContext.classesInExpandedPath(Defaults.javaUserClassPath)
+ new JavaClassPath(containers, DefaultJavaContext)
+ }
+}
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
new file mode 100644
index 0000000..848deef
--- /dev/null
+++ b/src/partest/scala/tools/partest/CompilerTest.scala
@@ -0,0 +1,61 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.partest
+
+import scala.reflect.runtime.{universe => ru}
+import scala.tools.nsc._
+
+/** For testing compiler internals directly.
+ * Each source code string in "sources" will be compiled, and
+ * the check function will be called with the source code and the
+ * resulting CompilationUnit. The check implementation should
+ * test for what it wants to test and fail (via assert or other
+ * exception) if it is not happy.
+ */
+abstract class CompilerTest extends DirectTest {
+ def check(source: String, unit: global.CompilationUnit): Unit
+
+ lazy val global: Global = newCompiler()
+ lazy val units: List[global.CompilationUnit] = compilationUnits(global)(sources: _ *)
+ import global._
+ import definitions._
+
+ override def extraSettings = "-usejavacp -d " + testOutput.path
+
+ def show() = (sources, units).zipped foreach check
+
+ // Override at least one of these...
+ def code = ""
+ def sources: List[String] = List(code)
+
+ // Utility functions
+
+ class MkType(sym: Symbol) {
+ def apply[M](implicit t: ru.TypeTag[M]): Type =
+ if (sym eq NoSymbol) NoType
+ else appliedType(sym, compilerTypeFromTag(t))
+ }
+ implicit def mkMkType(sym: Symbol) = new MkType(sym)
+
+ def allMembers(root: Symbol): List[Symbol] = {
+ def loop(seen: Set[Symbol], roots: List[Symbol]): List[Symbol] = {
+ val latest = roots flatMap (_.info.members) filterNot (seen contains _)
+ if (latest.isEmpty) seen.toList.sortWith(_ isLess _)
+ else loop(seen ++ latest, latest)
+ }
+ loop(Set(), List(root))
+ }
+
+ class SymsInPackage(pkgName: String) {
+ def pkg = rootMirror.getRequiredPackage(pkgName)
+ def classes = allMembers(pkg) filter (_.isClass)
+ def modules = allMembers(pkg) filter (_.isModule)
+ def symbols = classes ++ terms filterNot (_ eq NoSymbol)
+ def terms = allMembers(pkg) filter (s => s.isTerm && !s.isConstructor)
+ def tparams = classes flatMap (_.info.typeParams)
+ def tpes = symbols map (_.tpe) distinct
+ }
+}
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
new file mode 100644
index 0000000..8fcaa64
--- /dev/null
+++ b/src/partest/scala/tools/partest/DirectTest.scala
@@ -0,0 +1,131 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.partest
+
+import scala.tools.nsc._
+import settings.ScalaVersion
+import io.Directory
+import util.{ SourceFile, BatchSourceFile, CommandLineParser }
+import reporters.{Reporter, ConsoleReporter}
+
+/** A class for testing code which is embedded as a string.
+ * It allows for more complete control over settings, compiler
+ * configuration, sequence of events, etc. than does partest.
+ */
+abstract class DirectTest extends App {
+ // The program being tested in some fashion
+ def code: String
+ // produce the output to be compared against a checkfile
+ def show(): Unit
+
+ // the test file or dir, and output directory
+ def testPath = io.File(sys.props("partest.test-path"))
+ def testOutput = io.Directory(sys.props("partest.output"))
+
+ // override to add additional settings with strings
+ def extraSettings: String = ""
+ // a default Settings object
+ def settings: Settings = newSettings(CommandLineParser tokenize extraSettings)
+ // a custom Settings object
+ def newSettings(args: List[String]) = {
+ val s = new Settings
+ val allArgs = args ++ (CommandLineParser tokenize debugSettings)
+ log("newSettings: allArgs = " + allArgs)
+ s processArguments (allArgs, true)
+ s
+ }
+ // new compiler
+ def newCompiler(args: String*): Global = {
+ val settings = newSettings((CommandLineParser tokenize ("-d \"" + testOutput.path + "\" " + extraSettings)) ++ args.toList)
+ newCompiler(settings)
+ }
+
+ def newCompiler(settings: Settings): Global = {
+ if (settings.Yrangepos.value) new Global(settings, reporter(settings)) with interactive.RangePositions
+ else new Global(settings, reporter(settings))
+ }
+
+ def reporter(settings: Settings): Reporter = new ConsoleReporter(settings)
+
+ private def newSourcesWithExtension(ext: String)(codes: String*): List[BatchSourceFile] =
+ codes.toList.zipWithIndex map {
+ case (src, idx) => new BatchSourceFile(s"newSource${idx + 1}.$ext", src)
+ }
+
+ def newJavaSources(codes: String*) = newSourcesWithExtension("java")(codes: _*)
+ def newSources(codes: String*) = newSourcesWithExtension("scala")(codes: _*)
+
+ def compileString(global: Global)(sourceCode: String): Boolean = {
+ withRun(global)(_ compileSources newSources(sourceCode))
+ !global.reporter.hasErrors
+ }
+
+ def javaCompilationUnits(global: Global)(sourceCodes: String*) = {
+ sourceFilesToCompiledUnits(global)(newJavaSources(sourceCodes: _*))
+ }
+
+ def sourceFilesToCompiledUnits(global: Global)(files: List[SourceFile]) = {
+ withRun(global) { run =>
+ run compileSources files
+ run.units.toList
+ }
+ }
+
+ def compilationUnits(global: Global)(sourceCodes: String*): List[global.CompilationUnit] = {
+ val units = sourceFilesToCompiledUnits(global)(newSources(sourceCodes: _*))
+ if (global.reporter.hasErrors) {
+ global.reporter.flush()
+ sys.error("Compilation failure.")
+ }
+ units
+ }
+
+ def withRun[T](global: Global)(f: global.Run => T): T = {
+ global.reporter.reset()
+ f(new global.Run)
+ }
+
+ // compile the code, optionally first adding to the settings
+ def compile(args: String*) = compileString(newCompiler(args: _*))(code)
+
+ /** Constructor/main body **/
+ try show()
+ catch { case t: Exception => println(t.getMessage) ; t.printStackTrace ; sys.exit(1) }
+
+ /** Debugger interest only below this line **/
+ protected def isDebug = (sys.props contains "partest.debug") || (sys.env contains "PARTEST_DEBUG")
+ protected def debugSettings = sys.props.getOrElse("partest.debug.settings", "")
+
+ final def log(msg: => Any) {
+ if (isDebug) Console.err println msg
+ }
+
+ /**
+ * Run a test only if the current java version is at least the version specified.
+ */
+ def testUnderJavaAtLeast[A](version: String)(yesRun: =>A) = new TestUnderJavaAtLeast(version, { yesRun })
+
+ class TestUnderJavaAtLeast[A](version: String, yesRun: => A) {
+ val javaVersion = System.getProperty("java.specification.version")
+
+ // the "ScalaVersion" class parses Java specification versions just fine
+ val requiredJavaVersion = ScalaVersion(version)
+ val executingJavaVersion = ScalaVersion(javaVersion)
+ val shouldRun = executingJavaVersion >= requiredJavaVersion
+ val preamble = if (shouldRun) "Attempting" else "Doing fallback for"
+
+ def logInfo() = log(s"$preamble java $version specific test under java version $javaVersion")
+
+ /*
+ * If the current java version is at least 'version' then 'yesRun' is evaluated
+ * otherwise 'fallback' is
+ */
+ def otherwise(fallback: =>A): A = {
+ logInfo()
+ if (shouldRun) yesRun else fallback
+ }
+ }
+}
diff --git a/src/partest/scala/tools/partest/IcodeTest.scala b/src/partest/scala/tools/partest/IcodeTest.scala
new file mode 100644
index 0000000..f5333cc
--- /dev/null
+++ b/src/partest/scala/tools/partest/IcodeTest.scala
@@ -0,0 +1,45 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.partest
+
+import scala.tools.nsc._
+import nest.FileUtil._
+import io.Directory
+
+/** A trait for testing icode. All you need is this in a
+ * partest source file:
+ * {{{
+ * object Test extends IcodeTest
+ * }}}
+ * And then the generated output will be the icode for everything
+ * in that file. See source for possible customizations.
+ */
+abstract class IcodeTest extends DirectTest {
+ // override to check icode at a different point.
+ def printIcodeAfterPhase = "icode"
+ // override to use source code other than the file being tested.
+ def code = testPath.slurp()
+
+ override def extraSettings: String = "-usejavacp -Xprint-icode:" + printIcodeAfterPhase
+
+ // Compile, read in all the *.icode files, delete them, and return their contents
+ def collectIcode(args: String*): List[String] = {
+ compile("-d" :: testOutput.path :: args.toList : _*)
+ val icodeFiles = testOutput.files.toList filter (_ hasExtension "icode")
+
+ try icodeFiles sortBy (_.name) flatMap (f => f.lines.toList)
+ finally icodeFiles foreach (f => f.delete())
+ }
+
+ // Default show() compiles the code with and without optimization and
+ // outputs the diff.
+ def show() {
+ val lines1 = collectIcode("")
+ val lines2 = collectIcode("-optimise")
+
+ println(compareContents(lines1, lines2))
+ }
+}
diff --git a/src/partest/scala/tools/partest/MemoryTest.scala b/src/partest/scala/tools/partest/MemoryTest.scala
new file mode 100644
index 0000000..58d25d2
--- /dev/null
+++ b/src/partest/scala/tools/partest/MemoryTest.scala
@@ -0,0 +1,38 @@
+package scala.tools.partest
+
+abstract class MemoryTest {
+ def maxDelta: Double
+ def calcsPerIter: Int
+ def calc(): Unit
+
+ def main(args: Array[String]) {
+ val rt = Runtime.getRuntime()
+ def memUsage() = {
+ import java.lang.management._
+ import scala.collection.JavaConverters._
+ val pools = ManagementFactory.getMemoryPoolMXBeans.asScala
+ pools.map(_.getUsage.getUsed).sum / 1000000d
+ }
+
+ val history = scala.collection.mutable.ListBuffer[Double]()
+ def stressTestIter() = {
+ var i = 0
+ while (i < calcsPerIter) { calc(); i += 1 }
+ 1 to 5 foreach (_ => rt.gc())
+ history += memUsage
+ }
+
+ 1 to 5 foreach (_ => stressTestIter())
+ val reference = memUsage()
+ 1 to 5 foreach (_ => stressTestIter())
+ 1 to 5 foreach (_ => rt.gc())
+ val result = memUsage()
+ history += result
+
+ val delta = result - reference
+ if (delta > maxDelta) {
+ println("FAILED")
+ history foreach (mb => println(mb + " Mb"))
+ }
+ }
+}
diff --git a/src/partest/scala/tools/partest/PartestDefaults.scala b/src/partest/scala/tools/partest/PartestDefaults.scala
index a113c2b..a21c602 100644
--- a/src/partest/scala/tools/partest/PartestDefaults.scala
+++ b/src/partest/scala/tools/partest/PartestDefaults.scala
@@ -2,8 +2,9 @@ package scala.tools
package partest
import nsc.io.{ File, Path, Directory }
-import util.{ PathResolver }
+import scala.tools.util.PathResolver
import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
+import java.lang.Runtime.getRuntime
object PartestDefaults {
import nsc.Properties._
@@ -14,17 +15,17 @@ object PartestDefaults {
def srcDirName = propOrElse("partest.srcdir", "files")
def testRootDir = testRootName map (x => Directory(x))
+ // def classPath = propOrElse("partest.classpath", "")
def classPath = PathResolver.Environment.javaUserClassPath // XXX
def javaCmd = propOrElse("partest.javacmd", "java")
def javacCmd = propOrElse("partest.javac_cmd", "javac")
def javaOpts = propOrElse("partest.java_opts", "")
- def scalacOpts = propOrElse("partest.scalac_opts", "-deprecation")
+ def scalacOpts = propOrElse("partest.scalac_opts", "")
- def testBuild = propOrNone("partest.build")
- def errorCount = propOrElse("partest.errors", "0").toInt
- def numActors = propOrElse("partest.actors", "8").toInt
- def poolSize = wrapAccessControl(propOrNone("actors.corePoolSize"))
+ def testBuild = propOrNone("partest.build")
+ def errorCount = propOrElse("partest.errors", "0").toInt
+ def numThreads = propOrNone("partest.threads") map (_.toInt) getOrElse getRuntime.availableProcessors
def timeout = "1200000"
}
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index 1a75621..dc40f9f 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -1,30 +1,61 @@
/* __ *\
** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-// $Id$
-
package scala.tools
package partest
-import scala.actors.Actor._
import scala.util.Properties.setProp
import scala.tools.nsc.io.{ Directory, Path => SPath }
import nsc.util.ClassPath
import util.PathResolver
import scala.tools.ant.sabbus.CompilationPathProperty
-
import java.io.File
import java.lang.reflect.Method
-
import org.apache.tools.ant.Task
import org.apache.tools.ant.types.{Path, Reference, FileSet}
-
-class PartestTask extends Task with CompilationPathProperty {
+import org.apache.tools.ant.types.Commandline.Argument
+import scala.tools.ant.ScalaTask
+
+/** An Ant task to execute the Scala test suite (NSC).
+ *
+ * This task can take the following parameters as attributes:
+ * - `srcdir`,
+ * - `classpath`,
+ * - `classpathref`,
+ * - `showlog`,
+ * - `showdiff`,
+ * - `erroronfailed`,
+ * - `javacmd`,
+ * - `javaccmd`,
+ * - `scalacopts`,
+ * - `timeout`,
+ * - `debug`,
+ * - `junitreportdir`.
+ *
+ * It also takes the following parameters as nested elements:
+ * - `compilationpath`.
+ * - `postests`,
+ * - `negtests`,
+ * - `runtests`,
+ * - `jvmtests`,
+ * - `residenttests`,
+ * - `buildmanagertests`,
+ * - `shootouttests`,
+ * - `scalaptests`,
+ * - `scalachecktests`,
+ * - `specializedtests`,
+ * - `instrumentedtests`,
+ * - `presentationtests`,
+ * - `scripttests`.
+ *
+ * @author Philippe Haller
+ */
+class PartestTask extends Task with CompilationPathProperty with ScalaTask {
def addConfiguredPosTests(input: FileSet) {
posFiles = Some(input)
@@ -70,10 +101,18 @@ class PartestTask extends Task with CompilationPathProperty {
specializedFiles = Some(input)
}
+ def addConfiguredInstrumentedTests(input: FileSet) {
+ instrumentedFiles = Some(input)
+ }
+
def addConfiguredPresentationTests(input: FileSet) {
presentationFiles = Some(input)
}
+ def addConfiguredAntTests(input: FileSet) {
+ antFiles = Some(input)
+ }
+
def setSrcDir(input: String) {
srcDir = Some(input)
@@ -115,8 +154,15 @@ class PartestTask extends Task with CompilationPathProperty {
javaccmd = Some(input)
}
- def setScalacOpts(opts: String) {
- scalacOpts = Some(opts)
+ def setScalacOpts(input: String) {
+ val s = input.split(' ').map { s => val a = new Argument; a.setValue(s); a }
+ scalacArgs = Some(scalacArgs.getOrElse(Seq()) ++ s)
+ }
+
+ def createCompilerArg(): Argument = {
+ val a = new Argument
+ scalacArgs = Some(scalacArgs.getOrElse(Seq()) :+ a)
+ a
}
def setTimeout(delay: String) {
@@ -149,9 +195,11 @@ class PartestTask extends Task with CompilationPathProperty {
private var shootoutFiles: Option[FileSet] = None
private var scalapFiles: Option[FileSet] = None
private var specializedFiles: Option[FileSet] = None
+ private var instrumentedFiles: Option[FileSet] = None
private var presentationFiles: Option[FileSet] = None
+ private var antFiles: Option[FileSet] = None
private var errorOnFailed: Boolean = false
- private var scalacOpts: Option[String] = None
+ private var scalacArgs: Option[Seq[Argument]] = None
private var timeout: Option[String] = None
private var jUnitReportDir: Option[File] = None
private var debug = false
@@ -204,9 +252,21 @@ class PartestTask extends Task with CompilationPathProperty {
private def getShootoutFiles = getFiles(shootoutFiles)
private def getScalapFiles = getFiles(scalapFiles)
private def getSpecializedFiles = getFiles(specializedFiles)
+ private def getInstrumentedFiles = getFilesAndDirs(instrumentedFiles)
private def getPresentationFiles = getDirs(presentationFiles)
+ private def getAntFiles = getFiles(antFiles)
override def execute() {
+ val opts = getProject().getProperties() get "env.PARTEST_OPTS"
+ if (opts != null && opts.toString != "")
+ opts.toString.split(" ") foreach { propDef =>
+ log("setting system property " + propDef)
+ val kv = propDef split "="
+ val key = kv(0) substring 2
+ val value = kv(1)
+ setProp(key, value)
+ }
+
if (isPartestDebug || debug) {
setProp("partest.debug", "true")
nest.NestUI._verbose = true
@@ -226,18 +286,77 @@ class PartestTask extends Task with CompilationPathProperty {
}
} getOrElse sys.error("Provided classpath does not contain a Scala library.")
+ val scalaReflect = {
+ (classpath.list map { fs => new File(fs) }) find { f =>
+ f.getName match {
+ case "scala-reflect.jar" => true
+ case "reflect" if (f.getParentFile.getName == "classes") => true
+ case _ => false
+ }
+ }
+ } getOrElse sys.error("Provided classpath does not contain a Scala reflection library.")
+
+ val scalaCompiler = {
+ (classpath.list map { fs => new File(fs) }) find { f =>
+ f.getName match {
+ case "scala-compiler.jar" => true
+ case "compiler" if (f.getParentFile.getName == "classes") => true
+ case _ => false
+ }
+ }
+ } getOrElse sys.error("Provided classpath does not contain a Scala compiler.")
+
+ val scalaPartest = {
+ (classpath.list map { fs => new File(fs) }) find { f =>
+ f.getName match {
+ case "scala-partest.jar" => true
+ case "partest" if (f.getParentFile.getName == "classes") => true
+ case _ => false
+ }
+ }
+ } getOrElse sys.error("Provided classpath does not contain a Scala partest.")
+
+ val scalaActors = {
+ (classpath.list map { fs => new File(fs) }) find { f =>
+ f.getName match {
+ case "scala-actors.jar" => true
+ case "actors" if (f.getParentFile.getName == "classes") => true
+ case _ => false
+ }
+ }
+ } getOrElse sys.error("Provided classpath does not contain a Scala actors.")
+
+ def scalacArgsFlat: Option[Seq[String]] = scalacArgs map (_ flatMap { a =>
+ val parts = a.getParts
+ if(parts eq null) Seq[String]() else parts.toSeq
+ })
+
val antRunner = new scala.tools.partest.nest.AntRunner
val antFileManager = antRunner.fileManager
+ // this is a workaround for https://issues.scala-lang.org/browse/SI-5433
+ // when that bug is fixed, this paragraph of code can be safely removed
+ // we hack into the classloader that will become parent classloader for scalac
+ // this way we ensure that reflective macro lookup will pick correct Code.lift
+ val loader = getClass.getClassLoader.asInstanceOf[org.apache.tools.ant.AntClassLoader]
+ val path = new org.apache.tools.ant.types.Path(getProject())
+ val newClassPath = ClassPath.join(nest.PathSettings.srcCodeLib.toString, loader.getClasspath)
+ path.setPath(newClassPath)
+ loader.setClassPath(path)
+
antFileManager.showDiff = showDiff
antFileManager.showLog = showLog
antFileManager.failed = runFailed
antFileManager.CLASSPATH = ClassPath.join(classpath.list: _*)
antFileManager.LATEST_LIB = scalaLibrary.getAbsolutePath
+ antFileManager.LATEST_REFLECT = scalaReflect.getAbsolutePath
+ antFileManager.LATEST_COMP = scalaCompiler.getAbsolutePath
+ antFileManager.LATEST_PARTEST = scalaPartest.getAbsolutePath
+ antFileManager.LATEST_ACTORS = scalaActors.getAbsolutePath
javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath)
javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath)
- scalacOpts foreach (antFileManager.SCALAC_OPTS = _)
+ scalacArgsFlat foreach (antFileManager.SCALAC_OPTS ++= _)
timeout foreach (antFileManager.timeout = _)
type TFSet = (Array[File], String, String)
@@ -253,7 +372,9 @@ class PartestTask extends Task with CompilationPathProperty {
(getShootoutFiles, "shootout", "Running shootout tests"),
(getScalapFiles, "scalap", "Running scalap tests"),
(getSpecializedFiles, "specialized", "Running specialized files"),
- (getPresentationFiles, "presentation", "Running presentation compiler test files")
+ (getInstrumentedFiles, "instrumented", "Running instrumented files"),
+ (getPresentationFiles, "presentation", "Running presentation compiler test files"),
+ (getAntFiles, "ant", "Running ant task tests")
)
def runSet(set: TFSet): (Int, Int, Iterable[String]) = {
@@ -261,12 +382,12 @@ class PartestTask extends Task with CompilationPathProperty {
if (files.isEmpty) (0, 0, List())
else {
log(msg)
- val results: Iterable[(String, Int)] = antRunner.reflectiveRunTestsForFiles(files, name)
+ val results: Iterable[(String, TestState)] = antRunner.reflectiveRunTestsForFiles(files, name)
val (succs, fails) = resultsToStatistics(results)
val failed: Iterable[String] = results collect {
- case (path, 1) => path + " [FAILED]"
- case (path, 2) => path + " [TIMOUT]"
+ case (path, TestState.Fail) => path + " [FAILED]"
+ case (path, TestState.Timeout) => path + " [TIMOUT]"
}
// create JUnit Report xml files if directory was specified
@@ -286,7 +407,7 @@ class PartestTask extends Task with CompilationPathProperty {
val allFailures = _results map (_._2) sum
val allFailedPaths = _results flatMap (_._3)
- def f = if (errorOnFailed && allFailures > 0) (sys error _) else log(_: String)
+ def f = if (errorOnFailed && allFailures > 0) buildError(_: String) else log(_: String)
def s = if (allFailures > 1) "s" else ""
val msg =
if (allFailures > 0)
@@ -297,20 +418,21 @@ class PartestTask extends Task with CompilationPathProperty {
f(msg)
}
- def oneResult(res: (String, Int)) =
+
+ private def oneResult(res: (String, TestState)) =
<testcase name={res._1}>{
- res._2 match {
- case 0 => scala.xml.NodeSeq.Empty
- case 1 => <failure message="Test failed"/>
- case 2 => <failure message="Test timed out"/>
- }
- }</testcase>
-
- def testReport(kind: String, results: Iterable[(String, Int)], succs: Int, fails: Int) =
+ res._2 match {
+ case TestState.Ok => scala.xml.NodeSeq.Empty
+ case TestState.Fail => <failure message="Test failed"/>
+ case TestState.Timeout => <failure message="Test timed out"/>
+ }
+ }</testcase>
+
+ private def testReport(kind: String, results: Iterable[(String, TestState)], succs: Int, fails: Int) =
<testsuite name={kind} tests={(succs + fails).toString} failures={fails.toString}>
- <properties/>
- {
- results.map(oneResult(_))
- }
+ <properties/>
+ {
+ results.map(oneResult(_))
+ }
</testsuite>
}
diff --git a/src/partest/scala/tools/partest/ReplTest.scala b/src/partest/scala/tools/partest/ReplTest.scala
index 02ab154..edd1f70 100644
--- a/src/partest/scala/tools/partest/ReplTest.scala
+++ b/src/partest/scala/tools/partest/ReplTest.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -12,28 +12,20 @@ import java.lang.reflect.{ Method => JMethod, Field => JField }
/** A trait for testing repl code. It drops the first line
* of output because the real repl prints a version number.
*/
-abstract class ReplTest extends App {
- def code: String
- // override to add additional settings with strings
- def extraSettings: String = ""
+abstract class ReplTest extends DirectTest {
// override to transform Settings object immediately before the finish
def transformSettings(s: Settings): Settings = s
-
// final because we need to enforce the existence of a couple settings.
- final def settings: Settings = {
- val s = new Settings
- s.Yreplsync.value = true
+ final override def settings: Settings = {
+ val s = super.settings
+ // s.Yreplsync.value = true
s.Xnojline.value = true
- val settingString = sys.props("scala.partest.debug.repl-args") match {
- case null => extraSettings
- case s => extraSettings + " " + s
- }
- s processArgumentString settingString
transformSettings(s)
}
- def eval() = ILoop.runForTranscript(code, settings).lines drop 1
+ def eval() = {
+ val s = settings
+ log("eval(): settings = " + s)
+ ILoop.runForTranscript(code, s).lines drop 1
+ }
def show() = eval() foreach println
-
- try show()
- catch { case t => println(t) ; sys.exit(1) }
}
diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/partest/scala/tools/partest/ScaladocModelTest.scala
new file mode 100644
index 0000000..b9abff6
--- /dev/null
+++ b/src/partest/scala/tools/partest/ScaladocModelTest.scala
@@ -0,0 +1,205 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Vlad Ureche
+ */
+
+package scala.tools.partest
+
+import scala.tools.partest._
+import java.io._
+import scala.tools.nsc._
+import scala.tools.nsc.util.CommandLineParser
+import scala.tools.nsc.doc.{Settings, DocFactory, Universe}
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.nsc.doc.base.comment._
+import scala.tools.nsc.reporters.ConsoleReporter
+
+/** A class for testing scaladoc model generation
+ * - you need to specify the code in the `code` method
+ * - you need to override the testModel method to test the model
+ * - you may specify extra parameters to send to scaladoc in `scaladocSettings`
+ * {{{
+ import scala.tools.nsc.doc.model._
+ import scala.tools.partest.ScaladocModelTest
+
+ object Test extends ScaladocModelTest {
+
+ override def code = """ ... """ // or override def resourceFile = "<file>.scala" (from test/scaladoc/resources)
+ def scaladocSettings = " ... "
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // just need to check the member exists, access methods will throw an error if there's a problem
+ rootPackage._package("scala")._package("test")._class("C")._method("foo")
+ }
+ }
+ * }}}
+ */
+abstract class ScaladocModelTest extends DirectTest {
+
+ /** Override this to give scaladoc command line parameters */
+ def scaladocSettings: String
+
+ /** Override this to test the model */
+ def testModel(root: Package): Unit
+
+ /** Override to feed a file in resources to scaladoc*/
+ def resourceFile: String = null
+
+ /** Override to feed code into scaladoc */
+ override def code =
+ if (resourceFile ne null)
+ io.File(resourcePath + "/" + resourceFile).slurp()
+ else
+ sys.error("Scaladoc Model Test: You need to give a file or some code to feed to scaladoc!")
+
+ def resourcePath = io.Directory(sys.props("partest.cwd") + "/../resources")
+
+ // Implementation follows:
+ override def extraSettings: String = "-usejavacp"
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+
+ try {
+ // 1 - compile with scaladoc and get the model out
+ val universe = model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")})
+ // 2 - check the model generated
+ testModel(universe.rootPackage)
+ println("Done.")
+ } catch {
+ case e: Exception =>
+ println(e)
+ e.printStackTrace
+ }
+ // set err back to the real err handler
+ System.setErr(prevErr)
+ }
+
+ private[this] var settings: Settings = null
+
+ // create a new scaladoc compiler
+ private[this] def newDocFactory: DocFactory = {
+ settings = new Settings(_ => ())
+ settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"!
+ val args = extraSettings + " " + scaladocSettings
+ val command = new ScalaDoc.Command((CommandLineParser tokenize (args)), settings)
+ val docFact = new DocFactory(new ConsoleReporter(settings), settings)
+ docFact
+ }
+
+ // compile with scaladoc and output the result
+ def model: Option[Universe] = newDocFactory.makeUniverse(Right(code))
+
+ // so we don't get the newSettings warning
+ override def isDebug = false
+
+
+ // finally, enable easy navigation inside the entities
+ object access {
+
+ implicit class TemplateAccess(tpl: DocTemplateEntity) {
+ def _class(name: String): DocTemplateEntity = getTheFirst(_classes(name), tpl.qualifiedName + ".class(" + name + ")")
+ def _classes(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case c: DocTemplateEntity with Class => c})
+
+ def _classMbr(name: String): MemberTemplateEntity = getTheFirst(_classesMbr(name), tpl.qualifiedName + ".classMember(" + name + ")")
+ def _classesMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case c: MemberTemplateEntity if c.isClass => c})
+
+ def _trait(name: String): DocTemplateEntity = getTheFirst(_traits(name), tpl.qualifiedName + ".trait(" + name + ")")
+ def _traits(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case t: DocTemplateEntity with Trait => t})
+
+ def _traitMbr(name: String): MemberTemplateEntity = getTheFirst(_traitsMbr(name), tpl.qualifiedName + ".traitMember(" + name + ")")
+ def _traitsMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case t: MemberTemplateEntity if t.isTrait => t})
+
+ def _object(name: String): DocTemplateEntity = getTheFirst(_objects(name), tpl.qualifiedName + ".object(" + name + ")")
+ def _objects(name: String): List[DocTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case o: DocTemplateEntity with Object => o})
+
+ def _objectMbr(name: String): MemberTemplateEntity = getTheFirst(_objectsMbr(name), tpl.qualifiedName + ".objectMember(" + name + ")")
+ def _objectsMbr(name: String): List[MemberTemplateEntity] = tpl.templates.filter(_.name == name).collect({ case o: MemberTemplateEntity if o.isObject => o})
+
+ def _method(name: String): Def = getTheFirst(_methods(name), tpl.qualifiedName + ".method(" + name + ")")
+ def _methods(name: String): List[Def] = tpl.methods.filter(_.name == name)
+
+ def _value(name: String): Val = getTheFirst(_values(name), tpl.qualifiedName + ".value(" + name + ")")
+ def _values(name: String): List[Val] = tpl.values.filter(_.name == name)
+
+ def _conversion(name: String): ImplicitConversion = getTheFirst(_conversions(name), tpl.qualifiedName + ".conversion(" + name + ")")
+ def _conversions(name: String): List[ImplicitConversion] = tpl.conversions.filter(_.conversionQualifiedName == name)
+
+ def _absType(name: String): MemberEntity = getTheFirst(_absTypes(name), tpl.qualifiedName + ".abstractType(" + name + ")")
+ def _absTypes(name: String): List[MemberEntity] = tpl.members.filter(mbr => mbr.name == name && mbr.isAbstractType)
+
+ def _absTypeTpl(name: String): DocTemplateEntity = getTheFirst(_absTypeTpls(name), tpl.qualifiedName + ".abstractType(" + name + ")")
+ def _absTypeTpls(name: String): List[DocTemplateEntity] = tpl.members.collect({ case dtpl: DocTemplateEntity with AbstractType if dtpl.name == name => dtpl })
+
+ def _aliasType(name: String): MemberEntity = getTheFirst(_aliasTypes(name), tpl.qualifiedName + ".aliasType(" + name + ")")
+ def _aliasTypes(name: String): List[MemberEntity] = tpl.members.filter(mbr => mbr.name == name && mbr.isAliasType)
+
+ def _aliasTypeTpl(name: String): DocTemplateEntity = getTheFirst(_aliasTypeTpls(name), tpl.qualifiedName + ".aliasType(" + name + ")")
+ def _aliasTypeTpls(name: String): List[DocTemplateEntity] = tpl.members.collect({ case dtpl: DocTemplateEntity with AliasType if dtpl.name == name => dtpl })
+ }
+
+ trait WithMembers {
+ def members: List[MemberEntity]
+ def _member(name: String): MemberEntity = getTheFirst(_members(name), this.toString + ".member(" + name + ")")
+ def _members(name: String): List[MemberEntity] = members.filter(_.name == name)
+ }
+ implicit class PackageAccess(pack: Package) extends TemplateAccess(pack) {
+ def _package(name: String): Package = getTheFirst(_packages(name), pack.qualifiedName + ".package(" + name + ")")
+ def _packages(name: String): List[Package] = pack.packages.filter(_.name == name)
+ }
+ implicit class DocTemplateEntityMembers(val underlying: DocTemplateEntity) extends WithMembers {
+ def members = underlying.members
+ }
+ implicit class ImplicitConversionMembers(val underlying: ImplicitConversion) extends WithMembers {
+ def members = underlying.members
+ }
+
+ def getTheFirst[T](list: List[T], expl: String): T = list.length match {
+ case 1 => list.head
+ case 0 => sys.error("Error getting " + expl + ": No such element.")
+ case _ => sys.error("Error getting " + expl + ": " + list.length + " elements with this name. " +
+ "All elements in list: [" + list.map({
+ case ent: Entity => ent.kind + " " + ent.qualifiedName
+ case other => other.toString
+ }).mkString(", ") + "]")
+ }
+
+ def extractCommentText(c: Any) = {
+ def extractText(body: Any): String = body match {
+ case s: String => s
+ case s: Seq[_] => s.toList.map(extractText(_)).mkString
+ case p: Product => p.productIterator.toList.map(extractText(_)).mkString
+ case _ => ""
+ }
+ c match {
+ case c: Comment =>
+ extractText(c.body)
+ case b: Body =>
+ extractText(b)
+ }
+ }
+
+ def countLinks(c: Comment, p: EntityLink => Boolean) = {
+ def countLinks(body: Any): Int = body match {
+ case el: EntityLink if p(el) => 1
+ case s: Seq[_] => s.toList.map(countLinks(_)).sum
+ case p: Product => p.productIterator.toList.map(countLinks(_)).sum
+ case _ => 0
+ }
+ countLinks(c.body)
+ }
+
+ def testDiagram(doc: DocTemplateEntity, diag: Option[Diagram], nodes: Int, edges: Int) = {
+ assert(diag.isDefined, doc.qualifiedName + " diagram missing")
+ assert(diag.get.nodes.length == nodes,
+ doc.qualifiedName + "'s diagram: node count " + diag.get.nodes.length + " == " + nodes)
+ assert(diag.get.edges.map(_._2.length).sum == edges,
+ doc.qualifiedName + "'s diagram: edge count " + diag.get.edges.length + " == " + edges)
+ }
+ }
+}
diff --git a/src/partest/scala/tools/partest/SecurityTest.scala b/src/partest/scala/tools/partest/SecurityTest.scala
index 54aad5a..2d6f61d 100644
--- a/src/partest/scala/tools/partest/SecurityTest.scala
+++ b/src/partest/scala/tools/partest/SecurityTest.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
diff --git a/src/partest/scala/tools/partest/SigTest.scala b/src/partest/scala/tools/partest/SigTest.scala
index 072ec00..fe233a4 100644
--- a/src/partest/scala/tools/partest/SigTest.scala
+++ b/src/partest/scala/tools/partest/SigTest.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
* @author Paul Phillips
*/
@@ -8,6 +8,7 @@ package scala.tools.partest
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.ILoop
import java.lang.reflect.{ Method => JMethod, Field => JField }
+import scala.reflect.{ClassTag, classTag}
/** Support code for testing signatures.
*/
@@ -20,31 +21,31 @@ trait SigTest {
def isObjectMethodName(name: String) = classOf[Object].getMethods exists (_.getName == name)
- def fields[T: ClassManifest](p: JField => Boolean) = {
- val cl = classManifest[T].erasure
+ def fields[T: ClassTag](p: JField => Boolean) = {
+ val cl = classTag[T].runtimeClass
val fs = (cl.getFields ++ cl.getDeclaredFields).distinct sortBy (_.getName)
fs filter p
}
- def methods[T: ClassManifest](p: JMethod => Boolean) = {
- val cl = classManifest[T].erasure
+ def methods[T: ClassTag](p: JMethod => Boolean) = {
+ val cl = classTag[T].runtimeClass
val ms = (cl.getMethods ++ cl.getDeclaredMethods).distinct sortBy (x => (x.getName, x.isBridge))
ms filter p
}
- def allFields[T: ClassManifest]() = fields[T](_ => true)
- def allMethods[T: ClassManifest]() = methods[T](m => !isObjectMethodName(m.getName))
- def fieldsNamed[T: ClassManifest](name: String) = fields[T](_.getName == name)
- def methodsNamed[T: ClassManifest](name: String) = methods[T](_.getName == name)
+ def allFields[T: ClassTag]() = fields[T](_ => true)
+ def allMethods[T: ClassTag]() = methods[T](m => !isObjectMethodName(m.getName))
+ def fieldsNamed[T: ClassTag](name: String) = fields[T](_.getName == name)
+ def methodsNamed[T: ClassTag](name: String) = methods[T](_.getName == name)
- def allGenericStrings[T: ClassManifest]() =
+ def allGenericStrings[T: ClassTag]() =
(allMethods[T]() map mstr) ++ (allFields[T]() map fstr)
- def genericStrings[T: ClassManifest](name: String) =
+ def genericStrings[T: ClassTag](name: String) =
(methodsNamed[T](name) map mstr) ++ (fieldsNamed[T](name) map fstr)
- def show[T: ClassManifest](name: String = "") = {
- println(classManifest[T].erasure.getName)
+ def show[T: ClassTag](name: String = "") = {
+ println(classTag[T].runtimeClass.getName)
if (name == "") allGenericStrings[T]() foreach println
else genericStrings[T](name) foreach println
}
diff --git a/src/partest/scala/tools/partest/StoreReporterDirectTest.scala b/src/partest/scala/tools/partest/StoreReporterDirectTest.scala
new file mode 100644
index 0000000..7f3604c
--- /dev/null
+++ b/src/partest/scala/tools/partest/StoreReporterDirectTest.scala
@@ -0,0 +1,15 @@
+package scala.tools.partest
+
+import scala.tools.nsc.Settings
+import scala.tools.nsc.reporters.StoreReporter
+import scala.collection.mutable
+
+trait StoreReporterDirectTest extends DirectTest {
+ lazy val storeReporter: StoreReporter = new scala.tools.nsc.reporters.StoreReporter()
+
+ /** Discards all but the first message issued at a given position. */
+ def filteredInfos: Seq[storeReporter.Info] = storeReporter.infos.groupBy(_.pos).map(_._2.head).toList
+
+ /** Hook into [[scala.tools.partest.DirectTest]] to install the custom reporter */
+ override def reporter(settings: Settings) = storeReporter
+}
diff --git a/src/partest/scala/tools/partest/TestUtil.scala b/src/partest/scala/tools/partest/TestUtil.scala
index b86a8e2..9bfd444 100644
--- a/src/partest/scala/tools/partest/TestUtil.scala
+++ b/src/partest/scala/tools/partest/TestUtil.scala
@@ -1,5 +1,7 @@
package scala.tools.partest
+import scala.reflect.{ classTag, ClassTag }
+
trait TestUtil {
/** Given function and block of code, evaluates code block,
* calls function with nanoseconds elapsed, and returns block result.
@@ -29,8 +31,16 @@ trait TestUtil {
assert(mult <= acceptableMultiple, "Performance difference too great: multiple = " + mult)
}
+
+ def intercept[T <: Exception : ClassTag](code: => Unit): Unit =
+ try {
+ code
+ assert(false, "did not throw " + classTag[T])
+ } catch {
+ case ex: Exception if classTag[T].runtimeClass isInstance ex =>
+ }
}
object TestUtil extends TestUtil {
-}
\ No newline at end of file
+}
diff --git a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala b/src/partest/scala/tools/partest/instrumented/Instrumentation.scala
new file mode 100644
index 0000000..8a284b3
--- /dev/null
+++ b/src/partest/scala/tools/partest/instrumented/Instrumentation.scala
@@ -0,0 +1,92 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.instrumented
+
+import scala.collection.JavaConverters._
+
+case class MethodCallTrace(className: String, methodName: String, methodDescriptor: String) {
+ override def toString(): String = className + "." + methodName + methodDescriptor
+}
+object MethodCallTrace {
+ implicit val ordering: Ordering[MethodCallTrace] = Ordering.by(x => (x.className, x.methodName, x.methodDescriptor))
+}
+
+/**
+ * An object that controls profiling of instrumented byte-code. The instrumentation is achieved
+ * by using `java.lang.instrument` package. The instrumentation agent can be found in
+ * `scala.tools.partest.javaagent` package.
+ *
+ * At the moment the following classes are being instrumented:
+ * * all classes with empty package
+ * * all classes from scala package (except for classes responsible for instrumentation)
+ *
+ * The canonical way of using instrumentation is have a test-case in `files/instrumented` directory.
+ * The following code in main:
+ *
+ * {{{
+ * import scala.tools.partest.instrumented.Instrumentation._
+ * def main(args: Array[String]): Unit = {
+ * startProfiling()
+ * // should box the boolean
+ * println(true)
+ * stopProfiling()
+ * printStatistics()
+ * }
+ * }}}
+ *
+ *
+ * should print:
+ *
+ * {{{
+ * true
+ * Method call statistics:
+ * scala/Predef$.println(Ljava/lang/Object;)V: 1
+ * scala/runtime/BoxesRunTime.boxToBoolean(Z)Ljava/lang/Boolean;: 1
+ * }}}
+ */
+object Instrumentation {
+
+ type Statistics = Map[MethodCallTrace, Int]
+
+ def startProfiling(): Unit = Profiler.startProfiling()
+ def stopProfiling(): Unit = Profiler.stopProfiling()
+ def resetProfiling(): Unit = Profiler.resetProfiling()
+ def isProfiling(): Boolean = Profiler.isProfiling()
+
+ def getStatistics: Statistics = {
+ val isProfiling = Profiler.isProfiling()
+ if (isProfiling) {
+ Profiler.stopProfiling()
+ }
+ val stats = Profiler.getStatistics().asScala.toSeq.map {
+ case (trace, count) => MethodCallTrace(trace.className, trace.methodName, trace.methodDescriptor) -> count.intValue
+ }
+ val res = Map(stats: _*)
+ if (isProfiling) {
+ Profiler.startProfiling()
+ }
+ res
+ }
+
+ val standardFilter: MethodCallTrace => Boolean = t => {
+ // ignore all calls to Console triggered by printing
+ t.className != "scala/Console$" &&
+ // console accesses DynamicVariable, let's discard it too
+ !t.className.startsWith("scala/util/DynamicVariable")
+ }
+
+ def printStatistics(stats: Statistics = getStatistics, filter: MethodCallTrace => Boolean = standardFilter): Unit = {
+ val stats = getStatistics
+ println("Method call statistics:")
+ val toBePrinted = stats.toSeq.filter(p => filter(p._1)).sortBy(_._1)
+ // <count> <trace>
+ val format = "%5d %s\n"
+ toBePrinted foreach {
+ case (trace, count) => printf(format, count, trace)
+ }
+ }
+
+}
diff --git a/src/partest/scala/tools/partest/instrumented/Profiler.java b/src/partest/scala/tools/partest/instrumented/Profiler.java
new file mode 100644
index 0000000..e267e19
--- /dev/null
+++ b/src/partest/scala/tools/partest/instrumented/Profiler.java
@@ -0,0 +1,82 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.instrumented;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * A simple profiler class that counts method invocations. It is being used in byte-code instrumentation by inserting
+ * call to {@link Profiler#methodCalled(String, String, String)} at the beginning of every instrumented class.
+ *
+ * WARNING: This class is INTERNAL implementation detail and should never be used directly. It's made public only
+ * because it must be universally accessible for instrumentation needs. If you want to profile your test use
+ * {@link Instrumentation} instead.
+ */
+public class Profiler {
+
+ private static boolean isProfiling = false;
+ private static Map<MethodCallTrace, Integer> counts = new HashMap<MethodCallTrace, Integer>();
+
+ static public class MethodCallTrace {
+ final String className;
+ final String methodName;
+ final String methodDescriptor;
+
+ public MethodCallTrace(final String className, final String methodName, final String methodDescriptor) {
+ this.className = className;
+ this.methodName = methodName;
+ this.methodDescriptor = methodDescriptor;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (!(obj instanceof MethodCallTrace)) {
+ return false;
+ } else {
+ MethodCallTrace that = (MethodCallTrace) obj;
+ return that.className.equals(className) && that.methodName.equals(methodName) && that.methodDescriptor.equals(methodDescriptor);
+ }
+ }
+ @Override
+ public int hashCode() {
+ return className.hashCode() ^ methodName.hashCode() ^ methodDescriptor.hashCode();
+ }
+ }
+
+ public static void startProfiling() {
+ isProfiling = true;
+ }
+
+ public static void stopProfiling() {
+ isProfiling = false;
+ }
+
+ public static boolean isProfiling() {
+ return isProfiling;
+ }
+
+ public static void resetProfiling() {
+ counts = new HashMap<MethodCallTrace, Integer>();
+ }
+
+ public static void methodCalled(final String className, final String methodName, final String methodDescriptor) {
+ if (isProfiling) {
+ MethodCallTrace trace = new MethodCallTrace(className, methodName, methodDescriptor);
+ Integer counter = counts.get(trace);
+ if (counter == null) {
+ counts.put(trace, 1);
+ } else {
+ counts.put(trace, counter+1);
+ }
+ }
+ }
+
+ public static Map<MethodCallTrace, Integer> getStatistics() {
+ return new HashMap<MethodCallTrace, Integer>(counts);
+ }
+
+}
diff --git a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
new file mode 100644
index 0000000..878c861
--- /dev/null
+++ b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
@@ -0,0 +1,49 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.javaagent;
+
+import java.lang.instrument.ClassFileTransformer;
+import java.security.ProtectionDomain;
+
+import scala.tools.asm.ClassReader;
+import scala.tools.asm.ClassWriter;
+
+public class ASMTransformer implements ClassFileTransformer {
+
+ private boolean shouldTransform(String className) {
+ return
+ // do not instrument instrumentation logic (in order to avoid infinite recursion)
+ !className.startsWith("scala/tools/partest/instrumented/") &&
+ !className.startsWith("scala/tools/partest/javaagent/") &&
+ // we instrument all classes from empty package
+ (!className.contains("/") ||
+ // we instrument all classes from scala package
+ className.startsWith("scala/") ||
+ // we instrument all classes from `instrumented` package
+ className.startsWith("instrumented/"));
+ }
+
+ public byte[] transform(final ClassLoader classLoader, final String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) {
+ if (shouldTransform(className)) {
+ ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS) {
+ @Override protected String getCommonSuperClass(final String type1, final String type2) {
+ // Since we are not recomputing the stack frame map, this should never be called. We override this
+ // method because the default implementation uses reflection, which might try to load the class that
+ // we are currently processing. That leads to weird results like swallowed exceptions and classes
+ // not being transformed.
+ throw new RuntimeException("Unexpected call to getCommonSuperClass(" + type1 + ", " + type2 +
+ ") while transforming " + className);
+ }
+ };
+ ProfilerVisitor visitor = new ProfilerVisitor(writer);
+ ClassReader reader = new ClassReader(classfileBuffer);
+ reader.accept(visitor, 0);
+ return writer.toByteArray();
+ } else {
+ return classfileBuffer;
+ }
+ }
+}
diff --git a/src/partest/scala/tools/partest/javaagent/MANIFEST.MF b/src/partest/scala/tools/partest/javaagent/MANIFEST.MF
new file mode 100644
index 0000000..be0fee4
--- /dev/null
+++ b/src/partest/scala/tools/partest/javaagent/MANIFEST.MF
@@ -0,0 +1 @@
+Premain-Class: scala.tools.partest.javaagent.ProfilingAgent
diff --git a/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java b/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java
new file mode 100644
index 0000000..8306327
--- /dev/null
+++ b/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java
@@ -0,0 +1,59 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.javaagent;
+
+import scala.tools.asm.ClassVisitor;
+import scala.tools.asm.MethodVisitor;
+import scala.tools.asm.Opcodes;
+
+public class ProfilerVisitor extends ClassVisitor implements Opcodes {
+
+ private static String profilerClass = "scala/tools/partest/instrumented/Profiler";
+
+ public ProfilerVisitor(final ClassVisitor cv) {
+ super(ASM4, cv);
+ }
+
+ private String className = null;
+
+ @Override
+ public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
+ className = name;
+ super.visit(version, access, name, signature, superName, interfaces);
+ }
+
+ public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
+ // delegate the method call to the next
+ // chained visitor
+ MethodVisitor mv = cv.visitMethod(access, name, desc, signature, exceptions);
+ if (!profilerClass.equals(className)) {
+ // only instrument non-abstract methods
+ if((access & ACC_ABSTRACT) == 0) {
+ assert(className != null);
+ /* The following instructions do not modify compressed stack frame map so
+ * we don't need to worry about recalculating stack frame map. Specifically,
+ * let's quote "ASM 4.0, A Java bytecode engineering library" guide (p. 40):
+ *
+ * In order to save space, a compiled method does not contain one frame per
+ * instruction: in fact it contains only the frames for the instructions
+ * that correspond to jump targets or exception handlers, or that follow
+ * unconditional jump instructions. Indeed the other frames can be easily
+ * and quickly inferred from these ones.
+ *
+ * Instructions below are just loading constants and calling a method so according
+ * to definition above they do not contribute to compressed stack frame map.
+ */
+ mv.visitLdcInsn(className);
+ mv.visitLdcInsn(name);
+ mv.visitLdcInsn(desc);
+ mv.visitMethodInsn(INVOKESTATIC, profilerClass, "methodCalled",
+ "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
+ }
+ }
+ return mv;
+ }
+
+}
diff --git a/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java b/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
new file mode 100644
index 0000000..3b18987
--- /dev/null
+++ b/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
@@ -0,0 +1,25 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Grzegorz Kossakowski
+ */
+
+package scala.tools.partest.javaagent;
+
+import java.lang.instrument.Instrumentation;
+import java.lang.instrument.UnmodifiableClassException;
+
+/**
+ * Profiling agent that instruments byte-code to insert calls to
+ * {@link scala.tools.partest.instrumented.Profiler#methodCalled(String, String, String)}
+ * by using ASM library for byte-code manipulation.
+ */
+public class ProfilingAgent {
+ public static void premain(String args, Instrumentation inst) throws UnmodifiableClassException {
+ // NOTE: we are adding transformer that won't be applied to classes that are already loaded
+ // This should be ok because premain should be executed before main is executed so Scala library
+ // and the test-case itself won't be loaded yet. We rely here on the fact that ASMTransformer does
+ // not depend on Scala library. In case our assumptions are wrong we can always insert call to
+ // inst.retransformClasses.
+ inst.addTransformer(new ASMTransformer(), false);
+ }
+}
diff --git a/src/partest/scala/tools/partest/nest/AntRunner.scala b/src/partest/scala/tools/partest/nest/AntRunner.scala
index 002e454..93045b8 100644
--- a/src/partest/scala/tools/partest/nest/AntRunner.scala
+++ b/src/partest/scala/tools/partest/nest/AntRunner.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -20,6 +20,10 @@ class AntRunner extends DirectRunner {
var JAVAC_CMD: String = "javac"
var CLASSPATH: String = _
var LATEST_LIB: String = _
+ var LATEST_REFLECT: String = _
+ var LATEST_COMP: String = _
+ var LATEST_PARTEST: String = _
+ var LATEST_ACTORS: String = _
val testRootPath: String = "test"
val testRootDir: Directory = Directory(testRootPath)
}
diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala
index fbf758d..3d902d6 100644
--- a/src/partest/scala/tools/partest/nest/CompileManager.scala
+++ b/src/partest/scala/tools/partest/nest/CompileManager.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Philipp Haller
*/
@@ -9,13 +9,31 @@ package scala.tools.partest
package nest
import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError, io }
+import scala.tools.nsc.io.{ File => SFile }
+import scala.tools.nsc.interactive.RangePositions
import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
import scala.tools.nsc.util.{ ClassPath, FakePos }
+import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
import scala.tools.util.PathResolver
import io.Path
import java.io.{ File, BufferedReader, PrintWriter, FileReader, Writer, FileWriter, StringWriter }
import File.pathSeparator
+sealed abstract class CompilationOutcome {
+ def merge(other: CompilationOutcome): CompilationOutcome
+ def isPositive = this eq CompileSuccess
+ def isNegative = this eq CompileFailed
+}
+case object CompileSuccess extends CompilationOutcome {
+ def merge(other: CompilationOutcome) = other
+}
+case object CompileFailed extends CompilationOutcome {
+ def merge(other: CompilationOutcome) = if (other eq CompileSuccess) this else other
+}
+case object CompilerCrashed extends CompilationOutcome {
+ def merge(other: CompilationOutcome) = this
+}
+
class ExtConsoleReporter(settings: Settings, val writer: PrintWriter) extends ConsoleReporter(settings, Console.in, writer) {
shortname = true
}
@@ -23,19 +41,21 @@ class ExtConsoleReporter(settings: Settings, val writer: PrintWriter) extends Co
class TestSettings(cp: String, error: String => Unit) extends Settings(error) {
def this(cp: String) = this(cp, _ => ())
- deprecation.value = true
nowarnings.value = false
- encoding.value = "ISO-8859-1"
+ encoding.value = "UTF-8"
classpath.value = cp
}
abstract class SimpleCompiler {
- def compile(out: Option[File], files: List[File], kind: String, log: File): Boolean
+ def compile(out: Option[File], files: List[File], kind: String, log: File): CompilationOutcome
}
class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
def newGlobal(settings: Settings, reporter: Reporter): Global =
- new Global(settings, reporter)
+ if (settings.Yrangepos.value)
+ new Global(settings, reporter) with RangePositions
+ else
+ new Global(settings, reporter)
def newGlobal(settings: Settings, logWriter: FileWriter): Global =
newGlobal(settings, new ExtConsoleReporter(settings, new PrintWriter(logWriter)))
@@ -63,7 +83,7 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
(opt2 ::: pluginOption) mkString " "
}
- def compile(out: Option[File], files: List[File], kind: String, log: File): Boolean = {
+ def compile(out: Option[File], files: List[File], kind: String, log: File): CompilationOutcome = {
val testSettings = out match {
case Some(f) => newSettings(f.getAbsolutePath)
case _ => newSettings()
@@ -71,10 +91,21 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
val logWriter = new FileWriter(log)
// check whether there is a ".flags" file
- val flagsFileName = "%s.flags" format (basename(log.getName) dropRight 4) // 4 is "-run" or similar
+ val logFile = basename(log.getName)
+ val flagsFileName = "%s.flags" format (logFile.substring(0, logFile.lastIndexOf("-")))
val argString = (io.File(log).parent / flagsFileName) ifFile (x => updatePluginPath(x.slurp())) getOrElse ""
- val allOpts = fileManager.SCALAC_OPTS+" "+argString
- val args = (allOpts split "\\s").toList
+
+ // slurp local flags (e.g., "A_1.flags")
+ val fstFile = SFile(files(0))
+ def isInGroup(num: Int) = fstFile.stripExtension endsWith ("_" + num)
+ val inGroup = (1 to 9) flatMap (group => if (isInGroup(group)) List(group) else List())
+ val localFlagsList = if (inGroup.nonEmpty) {
+ val localArgString = (fstFile.parent / (fstFile.stripExtension + ".flags")) ifFile (x => updatePluginPath(x.slurp())) getOrElse ""
+ localArgString.split(' ').toList.filter(_.length > 0)
+ } else List()
+
+ val allOpts = fileManager.SCALAC_OPTS.toList ::: argString.split(' ').toList.filter(_.length > 0) ::: localFlagsList
+ val args = allOpts.toList
NestUI.verbose("scalac options: "+allOpts)
@@ -91,7 +122,9 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
case "scalap" => ScalapTestFile.apply
case "scalacheck" => ScalaCheckTestFile.apply
case "specialized" => SpecializedTestFile.apply
+ case "instrumented" => InstrumentedTestFile.apply
case "presentation" => PresentationTestFile.apply
+ case "ant" => AntTestFile.apply
}
val test: TestFile = testFileFn(files.head, fileManager)
if (!test.defineSettings(command.settings, out.isEmpty)) {
@@ -105,10 +138,13 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
try {
NestUI.verbose("compiling "+toCompile)
+ NestUI.verbose("with classpath: "+global.classPath.toString)
+ NestUI.verbose("and java classpath: "+ propOrEmpty("java.class.path"))
try new global.Run compile toCompile
catch {
case FatalError(msg) =>
testRep.error(null, "fatal error: " + msg)
+ return CompilerCrashed
}
testRep.printSummary()
@@ -116,81 +152,13 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
}
finally logWriter.close()
- !testRep.hasErrors
+ if (testRep.hasErrors) CompileFailed
+ else CompileSuccess
}
}
-// class ReflectiveCompiler(val fileManager: ConsoleFileManager) extends SimpleCompiler {
-// import fileManager.{latestCompFile, latestPartestFile}
-//
-// val sepUrls = Array(latestCompFile.toURI.toURL, latestPartestFile.toURI.toURL)
-// //NestUI.verbose("constructing URLClassLoader from URLs "+latestCompFile+" and "+latestPartestFile)
-//
-// val sepLoader = new java.net.URLClassLoader(sepUrls, null)
-//
-// val sepCompilerClass =
-// sepLoader.loadClass("scala.tools.partest.nest.DirectCompiler")
-// val sepCompiler = sepCompilerClass.newInstance()
-//
-// // needed for reflective invocation
-// val fileClass = Class.forName("java.io.File")
-// val stringClass = Class.forName("java.lang.String")
-// val sepCompileMethod =
-// sepCompilerClass.getMethod("compile", fileClass, stringClass)
-// val sepCompileMethod2 =
-// sepCompilerClass.getMethod("compile", fileClass, stringClass, fileClass)
-//
-// /* This method throws java.lang.reflect.InvocationTargetException
-// * if the compiler crashes.
-// * This exception is handled in the shouldCompile and shouldFailCompile
-// * methods of class CompileManager.
-// */
-// def compile(out: Option[File], files: List[File], kind: String, log: File): Boolean = {
-// val res = sepCompileMethod2.invoke(sepCompiler, out, files, kind, log).asInstanceOf[java.lang.Boolean]
-// res.booleanValue()
-// }
-// }
-
class CompileManager(val fileManager: FileManager) {
- var compiler: SimpleCompiler = new DirectCompiler(fileManager)
-
- var numSeparateCompilers = 1
- def createSeparateCompiler() = {
- numSeparateCompilers += 1
- compiler = new /*ReflectiveCompiler*/ DirectCompiler(fileManager)
- }
-
- /* This method returns true iff compilation succeeds.
- */
- def shouldCompile(files: List[File], kind: String, log: File): Boolean = {
- createSeparateCompiler()
- compiler.compile(None, files, kind, log)
- }
-
- /* This method returns true iff compilation succeeds.
- */
- def shouldCompile(out: File, files: List[File], kind: String, log: File): Boolean = {
- createSeparateCompiler()
- compiler.compile(Some(out), files, kind, log)
- }
-
- /* This method returns true iff compilation fails
- * _and_ the compiler does _not_ crash or loop.
- *
- * If the compiler crashes, this method returns false.
- */
- def shouldFailCompile(files: List[File], kind: String, log: File): Boolean = {
- createSeparateCompiler()
- !compiler.compile(None, files, kind, log)
- }
-
- /* This method returns true iff compilation fails
- * _and_ the compiler does _not_ crash or loop.
- *
- * If the compiler crashes, this method returns false.
- */
- def shouldFailCompile(out: File, files: List[File], kind: String, log: File): Boolean = {
- createSeparateCompiler()
- !compiler.compile(Some(out), files, kind, log)
- }
+ private def newCompiler = new DirectCompiler(fileManager)
+ def attemptCompile(outdir: Option[File], sources: List[File], kind: String, log: File): CompilationOutcome =
+ newCompiler.compile(outdir, sources, kind, log)
}
diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
index 02edf0f..08e709d 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Philipp Haller
*/
@@ -44,7 +44,7 @@ class ConsoleFileManager extends FileManager {
def this(buildPath: String, rawClasses: Boolean, moreOpts: String) = {
this(buildPath, rawClasses)
- SCALAC_OPTS = SCALAC_OPTS+" "+moreOpts
+ SCALAC_OPTS = SCALAC_OPTS ++ moreOpts.split(' ').toSeq.filter(_.length > 0)
}
lazy val srcDir = PathSettings.srcDir
@@ -65,7 +65,7 @@ class ConsoleFileManager extends FileManager {
}
CLASSPATH = {
- val libs = (srcDir / Directory("lib")).files filter (_ hasExtension "jar") map (_.normalize.path)
+ val libs = (srcDir / Directory("lib")).files filter (_ hasExtension "jar") map (_.toCanonical.path)
// add all jars in libs
(CLASSPATH :: libs.toList) mkString pathSeparator
@@ -74,15 +74,17 @@ class ConsoleFileManager extends FileManager {
def findLatest() {
NestUI.verbose("test parent: "+testParent)
- def prefixFileWith(parent: File, relPath: String) = (io.File(parent) / relPath).normalize
- def prefixFile(relPath: String) = (testParent / relPath).normalize
+ def prefixFileWith(parent: File, relPath: String) = (io.File(parent) / relPath).toCanonical
+ def prefixFile(relPath: String) = (testParent / relPath).toCanonical
if (!testClasses.isEmpty) {
- testClassesDir = Path(testClasses.get).normalize.toDirectory
+ testClassesDir = Path(testClasses.get).toCanonical.toDirectory
NestUI.verbose("Running with classes in "+testClassesDir)
latestFile = testClassesDir.parent / "bin"
latestLibFile = testClassesDir / "library"
+ latestActorsFile = testClassesDir / "library" / "actors"
+ latestReflectFile = testClassesDir / "reflect"
latestCompFile = testClassesDir / "compiler"
latestPartestFile = testClassesDir / "partest"
latestFjbgFile = testParent / "lib" / "fjbg.jar"
@@ -92,14 +94,19 @@ class ConsoleFileManager extends FileManager {
NestUI.verbose("Running on "+dir)
latestFile = dir / "bin"
latestLibFile = dir / "lib/scala-library.jar"
+ latestActorsFile = dir / "lib/scala-actors.jar"
+ latestReflectFile = dir / "lib/scala-reflect.jar"
latestCompFile = dir / "lib/scala-compiler.jar"
latestPartestFile = dir / "lib/scala-partest.jar"
+ latestFjbgFile = testParent / "lib" / "fjbg.jar"
}
else {
def setupQuick() {
NestUI.verbose("Running build/quick")
latestFile = prefixFile("build/quick/bin")
latestLibFile = prefixFile("build/quick/classes/library")
+ latestActorsFile = prefixFile("build/quick/classes/library/actors")
+ latestReflectFile = prefixFile("build/quick/classes/reflect")
latestCompFile = prefixFile("build/quick/classes/compiler")
latestPartestFile = prefixFile("build/quick/classes/partest")
}
@@ -109,6 +116,8 @@ class ConsoleFileManager extends FileManager {
val p = testParent.getParentFile
latestFile = prefixFileWith(p, "bin")
latestLibFile = prefixFileWith(p, "lib/scala-library.jar")
+ latestActorsFile = prefixFileWith(p, "lib/scala-actors.jar")
+ latestReflectFile = prefixFileWith(p, "lib/scala-reflect.jar")
latestCompFile = prefixFileWith(p, "lib/scala-compiler.jar")
latestPartestFile = prefixFileWith(p, "lib/scala-partest.jar")
}
@@ -117,6 +126,8 @@ class ConsoleFileManager extends FileManager {
NestUI.verbose("Running dists/latest")
latestFile = prefixFile("dists/latest/bin")
latestLibFile = prefixFile("dists/latest/lib/scala-library.jar")
+ latestActorsFile = prefixFile("dists/latest/lib/scala-actors.jar")
+ latestReflectFile = prefixFile("dists/latest/lib/scala-reflect.jar")
latestCompFile = prefixFile("dists/latest/lib/scala-compiler.jar")
latestPartestFile = prefixFile("dists/latest/lib/scala-partest.jar")
}
@@ -125,6 +136,8 @@ class ConsoleFileManager extends FileManager {
NestUI.verbose("Running build/pack")
latestFile = prefixFile("build/pack/bin")
latestLibFile = prefixFile("build/pack/lib/scala-library.jar")
+ latestActorsFile = prefixFile("build/pack/lib/scala-actors.jar")
+ latestReflectFile = prefixFile("build/pack/lib/scala-reflect.jar")
latestCompFile = prefixFile("build/pack/lib/scala-compiler.jar")
latestPartestFile = prefixFile("build/pack/lib/scala-partest.jar")
}
@@ -138,10 +151,10 @@ class ConsoleFileManager extends FileManager {
names map (x => prefixFile(base + "/" + x).lastModified) reduceLeft (_ max _)
// detect most recent build
- val quickTime = mostRecentOf("build/quick/classes", "compiler/compiler.properties", "library/library.properties")
- val packTime = mostRecentOf("build/pack/lib", "scala-compiler.jar", "scala-library.jar")
- val distTime = mostRecentOf("dists/latest/lib", "scala-compiler.jar", "scala-library.jar")
- val instTime = mostRecentOf("lib", "scala-compiler.jar", "scala-library.jar")
+ val quickTime = mostRecentOf("build/quick/classes", "compiler/compiler.properties", "reflect/reflect.properties", "library/library.properties")
+ val packTime = mostRecentOf("build/pack/lib", "scala-compiler.jar", "scala-reflect.jar", "scala-library.jar")
+ val distTime = mostRecentOf("dists/latest/lib", "scala-compiler.jar", "scala-reflect.jar", "scala-library.jar")
+ val instTime = mostRecentOf("lib", "scala-compiler.jar", "scala-reflect.jar", "scala-library.jar")
val pairs = Map(
(quickTime, () => setupQuick()),
@@ -157,12 +170,22 @@ class ConsoleFileManager extends FileManager {
}
LATEST_LIB = latestLibFile.getAbsolutePath
+ LATEST_REFLECT = latestReflectFile.getAbsolutePath
+ LATEST_COMP = latestCompFile.getAbsolutePath
+ LATEST_PARTEST = latestPartestFile.getAbsolutePath
+ LATEST_ACTORS = latestActorsFile.getAbsolutePath
}
var LATEST_LIB: String = ""
+ var LATEST_REFLECT: String = ""
+ var LATEST_COMP: String = ""
+ var LATEST_PARTEST: String = ""
+ var LATEST_ACTORS: String = ""
var latestFile: File = _
var latestLibFile: File = _
+ var latestActorsFile: File = _
+ var latestReflectFile: File = _
var latestCompFile: File = _
var latestPartestFile: File = _
var latestFjbgFile: File = _
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
index 953823e..e016fb7 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Philipp Haller
*/
@@ -16,12 +16,14 @@ import scala.tools.nsc.Properties.{ versionMsg, setProp }
import scala.tools.nsc.util.CommandLineParser
import scala.tools.nsc.io
import io.{ Path }
+import scala.collection.{ mutable, immutable }
class ConsoleRunner extends DirectRunner {
import PathSettings.{ srcDir, testRoot }
case class TestSet(kind: String, filter: Path => Boolean, msg: String)
- def stdFilter(p: Path) = p.isDirectory || (p hasExtension "scala")
+ private def stdFilter(p: Path) = p.isDirectory || (p hasExtension "scala")
+ private def antFilter(p: Path) = p.isFile && (p endsWith "build.xml")
val testSets = {
val pathFilter: Path => Boolean = x => x.isDirectory || (x hasExtension "scala")
@@ -38,7 +40,9 @@ class ConsoleRunner extends DirectRunner {
TestSet("scalacheck", stdFilter, "Testing ScalaCheck tests"),
TestSet("scalap", _.isDirectory, "Run scalap decompiler tests"),
TestSet("specialized", stdFilter, "Testing specialized tests"),
- TestSet("presentation", _.isDirectory, "Testing presentation compiler tests.")
+ TestSet("instrumented", stdFilter, "Testing instrumented tests"),
+ TestSet("presentation", _.isDirectory, "Testing presentation compiler tests."),
+ TestSet("ant", antFilter, "Run Ant task tests.")
)
}
@@ -103,8 +107,6 @@ class ConsoleRunner extends DirectRunner {
if (parsed isSet "--timeout") fileManager.timeout = parsed("--timeout")
if (parsed isSet "--debug") setProp("partest.debug", "true")
- setProperties() // must be done after processing command line arguments such as --debug
-
def addTestFile(file: File) = {
if (!file.exists)
NestUI.failure("Test file '%s' not found, skipping.\n" format file)
@@ -169,13 +171,10 @@ class ConsoleRunner extends DirectRunner {
if (grepMessage != "")
NestUI.normal(grepMessage + "\n")
- val start = System.currentTimeMillis
- val (successes, failures) = testCheckAll(enabledTestSets)
- val end = System.currentTimeMillis
-
+ val ((successes, failures), elapsedMillis) = timed(testCheckAll(enabledTestSets))
val total = successes + failures
- val elapsedSecs = (end - start)/1000
+ val elapsedSecs = elapsedMillis/1000
val elapsedMins = elapsedSecs/60
val elapsedHrs = elapsedMins/60
val dispMins = elapsedMins - elapsedHrs * 60
@@ -186,7 +185,6 @@ class ConsoleRunner extends DirectRunner {
form(elapsedHrs)+":"+form(dispMins)+":"+form(dispSecs)
}
- println
if (failures == 0)
NestUI.success("All of "+total+" tests were successful (elapsed time: "+dispElapsed+")\n")
else
@@ -211,10 +209,18 @@ class ConsoleRunner extends DirectRunner {
* @return (success count, failure count)
*/
def testCheckAll(enabledSets: List[TestSet]): (Int, Int) = {
- def kindOf(f: File) = (srcDir relativize Path(f).normalize).segments.head
+ def kindOf(f: File) = {
+ (srcDir relativize Path(f).toCanonical).segments match {
+ case (".." :: "scaladoc" :: xs) => xs.head
+ case xs => xs.head
+ }
+ }
val (valid, invalid) = testFiles partition (x => testSetKinds contains kindOf(x))
- invalid foreach (x => NestUI.failure("Invalid test file '%s', skipping.\n" format x))
+ invalid foreach (x => NestUI.failure(
+ "Invalid test file '%s', skipping.\n".format(x) +
+ "(Test kind '%s' not in known set '%s')".format(kindOf(x), testSetKinds))
+ )
val grouped = (valid groupBy kindOf).toList sortBy (x => testSetKinds indexOf x._1)
val runTestsFileLists =
diff --git a/src/partest/scala/tools/partest/nest/Diff.java b/src/partest/scala/tools/partest/nest/Diff.java
deleted file mode 100644
index f69fc68..0000000
--- a/src/partest/scala/tools/partest/nest/Diff.java
+++ /dev/null
@@ -1,873 +0,0 @@
-
-package scala.tools.partest.nest;
-
-import java.util.Hashtable;
-
-/** A class to compare IndexedSeqs of objects. The result of comparison
- is a list of <code>change</code> objects which form an
- edit script. The objects compared are traditionally lines
- of text from two files. Comparison options such as "ignore
- whitespace" are implemented by modifying the <code>equals</code>
- and <code>hashcode</code> methods for the objects compared.
-<p>
- The basic algorithm is described in: </br>
- "An O(ND) Difference Algorithm and its Variations", Eugene Myers,
- Algorithmica Vol. 1 No. 2, 1986, p 251.
-<p>
- This class outputs different results from GNU diff 1.15 on some
- inputs. Our results are actually better (smaller change list, smaller
- total size of changes), but it would be nice to know why. Perhaps
- there is a memory overwrite bug in GNU diff 1.15.
-
- @author Stuart D. Gathman, translated from GNU diff 1.15
- Copyright (C) 2000 Business Management Systems, Inc.
-<p>
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 1, or (at your option)
- any later version.
-<p>
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-<p>
- You should have received a copy of the <a href=COPYING.txt>
- GNU General Public License</a>
- along with this program; if not, write to the Free Software
- Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
-
- */
-
-public class Diff {
-
- /** Prepare to find differences between two arrays. Each element of
- the arrays is translated to an "equivalence number" based on
- the result of <code>equals</code>. The original Object arrays
- are no longer needed for computing the differences. They will
- be needed again later to print the results of the comparison as
- an edit script, if desired.
- */
- public Diff(Object[] a,Object[] b) {
- Hashtable<Object, Integer> h = new Hashtable<Object, Integer>(a.length + b.length);
- filevec[0] = new file_data(a,h);
- filevec[1] = new file_data(b,h);
- }
-
- /** 1 more than the maximum equivalence value used for this or its
- sibling file. */
- private int equiv_max = 1;
-
- /** When set to true, the comparison uses a heuristic to speed it up.
- With this heuristic, for files with a constant small density
- of changes, the algorithm is linear in the file size. */
- public boolean heuristic = false;
-
- /** When set to true, the algorithm returns a guarranteed minimal
- set of changes. This makes things slower, sometimes much slower. */
- public boolean no_discards = false;
-
- private int[] xvec, yvec; /* IndexedSeqs being compared. */
- private int[] fdiag; /* IndexedSeq, indexed by diagonal, containing
- the X coordinate of the point furthest
- along the given diagonal in the forward
- search of the edit matrix. */
- private int[] bdiag; /* IndexedSeq, indexed by diagonal, containing
- the X coordinate of the point furthest
- along the given diagonal in the backward
- search of the edit matrix. */
- private int fdiagoff, bdiagoff;
- private final file_data[] filevec = new file_data[2];
- private int cost;
-
- /** Find the midpoint of the shortest edit script for a specified
- portion of the two files.
-
- We scan from the beginnings of the files, and simultaneously from the ends,
- doing a breadth-first search through the space of edit-sequence.
- When the two searches meet, we have found the midpoint of the shortest
- edit sequence.
-
- The value returned is the number of the diagonal on which the midpoint lies.
- The diagonal number equals the number of inserted lines minus the number
- of deleted lines (counting only lines before the midpoint).
- The edit cost is stored into COST; this is the total number of
- lines inserted or deleted (counting only lines before the midpoint).
-
- This function assumes that the first lines of the specified portions
- of the two files do not match, and likewise that the last lines do not
- match. The caller must trim matching lines from the beginning and end
- of the portions it is going to specify.
-
- Note that if we return the "wrong" diagonal value, or if
- the value of bdiag at that diagonal is "wrong",
- the worst this can do is cause suboptimal diff output.
- It cannot cause incorrect diff output. */
-
- private int diag (int xoff, int xlim, int yoff, int ylim) {
- final int[] fd = fdiag; // Give the compiler a chance.
- final int[] bd = bdiag; // Additional help for the compiler.
- final int[] xv = xvec; // Still more help for the compiler.
- final int[] yv = yvec; // And more and more . . .
- final int dmin = xoff - ylim; // Minimum valid diagonal.
- final int dmax = xlim - yoff; // Maximum valid diagonal.
- final int fmid = xoff - yoff; // Center diagonal of top-down search.
- final int bmid = xlim - ylim; // Center diagonal of bottom-up search.
- int fmin = fmid, fmax = fmid; // Limits of top-down search.
- int bmin = bmid, bmax = bmid; // Limits of bottom-up search.
- /* True if southeast corner is on an odd
- diagonal with respect to the northwest. */
- final boolean odd = (fmid - bmid & 1) != 0;
-
- fd[fdiagoff + fmid] = xoff;
- bd[bdiagoff + bmid] = xlim;
-
- for (int c = 1;; ++c)
- {
- int d; /* Active diagonal. */
- boolean big_snake = false;
-
- /* Extend the top-down search by an edit step in each diagonal. */
- if (fmin > dmin)
- fd[fdiagoff + --fmin - 1] = -1;
- else
- ++fmin;
- if (fmax < dmax)
- fd[fdiagoff + ++fmax + 1] = -1;
- else
- --fmax;
- for (d = fmax; d >= fmin; d -= 2)
- {
- int x, y, oldx, tlo = fd[fdiagoff + d - 1], thi = fd[fdiagoff + d + 1];
-
- if (tlo >= thi)
- x = tlo + 1;
- else
- x = thi;
- oldx = x;
- y = x - d;
- while (x < xlim && y < ylim && xv[x] == yv[y]) {
- ++x; ++y;
- }
- if (x - oldx > 20)
- big_snake = true;
- fd[fdiagoff + d] = x;
- if (odd && bmin <= d && d <= bmax && bd[bdiagoff + d] <= fd[fdiagoff + d])
- {
- cost = 2 * c - 1;
- return d;
- }
- }
-
- /* Similar extend the bottom-up search. */
- if (bmin > dmin)
- bd[bdiagoff + --bmin - 1] = Integer.MAX_VALUE;
- else
- ++bmin;
- if (bmax < dmax)
- bd[bdiagoff + ++bmax + 1] = Integer.MAX_VALUE;
- else
- --bmax;
- for (d = bmax; d >= bmin; d -= 2)
- {
- int x, y, oldx, tlo = bd[bdiagoff + d - 1], thi = bd[bdiagoff + d + 1];
-
- if (tlo < thi)
- x = tlo;
- else
- x = thi - 1;
- oldx = x;
- y = x - d;
- while (x > xoff && y > yoff && xv[x - 1] == yv[y - 1]) {
- --x; --y;
- }
- if (oldx - x > 20)
- big_snake = true;
- bd[bdiagoff + d] = x;
- if (!odd && fmin <= d && d <= fmax && bd[bdiagoff + d] <= fd[fdiagoff + d])
- {
- cost = 2 * c;
- return d;
- }
- }
-
- /* Heuristic: check occasionally for a diagonal that has made
- lots of progress compared with the edit distance.
- If we have any such, find the one that has made the most
- progress and return it as if it had succeeded.
-
- With this heuristic, for files with a constant small density
- of changes, the algorithm is linear in the file size. */
-
- if (c > 200 && big_snake && heuristic)
- {
- int best = 0;
- int bestpos = -1;
-
- for (d = fmax; d >= fmin; d -= 2)
- {
- int dd = d - fmid;
- if ((fd[fdiagoff + d] - xoff)*2 - dd > 12 * (c + (dd > 0 ? dd : -dd)))
- {
- if (fd[fdiagoff + d] * 2 - dd > best
- && fd[fdiagoff + d] - xoff > 20
- && fd[fdiagoff + d] - d - yoff > 20)
- {
- int k;
- int x = fd[fdiagoff + d];
-
- /* We have a good enough best diagonal;
- now insist that it end with a significant snake. */
- for (k = 1; k <= 20; k++)
- if (xvec[x - k] != yvec[x - d - k])
- break;
-
- if (k == 21)
- {
- best = fd[fdiagoff + d] * 2 - dd;
- bestpos = d;
- }
- }
- }
- }
- if (best > 0)
- {
- cost = 2 * c - 1;
- return bestpos;
- }
-
- best = 0;
- for (d = bmax; d >= bmin; d -= 2)
- {
- int dd = d - bmid;
- if ((xlim - bd[bdiagoff + d])*2 + dd > 12 * (c + (dd > 0 ? dd : -dd)))
- {
- if ((xlim - bd[bdiagoff + d]) * 2 + dd > best
- && xlim - bd[bdiagoff + d] > 20
- && ylim - (bd[bdiagoff + d] - d) > 20)
- {
- /* We have a good enough best diagonal;
- now insist that it end with a significant snake. */
- int k;
- int x = bd[bdiagoff + d];
-
- for (k = 0; k < 20; k++)
- if (xvec[x + k] != yvec[x - d + k])
- break;
- if (k == 20)
- {
- best = (xlim - bd[bdiagoff + d]) * 2 + dd;
- bestpos = d;
- }
- }
- }
- }
- if (best > 0)
- {
- cost = 2 * c - 1;
- return bestpos;
- }
- }
- }
- }
-
- /** Compare in detail contiguous subsequences of the two files
- which are known, as a whole, to match each other.
-
- The results are recorded in the IndexedSeqs filevec[N].changed_flag, by
- storing a 1 in the element for each line that is an insertion or deletion.
-
- The subsequence of file 0 is [XOFF, XLIM) and likewise for file 1.
-
- Note that XLIM, YLIM are exclusive bounds.
- All line numbers are origin-0 and discarded lines are not counted. */
-
- private void compareseq (int xoff, int xlim, int yoff, int ylim) {
- /* Slide down the bottom initial diagonal. */
- while (xoff < xlim && yoff < ylim && xvec[xoff] == yvec[yoff]) {
- ++xoff; ++yoff;
- }
- /* Slide up the top initial diagonal. */
- while (xlim > xoff && ylim > yoff && xvec[xlim - 1] == yvec[ylim - 1]) {
- --xlim; --ylim;
- }
-
- /* Handle simple cases. */
- if (xoff == xlim)
- while (yoff < ylim)
- filevec[1].changed_flag[1+filevec[1].realindexes[yoff++]] = true;
- else if (yoff == ylim)
- while (xoff < xlim)
- filevec[0].changed_flag[1+filevec[0].realindexes[xoff++]] = true;
- else
- {
- /* Find a point of correspondence in the middle of the files. */
-
- int d = diag (xoff, xlim, yoff, ylim);
- int c = cost;
- int f = fdiag[fdiagoff + d];
- int b = bdiag[bdiagoff + d];
-
- if (c == 1)
- {
- /* This should be impossible, because it implies that
- one of the two subsequences is empty,
- and that case was handled above without calling `diag'.
- Let's verify that this is true. */
- throw new IllegalArgumentException("Empty subsequence");
- }
- else
- {
- /* Use that point to split this problem into two subproblems. */
- compareseq (xoff, b, yoff, b - d);
- /* This used to use f instead of b,
- but that is incorrect!
- It is not necessarily the case that diagonal d
- has a snake from b to f. */
- compareseq (b, xlim, b - d, ylim);
- }
- }
- }
-
- /** Discard lines from one file that have no matches in the other file.
- */
-
- private void discard_confusing_lines() {
- filevec[0].discard_confusing_lines(filevec[1]);
- filevec[1].discard_confusing_lines(filevec[0]);
- }
-
- private boolean inhibit = false;
-
- /** Adjust inserts/deletes of blank lines to join changes
- as much as possible.
- */
-
- private void shift_boundaries() {
- if (inhibit)
- return;
- filevec[0].shift_boundaries(filevec[1]);
- filevec[1].shift_boundaries(filevec[0]);
- }
-
- public interface ScriptBuilder {
- /** Scan the tables of which lines are inserted and deleted,
- producing an edit script.
- @param changed0 true for lines in first file which do not match 2nd
- @param len0 number of lines in first file
- @param changed1 true for lines in 2nd file which do not match 1st
- @param len1 number of lines in 2nd file
- @return a linked list of changes - or null
- */
- public change build_script(
- boolean[] changed0,int len0,
- boolean[] changed1,int len1
- );
- }
-
- /** Scan the tables of which lines are inserted and deleted,
- producing an edit script in reverse order. */
-
- static class ReverseScript implements ScriptBuilder {
- public change build_script(
- final boolean[] changed0,int len0,
- final boolean[] changed1,int len1)
- {
- change script = null;
- int i0 = 0, i1 = 0;
- while (i0 < len0 || i1 < len1) {
- if (changed0[1+i0] || changed1[1+i1]) {
- int line0 = i0, line1 = i1;
-
- /* Find # lines changed here in each file. */
- while (changed0[1+i0]) ++i0;
- while (changed1[1+i1]) ++i1;
-
- /* Record this change. */
- script = new change(line0, line1, i0 - line0, i1 - line1, script);
- }
-
- /* We have reached lines in the two files that match each other. */
- i0++; i1++;
- }
-
- return script;
- }
- }
-
- static class ForwardScript implements ScriptBuilder {
- /** Scan the tables of which lines are inserted and deleted,
- producing an edit script in forward order. */
- public change build_script(
- final boolean[] changed0,int len0,
- final boolean[] changed1,int len1)
- {
- change script = null;
- int i0 = len0, i1 = len1;
-
- while (i0 >= 0 || i1 >= 0)
- {
- if (changed0[i0] || changed1[i1])
- {
- int line0 = i0, line1 = i1;
-
- /* Find # lines changed here in each file. */
- while (changed0[i0]) --i0;
- while (changed1[i1]) --i1;
-
- /* Record this change. */
- script = new change(i0, i1, line0 - i0, line1 - i1, script);
- }
-
- /* We have reached lines in the two files that match each other. */
- i0--; i1--;
- }
-
- return script;
- }
- }
-
- /** Standard ScriptBuilders. */
- public final static ScriptBuilder
- forwardScript = new ForwardScript(),
- reverseScript = new ReverseScript();
-
- /* Report the differences of two files. DEPTH is the current directory
- depth. */
- public final change diff_2(final boolean reverse) {
- return diff(reverse ? reverseScript : forwardScript);
- }
-
- /** Get the results of comparison as an edit script. The script
- is described by a list of changes. The standard ScriptBuilder
- implementations provide for forward and reverse edit scripts.
- Alternate implementations could, for instance, list common elements
- instead of differences.
- @param bld an object to build the script from change flags
- @return the head of a list of changes
- */
- public change diff(final ScriptBuilder bld) {
-
- /* Some lines are obviously insertions or deletions
- because they don't match anything. Detect them now,
- and avoid even thinking about them in the main comparison algorithm. */
-
- discard_confusing_lines ();
-
- /* Now do the main comparison algorithm, considering just the
- undiscarded lines. */
-
- xvec = filevec[0].undiscarded;
- yvec = filevec[1].undiscarded;
-
- int diags =
- filevec[0].nondiscarded_lines + filevec[1].nondiscarded_lines + 3;
- fdiag = new int[diags];
- fdiagoff = filevec[1].nondiscarded_lines + 1;
- bdiag = new int[diags];
- bdiagoff = filevec[1].nondiscarded_lines + 1;
-
- compareseq (0, filevec[0].nondiscarded_lines,
- 0, filevec[1].nondiscarded_lines);
- fdiag = null;
- bdiag = null;
-
- /* Modify the results slightly to make them prettier
- in cases where that can validly be done. */
-
- shift_boundaries ();
-
- /* Get the results of comparison in the form of a chain
- of `struct change's -- an edit script. */
- return bld.build_script(
- filevec[0].changed_flag,
- filevec[0].buffered_lines,
- filevec[1].changed_flag,
- filevec[1].buffered_lines
- );
-
- }
-
- /** The result of comparison is an "edit script": a chain of change objects.
- Each change represents one place where some lines are deleted
- and some are inserted.
-
- LINE0 and LINE1 are the first affected lines in the two files (origin 0).
- DELETED is the number of lines deleted here from file 0.
- INSERTED is the number of lines inserted here in file 1.
-
- If DELETED is 0 then LINE0 is the number of the line before
- which the insertion was done; vice versa for INSERTED and LINE1. */
-
- public static class change {
- /** Previous or next edit command. */
- public change link;
- /** # lines of file 1 changed here. */
- public final int inserted;
- /** # lines of file 0 changed here. */
- public final int deleted;
- /** Line number of 1st deleted line. */
- public final int line0;
- /** Line number of 1st inserted line. */
- public final int line1;
-
- /** Cons an additional entry onto the front of an edit script OLD.
- LINE0 and LINE1 are the first affected lines in the two files (origin 0).
- DELETED is the number of lines deleted here from file 0.
- INSERTED is the number of lines inserted here in file 1.
-
- If DELETED is 0 then LINE0 is the number of the line before
- which the insertion was done; vice versa for INSERTED and LINE1. */
- public change(int line0, int line1, int deleted, int inserted, change old) {
- this.line0 = line0;
- this.line1 = line1;
- this.inserted = inserted;
- this.deleted = deleted;
- this.link = old;
- //System.err.println(line0+","+line1+","+inserted+","+deleted);
- }
- }
-
- /** Data on one input file being compared.
- */
-
- class file_data {
-
- /** Allocate changed array for the results of comparison. */
- void clear() {
- /* Allocate a flag for each line of each file, saying whether that line
- is an insertion or deletion.
- Allocate an extra element, always zero, at each end of each IndexedSeq.
- */
- changed_flag = new boolean[buffered_lines + 2];
- }
-
- /** Return equiv_count[I] as the number of lines in this file
- that fall in equivalence class I.
- @return the array of equivalence class counts.
- */
- int[] equivCount() {
- int[] equiv_count = new int[equiv_max];
- for (int i = 0; i < buffered_lines; ++i)
- ++equiv_count[equivs[i]];
- return equiv_count;
- }
-
- /** Discard lines that have no matches in another file.
-
- A line which is discarded will not be considered by the actual
- comparison algorithm; it will be as if that line were not in the file.
- The file's `realindexes' table maps virtual line numbers
- (which don't count the discarded lines) into real line numbers;
- this is how the actual comparison algorithm produces results
- that are comprehensible when the discarded lines are counted.
-<p>
- When we discard a line, we also mark it as a deletion or insertion
- so that it will be printed in the output.
- @param f the other file
- */
- void discard_confusing_lines(file_data f) {
- clear();
- /* Set up table of which lines are going to be discarded. */
- final byte[] discarded = discardable(f.equivCount());
-
- /* Don't really discard the provisional lines except when they occur
- in a run of discardables, with nonprovisionals at the beginning
- and end. */
- filterDiscards(discarded);
-
- /* Actually discard the lines. */
- discard(discarded);
- }
-
- /** Mark to be discarded each line that matches no line of another file.
- If a line matches many lines, mark it as provisionally discardable.
- @see equivCount()
- @param counts The count of each equivalence number for the other file.
- @return 0=nondiscardable, 1=discardable or 2=provisionally discardable
- for each line
- */
-
- private byte[] discardable(final int[] counts) {
- final int end = buffered_lines;
- final byte[] discards = new byte[end];
- final int[] equivs = this.equivs;
- int many = 5;
- int tem = end / 64;
-
- /* Multiply MANY by approximate square root of number of lines.
- That is the threshold for provisionally discardable lines. */
- while ((tem = tem >> 2) > 0)
- many *= 2;
-
- for (int i = 0; i < end; i++)
- {
- int nmatch;
- if (equivs[i] == 0)
- continue;
- nmatch = counts[equivs[i]];
- if (nmatch == 0)
- discards[i] = 1;
- else if (nmatch > many)
- discards[i] = 2;
- }
- return discards;
- }
-
- /** Don't really discard the provisional lines except when they occur
- in a run of discardables, with nonprovisionals at the beginning
- and end. */
-
- private void filterDiscards(final byte[] discards) {
- final int end = buffered_lines;
-
- for (int i = 0; i < end; i++)
- {
- /* Cancel provisional discards not in middle of run of discards. */
- if (discards[i] == 2)
- discards[i] = 0;
- else if (discards[i] != 0)
- {
- /* We have found a nonprovisional discard. */
- int j;
- int length;
- int provisional = 0;
-
- /* Find end of this run of discardable lines.
- Count how many are provisionally discardable. */
- for (j = i; j < end; j++)
- {
- if (discards[j] == 0)
- break;
- if (discards[j] == 2)
- ++provisional;
- }
-
- /* Cancel provisional discards at end, and shrink the run. */
- while (j > i && discards[j - 1] == 2) {
- discards[--j] = 0; --provisional;
- }
-
- /* Now we have the length of a run of discardable lines
- whose first and last are not provisional. */
- length = j - i;
-
- /* If 1/4 of the lines in the run are provisional,
- cancel discarding of all provisional lines in the run. */
- if (provisional * 4 > length)
- {
- while (j > i)
- if (discards[--j] == 2)
- discards[j] = 0;
- }
- else
- {
- int consec;
- int minimum = 1;
- int tem = length / 4;
-
- /* MINIMUM is approximate square root of LENGTH/4.
- A subrun of two or more provisionals can stand
- when LENGTH is at least 16.
- A subrun of 4 or more can stand when LENGTH >= 64. */
- while ((tem = tem >> 2) > 0)
- minimum *= 2;
- minimum++;
-
- /* Cancel any subrun of MINIMUM or more provisionals
- within the larger run. */
- for (j = 0, consec = 0; j < length; j++)
- if (discards[i + j] != 2)
- consec = 0;
- else if (minimum == ++consec)
- /* Back up to start of subrun, to cancel it all. */
- j -= consec;
- else if (minimum < consec)
- discards[i + j] = 0;
-
- /* Scan from beginning of run
- until we find 3 or more nonprovisionals in a row
- or until the first nonprovisional at least 8 lines in.
- Until that point, cancel any provisionals. */
- for (j = 0, consec = 0; j < length; j++)
- {
- if (j >= 8 && discards[i + j] == 1)
- break;
- if (discards[i + j] == 2) {
- consec = 0; discards[i + j] = 0;
- }
- else if (discards[i + j] == 0)
- consec = 0;
- else
- consec++;
- if (consec == 3)
- break;
- }
-
- /* I advances to the last line of the run. */
- i += length - 1;
-
- /* Same thing, from end. */
- for (j = 0, consec = 0; j < length; j++)
- {
- if (j >= 8 && discards[i - j] == 1)
- break;
- if (discards[i - j] == 2) {
- consec = 0; discards[i - j] = 0;
- }
- else if (discards[i - j] == 0)
- consec = 0;
- else
- consec++;
- if (consec == 3)
- break;
- }
- }
- }
- }
- }
-
- /** Actually discard the lines.
- @param discards flags lines to be discarded
- */
- private void discard(final byte[] discards) {
- final int end = buffered_lines;
- int j = 0;
- for (int i = 0; i < end; ++i)
- if (no_discards || discards[i] == 0)
- {
- undiscarded[j] = equivs[i];
- realindexes[j++] = i;
- }
- else
- changed_flag[1+i] = true;
- nondiscarded_lines = j;
- }
-
- file_data(Object[] data, Hashtable<Object, Integer> h) {
- buffered_lines = data.length;
-
- equivs = new int[buffered_lines];
- undiscarded = new int[buffered_lines];
- realindexes = new int[buffered_lines];
-
- for (int i = 0; i < data.length; ++i) {
- Integer ir = h.get(data[i]);
- if (ir == null)
- h.put(data[i], new Integer(equivs[i] = equiv_max++));
- else
- equivs[i] = ir.intValue();
- }
- }
-
- /** Adjust inserts/deletes of blank lines to join changes
- as much as possible.
-
- We do something when a run of changed lines include a blank
- line at one end and have an excluded blank line at the other.
- We are free to choose which blank line is included.
- `compareseq' always chooses the one at the beginning,
- but usually it is cleaner to consider the following blank line
- to be the "change". The only exception is if the preceding blank line
- would join this change to other changes.
- @param f the file being compared against
- */
-
- void shift_boundaries(file_data f) {
- final boolean[] changed = changed_flag;
- final boolean[] other_changed = f.changed_flag;
- int i = 0;
- int j = 0;
- int i_end = buffered_lines;
- int preceding = -1;
- int other_preceding = -1;
-
- for (;;)
- {
- int start, end, other_start;
-
- /* Scan forwards to find beginning of another run of changes.
- Also keep track of the corresponding point in the other file. */
-
- while (i < i_end && !changed[1+i])
- {
- while (other_changed[1+j++])
- /* Non-corresponding lines in the other file
- will count as the preceding batch of changes. */
- other_preceding = j;
- i++;
- }
-
- if (i == i_end)
- break;
-
- start = i;
- other_start = j;
-
- for (;;)
- {
- /* Now find the end of this run of changes. */
-
- while (i < i_end && changed[1+i]) i++;
- end = i;
-
- /* If the first changed line matches the following unchanged one,
- and this run does not follow right after a previous run,
- and there are no lines deleted from the other file here,
- then classify the first changed line as unchanged
- and the following line as changed in its place. */
-
- /* You might ask, how could this run follow right after another?
- Only because the previous run was shifted here. */
-
- if (end != i_end
- && equivs[start] == equivs[end]
- && !other_changed[1+j]
- && end != i_end
- && !((preceding >= 0 && start == preceding)
- || (other_preceding >= 0
- && other_start == other_preceding)))
- {
- changed[1+end++] = true;
- changed[1+start++] = false;
- ++i;
- /* Since one line-that-matches is now before this run
- instead of after, we must advance in the other file
- to keep in synch. */
- ++j;
- }
- else
- break;
- }
-
- preceding = i;
- other_preceding = j;
- }
- }
-
- /** Number of elements (lines) in this file. */
- final int buffered_lines;
-
- /** IndexedSeq, indexed by line number, containing an equivalence code for
- each line. It is this IndexedSeq that is actually compared with that
- of another file to generate differences. */
- private final int[] equivs;
-
- /** IndexedSeq, like the previous one except that
- the elements for discarded lines have been squeezed out. */
- final int[] undiscarded;
-
- /** IndexedSeq mapping virtual line numbers (not counting discarded lines)
- to real ones (counting those lines). Both are origin-0. */
- final int[] realindexes;
-
- /** Total number of nondiscarded lines. */
- int nondiscarded_lines;
-
- /** Array, indexed by real origin-1 line number,
- containing true for a line that is an insertion or a deletion.
- The results of comparison are stored here. */
- boolean[] changed_flag;
-
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/DiffPrint.java b/src/partest/scala/tools/partest/nest/DiffPrint.java
deleted file mode 100644
index 31f9a1b..0000000
--- a/src/partest/scala/tools/partest/nest/DiffPrint.java
+++ /dev/null
@@ -1,606 +0,0 @@
-
-package scala.tools.partest.nest;
-
-import java.io.*;
-import java.util.Vector;
-import java.util.Date;
-//import com.objectspace.jgl.predicates.UnaryPredicate;
-
-interface UnaryPredicate {
- boolean execute(Object obj);
-}
-
-/** A simple framework for printing change lists produced by <code>Diff</code>.
- @see bmsi.util.Diff
- @author Stuart D. Gathman
- Copyright (C) 2000 Business Management Systems, Inc.
-<p>
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 1, or (at your option)
- any later version.
-<p>
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-<p>
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
- */
-public class DiffPrint {
- /** A Base class for printing edit scripts produced by Diff.
- This class divides the change list into "hunks", and calls
- <code>print_hunk</code> for each hunk. Various utility methods
- are provided as well.
- */
- public static abstract class Base {
- protected Base(Object[] a,Object[] b, Writer w) {
- outfile = new PrintWriter(w);
- file0 = a;
- file1 = b;
- }
- /** Set to ignore certain kinds of lines when printing
- an edit script. For example, ignoring blank lines or comments.
- */
- protected UnaryPredicate ignore = null;
-
- /** Set to the lines of the files being compared.
- */
- protected Object[] file0, file1;
-
- /** Divide SCRIPT into pieces by calling HUNKFUN and
- print each piece with PRINTFUN.
- Both functions take one arg, an edit script.
-
- PRINTFUN takes a subscript which belongs together (with a null
- link at the end) and prints it. */
- public void print_script(Diff.change script) {
- Diff.change next = script;
-
- while (next != null)
- {
- Diff.change t, end;
-
- /* Find a set of changes that belong together. */
- t = next;
- end = hunkfun(next);
-
- /* Disconnect them from the rest of the changes,
- making them a hunk, and remember the rest for next iteration. */
- next = end.link;
- end.link = null;
- //if (DEBUG)
- // debug_script(t);
-
- /* Print this hunk. */
- print_hunk(t);
-
- /* Reconnect the script so it will all be freed properly. */
- end.link = next;
- }
- outfile.flush();
- }
-
- /** Called with the tail of the script
- and returns the last link that belongs together with the start
- of the tail. */
-
- protected Diff.change hunkfun(Diff.change hunk) {
- return hunk;
- }
-
- protected int first0, last0, first1, last1, deletes, inserts;
- protected PrintWriter outfile;
-
- /** Look at a hunk of edit script and report the range of lines in each file
- that it applies to. HUNK is the start of the hunk, which is a chain
- of `struct change'. The first and last line numbers of file 0 are stored
- in *FIRST0 and *LAST0, and likewise for file 1 in *FIRST1 and *LAST1.
- Note that these are internal line numbers that count from 0.
-
- If no lines from file 0 are deleted, then FIRST0 is LAST0+1.
-
- Also set *DELETES nonzero if any lines of file 0 are deleted
- and set *INSERTS nonzero if any lines of file 1 are inserted.
- If only ignorable lines are inserted or deleted, both are
- set to 0. */
-
- protected void analyze_hunk(Diff.change hunk) {
- int f0, l0 = 0, f1, l1 = 0, show_from = 0, show_to = 0;
- int i;
- Diff.change next;
- boolean nontrivial = (ignore == null);
-
- show_from = show_to = 0;
-
- f0 = hunk.line0;
- f1 = hunk.line1;
-
- for (next = hunk; next != null; next = next.link)
- {
- l0 = next.line0 + next.deleted - 1;
- l1 = next.line1 + next.inserted - 1;
- show_from += next.deleted;
- show_to += next.inserted;
- for (i = next.line0; i <= l0 && ! nontrivial; i++)
- if (!ignore.execute(file0[i]))
- nontrivial = true;
- for (i = next.line1; i <= l1 && ! nontrivial; i++)
- if (!ignore.execute(file1[i]))
- nontrivial = true;
- }
-
- first0 = f0;
- last0 = l0;
- first1 = f1;
- last1 = l1;
-
- /* If all inserted or deleted lines are ignorable,
- tell the caller to ignore this hunk. */
-
- if (!nontrivial)
- show_from = show_to = 0;
-
- deletes = show_from;
- inserts = show_to;
- }
-
- /** Print the script header which identifies the files compared. */
- protected void print_header(String filea, String fileb) { }
-
- protected abstract void print_hunk(Diff.change hunk);
-
- protected void print_1_line(String pre,Object linbuf) {
- outfile.println(pre + linbuf.toString());
- }
-
- /** Print a pair of line numbers with SEPCHAR, translated for file FILE.
- If the two numbers are identical, print just one number.
-
- Args A and B are internal line numbers.
- We print the translated (real) line numbers. */
-
- protected void print_number_range (char sepchar, int a, int b) {
- /* Note: we can have B < A in the case of a range of no lines.
- In this case, we should print the line number before the range,
- which is B. */
- if (++b > ++a)
- outfile.print("" + a + sepchar + b);
- else
- outfile.print(b);
- }
-
- public static char change_letter(int inserts, int deletes) {
- if (inserts == 0)
- return 'd';
- else if (deletes == 0)
- return 'a';
- else
- return 'c';
- }
- }
-
- /** Print a change list in the standard diff format.
- */
- public static class NormalPrint extends Base {
-
- public NormalPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- /** Print a hunk of a normal diff.
- This is a contiguous portion of a complete edit script,
- describing changes in consecutive lines. */
-
- protected void print_hunk (Diff.change hunk) {
-
- /* Determine range of line numbers involved in each file. */
- analyze_hunk(hunk);
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Print out the line number header for this hunk */
- print_number_range (',', first0, last0);
- outfile.print(change_letter(inserts, deletes));
- print_number_range (',', first1, last1);
- outfile.println();
-
- /* Print the lines that the first file has. */
- if (deletes != 0)
- for (int i = first0; i <= last0; i++)
- print_1_line ("< ", file0[i]);
-
- if (inserts != 0 && deletes != 0)
- outfile.println("---");
-
- /* Print the lines that the second file has. */
- if (inserts != 0)
- for (int i = first1; i <= last1; i++)
- print_1_line ("> ", file1[i]);
- }
- }
-
- /** Prints an edit script in a format suitable for input to <code>ed</code>.
- The edit script must be generated with the reverse option to
- be useful as actual <code>ed</code> input.
- */
- public static class EdPrint extends Base {
-
- public EdPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- /** Print a hunk of an ed diff */
- protected void print_hunk(Diff.change hunk) {
-
- /* Determine range of line numbers involved in each file. */
- analyze_hunk (hunk);
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Print out the line number header for this hunk */
- print_number_range (',', first0, last0);
- outfile.println(change_letter(inserts, deletes));
-
- /* Print new/changed lines from second file, if needed */
- if (inserts != 0)
- {
- boolean inserting = true;
- for (int i = first1; i <= last1; i++)
- {
- /* Resume the insert, if we stopped. */
- if (! inserting)
- outfile.println(i - first1 + first0 + "a");
- inserting = true;
-
- /* If the file's line is just a dot, it would confuse `ed'.
- So output it with a double dot, and set the flag LEADING_DOT
- so that we will output another ed-command later
- to change the double dot into a single dot. */
-
- if (".".equals(file1[i]))
- {
- outfile.println("..");
- outfile.println(".");
- /* Now change that double dot to the desired single dot. */
- outfile.println(i - first1 + first0 + 1 + "s/^\\.\\././");
- inserting = false;
- }
- else
- /* Line is not `.', so output it unmodified. */
- print_1_line ("", file1[i]);
- }
-
- /* End insert mode, if we are still in it. */
- if (inserting)
- outfile.println(".");
- }
- }
- }
-
- /** Prints an edit script in context diff format. This and its
- 'unified' variation is used for source code patches.
- */
- public static class ContextPrint extends Base {
-
- protected int context = 3;
-
- public ContextPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- protected void print_context_label (String mark, File inf, String label) {
- if (label != null)
- outfile.println(mark + ' ' + label);
- else if (inf.lastModified() > 0)
- // FIXME: use DateFormat to get precise format needed.
- outfile.println(
- mark + ' ' + inf.getPath() + '\t' + new Date(inf.lastModified())
- );
- else
- /* Don't pretend that standard input is ancient. */
- outfile.println(mark + ' ' + inf.getPath());
- }
-
- public void print_header(String filea,String fileb) {
- print_context_label ("***", new File(filea), filea);
- print_context_label ("---", new File(fileb), fileb);
- }
-
- /** If function_regexp defined, search for start of function. */
- private String find_function(Object[] lines, int start) {
- return null;
- }
-
- protected void print_function(Object[] file,int start) {
- String function = find_function (file0, first0);
- if (function != null) {
- outfile.print(" ");
- outfile.print(
- (function.length() < 40) ? function : function.substring(0,40)
- );
- }
- }
-
- protected void print_hunk(Diff.change hunk) {
-
- /* Determine range of line numbers involved in each file. */
-
- analyze_hunk (hunk);
-
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Include a context's width before and after. */
-
- first0 = Math.max(first0 - context, 0);
- first1 = Math.max(first1 - context, 0);
- last0 = Math.min(last0 + context, file0.length - 1);
- last1 = Math.min(last1 + context, file1.length - 1);
-
-
- outfile.print("***************");
-
- /* If we looked for and found a function this is part of,
- include its name in the header of the diff section. */
- print_function (file0, first0);
-
- outfile.println();
- outfile.print("*** ");
- print_number_range (',', first0, last0);
- outfile.println(" ****");
-
- if (deletes != 0) {
- Diff.change next = hunk;
-
- for (int i = first0; i <= last0; i++) {
- /* Skip past changes that apply (in file 0)
- only to lines before line I. */
-
- while (next != null && next.line0 + next.deleted <= i)
- next = next.link;
-
- /* Compute the marking for line I. */
-
- String prefix = " ";
- if (next != null && next.line0 <= i)
- /* The change NEXT covers this line.
- If lines were inserted here in file 1, this is "changed".
- Otherwise it is "deleted". */
- prefix = (next.inserted > 0) ? "!" : "-";
-
- print_1_line (prefix, file0[i]);
- }
- }
-
- outfile.print("--- ");
- print_number_range (',', first1, last1);
- outfile.println(" ----");
-
- if (inserts != 0) {
- Diff.change next = hunk;
-
- for (int i = first1; i <= last1; i++) {
- /* Skip past changes that apply (in file 1)
- only to lines before line I. */
-
- while (next != null && next.line1 + next.inserted <= i)
- next = next.link;
-
- /* Compute the marking for line I. */
-
- String prefix = " ";
- if (next != null && next.line1 <= i)
- /* The change NEXT covers this line.
- If lines were deleted here in file 0, this is "changed".
- Otherwise it is "inserted". */
- prefix = (next.deleted > 0) ? "!" : "+";
-
- print_1_line (prefix, file1[i]);
- }
- }
- }
- }
-
- /** Prints an edit script in context diff format. This and its
- 'unified' variation is used for source code patches.
- */
- public static class UnifiedPrint extends ContextPrint {
-
- public UnifiedPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- public void print_header(String filea,String fileb) {
- print_context_label ("---", new File(filea), filea);
- print_context_label ("+++", new File(fileb), fileb);
- }
-
- private void print_number_range (int a, int b) {
- //translate_range (file, a, b, &trans_a, &trans_b);
-
- /* Note: we can have B < A in the case of a range of no lines.
- In this case, we should print the line number before the range,
- which is B. */
- if (b < a)
- outfile.print(b + ",0");
- else
- super.print_number_range(',',a,b);
- }
-
- protected void print_hunk(Diff.change hunk) {
- /* Determine range of line numbers involved in each file. */
- analyze_hunk (hunk);
-
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Include a context's width before and after. */
-
- first0 = Math.max(first0 - context, 0);
- first1 = Math.max(first1 - context, 0);
- last0 = Math.min(last0 + context, file0.length - 1);
- last1 = Math.min(last1 + context, file1.length - 1);
-
-
-
- outfile.print("@@ -");
- print_number_range (first0, last0);
- outfile.print(" +");
- print_number_range (first1, last1);
- outfile.print(" @@");
-
- /* If we looked for and found a function this is part of,
- include its name in the header of the diff section. */
- print_function(file0,first0);
-
- outfile.println();
-
- Diff.change next = hunk;
- int i = first0;
- int j = first1;
-
- while (i <= last0 || j <= last1) {
-
- /* If the line isn't a difference, output the context from file 0. */
-
- if (next == null || i < next.line0) {
- outfile.print(' ');
- print_1_line ("", file0[i++]);
- j++;
- }
- else {
- /* For each difference, first output the deleted part. */
-
- int k = next.deleted;
- while (k-- > 0) {
- outfile.print('-');
- print_1_line ("", file0[i++]);
- }
-
- /* Then output the inserted part. */
-
- k = next.inserted;
- while (k-- > 0) {
- outfile.print('+');
- print_1_line ("", file1[j++]);
- }
-
- /* We're done with this hunk, so on to the next! */
-
- next = next.link;
- }
- }
- }
- }
-
-
- /** Read a text file into an array of String. This provides basic diff
- functionality. A more advanced diff utility will use specialized
- objects to represent the text lines, with options to, for example,
- convert sequences of whitespace to a single space for comparison
- purposes.
- */
- static String[] slurp(String file) throws IOException {
- BufferedReader rdr = new BufferedReader(new FileReader(file));
- Vector<String> s = new Vector<String>();
- for (;;) {
- String line = rdr.readLine();
- if (line == null) break;
- s.addElement(line);
- }
- String[] a = new String[s.size()];
- s.copyInto(a);
- return a;
- }
-
- public static void main(String[] argv) throws IOException {
- String filea = argv[argv.length - 2];
- String fileb = argv[argv.length - 1];
- String[] a = slurp(filea);
- String[] b = slurp(fileb);
- Diff d = new Diff(a,b);
- char style = 'n';
- for (int i = 0; i < argv.length - 2; ++i) {
- String f = argv[i];
- if (f.startsWith("-")) {
- for (int j = 1; j < f.length(); ++j) {
- switch (f.charAt(j)) {
- case 'e': // Ed style
- style = 'e'; break;
- case 'c': // Context diff
- style = 'c'; break;
- case 'u':
- style = 'u'; break;
- }
- }
- }
- }
- boolean reverse = style == 'e';
- Diff.change script = d.diff_2(reverse);
- if (script == null)
- System.err.println("No differences");
- else {
- Base p;
- Writer w = new OutputStreamWriter(System.out);
- switch (style) {
- case 'e':
- p = new EdPrint(a,b,w); break;
- case 'c':
- p = new ContextPrint(a,b,w); break;
- case 'u':
- p = new UnifiedPrint(a,b,w); break;
- default:
- p = new NormalPrint(a,b,w);
- }
- p.print_header(filea,fileb);
- p.print_script(script);
- }
- }
-
- public static void doDiff(String[] argv, Writer w) throws IOException {
- String filea = argv[argv.length - 2];
- String fileb = argv[argv.length - 1];
- String[] a = slurp(filea);
- String[] b = slurp(fileb);
- Diff d = new Diff(a,b);
- char style = 'n';
- for (int i = 0; i < argv.length - 2; ++i) {
- String f = argv[i];
- if (f.startsWith("-")) {
- for (int j = 1; j < f.length(); ++j) {
- switch (f.charAt(j)) {
- case 'e': // Ed style
- style = 'e'; break;
- case 'c': // Context diff
- style = 'c'; break;
- case 'u':
- style = 'u'; break;
- }
- }
- }
- }
- boolean reverse = style == 'e';
- Diff.change script = d.diff_2(reverse);
- if (script == null)
- w.write("No differences\n");
- else {
- Base p;
- switch (style) {
- case 'e':
- p = new EdPrint(a,b,w); break;
- case 'c':
- p = new ContextPrint(a,b,w); break;
- case 'u':
- p = new UnifiedPrint(a,b,w); break;
- default:
- p = new NormalPrint(a,b,w);
- }
- p.print_header(filea,fileb);
- p.print_script(script);
- }
- }
-
-}
diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala
index 5f9cfd5..32ef8b4 100644
--- a/src/partest/scala/tools/partest/nest/DirectRunner.scala
+++ b/src/partest/scala/tools/partest/nest/DirectRunner.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Philipp Haller
*/
@@ -8,24 +8,22 @@
package scala.tools.partest
package nest
-import java.io.{ File }
-import java.util.StringTokenizer
-import scala.util.Properties.{ setProp }
+import java.io.File
+import scala.util.Properties.setProp
import scala.tools.nsc.util.ScalaClassLoader
import scala.tools.nsc.io.Path
import scala.collection.{ mutable, immutable }
-import scala.actors.Actor._
-import scala.actors.TIMEOUT
+import java.util.concurrent._
+import scala.collection.convert.decorateAll._
case class TestRunParams(val scalaCheckParentClassLoader: ScalaClassLoader)
trait DirectRunner {
-
def fileManager: FileManager
- import PartestDefaults.numActors
+ import PartestDefaults.numThreads
- def denotesTestFile(arg: String) = Path(arg).hasExtension("scala", "res")
+ def denotesTestFile(arg: String) = Path(arg).hasExtension("scala", "res", "xml")
def denotesTestDir(arg: String) = Path(arg).ifDirectory(_.files.nonEmpty) exists (x => x)
def denotesTestPath(arg: String) = denotesTestDir(arg) || denotesTestFile(arg)
@@ -38,44 +36,40 @@ trait DirectRunner {
false
})
}
+ def runTestsForFiles(_kindFiles: List[File], kind: String): immutable.Map[String, TestState] = {
+ System.setProperty("line.separator", "\n")
- def setProperties() {
- if (isPartestDebug)
- scala.actors.Debug.level = 3
-
- if (PartestDefaults.poolSize.isEmpty) {
- scala.actors.Debug.info("actors.corePoolSize not defined")
- setProp("actors.corePoolSize", "16")
- }
- }
-
- def runTestsForFiles(_kindFiles: List[File], kind: String): immutable.Map[String, Int] = {
- val kindFiles = onlyValidTestPaths(_kindFiles)
- val groupSize = (kindFiles.length / numActors) + 1
-
- val consFM = new ConsoleFileManager
- import consFM.{ latestCompFile, latestLibFile, latestPartestFile }
- val scalacheckURL = PathSettings.scalaCheck.toURL
+ // @partest maintainer: we cannot create a fresh file manager here
+ // since the FM must respect --buildpath and --classpath from the command line
+ // for example, see how it's done in ReflectiveRunner
+ //val consFM = new ConsoleFileManager
+ //import consFM.{ latestCompFile, latestLibFile, latestPartestFile }
+ val latestCompFile = new File(fileManager.LATEST_COMP)
+ val latestReflectFile = new File(fileManager.LATEST_REFLECT)
+ val latestLibFile = new File(fileManager.LATEST_LIB)
+ val latestPartestFile = new File(fileManager.LATEST_PARTEST)
+ val latestActorsFile = new File(fileManager.LATEST_ACTORS)
+ val scalacheckURL = PathSettings.scalaCheck.toURL
val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs(
- List(scalacheckURL, latestCompFile.toURI.toURL, latestLibFile.toURI.toURL, latestPartestFile.toURI.toURL)
+ scalacheckURL :: (List(latestCompFile, latestReflectFile, latestLibFile, latestActorsFile, latestPartestFile).map(_.toURI.toURL))
)
- Output.init
- val workers = kindFiles.grouped(groupSize).toList map { toTest =>
- val worker = new Worker(fileManager, TestRunParams(scalaCheckParentClassLoader))
- worker.start()
- worker ! RunTests(kind, toTest)
- worker
+ val kindFiles = onlyValidTestPaths(_kindFiles)
+ val pool = Executors.newFixedThreadPool(numThreads)
+ val manager = new RunnerManager(kind, fileManager, TestRunParams(scalaCheckParentClassLoader))
+ val futures = kindFiles map (f => (f, pool submit callable(manager runTest f))) toMap
+
+ pool.shutdown()
+ try if (!pool.awaitTermination(4, TimeUnit.HOURS))
+ NestUI.warning("Thread pool timeout elapsed before all tests were complete!")
+ catch { case t: InterruptedException =>
+ NestUI.warning("Thread pool was interrupted")
+ t.printStackTrace()
}
- workers map { w =>
- receiveWithin(3600 * 1000) {
- case Results(testResults) => testResults
- case TIMEOUT =>
- // add at least one failure
- NestUI.verbose("worker timed out; adding failed test")
- Map("worker timed out; adding failed test" -> 2)
- }
- } reduceLeft (_ ++ _)
+ for ((file, future) <- futures) yield {
+ val state = if (future.isCancelled) TestState.Timeout else future.get
+ (file.getAbsolutePath, state)
+ }
}
}
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index a9bf186..70fdb33 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Philipp Haller
*/
@@ -13,26 +13,41 @@ import java.io.{File, FilenameFilter, IOException, StringWriter,
FileReader, PrintWriter, FileWriter}
import java.net.URI
import scala.tools.nsc.io.{ Path, Directory, File => SFile }
-import scala.collection.mutable.HashMap
-import sys.process._
+import scala.sys.process._
+import scala.collection.mutable
-trait FileManager {
+trait FileUtil {
/**
- * Compares two files using a Java implementation of the GNU diff
- * available at http://www.bmsi.com/java/#diff.
+ * Compares two files using difflib to produce a unified diff.
*
* @param f1 the first file to be compared
* @param f2 the second file to be compared
- * @return the text difference between the compared files
+ * @return the unified diff of the compared files or the empty string if they're equal
*/
def compareFiles(f1: File, f2: File): String = {
- val diffWriter = new StringWriter
- val args = Array(f1.getAbsolutePath(), f2.getAbsolutePath())
+ compareContents(io.Source.fromFile(f1).getLines.toSeq, io.Source.fromFile(f2).getLines.toSeq, f1.getName, f2.getName)
+ }
- DiffPrint.doDiff(args, diffWriter)
- val res = diffWriter.toString
- if (res startsWith "No") "" else res
+ /**
+ * Compares two lists of lines using difflib to produce a unified diff.
+ *
+ * @param origLines the first seq of lines to be compared
+ * @param newLines the second seq of lines to be compared
+ * @param origName file name to be used in unified diff for `origLines`
+ * @param newName file name to be used in unified diff for `newLines`
+ * @return the unified diff of the `origLines` and `newLines` or the empty string if they're equal
+ */
+ def compareContents(origLines: Seq[String], newLines: Seq[String], origName: String = "a", newName: String = "b"): String = {
+ import collection.JavaConverters._
+
+ val diff = difflib.DiffUtils.diff(origLines.asJava, newLines.asJava)
+ if (diff.getDeltas.isEmpty) ""
+ else difflib.DiffUtils.generateUnifiedDiff(origName, newName, origLines.asJava, diff, 1).asScala.mkString("\n")
}
+}
+object FileUtil extends FileUtil { }
+
+trait FileManager extends FileUtil {
def testRootDir: Directory
def testRootPath: String
@@ -42,13 +57,17 @@ trait FileManager {
var CLASSPATH: String
var LATEST_LIB: String
+ var LATEST_REFLECT: String
+ var LATEST_COMP: String
+ var LATEST_PARTEST: String
+ var LATEST_ACTORS: String
var showDiff = false
var updateCheck = false
var showLog = false
var failed = false
- var SCALAC_OPTS = PartestDefaults.scalacOpts
+ var SCALAC_OPTS = PartestDefaults.scalacOpts.split(' ').toSeq
var JAVA_OPTS = PartestDefaults.javaOpts
var timeout = PartestDefaults.timeout
// how can 15 minutes not be enough? What are you doing, run/lisp.scala?
@@ -56,7 +75,7 @@ trait FileManager {
var oneTestTimeout = 60 * 60 * 1000
/** Only when --debug is given. */
- lazy val testTimings = new HashMap[String, Long]
+ lazy val testTimings = new mutable.HashMap[String, Long]
def recordTestTiming(name: String, milliseconds: Long) =
synchronized { testTimings(name) = milliseconds }
def showTestTimings() {
diff --git a/src/partest/scala/tools/partest/nest/NestRunner.scala b/src/partest/scala/tools/partest/nest/NestRunner.scala
index 27c1ef6..e398d2e 100644
--- a/src/partest/scala/tools/partest/nest/NestRunner.scala
+++ b/src/partest/scala/tools/partest/nest/NestRunner.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Philipp Haller
*/
diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala
index 5fd8345..70db6d0 100644
--- a/src/partest/scala/tools/partest/nest/NestUI.scala
+++ b/src/partest/scala/tools/partest/nest/NestUI.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Philipp Haller
*/
@@ -80,10 +80,12 @@ object NestUI {
println(" --scalacheck run ScalaCheck tests")
println(" --script run script runner tests")
println(" --shootout run shootout tests")
+ println(" --instrumented run instrumented tests")
+ println(" --presentation run presentation compiler tests")
println(" --grep <expr> run all tests whose source file contains <expr>")
println
println(" Other options:")
- println(" --pack pick compiler/library in build/pack, and run all tests")
+ println(" --pack pick compiler/reflect/library in build/pack, and run all tests")
println(" --show-log show log")
println(" --show-diff show diff between log and check file")
println(" --failed run only those tests that failed during the last run")
diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala
index f5670ec..0ba3477 100644
--- a/src/partest/scala/tools/partest/nest/PathSettings.scala
+++ b/src/partest/scala/tools/partest/nest/PathSettings.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
*/
package scala.tools.partest
@@ -28,7 +28,7 @@ object PathSettings {
}
// Directory <root>/test/files
- lazy val srcDir = Directory(testRoot / srcDirName normalize)
+ lazy val srcDir = Directory(testRoot / srcDirName toCanonical)
// Directory <root>/test/files/lib
lazy val srcLibDir = Directory(srcDir / "lib")
@@ -40,6 +40,21 @@ object PathSettings {
sys.error("No instrumented.jar found in %s".format(srcSpecLibDir))
}
+ // Directory <root>/test/files/codelib
+ lazy val srcCodeLibDir = Directory(srcDir / "codelib")
+
+ lazy val srcCodeLib: File = (
+ findJar(srcCodeLibDir, "code")
+ orElse findJar(Directory(testRoot / "files" / "codelib"), "code") // work with --srcpath pending
+ getOrElse sys.error("No code.jar found in %s".format(srcCodeLibDir))
+ )
+
+ lazy val instrumentationAgentLib: File = {
+ findJar(buildPackLibDir.files, "scala-partest-javaagent") getOrElse {
+ sys.error("No partest-javaagent jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir))
+ }
+ }
+
// Directory <root>/build
lazy val buildDir: Directory = {
val bases = testRoot :: testRoot.parents
@@ -57,6 +72,9 @@ object PathSettings {
findJar(buildPackLibDir.files ++ srcLibDir.files, "scalacheck") getOrElse {
sys.error("No scalacheck jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir))
}
+
+ lazy val diffUtils: File =
+ findJar(buildPackLibDir.files, "diffutils") getOrElse sys.error(s"No diffutils.jar found in '$buildPackLibDir'.")
}
class PathSettings() {
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index f39debf..700667a 100644
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Philipp Haller
*/
@@ -48,20 +48,34 @@ class ReflectiveRunner {
new ConsoleFileManager
import fileManager.
- { latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile }
+ { latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile }
val files =
- Array(latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile) map (x => io.File(x))
+ Array(latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile) map (x => io.File(x))
val sepUrls = files map (_.toURL)
- val sepLoader = new URLClassLoader(sepUrls, null)
+ // this seems to be the core classloader that determines which classes can be found when running partest from the test/partest script
+ var sepLoader = new URLClassLoader(sepUrls, null)
+
+ // this is a workaround for https://issues.scala-lang.org/browse/SI-5433
+ // we hack into the classloader that will become parent classloader for scalac
+ // this way we ensure that reflective macro lookup will pick correct Code.lift
+ // it's also used to inject diffutils into the classpath when running partest from the test/partest script
+ sepLoader = new URLClassLoader((PathSettings.srcCodeLib +: (PathSettings.diffUtils +: files)) map (_.toURL), null)
if (isPartestDebug)
println("Loading classes from:\n" + sepUrls.mkString("\n"))
- val paths = classPath match {
- case Some(cp) => Nil
- case _ => files.toList map (_.path)
- }
+ // @partest maintainer: it seems to me that commented lines are incorrect
+ // if classPath is not empty, then it has been provided by the --classpath option
+ // which points to the root of Scala home (see ConsoleFileManager's testClasses and the true flag in the ctor for more information)
+ // this doesn't mean that we had custom Java classpath set, so we don't have to override latestXXXFiles from the file manager
+ //
+ //val paths = classPath match {
+ // case Some(cp) => Nil
+ // case _ => files.toList map (_.path)
+ //}
+ val paths = files.toList map (_.path)
+
val newClasspath = ClassPath.join(paths: _*)
setProp("java.class.path", newClasspath)
diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala
new file mode 100644
index 0000000..f80f6f3
--- /dev/null
+++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala
@@ -0,0 +1,862 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Philipp Haller
+ */
+
+package scala.tools.partest
+package nest
+
+import java.io._
+import java.net.URL
+import java.util.{ Timer, TimerTask }
+
+import scala.tools.nsc.Properties.{ jdkHome, javaHome, propOrElse }
+import scala.util.Properties.{ envOrElse, isWin }
+import scala.tools.nsc.{ Settings, CompilerCommand, Global }
+import scala.tools.nsc.io.{ AbstractFile, PlainFile, Path, Directory, File => SFile }
+import scala.tools.nsc.reporters.ConsoleReporter
+import scala.tools.nsc.util.{ ClassPath, FakePos, ScalaClassLoader, stackTraceString }
+import ClassPath.{ join, split }
+import scala.tools.scalap.scalax.rules.scalasig.ByteCode
+import scala.collection.{ mutable, immutable }
+import scala.tools.nsc.interactive.{ BuildManager, RefinedBuildManager }
+import scala.sys.process._
+import java.util.concurrent.{ Executors, TimeUnit, TimeoutException }
+import PartestDefaults.{ javaCmd, javacCmd }
+
+class LogContext(val file: File, val writers: Option[(StringWriter, PrintWriter)])
+
+object LogContext {
+ def apply(file: File, swr: StringWriter, wr: PrintWriter): LogContext = {
+ require (file != null)
+ new LogContext(file, Some((swr, wr)))
+ }
+ def apply(file: File): LogContext = new LogContext(file, None)
+}
+
+object Output {
+ object outRedirect extends Redirecter(out)
+ object errRedirect extends Redirecter(err)
+
+ System.setOut(outRedirect)
+ System.setErr(errRedirect)
+
+ import scala.util.DynamicVariable
+ private def out = java.lang.System.out
+ private def err = java.lang.System.err
+ private val redirVar = new DynamicVariable[Option[PrintStream]](None)
+
+ class Redirecter(stream: PrintStream) extends PrintStream(new OutputStream {
+ def write(b: Int) = withStream(_ write b)
+
+ private def withStream(f: PrintStream => Unit) = f(redirVar.value getOrElse stream)
+
+ override def write(b: Array[Byte]) = withStream(_ write b)
+ override def write(b: Array[Byte], off: Int, len: Int) = withStream(_.write(b, off, len))
+ override def flush = withStream(_.flush)
+ override def close = withStream(_.close)
+ })
+
+ // this supports thread-safe nested output redirects
+ def withRedirected[T](newstream: PrintStream)(func: => T): T = {
+ // note down old redirect destination
+ // this may be None in which case outRedirect and errRedirect print to stdout and stderr
+ val saved = redirVar.value
+ // set new redirecter
+ // this one will redirect both out and err to newstream
+ redirVar.value = Some(newstream)
+
+ try func
+ finally {
+ newstream.flush()
+ redirVar.value = saved
+ }
+ }
+}
+
+class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunParams) {
+ import fileManager._
+
+ val compileMgr = new CompileManager(fileManager)
+ fileManager.CLASSPATH += File.pathSeparator + PathSettings.scalaCheck
+ fileManager.CLASSPATH += File.pathSeparator + PathSettings.diffUtils // needed to put diffutils on test/partest's classpath
+
+ private def compareFiles(f1: File, f2: File): String =
+ try fileManager.compareFiles(f1, f2)
+ catch { case t: Exception => t.toString }
+
+ /** This does something about absolute paths and file separator
+ * chars before diffing.
+ */
+ private def replaceSlashes(dir: File, s: String): String = {
+ val base = (dir.getAbsolutePath + File.separator).replace('\\', '/')
+ var regex = """\Q%s\E""" format base
+ if (isWin) regex = "(?i)" + regex
+ s.replace('\\', '/').replaceAll(regex, "")
+ }
+
+ private def workerError(msg: String): Unit = System.err.println("Error: " + msg)
+
+ private def printInfoStart(file: File, printer: PrintWriter) {
+ NestUI.outline("testing: ", printer)
+ val filesdir = file.getAbsoluteFile.getParentFile.getParentFile
+ val testdir = filesdir.getParentFile
+ val totalWidth = 56
+ val name = {
+ // 1. try with [...]/files/run/test.scala
+ val name = file.getAbsolutePath drop testdir.getAbsolutePath.length
+ if (name.length <= totalWidth) name
+ // 2. try with [...]/run/test.scala
+ else file.getAbsolutePath drop filesdir.getAbsolutePath.length
+ }
+ NestUI.normal("[...]%s%s".format(name, " " * (totalWidth - name.length)), printer)
+ }
+
+ private def printInfoEnd(success: Boolean, printer: PrintWriter) {
+ NestUI.normal("[", printer)
+ if (success) NestUI.success(" OK ", printer)
+ else NestUI.failure("FAILED", printer)
+ NestUI.normal("]\n", printer)
+ }
+
+ private def printInfoTimeout(printer: PrintWriter) {
+ NestUI.normal("[", printer)
+ NestUI.failure("TIMOUT", printer)
+ NestUI.normal("]\n", printer)
+ }
+
+ private def javac(outDir: File, files: List[File], output: File): CompilationOutcome = {
+ // compile using command-line javac compiler
+ val args = Seq(
+ javacCmd,
+ "-d",
+ outDir.getAbsolutePath,
+ "-classpath",
+ join(outDir.toString, CLASSPATH)
+ ) ++ files.map("" + _)
+
+ try if (runCommand(args, output)) CompileSuccess else CompileFailed
+ catch exHandler(output, "javac command failed:\n" + args.map(" " + _ + "\n").mkString + "\n", CompilerCrashed)
+ }
+
+ /** Runs command redirecting standard out and error out to output file.
+ * Overloaded to accept a sequence of arguments.
+ */
+ private def runCommand(args: Seq[String], outFile: File): Boolean = {
+ NestUI.verbose("running command:\n"+args.map(" " + _ + "\n").mkString)
+ runCommandImpl(Process(args), outFile)
+ }
+
+ /** Runs command redirecting standard out and error out to output file.
+ * Overloaded to accept a single string = concatenated command + arguments.
+ */
+ private def runCommand(command: String, outFile: File): Boolean = {
+ NestUI.verbose("running command:"+command)
+ runCommandImpl(Process(command), outFile)
+ }
+
+ private def runCommandImpl(process: => ProcessBuilder, outFile: File): Boolean = {
+ val exitCode = (process #> outFile !)
+ // normalize line endings
+ // System.getProperty("line.separator") should be "\n" here
+ // so reading a file and writing it back should convert all CRLFs to LFs
+ SFile(outFile).printlnAll(SFile(outFile).lines.toList: _*)
+ exitCode == 0
+ }
+
+ @inline private def isJava(f: File) = SFile(f) hasExtension "java"
+ @inline private def isScala(f: File) = SFile(f) hasExtension "scala"
+ @inline private def isJavaOrScala(f: File) = isJava(f) || isScala(f)
+
+ private def outputLogFile(logFile: File) {
+ val lines = SFile(logFile).lines
+ if (lines.nonEmpty) {
+ NestUI.normal("Log file '" + logFile + "': \n")
+ lines foreach (x => NestUI.normal(x + "\n"))
+ }
+ }
+ private def logStackTrace(logFile: File, t: Throwable, msg: String): Boolean = {
+ SFile(logFile).writeAll(msg, stackTraceString(t))
+ outputLogFile(logFile) // if running the test threw an exception, output log file
+ false
+ }
+
+ private def exHandler[T](logFile: File, msg: String, value: T): PartialFunction[Throwable, T] = {
+ case e: Exception => logStackTrace(logFile, e, msg) ; value
+ }
+
+ class Runner(testFile: File) {
+ var testDiff: String = ""
+ var passed: Option[Boolean] = None
+
+ val fileBase = basename(testFile.getName)
+ val logFile = fileManager.getLogFile(testFile, kind)
+ val parent = testFile.getParentFile
+ val outDir = new File(parent, "%s-%s.obj".format(fileBase, kind))
+ def toDelete = if (isPartestDebug) Nil else List(
+ if (passed exists (x => x)) Some(logFile) else None,
+ if (outDir.isDirectory) Some(outDir) else None
+ ).flatten
+
+ private def createOutputDir(): File = {
+ outDir.mkdirs()
+ outDir
+ }
+
+ private def execTest(outDir: File, logFile: File, classpathPrefix: String = "", javaOpts: String = ""): Boolean = {
+ // check whether there is a ".javaopts" file
+ val argsFile = new File(logFile.getParentFile, fileBase + ".javaopts")
+ val argString = file2String(argsFile)
+ if (argString != "")
+ NestUI.verbose("Found javaopts file '%s', using options: '%s'".format(argsFile, argString))
+
+ val testFullPath = {
+ val d = new File(logFile.getParentFile, fileBase)
+ if (d.isDirectory) d.getAbsolutePath
+ else {
+ val f = new File(logFile.getParentFile, fileBase + ".scala")
+ if (f.isFile) f.getAbsolutePath
+ else ""
+ }
+ }
+
+ // Note! As this currently functions, JAVA_OPTS must precede argString
+ // because when an option is repeated to java only the last one wins.
+ // That means until now all the .javaopts files were being ignored because
+ // they all attempt to change options which are also defined in
+ // partest.java_opts, leading to debug output like:
+ //
+ // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k'
+ // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...]
+ val extras = if (isPartestDebug) List("-Dpartest.debug=true") else Nil
+ val propertyOptions = List(
+ "-Dfile.encoding=UTF-8",
+ "-Djava.library.path="+logFile.getParentFile.getAbsolutePath,
+ "-Dpartest.output="+outDir.getAbsolutePath,
+ "-Dpartest.lib="+LATEST_LIB,
+ "-Dpartest.reflect="+LATEST_REFLECT,
+ "-Dpartest.comp="+LATEST_COMP,
+ "-Dpartest.cwd="+outDir.getParent,
+ "-Dpartest.test-path="+testFullPath,
+ "-Dpartest.testname="+fileBase,
+ "-Djavacmd="+javaCmd,
+ "-Djavaccmd="+javacCmd,
+ "-Duser.language=en",
+ "-Duser.country=US"
+ ) ++ extras
+
+ val classpath = if (classpathPrefix != "") join(classpathPrefix, CLASSPATH) else CLASSPATH
+ val cmd = javaCmd +: (
+ (JAVA_OPTS.split(' ') ++ javaOpts.split(' ') ++ argString.split(' ')).map(_.trim).filter(_ != "") ++ Seq(
+ "-classpath",
+ join(outDir.toString, classpath)
+ ) ++ propertyOptions ++ Seq(
+ "scala.tools.nsc.MainGenericRunner",
+ "-usejavacp",
+ "Test",
+ "jvm"
+ )
+ )
+
+ runCommand(cmd, logFile)
+ }
+
+ private def getCheckFilePath(dir: File, suffix: String = "") = {
+ def chkFile(s: String) = (Directory(dir) / "%s%s.check".format(fileBase, s)).toFile
+
+ if (chkFile("").isFile || suffix == "") chkFile("")
+ else chkFile("-" + suffix)
+ }
+ private def getCheckFile(dir: File) = Some(getCheckFilePath(dir, kind)) filter (_.canRead)
+
+ private def compareOutput(dir: File, logFile: File): String = {
+ val checkFile = getCheckFilePath(dir, kind)
+ val diff =
+ if (checkFile.canRead) compareFiles(logFile, checkFile.jfile)
+ else file2String(logFile)
+
+ // if check file exists, compare with log file
+ if (diff != "" && fileManager.updateCheck) {
+ NestUI.verbose("Updating checkfile " + checkFile.jfile)
+ val toWrite = if (checkFile.exists) checkFile else getCheckFilePath(dir, "")
+ toWrite writeAll file2String(logFile)
+ ""
+ }
+ else diff
+ }
+
+ def newTestWriters() = {
+ val swr = new StringWriter
+ val wr = new PrintWriter(swr, true)
+ // diff = ""
+
+ ((swr, wr))
+ }
+
+ def fail(what: Any) = {
+ NestUI.verbose("scalac: compilation of "+what+" failed\n")
+ false
+ }
+ def diffCheck(testFile: File, diff: String) = {
+ testDiff = diff
+ testDiff == ""
+ }
+
+ /** 1. Creates log file and output directory.
+ * 2. Runs script function, providing log file and output directory as arguments.
+ */
+ def runInContext(file: File, script: (File, File) => Boolean): (Boolean, LogContext) = {
+ val (swr, wr) = newTestWriters()
+ printInfoStart(file, wr)
+
+ NestUI.verbose(this+" running test "+fileBase)
+ val outDir = createOutputDir()
+ NestUI.verbose("output directory: "+outDir)
+
+ // run test-specific code
+ val succeeded = try {
+ if (isPartestDebug) {
+ val (result, millis) = timed(script(logFile, outDir))
+ fileManager.recordTestTiming(file.getPath, millis)
+ result
+ }
+ else script(logFile, outDir)
+ }
+ catch exHandler(logFile, "", false)
+
+ (succeeded, LogContext(logFile, swr, wr))
+ }
+
+ def groupedFiles(dir: File): List[List[File]] = {
+ val testFiles = dir.listFiles.toList filter isJavaOrScala
+
+ def isInGroup(f: File, num: Int) = SFile(f).stripExtension endsWith ("_" + num)
+ val groups = (0 to 9).toList map (num => (testFiles filter (f => isInGroup(f, num))).sorted)
+ val noGroupSuffix = (testFiles filterNot (groups.flatten contains)).sorted
+
+ noGroupSuffix :: groups filterNot (_.isEmpty)
+ }
+
+ def compileFilesIn(dir: File, logFile: File, outDir: File): CompilationOutcome = {
+ def compileGroup(g: List[File]): CompilationOutcome = {
+ val (scalaFiles, javaFiles) = g partition isScala
+ val allFiles = javaFiles ++ scalaFiles
+
+ List(1, 2, 3).foldLeft(CompileSuccess: CompilationOutcome) {
+ case (CompileSuccess, 1) if scalaFiles.nonEmpty => compileMgr.attemptCompile(Some(outDir), allFiles, kind, logFile) // java + scala
+ case (CompileSuccess, 2) if javaFiles.nonEmpty => javac(outDir, javaFiles, logFile) // java
+ case (CompileSuccess, 3) if scalaFiles.nonEmpty => compileMgr.attemptCompile(Some(outDir), scalaFiles, kind, logFile) // scala
+ case (outcome, _) => outcome
+ }
+ }
+ groupedFiles(dir).foldLeft(CompileSuccess: CompilationOutcome) {
+ case (CompileSuccess, files) => compileGroup(files)
+ case (outcome, _) => outcome
+ }
+ }
+
+ def runTestCommon(file: File, expectFailure: Boolean)(
+ onSuccess: (File, File) => Boolean,
+ onFail: (File, File) => Unit = (_, _) => ()): (Boolean, LogContext) =
+ {
+ runInContext(file, (logFile: File, outDir: File) => {
+ val outcome = (
+ if (file.isDirectory) compileFilesIn(file, logFile, outDir)
+ else compileMgr.attemptCompile(None, List(file), kind, logFile)
+ )
+ val result = (
+ if (expectFailure) outcome.isNegative
+ else outcome.isPositive
+ )
+
+ if (result) onSuccess(logFile, outDir)
+ else { onFail(logFile, outDir) ; false }
+ })
+ }
+
+ def runJvmTest(file: File): (Boolean, LogContext) =
+ runTestCommon(file, expectFailure = false)((logFile, outDir) => {
+ val dir = file.getParentFile
+
+ // adding codelib.jar to the classpath
+ // codelib provides the possibility to override standard reify
+ // this shields the massive amount of reification tests from changes in the API
+ execTest(outDir, logFile, PathSettings.srcCodeLib.toString) && {
+ // cannot replace paths here since this also inverts slashes
+ // which affects a bunch of tests
+ //fileManager.mapFile(logFile, replaceSlashes(dir, _))
+ diffCheck(file, compareOutput(dir, logFile))
+ }
+ })
+
+ // Apache Ant 1.6 or newer
+ def ant(args: Seq[String], output: File): Boolean = {
+ val antDir = Directory(envOrElse("ANT_HOME", "/opt/ant/"))
+ val antLibDir = Directory(antDir / "lib")
+ val antLauncherPath = SFile(antLibDir / "ant-launcher.jar").path
+ val antOptions =
+ if (NestUI._verbose) List("-verbose", "-noinput")
+ else List("-noinput")
+ val cmd = javaCmd +: (
+ JAVA_OPTS.split(' ').map(_.trim).filter(_ != "") ++ Seq(
+ "-classpath",
+ antLauncherPath,
+ "org.apache.tools.ant.launch.Launcher"
+ ) ++ antOptions ++ args
+ )
+
+ try runCommand(cmd, output)
+ catch exHandler(output, "ant command '" + cmd + "' failed:\n", false)
+ }
+
+ def runAntTest(file: File): (Boolean, LogContext) = {
+ val (swr, wr) = newTestWriters()
+ printInfoStart(file, wr)
+
+ NestUI.verbose(this+" running test "+fileBase)
+
+ val succeeded = try {
+ val binary = "-Dbinary="+(
+ if (fileManager.LATEST_LIB endsWith "build/quick/classes/library") "quick"
+ else if (fileManager.LATEST_LIB endsWith "build/pack/lib/scala-library.jar") "pack"
+ else if (fileManager.LATEST_LIB endsWith "dists/latest/lib/scala-library.jar/") "latest"
+ else "installed"
+ )
+ val args = Array(binary, "-logfile", logFile.path, "-file", file.path)
+ NestUI.verbose("ant "+args.mkString(" "))
+ ant(args, logFile) && diffCheck(file, compareOutput(file.getParentFile, logFile))
+ }
+ catch { // *catch-all*
+ case e: Exception =>
+ NestUI.verbose("caught "+e)
+ false
+ }
+
+ (succeeded, LogContext(logFile, swr, wr))
+ }
+
+ def runSpecializedTest(file: File): (Boolean, LogContext) =
+ runTestCommon(file, expectFailure = false)((logFile, outDir) => {
+ val dir = file.getParentFile
+
+ // adding the instrumented library to the classpath
+ ( execTest(outDir, logFile, PathSettings.srcSpecLib.toString) &&
+ diffCheck(file, compareOutput(dir, logFile))
+ )
+ })
+
+ def runInstrumentedTest(file: File): (Boolean, LogContext) =
+ runTestCommon(file, expectFailure = false)((logFile, outDir) => {
+ val dir = file.getParentFile
+
+ // adding the javagent option with path to instrumentation agent
+ execTest(outDir, logFile, javaOpts = "-javaagent:"+PathSettings.instrumentationAgentLib) &&
+ diffCheck(file, compareOutput(dir, logFile))
+ })
+
+ def processSingleFile(file: File): (Boolean, LogContext) = kind match {
+ case "scalacheck" =>
+ val succFn: (File, File) => Boolean = { (logFile, outDir) =>
+ NestUI.verbose("compilation of "+file+" succeeded\n")
+
+ val outURL = outDir.getAbsoluteFile.toURI.toURL
+ val logWriter = new PrintStream(new FileOutputStream(logFile), true)
+
+ Output.withRedirected(logWriter) {
+ // this classloader is test specific: its parent contains library classes and others
+ ScalaClassLoader.fromURLs(List(outURL), params.scalaCheckParentClassLoader).run("Test", Nil)
+ }
+
+ NestUI.verbose(file2String(logFile))
+ // obviously this must be improved upon
+ val lines = SFile(logFile).lines map (_.trim) filterNot (_ == "") toBuffer;
+ lines.forall(x => !x.startsWith("!")) || {
+ NestUI.normal("ScalaCheck test failed. Output:\n")
+ lines foreach (x => NestUI.normal(x + "\n"))
+ false
+ }
+ }
+ runTestCommon(file, expectFailure = false)(
+ succFn,
+ (logFile, outDir) => outputLogFile(logFile)
+ )
+
+ case "pos" =>
+ runTestCommon(file, expectFailure = false)(
+ (logFile, outDir) => true,
+ (_, _) => ()
+ )
+
+ case "neg" =>
+ runTestCommon(file, expectFailure = true)((logFile, outDir) => {
+ // compare log file to check file
+ val dir = file.getParentFile
+
+ // diff is contents of logFile
+ fileManager.mapFile(logFile, replaceSlashes(dir, _))
+ diffCheck(file, compareOutput(dir, logFile))
+ })
+
+ case "run" | "jvm" =>
+ runJvmTest(file)
+
+ case "specialized" =>
+ runSpecializedTest(file)
+
+ case "instrumented" =>
+ runInstrumentedTest(file)
+
+ case "presentation" =>
+ runJvmTest(file) // for the moment, it's exactly the same as for a run test
+
+ case "ant" =>
+ runAntTest(file)
+
+ case "buildmanager" =>
+ val (swr, wr) = newTestWriters()
+ printInfoStart(file, wr)
+ val (outDir, testFile, changesDir) = {
+ if (!file.isDirectory)
+ (null, null, null)
+ else {
+ NestUI.verbose(this+" running test "+fileBase)
+ val outDir = createOutputDir()
+ val testFile = new File(file, fileBase + ".test")
+ val changesDir = new File(file, fileBase + ".changes")
+
+ if (changesDir.isFile || !testFile.isFile) {
+ // if changes exists then it has to be a dir
+ if (!testFile.isFile) NestUI.verbose("invalid build manager test file")
+ if (changesDir.isFile) NestUI.verbose("invalid build manager changes directory")
+ (null, null, null)
+ }
+ else {
+ copyTestFiles(file, outDir)
+ NestUI.verbose("outDir: "+outDir)
+ NestUI.verbose("logFile: "+logFile)
+ (outDir, testFile, changesDir)
+ }
+ }
+ }
+ if (outDir == null)
+ return (false, LogContext(logFile))
+
+ // Pre-conditions satisfied
+ val sourcepath = outDir.getAbsolutePath+File.separator
+
+ // configure input/output files
+ val logWriter = new PrintStream(new FileOutputStream(logFile), true)
+ val testReader = new BufferedReader(new FileReader(testFile))
+ val logConsoleWriter = new PrintWriter(logWriter, true)
+
+ // create proper settings for the compiler
+ val settings = new Settings(workerError)
+ settings.outdir.value = outDir.getAbsoluteFile.getAbsolutePath
+ settings.sourcepath.value = sourcepath
+ settings.classpath.value = fileManager.CLASSPATH
+ settings.Ybuildmanagerdebug.value = true
+
+ // simulate Build Manager loop
+ val prompt = "builder > "
+ val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
+ val bM: BuildManager =
+ new RefinedBuildManager(settings) {
+ override protected def newCompiler(settings: Settings) =
+ new BuilderGlobal(settings, reporter)
+ }
+
+ def testCompile(line: String): Boolean = {
+ NestUI.verbose("compiling " + line)
+ val args = (line split ' ').toList
+ val command = new CompilerCommand(args, settings)
+ command.ok && {
+ bM.update(filesToSet(settings.sourcepath.value, command.files), Set.empty)
+ !reporter.hasErrors
+ }
+ }
+
+ val updateFiles = (line: String) => {
+ NestUI.verbose("updating " + line)
+ (line split ' ').toList forall (u =>
+ (u split "=>").toList match {
+ case origFileName::(newFileName::Nil) =>
+ val newFile = new File(changesDir, newFileName)
+ if (newFile.isFile) {
+ val v = overwriteFileWith(new File(outDir, origFileName), newFile)
+ if (!v)
+ NestUI.verbose("'update' operation on " + u + " failed")
+ v
+ } else {
+ NestUI.verbose("File " + newFile + " is invalid")
+ false
+ }
+ case a =>
+ NestUI.verbose("Other =: " + a)
+ false
+ }
+ )
+ }
+
+ def loop(): Boolean = {
+ testReader.readLine() match {
+ case null | "" =>
+ NestUI.verbose("finished")
+ true
+ case s if s startsWith ">>update " =>
+ updateFiles(s stripPrefix ">>update ") && loop()
+ case s if s startsWith ">>compile " =>
+ val files = s stripPrefix ">>compile "
+ logWriter.println(prompt + files)
+ // In the end, it can finish with an error
+ if (testCompile(files)) loop()
+ else {
+ val t = testReader.readLine()
+ (t == null) || (t == "")
+ }
+ case s =>
+ NestUI.verbose("wrong command in test file: " + s)
+ false
+ }
+ }
+
+ Output.withRedirected(logWriter) {
+ try loop()
+ finally testReader.close()
+ }
+ fileManager.mapFile(logFile, replaceSlashes(new File(sourcepath), _))
+
+ (diffCheck(file, compareOutput(file, logFile)), LogContext(logFile, swr, wr))
+
+ case "res" => {
+ // simulate resident compiler loop
+ val prompt = "\nnsc> "
+
+ val (swr, wr) = newTestWriters()
+ printInfoStart(file, wr)
+
+ NestUI.verbose(this+" running test "+fileBase)
+ val dir = file.getParentFile
+ val outDir = createOutputDir()
+ val resFile = new File(dir, fileBase + ".res")
+ NestUI.verbose("outDir: "+outDir)
+ NestUI.verbose("logFile: "+logFile)
+ //NestUI.verbose("logFileErr: "+logFileErr)
+ NestUI.verbose("resFile: "+resFile)
+
+ // run compiler in resident mode
+ // $SCALAC -d "$os_dstbase".obj -Xresident -sourcepath . "$@"
+ val sourcedir = logFile.getParentFile.getAbsoluteFile
+ val sourcepath = sourcedir.getAbsolutePath+File.separator
+ NestUI.verbose("sourcepath: "+sourcepath)
+
+ val argList = List(
+ "-d", outDir.getAbsoluteFile.getPath,
+ "-Xresident",
+ "-sourcepath", sourcepath)
+
+ // configure input/output files
+ val logOut = new FileOutputStream(logFile)
+ val logWriter = new PrintStream(logOut, true)
+ val resReader = new BufferedReader(new FileReader(resFile))
+ val logConsoleWriter = new PrintWriter(new OutputStreamWriter(logOut), true)
+
+ // create compiler
+ val settings = new Settings(workerError)
+ settings.sourcepath.value = sourcepath
+ settings.classpath.value = fileManager.CLASSPATH
+ val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
+ val command = new CompilerCommand(argList, settings)
+ object compiler extends Global(command.settings, reporter)
+
+ val resCompile = (line: String) => {
+ NestUI.verbose("compiling "+line)
+ val cmdArgs = (line split ' ').toList map (fs => new File(dir, fs).getAbsolutePath)
+ NestUI.verbose("cmdArgs: "+cmdArgs)
+ val sett = new Settings(workerError)
+ sett.sourcepath.value = sourcepath
+ val command = new CompilerCommand(cmdArgs, sett)
+ command.ok && {
+ (new compiler.Run) compile command.files
+ !reporter.hasErrors
+ }
+ }
+
+ def loop(action: String => Boolean): Boolean = {
+ logWriter.print(prompt)
+ resReader.readLine() match {
+ case null | "" => logWriter.flush() ; true
+ case line => action(line) && loop(action)
+ }
+ }
+
+ Output.withRedirected(logWriter) {
+ try loop(resCompile)
+ finally resReader.close()
+ }
+ fileManager.mapFile(logFile, replaceSlashes(dir, _))
+
+ (diffCheck(file, compareOutput(dir, logFile)), LogContext(logFile, swr, wr))
+ }
+
+ case "shootout" =>
+ val (swr, wr) = newTestWriters()
+ printInfoStart(file, wr)
+
+ NestUI.verbose(this+" running test "+fileBase)
+ val outDir = createOutputDir()
+
+ // 2. define file {outDir}/test.scala that contains code to compile/run
+ val testFile = new File(outDir, "test.scala")
+ NestUI.verbose("outDir: "+outDir)
+ NestUI.verbose("logFile: "+logFile)
+ NestUI.verbose("testFile: "+testFile)
+
+ // 3. cat {test}.scala.runner {test}.scala > testFile
+ val runnerFile = new File(parent, fileBase+".scala.runner")
+ val bodyFile = new File(parent, fileBase+".scala")
+ SFile(testFile).writeAll(
+ file2String(runnerFile),
+ file2String(bodyFile)
+ )
+
+ // 4. compile testFile
+ val ok = compileMgr.attemptCompile(None, List(testFile), kind, logFile) eq CompileSuccess
+ NestUI.verbose("compilation of " + testFile + (if (ok) "succeeded" else "failed"))
+ val result = ok && {
+ execTest(outDir, logFile) && {
+ NestUI.verbose(this+" finished running "+fileBase)
+ diffCheck(file, compareOutput(parent, logFile))
+ }
+ }
+
+ (result, LogContext(logFile, swr, wr))
+
+ case "scalap" =>
+ runInContext(file, (logFile: File, outDir: File) => {
+ val sourceDir = Directory(if (file.isFile) file.getParent else file)
+ val sources = sourceDir.files filter (_ hasExtension "scala") map (_.jfile) toList
+ val results = sourceDir.files filter (_.name == "result.test") map (_.jfile) toList
+
+ if (sources.length != 1 || results.length != 1) {
+ NestUI.warning("Misconfigured scalap test directory: " + sourceDir + " \n")
+ false
+ }
+ else {
+ val resFile = results.head
+ // 2. Compile source file
+
+ if (!compileMgr.attemptCompile(Some(outDir), sources, kind, logFile).isPositive) {
+ NestUI.normal("compilerMgr failed to compile %s to %s".format(sources mkString ", ", outDir))
+ false
+ }
+ else {
+ // 3. Decompile file and compare results
+ val isPackageObject = sourceDir.name startsWith "package"
+ val className = sourceDir.name.capitalize + (if (!isPackageObject) "" else ".package")
+ val url = outDir.toURI.toURL
+ val loader = ScalaClassLoader.fromURLs(List(url), this.getClass.getClassLoader)
+ val clazz = loader.loadClass(className)
+
+ val byteCode = ByteCode.forClass(clazz)
+ val result = scala.tools.scalap.Main.decompileScala(byteCode.bytes, isPackageObject)
+
+ SFile(logFile) writeAll result
+ diffCheck(file, compareFiles(logFile, resFile))
+ }
+ }
+ })
+
+ case "script" =>
+ val (swr, wr) = newTestWriters()
+ printInfoStart(file, wr)
+
+ NestUI.verbose(this+" running test "+fileBase)
+
+ // check whether there is an args file
+ val argsFile = new File(file.getParentFile, fileBase+".args")
+ NestUI.verbose("argsFile: "+argsFile)
+ val argString = file2String(argsFile)
+ val succeeded = try {
+ val cmdString =
+ if (isWin) {
+ val batchFile = new File(file.getParentFile, fileBase+".bat")
+ NestUI.verbose("batchFile: "+batchFile)
+ batchFile.getAbsolutePath
+ }
+ else file.getAbsolutePath
+
+ val ok = runCommand(cmdString+argString, logFile)
+ ( ok && diffCheck(file, compareOutput(file.getParentFile, logFile)) )
+ }
+ catch { case e: Exception => NestUI.verbose("caught "+e) ; false }
+
+ (succeeded, LogContext(logFile, swr, wr))
+ }
+
+ private def crashContext(t: Throwable): LogContext = {
+ try {
+ logStackTrace(logFile, t, "Possible compiler crash during test of: " + testFile + "\n")
+ LogContext(logFile)
+ }
+ catch { case t: Throwable => LogContext(null) }
+ }
+
+ def run(): (Boolean, LogContext) = {
+ val result = try processSingleFile(testFile) catch { case t: Throwable => (false, crashContext(t)) }
+ passed = Some(result._1)
+ result
+ }
+
+ def reportResult(writers: Option[(StringWriter, PrintWriter)]) {
+ writers foreach { case (swr, wr) =>
+ if (passed.isEmpty) printInfoTimeout(wr)
+ else printInfoEnd(passed.get, wr)
+ wr.flush()
+ swr.flush()
+ NestUI.normal(swr.toString)
+
+ if (passed exists (x => !x)) {
+ if (fileManager.showDiff || isPartestDebug)
+ NestUI.normal(testDiff)
+ if (fileManager.showLog)
+ showLog(logFile)
+ }
+ }
+ toDelete foreach (_.deleteRecursively())
+ }
+ }
+
+ def runTest(testFile: File): TestState = {
+ val runner = new Runner(testFile)
+ // when option "--failed" is provided execute test only if log
+ // is present (which means it failed before)
+ if (fileManager.failed && !runner.logFile.canRead)
+ return TestState.Ok
+
+ // sys addShutdownHook cleanup()
+ val ((success, ctx), elapsed) = timed(runner.run())
+ val state = if (success) TestState.Ok else TestState.Fail
+
+ runner.reportResult(ctx.writers)
+ state
+ }
+
+ private def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] =
+ fs flatMap (s => Option(AbstractFile getFile (pre + s))) toSet
+
+ private def copyTestFiles(testDir: File, destDir: File) {
+ val invalidExts = List("changes", "svn", "obj")
+ testDir.listFiles.toList filter (
+ f => (isJavaOrScala(f) && f.isFile) ||
+ (f.isDirectory && !(invalidExts.contains(SFile(f).extension)))) foreach
+ { f => fileManager.copyFile(f, destDir) }
+ }
+
+ private def showLog(logFile: File) {
+ file2String(logFile) match {
+ case "" if logFile.canRead => ()
+ case "" => NestUI.failure("Couldn't open log file: " + logFile + "\n")
+ case s => NestUI.normal(s)
+ }
+ }
+}
diff --git a/src/partest/scala/tools/partest/nest/RunnerUtils.scala b/src/partest/scala/tools/partest/nest/RunnerUtils.scala
index 93fff0e..6707a93 100644
--- a/src/partest/scala/tools/partest/nest/RunnerUtils.scala
+++ b/src/partest/scala/tools/partest/nest/RunnerUtils.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Philipp Haller
*/
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
index 299296b..20f9c70 100644
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ b/src/partest/scala/tools/partest/nest/SBTRunner.scala
@@ -3,33 +3,88 @@ package nest
import java.io.File
import scala.tools.nsc.io.{ Directory }
+import scala.util.Properties.setProp
+import scala.collection.JavaConverters._
-
-class SBTRunner extends DirectRunner {
+object SBTRunner extends DirectRunner {
val fileManager = new FileManager {
var JAVACMD: String = "java"
var JAVAC_CMD: String = "javac"
var CLASSPATH: String = _
var LATEST_LIB: String = _
- val testRootPath: String = PathSettings.testRoot.path
- val testRootDir: Directory = PathSettings.testRoot
+ var LATEST_REFLECT: String = _
+ var LATEST_COMP: String = _
+ var LATEST_PARTEST: String = _
+ var LATEST_ACTORS: String = _
+ val testRootPath: String = "test"
+ val testRootDir: Directory = Directory(testRootPath)
}
- def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String):java.util.HashMap[String,Int] = {
-
- def convert(scalaM:scala.collection.immutable.Map[String,Int]):java.util.HashMap[String,Int] = {
- val javaM = new java.util.HashMap[String,Int]()
- for(elem <- scalaM) yield {javaM.put(elem._1,elem._2)}
- javaM
- }
-
+ def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String):java.util.Map[String, TestState] = {
def failedOnlyIfRequired(files:List[File]):List[File]={
if (fileManager.failed) files filter (x => fileManager.logFileExists(x, kind)) else files
}
+ runTestsForFiles(failedOnlyIfRequired(kindFiles.toList), kind).asJava
+ }
+
+ case class CommandLineOptions(classpath: Option[String] = None,
+ tests: Map[String, Array[File]] = Map(),
+ scalacOptions: Seq[String] = Seq(),
+ justFailedTests: Boolean = false)
- convert(runTestsForFiles(failedOnlyIfRequired(kindFiles.toList), kind))
+ def mainReflect(args: Array[String]): java.util.Map[String, String] = {
+ setProp("partest.debug", "true")
+ val Argument = new scala.util.matching.Regex("-(.*)")
+ def parseArgs(args: Seq[String], data: CommandLineOptions): CommandLineOptions = args match {
+ case Seq("--failed", rest @ _*) => parseArgs(rest, data.copy(justFailedTests = true))
+ case Seq("-cp", cp, rest @ _*) => parseArgs(rest, data.copy(classpath=Some(cp)))
+ case Seq("-scalacoption", opt, rest @ _*) => parseArgs(rest, data.copy(scalacOptions= data.scalacOptions :+ opt))
+ case Seq(Argument(name), runFiles, rest @ _*) => parseArgs(rest, data.copy(tests=data.tests + (name -> runFiles.split(",").map(new File(_)))))
+ case Seq() => data
+ case x => sys.error("Unknown command line options: " + x)
+ }
+ val config = parseArgs(args, CommandLineOptions())
+ fileManager.SCALAC_OPTS ++= config.scalacOptions
+ fileManager.CLASSPATH = config.classpath getOrElse sys.error("No classpath set")
+
+ def findClasspath(jar: String, name: String): Option[String] = {
+ val optJar = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches (".*"+jar+".*\\.jar"))).headOption
+ val optClassDir = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches (".*"+name+File.separator+"classes"))).headOption
+ optJar orElse optClassDir
+ }
+ // Find scala library jar file...
+ fileManager.LATEST_LIB = findClasspath("scala-library", "scala-library") getOrElse sys.error("No scala-library found! Classpath = " + fileManager.CLASSPATH)
+ fileManager.LATEST_REFLECT = findClasspath("scala-reflect", "scala-reflect") getOrElse sys.error("No scala-reflect found! Classpath = " + fileManager.CLASSPATH)
+ fileManager.LATEST_COMP = findClasspath("scala-compiler", "scala-compiler") getOrElse sys.error("No scala-compiler found! Classpath = " + fileManager.CLASSPATH)
+ fileManager.LATEST_PARTEST = findClasspath("scala-partest", "partest") getOrElse sys.error("No scala-partest found! Classpath = " + fileManager.CLASSPATH)
+ fileManager.LATEST_ACTORS = findClasspath("scala-actors", "actors") getOrElse sys.error("No scala-actors found! Classpath = " + fileManager.CLASSPATH)
+
+ // TODO - Do something useful here!!!
+ fileManager.JAVAC_CMD = "javac"
+ fileManager.failed = config.justFailedTests
+ // TODO - Make this a flag?
+ //fileManager.updateCheck = true
+ // Now run and report...
+ val runs = config.tests.filterNot(_._2.isEmpty)
+ (for {
+ (testType, files) <- runs
+ (path, result) <- reflectiveRunTestsForFiles(files,testType).asScala
+ } yield (path, fixResult(result))).seq.asJava
+ }
+ def fixResult(result: TestState): String = result match {
+ case TestState.Ok => "OK"
+ case TestState.Fail => "FAIL"
+ case TestState.Timeout => "TIMEOUT"
+ }
+ def main(args: Array[String]): Unit = {
+ val failures = (
+ for ((path, result) <- mainReflect(args).asScala ; if result != TestState.Ok) yield
+ path + ( if (result == TestState.Fail) " [FAILED]" else " [TIMEOUT]" )
+ )
+ // Re-list all failures so we can go figure out what went wrong.
+ failures foreach System.err.println
+ if(!failures.isEmpty) sys.exit(1)
}
}
-
diff --git a/src/partest/scala/tools/partest/nest/TestFile.scala b/src/partest/scala/tools/partest/nest/TestFile.scala
index a00b94e..8717777 100644
--- a/src/partest/scala/tools/partest/nest/TestFile.scala
+++ b/src/partest/scala/tools/partest/nest/TestFile.scala
@@ -1,5 +1,5 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Philipp Haller
*/
@@ -12,30 +12,40 @@ import java.io.{ File => JFile }
import scala.tools.nsc.Settings
import scala.tools.nsc.util.ClassPath
import scala.tools.nsc.io._
+import scala.util.Properties.{ propIsSet, propOrElse, setProp }
-abstract class TestFile(kind: String) {
+trait TestFileCommon {
def file: JFile
- def fileManager: FileManager
+ def kind: String
- val dir = file.toAbsolute.parent
- val fileBase = file.stripExtension
- lazy val objectDir = dir / "%s-%s.obj".format(fileBase, kind) createDirectory true
- val flags: Option[String] = dir / "%s.flags".format(fileBase) ifFile { _.slurp().trim }
+ val dir = file.toAbsolute.parent
+ val fileBase = file.stripExtension
+ val flags = dir / (fileBase + ".flags") ifFile (f => f.slurp().trim)
+ lazy val objectDir = dir / (fileBase + "-" + kind + ".obj") createDirectory true
def setOutDirTo = objectDir
+}
- def defineSettings(settings: Settings, setOutDir: Boolean): Boolean = {
+abstract class TestFile(val kind: String) extends TestFileCommon {
+ def file: JFile
+ def fileManager: FileManager
+
+ def defineSettings(settings: Settings, setOutDir: Boolean) = {
settings.classpath append dir.path
if (setOutDir)
- settings.outdir.value = setOutDirTo.path
+ settings.outputDirs setSingleOutput setOutDirTo.path
+
+ // adding codelib.jar to the classpath
+ // codelib provides the possibility to override standard reify
+ // this shields the massive amount of reification tests from changes in the API
+ settings.classpath prepend PathSettings.srcCodeLib.toString
+ if (propIsSet("java.class.path")) setProp("java.class.path", PathSettings.srcCodeLib.toString + ";" + propOrElse("java.class.path", ""))
// have to catch bad flags somewhere
- flags foreach { f =>
- if (!settings.processArgumentString(f)._1)
- return false
+ (flags forall (f => settings.processArgumentString(f)._1)) && {
+ settings.classpath append fileManager.CLASSPATH
+ true
}
- settings.classpath append fileManager.CLASSPATH
- true
}
override def toString(): String = "%s %s".format(kind, file)
@@ -58,8 +68,14 @@ case class SpecializedTestFile(file: JFile, fileManager: FileManager) extends Te
super.defineSettings(settings, setOutDir) && {
// add the instrumented library version to classpath
settings.classpath prepend PathSettings.srcSpecLib.toString
+ // @partest maintainer: if we use a custom Scala build (specified via --classpath)
+ // then the classes provided by it will come earlier than instrumented.jar in the resulting classpath
+ // this entire classpath business needs a thorough solution
+ if (propIsSet("java.class.path")) setProp("java.class.path", PathSettings.srcSpecLib.toString + ";" + propOrElse("java.class.path", ""))
true
}
}
}
case class PresentationTestFile(file: JFile, fileManager: FileManager) extends TestFile("presentation")
+case class AntTestFile(file: JFile, fileManager: FileManager) extends TestFile("ant")
+case class InstrumentedTestFile(file: JFile, fileManager: FileManager) extends TestFile("instrumented")
diff --git a/src/partest/scala/tools/partest/nest/Worker.scala b/src/partest/scala/tools/partest/nest/Worker.scala
deleted file mode 100644
index 88ea623..0000000
--- a/src/partest/scala/tools/partest/nest/Worker.scala
+++ /dev/null
@@ -1,1019 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io._
-import java.net.URL
-import java.util.{ Timer, TimerTask }
-
-import scala.util.Properties.{ isWin }
-import scala.tools.nsc.{ Settings, CompilerCommand, Global }
-import scala.tools.nsc.io.{ AbstractFile, PlainFile, Path, Directory, File => SFile }
-import scala.tools.nsc.reporters.ConsoleReporter
-import scala.tools.nsc.util.{ ClassPath, FakePos, ScalaClassLoader, stackTraceString }
-import ClassPath.{ join, split }
-
-import scala.actors.{ Actor, Exit, TIMEOUT }
-import scala.actors.Actor._
-import scala.tools.scalap.scalax.rules.scalasig.ByteCode
-import scala.collection.{ mutable, immutable }
-import scala.tools.nsc.interactive.{ BuildManager, RefinedBuildManager }
-import scala.sys.process._
-
-case class RunTests(kind: String, files: List[File])
-case class Results(results: Map[String, Int])
-
-class LogContext(val file: File, val writers: Option[(StringWriter, PrintWriter)])
-
-object LogContext {
- def apply(file: File, swr: StringWriter, wr: PrintWriter): LogContext = {
- require (file != null)
- new LogContext(file, Some((swr, wr)))
- }
- def apply(file: File): LogContext = new LogContext(file, None)
-}
-
-abstract class TestResult {
- def file: File
-}
-case class Result(override val file: File, context: LogContext) extends TestResult
-case class Timeout(override val file: File) extends TestResult
-
-class ScalaCheckFileManager(val origmanager: FileManager) extends FileManager {
- def testRootDir: Directory = origmanager.testRootDir
- def testRootPath: String = origmanager.testRootPath
-
- var JAVACMD: String = origmanager.JAVACMD
- var JAVAC_CMD: String = origmanager.JAVAC_CMD
-
- var CLASSPATH: String = join(origmanager.CLASSPATH, PathSettings.scalaCheck.path)
- var LATEST_LIB: String = origmanager.LATEST_LIB
-}
-
-object Output {
- def init() {
- System.setOut(outRedirect)
- System.setErr(errRedirect)
- }
-
- import scala.util.DynamicVariable
- private def out = java.lang.System.out
- private def err = java.lang.System.err
- private val redirVar = new DynamicVariable[Option[PrintStream]](None)
-
- class Redirecter(stream: PrintStream) extends PrintStream(new OutputStream {
- def write(b: Int) = withStream(_ write b)
-
- private def withStream(f: PrintStream => Unit) = f(redirVar.value getOrElse stream)
-
- override def write(b: Array[Byte]) = withStream(_ write b)
- override def write(b: Array[Byte], off: Int, len: Int) = withStream(_.write(b, off, len))
- override def flush = withStream(_.flush)
- override def close = withStream(_.close)
- })
-
- object outRedirect extends Redirecter(out)
-
- object errRedirect extends Redirecter(err)
-
- // this supports thread-safe nested output redirects
- def withRedirected[T](newstream: PrintStream)(func: => T): T = {
- // note down old redirect destination
- // this may be None in which case outRedirect and errRedirect print to stdout and stderr
- val saved = redirVar.value
- // set new redirecter
- // this one will redirect both out and err to newstream
- redirVar.value = Some(newstream)
-
- try func
- finally {
- newstream.flush()
- redirVar.value = saved
- }
- }
-}
-
-
-class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor {
- import fileManager._
-
- val scalaCheckFileManager = new ScalaCheckFileManager(fileManager)
- var reporter: ConsoleReporter = _
- val timer = new Timer
-
- val javacCmd = if ((fileManager.JAVAC_CMD.indexOf("${env.JAVA_HOME}") != -1) ||
- fileManager.JAVAC_CMD.equals("/bin/javac") ||
- fileManager.JAVAC_CMD.equals("\\bin\\javac")) "javac"
- else
- fileManager.JAVAC_CMD
-
-
- def cancelTimerTask() = if (currentTimerTask != null) currentTimerTask.cancel()
- def updateTimerTask(body: => Unit) = {
- cancelTimerTask()
- currentTimerTask = new KickableTimerTask(body)
- timer.schedule(currentTimerTask, fileManager.oneTestTimeout)
- }
-
- class KickableTimerTask(body: => Unit) extends TimerTask {
- def run() = body
- def kick() = {
- cancel()
- body
- }
- }
-
- /** Formerly deeper inside, these next few things are now promoted outside so
- * I can see what they're doing when the world comes to a premature stop.
- */
- private var filesRemaining: List[File] = Nil
- private val toDelete = new mutable.HashSet[File]
- private val status = new mutable.HashMap[String, Int]
-
- private var currentTimerTask: KickableTimerTask = _
- private var currentFileStart: Long = System.currentTimeMillis
- private var currentTestFile: File = _
- private var kind: String = ""
- private def fileBase = basename(currentTestFile.getName)
-
- private def compareFiles(f1: File, f2: File): String =
- try fileManager.compareFiles(f1, f2)
- catch { case t => t.toString }
-
- // maps canonical file names to the test result (0: OK, 1: FAILED, 2: TIMOUT)
- private def updateStatus(key: String, num: Int) = status(key) = num
-
- private def cleanup() {
- toDelete foreach (_.deleteRecursively())
- toDelete.clear()
- }
- sys addShutdownHook cleanup()
-
- private def resetAll() {
- cancelTimerTask()
- filesRemaining = Nil
- cleanup()
- status.clear()
- currentTestFile = null
- currentTimerTask = null
- }
-
- def currentFileElapsed = (System.currentTimeMillis - currentFileStart) / 1000
- def forceTimeout() = {
- println("Let's see what them threads are doing before I kill that test.")
- sys.allThreads foreach { t =>
- println(t)
- t.getStackTrace foreach println
- println("")
- }
- currentTimerTask.kick()
- }
-
- /** This does something about absolute paths and file separator
- * chars before diffing.
- */
- //
- private def replaceSlashes(dir: File, s: String): String = {
- val base = (dir.getAbsolutePath + File.separator).replace('\\', '/')
- s.replace('\\', '/').replaceAll("""\Q%s\E""" format base, "")
- }
-
- private def currentFileString = {
- "Current test file is: %s\n Started: %s (%s seconds ago)\n Current time: %s".format(
- currentTestFile,
- new java.util.Date(currentFileStart),
- currentFileElapsed,
- new java.util.Date()
- )
- }
- private def getNextFile(): File = {
- if (filesRemaining.isEmpty) {
- currentTestFile = null
- }
- else {
- currentTestFile = filesRemaining.head
- filesRemaining = filesRemaining.tail
- currentFileStart = System.currentTimeMillis
- }
-
- currentTestFile
- }
-
- override def toString = (
- ">> Partest Worker in state " + getState + ":\n" +
- currentFileString + "\n" +
- "There are " + filesRemaining.size + " files remaining:\n" +
- "\nstatus hashmap contains " + status.size + " entries:\n" +
- status.toList.map(x => " " + x._1 + " -> " + x._2).sorted.mkString("\n") + "\n"
- )
-
- def workerError(msg: String): Unit = reporter.error(
- FakePos("scalac"),
- msg + "\n scalac -help gives more information"
- )
-
- def act() {
- react {
- case RunTests(testKind, files) =>
- val master = sender
- kind = testKind
- runTests(files) { results =>
- master ! Results(results.toMap)
- resetAll()
- }
- }
- }
-
- def printInfoStart(file: File, printer: PrintWriter) {
- NestUI.outline("testing: ", printer)
- val filesdir = file.getAbsoluteFile.getParentFile.getParentFile
- val testdir = filesdir.getParentFile
- val totalWidth = 56
- val name = {
- // 1. try with [...]/files/run/test.scala
- val name = file.getAbsolutePath drop testdir.getAbsolutePath.length
- if (name.length <= totalWidth) name
- // 2. try with [...]/run/test.scala
- else file.getAbsolutePath drop filesdir.getAbsolutePath.length
- }
- NestUI.normal("[...]%s%s".format(name, " " * (totalWidth - name.length)), printer)
- }
-
- def printInfoEnd(success: Boolean, printer: PrintWriter) {
- NestUI.normal("[", printer)
- if (success) NestUI.success(" OK ", printer)
- else NestUI.failure("FAILED", printer)
- NestUI.normal("]\n", printer)
- }
-
- def printInfoTimeout(printer: PrintWriter) {
- NestUI.normal("[", printer)
- NestUI.failure("TIMOUT", printer)
- NestUI.normal("]\n", printer)
- }
-
- def createLogFile(file: File) = fileManager.getLogFile(file, kind)
-
- def createOutputDir(dir: File): File = {
- val outDir = Path(dir) / Directory("%s-%s.obj".format(fileBase, kind))
- outDir.createDirectory()
- toDelete += outDir.jfile
- outDir.jfile
- }
-
- def javac(outDir: File, files: List[File], output: File): Boolean = {
- // compile using command-line javac compiler
- val cmd = "%s -d %s -classpath %s %s".format(
- javacCmd,
- outDir.getAbsolutePath,
- join(outDir.toString, CLASSPATH),
- files mkString " "
- )
-
- try runCommand(cmd, output)
- catch exHandler(output, "javac command '" + cmd + "' failed:\n")
- }
-
- /** Runs command redirecting standard out and
- * error out to output file.
- */
- def runCommand(command: String, outFile: File): Boolean = {
- NestUI.verbose("running command:\n"+command)
- (command #> outFile !) == 0
- }
-
- def execTest(outDir: File, logFile: File, classpathPrefix: String = ""): Boolean = {
- // check whether there is a ".javaopts" file
- val argsFile = new File(logFile.getParentFile, fileBase + ".javaopts")
- val argString = file2String(argsFile)
- if (argString != "")
- NestUI.verbose("Found javaopts file '%s', using options: '%s'".format(argsFile, argString))
-
- // Note! As this currently functions, JAVA_OPTS must precede argString
- // because when an option is repeated to java only the last one wins.
- // That means until now all the .javaopts files were being ignored because
- // they all attempt to change options which are also defined in
- // partest.java_opts, leading to debug output like:
- //
- // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k'
- // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...]
- val extras = if (isPartestDebug) List("-Dpartest.debug=true") else Nil
- val propertyOptions = List(
- "-Djava.library.path="+logFile.getParentFile.getAbsolutePath,
- "-Dpartest.output="+outDir.getAbsolutePath,
- "-Dpartest.lib="+LATEST_LIB,
- "-Dpartest.cwd="+outDir.getParent,
- "-Dpartest.testname="+fileBase,
- "-Djavacmd="+JAVACMD,
- "-Djavaccmd="+javacCmd,
- "-Duser.language=en -Duser.country=US"
- ) ++ extras
-
- val classpath = if (classpathPrefix != "") join(classpathPrefix, CLASSPATH) else CLASSPATH
- val cmd = (
- List(
- JAVACMD,
- JAVA_OPTS,
- argString,
- "-classpath " + join(outDir.toString, classpath)
- ) ++ propertyOptions ++ List(
- "scala.tools.nsc.MainGenericRunner",
- "-usejavacp",
- "Test",
- "jvm"
- )
- ) mkString " "
-
- runCommand(cmd, logFile)
- }
-
- def getCheckFilePath(dir: File, suffix: String = "") = {
- def chkFile(s: String) = (Directory(dir) / "%s%s.check".format(fileBase, s)).toFile
-
- if (chkFile("").isFile || suffix == "") chkFile("")
- else chkFile("-" + suffix)
- }
- def getCheckFile(dir: File) = Some(getCheckFilePath(dir, kind)) filter (_.canRead)
-
- def compareOutput(dir: File, logFile: File): String = {
- val checkFile = getCheckFilePath(dir, kind)
- // if check file exists, compare with log file
- val diff =
- if (checkFile.canRead) compareFiles(logFile, checkFile.jfile)
- else file2String(logFile)
-
- if (diff != "" && fileManager.updateCheck) {
- NestUI.verbose("output differs from log file: updating checkfile\n")
- val toWrite = if (checkFile.exists) checkFile else getCheckFilePath(dir, "")
- toWrite writeAll file2String(logFile)
- ""
- }
- else diff
- }
-
- def isJava(f: File) = SFile(f) hasExtension "java"
- def isScala(f: File) = SFile(f) hasExtension "scala"
- def isJavaOrScala(f: File) = isJava(f) || isScala(f)
-
- def outputLogFile(logFile: File) {
- val lines = SFile(logFile).lines
- if (lines.nonEmpty) {
- NestUI.normal("Log file '" + logFile + "': \n")
- lines foreach (x => NestUI.normal(x + "\n"))
- }
- }
- def logStackTrace(logFile: File, t: Throwable, msg: String): Boolean = {
- SFile(logFile).writeAll(msg, stackTraceString(t))
- outputLogFile(logFile) // if running the test threw an exception, output log file
- false
- }
-
- def exHandler(logFile: File): PartialFunction[Throwable, Boolean] = exHandler(logFile, "")
- def exHandler(logFile: File, msg: String): PartialFunction[Throwable, Boolean] = {
- case e: Exception => logStackTrace(logFile, e, msg)
- }
-
- /** Runs a list of tests.
- *
- * @param files The list of test files
- */
- def runTests(files: List[File])(topcont: Map[String, Int] => Unit) {
- val compileMgr = new CompileManager(fileManager)
- if (kind == "scalacheck") fileManager.CLASSPATH += File.pathSeparator + PathSettings.scalaCheck
-
- filesRemaining = files
-
- // You don't default "succeeded" to true.
- var succeeded = false
- var done = filesRemaining.isEmpty
- var errors = 0
- var diff = ""
-
- def initNextTest() = {
- val swr = new StringWriter
- val wr = new PrintWriter(swr, true)
- diff = ""
-
- ((swr, wr))
- }
-
- def fail(what: Any) = {
- NestUI.verbose("scalac: compilation of "+what+" failed\n")
- false
- }
- def diffCheck(latestDiff: String) = {
- diff = latestDiff
- succeeded = diff == ""
- succeeded
- }
-
- def timed[T](body: => T): (T, Long) = {
- val t1 = System.currentTimeMillis
- val result = body
- val t2 = System.currentTimeMillis
-
- (result, t2 - t1)
- }
-
- /** 1. Creates log file and output directory.
- * 2. Runs script function, providing log file and output directory as arguments.
- */
- def runInContext(file: File, script: (File, File) => Boolean): LogContext = {
- // When option "--failed" is provided, execute test only if log file is present
- // (which means it failed before)
- val logFile = createLogFile(file)
-
- if (fileManager.failed && !logFile.canRead)
- LogContext(logFile)
- else {
- val (swr, wr) = initNextTest()
- printInfoStart(file, wr)
-
- NestUI.verbose(this+" running test "+fileBase)
- val dir = file.getParentFile
- val outDir = createOutputDir(dir)
- NestUI.verbose("output directory: "+outDir)
-
- // run test-specific code
- succeeded = try {
- if (isPartestDebug) {
- val (result, millis) = timed(script(logFile, outDir))
- fileManager.recordTestTiming(file.getPath, millis)
- result
- }
- else script(logFile, outDir)
- }
- catch exHandler(logFile)
-
- LogContext(logFile, swr, wr)
- }
- }
-
- def compileFilesIn(dir: File, logFile: File, outDir: File): Boolean = {
- val testFiles = dir.listFiles.toList filter isJavaOrScala
-
- def isInGroup(f: File, num: Int) = SFile(f).stripExtension endsWith ("_" + num)
- val groups = (0 to 9).toList map (num => testFiles filter (f => isInGroup(f, num)))
- val noGroupSuffix = testFiles filterNot (groups.flatten contains)
-
- def compileGroup(g: List[File]): Boolean = {
- val (scalaFiles, javaFiles) = g partition isScala
- val allFiles = javaFiles ++ scalaFiles
-
- // scala+java, then java, then scala
- (scalaFiles.isEmpty || compileMgr.shouldCompile(outDir, allFiles, kind, logFile) || fail(g)) && {
- (javaFiles.isEmpty || javac(outDir, javaFiles, logFile)) && {
- (scalaFiles.isEmpty || compileMgr.shouldCompile(outDir, scalaFiles, kind, logFile) || fail(scalaFiles))
- }
- }
- }
-
- (noGroupSuffix.isEmpty || compileGroup(noGroupSuffix)) && (groups forall compileGroup)
- }
-
- def failCompileFilesIn(dir: File, logFile: File, outDir: File): Boolean = {
- val testFiles = dir.listFiles.toList
- val sourceFiles = testFiles filter isJavaOrScala
-
- sourceFiles.isEmpty || compileMgr.shouldFailCompile(outDir, sourceFiles, kind, logFile) || fail(testFiles filter isScala)
- }
-
- def runTestCommon(file: File, expectFailure: Boolean)(
- onSuccess: (File, File) => Boolean,
- onFail: (File, File) => Unit = (_, _) => ()): LogContext =
- {
- runInContext(file, (logFile: File, outDir: File) => {
- val result =
- if (file.isDirectory) {
- if (expectFailure) failCompileFilesIn(file, logFile, outDir)
- else compileFilesIn(file, logFile, outDir)
- }
- else {
- if (expectFailure) compileMgr.shouldFailCompile(List(file), kind, logFile)
- else compileMgr.shouldCompile(List(file), kind, logFile)
- }
-
- if (result) onSuccess(logFile, outDir)
- else { onFail(logFile, outDir) ; false }
- })
- }
-
- def runJvmTest(file: File): LogContext =
- runTestCommon(file, expectFailure = false)((logFile, outDir) => {
- val dir = file.getParentFile
-
- execTest(outDir, logFile) && diffCheck(compareOutput(dir, logFile))
- })
-
- def runSpecializedTest(file: File): LogContext =
- runTestCommon(file, expectFailure = false)((logFile, outDir) => {
- val dir = file.getParentFile
-
- // adding the instrumented library to the classpath
- execTest(outDir, logFile, PathSettings.srcSpecLib.toString) &&
- diffCheck(compareOutput(dir, logFile))
- })
-
- def processSingleFile(file: File): LogContext = kind match {
- case "scalacheck" =>
- val succFn: (File, File) => Boolean = { (logFile, outDir) =>
- NestUI.verbose("compilation of "+file+" succeeded\n")
-
- val outURL = outDir.getAbsoluteFile.toURI.toURL
- val logWriter = new PrintStream(new FileOutputStream(logFile), true)
-
- Output.withRedirected(logWriter) {
- // this classloader is test specific: its parent contains library classes and others
- ScalaClassLoader.fromURLs(List(outURL), params.scalaCheckParentClassLoader).run("Test", Nil)
- }
-
- NestUI.verbose(file2String(logFile))
- // obviously this must be improved upon
- val lines = SFile(logFile).lines map (_.trim) filterNot (_ == "") toBuffer;
- if (lines forall (x => !x.startsWith("!"))) {
- NestUI.verbose("test for '" + file + "' success: " + succeeded)
- true
- }
- else {
- NestUI.normal("ScalaCheck test failed. Output:\n")
- lines foreach (x => NestUI.normal(x + "\n"))
- false
- }
- }
- runTestCommon(file, expectFailure = false)(
- succFn,
- (logFile, outDir) => outputLogFile(logFile)
- )
-
- case "pos" =>
- runTestCommon(file, expectFailure = false)(
- (logFile, outDir) => true,
- (_, _) => ()
- )
-
- case "neg" =>
- runTestCommon(file, expectFailure = true)((logFile, outDir) => {
- // compare log file to check file
- val dir = file.getParentFile
-
- // diff is contents of logFile
- diffCheck(compareOutput(dir, logFile))
- })
-
- case "run" | "jvm" =>
- runJvmTest(file)
-
- case "specialized" =>
- runSpecializedTest(file)
-
- case "presentation" =>
- runJvmTest(file) // for the moment, it's exactly the same as for a run test
-
- case "buildmanager" =>
- val logFile = createLogFile(file)
- if (!fileManager.failed || logFile.canRead) {
- val (swr, wr) = initNextTest()
- printInfoStart(file, wr)
- val (outDir, testFile, changesDir) = (
- if (!file.isDirectory)
- (null, null, null)
- else {
- NestUI.verbose(this+" running test "+fileBase)
- val outDir = createOutputDir(file)
- val testFile = new File(file, fileBase + ".test")
- val changesDir = new File(file, fileBase + ".changes")
-
- if (changesDir.isFile || !testFile.isFile) {
- // if changes exists then it has to be a dir
- if (!testFile.isFile) NestUI.verbose("invalid build manager test file")
- if (changesDir.isFile) NestUI.verbose("invalid build manager changes directory")
- (null, null, null)
- }
- else {
- copyTestFiles(file, outDir)
- NestUI.verbose("outDir: "+outDir)
- NestUI.verbose("logFile: "+logFile)
- (outDir, testFile, changesDir)
- }
- }
- )
-
- if (outDir != null) {
- // Pre-conditions satisfied
- try {
- val sourcepath = outDir.getAbsolutePath+File.separator
-
- // configure input/output files
- val logWriter = new PrintStream(new FileOutputStream(logFile), true)
- val testReader = new BufferedReader(new FileReader(testFile))
- val logConsoleWriter = new PrintWriter(logWriter, true)
-
- // create proper settings for the compiler
- val settings = new Settings(workerError)
- settings.outdir.value = outDir.getAbsoluteFile.getAbsolutePath
- settings.sourcepath.value = sourcepath
- settings.classpath.value = fileManager.CLASSPATH
- settings.Ybuildmanagerdebug.value = true
-
- // simulate Build Manager loop
- val prompt = "builder > "
- reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
- val bM: BuildManager =
- new RefinedBuildManager(settings) {
- override protected def newCompiler(settings: Settings) =
- new BuilderGlobal(settings, reporter)
- }
-
- def testCompile(line: String): Boolean = {
- NestUI.verbose("compiling " + line)
- val args = (line split ' ').toList
- val command = new CompilerCommand(args, settings)
- command.ok && {
- bM.update(filesToSet(settings.sourcepath.value, command.files), Set.empty)
- !reporter.hasErrors
- }
- }
-
- val updateFiles = (line: String) => {
- NestUI.verbose("updating " + line)
- val res =
- ((line split ' ').toList).forall(u => {
- (u split "=>").toList match {
- case origFileName::(newFileName::Nil) =>
- val newFile = new File(changesDir, newFileName)
- if (newFile.isFile) {
- val v = overwriteFileWith(new File(outDir, origFileName), newFile)
- if (!v)
- NestUI.verbose("'update' operation on " + u + " failed")
- v
- } else {
- NestUI.verbose("File " + newFile + " is invalid")
- false
- }
- case a =>
- NestUI.verbose("Other =: " + a)
- false
- }
- })
- NestUI.verbose("updating " + (if (res) "succeeded" else "failed"))
- res
- }
-
- def loop(): Boolean = {
- testReader.readLine() match {
- case null | "" =>
- NestUI.verbose("finished")
- true
- case s if s startsWith ">>update " =>
- updateFiles(s stripPrefix ">>update ") && loop()
- case s if s startsWith ">>compile " =>
- val files = s stripPrefix ">>compile "
- logWriter.println(prompt + files)
- // In the end, it can finish with an error
- if (testCompile(files)) loop()
- else {
- val t = testReader.readLine()
- (t == null) || (t == "")
- }
- case s =>
- NestUI.verbose("wrong command in test file: " + s)
- false
- }
- }
-
- Output.withRedirected(logWriter) {
- try loop()
- finally testReader.close()
- }
- fileManager.mapFile(logFile, replaceSlashes(new File(sourcepath), _))
- diffCheck(compareOutput(file, logFile))
- }
- LogContext(logFile, swr, wr)
- } else
- LogContext(logFile)
- } else
- LogContext(logFile)
-
- case "res" => {
- // simulate resident compiler loop
- val prompt = "\nnsc> "
-
- // when option "--failed" is provided
- // execute test only if log file is present
- // (which means it failed before)
- val logFile = createLogFile(file)
- if (!fileManager.failed || logFile.canRead) {
- val (swr, wr) = initNextTest()
- printInfoStart(file, wr)
-
- NestUI.verbose(this+" running test "+fileBase)
- val dir = file.getParentFile
- val outDir = createOutputDir(dir)
- val resFile = new File(dir, fileBase + ".res")
- NestUI.verbose("outDir: "+outDir)
- NestUI.verbose("logFile: "+logFile)
- //NestUI.verbose("logFileErr: "+logFileErr)
- NestUI.verbose("resFile: "+resFile)
-
- // run compiler in resident mode
- // $SCALAC -d "$os_dstbase".obj -Xresident -sourcepath . "$@"
- val sourcedir = logFile.getParentFile.getAbsoluteFile
- val sourcepath = sourcedir.getAbsolutePath+File.separator
- NestUI.verbose("sourcepath: "+sourcepath)
-
- val argString =
- "-d "+outDir.getAbsoluteFile.getPath+
- " -Xresident"+
- " -sourcepath "+sourcepath
- val argList = argString split ' ' toList
-
- // configure input/output files
- val logOut = new FileOutputStream(logFile)
- val logWriter = new PrintStream(logOut, true)
- val resReader = new BufferedReader(new FileReader(resFile))
- val logConsoleWriter = new PrintWriter(new OutputStreamWriter(logOut), true)
-
- // create compiler
- val settings = new Settings(workerError)
- settings.sourcepath.value = sourcepath
- settings.classpath.value = fileManager.CLASSPATH
- reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
- val command = new CompilerCommand(argList, settings)
- object compiler extends Global(command.settings, reporter)
-
- val resCompile = (line: String) => {
- NestUI.verbose("compiling "+line)
- val cmdArgs = (line split ' ').toList map (fs => new File(dir, fs).getAbsolutePath)
- NestUI.verbose("cmdArgs: "+cmdArgs)
- val sett = new Settings(workerError)
- sett.sourcepath.value = sourcepath
- val command = new CompilerCommand(cmdArgs, sett)
- command.ok && {
- (new compiler.Run) compile command.files
- !reporter.hasErrors
- }
- }
-
- def loop(action: String => Boolean): Boolean = {
- logWriter.print(prompt)
- resReader.readLine() match {
- case null | "" => logWriter.flush() ; true
- case line => action(line) && loop(action)
- }
- }
-
- Output.withRedirected(logWriter) {
- try loop(resCompile)
- finally resReader.close()
- }
- fileManager.mapFile(logFile, replaceSlashes(dir, _))
- diffCheck(compareOutput(dir, logFile))
- LogContext(logFile, swr, wr)
- } else
- LogContext(logFile)
- }
-
- case "shootout" => {
- // when option "--failed" is provided
- // execute test only if log file is present
- // (which means it failed before)
- val logFile = createLogFile(file)
- if (!fileManager.failed || logFile.canRead) {
- val (swr, wr) = initNextTest()
- printInfoStart(file, wr)
-
- NestUI.verbose(this+" running test "+fileBase)
- val dir = file.getParentFile
- val outDir = createOutputDir(dir)
-
- // 2. define file {outDir}/test.scala that contains code to compile/run
- val testFile = new File(outDir, "test.scala")
- NestUI.verbose("outDir: "+outDir)
- NestUI.verbose("logFile: "+logFile)
- NestUI.verbose("testFile: "+testFile)
-
- // 3. cat {test}.scala.runner {test}.scala > testFile
- val runnerFile = new File(dir, fileBase+".scala.runner")
- val bodyFile = new File(dir, fileBase+".scala")
- SFile(testFile).writeAll(
- file2String(runnerFile),
- file2String(bodyFile)
- )
-
- // 4. compile testFile
- val ok = compileMgr.shouldCompile(List(testFile), kind, logFile)
- NestUI.verbose("compilation of " + testFile + (if (ok) "succeeded" else "failed"))
- if (ok) {
- execTest(outDir, logFile) && {
- NestUI.verbose(this+" finished running "+fileBase)
- diffCheck(compareOutput(dir, logFile))
- }
- }
-
- LogContext(logFile, swr, wr)
- }
- else
- LogContext(logFile)
- }
-
- case "scalap" =>
- runInContext(file, (logFile: File, outDir: File) => {
- val sourceDir = Directory(if (file.isFile) file.getParent else file)
- val sources = sourceDir.files filter (_ hasExtension "scala") map (_.jfile) toList
- val results = sourceDir.files filter (_.name == "result.test") map (_.jfile) toList
-
- if (sources.length != 1 || results.length != 1) {
- NestUI.warning("Misconfigured scalap test directory: " + sourceDir + " \n")
- false
- }
- else {
- val resFile = results.head
- // 2. Compile source file
- if (!compileMgr.shouldCompile(outDir, sources, kind, logFile)) {
- NestUI.normal("compilerMgr failed to compile %s to %s".format(sources mkString ", ", outDir))
- false
- }
- else {
- // 3. Decompile file and compare results
- val isPackageObject = sourceDir.name startsWith "package"
- val className = sourceDir.name.capitalize + (if (!isPackageObject) "" else ".package")
- val url = outDir.toURI.toURL
- val loader = ScalaClassLoader.fromURLs(List(url), this.getClass.getClassLoader)
- val clazz = loader.loadClass(className)
-
- val byteCode = ByteCode.forClass(clazz)
- val result = scala.tools.scalap.Main.decompileScala(byteCode.bytes, isPackageObject)
-
- SFile(logFile) writeAll result
- diffCheck(compareFiles(logFile, resFile))
- }
- }
- })
-
- case "script" => {
- // when option "--failed" is provided
- // execute test only if log file is present
- // (which means it failed before)
- val logFile = createLogFile(file)
- if (!fileManager.failed || logFile.canRead) {
- val (swr, wr) = initNextTest()
- printInfoStart(file, wr)
-
- NestUI.verbose(this+" running test "+fileBase)
-
- // check whether there is an args file
- val argsFile = new File(file.getParentFile, fileBase+".args")
- NestUI.verbose("argsFile: "+argsFile)
- val argString = file2String(argsFile)
-
- try {
- val cmdString =
- if (isWin) {
- val batchFile = new File(file.getParentFile, fileBase+".bat")
- NestUI.verbose("batchFile: "+batchFile)
- batchFile.getAbsolutePath
- }
- else file.getAbsolutePath
-
- succeeded = ((cmdString+argString) #> logFile !) == 0
- diffCheck(compareOutput(file.getParentFile, logFile))
- }
- catch { // *catch-all*
- case e: Exception =>
- NestUI.verbose("caught "+e)
- succeeded = false
- }
-
- LogContext(logFile, swr, wr)
- } else
- LogContext(logFile)
- }
- }
-
- def reportAll(results: Map[String, Int], cont: Map[String, Int] => Unit) {
- timer.cancel()
- cont(results)
- }
-
- object TestState {
- val Ok = 0
- val Fail = 1
- val Timeout = 2
- }
-
- def reportResult(state: Int, logFile: File, writers: Option[(StringWriter, PrintWriter)]) {
- val isGood = state == TestState.Ok
- val isFail = state == TestState.Fail
- val isTimeout = state == TestState.Timeout
- val hasLog = logFile != null
-
- if (isGood) {
- // add logfile from deletion list if test passed
- if (hasLog)
- toDelete += logFile
- }
- else {
- errors += 1
- NestUI.verbose("incremented errors: "+errors)
- }
-
- writers foreach { case (swr, wr) =>
- if (isTimeout) printInfoTimeout(wr)
- else printInfoEnd(isGood, wr)
- wr.flush()
- swr.flush()
- NestUI.normal(swr.toString)
- if (isFail) {
- if ((fileManager.showDiff || isPartestDebug) && diff != "")
- NestUI.normal(diff)
- else if (fileManager.showLog)
- showLog(logFile)
- }
- }
- cleanup()
- }
-
- def finish() = {
- done = true
- cancelTimerTask()
- reportAll(status.toMap, topcont)
- }
-
- Actor.loopWhile(!done) {
- val parent = self
-
- actor {
- val testFile = getNextFile()
-
- if (testFile == null)
- finish()
- else {
- updateTimerTask(parent ! Timeout(testFile))
-
- val context =
- try processSingleFile(testFile)
- catch {
- case t =>
- succeeded = false
- try {
- val logFile = createLogFile(testFile)
- logStackTrace(logFile, t, "Possible compiler crash during test of: " + testFile + "\n")
- LogContext(logFile)
- }
- catch {
- case t => LogContext(null)
- }
- }
- parent ! Result(testFile, context)
- }
- }
-
- react {
- case Timeout(file) =>
- updateStatus(file.getAbsolutePath, TestState.Timeout)
- val swr = new StringWriter
- val wr = new PrintWriter(swr, true)
- printInfoStart(file, wr)
- reportResult(
- TestState.Timeout,
- null,
- Some((swr, wr))
- )
-
- case Result(file, logs) =>
- val state = if (succeeded) TestState.Ok else TestState.Fail
- updateStatus(file.getAbsolutePath, state)
- reportResult(
- state,
- logs.file,
- logs.writers
- )
- }
- }
- }
-
- private def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] =
- fs flatMap (s => Option(AbstractFile getFile (pre + s))) toSet
-
- private def copyTestFiles(testDir: File, destDir: File) {
- val invalidExts = List("changes", "svn", "obj")
- testDir.listFiles.toList filter (
- f => (isJavaOrScala(f) && f.isFile) ||
- (f.isDirectory && !(invalidExts.contains(SFile(f).extension)))) foreach
- { f => fileManager.copyFile(f, destDir) }
- }
-
- def showLog(logFile: File) {
- file2String(logFile) match {
- case "" if logFile.canRead => ()
- case "" => NestUI.failure("Couldn't open log file: " + logFile + "\n")
- case s => NestUI.normal(s)
- }
- }
-}
diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala
index fe33064..d38ce69 100644
--- a/src/partest/scala/tools/partest/package.scala
+++ b/src/partest/scala/tools/partest/package.scala
@@ -1,14 +1,28 @@
/* NEST (New Scala Test)
- * Copyright 2007-2011 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
*/
package scala.tools
import java.io.{ FileNotFoundException, File => JFile }
import nsc.io.{ Path, Directory, File => SFile }
-import util.{ PathResolver }
+import scala.tools.util.PathResolver
import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
import scala.sys.process.javaVmArguments
+import java.util.concurrent.Callable
+
+package partest {
+ class TestState {
+ def isOk = this eq TestState.Ok
+ def isFail = this eq TestState.Fail
+ def isTimeout = this eq TestState.Timeout
+ }
+ object TestState {
+ val Ok = new TestState
+ val Fail = new TestState
+ val Timeout = new TestState
+ }
+}
package object partest {
import nest.NestUI
@@ -16,6 +30,19 @@ package object partest {
implicit private[partest] def temporaryPath2File(x: Path): JFile = x.jfile
implicit private[partest] def temporaryFile2Path(x: JFile): Path = Path(x)
+ implicit lazy val postfixOps = scala.language.postfixOps
+ implicit lazy val implicitConversions = scala.language.implicitConversions
+
+ def timed[T](body: => T): (T, Long) = {
+ val t1 = System.currentTimeMillis
+ val result = body
+ val t2 = System.currentTimeMillis
+
+ (result, t2 - t1)
+ }
+
+ def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
+
def path2String(path: String) = file2String(new JFile(path))
def file2String(f: JFile) =
try SFile(f).slurp()
@@ -23,8 +50,8 @@ package object partest {
def basename(name: String): String = Path(name).stripExtension
- def resultsToStatistics(results: Iterable[(_, Int)]): (Int, Int) = {
- val (files, failures) = results map (_._2 == 0) partition (_ == true)
+ def resultsToStatistics(results: Iterable[(_, TestState)]): (Int, Int) = {
+ val (files, failures) = results map (_._2 == TestState.Ok) partition (_ == true)
(files.size, failures.size)
}
@@ -35,8 +62,8 @@ package object partest {
)
def allPropertiesString = {
- import collection.JavaConversions._
- System.getProperties.toList.sorted map { case (k, v) => "%s -> %s\n".format(k, v) } mkString
+ import scala.collection.JavaConversions._
+ System.getProperties.toList.sorted map { case (k, v) => "%s -> %s\n".format(k, v) } mkString ""
}
def showAllJVMInfo() {
@@ -46,4 +73,54 @@ package object partest {
def isPartestDebug: Boolean =
propOrEmpty("partest.debug") == "true"
+
+
+ import scala.language.experimental.macros
+
+ /**
+ * `trace("".isEmpty)` will return `true` and as a side effect print the following to standard out.
+ * {{{
+ * trace> "".isEmpty
+ * res: Boolean = true
+ *
+ * }}}
+ *
+ * An alternative to [[scala.tools.partest.ReplTest]] that avoids the inconvenience of embedding
+ * test code in a string.
+ */
+ def trace[A](a: A) = macro traceImpl[A]
+
+ import scala.reflect.macros.Context
+ def traceImpl[A: c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = {
+ import c.universe._
+ import definitions._
+
+ // xeno.by: reify shouldn't be used explicitly before the final release of 2.10.0,
+ // because this impairs reflection refactorings
+ //
+ // val exprCode = c.literal(show(a.tree))
+ // val exprType = c.literal(show(a.actualType))
+ // reify {
+ // println(s"trace> ${exprCode.splice}\nres: ${exprType.splice} = ${a.splice}\n")
+ // a.splice
+ // }
+
+ c.Expr(Block(
+ List(Apply(
+ Select(Ident(PredefModule), newTermName("println")),
+ List(Apply(
+ Select(Apply(
+ Select(Ident(ScalaPackage), newTermName("StringContext")),
+ List(
+ Literal(Constant("trace> ")),
+ Literal(Constant("\\nres: ")),
+ Literal(Constant(" = ")),
+ Literal(Constant("\\n")))),
+ newTermName("s")),
+ List(
+ Literal(Constant(show(a.tree))),
+ Literal(Constant(show(a.actualType))),
+ a.tree))))),
+ a.tree))
+ }
}
diff --git a/src/partest/scala/tools/partest/utils/PrintMgr.scala b/src/partest/scala/tools/partest/utils/PrintMgr.scala
index ff61c2b..d25be87 100644
--- a/src/partest/scala/tools/partest/utils/PrintMgr.scala
+++ b/src/partest/scala/tools/partest/utils/PrintMgr.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/partest/scala/tools/partest/utils/Properties.scala b/src/partest/scala/tools/partest/utils/Properties.scala
index b8a6ec8..1263c96 100644
--- a/src/partest/scala/tools/partest/utils/Properties.scala
+++ b/src/partest/scala/tools/partest/utils/Properties.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -13,5 +13,5 @@ package utils
/** Loads partest.properties from the jar. */
object Properties extends scala.util.PropertiesTrait {
protected def propCategory = "partest"
- protected def pickJarBasedOn = classOf[nest.Worker]
+ protected def pickJarBasedOn = classOf[nest.RunnerManager]
}
diff --git a/src/reflect/scala/reflect/api/Annotations.scala b/src/reflect/scala/reflect/api/Annotations.scala
new file mode 100644
index 0000000..09eaf7a
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Annotations.scala
@@ -0,0 +1,203 @@
+package scala.reflect
+package api
+
+import scala.collection.immutable.ListMap
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * This trait provides annotation support for the reflection API.
+ *
+ * The API distinguishes between two kinds of annotations:
+ *
+ * <ul>
+ * <li>''Java annotations'': annotations on definitions produced by the Java compiler, i.e., subtypes of [[java.lang.annotation.Annotation]]
+ * attached to program definitions. When read by Scala reflection, the [[scala.annotation.ClassfileAnnotation]] trait
+ * is automatically added as a subclass to every Java annotation.</li>
+ * <li>''Scala annotations'': annotations on definitions or types produced by the Scala compiler.</li>
+ * </ul>
+ *
+ * When a Scala annotation that inherits from [[scala.annotation.StaticAnnotation]] or [[scala.annotation.ClassfileAnnotation]] is compiled,
+ * it is stored as special attributes in the corresponding classfile, and not as a Java annotation. Note that subclassing
+ * just [[scala.annotation.Annotation]] is not enough to have the corresponding metadata persisted for runtime reflection.
+ *
+ * The distinction between Java and Scala annotations is manifested in the contract of [[scala.reflect.api.Annotations#Annotation]], which exposes
+ * both `scalaArgs` and `javaArgs`. For Scala or Java annotations extending [[scala.annotation.ClassfileAnnotation]] `scalaArgs` is empty
+ * and arguments are stored in `javaArgs`. For all other Scala annotations, arguments are stored in `scalaArgs` and `javaArgs` is empty.
+ *
+ * Arguments in `scalaArgs` are represented as typed trees. Note that these trees are not transformed by any phases
+ * following the type-checker. Arguments in `javaArgs` are repesented as a map from [[scala.reflect.api.Names#Name]] to
+ * [[scala.reflect.api.Annotations#JavaArgument]]. Instances of `JavaArgument` represent different kinds of Java annotation arguments:
+ * - literals (primitive and string constants),
+ * - arrays and
+ * - nested annotations.
+ *
+ * For more information about `Annotation`s, see the [[http://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]]
+ *
+ * @contentDiagram hideNodes "*Api"
+ * @group ReflectionAPI
+ */
+trait Annotations { self: Universe =>
+
+ /** Information about an annotation.
+ * @template
+ * @group Annotations
+ */
+ type Annotation >: Null <: AnyRef with AnnotationApi
+
+ /** A tag that preserves the identity of the `Annotation` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val AnnotationTag: ClassTag[Annotation]
+
+ /** The constructor/extractor for `Annotation` instances.
+ * @group Extractors
+ */
+ val Annotation: AnnotationExtractor
+
+ /** An extractor class to create and pattern match with syntax `Annotation(tpe, scalaArgs, javaArgs)`.
+ * Here, `tpe` is the annotation type, `scalaArgs` the payload of Scala annotations, and `javaArgs` the payload of Java annotations.
+ * @group Extractors
+ */
+ abstract class AnnotationExtractor {
+ def apply(tpe: Type, scalaArgs: List[Tree], javaArgs: ListMap[Name, JavaArgument]): Annotation
+ def unapply(ann: Annotation): Option[(Type, List[Tree], ListMap[Name, JavaArgument])]
+ }
+
+ /** The API of `Annotation` instances.
+ * The main source of information about annotations is the [[scala.reflect.api.Annotations]] page.
+ * @group API
+ */
+ trait AnnotationApi {
+ /** The type of the annotation. */
+ def tpe: Type
+
+ /** Payload of the Scala annotation: a list of abstract syntax trees that represent the argument.
+ * Empty for Java annotations.
+ */
+ def scalaArgs: List[Tree]
+
+ /** Payload of the Java annotation: a list of name-value pairs.
+ * Empty for Scala annotations.
+ */
+ def javaArgs: ListMap[Name, JavaArgument]
+ }
+
+ /** A Java annotation argument
+ * @template
+ * @group Annotations
+ */
+ type JavaArgument >: Null <: AnyRef
+
+ /** A tag that preserves the identity of the `JavaArgument` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val JavaArgumentTag: ClassTag[JavaArgument]
+
+ /** A literal argument to a Java annotation as `"Use X instead"` in `@Deprecated("Use X instead")`
+ * @template
+ * @group Annotations
+ */
+ type LiteralArgument >: Null <: AnyRef with JavaArgument with LiteralArgumentApi
+
+ /** A tag that preserves the identity of the `LiteralArgument` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val LiteralArgumentTag: ClassTag[LiteralArgument]
+
+ /** The constructor/extractor for `LiteralArgument` instances.
+ * @group Extractors
+ */
+ val LiteralArgument: LiteralArgumentExtractor
+
+ /** An extractor class to create and pattern match with syntax `LiteralArgument(value)`
+ * where `value` is the constant argument.
+ * @group Extractors
+ */
+ abstract class LiteralArgumentExtractor {
+ def apply(value: Constant): LiteralArgument
+ def unapply(arg: LiteralArgument): Option[Constant]
+ }
+
+ /** The API of `LiteralArgument` instances.
+ * The main source of information about annotations is the [[scala.reflect.api.Annotations]] page.
+ * @group API
+ */
+ trait LiteralArgumentApi {
+ /** The underlying compile-time constant value. */
+ def value: Constant
+ }
+
+ /** An array argument to a Java annotation as in `@Target(value={TYPE,FIELD,METHOD,PARAMETER})`
+ * @template
+ * @group Annotations
+ */
+ type ArrayArgument >: Null <: AnyRef with JavaArgument with ArrayArgumentApi
+
+ /** A tag that preserves the identity of the `ArrayArgument` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ArrayArgumentTag: ClassTag[ArrayArgument]
+
+ /** The constructor/extractor for `ArrayArgument` instances.
+ * @group Extractors
+ */
+ val ArrayArgument: ArrayArgumentExtractor
+
+ /** An extractor class to create and pattern match with syntax `ArrayArgument(args)`
+ * where `args` is the argument array.
+ * @group Extractors
+ */
+ abstract class ArrayArgumentExtractor {
+ def apply(args: Array[JavaArgument]): ArrayArgument
+ def unapply(arg: ArrayArgument): Option[Array[JavaArgument]]
+ }
+
+ /** API of `ArrayArgument` instances.
+ * The main source of information about annotations is the [[scala.reflect.api.Annotations]] page.
+ * @group API
+ */
+ trait ArrayArgumentApi {
+ /** The underlying array of Java annotation arguments. */
+ def args: Array[JavaArgument]
+ }
+
+ /** A nested annotation argument to a Java annotation as `@Nested` in `@Outer(@Nested)`.
+ * @template
+ * @group Annotations
+ */
+ type NestedArgument >: Null <: AnyRef with JavaArgument with NestedArgumentApi
+
+ /** A tag that preserves the identity of the `NestedArgument` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val NestedArgumentTag: ClassTag[NestedArgument]
+
+ /** The constructor/extractor for `NestedArgument` instances.
+ * @group Extractors
+ */
+ val NestedArgument: NestedArgumentExtractor
+
+ /** An extractor class to create and pattern match with syntax `NestedArgument(annotation)`
+ * where `annotation` is the nested annotation.
+ * @group Extractors
+ */
+ abstract class NestedArgumentExtractor {
+ def apply(annotation: Annotation): NestedArgument
+ def unapply(arg: NestedArgument): Option[Annotation]
+ }
+
+ /** API of `NestedArgument` instances.
+ * The main source of information about annotations is the [[scala.reflect.api.Annotations]] page.
+ * @group API
+ */
+ trait NestedArgumentApi {
+ /** The underlying nested annotation. */
+ def annotation: Annotation
+ }
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala
new file mode 100644
index 0000000..0c8e81a
--- /dev/null
+++ b/src/reflect/scala/reflect/api/BuildUtils.scala
@@ -0,0 +1,78 @@
+package scala.reflect
+package api
+
+/**
+ * This is an internal implementation class.
+ * @groupname TreeBuilders Tree Building
+ */
+private[reflect] trait BuildUtils { self: Universe =>
+
+ /** @group TreeBuilders */
+ val build: BuildApi
+
+ // this API abstracts away the functionality necessary for reification
+ // it's too gimmicky and unstructured to be exposed directly in the universe
+ // but we need it in a publicly available place for reification to work
+
+ /** @group TreeBuilders */
+ abstract class BuildApi {
+ /** Selects type symbol with given simple name `name` from the defined members of `owner`.
+ */
+ def selectType(owner: Symbol, name: String): TypeSymbol
+
+ /** Selects term symbol with given name and type from the defined members of prefix type
+ */
+ def selectTerm(owner: Symbol, name: String): TermSymbol
+
+ /** Selects overloaded method symbol with given name and index
+ */
+ def selectOverloadedMethod(owner: Symbol, name: String, index: Int): MethodSymbol
+
+ /** A fresh symbol with given name `name`, position `pos` and flags `flags` that has
+ * the current symbol as its owner.
+ */
+ def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: FlagSet, isClass: Boolean): Symbol
+
+ /** Create a fresh free term symbol.
+ * @param name the name of the free variable
+ * @param value the value of the free variable at runtime
+ * @param flags (optional) flags of the free variable
+ * @param origin debug information that tells where this symbol comes from
+ */
+ def newFreeTerm(name: String, value: => Any, flags: FlagSet = NoFlags, origin: String = null): FreeTermSymbol
+
+ /** Create a fresh free type symbol.
+ * @param name the name of the free variable
+ * @param flags (optional) flags of the free variable
+ * @param origin debug information that tells where this symbol comes from
+ */
+ def newFreeType(name: String, flags: FlagSet = NoFlags, origin: String = null): FreeTypeSymbol
+
+ /** Set symbol's type signature to given type.
+ * @return the symbol itself
+ */
+ def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S
+
+ /** Set symbol's annotations to given annotations `annots`.
+ */
+ def setAnnotations[S <: Symbol](sym: S, annots: List[Annotation]): S
+
+ def flagsFromBits(bits: Long): FlagSet
+
+ def emptyValDef: ValDef
+
+ def This(sym: Symbol): Tree
+
+ def Select(qualifier: Tree, sym: Symbol): Select
+
+ def Ident(sym: Symbol): Ident
+
+ def TypeTree(tp: Type): TypeTree
+
+ def thisPrefix(sym: Symbol): Type
+
+ def setType[T <: Tree](tree: T, tpe: Type): T
+
+ def setSymbol[T <: Tree](tree: T, sym: Symbol): T
+ }
+}
diff --git a/src/reflect/scala/reflect/api/Constants.scala b/src/reflect/scala/reflect/api/Constants.scala
new file mode 100644
index 0000000..f3d75c3
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Constants.scala
@@ -0,0 +1,223 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package api
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * According to the section 6.24 "Constant Expressions" of the Scala language specification,
+ * certain expressions (dubbed ''constant expressions'') can be evaluated by the Scala compiler at compile-time.
+ *
+ * [[scala.reflect.api.Constants#Constant]] instances represent certain kinds of these expressions
+ * (with values stored in the `value` field and its strongly-typed views named `booleanValue`, `intValue` etc.), namely:
+ * 1. Literals of primitive value classes (bytes, shorts, ints, longs, floats, doubles, chars, booleans and voids).
+ * 1. String literals.
+ * 1. References to classes (typically constructed with [[scala.Predef#classOf]]).
+ * 1. References to enumeration values.
+ *
+ * Such constants are used to represent literals in abstract syntax trees (the [[scala.reflect.api.Trees#Literal]] node)
+ * and literal arguments for Java class file annotations (the [[scala.reflect.api.Annotations#LiteralArgument]] class).
+ *
+ * === Example ===
+ *
+ * The `value` field deserves some explanation. Primitive and string values are represented as themselves, whereas
+ * references to classes and enums are a bit roundabout.
+ *
+ * Class references are represented as instances of [[scala.reflect.api.Types#Type]]
+ * (because when the Scala compiler processes a class reference, the underlying runtime class might not yet have been compiled).
+ * To convert such a reference to a runtime class, one should use the `runtimeClass` method of a mirror such as [[scala.reflect.api.Mirrors#RuntimeMirror]]
+ * (the simplest way to get such a mirror is using [[scala.reflect.runtime.package#currentMirror]]).
+ *
+ * Enumeration value references are represented as instances of [[scala.reflect.api.Symbols#Symbol]], which on JVM point to methods
+ * that return underlying enum values. To inspect an underlying enumeration or to get runtime value of a reference to an enum,
+ * one should use a [[scala.reflect.api.Mirrors#RuntimeMirror]] (the simplest way to get such a mirror is again [[scala.reflect.runtime.package#currentMirror]]).
+
+ * {{{
+ * enum JavaSimpleEnumeration { FOO, BAR }
+ *
+ * import java.lang.annotation.*;
+ * @Retention(RetentionPolicy.RUNTIME)
+ * @Target({ElementType.TYPE})
+ * public @interface JavaSimpleAnnotation {
+ * Class<?> classRef();
+ * JavaSimpleEnumeration enumRef();
+ * }
+ *
+ * @JavaSimpleAnnotation(
+ * classRef = JavaAnnottee.class,
+ * enumRef = JavaSimpleEnumeration.BAR
+ * )
+ * public class JavaAnnottee {}
+ * }}}
+ * {{{
+ * import scala.reflect.runtime.universe._
+ * import scala.reflect.runtime.{currentMirror => cm}
+ *
+ * object Test extends App {
+ * val jann = typeOf[JavaAnnottee].typeSymbol.annotations(0).javaArgs
+ * def jarg(name: String) = jann(newTermName(name)).asInstanceOf[LiteralArgument].value
+ *
+ * val classRef = jarg("classRef").typeValue
+ * println(showRaw(classRef)) // TypeRef(ThisType(<empty>), JavaAnnottee, List())
+ * println(cm.runtimeClass(classRef)) // class JavaAnnottee
+ *
+ * val enumRef = jarg("enumRef").symbolValue
+ * println(enumRef) // value BAR
+ *
+ * val siblings = enumRef.owner.typeSignature.declarations
+ * val enumValues = siblings.filter(sym => sym.isVal && sym.isPublic)
+ * println(enumValues) // Scope{
+ * // final val FOO: JavaSimpleEnumeration;
+ * // final val BAR: JavaSimpleEnumeration
+ * // }
+ *
+ * // doesn't work because of https://issues.scala-lang.org/browse/SI-6459
+ * // val enumValue = mirror.reflectField(enumRef.asTerm).get
+ * val enumClass = cm.runtimeClass(enumRef.owner.asClass)
+ * val enumValue = enumClass.getDeclaredField(enumRef.name.toString).get(null)
+ * println(enumValue) // BAR
+ * }
+ * }}}
+ *
+ * @contentDiagram hideNodes "*Api"
+ * @group ReflectionAPI
+ */
+trait Constants {
+ self: Universe =>
+
+ /**
+ * This "virtual" case class represents the reflection interface for literal expressions which can not be further
+ * broken down or evaluated, such as "true", "0", "classOf[List]". Such values become parts of the Scala abstract
+ * syntax tree representing the program. The constants
+ * correspond to section 6.24 "Constant Expressions" of the
+ * [[http://www.scala-lang.org/docu/files/ScalaReference.pdf Scala language specification]].
+ *
+ * Such constants are used to represent literals in abstract syntax trees (the [[scala.reflect.api.Trees#Literal]] node)
+ * and literal arguments for Java class file annotations (the [[scala.reflect.api.Annotations#LiteralArgument]] class).
+ *
+ * Constants can be matched against and can be constructed directly, as if they were case classes:
+ * {{{
+ * assert(Constant(true).value == true)
+ * Constant(true) match {
+ * case Constant(s: String) => println("A string: " + s)
+ * case Constant(b: Boolean) => println("A boolean value: " + b)
+ * case Constant(x) => println("Something else: " + x)
+ * }
+ * }}}
+ *
+ * `Constant` instances can wrap certain kinds of these expressions:
+ * 1. Literals of primitive value classes ([[scala.Byte `Byte`]], [[scala.Short `Short`]], [[scala.Int `Int`]], [[scala.Long `Long`]], [[scala.Float `Float`]], [[scala.Double `Double`]], [[scala.Char `Char`]], [[scala.Boolean `Boolean`]] and [[scala.Unit `Unit`]]) - represented directly as the corresponding type
+ * 1. String literals - represented as instances of the `String`.
+ * 1. References to classes, typically constructed with [[scala.Predef#classOf]] - represented as [[scala.reflect.api.Types#Type types]].
+ * 1. References to enumeration values - represented as [[scala.reflect.api.Symbols#Symbol symbols]].
+ *
+ * Class references are represented as instances of [[scala.reflect.api.Types#Type]]
+ * (because when the Scala compiler processes a class reference, the underlying runtime class might not yet have
+ * been compiled). To convert such a reference to a runtime class, one should use the [[scala.reflect.api.Mirrors#RuntimeMirror#runtimeClass `runtimeClass`]] method of a
+ * mirror such as [[scala.reflect.api.Mirrors#RuntimeMirror `RuntimeMirror`]] (the simplest way to get such a mirror is using
+ * [[scala.reflect.runtime#currentMirror `scala.reflect.runtime.currentMirror`]]).
+ *
+ * Enumeration value references are represented as instances of [[scala.reflect.api.Symbols#Symbol]], which on JVM point to methods
+ * that return underlying enum values. To inspect an underlying enumeration or to get runtime value of a reference to an enum,
+ * one should use a [[scala.reflect.api.Mirrors#RuntimeMirror]] (the simplest way to get such a mirror is again [[scala.reflect.runtime.package#currentMirror]]).
+ *
+ * Usage example:
+ * {{{
+ * enum JavaSimpleEnumeration { FOO, BAR }
+ *
+ * import java.lang.annotation.*;
+ * @Retention(RetentionPolicy.RUNTIME)
+ * @Target({ElementType.TYPE})
+ * public @interface JavaSimpleAnnotation {
+ * Class<?> classRef();
+ * JavaSimpleEnumeration enumRef();
+ * }
+ *
+ * @JavaSimpleAnnotation(
+ * classRef = JavaAnnottee.class,
+ * enumRef = JavaSimpleEnumeration.BAR
+ * )
+ * public class JavaAnnottee {}
+ * }}}
+ * {{{
+ * import scala.reflect.runtime.universe._
+ * import scala.reflect.runtime.{currentMirror => cm}
+ *
+ * object Test extends App {
+ * val jann = typeOf[JavaAnnottee].typeSymbol.annotations(0).javaArgs
+ * def jarg(name: String) = jann(newTermName(name)) match {
+ * // Constant is always wrapped into a Literal or LiteralArgument tree node
+ * case LiteralArgument(ct: Constant) => value
+ * case _ => sys.error("Not a constant")
+ * }
+ *
+ * val classRef = jarg("classRef").value.asInstanceOf[Type]
+ * // ideally one should match instead of casting
+ * println(showRaw(classRef)) // TypeRef(ThisType(<empty>), JavaAnnottee, List())
+ * println(cm.runtimeClass(classRef)) // class JavaAnnottee
+ *
+ * val enumRef = jarg("enumRef").value.asInstanceOf[Symbol]
+ * // ideally one should match instead of casting
+ * println(enumRef) // value BAR
+ *
+ * val siblings = enumRef.owner.typeSignature.declarations
+ * val enumValues = siblings.filter(sym => sym.isVal && sym.isPublic)
+ * println(enumValues) // Scope{
+ * // final val FOO: JavaSimpleEnumeration;
+ * // final val BAR: JavaSimpleEnumeration
+ * // }
+ *
+ * // doesn't work because of https://issues.scala-lang.org/browse/SI-6459
+ * // val enumValue = mirror.reflectField(enumRef.asTerm).get
+ * val enumClass = cm.runtimeClass(enumRef.owner.asClass)
+ * val enumValue = enumClass.getDeclaredField(enumRef.name.toString).get(null)
+ * println(enumValue) // BAR
+ * }
+ * }}}
+ * @template
+ * @group Constants
+ */
+ type Constant >: Null <: AnyRef with ConstantApi
+
+ /** A tag that preserves the identity of the `Constant` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ConstantTag: ClassTag[Constant]
+
+ /** The constructor/extractor for `Constant` instances.
+ * @group Extractors
+ */
+ val Constant: ConstantExtractor
+
+ /** An extractor class to create and pattern match with syntax `Constant(value)`
+ * where `value` is the Scala value of the constant.
+ * @group Extractors
+ */
+ abstract class ConstantExtractor {
+ /** A factory method that produces [[Constant `Constant`]] instances.
+ *
+ * Notice that not any value can be passed to a constant: it must be either a primitive, a `String`, a
+ * [[scala.reflect.api.Types#Type type]] or a [[scala.reflect.api.Symbols#Symbol symbol]].
+ * See [[Constant the `Constant` class]] for more information.
+ */
+ def apply(value: Any): Constant
+ /** An extractor that enables writing pattern matches against the [[Constant `Constant`]] class. */
+ def unapply(arg: Constant): Option[Any]
+ }
+
+ /** The API of [[Constant]] instances.
+ * @group API
+ */
+ abstract class ConstantApi {
+ /** Payload of the constant, that can be accessed directly or pattern matched against. */
+ val value: Any
+
+ /** Scala type that describes the constant. It is generated automatically based on the type of the value. */
+ def tpe: Type
+ }
+}
diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala
new file mode 100644
index 0000000..2ba18a8
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Exprs.scala
@@ -0,0 +1,177 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package api
+
+import scala.reflect.runtime.{universe => ru}
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A trait that defines strongly-typed tree wrappers and operations on them for use in Scala Reflection.
+ *
+ * `Expr` wraps an abstract syntax tree ([[scala.reflect.api.Trees#Tree]]) and tags it with its type ([[scala.reflect.api.Types#Type]]).
+ *
+ * Usually `Expr`s are created via [[scala.reflect.api.Universe#reify]], in which case a compiler
+ * produces a [[scala.reflect.api.TreeCreator]] for the provided expression and also
+ * creates a complementary [[scala.reflect.api.TypeTags#WeakTypeTag]] that corresponds to the type of that expression.
+ *
+ * `Expr`s can also be created manually via the `Expr` companion object, but then the burden of providing a `TreeCreator` lies on the programmer.
+ * Compile-time reflection via macros, as described in [[scala.reflect.macros.Aliases]], provides an easier way to instantiate exprs manually.
+ * Manual creation, however, is very rarely needed when working with runtime reflection.
+ *
+ * `Expr` can be migrated from one mirror to another by using the `in` method. Migration means that all symbolic references
+ * to classes/objects/packages in the expression are re-resolved within the new mirror
+ * (typically using that mirror's classloader). The default universe of an `Expr` is typically
+ * [[scala.reflect.runtime#universe]], the default mirror is typically [[scala.reflect.runtime#currentMirror]].
+ *
+ * @group ReflectionAPI
+ */
+trait Exprs { self: Universe =>
+
+ /** Expr wraps an abstract syntax tree and tags it with its type.
+ * The main source of information about exprs is the [[scala.reflect.api.Exprs]] page.
+ * @group Expressions
+ */
+ trait Expr[+T] extends Equals with Serializable {
+ /**
+ * Underlying mirror of this expr.
+ */
+ val mirror: Mirror
+
+ /**
+ * Migrates the expression into another mirror, jumping into a different universe if necessary.
+ */
+ def in[U <: Universe with Singleton](otherMirror: scala.reflect.api.Mirror[U]): U # Expr[T]
+
+ /**
+ * The Scala abstract syntax tree representing the wrapped expression.
+ */
+ def tree: Tree
+
+ /**
+ * Type of the wrapped expression tree as provided during creation.
+ *
+ * When exprs are created by the compiler, `staticType` represents
+ * a statically known type of the tree as calculated at that point by the compiler.
+ */
+ def staticType: Type
+
+ /**
+ * Type of the wrapped expression tree as found in the underlying tree.
+ */
+ def actualType: Type
+
+ /**
+ * A dummy method to mark expression splicing in reification.
+ *
+ * It should only be used within a `reify` call, which eliminates the `splice` call and embeds
+ * the wrapped tree into the reified surrounding expression.
+ * If used alone `splice` throws an exception when called at runtime.
+ *
+ * If you want to use an Expr in reification of some Scala code, you need to splice it in.
+ * For an expr of type `Expr[T]`, where `T` has a method `foo`, the following code
+ * {{{
+ * reify{ expr.splice.foo }
+ * }}}
+ * uses splice to turn an expr of type Expr[T] into a value of type T in the context of `reify`.
+ *
+ * It is equivalent to
+ * {{{
+ * Select( expr.tree, newTermName("foo") )
+ * }}}
+ *
+ * The following example code however does not compile
+ * {{{
+ * reify{ expr.foo }
+ * }}}
+ * because expr of type Expr[T] itself does not have a method foo.
+ */
+ // @compileTimeOnly("Cannot use splice outside reify")
+ def splice: T
+
+ /**
+ * A dummy value to denote cross-stage path-dependent type dependencies.
+ *
+ * For example for the following macro definition:
+ * {{{
+ * class X { type T }
+ * object Macros { def foo(x: X): x.T = macro Impls.foo_impl }
+ * }}}
+ *
+ * The corresponding macro implementation should have the following signature (note how the return type denotes path-dependency on x):
+ * {{{
+ * object Impls { def foo_impl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ... }
+ * }}}
+ */
+ // @compileTimeOnly("Cannot use value except for signatures of macro implementations")
+ val value: T
+
+ override def canEqual(x: Any) = x.isInstanceOf[Expr[_]]
+
+ override def equals(x: Any) = x.isInstanceOf[Expr[_]] && this.mirror == x.asInstanceOf[Expr[_]].mirror && this.tree == x.asInstanceOf[Expr[_]].tree
+
+ override def hashCode = mirror.hashCode * 31 + tree.hashCode
+
+ override def toString = "Expr["+staticType+"]("+tree+")"
+ }
+
+ /**
+ * Constructor/Extractor for Expr.
+ *
+ * Can be useful, when having a tree and wanting to splice it in reify call,
+ * in which case the tree first needs to be wrapped in an expr.
+
+ * The main source of information about exprs is the [[scala.reflect.api.Exprs]] page.
+ * @group Expressions
+ */
+ object Expr {
+ def apply[T: WeakTypeTag](mirror: scala.reflect.api.Mirror[self.type], treec: TreeCreator): Expr[T] = new ExprImpl[T](mirror.asInstanceOf[Mirror], treec)
+ def unapply[T](expr: Expr[T]): Option[Tree] = Some(expr.tree)
+ }
+
+ private class ExprImpl[+T: WeakTypeTag](val mirror: Mirror, val treec: TreeCreator) extends Expr[T] {
+ def in[U <: Universe with Singleton](otherMirror: scala.reflect.api.Mirror[U]): U # Expr[T] = {
+ val otherMirror1 = otherMirror.asInstanceOf[scala.reflect.api.Mirror[otherMirror.universe.type]]
+ val tag1 = (implicitly[WeakTypeTag[T]] in otherMirror).asInstanceOf[otherMirror.universe.WeakTypeTag[T]]
+ otherMirror.universe.Expr[T](otherMirror1, treec)(tag1)
+ }
+
+ lazy val tree: Tree = treec(mirror)
+ lazy val staticType: Type = implicitly[WeakTypeTag[T]].tpe
+ def actualType: Type = tree.tpe
+
+ def splice: T = throw new UnsupportedOperationException("""
+ |the function you're calling has not been spliced by the compiler.
+ |this means there is a cross-stage evaluation involved, and it needs to be invoked explicitly.
+ |if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
+ |import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.""".trim.stripMargin)
+ lazy val value: T = throw new UnsupportedOperationException("""
+ |the value you're calling is only meant to be used in cross-stage path-dependent types.
+ |if you want to splice the underlying expression, use `<your expr>.splice`.
+ |if you want to get a value of the underlying expression, add scala-compiler.jar to the classpath,
+ |import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.""".trim.stripMargin)
+
+ private def writeReplace(): AnyRef = new SerializedExpr(treec, implicitly[WeakTypeTag[T]].in(ru.rootMirror))
+ }
+}
+
+private[scala] class SerializedExpr(var treec: TreeCreator, var tag: ru.WeakTypeTag[_]) extends Serializable {
+ private def writeObject(out: java.io.ObjectOutputStream): Unit = {
+ out.writeObject(treec)
+ out.writeObject(tag)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream): Unit = {
+ treec = in.readObject().asInstanceOf[TreeCreator]
+ tag = in.readObject().asInstanceOf[ru.WeakTypeTag[_]]
+ }
+
+ private def readResolve(): AnyRef = {
+ import ru._
+ Expr(rootMirror, treec)(tag)
+ }
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala
new file mode 100644
index 0000000..4357aec
--- /dev/null
+++ b/src/reflect/scala/reflect/api/FlagSets.scala
@@ -0,0 +1,180 @@
+package scala.reflect
+package api
+
+import scala.language.implicitConversions
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * The trait that defines flag sets and operations on them.
+ *
+ * `Flag`s are used to provide modifiers for abstract syntax trees that represent definitions
+ * via the `flags` field of [[scala.reflect.api.Trees#Modifiers]]. Trees that accept modifiers are:
+ *
+ * - '''[[scala.reflect.api.Trees#ClassDef]]'''. Classes and traits.
+ * - '''[[scala.reflect.api.Trees#ModuleDef]]'''. Objects.
+ * - '''[[scala.reflect.api.Trees#ValDef]]'''. Vals, vars, parameters and self-type annotations.
+ * - '''[[scala.reflect.api.Trees#DefDef]]'''. Methods and constructors.
+ * - '''[[scala.reflect.api.Trees#TypeDef]]'''. Type aliases, abstract type members and type parameters.
+ *
+ * For example, to create a class named `C` one would write something like:
+ * {{{
+ * ClassDef(Modifiers(NoFlags), newTypeName("C"), Nil, ...)
+ * }}}
+ *
+ * Here, the flag set is empty.
+ *
+ * To make `C` private, one would write something like:
+ * {{{
+ * ClassDef(Modifiers(PRIVATE), newTypeName("C"), Nil, ...)
+ * }}}
+ *
+ * Flags can also be combined with the vertical bar operator (`|`).
+ * For example, a private final class is written something like:
+ * {{{
+ * ClassDef(Modifiers(PRIVATE | FINAL), newTypeName("C"), Nil, ...)
+ * }}}
+ *
+ * The list of all available flags is defined in [[scala.reflect.api.FlagSets#FlagValues]], available via
+ * [[scala.reflect.api.FlagSets#Flag]]. (Typically one writes a wildcard import for this, e.g.
+ * `import scala.reflect.runtime.universe.Flag._`).
+ *
+ * Definition trees are compiled down to symbols, so flags on modifiers of these trees are transformed into flags
+ * on the resulting symbols. Unlike trees, symbols don't expose flags, but rather provide `isXXX` test methods
+ * (e.g. `isFinal` can be used to test finality). These test methods might require an upcast with `asTerm`,
+ * `asType` or `asClass` as some flags only make sense for certain kinds of symbols.
+ *
+ * ''Of Note:'' This part of the Reflection API is being considered as a candidate for redesign. It is
+ * quite possible that in future releases of the reflection API, flag sets could be replaced with something else.
+ *
+ * For more details about `FlagSet`s and other aspects of Scala reflection, see the
+ * [[http://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]]
+ *
+ * @group ReflectionAPI
+ *
+ */
+trait FlagSets { self: Universe =>
+
+ /** An abstract type representing sets of flags (like private, final, etc.) that apply to definition trees and symbols
+ * @template
+ * @group Flags
+ */
+ type FlagSet
+
+ /** A tag that preserves the identity of the `FlagSet` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val FlagSetTag: ClassTag[FlagSet]
+
+ /** The API of `FlagSet` instances.
+ * The main source of information about flag sets is the [[scala.reflect.api.FlagSets]] page.
+ * @group Flags
+ */
+ trait FlagOps extends Any {
+ /** Produces a flag set that's a union of this flag set and the provided flag set. */
+ def | (right: FlagSet): FlagSet
+ }
+
+ /** The API of `FlagSet` instances.
+ * @group Flags
+ */
+ implicit def addFlagOps(left: FlagSet): FlagOps
+
+ /** A module that contains all possible values that can constitute flag sets.
+ * @group Flags
+ */
+ val Flag: FlagValues
+
+ // Q: I have a pretty flag. Can I put it here?
+ // A: Only if there's a tree that cannot be built without it.
+ // If you want to put a flag here so that it can be tested against,
+ // introduce an `isXXX` method in one of the `api.Symbols` classes instead.
+
+ /** All possible values that can constitute flag sets.
+ * The main source of information about flag sets is the [[scala.reflect.api.FlagSets]] page.
+ * @group Flags
+ */
+ trait FlagValues {
+
+ /** Flag indicating that tree represents a trait */
+ val TRAIT: FlagSet
+
+ /** Flag indicating that a tree is an interface (i.e. a trait which defines only abstract methods) */
+ val INTERFACE: FlagSet
+
+ /** Flag indicating that tree represents a mutable variable */
+ val MUTABLE: FlagSet
+
+ /** Flag indicating that tree represents a macro definition. */
+ val MACRO: FlagSet
+
+ /** Flag indicating that tree represents an abstract type, method, or value */
+ val DEFERRED: FlagSet
+
+ /** Flag indicating that tree represents an abstract class */
+ val ABSTRACT: FlagSet
+
+ /** Flag indicating that tree has `final` modifier set */
+ val FINAL: FlagSet
+
+ /** Flag indicating that tree has `sealed` modifier set */
+ val SEALED: FlagSet
+
+ /** Flag indicating that tree has `implicit` modifier set */
+ val IMPLICIT: FlagSet
+
+ /** Flag indicating that tree has `lazy` modifier set */
+ val LAZY: FlagSet
+
+ /** Flag indicating that tree has `override` modifier set */
+ val OVERRIDE: FlagSet
+
+ /** Flag indicating that tree has `private` modifier set */
+ val PRIVATE: FlagSet
+
+ /** Flag indicating that tree has `protected` modifier set */
+ val PROTECTED: FlagSet
+
+ /** Flag indicating that tree represents a member local to current class
+ * (i.e. private[this] or protected[this].
+ * This requires having either PRIVATE or PROTECTED set as well.
+ */
+ val LOCAL: FlagSet
+
+ /** Flag indicating that tree has `case` modifier set */
+ val CASE: FlagSet
+
+ /** Flag indicating that tree has `abstract` and `override` modifiers set */
+ val ABSOVERRIDE: FlagSet
+
+ /** Flag indicating that tree represents a by-name parameter */
+ val BYNAMEPARAM: FlagSet
+
+ /** Flag indicating that tree represents a class or parameter.
+ * Both type and value parameters carry the flag. */
+ val PARAM: FlagSet
+
+ /** Flag indicating that tree represents a covariant
+ * type parameter (marked with `+`). */
+ val COVARIANT: FlagSet
+
+ /** Flag indicating that tree represents a contravariant
+ * type parameter (marked with `-`). */
+ val CONTRAVARIANT: FlagSet
+
+ /** Flag indicating that tree represents a parameter that has a default value */
+ val DEFAULTPARAM: FlagSet
+
+ /** Flag indicating that tree represents an early definition */
+ val PRESUPER: FlagSet
+
+ /** Flag indicating that tree represents a variable or a member initialized to the default value */
+ val DEFAULTINIT: FlagSet
+ }
+
+ /** The empty set of flags
+ * @group Flags
+ */
+ val NoFlags: FlagSet
+}
diff --git a/src/reflect/scala/reflect/api/Importers.scala b/src/reflect/scala/reflect/api/Importers.scala
new file mode 100644
index 0000000..afc4f2f
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Importers.scala
@@ -0,0 +1,103 @@
+package scala.reflect
+package api
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * This trait provides support for importers, a facility to migrate reflection artifacts between universes.
+ * ''Note: this trait should typically be used only rarely.''
+ *
+ * Reflection artifacts, such as [[scala.reflect.api.Symbols Symbols]] and [[scala.reflect.api.Types Types]],
+ * are contained in [[scala.reflect.api.Universes Universe]]s. Typically all processing happens
+ * within a single `Universe` (e.g. a compile-time macro `Universe` or a runtime reflection `Universe`), but sometimes
+ * there is a need to migrate artifacts from one `Universe` to another. For example, runtime compilation works by
+ * importing runtime reflection trees into a runtime compiler universe, compiling the importees and exporting the
+ * result back.
+ *
+ * Reflection artifacts are firmly grounded in their `Universe`s, which is reflected by the fact that types of artifacts
+ * from different universes are not compatible. By using `Importer`s, however, they be imported from one universe
+ * into another. For example, to import `foo.bar.Baz` from the source `Universe` to the target `Universe`,
+ * an importer will first check whether the entire owner chain exists in the target `Universe`.
+ * If it does, then nothing else will be done. Otherwise, the importer will recreate the entire owner chain
+ * and will import the corresponding type signatures into the target `Universe`.
+ *
+ * Since importers match `Symbol` tables of the source and the target `Universe`s using plain string names,
+ * it is programmer's responsibility to make sure that imports don't distort semantics, e.g., that
+ * `foo.bar.Baz` in the source `Universe` means the same that `foo.bar.Baz` does in the target `Universe`.
+ *
+ * === Example ===
+ *
+ * Here's how one might implement a macro that performs compile-time evaluation of its argument
+ * by using a runtime compiler to compile and evaluate a tree that belongs to a compile-time compiler:
+ *
+ * {{{
+ * def staticEval[T](x: T) = macro staticEval[T]
+ *
+ * def staticEval[T](c: scala.reflect.macros.Context)(x: c.Expr[T]) = {
+ * // creates a runtime reflection universe to host runtime compilation
+ * import scala.reflect.runtime.{universe => ru}
+ * val mirror = ru.runtimeMirror(c.libraryClassLoader)
+ * import scala.tools.reflect.ToolBox
+ * val toolBox = mirror.mkToolBox()
+ *
+ * // runtime reflection universe and compile-time macro universe are different
+ * // therefore an importer is needed to bridge them
+ * // currently mkImporter requires a cast to correctly assign the path-dependent types
+ * val importer0 = ru.mkImporter(c.universe)
+ * val importer = importer0.asInstanceOf[ru.Importer { val from: c.universe.type }]
+ *
+ * // the created importer is used to turn a compiler tree into a runtime compiler tree
+ * // both compilers use the same classpath, so semantics remains intact
+ * val imported = importer.importTree(tree)
+ *
+ * // after the tree is imported, it can be evaluated as usual
+ * val tree = toolBox.resetAllAttrs(imported.duplicate)
+ * val valueOfX = toolBox.eval(imported).asInstanceOf[T]
+ * ...
+ * }
+ * }}}
+ *
+ * @group ReflectionAPI
+ */
+trait Importers { self: Universe =>
+
+ /** Creates an importer that moves reflection artifacts between universes.
+ * @group Importers
+ */
+ def mkImporter(from0: Universe): Importer { val from: from0.type }
+
+ /** The API of importers.
+ * The main source of information about importers is the [[scala.reflect.api.Importers]] page.
+ * @group Importers
+ */
+ trait Importer {
+ /** The source universe of reflection artifacts that will be processed.
+ * The target universe is universe that created this importer with `mkImporter`.
+ */
+ val from: Universe
+
+ /** An importer that works in reverse direction, namely:
+ * imports reflection artifacts from the current universe to the universe specified in `from`.
+ */
+ val reverse: from.Importer { val from: self.type }
+
+ /** In the current universe, locates or creates a symbol that corresponds to the provided symbol in the source universe.
+ * If necessary imports the owner chain, companions, type signature, annotations and attachments.
+ */
+ def importSymbol(sym: from.Symbol): Symbol
+
+ /** In the current universe, locates or creates a type that corresponds to the provided type in the source universe.
+ * If necessary imports the underlying symbols, annotations, scopes and trees.
+ */
+ def importType(tpe: from.Type): Type
+
+ /** In the current universe, creates a tree that corresponds to the provided tree in the source universe.
+ * If necessary imports the underlying symbols, types and attachments.
+ */
+ def importTree(tree: from.Tree): Tree
+
+ /** In the current universe, creates a position that corresponds to the provided position in the source universe.
+ */
+ def importPosition(pos: from.Position): Position
+ }
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/api/JavaMirrors.scala b/src/reflect/scala/reflect/api/JavaMirrors.scala
new file mode 100644
index 0000000..b678033
--- /dev/null
+++ b/src/reflect/scala/reflect/api/JavaMirrors.scala
@@ -0,0 +1,56 @@
+package scala.reflect
+package api
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A refinement of [[scala.reflect.api.Mirror]] for runtime reflection using JVM classloaders.
+ *
+ * This refinement equips mirrors with reflection capabilities for the JVM. `JavaMirror` can
+ * convert Scala reflection artifacts (symbols and types) into Java reflection artifacts (classes)
+ * and vice versa. It can also perform reflective invocations (getting/setting field values,
+ * calling methods, etc).
+ *
+ * For more information about `Mirrors`s, see [[scala.reflect.api.Mirrors]] or the
+ * [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Mirrors]]
+ *
+ * @groupname JavaMirrors Java Mirrors
+ * @group ReflectionAPI
+ */
+trait JavaMirrors { self: JavaUniverse =>
+
+ /** In runtime reflection universes, runtime representation of a class is `java.lang.Class`.
+ * @group JavaMirrors
+ */
+ type RuntimeClass = java.lang.Class[_]
+
+ /** In runtime reflection universes, mirrors are `JavaMirrors`.
+ * @group JavaMirrors
+ */
+ override type Mirror >: Null <: JavaMirror
+
+ /** A refinement of [[scala.reflect.api.Mirror]] for runtime reflection using JVM classloaders.
+ *
+ * With this upgrade, mirrors become capable of converting Scala reflection artifacts (symbols and types)
+ * into Java reflection artifacts (classes) and vice versa. Consequently, refined mirrors
+ * become capable of performing reflective invocations (getting/setting field values, calling methods, etc).
+ *
+ * For more information about `Mirrors`s, see [[scala.reflect.api.Mirrors]] or the
+ * [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Mirrors]]
+ *
+ * @group JavaMirrors
+ */
+ trait JavaMirror extends scala.reflect.api.Mirror[self.type] with RuntimeMirror {
+ val classLoader: ClassLoader
+ override def toString = s"JavaMirror with ${runtime.ReflectionUtils.show(classLoader)}"
+ }
+
+ /** Creates a runtime reflection mirror from a JVM classloader.
+ *
+ * For more information about `Mirrors`s, see [[scala.reflect.api.Mirrors]] or the
+ * [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Mirrors]]
+ *
+ * @group JavaMirrors
+ */
+ def runtimeMirror(cl: ClassLoader): Mirror
+}
diff --git a/src/reflect/scala/reflect/api/JavaUniverse.scala b/src/reflect/scala/reflect/api/JavaUniverse.scala
new file mode 100644
index 0000000..04d091e
--- /dev/null
+++ b/src/reflect/scala/reflect/api/JavaUniverse.scala
@@ -0,0 +1,46 @@
+package scala.reflect
+package api
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A refinement of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders.
+ *
+ * The refinement consists of an upgrade to the mirror API, which gets extended from [[scala.reflect.api.Mirror]]
+ * to [[scala.reflect.api.JavaMirrors#JavaMirror]].
+ *
+ * See the [[http://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]] for details on how to use runtime reflection.
+ *
+ * @groupname JavaUniverse Java Mirrors
+ * @group ReflectionAPI
+ *
+ * @contentDiagram hideNodes "*Api"
+ */
+trait JavaUniverse extends Universe with JavaMirrors { self =>
+
+ /* @group JavaUniverse */
+ override def typeTagToManifest[T: ClassTag](mirror0: Any, tag: Universe # TypeTag[T]): Manifest[T] = {
+ // SI-6239: make this conversion more precise
+ val mirror = mirror0.asInstanceOf[Mirror]
+ val runtimeClass = mirror.runtimeClass(tag.in(mirror).tpe)
+ Manifest.classType(runtimeClass).asInstanceOf[Manifest[T]]
+ }
+
+ /* @group JavaUniverse */
+ override def manifestToTypeTag[T](mirror0: Any, manifest: Manifest[T]): Universe # TypeTag[T] =
+ TypeTag(mirror0.asInstanceOf[Mirror], new TypeCreator {
+ def apply[U <: Universe with Singleton](mirror: scala.reflect.api.Mirror[U]): U # Type = {
+ mirror.universe match {
+ case ju: JavaUniverse =>
+ val jm = mirror.asInstanceOf[ju.Mirror]
+ val sym = jm.classSymbol(manifest.erasure)
+ val tpe =
+ if (manifest.typeArguments.isEmpty) sym.toType
+ else ju.appliedType(sym.toTypeConstructor, manifest.typeArguments map (targ => ju.manifestToTypeTag(jm, targ)) map (_.in(jm).tpe))
+ tpe.asInstanceOf[U # Type]
+ case u =>
+ u.manifestToTypeTag(mirror.asInstanceOf[u.Mirror], manifest).in(mirror).tpe
+ }
+ }
+ })
+}
diff --git a/src/reflect/scala/reflect/api/Mirror.scala b/src/reflect/scala/reflect/api/Mirror.scala
new file mode 100644
index 0000000..1223326
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Mirror.scala
@@ -0,0 +1,122 @@
+package scala.reflect
+package api
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * The base class for all mirrors.
+ *
+ * See [[scala.reflect.api.Mirrors]] or [[docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]]
+ * for a complete overview of `Mirror`s.
+ *
+ * @tparam U the type of the universe this mirror belongs to.
+ * @group ReflectionAPI
+ */
+// Note: Unlike most Scala reflection artifact classes, `Mirror` is not defined as an inner class,
+// so that it can be referenced from outside. For example, [[scala.reflect.api.TypeCreator]] and [[scala.reflect.api.TreeCreator]]
+// reference `Mirror` and also need to be defined outside the cake as they are used by type tags, which can be migrated between
+// different universes and consequently cannot be bound to a fixed one.
+abstract class Mirror[U <: Universe with Singleton] {
+ /** The universe this mirror belongs to.
+ * @group Mirror
+ */
+ val universe: U
+
+ /** The class symbol of the `_root_` package
+ * @group Mirror
+ */
+ def RootClass: U#ClassSymbol
+
+ /** The module symbol of the `_root_` package
+ * @group Mirror
+ */
+ def RootPackage: U#ModuleSymbol
+
+ /** The module class symbol of the default (unnamed) package
+ * @group Mirror
+ */
+ def EmptyPackageClass: U#ClassSymbol
+
+ /** The module symbol of the default (unnamed) package
+ * @group Mirror
+ */
+ def EmptyPackage: U#ModuleSymbol
+
+ /** The symbol corresponding to the globally accessible class with the
+ * given fully qualified name `fullName`.
+ *
+ * If the name points to a type alias, it's recursively dealiased and its target is returned.
+ * If you need a symbol that corresponds to the type alias itself, load it directly from the package class:
+ *
+ * scala> cm.staticClass("scala.List")
+ * res0: reflect.runtime.universe.ClassSymbol = class List
+ *
+ * scala> res0.fullName
+ * res1: String = scala.collection.immutable.List
+ *
+ * scala> cm.staticPackage("scala")
+ * res2: reflect.runtime.universe.ModuleSymbol = package scala
+ *
+ * scala> res2.moduleClass.typeSignature member newTypeName("List")
+ * res3: reflect.runtime.universe.Symbol = type List
+ *
+ * scala> res3.fullName
+ * res4: String = scala.List
+ *
+ * To be consistent with Scala name resolution rules, in case of ambiguity between
+ * a package and an object, the object is never been considered.
+ *
+ * For example for the following code:
+ *
+ * package foo {
+ * class B
+ * }
+ *
+ * object foo {
+ * class A
+ * class B
+ * }
+ *
+ * staticClass("foo.B") will resolve to the symbol corresponding to the class B declared in the package foo, and
+ * staticClass("foo.A") will throw a MissingRequirementException (which is exactly what scalac would do if this
+ * fully qualified class name is written inside any package in a Scala program).
+ *
+ * In the example above, to load a symbol that corresponds to the class B declared in the object foo,
+ * use staticModule("foo") to load the module symbol and then navigate typeSignature.members of its moduleClass.
+ * @group Mirror
+ */
+ def staticClass(fullName: String): U#ClassSymbol
+
+ /** The symbol corresponding to the globally accessible object with the
+ * given fully qualified name `fullName`.
+ *
+ * To be consistent with Scala name resolution rules, in case of ambiguity between
+ * a package and an object, the object is never been considered.
+ *
+ * For example for the following code:
+ *
+ * package foo {
+ * object B
+ * }
+ *
+ * object foo {
+ * object A
+ * object B
+ * }
+ *
+ * staticModule("foo.B") will resolve to the symbol corresponding to the object B declared in the package foo, and
+ * staticModule("foo.A") will throw a MissingRequirementException (which is exactly what scalac would do if this
+ * fully qualified class name is written inside any package in a Scala program).
+ *
+ * In the example above, to load a symbol that corresponds to the object B declared in the object foo,
+ * use staticModule("foo") to load the module symbol and then navigate typeSignature.members of its moduleClass.
+ * @group Mirror
+ */
+ def staticModule(fullName: String): U#ModuleSymbol
+
+ /** The symbol corresponding to a package with the
+ * given fully qualified name `fullName`.
+ * @group Mirror
+ */
+ def staticPackage(fullName: String): U#ModuleSymbol
+}
diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala
new file mode 100644
index 0000000..76a7594
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Mirrors.scala
@@ -0,0 +1,504 @@
+package scala.reflect
+package api
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * This trait provides support for Mirrors in the Scala Reflection API.
+ *
+ * `Mirror`s are a central part of Scala Reflection. All information provided by
+ * reflection is made accessible through `Mirror`s. Depending on the type of information
+ * to be obtained, or the reflective action to be taken, different flavors of mirrors
+ * must be used. "Classloader" mirrors can be used to obtain representations of types
+ * and members. From a classloader `Mirror`, it's possible to obtain more specialized
+ * "invoker" `Mirror`s (the most commonly-used mirrors), which implement reflective
+ * invocations, such as method/constructor calls and field accesses.
+ *
+ * The two flavors of mirrors:
+ *
+ * <ul>
+ * <li>'''“Classloader” mirrors'''. These mirrors translate names to symbols
+ * (via methods `staticClass`/`staticModule`/`staticPackage`).</li>
+ * <li>'''"Invoker” mirrors'''. These mirrors implement reflective invocations
+ * (via methods `MethodMirror.apply`, `FieldMirror.get`, etc). These "invoker"
+ * mirrors are the types of mirrors that are most commonly used.</li>
+ * </ul>
+ *
+ * === Compile-time Mirrors ===
+ * Compile-time `Mirror`s make use of only classloader `Mirror`s to load `Symbol`s
+ * by name.
+ *
+ * The entry point to classloader `Mirror`s is via [[scala.reflect.macros.Context#mirror]].
+ * Typical methods which use classloader `Mirror`s include [[scala.reflect.api.Mirror#staticClass]],
+ * [[scala.reflect.api.Mirror#staticModule]], and [[scala.reflect.api.Mirror#staticPackage]]. For
+ * example:
+ * {{{
+ * import scala.reflect.macros.Context
+ *
+ * case class Location(filename: String, line: Int, column: Int)
+ *
+ * object Macros {
+ * def currentLocation: Location = macro impl
+ *
+ * def impl(c: Context): c.Expr[Location] = {
+ * import c.universe._
+ * val pos = c.macroApplication.pos
+ * val clsLocation = c.mirror.staticModule("Location") // get symbol of "Location" object
+ * c.Expr(Apply(Ident(clsLocation), List(Literal(Constant(pos.source.path)), Literal(Constant(pos.line)), Literal(Constant(pos.column)))))
+ * }
+ * }
+ * }}}
+ *
+ * ''Of Note:'' There are several high-level alternatives that one can use to avoid having to manually
+ * lookup symbols. For example, `typeOf[Location.type].termSymbol` (or `typeOf[Location].typeSymbol`
+ * if we needed a `ClassSymbol`), which are type safe since we don’t have to use `String`s to lookup
+ * the `Symbol`.
+ *
+ * === Runtime Mirrors ===
+ *
+ * Runtime `Mirror`s make use of both classloader and invoker `Mirror`s.
+ *
+ * The entry point to `Mirror`s for use at runtime is via `ru.runtimeMirror(<classloader>)`, where
+ * `ru` is [[scala.reflect.runtime.universe]].
+ *
+ * The result of a [[scala.reflect.api.JavaMirrors#runtimeMirror]] call is a classloader mirror,
+ * of type [[scala.reflect.api.Mirrors#ReflectiveMirror]], which can load symbols by names as
+ * discussed above (in the “Compile-time” section).
+ *
+ * A classloader mirror can create invoker mirrors, which include: [[scala.reflect.api.Mirrors#InstanceMirror]],
+ * [[scala.reflect.api.Mirrors#MethodMirror]], [[scala.reflect.api.Mirrors#FieldMirror]],
+ * [[scala.reflect.api.Mirrors#ClassMirror]] and [[scala.reflect.api.Mirrors#ModuleMirror]].
+ *
+ * Examples of how these two types of `Mirror`s interact are available below.
+ *
+ * === Types of Mirrors, Their Use Cases & Examples ===
+ *
+ * '''[[scala.reflect.api.Mirrors#ReflectiveMirror]]'''. Used for loading `Symbol`s by name, and
+ * as an entry point into invoker mirrors. Entry point: `val m = ru.runtimeMirror(<classloader>)`.
+ * Example:
+ * {{{
+ * scala> val ru = scala.reflect.runtime.universe
+ * ru: scala.reflect.api.JavaUniverse = ...
+ *
+ * scala> val m = ru.runtimeMirror(getClass.getClassLoader)
+ * m: reflect.runtime.universe.Mirror = JavaMirror ...
+ * }}}
+ *
+ * '''[[scala.reflect.api.Mirrors#InstanceMirror]]'''. Used for creating invoker `Mirror`s for methods
+ * and fields and for inner classes and inner objects (modules). Entry point: `val im = m.reflect(<value>)`.
+ * Example:
+ * {{{
+ * scala> class C { def x = 2 }
+ * defined class C
+ *
+ * scala> val im = m.reflect(new C)
+ * im: reflect.runtime.universe.InstanceMirror = instance mirror for C at 3442299e
+ * }}}
+ *
+ * '''[[scala.reflect.api.Mirrors#MethodMirror]]'''. Used for invoking instance methods (Scala only has
+ * instance methods-- methods of objects are instance methods of object instances, obtainable
+ * via `ModuleMirror.instance`). Entry point: `val mm = im.reflectMethod(<method symbol>)`.
+ * Example:
+ * {{{
+ * scala> val methodX = typeOf[C].declaration(newTermName("x")).asMethod
+ * methodX: reflect.runtime.universe.MethodSymbol = method x
+ *
+ * scala> val mm = im.reflectMethod(methodX)
+ * mm: reflect.runtime.universe.MethodMirror = method mirror for C.x: scala.Int (bound to C at 3442299e)
+ *
+ * scala> mm()
+ * res0: Any = 2
+ * }}}
+ *
+ * '''[[scala.reflect.api.Mirrors#FieldMirror]]'''. Used for getting/setting instance fields
+ * (Scala only has instance fields-- fields of objects are instance methods of object instances
+ * obtainable via ModuleMirror.instance). Entry point:
+ * `val fm = im.reflectField(<field or accessor symbol>)`.
+ * Example:
+ * {{{
+ * scala> class C { val x = 2; val y = 3 }
+ * defined class C
+ *
+ * scala> val m = ru.runtimeMirror(getClass.getClassLoader)
+ * m: reflect.runtime.universe.Mirror = JavaMirror ...
+ *
+ * scala> val im = m.reflect(new C)
+ * im: reflect.runtime.universe.InstanceMirror = instance mirror for C at 5f0c8ac1
+ *
+ * scala> val fieldX = typeOf[C].declaration(newTermName("x")).asTerm.accessed.asTerm
+ * fieldX: reflect.runtime.universe.TermSymbol = value x
+ * scala> val fmX = im.reflectField(fieldX)
+ * fmX: reflect.runtime.universe.FieldMirror = field mirror for C.x (bound to C at 5f0c8ac1)
+ *
+ * scala> fmX.get
+ * res0: Any = 2
+ *
+ * scala> fmX.set(3) // NOTE: can set an underlying value of an immutable field!
+ *
+ * scala> val fieldY = typeOf[C].declaration(newTermName("y")).asTerm.accessed.asTerm
+ * fieldY: reflect.runtime.universe.TermSymbol = variable y
+ *
+ * scala> val fmY = im.reflectField(fieldY)
+ * fmY: reflect.runtime.universe.FieldMirror = field mirror for C.y (bound to C at 5f0c8ac1)
+ *
+ * scala> fmY.get
+ * res1: Any = 3
+ *
+ * scala> fmY.set(4)
+ *
+ * scala> fmY.get
+ * res2: Any = 4
+ * }}}
+ *
+ * '''[[scala.reflect.api.Mirrors#ClassMirror]]'''. Used for creating invoker mirrors for constructors.
+ * Entry points: for ''static classes'' `val cm1 = m.reflectClass(<class symbol>)`,
+ * for ''inner classes'' `val cm2 = im.reflectClass(<class symbol>)`.
+ * Example:
+ * {{{
+ * scala> case class C(x: Int)
+ * defined class C
+ *
+ * scala> val m = ru.runtimeMirror(getClass.getClassLoader)
+ * m: reflect.runtime.universe.Mirror = JavaMirror ...
+ *
+ * scala> val classC = typeOf[C].typeSymbol.asClass
+ *
+ * classC: reflect.runtime.universe.Symbol = class C
+ *
+ * scala> val cm = m.reflectClass(classC)
+ * cm: reflect.runtime.universe.ClassMirror = class mirror for C (bound to null)
+ *
+ * scala> val ctorC = typeOf[C].declaration(ru.nme.CONSTRUCTOR).asMethod
+ * ctorC: reflect.runtime.universe.MethodSymbol = constructor C
+ *
+ * scala> val ctorm = cm.reflectConstructor(ctorC)
+ * ctorm: reflect.runtime.universe.MethodMirror = constructor mirror for C.<init>(x: scala.Int): C (bound to null)
+ *
+ * scala> ctorm(2)
+ * res0: Any = C(2)
+ * }}}
+ *
+ * '''[[scala.reflect.api.Mirrors#ModuleMirror]]'''. Used for getting singleton instances of objects.
+ * Entry points: for ''static objects (modules)'' `val mm1 = m.reflectModule(<module symbol>)`,
+ * for ''inner objects (modules)'' `val mm2 = im.reflectModule(<module symbol>)`.
+ * Example:
+ * {{{
+ * scala> object C { def x = 2 }
+ * defined module C
+ *
+ * scala> val m = ru.runtimeMirror(getClass.getClassLoader)
+ * m: reflect.runtime.universe.Mirror = JavaMirror ...
+ *
+ * scala> val objectC = typeOf[C.type].termSymbol.asModule
+ * objectC: reflect.runtime.universe.ModuleSymbol = object C
+ *
+ * scala> val mm = m.reflectModule(objectC)
+ * mm: reflect.runtime.universe.ModuleMirror = module mirror for C (bound to null)
+ *
+ * scala> val obj = mm.instance
+ * obj: Any = C$@1005ec04
+ * }}}
+ *
+ * For more information about `Mirrors`s, see the
+ * [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Mirrors]]
+ *
+ * @contentDiagram hideNodes "*Api"
+ * @group ReflectionAPI
+ */
+trait Mirrors { self: Universe =>
+
+ /** The base type of all mirrors of this universe.
+ *
+ * This abstract type conforms to the base interface for all mirrors defined in [[scala.reflect.api.Mirror]]
+ * and is gradually refined in specific universes (e.g. `Mirror` of a [[scala.reflect.api.JavaUniverse]] is capable of reflection).
+ * @group Mirrors
+ */
+ type Mirror >: Null <: scala.reflect.api.Mirror[self.type]
+
+ /** The root mirror of this universe. This mirror contains standard Scala classes and types such as `Any`, `AnyRef`, `AnyVal`,
+ * `Nothing`, `Null`, and all classes loaded from scala-library, which are shared across all mirrors within the enclosing universe.
+ * @group Mirrors
+ */
+ val rootMirror: Mirror
+
+ /** Abstracts the runtime representation of a class on the underlying platform.
+ * @group Mirrors
+ */
+ type RuntimeClass >: Null
+
+ // todo. an improvement might be having mirrors reproduce the structure of the reflection domain
+ // e.g. a ClassMirror could also have a list of fields, methods, constructors and so on
+ // read up more on the proposed design in "Reflecting Scala" by Y. Coppel
+
+ /** A mirror that reflects a runtime value.
+ * See [[scala.reflect.api.package the overview page]] for details on how to use runtime reflection.
+ * @group Mirrors
+ */
+ trait InstanceMirror {
+
+ /** The instance value reflected by this mirror */
+ def instance: Any
+
+ /** The symbol corresponding to the runtime class of the reflected instance */
+ def symbol: ClassSymbol
+
+ /** Reflects against a field symbol and returns a mirror
+ * that can be used to get and, if appropriate, set the value of the field.
+ *
+ * FieldMirrors are the only way to get at private[this] vals and vars and
+ * might be useful to inspect the data of underlying Java fields.
+ * For all other uses, it's better to go through the fields accessor.
+ *
+ * In particular, there should be no need to ever access a field mirror
+ * when reflecting on just the public members of a class or trait.
+ * Note also that only accessor MethodMirrors, but not FieldMirrors will accurately reflect overriding behavior.
+ *
+ * To get a field symbol by the name of the field you would like to reflect,
+ * use `<this mirror>.symbol.typeSignature.member(newTermName(<name of the field>)).asTerm.accessed`.
+ * For further information about member lookup refer to `Symbol.typeSignature`.
+ *
+ * The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
+ * It must be a member (declared or inherited) of the class of the instance underlying this mirror.
+ *
+ * The input symbol can represent either a field itself or one of the corresponding accessors
+ * (in all cases the resulting mirror will refer to the field symbol).
+ *
+ * If a field symbol doesn't correspond to a reflectable entity of the underlying platform,
+ * a `ScalaReflectionException` exception will be thrown. This might happen, for example, for primary constructor parameters.
+ * Typically they produce class fields, however, private parameters that aren't used outside the constructor
+ * remain plain parameters of a constructor method of the class.
+ */
+ def reflectField(field: TermSymbol): FieldMirror
+
+ /** Reflects against a method symbol and returns a mirror
+ * that can be used to invoke the method provided.
+ *
+ * To get a method symbol by the name of the method you would like to reflect,
+ * use `<this mirror>.symbol.typeSignature.member(newTermName(<name of the method>)).asMethod`.
+ * For further information about member lookup refer to `Symbol.typeSignature`.
+ *
+ * The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
+ * It must be a member (declared or inherited) of the instance underlying this mirror.
+ */
+ def reflectMethod(method: MethodSymbol): MethodMirror
+
+ /** Reflects against an inner class symbol and returns a mirror
+ * that can be used to create instances of the class, inspect its companion object or perform further reflections.
+ *
+ * To get a class symbol by the name of the class you would like to reflect,
+ * use `<this mirror>.symbol.typeSignature.member(newTypeName(<name of the class>)).asClass`.
+ * For further information about member lookup refer to `Symbol.typeSignature`.
+ *
+ * The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
+ * It must be a member (declared or inherited) of the instance underlying this mirror.
+ */
+ def reflectClass(cls: ClassSymbol): ClassMirror
+
+ /** Reflects against an inner module symbol and returns a mirror
+ * that can be used to get the instance of the object or inspect its companion class.
+ *
+ * To get a module symbol by the name of the object you would like to reflect,
+ * use `<this mirror>.symbol.typeSignature.member(newTermName(<name of the object>)).asModule`.
+ * For further information about member lookup refer to `Symbol.typeSignature`.
+ *
+ * The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
+ * It must be a member (declared or inherited) of the instance underlying this mirror.
+ */
+ def reflectModule(mod: ModuleSymbol): ModuleMirror
+ }
+
+ /** A mirror that reflects a field.
+ * See [[scala.reflect.api.package the overview page]] for details on how to use runtime reflection.
+ * @group Mirrors
+ */
+ trait FieldMirror {
+
+ /** The object containing the field */
+ def receiver: Any
+
+ /** The field symbol representing the field.
+ *
+ * In Scala `val` and `var` declarations are usually compiled down to a pair of
+ * a backing field and corresponding accessor/accessors, which means that a single
+ * declaration might correspond to up to three different symbols. Nevertheless
+ * the `FieldMirror.symbol` field always points to a backing field symbol.
+ */
+ def symbol: TermSymbol
+
+ /** Retrieves the value stored in the field.
+ *
+ * Scala reflection uses reflection capabilities of the underlying platform,
+ * so `FieldMirror.get` might throw platform-specific exceptions associated
+ * with getting a field or invoking a getter method of the field.
+ *
+ * If `symbol` represents a field of a base class with respect to the class of the receiver,
+ * and this base field is overridden in the class of the receiver, then this method will retrieve
+ * the value of the base field. To achieve overriding behavior, use reflectMethod on an accessor.
+ */
+ def get: Any
+
+ /** Updates the value stored in the field.
+ *
+ * If a field is immutable, a `ScalaReflectionException` will be thrown.
+ *
+ * Scala reflection uses reflection capabilities of the underlying platform,
+ * so `FieldMirror.set` might throw platform-specific exceptions associated
+ * with setting a field or invoking a setter method of the field.
+ *
+ * If `symbol` represents a field of a base class with respect to the class of the receiver,
+ * and this base field is overridden in the class of the receiver, then this method will set
+ * the value of the base field. To achieve overriding behavior, use reflectMethod on an accessor.
+ */
+ def set(value: Any): Unit
+ }
+
+ /** A mirror that reflects a method.
+ * See [[scala.reflect.api.package the overview page]] for details on how to use runtime reflection.
+ * @group Mirrors
+ */
+ trait MethodMirror {
+
+ /** The receiver object of the method */
+ def receiver: Any
+
+ /** The method symbol representing the method */
+ def symbol: MethodSymbol
+
+ /** The result of applying the method to the given arguments
+ *
+ * Scala reflection uses reflection capabilities of the underlying platform,
+ * so `MethodMirror.apply` might throw platform-specific exceptions associated
+ * with invoking the corresponding method or constructor.
+ */
+ def apply(args: Any*): Any
+ }
+
+ /** A mirror that reflects the instance or static parts of a runtime class.
+ * See [[scala.reflect.api.package the overview page]] for details on how to use runtime reflection.
+ * @group Mirrors
+ */
+ trait TemplateMirror {
+
+ /** True if the mirror represents the static part
+ * of a runtime class or the companion object of a Scala class.
+ * One has:
+ *
+ * this.isStatic == this.isInstanceOf[ModuleMirror]
+ * !this.isStatic == this.isInstanceOf[ClassMirror]
+ */
+ def isStatic: Boolean
+
+ /** The Scala symbol corresponding to the reflected runtime class or object */
+ def symbol: Symbol
+ }
+
+ /** A mirror that reflects a Scala object definition or the static parts of a runtime class.
+ * See [[scala.reflect.api.package the overview page]] for details on how to use runtime reflection.
+ * @group Mirrors
+ */
+ trait ModuleMirror extends TemplateMirror {
+
+ /** The Scala module symbol corresponding to the reflected object */
+ override def symbol: ModuleSymbol
+
+ /** If the reflected runtime class corresponds to a Scala object definition,
+ * returns the single instance representing that object.
+ * If this mirror reflects the static part of a runtime class, returns `null`.
+ */
+ def instance: Any
+ }
+
+ /** A mirror that reflects the instance parts of a runtime class.
+ * See [[scala.reflect.api.package the overview page]] for details on how to use runtime reflection.
+ * @group Mirrors
+ */
+ trait ClassMirror extends TemplateMirror {
+
+ /** The Scala class symbol corresponding to the reflected class */
+ override def symbol: ClassSymbol
+
+ /** Reflects against a constructor symbol and returns a mirror
+ * that can be used to invoke it and construct instances of this mirror's symbols.
+ *
+ * To get a constructor symbol you would like to reflect,
+ * use `<this mirror>.symbol.typeSignature.member(nme.CONSTRUCTOR).asMethod`.
+ * For further information about member lookup refer to `Symbol.typeSignature`.
+ *
+ * The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
+ * It must be a member (declared or inherited) of the class underlying this mirror.
+ */
+ def reflectConstructor(constructor: MethodSymbol): MethodMirror
+ }
+
+ /** A mirror that reflects instances and static classes.
+ * See [[scala.reflect.api.package the overview page]] for details on how to use runtime reflection.
+ * @group Mirrors
+ */
+ trait ReflectiveMirror extends scala.reflect.api.Mirror[Mirrors.this.type] {
+
+ /** A reflective mirror for the given object.
+ *
+ * Such a mirror can be used to further reflect against the members of the object
+ * to get/set fields, invoke methods and inspect inner classes and objects.
+ */
+ // we need a ClassTag here to preserve boxity of primitives
+ // the class tag lets us tell apart `mirror.reflect(2)` and `mirror.reflect(new Integer(2))`
+ def reflect[T: ClassTag](obj: T): InstanceMirror
+
+ /** Reflects against a static class symbol and returns a mirror
+ * that can be used to create instances of the class, inspect its companion object or perform further reflections.
+ *
+ * To get a class symbol by the name of the class you would like to reflect,
+ * use `<this mirror>.classSymbol(<runtime class loaded by its name>)`.
+ *
+ * The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
+ * It must be static, i.e. either top-level or nested within one or several static objects.
+ */
+ def reflectClass(cls: ClassSymbol): ClassMirror
+
+ /** Reflects against a static module symbol and returns a mirror
+ * that can be used to get the instance of the object or inspect its companion class.
+ *
+ * To get a module symbol by the name of its companion class you would like to reflect,
+ * use `<this mirror>.classSymbol(<runtime class loaded by its name>).companion.get`.
+ *
+ * The input symbol can be either private or non-private (Scala reflection transparently deals with visibility).
+ * It must be static, i.e. either top-level or nested within one or several static objects.
+ */
+ def reflectModule(mod: ModuleSymbol): ModuleMirror
+ }
+
+ /** The API of a mirror for a reflective universe.
+ * See [[scala.reflect.api.package the overview page]] for details on how to use runtime reflection.
+ * @group Mirrors
+ */
+ trait RuntimeMirror extends ReflectiveMirror { self =>
+
+ /** Maps a Scala type to the corresponding Java class object */
+ def runtimeClass(tpe: Type): RuntimeClass
+
+ /** Maps a Scala class symbol to the corresponding Java class object
+ * @throws ClassNotFoundException if there is no Java class
+ * corresponding to the given Scala class symbol.
+ * Note: If the Scala symbol is ArrayClass, a ClassNotFound exception is thrown
+ * because there is no unique Java class corresponding to a Scala generic array
+ */
+ def runtimeClass(cls: ClassSymbol): RuntimeClass
+
+ /** A class symbol for the specified runtime class.
+ * @return The class symbol for the runtime class in the current class loader.
+ * @throws java.lang.ClassNotFoundException if no class with that name exists
+ * @throws scala.reflect.internal.MissingRequirementError if no corresponding symbol exists
+ * to do: throws anything else?
+ */
+ def classSymbol(rtcls: RuntimeClass): ClassSymbol
+
+ /** A module symbol for the specified runtime class.
+ * @return The module symbol for the runtime class in the current class loader.
+ * @throws java.lang.ClassNotFoundException if no class with that name exists
+ * @throws scala.reflect.internal.MissingRequirementError if no corresponding symbol exists
+ * to do: throws anything else?
+ */
+ def moduleSymbol(rtcls: RuntimeClass): ModuleSymbol
+ }
+}
diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala
new file mode 100644
index 0000000..7c12f18
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Names.scala
@@ -0,0 +1,118 @@
+package scala.reflect
+package api
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * This trait defines `Name`s in Scala Reflection, and operations on them.
+ *
+ * Names are simple wrappers for strings. [[scala.reflect.api.Names#Name Name]] has two subtypes
+ * [[scala.reflect.api.Names#TermName TermName]] and [[scala.reflect.api.Names#TypeName TypeName]]
+ * which distinguish names of terms (like objects or members) and types. A term and a type of the
+ * same name can co-exist in an object.
+ *
+ * To search for the `map` method (which is a term) declared in the `List` class, one can do:
+ *
+ * {{{
+ * scala> typeOf[List[_]].member(newTermName("map"))
+ * res0: reflect.runtime.universe.Symbol = method map
+ * }}}
+ *
+ * To search for a type member, one can follow the same procedure, using `newTypeName` instead.
+ *
+ * For more information about creating and using `Name`s, see the [[http://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]]
+ *
+ * @contentDiagram hideNodes "*Api"
+ * @group ReflectionAPI
+ */
+trait Names {
+ /** An implicit conversion from String to TermName.
+ * Enables an alternative notation `"map": TermName` as opposed to `newTermName("map")`.
+ * @group Names
+ */
+ implicit def stringToTermName(s: String): TermName = newTermName(s)
+
+ /** An implicit conversion from String to TypeName.
+ * Enables an alternative notation `"List": TypeName` as opposed to `newTypeName("List")`.
+ * @group Names
+ */
+ implicit def stringToTypeName(s: String): TypeName = newTypeName(s)
+
+ /** The abstract type of names.
+ * @group Names
+ */
+ type Name >: Null <: NameApi
+
+ /** A tag that preserves the identity of the `Name` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val NameTag: ClassTag[Name]
+
+ /** The abstract type of names representing types.
+ * @group Names
+ */
+ type TypeName >: Null <: Name
+
+ /** A tag that preserves the identity of the `TypeName` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TypeNameTag: ClassTag[TypeName]
+
+ /** The abstract type of names representing terms.
+ * @group Names
+ */
+ type TermName >: Null <: Name
+
+ /** A tag that preserves the identity of the `TermName` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TermNameTag: ClassTag[TermName]
+
+ /** The API of Name instances.
+ * @group API
+ */
+ abstract class NameApi {
+ /** Checks whether the name is a term name */
+ def isTermName: Boolean
+
+ /** Checks whether the name is a type name */
+ def isTypeName: Boolean
+
+ /** Returns a term name that wraps the same string as `this` */
+ def toTermName: TermName
+
+ /** Returns a type name that wraps the same string as `this` */
+ def toTypeName: TypeName
+
+ /** Replaces all occurrences of \$op_names in this name by corresponding operator symbols.
+ * Example: `foo_\$plus\$eq` becomes `foo_+=`
+ */
+ def decoded: String
+
+ /** Replaces all occurrences of operator symbols in this name by corresponding \$op_names.
+ * Example: `foo_+=` becomes `foo_\$plus\$eq`.
+ */
+ def encoded: String
+
+ /** The decoded name, still represented as a name.
+ */
+ def decodedName: Name
+
+ /** The encoded name, still represented as a name.
+ */
+ def encodedName: Name
+ }
+
+ /** Create a new term name.
+ * @group Names
+ */
+ def newTermName(s: String): TermName
+
+ /** Creates a new type name.
+ * @group Names
+ */
+ def newTypeName(s: String): TypeName
+}
diff --git a/src/reflect/scala/reflect/api/Position.scala b/src/reflect/scala/reflect/api/Position.scala
new file mode 100644
index 0000000..63c6762
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Position.scala
@@ -0,0 +1,164 @@
+package scala.reflect
+package api
+
+import scala.reflect.macros.Attachments
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * Position tracks the origin of [[Symbols#Symbol symbols]] and [[Trees#Tree tree nodes]]. They are commonly used when
+ * displaying warnings and errors, to indicate the incorrect point in the program.
+ *
+ * <b>Please note that this trait may be refactored in future versions of the Scala reflection API.</b>
+ *
+ * For more information about `Position`s, see the [[http://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]]
+ *
+ * @groupname Common Commonly used methods
+ * @group ReflectionAPI
+ */
+trait Position extends Attachments {
+
+ /** @inheritdoc */
+ type Pos >: Null <: Position
+
+ /** Java file corresponding to the source file of this position.
+ *
+ * The return type is `scala.reflect.io.AbstractFile`, which belongs to an experimental part of Scala reflection.
+ * It should not be used unless you know what you are doing. In subsequent releases, this API will be refined
+ * and exposed as a part of scala.reflect.api.
+ *
+ * @group Common
+ */
+ def source: scala.reflect.internal.util.SourceFile
+
+ /** Is this position neither a NoPosition nor a FakePosition?
+ * If isDefined is true, offset and source are both defined.
+ * @group Common
+ */
+ def isDefined: Boolean
+
+ /** Is this position a range position? */
+ def isRange: Boolean
+
+ /** Is this position a transparent position? */
+ def isTransparent: Boolean
+
+ /** Is this position a non-transparent range position? */
+ def isOpaqueRange: Boolean
+
+ /** If opaque range, make this position transparent. */
+ def makeTransparent: Pos
+
+ /** The start of the position's range, error if not a range position. */
+ def start: Int
+
+ /** The start of the position's range, or point if not a range position. */
+ def startOrPoint: Int
+
+ /** The point (where the ^ is) of the position, which is easiest to access using the [[line]] and [[column]] values.
+ * The [[lineContent line content]] is also available.
+ * @group Common
+ */
+ def point: Int
+
+ /** The point (where the ^ is) of the position, or else `default` if undefined.
+ * @group Common
+ */
+ def pointOrElse(default: Int): Int
+
+ /** The end of the position's range, error if not a range position.
+ */
+ def end: Int
+
+ /** The end of the position's range, or point if not a range position.
+ */
+ def endOrPoint: Int
+
+ /** The same position with a different start value (if a range).
+ */
+ def withStart(off: Int): Pos
+
+ /** The same position with a different end value (if a range).
+ */
+ def withEnd(off: Int): Pos
+
+ /** The same position with a different point value (if a range or offset).
+ */
+ def withPoint(off: Int): Pos
+
+ /** If this is a range, the union with the other range, with the point of this position.
+ * Otherwise, this position
+ */
+ def union(pos: Pos): Pos
+
+ /** If this is a range position, the offset position of its point.
+ * Otherwise the position itself
+ */
+ def focus: Pos
+
+ /** If this is a range position, the offset position of its start.
+ * Otherwise the position itself
+ */
+ def focusStart: Pos
+
+ /** If this is a range position, the offset position of its end.
+ * Otherwise the position itself
+ */
+ def focusEnd: Pos
+
+ /** Does this position include the given position `pos`?
+ * This holds if `this` is a range position and its range [start..end]
+ * is the same or covers the range of the given position, which may or may not be a range position.
+ */
+ def includes(pos: Pos): Boolean
+
+ /** Does this position properly include the given position `pos` ("properly" meaning their
+ * ranges are not the same)?
+ */
+ def properlyIncludes(pos: Pos): Boolean
+
+ /** Does this position precede that position?
+ * This holds if both positions are defined and the end point of this position
+ * is not larger than the start point of the given position.
+ */
+ def precedes(pos: Pos): Boolean
+
+ /** Does this position properly precede the given position `pos` ("properly" meaning their ranges
+ * do not share a common point).
+ */
+ def properlyPrecedes(pos: Pos): Boolean
+
+ /** Does this position overlap with that position?
+ * This holds if both positions are ranges and there is an interval of
+ * non-zero length that is shared by both position ranges.
+ */
+ def overlaps(pos: Pos): Boolean
+
+ /** Does this position cover the same range as that position?
+ * Holds only if both positions are ranges
+ */
+ def sameRange(pos: Pos): Boolean
+
+ /** The position indicates a [[column `column`]] and the `line` in the source file.
+ * @group Common
+ */
+ def line: Int
+
+ /** The position indicates a `column` and the [[line `line`]] in the source file.
+ * @group Common
+ */
+ def column: Int
+
+ /** Convert this to a position around `point` that spans a single source line
+ */
+ def toSingleLine: Pos
+
+ /** The content of the line this Position refers to.
+ * @group Common
+ */
+ def lineContent: String
+
+ /** Show a textual representation of the position.
+ */
+ def show: String
+}
diff --git a/src/reflect/scala/reflect/api/Positions.scala b/src/reflect/scala/reflect/api/Positions.scala
new file mode 100644
index 0000000..87f00fd
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Positions.scala
@@ -0,0 +1,54 @@
+package scala.reflect
+package api
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * This trait defines the concept of positions and operations on them.
+ *
+ * @see [[scala.reflect.api.Position]]
+ *
+ * @contentDiagram hideNodes "*Api"
+ * @group ReflectionAPI
+ */
+trait Positions {
+ self: Universe =>
+
+ /** Defines a universe-specific notion of positions.
+ * The main documentation entry about positions is located at [[scala.reflect.api.Position]].
+ * @group Positions
+ */
+ type Position >: Null <: scala.reflect.api.Position { type Pos = Position }
+
+ /** A tag that preserves the identity of the `Position` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val PositionTag: ClassTag[Position]
+
+ /** A special "missing" position.
+ * @group Positions
+ */
+ val NoPosition: Position
+
+ /** Assigns a given position to all position-less nodes of a given AST.
+ * @group Positions
+ */
+ def atPos[T <: Tree](pos: Position)(tree: T): T
+
+ /** A position that wraps a set of trees.
+ * The point of the wrapping position is the point of the default position.
+ * If some of the trees are ranges, returns a range position enclosing all ranges
+ * Otherwise returns default position.
+ * @group Positions
+ */
+ def wrappingPos(default: Position, trees: List[Tree]): Position
+
+ /** A position that wraps the non-empty set of trees.
+ * The point of the wrapping position is the point of the first tree's position.
+ * If some of the trees are non-synthetic, returns a range position enclosing the non-synthetic trees.
+ * Otherwise returns a synthetic offset position to point.
+ * @group Positions
+ */
+ def wrappingPos(trees: List[Tree]): Position
+}
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
new file mode 100644
index 0000000..85ddcc6
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -0,0 +1,229 @@
+package scala.reflect
+package api
+
+import java.io.{ PrintWriter, StringWriter }
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * Utilities for nicely printing [[scala.reflect.api.Trees]] and [[scala.reflect.api.Types]].
+ *
+ * === Printing Trees ===
+ * The method `show` displays the "prettified" representation of reflection artifacts.
+ * This representation provides one with the desugared Java representation of Scala code.
+ * For example:
+ *
+ * {{{
+ * scala> import scala.reflect.runtime.universe._
+ * import scala.reflect.runtime.universe._
+ *
+ * scala> def tree = reify{ final class C { def x = 2 } }.tree
+ * tree: reflect.runtime.universe.Tree
+ *
+ * scala> show(tree)
+ * res0: String =
+ * {
+ * final class C extends AnyRef {
+ * def <init>() = {
+ * super.<init>();
+ * ()
+ * };
+ * def x = 2
+ * };
+ * ()
+ * }
+ * }}}
+ *
+ * The method `showRaw` displays internal structure of a given reflection object
+ * as a Scala abstract syntax tree (AST), the representation that the Scala typechecker
+ * operates on.
+ *
+ * Note that while this representation appears to generate correct trees that one
+ * might think would be possible to use in a macro implementation, this is not usually
+ * the case. Symbols aren't fully represented (only their names are). Thus, this method
+ * is best-suited for simply inspecting ASTs given some valid Scala code.
+ * {{{
+ * scala> showRaw(tree)
+ * res1: String = Block(List(
+ * ClassDef(Modifiers(FINAL), newTypeName("C"), List(), Template(
+ * List(Ident(newTypeName("AnyRef"))),
+ * emptyValDef,
+ * List(
+ * DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(),
+ * Block(List(
+ * Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())),
+ * Literal(Constant(())))),
+ * DefDef(Modifiers(), newTermName("x"), List(), List(), TypeTree(),
+ * Literal(Constant(2))))))),
+ * Literal(Constant(())))
+ * }}}
+ *
+ * The method `showRaw` can also print [[scala.reflect.api.Types]] next to the artifacts
+ * being inspected
+ * {{{
+ * scala> import scala.tools.reflect.ToolBox // requires scala-compiler.jar
+ * import scala.tools.reflect.ToolBox
+ *
+ * scala> import scala.reflect.runtime.{currentMirror => cm}
+ * import scala.reflect.runtime.{currentMirror=>cm}
+ *
+ * scala> showRaw(cm.mkToolBox().typeCheck(tree), printTypes = true)
+ * res2: String = Block[1](List(
+ * ClassDef[2](Modifiers(FINAL), newTypeName("C"), List(), Template[3](
+ * List(Ident[4](newTypeName("AnyRef"))),
+ * emptyValDef,
+ * List(
+ * DefDef[2](Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree[3](),
+ * Block[1](List(
+ * Apply[4](Select[5](Super[6](This[3](newTypeName("C")), tpnme.EMPTY), ...))),
+ * Literal[1](Constant(())))),
+ * DefDef[2](Modifiers(), newTermName("x"), List(), List(), TypeTree[7](),
+ * Literal[8](Constant(2))))))),
+ * Literal[1](Constant(())))
+ * [1] TypeRef(ThisType(scala), scala.Unit, List())
+ * [2] NoType
+ * [3] TypeRef(NoPrefix, newTypeName("C"), List())
+ * [4] TypeRef(ThisType(java.lang), java.lang.Object, List())
+ * [5] MethodType(List(), TypeRef(ThisType(java.lang), java.lang.Object, List()))
+ * [6] SuperType(ThisType(newTypeName("C")), TypeRef(... java.lang.Object ...))
+ * [7] TypeRef(ThisType(scala), scala.Int, List())
+ * [8] ConstantType(Constant(2))
+ * }}}
+ *
+ * === Printing Types ===
+ *
+ * The method `show`
+ * {{{
+ * scala> import scala.reflect.runtime.universe._
+ * import scala.reflect.runtime.universe._
+ *
+ * scala> def tpe = typeOf[{ def x: Int; val y: List[Int] }]
+ * tpe: reflect.runtime.universe.Type
+ *
+ * scala> show(tpe)
+ * res0: String = scala.AnyRef{def x: Int; val y: scala.List[Int]}
+ * }}}
+ *
+ * Like the method `showRaw` for [[scala.reflect.api.Trees]], `showRaw`
+ * for [[scala.reflect.api.Types]] provides a visualization of the Scala
+ * AST operated on by the Scala typechecker.
+ * {{{
+ * // showRaw has already been discussed above
+ * scala> showRaw(tpe)
+ * res1: String = RefinedType(
+ * List(TypeRef(ThisType(scala), newTypeName("AnyRef"), List())),
+ * Scope(
+ * newTermName("x"),
+ * newTermName("y")))
+ * }}}
+ *
+ * `printIds` and/or `printKinds` can additionally be supplied as arguments in a call to
+ * `showRaw` which additionally shows the unique identifiers of symbols.
+ *
+ * {{{
+ * scala> showRaw(tpe, printIds = true, printKinds = true)
+ * res2: String = RefinedType(
+ * List(TypeRef(ThisType(scala#2043#PK), newTypeName("AnyRef")#691#TPE, List())),
+ * Scope(
+ * newTermName("x")#2540#METH,
+ * newTermName("y")#2541#GET))
+ * }}}
+ *
+ * For more details about `Printer`s and other aspects of Scala reflection, see the
+ * [[http://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]]
+ *
+ * @group ReflectionAPI
+ */
+trait Printers { self: Universe =>
+
+ /** @group Printers */
+ protected trait TreePrinter {
+ def print(args: Any*)
+ protected var printTypes = false
+ protected var printIds = false
+ protected var printKinds = false
+ protected var printMirrors = false
+ def withTypes: this.type = { printTypes = true; this }
+ def withoutTypes: this.type = { printTypes = false; this }
+ def withIds: this.type = { printIds = true; this }
+ def withoutIds: this.type = { printIds = false; this }
+ def withKinds: this.type = { printKinds = true; this }
+ def withoutKinds: this.type = { printKinds = false; this }
+ def withMirrors: this.type = { printMirrors = true; this }
+ def withoutMirrors: this.type = { printMirrors = false; this }
+ }
+
+ /** @group Printers */
+ case class BooleanFlag(val value: Option[Boolean])
+ /** @group Printers */
+ object BooleanFlag {
+ import scala.language.implicitConversions
+ implicit def booleanToBooleanFlag(value: Boolean): BooleanFlag = BooleanFlag(Some(value))
+ implicit def optionToBooleanFlag(value: Option[Boolean]): BooleanFlag = BooleanFlag(value)
+ }
+
+ /** @group Printers */
+ protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String = {
+ val buffer = new StringWriter()
+ val writer = new PrintWriter(buffer)
+ var printer = mkPrinter(writer)
+ printTypes.value.map(printTypes => if (printTypes) printer.withTypes else printer.withoutTypes)
+ printIds.value.map(printIds => if (printIds) printer.withIds else printer.withoutIds)
+ printKinds.value.map(printKinds => if (printKinds) printer.withKinds else printer.withoutKinds)
+ printMirrors.value.map(printMirrors => if (printMirrors) printer.withMirrors else printer.withoutMirrors)
+ printer.print(what)
+ writer.flush()
+ buffer.toString
+ }
+
+ /** By default trees are printed with `show`
+ * @group Printers
+ */
+ override protected def treeToString(tree: Tree) = show(tree)
+
+ /** Renders a representation of a reflection artifact
+ * as desugared Java code.
+ *
+ * @group Printers
+ */
+ def show(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String =
+ render(any, newTreePrinter(_), printTypes, printIds, printKinds, printMirrors)
+
+ /** Hook to define what `show(...)` means.
+ * @group Printers
+ */
+ protected def newTreePrinter(out: PrintWriter): TreePrinter
+
+ /** Renders internal structure of a reflection artifact as the
+ * visualization of a Scala syntax tree.
+ *
+ * @group Printers
+ */
+ def showRaw(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String =
+ render(any, newRawTreePrinter(_), printTypes, printIds, printKinds, printMirrors)
+
+ /** Hook to define what `showRaw(...)` means.
+ * @group Printers
+ */
+ protected def newRawTreePrinter(out: PrintWriter): TreePrinter
+
+ /** Renders a prettified representation of a name.
+ * @group Printers
+ */
+ def show(name: Name): String
+
+ /** Renders internal structure of a name.
+ * @group Printers
+ */
+ def showRaw(name: Name): String = name.toString
+
+ /** Renders a prettified representation of a flag set.
+ * @group Printers
+ */
+ def show(flags: FlagSet): String
+
+ /** Renders internal structure of a flag set.
+ * @group Printers
+ */
+ def showRaw(flags: FlagSet): String = flags.toString
+}
diff --git a/src/reflect/scala/reflect/api/Scopes.scala b/src/reflect/scala/reflect/api/Scopes.scala
new file mode 100644
index 0000000..7f97993
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Scopes.scala
@@ -0,0 +1,70 @@
+package scala.reflect
+package api
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * This trait provides support for scopes in the reflection API.
+ *
+ * A scope object generally maps names to symbols available in a corresponding lexical scope.
+ * Scopes can be nested. The base type exposed to the reflection API, however,
+ * only exposes a minimal interface, representing a scope as an iterable of symbols.
+ *
+ * For rare occasions when it is necessary to create a scope manually,
+ * e.g., to populate members of [[scala.reflect.api.Types#RefinedType]],
+ * there is the `newScopeWith` function.
+ *
+ * Additional functionality is exposed in member scopes that are returned by
+ * `members` and `declarations` defined in [[scala.reflect.api.Types#TypeApi]].
+ * Such scopes support the `sorted` method, which sorts members in declaration order.
+ *
+ * @group ReflectionAPI
+ */
+trait Scopes { self: Universe =>
+
+ /** The base type of all scopes.
+ * @template
+ * @group Scopes
+ */
+ type Scope >: Null <: ScopeApi
+
+ /** The API that all scopes support
+ * @group API
+ */
+ trait ScopeApi extends Iterable[Symbol]
+
+ /** A tag that preserves the identity of the `Scope` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ScopeTag: ClassTag[Scope]
+
+ /** Create a new scope with the given initial elements.
+ * @group Scopes
+ */
+ def newScopeWith(elems: Symbol*): Scope
+
+ /** The type of member scopes, as in class definitions, for example.
+ * @template
+ * @group Scopes
+ */
+ type MemberScope >: Null <: Scope with MemberScopeApi
+
+ /** The API that all member scopes support
+ * @group API
+ */
+ trait MemberScopeApi extends ScopeApi {
+ /** Sorts the symbols included in this scope so that:
+ * 1) Symbols appear in the linearization order of their owners.
+ * 2) Symbols with the same owner appear in same order of their declarations.
+ * 3) Synthetic members (e.g. getters/setters for vals/vars) might appear in arbitrary order.
+ */
+ def sorted: List[Symbol]
+ }
+
+ /** A tag that preserves the identity of the `MemberScope` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val MemberScopeTag: ClassTag[MemberScope]
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/api/StandardDefinitions.scala b/src/reflect/scala/reflect/api/StandardDefinitions.scala
new file mode 100644
index 0000000..721b0bc
--- /dev/null
+++ b/src/reflect/scala/reflect/api/StandardDefinitions.scala
@@ -0,0 +1,314 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.reflect
+package api
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * All Scala standard symbols and types.
+ *
+ * These standard definitions can be accessed using `definitions`.
+ * They're typically imported with a wildcard import, `import definitions._`, and are
+ * listed in [[scala.reflect.api.StandardDefinitions#DefinitionsApi]].
+ *
+ * @group ReflectionAPI
+ */
+trait StandardDefinitions {
+ self: Universe =>
+
+ /** A value containing all standard definitions in [[DefinitionsApi]]
+ * @group Definitions
+ */
+ val definitions: DefinitionsApi
+
+ /** Defines standard symbols (and types via its base trait).
+ * @group API
+ */
+ trait DefinitionsApi extends StandardTypes {
+ /** The module class symbol of package `scala`. */
+ def ScalaPackageClass: ClassSymbol
+
+ /** The module symbol of package `scala`. */
+ def ScalaPackage: ModuleSymbol
+
+ /** The class symbol of core class `scala.Any`. */
+ def AnyClass : ClassSymbol
+
+ /** The class symbol of core class `scala.AnyVal`. */
+ def AnyValClass: ClassSymbol
+
+ /** The class symbol of core class `java.lang.Object`. */
+ def ObjectClass: ClassSymbol
+
+ /** The type symbol of core class `scala.AnyRef`. */
+ def AnyRefClass: TypeSymbol
+
+ /** The class symbol of core class `scala.Null`. */
+ def NullClass : ClassSymbol
+
+ /** The class symbol of core class `scala.Nothing`. */
+ def NothingClass: ClassSymbol
+
+ /** The class symbol of primitive class `scala.Unit`. */
+ def UnitClass : ClassSymbol
+
+ /** The class symbol of primitive class `scala.Byte`. */
+ def ByteClass : ClassSymbol
+
+ /** The class symbol of primitive class `scala.Short`. */
+ def ShortClass : ClassSymbol
+
+ /** The class symbol of primitive class `scala.Char`. */
+ def CharClass : ClassSymbol
+
+ /** The class symbol of primitive class `scala.Int`. */
+ def IntClass : ClassSymbol
+
+ /** The class symbol of primitive class `scala.Long`. */
+ def LongClass : ClassSymbol
+
+ /** The class symbol of primitive class `scala.Float`. */
+ def FloatClass : ClassSymbol
+
+ /** The class symbol of primitive class `scala.Double`. */
+ def DoubleClass : ClassSymbol
+
+ /** The class symbol of primitive class `scala.Boolean`. */
+ def BooleanClass: ClassSymbol
+
+ /** The class symbol of class `scala.String`. */
+ def StringClass : ClassSymbol
+
+ /** The class symbol of class `java.lang.Class`. */
+ def ClassClass : ClassSymbol
+
+ /** The class symbol of class `scala.Array`. */
+ def ArrayClass : ClassSymbol
+
+ /** The class symbol of class `scala.List`. */
+ def ListClass : ClassSymbol
+
+ /** The module symbol of module `scala.Predef`. */
+ def PredefModule: ModuleSymbol
+
+ /** The module class symbol of package `java.lang`. */
+ def JavaLangPackageClass: ClassSymbol
+
+ /** The module symbol of package `java.lang`. */
+ def JavaLangPackage: ModuleSymbol
+
+ /** The module symbol of module `scala.Array`. */
+ def ArrayModule: ModuleSymbol
+
+ /** The method symbol of method `apply` in module `scala.Array`. */
+ def ArrayModule_overloadedApply: TermSymbol // todo. fix the bug in Definitions.getMemberMethod
+
+ /** The method symbol of method `apply` in class `scala.Array`. */
+ def Array_apply: TermSymbol // todo. fix the bug in Definitions.getMemberMethod
+
+ /** The method symbol of method `clone` in class `scala.Array`. */
+ def Array_clone: TermSymbol // todo. fix the bug in Definitions.getMemberMethod
+
+ /** The method symbol of method `length` in class `scala.Array`. */
+ def Array_length: TermSymbol // todo. fix the bug in Definitions.getMemberMethod
+
+ /** The method symbol of method `update` in class `scala.Array`. */
+ def Array_update: TermSymbol // todo. fix the bug in Definitions.getMemberMethod
+
+ /** A dummy class symbol that is used to indicate by-name parameters.
+ *
+ * {{{
+ * scala> class C { def m(x: => Int) = ??? }
+ * defined class C
+ *
+ * scala> import scala.reflect.runtime.universe._
+ * import scala.reflect.runtime.universe._
+ *
+ * scala> val m = typeOf[C].member(newTermName("m")).asMethod
+ * m: reflect.runtime.universe.MethodSymbol = method m
+ *
+ * scala> m.params(0)(0).typeSignature
+ * res1: reflect.runtime.universe.Type = => scala.Int
+ *
+ * scala> showRaw(m.params(0)(0).typeSignature)
+ * res2: String = TypeRef(
+ * ThisType(scala),
+ * scala.<byname>, // <-- ByNameParamClass
+ * List(TypeRef(ThisType(scala), scala.Int, List())))
+ * }}}
+ */
+ def ByNameParamClass: ClassSymbol
+
+ /** A dummy class symbol that is used to indicate repeated parameters
+ * compiled by the Java compiler.
+ *
+ * {{{
+ * class C {
+ * public void m(Object... x) {}
+ * }
+ * }}}
+ *
+ * {{{
+ * scala> import scala.reflect.runtime.universe._
+ * import scala.reflect.runtime.universe._
+ *
+ * scala> val m = typeOf[C].member(newTermName("m")).asMethod
+ * m: reflect.runtime.universe.MethodSymbol = method m
+ *
+ * scala> m.params(0)(0).typeSignature
+ * res1: reflect.runtime.universe.Type = <repeated...>[Object]
+ *
+ * scala> showRaw(m.params(0)(0).typeSignature)
+ * res2: String = TypeRef(
+ * ThisType(scala),
+ * scala.<repeated...>, // <-- JavaRepeatedParamClass
+ * List(TypeRef(ThisType(java.lang), Object, List())))
+ * }}}
+ */
+ def JavaRepeatedParamClass: ClassSymbol
+
+ /** A dummy class symbol that is used to indicate repeated parameters
+ * compiled by the Scala compiler.
+ *
+ * {{{
+ * scala> class C { def m(x: Int*) = ??? }
+ * defined class C
+ *
+ * scala> import scala.reflect.runtime.universe._
+ * import scala.reflect.runtime.universe._
+ *
+ * scala> val m = typeOf[C].member(newTermName("m")).asMethod
+ * m: reflect.runtime.universe.MethodSymbol = method m
+ *
+ * scala> m.params(0)(0).typeSignature
+ * res1: reflect.runtime.universe.Type = scala.Int*
+ *
+ * scala> showRaw(m.params(0)(0).typeSignature)
+ * res2: String = TypeRef(
+ * ThisType(scala),
+ * scala.<repeated>, // <-- RepeatedParamClass
+ * List(TypeRef(ThisType(scala), scala.Int, List())))
+ * }}}
+ */
+ def RepeatedParamClass: ClassSymbol
+
+ /** The module symbol of module `scala.List`. */
+ def ListModule: ModuleSymbol
+
+ /** The method symbol of method `apply` in class `scala.List`. */
+ def List_apply: TermSymbol // todo. fix the bug in Definitions.getMemberMethod
+
+ /** The module symbol of module `scala.collection.immutable.Nil`. */
+ def NilModule: ModuleSymbol
+
+ /** The class symbol of class `scala.Option`. */
+ def OptionClass: ClassSymbol
+
+ /** The module symbol of module `scala.None`. */
+ def NoneModule: ModuleSymbol
+
+ /** The module symbol of module `scala.Some`. */
+ def SomeModule: ModuleSymbol
+
+ /** The array of class symbols for classes `scala.ProductX`.
+ * - 0th element is `Unit`
+ * - 1st element is `Product1`
+ * - ...
+ * - 22nd element is `Product22`
+ */
+ def ProductClass : Array[ClassSymbol]
+
+ /** The array of class symbols for classes `scala.FunctionX`.
+ * - 0th element is `Function0`
+ * - 1st element is `Function1`
+ * - ...
+ * - 22nd element is `Function22`
+ */
+ def FunctionClass : Array[ClassSymbol]
+
+ /** The array of class symbols for classes `scala.TupleX`.
+ * - 0th element is `NoSymbol`
+ * - 1st element is `Tuple1`
+ * - ...
+ * - 22nd element is `Tuple22`
+ */
+ def TupleClass: Array[Symbol] // cannot make it Array[ClassSymbol], because TupleClass(0) is supposed to be NoSymbol. weird
+
+ /** Contains Scala primitive value classes:
+ * - Byte
+ * - Short
+ * - Int
+ * - Long
+ * - Float
+ * - Double
+ * - Char
+ * - Boolean
+ * - Unit
+ */
+ def ScalaPrimitiveValueClasses: List[ClassSymbol]
+
+ /** Contains Scala numeric value classes:
+ * - Byte
+ * - Short
+ * - Int
+ * - Long
+ * - Float
+ * - Double
+ * - Char
+ */
+ def ScalaNumericValueClasses: List[ClassSymbol]
+ }
+
+ /** Defines standard types.
+ * @group Definitions
+ */
+ trait StandardTypes {
+ /** The type of primitive type `Unit`. */
+ val UnitTpe: Type
+
+ /** The type of primitive type `Byte`. */
+ val ByteTpe: Type
+
+ /** The type of primitive type `Short`. */
+ val ShortTpe: Type
+
+ /** The type of primitive type `Char`. */
+ val CharTpe: Type
+
+ /** The type of primitive type `Int`. */
+ val IntTpe: Type
+
+ /** The type of primitive type `Long`. */
+ val LongTpe: Type
+
+ /** The type of primitive type `Float`. */
+ val FloatTpe: Type
+
+ /** The type of primitive type `Double`. */
+ val DoubleTpe: Type
+
+ /** The type of primitive type `Boolean`. */
+ val BooleanTpe: Type
+
+ /** The type of core type `Any`. */
+ val AnyTpe: Type
+
+ /** The type of core type `AnyVal`. */
+ val AnyValTpe: Type
+
+ /** The type of core type `AnyRef`. */
+ val AnyRefTpe: Type
+
+ /** The type of core type `Object`. */
+ val ObjectTpe: Type
+
+ /** The type of core type `Nothing`. */
+ val NothingTpe: Type
+
+ /** The type of core type `Null`. */
+ val NullTpe: Type
+ }
+}
diff --git a/src/reflect/scala/reflect/api/StandardNames.scala b/src/reflect/scala/reflect/api/StandardNames.scala
new file mode 100644
index 0000000..4886e4f
--- /dev/null
+++ b/src/reflect/scala/reflect/api/StandardNames.scala
@@ -0,0 +1,107 @@
+/* NSC -- new Scala compiler
+* Copyright 2005-2013 LAMP/EPFL
+* @author Martin Odersky
+*/
+package scala.reflect
+package api
+
+// Q: I have a pretty name. Can I put it here?
+// A: Is it necessary to construct trees (like EMPTY or WILDCARD_STAR)? If yes, then sure.
+// Is it necessary to perform reflection (like ERROR or LOCAL_SUFFIX_STRING)? If yes, then sure.
+// Otherwise you'd better not - reflection API should stay minimalistic.
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * Standard names are names that are essential to creating trees or to reflecting Scala artifacts.
+ * For example, `CONSTRUCTOR` (aka `<init>` on JVM) is necessary to create and invoke constructors.
+ *
+ * These standard names can be referred to using [[nme `nme`]] for term names and [[tpnme `tpnme`]] for type names
+ *
+ * @see [[Names]]
+ *
+ * The API for names in Scala reflection.
+ * @groupname StandardNames Standard Names
+ * @group ReflectionAPI
+ */
+trait StandardNames {
+ self: Universe =>
+
+ /** A value containing all [[TermNamesApi standard term names]].
+ * @group StandardNames
+ */
+ val nme: TermNamesApi
+
+ /** A value containing all [[TypeNamesApi standard type names]].
+ * @group StandardNames
+ */
+ val tpnme: TypeNamesApi
+
+ /** Defines standard names, common for term and type names: These can be accessed via the [[nme]] and [[tpnme]] members.
+ * @group API
+ */
+ trait NamesApi {
+ /** An abstract type that represents the exact flavor of the name. */
+ type NameType >: Null <: Name
+
+ /** The term or type name `_`.
+ * Used to construct trees that correspond to underscores in Scala.
+ */
+ val WILDCARD: NameType
+
+ /** The term or type name corresponding to an empty string.
+ * Represents an empty name, used to denote the fact that no name was specified
+ * for `privateWithin` in [[Trees#Modifiers]], for [[Trees#This]],
+ * for [[Trees#Super]], etc.
+ */
+ val EMPTY: NameType
+
+ /** The term or type name `<error>`.
+ * Indicates that the enclosing tree or symbol contains a compilation error.
+ */
+ val ERROR: NameType
+
+ /** The term or type name `package`.
+ * Used to get modules representing package objects.
+ */
+ val PACKAGE: NameType
+ }
+
+ /** Defines standard term names that can be accessed via the [[nme]] member.
+ * @group API
+ */
+ trait TermNamesApi extends NamesApi {
+ /** @inheritdoc */
+ type NameType = TermName
+
+ /** The term name `<init>`.
+ * Represents the constructor name on the JVM.
+ */
+ val CONSTRUCTOR: NameType
+
+ /** The term name `_root_`.
+ * Represents the root package.
+ */
+ val ROOTPKG: NameType
+
+ /** The string " " (a single whitespace).
+ * `LOCAL_SUFFIX_STRING` is appended to the names of local identifiers,
+ * when it's necessary to prevent a naming conflict. For example, underlying fields
+ * of non-private vals and vars are renamed using `LOCAL_SUFFIX_STRING`.
+ */
+ val LOCAL_SUFFIX_STRING: String
+ }
+
+ /** Defines standard type names that can be accessed via the [[tpnme]] member.
+ * @group API
+ */
+ trait TypeNamesApi extends NamesApi {
+ /** @inheritdoc */
+ type NameType = TypeName
+
+ /** The type name `_*`.
+ * Used to construct types that specify sequence arguments to repeated parameters.
+ */
+ val WILDCARD_STAR: NameType
+ }
+}
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
new file mode 100644
index 0000000..c8e03f1
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -0,0 +1,1022 @@
+package scala.reflect
+package api
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * This trait defines symbols and operations on them.
+ *
+ * Symbols are used to establish bindings between a name and the entity it refers to, such as a class or a method.
+ * Anything you define and can give a name to in Scala has an associated symbol.
+ *
+ * Symbols contain all available information about the declaration of an entity (class/object/trait etc.) or a
+ * member (vals/vars/defs etc.), and as such are an integral abstraction central to both runtime
+ * reflection and macros.
+ *
+ * A symbol can provide a wealth of information ranging from the basic `name` method available on all symbols to
+ * other, more involved, concepts such as getting the `baseClasses` from `ClassSymbol`. Other common use cases of
+ * symbols include inspecting members' signatures, getting type parameters of a class, getting the parameter type
+ * of a method or finding out the type of a field.
+ *
+ * Example usage of runtime reflection; getting a method's type signature:
+ * {{{
+ * scala> import scala.reflect.runtime.universe._
+ * import scala.reflect.runtime.universe._
+ *
+ * scala> class C[T] { def test[U](x: T)(y: U): Int = ??? }
+ * defined class C
+ *
+ * scala> val test = typeOf[C[Int]].member(newTermName("test")).asMethod
+ * test: reflect.runtime.universe.MethodSymbol = method test
+ *
+ * scala> test.typeSignature
+ * res0: reflect.runtime.universe.Type = [U](x: T)(y: U)scala.Int
+ * }}}
+ *
+ * Symbols are organized in a hierarchy. For example, a symbol that represents a parameter of a method is owned by
+ * the corresponding method symbol, a method symbol is owned by its enclosing class, a class is owned by a
+ * containing package and so on.
+ *
+ * Certain types of tree nodes, such as [[Trees#Ident Ident]] (references to identifiers) and
+ * [[Trees#Select Select]] (references to members) expose method [[Trees.SymTreeApi.symbol `symbol`]]
+ * to obtain the symbol that represents their declaration. During the typechecking phase, the compiler looks up the
+ * symbol based on the name and scope and sets the [[Trees.SymTreeApi.symbol `symbol` field]] of tree nodes.
+ *
+ * For more information about `Symbol` usage and attached intricacies, see the [[http://docs.scala-lang.org/overviews/reflection/symbols-trees-types.html Reflection Guide: Symbols]]
+ *
+ * @group ReflectionAPI
+ *
+ * @contentDiagram hideNodes "*Api"
+ *
+ * @define SYMACCESSORS Class [[Symbol]] defines `isXXX` test methods such as `isPublic` or `isFinal`, `params` and
+ * `returnType` methods for method symbols, `baseClasses` for class symbols and so on. Some of these methods don't
+ * make sense for certain subclasses of `Symbol` and return `NoSymbol`, `Nil` or other empty values.
+ *
+ */
+trait Symbols { self: Universe =>
+
+ /** The type of symbols representing declarations.
+ * @group Symbols
+ * @template
+ */
+ type Symbol >: Null <: SymbolApi
+
+ /** A tag that preserves the identity of the `Symbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val SymbolTag: ClassTag[Symbol]
+
+ /** The type of type symbols representing type, class, and trait declarations,
+ * as well as type parameters.
+ * @group Symbols
+ * @template
+ */
+ type TypeSymbol >: Null <: Symbol with TypeSymbolApi
+
+ /** A tag that preserves the identity of the `TypeSymbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TypeSymbolTag: ClassTag[TypeSymbol]
+
+ /** The type of term symbols representing val, var, def, and object declarations as
+ * well as packages and value parameters.
+ * @group Symbols
+ * @template
+ */
+ type TermSymbol >: Null <: Symbol with TermSymbolApi
+
+ /** A tag that preserves the identity of the `TermSymbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TermSymbolTag: ClassTag[TermSymbol]
+
+ /** The type of method symbols representing def declarations.
+ * @group Symbols
+ * @template
+ */
+ type MethodSymbol >: Null <: TermSymbol with MethodSymbolApi
+
+ /** A tag that preserves the identity of the `MethodSymbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val MethodSymbolTag: ClassTag[MethodSymbol]
+
+ /** The type of module symbols representing object declarations.
+ * @group Symbols
+ * @template
+ */
+ type ModuleSymbol >: Null <: TermSymbol with ModuleSymbolApi
+
+ /** A tag that preserves the identity of the `ModuleSymbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ModuleSymbolTag: ClassTag[ModuleSymbol]
+
+ /** The type of class symbols representing class and trait definitions.
+ * @group Symbols
+ * @template
+ */
+ type ClassSymbol >: Null <: TypeSymbol with ClassSymbolApi
+
+ /** A tag that preserves the identity of the `ClassSymbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ClassSymbolTag: ClassTag[ClassSymbol]
+
+ /** The type of free terms introduced by reification.
+ * @group Symbols
+ * @template
+ */
+ type FreeTermSymbol >: Null <: TermSymbol with FreeTermSymbolApi
+
+ /** A tag that preserves the identity of the `FreeTermSymbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val FreeTermSymbolTag: ClassTag[FreeTermSymbol]
+
+ /** The type of free types introduced by reification.
+ * @group Symbols
+ * @template
+ */
+ type FreeTypeSymbol >: Null <: TypeSymbol with FreeTypeSymbolApi
+
+ /** A tag that preserves the identity of the `FreeTypeSymbol` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val FreeTypeSymbolTag: ClassTag[FreeTypeSymbol]
+
+ /** A special "missing" symbol. Commonly used in the API to denote a default or empty value.
+ * @group Symbols
+ * @template
+ */
+ val NoSymbol: Symbol
+
+ /** The API of symbols.
+ * The main source of information about symbols is the [[Symbols]] page.
+ *
+ * $SYMACCESSORS
+ * @group API
+ * @groupname Basics Symbol Basic Information
+ * @groupprio Basics 0
+ * @groupname Tests Symbol Type Tests
+ * @groupprio Tests 1
+ * @groupname Conversions Symbol Conversions
+ * @groupprio Conversions 2
+ * @groupname Constructors New Symbol Constructors
+ * @groupprio Constructors 3
+ * @groupdesc Constructors These methods construct new symbols owned by the current symbol.
+ * @groupname Helpers Iteration Helpers
+ * @groupprio Helpers 4
+ * @groupdesc Helpers These methods enable collections-like operations on symbols.
+ * @groupname Type TypeSymbol Members
+ * @groupprio Type -1
+ * @groupname FreeType FreeType Symbol Members
+ * @groupprio FreeType -2
+ * @groupname Term TermSymbol Members
+ * @groupprio Term -1
+ * @groupname FreeTerm FreeTerm Symbol Members
+ * @groupprio FreeTerm -2
+ * @groupname Class Class Symbol Members
+ * @groupprio Class -2
+ * @groupname Method Method Symbol Members
+ * @groupprio Method -2
+ * @groupname Module Module Symbol Members
+ * @groupprio Module -2
+ */
+ trait SymbolApi { this: Symbol =>
+
+ /** The owner of this symbol. This is the symbol
+ * that directly contains the current symbol's definition.
+ * The `NoSymbol` symbol does not have an owner, and calling this method
+ * on one causes an internal error.
+ * The owner of the Scala root class [[scala.reflect.api.Mirror.RootClass]]
+ * and the Scala root object [[scala.reflect.api.Mirror.RootPackage]] is `NoSymbol`.
+ * Every other symbol has a chain of owners that ends in
+ * [[scala.reflect.api.Mirror.RootClass]].
+ *
+ * @group Basics
+ */
+ def owner: Symbol
+
+ /** The type of the symbol name.
+ * Can be either `TermName` or `TypeName` depending on whether this is a `TermSymbol` or a `TypeSymbol`.
+ *
+ * Type name namespaces do not intersect with term name namespaces.
+ * This fact is reflected in different types for names of `TermSymbol` and `TypeSymbol`.
+ * @group Basics
+ */
+ type NameType >: Null <: Name
+
+ /** The name of the symbol as a member of the `Name` type.
+ * @group Basics
+ */
+ def name: Name
+
+ /** The encoded full path name of this symbol, where outer names and inner names
+ * are separated by periods.
+ * @group Basics
+ */
+ def fullName: String
+
+ /** Does this symbol represent the definition of a type?
+ * Note that every symbol is either a term or a type.
+ * So for every symbol `sym` (except for `NoSymbol`),
+ * either `sym.isTerm` is true or `sym.isType` is true.
+ *
+ * @group Tests
+ */
+ def isType: Boolean = false
+
+ /** This symbol cast to a TypeSymbol.
+ * @throws ScalaReflectionException if `isType` is false.
+ *
+ * @group Conversions
+ */
+ def asType: TypeSymbol = throw new ScalaReflectionException(s"$this is not a type")
+
+ /** Does this symbol represent the definition of a term?
+ * Note that every symbol is either a term or a type.
+ * So for every symbol `sym` (except for `NoSymbol`),
+ * either `sym.isTerm` is true or `sym.isType` is true.
+ *
+ * @group Tests
+ */
+ def isTerm: Boolean = false
+
+ /** This symbol cast to a TermSymbol.
+ * @throws ScalaReflectionException if `isTerm` is false.
+ *
+ * @group Conversions
+ */
+ def asTerm: TermSymbol = throw new ScalaReflectionException(s"$this is not a term")
+
+ /** Does this symbol represent the definition of a method?
+ * If yes, `isTerm` is also guaranteed to be true.
+ *
+ * @group Tests
+ */
+ def isMethod: Boolean = false
+
+ /** This symbol cast to a MethodSymbol.
+ * @throws ScalaReflectionException if `isMethod` is false.
+ *
+ * @group Conversions
+ */
+ def asMethod: MethodSymbol = {
+ def overloadedMsg =
+ "encapsulates multiple overloaded alternatives and cannot be treated as a method. "+
+ "Consider invoking `<offending symbol>.asTerm.alternatives` and manually picking the required method"
+ def vanillaMsg = "is not a method"
+ val msg = if (isOverloadedMethod) overloadedMsg else vanillaMsg
+ throw new ScalaReflectionException(s"$this $msg")
+ }
+
+ /** Used to provide a better error message for `asMethod`
+ *
+ * @group Tests
+ */
+ protected def isOverloadedMethod = false
+
+ /** Does this symbol represent the definition of a module (i.e. does it
+ * result from an object definition)?
+ * If yes, `isTerm` is also guaranteed to be true.
+ *
+ * @group Tests
+ */
+ def isModule: Boolean = false
+
+ /** This symbol cast to a ModuleSymbol defined by an object definition.
+ * @throws ScalaReflectionException if `isModule` is false.
+ *
+ * @group Conversions
+ */
+ def asModule: ModuleSymbol = throw new ScalaReflectionException(s"$this is not a module")
+
+ /** Does this symbol represent the definition of a class or trait?
+ * If yes, `isType` is also guaranteed to be true.
+ *
+ * @group Tests
+ */
+ def isClass: Boolean = false
+
+ /** Does this symbol represent the definition of a class implicitly associated
+ * with an object definition (module class in scala compiler parlance).
+ * If yes, `isType` is also guaranteed to be true.
+ *
+ * @group Tests
+ */
+ def isModuleClass: Boolean = false
+
+ /** This symbol cast to a ClassSymbol representing a class or trait.
+ * @throws ScalaReflectionException if `isClass` is false.
+ *
+ * @group Conversions
+ */
+ def asClass: ClassSymbol = throw new ScalaReflectionException(s"$this is not a class")
+
+ /** Does this symbol represent a free term captured by reification?
+ * If yes, `isTerm` is also guaranteed to be true.
+ *
+ * @group Tests
+ */
+ def isFreeTerm: Boolean = false
+
+ /** This symbol cast to a free term symbol.
+ * @throws ScalaReflectionException if `isFreeTerm` is false.
+ *
+ * @group Conversions
+ */
+ def asFreeTerm: FreeTermSymbol = throw new ScalaReflectionException(s"$this is not a free term")
+
+ /** Does this symbol represent a free type captured by reification?
+ * If yes, `isType` is also guaranteed to be true.
+ *
+ * @group Tests
+ */
+ def isFreeType: Boolean = false
+
+ /** This symbol cast to a free type symbol.
+ * @throws ScalaReflectionException if `isFreeType` is false.
+ *
+ * @group Conversions
+ */
+ def asFreeType: FreeTypeSymbol = throw new ScalaReflectionException(s"$this is not a free type")
+
+ /** @group Constructors */
+ def newTermSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol
+ /** @group Constructors */
+ def newModuleAndClassSymbol(name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol)
+ /** @group Constructors */
+ def newMethodSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol
+ /** @group Constructors */
+ def newTypeSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol
+ /** @group Constructors */
+ def newClassSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol
+
+ /** Source file if this symbol is created during this compilation run,
+ * or a class file if this symbol is loaded from a *.class or *.jar.
+ *
+ * The return type is `scala.reflect.io.AbstractFile`, which belongs to an experimental part of Scala reflection.
+ * It should not be used unless you know what you are doing. In subsequent releases, this API will be refined
+ * and exposed as a part of scala.reflect.api.
+ *
+ * @group Basics
+ */
+ def associatedFile: scala.reflect.io.AbstractFile
+
+ /** A list of annotations attached to this Symbol.
+ *
+ * @group Basics
+ */
+ def annotations: List[Annotation]
+
+ /** For a class: the module or case class factory with the same name in the same package.
+ * For a module: the class with the same name in the same package.
+ * For all others: NoSymbol
+ *
+ * @group Basics
+ */
+ def companionSymbol: Symbol
+
+ /** The type signature of this symbol seen as a member of given type `site`.
+ *
+ * @group Basics
+ */
+ def typeSignatureIn(site: Type): Type
+
+ /** The type signature of this symbol.
+ *
+ * This method always returns signatures in the most generic way possible, even if the underlying symbol is obtained from an
+ * instantiation of a generic type. For example, signature
+ * of the method `def map[B](f: (A) ⇒ B): List[B]`, which refers to the type parameter `A` of the declaring class `List[A]`,
+ * will always feature `A`, regardless of whether `map` is loaded from the `List[_]` or from `List[Int]`. To get a signature
+ * with type parameters appropriately instantiated, one should use `typeSignatureIn`.
+ *
+ * @group Basics
+ */
+ def typeSignature: Type
+
+ /** Returns all symbols overridden by this symbol.
+ *
+ * @group Basics
+ */
+ def allOverriddenSymbols: List[Symbol]
+
+ /******************* tests *******************/
+
+ /** Does this symbol represent a synthetic (i.e. a compiler-generated) entity?
+ * Examples of synthetic entities are accessors for vals and vars
+ * or mixin constructors in trait implementation classes.
+ *
+ * @group Tests
+ */
+ def isSynthetic: Boolean
+
+ /** Does this symbol represent an implementation artifact that isn't meant for public use?
+ * Examples of such artifacts are erasure bridges and outer fields.
+ *
+ * @group Tests
+ */
+ def isImplementationArtifact: Boolean
+
+ /** Does this symbol represent a local declaration or definition?
+ *
+ * If yes, either `isPrivate` or `isProtected` are guaranteed to be true.
+ * Local symbols can only be accessed from the same object instance.
+ *
+ * If yes, `privateWithin` might tell more about this symbol's visibility scope.
+ *
+ * @group Tests
+ */
+ def isLocal: Boolean
+
+ /** Does this symbol represent a private declaration or definition?
+ * If yes, `privateWithin` might tell more about this symbol's visibility scope.
+ *
+ * @group Tests
+ */
+ def isPrivate: Boolean
+
+ /** Does this symbol represent a protected declaration or definition?
+ * If yes, `privateWithin` might tell more about this symbol's visibility scope.
+ *
+ * @group Tests
+ */
+ def isProtected: Boolean
+
+ /** Does this symbol represent a public declaration or definition?
+ *
+ * @group Tests
+ */
+ def isPublic: Boolean
+
+ /**
+ * Set when symbol has a modifier of the form private[X], NoSymbol otherwise.
+ *
+ * Access level encoding: there are three scala flags (PRIVATE, PROTECTED,
+ * and LOCAL) which combine with value privateWithin (the "foo" in private[foo])
+ * to define from where an entity can be accessed. The meanings are as follows:
+ *
+ * PRIVATE access restricted to class only.
+ * PROTECTED access restricted to class and subclasses only.
+ * LOCAL can only be set in conjunction with PRIVATE or PROTECTED.
+ * Further restricts access to the same object instance.
+ *
+ * In addition, privateWithin can be used to set a visibility barrier.
+ * When set, everything contained in the named enclosing package or class
+ * has access. It is incompatible with PRIVATE or LOCAL, but is additive
+ * with PROTECTED (i.e. if either the flags or privateWithin allow access,
+ * then it is allowed.)
+ *
+ * The java access levels translate as follows:
+ *
+ * java private: isPrivate && (privateWithin == NoSymbol)
+ * java package: !isPrivate && !isProtected && (privateWithin == enclosingPackage)
+ * java protected: isProtected && (privateWithin == enclosingPackage)
+ * java public: !isPrivate && !isProtected && (privateWithin == NoSymbol)
+ *
+ * @group Tests
+ */
+ def privateWithin: Symbol
+
+ /** Does this symbol represent the definition of a package?
+ * If yes, `isTerm` is also guaranteed to be true.
+ *
+ * @group Tests
+ */
+ def isPackage: Boolean
+
+ /** Does this symbol represent a package class?
+ * If yes, `isClass` is also guaranteed to be true.
+ *
+ * @group Tests
+ */
+ def isPackageClass: Boolean
+
+ /** Does this symbol or its underlying type represent a typechecking error?
+ *
+ * @group Tests
+ */
+ def isErroneous : Boolean
+
+ /** Is this symbol static (i.e. with no outer instance)?
+ * Q: When exactly is a sym marked as STATIC?
+ * A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or any number of levels deep.
+ * http://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6
+ *
+ * @group Tests
+ */
+ def isStatic: Boolean
+
+ /** Is this symbol final?
+ *
+ * @group Tests
+ */
+ def isFinal: Boolean
+
+ /** Is this symbol overriding something?
+ *
+ * @group Tests
+ */
+ def isOverride: Boolean
+
+ /** Is this symbol labelled as "abstract override"?
+ *
+ * @group Tests
+ */
+ def isAbstractOverride: Boolean
+
+ /** Is this symbol a macro?
+ *
+ * @group Tests
+ */
+ def isMacro: Boolean
+
+ /** Is this symbol a parameter (either a method parameter or a type parameter)?
+ *
+ * @group Tests
+ */
+ def isParameter: Boolean
+
+ /** Is this symbol a specialized type parameter or a generated specialized member?
+ *
+ * @group Tests
+ */
+ def isSpecialized: Boolean
+
+ /** Is this symbol defined by Java?
+ *
+ * @group Tests
+ */
+ def isJava: Boolean
+
+ /** Does this symbol represent an implicit value, definition, class or parameter?
+ *
+ * @group Tests
+ */
+ def isImplicit: Boolean
+
+ /******************* helpers *******************/
+
+ /** Provides an alternate if symbol is a NoSymbol.
+ *
+ * @group Helpers
+ */
+ def orElse(alt: => Symbol): Symbol
+
+ /** Filters the underlying alternatives (or a single-element list
+ * composed of the symbol itself if the symbol is not overloaded).
+ * Returns an overloaded symbol if there are multiple matches.
+ * Returns a NoSymbol if there are no matches.
+ *
+ * @group Helpers
+ */
+ def filter(cond: Symbol => Boolean): Symbol
+
+ /** If this is a NoSymbol, returns NoSymbol, otherwise
+ * returns the result of applying `f` to this symbol.
+ *
+ * @group Helpers
+ */
+ def map(f: Symbol => Symbol): Symbol
+
+ /** Does the same as `filter`, but crashes if there are multiple matches.
+ *
+ * @group Helpers
+ */
+ def suchThat(cond: Symbol => Boolean): Symbol
+ }
+
+ /** The API of term symbols.
+ * The main source of information about symbols is the [[Symbols]] page.
+ *
+ * $SYMACCESSORS
+ * @group API
+ */
+ trait TermSymbolApi extends SymbolApi { this: TermSymbol =>
+ /** Term symbols have their names of type `TermName`.
+ */
+ final type NameType = TermName
+
+ final override def isTerm = true
+ final override def asTerm = this
+
+ /** Is this symbol introduced as `val`?
+ *
+ * @group Term
+ */
+ def isVal: Boolean
+
+ /** Does this symbol denote a stable value?
+ *
+ * @group Term
+ */
+ def isStable: Boolean
+
+ /** Is this symbol introduced as `var`?
+ *
+ * @group Term
+ */
+ def isVar: Boolean
+
+ /** Does this symbol represent a getter or a setter?
+ *
+ * @group Term
+ */
+ def isAccessor: Boolean
+
+ /** Does this symbol represent a getter of a field?
+ * If yes, `isMethod` is also guaranteed to be true.
+ *
+ * @group Term
+ */
+ def isGetter: Boolean
+
+ /** Does this symbol represent a setter of a field?
+ * If yes, `isMethod` is also guaranteed to be true.
+ *
+ * @group Term
+ */
+ def isSetter: Boolean
+
+ /** Does this symbol represent an overloaded method?
+ * If yes, `isMethod` is false, and the list of the enclosed alternatives can be found out via `alternatives`.
+ *
+ * @group Term
+ */
+ def isOverloaded : Boolean
+
+ /** Does this symbol represent a lazy value?
+ *
+ * @group Term
+ */
+ def isLazy: Boolean
+
+ /** The overloaded alternatives of this symbol
+ *
+ * @group Term
+ */
+ def alternatives: List[Symbol]
+
+ /** Used to provide a better error message for `asMethod` */
+ override protected def isOverloadedMethod = alternatives exists (_.isMethod)
+
+ /** Backing field for an accessor method, NoSymbol for all other term symbols.
+ *
+ * @group Term
+ */
+ def accessed: Symbol
+
+ /** Getter method for a backing field of a val or a var, NoSymbol for all other term symbols.
+ *
+ * @group Term
+ */
+ def getter: Symbol
+
+ /** Setter method for a backing field of a val or a var, NoSymbol for all other term symbols.
+ *
+ * @group Term
+ */
+ def setter: Symbol
+
+ /** Does this symbol represent a field of a class
+ * that was generated from a parameter of that class?
+ *
+ * @group Term
+ */
+ def isParamAccessor: Boolean
+
+ /** Does this symbol represent a field of a case class
+ * that corresponds to a parameter in the first parameter list of the
+ * primary constructor of that class?
+ *
+ * @group Term
+ */
+ def isCaseAccessor: Boolean
+
+ /** Does this symbol represent a parameter with a default value?
+ *
+ * @group Term
+ */
+ def isParamWithDefault: Boolean
+
+ /** Does this symbol represent a by-name parameter?
+ *
+ * @group Term
+ */
+ def isByNameParam: Boolean
+ }
+
+ /** The API of type symbols.
+ * The main source of information about symbols is the [[Symbols]] page.
+ *
+ * $SYMACCESSORS
+ * @group API
+ */
+ trait TypeSymbolApi extends SymbolApi { this: TypeSymbol =>
+ /** Type symbols have their names of type `TypeName`.
+ */
+ final type NameType = TypeName
+
+ /** The type constructor corresponding to this type symbol.
+ * This is different from `toType` in that type parameters
+ * are part of results of `toType`, but not of `toTypeConstructor`.
+ *
+ * Example: Given a class declaration `class C[T] { ... } `, that generates a symbol
+ * `C`. Then `C.toType` is the type `C[T]`, but `C.toTypeConstructor` is `C`.
+ *
+ * @group Type
+ */
+ def toTypeConstructor: Type
+
+ /** A type reference that refers to this type symbol seen
+ * as a member of given type `site`.
+ *
+ * @group Type
+ */
+ def toTypeIn(site: Type): Type
+
+ /** A type reference that refers to this type symbol
+ * Note if symbol is a member of a class, one almost always is interested
+ * in `asTypeIn` with a site type instead.
+ *
+ * Example: Given a class declaration `class C[T] { ... } `, that generates a symbol
+ * `C`. Then `C.toType` is the type `C[T]`.
+ *
+ * By contrast, `C.typeSignature` would be a type signature of form
+ * `PolyType(ClassInfoType(...))` that describes type parameters, value
+ * parameters, parent types, and members of `C`.
+ *
+ * @group Type
+ */
+ def toType: Type
+
+ final override def isType = true
+ final override def asType = this
+
+ /** Is the type parameter represented by this symbol contravariant?
+ *
+ * @group Type
+ */
+ def isContravariant : Boolean
+
+ /** Is the type parameter represented by this symbol covariant?
+ *
+ * @group Type
+ */
+ def isCovariant : Boolean
+
+ /** Does this symbol represent the definition of a skolem?
+ * Skolems are used during typechecking to represent type parameters viewed from inside their scopes.
+ *
+ * @group Type
+ */
+ def isSkolem : Boolean
+
+ /** Does this symbol represent the definition of a type alias?
+ *
+ * @group Type
+ */
+ def isAliasType : Boolean
+
+ /** Does this symbol represent the definition of an abstract type?
+ *
+ * @group Type
+ */
+ def isAbstractType : Boolean
+
+ /** Does this symbol represent an existentially bound type?
+ *
+ * @group Type
+ */
+ def isExistential : Boolean
+
+ /** For a polymorphic type, its type parameters, the empty list for all other types
+ *
+ * @group Type
+ */
+ def typeParams: List[Symbol]
+ }
+
+ /** The API of method symbols.
+ * The main source of information about symbols is the [[Symbols]] page.
+ *
+ * $SYMACCESSORS
+ * @group API
+ */
+ trait MethodSymbolApi extends TermSymbolApi { this: MethodSymbol =>
+ final override def isMethod = true
+ final override def asMethod = this
+
+ /** Does this method represent a constructor?
+ *
+ * If `owner` is a class, then this is a vanilla JVM constructor.
+ * If `owner` is a trait, then this is a mixin constructor.
+ *
+ * @group Method
+ */
+ def isConstructor: Boolean
+
+ /** Does this symbol denote the primary constructor of its enclosing class?
+ *
+ * @group Method
+ */
+ def isPrimaryConstructor: Boolean
+
+ /** For a polymorphic method, its type parameters, the empty list for all other methods
+ *
+ * @group Method
+ */
+ def typeParams: List[Symbol]
+
+ /** All parameter lists of the method.
+ * The name ending with "ss" indicates that the result type is a list of lists.
+ *
+ * Can be used to distinguish nullary methods and methods with empty parameter lists.
+ * For a nullary method, returns the empty list (i.e. `List()`).
+ * For a method with an empty parameter list, returns a list that contains the empty list (i.e. `List(List())`).
+ *
+ * @group Method
+ */
+ def paramss: List[List[Symbol]]
+
+ /** Does this method support variable length argument lists?
+ *
+ * @group Method
+ */
+ def isVarargs: Boolean
+
+ /** The return type of the method
+ *
+ * @group Method
+ */
+ def returnType: Type
+ }
+
+ /** The API of module symbols.
+ * The main source of information about symbols is the [[Symbols]] page.
+ *
+ * $SYMACCESSORS
+ * @group API
+ */
+ trait ModuleSymbolApi extends TermSymbolApi { this: ModuleSymbol =>
+ /** The class implicitly associated with the object definition.
+ * One can go back from a module class to the associated module symbol
+ * by inspecting its `selfType.termSymbol`.
+ *
+ * @group Module
+ */
+ def moduleClass: Symbol // needed for tree traversals
+ // when this becomes `moduleClass: ClassSymbol`, it will be the happiest day in my life
+
+ final override def isModule = true
+ final override def asModule = this
+ }
+
+ /** The API of class symbols.
+ * The main source of information about symbols is the [[Symbols]] page.
+ *
+ * $SYMACCESSORS
+ * @group API
+ */
+ trait ClassSymbolApi extends TypeSymbolApi { this: ClassSymbol =>
+ final override def isClass = true
+ final override def asClass = this
+
+ /** Does this symbol represent the definition of a primitive class?
+ * Namely, is it one of [[scala.Double]], [[scala.Float]], [[scala.Long]], [[scala.Int]], [[scala.Char]],
+ * [[scala.Short]], [[scala.Byte]], [[scala.Unit]] or [[scala.Boolean]]?
+ *
+ * @group Class
+ */
+ def isPrimitive: Boolean
+
+ /** Does this symbol represent the definition of a numeric value class?
+ * Namely, is it one of [[scala.Double]], [[scala.Float]], [[scala.Long]], [[scala.Int]], [[scala.Char]],
+ * [[scala.Short]], [[scala.Byte]], [[scala.Unit]] or [[scala.Boolean]]?
+ *
+ * @group Class
+ */
+ def isNumeric: Boolean
+
+ /** Does this symbol represent the definition of a custom value class?
+ * Namely, is AnyVal among its parent classes?
+ *
+ * @group Class
+ */
+ def isDerivedValueClass: Boolean
+
+ /** Does this symbol represent a trait?
+ *
+ * @group Class
+ */
+ def isTrait: Boolean
+
+ /** Does this symbol represent an abstract class?
+ *
+ * @group Class
+ */
+ def isAbstractClass: Boolean
+
+ /** Does this symbol represent a case class?
+ *
+ * @group Class
+ */
+ def isCaseClass: Boolean
+
+ /** Does this symbol represent a sealed class?
+ *
+ * @group Class
+ */
+ def isSealed: Boolean
+
+ /** If this is a sealed class, its known direct subclasses.
+ * Otherwise, the empty set.
+ *
+ * @group Class
+ */
+ def knownDirectSubclasses: Set[Symbol]
+
+ /** The list of all base classes of this type (including its own typeSymbol)
+ * in linearization order, starting with the class itself and ending
+ * in class Any.
+ *
+ * @group Class
+ */
+ def baseClasses: List[Symbol]
+
+ /** The module corresponding to this module class,
+ * or NoSymbol if this symbol is not a module class.
+ *
+ * @group Class
+ */
+ def module: Symbol
+
+ /** If this symbol is a class or trait, its self type, otherwise the type
+ * of the symbol itself.
+ *
+ * @group Class
+ */
+ def selfType: Type
+
+ /** The type `C.this`, where `C` is the current class
+ *
+ * @group Class
+ */
+ def thisPrefix: Type
+
+ /** For a polymorphic class/trait, its type parameters, the empty list for all other classes/traits
+ *
+ * @group Class
+ */
+ def typeParams: List[Symbol]
+ }
+
+ /** The API of free term symbols.
+ * The main source of information about symbols is the [[Symbols]] page.
+ *
+ * $SYMACCESSORS
+ * @group API
+ */
+ trait FreeTermSymbolApi extends TermSymbolApi { this: FreeTermSymbol =>
+ final override def isFreeTerm = true
+ final override def asFreeTerm = this
+
+ /** The place where this symbol has been spawned
+ *
+ * @group FreeTerm
+ */
+ def origin: String
+
+ /** The value this symbol refers to
+ *
+ * @group FreeTerm
+ */
+ def value: Any
+ }
+
+ /** The API of free type symbols.
+ * The main source of information about symbols is the [[Symbols]] page.
+ *
+ * $SYMACCESSORS
+ * @group API
+ */
+ trait FreeTypeSymbolApi extends TypeSymbolApi { this: FreeTypeSymbol =>
+ final override def isFreeType = true
+ final override def asFreeType = this
+
+ /** The place where this symbol has been spawned
+ *
+ * @group FreeType
+ */
+ def origin: String
+ }
+}
diff --git a/src/reflect/scala/reflect/api/TagInterop.scala b/src/reflect/scala/reflect/api/TagInterop.scala
new file mode 100644
index 0000000..5de8115
--- /dev/null
+++ b/src/reflect/scala/reflect/api/TagInterop.scala
@@ -0,0 +1,43 @@
+package scala.reflect
+package api
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * This trait provides type tag <-> manifest interoperability.
+ * @group ReflectionAPI
+ *
+ * @groupname TagInterop TypeTag and Manifest Interoperability
+ */
+trait TagInterop { self: Universe =>
+ // TODO `mirror` parameters are now of type `Any`, because I can't make these path-dependent types work
+ // if you're brave enough, replace `Any` with `Mirror`, recompile and run interop_typetags_are_manifests.scala
+
+ /**
+ * Convert a [[scala.reflect.api.TypeTags#TypeTag]] to a [[scala.reflect.Manifest]].
+ *
+ * Compiler usually generates these conversions automatically, when a type tag for a type `T` is in scope,
+ * and an implicit of type `Manifest[T]` is requested, but this method can also be called manually.
+ * For example:
+ * {{{
+ * typeTagToManifest(scala.reflect.runtime.currentMirror, implicitly[TypeTag[String]])
+ * }}}
+ * @group TagInterop
+ */
+ def typeTagToManifest[T: ClassTag](mirror: Any, tag: Universe#TypeTag[T]): Manifest[T] =
+ throw new UnsupportedOperationException("This universe does not support tag -> manifest conversions. Use a JavaUniverse, e.g. the scala.reflect.runtime.universe.")
+
+ /**
+ * Convert a [[scala.reflect.Manifest]] to a [[scala.reflect.api.TypeTags#TypeTag]].
+ *
+ * Compiler usually generates these conversions automatically, when a manifest for a type `T` is in scope,
+ * and an implicit of type `TypeTag[T]` is requested, but this method can also be called manually.
+ * For example:
+ * {{{
+ * manifestToTypeTag(scala.reflect.runtime.currentMirror, implicitly[Manifest[String]])
+ * }}}
+ * @group TagInterop
+ */
+ def manifestToTypeTag[T](mirror: Any, manifest: Manifest[T]): Universe#TypeTag[T] =
+ throw new UnsupportedOperationException("This universe does not support manifest -> tag conversions. Use a JavaUniverse, e.g. the scala.reflect.runtime.universe.")
+}
diff --git a/src/reflect/scala/reflect/api/TreeCreator.scala b/src/reflect/scala/reflect/api/TreeCreator.scala
new file mode 100644
index 0000000..6969418
--- /dev/null
+++ b/src/reflect/scala/reflect/api/TreeCreator.scala
@@ -0,0 +1,12 @@
+package scala.reflect
+package api
+
+/** This is an internal implementation class.
+ *
+ * This class is used internally by Scala Reflection, and is not recommended for use in client code.
+ *
+ * @group ReflectionAPI
+ */
+abstract class TreeCreator {
+ def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Tree
+}
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
new file mode 100644
index 0000000..0937a93
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -0,0 +1,3021 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.reflect
+package api
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * This trait defines the node types used in Scala abstract syntax trees (AST) and operations on them.
+ *
+ * Trees are the basis for Scala's abstract syntax that is used to represent programs. They are also called
+ * abstract syntax trees and commonly abbreviated as ASTs.
+ *
+ * In Scala reflection, APIs that produce or use `Tree`s are:
+ *
+ * - '''Annotations''' which use trees to represent their arguments, exposed in [[scala.reflect.api.Annotations#scalaArgs Annotation.scalaArgs]].
+ * - '''[[scala.reflect.api.Universe#reify reify]]''', a special method on [[scala.reflect.api.Universe]] that takes an expression and returns an AST which represents the expression.
+ * - '''Macros and runtime compilation with toolboxes''' which both use trees as their program representation medium.
+ *
+ * Trees are immutable, except for three fields
+ * [[Trees#TreeApi.pos pos]], [[Trees#TreeApi.symbol symbol]], and [[Trees#TreeApi.tpe tpe]], which are assigned when a tree is typechecked
+ * to attribute it with the information gathered by the typechecker.
+ *
+ * === Examples ===
+ *
+ * The following creates an AST representing a literal 5 in Scala source code:
+ * {{{
+ * Literal(Constant(5))
+ * }}}
+ *
+ * The following creates an AST representing `print("Hello World")`:
+ * {{{
+ * Apply(Select(Select(This(newTypeName("scala")), newTermName("Predef")), newTermName("print")), List(Literal(Constant("Hello World"))))
+ * }}}
+ *
+ * The following creates an AST from a literal 5, and then uses `showRaw` to print it in a readable format.
+ * {{{
+ * import scala.reflect.runtime.universe.{ reify, showRaw }
+ * print( showRaw( reify{5}.tree ) ) // prints Literal(Constant(5))
+ * }}}
+ *
+ * For more information about `Tree`s, see the [[http://docs.scala-lang.org/overviews/reflection/symbols-trees-types.html Reflection Guide: Symbols, Trees, Types]].
+ *
+ * @groupname Traversal Tree Traversal and Transformation
+ * @groupprio Traversal 1
+ * @groupprio Factories 1
+ * @groupname Copying Tree Copying
+ * @groupprio Copying 1
+ *
+ * @contentDiagram hideNodes "*Api"
+ * @group ReflectionAPI
+ */
+trait Trees { self: Universe =>
+
+ /** The type of Scala abstract syntax trees.
+ * @group Trees
+ * @template
+ */
+ type Tree >: Null <: TreeApi
+
+ /** A tag that preserves the identity of the `Tree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TreeTag: ClassTag[Tree]
+
+ /** The API that all trees support.
+ * The main source of information about trees is the [[scala.reflect.api.Trees]] page.
+ * @group API
+ */
+ trait TreeApi extends Product { this: Tree =>
+ /** Does this tree represent a definition? (of a method, of a class, etc) */
+ def isDef: Boolean
+
+ /** Is this tree one of the empty trees?
+ * Empty trees are: the `EmptyTree` null object, `TypeTree` instances that don't carry a type
+ * and the special `emptyValDef` singleton.
+ */
+ def isEmpty: Boolean
+
+ /** The canonical way to test if a Tree represents a term.
+ */
+ def isTerm: Boolean
+
+ /** The canonical way to test if a Tree represents a type.
+ */
+ def isType: Boolean
+
+ /** Position of the tree. */
+ def pos: Position
+
+ /** Type of the tree.
+ *
+ * Upon creation most trees have their `tpe` set to `null`.
+ * Types are typically assigned to trees during typechecking.
+ * Some node factory methods set `tpe` immediately after creation.
+ *
+ * When the typechecker encounters a tree with a non-null tpe,
+ * it will assume it to be correct and not check it again. This means one has
+ * to be careful not to erase the `tpe` field of subtrees.
+ */
+ def tpe: Type
+
+ /** Symbol of the tree.
+ *
+ * For most trees symbol is `null`. In `SymTree`s,
+ * it is overridden and implemented with a var, initialized to `NoSymbol`.
+ *
+ * Trees which are not `SymTree`s but which carry symbols do so by
+ * overriding `def symbol` to forward it elsewhere. Examples:
+ *
+ * - `Super(qual, _)` has `qual`'s symbol,
+ * - `Apply(fun, args)` has `fun`'s symbol,
+ * - `TypeApply(fun, args)` has `fun`'s symbol,
+ * - `AppliedTypeTree(tpt, args)` has `tpt`'s symbol,
+ * - `TypeTree(tpe)` has `tpe`'s `typeSymbol`, if `tpe != null`.
+ */
+ def symbol: Symbol
+
+ /** Provides an alternate if tree is empty
+ * @param alt The alternate tree
+ * @return If this tree is non empty, this tree, otherwise `alt`.
+ */
+ def orElse(alt: => Tree): Tree
+
+ /** Apply `f` to each subtree */
+ def foreach(f: Tree => Unit): Unit
+
+ /** Find all subtrees matching predicate `p`. Same as `filter` */
+ def withFilter(f: Tree => Boolean): List[Tree]
+
+ /** Find all subtrees matching predicate `p`. Same as `withFilter` */
+ def filter(f: Tree => Boolean): List[Tree]
+
+ /** Apply `pf` to each subtree on which the function is defined and collect the results.
+ */
+ def collect[T](pf: PartialFunction[Tree, T]): List[T]
+
+ /** Returns optionally first tree (in a preorder traversal) which satisfies predicate `p`,
+ * or None if none exists.
+ */
+ def find(p: Tree => Boolean): Option[Tree]
+
+ /** Does there exist a part of this tree which satisfies predicate `p`? */
+ def exists(p: Tree => Boolean): Boolean
+
+ /** Do all parts of this tree satisfy predicate `p`? */
+ def forAll(p: Tree => Boolean): Boolean
+
+ /** Tests whether two trees are structurally equal.
+ * Note that `==` on trees is reference equality.
+ */
+ def equalsStructure(that : Tree): Boolean
+
+ /** The direct child trees of this tree.
+ * EmptyTrees are always omitted. Lists are flattened.
+ */
+ def children: List[Tree]
+
+ /** Extracts free term symbols from a tree that is reified or contains reified subtrees.
+ */
+ def freeTerms: List[FreeTermSymbol]
+
+ /** Extracts free type symbols from a tree that is reified or contains reified subtrees.
+ */
+ def freeTypes: List[FreeTypeSymbol]
+
+ /** Substitute symbols in `to` for corresponding occurrences of references to
+ * symbols `from` in this tree.
+ */
+ def substituteSymbols(from: List[Symbol], to: List[Symbol]): Tree
+
+ /** Substitute types in `to` for corresponding occurrences of references to
+ * symbols `from` in this tree.
+ */
+ def substituteTypes(from: List[Symbol], to: List[Type]): Tree
+
+ /** Substitute given tree `to` for occurrences of nodes that represent
+ * `C.this`, where `C` refers to the given class `clazz`.
+ */
+ def substituteThis(clazz: Symbol, to: Tree): Tree
+
+ /** Make a copy of this tree, keeping all attributes,
+ * except that all positions are focused (so nothing
+ * in this tree will be found when searching by position).
+ */
+ def duplicate: this.type
+
+ /** Obtains string representation of a tree */
+ override def toString: String = treeToString(this)
+ }
+
+ /** Obtains string representation of a tree
+ * @group Trees
+ */
+ protected def treeToString(tree: Tree): String
+
+ /** The empty tree
+ * @group Trees
+ */
+ val EmptyTree: Tree
+
+ /** A tree for a term. Not all trees representing terms are TermTrees; use isTerm
+ * to reliably identify terms.
+ * @group Trees
+ * @template
+ */
+ type TermTree >: Null <: AnyRef with Tree with TermTreeApi
+
+ /** A tag that preserves the identity of the `TermTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TermTreeTag: ClassTag[TermTree]
+
+ /** The API that all term trees support
+ * @group API
+ */
+ trait TermTreeApi extends TreeApi { this: TermTree =>
+ }
+
+ /** A tree for a type. Not all trees representing types are TypTrees; use isType
+ * to reliably identify types.
+ * @group Trees
+ * @template
+ */
+ type TypTree >: Null <: AnyRef with Tree with TypTreeApi
+
+ /** A tag that preserves the identity of the `TypTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TypTreeTag: ClassTag[TypTree]
+
+ /** The API that all typ trees support
+ * @group API
+ */
+ trait TypTreeApi extends TreeApi { this: TypTree =>
+ }
+
+ /** A tree with a mutable symbol field, initialized to NoSymbol.
+ * @group Trees
+ * @template
+ */
+ type SymTree >: Null <: AnyRef with Tree with SymTreeApi
+
+ /** A tag that preserves the identity of the `SymTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val SymTreeTag: ClassTag[SymTree]
+
+ /** The API that all sym trees support
+ * @group API
+ */
+ trait SymTreeApi extends TreeApi { this: SymTree =>
+ /** @inheritdoc */
+ def symbol: Symbol
+ }
+
+ /** A tree with a name - effectively, a DefTree or RefTree.
+ * @group Trees
+ * @template
+ */
+ type NameTree >: Null <: AnyRef with Tree with NameTreeApi
+
+ /** A tag that preserves the identity of the `NameTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val NameTreeTag: ClassTag[NameTree]
+
+ /** The API that all name trees support
+ * @group API
+ */
+ trait NameTreeApi extends TreeApi { this: NameTree =>
+ /** The underlying name.
+ * For example, the `<List>` part of `Ident("List": TermName)`.
+ */
+ def name: Name
+ }
+
+ /** A tree which references a symbol-carrying entity.
+ * References one, as opposed to defining one; definitions
+ * are in DefTrees.
+ * @group Trees
+ * @template
+ */
+ type RefTree >: Null <: SymTree with NameTree with RefTreeApi
+
+ /** A tag that preserves the identity of the `RefTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val RefTreeTag: ClassTag[RefTree]
+
+ /** The API that all ref trees support
+ * @group API
+ */
+ trait RefTreeApi extends SymTreeApi with NameTreeApi { this: RefTree =>
+ /** The qualifier of the reference.
+ * For example, the `<scala>` part of `Select("scala": TermName, "List": TermName)`.
+ * `EmptyTree` for `Ident` instances.
+ */
+ def qualifier: Tree
+
+ /** @inheritdoc */
+ def name: Name
+ }
+
+ /** A tree which defines a symbol-carrying entity.
+ * @group Trees
+ * @template
+ */
+ type DefTree >: Null <: SymTree with NameTree with DefTreeApi
+
+ /** A tag that preserves the identity of the `DefTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val DefTreeTag: ClassTag[DefTree]
+
+ /** The API that all def trees support
+ * @group API
+ */
+ trait DefTreeApi extends SymTreeApi with NameTreeApi { this: DefTree =>
+ /** @inheritdoc */
+ def name: Name
+ }
+
+ /** Common base class for all member definitions: types, classes,
+ * objects, packages, vals and vars, defs.
+ * @group Trees
+ * @template
+ */
+ type MemberDef >: Null <: DefTree with MemberDefApi
+
+ /** A tag that preserves the identity of the `MemberDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val MemberDefTag: ClassTag[MemberDef]
+
+ /** The API that all member defs support
+ * @group API
+ */
+ trait MemberDefApi extends DefTreeApi { this: MemberDef =>
+ /** Modifiers of the declared member. */
+ def mods: Modifiers
+ }
+
+ /** A packaging, such as `package pid { stats }`
+ * @group Trees
+ * @template
+ */
+ type PackageDef >: Null <: MemberDef with PackageDefApi
+
+ /** A tag that preserves the identity of the `PackageDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val PackageDefTag: ClassTag[PackageDef]
+
+ /** The constructor/extractor for `PackageDef` instances.
+ * @group Extractors
+ */
+ val PackageDef: PackageDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `PackageDef(pid, stats)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `package` pid { stats }
+ * @group Extractors
+ */
+ abstract class PackageDefExtractor {
+ def apply(pid: RefTree, stats: List[Tree]): PackageDef
+ def unapply(packageDef: PackageDef): Option[(RefTree, List[Tree])]
+ }
+
+ /** The API that all package defs support
+ * @group API
+ */
+ trait PackageDefApi extends MemberDefApi { this: PackageDef =>
+ /** The (possibly, fully-qualified) name of the package. */
+ def pid: RefTree
+
+ /** Body of the package definition. */
+ def stats: List[Tree]
+ }
+
+ /** A common base class for class and object definitions.
+ * @group Trees
+ * @template
+ */
+ type ImplDef >: Null <: MemberDef with ImplDefApi
+
+ /** A tag that preserves the identity of the `ImplDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ImplDefTag: ClassTag[ImplDef]
+
+ /** The API that all impl defs support
+ * @group API
+ */
+ trait ImplDefApi extends MemberDefApi { this: ImplDef =>
+ /** The body of the definition. */
+ def impl: Template
+ }
+
+ /** A class definition.
+ * @group Trees
+ * @template
+ */
+ type ClassDef >: Null <: ImplDef with ClassDefApi
+
+ /** A tag that preserves the identity of the `ClassDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ClassDefTag: ClassTag[ClassDef]
+
+ /** The constructor/extractor for `ClassDef` instances.
+ * @group Extractors
+ */
+ val ClassDef: ClassDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `ClassDef(mods, name, tparams, impl)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * mods `class` name [tparams] impl
+ *
+ * Where impl stands for:
+ *
+ * `extends` parents { defs }
+ * @group Extractors
+ */
+ abstract class ClassDefExtractor {
+ def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef], impl: Template): ClassDef
+ def unapply(classDef: ClassDef): Option[(Modifiers, TypeName, List[TypeDef], Template)]
+ }
+
+ /** The API that all class defs support
+ * @group API
+ */
+ trait ClassDefApi extends ImplDefApi { this: ClassDef =>
+ /** @inheritdoc */
+ def mods: Modifiers
+
+ /** The name of the class. */
+ def name: TypeName
+
+ /** The type parameters of the class. */
+ def tparams: List[TypeDef]
+
+ /** @inheritdoc */
+ def impl: Template
+ }
+
+ /** An object definition, e.g. `object Foo`. Internally, objects are
+ * quite frequently called modules to reduce ambiguity.
+ * Eliminated by compiler phase refcheck.
+ * @group Trees
+ * @template
+ */
+ type ModuleDef >: Null <: ImplDef with ModuleDefApi
+
+ /** A tag that preserves the identity of the `ModuleDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ModuleDefTag: ClassTag[ModuleDef]
+
+ /** The constructor/extractor for `ModuleDef` instances.
+ * @group Extractors
+ */
+ val ModuleDef: ModuleDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `ModuleDef(mods, name, impl)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * mods `object` name impl
+ *
+ * Where impl stands for:
+ *
+ * `extends` parents { defs }
+ * @group Extractors
+ */
+ abstract class ModuleDefExtractor {
+ def apply(mods: Modifiers, name: TermName, impl: Template): ModuleDef
+ def unapply(moduleDef: ModuleDef): Option[(Modifiers, TermName, Template)]
+ }
+
+ /** The API that all module defs support
+ * @group API
+ */
+ trait ModuleDefApi extends ImplDefApi { this: ModuleDef =>
+ /** @inheritdoc */
+ def mods: Modifiers
+
+ /** The name of the module. */
+ def name: TermName
+
+ /** @inheritdoc */
+ def impl: Template
+ }
+
+ /** A common base class for ValDefs and DefDefs.
+ * @group Trees
+ * @template
+ */
+ type ValOrDefDef >: Null <: MemberDef with ValOrDefDefApi
+
+ /** A tag that preserves the identity of the `ValOrDefDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ValOrDefDefTag: ClassTag[ValOrDefDef]
+
+ /** The API that all val defs and def defs support
+ * @group API
+ */
+ trait ValOrDefDefApi extends MemberDefApi { this: ValOrDefDef =>
+ /** @inheritdoc */
+ def name: Name // can't be a TermName because macros can be type names.
+
+ /** The type ascribed to the definition.
+ * An empty `TypeTree` if the type hasn't been specified explicitly
+ * and is supposed to be inferred.
+ */
+ def tpt: Tree
+
+ /** The body of the definition.
+ * The `EmptyTree` if the body is empty (e.g. for abstract members).
+ */
+ def rhs: Tree
+ }
+
+ /** Broadly speaking, a value definition. All these are encoded as ValDefs:
+ *
+ * - immutable values, e.g. "val x"
+ * - mutable values, e.g. "var x" - the MUTABLE flag set in mods
+ * - lazy values, e.g. "lazy val x" - the LAZY flag set in mods
+ * - method parameters, see vparamss in [[scala.reflect.api.Trees#DefDef]] - the PARAM flag is set in mods
+ * - explicit self-types, e.g. class A { self: Bar => }
+ * @group Trees
+ * @template
+ */
+ type ValDef >: Null <: ValOrDefDef with ValDefApi
+
+ /** A tag that preserves the identity of the `ValDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ValDefTag: ClassTag[ValDef]
+
+ /** The constructor/extractor for `ValDef` instances.
+ * @group Extractors
+ */
+ val ValDef: ValDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `ValDef(mods, name, tpt, rhs)`.
+ * This AST node corresponds to any of the following Scala code:
+ *
+ * mods `val` name: tpt = rhs
+ *
+ * mods `var` name: tpt = rhs
+ *
+ * mods name: tpt = rhs // in signatures of function and method definitions
+ *
+ * self: Bar => // self-types
+ *
+ * If the type of a value is not specified explicitly (i.e. is meant to be inferred),
+ * this is expressed by having `tpt` set to `TypeTree()` (but not to an `EmptyTree`!).
+ * @group Extractors
+ */
+ abstract class ValDefExtractor {
+ def apply(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree): ValDef
+ def unapply(valDef: ValDef): Option[(Modifiers, TermName, Tree, Tree)]
+ }
+
+ /** The API that all val defs support
+ * @group API
+ */
+ trait ValDefApi extends ValOrDefDefApi { this: ValDef =>
+ /** @inheritdoc */
+ def mods: Modifiers
+
+ /** @inheritdoc */
+ def name: TermName
+
+ /** @inheritdoc */
+ def tpt: Tree
+
+ /** @inheritdoc */
+ def rhs: Tree
+ }
+
+ /** A method or macro definition.
+ * @param name The name of the method or macro. Can be a type name in case this is a type macro
+ * @group Trees
+ * @template
+ */
+ type DefDef >: Null <: ValOrDefDef with DefDefApi
+
+ /** A tag that preserves the identity of the `DefDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val DefDefTag: ClassTag[DefDef]
+
+ /** The constructor/extractor for `DefDef` instances.
+ * @group Extractors
+ */
+ val DefDef: DefDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `DefDef(mods, name, tparams, vparamss, tpt, rhs)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * mods `def` name[tparams](vparams_1)...(vparams_n): tpt = rhs
+ *
+ * If the return type is not specified explicitly (i.e. is meant to be inferred),
+ * this is expressed by having `tpt` set to `TypeTree()` (but not to an `EmptyTree`!).
+ * @group Extractors
+ */
+ abstract class DefDefExtractor {
+ def apply(mods: Modifiers, name: Name, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef
+ def unapply(defDef: DefDef): Option[(Modifiers, Name, List[TypeDef], List[List[ValDef]], Tree, Tree)]
+ }
+
+ /** The API that all def defs support
+ * @group API
+ */
+ trait DefDefApi extends ValOrDefDefApi { this: DefDef =>
+ /** @inheritdoc */
+ def mods: Modifiers
+
+ /** @inheritdoc */
+ def name: Name
+
+ /** The type parameters of the method. */
+ def tparams: List[TypeDef]
+
+ /** The parameter lists of the method. */
+ def vparamss: List[List[ValDef]]
+
+ /** @inheritdoc */
+ def tpt: Tree
+
+ /** @inheritdoc */
+ def rhs: Tree
+ }
+
+ /** An abstract type, a type parameter, or a type alias.
+ * Eliminated by erasure.
+ * @group Trees
+ * @template
+ */
+ type TypeDef >: Null <: MemberDef with TypeDefApi
+
+ /** A tag that preserves the identity of the `TypeDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TypeDefTag: ClassTag[TypeDef]
+
+ /** The constructor/extractor for `TypeDef` instances.
+ * @group Extractors
+ */
+ val TypeDef: TypeDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `TypeDef(mods, name, tparams, rhs)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * mods `type` name[tparams] = rhs
+ *
+ * mods `type` name[tparams] >: lo <: hi
+ *
+ * First usage illustrates `TypeDefs` representing type aliases and type parameters.
+ * Second usage illustrates `TypeDefs` representing abstract types,
+ * where lo and hi are both `TypeBoundsTrees` and `Modifier.deferred` is set in mods.
+ * @group Extractors
+ */
+ abstract class TypeDefExtractor {
+ def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef], rhs: Tree): TypeDef
+ def unapply(typeDef: TypeDef): Option[(Modifiers, TypeName, List[TypeDef], Tree)]
+ }
+
+ /** The API that all type defs support
+ * @group API
+ */
+ trait TypeDefApi extends MemberDefApi { this: TypeDef =>
+ /** @inheritdoc */
+ def mods: Modifiers
+
+ /** @inheritdoc */
+ def name: TypeName
+
+ /** The type parameters of this type definition. */
+ def tparams: List[TypeDef]
+
+ /** The body of the definition.
+ * The `EmptyTree` if the body is empty (e.g. for abstract type members).
+ */
+ def rhs: Tree
+ }
+
+ /** A labelled expression. Not expressible in language syntax, but
+ * generated by the compiler to simulate while/do-while loops, and
+ * also by the pattern matcher.
+ *
+ * The label acts much like a nested function, where `params` represents
+ * the incoming parameters. The symbol given to the LabelDef should have
+ * a MethodType, as if it were a nested function.
+ *
+ * Jumps are apply nodes attributed with a label's symbol. The
+ * arguments from the apply node will be passed to the label and
+ * assigned to the Idents.
+ *
+ * Forward jumps within a block are allowed.
+ * @group Trees
+ * @template
+ */
+ type LabelDef >: Null <: DefTree with TermTree with LabelDefApi
+
+ /** A tag that preserves the identity of the `LabelDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val LabelDefTag: ClassTag[LabelDef]
+
+ /** The constructor/extractor for `LabelDef` instances.
+ * @group Extractors
+ */
+ val LabelDef: LabelDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `LabelDef(name, params, rhs)`.
+ *
+ * This AST node does not have direct correspondence to Scala code.
+ * It is used for tailcalls and like.
+ * For example, while/do are desugared to label defs as follows:
+ * {{{
+ * while (cond) body ==> LabelDef(\$L, List(), if (cond) { body; L\$() } else ())
+ * }}}
+ * {{{
+ * do body while (cond) ==> LabelDef(\$L, List(), body; if (cond) L\$() else ())
+ * }}}
+ * @group Extractors
+ */
+ abstract class LabelDefExtractor {
+ def apply(name: TermName, params: List[Ident], rhs: Tree): LabelDef
+ def unapply(labelDef: LabelDef): Option[(TermName, List[Ident], Tree)]
+ }
+
+ /** The API that all label defs support
+ * @group API
+ */
+ trait LabelDefApi extends DefTreeApi with TermTreeApi { this: LabelDef =>
+ /** @inheritdoc */
+ def name: TermName
+
+ /** Label's parameters - names that can be used in the body of the label.
+ * See the example for [[scala.reflect.api.Trees#LabelDefExtractor]].
+ */
+ def params: List[Ident]
+
+ /** The body of the label.
+ * See the example for [[scala.reflect.api.Trees#LabelDefExtractor]].
+ */
+ def rhs: Tree
+ }
+
+ /** Import selector
+ *
+ * Representation of an imported name, its optional rename, and their optional positions
+ *
+ * Eliminated by typecheck.
+ *
+ * @param name the imported name
+ * @param namePos its position or -1 if undefined
+ * @param rename the name the import is renamed to (== name if no renaming)
+ * @param renamePos the position of the rename or -1 if undefined
+ * @group Trees
+ * @template
+ */
+ type ImportSelector >: Null <: AnyRef with ImportSelectorApi
+
+ /** A tag that preserves the identity of the `ImportSelector` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ImportSelectorTag: ClassTag[ImportSelector]
+
+ /** The constructor/extractor for `ImportSelector` instances.
+ * @group Extractors
+ */
+ val ImportSelector: ImportSelectorExtractor
+
+ /** An extractor class to create and pattern match with syntax `ImportSelector(name, namePos, rename, renamePos)`.
+ * This is not an AST node, it is used as a part of the `Import` node.
+ * @group Extractors
+ */
+ abstract class ImportSelectorExtractor {
+ def apply(name: Name, namePos: Int, rename: Name, renamePos: Int): ImportSelector
+ def unapply(importSelector: ImportSelector): Option[(Name, Int, Name, Int)]
+ }
+
+ /** The API that all import selectors support
+ * @group API
+ */
+ trait ImportSelectorApi { this: ImportSelector =>
+ /** The imported name. */
+ def name: Name
+
+ /** Offset of the position of the importing part of the selector in the source file.
+ * Is equal to -1 if the position is unknown.
+ */
+ def namePos: Int
+
+ /** The name the import is renamed to.
+ * Is equal to `name` if it's not a renaming import.
+ */
+ def rename: Name
+
+ /** Offset of the position of the renaming part of the selector in the source file.
+ * Is equal to -1 if the position is unknown.
+ */
+ def renamePos: Int
+ }
+
+ /** Import clause
+ *
+ * @param expr
+ * @param selectors
+ * @group Trees
+ * @template
+ */
+ type Import >: Null <: SymTree with ImportApi
+
+ /** A tag that preserves the identity of the `Import` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ImportTag: ClassTag[Import]
+
+ /** The constructor/extractor for `Import` instances.
+ * @group Extractors
+ */
+ val Import: ImportExtractor
+
+ /** An extractor class to create and pattern match with syntax `Import(expr, selectors)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * import expr.{selectors}
+ *
+ * Selectors are a list of ImportSelectors, which conceptually are pairs of names (from, to).
+ * The last (and maybe only name) may be a nme.WILDCARD. For instance:
+ *
+ * import qual.{x, y => z, _}
+ *
+ * Would be represented as:
+ *
+ * Import(qual, List(("x", "x"), ("y", "z"), (WILDCARD, null)))
+ *
+ * The symbol of an `Import` is an import symbol @see Symbol.newImport.
+ * It's used primarily as a marker to check that the import has been typechecked.
+ * @group Extractors
+ */
+ abstract class ImportExtractor {
+ def apply(expr: Tree, selectors: List[ImportSelector]): Import
+ def unapply(import_ : Import): Option[(Tree, List[ImportSelector])]
+ }
+
+ /** The API that all imports support
+ * @group API
+ */
+ trait ImportApi extends SymTreeApi { this: Import =>
+ /** The qualifier of the import.
+ * See the example for [[scala.reflect.api.Trees#ImportExtractor]].
+ */
+ def expr: Tree
+
+ /** The selectors of the import.
+ * See the example for [[scala.reflect.api.Trees#ImportExtractor]].
+ */
+ def selectors: List[ImportSelector]
+ }
+
+ /** Instantiation template of a class or trait
+ *
+ * @param parents
+ * @param body
+ * @group Trees
+ * @template
+ */
+ type Template >: Null <: SymTree with TemplateApi
+
+ /** A tag that preserves the identity of the `Template` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TemplateTag: ClassTag[Template]
+
+ /** The constructor/extractor for `Template` instances.
+ * @group Extractors
+ */
+ val Template: TemplateExtractor
+
+ /** An extractor class to create and pattern match with syntax `Template(parents, self, body)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `extends` parents { self => body }
+ *
+ * In case when the self-type annotation is missing, it is represented as
+ * an empty value definition with nme.WILDCARD as name and NoType as type.
+ *
+ * The symbol of a template is a local dummy. @see Symbol.newLocalDummy
+ * The owner of the local dummy is the enclosing trait or class.
+ * The local dummy is itself the owner of any local blocks. For example:
+ *
+ * class C {
+ * def foo { // owner is C
+ * def bar // owner is local dummy
+ * }
+ * }
+ * @group Extractors
+ */
+ abstract class TemplateExtractor {
+ def apply(parents: List[Tree], self: ValDef, body: List[Tree]): Template
+ def unapply(template: Template): Option[(List[Tree], ValDef, List[Tree])]
+ }
+
+ /** The API that all templates support
+ * @group API
+ */
+ trait TemplateApi extends SymTreeApi { this: Template =>
+ /** Superclasses of the template. */
+ def parents: List[Tree]
+
+ /** Self type of the template.
+ * Is equal to `emptyValDef` if the self type is not specified.
+ */
+ def self: ValDef
+
+ /** Body of the template.
+ */
+ def body: List[Tree]
+ }
+
+ /** Block of expressions (semicolon separated expressions)
+ * @group Trees
+ * @template
+ */
+ type Block >: Null <: TermTree with BlockApi
+
+ /** A tag that preserves the identity of the `Block` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val BlockTag: ClassTag[Block]
+
+ /** The constructor/extractor for `Block` instances.
+ * @group Extractors
+ */
+ val Block: BlockExtractor
+
+ /** An extractor class to create and pattern match with syntax `Block(stats, expr)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * { stats; expr }
+ *
+ * If the block is empty, the `expr` is set to `Literal(Constant(()))`.
+ * @group Extractors
+ */
+ abstract class BlockExtractor {
+ def apply(stats: List[Tree], expr: Tree): Block
+ def unapply(block: Block): Option[(List[Tree], Tree)]
+ }
+
+ /** The API that all blocks support
+ * @group API
+ */
+ trait BlockApi extends TermTreeApi { this: Block =>
+ /** All, but the last, expressions in the block.
+ * Can very well be an empty list.
+ */
+ def stats: List[Tree]
+
+ /** The last expression in the block. */
+ def expr: Tree
+ }
+
+ /** Case clause in a pattern match.
+ * Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher),
+ * except for occurrences in encoded switch statements.
+ * @group Trees
+ * @template
+ */
+ type CaseDef >: Null <: AnyRef with Tree with CaseDefApi
+
+ /** A tag that preserves the identity of the `CaseDef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val CaseDefTag: ClassTag[CaseDef]
+
+ /** The constructor/extractor for `CaseDef` instances.
+ * @group Extractors
+ */
+ val CaseDef: CaseDefExtractor
+
+ /** An extractor class to create and pattern match with syntax `CaseDef(pat, guard, body)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `case` pat `if` guard => body
+ *
+ * If the guard is not present, the `guard` is set to `EmptyTree`.
+ * If the body is not specified, the `body` is set to `Literal(Constant(()))`.
+ * @group Extractors
+ */
+ abstract class CaseDefExtractor {
+ def apply(pat: Tree, guard: Tree, body: Tree): CaseDef
+ def unapply(caseDef: CaseDef): Option[(Tree, Tree, Tree)]
+ }
+
+ /** The API that all case defs support
+ * @group API
+ */
+ trait CaseDefApi extends TreeApi { this: CaseDef =>
+ /** The pattern of the pattern matching clause. */
+ def pat: Tree
+
+ /** The guard of the pattern matching clause.
+ * Is equal to `EmptyTree` if the guard is not specified.
+ */
+ def guard: Tree
+
+ /** The body of the pattern matching clause.
+ * Is equal to `Literal(Constant(()))` if the body is not specified.
+ */
+ def body: Tree
+ }
+
+ /** Alternatives of patterns.
+ *
+ * Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher),
+ * except for
+ * occurrences in encoded Switch stmt (i.e. remaining Match(CaseDef(...)))
+ * @group Trees
+ * @template
+ */
+ type Alternative >: Null <: TermTree with AlternativeApi
+
+ /** A tag that preserves the identity of the `Alternative` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val AlternativeTag: ClassTag[Alternative]
+
+ /** The constructor/extractor for `Alternative` instances.
+ * @group Extractors
+ */
+ val Alternative: AlternativeExtractor
+
+ /** An extractor class to create and pattern match with syntax `Alternative(trees)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * pat1 | ... | patn
+ * @group Extractors
+ */
+ abstract class AlternativeExtractor {
+ def apply(trees: List[Tree]): Alternative
+ def unapply(alternative: Alternative): Option[List[Tree]]
+ }
+
+ /** The API that all alternatives support
+ * @group API
+ */
+ trait AlternativeApi extends TermTreeApi { this: Alternative =>
+ /** Alternatives of the pattern matching clause. */
+ def trees: List[Tree]
+ }
+
+ /** Repetition of pattern.
+ *
+ * Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
+ * @group Trees
+ * @template
+ */
+ type Star >: Null <: TermTree with StarApi
+
+ /** A tag that preserves the identity of the `Star` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val StarTag: ClassTag[Star]
+
+ /** The constructor/extractor for `Star` instances.
+ * @group Extractors
+ */
+ val Star: StarExtractor
+
+ /** An extractor class to create and pattern match with syntax `Star(elem)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * pat*
+ * @group Extractors
+ */
+ abstract class StarExtractor {
+ def apply(elem: Tree): Star
+ def unapply(star: Star): Option[Tree]
+ }
+
+ /** The API that all stars support
+ * @group API
+ */
+ trait StarApi extends TermTreeApi { this: Star =>
+ /** The quantified pattern. */
+ def elem: Tree
+ }
+
+ /** Bind a variable to a rhs pattern.
+ *
+ * Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
+ *
+ * @param name
+ * @param body
+ * @group Trees
+ * @template
+ */
+ type Bind >: Null <: DefTree with BindApi
+
+ /** A tag that preserves the identity of the `Bind` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val BindTag: ClassTag[Bind]
+
+ /** The constructor/extractor for `Bind` instances.
+ * @group Extractors
+ */
+ val Bind: BindExtractor
+
+ /** An extractor class to create and pattern match with syntax `Bind(name, body)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * name @ body
+ * @group Extractors
+ */
+ abstract class BindExtractor {
+ def apply(name: Name, body: Tree): Bind
+ def unapply(bind: Bind): Option[(Name, Tree)]
+ }
+
+ /** The API that all binds support
+ * @group API
+ */
+ trait BindApi extends DefTreeApi { this: Bind =>
+ /** The name that can be used to refer to this fragment of the matched expression.
+ * The `list` part of the `list @ List(x, y)`.
+ */
+ def name: Name
+
+ /** The pattern that represents this fragment of the matched expression.
+ * The `List(x, y)` part of the `list @ List(x, y)`.
+ * Is equal to `EmptyTree` if the pattern is not specified as in `case x => x`.
+ */
+ def body: Tree
+ }
+
+ /**
+ * Used to represent `unapply` methods in pattern matching.
+ *
+ * For example:
+ * {{{
+ * 2 match { case Foo(x) => x }
+ * }}}
+ *
+ * Is represented as:
+ * {{{
+ * Match(
+ * Literal(Constant(2)),
+ * List(
+ * CaseDef(
+ * UnApply(
+ * // a dummy node that carries the type of unapplication to patmat
+ * // the <unapply-selector> here doesn't have an underlying symbol
+ * // it only has a type assigned, therefore after `resetAllAttrs` this tree is no longer typeable
+ * Apply(Select(Ident(Foo), newTermName("unapply")), List(Ident(newTermName("<unapply-selector>")))),
+ * // arguments of the unapply => nothing synthetic here
+ * List(Bind(newTermName("x"), Ident(nme.WILDCARD)))),
+ * EmptyTree,
+ * Ident(newTermName("x")))))
+ * }}}
+ *
+ * Introduced by typer. Eliminated by compiler phases patmat (in the new pattern matcher of 2.10) or explicitouter (in the old pre-2.10 pattern matcher).
+ * @group Trees
+ * @template
+ */
+ type UnApply >: Null <: TermTree with UnApplyApi
+
+ /** A tag that preserves the identity of the `UnApply` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val UnApplyTag: ClassTag[UnApply]
+
+ /** The constructor/extractor for `UnApply` instances.
+ * @group Extractors
+ */
+ val UnApply: UnApplyExtractor
+
+ /** An extractor class to create and pattern match with syntax `UnApply(fun, args)`.
+ * This AST node does not have direct correspondence to Scala code,
+ * and is introduced when typechecking pattern matches and `try` blocks.
+ * @group Extractors
+ */
+ abstract class UnApplyExtractor {
+ def apply(fun: Tree, args: List[Tree]): UnApply
+ def unapply(unApply: UnApply): Option[(Tree, List[Tree])]
+ }
+
+ /** The API that all unapplies support
+ * @group API
+ */
+ trait UnApplyApi extends TermTreeApi { this: UnApply =>
+ /** A dummy node that carries the type of unapplication.
+ * See the example for [[scala.reflect.api.Trees#UnApplyExtractor]].
+ */
+ def fun: Tree
+
+ /** The arguments of the unapplication.
+ * See the example for [[scala.reflect.api.Trees#UnApplyExtractor]].
+ */
+ def args: List[Tree]
+ }
+
+ /** Anonymous function, eliminated by compiler phase lambdalift
+ * @group Trees
+ * @template
+ */
+ type Function >: Null <: TermTree with SymTree with FunctionApi
+
+ /** A tag that preserves the identity of the `Function` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val FunctionTag: ClassTag[Function]
+
+ /** The constructor/extractor for `Function` instances.
+ * @group Extractors
+ */
+ val Function: FunctionExtractor
+
+ /** An extractor class to create and pattern match with syntax `Function(vparams, body)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * vparams => body
+ *
+ * The symbol of a Function is a synthetic TermSymbol.
+ * It is the owner of the function's parameters.
+ * @group Extractors
+ */
+ abstract class FunctionExtractor {
+ def apply(vparams: List[ValDef], body: Tree): Function
+ def unapply(function: Function): Option[(List[ValDef], Tree)]
+ }
+
+ /** The API that all functions support
+ * @group API
+ */
+ trait FunctionApi extends TermTreeApi with SymTreeApi { this: Function =>
+ /** The list of parameters of the function.
+ */
+ def vparams: List[ValDef]
+
+ /** The body of the function.
+ */
+ def body: Tree
+ }
+
+ /** Assignment
+ * @group Trees
+ * @template
+ */
+ type Assign >: Null <: TermTree with AssignApi
+
+ /** A tag that preserves the identity of the `Assign` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val AssignTag: ClassTag[Assign]
+
+ /** The constructor/extractor for `Assign` instances.
+ * @group Extractors
+ */
+ val Assign: AssignExtractor
+
+ /** An extractor class to create and pattern match with syntax `Assign(lhs, rhs)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * lhs = rhs
+ * @group Extractors
+ */
+ abstract class AssignExtractor {
+ def apply(lhs: Tree, rhs: Tree): Assign
+ def unapply(assign: Assign): Option[(Tree, Tree)]
+ }
+
+ /** The API that all assigns support
+ * @group API
+ */
+ trait AssignApi extends TermTreeApi { this: Assign =>
+ /** The left-hand side of the assignment.
+ */
+ def lhs: Tree
+
+ /** The right-hand side of the assignment.
+ */
+ def rhs: Tree
+ }
+
+ /** Either an assignment or a named argument. Only appears in argument lists,
+ * eliminated by compiler phase typecheck (doTypedApply), resurrected by reifier.
+ * @group Trees
+ * @template
+ */
+ type AssignOrNamedArg >: Null <: TermTree with AssignOrNamedArgApi
+
+ /** A tag that preserves the identity of the `AssignOrNamedArg` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val AssignOrNamedArgTag: ClassTag[AssignOrNamedArg]
+
+ /** The constructor/extractor for `AssignOrNamedArg` instances.
+ * @group Extractors
+ */
+ val AssignOrNamedArg: AssignOrNamedArgExtractor
+
+ /** An extractor class to create and pattern match with syntax `AssignOrNamedArg(lhs, rhs)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * {{{
+ * m.f(lhs = rhs)
+ * }}}
+ * {{{
+ * @annotation(lhs = rhs)
+ * }}}
+ *
+ * @group Extractors
+ */
+ abstract class AssignOrNamedArgExtractor {
+ def apply(lhs: Tree, rhs: Tree): AssignOrNamedArg
+ def unapply(assignOrNamedArg: AssignOrNamedArg): Option[(Tree, Tree)]
+ }
+
+ /** The API that all assignments and named arguments support
+ * @group API
+ */
+ trait AssignOrNamedArgApi extends TermTreeApi { this: AssignOrNamedArg =>
+ /** The left-hand side of the expression.
+ */
+ def lhs: Tree
+
+ /** The right-hand side of the expression.
+ */
+ def rhs: Tree
+ }
+
+ /** Conditional expression
+ * @group Trees
+ * @template
+ */
+ type If >: Null <: TermTree with IfApi
+
+ /** A tag that preserves the identity of the `If` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val IfTag: ClassTag[If]
+
+ /** The constructor/extractor for `If` instances.
+ * @group Extractors
+ */
+ val If: IfExtractor
+
+ /** An extractor class to create and pattern match with syntax `If(cond, thenp, elsep)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `if` (cond) thenp `else` elsep
+ *
+ * If the alternative is not present, the `elsep` is set to `Literal(Constant(()))`.
+ * @group Extractors
+ */
+ abstract class IfExtractor {
+ def apply(cond: Tree, thenp: Tree, elsep: Tree): If
+ def unapply(if_ : If): Option[(Tree, Tree, Tree)]
+ }
+
+ /** The API that all ifs support
+ * @group API
+ */
+ trait IfApi extends TermTreeApi { this: If =>
+ /** The condition of the if.
+ */
+ def cond: Tree
+
+ /** The main branch of the if.
+ */
+ def thenp: Tree
+
+ /** The alternative of the if.
+ * Is equal to `Literal(Constant(()))` if not specified.
+ */
+ def elsep: Tree
+ }
+
+ /** - Pattern matching expression (before compiler phase explicitouter before 2.10 / patmat from 2.10)
+ * - Switch statements (after compiler phase explicitouter before 2.10 / patmat from 2.10)
+ *
+ * After compiler phase explicitouter before 2.10 / patmat from 2.10, cases will satisfy the following constraints:
+ *
+ * - all guards are `EmptyTree`,
+ * - all patterns will be either `Literal(Constant(x:Int))`
+ * or `Alternative(lit|...|lit)`
+ * - except for an "otherwise" branch, which has pattern
+ * `Ident(nme.WILDCARD)`
+ * @group Trees
+ * @template
+ */
+ type Match >: Null <: TermTree with MatchApi
+
+ /** A tag that preserves the identity of the `Match` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val MatchTag: ClassTag[Match]
+
+ /** The constructor/extractor for `Match` instances.
+ * @group Extractors
+ */
+ val Match: MatchExtractor
+
+ /** An extractor class to create and pattern match with syntax `Match(selector, cases)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * selector `match` { cases }
+ *
+ * `Match` is also used in pattern matching assignments like `val (foo, bar) = baz`.
+ * @group Extractors
+ */
+ abstract class MatchExtractor {
+ def apply(selector: Tree, cases: List[CaseDef]): Match
+ def unapply(match_ : Match): Option[(Tree, List[CaseDef])]
+ }
+
+ /** The API that all matches support
+ * @group API
+ */
+ trait MatchApi extends TermTreeApi { this: Match =>
+ /** The scrutinee of the pattern match. */
+ def selector: Tree
+
+ /** The arms of the pattern match. */
+ def cases: List[CaseDef]
+ }
+
+ /** Return expression
+ * @group Trees
+ * @template
+ */
+ type Return >: Null <: TermTree with SymTree with ReturnApi
+
+ /** A tag that preserves the identity of the `Return` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ReturnTag: ClassTag[Return]
+
+ /** The constructor/extractor for `Return` instances.
+ * @group Extractors
+ */
+ val Return: ReturnExtractor
+
+ /** An extractor class to create and pattern match with syntax `Return(expr)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `return` expr
+ *
+ * The symbol of a Return node is the enclosing method.
+ * @group Extractors
+ */
+ abstract class ReturnExtractor {
+ def apply(expr: Tree): Return
+ def unapply(return_ : Return): Option[Tree]
+ }
+
+ /** The API that all returns support
+ * @group API
+ */
+ trait ReturnApi extends TermTreeApi { this: Return =>
+ /** The returned expression. */
+ def expr: Tree
+ }
+
+ /** Try catch node
+ * @group Trees
+ * @template
+ */
+ type Try >: Null <: TermTree with TryApi
+
+ /** A tag that preserves the identity of the `Try` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TryTag: ClassTag[Try]
+
+ /** The constructor/extractor for `Try` instances.
+ * @group Extractors
+ */
+ val Try: TryExtractor
+
+ /** An extractor class to create and pattern match with syntax `Try(block, catches, finalizer)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `try` block `catch` { catches } `finally` finalizer
+ *
+ * If the finalizer is not present, the `finalizer` is set to `EmptyTree`.
+ * @group Extractors
+ */
+ abstract class TryExtractor {
+ def apply(block: Tree, catches: List[CaseDef], finalizer: Tree): Try
+ def unapply(try_ : Try): Option[(Tree, List[CaseDef], Tree)]
+ }
+
+ /** The API that all tries support
+ * @group API
+ */
+ trait TryApi extends TermTreeApi { this: Try =>
+ /** The protected block. */
+ def block: Tree
+
+ /** The `catch` pattern-matching clauses of the try. */
+ def catches: List[CaseDef]
+
+ /** The `finally` part of the try. */
+ def finalizer: Tree
+ }
+
+ /** Throw expression
+ * @group Trees
+ * @template
+ */
+ type Throw >: Null <: TermTree with ThrowApi
+
+ /** A tag that preserves the identity of the `Throw` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ThrowTag: ClassTag[Throw]
+
+ /** The constructor/extractor for `Throw` instances.
+ * @group Extractors
+ */
+ val Throw: ThrowExtractor
+
+ /** An extractor class to create and pattern match with syntax `Throw(expr)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `throw` expr
+ * @group Extractors
+ */
+ abstract class ThrowExtractor {
+ def apply(expr: Tree): Throw
+ def unapply(throw_ : Throw): Option[Tree]
+ }
+
+ /** The API that all throws support
+ * @group API
+ */
+ trait ThrowApi extends TermTreeApi { this: Throw =>
+ /** The thrown expression. */
+ def expr: Tree
+ }
+
+ /** Object instantiation
+ * @group Trees
+ * @template
+ */
+ type New >: Null <: TermTree with NewApi
+
+ /** A tag that preserves the identity of the `New` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val NewTag: ClassTag[New]
+
+ /** The constructor/extractor for `New` instances.
+ * @group Extractors
+ */
+ val New: NewExtractor
+
+ /** An extractor class to create and pattern match with syntax `New(tpt)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * `new` T
+ *
+ * This node always occurs in the following context:
+ *
+ * (`new` tpt).<init>[targs](args)
+ *
+ * For example, an AST representation of:
+ *
+ * new Example[Int](2)(3)
+ *
+ * is the following code:
+ *
+ * Apply(
+ * Apply(
+ * TypeApply(
+ * Select(New(TypeTree(typeOf[Example])), nme.CONSTRUCTOR)
+ * TypeTree(typeOf[Int])),
+ * List(Literal(Constant(2)))),
+ * List(Literal(Constant(3))))
+ * @group Extractors
+ */
+ abstract class NewExtractor {
+ def apply(tpt: Tree): New
+ def unapply(new_ : New): Option[Tree]
+ }
+
+ /** The API that all news support
+ * @group API
+ */
+ trait NewApi extends TermTreeApi { this: New =>
+ /** The tree that represents the type being instantiated.
+ * See the example for [[scala.reflect.api.Trees#NewExtractor]].
+ */
+ def tpt: Tree
+ }
+
+ /** Type annotation, eliminated by compiler phase cleanup
+ * @group Trees
+ * @template
+ */
+ type Typed >: Null <: TermTree with TypedApi
+
+ /** A tag that preserves the identity of the `Typed` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TypedTag: ClassTag[Typed]
+
+ /** The constructor/extractor for `Typed` instances.
+ * @group Extractors
+ */
+ val Typed: TypedExtractor
+
+ /** An extractor class to create and pattern match with syntax `Typed(expr, tpt)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * expr: tpt
+ * @group Extractors
+ */
+ abstract class TypedExtractor {
+ def apply(expr: Tree, tpt: Tree): Typed
+ def unapply(typed: Typed): Option[(Tree, Tree)]
+ }
+
+ /** The API that all typeds support
+ * @group API
+ */
+ trait TypedApi extends TermTreeApi { this: Typed =>
+ /** The expression being ascribed with the type. */
+ def expr: Tree
+
+ /** The type being ascribed to the expression. */
+ def tpt: Tree
+ }
+
+ /** Common base class for Apply and TypeApply.
+ * @group Trees
+ * @template
+ */
+ type GenericApply >: Null <: TermTree with GenericApplyApi
+
+ /** A tag that preserves the identity of the `GenericApply` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val GenericApplyTag: ClassTag[GenericApply]
+
+ /** The API that all applies support
+ * @group API
+ */
+ trait GenericApplyApi extends TermTreeApi { this: GenericApply =>
+ /** The target of the application. */
+ def fun: Tree
+
+ /** The arguments of the application. */
+ def args: List[Tree]
+ }
+
+ /* @PP: All signs point toward it being a requirement that args.nonEmpty,
+ * but I can't find that explicitly stated anywhere. Unless your last name
+ * is odersky, you should probably treat it as true.
+ */
+ /** Explicit type application.
+ * @group Trees
+ * @template
+ */
+ type TypeApply >: Null <: GenericApply with TypeApplyApi
+
+ /** A tag that preserves the identity of the `TypeApply` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TypeApplyTag: ClassTag[TypeApply]
+
+ /** The constructor/extractor for `TypeApply` instances.
+ * @group Extractors
+ */
+ val TypeApply: TypeApplyExtractor
+
+ /** An extractor class to create and pattern match with syntax `TypeApply(fun, args)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * fun[args]
+ *
+ * Should only be used with `fun` nodes which are terms, i.e. which have `isTerm` returning `true`.
+ * Otherwise `AppliedTypeTree` should be used instead.
+ *
+ * def foo[T] = ???
+ * foo[Int] // represented as TypeApply(Ident(<foo>), List(TypeTree(<Int>)))
+ *
+ * List[Int] as in `val x: List[Int] = ???`
+ * // represented as AppliedTypeTree(Ident(<List>), List(TypeTree(<Int>)))
+ *
+ * @group Extractors
+ */
+ abstract class TypeApplyExtractor {
+ def apply(fun: Tree, args: List[Tree]): TypeApply
+ def unapply(typeApply: TypeApply): Option[(Tree, List[Tree])]
+ }
+
+ /** The API that all type applies support
+ * @group API
+ */
+ trait TypeApplyApi extends GenericApplyApi { this: TypeApply =>
+ }
+
+ /** Value application
+ * @group Trees
+ * @template
+ */
+ type Apply >: Null <: GenericApply with ApplyApi
+
+ /** A tag that preserves the identity of the `Apply` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ApplyTag: ClassTag[Apply]
+
+ /** The constructor/extractor for `Apply` instances.
+ * @group Extractors
+ */
+ val Apply: ApplyExtractor
+
+ /** An extractor class to create and pattern match with syntax `Apply(fun, args)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * fun(args)
+ *
+ * For instance:
+ *
+ * fun[targs](args)
+ *
+ * Is expressed as:
+ *
+ * Apply(TypeApply(fun, targs), args)
+ * @group Extractors
+ */
+ abstract class ApplyExtractor {
+ def apply(fun: Tree, args: List[Tree]): Apply
+ def unapply(apply: Apply): Option[(Tree, List[Tree])]
+ }
+
+ /** The API that all applies support
+ * @group API
+ */
+ trait ApplyApi extends GenericApplyApi { this: Apply =>
+ }
+
+ /** Super reference, where `qual` is the corresponding `this` reference.
+ * A super reference `C.super[M]` is represented as `Super(This(C), M)`.
+ * @group Trees
+ * @template
+ */
+ type Super >: Null <: TermTree with SuperApi
+
+ /** A tag that preserves the identity of the `Super` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val SuperTag: ClassTag[Super]
+
+ /** The constructor/extractor for `Super` instances.
+ * @group Extractors
+ */
+ val Super: SuperExtractor
+
+ /** An extractor class to create and pattern match with syntax `Super(qual, mix)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * C.super[M]
+ *
+ * Which is represented as:
+ *
+ * Super(This(C), M)
+ *
+ * If `mix` is empty, it is tpnme.EMPTY.
+ *
+ * The symbol of a Super is the class _from_ which the super reference is made.
+ * For instance in C.super(...), it would be C.
+ * @group Extractors
+ */
+ abstract class SuperExtractor {
+ def apply(qual: Tree, mix: TypeName): Super
+ def unapply(super_ : Super): Option[(Tree, TypeName)]
+ }
+
+ /** The API that all supers support
+ * @group API
+ */
+ trait SuperApi extends TermTreeApi { this: Super =>
+ /** The qualifier of the `super` expression.
+ * See the example for [[scala.reflect.api.Trees#SuperExtractor]].
+ */
+ def qual: Tree
+
+ /** The selector of the `super` expression.
+ * See the example for [[scala.reflect.api.Trees#SuperExtractor]].
+ */
+ def mix: TypeName
+ }
+
+ /** Self reference
+ * @group Trees
+ * @template
+ */
+ type This >: Null <: TermTree with SymTree with ThisApi
+
+ /** A tag that preserves the identity of the `This` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ThisTag: ClassTag[This]
+
+ /** The constructor/extractor for `This` instances.
+ * @group Extractors
+ */
+ val This: ThisExtractor
+
+ /** An extractor class to create and pattern match with syntax `This(qual)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * qual.this
+ *
+ * The symbol of a This is the class to which the this refers.
+ * For instance in C.this, it would be C.
+ * @group Extractors
+ */
+ abstract class ThisExtractor {
+ def apply(qual: TypeName): This
+ def unapply(this_ : This): Option[TypeName]
+ }
+
+ /** The API that all thises support
+ * @group API
+ */
+ trait ThisApi extends TermTreeApi with SymTreeApi { this: This =>
+ /** The qualifier of the `this` expression.
+ * For an unqualified `this` refers to the enclosing class.
+ */
+ def qual: TypeName
+ }
+
+ /** A member selection <qualifier> . <name>
+ * @group Trees
+ * @template
+ */
+ type Select >: Null <: RefTree with SelectApi
+
+ /** A tag that preserves the identity of the `Select` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val SelectTag: ClassTag[Select]
+
+ /** The constructor/extractor for `Select` instances.
+ * @group Extractors
+ */
+ val Select: SelectExtractor
+
+ /** An extractor class to create and pattern match with syntax `Select(qual, name)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * qualifier.selector
+ *
+ * Should only be used with `qualifier` nodes which are terms, i.e. which have `isTerm` returning `true`.
+ * Otherwise `SelectFromTypeTree` should be used instead.
+ *
+ * foo.Bar // represented as Select(Ident(<foo>), <Bar>)
+ * Foo#Bar // represented as SelectFromTypeTree(Ident(<Foo>), <Bar>)
+ * @group Extractors
+ */
+ abstract class SelectExtractor {
+ def apply(qualifier: Tree, name: Name): Select
+ def unapply(select: Select): Option[(Tree, Name)]
+ }
+
+ /** The API that all selects support
+ * @group API
+ */
+ trait SelectApi extends RefTreeApi { this: Select =>
+ /** @inheritdoc */
+ def qualifier: Tree
+
+ /** @inheritdoc */
+ def name: Name
+ }
+
+ /** A reference to identifier `name`.
+ * @group Trees
+ * @template
+ */
+ type Ident >: Null <: RefTree with IdentApi
+
+ /** A tag that preserves the identity of the `Ident` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val IdentTag: ClassTag[Ident]
+
+ /** The constructor/extractor for `Ident` instances.
+ * @group Extractors
+ */
+ val Ident: IdentExtractor
+
+ /** An extractor class to create and pattern match with syntax `Ident(name)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * name
+ *
+ * Type checker converts idents that refer to enclosing fields or methods to selects.
+ * For example, name ==> this.name
+ * @group Extractors
+ */
+ abstract class IdentExtractor {
+ def apply(name: Name): Ident
+ def unapply(ident: Ident): Option[Name]
+ }
+
+ /** The API that all idents support
+ * @group API
+ */
+ trait IdentApi extends RefTreeApi { this: Ident =>
+ /** @inheritdoc */
+ def name: Name
+ }
+
+ /** Marks underlying reference to id as boxed.
+ *
+ * <b>Precondition:</b> id must refer to a captured variable
+ * A reference such marked will refer to the boxed entity, no dereferencing
+ * with `.elem` is done on it.
+ * This tree node can be emitted by macros such as reify that call referenceCapturedVariable.
+ * It is eliminated in LambdaLift, where the boxing conversion takes place.
+ * @group Trees
+ * @template
+ */
+ type ReferenceToBoxed >: Null <: TermTree with ReferenceToBoxedApi
+
+ /** A tag that preserves the identity of the `ReferenceToBoxed` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ReferenceToBoxedTag: ClassTag[ReferenceToBoxed]
+
+ /** The constructor/extractor for `ReferenceToBoxed` instances.
+ * @group Extractors
+ */
+ val ReferenceToBoxed: ReferenceToBoxedExtractor
+
+ /** An extractor class to create and pattern match with syntax `ReferenceToBoxed(ident)`.
+ * This AST node does not have direct correspondence to Scala code,
+ * and is emitted by macros to reference capture vars directly without going through `elem`.
+ *
+ * For example:
+ *
+ * var x = ...
+ * fun { x }
+ *
+ * Will emit:
+ *
+ * Ident(x)
+ *
+ * Which gets transformed to:
+ *
+ * Select(Ident(x), "elem")
+ *
+ * If `ReferenceToBoxed` were used instead of Ident, no transformation would be performed.
+ * @group Extractors
+ */
+ abstract class ReferenceToBoxedExtractor {
+ def apply(ident: Ident): ReferenceToBoxed
+ def unapply(referenceToBoxed: ReferenceToBoxed): Option[Ident]
+ }
+
+ /** The API that all references support
+ * @group API
+ */
+ trait ReferenceToBoxedApi extends TermTreeApi { this: ReferenceToBoxed =>
+ /** The underlying reference. */
+ def ident: Tree
+ }
+
+ /** Literal
+ * @group Trees
+ * @template
+ */
+ type Literal >: Null <: TermTree with LiteralApi
+
+ /** A tag that preserves the identity of the `Literal` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and the like.
+ * @group Tags
+ */
+ implicit val LiteralTag: ClassTag[Literal]
+
+ /** The constructor/extractor for `Literal` instances.
+ * @group Extractors
+ */
+ val Literal: LiteralExtractor
+
+ /** An extractor class to create and pattern match with syntax `Literal(value)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * value
+ * @group Extractors
+ */
+ abstract class LiteralExtractor {
+ def apply(value: Constant): Literal
+ def unapply(literal: Literal): Option[Constant]
+ }
+
+ /** The API that all literals support
+ * @group API
+ */
+ trait LiteralApi extends TermTreeApi { this: Literal =>
+ /** The compile-time constant underlying the literal. */
+ def value: Constant
+ }
+
+ /** A tree that has an annotation attached to it. Only used for annotated types and
+ * annotation ascriptions; annotations on definitions are stored in the Modifiers.
+ * Eliminated by typechecker (typedAnnotated), the annotations are then stored in
+ * an AnnotatedType.
+ * @group Trees
+ * @template
+ */
+ type Annotated >: Null <: AnyRef with Tree with AnnotatedApi
+
+ /** A tag that preserves the identity of the `Annotated` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and the like.
+ * @group Tags
+ */
+ implicit val AnnotatedTag: ClassTag[Annotated]
+
+ /** The constructor/extractor for `Annotated` instances.
+ * @group Extractors
+ */
+ val Annotated: AnnotatedExtractor
+
+ /** An extractor class to create and pattern match with syntax `Annotated(annot, arg)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * arg @annot // for types
+ * arg: @annot // for exprs
+ * @group Extractors
+ */
+ abstract class AnnotatedExtractor {
+ def apply(annot: Tree, arg: Tree): Annotated
+ def unapply(annotated: Annotated): Option[(Tree, Tree)]
+ }
+
+ /** The API that all annotateds support
+ * @group API
+ */
+ trait AnnotatedApi extends TreeApi { this: Annotated =>
+ /** The annotation. */
+ def annot: Tree
+
+ /** The annotee. */
+ def arg: Tree
+ }
+
+ /** Singleton type, eliminated by RefCheck
+ * @group Trees
+ * @template
+ */
+ type SingletonTypeTree >: Null <: TypTree with SingletonTypeTreeApi
+
+ /** A tag that preserves the identity of the `SingletonTypeTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and the like.
+ * @group Tags
+ */
+ implicit val SingletonTypeTreeTag: ClassTag[SingletonTypeTree]
+
+ /** The constructor/extractor for `SingletonTypeTree` instances.
+ * @group Extractors
+ */
+ val SingletonTypeTree: SingletonTypeTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `SingletonTypeTree(ref)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * ref.type
+ * @group Extractors
+ */
+ abstract class SingletonTypeTreeExtractor {
+ def apply(ref: Tree): SingletonTypeTree
+ def unapply(singletonTypeTree: SingletonTypeTree): Option[Tree]
+ }
+
+ /** The API that all singleton type trees support
+ * @group API
+ */
+ trait SingletonTypeTreeApi extends TypTreeApi { this: SingletonTypeTree =>
+ /** The underlying reference. */
+ def ref: Tree
+ }
+
+ /** Type selection <qualifier> # <name>, eliminated by RefCheck
+ * @group Trees
+ * @template
+ */
+ type SelectFromTypeTree >: Null <: TypTree with RefTree with SelectFromTypeTreeApi
+
+ /** A tag that preserves the identity of the `SelectFromTypeTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and the like.
+ * @group Tags
+ */
+ implicit val SelectFromTypeTreeTag: ClassTag[SelectFromTypeTree]
+
+ /** The constructor/extractor for `SelectFromTypeTree` instances.
+ * @group Extractors
+ */
+ val SelectFromTypeTree: SelectFromTypeTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `SelectFromTypeTree(qualifier, name)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * qualifier # selector
+ *
+ * Note: a path-dependent type p.T is expressed as p.type # T
+ *
+ * Should only be used with `qualifier` nodes which are types, i.e. which have `isType` returning `true`.
+ * Otherwise `Select` should be used instead.
+ *
+ * Foo#Bar // represented as SelectFromTypeTree(Ident(<Foo>), <Bar>)
+ * foo.Bar // represented as Select(Ident(<foo>), <Bar>)
+ * @group Extractors
+ */
+ abstract class SelectFromTypeTreeExtractor {
+ def apply(qualifier: Tree, name: TypeName): SelectFromTypeTree
+ def unapply(selectFromTypeTree: SelectFromTypeTree): Option[(Tree, TypeName)]
+ }
+
+ /** The API that all selects from type trees support
+ * @group API
+ */
+ trait SelectFromTypeTreeApi extends TypTreeApi with RefTreeApi { this: SelectFromTypeTree =>
+ /** @inheritdoc */
+ def qualifier: Tree
+
+ /** @inheritdoc */
+ def name: TypeName
+ }
+
+ /** Intersection type <parent1> with ... with <parentN> { <decls> }, eliminated by RefCheck
+ * @group Trees
+ * @template
+ */
+ type CompoundTypeTree >: Null <: TypTree with CompoundTypeTreeApi
+
+ /** A tag that preserves the identity of the `CompoundTypeTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and the like.
+ * @group Tags
+ */
+ implicit val CompoundTypeTreeTag: ClassTag[CompoundTypeTree]
+
+ /** The constructor/extractor for `CompoundTypeTree` instances.
+ * @group Extractors
+ */
+ val CompoundTypeTree: CompoundTypeTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `CompoundTypeTree(templ)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * parent1 with ... with parentN { refinement }
+ * @group Extractors
+ */
+ abstract class CompoundTypeTreeExtractor {
+ def apply(templ: Template): CompoundTypeTree
+ def unapply(compoundTypeTree: CompoundTypeTree): Option[Template]
+ }
+
+ /** The API that all compound type trees support
+ * @group API
+ */
+ trait CompoundTypeTreeApi extends TypTreeApi { this: CompoundTypeTree =>
+ /** The template of the compound type - represents the parents, the optional self-type and the optional definitions. */
+ def templ: Template
+ }
+
+ /** Applied type <tpt> [ <args> ], eliminated by RefCheck
+ * @group Trees
+ * @template
+ */
+ type AppliedTypeTree >: Null <: TypTree with AppliedTypeTreeApi
+
+ /** A tag that preserves the identity of the `AppliedTypeTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and the like.
+ * @group Tags
+ */
+ implicit val AppliedTypeTreeTag: ClassTag[AppliedTypeTree]
+
+ /** The constructor/extractor for `AppliedTypeTree` instances.
+ * @group Extractors
+ */
+ val AppliedTypeTree: AppliedTypeTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `AppliedTypeTree(tpt, args)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * tpt[args]
+ *
+ * Should only be used with `tpt` nodes which are types, i.e. which have `isType` returning `true`.
+ * Otherwise `TypeApply` should be used instead.
+ *
+ * List[Int] as in `val x: List[Int] = ???`
+ * // represented as AppliedTypeTree(Ident(<List>), List(TypeTree(<Int>)))
+ *
+ * def foo[T] = ???
+ * foo[Int] // represented as TypeApply(Ident(<foo>), List(TypeTree(<Int>)))
+ * @group Extractors
+ */
+ abstract class AppliedTypeTreeExtractor {
+ def apply(tpt: Tree, args: List[Tree]): AppliedTypeTree
+ def unapply(appliedTypeTree: AppliedTypeTree): Option[(Tree, List[Tree])]
+ }
+
+ /** The API that all applied type trees support
+ * @group API
+ */
+ trait AppliedTypeTreeApi extends TypTreeApi { this: AppliedTypeTree =>
+ /** The target of the application. */
+ def tpt: Tree
+
+ /** The arguments of the application. */
+ def args: List[Tree]
+ }
+
+ /** Type bounds tree node
+ * @group Trees
+ * @template
+ */
+ type TypeBoundsTree >: Null <: TypTree with TypeBoundsTreeApi
+
+ /** A tag that preserves the identity of the `TypeBoundsTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and the like.
+ * @group Tags
+ */
+ implicit val TypeBoundsTreeTag: ClassTag[TypeBoundsTree]
+
+ /** The constructor/extractor for `TypeBoundsTree` instances.
+ * @group Extractors
+ */
+ val TypeBoundsTree: TypeBoundsTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `TypeBoundsTree(lo, hi)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * >: lo <: hi
+ * @group Extractors
+ */
+ abstract class TypeBoundsTreeExtractor {
+ def apply(lo: Tree, hi: Tree): TypeBoundsTree
+ def unapply(typeBoundsTree: TypeBoundsTree): Option[(Tree, Tree)]
+ }
+
+ /** The API that all type bound trees support
+ * @group API
+ */
+ trait TypeBoundsTreeApi extends TypTreeApi { this: TypeBoundsTree =>
+ /** The lower bound.
+ * Is equal to `Ident(<scala.Nothing>)` if not specified explicitly.
+ */
+ def lo: Tree
+
+ /** The upper bound.
+ * Is equal to `Ident(<scala.Any>)` if not specified explicitly.
+ */
+ def hi: Tree
+ }
+
+ /** Existential type tree node
+ * @group Trees
+ * @template
+ */
+ type ExistentialTypeTree >: Null <: TypTree with ExistentialTypeTreeApi
+
+ /** A tag that preserves the identity of the `ExistentialTypeTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and the like.
+ * @group Tags
+ */
+ implicit val ExistentialTypeTreeTag: ClassTag[ExistentialTypeTree]
+
+ /** The constructor/extractor for `ExistentialTypeTree` instances.
+ * @group Extractors
+ */
+ val ExistentialTypeTree: ExistentialTypeTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `ExistentialTypeTree(tpt, whereClauses)`.
+ * This AST node corresponds to the following Scala code:
+ *
+ * tpt forSome { whereClauses }
+ * @group Extractors
+ */
+ abstract class ExistentialTypeTreeExtractor {
+ def apply(tpt: Tree, whereClauses: List[Tree]): ExistentialTypeTree
+ def unapply(existentialTypeTree: ExistentialTypeTree): Option[(Tree, List[Tree])]
+ }
+
+ /** The API that all existential type trees support
+ * @group API
+ */
+ trait ExistentialTypeTreeApi extends TypTreeApi { this: ExistentialTypeTree =>
+ /** The underlying type of the existential type. */
+ def tpt: Tree
+
+ /** The clauses of the definition of the existential type. */
+ def whereClauses: List[Tree]
+ }
+
+ /** A synthetic tree holding an arbitrary type. Not to be confused
+ * with TypTree, the trait for trees that are only used for type trees.
+ * TypeTree's are inserted in several places, but most notably in
+ * `RefCheck`, where the arbitrary type trees are all replaced by
+ * TypeTree's.
+ * @group Trees
+ * @template
+ */
+ type TypeTree >: Null <: TypTree with TypeTreeApi
+
+ /** A tag that preserves the identity of the `TypeTree` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and the like.
+ * @group Tags
+ */
+ implicit val TypeTreeTag: ClassTag[TypeTree]
+
+ /** The constructor/extractor for `TypeTree` instances.
+ * @group Extractors
+ */
+ val TypeTree: TypeTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `TypeTree()`.
+ * This AST node does not have direct correspondence to Scala code,
+ * and is emitted everywhere we want to wrap a `Type` in a `Tree`.
+ * @group Extractors
+ */
+ abstract class TypeTreeExtractor {
+ def apply(): TypeTree
+ def unapply(typeTree: TypeTree): Boolean
+ }
+
+ /** The API that all type trees support
+ * @group API
+ */
+ trait TypeTreeApi extends TypTreeApi { this: TypeTree =>
+ /** The precursor of this tree.
+ * Is equal to `EmptyTree` if this type tree doesn't have precursors.
+ */
+ def original: Tree
+ }
+
+ /** An empty deferred value definition corresponding to:
+ * val _: _
+ * This is used as a placeholder for the `self` parameter of a `Template` if there is
+ * no definition of a self value or self type.
+ * @group Trees
+ */
+ val emptyValDef: ValDef
+
+// ---------------------- factories ----------------------------------------------
+
+ /** A factory method for `ClassDef` nodes.
+ * @group Factories
+ */
+ @deprecated("Use the canonical ClassDef constructor to create a class and then initialize its position and symbol manually", "2.10.1")
+ def ClassDef(sym: Symbol, impl: Template): ClassDef
+
+ /** A factory method for `ModuleDef` nodes.
+ * @group Factories
+ */
+ @deprecated("Use the canonical ModuleDef constructor to create an object and then initialize its position and symbol manually", "2.10.1")
+ def ModuleDef(sym: Symbol, impl: Template): ModuleDef
+
+ /** A factory method for `ValDef` nodes.
+ * @group Factories
+ */
+ @deprecated("Use the canonical ValDef constructor to create a val and then initialize its position and symbol manually", "2.10.1")
+ def ValDef(sym: Symbol, rhs: Tree): ValDef
+
+ /** A factory method for `ValDef` nodes.
+ * @group Factories
+ */
+ @deprecated("Use the canonical ValDef constructor to create a val with an empty right-hand side and then initialize its position and symbol manually", "2.10.1")
+ def ValDef(sym: Symbol): ValDef
+
+ /** A factory method for `DefDef` nodes.
+ * @group Factories
+ */
+ @deprecated("Use the canonical DefDef constructor to create a method and then initialize its position and symbol manually", "2.10.1")
+ def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef
+
+ /** A factory method for `DefDef` nodes.
+ * @group Factories
+ */
+ @deprecated("Use the canonical DefDef constructor to create a method and then initialize its position and symbol manually", "2.10.1")
+ def DefDef(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef
+
+ /** A factory method for `DefDef` nodes.
+ * @group Factories
+ */
+ @deprecated("Use the canonical DefDef constructor to create a method and then initialize its position and symbol manually", "2.10.1")
+ def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef
+
+ /** A factory method for `DefDef` nodes.
+ * @group Factories
+ */
+ @deprecated("Use the canonical DefDef constructor to create a method and then initialize its position and symbol manually", "2.10.1")
+ def DefDef(sym: Symbol, rhs: Tree): DefDef
+
+ /** A factory method for `DefDef` nodes.
+ * @group Factories
+ */
+ @deprecated("Use the canonical DefDef constructor to create a method and then initialize its position and symbol manually", "2.10.1")
+ def DefDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef
+
+ /** A factory method for `TypeDef` nodes.
+ * @group Factories
+ */
+ @deprecated("Use the canonical TypeDef constructor to create a type alias and then initialize its position and symbol manually", "2.10.1")
+ def TypeDef(sym: Symbol, rhs: Tree): TypeDef
+
+ /** A factory method for `TypeDef` nodes.
+ * @group Factories
+ */
+ @deprecated("Use the canonical TypeDef constructor to create an abstract type or type parameter and then initialize its position and symbol manually", "2.10.1")
+ def TypeDef(sym: Symbol): TypeDef
+
+ /** A factory method for `LabelDef` nodes.
+ * @group Factories
+ */
+ @deprecated("Use the canonical LabelDef constructor to create a label and then initialize its position and symbol manually", "2.10.1")
+ def LabelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef
+
+ /** A factory method for `Block` nodes.
+ * Flattens directly nested blocks.
+ * @group Factories
+ */
+ @deprecated("Use the canonical Block constructor, explicitly specifying its expression if necessary. Flatten directly nested blocks manually if needed", "2.10.1")
+ def Block(stats: Tree*): Block
+
+ /** A factory method for `CaseDef` nodes.
+ * @group Factories
+ */
+ @deprecated("Use the canonical CaseDef constructor passing EmptyTree for guard", "2.10.1")
+ def CaseDef(pat: Tree, body: Tree): CaseDef
+
+ /** A factory method for `Bind` nodes.
+ * @group Factories
+ */
+ @deprecated("Use the canonical Bind constructor to create a bind and then initialize its symbol manually", "2.10.1")
+ def Bind(sym: Symbol, body: Tree): Bind
+
+ /** A factory method for `Try` nodes.
+ * @group Factories
+ */
+ @deprecated("Use canonical CaseDef constructors to create exception catching expressions and then wrap them in Try", "2.10.1")
+ def Try(body: Tree, cases: (Tree, Tree)*): Try
+
+ /** A factory method for `Throw` nodes.
+ * @group Factories
+ */
+ @deprecated("Use the canonical New constructor to create an object instantiation expression and then wrap it in Throw", "2.10.1")
+ def Throw(tpe: Type, args: Tree*): Throw
+
+ /** Factory method for object creation `new tpt(args_1)...(args_n)`
+ * A `New(t, as)` is expanded to: `(new t).<init>(as)`
+ * @group Factories
+ */
+ @deprecated("Use Apply(...Apply(Select(New(tpt), nme.CONSTRUCTOR), args1)...argsN) instead", "2.10.1")
+ def New(tpt: Tree, argss: List[List[Tree]]): Tree
+
+ /** A factory for `new` expressions with zero or one argument lists, based on a type.
+ * @group Factories
+ */
+ @deprecated("Use New(TypeTree(tpe), args.toList) instead", "2.10.1")
+ def New(tpe: Type, args: Tree*): Tree
+
+ /** A factory for `new` expressions with zero or one argument lists, based on a symbol.
+ * @group Factories
+ */
+ @deprecated("Use New(sym.toType, args) instead", "2.10.1")
+ def New(sym: Symbol, args: Tree*): Tree
+
+ /** A factory method for `Apply` nodes.
+ * @group Factories
+ */
+ @deprecated("Use Apply(Ident(sym), args.toList) instead", "2.10.1")
+ def Apply(sym: Symbol, args: Tree*): Tree
+
+ /** A factory for `new` expressions with zero or one argument lists, based on a type tree.
+ * @group Factories
+ */
+ @deprecated("Use Apply(Select(New(tpt), nme.CONSTRUCTOR), args) instead", "2.10.1")
+ def ApplyConstructor(tpt: Tree, args: List[Tree]): Tree
+
+ /** A factory method for `Super` nodes.
+ * @group Factories
+ */
+ @deprecated("Use Super(This(sym), mix) instead", "2.10.1")
+ def Super(sym: Symbol, mix: TypeName): Tree
+
+ /** A factory method for `This` nodes.
+ * @group Factories
+ */
+ def This(sym: Symbol): Tree
+
+ /** A factory method for `Select` nodes.
+ * The string `name` argument is assumed to represent a [[scala.reflect.api.Names#TermName `TermName`]].
+ * @group Factories
+ */
+ @deprecated("Use Select(tree, newTermName(name)) instead", "2.10.1")
+ def Select(qualifier: Tree, name: String): Select
+
+ /** A factory method for `Select` nodes.
+ * @group Factories
+ */
+ def Select(qualifier: Tree, sym: Symbol): Select
+
+ /** A factory method for `Ident` nodes.
+ * @group Factories
+ */
+ @deprecated("Use Ident(newTermName(name)) instead", "2.10.1")
+ def Ident(name: String): Ident
+
+ /** A factory method for `Ident` nodes.
+ * @group Factories
+ */
+ def Ident(sym: Symbol): Ident
+
+ /** A factory method for `TypeTree` nodes.
+ * @group Factories
+ */
+ def TypeTree(tp: Type): TypeTree
+
+// ---------------------- copying ------------------------------------------------
+
+ /** The type of standard (lazy) tree copiers.
+ * @template
+ * @group Copying
+ */
+ type TreeCopier <: TreeCopierOps
+
+ /** The standard (lazy) tree copier.
+ * @group Copying
+ */
+ val treeCopy: TreeCopier = newLazyTreeCopier
+
+ /** Creates a strict tree copier.
+ * @group Copying
+ */
+ def newStrictTreeCopier: TreeCopier
+
+ /** Creates a lazy tree copier.
+ * @group Copying
+ */
+ def newLazyTreeCopier: TreeCopier
+
+ /** The API of a tree copier.
+ * @group API
+ */
+ abstract class TreeCopierOps {
+ /** Creates a `ClassDef` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def ClassDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template): ClassDef
+
+ /** Creates a `PackageDef` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def PackageDef(tree: Tree, pid: RefTree, stats: List[Tree]): PackageDef
+
+ /** Creates a `ModuleDef` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def ModuleDef(tree: Tree, mods: Modifiers, name: Name, impl: Template): ModuleDef
+
+ /** Creates a `ValDef` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def ValDef(tree: Tree, mods: Modifiers, name: Name, tpt: Tree, rhs: Tree): ValDef
+
+ /** Creates a `DefDef` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def DefDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef
+
+ /** Creates a `TypeDef` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def TypeDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], rhs: Tree): TypeDef
+
+ /** Creates a `LabelDef` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def LabelDef(tree: Tree, name: Name, params: List[Ident], rhs: Tree): LabelDef
+
+ /** Creates an `Import` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Import(tree: Tree, expr: Tree, selectors: List[ImportSelector]): Import
+
+ /** Creates a `Template` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Template(tree: Tree, parents: List[Tree], self: ValDef, body: List[Tree]): Template
+
+ /** Creates a `Block` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Block(tree: Tree, stats: List[Tree], expr: Tree): Block
+
+ /** Creates a `CaseDef` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def CaseDef(tree: Tree, pat: Tree, guard: Tree, body: Tree): CaseDef
+
+ /** Creates an `Alternative` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Alternative(tree: Tree, trees: List[Tree]): Alternative
+
+ /** Creates a `Star` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Star(tree: Tree, elem: Tree): Star
+
+ /** Creates a `Bind` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Bind(tree: Tree, name: Name, body: Tree): Bind
+
+ /** Creates an `UnApply` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def UnApply(tree: Tree, fun: Tree, args: List[Tree]): UnApply
+
+ /** Creates a `Function` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Function(tree: Tree, vparams: List[ValDef], body: Tree): Function
+
+ /** Creates an `Assign` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Assign(tree: Tree, lhs: Tree, rhs: Tree): Assign
+
+ /** Creates an `AssignOrNamedArg` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree): AssignOrNamedArg
+
+ /** Creates an `If` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree): If
+
+ /** Creates a `Match` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Match(tree: Tree, selector: Tree, cases: List[CaseDef]): Match
+
+ /** Creates a `Return` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Return(tree: Tree, expr: Tree): Return
+
+ /** Creates a `Try` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Try(tree: Tree, block: Tree, catches: List[CaseDef], finalizer: Tree): Try
+
+ /** Creates a `Throw` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Throw(tree: Tree, expr: Tree): Throw
+
+ /** Creates a `New` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def New(tree: Tree, tpt: Tree): New
+
+ /** Creates a `Typed` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Typed(tree: Tree, expr: Tree, tpt: Tree): Typed
+
+ /** Creates a `TypeApply` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def TypeApply(tree: Tree, fun: Tree, args: List[Tree]): TypeApply
+
+ /** Creates an `Apply` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Apply(tree: Tree, fun: Tree, args: List[Tree]): Apply
+
+ /** Creates a `Super` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Super(tree: Tree, qual: Tree, mix: TypeName): Super
+
+ /** Creates a `This` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def This(tree: Tree, qual: Name): This
+
+ /** Creates a `Select` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Select(tree: Tree, qualifier: Tree, selector: Name): Select
+
+ /** Creates an `Ident` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Ident(tree: Tree, name: Name): Ident
+
+ /** Creates a `ReferenceToBoxed` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def ReferenceToBoxed(tree: Tree, idt: Ident): ReferenceToBoxed
+
+ /** Creates a `Literal` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Literal(tree: Tree, value: Constant): Literal
+
+ /** Creates a `TypeTree` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def TypeTree(tree: Tree): TypeTree
+
+ /** Creates an `Annotated` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def Annotated(tree: Tree, annot: Tree, arg: Tree): Annotated
+
+ /** Creates a `SingletonTypeTree` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def SingletonTypeTree(tree: Tree, ref: Tree): SingletonTypeTree
+
+ /** Creates a `SelectFromTypeTree` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def SelectFromTypeTree(tree: Tree, qualifier: Tree, selector: Name): SelectFromTypeTree
+
+ /** Creates a `CompoundTypeTree` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def CompoundTypeTree(tree: Tree, templ: Template): CompoundTypeTree
+
+ /** Creates an `AppliedTypeTree` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def AppliedTypeTree(tree: Tree, tpt: Tree, args: List[Tree]): AppliedTypeTree
+
+ /** Creates a `TypeBoundsTree` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def TypeBoundsTree(tree: Tree, lo: Tree, hi: Tree): TypeBoundsTree
+
+ /** Creates an `ExistentialTypeTree` node from the given components, having a given `tree` as a prototype.
+ * Having a tree as a prototype means that the tree's attachments, type and symbol will be copied into the result.
+ */
+ def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]): ExistentialTypeTree
+ }
+
+// ---------------------- traversing and transforming ------------------------------
+
+ /** A class that implements a default tree traversal strategy: breadth-first component-wise.
+ * @group Traversal
+ */
+ class Traverser {
+ protected[scala] var currentOwner: Symbol = rootMirror.RootClass
+
+ /** Traverses a single tree. */
+ def traverse(tree: Tree): Unit = itraverse(this, tree)
+
+ /** Traverses a list of trees. */
+ def traverseTrees(trees: List[Tree]) {
+ trees foreach traverse
+ }
+
+ /** Traverses a list of lists of trees. */
+ def traverseTreess(treess: List[List[Tree]]) {
+ treess foreach traverseTrees
+ }
+
+ /** Traverses a list of trees with a given owner symbol. */
+ def traverseStats(stats: List[Tree], exprOwner: Symbol) {
+ stats foreach (stat =>
+ if (exprOwner != currentOwner) atOwner(exprOwner)(traverse(stat))
+ else traverse(stat)
+ )
+ }
+
+ /** Performs a traversal with a given owner symbol.
+ * The previous owner is restored even if `traverse` throws, so an aborted
+ * traversal cannot leave `currentOwner` pointing at the wrong symbol.
+ */
+ def atOwner(owner: Symbol)(traverse: => Unit) {
+ val prevOwner = currentOwner
+ currentOwner = owner
+ try traverse
+ finally currentOwner = prevOwner
+ }
+
+ /** Leave apply available in the generic traverser to do something else.
+ */
+ def apply[T <: Tree](tree: T): T = { traverse(tree); tree }
+ }
+
+ /** Delegates the traversal strategy to `scala.reflect.internal.Trees`,
+ * because pattern matching on abstract types we have here degrades performance.
+ * @group Traversal
+ */
+ protected def itraverse(traverser: Traverser, tree: Tree): Unit = throw new MatchError(tree)
+
+ /** Provides an extension hook for the traversal strategy.
+ * Future-proofs against new node types.
+ * @group Traversal
+ */
+ protected def xtraverse(traverser: Traverser, tree: Tree): Unit = throw new MatchError(tree)
+
+ /** A class that implements a default tree transformation strategy: breadth-first component-wise cloning.
+ * @group Traversal
+ */
+ abstract class Transformer {
+ /** The underlying tree copier. */
+ val treeCopy: TreeCopier = newLazyTreeCopier
+
+ /** The current owner symbol. */
+ protected[scala] var currentOwner: Symbol = rootMirror.RootClass
+
+ /** The enclosing method of the currently transformed tree. */
+ protected def currentMethod = {
+ def enclosingMethod(sym: Symbol): Symbol =
+ if (sym.isMethod || sym == NoSymbol) sym else enclosingMethod(sym.owner)
+ enclosingMethod(currentOwner)
+ }
+
+ /** The enclosing class of the currently transformed tree. */
+ protected def currentClass = {
+ def enclosingClass(sym: Symbol): Symbol =
+ if (sym.isClass || sym == NoSymbol) sym else enclosingClass(sym.owner)
+ enclosingClass(currentOwner)
+ }
+
+// protected def currentPackage = currentOwner.enclosingTopLevelClass.owner
+
+ /** Transforms a single tree. */
+ def transform(tree: Tree): Tree = itransform(this, tree)
+
+ /** Transforms a list of trees. */
+ def transformTrees(trees: List[Tree]): List[Tree] = trees mapConserve (transform(_))
+
+ /** Transforms a `Template`. */
+ def transformTemplate(tree: Template): Template =
+ transform(tree: Tree).asInstanceOf[Template]
+ /** Transforms a list of `TypeDef` trees. */
+ def transformTypeDefs(trees: List[TypeDef]): List[TypeDef] =
+ trees mapConserve (tree => transform(tree).asInstanceOf[TypeDef])
+ /** Transforms a `ValDef`. */
+ def transformValDef(tree: ValDef): ValDef =
+ if (tree.isEmpty) tree else transform(tree).asInstanceOf[ValDef]
+ /** Transforms a list of `ValDef` nodes. */
+ def transformValDefs(trees: List[ValDef]): List[ValDef] =
+ trees mapConserve (transformValDef(_))
+ /** Transforms a list of lists of `ValDef` nodes. */
+ def transformValDefss(treess: List[List[ValDef]]): List[List[ValDef]] =
+ treess mapConserve (transformValDefs(_))
+ /** Transforms a list of `CaseDef` nodes. */
+ def transformCaseDefs(trees: List[CaseDef]): List[CaseDef] =
+ trees mapConserve (tree => transform(tree).asInstanceOf[CaseDef])
+ /** Transforms a list of `Ident` nodes. */
+ def transformIdents(trees: List[Ident]): List[Ident] =
+ trees mapConserve (tree => transform(tree).asInstanceOf[Ident])
+ /** Traverses a list of trees with a given owner symbol. */
+ def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] =
+ stats mapConserve (stat =>
+ if (exprOwner != currentOwner && stat.isTerm) atOwner(exprOwner)(transform(stat))
+ else transform(stat)) filter (EmptyTree != _)
+ /** Transforms `Modifiers`. */
+ def transformModifiers(mods: Modifiers): Modifiers =
+ mods.mapAnnotations(transformTrees)
+
+ /** Transforms a tree with a given owner symbol. */
+ def atOwner[A](owner: Symbol)(trans: => A): A = {
+ val prevOwner = currentOwner
+ currentOwner = owner
+ val result = trans
+ currentOwner = prevOwner
+ result
+ }
+ }
+
+ /** Delegates the transformation strategy to `scala.reflect.internal.Trees`,
+ * because pattern matching on abstract types we have here degrades performance.
+ * @group Traversal
+ */
+ protected def itransform(transformer: Transformer, tree: Tree): Tree = throw new MatchError(tree)
+
+ /** Provides an extension hook for the transformation strategy.
+ * Future-proofs against new node types.
+ * @group Traversal
+ */
+ protected def xtransform(transformer: Transformer, tree: Tree): Tree = throw new MatchError(tree)
+
+ /** The type of tree modifiers.
+ * @group Traversal
+ */
+ type Modifiers >: Null <: AnyRef with ModifiersApi
+
+ /** A tag that preserves the identity of the `Modifiers` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Traversal
+ */
+ implicit val ModifiersTag: ClassTag[Modifiers]
+
+ /** The API that all Modifiers support
+ * @group API
+ */
+ abstract class ModifiersApi {
+ /** The underlying flags of the enclosing definition.
+ * Is equal to `NoFlags` if none are specified explicitly.
+ */
+ def flags: FlagSet
+
+ def hasFlag(flag: FlagSet): Boolean
+
+ /** The visibility scope of the enclosing definition.
+ * Is equal to `tpnme.EMPTY` if none is specified explicitly.
+ */
+ def privateWithin: Name
+
+ /** The annotations of the enclosing definition.
+ * Empty list if none are specified explicitly.
+ */
+ def annotations: List[Tree]
+
+ /** Creates a new instance of `Modifiers` with
+ * the annotations transformed according to the given function.
+ */
+ def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers =
+ Modifiers(flags, privateWithin, f(annotations))
+ }
+
+ /** The constructor/extractor for `Modifiers` instances.
+ * @group Traversal
+ */
+ val Modifiers: ModifiersCreator
+
+ /** An extractor class to create and pattern match with syntax `Modifiers(flags, privateWithin, annotations)`.
+ * Modifiers encapsulate flags, visibility annotations and Scala annotations for member definitions.
+ * @group Traversal
+ */
+ abstract class ModifiersCreator {
+ def apply(): Modifiers = Modifiers(NoFlags, tpnme.EMPTY, List())
+ def apply(flags: FlagSet, privateWithin: Name, annotations: List[Tree]): Modifiers
+ }
+
+ /** The factory for `Modifiers` instances.
+ * @group Traversal
+ */
+ def Modifiers(flags: FlagSet, privateWithin: Name): Modifiers = Modifiers(flags, privateWithin, List())
+
+ /** The factory for `Modifiers` instances.
+ * @group Traversal
+ */
+ def Modifiers(flags: FlagSet): Modifiers = Modifiers(flags, tpnme.EMPTY)
+
+ /** An empty `Modifiers` object: no flags, empty visibility annotation and no Scala annotations.
+ * @group Traversal
+ */
+ lazy val NoMods = Modifiers()
+}
diff --git a/src/reflect/scala/reflect/api/TypeCreator.scala b/src/reflect/scala/reflect/api/TypeCreator.scala
new file mode 100644
index 0000000..24271cb
--- /dev/null
+++ b/src/reflect/scala/reflect/api/TypeCreator.scala
@@ -0,0 +1,12 @@
+package scala.reflect
+package api
+
+/** A mirror-aware factory for types.
+ *
+ * This class is used internally by Scala Reflection, and is not recommended for use in client code.
+ *
+ * @group ReflectionAPI
+ */
+abstract class TypeCreator {
+ def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Type
+}
diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala
new file mode 100644
index 0000000..7457910
--- /dev/null
+++ b/src/reflect/scala/reflect/api/TypeTags.scala
@@ -0,0 +1,354 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala
+package reflect
+package api
+
+import java.lang.{ Class => jClass }
+import scala.language.implicitConversions
+
+/**
+ * A `TypeTag[T]` encapsulates the runtime type representation of some type `T`.
+ * Like [[scala.reflect.Manifest]], the prime use case of `TypeTag`s is to give access
+ * to erased types. However, `TypeTag`s should be considered to be a richer
+ * replacement of the pre-2.10 notion of a [[scala.reflect.Manifest Manifest]], that
+ * are, in addition, fully integrated with Scala reflection.
+ *
+ * There exist three different types of `TypeTags`:
+ *
+ * <ul>
+ * <li>[[scala.reflect.api.TypeTags#TypeTag]]. <br/>A full type descriptor of a Scala type.
+ * For example, a `TypeTag[List[String]]` contains all type information,
+ * in this case, of type `scala.List[String]`.</li>
+ *
+ * <li>[[scala.reflect.ClassTag]]. <br/>A partial type descriptor of a Scala type. For
+ * example, a `ClassTag[List[String]]` contains only the erased class
+ * type information, in this case, of type `scala.collection.immutable.List`.
+ * `ClassTag`s provide access only to the runtime class of a type.
+ * Analogous to [[scala.reflect.ClassManifest]]</li>
+ *
+ * <li>[[scala.reflect.api.TypeTags#WeakTypeTag]]. <br/>A type descriptor for abstract
+ * types (see description below).</li>
+ * </ul>
+ *
+ * Like [[scala.reflect.Manifest Manifest]]s, `TypeTag`s are always generated by the
+ * compiler, and can be obtained in three ways:
+ *
+ * === #1 Via the methods [[scala.reflect.api.TypeTags#typeTag typeTag]],
+ * [[scala.reflect#classTag classTag]], or [[scala.reflect.api.TypeTags#weakTypeTag weakTypeTag]] ===
+ *
+ * For example:
+ * {{{
+ * import scala.reflect.runtime.universe._
+ * val tt = typeTag[Int]
+ *
+ * import scala.reflect._
+ * val ct = classTag[String]
+ * }}}
+ *
+ * Each of these methods constructs a `TypeTag[T]` or `ClassTag[T]` for the given
+ * type argument `T`.
+ *
+ * === #2 Using an implicit parameter of type `TypeTag[T]`, `ClassTag[T]`, or `WeakTypeTag[T]` ===
+ *
+ * For example:
+ * {{{
+ * import scala.reflect.runtime.universe._
+ *
+ * def paramInfo[T](x: T)(implicit tag: TypeTag[T]): Unit = {
+ * val targs = tag.tpe match { case TypeRef(_, _, args) => args }
+ * println(s"type of $x has type arguments $targs")
+ * }
+ *
+ * scala> paramInfo(42)
+ * type of 42 has type arguments List()
+ *
+ * scala> paramInfo(List(1, 2))
+ * type of List(1, 2) has type arguments List(Int)
+ * }}}
+ *
+ * === #3 Context bound of a type parameter ===
+ *
+ * ...on methods or classes. The above example can be implemented as follows:
+ *
+ * {{{
+ * import scala.reflect.runtime.universe._
+ *
+ * def paramInfo[T: TypeTag](x: T): Unit = {
+ * val targs = typeOf[T] match { case TypeRef(_, _, args) => args }
+ * println(s"type of $x has type arguments $targs")
+ * }
+ *
+ * scala> paramInfo(42)
+ * type of 42 has type arguments List()
+ *
+ * scala> paramInfo(List(1, 2))
+ * type of List(1, 2) has type arguments List(Int)
+ * }}}
+ *
+ * === `WeakTypeTag`s ===
+ *
+ * `WeakTypeTag[T]` generalizes `TypeTag[T]`. Unlike a regular `TypeTag`, components of
+ * its type representation can be references to type parameters or abstract types.
+ * However, `WeakTypeTag[T]` tries to be as concrete as possible, i.e. if type tags
+ * are available for the referenced type arguments or abstract types, they are used to
+ * embed the concrete types into the `WeakTypeTag[T]`.
+ *
+ * Continuing the example above:
+ * {{{
+ * def weakParamInfo[T](x: T)(implicit tag: WeakTypeTag[T]): Unit = {
+ * val targs = tag.tpe match { case TypeRef(_, _, args) => args }
+ * println(s"type of $x has type arguments $targs")
+ * }
+ *
+ * scala> def foo[T] = weakParamInfo(List[T]())
+ * foo: [T]=> Unit
+ *
+ * scala> foo[Int]
+ * type of List() has type arguments List(T)
+ * }}}
+ *
+ * === TypeTags and Manifests ===
+ *
+ * `TypeTag`s correspond loosely to the pre-2.10 notion of
+ * [[scala.reflect.Manifest]]s. While [[scala.reflect.ClassTag]] corresponds to
+ * [[scala.reflect.ClassManifest]] and [[scala.reflect.api.TypeTags#TypeTag]] mostly
+ * corresponds to [[scala.reflect.Manifest]], other pre-2.10 `Manifest` types do not
+ * have a direct correspondence with a 2.10 "`Tag`" type.
+ *
+ * <ul>
+ * <li>'''[[scala.reflect.OptManifest]] is not supported.''' <br/>This is because `Tag`s
+ * can reify arbitrary types, so they are always available.</li>
+ *
+ * <li>'''There is no equivalent for [[scala.reflect.AnyValManifest]].''' <br/>Instead, one
+ * can compare their `Tag` with one of the base `Tag`s (defined in the corresponding
+ * companion objects) in order to find out whether or not it represents a primitive
+ * value class. Additionally, it's possible to simply use
+ * `<tag>.tpe.typeSymbol.isPrimitiveValueClass`.</li>
+ *
+ * <li>'''There is no replacement for factory methods defined in the `Manifest`
+ * companion objects'''. <br/>Instead, one could generate corresponding types using the
+ * reflection APIs provided by Java (for classes) and Scala (for types).</li>
+ *
+ * <li>'''Certain manifest operations (i.e., <:<, >:> and typeArguments) are not
+ * supported.''' <br/>Instead, one could use the reflection APIs provided by Java (for
+ * classes) and Scala (for types).</li>
+ *</ul>
+ *
+ * In Scala 2.10, [[scala.reflect.ClassManifest]]s are deprecated, and it is planned
+ * to deprecate [[scala.reflect.Manifest]] in favor of `TypeTag`s and `ClassTag`s in
+ * an upcoming point release. Thus, it is advisable to migrate any `Manifest`-based
+ * APIs to use `Tag`s.
+ *
+ * For more information about `TypeTag`s, see the
+ * [[http://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]]
+ *
+ * @see [[scala.reflect.ClassTag]], [[scala.reflect.api.TypeTags#TypeTag]], [[scala.reflect.api.TypeTags#WeakTypeTag]]
+ * @group ReflectionAPI
+ */
+trait TypeTags { self: Universe =>
+
+ import definitions._
+
+ /**
+ * If an implicit value of type `WeakTypeTag[T]` is required, the compiler will create one,
+ * and the reflective representation of `T` can be accessed via the `tpe` field.
+ * Components of `T` can be references to type parameters or abstract types. Note that `WeakTypeTag`
+ * makes an effort to be as concrete as possible, i.e. if `TypeTag`s are available for the referenced type arguments
+ * or abstract types, they are used to embed the concrete types into the WeakTypeTag. Otherwise the WeakTypeTag will
+ * contain a reference to an abstract type. This behavior can be useful, when one expects `T` to perhaps be partially
+ * abstract, but requires special care to handle this case. However, if `T` is expected to be fully known, use
+ * [[scala.reflect.api.TypeTags#TypeTag]] instead, which statically guarantees this property.
+ *
+ * For more information about `TypeTag`s, see the
+ * [[http://docs.scala-lang.org/overviews/reflection/typetags-manifests.html Reflection Guide: TypeTags]]
+ *
+ * @see [[scala.reflect.api.TypeTags]]
+ * @group TypeTags
+ */
+ @annotation.implicitNotFound(msg = "No WeakTypeTag available for ${T}")
+ trait WeakTypeTag[T] extends Equals with Serializable {
+ /**
+ * The underlying `Mirror` of this type tag.
+ */
+ val mirror: Mirror
+
+ /**
+ * Migrates the expression into another mirror, jumping into a different universe if necessary.
+ *
+ * Migration means that all symbolic references to classes/objects/packages in the expression
+ * will be re-resolved within the new mirror (typically using that mirror's classloader).
+ */
+ def in[U <: Universe with Singleton](otherMirror: scala.reflect.api.Mirror[U]): U # WeakTypeTag[T]
+
+ /**
+ * Reflective representation of type T.
+ */
+ def tpe: Type
+
+ override def canEqual(x: Any) = x.isInstanceOf[WeakTypeTag[_]]
+
+ override def equals(x: Any) = x.isInstanceOf[WeakTypeTag[_]] && this.mirror == x.asInstanceOf[WeakTypeTag[_]].mirror && this.tpe == x.asInstanceOf[WeakTypeTag[_]].tpe
+
+ override def hashCode = mirror.hashCode * 31 + tpe.hashCode
+
+ override def toString = "WeakTypeTag[" + tpe + "]"
+ }
+
+ /**
+ * Type tags corresponding to primitive types and constructor/extractor for WeakTypeTags.
+ * @group TypeTags
+ */
+ object WeakTypeTag {
+ val Byte : WeakTypeTag[scala.Byte] = TypeTag.Byte
+ val Short : WeakTypeTag[scala.Short] = TypeTag.Short
+ val Char : WeakTypeTag[scala.Char] = TypeTag.Char
+ val Int : WeakTypeTag[scala.Int] = TypeTag.Int
+ val Long : WeakTypeTag[scala.Long] = TypeTag.Long
+ val Float : WeakTypeTag[scala.Float] = TypeTag.Float
+ val Double : WeakTypeTag[scala.Double] = TypeTag.Double
+ val Boolean : WeakTypeTag[scala.Boolean] = TypeTag.Boolean
+ val Unit : WeakTypeTag[scala.Unit] = TypeTag.Unit
+ val Any : WeakTypeTag[scala.Any] = TypeTag.Any
+ val AnyVal : WeakTypeTag[scala.AnyVal] = TypeTag.AnyVal
+ val AnyRef : WeakTypeTag[scala.AnyRef] = TypeTag.AnyRef
+ val Object : WeakTypeTag[java.lang.Object] = TypeTag.Object
+ val Nothing : WeakTypeTag[scala.Nothing] = TypeTag.Nothing
+ val Null : WeakTypeTag[scala.Null] = TypeTag.Null
+
+
+ def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): WeakTypeTag[T] =
+ new WeakTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
+
+ def unapply[T](ttag: WeakTypeTag[T]): Option[Type] = Some(ttag.tpe)
+ }
+
+ /* @group TypeTags */
+ private class WeakTypeTagImpl[T](val mirror: Mirror, val tpec: TypeCreator) extends WeakTypeTag[T] {
+ lazy val tpe: Type = tpec(mirror)
+ def in[U <: Universe with Singleton](otherMirror: scala.reflect.api.Mirror[U]): U # WeakTypeTag[T] = {
+ val otherMirror1 = otherMirror.asInstanceOf[scala.reflect.api.Mirror[otherMirror.universe.type]]
+ otherMirror.universe.WeakTypeTag[T](otherMirror1, tpec)
+ }
+ private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = false)
+ }
+
+ /**
+ * A `TypeTag` is a [[scala.reflect.api.TypeTags#WeakTypeTag]] with the additional
+ * static guarantee that all type references are concrete, i.e. it does <b>not</b> contain any references to
+ * unresolved type parameters or abstract types.
+ *
+ * @see [[scala.reflect.api.TypeTags]]
+ * @group TypeTags
+ */
+ @annotation.implicitNotFound(msg = "No TypeTag available for ${T}")
+ trait TypeTag[T] extends WeakTypeTag[T] with Equals with Serializable {
+ /**
+ * @inheritdoc
+ */
+ override def in[U <: Universe with Singleton](otherMirror: scala.reflect.api.Mirror[U]): U # TypeTag[T]
+
+ override def canEqual(x: Any) = x.isInstanceOf[TypeTag[_]]
+
+ override def equals(x: Any) = x.isInstanceOf[TypeTag[_]] && this.mirror == x.asInstanceOf[TypeTag[_]].mirror && this.tpe == x.asInstanceOf[TypeTag[_]].tpe
+
+ override def hashCode = mirror.hashCode * 31 + tpe.hashCode
+
+ override def toString = "TypeTag[" + tpe + "]"
+ }
+
+ /**
+ * Type tags corresponding to primitive types and constructor/extractor for WeakTypeTags.
+ * @group TypeTags
+ */
+ object TypeTag {
+ val Byte: TypeTag[scala.Byte] = new PredefTypeTag[scala.Byte] (ByteTpe, _.TypeTag.Byte)
+ val Short: TypeTag[scala.Short] = new PredefTypeTag[scala.Short] (ShortTpe, _.TypeTag.Short)
+ val Char: TypeTag[scala.Char] = new PredefTypeTag[scala.Char] (CharTpe, _.TypeTag.Char)
+ val Int: TypeTag[scala.Int] = new PredefTypeTag[scala.Int] (IntTpe, _.TypeTag.Int)
+ val Long: TypeTag[scala.Long] = new PredefTypeTag[scala.Long] (LongTpe, _.TypeTag.Long)
+ val Float: TypeTag[scala.Float] = new PredefTypeTag[scala.Float] (FloatTpe, _.TypeTag.Float)
+ val Double: TypeTag[scala.Double] = new PredefTypeTag[scala.Double] (DoubleTpe, _.TypeTag.Double)
+ val Boolean: TypeTag[scala.Boolean] = new PredefTypeTag[scala.Boolean] (BooleanTpe, _.TypeTag.Boolean)
+ val Unit: TypeTag[scala.Unit] = new PredefTypeTag[scala.Unit] (UnitTpe, _.TypeTag.Unit)
+ val Any: TypeTag[scala.Any] = new PredefTypeTag[scala.Any] (AnyTpe, _.TypeTag.Any)
+ val AnyVal: TypeTag[scala.AnyVal] = new PredefTypeTag[scala.AnyVal] (AnyValTpe, _.TypeTag.AnyVal)
+ val AnyRef: TypeTag[scala.AnyRef] = new PredefTypeTag[scala.AnyRef] (AnyRefTpe, _.TypeTag.AnyRef)
+ val Object: TypeTag[java.lang.Object] = new PredefTypeTag[java.lang.Object] (ObjectTpe, _.TypeTag.Object)
+ val Nothing: TypeTag[scala.Nothing] = new PredefTypeTag[scala.Nothing] (NothingTpe, _.TypeTag.Nothing)
+ val Null: TypeTag[scala.Null] = new PredefTypeTag[scala.Null] (NullTpe, _.TypeTag.Null)
+
+ def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): TypeTag[T] =
+ new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
+
+ def unapply[T](ttag: TypeTag[T]): Option[Type] = Some(ttag.tpe)
+ }
+
+ /* @group TypeTags */
+ private class TypeTagImpl[T](mirror: Mirror, tpec: TypeCreator) extends WeakTypeTagImpl[T](mirror, tpec) with TypeTag[T] {
+ override def in[U <: Universe with Singleton](otherMirror: scala.reflect.api.Mirror[U]): U # TypeTag[T] = {
+ val otherMirror1 = otherMirror.asInstanceOf[scala.reflect.api.Mirror[otherMirror.universe.type]]
+ otherMirror.universe.TypeTag[T](otherMirror1, tpec)
+ }
+ private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true)
+ }
+
+ /* @group TypeTags */
+ private class PredefTypeCreator[T](copyIn: Universe => Universe#TypeTag[T]) extends TypeCreator {
+ def apply[U <: Universe with Singleton](m: scala.reflect.api.Mirror[U]): U # Type = {
+ copyIn(m.universe).asInstanceOf[U # TypeTag[T]].tpe
+ }
+ }
+
+ /* @group TypeTags */
+ private class PredefTypeTag[T](_tpe: Type, copyIn: Universe => Universe#TypeTag[T]) extends TypeTagImpl[T](rootMirror, new PredefTypeCreator(copyIn)) {
+ override lazy val tpe: Type = _tpe
+ private def writeReplace(): AnyRef = new SerializedTypeTag(tpec, concrete = true)
+ }
+
+ /**
+ * Shortcut for `implicitly[WeakTypeTag[T]]`
+ * @group TypeTags
+ */
+ def weakTypeTag[T](implicit attag: WeakTypeTag[T]) = attag
+
+ /**
+ * Shortcut for `implicitly[TypeTag[T]]`
+ * @group TypeTags
+ */
+ def typeTag[T](implicit ttag: TypeTag[T]) = ttag
+
+ // big thanks to Viktor Klang for this brilliant idea!
+ /**
+ * Shortcut for `implicitly[WeakTypeTag[T]].tpe`
+ * @group TypeTags
+ */
+ def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
+
+ /**
+ * Shortcut for `implicitly[TypeTag[T]].tpe`
+ * @group TypeTags
+ */
+ def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
+}
+
+private[scala] class SerializedTypeTag(var tpec: TypeCreator, var concrete: Boolean) extends Serializable {
+ private def writeObject(out: java.io.ObjectOutputStream): Unit = {
+ out.writeObject(tpec)
+ out.writeBoolean(concrete)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream): Unit = {
+ tpec = in.readObject().asInstanceOf[TypeCreator]
+ concrete = in.readBoolean()
+ }
+
+ private def readResolve(): AnyRef = {
+ import scala.reflect.runtime.universe._
+ if (concrete) TypeTag(rootMirror, tpec)
+ else WeakTypeTag(rootMirror, tpec)
+ }
+}
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
new file mode 100644
index 0000000..72163ef
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -0,0 +1,997 @@
+package scala.reflect
+package api
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A trait that defines types and operations on them.
+ *
+ * Type instances represent information about the type of a corresponding symbol. This includes its members
+ * (methods, fields, type parameters, nested classes, traits, etc.) either declared directly or inherited, its base types,
+ * its erasure and so on. Types also provide operations to test for type conformance or equivalence or for widening.
+ *
+ * To instantiate a type, most of the time, the [[scala.reflect.api.TypeTags#typeOf]] method can be used. It takes
+ * a type argument and produces a `Type` instance which represents that argument. For example:
+ *
+ * {{{
+ * scala> typeOf[List[Int]]
+ * res0: reflect.runtime.universe.Type = scala.List[Int]
+ * }}}
+ *
+ * In this example, a [[scala.reflect.api.Types#TypeRef]] is returned, which corresponds to the type constructor `List`
+ * applied to the type argument `Int`.
+ *
+ * ''Note:'' Method `typeOf` does not work for types with type parameters, such as `typeOf[List[A]]` where `A` is
+ * a type parameter. In this case, use [[scala.reflect.api.TypeTags#weakTypeOf]] instead.
+ *
+ * For other ways to instantiate types, see the [[http://docs.scala-lang.org/overviews/reflection/symbols-trees-types.html corresponding section of the Reflection Guide]].
+ *
+ * === Common Operations on Types ===
+ *
+ * Types are typically used for type conformance tests or are queried for declarations of members or inner types.
+ *
+ * - '''Subtyping Relationships''' can be tested using `<:<` and `weak_<:<`.
+ * - '''Type Equality''' can be checked with `=:=`. It's important to note that `==` should not be used to compare types for equality-- `==` can't check for type equality in the presence of type aliases, while `=:=` can.
+ *
+ * Types can be queried for members and declarations by using the `members` and `declarations` methods (along with
+ * their singular counterparts `member` and `declaration`), which provide the list of definitions associated with that type.
+ * For example, to look up the `map` method of `List`, one can do:
+ *
+ * {{{
+ * scala> typeOf[List[_]].member("map": TermName)
+ * res1: reflect.runtime.universe.Symbol = method map
+ * }}}
+ *
+ * For more information about `Type`s, see the [[http://docs.scala-lang.org/overviews/reflection/symbols-trees-types.html Reflection Guide: Symbols, Trees, and Types]]
+ *
+ * @groupname TypeCreators Types - Creation
+ * @groupname TypeOps Types - Operations
+ * @group ReflectionAPI
+ *
+ * @contentDiagram hideNodes "*Api"
+ */
+trait Types { self: Universe =>
+
+ /** The type of Scala types, and also Scala type signatures.
+ * (No difference is internally made between the two).
+ * @template
+ * @group Types
+ */
+ type Type >: Null <: TypeApi
+
+ /** A tag that preserves the identity of the `Type` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TypeTagg: ClassTag[Type]
+
+ /** This constant is used as a special value that indicates that no meaningful type exists.
+ * @group Types
+ */
+ val NoType: Type
+
+ /** This constant is used as a special value denoting the empty prefix in a path dependent type.
+ * For instance `x.type` is represented as `SingleType(NoPrefix, <x>)`, where `<x>` stands for
+ * the symbol for `x`.
+ * @group Types
+ */
+ val NoPrefix: Type
+
+ /** The API of types.
+ * The main source of information about types is the [[scala.reflect.api.Types]] page.
+ * @group API
+ */
+ abstract class TypeApi {
+ /** The term symbol associated with the type, or `NoSymbol` for types
+ * that do not refer to a term symbol.
+ */
+ def termSymbol: Symbol
+
+ /** The type symbol associated with the type, or `NoSymbol` for types
+ * that do not refer to a type symbol.
+ */
+ def typeSymbol: Symbol
+
+ /** The defined or declared members with name `name` in this type;
+ * an OverloadedSymbol if several exist, NoSymbol if none exist.
+ * Alternatives of overloaded symbol appear in the order they are declared.
+ */
+ def declaration(name: Name): Symbol
+
+ /** A `Scope` containing directly declared members of this type.
+ * Unlike `members` this method doesn't return inherited members.
+ *
+ * Members in the returned scope might appear in arbitrary order.
+ * Use `declarations.sorted` to get an ordered list of members.
+ */
+ def declarations: MemberScope
+
+ /** The member with given name, either directly declared or inherited,
+ * an OverloadedSymbol if several exist, NoSymbol if none exist.
+ */
+ def member(name: Name): Symbol
+
+ /** A `Scope` containing all members of this type (directly declared or inherited).
+ * Unlike `declarations` this method also returns inherited members.
+ *
+ * Members in the returned scope might appear in arbitrary order.
+ * Use `members.sorted` to get an ordered list of members.
+ */
+ def members: MemberScope
+
+ /** Is this type a type constructor that is missing its type arguments?
+ */
+ def takesTypeArgs: Boolean
+
+ /** Returns the corresponding type constructor (e.g. List for List[T] or List[String])
+ */
+ def typeConstructor: Type
+
+ /**
+ * Expands type aliases and converts higher-kinded TypeRefs to PolyTypes.
+ * Functions on types are also implemented as PolyTypes.
+ *
+ * Example: (in the below, <List> is the type constructor of List)
+ * TypeRef(pre, <List>, List()) is replaced by
+ * PolyType(X, TypeRef(pre, <List>, List(X)))
+ */
+ def normalize: Type
+
+ /** Does this type conform to given type argument `that`? */
+ def <:< (that: Type): Boolean
+
+ /** Does this type weakly conform to given type argument `that`, i.e., either conforms in terms of `<:<` or both are primitive number types
+ * that conform according to Section "Weak Conformance" in the spec. For example, Int weak_<:< Long.
+ */
+ def weak_<:<(that: Type): Boolean
+
+ /** Is this type equivalent to given type argument `that`? */
+ def =:= (that: Type): Boolean
+
+ /** The list of all base classes of this type (including its own typeSymbol)
+ * in linearization order, starting with the class itself and ending
+ * in class Any.
+ */
+ def baseClasses: List[Symbol]
+
+ /** The least type instance of given class which is a super-type
+ * of this type. Example:
+ * {{{
+ * class D[T]
+ * class C extends p.D[Int]
+ * ThisType(C).baseType(D) = p.D[Int]
+ * }}}
+ */
+ def baseType(clazz: Symbol): Type
+
+ /** This type as seen from prefix `pre` and class `clazz`. This means:
+ * Replace all `ThisType`s of `clazz` or one of its subclasses
+ * by `pre` and instantiate all parameters by arguments of `pre`.
+ * Proceed analogously for `ThisType`s referring to outer classes.
+ *
+ * Example:
+ * {{{
+ * scala> import scala.reflect.runtime.universe._
+ * import scala.reflect.runtime.universe._
+ *
+ * scala> class D[T] { def m: T = ??? }
+ * defined class D
+ *
+ * scala> class C extends D[Int]
+ * defined class C
+ *
+ * scala> val D = typeOf[D[_]].typeSymbol.asClass
+ * D: reflect.runtime.universe.ClassSymbol = class D
+ *
+ * scala> val C = typeOf[C].typeSymbol.asClass
+ * C: reflect.runtime.universe.ClassSymbol = class C
+ *
+ * scala> val T = D.typeParams(0).asType.toType
+ * T: reflect.runtime.universe.Type = T
+ *
+ * scala> T.asSeenFrom(ThisType(C), D)
+ * res0: reflect.runtime.universe.Type = scala.Int
+ * }}}
+ */
+ def asSeenFrom(pre: Type, clazz: Symbol): Type
+
+ /** The erased type corresponding to this type after
+ * all transformations from Scala to Java have been performed.
+ */
+ def erasure: Type
+
+ /** If this is a singleton type, widen it to its nearest underlying non-singleton
+ * base type by applying one or more `underlying` dereferences.
+ * If this is not a singleton type, returns this type itself.
+ *
+ * Example:
+ *
+ * class Outer { class C ; val x: C }
+ * val o: Outer
+ * <o.x.type>.widen = o.C
+ */
+ def widen: Type
+
+ /******************* helpers *******************/
+
+ /** Substitute symbols in `to` for corresponding occurrences of references to
+ * symbols `from` in this type.
+ */
+ def substituteSymbols(from: List[Symbol], to: List[Symbol]): Type
+
+ /** Substitute types in `to` for corresponding occurrences of references to
+ * symbols `from` in this type.
+ */
+ def substituteTypes(from: List[Symbol], to: List[Type]): Type
+
+ /** Apply `f` to each part of this type, returning
+ * a new type. children get mapped before their parents */
+ def map(f: Type => Type): Type
+
+ /** Apply `f` to each part of this type, for side effects only */
+ def foreach(f: Type => Unit)
+
+ /** Returns optionally first type (in a preorder traversal) which satisfies predicate `p`,
+ * or None if none exists.
+ */
+ def find(p: Type => Boolean): Option[Type]
+
+ /** Is there part of this type which satisfies predicate `p`? */
+ def exists(p: Type => Boolean): Boolean
+
+ /** Does this type contain a reference to given symbol? */
+ def contains(sym: Symbol): Boolean
+ }
+
+ /** The type of Scala singleton types, i.e., types that are inhabited
+ * by only one non-null value. These include types of the forms
+ * {{{
+ * C.this.type
+ * C.super.type
+ * x.type
+ * }}}
+ * as well as [[ConstantType constant types]].
+ * @template
+ * @group Types
+ */
+ type SingletonType >: Null <: Type
+
+ /** A tag that preserves the identity of the `SingletonType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val SingletonTypeTag: ClassTag[SingletonType]
+
+ /** A singleton type that describes types of the form on the left with the
+ * corresponding `ThisType` representation to the right:
+ * {{{
+ * C.this.type ThisType(C)
+ * }}}
+ * @template
+ * @group Types
+ */
+ type ThisType >: Null <: AnyRef with SingletonType with ThisTypeApi
+
+ /** A tag that preserves the identity of the `ThisType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ThisTypeTag: ClassTag[ThisType]
+
+ /** The constructor/extractor for `ThisType` instances.
+ * @group Extractors
+ */
+ val ThisType: ThisTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `ThisType(sym)`
+ * where `sym` is the class prefix of the this type.
+ * @group Extractors
+ */
+ abstract class ThisTypeExtractor {
+ /**
+ * Creates a ThisType from the given class symbol.
+ */
+ def apply(sym: Symbol): Type
+ def unapply(tpe: ThisType): Option[Symbol]
+ }
+
+ /** The API that all this types support.
+ * The main source of information about types is the [[scala.reflect.api.Types]] page.
+ * @group API
+ */
+ trait ThisTypeApi extends TypeApi { this: ThisType =>
+ /** The underlying class symbol. */
+ def sym: Symbol
+ }
+
+ /** The `SingleType` type describes types of any of the forms on the left,
+ * with their TypeRef representations to the right.
+ * {{{
+ * (T # x).type SingleType(T, x)
+ * p.x.type SingleType(p.type, x)
+ * x.type SingleType(NoPrefix, x)
+ * }}}
+ * @template
+ * @group Types
+ */
+ type SingleType >: Null <: AnyRef with SingletonType with SingleTypeApi
+
+ /** A tag that preserves the identity of the `SingleType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val SingleTypeTag: ClassTag[SingleType]
+
+ /** The constructor/extractor for `SingleType` instances.
+ * @group Extractors
+ */
+ val SingleType: SingleTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `SingleType(pre, sym)`
+ * Here, `pre` is the prefix of the single-type, and `sym` is the stable value symbol
+ * referred to by the single-type.
+ * @group Extractors
+ */
+ abstract class SingleTypeExtractor {
+ def apply(pre: Type, sym: Symbol): Type // not SingleType because of implementation details
+ def unapply(tpe: SingleType): Option[(Type, Symbol)]
+ }
+
+ /** The API that all single types support.
+ * The main source of information about types is the [[scala.reflect.api.Types]] page.
+ * @group API
+ */
+ trait SingleTypeApi extends TypeApi { this: SingleType =>
+ /** The type of the qualifier. */
+ def pre: Type
+
+ /** The underlying symbol. */
+ def sym: Symbol
+ }
+ /** The `SuperType` type is not directly written, but arises when `C.super` is used
+ * as a prefix in a `TypeRef` or `SingleType`. Its internal representation is
+ * {{{
+ * SuperType(thistpe, supertpe)
+ * }}}
+ * Here, `thistpe` is the type of the corresponding this-type. For instance,
+ * in the type arising from C.super, the `thistpe` part would be `ThisType(C)`.
+ * `supertpe` is the type of the super class referred to by the `super`.
+ * @template
+ * @group Types
+ */
+ type SuperType >: Null <: AnyRef with SingletonType with SuperTypeApi
+
+ /** A tag that preserves the identity of the `SuperType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val SuperTypeTag: ClassTag[SuperType]
+
+ /** The constructor/extractor for `SuperType` instances.
+ * @group Extractors
+ */
+ val SuperType: SuperTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `SuperType(thistpe, supertpe)`
+ * @group Extractors
+ */
+ abstract class SuperTypeExtractor {
+ def apply(thistpe: Type, supertpe: Type): Type // not SuperType because of implementation details
+ def unapply(tpe: SuperType): Option[(Type, Type)]
+ }
+
+ /** The API that all super types support.
+ * The main source of information about types is the [[scala.reflect.api.Types]] page.
+ * @group API
+ */
+ trait SuperTypeApi extends TypeApi { this: SuperType =>
+ /** The type of the qualifier.
+ * See the example for [[scala.reflect.api.Trees#SuperExtractor]].
+ */
+ def thistpe: Type
+
+ /** The type of the selector.
+ * See the example for [[scala.reflect.api.Trees#SuperExtractor]].
+ */
+ def supertpe: Type
+ }
+ /** The `ConstantType` type is not directly written in user programs, but arises as the type of a constant.
+ * The REPL expresses constant types like `Int(11)`. Here are some constants with their types:
+ * {{{
+ * 1 ConstantType(Constant(1))
+ * "abc" ConstantType(Constant("abc"))
+ * }}}
+ * @template
+ * @group Types
+ */
+ type ConstantType >: Null <: AnyRef with SingletonType with ConstantTypeApi
+
+ /** A tag that preserves the identity of the `ConstantType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ConstantTypeTag: ClassTag[ConstantType]
+
+ /** The constructor/extractor for `ConstantType` instances.
+ * @group Extractors
+ */
+ val ConstantType: ConstantTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `ConstantType(constant)`
+ * Here, `constant` is the constant value represented by the type.
+ * @group Extractors
+ */
+ abstract class ConstantTypeExtractor {
+ def apply(value: Constant): ConstantType
+ def unapply(tpe: ConstantType): Option[Constant]
+ }
+
+ /** The API that all constant types support.
+ * The main source of information about types is the [[scala.reflect.api.Types]] page.
+ * @group API
+ */
+ trait ConstantTypeApi extends TypeApi { this: ConstantType =>
+ /** The compile-time constant underlying this type. */
+ def value: Constant
+ }
+
+ /** The `TypeRef` type describes types of any of the forms on the left,
+ * with their TypeRef representations to the right.
+ * {{{
+ * T # C[T_1, ..., T_n] TypeRef(T, C, List(T_1, ..., T_n))
+ * p.C[T_1, ..., T_n] TypeRef(p.type, C, List(T_1, ..., T_n))
+ * C[T_1, ..., T_n] TypeRef(NoPrefix, C, List(T_1, ..., T_n))
+ * T # C TypeRef(T, C, Nil)
+ * p.C TypeRef(p.type, C, Nil)
+ * C TypeRef(NoPrefix, C, Nil)
+ * }}}
+ * @template
+ * @group Types
+ */
+ type TypeRef >: Null <: AnyRef with Type with TypeRefApi
+
+ /** A tag that preserves the identity of the `TypeRef` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TypeRefTag: ClassTag[TypeRef]
+
+ /** The constructor/extractor for `TypeRef` instances.
+ * @group Extractors
+ */
+ val TypeRef: TypeRefExtractor
+
+ /** An extractor class to create and pattern match with syntax `TypeRef(pre, sym, args)`
+ * Here, `pre` is the prefix of the type reference, `sym` is the symbol
+ * referred to by the type reference, and `args` is a possible empty list of
+ * type arguments.
+ * @group Extractors
+ */
+ abstract class TypeRefExtractor {
+ def apply(pre: Type, sym: Symbol, args: List[Type]): Type // not TypeRef because of implementation details
+ def unapply(tpe: TypeRef): Option[(Type, Symbol, List[Type])]
+ }
+
+ /** The API that all type refs support.
+ * The main source of information about types is the [[scala.reflect.api.Types]] page.
+ * @group API
+ */
+ trait TypeRefApi extends TypeApi { this: TypeRef =>
+ /** The prefix of the type reference.
+ * Is equal to `NoPrefix` if the prefix is not applicable.
+ */
+ def pre: Type
+
+ /** The underlying symbol of the type reference. */
+ def sym: Symbol
+
+ /** The arguments of the type reference.
+ * Is equal to `Nil` if the arguments are not provided.
+ */
+ def args: List[Type]
+ }
+
+ /** A subtype of Type representing refined types as well as `ClassInfo` signatures.
+ * @template
+ * @group Types
+ */
+ type CompoundType >: Null <: AnyRef with Type
+
+ /** A tag that preserves the identity of the `CompoundType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val CompoundTypeTag: ClassTag[CompoundType]
+
+ /** The `RefinedType` type defines types of any of the forms on the left,
+ * with their RefinedType representations to the right.
+ * {{{
+ * P_1 with ... with P_m { D_1; ...; D_n} RefinedType(List(P_1, ..., P_m), Scope(D_1, ..., D_n))
+ * P_1 with ... with P_m RefinedType(List(P_1, ..., P_m), Scope())
+ * { D_1; ...; D_n} RefinedType(List(AnyRef), Scope(D_1, ..., D_n))
+ * }}}
+ * @template
+ * @group Types
+ */
+ type RefinedType >: Null <: AnyRef with CompoundType with RefinedTypeApi
+
+ /** A tag that preserves the identity of the `RefinedType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val RefinedTypeTag: ClassTag[RefinedType]
+
+ /** The constructor/extractor for `RefinedType` instances.
+ * @group Extractors
+ */
+ val RefinedType: RefinedTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `RefinedType(parents, decls)`
+ * Here, `parents` is the list of parent types of the class, and `decls` is the scope
+ * containing all declarations in the class.
+ * @group Extractors
+ */
+ abstract class RefinedTypeExtractor {
+ def apply(parents: List[Type], decls: Scope): RefinedType
+
+ /** An alternative constructor that passes in the synthetic class symbol
+ * that backs the refined type. (Normally, a fresh class symbol is created automatically).
+ */
+ def apply(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType
+ def unapply(tpe: RefinedType): Option[(List[Type], Scope)]
+ }
+
+ /** The API that all refined types support.
+ * The main source of information about types is the [[scala.reflect.api.Types]] page.
+ * @group API
+ */
+ trait RefinedTypeApi extends TypeApi { this: RefinedType =>
+ /** The superclasses of the type. */
+ def parents: List[Type]
+
+ /** The scope that holds the definitions comprising the type. */
+ def decls: Scope
+ }
+
+ /** The `ClassInfo` type signature is used to define parents and declarations
+ * of classes, traits, and objects. If a class, trait, or object C is declared like this
+ * {{{
+ * C extends P_1 with ... with P_m { D_1; ...; D_n}
+ * }}}
+ * its `ClassInfo` type has the following form:
+ * {{{
+ * ClassInfo(List(P_1, ..., P_m), Scope(D_1, ..., D_n), C)
+ * }}}
+ * @template
+ * @group Types
+ */
+ type ClassInfoType >: Null <: AnyRef with CompoundType with ClassInfoTypeApi
+
+ /** A tag that preserves the identity of the `ClassInfoType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ClassInfoTypeTag: ClassTag[ClassInfoType]
+
+ /** The constructor/extractor for `ClassInfoType` instances.
+ * @group Extractors
+ */
+ val ClassInfoType: ClassInfoTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `ClassInfo(parents, decls, clazz)`
+ * Here, `parents` is the list of parent types of the class, `decls` is the scope
+ * containing all declarations in the class, and `clazz` is the symbol of the class
+ * itself.
+ * @group Extractors
+ */
+ abstract class ClassInfoTypeExtractor {
+ def apply(parents: List[Type], decls: Scope, typeSymbol: Symbol): ClassInfoType
+ def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)]
+ }
+
+ /** The API that all class info types support.
+ * The main source of information about types is the [[scala.reflect.api.Types]] page.
+ * @group API
+ */
+ trait ClassInfoTypeApi extends TypeApi { this: ClassInfoType =>
+ /** The superclasses of the class type. */
+ def parents: List[Type]
+
+ /** The scope that holds the definitions comprising the class type. */
+ def decls: Scope
+
+ /** The symbol underlying the class type. */
+ def typeSymbol: Symbol
+ }
+
+ /** The `MethodType` type signature is used to indicate parameters and result type of a method
+ * @template
+ * @group Types
+ */
+ type MethodType >: Null <: AnyRef with Type with MethodTypeApi
+
+ /** A tag that preserves the identity of the `MethodType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val MethodTypeTag: ClassTag[MethodType]
+
+ /** The constructor/extractor for `MethodType` instances.
+ * @group Extractors
+ */
+ val MethodType: MethodTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `MethodType(params, restpe)`
+ * Here, `params` is a potentially empty list of parameter symbols of the method,
+ * and `restpe` is the result type of the method. If the method is curried, `restpe` would
+ * be another `MethodType`.
+ * Note: `MethodType(Nil, Int)` would be the type of a method defined with an empty parameter list.
+ * {{{
+ * def f(): Int
+ * }}}
+ * If the method is completely parameterless, as in
+ * {{{
+ * def f: Int
+ * }}}
+ * its type is a `NullaryMethodType`.
+ * @group Extractors
+ */
+ abstract class MethodTypeExtractor {
+ def apply(params: List[Symbol], resultType: Type): MethodType
+ def unapply(tpe: MethodType): Option[(List[Symbol], Type)]
+ }
+
+ /** The API that all method types support.
+ * The main source of information about types is the [[scala.reflect.api.Types]] page.
+ * @group API
+ */
+ trait MethodTypeApi extends TypeApi { this: MethodType =>
+ /** The symbols that correspond to the parameters of the method. */
+ def params: List[Symbol]
+
+ /** The result type of the method. */
+ def resultType: Type
+ }
+
+ /** The `NullaryMethodType` type signature is used for parameterless methods
+ * with declarations of the form `def foo: T`
+ * @template
+ * @group Types
+ */
+ type NullaryMethodType >: Null <: AnyRef with Type with NullaryMethodTypeApi
+
+ /** A tag that preserves the identity of the `NullaryMethodType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val NullaryMethodTypeTag: ClassTag[NullaryMethodType]
+
+ /** The constructor/extractor for `NullaryMethodType` instances.
+ * @group Extractors
+ */
+ val NullaryMethodType: NullaryMethodTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `NullaryMethodType(resultType)`.
+ * Here, `resultType` is the result type of the parameterless method.
+ * @group Extractors
+ */
+ abstract class NullaryMethodTypeExtractor {
+ def apply(resultType: Type): NullaryMethodType
+ def unapply(tpe: NullaryMethodType): Option[(Type)]
+ }
+
+ /** The API that all nullary method types support.
+ * The main source of information about types is the [[scala.reflect.api.Types]] page.
+ * @group API
+ */
+ trait NullaryMethodTypeApi extends TypeApi { this: NullaryMethodType =>
+ /** The result type of the method. */
+ def resultType: Type
+ }
+
+ /** The `PolyType` type signature is used for polymorphic methods
+ * that have at least one type parameter.
+ * @template
+ * @group Types
+ */
+ type PolyType >: Null <: AnyRef with Type with PolyTypeApi
+
+ /** A tag that preserves the identity of the `PolyType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val PolyTypeTag: ClassTag[PolyType]
+
+ /** The constructor/extractor for `PolyType` instances.
+ * @group Extractors
+ */
+ val PolyType: PolyTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `PolyType(typeParams, resultType)`.
+ * Here, `typeParams` are the type parameters of the method and `resultType`
+ * is the type signature following the type parameters.
+ * @group Extractors
+ */
+ abstract class PolyTypeExtractor {
+ def apply(typeParams: List[Symbol], resultType: Type): PolyType
+ def unapply(tpe: PolyType): Option[(List[Symbol], Type)]
+ }
+
+ /** The API that all polymorphic types support.
+ * The main source of information about types is the [[scala.reflect.api.Types]] page.
+ * @group API
+ */
+ trait PolyTypeApi extends TypeApi { this: PolyType =>
+ /** The symbols corresponding to the type parameters. */
+ def typeParams: List[Symbol]
+
+ /** The underlying type. */
+ def resultType: Type
+ }
+
+ /** The `ExistentialType` type signature is used for existential types and
+ * wildcard types.
+ * @template
+ * @group Types
+ */
+ type ExistentialType >: Null <: AnyRef with Type with ExistentialTypeApi
+
+ /** A tag that preserves the identity of the `ExistentialType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val ExistentialTypeTag: ClassTag[ExistentialType]
+
+ /** The constructor/extractor for `ExistentialType` instances.
+ * @group Extractors
+ */
+ val ExistentialType: ExistentialTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax
+ * `ExistentialType(quantified, underlying)`.
+ * Here, `quantified` are the type variables bound by the existential type and `underlying`
+ * is the type that's existentially quantified.
+ * @group Extractors
+ */
+ abstract class ExistentialTypeExtractor {
+ def apply(quantified: List[Symbol], underlying: Type): ExistentialType
+ def unapply(tpe: ExistentialType): Option[(List[Symbol], Type)]
+ }
+
+ /** The API that all existential types support.
+ * The main source of information about types is the [[scala.reflect.api.Types]] page.
+ * @group API
+ */
+ trait ExistentialTypeApi extends TypeApi { this: ExistentialType =>
+ /** The symbols corresponding to the `forSome` clauses of the existential type. */
+ def quantified: List[Symbol]
+
+ /** The underlying type of the existential type. */
+ def underlying: Type
+ }
+
+ /** The `AnnotatedType` type signature is used for annotated types of the
+ * form `<type> @<annotation>`.
+ * @template
+ * @group Types
+ */
+ type AnnotatedType >: Null <: AnyRef with Type with AnnotatedTypeApi
+
+ /** A tag that preserves the identity of the `AnnotatedType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val AnnotatedTypeTag: ClassTag[AnnotatedType]
+
+ /** The constructor/extractor for `AnnotatedType` instances.
+ * @group Extractors
+ */
+ val AnnotatedType: AnnotatedTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax
+ * `AnnotatedType(annotations, underlying, selfsym)`.
+ * Here, `annotations` are the annotations decorating the underlying type `underlying`.
+ * `selfsym` is a symbol representing the annotated type itself.
+ * @group Extractors
+ */
+ abstract class AnnotatedTypeExtractor {
+ def apply(annotations: List[Annotation], underlying: Type, selfsym: Symbol): AnnotatedType
+ def unapply(tpe: AnnotatedType): Option[(List[Annotation], Type, Symbol)]
+ }
+
+ /** The API that all annotated types support.
+ * The main source of information about types is the [[scala.reflect.api.Types]] page.
+ * @group API
+ */
+ trait AnnotatedTypeApi extends TypeApi { this: AnnotatedType =>
+ /** The annotations. */
+ def annotations: List[Annotation]
+
+ /** The annotee. */
+ def underlying: Type
+
+ /** A symbol that represents the annotated type itself. */
+ def selfsym: Symbol
+ }
+
+ /** The `TypeBounds` type signature is used to indicate lower and upper type bounds
+ * of type parameters and abstract types. It is not a first-class type.
+ * If an abstract type or type parameter is declared with any of the forms
+ * on the left, its type signature is the TypeBounds type on the right.
+ * {{{
+ * T >: L <: U TypeBounds(L, U)
+ * T >: L TypeBounds(L, Any)
+ * T <: U TypeBounds(Nothing, U)
+ * }}}
+ * @template
+ * @group Types
+ */
+ type TypeBounds >: Null <: AnyRef with Type with TypeBoundsApi
+
+ /** A tag that preserves the identity of the `TypeBounds` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val TypeBoundsTag: ClassTag[TypeBounds]
+
+ /** The constructor/extractor for `TypeBounds` instances.
+ * @group Extractors
+ */
+ val TypeBounds: TypeBoundsExtractor
+
+ /** An extractor class to create and pattern match with syntax `TypeBound(lower, upper)`
+ * Here, `lower` is the lower bound of the `TypeBounds` pair, and `upper` is
+ * the upper bound.
+ * @group Extractors
+ */
+ abstract class TypeBoundsExtractor {
+ def apply(lo: Type, hi: Type): TypeBounds
+ def unapply(tpe: TypeBounds): Option[(Type, Type)]
+ }
+
+ /** The API that all type bounds support.
+ * The main source of information about types is the [[scala.reflect.api.Types]] page.
+ * @group API
+ */
+ trait TypeBoundsApi extends TypeApi { this: TypeBounds =>
+ /** The lower bound.
+ * Is equal to `definitions.NothingTpe` if not specified explicitly.
+ */
+ def lo: Type
+
+ /** The upper bound.
+ * Is equal to `definitions.AnyTpe` if not specified explicitly.
+ */
+ def hi: Type
+ }
+
+ /** An object representing an unknown type, used during type inference.
+ * If you see WildcardType outside of inference it is almost certainly a bug.
+ * @group Types
+ */
+ val WildcardType: Type
+
+ /** BoundedWildcardTypes, used only during type inference, are created in
+ * two places:
+ *
+ * 1. If the expected type of an expression is an existential type,
+ * its hidden symbols are replaced with bounded wildcards.
+ * 2. When an implicit conversion is being sought based in part on
+ * the name of a method in the converted type, a HasMethodMatching
+ * type is created: a MethodType with parameters typed as
+ * BoundedWildcardTypes.
+ * @template
+ * @group Types
+ */
+ type BoundedWildcardType >: Null <: AnyRef with Type with BoundedWildcardTypeApi
+
+ /** A tag that preserves the identity of the `BoundedWildcardType` abstract type from erasure.
+ * Can be used for pattern matching, instance tests, serialization and likes.
+ * @group Tags
+ */
+ implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType]
+
+ /** The constructor/extractor for `BoundedWildcardType` instances.
+ * @group Extractors
+ */
+ val BoundedWildcardType: BoundedWildcardTypeExtractor
+
+ /** An extractor class to create and pattern match with syntax `BoundedWildcardTypeExtractor(bounds)`
+ * with `bounds` denoting the type bounds.
+ * @group Extractors
+ */
+ abstract class BoundedWildcardTypeExtractor {
+ def apply(bounds: TypeBounds): BoundedWildcardType
+ def unapply(tpe: BoundedWildcardType): Option[TypeBounds]
+ }
+
+ /** The API that all this types support.
+ * The main source of information about types is the [[scala.reflect.api.Types]] page.
+ * @group API
+ */
+ trait BoundedWildcardTypeApi extends TypeApi { this: BoundedWildcardType =>
+ /** Type bounds for the wildcard type. */
+ def bounds: TypeBounds
+ }
+
+ /** The least upper bound of a list of types, as determined by `<:<`.
+ * @group TypeOps
+ */
+ def lub(xs: List[Type]): Type
+
+ /** The greatest lower bound of a list of types, as determined by `<:<`.
+ * @group TypeOps
+ */
+ def glb(ts: List[Type]): Type
+
+ // Creators ---------------------------------------------------------------
+ // too useful and too non-trivial to be left out of public API
+
+ /** The canonical creator for single-types
+ * @group TypeCreators
+ */
+ def singleType(pre: Type, sym: Symbol): Type
+
+ /** the canonical creator for a refined type with a given scope
+ * @group TypeCreators
+ */
+ def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type
+
+ /** The canonical creator for a refined type with an initially empty scope.
+ * @group TypeCreators
+ */
+ def refinedType(parents: List[Type], owner: Symbol): Type
+
+ /** The canonical creator for typerefs
+ * @group TypeCreators
+ */
+ def typeRef(pre: Type, sym: Symbol, args: List[Type]): Type
+
+ /** A creator for intersection type where intersections of a single type are
+ * replaced by the type itself.
+ * @group TypeCreators
+ */
+ def intersectionType(tps: List[Type]): Type
+
+ /** A creator for intersection type where intersections of a single type are
+ * replaced by the type itself, and repeated parent classes are merged.
+ *
+ * !!! Repeated parent classes are not merged - is this a bug in the
+ * comment or in the code?
+ * @group TypeCreators
+ */
+ def intersectionType(tps: List[Type], owner: Symbol): Type
+
+ /** A creator for type applications
+ * @group Types
+ */
+ def appliedType(tycon: Type, args: List[Type]): Type
+
+ /** A creator for type parameterizations that strips empty type parameter lists.
+ * Use this factory method to indicate the type has kind * (it's a polymorphic value)
+ * until we start tracking explicit kinds equivalent to typeFun (except that the latter requires tparams nonEmpty).
+ * @group Types
+ */
+ def polyType(tparams: List[Symbol], tpe: Type): Type
+
+ /** A creator for existential types. This generates:
+ *
+ * {{{
+ * tpe1 where { tparams }
+ * }}}
+ *
+ * where `tpe1` is the result of extrapolating `tpe` with regard to `tparams`.
+ * Extrapolating means that type variables in `tparams` occurring
+ * in covariant positions are replaced by upper bounds, (minus any
+ * SingletonClass markers), type variables in `tparams` occurring in
+ * contravariant positions are replaced by lower bounds, provided the
+ * resulting type is legal with regard to stability, and does not contain
+ * any type variable in `tparams`.
+ *
+ * The abstraction drops all type parameters that are not directly or
+ * indirectly referenced by type `tpe1`. If there are no remaining type
+ * parameters, simply returns result type `tpe`.
+ * @group TypeCreators
+ */
+ def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type
+}
diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala
new file mode 100644
index 0000000..15fa11c
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Universe.scala
@@ -0,0 +1,96 @@
+package scala.reflect
+package api
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * `Universe` provides a complete set of reflection operations which make it possible for one
+ * to reflectively inspect Scala type relations, such as membership or subtyping.
+ *
+ * [[scala.reflect.api.Universe]] has two specialized sub-universes for different scenarios.
+ * [[scala.reflect.api.JavaUniverse]] adds operations that link symbols and types to the underlying
+ * classes and runtime values of a JVM instance-- this can be thought of as the `Universe` that
+ * should be used for all typical use-cases of Scala reflection. [[scala.reflect.macros.Universe]]
+ * adds operations which allow macros to access selected compiler data structures and operations--
+ * this type of `Universe` should only ever exist within the implementation of a Scala macro.
+ *
+ * `Universe` can be thought of as the entry point to Scala reflection. It mixes-in, and thus provides
+ * an interface to the following main types:
+ *
+ * - [[scala.reflect.api.Types#Type Types]] represent types
+ * - [[scala.reflect.api.Symbols#Symbol Symbols]] represent definitions
+ * - [[scala.reflect.api.Trees#Tree Trees]] represent abstract syntax trees
+ * - [[scala.reflect.api.Names#Name Names]] represent term and type names
+ * - [[scala.reflect.api.Annotations#Annotation Annotations]] represent annotations
+ * - [[scala.reflect.api.Positions#Position Positions]] represent source positions of tree nodes
+ * - [[scala.reflect.api.FlagSets#FlagSet FlagSet]] represent sets of flags that apply to symbols and
+ * definition trees
+ * - [[scala.reflect.api.Constants#Constant Constants]] represent compile-time constants.
+ *
+ * To obtain a `Universe` to use with Scala runtime reflection, simply make sure to use or import
+ * `scala.reflect.runtime.universe._`
+ * {{{
+ * scala> import scala.reflect.runtime.universe._
+ * import scala.reflect.runtime.universe._
+ *
+ * scala> typeOf[List[Int]]
+ * res0: reflect.runtime.universe.Type = scala.List[Int]
+ *
+ * scala> typeOf[Either[String, Int]]
+ * res1: reflect.runtime.universe.Type = scala.Either[String,Int]
+ * }}}
+ *
+ * To obtain a `Universe` for use within a Scala macro, use [[scala.reflect.macros.Context#universe]]. For example:
+ * {{{
+ * def printf(format: String, params: Any*): Unit = macro impl
+ * def impl(c: Context)(format: c.Expr[String], params: c.Expr[Any]*): c.Expr[Unit] = {
+ * import c.universe._
+ * ...
+ * }
+ * }}}
+ *
+ * For more information about `Universe`s, see the [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Universes]]
+ *
+ * @groupprio Universe -1
+ * @group ReflectionAPI
+ *
+ * @contentDiagram hideNodes "*Api"
+ */
+abstract class Universe extends Symbols
+ with Types
+ with FlagSets
+ with Scopes
+ with Names
+ with Trees
+ with Constants
+ with Annotations
+ with Positions
+ with Exprs
+ with TypeTags
+ with TagInterop
+ with StandardDefinitions
+ with StandardNames
+ with BuildUtils
+ with Mirrors
+ with Printers
+ with Importers
+{
+ /** Use `reify` to produce the abstract syntax tree representing a given Scala expression.
+ *
+ * For example:
+ *
+ * {{{
+ * val five = reify{ 5 } // Literal(Constant(5))
+ * reify{ 5.toString } // Apply(Select(Literal(Constant(5)), TermName("toString")), List())
+ * reify{ five.splice.toString } // Apply(Select(five, TermName("toString")), List())
+ * }}}
+ *
+ * The produced tree is path dependent on the Universe `reify` was called from.
+ *
+ * Use [[scala.reflect.api.Exprs#Expr.splice]] to embed an existing expression into a `reify` call. Use [[Expr]] to turn a [[Tree]] into an expression that can be spliced.
+ * @group Universe
+ */
+ // implementation is hardwired to `scala.reflect.reify.Taggers`
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`
+ def reify[T](expr: T): Expr[T] = ??? // macro
+}
diff --git a/src/reflect/scala/reflect/api/package.scala b/src/reflect/scala/reflect/api/package.scala
new file mode 100644
index 0000000..dbda84d
--- /dev/null
+++ b/src/reflect/scala/reflect/api/package.scala
@@ -0,0 +1,47 @@
+package scala.reflect
+
+import scala.reflect.api.{Universe => ApiUniverse}
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * The Scala Reflection API (located in scala-reflect.jar).
+ *
+ * In Scala 2.10.0, the Scala Reflection API and its implementation have an "experimental" status.
+ * This means that the API and the docs are not complete and can be changed in binary- and source-incompatible
+ * manner in 2.10.1. This also means that the implementation has some known issues.
+ *
+ * The following types are the backbone of the Scala Reflection API, and serve as a good starting point
+ * for information about Scala Reflection:
+ *
+ * - [[scala.reflect.api.Symbols]]
+ * - [[scala.reflect.api.Types]]
+ * - [[scala.reflect.api.Mirrors]]
+ * - [[scala.reflect.api.Universe]]
+ *
+ * For more information about Scala Reflection, see the
+ * [[http://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]]
+ *
+ * @groupname ReflectionAPI Scala Reflection API
+ * @groupprio API 9
+ * @groupprio Extractors 10
+ * @groupprio Tags 11
+ * @groupdesc API The methods available for each reflection entity, without the implementation. Since the
+ * reflection entities are later overridden by runtime reflection and macros, their API
+ * counterparts guarantee a minimum set of methods that are implemented.
+ * @groupdesc Extractors Extractors provide the machinery necessary to allow pattern matching and construction of
+ * reflection entities that is similar to case classes, although the entities are only abstract
+ * types that are later overridden.
+ * @groupdesc Tags Implicit values that provide [[scala.reflect.ClassTag `ClassTags`]] for the reflection
+ * classes. These are abstract in the interface but are later filled in to provide ClassTags
+ * for the either the runtime reflection or macros entities, depending on the use.
+ */
+package object api {
+
+ // anchors for materialization macros emitted during tag materialization in Implicits.scala
+ // implementation is hardwired into `scala.reflect.reify.Taggers`
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`
+ // todo. once we have implicit macros for tag generation, we can remove these anchors
+ private[scala] def materializeWeakTypeTag[T](u: ApiUniverse): u.WeakTypeTag[T] = ??? // macro
+ private[scala] def materializeTypeTag[T](u: ApiUniverse): u.TypeTag[T] = ??? // macro
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
new file mode 100644
index 0000000..1ab975b
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
@@ -0,0 +1,144 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+/** Additions to the type checker that can be added at
+ * run time. Typically these are added by
+ * compiler plugins. */
+trait AnnotationCheckers {
+ self: SymbolTable =>
+
+
+ /** An additional checker for annotations on types.
+ * Typically these are registered by compiler plugins
+ * with the addAnnotationChecker method. */
+ trait AnnotationChecker {
+
+ /**
+ * Selectively activate this annotation checker. When using both an annotation checker
+ * and an analyzer plugin, it is common to run both of them only during selected
+ * compiler phases. See documentation in AnalyzerPlugin.isActive.
+ */
+ def isActive(): Boolean = true
+
+ /** Check the annotations on two types conform. */
+ def annotationsConform(tpe1: Type, tpe2: Type): Boolean
+
+ /** Refine the computed least upper bound of a list of types.
+ * All this should do is add annotations. */
+ def annotationsLub(tp: Type, ts: List[Type]): Type = tp
+
+ /** Refine the computed greatest lower bound of a list of types.
+ * All this should do is add annotations. */
+ def annotationsGlb(tp: Type, ts: List[Type]): Type = tp
+
+ /** Refine the bounds on type parameters to the given type arguments. */
+ def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol],
+ targs: List[Type]): List[TypeBounds] = bounds
+
+ /**
+ * Modify the type that has thus far been inferred for a tree. All this should
+ * do is add annotations.
+ */
+ @deprecated("Create an AnalyzerPlugin and use pluginsTyped", "2.10.1")
+ def addAnnotations(tree: Tree, tpe: Type): Type = tpe
+
+ /**
+ * Decide whether this analyzer plugin can adapt a tree that has an annotated type to the
+ * given type tp, taking into account the given mode (see method adapt in trait Typers).
+ */
+ @deprecated("Create an AnalyzerPlugin and use canAdaptAnnotations", "2.10.1")
+ def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = false
+
+ /**
+ * Adapt a tree that has an annotated type to the given type tp, taking into account the given
+ * mode (see method adapt in trait Typers).
+ *
+ * An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
+ * class cannot do the adaptiong, it should return the tree unchanged.
+ */
+ @deprecated("Create an AnalyzerPlugin and use adaptAnnotations", "2.10.1")
+ def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = tree
+
+ /**
+ * Adapt the type of a return expression. The decision of a typer plugin whether the type
+ * should be adapted is based on the type of the expression which is returned, as well as the
+ * result type of the method (pt).
+ *
+ * By default, this method simply returns the passed `default` type.
+ */
+ @deprecated("Create an AnalyzerPlugin and use pluginsTypedReturn. Note: the 'tree' argument here is\n"+
+ "the 'expr' of a Return tree; 'pluginsTypedReturn' takes the Return tree itself as argument", "2.10.1")
+ def adaptTypeOfReturn(tree: Tree, pt: Type, default: => Type): Type = default
+ }
+
+ // Syncnote: Annotation checkers inaccessible to reflection, so no sync in var necessary.
+
+ /** The list of annotation checkers that have been registered */
+ private var annotationCheckers: List[AnnotationChecker] = Nil
+
+ /** Register an annotation checker. Typically these are added by compiler plugins. */
+ def addAnnotationChecker(checker: AnnotationChecker) {
+ if (!(annotationCheckers contains checker))
+ annotationCheckers = checker :: annotationCheckers
+ }
+
+ /** Remove all annotation checkers */
+ def removeAllAnnotationCheckers() {
+ annotationCheckers = Nil
+ }
+
+ /** @see AnnotationChecker.annotationsConform */
+ def annotationsConform(tp1: Type, tp2: Type): Boolean =
+ if (annotationCheckers.isEmpty || (tp1.annotations.isEmpty && tp2.annotations.isEmpty)) true
+ else annotationCheckers.forall(checker => {
+ !checker.isActive() || checker.annotationsConform(tp1,tp2)
+ })
+
+ /** @see AnnotationChecker.annotationsLub */
+ def annotationsLub(tpe: Type, ts: List[Type]): Type =
+ if (annotationCheckers.isEmpty) tpe
+ else annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.annotationsLub(tpe, ts))
+
+ /** @see AnnotationChecker.annotationsGlb */
+ def annotationsGlb(tpe: Type, ts: List[Type]): Type =
+ if (annotationCheckers.isEmpty) tpe
+ else annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.annotationsGlb(tpe, ts))
+
+ /** @see AnnotationChecker.adaptBoundsToAnnotations */
+ def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol],
+ targs: List[Type]): List[TypeBounds] =
+ if (annotationCheckers.isEmpty) bounds
+ else annotationCheckers.foldLeft(bounds)((bounds, checker) =>
+ if (!checker.isActive()) bounds else checker.adaptBoundsToAnnotations(bounds, tparams, targs))
+
+
+ /* The following methods will be removed with the deprecated methods is AnnotationChecker. */
+
+ def addAnnotations(tree: Tree, tpe: Type): Type =
+ if (annotationCheckers.isEmpty) tpe
+ else annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.addAnnotations(tree, tpe))
+
+ def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean =
+ if (annotationCheckers.isEmpty) false
+ else annotationCheckers.exists(checker => {
+ checker.isActive() && checker.canAdaptAnnotations(tree, mode, pt)
+ })
+
+ def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree =
+ if (annotationCheckers.isEmpty) tree
+ else annotationCheckers.foldLeft(tree)((tree, checker) =>
+ if (!checker.isActive()) tree else checker.adaptAnnotations(tree, mode, pt))
+
+ def adaptTypeOfReturn(tree: Tree, pt: Type, default: => Type): Type =
+ if (annotationCheckers.isEmpty) default
+ else annotationCheckers.foldLeft(default)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.adaptTypeOfReturn(tree, pt, tpe))
+}
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
new file mode 100644
index 0000000..032b453
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -0,0 +1,363 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import util._
+import pickling.ByteCodecs
+import scala.annotation.tailrec
+import scala.collection.immutable.ListMap
+
+/** AnnotationInfo and its helpers */
+trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
+ import definitions.{ ThrowsClass, StaticAnnotationClass, isMetaAnnotation }
+
+ // Common annotation code between Symbol and Type.
+ // For methods altering the annotation list, on Symbol it mutates
+ // the Symbol's field directly. For Type, a new AnnotatedType is
+ // created which wraps the original type.
+ trait Annotatable[Self] {
+ /** The annotations on this type. */
+ def annotations: List[AnnotationInfo] // Annotations on this type.
+ def setAnnotations(annots: List[AnnotationInfo]): Self // Replace annotations with argument list.
+ def withAnnotations(annots: List[AnnotationInfo]): Self // Add annotations to this type.
+ def filterAnnotations(p: AnnotationInfo => Boolean): Self // Retain only annotations meeting the condition.
+ def withoutAnnotations: Self // Remove all annotations from this type.
+
+ /** Symbols of any @throws annotations on this symbol.
+ */
+ def throwsAnnotations(): List[Symbol] = annotations collect {
+ case ThrownException(exc) => exc
+ }
+
+ def addThrowsAnnotation(throwableSym: Symbol): Self = {
+ val throwableTpe = if (throwableSym.isMonomorphicType) throwableSym.tpe else {
+ debuglog(s"Encountered polymorphic exception `${throwableSym.fullName}` while parsing class file.")
+ // in case we encounter polymorphic exception the best we can do is to convert that type to
+ // monomorphic one by introducing existentials, see SI-7009 for details
+ existentialAbstraction(throwableSym.typeParams, throwableSym.tpe)
+ }
+ val throwsAnn = AnnotationInfo(appliedType(definitions.ThrowsClass, throwableTpe), List(Literal(Constant(throwableTpe))), Nil)
+ withAnnotations(List(throwsAnn))
+ }
+
+ /** Tests for, get, or remove an annotation */
+ def hasAnnotation(cls: Symbol): Boolean =
+ //OPT inlined from exists to save on #closures; was: annotations exists (_ matches cls)
+ dropOtherAnnotations(annotations, cls).nonEmpty
+
+ def getAnnotation(cls: Symbol): Option[AnnotationInfo] =
+ //OPT inlined from exists to save on #closures; was: annotations find (_ matches cls)
+ dropOtherAnnotations(annotations, cls) match {
+ case ann :: _ => Some(ann)
+ case _ => None
+ }
+
+ def removeAnnotation(cls: Symbol): Self = filterAnnotations(ann => !(ann matches cls))
+
+ final def withAnnotation(annot: AnnotationInfo): Self = withAnnotations(List(annot))
+
+ @tailrec private
+ def dropOtherAnnotations(anns: List[AnnotationInfo], cls: Symbol): List[AnnotationInfo] = anns match {
+ case ann :: rest => if (ann matches cls) anns else dropOtherAnnotations(rest, cls)
+ case Nil => Nil
+ }
+ }
+
+ /** Arguments to classfile annotations (which are written to
+ * bytecode as java annotations) are either:
+ *
+ * - constants
+ * - arrays of constants
+ * - or nested classfile annotations
+ */
+ abstract class ClassfileAnnotArg extends Product
+ implicit val JavaArgumentTag = ClassTag[ClassfileAnnotArg](classOf[ClassfileAnnotArg])
+ case object UnmappableAnnotArg extends ClassfileAnnotArg
+
+ /** Represents a compile-time Constant (`Boolean`, `Byte`, `Short`,
+ * `Char`, `Int`, `Long`, `Float`, `Double`, `String`, `java.lang.Class` or
+ * an instance of a Java enumeration value).
+ */
+ case class LiteralAnnotArg(const: Constant)
+ extends ClassfileAnnotArg with LiteralArgumentApi {
+ def value = const
+ override def toString = const.escapedStringValue
+ }
+ object LiteralAnnotArg extends LiteralArgumentExtractor
+
+ /** Represents an array of classfile annotation arguments */
+ case class ArrayAnnotArg(args: Array[ClassfileAnnotArg])
+ extends ClassfileAnnotArg with ArrayArgumentApi {
+ override def toString = args.mkString("[", ", ", "]")
+ }
+ object ArrayAnnotArg extends ArrayArgumentExtractor
+
+ /** Represents a nested classfile annotation */
+ case class NestedAnnotArg(annInfo: AnnotationInfo)
+ extends ClassfileAnnotArg with NestedArgumentApi {
+ // The nested annotation should not have any Scala annotation arguments
+ assert(annInfo.args.isEmpty, annInfo.args)
+ def annotation = annInfo
+ override def toString = annInfo.toString
+ }
+ object NestedAnnotArg extends NestedArgumentExtractor
+
+ type JavaArgument = ClassfileAnnotArg
+ type LiteralArgument = LiteralAnnotArg
+ val LiteralArgument = LiteralAnnotArg
+ implicit val LiteralArgumentTag = ClassTag[LiteralAnnotArg](classOf[LiteralAnnotArg])
+ type ArrayArgument = ArrayAnnotArg
+ val ArrayArgument = ArrayAnnotArg
+ implicit val ArrayArgumentTag = ClassTag[ArrayAnnotArg](classOf[ArrayAnnotArg])
+ type NestedArgument = NestedAnnotArg
+ val NestedArgument = NestedAnnotArg
+ implicit val NestedArgumentTag = ClassTag[NestedAnnotArg](classOf[NestedAnnotArg])
+
+ /** A specific annotation argument that encodes an array of bytes as an
+ * array of `Long`. The type of the argument declared in the annotation
+ * must be `String`. This specialised class is used to encode Scala
+ * signatures for reasons of efficiency, both in term of class-file size
+ * and in term of compiler performance.
+ */
+ case class ScalaSigBytes(bytes: Array[Byte]) extends ClassfileAnnotArg {
+ override def toString = (bytes map { byte => (byte & 0xff).toHexString }).mkString("[ ", " ", " ]")
+ lazy val encodedBytes = ByteCodecs.encode(bytes) // TODO remove after migration to ASM-based GenJVM complete
+ def isLong: Boolean = (encodedBytes.length > 65535) // TODO remove after migration to ASM-based GenJVM complete
+ lazy val sevenBitsMayBeZero: Array[Byte] = {
+ mapToNextModSevenBits(scala.reflect.internal.pickling.ByteCodecs.encode8to7(bytes))
+ }
+ def fitsInOneString: Boolean = {
+ val numZeros = (sevenBitsMayBeZero count { b => b == 0 })
+ val res = (sevenBitsMayBeZero.length + numZeros) <= 65535
+ assert(this.isLong == !res, "As things stand, can't just swap in `fitsInOneString()` for `isLong()`")
+ res
+ }
+ def sigAnnot: Type =
+ if (this.isLong)
+ definitions.ScalaLongSignatureAnnotation.tpe
+ else
+ definitions.ScalaSignatureAnnotation.tpe
+
+ private def mapToNextModSevenBits(src: Array[Byte]): Array[Byte] = {
+ var i = 0
+ val srclen = src.length
+ while (i < srclen) {
+ val in = src(i)
+ src(i) = (if (in == 0x7f) 0.toByte else (in + 1).toByte)
+ i += 1
+ }
+ src
+ }
+ }
+
+ object AnnotationInfo {
+ def marker(atp: Type): AnnotationInfo =
+ apply(atp, Nil, Nil)
+
+ def lazily(lazyInfo: => AnnotationInfo) =
+ new LazyAnnotationInfo(lazyInfo)
+
+ def apply(atp: Type, args: List[Tree], assocs: List[(Name, ClassfileAnnotArg)]): AnnotationInfo =
+ new CompleteAnnotationInfo(atp, args, assocs)
+
+ def unapply(info: AnnotationInfo): Option[(Type, List[Tree], List[(Name, ClassfileAnnotArg)])] =
+ Some((info.atp, info.args, info.assocs))
+ }
+
+ class CompleteAnnotationInfo(
+ val atp: Type,
+ val args: List[Tree],
+ val assocs: List[(Name, ClassfileAnnotArg)]
+ ) extends AnnotationInfo {
+ // Classfile annot: args empty. Scala annot: assocs empty.
+ assert(args.isEmpty || assocs.isEmpty, atp)
+
+ // necessary for reification, see Reifiers.scala for more info
+ private var orig: Tree = EmptyTree
+ def original = orig
+ def setOriginal(t: Tree): this.type = {
+ orig = t
+ this setPos t.pos
+ this
+ }
+
+ override def toString = completeAnnotationToString(this)
+ }
+
+ private[scala] def completeAnnotationToString(annInfo: AnnotationInfo) = {
+ import annInfo._
+ val s_args = if (!args.isEmpty) args.mkString("(", ", ", ")") else ""
+ val s_assocs = if (!assocs.isEmpty) (assocs map { case (x, y) => x+" = "+y } mkString ("(", ", ", ")")) else ""
+ s"${atp}${s_args}${s_assocs}"
+ }
+
+ /** Symbol annotations parsed in `Namer` (typeCompleter of
+ * definitions) have to be lazy (#1782)
+ */
+ final class LazyAnnotationInfo(lazyInfo: => AnnotationInfo) extends AnnotationInfo {
+ private var forced = false
+ private lazy val forcedInfo = try lazyInfo finally forced = true
+
+ def atp: Type = forcedInfo.atp
+ def args: List[Tree] = forcedInfo.args
+ def assocs: List[(Name, ClassfileAnnotArg)] = forcedInfo.assocs
+ def original: Tree = forcedInfo.original
+ def setOriginal(t: Tree): this.type = { forcedInfo.setOriginal(t); this }
+
+ // We should always be able to print things without forcing them.
+ override def toString = if (forced) forcedInfo.toString else "@<?>"
+
+ override def pos: Position = if (forced) forcedInfo.pos else NoPosition
+
+ override def completeInfo(): Unit = forcedInfo
+ }
+
+ /** Typed information about an annotation. It can be attached to either
+ * a symbol or an annotated type.
+ *
+ * Annotations are written to the classfile as Java annotations
+ * if `atp` conforms to `ClassfileAnnotation` (the classfile parser adds
+ * this interface to any Java annotation class).
+ *
+ * Annotations are pickled (written to scala symtab attribute in the
+ * classfile) if `atp` inherits form `StaticAnnotation`.
+ *
+ * `args` stores arguments to Scala annotations, represented as typed
+ * trees. Note that these trees are not transformed by any phases
+ * following the type-checker.
+ *
+ * `assocs` stores arguments to classfile annotations as name-value pairs.
+ */
+ abstract class AnnotationInfo extends AnnotationApi {
+ def atp: Type
+ def args: List[Tree]
+ def assocs: List[(Name, ClassfileAnnotArg)]
+
+ def tpe = atp
+ def scalaArgs = args
+ def javaArgs = ListMap(assocs: _*)
+
+ // necessary for reification, see Reifiers.scala for more info
+ def original: Tree
+ def setOriginal(t: Tree): this.type
+
+ // see annotationArgRewriter
+ lazy val isTrivial = atp.isTrivial && !hasArgWhich(_.isInstanceOf[This])
+
+ private var rawpos: Position = NoPosition
+ def pos = rawpos
+ def setPos(pos: Position): this.type = { // Syncnote: Setpos inaccessible to reflection, so no sync in rawpos necessary.
+ rawpos = pos
+ this
+ }
+
+ // Forces LazyAnnotationInfo, no op otherwise
+ def completeInfo(): Unit = ()
+
+ /** Annotations annotating annotations are confusing so I drew
+ * an example. Given the following code:
+ *
+ * class A {
+ * @(deprecated @setter) @(inline @getter)
+ * var x: Int = 0
+ * }
+ *
+ * For the setter `x_=` in A, annotations contains one AnnotationInfo =
+ * List(deprecated @setter)
+ * The single AnnotationInfo in that list, i.e. `@(deprecated @setter)`, has metaAnnotations =
+ * List(setter)
+ *
+ * Similarly, the getter `x` in A has an @inline annotation, which has
+ * metaAnnotations = List(getter).
+ */
+ def symbol = atp.typeSymbol
+
+ /** These are meta-annotations attached at the use site; they
+ * only apply to this annotation usage. For instance, in
+ * `@(deprecated @setter @field) val ...`
+ * metaAnnotations = List(setter, field).
+ */
+ def metaAnnotations: List[AnnotationInfo] = atp match {
+ case AnnotatedType(metas, _, _) => metas
+ case _ => Nil
+ }
+
+ /** The default kind of members to which this annotation is attached.
+ * For instance, for scala.deprecated defaultTargets =
+ * List(getter, setter, beanGetter, beanSetter).
+ */
+ def defaultTargets = symbol.annotations map (_.symbol) filter isMetaAnnotation
+ // Test whether the typeSymbol of atp conforms to the given class.
+ def matches(clazz: Symbol) = symbol isNonBottomSubClass clazz
+ // All subtrees of all args are considered.
+ def hasArgWhich(p: Tree => Boolean) = args exists (_ exists p)
+
+ /** Check whether the type or any of the arguments are erroneous */
+ def isErroneous = atp.isErroneous || args.exists(_.isErroneous)
+
+ def isStatic = symbol isNonBottomSubClass StaticAnnotationClass
+
+ /** Check whether any of the arguments mention a symbol */
+ def refsSymbol(sym: Symbol) = hasArgWhich(_.symbol == sym)
+
+ /** Change all ident's with Symbol "from" to instead use symbol "to" */
+ def substIdentSyms(from: Symbol, to: Symbol) =
+ AnnotationInfo(atp, args map (_ substituteSymbols (List(from), List(to))), assocs) setPos pos
+
+ def stringArg(index: Int) = constantAtIndex(index) map (_.stringValue)
+ def intArg(index: Int) = constantAtIndex(index) map (_.intValue)
+ def symbolArg(index: Int) = argAtIndex(index) collect {
+ case Apply(fun, Literal(str) :: Nil) if fun.symbol == definitions.Symbol_apply =>
+ newTermName(str.stringValue)
+ }
+
+ // !!! when annotation arguments are not literals, but any sort of
+ // expression, there is a fair chance they will turn up here not as
+ // Literal(const) but some arbitrary AST.
+ def constantAtIndex(index: Int): Option[Constant] =
+ argAtIndex(index) collect { case Literal(x) => x }
+
+ def argAtIndex(index: Int): Option[Tree] =
+ if (index < args.size) Some(args(index)) else None
+
+ override def hashCode = atp.## + args.## + assocs.##
+ override def equals(other: Any) = other match {
+ case x: AnnotationInfo => (atp == x.atp) && (args == x.args) && (assocs == x.assocs)
+ case _ => false
+ }
+ }
+
+ type Annotation = AnnotationInfo
+ object Annotation extends AnnotationExtractor {
+ def apply(tpe: Type, scalaArgs: List[Tree], javaArgs: ListMap[Name, ClassfileAnnotArg]): Annotation =
+ AnnotationInfo(tpe, scalaArgs, javaArgs.toList)
+ def unapply(annotation: Annotation): Option[(Type, List[Tree], ListMap[Name, ClassfileAnnotArg])] =
+ Some((annotation.tpe, annotation.scalaArgs, annotation.javaArgs))
+ }
+ implicit val AnnotationTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo])
+
+ object UnmappableAnnotation extends CompleteAnnotationInfo(NoType, Nil, Nil)
+
+ /** Extracts symbol of thrown exception from AnnotationInfo.
+ *
+ * Supports both “old-style” `@throws(classOf[Exception])`
+ * as well as “new-stye” `@throws[Exception]("cause")` annotations.
+ */
+ object ThrownException {
+ def unapply(ann: AnnotationInfo): Option[Symbol] =
+ ann match {
+ case AnnotationInfo(tpe, _, _) if tpe.typeSymbol != ThrowsClass =>
+ None
+ // old-style: @throws(classOf[Exception]) (which is throws[T](classOf[Exception]))
+ case AnnotationInfo(_, List(Literal(Constant(tpe: Type))), _) =>
+ Some(tpe.typeSymbol)
+ // new-style: @throws[Exception], @throws[Exception]("cause")
+ case AnnotationInfo(TypeRef(_, _, args), _, _) =>
+ Some(args.head.typeSymbol)
+ }
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
new file mode 100644
index 0000000..3c2b128
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -0,0 +1,239 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.reflect
+package internal
+
+// todo implement in terms of BitSet
+import scala.collection.{ mutable, immutable }
+import scala.math.max
+import util.Statistics
+
+/** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types
+ * of a type. It characterized by the following two laws:
+ *
+ * (1) Each element of `tp.baseTypeSeq` is a basetype of `tp`
+ * (2) For each basetype `bt1` of `tp` there is an element `bt` in `tp.baseTypeSeq` such that
+ *
+ * bt.typeSymbol = bt1.typeSymbol
+ * bt <: bt1
+ *
+ * (3) The type symbols of different elements are different.
+ *
+ * Elements in the sequence are ordered by Symbol.isLess.
+ * @note base type sequences were called closures up to 2.7.1. The name has been changed
+ * to avoid confusion with function closures.
+ */
+trait BaseTypeSeqs {
+ this: SymbolTable =>
+ import definitions._
+ import BaseTypeSeqsStats._
+
+ protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
+ new BaseTypeSeq(parents, elems)
+
+ /** Note: constructor is protected to force everyone to use the factory method newBaseTypeSeq instead.
+ * This is necessary because when run from reflection every base type sequence needs to have a
+ * SynchronizedBaseTypeSeq as mixin.
+ */
+ class BaseTypeSeq protected[BaseTypeSeqs] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) {
+ self =>
+ if (Statistics.canEnable) Statistics.incCounter(baseTypeSeqCount)
+ if (Statistics.canEnable) Statistics.incCounter(baseTypeSeqLenTotal, elems.length)
+
+ /** The number of types in the sequence */
+ def length: Int = elems.length
+
+ // #3676 shows why we can't store NoType in elems to mark cycles
+ // (while NoType is in there to indicate a cycle in this BTS, during the execution of
+ // the mergePrefixAndArgs below, the elems get copied without the pending map,
+ // so that NoType's are seen instead of the original type --> spurious compile error)
+ private val pending = new mutable.BitSet(length)
+
+ /** The type at i'th position in this sequence; lazy types are returned evaluated. */
+ def apply(i: Int): Type =
+ if(pending contains i) {
+ pending.clear()
+ throw CyclicInheritance
+ } else
+ elems(i) match {
+ case rtp @ RefinedType(variants, decls) =>
+ // can't assert decls.isEmpty; see t0764
+ //if (!decls.isEmpty) abort("computing closure of "+this+":"+this.isInstanceOf[RefinedType]+"/"+closureCache(j))
+ //Console.println("compute closure of "+this+" => glb("+variants+")")
+ pending += i
+ try {
+ mergePrefixAndArgs(variants, -1, lubDepth(variants)) match {
+ case Some(tp0) =>
+ pending(i) = false
+ elems(i) = tp0
+ tp0
+ case None =>
+ typeError(
+ "no common type instance of base types "+(variants mkString ", and ")+" exists.")
+ }
+ } catch {
+ case CyclicInheritance =>
+ typeError(
+ "computing the common type instance of base types "+(variants mkString ", and ")+" leads to a cycle.")
+ }
+ case tp =>
+ tp
+ }
+
+ def rawElem(i: Int) = elems(i)
+
+ /** The type symbol of the type at i'th position in this sequence;
+ * no evaluation needed.
+ */
+ def typeSymbol(i: Int): Symbol = {
+ elems(i) match {
+ case RefinedType(v :: vs, _) => v.typeSymbol
+ case tp => tp.typeSymbol
+ }
+ }
+
+ /** Return all evaluated types in this sequence as a list */
+ def toList: List[Type] = elems.toList
+
+ def copy(head: Type, offset: Int): BaseTypeSeq = {
+ val arr = new Array[Type](elems.length + offset)
+ scala.compat.Platform.arraycopy(elems, 0, arr, offset, elems.length)
+ arr(0) = head
+ newBaseTypeSeq(parents, arr)
+ }
+
+ /** Compute new base type sequence with `tp` prepended to this sequence */
+ def prepend(tp: Type): BaseTypeSeq = copy(tp, 1)
+
+ /** Compute new base type sequence with `tp` replacing the head of this sequence */
+ def updateHead(tp: Type): BaseTypeSeq = copy(tp, 0)
+
+ /** Compute new base type sequence where every element is mapped
+ * with function `f`. Lazy types are mapped but not evaluated */
+ def map(f: Type => Type): BaseTypeSeq = {
+ // inlined `elems map f` for performance
+ val len = length
+ var arr = new Array[Type](len)
+ var i = 0
+ while (i < len) {
+ arr(i) = f(elems(i))
+ i += 1
+ }
+ newBaseTypeSeq(parents, arr)
+ }
+
+ def lateMap(f: Type => Type): BaseTypeSeq = new MappedBaseTypeSeq(this, f)
+
+ def exists(p: Type => Boolean): Boolean = elems exists p
+
+ lazy val maxDepth = maxDepthOfElems
+
+ protected def maxDepthOfElems: Int = {
+ var d = 0
+ for (i <- 1 until length) d = max(d, typeDepth(elems(i)))
+ d
+ }
+
+ override def toString = elems.mkString("BTS(", ",", ")")
+
+ private def typeError(msg: String): Nothing =
+ throw new TypeError(
+ "the type intersection "+(parents mkString " with ")+" is malformed"+
+ "\n --- because ---\n"+msg)
+ }
+
+ /** A merker object for a base type sequence that's no yet computed.
+ * used to catch inheritance cycles
+ */
+ val undetBaseTypeSeq: BaseTypeSeq = newBaseTypeSeq(List(), Array())
+
+ /** Create a base type sequence consisting of a single type */
+ def baseTypeSingletonSeq(tp: Type): BaseTypeSeq = newBaseTypeSeq(List(), Array(tp))
+
+ /** Create the base type sequence of a compound type wuth given tp.parents */
+ def compoundBaseTypeSeq(tp: Type): BaseTypeSeq = {
+ val tsym = tp.typeSymbol
+ val parents = tp.parents
+// Console.println("computing baseTypeSeq of " + tsym.tpe + " " + parents)//DEBUG
+ val buf = new mutable.ListBuffer[Type]
+ buf += tsym.tpe
+ var btsSize = 1
+ if (parents.nonEmpty) {
+ val nparents = parents.length
+ val pbtss = new Array[BaseTypeSeq](nparents)
+ val index = new Array[Int](nparents)
+ var i = 0
+ for (p <- parents) {
+ pbtss(i) =
+ if (p.baseTypeSeq eq undetBaseTypeSeq) AnyClass.info.baseTypeSeq
+ else p.baseTypeSeq
+ index(i) = 0
+ i += 1
+ }
+ def nextTypeSymbol(i: Int): Symbol = {
+ val j = index(i)
+ val pbts = pbtss(i)
+ if (j < pbts.length) pbts.typeSymbol(j) else AnyClass
+ }
+ def nextRawElem(i: Int): Type = {
+ val j = index(i)
+ val pbts = pbtss(i)
+ if (j < pbts.length) pbts.rawElem(j) else AnyClass.tpe
+ }
+ var minSym: Symbol = NoSymbol
+ while (minSym != AnyClass) {
+ minSym = nextTypeSymbol(0)
+ i = 1
+ while (i < nparents) {
+ val nextSym = nextTypeSymbol(i)
+ if (nextSym isLess minSym)
+ minSym = nextSym
+ i += 1
+ }
+ var minTypes: List[Type] = List()
+ i = 0
+ while (i < nparents) {
+ if (nextTypeSymbol(i) == minSym) {
+ nextRawElem(i) match {
+ case RefinedType(variants, decls) =>
+ for (tp <- variants)
+ if (!(minTypes exists (tp =:= _))) minTypes = tp :: minTypes
+ case tp =>
+ if (!(minTypes exists (tp =:= _))) minTypes = tp :: minTypes
+ }
+ index(i) = index(i) + 1
+ }
+ i += 1
+ }
+ buf += intersectionType(minTypes)
+ btsSize += 1
+ }
+ }
+ val elems = new Array[Type](btsSize)
+ buf.copyToArray(elems, 0)
+// Console.println("computed baseTypeSeq of " + tsym.tpe + " " + parents + ": "+elems.toString)//DEBUG
+ newBaseTypeSeq(parents, elems)
+ }
+
+ class MappedBaseTypeSeq(orig: BaseTypeSeq, f: Type => Type) extends BaseTypeSeq(orig.parents map f, orig.elems) {
+ override def apply(i: Int) = f(orig.apply(i))
+ override def rawElem(i: Int) = f(orig.rawElem(i))
+ override def typeSymbol(i: Int) = orig.typeSymbol(i)
+ override def toList = orig.toList map f
+ override def copy(head: Type, offset: Int) = (orig map f).copy(head, offset)
+ override def map(g: Type => Type) = lateMap(g)
+ override def lateMap(g: Type => Type) = orig.lateMap(x => g(f(x)))
+ override def exists(p: Type => Boolean) = elems exists (x => p(f(x)))
+ override protected def maxDepthOfElems: Int = elems.map(x => typeDepth(f(x))).max
+ override def toString = elems.mkString("MBTS(", ",", ")")
+ }
+
+ val CyclicInheritance = new Throwable
+}
+
+object BaseTypeSeqsStats {
+ val baseTypeSeqCount = Statistics.newCounter("#base type seqs")
+ val baseTypeSeqLenTotal = Statistics.newRelCounter("avg base type seq length", baseTypeSeqCount)
+}
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
new file mode 100644
index 0000000..9f41f03
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -0,0 +1,68 @@
+package scala.reflect
+package internal
+
+import Flags._
+
+trait BuildUtils { self: SymbolTable =>
+
+ class BuildImpl extends BuildApi {
+
+ def selectType(owner: Symbol, name: String): TypeSymbol =
+ select(owner, newTypeName(name)).asType
+
+ def selectTerm(owner: Symbol, name: String): TermSymbol = {
+ val result = select(owner, newTermName(name)).asTerm
+ if (result.isOverloaded) result.suchThat(!_.isMethod).asTerm
+ else result
+ }
+
+ private def select(owner: Symbol, name: Name): Symbol = {
+ val result = owner.info decl name
+ if (result ne NoSymbol) result
+ else
+ mirrorThatLoaded(owner).missingHook(owner, name) orElse
+ MissingRequirementError.notFound("%s %s in %s".format(if (name.isTermName) "term" else "type", name, owner.fullName))
+ }
+
+ def selectOverloadedMethod(owner: Symbol, name: String, index: Int): MethodSymbol = {
+ val result = owner.info.decl(newTermName(name)).alternatives(index)
+ if (result ne NoSymbol) result.asMethod
+ else MissingRequirementError.notFound("overloaded method %s #%d in %s".format(name, index, owner.fullName))
+ }
+
+ def newFreeTerm(name: String, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
+ newFreeTermSymbol(newTermName(name), value, flags, origin)
+
+ def newFreeType(name: String, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
+ newFreeTypeSymbol(newTypeName(name), flags, origin)
+
+ def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: Long, isClass: Boolean): Symbol =
+ owner.newNestedSymbol(name, pos, flags, isClass)
+
+ def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S =
+ sym.setAnnotations(annots)
+
+ def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S =
+ sym.setTypeSignature(tpe)
+
+ def flagsFromBits(bits: Long): FlagSet = bits
+
+ def emptyValDef: ValDef = self.emptyValDef
+
+ def This(sym: Symbol): Tree = self.This(sym)
+
+ def Select(qualifier: Tree, sym: Symbol): Select = self.Select(qualifier, sym)
+
+ def Ident(sym: Symbol): Ident = self.Ident(sym)
+
+ def TypeTree(tp: Type): TypeTree = self.TypeTree(tp)
+
+ def thisPrefix(sym: Symbol): Type = sym.thisPrefix
+
+ def setType[T <: Tree](tree: T, tpe: Type): T = { tree.setType(tpe); tree }
+
+ def setSymbol[T <: Tree](tree: T, sym: Symbol): T = { tree.setSymbol(sym); tree }
+ }
+
+ val build: BuildApi = new BuildImpl
+}
diff --git a/src/reflect/scala/reflect/internal/CapturedVariables.scala b/src/reflect/scala/reflect/internal/CapturedVariables.scala
new file mode 100644
index 0000000..a3d2a8b
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/CapturedVariables.scala
@@ -0,0 +1,36 @@
+package scala.reflect
+package internal
+
+import Flags._
+
+trait CapturedVariables { self: SymbolTable =>
+
+ import definitions._
+
+ /** Mark a variable as captured; i.e. force boxing in a *Ref type.
+ */
+ def captureVariable(vble: Symbol): Unit = vble setFlag CAPTURED
+
+ /** Mark given identifier as a reference to a captured variable itself
+ * suppressing dereferencing with the `elem` field.
+ */
+ def referenceCapturedVariable(vble: Symbol): Tree = ReferenceToBoxed(Ident(vble))
+
+ /** Convert type of a captured variable to *Ref type.
+ */
+ def capturedVariableType(vble: Symbol): Type =
+ capturedVariableType(vble, NoType, false)
+
+ /** Convert type of a captured variable to *Ref type.
+ */
+ def capturedVariableType(vble: Symbol, tpe: Type = NoType, erasedTypes: Boolean = false): Type = {
+ val tpe1 = if (tpe == NoType) vble.tpe else tpe
+ val symClass = tpe1.typeSymbol
+ def refType(valueRef: Map[Symbol, Symbol], objectRefClass: Symbol) =
+ if (isPrimitiveValueClass(symClass) && symClass != UnitClass) valueRef(symClass).tpe
+ else if (erasedTypes) objectRefClass.tpe
+ else appliedType(objectRefClass, tpe1)
+ if (vble.hasAnnotation(VolatileAttr)) refType(volatileRefClass, VolatileObjectRefClass)
+ else refType(refClass, ObjectRefClass)
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/Chars.scala b/src/reflect/scala/reflect/internal/Chars.scala
new file mode 100644
index 0000000..2d07092
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Chars.scala
@@ -0,0 +1,98 @@
+/* NSC -- new Scala compiler
+ * Copyright 2006-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.reflect
+package internal
+
+import scala.annotation.{ tailrec, switch }
+import java.lang.{ Character => JCharacter }
+import scala.language.postfixOps
+
+/** Contains constants and classifier methods for characters */
+trait Chars {
+ // Be very careful touching these.
+ // Apparently trivial changes to the way you write these constants
+ // will cause Scanners.scala to go from a nice efficient switch to
+ // a ghastly nested if statement which will bring the type checker
+ // to its knees. See ticket #1456
+ // Martin: (this should be verified now that the pattern rules have been redesigned).
+ final val LF = '\u000A'
+ final val FF = '\u000C'
+ final val CR = '\u000D'
+ final val SU = '\u001A'
+
+ /** Convert a character digit to an Int according to given base,
+ * -1 if no success
+ */
+ def digit2int(ch: Char, base: Int): Int = {
+ val num = (
+ if (ch <= '9') ch - '0'
+ else if ('a' <= ch && ch <= 'z') ch - 'a' + 10
+ else if ('A' <= ch && ch <= 'Z') ch - 'A' + 10
+ else -1
+ )
+ if (0 <= num && num < base) num else -1
+ }
+ /** Buffer for creating '\ u XXXX' strings. */
+ private[this] val char2uescapeArray = Array[Char]('\\', 'u', 0, 0, 0, 0)
+
+ /** Convert a character to a backslash-u escape */
+ def char2uescape(c: Char): String = {
+ @inline def hexChar(ch: Int): Char =
+ ( if (ch < 10) '0' else 'A' - 10 ) + ch toChar
+
+ char2uescapeArray(2) = hexChar((c >> 12) )
+ char2uescapeArray(3) = hexChar((c >> 8) % 16)
+ char2uescapeArray(4) = hexChar((c >> 4) % 16)
+ char2uescapeArray(5) = hexChar((c ) % 16)
+
+ new String(char2uescapeArray)
+ }
+
+ /** Is character a line break? */
+ def isLineBreakChar(c: Char) = (c: @switch) match {
+ case LF|FF|CR|SU => true
+ case _ => false
+ }
+
+ /** Is character a whitespace character (but not a new line)? */
+ def isWhitespace(c: Char) =
+ c == ' ' || c == '\t' || c == CR
+
+ /** Can character form part of a doc comment variable $xxx? */
+ def isVarPart(c: Char) =
+ '0' <= c && c <= '9' || 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z'
+
+ /** Can character start an alphanumeric Scala identifier? */
+ def isIdentifierStart(c: Char): Boolean =
+ (c == '_') || (c == '$') || Character.isUnicodeIdentifierStart(c)
+
+ /** Can character form part of an alphanumeric Scala identifier? */
+ def isIdentifierPart(c: Char) =
+ (c == '$') || Character.isUnicodeIdentifierPart(c)
+
+ /** Is character a math or other symbol in Unicode? */
+ def isSpecial(c: Char) = {
+ val chtp = Character.getType(c)
+ chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt
+ }
+
+ private final val otherLetters = Set[Char]('\u0024', '\u005F') // '$' and '_'
+ private final val letterGroups = {
+ import JCharacter._
+ Set[Byte](LOWERCASE_LETTER, UPPERCASE_LETTER, OTHER_LETTER, TITLECASE_LETTER, LETTER_NUMBER)
+ }
+ def isScalaLetter(ch: Char) = letterGroups(JCharacter.getType(ch).toByte) || otherLetters(ch)
+
+ /** Can character form part of a Scala operator name? */
+ def isOperatorPart(c : Char) : Boolean = (c: @switch) match {
+ case '~' | '!' | '@' | '#' | '%' |
+ '^' | '*' | '+' | '-' | '<' |
+ '>' | '?' | ':' | '=' | '&' |
+ '|' | '/' | '\\' => true
+ case c => isSpecial(c)
+ }
+}
+
+object Chars extends Chars { }
diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
new file mode 100644
index 0000000..eb70ff3
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
@@ -0,0 +1,385 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import scala.annotation.switch
+
+object ClassfileConstants {
+
+ final val JAVA_MAGIC = 0xCAFEBABE
+ final val JAVA_MAJOR_VERSION = 45
+ final val JAVA_MINOR_VERSION = 3
+
+ /** (see http://java.sun.com/docs/books/jvms/second_edition/jvms-clarify.html)
+ *
+ * If the `ACC_INTERFACE` flag is set, the `ACC_ABSTRACT` flag must also
+ * be set (ch. 2.13.1).
+ *
+ * A class file cannot have both its `ACC_FINAL` and `ACC_ABSTRACT` flags
+ * set (ch. 2.8.2).
+ *
+ * A field may have at most one of its `ACC_PRIVATE`, `ACC_PROTECTED`,
+ * `ACC_PUBLIC` flags set (ch. 2.7.4).
+ *
+ * A field may not have both its `ACC_FINAL` and `ACC_VOLATILE` flags set
+ * (ch. 2.9.1).
+ *
+ * If a method has its `ACC_ABSTRACT` flag set it must not have any of its
+ * `ACC_FINAL`, `ACC_NATIVE`, `ACC_PRIVATE`, `ACC_STATIC`, `ACC_STRICT`,
+ * or `ACC_SYNCHRONIZED` flags set (ch. 2.13.3.2).
+ *
+ * All interface methods must have their `ACC_ABSTRACT` and
+ * `ACC_PUBLIC` flags set.
+ *
+ * Note for future reference: see this thread on ACC_SUPER and
+ * how its enforcement differs on the android vm.
+ * https://groups.google.com/forum/?hl=en#!topic/jvm-languages/jVhzvq8-ZIk
+ *
+ */ // Class Field Method
+ final val JAVA_ACC_PUBLIC = 0x0001 // X X X
+ final val JAVA_ACC_PRIVATE = 0x0002 // X X
+ final val JAVA_ACC_PROTECTED = 0x0004 // X X
+ final val JAVA_ACC_STATIC = 0x0008 // X X
+ final val JAVA_ACC_FINAL = 0x0010 // X X X
+ final val JAVA_ACC_SUPER = 0x0020 // X
+ final val JAVA_ACC_SYNCHRONIZED = 0x0020 // X
+ final val JAVA_ACC_VOLATILE = 0x0040 // X
+ final val JAVA_ACC_BRIDGE = 0x0040 // X
+ final val JAVA_ACC_TRANSIENT = 0x0080 // X
+ final val JAVA_ACC_VARARGS = 0x0080 // X
+ final val JAVA_ACC_NATIVE = 0x0100 // X
+ final val JAVA_ACC_INTERFACE = 0x0200 // X
+ final val JAVA_ACC_ABSTRACT = 0x0400 // X X
+ final val JAVA_ACC_STRICT = 0x0800 // X
+ final val JAVA_ACC_SYNTHETIC = 0x1000 // X X X
+ final val JAVA_ACC_ANNOTATION = 0x2000 // X
+ final val JAVA_ACC_ENUM = 0x4000 // X X
+
+ // tags describing the type of a literal in the constant pool
+ final val CONSTANT_UTF8 = 1
+ final val CONSTANT_UNICODE = 2
+ final val CONSTANT_INTEGER = 3
+ final val CONSTANT_FLOAT = 4
+ final val CONSTANT_LONG = 5
+ final val CONSTANT_DOUBLE = 6
+ final val CONSTANT_CLASS = 7
+ final val CONSTANT_STRING = 8
+ final val CONSTANT_FIELDREF = 9
+ final val CONSTANT_METHODREF = 10
+ final val CONSTANT_INTFMETHODREF = 11
+ final val CONSTANT_NAMEANDTYPE = 12
+ final val CONSTANT_METHODHANDLE = 15
+ final val CONSTANT_METHODTYPE = 16
+ final val CONSTANT_INVOKEDYNAMIC = 18
+
+ // tags describing the type of a literal in attribute values
+ final val BYTE_TAG = 'B'
+ final val CHAR_TAG = 'C'
+ final val DOUBLE_TAG = 'D'
+ final val FLOAT_TAG = 'F'
+ final val INT_TAG = 'I'
+ final val LONG_TAG = 'J'
+ final val SHORT_TAG = 'S'
+ final val BOOL_TAG = 'Z'
+ final val STRING_TAG = 's'
+ final val ENUM_TAG = 'e'
+ final val CLASS_TAG = 'c'
+ final val ARRAY_TAG = '['
+ final val VOID_TAG = 'V'
+ final val TVAR_TAG = 'T'
+ final val OBJECT_TAG = 'L'
+ final val ANNOTATION_TAG = '@'
+ final val SCALA_NOTHING = "scala.runtime.Nothing$"
+ final val SCALA_NULL = "scala.runtime.Null$"
+
+
+ // tags describing the type of newarray
+ final val T_BOOLEAN = 4
+ final val T_CHAR = 5
+ final val T_FLOAT = 6
+ final val T_DOUBLE = 7
+ final val T_BYTE = 8
+ final val T_SHORT = 9
+ final val T_INT = 10
+ final val T_LONG = 11
+
+ // JVM mnemonics
+ final val nop = 0x00
+ final val aconst_null = 0x01
+ final val iconst_m1 = 0x02
+
+ final val iconst_0 = 0x03
+ final val iconst_1 = 0x04
+ final val iconst_2 = 0x05
+ final val iconst_3 = 0x06
+ final val iconst_4 = 0x07
+ final val iconst_5 = 0x08
+
+ final val lconst_0 = 0x09
+ final val lconst_1 = 0x0a
+ final val fconst_0 = 0x0b
+ final val fconst_1 = 0x0c
+ final val fconst_2 = 0x0d
+ final val dconst_0 = 0x0e
+ final val dconst_1 = 0x0f
+
+ final val bipush = 0x10
+ final val sipush = 0x11
+ final val ldc = 0x12
+ final val ldc_w = 0x13
+ final val ldc2_w = 0x14
+
+ final val iload = 0x15
+ final val lload = 0x16
+ final val fload = 0x17
+ final val dload = 0x18
+ final val aload = 0x19
+
+ final val iload_0 = 0x1a
+ final val iload_1 = 0x1b
+ final val iload_2 = 0x1c
+ final val iload_3 = 0x1d
+ final val lload_0 = 0x1e
+ final val lload_1 = 0x1f
+ final val lload_2 = 0x20
+ final val lload_3 = 0x21
+ final val fload_0 = 0x22
+ final val fload_1 = 0x23
+ final val fload_2 = 0x24
+ final val fload_3 = 0x25
+ final val dload_0 = 0x26
+ final val dload_1 = 0x27
+ final val dload_2 = 0x28
+ final val dload_3 = 0x29
+ final val aload_0 = 0x2a
+ final val aload_1 = 0x2b
+ final val aload_2 = 0x2c
+ final val aload_3 = 0x2d
+ final val iaload = 0x2e
+ final val laload = 0x2f
+ final val faload = 0x30
+ final val daload = 0x31
+ final val aaload = 0x32
+ final val baload = 0x33
+ final val caload = 0x34
+ final val saload = 0x35
+
+ final val istore = 0x36
+ final val lstore = 0x37
+ final val fstore = 0x38
+ final val dstore = 0x39
+ final val astore = 0x3a
+ final val istore_0 = 0x3b
+ final val istore_1 = 0x3c
+ final val istore_2 = 0x3d
+ final val istore_3 = 0x3e
+ final val lstore_0 = 0x3f
+ final val lstore_1 = 0x40
+ final val lstore_2 = 0x41
+ final val lstore_3 = 0x42
+ final val fstore_0 = 0x43
+ final val fstore_1 = 0x44
+ final val fstore_2 = 0x45
+ final val fstore_3 = 0x46
+ final val dstore_0 = 0x47
+ final val dstore_1 = 0x48
+ final val dstore_2 = 0x49
+ final val dstore_3 = 0x4a
+ final val astore_0 = 0x4b
+ final val astore_1 = 0x4c
+ final val astore_2 = 0x4d
+ final val astore_3 = 0x4e
+ final val iastore = 0x4f
+ final val lastore = 0x50
+ final val fastore = 0x51
+ final val dastore = 0x52
+ final val aastore = 0x53
+ final val bastore = 0x54
+ final val castore = 0x55
+ final val sastore = 0x56
+
+ final val pop = 0x57
+ final val pop2 = 0x58
+ final val dup = 0x59
+ final val dup_x1 = 0x5a
+ final val dup_x2 = 0x5b
+ final val dup2 = 0x5c
+ final val dup2_x1 = 0x5d
+ final val dup2_x2 = 0x5e
+ final val swap = 0x5f
+
+ final val iadd = 0x60
+ final val ladd = 0x61
+ final val fadd = 0x62
+ final val dadd = 0x63
+ final val isub = 0x64
+ final val lsub = 0x65
+ final val fsub = 0x66
+ final val dsub = 0x67
+ final val imul = 0x68
+ final val lmul = 0x69
+ final val fmul = 0x6a
+ final val dmul = 0x6b
+ final val idiv = 0x6c
+ final val ldiv = 0x6d
+ final val fdiv = 0x6e
+ final val ddiv = 0x6f
+ final val irem = 0x70
+ final val lrem = 0x71
+ final val frem = 0x72
+ final val drem = 0x73
+
+ final val ineg = 0x74
+ final val lneg = 0x75
+ final val fneg = 0x76
+ final val dneg = 0x77
+
+ final val ishl = 0x78
+ final val lshl = 0x79
+ final val ishr = 0x7a
+ final val lshr = 0x7b
+ final val iushr = 0x7c
+ final val lushr = 0x7d
+ final val iand = 0x7e
+ final val land = 0x7f
+ final val ior = 0x80
+ final val lor = 0x81
+ final val ixor = 0x82
+ final val lxor = 0x83
+ final val iinc = 0x84
+
+ final val i2l = 0x85
+ final val i2f = 0x86
+ final val i2d = 0x87
+ final val l2i = 0x88
+ final val l2f = 0x89
+ final val l2d = 0x8a
+ final val f2i = 0x8b
+ final val f2l = 0x8c
+ final val f2d = 0x8d
+ final val d2i = 0x8e
+ final val d2l = 0x8f
+ final val d2f = 0x90
+ final val i2b = 0x91
+ final val i2c = 0x92
+ final val i2s = 0x93
+
+ final val lcmp = 0x94
+ final val fcmpl = 0x95
+ final val fcmpg = 0x96
+ final val dcmpl = 0x97
+ final val dcmpg = 0x98
+
+ final val ifeq = 0x99
+ final val ifne = 0x9a
+ final val iflt = 0x9b
+ final val ifge = 0x9c
+ final val ifgt = 0x9d
+ final val ifle = 0x9e
+ final val if_icmpeq = 0x9f
+ final val if_icmpne = 0xa0
+ final val if_icmplt = 0xa1
+ final val if_icmpge = 0xa2
+ final val if_icmpgt = 0xa3
+ final val if_icmple = 0xa4
+ final val if_acmpeq = 0xa5
+ final val if_acmpne = 0xa6
+ final val goto = 0xa7
+ final val jsr = 0xa8
+ final val ret = 0xa9
+ final val tableswitch = 0xaa
+ final val lookupswitch = 0xab
+ final val ireturn = 0xac
+ final val lreturn = 0xad
+ final val freturn = 0xae
+ final val dreturn = 0xaf
+ final val areturn = 0xb0
+ final val return_ = 0xb1
+
+ final val getstatic = 0xb2
+ final val putstatic = 0xb3
+ final val getfield = 0xb4
+ final val putfield = 0xb5
+
+ final val invokevirtual = 0xb6
+ final val invokespecial = 0xb7
+ final val invokestatic = 0xb8
+ final val invokeinterface = 0xb9
+ final val invokedynamic = 0xba
+
+ final val new_ = 0xbb
+ final val newarray = 0xbc
+ final val anewarray = 0xbd
+ final val arraylength = 0xbe
+ final val athrow = 0xbf
+ final val checkcast = 0xc0
+ final val instanceof = 0xc1
+ final val monitorenter = 0xc2
+ final val monitorexit = 0xc3
+ final val wide = 0xc4
+ final val multianewarray = 0xc5
+ final val ifnull = 0xc6
+ final val ifnonnull = 0xc7
+ final val goto_w = 0xc8
+ final val jsr_w = 0xc9
+
+ // reserved opcodes
+ final val breakpoint = 0xca
+ final val impdep1 = 0xfe
+ final val impdep2 = 0xff
+
+ abstract class FlagTranslation {
+ import Flags._
+
+ private def isAnnotation(flags: Int): Boolean = (flags & JAVA_ACC_ANNOTATION) != 0
+ private def translateFlag(jflag: Int, isAnnotation: Boolean, isClass: Boolean): Long = (jflag: @switch) match {
+ case JAVA_ACC_PRIVATE => PRIVATE
+ case JAVA_ACC_PROTECTED => PROTECTED
+ case JAVA_ACC_FINAL => FINAL
+ case JAVA_ACC_SYNTHETIC => SYNTHETIC
+ case JAVA_ACC_STATIC => STATIC
+ case JAVA_ACC_ABSTRACT => if (isAnnotation) 0L else if (isClass) ABSTRACT else DEFERRED
+ case JAVA_ACC_INTERFACE => if (isAnnotation) 0L else TRAIT | INTERFACE | ABSTRACT
+ case _ => 0L
+ }
+ private def translateFlags(jflags: Int, baseFlags: Long, isAnnotation: Boolean, isClass: Boolean): Long = {
+ def translateFlag0(jflags: Int): Long = translateFlag(jflags, isAnnotation, isClass)
+ var res: Long = JAVA | baseFlags
+ /** fast, elegant, maintainable, pick any two... */
+ res |= translateFlag0(jflags & JAVA_ACC_PRIVATE)
+ res |= translateFlag0(jflags & JAVA_ACC_PROTECTED)
+ res |= translateFlag0(jflags & JAVA_ACC_FINAL)
+ res |= translateFlag0(jflags & JAVA_ACC_SYNTHETIC)
+ res |= translateFlag0(jflags & JAVA_ACC_STATIC)
+ res |= translateFlag0(jflags & JAVA_ACC_ABSTRACT)
+ res |= translateFlag0(jflags & JAVA_ACC_INTERFACE)
+ res
+ }
+
+ def classFlags(jflags: Int): Long = {
+ translateFlags(jflags, 0, isAnnotation(jflags), isClass = true)
+ }
+ def fieldFlags(jflags: Int): Long = {
+ translateFlags(jflags, if ((jflags & JAVA_ACC_FINAL) == 0) MUTABLE else 0 , isAnnotation(jflags), isClass = false)
+ }
+ def methodFlags(jflags: Int): Long = {
+ translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE else 0, isAnnotation(jflags), isClass = false)
+ }
+ }
+ object FlagTranslation extends FlagTranslation { }
+
+ def toScalaMethodFlags(flags: Int): Long = FlagTranslation methodFlags flags
+ def toScalaClassFlags(flags: Int): Long = FlagTranslation classFlags flags
+ def toScalaFieldFlags(flags: Int): Long = FlagTranslation fieldFlags flags
+
+ @deprecated("Use another method in this object", "2.10.0")
+ def toScalaFlags(flags: Int, isClass: Boolean = false, isField: Boolean = false): Long = (
+ if (isClass) toScalaClassFlags(flags)
+ else if (isField) toScalaFieldFlags(flags)
+ else toScalaMethodFlags(flags)
+ )
+}
diff --git a/src/reflect/scala/reflect/internal/Constants.scala b/src/reflect/scala/reflect/internal/Constants.scala
new file mode 100644
index 0000000..28bc3e1
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Constants.scala
@@ -0,0 +1,266 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import java.lang.Integer.toOctalString
+import scala.annotation.switch
+
+trait Constants extends api.Constants {
+ self: SymbolTable =>
+
+ import definitions._
+
+ final val NoTag = 0
+ final val UnitTag = 1
+ final val BooleanTag = 2
+ final val ByteTag = 3
+ final val ShortTag = 4
+ final val CharTag = 5
+ final val IntTag = 6
+ final val LongTag = 7
+ final val FloatTag = 8
+ final val DoubleTag = 9
+ final val StringTag = 10
+ final val NullTag = 11
+ final val ClazzTag = 12
+ // For supporting java enumerations inside java annotations (see ClassfileParser)
+ final val EnumTag = 13
+
+ case class Constant(value: Any) extends ConstantApi {
+ import java.lang.Double.doubleToRawLongBits
+ import java.lang.Float.floatToRawIntBits
+
+ val tag: Int = value match {
+ case null => NullTag
+ case x: Unit => UnitTag
+ case x: Boolean => BooleanTag
+ case x: Byte => ByteTag
+ case x: Short => ShortTag
+ case x: Int => IntTag
+ case x: Long => LongTag
+ case x: Float => FloatTag
+ case x: Double => DoubleTag
+ case x: String => StringTag
+ case x: Char => CharTag
+ case x: Type => ClazzTag
+ case x: Symbol => EnumTag
+ case _ => throw new Error("bad constant value: " + value + " of class " + value.getClass)
+ }
+
+ def isByteRange: Boolean = isIntRange && Byte.MinValue <= intValue && intValue <= Byte.MaxValue
+ def isShortRange: Boolean = isIntRange && Short.MinValue <= intValue && intValue <= Short.MaxValue
+ def isCharRange: Boolean = isIntRange && Char.MinValue <= intValue && intValue <= Char.MaxValue
+ def isIntRange: Boolean = ByteTag <= tag && tag <= IntTag
+ def isLongRange: Boolean = ByteTag <= tag && tag <= LongTag
+ def isFloatRange: Boolean = ByteTag <= tag && tag <= FloatTag
+ def isNumeric: Boolean = ByteTag <= tag && tag <= DoubleTag
+ def isNonUnitAnyVal = BooleanTag <= tag && tag <= DoubleTag
+ def isAnyVal = UnitTag <= tag && tag <= DoubleTag
+
+ def tpe: Type = tag match {
+ case UnitTag => UnitClass.tpe
+ case BooleanTag => BooleanClass.tpe
+ case ByteTag => ByteClass.tpe
+ case ShortTag => ShortClass.tpe
+ case CharTag => CharClass.tpe
+ case IntTag => IntClass.tpe
+ case LongTag => LongClass.tpe
+ case FloatTag => FloatClass.tpe
+ case DoubleTag => DoubleClass.tpe
+ case StringTag => StringClass.tpe
+ case NullTag => NullClass.tpe
+ case ClazzTag => ClassType(typeValue)
+ case EnumTag => EnumType(symbolValue)
+ }
+
+ /** We need the equals method to take account of tags as well as values.
+ */
+ // !!! In what circumstance could `equalHashValue == that.equalHashValue && tag != that.tag` be true?
+ override def equals(other: Any): Boolean = other match {
+ case that: Constant =>
+ this.tag == that.tag && equalHashValue == that.equalHashValue
+ case _ => false
+ }
+
+ def isNaN = value match {
+ case f: Float => f.isNaN
+ case d: Double => d.isNaN
+ case _ => false
+ }
+
+ def booleanValue: Boolean =
+ if (tag == BooleanTag) value.asInstanceOf[Boolean]
+ else throw new Error("value " + value + " is not a boolean");
+
+ def byteValue: Byte = tag match {
+ case ByteTag => value.asInstanceOf[Byte]
+ case ShortTag => value.asInstanceOf[Short].toByte
+ case CharTag => value.asInstanceOf[Char].toByte
+ case IntTag => value.asInstanceOf[Int].toByte
+ case LongTag => value.asInstanceOf[Long].toByte
+ case FloatTag => value.asInstanceOf[Float].toByte
+ case DoubleTag => value.asInstanceOf[Double].toByte
+ case _ => throw new Error("value " + value + " is not a Byte")
+ }
+
+ def shortValue: Short = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toShort
+ case ShortTag => value.asInstanceOf[Short]
+ case CharTag => value.asInstanceOf[Char].toShort
+ case IntTag => value.asInstanceOf[Int].toShort
+ case LongTag => value.asInstanceOf[Long].toShort
+ case FloatTag => value.asInstanceOf[Float].toShort
+ case DoubleTag => value.asInstanceOf[Double].toShort
+ case _ => throw new Error("value " + value + " is not a Short")
+ }
+
+ def charValue: Char = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toChar
+ case ShortTag => value.asInstanceOf[Short].toChar
+ case CharTag => value.asInstanceOf[Char]
+ case IntTag => value.asInstanceOf[Int].toChar
+ case LongTag => value.asInstanceOf[Long].toChar
+ case FloatTag => value.asInstanceOf[Float].toChar
+ case DoubleTag => value.asInstanceOf[Double].toChar
+ case _ => throw new Error("value " + value + " is not a Char")
+ }
+
+ def intValue: Int = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toInt
+ case ShortTag => value.asInstanceOf[Short].toInt
+ case CharTag => value.asInstanceOf[Char].toInt
+ case IntTag => value.asInstanceOf[Int]
+ case LongTag => value.asInstanceOf[Long].toInt
+ case FloatTag => value.asInstanceOf[Float].toInt
+ case DoubleTag => value.asInstanceOf[Double].toInt
+ case _ => throw new Error("value " + value + " is not an Int")
+ }
+
+ def longValue: Long = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toLong
+ case ShortTag => value.asInstanceOf[Short].toLong
+ case CharTag => value.asInstanceOf[Char].toLong
+ case IntTag => value.asInstanceOf[Int].toLong
+ case LongTag => value.asInstanceOf[Long]
+ case FloatTag => value.asInstanceOf[Float].toLong
+ case DoubleTag => value.asInstanceOf[Double].toLong
+ case _ => throw new Error("value " + value + " is not a Long")
+ }
+
+ def floatValue: Float = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toFloat
+ case ShortTag => value.asInstanceOf[Short].toFloat
+ case CharTag => value.asInstanceOf[Char].toFloat
+ case IntTag => value.asInstanceOf[Int].toFloat
+ case LongTag => value.asInstanceOf[Long].toFloat
+ case FloatTag => value.asInstanceOf[Float]
+ case DoubleTag => value.asInstanceOf[Double].toFloat
+ case _ => throw new Error("value " + value + " is not a Float")
+ }
+
+ def doubleValue: Double = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toDouble
+ case ShortTag => value.asInstanceOf[Short].toDouble
+ case CharTag => value.asInstanceOf[Char].toDouble
+ case IntTag => value.asInstanceOf[Int].toDouble
+ case LongTag => value.asInstanceOf[Long].toDouble
+ case FloatTag => value.asInstanceOf[Float].toDouble
+ case DoubleTag => value.asInstanceOf[Double]
+ case _ => throw new Error("value " + value + " is not a Double")
+ }
+
+ /** Convert constant value to conform to given type.
+ */
+ def convertTo(pt: Type): Constant = {
+ val target = pt.typeSymbol
+ if (target == tpe.typeSymbol)
+ this
+ else if (target == ByteClass && isByteRange)
+ Constant(byteValue)
+ else if (target == ShortClass && isShortRange)
+ Constant(shortValue)
+ else if (target == CharClass && isCharRange)
+ Constant(charValue)
+ else if (target == IntClass && isIntRange)
+ Constant(intValue)
+ else if (target == LongClass && isLongRange)
+ Constant(longValue)
+ else if (target == FloatClass && isFloatRange)
+ Constant(floatValue)
+ else if (target == DoubleClass && isNumeric)
+ Constant(doubleValue)
+ else
+ null
+ }
+
+ def stringValue: String =
+ if (value == null) "null"
+ else if (tag == ClazzTag) signature(typeValue)
+ else value.toString()
+
+ @switch def escapedChar(ch: Char): String = ch match {
+ case '\b' => "\\b"
+ case '\t' => "\\t"
+ case '\n' => "\\n"
+ case '\f' => "\\f"
+ case '\r' => "\\r"
+ case '"' => "\\\""
+ case '\'' => "\\\'"
+ case '\\' => "\\\\"
+ case _ => if (ch.isControl) "\\0" + toOctalString(ch) else String.valueOf(ch)
+ }
+
+ def escapedStringValue: String = {
+ def escape(text: String): String = text flatMap escapedChar
+ tag match {
+ case NullTag => "null"
+ case StringTag => "\"" + escape(stringValue) + "\""
+ case ClazzTag =>
+ def show(tpe: Type) = "classOf[" + signature(tpe) + "]"
+ typeValue match {
+ case ErasedValueType(orig) => show(orig)
+ case _ => show(typeValue)
+ }
+ case CharTag => "'" + escapedChar(charValue) + "'"
+ case LongTag => longValue.toString() + "L"
+ case EnumTag => symbolValue.name.toString()
+ case _ => String.valueOf(value)
+ }
+ }
+ def typeValue: Type = value.asInstanceOf[Type]
+ def symbolValue: Symbol = value.asInstanceOf[Symbol]
+
+ /**
+ * Consider two `NaN`s to be identical, despite non-equality
+ * Consider -0d to be distinct from 0d, despite equality
+ *
+ * We use the raw versions (i.e. `floatToRawIntBits` rather than `floatToIntBits`)
+ * to avoid treating different encodings of `NaN` as the same constant.
+ * You probably can't express different `NaN` varieties as compile time
+ * constants in regular Scala code, but it is conceivable that you could
+ * conjure them with a macro.
+ */
+ private def equalHashValue: Any = value match {
+ case f: Float => floatToRawIntBits(f)
+ case d: Double => doubleToRawLongBits(d)
+ case v => v
+ }
+
+ override def hashCode: Int = {
+ import scala.util.hashing.MurmurHash3._
+ val seed = 17
+ var h = seed
+ h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide.
+ h = mix(h, equalHashValue.##)
+ finalizeHash(h, length = 2)
+ }
+ }
+
+ object Constant extends ConstantExtractor
+
+ implicit val ConstantTag = ClassTag[Constant](classOf[Constant])
+}
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
new file mode 100644
index 0000000..09d7af8
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -0,0 +1,1292 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import scala.annotation.{ switch, meta }
+import scala.collection.{ mutable, immutable }
+import Flags._
+import PartialFunction._
+import scala.reflect.api.{Universe => ApiUniverse}
+
+trait Definitions extends api.StandardDefinitions {
+ self: SymbolTable =>
+
+ import rootMirror.{getModule, getClassByName, getRequiredClass, getRequiredModule, getRequiredPackage, getClassIfDefined, getModuleIfDefined, getPackageObject, getPackageObjectIfDefined, requiredClass, requiredModule}
+
+ object definitions extends DefinitionsClass
+
+ /** Since both the value parameter types and the result type may
+ * require access to the type parameter symbols, we model polymorphic
+ * creation as a function from those symbols to (formal types, result type).
+ * The Option is to distinguish between nullary methods and empty-param-list
+ * methods.
+ */
+ private type PolyMethodCreator = List[Symbol] => (Option[List[Type]], Type)
+
+ private def enterNewClass(owner: Symbol, name: TypeName, parents: List[Type], flags: Long = 0L): ClassSymbol = {
+ val clazz = owner.newClassSymbol(name, NoPosition, flags)
+ clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz)
+ }
+ private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): MethodSymbol = {
+ val msym = owner.newMethod(name.encode, NoPosition, flags)
+ val params = msym.newSyntheticValueParams(formals)
+ msym setInfo MethodType(params, restpe)
+ }
+ private def enterNewMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): MethodSymbol =
+ owner.info.decls enter newMethod(owner, name, formals, restpe, flags)
+
+ // the scala value classes
+ trait ValueClassDefinitions {
+ self: DefinitionsClass =>
+
+ import ClassfileConstants._
+
+ private val nameToWeight = Map[Name, Int](
+ tpnme.Byte -> 2,
+ tpnme.Char -> 3,
+ tpnme.Short -> 4,
+ tpnme.Int -> 12,
+ tpnme.Long -> 24,
+ tpnme.Float -> 48,
+ tpnme.Double -> 96
+ )
+
+ private val nameToTag = Map[Name, Char](
+ tpnme.Byte -> BYTE_TAG,
+ tpnme.Char -> CHAR_TAG,
+ tpnme.Short -> SHORT_TAG,
+ tpnme.Int -> INT_TAG,
+ tpnme.Long -> LONG_TAG,
+ tpnme.Float -> FLOAT_TAG,
+ tpnme.Double -> DOUBLE_TAG,
+ tpnme.Boolean -> BOOL_TAG,
+ tpnme.Unit -> VOID_TAG
+ )
+
+ private def catastrophicFailure() =
+ abort("Could not find value classes! This is a catastrophic failure. scala " +
+ scala.util.Properties.versionString)
+
+ private def valueClassSymbol(name: TypeName): ClassSymbol = {
+ getMember(ScalaPackageClass, name) match {
+ case x: ClassSymbol => x
+ case _ => catastrophicFailure()
+ }
+ }
+ private def valueClassCompanion(name: TermName): ModuleSymbol = {
+ getMember(ScalaPackageClass, name) match {
+ case x: ModuleSymbol => x
+ case _ => catastrophicFailure()
+ }
+ }
+ private def valueCompanionMember(className: Name, methodName: TermName): TermSymbol =
+ getMemberMethod(valueClassCompanion(className.toTermName).moduleClass, methodName)
+
+ private def classesMap[T](f: Name => T) = symbolsMap(ScalaValueClassesNoUnit, f)
+ private def symbolsMap[T](syms: List[Symbol], f: Name => T): Map[Symbol, T] = mapFrom(syms)(x => f(x.name))
+ private def symbolsMapFilt[T](syms: List[Symbol], p: Name => Boolean, f: Name => T) = symbolsMap(syms filter (x => p(x.name)), f)
+
+ private def boxedName(name: Name) = sn.Boxed(name.toTypeName)
+
+ lazy val abbrvTag = symbolsMap(ScalaValueClasses, nameToTag) withDefaultValue OBJECT_TAG
+ lazy val numericWeight = symbolsMapFilt(ScalaValueClasses, nameToWeight.keySet, nameToWeight)
+ lazy val boxedModule = classesMap(x => getModule(boxedName(x)))
+ lazy val boxedClass = classesMap(x => getClassByName(boxedName(x)))
+ lazy val refClass = classesMap(x => getRequiredClass("scala.runtime." + x + "Ref"))
+ lazy val volatileRefClass = classesMap(x => getRequiredClass("scala.runtime.Volatile" + x + "Ref"))
+ lazy val boxMethod = classesMap(x => valueCompanionMember(x, nme.box))
+ lazy val unboxMethod = classesMap(x => valueCompanionMember(x, nme.unbox))
+
+ def isNumericSubClass(sub: Symbol, sup: Symbol) = (
+ (numericWeight contains sub)
+ && (numericWeight contains sup)
+ && (numericWeight(sup) % numericWeight(sub) == 0)
+ )
+
+ /** Is symbol a numeric value class? */
+ def isNumericValueClass(sym: Symbol) = ScalaNumericValueClasses contains sym
+
+ def isGetClass(sym: Symbol) = (
+ sym.name == nme.getClass_ // this condition is for performance only, this is called from `Typer#stabliize`.
+ && getClassMethods(sym)
+ )
+
+ lazy val UnitClass = valueClassSymbol(tpnme.Unit)
+ lazy val ByteClass = valueClassSymbol(tpnme.Byte)
+ lazy val ShortClass = valueClassSymbol(tpnme.Short)
+ lazy val CharClass = valueClassSymbol(tpnme.Char)
+ lazy val IntClass = valueClassSymbol(tpnme.Int)
+ lazy val LongClass = valueClassSymbol(tpnme.Long)
+ lazy val FloatClass = valueClassSymbol(tpnme.Float)
+ lazy val DoubleClass = valueClassSymbol(tpnme.Double)
+ lazy val BooleanClass = valueClassSymbol(tpnme.Boolean)
+ lazy val Boolean_and = getMemberMethod(BooleanClass, nme.ZAND)
+ lazy val Boolean_or = getMemberMethod(BooleanClass, nme.ZOR)
+ lazy val Boolean_not = getMemberMethod(BooleanClass, nme.UNARY_!)
+
+ lazy val UnitTpe = UnitClass.toTypeConstructor
+ lazy val ByteTpe = ByteClass.toTypeConstructor
+ lazy val ShortTpe = ShortClass.toTypeConstructor
+ lazy val CharTpe = CharClass.toTypeConstructor
+ lazy val IntTpe = IntClass.toTypeConstructor
+ lazy val LongTpe = LongClass.toTypeConstructor
+ lazy val FloatTpe = FloatClass.toTypeConstructor
+ lazy val DoubleTpe = DoubleClass.toTypeConstructor
+ lazy val BooleanTpe = BooleanClass.toTypeConstructor
+
+ lazy val ScalaNumericValueClasses = ScalaValueClasses filterNot Set[Symbol](UnitClass, BooleanClass)
+ lazy val ScalaValueClassesNoUnit = ScalaValueClasses filterNot (_ eq UnitClass)
+ lazy val ScalaValueClasses: List[ClassSymbol] = List(
+ UnitClass,
+ BooleanClass,
+ ByteClass,
+ ShortClass,
+ CharClass,
+ IntClass,
+ LongClass,
+ FloatClass,
+ DoubleClass
+ )
+ def ScalaValueClassCompanions: List[Symbol] = ScalaValueClasses map (_.companionSymbol)
+ def ScalaPrimitiveValueClasses: List[ClassSymbol] = ScalaValueClasses
+ }
+
+ abstract class DefinitionsClass extends DefinitionsApi with ValueClassDefinitions {
+ private var isInitialized = false
+ def isDefinitionsInitialized = isInitialized
+
+ // symbols related to packages
+ var emptypackagescope: Scope = null //debug
+
+ @deprecated("Moved to rootMirror.RootPackage", "2.10.0")
+ val RootPackage: ModuleSymbol = rootMirror.RootPackage
+
+ @deprecated("Moved to rootMirror.RootClass", "2.10.0")
+ val RootClass: ClassSymbol = rootMirror.RootClass
+
+ @deprecated("Moved to rootMirror.EmptyPackage", "2.10.0")
+ val EmptyPackage: ModuleSymbol = rootMirror.EmptyPackage
+
+ @deprecated("Moved to rootMirror.EmptyPackageClass", "2.10.0")
+ val EmptyPackageClass: ClassSymbol = rootMirror.EmptyPackageClass
+
+ // It becomes tricky to create dedicated objects for other symbols because
+ // of initialization order issues.
+ lazy val JavaLangPackage = getRequiredPackage(sn.JavaLang)
+ lazy val JavaLangPackageClass = JavaLangPackage.moduleClass.asClass
+ lazy val ScalaPackage = getRequiredPackage(nme.scala_)
+ lazy val ScalaPackageClass = ScalaPackage.moduleClass.asClass
+ lazy val RuntimePackage = getRequiredPackage("scala.runtime")
+ lazy val RuntimePackageClass = RuntimePackage.moduleClass.asClass
+
+ lazy val JavaLangEnumClass = requiredClass[java.lang.Enum[_]]
+
+ // convenient one-argument parameter lists
+ lazy val anyparam = List(AnyClass.tpe)
+ lazy val anyvalparam = List(AnyValClass.typeConstructor)
+ lazy val anyrefparam = List(AnyRefClass.typeConstructor)
+
+ // private parameter conveniences
+ private def booltype = BooleanClass.tpe
+ private def inttype = IntClass.tpe
+ private def stringtype = StringClass.tpe
+
+ def javaTypeToValueClass(jtype: Class[_]): Symbol = jtype match {
+ case java.lang.Void.TYPE => UnitClass
+ case java.lang.Byte.TYPE => ByteClass
+ case java.lang.Character.TYPE => CharClass
+ case java.lang.Short.TYPE => ShortClass
+ case java.lang.Integer.TYPE => IntClass
+ case java.lang.Long.TYPE => LongClass
+ case java.lang.Float.TYPE => FloatClass
+ case java.lang.Double.TYPE => DoubleClass
+ case java.lang.Boolean.TYPE => BooleanClass
+ case _ => NoSymbol
+ }
+ def valueClassToJavaType(sym: Symbol): Class[_] = sym match {
+ case UnitClass => java.lang.Void.TYPE
+ case ByteClass => java.lang.Byte.TYPE
+ case CharClass => java.lang.Character.TYPE
+ case ShortClass => java.lang.Short.TYPE
+ case IntClass => java.lang.Integer.TYPE
+ case LongClass => java.lang.Long.TYPE
+ case FloatClass => java.lang.Float.TYPE
+ case DoubleClass => java.lang.Double.TYPE
+ case BooleanClass => java.lang.Boolean.TYPE
+ case _ => null
+ }
+
+ /** Fully initialize the symbol, type, or scope.
+ */
+ def fullyInitializeSymbol(sym: Symbol): Symbol = {
+ sym.initialize
+ fullyInitializeType(sym.info)
+ fullyInitializeType(sym.tpe)
+ sym
+ }
+ def fullyInitializeType(tp: Type): Type = {
+ tp.typeParams foreach fullyInitializeSymbol
+ tp.paramss.flatten foreach fullyInitializeSymbol
+ tp
+ }
+ def fullyInitializeScope(scope: Scope): Scope = {
+ scope.sorted foreach fullyInitializeSymbol
+ scope
+ }
+ /** Is this type equivalent to Any, AnyVal, or AnyRef? */
+ def isTrivialTopType(tp: Type) = (
+ tp =:= AnyClass.tpe
+ || tp =:= AnyValClass.tpe
+ || tp =:= AnyRefClass.tpe
+ )
+ /** Does this type have a parent which is none of Any, AnyVal, or AnyRef? */
+ def hasNonTrivialParent(tp: Type) = tp.parents exists (t => !isTrivialTopType(tp))
+
+ private def fixupAsAnyTrait(tpe: Type): Type = tpe match {
+ case ClassInfoType(parents, decls, clazz) =>
+ if (parents.head.typeSymbol == AnyClass) tpe
+ else {
+ assert(parents.head.typeSymbol == ObjectClass, parents)
+ ClassInfoType(AnyClass.tpe :: parents.tail, decls, clazz)
+ }
+ case PolyType(tparams, restpe) =>
+ PolyType(tparams, fixupAsAnyTrait(restpe))
+// case _ => tpe
+ }
+
+ // top types
+ lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT)
+ lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectClass.tpe)
+ lazy val ObjectClass = getRequiredClass(sn.Object.toString)
+ lazy val AnyTpe = definitions.AnyClass.toTypeConstructor
+ lazy val AnyRefTpe = definitions.AnyRefClass.toTypeConstructor
+ lazy val ObjectTpe = definitions.ObjectClass.toTypeConstructor
+
+ // Note: this is not the type alias AnyRef, it's a companion-like
+ // object used by the @specialize annotation.
+ lazy val AnyRefModule = getMemberModule(ScalaPackageClass, nme.AnyRef)
+ @deprecated("Use AnyRefModule", "2.10.0")
+ def Predef_AnyRef = AnyRefModule
+
+ lazy val AnyValClass: ClassSymbol = (ScalaPackageClass.info member tpnme.AnyVal orElse {
+ val anyval = enterNewClass(ScalaPackageClass, tpnme.AnyVal, List(AnyClass.tpe, NotNullClass.tpe), ABSTRACT)
+ val av_constr = anyval.newClassConstructor(NoPosition)
+ anyval.info.decls enter av_constr
+ anyval
+ }).asInstanceOf[ClassSymbol]
+ lazy val AnyValTpe = definitions.AnyValClass.toTypeConstructor
+ def AnyVal_getClass = getMemberMethod(AnyValClass, nme.getClass_)
+
+ // bottom types
+ lazy val RuntimeNothingClass = getClassByName(fulltpnme.RuntimeNothing)
+ lazy val RuntimeNullClass = getClassByName(fulltpnme.RuntimeNull)
+
+ sealed abstract class BottomClassSymbol(name: TypeName, parent: Symbol) extends ClassSymbol(ScalaPackageClass, NoPosition, name) {
+ locally {
+ this initFlags ABSTRACT | FINAL
+ this setInfoAndEnter ClassInfoType(List(parent.tpe), newScope, this)
+ }
+ final override def isBottomClass = true
+ }
+ final object NothingClass extends BottomClassSymbol(tpnme.Nothing, AnyClass) {
+ override def isSubClass(that: Symbol) = true
+ }
+ final object NullClass extends BottomClassSymbol(tpnme.Null, AnyRefClass) {
+ override def isSubClass(that: Symbol) = (
+ (that eq AnyClass)
+ || (that ne NothingClass) && (that isSubClass ObjectClass)
+ )
+ }
+ lazy val NothingTpe = definitions.NothingClass.toTypeConstructor
+ lazy val NullTpe = definitions.NullClass.toTypeConstructor
+
+ // exceptions and other throwables
+ lazy val ClassCastExceptionClass = requiredClass[ClassCastException]
+ lazy val IndexOutOfBoundsExceptionClass = getClassByName(sn.IOOBException)
+ lazy val InvocationTargetExceptionClass = getClassByName(sn.InvTargetException)
+ lazy val MatchErrorClass = requiredClass[MatchError]
+ lazy val NonLocalReturnControlClass = requiredClass[scala.runtime.NonLocalReturnControl[_]]
+ lazy val NullPointerExceptionClass = getClassByName(sn.NPException)
+ lazy val ThrowableClass = getClassByName(sn.Throwable)
+ lazy val UninitializedErrorClass = requiredClass[UninitializedFieldError]
+
+ // fundamental reference classes
+ lazy val PartialFunctionClass = requiredClass[PartialFunction[_,_]]
+ lazy val AbstractPartialFunctionClass = requiredClass[scala.runtime.AbstractPartialFunction[_,_]]
+ lazy val SymbolClass = requiredClass[scala.Symbol]
+ lazy val StringClass = requiredClass[java.lang.String]
+ lazy val StringModule = StringClass.linkedClassOfClass
+ lazy val ClassClass = requiredClass[java.lang.Class[_]]
+ def Class_getMethod = getMemberMethod(ClassClass, nme.getMethod_)
+ lazy val DynamicClass = requiredClass[Dynamic]
+
+ // fundamental modules
+ lazy val SysPackage = getPackageObject("scala.sys")
+ def Sys_error = getMemberMethod(SysPackage, nme.error)
+
+ // Modules whose members are in the default namespace
+ // SI-5941: ScalaPackage and JavaLangPackage are never ever shared between mirrors
+ // as a result, `Int` becomes `scala.Int` and `String` becomes `java.lang.String`
+ // I could just change `isOmittablePrefix`, but there's more to it, so I'm leaving this as a todo for now
+ lazy val UnqualifiedModules = List(PredefModule, ScalaPackage, JavaLangPackage)
+ // Those modules and their module classes
+ lazy val UnqualifiedOwners = UnqualifiedModules.toSet ++ UnqualifiedModules.map(_.moduleClass)
+
+ lazy val PredefModule = requiredModule[scala.Predef.type]
+ lazy val PredefModuleClass = PredefModule.moduleClass
+
+ def Predef_classOf = getMemberMethod(PredefModule, nme.classOf)
+ def Predef_identity = getMemberMethod(PredefModule, nme.identity)
+ def Predef_conforms = getMemberMethod(PredefModule, nme.conforms)
+ def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray)
+ def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp))
+ def Predef_??? = getMemberMethod(PredefModule, nme.???)
+ def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly)
+
+ /** Is `sym` a member of Predef with the given name?
+ * Note: DON't replace this by sym == Predef_conforms/etc, as Predef_conforms is a `def`
+ * which does a member lookup (it can't be a lazy val because we might reload Predef
+ * during resident compilations).
+ */
+ def isPredefMemberNamed(sym: Symbol, name: Name) = (
+ (sym.name == name) && (sym.owner == PredefModule.moduleClass)
+ )
+
+ /** Specialization.
+ */
+ lazy val SpecializableModule = requiredModule[Specializable]
+ lazy val GroupOfSpecializable = getMemberClass(SpecializableModule, tpnme.Group)
+
+ lazy val ConsoleModule = requiredModule[scala.Console.type]
+ lazy val ScalaRunTimeModule = requiredModule[scala.runtime.ScalaRunTime.type]
+ lazy val SymbolModule = requiredModule[scala.Symbol.type]
+ lazy val Symbol_apply = getMemberMethod(SymbolModule, nme.apply)
+
+ def arrayApplyMethod = getMemberMethod(ScalaRunTimeModule, nme.array_apply)
+ def arrayUpdateMethod = getMemberMethod(ScalaRunTimeModule, nme.array_update)
+ def arrayLengthMethod = getMemberMethod(ScalaRunTimeModule, nme.array_length)
+ def arrayCloneMethod = getMemberMethod(ScalaRunTimeModule, nme.array_clone)
+ def ensureAccessibleMethod = getMemberMethod(ScalaRunTimeModule, nme.ensureAccessible)
+ def scalaRuntimeSameElements = getMemberMethod(ScalaRunTimeModule, nme.sameElements)
+ def arrayClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayClass)
+ def arrayElementClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayElementClass)
+
+ // classes with special meanings
+ lazy val StringAddClass = requiredClass[scala.runtime.StringAdd]
+ lazy val ArrowAssocClass = getRequiredClass("scala.Predef.ArrowAssoc") // SI-5731
+ lazy val StringAdd_+ = getMemberMethod(StringAddClass, nme.PLUS)
+ lazy val NotNullClass = getRequiredClass("scala.NotNull")
+ lazy val ScalaNumberClass = requiredClass[scala.math.ScalaNumber]
+ lazy val TraitSetterAnnotationClass = requiredClass[scala.runtime.TraitSetter]
+ lazy val DelayedInitClass = requiredClass[scala.DelayedInit]
+ def delayedInitMethod = getMemberMethod(DelayedInitClass, nme.delayedInit)
+ // a dummy value that communicates that a delayedInit call is compiler-generated
+ // from phase UnCurry to phase Constructors
+ // !!! This is not used anywhere (it was checked in that way.)
+ // def delayedInitArgVal = EmptyPackageClass.newValue(NoPosition, nme.delayedInitArg)
+ // .setInfo(UnitClass.tpe)
+
+ lazy val TypeConstraintClass = requiredClass[scala.annotation.TypeConstraint]
+ lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, anyparam, ABSTRACT | TRAIT | FINAL)
+ lazy val SerializableClass = requiredClass[scala.Serializable]
+ lazy val JavaSerializableClass = requiredClass[java.io.Serializable] modifyInfo fixupAsAnyTrait
+ lazy val ComparableClass = requiredClass[java.lang.Comparable[_]] modifyInfo fixupAsAnyTrait
+ lazy val CloneableClass = requiredClass[scala.Cloneable]
+ lazy val JavaCloneableClass = requiredClass[java.lang.Cloneable]
+ lazy val JavaNumberClass = requiredClass[java.lang.Number]
+ lazy val RemoteInterfaceClass = requiredClass[java.rmi.Remote]
+ lazy val RemoteExceptionClass = requiredClass[java.rmi.RemoteException]
+
+ lazy val ByNameParamClass = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyClass.tpe)
+ lazy val EqualsPatternClass = specialPolyClass(tpnme.EQUALS_PATTERN_NAME, 0L)(_ => AnyClass.tpe)
+ lazy val JavaRepeatedParamClass = specialPolyClass(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => arrayType(tparam.tpe))
+ lazy val RepeatedParamClass = specialPolyClass(tpnme.REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => seqType(tparam.tpe))
+
+ def isByNameParamType(tp: Type) = tp.typeSymbol == ByNameParamClass
+ def isScalaRepeatedParamType(tp: Type) = tp.typeSymbol == RepeatedParamClass
+ def isJavaRepeatedParamType(tp: Type) = tp.typeSymbol == JavaRepeatedParamClass
+ def isRepeatedParamType(tp: Type) = isScalaRepeatedParamType(tp) || isJavaRepeatedParamType(tp)
+ def isRepeated(param: Symbol) = isRepeatedParamType(param.tpe)
+ def isCastSymbol(sym: Symbol) = sym == Any_asInstanceOf || sym == Object_asInstanceOf
+
+ def isJavaVarArgsMethod(m: Symbol) = m.isMethod && isJavaVarArgs(m.info.params)
+ def isJavaVarArgs(params: Seq[Symbol]) = params.nonEmpty && isJavaRepeatedParamType(params.last.tpe)
+ def isScalaVarArgs(params: Seq[Symbol]) = params.nonEmpty && isScalaRepeatedParamType(params.last.tpe)
+ def isVarArgsList(params: Seq[Symbol]) = params.nonEmpty && isRepeatedParamType(params.last.tpe)
+ def isVarArgTypes(formals: Seq[Type]) = formals.nonEmpty && isRepeatedParamType(formals.last)
+
+ def hasRepeatedParam(tp: Type): Boolean = tp match {
+ case MethodType(formals, restpe) => isScalaVarArgs(formals) || hasRepeatedParam(restpe)
+ case PolyType(_, restpe) => hasRepeatedParam(restpe)
+ case _ => false
+ }
+
+ def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
+ case TypeRef(_, RepeatedParamClass, arg :: Nil) => seqType(arg)
+ case _ => tp
+ }
+
+ def seqToRepeated(tp: Type): Type = (tp baseType SeqClass) match {
+ case TypeRef(_, SeqClass, arg :: Nil) => scalaRepeatedType(arg)
+ case _ => tp
+ }
+
+ def isPrimitiveArray(tp: Type) = tp match {
+ case TypeRef(_, ArrayClass, arg :: Nil) => isPrimitiveValueClass(arg.typeSymbol)
+ case _ => false
+ }
+ def isReferenceArray(tp: Type) = tp match {
+ case TypeRef(_, ArrayClass, arg :: Nil) => arg <:< AnyRefClass.tpe
+ case _ => false
+ }
+ def isArrayOfSymbol(tp: Type, elem: Symbol) = tp match {
+ case TypeRef(_, ArrayClass, arg :: Nil) => arg.typeSymbol == elem
+ case _ => false
+ }
+
+ lazy val MatchingStrategyClass = getRequiredClass("scala.MatchingStrategy")
+
+ // collections classes
+ lazy val ConsClass = requiredClass[scala.collection.immutable.::[_]]
+ lazy val IterableClass = requiredClass[scala.collection.Iterable[_]]
+ lazy val IteratorClass = requiredClass[scala.collection.Iterator[_]]
+ lazy val ListClass = requiredClass[scala.collection.immutable.List[_]]
+ lazy val SeqClass = requiredClass[scala.collection.Seq[_]]
+ lazy val StringBuilderClass = requiredClass[scala.collection.mutable.StringBuilder]
+ lazy val TraversableClass = requiredClass[scala.collection.Traversable[_]]
+
+ lazy val ListModule = requiredModule[scala.collection.immutable.List.type]
+ lazy val List_apply = getMemberMethod(ListModule, nme.apply)
+ lazy val NilModule = requiredModule[scala.collection.immutable.Nil.type]
+ lazy val SeqModule = requiredModule[scala.collection.Seq.type]
+ lazy val IteratorModule = requiredModule[scala.collection.Iterator.type]
+ lazy val Iterator_apply = getMemberMethod(IteratorModule, nme.apply)
+
+ // arrays and their members
+ lazy val ArrayModule = requiredModule[scala.Array.type]
+ lazy val ArrayModule_overloadedApply = getMemberMethod(ArrayModule, nme.apply)
+ def ArrayModule_genericApply = ArrayModule_overloadedApply.suchThat(_.paramss.flatten.last.tpe.typeSymbol == ClassTagClass) // [T: ClassTag](xs: T*): Array[T]
+ def ArrayModule_apply(tp: Type) = ArrayModule_overloadedApply.suchThat(_.tpe.resultType =:= arrayType(tp)) // (p1: AnyVal1, ps: AnyVal1*): Array[AnyVal1]
+ lazy val ArrayClass = getRequiredClass("scala.Array") // requiredClass[scala.Array[_]]
+ lazy val Array_apply = getMemberMethod(ArrayClass, nme.apply)
+ lazy val Array_update = getMemberMethod(ArrayClass, nme.update)
+ lazy val Array_length = getMemberMethod(ArrayClass, nme.length)
+ lazy val Array_clone = getMemberMethod(ArrayClass, nme.clone_)
+
+ // reflection / structural types
+ lazy val SoftReferenceClass = requiredClass[java.lang.ref.SoftReference[_]]
+ lazy val WeakReferenceClass = requiredClass[java.lang.ref.WeakReference[_]]
+ lazy val MethodClass = getClassByName(sn.MethodAsObject)
+ def methodClass_setAccessible = getMemberMethod(MethodClass, nme.setAccessible)
+ lazy val EmptyMethodCacheClass = requiredClass[scala.runtime.EmptyMethodCache]
+ lazy val MethodCacheClass = requiredClass[scala.runtime.MethodCache]
+ def methodCache_find = getMemberMethod(MethodCacheClass, nme.find_)
+ def methodCache_add = getMemberMethod(MethodCacheClass, nme.add_)
+
+ // scala.reflect
+ lazy val ReflectPackage = requiredModule[scala.reflect.`package`.type]
+ lazy val ReflectApiPackage = getPackageObjectIfDefined("scala.reflect.api") // defined in scala-reflect.jar, so we need to be careful
+ lazy val ReflectRuntimePackage = getPackageObjectIfDefined("scala.reflect.runtime") // defined in scala-reflect.jar, so we need to be careful
+ def ReflectRuntimeUniverse = if (ReflectRuntimePackage != NoSymbol) getMemberValue(ReflectRuntimePackage, nme.universe) else NoSymbol
+ def ReflectRuntimeCurrentMirror = if (ReflectRuntimePackage != NoSymbol) getMemberMethod(ReflectRuntimePackage, nme.currentMirror) else NoSymbol
+
+ lazy val PartialManifestClass = getTypeMember(ReflectPackage, tpnme.ClassManifest)
+ lazy val PartialManifestModule = requiredModule[scala.reflect.ClassManifestFactory.type]
+ lazy val FullManifestClass = requiredClass[scala.reflect.Manifest[_]]
+ lazy val FullManifestModule = requiredModule[scala.reflect.ManifestFactory.type]
+ lazy val OptManifestClass = requiredClass[scala.reflect.OptManifest[_]]
+ lazy val NoManifest = requiredModule[scala.reflect.NoManifest.type]
+
+ lazy val ExprsClass = getClassIfDefined("scala.reflect.api.Exprs") // defined in scala-reflect.jar, so we need to be careful
+ lazy val ExprClass = if (ExprsClass != NoSymbol) getMemberClass(ExprsClass, tpnme.Expr) else NoSymbol
+ def ExprSplice = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.splice) else NoSymbol
+ def ExprValue = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.value) else NoSymbol
+ lazy val ExprModule = if (ExprsClass != NoSymbol) getMemberModule(ExprsClass, nme.Expr) else NoSymbol
+
+ lazy val ClassTagModule = requiredModule[scala.reflect.ClassTag[_]]
+ lazy val ClassTagClass = requiredClass[scala.reflect.ClassTag[_]]
+ lazy val TypeTagsClass = getClassIfDefined("scala.reflect.api.TypeTags") // defined in scala-reflect.jar, so we need to be careful
+ lazy val WeakTypeTagClass = if (TypeTagsClass != NoSymbol) getMemberClass(TypeTagsClass, tpnme.WeakTypeTag) else NoSymbol
+ lazy val WeakTypeTagModule = if (TypeTagsClass != NoSymbol) getMemberModule(TypeTagsClass, nme.WeakTypeTag) else NoSymbol
+ lazy val TypeTagClass = if (TypeTagsClass != NoSymbol) getMemberClass(TypeTagsClass, tpnme.TypeTag) else NoSymbol
+ lazy val TypeTagModule = if (TypeTagsClass != NoSymbol) getMemberModule(TypeTagsClass, nme.TypeTag) else NoSymbol
+ def materializeClassTag = getMemberMethod(ReflectPackage, nme.materializeClassTag)
+ def materializeWeakTypeTag = if (ReflectApiPackage != NoSymbol) getMemberMethod(ReflectApiPackage, nme.materializeWeakTypeTag) else NoSymbol
+ def materializeTypeTag = if (ReflectApiPackage != NoSymbol) getMemberMethod(ReflectApiPackage, nme.materializeTypeTag) else NoSymbol
+
+ lazy val ApiUniverseClass = getClassIfDefined("scala.reflect.api.Universe") // defined in scala-reflect.jar, so we need to be careful
+ def ApiUniverseReify = if (ApiUniverseClass != NoSymbol) getMemberMethod(ApiUniverseClass, nme.reify) else NoSymbol
+ lazy val JavaUniverseClass = getClassIfDefined("scala.reflect.api.JavaUniverse") // defined in scala-reflect.jar, so we need to be careful
+
+ lazy val MirrorClass = getClassIfDefined("scala.reflect.api.Mirror") // defined in scala-reflect.jar, so we need to be careful
+
+ lazy val TypeCreatorClass = getClassIfDefined("scala.reflect.api.TypeCreator") // defined in scala-reflect.jar, so we need to be careful
+ lazy val TreeCreatorClass = getClassIfDefined("scala.reflect.api.TreeCreator") // defined in scala-reflect.jar, so we need to be careful
+
+ lazy val MacroContextClass = getClassIfDefined("scala.reflect.macros.Context") // defined in scala-reflect.jar, so we need to be careful
+ def MacroContextPrefix = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.prefix) else NoSymbol
+ def MacroContextPrefixType = if (MacroContextClass != NoSymbol) getTypeMember(MacroContextClass, tpnme.PrefixType) else NoSymbol
+ def MacroContextUniverse = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.universe) else NoSymbol
+ def MacroContextMirror = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.mirror) else NoSymbol
+ lazy val MacroImplAnnotation = requiredClass[scala.reflect.macros.internal.macroImpl]
+
+ lazy val StringContextClass = requiredClass[scala.StringContext]
+ def StringContext_f = getMemberMethod(StringContextClass, nme.f)
+
+ lazy val ScalaSignatureAnnotation = requiredClass[scala.reflect.ScalaSignature]
+ lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature]
+
+ // Option classes
+ lazy val OptionClass: ClassSymbol = requiredClass[Option[_]]
+ lazy val OptionModule: ModuleSymbol = requiredModule[scala.Option.type]
+ lazy val Option_apply = getMemberMethod(OptionModule, nme.apply)
+ lazy val SomeClass: ClassSymbol = requiredClass[Some[_]]
+ lazy val NoneModule: ModuleSymbol = requiredModule[scala.None.type]
+ lazy val SomeModule: ModuleSymbol = requiredModule[scala.Some.type]
+
+ def compilerTypeFromTag(tt: ApiUniverse # WeakTypeTag[_]): Type = tt.in(rootMirror).tpe
+ def compilerSymbolFromTag(tt: ApiUniverse # WeakTypeTag[_]): Symbol = tt.in(rootMirror).tpe.typeSymbol
+
+ // The given symbol represents either String.+ or StringAdd.+
+ def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+
+ def isArrowAssoc(sym: Symbol) = ArrowAssocClass.tpe.decls.toList contains sym
+
+ // The given symbol is a method with the right name and signature to be a runnable java program.
+ def isJavaMainMethod(sym: Symbol) = (sym.name == nme.main) && (sym.info match {
+ case MethodType(p :: Nil, restpe) => isArrayOfSymbol(p.tpe, StringClass) && restpe.typeSymbol == UnitClass
+ case _ => false
+ })
+ // The given class has a main method.
+ def hasJavaMainMethod(sym: Symbol): Boolean =
+ (sym.tpe member nme.main).alternatives exists isJavaMainMethod
+ def hasJavaMainMethod(path: String): Boolean =
+ hasJavaMainMethod(getModuleIfDefined(path))
+
+ def isOptionType(tp: Type) = tp.typeSymbol isSubClass OptionClass
+ def isSomeType(tp: Type) = tp.typeSymbol eq SomeClass
+ def isNoneType(tp: Type) = tp.typeSymbol eq NoneModule
+
+ // Product, Tuple, Function, AbstractFunction
+ private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[ClassSymbol] = {
+ val list = countFrom to arity map (i => getRequiredClass("scala." + name + i))
+ list.toArray
+ }
+ def prepend[S >: ClassSymbol : ClassTag](elem0: S, elems: Array[ClassSymbol]): Array[S] = elem0 +: elems
+
+ private def aritySpecificType[S <: Symbol](symbolArray: Array[S], args: List[Type], others: Type*): Type = {
+ val arity = args.length
+ if (arity >= symbolArray.length) NoType
+ else appliedType(symbolArray(arity), args ++ others: _*)
+ }
+
+ val MaxTupleArity, MaxProductArity, MaxFunctionArity = 22
+ lazy val ProductClass: Array[ClassSymbol] = prepend(UnitClass, mkArityArray("Product", MaxProductArity, 1))
+ lazy val TupleClass: Array[Symbol] = prepend(NoSymbol, mkArityArray("Tuple", MaxTupleArity, 1))
+ // Arity-indexed symbol arrays: FunctionClass(n) is scala.FunctionN and
+ // AbstractFunctionClass(n) is scala.runtime.AbstractFunctionN, up to MaxFunctionArity.
+ lazy val FunctionClass = mkArityArray("Function", MaxFunctionArity, 0)
+ lazy val AbstractFunctionClass = mkArityArray("runtime.AbstractFunction", MaxFunctionArity, 0)
+
+ /** Creators for TupleN, ProductN, FunctionN. */
+ def tupleType(elems: List[Type]) = aritySpecificType(TupleClass, elems)
+ def productType(elems: List[Type]) = aritySpecificType(ProductClass, elems)
+ def functionType(formals: List[Type], restpe: Type) = aritySpecificType(FunctionClass, formals, restpe)
+ def abstractFunctionType(formals: List[Type], restpe: Type) = aritySpecificType(AbstractFunctionClass, formals, restpe)
+
+ /** Name of the ScalaRunTime method that wraps an Array with the given element
+ * type; primitives and Unit get dedicated wrappers, reference types (other
+ * than the phantom classes) get wrapRefArray, anything else the generic one.
+ */
+ def wrapArrayMethodName(elemtp: Type): TermName = elemtp.typeSymbol match {
+ case ByteClass => nme.wrapByteArray
+ case ShortClass => nme.wrapShortArray
+ case CharClass => nme.wrapCharArray
+ case IntClass => nme.wrapIntArray
+ case LongClass => nme.wrapLongArray
+ case FloatClass => nme.wrapFloatArray
+ case DoubleClass => nme.wrapDoubleArray
+ case BooleanClass => nme.wrapBooleanArray
+ case UnitClass => nme.wrapUnitArray
+ case _ =>
+ if ((elemtp <:< AnyRefClass.tpe) && !isPhantomClass(elemtp.typeSymbol)) nme.wrapRefArray
+ else nme.genericWrapArray
+ }
+
+ @deprecated("Use isTupleType", "2.10.0")
+ def isTupleTypeOrSubtype(tp: Type) = isTupleType(tp)
+
+ def tupleField(n: Int, j: Int) = getMemberValue(TupleClass(n), nme.productAccessorName(j))
+ // NOTE: returns true for NoSymbol since it's included in the TupleClass array -- is this intentional?
+ def isTupleSymbol(sym: Symbol) = TupleClass contains unspecializedSymbol(sym)
+ def isProductNClass(sym: Symbol) = ProductClass contains sym
+
+ // Maps a specialized symbol (e.g. a Tuple2$mcII$sp-style subclass) back to
+ // the generic class it was derived from; other symbols are returned as-is.
+ def unspecializedSymbol(sym: Symbol): Symbol = {
+ if (sym hasFlag SPECIALIZED) {
+ // add initialization from its generic class constructor
+ val genericName = nme.unspecializedName(sym.name)
+ val member = sym.owner.info.decl(genericName.toTypeName)
+ member
+ }
+ else sym
+ }
+
+ // Checks whether the given type is true for the given condition,
+ // or if it is a specialized subtype of a type for which it is true.
+ //
+ // Origins notes:
+ // An issue was introduced with specialization in that the implementation
+ // of "isTupleType" in Definitions relied upon sym == TupleClass(elems.length).
+ // This test is untrue for specialized tuples, causing mysterious behavior
+ // because only some tuples are specialized.
+ def isPossiblySpecializedType(tp: Type)(cond: Type => Boolean) = {
+ cond(tp) || (tp match {
+ case TypeRef(pre, sym, args) if sym hasFlag SPECIALIZED =>
+ cond(tp baseType unspecializedSymbol(sym))
+ case _ =>
+ false
+ })
+ }
+ // No normalization.
+ def isTupleTypeDirect(tp: Type) = isPossiblySpecializedType(tp) {
+ case TypeRef(_, sym, args) if args.nonEmpty =>
+ val len = args.length
+ len <= MaxTupleArity && sym == TupleClass(len)
+ case _ => false
+ }
+ def isTupleType(tp: Type) = isTupleTypeDirect(tp.normalize)
+
+ // The scala.Product root trait and accessors for its standard members.
+ lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product]
+ def Product_productArity = getMemberMethod(ProductRootClass, nme.productArity)
+ def Product_productElement = getMemberMethod(ProductRootClass, nme.productElement)
+ def Product_iterator = getMemberMethod(ProductRootClass, nme.productIterator)
+ def Product_productPrefix = getMemberMethod(ProductRootClass, nme.productPrefix)
+ def Product_canEqual = getMemberMethod(ProductRootClass, nme.canEqual_)
+ // def Product_productElementName = getMemberMethod(ProductRootClass, nme.productElementName)
+
+ // The j-th projection (accessor _j) of symbol `z`, or of ProductN.
+ def productProj(z:Symbol, j: Int): TermSymbol = getMemberValue(z, nme.productAccessorName(j))
+ def productProj(n: Int, j: Int): TermSymbol = productProj(ProductClass(n), j)
+
+ /** returns true if this type is exactly ProductN[T1,...,Tn], not some subclass */
+ def isExactProductType(tp: Type): Boolean = isProductNClass(tp.typeSymbol)
+
+ /** if tpe <: ProductN[T1,...,TN], returns List(T1,...,TN) else Nil */
+ def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNClass match {
+ case Some(x) => tpe.baseType(x).typeArgs
+ case _ => Nil
+ }
+
+ // Strips a NullaryMethodType wrapper (`=> T` result) if present.
+ def dropNullaryMethod(tp: Type) = tp match {
+ case NullaryMethodType(restpe) => restpe
+ case _ => tp
+ }
+
+ // Unwraps the final result type of an extractor, peeling off a refinement
+ // down to its first parent.
+ def unapplyUnwrap(tpe:Type) = tpe.finalResultType.normalize match {
+ case RefinedType(p :: _, _) => p.normalize
+ case tp => tp
+ }
+
+ def functionApply(n: Int) = getMemberMethod(FunctionClass(n), nme.apply)
+
+ // The AbstractFunctionN type with the same type arguments as the given
+ // (asserted) FunctionN type.
+ def abstractFunctionForFunctionType(tp: Type) = {
+ assert(isFunctionType(tp), tp)
+ abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
+ }
+
+ def isFunctionType(tp: Type): Boolean = tp.normalize match {
+ case TypeRef(_, sym, args) if args.nonEmpty =>
+ val arity = args.length - 1 // -1 is the return type
+ arity <= MaxFunctionArity && sym == FunctionClass(arity)
+ case _ =>
+ false
+ }
+
+ def isPartialFunctionType(tp: Type): Boolean = {
+ val sym = tp.typeSymbol
+ (sym eq PartialFunctionClass) || (sym eq AbstractPartialFunctionClass)
+ }
+
+ def isSeqType(tp: Type) = elementType(SeqClass, tp.normalize) != NoType
+
+ // The element type when tp is exactly container[arg] (no baseType lookup),
+ // otherwise NoType.
+ def elementType(container: Symbol, tp: Type): Type = tp match {
+ case TypeRef(_, `container`, arg :: Nil) => arg
+ case _ => NoType
+ }
+
+ // Convenience applications of common unary type constructors.
+ def arrayType(arg: Type) = appliedType(ArrayClass, arg)
+ def byNameType(arg: Type) = appliedType(ByNameParamClass, arg)
+ def iteratorOfType(tp: Type) = appliedType(IteratorClass, tp)
+ def javaRepeatedType(arg: Type) = appliedType(JavaRepeatedParamClass, arg)
+ def optionType(tp: Type) = appliedType(OptionClass, tp)
+ def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg)
+ def seqType(arg: Type) = appliedType(SeqClass, arg)
+ def someType(tp: Type) = appliedType(SomeClass, tp)
+
+ def StringArray = arrayType(StringClass.tpe)
+ lazy val ObjectArray = arrayType(ObjectClass.tpe)
+
+ // After erasure (or when targeting MSIL) Class is used unparameterized.
+ def ClassType(arg: Type) =
+ if (phase.erasedTypes || forMSIL) ClassClass.tpe
+ else appliedType(ClassClass, arg)
+
+ def EnumType(sym: Symbol) =
+ // given (in java): "class A { enum E { VAL1 } }"
+ // - sym: the symbol of the actual enumeration value (VAL1)
+ // - .owner: the ModuleClassSymbol of the enumeration (object E)
+ // - .linkedClassOfClass: the ClassSymbol of the enumeration (class E)
+ sym.owner.linkedClassOfClass.tpe
+
+ def vmClassType(arg: Type): Type = ClassType(arg)
+ def vmSignature(sym: Symbol, info: Type): String = signature(info) // !!!
+
+ /** Given a class symbol C with type parameters T1, T2, ... Tn
+ * which have upper/lower bounds LB1/UB1, LB2/UB2, ..., LBn/UBn,
+ * returns an existential type of the form
+ *
+ * C[E1, ..., En] forSome { E1 >: LB1 <: UB1 ... En >: LBn <: UBn }.
+ */
+ def classExistentialType(clazz: Symbol): Type =
+ newExistentialType(clazz.typeParams, clazz.tpe)
+
+ /** Given type U, creates a Type representing Class[_ <: U].
+ */
+ def boundedClassType(upperBound: Type) =
+ appliedTypeAsUpperBounds(ClassClass.typeConstructor, List(upperBound))
+
+ /** To avoid unchecked warnings on polymorphic classes, translate
+ * a Foo[T] into a Foo[_] for use in the pattern matcher.
+ */
+ @deprecated("Use classExistentialType", "2.10.0")
+ def typeCaseType(clazz: Symbol): Type = classExistentialType(clazz)
+
+ //
+ // .NET backend
+ //
+
+ lazy val ComparatorClass = getRequiredClass("scala.runtime.Comparator")
+ // System.ValueType
+ lazy val ValueTypeClass: ClassSymbol = getClassByName(sn.ValueType)
+ // System.MulticastDelegate
+ lazy val DelegateClass: ClassSymbol = getClassByName(sn.Delegate)
+ var Delegate_scalaCallers: List[Symbol] = List() // Syncnote: No protection necessary yet as only for .NET where reflection is not supported.
+ // Symbol -> (Symbol, Type): scalaCaller -> (scalaMethodSym, DelegateType)
+ // var Delegate_scalaCallerInfos: HashMap[Symbol, (Symbol, Type)] = _
+ lazy val Delegate_scalaCallerTargets: mutable.HashMap[Symbol, Symbol] = mutable.HashMap()
+
+ // True when `delegateType` is a MulticastDelegate whose `apply` signature
+ // matches the given FunctionN type's argument list plus return type.
+ // NOTE(review): for parameters whose type is not Any, the param *Symbol* pt
+ // (not pt.tpe) is placed in the list compared against `args` (a list of
+ // Types) — presumably `pt.tpe` was intended; dead .NET path, confirm.
+ def isCorrespondingDelegate(delegateType: Type, functionType: Type): Boolean = {
+ isSubType(delegateType, DelegateClass.tpe) &&
+ (delegateType.member(nme.apply).tpe match {
+ case MethodType(delegateParams, delegateReturn) =>
+ isFunctionType(functionType) &&
+ (functionType.normalize match {
+ case TypeRef(_, _, args) =>
+ (delegateParams.map(pt => {
+ if (pt.tpe == AnyClass.tpe) definitions.ObjectClass.tpe else pt})
+ ::: List(delegateReturn)) == args
+ case _ => false
+ })
+ case _ => false
+ })
+ }
+
+ // members of class scala.Any
+ // These are entered synthetically: Any has no bytecode, so its members are
+ // created here rather than loaded from a classfile.
+ lazy val Any_== = enterNewMethod(AnyClass, nme.EQ, anyparam, booltype, FINAL)
+ lazy val Any_!= = enterNewMethod(AnyClass, nme.NE, anyparam, booltype, FINAL)
+ lazy val Any_equals = enterNewMethod(AnyClass, nme.equals_, anyparam, booltype)
+ lazy val Any_hashCode = enterNewMethod(AnyClass, nme.hashCode_, Nil, inttype)
+ lazy val Any_toString = enterNewMethod(AnyClass, nme.toString_, Nil, stringtype)
+ lazy val Any_## = enterNewMethod(AnyClass, nme.HASHHASH, Nil, inttype, FINAL)
+
+ // Any_getClass requires special handling. The return type is determined on
+ // a per-call-site basis as if the function being called were actually:
+ //
+ // // Assuming `target.getClass()`
+ // def getClass[T](target: T): Class[_ <: T]
+ //
+ // Since getClass is not actually a polymorphic method, this requires compiler
+ // participation. At the "Any" level, the return type is Class[_] as it is in
+ // java.lang.Object. Java also special cases the return type.
+ lazy val Any_getClass = enterNewMethod(AnyClass, nme.getClass_, Nil, getMemberMethod(ObjectClass, nme.getClass_).tpe.resultType, DEFERRED)
+ lazy val Any_isInstanceOf = newT1NullaryMethod(AnyClass, nme.isInstanceOf_, FINAL)(_ => booltype)
+ lazy val Any_asInstanceOf = newT1NullaryMethod(AnyClass, nme.asInstanceOf_, FINAL)(_.typeConstructor)
+
+ // getClass as seen on Any/AnyVal and each of the primitive value classes.
+ lazy val primitiveGetClassMethods = Set[Symbol](Any_getClass, AnyVal_getClass) ++ (
+ ScalaValueClasses map (_.tpe member nme.getClass_)
+ )
+
+ lazy val getClassMethods: Set[Symbol] = primitiveGetClassMethods + Object_getClass
+
+ // A type function from T => Class[U], used to determine the return
+ // type of getClass calls. The returned type is:
+ //
+ // 1. If T is a value type, Class[T].
+ // 2. If T is a phantom type (Any or AnyVal), Class[_].
+ // 3. If T is a local class, Class[_ <: |T|].
+ // 4. Otherwise, Class[_ <: T].
+ //
+ // Note: AnyVal cannot be Class[_ <: AnyVal] because if the static type of the
+ // receiver is AnyVal, it implies the receiver is boxed, so the correct
+ // class object is that of java.lang.Integer, not Int.
+ //
+ // TODO: If T is final, return type could be Class[T]. Should it?
+ def getClassReturnType(tp: Type): Type = {
+ val sym = tp.typeSymbol
+
+ if (phase.erasedTypes) ClassClass.tpe
+ else if (isPrimitiveValueClass(sym)) ClassType(tp.widen)
+ else {
+ // Fresh existential standing in for Class's type parameter.
+ val eparams = typeParamsToExistentials(ClassClass, ClassClass.typeParams)
+ val upperBound = (
+ if (isPhantomClass(sym)) AnyClass.tpe
+ else if (sym.isLocalClass) erasure.intersectionDominator(tp.parents)
+ else tp.widen
+ )
+
+ existentialAbstraction(
+ eparams,
+ ClassType((eparams.head setInfo TypeBounds.upper(upperBound)).tpe)
+ )
+ }
+ }
+
+ /** Remove references to class Object (other than the head) in a list of parents */
+ def removeLaterObjects(tps: List[Type]): List[Type] = tps match {
+ case Nil => Nil
+ case x :: xs => x :: xs.filterNot(_.typeSymbol == ObjectClass)
+ }
+ /** Remove all but one reference to class Object from a list of parents. */
+ def removeRedundantObjects(tps: List[Type]): List[Type] = tps match {
+ case Nil => Nil
+ case x :: xs =>
+ if (x.typeSymbol == ObjectClass)
+ x :: xs.filterNot(_.typeSymbol == ObjectClass)
+ else
+ x :: removeRedundantObjects(xs)
+ }
+ /** Order a list of types with non-trait classes before others. */
+ def classesFirst(tps: List[Type]): List[Type] = {
+ val (classes, others) = tps partition (t => t.typeSymbol.isClass && !t.typeSymbol.isTrait)
+ if (classes.isEmpty || others.isEmpty || (tps startsWith classes)) tps
+ else classes ::: others
+ }
+ /** The following transformations applied to a list of parents.
+ * If any parent is a class/trait, all parents which normalize to
+ * Object are discarded. Otherwise, all parents which normalize
+ * to Object except the first one found are discarded.
+ */
+ def normalizedParents(parents: List[Type]): List[Type] = {
+ if (parents exists (t => (t.typeSymbol ne ObjectClass) && t.typeSymbol.isClass))
+ parents filterNot (_.typeSymbol eq ObjectClass)
+ else
+ removeRedundantObjects(parents)
+ }
+
+ /** Flatten curried parameter lists of a method type. */
+ def allParameters(tpe: Type): List[Symbol] = tpe match {
+ case MethodType(params, res) => params ::: allParameters(res)
+ case _ => Nil
+ }
+
+ // NB: Scala infix precedence makes this parse as
+ // ("" + tp).stripPrefix(fullName + "."), i.e. drop the enclosing package.
+ def typeStringNoPackage(tp: Type) =
+ "" + tp stripPrefix tp.typeSymbol.enclosingPackage.fullName + "."
+
+ def briefParentsString(parents: List[Type]) =
+ normalizedParents(parents) map typeStringNoPackage mkString " with "
+
+ def parentsString(parents: List[Type]) =
+ normalizedParents(parents) mkString " with "
+
+ // Renders the [tparams] / (params) sections of a signature for diagnostics.
+ def typeParamsString(tp: Type) = tp match {
+ case PolyType(tparams, _) => tparams map (_.defString) mkString ("[", ",", "]")
+ case _ => ""
+ }
+ def valueParamsString(tp: Type) = tp match {
+ case MethodType(params, _) => params map (_.defString) mkString ("(", ",", ")")
+ case _ => ""
+ }
+
+ // members of class java.lang.{ Object, String }
+ // Entered synthetically; see also the hijacked/synthesized lists below.
+ lazy val Object_## = enterNewMethod(ObjectClass, nme.HASHHASH, Nil, inttype, FINAL)
+ lazy val Object_== = enterNewMethod(ObjectClass, nme.EQ, anyrefparam, booltype, FINAL)
+ lazy val Object_!= = enterNewMethod(ObjectClass, nme.NE, anyrefparam, booltype, FINAL)
+ lazy val Object_eq = enterNewMethod(ObjectClass, nme.eq, anyrefparam, booltype, FINAL)
+ lazy val Object_ne = enterNewMethod(ObjectClass, nme.ne, anyrefparam, booltype, FINAL)
+ lazy val Object_isInstanceOf = newT1NoParamsMethod(ObjectClass, nme.isInstanceOf_Ob, FINAL | SYNTHETIC)(_ => booltype)
+ lazy val Object_asInstanceOf = newT1NoParamsMethod(ObjectClass, nme.asInstanceOf_Ob, FINAL | SYNTHETIC)(_.typeConstructor)
+ // Created with signature: final def synchronized[T](x: T): T
+ lazy val Object_synchronized = newPolyMethod(1, ObjectClass, nme.synchronized_, FINAL)(tps =>
+ (Some(List(tps.head.typeConstructor)), tps.head.typeConstructor)
+ )
+ lazy val String_+ = enterNewMethod(StringClass, nme.raw.PLUS, anyparam, stringtype, FINAL)
+
+ def Object_getClass = getMemberMethod(ObjectClass, nme.getClass_)
+ def Object_clone = getMemberMethod(ObjectClass, nme.clone_)
+ def Object_finalize = getMemberMethod(ObjectClass, nme.finalize_)
+ def Object_notify = getMemberMethod(ObjectClass, nme.notify_)
+ def Object_notifyAll = getMemberMethod(ObjectClass, nme.notifyAll_)
+ def Object_equals = getMemberMethod(ObjectClass, nme.equals_)
+ def Object_hashCode = getMemberMethod(ObjectClass, nme.hashCode_)
+ def Object_toString = getMemberMethod(ObjectClass, nme.toString_)
+
+ // boxed classes
+ lazy val ObjectRefClass = requiredClass[scala.runtime.ObjectRef[_]]
+ lazy val VolatileObjectRefClass = requiredClass[scala.runtime.VolatileObjectRef[_]]
+ lazy val RuntimeStaticsModule = getRequiredModule("scala.runtime.Statics")
+ lazy val BoxesRunTimeModule = getRequiredModule("scala.runtime.BoxesRunTime")
+ lazy val BoxesRunTimeClass = BoxesRunTimeModule.moduleClass
+ lazy val BoxedNumberClass = getClassByName(sn.BoxedNumber)
+ lazy val BoxedCharacterClass = getClassByName(sn.BoxedCharacter)
+ lazy val BoxedBooleanClass = getClassByName(sn.BoxedBoolean)
+ lazy val BoxedByteClass = requiredClass[java.lang.Byte]
+ lazy val BoxedShortClass = requiredClass[java.lang.Short]
+ lazy val BoxedIntClass = requiredClass[java.lang.Integer]
+ lazy val BoxedLongClass = requiredClass[java.lang.Long]
+ lazy val BoxedFloatClass = requiredClass[java.lang.Float]
+ lazy val BoxedDoubleClass = requiredClass[java.lang.Double]
+
+ lazy val Boxes_isNumberOrBool = getDecl(BoxesRunTimeClass, nme.isBoxedNumberOrBoolean)
+ lazy val Boxes_isNumber = getDecl(BoxesRunTimeClass, nme.isBoxedNumber)
+
+ lazy val BoxedUnitClass = requiredClass[scala.runtime.BoxedUnit]
+ lazy val BoxedUnitModule = getRequiredModule("scala.runtime.BoxedUnit")
+ def BoxedUnit_UNIT = getMemberValue(BoxedUnitModule, nme.UNIT)
+ def BoxedUnit_TYPE = getMemberValue(BoxedUnitModule, nme.TYPE_)
+
+ // Annotation base classes
+ lazy val AnnotationClass = requiredClass[scala.annotation.Annotation]
+ lazy val ClassfileAnnotationClass = requiredClass[scala.annotation.ClassfileAnnotation]
+ lazy val StaticAnnotationClass = requiredClass[scala.annotation.StaticAnnotation]
+
+ // Annotations
+ lazy val BridgeClass = requiredClass[scala.annotation.bridge]
+ lazy val ElidableMethodClass = requiredClass[scala.annotation.elidable]
+ lazy val ImplicitNotFoundClass = requiredClass[scala.annotation.implicitNotFound]
+ lazy val MigrationAnnotationClass = requiredClass[scala.annotation.migration]
+ lazy val ScalaStrictFPAttr = requiredClass[scala.annotation.strictfp]
+ lazy val SerializableAttr = requiredClass[scala.annotation.serializable] // @serializable is deprecated
+ lazy val SwitchClass = requiredClass[scala.annotation.switch]
+ lazy val TailrecClass = requiredClass[scala.annotation.tailrec]
+ lazy val VarargsClass = requiredClass[scala.annotation.varargs]
+ lazy val uncheckedStableClass = requiredClass[scala.annotation.unchecked.uncheckedStable]
+ lazy val uncheckedVarianceClass = requiredClass[scala.annotation.unchecked.uncheckedVariance]
+
+ lazy val BeanPropertyAttr = requiredClass[scala.beans.BeanProperty]
+ lazy val BooleanBeanPropertyAttr = requiredClass[scala.beans.BooleanBeanProperty]
+ lazy val CloneableAttr = requiredClass[scala.annotation.cloneable]
+ lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.reflect.internal.annotations.compileTimeOnly")
+ lazy val DeprecatedAttr = requiredClass[scala.deprecated]
+ lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName]
+ lazy val DeprecatedInheritanceAttr = requiredClass[scala.deprecatedInheritance]
+ lazy val DeprecatedOverridingAttr = requiredClass[scala.deprecatedOverriding]
+ lazy val NativeAttr = requiredClass[scala.native]
+ lazy val RemoteAttr = requiredClass[scala.remote]
+ lazy val ScalaInlineClass = requiredClass[scala.inline]
+ lazy val ScalaNoInlineClass = requiredClass[scala.noinline]
+ lazy val SerialVersionUIDAttr = requiredClass[scala.SerialVersionUID]
+ lazy val SpecializedClass = requiredClass[scala.specialized]
+ lazy val ThrowsClass = requiredClass[scala.throws[_]]
+ lazy val TransientAttr = requiredClass[scala.transient]
+ lazy val UncheckedClass = requiredClass[scala.unchecked]
+ lazy val UncheckedBoundsClass = getClassIfDefined("scala.reflect.internal.annotations.uncheckedBounds")
+ lazy val UnspecializedClass = requiredClass[scala.annotation.unspecialized]
+ lazy val VolatileAttr = requiredClass[scala.volatile]
+
+ // Meta-annotations (scala.annotation.meta._) that direct where a field's
+ // annotations are copied (field, getter, setter, bean accessors, ...).
+ lazy val BeanGetterTargetClass = requiredClass[meta.beanGetter]
+ lazy val BeanSetterTargetClass = requiredClass[meta.beanSetter]
+ lazy val FieldTargetClass = requiredClass[meta.field]
+ lazy val GetterTargetClass = requiredClass[meta.getter]
+ lazy val ParamTargetClass = requiredClass[meta.param]
+ lazy val SetterTargetClass = requiredClass[meta.setter]
+ lazy val ClassTargetClass = requiredClass[meta.companionClass]
+ lazy val ObjectTargetClass = requiredClass[meta.companionObject]
+ lazy val MethodTargetClass = requiredClass[meta.companionMethod] // TODO: module, moduleClass? package, packageObject?
+ lazy val LanguageFeatureAnnot = requiredClass[meta.languageFeature]
+
+ // Language features (SIP-18 import-controlled features)
+ lazy val languageFeatureModule = getRequiredModule("scala.languageFeature")
+ lazy val experimentalModule = getMemberModule(languageFeatureModule, nme.experimental)
+ lazy val MacrosFeature = getLanguageFeature("macros", experimentalModule)
+ lazy val DynamicsFeature = getLanguageFeature("dynamics")
+ lazy val PostfixOpsFeature = getLanguageFeature("postfixOps")
+ lazy val ReflectiveCallsFeature = getLanguageFeature("reflectiveCalls")
+ lazy val ImplicitConversionsFeature = getLanguageFeature("implicitConversions")
+ lazy val HigherKindsFeature = getLanguageFeature("higherKinds")
+ lazy val ExistentialsFeature = getLanguageFeature("existentials")
+
+ // True if `sym` is one of the annotation-target meta-annotations above,
+ // following alias types so aliased (deprecated) locations also match.
+ def isMetaAnnotation(sym: Symbol): Boolean = metaAnnotations(sym) || (
+ // Trying to allow for deprecated locations
+ sym.isAliasType && isMetaAnnotation(sym.info.typeSymbol)
+ )
+ lazy val metaAnnotations = Set[Symbol](
+ FieldTargetClass, ParamTargetClass,
+ GetterTargetClass, SetterTargetClass,
+ BeanGetterTargetClass, BeanSetterTargetClass
+ )
+
+ // Synthetic stand-in for the classfile "AnnotationDefault" attribute (#2264).
+ lazy val AnnotationDefaultAttr: ClassSymbol = {
+ val attr = enterNewClass(RuntimePackageClass, tpnme.AnnotationDefaultATTR, List(AnnotationClass.tpe))
+ // This attribute needs a constructor so that modifiers in parsed Java code make sense
+ attr.info.decls enter attr.newClassConstructor(NoPosition)
+ attr
+ }
+
+ @deprecated("Moved to rootMirror.getClass", "2.10.0")
+ def getClass(fullname: Name): ClassSymbol = rootMirror.getClassByName(fullname)
+
+ @deprecated("Moved to rootMirror.getModule", "2.10.0")
+ def getModule(fullname: Name): ModuleSymbol = rootMirror.getModule(fullname)
+
+ // Uniform failure path for the getMember*/getDecl family below.
+ private def fatalMissingSymbol(owner: Symbol, name: Name, what: String = "member") = {
+ throw new FatalError(owner + " does not have a " + what + " " + name)
+ }
+
+ def getLanguageFeature(name: String, owner: Symbol = languageFeatureModule): Symbol = getMember(owner, newTypeName(name))
+
+ def termMember(owner: Symbol, name: String): Symbol = owner.info.member(newTermName(name))
+ def typeMember(owner: Symbol, name: String): Symbol = owner.info.member(newTypeName(name))
+
+ // Resolves a dotted name relative to `root`; NoSymbol when the first
+ // segment does not match root's simple name.
+ def findNamedMember(fullName: Name, root: Symbol): Symbol = {
+ val segs = nme.segments(fullName.toString, fullName.isTermName)
+ if (segs.isEmpty || segs.head != root.simpleName) NoSymbol
+ else findNamedMember(segs.tail, root)
+ }
+ def findNamedMember(segs: List[Name], root: Symbol): Symbol =
+ if (segs.isEmpty) root
+ else findNamedMember(segs.tail, root.info member segs.head)
+
+ // Like getMemberIfDefined, but fatal when nothing is found; after the
+ // flatten phase a nested type name is retried under its flattened
+ // (owner$name) form in the enclosing package.
+ def getMember(owner: Symbol, name: Name): Symbol = {
+ getMemberIfDefined(owner, name) orElse {
+ if (phase.flatClasses && name.isTypeName && !owner.isPackageObjectOrClass) {
+ val pkg = owner.owner
+ val flatname = tpnme.flattenedName(owner.name, name)
+ getMember(pkg, flatname)
+ }
+ else fatalMissingSymbol(owner, name)
+ }
+ }
+ // Kind-checked variants of getMember; each fails fatally when the member
+ // exists but has an unexpected symbol kind.
+ def getMemberValue(owner: Symbol, name: Name): TermSymbol = {
+ getMember(owner, name.toTermName) match {
+ case x: TermSymbol => x
+ case _ => fatalMissingSymbol(owner, name, "member value")
+ }
+ }
+ def getMemberModule(owner: Symbol, name: Name): ModuleSymbol = {
+ getMember(owner, name.toTermName) match {
+ case x: ModuleSymbol => x
+ case _ => fatalMissingSymbol(owner, name, "member object")
+ }
+ }
+ def getTypeMember(owner: Symbol, name: Name): TypeSymbol = {
+ getMember(owner, name.toTypeName) match {
+ case x: TypeSymbol => x
+ case _ => fatalMissingSymbol(owner, name, "type member")
+ }
+ }
+ // Kind-checked variant of getMember for classes; fatal when the member is
+ // missing or is not a ClassSymbol.
+ // Fix: dropped the unused `val y = getMember(...)`, which duplicated the
+ // (non-trivial) member lookup for no effect.
+ def getMemberClass(owner: Symbol, name: Name): ClassSymbol = {
+ getMember(owner, name.toTypeName) match {
+ case x: ClassSymbol => x
+ case _ => fatalMissingSymbol(owner, name, "member class")
+ }
+ }
+ // Kind-checked getMember for methods. Accepts any TermSymbol because member
+ // symbols become plain term symbols during cleanup (see todo below).
+ def getMemberMethod(owner: Symbol, name: Name): TermSymbol = {
+ getMember(owner, name.toTermName) match {
+ // todo. member symbol becomes a term symbol in cleanup. is this a bug?
+ // case x: MethodSymbol => x
+ case x: TermSymbol => x
+ case _ => fatalMissingSymbol(owner, name, "method")
+ }
+ }
+
+ def getMemberIfDefined(owner: Symbol, name: Name): Symbol =
+ owner.info.nonPrivateMember(name)
+
+ /** Using getDecl rather than getMember may avoid issues with
+ * OverloadedTypes turning up when you don't want them, if you
+ * know the method in question is uniquely declared in the given owner.
+ */
+ def getDecl(owner: Symbol, name: Name): Symbol = {
+ getDeclIfDefined(owner, name) orElse fatalMissingSymbol(owner, name, "decl")
+ }
+ def getDeclIfDefined(owner: Symbol, name: Name): Symbol =
+ owner.info.nonPrivateDecl(name)
+
+ def packageExists(packageName: String): Boolean =
+ getModuleIfDefined(packageName).isPackage
+
+ // Creates and enters a type alias in `owner`.
+ private def newAlias(owner: Symbol, name: TypeName, alias: Type): AliasTypeSymbol =
+ owner.newAliasType(name) setInfoAndEnter alias
+
+ // Creates one of the special one-type-parameter marker classes owned by
+ // package scala (callers not visible in this chunk — presumably the
+ // <byname>/<repeated> parameter classes; confirm at the use sites).
+ private def specialPolyClass(name: TypeName, flags: Long)(parentFn: Symbol => Type): ClassSymbol = {
+ val clazz = enterNewClass(ScalaPackageClass, name, Nil)
+ val tparam = clazz.newSyntheticTypeParam("T0", flags)
+ val parents = List(AnyRefClass.tpe, parentFn(tparam))
+
+ clazz setInfo GenPolyType(List(tparam), ClassInfoType(parents, newScope, clazz))
+ }
+
+ // Creates and enters a synthetic polymorphic method on `owner`. createFn
+ // returns (Some(paramTypes), resultType) for a parameterful method, or
+ // (None, resultType) for a nullary one.
+ def newPolyMethod(typeParamCount: Int, owner: Symbol, name: TermName, flags: Long)(createFn: PolyMethodCreator): MethodSymbol = {
+ val msym = owner.newMethod(name.encode, NoPosition, flags)
+ val tparams = msym.newSyntheticTypeParams(typeParamCount)
+ val mtpe = createFn(tparams) match {
+ case (Some(formals), restpe) => MethodType(msym.newSyntheticValueParams(formals), restpe)
+ case (_, restpe) => NullaryMethodType(restpe)
+ }
+
+ msym setInfoAndEnter genPolyType(tparams, mtpe)
+ }
+
+ /** T1 means one type parameter.
+ */
+ def newT1NullaryMethod(owner: Symbol, name: TermName, flags: Long)(createFn: Symbol => Type): MethodSymbol = {
+ newPolyMethod(1, owner, name, flags)(tparams => (None, createFn(tparams.head)))
+ }
+ def newT1NoParamsMethod(owner: Symbol, name: TermName, flags: Long)(createFn: Symbol => Type): MethodSymbol = {
+ newPolyMethod(1, owner, name, flags)(tparams => (Some(Nil), createFn(tparams.head)))
+ }
+
+ // Sets used to recognize box/unbox methods and boxed classes by symbol.
+ lazy val boxedClassValues = boxedClass.values.toSet[Symbol]
+ lazy val isUnbox = unboxMethod.values.toSet[Symbol]
+ lazy val isBox = boxMethod.values.toSet[Symbol]
+
+ /** Is symbol a phantom class for which no runtime representation exists? */
+ lazy val isPhantomClass = Set[Symbol](AnyClass, AnyValClass, NullClass, NothingClass)
+ /** Lists core classes that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
+ lazy val syntheticCoreClasses = List(
+ AnnotationDefaultAttr, // #2264
+ RepeatedParamClass,
+ JavaRepeatedParamClass,
+ ByNameParamClass,
+ AnyClass,
+ AnyRefClass,
+ AnyValClass,
+ NullClass,
+ NothingClass,
+ SingletonClass,
+ EqualsPatternClass
+ )
+ /** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
+ lazy val syntheticCoreMethods = List(
+ Any_==,
+ Any_!=,
+ Any_equals,
+ Any_hashCode,
+ Any_toString,
+ Any_getClass,
+ Any_isInstanceOf,
+ Any_asInstanceOf,
+ Any_##,
+ Object_eq,
+ Object_ne,
+ Object_==,
+ Object_!=,
+ Object_##,
+ Object_synchronized,
+ Object_isInstanceOf,
+ Object_asInstanceOf,
+ String_+
+ )
+ /** Lists core classes that do have underlying bytecode, but are adjusted on-the-fly in every reflection universe */
+ lazy val hijackedCoreClasses = List(
+ ComparableClass,
+ JavaSerializableClass
+ )
+ /** Lists symbols that are synthesized or hijacked by the compiler.
+ *
+ * Such symbols either don't have any underlying bytecode at all ("synthesized")
+ * or get loaded from bytecode but have their metadata adjusted ("hijacked").
+ */
+ lazy val symbolsNotPresentInBytecode = syntheticCoreClasses ++ syntheticCoreMethods ++ hijackedCoreClasses
+
+ /** Is the symbol that of a parent which is added during parsing? */
+ lazy val isPossibleSyntheticParent = ProductClass.toSet[Symbol] + ProductRootClass + SerializableClass
+
+ private lazy val boxedValueClassesSet = boxedClass.values.toSet[Symbol] + BoxedUnitClass
+
+ /** Is symbol a value class? */
+ def isPrimitiveValueClass(sym: Symbol) = ScalaValueClasses contains sym
+ def isNonUnitValueClass(sym: Symbol) = isPrimitiveValueClass(sym) && (sym != UnitClass)
+ def isSpecializableClass(sym: Symbol) = isPrimitiveValueClass(sym) || (sym == AnyRefClass)
+ def isPrimitiveValueType(tp: Type) = isPrimitiveValueClass(tp.typeSymbol)
+
+ /** Is symbol a boxed value class, e.g. java.lang.Integer? */
+ def isBoxedValueClass(sym: Symbol) = boxedValueClassesSet(sym)
+
+ /** If symbol is a value class (boxed or not), return the unboxed
+ * value class. Otherwise, NoSymbol.
+ */
+ def unboxedValueClass(sym: Symbol): Symbol =
+ if (isPrimitiveValueClass(sym)) sym
+ else if (sym == BoxedUnitClass) UnitClass
+ else boxedClass.map(kvp => (kvp._2: Symbol, kvp._1)).getOrElse(sym, NoSymbol)
+
+ /** Is type's symbol a numeric value class? */
+ def isNumericValueType(tp: Type): Boolean = tp match {
+ case TypeRef(_, sym, _) => isNumericValueClass(sym)
+ case _ => false
+ }
+
+ // todo: reconcile with javaSignature!!!
+ // Builds a JVM-descriptor-style string for `tp` after a crude local erasure:
+ // arrays render as "[<elem>", primitives as their abbrvTag character,
+ // references as "L<flat name>;" — non-array results use a dotted flat name.
+ def signature(tp: Type): String = {
+ def erasure(tp: Type): Type = tp match {
+ case st: SubType => erasure(st.supertype)
+ case RefinedType(parents, _) => erasure(parents.head)
+ case _ => tp
+ }
+ // NOTE(review): the `separator` parameter is never used below — the
+ // recursive branch always joins with NAME_JOIN_STRING; confirm intended.
+ def flatNameString(sym: Symbol, separator: Char): String =
+ if (sym == NoSymbol) "" // be more resistant to error conditions, e.g. neg/t3222.scala
+ else if (sym.owner.isPackageClass) sym.javaClassName
+ else flatNameString(sym.owner, separator) + nme.NAME_JOIN_STRING + sym.simpleName
+ def signature1(etp: Type): String = {
+ if (etp.typeSymbol == ArrayClass) "[" + signature1(erasure(etp.normalize.typeArgs.head))
+ else if (isPrimitiveValueClass(etp.typeSymbol)) abbrvTag(etp.typeSymbol).toString()
+ else "L" + flatNameString(etp.typeSymbol, '/') + ";"
+ }
+ val etp = erasure(tp)
+ if (etp.typeSymbol == ArrayClass) signature1(etp)
+ else flatNameString(etp.typeSymbol, '.')
+ }
+
+ /** Surgery on the value classes. Without this, AnyVals defined in source
+ * files end up with an AnyRef parent. It is likely there is a better way
+ * to evade that AnyRef.
+ */
+ private def setParents(sym: Symbol, parents: List[Type]): Symbol = sym.rawInfo match {
+ case ClassInfoType(_, scope, clazz) =>
+ sym setInfo ClassInfoType(parents, scope, clazz)
+ case _ =>
+ sym
+ }
+
+ // Idempotent: forces every synthesized/hijacked symbol once, then marks
+ // this Definitions instance as initialized.
+ def init() {
+ if (isInitialized) return
+ // force initialization of every symbol that is synthesized or hijacked by the compiler
+ val forced = symbolsNotPresentInBytecode
+ isInitialized = true
+ } //init
+
+ // Counter used to generate unique names for synthetic .NET "scala caller"
+ // forwarder methods below.
+ var nbScalaCallers: Int = 0
+ // Creates, enters on DelegateClass, and records a fresh static forwarder
+ // method $scalaCaller$$N(obj: Object): delegateType. .NET backend only.
+ def newScalaCaller(delegateType: Type): MethodSymbol = {
+ assert(forMSIL, "scalaCallers can only be created if target is .NET")
+ // object: reference to object on which to call (scala-)method
+ val paramTypes: List[Type] = List(ObjectClass.tpe)
+ val name = newTermName("$scalaCaller$$" + nbScalaCallers)
+ // tparam => resultType, which is the resultType of PolyType, i.e. the result type after applying the
+ // type parameter -> a MethodType in this case
+ // TODO: set type bounds manually (-> MulticastDelegate), see newTypeParam
+ val newCaller = enterNewMethod(DelegateClass, name, paramTypes, delegateType, FINAL | STATIC)
+ // val newCaller = newPolyMethod(DelegateClass, name,
+ // tparam => MethodType(paramTypes, tparam.typeConstructor)) setFlag (FINAL | STATIC)
+ Delegate_scalaCallers = Delegate_scalaCallers ::: List(newCaller)
+ nbScalaCallers += 1
+ newCaller
+ }
+
+ // def addScalaCallerInfo(scalaCaller: Symbol, methSym: Symbol, delType: Type) {
+ // assert(Delegate_scalaCallers contains scalaCaller)
+ // Delegate_scalaCallerInfos += (scalaCaller -> (methSym, delType))
+ // }
+
+ // Records which Scala method a given scala-caller forwards to.
+ def addScalaCallerInfo(scalaCaller: Symbol, methSym: Symbol) {
+ assert(Delegate_scalaCallers contains scalaCaller)
+ Delegate_scalaCallerTargets += (scalaCaller -> methSym)
+ }
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
new file mode 100644
index 0000000..2c2ed35
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -0,0 +1,119 @@
+/* NSC -- new scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import scala.collection.{ mutable, immutable }
+import util._
+
+/** The name of this trait defines the eventual intent better than
+ * it does the initial contents.
+ */
+trait ExistentialsAndSkolems {
+ self: SymbolTable =>
+
+ /** Map a list of type parameter symbols to skolemized symbols, which
+ * can be deskolemized to the original type parameter. (A skolem is a
+ * representation of a bound variable when viewed inside its scope.)
+ * !!!Adriaan: this does not work for hk types.
+ *
+ * Skolems will be created at level 0, rather than the current value
+ * of `skolemizationLevel`. (See SI-7782)
+ */
+ def deriveFreshSkolems(tparams: List[Symbol]): List[Symbol] = {
+ class Deskolemizer extends LazyType {
+ override val typeParams = tparams
+ val typeSkolems = typeParams map (_.newTypeSkolem setInfo this)
+ override def complete(sym: Symbol) {
+ // The info of a skolem is the skolemized info of the
+ // actual type parameter of the skolem
+ sym setInfo sym.deSkolemize.info.substSym(typeParams, typeSkolems)
+ }
+ }
+
+ // Temporarily reset the level so skolems are created at level 0 (SI-7782).
+ val saved = skolemizationLevel
+ skolemizationLevel = 0
+ try new Deskolemizer().typeSkolems
+ finally skolemizationLevel = saved
+ }
+
+ def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
+ sym.isTypeParameter && sym.owner.isJavaDefined
+
+ /** If we map a set of hidden symbols to their existential bounds, we
+ * have a problem: the bounds may themselves contain references to the
+ * hidden symbols. So this recursively calls existentialBound until
+ * the typeSymbol is not amongst the symbols being hidden.
+ */
+ private def existentialBoundsExcludingHidden(hidden: List[Symbol]): Map[Symbol, Type] = {
+ def safeBound(t: Type): Type =
+ if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.bounds.hi) else t
+
+ def hiBound(s: Symbol): Type = safeBound(s.existentialBound.bounds.hi) match {
+ case tp @ RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve safeBound
+ if (parents eq parents1) tp
+ else copyRefinedType(tp, parents1, decls)
+ case tp => tp
+ }
+
+ // Hanging onto lower bound in case anything interesting
+ // happens with it.
+ mapFrom(hidden)(s => s.existentialBound match {
+ case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s))
+ case _ => hiBound(s)
+ })
+ }
+
+ /** Given a set `rawSyms` of term- and type-symbols, and a type
+ * `tp`, produce a set of fresh type parameters and a type so that
+ * it can be abstracted to an existential type. Every type symbol
+ * `T` in `rawSyms` is mapped to a clone. Every term symbol `x` of
+ * type `T` in `rawSyms` is given an associated type symbol of the
+ * following form:
+ *
+ * type x.type <: T with Singleton
+ *
+ * The name of the type parameter is `x.type`, to produce nice
+ * diagnostics. The Singleton parent ensures that the type
+ * parameter is still seen as a stable type. Type symbols in
+ * rawSyms are fully replaced by the new symbols. Term symbols are
+ * also replaced, except for term symbols of an Ident tree, where
+ * only the type of the Ident is changed.
+ */
+ final def existentialTransform[T](rawSyms: List[Symbol], tp: Type, rawOwner: Option[Symbol] = None)(creator: (List[Symbol], Type) => T): T = {
+ val allBounds = existentialBoundsExcludingHidden(rawSyms)
+ val typeParams: List[Symbol] = rawSyms map { sym =>
+ // Term symbols get a singleton-style type name (`x.type`).
+ val name = sym.name match {
+ case x: TypeName => x
+ case x => tpnme.singletonName(x)
+ }
+ def rawOwner0 = rawOwner.getOrElse(abort(s"no owner provided for existential transform over raw parameter: $sym"))
+ val bound = allBounds(sym)
+ val sowner = if (isRawParameter(sym)) rawOwner0 else sym.owner
+ val quantified = sowner.newExistential(name, sym.pos)
+
+ quantified setInfo bound.cloneInfo(quantified)
+ }
+ // Higher-kinded existentials are not yet supported, but this is
+ // tpeHK for when they are: "if a type constructor is expected/allowed,
+ // tpeHK must be called instead of tpe."
+ val typeParamTypes = typeParams map (_.tpeHK)
+ def doSubst(info: Type) = info.subst(rawSyms, typeParamTypes)
+
+ creator(typeParams map (_ modifyInfo doSubst), doSubst(tp))
+ }
+
+ /**
+ * Compute an existential type from hidden symbols `hidden` and type `tp`.
+ * @param hidden The symbols that will be existentially abstracted
+ * @param tp The original type
+ * @param rawOwner The owner for Java raw types.
+ */
+ final def packSymbols(hidden: List[Symbol], tp: Type, rawOwner: Option[Symbol] = None): Type =
+ if (hidden.isEmpty) tp
+ else existentialTransform(hidden, tp, rawOwner)(existentialAbstraction)
+}
diff --git a/src/reflect/scala/reflect/internal/FatalError.scala b/src/reflect/scala/reflect/internal/FatalError.scala
new file mode 100644
index 0000000..a084fc2
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/FatalError.scala
@@ -0,0 +1,6 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.reflect.internal
+case class FatalError(msg: String) extends Exception(msg)
diff --git a/src/reflect/scala/reflect/internal/FlagSets.scala b/src/reflect/scala/reflect/internal/FlagSets.scala
new file mode 100644
index 0000000..6a3b687
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/FlagSets.scala
@@ -0,0 +1,45 @@
+package scala.reflect
+package internal
+
+import scala.language.implicitConversions
+
+trait FlagSets extends api.FlagSets { self: SymbolTable =>
+
+ type FlagSet = Long
+ implicit val FlagSetTag = ClassTag[FlagSet](classOf[FlagSet])
+
+ implicit def addFlagOps(left: FlagSet): FlagOps =
+ new FlagOpsImpl(left)
+
+ private class FlagOpsImpl(left: Long) extends FlagOps {
+ def | (right: Long): Long = left | right
+ }
+
+ val NoFlags: FlagSet = 0L
+
+ object Flag extends FlagValues {
+ val TRAIT : FlagSet = Flags.TRAIT
+ val INTERFACE : FlagSet = Flags.INTERFACE
+ val MUTABLE : FlagSet = Flags.MUTABLE
+ val MACRO : FlagSet = Flags.MACRO
+ val DEFERRED : FlagSet = Flags.DEFERRED
+ val ABSTRACT : FlagSet = Flags.ABSTRACT
+ val FINAL : FlagSet = Flags.FINAL
+ val SEALED : FlagSet = Flags.SEALED
+ val IMPLICIT : FlagSet = Flags.IMPLICIT
+ val LAZY : FlagSet = Flags.LAZY
+ val OVERRIDE : FlagSet = Flags.OVERRIDE
+ val PRIVATE : FlagSet = Flags.PRIVATE
+ val PROTECTED : FlagSet = Flags.PROTECTED
+ val LOCAL : FlagSet = Flags.LOCAL
+ val CASE : FlagSet = Flags.CASE
+ val ABSOVERRIDE : FlagSet = Flags.ABSOVERRIDE
+ val BYNAMEPARAM : FlagSet = Flags.BYNAMEPARAM
+ val PARAM : FlagSet = Flags.PARAM
+ val COVARIANT : FlagSet = Flags.COVARIANT
+ val CONTRAVARIANT : FlagSet = Flags.CONTRAVARIANT
+ val DEFAULTPARAM : FlagSet = Flags.DEFAULTPARAM
+ val PRESUPER : FlagSet = Flags.PRESUPER
+ val DEFAULTINIT : FlagSet = Flags.DEFAULTINIT
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala
new file mode 100644
index 0000000..5ebe02d
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Flags.scala
@@ -0,0 +1,500 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import scala.collection.{ mutable, immutable }
+
+// Flags at each index of a flags Long. Those marked with /M are used in
+// Parsers/JavaParsers and therefore definitely appear on Modifiers; but the
+// absence of /M on the other flags does not imply they aren't.
+//
+// Generated by mkFlagsTable() at Thu Feb 02 20:31:52 PST 2012
+//
+// 0: PROTECTED/M
+// 1: OVERRIDE/M
+// 2: PRIVATE/M
+// 3: ABSTRACT/M
+// 4: DEFERRED/M
+// 5: FINAL/M
+// 6: METHOD
+// 7: INTERFACE/M
+// 8: MODULE
+// 9: IMPLICIT/M
+// 10: SEALED/M
+// 11: CASE/M
+// 12: MUTABLE/M
+// 13: PARAM/M
+// 14: PACKAGE
+// 15: MACRO/M
+// 16: BYNAMEPARAM/M CAPTURED COVARIANT/M
+// 17: CONTRAVARIANT/M INCONSTRUCTOR LABEL
+// 18: ABSOVERRIDE/M
+// 19: LOCAL/M
+// 20: JAVA/M
+// 21: SYNTHETIC
+// 22: STABLE
+// 23: STATIC/M
+// 24: CASEACCESSOR/M
+// 25: DEFAULTPARAM/M TRAIT/M
+// 26: BRIDGE
+// 27: ACCESSOR
+// 28: SUPERACCESSOR
+// 29: PARAMACCESSOR/M
+// 30: MODULEVAR
+// 31: LAZY/M
+// 32: IS_ERROR
+// 33: OVERLOADED
+// 34: LIFTED
+// 35: EXISTENTIAL MIXEDIN
+// 36: EXPANDEDNAME
+// 37: IMPLCLASS PRESUPER/M
+// 38: TRANS_FLAG
+// 39: LOCKED
+// 40: SPECIALIZED
+// 41: DEFAULTINIT/M
+// 42: VBRIDGE
+// 43: VARARGS
+// 44: TRIEDCOOKING
+// 45: SYNCHRONIZED/M
+// 46: ARTIFACT
+// 47: DEFAULTMETHOD/M
+// 48:
+// 49:
+// 50:
+// 51: lateDEFERRED
+// 52: lateFINAL
+// 53: lateMETHOD
+// 54: lateINTERFACE
+// 55: lateMODULE
+// 56: notPROTECTED
+// 57: notOVERRIDE
+// 58: notPRIVATE
+// 59:
+// 60:
+// 61:
+// 62:
+// 63:
+
+/** Flags set on Modifiers instances in the parsing stage.
+ */
+class ModifierFlags {
+ final val IMPLICIT = 1 << 9
+ final val FINAL = 1 << 5 // May not be overridden. Note that java final implies much more than scala final.
+ final val PRIVATE = 1 << 2
+ final val PROTECTED = 1 << 0
+
+ final val SEALED = 1 << 10
+ final val OVERRIDE = 1 << 1
+ final val CASE = 1 << 11
+ final val ABSTRACT = 1 << 3 // abstract class, or used in conjunction with abstract override.
+ // Note difference to DEFERRED!
+ final val DEFERRED = 1 << 4 // was `abstract' for members | trait is virtual
+ final val INTERFACE = 1 << 7 // symbol is an interface (i.e. a trait which defines only abstract methods)
+ final val MUTABLE = 1 << 12 // symbol is a mutable variable.
+ final val PARAM = 1 << 13 // symbol is a (value or type) parameter to a method
+ final val MACRO = 1 << 15 // symbol is a macro definition
+
+ final val COVARIANT = 1 << 16 // symbol is a covariant type variable
+ final val BYNAMEPARAM = 1 << 16 // parameter is by name
+ final val CONTRAVARIANT = 1 << 17 // symbol is a contravariant type variable
+ final val ABSOVERRIDE = 1 << 18 // combination of abstract & override
+ final val LOCAL = 1 << 19 // symbol is local to current class (i.e. private[this] or protected[this]
+ // pre: PRIVATE or PROTECTED are also set
+ final val JAVA = 1 << 20 // symbol was defined by a Java class
+ final val STATIC = 1 << 23 // static field, method or class
+ final val CASEACCESSOR = 1 << 24 // symbol is a case parameter (or its accessor, or a GADT skolem)
+ final val TRAIT = 1 << 25 // symbol is a trait
+ final val DEFAULTPARAM = 1 << 25 // the parameter has a default value
+ final val PARAMACCESSOR = 1 << 29 // for field definitions generated for primary constructor
+ // parameters (no matter if it's a 'val' parameter or not)
+ // for parameters of a primary constructor ('val' or not)
+ // for the accessor methods generated for 'val' or 'var' parameters
+ final val LAZY = 1L << 31 // symbol is a lazy val. can't have MUTABLE unless transformed by typer
+ final val PRESUPER = 1L << 37 // value is evaluated before super call
+ final val DEFAULTINIT = 1L << 41 // symbol is initialized to the default value: used by -Xcheckinit
+ // ARTIFACT at #46 in 2.11+
+ final val DEFAULTMETHOD = 1L << 47 // symbol is a java default method
+
+ // Overridden.
+ def flagToString(flag: Long): String = ""
+
+ final val PrivateLocal = PRIVATE | LOCAL
+ final val ProtectedLocal = PROTECTED | LOCAL
+ final val AccessFlags = PRIVATE | PROTECTED | LOCAL
+}
+object ModifierFlags extends ModifierFlags
+
+/** All flags and associated operatins */
+class Flags extends ModifierFlags {
+ final val METHOD = 1 << 6 // a method
+ final val MODULE = 1 << 8 // symbol is module or class implementing a module
+ final val PACKAGE = 1 << 14 // symbol is a java package
+
+ final val CAPTURED = 1 << 16 // variable is accessed from nested function. Set by LambdaLift.
+ final val LABEL = 1 << 17 // method symbol is a label. Set by TailCall
+ final val INCONSTRUCTOR = 1 << 17 // class symbol is defined in this/superclass constructor.
+ final val SYNTHETIC = 1 << 21 // symbol is compiler-generated (compare with ARTIFACT)
+ final val STABLE = 1 << 22 // functions that are assumed to be stable
+ // (typically, access methods for valdefs)
+ // or classes that do not contain abstract types.
+ final val BRIDGE = 1 << 26 // function is a bridge method. Set by Erasure
+ final val ACCESSOR = 1 << 27 // a value or variable accessor (getter or setter)
+
+ final val SUPERACCESSOR = 1 << 28 // a super accessor
+ final val MODULEVAR = 1 << 30 // for variables: is the variable caching a module value
+
+ final val IS_ERROR = 1L << 32 // symbol is an error symbol
+ final val OVERLOADED = 1L << 33 // symbol is overloaded
+ final val LIFTED = 1L << 34 // class has been lifted out to package level
+ // local value has been lifted out to class level
+ // todo: make LIFTED = latePRIVATE?
+ final val MIXEDIN = 1L << 35 // term member has been mixed in
+ final val EXISTENTIAL = 1L << 35 // type is an existential parameter or skolem
+ final val EXPANDEDNAME = 1L << 36 // name has been expanded with class suffix
+ final val IMPLCLASS = 1L << 37 // symbol is an implementation class
+ final val TRANS_FLAG = 1L << 38 // transient flag guaranteed to be reset after each phase.
+
+ final val LOCKED = 1L << 39 // temporary flag to catch cyclic dependencies
+ final val SPECIALIZED = 1L << 40 // symbol is a generated specialized member
+ final val VBRIDGE = 1L << 42 // symbol is a varargs bridge
+
+ final val VARARGS = 1L << 43 // symbol is a Java-style varargs method
+ final val TRIEDCOOKING = 1L << 44 // ``Cooking'' has been tried on this symbol
+ // A Java method's type is ``cooked'' by transforming raw types to existentials
+
+ final val SYNCHRONIZED = 1L << 45 // symbol is a method which should be marked ACC_SYNCHRONIZED
+ final val ARTIFACT = 1L << 46 // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode
+
+ // ------- shift definitions -------------------------------------------------------
+
+ final val InitialFlags = 0x0001FFFFFFFFFFFFL // flags that are enabled from phase 1.
+ final val LateFlags = 0x00FE000000000000L // flags that override flags in 0x1FC.
+ final val AntiFlags = 0x7F00000000000000L // flags that cancel flags in 0x07F
+ final val LateShift = 47L
+ final val AntiShift = 56L
+
+ // Flags which sketchily share the same slot
+ // 16: BYNAMEPARAM/M CAPTURED COVARIANT/M
+ // 17: CONTRAVARIANT/M INCONSTRUCTOR LABEL
+ // 25: DEFAULTPARAM/M TRAIT/M
+ // 35: EXISTENTIAL MIXEDIN
+ // 37: IMPLCLASS PRESUPER/M
+ val OverloadedFlagsMask = 0L | BYNAMEPARAM | CONTRAVARIANT | DEFAULTPARAM | EXISTENTIAL | IMPLCLASS
+
+ // ------- late flags (set by a transformer phase) ---------------------------------
+ //
+ // Summary of when these are claimed to be first used.
+ // You can get this output with scalac -Xshow-phases -Ydebug.
+ //
+ // refchecks 7 [START] <latemethod>
+ // specialize 13 [START] <latefinal> <notprivate>
+ // explicitouter 14 [START] <notprotected>
+ // erasure 15 [START] <latedeferred> <lateinterface>
+ // mixin 20 [START] <latemodule> <notoverride>
+ //
+ // lateMETHOD set in RefChecks#transformInfo.
+ // lateFINAL set in Symbols#makeNotPrivate.
+ // notPRIVATE set in Symbols#makeNotPrivate, IExplicitOuter#transform, Inliners.
+ // notPROTECTED set in ExplicitOuter#transform.
+ // lateDEFERRED set in AddInterfaces, Mixin, etc.
+ // lateINTERFACE set in AddInterfaces#transformMixinInfo.
+ // lateMODULE set in Mixin#transformInfo.
+ // notOVERRIDE set in Mixin#preTransform.
+
+ final val lateDEFERRED = (DEFERRED: Long) << LateShift
+ final val lateFINAL = (FINAL: Long) << LateShift
+ final val lateINTERFACE = (INTERFACE: Long) << LateShift
+ final val lateMETHOD = (METHOD: Long) << LateShift
+ final val lateMODULE = (MODULE: Long) << LateShift
+
+ final val notOVERRIDE = (OVERRIDE: Long) << AntiShift
+ final val notPRIVATE = (PRIVATE: Long) << AntiShift
+ final val notPROTECTED = (PROTECTED: Long) << AntiShift
+
+ // ------- masks -----------------------------------------------------------------------
+
+ /** To be a little clearer to people who aren't habitual bit twiddlers.
+ */
+ final val AllFlags = -1L
+
+ /** These flags can be set when class or module symbol is first created.
+ * They are the only flags to survive a call to resetFlags().
+ */
+ final val TopLevelCreationFlags =
+ MODULE | PACKAGE | FINAL | JAVA
+
+ // TODO - there's no call to slap four flags onto every package.
+ final val PackageFlags = TopLevelCreationFlags
+
+ // FINAL not included here due to possibility of object overriding.
+ // In fact, FINAL should not be attached regardless. We should be able
+ // to reconstruct whether an object was marked final in source.
+ final val ModuleFlags = MODULE
+
+ /** These modifiers can be set explicitly in source programs. This is
+ * used only as the basis for the default flag mask (which ones to display
+ * when printing a normal message.)
+ */
+ final val ExplicitFlags =
+ PRIVATE | PROTECTED | ABSTRACT | FINAL | SEALED |
+ OVERRIDE | CASE | IMPLICIT | ABSOVERRIDE | LAZY | DEFAULTMETHOD
+
+ /** The two bridge flags */
+ final val BridgeFlags = BRIDGE | VBRIDGE
+ final val BridgeAndPrivateFlags = BridgeFlags | PRIVATE
+
+ /** These modifiers appear in TreePrinter output. */
+ final val PrintableFlags =
+ ExplicitFlags | BridgeFlags | LOCAL | SYNTHETIC | STABLE | CASEACCESSOR | MACRO |
+ ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | STATIC | SPECIALIZED | SYNCHRONIZED
+
+ /** When a symbol for a field is created, only these flags survive
+ * from Modifiers. Others which may be applied at creation time are:
+ * PRIVATE, LOCAL.
+ */
+ final val FieldFlags =
+ MUTABLE | CASEACCESSOR | PARAMACCESSOR | STATIC | FINAL | PRESUPER | LAZY
+
+ /** Masks for getters and setters, where the flags are derived from those
+ * on the field's modifiers. Both getters and setters get the ACCESSOR flag.
+ * Getters of immutable values also get STABLE.
+ */
+ final val GetterFlags = ~(PRESUPER | MUTABLE)
+ final val SetterFlags = ~(PRESUPER | MUTABLE | STABLE | CASEACCESSOR | IMPLICIT)
+
+ /** When a symbol for a default getter is created, it inherits these
+ * flags from the method with the default. Other flags applied at creation
+ * time are SYNTHETIC, DEFAULTPARAM, and possibly OVERRIDE, and maybe PRESUPER.
+ */
+ final val DefaultGetterFlags = PRIVATE | PROTECTED | FINAL
+
+ /** When a symbol for a method parameter is created, only these flags survive
+ * from Modifiers. Others which may be applied at creation time are:
+ * SYNTHETIC.
+ */
+ final val ValueParameterFlags = BYNAMEPARAM | IMPLICIT | DEFAULTPARAM | STABLE | SYNTHETIC
+ final val BeanPropertyFlags = DEFERRED | OVERRIDE | STATIC
+ final val VarianceFlags = COVARIANT | CONTRAVARIANT
+
+ /** These appear to be flags which should be transferred from owner symbol
+ * to a newly created constructor symbol.
+ */
+ final val ConstrFlags = JAVA
+
+ /** Module flags inherited by their module-class */
+ final val ModuleToClassFlags = AccessFlags | TopLevelCreationFlags | CASE | SYNTHETIC
+
+ /** These flags are not pickled */
+ final val FlagsNotPickled = IS_ERROR | OVERLOADED | LIFTED | TRANS_FLAG | LOCKED | TRIEDCOOKING
+
+ // A precaution against future additions to FlagsNotPickled turning out
+ // to be overloaded flags thus not-pickling more than intended.
+ assert((OverloadedFlagsMask & FlagsNotPickled) == 0, flagsToString(OverloadedFlagsMask & FlagsNotPickled))
+
+ /** These flags are pickled */
+ final val PickledFlags = InitialFlags & ~FlagsNotPickled
+
+ /** If we have a top-level class or module
+ * and someone asks us for a flag not in TopLevelPickledFlags,
+ * then we don't need unpickling to give a definite answer.
+ */
+ final val TopLevelPickledFlags = PickledFlags & ~(MODULE | METHOD | PACKAGE | PARAM | EXISTENTIAL)
+
+ def getterFlags(fieldFlags: Long): Long = ACCESSOR + (
+ if ((fieldFlags & MUTABLE) != 0) fieldFlags & ~MUTABLE & ~PRESUPER
+ else fieldFlags & ~PRESUPER | STABLE
+ )
+
+ def setterFlags(fieldFlags: Long): Long =
+ getterFlags(fieldFlags) & ~STABLE & ~CASEACCESSOR
+
+ // ------- pickling and unpickling of flags -----------------------------------------------
+
+ // The flags from 0x001 to 0x800 are different in the raw flags
+ // and in the pickled format.
+
+ private final val IMPLICIT_PKL = (1 << 0)
+ private final val FINAL_PKL = (1 << 1)
+ private final val PRIVATE_PKL = (1 << 2)
+ private final val PROTECTED_PKL = (1 << 3)
+ private final val SEALED_PKL = (1 << 4)
+ private final val OVERRIDE_PKL = (1 << 5)
+ private final val CASE_PKL = (1 << 6)
+ private final val ABSTRACT_PKL = (1 << 7)
+ private final val DEFERRED_PKL = (1 << 8)
+ private final val METHOD_PKL = (1 << 9)
+ private final val MODULE_PKL = (1 << 10)
+ private final val INTERFACE_PKL = (1 << 11)
+
+ private final val PKL_MASK = 0x00000FFF
+
+ /** Pickler correspondence, ordered roughly by frequency of occurrence */
+ private def rawPickledCorrespondence = Array[(Long, Long)](
+ (METHOD, METHOD_PKL),
+ (PRIVATE, PRIVATE_PKL),
+ (FINAL, FINAL_PKL),
+ (PROTECTED, PROTECTED_PKL),
+ (CASE, CASE_PKL),
+ (DEFERRED, DEFERRED_PKL),
+ (MODULE, MODULE_PKL),
+ (OVERRIDE, OVERRIDE_PKL),
+ (INTERFACE, INTERFACE_PKL),
+ (IMPLICIT, IMPLICIT_PKL),
+ (SEALED, SEALED_PKL),
+ (ABSTRACT, ABSTRACT_PKL)
+ )
+
+ private val mappedRawFlags = rawPickledCorrespondence map (_._1)
+ private val mappedPickledFlags = rawPickledCorrespondence map (_._2)
+
+ private class MapFlags(from: Array[Long], to: Array[Long]) extends (Long => Long) {
+ val fromSet = (0L /: from) (_ | _)
+
+ def apply(flags: Long): Long = {
+ var result = flags & ~fromSet
+ var tobeMapped = flags & fromSet
+ var i = 0
+ while (tobeMapped != 0) {
+ if ((tobeMapped & from(i)) != 0) {
+ result |= to(i)
+ tobeMapped &= ~from(i)
+ }
+ i += 1
+ }
+ result
+ }
+ }
+
+ val rawToPickledFlags: Long => Long = new MapFlags(mappedRawFlags, mappedPickledFlags)
+ val pickledToRawFlags: Long => Long = new MapFlags(mappedPickledFlags, mappedRawFlags)
+
+ // ------ displaying flags --------------------------------------------------------
+
+ // Generated by mkFlagToStringMethod() at Thu Feb 02 20:31:52 PST 2012
+ @annotation.switch override def flagToString(flag: Long): String = flag match {
+ case PROTECTED => "protected" // (1L << 0)
+ case OVERRIDE => "override" // (1L << 1)
+ case PRIVATE => "private" // (1L << 2)
+ case ABSTRACT => "abstract" // (1L << 3)
+ case DEFERRED => "<deferred>" // (1L << 4)
+ case FINAL => "final" // (1L << 5)
+ case METHOD => "<method>" // (1L << 6)
+ case INTERFACE => "<interface>" // (1L << 7)
+ case MODULE => "<module>" // (1L << 8)
+ case IMPLICIT => "implicit" // (1L << 9)
+ case SEALED => "sealed" // (1L << 10)
+ case CASE => "case" // (1L << 11)
+ case MUTABLE => "<mutable>" // (1L << 12)
+ case PARAM => "<param>" // (1L << 13)
+ case PACKAGE => "<package>" // (1L << 14)
+ case MACRO => "<macro>" // (1L << 15)
+ case BYNAMEPARAM => "<bynameparam/captured/covariant>" // (1L << 16)
+ case CONTRAVARIANT => "<contravariant/inconstructor/label>" // (1L << 17)
+ case ABSOVERRIDE => "absoverride" // (1L << 18)
+ case LOCAL => "<local>" // (1L << 19)
+ case JAVA => "<java>" // (1L << 20)
+ case SYNTHETIC => "<synthetic>" // (1L << 21)
+ case STABLE => "<stable>" // (1L << 22)
+ case STATIC => "<static>" // (1L << 23)
+ case CASEACCESSOR => "<caseaccessor>" // (1L << 24)
+ case DEFAULTPARAM => "<defaultparam/trait>" // (1L << 25)
+ case BRIDGE => "<bridge>" // (1L << 26)
+ case ACCESSOR => "<accessor>" // (1L << 27)
+ case SUPERACCESSOR => "<superaccessor>" // (1L << 28)
+ case PARAMACCESSOR => "<paramaccessor>" // (1L << 29)
+ case MODULEVAR => "<modulevar>" // (1L << 30)
+ case LAZY => "lazy" // (1L << 31)
+ case IS_ERROR => "<is_error>" // (1L << 32)
+ case OVERLOADED => "<overloaded>" // (1L << 33)
+ case LIFTED => "<lifted>" // (1L << 34)
+ case EXISTENTIAL => "<existential/mixedin>" // (1L << 35)
+ case EXPANDEDNAME => "<expandedname>" // (1L << 36)
+ case IMPLCLASS => "<implclass/presuper>" // (1L << 37)
+ case TRANS_FLAG => "<trans_flag>" // (1L << 38)
+ case LOCKED => "<locked>" // (1L << 39)
+ case SPECIALIZED => "<specialized>" // (1L << 40)
+ case DEFAULTINIT => "<defaultinit>" // (1L << 41)
+ case VBRIDGE => "<vbridge>" // (1L << 42)
+ case VARARGS => "<varargs>" // (1L << 43)
+ case TRIEDCOOKING => "<triedcooking>" // (1L << 44)
+ case SYNCHRONIZED => "<synchronized>" // (1L << 45)
+ case 0x400000000000L => "" // (1L << 46)
+ case DEFAULTMETHOD => "<defaultmethod>" // (1L << 47)
+ case 0x1000000000000L => "" // (1L << 48)
+ case 0x2000000000000L => "" // (1L << 49)
+ case 0x4000000000000L => "" // (1L << 50)
+ case `lateDEFERRED` => "<latedeferred>" // (1L << 51)
+ case `lateFINAL` => "<latefinal>" // (1L << 52)
+ case `lateMETHOD` => "<latemethod>" // (1L << 53)
+ case `lateINTERFACE` => "<lateinterface>" // (1L << 54)
+ case `lateMODULE` => "<latemodule>" // (1L << 55)
+ case `notPROTECTED` => "<notprotected>" // (1L << 56)
+ case `notOVERRIDE` => "<notoverride>" // (1L << 57)
+ case `notPRIVATE` => "<notprivate>" // (1L << 58)
+ case 0x800000000000000L => "" // (1L << 59)
+ case 0x1000000000000000L => "" // (1L << 60)
+ case 0x2000000000000000L => "" // (1L << 61)
+ case 0x4000000000000000L => "" // (1L << 62)
+ case 0x8000000000000000L => "" // (1L << 63)
+ case _ => ""
+ }
+
+ private def accessString(flags: Long, privateWithin: String)= (
+ if (privateWithin == "") {
+ if ((flags & PrivateLocal) == PrivateLocal) "private[this]"
+ else if ((flags & ProtectedLocal) == ProtectedLocal) "protected[this]"
+ else if ((flags & PRIVATE) != 0) "private"
+ else if ((flags & PROTECTED) != 0) "protected"
+ else ""
+ }
+ else if ((flags & PROTECTED) != 0) "protected[" + privateWithin + "]"
+ else "private[" + privateWithin + "]"
+ )
+
+ @deprecated("Use flagString on the flag-carrying member", "2.10.0")
+ def flagsToString(flags: Long, privateWithin: String): String = {
+ val access = accessString(flags, privateWithin)
+ val nonAccess = flagsToString(flags & ~AccessFlags)
+
+ List(nonAccess, access) filterNot (_ == "") mkString " "
+ }
+
+ @deprecated("Use flagString on the flag-carrying member", "2.10.0")
+ def flagsToString(flags: Long): String = {
+ // Fast path for common case
+ if (flags == 0L) "" else {
+ var sb: StringBuilder = null
+ var i = 0
+ while (i <= MaxBitPosition) {
+ val mask = rawFlagPickledOrder(i)
+ if ((flags & mask) != 0L) {
+ val s = flagToString(mask)
+ if (s.length > 0) {
+ if (sb eq null) sb = new StringBuilder append s
+ else if (sb.length == 0) sb append s
+ else sb append " " append s
+ }
+ }
+ i += 1
+ }
+ if (sb eq null) "" else sb.toString
+ }
+ }
+
+ // List of the raw flags, in pickled order
+ final val MaxBitPosition = 62
+
+ final val pickledListOrder: List[Long] = {
+ val all = 0 to MaxBitPosition map (1L << _)
+ val front = mappedRawFlags map (_.toLong)
+
+ front.toList ++ (all filterNot (front contains _))
+ }
+ final val rawFlagPickledOrder: Array[Long] = pickledListOrder.toArray
+}
+
+object Flags extends Flags { }
diff --git a/src/reflect/scala/reflect/internal/HasFlags.scala b/src/reflect/scala/reflect/internal/HasFlags.scala
new file mode 100644
index 0000000..12fd3a3
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/HasFlags.scala
@@ -0,0 +1,175 @@
+package scala.reflect
+package internal
+
+import Flags._
+
+/** Common code utilized by Modifiers (which carry the flags associated
+ * with Trees) and Symbol.
+ */
+trait HasFlags {
+ type AccessBoundaryType
+ type AnnotationType
+
+ /** Though both Symbol and Modifiers widen this method to public, it's
+ * defined protected here to give us the option in the future to route
+ * flag methods through accessors and disallow raw flag manipulation.
+ * And after that, perhaps, on some magical day: a typesafe enumeration.
+ */
+ protected def flags: Long
+
+ /** Access level encoding: there are three scala flags (PRIVATE, PROTECTED,
+ * and LOCAL) which combine with value privateWithin (the "foo" in private[foo])
+ * to define from where an entity can be accessed. The meanings are as follows:
+ *
+ * PRIVATE access restricted to class only.
+ * PROTECTED access restricted to class and subclasses only.
+ * LOCAL can only be set in conjunction with PRIVATE or PROTECTED.
+ * Further restricts access to the same object instance.
+ *
+ * In addition, privateWithin can be used to set a visibility barrier.
+ * When set, everything contained in the named enclosing package or class
+ * has access. It is incompatible with PRIVATE or LOCAL, but is additive
+ * with PROTECTED (i.e. if either the flags or privateWithin allow access,
+ * then it is allowed.)
+ *
+ * The java access levels translate as follows:
+ *
+ * java private: hasFlag(PRIVATE) && !hasAccessBoundary
+ * java package: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == enclosing package)
+ * java protected: hasFlag(PROTECTED) && (privateWithin == enclosing package)
+ * java public: !hasFlag(PRIVATE | PROTECTED) && !hasAccessBoundary
+ */
+ def privateWithin: AccessBoundaryType
+
+ /** A list of annotations attached to this entity.
+ */
+ def annotations: List[AnnotationType]
+
+ /** Whether this entity has a "privateWithin" visibility barrier attached.
+ */
+ def hasAccessBoundary: Boolean
+
+ /** Whether this entity has ANY of the flags in the given mask.
+ */
+ def hasFlag(flag: Long): Boolean
+
+ /** Whether this entity has ALL of the flags in the given mask.
+ */
+ def hasAllFlags(mask: Long): Boolean
+
+ /** Whether this entity has NONE of the flags in the given mask.
+ */
+ def hasNoFlags(mask: Long): Boolean = !hasFlag(mask)
+
+ /** The printable representation of this entity's flags and access boundary,
+ * restricted to flags in the given mask.
+ */
+ def flagString: String = flagString(flagMask)
+ def flagString(mask: Long): String = calculateFlagString(flags & mask)
+
+ /** The default mask determining which flags to display.
+ */
+ def flagMask: Long = AllFlags
+
+ /** The string representation of a single bit, seen from this
+ * flag carrying entity.
+ */
+ def resolveOverloadedFlag(flag: Long): String = Flags.flagToString(flag)
+
+ // Tests which come through cleanly: both Symbol and Modifiers use these
+ // identically, testing for a single flag.
+ def hasAbstractFlag = hasFlag(ABSTRACT)
+ def hasAccessorFlag = hasFlag(ACCESSOR)
+ def hasDefault = hasAllFlags(DEFAULTPARAM | PARAM)
+ def hasLocalFlag = hasFlag(LOCAL)
+ def hasModuleFlag = hasFlag(MODULE)
+ def hasPackageFlag = hasFlag(PACKAGE)
+ def hasStableFlag = hasFlag(STABLE)
+ def hasStaticFlag = hasFlag(STATIC)
+ def isAbstractOverride = hasFlag(ABSOVERRIDE)
+ def isAnyOverride = hasFlag(OVERRIDE | ABSOVERRIDE)
+ def isCase = hasFlag(CASE)
+ def isCaseAccessor = hasFlag(CASEACCESSOR)
+ def isDeferred = hasFlag(DEFERRED)
+ def isFinal = hasFlag(FINAL)
+ def isArtifact = hasFlag(ARTIFACT)
+ def isImplicit = hasFlag(IMPLICIT)
+ def isInterface = hasFlag(INTERFACE)
+ def isJavaDefined = hasFlag(JAVA)
+ def isLabel = hasAllFlags(LABEL | METHOD) && !hasAccessorFlag
+ def isLazy = hasFlag(LAZY)
+ def isLifted = hasFlag(LIFTED)
+ def isMacro = hasFlag(MACRO)
+ def isMutable = hasFlag(MUTABLE)
+ def isOverride = hasFlag(OVERRIDE)
+ def isParamAccessor = hasFlag(PARAMACCESSOR)
+ def isPrivate = hasFlag(PRIVATE)
+ def isPackage = hasFlag(PACKAGE)
+ def isPrivateLocal = hasAllFlags(PrivateLocal)
+ def isProtected = hasFlag(PROTECTED)
+ def isProtectedLocal = hasAllFlags(ProtectedLocal)
+ def isPublic = hasNoFlags(PRIVATE | PROTECTED) && !hasAccessBoundary
+ def isSealed = hasFlag(SEALED)
+ def isSpecialized = hasFlag(SPECIALIZED)
+ def isSuperAccessor = hasFlag(SUPERACCESSOR)
+ def isSynthetic = hasFlag(SYNTHETIC)
+ def isTrait = hasFlag(TRAIT) && !hasFlag(PARAM)
+
+ def isDeferredOrDefault = hasFlag(DEFERRED | DEFAULTMETHOD)
+ def isDeferredNotDefault = isDeferred && !hasFlag(DEFAULTMETHOD)
+
+ def flagBitsToString(bits: Long): String = {
+ // Fast path for common case
+ if (bits == 0L) "" else {
+ var sb: StringBuilder = null
+ var i = 0
+ while (i <= MaxBitPosition) {
+ val flag = Flags.rawFlagPickledOrder(i)
+ if ((bits & flag) != 0L) {
+ val s = resolveOverloadedFlag(flag)
+ if (s.length > 0) {
+ if (sb eq null) sb = new StringBuilder append s
+ else if (sb.length == 0) sb append s
+ else sb append " " append s
+ }
+ }
+ i += 1
+ }
+ if (sb eq null) "" else sb.toString
+ }
+ }
+
+ def accessString: String = {
+ val pw = if (hasAccessBoundary) privateWithin.toString else ""
+
+ if (pw == "") {
+ if (hasAllFlags(PrivateLocal)) "private[this]"
+ else if (hasAllFlags(ProtectedLocal)) "protected[this]"
+ else if (hasFlag(PRIVATE)) "private"
+ else if (hasFlag(PROTECTED)) "protected"
+ else ""
+ }
+ else if (hasFlag(PROTECTED)) "protected[" + pw + "]"
+ else "private[" + pw + "]"
+ }
+ protected def calculateFlagString(basis: Long): String = {
+ val access = accessString
+ val nonAccess = flagBitsToString(basis & ~AccessFlags)
+
+ if (access == "") nonAccess
+ else if (nonAccess == "") access
+ else nonAccess + " " + access
+ }
+
+ // Backward compat section
+ @deprecated( "Use isTrait", "2.10.0")
+ def hasTraitFlag = hasFlag(TRAIT)
+ @deprecated("Use hasDefault", "2.10.0")
+ def hasDefaultFlag = hasFlag(DEFAULTPARAM)
+ @deprecated("Use isValueParameter or isTypeParameter", "2.10.0")
+ def isParameter = hasFlag(PARAM)
+ @deprecated("Use flagString", "2.10.0")
+ def defaultFlagString = flagString
+ @deprecated("Use flagString(mask)", "2.10.0")
+ def hasFlagsToString(mask: Long): String = flagString(mask)
+}
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
new file mode 100644
index 0000000..9a8dee1
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -0,0 +1,466 @@
+package scala.reflect
+package internal
+
+import scala.collection.mutable.WeakHashMap
+import scala.ref.WeakReference
+
+// SI-6241: move importers to a mirror
+trait Importers extends api.Importers { self: SymbolTable =>
+
+ def mkImporter(from0: api.Universe): Importer { val from: from0.type } = (
+ if (self eq from0) {
+ new Importer {
+ val from = from0
+ val reverse = this.asInstanceOf[from.Importer{ val from: self.type }]
+ def importSymbol(sym: from.Symbol) = sym.asInstanceOf[self.Symbol]
+ def importType(tpe: from.Type) = tpe.asInstanceOf[self.Type]
+ def importTree(tree: from.Tree) = tree.asInstanceOf[self.Tree]
+ def importPosition(pos: from.Position) = pos.asInstanceOf[self.Position]
+ }
+ } else {
+ // todo. fix this loophole
+ assert(from0.isInstanceOf[SymbolTable], "`from` should be an instance of scala.reflect.internal.SymbolTable")
+ new StandardImporter { val from = from0.asInstanceOf[SymbolTable] }
+ }
+ ).asInstanceOf[Importer { val from: from0.type }]
+
+ abstract class StandardImporter extends Importer {
+
+ val from: SymbolTable
+
+ protected lazy val symMap = new Cache[from.Symbol, Symbol]()
+ protected lazy val tpeMap = new Cache[from.Type, Type]()
+ protected class Cache[K <: AnyRef, V <: AnyRef] extends WeakHashMap[K, WeakReference[V]] {
+ def weakGet(key: K): Option[V] = this get key flatMap WeakReference.unapply
+ def weakUpdate(key: K, value: V) = this.update(key, WeakReference(value))
+ }
+
+ // fixups and maps prevent stackoverflows in importer
+ var pendingSyms = 0
+ var pendingTpes = 0
+ lazy val fixups = scala.collection.mutable.MutableList[Function0[Unit]]()
+ def addFixup(fixup: => Unit): Unit = fixups += (() => fixup)
+ def tryFixup(): Unit = {
+ if (pendingSyms == 0 && pendingTpes == 0) {
+ val fixups = this.fixups.toList
+ this.fixups.clear()
+ fixups foreach { _() }
+ }
+ }
+
+ object reverse extends from.StandardImporter {
+ val from: self.type = self
+ // FIXME this and reverse should be constantly kept in sync
+ // not just synced once upon the first usage of reverse
+ for ((fromsym, WeakReference(mysym)) <- StandardImporter.this.symMap) symMap += ((mysym, WeakReference(fromsym)))
+ for ((fromtpe, WeakReference(mytpe)) <- StandardImporter.this.tpeMap) tpeMap += ((mytpe, WeakReference(fromtpe)))
+ }
+
+ // todo. careful import of positions
+ def importPosition(pos: from.Position): Position =
+ pos.asInstanceOf[Position]
+
+ def importSymbol(sym0: from.Symbol): Symbol = {
+ def doImport(sym: from.Symbol): Symbol =
+ symMap weakGet sym match {
+ case Some(result) => result
+ case _ =>
+ val myowner = importSymbol(sym.owner)
+ val mypos = importPosition(sym.pos)
+ val myname = importName(sym.name).toTermName
+ val myflags = sym.flags
+ def linkReferenced(mysym: TermSymbol, x: from.TermSymbol, op: from.Symbol => Symbol): Symbol = {
+ symMap.weakUpdate(x, mysym)
+ mysym.referenced = op(x.referenced)
+ mysym
+ }
+ val mysym = sym match {
+ case x: from.MethodSymbol =>
+ linkReferenced(myowner.newMethod(myname, mypos, myflags), x, importSymbol)
+ case x: from.ModuleSymbol =>
+ linkReferenced(myowner.newModuleSymbol(myname, mypos, myflags), x, importSymbol)
+ case x: from.FreeTermSymbol =>
+ newFreeTermSymbol(importName(x.name).toTermName, x.value, x.flags, x.origin) setInfo importType(x.info)
+ case x: from.FreeTypeSymbol =>
+ newFreeTypeSymbol(importName(x.name).toTypeName, x.flags, x.origin)
+ case x: from.TermSymbol =>
+ linkReferenced(myowner.newValue(myname, mypos, myflags), x, importSymbol)
+ case x: from.TypeSkolem =>
+ val origin = x.unpackLocation match {
+ case null => null
+ case y: from.Tree => importTree(y)
+ case y: from.Symbol => importSymbol(y)
+ }
+ myowner.newTypeSkolemSymbol(myname.toTypeName, origin, mypos, myflags)
+ case x: from.ModuleClassSymbol =>
+ val mysym = myowner.newModuleClass(myname.toTypeName, mypos, myflags)
+ symMap.weakUpdate(x, mysym)
+ mysym.sourceModule = importSymbol(x.sourceModule)
+ mysym
+ case x: from.ClassSymbol =>
+ val mysym = myowner.newClassSymbol(myname.toTypeName, mypos, myflags)
+ symMap.weakUpdate(x, mysym)
+ if (sym.thisSym != sym) {
+ mysym.typeOfThis = importType(sym.typeOfThis)
+ mysym.thisSym setName importName(sym.thisSym.name)
+ }
+ mysym
+ case x: from.TypeSymbol =>
+ myowner.newTypeSymbol(myname.toTypeName, mypos, myflags)
+ }
+ symMap.weakUpdate(sym, mysym)
+ mysym setFlag Flags.LOCKED
+ mysym setInfo {
+ val mytypeParams = sym.typeParams map importSymbol
+ new LazyPolyType(mytypeParams) with FlagAgnosticCompleter {
+ override def complete(s: Symbol) {
+ val result = sym.info match {
+ case from.PolyType(_, res) => res
+ case result => result
+ }
+ s setInfo GenPolyType(mytypeParams, importType(result))
+ s setAnnotations (sym.annotations map importAnnotationInfo)
+ }
+ }
+ }
+ mysym resetFlag Flags.LOCKED
+ } // end doImport
+
+ def importOrRelink: Symbol = {
+ val sym = sym0 // makes sym visible in the debugger
+ if (sym == null)
+ null
+ else if (sym == from.NoSymbol)
+ NoSymbol
+ else if (sym.isRoot)
+ rootMirror.RootClass // !!! replace with actual mirror when we move importers to the mirror
+ else {
+ val name = sym.name
+ val owner = sym.owner
+ var scope = if (owner.isClass && !owner.isRefinementClass) owner.info else from.NoType
+ var existing = scope.decl(name)
+ if (sym.isModuleClass)
+ existing = existing.moduleClass
+
+ if (!existing.exists) scope = from.NoType
+
+ val myname = importName(name)
+ val myowner = importSymbol(owner)
+ val myscope = if (scope != from.NoType && !(myowner hasFlag Flags.LOCKED)) myowner.info else NoType
+ var myexisting = if (myscope != NoType) myowner.info.decl(myname) else NoSymbol // cannot load myexisting in general case, because it creates cycles for methods
+ if (sym.isModuleClass)
+ myexisting = importSymbol(sym.sourceModule).moduleClass
+
+ if (!sym.isOverloaded && myexisting.isOverloaded) {
+ myexisting =
+ if (sym.isMethod) {
+ val localCopy = doImport(sym)
+ myexisting filter (_.tpe matches localCopy.tpe)
+ } else {
+ myexisting filter (!_.isMethod)
+ }
+ assert(!myexisting.isOverloaded,
+ "import failure: cannot determine unique overloaded method alternative from\n "+
+ (myexisting.alternatives map (_.defString) mkString "\n")+"\n that matches "+sym+":"+sym.tpe)
+ }
+
+ val mysym = {
+ if (sym.isOverloaded) {
+ myowner.newOverloaded(myowner.thisType, sym.alternatives map importSymbol)
+ } else if (sym.isTypeParameter && sym.paramPos >= 0 && !(myowner hasFlag Flags.LOCKED)) {
+ assert(myowner.typeParams.length > sym.paramPos,
+ "import failure: cannot determine parameter "+sym+" (#"+sym.paramPos+") in "+
+ myowner+typeParamsString(myowner.rawInfo)+"\n original symbol was: "+
+ sym.owner+from.typeParamsString(sym.owner.info))
+ myowner.typeParams(sym.paramPos)
+ } else {
+ if (myexisting != NoSymbol) {
+ myexisting
+ } else {
+ val mysym = doImport(sym)
+
+ if (myscope != NoType) {
+ assert(myowner.info.decls.lookup(myname) == NoSymbol, myname+" "+myowner.info.decl(myname)+" "+myexisting)
+ myowner.info.decls enter mysym
+ }
+
+ mysym
+ }
+ }
+ }
+
+ mysym
+ }
+ } // end importOrRelink
+
+ val sym = sym0
+ symMap.weakGet(sym) match {
+ case Some(result) => result
+ case None =>
+ pendingSyms += 1
+ try {
+ val result = importOrRelink
+ symMap.weakUpdate(sym, result)
+ result
+ } finally {
+ pendingSyms -= 1
+ tryFixup()
+ }
+ }
+ }
+
+ def importType(tpe: from.Type): Type = {
+ def doImport(tpe: from.Type): Type = tpe match {
+ case from.TypeRef(pre, sym, args) =>
+ TypeRef(importType(pre), importSymbol(sym), args map importType)
+ case from.ThisType(clazz) =>
+ ThisType(importSymbol(clazz))
+ case from.SingleType(pre, sym) =>
+ SingleType(importType(pre), importSymbol(sym))
+ case from.MethodType(params, restpe) =>
+ MethodType(params map importSymbol, importType(restpe))
+ case from.PolyType(tparams, restpe) =>
+ PolyType(tparams map importSymbol, importType(restpe))
+ case from.NullaryMethodType(restpe) =>
+ NullaryMethodType(importType(restpe))
+ case from.ConstantType(constant @ from.Constant(_)) =>
+ ConstantType(importConstant(constant))
+ case from.SuperType(thistpe, supertpe) =>
+ SuperType(importType(thistpe), importType(supertpe))
+ case from.TypeBounds(lo, hi) =>
+ TypeBounds(importType(lo), importType(hi))
+ case from.BoundedWildcardType(bounds) =>
+ BoundedWildcardType(importTypeBounds(bounds))
+ case from.ClassInfoType(parents, decls, clazz) =>
+ val myclazz = importSymbol(clazz)
+ val myscope = if (myclazz.isPackageClass) newPackageScope(myclazz) else newScope
+ val myclazzTpe = ClassInfoType(parents map importType, myscope, myclazz)
+ myclazz setInfo GenPolyType(myclazz.typeParams, myclazzTpe) // needed so that newly created symbols find their scope
+ decls foreach importSymbol // will enter itself into myclazz
+ myclazzTpe
+ case from.RefinedType(parents, decls) =>
+ RefinedType(parents map importType, importScope(decls), importSymbol(tpe.typeSymbol))
+ case from.ExistentialType(tparams, restpe) =>
+ newExistentialType(tparams map importSymbol, importType(restpe))
+ case from.OverloadedType(pre, alts) =>
+ OverloadedType(importType(pre), alts map importSymbol)
+ case from.AntiPolyType(pre, targs) =>
+ AntiPolyType(importType(pre), targs map importType)
+ case x: from.TypeVar =>
+ TypeVar(importType(x.origin), importTypeConstraint(x.constr), x.typeArgs map importType, x.params map importSymbol)
+ case from.NotNullType(tpe) =>
+ NotNullType(importType(tpe))
+ case from.AnnotatedType(annots, tpe, selfsym) =>
+ AnnotatedType(annots map importAnnotationInfo, importType(tpe), importSymbol(selfsym))
+ case from.ErrorType =>
+ ErrorType
+ case from.WildcardType =>
+ WildcardType
+ case from.NoType =>
+ NoType
+ case from.NoPrefix =>
+ NoPrefix
+ case null =>
+ null
+ } // end doImport
+
+ def importOrRelink: Type =
+ doImport(tpe)
+
+ tpeMap.weakGet(tpe) match {
+ case Some(result) => result
+ case None =>
+ pendingTpes += 1
+ try {
+ val result = importOrRelink
+ tpeMap.weakUpdate(tpe, result)
+ result
+ } finally {
+ pendingTpes -= 1
+ tryFixup()
+ }
+ }
+ }
+
+ def importTypeBounds(bounds: from.TypeBounds) = importType(bounds).asInstanceOf[TypeBounds]
+
+ def importAnnotationInfo(ann: from.AnnotationInfo): AnnotationInfo = {
+ val atp1 = importType(ann.atp)
+ val args1 = ann.args map importTree
+ val assocs1 = ann.assocs map { case (name, arg) => (importName(name), importAnnotArg(arg)) }
+ val original1 = importTree(ann.original)
+ AnnotationInfo(atp1, args1, assocs1) setOriginal original1
+ }
+
+ def importAnnotArg(arg: from.ClassfileAnnotArg): ClassfileAnnotArg = arg match {
+ case from.LiteralAnnotArg(constant @ from.Constant(_)) =>
+ LiteralAnnotArg(importConstant(constant))
+ case from.ArrayAnnotArg(args) =>
+ ArrayAnnotArg(args map importAnnotArg)
+ case from.ScalaSigBytes(bytes) =>
+ ScalaSigBytes(bytes)
+ case from.NestedAnnotArg(annInfo) =>
+ NestedAnnotArg(importAnnotationInfo(annInfo))
+ }
+
+ def importTypeConstraint(constr: from.TypeConstraint): TypeConstraint = {
+ val result = new TypeConstraint(constr.loBounds map importType, constr.hiBounds map importType)
+ result.inst = importType(constr.inst)
+ result
+ }
+
+ // !!! todo: override to cater for PackageScopes
+ def importScope(decls: from.Scope): Scope =
+ newScopeWith(decls.toList map importSymbol: _*)
+
+ def importName(name: from.Name): Name =
+ if (name.isTypeName) newTypeName(name.toString) else newTermName(name.toString)
+ def importTypeName(name: from.TypeName): TypeName = importName(name).toTypeName
+ def importTermName(name: from.TermName): TermName = importName(name).toTermName
+
+ def importModifiers(mods: from.Modifiers): Modifiers =
+ new Modifiers(mods.flags, importName(mods.privateWithin), mods.annotations map importTree)
+
+ def importImportSelector(sel: from.ImportSelector): ImportSelector =
+ new ImportSelector(importName(sel.name), sel.namePos, if (sel.rename != null) importName(sel.rename) else null, sel.renamePos)
+
+ def importTree(tree: from.Tree): Tree = {
+ val mytree = tree match {
+ case from.ClassDef(mods, name, tparams, impl) =>
+ new ClassDef(importModifiers(mods), importName(name).toTypeName, tparams map importTypeDef, importTemplate(impl))
+ case from.PackageDef(pid, stats) =>
+ new PackageDef(importRefTree(pid), stats map importTree)
+ case from.ModuleDef(mods, name, impl) =>
+ new ModuleDef(importModifiers(mods), importName(name).toTermName, importTemplate(impl))
+ case from.emptyValDef =>
+ emptyValDef
+ case from.ValDef(mods, name, tpt, rhs) =>
+ new ValDef(importModifiers(mods), importName(name).toTermName, importTree(tpt), importTree(rhs))
+ case from.DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ new DefDef(importModifiers(mods), importName(name).toTermName, tparams map importTypeDef, mmap(vparamss)(importValDef), importTree(tpt), importTree(rhs))
+ case from.TypeDef(mods, name, tparams, rhs) =>
+ new TypeDef(importModifiers(mods), importName(name).toTypeName, tparams map importTypeDef, importTree(rhs))
+ case from.LabelDef(name, params, rhs) =>
+ new LabelDef(importName(name).toTermName, params map importIdent, importTree(rhs))
+ case from.Import(expr, selectors) =>
+ new Import(importTree(expr), selectors map importImportSelector)
+ case from.Template(parents, self, body) =>
+ new Template(parents map importTree, importValDef(self), body map importTree)
+ case from.Block(stats, expr) =>
+ new Block(stats map importTree, importTree(expr))
+ case from.CaseDef(pat, guard, body) =>
+ new CaseDef(importTree(pat), importTree(guard), importTree(body))
+ case from.Alternative(trees) =>
+ new Alternative(trees map importTree)
+ case from.Star(elem) =>
+ new Star(importTree(elem))
+ case from.Bind(name, body) =>
+ new Bind(importName(name), importTree(body))
+ case from.UnApply(fun, args) =>
+ new UnApply(importTree(fun), args map importTree)
+ case from.ArrayValue(elemtpt ,elems) =>
+ new ArrayValue(importTree(elemtpt), elems map importTree)
+ case from.Function(vparams, body) =>
+ new Function(vparams map importValDef, importTree(body))
+ case from.Assign(lhs, rhs) =>
+ new Assign(importTree(lhs), importTree(rhs))
+ case from.AssignOrNamedArg(lhs, rhs) =>
+ new AssignOrNamedArg(importTree(lhs), importTree(rhs))
+ case from.If(cond, thenp, elsep) =>
+ new If(importTree(cond), importTree(thenp), importTree(elsep))
+ case from.Match(selector, cases) =>
+ new Match(importTree(selector), cases map importCaseDef)
+ case from.Return(expr) =>
+ new Return(importTree(expr))
+ case from.Try(block, catches, finalizer) =>
+ new Try(importTree(block), catches map importCaseDef, importTree(finalizer))
+ case from.Throw(expr) =>
+ new Throw(importTree(expr))
+ case from.New(tpt) =>
+ new New(importTree(tpt))
+ case from.Typed(expr, tpt) =>
+ new Typed(importTree(expr), importTree(tpt))
+ case from.TypeApply(fun, args) =>
+ new TypeApply(importTree(fun), args map importTree)
+ case from.Apply(fun, args) => tree match {
+ case _: from.ApplyToImplicitArgs =>
+ new ApplyToImplicitArgs(importTree(fun), args map importTree)
+ case _: from.ApplyImplicitView =>
+ new ApplyImplicitView(importTree(fun), args map importTree)
+ case _ =>
+ new Apply(importTree(fun), args map importTree)
+ }
+ case from.ApplyDynamic(qual, args) =>
+ new ApplyDynamic(importTree(qual), args map importTree)
+ case from.Super(qual, mix) =>
+ new Super(importTree(qual), importTypeName(mix))
+ case from.This(qual) =>
+ new This(importName(qual).toTypeName)
+ case from.Select(qual, name) =>
+ new Select(importTree(qual), importName(name))
+ case from.Ident(name) =>
+ new Ident(importName(name))
+ case from.ReferenceToBoxed(ident) =>
+ new ReferenceToBoxed(importTree(ident) match { case ident: Ident => ident })
+ case from.Literal(constant @ from.Constant(_)) =>
+ new Literal(importConstant(constant))
+ case from.TypeTree() =>
+ new TypeTree()
+ case from.Annotated(annot, arg) =>
+ new Annotated(importTree(annot), importTree(arg))
+ case from.SingletonTypeTree(ref) =>
+ new SingletonTypeTree(importTree(ref))
+ case from.SelectFromTypeTree(qual, name) =>
+ new SelectFromTypeTree(importTree(qual), importName(name).toTypeName)
+ case from.CompoundTypeTree(templ) =>
+ new CompoundTypeTree(importTemplate(templ))
+ case from.AppliedTypeTree(tpt, args) =>
+ new AppliedTypeTree(importTree(tpt), args map importTree)
+ case from.TypeBoundsTree(lo, hi) =>
+ new TypeBoundsTree(importTree(lo), importTree(hi))
+ case from.ExistentialTypeTree(tpt, whereClauses) =>
+ new ExistentialTypeTree(importTree(tpt), whereClauses map importTree)
+ case from.EmptyTree =>
+ EmptyTree
+ case null =>
+ null
+ }
+ addFixup({
+ if (mytree != null) {
+ val mysym = if (tree.hasSymbol) importSymbol(tree.symbol) else NoSymbol
+ val mytpe = importType(tree.tpe)
+
+ mytree match {
+ case mytt: TypeTree =>
+ val tt = tree.asInstanceOf[from.TypeTree]
+ if (mytree.hasSymbol) mytt.symbol = mysym
+ if (tt.wasEmpty) mytt.defineType(mytpe) else mytt.setType(mytpe)
+ if (tt.original != null) mytt.setOriginal(importTree(tt.original))
+ case _ =>
+ if (mytree.hasSymbol) mytree.symbol = importSymbol(tree.symbol)
+ mytree.tpe = importType(tree.tpe)
+ }
+ }
+ })
+ tryFixup()
+ // we have to be careful with position import as some shared trees
+ // like EmptyTree, emptyValDef don't support position assignment
+ if (tree.pos != NoPosition)
+ mytree.setPos(importPosition(tree.pos))
+ else
+ mytree
+ }
+
+ def importValDef(tree: from.ValDef): ValDef = importTree(tree).asInstanceOf[ValDef]
+ def importTypeDef(tree: from.TypeDef): TypeDef = importTree(tree).asInstanceOf[TypeDef]
+ def importTemplate(tree: from.Template): Template = importTree(tree).asInstanceOf[Template]
+ def importRefTree(tree: from.RefTree): RefTree = importTree(tree).asInstanceOf[RefTree]
+ def importIdent(tree: from.Ident): Ident = importTree(tree).asInstanceOf[Ident]
+ def importCaseDef(tree: from.CaseDef): CaseDef = importTree(tree).asInstanceOf[CaseDef]
+ def importConstant(constant: from.Constant): Constant = new Constant(constant.tag match {
+ case ClazzTag => importType(constant.value.asInstanceOf[from.Type])
+ case EnumTag => importSymbol(constant.value.asInstanceOf[from.Symbol])
+ case _ => constant.value
+ })
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/InfoTransformers.scala b/src/reflect/scala/reflect/internal/InfoTransformers.scala
new file mode 100644
index 0000000..82904b0
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/InfoTransformers.scala
@@ -0,0 +1,51 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+trait InfoTransformers {
+ self: SymbolTable =>
+
+ /* Syncnote: This should not need to be protected, as reflection does not run in multiple phases.
+ */
+ abstract class InfoTransformer {
+ var prev: InfoTransformer = this
+ var next: InfoTransformer = this
+
+ val pid: Phase#Id
+ val changesBaseClasses: Boolean
+ def transform(sym: Symbol, tpe: Type): Type
+
+ def insert(that: InfoTransformer) {
+ assert(this.pid != that.pid, this.pid)
+
+ if (that.pid < this.pid) {
+ prev insert that
+ } else if (next.pid <= that.pid && next.pid != NoPhase.id) {
+ next insert that
+ } else {
+ log("Inserting info transformer %s following %s".format(phaseOf(that.pid), phaseOf(this.pid)))
+ that.next = next
+ that.prev = this
+ next.prev = that
+ this.next = that
+ }
+ }
+
+ /** The InfoTransformer whose (pid == from).
+ * If no such exists, the InfoTransformer with the next
+ * higher pid.
+ */
+ def nextFrom(from: Phase#Id): InfoTransformer =
+ if (from == this.pid) this
+ else if (from < this.pid)
+ if (prev.pid < from) this
+ else prev.nextFrom(from);
+ else if (next.pid == NoPhase.id) next
+ else next.nextFrom(from)
+ }
+}
+
diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala
new file mode 100644
index 0000000..3ab7b20
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Kinds.scala
@@ -0,0 +1,233 @@
+/* NSC -- new scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import scala.collection.{ mutable, immutable }
+import scala.reflect.internal.util.StringOps.{ countAsString, countElementsAsString }
+
+trait Kinds {
+ self: SymbolTable =>
+
+ import definitions._
+
+ private type SymPair = ((Symbol, Symbol)) // ((Argument, Parameter))
+
+ case class KindErrors(
+ arity: List[SymPair] = Nil,
+ variance: List[SymPair] = Nil,
+ strictness: List[SymPair] = Nil
+ ) {
+ def isEmpty = arity.isEmpty && variance.isEmpty && strictness.isEmpty
+
+ def arityError(syms: SymPair) = copy(arity = arity :+ syms)
+ def varianceError(syms: SymPair) = copy(variance = variance :+ syms)
+ def strictnessError(syms: SymPair) = copy(strictness = strictness :+ syms)
+
+ def ++(errs: KindErrors) = KindErrors(
+ arity ++ errs.arity,
+ variance ++ errs.variance,
+ strictness ++ errs.strictness
+ )
+ // @M TODO this method is duplicated all over the place (varianceString)
+ private def varStr(s: Symbol): String =
+ if (s.isCovariant) "covariant"
+ else if (s.isContravariant) "contravariant"
+ else "invariant";
+
+ private def qualify(a0: Symbol, b0: Symbol): String = if (a0.toString != b0.toString) "" else {
+ if((a0 eq b0) || (a0.owner eq b0.owner)) ""
+ else {
+ var a = a0; var b = b0
+ while (a.owner.name == b.owner.name) { a = a.owner; b = b.owner}
+ if (a.locationString ne "") " (" + a.locationString.trim + ")" else ""
+ }
+ }
+ private def kindMessage(a: Symbol, p: Symbol)(f: (String, String) => String): String =
+ f(a+qualify(a,p), p+qualify(p,a))
+
+ // Normally it's nicer to print nothing rather than '>: Nothing <: Any' all over
+ // the place, but here we need it for the message to make sense.
+ private def strictnessMessage(a: Symbol, p: Symbol) =
+ kindMessage(a, p)("%s's bounds%s are stricter than %s's declared bounds%s".format(
+ _, a.info, _, p.info match {
+ case tb @ TypeBounds(_, _) if tb.isEmptyBounds => " >: Nothing <: Any"
+ case tb => "" + tb
+ })
+ )
+
+ private def varianceMessage(a: Symbol, p: Symbol) =
+ kindMessage(a, p)("%s is %s, but %s is declared %s".format(_, varStr(a), _, varStr(p)))
+
+ private def arityMessage(a: Symbol, p: Symbol) =
+ kindMessage(a, p)("%s has %s, but %s has %s".format(
+ _, countElementsAsString(a.typeParams.length, "type parameter"),
+ _, countAsString(p.typeParams.length))
+ )
+
+ private def buildMessage(xs: List[SymPair], f: (Symbol, Symbol) => String) = (
+ if (xs.isEmpty) ""
+ else xs map f.tupled mkString ("\n", ", ", "")
+ )
+
+ def errorMessage(targ: Type, tparam: Symbol): String = (
+ (targ+"'s type parameters do not match "+tparam+"'s expected parameters:")
+ + buildMessage(arity, arityMessage)
+ + buildMessage(variance, varianceMessage)
+ + buildMessage(strictness, strictnessMessage)
+ )
+ }
+ val NoKindErrors = KindErrors(Nil, Nil, Nil)
+
+ // TODO: this desperately needs to be cleaned up
+ // plan: split into kind inference and subkinding
+ // every Type has a (cached) Kind
+ def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean =
+ checkKindBounds0(tparams, targs, pre, owner, false).isEmpty
+
+ /** Check whether `sym1`'s variance conforms to `sym2`'s variance.
+ *
+ * If `sym2` is invariant, `sym1`'s variance is irrelevant. Otherwise they must be equal.
+ */
+ private def variancesMatch(sym1: Symbol, sym2: Symbol) = (
+ sym2.variance==0
+ || sym1.variance==sym2.variance
+ )
+
+ /** Check well-kindedness of type application (assumes arities are already checked) -- @M
+ *
+ * This check is also performed when abstract type members become concrete (aka a "type alias") -- then tparams.length==1
+ * (checked one type member at a time -- in that case, prefix is the name of the type alias)
+ *
+ * Type application is just like value application: it's "contravariant" in the sense that
+ * the type parameters of the supplied type arguments must conform to the type parameters of
+ * the required type parameters:
+ * - their bounds must be less strict
+ * - variances must match (here, variances are absolute, the variance of a type parameter does not influence the variance of its higher-order parameters)
+ * - @M TODO: are these conditions correct,sufficient&necessary?
+ *
+ * e.g. class Iterable[t, m[+x <: t]] --> the application Iterable[Int, List] is okay, since
+ * List's type parameter is also covariant and its bounds are weaker than <: Int
+ */
+ def checkKindBounds0(
+ tparams: List[Symbol],
+ targs: List[Type],
+ pre: Type,
+ owner: Symbol,
+ explainErrors: Boolean
+ ): List[(Type, Symbol, KindErrors)] = {
+
+ // instantiate type params that come from outside the abstract type we're currently checking
+ def transform(tp: Type, clazz: Symbol): Type = tp.asSeenFrom(pre, clazz)
+
+ // check that the type parameters hkargs to a higher-kinded type conform to the
+ // expected params hkparams
+ def checkKindBoundsHK(
+ hkargs: List[Symbol],
+ arg: Symbol,
+ param: Symbol,
+ paramowner: Symbol,
+ underHKParams: List[Symbol],
+ withHKArgs: List[Symbol]
+ ): KindErrors = {
+
+ var kindErrors: KindErrors = NoKindErrors
+ def bindHKParams(tp: Type) = tp.substSym(underHKParams, withHKArgs)
+ // @M sometimes hkargs != arg.typeParams, the symbol and the type may
+ // have very different type parameters
+ val hkparams = param.typeParams
+
+ def kindCheck(cond: Boolean, f: KindErrors => KindErrors) {
+ if (!cond)
+ kindErrors = f(kindErrors)
+ }
+
+ if (settings.debug.value) {
+ log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner)
+ log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner)
+ log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs)
+ }
+
+ if (!sameLength(hkargs, hkparams)) {
+ // Any and Nothing are kind-overloaded
+ if (arg == AnyClass || arg == NothingClass) NoKindErrors
+ // shortcut: always set error, whether explainTypesOrNot
+ else return kindErrors.arityError(arg -> param)
+ }
+ else foreach2(hkargs, hkparams) { (hkarg, hkparam) =>
+ if (hkparam.typeParams.isEmpty && hkarg.typeParams.isEmpty) { // base-case: kind *
+ kindCheck(variancesMatch(hkarg, hkparam), _ varianceError (hkarg -> hkparam))
+ // instantiateTypeParams(tparams, targs)
+ // higher-order bounds, may contain references to type arguments
+ // substSym(hkparams, hkargs)
+ // these types are going to be compared as types of kind *
+ //
+ // Their arguments use different symbols, but are
+ // conceptually the same. Could also replace the types by
+ // polytypes, but can't just strip the symbols, as ordering
+ // is lost then.
+ val declaredBounds = transform(hkparam.info.instantiateTypeParams(tparams, targs).bounds, paramowner)
+ val declaredBoundsInst = transform(bindHKParams(declaredBounds), owner)
+ val argumentBounds = transform(hkarg.info.bounds, owner)
+
+ kindCheck(declaredBoundsInst <:< argumentBounds, _ strictnessError (hkarg -> hkparam))
+
+ debuglog(
+ "checkKindBoundsHK base case: " + hkparam +
+ " declared bounds: " + declaredBounds +
+ " after instantiating earlier hkparams: " + declaredBoundsInst + "\n" +
+ "checkKindBoundsHK base case: "+ hkarg +
+ " has bounds: " + argumentBounds
+ )
+ }
+ else {
+ hkarg.initialize // SI-7902 otherwise hkarg.typeParams yields List(NoSymbol)!
+ debuglog("checkKindBoundsHK recursing to compare params of "+ hkparam +" with "+ hkarg)
+ kindErrors ++= checkKindBoundsHK(
+ hkarg.typeParams,
+ hkarg,
+ hkparam,
+ paramowner,
+ underHKParams ++ hkparam.typeParams,
+ withHKArgs ++ hkarg.typeParams
+ )
+ }
+ if (!explainErrors && !kindErrors.isEmpty)
+ return kindErrors
+ }
+ if (explainErrors) kindErrors
+ else NoKindErrors
+ }
+
+ if (settings.debug.value && (tparams.nonEmpty || targs.nonEmpty)) log(
+ "checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", "
+ + owner + ", " + explainErrors + ")"
+ )
+
+ flatMap2(tparams, targs) { (tparam, targ) =>
+ // Prevent WildcardType from causing kind errors, as typevars may be higher-order
+ if (targ == WildcardType) Nil else {
+ // force symbol load for #4205
+ targ.typeSymbolDirect.info
+ // @M must use the typeParams of the *type* targ, not of the *symbol* of targ!!
+ val tparamsHO = targ.typeParams
+ if (targ.isHigherKinded || tparam.typeParams.nonEmpty) {
+ // NOTE: *not* targ.typeSymbol, which normalizes
+ val kindErrors = checkKindBoundsHK(
+ tparamsHO, targ.typeSymbolDirect, tparam,
+ tparam.owner, tparam.typeParams, tparamsHO
+ )
+ if (kindErrors.isEmpty) Nil else {
+ if (explainErrors) List((targ, tparam, kindErrors))
+ // Return as soon as an error is seen if there's nothing to explain.
+ else return List((NoType, NoSymbol, NoKindErrors))
+ }
+ }
+ else Nil
+ }
+ }
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
new file mode 100644
index 0000000..0beb8e3
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -0,0 +1,295 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import Flags._
+
+trait Mirrors extends api.Mirrors {
+ thisUniverse: SymbolTable =>
+
+ override type Mirror >: Null <: RootsBase
+
+ // root symbols hold a strong reference to the enclosing mirror
+ // this prevents the mirror from being collected
+ // if there are any symbols created by that mirror
+ trait RootSymbol extends Symbol { def mirror: Mirror }
+
+ abstract class RootsBase(rootOwner: Symbol) extends scala.reflect.api.Mirror[Mirrors.this.type] { thisMirror =>
+
+ protected[scala] def rootLoader: LazyType
+
+ val RootClass: ClassSymbol
+ val RootPackage: ModuleSymbol
+ val EmptyPackageClass: ClassSymbol
+ val EmptyPackage: ModuleSymbol
+
+ def findMemberFromRoot(fullName: Name): Symbol = {
+ val segs = nme.segments(fullName.toString, fullName.isTermName)
+ if (segs.isEmpty) NoSymbol
+ else definitions.findNamedMember(segs.tail, RootClass.info member segs.head)
+ }
+
+ /** Todo: organize similar to mkStatic in reflect.Base */
+ private def getModuleOrClass(path: Name, len: Int): Symbol = {
+ val point = path lastPos('.', len - 1)
+ val owner =
+ if (point > 0) getModuleOrClass(path.toTermName, point)
+ else RootClass
+ val name = path subName (point + 1, len)
+ var sym = owner.info member name
+ val result = if (path.isTermName) sym.suchThat(_ hasFlag MODULE) else sym
+ if (result != NoSymbol) result
+ else {
+ if (settings.debug.value) { log(sym.info); log(sym.info.members) }//debug
+ thisMirror.missingHook(owner, name) orElse {
+ MissingRequirementError.notFound((if (path.isTermName) "object " else "class ")+path+" in "+thisMirror)
+ }
+ }
+ }
+
+ /** If you're looking for a class, pass a type name.
+ * If a module, a term name.
+ *
+ * Unlike `getModuleOrClass`, this function
+ * loads unqualified names from the root package.
+ */
+ private def getModuleOrClass(path: Name): Symbol =
+ getModuleOrClass(path, path.length)
+
+ /** If you're looking for a class, pass a type name.
+ * If a module, a term name.
+ *
+ * Unlike `getModuleOrClass`, this function
+ * loads unqualified names from the empty package.
+ */
+ private def staticModuleOrClass(path: Name): Symbol = {
+ val isPackageless = path.pos('.') == path.length
+ if (isPackageless) EmptyPackageClass.info decl path
+ else getModuleOrClass(path)
+ }
+
+ protected def mirrorMissingHook(owner: Symbol, name: Name): Symbol = NoSymbol
+
+ protected def universeMissingHook(owner: Symbol, name: Name): Symbol = thisUniverse.missingHook(owner, name)
+
+ private[scala] def missingHook(owner: Symbol, name: Name): Symbol = mirrorMissingHook(owner, name) orElse universeMissingHook(owner, name)
+
+ // todo: get rid of most the methods here and keep just staticClass/Module/Package
+
+ /************************ loaders of class symbols ************************/
+
+ private def ensureClassSymbol(fullname: String, sym: Symbol): ClassSymbol = {
+ var result = sym
+ while (result.isAliasType) result = result.info.typeSymbol
+ result match {
+ case x: ClassSymbol => x
+ case _ => MissingRequirementError.notFound("class " + fullname)
+ }
+ }
+
+ @deprecated("Use getClassByName", "2.10.0")
+ def getClass(fullname: Name): ClassSymbol =
+ getClassByName(fullname)
+
+ def getClassByName(fullname: Name): ClassSymbol =
+ ensureClassSymbol(fullname.toString, getModuleOrClass(fullname.toTypeName))
+
+ def getRequiredClass(fullname: String): ClassSymbol =
+ getClassByName(newTypeNameCached(fullname))
+
+ def requiredClass[T: ClassTag] : ClassSymbol =
+ getRequiredClass(erasureName[T])
+
+ def getClassIfDefined(fullname: String): Symbol =
+ getClassIfDefined(newTypeNameCached(fullname))
+
+ def getClassIfDefined(fullname: Name): Symbol =
+ wrapMissing(getClassByName(fullname.toTypeName))
+
+ /** @inheritdoc
+ *
+ * Unlike getClassByName/getRequiredClass this function can also load packageless symbols.
+ * Compiler might ignore them, but they should be loadable with macros.
+ */
+ override def staticClass(fullname: String): ClassSymbol =
+ ensureClassSymbol(fullname, staticModuleOrClass(newTypeNameCached(fullname)))
+
+ /************************ loaders of module symbols ************************/
+
+ private def ensureModuleSymbol(fullname: String, sym: Symbol, allowPackages: Boolean): ModuleSymbol =
+ sym match {
+ case x: ModuleSymbol if allowPackages || !x.isPackage => x
+ case _ => MissingRequirementError.notFound("object " + fullname)
+ }
+
+ @deprecated("Use getModuleByName", "2.10.0")
+ def getModule(fullname: Name): ModuleSymbol =
+ getModuleByName(fullname)
+
+ def getModuleByName(fullname: Name): ModuleSymbol =
+ ensureModuleSymbol(fullname.toString, getModuleOrClass(fullname.toTermName), allowPackages = true)
+
+ def getRequiredModule(fullname: String): ModuleSymbol =
+ getModule(newTermNameCached(fullname))
+
+ // TODO: What syntax do we think should work here? Say you have an object
+ // like scala.Predef. You can't say requiredModule[scala.Predef] since there's
+ // no accompanying Predef class, and if you say requiredModule[scala.Predef.type]
+ // the name found via the erasure is scala.Predef$. For now I am
+ // removing the trailing $, but I think that classTag should have
+ // a method which returns a usable name, one which doesn't expose this
+ // detail of the backend.
+ def requiredModule[T: ClassTag] : ModuleSymbol =
+ getRequiredModule(erasureName[T] stripSuffix "$")
+
+ def getModuleIfDefined(fullname: String): Symbol =
+ getModuleIfDefined(newTermNameCached(fullname))
+
+ def getModuleIfDefined(fullname: Name): Symbol =
+ wrapMissing(getModule(fullname.toTermName))
+
+ /** @inheritdoc
+ *
+ * Unlike getModule/getRequiredModule this function can also load packageless symbols.
+ * Compiler might ignore them, but they should be loadable with macros.
+ */
+ override def staticModule(fullname: String): ModuleSymbol =
+ ensureModuleSymbol(fullname, staticModuleOrClass(newTermNameCached(fullname)), allowPackages = false)
+
+ /************************ loaders of package symbols ************************/
+
+ private def ensurePackageSymbol(fullname: String, sym: Symbol, allowModules: Boolean): ModuleSymbol =
+ sym match {
+ case x: ModuleSymbol if allowModules || x.isPackage => x
+ case _ => MissingRequirementError.notFound("package " + fullname)
+ }
+
+ def getPackage(fullname: Name): ModuleSymbol =
+ ensurePackageSymbol(fullname.toString, getModuleOrClass(fullname), allowModules = true)
+
+ def getRequiredPackage(fullname: String): ModuleSymbol =
+ getPackage(newTermNameCached(fullname))
+
+ def getPackageObject(fullname: String): ModuleSymbol =
+ (getPackage(newTermName(fullname)).info member nme.PACKAGE) match {
+ case x: ModuleSymbol => x
+ case _ => MissingRequirementError.notFound("package object " + fullname)
+ }
+
+ def getPackageObjectIfDefined(fullname: String): Symbol =
+ getPackageObjectIfDefined(newTermNameCached(fullname))
+
+ def getPackageObjectIfDefined(fullname: Name): Symbol =
+ wrapMissing(getPackageObject(fullname.toTermName))
+
+ override def staticPackage(fullname: String): ModuleSymbol =
+ ensurePackageSymbol(fullname.toString, getModuleOrClass(newTermNameCached(fullname)), allowModules = false)
+
+ /************************ helpers ************************/
+
+ def erasureName[T: ClassTag] : String = {
+ /** We'd like the String representation to be a valid
+ * scala type, so we have to decode the jvm's secret language.
+ */
+ def erasureString(clazz: Class[_]): String = {
+ if (clazz.isArray) "Array[" + erasureString(clazz.getComponentType) + "]"
+ else clazz.getName
+ }
+ erasureString(classTag[T].runtimeClass)
+ }
+
+ @inline private def wrapMissing(body: => Symbol): Symbol =
+ try body
+ catch { case _: MissingRequirementError => NoSymbol }
+
+ /** getModule2/getClass2 aren't needed at present but may be again,
+ * so for now they're mothballed.
+ */
+ // def getModule2(name1: Name, name2: Name) = {
+ // try getModuleOrClass(name1.toTermName)
+ // catch { case ex1: FatalError =>
+ // try getModuleOrClass(name2.toTermName)
+ // catch { case ex2: FatalError => throw ex1 }
+ // }
+ // }
+ // def getClass2(name1: Name, name2: Name) = {
+ // try {
+ // val result = getModuleOrClass(name1.toTypeName)
+ // if (result.isAliasType) getClass(name2) else result
+ // }
+ // catch { case ex1: FatalError =>
+ // try getModuleOrClass(name2.toTypeName)
+ // catch { case ex2: FatalError => throw ex1 }
+ // }
+ // }
+
+ def init() {
+ // Still fiddling with whether it's cleaner to do some of this setup here
+ // or from constructors. The latter approach tends to invite init order issues.
+
+ EmptyPackageClass setInfo rootLoader
+ EmptyPackage setInfo EmptyPackageClass.tpe
+
+ connectModuleToClass(EmptyPackage, EmptyPackageClass)
+ connectModuleToClass(RootPackage, RootClass)
+
+ RootClass.info.decls enter EmptyPackage
+ RootClass.info.decls enter RootPackage
+ }
+ }
+
+ abstract class Roots(rootOwner: Symbol) extends RootsBase(rootOwner) { thisMirror =>
+
+ // TODO - having these as objects means they elude the attempt to
+ // add synchronization in SynchronizedSymbols. But we should either
+ // flip on object overrides or find some other accomodation, because
+ // lazy vals are unnecessarily expensive relative to objects and it
+ // is very beneficial for a handful of bootstrap symbols to have
+ // first class identities
+ sealed trait WellKnownSymbol extends Symbol {
+ this initFlags (TopLevelCreationFlags | STATIC)
+ }
+ // Features common to RootClass and RootPackage, the roots of all
+ // type and term symbols respectively.
+ sealed trait RootSymbol extends WellKnownSymbol with thisUniverse.RootSymbol {
+ final override def isRootSymbol = true
+ override def owner = rootOwner
+ override def typeOfThis = thisSym.tpe
+ def mirror = thisMirror.asInstanceOf[Mirror]
+ }
+
+ // This is the package _root_. The actual root cannot be referenced at
+ // the source level, but _root_ is essentially a function => <root>.
+ final object RootPackage extends ModuleSymbol(rootOwner, NoPosition, nme.ROOTPKG) with RootSymbol {
+ this setInfo NullaryMethodType(RootClass.tpe)
+ RootClass.sourceModule = this
+
+ override def isRootPackage = true
+ }
+ // This is <root>, the actual root of everything except the package _root_.
+ // <root> and _root_ (RootPackage and RootClass) should be the only "well known"
+ // symbols owned by NoSymbol. All owner chains should go through RootClass,
+ // although it is probable that some symbols are created as direct children
+ // of NoSymbol to ensure they will not be stumbled upon. (We should designate
+ // a better encapsulated place for that.)
+ final object RootClass extends PackageClassSymbol(rootOwner, NoPosition, tpnme.ROOT) with RootSymbol {
+ this setInfo rootLoader
+
+ override def isRoot = true
+ override def isEffectiveRoot = true
+ override def isNestedClass = false
+ }
+ // The empty package, which holds all top level types without given packages.
+ final object EmptyPackage extends ModuleSymbol(RootClass, NoPosition, nme.EMPTY_PACKAGE_NAME) with WellKnownSymbol {
+ override def isEmptyPackage = true
+ }
+ final object EmptyPackageClass extends PackageClassSymbol(RootClass, NoPosition, tpnme.EMPTY_PACKAGE_NAME) with WellKnownSymbol {
+ override def isEffectiveRoot = true
+ override def isEmptyPackageClass = true
+ }
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/MissingRequirementError.scala b/src/reflect/scala/reflect/internal/MissingRequirementError.scala
new file mode 100644
index 0000000..48203ca
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/MissingRequirementError.scala
@@ -0,0 +1,24 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+class MissingRequirementError private (msg: String) extends FatalError(msg) {
+ import MissingRequirementError.suffix
+ def req: String = if (msg endsWith suffix) msg dropRight suffix.length else msg
+}
+
+object MissingRequirementError {
+ private val suffix = " not found."
+ def signal(msg: String): Nothing = throw new MissingRequirementError(msg)
+ def notFound(req: String): Nothing = signal(req + suffix)
+ def unapply(x: Throwable): Option[String] = x match {
+ case x: MissingRequirementError => Some(x.req)
+ case _ => None
+ }
+}
+
+
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
new file mode 100644
index 0000000..c78ba72
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -0,0 +1,542 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import scala.io.Codec
+import java.security.MessageDigest
+import scala.language.implicitConversions
+
+trait LowPriorityNames {
+ self: Names =>
+
+ implicit def nameToNameOps(name: Name): NameOps[Name] = new NameOps[Name](name)
+}
+
+/** The class Names ...
+ *
+ * @author Martin Odersky
+ * @version 1.0, 05/02/2005
+ */
+trait Names extends api.Names with LowPriorityNames {
+ implicit def promoteTermNamesAsNecessary(name: Name): TermName = name.toTermName
+
+// Operations -------------------------------------------------------------
+
+ private final val HASH_SIZE = 0x8000
+ private final val HASH_MASK = 0x7FFF
+ private final val NAME_SIZE = 0x20000
+
+ final val nameDebug = false
+
+ /** Memory to store all names sequentially. */
+ var chrs: Array[Char] = new Array[Char](NAME_SIZE)
+ private var nc = 0
+
+ /** Hashtable for finding term names quickly. */
+ private val termHashtable = new Array[TermName](HASH_SIZE)
+
+ /** Hashtable for finding type names quickly. */
+ private val typeHashtable = new Array[TypeName](HASH_SIZE)
+
+ /** The hashcode of a name. */
+ private def hashValue(cs: Array[Char], offset: Int, len: Int): Int =
+ if (len > 0)
+ (len * (41 * 41 * 41) +
+ cs(offset) * (41 * 41) +
+ cs(offset + len - 1) * 41 +
+ cs(offset + (len >> 1)))
+ else 0;
+
+ /** Is (the ASCII representation of) name at given index equal to
+ * cs[offset..offset+len-1]?
+ */
+ private def equals(index: Int, cs: Array[Char], offset: Int, len: Int): Boolean = {
+ var i = 0
+ while ((i < len) && (chrs(index + i) == cs(offset + i)))
+ i += 1;
+ i == len
+ }
+
+ /** Enter characters into chrs array. */
+ private def enterChars(cs: Array[Char], offset: Int, len: Int) {
+ var i = 0
+ while (i < len) {
+ if (nc + i == chrs.length) {
+ val newchrs = new Array[Char](chrs.length * 2)
+ scala.compat.Platform.arraycopy(chrs, 0, newchrs, 0, chrs.length)
+ chrs = newchrs
+ }
+ chrs(nc + i) = cs(offset + i)
+ i += 1
+ }
+ if (len == 0) nc += 1
+ else nc = nc + len
+ }
+
+ /** Create a term name from the characters in cs[offset..offset+len-1]. */
+ def newTermName(cs: Array[Char], offset: Int, len: Int): TermName =
+ newTermName(cs, offset, len, cachedString = null)
+
+ def newTermName(cs: Array[Char]): TermName = newTermName(cs, 0, cs.length)
+ def newTypeName(cs: Array[Char]): TypeName = newTypeName(cs, 0, cs.length)
+
+ /** Create a term name from the characters in cs[offset..offset+len-1].
+ * TODO - have a mode where name validation is performed at creation time
+ * (e.g. if a name has the string "$class" in it, then fail if that
+ * string is not at the very end.)
+ */
+ protected def newTermName(cs: Array[Char], offset: Int, len: Int, cachedString: String): TermName = {
+ val h = hashValue(cs, offset, len) & HASH_MASK
+ var n = termHashtable(h)
+ while ((n ne null) && (n.length != len || !equals(n.start, cs, offset, len)))
+ n = n.next
+
+ if (n ne null) n
+ else {
+ // The logic order here is future-proofing against the possibility
+ // that name.toString will become an eager val, in which case the call
+ // to enterChars cannot follow the construction of the TermName.
+ val ncStart = nc
+ enterChars(cs, offset, len)
+ if (cachedString ne null) new TermName_S(ncStart, len, h, cachedString)
+ else new TermName_R(ncStart, len, h)
+ }
+ }
+ protected def newTypeName(cs: Array[Char], offset: Int, len: Int, cachedString: String): TypeName =
+ newTermName(cs, offset, len, cachedString).toTypeName
+
+ /** Create a term name from string. */
+ def newTermName(s: String): TermName = newTermName(s.toCharArray(), 0, s.length(), null)
+
+ /** Create a type name from string. */
+ def newTypeName(s: String): TypeName = newTermName(s).toTypeName
+
+ /** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
+ def newTermName(bs: Array[Byte], offset: Int, len: Int): TermName = {
+ val chars = Codec.fromUTF8(bs, offset, len)
+ newTermName(chars, 0, chars.length)
+ }
+
+ def newTermNameCached(s: String): TermName =
+ newTermName(s.toCharArray(), 0, s.length(), cachedString = s)
+
+ def newTypeNameCached(s: String): TypeName =
+ newTypeName(s.toCharArray(), 0, s.length(), cachedString = s)
+
+ /** Create a type name from the characters in cs[offset..offset+len-1]. */
+ def newTypeName(cs: Array[Char], offset: Int, len: Int): TypeName =
+ newTermName(cs, offset, len, cachedString = null).toTypeName
+
+ /** Create a type name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
+ def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
+ newTermName(bs, offset, len).toTypeName
+
+ def nameChars: Array[Char] = chrs
+ @deprecated("", "2.9.0") def view(s: String): TermName = newTermName(s)
+
+// Classes ----------------------------------------------------------------------
+
+ /** The name class.
+ * TODO - resolve schizophrenia regarding whether to treat Names as Strings
+ * or Strings as Names. Give names the key functions the absence of which
+ * make people want Strings all the time.
+ */
+ sealed abstract class Name(protected val index: Int, protected val len: Int) extends NameApi {
+ type ThisNameType >: Null <: Name
+ protected[this] def thisName: ThisNameType
+
+ // Note that "Name with ThisNameType" should be redundant
+ // because ThisNameType <: Name, but due to SI-6161 the
+ // compile loses track of this fact.
+
+ /** Index into name table */
+ def start: Int = index
+
+ /** The next name in the same hash bucket. */
+ def next: Name with ThisNameType
+
+ /** The length of this name. */
+ final def length: Int = len
+ final def isEmpty = length == 0
+ final def nonEmpty = !isEmpty
+
+ def nameKind: String
+ def isTermName: Boolean
+ def isTypeName: Boolean
+ def toTermName: TermName
+ def toTypeName: TypeName
+ def companionName: Name
+ def bothNames: List[Name] = List(toTermName, toTypeName)
+
+ /** Return the subname with characters from from to to-1. */
+ def subName(from: Int, to: Int): Name with ThisNameType
+
+ /** Return a new name of the same variety. */
+ def newName(str: String): Name with ThisNameType
+
+ /** Return a new name based on string transformation. */
+ def mapName(f: String => String): Name with ThisNameType = newName(f(toString))
+
+ /** Copy bytes of this name to buffer cs, starting at position `offset`. */
+ final def copyChars(cs: Array[Char], offset: Int) =
+ scala.compat.Platform.arraycopy(chrs, index, cs, offset, len)
+
+ /** @return the ascii representation of this name */
+ final def toChars: Array[Char] = {
+ val cs = new Array[Char](len)
+ copyChars(cs, 0)
+ cs
+ }
+
+ /** Write to UTF8 representation of this name to given character array.
+ * Start copying to index `to`. Return index of next free byte in array.
+ * Array must have enough remaining space for all bytes
+ * (i.e. maximally 3*length bytes).
+ */
+ final def copyUTF8(bs: Array[Byte], offset: Int): Int = {
+ val bytes = Codec.toUTF8(chrs, index, len)
+ scala.compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length)
+ offset + bytes.length
+ }
+
+ /** @return the hash value of this name */
+ final override def hashCode(): Int = index
+
+ // Presently disabled.
+ // override def equals(other: Any) = paranoidEquals(other)
+ private def paranoidEquals(other: Any): Boolean = {
+ val cmp = this eq other.asInstanceOf[AnyRef]
+ if (cmp || !nameDebug)
+ return cmp
+
+ other match {
+ case x: String =>
+ Console.println("Compared " + debugString + " and String '" + x + "'")
+ case x: Name =>
+ if (this.isTermName != x.isTermName) {
+ val panic = this.toTermName == x.toTermName
+ Console.println("Compared '%s' and '%s', one term, one type.%s".format(this, x,
+ if (panic) " And they contain the same name string!"
+ else ""
+ ))
+ }
+ case _ =>
+ }
+ false
+ }
+
+ /** @return the i'th Char of this name */
+ final def charAt(i: Int): Char = chrs(index + i)
+
+ /** @return the index of first occurrence of char c in this name, length if not found */
+ final def pos(c: Char): Int = pos(c, 0)
+
+ /** @return the index of first occurrence of char c in this name, length if not found */
+ final def pos(s: String): Int = pos(s, 0)
+
+ /** Returns the index of the first occurrence of character c in
+ * this name from start, length if not found.
+ *
+ * @param c the character
+ * @param start ...
+ * @return the index of the first occurrence of c
+ */
+ final def pos(c: Char, start: Int): Int = {
+ var i = start
+ while (i < len && chrs(index + i) != c) i += 1
+ i
+ }
+
+ /** Returns the index of the first occurrence of nonempty string s
+ * in this name from start, length if not found.
+ *
+ * @param s the string
+ * @param start ...
+ * @return the index of the first occurrence of s
+ */
+ final def pos(s: String, start: Int): Int = {
+ var i = pos(s.charAt(0), start)
+ while (i + s.length() <= len) {
+ var j = 1
+ while (s.charAt(j) == chrs(index + i + j)) {
+ j += 1
+ if (j == s.length()) return i
+ }
+ i = pos(s.charAt(0), i + 1)
+ }
+ len
+ }
+
+ /** Returns the index of last occurrence of char c in this
+ * name, -1 if not found.
+ *
+ * @param c the character
+ * @return the index of the last occurrence of c
+ */
+ final def lastPos(c: Char): Int = lastPos(c, len - 1)
+
+ final def lastPos(s: String): Int = lastPos(s, len - s.length)
+
+ /** Returns the index of the last occurrence of char c in this
+ * name from start, -1 if not found.
+ *
+ * @param c the character
+ * @param start ...
+ * @return the index of the last occurrence of c
+ */
+ final def lastPos(c: Char, start: Int): Int = {
+ var i = start
+ while (i >= 0 && chrs(index + i) != c) i -= 1
+ i
+ }
+
+ /** Returns the index of the last occurrence of string s in this
+ * name from start, -1 if not found.
+ *
+ * @param s the string
+ * @param start ...
+ * @return the index of the last occurrence of s
+ */
+ final def lastPos(s: String, start: Int): Int = {
+ var i = lastPos(s.charAt(0), start)
+ while (i >= 0) {
+ var j = 1;
+ while (s.charAt(j) == chrs(index + i + j)) {
+ j += 1
+ if (j == s.length()) return i;
+ }
+ i = lastPos(s.charAt(0), i - 1)
+ }
+ -s.length()
+ }
+
+ /** Does this name start with prefix? */
+ final def startsWith(prefix: Name): Boolean = startsWith(prefix, 0)
+
+ /** Does this name start with prefix at given start index? */
+ final def startsWith(prefix: Name, start: Int): Boolean = {
+ var i = 0
+ while (i < prefix.length && start + i < len &&
+ chrs(index + start + i) == chrs(prefix.start + i))
+ i += 1;
+ i == prefix.length
+ }
+
+ /** Does this name end with suffix? */
+ final def endsWith(suffix: Name): Boolean = endsWith(suffix, len)
+
+ /** Does this name end with suffix just before given end index? */
+ final def endsWith(suffix: Name, end: Int): Boolean = {
+ var i = 1
+ while (i <= suffix.length && i <= end &&
+ chrs(index + end - i) == chrs(suffix.start + suffix.length - i))
+ i += 1;
+ i > suffix.length
+ }
+
+ final def containsName(subname: String): Boolean = containsName(newTermName(subname))
+ final def containsName(subname: Name): Boolean = {
+ var start = 0
+ val last = len - subname.length
+ while (start <= last && !startsWith(subname, start)) start += 1
+ start <= last
+ }
+ final def containsChar(ch: Char): Boolean = {
+ var i = index
+ val max = index + len
+ while (i < max) {
+ if (chrs(i) == ch)
+ return true
+ i += 1
+ }
+ false
+ }
+
+ /** Some thoroughly self-explanatory convenience functions. They
+ * assume that what they're being asked to do is known to be valid.
+ */
+ final def startChar: Char = this charAt 0
+ final def endChar: Char = this charAt len - 1
+ final def startsWith(char: Char): Boolean = len > 0 && startChar == char
+ final def startsWith(name: String): Boolean = startsWith(newTermName(name))
+ final def endsWith(char: Char): Boolean = len > 0 && endChar == char
+ final def endsWith(name: String): Boolean = endsWith(newTermName(name))
+
+ def indexOf(ch: Char) = {
+ val idx = pos(ch)
+ if (idx == length) -1 else idx
+ }
+ def indexOf(ch: Char, fromIndex: Int) = {
+ val idx = pos(ch, fromIndex)
+ if (idx == length) -1 else idx
+ }
+ def lastIndexOf(ch: Char) = lastPos(ch)
+ def lastIndexOf(ch: Char, fromIndex: Int) = lastPos(ch, fromIndex)
+
+ /** Replace all occurrences of `from` by `to` in
+ * name; result is always a term name.
+ */
+ def replace(from: Char, to: Char): Name = {
+ val cs = new Array[Char](len)
+ var i = 0
+ while (i < len) {
+ val ch = charAt(i)
+ cs(i) = if (ch == from) to else ch
+ i += 1
+ }
+ newTermName(cs, 0, len)
+ }
+
+ /* TODO - reconcile/fix that encode returns a Name but
+ * decode returns a String.
+ */
+
+ /** !!! Duplicative but consistently named.
+ */
+ def decoded: String = decode
+ def encoded: String = "" + encode
+ // def decodedName: ThisNameType = newName(decoded)
+ def encodedName: ThisNameType = encode
+
+ /** Replace operator symbols by corresponding \$op_name. */
+ def encode: ThisNameType = {
+ val str = toString
+ val res = NameTransformer.encode(str)
+ if (res == str) thisName else newName(res)
+ }
+
+ /** Replace \$op_name by corresponding operator symbol. */
+ def decode: String = {
+ if (this containsChar '$') {
+ val str = toString
+ val res = NameTransformer.decode(str)
+ if (res == str) str
+ else res
+ }
+ else toString
+ }
+
+ /** TODO - find some efficiency. */
+ def append(ch: Char) = newName("" + this + ch)
+ def append(suffix: String) = newName("" + this + suffix)
+ def append(suffix: Name) = newName("" + this + suffix)
+ def prepend(ch: Char) = newName("" + ch + this)
+ def prepend(prefix: String) = newName("" + prefix + this)
+ def prepend(prefix: Name) = newName("" + prefix + this)
+
+ def decodedName: ThisNameType = newName(decode)
+ def isOperatorName: Boolean = decode != toString
+ def longString: String = nameKind + " " + decode
+ def debugString = { val s = decode ; if (isTypeName) s + "!" else s }
+ }
+
+ implicit def TermNameOps(name: TermName): NameOps[TermName] = new NameOps(name)
+ implicit def TypeNameOps(name: TypeName): NameOps[TypeName] = new NameOps(name)
+
+ final class NameOps[T <: Name](name: T) {
+ def stripSuffix(suffix: Name): T = if (name endsWith suffix) dropRight(suffix.length) else name
+ def dropRight(n: Int): T = name.subName(0, name.length - n).asInstanceOf[T]
+ def drop(n: Int): T = name.subName(n, name.length).asInstanceOf[T]
+ def nonEmpty: Boolean = name.length > 0
+ }
+
+ implicit val NameTag = ClassTag[Name](classOf[Name])
+
+ /** A name that contains no operator chars nor dollar signs.
+ * TODO - see if it's any faster to do something along these lines.
+ * Cute: now that exhaustivity kind of works, the mere presence of
+ * this trait causes TermName and TypeName to stop being exhaustive.
+ * Commented out.
+ */
+ // trait AlphaNumName extends Name {
+ // final override def encode = thisName
+ // final override def decodedName = thisName
+ // final override def decode = toString
+ // final override def isOperatorName = false
+ // }
+
+ /** TermName_S and TypeName_S have fields containing the string version of the name.
+ * TermName_R and TypeName_R recreate it each time toString is called.
+ */
+ private class TermName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TermName(index0, len0, hash) {
+ protected def createCompanionName(h: Int): TypeName = new TypeName_S(index, len, h, toString)
+ override def newName(str: String): TermName = newTermNameCached(str)
+ }
+ private class TypeName_S(index0: Int, len0: Int, hash: Int, override val toString: String) extends TypeName(index0, len0, hash) {
+ protected def createCompanionName(h: Int): TermName = new TermName_S(index, len, h, toString)
+ override def newName(str: String): TypeName = newTypeNameCached(str)
+ }
+
+ private class TermName_R(index0: Int, len0: Int, hash: Int) extends TermName(index0, len0, hash) {
+ protected def createCompanionName(h: Int): TypeName = new TypeName_R(index, len, h)
+ override def toString = new String(chrs, index, len)
+ }
+
+ private class TypeName_R(index0: Int, len0: Int, hash: Int) extends TypeName(index0, len0, hash) {
+ protected def createCompanionName(h: Int): TermName = new TermName_R(index, len, h)
+ override def toString = new String(chrs, index, len)
+ }
+
+ sealed abstract class TermName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) {
+ type ThisNameType = TermName
+ protected[this] def thisName: TermName = this
+
+ var next: TermName = termHashtable(hash)
+ termHashtable(hash) = this
+ def isTermName: Boolean = true
+ def isTypeName: Boolean = false
+ def toTermName: TermName = this
+ def toTypeName: TypeName = {
+ val h = hashValue(chrs, index, len) & HASH_MASK
+ var n = typeHashtable(h)
+ while ((n ne null) && n.start != index)
+ n = n.next
+
+ if (n ne null) n
+ else createCompanionName(h)
+ }
+ def newName(str: String): TermName = newTermName(str)
+ def companionName: TypeName = toTypeName
+ def subName(from: Int, to: Int): TermName =
+ newTermName(chrs, start + from, to - from)
+
+ def nameKind = "term"
+ protected def createCompanionName(h: Int): TypeName
+ }
+
+ implicit val TermNameTag = ClassTag[TermName](classOf[TermName])
+
+ sealed abstract class TypeName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) {
+ type ThisNameType = TypeName
+ protected[this] def thisName: TypeName = this
+
+ var next: TypeName = typeHashtable(hash)
+ typeHashtable(hash) = this
+ def isTermName: Boolean = false
+ def isTypeName: Boolean = true
+ def toTermName: TermName = {
+ val h = hashValue(chrs, index, len) & HASH_MASK
+ var n = termHashtable(h)
+ while ((n ne null) && n.start != index)
+ n = n.next
+
+ if (n ne null) n
+ else createCompanionName(h)
+ }
+ def toTypeName: TypeName = this
+ def newName(str: String): TypeName = newTypeName(str)
+ def companionName: TermName = toTermName
+ def subName(from: Int, to: Int): TypeName =
+ newTypeName(chrs, start + from, to - from)
+
+ def nameKind = "type"
+ override def decode = if (nameDebug) super.decode + "!" else super.decode
+ protected def createCompanionName(h: Int): TermName
+ }
+
+ implicit val TypeNameTag = ClassTag[TypeName](classOf[TypeName])
+}
diff --git a/src/reflect/scala/reflect/internal/Phase.scala b/src/reflect/scala/reflect/internal/Phase.scala
new file mode 100644
index 0000000..c0f4232
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Phase.scala
@@ -0,0 +1,66 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+abstract class Phase(val prev: Phase) {
+ if ((prev ne null) && (prev ne NoPhase))
+ prev.nx = this
+
+ type Id = Int
+ val id: Id = if (prev eq null) 0 else prev.id + 1
+
+ /** New flags visible after this phase has completed */
+ def nextFlags: Long = 0l
+
+ /** New flags visible once this phase has started */
+ def newFlags: Long = 0l
+
+ val fmask = (
+ if (prev eq null) Flags.InitialFlags
+ else prev.flagMask | prev.nextFlags | newFlags
+ )
+ def flagMask: Long = fmask
+
+ private var nx: Phase = this
+
+ def next: Phase = nx
+ def hasNext = next != this
+ def iterator = Iterator.iterate(this)(_.next) takeWhile (p => p.next != p)
+
+ def name: String
+ def description: String = name
+ // Will running with -Ycheck:name work?
+ def checkable: Boolean = true
+ def specialized: Boolean = false
+ def erasedTypes: Boolean = false
+ def flatClasses: Boolean = false
+ def refChecked: Boolean = false
+
+ /** This is used only in unsafeTypeParams, and at this writing is
+ * overridden to false in parser, namer, typer, and erasure. (And NoPhase.)
+ */
+ def keepsTypeParams = true
+ def run(): Unit
+
+ override def toString() = name
+ override def hashCode = id.## + name.##
+ override def equals(other: Any) = other match {
+ case x: Phase => id == x.id && name == x.name
+ case _ => false
+ }
+}
+
+object NoPhase extends Phase(null) {
+ def name = "<no phase>"
+ override def keepsTypeParams = false
+ def run() { throw new Error("NoPhase.run") }
+}
+
+object SomePhase extends Phase(NoPhase) {
+ def name = "<some phase>"
+ def run() { throw new Error("SomePhase.run") }
+}
diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala
new file mode 100644
index 0000000..faa161d
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Positions.scala
@@ -0,0 +1,65 @@
+package scala.reflect
+package internal
+
+trait Positions extends api.Positions { self: SymbolTable =>
+
+ type Position = scala.reflect.internal.util.Position
+ val NoPosition = scala.reflect.internal.util.NoPosition
+ implicit val PositionTag = ClassTag[Position](classOf[Position])
+
+ /** A position that wraps a set of trees.
+ * The point of the wrapping position is the point of the default position.
+ * If some of the trees are ranges, returns a range position enclosing all ranges
+ * Otherwise returns default position that is either focused or not.
+ */
+ def wrappingPos(default: Position, trees: List[Tree]) = wrappingPos(default, trees, true)
+ // NOTE(review): this base implementation ignores `trees` and `focus` and
+ // just returns `default`; the range-aware behavior described above is
+ // presumably supplied by an override elsewhere — confirm in the compiler.
+ def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = default
+
+ /** A position that wraps the non-empty set of trees.
+ * The point of the wrapping position is the point of the first trees' position.
+ * If some of the trees are non-synthetic, returns a range position enclosing the non-synthetic trees
+ * Otherwise returns a synthetic offset position to point.
+ */
+ def wrappingPos(trees: List[Tree]): Position = trees.head.pos
+
+ /** Ensure that given tree has no positions that overlap with
+ * any of the positions of `others`. This is done by
+ * shortening the range, assigning TransparentPositions
+ * to some of the nodes in `tree` or focusing on the position.
+ */
+ def ensureNonOverlapping(tree: Tree, others: List[Tree]){ ensureNonOverlapping(tree, others, true) }
+ // NOTE(review): no-op in this base trait; presumably only meaningful when
+ // range positions are enabled via an overriding implementation — confirm.
+ def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) {}
+
+ // A traverser with a settable position, used by atPos below.
+ trait PosAssigner extends Traverser {
+ var pos: Position
+ }
+ protected[this] lazy val posAssigner: PosAssigner = new DefaultPosAssigner
+
+ protected class DefaultPosAssigner extends PosAssigner {
+ var pos: Position = _
+ override def traverse(t: Tree) {
+ if (t eq EmptyTree) ()
+ else if (t.pos == NoPosition) {
+ t.setPos(pos)
+ super.traverse(t) // TODO: bug? shouldn't the traverse be outside of the if?
+ // @PP: it's pruning whenever it encounters a node with a
+ // position, which I interpret to mean that (in the author's
+ // mind at least) either the children of a positioned node will
+ // already be positioned, or the children of a positioned node
+ // do not merit positioning.
+ //
+ // Whatever the author's rationale, it does seem like a bad idea
+ // to press on through a positioned node to find unpositioned
+ // children beneath it and then to assign whatever happens to
+ // be in `pos` to such nodes. There are supposed to be some
+ // position invariants which I can't imagine surviving that.
+ }
+ }
+ }
+
+ /** Assigns `pos` to `tree` and its unpositioned descendants (pruning at
+ * already-positioned nodes, per DefaultPosAssigner) and returns the tree.
+ */
+ def atPos[T <: Tree](pos: Position)(tree: T): T = {
+ posAssigner.pos = pos
+ posAssigner.traverse(tree)
+ tree
+ }
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
new file mode 100644
index 0000000..35cb749
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -0,0 +1,691 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+// todo. we need to unify this prettyprinter with NodePrinters
+
+package scala.reflect
+package internal
+
+import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
+import Flags._
+import scala.compat.Platform.EOL
+
+trait Printers extends api.Printers { self: SymbolTable =>
+
+ //nsc import treeInfo.{ IsTrue, IsFalse }
+
+ final val showOuterTests = false
+
+ /** Adds backticks if the name is a scala keyword. */
+ def quotedName(name: Name, decode: Boolean): String = {
+ val s = if (decode) name.decode else name.toString
+ val term = name.toTermName
+ if (nme.keywords(term) && term != nme.USCOREkw) "`%s`" format s
+ else s
+ }
+ def quotedName(name: Name): String = quotedName(name, false)
+ def quotedName(name: String): String = quotedName(newTermName(name), false)
+
+ // Renders a tree's symbol name (falling back to `name` when the tree has no
+ // symbol), quoting keywords and appending "#id" / "#kind" suffixes when the
+ // -uniqid / -Yshow-symkinds settings are enabled. Error symbols render as
+ // "<name: error>".
+ private def symNameInternal(tree: Tree, name: Name, decoded: Boolean): String = {
+ val sym = tree.symbol
+ if (sym.name.toString == nme.ERROR.toString) {
+ "<" + quotedName(name, decoded) + ": error>"
+ } else if (sym != null && sym != NoSymbol) {
+ val prefix = if (sym.isMixinConstructor) "/*%s*/".format(quotedName(sym.owner.name, decoded)) else ""
+ var suffix = ""
+ if (settings.uniqid.value) suffix += ("#" + sym.id)
+ if (settings.Yshowsymkinds.value) suffix += ("#" + sym.abbreviatedKindString)
+ prefix + quotedName(tree.symbol.decodedName) + suffix
+ } else {
+ quotedName(name, decoded)
+ }
+ }
+
+ def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, true)
+ def symName(tree: Tree, name: Name) = symNameInternal(tree, name, false)
+
+ /** Turns a path into a String, introducing backquotes
+ * as necessary.
+ */
+ def backquotedPath(t: Tree): String = {
+ t match {
+ case Select(qual, name) if name.isTermName => "%s.%s".format(backquotedPath(qual), symName(t, name))
+ case Select(qual, name) if name.isTypeName => "%s#%s".format(backquotedPath(qual), symName(t, name))
+ case Ident(name) => symName(t, name)
+ case _ => t.toString
+ }
+ }
+
+ /** Pretty-prints trees in (approximate) Scala source syntax. */
+ class TreePrinter(out: PrintWriter) extends super.TreePrinter {
+ protected var indentMargin = 0
+ protected val indentStep = 2
+ protected var indentString = " " // 40
+
+ printTypes = settings.printtypes.value
+ printIds = settings.uniqid.value
+ printKinds = settings.Yshowsymkinds.value
+ printMirrors = false // typically there's no point to print mirrors inside the compiler, as there is only one mirror there
+ protected def doPrintPositions = settings.Xprintpos.value
+
+ def indent() = indentMargin += indentStep
+ def undent() = indentMargin -= indentStep
+
+ def printPosition(tree: Tree) = if (doPrintPositions) print(tree.pos.show)
+
+ // Emits a newline followed by the current indentation margin, doubling the
+ // cached indent string until it is long enough.
+ def println() {
+ out.println()
+ while (indentMargin > indentString.length())
+ indentString += indentString
+ if (indentMargin > 0)
+ out.write(indentString, 0, indentMargin)
+ }
+
+ /** Prints each element of `ls` with `printelem`, emitting `printsep`
+ * between consecutive elements.
+ */
+ def printSeq[a](ls: List[a])(printelem: a => Unit)(printsep: => Unit) {
+ ls match {
+ case List() =>
+ case List(x) => printelem(x)
+ case x :: rest => printelem(x); printsep; printSeq(rest)(printelem)(printsep)
+ }
+ }
+
+ // Prints `ts` vertically, one element per (indented) line, between
+ // `start` and `end`, with `sep` after each element but the last.
+ def printColumn(ts: List[Tree], start: String, sep: String, end: String) {
+ print(start); indent; println()
+ printSeq(ts){print(_)}{print(sep); println()}; undent; println(); print(end)
+ }
+
+ // Prints `ts` horizontally between `start` and `end`, separated by `sep`.
+ def printRow(ts: List[Tree], start: String, sep: String, end: String) {
+ print(start); printSeq(ts){print(_)}{print(sep)}; print(end)
+ }
+
+ def printRow(ts: List[Tree], sep: String) { printRow(ts, "", sep, "") }
+
+ def printTypeParams(ts: List[TypeDef]) {
+ if (!ts.isEmpty) {
+ print("["); printSeq(ts){ t =>
+ printAnnotations(t)
+ printParam(t)
+ }{print(", ")}; print("]")
+ }
+ }
+
+ def printLabelParams(ps: List[Ident]) {
+ print("(")
+ printSeq(ps){printLabelParam}{print(", ")}
+ print(")")
+ }
+
+ def printLabelParam(p: Ident) {
+ print(symName(p, p.name)); printOpt(": ", TypeTree() setType p.tpe)
+ }
+
+ def printValueParams(ts: List[ValDef]) {
+ print("(")
+ if (!ts.isEmpty) printFlags(ts.head.mods.flags & IMPLICIT, "")
+ printSeq(ts){printParam}{print(", ")}
+ print(")")
+ }
+
+ // Prints a single value or type parameter (only ValDef/TypeDef expected).
+ def printParam(tree: Tree) {
+ tree match {
+ case ValDef(mods, name, tp, rhs) =>
+ printPosition(tree)
+ printAnnotations(tree)
+ print(symName(tree, name)); printOpt(": ", tp); printOpt(" = ", rhs)
+ case TypeDef(mods, name, tparams, rhs) =>
+ printPosition(tree)
+ print(symName(tree, name))
+ printTypeParams(tparams); print(rhs)
+ }
+ }
+
+ // Prints `tree`, wrapping it in braces unless it already is a Block.
+ def printBlock(tree: Tree) {
+ tree match {
+ case Block(_, _) =>
+ print(tree)
+ case _ =>
+ printColumn(List(tree), "{", ";", "}")
+ }
+ }
+
+ private def symFn[T](tree: Tree, f: Symbol => T, orElse: => T): T = tree.symbol match {
+ case null | NoSymbol => orElse
+ case sym => f(sym)
+ }
+ private def ifSym(tree: Tree, p: Symbol => Boolean) = symFn(tree, p, false)
+
+ // Prints `prefix` followed by `tree`, but only when `tree` is non-empty.
+ def printOpt(prefix: String, tree: Tree) {
+ if (!tree.isEmpty) { print(prefix, tree) }
+ }
+
+ // Prints the modifier flags of `tree` (its symbol's flags when it has one,
+ // otherwise `mods`), including any access boundary qualifier.
+ def printModifiers(tree: Tree, mods: Modifiers): Unit = printFlags(
+ if (tree.symbol == NoSymbol) mods.flags else tree.symbol.flags, "" + (
+ if (tree.symbol == NoSymbol) mods.privateWithin
+ else if (tree.symbol.hasAccessBoundary) tree.symbol.privateWithin.name
+ else ""
+ )
+ )
+
+ def printFlags(flags: Long, privateWithin: String) {
+ var mask: Long = if (settings.debug.value) -1L else PrintableFlags
+ val s = flagsToString(flags & mask, privateWithin)
+ if (s != "") print(s + " ")
+ }
+
+ def printAnnotations(tree: Tree) {
+ // SI-5885: by default this won't print annotations of not yet initialized symbols
+ val annots = tree.symbol.annotations match {
+ case Nil => tree.asInstanceOf[MemberDef].mods.annotations
+ case anns => anns
+ }
+ annots foreach (annot => print("@"+annot+" "))
+ }
+
+ private var currentOwner: Symbol = NoSymbol
+ private var selectorType: Type = NoType
+
+ // Main dispatch: renders one tree node in source-like syntax, recursing
+ // through print(...); unknown node kinds fall through to xprintTree.
+ def printTree(tree: Tree) {
+ tree match {
+ case EmptyTree =>
+ print("<empty>")
+
+ case ClassDef(mods, name, tparams, impl) =>
+ printAnnotations(tree)
+ printModifiers(tree, mods)
+ val word =
+ if (mods.isTrait) "trait"
+ else if (ifSym(tree, _.isModuleClass)) "object"
+ else "class"
+
+ print(word, " ", symName(tree, name))
+ printTypeParams(tparams)
+ print(if (mods.isDeferred) " <: " else " extends ", impl)
+
+ case PackageDef(packaged, stats) =>
+ printAnnotations(tree)
+ print("package ", packaged); printColumn(stats, " {", ";", "}")
+
+ case ModuleDef(mods, name, impl) =>
+ printAnnotations(tree)
+ printModifiers(tree, mods);
+ print("object " + symName(tree, name), " extends ", impl)
+
+ case ValDef(mods, name, tp, rhs) =>
+ printAnnotations(tree)
+ printModifiers(tree, mods)
+ print(if (mods.isMutable) "var " else "val ", symName(tree, name))
+ printOpt(": ", tp)
+ if (!mods.isDeferred)
+ print(" = ", if (rhs.isEmpty) "_" else rhs)
+
+ case DefDef(mods, name, tparams, vparamss, tp, rhs) =>
+ printAnnotations(tree)
+ printModifiers(tree, mods)
+ print("def " + symName(tree, name))
+ printTypeParams(tparams); vparamss foreach printValueParams
+ printOpt(": ", tp); printOpt(" = ", rhs)
+
+ case TypeDef(mods, name, tparams, rhs) =>
+ if (mods hasFlag (PARAM | DEFERRED)) {
+ printAnnotations(tree)
+ printModifiers(tree, mods); print("type "); printParam(tree)
+ } else {
+ printAnnotations(tree)
+ printModifiers(tree, mods); print("type " + symName(tree, name))
+ printTypeParams(tparams); printOpt(" = ", rhs)
+ }
+
+ case LabelDef(name, params, rhs) =>
+ print(symName(tree, name)); printLabelParams(params); printBlock(rhs)
+
+ case Import(expr, selectors) =>
+ // Is this selector remapping a name (i.e, {name1 => name2})
+ def isNotRemap(s: ImportSelector) : Boolean = (s.name == nme.WILDCARD || s.name == s.rename)
+ def selectorToString(s: ImportSelector): String = {
+ val from = quotedName(s.name)
+ if (isNotRemap(s)) from
+ else from + "=>" + quotedName(s.rename)
+ }
+ print("import ", backquotedPath(expr), ".")
+ selectors match {
+ case List(s) =>
+ // If there is just one selector and it is not remapping a name, no braces are needed
+ if (isNotRemap(s)) print(selectorToString(s))
+ else print("{", selectorToString(s), "}")
+ // If there is more than one selector braces are always needed
+ case many =>
+ print(many.map(selectorToString).mkString("{", ", ", "}"))
+ }
+
+ case Template(parents, self, body) =>
+ val currentOwner1 = currentOwner
+ if (tree.symbol != NoSymbol) currentOwner = tree.symbol.owner
+// if (parents exists isReferenceToAnyVal) {
+// print("AnyVal")
+// }
+// else {
+ printRow(parents, " with ")
+ if (!body.isEmpty) {
+ if (self.name != nme.WILDCARD) {
+ print(" { ", self.name); printOpt(": ", self.tpt); print(" => ")
+ } else if (!self.tpt.isEmpty) {
+ print(" { _ : ", self.tpt, " => ")
+ } else {
+ print(" {")
+ }
+ printColumn(body, "", ";", "}")
+ }
+// }
+ currentOwner = currentOwner1
+
+ case Block(stats, expr) =>
+ printColumn(stats ::: List(expr), "{", ";", "}")
+
+ case Match(selector, cases) =>
+ // Save/restore selectorType so nested matches print correctly.
+ val selectorType1 = selectorType
+ selectorType = selector.tpe
+ print(selector); printColumn(cases, " match {", "", "}")
+ selectorType = selectorType1
+
+ case CaseDef(pat, guard, body) =>
+ print("case ")
+ def patConstr(pat: Tree): Tree = pat match {
+ case Apply(fn, args) => patConstr(fn)
+ case _ => pat
+ }
+ if (showOuterTests &&
+ needsOuterTest(
+ patConstr(pat).tpe.finalResultType, selectorType, currentOwner))
+ print("???")
+ print(pat); printOpt(" if ", guard)
+ print(" => ", body)
+
+ case Alternative(trees) =>
+ printRow(trees, "(", "| ", ")")
+
+ case Star(elem) =>
+ print("(", elem, ")*")
+
+ case Bind(name, t) =>
+ print("(", symName(tree, name), " @ ", t, ")")
+
+ case UnApply(fun, args) =>
+ print(fun, " <unapply> "); printRow(args, "(", ", ", ")")
+
+ case ArrayValue(elemtpt, trees) =>
+ print("Array[", elemtpt); printRow(trees, "]{", ", ", "}")
+
+ case Function(vparams, body) =>
+ print("("); printValueParams(vparams); print(" => ", body, ")")
+ if (printIds && tree.symbol != null) print("#"+tree.symbol.id)
+
+ case Assign(lhs, rhs) =>
+ print(lhs, " = ", rhs)
+
+ case AssignOrNamedArg(lhs, rhs) =>
+ print(lhs, " = ", rhs)
+
+ case If(cond, thenp, elsep) =>
+ print("if (", cond, ")"); indent; println()
+ print(thenp); undent
+ if (!elsep.isEmpty) {
+ println(); print("else"); indent; println(); print(elsep); undent
+ }
+
+ case Return(expr) =>
+ print("return ", expr)
+
+ case Try(block, catches, finalizer) =>
+ print("try "); printBlock(block)
+ if (!catches.isEmpty) printColumn(catches, " catch {", "", "}")
+ printOpt(" finally ", finalizer)
+
+ case Throw(expr) =>
+ print("throw ", expr)
+
+ case New(tpe) =>
+ print("new ", tpe)
+
+ case Typed(expr, tp) =>
+ print("(", expr, ": ", tp, ")")
+
+ case TypeApply(fun, targs) =>
+ print(fun); printRow(targs, "[", ", ", "]")
+
+ case Apply(fun, vargs) =>
+ print(fun); printRow(vargs, "(", ", ", ")")
+
+ case ApplyDynamic(qual, vargs) =>
+ print("<apply-dynamic>(", qual, "#", tree.symbol.nameString)
+ printRow(vargs, ", (", ", ", "))")
+
+ case Super(This(qual), mix) =>
+ if (!qual.isEmpty || tree.symbol != NoSymbol) print(symName(tree, qual) + ".")
+ print("super")
+ if (!mix.isEmpty)
+ print("[" + mix + "]")
+
+ case Super(qual, mix) =>
+ print(qual, ".super")
+ if (!mix.isEmpty)
+ print("[" + mix + "]")
+
+ case This(qual) =>
+ if (!qual.isEmpty) print(symName(tree, qual) + ".")
+ print("this")
+
+ case Select(qual @ New(tpe), name) if (!settings.debug.value) =>
+ print(qual)
+
+ case Select(qualifier, name) =>
+ print(backquotedPath(qualifier), ".", symName(tree, name))
+
+ case id @ Ident(name) =>
+ val str = symName(tree, name)
+ print( if (id.isBackquoted) "`" + str + "`" else str )
+
+ case Literal(x) =>
+ print(x.escapedStringValue)
+
+ case tt: TypeTree =>
+ if ((tree.tpe eq null) || (doPrintPositions && tt.original != null)) {
+ if (tt.original != null) print("<type: ", tt.original, ">")
+ else print("<type ?>")
+ } else if ((tree.tpe.typeSymbol ne null) && tree.tpe.typeSymbol.isAnonymousClass) {
+ print(tree.tpe.typeSymbol.toString)
+ } else {
+ print(tree.tpe.toString)
+ }
+
+ case Annotated(Apply(Select(New(tpt), nme.CONSTRUCTOR), args), tree) =>
+ def printAnnot() {
+ print("@", tpt)
+ if (!args.isEmpty)
+ printRow(args, "(", ",", ")")
+ }
+ print(tree, if (tree.isType) " " else ": ")
+ printAnnot()
+
+ case SingletonTypeTree(ref) =>
+ print(ref, ".type")
+
+ case SelectFromTypeTree(qualifier, selector) =>
+ print(qualifier, "#", symName(tree, selector))
+
+ case CompoundTypeTree(templ) =>
+ print(templ)
+
+ case AppliedTypeTree(tp, args) =>
+ print(tp); printRow(args, "[", ", ", "]")
+
+ case TypeBoundsTree(lo, hi) =>
+ printOpt(" >: ", lo); printOpt(" <: ", hi)
+
+ case ExistentialTypeTree(tpt, whereClauses) =>
+ print(tpt);
+ printColumn(whereClauses, " forSome { ", ";", "}")
+
+// SelectFromArray is no longer visible in reflect.internal.
+// eliminated until we figure out what we will do with both Printers and
+// SelectFromArray.
+// case SelectFromArray(qualifier, name, _) =>
+// print(qualifier); print(".<arr>"); print(symName(tree, name))
+
+ case tree =>
+ xprintTree(this, tree)
+ }
+ if (printTypes && tree.isTerm && !tree.isEmpty) {
+ print("{", if (tree.tpe eq null) "<null>" else tree.tpe.toString, "}")
+ }
+ }
+
+ // Heterogeneous print: trees go through printTree, names are quoted,
+ // everything else is printed via toString (null renders as "null").
+ def print(args: Any*): Unit = args foreach {
+ case tree: Tree =>
+ printPosition(tree)
+ printTree(tree)
+ case name: Name =>
+ print(quotedName(name))
+ case arg =>
+ out.print(if (arg == null) "null" else arg.toString)
+ }
+ }
+
+ /** Hook for extensions */
+ def xprintTree(treePrinter: TreePrinter, tree: Tree) =
+ treePrinter.print(tree.productPrefix+tree.productIterator.mkString("(", ", ", ")"))
+
+ def newTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
+ def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream))
+ def newTreePrinter(): TreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter))
+
+ /** A writer that writes to the current Console and
+ * is sensitive to replacement of the Console's
+ * output stream.
+ */
+ object ConsoleWriter extends Writer {
+ override def write(str: String) { Console.print(str) }
+
+ def write(cbuf: Array[Char], off: Int, len: Int) {
+ write(new String(cbuf, off, len))
+ }
+
+ def close = { /* do nothing */ }
+ def flush = { /* do nothing */ }
+ }
+
+ def newRawTreePrinter(writer: PrintWriter): RawTreePrinter = new RawTreePrinter(writer)
+ def newRawTreePrinter(stream: OutputStream): RawTreePrinter = newRawTreePrinter(new PrintWriter(stream))
+ def newRawTreePrinter(): RawTreePrinter = newRawTreePrinter(new PrintWriter(ConsoleWriter))
+
+ // provides footnotes for types and mirrors
+ import scala.collection.mutable.{Map, WeakHashMap, SortedSet}
+ private val footnoteIndex = new FootnoteIndex
+ // Assigns stable per-class footnote numbers to values (weakly held so the
+ // index does not pin them in memory).
+ private class FootnoteIndex {
+ private val index = Map[Class[_], WeakHashMap[Any, Int]]()
+ private def classIndex[T: ClassTag] = index.getOrElseUpdate(classTag[T].runtimeClass, WeakHashMap[Any, Int]())
+ private val counters = Map[Class[_], Int]()
+ private def nextCounter[T: ClassTag] = {
+ val clazz = classTag[T].runtimeClass
+ counters.getOrElseUpdate(clazz, 0)
+ counters(clazz) = counters(clazz) + 1
+ counters(clazz)
+ }
+
+ def mkFootnotes() = new Footnotes
+ // The set of footnotes referenced during one printing session.
+ class Footnotes {
+ private val footnotes = Map[Class[_], SortedSet[Int]]()
+ private def classFootnotes[T: ClassTag] = footnotes.getOrElseUpdate(classTag[T].runtimeClass, SortedSet[Int]())
+
+ def put[T: ClassTag](any: T): Int = {
+ val index = classIndex[T].getOrElseUpdate(any, nextCounter[T])
+ classFootnotes[T] += index
+ index
+ }
+
+ def get[T: ClassTag]: List[(Int, Any)] =
+ classFootnotes[T].toList map (fi => (fi, classIndex[T].find{ case (any, ii) => ii == fi }.get._1))
+
+ def print[T: ClassTag](printer: Printers.super.TreePrinter): Unit = {
+ val footnotes = get[T]
+ if (footnotes.nonEmpty) {
+ printer.print(EOL)
+ footnotes.zipWithIndex foreach {
+ case ((fi, any), ii) =>
+ printer.print("[", fi, "] ", any)
+ if (ii < footnotes.length - 1) printer.print(EOL)
+ }
+ }
+ }
+ }
+ }
+
+ // emits more or less verbatim representation of the provided tree
+ class RawTreePrinter(out: PrintWriter) extends super.TreePrinter {
+ private var depth = 0
+ private var printTypesInFootnotes = true
+ private var printingFootnotes = false
+ private var footnotes = footnoteIndex.mkFootnotes()
+
+ def print(args: Any*): Unit = {
+ // don't print type footnotes if the argument is a mere type
+ if (depth == 0 && args.length == 1 && args(0) != null && args(0).isInstanceOf[Type])
+ printTypesInFootnotes = false
+
+ depth += 1
+ args foreach {
+ case expr: Expr[_] =>
+ print("Expr")
+ if (printTypes) print(expr.staticType)
+ print("(")
+ print(expr.tree)
+ print(")")
+ case EmptyTree =>
+ print("EmptyTree")
+ case emptyValDef: AnyRef if emptyValDef eq self.emptyValDef =>
+ print("emptyValDef")
+ case tree: Tree =>
+ val hasSymbol = tree.hasSymbol && tree.symbol != NoSymbol
+ val isError = hasSymbol && tree.symbol.name.toString == nme.ERROR.toString
+ printProduct(
+ tree,
+ preamble = _ => {
+ print(tree.productPrefix)
+ if (printTypes && tree.tpe != null) print(tree.tpe)
+ },
+ body = {
+ case name: Name =>
+ if (isError) {
+ if (isError) print("<")
+ print(name)
+ if (isError) print(": error>")
+ } else if (hasSymbol) {
+ tree match {
+ case refTree: RefTree =>
+ if (tree.symbol.name != refTree.name) print("[", tree.symbol, " aka ", refTree.name, "]")
+ else print(tree.symbol)
+ case defTree: DefTree =>
+ print(tree.symbol)
+ case _ =>
+ print(tree.symbol.name)
+ }
+ } else {
+ print(name)
+ }
+ case Constant(s: String) =>
+ print("Constant(\"" + s + "\")")
+ case Constant(null) =>
+ print("Constant(null)")
+ case Constant(value) =>
+ print("Constant(" + value + ")")
+ case arg =>
+ print(arg)
+ },
+ postamble = {
+ case tree @ TypeTree() if tree.original != null => print(".setOriginal(", tree.original, ")")
+ case _ => // do nothing
+ })
+ case sym: Symbol =>
+ if (sym == NoSymbol) print("NoSymbol")
+ else if (sym.isStatic && (sym.isClass || sym.isModule)) print(sym.fullName)
+ else print(sym.name)
+ if (printIds) print("#", sym.id)
+ if (printKinds) print("#", sym.abbreviatedKindString)
+ if (printMirrors) print("%M", footnotes.put[scala.reflect.api.Mirror[_]](mirrorThatLoaded(sym)))
+ case tag: TypeTag[_] =>
+ print("TypeTag(", tag.tpe, ")")
+ case tag: WeakTypeTag[_] =>
+ print("WeakTypeTag(", tag.tpe, ")")
+ case tpe: Type =>
+ val defer = printTypesInFootnotes && !printingFootnotes
+ if (defer) print("[", footnotes.put(tpe), "]")
+ else tpe match {
+ case NoType => print("NoType")
+ case NoPrefix => print("NoPrefix")
+ case _ => printProduct(tpe.asInstanceOf[Product])
+ }
+ case mods: Modifiers =>
+ print("Modifiers(")
+ if (mods.flags != NoFlags || mods.privateWithin != tpnme.EMPTY || mods.annotations.nonEmpty) print(show(mods.flags))
+ if (mods.privateWithin != tpnme.EMPTY || mods.annotations.nonEmpty) { print(", "); print(mods.privateWithin) }
+ if (mods.annotations.nonEmpty) { print(", "); print(mods.annotations); }
+ print(")")
+ case name: Name =>
+ print(show(name))
+ case scope: Scope =>
+ print("Scope")
+ printIterable(scope.toList)
+ case list: List[_] =>
+ print("List")
+ printIterable(list)
+ case product: Product =>
+ printProduct(product)
+ case arg =>
+ out.print(arg)
+ }
+ depth -= 1
+ // After the outermost print call completes, flush deferred footnotes.
+ if (depth == 0 && !printingFootnotes) {
+ printingFootnotes = true
+ footnotes.print[Type](this)
+ footnotes.print[scala.reflect.api.Mirror[_]](this)
+ printingFootnotes = false
+ }
+ }
+
+ def printProduct(
+ p: Product,
+ preamble: Product => Unit = p => print(p.productPrefix),
+ body: Any => Unit = print(_),
+ postamble: Product => Unit = p => print("")): Unit =
+ {
+ preamble(p)
+ printIterable(p.productIterator.toList, body = body)
+ postamble(p)
+ }
+
+ def printIterable(
+ iterable: List[_],
+ preamble: => Unit = print(""),
+ body: Any => Unit = print(_),
+ postamble: => Unit = print("")): Unit =
+ {
+ preamble
+ print("(")
+ val it = iterable.iterator
+ while (it.hasNext) {
+ body(it.next)
+ print(if (it.hasNext) ", " else "")
+ }
+ print(")")
+ postamble
+ }
+ }
+
+ // Renders well-known names as their nme/tpnme constant, everything else
+ // as a newTermName/newTypeName construction.
+ def show(name: Name): String = name match {
+ case tpnme.WILDCARD => "tpnme.WILDCARD"
+ case tpnme.EMPTY => "tpnme.EMPTY"
+ case tpnme.ERROR => "tpnme.ERROR"
+ case tpnme.PACKAGE => "tpnme.PACKAGE"
+ case tpnme.WILDCARD_STAR => "tpnme.WILDCARD_STAR"
+ case nme.WILDCARD => "nme.WILDCARD"
+ case nme.EMPTY => "nme.EMPTY"
+ case nme.ERROR => "tpnme.ERROR" // NOTE(review): maps nme.ERROR to "tpnme.ERROR" — looks like a copy/paste slip (cf. the tpnme.ERROR case above); verify upstream before relying on it.
+ case nme.PACKAGE => "nme.PACKAGE"
+ case nme.CONSTRUCTOR => "nme.CONSTRUCTOR"
+ case nme.ROOTPKG => "nme.ROOTPKG"
+ case _ =>
+ val prefix = if (name.isTermName) "newTermName(\"" else "newTypeName(\""
+ prefix + name.toString + "\")"
+ }
+
+ // Renders a flag set as "FLAG1 | FLAG2 | ..." (or nme.NoFlags when empty).
+ def show(flags: FlagSet): String = {
+ if (flags == NoFlags) nme.NoFlags.toString
+ else {
+ val s_flags = new scala.collection.mutable.ListBuffer[String]
+ def hasFlag(left: Long, right: Long): Boolean = (left & right) != 0
+ for (i <- 0 to 63 if hasFlag(flags, 1L << i))
+ s_flags += flagToString(1L << i).replace("<", "").replace(">", "").toUpperCase
+ s_flags mkString " | "
+ }
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/Required.scala b/src/reflect/scala/reflect/internal/Required.scala
new file mode 100644
index 0000000..842491d
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Required.scala
@@ -0,0 +1,15 @@
+package scala.reflect
+package internal
+
+import settings.MutableSettings
+
+// Abstract members that a concrete SymbolTable host must supply.
+trait Required { self: SymbolTable =>
+
+ // The pickler phase of the enclosing symbol table's phase chain.
+ def picklerPhase: Phase
+
+ // The host's settings object (see settings.MutableSettings).
+ def settings: MutableSettings
+
+ // Presumably true when hosted by the interactive/presentation compiler — confirm at the implementation site.
+ def forInteractive: Boolean
+
+ // Presumably true when hosted by Scaladoc — confirm at the implementation site.
+ def forScaladoc: Boolean
+}
diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala
new file mode 100644
index 0000000..ab3b9b7
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Scopes.scala
@@ -0,0 +1,401 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+trait Scopes extends api.Scopes { self: SymbolTable =>
+
+ // A node holding one symbol; threaded through both the scope's element
+ // list (`next`) and, once a hash table exists, a hash bucket (`tail`).
+ class ScopeEntry(val sym: Symbol, val owner: Scope) {
+ /** the next entry in the hash bucket
+ */
+ var tail: ScopeEntry = null
+
+ /** the next entry in this scope
+ */
+ var next: ScopeEntry = null
+
+ override def hashCode(): Int = sym.name.start
+ override def toString(): String = sym.toString()
+ }
+
+ /** Creates an entry for `sym` and prepends it to `owner`'s element list.
+ * Note: the hash table (if any) is updated separately, in enterEntry.
+ */
+ private def newScopeEntry(sym: Symbol, owner: Scope): ScopeEntry = {
+ val e = new ScopeEntry(sym, owner)
+ e.next = owner.elems
+ owner.elems = e
+ e
+ }
+
+ object Scope {
+ def unapplySeq(decls: Scope): Some[Seq[Symbol]] = Some(decls.toList)
+ }
+
+ /** Note: constructor is protected to force everyone to use the factory methods newScope or newNestedScope instead.
+ * This is necessary because when run from reflection every scope needs to have a
+ * SynchronizedScope as mixin.
+ */
+ class Scope protected[Scopes] (initElems: ScopeEntry = null, initFingerPrints: Long = 0L) extends ScopeApi with MemberScopeApi {
+
+ // Nested-scope constructor: shares `base`'s element list, one level deeper.
+ protected[Scopes] def this(base: Scope) = {
+ this(base.elems)
+ nestinglevel = base.nestinglevel + 1
+ }
+
+ private[scala] var elems: ScopeEntry = initElems
+
+ /** The number of times this scope is nested in another
+ */
+ private var nestinglevel = 0
+
+ /** the hash table
+ */
+ private var hashtable: Array[ScopeEntry] = null
+
+ /** a cache for all elements, to be used by symbol iterator.
+ */
+ private var elemsCache: List[Symbol] = null
+
+ /** size and mask of hash tables
+ * todo: make hashtables grow?
+ */
+ private val HASHSIZE = 0x80
+ private val HASHMASK = 0x7f
+
+ /** the threshold number of entries from which a hashtable is constructed.
+ */
+ private val MIN_HASH = 8
+
+ // Scopes constructed from an existing element list may already be large
+ // enough to warrant a hash table.
+ if (size >= MIN_HASH) createHash()
+
+ /** Returns a new scope with the same content as this one. */
+ def cloneScope: Scope = newScopeWith(this.toList: _*)
+
+ /** is the scope empty? */
+ override def isEmpty: Boolean = elems eq null
+
+ /** the number of entries in this scope */
+ override def size: Int = {
+ var s = 0
+ var e = elems
+ while (e ne null) {
+ s += 1
+ e = e.next
+ }
+ s
+ }
+
+ /** enter a scope entry: invalidates the element cache and maintains the
+ * hash table (creating it once the size threshold is reached).
+ * The entry itself was already prepended to `elems` by newScopeEntry.
+ */
+ protected def enterEntry(e: ScopeEntry) {
+ elemsCache = null
+ if (hashtable ne null)
+ enterInHash(e)
+ else if (size >= MIN_HASH)
+ createHash()
+ }
+
+ // Prepends `e` to the hash bucket chosen by its symbol's name.
+ private def enterInHash(e: ScopeEntry): Unit = {
+ val i = e.sym.name.start & HASHMASK
+ e.tail = hashtable(i)
+ hashtable(i) = e
+ }
+
+ /** enter a symbol and return it.
+ */
+ def enter[T <: Symbol](sym: T): T = {
+ enterEntry(newScopeEntry(sym, this))
+ sym
+ }
+
+ /** enter a symbol, asserting that no symbol with same name exists in scope
+ */
+ def enterUnique(sym: Symbol) {
+ assert(lookup(sym.name) == NoSymbol, (sym.fullLocationString, lookup(sym.name).fullLocationString))
+ enter(sym)
+ }
+
+ private def createHash() {
+ hashtable = new Array[ScopeEntry](HASHSIZE)
+ enterAllInHash(elems)
+ }
+
+ // Hashes the whole element list, preserving insertion order in buckets.
+ // Recurses up to maxRecursions deep, then switches to an explicit list
+ // reversal to avoid stack overflow on very long element chains.
+ private def enterAllInHash(e: ScopeEntry, n: Int = 0) {
+ if (e ne null) {
+ if (n < maxRecursions) {
+ enterAllInHash(e.next, n + 1)
+ enterInHash(e)
+ } else {
+ var entries: List[ScopeEntry] = List()
+ var ee = e
+ while (ee ne null) {
+ entries = ee :: entries
+ ee = ee.next
+ }
+ entries foreach enterInHash
+ }
+ }
+ }
+
+ // Moves `sym`'s entry from the bucket of its current name to the bucket
+ // of `newname` (no-op if there is no hash table or no matching entry).
+ def rehash(sym: Symbol, newname: Name) {
+ if (hashtable ne null) {
+ val index = sym.name.start & HASHMASK
+ var e1 = hashtable(index)
+ var e: ScopeEntry = null
+ if (e1 != null) {
+ if (e1.sym == sym) {
+ hashtable(index) = e1.tail
+ e = e1
+ } else {
+ while (e1.tail != null && e1.tail.sym != sym) e1 = e1.tail
+ if (e1.tail != null) {
+ e = e1.tail
+ e1.tail = e.tail
+ }
+ }
+ }
+ if (e != null) {
+ val newindex = newname.start & HASHMASK
+ e.tail = hashtable(newindex)
+ hashtable(newindex) = e
+ }
+ }
+ }
+
+ /** remove entry `e` from both the element list and its hash bucket.
+ */
+ def unlink(e: ScopeEntry) {
+ if (elems == e) {
+ elems = e.next
+ } else {
+ var e1 = elems
+ while (e1.next != e) e1 = e1.next
+ e1.next = e.next
+ }
+ if (hashtable ne null) {
+ val index = e.sym.name.start & HASHMASK
+ var e1 = hashtable(index)
+ if (e1 == e) {
+ hashtable(index) = e.tail
+ } else {
+ while (e1.tail != e) e1 = e1.tail;
+ e1.tail = e.tail
+ }
+ }
+ elemsCache = null
+ }
+
+ /** remove symbol (all entries whose symbol is `sym`) */
+ def unlink(sym: Symbol) {
+ var e = lookupEntry(sym.name)
+ while (e ne null) {
+ if (e.sym == sym) unlink(e);
+ e = lookupNextEntry(e)
+ }
+ }
+
+ /** lookup a symbol by name, returning NoSymbol when absent.
+ */
+ def lookup(name: Name): Symbol = {
+ val e = lookupEntry(name)
+ if (e eq null) NoSymbol else e.sym
+ }
+
+ /** Returns an iterator yielding every symbol with given name in this scope.
+ */
+ def lookupAll(name: Name): Iterator[Symbol] = new Iterator[Symbol] {
+ var e = lookupEntry(name)
+ def hasNext: Boolean = e ne null
+ def next(): Symbol = { val r = e.sym; e = lookupNextEntry(e); r }
+ }
+
+ /** lookup a symbol entry matching given name.
+ * @note from Martin: I believe this is a hotspot or will be one
+ * in future versions of the type system. I have reverted the previous
+ * change to use iterators as too costly.
+ */
+ def lookupEntry(name: Name): ScopeEntry = {
+ var e: ScopeEntry = null
+ if (hashtable ne null) {
+ e = hashtable(name.start & HASHMASK)
+ while ((e ne null) && e.sym.name != name) {
+ e = e.tail
+ }
+ } else {
+ e = elems
+ while ((e ne null) && e.sym.name != name) {
+ e = e.next
+ }
+ }
+ e
+ }
+
+ /** lookup next entry with same name as this one
+ * @note from Martin: I believe this is a hotspot or will be one
+ * in future versions of the type system. I have reverted the previous
+ * change to use iterators as too costly.
+ */
+ def lookupNextEntry(entry: ScopeEntry): ScopeEntry = {
+ var e = entry
+ if (hashtable ne null)
+ do { e = e.tail } while ((e ne null) && e.sym.name != entry.sym.name)
+ else
+ do { e = e.next } while ((e ne null) && e.sym.name != entry.sym.name);
+ e
+ }
+
+ /** Return all symbols as a list in the order they were entered in this scope.
+ */
+ override def toList: List[Symbol] = {
+ if (elemsCache eq null) {
+ elemsCache = Nil
+ var e = elems
+ // Stops at entries owned by an outer scope (shared via newNestedScope).
+ while ((e ne null) && e.owner == this) {
+ elemsCache = e.sym :: elemsCache
+ e = e.next
+ }
+ }
+ elemsCache
+ }
+
+ /** Vanilla scope - symbols are stored in declaration order.
+ */
+ def sorted: List[Symbol] = toList
+
+ /** Return the nesting level of this scope, i.e. the number of times this scope
+ * was nested in another */
+ def nestingLevel = nestinglevel
+
+ /** Return all symbols as an iterator in the order they were entered in this scope.
+ */
+ def iterator: Iterator[Symbol] = toList.iterator
+
+/*
+ /** Does this scope contain an entry for `sym`?
+ */
+ def contains(sym: Symbol): Boolean = lookupAll(sym.name) contains sym
+
+ /** A scope that contains all symbols of this scope and that also contains `sym`.
+ */
+ def +(sym: Symbol): Scope =
+ if (contains(sym)) this
+ else {
+ val result = cloneScope
+ result enter sym
+ result
+ }
+
+ /** A scope that contains all symbols of this scope except `sym`.
+ */
+ def -(sym: Symbol): Scope =
+ if (!contains(sym)) this
+ else {
+ val result = cloneScope
+ result unlink sym
+ result
+ }
+*/
+ override def foreach[U](p: Symbol => U): Unit = toList foreach p
+
+ // Returns `this` unchanged when every element satisfies `p`.
+ override def filter(p: Symbol => Boolean): Scope =
+ if (!(toList forall p)) newScopeWith(toList filter p: _*) else this
+
+ @deprecated("Use `toList.reverse` instead", "2.10.0")
+ def reverse: List[Symbol] = toList.reverse
+
+ override def mkString(start: String, sep: String, end: String) =
+ toList.map(_.defString).mkString(start, sep, end)
+
+ override def toString(): String = mkString("Scope{\n ", ";\n ", "\n}")
+ }
+
+ implicit val ScopeTag = ClassTag[Scope](classOf[Scope])
+
+ type MemberScope = Scope
+
+ implicit val MemberScopeTag = ClassTag[MemberScope](classOf[MemberScope])
+
+ /** Create a new scope */
+ def newScope: Scope = new Scope()
+
+ /** Create a new scope to be used in `findMembers`.
+ *
+ * But why do we need a special scope for `findMembers`?
+ * Let me tell you a story.
+ *
+ * `findMembers` creates a synthetic scope and then iterates over
+ * base classes in linearization order, and for every scrutinized class
+ * iterates over `decls`, the collection of symbols declared in that class.
+ * Declarations that fit the filter get appended to the created scope.
+ *
+ * The problem is that `decls` returns a Scope, and to iterate a scope performantly
+ * one needs to go from its end to its beginning.
+ *
+ * Hence the `findMembers` scope is populated in a wicked order:
+ * symbols that belong to the same declaring class come in reverse order of their declaration,
+ * however, the scope itself is ordered w.r.t the linearization of the target type.
+ *
+ * Once `members` became a public API, this has been confusing countless numbers of users.
+ * Therefore we introduce a special flavor of scopes to accommodate this quirk of `findMembers`
+ */
+ private[scala] def newFindMemberScope: Scope = new Scope() {
+ override def sorted = {
+ val members = toList
+ val owners = members.map(_.owner).distinct
+ val grouped = members groupBy (_.owner)
+ owners.flatMap(owner => grouped(owner).reverse)
+ }
+ }
+
+ /** Create a new scope nested in another one with which it shares its elements */
+ def newNestedScope(outer: Scope): Scope = new Scope(outer)
+
+ /** Create a new scope with given initial elements */
+ def newScopeWith(elems: Symbol*): Scope = {
+ val scope = newScope
+ elems foreach scope.enter
+ scope
+ }
+
+ /** Create new scope for the members of package `pkg` */
+ def newPackageScope(pkgClass: Symbol): Scope = newScope
+
+ /** Transform scope of members of `owner` using operation `op`
+ * This is overridden by the reflective compiler to avoid creating new scopes for packages
+ */
+ def scopeTransform(owner: Symbol)(op: => Scope): Scope = op
+
+
+ /** The empty scope (immutable).
+ */
+ object EmptyScope extends Scope {
+ override def enterEntry(e: ScopeEntry) {
+ abort("EmptyScope.enter")
+ }
+ }
+
+ /** The error scope.
+ */
+ class ErrorScope(owner: Symbol) extends Scope
+
+ // Recursion bound for enterAllInHash before it falls back to iteration.
+ private final val maxRecursions = 1000
+
+}
+
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala
new file mode 100644
index 0000000..539d191
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/StdAttachments.scala
@@ -0,0 +1,76 @@
+package scala.reflect
+package internal
+
+trait StdAttachments {
+ self: SymbolTable =>
+
+ /**
+ * Common code between reflect-internal Symbol and Tree related to Attachments.
+ */
+ trait Attachable {
+ protected var rawatt: scala.reflect.macros.Attachments { type Pos = Position } = NoPosition
+ def attachments = rawatt
+ def setAttachments(attachments: scala.reflect.macros.Attachments { type Pos = Position }): this.type = { rawatt = attachments; this }
+ def updateAttachment[T: ClassTag](attachment: T): this.type = { rawatt = rawatt.update(attachment); this }
+ def removeAttachment[T: ClassTag]: this.type = { rawatt = rawatt.remove[T]; this }
+
+ // cannot be final due to SynchronizedSymbols
+ def pos: Position = rawatt.pos
+ def pos_=(pos: Position): Unit = rawatt = (rawatt withPos pos)
+ def setPos(newpos: Position): this.type = { pos = newpos; this }
+ }
+
+ /** When present, indicates that the host `Ident` has been created from a backquoted identifier.
+ */
+ case object BackquotedIdentifierAttachment
+
+ /** Stores the trees that give rise to a refined type to be used in reification.
+ * Unfortunately typed `CompoundTypeTree` is lacking essential info, and the reifier cannot use `CompoundTypeTree.tpe`.
+ * Therefore we need this hack (see `Reshape.toPreTyperTypeTree` for a detailed explanation).
+ */
+ case class CompoundTypeTreeOriginalAttachment(parents: List[Tree], stats: List[Tree])
+
+ /** Is added by the macro engine to the results of macro expansions.
+ * Stores the original expandee as it entered the `macroExpand` function.
+ */
+ case class MacroExpansionAttachment(original: Tree)
+
+ /** When present, suppresses macro expansion for the host.
+ * This is occasionally necessary, e.g. to prohibit eta-expansion of macros.
+ *
+ * Does not affect expandability of child nodes, there's context.withMacrosDisabled for that
+ * (but think thrice before using that API - see the discussion at https://github.com/scala/scala/pull/1639).
+ */
+ case object SuppressMacroExpansionAttachment
+
+ /** Suppresses macro expansion of the tree by putting SuppressMacroExpansionAttachment on it.
+ */
+ def suppressMacroExpansion(tree: Tree) = tree.updateAttachment(SuppressMacroExpansionAttachment)
+
+ /** Unsuppresses macro expansion of the tree by removing SuppressMacroExpansionAttachment from it and its children.
+ */
+ def unsuppressMacroExpansion(tree: Tree): Tree = {
+ tree.removeAttachment[SuppressMacroExpansionAttachment.type]
+ tree match {
+ // see the comment to `isMacroExpansionSuppressed` to learn why we need
+ // a special traversal strategy here
+ case Apply(fn, _) => unsuppressMacroExpansion(fn)
+ case TypeApply(fn, _) => unsuppressMacroExpansion(fn)
+ case _ => // do nothing
+ }
+ tree
+ }
+
+ /** Determines whether a tree should not be expanded, because someone has put SuppressMacroExpansionAttachment on it or one of its children.
+ */
+ def isMacroExpansionSuppressed(tree: Tree): Boolean =
+ if (tree.attachments.get[SuppressMacroExpansionAttachment.type].isDefined) true
+ else tree match {
+ // we have to account for the fact that during typechecking an expandee might become wrapped,
+ // i.e. surrounded by an inferred implicit argument application or by an inferred type argument application.
+ // in that case the expandee itself will no longer be suppressed and we need to look at the core
+ case Apply(fn, _) => isMacroExpansionSuppressed(fn)
+ case TypeApply(fn, _) => isMacroExpansionSuppressed(fn)
+ case _ => false
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/StdCreators.scala b/src/reflect/scala/reflect/internal/StdCreators.scala
new file mode 100644
index 0000000..5e5e4f9
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/StdCreators.scala
@@ -0,0 +1,21 @@
+package scala.reflect
+package internal
+
+import scala.reflect.api.{TreeCreator, TypeCreator}
+import scala.reflect.api.{Universe => ApiUniverse}
+
+trait StdCreators {
+ self: SymbolTable =>
+
+ case class FixedMirrorTreeCreator(mirror: scala.reflect.api.Mirror[StdCreators.this.type], tree: Tree) extends TreeCreator {
+ def apply[U <: ApiUniverse with Singleton](m: scala.reflect.api.Mirror[U]): U # Tree =
+ if (m eq mirror) tree.asInstanceOf[U # Tree]
+ else throw new IllegalArgumentException(s"Expr defined in $mirror cannot be migrated to other mirrors.")
+ }
+
+ case class FixedMirrorTypeCreator(mirror: scala.reflect.api.Mirror[StdCreators.this.type], tpe: Type) extends TypeCreator {
+ def apply[U <: ApiUniverse with Singleton](m: scala.reflect.api.Mirror[U]): U # Type =
+ if (m eq mirror) tpe.asInstanceOf[U # Type]
+ else throw new IllegalArgumentException(s"Type tag defined in $mirror cannot be migrated to other mirrors.")
+ }
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
new file mode 100644
index 0000000..c3b7f24
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -0,0 +1,1241 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import java.security.MessageDigest
+import Chars.isOperatorPart
+import scala.annotation.switch
+import scala.language.implicitConversions
+import scala.collection.immutable
+import scala.io.Codec
+
+trait StdNames {
+ self: SymbolTable =>
+
+ def encode(str: String): TermName = newTermNameCached(NameTransformer.encode(str))
+
+ implicit def lowerTermNames(n: TermName): String = n.toString
+
+ /** Tensions: would like the keywords to be the very first names entered into the names
+ * storage so their ids count from 0, which simplifies the parser. Switched to abstract
+ * classes to avoid all the indirection which is generated with implementation-containing
+ * traits. Since all these classes use eager vals, that means the constructor with the
+ * keywords must run first. If it's the top in the superclass chain, then CommonNames
+ * must inherit from it, which means TypeNames would inherit keywords as well.
+ *
+ * Solution: Keywords extends CommonNames and uses early defs to beat the
+ * CommonNames constructor out of the starting gate. This is its builder.
+ */
+ private class KeywordSetBuilder {
+ private var kws: Set[TermName] = Set()
+ def apply(s: String): TermName = {
+ val result = newTermNameCached(s)
+ kws = kws + result
+ result
+ }
+ def result: Set[TermName] = {
+ val result = kws
+ kws = null
+ result
+ }
+ }
+
+ private[reflect] def compactifyName(orig: String): String = compactify(orig)
+ private final object compactify extends (String => String) {
+ val md5 = MessageDigest.getInstance("MD5")
+
+ /**
+ * COMPACTIFY
+ *
+ * The hashed name has the form (prefix + marker + md5 + marker + suffix), where
+ * - prefix/suffix.length = MaxNameLength / 4
+ * - md5.length = 32
+ *
+ * We obtain the formula:
+ *
+ * FileNameLength = 2*(MaxNameLength / 4) + 2*marker.length + 32 + 6
+ *
+ * (+6 for ".class"). MaxNameLength can therefore be computed as follows:
+ */
+ val marker = "$$$$"
+ val MaxNameLength = math.min(
+ settings.maxClassfileName.value - 6,
+ 2 * (settings.maxClassfileName.value - 6 - 2*marker.length - 32)
+ )
+ def toMD5(s: String, edge: Int): String = {
+ val prefix = s take edge
+ val suffix = s takeRight edge
+
+ val cs = s.toArray
+ val bytes = Codec toUTF8 cs
+ md5 update bytes
+ val md5chars = (md5.digest() map (b => (b & 0xFF).toHexString)).mkString
+
+ prefix + marker + md5chars + marker + suffix
+ }
+ def apply(s: String): String = (
+ if (s.length <= MaxNameLength) s
+ else toMD5(s, MaxNameLength / 4)
+ )
+ }
+
+ abstract class CommonNames extends NamesApi {
+ type NameType >: Null <: Name
+ // Masking some implicits so as to allow our targeted => NameType.
+ protected val stringToTermName = null
+ protected val stringToTypeName = null
+ protected implicit def createNameType(name: String): NameType
+
+ def flattenedName(segments: Name*): NameType =
+ compactify(segments mkString NAME_JOIN_STRING)
+
+ val MODULE_SUFFIX_STRING: String = NameTransformer.MODULE_SUFFIX_STRING
+ val NAME_JOIN_STRING: String = NameTransformer.NAME_JOIN_STRING
+ val SINGLETON_SUFFIX: String = ".type"
+
+ val ANON_CLASS_NAME: NameType = "$anon"
+ val ANON_FUN_NAME: NameType = "$anonfun"
+ val EMPTY: NameType = ""
+ val EMPTY_PACKAGE_NAME: NameType = "<empty>"
+ val IMPL_CLASS_SUFFIX = "$class"
+ val IMPORT: NameType = "<import>"
+ val MODULE_SUFFIX_NAME: NameType = MODULE_SUFFIX_STRING
+ val MODULE_VAR_SUFFIX: NameType = "$module"
+ val NAME_JOIN_NAME: NameType = NAME_JOIN_STRING
+ val PACKAGE: NameType = "package"
+ val ROOT: NameType = "<root>"
+ val SPECIALIZED_SUFFIX: NameType = "$sp"
+
+ // value types (and AnyRef) are all used as terms as well
+ // as (at least) arguments to the @specialize annotation.
+ final val Boolean: NameType = "Boolean"
+ final val Byte: NameType = "Byte"
+ final val Char: NameType = "Char"
+ final val Double: NameType = "Double"
+ final val Float: NameType = "Float"
+ final val Int: NameType = "Int"
+ final val Long: NameType = "Long"
+ final val Short: NameType = "Short"
+ final val Unit: NameType = "Unit"
+
+ final val ScalaValueNames: scala.List[NameType] =
+ scala.List(Byte, Char, Short, Int, Long, Float, Double, Boolean, Unit)
+
+ // some types whose companions we utilize
+ final val AnyRef: NameType = "AnyRef"
+ final val Array: NameType = "Array"
+ final val List: NameType = "List"
+ final val Seq: NameType = "Seq"
+ final val Symbol: NameType = "Symbol"
+ final val ClassTag: NameType = "ClassTag"
+ final val WeakTypeTag: NameType = "WeakTypeTag"
+ final val TypeTag : NameType = "TypeTag"
+ final val Expr: NameType = "Expr"
+ final val String: NameType = "String"
+
+ // fictions we use as both types and terms
+ final val ERROR: NameType = "<error>"
+ final val NO_NAME: NameType = "<none>" // formerly NOSYMBOL
+ final val WILDCARD: NameType = "_"
+ }
+
+ /** This should be the first trait in the linearization. */
+ // abstract class Keywords extends CommonNames {
+ abstract class Keywords extends {
+ private val kw = new KeywordSetBuilder
+
+ final val ABSTRACTkw: TermName = kw("abstract")
+ final val CASEkw: TermName = kw("case")
+ final val CLASSkw: TermName = kw("class")
+ final val CATCHkw: TermName = kw("catch")
+ final val DEFkw: TermName = kw("def")
+ final val DOkw: TermName = kw("do")
+ final val ELSEkw: TermName = kw("else")
+ final val EXTENDSkw: TermName = kw("extends")
+ final val FALSEkw: TermName = kw("false")
+ final val FINALkw: TermName = kw("final")
+ final val FINALLYkw: TermName = kw("finally")
+ final val FORkw: TermName = kw("for")
+ final val FORSOMEkw: TermName = kw("forSome")
+ final val IFkw: TermName = kw("if")
+ final val IMPLICITkw: TermName = kw("implicit")
+ final val IMPORTkw: TermName = kw("import")
+ final val LAZYkw: TermName = kw("lazy")
+ final val MACROkw: TermName = kw("macro")
+ final val MATCHkw: TermName = kw("match")
+ final val NEWkw: TermName = kw("new")
+ final val NULLkw: TermName = kw("null")
+ final val OBJECTkw: TermName = kw("object")
+ final val OVERRIDEkw: TermName = kw("override")
+ final val PACKAGEkw: TermName = kw("package")
+ final val PRIVATEkw: TermName = kw("private")
+ final val PROTECTEDkw: TermName = kw("protected")
+ final val RETURNkw: TermName = kw("return")
+ final val SEALEDkw: TermName = kw("sealed")
+ final val SUPERkw: TermName = kw("super")
+ final val THENkw: TermName = kw("then")
+ final val THISkw: TermName = kw("this")
+ final val THROWkw: TermName = kw("throw")
+ final val TRAITkw: TermName = kw("trait")
+ final val TRUEkw: TermName = kw("true")
+ final val TRYkw: TermName = kw("try")
+ final val TYPEkw: TermName = kw("type")
+ final val VALkw: TermName = kw("val")
+ final val VARkw: TermName = kw("var")
+ final val WITHkw: TermName = kw("with")
+ final val WHILEkw: TermName = kw("while")
+ final val YIELDkw: TermName = kw("yield")
+ final val DOTkw: TermName = kw(".")
+ final val USCOREkw: TermName = kw("_")
+ final val COLONkw: TermName = kw(":")
+ final val EQUALSkw: TermName = kw("=")
+ final val ARROWkw: TermName = kw("=>")
+ final val LARROWkw: TermName = kw("<-")
+ final val SUBTYPEkw: TermName = kw("<:")
+ final val VIEWBOUNDkw: TermName = kw("<%")
+ final val SUPERTYPEkw: TermName = kw(">:")
+ final val HASHkw: TermName = kw("#")
+ final val ATkw: TermName = kw("@")
+
+ final val keywords = kw.result
+ } with CommonNames {
+ final val javaKeywords = new JavaKeywords()
+ }
+
+ abstract class TypeNames extends Keywords with TypeNamesApi {
+ protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
+
+ final val BYNAME_PARAM_CLASS_NAME: NameType = "<byname>"
+ final val EQUALS_PATTERN_NAME: NameType = "<equals>"
+ final val JAVA_REPEATED_PARAM_CLASS_NAME: NameType = "<repeated...>"
+ final val LOCAL_CHILD: NameType = "<local child>"
+ final val REFINE_CLASS_NAME: NameType = "<refinement>"
+ final val REPEATED_PARAM_CLASS_NAME: NameType = "<repeated>"
+ final val WILDCARD_STAR: NameType = "_*"
+ final val REIFY_TREECREATOR_PREFIX: NameType = "$treecreator"
+ final val REIFY_TYPECREATOR_PREFIX: NameType = "$typecreator"
+
+ final val Any: NameType = "Any"
+ final val AnyVal: NameType = "AnyVal"
+ final val ExprApi: NameType = "ExprApi"
+ final val Mirror: NameType = "Mirror"
+ final val Nothing: NameType = "Nothing"
+ final val Null: NameType = "Null"
+ final val Object: NameType = "Object"
+ final val PartialFunction: NameType = "PartialFunction"
+ final val PrefixType: NameType = "PrefixType"
+ final val Product: NameType = "Product"
+ final val Serializable: NameType = "Serializable"
+ final val Singleton: NameType = "Singleton"
+ final val Throwable: NameType = "Throwable"
+
+ final val Annotation: NameType = "Annotation"
+ final val ClassfileAnnotation: NameType = "ClassfileAnnotation"
+ final val ClassManifest: NameType = "ClassManifest"
+ final val Enum: NameType = "Enum"
+ final val Group: NameType = "Group"
+ final val Tree: NameType = "Tree"
+ final val Type : NameType = "Type"
+ final val TypeTree: NameType = "TypeTree"
+
+ // Annotation simple names, used in Namer
+ final val BeanPropertyAnnot: NameType = "BeanProperty"
+ final val BooleanBeanPropertyAnnot: NameType = "BooleanBeanProperty"
+ final val bridgeAnnot: NameType = "bridge"
+
+ // Classfile Attributes
+ final val AnnotationDefaultATTR: NameType = "AnnotationDefault"
+ final val BridgeATTR: NameType = "Bridge"
+ final val ClassfileAnnotationATTR: NameType = "RuntimeInvisibleAnnotations" // RetentionPolicy.CLASS. Currently not used (Apr 2009).
+ final val CodeATTR: NameType = "Code"
+ final val ConstantValueATTR: NameType = "ConstantValue"
+ final val DeprecatedATTR: NameType = "Deprecated"
+ final val ExceptionsATTR: NameType = "Exceptions"
+ final val InnerClassesATTR: NameType = "InnerClasses"
+ final val LineNumberTableATTR: NameType = "LineNumberTable"
+ final val LocalVariableTableATTR: NameType = "LocalVariableTable"
+ final val RuntimeAnnotationATTR: NameType = "RuntimeVisibleAnnotations" // RetentionPolicy.RUNTIME
+ final val RuntimeParamAnnotationATTR: NameType = "RuntimeVisibleParameterAnnotations" // RetentionPolicy.RUNTIME (annotations on parameters)
+ final val ScalaATTR: NameType = "Scala"
+ final val ScalaSignatureATTR: NameType = "ScalaSig"
+ final val SignatureATTR: NameType = "Signature"
+ final val SourceFileATTR: NameType = "SourceFile"
+ final val SyntheticATTR: NameType = "Synthetic"
+
+ def dropSingletonName(name: Name): TypeName = (name dropRight SINGLETON_SUFFIX.length).toTypeName
+ def singletonName(name: Name): TypeName = (name append SINGLETON_SUFFIX).toTypeName
+ def implClassName(name: Name): TypeName = (name append IMPL_CLASS_SUFFIX).toTypeName
+ def interfaceName(implname: Name): TypeName = (implname dropRight IMPL_CLASS_SUFFIX.length).toTypeName
+ }
+
+ abstract class TermNames extends Keywords with TermNamesApi {
+ protected implicit def createNameType(name: String): TermName = newTermNameCached(name)
+
+ /** Base strings from which synthetic names are derived. */
+ val BITMAP_PREFIX = "bitmap$"
+ val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$"
+ val DEFAULT_GETTER_STRING = "$default$"
+ val DEFAULT_GETTER_INIT_STRING = "$lessinit$greater" // CONSTRUCTOR.encoded, less is more
+ val DO_WHILE_PREFIX = "doWhile$"
+ val EVIDENCE_PARAM_PREFIX = "evidence$"
+ val EXCEPTION_RESULT_PREFIX = "exceptionResult"
+ val EXPAND_SEPARATOR_STRING = "$$"
+ val INTERPRETER_IMPORT_WRAPPER = "$iw"
+ val INTERPRETER_LINE_PREFIX = "line"
+ val INTERPRETER_VAR_PREFIX = "res"
+ val INTERPRETER_WRAPPER_SUFFIX = "$object"
+ val LOCALDUMMY_PREFIX = "<local " // owner of local blocks
+ val PROTECTED_PREFIX = "protected$"
+ val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set"
+ val SUPER_PREFIX_STRING = "super$"
+ val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
+ val WHILE_PREFIX = "while$"
+
+ // Compiler internal names
+ val ANYname: NameType = "<anyname>"
+ val CONSTRUCTOR: NameType = "<init>"
+ val DEFAULT_CASE: NameType = "defaultCase$"
+ val EQEQ_LOCAL_VAR: NameType = "eqEqTemp$"
+ val FAKE_LOCAL_THIS: NameType = "this$"
+ val INITIALIZER: NameType = CONSTRUCTOR // Is this buying us something?
+ val LAZY_LOCAL: NameType = "$lzy"
+ val LAZY_SLOW_SUFFIX: NameType = "$lzycompute"
+ val LOCAL_SUFFIX_STRING = " "
+ val UNIVERSE_BUILD_PREFIX: NameType = "$u.build."
+ val UNIVERSE_BUILD: NameType = "$u.build"
+ val UNIVERSE_PREFIX: NameType = "$u."
+ val UNIVERSE_SHORT: NameType = "$u"
+ val MIRROR_PREFIX: NameType = "$m."
+ val MIRROR_SHORT: NameType = "$m"
+ val MIRROR_UNTYPED: NameType = "$m$untyped"
+ val REIFY_FREE_PREFIX: NameType = "free$"
+ val REIFY_FREE_THIS_SUFFIX: NameType = "$this"
+ val REIFY_FREE_VALUE_SUFFIX: NameType = "$value"
+ val REIFY_SYMDEF_PREFIX: NameType = "symdef$"
+ val MIXIN_CONSTRUCTOR: NameType = "$init$"
+ val MODULE_INSTANCE_FIELD: NameType = NameTransformer.MODULE_INSTANCE_NAME // "MODULE$"
+ val OUTER: NameType = "$outer"
+ val OUTER_LOCAL: NameType = OUTER + LOCAL_SUFFIX_STRING // "$outer ", note the space
+ val OUTER_SYNTH: NameType = "<outer>" // emitted by virtual pattern matcher, replaced by outer accessor in explicitouter
+ val ROOTPKG: NameType = "_root_"
+ val SELECTOR_DUMMY: NameType = "<unapply-selector>"
+ val SELF: NameType = "$this"
+ val SETTER_SUFFIX: NameType = encode("_=")
+ val SPECIALIZED_INSTANCE: NameType = "specInstance$"
+ val STAR: NameType = "*"
+ val THIS: NameType = "_$this"
+
+ @deprecated("Use SPECIALIZED_SUFFIX", "2.10.0")
+ def SPECIALIZED_SUFFIX_STRING = SPECIALIZED_SUFFIX.toString
+ @deprecated("Use SPECIALIZED_SUFFIX", "2.10.0")
+ def SPECIALIZED_SUFFIX_NAME: TermName = SPECIALIZED_SUFFIX.toTermName
+
+ def isConstructorName(name: Name) = name == CONSTRUCTOR || name == MIXIN_CONSTRUCTOR
+ def isExceptionResultName(name: Name) = name startsWith EXCEPTION_RESULT_PREFIX
+ def isImplClassName(name: Name) = name endsWith IMPL_CLASS_SUFFIX
+ def isLocalDummyName(name: Name) = name startsWith LOCALDUMMY_PREFIX
+ def isLocalName(name: Name) = name endsWith LOCAL_SUFFIX_STRING
+ def isLoopHeaderLabel(name: Name) = (name startsWith WHILE_PREFIX) || (name startsWith DO_WHILE_PREFIX)
+ def isProtectedAccessorName(name: Name) = name startsWith PROTECTED_PREFIX
+ def isSuperAccessorName(name: Name) = name startsWith SUPER_PREFIX_STRING
+ def isReplWrapperName(name: Name) = name containsName INTERPRETER_IMPORT_WRAPPER
+ def isSetterName(name: Name) = name endsWith SETTER_SUFFIX
+ def isTraitSetterName(name: Name) = isSetterName(name) && (name containsName TRAIT_SETTER_SEPARATOR_STRING)
+ def isSingletonName(name: Name) = name endsWith SINGLETON_SUFFIX
+ def isModuleName(name: Name) = name endsWith MODULE_SUFFIX_NAME
+
+ /** Is name a variable name? */
+ def isVariableName(name: Name): Boolean = {
+ val first = name.startChar
+ ( ((first.isLower && first.isLetter) || first == '_')
+ && (name != nme.false_)
+ && (name != nme.true_)
+ && (name != nme.null_)
+ )
+ }
+
+ def isDeprecatedIdentifierName(name: Name) = name.toTermName match {
+ case nme.`then` | nme.`macro` => true
+ case _ => false
+ }
+
+ def isOpAssignmentName(name: Name) = name match {
+ case raw.NE | raw.LE | raw.GE | EMPTY => false
+ case _ =>
+ name.endChar == '=' && name.startChar != '=' && isOperatorPart(name.startChar)
+ }
+
+ /** The expanded name of `name` relative to this class `base` with given `separator`
+ */
+ def expandedName(name: TermName, base: Symbol, separator: String = EXPAND_SEPARATOR_STRING): TermName =
+ newTermNameCached(base.fullName('$') + separator + name)
+
+ /** The expanded setter name of `name` relative to this class `base`
+ */
+ def expandedSetterName(name: TermName, base: Symbol): TermName =
+ expandedName(name, base, separator = TRAIT_SETTER_SEPARATOR_STRING)
+
+ /** If `name` is an expandedName name, the original name.
+ * Otherwise `name` itself.
+ */
+ def originalName(name: Name): Name = name.toString lastIndexOf "$$" match {
+ case -1 | 0 => name
+ case idx0 =>
+ // Sketchville - We've found $$ but if it's part of $$$ or $$$$
+ // or something we need to keep the bonus dollars, so e.g. foo$$$outer
+ // has an original name of $outer.
+ var idx = idx0
+ while (idx > 0 && name.charAt(idx - 1) == '$')
+ idx -= 1
+ name drop idx + 2
+ }
+
+ def unspecializedName(name: Name): Name = (
+ if (name endsWith SPECIALIZED_SUFFIX)
+ name.subName(0, name.lastIndexOf('m') - 1)
+ else name
+ )
+
+ /*
+ def anonNumberSuffix(name: Name): Name = {
+ ("" + name) lastIndexOf '$' match {
+ case -1 => nme.EMPTY
+ case idx =>
+ val s = name drop idx
+ if (s.toString forall (_.isDigit)) s
+ else nme.EMPTY
+ }
+ }
+ */
+
+ /** Return the original name and the types on which this name
+ * is specialized. For example,
+ * {{{
+ * splitSpecializedName("foo$mIcD$sp") == ('foo', "I", "D")
+ * }}}
+ * `foo$mIcD$sp` is the name of a method specialized on two type
+ * parameters, the first one belonging to the method itself, on Int,
+ * and another one belonging to the enclosing class, on Double.
+ */
+ def splitSpecializedName(name: Name): (Name, String, String) =
+ if (name endsWith SPECIALIZED_SUFFIX) {
+ val name1 = name dropRight SPECIALIZED_SUFFIX.length
+ val idxC = name1 lastIndexOf 'c'
+ val idxM = name1 lastIndexOf 'm'
+
+ (name1.subName(0, idxM - 1),
+ name1.subName(idxC + 1, name1.length).toString,
+ name1.subName(idxM + 1, idxC).toString)
+ } else
+ (name, "", "")
+
+ def getterName(name: TermName): TermName = if (isLocalName(name)) localToGetter(name) else name
+ def getterToLocal(name: TermName): TermName = name append LOCAL_SUFFIX_STRING
+ def getterToSetter(name: TermName): TermName = name append SETTER_SUFFIX
+ def localToGetter(name: TermName): TermName = name dropRight LOCAL_SUFFIX_STRING.length
+
+ def dropLocalSuffix(name: Name): Name = if (name endsWith ' ') name dropRight 1 else name
+
+ def setterToGetter(name: TermName): TermName = {
+ val p = name.pos(TRAIT_SETTER_SEPARATOR_STRING)
+ if (p < name.length)
+ setterToGetter(name drop (p + TRAIT_SETTER_SEPARATOR_STRING.length))
+ else
+ name.subName(0, name.length - SETTER_SUFFIX.length)
+ }
+
+ // Nominally, name$default$N, encoded for <init>
+ def defaultGetterName(name: Name, pos: Int): TermName = {
+ val prefix = if (isConstructorName(name)) DEFAULT_GETTER_INIT_STRING else name
+ newTermName(prefix + DEFAULT_GETTER_STRING + pos)
+ }
+ // Nominally, name from name$default$N, CONSTRUCTOR for <init>
+ def defaultGetterToMethod(name: Name): TermName = {
+ val p = name.pos(DEFAULT_GETTER_STRING)
+ if (p < name.length) {
+ val q = name.toTermName.subName(0, p)
+ // i.e., if (q.decoded == CONSTRUCTOR.toString) CONSTRUCTOR else q
+ if (q.toString == DEFAULT_GETTER_INIT_STRING) CONSTRUCTOR else q
+ } else name.toTermName
+ }
+
+ // If the name ends with $nn where nn are
+ // all digits, strip the $ and the digits.
+ // Otherwise return the argument.
+ def stripAnonNumberSuffix(name: Name): Name = {
+ var pos = name.length
+ while (pos > 0 && name.charAt(pos - 1).isDigit)
+ pos -= 1
+
+ if (pos <= 0 || pos == name.length || name.charAt(pos - 1) != '$') name
+ else name.subName(0, pos - 1)
+ }
+
+ def stripModuleSuffix(name: Name): Name = (
+ if (isModuleName(name)) name dropRight MODULE_SUFFIX_STRING.length else name
+ )
+ def localDummyName(clazz: Symbol): TermName = newTermName(LOCALDUMMY_PREFIX + clazz.name + ">")
+ def superName(name: Name): TermName = newTermName(SUPER_PREFIX_STRING + name)
+
+ /** The name of an accessor for protected symbols. */
+ def protName(name: Name): TermName = newTermName(PROTECTED_PREFIX + name)
+
+ /** The name of a setter for protected symbols. Used for inherited Java fields. */
+ def protSetterName(name: Name): TermName = newTermName(PROTECTED_SET_PREFIX + name)
+
+ final val Nil: NameType = "Nil"
+ final val Predef: NameType = "Predef"
+ final val ScalaRunTime: NameType = "ScalaRunTime"
+ final val Some: NameType = "Some"
+
+ val _1 : NameType = "_1"
+ val _2 : NameType = "_2"
+ val _3 : NameType = "_3"
+ val _4 : NameType = "_4"
+ val _5 : NameType = "_5"
+ val _6 : NameType = "_6"
+ val _7 : NameType = "_7"
+ val _8 : NameType = "_8"
+ val _9 : NameType = "_9"
+ val _10 : NameType = "_10"
+ val _11 : NameType = "_11"
+ val _12 : NameType = "_12"
+ val _13 : NameType = "_13"
+ val _14 : NameType = "_14"
+ val _15 : NameType = "_15"
+ val _16 : NameType = "_16"
+ val _17 : NameType = "_17"
+ val _18 : NameType = "_18"
+ val _19 : NameType = "_19"
+ val _20 : NameType = "_20"
+ val _21 : NameType = "_21"
+ val _22 : NameType = "_22"
+
+ val x_0 : NameType = "x$0"
+ val x_1 : NameType = "x$1"
+ val x_2 : NameType = "x$2"
+ val x_3 : NameType = "x$3"
+ val x_4 : NameType = "x$4"
+ val x_5 : NameType = "x$5"
+ val x_6 : NameType = "x$6"
+ val x_7 : NameType = "x$7"
+ val x_8 : NameType = "x$8"
+ val x_9 : NameType = "x$9"
+
+ @switch def syntheticParamName(i: Int): TermName = i match {
+ case 0 => nme.x_0
+ case 1 => nme.x_1
+ case 2 => nme.x_2
+ case 3 => nme.x_3
+ case 4 => nme.x_4
+ case 5 => nme.x_5
+ case 6 => nme.x_6
+ case 7 => nme.x_7
+ case 8 => nme.x_8
+ case 9 => nme.x_9
+ case _ => newTermName("x$" + i)
+ }
+
+ @switch def productAccessorName(j: Int): TermName = j match {
+ case 1 => nme._1
+ case 2 => nme._2
+ case 3 => nme._3
+ case 4 => nme._4
+ case 5 => nme._5
+ case 6 => nme._6
+ case 7 => nme._7
+ case 8 => nme._8
+ case 9 => nme._9
+ case 10 => nme._10
+ case 11 => nme._11
+ case 12 => nme._12
+ case 13 => nme._13
+ case 14 => nme._14
+ case 15 => nme._15
+ case 16 => nme._16
+ case 17 => nme._17
+ case 18 => nme._18
+ case 19 => nme._19
+ case 20 => nme._20
+ case 21 => nme._21
+ case 22 => nme._22
+ case _ => newTermName("_" + j)
+ }
+
+ val ??? = encode("???")
+
+ val wrapRefArray: NameType = "wrapRefArray"
+ val wrapByteArray: NameType = "wrapByteArray"
+ val wrapShortArray: NameType = "wrapShortArray"
+ val wrapCharArray: NameType = "wrapCharArray"
+ val wrapIntArray: NameType = "wrapIntArray"
+ val wrapLongArray: NameType = "wrapLongArray"
+ val wrapFloatArray: NameType = "wrapFloatArray"
+ val wrapDoubleArray: NameType = "wrapDoubleArray"
+ val wrapBooleanArray: NameType = "wrapBooleanArray"
+ val wrapUnitArray: NameType = "wrapUnitArray"
+ val genericWrapArray: NameType = "genericWrapArray"
+
+ // Compiler utilized names
+
+ val AnnotatedType: NameType = "AnnotatedType"
+ val Annotation: NameType = "Annotation"
+ val Any: NameType = "Any"
+ val AnyVal: NameType = "AnyVal"
+ val AppliedTypeTree: NameType = "AppliedTypeTree"
+ val Apply: NameType = "Apply"
+ val ArrayAnnotArg: NameType = "ArrayAnnotArg"
+ val Constant: NameType = "Constant"
+ val ConstantType: NameType = "ConstantType"
+ val EmptyPackage: NameType = "EmptyPackage"
+ val EmptyPackageClass: NameType = "EmptyPackageClass"
+ val ExistentialTypeTree: NameType = "ExistentialTypeTree"
+ val Flag : NameType = "Flag"
+ val Ident: NameType = "Ident"
+ val Import: NameType = "Import"
+ val Literal: NameType = "Literal"
+ val LiteralAnnotArg: NameType = "LiteralAnnotArg"
+ val Modifiers: NameType = "Modifiers"
+ val NestedAnnotArg: NameType = "NestedAnnotArg"
+ val NoFlags: NameType = "NoFlags"
+ val NoPrefix: NameType = "NoPrefix"
+ val NoSymbol: NameType = "NoSymbol"
+ val Nothing: NameType = "Nothing"
+ val NoType: NameType = "NoType"
+ val Null: NameType = "Null"
+ val Object: NameType = "Object"
+ val RootPackage: NameType = "RootPackage"
+ val RootClass: NameType = "RootClass"
+ val Select: NameType = "Select"
+ val SelectFromTypeTree: NameType = "SelectFromTypeTree"
+ val StringContext: NameType = "StringContext"
+ val This: NameType = "This"
+ val ThisType: NameType = "ThisType"
+ val Tree : NameType = "Tree"
+ val Tuple2: NameType = "Tuple2"
+ val TYPE_ : NameType = "TYPE"
+ val TypeApply: NameType = "TypeApply"
+ val TypeRef: NameType = "TypeRef"
+ val TypeTree: NameType = "TypeTree"
+ val UNIT : NameType = "UNIT"
+ val add_ : NameType = "add"
+ val annotation: NameType = "annotation"
+ val anyValClass: NameType = "anyValClass"
+ val append: NameType = "append"
+ val apply: NameType = "apply"
+ val applyDynamic: NameType = "applyDynamic"
+ val applyDynamicNamed: NameType = "applyDynamicNamed"
+ val applyOrElse: NameType = "applyOrElse"
+ val args : NameType = "args"
+ val argv : NameType = "argv"
+ val arrayClass: NameType = "arrayClass"
+ val arrayElementClass: NameType = "arrayElementClass"
+ val arrayValue: NameType = "arrayValue"
+ val array_apply : NameType = "array_apply"
+ val array_clone : NameType = "array_clone"
+ val array_length : NameType = "array_length"
+ val array_update : NameType = "array_update"
+ val arraycopy: NameType = "arraycopy"
+ val asTerm: NameType = "asTerm"
+ val asModule: NameType = "asModule"
+ val asMethod: NameType = "asMethod"
+ val asType: NameType = "asType"
+ val asClass: NameType = "asClass"
+ val asInstanceOf_ : NameType = "asInstanceOf"
+ val asInstanceOf_Ob : NameType = "$asInstanceOf"
+ val assert_ : NameType = "assert"
+ val assume_ : NameType = "assume"
+ val box: NameType = "box"
+ val build : NameType = "build"
+ val bytes: NameType = "bytes"
+ val canEqual_ : NameType = "canEqual"
+ val checkInitialized: NameType = "checkInitialized"
+ val ClassManifestFactory: NameType = "ClassManifestFactory"
+ val classOf: NameType = "classOf"
+ val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure
+ val conforms: NameType = "conforms"
+ val copy: NameType = "copy"
+ val currentMirror: NameType = "currentMirror"
+ val definitions: NameType = "definitions"
+ val delayedInit: NameType = "delayedInit"
+ val delayedInitArg: NameType = "delayedInit$body"
+ val drop: NameType = "drop"
+ val elem: NameType = "elem"
+ val emptyValDef: NameType = "emptyValDef"
+ val ensureAccessible : NameType = "ensureAccessible"
+ val eq: NameType = "eq"
+ val equalsNumChar : NameType = "equalsNumChar"
+ val equalsNumNum : NameType = "equalsNumNum"
+ val equalsNumObject : NameType = "equalsNumObject"
+ val equals_ : NameType = if (forMSIL) "Equals" else "equals"
+ val error: NameType = "error"
+ val eval: NameType = "eval"
+ val ex: NameType = "ex"
+ val experimental: NameType = "experimental"
+ val f: NameType = "f"
+ val false_ : NameType = "false"
+ val filter: NameType = "filter"
+ val finalize_ : NameType = if (forMSIL) "Finalize" else "finalize"
+ val find_ : NameType = "find"
+ val flagsFromBits : NameType = "flagsFromBits"
+ val flatMap: NameType = "flatMap"
+ val foreach: NameType = "foreach"
+ val genericArrayOps: NameType = "genericArrayOps"
+ val get: NameType = "get"
+ val getOrElse: NameType = "getOrElse"
+ val hasNext: NameType = "hasNext"
+ val hashCode_ : NameType = if (forMSIL) "GetHashCode" else "hashCode"
+ val hash_ : NameType = "hash"
+ val head: NameType = "head"
+ val identity: NameType = "identity"
+ val implicitly: NameType = "implicitly"
+ val in: NameType = "in"
+ val info: NameType = "info"
+ val inlinedEquals: NameType = "inlinedEquals"
+ val isArray: NameType = "isArray"
+ val isDefinedAt: NameType = "isDefinedAt"
+ val isEmpty: NameType = "isEmpty"
+ val isInstanceOf_ : NameType = "isInstanceOf"
+ val isInstanceOf_Ob : NameType = "$isInstanceOf"
+ val java: NameType = "java"
+ val key: NameType = "key"
+ val lang: NameType = "lang"
+ val length: NameType = "length"
+ val lengthCompare: NameType = "lengthCompare"
+ val liftedTree: NameType = "liftedTree"
+ val `macro` : NameType = "macro"
+ val macroThis : NameType = "_this"
+ val macroContext : NameType = "c"
+ val main: NameType = "main"
+ val manifest: NameType = "manifest"
+ val ManifestFactory: NameType = "ManifestFactory"
+ val manifestToTypeTag: NameType = "manifestToTypeTag"
+ val map: NameType = "map"
+ val materializeClassTag: NameType = "materializeClassTag"
+ val materializeWeakTypeTag: NameType = "materializeWeakTypeTag"
+ val materializeTypeTag: NameType = "materializeTypeTag"
+ val mirror : NameType = "mirror"
+ val moduleClass : NameType = "moduleClass"
+ val name: NameType = "name"
+ val ne: NameType = "ne"
+ val newArray: NameType = "newArray"
+ val newFreeTerm: NameType = "newFreeTerm"
+ val newFreeType: NameType = "newFreeType"
+ val newNestedSymbol: NameType = "newNestedSymbol"
+ val newScopeWith: NameType = "newScopeWith"
+ val next: NameType = "next"
+ val nmeNewTermName: NameType = "newTermName"
+ val nmeNewTypeName: NameType = "newTypeName"
+ val normalize: NameType = "normalize"
+ val notifyAll_ : NameType = "notifyAll"
+ val notify_ : NameType = "notify"
+ val null_ : NameType = "null"
+ val ofDim: NameType = "ofDim"
+ val origin: NameType = "origin"
+ val prefix : NameType = "prefix"
+ val productArity: NameType = "productArity"
+ val productElement: NameType = "productElement"
+ val productIterator: NameType = "productIterator"
+ val productPrefix: NameType = "productPrefix"
+ val readResolve: NameType = "readResolve"
+ val reflect : NameType = "reflect"
+ val reify : NameType = "reify"
+ val rootMirror : NameType = "rootMirror"
+ val runOrElse: NameType = "runOrElse"
+ val runtime: NameType = "runtime"
+ val runtimeClass: NameType = "runtimeClass"
+ val runtimeMirror: NameType = "runtimeMirror"
+ val sameElements: NameType = "sameElements"
+ val scala_ : NameType = "scala"
+ val selectDynamic: NameType = "selectDynamic"
+ val selectOverloadedMethod: NameType = "selectOverloadedMethod"
+ val selectTerm: NameType = "selectTerm"
+ val selectType: NameType = "selectType"
+ val self: NameType = "self"
+ val setAccessible: NameType = "setAccessible"
+ val setAnnotations: NameType = "setAnnotations"
+ val setSymbol: NameType = "setSymbol"
+ val setType: NameType = "setType"
+ val setTypeSignature: NameType = "setTypeSignature"
+ val splice: NameType = "splice"
+ val staticClass : NameType = "staticClass"
+ val staticModule : NameType = "staticModule"
+ val staticPackage : NameType = "staticPackage"
+ val synchronized_ : NameType = "synchronized"
+ val tail: NameType = "tail"
+ val `then` : NameType = "then"
+ val this_ : NameType = "this"
+ val thisPrefix : NameType = "thisPrefix"
+ val throw_ : NameType = "throw"
+ val toArray: NameType = "toArray"
+ val toList: NameType = "toList"
+ val toObjectArray : NameType = "toObjectArray"
+ val toSeq: NameType = "toSeq"
+ val toString_ : NameType = if (forMSIL) "ToString" else "toString"
+ val toTypeConstructor: NameType = "toTypeConstructor"
+ val tpe : NameType = "tpe"
+ val tree : NameType = "tree"
+ val true_ : NameType = "true"
+ val typedProductIterator: NameType = "typedProductIterator"
+ val typeTagToManifest: NameType = "typeTagToManifest"
+ val unapply: NameType = "unapply"
+ val unapplySeq: NameType = "unapplySeq"
+ val unbox: NameType = "unbox"
+ val universe: NameType = "universe"
+ val update: NameType = "update"
+ val updateDynamic: NameType = "updateDynamic"
+ val value: NameType = "value"
+ val valueOf : NameType = "valueOf"
+ val values : NameType = "values"
+ val view_ : NameType = "view"
+ val wait_ : NameType = "wait"
+ val withFilter: NameType = "withFilter"
+ val wrap: NameType = "wrap"
+ val zip: NameType = "zip"
+
+ val synthSwitch: NameType = "$synthSwitch"
+
+ // unencoded operators
+ object raw {
+ final val AMP : NameType = "&"
+ final val BANG : NameType = "!"
+ final val BAR : NameType = "|"
+ final val DOLLAR: NameType = "$"
+ final val GE: NameType = ">="
+ final val LE: NameType = "<="
+ final val MINUS: NameType = "-"
+ final val NE: NameType = "!="
+ final val PLUS : NameType = "+"
+ final val SLASH: NameType = "/"
+ final val STAR : NameType = "*"
+ final val TILDE: NameType = "~"
+
+ final val isUnary: Set[Name] = Set(MINUS, PLUS, TILDE, BANG)
+ }
+
+ // value-conversion methods
+ val toByte: NameType = "toByte"
+ val toShort: NameType = "toShort"
+ val toChar: NameType = "toChar"
+ val toInt: NameType = "toInt"
+ val toLong: NameType = "toLong"
+ val toFloat: NameType = "toFloat"
+ val toDouble: NameType = "toDouble"
+
+ // primitive operation methods for structual types mostly
+ // overlap with the above, but not for these two.
+ val toCharacter: NameType = "toCharacter"
+ val toInteger: NameType = "toInteger"
+
+ def newLazyValSlowComputeName(lzyValName: Name) = lzyValName append LAZY_SLOW_SUFFIX
+
+ // ASCII names for operators
+ val ADD = encode("+")
+ val AND = encode("&")
+ val ASR = encode(">>")
+ val DIV = encode("/")
+ val EQ = encode("==")
+ val EQL = encode("=")
+ val GE = encode(">=")
+ val GT = encode(">")
+ val HASHHASH = encode("##")
+ val LE = encode("<=")
+ val LSL = encode("<<")
+ val LSR = encode(">>>")
+ val LT = encode("<")
+ val MINUS = encode("-")
+ val MOD = encode("%")
+ val MUL = encode("*")
+ val NE = encode("!=")
+ val OR = encode("|")
+ val PLUS = ADD // technically redundant, but ADD looks funny with MINUS
+ val SUB = MINUS // ... as does SUB with PLUS
+ val XOR = encode("^")
+ val ZAND = encode("&&")
+ val ZOR = encode("||")
+
+ // unary operators
+ val UNARY_~ = encode("unary_~")
+ val UNARY_+ = encode("unary_+")
+ val UNARY_- = encode("unary_-")
+ val UNARY_! = encode("unary_!")
+
+ // Grouped here so Cleanup knows what tests to perform.
+ val CommonOpNames = Set[Name](OR, XOR, AND, EQ, NE)
+ val ConversionNames = Set[Name](toByte, toChar, toDouble, toFloat, toInt, toLong, toShort)
+ val BooleanOpNames = Set[Name](ZOR, ZAND, UNARY_!) ++ CommonOpNames
+ val NumberOpNames = (
+ Set[Name](ADD, SUB, MUL, DIV, MOD, LSL, LSR, ASR, LT, LE, GE, GT)
+ ++ Set(UNARY_+, UNARY_-, UNARY_!)
+ ++ ConversionNames
+ ++ CommonOpNames
+ )
+
+ val add: NameType = "add"
+ val complement: NameType = "complement"
+ val divide: NameType = "divide"
+ val multiply: NameType = "multiply"
+ val negate: NameType = "negate"
+ val positive: NameType = "positive"
+ val shiftLogicalRight: NameType = "shiftLogicalRight"
+ val shiftSignedLeft: NameType = "shiftSignedLeft"
+ val shiftSignedRight: NameType = "shiftSignedRight"
+ val subtract: NameType = "subtract"
+ val takeAnd: NameType = "takeAnd"
+ val takeConditionalAnd: NameType = "takeConditionalAnd"
+ val takeConditionalOr: NameType = "takeConditionalOr"
+ val takeModulo: NameType = "takeModulo"
+ val takeNot: NameType = "takeNot"
+ val takeOr: NameType = "takeOr"
+ val takeXor: NameType = "takeXor"
+ val testEqual: NameType = "testEqual"
+ val testGreaterOrEqualThan: NameType = "testGreaterOrEqualThan"
+ val testGreaterThan: NameType = "testGreaterThan"
+ val testLessOrEqualThan: NameType = "testLessOrEqualThan"
+ val testLessThan: NameType = "testLessThan"
+ val testNotEqual: NameType = "testNotEqual"
+
+ def toUnaryName(name: TermName): TermName = name match {
+ case raw.MINUS => UNARY_-
+ case raw.PLUS => UNARY_+
+ case raw.TILDE => UNARY_~
+ case raw.BANG => UNARY_!
+ case _ => name
+ }
+ /** The name of a method which stands in for a primitive operation
+ * during structural type dispatch.
+ */
+ def primitiveInfixMethodName(name: Name): TermName = name match {
+ case OR => takeOr
+ case XOR => takeXor
+ case AND => takeAnd
+ case EQ => testEqual
+ case NE => testNotEqual
+ case ADD => add
+ case SUB => subtract
+ case MUL => multiply
+ case DIV => divide
+ case MOD => takeModulo
+ case LSL => shiftSignedLeft
+ case LSR => shiftLogicalRight
+ case ASR => shiftSignedRight
+ case LT => testLessThan
+ case LE => testLessOrEqualThan
+ case GE => testGreaterOrEqualThan
+ case GT => testGreaterThan
+ case ZOR => takeConditionalOr
+ case ZAND => takeConditionalAnd
+ case _ => NO_NAME
+ }
+ /** Postfix/prefix, really.
+ */
+ def primitivePostfixMethodName(name: Name): TermName = name match {
+ case UNARY_! => takeNot
+ case UNARY_+ => positive
+ case UNARY_- => negate
+ case UNARY_~ => complement
+ case `toByte` => toByte
+ case `toShort` => toShort
+ case `toChar` => toCharacter
+ case `toInt` => toInteger
+ case `toLong` => toLong
+ case `toFloat` => toFloat
+ case `toDouble` => toDouble
+ case _ => NO_NAME
+ }
+
+ def primitiveMethodName(name: Name): TermName =
+ primitiveInfixMethodName(name) match {
+ case NO_NAME => primitivePostfixMethodName(name)
+ case name => name
+ }
+
+ /** Translate a String into a list of simple TypeNames and TermNames.
+ * In all segments before the last, type/term is determined by whether
+ * the following separator char is '.' or '#'. In the last segment,
+ * the argument "assumeTerm" determines it. Examples:
+ *
+ * package foo {
+ * object Lorax { object Wog ; class Wog }
+ * class Lorax { object Zax ; class Zax }
+ * }
+ *
+ * f("foo.Lorax", true) == List("foo": Term, "Lorax": Term) // object Lorax
+ * f("foo.Lorax", false) == List("foo": Term, "Lorax": Type) // class Lorax
+ * f("Lorax.Wog", true) == List("Lorax": Term, "Wog": Term) // object Wog
+ * f("Lorax.Wog", false) == List("Lorax": Term, "Wog": Type) // class Wog
+ * f("Lorax#Zax", true) == List("Lorax": Type, "Zax": Term) // object Zax
+ * f("Lorax#Zax", false) == List("Lorax": Type, "Zax": Type) // class Zax
+ *
+ * Note that in actual scala syntax you cannot refer to object Zax without an
+ * instance of Lorax, so Lorax#Zax could only mean the type. One might think
+ * that Lorax#Zax.type would work, but this is not accepted by the parser.
+ * For the purposes of referencing that object, the syntax is allowed.
+ */
+ def segments(name: String, assumeTerm: Boolean): List[Name] = {
+ def mkName(str: String, term: Boolean): Name =
+ if (term) newTermName(str) else newTypeName(str)
+
+ name.indexWhere(ch => ch == '.' || ch == '#') match {
+ // it's the last segment: the parameter tells us whether type or term
+ case -1 => if (name == "") scala.Nil else scala.List(mkName(name, assumeTerm))
+ // otherwise, we can tell based on whether '#' or '.' is the following char.
+ case idx =>
+ val (simple, div, rest) = (name take idx, name charAt idx, name drop idx + 1)
+ mkName(simple, div == '.') :: segments(rest, assumeTerm)
+ }
+ }
+
+ def newBitmapName(bitmapPrefix: Name, n: Int) = bitmapPrefix append ("" + n)
+
+ val BITMAP_NORMAL: NameType = BITMAP_PREFIX + "" // initialization bitmap for public/protected lazy vals
+ val BITMAP_TRANSIENT: NameType = BITMAP_PREFIX + "trans$" // initialization bitmap for transient lazy vals
+ val BITMAP_CHECKINIT: NameType = BITMAP_PREFIX + "init$" // initialization bitmap for checkinit values
+ val BITMAP_CHECKINIT_TRANSIENT: NameType = BITMAP_PREFIX + "inittrans$" // initialization bitmap for transient checkinit values
+ }
+
+ object tpnme extends TypeNames { }
+
+ /** For fully qualified type names.
+ */
+ object fulltpnme extends TypeNames {
+ val RuntimeNothing: NameType = "scala.runtime.Nothing$"
+ val RuntimeNull: NameType = "scala.runtime.Null$"
+ val JavaLangEnum: NameType = "java.lang.Enum"
+ }
+
+ /** Java binary names, like scala/runtime/Nothing$.
+ */
+ object binarynme {
+ def toBinary(name: Name) = name mapName (_.replace('.', '/'))
+
+ val RuntimeNothing = toBinary(fulltpnme.RuntimeNothing).toTypeName
+ val RuntimeNull = toBinary(fulltpnme.RuntimeNull).toTypeName
+ }
+
+ val javanme = nme.javaKeywords
+
+ object nme extends TermNames {
+
+ def isModuleVarName(name: Name): Boolean =
+ stripAnonNumberSuffix(name) endsWith MODULE_VAR_SUFFIX
+
+ def moduleVarName(name: TermName): TermName =
+ newTermNameCached("" + name + MODULE_VAR_SUFFIX)
+
+ def getCause = sn.GetCause
+ def getClass_ = sn.GetClass
+ def getComponentType = sn.GetComponentType
+ def getMethod_ = sn.GetMethod
+ def invoke_ = sn.Invoke
+
+ val isBoxedNumberOrBoolean: NameType = "isBoxedNumberOrBoolean"
+ val isBoxedNumber: NameType = "isBoxedNumber"
+
+ val reflPolyCacheName: NameType = "reflPoly$Cache"
+ val reflClassCacheName: NameType = "reflClass$Cache"
+ val reflParamsCacheName: NameType = "reflParams$Cache"
+ val reflMethodCacheName: NameType = "reflMethod$Cache"
+ val reflMethodName: NameType = "reflMethod$Method"
+
+ private val reflectionCacheNames = Set[NameType](
+ reflPolyCacheName,
+ reflClassCacheName,
+ reflParamsCacheName,
+ reflMethodCacheName,
+ reflMethodName
+ )
+ def isReflectionCacheName(name: Name) = reflectionCacheNames exists (name startsWith _)
+
+ @deprecated("Use a method in tpnme", "2.10.0") def dropSingletonName(name: Name): TypeName = tpnme.dropSingletonName(name)
+ @deprecated("Use a method in tpnme", "2.10.0") def singletonName(name: Name): TypeName = tpnme.singletonName(name)
+ @deprecated("Use a method in tpnme", "2.10.0") def implClassName(name: Name): TypeName = tpnme.implClassName(name)
+ @deprecated("Use a method in tpnme", "2.10.0") def interfaceName(implname: Name): TypeName = tpnme.interfaceName(implname)
+ }
+
+ abstract class SymbolNames {
+ protected val stringToTermName = null
+ protected val stringToTypeName = null
+ protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s)
+
+ val BeanProperty : TypeName
+ val BooleanBeanProperty : TypeName
+ val BoxedBoolean : TypeName
+ val BoxedCharacter : TypeName
+ val BoxedNumber : TypeName
+ val Class : TypeName
+ val Delegate : TypeName
+ val IOOBException : TypeName // IndexOutOfBoundsException
+ val InvTargetException : TypeName // InvocationTargetException
+ val JavaSerializable : TypeName
+ val MethodAsObject : TypeName
+ val NPException : TypeName // NullPointerException
+ val Object : TypeName
+ val String : TypeName
+ val Throwable : TypeName
+ val ValueType : TypeName
+
+ val ForName : TermName
+ val GetCause : TermName
+ val GetClass : TermName
+ val GetClassLoader : TermName
+ val GetComponentType : TermName
+ val GetMethod : TermName
+ val Invoke : TermName
+ val JavaLang : TermName
+
+ val Boxed: immutable.Map[TypeName, TypeName]
+ }
+
+ class JavaKeywords {
+ private val kw = new KeywordSetBuilder
+
+ final val ABSTRACTkw: TermName = kw("abstract")
+ final val ASSERTkw: TermName = kw("assert")
+ final val BOOLEANkw: TermName = kw("boolean")
+ final val BREAKkw: TermName = kw("break")
+ final val BYTEkw: TermName = kw("byte")
+ final val CASEkw: TermName = kw("case")
+ final val CATCHkw: TermName = kw("catch")
+ final val CHARkw: TermName = kw("char")
+ final val CLASSkw: TermName = kw("class")
+ final val CONSTkw: TermName = kw("const")
+ final val CONTINUEkw: TermName = kw("continue")
+ final val DEFAULTkw: TermName = kw("default")
+ final val DOkw: TermName = kw("do")
+ final val DOUBLEkw: TermName = kw("double")
+ final val ELSEkw: TermName = kw("else")
+ final val ENUMkw: TermName = kw("enum")
+ final val EXTENDSkw: TermName = kw("extends")
+ final val FINALkw: TermName = kw("final")
+ final val FINALLYkw: TermName = kw("finally")
+ final val FLOATkw: TermName = kw("float")
+ final val FORkw: TermName = kw("for")
+ final val IFkw: TermName = kw("if")
+ final val GOTOkw: TermName = kw("goto")
+ final val IMPLEMENTSkw: TermName = kw("implements")
+ final val IMPORTkw: TermName = kw("import")
+ final val INSTANCEOFkw: TermName = kw("instanceof")
+ final val INTkw: TermName = kw("int")
+ final val INTERFACEkw: TermName = kw("interface")
+ final val LONGkw: TermName = kw("long")
+ final val NATIVEkw: TermName = kw("native")
+ final val NEWkw: TermName = kw("new")
+ final val PACKAGEkw: TermName = kw("package")
+ final val PRIVATEkw: TermName = kw("private")
+ final val PROTECTEDkw: TermName = kw("protected")
+ final val PUBLICkw: TermName = kw("public")
+ final val RETURNkw: TermName = kw("return")
+ final val SHORTkw: TermName = kw("short")
+ final val STATICkw: TermName = kw("static")
+ final val STRICTFPkw: TermName = kw("strictfp")
+ final val SUPERkw: TermName = kw("super")
+ final val SWITCHkw: TermName = kw("switch")
+ final val SYNCHRONIZEDkw: TermName = kw("synchronized")
+ final val THISkw: TermName = kw("this")
+ final val THROWkw: TermName = kw("throw")
+ final val THROWSkw: TermName = kw("throws")
+ final val TRANSIENTkw: TermName = kw("transient")
+ final val TRYkw: TermName = kw("try")
+ final val VOIDkw: TermName = kw("void")
+ final val VOLATILEkw: TermName = kw("volatile")
+ final val WHILEkw: TermName = kw("while")
+
+ final val keywords = kw.result
+ }
+
+ private abstract class JavaNames extends SymbolNames {
+ final val BoxedBoolean: TypeName = "java.lang.Boolean"
+ final val BoxedByte: TypeName = "java.lang.Byte"
+ final val BoxedCharacter: TypeName = "java.lang.Character"
+ final val BoxedDouble: TypeName = "java.lang.Double"
+ final val BoxedFloat: TypeName = "java.lang.Float"
+ final val BoxedInteger: TypeName = "java.lang.Integer"
+ final val BoxedLong: TypeName = "java.lang.Long"
+ final val BoxedNumber: TypeName = "java.lang.Number"
+ final val BoxedShort: TypeName = "java.lang.Short"
+ final val Class: TypeName = "java.lang.Class"
+ final val Delegate: TypeName = tpnme.NO_NAME
+ final val IOOBException: TypeName = "java.lang.IndexOutOfBoundsException"
+ final val InvTargetException: TypeName = "java.lang.reflect.InvocationTargetException"
+ final val MethodAsObject: TypeName = "java.lang.reflect.Method"
+ final val NPException: TypeName = "java.lang.NullPointerException"
+ final val Object: TypeName = "java.lang.Object"
+ final val String: TypeName = "java.lang.String"
+ final val Throwable: TypeName = "java.lang.Throwable"
+ final val ValueType: TypeName = tpnme.NO_NAME
+
+ final val ForName: TermName = newTermName("forName")
+ final val GetCause: TermName = newTermName("getCause")
+ final val GetClass: TermName = newTermName("getClass")
+ final val GetClassLoader: TermName = newTermName("getClassLoader")
+ final val GetComponentType: TermName = newTermName("getComponentType")
+ final val GetMethod: TermName = newTermName("getMethod")
+ final val Invoke: TermName = newTermName("invoke")
+ final val JavaLang: TermName = newTermName("java.lang")
+
+ val Boxed = immutable.Map[TypeName, TypeName](
+ tpnme.Boolean -> BoxedBoolean,
+ tpnme.Byte -> BoxedByte,
+ tpnme.Char -> BoxedCharacter,
+ tpnme.Short -> BoxedShort,
+ tpnme.Int -> BoxedInteger,
+ tpnme.Long -> BoxedLong,
+ tpnme.Float -> BoxedFloat,
+ tpnme.Double -> BoxedDouble
+ )
+ }
+
+ private class MSILNames extends SymbolNames {
+ final val BeanProperty: TypeName = tpnme.NO_NAME
+ final val BooleanBeanProperty: TypeName = tpnme.NO_NAME
+ final val BoxedBoolean: TypeName = "System.IConvertible"
+ final val BoxedCharacter: TypeName = "System.IConvertible"
+ final val BoxedNumber: TypeName = "System.IConvertible"
+ final val Class: TypeName = "System.Type"
+ final val Delegate: TypeName = "System.MulticastDelegate"
+ final val IOOBException: TypeName = "System.IndexOutOfRangeException"
+ final val InvTargetException: TypeName = "System.Reflection.TargetInvocationException"
+ final val JavaSerializable: TypeName = tpnme.NO_NAME
+ final val MethodAsObject: TypeName = "System.Reflection.MethodInfo"
+ final val NPException: TypeName = "System.NullReferenceException"
+ final val Object: TypeName = "System.Object"
+ final val String: TypeName = "System.String"
+ final val Throwable: TypeName = "System.Exception"
+ final val ValueType: TypeName = "System.ValueType"
+
+ final val ForName: TermName = newTermName("GetType")
+ final val GetCause: TermName = newTermName("InnerException") /* System.Reflection.TargetInvocationException.InnerException */
+ final val GetClass: TermName = newTermName("GetType")
+ final lazy val GetClassLoader: TermName = throw new UnsupportedOperationException("Scala reflection is not supported on this platform");
+ final val GetComponentType: TermName = newTermName("GetElementType")
+ final val GetMethod: TermName = newTermName("GetMethod")
+ final val Invoke: TermName = newTermName("Invoke")
+ final val JavaLang: TermName = newTermName("System")
+
+ val Boxed = immutable.Map[TypeName, TypeName](
+ tpnme.Boolean -> "System.Boolean",
+ tpnme.Byte -> "System.SByte", // a scala.Byte is signed and a System.SByte too (unlike a System.Byte)
+ tpnme.Char -> "System.Char",
+ tpnme.Short -> "System.Int16",
+ tpnme.Int -> "System.Int32",
+ tpnme.Long -> "System.Int64",
+ tpnme.Float -> "System.Single",
+ tpnme.Double -> "System.Double"
+ )
+ }
+
+ private class J2SENames extends JavaNames {
+ final val BeanProperty: TypeName = "scala.beans.BeanProperty"
+ final val BooleanBeanProperty: TypeName = "scala.beans.BooleanBeanProperty"
+ final val JavaSerializable: TypeName = "java.io.Serializable"
+ }
+
+ lazy val sn: SymbolNames =
+ if (forMSIL) new MSILNames
+ else new J2SENames
+}
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
new file mode 100644
index 0000000..6ca8900
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -0,0 +1,366 @@
+/* NSC -- new scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import scala.annotation.elidable
+import scala.collection.{ mutable, immutable }
+import util._
+
+abstract class SymbolTable extends macros.Universe
+ with Collections
+ with Names
+ with Symbols
+ with Types
+ with Kinds
+ with ExistentialsAndSkolems
+ with FlagSets
+ with Scopes
+ with Mirrors
+ with Definitions
+ with Constants
+ with BaseTypeSeqs
+ with InfoTransformers
+ with transform.Transforms
+ with StdNames
+ with AnnotationInfos
+ with AnnotationCheckers
+ with Trees
+ with Printers
+ with Positions
+ with TypeDebugging
+ with Importers
+ with Required
+ with CapturedVariables
+ with StdAttachments
+ with StdCreators
+ with BuildUtils
+{
+
+ val gen = new TreeGen { val global: SymbolTable.this.type = SymbolTable.this }
+ lazy val treeBuild = gen
+
+ def log(msg: => AnyRef): Unit
+ def warning(msg: String): Unit = Console.err.println(msg)
+ def globalError(msg: String): Unit = abort(msg)
+ def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg))
+
+ def shouldLogAtThisPhase = false
+
+ @deprecated("Give us a reason", "2.10.0")
+ def abort(): Nothing = abort("unknown error")
+
+ /** Override with final implementation for inlining. */
+ def debuglog(msg: => String): Unit = if (settings.debug.value) log(msg)
+ def debugwarn(msg: => String): Unit = if (settings.debug.value) Console.err.println(msg)
+ def throwableAsString(t: Throwable): String = "" + t
+
+ /** Prints a stack trace if -Ydebug or equivalent was given, otherwise does nothing. */
+ def debugStack(t: Throwable): Unit = debugwarn(throwableAsString(t))
+
+ /** Overridden when we know more about what was happening during a failure. */
+ def supplementErrorMessage(msg: String): String = msg
+
+ private[scala] def printCaller[T](msg: String)(result: T) = {
+ Console.err.println("%s: %s\nCalled from: %s".format(msg, result,
+ (new Throwable).getStackTrace.drop(2).take(15).mkString("\n")))
+
+ result
+ }
+
+ private[scala] def printResult[T](msg: String)(result: T) = {
+ Console.err.println(msg + ": " + result)
+ result
+ }
+ @inline
+ final private[scala] def logResult[T](msg: => String)(result: T): T = {
+ log(msg + ": " + result)
+ result
+ }
+ @inline
+ final private[scala] def logResultIf[T](msg: => String, cond: T => Boolean)(result: T): T = {
+ if (cond(result))
+ log(msg + ": " + result)
+
+ result
+ }
+
+ // For too long have we suffered in order to sort NAMES.
+ // I'm pretty sure there's a reasonable default for that.
+ // Notice challenge created by Ordering's invariance.
+ implicit def lowPriorityNameOrdering[T <: Names#Name]: Ordering[T] =
+ SimpleNameOrdering.asInstanceOf[Ordering[T]]
+
+ private object SimpleNameOrdering extends Ordering[Names#Name] {
+ def compare(n1: Names#Name, n2: Names#Name) = (
+ if (n1 eq n2) 0
+ else n1.toString compareTo n2.toString
+ )
+ }
+
+ /** Dump each symbol to stdout after shutdown.
+ */
+ final val traceSymbolActivity = sys.props contains "scalac.debug.syms"
+ object traceSymbols extends {
+ val global: SymbolTable.this.type = SymbolTable.this
+ } with util.TraceSymbolActivity
+
+ /** Check that the executing thread is the compiler thread. No-op here,
+ * overridden in interactive.Global. */
+ @elidable(elidable.WARNING)
+ def assertCorrectThread() {}
+
+ /** Are we compiling for Java SE? */
+ // def forJVM: Boolean
+
+ /** Are we compiling for .NET? */
+ def forMSIL: Boolean = false
+
+ /** A last effort if symbol in a select <owner>.<name> is not found.
+ * This is overridden by the reflection compiler to make up a package
+ * when it makes sense (i.e. <owner> is a package and <name> is a term name).
+ */
+ def missingHook(owner: Symbol, name: Name): Symbol = NoSymbol
+
+ /** Returns the mirror that loaded given symbol */
+ def mirrorThatLoaded(sym: Symbol): Mirror
+
+ /** A period is an ordinal number for a phase in a run.
+ * Phases in later runs have higher periods than phases in earlier runs.
+ * Later phases have higher periods than earlier phases in the same run.
+ */
+ type Period = Int
+ final val NoPeriod = 0
+
+ /** An ordinal number for compiler runs. First run has number 1. */
+ type RunId = Int
+ final val NoRunId = 0
+
+ // sigh, this has to be public or atPhase doesn't inline.
+ var phStack: List[Phase] = Nil
+ private[this] var ph: Phase = NoPhase
+ private[this] var per = NoPeriod
+
+ final def atPhaseStack: List[Phase] = phStack
+ final def phase: Phase = {
+ if (Statistics.hotEnabled)
+ Statistics.incCounter(SymbolTableStats.phaseCounter)
+ ph
+ }
+
+ def atPhaseStackMessage = atPhaseStack match {
+ case Nil => ""
+ case ps => ps.reverseMap("->" + _).mkString("(", " ", ")")
+ }
+
+ final def phase_=(p: Phase) {
+ //System.out.println("setting phase to " + p)
+ assert((p ne null) && p != NoPhase, p)
+ ph = p
+ per = period(currentRunId, p.id)
+ }
+ final def pushPhase(ph: Phase): Phase = {
+ val current = phase
+ phase = ph
+ phStack ::= ph
+ current
+ }
+ final def popPhase(ph: Phase) {
+ phStack = phStack.tail
+ phase = ph
+ }
+
+ /** The current compiler run identifier. */
+ def currentRunId: RunId
+
+ /** The run identifier of the given period. */
+ final def runId(period: Period): RunId = period >> 8
+
+ /** The phase identifier of the given period. */
+ final def phaseId(period: Period): Phase#Id = period & 0xFF
+
+ /** The period at the start of run that includes `period`. */
+ final def startRun(period: Period): Period = period & 0xFFFFFF00
+
+ /** The current period. */
+ final def currentPeriod: Period = {
+ //assert(per == (currentRunId << 8) + phase.id)
+ per
+ }
+
+ /** The phase associated with given period. */
+ final def phaseOf(period: Period): Phase = phaseWithId(phaseId(period))
+
+ final def period(rid: RunId, pid: Phase#Id): Period =
+ (rid << 8) + pid
+
+ /** Are we later than given phase in compilation? */
+ final def isAtPhaseAfter(p: Phase) =
+ p != NoPhase && phase.id > p.id
+
+ /** Perform given operation at given phase. */
+ @inline final def atPhase[T](ph: Phase)(op: => T): T = {
+ val saved = pushPhase(ph)
+ try op
+ finally popPhase(saved)
+ }
+
+
+ /** Since when it is to be "at" a phase is inherently ambiguous,
+ * a couple unambiguously named methods.
+ */
+ @inline final def beforePhase[T](ph: Phase)(op: => T): T = atPhase(ph)(op)
+ @inline final def afterPhase[T](ph: Phase)(op: => T): T = atPhase(ph.next)(op)
+ @inline final def afterCurrentPhase[T](op: => T): T = atPhase(phase.next)(op)
+ @inline final def beforePrevPhase[T](op: => T): T = atPhase(phase.prev)(op)
+
+ @inline final def atPhaseNotLaterThan[T](target: Phase)(op: => T): T =
+ if (isAtPhaseAfter(target)) atPhase(target)(op) else op
+
+ final def isValid(period: Period): Boolean =
+ period != 0 && runId(period) == currentRunId && {
+ val pid = phaseId(period)
+ if (phase.id > pid) infoTransformers.nextFrom(pid).pid >= phase.id
+ else infoTransformers.nextFrom(phase.id).pid >= pid
+ }
+
+ final def isValidForBaseClasses(period: Period): Boolean = {
+ def noChangeInBaseClasses(it: InfoTransformer, limit: Phase#Id): Boolean = (
+ it.pid >= limit ||
+ !it.changesBaseClasses && noChangeInBaseClasses(it.next, limit)
+ );
+ period != 0 && runId(period) == currentRunId && {
+ val pid = phaseId(period)
+ if (phase.id > pid) noChangeInBaseClasses(infoTransformers.nextFrom(pid), phase.id)
+ else noChangeInBaseClasses(infoTransformers.nextFrom(phase.id), pid)
+ }
+ }
+
+ def openPackageModule(container: Symbol, dest: Symbol) {
+ // unlink existing symbols in the package
+ for (member <- container.info.decls.iterator) {
+ if (!member.isPrivate && !member.isConstructor) {
+ // todo: handle overlapping definitions in some way: mark as errors
+ // or treat as abstractions. For now the symbol in the package module takes precedence.
+ for (existing <- dest.info.decl(member.name).alternatives)
+ dest.info.decls.unlink(existing)
+ }
+ }
+ // enter non-private decls the class
+ for (member <- container.info.decls.iterator) {
+ if (!member.isPrivate && !member.isConstructor) {
+ dest.info.decls.enter(member)
+ }
+ }
+ // enter decls of parent classes
+ for (p <- container.parentSymbols) {
+ if (p != definitions.ObjectClass) {
+ openPackageModule(p, dest)
+ }
+ }
+ }
+
+ /** Convert array parameters denoting a repeated parameter of a Java method
+ * to `JavaRepeatedParamClass` types.
+ */
+ def arrayToRepeated(tp: Type): Type = tp match {
+ case MethodType(params, rtpe) =>
+ val formals = tp.paramTypes
+ assert(formals.last.typeSymbol == definitions.ArrayClass, formals)
+ val method = params.last.owner
+ val elemtp = formals.last.typeArgs.head match {
+ case RefinedType(List(t1, t2), _) if (t1.typeSymbol.isAbstractType && t2.typeSymbol == definitions.ObjectClass) =>
+ t1 // drop intersection with Object for abstract types in varargs. UnCurry can handle them.
+ case t =>
+ t
+ }
+ val newParams = method.newSyntheticValueParams(formals.init :+ definitions.javaRepeatedType(elemtp))
+ MethodType(newParams, rtpe)
+ case PolyType(tparams, rtpe) =>
+ PolyType(tparams, arrayToRepeated(rtpe))
+ }
+
+ abstract class SymLoader extends LazyType {
+ def fromSource = false
+ }
+
+ /** if there's a `package` member object in `pkgClass`, enter its members into it. */
+ def openPackageModule(pkgClass: Symbol) {
+
+ val pkgModule = pkgClass.info.decl(nme.PACKAGEkw)
+ def fromSource = pkgModule.rawInfo match {
+ case ltp: SymLoader => ltp.fromSource
+ case _ => false
+ }
+ if (pkgModule.isModule && !fromSource) {
+ // println("open "+pkgModule)//DEBUG
+ openPackageModule(pkgModule, pkgClass)
+ }
+ }
+
+ object perRunCaches {
+ import scala.runtime.ScalaRunTime.stringOf
+ import scala.collection.generic.Clearable
+
+ // Weak references so the garbage collector will take care of
+ // letting us know when a cache is really out of commission.
+ private val caches = WeakHashSet[Clearable]()
+
+ def recordCache[T <: Clearable](cache: T): T = {
+ caches += cache
+ cache
+ }
+
+ def clearAll() = {
+ debuglog("Clearing " + caches.size + " caches.")
+ caches foreach (_.clear)
+ }
+
+ def newWeakMap[K, V]() = recordCache(mutable.WeakHashMap[K, V]())
+ def newMap[K, V]() = recordCache(mutable.HashMap[K, V]())
+ def newSet[K]() = recordCache(mutable.HashSet[K]())
+ def newWeakSet[K <: AnyRef]() = recordCache(new WeakHashSet[K]())
+ }
+
+ /** The set of all installed infotransformers. */
+ var infoTransformers = new InfoTransformer {
+ val pid = NoPhase.id
+ val changesBaseClasses = true
+ def transform(sym: Symbol, tpe: Type): Type = tpe
+ }
+
+ /** The phase which has given index as identifier. */
+ val phaseWithId: Array[Phase]
+
+ /** Is this symbol table a part of a compiler universe?
+ */
+ def isCompilerUniverse = false
+
+ /**
+ * Adds the `sm` String interpolator to a [[scala.StringContext]].
+ */
+ implicit val StringContextStripMarginOps: StringContext => StringContextStripMarginOps = util.StringContextStripMarginOps
+
+ def importPrivateWithinFromJavaFlags(sym: Symbol, jflags: Int): Symbol = {
+ import ClassfileConstants._
+ if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)) == 0)
+ // See ticket #1687 for an example of when topLevelClass is NoSymbol: it
+ // apparently occurs when processing v45.3 bytecode.
+ if (sym.enclosingTopLevelClass != NoSymbol)
+ sym.privateWithin = sym.enclosingTopLevelClass.owner
+
+ // protected in java means package protected. #3946
+ if ((jflags & JAVA_ACC_PROTECTED) != 0)
+ if (sym.enclosingTopLevelClass != NoSymbol)
+ sym.privateWithin = sym.enclosingTopLevelClass.owner
+
+ sym
+ }
+}
+
+object SymbolTableStats {
+ val phaseCounter = Statistics.newCounter("#phase calls")
+}
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
new file mode 100644
index 0000000..579f768
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -0,0 +1,3385 @@
+ /* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import scala.collection.{ mutable, immutable }
+import scala.collection.mutable.ListBuffer
+import util.Statistics
+import Flags._
+import scala.annotation.tailrec
+import scala.reflect.io.AbstractFile
+
+trait Symbols extends api.Symbols { self: SymbolTable =>
+ import definitions._
+ import SymbolsStats._
+
+ protected var ids = 0
+
+ val emptySymbolArray = new Array[Symbol](0)
+
+ protected def nextId() = { ids += 1; ids }
+
+ /** Used for deciding in the IDE whether we can interrupt the compiler */
+ //protected var activeLocks = 0
+
+ /** Used for debugging only */
+ //protected var lockedSyms = scala.collection.immutable.Set[Symbol]()
+
+ /** Used to keep track of the recursion depth on locked symbols */
+ private var recursionTable = immutable.Map.empty[Symbol, Int]
+
+ private var nextexid = 0
+ protected def freshExistentialName(suffix: String) = {
+ nextexid += 1
+ newTypeName("_" + nextexid + suffix)
+ }
+
+ // Set the fields which point companions at one another. Returns the module.
+ def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol = {
+ moduleClass.sourceModule = m
+ m setModuleClass moduleClass
+ m
+ }
+
+ /** Create a new free term. Its owner is NoSymbol.
+ */
+ def newFreeTermSymbol(name: TermName, value: => Any, flags: Long = 0L, origin: String): FreeTermSymbol =
+ new FreeTermSymbol(name, value, origin) initFlags flags
+
+ /** Create a new free type. Its owner is NoSymbol.
+ */
+ def newFreeTypeSymbol(name: TypeName, flags: Long = 0L, origin: String): FreeTypeSymbol =
+ new FreeTypeSymbol(name, origin) initFlags flags
+
+ /** Determines whether the given information request should trigger the given symbol's completer.
+ * See comments to `Symbol.needsInitialize` for details.
+ */
+ protected def shouldTriggerCompleter(symbol: Symbol, completer: Type, isFlagRelated: Boolean, mask: Long) =
+ completer match {
+ case null => false
+ case _: FlagAgnosticCompleter => !isFlagRelated
+ case _ => abort(s"unsupported completer: $completer of class ${if (completer != null) completer.getClass else null} for symbol ${symbol.fullName}")
+ }
+
+ /** The original owner of a class. Used by the backend to generate
+ * EnclosingMethod attributes.
+ */
+ val originalOwner = perRunCaches.newMap[Symbol, Symbol]()
+
+ abstract class SymbolContextApiImpl extends SymbolContextApi {
+ this: Symbol =>
+
+ def isExistential: Boolean = this.isExistentiallyBound
+ def isParamWithDefault: Boolean = this.hasDefault
+ def isByNameParam: Boolean = this.isValueParameter && (this hasFlag BYNAMEPARAM)
+ def isImplementationArtifact: Boolean = (this hasFlag BRIDGE) || (this hasFlag VBRIDGE) || (this hasFlag ARTIFACT)
+ def isJava: Boolean = isJavaDefined
+ def isVal: Boolean = isTerm && !isModule && !isMethod && !isMutable
+ def isVar: Boolean = isTerm && !isModule && !isMethod && !isLazy && isMutable
+
+ def newNestedSymbol(name: Name, pos: Position, newFlags: Long, isClass: Boolean): Symbol = name match {
+ case n: TermName => newTermSymbol(n, pos, newFlags)
+ case n: TypeName => if (isClass) newClassSymbol(n, pos, newFlags) else newNonClassSymbol(n, pos, newFlags)
+ }
+
+ def knownDirectSubclasses = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ children
+ }
+
+ def baseClasses = info.baseClasses
+ def module = sourceModule
+ def thisPrefix: Type = thisType
+ def selfType: Type = typeOfThis
+ def typeSignature: Type = { fullyInitializeSymbol(this); info }
+ def typeSignatureIn(site: Type): Type = { fullyInitializeSymbol(this); site memberInfo this }
+
+ def toType: Type = tpe
+ def toTypeIn(site: Type): Type = site.memberType(this)
+ def toTypeConstructor: Type = typeConstructor
+ def setTypeSignature(tpe: Type): this.type = { setInfo(tpe); this }
+ def setAnnotations(annots: AnnotationInfo*): this.type = { setAnnotations(annots.toList); this }
+
+ def getter: Symbol = getter(owner)
+ def setter: Symbol = setter(owner)
+ }
+
+ /** The class for all symbols */
+ abstract class Symbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: Name)
+ extends SymbolContextApiImpl
+ with HasFlags
+ with Annotatable[Symbol]
+ with Attachable {
+
+ type AccessBoundaryType = Symbol
+ type AnnotationType = AnnotationInfo
+
+ // TODO - don't allow names to be renamed in this unstructured a fashion.
+ // Rename as little as possible. Enforce invariants on all renames.
+ type TypeOfClonedSymbol >: Null <: Symbol { type NameType = Symbol.this.NameType }
+
+ // Abstract here so TypeSymbol and TermSymbol can have a private[this] field
+ // with the proper specific type.
+ def rawname: NameType
+ def name: NameType
+ def name_=(n: Name): Unit = {
+ if (shouldLogAtThisPhase) {
+ val msg = s"Renaming $fullLocationString to $n"
+ if (isSpecialized) debuglog(msg) else log(msg)
+ }
+ }
+ def asNameType(n: Name): NameType
+
+ private[this] var _rawowner = initOwner // Syncnote: need not be protected, as only assignment happens in owner_=, which is not exposed to api
+ private[this] var _rawflags: Long = _
+
+ def rawowner = _rawowner
+ def rawflags = _rawflags
+
+ rawatt = initPos
+
+ val id = nextId() // identity displayed when -uniqid
+ //assert(id != 3390, initName)
+
+ private[this] var _validTo: Period = NoPeriod
+
+ if (traceSymbolActivity)
+ traceSymbols.recordNewSymbol(this)
+
+ def validTo = _validTo
+ def validTo_=(x: Period) { _validTo = x}
+
+ def setName(name: Name): this.type = { this.name = asNameType(name) ; this }
+
+ // Update the surrounding scopes
+ protected[this] def changeNameInOwners(name: Name) {
+ if (owner.isClass) {
+ var ifs = owner.infos
+ while (ifs != null) {
+ ifs.info.decls.rehash(this, name)
+ ifs = ifs.prev
+ }
+ }
+ }
+
+ def rawFlagString(mask: Long): String = calculateFlagString(rawflags & mask)
+ def rawFlagString: String = rawFlagString(flagMask)
+ def debugFlagString: String = flagString(AllFlags)
+
+ /** String representation of symbol's variance */
+ def varianceString: String =
+ if (variance == 1) "+"
+ else if (variance == -1) "-"
+ else ""
+
+ override def flagMask =
+ if (settings.debug.value && !isAbstractType) AllFlags
+ else if (owner.isRefinementClass) ExplicitFlags & ~OVERRIDE
+ else ExplicitFlags
+
+ // make the error message more googlable
+ def flagsExplanationString =
+ if (isGADTSkolem) " (this is a GADT skolem)"
+ else ""
+
+ def shortSymbolClass = getClass.getName.split('.').last.stripPrefix("Symbols$")
+ def symbolCreationString: String = (
+ "%s%25s | %-40s | %s".format(
+ if (settings.uniqid.value) "%06d | ".format(id) else "",
+ shortSymbolClass,
+ name.decode + " in " + owner,
+ rawFlagString
+ )
+ )
+
+// ------ creators -------------------------------------------------------------------
+
+ final def newValue(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol =
+ newTermSymbol(name, pos, newFlags)
+ final def newVariable(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol =
+ newTermSymbol(name, pos, MUTABLE | newFlags)
+ final def newValueParameter(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol =
+ newTermSymbol(name, pos, PARAM | newFlags)
+
+ /** Create local dummy for template (owner of local blocks) */
+ final def newLocalDummy(pos: Position): TermSymbol =
+ newTermSymbol(nme.localDummyName(this), pos) setInfo NoType
+ final def newMethod(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): MethodSymbol =
+ createMethodSymbol(name, pos, METHOD | newFlags)
+ final def newMethodSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): MethodSymbol =
+ createMethodSymbol(name, pos, METHOD | newFlags)
+ final def newLabel(name: TermName, pos: Position = NoPosition): MethodSymbol =
+ newMethod(name, pos, LABEL)
+
+ /** Propagates ConstrFlags (JAVA, specifically) from owner to constructor. */
+ final def newConstructor(pos: Position, newFlags: Long = 0L): MethodSymbol =
+ newMethod(nme.CONSTRUCTOR, pos, getFlag(ConstrFlags) | newFlags)
+
+ /** Static constructor with info set. */
+ def newStaticConstructor(pos: Position): MethodSymbol =
+ newConstructor(pos, STATIC) setInfo UnitClass.tpe
+
+ /** Instance constructor with info set. */
+ def newClassConstructor(pos: Position): MethodSymbol =
+ newConstructor(pos) setInfo MethodType(Nil, this.tpe)
+
+ def newLinkedModule(clazz: Symbol, newFlags: Long = 0L): ModuleSymbol = {
+ val m = newModuleSymbol(clazz.name.toTermName, clazz.pos, MODULE | newFlags)
+ connectModuleToClass(m, clazz.asInstanceOf[ClassSymbol])
+ }
+ final def newModule(name: TermName, pos: Position = NoPosition, newFlags0: Long = 0L): ModuleSymbol = {
+ val newFlags = newFlags0 | MODULE
+ val m = newModuleSymbol(name, pos, newFlags)
+ val clazz = newModuleClass(name.toTypeName, pos, newFlags & ModuleToClassFlags)
+ connectModuleToClass(m, clazz)
+ }
+
+ final def newPackage(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol = {
+ assert(name == nme.ROOT || isPackageClass, this)
+ newModule(name, pos, PackageFlags | newFlags)
+ }
+
+ final def newThisSym(name: TermName = nme.this_, pos: Position = NoPosition): TermSymbol =
+ newTermSymbol(name, pos, SYNTHETIC)
+
+ final def newImport(pos: Position): TermSymbol =
+ newTermSymbol(nme.IMPORT, pos)
+
+ final def newModuleSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol =
+ newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol]
+
+ final def newModuleAndClassSymbol(name: Name, pos: Position, flags0: FlagSet): (ModuleSymbol, ClassSymbol) = {
+ val flags = flags0 | MODULE
+ val m = newModuleSymbol(name, pos, flags)
+ val c = newModuleClass(name.toTypeName, pos, flags & ModuleToClassFlags)
+ connectModuleToClass(m, c)
+ (m, c)
+ }
+
+ final def newPackageSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol =
+ newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol]
+
+ final def newModuleClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol =
+ newClassSymbol(name, pos, newFlags).asInstanceOf[ModuleClassSymbol]
+
+ final def newTypeSkolemSymbol(name: TypeName, origin: AnyRef, pos: Position = NoPosition, newFlags: Long = 0L): TypeSkolem =
+ createTypeSkolemSymbol(name, origin, pos, newFlags)
+
+ /** @param pre type relative to which alternatives are seen.
+ * for instance:
+ * class C[T] {
+ * def m(x: T): T
+ * def m'(): T
+ * }
+ * val v: C[Int]
+ *
+ * Then v.m has symbol TermSymbol(flags = {OVERLOADED},
+ * tpe = OverloadedType(C[Int], List(m, m')))
+ * You recover the type of m doing a
+ *
+ * m.tpe.asSeenFrom(pre, C) (generally, owner of m, which is C here).
+ *
+ * or:
+ *
+ * pre.memberType(m)
+ */
+ final def newOverloaded(pre: Type, alternatives: List[Symbol]): TermSymbol = (
+ newTermSymbol(alternatives.head.name.toTermName, alternatives.head.pos, OVERLOADED)
+ setInfo OverloadedType(pre, alternatives)
+ )
+
+ final def newErrorValue(name: TermName): TermSymbol =
+ newTermSymbol(name, pos, SYNTHETIC | IS_ERROR) setInfo ErrorType
+
+ /** Symbol of a type definition type T = ...
+ */
+ final def newAliasType(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AliasTypeSymbol =
+ createAliasTypeSymbol(name, pos, newFlags)
+
+ /** Symbol of an abstract type type T >: ... <: ...
+ */
+ final def newAbstractType(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AbstractTypeSymbol =
+ createAbstractTypeSymbol(name, pos, DEFERRED | newFlags)
+
+ /** Symbol of a type parameter
+ */
+ final def newTypeParameter(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): TypeSymbol =
+ newAbstractType(name, pos, PARAM | newFlags)
+
+// is defined in SymbolCreations
+// final def newTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): TypeSymbol =
+// (if ((newFlags & DEFERRED) != 0) new AbstractTypeSymbol(this, pos, name)
+// else new AbstractTypeSymbol(this, pos, name)) setFlag newFlags
+
+ /** Symbol of an existential type T forSome { ... }
+ */
+ final def newExistential(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): TypeSymbol =
+ newAbstractType(name, pos, EXISTENTIAL | newFlags)
+
+ private def freshNamer: () => TermName = {
+ var cnt = 0
+ () => { cnt += 1; nme.syntheticParamName(cnt) }
+ }
+
+ /** Synthetic value parameters when parameter symbols are not available
+ */
+ final def newSyntheticValueParamss(argtypess: List[List[Type]]): List[List[TermSymbol]] =
+ argtypess map (xs => newSyntheticValueParams(xs, freshNamer))
+
+ /** Synthetic value parameters when parameter symbols are not available.
+ * Calling this method multiple times will re-use the same parameter names.
+ */
+ final def newSyntheticValueParams(argtypes: List[Type]): List[TermSymbol] =
+ newSyntheticValueParams(argtypes, freshNamer)
+
+ final def newSyntheticValueParams(argtypes: List[Type], freshName: () => TermName): List[TermSymbol] =
+ argtypes map (tp => newSyntheticValueParam(tp, freshName()))
+
+ /** Synthetic value parameter when parameter symbol is not available.
+ * Calling this method multiple times will re-use the same parameter name.
+ */
+ final def newSyntheticValueParam(argtype: Type, name: TermName = nme.syntheticParamName(1)): TermSymbol =
+ newValueParameter(name, owner.pos.focus, SYNTHETIC) setInfo argtype
+
+ def newSyntheticTypeParam(): TypeSymbol = newSyntheticTypeParam("T0", 0L)
+ def newSyntheticTypeParam(name: String, newFlags: Long): TypeSymbol = newTypeParameter(newTypeName(name), NoPosition, newFlags) setInfo TypeBounds.empty
+ def newSyntheticTypeParams(num: Int): List[TypeSymbol] = (0 until num).toList map (n => newSyntheticTypeParam("T" + n, 0L))
+
+ /** Create a new existential type skolem with this symbol its owner,
+ * based on the given symbol and origin.
+ */
+ def newExistentialSkolem(basis: Symbol, origin: AnyRef): TypeSkolem = {
+ val skolem = newTypeSkolemSymbol(basis.name.toTypeName, origin, basis.pos, (basis.flags | EXISTENTIAL) & ~PARAM)
+ skolem setInfo (basis.info cloneInfo skolem)
+ }
+
+ // don't test directly -- use isGADTSkolem
+ // used to single out a gadt skolem symbol in deskolemizeGADT
+ // gadtskolems are created in adaptConstrPattern and removed at the end of typedCase
+ final protected[Symbols] def GADT_SKOLEM_FLAGS = CASEACCESSOR | SYNTHETIC
+
+ // flags set up to maintain TypeSkolem's invariant: origin.isInstanceOf[Symbol] == !hasFlag(EXISTENTIAL)
+ // GADT_SKOLEM_FLAGS (== CASEACCESSOR | SYNTHETIC) used to single this symbol out in deskolemizeGADT
+ // TODO: it would be better to allocate a new bit in the flag long for GADTSkolem rather than OR'ing together CASEACCESSOR | SYNTHETIC
+ def newGADTSkolem(name: TypeName, origin: Symbol, info: Type): TypeSkolem =
+ newTypeSkolemSymbol(name, origin, origin.pos, origin.flags & ~(EXISTENTIAL | PARAM) | GADT_SKOLEM_FLAGS) setInfo info
+
+ final def freshExistential(suffix: String): TypeSymbol =
+ newExistential(freshExistentialName(suffix), pos)
+
+ /** Type skolems are type parameters ''seen from the inside''
+ * Assuming a polymorphic method m[T], its type is a PolyType which has a TypeParameter
+ * with name `T` in its typeParams list. While type checking the parameters, result type and
+ * body of the method, there's a local copy of `T` which is a TypeSkolem.
+ */
+ final def newTypeSkolem: TypeSkolem =
+ owner.newTypeSkolemSymbol(name.toTypeName, this, pos, flags)
+
+ final def newClass(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol =
+ newClassSymbol(name, pos, newFlags)
+
+ /** A new class with its info set to a ClassInfoType with given scope and parents. */
+ def newClassWithInfo(name: TypeName, parents: List[Type], scope: Scope, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol = {
+ val clazz = newClass(name, pos, newFlags)
+ clazz setInfo ClassInfoType(parents, scope, clazz)
+ }
+ final def newErrorClass(name: TypeName): ClassSymbol =
+ newClassWithInfo(name, Nil, new ErrorScope(this), pos, SYNTHETIC | IS_ERROR)
+
+ final def newModuleClass(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol =
+ newModuleClassSymbol(name, pos, newFlags | MODULE)
+
+ final def newAnonymousFunctionClass(pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol =
+ newClassSymbol(tpnme.ANON_FUN_NAME, pos, FINAL | SYNTHETIC | newFlags)
+
+ final def newAnonymousFunctionValue(pos: Position, newFlags: Long = 0L): TermSymbol =
+ newTermSymbol(nme.ANON_FUN_NAME, pos, SYNTHETIC | newFlags) setInfo NoType
+
+ def newImplClass(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol = {
+ newClassSymbol(name, pos, newFlags | IMPLCLASS)
+ }
+
+ /** Refinement types P { val x: String; type T <: Number }
+ * also have symbols, they are refinementClasses
+ */
+ final def newRefinementClass(pos: Position): RefinementClassSymbol =
+ createRefinementClassSymbol(pos, 0L)
+
+ /** Create a new getter for current symbol (which must be a field)
+ */
+ final def newGetter: MethodSymbol = (
+ owner.newMethod(nme.getterName(name.toTermName), NoPosition, getterFlags(flags))
+ setPrivateWithin privateWithin
+ setInfo MethodType(Nil, tpe)
+ )
+
+ final def newErrorSymbol(name: Name): Symbol = name match {
+ case x: TypeName => newErrorClass(x)
+ case x: TermName => newErrorValue(x)
+ }
+
+ /** Creates a placeholder symbol for when a name is encountered during
+ * unpickling for which there is no corresponding classfile. This defers
+ * failure to the point when that name is used for something, which is
+ * often to the point of never.
+ */
+ def newStubSymbol(name: Name, missingMessage: String): Symbol = name match {
+ case n: TypeName => new StubClassSymbol(this, n, missingMessage)
+ case _ => new StubTermSymbol(this, name.toTermName, missingMessage)
+ }
+
+ @deprecated("Use the other signature", "2.10.0")
+ def newClass(pos: Position, name: TypeName): Symbol = newClass(name, pos)
+ @deprecated("Use the other signature", "2.10.0")
+ def newModuleClass(pos: Position, name: TypeName): Symbol = newModuleClass(name, pos)
+ @deprecated("Use the other signature", "2.10.0")
+ def newLabel(pos: Position, name: TermName): MethodSymbol = newLabel(name, pos)
+ @deprecated("Use the other signature", "2.10.0")
+ def newValue(pos: Position, name: TermName): TermSymbol = newTermSymbol(name, pos)
+ @deprecated("Use the other signature", "2.10.0")
+ def newAliasType(pos: Position, name: TypeName): Symbol = newAliasType(name, pos)
+ @deprecated("Use the other signature", "2.10.0")
+ def newAbstractType(pos: Position, name: TypeName): Symbol = newAbstractType(name, pos)
+ @deprecated("Use the other signature", "2.10.0")
+ def newExistential(pos: Position, name: TypeName): Symbol = newExistential(name, pos)
+ @deprecated("Use the other signature", "2.10.0")
+ def newMethod(pos: Position, name: TermName): MethodSymbol = newMethod(name, pos)
+
+// ----- locking and unlocking ------------------------------------------------------
+
+ // True if the symbol is unlocked.
+ // True if the symbol is locked but still below the allowed recursion depth.
+ // False otherwise
+ private[scala] def lockOK: Boolean = {
+ ((_rawflags & LOCKED) == 0L) ||
+ ((settings.Yrecursion.value != 0) &&
+ (recursionTable get this match {
+ case Some(n) => (n <= settings.Yrecursion.value)
+ case None => true }))
+ }
+
+ // Lock a symbol, using the handler if the recursion depth becomes too great.
+ private[scala] def lock(handler: => Unit): Boolean = {
+ if ((_rawflags & LOCKED) != 0L) {
+ if (settings.Yrecursion.value != 0) {
+ recursionTable get this match {
+ case Some(n) =>
+ if (n > settings.Yrecursion.value) {
+ handler
+ false
+ } else {
+ recursionTable += (this -> (n + 1))
+ true
+ }
+ case None =>
+ recursionTable += (this -> 1)
+ true
+ }
+ } else { handler; false }
+ } else {
+ _rawflags |= LOCKED
+ true
+// activeLocks += 1
+// lockedSyms += this
+ }
+ }
+
+ // Unlock a symbol
+ private[scala] def unlock() = {
+ if ((_rawflags & LOCKED) != 0L) {
+// activeLocks -= 1
+// lockedSyms -= this
+ _rawflags &= ~LOCKED
+ if (settings.Yrecursion.value != 0)
+ recursionTable -= this
+ }
+ }
+
+// ----- tests ----------------------------------------------------------------------
+
+ def isAliasType = false
+ def isAbstractType = false
+ def isSkolem = false
+
+ /** A Type, but not a Class. */
+ def isNonClassType = false
+
+ /** The bottom classes are Nothing and Null, found in Definitions. */
+ def isBottomClass = false
+
+ /** These are all tests for varieties of ClassSymbol, which has these subclasses:
+ * - ModuleClassSymbol
+ * - RefinementClassSymbol
+ * - PackageClassSymbol (extends ModuleClassSymbol)
+ */
+ def isAbstractClass = false
+ def isAnonOrRefinementClass = false
+ def isAnonymousClass = false
+ def isCaseClass = false
+ def isConcreteClass = false
+ def isImplClass = false // the implementation class of a trait
+ def isJavaInterface = false
+ def isNumericValueClass = false
+ def isPrimitiveValueClass = false
+ def isRefinementClass = false
+ override def isTrait = false
+
+ /** Qualities of Types, always false for TermSymbols.
+ */
+ def isContravariant = false
+ def isCovariant = false
+ def isExistentialQuantified = false
+ def isExistentialSkolem = false
+ def isExistentiallyBound = false
+ def isGADTSkolem = false
+ def isTypeParameter = false
+ def isTypeParameterOrSkolem = false
+ def isTypeSkolem = false
+ def isTypeMacro = false
+ def isInvariant = !isCovariant && !isContravariant
+
+ /** Qualities of Terms, always false for TypeSymbols.
+ */
+ def isAccessor = false
+ def isBridge = false
+ def isCapturedVariable = false
+ def isClassConstructor = false
+ def isConstructor = false
+ def isEarlyInitialized = false
+ def isGetter = false
+ def isLocalDummy = false
+ def isMixinConstructor = false
+ def isOverloaded = false
+ def isSetter = false
+ def isSetterParameter = false
+ def isValue = false
+ def isValueParameter = false
+ def isVariable = false
+ override def hasDefault = false
+ def isTermMacro = false
+
+ /** Qualities of MethodSymbols, always false for TypeSymbols
+ * and other TermSymbols.
+ */
+ def isCaseAccessorMethod = false
+ def isLiftedMethod = false
+ def isSourceMethod = false
+ def isVarargsMethod = false
+ override def isLabel = false
+
+ /** Package/package object tests */
+ def isPackageClass = false
+ def isPackageObject = false
+ def isPackageObjectClass = false
+ def isPackageObjectOrClass = isPackageObject || isPackageObjectClass
+ def isModuleOrModuleClass = isModule || isModuleClass
+
+ /** Overridden in custom objects in Definitions */
+ def isRoot = false
+ def isRootPackage = false
+ def isRootSymbol = false // RootPackage and RootClass. TODO: also NoSymbol.
+ def isEmptyPackage = false
+ def isEmptyPackageClass = false
+
+ /** Is this symbol an effective root for fullname string?
+ */
+ def isEffectiveRoot = false
+
+ final def isLazyAccessor = isLazy && lazyAccessor != NoSymbol
+ final def isOverridableMember = !(isClass || isEffectivelyFinal) && (this ne NoSymbol) && owner.isClass
+
+ /** Does this symbol denote a wrapper created by the repl? */
+ final def isInterpreterWrapper = (
+ (this hasFlag MODULE)
+ && owner.isPackageClass
+ && nme.isReplWrapperName(name)
+ )
+ final def getFlag(mask: Long): Long = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
+ flags & mask
+ }
+ /** Does symbol have ANY flag in `mask` set? */
+ final def hasFlag(mask: Long): Boolean = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
+ (flags & mask) != 0
+ }
+ /** Does symbol have ALL the flags in `mask` set? */
+ final def hasAllFlags(mask: Long): Boolean = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
+ (flags & mask) == mask
+ }
+
+ def setFlag(mask: Long): this.type = { _rawflags |= mask ; this }
+ def resetFlag(mask: Long): this.type = { _rawflags &= ~mask ; this }
+ def resetFlags() { rawflags &= TopLevelCreationFlags }
+
+ /** Default implementation calls the generic string function, which
+ * will print overloaded flags as <flag1/flag2/flag3>. Subclasses
+ * of Symbol refine.
+ */
+ override def resolveOverloadedFlag(flag: Long): String = Flags.flagToString(flag)
+
+ /** Set the symbol's flags to the given value, asserting
+ * that the previous value was 0.
+ */
+ def initFlags(mask: Long): this.type = {
+ assert(rawflags == 0L, symbolCreationString)
+ _rawflags = mask
+ this
+ }
+
+ final def flags: Long = {
+ if (Statistics.hotEnabled) Statistics.incCounter(flagsCount)
+ val fs = _rawflags & phase.flagMask
+ (fs | ((fs & LateFlags) >>> LateShift)) & ~(fs >>> AntiShift)
+ }
+ def flags_=(fs: Long) = _rawflags = fs
+ def rawflags_=(x: Long) { _rawflags = x }
+
+ final def hasGetter = isTerm && nme.isLocalName(name)
+
+ final def isInitializedToDefault = !isType && hasAllFlags(DEFAULTINIT | ACCESSOR)
+ final def isStaticModule = isModule && isStatic && !isMethod
+ final def isThisSym = isTerm && owner.thisSym == this
+ final def isError = hasFlag(IS_ERROR)
+ final def isErroneous = isError || isInitialized && tpe.isErroneous
+
+ def isHigherOrderTypeParameter = owner.isTypeParameterOrSkolem
+
+ // class C extends D( { class E { ... } ... } ). Here, E is a class local to a constructor
+ def isClassLocalToConstructor = false
+
+ final def isDerivedValueClass =
+ isClass && !hasFlag(PACKAGE | TRAIT) &&
+ info.firstParent.typeSymbol == AnyValClass && !isPrimitiveValueClass
+
+ final def isMethodWithExtension =
+ isMethod && owner.isDerivedValueClass && !isParamAccessor && !isConstructor && !hasFlag(SUPERACCESSOR) && !isTermMacro
+
+ final def isAnonymousFunction = isSynthetic && (name containsName tpnme.ANON_FUN_NAME)
+ final def isDefinedInPackage = effectiveOwner.isPackageClass
+ final def needsFlatClasses = phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass
+
+ /** change name by appending $$<fully-qualified-name-of-class `base`>
+ * Do the same for any accessed symbols or setters/getters.
+ * Implementation in TermSymbol.
+ */
+ def expandName(base: Symbol) { }
+
+ // In java.lang, Predef, or scala package/package object
+ def isInDefaultNamespace = UnqualifiedOwners(effectiveOwner)
+
+ /** The owner, skipping package objects.
+ */
+ def effectiveOwner = owner.skipPackageObject
+
+ /** If this is a package object or its implementing class, its owner: otherwise this.
+ */
+ def skipPackageObject: Symbol = this
+
+ /** If this is a constructor, its owner: otherwise this.
+ */
+ final def skipConstructor: Symbol = if (isConstructor) owner else this
+
+ /** Conditions where we omit the prefix when printing a symbol, to avoid
+ * unpleasantries like Predef.String, $iw.$iw.Foo and <empty>.Bippy.
+ */
+ final def isOmittablePrefix = /*!settings.debug.value &&*/ (
+ UnqualifiedOwners(skipPackageObject)
+ || isEmptyPrefix
+ )
+ def isEmptyPrefix = (
+ isEffectiveRoot // has no prefix for real, <empty> or <root>
+ || isAnonOrRefinementClass // has uninteresting <anon> or <refinement> prefix
+ || nme.isReplWrapperName(name) // has ugly $iw. prefix (doesn't call isInterpreterWrapper due to nesting)
+ )
+ def isFBounded = info match {
+ case TypeBounds(_, _) => info.baseTypeSeq exists (_ contains this)
+ case _ => false
+ }
+
+ /** Is symbol a monomorphic type?
+ * assumption: if a type starts out as monomorphic, it will not acquire
+ * type parameters in later phases.
+ */
+ final def isMonomorphicType =
+ isType && {
+ val info = originalInfo
+ ( (info eq null)
+ || (info.isComplete && !info.isHigherKinded)
+ )
+ }
+
+ def isStrictFP = hasAnnotation(ScalaStrictFPAttr) || (enclClass hasAnnotation ScalaStrictFPAttr)
+ def isSerializable = (
+ info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass)
+ || hasAnnotation(SerializableAttr) // last part can be removed, @serializable annotation is deprecated
+ )
+ def hasBridgeAnnotation = hasAnnotation(BridgeClass)
+ def isDeprecated = hasAnnotation(DeprecatedAttr)
+ def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0)
+ def deprecationVersion = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 1)
+ def deprecatedParamName = getAnnotation(DeprecatedNameAttr) flatMap (_ symbolArg 0)
+ def hasDeprecatedInheritanceAnnotation
+ = hasAnnotation(DeprecatedInheritanceAttr)
+ def deprecatedInheritanceMessage
+ = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 0)
+ def deprecatedInheritanceVersion
+ = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 1)
+ def hasDeprecatedOverridingAnnotation
+ = hasAnnotation(DeprecatedOverridingAttr)
+ def deprecatedOverridingMessage
+ = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 0)
+ def deprecatedOverridingVersion
+ = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 1)
+
+ // !!! when annotation arguments are not literal strings, but any sort of
+ // assembly of strings, there is a fair chance they will turn up here not as
+ // Literal(const) but some arbitrary AST. However nothing in the compiler
+ // prevents someone from writing a @migration annotation with a calculated
+ // string. So this needs attention. For now the fact that migration is
+ // private[scala] ought to provide enough protection.
+ def hasMigrationAnnotation = hasAnnotation(MigrationAnnotationClass)
+ def migrationMessage = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(0) }
+ def migrationVersion = getAnnotation(MigrationAnnotationClass) flatMap { _.stringArg(1) }
+ def elisionLevel = getAnnotation(ElidableMethodClass) flatMap { _.intArg(0) }
+ def implicitNotFoundMsg = getAnnotation(ImplicitNotFoundClass) flatMap { _.stringArg(0) }
+
+ def isCompileTimeOnly = hasAnnotation(CompileTimeOnlyAttr)
+ def compileTimeOnlyMessage = getAnnotation(CompileTimeOnlyAttr) flatMap (_ stringArg 0)
+
+ /** Is this symbol an accessor method for outer? */
+ final def isOuterAccessor = {
+ hasFlag(STABLE | ARTIFACT) &&
+ originalName == nme.OUTER
+ }
+
+ /** Is this symbol an accessor method for outer? */
+ final def isOuterField = {
+ hasFlag(ARTIFACT) &&
+ originalName == nme.OUTER_LOCAL
+ }
+
+ /** Does this symbol denote a stable value? */
+ def isStable = false
+
+ /** Does this symbol denote the primary constructor of its enclosing class? */
+ final def isPrimaryConstructor =
+ isConstructor && owner.primaryConstructor == this
+
+ /** Does this symbol denote an auxiliary constructor of its enclosing class? */
+ final def isAuxiliaryConstructor =
+ isConstructor && !isPrimaryConstructor
+
+ /** Is this symbol a synthetic apply or unapply method in a companion object of a case class? */
+ final def isCaseApplyOrUnapply =
+ isMethod && isCase && isSynthetic
+
+ /** Is this symbol a trait which needs an implementation class? */
+ final def needsImplClass = (
+ isTrait
+ && (!isInterface || hasFlag(lateINTERFACE))
+ && !isImplClass
+ )
+
+ /** Is this a symbol which exists only in the implementation class, not in its trait? */
+ final def isImplOnly = isPrivate || (
+ (owner.isTrait || owner.isImplClass) && (
+ hasAllFlags(LIFTED | MODULE | METHOD)
+ || isConstructor
+ || hasFlag(notPRIVATE | LIFTED) && !hasFlag(ACCESSOR | SUPERACCESSOR | MODULE)
+ )
+ )
+ /** Does this symbol have the MODULEVAR flag set? */
+ final def isModuleVar = hasFlag(MODULEVAR)
+
+ /** Is this symbol static (i.e. with no outer instance)?
+ * Q: When exactly is a sym marked as STATIC?
+ * A: If it's a member of a toplevel object, or of an object contained in a toplevel object, or any number of levels deep.
+ * http://groups.google.com/group/scala-internals/browse_thread/thread/d385bcd60b08faf6
+ */
+ def isStatic = (this hasFlag STATIC) || owner.isStaticOwner
+
+ /** Is this symbol a static constructor? */
+ final def isStaticConstructor: Boolean =
+ isStaticMember && isClassConstructor
+
+ /** Is this symbol a static member of its class? (i.e. needs to be implemented as a Java static?) */
+ final def isStaticMember: Boolean =
+ hasFlag(STATIC) || owner.isImplClass
+
+ /** Does this symbol denote a class that defines static symbols? */
+ final def isStaticOwner: Boolean =
+ isPackageClass || isModuleClass && isStatic
+
+ /** Is this a module whose owner is a package class? */
+ def isTopLevelModule = hasFlag(MODULE) && owner.isPackageClass
+
+ /** Is this symbol effectively final? I.e, it cannot be overridden */
+ final def isEffectivelyFinal: Boolean = (
+ (this hasFlag FINAL | PACKAGE)
+ || isModuleOrModuleClass && (owner.isPackageClass || !settings.overrideObjects.value)
+ || isTerm && (
+ isPrivate
+ || isLocal
+ || owner.isClass && owner.isEffectivelyFinal
+ )
+ )
+
+ /** Is this symbol locally defined? I.e. not accessed from outside `this` instance */
+ final def isLocal: Boolean = owner.isTerm
+
+ /** Is this symbol a constant? */
+ final def isConstant: Boolean = isStable && isConstantType(tpe.resultType)
+
+ /** Is this class nested in another class or module (not a package)? */
+ def isNestedClass = false
+
+ /** Is this class locally defined?
+ * A class is local, if
+ * - it is anonymous, or
+ * - its owner is a value
+ * - it is defined within a local class
+ */
+ def isLocalClass = false
+
+ /** Does this symbol denote a stable class? Defaults to false here. */
+ def isStableClass = false
+
+/* code for fixing nested objects
+ override final def isModuleClass: Boolean =
+ super.isModuleClass && !isExpandedModuleClass
+*/
+ /** Is this class or type defined as a structural refinement type?
+ */
+ final def isStructuralRefinement: Boolean =
+ (isClass || isType || isModule) && info.normalize/*.underlying*/.isStructuralRefinement
+
+ /** Is this a term symbol only defined in a refinement (so that it needs
+ * to be accessed by reflection)?
+ */
+ def isOnlyRefinementMember: Boolean =
+ isTerm && // type members are not affected
+ owner.isRefinementClass && // owner must be a refinement class
+ (owner.info decl name) == this && // symbol must be explicitly declared in the refinement (not synthesized from glb)
+ allOverriddenSymbols.isEmpty && // symbol must not override a symbol in a base class
+ !isConstant // symbol must not be a constant. Question: Can we exclude @inline methods as well?
+
+ /** Is this symbol a public, refinement-eligible member of a structural refinement owner? */
+ final def isStructuralRefinementMember = owner.isStructuralRefinement && isPossibleInRefinement && isPublic
+ /** Can this symbol legally appear in a refinement? (Neither a constructor nor overriding.) */
+ final def isPossibleInRefinement = !isConstructor && !isOverridingSymbol
+
+ /** Is this symbol a member of class `clazz`? */
+ def isMemberOf(clazz: Symbol) =
+ clazz.info.member(name).alternatives contains this
+
+ /** A member of class `base` is incomplete if
+ * (1) it is declared deferred or
+ * (2) it is abstract override and its super symbol in `base` is
+ * nonexistent or incomplete.
+ *
+ * @param base the class in which completeness of this member is judged
+ * @return true if this member is incomplete in `base`
+ */
+ final def isIncompleteIn(base: Symbol): Boolean =
+ this.isDeferred ||
+ (this hasFlag ABSOVERRIDE) && {
+ val supersym = superSymbol(base)
+ supersym == NoSymbol || supersym.isIncompleteIn(base)
+ }
+
+ // Does not always work if the rawInfo is a SourcefileLoader, see comment
+ // in "def coreClassesFirst" in Global.
+ def exists = !owner.isPackageClass || { rawInfo.load(this); rawInfo != NoType }
+
+ /** Has this symbol's info been assigned at least once (validTo set)? */
+ final def isInitialized: Boolean =
+ validTo != NoPeriod
+
+ /** Can this symbol be loaded by a reflective mirror?
+ *
+ * Scalac relies on the `ScalaSignature` annotation to retain symbols across compilation runs.
+ * Such annotations (also called "pickles") are applied on top-level classes and include information
+ * about all symbols reachable from the annotee. However, local symbols (e.g. classes or definitions local to a block)
+ * are typically unreachable and information about them gets lost.
+ *
+ * This method is useful for macro writers who wish to save certain ASTs to be used at runtime.
+ * With `isLocatable` it's possible to check whether a tree can be retained as is, or it needs special treatment.
+ */
+ final def isLocatable: Boolean = {
+ if (this == NoSymbol) return false
+ if (isRoot || isRootPackage) return true
+
+ if (!owner.isLocatable) return false
+ if (owner.isTerm) return false
+ if (isLocalDummy) return false
+
+ if (isAliasType) return true
+ if (isType && isNonClassType) return false
+ if (isRefinementClass) return false
+ return true
+ }
+
+ /** The variance of this symbol as an integer: 1 = covariant, -1 = contravariant, 0 = invariant. */
+ final def variance: Int =
+ if (isCovariant) 1
+ else if (isContravariant) -1
+ else 0
+
+ /** The sequence number of this parameter symbol among all type
+ * and value parameters of symbol's owner. -1 if symbol does not
+ * appear among the parameters of its owner.
+ */
+ def paramPos: Int = {
+ def searchIn(tpe: Type, base: Int): Int = {
+ def searchList(params: List[Symbol], fallback: Type): Int = {
+ val idx = params indexOf this
+ if (idx >= 0) idx + base
+ else searchIn(fallback, base + params.length)
+ }
+ // type params are counted before value params: a PolyType's tparams
+ // are searched first, then its result type (MethodType) is searched.
+ tpe match {
+ case PolyType(tparams, res) => searchList(tparams, res)
+ case MethodType(params, res) => searchList(params, res)
+ case _ => -1
+ }
+ }
+ searchIn(owner.info, 0)
+ }
+
+// ------ owner attribute --------------------------------------------------------------
+
+ /** The owner of this symbol (hot path: counter bumped when statistics are enabled). */
+ def owner: Symbol = {
+ if (Statistics.hotEnabled) Statistics.incCounter(ownerCount)
+ rawowner
+ }
+
+ // TODO - don't allow the owner to be changed without checking invariants, at least
+ // when under some flag. Define per-phase invariants for owner/owned relationships,
+ // e.g. after flatten all classes are owned by package classes, there are lots and
+ // lots of these to be declared (or more realistically, discovered.)
+ /** Reassign this symbol's owner. Outside presentation-compiler runs the
+ * previous raw owner is recorded in `originalOwner` (used by the backend).
+ * Only legal in the compiler universe (not thread-safe).
+ */
+ def owner_=(owner: Symbol) {
+ // don't keep the original owner in presentation compiler runs
+ // (the map will grow indefinitely, and the only use case is the
+ // backend).
+ if (!forInteractive) {
+ if (originalOwner contains this) ()
+ else originalOwner(this) = rawowner
+ }
+ // fixed typo in the diagnostic message: "reflexive" -> "reflective"
+ assert(isCompilerUniverse, "owner_= is not thread-safe; cannot be run in reflective code")
+ if (traceSymbolActivity)
+ traceSymbols.recordNewSymbolOwner(this, owner)
+ _rawowner = owner
+ }
+
+ /** This symbol consed onto the owner chain of its owner. */
+ def ownerChain: List[Symbol] = this :: owner.ownerChain
+ /** Like ownerChain, but follows the recorded original (pre-reassignment) owners where available. */
+ def originalOwnerChain: List[Symbol] = this :: originalOwner.getOrElse(this, rawowner).originalOwnerChain
+
+ // Non-classes skip self and return rest of owner chain; overridden in ClassSymbol.
+ def enclClassChain: List[Symbol] = owner.enclClassChain
+
+ /** Iterate this symbol and then each successive owner, stopping before NoSymbol. */
+ def ownersIterator: Iterator[Symbol] = new Iterator[Symbol] {
+ private var cursor = Symbol.this
+ def hasNext = cursor ne NoSymbol
+ def next = {
+ val result = cursor
+ cursor = cursor.owner
+ result
+ }
+ }
+
+ /** Does the (reflexive, transitive) owner chain of this symbol reach `sym`?
+ * Equivalent to `ownerChain contains sym` but allocation-free, and with a
+ * twist for refinement classes (see RefinementClassSymbol.)
+ */
+ def hasTransOwner(sym: Symbol): Boolean = {
+ var cursor: Symbol = this
+ while ((cursor ne sym) && (cursor ne NoSymbol))
+ cursor = cursor.owner
+ cursor eq sym
+ }
+
+// ------ name attribute --------------------------------------------------------------
+
+ /** If this symbol has an expanded name, its original name, otherwise its name itself.
+ * @see expandName
+ */
+ def originalName: Name = nme.originalName(nme.dropLocalSuffix(name))
+
+ /** The name of the symbol before decoding, e.g. `\$eq\$eq` instead of `==`.
+ */
+ def encodedName: String = name.toString
+
+ /** The decoded name of the symbol, e.g. `==` instead of `\$eq\$eq`.
+ */
+ def decodedName: String = nme.dropLocalSuffix(name).decode
+
+ /** Append the module suffix when this symbol requires one on the Java platform. */
+ private def addModuleSuffix(n: Name): Name =
+ if (needsModuleSuffix) n append nme.MODULE_SUFFIX_STRING else n
+
+ /** The module suffix string for this symbol, or "" when none is needed. */
+ def moduleSuffix: String = (
+ if (needsModuleSuffix) nme.MODULE_SUFFIX_STRING
+ else ""
+ )
+ /** Whether this symbol needs nme.MODULE_SUFFIX_STRING (aka $) appended on the java platform.
+ */
+ def needsModuleSuffix = (
+ hasModuleFlag
+ && !isMethod
+ && !isImplClass
+ && !isJavaDefined
+ )
+ /** These should be moved somewhere like JavaPlatform.
+ */
+ def javaSimpleName: Name = addModuleSuffix(nme.dropLocalSuffix(simpleName))
+ def javaBinaryName: Name = addModuleSuffix(fullNameInternal('/'))
+ def javaClassName: String = addModuleSuffix(fullNameInternal('.')).toString
+
+ /** The encoded full path name of this symbol, where outer names and inner names
+ * are separated by `separator` characters.
+ * Never translates expansions of operators back to operator symbol.
+ * Never adds id.
+ * Drops package objects.
+ */
+ final def fullName(separator: Char): String = fullNameAsName(separator).toString
+
+ /** Doesn't drop package objects, for those situations (e.g. classloading)
+ * where the true path is needed.
+ */
+ private def fullNameInternal(separator: Char): Name = (
+ if (isRoot || isRootPackage || this == NoSymbol) name
+ else if (owner.isEffectiveRoot) name
+ else ((effectiveOwner.enclClass.fullNameAsName(separator) append separator): Name) append name
+ )
+
+ /** Like fullNameInternal, with any local suffix dropped. */
+ def fullNameAsName(separator: Char): Name = nme.dropLocalSuffix(fullNameInternal(separator))
+
+ /** The encoded full path name of this symbol, where outer names and inner names
+ * are separated by periods.
+ */
+ final def fullName: String = fullName('.')
+
+ /**
+ * Symbol creation implementations.
+ */
+
+ protected def createAbstractTypeSymbol(name: TypeName, pos: Position, newFlags: Long): AbstractTypeSymbol =
+ new AbstractTypeSymbol(this, pos, name) initFlags newFlags
+
+ protected def createAliasTypeSymbol(name: TypeName, pos: Position, newFlags: Long): AliasTypeSymbol =
+ new AliasTypeSymbol(this, pos, name) initFlags newFlags
+
+ protected def createTypeSkolemSymbol(name: TypeName, origin: AnyRef, pos: Position, newFlags: Long): TypeSkolem =
+ new TypeSkolem(this, pos, name, origin) initFlags newFlags
+
+ protected def createClassSymbol(name: TypeName, pos: Position, newFlags: Long): ClassSymbol =
+ new ClassSymbol(this, pos, name) initFlags newFlags
+
+ protected def createModuleClassSymbol(name: TypeName, pos: Position, newFlags: Long): ModuleClassSymbol =
+ new ModuleClassSymbol(this, pos, name) initFlags newFlags
+
+ protected def createPackageClassSymbol(name: TypeName, pos: Position, newFlags: Long): PackageClassSymbol =
+ new PackageClassSymbol(this, pos, name) initFlags newFlags
+
+ protected def createRefinementClassSymbol(pos: Position, newFlags: Long): RefinementClassSymbol =
+ new RefinementClassSymbol(this, pos) initFlags newFlags
+
+ protected def createPackageObjectClassSymbol(pos: Position, newFlags: Long): PackageObjectClassSymbol =
+ new PackageObjectClassSymbol(this, pos) initFlags newFlags
+
+ protected def createImplClassSymbol(name: TypeName, pos: Position, newFlags: Long): ClassSymbol =
+ new ClassSymbol(this, pos, name) with ImplClassSymbol initFlags newFlags
+
+ protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol =
+ new TermSymbol(this, pos, name) initFlags newFlags
+
+ protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol =
+ new MethodSymbol(this, pos, name) initFlags newFlags
+
+ protected def createModuleSymbol(name: TermName, pos: Position, newFlags: Long): ModuleSymbol =
+ new ModuleSymbol(this, pos, name) initFlags newFlags
+
+ protected def createPackageSymbol(name: TermName, pos: Position, newFlags: Long): ModuleSymbol =
+ new ModuleSymbol(this, pos, name) initFlags newFlags
+
+ protected def createValueParameterSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol =
+ new TermSymbol(this, pos, name) initFlags newFlags
+
+ protected def createValueMemberSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol =
+ new TermSymbol(this, pos, name) initFlags newFlags
+
+ /** Create a term symbol, dispatching on `newFlags`. The tests below are
+ * order-sensitive: METHOD takes precedence over PACKAGE, PACKAGE over
+ * MODULE, MODULE over PARAM.
+ */
+ final def newTermSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol = {
+ if ((newFlags & METHOD) != 0)
+ createMethodSymbol(name, pos, newFlags)
+ else if ((newFlags & PACKAGE) != 0)
+ createPackageSymbol(name, pos, newFlags | PackageFlags)
+ else if ((newFlags & MODULE) != 0)
+ createModuleSymbol(name, pos, newFlags)
+ else if ((newFlags & PARAM) != 0)
+ createValueParameterSymbol(name, pos, newFlags)
+ else
+ createValueMemberSymbol(name, pos, newFlags)
+ }
+
+ /** Create a class symbol, dispatching on special names and flags (order-sensitive). */
+ final def newClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol = {
+ if (name == tpnme.REFINE_CLASS_NAME)
+ createRefinementClassSymbol(pos, newFlags)
+ else if ((newFlags & PACKAGE) != 0)
+ createPackageClassSymbol(name, pos, newFlags | PackageFlags)
+ else if (name == tpnme.PACKAGE)
+ createPackageObjectClassSymbol(pos, newFlags)
+ else if ((newFlags & MODULE) != 0)
+ createModuleClassSymbol(name, pos, newFlags)
+ else if ((newFlags & IMPLCLASS) != 0)
+ createImplClassSymbol(name, pos, newFlags)
+ else
+ createClassSymbol(name, pos, newFlags)
+ }
+
+ /** Create a non-class type symbol: abstract type when DEFERRED is set, otherwise an alias. */
+ final def newNonClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): TypeSymbol = {
+ if ((newFlags & DEFERRED) != 0)
+ createAbstractTypeSymbol(name, pos, newFlags)
+ else
+ createAliasTypeSymbol(name, pos, newFlags)
+ }
+
+ def newTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): TypeSymbol =
+ newNonClassSymbol(name, pos, newFlags)
+
+ /** The class or term up to which this symbol is accessible,
+ * or RootClass if it is public. As java protected statics are
+ * otherwise completely inaccessible in scala, they are treated
+ * as public.
+ */
+ def accessBoundary(base: Symbol): Symbol = {
+ if (hasFlag(PRIVATE) || isLocal) owner
+ else if (hasAllFlags(PROTECTED | STATIC | JAVA)) enclosingRootClass
+ else if (hasAccessBoundary && !phase.erasedTypes) privateWithin
+ else if (hasFlag(PROTECTED)) base
+ else enclosingRootClass
+ }
+
+ /** Is this symbol's access boundary strictly inside `other`'s (w.r.t. `owner`)? */
+ def isLessAccessibleThan(other: Symbol): Boolean = {
+ val tb = this.accessBoundary(owner)
+ val ob1 = other.accessBoundary(owner)
+ val ob2 = ob1.linkedClassOfClass
+ var o = tb
+ while (o != NoSymbol && o != ob1 && o != ob2) {
+ o = o.owner
+ }
+ o != NoSymbol && o != tb
+ }
+
+ /** See comment in HasFlags for how privateWithin combines with flags.
+ */
+ private[this] var _privateWithin: Symbol = _
+ /** The access boundary symbol (e.g. from `private[foo]`); may force initialization
+ * outside the compiler universe (see needsInitialize).
+ */
+ def privateWithin = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ _privateWithin
+ }
+ def privateWithin_=(sym: Symbol) { _privateWithin = sym }
+ def setPrivateWithin(sym: Symbol): this.type = { privateWithin_=(sym) ; this }
+
+ /** Does symbol have a private or protected qualifier set? */
+ final def hasAccessBoundary = (privateWithin != null) && (privateWithin != NoSymbol)
+
+// ------ info and type -------------------------------------------------------------------
+
+ private[Symbols] var infos: TypeHistory = null
+ /** The oldest entry of the type history (end of the `prev` chain), or null
+ * if no info has been recorded; does not force completion.
+ */
+ def originalInfo = {
+ if (infos eq null) null
+ else {
+ var is = infos
+ while (is.prev ne null) { is = is.prev }
+ is.info
+ }
+ }
+
+ /** Get type. The type of a symbol is:
+ * for a type symbol, the type corresponding to the symbol itself,
+ * @M you should use tpeHK for a type symbol with type parameters if
+ * the kind of the type need not be *, as tpe introduces dummy arguments
+ * to generate a type of kind *
+ * for a term symbol, its usual type.
+ * See the tpe/tpeHK overrides in TypeSymbol for more.
+ *
+ * For type symbols, `tpe` is different than `info`. `tpe` returns a typeRef
+ * to the type symbol, `info` returns the type information of the type symbol,
+ * e.g. a ClassInfoType for classes or a TypeBounds for abstract types.
+ */
+ def tpe: Type = info
+ def tpeHK: Type = tpe
+
+ /** Get type info associated with symbol at current phase, after
+ * ensuring that symbol is initialized (i.e. type is completed).
+ */
+ def info: Type = try {
+ var cnt = 0
+ while (validTo == NoPeriod) {
+ //if (settings.debug.value) System.out.println("completing " + this);//DEBUG
+ assert(infos ne null, this.name)
+ assert(infos.prev eq null, this.name)
+ val tp = infos.info
+ //if (settings.debug.value) System.out.println("completing " + this.rawname + tp.getClass());//debug
+
+ if ((_rawflags & LOCKED) != 0L) { // rolled out once for performance
+ lock {
+ setInfo(ErrorType)
+ throw CyclicReference(this, tp)
+ }
+ } else {
+ _rawflags |= LOCKED
+// activeLocks += 1
+ // lockedSyms += this
+ }
+ val current = phase
+ try {
+ assertCorrectThread()
+ phase = phaseOf(infos.validFrom)
+ tp.complete(this)
+ } finally {
+ unlock()
+ phase = current
+ }
+ cnt += 1
+ // allow for two completions:
+ // one: sourceCompleter to LazyType, two: LazyType to completed type
+ if (cnt == 3) abort("no progress in completing " + this + ":" + tp)
+ }
+ rawInfo
+ }
+ catch {
+ case ex: CyclicReference =>
+ debugwarn("... hit cycle trying to complete " + this.fullLocationString)
+ throw ex
+ }
+
+ /** Replace the entire type history with a single entry for `info`, unlocking the symbol. */
+ def info_=(info: Type) {
+ assert(info ne null)
+ infos = TypeHistory(currentPeriod, info, null)
+ unlock()
+ _validTo = if (info.isComplete) currentPeriod else NoPeriod
+ }
+
+ /** Set initial info. */
+ def setInfo(info: Type): this.type = { info_=(info); this }
+ /** Modifies this symbol's info in place. */
+ def modifyInfo(f: Type => Type): this.type = setInfo(f(info))
+ /** Substitute second list of symbols for first in current info. */
+ def substInfo(syms0: List[Symbol], syms1: List[Symbol]): this.type =
+ if (syms0.isEmpty) this
+ else modifyInfo(_.substSym(syms0, syms1))
+
+ def setInfoOwnerAdjusted(info: Type): this.type = setInfo(info atOwner this)
+
+ /** Set the info and enter this symbol into the owner's scope. */
+ def setInfoAndEnter(info: Type): this.type = {
+ setInfo(info)
+ owner.info.decls enter this
+ this
+ }
+
+ /** Set new info valid from start of this phase. */
+ def updateInfo(info: Type): Symbol = {
+ val pid = phaseId(infos.validFrom)
+ assert(pid <= phase.id, (pid, phase.id))
+ if (pid == phase.id) infos = infos.prev // drop an entry already made in this same phase
+ infos = TypeHistory(currentPeriod, info, infos)
+ _validTo = if (info.isComplete) currentPeriod else NoPeriod
+ this
+ }
+
+ def hasRawInfo: Boolean = infos ne null
+ def hasCompleteInfo = hasRawInfo && rawInfo.isComplete
+
+ // does not run adaptToNewRun, which is prone to trigger cycles (SI-8029)
+ // TODO: give this a better name if you understand the intent of the caller.
+ // Is it something to do with `reallyExists` or `isStale`?
+ final def rawInfoIsNoType: Boolean = {
+ hasRawInfo && (infos.info eq NoType)
+ }
+
+ /** Return info without checking for initialization or completing */
+ def rawInfo: Type = {
+ var infos = this.infos
+ assert(infos != null)
+ val curPeriod = currentPeriod
+ val curPid = phaseId(curPeriod)
+
+ if (validTo != NoPeriod) {
+ // skip any infos that concern later phases
+ while (curPid < phaseId(infos.validFrom) && infos.prev != null)
+ infos = infos.prev
+
+ if (validTo < curPeriod) {
+ assertCorrectThread()
+ // adapt any infos that come from previous runs
+ val current = phase
+ try {
+ infos = adaptInfos(infos)
+
+ //assert(runId(validTo) == currentRunId, name)
+ //assert(runId(infos.validFrom) == currentRunId, name)
+
+ if (validTo < curPeriod) {
+ var itr = infoTransformers.nextFrom(phaseId(validTo))
+ infoTransformers = itr; // caching optimization
+ while (itr.pid != NoPhase.id && itr.pid < current.id) {
+ phase = phaseWithId(itr.pid)
+ val info1 = itr.transform(this, infos.info)
+ if (info1 ne infos.info) {
+ infos = TypeHistory(currentPeriod + 1, info1, infos)
+ this.infos = infos
+ }
+ _validTo = currentPeriod + 1 // to enable reads from same symbol during info-transform
+ itr = itr.next
+ }
+ _validTo = if (itr.pid == NoPhase.id) curPeriod
+ else period(currentRunId, itr.pid)
+ }
+ } finally {
+ phase = current
+ }
+ }
+ }
+ infos.info
+ }
+
+ // adapt to new run in fsc.
+ private def adaptInfos(infos: TypeHistory): TypeHistory = {
+ assert(isCompilerUniverse)
+ if (infos == null || runId(infos.validFrom) == currentRunId) {
+ infos
+ } else {
+ val prev1 = adaptInfos(infos.prev)
+ if (prev1 ne infos.prev) prev1
+ else {
+ val pid = phaseId(infos.validFrom)
+
+ _validTo = period(currentRunId, pid)
+ phase = phaseWithId(pid)
+
+ val info1 = (
+ if (isPackageClass) infos.info
+ else adaptToNewRunMap(infos.info)
+ )
+ if (info1 eq infos.info) {
+ infos.validFrom = validTo
+ infos
+ } else {
+ this.infos = TypeHistory(validTo, info1, prev1)
+ this.infos
+ }
+ }
+ }
+ }
+
+ /** Raises a `MissingRequirementError` if this symbol is a `StubSymbol` */
+ def failIfStub() {}
+
+ /** Initialize the symbol (force its info if not yet completed). */
+ final def initialize: this.type = {
+ if (!isInitialized) info
+ this
+ }
+
+ /** Called when the programmer requests information that might require initialization of the underlying symbol.
+ *
+ * `isFlagRelated` and `mask` describe the nature of this information.
+ * isFlagRelated = true means that the programmer needs particular bits in flags.
+ * isFlagRelated = false means that the request is unrelated to flags (annotations or privateWithin).
+ *
+ * In our current architecture, symbols for top-level classes and modules
+ * are created as dummies. Package symbols just call newClass(name) or newModule(name) and
+ * consider their job done.
+ *
+ * In order for such a dummy to provide meaningful info (e.g. a list of its members),
+ * it needs to go through unpickling. Unpickling is a process of reading Scala metadata
+ * from ScalaSignature annotations and assigning it to symbols and types.
+ *
+ * A single unpickling session takes a top-level class or module, parses the ScalaSignature annotation
+ * and then reads metadata for the unpicklee, its companion (if any) and all their members recursively
+ * (i.e. the pickle not only contains info about directly nested classes/modules, but also about
+ * classes/modules nested into those and so on).
+ *
+ * Unpickling is triggered automatically whenever typeSignature (info in compiler parlance) is called.
+ * This happens because package symbols assign completer thunks to the dummies they create.
+ * Therefore metadata loading happens lazily and transparently.
+ *
+ * Almost transparently. Unfortunately metadata isn't limited to just signatures (i.e. lists of members).
+ * It also includes flags (which determine e.g. whether a class is sealed or not), annotations and privateWithin.
+ * This gives rise to unpleasant effects like in SI-6277, when a flag test called on an uninitialized symbol
+ * produces incorrect results.
+ *
+ * One might think that the solution is simple: automatically call the completer whenever one needs
+ * flags, annotations and privateWithin - just like it's done for typeSignature. Unfortunately, this
+ * leads to weird crashes in scalac, and currently we can't attempt to fix the core of the compiler
+ * and risk stability a few weeks before the final release.
+ *
+ * However we do need to fix this for runtime reflection, since it's not something we'd like to
+ * expose to reflection users. Therefore a proposed solution is to check whether we're in a
+ * runtime reflection universe and if yes then to commence initialization.
+ */
+ protected def needsInitialize(isFlagRelated: Boolean, mask: Long) =
+ !isInitialized && (flags & LOCKED) == 0 && shouldTriggerCompleter(this, if (infos ne null) infos.info else null, isFlagRelated, mask)
+
+ /** Was symbol's type updated during given phase? */
+ final def isUpdatedAt(pid: Phase#Id): Boolean = {
+ assert(isCompilerUniverse)
+ var infos = this.infos
+ while ((infos ne null) && phaseId(infos.validFrom) != pid + 1) infos = infos.prev
+ infos ne null
+ }
+
+ /** Does this symbol have a type history entry valid at or before the given phase? */
+ final def hasTypeAt(pid: Phase#Id): Boolean = {
+ assert(isCompilerUniverse)
+ var infos = this.infos
+ while ((infos ne null) && phaseId(infos.validFrom) > pid) infos = infos.prev
+ infos ne null
+ }
+
+ /** Modify term symbol's type so that a raw type C is converted to an existential C[_]
+ *
+ * This is done in checkAccessible and overriding checks in refchecks
+ * We can't do this on class loading because it would result in infinite cycles.
+ */
+ final def cookJavaRawInfo() {
+ if (hasFlag(TRIEDCOOKING)) return else setFlag(TRIEDCOOKING) // only try once...
+ val oldInfo = info // forces completion of this symbol's info before cooking
+ doCookJavaRawInfo()
+ }
+
+ protected def doCookJavaRawInfo(): Unit
+
+ /** The type constructor of a symbol is:
+ * For a type symbol, the type corresponding to the symbol itself,
+ * excluding parameters.
+ * Not applicable for term symbols.
+ */
+ def typeConstructor: Type =
+ abort("typeConstructor inapplicable for " + this)
+
+ /** The logic approximately boils down to finding the most recent phase
+ * which immediately follows any of parser, namer, typer, or erasure.
+ * In effect that means this will return one of:
+ *
+ * - packageobjects (follows namer)
+ * - superaccessors (follows typer)
+ * - lazyvals (follows erasure)
+ * - null
+ */
+ private def unsafeTypeParamPhase = {
+ var ph = phase
+ while (ph.prev.keepsTypeParams)
+ ph = ph.prev
+
+ ph
+ }
+ /** The type parameters of this symbol, without ensuring type completion.
+ * assumption: if a type starts out as monomorphic, it will not acquire
+ * type parameters later.
+ */
+ def unsafeTypeParams: List[Symbol] =
+ if (isMonomorphicType) Nil
+ else atPhase(unsafeTypeParamPhase)(rawInfo.typeParams)
+
+ /** The type parameters of this symbol.
+ * assumption: if a type starts out as monomorphic, it will not acquire
+ * type parameters later.
+ */
+ def typeParams: List[Symbol] =
+ if (isMonomorphicType) Nil
+ else {
+ // analogously to the "info" getter, here we allow for two completions:
+ // one: sourceCompleter to LazyType, two: LazyType to completed type
+ // (hence the deliberately repeated load below)
+ if (validTo == NoPeriod)
+ atPhase(phaseOf(infos.validFrom))(rawInfo load this)
+ if (validTo == NoPeriod)
+ atPhase(phaseOf(infos.validFrom))(rawInfo load this)
+
+ rawInfo.typeParams
+ }
+
+ /** The value parameter sections of this symbol.
+ */
+ def paramss: List[List[Symbol]] = info.paramss
+
+ /** The least proper supertype of a class; includes all parent types
+ * and refinement where needed. You need to compute that in a situation like this:
+ * {
+ * class C extends P { ... }
+ * new C
+ * }
+ */
+ def classBound: Type = {
+ val tp = refinedType(info.parents, owner)
+ // SI-4589 refinedType only creates a new refinement class symbol before erasure; afterwards
+ // the first parent class is returned, to which we must not add members.
+ if (!phase.erasedTypes) {
+ val thistp = tp.typeSymbol.thisType
+ val oldsymbuf = new ListBuffer[Symbol]
+ val newsymbuf = new ListBuffer[Symbol]
+ for (sym <- info.decls) {
+ // todo: what about public references to private symbols?
+ if (sym.isPublic && !sym.isConstructor) {
+ oldsymbuf += sym
+ newsymbuf += (
+ if (sym.isClass)
+ tp.typeSymbol.newAbstractType(sym.name.toTypeName, sym.pos).setInfo(sym.existentialBound)
+ else
+ sym.cloneSymbol(tp.typeSymbol))
+ }
+ }
+ val oldsyms = oldsymbuf.toList
+ val newsyms = newsymbuf.toList
+ for (sym <- newsyms) {
+ addMember(thistp, tp, sym modifyInfo (_ substThisAndSym(this, thistp, oldsyms, newsyms)))
+ }
+ }
+ tp
+ }
+
+ /** If we quantify existentially over this symbol,
+ * the bound of the type variable that stands for it
+ * pre: symbol is a term, a class, or an abstract type (no alias type allowed)
+ */
+ def existentialBound: Type
+
+ /** Reset symbol to initial state: clear all flags, drop the type history,
+ * and install `completer` as the new (lazy) info.
+ */
+ def reset(completer: Type): this.type = {
+ resetFlags()
+ infos = null
+ _validTo = NoPeriod
+ //limit = NoPhase.id
+ setInfo(completer)
+ }
+
+ /**
+ * Adds the interface scala.Serializable to the parents of a ClassInfoType.
+ * Note that the tree also has to be updated accordingly.
+ */
+ def makeSerializable() {
+ info match {
+ case ci @ ClassInfoType(_, _, _) =>
+ setInfo(ci.copy(parents = ci.parents :+ SerializableClass.tpe))
+ case i =>
+ abort("Only ClassInfoTypes can be made serializable: "+ i)
+ }
+ }
+
+// ----- setters implemented in selected subclasses -------------------------------------
+
+ def typeOfThis_=(tp: Type) { throw new UnsupportedOperationException("typeOfThis_= inapplicable for " + this) }
+ def sourceModule_=(sym: Symbol) { throw new UnsupportedOperationException("sourceModule_= inapplicable for " + this) }
+ def addChild(sym: Symbol) { throw new UnsupportedOperationException("addChild inapplicable for " + this) }
+
+// ----- annotations ------------------------------------------------------------
+
+ // null is a marker that they still need to be obtained.
+ // NOTE(review): the field below is initialized to Nil, not null — confirm which
+ // convention ("null = not yet obtained") the comment above refers to.
+ private[this] var _annotations: List[AnnotationInfo] = Nil
+
+ def annotationsString = if (annotations.isEmpty) "" else annotations.mkString("(", ", ", ")")
+
+ /** After the typer phase (before, look at the definition's Modifiers), contains
+ * the annotations attached to a member definition (class, method, type, field).
+ */
+ def annotations: List[AnnotationInfo] = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ _annotations
+ }
+
+ def setAnnotations(annots: List[AnnotationInfo]): this.type = {
+ _annotations = annots
+ this
+ }
+
+ /** Prepend `annots` to the existing annotations. */
+ def withAnnotations(annots: List[AnnotationInfo]): this.type =
+ setAnnotations(annots ::: annotations)
+
+ def withoutAnnotations: this.type =
+ setAnnotations(Nil)
+
+ def filterAnnotations(p: AnnotationInfo => Boolean): this.type =
+ setAnnotations(annotations filter p)
+
+ def addAnnotation(annot: AnnotationInfo): this.type =
+ setAnnotations(annot :: annotations)
+
+ // Convenience for the overwhelmingly common case
+ def addAnnotation(sym: Symbol, args: Tree*): this.type = {
+ // The assertion below is meant to prevent from issues like SI-7009 but it's disabled
+ // due to problems with cycles while compiling Scala library. It's rather shocking that
+ // just checking if sym is monomorphic type introduces nasty cycles. We are definitively
+ // forcing too much because monomorphism is a local property of a type that can be checked
+ // syntactically
+ // assert(sym.initialize.isMonomorphicType, sym)
+ addAnnotation(AnnotationInfo(sym.tpe, args.toList, Nil))
+ }
+
+ /** Use that variant if you want to pass (for example) an applied type */
+ def addAnnotation(tp: Type, args: Tree*): this.type = {
+ assert(tp.typeParams.isEmpty, tp)
+ addAnnotation(AnnotationInfo(tp, args.toList, Nil))
+ }
+
+// ------ comparisons ----------------------------------------------------------------
+
+ /** A total ordering between symbols that refines the class
+ * inheritance graph (i.e. subclass.isLess(superclass) always holds).
+ * the ordering is given by: (_.isType, -_.baseTypeSeq.length) for type symbols, followed by `id`.
+ */
+ final def isLess(that: Symbol): Boolean = {
+ def baseTypeSeqLength(sym: Symbol) =
+ if (sym.isAbstractType) 1 + sym.info.bounds.hi.baseTypeSeq.length
+ else sym.info.baseTypeSeq.length
+ if (this.isType)
+ (that.isType &&
+ { val diff = baseTypeSeqLength(this) - baseTypeSeqLength(that)
+ diff > 0 || diff == 0 && this.id < that.id })
+ else
+ that.isType || this.id < that.id
+ }
+
+ /** A partial ordering between symbols.
+ * (this isNestedIn that) holds iff this symbol is defined within
+ * a class or method defining that symbol
+ */
+ final def isNestedIn(that: Symbol): Boolean =
+ owner == that || owner != NoSymbol && (owner isNestedIn that)
+
+ /** Is this class symbol a subclass of that symbol,
+ * and is this class symbol also different from Null or Nothing? */
+ def isNonBottomSubClass(that: Symbol): Boolean = false
+
+ /** Overridden in NullClass and NothingClass for custom behavior.
+ */
+ def isSubClass(that: Symbol) = isNonBottomSubClass(that)
+
+ final def isNumericSubClass(that: Symbol): Boolean =
+ definitions.isNumericSubClass(this, that)
+
+ final def isWeakSubClass(that: Symbol) =
+ isSubClass(that) || isNumericSubClass(that)
+
+// ------ overloaded alternatives ------------------------------------------------------
+
+ /** All alternatives of an overloaded symbol, or this symbol alone. */
+ def alternatives: List[Symbol] =
+ if (isOverloaded) info.asInstanceOf[OverloadedType].alternatives
+ else this :: Nil
+
+ /** Keep only alternatives satisfying `cond`: yields this symbol unchanged,
+ * NoSymbol, the single surviving alternative, or a fresh overloaded symbol.
+ */
+ def filter(cond: Symbol => Boolean): Symbol =
+ if (isOverloaded) {
+ val alts = alternatives
+ val alts1 = alts filter cond
+ if (alts1 eq alts) this
+ else if (alts1.isEmpty) NoSymbol
+ else if (alts1.tail.isEmpty) alts1.head
+ else owner.newOverloaded(info.prefix, alts1)
+ }
+ else if (cond(this)) this
+ else NoSymbol
+
+ /** Like filter, but asserts that at most one alternative survives. */
+ def suchThat(cond: Symbol => Boolean): Symbol = {
+ val result = filter(cond)
+ assert(!result.isOverloaded, result.alternatives)
+ result
+ }
+
+ @inline final def map(f: Symbol => Symbol): Symbol = if (this eq NoSymbol) this else f(this)
+
+ final def toOption: Option[Symbol] = if (exists) Some(this) else None
+
+// ------ cloning --------------------------------------------------------------------
+
+ /** A clone of this symbol. */
+ final def cloneSymbol: TypeOfClonedSymbol =
+ cloneSymbol(owner)
+
+ /** A clone of this symbol, but with given owner. */
+ final def cloneSymbol(newOwner: Symbol): TypeOfClonedSymbol =
+ cloneSymbol(newOwner, _rawflags)
+ final def cloneSymbol(newOwner: Symbol, newFlags: Long): TypeOfClonedSymbol =
+ cloneSymbol(newOwner, newFlags, null)
+ /** Full clone: copies access boundary, info, annotations and attachments;
+ * a null `newName` means the original name is kept. */
+ final def cloneSymbol(newOwner: Symbol, newFlags: Long, newName: Name): TypeOfClonedSymbol = {
+ val clone = cloneSymbolImpl(newOwner, newFlags)
+ ( clone
+ setPrivateWithin privateWithin
+ setInfo (this.info cloneInfo clone)
+ setAnnotations this.annotations
+ )
+ this.attachments.all.foreach(clone.updateAttachment)
+ if (clone.thisSym != clone)
+ clone.typeOfThis = (clone.typeOfThis cloneInfo clone)
+
+ if (newName ne null)
+ clone setName asNameType(newName)
+
+ clone
+ }
+
+ /** Internal method to clone a symbol's implementation with the given flags and no info. */
+ def cloneSymbolImpl(owner: Symbol, newFlags: Long): TypeOfClonedSymbol
+
+// ------ access to related symbols --------------------------------------------------
+
+ /** The next enclosing class (this symbol itself if it is a class). */
+ def enclClass: Symbol = if (isClass) this else owner.enclClass
+
+ /** The next enclosing method (this symbol itself if it is a source method). */
+ def enclMethod: Symbol = if (isSourceMethod) this else owner.enclMethod
+
+ /** The primary constructor of a class. */
+ def primaryConstructor: Symbol = NoSymbol
+
+ /** The self symbol (a TermSymbol) of a class with explicit self type, or else the
+ * symbol itself (a TypeSymbol).
+ *
+ * WARNING: you're probably better off using typeOfThis, as it's more uniform across classes with and without self variables.
+ *
+ * Example by Paul:
+ * scala> trait Foo1 { }
+ * scala> trait Foo2 { self => }
+ * scala> intp("Foo1").thisSym
+ * res0: $r.intp.global.Symbol = trait Foo1
+ *
+ * scala> intp("Foo2").thisSym
+ * res1: $r.intp.global.Symbol = value self
+ *
+ * Martin says: The reason `thisSym' is `this' is so that thisType can be this.thisSym.tpe.
+ * It's a trick to shave some cycles off.
+ *
+ * Morale: DO: if (clazz.typeOfThis.typeConstructor ne clazz.typeConstructor) ...
+ * DON'T: if (clazz.thisSym ne clazz) ...
+ *
+ */
+ def thisSym: Symbol = this
+
+ /** The type of `this` in a class, or else the type of the symbol itself. */
+ def typeOfThis = thisSym.tpe
+
+ /** If symbol is a class, the type <code>this.type</code> in this class,
+ * otherwise <code>NoPrefix</code>.
+ * We always have: thisType <:< typeOfThis
+ */
+ def thisType: Type = NoPrefix
+
+ /** For a case class, the symbols of the accessor methods, one for each
+ * argument in the first parameter list of the primary constructor.
+ * The empty list for all other classes.
+ *
+ * This list will be sorted to correspond to the declaration order
+ * in the constructor parameter list.
+ */
+ final def caseFieldAccessors: List[Symbol] = {
+ // We can't rely on the ordering of the case field accessors within decls --
+ // handling of non-public parameters seems to change the order (see SI-7035.)
+ //
+ // Luckily, the constrParamAccessors are still sorted properly, so sort the field-accessors using them
+ // (need to undo name-mangling, including the sneaky trailing whitespace)
+ //
+ // The slightly more principled approach of using the paramss of the
+ // primary constructor leads to cycles in, for example, pos/t5084.scala.
+ val primaryNames = constrParamAccessors.map(acc => nme.dropLocalSuffix(acc.name))
+ caseFieldAccessorsUnsorted.sortBy { acc =>
+ primaryNames indexWhere { orig =>
+ (acc.name == orig) || (acc.name startsWith (orig append "$"))
+ }
+ }
+ }
+ private final def caseFieldAccessorsUnsorted: List[Symbol] =
+ (info.decls filter (_.isCaseAccessorMethod)).toList
+
+ final def constrParamAccessors: List[Symbol] =
+ info.decls.filter(sym => !sym.isMethod && sym.isParamAccessor).toList
+
+ /** The symbol accessed by this accessor (getter or setter) function. */
+ final def accessed: Symbol = accessed(owner.info)
+
+ /** The symbol accessed by this accessor function, but with given owner type. */
+ final def accessed(ownerTp: Type): Symbol = {
+ assert(hasAccessorFlag, this)
+ // look up the underlying field via the getter name mapped back to its "local" (mangled) form
+ ownerTp decl nme.getterToLocal(getterName.toTermName)
+ }
+
+ /** The module corresponding to this module class (note that this
+ * is not updated when a module is cloned), or NoSymbol if this is not a ModuleClass.
+ */
+ def sourceModule: Symbol = NoSymbol
+
+ /** The implementation class of a trait. If available it will be the
+ * symbol with the same owner, and the name of this symbol with $class
+ * appended to it.
+ */
+ final def implClass: Symbol = owner.info.decl(tpnme.implClassName(name))
+
+ /** The class that is logically an outer class of given `clazz`.
+ * This is the enclosing class, except for classes defined locally to constructors,
+ * where it is the outer class of the enclosing class.
+ */
+ final def outerClass: Symbol =
+ if (owner.isClass) owner
+ else if (isClassLocalToConstructor) owner.enclClass.outerClass
+ else owner.outerClass
+
+ /** For a param accessor: a superclass param accessor for which this symbol
+ * is an alias, NoSymbol for all others.
+ */
+ def alias: Symbol = NoSymbol
+
+ /** For a lazy value, its lazy accessor. NoSymbol for all others. */
+ def lazyAccessor: Symbol = NoSymbol
+
+ /** If this is a lazy value, the lazy accessor; otherwise this symbol. */
+ def lazyAccessorOrSelf: Symbol = if (isLazy) lazyAccessor else this
+
+ /** If this is an accessor, the accessed symbol. Otherwise, this symbol. */
+ def accessedOrSelf: Symbol = if (hasAccessorFlag) accessed else this
+
+ /** For an outer accessor: The class from which the outer originates.
+ * For all other symbols: NoSymbol
+ */
+ def outerSource: Symbol = NoSymbol
+
+ /** The superclass of this class (head of the parents list). */
+ def superClass: Symbol = if (info.parents.isEmpty) NoSymbol else info.parents.head.typeSymbol
+ def parentSymbols: List[Symbol] = info.parents map (_.typeSymbol)
+
+ /** The directly or indirectly inherited mixins of this class
+ * except for mixin classes inherited by the superclass. Mixin classes appear
+ * in linearization order.
+ */
+ def mixinClasses: List[Symbol] = {
+ val sc = superClass
+ ancestors takeWhile (sc ne _)
+ }
+
+ /** All directly or indirectly inherited classes (the base classes minus this symbol itself). */
+ def ancestors: List[Symbol] = info.baseClasses drop 1
+
+ /** Walk the owner chain (starting at this symbol) up to the first symbol satisfying `p`,
+ * or NoSymbol if none does. */
+ @inline final def enclosingSuchThat(p: Symbol => Boolean): Symbol = {
+ var sym = this
+ while (sym != NoSymbol && !p(sym))
+ sym = sym.owner
+ sym
+ }
+
+ /** The package class containing this symbol, or NoSymbol if there
+ * is not one.
+ * TODO: formulate as enclosingSuchThat, after making sure
+ * we can start with current symbol rather than owner.
+ * TODO: Also harmonize with enclClass, enclMethod etc.
+ */
+ def enclosingPackageClass: Symbol = {
+ var sym = this.owner
+ while (sym != NoSymbol && !sym.isPackageClass)
+ sym = sym.owner
+ sym
+ }
+
+ /** The package class containing this symbol, or NoSymbol if there
+ * is not one. */
+ def enclosingRootClass: Symbol = enclosingSuchThat(_.isRoot)
+
+ /** The package containing this symbol, or NoSymbol if there
+ * is not one. */
+ def enclosingPackage: Symbol = enclosingPackageClass.companionModule
+
+ /** Return the original enclosing method of this symbol. It should return
+ * the same thing as enclMethod when called before lambda lift,
+ * but it preserves the original nesting when called afterwards.
+ *
+ * @note This method is NOT available in the presentation compiler run. The
+ * originalOwner map is not populated for memory considerations (the symbol
+ * may hang on to lazy types and in turn to whole (outdated) compilation units).
+ */
+ def originalEnclosingMethod: Symbol = {
+ assert(!forInteractive, "originalOwner is not kept in presentation compiler runs.")
+ if (isMethod) this
+ else {
+ // fall back on the raw owner when the symbol has no recorded original owner
+ val owner = originalOwner.getOrElse(this, rawowner)
+ if (isLocalDummy) owner.enclClass.primaryConstructor
+ else owner.originalEnclosingMethod
+ }
+ }
+
+ /** The method or class which logically encloses the current symbol.
+ * If the symbol is defined in the initialization part of a template
+ * this is the template's primary constructor, otherwise it is
+ * the physically enclosing method or class.
+ *
+ * Example 1:
+ *
+ * def f() { val x = { def g() = ...; g() } }
+ *
+ * In this case the owner chain of `g` is `x`, followed by `f` and
+ * `g.logicallyEnclosingMember == f`.
+ *
+ * Example 2:
+ *
+ * class C {
+ * def <init> = { ... }
+ * val x = { def g() = ...; g() } }
+ * }
+ *
+ * In this case the owner chain of `g` is `x`, followed by `C` but
+ * g.logicallyEnclosingMember is the primary constructor symbol `<init>`
+ * (or, for traits: `$init`) of `C`.
+ *
+ */
+ def logicallyEnclosingMember: Symbol =
+ if (isLocalDummy) enclClass.primaryConstructor
+ else if (isMethod || isClass) this
+ else owner.logicallyEnclosingMember
+
+ /** Kept for source compatibility with 2.9. Scala IDE for Eclipse relies on this. */
+ @deprecated("Use enclosingTopLevelClass", "2.10.0")
+ def toplevelClass: Symbol = enclosingTopLevelClass
+
+ /** The top-level class containing this symbol. */
+ def enclosingTopLevelClass: Symbol =
+ if (owner.isPackageClass) {
+ if (isClass) this else moduleClass
+ } else owner.enclosingTopLevelClass
+
+ /** Is this symbol defined in the same scope and compilation unit as `that` symbol? */
+ def isCoDefinedWith(that: Symbol) = (
+ !rawInfoIsNoType
+ && (this.effectiveOwner == that.effectiveOwner)
+ && ( !this.effectiveOwner.isPackageClass
+ || (this.sourceFile eq null)
+ || (that.sourceFile eq null)
+ || (this.sourceFile.path == that.sourceFile.path) // Cheap possibly wrong check, then expensive normalization
+ || (this.sourceFile.canonicalPath == that.sourceFile.canonicalPath)
+ )
+ )
+
+ /** The internal representation of classes and objects:
+ *
+ * class Foo is "the class" or sometimes "the plain class"
+ * object Foo is "the module"
+ * class Foo$ is "the module class" (invisible to the user: it implements object Foo)
+ *
+ * class Foo <
+ * ^ ^ (2) \
+ * | | | \
+ * | (5) | (3)
+ * | | | \
+ * (1) v v \
+ * object Foo (4)-> > class Foo$
+ *
+ * (1) companionClass
+ * (2) companionModule
+ * (3) linkedClassOfClass
+ * (4) moduleClass
+ * (5) companionSymbol
+ */
+
+ /** For a module: the class with the same name in the same package.
+ * For all others: NoSymbol
+ * Note: does not work for classes owned by methods, see Namers.companionClassOf
+ *
+ * object Foo . companionClass --> class Foo
+ *
+ * !!! linkedClassOfClass depends on companionClass on the module class getting
+ * to the class. As presently implemented this potentially returns class for
+ * any symbol except NoSymbol.
+ */
+ def companionClass: Symbol = flatOwnerInfo.decl(name.toTypeName).suchThat(_ isCoDefinedWith this)
+
+ /** For a class: the module or case class factory with the same name in the same package.
+ * For all others: NoSymbol
+ * Note: does not work for modules owned by methods, see Namers.companionModuleOf
+ *
+ * class Foo . companionModule --> object Foo
+ */
+ def companionModule: Symbol = NoSymbol
+
+ /** For a module: its linked class
+ * For a plain class: its linked module or case factory.
+ * Note: does not work for modules owned by methods, see Namers.companionSymbolOf
+ *
+ * class Foo <-- companionSymbol --> object Foo
+ */
+ def companionSymbol: Symbol = NoSymbol
+
+ /** For a module class: its linked class
+ * For a plain class: the module class of its linked module.
+ *
+ * class Foo <-- linkedClassOfClass --> class Foo$
+ */
+ def linkedClassOfClass: Symbol = NoSymbol
+
+ /**
+ * Returns the rawInfo of the owner. If the current phase has flat classes,
+ * it first applies all pending type maps to this symbol.
+ *
+ * assume this is the ModuleSymbol for B in the following definition:
+ * package p { class A { object B { val x = 1 } } }
+ *
+ * The owner after flatten is "package p" (see "def owner"). The flatten type map enters
+ * symbol B in the decls of p. So to find a linked symbol ("object B" or "class B")
+ * we need to apply flatten to B first. Fixes #2470.
+ */
+ protected final def flatOwnerInfo: Type = {
+ if (needsFlatClasses)
+ info // result intentionally discarded: forcing `info` applies the pending flatten type maps (see doc comment above)
+ owner.rawInfo
+ }
+
+ /** If this symbol is an implementation class, its interface, otherwise the symbol itself
+ * The method follows two strategies to determine the interface.
+ * - during or after erasure, it takes the last parent of the implementation class
+ * (which is always the interface, by convention)
+ * - before erasure, it looks up the interface name in the scope of the owner of the class.
+ * This only works for implementation classes owned by other classes or traits.
+ * !!! Why?
+ */
+ def toInterface: Symbol = this
+
+ /** The module class corresponding to this module.
+ */
+ def moduleClass: Symbol = NoSymbol
+
+ /** The non-private symbol whose type matches the type of this symbol
+ * in given class.
+ *
+ * @param ofclazz The class containing the symbol's definition
+ * @param site The base type from which member types are computed
+ */
+ final def matchingSymbol(ofclazz: Symbol, site: Type): Symbol = {
+ //OPT cut down on #closures by special casing non-overloaded case
+ // was: ofclazz.info.nonPrivateDecl(name) filter (sym =>
+ // !sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
+ val result = ofclazz.info.nonPrivateDecl(name)
+ def qualifies(sym: Symbol) = !sym.isTerm || (site.memberType(this) matches site.memberType(sym))
+ if ((result eq NoSymbol) || !result.isOverloaded && qualifies(result)) result
+ else result filter qualifies
+ }
+
+ /** The non-private member of `site` whose type and name match the type of this symbol. */
+ final def matchingSymbol(site: Type, admit: Long = 0L): Symbol =
+ site.nonPrivateMemberAdmitting(name, admit).filter(sym =>
+ !sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
+
+ /** The symbol, in class `ofclazz`, that is overridden by this symbol.
+ *
+ * @param ofclazz is a base class of this symbol's owner.
+ */
+ final def overriddenSymbol(ofclazz: Symbol): Symbol =
+ if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, owner.thisType)
+
+ /** The symbol overriding this symbol in given subclass `ofclazz`.
+ *
+ * @param ofclazz is a subclass of this symbol's owner
+ */
+ final def overridingSymbol(ofclazz: Symbol): Symbol =
+ if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, ofclazz.thisType)
+
+ /** Returns all symbols overridden by this symbol. */
+ final def allOverriddenSymbols: List[Symbol] =
+ if (!owner.isClass) Nil
+ else owner.ancestors map overriddenSymbol filter (_ != NoSymbol)
+
+ /** Equivalent to allOverriddenSymbols.nonEmpty, but more efficient. */
+ // !!! When if ever will this answer differ from .isOverride?
+ // How/where is the OVERRIDE flag managed, as compared to how checks
+ // based on type membership will evaluate?
+ def isOverridingSymbol = owner.isClass && (
+ owner.ancestors exists (cls => matchingSymbol(cls, owner.thisType) != NoSymbol)
+ )
+ /** Equivalent to allOverriddenSymbols.head (or NoSymbol if no overrides) but more efficient. */
+ def nextOverriddenSymbol: Symbol = {
+ // ancestors are in linearization order, so the first hit is the "next" overridden symbol
+ if (owner.isClass) owner.ancestors foreach { base =>
+ val sym = overriddenSymbol(base)
+ if (sym != NoSymbol)
+ return sym
+ }
+ NoSymbol
+ }
+
+ /** Returns all symbols overridden by this symbol, plus all matching symbols
+ * defined in parents of the selftype.
+ */
+ final def extendedOverriddenSymbols: List[Symbol] =
+ if (!owner.isClass) Nil
+ else owner.thisSym.ancestors map overriddenSymbol filter (_ != NoSymbol)
+
+ /** The symbol accessed by a super in the definition of this symbol when
+ * seen from class `base`. This symbol is always concrete.
+ * pre: `this.owner` is in the base class sequence of `base`.
+ */
+ final def superSymbol(base: Symbol): Symbol = {
+ // scan the base classes strictly after this symbol's owner for the first concrete match
+ var bcs = base.info.baseClasses.dropWhile(owner != _).tail
+ var sym: Symbol = NoSymbol
+ while (!bcs.isEmpty && sym == NoSymbol) {
+ if (!bcs.head.isImplClass)
+ sym = matchingSymbol(bcs.head, base.thisType).suchThat(!_.isDeferred)
+ bcs = bcs.tail
+ }
+ sym
+ }
+
+ /** The getter of this value or setter definition in class `base`, or NoSymbol if
+ * none exists.
+ */
+ final def getter(base: Symbol): Symbol = base.info.decl(getterName) filter (_.hasAccessorFlag)
+
+ def getterName: TermName = (
+ if (isSetter) nme.setterToGetter(name.toTermName)
+ else if (nme.isLocalName(name)) nme.localToGetter(name.toTermName)
+ else name.toTermName
+ )
+
+ /** The setter of this value or getter definition, or NoSymbol if none exists */
+ final def setter(base: Symbol): Symbol = setter(base, false)
+
+ final def setter(base: Symbol, hasExpandedName: Boolean): Symbol = {
+ var sname = nme.getterToSetter(nme.getterName(name.toTermName))
+ if (hasExpandedName) sname = nme.expandedSetterName(sname, base)
+ base.info.decl(sname) filter (_.hasAccessorFlag)
+ }
+
+ /** If this is a derived value class, return its unbox method
+ * or NoSymbol if it does not exist.
+ */
+ def derivedValueClassUnbox: Symbol = NoSymbol
+
+ /** The case module corresponding to this case class
+ * @pre case class is a member of some other class or package
+ */
+ final def caseModule: Symbol = {
+ var modname = name.toTermName
+ // if the class has a qualified-private access boundary, the module name was expanded too
+ if (privateWithin.isClass && !privateWithin.isModuleClass && !hasFlag(EXPANDEDNAME))
+ modname = nme.expandedName(modname, privateWithin)
+ initialize.owner.info.decl(modname).suchThat(_.isModule)
+ }
+
+ /** If this symbol is a type parameter skolem (not an existential skolem!)
+ * its corresponding type parameter, otherwise this */
+ def deSkolemize: Symbol = this
+
+ /** If this symbol is an existential skolem the location (a Tree or null)
+ * where it was unpacked. Result type is AnyRef because trees are not visible here. */
+ def unpackLocation: AnyRef = null
+
+ /** Remove private modifier from symbol `sym`s definition. If `sym` is
+ * neither a constructor nor a static module, rename it by expanding its name to avoid name clashes
+ * @param base the fully qualified name of this class will be appended if name expansion is needed
+ */
+ final def makeNotPrivate(base: Symbol) {
+ if (this.isPrivate) {
+ setFlag(notPRIVATE)
+ // Marking these methods final causes problems for proxies which use subclassing. If people
+ // write their code with no usage of final, we probably shouldn't introduce it ourselves
+ // unless we know it is safe. ... Unfortunately if they aren't marked final the inliner
+ // thinks it can't inline them. So once again marking lateFINAL, and in genjvm we no longer
+ // generate ACC_FINAL on "final" methods which are actually lateFINAL.
+ if (isMethod && !isDeferred)
+ setFlag(lateFINAL)
+ if (!isStaticModule && !isClassConstructor) {
+ expandName(base)
+ if (isModule) moduleClass.makeNotPrivate(base)
+ }
+ }
+ }
+
+ /** Remove any access boundary and clear flags PROTECTED | PRIVATE.
+ */
+ def makePublic = this setPrivateWithin NoSymbol resetFlag AccessFlags
+
+ /** The first parameter to the first argument list of this method,
+ * or NoSymbol if inapplicable.
+ */
+ def firstParam = info.params match {
+ case p :: _ => p
+ case _ => NoSymbol
+ }
+/* code for fixing nested objects
+ def expandModuleClassName() {
+ name = newTypeName(name.toString + "$")
+ }
+
+ def isExpandedModuleClass: Boolean = name(name.length - 1) == '$'
+*/
+
+ /** Desire to re-use the field in ClassSymbol which stores the source
+ * file to also store the classfile, but without changing the behavior
+ * of sourceFile (which is expected at least in the IDE only to
+ * return actual source code.) So sourceFile has classfiles filtered out.
+ */
+ private def sourceFileOnly(file: AbstractFile): AbstractFile =
+ if ((file eq null) || (file.path endsWith ".class")) null else file
+
+ private def binaryFileOnly(file: AbstractFile): AbstractFile =
+ if ((file eq null) || !(file.path endsWith ".class")) null else file
+
+ final def binaryFile: AbstractFile = binaryFileOnly(associatedFile)
+ final def sourceFile: AbstractFile = sourceFileOnly(associatedFile)
+
+ /** Overridden in ModuleSymbols to delegate to the module class. */
+ def associatedFile: AbstractFile = enclosingTopLevelClass.associatedFile
+ def associatedFile_=(f: AbstractFile) { abort("associatedFile_= inapplicable for " + this) }
+
+ @deprecated("Use associatedFile_= instead", "2.10.0")
+ def sourceFile_=(f: AbstractFile): Unit = associatedFile_=(f)
+
+ /** If this is a sealed class, its known direct subclasses.
+ * Otherwise, the empty set.
+ */
+ def children: Set[Symbol] = Set()
+
+ /** Recursively assemble all children of this symbol.
+ */
+ def sealedDescendants: Set[Symbol] = children.flatMap(_.sealedDescendants) + this
+
+ // NoSymbol-aware combinators: orElse falls back when this is NoSymbol;
+ // andAlso applies the side-effecting function only when this is a real symbol.
+ @inline final def orElse(alt: => Symbol): Symbol = if (this ne NoSymbol) this else alt
+ @inline final def andAlso(f: Symbol => Unit): Symbol = { if (this ne NoSymbol) f(this) ; this }
+
+// ------ toString -------------------------------------------------------------------
+
+ /** A tag which (in the ideal case) uniquely identifies class symbols */
+ final def tag: Int = fullName.##
+
+ /** The simple name of this Symbol */
+ final def simpleName: Name = name
+
+ /** The String used to order otherwise identical sealed symbols.
+ * This uses data which is stable across runs and variable classpaths
+ * (the initial Name) before falling back on id, which varies depending
+ * on exactly when a symbol is loaded.
+ */
+ final def sealedSortName: String = initName + "#" + id
+
+ /** String representation of symbol's definition key word */
+ final def keyString: String =
+ if (isJavaInterface) "interface"
+ else if (isTrait && !isImplClass) "trait"
+ else if (isClass) "class"
+ else if (isType && !isParameter) "type"
+ else if (isVariable) "var"
+ else if (isPackage) "package"
+ else if (isModule) "object"
+ else if (isSourceMethod) "def"
+ else if (isTerm && (!isParameter || isParamAccessor)) "val"
+ else ""
+
+ // Triple of kind descriptions: precise (debug), user-friendly, and abbreviated.
+ private case class SymbolKind(accurate: String, sanitized: String, abbreviation: String)
+ private def symbolKind: SymbolKind = {
+ // order matters: more specific predicates must be tested before more general ones
+ var kind =
+ if (isTermMacro) ("macro method", "macro method", "MAC")
+ else if (isInstanceOf[FreeTermSymbol]) ("free term", "free term", "FTE")
+ else if (isInstanceOf[FreeTypeSymbol]) ("free type", "free type", "FTY")
+ else if (isPackage) ("package", "package", "PK")
+ else if (isPackageClass) ("package class", "package", "PKC")
+ else if (isPackageObject) ("package object", "package", "PKO")
+ else if (isPackageObjectClass) ("package object class", "package", "PKOC")
+ else if (isAnonymousClass) ("anonymous class", "anonymous class", "AC")
+ else if (isRefinementClass) ("refinement class", "", "RC")
+ else if (isModule) ("module", "object", "MOD")
+ else if (isModuleClass) ("module class", "object", "MODC")
+ else if (isGetter) ("getter", if (isSourceMethod) "method" else "value", "GET")
+ else if (isSetter) ("setter", if (isSourceMethod) "method" else "value", "SET")
+ else if (isTerm && isLazy) ("lazy value", "lazy value", "LAZ")
+ else if (isVariable) ("field", "variable", "VAR")
+ else if (isImplClass) ("implementation class", "class", "IMPL")
+ else if (isTrait) ("trait", "trait", "TRT")
+ else if (isClass) ("class", "class", "CLS")
+ else if (isType) ("type", "type", "TPE")
+ else if (isClassConstructor && isPrimaryConstructor) ("primary constructor", "constructor", "PCTOR")
+ else if (isClassConstructor) ("constructor", "constructor", "CTOR")
+ else if (isSourceMethod) ("method", "method", "METH")
+ else if (isTerm) ("value", "value", "VAL")
+ else ("", "", "???")
+ if (isSkolem) kind = (kind._1, kind._2, kind._3 + "#SKO")
+ SymbolKind(kind._1, kind._2, kind._3)
+ }
+
+ /** Accurate string representation of symbols' kind, suitable for developers. */
+ final def accurateKindString: String =
+ symbolKind.accurate
+
+ /** String representation of symbol's kind, suitable for the masses. */
+ private def sanitizedKindString: String =
+ symbolKind.sanitized
+
+ /** Abbreviated string representation of symbol's kind (used by nameString under -Yshow-symkinds). */
+ protected[scala] def abbreviatedKindString: String =
+ symbolKind.abbreviation
+
+ final def kindString: String =
+ if (settings.debug.value) accurateKindString
+ else sanitizedKindString
+
+ /** If the name of the symbol's owner should be used when you care about
+ * seeing an interesting name: in such cases this symbol is e.g. a method
+ * parameter with a synthetic name, a constructor named "this", an object
+ * "package", etc. The kind string, if non-empty, will be phrased relative
+ * to the name of the owner.
+ */
+ def hasMeaninglessName = (
+ isSetterParameter // x$1
+ || isClassConstructor // this
+ || isRefinementClass // <refinement>
+ || (name == nme.PACKAGE) // package
+ )
+
+ /** String representation of symbol's simple name.
+ * If !settings.debug translates expansions of operators back to operator symbol.
+ * E.g. $eq => =.
+ * If settings.uniqid, adds id.
+ * If settings.Yshowsymkinds, adds abbreviated symbol kind.
+ */
+ def nameString: String = (
+ if (!settings.uniqid.value && !settings.Yshowsymkinds.value) "" + originalName.decode
+ else if (settings.uniqid.value && !settings.Yshowsymkinds.value) originalName.decode + "#" + id
+ else if (!settings.uniqid.value && settings.Yshowsymkinds.value) originalName.decode + "#" + abbreviatedKindString
+ else originalName.decode + "#" + id + "#" + abbreviatedKindString
+ )
+
+ /** Dotted name path, stopping at the effective root (or NoSymbol). */
+ def fullNameString: String = {
+ def recur(sym: Symbol): String = {
+ if (sym.isRootSymbol || sym == NoSymbol) sym.nameString
+ else if (sym.owner.isEffectiveRoot) sym.nameString
+ else recur(sym.effectiveOwner.enclClass) + "." + sym.nameString
+ }
+
+ recur(this)
+ }
+
+ /** If settings.uniqid is set, the symbol's id, else "" */
+ final def idString = if (settings.uniqid.value) "#"+id else ""
+
+ /** String representation, including symbol's kind e.g., "class Foo", "method Bar".
+ * If hasMeaninglessName is true, uses the owner's name to disambiguate identity.
+ */
+ override def toString: String = compose(
+ kindString,
+ if (hasMeaninglessName) owner.decodedName + idString else nameString
+ )
+
+ /** String representation of location.
+ */
+ def ownsString: String = {
+ val owns = effectiveOwner
+ if (owns.isClass && !owns.isEmptyPrefix) "" + owns else ""
+ }
+
+ /** String representation of location, plus a preposition. Doesn't do much,
+ * for backward compatibility reasons.
+ */
+ def locationString: String = ownsString match {
+ case "" => ""
+ case s => " in " + s
+ }
+ def fullLocationString: String = toString + locationString
+ def signatureString: String = if (hasRawInfo) infoString(rawInfo) else "<_>"
+
+ /** String representation of symbol's definition following its name */
+ final def infoString(tp: Type): String = {
+ def parents = (
+ if (settings.debug.value) parentsString(tp.parents)
+ else briefParentsString(tp.parents)
+ )
+ if (isType) typeParamsString(tp) + (
+ if (isClass) " extends " + parents
+ else if (isAliasType) " = " + tp.resultType
+ else tp.resultType match {
+ case rt @ TypeBounds(_, _) => "" + rt
+ case rt => " <: " + rt
+ }
+ )
+ else if (isModule) "" // avoid "object X of type X.type"
+ else tp match {
+ case PolyType(tparams, res) => typeParamsString(tp) + infoString(res)
+ case NullaryMethodType(res) => infoString(res)
+ case MethodType(params, res) => valueParamsString(tp) + infoString(res)
+ case _ => ": " + tp
+ }
+ }
+
+ // Debugging helpers.
+ def infosString = infos.toString
+ def debugLocationString = fullLocationString + " (flags: " + debugFlagString + ")"
+
+ private def defStringCompose(infoString: String) = compose(
+ flagString,
+ keyString,
+ varianceString + nameString + infoString + flagsExplanationString
+ )
+ /** String representation of symbol's definition. It uses the
+ * symbol's raw info to avoid forcing types.
+ */
+ def defString = defStringCompose(signatureString)
+
+ /** String representation of symbol's definition, using the supplied
+ * info rather than the symbol's.
+ */
+ def defStringSeenAs(info: Type) = defStringCompose(infoString(info))
+
+ /** Concatenate strings separated by spaces, skipping empty components */
+ private def compose(ss: String*) = ss filter (_ != "") mkString " "
+
+ def isSingletonExistential =
+ nme.isSingletonName(name) && (info.bounds.hi.typeSymbol isSubClass SingletonClass)
+
+ /** String representation of existentially bound variable */
+ def existentialToString =
+ if (isSingletonExistential && !settings.debug.value)
+ "val " + tpnme.dropSingletonName(name) + ": " + dropSingletonType(info.bounds.hi)
+ else defString
+ }
+ // Implicit ClassTag for Symbol — presumably needed for generic array creation / pattern matching over Symbol; confirm usage sites.
+ implicit val SymbolTag = ClassTag[Symbol](classOf[Symbol])
+
+ /** A class for term symbols */
+ class TermSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName)
+ extends Symbol(initOwner, initPos, initName) with TermSymbolApi {
+ // Multi-purpose reference slot; see setModuleClass / setAlias / setLazyAccessor below for the roles it plays.
+ private[this] var _referenced: Symbol = NoSymbol
+ privateWithin = NoSymbol
+
+ type TypeOfClonedSymbol = TermSymbol
+
+ private[this] var _rawname: TermName = initName
+ def rawname = _rawname
+ def name = {
+ if (Statistics.hotEnabled) Statistics.incCounter(nameCount)
+ _rawname
+ }
+ override def name_=(name: Name) {
+ if (name != rawname) {
+ super.name_=(name) // logging
+ // renaming must also re-enter this symbol under the new name in its owners' scopes
+ changeNameInOwners(name)
+ _rawname = name.toTermName
+ }
+ }
+ final def asNameType(n: Name) = n.toTermName
+
+ /** Term symbols with the exception of static parts of Java classes and packages.
+ */
+ override def isValue = !(isModule && hasFlag(PACKAGE | JAVA))
+ override def isVariable = isMutable && !isMethod
+ override def isTermMacro = hasFlag(MACRO)
+
+ // interesting only for lambda lift. Captured variables are accessed from inner lambdas.
+ override def isCapturedVariable = hasAllFlags(MUTABLE | CAPTURED) && !hasFlag(METHOD)
+
+ override def companionSymbol: Symbol = companionClass
+ override def moduleClass = if (isModule) referenced else NoSymbol
+
+ override def hasDefault = this hasFlag DEFAULTPARAM // overloaded with TRAIT
+ override def isBridge = this hasFlag BRIDGE
+ override def isEarlyInitialized = this hasFlag PRESUPER
+ override def isMethod = this hasFlag METHOD
+ override def isModule = this hasFlag MODULE
+ override def isOverloaded = this hasFlag OVERLOADED
+ override def isPackage = this hasFlag PACKAGE
+ override def isValueParameter = this hasFlag PARAM
+
+ override def isSetterParameter = isValueParameter && owner.isSetter
+ override def isAccessor = this hasFlag ACCESSOR
+ override def isGetter = isAccessor && !isSetter
+ override def isSetter = isAccessor && nme.isSetterName(name) // todo: make independent of name, as this can be forged.
+ override def isLocalDummy = nme.isLocalDummyName(name)
+ override def isClassConstructor = name == nme.CONSTRUCTOR
+ override def isMixinConstructor = name == nme.MIXIN_CONSTRUCTOR
+ override def isConstructor = nme.isConstructorName(name)
+
+ override def isPackageObject = isModule && (name == nme.PACKAGE)
+ override def isStable = !isUnstable
+ private def isUnstable = (
+ isMutable
+ || (hasFlag(METHOD | BYNAMEPARAM) && !hasFlag(STABLE))
+ || (tpe.isVolatile && !hasAnnotation(uncheckedStableClass))
+ )
+
+ // The name in comments is what it is being disambiguated from.
+ // TODO - rescue CAPTURED from BYNAMEPARAM so we can see all the names.
+ override def resolveOverloadedFlag(flag: Long) = flag match {
+ case DEFAULTPARAM => "<defaultparam>" // TRAIT
+ case MIXEDIN => "<mixedin>" // EXISTENTIAL
+ case LABEL => "<label>" // CONTRAVARIANT / INCONSTRUCTOR
+ case PRESUPER => "<presuper>" // IMPLCLASS
+ case BYNAMEPARAM => if (this.isValueParameter) "<bynameparam>" else "<captured>" // COVARIANT
+ case _ => super.resolveOverloadedFlag(flag)
+ }
+
+ // `referenced` is overloaded storage: the module class for modules (setModuleClass),
+ // the alias target for super/param accessors (setAlias), the lazy accessor for lazy vals (setLazyAccessor).
+ def referenced: Symbol = _referenced
+ def referenced_=(x: Symbol) { _referenced = x }
+
+ def existentialBound = singletonBounds(this.tpe)
+
+ def cloneSymbolImpl(owner: Symbol, newFlags: Long): TermSymbol =
+ owner.newTermSymbol(name, pos, newFlags).copyAttrsFrom(this)
+
+ def copyAttrsFrom(original: TermSymbol): this.type = {
+ referenced = original.referenced
+ this
+ }
+
+ private val validAliasFlags = SUPERACCESSOR | PARAMACCESSOR | MIXEDIN | SPECIALIZED
+
+ override def alias: Symbol =
+ if (hasFlag(validAliasFlags)) initialize.referenced
+ else NoSymbol
+
+ def setAlias(alias: Symbol): TermSymbol = {
+ assert(alias != NoSymbol, this)
+ assert(!alias.isOverloaded, alias)
+ assert(hasFlag(validAliasFlags), this)
+
+ referenced = alias
+ this
+ }
+
+ override def outerSource: Symbol =
+ // SI-6888 Approximate the name to workaround the deficiencies in `nme.originalName`
+ // in the face of classes named '$'. SI-2806 remains open to address the deeper problem.
+ if (originalName endsWith (nme.OUTER)) initialize.referenced
+ else NoSymbol
+
+ def setModuleClass(clazz: Symbol): TermSymbol = {
+ assert(isModule, this)
+ referenced = clazz
+ this
+ }
+
+ def setLazyAccessor(sym: Symbol): TermSymbol = {
+ assert(isLazy && (referenced == NoSymbol || referenced == sym), (this, debugFlagString, referenced, sym))
+ referenced = sym
+ this
+ }
+
+ override def lazyAccessor: Symbol = {
+ assert(isLazy, this)
+ referenced
+ }
+
+ /** change name by appending $$<fully-qualified-name-of-class `base`>
+ * Do the same for any accessed symbols or setters/getters
+ */
+ override def expandName(base: Symbol) {
+ if (!hasFlag(EXPANDEDNAME)) {
+ setFlag(EXPANDEDNAME)
+ if (hasAccessorFlag && !isDeferred) {
+ accessed.expandName(base)
+ }
+ else if (hasGetter) {
+ getter(owner).expandName(base)
+ setter(owner).expandName(base)
+ }
+ name = nme.expandedName(name.toTermName, base)
+ }
+ }
+
+ protected def doCookJavaRawInfo() {
+ def cook(sym: Symbol) {
+ require(sym.isJavaDefined, sym)
+ // @M: I think this is more desirable, but Martin prefers to leave raw-types as-is as much as possible
+ // object rawToExistentialInJava extends TypeMap {
+ // def apply(tp: Type): Type = tp match {
+ // // any symbol that occurs in a java sig, not just java symbols
+ // // see http://lampsvn.epfl.ch/trac/scala/ticket/2454#comment:14
+ // case TypeRef(pre, sym, List()) if !sym.typeParams.isEmpty =>
+ // val eparams = typeParamsToExistentials(sym, sym.typeParams)
+ // existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe)))
+ // case _ =>
+ // mapOver(tp)
+ // }
+ // }
+ val tpe1 = rawToExistential(sym.tpe)
+ // println("cooking: "+ sym +": "+ sym.tpe +" to "+ tpe1)
+ if (tpe1 ne sym.tpe) {
+ sym.setInfo(tpe1)
+ }
+ }
+
+ if (isJavaDefined)
+ cook(this)
+ else if (isOverloaded)
+ for (sym2 <- alternatives)
+ if (sym2.isJavaDefined)
+ cook(sym2)
+ }
+ }
+ implicit val TermSymbolTag = ClassTag[TermSymbol](classOf[TermSymbol])
+
+ /** A class for module symbols */
+ class ModuleSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName)
+ extends TermSymbol(initOwner, initPos, initName) with ModuleSymbolApi {
+ private var flatname: TermName = null
+
+ override def associatedFile = moduleClass.associatedFile
+ override def associatedFile_=(f: AbstractFile) { moduleClass.associatedFile = f }
+
+ override def moduleClass = referenced
+ override def companionClass =
+ flatOwnerInfo.decl(name.toTypeName).suchThat(sym => sym.isClass && (sym isCoDefinedWith this))
+
+ override def owner = {
+ if (Statistics.hotEnabled) Statistics.incCounter(ownerCount)
+ if (!isMethod && needsFlatClasses) rawowner.owner
+ else rawowner
+ }
+ override def name: TermName = {
+ if (Statistics.hotEnabled) Statistics.incCounter(nameCount)
+ if (!isMethod && needsFlatClasses) {
+ if (flatname eq null)
+ flatname = nme.flattenedName(rawowner.name, rawname)
+
+ flatname
+ }
+ else rawname
+ }
+ }
+ implicit val ModuleSymbolTag = ClassTag[ModuleSymbol](classOf[ModuleSymbol])
+
+ /** A class for method symbols */
+ class MethodSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName)
+ extends TermSymbol(initOwner, initPos, initName) with MethodSymbolApi {
+ private[this] var mtpePeriod = NoPeriod
+ private[this] var mtpePre: Type = _
+ private[this] var mtpeResult: Type = _
+ private[this] var mtpeInfo: Type = _
+
+ override def isLabel = this hasFlag LABEL
+ override def isVarargsMethod = this hasFlag VARARGS
+ override def isLiftedMethod = this hasFlag LIFTED
+
+ // TODO - this seems a strange definition for "isSourceMethod", given that
+ // it does not make any specific effort to exclude synthetics. Figure out what
+ // this method is really for and what logic makes sense.
+ override def isSourceMethod = !(this hasFlag STABLE) // exclude all accessors
+ // unfortunately having the CASEACCESSOR flag does not actually mean you
+ // are a case accessor (you can also be a field.)
+ override def isCaseAccessorMethod = isCaseAccessor
+
+ def typeAsMemberOf(pre: Type): Type = {
+ if (mtpePeriod == currentPeriod) {
+ if ((mtpePre eq pre) && (mtpeInfo eq info)) return mtpeResult
+ } else if (isValid(mtpePeriod)) {
+ mtpePeriod = currentPeriod
+ if ((mtpePre eq pre) && (mtpeInfo eq info)) return mtpeResult
+ }
+ val res = pre.computeMemberType(this)
+ mtpePeriod = currentPeriod
+ mtpePre = pre
+ mtpeInfo = info
+ mtpeResult = res
+ res
+ }
+
+ override def isVarargs: Boolean = definitions.isVarArgsList(paramss.flatten)
+
+ override def returnType: Type = {
+ def loop(tpe: Type): Type =
+ tpe match {
+ case NullaryMethodType(ret) => loop(ret)
+ case MethodType(_, ret) => loop(ret)
+ case PolyType(_, tpe) => loop(tpe)
+ case tpe => tpe
+ }
+ loop(info)
+ }
+ }
+ implicit val MethodSymbolTag = ClassTag[MethodSymbol](classOf[MethodSymbol])
+
+ class AliasTypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
+ extends TypeSymbol(initOwner, initPos, initName) {
+ type TypeOfClonedSymbol = TypeSymbol
+ final override def isAliasType = true
+ override def cloneSymbolImpl(owner: Symbol, newFlags: Long): TypeSymbol =
+ owner.newNonClassSymbol(name, pos, newFlags)
+ }
+
+ class AbstractTypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
+ extends TypeSymbol(initOwner, initPos, initName) {
+ type TypeOfClonedSymbol = TypeSymbol
+ final override def isAbstractType = true
+ override def existentialBound = this.info
+ override def cloneSymbolImpl(owner: Symbol, newFlags: Long): TypeSymbol =
+ owner.newNonClassSymbol(name, pos, newFlags)
+ }
+
+ /** A class of type symbols. Alias and abstract types are direct instances
+ * of this class. Classes are instances of a subclass.
+ */
+ abstract class TypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
+ extends Symbol(initOwner, initPos, initName) with TypeSymbolApi {
+ privateWithin = NoSymbol
+ private[this] var _rawname: TypeName = initName
+
+ type TypeOfClonedSymbol >: Null <: TypeSymbol
+ // cloneSymbolImpl still abstract in TypeSymbol.
+
+ def rawname = _rawname
+ def name = {
+ if (Statistics.hotEnabled) Statistics.incCounter(nameCount)
+ _rawname
+ }
+ final def asNameType(n: Name) = n.toTypeName
+
+ override def isNonClassType = true
+ override def isTypeMacro = hasFlag(MACRO)
+
+ override def resolveOverloadedFlag(flag: Long) = flag match {
+ case TRAIT => "<trait>" // DEFAULTPARAM
+ case EXISTENTIAL => "<existential>" // MIXEDIN
+ case COVARIANT => "<covariant>" // BYNAMEPARAM / CAPTURED
+ case CONTRAVARIANT => "<contravariant>" // LABEL / INCONSTRUCTOR (overridden again in ClassSymbol)
+ case _ => super.resolveOverloadedFlag(flag)
+ }
+
+ private var tyconCache: Type = null
+ private var tyconRunId = NoRunId
+ private var tpeCache: Type = _
+ private var tpePeriod = NoPeriod
+
+ override def isAbstractType = this hasFlag DEFERRED
+ override def isContravariant = this hasFlag CONTRAVARIANT
+ override def isCovariant = this hasFlag COVARIANT
+ override def isExistentialQuantified = isExistentiallyBound && !isSkolem
+ override def isExistentiallyBound = this hasFlag EXISTENTIAL
+ override def isTypeParameter = isTypeParameterOrSkolem && !isSkolem
+ override def isTypeParameterOrSkolem = this hasFlag PARAM
+
+ /** Overridden in subclasses for which it makes sense.
+ */
+ def existentialBound: Type = abort("unexpected type: "+this.getClass+ " "+debugLocationString)
+
+ // TODO - don't allow names to be renamed in this unstructured a fashion.
+ // Rename as little as possible. Enforce invariants on all renames.
+ override def name_=(name: Name) {
+ if (name != rawname) {
+ super.name_=(name) // logging
+ changeNameInOwners(name)
+ _rawname = name.toTypeName
+ }
+ }
+
+ private def newPrefix = if (this hasFlag EXISTENTIAL | PARAM) NoPrefix else owner.thisType
+ private def newTypeRef(targs: List[Type]) = typeRef(newPrefix, this, targs)
+
+ /** Let's say you have a type definition
+ *
+ * {{{
+ * type T <: Number
+ * }}}
+ *
+ * and tsym is the symbol corresponding to T. Then
+ *
+ * {{{
+ * tsym.info = TypeBounds(Nothing, Number)
+ * tsym.tpe = TypeRef(NoPrefix, T, List())
+ * }}}
+ */
+ override def tpe: Type = {
+ if (tpeCache eq NoType) throw CyclicReference(this, typeConstructor)
+ if (tpePeriod != currentPeriod) {
+ if (isValid(tpePeriod)) {
+ tpePeriod = currentPeriod
+ } else {
+ if (isInitialized) tpePeriod = currentPeriod
+ tpeCache = NoType
+ val targs =
+ if (phase.erasedTypes && this != ArrayClass) List()
+ else unsafeTypeParams map (_.typeConstructor)
+ //@M! use typeConstructor to generate dummy type arguments,
+ // sym.tpe should not be called on a symbol that's supposed to be a higher-kinded type
+ // memberType should be used instead, that's why it uses tpeHK and not tpe
+ tpeCache = newTypeRef(targs)
+ }
+ }
+ assert(tpeCache ne null/*, "" + this + " " + phase*/)//debug
+ tpeCache
+ }
+
+ /** @M -- tpe vs tpeHK:
+ *
+ * tpe: creates a TypeRef with dummy type arguments and kind *
+ * tpeHK: creates a TypeRef with no type arguments but with type parameters
+ *
+ * If typeParams is nonEmpty, calling tpe may hide errors or
+ * introduce spurious ones. (For example, when deriving a type from
+ * the symbol of a type argument that may be higher-kinded.) As far
+ * as I can tell, it only makes sense to call tpe in conjunction
+ * with a substitution that replaces the generated dummy type
+ * arguments by their actual types.
+ *
+ * TODO: the above conditions desperately need to be enforced by code.
+ */
+ override def tpeHK = typeConstructor // @M! used in memberType
+
+ override def typeConstructor: Type = {
+ if ((tyconCache eq null) || tyconRunId != currentRunId) {
+ tyconCache = newTypeRef(Nil)
+ tyconRunId = currentRunId
+ }
+ assert(tyconCache ne null)
+ tyconCache
+ }
+
+ override def info_=(tp: Type) {
+ tpePeriod = NoPeriod
+ tyconCache = null
+ super.info_=(tp)
+ }
+
+ final override def isNonBottomSubClass(that: Symbol): Boolean = (
+ (this eq that) || this.isError || that.isError ||
+ info.baseTypeIndex(that) >= 0
+ )
+
+ override def reset(completer: Type): this.type = {
+ super.reset(completer)
+ tpePeriod = NoPeriod
+ tyconRunId = NoRunId
+ this
+ }
+
+ /*** example:
+ * public class Test3<T> {}
+ * public class Test1<T extends Test3> {}
+ * info for T in Test1 should be >: Nothing <: Test3[_]
+ */
+ protected def doCookJavaRawInfo() {
+ if (isJavaDefined || owner.isJavaDefined) {
+ val tpe1 = rawToExistential(info)
+ // println("cooking type: "+ this +": "+ info +" to "+ tpe1)
+ if (tpe1 ne info) {
+ setInfo(tpe1)
+ }
+ }
+ }
+
+ if (Statistics.hotEnabled) Statistics.incCounter(typeSymbolCount)
+ }
+ implicit val TypeSymbolTag = ClassTag[TypeSymbol](classOf[TypeSymbol])
+
+ /** A class for type parameters viewed from inside their scopes
+ *
+ * @param origin Can be either a tree, or a symbol, or null.
+ * If skolem got created from newTypeSkolem (called in Namers), origin denotes
+ * the type parameter from which the skolem was created. If it got created from
+ * skolemizeExistential, origin is either null or a Tree. If it is a Tree, it indicates
+ * where the skolem was introduced (this is important for knowing when to pack it
+ * again into ab Existential). origin is `null` only in skolemizeExistentials called
+ * from <:< or isAsSpecific, because here its value does not matter.
+ * I believe the following invariant holds:
+ *
+ * origin.isInstanceOf[Symbol] == !hasFlag(EXISTENTIAL)
+ */
+ class TypeSkolem protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName, origin: AnyRef)
+ extends TypeSymbol(initOwner, initPos, initName) {
+ type TypeOfClonedSymbol = TypeSkolem
+ /** The skolemization level in place when the skolem was constructed */
+ val level = skolemizationLevel
+
+ final override def isSkolem = true
+
+ // a type symbol bound by an existential type, for instance the T in
+ // List[T] forSome { type T }
+ override def isExistentialSkolem = this hasFlag EXISTENTIAL
+ override def isGADTSkolem = this hasAllFlags GADT_SKOLEM_FLAGS
+ override def isTypeSkolem = this hasFlag PARAM
+ override def isAbstractType = this hasFlag DEFERRED
+
+ override def isExistentialQuantified = false
+ override def existentialBound = if (isAbstractType) this.info else super.existentialBound
+
+ /** If typeskolem comes from a type parameter, that parameter, otherwise skolem itself */
+ override def deSkolemize = origin match {
+ case s: Symbol => s
+ case _ => this
+ }
+
+ /** If type skolem comes from an existential, the tree where it was created */
+ override def unpackLocation = origin
+
+ //@M! (not deSkolemize.typeParams!!), also can't leave superclass definition: use info, not rawInfo
+ override def typeParams = info.typeParams
+
+ override def cloneSymbolImpl(owner: Symbol, newFlags: Long): TypeSkolem =
+ owner.newTypeSkolemSymbol(name, origin, pos, newFlags)
+
+ override def nameString: String =
+ if (settings.debug.value) (super.nameString + "&" + level)
+ else super.nameString
+ }
+
+ /** A class for class symbols */
+ class ClassSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
+ extends TypeSymbol(initOwner, initPos, initName) with ClassSymbolApi {
+ type TypeOfClonedSymbol = ClassSymbol
+
+ private[this] var flatname: TypeName = _
+ private[this] var _associatedFile: AbstractFile = _
+ private[this] var thissym: Symbol = this
+
+ private[this] var thisTypeCache: Type = _
+ private[this] var thisTypePeriod = NoPeriod
+
+ override def resolveOverloadedFlag(flag: Long) = flag match {
+ case INCONSTRUCTOR => "<inconstructor>" // INCONSTRUCTOR / CONTRAVARIANT / LABEL
+ case EXISTENTIAL => "<existential>" // EXISTENTIAL / MIXEDIN
+ case IMPLCLASS => "<implclass>" // IMPLCLASS / PRESUPER
+ case _ => super.resolveOverloadedFlag(flag)
+ }
+
+ final override def isNonClassType = false
+ final override def isAbstractType = false
+ final override def isAliasType = false
+ final override def isContravariant = false
+
+ override def isAbstractClass = this hasFlag ABSTRACT
+ override def isCaseClass = this hasFlag CASE
+ override def isClassLocalToConstructor = this hasFlag INCONSTRUCTOR
+ override def isImplClass = this hasFlag IMPLCLASS
+ override def isModuleClass = this hasFlag MODULE
+ override def isPackageClass = this hasFlag PACKAGE
+ override def isTrait = this hasFlag TRAIT
+
+ override def isAnonOrRefinementClass = isAnonymousClass || isRefinementClass
+ override def isAnonymousClass = name containsName tpnme.ANON_CLASS_NAME
+ override def isConcreteClass = !(this hasFlag ABSTRACT | TRAIT)
+ override def isJavaInterface = hasAllFlags(JAVA | TRAIT)
+ override def isNestedClass = !owner.isPackageClass
+ override def isNumericValueClass = definitions.isNumericValueClass(this)
+ override def isNumeric = isNumericValueClass
+ override def isPackageObjectClass = isModuleClass && (name == tpnme.PACKAGE)
+ override def isPrimitiveValueClass = definitions.isPrimitiveValueClass(this)
+ override def isPrimitive = isPrimitiveValueClass
+
+ // The corresponding interface is the last parent by convention.
+ private def lastParent = if (tpe.parents.isEmpty) NoSymbol else tpe.parents.last.typeSymbol
+ override def toInterface: Symbol = (
+ if (isImplClass) {
+ if (phase.next.erasedTypes) lastParent
+ else owner.info.decl(tpnme.interfaceName(name))
+ }
+ else super.toInterface
+ )
+
+ /** Is this class locally defined?
+ * A class is local, if
+ * - it is anonymous, or
+ * - its owner is a value
+ * - it is defined within a local class
+ */
+ override def isLocalClass = (
+ isAnonOrRefinementClass
+ || isLocal
+ || !owner.isPackageClass && owner.isLocalClass
+ )
+ override def isStableClass = (this hasFlag STABLE) || checkStable()
+
+ private def checkStable() = {
+ def hasNoAbstractTypeMember(clazz: Symbol): Boolean =
+ (clazz hasFlag STABLE) || {
+ var e = clazz.info.decls.elems
+ while ((e ne null) && !(e.sym.isAbstractType && info.member(e.sym.name) == e.sym))
+ e = e.next
+ e == null
+ }
+ (info.baseClasses forall hasNoAbstractTypeMember) && {
+ setFlag(STABLE)
+ true
+ }
+ }
+
+ override def enclClassChain = this :: owner.enclClassChain
+
+ /** A helper method that factors the common code used the discover a
+ * companion module of a class. If a companion module exists, its symbol is
+ * returned, otherwise, `NoSymbol` is returned.
+ */
+ protected final def companionModule0: Symbol =
+ flatOwnerInfo.decl(name.toTermName).suchThat(
+ sym => sym.isModule && (sym isCoDefinedWith this) && !sym.isMethod)
+
+ override def companionModule = companionModule0
+ override def companionSymbol = companionModule0
+ override def linkedClassOfClass = companionModule.moduleClass
+
+ override def sourceModule = if (isModuleClass) companionModule else NoSymbol
+
+ override def existentialBound = GenPolyType(this.typeParams, TypeBounds.upper(this.classBound))
+
+ def primaryConstructorName = if (this hasFlag TRAIT | IMPLCLASS) nme.MIXIN_CONSTRUCTOR else nme.CONSTRUCTOR
+
+ override def primaryConstructor = {
+ val c = info decl primaryConstructorName
+ if (c.isOverloaded) c.alternatives.head else c
+ }
+
+ override def associatedFile = if (owner.isPackageClass) _associatedFile else super.associatedFile
+ override def associatedFile_=(f: AbstractFile) { _associatedFile = f }
+
+ override def reset(completer: Type): this.type = {
+ super.reset(completer)
+ thissym = this
+ this
+ }
+
+ /** the type this.type in this class */
+ override def thisType: Type = {
+ val period = thisTypePeriod
+ if (period != currentPeriod) {
+ thisTypePeriod = currentPeriod
+ if (!isValid(period)) thisTypeCache = ThisType(this)
+ }
+ thisTypeCache
+ }
+
+ override def owner: Symbol = {
+ if (Statistics.hotEnabled) Statistics.incCounter(ownerCount)
+ if (needsFlatClasses) rawowner.owner else rawowner
+ }
+
+ override def name: TypeName = {
+ if (Statistics.canEnable) Statistics.incCounter(nameCount)
+ if (needsFlatClasses) {
+ if (flatname eq null)
+ flatname = tpnme.flattenedName(rawowner.name, rawname)
+
+ flatname
+ }
+ else rawname
+ }
+
+ /** A symbol carrying the self type of the class as its type */
+ override def thisSym: Symbol = thissym
+
+ /** Sets the self type of the class */
+ override def typeOfThis_=(tp: Type) {
+ thissym = newThisSym(nme.this_, pos).setInfo(tp)
+ }
+
+ override def cloneSymbolImpl(owner: Symbol, newFlags: Long): ClassSymbol = {
+ val clone = owner.newClassSymbol(name, pos, newFlags)
+ if (thisSym != this) {
+ clone.typeOfThis = typeOfThis
+ clone.thisSym setName thisSym.name
+ }
+ if (_associatedFile ne null)
+ clone.associatedFile = _associatedFile
+
+ clone
+ }
+
+ override def derivedValueClassUnbox =
+ // (info.decl(nme.unbox)) orElse uncomment once we accept unbox methods
+ (info.decls.find(_ hasAllFlags PARAMACCESSOR | METHOD) getOrElse
+ NoSymbol)
+
+ private[this] var childSet: Set[Symbol] = Set()
+ override def children = childSet
+ override def addChild(sym: Symbol) { childSet = childSet + sym }
+
+ if (Statistics.hotEnabled) Statistics.incCounter(classSymbolCount)
+ }
+ implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol])
+
+ /** A class for module class symbols
+ * Note: Not all module classes are of this type; when unpickled, we get
+ * plain class symbols!
+ */
+ class ModuleClassSymbol protected[Symbols] (owner: Symbol, pos: Position, name: TypeName)
+ extends ClassSymbol(owner, pos, name) {
+ private[this] var module: Symbol = _
+ private[this] var typeOfThisCache: Type = _
+ private[this] var typeOfThisPeriod = NoPeriod
+
+ private var implicitMembersCacheValue: Scope = EmptyScope
+ private var implicitMembersCacheKey1: Type = NoType
+ private var implicitMembersCacheKey2: ScopeEntry = null
+
+ override def isModuleClass = true
+ override def linkedClassOfClass = companionClass
+
+ /** the self type of an object foo is foo.type, not class<foo>.this.type
+ */
+ override def typeOfThis = {
+ val period = typeOfThisPeriod
+ if (period != currentPeriod) {
+ typeOfThisPeriod = currentPeriod
+ if (!isValid(period))
+ typeOfThisCache = singleType(owner.thisType, sourceModule)
+ }
+ typeOfThisCache
+ }
+
+ def implicitMembers: Scope = {
+ val tp = info
+ if ((implicitMembersCacheKey1 ne tp) || (implicitMembersCacheKey2 ne tp.decls.elems)) {
+ // Skip a package object class, because the members are also in
+ // the package and we wish to avoid spurious ambiguities as in pos/t3999.
+ if (!isPackageObjectClass) {
+ implicitMembersCacheKey1 = tp
+ implicitMembersCacheKey2 = tp.decls.elems
+ implicitMembersCacheValue = tp.implicitMembers
+ }
+ }
+ implicitMembersCacheValue
+ }
+ // The null check seems to be necessary for the reifier.
+ override def sourceModule = if (module ne null) module else companionModule
+ override def sourceModule_=(module: Symbol) { this.module = module }
+ }
+
+ class PackageObjectClassSymbol protected[Symbols] (owner0: Symbol, pos0: Position)
+ extends ModuleClassSymbol(owner0, pos0, tpnme.PACKAGE) {
+ final override def isPackageObjectClass = true
+ final override def isPackageObjectOrClass = true
+ final override def skipPackageObject = owner
+ final override def setName(name: Name): this.type = {
+ abort("Can't rename a package object to " + name)
+ }
+ }
+
+ trait ImplClassSymbol extends ClassSymbol {
+ override def sourceModule = companionModule
+ // override def isImplClass = true
+ override def typeOfThis = thisSym.tpe // don't use the ModuleClassSymbol typeOfThisCache.
+ }
+
+ class PackageClassSymbol protected[Symbols] (owner0: Symbol, pos0: Position, name0: TypeName)
+ extends ModuleClassSymbol(owner0, pos0, name0) {
+ override def sourceModule = companionModule
+ override def enclClassChain = Nil
+ override def isPackageClass = true
+ }
+
+ class RefinementClassSymbol protected[Symbols] (owner0: Symbol, pos0: Position)
+ extends ClassSymbol(owner0, pos0, tpnme.REFINE_CLASS_NAME) {
+ override def name_=(name: Name) {
+ abort("Cannot set name of RefinementClassSymbol to " + name)
+ super.name_=(name)
+ }
+ override def isRefinementClass = true
+ override def isAnonOrRefinementClass = true
+ override def isLocalClass = true
+ override def hasMeaninglessName = true
+ override def companionModule: Symbol = NoSymbol
+
+ /** The mentioned twist. A refinement class has transowner X
+ * if any of its parents has transowner X.
+ */
+ override def hasTransOwner(sym: Symbol) = (
+ super.hasTransOwner(sym)
+ || info.parents.exists(_.typeSymbol hasTransOwner sym)
+ )
+ }
+ trait StubSymbol extends Symbol {
+ protected def missingMessage: String
+
+ /** Fail the stub by throwing a [[scala.reflect.internal.MissingRequirementError]]. */
+ override final def failIfStub() = {MissingRequirementError.signal(missingMessage)} //
+
+ /** Fail the stub by reporting an error to the reporter, setting the IS_ERROR flag
+ * on this symbol, and returning the dummy value `alt`.
+ */
+ private def fail[T](alt: T): T = {
+ // Avoid issuing lots of redundant errors
+ if (!hasFlag(IS_ERROR)) {
+ globalError(missingMessage)
+ if (settings.debug.value)
+ (new Throwable).printStackTrace
+
+ this setFlag IS_ERROR
+ }
+ alt
+ }
+ // This one doesn't call fail because SpecializeTypes winds up causing
+ // isMonomorphicType to be called, which calls this, which would fail us
+ // in all the scenarios we're trying to keep from failing.
+ override def originalInfo = NoType
+ override def associatedFile = owner.associatedFile
+ override def info = fail(NoType)
+ override def rawInfo = fail(NoType)
+ override def companionSymbol = fail(NoSymbol)
+
+ debugwarn("creating stub symbol to defer error: " + missingMessage)
+ }
+ class StubClassSymbol(owner0: Symbol, name0: TypeName, protected val missingMessage: String) extends ClassSymbol(owner0, owner0.pos, name0) with StubSymbol
+ class StubTermSymbol(owner0: Symbol, name0: TermName, protected val missingMessage: String) extends TermSymbol(owner0, owner0.pos, name0) with StubSymbol
+
+ trait FreeSymbol extends Symbol {
+ def origin: String
+ }
+ class FreeTermSymbol(name0: TermName, value0: => Any, val origin: String) extends TermSymbol(NoSymbol, NoPosition, name0) with FreeSymbol with FreeTermSymbolApi {
+ def value = value0
+ }
+ implicit val FreeTermSymbolTag = ClassTag[FreeTermSymbol](classOf[FreeTermSymbol])
+
+ class FreeTypeSymbol(name0: TypeName, val origin: String) extends TypeSkolem(NoSymbol, NoPosition, name0, NoSymbol) with FreeSymbol with FreeTypeSymbolApi
+ implicit val FreeTypeSymbolTag = ClassTag[FreeTypeSymbol](classOf[FreeTypeSymbol])
+
+ /** An object representing a missing symbol */
+ class NoSymbol protected[Symbols]() extends Symbol(null, NoPosition, nme.NO_NAME) {
+ final type NameType = TermName
+ type TypeOfClonedSymbol = NoSymbol
+
+ def asNameType(n: Name) = n.toTermName
+ def rawname = nme.NO_NAME
+ def name = nme.NO_NAME
+ override def name_=(n: Name) = abort("Cannot set NoSymbol's name to " + n)
+
+ synchronized {
+ setInfo(NoType)
+ privateWithin = this
+ }
+ override def info_=(info: Type) = {
+ infos = TypeHistory(1, NoType, null)
+ unlock()
+ validTo = currentPeriod
+ }
+ override def flagMask = AllFlags
+ override def exists = false
+ override def isHigherOrderTypeParameter = false
+ override def companionClass = NoSymbol
+ override def companionModule = NoSymbol
+ override def companionSymbol = NoSymbol
+ override def isSubClass(that: Symbol) = false
+ override def filter(cond: Symbol => Boolean) = this
+ override def defString: String = toString
+ override def locationString: String = ""
+ override def enclClassChain = Nil
+ override def enclClass: Symbol = this
+ override def enclosingTopLevelClass: Symbol = this
+ override def enclosingPackageClass: Symbol = this
+ override def enclMethod: Symbol = this
+ override def associatedFile = null
+ override def ownerChain: List[Symbol] = List()
+ override def ownersIterator: Iterator[Symbol] = Iterator.empty
+ override def alternatives: List[Symbol] = List()
+ override def reset(completer: Type): this.type = this
+ override def info: Type = NoType
+ override def existentialBound: Type = NoType
+ override def rawInfo: Type = NoType
+ protected def doCookJavaRawInfo() {}
+ override def accessBoundary(base: Symbol): Symbol = enclosingRootClass
+ def cloneSymbolImpl(owner: Symbol, newFlags: Long) = abort("NoSymbol.clone()")
+ override def originalEnclosingMethod = this
+
+ override def owner: Symbol =
+ abort("no-symbol does not have an owner")
+ override def typeConstructor: Type =
+ abort("no-symbol does not have a type constructor (this may indicate scalac cannot find fundamental classes)")
+ }
+
+ protected def makeNoSymbol: NoSymbol = new NoSymbol
+
+ lazy val NoSymbol: NoSymbol = makeNoSymbol
+
+ /** Derives a new list of symbols from the given list by mapping the given
+ * list across the given function. Then fixes the info of all the new symbols
+ * by substituting the new symbols for the original symbols.
+ *
+ * @param syms the prototypical symbols
+ * @param symFn the function to create new symbols
+ * @return the new list of info-adjusted symbols
+ */
+ def deriveSymbols(syms: List[Symbol], symFn: Symbol => Symbol): List[Symbol] = {
+ val syms1 = syms map symFn
+ syms1 map (_ substInfo (syms, syms1))
+ }
+
+ /** Derives a new Type by first deriving new symbols as in deriveSymbols,
+ * then performing the same oldSyms => newSyms substitution on `tpe` as is
+ * performed on the symbol infos in deriveSymbols.
+ *
+ * @param syms the prototypical symbols
+ * @param symFn the function to create new symbols
+ * @param tpe the prototypical type
+ * @return the new symbol-subsituted type
+ */
+ def deriveType(syms: List[Symbol], symFn: Symbol => Symbol)(tpe: Type): Type = {
+ val syms1 = deriveSymbols(syms, symFn)
+ tpe.substSym(syms, syms1)
+ }
+ /** Derives a new Type by instantiating the given list of symbols as
+ * WildcardTypes.
+ *
+ * @param syms the symbols to replace
+ * @return the new type with WildcardType replacing those syms
+ */
+ def deriveTypeWithWildcards(syms: List[Symbol])(tpe: Type): Type = {
+ if (syms.isEmpty) tpe
+ else tpe.instantiateTypeParams(syms, syms map (_ => WildcardType))
+ }
+ /** Convenience functions which derive symbols by cloning.
+ */
+ def cloneSymbols(syms: List[Symbol]): List[Symbol] =
+ deriveSymbols(syms, _.cloneSymbol)
+ def cloneSymbolsAtOwner(syms: List[Symbol], owner: Symbol): List[Symbol] =
+ deriveSymbols(syms, _ cloneSymbol owner)
+
+ /** Clone symbols and apply the given function to each new symbol's info.
+ *
+ * @param syms the prototypical symbols
+ * @param infoFn the function to apply to the infos
+ * @return the newly created, info-adjusted symbols
+ */
+ def cloneSymbolsAndModify(syms: List[Symbol], infoFn: Type => Type): List[Symbol] =
+ cloneSymbols(syms) map (_ modifyInfo infoFn)
+ def cloneSymbolsAtOwnerAndModify(syms: List[Symbol], owner: Symbol, infoFn: Type => Type): List[Symbol] =
+ cloneSymbolsAtOwner(syms, owner) map (_ modifyInfo infoFn)
+
+ /** Functions which perform the standard clone/substituting on the given symbols and type,
+ * then call the creator function with the new symbols and type as arguments.
+ */
+ def createFromClonedSymbols[T](syms: List[Symbol], tpe: Type)(creator: (List[Symbol], Type) => T): T = {
+ val syms1 = cloneSymbols(syms)
+ creator(syms1, tpe.substSym(syms, syms1))
+ }
+ def createFromClonedSymbolsAtOwner[T](syms: List[Symbol], owner: Symbol, tpe: Type)(creator: (List[Symbol], Type) => T): T = {
+ val syms1 = cloneSymbolsAtOwner(syms, owner)
+ creator(syms1, tpe.substSym(syms, syms1))
+ }
+
+ /** A deep map on a symbol's paramss.
+ */
+ def mapParamss[T](sym: Symbol)(f: Symbol => T): List[List[T]] = mmap(sym.info.paramss)(f)
+
+ /** Return closest enclosing method, unless shadowed by an enclosing class. */
+ // TODO Move back to ExplicitOuter when the other call site is removed.
+ // no use of closures here in the interest of speed.
+ final def closestEnclMethod(from: Symbol): Symbol =
+ if (from.isSourceMethod) from
+ else if (from.isClass) NoSymbol
+ else closestEnclMethod(from.owner)
+
+ /** An exception for cyclic references of symbol definitions */
+ case class CyclicReference(sym: Symbol, info: Type)
+ extends TypeError("illegal cyclic reference involving " + sym) {
+ if (settings.debug.value) printStackTrace()
+ }
+
+ /** A class for type histories */
+ private sealed case class TypeHistory(var validFrom: Period, info: Type, prev: TypeHistory) {
+ assert((prev eq null) || phaseId(validFrom) > phaseId(prev.validFrom), this)
+ assert(validFrom != NoPeriod, this)
+
+ override def toString() =
+ "TypeHistory(" + phaseOf(validFrom)+":"+runId(validFrom) + "," + info + "," + prev + ")"
+
+ def toList: List[TypeHistory] = this :: ( if (prev eq null) Nil else prev.toList )
+ }
+
+// ----- Hoisted closures and convenience methods, for compile time reductions -------
+
+ private[scala] final val symbolIsPossibleInRefinement = (sym: Symbol) => sym.isPossibleInRefinement
+ private[scala] final val symbolIsNonVariant = (sym: Symbol) => sym.variance == 0
+
+ @tailrec private[scala] final
+ def allSymbolsHaveOwner(syms: List[Symbol], owner: Symbol): Boolean = syms match {
+ case sym :: rest => sym.owner == owner && allSymbolsHaveOwner(rest, owner)
+ case _ => true
+ }
+
+
+// -------------- Statistics --------------------------------------------------------
+
+ Statistics.newView("#symbols")(ids)
+
+}
+
+object SymbolsStats {
+ val typeSymbolCount = Statistics.newCounter("#type symbols")
+ val classSymbolCount = Statistics.newCounter("#class symbols")
+ val flagsCount = Statistics.newCounter("#flags ops")
+ val ownerCount = Statistics.newCounter("#owner ops")
+ val nameCount = Statistics.newCounter("#name ops")
+}
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
new file mode 100644
index 0000000..c1753fc
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -0,0 +1,298 @@
+package scala.reflect
+package internal
+
+abstract class TreeGen extends macros.TreeBuilder {
+ val global: SymbolTable
+
+ import global._
+ import definitions._
+
+ def rootId(name: Name) = Select(Ident(nme.ROOTPKG), name)
+ def rootScalaDot(name: Name) = Select(rootId(nme.scala_) setSymbol ScalaPackage, name)
+ def scalaDot(name: Name) = Select(Ident(nme.scala_) setSymbol ScalaPackage, name)
+ def scalaAnnotationDot(name: Name) = Select(scalaDot(nme.annotation), name)
+ def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass
+ def scalaUnitConstr = scalaDot(tpnme.Unit) setSymbol UnitClass
+ def productConstr = scalaDot(tpnme.Product) setSymbol ProductRootClass
+ def serializableConstr = scalaDot(tpnme.Serializable) setSymbol SerializableClass
+
+ def scalaFunctionConstr(argtpes: List[Tree], restpe: Tree, abstractFun: Boolean = false): Tree = {
+ val cls = if (abstractFun)
+ mkAttributedRef(AbstractFunctionClass(argtpes.length))
+ else
+ mkAttributedRef(FunctionClass(argtpes.length))
+ AppliedTypeTree(cls, argtpes :+ restpe)
+ }
+
+ /** A creator for method calls, e.g. fn[T1, T2, ...](v1, v2, ...)
+ * There are a number of variations.
+ *
+ * @param receiver symbol of the method receiver
+ * @param methodName name of the method to call
+ * @param targs type arguments (if Nil, no TypeApply node will be generated)
+ * @param args value arguments
+ * @return the newly created trees.
+ */
+ def mkMethodCall(receiver: Symbol, methodName: Name, targs: List[Type], args: List[Tree]): Tree =
+ mkMethodCall(Select(mkAttributedRef(receiver), methodName), targs, args)
+ def mkMethodCall(method: Symbol, targs: List[Type], args: List[Tree]): Tree =
+ mkMethodCall(mkAttributedRef(method), targs, args)
+ def mkMethodCall(method: Symbol, args: List[Tree]): Tree =
+ mkMethodCall(method, Nil, args)
+ def mkMethodCall(target: Tree, args: List[Tree]): Tree =
+ mkMethodCall(target, Nil, args)
+ def mkMethodCall(receiver: Symbol, methodName: Name, args: List[Tree]): Tree =
+ mkMethodCall(receiver, methodName, Nil, args)
+ def mkMethodCall(receiver: Tree, method: Symbol, targs: List[Type], args: List[Tree]): Tree =
+ mkMethodCall(Select(receiver, method), targs, args)
+
+ def mkMethodCall(target: Tree, targs: List[Type], args: List[Tree]): Tree =
+ Apply(mkTypeApply(target, targs map TypeTree), args)
+
+ def mkNullaryCall(method: Symbol, targs: List[Type]): Tree =
+ mkTypeApply(mkAttributedRef(method), targs map TypeTree)
+
+ /** Builds a reference to value whose type is given stable prefix.
+ * The type must be suitable for this. For example, it
+ * must not be a TypeRef pointing to an abstract type variable.
+ */
+ def mkAttributedQualifier(tpe: Type): Tree =
+ mkAttributedQualifier(tpe, NoSymbol)
+
+ /** Builds a reference to value whose type is given stable prefix.
+ * If the type is unsuitable, e.g. it is a TypeRef for an
+ * abstract type variable, then an Ident will be made using
+ * termSym as the Ident's symbol. In that case, termSym must
+ * not be NoSymbol.
+ */
+ def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = {
+ def failMessage = "mkAttributedQualifier(" + tpe + ", " + termSym + ")"
+ tpe match {
+ case NoPrefix =>
+ EmptyTree
+ case ThisType(clazz) =>
+ if (clazz.isEffectiveRoot) EmptyTree
+ else mkAttributedThis(clazz)
+ case SingleType(pre, sym) =>
+ mkApplyIfNeeded(mkAttributedStableRef(pre, sym))
+ case TypeRef(pre, sym, args) =>
+ if (sym.isRoot) {
+ mkAttributedThis(sym)
+ } else if (sym.isModuleClass) {
+ mkApplyIfNeeded(mkAttributedRef(pre, sym.sourceModule))
+ } else if (sym.isModule || sym.isClass) {
+ assert(phase.erasedTypes, failMessage)
+ mkAttributedThis(sym)
+ } else if (sym.isType) {
+ assert(termSym != NoSymbol, failMessage)
+ mkAttributedIdent(termSym) setType tpe
+ } else {
+ mkAttributedRef(pre, sym)
+ }
+
+ case ConstantType(value) =>
+ Literal(value) setType tpe
+
+ case AnnotatedType(_, atp, _) =>
+ mkAttributedQualifier(atp)
+
+ case RefinedType(parents, _) =>
+ // I am unclear whether this is reachable, but
+ // the following implementation looks logical -Lex
+ val firstStable = parents.find(_.isStable)
+ assert(!firstStable.isEmpty, failMessage + " parents = " + parents)
+ mkAttributedQualifier(firstStable.get)
+
+ case _ =>
+ abort("bad qualifier received: " + failMessage)
+ }
+ }
+ /** If this is a reference to a method with an empty
+ * parameter list, wrap it in an apply.
+ */
+ def mkApplyIfNeeded(qual: Tree) = qual.tpe match {
+ case MethodType(Nil, restpe) => atPos(qual.pos)(Apply(qual, Nil) setType restpe)
+ case _ => qual
+ }
+
+ /** Builds a reference to given symbol with given stable prefix. */
+ def mkAttributedRef(pre: Type, sym: Symbol): Tree = {
+ val qual = mkAttributedQualifier(pre)
+ qual match {
+ case EmptyTree => mkAttributedIdent(sym)
+ case This(clazz) if qual.symbol.isEffectiveRoot => mkAttributedIdent(sym)
+ case _ => mkAttributedSelect(qual, sym)
+ }
+ }
+
+ /** Builds a reference to given symbol. */
+ def mkAttributedRef(sym: Symbol): Tree =
+ if (sym.owner.isClass) mkAttributedRef(sym.owner.thisType, sym)
+ else mkAttributedIdent(sym)
+
+ /** Builds an untyped reference to given symbol. */
+ def mkUnattributedRef(sym: Symbol): Tree =
+ if (sym.owner.isClass) Select(This(sym.owner), sym)
+ else Ident(sym)
+
+ /** Replaces tree type with a stable type if possible */
+ def stabilize(tree: Tree): Tree = {
+ for(tp <- stableTypeFor(tree)) tree.tpe = tp
+ tree
+ }
+
+ /** Computes stable type for a tree if possible */
+ def stableTypeFor(tree: Tree): Option[Type] = tree match {
+ case This(_) if tree.symbol != null && !tree.symbol.isError =>
+ Some(ThisType(tree.symbol))
+ case Ident(_) if tree.symbol.isStable =>
+ Some(singleType(tree.symbol.owner.thisType, tree.symbol))
+ case Select(qual, _) if ((tree.symbol ne null) && (qual.tpe ne null)) && // turned assert into guard for #4064
+ tree.symbol.isStable && qual.tpe.isStable =>
+ Some(singleType(qual.tpe, tree.symbol))
+ case _ =>
+ None
+ }
+
+ /** Builds a reference with stable type to given symbol */
+ def mkAttributedStableRef(pre: Type, sym: Symbol): Tree =
+ stabilize(mkAttributedRef(pre, sym))
+
+ def mkAttributedStableRef(sym: Symbol): Tree =
+ stabilize(mkAttributedRef(sym))
+
+ def mkAttributedThis(sym: Symbol): Tree =
+ This(sym.name.toTypeName) setSymbol sym setType sym.thisType
+
+ def mkAttributedIdent(sym: Symbol): Tree =
+ Ident(sym.name) setSymbol sym setType sym.tpe
+
+ def mkAttributedSelect(qual: Tree, sym: Symbol): Tree = {
+ // Tests involving the repl fail without the .isEmptyPackage condition.
+ if (qual.symbol != null && (qual.symbol.isEffectiveRoot || qual.symbol.isEmptyPackage))
+ mkAttributedIdent(sym)
+ else {
+ // Have to recognize anytime a selection is made on a package
+ // so it can be rewritten to foo.bar.`package`.name rather than
+ // foo.bar.name if name is in the package object.
+ // TODO - factor out the common logic between this and
+ // the Typers method "isInPackageObject", used in typedIdent.
+ val qualsym = (
+ if (qual.tpe ne null) qual.tpe.typeSymbol
+ else if (qual.symbol ne null) qual.symbol
+ else NoSymbol
+ )
+ val needsPackageQualifier = (
+ (sym ne null)
+ && qualsym.isPackage
+ && !sym.isDefinedInPackage
+ )
+ val pkgQualifier =
+ if (needsPackageQualifier) {
+ // The owner of a symbol which requires package qualification may be the
+ // package object iself, but it also could be any superclass of the package
+ // object. In the latter case, we must go through the qualifier's info
+ // to obtain the right symbol.
+ val packageObject = if (sym.owner.isModuleClass) sym.owner.sourceModule else qual.tpe member nme.PACKAGE
+ Select(qual, nme.PACKAGE) setSymbol packageObject setType singleType(qual.tpe, packageObject)
+ }
+ else qual
+
+ val tree = Select(pkgQualifier, sym)
+ if (pkgQualifier.tpe == null) tree
+ else tree setType (qual.tpe memberType sym)
+ }
+ }
+
+ /** Builds a type application node if args.nonEmpty, returns fun otherwise. */
+ def mkTypeApply(fun: Tree, targs: List[Tree]): Tree =
+ if (targs.isEmpty) fun else TypeApply(fun, targs)
+ def mkTypeApply(target: Tree, method: Symbol, targs: List[Type]): Tree =
+ mkTypeApply(Select(target, method), targs map TypeTree)
+ def mkAttributedTypeApply(target: Tree, method: Symbol, targs: List[Type]): Tree =
+ mkTypeApply(mkAttributedSelect(target, method), targs map TypeTree)
+
+ private def mkSingleTypeApply(value: Tree, tpe: Type, what: Symbol, wrapInApply: Boolean) = {
+ val tapp = mkAttributedTypeApply(value, what, tpe.normalize :: Nil)
+ if (wrapInApply) Apply(tapp, Nil) else tapp
+ }
+ private def typeTestSymbol(any: Boolean) = if (any) Any_isInstanceOf else Object_isInstanceOf
+ private def typeCastSymbol(any: Boolean) = if (any) Any_asInstanceOf else Object_asInstanceOf
+
+ /** Builds an instance test with given value and type. */
+ def mkIsInstanceOf(value: Tree, tpe: Type, any: Boolean = true, wrapInApply: Boolean = true): Tree =
+ mkSingleTypeApply(value, tpe, typeTestSymbol(any), wrapInApply)
+
+ /** Builds a cast with given value and type. */
+ def mkAsInstanceOf(value: Tree, tpe: Type, any: Boolean = true, wrapInApply: Boolean = true): Tree =
+ mkSingleTypeApply(value, tpe, typeCastSymbol(any), wrapInApply)
+
+ /** Cast `tree` to `pt`, unless tpe is a subtype of pt, or pt is Unit. */
+ def maybeMkAsInstanceOf(tree: Tree, pt: Type, tpe: Type, beforeRefChecks: Boolean = false): Tree =
+ if ((pt == UnitClass.tpe) || (tpe <:< pt)) tree
+ else atPos(tree.pos)(mkAsInstanceOf(tree, pt, any = true, wrapInApply = !beforeRefChecks))
+
+ /** Apparently we smuggle a Type around as a Literal(Constant(tp))
+ * and the implementation of Constant#tpe is such that x.tpe becomes
+ * ClassType(value.asInstanceOf[Type]), i.e. java.lang.Class[Type].
+ * Can't find any docs on how/why it's done this way. See ticket
+ * SI-490 for some interesting comments from lauri alanko suggesting
+ * that the type given by classOf[T] is too strong and should be
+ * weakened so as not to suggest that classOf[List[String]] is any
+ * different from classOf[List[Int]].
+ *
+ * !!! See deconstMap in Erasure for one bug this encoding has induced:
+ * I would be very surprised if there aren't more.
+ */
+ def mkClassOf(tp: Type): Tree =
+ Literal(Constant(tp)) setType ConstantType(Constant(tp))
+
+ /** Builds a list with given head and tail. */
+ def mkNewCons(head: Tree, tail: Tree): Tree =
+ New(Apply(mkAttributedRef(ConsClass), List(head, tail)))
+
+ /** Builds a list with given head and tail. */
+ def mkNil: Tree = mkAttributedRef(NilModule)
+
+ /** Builds a tree representing an undefined local, as in
+ * var x: T = _
+ * which is appropriate to the given Type.
+ */
+ def mkZero(tp: Type): Tree = tp.typeSymbol match {
+ case NothingClass => mkMethodCall(Predef_???, Nil) setType NothingClass.tpe
+ case _ => Literal(mkConstantZero(tp)) setType tp
+ }
+
+ def mkConstantZero(tp: Type): Constant = tp.typeSymbol match {
+ case UnitClass => Constant(())
+ case BooleanClass => Constant(false)
+ case FloatClass => Constant(0.0f)
+ case DoubleClass => Constant(0.0d)
+ case ByteClass => Constant(0.toByte)
+ case ShortClass => Constant(0.toShort)
+ case IntClass => Constant(0)
+ case LongClass => Constant(0L)
+ case CharClass => Constant(0.toChar)
+ case _ => Constant(null)
+ }
+
+ /** Builds a tuple */
+ def mkTuple(elems: List[Tree]): Tree =
+ if (elems.isEmpty) Literal(Constant())
+ else Apply(
+ Select(mkAttributedRef(TupleClass(elems.length).caseModule), nme.apply),
+ elems)
+
+ // tree1 AND tree2
+ def mkAnd(tree1: Tree, tree2: Tree): Tree =
+ Apply(Select(tree1, Boolean_and), List(tree2))
+
+ // tree1 OR tree2
+ def mkOr(tree1: Tree, tree2: Tree): Tree =
+ Apply(Select(tree1, Boolean_or), List(tree2))
+
+ def mkRuntimeUniverseRef: Tree = {
+ assert(ReflectRuntimeUniverse != NoSymbol)
+ mkAttributedRef(ReflectRuntimeUniverse) setType singleType(ReflectRuntimeUniverse.owner.thisPrefix, ReflectRuntimeUniverse)
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
new file mode 100644
index 0000000..fa4441e
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -0,0 +1,764 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import Flags._
+
+/** This class ...
+ *
+ * @author Martin Odersky
+ * @version 1.0
+ */
+abstract class TreeInfo {
+ val global: SymbolTable
+
+ import global._
+ import definitions.{ isTupleSymbol, isVarArgsList, isCastSymbol, ThrowableClass, TupleClass, MacroContextClass, MacroContextPrefixType }
+
+ /* Does not seem to be used. Not sure what it does anyway.
+ def isOwnerDefinition(tree: Tree): Boolean = tree match {
+ case PackageDef(_, _)
+ | ClassDef(_, _, _, _)
+ | ModuleDef(_, _, _)
+ | DefDef(_, _, _, _, _, _)
+ | Import(_, _) => true
+ case _ => false
+ }
+*/
+
+ // def isDefinition(tree: Tree): Boolean = tree.isDef
+
+ /** Is tree a declaration or type definition?
+ */
+ def isDeclarationOrTypeDef(tree: Tree): Boolean = tree match {
+ case x: ValOrDefDef => x.rhs eq EmptyTree
+ case _ => tree.isInstanceOf[TypeDef]
+ }
+
+ /** Is tree legal as a member definition of an interface?
+ */
+ def isInterfaceMember(tree: Tree): Boolean = tree match {
+ case EmptyTree => true
+ case Import(_, _) => true
+ case TypeDef(_, _, _, _) => true
+ case DefDef(mods, _, _, _, _, __) => mods.isDeferred
+ case ValDef(mods, _, _, _) => mods.isDeferred
+ case _ => false
+ }
+
+ /** Is tree a pure (i.e. non-side-effecting) definition?
+ */
+ def isPureDef(tree: Tree): Boolean = tree match {
+ case EmptyTree
+ | ClassDef(_, _, _, _)
+ | TypeDef(_, _, _, _)
+ | Import(_, _)
+ | DefDef(_, _, _, _, _, _) =>
+ true
+ case ValDef(mods, _, _, rhs) =>
+ !mods.isMutable && isExprSafeToInline(rhs)
+ case _ =>
+ false
+ }
+
+ /** Is tree an expression which can be inlined without affecting program semantics?
+ *
+ * Note that this is not called "isExprPure" since purity (lack of side-effects)
+ * is not the litmus test. References to modules and lazy vals are side-effecting,
+ * both because side-effecting code may be executed and because the first reference
+ * takes a different code path than all to follow; but they are safe to inline
+ * because the expression result from evaluating them is always the same.
+ */
+ def isExprSafeToInline(tree: Tree): Boolean = tree match {
+ case EmptyTree
+ | This(_)
+ | Super(_, _)
+ | Literal(_) =>
+ true
+ case Ident(_) =>
+ tree.symbol.isStable
+ // this case is mostly to allow expressions like -5 and +7, but any
+ // member of an anyval should be safely pure
+ case Select(Literal(const), name) =>
+ const.isAnyVal && (const.tpe.member(name) != NoSymbol)
+ case Select(qual, _) =>
+ tree.symbol.isStable && isExprSafeToInline(qual)
+ case TypeApply(fn, _) =>
+ isExprSafeToInline(fn)
+ case Apply(Select(free @ Ident(_), nme.apply), _) if free.symbol.name endsWith nme.REIFY_FREE_VALUE_SUFFIX =>
+ // see a detailed explanation of this trick in `GenSymbols.reifyFreeTerm`
+ free.symbol.hasStableFlag && isExprSafeToInline(free)
+ case Apply(fn, List()) =>
+ // Note: After uncurry, field accesses are represented as Apply(getter, Nil),
+ // so an Apply can also be pure.
+ // However, before typing, applications of nullary functional values are also
+ // Apply(function, Nil) trees. To prevent them from being treated as pure,
+ // we check that the callee is a method.
+ // The callee might also be a Block, which has a null symbol, so we guard against that (SI-7185)
+ fn.symbol != null && fn.symbol.isMethod && !fn.symbol.isLazy && isExprSafeToInline(fn)
+ case Typed(expr, _) =>
+ isExprSafeToInline(expr)
+ case Block(stats, expr) =>
+ (stats forall isPureDef) && isExprSafeToInline(expr)
+ case _ =>
+ false
+ }
+
+ @deprecated("Use isExprSafeToInline instead", "2.10.0")
+ def isPureExpr(tree: Tree) = isExprSafeToInline(tree)
+
+ def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] =
+ mapMethodParamsAndArgs(params, args)((param, arg) => ((param, arg)))
+
+ def mapMethodParamsAndArgs[R](params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => R): List[R] = {
+ val b = List.newBuilder[R]
+ foreachMethodParamAndArg(params, args)((param, arg) => b += f(param, arg))
+ b.result
+ }
+ def foreachMethodParamAndArg(params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => Unit): Boolean = {
+ val plen = params.length
+ val alen = args.length
+ def fail() = {
+ global.debugwarn(
+ "Mismatch trying to zip method parameters and argument list:\n" +
+ " params = " + params + "\n" +
+ " args = " + args + "\n"
+ )
+ false
+ }
+
+ if (plen == alen) foreach2(params, args)(f)
+ else if (params.isEmpty) return fail
+ else if (isVarArgsList(params)) {
+ val plenInit = plen - 1
+ if (alen == plenInit) {
+ if (alen == 0) Nil // avoid calling mismatched zip
+ else foreach2(params.init, args)(f)
+ }
+ else if (alen < plenInit) return fail
+ else {
+ foreach2(params.init, args take plenInit)(f)
+ val remainingArgs = args drop plenInit
+ foreach2(List.fill(remainingArgs.size)(params.last), remainingArgs)(f)
+ }
+ }
+ else return fail
+
+ true
+ }
+
+ /**
+ * Selects the correct parameter list when there are nested applications.
+ * Given Apply(fn, args), args might correspond to any of fn.symbol's parameter
+ * lists. To choose the correct one before uncurry, we have to unwrap any
+ * applies: for instance Apply(fn @ Apply(Apply(_, _), _), args) implies args
+ * correspond to the third parameter list.
+ *
+ * The argument fn is the function part of the apply node being considered.
+ *
+ * Also accounts for varargs.
+ */
+ private def applyMethodParameters(fn: Tree): List[Symbol] = {
+ val depth = dissectApplied(fn).applyDepth
+ // There could be applies which go beyond the parameter list(s),
+ // being applied to the result of the method call.
+ // !!! Note that this still doesn't seem correct, although it should
+ // be closer than what it replaced.
+ if (depth < fn.symbol.paramss.size) fn.symbol.paramss(depth)
+ else if (fn.symbol.paramss.isEmpty) Nil
+ else fn.symbol.paramss.last
+ }
+
+ def zipMethodParamsAndArgs(t: Tree): List[(Symbol, Tree)] = t match {
+ case Apply(fn, args) => zipMethodParamsAndArgs(applyMethodParameters(fn), args)
+ case _ => Nil
+ }
+ def foreachMethodParamAndArg(t: Tree)(f: (Symbol, Tree) => Unit): Unit = t match {
+ case Apply(fn, args) => foreachMethodParamAndArg(applyMethodParameters(fn), args)(f)
+ case _ =>
+ }
+
+ /** Is symbol potentially a getter of a variable?
+ */
+ def mayBeVarGetter(sym: Symbol): Boolean = sym.info match {
+ case NullaryMethodType(_) => sym.owner.isClass && !sym.isStable
+ case PolyType(_, NullaryMethodType(_)) => sym.owner.isClass && !sym.isStable
+ case mt @ MethodType(_, _) => mt.isImplicit && sym.owner.isClass && !sym.isStable
+ case _ => false
+ }
+
+ /** Is tree a mutable variable, or the getter of a mutable field?
+ */
+ def isVariableOrGetter(tree: Tree) = {
+ def sym = tree.symbol
+ def isVar = sym.isVariable
+ def isGetter = mayBeVarGetter(sym) && sym.owner.info.member(nme.getterToSetter(sym.name.toTermName)) != NoSymbol
+
+ tree match {
+ case Ident(_) => isVar
+ case Select(_, _) => isVar || isGetter
+ case Applied(Select(qual, nme.apply), _, _) => qual.tpe.member(nme.update) != NoSymbol
+ case _ => false
+ }
+ }
+
+ /** Is tree a self constructor call this(...)? I.e. a call to a constructor of the
+ * same object?
+ */
+ def isSelfConstrCall(tree: Tree): Boolean = tree match {
+ case Applied(Ident(nme.CONSTRUCTOR), _, _) => true
+ case Applied(Select(This(_), nme.CONSTRUCTOR), _, _) => true
+ case _ => false
+ }
+
+ /** Is tree a super constructor call?
+ */
+ def isSuperConstrCall(tree: Tree): Boolean = tree match {
+ case Applied(Select(Super(_, _), nme.CONSTRUCTOR), _, _) => true
+ case _ => false
+ }
+
+ /**
+ * Named arguments can transform a constructor call into a block, e.g.
+ * <init>(b = foo, a = bar)
+ * is transformed to
+ * { val x$1 = foo
+ * val x$2 = bar
+ * <init>(x$2, x$1)
+ * }
+ */
+ def stripNamedApplyBlock(tree: Tree) = tree match {
+ case Block(stats, expr) if stats.forall(_.isInstanceOf[ValDef]) =>
+ expr
+ case _ =>
+ tree
+ }
+
+ /** Strips layers of `.asInstanceOf[T]` / `_.$asInstanceOf[T]()` from an expression */
+ def stripCast(tree: Tree): Tree = tree match {
+ case TypeApply(sel @ Select(inner, _), _) if isCastSymbol(sel.symbol) =>
+ stripCast(inner)
+ case Apply(TypeApply(sel @ Select(inner, _), _), Nil) if isCastSymbol(sel.symbol) =>
+ stripCast(inner)
+ case t =>
+ t
+ }
+
+ object StripCast {
+ def unapply(tree: Tree): Some[Tree] = Some(stripCast(tree))
+ }
+
+ /** Is tree a self or super constructor call? */
+ def isSelfOrSuperConstrCall(tree: Tree) = {
+ // stripNamedApply for SI-3584: adaptToImplicitMethod in Typers creates a special context
+ // for implicit search in constructor calls, adaptToImplicitMethod(isSelfOrConstrCall)
+ val tree1 = stripNamedApplyBlock(tree)
+ isSelfConstrCall(tree1) || isSuperConstrCall(tree1)
+ }
+
+ /**
+ * Does this tree represent an irrefutable pattern match
+ * in the position `for { <tree> <- expr }` based only
+ * on information at the `parser` phase? To qualify, there
+ * may be no subtree that will be interpreted as a
+ * Stable Identifier Pattern, nor any type tests, even
+ * on TupleN. See SI-6968.
+ *
+ * For instance:
+ *
+ * {{{
+ * (foo @ (bar @ _)) = 0
+ * }}}
+ *
+ * is a not a variable pattern; if only binds names.
+ *
+ * The following are not variable patterns.
+ *
+ * {{{
+ * `bar`
+ * Bar
+ * (a, b)
+ * _: T
+ * }}}
+ *
+ * If the pattern is a simple identifier, it is always
+ * a variable pattern. For example, the following
+ * introduce new bindings:
+ *
+ * {{{
+ * for { X <- xs } yield X
+ * for { `backquoted` <- xs } yield `backquoted`
+ * }}}
+ *
+ * Note that this differs from a case clause:
+ *
+ * {{{
+ * object X
+ * scrut match {
+ * case X => // case _ if scrut == X
+ * }
+ * }}}
+ *
+ * Background: [[https://groups.google.com/d/msg/scala-internals/qwa_XOw_7Ks/IktkeTBYqg0J]]
+ *
+ */
+ def isVarPatternDeep(tree: Tree): Boolean = {
+ def isVarPatternDeep0(tree: Tree): Boolean = {
+ tree match {
+ case Bind(name, pat) => isVarPatternDeep0(pat)
+ case Ident(name) => isVarPattern(tree)
+ case _ => false
+ }
+ }
+ tree match {
+ case Ident(name) => true
+ case _ => isVarPatternDeep0(tree)
+ }
+ }
+
+ /** Is tree a variable pattern? */
+ def isVarPattern(pat: Tree): Boolean = pat match {
+ case x: Ident => !x.isBackquoted && nme.isVariableName(x.name)
+ case _ => false
+ }
+ def isDeprecatedIdentifier(tree: Tree): Boolean = tree match {
+ case x: Ident => !x.isBackquoted && nme.isDeprecatedIdentifierName(x.name)
+ case _ => false
+ }
+
+ /** The first constructor definitions in `stats` */
+ def firstConstructor(stats: List[Tree]): Tree = stats find {
+ case x: DefDef => nme.isConstructorName(x.name)
+ case _ => false
+ } getOrElse EmptyTree
+
+ /** The arguments to the first constructor in `stats`. */
+ def firstConstructorArgs(stats: List[Tree]): List[Tree] = firstConstructor(stats) match {
+ case DefDef(_, _, _, args :: _, _, _) => args
+ case _ => Nil
+ }
+
+ /** The value definitions marked PRESUPER in this statement sequence */
+ def preSuperFields(stats: List[Tree]): List[ValDef] =
+ stats collect { case vd: ValDef if isEarlyValDef(vd) => vd }
+
+ def isEarlyDef(tree: Tree) = tree match {
+ case TypeDef(mods, _, _, _) => mods hasFlag PRESUPER
+ case ValDef(mods, _, _, _) => mods hasFlag PRESUPER
+ case _ => false
+ }
+
+ def isEarlyValDef(tree: Tree) = tree match {
+ case ValDef(mods, _, _, _) => mods hasFlag PRESUPER
+ case _ => false
+ }
+
+ def isEarlyTypeDef(tree: Tree) = tree match {
+ case TypeDef(mods, _, _, _) => mods hasFlag PRESUPER
+ case _ => false
+ }
+
+ /** Is tpt a vararg type of the form T* ? */
+ def isRepeatedParamType(tpt: Tree) = tpt match {
+ case TypeTree() => definitions.isRepeatedParamType(tpt.tpe)
+ case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS_NAME), _) => true
+ case AppliedTypeTree(Select(_, tpnme.JAVA_REPEATED_PARAM_CLASS_NAME), _) => true
+ case _ => false
+ }
+
+ /** The parameter ValDefs of a method definition that have vararg types of the form T*
+ */
+ def repeatedParams(tree: Tree): List[ValDef] = tree match {
+ case DefDef(_, _, _, vparamss, _, _) => vparamss.flatten filter (vd => isRepeatedParamType(vd.tpt))
+ case _ => Nil
+ }
+
+ /** Is tpt a by-name parameter type of the form => T? */
+ def isByNameParamType(tpt: Tree) = tpt match {
+ case TypeTree() => definitions.isByNameParamType(tpt.tpe)
+ case AppliedTypeTree(Select(_, tpnme.BYNAME_PARAM_CLASS_NAME), _) => true
+ case _ => false
+ }
+
+ /** Is name a left-associative operator? */
+ def isLeftAssoc(operator: Name) = operator.nonEmpty && (operator.endChar != ':')
+
+ /** Is tree a `this` node which belongs to `enclClass`? */
+ def isSelf(tree: Tree, enclClass: Symbol): Boolean = tree match {
+ case This(_) => tree.symbol == enclClass
+ case _ => false
+ }
+
+ /** a Match(Typed(_, tpt), _) must be translated into a switch if isSwitchAnnotation(tpt.tpe) */
+ def isSwitchAnnotation(tpe: Type) = tpe hasAnnotation definitions.SwitchClass
+
+ /** can this type be a type pattern */
+ def mayBeTypePat(tree: Tree): Boolean = tree match {
+ case CompoundTypeTree(Template(tps, _, Nil)) => tps exists mayBeTypePat
+ case Annotated(_, tp) => mayBeTypePat(tp)
+ case AppliedTypeTree(constr, args) => mayBeTypePat(constr) || args.exists(_.isInstanceOf[Bind])
+ case SelectFromTypeTree(tp, _) => mayBeTypePat(tp)
+ case _ => false
+ }
+
+ /** Is this argument node of the form <expr> : _* ?
+ */
+ def isWildcardStarArg(tree: Tree): Boolean = tree match {
+ case Typed(_, Ident(tpnme.WILDCARD_STAR)) => true
+ case _ => false
+ }
+
+ /** If this tree has type parameters, those. Otherwise Nil.
+ */
+ def typeParameters(tree: Tree): List[TypeDef] = tree match {
+ case DefDef(_, _, tparams, _, _, _) => tparams
+ case ClassDef(_, _, tparams, _) => tparams
+ case TypeDef(_, _, tparams, _) => tparams
+ case _ => Nil
+ }
+
+ /** Does this argument list end with an argument of the form <expr> : _* ? */
+ def isWildcardStarArgList(trees: List[Tree]) =
+ trees.nonEmpty && isWildcardStarArg(trees.last)
+
+ /** Is the argument a wildcard argument of the form `_` or `x @ _`?
+ */
+ def isWildcardArg(tree: Tree): Boolean = unbind(tree) match {
+ case Ident(nme.WILDCARD) => true
+ case _ => false
+ }
+
+ /** Is the argument a wildcard star type of the form `_*`?
+ */
+ def isWildcardStarType(tree: Tree): Boolean = tree match {
+ case Ident(tpnme.WILDCARD_STAR) => true
+ case _ => false
+ }
+
+ /** Is this pattern node a catch-all (wildcard or variable) pattern? */
+ def isDefaultCase(cdef: CaseDef) = cdef match {
+ case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat)
+ case _ => false
+ }
+
+ /** Is this pattern node a synthetic catch-all case, added during PartialFuction synthesis before we know
+ * whether the user provided cases are exhaustive. */
+ def isSyntheticDefaultCase(cdef: CaseDef) = cdef match {
+ case CaseDef(Bind(nme.DEFAULT_CASE, _), EmptyTree, _) => true
+ case _ => false
+ }
+
+ /** Does this CaseDef catch Throwable? */
+ def catchesThrowable(cdef: CaseDef) = catchesAllOf(cdef, ThrowableClass.tpe)
+
+ /** Does this CaseDef catch everything of a certain Type? */
+ def catchesAllOf(cdef: CaseDef, threshold: Type) =
+ isDefaultCase(cdef) || (cdef.guard.isEmpty && (unbind(cdef.pat) match {
+ case Typed(Ident(nme.WILDCARD), tpt) => (tpt.tpe != null) && (threshold <:< tpt.tpe)
+ case _ => false
+ }))
+
+ /** Is this pattern node a catch-all or type-test pattern? */
+ def isCatchCase(cdef: CaseDef) = cdef match {
+ case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) =>
+ isSimpleThrowable(tpt.tpe)
+ case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) =>
+ isSimpleThrowable(tpt.tpe)
+ case _ =>
+ isDefaultCase(cdef)
+ }
+
+ private def isSimpleThrowable(tp: Type): Boolean = tp match {
+ case TypeRef(pre, sym, args) =>
+ (pre == NoPrefix || pre.widen.typeSymbol.isStatic) &&
+ (sym isNonBottomSubClass ThrowableClass) && /* bq */ !sym.isTrait
+ case _ =>
+ false
+ }
+
+ /* If we have run-time types, and these are used for pattern matching,
+ we should replace this by something like:
+
+ tp match {
+ case TypeRef(pre, sym, args) =>
+ args.isEmpty && (sym.owner.isPackageClass || isSimple(pre))
+ case NoPrefix =>
+ true
+ case _ =>
+ false
+ }
+*/
+
+ /** Is this case guarded? */
+ def isGuardedCase(cdef: CaseDef) = cdef.guard != EmptyTree
+
+ /** Is this pattern node a sequence-valued pattern? */
+ def isSequenceValued(tree: Tree): Boolean = unbind(tree) match {
+ case Alternative(ts) => ts exists isSequenceValued
+ case ArrayValue(_, _) | Star(_) => true
+ case _ => false
+ }
+
+ /** The underlying pattern ignoring any bindings */
+ def unbind(x: Tree): Tree = x match {
+ case Bind(_, y) => unbind(y)
+ case y => y
+ }
+
+ /** Is this tree a Star(_) after removing bindings? */
+ def isStar(x: Tree) = unbind(x) match {
+ case Star(_) => true
+ case _ => false
+ }
+
+ /**
+ * {{{
+ * //------------------------ => effectivePatternArity(args)
+ * case Extractor(a) => 1
+ * case Extractor(a, b) => 2
+ * case Extractor((a, b)) => 2
+ * case Extractor(a @ (b, c)) => 2
+ * }}}
+ */
+ def effectivePatternArity(args: List[Tree]): Int = (args.map(unbind) match {
+ case Apply(fun, xs) :: Nil if isTupleSymbol(fun.symbol) => xs
+ case xs => xs
+ }).length
+
+
+ // used in the symbols for labeldefs and valdefs emitted by the pattern matcher
+ // tailcalls, cps,... use this flag combination to detect translated matches
+ // TODO: move to Flags
+ final val SYNTH_CASE_FLAGS = CASE | SYNTHETIC
+
+ def isSynthCaseSymbol(sym: Symbol) = sym hasAllFlags SYNTH_CASE_FLAGS
+ def hasSynthCaseSymbol(t: Tree) = t.symbol != null && isSynthCaseSymbol(t.symbol)
+
+
+ /** Applications in Scala can have one of the following shapes:
+ *
+ * 1) naked core: Ident(_) or Select(_, _) or basically anything else
+ * 2) naked core with targs: TypeApply(core, targs) or AppliedTypeTree(core, targs)
+ * 3) apply or several applies wrapping a core: Apply(core, _), or Apply(Apply(core, _), _), etc
+ *
+ * This class provides different ways to decompose applications and simplifies their analysis.
+ *
+ * ***Examples***
+ * (TypeApply in the examples can be replaced with AppliedTypeTree)
+ *
+ * Ident(foo):
+ * * callee = Ident(foo)
+ * * core = Ident(foo)
+ * * targs = Nil
+ * * argss = Nil
+ *
+ * TypeApply(foo, List(targ1, targ2...))
+ * * callee = TypeApply(foo, List(targ1, targ2...))
+ * * core = foo
+ * * targs = List(targ1, targ2...)
+ * * argss = Nil
+ *
+ * Apply(foo, List(arg1, arg2...))
+ * * callee = foo
+ * * core = foo
+ * * targs = Nil
+ * * argss = List(List(arg1, arg2...))
+ *
+ * Apply(Apply(foo, List(arg21, arg22, ...)), List(arg11, arg12...))
+ * * callee = foo
+ * * core = foo
+ * * targs = Nil
+ * * argss = List(List(arg11, arg12...), List(arg21, arg22, ...))
+ *
+ * Apply(Apply(TypeApply(foo, List(targs1, targs2, ...)), List(arg21, arg22, ...)), List(arg11, arg12...))
+ * * callee = TypeApply(foo, List(targs1, targs2, ...))
+ * * core = foo
+ * * targs = Nil
+ * * argss = List(List(arg11, arg12...), List(arg21, arg22, ...))
+ */
+ class Applied(val tree: Tree) {
+ /** The tree stripped of the possibly nested applications.
+ * The original tree if it's not an application.
+ */
+ def callee: Tree = {
+ def loop(tree: Tree): Tree = tree match {
+ case Apply(fn, _) => loop(fn)
+ case tree => tree
+ }
+ loop(tree)
+ }
+
+ /** The `callee` unwrapped from type applications.
+ * The original `callee` if it's not a type application.
+ */
+ def core: Tree = callee match {
+ case TypeApply(fn, _) => fn
+ case AppliedTypeTree(fn, _) => fn
+ case tree => tree
+ }
+
+ /** The type arguments of the `callee`.
+ * `Nil` if the `callee` is not a type application.
+ */
+ def targs: List[Tree] = callee match {
+ case TypeApply(_, args) => args
+ case AppliedTypeTree(_, args) => args
+ case _ => Nil
+ }
+
+ /** (Possibly multiple lists of) value arguments of an application.
+ * `Nil` if the `callee` is not an application.
+ */
+ def argss: List[List[Tree]] = {
+ def loop(tree: Tree): List[List[Tree]] = tree match {
+ case Apply(fn, args) => loop(fn) :+ args
+ case _ => Nil
+ }
+ loop(tree)
+ }
+
+ /** The depth of the nested applies: e.g. Apply(Apply(Apply(_, _), _), _)
+ * has depth 3. Continues through type applications (without counting them.)
+ */
+ def applyDepth: Int = {
+ def loop(tree: Tree): Int = tree match {
+ case Apply(fn, _) => 1 + loop(fn)
+ case TypeApply(fn, _) => loop(fn)
+ case AppliedTypeTree(fn, _) => loop(fn)
+ case _ => 0
+ }
+ loop(tree)
+ }
+ }
+
+ /** Returns a wrapper that knows how to destructure and analyze applications.
+ */
+ def dissectApplied(tree: Tree) = new Applied(tree)
+
+ /** Destructures applications into important subparts described in `Applied` class,
+ * namely into: core, targs and argss (in the specified order).
+ *
+ * Trees which are not applications are also accepted. Their callee and core will
+ * be equal to the input, while targs and argss will be Nil.
+ *
+ * The provided extractors don't expose all the API of the `Applied` class.
+ * For advanced use, call `dissectApplied` explicitly and use its methods instead of pattern matching.
+ */
+ object Applied {
+ def unapply(applied: Applied): Option[(Tree, List[Tree], List[List[Tree]])] =
+ Some((applied.core, applied.targs, applied.argss))
+
+ def unapply(tree: Tree): Option[(Tree, List[Tree], List[List[Tree]])] =
+ unapply(dissectApplied(tree))
+ }
+
+ /** Does list of trees start with a definition of
+ * a class of module with given name (ignoring imports)
+ */
+ def firstDefinesClassOrObject(trees: List[Tree], name: Name): Boolean = trees match {
+ case Import(_, _) :: xs => firstDefinesClassOrObject(xs, name)
+ case Annotated(_, tree1) :: Nil => firstDefinesClassOrObject(List(tree1), name)
+ case ModuleDef(_, `name`, _) :: Nil => true
+ case ClassDef(_, `name`, _, _) :: Nil => true
+ case _ => false
+ }
+
+
+ /** Is this file the body of a compilation unit which should not
+ * have Predef imported?
+ */
+ def noPredefImportForUnit(body: Tree) = {
+ // Top-level definition whose leading imports include Predef.
+ def isLeadingPredefImport(defn: Tree): Boolean = defn match {
+ case PackageDef(_, defs1) => defs1 exists isLeadingPredefImport
+ case Import(expr, _) => isReferenceToPredef(expr)
+ case _ => false
+ }
+ // Compilation unit is class or object 'name' in package 'scala'
+ def isUnitInScala(tree: Tree, name: Name) = tree match {
+ case PackageDef(Ident(nme.scala_), defs) => firstDefinesClassOrObject(defs, name)
+ case _ => false
+ }
+
+ isUnitInScala(body, nme.Predef) || isLeadingPredefImport(body)
+ }
+
+ def isAbsTypeDef(tree: Tree) = tree match {
+ case TypeDef(_, _, _, TypeBoundsTree(_, _)) => true
+ case TypeDef(_, _, _, rhs) => rhs.tpe.isInstanceOf[TypeBounds]
+ case _ => false
+ }
+
+ def isAliasTypeDef(tree: Tree) = tree match {
+ case TypeDef(_, _, _, _) => !isAbsTypeDef(tree)
+ case _ => false
+ }
+
+ /** Some handy extractors for spotting trees through the
+ * the haze of irrelevant braces: i.e. Block(Nil, SomeTree)
+ * should not keep us from seeing SomeTree.
+ */
+ abstract class SeeThroughBlocks[T] {
+ protected def unapplyImpl(x: Tree): T
+ def unapply(x: Tree): T = x match {
+ case Block(Nil, expr) => unapply(expr)
+ case _ => unapplyImpl(x)
+ }
+ }
+ object IsTrue extends SeeThroughBlocks[Boolean] {
+ protected def unapplyImpl(x: Tree): Boolean = x match {
+ case Literal(Constant(true)) => true
+ case _ => false
+ }
+ }
+ object IsFalse extends SeeThroughBlocks[Boolean] {
+ protected def unapplyImpl(x: Tree): Boolean = x match {
+ case Literal(Constant(false)) => true
+ case _ => false
+ }
+ }
+ object IsIf extends SeeThroughBlocks[Option[(Tree, Tree, Tree)]] {
+ protected def unapplyImpl(x: Tree) = x match {
+ case If(cond, thenp, elsep) => Some((cond, thenp, elsep))
+ case _ => None
+ }
+ }
+
+ def isApplyDynamicName(name: Name) = (name == nme.updateDynamic) || (name == nme.selectDynamic) || (name == nme.applyDynamic) || (name == nme.applyDynamicNamed)
+
+ class DynamicApplicationExtractor(nameTest: Name => Boolean) {
+ def unapply(tree: Tree) = tree match {
+ case Apply(TypeApply(Select(qual, oper), _), List(Literal(Constant(name)))) if nameTest(oper) => Some((qual, name))
+ case Apply(Select(qual, oper), List(Literal(Constant(name)))) if nameTest(oper) => Some((qual, name))
+ case Apply(Ident(oper), List(Literal(Constant(name)))) if nameTest(oper) => Some((EmptyTree, name))
+ case _ => None
+ }
+ }
+ object DynamicUpdate extends DynamicApplicationExtractor(_ == nme.updateDynamic)
+ object DynamicApplication extends DynamicApplicationExtractor(isApplyDynamicName)
+ object DynamicApplicationNamed extends DynamicApplicationExtractor(_ == nme.applyDynamicNamed)
+
+ object MacroImplReference {
+ private def refPart(tree: Tree): Tree = tree match {
+ case TypeApply(fun, _) => refPart(fun)
+ case ref: RefTree => ref
+ case _ => EmptyTree
+ }
+
+ def unapply(tree: Tree) = refPart(tree) match {
+ case ref: RefTree => Some((ref.qualifier.symbol, ref.symbol, dissectApplied(tree).targs))
+ case _ => None
+ }
+ }
+
+ def isNullaryInvocation(tree: Tree): Boolean =
+ tree.symbol != null && tree.symbol.isMethod && (tree match {
+ case TypeApply(fun, _) => isNullaryInvocation(fun)
+ case tree: RefTree => true
+ case _ => false
+ })
+}
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
new file mode 100644
index 0000000..53b9b1d
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -0,0 +1,1701 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import Flags._
+import scala.collection.mutable.{ListBuffer, LinkedHashSet}
+import util.Statistics
+
+trait Trees extends api.Trees { self: SymbolTable =>
+
+ private[scala] var nodeCount = 0
+
+ abstract class Tree extends TreeContextApiImpl with Attachable with Product {
+ val id = nodeCount // TODO: add to attachment?
+ nodeCount += 1
+
+ if (Statistics.canEnable) Statistics.incCounter(TreesStats.nodeByType, getClass)
+
+ final override def pos: Position = rawatt.pos
+
+ private[this] var rawtpe: Type = _
+ final def tpe = rawtpe
+ def tpe_=(t: Type) = rawtpe = t
+ def setType(tp: Type): this.type = { rawtpe = tp; this }
+ def defineType(tp: Type): this.type = setType(tp)
+
+ def symbol: Symbol = null //!!!OPT!!! symbol is about 3% of hot compile times -- megamorphic dispatch?
+ def symbol_=(sym: Symbol) { throw new UnsupportedOperationException("symbol_= inapplicable for " + this) }
+ def setSymbol(sym: Symbol): this.type = { symbol = sym; this }
+ def hasSymbol = false
+
+ def isDef = false
+
+ def isEmpty = false
+
+ /** The canonical way to test if a Tree represents a term.
+ */
+ def isTerm: Boolean = this match {
+ case _: TermTree => true
+ case Bind(name, _) => name.isTermName
+ case Select(_, name) => name.isTermName
+ case Ident(name) => name.isTermName
+ case Annotated(_, arg) => arg.isTerm
+ case _ => false
+ }
+
+ /** The canonical way to test if a Tree represents a type.
+ */
+ def isType: Boolean = this match {
+ case _: TypTree => true
+ case Bind(name, _) => name.isTypeName
+ case Select(_, name) => name.isTypeName
+ case Ident(name) => name.isTypeName
+ case Annotated(_, arg) => arg.isType
+ case _ => false
+ }
+
+ private[scala] def copyAttrs(tree: Tree): this.type = {
+ rawatt = tree.rawatt
+ tpe = tree.tpe
+ if (hasSymbol) symbol = tree.symbol
+ this
+ }
+
+ override def hashCode(): Int = System.identityHashCode(this)
+ override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
+
+ override def duplicate: this.type =
+ (duplicator transform this).asInstanceOf[this.type]
+ }
+
+ abstract class TreeContextApiImpl extends TreeContextApi { this: Tree =>
+
+ override def orElse(alt: => Tree) = if (!isEmpty) this else alt
+
+ override def foreach(f: Tree => Unit) { new ForeachTreeTraverser(f).traverse(this) }
+
+ override def withFilter(f: Tree => Boolean): List[Tree] = {
+ val ft = new FilterTreeTraverser(f)
+ ft.traverse(this)
+ ft.hits.toList
+ }
+
+ override def filter(f: Tree => Boolean): List[Tree] = withFilter(f)
+
+ override def collect[T](pf: PartialFunction[Tree, T]): List[T] = {
+ val ctt = new CollectTreeTraverser[T](pf)
+ ctt.traverse(this)
+ ctt.results.toList
+ }
+
+ override def find(p: Tree => Boolean): Option[Tree] = {
+ val ft = new FindTreeTraverser(p)
+ ft.traverse(this)
+ ft.result
+ }
+
+ override def exists(p: Tree => Boolean): Boolean = !find(p).isEmpty
+
+ override def forAll(p: Tree => Boolean): Boolean = find(!p(_)).isEmpty
+
+ override def equalsStructure(that : Tree) = correspondsStructure(that)(_ eq _)
+
+ def correspondsStructure(that: Tree)(f: (Tree,Tree) => Boolean): Boolean =
+ f(this, that) || ((productArity == that.productArity) && {
+ def equals0(this0: Any, that0: Any): Boolean = (this0, that0) match {
+ case (x: Tree, y: Tree) => f(x, y) || (x correspondsStructure y)(f)
+ case (xs: List[_], ys: List[_]) => (xs corresponds ys)(equals0)
+ case _ => this0 == that0
+ }
+ def compareOriginals() = (this, that) match {
+ case (x: TypeTree, y: TypeTree) if x.original != null && y.original != null =>
+ (x.original correspondsStructure y.original)(f)
+ case _ =>
+ true
+ }
+
+ (productIterator zip that.productIterator forall { case (x, y) => equals0(x, y) }) && compareOriginals()
+ })
+
+ override def children: List[Tree] = {
+ def subtrees(x: Any): List[Tree] = x match {
+ case EmptyTree => Nil
+ case t: Tree => List(t)
+ case xs: List[_] => xs flatMap subtrees
+ case _ => Nil
+ }
+ productIterator.toList flatMap subtrees
+ }
+
+ override def freeTerms: List[FreeTermSymbol] = freeSyms[FreeTermSymbol](_.isFreeTerm, _.termSymbol)
+ override def freeTypes: List[FreeTypeSymbol] = freeSyms[FreeTypeSymbol](_.isFreeType, _.typeSymbol)
+
+ private def freeSyms[S <: Symbol](isFree: Symbol => Boolean, symOfType: Type => Symbol): List[S] = {
+ val s = scala.collection.mutable.LinkedHashSet[S]()
+ def addIfFree(sym: Symbol): Unit = if (sym != null && isFree(sym)) s += sym.asInstanceOf[S]
+ for (t <- this) {
+ addIfFree(t.symbol)
+ if (t.tpe != null) {
+ for (tp <- t.tpe) {
+ addIfFree(symOfType(tp))
+ }
+ }
+ }
+ s.toList
+ }
+
+ override def substituteSymbols(from: List[Symbol], to: List[Symbol]): Tree =
+ new TreeSymSubstituter(from, to)(this)
+
+ override def substituteTypes(from: List[Symbol], to: List[Type]): Tree =
+ new TreeTypeSubstituter(from, to)(this)
+
+ override def substituteThis(clazz: Symbol, to: Tree): Tree =
+ new ThisSubstituter(clazz, to) transform this
+
+ def hasSymbolWhich(f: Symbol => Boolean) =
+ (symbol ne null) && (symbol ne NoSymbol) && f(symbol)
+
+ def isErroneous = (tpe ne null) && tpe.isErroneous
+ def isTyped = (tpe ne null) && !tpe.isErroneous
+
+ /** Sets the tree's type to the result of the given function.
+ * If the type is null, it remains null - the function is not called.
+ */
+ def modifyType(f: Type => Type): Tree =
+ if (tpe eq null) this
+ else this setType f(tpe)
+
+ /** If `pf` is defined for a given subtree, call super.traverse(pf(tree)),
+ * otherwise super.traverse(tree).
+ */
+ def foreachPartial(pf: PartialFunction[Tree, Tree]) {
+ new ForeachPartialTreeTraverser(pf).traverse(this)
+ }
+
+ def changeOwner(pairs: (Symbol, Symbol)*): Tree = {
+ pairs.foldLeft(this) { case (t, (oldOwner, newOwner)) =>
+ new ChangeOwnerTraverser(oldOwner, newOwner) apply t
+ }
+ }
+
+ def shallowDuplicate: Tree = new ShallowDuplicator(this) transform this
+ def shortClass: String = (getClass.getName split "[.$]").last
+
+ def isErrorTyped = (tpe ne null) && tpe.isError
+
+ /** When you want to know a little more than the class, but a lot
+ * less than the whole tree.
+ */
+ def summaryString: String = this match {
+ case Literal(const) => "Literal(" + const + ")"
+ case Ident(name) => "Ident(%s)".format(name.decode)
+ case Select(qual, name) => "Select(%s, %s)".format(qual.summaryString, name.decode)
+ case t: NameTree => t.name.longString
+ case t =>
+ t.shortClass + (
+ if (t.symbol != null && t.symbol != NoSymbol) "(" + t.symbol + ")"
+ else ""
+ )
+ }
+ }
+
+ trait TermTree extends Tree with TermTreeApi
+
+ trait TypTree extends Tree with TypTreeApi
+
+ abstract class SymTree extends Tree with SymTreeContextApi {
+ override def hasSymbol = true
+ override var symbol: Symbol = NoSymbol
+ }
+
+ trait NameTree extends Tree with NameTreeApi {
+ def name: Name
+ }
+
+ trait RefTree extends SymTree with NameTree with RefTreeApi {
+ def qualifier: Tree // empty for Idents
+ def name: Name
+ }
+
+ abstract class DefTree extends SymTree with NameTree with DefTreeApi {
+ def name: Name
+ override def isDef = true
+ }
+
+ case object EmptyTree extends TermTree {
+ val asList = List(this)
+ super.tpe_=(NoType)
+ override def tpe_=(t: Type) =
+ if (t != NoType) throw new UnsupportedOperationException("tpe_=("+t+") inapplicable for <empty>")
+ override def isEmpty = true
+ }
+
+ abstract class MemberDef extends DefTree with MemberDefApi {
+ def mods: Modifiers
+ def keyword: String = this match {
+ case TypeDef(_, _, _, _) => "type"
+ case ClassDef(mods, _, _, _) => if (mods hasFlag TRAIT) "trait" else "class"
+ case DefDef(_, _, _, _, _, _) => "def"
+ case ModuleDef(_, _, _) => "object"
+ case PackageDef(_, _) => "package"
+ case ValDef(mods, _, _, _) => if (mods hasFlag MUTABLE) "var" else "val"
+ case _ => ""
+ }
+ }
+
+ case class PackageDef(pid: RefTree, stats: List[Tree])
+ extends MemberDef with PackageDefApi {
+ def name = pid.name
+ def mods = NoMods
+ }
+ object PackageDef extends PackageDefExtractor
+
+ abstract class ImplDef extends MemberDef with ImplDefApi {
+ def impl: Template
+ }
+
+ case class ClassDef(mods: Modifiers, name: TypeName, tparams: List[TypeDef], impl: Template)
+ extends ImplDef with ClassDefApi
+ object ClassDef extends ClassDefExtractor
+
+ case class ModuleDef(mods: Modifiers, name: TermName, impl: Template)
+ extends ImplDef with ModuleDefApi
+ object ModuleDef extends ModuleDefExtractor
+
+ abstract class ValOrDefDef extends MemberDef with ValOrDefDefApi {
+ def name: Name
+ def tpt: Tree
+ def rhs: Tree
+ }
+
+ case class ValDef(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree) extends ValOrDefDef with ValDefApi
+ object ValDef extends ValDefExtractor
+
+ case class DefDef(mods: Modifiers, name: Name, tparams: List[TypeDef],
+ vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree) extends ValOrDefDef with DefDefApi
+ object DefDef extends DefDefExtractor
+
+ case class TypeDef(mods: Modifiers, name: TypeName, tparams: List[TypeDef], rhs: Tree)
+ extends MemberDef with TypeDefApi
+ object TypeDef extends TypeDefExtractor
+
+ case class LabelDef(name: TermName, params: List[Ident], rhs: Tree)
+ extends DefTree with TermTree with LabelDefApi
+ object LabelDef extends LabelDefExtractor
+
+ case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int) extends ImportSelectorApi
+ object ImportSelector extends ImportSelectorExtractor {
+ val wild = ImportSelector(nme.WILDCARD, -1, null, -1)
+ val wildList = List(wild)
+ }
+
+ case class Import(expr: Tree, selectors: List[ImportSelector])
+ extends SymTree with ImportApi
+ object Import extends ImportExtractor
+
+ case class Template(parents: List[Tree], self: ValDef, body: List[Tree])
+ extends SymTree with TemplateApi
+ object Template extends TemplateExtractor
+
+ case class Block(stats: List[Tree], expr: Tree)
+ extends TermTree with BlockApi
+ object Block extends BlockExtractor
+
+ case class CaseDef(pat: Tree, guard: Tree, body: Tree)
+ extends Tree with CaseDefApi
+ object CaseDef extends CaseDefExtractor
+
+ case class Alternative(trees: List[Tree])
+ extends TermTree with AlternativeApi
+ object Alternative extends AlternativeExtractor
+
+ case class Star(elem: Tree)
+ extends TermTree with StarApi
+ object Star extends StarExtractor
+
+ case class Bind(name: Name, body: Tree)
+ extends DefTree with BindApi
+ object Bind extends BindExtractor
+
+ case class UnApply(fun: Tree, args: List[Tree])
+ extends TermTree with UnApplyApi
+ object UnApply extends UnApplyExtractor
+
+ /** An array of expressions. This AST node needs to be translated in backend.
+ * It is used to pass arguments to vararg arguments.
+ * Introduced by compiler phase uncurry.
+ *
+ * This AST node does not have direct correspondence to Scala code,
+ * and is used to pass arguments to vararg arguments. For instance:
+ *
+ * printf("%s%d", foo, 42)
+ *
+ * Is translated to after compiler phase uncurry to:
+ *
+ * Apply(
+ * Ident("printf"),
+ * Literal("%s%d"),
+ * ArrayValue(<Any>, List(Ident("foo"), Literal(42))))
+ */
+ case class ArrayValue(elemtpt: Tree, elems: List[Tree]) extends TermTree
+
+ case class Function(vparams: List[ValDef], body: Tree)
+ extends SymTree with TermTree with FunctionApi
+ object Function extends FunctionExtractor
+
+ case class Assign(lhs: Tree, rhs: Tree)
+ extends TermTree with AssignApi
+ object Assign extends AssignExtractor
+
+ case class AssignOrNamedArg(lhs: Tree, rhs: Tree)
+ extends TermTree with AssignOrNamedArgApi
+ object AssignOrNamedArg extends AssignOrNamedArgExtractor
+
+ case class If(cond: Tree, thenp: Tree, elsep: Tree)
+ extends TermTree with IfApi
+ object If extends IfExtractor
+
+ case class Match(selector: Tree, cases: List[CaseDef])
+ extends TermTree with MatchApi
+ object Match extends MatchExtractor
+
+ case class Return(expr: Tree)
+ extends SymTree with TermTree with ReturnApi
+ object Return extends ReturnExtractor
+
+ case class Try(block: Tree, catches: List[CaseDef], finalizer: Tree)
+ extends TermTree with TryApi
+ object Try extends TryExtractor
+
+ case class Throw(expr: Tree)
+ extends TermTree with ThrowApi
+ object Throw extends ThrowExtractor
+
+ case class New(tpt: Tree) extends TermTree with NewApi
+ object New extends NewExtractor
+
+ case class Typed(expr: Tree, tpt: Tree)
+ extends TermTree with TypedApi
+ object Typed extends TypedExtractor
+
+ abstract class GenericApply extends TermTree with GenericApplyApi {
+ val fun: Tree
+ val args: List[Tree]
+ }
+
+ case class TypeApply(fun: Tree, args: List[Tree])
+ extends GenericApply with TypeApplyApi {
+
+ assert(fun.isTerm, fun)
+
+ override def symbol: Symbol = fun.symbol
+ override def symbol_=(sym: Symbol) { fun.symbol = sym }
+ }
+ object TypeApply extends TypeApplyExtractor
+
+ case class Apply(fun: Tree, args: List[Tree])
+ extends GenericApply with ApplyApi {
+ override def symbol: Symbol = fun.symbol
+ override def symbol_=(sym: Symbol) { fun.symbol = sym }
+ }
+ object Apply extends ApplyExtractor
+
+ // TODO remove this class, add a tree attachment to Apply to track whether implicits were involved
+ // copying trees will all too easily forget to distinguish subclasses
+ class ApplyToImplicitArgs(fun: Tree, args: List[Tree]) extends Apply(fun, args)
+
+ // TODO remove this class, add a tree attachment to Apply to track whether implicits were involved
+ // copying trees will all too easily forget to distinguish subclasses
+ class ApplyImplicitView(fun: Tree, args: List[Tree]) extends Apply(fun, args)
+
+ def ApplyConstructor(tpt: Tree, args: List[Tree]) = Apply(Select(New(tpt), nme.CONSTRUCTOR), args)
+
+ case class ApplyDynamic(qual: Tree, args: List[Tree]) extends SymTree with TermTree
+
+ case class Super(qual: Tree, mix: TypeName) extends TermTree with SuperApi {
+ override def symbol: Symbol = qual.symbol
+ override def symbol_=(sym: Symbol) { qual.symbol = sym }
+ }
+ object Super extends SuperExtractor
+
+ case class This(qual: TypeName)
+ extends SymTree with TermTree with ThisApi
+ object This extends ThisExtractor
+
+ case class Select(qualifier: Tree, name: Name)
+ extends RefTree with SelectApi {
+
+ // !!! assert disabled due to test case pos/annotDepMethType.scala triggering it.
+ // assert(qualifier.isTerm, qualifier)
+ }
+ object Select extends SelectExtractor
+
+ case class Ident(name: Name) extends RefTree with IdentContextApi {
+ def qualifier: Tree = EmptyTree
+ def isBackquoted = this.attachments.get[BackquotedIdentifierAttachment.type].isDefined
+ }
+ object Ident extends IdentExtractor
+
+ case class ReferenceToBoxed(ident: Ident) extends TermTree with ReferenceToBoxedApi {
+ override def symbol: Symbol = ident.symbol
+ override def symbol_=(sym: Symbol) { ident.symbol = sym }
+ }
+ object ReferenceToBoxed extends ReferenceToBoxedExtractor
+
+ case class Literal(value: Constant)
+ extends TermTree with LiteralApi {
+ assert(value ne null)
+ }
+ object Literal extends LiteralExtractor
+
+// @deprecated("will be removed and then be re-introduced with changed semantics, use Literal(Constant(x)) instead")
+// def Literal(x: Any) = new Literal(Constant(x))
+
+ case class Annotated(annot: Tree, arg: Tree) extends Tree with AnnotatedApi
+ object Annotated extends AnnotatedExtractor
+
+ case class SingletonTypeTree(ref: Tree)
+ extends TypTree with SingletonTypeTreeApi
+ object SingletonTypeTree extends SingletonTypeTreeExtractor
+
+ case class SelectFromTypeTree(qualifier: Tree, name: TypeName)
+ extends RefTree with TypTree with SelectFromTypeTreeApi {
+
+ assert(qualifier.isType, qualifier)
+ }
+ object SelectFromTypeTree extends SelectFromTypeTreeExtractor
+
+ case class CompoundTypeTree(templ: Template)
+ extends TypTree with CompoundTypeTreeApi
+ object CompoundTypeTree extends CompoundTypeTreeExtractor
+
+ case class AppliedTypeTree(tpt: Tree, args: List[Tree])
+ extends TypTree with AppliedTypeTreeApi {
+
+ assert(tpt.isType, tpt)
+
+ override def symbol: Symbol = tpt.symbol
+ override def symbol_=(sym: Symbol) { tpt.symbol = sym }
+ }
+ object AppliedTypeTree extends AppliedTypeTreeExtractor
+
+ case class TypeBoundsTree(lo: Tree, hi: Tree)
+ extends TypTree with TypeBoundsTreeApi
+ object TypeBoundsTree extends TypeBoundsTreeExtractor
+
+ case class ExistentialTypeTree(tpt: Tree, whereClauses: List[Tree])
+ extends TypTree with ExistentialTypeTreeApi
+ object ExistentialTypeTree extends ExistentialTypeTreeExtractor
+
+ case class TypeTree() extends TypTree with TypeTreeContextApi {
+ private var orig: Tree = null
+ /** Was this type tree originally empty? That is, does it now contain
+ * an inferred type that must be forgotten in `resetAttrs` to
+ * enable retyping.
+ */
+ private[scala] var wasEmpty: Boolean = false
+
+ override def symbol = typeTreeSymbol(this) // if (tpe == null) null else tpe.typeSymbol
+ override def isEmpty = (tpe eq null) || tpe == NoType
+
+ def original: Tree = orig
+ def setOriginal(tree: Tree): this.type = {
+ def followOriginal(t: Tree): Tree = t match {
+ case tt: TypeTree => followOriginal(tt.original)
+ case t => t
+ }
+
+ orig = followOriginal(tree); setPos(tree.pos);
+ this
+ }
+
+ override def defineType(tp: Type): this.type = {
+ wasEmpty = isEmpty
+ setType(tp)
+ }
+
+ override private[scala] def copyAttrs(tree: Tree) = {
+ super.copyAttrs(tree)
+ tree match {
+ case other: TypeTree =>
+ // SI-6648 Critical for correct operation of `resetAttrs`.
+ wasEmpty = other.wasEmpty
+ if (other.orig != null)
+ orig = other.orig.duplicate
+ case _ =>
+ }
+ this
+ }
+ }
+ object TypeTree extends TypeTreeExtractor
+
+ def TypeTree(tp: Type): TypeTree = TypeTree() setType tp
+
+ override type TreeCopier <: InternalTreeCopierOps
+ abstract class InternalTreeCopierOps extends TreeCopierOps {
+ def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]): ApplyDynamic
+ def ArrayValue(tree: Tree, elemtpt: Tree, trees: List[Tree]): ArrayValue
+ }
+
+ class StrictTreeCopier extends InternalTreeCopierOps {
+ def ClassDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template) =
+ new ClassDef(mods, name.toTypeName, tparams, impl).copyAttrs(tree)
+ def PackageDef(tree: Tree, pid: RefTree, stats: List[Tree]) =
+ new PackageDef(pid, stats).copyAttrs(tree)
+ def ModuleDef(tree: Tree, mods: Modifiers, name: Name, impl: Template) =
+ new ModuleDef(mods, name.toTermName, impl).copyAttrs(tree)
+ def ValDef(tree: Tree, mods: Modifiers, name: Name, tpt: Tree, rhs: Tree) =
+ new ValDef(mods, name.toTermName, tpt, rhs).copyAttrs(tree)
+ def DefDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree) =
+ new DefDef(mods, name.toTermName, tparams, vparamss, tpt, rhs).copyAttrs(tree)
+ def TypeDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], rhs: Tree) =
+ new TypeDef(mods, name.toTypeName, tparams, rhs).copyAttrs(tree)
+ def LabelDef(tree: Tree, name: Name, params: List[Ident], rhs: Tree) =
+ new LabelDef(name.toTermName, params, rhs).copyAttrs(tree)
+ def Import(tree: Tree, expr: Tree, selectors: List[ImportSelector]) =
+ new Import(expr, selectors).copyAttrs(tree)
+ def Template(tree: Tree, parents: List[Tree], self: ValDef, body: List[Tree]) =
+ new Template(parents, self, body).copyAttrs(tree)
+ def Block(tree: Tree, stats: List[Tree], expr: Tree) =
+ new Block(stats, expr).copyAttrs(tree)
+ def CaseDef(tree: Tree, pat: Tree, guard: Tree, body: Tree) =
+ new CaseDef(pat, guard, body).copyAttrs(tree)
+ def Alternative(tree: Tree, trees: List[Tree]) =
+ new Alternative(trees).copyAttrs(tree)
+ def Star(tree: Tree, elem: Tree) =
+ new Star(elem).copyAttrs(tree)
+ def Bind(tree: Tree, name: Name, body: Tree) =
+ new Bind(name, body).copyAttrs(tree)
+ def UnApply(tree: Tree, fun: Tree, args: List[Tree]) =
+ new UnApply(fun, args).copyAttrs(tree)
+ def ArrayValue(tree: Tree, elemtpt: Tree, trees: List[Tree]) =
+ new ArrayValue(elemtpt, trees).copyAttrs(tree)
+ def Function(tree: Tree, vparams: List[ValDef], body: Tree) =
+ new Function(vparams, body).copyAttrs(tree)
+ def Assign(tree: Tree, lhs: Tree, rhs: Tree) =
+ new Assign(lhs, rhs).copyAttrs(tree)
+ def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree) =
+ new AssignOrNamedArg(lhs, rhs).copyAttrs(tree)
+ def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree) =
+ new If(cond, thenp, elsep).copyAttrs(tree)
+ def Match(tree: Tree, selector: Tree, cases: List[CaseDef]) =
+ new Match(selector, cases).copyAttrs(tree)
+ def Return(tree: Tree, expr: Tree) =
+ new Return(expr).copyAttrs(tree)
+ def Try(tree: Tree, block: Tree, catches: List[CaseDef], finalizer: Tree) =
+ new Try(block, catches, finalizer).copyAttrs(tree)
+ def Throw(tree: Tree, expr: Tree) =
+ new Throw(expr).copyAttrs(tree)
+ def New(tree: Tree, tpt: Tree) =
+ new New(tpt).copyAttrs(tree)
+ def Typed(tree: Tree, expr: Tree, tpt: Tree) =
+ new Typed(expr, tpt).copyAttrs(tree)
+ def TypeApply(tree: Tree, fun: Tree, args: List[Tree]) =
+ new TypeApply(fun, args).copyAttrs(tree)
+ def Apply(tree: Tree, fun: Tree, args: List[Tree]) =
+ (tree match { // TODO: use a tree attachment to track whether this is an apply to implicit args or a view
+ case _: ApplyToImplicitArgs => new ApplyToImplicitArgs(fun, args)
+ case _: ApplyImplicitView => new ApplyImplicitView(fun, args)
+ // TODO: ApplyConstructor ???
+ case _ => new Apply(fun, args)
+ }).copyAttrs(tree)
+ def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]) =
+ new ApplyDynamic(qual, args).copyAttrs(tree)
+ def Super(tree: Tree, qual: Tree, mix: TypeName) =
+ new Super(qual, mix).copyAttrs(tree)
+ def This(tree: Tree, qual: Name) =
+ new This(qual.toTypeName).copyAttrs(tree)
+ def Select(tree: Tree, qualifier: Tree, selector: Name) =
+ new Select(qualifier, selector).copyAttrs(tree)
+ def Ident(tree: Tree, name: Name) =
+ new Ident(name) copyAttrs tree
+ def ReferenceToBoxed(tree: Tree, idt: Ident) =
+ new ReferenceToBoxed(idt).copyAttrs(tree)
+ def Literal(tree: Tree, value: Constant) =
+ new Literal(value).copyAttrs(tree)
+ def TypeTree(tree: Tree) =
+ new TypeTree().copyAttrs(tree)
+ def Annotated(tree: Tree, annot: Tree, arg: Tree) =
+ new Annotated(annot, arg).copyAttrs(tree)
+ def SingletonTypeTree(tree: Tree, ref: Tree) =
+ new SingletonTypeTree(ref).copyAttrs(tree)
+ def SelectFromTypeTree(tree: Tree, qualifier: Tree, selector: Name) =
+ new SelectFromTypeTree(qualifier, selector.toTypeName).copyAttrs(tree)
+ def CompoundTypeTree(tree: Tree, templ: Template) =
+ new CompoundTypeTree(templ).copyAttrs(tree)
+ def AppliedTypeTree(tree: Tree, tpt: Tree, args: List[Tree]) =
+ new AppliedTypeTree(tpt, args).copyAttrs(tree)
+ def TypeBoundsTree(tree: Tree, lo: Tree, hi: Tree) =
+ new TypeBoundsTree(lo, hi).copyAttrs(tree)
+ def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]) =
+ new ExistentialTypeTree(tpt, whereClauses).copyAttrs(tree)
+ }
+
+ class LazyTreeCopier extends InternalTreeCopierOps {
+ val treeCopy: TreeCopier = newStrictTreeCopier
+ def ClassDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template) = tree match {
+ case t @ ClassDef(mods0, name0, tparams0, impl0)
+ if (mods0 == mods) && (name0 == name) && (tparams0 == tparams) && (impl0 == impl) => t
+ case _ => treeCopy.ClassDef(tree, mods, name, tparams, impl)
+ }
+ def PackageDef(tree: Tree, pid: RefTree, stats: List[Tree]) = tree match {
+ case t @ PackageDef(pid0, stats0)
+ if (pid0 == pid) && (stats0 == stats) => t
+ case _ => treeCopy.PackageDef(tree, pid, stats)
+ }
+ def ModuleDef(tree: Tree, mods: Modifiers, name: Name, impl: Template) = tree match {
+ case t @ ModuleDef(mods0, name0, impl0)
+ if (mods0 == mods) && (name0 == name) && (impl0 == impl) => t
+ case _ => treeCopy.ModuleDef(tree, mods, name, impl)
+ }
+ def ValDef(tree: Tree, mods: Modifiers, name: Name, tpt: Tree, rhs: Tree) = tree match {
+ case t @ ValDef(mods0, name0, tpt0, rhs0)
+ if (mods0 == mods) && (name0 == name) && (tpt0 == tpt) && (rhs0 == rhs) => t
+ case _ => treeCopy.ValDef(tree, mods, name, tpt, rhs)
+ }
+ def DefDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree) = tree match {
+ case t @ DefDef(mods0, name0, tparams0, vparamss0, tpt0, rhs0)
+ if (mods0 == mods) && (name0 == name) && (tparams0 == tparams) &&
+ (vparamss0 == vparamss) && (tpt0 == tpt) && (rhs == rhs0) => t
+ case _ => treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, rhs)
+ }
+ def TypeDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], rhs: Tree) = tree match {
+ case t @ TypeDef(mods0, name0, tparams0, rhs0)
+ if (mods0 == mods) && (name0 == name) && (tparams0 == tparams) && (rhs0 == rhs) => t
+ case _ => treeCopy.TypeDef(tree, mods, name, tparams, rhs)
+ }
+ def LabelDef(tree: Tree, name: Name, params: List[Ident], rhs: Tree) = tree match {
+ case t @ LabelDef(name0, params0, rhs0)
+ if (name0 == name) && (params0 == params) && (rhs0 == rhs) => t
+ case _ => treeCopy.LabelDef(tree, name, params, rhs)
+ }
+ def Import(tree: Tree, expr: Tree, selectors: List[ImportSelector]) = tree match {
+ case t @ Import(expr0, selectors0)
+ if (expr0 == expr) && (selectors0 == selectors) => t
+ case _ => treeCopy.Import(tree, expr, selectors)
+ }
+ def Template(tree: Tree, parents: List[Tree], self: ValDef, body: List[Tree]) = tree match {
+ case t @ Template(parents0, self0, body0)
+ if (parents0 == parents) && (self0 == self) && (body0 == body) => t
+ case _ => treeCopy.Template(tree, parents, self, body)
+ }
+ def Block(tree: Tree, stats: List[Tree], expr: Tree) = tree match {
+ case t @ Block(stats0, expr0)
+ if ((stats0 == stats) && (expr0 == expr)) => t
+ case _ => treeCopy.Block(tree, stats, expr)
+ }
+ def CaseDef(tree: Tree, pat: Tree, guard: Tree, body: Tree) = tree match {
+ case t @ CaseDef(pat0, guard0, body0)
+ if (pat0 == pat) && (guard0 == guard) && (body0 == body) => t
+ case _ => treeCopy.CaseDef(tree, pat, guard, body)
+ }
+ def Alternative(tree: Tree, trees: List[Tree]) = tree match {
+ case t @ Alternative(trees0)
+ if trees0 == trees => t
+ case _ => treeCopy.Alternative(tree, trees)
+ }
+ def Star(tree: Tree, elem: Tree) = tree match {
+ case t @ Star(elem0)
+ if elem0 == elem => t
+ case _ => treeCopy.Star(tree, elem)
+ }
+ def Bind(tree: Tree, name: Name, body: Tree) = tree match {
+ case t @ Bind(name0, body0)
+ if (name0 == name) && (body0 == body) => t
+ case _ => treeCopy.Bind(tree, name, body)
+ }
+ def UnApply(tree: Tree, fun: Tree, args: List[Tree]) = tree match {
+ case t @ UnApply(fun0, args0)
+ if (fun0 == fun) && (args0 == args) => t
+ case _ => treeCopy.UnApply(tree, fun, args)
+ }
+ def ArrayValue(tree: Tree, elemtpt: Tree, trees: List[Tree]) = tree match {
+ case t @ ArrayValue(elemtpt0, trees0)
+ if (elemtpt0 == elemtpt) && (trees0 == trees) => t
+ case _ => treeCopy.ArrayValue(tree, elemtpt, trees)
+ }
+ def Function(tree: Tree, vparams: List[ValDef], body: Tree) = tree match {
+ case t @ Function(vparams0, body0)
+ if (vparams0 == vparams) && (body0 == body) => t
+ case _ => treeCopy.Function(tree, vparams, body)
+ }
+ def Assign(tree: Tree, lhs: Tree, rhs: Tree) = tree match {
+ case t @ Assign(lhs0, rhs0)
+ if (lhs0 == lhs) && (rhs0 == rhs) => t
+ case _ => treeCopy.Assign(tree, lhs, rhs)
+ }
+ def AssignOrNamedArg(tree: Tree, lhs: Tree, rhs: Tree) = tree match {
+ case t @ AssignOrNamedArg(lhs0, rhs0)
+ if (lhs0 == lhs) && (rhs0 == rhs) => t
+ case _ => treeCopy.AssignOrNamedArg(tree, lhs, rhs)
+ }
+ def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree) = tree match {
+ case t @ If(cond0, thenp0, elsep0)
+ if (cond0 == cond) && (thenp0 == thenp) && (elsep0 == elsep) => t
+ case _ => treeCopy.If(tree, cond, thenp, elsep)
+ }
+ def Match(tree: Tree, selector: Tree, cases: List[CaseDef]) = tree match {
+ case t @ Match(selector0, cases0)
+ if (selector0 == selector) && (cases0 == cases) => t
+ case _ => treeCopy.Match(tree, selector, cases)
+ }
+ def Return(tree: Tree, expr: Tree) = tree match {
+ case t @ Return(expr0)
+ if expr0 == expr => t
+ case _ => treeCopy.Return(tree, expr)
+ }
+ def Try(tree: Tree, block: Tree, catches: List[CaseDef], finalizer: Tree) = tree match {
+ case t @ Try(block0, catches0, finalizer0)
+ if (block0 == block) && (catches0 == catches) && (finalizer0 == finalizer) => t
+ case _ => treeCopy.Try(tree, block, catches, finalizer)
+ }
+ def Throw(tree: Tree, expr: Tree) = tree match {
+ case t @ Throw(expr0)
+ if expr0 == expr => t
+ case _ => treeCopy.Throw(tree, expr)
+ }
+ def New(tree: Tree, tpt: Tree) = tree match {
+ case t @ New(tpt0)
+ if tpt0 == tpt => t
+ case _ => treeCopy.New(tree, tpt)
+ }
+ def Typed(tree: Tree, expr: Tree, tpt: Tree) = tree match {
+ case t @ Typed(expr0, tpt0)
+ if (expr0 == expr) && (tpt0 == tpt) => t
+ case _ => treeCopy.Typed(tree, expr, tpt)
+ }
+ def TypeApply(tree: Tree, fun: Tree, args: List[Tree]) = tree match {
+ case t @ TypeApply(fun0, args0)
+ if (fun0 == fun) && (args0 == args) => t
+ case _ => treeCopy.TypeApply(tree, fun, args)
+ }
+ def Apply(tree: Tree, fun: Tree, args: List[Tree]) = tree match {
+ case t @ Apply(fun0, args0)
+ if (fun0 == fun) && (args0 == args) => t
+ case _ => treeCopy.Apply(tree, fun, args)
+ }
+ def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]) = tree match {
+ case t @ ApplyDynamic(qual0, args0)
+ if (qual0 == qual) && (args0 == args) => t
+ case _ => treeCopy.ApplyDynamic(tree, qual, args)
+ }
+ def Super(tree: Tree, qual: Tree, mix: TypeName) = tree match {
+ case t @ Super(qual0, mix0)
+ if (qual0 == qual) && (mix0 == mix) => t
+ case _ => treeCopy.Super(tree, qual, mix)
+ }
+ def This(tree: Tree, qual: Name) = tree match {
+ case t @ This(qual0)
+ if qual0 == qual => t
+ case _ => treeCopy.This(tree, qual)
+ }
+ def Select(tree: Tree, qualifier: Tree, selector: Name) = tree match {
+ case t @ Select(qualifier0, selector0)
+ if (qualifier0 == qualifier) && (selector0 == selector) => t
+ case _ => treeCopy.Select(tree, qualifier, selector)
+ }
+ def Ident(tree: Tree, name: Name) = tree match {
+ case t @ Ident(name0)
+ if name0 == name => t
+ case _ => treeCopy.Ident(tree, name)
+ }
+ def ReferenceToBoxed(tree: Tree, idt: Ident) = tree match {
+ case t @ ReferenceToBoxed(idt0)
+ if (idt0 == idt) => t
+ case _ => this.treeCopy.ReferenceToBoxed(tree, idt)
+ }
+ def Literal(tree: Tree, value: Constant) = tree match {
+ case t @ Literal(value0)
+ if value0 == value => t
+ case _ => treeCopy.Literal(tree, value)
+ }
+ def TypeTree(tree: Tree) = tree match {
+ case t @ TypeTree() => t
+ case _ => treeCopy.TypeTree(tree)
+ }
+ def Annotated(tree: Tree, annot: Tree, arg: Tree) = tree match {
+ case t @ Annotated(annot0, arg0)
+ if (annot0==annot && arg0==arg) => t
+ case _ => treeCopy.Annotated(tree, annot, arg)
+ }
+ def SingletonTypeTree(tree: Tree, ref: Tree) = tree match {
+ case t @ SingletonTypeTree(ref0)
+ if ref0 == ref => t
+ case _ => treeCopy.SingletonTypeTree(tree, ref)
+ }
+ def SelectFromTypeTree(tree: Tree, qualifier: Tree, selector: Name) = tree match {
+ case t @ SelectFromTypeTree(qualifier0, selector0)
+ if (qualifier0 == qualifier) && (selector0 == selector) => t
+ case _ => treeCopy.SelectFromTypeTree(tree, qualifier, selector)
+ }
+ def CompoundTypeTree(tree: Tree, templ: Template) = tree match {
+ case t @ CompoundTypeTree(templ0)
+ if templ0 == templ => t
+ case _ => treeCopy.CompoundTypeTree(tree, templ)
+ }
+ def AppliedTypeTree(tree: Tree, tpt: Tree, args: List[Tree]) = tree match {
+ case t @ AppliedTypeTree(tpt0, args0)
+ if (tpt0 == tpt) && (args0 == args) => t
+ case _ => treeCopy.AppliedTypeTree(tree, tpt, args)
+ }
+ def TypeBoundsTree(tree: Tree, lo: Tree, hi: Tree) = tree match {
+ case t @ TypeBoundsTree(lo0, hi0)
+ if (lo0 == lo) && (hi0 == hi) => t
+ case _ => treeCopy.TypeBoundsTree(tree, lo, hi)
+ }
+ def ExistentialTypeTree(tree: Tree, tpt: Tree, whereClauses: List[Tree]) = tree match {
+ case t @ ExistentialTypeTree(tpt0, whereClauses0)
+ if (tpt0 == tpt) && (whereClauses0 == whereClauses) => t
+ case _ => treeCopy.ExistentialTypeTree(tree, tpt, whereClauses)
+ }
+ }
+
+ // Belongs in TreeInfo but then I can't reach it from Printers.
+ def isReferenceToScalaMember(t: Tree, Id: Name) = t match {
+ case Ident(Id) => true
+ case Select(Ident(nme.scala_), Id) => true
+ case Select(Select(Ident(nme.ROOTPKG), nme.scala_), Id) => true
+ case _ => false
+ }
+ /** Is the tree Predef, scala.Predef, or _root_.scala.Predef?
+ */
+ def isReferenceToPredef(t: Tree) = isReferenceToScalaMember(t, nme.Predef)
+ def isReferenceToAnyVal(t: Tree) = isReferenceToScalaMember(t, tpnme.AnyVal)
+
+ // --- modifiers implementation ---------------------------------------
+
+ /** @param privateWithin the qualifier for a private (a type name)
+ * or tpnme.EMPTY, if none is given.
+ * @param annotations the annotations for the definition.
+ * '''Note:''' the typechecker drops these annotations,
+ * use the AnnotationInfo's (Symbol.annotations) in later phases.
+ */
+ case class Modifiers(flags: Long,
+ privateWithin: Name,
+ annotations: List[Tree]) extends ModifiersApi with HasFlags {
+
+ var positions: Map[Long, Position] = Map()
+
+ def setPositions(poss: Map[Long, Position]): this.type = {
+ positions = poss; this
+ }
+
+ /* Abstract types from HasFlags. */
+ type AccessBoundaryType = Name
+ type AnnotationType = Tree
+
+ def hasAnnotationNamed(name: TypeName) = {
+ annotations exists {
+ case Apply(Select(New(Ident(`name`)), _), _) => true
+ case Apply(Select(New(Select(_, `name`)), _), _) => true
+ case _ => false
+ }
+ }
+
+ def hasAccessBoundary = privateWithin != tpnme.EMPTY
+ def hasAllFlags(mask: Long): Boolean = (flags & mask) == mask
+ def hasFlag(flag: Long) = (flag & flags) != 0L
+
+ def & (flag: Long): Modifiers = {
+ val flags1 = flags & flag
+ if (flags1 == flags) this
+ else Modifiers(flags1, privateWithin, annotations) setPositions positions
+ }
+ def &~ (flag: Long): Modifiers = {
+ val flags1 = flags & (~flag)
+ if (flags1 == flags) this
+ else Modifiers(flags1, privateWithin, annotations) setPositions positions
+ }
+ def | (flag: Long): Modifiers = {
+ val flags1 = flags | flag
+ if (flags1 == flags) this
+ else Modifiers(flags1, privateWithin, annotations) setPositions positions
+ }
+ def withAnnotations(annots: List[Tree]) =
+ if (annots.isEmpty) this
+ else copy(annotations = annotations ::: annots) setPositions positions
+
+ def withPosition(flag: Long, position: Position) =
+ copy() setPositions positions + (flag -> position)
+
+ override def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers =
+ Modifiers(flags, privateWithin, f(annotations)) setPositions positions
+
+ override def toString = "Modifiers(%s, %s, %s)".format(flagString, annotations mkString ", ", positions)
+ }
+
+ object Modifiers extends ModifiersCreator
+
+ implicit val ModifiersTag = ClassTag[Modifiers](classOf[Modifiers])
+
+ // ---- values and creators ---------------------------------------
+
+ /** @param sym the class symbol
+ * @return the implementation template
+ */
+ def ClassDef(sym: Symbol, impl: Template): ClassDef =
+ atPos(sym.pos) {
+ ClassDef(Modifiers(sym.flags),
+ sym.name.toTypeName,
+ sym.typeParams map TypeDef,
+ impl) setSymbol sym
+ }
+
+ /**
+ * @param sym the class symbol
+ * @param impl the implementation template
+ */
+ def ModuleDef(sym: Symbol, impl: Template): ModuleDef =
+ atPos(sym.pos) {
+ ModuleDef(Modifiers(sym.flags), sym.name.toTermName, impl) setSymbol sym
+ }
+
+ def ValDef(sym: Symbol, rhs: Tree): ValDef =
+ atPos(sym.pos) {
+ ValDef(Modifiers(sym.flags), sym.name.toTermName,
+ TypeTree(sym.tpe) setPos sym.pos.focus,
+ rhs) setSymbol sym
+ }
+
+ def ValDef(sym: Symbol): ValDef = ValDef(sym, EmptyTree)
+
+ object emptyValDef extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) {
+ override def isEmpty = true
+ super.setPos(NoPosition)
+ override def setPos(pos: Position) = { assert(false); this }
+ }
+
+ def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
+ atPos(sym.pos) {
+ assert(sym != NoSymbol)
+ DefDef(mods,
+ sym.name.toTermName,
+ sym.typeParams map TypeDef,
+ vparamss,
+ TypeTree(sym.tpe.finalResultType) setPos sym.pos.focus,
+ rhs) setSymbol sym
+ }
+
+ def DefDef(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
+ DefDef(sym, Modifiers(sym.flags), vparamss, rhs)
+
+ def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef =
+ DefDef(sym, mods, mapParamss(sym)(ValDef), rhs)
+
+ /** A DefDef with original trees attached to the TypeTree of each parameter */
+ def DefDef(sym: Symbol, mods: Modifiers, originalParamTpts: Symbol => Tree, rhs: Tree): DefDef = {
+ val paramms = mapParamss(sym){ sym =>
+ val vd = ValDef(sym, EmptyTree)
+ (vd.tpt : @unchecked) match {
+ case tt: TypeTree => tt setOriginal (originalParamTpts(sym) setPos sym.pos.focus)
+ }
+ vd
+ }
+ DefDef(sym, mods, paramms, rhs)
+ }
+
+ def DefDef(sym: Symbol, rhs: Tree): DefDef =
+ DefDef(sym, Modifiers(sym.flags), rhs)
+
+ def DefDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef =
+ DefDef(sym, rhs(sym.info.paramss))
+
+ /** A TypeDef node which defines given `sym` with given tight hand side `rhs`. */
+ def TypeDef(sym: Symbol, rhs: Tree): TypeDef =
+ atPos(sym.pos) {
+ TypeDef(Modifiers(sym.flags), sym.name.toTypeName, sym.typeParams map TypeDef, rhs) setSymbol sym
+ }
+
+ /** A TypeDef node which defines abstract type or type parameter for given `sym` */
+ def TypeDef(sym: Symbol): TypeDef =
+ TypeDef(sym, TypeBoundsTree(TypeTree(sym.info.bounds.lo), TypeTree(sym.info.bounds.hi)))
+
+ def LabelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef =
+ atPos(sym.pos) {
+ LabelDef(sym.name.toTermName, params map Ident, rhs) setSymbol sym
+ }
+
+ /** casedef shorthand */
+ def CaseDef(pat: Tree, body: Tree): CaseDef =
+ CaseDef(pat, EmptyTree, body)
+
+ def Bind(sym: Symbol, body: Tree): Bind =
+ Bind(sym.name, body) setSymbol sym
+
+ def Try(body: Tree, cases: (Tree, Tree)*): Try =
+ Try(body, cases.toList map { case (pat, rhs) => CaseDef(pat, EmptyTree, rhs) }, EmptyTree)
+
+ def Throw(tpe: Type, args: Tree*): Throw =
+ Throw(New(tpe, args: _*))
+
+ def Apply(sym: Symbol, args: Tree*): Tree =
+ Apply(Ident(sym), args.toList)
+
+ /** Factory method for object creation `new tpt(args_1)...(args_n)`
+ * A `New(t, as)` is expanded to: `(new t).<init>(as)`
+ */
+ def New(tpt: Tree, argss: List[List[Tree]]): Tree = argss match {
+ case Nil => ApplyConstructor(tpt, Nil)
+ case xs :: rest => rest.foldLeft(ApplyConstructor(tpt, xs): Tree)(Apply.apply)
+ }
+
+ /** 0-1 argument list new, based on a type.
+ */
+ def New(tpe: Type, args: Tree*): Tree =
+ ApplyConstructor(TypeTree(tpe), args.toList)
+
+ def New(sym: Symbol, args: Tree*): Tree =
+ New(sym.tpe, args: _*)
+
+ def Super(sym: Symbol, mix: TypeName): Tree =
+ Super(This(sym), mix)
+
+ def This(sym: Symbol): Tree =
+ This(sym.name.toTypeName) setSymbol sym
+
+ def Select(qualifier: Tree, name: String): Select =
+ Select(qualifier, newTermName(name))
+
+ def Select(qualifier: Tree, sym: Symbol): Select =
+ Select(qualifier, sym.name) setSymbol sym
+
+ def Ident(name: String): Ident =
+ Ident(newTermName(name))
+
+ def Ident(sym: Symbol): Ident =
+ Ident(sym.name) setSymbol sym
+
+ /** Block factory that flattens directly nested blocks.
+ */
+ def Block(stats: Tree*): Block = {
+ if (stats.isEmpty) Block(Nil, Literal(Constant(())))
+ else stats match {
+ case Seq(b @ Block(_, _)) => b
+ case Seq(stat) => Block(stats.toList, Literal(Constant(())))
+ case Seq(_, rest @ _*) => Block(stats.init.toList, stats.last)
+ }
+ }
+
+
+ /** Delegate for a TypeTree symbol. This operation is unsafe because
+ * it may trigger type checking when forcing the type symbol of the
+ * underlying type.
+ */
+ protected def typeTreeSymbol(tree: TypeTree): Symbol =
+ if (tree.tpe == null) null else tree.tpe.typeSymbol
+
+ // --- generic traversers and transformers
+
+ override protected def itraverse(traverser: Traverser, tree: Tree): Unit = {
+ import traverser._
+ tree match {
+ case EmptyTree =>
+ ;
+ case PackageDef(pid, stats) =>
+ traverse(pid)
+ atOwner(mclass(tree.symbol)) {
+ traverseTrees(stats)
+ }
+ case ClassDef(mods, name, tparams, impl) =>
+ atOwner(tree.symbol) {
+ traverseTrees(mods.annotations); traverseTrees(tparams); traverse(impl)
+ }
+ case ModuleDef(mods, name, impl) =>
+ atOwner(mclass(tree.symbol)) {
+ traverseTrees(mods.annotations); traverse(impl)
+ }
+ case ValDef(mods, name, tpt, rhs) =>
+ atOwner(tree.symbol) {
+ traverseTrees(mods.annotations); traverse(tpt); traverse(rhs)
+ }
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ atOwner(tree.symbol) {
+ traverseTrees(mods.annotations); traverseTrees(tparams); traverseTreess(vparamss); traverse(tpt); traverse(rhs)
+ }
+ case TypeDef(mods, name, tparams, rhs) =>
+ atOwner(tree.symbol) {
+ traverseTrees(mods.annotations); traverseTrees(tparams); traverse(rhs)
+ }
+ case LabelDef(name, params, rhs) =>
+ traverseTrees(params); traverse(rhs)
+ case Import(expr, selectors) =>
+ traverse(expr)
+ case Annotated(annot, arg) =>
+ traverse(annot); traverse(arg)
+ case Template(parents, self, body) =>
+ traverseTrees(parents)
+ if (!self.isEmpty) traverse(self)
+ traverseStats(body, tree.symbol)
+ case Block(stats, expr) =>
+ traverseTrees(stats); traverse(expr)
+ case CaseDef(pat, guard, body) =>
+ traverse(pat); traverse(guard); traverse(body)
+ case Alternative(trees) =>
+ traverseTrees(trees)
+ case Star(elem) =>
+ traverse(elem)
+ case Bind(name, body) =>
+ traverse(body)
+ case UnApply(fun, args) =>
+ traverse(fun); traverseTrees(args)
+ case ArrayValue(elemtpt, trees) =>
+ traverse(elemtpt); traverseTrees(trees)
+ case Function(vparams, body) =>
+ atOwner(tree.symbol) {
+ traverseTrees(vparams); traverse(body)
+ }
+ case Assign(lhs, rhs) =>
+ traverse(lhs); traverse(rhs)
+ case AssignOrNamedArg(lhs, rhs) =>
+ traverse(lhs); traverse(rhs)
+ case If(cond, thenp, elsep) =>
+ traverse(cond); traverse(thenp); traverse(elsep)
+ case Match(selector, cases) =>
+ traverse(selector); traverseTrees(cases)
+ case Return(expr) =>
+ traverse(expr)
+ case Try(block, catches, finalizer) =>
+ traverse(block); traverseTrees(catches); traverse(finalizer)
+ case Throw(expr) =>
+ traverse(expr)
+ case New(tpt) =>
+ traverse(tpt)
+ case Typed(expr, tpt) =>
+ traverse(expr); traverse(tpt)
+ case TypeApply(fun, args) =>
+ traverse(fun); traverseTrees(args)
+ case Apply(fun, args) =>
+ traverse(fun); traverseTrees(args)
+ case ApplyDynamic(qual, args) =>
+ traverse(qual); traverseTrees(args)
+ case Super(qual, _) =>
+ traverse(qual)
+ case This(_) =>
+ ;
+ case Select(qualifier, selector) =>
+ traverse(qualifier)
+ case Ident(_) =>
+ ;
+ case ReferenceToBoxed(idt) =>
+ traverse(idt)
+ case Literal(_) =>
+ ;
+ case TypeTree() =>
+ ;
+ case SingletonTypeTree(ref) =>
+ traverse(ref)
+ case SelectFromTypeTree(qualifier, selector) =>
+ traverse(qualifier)
+ case CompoundTypeTree(templ) =>
+ traverse(templ)
+ case AppliedTypeTree(tpt, args) =>
+ traverse(tpt); traverseTrees(args)
+ case TypeBoundsTree(lo, hi) =>
+ traverse(lo); traverse(hi)
+ case ExistentialTypeTree(tpt, whereClauses) =>
+ traverse(tpt); traverseTrees(whereClauses)
+ case _ => xtraverse(traverser, tree)
+ }
+ }
+
+ //OPT ordered according to frequency to speed it up.
+ override protected def itransform(transformer: Transformer, tree: Tree): Tree = {
+ import transformer._
+ val treeCopy = transformer.treeCopy
+
+ // begin itransform
+ tree match {
+ case Ident(name) =>
+ treeCopy.Ident(tree, name)
+ case Select(qualifier, selector) =>
+ treeCopy.Select(tree, transform(qualifier), selector)
+ case Apply(fun, args) =>
+ treeCopy.Apply(tree, transform(fun), transformTrees(args))
+ case TypeTree() =>
+ treeCopy.TypeTree(tree)
+ case Literal(value) =>
+ treeCopy.Literal(tree, value)
+ case This(qual) =>
+ treeCopy.This(tree, qual)
+ case ValDef(mods, name, tpt, rhs) =>
+ atOwner(tree.symbol) {
+ treeCopy.ValDef(tree, transformModifiers(mods),
+ name, transform(tpt), transform(rhs))
+ }
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ atOwner(tree.symbol) {
+ treeCopy.DefDef(tree, transformModifiers(mods), name,
+ transformTypeDefs(tparams), transformValDefss(vparamss),
+ transform(tpt), transform(rhs))
+ }
+ case Block(stats, expr) =>
+ treeCopy.Block(tree, transformStats(stats, currentOwner), transform(expr))
+ case If(cond, thenp, elsep) =>
+ treeCopy.If(tree, transform(cond), transform(thenp), transform(elsep))
+ case CaseDef(pat, guard, body) =>
+ treeCopy.CaseDef(tree, transform(pat), transform(guard), transform(body))
+ case TypeApply(fun, args) =>
+ treeCopy.TypeApply(tree, transform(fun), transformTrees(args))
+ case AppliedTypeTree(tpt, args) =>
+ treeCopy.AppliedTypeTree(tree, transform(tpt), transformTrees(args))
+ case Bind(name, body) =>
+ treeCopy.Bind(tree, name, transform(body))
+ case Function(vparams, body) =>
+ atOwner(tree.symbol) {
+ treeCopy.Function(tree, transformValDefs(vparams), transform(body))
+ }
+ case Match(selector, cases) =>
+ treeCopy.Match(tree, transform(selector), transformCaseDefs(cases))
+ case New(tpt) =>
+ treeCopy.New(tree, transform(tpt))
+ case Assign(lhs, rhs) =>
+ treeCopy.Assign(tree, transform(lhs), transform(rhs))
+ case AssignOrNamedArg(lhs, rhs) =>
+ treeCopy.AssignOrNamedArg(tree, transform(lhs), transform(rhs))
+ case Try(block, catches, finalizer) =>
+ treeCopy.Try(tree, transform(block), transformCaseDefs(catches), transform(finalizer))
+ case EmptyTree =>
+ tree
+ case Throw(expr) =>
+ treeCopy.Throw(tree, transform(expr))
+ case Super(qual, mix) =>
+ treeCopy.Super(tree, transform(qual), mix)
+ case TypeBoundsTree(lo, hi) =>
+ treeCopy.TypeBoundsTree(tree, transform(lo), transform(hi))
+ case Typed(expr, tpt) =>
+ treeCopy.Typed(tree, transform(expr), transform(tpt))
+ case Import(expr, selectors) =>
+ treeCopy.Import(tree, transform(expr), selectors)
+ case Template(parents, self, body) =>
+ treeCopy.Template(tree, transformTrees(parents), transformValDef(self), transformStats(body, tree.symbol))
+ case ClassDef(mods, name, tparams, impl) =>
+ atOwner(tree.symbol) {
+ treeCopy.ClassDef(tree, transformModifiers(mods), name,
+ transformTypeDefs(tparams), transformTemplate(impl))
+ }
+ case ModuleDef(mods, name, impl) =>
+ atOwner(mclass(tree.symbol)) {
+ treeCopy.ModuleDef(tree, transformModifiers(mods),
+ name, transformTemplate(impl))
+ }
+ case TypeDef(mods, name, tparams, rhs) =>
+ atOwner(tree.symbol) {
+ treeCopy.TypeDef(tree, transformModifiers(mods), name,
+ transformTypeDefs(tparams), transform(rhs))
+ }
+ case LabelDef(name, params, rhs) =>
+ treeCopy.LabelDef(tree, name, transformIdents(params), transform(rhs)) //bq: Martin, once, atOwner(...) works, also change `LamdaLifter.proxy'
+ case PackageDef(pid, stats) =>
+ treeCopy.PackageDef(
+ tree, transform(pid).asInstanceOf[RefTree],
+ atOwner(mclass(tree.symbol)) {
+ transformStats(stats, currentOwner)
+ }
+ )
+ case Annotated(annot, arg) =>
+ treeCopy.Annotated(tree, transform(annot), transform(arg))
+ case SingletonTypeTree(ref) =>
+ treeCopy.SingletonTypeTree(tree, transform(ref))
+ case SelectFromTypeTree(qualifier, selector) =>
+ treeCopy.SelectFromTypeTree(tree, transform(qualifier), selector)
+ case CompoundTypeTree(templ) =>
+ treeCopy.CompoundTypeTree(tree, transformTemplate(templ))
+ case ExistentialTypeTree(tpt, whereClauses) =>
+ treeCopy.ExistentialTypeTree(tree, transform(tpt), transformTrees(whereClauses))
+ case Return(expr) =>
+ treeCopy.Return(tree, transform(expr))
+ case Alternative(trees) =>
+ treeCopy.Alternative(tree, transformTrees(trees))
+ case Star(elem) =>
+ treeCopy.Star(tree, transform(elem))
+ case UnApply(fun, args) =>
+ treeCopy.UnApply(tree, fun, transformTrees(args)) // bq: see test/.../unapplyContexts2.scala
+ case ArrayValue(elemtpt, trees) =>
+ treeCopy.ArrayValue(tree, transform(elemtpt), transformTrees(trees))
+ case ApplyDynamic(qual, args) =>
+ treeCopy.ApplyDynamic(tree, transform(qual), transformTrees(args))
+ case ReferenceToBoxed(idt) =>
+ treeCopy.ReferenceToBoxed(tree, transform(idt) match { case idt1: Ident => idt1 })
+ case _ =>
+ xtransform(transformer, tree)
+ }
+ }
+
+ private def mclass(sym: Symbol) = sym map (_.asModule.moduleClass)
+
+ // --- specific traversers and transformers
+
+ @deprecated("Moved to tree.duplicate", "2.10.0")
+ protected[scala] def duplicateTree(tree: Tree): Tree = tree.duplicate
+
+ class ForeachPartialTreeTraverser(pf: PartialFunction[Tree, Tree]) extends Traverser {
+ override def traverse(tree: Tree) {
+ val t = if (pf isDefinedAt tree) pf(tree) else tree
+ super.traverse(t)
+ }
+ }
+
+ class ChangeOwnerTraverser(val oldowner: Symbol, val newowner: Symbol) extends Traverser {
+ final def change(sym: Symbol) = {
+ if (sym != NoSymbol && sym.owner == oldowner)
+ sym.owner = newowner
+ }
+ override def traverse(tree: Tree) {
+ tree match {
+ case _: Return =>
+ if (tree.symbol == oldowner) {
+ // SI-5612
+ if (newowner hasTransOwner oldowner)
+ log("NOT changing owner of %s because %s is nested in %s".format(tree, newowner, oldowner))
+ else {
+ log("changing owner of %s: %s => %s".format(tree, oldowner, newowner))
+ tree.symbol = newowner
+ }
+ }
+ case _: DefTree | _: Function =>
+ change(tree.symbol)
+ case _ =>
+ }
+ super.traverse(tree)
+ }
+ }
+
+ private class ShallowDuplicator(orig: Tree) extends Transformer {
+ override val treeCopy = newStrictTreeCopier
+ override def transform(tree: Tree) =
+ if (tree eq orig) super.transform(tree)
+ else tree
+ }
+ // Create a readable string describing a substitution.
+ private def substituterString(fromStr: String, toStr: String, from: List[Any], to: List[Any]): String = {
+ "subst[%s, %s](%s)".format(fromStr, toStr, (from, to).zipped map (_ + " -> " + _) mkString ", ")
+ }
+
+ // NOTE: calls shallowDuplicate on trees in `to` to avoid problems when symbols in `from`
+ // occur multiple times in the `tree` passed to `transform`,
+ // otherwise, the resulting Tree would be a graph, not a tree... this breaks all sorts of stuff,
+ // notably concerning the mutable aspects of Trees (such as setting their .tpe)
+ class TreeSubstituter(from: List[Symbol], to: List[Tree]) extends Transformer {
+ override def transform(tree: Tree): Tree = tree match {
+ case Ident(_) =>
+ def subst(from: List[Symbol], to: List[Tree]): Tree =
+ if (from.isEmpty) tree
+ else if (tree.symbol == from.head) to.head.shallowDuplicate // TODO: does it ever make sense *not* to perform a shallowDuplicate on `to.head`?
+ else subst(from.tail, to.tail);
+ subst(from, to)
+ case _ =>
+ super.transform(tree)
+ }
+ override def toString = substituterString("Symbol", "Tree", from, to)
+ }
+
+ /** Substitute clazz.this with `to`. `to` must be an attributed tree.
+ */
+ class ThisSubstituter(clazz: Symbol, to: => Tree) extends Transformer {
+ val newtpe = to.tpe
+ override def transform(tree: Tree) = {
+ if (tree.tpe ne null) tree.tpe = tree.tpe.substThis(clazz, newtpe)
+ tree match {
+ case This(_) if tree.symbol == clazz => to
+ case _ => super.transform(tree)
+ }
+ }
+ }
+
+ class TypeMapTreeSubstituter(val typeMap: TypeMap) extends Traverser {
+ override def traverse(tree: Tree) {
+ if (tree.tpe ne null)
+ tree.tpe = typeMap(tree.tpe)
+ if (tree.isDef)
+ tree.symbol modifyInfo typeMap
+
+ super.traverse(tree)
+ }
+ override def apply[T <: Tree](tree: T): T = super.apply(tree.duplicate)
+ }
+
+ class TreeTypeSubstituter(val from: List[Symbol], val to: List[Type]) extends TypeMapTreeSubstituter(new SubstTypeMap(from, to)) {
+ def isEmpty = from.isEmpty && to.isEmpty
+ override def toString() = "TreeTypeSubstituter("+from+","+to+")"
+ }
+
+ lazy val EmptyTreeTypeSubstituter = new TreeTypeSubstituter(List(), List())
+
+ class TreeSymSubstTraverser(val from: List[Symbol], val to: List[Symbol]) extends TypeMapTreeSubstituter(new SubstSymMap(from, to)) {
+ override def toString() = "TreeSymSubstTraverser/" + substituterString("Symbol", "Symbol", from, to)
+ }
+
+ /** Substitute symbols in `from` with symbols in `to`. Returns a new
+ * tree using the new symbols and whose Ident and Select nodes are
+ * name-consistent with the new symbols.
+ *
+ * Note: This is currently a destructive operation on the original Tree.
+ * Trees currently assigned a symbol in `from` will be assigned the new symbols
+ * without copying, and trees that define symbols with an `info` that refer
+ * a symbol in `from` will have a new type assigned.
+ */
+ class TreeSymSubstituter(from: List[Symbol], to: List[Symbol]) extends Transformer {
+ val symSubst = new SubstSymMap(from, to)
+ override def transform(tree: Tree): Tree = {
+ def subst(from: List[Symbol], to: List[Symbol]) {
+ if (!from.isEmpty)
+ if (tree.symbol == from.head) tree setSymbol to.head
+ else subst(from.tail, to.tail)
+ }
+
+ if (tree.tpe ne null) tree.tpe = symSubst(tree.tpe)
+ if (tree.hasSymbol) {
+ subst(from, to)
+ tree match {
+ case _: DefTree =>
+ val newInfo = symSubst(tree.symbol.info)
+ if (!(newInfo =:= tree.symbol.info)) {
+ debuglog(sm"""
+ |TreeSymSubstituter: updated info of symbol ${tree.symbol}
+ | Old: ${showRaw(tree.symbol.info, printTypes = true, printIds = true)}
+ | New: ${showRaw(newInfo, printTypes = true, printIds = true)}""")
+ tree.symbol updateInfo newInfo
+ }
+ case _ =>
+ // no special handling is required for Function or Import nodes here.
+ // as they don't have interesting infos attached to their symbols.
+ // Subsitution of the referenced symbol of Return nodes is handled
+ // in .ChangeOwnerTraverser
+ }
+ tree match {
+ case Ident(name0) if tree.symbol != NoSymbol =>
+ treeCopy.Ident(tree, tree.symbol.name)
+ case Select(qual, name0) if tree.symbol != NoSymbol =>
+ treeCopy.Select(tree, transform(qual), tree.symbol.name)
+ case _ =>
+ super.transform(tree)
+ }
+ } else
+ super.transform(tree)
+ }
+ def apply[T <: Tree](tree: T): T = transform(tree).asInstanceOf[T]
+ override def toString() = "TreeSymSubstituter/" + substituterString("Symbol", "Symbol", from, to)
+ }
+
+
+ class ForeachTreeTraverser(f: Tree => Unit) extends Traverser {
+ override def traverse(t: Tree) {
+ f(t)
+ super.traverse(t)
+ }
+ }
+
+ class FilterTreeTraverser(p: Tree => Boolean) extends Traverser {
+ val hits = new ListBuffer[Tree]
+ override def traverse(t: Tree) {
+ if (p(t)) hits += t
+ super.traverse(t)
+ }
+ }
+
+ class CollectTreeTraverser[T](pf: PartialFunction[Tree, T]) extends Traverser {
+ val results = new ListBuffer[T]
+ override def traverse(t: Tree) {
+ if (pf.isDefinedAt(t)) results += pf(t)
+ super.traverse(t)
+ }
+ }
+
+ class FindTreeTraverser(p: Tree => Boolean) extends Traverser {
+ var result: Option[Tree] = None
+ override def traverse(t: Tree) {
+ if (result.isEmpty) {
+ if (p(t)) result = Some(t)
+ super.traverse(t)
+ }
+ }
+ }
+
+ trait TreeStackTraverser extends Traverser {
+ import collection.mutable
+ val path: mutable.Stack[Tree] = mutable.Stack()
+ abstract override def traverse(t: Tree) = {
+ path push t
+ try super.traverse(t) finally path.pop()
+ }
+ }
+
+ private lazy val duplicator = new Duplicator(focusPositions = true)
+ private class Duplicator(focusPositions: Boolean) extends Transformer {
+ override val treeCopy = newStrictTreeCopier
+ override def transform(t: Tree) = {
+ val t1 = super.transform(t)
+ if ((t1 ne t) && t1.pos.isRange && focusPositions) t1 setPos t.pos.focus
+ t1
+ }
+ }
+
+ def duplicateAndKeepPositions(tree: Tree) = new Duplicator(focusPositions = false) transform tree
+
+ // ------ copiers -------------------------------------------
+
+ def copyDefDef(tree: Tree)(
+ mods: Modifiers = null,
+ name: Name = null,
+ tparams: List[TypeDef] = null,
+ vparamss: List[List[ValDef]] = null,
+ tpt: Tree = null,
+ rhs: Tree = null
+ ): DefDef = tree match {
+ case DefDef(mods0, name0, tparams0, vparamss0, tpt0, rhs0) =>
+ treeCopy.DefDef(tree,
+ if (mods eq null) mods0 else mods,
+ if (name eq null) name0 else name,
+ if (tparams eq null) tparams0 else tparams,
+ if (vparamss eq null) vparamss0 else vparamss,
+ if (tpt eq null) tpt0 else tpt,
+ if (rhs eq null) rhs0 else rhs
+ )
+ case t =>
+ sys.error("Not a DefDef: " + t + "/" + t.getClass)
+ }
+ def copyValDef(tree: Tree)(
+ mods: Modifiers = null,
+ name: Name = null,
+ tpt: Tree = null,
+ rhs: Tree = null
+ ): ValDef = tree match {
+ case ValDef(mods0, name0, tpt0, rhs0) =>
+ treeCopy.ValDef(tree,
+ if (mods eq null) mods0 else mods,
+ if (name eq null) name0 else name,
+ if (tpt eq null) tpt0 else tpt,
+ if (rhs eq null) rhs0 else rhs
+ )
+ case t =>
+ sys.error("Not a ValDef: " + t + "/" + t.getClass)
+ }
+ def copyClassDef(tree: Tree)(
+ mods: Modifiers = null,
+ name: Name = null,
+ tparams: List[TypeDef] = null,
+ impl: Template = null
+ ): ClassDef = tree match {
+ case ClassDef(mods0, name0, tparams0, impl0) =>
+ treeCopy.ClassDef(tree,
+ if (mods eq null) mods0 else mods,
+ if (name eq null) name0 else name,
+ if (tparams eq null) tparams0 else tparams,
+ if (impl eq null) impl0 else impl
+ )
+ case t =>
+ sys.error("Not a ClassDef: " + t + "/" + t.getClass)
+ }
+
+ def deriveDefDef(ddef: Tree)(applyToRhs: Tree => Tree): DefDef = ddef match {
+ case DefDef(mods0, name0, tparams0, vparamss0, tpt0, rhs0) =>
+ treeCopy.DefDef(ddef, mods0, name0, tparams0, vparamss0, tpt0, applyToRhs(rhs0))
+ case t =>
+ sys.error("Not a DefDef: " + t + "/" + t.getClass)
+ }
+ def deriveValDef(vdef: Tree)(applyToRhs: Tree => Tree): ValDef = vdef match {
+ case ValDef(mods0, name0, tpt0, rhs0) =>
+ treeCopy.ValDef(vdef, mods0, name0, tpt0, applyToRhs(rhs0))
+ case t =>
+ sys.error("Not a ValDef: " + t + "/" + t.getClass)
+ }
+ def deriveTemplate(templ: Tree)(applyToBody: List[Tree] => List[Tree]): Template = templ match {
+ case Template(parents0, self0, body0) =>
+ treeCopy.Template(templ, parents0, self0, applyToBody(body0))
+ case t =>
+ sys.error("Not a Template: " + t + "/" + t.getClass)
+ }
+ def deriveClassDef(cdef: Tree)(applyToImpl: Template => Template): ClassDef = cdef match {
+ case ClassDef(mods0, name0, tparams0, impl0) =>
+ treeCopy.ClassDef(cdef, mods0, name0, tparams0, applyToImpl(impl0))
+ case t =>
+ sys.error("Not a ClassDef: " + t + "/" + t.getClass)
+ }
+ def deriveModuleDef(mdef: Tree)(applyToImpl: Template => Template): ModuleDef = mdef match {
+ case ModuleDef(mods0, name0, impl0) =>
+ treeCopy.ModuleDef(mdef, mods0, name0, applyToImpl(impl0))
+ case t =>
+ sys.error("Not a ModuleDef: " + t + "/" + t.getClass)
+ }
+ def deriveCaseDef(cdef: Tree)(applyToBody: Tree => Tree): CaseDef = cdef match {
+ case CaseDef(pat0, guard0, body0) =>
+ treeCopy.CaseDef(cdef, pat0, guard0, applyToBody(body0))
+ case t =>
+ sys.error("Not a CaseDef: " + t + "/" + t.getClass)
+ }
+ def deriveLabelDef(ldef: Tree)(applyToRhs: Tree => Tree): LabelDef = ldef match {
+ case LabelDef(name0, params0, rhs0) =>
+ treeCopy.LabelDef(ldef, name0, params0, applyToRhs(rhs0))
+ case t =>
+ sys.error("Not a LabelDef: " + t + "/" + t.getClass)
+ }
+
+// -------------- Classtags --------------------------------------------------------
+
+ implicit val TreeTag = ClassTag[Tree](classOf[Tree])
+ implicit val TermTreeTag = ClassTag[TermTree](classOf[TermTree])
+ implicit val TypTreeTag = ClassTag[TypTree](classOf[TypTree])
+ implicit val SymTreeTag = ClassTag[SymTree](classOf[SymTree])
+ implicit val NameTreeTag = ClassTag[NameTree](classOf[NameTree])
+ implicit val RefTreeTag = ClassTag[RefTree](classOf[RefTree])
+ implicit val DefTreeTag = ClassTag[DefTree](classOf[DefTree])
+ implicit val MemberDefTag = ClassTag[MemberDef](classOf[MemberDef])
+ implicit val PackageDefTag = ClassTag[PackageDef](classOf[PackageDef])
+ implicit val ImplDefTag = ClassTag[ImplDef](classOf[ImplDef])
+ implicit val ClassDefTag = ClassTag[ClassDef](classOf[ClassDef])
+ implicit val ModuleDefTag = ClassTag[ModuleDef](classOf[ModuleDef])
+ implicit val ValOrDefDefTag = ClassTag[ValOrDefDef](classOf[ValOrDefDef])
+ implicit val ValDefTag = ClassTag[ValDef](classOf[ValDef])
+ implicit val DefDefTag = ClassTag[DefDef](classOf[DefDef])
+ implicit val TypeDefTag = ClassTag[TypeDef](classOf[TypeDef])
+ implicit val LabelDefTag = ClassTag[LabelDef](classOf[LabelDef])
+ implicit val ImportSelectorTag = ClassTag[ImportSelector](classOf[ImportSelector])
+ implicit val ImportTag = ClassTag[Import](classOf[Import])
+ implicit val TemplateTag = ClassTag[Template](classOf[Template])
+ implicit val BlockTag = ClassTag[Block](classOf[Block])
+ implicit val CaseDefTag = ClassTag[CaseDef](classOf[CaseDef])
+ implicit val AlternativeTag = ClassTag[Alternative](classOf[Alternative])
+ implicit val StarTag = ClassTag[Star](classOf[Star])
+ implicit val BindTag = ClassTag[Bind](classOf[Bind])
+ implicit val UnApplyTag = ClassTag[UnApply](classOf[UnApply])
+ implicit val FunctionTag = ClassTag[Function](classOf[Function])
+ implicit val AssignTag = ClassTag[Assign](classOf[Assign])
+ implicit val AssignOrNamedArgTag = ClassTag[AssignOrNamedArg](classOf[AssignOrNamedArg])
+ implicit val IfTag = ClassTag[If](classOf[If])
+ implicit val MatchTag = ClassTag[Match](classOf[Match])
+ implicit val ReturnTag = ClassTag[Return](classOf[Return])
+ implicit val TryTag = ClassTag[Try](classOf[Try])
+ implicit val ThrowTag = ClassTag[Throw](classOf[Throw])
+ implicit val NewTag = ClassTag[New](classOf[New])
+ implicit val TypedTag = ClassTag[Typed](classOf[Typed])
+ implicit val GenericApplyTag = ClassTag[GenericApply](classOf[GenericApply])
+ implicit val TypeApplyTag = ClassTag[TypeApply](classOf[TypeApply])
+ implicit val ApplyTag = ClassTag[Apply](classOf[Apply])
+ implicit val SuperTag = ClassTag[Super](classOf[Super])
+ implicit val ThisTag = ClassTag[This](classOf[This])
+ implicit val SelectTag = ClassTag[Select](classOf[Select])
+ implicit val IdentTag = ClassTag[Ident](classOf[Ident])
+ implicit val ReferenceToBoxedTag = ClassTag[ReferenceToBoxed](classOf[ReferenceToBoxed])
+ implicit val LiteralTag = ClassTag[Literal](classOf[Literal])
+ implicit val AnnotatedTag = ClassTag[Annotated](classOf[Annotated])
+ implicit val SingletonTypeTreeTag = ClassTag[SingletonTypeTree](classOf[SingletonTypeTree])
+ implicit val SelectFromTypeTreeTag = ClassTag[SelectFromTypeTree](classOf[SelectFromTypeTree])
+ implicit val CompoundTypeTreeTag = ClassTag[CompoundTypeTree](classOf[CompoundTypeTree])
+ implicit val AppliedTypeTreeTag = ClassTag[AppliedTypeTree](classOf[AppliedTypeTree])
+ implicit val TypeBoundsTreeTag = ClassTag[TypeBoundsTree](classOf[TypeBoundsTree])
+ implicit val ExistentialTypeTreeTag = ClassTag[ExistentialTypeTree](classOf[ExistentialTypeTree])
+ implicit val TypeTreeTag = ClassTag[TypeTree](classOf[TypeTree])
+
+ val treeNodeCount = Statistics.newView("#created tree nodes")(nodeCount)
+}
+
+object TreesStats {
+ // statistics
+ val nodeByType = Statistics.newByClass("#created tree nodes by type")(Statistics.newCounter(""))
+}
diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala
new file mode 100644
index 0000000..68b4fa6
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala
@@ -0,0 +1,71 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.reflect
+package internal
+
+trait TypeDebugging {
+ self: SymbolTable =>
+
+ import definitions._
+
+ // @M toString that is safe during debugging (does not normalize, ...)
+ object typeDebug {
+ private def to_s(x: Any): String = x match {
+ // otherwise case classes are caught looking like products
+ case _: Tree | _: Type => "" + x
+ case x: TraversableOnce[_] => x mkString ", "
+ case x: Product => x.productIterator mkString ("(", ", ", ")")
+ case _ => "" + x
+ }
+ def ptIndent(x: Any) = ("" + x).replaceAll("\\n", " ")
+ def ptBlock(label: String, pairs: (String, Any)*): String = {
+ if (pairs.isEmpty) label + "{ }"
+ else {
+ val width = (pairs map (_._1.length)).max
+ val fmt = "%-" + (width + 1) + "s %s"
+ val strs = pairs map { case (k, v) => fmt.format(k, to_s(v)) }
+
+ strs.mkString(label + " {\n ", "\n ", "\n}")
+ }
+ }
+ def ptLine(label: String, pairs: (String, Any)*): String = {
+ val strs = pairs map { case (k, v) => k + "=" + to_s(v) }
+ strs.mkString(label + ": ", ", ", "")
+ }
+ def ptTree(t: Tree) = t match {
+ case PackageDef(pid, _) => "package " + pid
+ case ModuleDef(_, name, _) => "object " + name
+ case ClassDef(_, name, tparams, _) => "class " + name + str.brackets(tparams)
+ case _ => to_s(t)
+ }
+
+ object str {
+ def parentheses(xs: List[_]): String = xs.mkString("(", ", ", ")")
+ def brackets(xs: List[_]): String = if (xs.isEmpty) "" else xs.mkString("[", ", ", "]")
+ def tparams(tparams: List[Type]): String = brackets(tparams map debug)
+ def parents(ps: List[Type]): String = (ps map debug).mkString(" with ")
+ def refine(defs: Scope): String = defs.toList.mkString("{", " ;\n ", "}")
+ }
+
+ private def debug(tp: Type): String = tp match {
+ case TypeRef(pre, sym, args) => debug(pre) + "." + sym.nameString + str.tparams(args)
+ case ThisType(sym) => sym.nameString + ".this"
+ case SingleType(pre, sym) => debug(pre) +"."+ sym.nameString +".type"
+ case RefinedType(parents, defs) => str.parents(parents) + str.refine(defs)
+ case ClassInfoType(parents, defs, clazz) => "class "+ clazz.nameString + str.parents(parents) + str.refine(defs)
+ case PolyType(tparams, result) => str.brackets(tparams) + " " + debug(result)
+ case TypeBounds(lo, hi) => ">: "+ debug(lo) +" <: "+ debug(hi)
+ case tv @ TypeVar(_, _) => tv.toString
+ case ExistentialType(tparams, qtpe) => "forSome "+ str.brackets(tparams) + " " + debug(qtpe)
+ case _ => "?"+tp.getClass.getName+"?"//tp.toString might produce cyclic error...
+ }
+ def debugString(tp: Type) = debug(tp)
+ }
+ def paramString(tp: Type) = typeDebug.str parentheses (tp.params map (_.defString))
+ def typeParamsString(tp: Type) = typeDebug.str brackets (tp.typeParams map (_.defString))
+ def typeArgsString(tp: Type) = typeDebug.str brackets (tp.typeArgs map (_.safeToString))
+ def debugString(tp: Type) = typeDebug debugString tp
+}
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
new file mode 100644
index 0000000..2f49995
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -0,0 +1,7435 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+
+import scala.collection.{ mutable, immutable, generic }
+import generic.Clearable
+import scala.ref.WeakReference
+import mutable.ListBuffer
+import Flags._
+import scala.util.control.ControlThrowable
+import scala.annotation.tailrec
+import util.Statistics
+import scala.runtime.ObjectRef
+import util.ThreeValues._
+
+/* A standard type pattern match:
+ case ErrorType =>
+ // internal: error
+ case WildcardType =>
+ // internal: unknown
+ case BoundedWildcardType(bounds) =>
+ // internal: unknown
+ case NoType =>
+ case NoPrefix =>
+ case ThisType(sym) =>
+ // sym.this.type
+ case SuperType(thistpe, supertpe) =>
+ // super references
+ case SingleType(pre, sym) =>
+ // pre.sym.type
+ case ConstantType(value) =>
+ // Int(2)
+ case TypeRef(pre, sym, args) =>
+ // pre.sym[targs]
+ // Outer.this.C would be represented as TypeRef(ThisType(Outer), C, List())
+ case RefinedType(parents, defs) =>
+ // parent1 with ... with parentn { defs }
+ case ExistentialType(tparams, result) =>
+ // result forSome { tparams }
+ case AnnotatedType(annots, tp, selfsym) =>
+ // tp @annots
+
+ // the following are non-value types; you cannot write them down in Scala source.
+
+ case TypeBounds(lo, hi) =>
+ // >: lo <: hi
+ case ClassInfoType(parents, defs, clazz) =>
+ // same as RefinedType except as body of class
+ case MethodType(paramtypes, result) =>
+ // (paramtypes)result
+ // For instance def m(): T is represented as MethodType(List(), T)
+ case NullaryMethodType(result) => // eliminated by uncurry
+ // an eval-by-name type
+ // For instance def m: T is represented as NullaryMethodType(T)
+ case PolyType(tparams, result) =>
+ // [tparams]result where result is a (Nullary)MethodType or ClassInfoType
+
+ // The remaining types are not used after phase `typer`.
+ case OverloadedType(pre, tparams, alts) =>
+ // all alternatives of an overloaded ident
+ case AntiPolyType(pre, targs) =>
+ // rarely used, disappears when combined with a PolyType
+ case TypeVar(inst, constr) =>
+ // a type variable
+ // Replace occurrences of type parameters with type vars, where
+ // inst is the instantiation and constr is a list of bounds.
+ case DeBruijnIndex(level, index, args)
+ // for dependent method types: a type referring to a method parameter.
+ case ErasedValueType(tref)
+ // only used during erasure of derived value classes.
+*/
+
+trait Types extends api.Types { self: SymbolTable =>
+ import definitions._
+ import TypesStats._
+
+ private var explainSwitch = false
+ private final val emptySymbolSet = immutable.Set.empty[Symbol]
+
+ private final val LogPendingSubTypesThreshold = 50
+ private final val LogPendingBaseTypesThreshold = 50
+ private final val LogVolatileThreshold = 50
+
+ /** A don't care value for the depth parameter in lubs/glbs and related operations. */
+ private final val AnyDepth = -3
+
+ /** Decrement depth unless it is a don't care. */
+ private final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1
+
+ private final val printLubs = sys.props contains "scalac.debug.lub"
+ private final val traceTypeVars = sys.props contains "scalac.debug.tvar"
+ /** In case anyone wants to turn off lub verification without reverting anything. */
+ private final val verifyLubs = true
+ /** In case anyone wants to turn off type parameter bounds being used
+ * to seed type constraints.
+ */
+ private final val propagateParameterBoundsToTypeVars = sys.props contains "scalac.debug.prop-constraints"
+
+ protected val enableTypeVarExperimentals = settings.Xexperimental.value
+
+ /** Empty immutable maps to avoid allocations. */
+ private val emptySymMap = immutable.Map[Symbol, Symbol]()
+ private val emptySymCount = immutable.Map[Symbol, Int]()
+
+ /** The current skolemization level, needed for the algorithms
+ * in isSameType, isSubType that do constraint solving under a prefix.
+ */
+ var skolemizationLevel = 0
+
+ /** A log of type variable with their original constraints. Used in order
+ * to undo constraints in the case of isSubType/isSameType failure.
+ */
+ lazy val undoLog = newUndoLog
+
+ protected def newUndoLog = new UndoLog
+
+ class UndoLog extends Clearable {
+ private type UndoPairs = List[(TypeVar, TypeConstraint)]
+ //OPT this method is public so we can do `manual inlining`
+ var log: UndoPairs = List()
+
+ /*
+ * These two methods provide explicit locking mechanism that is overridden in SynchronizedUndoLog.
+ *
+ * The idea behind explicit locking mechanism is that all public methods that access mutable state
+ * will have to obtain the lock for their entire execution so both reads and writes can be kept in
+ * right order. Originally, that was achieved by overriding those public methods in
+ * `SynchronizedUndoLog` which was fine but expensive. The reason is that those public methods take
+ * thunk as argument and if we keep them non-final there's no way to make them inlined so thunks
+ * can go away.
+ *
+ * By using explicit locking we can achieve inlining.
+ *
+ * NOTE: They are made public for now so we can apply 'manual inlining' (copy&pasting into hot
+ * places implementation of `undo` or `undoUnless`). This should be changed back to protected
+ * once inliner is fixed.
+ */
+ def lock(): Unit = ()
+ def unlock(): Unit = ()
+
+ // register with the auto-clearing cache manager
+ perRunCaches.recordCache(this)
+
+ /** Undo all changes to constraints to type variables upto `limit`. */
+ //OPT this method is public so we can do `manual inlining`
+ def undoTo(limit: UndoPairs) {
+ assertCorrectThread()
+ while ((log ne limit) && log.nonEmpty) {
+ val (tv, constr) = log.head
+ tv.constr = constr
+ log = log.tail
+ }
+ }
+
+ /** No sync necessary, because record should only
+ * be called from within a undo or undoUnless block,
+ * which is already synchronized.
+ */
+ private[reflect] def record(tv: TypeVar) = {
+ log ::= ((tv, tv.constr.cloneInternal))
+ }
+
+ def clear() {
+ lock()
+ try {
+ if (settings.debug.value)
+ self.log("Clearing " + log.size + " entries from the undoLog.")
+ log = Nil
+ } finally unlock()
+ }
+ def size = {
+ lock()
+ try log.size finally unlock()
+ }
+
+ // `block` should not affect constraints on typevars
+ def undo[T](block: => T): T = {
+ lock()
+ try {
+ val before = log
+
+ try block
+ finally undoTo(before)
+ } finally unlock()
+ }
+
+ // if `block` evaluates to false, it should not affect constraints on typevars
+ def undoUnless(block: => Boolean): Boolean = {
+ lock()
+ try {
+ val before = log
+ var result = false
+
+ try result = block
+ finally if (!result) undoTo(before)
+
+ result
+ } finally unlock()
+ }
+ }
+
+ /** A map from lists to compound types that have the given list as parents.
+ * This is used to avoid duplication in the computation of base type sequences and baseClasses.
+ * It makes use of the fact that these two operations depend only on the parents,
+ * not on the refinement.
+ */
+ val intersectionWitness = perRunCaches.newWeakMap[List[Type], WeakReference[Type]]()
+
+ /** A proxy for a type (identified by field `underlying`) that forwards most
+ * operations to it (for exceptions, see WrappingProxy, which forwards even more operations).
+ * every operation that is overridden for some kind of types should be forwarded.
+ */
+ trait SimpleTypeProxy extends Type {
+ def underlying: Type
+
+ // the following operations + those in RewrappingTypeProxy are all operations
+ // in class Type that are overridden in some subclass
+ // Important to keep this up-to-date when new operations are added!
+ override def isTrivial = underlying.isTrivial
+ override def isHigherKinded: Boolean = underlying.isHigherKinded
+ override def typeConstructor: Type = underlying.typeConstructor
+ override def isNotNull = underlying.isNotNull
+ override def isError = underlying.isError
+ override def isErroneous = underlying.isErroneous
+ override def isStable: Boolean = underlying.isStable
+ override def isVolatile = underlying.isVolatile
+ override def finalResultType = underlying.finalResultType
+ override def paramSectionCount = underlying.paramSectionCount
+ override def paramss = underlying.paramss
+ override def params = underlying.params
+ override def paramTypes = underlying.paramTypes
+ override def termSymbol = underlying.termSymbol
+ override def termSymbolDirect = underlying.termSymbolDirect
+ override def typeParams = underlying.typeParams
+ override def boundSyms = underlying.boundSyms
+ override def typeSymbol = underlying.typeSymbol
+ override def typeSymbolDirect = underlying.typeSymbolDirect
+ override def widen = underlying.widen
+ override def typeOfThis = underlying.typeOfThis
+ override def bounds = underlying.bounds
+ override def parents = underlying.parents
+ override def prefix = underlying.prefix
+ override def decls = underlying.decls
+ override def baseType(clazz: Symbol) = underlying.baseType(clazz)
+ override def baseTypeSeq = underlying.baseTypeSeq
+ override def baseTypeSeqDepth = underlying.baseTypeSeqDepth
+ override def baseClasses = underlying.baseClasses
+ }
+
+ /** A proxy for a type (identified by field `underlying`) that forwards most
+ * operations to it. Every operation that is overridden for some kind of types is
+ * forwarded here. Some operations are rewrapped again.
+ */
+ trait RewrappingTypeProxy extends SimpleTypeProxy {
+ protected def maybeRewrap(newtp: Type) = if (newtp eq underlying) this else rewrap(newtp)
+ protected def rewrap(newtp: Type): Type
+
+ // the following are all operations in class Type that are overridden in some subclass
+ // Important to keep this up-to-date when new operations are added!
+ override def widen = maybeRewrap(underlying.widen)
+ override def narrow = underlying.narrow
+ override def deconst = maybeRewrap(underlying.deconst)
+ override def resultType = maybeRewrap(underlying.resultType)
+ override def resultType(actuals: List[Type]) = maybeRewrap(underlying.resultType(actuals))
+ override def finalResultType = maybeRewrap(underlying.finalResultType)
+ override def paramSectionCount = 0
+ override def paramss: List[List[Symbol]] = List()
+ override def params: List[Symbol] = List()
+ override def paramTypes: List[Type] = List()
+ override def typeArgs = underlying.typeArgs
+ override def notNull = maybeRewrap(underlying.notNull)
+ override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = underlying.instantiateTypeParams(formals, actuals)
+ override def skolemizeExistential(owner: Symbol, origin: AnyRef) = underlying.skolemizeExistential(owner, origin)
+ override def normalize = maybeRewrap(underlying.normalize)
+ override def dealias = maybeRewrap(underlying.dealias)
+ override def cloneInfo(owner: Symbol) = maybeRewrap(underlying.cloneInfo(owner))
+ override def atOwner(owner: Symbol) = maybeRewrap(underlying.atOwner(owner))
+ override def prefixString = underlying.prefixString
+ override def isComplete = underlying.isComplete
+ override def complete(sym: Symbol) = underlying.complete(sym)
+ override def load(sym: Symbol) { underlying.load(sym) }
+ override def withAnnotations(annots: List[AnnotationInfo]) = maybeRewrap(underlying.withAnnotations(annots))
+ override def withoutAnnotations = maybeRewrap(underlying.withoutAnnotations)
+ }
+
+ case object UnmappableTree extends TermTree {
+ override def toString = "<unmappable>"
+ super.tpe_=(NoType)
+ override def tpe_=(t: Type) = if (t != NoType) {
+ throw new UnsupportedOperationException("tpe_=("+t+") inapplicable for <empty>")
+ }
+ }
+
+ abstract class TypeApiImpl extends TypeApi { this: Type =>
+ def declaration(name: Name): Symbol = decl(name)
+ def nonPrivateDeclaration(name: Name): Symbol = nonPrivateDecl(name)
+ def declarations = decls
+ def typeArguments = typeArgs
+ def erasure = this match {
+ case ConstantType(value) => widen.erasure
+ case _ =>
+ var result: Type = transformedType(this)
+ result = result.normalize match { // necessary to deal with erasures of HK types, typeConstructor won't work
+ case PolyType(undets, underlying) => existentialAbstraction(undets, underlying) // we don't want undets in the result
+ case _ => result
+ }
+ // erasure screws up all ThisTypes for modules into PackageTypeRefs
+ // we need to unscrew them, or certain typechecks will fail mysteriously
+ // http://groups.google.com/group/scala-internals/browse_thread/thread/6d3277ae21b6d581
+ result = result.map(tpe => tpe match {
+ case tpe: PackageTypeRef => ThisType(tpe.sym)
+ case _ => tpe
+ })
+ result
+ }
+ def substituteSymbols(from: List[Symbol], to: List[Symbol]): Type = substSym(from, to)
+ def substituteTypes(from: List[Symbol], to: List[Type]): Type = subst(from, to)
+
+ // the only thingies that we want to splice are: 1) type parameters, 2) abstract type members
+ // the thingies that we don't want to splice are: 1) concrete types (obviously), 2) existential skolems
+ def isSpliceable = {
+ this.isInstanceOf[TypeRef] && typeSymbol.isAbstractType && !typeSymbol.isExistential
+ }
+ }
+
+ /** Same as a call to narrow unless existentials are visible
+ * after widening the type. In that case, narrow from the widened
+ * type instead of the proxy. This gives buried existentials a
+ * chance to make peace with the other types. See SI-5330.
+ */
+ private def narrowForFindMember(tp: Type): Type = {
+ val w = tp.widen
+ // Only narrow on widened type when we have to -- narrow is expensive unless the target is a singleton type.
+ if ((tp ne w) && containsExistential(w)) w.narrow
+ else tp.narrow
+ }
+
+ /** The base class for all types */
+ abstract class Type extends TypeApiImpl with Annotatable[Type] {
+ /** Types for which asSeenFrom always is the identity, no matter what
+ * prefix or owner.
+ */
+ def isTrivial: Boolean = false
+
+ /** Is this type higher-kinded, i.e., is it a type constructor @M */
+ def isHigherKinded: Boolean = false
+ def takesTypeArgs: Boolean = this.isHigherKinded
+
+ /** Does this type denote a stable reference (i.e. singleton type)? */
+ def isStable: Boolean = false
+
+ /** Is this type dangerous (i.e. it might contain conflicting
+ * type information when empty, so that it can be constructed
+ * so that type unsoundness results.) A dangerous type has an underlying
+ * type of the form T_1 with T_n { decls }, where one of the
+ * T_i (i > 1) is an abstract type.
+ */
+ def isVolatile: Boolean = false
+
+ /** Is this type guaranteed not to have `null` as a value? */
+ def isNotNull: Boolean = false
+
+ /** Is this type a structural refinement type (it ''refines'' members that have not been inherited) */
+ def isStructuralRefinement: Boolean = false
+
+ /** Does this type depend immediately on an enclosing method parameter?
+ * I.e., is it a singleton type whose termSymbol refers to an argument of the symbol's owner (which is a method)?
+ */
+ def isImmediatelyDependent: Boolean = false
+
+ /** Is this type a dependent method type? */
+ def isDependentMethodType: Boolean = false
+
+ /** True for WildcardType or BoundedWildcardType. */
+ def isWildcard = false
+
+ /** Is this type produced as a repair for an error? */
+ def isError: Boolean = typeSymbol.isError || termSymbol.isError
+
+ /** Is this type produced as a repair for an error? */
+ def isErroneous: Boolean = ErroneousCollector.collect(this)
+
+ /** Does this type denote a reference type which can be null? */
+ // def isNullable: Boolean = false
+
+ /** Can this type only be subtyped by bottom types?
+ * This is assessed to be the case if the class is final,
+ * and all type parameters (if any) are invariant.
+ */
+ def isFinalType: Boolean =
+ typeSymbol.isFinal && (typeSymbol.typeParams forall symbolIsNonVariant) && prefix.isStable
+
+ /** Is this type completed (i.e. not a lazy type)? */
+ def isComplete: Boolean = true
+
+ /** If this is a lazy type, assign a new type to `sym`. */
+ def complete(sym: Symbol) {}
+
+ /** The term symbol associated with the type
+ * Note that the symbol of the normalized type is returned (@see normalize)
+ */
+ def termSymbol: Symbol = NoSymbol
+
+ /** The type symbol associated with the type
+ * Note that the symbol of the normalized type is returned (@see normalize)
+ * A type's typeSymbol should if possible not be inspected directly, due to
+ * the likelihood that what is true for tp.typeSymbol is not true for
+ * tp.sym, due to normalization.
+ */
+ def typeSymbol: Symbol = NoSymbol
+
+ /** The term symbol ''directly'' associated with the type.
+ */
+ def termSymbolDirect: Symbol = termSymbol
+
+ /** The type symbol ''directly'' associated with the type.
+ * In other words, no normalization is performed: if this is an alias type,
+ * the symbol returned is that of the alias, not the underlying type.
+ */
+ def typeSymbolDirect: Symbol = typeSymbol
+
+ /** The base type underlying a type proxy, identity on all other types */
+ def underlying: Type = this
+
+ /** Widen from singleton type to its underlying non-singleton
+ * base type by applying one or more `underlying` dereferences,
+ * identity for all other types.
+ *
+ * class Outer { class C ; val x: C }
+ * val o: Outer
+ * <o.x.type>.widen = o.C
+ */
+ def widen: Type = this
+
+ /** Map a constant type or not-null-type to its underlying base type,
+ * identity for all other types.
+ */
+ def deconst: Type = this
+
+ /** The type of `this` of a class type or reference type. */
+ def typeOfThis: Type = typeSymbol.typeOfThis
+
+ /** Map to a singleton type which is a subtype of this type.
+ * The fallback implemented here gives
+ * T.narrow = T' forSome { type T' <: T with Singleton }
+ * Overridden where we know more about where types come from.
+ */
+ /*
+ Note: this implementation of narrow is theoretically superior to the one
+ in use below, but imposed a significant performance penalty. It was in trunk
+ from svn r24960 through r25080.
+ */
+ /*
+ def narrow: Type =
+ if (phase.erasedTypes) this
+ else commonOwner(this) freshExistential ".type" setInfo singletonBounds(this) tpe
+ */
+
+ /** Map to a singleton type which is a subtype of this type.
+ * The fallback implemented here gives:
+ * {{{
+ * T.narrow = (T {}).this.type
+ * }}}
+ * Overridden where we know more about where types come from.
+ */
+ def narrow: Type =
+ if (phase.erasedTypes) this
+ else {
+ val cowner = commonOwner(this)
+ refinedType(this :: Nil, cowner, EmptyScope, cowner.pos).narrow
+ }
+
+ /** For a TypeBounds type, itself;
+ * for a reference denoting an abstract type, its bounds,
+ * for all other types, a TypeBounds type all of whose bounds are this type.
+ */
+ def bounds: TypeBounds = TypeBounds(this, this)
+
+ /** For a class or intersection type, its parents.
+ * For a TypeBounds type, the parents of its hi bound.
+ * inherited by typerefs, singleton types, and refinement types,
+ * The empty list for all other types */
+ def parents: List[Type] = List()
+
+ /** For a class with nonEmpty parents, the first parent.
+ * Otherwise some specific fixed top type.
+ */
+ def firstParent = if (parents.nonEmpty) parents.head else ObjectClass.tpe
+
+ /** For a typeref or single-type, the prefix of the normalized type (@see normalize).
+ * NoType for all other types. */
+ def prefix: Type = NoType
+
+ /** A chain of all typeref or singletype prefixes of this type, longest first.
+ * (Only used from safeToString.)
+ */
+ def prefixChain: List[Type] = this match {
+ case TypeRef(pre, _, _) => pre :: pre.prefixChain
+ case SingleType(pre, _) => pre :: pre.prefixChain
+ case _ => List()
+ }
+
+ /** This type, without its type arguments @M */
+ def typeConstructor: Type = this
+
+ /** For a typeref, its arguments. The empty list for all other types */
+ def typeArgs: List[Type] = List()
+
+ /** A list of placeholder types derived from the type parameters.
+ * Used by RefinedType and TypeRef.
+ */
+ protected def dummyArgs: List[Type] = typeParams map (_.typeConstructor)
+
+ /** For a (nullary) method or poly type, its direct result type,
+ * the type itself for all other types. */
+ def resultType: Type = this
+
+ def resultType(actuals: List[Type]) = this
+
+ /** Only used for dependent method types. */
+ def resultApprox: Type = ApproximateDependentMap(resultType)
+
+ /** If this is a TypeRef `clazz`[`T`], return the argument `T`
+ * otherwise return this type
+ */
+ def remove(clazz: Symbol): Type = this
+
+ /** For a curried/nullary method or poly type its non-method result type,
+ * the type itself for all other types */
+ def finalResultType: Type = this
+
+ /** For a method type, the number of its value parameter sections,
+ * 0 for all other types */
+ def paramSectionCount: Int = 0
+
+ /** For a method or poly type, a list of its value parameter sections,
+ * the empty list for all other types */
+ def paramss: List[List[Symbol]] = List()
+
+ /** For a method or poly type, its first value parameter section,
+ * the empty list for all other types */
+ def params: List[Symbol] = List()
+
+ /** For a method or poly type, the types of its first value parameter section,
+ * the empty list for all other types */
+ def paramTypes: List[Type] = List()
+
+ /** For a (potentially wrapped) poly type, its type parameters,
+ * the empty list for all other types */
+ def typeParams: List[Symbol] = List()
+
+ /** For a (potentially wrapped) poly or existential type, its bound symbols,
+ * the empty list for all other types */
+ def boundSyms: immutable.Set[Symbol] = emptySymbolSet
+
+ /** Mixin a NotNull trait unless type already has one
+ * ...if the option is given, since it is causing typing bugs.
+ */
+ def notNull: Type =
+ if (!settings.Ynotnull.value || isNotNull || phase.erasedTypes) this
+ else NotNullType(this)
+
+ /** Replace formal type parameter symbols with actual type arguments.
+ *
+ * Amounts to substitution except for higher-kinded types. (See overridden method in TypeRef) -- @M
+ */
+ def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
+ if (sameLength(formals, actuals)) this.subst(formals, actuals) else ErrorType
+
+ /** If this type is an existential, turn all existentially bound variables to type skolems.
+ * @param owner The owner of the created type skolems
+ * @param origin The tree whose type was an existential for which the skolem was created.
+ */
+ def skolemizeExistential(owner: Symbol, origin: AnyRef): Type = this
+
+ /** A simple version of skolemizeExistential for situations where
+ * owner or unpack location do not matter (typically used in subtype tests)
+ */
+ def skolemizeExistential: Type = skolemizeExistential(NoSymbol, null)
+
+ /** Reduce to beta eta-long normal form.
+ * Expands type aliases and converts higher-kinded TypeRefs to PolyTypes.
+ * Functions on types are also implemented as PolyTypes.
+ *
+ * Example: (in the below, `<List>` is the type constructor of List)
+ * TypeRef(pre, `<List>`, List()) is replaced by
+ * PolyType(X, TypeRef(pre, `<List>`, List(X)))
+ */
+ def normalize = this // @MAT
+
+ /** Expands type aliases. */
+ def dealias = this
+
+ /** Repeatedly apply widen and dealias until they have no effect.
+ * This compensates for the fact that type aliases can hide beneath
+ * singleton types and singleton types can hide inside type aliases.
+ */
+ def dealiasWiden: Type = (
+ if (this ne widen) widen.dealiasWiden
+ else if (this ne dealias) dealias.dealiasWiden
+ else this
+ )
+
+ /** All the types encountered in the course of dealiasing/widening,
+ * including each intermediate beta reduction step (whereas calling
+ * dealias applies as many as possible.)
+ */
+ // NOTE(review): unlike dealiasWiden above, this uses betaReduce (exactly one
+ // step) so that every intermediate beta reduction is recorded in the chain.
+ def dealiasWidenChain: List[Type] = this :: (
+ if (this ne widen) widen.dealiasWidenChain
+ else if (this ne betaReduce) betaReduce.dealiasWidenChain
+ else Nil
+ )
+
+ /** Eta expansion of this type; the identity by default. */
+ def etaExpand: Type = this
+
+ /** Performs a single step of beta-reduction on types.
+ * Given:
+ *
+ * type C[T] = B[T]
+ * type B[T] = A
+ * class A
+ *
+ * The following will happen after `betaReduce` is invoked:
+ * TypeRef(pre, <C>, List(Int)) is replaced by
+ * TypeRef(pre, <B>, List(Int))
+ *
+ * Unlike `dealias`, which recursively applies beta reduction, until it's stuck,
+ * `betaReduce` performs exactly one step and then returns.
+ */
+ def betaReduce: Type = this
+
+ /** For a classtype or refined type, its defined or declared members;
+ * inherited by subtypes and typerefs.
+ * The empty scope for all other types.
+ */
+ def decls: Scope = EmptyScope
+
+ /** The defined or declared members with name `name` in this type;
+ * an OverloadedSymbol if several exist, NoSymbol if none exist.
+ * Alternatives of overloaded symbol appear in the order they are declared.
+ */
+ def decl(name: Name): Symbol = findDecl(name, 0)
+
+ /** A list of all non-private members defined or declared in this type. */
+ def nonPrivateDecls: List[Symbol] = decls.filterNot(_.isPrivate).toList
+
+ /** The non-private defined or declared members with name `name` in this type;
+ * an OverloadedSymbol if several exist, NoSymbol if none exist.
+ * Alternatives of overloaded symbol appear in the order they are declared.
+ */
+ def nonPrivateDecl(name: Name): Symbol = findDecl(name, PRIVATE)
+
+ /** A list of all members of this type (defined or inherited)
+ * Members appear in linearization order of their owners.
+ * Members with the same owner appear in reverse order of their declarations.
+ */
+ def members: Scope = membersBasedOnFlags(0, 0)
+
+ /** A list of all non-private members of this type (defined or inherited) */
+ def nonPrivateMembers: Scope = membersBasedOnFlags(BridgeAndPrivateFlags, 0)
+
+ /** A list of all non-private members of this type (defined or inherited),
+ * admitting members with given flags `admit`
+ */
+ def nonPrivateMembersAdmitting(admit: Long): Scope = membersBasedOnFlags(BridgeAndPrivateFlags & ~admit, 0)
+
+ /** A list of all implicit symbols of this type (defined or inherited) */
+ def implicitMembers: Scope = membersBasedOnFlags(BridgeFlags, IMPLICIT)
+
+ /** A list of all deferred symbols of this type (defined or inherited) */
+ def deferredMembers: Scope = membersBasedOnFlags(BridgeFlags, DEFERRED)
+
+ /** The member with given name,
+ * an OverloadedSymbol if several exist, NoSymbol if none exist */
+ def member(name: Name): Symbol =
+ memberBasedOnName(name, BridgeFlags)
+
+ /** The non-private member with given name,
+ * an OverloadedSymbol if several exist, NoSymbol if none exist.
+ * Bridges are excluded from the result
+ */
+ def nonPrivateMember(name: Name): Symbol =
+ memberBasedOnName(name, BridgeAndPrivateFlags)
+
+ /** All members with the given flags, excluding bridges.
+ */
+ def membersWithFlags(requiredFlags: Long): Scope =
+ membersBasedOnFlags(BridgeFlags, requiredFlags)
+
+ /** All non-private members with the given flags, excluding bridges.
+ */
+ def nonPrivateMembersWithFlags(requiredFlags: Long): Scope =
+ membersBasedOnFlags(BridgeAndPrivateFlags, requiredFlags)
+
+ /** The non-private member with given name, admitting members with given flags `admit`.
+ * "Admitting" refers to the fact that members with a PRIVATE, BRIDGE, or VBRIDGE
+ * flag are usually excluded from findMember results, but supplying any of those flags
+ * to this method disables that exclusion.
+ *
+ * An OverloadedSymbol if several exist, NoSymbol if none exists.
+ */
+ def nonPrivateMemberAdmitting(name: Name, admit: Long): Symbol =
+ memberBasedOnName(name, BridgeAndPrivateFlags & ~admit)
+
+ /** The non-local member with given name,
+ * an OverloadedSymbol if several exist, NoSymbol if none exist */
+ def nonLocalMember(name: Name): Symbol =
+ memberBasedOnName(name, BridgeFlags | LOCAL)
+
+ /** Members excluding and requiring the given flags.
+ * Note: unfortunately it doesn't work to exclude DEFERRED this way.
+ */
+ def membersBasedOnFlags(excludedFlags: Long, requiredFlags: Long): Scope =
+ findMembers(excludedFlags, requiredFlags)
+// findMember(nme.ANYNAME, excludedFlags, requiredFlags, false).alternatives
+
+ /** The member with given name that does not carry any of `excludedFlags`;
+ * an OverloadedSymbol if several exist, NoSymbol if none exist.
+ */
+ def memberBasedOnName(name: Name, excludedFlags: Long): Symbol =
+ findMember(name, excludedFlags, 0, false)
+
+ /** The least type instance of given class which is a supertype
+ * of this type. Example:
+ * class D[T]
+ * class C extends p.D[Int]
+ * ThisType(C).baseType(D) = p.D[Int]
+ */
+ def baseType(clazz: Symbol): Type = NoType
+
+ /** This type as seen from prefix `pre` and class `clazz`. This means:
+ * Replace all thistypes of `clazz` or one of its subclasses
+ * by `pre` and instantiate all parameters by arguments of `pre`.
+ * Proceed analogously for thistypes referring to outer classes.
+ *
+ * Example:
+ * class D[T] { def m: T }
+ * class C extends p.D[Int]
+ * T.asSeenFrom(ThisType(C), D) (where D is owner of m)
+ * = Int
+ */
+ def asSeenFrom(pre: Type, clazz: Symbol): Type = {
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, asSeenFromNanos) else null
+ try {
+ // fast path: no substitution is needed for trivial types, for erased
+ // non-array types, or when the prefix can be skipped entirely
+ val trivial = (
+ this.isTrivial
+ || phase.erasedTypes && pre.typeSymbol != ArrayClass
+ || skipPrefixOf(pre, clazz)
+ )
+ if (trivial) this
+ else {
+ val m = new AsSeenFromMap(pre.normalize, clazz)
+ val tp = m(this)
+ val tp1 = existentialAbstraction(m.capturedParams, tp)
+
+ if (m.capturedSkolems.isEmpty) tp1
+ else deriveType(m.capturedSkolems, _.cloneSymbol setFlag CAPTURED)(tp1)
+ }
+ } finally if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ }
+
+ /** The info of `sym`, seen as a member of this type.
+ *
+ * Example:
+ * {{{
+ * class D[T] { def m: T }
+ * class C extends p.D[Int]
+ * ThisType(C).memberType(m) = Int
+ * }}}
+ */
+ def memberInfo(sym: Symbol): Type = {
+ sym.info.asSeenFrom(this, sym.owner)
+ }
+
+ /** The type of `sym`, seen as a member of this type. */
+ // NOTE(review): MethodSymbols delegate to typeAsMemberOf (presumably cached
+ // on the symbol -- not visible here); all other symbols go through
+ // computeMemberType below.
+ def memberType(sym: Symbol): Type = sym match {
+ case meth: MethodSymbol =>
+ meth.typeAsMemberOf(this)
+ case _ =>
+ computeMemberType(sym)
+ }
+
+ def computeMemberType(sym: Symbol): Type = sym.tpeHK match { //@M don't prematurely instantiate higher-kinded types, they will be instantiated by transform, typedTypeApply, etc. when really necessary
+ case OverloadedType(_, alts) =>
+ OverloadedType(this, alts)
+ case tp =>
+ tp.asSeenFrom(this, sym.owner)
+ }
+
+ /** Substitute types `to` for occurrences of references to
+ * symbols `from` in this type.
+ */
+ def subst(from: List[Symbol], to: List[Type]): Type =
+ if (from.isEmpty) this
+ else new SubstTypeMap(from, to) apply this
+
+ /** Substitute symbols `to` for occurrences of symbols `from` in this type.
+ *
+ * !!! NOTE !!!: If you need to do a substThis and a substSym, the substThis has to come
+ * first, as otherwise symbols will immediately get rebound in typeRef to the old
+ * symbol.
+ */
+ def substSym(from: List[Symbol], to: List[Symbol]): Type =
+ if ((from eq to) || from.isEmpty) this
+ else new SubstSymMap(from, to) apply this
+
+ /** Substitute all occurrences of `ThisType(from)` in this type by `to`.
+ *
+ * !!! NOTE !!!: If you need to do a substThis and a substSym, the substThis has to come
+ * first, as otherwise symbols will immediately get rebound in typeRef to the old
+ * symbol.
+ */
+ def substThis(from: Symbol, to: Type): Type =
+ new SubstThisMap(from, to) apply this
+ def substThis(from: Symbol, to: Symbol): Type =
+ substThis(from, to.thisType)
+
+ /** Performs both substThis and substSym, in that order.
+ *
+ * [JZ] Reverted `SubstThisAndSymMap` from 334872, which was not the same as
+ * `substThis(from, to).substSym(symsFrom, symsTo)`.
+ *
+ * `SubstThisAndSymMap` performs a breadth-first map over this type, which meant that
+ * symbol substitution occurred before `ThisType` substitution. Consequently, in substitution
+ * of a `SingleType(ThisType(`from`), sym), symbols were rebound to `from` rather than `to`.
+ */
+ def substThisAndSym(from: Symbol, to: Type, symsFrom: List[Symbol], symsTo: List[Symbol]): Type =
+ if (symsFrom eq symsTo) substThis(from, to)
+ else substThis(from, to).substSym(symsFrom, symsTo)
+
+ /** Returns all parts of this type which satisfy predicate `p` */
+ def filter(p: Type => Boolean): List[Type] = new FilterTypeCollector(p) collect this
+ def withFilter(p: Type => Boolean) = new FilterMapForeach(p)
+
+ /** Adapter so that `for`-comprehensions over a filtered type work:
+ * supplies map/foreach on top of the filtering collector.
+ */
+ class FilterMapForeach(p: Type => Boolean) extends FilterTypeCollector(p){
+ def foreach[U](f: Type => U): Unit = collect(Type.this) foreach f
+ def map[T](f: Type => T): List[T] = collect(Type.this) map f
+ }
+
+ /** Returns optionally first type (in a preorder traversal) which satisfies predicate `p`,
+ * or None if none exists.
+ */
+ def find(p: Type => Boolean): Option[Type] = new FindTypeCollector(p).collect(this)
+
+ /** Apply `f` to each part of this type */
+ def foreach(f: Type => Unit) { new ForEachTypeTraverser(f).traverse(this) }
+
+ /** Apply `pf` to each part of this type on which the function is defined */
+ def collect[T](pf: PartialFunction[Type, T]): List[T] = new CollectTypeCollector(pf).collect(this)
+
+ /** Apply `f` to each part of this type; children get mapped before their parents */
+ def map(f: Type => Type): Type = new TypeMap {
+ def apply(x: Type) = f(mapOver(x))
+ } apply this
+
+ /** Is there part of this type which satisfies predicate `p`? */
+ def exists(p: Type => Boolean): Boolean = !find(p).isEmpty
+
+ /** Does this type contain a reference to this symbol? */
+ def contains(sym: Symbol): Boolean = new ContainsCollector(sym).collect(this)
+
+ /** Does this type contain a reference to this type */
+ def containsTp(tp: Type): Boolean = new ContainsTypeCollector(tp).collect(this)
+
+ /** Is this type a subtype of that type? */
+ def <:<(that: Type): Boolean = {
+ if (Statistics.canEnable) stat_<:<(that)
+ else {
+ (this eq that) ||
+ (if (explainSwitch) explain("<:", isSubType, this, that)
+ else isSubType(this, that, AnyDepth))
+ }
+ }
+
+ /** Is this type a subtype of that type in a pattern context?
+ * Any type arguments on the right hand side are replaced with
+ * fresh existentials, except for Arrays.
+ *
+ * See bug1434.scala for an example of code which would fail
+ * if only a <:< test were applied.
+ */
+ def matchesPattern(that: Type): Boolean = {
+ (this <:< that) || ((this, that) match {
+ case (TypeRef(_, ArrayClass, List(arg1)), TypeRef(_, ArrayClass, List(arg2))) if arg2.typeSymbol.typeParams.nonEmpty =>
+ arg1 matchesPattern arg2
+ case (_, TypeRef(_, _, args)) =>
+ val newtp = existentialAbstraction(args map (_.typeSymbol), that)
+ !(that =:= newtp) && (this <:< newtp)
+ case _ =>
+ false
+ })
+ }
+
+ /** Same subtype check as `<:<`, additionally recording subtype statistics. */
+ def stat_<:<(that: Type): Boolean = {
+ if (Statistics.canEnable) Statistics.incCounter(subtypeCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, subtypeNanos) else null
+ val result =
+ (this eq that) ||
+ (if (explainSwitch) explain("<:", isSubType, this, that)
+ else isSubType(this, that, AnyDepth))
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ result
+ }
+
+ /** Is this type a weak subtype of that type? True also for numeric types, i.e. Int weak_<:< Long.
+ */
+ def weak_<:<(that: Type): Boolean = {
+ if (Statistics.canEnable) Statistics.incCounter(subtypeCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, subtypeNanos) else null
+ val result =
+ ((this eq that) ||
+ (if (explainSwitch) explain("weak_<:", isWeakSubType, this, that)
+ else isWeakSubType(this, that)))
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ result
+ }
+
+ /** Is this type equivalent to that type? */
+ def =:=(that: Type): Boolean = (
+ (this eq that) ||
+ (if (explainSwitch) explain("=", isSameType, this, that)
+ else isSameType(this, that))
+ );
+
+ /** Does this type implement symbol `sym` with same or stronger type? */
+ def specializes(sym: Symbol): Boolean =
+ if (explainSwitch) explain("specializes", specializesSym, this, sym)
+ else specializesSym(this, sym)
+
+ /** Is this type close enough to that type so that members
+ * with the two type would override each other?
+ * This means:
+ * - Either both types are polytypes with the same number of
+ * type parameters and their result types match after renaming
+ * corresponding type parameters
+ * - Or both types are (nullary) method types with equivalent type parameter types
+ * and matching result types
+ * - Or both types are equivalent
+ * - Or phase.erasedTypes is false and both types are neither method nor
+ * poly types.
+ */
+ def matches(that: Type): Boolean = matchesType(this, that, !phase.erasedTypes)
+
+ /** Same as matches, except that non-method types are always assumed to match. */
+ def looselyMatches(that: Type): Boolean = matchesType(this, that, true)
+
+ /** The shortest sorted upwards closed array of types that contains
+ * this type as first element.
+ *
+ * A list or array of types ts is upwards closed if
+ *
+ * for all t in ts:
+ * for all typerefs p.s[args] such that t <: p.s[args]
+ * there exists a typeref p'.s[args'] in ts such that
+ * t <: p'.s[args'] <: p.s[args],
+ *
+ * and
+ *
+ * for all singleton types p.s such that t <: p.s
+ * there exists a singleton type p'.s in ts such that
+ * t <: p'.s <: p.s
+ *
+ * Sorting is with respect to Symbol.isLess() on type symbols.
+ */
+ def baseTypeSeq: BaseTypeSeq = baseTypeSingletonSeq(this)
+
+ /** The maximum depth (@see typeDepth)
+ * of each type in the BaseTypeSeq of this type except the first.
+ */
+ def baseTypeSeqDepth: Int = 1
+
+ /** The list of all baseclasses of this type (including its own typeSymbol)
+ * in reverse linearization order, starting with the class itself and ending
+ * in class Any.
+ */
+ def baseClasses: List[Symbol] = List()
+
+ /**
+ * @param sym the class symbol
+ * @return the index of given class symbol in the BaseTypeSeq of this type,
+ * or -1 if no base type with given class symbol exists.
+ */
+ def baseTypeIndex(sym: Symbol): Int = {
+ val bts = baseTypeSeq
+ var lo = 0
+ var hi = bts.length - 1
+ // binary search: the base type sequence is sorted w.r.t. Symbol.isLess
+ while (lo <= hi) {
+ val mid = (lo + hi) / 2
+ val btssym = bts.typeSymbol(mid)
+ if (sym == btssym) return mid
+ else if (sym isLess btssym) hi = mid - 1
+ else if (btssym isLess sym) lo = mid + 1
+ else abort()
+ }
+ -1
+ }
+
+ /** If this is a poly- or methodtype, a copy with cloned type / value parameters
+ * owned by `owner`. Identity for all other types.
+ */
+ def cloneInfo(owner: Symbol) = this
+
+ /** Make sure this type is correct as the info of given owner; clone it if not. */
+ def atOwner(owner: Symbol) = this
+
+ protected def objectPrefix = "object "
+ protected def packagePrefix = "package "
+ /** Strips a leading "object " or "package " from `str`. */
+ def trimPrefix(str: String) = str stripPrefix objectPrefix stripPrefix packagePrefix
+
+ /** The string representation of this type used as a prefix */
+ def prefixString = trimPrefix(toString) + "#"
+
+ /** Convert toString avoiding infinite recursions by cutting off
+ * after `maxTostringRecursions` recursion levels. Uses `safeToString`
+ * to produce a string on each level.
+ */
+ override def toString: String = typeToString(this)
+
+ /** Method to be implemented in subclasses.
+ * Converts this type to a string in calling toString for its parts.
+ */
+ def safeToString: String = super.toString
+
+ /** The string representation of this type, with singletypes explained. */
+ def toLongString = {
+ val str = toString
+ if (str == "type") widen.toString
+ else if ((str endsWith ".type") && !typeSymbol.isModuleClass)
+ widen match {
+ case RefinedType(_, _) => "" + widen
+ case _ => s"$str (with underlying type $widen)"
+ }
+ else str
+ }
+
+ /** The string representation of this type when the direct object in a sentence.
+ * Normally this is no different from the regular representation, but modules
+ * read better as "object Foo" here and "Foo.type" the rest of the time.
+ */
+ def directObjectString = safeToString
+
+ /** A test whether a type contains any unification type variables.
+ * Overridden with custom logic except where trivially true.
+ */
+ def isGround: Boolean = this match {
+ case ThisType(_) | NoPrefix | WildcardType | NoType | ErrorType | ConstantType(_) =>
+ true
+ case _ =>
+ typeVarToOriginMap(this) eq this
+ }
+
+ /** If this is a symbol loader type, load and assign a new type to `sym`. */
+ def load(sym: Symbol) {}
+
+ /** Collects every declaration named `name` that carries none of
+ * `excludedFlags`; several hits are folded into an overloaded symbol.
+ */
+ private def findDecl(name: Name, excludedFlags: Int): Symbol = {
+ var alts: List[Symbol] = List()
+ var sym: Symbol = NoSymbol
+ var e: ScopeEntry = decls.lookupEntry(name)
+ while (e ne null) {
+ if (!e.sym.hasFlag(excludedFlags)) {
+ if (sym == NoSymbol) sym = e.sym
+ else {
+ if (alts.isEmpty) alts = sym :: Nil
+ alts = e.sym :: alts
+ }
+ }
+ e = decls.lookupNextEntry(e)
+ }
+ if (alts.isEmpty) sym
+ else (baseClasses.head.newOverloaded(this, alts))
+ }
+
+ /** All members of this type matching the flag criteria, collected into a Scope.
+ * Walks the base classes in linearization order; a second pass (continue)
+ * picks up DEFERRED members not already overridden by a concrete one.
+ */
+ def findMembers(excludedFlags: Long, requiredFlags: Long): Scope = {
+ // if this type contains type variables, put them to sleep for a while -- don't just wipe them out by
+ // replacing them by the corresponding type parameter, as that messes up (e.g.) type variables in type refinements
+ // without this, the matchesType call would lead to type variables on both sides
+ // of a subtyping/equality judgement, which can lead to recursive types being constructed.
+ // See (t0851) for a situation where this happens.
+ val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
+
+ if (Statistics.canEnable) Statistics.incCounter(findMembersCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMembersNanos) else null
+
+ //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
+ var members: Scope = null
+ var required = requiredFlags
+ var excluded = excludedFlags | DEFERRED
+ var continue = true
+ var self: Type = null
+ while (continue) {
+ continue = false
+ val bcs0 = baseClasses
+ var bcs = bcs0
+ while (!bcs.isEmpty) {
+ val decls = bcs.head.info.decls
+ var entry = decls.elems
+ while (entry ne null) {
+ val sym = entry.sym
+ val flags = sym.flags
+ if ((flags & required) == required) {
+ val excl = flags & excluded
+ if (excl == 0L &&
+ (// omit PRIVATE LOCALS unless selector class is contained in class owning the def.
+ (bcs eq bcs0) ||
+ (flags & PrivateLocal) != PrivateLocal ||
+ (bcs0.head.hasTransOwner(bcs.head)))) {
+ if (members eq null) members = newFindMemberScope
+ var others: ScopeEntry = members.lookupEntry(sym.name)
+ var symtpe: Type = null
+ while ((others ne null) && {
+ val other = others.sym
+ (other ne sym) &&
+ ((other.owner eq sym.owner) ||
+ (flags & PRIVATE) != 0 || {
+ if (self eq null) self = narrowForFindMember(this)
+ if (symtpe eq null) symtpe = self.memberType(sym)
+ !(self.memberType(other) matches symtpe)
+ })}) {
+ others = members lookupNextEntry others
+ }
+ if (others eq null) members enter sym
+ } else if (excl == DEFERRED) {
+ continue = true
+ }
+ }
+ entry = entry.next
+ } // while (entry ne null)
+ // excluded = excluded | LOCAL
+ bcs = bcs.tail
+ } // while (!bcs.isEmpty)
+ required |= DEFERRED
+ excluded &= ~(DEFERRED.toLong)
+ } // while (continue)
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ // reawaken any type variables that were suspended on entry
+ if (suspension ne null) suspension foreach (_.suspended = false)
+ if (members eq null) EmptyScope else members
+ }
+
+ /**
+ * Find member(s) in this type. If several members matching criteria are found, they are
+ * returned in an OverloadedSymbol
+ *
+ * @param name The member's name, where nme.ANYNAME means `unspecified`
+ * @param excludedFlags Returned members do not have these flags
+ * @param requiredFlags Returned members do have these flags
+ * @param stableOnly If set, return only members that are types or stable values
+ */
+ //TODO: use narrow only for modules? (correct? efficiency gain?)
+ def findMember(name: Name, excludedFlags: Long, requiredFlags: Long, stableOnly: Boolean): Symbol = {
+ // if this type contains type variables, put them to sleep for a while -- don't just wipe them out by
+ // replacing them by the corresponding type parameter, as that messes up (e.g.) type variables in type refinements
+ // without this, the matchesType call would lead to type variables on both sides
+ // of a subtyping/equality judgement, which can lead to recursive types being constructed.
+ // See (t0851) for a situation where this happens.
+ val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
+
+ if (Statistics.canEnable) Statistics.incCounter(findMemberCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMemberNanos) else null
+
+ //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
+ var member: Symbol = NoSymbol
+ // members/lastM form a list with a mutable cons tail, so new alternatives
+ // can be appended in O(1); lastM.tl is terminated with Nil before returning
+ var members: List[Symbol] = null
+ var lastM: ::[Symbol] = null
+ var membertpe: Type = null
+ var required = requiredFlags
+ var excluded = excludedFlags | DEFERRED
+ var continue = true
+ var self: Type = null
+
+ while (continue) {
+ continue = false
+ val bcs0 = baseClasses
+ var bcs = bcs0
+ // omit PRIVATE LOCALS unless selector class is contained in class owning the def.
+ def admitPrivateLocal(owner: Symbol): Boolean = {
+ val selectorClass = this match {
+ case tt: ThisType => tt.sym // SI-7507 the first base class is not necessarily the selector class.
+ case _ => bcs0.head
+ }
+ selectorClass.hasTransOwner(owner)
+ }
+ while (!bcs.isEmpty) {
+ val decls = bcs.head.info.decls
+ var entry = decls.lookupEntry(name)
+ while (entry ne null) {
+ val sym = entry.sym
+ val flags = sym.flags
+ if ((flags & required) == required) {
+ val excl = flags & excluded
+ if (excl == 0L &&
+ (
+ (bcs eq bcs0) ||
+ (flags & PrivateLocal) != PrivateLocal ||
+ admitPrivateLocal(bcs.head))) {
+ if (name.isTypeName || stableOnly && sym.isStable) {
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ if (suspension ne null) suspension foreach (_.suspended = false)
+ return sym
+ } else if (member eq NoSymbol) {
+ member = sym
+ } else if (members eq null) {
+ if ((member ne sym) &&
+ ((member.owner eq sym.owner) ||
+ (flags & PRIVATE) != 0 || {
+ if (self eq null) self = narrowForFindMember(this)
+ if (membertpe eq null) membertpe = self.memberType(member)
+ !(membertpe matches self.memberType(sym))
+ })) {
+ lastM = new ::(sym, null)
+ members = member :: lastM
+ }
+ } else {
+ var others: List[Symbol] = members
+ var symtpe: Type = null
+ while ((others ne null) && {
+ val other = others.head
+ (other ne sym) &&
+ ((other.owner eq sym.owner) ||
+ (flags & PRIVATE) != 0 || {
+ if (self eq null) self = narrowForFindMember(this)
+ if (symtpe eq null) symtpe = self.memberType(sym)
+ !(self.memberType(other) matches symtpe)
+ })}) {
+ others = others.tail
+ }
+ if (others eq null) {
+ val lastM1 = new ::(sym, null)
+ lastM.tl = lastM1
+ lastM = lastM1
+ }
+ }
+ } else if (excl == DEFERRED) {
+ continue = true
+ }
+ }
+ entry = decls lookupNextEntry entry
+ } // while (entry ne null)
+ // excluded = excluded | LOCAL
+ bcs = if (name == nme.CONSTRUCTOR) Nil else bcs.tail
+ } // while (!bcs.isEmpty)
+ required |= DEFERRED
+ excluded &= ~(DEFERRED.toLong)
+ } // while (continue)
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ if (suspension ne null) suspension foreach (_.suspended = false)
+ if (members eq null) {
+ if (member == NoSymbol) if (Statistics.canEnable) Statistics.incCounter(noMemberCount)
+ member
+ } else {
+ if (Statistics.canEnable) Statistics.incCounter(multMemberCount)
+ lastM.tl = Nil
+ baseClasses.head.newOverloaded(this, members)
+ }
+ }
+
+ /** The (existential or otherwise) skolems and existentially quantified variables which are free in this type */
+ // Walks all parts of this type (via foreach); variables bound by a visible
+ // existential are recorded in boundSyms and therefore not reported as free.
+ def skolemsExceptMethodTypeParams: List[Symbol] = {
+ var boundSyms: List[Symbol] = List()
+ var skolems: List[Symbol] = List()
+ for (t <- this) {
+ t match {
+ case ExistentialType(quantified, qtpe) =>
+ boundSyms = boundSyms ::: quantified
+ case TypeRef(_, sym, _) =>
+ if ((sym.isExistentialSkolem || sym.isGADTSkolem) && // treat GADT skolems like existential skolems
+ !((boundSyms contains sym) || (skolems contains sym)))
+ skolems = sym :: skolems
+ case _ =>
+ }
+ }
+ skolems
+ }
+
+ // Implementation of Annotatable for all types but AnnotatedType, which
+ // overrides these.
+ def annotations: List[AnnotationInfo] = Nil
+ def withoutAnnotations: Type = this
+ def filterAnnotations(p: AnnotationInfo => Boolean): Type = this
+ def setAnnotations(annots: List[AnnotationInfo]): Type = annotatedType(annots, this)
+ def withAnnotations(annots: List[AnnotationInfo]): Type = annotatedType(annots, this)
+
+ /** Remove any annotations from this type and from any
+ * types embedded in this type. */
+ def stripAnnotations = StripAnnotationsMap(this)
+
+ /** Set the self symbol of an annotated type, or do nothing
+ * otherwise. */
+ def withSelfsym(sym: Symbol) = this
+
+ /** The selfsym of an annotated type, or NoSymbol of anything else */
+ def selfsym: Symbol = NoSymbol
+
+ /** The kind of this type; used for debugging */
+ def kind: String = "unknown type of class "+getClass()
+ }
+
+// Subclasses ------------------------------------------------------------
+
+ /**
+ * A type that can be passed to unique(..) and be stored in the uniques map.
+ */
+ abstract class UniqueType extends Type with Product {
+ // computed once and cached in a val, since unique types serve as hash keys
+ final override val hashCode = computeHashCode
+ protected def computeHashCode = scala.runtime.ScalaRunTime._hashCode(this)
+ }
+
+ /** A base class for types that defer some operations
+ * to their immediate supertype.
+ */
+ abstract class SubType extends UniqueType {
+ def supertype: Type
+ // every structural query below is forwarded to `supertype`
+ override def parents: List[Type] = supertype.parents
+ override def decls: Scope = supertype.decls
+ override def baseType(clazz: Symbol): Type = supertype.baseType(clazz)
+ override def baseTypeSeq: BaseTypeSeq = supertype.baseTypeSeq
+ override def baseTypeSeqDepth: Int = supertype.baseTypeSeqDepth
+ override def baseClasses: List[Symbol] = supertype.baseClasses
+ override def isNotNull = supertype.isNotNull
+ }
+
+ /** The type `underlying with NotNull`: wraps `underlying`, asserting non-nullness. */
+ case class NotNullType(override val underlying: Type) extends SubType with RewrappingTypeProxy {
+ def supertype = underlying
+ protected def rewrap(newtp: Type): Type = NotNullType(newtp)
+ override def isNotNull: Boolean = true
+ override def notNull = this
+ override def deconst: Type = underlying //todo: needed?
+ override def safeToString: String = underlying.toString + " with NotNull"
+ override def kind = "NotNullType"
+ }
+
+ /** A base class for types that represent a single value
+ * (single-types and this-types).
+ */
+ abstract class SingletonType extends SubType with SimpleTypeProxy {
+ def supertype = underlying
+ override def isTrivial = false
+ override def isStable = true
+ override def isVolatile = underlying.isVolatile
+ override def widen: Type = underlying.widen
+ override def baseTypeSeq: BaseTypeSeq = {
+ if (Statistics.canEnable) Statistics.incCounter(singletonBaseTypeSeqCount)
+ // the singleton itself heads its own base type sequence
+ underlying.baseTypeSeq prepend this
+ }
+ override def isHigherKinded = false // singleton type classifies objects, thus must be kind *
+ override def safeToString: String = {
+ // Avoiding printing Predef.type and scala.package.type as "type",
+ // since in all other cases we omit those prefixes.
+ val pre = underlying.typeSymbol.skipPackageObject
+ if (pre.isOmittablePrefix) pre.fullName + ".type"
+ else prefixString + "type"
+ }
+
+/*
+ override def typeOfThis: Type = typeSymbol.typeOfThis
+ override def bounds: TypeBounds = TypeBounds(this, this)
+ override def prefix: Type = NoType
+ override def typeArgs: List[Type] = List()
+ override def typeParams: List[Symbol] = List()
+*/
+ }
+
+ /** An object representing an erroneous type */
+ case object ErrorType extends Type {
+ // todo see whether we can do without
+ override def isError: Boolean = true
+ override def decls: Scope = new ErrorScope(NoSymbol)
+ // fabricate (and cache in decls) an error symbol for any requested name,
+ // so lookups on an erroneous type never fail outright
+ override def findMember(name: Name, excludedFlags: Long, requiredFlags: Long, stableOnly: Boolean): Symbol = {
+ var sym = decls lookup name
+ if (sym == NoSymbol) {
+ sym = NoSymbol.newErrorSymbol(name)
+ decls enter sym
+ }
+ sym
+ }
+ override def baseType(clazz: Symbol): Type = this
+ override def safeToString: String = "<error>"
+ override def narrow: Type = this
+ // override def isNullable: Boolean = true
+ override def kind = "ErrorType"
+ }
+
+ /** An object representing an unknown type, used during type inference.
+ * If you see WildcardType outside of inference it is almost certainly a bug.
+ */
+ case object WildcardType extends Type {
+ override def isWildcard = true
+ override def safeToString: String = "?"
+ // override def isNullable: Boolean = true
+ override def kind = "WildcardType"
+ }
+ /** BoundedWildcardTypes, used only during type inference, are created in
+ * two places that I can find:
+ *
+ * 1. If the expected type of an expression is an existential type,
+ * its hidden symbols are replaced with bounded wildcards.
+ * 2. When an implicit conversion is being sought based in part on
+ * the name of a method in the converted type, a HasMethodMatching
+ * type is created: a MethodType with parameters typed as
+ * BoundedWildcardTypes.
+ */
+ case class BoundedWildcardType(override val bounds: TypeBounds) extends Type with BoundedWildcardTypeApi {
+ override def isWildcard = true
+ override def safeToString: String = "?" + bounds
+ override def kind = "BoundedWildcardType"
+ }
+
+ /** Extractor/companion for BoundedWildcardType. */
+ object BoundedWildcardType extends BoundedWildcardTypeExtractor
+
+ /** An object representing a non-existing type */
+ case object NoType extends Type {
+ override def isTrivial: Boolean = true
+ override def safeToString: String = "<notype>"
+ // override def isNullable: Boolean = true
+ override def kind = "NoType"
+ }
+
+ /** An object representing a non-existing prefix */
+ case object NoPrefix extends Type {
+ override def isTrivial: Boolean = true
+ override def isStable: Boolean = true
+ // renders as nothing when printed in prefix position
+ override def prefixString = ""
+ override def safeToString: String = "<noprefix>"
+ // override def isNullable: Boolean = true
+ override def kind = "NoPrefixType"
+ }
+
+ /** A class for this-types of the form <sym>.this.type
+ */
+ abstract case class ThisType(sym: Symbol) extends SingletonType with ThisTypeApi {
+ if (!sym.isClass) {
+ // SI-6640 allow StubSymbols to reveal what's missing from the classpath before we trip the assertion.
+ sym.failIfStub()
+ abort(s"ThisType($sym) for sym which is not a class")
+ }
+
+ //assert(sym.isClass && !sym.isModuleClass || sym.isRoot, sym)
+ override def isTrivial: Boolean = sym.isPackageClass
+ override def isNotNull = true
+ override def typeSymbol = sym
+ override def underlying: Type = sym.typeOfThis
+ override def isVolatile = false
+ override def isHigherKinded = sym.isRefinementClass && underlying.isHigherKinded
+ // how (or whether) "this." is rendered depends on the kind of class
+ override def prefixString =
+ if (settings.debug.value) sym.nameString + ".this."
+ else if (sym.isAnonOrRefinementClass) "this."
+ else if (sym.isOmittablePrefix) ""
+ else if (sym.isModuleClass) sym.fullNameString + "."
+ else sym.nameString + ".this."
+ override def safeToString: String =
+ if (sym.isEffectiveRoot) "" + sym.name
+ else super.safeToString
+ override def narrow: Type = this
+ override def kind = "ThisType"
+ }
+
+ final class UniqueThisType(sym: Symbol) extends ThisType(sym) { }
+
+ object ThisType extends ThisTypeExtractor {
+ // before erasure, ThisTypes are hash-consed via unique(...);
+ // afterwards the self type (impl classes) or the symbol's tpe is used
+ def apply(sym: Symbol): Type = (
+ if (!phase.erasedTypes) unique(new UniqueThisType(sym))
+ else if (sym.isImplClass) sym.typeOfThis
+ else sym.tpe
+ )
+ }
+
+ /** A class for singleton types of the form `<prefix>.<sym.name>.type`.
+ * Cannot be created directly; one should always use `singleType` for creation.
+ */
+ abstract case class SingleType(pre: Type, sym: Symbol) extends SingletonType with SingleTypeApi {
+ // Three-valued cache for isTrivial: UNKNOWN until first computed.
+ private var trivial: ThreeValue = UNKNOWN
+ override def isTrivial: Boolean = {
+ if (trivial == UNKNOWN) trivial = fromBoolean(pre.isTrivial)
+ toBoolean(trivial)
+ }
+ override def isGround = sym.isPackageClass || pre.isGround
+
+ // override def isNullable = underlying.isNullable
+ override def isNotNull = underlying.isNotNull
+ // Per-period cache for `underlying`; recomputed by
+ // defineUnderlyingOfSingleType when the compiler period changes.
+ private[reflect] var underlyingCache: Type = NoType
+ private[reflect] var underlyingPeriod = NoPeriod
+ override def underlying: Type = {
+ val cache = underlyingCache
+ if (underlyingPeriod == currentPeriod && cache != null) cache
+ else {
+ defineUnderlyingOfSingleType(this)
+ underlyingCache
+ }
+ }
+
+ // more precise conceptually, but causes cyclic errors: (paramss exists (_ contains sym))
+ override def isImmediatelyDependent = (sym ne NoSymbol) && (sym.owner.isMethod && sym.isValueParameter)
+
+ override def isVolatile : Boolean = underlying.isVolatile && !sym.isStable
+/*
+ override def narrow: Type = {
+ if (phase.erasedTypes) this
+ else {
+ val thissym = refinedType(List(this), sym.owner, EmptyScope).typeSymbol
+ if (sym.owner != NoSymbol) {
+ //Console.println("narrowing module " + sym + thissym.owner);
+ thissym.typeOfThis = this
+ }
+ thissym.thisType
+ }
+ }
+*/
+ // A singleton type is already maximally specific.
+ override def narrow: Type = this
+
+ override def termSymbol = sym
+ override def prefix: Type = pre
+ // Package objects print as their enclosing package's prefix.
+ override def prefixString = (
+ if (sym.skipPackageObject.isOmittablePrefix) ""
+ else if (sym.isPackageObjectOrClass) pre.prefixString
+ else pre.prefixString + sym.nameString + "."
+ )
+ override def kind = "SingleType"
+ }
+
+ final class UniqueSingleType(pre: Type, sym: Symbol) extends SingleType(pre, sym)
+
+ /** Factory/extractor for SingleType. Every instance is interned through
+ * `unique`, so structurally equal singleton types share one object.
+ */
+ object SingleType extends SingleTypeExtractor {
+ def apply(pre: Type, sym: Symbol): Type =
+ unique(new UniqueSingleType(pre, sym))
+ }
+
+ /** (Re)computes `tpe.underlyingCache` for the current compiler period.
+ * The period stamp is written before the cache so that a cyclic
+ * re-entry during the memberType computation does not loop forever.
+ */
+ protected def defineUnderlyingOfSingleType(tpe: SingleType) = {
+ val period = tpe.underlyingPeriod
+ if (period != currentPeriod) {
+ tpe.underlyingPeriod = currentPeriod
+ if (!isValid(period)) {
+ // [Eugene to Paul] needs review
+ tpe.underlyingCache = if (tpe.sym == NoSymbol) ThisType(rootMirror.RootClass) else tpe.pre.memberType(tpe.sym).resultType;
+ assert(tpe.underlyingCache ne tpe, tpe)
+ }
+ }
+ }
+
+ /** The type of a `super` reference: `thistpe` is the type of the enclosing
+ * `this`, `supertpe` the (possibly intersected) supertype being selected.
+ * Created only through `SuperType.apply`.
+ */
+ abstract case class SuperType(thistpe: Type, supertpe: Type) extends SingletonType with SuperTypeApi {
+ // Three-valued cache: trivial iff both constituents are trivial.
+ private var trivial: ThreeValue = UNKNOWN
+ override def isTrivial: Boolean = {
+ if (trivial == UNKNOWN) trivial = fromBoolean(thistpe.isTrivial && supertpe.isTrivial)
+ toBoolean(trivial)
+ }
+ override def isNotNull = true;
+ override def typeSymbol = thistpe.typeSymbol
+ override def underlying = supertpe
+ override def prefix: Type = supertpe.prefix
+ // Render "C.super." where the this-type would have printed "C.this.".
+ override def prefixString = thistpe.prefixString.replaceAll("""\bthis\.$""", "super.")
+ override def narrow: Type = thistpe.narrow
+ override def kind = "SuperType"
+ }
+
+ final class UniqueSuperType(thistp: Type, supertp: Type) extends SuperType(thistp, supertp)
+
+ /** Factory/extractor for SuperType. After erasure the `super` distinction
+ * is meaningless, so the supertype itself is returned un-interned.
+ */
+ object SuperType extends SuperTypeExtractor {
+ def apply(thistp: Type, supertp: Type): Type = {
+ if (phase.erasedTypes) supertp
+ else unique(new UniqueSuperType(thistp, supertp))
+ }
+ }
+
+ /** A class for the bounds of abstract types and type parameters:
+ * `>: lo <: hi`. Created only through `TypeBounds.apply`.
+ */
+ abstract case class TypeBounds(lo: Type, hi: Type) extends SubType with TypeBoundsApi {
+ def supertype = hi
+ override def isTrivial: Boolean = lo.isTrivial && hi.isTrivial
+ override def bounds: TypeBounds = this
+ // A bounds interval contains a type if the type lies between lo and hi;
+ // it contains another interval if that interval is a sub-interval.
+ def containsType(that: Type) = that match {
+ case TypeBounds(_, _) => that <:< this
+ case _ => lo <:< that && that <:< hi
+ }
+ // Default bounds (>: Nothing, <: Any) are omitted when printing.
+ private def lowerString = if (emptyLowerBound) "" else " >: " + lo
+ private def upperString = if (emptyUpperBound) "" else " <: " + hi
+ private def emptyLowerBound = typeIsNothing(lo)
+ private def emptyUpperBound = typeIsAny(hi)
+ def isEmptyBounds = emptyLowerBound && emptyUpperBound
+
+ // override def isNullable: Boolean = NullClass.tpe <:< lo;
+ override def safeToString = lowerString + upperString
+ override def kind = "TypeBoundsType"
+ }
+
+ final class UniqueTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi)
+
+ /** Factory/extractor for TypeBounds. All instances are interned; the
+ * convenience factories fill in the default bounds Nothing/Any.
+ */
+ object TypeBounds extends TypeBoundsExtractor {
+ def empty: TypeBounds = lower(NothingClass.tpe)
+ def upper(hi: Type): TypeBounds = apply(NothingClass.tpe, hi)
+ def lower(lo: Type): TypeBounds = apply(lo, AnyClass.tpe)
+ def apply(lo: Type, hi: Type): TypeBounds =
+ unique(new UniqueTypeBounds(lo, hi)).asInstanceOf[TypeBounds]
+ }
+
+ /** A common base class for intersection types and class types
+ */
+ abstract class CompoundType extends Type {
+
+ // Per-period caches for the base type sequence and the linearized base
+ // classes; filled in by defineBaseTypeSeqOfCompoundType /
+ // defineBaseClassesOfCompoundType below.
+ private[reflect] var baseTypeSeqCache: BaseTypeSeq = _
+ private[reflect] var baseTypeSeqPeriod = NoPeriod
+ private[reflect] var baseClassesCache: List[Symbol] = _
+ private[reflect] var baseClassesPeriod = NoPeriod
+
+ override def baseTypeSeq: BaseTypeSeq = {
+ val cached = baseTypeSeqCache
+ // undetBaseTypeSeq is the sentinel meaning "computation in progress".
+ if (baseTypeSeqPeriod == currentPeriod && cached != null && cached != undetBaseTypeSeq)
+ cached
+ else {
+ defineBaseTypeSeqOfCompoundType(this)
+ if (baseTypeSeqCache eq undetBaseTypeSeq)
+ throw new RecoverableCyclicReference(typeSymbol)
+
+ baseTypeSeqCache
+ }
+ }
+
+ override def baseTypeSeqDepth: Int = baseTypeSeq.maxDepth
+
+ override def baseClasses: List[Symbol] = {
+ val cached = baseClassesCache
+ if (baseClassesPeriod == currentPeriod && cached != null) cached
+ else {
+ defineBaseClassesOfCompoundType(this)
+ if (baseClassesCache eq null)
+ throw new RecoverableCyclicReference(typeSymbol)
+
+ baseClassesCache
+ }
+ }
+
+ /** The slightly less idiomatic use of Options is due to
+ * performance considerations. A version using for comprehensions
+ * might be too slow (this is deemed a hotspot of the type checker).
+ *
+ * See with Martin before changing this method.
+ *
+ * Memoizes `op1` per parent list: the first compound type seen with a
+ * given parent list becomes the witness; later types with the same
+ * parents delegate to `op2(witness)` instead.
+ */
+ def memo[A](op1: => A)(op2: Type => A): A = {
+ def updateCache(): A = {
+ intersectionWitness(parents) = new WeakReference(this)
+ op1
+ }
+
+ intersectionWitness get parents match {
+ case Some(ref) =>
+ ref.get match {
+ case Some(w) => if (w eq this) op1 else op2(w)
+ case None => updateCache()
+ }
+ case None => updateCache()
+ }
+ }
+
+ // Look up `sym` in the base type sequence; NoType when absent.
+ override def baseType(sym: Symbol): Type = {
+ val index = baseTypeIndex(sym)
+ if (index >= 0) baseTypeSeq(index) else NoType
+ }
+
+ override def narrow: Type = typeSymbol.thisType
+ override def isNotNull: Boolean = parents exists typeIsNotNull
+
+ override def isStructuralRefinement: Boolean =
+ typeSymbol.isAnonOrRefinementClass && (decls exists symbolIsPossibleInRefinement)
+
+ // override def isNullable: Boolean =
+ // parents forall (p => p.isNullable && !p.typeSymbol.isAbstractType);
+
+ override def safeToString: String = parentsString(parents) + (
+ (if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
+ fullyInitializeScope(decls).mkString("{", "; ", "}") else "")
+ )
+ }
+
+ /** (Re)computes `tpe.baseTypeSeqCache` for the current period.
+ * Types containing type variables get special treatment: the variables
+ * are temporarily renamed to fresh type params so the base type sequence
+ * can be computed, then mapped back. The sentinel `undetBaseTypeSeq` is
+ * installed before computing so cycles are detectable afterwards.
+ */
+ protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) = {
+ val period = tpe.baseTypeSeqPeriod
+ if (period != currentPeriod) {
+ tpe.baseTypeSeqPeriod = currentPeriod
+ if (!isValidForBaseClasses(period)) {
+ if (tpe.parents exists typeContainsTypeVar) {
+ // rename type vars to fresh type params, take base type sequence of
+ // resulting type, and rename back all the entries in that sequence
+ var tvs = Set[TypeVar]()
+ for (p <- tpe.parents)
+ for (t <- p) t match {
+ case tv: TypeVar => tvs += tv
+ case _ =>
+ }
+ val varToParamMap: Map[Type, Symbol] =
+ mapFrom[TypeVar, Type, Symbol](tvs.toList)(_.origin.typeSymbol.cloneSymbol)
+ val paramToVarMap = varToParamMap map (_.swap)
+ val varToParam = new TypeMap {
+ def apply(tp: Type) = varToParamMap get tp match {
+ case Some(sym) => sym.tpe
+ case _ => mapOver(tp)
+ }
+ }
+ val paramToVar = new TypeMap {
+ def apply(tp: Type) = tp match {
+ case TypeRef(_, tsym, _) if paramToVarMap.isDefinedAt(tsym) => paramToVarMap(tsym)
+ case _ => mapOver(tp)
+ }
+ }
+ val bts = copyRefinedType(tpe.asInstanceOf[RefinedType], tpe.parents map varToParam, varToParam mapOver tpe.decls).baseTypeSeq
+ tpe.baseTypeSeqCache = bts lateMap paramToVar
+ } else {
+ if (Statistics.canEnable) Statistics.incCounter(compoundBaseTypeSeqCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null
+ try {
+ tpe.baseTypeSeqCache = undetBaseTypeSeq
+ tpe.baseTypeSeqCache =
+ if (tpe.typeSymbol.isRefinementClass)
+ tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe)
+ else
+ compoundBaseTypeSeq(tpe)
+ } finally {
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ }
+ // [Martin] suppressing memo-ization solves the problem with "same type after erasure" errors
+ // when compiling with
+ // scalac scala.collection.IterableViewLike.scala scala.collection.IterableLike.scala
+ // I have not yet figured out precisely why this is the case.
+ // My current assumption is that taking memos forces baseTypeSeqs to be computed
+ // at stale types (i.e. the underlying typeSymbol has already another type).
+ // I do not yet see precisely why this would cause a problem, but it looks
+ // fishy in any case.
+ }
+ }
+ }
+ //Console.println("baseTypeSeq(" + typeSymbol + ") = " + baseTypeSeqCache.toList);//DEBUG
+ if (tpe.baseTypeSeqCache eq undetBaseTypeSeq)
+ throw new TypeError("illegal cyclic inheritance involving " + tpe.typeSymbol)
+ }
+
+ /** (Re)computes `tpe.baseClassesCache`: the linearization of the compound
+ * type, built from the first parent's base classes with the mixins'
+ * not-yet-seen base classes merged in front. `null` is installed as a
+ * sentinel before computing so cycles are detectable afterwards.
+ */
+ protected def defineBaseClassesOfCompoundType(tpe: CompoundType) = {
+ def computeBaseClasses: List[Symbol] =
+ if (tpe.parents.isEmpty) List(tpe.typeSymbol)
+ else {
+ //Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG
+ // optimized, since this seems to be performance critical
+ val superclazz = tpe.firstParent
+ var mixins = tpe.parents.tail
+ val sbcs = superclazz.baseClasses
+ var bcs = sbcs
+ // `clazz` is new if it occurs neither in the superclass's base classes
+ // nor in the prefix of `bcs` already contributed by earlier mixins.
+ def isNew(clazz: Symbol): Boolean =
+ superclazz.baseTypeIndex(clazz) < 0 &&
+ { var p = bcs;
+ while ((p ne sbcs) && (p.head != clazz)) p = p.tail;
+ p eq sbcs
+ }
+ while (!mixins.isEmpty) {
+ def addMixinBaseClasses(mbcs: List[Symbol]): List[Symbol] =
+ if (mbcs.isEmpty) bcs
+ else if (isNew(mbcs.head)) mbcs.head :: addMixinBaseClasses(mbcs.tail)
+ else addMixinBaseClasses(mbcs.tail)
+ bcs = addMixinBaseClasses(mixins.head.baseClasses)
+ mixins = mixins.tail
+ }
+ tpe.typeSymbol :: bcs
+ }
+ val period = tpe.baseClassesPeriod
+ if (period != currentPeriod) {
+ tpe.baseClassesPeriod = currentPeriod
+ if (!isValidForBaseClasses(period)) {
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseClassesNanos) else null
+ try {
+ tpe.baseClassesCache = null
+ tpe.baseClassesCache = tpe.memo(computeBaseClasses)(tpe.typeSymbol :: _.baseClasses.tail)
+ } finally {
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ }
+ }
+ }
+ if (tpe.baseClassesCache eq null)
+ throw new TypeError("illegal cyclic reference involving " + tpe.typeSymbol)
+ }
+
+ /** A class representing intersection types with refinements of the form
+ * `<parents_0> with ... with <parents_n> { decls }`
+ * Cannot be created directly;
+ * one should always use `refinedType` for creation.
+ */
+ case class RefinedType(override val parents: List[Type],
+ override val decls: Scope) extends CompoundType with RefinedTypeApi {
+
+ override def isHigherKinded = (
+ parents.nonEmpty &&
+ (parents forall typeIsHigherKinded) &&
+ !phase.erasedTypes
+ )
+
+ override def typeParams =
+ if (isHigherKinded) firstParent.typeParams
+ else super.typeParams
+
+ //@M may result in an invalid type (references to higher-order args become dangling )
+ override def typeConstructor =
+ copyRefinedType(this, parents map (_.typeConstructor), decls)
+
+ // Lazily computed and cached (no cache at all after erasure).
+ final override def normalize: Type =
+ if (phase.erasedTypes) normalizeImpl
+ else {
+ if (normalized eq null) normalized = normalizeImpl
+ normalized
+ }
+
+ private var normalized: Type = _
+ private def normalizeImpl = {
+ // TODO see comments around def intersectionType and def merge
+ // Flatten nested empty refinements, deduplicate parents, and collapse
+ // a single-parent, empty-decls refinement to the parent itself.
+ def flatten(tps: List[Type]): List[Type] = tps flatMap { case RefinedType(parents, ds) if ds.isEmpty => flatten(parents) case tp => List(tp) }
+ val flattened = flatten(parents).distinct
+ if (decls.isEmpty && hasLength(flattened, 1)) {
+ flattened.head
+ } else if (flattened != parents) {
+ refinedType(flattened, if (typeSymbol eq NoSymbol) NoSymbol else typeSymbol.owner, decls, NoPosition)
+ } else if (isHigherKinded) {
+ // MO to AM: This is probably not correct
+ // If they are several higher-kinded parents with different bounds we need
+ // to take the intersection of their bounds
+ typeFun(
+ typeParams,
+ RefinedType(
+ parents map {
+ case TypeRef(pre, sym, List()) => TypeRef(pre, sym, dummyArgs)
+ case p => p
+ },
+ decls,
+ typeSymbol))
+ } else super.normalize
+ }
+
+ /** A refined type P1 with ... with Pn { decls } is volatile if
+ * one of the parent types Pi is an abstract type, and
+ * either i > 1, or decls or a following parent Pj, j > 1, contributes
+ * an abstract member.
+ * A type contributes an abstract member if it has an abstract member which
+ * is also a member of the whole refined type. A scope `decls` contributes
+ * an abstract member if it has an abstract definition which is also
+ * a member of the whole type.
+ */
+ override def isVolatile = {
+ def isVisible(m: Symbol) =
+ this.nonPrivateMember(m.name).alternatives contains m
+ def contributesAbstractMembers(p: Type) =
+ p.deferredMembers exists isVisible
+
+ ((parents exists (_.isVolatile))
+ ||
+ (parents dropWhile (! _.typeSymbol.isAbstractType) match {
+ case ps @ (_ :: ps1) =>
+ (ps ne parents) ||
+ (ps1 exists contributesAbstractMembers) ||
+ (decls.iterator exists (m => m.isDeferred && isVisible(m)))
+ case _ =>
+ false
+ }))
+ }
+ override def kind = "RefinedType"
+ }
+
+ /** RefinedType variant that pins an explicit owning class symbol. */
+ final class RefinedType0(parents: List[Type], decls: Scope, clazz: Symbol) extends RefinedType(parents, decls) {
+ override def typeSymbol = clazz
+ }
+
+ /** Factory/extractor for RefinedType with an explicit class symbol.
+ * Note: unlike the singleton types above, refined types are NOT interned.
+ */
+ object RefinedType extends RefinedTypeExtractor {
+ def apply(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType =
+ new RefinedType0(parents, decls, clazz)
+ }
+
+ /** Overridden in reflection compiler; hook invoked from the ClassInfoType
+ * constructor, a no-op here.
+ */
+ def validateClassInfo(tp: ClassInfoType) {}
+
+ /** A class representing a class info: the parents, member scope, and
+ * symbol of a class declaration. Most of the body is auxiliary machinery
+ * computing, by fixed-point iteration, which type parameters are reachable
+ * from each type parameter through an expansive reference (used by the
+ * subtyping algorithm to guarantee decidability).
+ */
+ case class ClassInfoType(
+ override val parents: List[Type],
+ override val decls: Scope,
+ override val typeSymbol: Symbol) extends CompoundType with ClassInfoTypeApi
+ {
+ validateClassInfo(this)
+
+ /** refs indices */
+ private final val NonExpansive = 0
+ private final val Expansive = 1
+
+ /** initialization states */
+ private final val UnInitialized = 0
+ private final val Initializing = 1
+ private final val Initialized = 2
+
+ private type RefMap = Map[Symbol, immutable.Set[Symbol]]
+
+ /** All type parameters reachable from given type parameter
+ * by a path which contains at least one expansive reference.
+ * @see Kennedy, Pierce: On Decidability of Nominal Subtyping with Variance
+ */
+ private[scala] def expansiveRefs(tparam: Symbol) = {
+ if (state == UnInitialized) {
+ computeRefs()
+ // Iterate propagate() until the transitive closure is stable.
+ while (state != Initialized) propagate()
+ }
+ getRefs(Expansive, tparam)
+ }
+
+ /* The rest of this class is auxiliary code for `expansiveRefs`
+ */
+
+ /** The type parameters which are referenced type parameters of this class.
+ * Two entries: refs(0): Non-expansive references
+ * refs(1): Expansive references
+ * Syncnote: This var need not be protected with synchronized, because
+ * it is accessed only from expansiveRefs, which is called only from
+ * Typer.
+ */
+ private var refs: Array[RefMap] = _
+
+ /** The initialization state of the class: UnInitialized --> Initializing --> Initialized
+ * Syncnote: This var need not be protected with synchronized, because
+ * it is accessed only from expansiveRefs, which is called only from
+ * Typer.
+ */
+ private var state = UnInitialized
+
+ /** Get references for given type parameter
+ * @param which in {NonExpansive, Expansive}
+ * @param from The type parameter from which references originate.
+ */
+ private def getRefs(which: Int, from: Symbol): Set[Symbol] = refs(which) get from match {
+ case Some(set) => set
+ // NOTE: lowercase `none` is a variable pattern (matches anything left,
+ // i.e. None here) — it is NOT a reference to scala.None.
+ case none => Set()
+ }
+
+ /** Augment existing refs map with reference <pre>from -> to</pre>
+ * @param which <- {NonExpansive, Expansive}
+ */
+ private def addRef(which: Int, from: Symbol, to: Symbol) {
+ refs(which) = refs(which) + (from -> (getRefs(which, from) + to))
+ }
+
+ /** Augment existing refs map with references <pre>from -> sym</pre>, for
+ * all elements <pre>sym</pre> of set `to`.
+ * @param which <- {NonExpansive, Expansive}
+ */
+ private def addRefs(which: Int, from: Symbol, to: Set[Symbol]) {
+ refs(which) = refs(which) + (from -> (getRefs(which, from) ++ to))
+ }
+
+ /** The ClassInfoType which belongs to the class containing given type parameter
+ */
+ private def classInfo(tparam: Symbol): ClassInfoType =
+ tparam.owner.info.resultType match {
+ case ci: ClassInfoType => ci
+ case _ => classInfo(ObjectClass) // something's wrong; fall back to safe value
+ // (this can happen only for erroneous programs).
+ }
+
+ /** TypeMap that records, for the current `tparam`, every (non-)expansive
+ * reference found inside a parent type. A reference is expansive when
+ * `tparam` occurs in an argument without BEING that argument.
+ */
+ private object enterRefs extends TypeMap {
+ private var tparam: Symbol = _
+
+ def apply(tp: Type): Type = {
+ tp match {
+ case tr @ TypeRef(_, sym, args) if args.nonEmpty =>
+ val tparams = tr.initializedTypeParams
+ if (settings.debug.value && !sameLength(tparams, args))
+ debugwarn("Mismatched zip in computeRefs(): " + sym.info.typeParams + ", " + args)
+
+ foreach2(tparams, args) { (tparam1, arg) =>
+ if (arg contains tparam) {
+ addRef(NonExpansive, tparam, tparam1)
+ if (arg.typeSymbol != tparam)
+ addRef(Expansive, tparam, tparam1)
+ }
+ }
+ case _ =>
+ }
+ mapOver(tp)
+ }
+ def enter(tparam0: Symbol, parent: Type) {
+ this.tparam = tparam0
+ this(parent)
+ }
+ }
+
+ /** Compute initial (one-step) references and set state to `Initializing`.
+ */
+ private def computeRefs() {
+ refs = Array(Map(), Map())
+ typeSymbol.typeParams foreach { tparam =>
+ parents foreach { p =>
+ enterRefs.enter(tparam, p)
+ }
+ }
+ state = Initializing
+ }
+
+ /** Propagate to form transitive closure.
+ * Set state to Initialized if no change resulted from propagation.
+ * @return true iff there was a change in last iteration
+ */
+ private def propagate(): Boolean = {
+ if (state == UnInitialized) computeRefs()
+ //Console.println("Propagate "+symbol+", initial expansive = "+refs(Expansive)+", nonexpansive = "+refs(NonExpansive))//DEBUG
+ val lastRefs = Array(refs(0), refs(1))
+ state = Initialized
+ var change = false
+ // Non-expansive chains stay non-expansive; an expansive link anywhere
+ // in a chain makes the whole chain expansive.
+ for ((from, targets) <- refs(NonExpansive).iterator)
+ for (target <- targets) {
+ var thatInfo = classInfo(target)
+ if (thatInfo.state != Initialized)
+ change = change | thatInfo.propagate()
+ addRefs(NonExpansive, from, thatInfo.getRefs(NonExpansive, target))
+ addRefs(Expansive, from, thatInfo.getRefs(Expansive, target))
+ }
+ for ((from, targets) <- refs(Expansive).iterator)
+ for (target <- targets) {
+ var thatInfo = classInfo(target)
+ if (thatInfo.state != Initialized)
+ change = change | thatInfo.propagate()
+ addRefs(Expansive, from, thatInfo.getRefs(NonExpansive, target))
+ }
+ change = change || refs(0) != lastRefs(0) || refs(1) != lastRefs(1)
+ if (change) state = Initializing
+ //else Console.println("Propagate "+symbol+", final expansive = "+refs(Expansive)+", nonexpansive = "+refs(NonExpansive))//DEBUG
+ change
+ }
+
+ // override def isNullable: Boolean =
+ // symbol == AnyClass ||
+ // symbol != NothingClass && (symbol isSubClass ObjectClass) && !(symbol isSubClass NonNullClass);
+
+ // override def isNonNull: Boolean = symbol == NonNullClass || super.isNonNull;
+ override def kind = "ClassInfoType"
+
+ override def safeToString =
+ if (settings.debug.value || decls.size > 1)
+ formattedToString
+ else
+ super.safeToString
+
+ /** A nicely formatted string with newlines and such.
+ */
+ def formattedToString: String =
+ parents.mkString("\n with ") + (
+ if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
+ fullyInitializeScope(decls).mkString(" {\n ", "\n ", "\n}")
+ else ""
+ )
+ }
+
+ object ClassInfoType extends ClassInfoTypeExtractor
+
+ /** ClassInfoType specialization for package classes: never has parents. */
+ class PackageClassInfoType(decls: Scope, clazz: Symbol)
+ extends ClassInfoType(List(), decls, clazz)
+
+ /** A class representing a constant type, i.e. the type of a literal value.
+ * The underlying type is the constant's ordinary type; `deconst` widens
+ * back to it. Created only through `ConstantType.apply`.
+ */
+ abstract case class ConstantType(value: Constant) extends SingletonType with ConstantTypeApi {
+ override def underlying: Type = value.tpe
+ assert(underlying.typeSymbol != UnitClass)
+ override def isTrivial: Boolean = true
+ override def isNotNull = value.value != null
+ override def deconst: Type = underlying
+ override def safeToString: String =
+ underlying.toString + "(" + value.escapedStringValue + ")"
+ // override def isNullable: Boolean = value.value eq null
+ // override def isNonNull: Boolean = value.value ne null
+ override def kind = "ConstantType"
+ }
+
+ final class UniqueConstantType(value: Constant) extends ConstantType(value)
+
+ /** Factory/extractor for ConstantType; all instances are interned. */
+ object ConstantType extends ConstantTypeExtractor {
+ def apply(value: Constant) = unique(new UniqueConstantType(value))
+ }
+
+ /* Syncnote: The `volatile` var and `pendingVolatiles` mutable set need not be protected
+ * with synchronized, because they are accessed only from isVolatile, which is called only from
+ * Typer.
+ */
+ // Recursion depth counter and in-progress set used by AbstractTypeRef.isVolatile
+ // to break cycles among mutually-bounded abstract types.
+ private var volatileRecursions: Int = 0
+ private val pendingVolatiles = new mutable.HashSet[Symbol]
+
+ /** A TypeRef that carries explicit (non-empty) type arguments. Since all
+ * parameters are applied, the reference itself is not higher-kinded and
+ * exposes no type parameters.
+ */
+ class ArgsTypeRef(pre0: Type, sym0: Symbol, args0: List[Type]) extends TypeRef(pre0, sym0, args0) {
+ require(args0.nonEmpty, this)
+
+ /** No unapplied type params since it has (should have) equally as many args. */
+ override def isHigherKinded = false
+ override def typeParams = Nil
+
+ override def transform(tp: Type): Type = {
+ // This situation arises when a typevar is encountered for which
+ // too little information is known to determine its kind, and
+ // it later turns out not to have kind *. See SI-4070. Only
+ // logging it for now.
+ // Bug fix: the format string previously had only two %s for three
+ // arguments, so `args` was silently dropped from the log message.
+ if (sym.typeParams.size != args.size)
+ log("!!! %s.transform(%s), but tparams.isEmpty and args=%s".format(this, tp, args))
+
+ asSeenFromOwner(tp).instantiateTypeParams(sym.typeParams, args)
+ }
+
+ // note: does not go through typeRef. There's no need to because
+ // neither `pre` nor `sym` changes. And there's a performance
+ // advantage to call TypeRef directly.
+ override def typeConstructor = TypeRef(pre, sym, Nil)
+ }
+
+ /** TypeRef to a module (object) class. Always stable and non-null; its
+ * `narrow` is the singleton type of the source module, cached lazily.
+ */
+ class ModuleTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) with ClassTypeRef {
+ require(sym.isModuleClass, sym)
+ private[this] var narrowedCache: Type = _
+ override def isStable = true
+ override def narrow = {
+ if (narrowedCache eq null)
+ narrowedCache = singleType(pre, sym.sourceModule)
+
+ narrowedCache
+ }
+ final override def isNotNull = true
+ override protected def finishPrefix(rest: String) = objectPrefix + rest
+ override def directObjectString = super.safeToString
+ override def toLongString = toString
+ // Prints as e.g. "foo.Bar.type".
+ override def safeToString = prefixString + "type"
+ override def prefixString = if (sym.isOmittablePrefix) "" else prefix.prefixString + sym.nameString + "."
+ }
+ /** ModuleTypeRef specialization for package classes; only the printed prefix differs. */
+ class PackageTypeRef(pre0: Type, sym0: Symbol) extends ModuleTypeRef(pre0, sym0) {
+ require(sym.isPackageClass, sym)
+ override protected def finishPrefix(rest: String) = packagePrefix + rest
+ }
+ /** TypeRef to a refinement class; normalizes to the refinement's info. */
+ class RefinementTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) with ClassTypeRef {
+ require(sym.isRefinementClass, sym)
+
+ // I think this is okay, but see #1241 (r12414), #2208, and typedTypeConstructor in Typers
+ override protected def normalizeImpl: Type = sym.info.normalize
+ override protected def finishPrefix(rest: String) = "" + thisInfo
+ }
+
+ /** A TypeRef with an empty argument list. Higher-kinded iff the referenced
+ * symbol has type parameters (unless it is a raw Java type).
+ */
+ class NoArgsTypeRef(pre0: Type, sym0: Symbol) extends TypeRef(pre0, sym0, Nil) {
+ // A reference (in a Scala program) to a type that has type parameters, but where the reference
+ // does not include type arguments. Note that it doesn't matter whether the symbol refers
+ // to a java or scala symbol, but it does matter whether it occurs in java or scala code.
+ // TypeRefs w/o type params that occur in java signatures/code are considered raw types, and are
+ // represented as existential types.
+ override def isHigherKinded = typeParams.nonEmpty
+ override def typeParams = if (isDefinitionsInitialized) sym.typeParams else sym.unsafeTypeParams
+ private def isRaw = !phase.erasedTypes && isRawIfWithoutArgs(sym)
+
+ override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]): Type =
+ if (isHigherKinded) {
+ if (sameLength(formals intersect typeParams, typeParams))
+ copyTypeRef(this, pre, sym, actuals)
+ // partial application (needed in infer when bunching type arguments from classes and methods together)
+ else
+ copyTypeRef(this, pre, sym, dummyArgs).instantiateTypeParams(formals, actuals)
+ }
+ else
+ super.instantiateTypeParams(formals, actuals)
+
+ override def transform(tp: Type): Type = {
+ val res = asSeenFromOwner(tp)
+ if (isHigherKinded && !isRaw)
+ res.instantiateTypeParams(typeParams, dummyArgs)
+ else
+ res
+ }
+
+ override def transformInfo(tp: Type): Type =
+ appliedType(asSeenFromOwner(tp), dummyArgs)
+
+ override def narrow =
+ if (sym.isModuleClass) singleType(pre, sym.sourceModule)
+ else super.narrow
+
+ override def typeConstructor = this
+ // eta-expand, subtyping relies on eta-expansion of higher-kinded types
+
+ override protected def normalizeImpl: Type =
+ if (isHigherKinded) etaExpand else super.normalizeImpl
+ }
+
+ /** Mixin for TypeRefs whose symbol is a class: baseType is looked up
+ * directly in the class's info (no relativeInfo indirection needed).
+ */
+ trait ClassTypeRef extends TypeRef {
+ // !!! There are scaladoc-created symbols arriving which violate this require.
+ // require(sym.isClass, sym)
+
+ override def baseType(clazz: Symbol): Type =
+ if (sym == clazz) this
+ else transform(sym.info.baseType(clazz))
+ }
+
+ /** Mixin for TypeRefs whose symbol is not a class (aliases, abstract
+ * types). Member info relative to the prefix is cached and invalidated
+ * whenever the prefix's memberInfo identity changes.
+ */
+ trait NonClassTypeRef extends TypeRef {
+ require(sym.isNonClassType, sym)
+
+ /* Syncnote: These are pure caches for performance; no problem to evaluate these
+ * several times. Hence, no need to protect with synchronized in a multi-threaded
+ * usage scenario.
+ */
+ private var relativeInfoCache: Type = _
+ private var memberInfoCache: Type = _
+
+ private[Types] def relativeInfo = {
+ val memberInfo = pre.memberInfo(sym)
+ // Reference-compare against the last seen memberInfo to detect staleness.
+ if (relativeInfoCache == null || (memberInfo ne memberInfoCache)) {
+ memberInfoCache = memberInfo
+ relativeInfoCache = transformInfo(memberInfo)
+ }
+ relativeInfoCache
+ }
+
+ override def baseType(clazz: Symbol): Type =
+ if (sym == clazz) this else baseTypeOfNonClassTypeRef(this, clazz)
+ }
+
+ /** baseType for non-class TypeRefs, guarded against runaway recursion:
+ * past a depth threshold, in-progress types are tracked in
+ * `pendingBaseTypes` and a re-entry short-circuits (AnyClass maps to
+ * itself, everything else to NoType).
+ */
+ protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) = try {
+ basetypeRecursions += 1
+ if (basetypeRecursions < LogPendingBaseTypesThreshold)
+ tpe.relativeInfo.baseType(clazz)
+ else if (pendingBaseTypes contains tpe)
+ if (clazz == AnyClass) clazz.tpe else NoType
+ else
+ try {
+ pendingBaseTypes += tpe
+ tpe.relativeInfo.baseType(clazz)
+ } finally {
+ pendingBaseTypes -= tpe
+ }
+ } finally {
+ basetypeRecursions -= 1
+ }
+
+ /** Mixin for TypeRefs whose symbol is a type alias. Most operations
+ * delegate to the normalized (beta-reduced) form of the alias.
+ */
+ trait AliasTypeRef extends NonClassTypeRef {
+ require(sym.isAliasType, sym)
+
+ override def dealias = if (typeParamsMatchArgs) betaReduce.dealias else super.dealias
+ override def isStable = normalize.isStable
+ override def isVolatile = normalize.isVolatile
+ override def narrow = normalize.narrow
+ override def thisInfo = normalize
+ // The `this ne normalize` guards avoid infinite self-delegation when
+ // normalization is the identity.
+ override def prefix = if (this ne normalize) normalize.prefix else pre
+ override def termSymbol = if (this ne normalize) normalize.termSymbol else super.termSymbol
+ override def typeSymbol = if (this ne normalize) normalize.typeSymbol else sym
+
+ // beta-reduce, but don't do partial application -- cycles have been checked in typeRef
+ override protected def normalizeImpl =
+ if (typeParamsMatchArgs) betaReduce.normalize
+ else if (isHigherKinded) super.normalizeImpl
+ else {
+ // if we are overriding a type alias in an erroneous way, don't just
+ // return an ErrorType since that will result in useless error msg.
+ // Instead let's try to recover from it and rely on refcheck reporting the correct error,
+ // if that fails fallback to the old behaviour.
+ val overriddenSym = sym.nextOverriddenSymbol
+ if (overriddenSym != NoSymbol) pre.memberType(overriddenSym).normalize
+ else ErrorType
+ }
+
+ // isHKSubType0 introduces synthetic type params so that
+ // betaReduce can first apply sym.info to typeArgs before calling
+ // asSeenFrom. asSeenFrom then skips synthetic type params, which
+ // are used to reduce HO subtyping to first-order subtyping, but
+ // which can't be instantiated from the given prefix and class.
+ //
+ // this crashes pos/depmet_implicit_tpbetareduce.scala
+ // appliedType(sym.info, typeArgs).asSeenFrom(pre, sym.owner)
+ override def betaReduce = transform(sym.info.resultType)
+
+ // #3731: return sym1 for which holds: pre bound sym.name to sym and
+ // pre1 now binds sym.name to sym1, conceptually exactly the same
+ // symbol as sym. The selection of sym on pre must be updated to the
+ // selection of sym1 on pre1, since sym's info was probably updated
+ // by the TypeMap to yield a new symbol, sym1 with transformed info.
+ // @returns sym1
+ override def coevolveSym(pre1: Type): Symbol =
+ if (pre eq pre1) sym else (pre, pre1) match {
+ // don't look at parents -- it would be an error to override alias types anyway
+ case (RefinedType(_, _), RefinedType(_, decls1)) => decls1 lookup sym.name
+ // TODO: is there another way a typeref's symbol can refer to a symbol defined in its pre?
+ case _ => sym
+ }
+ override def kind = "AliasTypeRef"
+ }
+
+ /** Mixin for TypeRefs whose symbol is an abstract type. Behavior is
+ * largely derived from the declared bounds; volatility and thisInfo are
+ * guarded against cyclic bounds.
+ */
+ trait AbstractTypeRef extends NonClassTypeRef {
+ require(sym.isAbstractType, sym)
+
+ /** Syncnote: Pure performance caches; no need to synchronize in multi-threaded environment
+ */
+ private var symInfoCache: Type = _
+ private var thisInfoCache: Type = _
+
+ override def isVolatile = {
+ // need to be careful not to fall into an infinite recursion here
+ // because volatile checking is done before all cycles are detected.
+ // the case to avoid is an abstract type directly or
+ // indirectly upper-bounded by itself. See #2918
+ try {
+ volatileRecursions += 1
+ if (volatileRecursions < LogVolatileThreshold)
+ bounds.hi.isVolatile
+ else if (pendingVolatiles(sym))
+ true // we can return true here, because a cycle will be detected
+ // here afterwards and an error will result anyway.
+ else
+ try {
+ pendingVolatiles += sym
+ bounds.hi.isVolatile
+ } finally {
+ pendingVolatiles -= sym
+ }
+ } finally {
+ volatileRecursions -= 1
+ }
+ }
+
+ override def thisInfo = {
+ val symInfo = sym.info
+ // Reference-compare against the last seen sym.info to detect staleness.
+ if (thisInfoCache == null || (symInfo ne symInfoCache)) {
+ symInfoCache = symInfo
+ thisInfoCache = transformInfo(symInfo) match {
+ // If a subtyping cycle is not detected here, we'll likely enter an infinite
+ // loop before a sensible error can be issued. SI-5093 is one example.
+ case x: SubType if x.supertype eq this =>
+ throw new RecoverableCyclicReference(sym)
+ case tp => tp
+ }
+ }
+ thisInfoCache
+ }
+ override def isStable = bounds.hi.typeSymbol isSubClass SingletonClass
+ override def bounds = thisInfo.bounds
+ // def transformInfo(tp: Type): Type = appliedType(tp.asSeenFrom(pre, sym.owner), typeArgsOrDummies)
+ override protected[Types] def baseTypeSeqImpl: BaseTypeSeq = transform(bounds.hi).baseTypeSeq prepend this
+ override def kind = "AbstractTypeRef"
+ }
+
+ /** A class for named types of the form
+ * `<prefix>.<sym.name>[args]`
+ * Cannot be created directly; one should always use `typeRef`
+ * for creation. (@M: Otherwise hashing breaks)
+ *
+ * @M: a higher-kinded type is represented as a TypeRef with sym.typeParams.nonEmpty, but args.isEmpty
+ */
+ abstract case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends UniqueType with TypeRefApi {
+ private var trivial: ThreeValue = UNKNOWN
+ override def isTrivial: Boolean = {
+ if (trivial == UNKNOWN)
+ trivial = fromBoolean(!sym.isTypeParameter && pre.isTrivial && areTrivialTypes(args))
+ toBoolean(trivial)
+ }
+ private[reflect] var parentsCache: List[Type] = _
+ private[reflect] var parentsPeriod = NoPeriod
+ private[reflect] var baseTypeSeqCache: BaseTypeSeq = _
+ private[reflect] var baseTypeSeqPeriod = NoPeriod
+ private var normalized: Type = _
+
+ //OPT specialize hashCode
+ override final def computeHashCode = {
+ import scala.util.hashing.MurmurHash3._
+ val hasArgs = args.nonEmpty
+ var h = productSeed
+ h = mix(h, pre.hashCode)
+ h = mix(h, sym.hashCode)
+ if (hasArgs)
+ finalizeHash(mix(h, args.hashCode), 3)
+ else
+ finalizeHash(h, 2)
+ }
+
+ // @M: propagate actual type params (args) to `tp`, by replacing
+ // formal type parameters with actual ones. If tp is higher kinded,
+ // the "actual" type arguments are types that simply reference the
+ // corresponding type parameters (unbound type variables)
+ def transform(tp: Type): Type
+
+ // eta-expand, subtyping relies on eta-expansion of higher-kinded types
+ protected def normalizeImpl: Type = if (isHigherKinded) etaExpand else super.normalize
+
+ // TODO: test case that is compiled in a specific order and in different runs
+ final override def normalize: Type = {
+ // arises when argument-dependent types are approximated (see def depoly in implicits)
+ if (pre eq WildcardType) WildcardType
+ else if (phase.erasedTypes) normalizeImpl
+ else {
+ if (normalized eq null)
+ normalized = normalizeImpl
+ normalized
+ }
+ }
+
+ override def isGround = (
+ sym.isPackageClass
+ || pre.isGround && args.forall(_.isGround)
+ )
+
+ override def etaExpand: Type = {
+ // must initialise symbol, see test/files/pos/ticket0137.scala
+ val tpars = initializedTypeParams
+ if (tpars.isEmpty) this
+ else typeFunAnon(tpars, copyTypeRef(this, pre, sym, tpars map (_.tpeHK))) // todo: also beta-reduce?
+ }
+
+ // only need to rebind type aliases, as typeRef already handles abstract types
+ // (they are allowed to be rebound more liberally)
+ def coevolveSym(pre1: Type): Symbol = sym
+
+ //@M! use appliedType on the polytype that represents the bounds (or if aliastype, the rhs)
+ def transformInfo(tp: Type): Type = appliedType(asSeenFromOwner(tp), args)
+
+ def thisInfo = sym.info
+ def initializedTypeParams = sym.info.typeParams
+ def typeParamsMatchArgs = sameLength(initializedTypeParams, args)
+ def asSeenFromOwner(tp: Type) = tp.asSeenFrom(pre, sym.owner)
+
+ override def baseClasses = thisInfo.baseClasses
+ override def baseTypeSeqDepth = baseTypeSeq.maxDepth
+ override def isStable = (sym eq NothingClass) || (sym eq SingletonClass)
+ override def prefix = pre
+ override def termSymbol = super.termSymbol
+ override def termSymbolDirect = super.termSymbol
+ override def typeArgs = args
+ override def typeOfThis = transform(sym.typeOfThis)
+ override def typeSymbol = sym
+ override def typeSymbolDirect = sym
+
+ override def isNotNull =
+ sym.isModuleClass || sym == NothingClass || (sym isNonBottomSubClass NotNullClass) || super.isNotNull
+
+ override def parents: List[Type] = {
+ val cache = parentsCache
+ if (parentsPeriod == currentPeriod && cache != null) cache
+ else {
+ defineParentsOfTypeRef(this)
+ parentsCache
+ }
+ }
+
+ override def decls: Scope = {
+ sym.info match {
+ case TypeRef(_, sym1, _) =>
+ assert(sym1 != sym, this) // @MAT was != typeSymbol
+ case _ =>
+ }
+ thisInfo.decls
+ }
+
+ protected[Types] def baseTypeSeqImpl: BaseTypeSeq = sym.info.baseTypeSeq map transform
+
+ override def baseTypeSeq: BaseTypeSeq = {
+ val cache = baseTypeSeqCache
+ if (baseTypeSeqPeriod == currentPeriod && cache != null && cache != undetBaseTypeSeq)
+ cache
+ else {
+ defineBaseTypeSeqOfTypeRef(this)
+ if (baseTypeSeqCache == undetBaseTypeSeq)
+ throw new RecoverableCyclicReference(sym)
+
+ baseTypeSeqCache
+ }
+ }
+
+ // ensure that symbol is not a local copy with a name coincidence
+ private def needsPreString = (
+ settings.debug.value
+ || !shorthands(sym.fullName)
+ || (sym.ownersIterator exists (s => !s.isClass))
+ )
+ private def preString = if (needsPreString) pre.prefixString else ""
+ private def argsString = if (args.isEmpty) "" else args.mkString("[", ",", "]")
+
+ def refinementString = (
+ if (sym.isStructuralRefinement) (
+ fullyInitializeScope(decls) filter (sym => sym.isPossibleInRefinement && sym.isPublic)
+ map (_.defString)
+ mkString("{", "; ", "}")
+ )
+ else ""
+ )
+
+ protected def finishPrefix(rest: String) = (
+ if (sym.isInitialized && sym.isAnonymousClass && !phase.erasedTypes)
+ parentsString(thisInfo.parents) + refinementString
+ else rest
+ )
+ private def customToString = sym match {
+ case RepeatedParamClass => args.head + "*"
+ case ByNameParamClass => "=> " + args.head
+ case _ =>
+ def targs = normalize.typeArgs
+
+ if (isFunctionType(this)) {
+ // Aesthetics: printing Function1 as T => R rather than (T) => R
+ // ...but only if it's not a tuple, so ((T1, T2)) => R is distinguishable
+ // from (T1, T2) => R.
+ targs match {
+ case in :: out :: Nil if !isTupleType(in) =>
+ // A => B => C should be (A => B) => C or A => (B => C).
+ // Also if A is byname, then we want (=> A) => B because => is right associative and => A => B
+ // would mean => (A => B) which is a different type
+ val in_s = if (isFunctionType(in) || isByNameParamType(in)) "(" + in + ")" else "" + in
+ val out_s = if (isFunctionType(out)) "(" + out + ")" else "" + out
+ in_s + " => " + out_s
+ case xs =>
+ xs.init.mkString("(", ", ", ")") + " => " + xs.last
+ }
+ }
+ else if (isTupleType(this))
+ targs.mkString("(", ", ", if (hasLength(targs, 1)) ",)" else ")")
+ else if (sym.isAliasType && prefixChain.exists(_.termSymbol.isSynthetic) && (this ne this.normalize))
+ "" + normalize
+ else
+ ""
+ }
+ override def safeToString = {
+ val custom = if (settings.debug.value) "" else customToString
+ if (custom != "") custom
+ else finishPrefix(preString + sym.nameString + argsString)
+ }
+ override def prefixString = "" + (
+ if (settings.debug.value)
+ super.prefixString
+ else if (sym.isOmittablePrefix)
+ ""
+ else if (sym.isPackageClass || sym.isPackageObjectOrClass)
+ sym.skipPackageObject.fullName + "."
+ else if (isStable && nme.isSingletonName(sym.name))
+ tpnme.dropSingletonName(sym.name) + "."
+ else
+ super.prefixString
+ )
+ override def kind = "TypeRef"
+ }
+
+ object TypeRef extends TypeRefExtractor {
+ def apply(pre: Type, sym: Symbol, args: List[Type]): Type = unique({
+ if (args.nonEmpty) {
+ if (sym.isAliasType) new ArgsTypeRef(pre, sym, args) with AliasTypeRef
+ else if (sym.isAbstractType) new ArgsTypeRef(pre, sym, args) with AbstractTypeRef
+ else new ArgsTypeRef(pre, sym, args) with ClassTypeRef
+ }
+ else {
+ if (sym.isAliasType) new NoArgsTypeRef(pre, sym) with AliasTypeRef
+ else if (sym.isAbstractType) new NoArgsTypeRef(pre, sym) with AbstractTypeRef
+ else if (sym.isRefinementClass) new RefinementTypeRef(pre, sym)
+ else if (sym.isPackageClass) new PackageTypeRef(pre, sym)
+ else if (sym.isModuleClass) new ModuleTypeRef(pre, sym)
+ else new NoArgsTypeRef(pre, sym) with ClassTypeRef
+ }
+ })
+ }
+
+ protected def defineParentsOfTypeRef(tpe: TypeRef) = {
+ val period = tpe.parentsPeriod
+ if (period != currentPeriod) {
+ tpe.parentsPeriod = currentPeriod
+ if (!isValidForBaseClasses(period)) {
+ tpe.parentsCache = tpe.thisInfo.parents map tpe.transform
+ } else if (tpe.parentsCache == null) { // seems this can happen if things are corrupted enough, see #2641
+ tpe.parentsCache = List(AnyClass.tpe)
+ }
+ }
+ }
+
+ protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) = {
+ val period = tpe.baseTypeSeqPeriod
+ if (period != currentPeriod) {
+ tpe.baseTypeSeqPeriod = currentPeriod
+ if (!isValidForBaseClasses(period)) {
+ if (Statistics.canEnable) Statistics.incCounter(typerefBaseTypeSeqCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null
+ try {
+ tpe.baseTypeSeqCache = undetBaseTypeSeq
+ tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl
+ } finally {
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ }
+ }
+ }
+ if (tpe.baseTypeSeqCache == undetBaseTypeSeq)
+ throw new TypeError("illegal cyclic inheritance involving " + tpe.sym)
+ }
+
+ /** A class representing a method type with parameters.
+ * Note that a parameterless method is represented by a NullaryMethodType:
+ *
+ * def m(): Int MethodType(Nil, Int)
+ * def m: Int NullaryMethodType(Int)
+ */
+ case class MethodType(override val params: List[Symbol],
+ override val resultType: Type) extends Type with MethodTypeApi {
+
+ private var trivial: ThreeValue = UNKNOWN
+ override def isTrivial: Boolean = {
+ if (trivial == UNKNOWN) trivial = fromBoolean(isTrivialResult && areTrivialParams(params))
+ toBoolean(trivial)
+ }
+
+ private def isTrivialResult =
+ resultType.isTrivial && (resultType eq resultType.withoutAnnotations)
+
+ private def areTrivialParams(ps: List[Symbol]): Boolean = ps match {
+ case p :: rest =>
+ p.tpe.isTrivial && !typesContain(paramTypes, p) && !(resultType contains p) &&
+ areTrivialParams(rest)
+ case _ =>
+ true
+ }
+
+ def isImplicit = params.nonEmpty && params.head.isImplicit
+ def isJava = false // can we do something like for implicits? I.e. do Java methods without parameters need to be recognized?
+
+ //assert(paramTypes forall (pt => !pt.typeSymbol.isImplClass))//DEBUG
+ override def paramSectionCount: Int = resultType.paramSectionCount + 1
+
+ override def paramss: List[List[Symbol]] = params :: resultType.paramss
+
+ override def paramTypes = params map (_.tpe)
+
+ override def boundSyms = resultType.boundSyms ++ params
+
+ override def resultType(actuals: List[Type]) =
+ if (isTrivial || phase.erasedTypes) resultType
+ else if (/*isDependentMethodType &&*/ sameLength(actuals, params)) {
+ val idm = new InstantiateDependentMap(params, actuals)
+ val res = idm(resultType)
+ existentialAbstraction(idm.existentialsNeeded, res)
+ }
+ else existentialAbstraction(params, resultType)
+
+ private var isdepmeth: ThreeValue = UNKNOWN
+ override def isDependentMethodType: Boolean = {
+ if (isdepmeth == UNKNOWN) isdepmeth = fromBoolean(IsDependentCollector.collect(resultType.dealias))
+ toBoolean(isdepmeth)
+ }
+
+ // implicit args can only be depended on in result type:
+ //TODO this may be generalised so that the only constraint is dependencies are acyclic
+ def approximate: MethodType = MethodType(params, resultApprox)
+
+ override def finalResultType: Type = resultType.finalResultType
+
+ override def safeToString = paramString(this) + resultType
+
+ override def cloneInfo(owner: Symbol) = {
+ val vparams = cloneSymbolsAtOwner(params, owner)
+ copyMethodType(this, vparams, resultType.substSym(params, vparams).cloneInfo(owner))
+ }
+
+ override def atOwner(owner: Symbol) =
+ if (!allSymbolsHaveOwner(params, owner) || (resultType.atOwner(owner) ne resultType))
+ cloneInfo(owner)
+ else
+ this
+
+ override def kind = "MethodType"
+ }
+
+ object MethodType extends MethodTypeExtractor
+
+ class JavaMethodType(ps: List[Symbol], rt: Type) extends MethodType(ps, rt) {
+ override def isJava = true
+ }
+
+ case class NullaryMethodType(override val resultType: Type) extends Type with NullaryMethodTypeApi {
+ override def isTrivial = resultType.isTrivial && (resultType eq resultType.withoutAnnotations)
+ override def prefix: Type = resultType.prefix
+ override def narrow: Type = resultType.narrow
+ override def finalResultType: Type = resultType.finalResultType
+ override def termSymbol: Symbol = resultType.termSymbol
+ override def typeSymbol: Symbol = resultType.typeSymbol
+ override def parents: List[Type] = resultType.parents
+ override def decls: Scope = resultType.decls
+ override def baseTypeSeq: BaseTypeSeq = resultType.baseTypeSeq
+ override def baseTypeSeqDepth: Int = resultType.baseTypeSeqDepth
+ override def baseClasses: List[Symbol] = resultType.baseClasses
+ override def baseType(clazz: Symbol): Type = resultType.baseType(clazz)
+ override def boundSyms = resultType.boundSyms
+ override def isVolatile = resultType.isVolatile
+ override def safeToString: String = "=> "+ resultType
+ override def kind = "NullaryMethodType"
+ }
+
+ object NullaryMethodType extends NullaryMethodTypeExtractor
+
+ /** A type function or the type of a polymorphic value (and thus of kind *).
+ *
+ * Before the introduction of NullaryMethodType, a polymorphic nullary method (e.g, def isInstanceOf[T]: Boolean)
+ * used to be typed as PolyType(tps, restpe), and a monomorphic one as PolyType(Nil, restpe)
+ * This is now: PolyType(tps, NullaryMethodType(restpe)) and NullaryMethodType(restpe)
+ * by symmetry to MethodTypes: PolyType(tps, MethodType(params, restpe)) and MethodType(params, restpe)
+ *
+ * Thus, a PolyType(tps, TypeRef(...)) unambiguously indicates a type function (which results from eta-expanding a type constructor alias).
+ * Similarly, PolyType(tps, ClassInfoType(...)) is a type constructor.
+ *
+ * A polytype is of kind * iff its resultType is a (nullary) method type.
+ */
+ case class PolyType(override val typeParams: List[Symbol], override val resultType: Type)
+ extends Type with PolyTypeApi {
+ //assert(!(typeParams contains NoSymbol), this)
+ assert(typeParams.nonEmpty, this) // used to be a marker for nullary method type, illegal now (see @NullaryMethodType)
+
+ override def paramSectionCount: Int = resultType.paramSectionCount
+ override def paramss: List[List[Symbol]] = resultType.paramss
+ override def params: List[Symbol] = resultType.params
+ override def paramTypes: List[Type] = resultType.paramTypes
+ override def parents: List[Type] = resultType.parents
+ override def decls: Scope = resultType.decls
+ override def termSymbol: Symbol = resultType.termSymbol
+ override def typeSymbol: Symbol = resultType.typeSymbol
+ override def boundSyms = immutable.Set[Symbol](typeParams ++ resultType.boundSyms: _*)
+ override def prefix: Type = resultType.prefix
+ override def baseTypeSeq: BaseTypeSeq = resultType.baseTypeSeq
+ override def baseTypeSeqDepth: Int = resultType.baseTypeSeqDepth
+ override def baseClasses: List[Symbol] = resultType.baseClasses
+ override def baseType(clazz: Symbol): Type = resultType.baseType(clazz)
+ override def narrow: Type = resultType.narrow
+ override def isVolatile = resultType.isVolatile
+ override def finalResultType: Type = resultType.finalResultType
+
+ /** @M: typeDefSig wraps a TypeBounds in a PolyType
+ * to represent a higher-kinded type parameter
+ * wrap lo&hi in polytypes to bind variables
+ */
+ override def bounds: TypeBounds =
+ TypeBounds(typeFun(typeParams, resultType.bounds.lo),
+ typeFun(typeParams, resultType.bounds.hi))
+
+ override def isHigherKinded = !typeParams.isEmpty
+
+ override def safeToString = typeParamsString(this) + resultType
+
+ override def cloneInfo(owner: Symbol) = {
+ val tparams = cloneSymbolsAtOwner(typeParams, owner)
+ PolyType(tparams, resultType.substSym(typeParams, tparams).cloneInfo(owner))
+ }
+
+ override def atOwner(owner: Symbol) =
+ if (!allSymbolsHaveOwner(typeParams, owner) || (resultType.atOwner(owner) ne resultType))
+ cloneInfo(owner)
+ else
+ this
+
+ override def kind = "PolyType"
+ }
+
+ object PolyType extends PolyTypeExtractor
+
+ /** A creator for existential types which flattens nested existentials.
+ */
+ def newExistentialType(quantified: List[Symbol], underlying: Type): Type =
+ if (quantified.isEmpty) underlying
+ else underlying match {
+ case ExistentialType(qs, restpe) => newExistentialType(quantified ::: qs, restpe)
+ case _ => ExistentialType(quantified, underlying)
+ }
+
+ case class ExistentialType(quantified: List[Symbol],
+ override val underlying: Type) extends RewrappingTypeProxy with ExistentialTypeApi
+ {
+ override protected def rewrap(newtp: Type) = existentialAbstraction(quantified, newtp)
+
+ override def isTrivial = false
+ override def isStable: Boolean = false
+ override def bounds = TypeBounds(maybeRewrap(underlying.bounds.lo), maybeRewrap(underlying.bounds.hi))
+ override def parents = underlying.parents map maybeRewrap
+ override def boundSyms = quantified.toSet
+ override def prefix = maybeRewrap(underlying.prefix)
+ override def typeArgs = underlying.typeArgs map maybeRewrap
+ override def params = underlying.params mapConserve { param =>
+ val tpe1 = rewrap(param.tpeHK)
+ if (tpe1 eq param.tpeHK) param else param.cloneSymbol.setInfo(tpe1)
+ }
+ override def paramTypes = underlying.paramTypes map maybeRewrap
+ override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = {
+// maybeRewrap(underlying.instantiateTypeParams(formals, actuals))
+
+ val quantified1 = new SubstTypeMap(formals, actuals) mapOver quantified
+ val underlying1 = underlying.instantiateTypeParams(formals, actuals)
+ if ((quantified1 eq quantified) && (underlying1 eq underlying)) this
+ else existentialAbstraction(quantified1, underlying1.substSym(quantified, quantified1))
+
+ }
+ override def baseType(clazz: Symbol) = maybeRewrap(underlying.baseType(clazz))
+ override def baseTypeSeq = underlying.baseTypeSeq map maybeRewrap
+ override def isHigherKinded = false
+
+ override def skolemizeExistential(owner: Symbol, origin: AnyRef) =
+ deriveType(quantified, tparam => (owner orElse tparam.owner).newExistentialSkolem(tparam, origin))(underlying)
+
+ private def wildcardArgsString(qset: Set[Symbol], args: List[Type]): List[String] = args map {
+ case TypeRef(_, sym, _) if (qset contains sym) =>
+ "_"+sym.infoString(sym.info)
+ case arg =>
+ arg.toString
+ }
+
+ /** An existential can only be printed with wildcards if:
+ * - the underlying type is a typeref
+ * - every quantified variable appears at most once as a type argument and
+ * nowhere inside a type argument
+ * - no quantified type argument contains a quantified variable in its bound
+ * - the typeref's symbol is not itself quantified
+ * - the prefix is not quanitified
+ */
+ def isRepresentableWithWildcards = {
+ val qset = quantified.toSet
+ underlying match {
+ case TypeRef(pre, sym, args) =>
+ def isQuantified(tpe: Type): Boolean = {
+ (tpe exists (t => qset contains t.typeSymbol)) ||
+ tpe.typeSymbol.isRefinementClass && (tpe.parents exists isQuantified)
+ }
+ val (wildcardArgs, otherArgs) = args partition (arg => qset contains arg.typeSymbol)
+ wildcardArgs.distinct == wildcardArgs &&
+ !(otherArgs exists (arg => isQuantified(arg))) &&
+ !(wildcardArgs exists (arg => isQuantified(arg.typeSymbol.info.bounds))) &&
+ !(qset contains sym) &&
+ !isQuantified(pre)
+ case _ => false
+ }
+ }
+
+ override def safeToString: String = {
+ def clauses = {
+ val str = quantified map (_.existentialToString) mkString (" forSome { ", "; ", " }")
+ if (settings.explaintypes.value) "(" + str + ")" else str
+ }
+ underlying match {
+ case TypeRef(pre, sym, args) if !settings.debug.value && isRepresentableWithWildcards =>
+ "" + TypeRef(pre, sym, Nil) + wildcardArgsString(quantified.toSet, args).mkString("[", ", ", "]")
+ case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) =>
+ "(" + underlying + ")" + clauses
+ case _ =>
+ "" + underlying + clauses
+ }
+ }
+
+ override def cloneInfo(owner: Symbol) =
+ createFromClonedSymbolsAtOwner(quantified, owner, underlying)(newExistentialType)
+
+ override def atOwner(owner: Symbol) =
+ if (!allSymbolsHaveOwner(quantified, owner)) cloneInfo(owner) else this
+
+ override def kind = "ExistentialType"
+
+ def withTypeVars(op: Type => Boolean): Boolean = withTypeVars(op, AnyDepth)
+
+ def withTypeVars(op: Type => Boolean, depth: Int): Boolean = {
+ val quantifiedFresh = cloneSymbols(quantified)
+ val tvars = quantifiedFresh map (tparam => TypeVar(tparam))
+ val underlying1 = underlying.instantiateTypeParams(quantified, tvars) // fuse subst quantified -> quantifiedFresh -> tvars
+ op(underlying1) && {
+ solve(tvars, quantifiedFresh, quantifiedFresh map (x => 0), false, depth) &&
+ isWithinBounds(NoPrefix, NoSymbol, quantifiedFresh, tvars map (_.constr.inst))
+ }
+ }
+ }
+
+ object ExistentialType extends ExistentialTypeExtractor
+
+ /** A class containing the alternatives and type prefix of an overloaded symbol.
+ * Not used after phase `typer`.
+ */
+ case class OverloadedType(pre: Type, alternatives: List[Symbol]) extends Type {
+ override def prefix: Type = pre
+ override def safeToString =
+ (alternatives map pre.memberType).mkString("", " <and> ", "")
+ override def kind = "OverloadedType"
+ }
+
+ def overloadedType(pre: Type, alternatives: List[Symbol]): Type =
+ if (alternatives.tail.isEmpty) pre memberType alternatives.head
+ else OverloadedType(pre, alternatives)
+
+ /** A class remembering a type instantiation for some a set of overloaded
+ * polymorphic symbols.
+ * Not used after phase `typer`.
+ */
+ case class AntiPolyType(pre: Type, targs: List[Type]) extends Type {
+ override def safeToString =
+ pre.toString + targs.mkString("(with type arguments ", ", ", ")");
+ override def memberType(sym: Symbol) = appliedType(pre.memberType(sym), targs)
+// override def memberType(sym: Symbol) = pre.memberType(sym) match {
+// case PolyType(tparams, restp) =>
+// restp.subst(tparams, targs)
+// /* I don't think this is needed, as existential types close only over value types
+// case ExistentialType(tparams, qtpe) =>
+// existentialAbstraction(tparams, qtpe.memberType(sym))
+// */
+// case ErrorType =>
+// ErrorType
+// }
+ override def kind = "AntiPolyType"
+ }
+
+ //private var tidCount = 0 //DEBUG
+
+ object HasTypeMember {
+ def apply(name: TypeName, tp: Type): Type = {
+ val bound = refinedType(List(WildcardType), NoSymbol)
+ val bsym = bound.typeSymbol.newAliasType(name)
+ bsym setInfo tp
+ bound.decls enter bsym
+ bound
+ }
+ def unapply(tp: Type): Option[(TypeName, Type)] = tp match {
+ case RefinedType(List(WildcardType), Scope(sym)) => Some((sym.name.toTypeName, sym.info))
+ case _ => None
+ }
+ }
+
+ // Not used yet.
+ object HasTypeParams {
+ def unapply(tp: Type): Option[(List[Symbol], Type)] = tp match {
+ case AnnotatedType(_, tp, _) => unapply(tp)
+ case ExistentialType(tparams, qtpe) => Some((tparams, qtpe))
+ case PolyType(tparams, restpe) => Some((tparams, restpe))
+ case _ => None
+ }
+ }
+
+ //@M
+ // a TypeVar used to be a case class with only an origin and a constr
+ // then, constr became mutable (to support UndoLog, I guess),
+ // but pattern-matching returned the original constr0 (a bug)
+ // now, pattern-matching returns the most recent constr
+ object TypeVar {
+ @inline final def trace[T](action: String, msg: => String)(value: T): T = {
+ if (traceTypeVars) {
+ val s = msg match {
+ case "" => ""
+ case str => "( " + str + " )"
+ }
+ Console.err.println("[%10s] %-25s%s".format(action, value, s))
+ }
+ value
+ }
+
+ /** Create a new TypeConstraint based on the given symbol.
+ */
+ private def deriveConstraint(tparam: Symbol): TypeConstraint = {
+ /** Must force the type parameter's info at this point
+ * or things don't end well for higher-order type params.
+ * See SI-5359.
+ */
+ val bounds = tparam.info.bounds
+ /** We can seed the type constraint with the type parameter
+ * bounds as long as the types are concrete. This should lower
+ * the complexity of the search even if it doesn't improve
+ * any results.
+ */
+ if (propagateParameterBoundsToTypeVars) {
+ val exclude = bounds.isEmptyBounds || (bounds exists typeIsNonClassType)
+
+ if (exclude) new TypeConstraint
+ else TypeVar.trace("constraint", "For " + tparam.fullLocationString)(new TypeConstraint(bounds))
+ }
+ else new TypeConstraint
+ }
+ def untouchable(tparam: Symbol): TypeVar = createTypeVar(tparam, untouchable = true)
+ def apply(tparam: Symbol): TypeVar = createTypeVar(tparam, untouchable = false)
+ def apply(origin: Type, constr: TypeConstraint): TypeVar = apply(origin, constr, Nil, Nil)
+ def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]): TypeVar =
+ createTypeVar(origin, constr, args, params, untouchable = false)
+
+ /** This is the only place TypeVars should be instantiated.
+ */
+ private def createTypeVar(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol], untouchable: Boolean): TypeVar = {
+ val tv = (
+ if (args.isEmpty && params.isEmpty) {
+ if (untouchable) new TypeVar(origin, constr) with UntouchableTypeVar
+ else new TypeVar(origin, constr) {}
+ }
+ else if (args.size == params.size) {
+ if (untouchable) new AppliedTypeVar(origin, constr, params zip args) with UntouchableTypeVar
+ else new AppliedTypeVar(origin, constr, params zip args)
+ }
+ else if (args.isEmpty) {
+ if (untouchable) new HKTypeVar(origin, constr, params) with UntouchableTypeVar
+ else new HKTypeVar(origin, constr, params)
+ }
+ else throw new Error("Invalid TypeVar construction: " + ((origin, constr, args, params)))
+ )
+
+ trace("create", "In " + tv.originLocation)(tv)
+ }
+ private def createTypeVar(tparam: Symbol, untouchable: Boolean): TypeVar =
+ createTypeVar(tparam.tpeHK, deriveConstraint(tparam), Nil, tparam.typeParams, untouchable)
+ }
+
+ /** Repack existential types, otherwise they sometimes get unpacked in the
+ * wrong location (type inference comes up with an unexpected skolem)
+ */
+ def repackExistential(tp: Type): Type = (
+ if (tp == NoType) tp
+ else existentialAbstraction(existentialsInType(tp), tp)
+ )
+
+ def containsExistential(tpe: Type) =
+ tpe exists typeIsExistentiallyBound
+
+ def existentialsInType(tpe: Type) =
+ tpe withFilter typeIsExistentiallyBound map (_.typeSymbol)
+
+ /** Precondition: params.nonEmpty. (args.nonEmpty enforced structurally.)
+ */
+ class HKTypeVar(
+ _origin: Type,
+ _constr: TypeConstraint,
+ override val params: List[Symbol]
+ ) extends TypeVar(_origin, _constr) {
+
+ require(params.nonEmpty, this)
+ override def isHigherKinded = true
+ override protected def typeVarString = params.map(_.name).mkString("[", ", ", "]=>" + originName)
+ }
+
+ /** Precondition: zipped params/args nonEmpty. (Size equivalence enforced structurally.)
+ */
+ class AppliedTypeVar(
+ _origin: Type,
+ _constr: TypeConstraint,
+ zippedArgs: List[(Symbol, Type)]
+ ) extends TypeVar(_origin, _constr) {
+
+ require(zippedArgs.nonEmpty, this)
+
+ override def params: List[Symbol] = zippedArgs map (_._1)
+ override def typeArgs: List[Type] = zippedArgs map (_._2)
+
+ override protected def typeVarString = (
+ zippedArgs map { case (p, a) => p.name + "=" + a } mkString (origin + "[", ", ", "]")
+ )
+ }
+
+ trait UntouchableTypeVar extends TypeVar {
+ override def untouchable = true
+ override def isGround = true
+ override def registerTypeEquality(tp: Type, typeVarLHS: Boolean) = tp match {
+ case t: TypeVar if !t.untouchable =>
+ t.registerTypeEquality(this, !typeVarLHS)
+ case _ =>
+ super.registerTypeEquality(tp, typeVarLHS)
+ }
+ override def registerBound(tp: Type, isLowerBound: Boolean, isNumericBound: Boolean = false): Boolean = tp match {
+ case t: TypeVar if !t.untouchable =>
+ t.registerBound(this, !isLowerBound, isNumericBound)
+ case _ =>
+ super.registerBound(tp, isLowerBound, isNumericBound)
+ }
+ }
+
+ /** A class representing a type variable: not used after phase `typer`.
+ *
+ * A higher-kinded TypeVar has params (Symbols) and typeArgs (Types).
+ * A TypeVar with nonEmpty typeArgs can only be instantiated by a higher-kinded
+ * type that can be applied to those args. A TypeVar is much like a TypeRef,
+ * except it has special logic for equality and subtyping.
+ *
+ * Precondition for this class, enforced structurally: args.isEmpty && params.isEmpty.
+ */
+ abstract case class TypeVar(
+ val origin: Type,
+ var constr: TypeConstraint
+ ) extends Type {
+
+ // We don't want case class equality/hashing as TypeVar-s are mutable,
+ // and TypeRefs based on them get wrongly `uniqued` otherwise. See SI-7226.
+ override def hashCode(): Int = System.identityHashCode(this)
+ override def equals(other: Any): Boolean = this eq other.asInstanceOf[AnyRef]
+
+ def untouchable = false // by other typevars
+ override def params: List[Symbol] = Nil
+ override def typeArgs: List[Type] = Nil
+ override def isHigherKinded = false
+
+ /** The constraint associated with the variable
+ * Syncnote: Type variables are assumed to be used from only one
+ * thread. They are not exposed in api.Types and are used only locally
+ * in operations that are exposed from types. Hence, no syncing of `constr`
+ * or `encounteredHigherLevel` or `suspended` accesses should be necessary.
+ */
+// var constr = constr0
+ def instValid = constr.instValid
+ override def isGround = instValid && constr.inst.isGround
+
+ /** The variable's skolemization level */
+ val level = skolemizationLevel
+
+ /** Applies this TypeVar to type arguments, if arity matches.
+ *
+ * Different applications of the same type constructor variable `?CC`,
+ * e.g. `?CC[Int]` and `?CC[String]`, are modeled as distinct instances of `TypeVar`
+ * that share a `TypeConstraint`, so that the comparisons `?CC[Int] <:< List[Int]`
+ * and `?CC[String] <:< Iterable[String]` result in `?CC` being upper-bounded by `List` and `Iterable`.
+ *
+ * Applying the wrong number of type args results in a TypeVar whose instance is set to `ErrorType`.
+ */
+ def applyArgs(newArgs: List[Type]): TypeVar = (
+ if (newArgs.isEmpty && typeArgs.isEmpty)
+ this
+ else if (newArgs.size == params.size) {
+ val tv = TypeVar(origin, constr, newArgs, params)
+ TypeVar.trace("applyArgs", "In " + originLocation + ", apply args " + newArgs.mkString(", ") + " to " + originName)(tv)
+ }
+ else
+ TypeVar(typeSymbol).setInst(ErrorType)
+ )
+ // newArgs.length may differ from args.length (could've been empty before)
+ //
+ // !!! @PP - I need an example of this, since this exception never triggers
+ // even though I am requiring the size match.
+ //
+ // example: when making new typevars, you start out with C[A], then you replace C by ?C, which should yield ?C[A], then A by ?A, ?C[?A]
+ // we need to track a TypeVar's arguments, and map over them (see TypeMap::mapOver)
+ // TypeVars get applied to different arguments over time (in asSeenFrom)
+ // -- see pos/tcpoly_infer_implicit_tuplewrapper.scala
+ // thus: make new TypeVar's for every application of a TV to args,
+ // inference may generate several TypeVar's for a single type parameter that must be inferred,
+ // only one of them is in the set of tvars that need to be solved, but
+ // they share the same TypeConstraint instance
+
+ // When comparing to types containing skolems, remember the highest level
+ // of skolemization. If that highest level is higher than our initial
+ // skolemizationLevel, we can't re-use those skolems as the solution of this
+ // typevar, which means we'll need to repack our constr.inst into a fresh
+ // existential.
+ // were we compared to skolems at a higher skolemizationLevel?
+ // EXPERIMENTAL: value will not be considered unless enableTypeVarExperimentals is true
+ // see SI-5729 for why this is still experimental
+ private var encounteredHigherLevel = false
+ private def shouldRepackType = enableTypeVarExperimentals && encounteredHigherLevel
+
+ // <region name="constraint mutators + undoLog">
+ // invariant: before mutating constr, save old state in undoLog
+ // (undoLog is used to reset constraints to avoid piling up unrelated ones)
+ def setInst(tp: Type): this.type = {
+// assert(!(tp containsTp this), this)
+ undoLog record this
+ // if we were compared against later typeskolems, repack the existential,
+ // because skolems are only compatible if they were created at the same level
+ val res = if (shouldRepackType) repackExistential(tp) else tp
+ constr.inst = TypeVar.trace("setInst", "In " + originLocation + ", " + originName + "=" + res)(res)
+ this
+ }
+
+ def addLoBound(tp: Type, isNumericBound: Boolean = false) {
+ assert(tp != this, tp) // implies there is a cycle somewhere (?)
+ //println("addLoBound: "+(safeToString, debugString(tp))) //DEBUG
+ undoLog record this
+ constr.addLoBound(tp, isNumericBound)
+ }
+
+ def addHiBound(tp: Type, isNumericBound: Boolean = false) {
+ // assert(tp != this)
+ //println("addHiBound: "+(safeToString, debugString(tp))) //DEBUG
+ undoLog record this
+ constr.addHiBound(tp, isNumericBound)
+ }
+ // </region>
+
+ // ignore subtyping&equality checks while true -- see findMember
+ private[Types] var suspended = false
+
+ /** Called when a TypeVar is involved in a subtyping check. Result is whether
+ * this TypeVar could plausibly be a [super/sub]type of argument `tp` and if so,
+ * tracks tp as a [lower/upper] bound of this TypeVar.
+ *
+ * if (isLowerBound) this typevar could be a subtype, track tp as a lower bound
+ * if (!isLowerBound) this typevar could be a supertype, track tp as an upper bound
+ *
+ * If isNumericBound is true, the subtype check is performed with weak_<:< instead of <:<.
+ */
+ def registerBound(tp: Type, isLowerBound: Boolean, isNumericBound: Boolean = false): Boolean = {
+ // println("regBound: "+(safeToString, debugString(tp), isLowerBound)) //@MDEBUG
+ if (isLowerBound)
+ assert(tp != this)
+
+ // side effect: adds the type to upper or lower bounds
+ def addBound(tp: Type) {
+ if (isLowerBound) addLoBound(tp, isNumericBound)
+ else addHiBound(tp, isNumericBound)
+ }
+ // swaps the arguments if it's an upper bound
+ def checkSubtype(tp1: Type, tp2: Type) = {
+ val lhs = if (isLowerBound) tp1 else tp2
+ val rhs = if (isLowerBound) tp2 else tp1
+
+ if (isNumericBound) lhs weak_<:< rhs
+ else lhs <:< rhs
+ }
+
+ /** Simple case: type arguments can be ignored, because either this typevar has
+ * no type parameters, or we are comparing to Any/Nothing.
+ *
+ * The latter condition is needed because HK unification is limited to constraints of the shape
+ * {{{
+ * TC1[T1,..., TN] <: TC2[T'1,...,T'N]
+ * }}}
+ * which would preclude the following important constraints:
+ * {{{
+ * Nothing <: ?TC[?T]
+ * ?TC[?T] <: Any
+ * }}}
+ */
+ def unifySimple = {
+ val sym = tp.typeSymbol
+ if (sym == NothingClass || sym == AnyClass) { // kind-polymorphic
+ // SI-7126 if we register some type alias `T=Any`, we can later end
+ // with malformed types like `T[T]` during type inference in
+ // `handlePolymorphicCall`. No such problem if we register `Any`.
+ addBound(sym.tpe)
+ true
+ } else if (params.isEmpty) {
+ addBound(tp)
+ true
+ } else false
+ }
+
+ /** Full case: involving a check of the form
+ * {{{
+ * TC1[T1,..., TN] <: TC2[T'1,...,T'N]
+ * }}}
+ * Checks subtyping of higher-order type vars, and uses variances as defined in the
+ * type parameter we're trying to infer (the result will be sanity-checked later).
+ */
+ def unifyFull(tpe: Type): Boolean = {
+ def unifySpecific(tp: Type) = {
+ sameLength(typeArgs, tp.typeArgs) && {
+ val lhs = if (isLowerBound) tp.typeArgs else typeArgs
+ val rhs = if (isLowerBound) typeArgs else tp.typeArgs
+ // This is a higher-kinded type var with same arity as tp.
+ // If so (see SI-7517), side effect: adds the type constructor itself as a bound.
+ isSubArgs(lhs, rhs, params, AnyDepth) && { addBound(tp.typeConstructor); true }
+ }
+ }
+ // The type with which we can successfully unify can be hidden
+ // behind singleton types and type aliases.
+ tpe.dealiasWidenChain exists unifySpecific
+ }
+
+ // There's a <: test taking place right now, where tp is a concrete type and this is a typevar
+ // attempting to satisfy that test. Either the test will be unsatisfiable, in which case
+ // registerBound will return false; or the upper or lower bounds of this type var will be
+ // supplemented with the type being tested against.
+ //
+ // Eventually the types which have accumulated in the upper and lower bounds will be lubbed
+ // (resp. glbbed) to instantiate the typevar.
+ //
+ // The only types which are eligible for unification are those with the same number of
+ // typeArgs as this typevar, or Any/Nothing, which are kind-polymorphic. For the upper bound,
+ // any parent or base type of `tp` may be tested here (leading to a corresponding relaxation
+ // in the upper bound.) The universe of possible glbs, being somewhat more infinite, is not
+ // addressed here: all lower bounds are retained and their intersection calculated when the
+ // bounds are solved.
+ //
+ // In a side-effect free universe, checking tp and tp.parents before checking tp.baseTypeSeq
+ // would be pointless. In this case, each check we perform causes us to lose specificity: in
+ // the end the best we'll do is the least specific type we tested against, since the typevar
+ // does not see these checks as "probes" but as requirements to fulfill.
+ // TODO: can the `suspended` flag be used to poke around without leaving a trace?
+ //
+ // So the strategy used here is to test first the type, then the direct parents, and finally
+ // to fall back on the individual base types. This warrants eventual re-examination.
+
+ // AM: I think we could use the `suspended` flag to avoid side-effecting during unification
+ if (suspended) // constraint accumulation is disabled
+ checkSubtype(tp, origin)
+ else if (constr.instValid) // type var is already set
+ checkSubtype(tp, constr.inst)
+ else isRelatable(tp) && {
+ unifySimple || unifyFull(tp) || (
+ // only look harder if our gaze is oriented toward Any
+ isLowerBound && (
+ (tp.parents exists unifyFull) || (
+ // @PP: Is it going to be faster to filter out the parents we just checked?
+ // That's what's done here but I'm not sure it matters.
+ tp.baseTypeSeq.toList.tail filterNot (tp.parents contains _) exists unifyFull
+ )
+ )
+ )
+ }
+ }
+
+ /** Called when this TypeVar is involved in an =:= check against `tp`.
+ * If already instantiated, checks the instance for equality; otherwise,
+ * if `tp` fits within the current bounds, instantiates to `tp`.
+ * `typeVarLHS` records which side of =:= this variable appeared on.
+ */
+ def registerTypeEquality(tp: Type, typeVarLHS: Boolean): Boolean = {
+// println("regTypeEq: "+(safeToString, debugString(tp), tp.getClass, if (typeVarLHS) "in LHS" else "in RHS", if (suspended) "ZZ" else if (constr.instValid) "IV" else "")) //@MDEBUG
+// println("constr: "+ constr)
+ // compare the existing instantiation against tp, preserving argument order
+ def checkIsSameType(tp: Type) =
+ if(typeVarLHS) constr.inst =:= tp
+ else tp =:= constr.inst
+
+ if (suspended) tp =:= origin
+ else if (constr.instValid) checkIsSameType(tp)
+ else isRelatable(tp) && {
+ val newInst = wildcardToTypeVarMap(tp)
+ (constr isWithinBounds newInst) && { setInst(tp); true }
+ }
+ }
+
+ /**
+ * `?A.T =:= tp` is rewritten as the constraint `?A <: {type T = tp}`
+ *
+ * TODO: make these constraints count (incorporate them into implicit search in `applyImplicitArgs`)
+ * (`T` corresponds to @param sym)
+ */
+ def registerTypeSelection(sym: Symbol, tp: Type): Boolean = {
+ registerBound(HasTypeMember(sym.name.toTypeName, tp), false)
+ }
+
+ // true if tp's head symbol is a type skolem from a higher skolemization level
+ private def isSkolemAboveLevel(tp: Type) = tp.typeSymbol match {
+ case ts: TypeSkolem => ts.level > level
+ case _ => false
+ }
+ // side-effects encounteredHigherLevel
+ private def containsSkolemAboveLevel(tp: Type) =
+ (tp exists isSkolemAboveLevel) && { encounteredHigherLevel = true ; true }
+
+ /** Can this variable be related in a constraint to type `tp`?
+ * This is not the case if `tp` contains type skolems whose
+ * skolemization level is higher than the level of this variable.
+ */
+ def isRelatable(tp: Type) = (
+ shouldRepackType // short circuit if we already know we've seen higher levels
+ || !containsSkolemAboveLevel(tp) // side-effects tracking boolean
+ || enableTypeVarExperimentals // -Xexperimental: always say we're relatable, track consequences
+ )
+
+ override def normalize: Type = (
+ if (constr.instValid) constr.inst
+ // get here when checking higher-order subtyping of the typevar by itself
+ // TODO: check whether this ever happens?
+ else if (isHigherKinded) logResult("Normalizing HK $this")(typeFun(params, applyArgs(params map (_.typeConstructor))))
+ else super.normalize
+ )
+ // delegate identity-ish queries to the origin type this variable stands for
+ override def typeSymbol = origin.typeSymbol
+ override def isStable = origin.isStable
+ override def isVolatile = origin.isVolatile
+
+ // renders sym's type parameter list as "[...]", or "" if it has none
+ private def tparamsOfSym(sym: Symbol) = sym.info match {
+ case PolyType(tparams, _) if tparams.nonEmpty =>
+ tparams map (_.defString) mkString("[", ",", "]")
+ case _ => ""
+ }
+ def originName = origin.typeSymbolDirect.decodedName
+ // human-readable "where did this type var come from" path, used in traces
+ def originLocation = {
+ val sym = origin.typeSymbolDirect
+ val encl = sym.owner.logicallyEnclosingMember
+
+ // This should display somewhere between one and three
+ // things which enclose the origin: at most, a class,
+ // a method, and a term. At least, a class.
+ List(
+ Some(encl.enclClass),
+ if (encl.isMethod) Some(encl) else None,
+ if (sym.owner.isTerm && (sym.owner != encl)) Some(sym.owner) else None
+ ).flatten map (s => s.decodedName + tparamsOfSym(s)) mkString "#"
+ }
+ private def levelString = if (settings.explaintypes.value) level else ""
+ protected def typeVarString = originName
+ // never throws: guards against partially-constructed state (null constr/inst)
+ override def safeToString = (
+ if ((constr eq null) || (constr.inst eq null)) "TVar<" + originName + "=null>"
+ else if (constr.inst ne NoType) "=?" + constr.inst
+ else (if(untouchable) "!?" else "?") + levelString + originName
+ )
+ override def kind = "TypeVar"
+
+ def cloneInternal = {
+ // cloning a suspended type variable when it's suspended will cause the clone
+ // to never be resumed with the current implementation
+ assert(!suspended, this)
+ TypeVar.trace("clone", originLocation)(
+ TypeVar(origin, constr.cloneInternal, typeArgs, params) // @M TODO: clone args/params?
+ )
+ }
+ }
+
+ /** A type carrying some annotations. Created by the typechecker
+ * when eliminating ''Annotated'' trees (see typedAnnotated).
+ *
+ * @param annotations the list of annotations on the type
+ * @param underlying the type without the annotation
+ * @param selfsym a "self" symbol with type `underlying`;
+ * only available if -Yself-in-annots is turned on. Can be `NoSymbol`
+ * if it is not used.
+ */
+ case class AnnotatedType(override val annotations: List[AnnotationInfo],
+ override val underlying: Type,
+ override val selfsym: Symbol)
+ extends RewrappingTypeProxy with AnnotatedTypeApi {
+
+ assert(!annotations.isEmpty, "" + underlying)
+
+ // re-attach the same annotations/selfsym to a transformed underlying type
+ override protected def rewrap(tp: Type) = copy(underlying = tp)
+
+ override def isTrivial: Boolean = underlying.isTrivial && annotations.forall(_.isTrivial)
+
+ override def safeToString = annotations.mkString(underlying + " @", " @", "")
+
+ // keep only annotations satisfying p; drop the wrapper entirely if none remain
+ override def filterAnnotations(p: AnnotationInfo => Boolean): Type = {
+ val (yes, no) = annotations partition p
+ if (yes.isEmpty) underlying
+ else if (no.isEmpty) this
+ else copy(annotations = yes)
+ }
+ override def setAnnotations(annots: List[AnnotationInfo]): Type =
+ if (annots.isEmpty) underlying
+ else copy(annotations = annots)
+
+ /** Add a number of annotations to this type */
+ override def withAnnotations(annots: List[AnnotationInfo]): Type =
+ if (annots.isEmpty) this
+ else copy(annots ::: this.annotations)
+
+ /** Remove any annotations from this type.
+ * TODO - is it allowed to nest AnnotatedTypes? If not then let's enforce
+ * that at creation. At the moment if they do ever turn up nested this
+ * recursively calls withoutAnnotations.
+ */
+ override def withoutAnnotations = underlying.withoutAnnotations
+
+ /** Set the self symbol */
+ override def withSelfsym(sym: Symbol) = copy(selfsym = sym)
+
+ /** Drop the annotations on the bounds, unless the low and high
+ * bounds are exactly tp.
+ */
+ override def bounds: TypeBounds = underlying.bounds match {
+ case TypeBounds(_: this.type, _: this.type) => TypeBounds(this, this)
+ case oftp => oftp
+ }
+
+ /** Replace formal type parameter symbols with actual type arguments. */
+ override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = {
+ val annotations1 = annotations.map(info => AnnotationInfo(info.atp.instantiateTypeParams(
+ formals, actuals), info.args, info.assocs).setPos(info.pos))
+ val underlying1 = underlying.instantiateTypeParams(formals, actuals)
+ if ((annotations1 eq annotations) && (underlying1 eq underlying)) this
+ else AnnotatedType(annotations1, underlying1, selfsym)
+ }
+
+ /** Return the base type sequence of tp, dropping the annotations, unless the base type sequence of tp
+ * is precisely tp itself. */
+ override def baseTypeSeq: BaseTypeSeq = {
+ val oftp = underlying.baseTypeSeq
+ if ((oftp.length == 1) && (oftp(0) eq underlying))
+ baseTypeSingletonSeq(this)
+ else
+ oftp
+ }
+
+ override def kind = "AnnotatedType"
+ }
+
+ /** Creator for AnnotatedTypes. It returns the underlying type if annotations.isEmpty
+ * rather than walking into the assertion.
+ */
+ def annotatedType(annots: List[AnnotationInfo], underlying: Type, selfsym: Symbol = NoSymbol): Type =
+ if (annots.isEmpty) underlying
+ else AnnotatedType(annots, underlying, selfsym)
+
+ object AnnotatedType extends AnnotatedTypeExtractor
+
+ /** A class representing types with a name. When an application uses
+ * named arguments, the named argument types for calling isApplicable
+ * are represented as NamedType.
+ */
+ case class NamedType(name: Name, tp: Type) extends Type {
+ override def safeToString: String = name.toString +": "+ tp
+ }
+
+ /** A De Bruijn index referring to a previous type argument. Only used
+ * as a serialization format.
+ */
+ case class DeBruijnIndex(level: Int, idx: Int, args: List[Type]) extends Type {
+ override def safeToString: String = "De Bruijn index("+level+","+idx+")"
+ }
+
+ /** A binder defining data associated with De Bruijn indices. Only used
+ * as a serialization format.
+ */
+ case class DeBruijnBinder(pnames: List[Name], ptypes: List[Type], restpe: Type) extends Type {
+ override def safeToString = {
+ // pnames is assumed non-empty here; its first name's kind tells
+ // whether this binder came from a PolyType or a MethodType
+ val kind = if (pnames.head.isTypeName) "poly" else "method"
+ "De Bruijn "+kind+"("+(pnames mkString ",")+";"+(ptypes mkString ",")+";"+restpe+")"
+ }
+ }
+
+ /** A temporary type representing the erasure of a user-defined value type.
+ * Created during phase erasure, eliminated again in posterasure.
+ *
+ * @param original The underlying type before erasure
+ */
+ abstract case class ErasedValueType(original: TypeRef) extends UniqueType {
+ override def safeToString = "ErasedValueType("+original+")"
+ }
+
+ final class UniqueErasedValueType(original: TypeRef) extends ErasedValueType(original)
+
+ // hash-conses instances so erased value types can be compared by reference
+ object ErasedValueType {
+ def apply(original: TypeRef): Type = {
+ assert(original.sym ne NoSymbol, "ErasedValueType over NoSymbol")
+ unique(new UniqueErasedValueType(original))
+ }
+ }
+
+ /** A class representing an as-yet unevaluated type.
+ * Subclasses implement `complete` to force the symbol's real info.
+ */
+ abstract class LazyType extends Type {
+ override def isComplete: Boolean = false
+ override def complete(sym: Symbol)
+ override def safeToString = "<?>"
+ override def kind = "LazyType"
+ }
+
+ /** A marker trait representing an as-yet unevaluated type
+ * which doesn't assign flags to the underlying symbol.
+ */
+ trait FlagAgnosticCompleter extends LazyType
+
+ /** A marker trait representing an as-yet unevaluated type
+ * which assigns flags to the underlying symbol.
+ */
+ trait FlagAssigningCompleter extends LazyType
+
+ // lazy type that already knows its type parameters, shown in its string form
+ abstract class LazyPolyType(override val typeParams: List[Symbol]) extends LazyType {
+ override def safeToString =
+ (if (typeParams.isEmpty) "" else typeParamsString(this)) + super.safeToString
+ }
+
+ // def mkLazyType(tparams: Symbol*)(f: Symbol => Unit): LazyType = (
+ // if (tparams.isEmpty) new LazyType { override def complete(sym: Symbol) = f(sym) }
+ // else new LazyPolyType(tparams.toList) { override def complete(sym: Symbol) = f(sym) }
+ // )
+
+// Creators ---------------------------------------------------------------
+
+ /** Rebind symbol `sym` to an overriding member in type `pre`.
+ * Falls back to `sym` itself if no suitable (type or stable) member exists.
+ */
+ private def rebind(pre: Type, sym: Symbol): Symbol = {
+ if (!sym.isOverridableMember || sym.owner == pre.typeSymbol) sym
+ else pre.nonPrivateMember(sym.name).suchThat(sym => sym.isType || sym.isStable) orElse sym
+ }
+
+ /** Convert a `super` prefix to a this-type if `sym` is abstract or final. */
+ private def removeSuper(tp: Type, sym: Symbol): Type = tp match {
+ case SuperType(thistp, _) =>
+ if (sym.isEffectivelyFinal || sym.isDeferred) thistp
+ else tp
+ case _ =>
+ tp
+ }
+
+ /** The canonical creator for single-types.
+ * After erasure, singleton types no longer exist, so the result type is used;
+ * otherwise the prefix is normalized (super removed, symbol rebound) first.
+ */
+ def singleType(pre: Type, sym: Symbol): Type = {
+ if (phase.erasedTypes)
+ sym.tpe.resultType
+ else if (sym.isRootPackage)
+ ThisType(sym.moduleClass)
+ else {
+ var sym1 = rebind(pre, sym)
+ val pre1 = removeSuper(pre, sym1)
+ if (pre1 ne pre) sym1 = rebind(pre1, sym1)
+ SingleType(pre1, sym1)
+ }
+ }
+
+ /** the canonical creator for a refined type with a given scope */
+ def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type = {
+ if (phase.erasedTypes)
+ if (parents.isEmpty) ObjectClass.tpe else parents.head
+ else {
+ // the refinement gets its own refinement class symbol, whose info is the result
+ val clazz = owner.newRefinementClass(pos)
+ val result = RefinedType(parents, decls, clazz)
+ clazz.setInfo(result)
+ result
+ }
+ }
+
+ /** The canonical creator for a refined type with an initially empty scope.
+ *
+ * @param parents the parent types of the refinement
+ * @param owner the owner of the freshly created refinement class
+ * @return the refined type (or sole parent / Object after erasure)
+ */
+ def refinedType(parents: List[Type], owner: Symbol): Type =
+ refinedType(parents, owner, newScope, owner.pos)
+
+ // Copy `original` with new parents/decls, cloning the declared symbols into
+ // the new refinement class and rebinding their infos to the new this-type.
+ def copyRefinedType(original: RefinedType, parents: List[Type], decls: Scope) =
+ if ((parents eq original.parents) && (decls eq original.decls)) original
+ else {
+ val owner = if (original.typeSymbol == NoSymbol) NoSymbol else original.typeSymbol.owner
+ val result = refinedType(parents, owner)
+ val syms1 = decls.toList
+ for (sym <- syms1)
+ result.decls.enter(sym.cloneSymbol(result.typeSymbol))
+ val syms2 = result.decls.toList
+ val resultThis = result.typeSymbol.thisType
+ for (sym <- syms2)
+ sym modifyInfo (_ substThisAndSym(original.typeSymbol, resultThis, syms1, syms2))
+
+ result
+ }
+
+ /** The canonical creator for typerefs
+ * todo: see how we can clean this up a bit
+ */
+ def typeRef(pre: Type, sym: Symbol, args: List[Type]): Type = {
+ // type alias selections are rebound in TypeMap ("coevolved",
+ // actually -- see #3731) e.g., when type parameters that are
+ // referenced by the alias are instantiated in the prefix. See
+ // pos/depmet_rebind_typealias.
+
+ val sym1 = if (sym.isAbstractType) rebind(pre, sym) else sym
+ // don't expand cyclical type alias
+ // we require that object is initialized, thus info.typeParams instead of typeParams.
+ if (sym1.isAliasType && sameLength(sym1.info.typeParams, args) && !sym1.lockOK)
+ throw new RecoverableCyclicReference(sym1)
+
+ // a super-prefix collapses to its this-type when the member cannot be overridden
+ val pre1 = pre match {
+ case x: SuperType if sym1.isEffectivelyFinal || sym1.isDeferred =>
+ x.thistpe
+ case _ => pre
+ }
+ if (pre eq pre1) TypeRef(pre, sym1, args)
+ else if (sym1.isAbstractType && !sym1.isClass) typeRef(pre1, rebind(pre1, sym1), args)
+ else typeRef(pre1, sym1, args)
+ }
+
+ // Optimization to avoid creating unnecessary new typerefs.
+ def copyTypeRef(tp: Type, pre: Type, sym: Symbol, args: List[Type]): Type = tp match {
+ case TypeRef(pre0, sym0, _) if pre == pre0 && sym0.name == sym.name =>
+ // same cyclic-alias guard as in typeRef above
+ if (sym.isAliasType && sameLength(sym.info.typeParams, args) && !sym.lockOK)
+ throw new RecoverableCyclicReference(sym)
+
+ TypeRef(pre, sym, args)
+ case _ =>
+ typeRef(pre, sym, args)
+ }
+
+ /** The canonical creator for implicit method types */
+ def JavaMethodType(params: List[Symbol], resultType: Type): JavaMethodType =
+ new JavaMethodType(params, resultType) // don't unique this!
+
+ /** Create a new MethodType of the same class as tp, i.e. keep JavaMethodType */
+ def copyMethodType(tp: Type, params: List[Symbol], restpe: Type): Type = tp match {
+ case _: JavaMethodType => JavaMethodType(params, restpe)
+ case _ => MethodType(params, restpe)
+ }
+
+ /** A creator for intersection type where intersections of a single type are
+ * replaced by the type itself, and repeated parent classes are merged.
+ *
+ * !!! Repeated parent classes are not merged - is this a bug in the
+ * comment or in the code?
+ */
+ def intersectionType(tps: List[Type], owner: Symbol): Type = tps match {
+ case tp :: Nil => tp
+ case _ => refinedType(tps, owner)
+ }
+ /** A creator for intersection type where intersections of a single type are
+ * replaced by the type itself.
+ * Same as above, but computes the owner as the common owner of `tps`.
+ */
+ def intersectionType(tps: List[Type]): Type = tps match {
+ case tp :: Nil => tp
+ case _ => refinedType(tps, commonOwner(tps))
+ }
+
+/**** This implementation to merge parents was checked in in commented-out
+ form and has languished unaltered for five years. I think we should
+ use it or lose it.
+
+ def merge(tps: List[Type]): List[Type] = tps match {
+ case tp :: tps1 =>
+ val tps1a = tps1 filter (_.typeSymbol.==(tp.typeSymbol))
+ val tps1b = tps1 filter (_.typeSymbol.!=(tp.typeSymbol))
+ mergePrefixAndArgs(tps1a, -1) match {
+ case Some(tp1) => tp1 :: merge(tps1b)
+ case None => throw new MalformedType(
+ "malformed type: "+refinedType(tps, owner)+" has repeated parent class "+
+ tp.typeSymbol+" with incompatible prefixes or type arguments")
+ }
+ case _ => tps
+ }
+ refinedType(merge(tps), owner)
+*/
+
+ /** A creator for type applications.
+ * Dispatches on the shape of `tycon`: typerefs get their args replaced,
+ * poly types are instantiated, existentials/bounds/refinements are mapped
+ * through, and type vars record the application via `applyArgs`.
+ */
+ def appliedType(tycon: Type, args: List[Type]): Type = {
+ if (args.isEmpty)
+ return tycon //@M! `if (args.isEmpty) tycon' is crucial (otherwise we create new types in phases after typer and then they don't get adapted (??))
+
+ /** Disabled - causes cycles in tcpoly tests. */
+ if (false && isDefinitionsInitialized) {
+ assert(isUseableAsTypeArgs(args), {
+ val tapp_s = s"""$tycon[${args mkString ", "}]"""
+ val arg_s = args filterNot isUseableAsTypeArg map (t => t + "/" + t.getClass) mkString ", "
+ s"$tapp_s includes illegal type argument $arg_s"
+ })
+ }
+
+ tycon match {
+ case TypeRef(pre, sym @ (NothingClass|AnyClass), _) => copyTypeRef(tycon, pre, sym, Nil) //@M drop type args to Any/Nothing
+ case TypeRef(pre, sym, _) => copyTypeRef(tycon, pre, sym, args)
+ case PolyType(tparams, restpe) => restpe.instantiateTypeParams(tparams, args)
+ case ExistentialType(tparams, restpe) => newExistentialType(tparams, appliedType(restpe, args))
+ case st: SingletonType => appliedType(st.widen, args) // @M TODO: what to do? see bug1
+ case RefinedType(parents, decls) => RefinedType(parents map (appliedType(_, args)), decls) // MO to AM: please check
+ case TypeBounds(lo, hi) => TypeBounds(appliedType(lo, args), appliedType(hi, args))
+ case tv @ TypeVar(_, _) => tv.applyArgs(args)
+ case AnnotatedType(annots, underlying, self) => AnnotatedType(annots, appliedType(underlying, args), self)
+ case ErrorType => tycon
+ case WildcardType => tycon // needed for neg/t0226
+ case _ => abort(debugString(tycon))
+ }
+ }
+
+ /** Very convenient. */
+ def appliedType(tyconSym: Symbol, args: Type*): Type =
+ appliedType(tyconSym.typeConstructor, args.toList)
+
+ /** A creator for existential types where the type arguments,
+ * rather than being applied directly, are interpreted as the
+ * upper bounds of unknown types. For instance if the type argument
+ * list given is List(AnyRefClass), the resulting type would be
+ * e.g. Set[_ <: AnyRef] rather than Set[AnyRef] .
+ */
+ def appliedTypeAsUpperBounds(tycon: Type, args: List[Type]): Type = {
+ tycon match {
+ case TypeRef(pre, sym, _) if sameLength(sym.typeParams, args) =>
+ // one fresh existential parameter per type param, bounded above by the arg
+ val eparams = typeParamsToExistentials(sym)
+ val bounds = args map (TypeBounds upper _)
+ foreach2(eparams, bounds)(_ setInfo _)
+
+ newExistentialType(eparams, typeRef(pre, sym, eparams map (_.tpe)))
+ case _ =>
+ appliedType(tycon, args)
+ }
+ }
+
+ /** A creator and extractor for type parameterizations that strips empty type parameter lists.
+ * Use this factory method to indicate the type has kind * (it's a polymorphic value)
+ * until we start tracking explicit kinds equivalent to typeFun (except that the latter requires tparams nonEmpty).
+ *
+ * PP to AM: I've co-opted this for where I know tparams may well be empty, and
+ * expecting to get back `tpe` in such cases. Re being "forgiving" below,
+ * can we instead say this is the canonical creator for polyTypes which
+ * may or may not be poly? (It filched the standard "canonical creator" name.)
+ */
+ object GenPolyType {
+ def apply(tparams: List[Symbol], tpe: Type): Type = {
+ tpe match {
+ case MethodType(_, _) =>
+ assert(tparams forall (_.isInvariant), "Trying to create a method with variant type parameters: " + ((tparams, tpe)))
+ case _ =>
+ }
+ if (tparams.nonEmpty) typeFun(tparams, tpe)
+ else tpe // it's okay to be forgiving here
+ }
+ // total extractor: a non-poly type is treated as having an empty tparam list
+ def unapply(tpe: Type): Option[(List[Symbol], Type)] = tpe match {
+ case PolyType(tparams, restpe) => Some((tparams, restpe))
+ case _ => Some((Nil, tpe))
+ }
+ }
+ def genPolyType(params: List[Symbol], tpe: Type): Type = GenPolyType(params, tpe)
+
+ @deprecated("use genPolyType(...) instead", "2.10.0")
+ def polyType(params: List[Symbol], tpe: Type): Type = GenPolyType(params, tpe)
+
+ /** A creator for anonymous type functions, where the symbol for the type function still needs to be created.
+ *
+ * TODO:
+ * type params of anonymous type functions, which currently can only arise from normalising type aliases, are owned by the type alias of which they are the eta-expansion
+ * higher-order subtyping expects eta-expansion of type constructors that arise from a class; here, the type params are owned by that class, but is that the right thing to do?
+ */
+ def typeFunAnon(tps: List[Symbol], body: Type): Type = typeFun(tps, body)
+
+ /** A creator for a type functions, assuming the type parameters tps already have the right owner. */
+ def typeFun(tps: List[Symbol], body: Type): Type = PolyType(tps, body)
+
+ /** A creator for existential types. This generates:
+ *
+ * tpe1 where { tparams }
+ *
+ * where `tpe1` is the result of extrapolating `tpe` with respect to `tparams`.
+ * Extrapolating means that type variables in `tparams` occurring
+ * in covariant positions are replaced by upper bounds, (minus any
+ * SingletonClass markers), type variables in `tparams` occurring in
+ * contravariant positions are replaced by upper bounds, provided the
+ * resulting type is legal with regard to stability, and does not contain any type
+ * variable in `tparams`.
+ *
+ * The abstraction drops all type parameters that are not directly or
+ * indirectly referenced by type `tpe1`. If there are no remaining type
+ * parameters, simply returns result type `tpe`.
+ */
+ def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type =
+ if (tparams.isEmpty) tpe0
+ else {
+ val tpe = normalizeAliases(tpe0)
+ val tpe1 = new ExistentialExtrapolation(tparams) extrapolate tpe
+ var tparams0 = tparams
+ var tparams1 = tparams0 filter tpe1.contains
+
+ // fixpoint: keep params referenced by tpe1 or (transitively) by kept params
+ while (tparams1 != tparams0) {
+ tparams0 = tparams1
+ tparams1 = tparams filter { p =>
+ tparams1 exists { p1 => p1 == p || (p1.info contains p) }
+ }
+ }
+ newExistentialType(tparams1, tpe1)
+ }
+
+ /** Normalize any type aliases within this type (@see Type#normalize).
+ * Note that this depends very much on the call to "normalize", not "dealias",
+ * so it no longer carries the too-stealthy name "deAlias".
+ */
+ object normalizeAliases extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(_, sym, _) if sym.isAliasType =>
+ def msg = if (tp.isHigherKinded) s"Normalizing type alias function $tp" else s"Dealiasing type alias $tp"
+ mapOver(logResult(msg)(tp.normalize))
+ case _ => mapOver(tp)
+ }
+ }
+
+ /** Remove any occurrence of type <singleton> from this type and its parents */
+ object dropSingletonType extends TypeMap {
+ def apply(tp: Type): Type = {
+ tp match {
+ case TypeRef(_, SingletonClass, _) =>
+ AnyClass.tpe
+ case tp1 @ RefinedType(parents, decls) =>
+ // strip Singleton parents; collapse to Any or the sole remaining parent
+ parents filter (_.typeSymbol != SingletonClass) match {
+ case Nil => AnyClass.tpe
+ case p :: Nil if decls.isEmpty => mapOver(p)
+ case ps => mapOver(copyRefinedType(tp1, ps, decls))
+ }
+ case tp1 =>
+ mapOver(tp1)
+ }
+ }
+ }
+
+ /** Substitutes the empty scope for any non-empty decls in the type. */
+ object dropAllRefinements extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case rt @ RefinedType(parents, decls) if !decls.isEmpty =>
+ mapOver(copyRefinedType(rt, parents, EmptyScope))
+ case ClassInfoType(parents, decls, clazz) if !decls.isEmpty =>
+ mapOver(ClassInfoType(parents, EmptyScope, clazz))
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ /** Type with all top-level occurrences of abstract types replaced by their bounds */
+ def abstractTypesToBounds(tp: Type): Type = tp match { // @M don't normalize here (compiler loops on pos/bug1090.scala )
+ case TypeRef(_, sym, _) if sym.isAbstractType =>
+ abstractTypesToBounds(tp.bounds.hi)
+ case TypeRef(_, sym, _) if sym.isAliasType =>
+ abstractTypesToBounds(tp.normalize)
+ case rtp @ RefinedType(parents, decls) =>
+ copyRefinedType(rtp, parents mapConserve abstractTypesToBounds, decls)
+ case AnnotatedType(_, underlying, _) =>
+ abstractTypesToBounds(underlying)
+ case _ =>
+ tp
+ }
+
+ // Set to true for A* => Seq[A]
+ // (And it will only rewrite A* in method result types.)
+ // This is the pre-existing behavior.
+ // Or false for Seq[A] => Seq[A]
+ // (It will rewrite A* everywhere but method parameters.)
+ // This is the specified behavior.
+ protected def etaExpandKeepsStar = false
+
+ /** Turn any T* types into Seq[T] except when
+ * in method parameter position.
+ */
+ object dropRepeatedParamType extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case MethodType(params, restpe) =>
+ // Not mapping over params
+ val restpe1 = apply(restpe)
+ if (restpe eq restpe1) tp
+ else MethodType(params, restpe1)
+ case TypeRef(_, RepeatedParamClass, arg :: Nil) =>
+ seqType(arg)
+ case _ =>
+ // when etaExpandKeepsStar, recursion stops here (see comment above)
+ if (etaExpandKeepsStar) tp else mapOver(tp)
+ }
+ }
+
+ // Converts bound symbol references into De Bruijn indices (level = binder
+ // depth from innermost, idx = position in that binder's parameter list).
+ // Inverse of fromDeBruijn below; used for serialization.
+ object toDeBruijn extends TypeMap {
+ private var paramStack: List[List[Symbol]] = Nil
+ def mkDebruijnBinder(params: List[Symbol], restpe: Type) = {
+ paramStack = params :: paramStack
+ try {
+ DeBruijnBinder(params map (_.name), params map (p => this(p.info)), this(restpe))
+ } finally paramStack = paramStack.tail
+ }
+ def apply(tp: Type): Type = tp match {
+ case PolyType(tparams, restpe) =>
+ mkDebruijnBinder(tparams, restpe)
+ case MethodType(params, restpe) =>
+ mkDebruijnBinder(params, restpe)
+ case TypeRef(NoPrefix, sym, args) =>
+ val level = paramStack indexWhere (_ contains sym)
+ if (level < 0) mapOver(tp)
+ else DeBruijnIndex(level, paramStack(level) indexOf sym, args mapConserve this)
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ // Rebuilds PolyType/MethodType structure from De Bruijn form, creating
+ // fresh parameter symbols owned by `owner`. Inverse of toDeBruijn.
+ def fromDeBruijn(owner: Symbol) = new TypeMap {
+ private var paramStack: List[List[Symbol]] = Nil
+ def apply(tp: Type): Type = tp match {
+ case DeBruijnBinder(pnames, ptypes, restpe) =>
+ // the binder's first name decides whether it encoded a poly or method type
+ val isType = pnames.head.isTypeName
+ val newParams = for (name <- pnames) yield
+ if (isType) owner.newTypeParameter(name.toTypeName)
+ else owner.newValueParameter(name.toTermName)
+ paramStack = newParams :: paramStack
+ try {
+ foreach2(newParams, ptypes)((p, t) => p setInfo this(t))
+ val restpe1 = this(restpe)
+ if (isType) PolyType(newParams, restpe1)
+ else MethodType(newParams, restpe1)
+ } finally paramStack = paramStack.tail
+ case DeBruijnIndex(level, idx, args) =>
+ TypeRef(NoPrefix, paramStack(level)(idx), args map this)
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+// Hash consing --------------------------------------------------------------
+
+ private val initialUniquesCapacity = 4096
+ private var uniques: util.WeakHashSet[Type] = _
+ private var uniqueRunId = NoRunId
+
+ // Hash-cons `tp`: return the canonical equal instance, resetting the weak
+ // set at the start of each compiler run so stale types can be collected.
+ protected def unique[T <: Type](tp: T): T = {
+ if (Statistics.canEnable) Statistics.incCounter(rawTypeCount)
+ if (uniqueRunId != currentRunId) {
+ uniques = util.WeakHashSet[Type](initialUniquesCapacity)
+ perRunCaches.recordCache(uniques)
+ uniqueRunId = currentRunId
+ }
+ (uniques findEntryOrUpdate tp).asInstanceOf[T]
+ }
+
+// Helper Classes ---------------------------------------------------------
+
+ /** @PP: Unable to see why these apparently constant types should need vals
+ * in every TypeConstraint, I lifted them out.
+ */
+ private lazy val numericLoBound = IntClass.tpe
+ private lazy val numericHiBound = intersectionType(List(ByteClass.tpe, CharClass.tpe), ScalaPackageClass)
+
+ /** A class expressing upper and lower bounds constraints of type variables,
+ * as well as their instantiations.
+ */
+ class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type, avoidWidening0: Boolean = false) {
+ def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
+ def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi))
+ def this() = this(List(), List())
+
+ /* Syncnote: Type constraints are assumed to be used from only one
+ * thread. They are not exposed in api.Types and are used only locally
+ * in operations that are exposed from types. Hence, no syncing of any
+ * variables should be necessary.
+ */
+
+ /** Guard these lists against AnyClass and NothingClass appearing,
+ * else loBounds.isEmpty will have different results for an empty
+ * constraint and one with Nothing as a lower bound. [Actually
+ * guarding addLoBound/addHiBound somehow broke raw types so it
+ * only guards against being created with them.]
+ */
+ private var lobounds = lo0 filterNot typeIsNothing
+ private var hibounds = hi0 filterNot typeIsAny
+ private var numlo = numlo0 // numeric lower bound, tracked separately (NoType if none)
+ private var numhi = numhi0 // numeric upper bound, tracked separately (NoType if none)
+ private var avoidWidening = avoidWidening0
+
+ def loBounds: List[Type] = if (numlo == NoType) lobounds else numlo :: lobounds
+ def hiBounds: List[Type] = if (numhi == NoType) hibounds else numhi :: hibounds
+ def avoidWiden: Boolean = avoidWidening
+
+ def addLoBound(tp: Type, isNumericBound: Boolean = false) {
+ // For some reason which is still a bit fuzzy, we must let Nothing through as
+ // a lower bound despite the fact that Nothing is always a lower bound. My current
+ // supposition is that the side-effecting type constraint accumulation mechanism
+ // depends on these subtype tests being performed to make forward progress when
+ // there are mutally recursive type vars.
+ // See pos/t6367 and pos/t6499 for the competing test cases.
+ val mustConsider = tp.typeSymbol match {
+ case NothingClass => true
+ case _ => !(lobounds contains tp)
+ }
+ if (mustConsider) {
+ if (isNumericBound && isNumericValueType(tp)) {
+ if (numlo == NoType || isNumericSubType(numlo, tp))
+ numlo = tp
+ else if (!isNumericSubType(tp, numlo))
+ numlo = numericLoBound
+ }
+ else lobounds ::= tp
+ }
+ }
+
+ def checkWidening(tp: Type) {
+ if(tp.isStable) avoidWidening = true
+ else tp match {
+ case HasTypeMember(_, _) => avoidWidening = true
+ case _ =>
+ }
+ }
+
+ def addHiBound(tp: Type, isNumericBound: Boolean = false) {
+ // My current test case only demonstrates the need to let Nothing through as
+ // a lower bound, but I suspect the situation is symmetrical.
+ val mustConsider = tp.typeSymbol match {
+ case AnyClass => true
+ case _ => !(hibounds contains tp)
+ }
+ if (mustConsider) {
+ checkWidening(tp)
+ if (isNumericBound && isNumericValueType(tp)) {
+ if (numhi == NoType || isNumericSubType(tp, numhi))
+ numhi = tp
+ else if (!isNumericSubType(numhi, tp))
+ numhi = numericHiBound
+ }
+ else hibounds ::= tp
+ }
+ }
+
+ def isWithinBounds(tp: Type): Boolean =
+ lobounds.forall(_ <:< tp) &&
+ hibounds.forall(tp <:< _) &&
+ (numlo == NoType || (numlo weak_<:< tp)) &&
+ (numhi == NoType || (tp weak_<:< numhi))
+
+ var inst: Type = NoType // @M reduce visibility?
+
+ def instValid = (inst ne null) && (inst ne NoType)
+
+ def cloneInternal = {
+ val tc = new TypeConstraint(lobounds, hibounds, numlo, numhi, avoidWidening)
+ tc.inst = inst
+ tc
+ }
+
+ override def toString = {
+ val boundsStr = {
+ val lo = loBounds filterNot typeIsNothing
+ val hi = hiBounds filterNot typeIsAny
+ val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")"))
+ val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")"))
+
+ lostr ++ histr mkString ("[", " | ", "]")
+ }
+ if (inst eq NoType) boundsStr
+ else boundsStr + " _= " + inst.safeToString
+ }
+ }
+
+ /** Recursively strips selected wrapper types until a non-wrapper remains.
+ * Each boolean flag enables stripping of the corresponding wrapper kind:
+ * polytypes, existentials, annotated types, and nullary method types.
+ */
+ class TypeUnwrapper(poly: Boolean, existential: Boolean, annotated: Boolean, nullary: Boolean) extends (Type => Type) {
+ def apply(tp: Type): Type = {
+ val inner = tp match {
+ case PolyType(_, underlying) if poly => Some(underlying)
+ case NullaryMethodType(underlying) if nullary => Some(underlying)
+ case ExistentialType(_, underlying) if existential => Some(underlying)
+ case AnnotatedType(_, underlying, _) if annotated => Some(underlying)
+ case _ => None
+ }
+ inner match {
+ case Some(underlying) => apply(underlying)
+ case None => tp
+ }
+ }
+ }
+ // Like TypeUnwrapper, but normalizes the type first so aliases are
+ // expanded before unwrapping. Always strips polytypes and annotations,
+ // never nullary method types; existential stripping is configurable.
+ class ClassUnwrapper(existential: Boolean) extends TypeUnwrapper(poly = true, existential, annotated = true, nullary = false) {
+ override def apply(tp: Type) = super.apply(tp.normalize)
+ }
+
+ // Preconfigured unwrappers for common queries.
+ object unwrapToClass extends ClassUnwrapper(existential = true) { }
+ object unwrapToStableClass extends ClassUnwrapper(existential = false) { }
+ object unwrapWrapperTypes extends TypeUnwrapper(true, true, true, true) { }
+
+ // A TypeMap mixin that drops annotations not satisfying `keepAnnotation`,
+ // replacing them with UnmappableAnnotation (filtered out downstream).
+ trait AnnotationFilter extends TypeMap {
+ def keepAnnotation(annot: AnnotationInfo): Boolean
+
+ override def mapOver(annot: AnnotationInfo) =
+ if (keepAnnotation(annot)) super.mapOver(annot)
+ else UnmappableAnnotation
+ }
+
+ // AnnotationFilter retaining only annotations that extend TypeConstraint
+ // (i.e. those matching TypeConstraintClass); all others are dropped.
+ trait KeepOnlyTypeConstraints extends AnnotationFilter {
+ // filter keeps only type constraint annotations
+ def keepAnnotation(annot: AnnotationInfo) = annot matches TypeConstraintClass
+ }
+
+ /** A TypeMap that tracks variance while recursing: `variance` is mutated
+ * around each sub-traversal (flipped in contravariant positions, zeroed
+ * in invariant ones) and restored afterwards. Mix in when the map's
+ * result should depend on the variance of the position being mapped.
+ */
+ trait VariantTypeMap extends TypeMap {
+ // Current variance: 1 = covariant, -1 = contravariant, 0 = invariant.
+ private[this] var _variance = 1
+
+ override def variance = _variance
+ def variance_=(x: Int) = _variance = x
+
+ override protected def noChangeToSymbols(origSyms: List[Symbol]) =
+ //OPT inline from forall to save on #closures
+ origSyms match {
+ case sym :: rest =>
+ val v = variance
+ // alias types are invariant positions for their own infos
+ if (sym.isAliasType) variance = 0
+ val result = this(sym.info)
+ variance = v
+ (result eq sym.info) && noChangeToSymbols(rest)
+ case _ =>
+ true
+ }
+
+ // Maps each arg under the variance dictated by its type parameter.
+ override protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
+ map2Conserve(args, tparams) { (arg, tparam) =>
+ val v = variance
+ if (tparam.isContravariant) variance = -variance
+ else if (!tparam.isCovariant) variance = 0
+ val arg1 = this(arg)
+ variance = v
+ arg1
+ }
+
+ /** Map this function over given type */
+ override def mapOver(tp: Type): Type = tp match {
+ case MethodType(params, result) =>
+ // parameters are in contravariant position
+ variance = -variance
+ val params1 = mapOver(params)
+ variance = -variance
+ val result1 = this(result)
+ if ((params1 eq params) && (result1 eq result)) tp
+ else copyMethodType(tp, params1, result1.substSym(params, params1))
+ case PolyType(tparams, result) =>
+ variance = -variance
+ val tparams1 = mapOver(tparams)
+ variance = -variance
+ var result1 = this(result) // NOTE(review): never reassigned; could be a val
+ if ((tparams1 eq tparams) && (result1 eq result)) tp
+ else PolyType(tparams1, result1.substSym(tparams, tparams1))
+ case TypeBounds(lo, hi) =>
+ // lower bound is a contravariant position
+ variance = -variance
+ val lo1 = this(lo)
+ variance = -variance
+ val hi1 = this(hi)
+ if ((lo1 eq lo) && (hi1 eq hi)) tp
+ else TypeBounds(lo1, hi1)
+ case tr @ TypeRef(pre, sym, args) =>
+ val pre1 = this(pre)
+ val args1 =
+ if (args.isEmpty)
+ args
+ else if (variance == 0) // fast & safe path: don't need to look at typeparams
+ args mapConserve this
+ else {
+ val tparams = sym.typeParams
+ if (tparams.isEmpty) args
+ else mapOverArgs(args, tparams)
+ }
+ if ((pre1 eq pre) && (args1 eq args)) tp
+ else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1)
+ case _ =>
+ super.mapOver(tp)
+ }
+ }
+
+ // todo. move these into scala.reflect.api
+
+ /** A prototype for mapping a function over all possible types.
+ * Subclasses implement `apply`; `mapOver` performs the structural
+ * recursion, returning `tp` itself (reference-identical) whenever no
+ * component changed, so identity is preserved for unchanged types.
+ */
+ abstract class TypeMap extends (Type => Type) {
+ def apply(tp: Type): Type
+
+ /** Mix in VariantTypeMap if you want variances to be significant.
+ */
+ def variance = 0
+
+ /** Map this function over given type */
+ def mapOver(tp: Type): Type = tp match {
+ case tr @ TypeRef(pre, sym, args) =>
+ val pre1 = this(pre)
+ val args1 = args mapConserve this
+ if ((pre1 eq pre) && (args1 eq args)) tp
+ else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1)
+ case ThisType(_) => tp
+ case SingleType(pre, sym) =>
+ if (sym.isPackageClass) tp // short path
+ else {
+ val pre1 = this(pre)
+ if (pre1 eq pre) tp
+ else singleType(pre1, sym)
+ }
+ case MethodType(params, result) =>
+ val params1 = mapOver(params)
+ val result1 = this(result)
+ if ((params1 eq params) && (result1 eq result)) tp
+ else copyMethodType(tp, params1, result1.substSym(params, params1))
+ case PolyType(tparams, result) =>
+ val tparams1 = mapOver(tparams)
+ val result1 = this(result)
+ if ((tparams1 eq tparams) && (result1 eq result)) tp
+ else PolyType(tparams1, result1.substSym(tparams, tparams1))
+ case NullaryMethodType(result) =>
+ val result1 = this(result)
+ if (result1 eq result) tp
+ else NullaryMethodType(result1)
+ case ConstantType(_) => tp
+ case SuperType(thistp, supertp) =>
+ val thistp1 = this(thistp)
+ val supertp1 = this(supertp)
+ if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp
+ else SuperType(thistp1, supertp1)
+ case TypeBounds(lo, hi) =>
+ val lo1 = this(lo)
+ val hi1 = this(hi)
+ if ((lo1 eq lo) && (hi1 eq hi)) tp
+ else TypeBounds(lo1, hi1)
+ case BoundedWildcardType(bounds) =>
+ val bounds1 = this(bounds)
+ if (bounds1 eq bounds) tp
+ else BoundedWildcardType(bounds1.asInstanceOf[TypeBounds])
+ case rtp @ RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve this
+ val decls1 = mapOver(decls)
+ //if ((parents1 eq parents) && (decls1 eq decls)) tp
+ //else refinementOfClass(tp.typeSymbol, parents1, decls1)
+ copyRefinedType(rtp, parents1, decls1)
+ case ExistentialType(tparams, result) =>
+ val tparams1 = mapOver(tparams)
+ val result1 = this(result)
+ if ((tparams1 eq tparams) && (result1 eq result)) tp
+ else newExistentialType(tparams1, result1.substSym(tparams, tparams1))
+ case OverloadedType(pre, alts) =>
+ val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else this(pre)
+ if (pre1 eq pre) tp
+ else OverloadedType(pre1, alts)
+ case AntiPolyType(pre, args) =>
+ val pre1 = this(pre)
+ val args1 = args mapConserve (this)
+ if ((pre1 eq pre) && (args1 eq args)) tp
+ else AntiPolyType(pre1, args1)
+ // NB: `tv @` was rendered as `tv at` by the mail archive's address
+ // mangling; restored here to valid pattern-binder syntax.
+ case tv @ TypeVar(_, constr) =>
+ if (constr.instValid) this(constr.inst)
+ else tv.applyArgs(mapOverArgs(tv.typeArgs, tv.params)) //@M !args.isEmpty implies !typeParams.isEmpty
+ case NotNullType(tp) =>
+ val tp1 = this(tp)
+ if (tp1 eq tp) tp
+ else NotNullType(tp1)
+ case AnnotatedType(annots, atp, selfsym) =>
+ val annots1 = mapOverAnnotations(annots)
+ val atp1 = this(atp)
+ if ((annots1 eq annots) && (atp1 eq atp)) tp
+ else if (annots1.isEmpty) atp1
+ else AnnotatedType(annots1, atp1, selfsym)
+ case DeBruijnIndex(shift, idx, args) =>
+ val args1 = args mapConserve this
+ if (args1 eq args) tp
+ else DeBruijnIndex(shift, idx, args1)
+/*
+ case ErrorType => tp
+ case WildcardType => tp
+ case NoType => tp
+ case NoPrefix => tp
+ case ErasedSingleType(sym) => tp
+*/
+ case _ =>
+ tp
+ // throw new Error("mapOver inapplicable for " + tp);
+ }
+
+ // Default: map args without regard to variance (see VariantTypeMap).
+ protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
+ args mapConserve this
+
+ /** Called by mapOver to determine whether the original symbols can
+ * be returned, or whether they must be cloned. Overridden in VariantTypeMap.
+ */
+ protected def noChangeToSymbols(origSyms: List[Symbol]) =
+ origSyms forall (sym => sym.info eq this(sym.info))
+
+ /** Map this function over given scope */
+ def mapOver(scope: Scope): Scope = {
+ val elems = scope.toList
+ val elems1 = mapOver(elems)
+ if (elems1 eq elems) scope
+ else newScopeWith(elems1: _*)
+ }
+
+ /** Map this function over given list of symbols */
+ def mapOver(origSyms: List[Symbol]): List[Symbol] = {
+ // fast path in case nothing changes due to map
+ if (noChangeToSymbols(origSyms)) origSyms
+ // map is not the identity --> do cloning properly
+ else cloneSymbolsAndModify(origSyms, TypeMap.this)
+ }
+
+ /** Map over an annotation: its type and argument trees. Returns the
+ * original if unchanged, UnmappableAnnotation if any arg was unmappable.
+ */
+ def mapOver(annot: AnnotationInfo): AnnotationInfo = {
+ val AnnotationInfo(atp, args, assocs) = annot
+ val atp1 = mapOver(atp)
+ val args1 = mapOverAnnotArgs(args)
+ // there is no need to rewrite assocs, as they are constants
+
+ if ((args eq args1) && (atp eq atp1)) annot
+ else if (args1.isEmpty && args.nonEmpty) UnmappableAnnotation // some annotation arg was unmappable
+ else AnnotationInfo(atp1, args1, assocs) setPos annot.pos
+ }
+
+ def mapOverAnnotations(annots: List[AnnotationInfo]): List[AnnotationInfo] = {
+ val annots1 = annots mapConserve mapOver
+ if (annots1 eq annots) annots
+ else annots1 filterNot (_ eq UnmappableAnnotation)
+ }
+
+ /** Map over a set of annotation arguments. If any
+ * of the arguments cannot be mapped, then return Nil. */
+ def mapOverAnnotArgs(args: List[Tree]): List[Tree] = {
+ val args1 = args mapConserve mapOver
+ if (args1 contains UnmappableTree) Nil
+ else args1
+ }
+
+ def mapOver(tree: Tree): Tree =
+ mapOver(tree, () => return UnmappableTree)
+
+ /** Map a tree that is part of an annotation argument.
+ * If the tree cannot be mapped, then invoke giveup().
+ * The default is to transform the tree with
+ * TypeMapTransformer.
+ */
+ def mapOver(tree: Tree, giveup: ()=>Nothing): Tree =
+ (new TypeMapTransformer).transform(tree)
+
+ /** This transformer leaves the tree alone except to remap
+ * its types. */
+ class TypeMapTransformer extends Transformer {
+ override def transform(tree: Tree) = {
+ val tree1 = super.transform(tree)
+ val tpe1 = TypeMap.this(tree1.tpe)
+ if ((tree eq tree1) && (tree.tpe eq tpe1))
+ tree
+ else
+ tree1.shallowDuplicate.setType(tpe1)
+ }
+ }
+ }
+
+ // A TypeMap used purely for its side effects: `apply` traverses and
+ // returns its argument unchanged.
+ abstract class TypeTraverser extends TypeMap {
+ def traverse(tp: Type): Unit
+ def apply(tp: Type): Type = { traverse(tp); tp }
+ }
+
+ // A traverser that additionally accumulates a resettable result value.
+ abstract class TypeTraverserWithResult[T] extends TypeTraverser {
+ def result: T
+ def clear(): Unit
+ }
+
+ // A traverser that folds information about a type into `result`.
+ // `collect` resets `result` to `initial` before each traversal, so a
+ // single collector instance can be reused (single-threaded only).
+ abstract class TypeCollector[T](initial: T) extends TypeTraverser {
+ var result: T = _
+ def collect(tp: Type) = {
+ result = initial
+ traverse(tp)
+ result
+ }
+ }
+
+ /** A collector that tests for existential types appearing at given variance in a type
+ * @PP: Commenting out due to not being used anywhere.
+ */
+ // class ContainsVariantExistentialCollector(v: Int) extends TypeCollector(false) with VariantTypeMap {
+ // variance = v
+ //
+ // def traverse(tp: Type) = tp match {
+ // case ExistentialType(_, _) if (variance == v) => result = true
+ // case _ => mapOver(tp)
+ // }
+ // }
+ //
+ // val containsCovariantExistentialCollector = new ContainsVariantExistentialCollector(1)
+ // val containsContravariantExistentialCollector = new ContainsVariantExistentialCollector(-1)
+
+ /** Creates one fresh existential symbol (named ?0, ?1, ...) per type
+ * parameter, bounded like the corresponding tparam, with all
+ * references to the tparams rewritten to the new existentials.
+ */
+ def typeParamsToExistentials(clazz: Symbol, tparams: List[Symbol]): List[Symbol] = {
+ val eparams = mapWithIndex(tparams)((tparam, i) =>
+ clazz.newExistential(newTypeName("?"+i), clazz.pos) setInfo tparam.info.bounds)
+
+ eparams map (_ substInfo (tparams, eparams))
+ }
+ def typeParamsToExistentials(clazz: Symbol): List[Symbol] =
+ typeParamsToExistentials(clazz, clazz.typeParams)
+
+ // note: it's important to write the two tests in this order,
+ // as only typeParams forces the classfile to be read. See #400
+ private def isRawIfWithoutArgs(sym: Symbol) =
+ sym.isClass && sym.typeParams.nonEmpty && sym.isJavaDefined
+
+ // A raw type is a parameterized Java class applied to no arguments;
+ // raw types only exist before erasure.
+ def isRaw(sym: Symbol, args: List[Type]) =
+ !phase.erasedTypes && isRawIfWithoutArgs(sym) && args.isEmpty
+
+ /** Is type tp a ''raw type''? */
+ def isRawType(tp: Type) = tp match {
+ case TypeRef(_, sym, args) => isRaw(sym, args)
+ case _ => false
+ }
+
+ /** The raw to existential map converts a ''raw type'' to an existential type.
+ * It is necessary because we might have read a raw type of a
+ * parameterized Java class from a class file. At the time we read the type
+ * the corresponding class file might still not be read, so we do not
+ * know what the type parameters of the type are. Therefore
+ * the conversion of raw types to existential types might not have taken place
+ * in ClassfileParser.sigToType (where it is usually done).
+ */
+ // A def, not a val: each call returns a fresh map so the `expanded`
+ // recursion guard below is never shared between traversals.
+ def rawToExistential = new TypeMap {
+ private var expanded = immutable.Set[Symbol]()
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) =>
+ // cut self-referential expansion off at AnyRef
+ if (expanded contains sym) AnyRefClass.tpe
+ else try {
+ expanded += sym
+ val eparams = mapOver(typeParamsToExistentials(sym))
+ existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))
+ } finally {
+ expanded -= sym
+ }
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ /** Used by existentialAbstraction.
+ * Replaces references to the given `tparams` by their bounds (upper
+ * bound in covariant position, lower bound in contravariant position),
+ * but only when the tparam occurs exactly once and the replacement
+ * does not itself mention any of the tparams.
+ */
+ class ExistentialExtrapolation(tparams: List[Symbol]) extends VariantTypeMap {
+ // occurrences of each tparam in the type being extrapolated
+ private val occurCount = mutable.HashMap[Symbol, Int]()
+ private def countOccs(tp: Type) = {
+ tp foreach {
+ case TypeRef(_, sym, _) =>
+ if (tparams contains sym)
+ occurCount(sym) += 1
+ case _ => ()
+ }
+ }
+ def extrapolate(tpe: Type): Type = {
+ tparams foreach (t => occurCount(t) = 0)
+ countOccs(tpe)
+ // occurrences inside the tparams' own infos also count
+ for (tparam <- tparams)
+ countOccs(tparam.info)
+
+ apply(tpe)
+ }
+
+ def apply(tp: Type): Type = {
+ val tp1 = mapOver(tp)
+ if (variance == 0) tp1 // invariant position: never extrapolate
+ else tp1 match {
+ case TypeRef(pre, sym, args) if tparams contains sym =>
+ val repl = if (variance == 1) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo
+ //println("eliminate "+sym+"/"+repl+"/"+occurCount(sym)+"/"+(tparams exists (repl.contains)))//DEBUG
+ if (!repl.typeSymbol.isBottomClass && occurCount(sym) == 1 && !(tparams exists (repl.contains)))
+ repl
+ else tp1
+ case _ =>
+ tp1
+ }
+ }
+ override def mapOver(tp: Type): Type = tp match {
+ case SingleType(pre, sym) =>
+ if (sym.isPackageClass) tp // short path
+ else {
+ val pre1 = this(pre)
+ // only rebuild if the new prefix is still stable
+ if ((pre1 eq pre) || !pre1.isStable) tp
+ else singleType(pre1, sym)
+ }
+ case _ => super.mapOver(tp)
+ }
+
+ // Do not discard the types of existential ident's. The
+ // symbol of the Ident itself cannot be listed in the
+ // existential's parameters, so the resulting existential
+ // type would be ill-formed.
+ override def mapOver(tree: Tree) = tree match {
+ case Ident(_) if tree.tpe.isStable => tree
+ case _ => super.mapOver(tree)
+ }
+ }
+
+ def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(List(hi, SingletonClass.tpe)))
+
+ /** Might the given symbol be important when calculating the prefix
+ * of a type? When tp.asSeenFrom(pre, clazz) is called on `tp`,
+ * the result will be `tp` unchanged if `pre` is trivial and `clazz`
+ * is a symbol such that isPossiblePrefix(clazz) == false.
+ */
+ def isPossiblePrefix(clazz: Symbol) = clazz.isClass && !clazz.isPackageClass
+
+ // True when there is nothing to rewrite: no real prefix, or `clazz`
+ // cannot appear as a prefix at all.
+ private def skipPrefixOf(pre: Type, clazz: Symbol) = (
+ (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
+ )
+
+ /** A map to compute the asSeenFrom method: rewrites ThisTypes and class
+ * type parameters in a type as they appear from prefix `pre` inside
+ * `clazz`. Unstable prefixes are captured as fresh existentials
+ * (accumulated in `capturedParams`); skolems bound by existentials that
+ * must be opened during the rewrite land in `capturedSkolems`.
+ */
+ class AsSeenFromMap(pre: Type, clazz: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
+ var capturedSkolems: List[Symbol] = List()
+ var capturedParams: List[Symbol] = List()
+
+ override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
+ object annotationArgRewriter extends TypeMapTransformer {
+ // `This` can only be rewritten when pre is a stable subtype view;
+ // otherwise bail out of the whole annotation-arg rewrite.
+ private def canRewriteThis(sym: Symbol) = (
+ (sym isNonBottomSubClass clazz)
+ && (pre.widen.typeSymbol isNonBottomSubClass sym)
+ && (pre.isStable || giveup())
+ )
+ // what symbol should really be used?
+ private def newTermSym() = {
+ val p = pre.typeSymbol
+ p.owner.newValue(p.name.toTermName, p.pos) setInfo pre
+ }
+ /** Rewrite `This` trees in annotation argument trees */
+ override def transform(tree: Tree): Tree = super.transform(tree) match {
+ case This(_) if canRewriteThis(tree.symbol) => gen.mkAttributedQualifier(pre, newTermSym())
+ case tree => tree
+ }
+ }
+ annotationArgRewriter.transform(tree)
+ }
+
+ // Returns (creating on first use) an existential standing for the
+ // unstable prefix `pre`, one per owning clazz.
+ def stabilize(pre: Type, clazz: Symbol): Type = {
+ capturedParams find (_.owner == clazz) match {
+ case Some(qvar) => qvar.tpe
+ case _ =>
+ val qvar = clazz freshExistential nme.SINGLETON_SUFFIX setInfo singletonBounds(pre)
+ capturedParams ::= qvar
+ qvar.tpe
+ }
+ }
+
+ def apply(tp: Type): Type =
+ tp match {
+ case ThisType(sym) =>
+ // Walk outward from (pre, clazz) until we reach the class whose
+ // This is being rewritten, narrowing pre at each step.
+ def toPrefix(pre: Type, clazz: Symbol): Type =
+ if (skipPrefixOf(pre, clazz)) tp
+ else if ((sym isNonBottomSubClass clazz) &&
+ (pre.widen.typeSymbol isNonBottomSubClass sym)) {
+ val pre1 = pre match {
+ case SuperType(thistp, _) => thistp
+ case _ => pre
+ }
+ if (!(pre1.isStable ||
+ pre1.typeSymbol.isPackageClass ||
+ pre1.typeSymbol.isModuleClass && pre1.typeSymbol.isStatic)) {
+ stabilize(pre1, sym)
+ } else {
+ pre1
+ }
+ } else {
+ toPrefix(pre.baseType(clazz).prefix, clazz.owner)
+ }
+ toPrefix(pre, clazz)
+ case SingleType(pre, sym) =>
+ if (sym.isPackageClass) tp // short path
+ else {
+ val pre1 = this(pre)
+ if (pre1 eq pre) tp
+ else if (pre1.isStable) singleType(pre1, sym)
+ else pre1.memberType(sym).resultType //todo: this should be rolled into existential abstraction
+ }
+ // AM: Martin, is this description accurate?
+ // walk the owner chain of `clazz` (the original argument to asSeenFrom) until we find the type param's owner (while rewriting pre as we crawl up the owner chain)
+ // once we're at the owner, extract the information that pre encodes about the type param,
+ // by minimally subsuming pre to the type instance of the class that owns the type param,
+ // the type we're looking for is the type instance's type argument at the position corresponding to the type parameter
+ // optimisation: skip this type parameter if it's not owned by a class, as those params are not influenced by the prefix through which they are seen
+ // (concretely: type params of anonymous type functions, which currently can only arise from normalising type aliases, are owned by the type alias of which they are the eta-expansion)
+ // (skolems also aren't affected: they are ruled out by the isTypeParameter check)
+ case TypeRef(prefix, sym, args) if (sym.isTypeParameter && sym.owner.isClass) =>
+ def toInstance(pre: Type, clazz: Symbol): Type =
+ if (skipPrefixOf(pre, clazz)) mapOver(tp)
+ //@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary
+ else {
+ def throwError = abort("" + tp + sym.locationString + " cannot be instantiated from " + pre.widen)
+
+ val symclazz = sym.owner
+ if (symclazz == clazz && !pre.widen.isInstanceOf[TypeVar] && (pre.widen.typeSymbol isNonBottomSubClass symclazz)) {
+ // have to deconst because it may be a Class[T].
+ pre.baseType(symclazz).deconst match {
+ case TypeRef(_, basesym, baseargs) =>
+
+ // Finds the base-type argument corresponding to `sym`
+ // by walking params and args in lockstep.
+ def instParam(ps: List[Symbol], as: List[Type]): Type =
+ if (ps.isEmpty) {
+ if (forInteractive) {
+ val saved = settings.uniqid.value
+ try {
+ settings.uniqid.value = true
+ println("*** stale type parameter: " + tp + sym.locationString + " cannot be instantiated from " + pre.widen)
+ println("*** confused with params: " + sym + " in " + sym.owner + " not in " + ps + " of " + basesym)
+ println("*** stacktrace = ")
+ new Error().printStackTrace()
+ } finally settings.uniqid.value = saved
+ instParamRelaxed(basesym.typeParams, baseargs)
+ } else throwError
+ } else if (sym eq ps.head)
+ // @M! don't just replace the whole thing, might be followed by type application
+ appliedType(as.head, args mapConserve (this)) // @M: was as.head
+ else instParam(ps.tail, as.tail)
+
+ /** Relaxed version of instParams which matches on names not symbols.
+ * This is a last fallback in interactive mode because races in calls
+ * from the IDE to the compiler may in rare cases lead to symbols referring
+ * to type parameters that are no longer current.
+ */
+ def instParamRelaxed(ps: List[Symbol], as: List[Type]): Type =
+ if (ps.isEmpty) throwError
+ else if (sym.name == ps.head.name)
+ // @M! don't just replace the whole thing, might be followed by type application
+ appliedType(as.head, args mapConserve (this)) // @M: was as.head
+ else instParamRelaxed(ps.tail, as.tail)
+
+ //Console.println("instantiating " + sym + " from " + basesym + " with " + basesym.typeParams + " and " + baseargs+", pre = "+pre+", symclazz = "+symclazz);//DEBUG
+ if (sameLength(basesym.typeParams, baseargs))
+ instParam(basesym.typeParams, baseargs)
+ else
+ if (symclazz.tpe.parents exists typeIsErroneous)
+ ErrorType // don't be to overzealous with throwing exceptions, see #2641
+ else
+ throw new Error(
+ "something is wrong (wrong class file?): "+basesym+
+ " with type parameters "+
+ basesym.typeParams.map(_.name).mkString("[",",","]")+
+ " gets applied to arguments "+baseargs.mkString("[",",","]")+", phase = "+phase)
+ case ExistentialType(tparams, qtpe) =>
+ // open the existential and remember its skolems
+ capturedSkolems = capturedSkolems union tparams
+ toInstance(qtpe, clazz)
+ case t =>
+ throwError
+ }
+ } else toInstance(pre.baseType(clazz).prefix, clazz.owner)
+ }
+ toInstance(pre, clazz)
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ /** A base class to compute all substitutions: replaces references to the
+ * symbols in `from` by the corresponding values in `to` (symbols, types,
+ * ... depending on the subclass's `toType`). Bound symbols that collide
+ * with `from` are alpha-renamed first to avoid capture.
+ */
+ abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap {
+ assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to)
+
+ /** Are `sym` and `sym1` the same? Can be tuned by subclasses. */
+ protected def matches(sym: Symbol, sym1: Symbol): Boolean = sym eq sym1
+
+ /** Map target to type, can be tuned by subclasses */
+ protected def toType(fromtp: Type, tp: T): Type
+
+ // Alpha-rename binders (method params, poly params, existentials) by
+ // cloning them, so substitution cannot capture bound symbols.
+ protected def renameBoundSyms(tp: Type): Type = tp match {
+ case MethodType(ps, restp) =>
+ createFromClonedSymbols(ps, restp)((ps1, tp1) => copyMethodType(tp, ps1, renameBoundSyms(tp1)))
+ case PolyType(bs, restp) =>
+ createFromClonedSymbols(bs, restp)((ps1, tp1) => PolyType(ps1, renameBoundSyms(tp1)))
+ case ExistentialType(bs, restp) =>
+ createFromClonedSymbols(bs, restp)(newExistentialType)
+ case _ =>
+ tp
+ }
+
+ def apply(tp0: Type): Type = if (from.isEmpty) tp0 else {
+ @tailrec def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type =
+ if (from.isEmpty) tp
+ // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from))
+ else if (matches(from.head, sym)) toType(tp, to.head)
+ else subst(tp, sym, from.tail, to.tail)
+
+ val boundSyms = tp0.boundSyms
+ val tp1 = if (boundSyms.nonEmpty && (boundSyms exists from.contains)) renameBoundSyms(tp0) else tp0
+ val tp = mapOver(tp1)
+
+ tp match {
+ // @M
+ // 1) arguments must also be substituted (even when the "head" of the
+ // applied type has already been substituted)
+ // example: (subst RBound[RT] from [type RT,type RBound] to
+ // [type RT&,type RBound&]) = RBound&[RT&]
+ // 2) avoid loops (which occur because alpha-conversion is
+ // not performed properly imo)
+ // e.g. if in class Iterable[a] there is a new Iterable[(a,b)],
+ // we must replace the a in Iterable[a] by (a,b)
+ // (must not recurse --> loops)
+ // 3) replacing m by List in m[Int] should yield List[Int], not just List
+ case TypeRef(NoPrefix, sym, args) =>
+ appliedType(subst(tp, sym, from, to), args) // if args.isEmpty, appliedType is the identity
+ case SingleType(NoPrefix, sym) =>
+ subst(tp, sym, from, to)
+ case _ =>
+ tp
+ }
+ }
+ }
+
+ /** A map to implement the `substSym` method: symbol-for-symbol
+ * substitution in both types and trees.
+ */
+ class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) {
+ def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2))
+
+ protected def toType(fromtp: Type, sym: Symbol) = fromtp match {
+ case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args)
+ case SingleType(pre, _) => singleType(pre, sym)
+ }
+ // Unlike the base class, also substitutes symbols in types with a
+ // non-empty prefix (the base apply only handles NoPrefix cases).
+ override def apply(tp: Type): Type = if (from.isEmpty) tp else {
+ @tailrec def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol =
+ if (from.isEmpty) sym
+ // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from))
+ else if (matches(from.head, sym)) to.head
+ else subst(sym, from.tail, to.tail)
+ tp match {
+ case TypeRef(pre, sym, args) if pre ne NoPrefix =>
+ val newSym = subst(sym, from, to)
+ // mapOver takes care of subst'ing in args
+ mapOver ( if (sym eq newSym) tp else copyTypeRef(tp, pre, newSym, args) )
+ // assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams))
+ case SingleType(pre, sym) if pre ne NoPrefix =>
+ val newSym = subst(sym, from, to)
+ mapOver( if (sym eq newSym) tp else singleType(pre, newSym) )
+ case _ =>
+ super.apply(tp)
+ }
+ }
+
+ // Rewrites Ident/Select trees whose symbol is in `from` to refer to
+ // the corresponding symbol in `to` (copying the tree first).
+ object mapTreeSymbols extends TypeMapTransformer {
+ val strictCopy = newStrictTreeCopier
+
+ def termMapsTo(sym: Symbol) = from indexOf sym match {
+ case -1 => None
+ case idx => Some(to(idx))
+ }
+
+ // if tree.symbol is mapped to another symbol, passes the new symbol into the
+ // constructor `trans` and sets the symbol and the type on the resulting tree.
+ def transformIfMapped(tree: Tree)(trans: Symbol => Tree) = termMapsTo(tree.symbol) match {
+ case Some(toSym) => trans(toSym) setSymbol toSym setType tree.tpe
+ case None => tree
+ }
+
+ // changes trees which refer to one of the mapped symbols. trees are copied before attributes are modified.
+ override def transform(tree: Tree) = {
+ // super.transform maps symbol references in the types of `tree`. it also copies trees where necessary.
+ super.transform(tree) match {
+ case id @ Ident(_) =>
+ transformIfMapped(id)(toSym =>
+ strictCopy.Ident(id, toSym.name))
+
+ case sel @ Select(qual, name) =>
+ transformIfMapped(sel)(toSym =>
+ strictCopy.Select(sel, qual, toSym.name))
+
+ case tree => tree
+ }
+ }
+ }
+ override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
+ mapTreeSymbols.transform(tree)
+ }
+ }
+
+ /** A map to implement the `subst` method: replaces symbols in `from`
+ * by the corresponding types in `to`.
+ */
+ class SubstTypeMap(from: List[Symbol], to: List[Type])
+ extends SubstMap(from, to) {
+ protected def toType(fromtp: Type, tp: Type) = tp
+
+ // In trees, an Ident referring to a substituted symbol can only be
+ // retyped if the replacement type is stable; otherwise give up.
+ override def mapOver(tree: Tree, giveup: () => Nothing): Tree = {
+ object trans extends TypeMapTransformer {
+ override def transform(tree: Tree) = tree match {
+ case Ident(name) =>
+ from indexOf tree.symbol match {
+ case -1 => super.transform(tree)
+ case idx =>
+ val totpe = to(idx)
+ if (totpe.isStable) tree.duplicate setType totpe
+ else giveup()
+ }
+ case _ =>
+ super.transform(tree)
+ }
+ }
+ trans.transform(tree)
+ }
+ }
+
+ /** Implements the `substThis` method: every occurrence of
+ * `ThisType(from)` in a type is replaced by `to`; all other types are
+ * traversed structurally.
+ */
+ class SubstThisMap(from: Symbol, to: Type) extends TypeMap {
+ def apply(tp: Type): Type =
+ tp match {
+ case ThisType(thisSym) =>
+ if (thisSym == from) to else mapOver(tp)
+ case other =>
+ mapOver(other)
+ }
+ }
+
+ // Replaces references to the given symbols by bounded wildcards carrying
+ // the symbol's declared bounds; a malformed type anywhere in the
+ // traversal degrades the whole result to WildcardType.
+ class SubstWildcardMap(from: List[Symbol]) extends TypeMap {
+ def apply(tp: Type): Type = try {
+ tp match {
+ case TypeRef(_, sym, _) if from contains sym =>
+ BoundedWildcardType(sym.info.bounds)
+ case _ =>
+ mapOver(tp)
+ }
+ } catch {
+ case ex: MalformedType =>
+ WildcardType
+ }
+ }
+
+// dependent method types
+ // Answers whether a type contains an immediately dependent component;
+ // stops traversing as soon as one is found.
+ object IsDependentCollector extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (tp.isImmediatelyDependent) result = true
+ else if (!result) mapOver(tp.dealias)
+ }
+ }
+
+ /** Approximates a dependent type by replacing every immediately
+ * dependent component with WildcardType; everything else is traversed
+ * structurally.
+ */
+ object ApproximateDependentMap extends TypeMap {
+ def apply(tp: Type): Type =
+ tp match {
+ case dependent if dependent.isImmediatelyDependent => WildcardType
+ case other => mapOver(other)
+ }
+ }
+
+ /** Note: This map is needed even for non-dependent method types, despite what the name might imply.
+ * Substitutes method parameter symbols by the corresponding actual
+ * argument types; unstable actuals are represented by fresh existential
+ * symbols (see existentialsNeeded) rather than substituted directly.
+ */
+ class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints {
+ private val actuals = actuals0.toIndexedSeq
+ // lazily-created existential per parameter position; null = not needed yet
+ private val existentials = new Array[Symbol](actuals.size)
+ def existentialsNeeded: List[Symbol] = existentials.filter(_ ne null).toList
+
+ // Extracts the actual type for `param` only when it is stable and not Nothing.
+ private object StableArg {
+ def unapply(param: Symbol) = Arg unapply param map actuals filter (tp =>
+ tp.isStable && (tp.typeSymbol != NothingClass)
+ )
+ }
+ // Extracts the index of `param` within `params`, if present.
+ private object Arg {
+ def unapply(param: Symbol) = Some(params indexOf param) filter (_ >= 0)
+ }
+
+ def apply(tp: Type): Type = mapOver(tp) match {
+ // unsound to replace args by unstable actual #3873
+ case SingleType(NoPrefix, StableArg(arg)) => arg
+ // (soundly) expand type alias selections on implicit arguments,
+ // see depmet_implicit_oopsla* test cases -- typically, `param.isImplicit`
+ case tp1 @ TypeRef(SingleType(NoPrefix, Arg(pid)), sym, targs) =>
+ val arg = actuals(pid)
+ val res = typeRef(arg, sym, targs)
+ if (res.typeSymbolDirect.isAliasType) res.dealias else tp1
+ // don't return the original `tp`, which may be different from `tp1`,
+ // due to dropping annotations
+ case tp1 => tp1
+ }
+
+ /* Return the type symbol for referencing a parameter inside the existential quantifier.
+ * (Only needed if the actual is unstable.)
+ */
+ private def existentialFor(pid: Int) = {
+ if (existentials(pid) eq null) {
+ val param = params(pid)
+ existentials(pid) = (
+ param.owner.newExistential(param.name.toTypeName append nme.SINGLETON_SUFFIX, param.pos, param.flags)
+ setInfo singletonBounds(actuals(pid))
+ )
+ }
+ existentials(pid)
+ }
+
+ //AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon)
+ override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = {
+ // TODO: this should be simplified; in the stable case, one can
+ // probably just use an Ident to the tree.symbol.
+ //
+ // @PP: That leads to failure here, where stuff no longer has type
+ // 'String @Annot("stuff")' but 'String @Annot(x)'.
+ //
+ // def m(x: String): String @Annot(x) = x
+ // val stuff = m("stuff")
+ //
+ // (TODO cont.) Why an existential in the non-stable case?
+ //
+ // @PP: In the following:
+ //
+ // def m = { val x = "three" ; val y: String @Annot(x) = x; y }
+ //
+ // m is typed as 'String @Annot(x) forSome { val x: String }'.
+ //
+ // Both examples are from run/constrained-types.scala.
+ object treeTrans extends Transformer {
+ override def transform(tree: Tree): Tree = tree.symbol match {
+ case StableArg(actual) =>
+ gen.mkAttributedQualifier(actual, tree.symbol)
+ case Arg(pid) =>
+ val sym = existentialFor(pid)
+ Ident(sym) copyAttrs tree setType typeRef(NoPrefix, sym, Nil)
+ case _ =>
+ super.transform(tree)
+ }
+ }
+ treeTrans transform arg
+ }
+ }
+
+ /** A map that recursively removes all annotations from a type. */
+ object StripAnnotationsMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case AnnotatedType(_, atp, _) =>
+ mapOver(atp) // drop the annotations, keep mapping over the underlying type
+ case tp =>
+ mapOver(tp)
+ }
+ }
+
+ /** A map to convert every occurrence of a wildcard type to a fresh
+ * type variable (bounded wildcards keep their bounds as the constraint). */
+ object wildcardToTypeVarMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case WildcardType =>
+ TypeVar(tp, new TypeConstraint)
+ case BoundedWildcardType(bounds) =>
+ TypeVar(tp, new TypeConstraint(bounds))
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ /** A map to convert every occurrence of a type variable to its origin type.
+ * (The previous header said "to a wildcard type", which did not match the code.) */
+ object typeVarToOriginMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeVar(origin, _) => origin
+ case _ => mapOver(tp)
+ }
+ }
+
+ /** A collector implementing the `contains` method: does the type
+ * (or an embedded tree) refer to symbol `sym`? Stops early once found. */
+ class ContainsCollector(sym: Symbol) extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (!result) {
+ tp.normalize match {
+ case TypeRef(_, sym1, _) if (sym == sym1) => result = true
+ case SingleType(_, sym1) if (sym == sym1) => result = true
+ case _ => mapOver(tp)
+ }
+ }
+ }
+
+ // Also search trees (e.g. annotation arguments) for the symbol.
+ override def mapOver(arg: Tree) = {
+ for (t <- arg) {
+ traverse(t.tpe)
+ if (t.symbol == sym)
+ result = true
+ }
+ arg
+ }
+ }
+
+ /** A collector implementing `containsTp`: does the type contain
+ * (by reference identity, `eq`) the given type `t`? */
+ class ContainsTypeCollector(t: Type) extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (!result) {
+ if (tp eq t) result = true
+ else mapOver(tp)
+ }
+ }
+ // Also search the types of trees embedded in the type (e.g. annotations).
+ override def mapOver(arg: Tree) = {
+ for (t <- arg)
+ traverse(t.tpe)
+
+ arg
+ }
+ }
+
+ /** A collector implementing the `filter` method: all parts of the type
+ * satisfying predicate `p`, in traversal order. */
+ class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) {
+ def withFilter(q: Type => Boolean) = new FilterTypeCollector(tp => p(tp) && q(tp))
+
+ // Results are accumulated by prepending, so reverse to restore traversal order.
+ override def collect(tp: Type) = super.collect(tp).reverse
+
+ def traverse(tp: Type) {
+ if (p(tp)) result ::= tp
+ mapOver(tp)
+ }
+ }
+
+ /** A collector implementing the `collect` method: apply the partial function
+ * `pf` to every part of the type where it is defined, in traversal order. */
+ class CollectTypeCollector[T](pf: PartialFunction[Type, T]) extends TypeCollector[List[T]](Nil) {
+ // Results are accumulated by prepending, so reverse to restore traversal order.
+ override def collect(tp: Type) = super.collect(tp).reverse
+
+ def traverse(tp: Type) {
+ if (pf.isDefinedAt(tp)) result ::= pf(tp)
+ mapOver(tp)
+ }
+ }
+
+ /** A traverser applying side-effecting function `f` to every part of a type. */
+ class ForEachTypeTraverser(f: Type => Unit) extends TypeTraverser {
+ def traverse(tp: Type) {
+ f(tp)
+ mapOver(tp)
+ }
+ }
+
+ /** A collector implementing the `find` method: the first part of the type
+ * satisfying `p`, if any. (The previous header said "filter".) */
+ class FindTypeCollector(p: Type => Boolean) extends TypeCollector[Option[Type]](None) {
+ def traverse(tp: Type) {
+ if (result.isEmpty) {
+ if (p(tp)) result = Some(tp)
+ mapOver(tp)
+ }
+ }
+ }
+
+ /** A collector detecting whether a type, or any of its parts, is erroneous.
+ * (The previous header said "contains".) */
+ object ErroneousCollector extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (!result) {
+ result = tp.isError
+ mapOver(tp)
+ }
+ }
+ }
+
+ /**
+ * A more persistent version of `Type#memberType` which does not require
+ * that the symbol is a direct member of the prefix.
+ *
+ * For instance:
+ *
+ * {{{
+ * class C[T] {
+ * sealed trait F[A]
+ * object X {
+ * object S1 extends F[T]
+ * }
+ * class S2 extends F[T]
+ * }
+ * object O extends C[Int] {
+ * def foo(f: F[Int]) = f match {...} // need to enumerate sealed subtypes of the scrutinee here.
+ * }
+ * class S3 extends O.F[String]
+ *
+ * nestedMemberType(<S1>, <O.type>, <C>) = O.X.S1.type
+ * nestedMemberType(<S2>, <O.type>, <C>) = O.S2.type
+ * nestedMemberType(<S3>, <O.type>, <C>) = S3.type
+ * }}}
+ *
+ * @param sym The symbol of the subtype
+ * @param pre The prefix from which the symbol is seen
+ * @param owner The class at which prefix rewriting stops: prefixes whose
+ * type symbol is a (non-bottom) subclass of `owner` are rebased
+ * onto `pre` via `asSeenFrom`; outer prefixes are recursed into.
+ */
+ def nestedMemberType(sym: Symbol, pre: Type, owner: Symbol): Type = {
+ def loop(tp: Type): Type =
+ if (tp.isTrivial) tp
+ else if (tp.prefix.typeSymbol isNonBottomSubClass owner) {
+ val widened = tp match {
+ case _: ConstantType => tp // Java enum constants: don't widen to the enum type!
+ case _ => tp.widen // C.X.type widens to C.this.X.type, otherwise `tp asSeenFrom (pre, C)` has no effect.
+ }
+ val memType = widened asSeenFrom (pre, tp.typeSymbol.owner)
+ // re-narrow if we widened above, so singleton-ness is preserved
+ if (tp eq widened) memType else memType.narrow
+ }
+ else loop(tp.prefix) memberType tp.typeSymbol
+
+ val result = loop(sym.tpeHK)
+ assert(sym.isTerm || result.typeSymbol == sym, s"($result).typeSymbol = ${result.typeSymbol}; expected ${sym}")
+ result
+ }
+
+ /** The most deeply nested owner that contains all the symbols
+ * of thistype or prefixless typerefs/singletype occurrences in given type.
+ */
+ private def commonOwner(t: Type): Symbol = commonOwner(t :: Nil)
+
+ /** The most deeply nested owner that contains all the symbols
+ * of thistype or prefixless typerefs/singletype occurrences in given list
+ * of types.
+ */
+ private def commonOwner(tps: List[Type]): Symbol = {
+ if (tps.isEmpty) NoSymbol
+ else {
+ // reuse the shared traverser; clear its accumulated result first
+ commonOwnerMap.clear()
+ tps foreach (commonOwnerMap traverse _)
+ if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol
+ }
+ }
+
+ // Overridable accessor for the shared CommonOwnerMap instance below.
+ protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj
+
+ /** Traverser computing the most deeply nested owner common to all
+ * this-types and prefixless typerefs/singletypes seen. `result` is
+ * `null` until the first symbol is registered. */
+ protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] {
+ var result: Symbol = _
+
+ def clear() { result = null }
+
+ private def register(sym: Symbol) {
+ // First considered type is the trivial result.
+ if ((result eq null) || (sym eq NoSymbol))
+ result = sym
+ else
+ // Walk up the owner chain until `sym` is nested in `result`.
+ while ((result ne NoSymbol) && (result ne sym) && !(sym isNestedIn result))
+ result = result.owner
+ }
+ def traverse(tp: Type) = tp.normalize match {
+ case ThisType(sym) => register(sym)
+ case TypeRef(NoPrefix, sym, args) => register(sym.owner) ; args foreach traverse
+ case SingleType(NoPrefix, sym) => register(sym.owner)
+ case _ => mapOver(tp)
+ }
+ }
+
+ private lazy val commonOwnerMapObj = new CommonOwnerMap
+
+ // Control-flow exceptions used by adaptToNewRunMap below to signal that a
+ // symbol could not be re-resolved in the new run.
+ class MissingAliasControl extends ControlThrowable
+ val missingAliasException = new MissingAliasControl
+ class MissingTypeControl extends ControlThrowable
+
+ /** A map that re-resolves the symbols inside a type against the symbol table
+ * of a new compiler run, so types created in a previous run remain usable.
+ * Throws `MissingTypeControl`/`MissingAliasControl` internally when a symbol
+ * no longer exists; callers of `apply` translate these into keeping the old
+ * type or dealiasing.
+ */
+ object adaptToNewRunMap extends TypeMap {
+
+ // Re-resolve `sym`, as seen from prefix `pre`, in the current run.
+ private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
+ if (phase.flatClasses || sym.isRootSymbol || (pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass)
+ sym
+ else if (sym.isModuleClass) {
+ // Adapt via the source module, then recover the module class from it.
+ val sourceModule1 = adaptToNewRun(pre, sym.sourceModule)
+
+ sourceModule1.moduleClass orElse sourceModule1.initialize.moduleClass orElse {
+ val msg = "Cannot adapt module class; sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s"
+ debuglog(msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass))
+ sym
+ }
+ }
+ else {
+ var rebind0 = pre.findMember(sym.name, BRIDGE, 0, true) orElse {
+ if (sym.isAliasType) throw missingAliasException
+ debugwarn(pre+"."+sym+" does no longer exist, phase = "+phase)
+ throw new MissingTypeControl // For build manager and presentation compiler purposes
+ }
+ /* The two symbols have the same fully qualified name */
+ def corresponds(sym1: Symbol, sym2: Symbol): Boolean =
+ sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner))
+ if (!corresponds(sym.owner, rebind0.owner)) {
+ // Found a member with the right name but from a different owner chain;
+ // retry the lookup in the base type of the corresponding base class.
+ debuglog("ADAPT1 pre = "+pre+", sym = "+sym.fullLocationString+", rebind = "+rebind0.fullLocationString)
+ val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner));
+ if (bcs.isEmpty)
+ assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinementclass it might be a structural type => OK to leave it in.
+ else
+ rebind0 = pre.baseType(bcs.head).member(sym.name)
+ debuglog(
+ "ADAPT2 pre = " + pre +
+ ", bcs.head = " + bcs.head +
+ ", sym = " + sym.fullLocationString +
+ ", rebind = " + rebind0.fullLocationString
+ )
+ }
+ rebind0.suchThat(sym => sym.isType || sym.isStable) orElse {
+ debuglog("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType)
+ throw new MalformedType(pre, sym.nameString)
+ }
+ }
+ }
+ def apply(tp: Type): Type = tp match {
+ case ThisType(sym) =>
+ try {
+ val sym1 = adaptToNewRun(sym.owner.thisType, sym)
+ if (sym1 == sym) tp else ThisType(sym1)
+ } catch {
+ case ex: MissingTypeControl =>
+ tp // symbol gone: keep the old type
+ }
+ case SingleType(pre, sym) =>
+ if (sym.isPackage) tp
+ else {
+ val pre1 = this(pre)
+ try {
+ val sym1 = adaptToNewRun(pre1, sym)
+ if ((pre1 eq pre) && (sym1 eq sym)) tp
+ else singleType(pre1, sym1)
+ } catch {
+ case _: MissingTypeControl =>
+ tp
+ }
+ }
+ case TypeRef(pre, sym, args) =>
+ if (sym.isPackageClass) tp
+ else {
+ val pre1 = this(pre)
+ val args1 = args mapConserve (this)
+ try {
+ val sym1 = adaptToNewRun(pre1, sym)
+ if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) {
+ tp
+ } else if (sym1 == NoSymbol) {
+ debugwarn("adapt fail: "+pre+" "+pre1+" "+sym)
+ tp
+ } else {
+ copyTypeRef(tp, pre1, sym1, args1)
+ }
+ } catch {
+ case ex: MissingAliasControl =>
+ // alias vanished: retry on the dealiased type
+ apply(tp.dealias)
+ case _: MissingTypeControl =>
+ tp
+ }
+ }
+ case MethodType(params, restp) =>
+ val restp1 = this(restp)
+ if (restp1 eq restp) tp
+ else copyMethodType(tp, params, restp1)
+ case NullaryMethodType(restp) =>
+ val restp1 = this(restp)
+ if (restp1 eq restp) tp
+ else NullaryMethodType(restp1)
+ case PolyType(tparams, restp) =>
+ val restp1 = this(restp)
+ if (restp1 eq restp) tp
+ else PolyType(tparams, restp1)
+
+ // Lukas: we need to check (together) whether we should also include parameter types
+ // of PolyType and MethodType in adaptToNewRun
+
+ case ClassInfoType(parents, decls, clazz) =>
+ if (clazz.isPackageClass) tp
+ else {
+ val parents1 = parents mapConserve (this)
+ if (parents1 eq parents) tp
+ else ClassInfoType(parents1, decls, clazz)
+ }
+ case RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve (this)
+ if (parents1 eq parents) tp
+ else refinedType(parents1, tp.typeSymbol.owner, decls, tp.typeSymbol.owner.pos)
+ case SuperType(_, _) => mapOver(tp)
+ case TypeBounds(_, _) => mapOver(tp)
+ case TypeVar(_, _) => mapOver(tp)
+ case AnnotatedType(_,_,_) => mapOver(tp)
+ case NotNullType(_) => mapOver(tp)
+ case ExistentialType(_, _) => mapOver(tp)
+ case _ => tp
+ }
+ }
+
+ /** A pair of types, used as a key for cycle detection in subtyping (see
+ * `pendingSubTypes` in `isSubType`). Relies on the case-class structural
+ * `equals`/`hashCode`. */
+ final case class SubTypePair(tp1: Type, tp2: Type) {
+ // SI-8146 we used to implement equality here in terms of pairwise =:=.
+ // But, this was inconsistent with hashCode, which was based on the
+ // Type#hashCode, based on the structure of types, not the meaning.
+ // Now, we use `Type#{equals,hashCode}` as the (consistent) basis for
+ // detecting cycles (aka keeping subtyping decidable.)
+ //
+ // I added tests to show that we detect the cycle: neg/t8146-no-finitary*
+
+ override def toString = tp1+" <:<? "+tp2
+ }
+
+// Helper Methods -------------------------------------------------------------
+
+ /** The maximum allowable depth of lubs or glbs over types `ts`.
+ */
+ def lubDepth(ts: List[Type]): Int = {
+ val td = typeDepth(ts)
+ val bd = baseTypeSeqDepth(ts)
+ lubDepthAdjust(td, td max bd)
+ }
+
+ /** The maximum allowable depth of lubs or glbs over given types,
+ * as a function over the maximum depth `td` of these types, and
+ * the maximum depth `bd` of all types in the base type sequences of these types.
+ * -Yfull-lubs disables the damping and uses `bd` directly.
+ */
+ private def lubDepthAdjust(td: Int, bd: Int): Int =
+ if (settings.XfullLubs.value) bd
+ else if (bd <= 3) bd
+ else if (bd <= 5) td max (bd - 1)
+ else if (bd <= 7) td max (bd - 2)
+ else (td - 1) max (bd - 3)
+
+ /** The maximum depth of type `tp`: structural nesting depth of prefixes,
+ * type arguments, refinement members and quantified parameters. */
+ def typeDepth(tp: Type): Int = tp match {
+ case TypeRef(pre, sym, args) =>
+ typeDepth(pre) max typeDepth(args) + 1
+ case RefinedType(parents, decls) =>
+ typeDepth(parents) max typeDepth(decls.toList.map(_.info)) + 1
+ case TypeBounds(lo, hi) =>
+ typeDepth(lo) max typeDepth(hi)
+ case MethodType(paramtypes, result) =>
+ typeDepth(result)
+ case NullaryMethodType(result) =>
+ typeDepth(result)
+ case PolyType(tparams, result) =>
+ typeDepth(result) max typeDepth(tparams map (_.info)) + 1
+ case ExistentialType(tparams, result) =>
+ typeDepth(result) max typeDepth(tparams map (_.info)) + 1
+ case _ =>
+ 1
+ }
+
+ /** Maximum of `by(tp)` over `tps`; 0 for the empty list. */
+ private def maxDepth(tps: List[Type], by: Type => Int): Int = {
+ //OPT replaced with tailrecursive function to save on #closures
+ // was:
+ // var d = 0
+ // for (tp <- tps) d = d max by(tp) //!!!OPT!!!
+ // d
+ def loop(tps: List[Type], acc: Int): Int = tps match {
+ case tp :: rest => loop(rest, acc max by(tp))
+ case _ => acc
+ }
+ loop(tps, 0)
+ }
+
+ private def typeDepth(tps: List[Type]): Int = maxDepth(tps, typeDepth)
+ private def baseTypeSeqDepth(tps: List[Type]): Int = maxDepth(tps, _.baseTypeSeqDepth)
+
+ /** Is intersection of given types populated? That is,
+ * for all types tp1, tp2 in intersection
+ * for all common base classes bc of tp1 and tp2
+ * let bt1, bt2 be the base types of tp1, tp2 relative to class bc
+ * Then:
+ * bt1 and bt2 have the same prefix, and
+ * any corresponding non-variant type arguments of bt1 and bt2 are the same
+ */
+ def isPopulated(tp1: Type, tp2: Type): Boolean = {
+ // NOTE(review): this match is not exhaustive; it assumes both arguments are
+ // TypeRefs (base types relative to a common base class) or existentials.
+ def isConsistent(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
+ case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
+ assert(sym1 == sym2)
+ pre1 =:= pre2 &&
+ forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) =>
+ //if (tparam.variance == 0 && !(arg1 =:= arg2)) Console.println("inconsistent: "+arg1+"!="+arg2)//DEBUG
+ if (tparam.variance == 0) arg1 =:= arg2
+ else if (arg1.isInstanceOf[TypeVar])
+ // if left-hand argument is a typevar, make it compatible with variance
+ // this is for more precise pattern matching
+ // todo: work this in the spec of this method
+ // also: think what happens if there are embedded typevars?
+ if (tparam.variance < 0) arg1 <:< arg2 else arg2 <:< arg1
+ else true
+ }
+ case (et: ExistentialType, _) =>
+ et.withTypeVars(isConsistent(_, tp2))
+ case (_, et: ExistentialType) =>
+ et.withTypeVars(isConsistent(tp1, _))
+ }
+
+ def check(tp1: Type, tp2: Type) =
+ if (tp1.typeSymbol.isClass && tp1.typeSymbol.hasFlag(FINAL))
+ tp1 <:< tp2 || isNumericValueClass(tp1.typeSymbol) && isNumericValueClass(tp2.typeSymbol)
+ else tp1.baseClasses forall (bc =>
+ tp2.baseTypeIndex(bc) < 0 || isConsistent(tp1.baseType(bc), tp2.baseType(bc)))
+
+ check(tp1, tp2)/* && check(tp2, tp1)*/ // need to investigate why this can't be made symmetric -- neg/gadts1 fails, and run/existials also.
+ }
+
+ /** Does a pattern of type `patType` need an outer test when executed against
+ * selector type `selType` in context defined by `currentOwner`?
+ */
+ def needsOuterTest(patType: Type, selType: Type, currentOwner: Symbol) = {
+ // A singleton type over a fresh dummy value of `pre`'s widened type,
+ // standing in for an unknown outer instance.
+ def createDummyClone(pre: Type): Type = {
+ val dummy = currentOwner.enclClass.newValue(nme.ANYname).setInfo(pre.widen)
+ singleType(ThisType(currentOwner.enclClass), dummy)
+ }
+ // Returns NoType when the outer instance is statically known (no test needed).
+ def maybeCreateDummyClone(pre: Type, sym: Symbol): Type = pre match {
+ case SingleType(pre1, sym1) =>
+ if (sym1.isModule && sym1.isStatic) {
+ NoType
+ } else if (sym1.isModule && sym.owner == sym1.moduleClass) {
+ val pre2 = maybeCreateDummyClone(pre1, sym1)
+ if (pre2 eq NoType) pre2
+ else singleType(pre2, sym1)
+ } else {
+ createDummyClone(pre)
+ }
+ case ThisType(clazz) =>
+ if (clazz.isModuleClass)
+ maybeCreateDummyClone(clazz.typeOfThis, sym)
+ else if (sym.owner == clazz && (sym.hasFlag(PRIVATE) || sym.privateWithin == clazz))
+ NoType
+ else
+ createDummyClone(pre)
+ case _ =>
+ NoType
+ }
+ // See the test for SI-7214 for motivation for dealias. Later `treeCondStrategy#outerTest`
+ // generates an outer test based on `patType.prefix` with automatically dealises.
+ patType.dealias match {
+ case TypeRef(pre, sym, args) =>
+ val pre1 = maybeCreateDummyClone(pre, sym)
+ (pre1 ne NoType) && isPopulated(copyTypeRef(patType, pre1, sym, args), selType)
+ case _ =>
+ false
+ }
+ }
+
+ // Shared recursion counter for isSameType / isDifferentType / isSubType.
+ private var subsametypeRecursions: Int = 0
+
+ // Two prefixes unify for =:= purposes if at least one begins with a type
+ // variable or is a refinement, and the prefixes themselves are equivalent.
+ private def isUnifiable(pre1: Type, pre2: Type) =
+ (beginsWithTypeVarOrIsRefined(pre1) || beginsWithTypeVarOrIsRefined(pre2)) && (pre1 =:= pre2)
+
+ /** Returns true iff we are past phase specialize,
+ * sym1 and sym2 are two existential skolems with equal names and bounds,
+ * and pre1 and pre2 are equal prefixes
+ */
+ private def isSameSpecializedSkolem(sym1: Symbol, sym2: Symbol, pre1: Type, pre2: Type) = {
+ sym1.isExistentialSkolem && sym2.isExistentialSkolem &&
+ sym1.name == sym2.name &&
+ phase.specialized &&
+ sym1.info =:= sym2.info &&
+ pre1 =:= pre2
+ }
+
+ /** Is `pre1` a strict sub-prefix of `pre2` (distinct, both real prefixes,
+ * and pre1 <:< pre2)? Logs when -Ydebug is on. */
+ private def isSubPre(pre1: Type, pre2: Type, sym: Symbol) =
+ if ((pre1 ne pre2) && (pre1 ne NoPrefix) && (pre2 ne NoPrefix) && pre1 <:< pre2) {
+ if (settings.debug.value) println(s"new isSubPre $sym: $pre1 <:< $pre2")
+ true
+ } else
+ false
+
+ /** Do the two symbol/prefix pairs denote the same type designator?
+ * Identical symbols compare prefixes with =:= (skipped for package members
+ * and after erasure); distinct symbols may still match via unifiable prefixes. */
+ private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean =
+ if (sym1 == sym2) sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
+ else (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
+
+ /** Do `tp1` and `tp2` denote equivalent types?
+ * Type-variable constraints recorded during the check are rolled back
+ * (via the undo log) if the answer is negative.
+ */
+ def isSameType(tp1: Type, tp2: Type): Boolean = try {
+ if (Statistics.canEnable) Statistics.incCounter(sametypeCount)
+ subsametypeRecursions += 1
+ //OPT cutdown on Function0 allocation
+ //was:
+// undoLog undoUnless {
+// isSameType1(tp1, tp2)
+// }
+
+ undoLog.lock()
+ try {
+ val before = undoLog.log
+ var result = false
+
+ try result = {
+ isSameType1(tp1, tp2)
+ } finally if (!result) undoLog.undoTo(before) // discard constraints on failure
+ result
+ } finally undoLog.unlock()
+ } finally {
+ subsametypeRecursions -= 1
+ // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
+ // it doesn't help to keep separate recursion counts for the three methods that now share it
+ // if (subsametypeRecursions == 0) undoLog.clear()
+ }
+
+ /** Are `tp1` and `tp2` provably different? Constraints recorded while
+ * checking are always undone (the check must not leak typevar state). */
+ def isDifferentType(tp1: Type, tp2: Type): Boolean = try {
+ subsametypeRecursions += 1
+ undoLog undo { // undo type constraints that arise from operations in this block
+ !isSameType1(tp1, tp2)
+ }
+ } finally {
+ subsametypeRecursions -= 1
+ // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
+ // it doesn't help to keep separate recursion counts for the three methods that now share it
+ // if (subsametypeRecursions == 0) undoLog.clear()
+ }
+
+ /** Do `tp1` and `tp2` have provably different type constructors?
+ * Conservative: non-TypeRefs always count as different. */
+ def isDifferentTypeConstructor(tp1: Type, tp2: Type): Boolean = tp1 match {
+ case TypeRef(pre1, sym1, _) =>
+ tp2 match {
+ case TypeRef(pre2, sym2, _) => sym1 != sym2 || isDifferentType(pre1, pre2)
+ case _ => true
+ }
+ case _ => true
+ }
+
+ /** Normalize `tp`, additionally converting raw Java types to existentials. */
+ def normalizePlus(tp: Type) =
+ if (isRawType(tp)) rawToExistential(tp)
+ else tp.normalize
+
+ /*
+ todo: change to:
+ def normalizePlus(tp: Type) = tp match {
+ case TypeRef(pre, sym, List()) =>
+ if (!sym.isInitialized) sym.rawInfo.load(sym)
+ if (sym.isJavaDefined && !sym.typeParams.isEmpty) rawToExistential(tp)
+ else tp.normalize
+ case _ => tp.normalize
+ }
+ */
+/*
+ private def isSameType0(tp1: Type, tp2: Type): Boolean = {
+ if (tp1 eq tp2) return true
+ ((tp1, tp2) match {
+ case (ErrorType, _) => true
+ case (WildcardType, _) => true
+ case (_, ErrorType) => true
+ case (_, WildcardType) => true
+
+ case (NoType, _) => false
+ case (NoPrefix, _) => tp2.typeSymbol.isPackageClass
+ case (_, NoType) => false
+ case (_, NoPrefix) => tp1.typeSymbol.isPackageClass
+
+ case (ThisType(sym1), ThisType(sym2))
+ if (sym1 == sym2) =>
+ true
+ case (SingleType(pre1, sym1), SingleType(pre2, sym2))
+ if (equalSymsAndPrefixes(sym1, pre1, sym2, pre2)) =>
+ true
+/*
+ case (SingleType(pre1, sym1), ThisType(sym2))
+ if (sym1.isModule &&
+ sym1.moduleClass == sym2 &&
+ pre1 =:= sym2.owner.thisType) =>
+ true
+ case (ThisType(sym1), SingleType(pre2, sym2))
+ if (sym2.isModule &&
+ sym2.moduleClass == sym1 &&
+ pre2 =:= sym1.owner.thisType) =>
+ true
+*/
+ case (ConstantType(value1), ConstantType(value2)) =>
+ value1 == value2
+ case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
+ equalSymsAndPrefixes(sym1, pre1, sym2, pre2) &&
+ ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
+ isSameTypes(args1, args2))
+ // @M! normalize reduces higher-kinded case to PolyType's
+ case (RefinedType(parents1, ref1), RefinedType(parents2, ref2)) =>
+ def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
+ sym2 =>
+ var e1 = s1.lookupEntry(sym2.name)
+ (e1 ne null) && {
+ val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner.thisType)
+ var isEqual = false
+ while (!isEqual && (e1 ne null)) {
+ isEqual = e1.sym.info =:= substSym
+ e1 = s1.lookupNextEntry(e1)
+ }
+ isEqual
+ }
+ }
+ //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
+ isSameTypes(parents1, parents2) && isSubScope(ref1, ref2) && isSubScope(ref2, ref1)
+ case (MethodType(params1, res1), MethodType(params2, res2)) =>
+ // new dependent types: probably fix this, use substSym as done for PolyType
+ (isSameTypes(tp1.paramTypes, tp2.paramTypes) &&
+ res1 =:= res2 &&
+ tp1.isImplicit == tp2.isImplicit)
+ case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
+ // assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
+ (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210
+ res1 =:= res2.substSym(tparams2, tparams1)
+ case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
+ (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210
+ res1 =:= res2.substSym(tparams2, tparams1)
+ case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) =>
+ lo1 =:= lo2 && hi1 =:= hi2
+ case (BoundedWildcardType(bounds), _) =>
+ bounds containsType tp2
+ case (_, BoundedWildcardType(bounds)) =>
+ bounds containsType tp1
+ case (tv @ TypeVar(_,_), tp) =>
+ tv.registerTypeEquality(tp, true)
+ case (tp, tv @ TypeVar(_,_)) =>
+ tv.registerTypeEquality(tp, false)
+ case (AnnotatedType(_,_,_), _) =>
+ annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
+ case (_, AnnotatedType(_,_,_)) =>
+ annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
+ case (_: SingletonType, _: SingletonType) =>
+ var origin1 = tp1
+ while (origin1.underlying.isInstanceOf[SingletonType]) {
+ assert(origin1 ne origin1.underlying, origin1)
+ origin1 = origin1.underlying
+ }
+ var origin2 = tp2
+ while (origin2.underlying.isInstanceOf[SingletonType]) {
+ assert(origin2 ne origin2.underlying, origin2)
+ origin2 = origin2.underlying
+ }
+ ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
+ case _ =>
+ false
+ }) || {
+ val tp1n = normalizePlus(tp1)
+ val tp2n = normalizePlus(tp2)
+ ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
+ }
+ }
+*/
+ /** Fast-path dispatcher for =:=: handles identity, error/wildcard and
+ * NoType/NoPrefix cases, then defers to isSameType2, retrying on
+ * normalized forms if that fails. */
+ private def isSameType1(tp1: Type, tp2: Type): Boolean = {
+ if ((tp1 eq tp2) ||
+ (tp1 eq ErrorType) || (tp1 eq WildcardType) ||
+ (tp2 eq ErrorType) || (tp2 eq WildcardType))
+ true
+ else if ((tp1 eq NoType) || (tp2 eq NoType))
+ false
+ else if (tp1 eq NoPrefix) // !! I do not see how this would be warranted by the spec
+ tp2.typeSymbol.isPackageClass
+ else if (tp2 eq NoPrefix) // !! I do not see how this would be warranted by the spec
+ tp1.typeSymbol.isPackageClass
+ else {
+ isSameType2(tp1, tp2) || {
+ // retry on normalized types, but only if normalization changed something
+ val tp1n = normalizePlus(tp1)
+ val tp2n = normalizePlus(tp2)
+ ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
+ }
+ }
+ }
+
+ /** The structural core of =:=: a case analysis on the shapes of `tp1` and
+ * `tp2`. Written as sequential matches with early `return`s (rather than one
+ * tuple match) to avoid tuple allocation on this hot path; falling through
+ * all matches yields false. */
+ def isSameType2(tp1: Type, tp2: Type): Boolean = {
+ tp1 match {
+ case tr1: TypeRef =>
+ tp2 match {
+ case tr2: TypeRef =>
+ return (equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) &&
+ ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
+ isSameTypes(tr1.args, tr2.args))) ||
+ ((tr1.pre, tr2.pre) match {
+ case (tv @ TypeVar(_,_), _) => tv.registerTypeSelection(tr1.sym, tr2)
+ case (_, tv @ TypeVar(_,_)) => tv.registerTypeSelection(tr2.sym, tr1)
+ case _ => false
+ })
+ case _: SingleType =>
+ return isSameType2(tp2, tp1) // put singleton type on the left, caught below
+ case _ =>
+ }
+ case tt1: ThisType =>
+ tp2 match {
+ case tt2: ThisType =>
+ if (tt1.sym == tt2.sym) return true
+ case _ =>
+ }
+ case st1: SingleType =>
+ tp2 match {
+ case st2: SingleType =>
+ if (equalSymsAndPrefixes(st1.sym, st1.pre, st2.sym, st2.pre)) return true
+ case TypeRef(pre2, sym2, Nil) =>
+ // a module's singleton type equals a reference to its module class
+ if (sym2.isModuleClass && equalSymsAndPrefixes(st1.sym, st1.pre, sym2.sourceModule, pre2)) return true
+ case _ =>
+ }
+ case ct1: ConstantType =>
+ tp2 match {
+ case ct2: ConstantType =>
+ return (ct1.value == ct2.value)
+ case _ =>
+ }
+ case rt1: RefinedType =>
+ tp2 match {
+ case rt2: RefinedType => //
+ // s1 subsumes s2 if every member of s2 has an =:=-equal-info entry in s1
+ def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
+ sym2 =>
+ var e1 = s1.lookupEntry(sym2.name)
+ (e1 ne null) && {
+ val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner)
+ var isEqual = false
+ while (!isEqual && (e1 ne null)) {
+ isEqual = e1.sym.info =:= substSym
+ e1 = s1.lookupNextEntry(e1)
+ }
+ isEqual
+ }
+ }
+ //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
+ return isSameTypes(rt1.parents, rt2.parents) && {
+ val decls1 = rt1.decls
+ val decls2 = rt2.decls
+ isSubScope(decls1, decls2) && isSubScope(decls2, decls1)
+ }
+ case _ =>
+ }
+ case mt1: MethodType =>
+ tp2 match {
+ case mt2: MethodType =>
+ return isSameTypes(mt1.paramTypes, mt2.paramTypes) &&
+ mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params) &&
+ mt1.isImplicit == mt2.isImplicit
+ // note: no case NullaryMethodType(restpe) => return mt1.params.isEmpty && mt1.resultType =:= restpe
+ case _ =>
+ }
+ case NullaryMethodType(restpe1) =>
+ tp2 match {
+ // note: no case mt2: MethodType => return mt2.params.isEmpty && restpe =:= mt2.resultType
+ case NullaryMethodType(restpe2) =>
+ return restpe1 =:= restpe2
+ case _ =>
+ }
+ case PolyType(tparams1, res1) =>
+ tp2 match {
+ case PolyType(tparams2, res2) =>
+// assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
+ // @M looks like it might suffer from same problem as #2210
+ return (
+ (sameLength(tparams1, tparams2)) && // corresponds does not check length of two sequences before checking the predicate
+ (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
+ res1 =:= res2.substSym(tparams2, tparams1)
+ )
+ case _ =>
+ }
+ case ExistentialType(tparams1, res1) =>
+ tp2 match {
+ case ExistentialType(tparams2, res2) =>
+ // @M looks like it might suffer from same problem as #2210
+ return (
+ // corresponds does not check length of two sequences before checking the predicate -- faster & needed to avoid crasher in #2956
+ sameLength(tparams1, tparams2) &&
+ (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
+ res1 =:= res2.substSym(tparams2, tparams1)
+ )
+ case _ =>
+ }
+ case TypeBounds(lo1, hi1) =>
+ tp2 match {
+ case TypeBounds(lo2, hi2) =>
+ return lo1 =:= lo2 && hi1 =:= hi2
+ case _ =>
+ }
+ case BoundedWildcardType(bounds) =>
+ return bounds containsType tp2
+ case _ =>
+ }
+ tp2 match {
+ case BoundedWildcardType(bounds) =>
+ return bounds containsType tp1
+ case _ =>
+ }
+ tp1 match {
+ case tv @ TypeVar(_,_) =>
+ return tv.registerTypeEquality(tp2, true)
+ case _ =>
+ }
+ tp2 match {
+ case tv @ TypeVar(_,_) =>
+ return tv.registerTypeEquality(tp1, false)
+ case _ =>
+ }
+ tp1 match {
+ case _: AnnotatedType =>
+ return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
+ case _ =>
+ }
+ tp2 match {
+ case _: AnnotatedType =>
+ return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
+ case _ =>
+ }
+ tp1 match {
+ case _: SingletonType =>
+ tp2 match {
+ case _: SingletonType =>
+ // follow chains of singleton underlying types to a common root
+ def chaseDealiasedUnderlying(tp: Type): Type = {
+ var origin = tp
+ var next = origin.underlying.dealias
+ while (next.isInstanceOf[SingletonType]) {
+ assert(origin ne next, origin)
+ origin = next
+ next = origin.underlying.dealias
+ }
+ origin
+ }
+ val origin1 = chaseDealiasedUnderlying(tp1)
+ val origin2 = chaseDealiasedUnderlying(tp2)
+ ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
+ case _ =>
+ false
+ }
+ case _ =>
+ false
+ }
+ }
+
+ /** Are `tps1` and `tps2` lists of pairwise equivalent types? */
+ def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ =:= _)
+
+ /** True if two lists have the same length. Since calling length on linear sequences
+ * is O(n), it is an inadvisable way to test length equality.
+ */
+ final def sameLength(xs1: List[_], xs2: List[_]) = compareLengths(xs1, xs2) == 0
+ // Compare list lengths without computing either: negative/zero/positive like compareTo.
+ @tailrec final def compareLengths(xs1: List[_], xs2: List[_]): Int =
+ if (xs1.isEmpty) { if (xs2.isEmpty) 0 else -1 }
+ else if (xs2.isEmpty) 1
+ else compareLengths(xs1.tail, xs2.tail)
+
+ /** Again avoiding calling length, but the lengthCompare interface is clunky.
+ */
+ final def hasLength(xs: List[_], len: Int) = xs.lengthCompare(len) == 0
+
+ // State for cycle detection in deep subtype checks (see isSubType below).
+ private val pendingSubTypes = new mutable.HashSet[SubTypePair]
+ private var basetypeRecursions: Int = 0
+ private val pendingBaseTypes = new mutable.HashSet[Type]
+
+ /** Is `tp1` a subtype of `tp2`, with unbounded comparison depth? */
+ def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth)
+
+ /** Is `tp1` a subtype of `tp2`, comparing to at most `depth`?
+ * Typevar constraints recorded during a failed check are undone, and
+ * deeply recursive checks are cycle-guarded via `pendingSubTypes`.
+ */
+ def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean = try {
+ subsametypeRecursions += 1
+
+ //OPT cutdown on Function0 allocation
+ //was:
+// undoLog undoUnless { // if subtype test fails, it should not affect constraints on typevars
+// if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
+// val p = new SubTypePair(tp1, tp2)
+// if (pendingSubTypes(p))
+// false
+// else
+// try {
+// pendingSubTypes += p
+// isSubType2(tp1, tp2, depth)
+// } finally {
+// pendingSubTypes -= p
+// }
+// } else {
+// isSubType2(tp1, tp2, depth)
+// }
+// }
+
+ undoLog.lock()
+ try {
+ val before = undoLog.log
+ var result = false
+
+ try result = { // if subtype test fails, it should not affect constraints on typevars
+ if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
+ // deep recursion: guard against subtyping cycles
+ val p = new SubTypePair(tp1, tp2)
+ if (pendingSubTypes(p))
+ false // see neg/t8146-no-finitary*
+ else
+ try {
+ pendingSubTypes += p
+ isSubType2(tp1, tp2, depth)
+ } finally {
+ pendingSubTypes -= p
+ }
+ } else {
+ isSubType2(tp1, tp2, depth)
+ }
+ } finally if (!result) undoLog.undoTo(before)
+
+ result
+ } finally undoLog.unlock()
+ } finally {
+ subsametypeRecursions -= 1
+ // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
+ // it doesn't help to keep separate recursion counts for the three methods that now share it
+ // if (subsametypeRecursions == 0) undoLog.clear()
+ }
+
+ /** Does this type have a prefix that begins with a type variable,
+ * or is it a refinement type? For type prefixes that fulfil this condition,
+ * type selections with the same name of equal (as determined by `=:=`) prefixes are
+ * considered equal in regard to `=:=`.
+ */
+ def beginsWithTypeVarOrIsRefined(tp: Type): Boolean = tp match {
+ case SingleType(pre, sym) =>
+ !(sym hasFlag PACKAGE) && beginsWithTypeVarOrIsRefined(pre)
+ // FIX: the archived patch read `case tv at TypeVar(...)` -- the mailing-list
+ // archiver had replaced the pattern-binder `@` with the word `at` (the same
+ // address obfuscation applied to emails in the message header), which is not
+ // valid Scala. Restored the `@` binder.
+ case tv @ TypeVar(_, constr) =>
+ !tv.instValid || beginsWithTypeVarOrIsRefined(constr.inst)
+ case RefinedType(_, _) =>
+ true
+ case _ =>
+ false
+ }
+
+ /** Recursively replace instantiated type variables by their instance types. */
+ @deprecated("The compiler doesn't use this so you shouldn't either - it will be removed", "2.10.0")
+ def instTypeVar(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, args) =>
+ copyTypeRef(tp, instTypeVar(pre), sym, args)
+ case SingleType(pre, sym) =>
+ singleType(instTypeVar(pre), sym)
+ case TypeVar(_, constr) =>
+ instTypeVar(constr.inst)
+ case _ =>
+ tp
+ }
+
+ /** True exactly when `tp` is one of the two sentinel types (by reference). */
+ def isErrorOrWildcard(tp: Type) = (tp eq WildcardType) || (tp eq ErrorType)
+
+ /** Is `tp` a singleton type in the narrow sense: This/Super/SingleType? */
+ def isSingleType(tp: Type) = tp match {
+   case _: ThisType | _: SuperType | _: SingleType => true
+   case _                                          => false
+ }
+
+ /** Is `tp` a literal-constant type? */
+ def isConstantType(tp: Type) = tp match {
+   case _: ConstantType => true
+   case _               => false
+ }
+
+ /** This is defined and named as it is because the goal is to exclude source
+ * level types which are not value types (e.g. MethodType) without excluding
+ * necessary internal types such as WildcardType. There are also non-value
+ * types which can be used as type arguments (e.g. type constructors.)
+ *
+ * The three disjuncts are tried in the order written; the cheap check on
+ * internal types comes first, the general value-type test last.
+ */
+ def isUseableAsTypeArg(tp: Type) = (
+ isInternalTypeUsedAsTypeArg(tp) // the subset of internal types which can be type args
+ || isHKTypeRef(tp) // not a value type, but ok as a type arg
+ || isValueElseNonValue(tp) // otherwise only value types
+ )
+
+ /** A TypeRef with no arguments applied that is nevertheless higher-kinded,
+ * i.e. an unapplied type constructor.
+ */
+ private def isHKTypeRef(tp: Type) = tp match {
+ // the symbol binder was unused; a wildcard avoids the unused-binding warning
+ case TypeRef(_, _, Nil) => tp.isHigherKinded
+ case _ => false
+ }
+ /** Whole-list version of `isUseableAsTypeArg`, in constant stack space. */
+ @tailrec final def isUseableAsTypeArgs(tps: List[Type]): Boolean = tps match {
+   case Nil        => true
+   case hd :: rest => if (isUseableAsTypeArg(hd)) isUseableAsTypeArgs(rest) else false
+ }
+
+ /** The "third way", types which are neither value types nor
+ * non-value types as defined in the SLS, further divided into
+ * types which are used internally in type applications and
+ * types which are not.
+ */
+ private def isInternalTypeNotUsedAsTypeArg(tp: Type): Boolean = tp match {
+ // all pattern binders were unused; wildcards remove the unused-binding noise
+ case AntiPolyType(_, _) => true
+ case ClassInfoType(_, _, _) => true
+ case DeBruijnIndex(_, _, _) => true
+ case ErasedValueType(_) => true
+ case NoPrefix => true
+ case NoType => true
+ case SuperType(_, _) => true
+ case TypeBounds(_, _) => true
+ case _ => false
+ }
+ /** Internal (third-way) types that are nevertheless legal as type arguments. */
+ private def isInternalTypeUsedAsTypeArg(tp: Type): Boolean = tp match {
+   case WildcardType | ErrorType => true
+   case BoundedWildcardType(_)   => true
+   case _: TypeVar               => true
+   case _                        => false
+ }
+ /** Types that are unconditionally value types, regardless of contents. */
+ private def isAlwaysValueType(tp: Type) = tp match {
+   case ConstantType(_) | RefinedType(_, _) | ExistentialType(_, _) => true
+   case _                                                           => false
+ }
+ /** Types that are unconditionally non-value types (methods and overloads). */
+ private def isAlwaysNonValueType(tp: Type) = tp match {
+   case OverloadedType(_, _) | NullaryMethodType(_) | MethodType(_, _) => true
+   case PolyType(_, MethodType(_, _))                                  => true
+   case _                                                              => false
+ }
+ /** Should be called only with types for which a clear true/false answer
+ * can be given: true == value type, false == non-value type. Otherwise,
+ * an exception is thrown.
+ *
+ * Order matters: the unconditional classifiers are consulted before the
+ * structural cases below, and the HK TypeRef case must precede the plain
+ * TypeRef case.
+ */
+ private def isValueElseNonValue(tp: Type): Boolean = tp match {
+ case tp if isAlwaysValueType(tp) => true
+ case tp if isAlwaysNonValueType(tp) => false
+ case AnnotatedType(_, underlying, _) => isValueElseNonValue(underlying)
+ case SingleType(_, sym) => sym.isValue // excludes packages and statics
+ case TypeRef(_, _, _) if tp.isHigherKinded => false // excludes type constructors
+ case ThisType(sym) => !sym.isPackageClass // excludes packages
+ case TypeRef(_, sym, _) => !sym.isPackageClass // excludes packages
+ case PolyType(_, _) => true // poly-methods excluded earlier
+ case tp => sys.error("isValueElseNonValue called with third-way type " + tp)
+ }
+
+ /** SLS 3.2, Value Types
+ * Is the given type definitely a value type? A true result means
+ * it verifiably is, but a false result does not mean it is not,
+ * only that it cannot be assured. To avoid false positives, this
+ * defaults to false, but since Type is not sealed, one should take
+ * a false answer with a grain of salt. This method may be primarily
+ * useful as documentation; it is likely that !isNonValueType(tp)
+ * will serve better than isValueType(tp).
+ *
+ * Note: delegates to isValueElseNonValue, which throws on "third-way"
+ * types — do not call this on internal types like NoPrefix or TypeBounds.
+ */
+ def isValueType(tp: Type) = isValueElseNonValue(tp)
+
+ /** SLS 3.3, Non-Value Types
+ * Is the given type definitely a non-value type, as defined in SLS 3.3?
+ * The specification-enumerated non-value types are method types, polymorphic
+ * method types, and type constructors. Supplements to the specified set of
+ * non-value types include: types which wrap non-value symbols (packages
+ * and statics), overloaded types. Varargs and by-name types T* and (=>T) are
+ * not designated non-value types because there is code which depends on using
+ * them as type arguments, but their precise status is unclear.
+ *
+ * Like isValueType, this throws (via isValueElseNonValue) on third-way types.
+ */
+ def isNonValueType(tp: Type) = !isValueElseNonValue(tp)
+
+ /** A class type that is not a refinement: module classes, proper classes,
+  *  and (leniently) ErrorType.
+  */
+ def isNonRefinementClassType(tpe: Type) = tpe match {
+   case ErrorType           => true
+   case SingleType(_, s)    => s.isModuleClass
+   case TypeRef(_, s, _)    => s.isClass && !s.isRefinementClass
+   case _                   => false
+ }
+
+ // @assume tp1.isHigherKinded || tp2.isHigherKinded
+ // Subtyping for higher-kinded types: Nothing/Any short-circuit, otherwise
+ // both sides are normalized to PolyTypes and compared parameter-wise
+ // (contravariantly in the bounds, covariantly in the result).
+ def isHKSubType0(tp1: Type, tp2: Type, depth: Int): Boolean = (
+ tp1.typeSymbol == NothingClass
+ ||
+ tp2.typeSymbol == AnyClass // @M Any and Nothing are super-type resp. subtype of every well-kinded type
+ || // @M! normalize reduces higher-kinded case to PolyType's
+ ((tp1.normalize.withoutAnnotations , tp2.normalize.withoutAnnotations) match {
+ case (PolyType(tparams1, res1), PolyType(tparams2, res2)) => // @assume tp1.isHigherKinded && tp2.isHigherKinded (as they were both normalized to PolyType)
+ sameLength(tparams1, tparams2) && {
+ if (tparams1.head.owner.isMethod) { // fast-path: polymorphic method type -- type params cannot be captured
+ (tparams1 corresponds tparams2)((p1, p2) => p2.info.substSym(tparams2, tparams1) <:< p1.info) &&
+ res1 <:< res2.substSym(tparams2, tparams1)
+ } else { // normalized higher-kinded type
+ //@M for an example of why we need to generate fresh symbols, see neg/tcpoly_ticket2101.scala
+ val tpsFresh = cloneSymbols(tparams1)
+
+ (tparams1 corresponds tparams2)((p1, p2) =>
+ p2.info.substSym(tparams2, tpsFresh) <:< p1.info.substSym(tparams1, tpsFresh)) &&
+ res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh)
+
+ //@M the forall in the previous test could be optimised to the following,
+ // but not worth the extra complexity since it only shaves 1s from quick.comp
+ // (List.forall2(tpsFresh/*optimisation*/, tparams2)((p1, p2) =>
+ // p2.info.substSym(tparams2, tpsFresh) <:< p1.info /*optimisation, == (p1 from tparams1).info.substSym(tparams1, tpsFresh)*/) &&
+ // this optimisation holds because inlining cloneSymbols in `val tpsFresh = cloneSymbols(tparams1)` gives:
+ // val tpsFresh = tparams1 map (_.cloneSymbol)
+ // for (tpFresh <- tpsFresh) tpFresh.setInfo(tpFresh.info.substSym(tparams1, tpsFresh))
+ }
+ } && annotationsConform(tp1.normalize, tp2.normalize)
+ case (_, _) => false // @assume !tp1.isHigherKinded || !tp2.isHigherKinded
+ // --> thus, cannot be subtypes (Any/Nothing has already been checked)
+ }))
+
+ /** Do the type arguments `tps1` conform to `tps2`, given the variances of
+  *  the corresponding `tparams`? Invariant positions require conformance in
+  *  both directions; the contravariant check runs first (order preserved,
+  *  since isSubType may record typevar constraints).
+  */
+ def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol], depth: Int): Boolean = {
+   def isSubArg(t1: Type, t2: Type, v: Int): Boolean =
+     if (v <= 0 && !isSubType(t2, t1, depth)) false
+     else v < 0 || isSubType(t1, t2, depth)
+   corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg)
+ }
+
+ /** `tp1` unless it is the very same object as `tp2`, in which case NoType. */
+ def differentOrNone(tp1: Type, tp2: Type) = if (tp1 ne tp2) tp1 else NoType
+
+ /** Does type `tp1` conform to `tp2`?
+ *
+ * Dispatches through four ordered stages (firstTry..fourthTry), alternating
+ * between decomposing the right-hand side and the left-hand side; order is
+ * significant and must not be changed.
+ */
+ private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = {
+ // cheap pre-checks: reflexivity, error/wildcard sentinels, NoType/NoPrefix
+ if ((tp1 eq tp2) || isErrorOrWildcard(tp1) || isErrorOrWildcard(tp2)) return true
+ if ((tp1 eq NoType) || (tp2 eq NoType)) return false
+ if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass // !! I do not see how the "isPackageClass" would be warranted by the spec
+ if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass
+ if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2
+ if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType0(tp1, tp2, depth)
+
+ /** First try, on the right:
+ * - unwrap Annotated types, BoundedWildcardTypes,
+ * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard
+ * - handle common cases for first-kind TypeRefs on both sides as a fast path.
+ */
+ def firstTry = tp2 match {
+ // fast path: two typerefs, none of them HK
+ case tr2: TypeRef =>
+ tp1 match {
+ case tr1: TypeRef =>
+ val sym1 = tr1.sym
+ val sym2 = tr2.sym
+ val pre1 = tr1.pre
+ val pre2 = tr2.pre
+ (((if (sym1 == sym2) phase.erasedTypes || sym1.owner.hasPackageFlag || isSubType(pre1, pre2, depth)
+ else (sym1.name == sym2.name && !sym1.isModuleClass && !sym2.isModuleClass &&
+ (isUnifiable(pre1, pre2) ||
+ isSameSpecializedSkolem(sym1, sym2, pre1, pre2) ||
+ sym2.isAbstractType && isSubPre(pre1, pre2, sym2)))) &&
+ isSubArgs(tr1.args, tr2.args, sym1.typeParams, depth))
+ ||
+ sym2.isClass && {
+ val base = tr1 baseType sym2
+ (base ne tr1) && isSubType(base, tr2, depth)
+ }
+ ||
+ thirdTryRef(tr1, tr2))
+ case _ =>
+ secondTry
+ }
+ case AnnotatedType(_, _, _) =>
+ isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
+ annotationsConform(tp1, tp2)
+ case BoundedWildcardType(bounds) =>
+ isSubType(tp1, bounds.hi, depth)
+ case tv2 @ TypeVar(_, constr2) =>
+ tp1 match {
+ case AnnotatedType(_, _, _) | BoundedWildcardType(_) =>
+ secondTry
+ case _ =>
+ // record tp1 as a lower bound on the typevar (isLowerBound = true)
+ tv2.registerBound(tp1, true)
+ }
+ case _ =>
+ secondTry
+ }
+
+ /** Second try, on the left:
+ * - unwrap AnnotatedTypes, BoundedWildcardTypes,
+ * - bind typevars,
+ * - handle existential types by skolemization.
+ */
+ def secondTry = tp1 match {
+ case AnnotatedType(_, _, _) =>
+ isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
+ annotationsConform(tp1, tp2)
+ case BoundedWildcardType(bounds) =>
+ isSubType(tp1.bounds.lo, tp2, depth)
+ case tv @ TypeVar(_,_) =>
+ // record tp2 as an upper bound on the typevar (isLowerBound = false)
+ tv.registerBound(tp2, false)
+ case ExistentialType(_, _) =>
+ try {
+ skolemizationLevel += 1
+ isSubType(tp1.skolemizeExistential, tp2, depth)
+ } finally {
+ skolemizationLevel -= 1
+ }
+ case _ =>
+ thirdTry
+ }
+
+ def thirdTryRef(tp1: Type, tp2: TypeRef): Boolean = {
+ val sym2 = tp2.sym
+ sym2 match {
+ case NotNullClass => tp1.isNotNull
+ case SingletonClass => tp1.isStable || fourthTry
+ case _: ClassSymbol =>
+ if (isRaw(sym2, tp2.args))
+ isSubType(tp1, rawToExistential(tp2), depth)
+ else if (sym2.name == tpnme.REFINE_CLASS_NAME)
+ // compare against the refinement's info rather than the synthetic class
+ isSubType(tp1, sym2.info, depth)
+ else
+ fourthTry
+ case _: TypeSymbol =>
+ if (sym2 hasFlag DEFERRED) {
+ val tp2a = tp2.bounds.lo
+ isDifferentTypeConstructor(tp2, tp2a) &&
+ isSubType(tp1, tp2a, depth) ||
+ fourthTry
+ } else {
+ isSubType(tp1.normalize, tp2.normalize, depth)
+ }
+ case _ =>
+ fourthTry
+ }
+ }
+
+ /** Third try, on the right:
+ * - decompose refined types.
+ * - handle typerefs, existentials, and notnull types.
+ * - handle left+right method types, polytypes, typebounds
+ */
+ def thirdTry = tp2 match {
+ case tr2: TypeRef =>
+ thirdTryRef(tp1, tr2)
+ case rt2: RefinedType =>
+ (rt2.parents forall (isSubType(tp1, _, depth))) &&
+ (rt2.decls forall (specializesSym(tp1, _, depth)))
+ case et2: ExistentialType =>
+ et2.withTypeVars(isSubType(tp1, _, depth), depth) || fourthTry
+ case nn2: NotNullType =>
+ tp1.isNotNull && isSubType(tp1, nn2.underlying, depth)
+ case mt2: MethodType =>
+ tp1 match {
+ case mt1 @ MethodType(params1, res1) =>
+ val params2 = mt2.params
+ val res2 = mt2.resultType
+ (sameLength(params1, params2) &&
+ mt1.isImplicit == mt2.isImplicit &&
+ matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
+ isSubType(res1.substSym(params1, params2), res2, depth))
+ // TODO: if mt1.params.isEmpty, consider NullaryMethodType?
+ case _ =>
+ false
+ }
+ case pt2 @ NullaryMethodType(_) =>
+ tp1 match {
+ // TODO: consider MethodType mt for which mt.params.isEmpty??
+ case pt1 @ NullaryMethodType(_) =>
+ isSubType(pt1.resultType, pt2.resultType, depth)
+ case _ =>
+ false
+ }
+ case TypeBounds(lo2, hi2) =>
+ tp1 match {
+ case TypeBounds(lo1, hi1) =>
+ // bounds conform contravariantly in lo, covariantly in hi
+ isSubType(lo2, lo1, depth) && isSubType(hi1, hi2, depth)
+ case _ =>
+ false
+ }
+ case _ =>
+ fourthTry
+ }
+
+ /** Fourth try, on the left:
+ * - handle typerefs, refined types, notnull and singleton types.
+ */
+ def fourthTry = tp1 match {
+ case tr1 @ TypeRef(pre1, sym1, _) =>
+ sym1 match {
+ case NothingClass => true
+ case NullClass =>
+ tp2 match {
+ case TypeRef(_, sym2, _) =>
+ containsNull(sym2)
+ case _ =>
+ isSingleType(tp2) && isSubType(tp1, tp2.widen, depth)
+ }
+ case _: ClassSymbol =>
+ if (isRaw(sym1, tr1.args))
+ isSubType(rawToExistential(tp1), tp2, depth)
+ else if (sym1.isModuleClass) tp2 match {
+ case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1.sourceModule, pre1, sym2, pre2)
+ case _ => false
+ }
+ else if (sym1.isRefinementClass)
+ isSubType(sym1.info, tp2, depth)
+ else false
+
+ case _: TypeSymbol =>
+ if (sym1 hasFlag DEFERRED) {
+ val tp1a = tp1.bounds.hi
+ isDifferentTypeConstructor(tp1, tp1a) && isSubType(tp1a, tp2, depth)
+ } else {
+ isSubType(tp1.normalize, tp2.normalize, depth)
+ }
+ case _ =>
+ false
+ }
+ case RefinedType(parents1, _) =>
+ parents1 exists (isSubType(_, tp2, depth))
+ case _: SingletonType | _: NotNullType =>
+ isSubType(tp1.underlying, tp2, depth)
+ case _ =>
+ false
+ }
+
+ // enter the dispatch chain
+ firstTry
+ }
+
+ /** Can values of `sym`'s type be null? Only reference classes qualify:
+  *  Nothing and anything under AnyVal or NotNull are excluded.
+  */
+ private def containsNull(sym: Symbol): Boolean = (
+      sym.isClass
+   && sym != NothingClass
+   && !sym.isNonBottomSubClass(AnyValClass)
+   && !sym.isNonBottomSubClass(NotNullClass)
+ )
+
+ /** Are `tps1` and `tps2` lists of equal length such that all elements
+ * of `tps1` conform to corresponding elements of `tps2`?
+ */
+ def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean =
+   tps1.corresponds(tps2)((t1, t2) => t1 <:< t2)
+
+ /** Does type `tp` implement symbol `sym` with same or
+ * stronger type? Exact only if `sym` is a member of some
+ * refinement type, otherwise we might return false negatives.
+ *
+ * Convenience overload delegating with unlimited depth (AnyDepth).
+ */
+ def specializesSym(tp: Type, sym: Symbol): Boolean =
+ specializesSym(tp, sym, AnyDepth)
+
+ // Nothing specializes any member; Null specializes members of owners whose
+ // type can contain null. Otherwise look up the member by name and check each
+ // overloaded alternative.
+ def specializesSym(tp: Type, sym: Symbol, depth: Int): Boolean =
+ tp.typeSymbol == NothingClass ||
+ tp.typeSymbol == NullClass && containsNull(sym.owner) || {
+ def specializedBy(membr: Symbol): Boolean =
+ membr == sym || specializesSym(tp.narrow, membr, sym.owner.thisType, sym, depth)
+ val member = tp.nonPrivateMember(sym.name)
+ if (member eq NoSymbol) false
+ else if (member.isOverloaded) member.alternatives exists specializedBy
+ else specializedBy(member)
+ // was
+ // (tp.nonPrivateMember(sym.name).alternatives exists
+ // (alt => sym == alt || specializesSym(tp.narrow, alt, sym.owner.thisType, sym, depth)))
+ }
+
+ /** Does member `sym1` of `tp1` have a stronger type
+ * than member `sym2` of `tp2`?
+ *
+ * Three disjuncts: term members need a subtype (and no loss of stability),
+ * abstract type members need bounds containment plus kind conformance,
+ * alias members need type equality.
+ */
+ private def specializesSym(tp1: Type, sym1: Symbol, tp2: Type, sym2: Symbol, depth: Int): Boolean = {
+ val info1 = tp1.memberInfo(sym1)
+ val info2 = tp2.memberInfo(sym2).substThis(tp2.typeSymbol, tp1)
+ //System.out.println("specializes "+tp1+"."+sym1+":"+info1+sym1.locationString+" AND "+tp2+"."+sym2+":"+info2)//DEBUG
+ ( sym2.isTerm && isSubType(info1, info2, depth) && (!sym2.isStable || sym1.isStable)
+ || sym2.isAbstractType && {
+ val memberTp1 = tp1.memberType(sym1)
+ // println("kinds conform? "+(memberTp1, tp1, sym2, kindsConform(List(sym2), List(memberTp1), tp2, sym2.owner)))
+ info2.bounds.containsType(memberTp1) &&
+ kindsConform(List(sym2), List(memberTp1), tp1, sym1.owner)
+ }
+ || sym2.isAliasType && tp2.memberType(sym2).substThis(tp2.typeSymbol, tp1) =:= tp1.memberType(sym1) //@MAT ok
+ )
+ }
+
+ /** A function implementing `tp1` matches `tp2`.
+ *
+ * When `alwaysMatchSimple` is set, two non-method types match
+ * unconditionally (see `lastTry`); otherwise they must be `=:=`.
+ */
+ final def matchesType(tp1: Type, tp2: Type, alwaysMatchSimple: Boolean): Boolean = {
+ // quantified types match if their type params align after substitution
+ def matchesQuantified(tparams1: List[Symbol], tparams2: List[Symbol], res1: Type, res2: Type): Boolean = (
+ sameLength(tparams1, tparams2) &&
+ matchesType(res1, res2.substSym(tparams2, tparams1), alwaysMatchSimple)
+ )
+ def lastTry =
+ tp2 match {
+ case ExistentialType(_, res2) if alwaysMatchSimple =>
+ matchesType(tp1, res2, true)
+ case MethodType(_, _) =>
+ false
+ case PolyType(_, _) =>
+ false
+ case _ =>
+ alwaysMatchSimple || tp1 =:= tp2
+ }
+ tp1 match {
+ case mt1 @ MethodType(params1, res1) =>
+ tp2 match {
+ case mt2 @ MethodType(params2, res2) =>
+ // sameLength(params1, params2) was used directly as pre-screening optimization (now done by matchesQuantified -- is that ok, performancewise?)
+ mt1.isImplicit == mt2.isImplicit &&
+ matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
+ matchesQuantified(params1, params2, res1, res2)
+ case NullaryMethodType(res2) =>
+ if (params1.isEmpty) matchesType(res1, res2, alwaysMatchSimple)
+ else matchesType(tp1, res2, alwaysMatchSimple)
+ case ExistentialType(_, res2) =>
+ alwaysMatchSimple && matchesType(tp1, res2, true)
+ case TypeRef(_, sym, Nil) =>
+ params1.isEmpty && sym.isModuleClass && matchesType(res1, tp2, alwaysMatchSimple)
+ case _ =>
+ false
+ }
+ case mt1 @ NullaryMethodType(res1) =>
+ tp2 match {
+ case mt2 @ MethodType(Nil, res2) => // could never match if params nonEmpty, and !mt2.isImplicit is implied by empty param list
+ matchesType(res1, res2, alwaysMatchSimple)
+ case NullaryMethodType(res2) =>
+ matchesType(res1, res2, alwaysMatchSimple)
+ case ExistentialType(_, res2) =>
+ alwaysMatchSimple && matchesType(tp1, res2, true)
+ case TypeRef(_, sym, Nil) if sym.isModuleClass =>
+ matchesType(res1, tp2, alwaysMatchSimple)
+ case _ =>
+ // drop the nullary wrapper on the left and retry
+ matchesType(res1, tp2, alwaysMatchSimple)
+ }
+ case PolyType(tparams1, res1) =>
+ tp2 match {
+ case PolyType(tparams2, res2) =>
+ // shortcut: identical type params need no substitution
+ if ((tparams1 corresponds tparams2)(_ eq _))
+ matchesType(res1, res2, alwaysMatchSimple)
+ else
+ matchesQuantified(tparams1, tparams2, res1, res2)
+ case ExistentialType(_, res2) =>
+ alwaysMatchSimple && matchesType(tp1, res2, true)
+ case _ =>
+ false // remember that tparams1.nonEmpty is now an invariant of PolyType
+ }
+ case ExistentialType(tparams1, res1) =>
+ tp2 match {
+ case ExistentialType(tparams2, res2) =>
+ matchesQuantified(tparams1, tparams2, res1, res2)
+ case _ =>
+ if (alwaysMatchSimple) matchesType(res1, tp2, true)
+ else lastTry
+ }
+ case TypeRef(_, sym, Nil) if sym.isModuleClass =>
+ tp2 match {
+ case MethodType(Nil, res2) => matchesType(tp1, res2, alwaysMatchSimple)
+ case NullaryMethodType(res2) => matchesType(tp1, res2, alwaysMatchSimple)
+ case _ => lastTry
+ }
+ case _ =>
+ lastTry
+ }
+ }
+
+/** matchesType above is an optimized version of the following implementation:
+
+ def matchesType2(tp1: Type, tp2: Type, alwaysMatchSimple: Boolean): Boolean = {
+ def matchesQuantified(tparams1: List[Symbol], tparams2: List[Symbol], res1: Type, res2: Type): Boolean =
+ tparams1.length == tparams2.length &&
+ matchesType(res1, res2.substSym(tparams2, tparams1), alwaysMatchSimple)
+ (tp1, tp2) match {
+ case (MethodType(params1, res1), MethodType(params2, res2)) =>
+ params1.length == params2.length && // useful pre-screening optimization
+ matchingParams(params1, params2, tp1.isInstanceOf[JavaMethodType], tp2.isInstanceOf[JavaMethodType]) &&
+ matchesType(res1, res2, alwaysMatchSimple) &&
+ tp1.isImplicit == tp2.isImplicit
+ case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
+ matchesQuantified(tparams1, tparams2, res1, res2)
+ case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) =>
+ matchesType(rtp1, rtp2, alwaysMatchSimple)
+ case (MethodType(List(), rtp1), NullaryMethodType(rtp2)) =>
+ matchesType(rtp1, rtp2, alwaysMatchSimple)
+ case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
+ matchesQuantified(tparams1, tparams2, res1, res2)
+ case (ExistentialType(_, res1), _) if alwaysMatchSimple =>
+ matchesType(res1, tp2, alwaysMatchSimple)
+ case (_, ExistentialType(_, res2)) if alwaysMatchSimple =>
+ matchesType(tp1, res2, alwaysMatchSimple)
+ case (NullaryMethodType(rtp1), _) =>
+ matchesType(rtp1, tp2, alwaysMatchSimple)
+ case (_, NullaryMethodType(rtp2)) =>
+ matchesType(tp1, rtp2, alwaysMatchSimple)
+ case (MethodType(_, _), _) => false
+ case (PolyType(_, _), _) => false
+ case (_, MethodType(_, _)) => false
+ case (_, PolyType(_, _)) => false
+ case _ =>
+ alwaysMatchSimple || tp1 =:= tp2
+ }
+ }
+*/
+
+ /** Are `syms1` and `syms2` parameter lists with pairwise equivalent types?
+  *  For Java methods, Object on the Java side matches Any on the Scala side.
+  */
+ private def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean =
+   (syms1, syms2) match {
+     case (Nil, Nil) => true
+     case (s1 :: rest1, s2 :: rest2) =>
+       val t1 = s1.tpe
+       val t2 = s2.tpe
+       val equivalent = (
+            t1 =:= t2
+         || syms1isJava && t2.typeSymbol == ObjectClass && t1.typeSymbol == AnyClass
+         || syms2isJava && t1.typeSymbol == ObjectClass && t2.typeSymbol == AnyClass
+       )
+       equivalent && matchingParams(rest1, rest2, syms1isJava, syms2isJava)
+     case _ => false // length mismatch
+   }
+
+ /** like map2, but returns list `xs` itself - instead of a copy - if function
+ * `f` maps all elements to themselves (by reference).
+ * Assumes `ys` is at least as long as `xs`.
+ */
+ def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] = xs match {
+   case Nil => xs
+   case hd :: tl =>
+     val hd1 = f(hd, ys.head)
+     val tl1 = map2Conserve(tl, ys.tail)(f)
+     if ((hd1 eq hd) && (tl1 eq tl)) xs else hd1 :: tl1
+ }
+
+ /** Solve constraint collected in types `tvars`.
+ *
+ * @param tvars All type variables to be instantiated.
+ * @param tparams The type parameters corresponding to `tvars`
+ * @param variances The variances of type parameters; need to reverse
+ * solution direction for all contravariant variables.
+ * @param upper When `true` search for max solution else min.
+ *
+ * Convenience overload: delegates with unlimited depth (AnyDepth).
+ */
+ def solve(tvars: List[TypeVar], tparams: List[Symbol],
+ variances: List[Int], upper: Boolean): Boolean =
+ solve(tvars, tparams, variances, upper, AnyDepth)
+
+ // Depth-limited constraint solver: instantiates each tvar to the glb of its
+ // hi bounds (when solving up) or the lub of its lo bounds (when solving down).
+ // Returns true iff every instantiation lies within its tvar's bounds.
+ def solve(tvars: List[TypeVar], tparams: List[Symbol],
+ variances: List[Int], upper: Boolean, depth: Int): Boolean = {
+
+ def solveOne(tvar: TypeVar, tparam: Symbol, variance: Int) {
+ if (tvar.constr.inst == NoType) {
+ // contravariant params solve in the opposite direction
+ val up = if (variance != CONTRAVARIANT) upper else !upper
+ // null marks this tvar as in-progress; a dependent tvar finding it null
+ // below (`tvar2.constr.inst eq null`) flags a cycle
+ tvar.constr.inst = null
+ val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo
+ //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound))
+ var cyclic = bound contains tparam
+ foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => {
+ val ok = (tparam2 != tparam) && (
+ (bound contains tparam2)
+ || up && (tparam2.info.bounds.lo =:= tparam.tpeHK)
+ || !up && (tparam2.info.bounds.hi =:= tparam.tpeHK)
+ )
+ if (ok) {
+ if (tvar2.constr.inst eq null) cyclic = true
+ solveOne(tvar2, tparam2, variance2)
+ }
+ })
+ // only register declared bounds when the dependency graph is acyclic
+ if (!cyclic) {
+ if (up) {
+ if (bound.typeSymbol != AnyClass) {
+ log(s"$tvar addHiBound $bound.instantiateTypeParams($tparams, $tvars)")
+ tvar addHiBound bound.instantiateTypeParams(tparams, tvars)
+ }
+ for (tparam2 <- tparams)
+ tparam2.info.bounds.lo.dealias match {
+ case TypeRef(_, `tparam`, _) =>
+ log(s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
+ tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
+ case _ =>
+ }
+ } else {
+ if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) {
+ log(s"$tvar addLoBound $bound.instantiateTypeParams($tparams, $tvars)")
+ tvar addLoBound bound.instantiateTypeParams(tparams, tvars)
+ }
+ for (tparam2 <- tparams)
+ tparam2.info.bounds.hi.dealias match {
+ case TypeRef(_, `tparam`, _) =>
+ log(s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
+ tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
+ case _ =>
+ }
+ }
+ }
+ tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar
+
+ //println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen)))
+ val newInst = (
+ if (up) {
+ if (depth != AnyDepth) glb(tvar.constr.hiBounds, depth) else glb(tvar.constr.hiBounds)
+ } else {
+ if (depth != AnyDepth) lub(tvar.constr.loBounds, depth) else lub(tvar.constr.loBounds)
+ }
+ )
+ log(s"$tvar setInst $newInst")
+ tvar setInst newInst
+ //Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG
+ }
+ }
+
+ // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
+ foreach3(tvars, tparams, variances)(solveOne)
+ tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst))
+ }
+
+ /** Do type arguments `targs` conform to formal parameters `tparams`?
+  *  Bounds are adapted for annotations when any argument carries them.
+  */
+ def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = {
+   val instantiated = instantiatedBounds(pre, owner, tparams, targs)
+   val bounds =
+     if (targs exists typeHasAnnotations) adaptBoundsToAnnotations(instantiated, tparams, targs)
+     else instantiated
+   (bounds corresponds targs)(boundsContainType)
+ }
+
+ // Each tparam's info is viewed from `pre`/`owner`, instantiated with `targs`,
+ // and its bounds extracted; result is parallel to `tparams`.
+ def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] =
+ tparams map (_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds)
+
+// Lubs and Glbs ---------------------------------------------------------
+
+ // Debug aid: renders the base-type-sequence matrix used while lubbing as a
+ // table (one column per type, padded with NoType) and prints it with `depth`.
+ private def printLubMatrix(btsMap: Map[Type, List[Type]], depth: Int) {
+ import util.TableDef
+ import TableDef.Column
+ // abbreviate qualified names and clip long renderings to 60 chars
+ def str(tp: Type) = {
+ if (tp == NoType) ""
+ else {
+ val s = ("" + tp).replaceAll("""[\w.]+\.(\w+)""", "$1")
+ if (s.length < 60) s
+ else (s take 57) + "..."
+ }
+ }
+
+ val sorted = btsMap.toList.sortWith((x, y) => x._1.typeSymbol isLess y._1.typeSymbol)
+ val maxSeqLength = sorted.map(_._2.size).max
+ val padded = sorted map (_._2.padTo(maxSeqLength, NoType))
+ val transposed = padded.transpose
+
+ val columns: List[Column[List[Type]]] = mapWithIndex(sorted) {
+ case ((k, v), idx) =>
+ Column(str(k), (xs: List[Type]) => str(xs(idx)), true)
+ }
+
+ val tableDef = TableDef(columns: _*)
+ val formatted = tableDef.table(transposed)
+ println("** Depth is " + depth + "\n" + formatted)
+ }
+
+ /** From a list of types, find any which take type parameters
+ * where the type parameter bounds contain references to other
+ * any types in the list (including itself.)
+ *
+ * @return List of symbol pairs holding the recursive type
+ * parameter and the parameter which references it.
+ */
+ def findRecursiveBounds(ts: List[Type]): List[(Symbol, Symbol)] = ts match {
+   case Nil => Nil
+   case hd :: tl =>
+     val sym = hd.typeSymbol
+     // all types in the list must share the same symbol
+     require(tl forall (_.typeSymbol == sym), ts)
+     val params = sym.typeParams
+     for (p <- params; in <- params; if in.info.bounds contains p) yield (p, in)
+ }
+
+ /** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix),
+ * compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types:
+ *
+ * xs <= ys iff forall y in ys exists x in xs such that x <: y
+ *
+ * @arg tsParams for each type in the original list of types `ts0`, its list of type parameters (if that type is a type constructor)
+ * (these type parameters may be referred to by type arguments in the BTS column of those types,
+ * and must be interpreted as bound variables; i.e., under a type lambda that wraps the types that refer to these type params)
+ * @arg tsBts a matrix whose columns are basetype sequences
+ * the first row is the original list of types for which we're computing the lub
+ * (except that type constructors have been applied to their dummyArgs)
+ * @See baseTypeSeq for a definition of sorted and upwards closed.
+ */
+ private def lubList(ts: List[Type], depth: Int): List[Type] = {
+ // Matching the type params of one of the initial types means dummies.
+ val initialTypeParams = ts map (_.typeParams)
+ def isHotForTs(xs: List[Type]) = initialTypeParams contains (xs map (_.typeSymbol))
+
+ // strip targs that are just the dummy type params of one of the initial types
+ def elimHigherOrderTypeParam(tp: Type) = tp match {
+ case TypeRef(pre, sym, args) if args.nonEmpty && isHotForTs(args) => tp.typeConstructor
+ case _ => tp
+ }
+ var lubListDepth = 0 // diagnostic counter, fed to printLubMatrix below
+ def loop(tsBts: List[List[Type]]): List[Type] = {
+ lubListDepth += 1
+
+ if (tsBts.isEmpty || (tsBts exists typeListIsEmpty)) Nil
+ else if (tsBts.tail.isEmpty) tsBts.head
+ else {
+ // ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts.
+ // Invariant: all symbols "under" (closer to the first row) the frontier
+ // are smaller (according to _.isLess) than the ones "on and beyond" the frontier
+ val ts0 = tsBts map (_.head)
+
+ // Is the frontier made up of types with the same symbol?
+ val isUniformFrontier = (ts0: @unchecked) match {
+ case t :: ts => ts forall (_.typeSymbol == t.typeSymbol)
+ }
+
+ // Produce a single type for this frontier by merging the prefixes and arguments of those
+ // typerefs that share the same symbol: that symbol is the current maximal symbol for which
+ // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before
+ // merging, strip targs that refer to bound tparams (when we're computing the lub of type
+ // constructors.) Also filter out all types that are a subtype of some other type.
+ if (isUniformFrontier) {
+ if (settings.debug.value || printLubs) {
+ val fbounds = findRecursiveBounds(ts0)
+ if (fbounds.nonEmpty) {
+ println("Encountered " + fbounds.size + " recursive bounds while lubbing " + ts0.size + " types.")
+ for ((p0, p1) <- fbounds) {
+ val desc = if (p0 == p1) "its own bounds" else "the bounds of " + p1
+
+ println(" " + p0.fullLocationString + " appears in " + desc)
+ println(" " + p1 + " " + p1.info.bounds)
+ }
+ println("")
+ }
+ }
+ val tails = tsBts map (_.tail)
+ mergePrefixAndArgs(elimSub(ts0 map elimHigherOrderTypeParam, depth), 1, depth) match {
+ case Some(tp) => tp :: loop(tails)
+ case _ => loop(tails)
+ }
+ }
+ else {
+ // frontier is not uniform yet, move it beyond the current minimal symbol;
+ // lather, rinse, repeat
+ val sym = minSym(ts0)
+ val newtps = tsBts map (ts => if (ts.head.typeSymbol == sym) ts.tail else ts)
+ if (printLubs) {
+ val str = (newtps.zipWithIndex map { case (tps, idx) =>
+ tps.map(" " + _ + "\n").mkString(" (" + idx + ")\n", "", "\n")
+ }).mkString("")
+
+ println("Frontier(\n" + str + ")")
+ printLubMatrix((ts zip tsBts).toMap, lubListDepth)
+ }
+
+ loop(newtps)
+ }
+ }
+ }
+
+ val initialBTSes = ts map (_.baseTypeSeq.toList)
+ if (printLubs)
+ printLubMatrix((ts zip initialBTSes).toMap, depth)
+
+ loop(initialBTSes)
+ }
+
+ /** The minimal symbol of a list of types (as determined by `Symbol.isLess`). */
+ private def minSym(tps: List[Type]): Symbol =
+   tps.tail.foldLeft(tps.head.typeSymbol) { (min, tp) =>
+     if (tp.typeSymbol isLess min) tp.typeSymbol else min
+   }
+
+ /** A minimal type list which has a given list of types as its base type sequence */
+ def spanningTypes(ts: List[Type]): List[Type] = ts match {
+   case Nil => Nil
+   case first :: rest =>
+     // keep only types whose symbol is not already covered by `first`
+     val uncovered = rest filterNot (t => first.typeSymbol isSubClass t.typeSymbol)
+     first :: spanningTypes(uncovered)
+ }
+
+ /** Eliminate from list of types all elements which are a supertype
+  *  of some other element of the list. */
+ private def elimSuper(ts: List[Type]): List[Type] = ts match {
+   case Nil | _ :: Nil => ts
+   case t :: rest =>
+     // drop everything t is a subtype of, then recurse; keep t itself
+     // only if no survivor is a subtype of it
+     val survivors = elimSuper(rest filterNot (t <:< _))
+     if (survivors exists (_ <:< t)) survivors else t :: survivors
+ }
+
+ /** Replace the type of an anonymous class by its class bound, seen from its prefix. */
+ def elimAnonymousClass(t: Type) = t match {
+   case TypeRef(prefix, cls, Nil) if cls.isAnonymousClass =>
+     cls.classBound.asSeenFrom(prefix, cls.owner)
+   case _ =>
+     t
+ }
+ /** Replace a refinement carrying declarations by the intersection of its parents. */
+ def elimRefinement(t: Type) = t match {
+   case RefinedType(parents, decls) =>
+     if (decls.isEmpty) t else intersectionType(parents)
+   case _ =>
+     t
+ }
+
+ /** Eliminate from list of types all elements which are a subtype
+ * of some other element of the list. */
+ private def elimSub(ts: List[Type], depth: Int): List[Type] = {
+ // one pass of subtype elimination, with depth-limited subtype tests
+ def elimSub0(ts: List[Type]): List[Type] = ts match {
+ case List() => List()
+ case List(t) => List(t)
+ case t :: ts1 =>
+ val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, decr(depth))))
+ if (rest exists (t1 => isSubType(t, t1, decr(depth)))) rest else t :: rest
+ }
+ val ts0 = elimSub0(ts)
+ if (ts0.isEmpty || ts0.tail.isEmpty) ts0
+ else {
+ // more than one survivor: dealias/widen, strip anonymous classes,
+ // and iterate to a fixpoint (mapConserve keeps `eq` identity when unchanged)
+ val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.dealiasWiden))
+ if (ts1 eq ts0) ts0
+ else elimSub(ts1, depth)
+ }
+ }
+
+ private def stripExistentialsAndTypeVars(ts: List[Type]): (List[Type], List[Symbol]) = {
+ val quantified = ts flatMap {
+ case ExistentialType(qs, _) => qs
+ case t => List()
+ }
+ def stripType(tp: Type): Type = tp match {
+ case ExistentialType(_, res) =>
+ res
+ case tv at TypeVar(_, constr) =>
+ if (tv.instValid) stripType(constr.inst)
+ else if (tv.untouchable) tv
+ else abort("trying to do lub/glb of typevar "+tp)
+ case t => t
+ }
+ val strippedTypes = ts mapConserve stripType
+ (strippedTypes, quantified)
+ }
+
+ def weakLub(ts: List[Type]) =
+ if (ts.nonEmpty && (ts forall isNumericValueType)) (numericLub(ts), true)
+ else if (ts exists typeHasAnnotations)
+ (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true)
+ else (lub(ts), false)
+
+ def weakGlb(ts: List[Type]) = {
+ if (ts.nonEmpty && (ts forall isNumericValueType)) {
+ val nglb = numericGlb(ts)
+ if (nglb != NoType) (nglb, true)
+ else (glb(ts), false)
+ } else if (ts exists typeHasAnnotations) {
+ (annotationsGlb(glb(ts map (_.withoutAnnotations)), ts), true)
+ } else (glb(ts), false)
+ }
+
+ def numericLub(ts: List[Type]) =
+ ts reduceLeft ((t1, t2) =>
+ if (isNumericSubType(t1, t2)) t2
+ else if (isNumericSubType(t2, t1)) t1
+ else IntClass.tpe)
+
+ def numericGlb(ts: List[Type]) =
+ ts reduceLeft ((t1, t2) =>
+ if (isNumericSubType(t1, t2)) t1
+ else if (isNumericSubType(t2, t1)) t2
+ else NoType)
+
+ def isWeakSubType(tp1: Type, tp2: Type) =
+ tp1.deconst.normalize match {
+ case TypeRef(_, sym1, _) if isNumericValueClass(sym1) =>
+ tp2.deconst.normalize match {
+ case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
+ isNumericSubClass(sym1, sym2)
+ case tv2 @ TypeVar(_, _) =>
+ tv2.registerBound(tp1, isLowerBound = true, isNumericBound = true)
+ case _ =>
+ isSubType(tp1, tp2)
+ }
+ case tv1 @ TypeVar(_, _) =>
+ tp2.deconst.normalize match {
+ case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
+ tv1.registerBound(tp2, isLowerBound = false, isNumericBound = true)
+ case _ =>
+ isSubType(tp1, tp2)
+ }
+ case _ =>
+ isSubType(tp1, tp2)
+ }
+
+ /** The isNumericValueType tests appear redundant, but without them
+ * test/continuations-neg/function3.scala goes into an infinite loop.
+ * (Even if the calls are to typeSymbolDirect.)
+ */
+ def isNumericSubType(tp1: Type, tp2: Type): Boolean = (
+ isNumericValueType(tp1)
+ && isNumericValueType(tp2)
+ && isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol)
+ )
+
+ private val lubResults = new mutable.HashMap[(Int, List[Type]), Type]
+ private val glbResults = new mutable.HashMap[(Int, List[Type]), Type]
+
+ def lub(ts: List[Type]): Type = ts match {
+ case List() => NothingClass.tpe
+ case List(t) => t
+ case _ =>
+ if (Statistics.canEnable) Statistics.incCounter(lubCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
+ try {
+ lub(ts, lubDepth(ts))
+ } finally {
+ lubResults.clear()
+ glbResults.clear()
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ }
+ }
+
+ /** The least upper bound wrt <:< of a list of types */
+ private def lub(ts: List[Type], depth: Int): Type = {
+ def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match {
+ case List() => NothingClass.tpe
+ case List(t) => t
+ case ts @ PolyType(tparams, _) :: _ =>
+ val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
+ tparam.cloneSymbol.setInfo(glb(bounds, depth)))
+ PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1)))
+ case ts @ (mt @ MethodType(params, _)) :: rest =>
+ MethodType(params, lub0(matchingRestypes(ts, mt.paramTypes)))
+ case ts @ NullaryMethodType(_) :: rest =>
+ NullaryMethodType(lub0(matchingRestypes(ts, Nil)))
+ case ts @ TypeBounds(_, _) :: rest =>
+ TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
+ case ts @ AnnotatedType(annots, tpe, _) :: rest =>
+ annotationsLub(lub0(ts map (_.withoutAnnotations)), ts)
+ case ts =>
+ lubResults get (depth, ts) match {
+ case Some(lubType) =>
+ lubType
+ case None =>
+ lubResults((depth, ts)) = AnyClass.tpe
+ val res = if (depth < 0) AnyClass.tpe else lub1(ts)
+ lubResults((depth, ts)) = res
+ res
+ }
+ }
+ def lub1(ts0: List[Type]): Type = {
+ val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
+ val lubBaseTypes: List[Type] = lubList(ts, depth)
+ val lubParents = spanningTypes(lubBaseTypes)
+ val lubOwner = commonOwner(ts)
+ val lubBase = intersectionType(lubParents, lubOwner)
+ val lubType =
+ if (phase.erasedTypes || depth == 0) lubBase
+ else {
+ val lubRefined = refinedType(lubParents, lubOwner)
+ val lubThisType = lubRefined.typeSymbol.thisType
+ val narrowts = ts map (_.narrow)
+ def excludeFromLub(sym: Symbol) = (
+ sym.isClass
+ || sym.isConstructor
+ || !sym.isPublic
+ || isGetClass(sym)
+ || narrowts.exists(t => !refines(t, sym))
+ )
+ def lubsym(proto: Symbol): Symbol = {
+ val prototp = lubThisType.memberInfo(proto)
+ val syms = narrowts map (t =>
+ t.nonPrivateMember(proto.name).suchThat(sym =>
+ sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t)))
+ if (syms contains NoSymbol) NoSymbol
+ else {
+ val symtypes =
+ map2(narrowts, syms)((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
+ if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
+ proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth)))
+ else if (symtypes.tail forall (symtypes.head =:= _))
+ proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head)
+ else {
+ def lubBounds(bnds: List[TypeBounds]): TypeBounds =
+ TypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth)))
+ lubRefined.typeSymbol.newAbstractType(proto.name.toTypeName, proto.pos)
+ .setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds)))
+ }
+ }
+ }
+ def refines(tp: Type, sym: Symbol): Boolean = {
+ val syms = tp.nonPrivateMember(sym.name).alternatives;
+ !syms.isEmpty && (syms forall (alt =>
+ // todo alt != sym is strictly speaking not correct, but without it we lose
+ // efficiency.
+ alt != sym && !specializesSym(lubThisType, sym, tp, alt, depth)))
+ }
+ // add a refinement symbol for all non-class members of lubBase
+ // which are refined by every type in ts.
+ for (sym <- lubBase.nonPrivateMembers ; if !excludeFromLub(sym)) {
+ try {
+ val lsym = lubsym(sym)
+ if (lsym != NoSymbol) addMember(lubThisType, lubRefined, lsym, depth)
+ } catch {
+ case ex: NoCommonType =>
+ }
+ }
+ if (lubRefined.decls.isEmpty) lubBase
+ else if (!verifyLubs) lubRefined
+ else {
+ // Verify that every given type conforms to the calculated lub.
+ // In theory this should not be necessary, but higher-order type
+ // parameters are not handled correctly.
+ val ok = ts forall { t =>
+ isSubType(t, lubRefined, depth) || {
+ if (settings.debug.value || printLubs) {
+ Console.println(
+ "Malformed lub: " + lubRefined + "\n" +
+ "Argument " + t + " does not conform. Falling back to " + lubBase
+ )
+ }
+ false
+ }
+ }
+ // If not, fall back on the more conservative calculation.
+ if (ok) lubRefined
+ else lubBase
+ }
+ }
+ // dropRepeatedParamType is a localized fix for SI-6897. We should probably
+ // integrate that transformation at a lower level in master, but lubs are
+ // the likely and maybe only spot they escape, so fixing here for 2.10.1.
+ existentialAbstraction(tparams, dropRepeatedParamType(lubType))
+ }
+ if (printLubs) {
+ println(indent + "lub of " + ts + " at depth "+depth)//debug
+ indent = indent + " "
+ assert(indent.length <= 100)
+ }
+ if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
+ val res = lub0(ts)
+ if (printLubs) {
+ indent = indent stripSuffix " "
+ println(indent + "lub of " + ts + " is " + res)//debug
+ }
+ if (ts forall typeIsNotNull) res.notNull else res
+ }
+
+ val GlbFailure = new Throwable
+
+ /** A global counter for glb calls in the `specializes` query connected to the `addMembers`
+ * call in `glb`. There's a possible infinite recursion when `specializes` calls
+ * memberType, which calls baseTypeSeq, which calls mergePrefixAndArgs, which calls glb.
+ * The counter breaks this recursion after two calls.
+ * If the recursion is broken, no member is added to the glb.
+ */
+ private var globalGlbDepth = 0
+ private final val globalGlbLimit = 2
+
+ /** The greatest lower bound of a list of types (as determined by `<:<`). */
+ def glb(ts: List[Type]): Type = elimSuper(ts) match {
+ case List() => AnyClass.tpe
+ case List(t) => t
+ case ts0 =>
+ if (Statistics.canEnable) Statistics.incCounter(lubCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
+ try {
+ glbNorm(ts0, lubDepth(ts0))
+ } finally {
+ lubResults.clear()
+ glbResults.clear()
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ }
+ }
+
+ private def glb(ts: List[Type], depth: Int): Type = elimSuper(ts) match {
+ case List() => AnyClass.tpe
+ case List(t) => t
+ case ts0 => glbNorm(ts0, depth)
+ }
+
+ /** The greatest lower bound of a list of types (as determined by `<:<`), which have been normalized
+ * with regard to `elimSuper`. */
+ protected def glbNorm(ts: List[Type], depth: Int): Type = {
+ def glb0(ts0: List[Type]): Type = ts0 match {
+ case List() => AnyClass.tpe
+ case List(t) => t
+ case ts @ PolyType(tparams, _) :: _ =>
+ val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
+ tparam.cloneSymbol.setInfo(lub(bounds, depth)))
+ PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth))
+ case ts @ (mt @ MethodType(params, _)) :: rest =>
+ MethodType(params, glbNorm(matchingRestypes(ts, mt.paramTypes), depth))
+ case ts @ NullaryMethodType(_) :: rest =>
+ NullaryMethodType(glbNorm(matchingRestypes(ts, Nil), depth))
+ case ts @ TypeBounds(_, _) :: rest =>
+ TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth))
+ case ts =>
+ glbResults get (depth, ts) match {
+ case Some(glbType) =>
+ glbType
+ case _ =>
+ glbResults((depth, ts)) = NothingClass.tpe
+ val res = if (depth < 0) NothingClass.tpe else glb1(ts)
+ glbResults((depth, ts)) = res
+ res
+ }
+ }
+ def glb1(ts0: List[Type]): Type = {
+ try {
+ val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
+ val glbOwner = commonOwner(ts)
+ def refinedToParents(t: Type): List[Type] = t match {
+ case RefinedType(ps, _) => ps flatMap refinedToParents
+ case _ => List(t)
+ }
+ def refinedToDecls(t: Type): List[Scope] = t match {
+ case RefinedType(ps, decls) =>
+ val dss = ps flatMap refinedToDecls
+ if (decls.isEmpty) dss else decls :: dss
+ case _ => List()
+ }
+ val ts1 = ts flatMap refinedToParents
+ val glbBase = intersectionType(ts1, glbOwner)
+ val glbType =
+ if (phase.erasedTypes || depth == 0) glbBase
+ else {
+ val glbRefined = refinedType(ts1, glbOwner)
+ val glbThisType = glbRefined.typeSymbol.thisType
+ def glbsym(proto: Symbol): Symbol = {
+ val prototp = glbThisType.memberInfo(proto)
+ val syms = for (t <- ts;
+ alt <- (t.nonPrivateMember(proto.name).alternatives);
+ if glbThisType.memberInfo(alt) matches prototp
+ ) yield alt
+ val symtypes = syms map glbThisType.memberInfo
+ assert(!symtypes.isEmpty)
+ proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted(
+ if (proto.isTerm) glb(symtypes, decr(depth))
+ else {
+ def isTypeBound(tp: Type) = tp match {
+ case TypeBounds(_, _) => true
+ case _ => false
+ }
+ def glbBounds(bnds: List[Type]): TypeBounds = {
+ val lo = lub(bnds map (_.bounds.lo), decr(depth))
+ val hi = glb(bnds map (_.bounds.hi), decr(depth))
+ if (lo <:< hi) TypeBounds(lo, hi)
+ else throw GlbFailure
+ }
+ val symbounds = symtypes filter isTypeBound
+ var result: Type =
+ if (symbounds.isEmpty)
+ TypeBounds.empty
+ else glbBounds(symbounds)
+ for (t <- symtypes if !isTypeBound(t))
+ if (result.bounds containsType t) result = t
+ else throw GlbFailure
+ result
+ })
+ }
+ if (globalGlbDepth < globalGlbLimit)
+ try {
+ globalGlbDepth += 1
+ val dss = ts flatMap refinedToDecls
+ for (ds <- dss; sym <- ds.iterator)
+ if (globalGlbDepth < globalGlbLimit && !specializesSym(glbThisType, sym, depth))
+ try {
+ addMember(glbThisType, glbRefined, glbsym(sym), depth)
+ } catch {
+ case ex: NoCommonType =>
+ }
+ } finally {
+ globalGlbDepth -= 1
+ }
+ if (glbRefined.decls.isEmpty) glbBase else glbRefined
+ }
+ existentialAbstraction(tparams, glbType)
+ } catch {
+ case GlbFailure =>
+ if (ts forall (t => NullClass.tpe <:< t)) NullClass.tpe
+ else NothingClass.tpe
+ }
+ }
+ // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG
+
+ if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
+ val res = glb0(ts)
+
+ // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
+
+ if (ts exists typeIsNotNull) res.notNull else res
+ }
+
+ /** A list of the typevars in a type. */
+ def typeVarsInType(tp: Type): List[TypeVar] = {
+ var tvs: List[TypeVar] = Nil
+ tp foreach {
+ case t: TypeVar => tvs ::= t
+ case _ =>
+ }
+ tvs.reverse
+ }
+ /** Make each type var in this type use its original type for comparisons instead
+ * of collecting constraints.
+ */
+ def suspendTypeVarsInType(tp: Type): List[TypeVar] = {
+ val tvs = typeVarsInType(tp)
+ // !!! Is it somehow guaranteed that this will not break under nesting?
+ // In general one has to save and restore the contents of the field...
+ tvs foreach (_.suspended = true)
+ tvs
+ }
+
+ /** Compute lub (if `variance == 1`) or glb (if `variance == -1`) of given list
+ * of types `tps`. All types in `tps` are typerefs or singletypes
+ * with the same symbol.
+ * Return `Some(x)` if the computation succeeds with result `x`.
+ * Return `None` if the computation fails.
+ */
+ def mergePrefixAndArgs(tps: List[Type], variance: Int, depth: Int): Option[Type] = tps match {
+ case List(tp) =>
+ Some(tp)
+ case TypeRef(_, sym, _) :: rest =>
+ val pres = tps map (_.prefix) // prefix normalizes automatically
+ val pre = if (variance == 1) lub(pres, depth) else glb(pres, depth)
+ val argss = tps map (_.normalize.typeArgs) // symbol equality (of the tp in tps) was checked using typeSymbol, which normalizes, so should normalize before retrieving arguments
+ val capturedParams = new ListBuffer[Symbol]
+ try {
+ if (sym == ArrayClass && phase.erasedTypes) {
+ // special treatment for lubs of array types after erasure:
+ // if argss contain one value type and some other type, the lub is Object
+ // if argss contain several reference types, the lub is an array over lub of argtypes
+ if (argss exists typeListIsEmpty) {
+ None // something is wrong: an array without a type arg.
+ } else {
+ val args = argss map (_.head)
+ if (args.tail forall (_ =:= args.head)) Some(typeRef(pre, sym, List(args.head)))
+ else if (args exists (arg => isPrimitiveValueClass(arg.typeSymbol))) Some(ObjectClass.tpe)
+ else Some(typeRef(pre, sym, List(lub(args))))
+ }
+ }
+ else transposeSafe(argss) match {
+ case None =>
+ // transpose freaked out because of irregular argss
+ // catching just in case (shouldn't happen, but also doesn't cost us)
+ // [JZ] It happens: see SI-5683.
+ debuglog("transposed irregular matrix!?" +(tps, argss))
+ None
+ case Some(argsst) =>
+ val args = map2(sym.typeParams, argsst) { (tparam, as) =>
+ if (depth == 0) {
+ if (tparam.variance == variance) {
+ // Take the intersection of the upper bounds of the type parameters
+ // rather than falling all the way back to "Any", otherwise we end up not
+ // conforming to bounds.
+ val bounds0 = sym.typeParams map (_.info.bounds.hi) filterNot (_.typeSymbol == AnyClass)
+ if (bounds0.isEmpty) AnyClass.tpe
+ else intersectionType(bounds0 map (b => b.asSeenFrom(tps.head, sym)))
+ }
+ else if (tparam.variance == -variance) NothingClass.tpe
+ else NoType
+ }
+ else {
+ if (tparam.variance == variance) lub(as, decr(depth))
+ else if (tparam.variance == -variance) glb(as, decr(depth))
+ else {
+ val l = lub(as, decr(depth))
+ val g = glb(as, decr(depth))
+ if (l <:< g) l
+ else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
+ // just err on the conservative side, i.e. with a bound that is too high.
+ // if(!(tparam.info.bounds contains tparam)) //@M can't deal with f-bounds, see #2251
+
+ val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
+ capturedParams += qvar
+ qvar.tpe
+ }
+ }
+ }
+ }
+ if (args contains NoType) None
+ else Some(existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args)))
+ }
+ } catch {
+ case ex: MalformedType => None
+ }
+ case SingleType(_, sym) :: rest =>
+ val pres = tps map (_.prefix)
+ val pre = if (variance == 1) lub(pres, depth) else glb(pres, depth)
+ try {
+ Some(singleType(pre, sym))
+ } catch {
+ case ex: MalformedType => None
+ }
+ case ExistentialType(tparams, quantified) :: rest =>
+ mergePrefixAndArgs(quantified :: rest, variance, depth) map (existentialAbstraction(tparams, _))
+ case _ =>
+ abort(s"mergePrefixAndArgs($tps, $variance, $depth): unsupported tps")
+ }
+
+ def addMember(thistp: Type, tp: Type, sym: Symbol): Unit = addMember(thistp, tp, sym, AnyDepth)
+
+ /** Make symbol `sym` a member of scope `tp.decls`
+ * where `thistp` is the narrowed owner type of the scope.
+ */
+ def addMember(thistp: Type, tp: Type, sym: Symbol, depth: Int) {
+ assert(sym != NoSymbol)
+ // debuglog("add member " + sym+":"+sym.info+" to "+thistp) //DEBUG
+ if (!specializesSym(thistp, sym, depth)) {
+ if (sym.isTerm)
+ for (alt <- tp.nonPrivateDecl(sym.name).alternatives)
+ if (specializesSym(thistp, sym, thistp, alt, depth))
+ tp.decls unlink alt;
+ tp.decls enter sym
+ }
+ }
+
+ def isJavaVarargsAncestor(clazz: Symbol) = (
+ clazz.isClass
+ && clazz.isJavaDefined
+ && (clazz.info.nonPrivateDecls exists isJavaVarArgsMethod)
+ )
+ def inheritsJavaVarArgsMethod(clazz: Symbol) =
+ clazz.thisType.baseClasses exists isJavaVarargsAncestor
+
+ /** All types in list must be polytypes with type parameter lists of
+ * same length as tparams.
+ * Returns list of list of bounds infos, where corresponding type
+ * parameters are renamed to tparams.
+ */
+ private def matchingBounds(tps: List[Type], tparams: List[Symbol]): List[List[Type]] = {
+ def getBounds(tp: Type): List[Type] = tp match {
+ case PolyType(tparams1, _) if sameLength(tparams1, tparams) =>
+ tparams1 map (tparam => tparam.info.substSym(tparams1, tparams))
+ case tp =>
+ if (tp ne tp.normalize) getBounds(tp.normalize)
+ else throw new NoCommonType(tps)
+ }
+ tps map getBounds
+ }
+
+ /** All types in list must be polytypes with type parameter lists of
+ * same length as tparams.
+ * Returns list of instance types, where corresponding type
+ * parameters are renamed to tparams.
+ */
+ private def matchingInstTypes(tps: List[Type], tparams: List[Symbol]): List[Type] = {
+ def transformResultType(tp: Type): Type = tp match {
+ case PolyType(tparams1, restpe) if sameLength(tparams1, tparams) =>
+ restpe.substSym(tparams1, tparams)
+ case tp =>
+ if (tp ne tp.normalize) transformResultType(tp.normalize)
+ else throw new NoCommonType(tps)
+ }
+ tps map transformResultType
+ }
+
+ /** All types in list must be method types with equal parameter types.
+ * Returns list of their result types.
+ */
+ private def matchingRestypes(tps: List[Type], pts: List[Type]): List[Type] =
+ tps map {
+ case mt @ MethodType(params1, res) if isSameTypes(mt.paramTypes, pts) =>
+ res
+ case NullaryMethodType(res) if pts.isEmpty =>
+ res
+ case _ =>
+ throw new NoCommonType(tps)
+ }
+
+// Errors and Diagnostics -----------------------------------------------------
+
+ /** A throwable signalling a type error */
+ class TypeError(var pos: Position, val msg: String) extends Throwable(msg) {
+ def this(msg: String) = this(NoPosition, msg)
+ }
+
+ // TODO: RecoverableCyclicReference should be separated from TypeError,
+ // but that would be a big change. Left for further refactoring.
+ /** An exception for cyclic references from which we can recover */
+ case class RecoverableCyclicReference(sym: Symbol)
+ extends TypeError("illegal cyclic reference involving " + sym) {
+ if (settings.debug.value) printStackTrace()
+ }
+
+ class NoCommonType(tps: List[Type]) extends Throwable(
+ "lub/glb of incompatible types: " + tps.mkString("", " and ", "")) with ControlThrowable
+
+ /** A throwable signalling a malformed type */
+ class MalformedType(msg: String) extends TypeError(msg) {
+ def this(pre: Type, tp: String) = this("malformed type: " + pre + "#" + tp)
+ }
+
+ /** The current indentation string for traces */
+ private var indent: String = ""
+
+ /** Perform operation `p` on arguments `tp1`, `arg2` and print trace of computation. */
+ protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
+ Console.println(indent + tp1 + " " + op + " " + arg2 + "?" /* + "("+tp1.getClass+","+arg2.getClass+")"*/)
+ indent = indent + " "
+ val result = p(tp1, arg2)
+ indent = indent stripSuffix " "
+ Console.println(indent + result)
+ result
+ }
+
+ /** If option `explaintypes` is set, print a subtype trace for `found <:< required`. */
+ def explainTypes(found: Type, required: Type) {
+ if (settings.explaintypes.value) withTypesExplained(found <:< required)
+ }
+
+ /** If option `explaintypes` is set, print a subtype trace for `op(found, required)`. */
+ def explainTypes(op: (Type, Type) => Any, found: Type, required: Type) {
+ if (settings.explaintypes.value) withTypesExplained(op(found, required))
+ }
+
+ /** Execute `op` while printing a trace of the operations on types executed. */
+ def withTypesExplained[A](op: => A): A = {
+ val s = explainSwitch
+ try { explainSwitch = true; op } finally { explainSwitch = s }
+ }
+
+ def isUnboundedGeneric(tp: Type) = tp match {
+ case t @ TypeRef(_, sym, _) => sym.isAbstractType && !(t <:< AnyRefClass.tpe)
+ case _ => false
+ }
+ def isBoundedGeneric(tp: Type) = tp match {
+ case TypeRef(_, sym, _) if sym.isAbstractType => (tp <:< AnyRefClass.tpe)
+ case TypeRef(_, sym, _) => !isPrimitiveValueClass(sym)
+ case _ => false
+ }
+ // Add serializable to a list of parents, unless one of them already is
+ def addSerializable(ps: Type*): List[Type] = (
+ if (ps exists typeIsSubTypeOfSerializable) ps.toList
+ else (ps :+ SerializableClass.tpe).toList
+ )
+
+ /** Adds the @uncheckedBound annotation if the given `tp` has type arguments */
+ final def uncheckedBounds(tp: Type): Type = {
+ if (tp.typeArgs.isEmpty || UncheckedBoundsClass == NoSymbol) tp // second condition for backwards compatibilty with older scala-reflect.jar
+ else tp.withAnnotation(AnnotationInfo marker UncheckedBoundsClass.tpe)
+ }
+
+ /** Members of the given class, other than those inherited
+ * from Any or AnyRef.
+ */
+ def nonTrivialMembers(clazz: Symbol): Iterable[Symbol] =
+ clazz.info.members filterNot (sym => sym.owner == ObjectClass || sym.owner == AnyClass)
+
+ def objToAny(tp: Type): Type =
+ if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyClass.tpe
+ else tp
+
+ val shorthands = Set(
+ "scala.collection.immutable.List",
+ "scala.collection.immutable.Nil",
+ "scala.collection.Seq",
+ "scala.collection.Traversable",
+ "scala.collection.Iterable",
+ "scala.collection.mutable.StringBuilder",
+ "scala.collection.IndexedSeq",
+ "scala.collection.Iterator")
+
+
+ /** The maximum number of recursions allowed in toString
+ */
+ final val maxTostringRecursions = 50
+
+ private var tostringRecursions = 0
+
+ protected def typeToString(tpe: Type): String =
+ if (tostringRecursions >= maxTostringRecursions) {
+ debugwarn("Exceeded recursion depth attempting to print type.")
+ if (settings.debug.value)
+ (new Throwable).printStackTrace
+
+ "..."
+ }
+ else
+ try {
+ tostringRecursions += 1
+ tpe.safeToString
+ } finally {
+ tostringRecursions -= 1
+ }
+
+// ----- Hoisted closures and convenience methods, for compile time reductions -------
+
+ private[scala] val typeIsNotNull = (tp: Type) => tp.isNotNull
+ private[scala] val isTypeVar = (tp: Type) => tp.isInstanceOf[TypeVar]
+ private[scala] val typeContainsTypeVar = (tp: Type) => tp exists isTypeVar
+ private[scala] val typeIsNonClassType = (tp: Type) => tp.typeSymbolDirect.isNonClassType
+ private[scala] val typeIsExistentiallyBound = (tp: Type) => tp.typeSymbol.isExistentiallyBound
+ private[scala] val typeIsErroneous = (tp: Type) => tp.isErroneous
+ private[scala] val typeIsError = (tp: Type) => tp.isError
+ private[scala] val typeHasAnnotations = (tp: Type) => tp.annotations.nonEmpty
+ private[scala] val boundsContainType = (bounds: TypeBounds, tp: Type) => bounds containsType tp
+ private[scala] val typeListIsEmpty = (ts: List[Type]) => ts.isEmpty
+ private[scala] val typeIsSubTypeOfSerializable = (tp: Type) => tp <:< SerializableClass.tpe
+ private[scala] val typeIsNothing = (tp: Type) => tp.typeSymbolDirect eq NothingClass
+ private[scala] val typeIsAny = (tp: Type) => tp.typeSymbolDirect eq AnyClass
+ private[scala] val typeIsHigherKinded = (tp: Type) => tp.isHigherKinded
+
+ @tailrec private def typesContain(tps: List[Type], sym: Symbol): Boolean = tps match {
+ case tp :: rest => (tp contains sym) || typesContain(rest, sym)
+ case _ => false
+ }
+
+ @tailrec private def areTrivialTypes(tps: List[Type]): Boolean = tps match {
+ case tp :: rest => tp.isTrivial && areTrivialTypes(rest)
+ case _ => true
+ }
+
+// -------------- Classtags --------------------------------------------------------
+
+ implicit val AnnotatedTypeTag = ClassTag[AnnotatedType](classOf[AnnotatedType])
+ implicit val BoundedWildcardTypeTag = ClassTag[BoundedWildcardType](classOf[BoundedWildcardType])
+ implicit val ClassInfoTypeTag = ClassTag[ClassInfoType](classOf[ClassInfoType])
+ implicit val CompoundTypeTag = ClassTag[CompoundType](classOf[CompoundType])
+ implicit val ConstantTypeTag = ClassTag[ConstantType](classOf[ConstantType])
+ implicit val ExistentialTypeTag = ClassTag[ExistentialType](classOf[ExistentialType])
+ implicit val MethodTypeTag = ClassTag[MethodType](classOf[MethodType])
+ implicit val NullaryMethodTypeTag = ClassTag[NullaryMethodType](classOf[NullaryMethodType])
+ implicit val PolyTypeTag = ClassTag[PolyType](classOf[PolyType])
+ implicit val RefinedTypeTag = ClassTag[RefinedType](classOf[RefinedType])
+ implicit val SingletonTypeTag = ClassTag[SingletonType](classOf[SingletonType])
+ implicit val SingleTypeTag = ClassTag[SingleType](classOf[SingleType])
+ implicit val SuperTypeTag = ClassTag[SuperType](classOf[SuperType])
+ implicit val ThisTypeTag = ClassTag[ThisType](classOf[ThisType])
+ implicit val TypeBoundsTag = ClassTag[TypeBounds](classOf[TypeBounds])
+ implicit val TypeRefTag = ClassTag[TypeRef](classOf[TypeRef])
+ implicit val TypeTagg = ClassTag[Type](classOf[Type])
+
+// -------------- Statistics --------------------------------------------------------
+
+ Statistics.newView("#unique types") { if (uniques == null) 0 else uniques.size }
+
+}
+
+object TypesStats {
+ import BaseTypeSeqsStats._
+ val rawTypeCount = Statistics.newCounter ("#raw type creations")
+ val asSeenFromCount = Statistics.newCounter ("#asSeenFrom ops")
+ val subtypeCount = Statistics.newCounter ("#subtype ops")
+ val sametypeCount = Statistics.newCounter ("#sametype ops")
+ val lubCount = Statistics.newCounter ("#toplevel lubs/glbs")
+ val nestedLubCount = Statistics.newCounter ("#all lubs/glbs")
+ val findMemberCount = Statistics.newCounter ("#findMember ops")
+ val findMembersCount = Statistics.newCounter ("#findMembers ops")
+ val noMemberCount = Statistics.newSubCounter(" of which not found", findMemberCount)
+ val multMemberCount = Statistics.newSubCounter(" of which multiple overloaded", findMemberCount)
+ val typerNanos = Statistics.newTimer ("time spent typechecking", "typer")
+ val lubNanos = Statistics.newStackableTimer("time spent in lubs", typerNanos)
+ val subtypeNanos = Statistics.newStackableTimer("time spent in <:<", typerNanos)
+ val findMemberNanos = Statistics.newStackableTimer("time spent in findmember", typerNanos)
+ val findMembersNanos = Statistics.newStackableTimer("time spent in findmembers", typerNanos)
+ val asSeenFromNanos = Statistics.newStackableTimer("time spent in asSeenFrom", typerNanos)
+ val baseTypeSeqNanos = Statistics.newStackableTimer("time spent in baseTypeSeq", typerNanos)
+ val baseClassesNanos = Statistics.newStackableTimer("time spent in baseClasses", typerNanos)
+ val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
+ val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
+ val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
+ val typeOpsStack = Statistics.newTimerStack()
+
+ /** Commented out, because right now this does not inline, so creates a closure which will distort statistics
+ @inline final def timedTypeOp[T](c: Statistics.StackableTimer)(op: => T): T = {
+ val start = Statistics.pushTimer(typeOpsStack, c)
+ try op
+ finally
+ }
+ */
+}
diff --git a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
new file mode 100644
index 0000000..058ff61
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
@@ -0,0 +1,31 @@
+package scala.reflect
+package internal
+package annotations
+
+import scala.annotation.meta._
+
+/**
+ * An annotation that designates a member should not be referred to after
+ * type checking (which includes macro expansion); it must only be used in
+ * the arguments of some other macro that will eliminate it from the AST.
+ *
+ * Later on, this annotation should be removed and implemented with domain-specific macros.
+ * If a certain method `inner` mustn't be called outside the context of a given macro `outer`,
+ * then it should itself be declared as a macro.
+ *
+ * Approach #1. Expansion of `inner` checks whether its enclosures contain `outer` and
+ * report an error if `outer` is not detected. In principle, we could use this approach right now,
+ * but currently enclosures are broken, because contexts aren't exactly famous for keeping precise
+ * track of the stack of the trees being typechecked.
+ *
+ * Approach #2. Default implementation of `inner` is just an invocation of `c.abort`.
+ * `outer` is an untyped macro, which expands into a block, which contains a redefinition of `inner`
+ * and a call to itself. The redefined `inner` could either be a stub like `Expr.splice` or carry out
+ * domain-specific logic.
+ *
+ * @param message the error message to print during compilation if a reference remains
+ * after type checking
+ * @since 2.10.1
+ */
+ at getter @setter @beanGetter @beanSetter
+final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation
diff --git a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala
new file mode 100644
index 0000000..a44bb54
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala
@@ -0,0 +1,13 @@
+package scala.reflect
+package internal
+package annotations
+
+/**
+ * An annotation that designates the annotated type should not be checked for violations of
+ * type parameter bounds in the `refchecks` phase of the compiler. This can be used by synthesized
+ * code the uses an inferred type of an expression as the type of an artifict val/def (for example,
+ * a temporary value introduced by an ANF transform). See [[https://issues.scala-lang.org/browse/SI-7694]].
+ *
+ * @since 2.10.3
+ */
+final class uncheckedBounds extends scala.annotation.StaticAnnotation
diff --git a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
new file mode 100644
index 0000000..367a3b8
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
@@ -0,0 +1,221 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.reflect.internal.pickling
+
+object ByteCodecs {
+
+ def avoidZero(src: Array[Byte]): Array[Byte] = {
+ var i = 0
+ val srclen = src.length
+ var count = 0
+ while (i < srclen) {
+ if (src(i) == 0x7f) count += 1
+ i += 1
+ }
+ val dst = new Array[Byte](srclen + count)
+ i = 0
+ var j = 0
+ while (i < srclen) {
+ val in = src(i)
+ if (in == 0x7f) {
+ dst(j) = (0xc0).toByte
+ dst(j + 1) = (0x80).toByte
+ j += 2
+ } else {
+ dst(j) = (in + 1).toByte
+ j += 1
+ }
+ i += 1
+ }
+ dst
+ }
+
+ def regenerateZero(src: Array[Byte]): Int = {
+ var i = 0
+ val srclen = src.length
+ var j = 0
+ while (i < srclen) {
+ val in: Int = src(i) & 0xff
+ if (in == 0xc0 && (src(i + 1) & 0xff) == 0x80) {
+ src(j) = 0x7f
+ i += 2
+ } else if (in == 0) {
+ src(j) = 0x7f
+ i += 1
+ } else {
+ src(j) = (in - 1).toByte
+ i += 1
+ }
+ j += 1
+ }
+ j
+ }
+
+ def encode8to7(src: Array[Byte]): Array[Byte] = {
+ val srclen = src.length
+ val dstlen = (srclen * 8 + 6) / 7
+ val dst = new Array[Byte](dstlen)
+ var i = 0
+ var j = 0
+ while (i + 6 < srclen) {
+ var in: Int = src(i) & 0xff
+ dst(j) = (in & 0x7f).toByte
+ var out: Int = in >>> 7
+ in = src(i + 1) & 0xff
+ dst(j + 1) = (out | (in << 1) & 0x7f).toByte
+ out = in >>> 6
+ in = src(i + 2) & 0xff
+ dst(j + 2) = (out | (in << 2) & 0x7f).toByte
+ out = in >>> 5
+ in = src(i + 3) & 0xff
+ dst(j + 3) = (out | (in << 3) & 0x7f).toByte
+ out = in >>> 4
+ in = src(i + 4) & 0xff
+ dst(j + 4) = (out | (in << 4) & 0x7f).toByte
+ out = in >>> 3
+ in = src(i + 5) & 0xff
+ dst(j + 5) = (out | (in << 5) & 0x7f).toByte
+ out = in >>> 2
+ in = src(i + 6) & 0xff
+ dst(j + 6) = (out | (in << 6) & 0x7f).toByte
+ out = in >>> 1
+ dst(j + 7) = out.toByte
+ i += 7
+ j += 8
+ }
+ if (i < srclen) {
+ var in: Int = src(i) & 0xff
+ dst(j) = (in & 0x7f).toByte; j += 1
+ var out: Int = in >>> 7
+ if (i + 1 < srclen) {
+ in = src(i + 1) & 0xff
+ dst(j) = (out | (in << 1) & 0x7f).toByte; j += 1
+ out = in >>> 6
+ if (i + 2 < srclen) {
+ in = src(i + 2) & 0xff
+ dst(j) = (out | (in << 2) & 0x7f).toByte; j += 1
+ out = in >>> 5
+ if (i + 3 < srclen) {
+ in = src(i + 3) & 0xff
+ dst(j) = (out | (in << 3) & 0x7f).toByte; j += 1
+ out = in >>> 4
+ if (i + 4 < srclen) {
+ in = src(i + 4) & 0xff
+ dst(j) = (out | (in << 4) & 0x7f).toByte; j += 1
+ out = in >>> 3
+ if (i + 5 < srclen) {
+ in = src(i + 5) & 0xff
+ dst(j) = (out | (in << 5) & 0x7f).toByte; j += 1
+ out = in >>> 2
+ }
+ }
+ }
+ }
+ }
+ if (j < dstlen) dst(j) = out.toByte
+ }
+ dst
+ }
+
+ def decode7to8(src: Array[Byte], srclen: Int): Int = {
+ var i = 0
+ var j = 0
+ val dstlen = (srclen * 7 + 7) / 8
+ while (i + 7 < srclen) {
+ var out: Int = src(i)
+ var in: Byte = src(i + 1)
+ src(j) = (out | (in & 0x01) << 7).toByte
+ out = in >>> 1
+ in = src(i + 2)
+ src(j + 1) = (out | (in & 0x03) << 6).toByte
+ out = in >>> 2
+ in = src(i + 3)
+ src(j + 2) = (out | (in & 0x07) << 5).toByte
+ out = in >>> 3
+ in = src(i + 4)
+ src(j + 3) = (out | (in & 0x0f) << 4).toByte
+ out = in >>> 4
+ in = src(i + 5)
+ src(j + 4) = (out | (in & 0x1f) << 3).toByte
+ out = in >>> 5
+ in = src(i + 6)
+ src(j + 5) = (out | (in & 0x3f) << 2).toByte
+ out = in >>> 6
+ in = src(i + 7)
+ src(j + 6) = (out | in << 1).toByte
+ i += 8
+ j += 7
+ }
+ if (i < srclen) {
+ var out: Int = src(i)
+ if (i + 1 < srclen) {
+ var in: Byte = src(i + 1)
+ src(j) = (out | (in & 0x01) << 7).toByte; j += 1
+ out = in >>> 1
+ if (i + 2 < srclen) {
+ in = src(i + 2)
+ src(j) = (out | (in & 0x03) << 6).toByte; j += 1
+ out = in >>> 2
+ if (i + 3 < srclen) {
+ in = src(i + 3)
+ src(j) = (out | (in & 0x07) << 5).toByte; j += 1
+ out = in >>> 3
+ if (i + 4 < srclen) {
+ in = src(i + 4)
+ src(j) = (out | (in & 0x0f) << 4).toByte; j += 1
+ out = in >>> 4
+ if (i + 5 < srclen) {
+ in = src(i + 5)
+ src(j) = (out | (in & 0x1f) << 3).toByte; j += 1
+ out = in >>> 5
+ if (i + 6 < srclen) {
+ in = src(i + 6)
+ src(j) = (out | (in & 0x3f) << 2).toByte; j += 1
+ out = in >>> 6
+ }
+ }
+ }
+ }
+ }
+ }
+ if (j < dstlen) src(j) = out.toByte
+ }
+ dstlen
+ }
+
+ def encode(xs: Array[Byte]): Array[Byte] = avoidZero(encode8to7(xs))
+
+ /**
+ * Destructively decodes array xs and returns the length of the decoded array.
+ *
+ * Sometimes returns (length+1) of the decoded array. Example:
+ *
+ * scala> val enc = reflect.generic.ByteCodecs.encode(Array(1,2,3))
+ * enc: Array[Byte] = Array(2, 5, 13, 1)
+ *
+ * scala> reflect.generic.ByteCodecs.decode(enc)
+ * res43: Int = 4
+ *
+ * scala> enc
+ * res44: Array[Byte] = Array(1, 2, 3, 0)
+ *
+ * However, this does not always happen.
+ */
+ def decode(xs: Array[Byte]): Int = {
+ val len = regenerateZero(xs)
+ decode7to8(xs, len)
+ }
+}
+
+
+
+
+
+
+
+
diff --git a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
new file mode 100644
index 0000000..6170fcb
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
@@ -0,0 +1,188 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+package pickling
+
+/** Variable length byte arrays, with methods for basic pickling and unpickling.
+ *
+ * @param data The initial buffer
+ * @param from The first index where defined data are found
+ * @param to The first index where new data can be written
+ */
+class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
+
+ var bytes = data
+ var readIndex = from
+ var writeIndex = to
+
+ /** Double bytes array */
+ private def dble() {
+ val bytes1 = new Array[Byte](bytes.length * 2)
+ Array.copy(bytes, 0, bytes1, 0, writeIndex)
+ bytes = bytes1
+ }
+
+ def ensureCapacity(capacity: Int) =
+ while (bytes.length < writeIndex + capacity) dble()
+
+ // -- Basic output routines --------------------------------------------
+
+ /** Write a byte of data */
+ def writeByte(b: Int) {
+ if (writeIndex == bytes.length) dble()
+ bytes(writeIndex) = b.toByte
+ writeIndex += 1
+ }
+
+ /** Write a natural number in big endian format, base 128.
+ * All but the last digits have bit 0x80 set.
+ */
+ def writeNat(x: Int) =
+ writeLongNat(x.toLong & 0x00000000FFFFFFFFL)
+
+ /**
+ * Like writeNat, but for longs. This is not the same as
+ * writeLong, which writes in base 256. Note that the
+ * binary representation of LongNat is identical to Nat
+ * if the long value is in the range Int.MIN_VALUE to
+ * Int.MAX_VALUE.
+ */
+ def writeLongNat(x: Long) {
+ def writeNatPrefix(x: Long) {
+ val y = x >>> 7
+ if (y != 0L) writeNatPrefix(y)
+ writeByte(((x & 0x7f) | 0x80).toInt)
+ }
+ val y = x >>> 7
+ if (y != 0L) writeNatPrefix(y)
+ writeByte((x & 0x7f).toInt)
+ }
+
+ /** Write a natural number <code>x</code> at position <code>pos</code>.
+ * If number is more than one byte, shift rest of array to make space.
+ *
+ * @param pos ...
+ * @param x ...
+ */
+ def patchNat(pos: Int, x: Int) {
+ def patchNatPrefix(x: Int) {
+ writeByte(0)
+ Array.copy(bytes, pos, bytes, pos+1, writeIndex - (pos+1))
+ bytes(pos) = ((x & 0x7f) | 0x80).toByte
+ val y = x >>> 7
+ if (y != 0) patchNatPrefix(y)
+ }
+ bytes(pos) = (x & 0x7f).toByte
+ val y = x >>> 7
+ if (y != 0) patchNatPrefix(y)
+ }
+
+ /** Write a long number <code>x</code> in signed big endian format, base 256.
+ *
+ * @param x The long number to be written.
+ */
+ def writeLong(x: Long) {
+ val y = x >> 8
+ val z = x & 0xff
+ if (-y != (z >> 7)) writeLong(y)
+ writeByte(z.toInt)
+ }
+
+ // -- Basic input routines --------------------------------------------
+
+ /** Peek at the current byte without moving the read index */
+ def peekByte(): Int = bytes(readIndex)
+
+ /** Read a byte */
+ def readByte(): Int = {
+ val x = bytes(readIndex); readIndex += 1; x
+ }
+
+ /** Read a natural number in big endian format, base 128.
+ * All but the last digits have bit 0x80 set.*/
+ def readNat(): Int = readLongNat().toInt
+
+ def readLongNat(): Long = {
+ var b = 0L
+ var x = 0L
+ do {
+ b = readByte()
+ x = (x << 7) + (b & 0x7f)
+ } while ((b & 0x80) != 0L);
+ x
+ }
+
+ /** Read a long number in signed big endian format, base 256. */
+ def readLong(len: Int): Long = {
+ var x = 0L
+ var i = 0
+ while (i < len) {
+ x = (x << 8) + (readByte() & 0xff)
+ i += 1
+ }
+ val leading = 64 - (len << 3)
+ x << leading >> leading
+ }
+
+ /** Returns the buffer as a sequence of (Int, Array[Byte]) representing
+ * (tag, data) of the individual entries. Saves and restores buffer state.
+ */
+
+ def toIndexedSeq: IndexedSeq[(Int, Array[Byte])] = {
+ val saved = readIndex
+ readIndex = 0
+ readNat() ; readNat() // discarding version
+ val result = new Array[(Int, Array[Byte])](readNat())
+
+ result.indices foreach { index =>
+ val tag = readNat()
+ val len = readNat()
+ val bytes = data.slice(readIndex, len + readIndex)
+ readIndex += len
+
+ result(index) = tag -> bytes
+ }
+
+ readIndex = saved
+ result.toIndexedSeq
+ }
+
+ /** Perform operation <code>op</code> until the condition
+ * <code>readIndex == end</code> is satisfied.
+ * Concatenate results into a list.
+ *
+ * @param end ...
+ * @param op ...
+ * @return ...
+ */
+ def until[T](end: Int, op: () => T): List[T] =
+ if (readIndex == end) List() else op() :: until(end, op);
+
+ /** Perform operation <code>op</code> the number of
+ * times specified. Concatenate the results into a list.
+ */
+ def times[T](n: Int, op: ()=>T): List[T] =
+ if (n == 0) List() else op() :: times(n-1, op)
+
+ /** Pickle = majorVersion_Nat minorVersion_Nat nbEntries_Nat {Entry}
+ * Entry = type_Nat length_Nat [actual entries]
+ *
+ * Assumes that the ..Version_Nat are already consumed.
+ *
+ * @return an array mapping entry numbers to locations in
+ * the byte array where the entries start.
+ */
+ def createIndex: Array[Int] = {
+ val index = new Array[Int](readNat()) // nbEntries_Nat
+ for (i <- 0 until index.length) {
+ index(i) = readIndex
+ readByte() // skip type_Nat
+ readIndex = readNat() + readIndex // read length_Nat, jump to next entry
+ }
+ index
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala
new file mode 100644
index 0000000..16747af
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala
@@ -0,0 +1,225 @@
+package scala.reflect
+package internal
+package pickling
+
+/** This object provides constants for pickling attributes.
+ *
+ * If you extend the format, be sure to increase the
+ * version minor number.
+ *
+ * @author Martin Odersky
+ * @version 1.0
+ */
+object PickleFormat {
+
+/***************************************************
+ * Symbol table attribute format:
+ * Symtab = nentries_Nat {Entry}
+ * Entry = 1 TERMNAME len_Nat NameInfo
+ * | 2 TYPENAME len_Nat NameInfo
+ * | 3 NONEsym len_Nat
+ * | 4 TYPEsym len_Nat SymbolInfo
+ * | 5 ALIASsym len_Nat SymbolInfo
+ * | 6 CLASSsym len_Nat SymbolInfo [thistype_Ref]
+ * | 7 MODULEsym len_Nat SymbolInfo
+ * | 8 VALsym len_Nat [defaultGetter_Ref /* no longer needed*/] SymbolInfo [alias_Ref]
+ * | 9 EXTref len_Nat name_Ref [owner_Ref]
+ * | 10 EXTMODCLASSref len_Nat name_Ref [owner_Ref]
+ * | 11 NOtpe len_Nat
+ * | 12 NOPREFIXtpe len_Nat
+ * | 13 THIStpe len_Nat sym_Ref
+ * | 14 SINGLEtpe len_Nat type_Ref sym_Ref
+ * | 15 CONSTANTtpe len_Nat constant_Ref
+ * | 16 TYPEREFtpe len_Nat type_Ref sym_Ref {targ_Ref}
+ * | 17 TYPEBOUNDStpe len_Nat tpe_Ref tpe_Ref
+ * | 18 REFINEDtpe len_Nat classsym_Ref {tpe_Ref}
+ * | 19 CLASSINFOtpe len_Nat classsym_Ref {tpe_Ref}
+ * | 20 METHODtpe len_Nat tpe_Ref {sym_Ref}
+ * | 21 POLYTtpe len_Nat tpe_Ref {sym_Ref}
+ * | 22 IMPLICITMETHODtpe len_Nat tpe_Ref {sym_Ref} /* no longer needed */
+ * | 52 SUPERtpe len_Nat tpe_Ref tpe_Ref
+ * | 24 LITERALunit len_Nat
+ * | 25 LITERALboolean len_Nat value_Long
+ * | 26 LITERALbyte len_Nat value_Long
+ * | 27 LITERALshort len_Nat value_Long
+ * | 28 LITERALchar len_Nat value_Long
+ * | 29 LITERALint len_Nat value_Long
+ * | 30 LITERALlong len_Nat value_Long
+ * | 31 LITERALfloat len_Nat value_Long
+ * | 32 LITERALdouble len_Nat value_Long
+ * | 33 LITERALstring len_Nat name_Ref
+ * | 34 LITERALnull len_Nat
+ * | 35 LITERALclass len_Nat tpe_Ref
+ * | 36 LITERALenum len_Nat sym_Ref
+ * | 40 SYMANNOT len_Nat sym_Ref AnnotInfoBody
+ * | 41 CHILDREN len_Nat sym_Ref {sym_Ref}
+ * | 42 ANNOTATEDtpe len_Nat [sym_Ref /* no longer needed */] tpe_Ref {annotinfo_Ref}
+ * | 43 ANNOTINFO len_Nat AnnotInfoBody
+ * | 44 ANNOTARGARRAY len_Nat {constAnnotArg_Ref}
+ * | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat
+ * | 48 EXISTENTIALtpe len_Nat type_Ref {symbol_Ref}
+ * | 49 TREE len_Nat 1 EMPTYtree
+ * | 49 TREE len_Nat 2 PACKAGEtree type_Ref sym_Ref mods_Ref name_Ref {tree_Ref}
+ * | 49 TREE len_Nat 3 CLASStree type_Ref sym_Ref mods_Ref name_Ref tree_Ref {tree_Ref}
+ * | 49 TREE len_Nat 4 MODULEtree type_Ref sym_Ref mods_Ref name_Ref tree_Ref
+ * | 49 TREE len_Nat 5 VALDEFtree type_Ref sym_Ref mods_Ref name_Ref tree_Ref tree_Ref
+ * | 49 TREE len_Nat 6 DEFDEFtree type_Ref sym_Ref mods_Ref name_Ref numtparams_Nat {tree_Ref} numparamss_Nat {numparams_Nat {tree_Ref}} tree_Ref tree_Ref
+ * | 49 TREE len_Nat 7 TYPEDEFtree type_Ref sym_Ref mods_Ref name_Ref tree_Ref {tree_Ref}
+ * | 49 TREE len_Nat 8 LABELtree type_Ref sym_Ref tree_Ref {tree_Ref}
+ * | 49 TREE len_Nat 9 IMPORTtree type_Ref sym_Ref tree_Ref {name_Ref name_Ref}
+ * | 49 TREE len_Nat 11 DOCDEFtree type_Ref sym_Ref string_Ref tree_Ref
+ * | 49 TREE len_Nat 12 TEMPLATEtree type_Ref sym_Ref numparents_Nat {tree_Ref} tree_Ref {tree_Ref}
+ * | 49 TREE len_Nat 13 BLOCKtree type_Ref tree_Ref {tree_Ref}
+ * | 49 TREE len_Nat 14 CASEtree type_Ref tree_Ref tree_Ref tree_Ref
+ * | 49 TREE len_Nat 15 SEQUENCEtree type_Ref {tree_Ref}
+ * | 49 TREE len_Nat 16 ALTERNATIVEtree type_Ref {tree_Ref}
+ * | 49 TREE len_Nat 17 STARtree type_Ref {tree_Ref}
+ * | 49 TREE len_Nat 18 BINDtree type_Ref sym_Ref name_Ref tree_Ref
+ * | 49 TREE len_Nat 19 UNAPPLYtree type_Ref tree_Ref {tree_Ref}
+ * | 49 TREE len_Nat 20 ARRAYVALUEtree type_Ref tree_Ref {tree_Ref}
+ * | 49 TREE len_Nat 21 FUNCTIONtree type_Ref sym_Ref tree_Ref {tree_Ref}
+ * | 49 TREE len_Nat 22 ASSIGNtree type_Ref tree_Ref tree_Ref
+ * | 49 TREE len_Nat 23 IFtree type_Ref tree_Ref tree_Ref tree_Ref
+ * | 49 TREE len_Nat 24 MATCHtree type_Ref tree_Ref {tree_Ref}
+ * | 49 TREE len_Nat 25 RETURNtree type_Ref sym_Ref tree_Ref
+ * | 49 TREE len_Nat 26 TREtree type_Ref tree_Ref tree_Ref {tree_Ref}
+ * | 49 TREE len_Nat 27 THROWtree type_Ref tree_Ref
+ * | 49 TREE len_Nat 28 NEWtree type_Ref tree_Ref
+ * | 49 TREE len_Nat 29 TYPEDtree type_Ref tree_Ref tree_Ref
+ * | 49 TREE len_Nat 30 TYPEAPPLYtree type_Ref tree_Ref {tree_Ref}
+ * | 49 TREE len_Nat 31 APPLYtree type_Ref tree_Ref {tree_Ref}
+ * | 49 TREE len_Nat 32 APPLYDYNAMICtree type_Ref sym_Ref tree_Ref {tree_Ref}
+ * | 49 TREE len_Nat 33 SUPERtree type_Ref sym_Ref tree_Ref name_Ref
+ * | 49 TREE len_Nat 34 THIStree type_Ref sym_Ref name_Ref
+ * | 49 TREE len_Nat 35 SELECTtree type_Ref sym_Ref tree_Ref name_Ref
+ * | 49 TREE len_Nat 36 IDENTtree type_Ref sym_Ref name_Ref
+ * | 49 TREE len_Nat 37 LITERALtree type_Ref constant_Ref
+ * | 49 TREE len_Nat 38 TYPEtree type_Ref
+ * | 49 TREE len_Nat 39 ANNOTATEDtree type_Ref tree_Ref tree_Ref
+ * | 49 TREE len_Nat 40 SINGLETONTYPEtree type_Ref tree_Ref
+ * | 49 TREE len_Nat 41 SELECTFROMTYPEtree type_Ref tree_Ref name_Ref
+ * | 49 TREE len_Nat 42 COMPOUNDTYPEtree type_Ref tree_Ref
+ * | 49 TREE len_Nat 43 APPLIEDTYPEtree type_Ref tree_Ref {tree_Ref}
+ * | 49 TREE len_Nat 44 TYPEBOUNDStree type_Ref tree_Ref tree_Ref
+ * | 49 TREE len_Nat 45 EXISTENTIALTYPEtree type_Ref tree_Ref {tree_Ref}
+ * | 50 MODIFIERS len_Nat flags_Long privateWithin_Ref
+ * SymbolInfo = name_Ref owner_Ref flags_LongNat [privateWithin_Ref] info_Ref
+ * NameInfo = <character sequence of length len_Nat in Utf8 format>
+ * NumInfo = <len_Nat-byte signed number in big endian format>
+ * Ref = Nat
+ * AnnotInfoBody = info_Ref {annotArg_Ref} {name_Ref constAnnotArg_Ref}
+ * AnnotArg = Tree | Constant
+ * ConstAnnotArg = Constant | AnnotInfo | AnnotArgArray
+ *
+ * len is remaining length after `len`.
+ */
+ val MajorVersion = 5
+ val MinorVersion = 0
+ def VersionString = "V" + MajorVersion + "." + MinorVersion
+
+ final val TERMname = 1
+ final val TYPEname = 2
+ final val NONEsym = 3
+ final val TYPEsym = 4
+ final val ALIASsym = 5
+ final val CLASSsym = 6
+ final val MODULEsym = 7
+ final val VALsym = 8
+ final val EXTref = 9
+ final val EXTMODCLASSref = 10
+ final val NOtpe = 11
+ final val NOPREFIXtpe = 12
+ final val THIStpe = 13
+ final val SINGLEtpe = 14
+ final val CONSTANTtpe = 15
+ final val TYPEREFtpe = 16
+ final val TYPEBOUNDStpe = 17
+ final val REFINEDtpe = 18
+ final val CLASSINFOtpe = 19
+ final val METHODtpe = 20
+ final val POLYtpe = 21
+ final val IMPLICITMETHODtpe = 22 // no longer generated
+
+ final val LITERAL = 23 // base line for literals
+ final val LITERALunit = 24
+ final val LITERALboolean = 25
+ final val LITERALbyte = 26
+ final val LITERALshort = 27
+ final val LITERALchar = 28
+ final val LITERALint = 29
+ final val LITERALlong = 30
+ final val LITERALfloat = 31
+ final val LITERALdouble = 32
+ final val LITERALstring = 33
+ final val LITERALnull = 34
+ final val LITERALclass = 35
+ final val LITERALenum = 36
+ final val SYMANNOT = 40
+ final val CHILDREN = 41
+ final val ANNOTATEDtpe = 42
+ final val ANNOTINFO = 43
+ final val ANNOTARGARRAY = 44
+
+ final val SUPERtpe = 46
+ final val DEBRUIJNINDEXtpe = 47
+ final val EXISTENTIALtpe = 48
+
+ final val TREE = 49 // prefix code that means a tree is coming
+ final val EMPTYtree = 1
+ final val PACKAGEtree = 2
+ final val CLASStree = 3
+ final val MODULEtree = 4
+ final val VALDEFtree = 5
+ final val DEFDEFtree = 6
+ final val TYPEDEFtree = 7
+ final val LABELtree = 8
+ final val IMPORTtree = 9
+ final val DOCDEFtree = 11
+ final val TEMPLATEtree = 12
+ final val BLOCKtree = 13
+ final val CASEtree = 14
+ // This node type has been removed.
+ // final val SEQUENCEtree = 15
+ final val ALTERNATIVEtree = 16
+ final val STARtree = 17
+ final val BINDtree = 18
+ final val UNAPPLYtree = 19
+ final val ARRAYVALUEtree = 20
+ final val FUNCTIONtree = 21
+ final val ASSIGNtree = 22
+ final val IFtree = 23
+ final val MATCHtree = 24
+ final val RETURNtree = 25
+ final val TREtree = 26
+ final val THROWtree = 27
+ final val NEWtree = 28
+ final val TYPEDtree = 29
+ final val TYPEAPPLYtree = 30
+ final val APPLYtree = 31
+ final val APPLYDYNAMICtree = 32
+ final val SUPERtree = 33
+ final val THIStree = 34
+ final val SELECTtree = 35
+ final val IDENTtree = 36
+ final val LITERALtree = 37
+ final val TYPEtree = 38
+ final val ANNOTATEDtree = 39
+ final val SINGLETONTYPEtree = 40
+ final val SELECTFROMTYPEtree = 41
+ final val COMPOUNDTYPEtree = 42
+ final val APPLIEDTYPEtree = 43
+ final val TYPEBOUNDStree = 44
+ final val EXISTENTIALTYPEtree = 45
+
+ final val MODIFIERS = 50
+
+ final val firstSymTag = NONEsym
+ final val lastSymTag = VALsym
+ final val lastExtSymTag = EXTMODCLASSref
+
+
+ //The following two are no longer accurate, because ANNOTATEDtpe,
+ //SUPERtpe, ... are not in the same range as the other types
+ //final val firstTypeTag = NOtpe
+ //final val lastTypeTag = POLYtpe
+}
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
new file mode 100644
index 0000000..603fff4
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -0,0 +1,883 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect
+package internal
+package pickling
+
+import java.io.IOException
+import java.lang.Float.intBitsToFloat
+import java.lang.Double.longBitsToDouble
+
+import Flags._
+import PickleFormat._
+import scala.collection.{ mutable, immutable }
+import scala.collection.mutable.ListBuffer
+import scala.annotation.switch
+
+/** @author Martin Odersky
+ * @version 1.0
+ */
+abstract class UnPickler {
+ val global: SymbolTable
+ import global._
+
+ /** Unpickle symbol table information descending from a class and/or module root
+ * from an array of bytes.
+ * @param bytes bytearray from which we unpickle
+ * @param offset offset from which unpickling starts
+ * @param classRoot the top-level class which is unpickled, or NoSymbol if inapplicable
+ * @param moduleRoot the top-level module which is unpickled, or NoSymbol if inapplicable
+ * @param filename filename associated with bytearray, only used for error messages
+ */
+ def unpickle(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) {
+ try {
+ new Scan(bytes, offset, classRoot, moduleRoot, filename).run()
+ } catch {
+ case ex: IOException =>
+ throw ex
+ case ex: MissingRequirementError =>
+ throw ex
+ case ex: Throwable =>
+ /*if (settings.debug.value)*/ ex.printStackTrace()
+ throw new RuntimeException("error reading Scala signature of "+filename+": "+ex.getMessage())
+ }
+ }
+
+ class Scan(_bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) extends PickleBuffer(_bytes, offset, -1) {
+ //println("unpickle " + classRoot + " and " + moduleRoot)//debug
+
+ protected def debug = settings.debug.value
+
+ checkVersion()
+
+ private val loadingMirror = mirrorThatLoaded(classRoot)
+
+ /** A map from entry numbers to array offsets */
+ private val index = createIndex
+
+ /** A map from entry numbers to symbols, types, or annotations */
+ private val entries = new Array[AnyRef](index.length)
+
+ /** A map from symbols to their associated `decls` scopes */
+ private val symScopes = mutable.HashMap[Symbol, Scope]()
+
+ //println("unpickled " + classRoot + ":" + classRoot.rawInfo + ", " + moduleRoot + ":" + moduleRoot.rawInfo);//debug
+
+ // Laboriously unrolled for performance.
+ def run() {
+ var i = 0
+ while (i < index.length) {
+ if (entries(i) == null && isSymbolEntry(i)) {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ entries(i) = readSymbol()
+ readIndex = savedIndex
+ }
+ i += 1
+ }
+ // read children last, fix for #3951
+ i = 0
+ while (i < index.length) {
+ if (entries(i) == null) {
+ if (isSymbolAnnotationEntry(i)) {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ readSymbolAnnotation()
+ readIndex = savedIndex
+ }
+ else if (isChildrenEntry(i)) {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ readChildren()
+ readIndex = savedIndex
+ }
+ }
+ i += 1
+ }
+ }
+
+ private def checkVersion() {
+ val major = readNat()
+ val minor = readNat()
+ if (major != MajorVersion || minor > MinorVersion)
+ throw new IOException("Scala signature " + classRoot.decodedName +
+ " has wrong version\n expected: " +
+ MajorVersion + "." + MinorVersion +
+ "\n found: " + major + "." + minor +
+ " in "+filename)
+ }
+
+ /** The `decls` scope associated with given symbol */
+ protected def symScope(sym: Symbol) = symScopes.getOrElseUpdate(sym, newScope)
+
+ /** Does entry represent an (internal) symbol */
+ protected def isSymbolEntry(i: Int): Boolean = {
+ val tag = bytes(index(i)).toInt
+ (firstSymTag <= tag && tag <= lastSymTag &&
+ (tag != CLASSsym || !isRefinementSymbolEntry(i)))
+ }
+
+ /** Does entry represent an (internal or external) symbol */
+ protected def isSymbolRef(i: Int): Boolean = {
+ val tag = bytes(index(i))
+ (firstSymTag <= tag && tag <= lastExtSymTag)
+ }
+
+ /** Does entry represent a name? */
+ protected def isNameEntry(i: Int): Boolean = {
+ val tag = bytes(index(i)).toInt
+ tag == TERMname || tag == TYPEname
+ }
+
+ /** Does entry represent a symbol annotation? */
+ protected def isSymbolAnnotationEntry(i: Int): Boolean = {
+ val tag = bytes(index(i)).toInt
+ tag == SYMANNOT
+ }
+
+ /** Does the entry represent children of a symbol? */
+ protected def isChildrenEntry(i: Int): Boolean = {
+ val tag = bytes(index(i)).toInt
+ tag == CHILDREN
+ }
+
+ /** Does entry represent a refinement symbol?
+ * pre: Entry is a class symbol
+ */
+ protected def isRefinementSymbolEntry(i: Int): Boolean = {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ val tag = readByte().toInt
+ assert(tag == CLASSsym)
+
+ readNat(); // read length
+ val result = readNameRef() == tpnme.REFINE_CLASS_NAME
+ readIndex = savedIndex
+ result
+ }
+
+ /** If entry at <code>i</code> is undefined, define it by performing
+ * operation <code>op</code> with <code>readIndex at start of i'th
+ * entry. Restore <code>readIndex</code> afterwards.
+ */
+ protected def at[T <: AnyRef](i: Int, op: () => T): T = {
+ var r = entries(i)
+ if (r eq null) {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ r = op()
+ assert(entries(i) eq null, entries(i))
+ entries(i) = r
+ readIndex = savedIndex
+ }
+ r.asInstanceOf[T]
+ }
+
+ /** Read a name */
+ protected def readName(): Name = {
+ val tag = readByte()
+ val len = readNat()
+ tag match {
+ case TERMname => newTermName(bytes, readIndex, len)
+ case TYPEname => newTypeName(bytes, readIndex, len)
+ case _ => errorBadSignature("bad name tag: " + tag)
+ }
+ }
+ protected def readTermName(): TermName = readName().toTermName
+ protected def readTypeName(): TypeName = readName().toTypeName
+
+ /** Read a symbol */
+ protected def readSymbol(): Symbol = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ def atEnd = readIndex == end
+
+ def readExtSymbol(): Symbol = {
+ val name = readNameRef()
+ val owner = if (atEnd) loadingMirror.RootClass else readSymbolRef()
+
+ def adjust(sym: Symbol) = if (tag == EXTref) sym else sym.moduleClass
+
+ def fromName(name: Name) = name.toTermName match {
+ case nme.ROOT => loadingMirror.RootClass
+ case nme.ROOTPKG => loadingMirror.RootPackage
+ case _ => adjust(owner.info.decl(name))
+ }
+ def nestedObjectSymbol: Symbol = {
+ // If the owner is overloaded (i.e. a method), it's not possible to select the
+ // right member, so return NoSymbol. This can only happen when unpickling a tree.
+ // the "case Apply" in readTree() takes care of selecting the correct alternative
+ // after parsing the arguments.
+ if (owner.isOverloaded)
+ return NoSymbol
+
+ if (tag == EXTMODCLASSref) {
+ val moduleVar = owner.info.decl(nme.moduleVarName(name.toTermName))
+ if (moduleVar.isLazyAccessor)
+ return moduleVar.lazyAccessor.lazyAccessor
+ }
+ NoSymbol
+ }
+
+ // (1) Try name.
+ fromName(name) orElse {
+ // (2) Try with expanded name. Can happen if references to private
+ // symbols are read from outside: for instance when checking the children
+ // of a class. See #1722.
+ fromName(nme.expandedName(name.toTermName, owner)) orElse {
+ // (3) Try as a nested object symbol.
+ nestedObjectSymbol orElse {
+ // (4) Call the mirror's "missing" hook.
+ adjust(mirrorThatLoaded(owner).missingHook(owner, name)) orElse {
+ // (5) Create a stub symbol to defer hard failure a little longer.
+ val missingMessage =
+ s"""|bad symbolic reference. A signature in $filename refers to ${name.longString}
+ |in ${owner.kindString} ${owner.fullName} which is not available.
+ |It may be completely missing from the current classpath, or the version on
+ |the classpath might be incompatible with the version used when compiling $filename.""".stripMargin
+ owner.newStubSymbol(name, missingMessage)
+ }
+ }
+ }
+ }
+ }
+
+ tag match {
+ case NONEsym => return NoSymbol
+ case EXTref | EXTMODCLASSref => return readExtSymbol()
+ case _ => ()
+ }
+
+ // symbols that were pickled with Pickler.writeSymInfo
+ val nameref = readNat()
+ val name = at(nameref, readName)
+ val owner = readSymbolRef()
+ val flags = pickledToRawFlags(readLongNat())
+ var inforef = readNat()
+ val privateWithin =
+ if (!isSymbolRef(inforef)) NoSymbol
+ else {
+ val pw = at(inforef, readSymbol)
+ inforef = readNat()
+ pw
+ }
+
+ def isModuleFlag = (flags & MODULE) != 0L
+ def isClassRoot = (name == classRoot.name) && (owner == classRoot.owner)
+ def isModuleRoot = (name == moduleRoot.name) && (owner == moduleRoot.owner)
+ def pflags = flags & PickledFlags
+
+ def finishSym(sym: Symbol): Symbol = {
+ sym.privateWithin = privateWithin
+ sym.info = (
+ if (atEnd) {
+ assert(!sym.isSuperAccessor, sym)
+ newLazyTypeRef(inforef)
+ }
+ else {
+ assert(sym.isSuperAccessor || sym.isParamAccessor, sym)
+ newLazyTypeRefAndAlias(inforef, readNat())
+ }
+ )
+ if (sym.owner.isClass && sym != classRoot && sym != moduleRoot &&
+ !sym.isModuleClass && !sym.isRefinementClass && !sym.isTypeParameter && !sym.isExistentiallyBound)
+ symScope(sym.owner) enter sym
+
+ sym
+ }
+
+ finishSym(tag match {
+ case TYPEsym | ALIASsym =>
+ owner.newNonClassSymbol(name.toTypeName, NoPosition, pflags)
+ case CLASSsym =>
+ val sym = (
+ if (isClassRoot) {
+ if (isModuleFlag) moduleRoot.moduleClass setFlag pflags
+ else classRoot setFlag pflags
+ }
+ else owner.newClassSymbol(name.toTypeName, NoPosition, pflags)
+ )
+ if (!atEnd)
+ sym.typeOfThis = newLazyTypeRef(readNat())
+
+ sym
+ case MODULEsym =>
+ val clazz = at(inforef, () => readType()).typeSymbol // after the NMT_TRANSITION period, we can leave off the () => ... ()
+ if (isModuleRoot) moduleRoot setFlag pflags
+ else owner.newLinkedModule(clazz, pflags)
+ case VALsym =>
+ if (isModuleRoot) { abort(s"VALsym at module root: owner = $owner, name = $name") }
+ else owner.newTermSymbol(name.toTermName, NoPosition, pflags)
+
+ case _ =>
+ errorBadSignature("bad symbol tag: " + tag)
+ })
+ }
+
+ /** Read a type
+ *
+ * @param forceProperType is used to ease the transition to NullaryMethodTypes (commentmarker: NMT_TRANSITION)
+ * the flag say that a type of kind * is expected, so that PolyType(tps, restpe) can be disambiguated to PolyType(tps, NullaryMethodType(restpe))
+ * (if restpe is not a ClassInfoType, a MethodType or a NullaryMethodType, which leaves TypeRef/SingletonType -- the latter would make the polytype a type constructor)
+ */
+ protected def readType(forceProperType: Boolean = false): Type = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ (tag: @switch) match {
+ case NOtpe =>
+ NoType
+ case NOPREFIXtpe =>
+ NoPrefix
+ case THIStpe =>
+ ThisType(readSymbolRef())
+ case SINGLEtpe =>
+ SingleType(readTypeRef(), readSymbolRef()) // !!! was singleType
+ case SUPERtpe =>
+ val thistpe = readTypeRef()
+ val supertpe = readTypeRef()
+ SuperType(thistpe, supertpe)
+ case CONSTANTtpe =>
+ ConstantType(readConstantRef())
+ case TYPEREFtpe =>
+ val pre = readTypeRef()
+ val sym = readSymbolRef()
+ var args = until(end, readTypeRef)
+ TypeRef(pre, sym, args)
+ case TYPEBOUNDStpe =>
+ TypeBounds(readTypeRef(), readTypeRef())
+ case REFINEDtpe =>
+ val clazz = readSymbolRef()
+ RefinedType(until(end, readTypeRef), symScope(clazz), clazz)
+ case CLASSINFOtpe =>
+ val clazz = readSymbolRef()
+ ClassInfoType(until(end, readTypeRef), symScope(clazz), clazz)
+ case METHODtpe | IMPLICITMETHODtpe =>
+ val restpe = readTypeRef()
+ val params = until(end, readSymbolRef)
+ // if the method is overloaded, the params cannot be determined (see readSymbol) => return NoType.
+ // Only happen for trees, "case Apply" in readTree() takes care of selecting the correct
+ // alternative after parsing the arguments.
+ if (params.contains(NoSymbol) || restpe == NoType) NoType
+ else MethodType(params, restpe)
+ case POLYtpe =>
+ val restpe = readTypeRef()
+ val typeParams = until(end, readSymbolRef)
+ if (typeParams.nonEmpty) {
+ // NMT_TRANSITION: old class files denoted a polymorphic nullary method as PolyType(tps, restpe), we now require PolyType(tps, NullaryMethodType(restpe))
+ // when a type of kind * is expected (forceProperType is true), we know restpe should be wrapped in a NullaryMethodType (if it wasn't suitably wrapped yet)
+ def transitionNMT(restpe: Type) = {
+ val resTpeCls = restpe.getClass.toString // what's uglier than isInstanceOf? right! -- isInstanceOf does not work since the concrete types are defined in the compiler (not in scope here)
+ if(forceProperType /*&& pickleformat < 2.9 */ && !(resTpeCls.endsWith("MethodType"))) { assert(!resTpeCls.contains("ClassInfoType"))
+ NullaryMethodType(restpe) }
+ else restpe
+ }
+ PolyType(typeParams, transitionNMT(restpe))
+ }
+ else
+ NullaryMethodType(restpe)
+ case EXISTENTIALtpe =>
+ val restpe = readTypeRef()
+ newExistentialType(until(end, readSymbolRef), restpe)
+
+ case ANNOTATEDtpe =>
+ var typeRef = readNat()
+ val selfsym = if (isSymbolRef(typeRef)) {
+ val s = at(typeRef, readSymbol)
+ typeRef = readNat()
+ s
+ } else NoSymbol // selfsym can go.
+ val tp = at(typeRef, () => readType(forceProperType)) // NMT_TRANSITION
+ val annots = until(end, readAnnotationRef)
+ if (selfsym == NoSymbol) AnnotatedType(annots, tp, selfsym)
+ else tp
+ case _ =>
+ noSuchTypeTag(tag, end)
+ }
+ }
+
+ def noSuchTypeTag(tag: Int, end: Int): Type =
+ errorBadSignature("bad type tag: " + tag)
+
+ /** Read a constant */
+ protected def readConstant(): Constant = {
+ val tag = readByte().toInt
+ val len = readNat()
+ (tag: @switch) match {
+ case LITERALunit => Constant(())
+ case LITERALboolean => Constant(readLong(len) != 0L)
+ case LITERALbyte => Constant(readLong(len).toByte)
+ case LITERALshort => Constant(readLong(len).toShort)
+ case LITERALchar => Constant(readLong(len).toChar)
+ case LITERALint => Constant(readLong(len).toInt)
+ case LITERALlong => Constant(readLong(len))
+ case LITERALfloat => Constant(intBitsToFloat(readLong(len).toInt))
+ case LITERALdouble => Constant(longBitsToDouble(readLong(len)))
+ case LITERALstring => Constant(readNameRef().toString)
+ case LITERALnull => Constant(null)
+ case LITERALclass => Constant(readTypeRef())
+ case LITERALenum => Constant(readSymbolRef())
+ case _ => noSuchConstantTag(tag, len)
+ }
+ }
+
+ def noSuchConstantTag(tag: Int, len: Int): Constant =
+ errorBadSignature("bad constant tag: " + tag)
+
+ /** Read children and store them into the corresponding symbol.
+ */
+ protected def readChildren() {
+ val tag = readByte()
+ assert(tag == CHILDREN)
+ val end = readNat() + readIndex
+ val target = readSymbolRef()
+ while (readIndex != end) target addChild readSymbolRef()
+ }
+
+ /** Read an annotation argument, which is pickled either
+ * as a Constant or a Tree.
+ */
+ protected def readAnnotArg(i: Int): Tree = bytes(index(i)) match {
+ case TREE => at(i, readTree)
+ case _ =>
+ val const = at(i, readConstant)
+ Literal(const) setType const.tpe
+ }
+
+ /** Read a ClassfileAnnotArg (argument to a classfile annotation)
+ */
+ private def readArrayAnnot() = {
+ readByte() // skip the `annotargarray` tag
+ val end = readNat() + readIndex
+ until(end, () => readClassfileAnnotArg(readNat())).toArray(JavaArgumentTag)
+ }
+ protected def readClassfileAnnotArg(i: Int): ClassfileAnnotArg = bytes(index(i)) match {
+ case ANNOTINFO => NestedAnnotArg(at(i, readAnnotation))
+ case ANNOTARGARRAY => at(i, () => ArrayAnnotArg(readArrayAnnot()))
+ case _ => LiteralAnnotArg(at(i, readConstant))
+ }
+
+ /** Read an AnnotationInfo. Not to be called directly, use
+ * readAnnotation or readSymbolAnnotation
+ */
+ protected def readAnnotationInfo(end: Int): AnnotationInfo = {
+ val atp = readTypeRef()
+ val args = new ListBuffer[Tree]
+ val assocs = new ListBuffer[(Name, ClassfileAnnotArg)]
+ while (readIndex != end) {
+ val argref = readNat()
+ if (isNameEntry(argref)) {
+ val name = at(argref, readName)
+ val arg = readClassfileAnnotArg(readNat())
+ assocs += ((name, arg))
+ }
+ else
+ args += readAnnotArg(argref)
+ }
+ AnnotationInfo(atp, args.toList, assocs.toList)
+ }
+
+ /** Read an annotation and as a side effect store it into
+ * the symbol it requests. Called at top-level, for all
+ * (symbol, annotInfo) entries. */
+ protected def readSymbolAnnotation() {
+ val tag = readByte()
+ if (tag != SYMANNOT)
+ errorBadSignature("symbol annotation expected ("+ tag +")")
+ val end = readNat() + readIndex
+ val target = readSymbolRef()
+ target.addAnnotation(readAnnotationInfo(end))
+ }
+
+ /** Read an annotation and return it. Used when unpickling
+ * an ANNOTATED(WSELF)tpe or a NestedAnnotArg */
+ protected def readAnnotation(): AnnotationInfo = {
+ val tag = readByte()
+ if (tag != ANNOTINFO)
+ errorBadSignature("annotation expected (" + tag + ")")
+ val end = readNat() + readIndex
+ readAnnotationInfo(end)
+ }
+
+ /* Read an abstract syntax tree */
+ protected def readTree(): Tree = {
+ val outerTag = readByte()
+ if (outerTag != TREE)
+ errorBadSignature("tree expected (" + outerTag + ")")
+ val end = readNat() + readIndex
+ val tag = readByte()
+ val tpe = if (tag == EMPTYtree) NoType else readTypeRef()
+
+ // Set by the three functions to follow. If symbol is non-null
+ // after the new tree 't' has been created, t has its Symbol
+ // set to symbol; and it always has its Type set to tpe.
+ var symbol: Symbol = null
+ var mods: Modifiers = null
+ var name: Name = null
+
+ /** Read a Symbol, Modifiers, and a Name */
+ def setSymModsName() {
+ symbol = readSymbolRef()
+ mods = readModifiersRef()
+ name = readNameRef()
+ }
+ /** Read a Symbol and a Name */
+ def setSymName() {
+ symbol = readSymbolRef()
+ name = readNameRef()
+ }
+ /** Read a Symbol */
+ def setSym() {
+ symbol = readSymbolRef()
+ }
+
+ val t = tag match {
+ case EMPTYtree =>
+ EmptyTree
+
+ case PACKAGEtree =>
+ setSym()
+ val pid = readTreeRef().asInstanceOf[RefTree]
+ val stats = until(end, readTreeRef)
+ PackageDef(pid, stats)
+
+ case CLASStree =>
+ setSymModsName()
+ val impl = readTemplateRef()
+ val tparams = until(end, readTypeDefRef)
+ ClassDef(mods, name.toTypeName, tparams, impl)
+
+ case MODULEtree =>
+ setSymModsName()
+ ModuleDef(mods, name.toTermName, readTemplateRef())
+
+ case VALDEFtree =>
+ setSymModsName()
+ val tpt = readTreeRef()
+ val rhs = readTreeRef()
+ ValDef(mods, name.toTermName, tpt, rhs)
+
+ case DEFDEFtree =>
+ setSymModsName()
+ val tparams = times(readNat(), readTypeDefRef)
+ val vparamss = times(readNat(), () => times(readNat(), readValDefRef))
+ val tpt = readTreeRef()
+ val rhs = readTreeRef()
+ DefDef(mods, name.toTermName, tparams, vparamss, tpt, rhs)
+
+ case TYPEDEFtree =>
+ setSymModsName()
+ val rhs = readTreeRef()
+ val tparams = until(end, readTypeDefRef)
+ TypeDef(mods, name.toTypeName, tparams, rhs)
+
+ case LABELtree =>
+ setSymName()
+ val rhs = readTreeRef()
+ val params = until(end, readIdentRef)
+ LabelDef(name.toTermName, params, rhs)
+
+ case IMPORTtree =>
+ setSym()
+ val expr = readTreeRef()
+ val selectors = until(end, () => {
+ val from = readNameRef()
+ val to = readNameRef()
+ ImportSelector(from, -1, to, -1)
+ })
+
+ Import(expr, selectors)
+
+ case TEMPLATEtree =>
+ setSym()
+ val parents = times(readNat(), readTreeRef)
+ val self = readValDefRef()
+ val body = until(end, readTreeRef)
+
+ Template(parents, self, body)
+
+ case BLOCKtree =>
+ val expr = readTreeRef()
+ val stats = until(end, readTreeRef)
+ Block(stats, expr)
+
+ case CASEtree =>
+ val pat = readTreeRef()
+ val guard = readTreeRef()
+ val body = readTreeRef()
+ CaseDef(pat, guard, body)
+
+ case ALTERNATIVEtree =>
+ Alternative(until(end, readTreeRef))
+
+ case STARtree =>
+ Star(readTreeRef())
+
+ case BINDtree =>
+ setSymName()
+ Bind(name, readTreeRef())
+
+ case UNAPPLYtree =>
+ val fun = readTreeRef()
+ val args = until(end, readTreeRef)
+ UnApply(fun, args)
+
+ case ARRAYVALUEtree =>
+ val elemtpt = readTreeRef()
+ val trees = until(end, readTreeRef)
+ ArrayValue(elemtpt, trees)
+
+ case FUNCTIONtree =>
+ setSym()
+ val body = readTreeRef()
+ val vparams = until(end, readValDefRef)
+ Function(vparams, body)
+
+ case ASSIGNtree =>
+ val lhs = readTreeRef()
+ val rhs = readTreeRef()
+ Assign(lhs, rhs)
+
+ case IFtree =>
+ val cond = readTreeRef()
+ val thenp = readTreeRef()
+ val elsep = readTreeRef()
+ If(cond, thenp, elsep)
+
+ case MATCHtree =>
+ val selector = readTreeRef()
+ val cases = until(end, readCaseDefRef)
+ Match(selector, cases)
+
+ case RETURNtree =>
+ setSym()
+ Return(readTreeRef())
+
+ case TREtree =>
+ val block = readTreeRef()
+ val finalizer = readTreeRef()
+ val catches = until(end, readCaseDefRef)
+ Try(block, catches, finalizer)
+
+ case THROWtree =>
+ Throw(readTreeRef())
+
+ case NEWtree =>
+ New(readTreeRef())
+
+ case TYPEDtree =>
+ val expr = readTreeRef()
+ val tpt = readTreeRef()
+ Typed(expr, tpt)
+
+ case TYPEAPPLYtree =>
+ val fun = readTreeRef()
+ val args = until(end, readTreeRef)
+ TypeApply(fun, args)
+
+ case APPLYtree =>
+ val fun = readTreeRef()
+ val args = until(end, readTreeRef)
+ if (fun.symbol.isOverloaded) {
+ fun.setType(fun.symbol.info)
+ inferMethodAlternative(fun, args map (_.tpe), tpe)
+ }
+ Apply(fun, args)
+
+ case APPLYDYNAMICtree =>
+ setSym()
+ val qual = readTreeRef()
+ val args = until(end, readTreeRef)
+ ApplyDynamic(qual, args)
+
+ case SUPERtree =>
+ setSym()
+ val qual = readTreeRef()
+ val mix = readTypeNameRef()
+ Super(qual, mix)
+
+ case THIStree =>
+ setSym()
+ This(readTypeNameRef())
+
+ case SELECTtree =>
+ setSym()
+ val qualifier = readTreeRef()
+ val selector = readNameRef()
+ Select(qualifier, selector)
+
+ case IDENTtree =>
+ setSymName()
+ Ident(name)
+
+ case LITERALtree =>
+ Literal(readConstantRef())
+
+ case TYPEtree =>
+ TypeTree()
+
+ case ANNOTATEDtree =>
+ val annot = readTreeRef()
+ val arg = readTreeRef()
+ Annotated(annot, arg)
+
+ case SINGLETONTYPEtree =>
+ SingletonTypeTree(readTreeRef())
+
+ case SELECTFROMTYPEtree =>
+ val qualifier = readTreeRef()
+ val selector = readTypeNameRef()
+ SelectFromTypeTree(qualifier, selector)
+
+ case COMPOUNDTYPEtree =>
+ CompoundTypeTree(readTemplateRef())
+
+ case APPLIEDTYPEtree =>
+ val tpt = readTreeRef()
+ val args = until(end, readTreeRef)
+ AppliedTypeTree(tpt, args)
+
+ case TYPEBOUNDStree =>
+ val lo = readTreeRef()
+ val hi = readTreeRef()
+ TypeBoundsTree(lo, hi)
+
+ case EXISTENTIALTYPEtree =>
+ val tpt = readTreeRef()
+ val whereClauses = until(end, readTreeRef)
+ ExistentialTypeTree(tpt, whereClauses)
+
+ case _ =>
+ noSuchTreeTag(tag, end)
+ }
+
+ if (symbol == null) t setType tpe
+ else t setSymbol symbol setType tpe
+ }
+
+ def noSuchTreeTag(tag: Int, end: Int) =
+ errorBadSignature("unknown tree type (" + tag + ")")
+
+ def readModifiers(): Modifiers = {
+ val tag = readNat()
+ if (tag != MODIFIERS)
+ errorBadSignature("expected a modifiers tag (" + tag + ")")
+ val end = readNat() + readIndex
+ val pflagsHi = readNat()
+ val pflagsLo = readNat()
+ val pflags = (pflagsHi.toLong << 32) + pflagsLo
+ val flags = pickledToRawFlags(pflags)
+ val privateWithin = readNameRef()
+ Modifiers(flags, privateWithin, Nil)
+ }
+
+ /* Read a reference to a pickled item */
+ protected def readSymbolRef(): Symbol = {//OPT inlined from: at(readNat(), readSymbol) to save on closure creation
+ val i = readNat()
+ var r = entries(i)
+ if (r eq null) {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ r = readSymbol()
+ assert(entries(i) eq null, entries(i))
+ entries(i) = r
+ readIndex = savedIndex
+ }
+ r.asInstanceOf[Symbol]
+ }
+
+ protected def readNameRef(): Name = at(readNat(), readName)
+ protected def readTypeRef(): Type = at(readNat(), () => readType()) // after the NMT_TRANSITION period, we can leave off the () => ... ()
+ protected def readConstantRef(): Constant = at(readNat(), readConstant)
+ protected def readAnnotationRef(): AnnotationInfo = at(readNat(), readAnnotation)
+ protected def readModifiersRef(): Modifiers = at(readNat(), readModifiers)
+ protected def readTreeRef(): Tree = at(readNat(), readTree)
+
+ protected def readTypeNameRef(): TypeName = readNameRef().toTypeName
+ protected def readTermNameRef(): TermName = readNameRef().toTermName
+
+ protected def readTemplateRef(): Template =
+ readTreeRef() match {
+ case templ:Template => templ
+ case other =>
+ errorBadSignature("expected a template (" + other + ")")
+ }
+ protected def readCaseDefRef(): CaseDef =
+ readTreeRef() match {
+ case tree:CaseDef => tree
+ case other =>
+ errorBadSignature("expected a case def (" + other + ")")
+ }
+ protected def readValDefRef(): ValDef =
+ readTreeRef() match {
+ case tree:ValDef => tree
+ case other =>
+ errorBadSignature("expected a ValDef (" + other + ")")
+ }
+ protected def readIdentRef(): Ident =
+ readTreeRef() match {
+ case tree:Ident => tree
+ case other =>
+ errorBadSignature("expected an Ident (" + other + ")")
+ }
+ protected def readTypeDefRef(): TypeDef =
+ readTreeRef() match {
+ case tree:TypeDef => tree
+ case other =>
+ errorBadSignature("expected an TypeDef (" + other + ")")
+ }
+
+ protected def errorBadSignature(msg: String) =
+ throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg)
+
+ def inferMethodAlternative(fun: Tree, argtpes: List[Type], restpe: Type) {} // can't do it; need a compiler for that.
+
+ def newLazyTypeRef(i: Int): LazyType = new LazyTypeRef(i)
+ def newLazyTypeRefAndAlias(i: Int, j: Int): LazyType = new LazyTypeRefAndAlias(i, j)
+
+ /** Convert to a type error, that is printed gracefully instead of crashing.
+ *
+ * Similar in intent to what SymbolLoader does (but here we don't have access to
+ * error reporting, so we rely on the typechecker to report the error).
+ */
+ def toTypeError(e: MissingRequirementError) = {
+ // e.printStackTrace()
+ new TypeError(e.msg)
+ }
+
+ /** A lazy type which when completed returns type at index `i`. */
+ private class LazyTypeRef(i: Int) extends LazyType with FlagAgnosticCompleter {
+ private val definedAtRunId = currentRunId
+ private val p = phase
+ override def complete(sym: Symbol) : Unit = try {
+ val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType`
+ atPhase(p) (sym setInfo tp)
+ if (currentRunId != definedAtRunId)
+ sym.setInfo(adaptToNewRunMap(tp))
+ }
+ catch {
+ case e: MissingRequirementError => throw toTypeError(e)
+ }
+ override def load(sym: Symbol) { complete(sym) }
+ }
+
+ /** A lazy type which when completed returns type at index `i` and sets alias
+ * of completed symbol to symbol at index `j`.
+ */
+ private class LazyTypeRefAndAlias(i: Int, j: Int) extends LazyTypeRef(i) {
+ override def complete(sym: Symbol) = try {
+ super.complete(sym)
+ var alias = at(j, readSymbol)
+ if (alias.isOverloaded)
+ alias = atPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))))
+
+ sym.asInstanceOf[TermSymbol].setAlias(alias)
+ }
+ catch {
+ case e: MissingRequirementError => throw toTypeError(e)
+ }
+ }
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/settings/AbsSettings.scala b/src/reflect/scala/reflect/internal/settings/AbsSettings.scala
new file mode 100644
index 0000000..a6fb418
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/settings/AbsSettings.scala
@@ -0,0 +1,23 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.reflect.internal
+package settings
+
+/** A Settings abstraction boiled out of the original highly mutable Settings
+ * class with the intention of creating an ImmutableSettings which can be used
+ * interchangeably. Except of course without the mutants.
+ */
+
+trait AbsSettings {
+ type Setting <: AbsSettingValue // Fix to the concrete Setting type
+
+ trait AbsSettingValue {
+ type T <: Any
+ def value: T
+ def isDefault: Boolean
+ }
+}
+
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
new file mode 100644
index 0000000..81368df
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
@@ -0,0 +1,50 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+// $Id$
+
+package scala.reflect.internal
+package settings
+
+/** A mutable Settings object.
+ */
+abstract class MutableSettings extends AbsSettings {
+
+ type Setting <: SettingValue
+ type BooleanSetting <: Setting { type T = Boolean }
+ type IntSetting <: Setting { type T = Int }
+ type MultiStringSetting <: Setting { type T = List[String] }
+
+ // basically this is a value which remembers if it's been modified
+ trait SettingValue extends AbsSettingValue {
+ protected var v: T
+ protected var setByUser: Boolean = false
+
+ def postSetHook(): Unit = ()
+ def isDefault = !setByUser
+ def isSetByUser = setByUser
+ def value: T = v
+ def value_=(arg: T) = {
+ setByUser = true
+ v = arg
+ postSetHook()
+ }
+ }
+
+ def overrideObjects: BooleanSetting
+ def printtypes: BooleanSetting
+ def debug: BooleanSetting
+ def Ynotnull: BooleanSetting
+ def explaintypes: BooleanSetting
+ def verbose: BooleanSetting
+ def uniqid: BooleanSetting
+ def Yshowsymkinds: BooleanSetting
+ def Xprintpos: BooleanSetting
+ def Yrecursion: IntSetting
+ def maxClassfileName: IntSetting
+ def Xexperimental: BooleanSetting
+ def XoldPatmat: BooleanSetting
+ def XnoPatmatAnalysis: BooleanSetting
+ def XfullLubs: BooleanSetting
+}
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
new file mode 100644
index 0000000..52d1657
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -0,0 +1,383 @@
+package scala.reflect
+package internal
+package transform
+
+import Flags.{PARAMACCESSOR, METHOD}
+
+trait Erasure {
+
+ val global: SymbolTable
+ import global._
+ import definitions._
+
+ /** An extractor object for generic arrays */
+ object GenericArray {
+
+ /** Is `tp` an unbounded generic type (i.e. which could be instantiated
+ * with primitive as well as class types)?.
+ */
+ private def genericCore(tp: Type): Type = tp.normalize match {
+ /* A Java Array<T> is erased to Array[Object] (T can only be a reference type), where as a Scala Array[T] is
+ * erased to Object. However, there is only symbol for the Array class. So to make the distinction between
+ * a Java and a Scala array, we check if the owner of T comes from a Java class.
+ * This however caused issue SI-5654. The additional test for EXSITENTIAL fixes it, see the ticket comments.
+ * In short, members of an existential type (e.g. `T` in `forSome { type T }`) can have pretty arbitrary
+ * owners (e.g. when computing lubs, <root> is used). All packageClass symbols have `isJavaDefined == true`.
+ */
+ case TypeRef(_, sym, _) if sym.isAbstractType && (!sym.owner.isJavaDefined || sym.hasFlag(Flags.EXISTENTIAL)) =>
+ tp
+ case ExistentialType(tparams, restp) =>
+ genericCore(restp)
+ case _ =>
+ NoType
+ }
+
+ /** If `tp` is of the form Array[...Array[T]...] where `T` is an abstract type
+ * then Some((N, T)) where N is the number of Array constructors enclosing `T`,
+ * otherwise None. Existentials on any level are ignored.
+ */
+ def unapply(tp: Type): Option[(Int, Type)] = tp.normalize match {
+ case TypeRef(_, ArrayClass, List(arg)) =>
+ genericCore(arg) match {
+ case NoType =>
+ unapply(arg) match {
+ case Some((level, core)) => Some((level + 1, core))
+ case None => None
+ }
+ case core =>
+ Some((1, core))
+ }
+ case ExistentialType(tparams, restp) =>
+ unapply(restp)
+ case _ =>
+ None
+ }
+ }
+
+ protected def unboundedGenericArrayLevel(tp: Type): Int = tp match {
+ case GenericArray(level, core) if !(core <:< AnyRefClass.tpe) => level
+ case _ => 0
+ }
+
+ // @M #2585 when generating a java generic signature that includes
+ // a selection of an inner class p.I, (p = `pre`, I = `cls`) must
+ // rewrite to p'.I, where p' refers to the class that directly defines
+ // the nested class I.
+ //
+ // See also #2585 marker in javaSig: there, type arguments must be
+ // included (use pre.baseType(cls.owner)).
+ //
+ // This requires that cls.isClass.
+ protected def rebindInnerClass(pre: Type, cls: Symbol): Type = {
+ if (cls.owner.isClass) cls.owner.tpe else pre // why not cls.isNestedClass?
+ }
+
+ def unboxDerivedValueClassMethod(clazz: Symbol): Symbol =
+ (clazz.info.decl(nme.unbox)) orElse
+ (clazz.info.decls.find(_ hasAllFlags PARAMACCESSOR | METHOD) getOrElse
+ NoSymbol)
+
+ def underlyingOfValueClass(clazz: Symbol): Type =
+ clazz.derivedValueClassUnbox.tpe.resultType
+
+ /** The type of the argument of a value class reference after erasure
+ * This method needs to be called at a phase no later than erasurephase
+ */
+ def erasedValueClassArg(tref: TypeRef): Type = {
+ assert(!phase.erasedTypes)
+ val clazz = tref.sym
+ if (valueClassIsParametric(clazz)) {
+ val underlying = tref.memberType(clazz.derivedValueClassUnbox).resultType
+ boxingErasure(underlying)
+ } else {
+ scalaErasure(underlyingOfValueClass(clazz))
+ }
+ }
+
+ /** Does this vakue class have an underlying type that's a type parameter of
+ * the class itself?
+ * This method needs to be called at a phase no later than erasurephase
+ */
+ def valueClassIsParametric(clazz: Symbol): Boolean = {
+ assert(!phase.erasedTypes)
+ clazz.typeParams contains
+ clazz.derivedValueClassUnbox.tpe.resultType.normalize.typeSymbol
+ }
+
+ abstract class ErasureMap extends TypeMap {
+ private lazy val ObjectArray = arrayType(ObjectClass.tpe)
+ private lazy val ErasedObject = erasedTypeRef(ObjectClass)
+
+ def mergeParents(parents: List[Type]): Type
+
+ def eraseNormalClassRef(pre: Type, clazz: Symbol): Type =
+ typeRef(apply(rebindInnerClass(pre, clazz)), clazz, List()) // #2585
+
+ protected def eraseDerivedValueClassRef(tref: TypeRef): Type = erasedValueClassArg(tref)
+
+ def apply(tp: Type): Type = tp match {
+ case ConstantType(_) =>
+ tp
+ case st: SubType =>
+ apply(st.supertype)
+ case tref @ TypeRef(pre, sym, args) =>
+ if (sym == ArrayClass)
+ if (unboundedGenericArrayLevel(tp) == 1) ObjectClass.tpe
+ else if (args.head.typeSymbol.isBottomClass) ObjectArray
+ else typeRef(apply(pre), sym, args map applyInArray)
+ else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass || sym == NotNullClass) ErasedObject
+ else if (sym == UnitClass) erasedTypeRef(BoxedUnitClass)
+ else if (sym.isRefinementClass) apply(mergeParents(tp.parents))
+ else if (sym.isDerivedValueClass) eraseDerivedValueClassRef(tref)
+ else if (sym.isClass) eraseNormalClassRef(pre, sym)
+ else apply(sym.info) // alias type or abstract type
+ case PolyType(tparams, restpe) =>
+ apply(restpe)
+ case ExistentialType(tparams, restpe) =>
+ apply(restpe)
+ case mt @ MethodType(params, restpe) =>
+ MethodType(
+ cloneSymbolsAndModify(params, ErasureMap.this),
+ if (restpe.typeSymbol == UnitClass) erasedTypeRef(UnitClass)
+ // this replaces each typeref that refers to an argument
+ // by the type `p.tpe` of the actual argument p (p in params)
+ else apply(mt.resultType(mt.paramTypes)))
+ case RefinedType(parents, decls) =>
+ apply(mergeParents(parents))
+ case AnnotatedType(_, atp, _) =>
+ apply(atp)
+ case ClassInfoType(parents, decls, clazz) =>
+ ClassInfoType(
+ if (clazz == ObjectClass || isPrimitiveValueClass(clazz)) Nil
+ else if (clazz == ArrayClass) List(ErasedObject)
+ else removeLaterObjects(parents map this),
+ decls, clazz)
+ case _ =>
+ mapOver(tp)
+ }
+
+ def applyInArray(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, args) if (sym.isDerivedValueClass) => eraseNormalClassRef(pre, sym)
+ case _ => apply(tp)
+ }
+ }
+
+ protected def verifyJavaErasure = false
+
+ /** The erasure |T| of a type T. This is:
+ *
+ * - For a constant type, itself.
+ * - For a type-bounds structure, the erasure of its upper bound.
+ * - For every other singleton type, the erasure of its supertype.
+ * - For a typeref scala.Array+[T] where T is an abstract type, AnyRef.
+ * - For a typeref scala.Array+[T] where T is not an abstract type, scala.Array+[|T|].
+ * - For a typeref scala.Any or scala.AnyVal, java.lang.Object.
+ * - For a typeref scala.Unit, scala.runtime.BoxedUnit.
+ * - For a typeref P.C[Ts] where C refers to a class, |P|.C.
+ * (Where P is first rebound to the class that directly defines C.)
+ * - For a typeref P.C[Ts] where C refers to an alias type, the erasure of C's alias.
+ * - For a typeref P.C[Ts] where C refers to an abstract type, the
+ * erasure of C's upper bound.
+ * - For a non-empty type intersection (possibly with refinement)
+ * - in scala, the erasure of the intersection dominator
+ * - in java, the erasure of its first parent <--- @PP: not yet in spec.
+ * - For an empty type intersection, java.lang.Object.
+ * - For a method type (Fs)scala.Unit, (|Fs|)scala#Unit.
+ * - For any other method type (Fs)Y, (|Fs|)|T|.
+ * - For a polymorphic type, the erasure of its result type.
+ * - For the class info type of java.lang.Object, the same type without any parents.
+ * - For a class info type of a value class, the same type without any parents.
+ * - For any other class info type with parents Ps, the same type with
+ * parents |Ps|, but with duplicate references of Object removed.
+ * - for all other types, the type itself (with any sub-components erased)
+ */
+ def erasure(sym: Symbol): ErasureMap =
+ if (sym == NoSymbol || !sym.enclClass.isJavaDefined) scalaErasure
+ else if (verifyJavaErasure && sym.isMethod) verifiedJavaErasure
+ else javaErasure
+
+ /** This is used as the Scala erasure during the erasure phase itself
+ * It differs from normal erasure in that value classes are erased to ErasedValueTypes which
+ * are then later converted to the underlying parameter type in phase posterasure.
+ */
+ def specialErasure(sym: Symbol)(tp: Type): Type =
+ if (sym != NoSymbol && sym.enclClass.isJavaDefined)
+ erasure(sym)(tp)
+ else if (sym.isClassConstructor)
+ specialConstructorErasure(sym.owner, tp)
+ else
+ specialScalaErasure(tp)
+
+ def specialConstructorErasure(clazz: Symbol, tpe: Type): Type = {
+ tpe match {
+ case PolyType(tparams, restpe) =>
+ specialConstructorErasure(clazz, restpe)
+ case ExistentialType(tparams, restpe) =>
+ specialConstructorErasure(clazz, restpe)
+ case RefinedType(parents, decls) =>
+ specialConstructorErasure(
+ clazz, specialScalaErasure.mergeParents(parents))
+ case mt @ MethodType(params, restpe) =>
+ MethodType(
+ cloneSymbolsAndModify(params, specialScalaErasure),
+ specialConstructorErasure(clazz, restpe))
+ case TypeRef(pre, `clazz`, args) =>
+ typeRef(pre, clazz, List())
+ case tp =>
+ if (!(clazz == ArrayClass || tp.isError))
+ // See SI-6556. It seems in some cases the result constructor
+ // type of an anonymous class is a different version of the class.
+ // This has nothing to do with value classes per se.
+ // We simply used a less discriminating transform before, that
+ // did not look at the cases in detail.
+ // It seems there is a deeper problem here, which needs
+ // following up to. But we will not risk regressions
+ // in 2.10 because of it.
+ log(s"!!! unexpected constructor erasure $tp for $clazz")
+ specialScalaErasure(tp)
+ }
+ }
+
+ /** Scala's more precise erasure than java's is problematic as follows:
+ *
+ * - Symbols are read from classfiles and populated with types
+ * - The textual signature read from the bytecode is forgotten
+ * - Bytecode generation must know the precise signature of a method
+ * - the signature is derived from the erasure of the method type
+ * - If that derivation does not adhere to the rules by which the original
+ * signature was created, a NoSuchMethod error will result.
+ *
+ * For this reason and others (such as distinguishing constructors from other methods)
+ * erasure is now (Symbol, Type) => Type rather than Type => Type.
+ */
+ class ScalaErasureMap extends ErasureMap {
+ /** In scala, calculate a useful parent.
+ * An intersection such as `Object with Trait` erases to Trait.
+ */
+ def mergeParents(parents: List[Type]): Type =
+ intersectionDominator(parents)
+ }
+
+ class JavaErasureMap extends ErasureMap {
+ /** In java, always take the first parent.
+ * An intersection such as `Object with Trait` erases to Object.
+ */
+ def mergeParents(parents: List[Type]): Type =
+ if (parents.isEmpty) ObjectClass.tpe
+ else parents.head
+ }
+
+ object scalaErasure extends ScalaErasureMap
+
+ /** This is used as the Scala erasure during the erasure phase itself
+ * It differs from normal erasure in that value classes are erased to ErasedValueTypes which
+ * are then later converted to the underlying parameter type in phase posterasure.
+ */
+ object specialScalaErasure extends ScalaErasureMap {
+ override def eraseDerivedValueClassRef(tref: TypeRef): Type =
+ ErasedValueType(tref)
+ }
+
+ object javaErasure extends JavaErasureMap
+
+ object verifiedJavaErasure extends JavaErasureMap {
+ override def apply(tp: Type): Type = {
+ val res = javaErasure(tp)
+ val old = scalaErasure(tp)
+ if (!(res =:= old))
+ log("Identified divergence between java/scala erasure:\n scala: " + old + "\n java: " + res)
+ res
+ }
+ }
+
+ object boxingErasure extends ScalaErasureMap {
+ override def eraseNormalClassRef(pre: Type, clazz: Symbol) =
+ if (isPrimitiveValueClass(clazz)) boxedClass(clazz).tpe
+ else super.eraseNormalClassRef(pre, clazz)
+ override def eraseDerivedValueClassRef(tref: TypeRef) =
+ super.eraseNormalClassRef(tref.pre, tref.sym)
+ }
+
+ /** The intersection dominator (SLS 3.7) of a list of types is computed as follows.
+ *
+ * - If the list contains one or more occurrences of scala.Array with
+ * type parameters El1, El2, ... then the dominator is scala.Array with
+ * type parameter of intersectionDominator(List(El1, El2, ...)). <--- @PP: not yet in spec.
+ * - Otherwise, the list is reduced to a subsequence containing only types
+ * which are not subtypes of other listed types (the span.)
+ * - If the span is empty, the dominator is Object.
+ * - If the span contains a class Tc which is not a trait and which is
+ * not Object, the dominator is Tc. <--- @PP: "which is not Object" not in spec.
+ * - Otherwise, the dominator is the first element of the span.
+ */
+ def intersectionDominator(parents: List[Type]): Type = {
+ if (parents.isEmpty) ObjectClass.tpe
+ else {
+ val psyms = parents map (_.typeSymbol)
+ if (psyms contains ArrayClass) {
+ // treat arrays specially
+ arrayType(
+ intersectionDominator(
+ parents filter (_.typeSymbol == ArrayClass) map (_.typeArgs.head)))
+ } else {
+ // implement new spec for erasure of refined types.
+ def isUnshadowed(psym: Symbol) =
+ !(psyms exists (qsym => (psym ne qsym) && (qsym isNonBottomSubClass psym)))
+ val cs = parents.iterator.filter { p => // isUnshadowed is a bit expensive, so try classes first
+ val psym = p.typeSymbol
+ psym.initialize
+ psym.isClass && !psym.isTrait && isUnshadowed(psym)
+ }
+ (if (cs.hasNext) cs else parents.iterator.filter(p => isUnshadowed(p.typeSymbol))).next()
+ }
+ }
+ }
+
+ /** Type reference after erasure */
+ def erasedTypeRef(sym: Symbol): Type =
+ typeRef(erasure(sym)(sym.owner.tpe), sym, Nil)
+
+ /** The symbol's erased info. This is the type's erasure, except for the following symbols:
+ *
+ * - For $asInstanceOf : [T]T
+ * - For $isInstanceOf : [T]scala#Boolean
+ * - For class Array : [T]C where C is the erased classinfo of the Array class.
+ * - For Array[T].<init> : {scala#Int)Array[T]
+ * - For a type parameter : A type bounds type consisting of the erasures of its bounds.
+ */
+ def transformInfo(sym: Symbol, tp: Type): Type = {
+ if (sym == Object_asInstanceOf)
+ sym.info
+ else if (sym == Object_isInstanceOf || sym == ArrayClass)
+ PolyType(sym.info.typeParams, specialErasure(sym)(sym.info.resultType))
+ else if (sym.isAbstractType)
+ TypeBounds(WildcardType, WildcardType)
+ else if (sym.isTerm && sym.owner == ArrayClass) {
+ if (sym.isClassConstructor)
+ tp match {
+ case MethodType(params, TypeRef(pre, sym1, args)) =>
+ MethodType(cloneSymbolsAndModify(params, specialErasure(sym)),
+ typeRef(specialErasure(sym)(pre), sym1, args))
+ }
+ else if (sym.name == nme.apply)
+ tp
+ else if (sym.name == nme.update)
+ (tp: @unchecked) match {
+ case MethodType(List(index, tvar), restpe) =>
+ MethodType(List(index.cloneSymbol.setInfo(specialErasure(sym)(index.tpe)), tvar),
+ erasedTypeRef(UnitClass))
+ }
+ else specialErasure(sym)(tp)
+ } else if (
+ sym.owner != NoSymbol &&
+ sym.owner.owner == ArrayClass &&
+ sym == Array_update.paramss.head(1)) {
+ // special case for Array.update: the non-erased type remains, i.e. (Int,A)Unit
+ // since the erasure type map gets applied to every symbol, we have to catch the
+ // symbol here
+ tp
+ } else {
+ specialErasure(sym)(tp)
+ }
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/transform/RefChecks.scala b/src/reflect/scala/reflect/internal/transform/RefChecks.scala
new file mode 100644
index 0000000..d6108ab
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/transform/RefChecks.scala
@@ -0,0 +1,13 @@
+package scala.reflect
+package internal
+package transform
+
+trait RefChecks {
+
+ val global: SymbolTable
+ import global._
+
+ def transformInfo(sym: Symbol, tp: Type): Type =
+ if (sym.isModule && !sym.isStatic) NullaryMethodType(tp)
+ else tp
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/internal/transform/Transforms.scala b/src/reflect/scala/reflect/internal/transform/Transforms.scala
new file mode 100644
index 0000000..71cc808
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/transform/Transforms.scala
@@ -0,0 +1,41 @@
+package scala.reflect
+package internal
+package transform
+
+import scala.language.existentials
+
+trait Transforms { self: SymbolTable =>
+
+ /** We need to encode laziness by hand here because the three components refChecks, uncurry and erasure
+ * are overwritten by objects in Global.
+ * It would be best if objects could override lazy values. See SI-5187.
+ * In the absence of this, the Lazy functionality should probably be somewhere
+ * in the standard library. Or is it already?
+ */
+ private class Lazy[T](op: => T) {
+ private var value: T = _
+ private var _isDefined = false
+ def isDefined = _isDefined
+ def force: T = {
+ if (!isDefined) { value = op; _isDefined = true }
+ value
+ }
+ }
+
+ private val refChecksLazy = new Lazy(new { val global: Transforms.this.type = self } with RefChecks)
+ private val uncurryLazy = new Lazy(new { val global: Transforms.this.type = self } with UnCurry)
+ private val erasureLazy = new Lazy(new { val global: Transforms.this.type = self } with Erasure)
+
+ def refChecks = refChecksLazy.force
+ def uncurry = uncurryLazy.force
+ def erasure = erasureLazy.force
+
+ def transformedType(sym: Symbol) =
+ erasure.transformInfo(sym,
+ uncurry.transformInfo(sym,
+ refChecks.transformInfo(sym, sym.info)))
+
+ def transformedType(tpe: Type) =
+ erasure.scalaErasure(uncurry.uncurry(tpe))
+
+}
diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
new file mode 100644
index 0000000..00c7c3d
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
@@ -0,0 +1,70 @@
+package scala.reflect
+package internal
+package transform
+
+import Flags._
+
+trait UnCurry {
+
+ val global: SymbolTable
+ import global._
+ import definitions._
+
+ private def expandAlias(tp: Type): Type = if (!tp.isHigherKinded) tp.normalize else tp
+
+ val uncurry: TypeMap = new TypeMap {
+ def apply(tp0: Type): Type = {
+ val tp = expandAlias(tp0)
+ tp match {
+ case MethodType(params, MethodType(params1, restpe)) =>
+ // This transformation is described in UnCurryTransformer.dependentParamTypeErasure
+ val packSymbolsMap = new TypeMap {
+ // Wrapping in a TypeMap to reuse the code that opts for a fast path if the function is an identity.
+ def apply(tp: Type): Type = packSymbols(params, tp)
+ }
+ val existentiallyAbstractedParam1s = packSymbolsMap.mapOver(params1)
+ val substitutedResult = restpe.substSym(params1, existentiallyAbstractedParam1s)
+ apply(MethodType(params ::: existentiallyAbstractedParam1s, substitutedResult))
+ case MethodType(params, ExistentialType(tparams, restpe @ MethodType(_, _))) =>
+ abort("unexpected curried method types with intervening existential")
+ case MethodType(h :: t, restpe) if h.isImplicit =>
+ apply(MethodType(h.cloneSymbol.resetFlag(IMPLICIT) :: t, restpe))
+ case NullaryMethodType(restpe) =>
+ apply(MethodType(List(), restpe))
+ case TypeRef(pre, ByNameParamClass, arg :: Nil) =>
+ apply(functionType(List(), arg))
+ case TypeRef(pre, RepeatedParamClass, arg :: Nil) =>
+ apply(seqType(arg))
+ case TypeRef(pre, JavaRepeatedParamClass, arg :: Nil) =>
+ apply(arrayType(
+ if (isUnboundedGeneric(arg)) ObjectClass.tpe else arg))
+ case _ =>
+ expandAlias(mapOver(tp))
+ }
+ }
+ }
+
+ private val uncurryType = new TypeMap {
+ def apply(tp0: Type): Type = {
+ val tp = expandAlias(tp0)
+ tp match {
+ case ClassInfoType(parents, decls, clazz) =>
+ val parents1 = parents mapConserve uncurry
+ if (parents1 eq parents) tp
+ else ClassInfoType(parents1, decls, clazz) // @MAT normalize in decls??
+ case PolyType(_, _) =>
+ mapOver(tp)
+ case _ =>
+ tp
+ }
+ }
+ }
+
+ /** - return symbol's transformed type,
+ * - if symbol is a def parameter with transformed type T, return () => T
+ *
+ * @MAT: starting with this phase, the info of every symbol will be normalized
+ */
+ def transformInfo(sym: Symbol, tp: Type): Type =
+ if (sym.isType) uncurryType(tp) else uncurry(tp)
+}
diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala
new file mode 100644
index 0000000..2ba15e0
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/Collections.scala
@@ -0,0 +1,227 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.reflect.internal.util
+
+import scala.collection.{ mutable, immutable }
+import scala.annotation.tailrec
+import mutable.ListBuffer
+
+/** Profiler driven changes.
+ * TODO - inlining doesn't work from here because of the bug that
+ * methods in traits aren't inlined.
+ */
+trait Collections {
+ /** True if all three arguments have the same number of elements and
+ * the function is true for all the triples.
+ */
+ @tailrec final def corresponds3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])
+ (f: (A, B, C) => Boolean): Boolean = (
+ if (xs1.isEmpty) xs2.isEmpty && xs3.isEmpty
+ else !xs2.isEmpty && !xs3.isEmpty && f(xs1.head, xs2.head, xs3.head) && corresponds3(xs1.tail, xs2.tail, xs3.tail)(f)
+ )
+
+ /** All these mm methods are "deep map" style methods for
+ * mapping etc. on a list of lists while avoiding unnecessary
+ * intermediate structures like those created via flatten.
+ */
+ final def mexists[A](xss: List[List[A]])(p: A => Boolean) =
+ xss exists (_ exists p)
+ final def mforall[A](xss: List[List[A]])(p: A => Boolean) =
+ xss forall (_ forall p)
+ final def mmap[A, B](xss: List[List[A]])(f: A => B) =
+ xss map (_ map f)
+ final def mforeach[A](xss: List[List[A]])(f: A => Unit) =
+ xss foreach (_ foreach f)
+ final def mfind[A](xss: List[List[A]])(p: A => Boolean): Option[A] = {
+ var res: Option[A] = null
+ mforeach(xss)(x => if ((res eq null) && p(x)) res = Some(x))
+ if (res eq null) None else res
+ }
+ final def mfilter[A](xss: List[List[A]])(p: A => Boolean) =
+ for (xs <- xss; x <- xs; if p(x)) yield x
+
+ final def map2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => C): List[C] = {
+ val lb = new ListBuffer[C]
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ lb += f(ys1.head, ys2.head)
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ lb.toList
+ }
+ final def map3[A, B, C, D](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => D): List[D] = {
+ if (xs1.isEmpty || xs2.isEmpty || xs3.isEmpty) Nil
+ else f(xs1.head, xs2.head, xs3.head) :: map3(xs1.tail, xs2.tail, xs3.tail)(f)
+ }
+ final def flatMap2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => List[C]): List[C] = {
+ val lb = new ListBuffer[C]
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ lb ++= f(ys1.head, ys2.head)
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ lb.toList
+ }
+
+ final def flatCollect[A, B](elems: List[A])(pf: PartialFunction[A, Traversable[B]]): List[B] = {
+ val lb = new ListBuffer[B]
+ for (x <- elems ; if pf isDefinedAt x)
+ lb ++= pf(x)
+
+ lb.toList
+ }
+
+ final def distinctBy[A, B](xs: List[A])(f: A => B): List[A] = {
+ val buf = new ListBuffer[A]
+ val seen = mutable.Set[B]()
+ xs foreach { x =>
+ val y = f(x)
+ if (!seen(y)) {
+ buf += x
+ seen += y
+ }
+ }
+ buf.toList
+ }
+
+ @tailrec final def flattensToEmpty(xss: Seq[Seq[_]]): Boolean = {
+ xss.isEmpty || xss.head.isEmpty && flattensToEmpty(xss.tail)
+ }
+
+ final def foreachWithIndex[A, B](xs: List[A])(f: (A, Int) => Unit) {
+ var index = 0
+ var ys = xs
+ while (!ys.isEmpty) {
+ f(ys.head, index)
+ ys = ys.tail
+ index += 1
+ }
+ }
+
+ // @inline
+ final def findOrElse[A](xs: TraversableOnce[A])(p: A => Boolean)(orElse: => A): A = {
+ xs find p getOrElse orElse
+ }
+
+ final def mapFrom[A, A1 >: A, B](xs: List[A])(f: A => B): Map[A1, B] = {
+ Map[A1, B](xs map (x => (x, f(x))): _*)
+ }
+
+ final def mapWithIndex[A, B](xs: List[A])(f: (A, Int) => B): List[B] = {
+ val lb = new ListBuffer[B]
+ var index = 0
+ var ys = xs
+ while (!ys.isEmpty) {
+ lb += f(ys.head, index)
+ ys = ys.tail
+ index += 1
+ }
+ lb.toList
+ }
+ final def collectMap2[A, B, C](xs1: List[A], xs2: List[B])(p: (A, B) => Boolean): Map[A, B] = {
+ if (xs1.isEmpty || xs2.isEmpty)
+ return Map()
+
+ val buf = immutable.Map.newBuilder[A, B]
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ val x1 = ys1.head
+ val x2 = ys2.head
+ if (p(x1, x2))
+ buf += ((x1, x2))
+
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ buf.result
+ }
+ final def foreach2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Unit): Unit = {
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ f(ys1.head, ys2.head)
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ }
+ final def foreach3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Unit): Unit = {
+ var ys1 = xs1
+ var ys2 = xs2
+ var ys3 = xs3
+ while (!ys1.isEmpty && !ys2.isEmpty && !ys3.isEmpty) {
+ f(ys1.head, ys2.head, ys3.head)
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ ys3 = ys3.tail
+ }
+ }
+ final def exists2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = {
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ if (f(ys1.head, ys2.head))
+ return true
+
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ false
+ }
+ final def exists3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Boolean): Boolean = {
+ var ys1 = xs1
+ var ys2 = xs2
+ var ys3 = xs3
+ while (!ys1.isEmpty && !ys2.isEmpty && !ys3.isEmpty) {
+ if (f(ys1.head, ys2.head, ys3.head))
+ return true
+
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ ys3 = ys3.tail
+ }
+ false
+ }
+ final def forall2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = {
+ var ys1 = xs1
+ var ys2 = xs2
+ while (!ys1.isEmpty && !ys2.isEmpty) {
+ if (!f(ys1.head, ys2.head))
+ return false
+
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ }
+ true
+ }
+ final def forall3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Boolean): Boolean = {
+ var ys1 = xs1
+ var ys2 = xs2
+ var ys3 = xs3
+ while (!ys1.isEmpty && !ys2.isEmpty && !ys3.isEmpty) {
+ if (!f(ys1.head, ys2.head, ys3.head))
+ return false
+
+ ys1 = ys1.tail
+ ys2 = ys2.tail
+ ys3 = ys3.tail
+ }
+ true
+ }
+
+ final def transposeSafe[A](ass: List[List[A]]): Option[List[List[A]]] = try {
+ Some(ass.transpose)
+ } catch {
+ case _: IllegalArgumentException => None
+ }
+}
+
+object Collections extends Collections { }
+
diff --git a/src/reflect/scala/reflect/internal/util/HashSet.scala b/src/reflect/scala/reflect/internal/util/HashSet.scala
new file mode 100644
index 0000000..4135f3c
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/HashSet.scala
@@ -0,0 +1,106 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect.internal.util
+
+object HashSet {
+ def apply[T >: Null <: AnyRef](): HashSet[T] = this(16)
+ def apply[T >: Null <: AnyRef](label: String): HashSet[T] = this(label, 16)
+ def apply[T >: Null <: AnyRef](initialCapacity: Int): HashSet[T] = this("No Label", initialCapacity)
+ def apply[T >: Null <: AnyRef](label: String, initialCapacity: Int): HashSet[T] =
+ new HashSet[T](label, initialCapacity)
+}
+
+class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) extends Set[T] with scala.collection.generic.Clearable {
+ private var used = 0
+ private var table = new Array[AnyRef](initialCapacity)
+ private def index(x: Int): Int = math.abs(x % table.length)
+
+ def size: Int = used
+ def clear() {
+ used = 0
+ table = new Array[AnyRef](initialCapacity)
+ }
+
+ def findEntryOrUpdate(x: T): T = {
+ var h = index(x.##)
+ var entry = table(h)
+ while (entry ne null) {
+ if (x == entry)
+ return entry.asInstanceOf[T]
+
+ h = index(h + 1)
+ entry = table(h)
+ }
+ table(h) = x
+ used += 1
+ if (used > (table.length >> 2)) growTable()
+ x
+ }
+
+ def findEntry(x: T): T = {
+ var h = index(x.##)
+ var entry = table(h)
+ while ((entry ne null) && x != entry) {
+ h = index(h + 1)
+ entry = table(h)
+ }
+ entry.asInstanceOf[T]
+ }
+
+ def addEntry(x: T) {
+ var h = index(x.##)
+ var entry = table(h)
+ while (entry ne null) {
+ if (x == entry) return
+ h = index(h + 1)
+ entry = table(h)
+ }
+ table(h) = x
+ used += 1
+ if (used > (table.length >> 2)) growTable()
+ }
+ def addEntries(xs: TraversableOnce[T]) {
+ xs foreach addEntry
+ }
+
+ def iterator = new Iterator[T] {
+ private var i = 0
+ def hasNext: Boolean = {
+ while (i < table.length && (table(i) eq null)) i += 1
+ i < table.length
+ }
+ def next(): T =
+ if (hasNext) { i += 1; table(i - 1).asInstanceOf[T] }
+ else null
+ }
+
+ private def addOldEntry(x: T) {
+ var h = index(x.##)
+ var entry = table(h)
+ while (entry ne null) {
+ h = index(h + 1)
+ entry = table(h)
+ }
+ table(h) = x
+ }
+
+ private def growTable() {
+ val oldtable = table
+ val growthFactor =
+ if (table.length <= initialCapacity) 8
+ else if (table.length <= (initialCapacity * 8)) 4
+ else 2
+
+ table = new Array[AnyRef](table.length * growthFactor)
+ var i = 0
+ while (i < oldtable.length) {
+ val entry = oldtable(i)
+ if (entry ne null) addOldEntry(entry.asInstanceOf[T])
+ i += 1
+ }
+ }
+ override def toString() = "HashSet %s(%d / %d)".format(label, used, table.length)
+}
diff --git a/src/reflect/scala/reflect/internal/util/Origins.scala b/src/reflect/scala/reflect/internal/util/Origins.scala
new file mode 100644
index 0000000..3259a12
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/Origins.scala
@@ -0,0 +1,119 @@
+/* NSC -- new scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.reflect
+package internal.util
+
+import NameTransformer._
+import scala.collection.{ mutable, immutable }
+import Origins._
+
+/** A debugging class for logging from whence a method is being called.
+ * Say you wanted to discover who was calling phase_= in SymbolTable.
+ * You could do this:
+ *
+ * {{{
+ * private lazy val origins = Origins("arbitraryTag")
+ * // Commented out original enclosed for contrast
+ * // final def phase_=(p: Phase): Unit = {
+ * final def phase_=(p: Phase): Unit = origins {
+ * }}}
+ *
+ * And that's it. When the JVM exits it would issue a report something like this:
+ {{{
+ >> Origins tag 'arbitraryTag' logged 145585 calls from 51 distinguished sources.
+
+ 71114 scala.tools.nsc.symtab.Symbols$Symbol.unsafeTypeParams(Symbols.scala:862)
+ 16584 scala.tools.nsc.symtab.Symbols$Symbol.rawInfo(Symbols.scala:757)
+ 15411 scala.tools.nsc.symtab.Symbols$Symbol.unsafeTypeParams(Symbols.scala:869)
+ 11507 scala.tools.nsc.symtab.Symbols$Symbol.rawInfo(Symbols.scala:770)
+ 10285 scala.tools.nsc.symtab.Symbols$Symbol.unsafeTypeParams(Symbols.scala:864)
+ 6860 scala.tools.nsc.transform.SpecializeTypes.specializedTypeVars(SpecializeTypes.scala:304)
+ ...
+ }}}
+ *
+ */
+abstract class Origins {
+ type Rep
+ type StackSlice = Array[StackTraceElement]
+
+ def tag: String
+ def isCutoff(el: StackTraceElement): Boolean
+ def newRep(xs: StackSlice): Rep
+ def repString(rep: Rep): String
+
+ private val origins = new mutable.HashMap[Rep, Int] withDefaultValue 0
+ private def add(xs: Rep) = origins(xs) += 1
+ private def total = origins.values.foldLeft(0L)(_ + _)
+
+ // Create a stack and whittle it down to the interesting part.
+ def readStack(): Array[StackTraceElement] = (
+ Thread.currentThread.getStackTrace dropWhile (x => !isCutoff(x)) dropWhile isCutoff drop 1
+ )
+
+ def apply[T](body: => T): T = {
+ add(newRep(readStack()))
+ body
+ }
+ def clear() = origins.clear()
+ def show() = {
+ println("\n>> Origins tag '%s' logged %s calls from %s distinguished sources.\n".format(tag, total, origins.keys.size))
+ origins.toList sortBy (-_._2) foreach {
+ case (k, v) => println("%7s %s".format(v, repString(k)))
+ }
+ }
+ def purge() = {
+ show()
+ clear()
+ }
+}
+
+object Origins {
+ private val counters = mutable.HashMap[String, Origins]()
+ private val thisClass = this.getClass.getName
+
+ locally {
+ sys.addShutdownHook(counters.values foreach (_.purge()))
+ }
+
+ case class OriginId(className: String, methodName: String) {
+ def matches(el: StackTraceElement) = (
+ (methodName == el.getMethodName) && (className startsWith el.getClassName)
+ )
+ }
+
+ def lookup(tag: String, orElse: String => Origins): Origins =
+ counters.getOrElseUpdate(tag, orElse(tag))
+ def register(x: Origins): Origins = {
+ counters(x.tag) = x
+ x
+ }
+
+ private def preCutoff(el: StackTraceElement) = (
+ (el.getClassName == thisClass)
+ || (el.getClassName startsWith "java.lang.")
+ )
+ private def findCutoff() = {
+ val cutoff = (Thread.currentThread.getStackTrace dropWhile preCutoff).head
+ OriginId(cutoff.getClassName, cutoff.getMethodName)
+ }
+
+ def apply(tag: String): Origins = counters.getOrElseUpdate(tag, new OneLine(tag, findCutoff()))
+ def apply(tag: String, frames: Int): Origins = counters.getOrElseUpdate(tag, new MultiLine(tag, findCutoff(), frames))
+
+ class OneLine(val tag: String, id: OriginId) extends Origins {
+ type Rep = StackTraceElement
+ def isCutoff(el: StackTraceElement) = id matches el
+ def newRep(xs: StackSlice): Rep = if ((xs eq null) || (xs.length == 0)) null else xs(0)
+ def repString(rep: Rep) = " " + rep
+ }
+ class MultiLine(val tag: String, id: OriginId, numLines: Int) extends Origins {
+ type Rep = List[StackTraceElement]
+ def isCutoff(el: StackTraceElement) = id matches el
+ def newRep(xs: StackSlice): Rep = (xs take numLines).toList
+ def repString(rep: Rep) = rep.map("\n " + _).mkString
+ override def readStack() = super.readStack() drop 1
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala
new file mode 100644
index 0000000..8f287a1
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/Position.scala
@@ -0,0 +1,311 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ *
+ */
+
+package scala.reflect.internal.util
+
+import scala.reflect.ClassTag
+import scala.reflect.internal.FatalError
+import scala.reflect.macros.Attachments
+
+object Position {
+ val tabInc = 8
+
+ /** Prints the message with the given position indication. */
+ def formatMessage(posIn: Position, msg: String, shortenFile: Boolean): String = {
+ val pos = (
+ if (posIn eq null) NoPosition
+ else if (posIn.isDefined) posIn.inUltimateSource(posIn.source)
+ else posIn
+ )
+ def file = pos.source.file
+ def prefix = if (shortenFile) file.name else file.path
+
+ pos match {
+ case FakePos(fmsg) => fmsg+" "+msg
+ case NoPosition => msg
+ case _ =>
+ List(
+ "%s:%s: %s".format(prefix, pos.line, msg),
+ pos.lineContent.stripLineEnd,
+ " " * (pos.column - 1) + "^"
+ ) mkString "\n"
+ }
+ }
+}
+
+/** The Position class and its subclasses represent positions of ASTs and symbols.
+ * Except for NoPosition and FakePos, every position refers to a SourceFile
+ * and to an offset in the sourcefile (its `point`). For batch compilation,
+ * that's all. For interactive IDE's there are also RangePositions
+ * and TransparentPositions. A RangePosition indicates a start and an end
+ * in addition to its point. TransparentPositions are a subclass of RangePositions.
+ * Range positions that are not transparent are called opaque.
+ * Trees with RangePositions need to satisfy the following invariants.
+ *
+ * INV1: A tree with an offset position never contains a child
+ * with a range position
+ * INV2: If the child of a tree with a range position also has a range position,
+ * then the child's range is contained in the parent's range.
+ * INV3: Opaque range positions of children of the same node are non-overlapping
+ * (this means their overlap is at most a single point).
+ *
+ * The following tests are useful on positions:
+ *
+ * pos.isDefined true if position is not a NoPosition nor a FakePosition
+ * pos.isRange true if position is a range
+ * pos.isOpaqueRange true if position is an opaque range
+ *
+ * The following accessor methods are provided:
+ *
+ * pos.source The source file of the position, which must be defined
+ * pos.point The offset of the position's point, which must be defined
+ * pos.start The start of the position, which must be a range
+ * pos.end The end of the position, which must be a range
+ *
+ * There are also convenience methods, such as
+ *
+ * pos.startOrPoint
+ * pos.endOrPoint
+ * pos.pointOrElse(default)
+ *
+ * These are less strict about the kind of position on which they can be applied.
+ *
+ * The following conversion methods are often used:
+ *
+ * pos.focus converts a range position to an offset position, keeping its point;
+ * returns all other positions unchanged.
+ * pos.makeTransparent converts an opaque range position into a transparent one.
+ * returns all other positions unchanged.
+ */
+abstract class Position extends scala.reflect.api.Position { self =>
+
+ type Pos = Position
+
+ def pos: Position = this
+
+ def withPos(newPos: Position): Attachments { type Pos = self.Pos } = newPos
+
+ /** An optional value containing the source file referred to by this position, or
+ * None if not defined.
+ */
+ def source: SourceFile = throw new UnsupportedOperationException(s"Position.source on ${this.getClass}")
+
+ /** Is this position neither a NoPosition nor a FakePosition?
+ * If isDefined is true, offset and source are both defined.
+ */
+ def isDefined: Boolean = false
+
+ /** Is this position a transparent position? */
+ def isTransparent: Boolean = false
+
+ /** Is this position a range position? */
+ def isRange: Boolean = false
+
+ /** Is this position a non-transparent range position? */
+ def isOpaqueRange: Boolean = false
+
+ /** if opaque range, make this position transparent */
+ def makeTransparent: Position = this
+
+ /** The start of the position's range, error if not a range position */
+ def start: Int = throw new UnsupportedOperationException(s"Position.start on ${this.getClass}")
+
+ /** The start of the position's range, or point if not a range position */
+ def startOrPoint: Int = point
+
+ /** The point (where the ^ is) of the position */
+ def point: Int = throw new UnsupportedOperationException(s"Position.point on ${this.getClass}")
+
+ /** The point (where the ^ is) of the position, or else `default` if undefined */
+ def pointOrElse(default: Int): Int = default
+
+ /** The end of the position's range, error if not a range position */
+ def end: Int = throw new UnsupportedOperationException(s"Position.end on ${this.getClass}")
+
+ /** The end of the position's range, or point if not a range position */
+ def endOrPoint: Int = point
+
+ @deprecated("use point instead", "2.9.0")
+ def offset: Option[Int] = if (isDefined) Some(point) else None
+
+ /** The same position with a different start value (if a range) */
+ def withStart(off: Int): Position = this
+
+ /** The same position with a different end value (if a range) */
+ def withEnd(off: Int): Position = this
+
+ /** The same position with a different point value (if a range or offset) */
+ def withPoint(off: Int): Position = this
+
+ /** The same position with a different source value, and its values shifted by given offset */
+ def withSource(source: SourceFile, shift: Int): Position = this
+
+ /** If this is a range, the union with the other range, with the point of this position.
+ * Otherwise, this position
+ */
+ def union(pos: Position): Position = this
+
+ /** If this is a range position, the offset position of its start.
+ * Otherwise the position itself
+ */
+ def focusStart: Position = this
+
+ /** If this is a range position, the offset position of its point.
+ * Otherwise the position itself
+ */
+ def focus: Position = this
+
+ /** If this is a range position, the offset position of its end.
+ * Otherwise the position itself
+ */
+ def focusEnd: Position = this
+
+ /** Does this position include the given position `pos`.
+ * This holds if `this` is a range position and its range [start..end]
+ * is the same or covers the range of the given position, which may or may not be a range position.
+ */
+ def includes(pos: Position): Boolean = false
+
+ /** Does this position properly include the given position `pos` ("properly" meaning their
+ * ranges are not the same)?
+ */
+ def properlyIncludes(pos: Position): Boolean =
+ includes(pos) && (start < pos.startOrPoint || pos.endOrPoint < end)
+
+ /** Does this position precede that position?
+ * This holds if both positions are defined and the end point of this position
+ * is not larger than the start point of the given position.
+ */
+ def precedes(pos: Position): Boolean =
+ isDefined && pos.isDefined && endOrPoint <= pos.startOrPoint
+
+ /** Does this position properly precede the given position `pos` ("properly" meaning their ranges
+ * do not share a common point).
+ */
+ def properlyPrecedes(pos: Position): Boolean =
+ isDefined && pos.isDefined && endOrPoint < pos.startOrPoint
+
+ /** Does this position overlap with that position?
+ * This holds if both positions are ranges and there is an interval of
+ * non-zero length that is shared by both position ranges.
+ */
+ def overlaps(pos: Position): Boolean =
+ isRange && pos.isRange &&
+ ((pos.start < end && start < pos.end) || (start < pos.end && pos.start < end))
+
+ /** Does this position cover the same range as that position?
+ * Holds only if both position are ranges
+ */
+ def sameRange(pos: Position): Boolean =
+ isRange && pos.isRange && start == pos.start && end == pos.end
+
+ def line: Int = throw new UnsupportedOperationException("Position.line")
+
+ def column: Int = throw new UnsupportedOperationException("Position.column")
+
+ /** Convert this to a position around `point` that spans a single source line */
+ def toSingleLine: Position = this
+
+ def lineContent: String =
+ if (isDefined) source.lineToString(line - 1)
+ else "NO_LINE"
+
+ /** Map this position to a position in an original source
+ * file. If the SourceFile is a normal SourceFile, simply
+ * return this.
+ */
+ def inUltimateSource(source : SourceFile): Position =
+ if (source == null) this else source.positionInUltimateSource(this)
+
+ def dbgString: String = toString
+ def safeLine: Int = try line catch { case _: UnsupportedOperationException => -1 }
+
+ def show: String = "["+toString+"]"
+}
+
+case object NoPosition extends Position {
+ override def dbgString = toString
+}
+
+case class FakePos(msg: String) extends Position {
+ override def toString = msg
+}
+
+class OffsetPosition(override val source: SourceFile, override val point: Int) extends Position {
+ override def isDefined = true
+ override def pointOrElse(default: Int): Int = point
+ override def withPoint(off: Int) = new OffsetPosition(source, off)
+ override def withSource(source: SourceFile, shift: Int) = new OffsetPosition(source, point + shift)
+
+ override def line: Int = source.offsetToLine(point) + 1
+
+ override def column: Int = {
+ var idx = source.lineToOffset(source.offsetToLine(point))
+ var col = 0
+ while (idx != point) {
+ col += (if (source.content(idx) == '\t') Position.tabInc - col % Position.tabInc else 1)
+ idx += 1
+ }
+ col + 1
+ }
+
+ override def union(pos: Position) = if (pos.isRange) pos else this
+
+ override def equals(that : Any) = that match {
+ case that : OffsetPosition => point == that.point && source.file == that.source.file
+ case that => false
+ }
+ override def hashCode = point * 37 + source.file.hashCode
+
+ override def toString = {
+ val pointmsg = if (point > source.length) "out-of-bounds-" else "offset="
+ "source-%s,line-%s,%s%s".format(source.file.canonicalPath, line, pointmsg, point)
+ }
+ override def show = "["+point+"]"
+}
+
+/** new for position ranges */
+class RangePosition(source: SourceFile, override val start: Int, point: Int, override val end: Int)
+extends OffsetPosition(source, point) {
+ if (start > end) sys.error("bad position: "+show)
+ override def isRange: Boolean = true
+ override def isOpaqueRange: Boolean = true
+ override def startOrPoint: Int = start
+ override def endOrPoint: Int = end
+ override def withStart(off: Int) = new RangePosition(source, off, point, end)
+ override def withEnd(off: Int) = new RangePosition(source, start, point, off)
+ override def withPoint(off: Int) = new RangePosition(source, start, off, end)
+ override def withSource(source: SourceFile, shift: Int) = new RangePosition(source, start + shift, point + shift, end + shift)
+ override def focusStart = new OffsetPosition(source, start)
+ override def focus = {
+ if (focusCache eq NoPosition) focusCache = new OffsetPosition(source, point)
+ focusCache
+ }
+ override def focusEnd = new OffsetPosition(source, end)
+ override def makeTransparent = new TransparentPosition(source, start, point, end)
+ override def includes(pos: Position) = pos.isDefined && start <= pos.startOrPoint && pos.endOrPoint <= end
+ override def union(pos: Position): Position =
+ if (pos.isRange) new RangePosition(source, start min pos.start, point, end max pos.end) else this
+
+ override def toSingleLine: Position = source match {
+ case bs: BatchSourceFile
+ if end > 0 && bs.offsetToLine(start) < bs.offsetToLine(end - 1) =>
+ val pointLine = bs.offsetToLine(point)
+ new RangePosition(source, bs.lineToOffset(pointLine), point, bs.lineToOffset(pointLine + 1))
+ case _ => this
+ }
+
+ override def toString = "RangePosition("+source.file.canonicalPath+", "+start+", "+point+", "+end+")"
+ override def show = "["+start+":"+end+"]"
+ private var focusCache: Position = NoPosition
+}
+
+class TransparentPosition(source: SourceFile, start: Int, point: Int, end: Int) extends RangePosition(source, start, point, end) {
+ override def isOpaqueRange: Boolean = false
+ override def isTransparent = true
+ override def makeTransparent = this
+ override def show = "<"+start+":"+end+">"
+}
diff --git a/src/reflect/scala/reflect/internal/util/Set.scala b/src/reflect/scala/reflect/internal/util/Set.scala
new file mode 100644
index 0000000..36bdb81
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/Set.scala
@@ -0,0 +1,28 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+package scala.reflect.internal.util
+
+/** A common class for lightweight sets.
+ */
+abstract class Set[T <: AnyRef] {
+
+ def findEntry(x: T): T
+
+ def addEntry(x: T): Unit
+
+ def iterator: Iterator[T]
+
+ def foreach[U](f: T => U): Unit = iterator foreach f
+
+ def apply(x: T): Boolean = contains(x)
+
+ @deprecated("use `iterator` instead", "2.9.0") def elements = iterator
+
+ def contains(x: T): Boolean =
+ findEntry(x) ne null
+
+ def toList = iterator.toList
+
+}
diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala
new file mode 100644
index 0000000..bc2d0ee
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala
@@ -0,0 +1,171 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+
+package scala.reflect.internal.util
+
+import scala.reflect.io.{ AbstractFile, VirtualFile }
+import scala.collection.mutable.ArrayBuffer
+import scala.annotation.tailrec
+import java.util.regex.Pattern
+import java.io.IOException
+import scala.reflect.internal.Chars._
+
+/** abstract base class of a source file used in the compiler */
+abstract class SourceFile {
+ def content : Array[Char] // normalized, must end in SU
+ def file : AbstractFile
+ def isLineBreak(idx : Int) : Boolean
+ def isSelfContained: Boolean
+ def length : Int
+ def position(offset: Int) : Position = {
+ assert(offset < length, file + ": " + offset + " >= " + length)
+ new OffsetPosition(this, offset)
+ }
+ def position(line: Int, column: Int) : Position = new OffsetPosition(this, lineToOffset(line) + column)
+
+ def offsetToLine(offset: Int): Int
+ def lineToOffset(index : Int): Int
+
+ /** Map a position to a position in the underlying source file.
+ * For regular source files, simply return the argument.
+ */
+ def positionInUltimateSource(position: Position) = position
+ override def toString() = file.name
+ def dbg(offset: Int) = (new OffsetPosition(this, offset)).dbgString
+ def path = file.path
+
+ def beginsWith(offset: Int, text: String): Boolean =
+ (content drop offset) startsWith text
+
+ def lineToString(index: Int): String =
+ content drop lineToOffset(index) takeWhile (c => !isLineBreakChar(c.toChar)) mkString ""
+
+ @tailrec
+ final def skipWhitespace(offset: Int): Int =
+ if (content(offset).isWhitespace) skipWhitespace(offset + 1) else offset
+
+ def identifier(pos: Position): Option[String] = None
+}
+
+/** An object representing a missing source file.
+ */
+object NoSourceFile extends SourceFile {
+ def content = Array()
+ def file = NoFile
+ def isLineBreak(idx: Int) = false
+ def isSelfContained = true
+ def length = -1
+ def offsetToLine(offset: Int) = -1
+ def lineToOffset(index : Int) = -1
+ override def toString = "<no source file>"
+}
+
+object NoFile extends VirtualFile("<no file>", "<no file>")
+
+object ScriptSourceFile {
+ /** Length of the script header from the given content, if there is one.
+ * The header begins with "#!" or "::#!" and ends with a line starting
+ * with "!#" or "::!#".
+ */
+ def headerLength(cs: Array[Char]): Int = {
+ val headerPattern = Pattern.compile("""((?m)^(::)?!#.*|^.*/env .*)(\r|\n|\r\n)""")
+ val headerStarts = List("#!", "::#!")
+
+ if (headerStarts exists (cs startsWith _)) {
+ val matcher = headerPattern matcher cs.mkString
+ if (matcher.find) matcher.end
+ else throw new IOException("script file does not close its header with !# or ::!#")
+ }
+ else 0
+ }
+ def stripHeader(cs: Array[Char]): Array[Char] = cs drop headerLength(cs)
+
+ def apply(file: AbstractFile, content: Array[Char]) = {
+ val underlying = new BatchSourceFile(file, content)
+ val headerLen = headerLength(content)
+ val stripped = new ScriptSourceFile(underlying, content drop headerLen, headerLen)
+
+ stripped
+ }
+}
+import ScriptSourceFile._
+
+class ScriptSourceFile(underlying: BatchSourceFile, content: Array[Char], override val start: Int) extends BatchSourceFile(underlying.file, content) {
+ override def isSelfContained = false
+
+ override def positionInUltimateSource(pos: Position) =
+ if (!pos.isDefined) super.positionInUltimateSource(pos)
+ else pos.withSource(underlying, start)
+}
+
+/** a file whose contents do not change over time */
+class BatchSourceFile(val file : AbstractFile, val content0: Array[Char]) extends SourceFile {
+ def this(_file: AbstractFile) = this(_file, _file.toCharArray)
+ def this(sourceName: String, cs: Seq[Char]) = this(new VirtualFile(sourceName), cs.toArray)
+ def this(file: AbstractFile, cs: Seq[Char]) = this(file, cs.toArray)
+
+ // If non-whitespace tokens run all the way up to EOF,
+ // positions go wrong because the correct end of the last
+ // token cannot be used as an index into the char array.
+ // The least painful way to address this was to add a
+ // newline to the array.
+ val content = (
+ if (content0.length == 0 || !content0.last.isWhitespace)
+ content0 :+ '\n'
+ else content0
+ )
+ val length = content.length
+ def start = 0
+ def isSelfContained = true
+
+ override def identifier(pos: Position) =
+ if (pos.isDefined && pos.source == this && pos.point != -1) {
+ def isOK(c: Char) = isIdentifierPart(c) || isOperatorPart(c)
+ Some(new String(content drop pos.point takeWhile isOK))
+ } else {
+ super.identifier(pos)
+ }
+
+ def isLineBreak(idx: Int) =
+ if (idx >= length) false else {
+ val ch = content(idx)
+ // don't identify the CR in CR LF as a line break, since LF will do.
+ if (ch == CR) (idx + 1 == length) || (content(idx + 1) != LF)
+ else isLineBreakChar(ch)
+ }
+
+ def calculateLineIndices(cs: Array[Char]) = {
+ val buf = new ArrayBuffer[Int]
+ buf += 0
+ for (i <- 0 until cs.length) if (isLineBreak(i)) buf += i + 1
+ buf += cs.length // sentinel, so that findLine below works smoother
+ buf.toArray
+ }
+ private lazy val lineIndices: Array[Int] = calculateLineIndices(content)
+
+ def lineToOffset(index : Int): Int = lineIndices(index)
+
+ private var lastLine = 0
+
+ /** Convert offset to line in this source file
+ * Lines are numbered from 0
+ */
+ def offsetToLine(offset: Int): Int = {
+ val lines = lineIndices
+ def findLine(lo: Int, hi: Int, mid: Int): Int =
+ if (offset < lines(mid)) findLine(lo, mid - 1, (lo + mid - 1) / 2)
+ else if (offset >= lines(mid + 1)) findLine(mid + 1, hi, (mid + 1 + hi) / 2)
+ else mid
+ lastLine = findLine(0, lines.length, lastLine)
+ lastLine
+ }
+
+ override def equals(that : Any) = that match {
+ case that : BatchSourceFile => file.path == that.file.path && start == that.start
+ case _ => false
+ }
+ override def hashCode = file.path.## + start.##
+}
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
new file mode 100644
index 0000000..cbd27b0
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -0,0 +1,273 @@
+package scala.reflect.internal.util
+
+import scala.collection.mutable
+
+object Statistics {
+
+ type TimerSnapshot = (Long, Long)
+
+ /** If enabled, increment counter by one */
+ @inline final def incCounter(c: Counter) {
+ if (_enabled && c != null) c.value += 1
+ }
+
+ /** If enabled, increment counter by given delta */
+ @inline final def incCounter(c: Counter, delta: Int) {
+ if (_enabled && c != null) c.value += delta
+ }
+
+ /** If enabled, increment counter in map `ctrs` at index `key` by one */
+ @inline final def incCounter[K](ctrs: QuantMap[K, Counter], key: K) =
+ if (_enabled && ctrs != null) ctrs(key).value += 1
+
+ /** If enabled, start subcounter. While active it will track all increments of
+ * its base counter.
+ */
+ @inline final def startCounter(sc: SubCounter): (Int, Int) =
+ if (_enabled && sc != null) sc.start() else null
+
+ /** If enabled, stop subcounter from tracking its base counter. */
+ @inline final def stopCounter(sc: SubCounter, start: (Int, Int)) {
+ if (_enabled && sc != null) sc.stop(start)
+ }
+
+ /** If enabled, start timer */
+ @inline final def startTimer(tm: Timer): TimerSnapshot =
+ if (_enabled && tm != null) tm.start() else null
+
+ /** If enabled, stop timer */
+ @inline final def stopTimer(tm: Timer, start: TimerSnapshot) {
+ if (_enabled && tm != null) tm.stop(start)
+ }
+
+ /** If enabled, push and start a new timer in timer stack */
+ @inline final def pushTimer(timers: TimerStack, timer: => StackableTimer): TimerSnapshot =
+ if (_enabled && timers != null) timers.push(timer) else null
+
+ /** If enabled, stop and pop timer from timer stack */
+ @inline final def popTimer(timers: TimerStack, prev: TimerSnapshot) {
+ if (_enabled && timers != null) timers.pop(prev)
+ }
+
+ /** Create a new counter that shows as `prefix` and is active in given phases */
+ def newCounter(prefix: String, phases: String*) = new Counter(prefix, phases)
+
+ /** Create a new relative counter that shows as `prefix` and is active
+ * in the same phases as its base counter. Relative counters print as percentages
+ * of their base counters.
+ */
+ def newRelCounter(prefix: String, ctr: Counter): Counter = new RelCounter(prefix, ctr)
+
+ /** Create a new subcounter that shows as `prefix` and is active
+ * in the same phases as its base counter. Subcounters can track
+ * increments of their base counters and print as percentages
+ * of their base counters.
+ */
+ def newSubCounter(prefix: String, ctr: Counter): SubCounter = new SubCounter(prefix, ctr)
+
+ /** Create a new counter that shows as `prefix` and is active in given phases */
+ def newTimer(prefix: String, phases: String*): Timer = new Timer(prefix, phases)
+
+ /** Create a new subtimer that shows as `prefix` and is active
+ * in the same phases as its base timer. Subtimers can track
+ * increments of their base timers and print as percentages
+ * of their base timers.
+ */
+ def newSubTimer(prefix: String, timer: Timer): Timer = new SubTimer(prefix, timer)
+
+ /** Create a new stackable that shows as `prefix` and is active
+ * in the same phases as its base timer. Stackable timers are subtimers
+ * that can be stacked ina timerstack, and that print aggregate, as well as specific
+ * durations.
+ */
+ def newStackableTimer(prefix: String, timer: Timer): StackableTimer = new StackableTimer(prefix, timer)
+
+ /** Create a new view that shows as `prefix` and is active in given phases.
+ * The view always reflects the current value of `quant` as a quantity.
+ */
+ def newView(prefix: String, phases: String*)(quant: => Any): View = new View(prefix, phases,
+quant)
+
+ /** Create a new quantity map that shows as `prefix` and is active in given phases.
+ */
+ def newQuantMap[K, V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[K, V] = new QuantMap(prefix, phases, initValue)
+
+ /** Same as newQuantMap, where the key type is fixed to be Class[_] */
+ def newByClass[V <% Ordered[V]](prefix: String, phases: String*)(initValue: => V): QuantMap[Class[_], V] = new QuantMap(prefix, phases, initValue)
+
+ /** Create a new timer stack */
+ def newTimerStack() = new TimerStack()
+
+ def allQuantities: Iterable[Quantity] =
+ for ((_, q) <- qs if q.underlying == q;
+ r <- q :: q.children.toList if r.prefix.nonEmpty) yield r
+
+ private def showPercent(x: Double, base: Double) =
+ if (base == 0) "" else f" (${x / base * 100}%2.1f%%)"
+
+ /** The base trait for quantities.
+ * Quantities with non-empty prefix are printed in the statistics info.
+ */
+ trait Quantity {
+ if (enabled && prefix.nonEmpty) {
+ val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix"
+ qs(key) = this
+ }
+ val prefix: String
+ val phases: Seq[String]
+ def underlying: Quantity = this
+ def showAt(phase: String) = phases.isEmpty || (phases contains phase)
+ def line = f"$prefix%-30s: ${this}"
+ val children = new mutable.ListBuffer[Quantity]
+ }
+
+ trait SubQuantity extends Quantity {
+ protected def underlying: Quantity
+ underlying.children += this
+ }
+
+ class Counter(val prefix: String, val phases: Seq[String]) extends Quantity with Ordered[Counter] {
+ var value: Int = 0
+ def compare(that: Counter): Int =
+ if (this.value < that.value) -1
+ else if (this.value > that.value) 1
+ else 0
+ override def toString = value.toString
+ }
+
+ class View(val prefix: String, val phases: Seq[String], quant: => Any) extends Quantity {
+ override def toString = quant.toString
+ }
+
+ private class RelCounter(prefix: String, override val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity {
+ override def toString =
+ if (value == 0) "0"
+ else {
+ assert(underlying.value != 0, prefix+"/"+underlying.line)
+ f"${value.toFloat / underlying.value}%2.1f"
+ }
+ }
+
+ class SubCounter(prefix: String, override val underlying: Counter) extends Counter(prefix, underlying.phases) with SubQuantity {
+ def start() = (value, underlying.value)
+ def stop(prev: (Int, Int)) {
+ val (value0, uvalue0) = prev
+ value = value0 + underlying.value - uvalue0
+ }
+ override def toString =
+ value + showPercent(value, underlying.value)
+ }
+
+ class Timer(val prefix: String, val phases: Seq[String]) extends Quantity {
+ var nanos: Long = 0
+ var timings = 0
+ def start() = {
+ (nanos, System.nanoTime())
+ }
+ def stop(prev: TimerSnapshot) {
+ val (nanos0, start) = prev
+ nanos = nanos0 + System.nanoTime() - start
+ timings += 1
+ }
+ protected def show(ns: Long) = s"${ns/1000000}ms"
+ override def toString = s"$timings spans, ${show(nanos)}"
+ }
+
+ class SubTimer(prefix: String, override val underlying: Timer) extends Timer(prefix, underlying.phases) with SubQuantity {
+ override protected def show(ns: Long) = super.show(ns) + showPercent(ns, underlying.nanos)
+ }
+
+ class StackableTimer(prefix: String, underlying: Timer) extends SubTimer(prefix, underlying) with Ordered[StackableTimer] {
+ var specificNanos: Long = 0
+ def compare(that: StackableTimer): Int =
+ if (this.specificNanos < that.specificNanos) -1
+ else if (this.specificNanos > that.specificNanos) 1
+ else 0
+ override def toString = s"${super.toString} aggregate, ${show(specificNanos)} specific"
+ }
+
+ /** A mutable map quantity where missing elements are automatically inserted
+ * on access by executing `initValue`.
+ */
+ class QuantMap[K, V <% Ordered[V]](val prefix: String, val phases: Seq[String], initValue: => V)
+ extends mutable.HashMap[K, V] with mutable.SynchronizedMap[K, V] with Quantity {
+ override def default(key: K) = {
+ val elem = initValue
+ this(key) = elem
+ elem
+ }
+ override def toString =
+ this.toSeq.sortWith(_._2 > _._2).map {
+ case (cls: Class[_], elem) =>
+ s"${cls.toString.substring(cls.toString.lastIndexOf("$") + 1)}: $elem"
+ case (key, elem) =>
+ s"$key: $elem"
+ }.mkString(", ")
+ }
+
+ /** A stack of timers, all active, where a timer's specific "clock"
+ * is stopped as long as it is buried by some other timer in the stack, but
+ * its aggregate clock keeps on ticking.
+ */
+ class TimerStack {
+ private var elems: List[(StackableTimer, Long)] = Nil
+ /** Start given timer and push it onto the stack */
+ def push(t: StackableTimer): TimerSnapshot = {
+ elems = (t, 0L) :: elems
+ t.start()
+ }
+ /** Stop and pop top timer in stack
+ */
+ def pop(prev: TimerSnapshot) = {
+ val (nanos0, start) = prev
+ val duration = System.nanoTime() - start
+ val (topTimer, nestedNanos) :: rest = elems
+ topTimer.nanos = nanos0 + duration
+ topTimer.specificNanos += duration - nestedNanos
+ topTimer.timings += 1
+ elems = rest match {
+ case (outerTimer, outerNested) :: elems1 =>
+ (outerTimer, outerNested + duration) :: elems1
+ case Nil =>
+ Nil
+ }
+ }
+ }
+
+ private var _enabled = false
+ private val qs = new mutable.HashMap[String, Quantity]
+
+ /** replace with
+ *
+ * final val canEnable = false
+ *
+ * to remove all Statistics code from build
+ */
+ final val canEnable = _enabled
+
+ /** replace with
+ *
+ * final def hotEnabled = _enabled
+ *
+ * and rebuild, to also count tiny but super-hot methods
+ * such as phase, flags, owner, name.
+ */
+ final val hotEnabled = false
+
+ def enabled = _enabled
+ def enabled_=(cond: Boolean) = {
+ if (cond && !_enabled) {
+ val test = new Timer("", Nil)
+ val start = System.nanoTime()
+ var total = 0L
+ for (i <- 1 to 10000) {
+ val time = System.nanoTime()
+ total += System.nanoTime() - time
+ }
+ val total2 = System.nanoTime() - start
+ println("Enabling statistics, measuring overhead = "+
+ total/10000.0+"ns to "+total2/10000.0+"ns per timer")
+ _enabled = true
+ }
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala
new file mode 100644
index 0000000..8f6c409
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/StringOps.scala
@@ -0,0 +1,99 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+package scala.reflect.internal.util
+
+/** This object provides utility methods to extract elements
+ * from Strings.
+ *
+ * @author Martin Odersky
+ * @version 1.0
+ */
+trait StringOps {
+ def onull(s: String) = if (s == null) "" else s
+ def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
+ def ojoin(xs: String*): String = oempty(xs: _*) mkString " "
+ def ojoin(xs: Seq[String], sep: String): String = oempty(xs: _*) mkString sep
+ def ojoinOr(xs: Seq[String], sep: String, orElse: String) = {
+ val ys = oempty(xs: _*)
+ if (ys.isEmpty) orElse else ys mkString sep
+ }
+ def trimTrailingSpace(s: String) = {
+ if (s.length == 0 || !s.charAt(s.length - 1).isWhitespace) s
+ else {
+ var idx = s.length - 1
+ while (idx >= 0 && s.charAt(idx).isWhitespace)
+ idx -= 1
+
+ s.substring(0, idx + 1)
+ }
+ }
+
+ def decompose(str: String, sep: Char): List[String] = {
+ def ws(start: Int): List[String] =
+ if (start == str.length) List()
+ else if (str.charAt(start) == sep) ws(start + 1)
+ else {
+ val end = str.indexOf(sep, start)
+ if (end < 0) List(str.substring(start))
+ else str.substring(start, end) :: ws(end + 1)
+ }
+ ws(0)
+ }
+
+ def words(str: String): List[String] = decompose(str, ' ')
+
+ def stripPrefixOpt(str: String, prefix: String): Option[String] =
+ if (str startsWith prefix) Some(str drop prefix.length)
+ else None
+
+ def stripSuffixOpt(str: String, suffix: String): Option[String] =
+ if (str endsWith suffix) Some(str dropRight suffix.length)
+ else None
+
+ def splitWhere(str: String, f: Char => Boolean, doDropIndex: Boolean = false): Option[(String, String)] =
+ splitAt(str, str indexWhere f, doDropIndex)
+
+ def splitAt(str: String, idx: Int, doDropIndex: Boolean = false): Option[(String, String)] =
+ if (idx == -1) None
+ else Some((str take idx, str drop (if (doDropIndex) idx + 1 else idx)))
+
+ /** Returns a string meaning "n elements".
+ *
+ * @param n ...
+ * @param elements ...
+ * @return ...
+ */
+ def countElementsAsString(n: Int, elements: String): String =
+ n match {
+ case 0 => "no " + elements + "s"
+ case 1 => "one " + elements
+ case 2 => "two " + elements + "s"
+ case 3 => "three " + elements + "s"
+ case 4 => "four " + elements + "s"
+ case _ => "" + n + " " + elements + "s"
+ }
+
+ /** Turns a count into a friendly English description if n<=4.
+ *
+ * @param n ...
+ * @return ...
+ */
+ def countAsString(n: Int): String =
+ n match {
+ case 0 => "none"
+ case 1 => "one"
+ case 2 => "two"
+ case 3 => "three"
+ case 4 => "four"
+ case _ => "" + n
+ }
+}
+
+object StringOps extends StringOps { }
diff --git a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
new file mode 100644
index 0000000..e757922
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
@@ -0,0 +1,40 @@
+package scala.reflect
+package internal
+package util
+
+trait StripMarginInterpolator {
+ def stringContext: StringContext
+
+ /**
+ * A safe combination of `[[scala.collection.immutable.StringLike#stripMargin]]
+ * and [[scala.StringContext#raw]].
+ *
+ * The margin of each line is defined by whitespace leading up to a '|' character.
+ * This margin is stripped '''before''' the arguments are interpolated into to string.
+ *
+ * String escape sequences are '''not''' processed; this interpolater is designed to
+ * be used with triple quoted Strings.
+ *
+ * {{{
+ * scala> val foo = "f|o|o"
+ * foo: String = f|o|o
+ * scala> sm"""|${foo}
+ * |"""
+ * res0: String =
+ * "f|o|o
+ * "
+ * }}}
+ */
+ final def sm(args: Any*): String = {
+ def isLineBreak(c: Char) = c == '\n' || c == '\f' // compatible with StringLike#isLineBreak
+ def stripTrailingPart(s: String) = {
+ val (pre, post) = s.span(c => !isLineBreak(c))
+ pre + post.stripMargin
+ }
+ val stripped: List[String] = stringContext.parts.toList match {
+ case head :: tail => head.stripMargin :: (tail map stripTrailingPart)
+ case Nil => Nil
+ }
+ new StringContext(stripped: _*).raw(args: _*)
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/util/TableDef.scala b/src/reflect/scala/reflect/internal/util/TableDef.scala
new file mode 100644
index 0000000..8e2bcc2
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/TableDef.scala
@@ -0,0 +1,95 @@
+package scala.reflect.internal.util
+
+import TableDef._
+import scala.language.postfixOps
+
+/** A class for representing tabular data in a way that preserves
+ * its inner beauty. See Exceptional for an example usage.
+ * One creates an instance of TableDef by defining the columns of
+ * the table, then uses that to create an instance of Table by
+ * passing in a sequence of rows.
+ */
+class TableDef[T](_cols: Column[T]*) {
+ /** These operators are about all there is to it.
+ *
+ * ~ appends a column to the table
+ * >> creates a right-justified column and appends it
+ * << creates a left-justified column and appends it
+ * >+ specifies a string to separate the previous column from the next.
+ * if none is specified, a space is used.
+ */
+ def ~(next: Column[T]) = retThis(cols :+= next)
+ def >>(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, false)
+ def <<(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, true)
+ def >+(sep: String) = retThis(separators += ((cols.size - 1, sep)))
+
+ /** Below this point should all be considered private/internal.
+ */
+ private var cols: List[Column[T]] = _cols.toList
+ private var separators: Map[Int, String] = Map()
+
+ def defaultSep(index: Int) = if (index > (cols.size - 2)) "" else " "
+ def sepAfter(i: Int): String = separators.getOrElse(i, defaultSep(i))
+ def sepWidths = cols.indices map (i => sepAfter(i).length)
+
+ def columns = cols
+ def colNames = cols map (_.name)
+ def colFunctions = cols map (_.f)
+ def colApply(el: T) = colFunctions map (f => f(el))
+ def retThis(body: => Unit): this.type = { body ; this }
+
+ class Table(val rows: Seq[T]) extends Seq[T] {
+ def iterator = rows.iterator
+ def apply(index: Int) = rows(index)
+ def length = rows.length
+
+ def maxColWidth(col: Column[T]) = col.name +: (rows map col.f) map (_.toString.length) max
+ def specs = cols map (_ formatSpec rows)
+
+ val colWidths = cols map maxColWidth
+ val rowFormat = mkFormatString(sepAfter)
+ val headFormat = mkFormatString(i => " " * sepWidths(i))
+ val argLists = rows map colApply
+
+ val headers = List(
+ headFormat.format(colNames: _*),
+ (colWidths, sepWidths).zipped map ((w1, w2) => "-" * w1 + " " * w2) mkString
+ )
+
+ def mkFormatString(sepf: Int => String): String =
+ specs.zipWithIndex map { case (c, i) => c + sepf(i) } mkString
+
+ def pp(): Unit = allToSeq foreach println
+
+ def toFormattedSeq = argLists map (xs => rowFormat.format(xs: _*))
+ def allToSeq = headers ++ toFormattedSeq
+
+ override def toString = allToSeq mkString "\n"
+ }
+
+ def formatterFor(rows: Seq[T]): T => String = {
+ val formatStr = new Table(rows).rowFormat
+
+ x => formatStr.format(colApply(x) : _*)
+ }
+
+ def table(rows: Seq[T]) = new Table(rows)
+
+ override def toString = cols.mkString("TableDef(", ", ", ")")
+}
+
+object TableDef {
+ case class Column[-T](name: String, f: T => Any, left: Boolean) {
+ def maxWidth(elems: Seq[T]): Int = name +: (elems map f) map (_.toString.length) max
+ def formatSpec(elems: Seq[T]): String = {
+ val justify = if (left) "-" else ""
+ "%" + justify + maxWidth(elems) + "s"
+ }
+ override def toString = {
+ val justify = if (left) "<<" else ">>"
+ justify + "(" + name + ")"
+ }
+ }
+
+ def apply[T](cols: Column[T]*) = new TableDef[T](cols: _*)
+}
diff --git a/src/reflect/scala/reflect/internal/util/ThreeValues.scala b/src/reflect/scala/reflect/internal/util/ThreeValues.scala
new file mode 100644
index 0000000..f89bd9e
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/ThreeValues.scala
@@ -0,0 +1,14 @@
+package scala.reflect.internal.util
+
+/** A simple three value type for booleans with an unknown value */
+object ThreeValues {
+
+ type ThreeValue = Byte
+
+ final val YES = 1
+ final val NO = -1
+ final val UNKNOWN = 0
+
+ def fromBoolean(b: Boolean): ThreeValue = if (b) YES else NO
+ def toBoolean(x: ThreeValue): Boolean = x == YES
+}
diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
new file mode 100644
index 0000000..fa83f70
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
@@ -0,0 +1,174 @@
+package scala.reflect.internal
+package util
+
+import scala.collection.{ mutable, immutable }
+import scala.language.postfixOps
+
+trait TraceSymbolActivity {
+ val global: SymbolTable
+ import global._
+
+ private[this] var enabled = traceSymbolActivity
+ if (enabled && global.isCompilerUniverse)
+ scala.sys addShutdownHook showAllSymbols()
+
+ private type Set[T] = scala.collection.immutable.Set[T]
+ private val Set = scala.collection.immutable.Set
+
+ val allSymbols = mutable.Map[Int, Symbol]()
+ val allChildren = mutable.Map[Int, List[Int]]() withDefaultValue Nil
+ val prevOwners = mutable.Map[Int, List[(Int, Phase)]]() withDefaultValue Nil
+ val symsCaused = mutable.Map[Int, Int]() withDefaultValue 0
+ val allTrees = mutable.Set[Tree]()
+
+ def recordSymbolsInTree(tree: Tree) {
+ if (enabled)
+ allTrees += tree
+ }
+
+ def recordNewSymbol(sym: Symbol) {
+ if (enabled && sym.id > 1) {
+ allSymbols(sym.id) = sym
+ allChildren(sym.owner.id) ::= sym.id
+ }
+ }
+ def recordNewSymbolOwner(sym: Symbol, newOwner: Symbol) {
+ if (enabled) {
+ val sid = sym.id
+ val oid = sym.owner.id
+ val nid = newOwner.id
+
+ prevOwners(sid) ::= (oid -> phase)
+ allChildren(oid) = allChildren(oid) filterNot (_ == sid)
+ allChildren(nid) ::= sid
+ }
+ }
+
+ /** TODO.
+ */
+ private def reachableDirectlyFromSymbol(sym: Symbol): List[Symbol] = (
+ List(sym.owner, sym.alias, sym.thisSym)
+ ++ sym.children
+ ++ sym.info.parents.map(_.typeSymbol)
+ ++ sym.typeParams
+ ++ sym.paramss.flatten
+ )
+ private def reachable[T](inputs: Traversable[T], mkSymbol: T => Symbol): Set[Symbol] = {
+ def loop(seen: Set[Symbol], remaining: List[Symbol]): Set[Symbol] = {
+ remaining match {
+ case Nil => seen
+ case head :: rest =>
+ if ((head eq null) || (head eq NoSymbol) || seen(head)) loop(seen, rest)
+ else loop(seen + head, rest ++ reachableDirectlyFromSymbol(head).filterNot(seen))
+ }
+ }
+ loop(immutable.Set(), inputs.toList map mkSymbol filterNot (_ eq null) distinct)
+ }
+ private def treeList(t: Tree) = {
+ val buf = mutable.ListBuffer[Tree]()
+ t foreach (buf += _)
+ buf.toList
+ }
+
+ private def reachableFromSymbol(root: Symbol): Set[Symbol] =
+ reachable[Symbol](List(root, root.info.typeSymbol), x => x)
+
+ private def reachableFromTree(tree: Tree): Set[Symbol] =
+ reachable[Tree](treeList(tree), _.symbol)
+
+ private def signature(id: Int) = runBeforeErasure(allSymbols(id).defString)
+
+ private def dashes(s: Any): String = ("" + s) map (_ => '-')
+ private def show(s1: Any, ss: Any*) {
+ println("%-12s".format(s1) +: ss mkString " ")
+ }
+ private def showHeader(s1: Any, ss: Any*) {
+ show(s1, ss: _*)
+ show(dashes(s1), ss map dashes: _*)
+ }
+ private def showSym(sym: Symbol) {
+ def prefix = (" " * (sym.ownerChain.length - 1)) + sym.id
+ try println("%s#%s %s".format(prefix, sym.accurateKindString, sym.name.decode))
+ catch {
+ case x: Throwable => println(prefix + " failed: " + x)
+ }
+ allChildren(sym.id).sorted foreach showIdAndRemove
+ }
+ private def showIdAndRemove(id: Int) {
+ allSymbols remove id foreach showSym
+ }
+ private def symbolStr(id: Int): String = {
+ if (id == 1) "NoSymbol" else {
+ val sym = allSymbols(id)
+ sym.accurateKindString + " " + sym.name.decode
+ }
+ }
+ private def ownerStr(id: Int): String = {
+ val sym = allSymbols(id)
+ sym.name.decode + "#" + sym.id
+ }
+
+ private def freq[T, U](xs: scala.collection.Traversable[T])(fn: T => U): List[(U, Int)] = {
+ val ys = xs groupBy fn mapValues (_.size)
+ ys.toList sortBy (-_._2)
+ }
+
+ private def showMapFreq[T](xs: scala.collection.Map[T, Traversable[_]])(showFn: T => String) {
+ xs.mapValues(_.size).toList.sortBy(-_._2) take 100 foreach { case (k, size) =>
+ show(size, showFn(k))
+ }
+ println("\n")
+ }
+ private def showFreq[T, U](xs: Traversable[T])(groupFn: T => U, showFn: U => String = (x: U) => "" + x) = {
+ showMapFreq(xs.toList groupBy groupFn)(showFn)
+ }
+ private lazy val findErasurePhase: Phase = {
+ var ph = phase
+ while (ph != NoPhase && ph.name != "erasure") {
+ ph = ph.prev
+ }
+ ph
+ }
+ private def runBeforeErasure[T](body: => T): T = atPhase(findErasurePhase)(body)
+
+ def showAllSymbols() {
+ if (!enabled) return
+ enabled = false
+ allSymbols(1) = NoSymbol
+
+ println("" + allSymbols.size + " symbols created.")
+ println("")
+
+ showHeader("descendants", "symbol")
+ showFreq(allSymbols.values flatMap (_.ownerChain drop 1))(_.id, symbolStr)
+
+ showHeader("children", "symbol")
+ showMapFreq(allChildren)(symbolStr)
+
+ if (prevOwners.nonEmpty) {
+ showHeader("prev owners", "symbol")
+ showMapFreq(prevOwners) { k =>
+ val owners = (((allSymbols(k).owner.id, NoPhase)) :: prevOwners(k)) map {
+ case (oid, NoPhase) => "-> owned by " + ownerStr(oid)
+ case (oid, ph) => "-> owned by %s (until %s)".format(ownerStr(oid), ph)
+ }
+ signature(k) :: owners mkString "\n "
+ }
+ }
+
+ val nameFreq = allSymbols.values.toList groupBy (_.name)
+ showHeader("frequency", "%-15s".format("name"), "owners")
+ showMapFreq(nameFreq) { name =>
+ "%-15s %s".format(name.decode, {
+ val owners = freq(nameFreq(name))(_.owner)
+
+ "%4s owners (%s)".format(
+ owners.size,
+ owners.take(3).map({ case (k, v) => v + "/" + k }).mkString(", ") + ", ..."
+ )
+ })
+ }
+
+ allSymbols.keys.toList.sorted foreach showIdAndRemove
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
new file mode 100644
index 0000000..fc12e31
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
@@ -0,0 +1,430 @@
+package scala
+package reflect.internal.util
+
+import java.lang.ref.{WeakReference, ReferenceQueue}
+import scala.annotation.tailrec
+import scala.collection.generic.Clearable
+import scala.collection.mutable.{Set => mSet}
+
+/**
+ * A HashSet where the elements are stored weakly. Elements in this set are eligible for GC if no other
+ * hard references are associated with them. Its primary use case is as a canonical reference
+ * identity holder (aka "hash-consing") via findEntryOrUpdate
+ *
+ * This Set implementation cannot hold null. Any attempt to put a null in it will result in a NullPointerException
+ *
+ * This set implementation is not in general thread safe without external concurrency control. However it behaves
+ * properly when GC concurrently collects elements in this set.
+ */
+final class WeakHashSet[A <: AnyRef](val initialCapacity: Int, val loadFactor: Double) extends Set[A] with Function1[A, Boolean] with mSet[A] {
+
+ import WeakHashSet._
+
+ def this() = this(initialCapacity = WeakHashSet.defaultInitialCapacity, loadFactor = WeakHashSet.defaultLoadFactor)
+
+ type This = WeakHashSet[A]
+
+ /**
+ * queue of Entries that hold elements scheduled for GC
+ * the removeStaleEntries() method works through the queue to remove
+ * stale entries from the table
+ */
+ private[this] val queue = new ReferenceQueue[A]
+
+ /**
+ * the number of elements in this set
+ */
+ private[this] var count = 0
+
+ /**
+ * from a specified initial capacity compute the capacity we'll use as being the next
+ * power of two equal to or greater than the specified initial capacity
+ */
+ private def computeCapacity = {
+ if (initialCapacity < 0) throw new IllegalArgumentException("initial capacity cannot be less than 0");
+ var candidate = 1
+ while (candidate < initialCapacity) {
+ candidate *= 2
+ }
+ candidate
+ }
+
+ /**
+ * the underlying table of entries which is an array of Entry linked lists
+ */
+ private[this] var table = new Array[Entry[A]](computeCapacity)
+
+ /**
+ * the limit at which we'll increase the size of the hash table
+ */
+ var threshhold = computeThreshHold
+
+ private[this] def computeThreshHold: Int = (table.size * loadFactor).ceil.toInt
+
+ /**
+ * find the bucket associated with an element's hash code
+ */
+ private[this] def bucketFor(hash: Int): Int = {
+ // spread the bits around to try to avoid accidental collisions using the
+ // same algorithm as java.util.HashMap
+ var h = hash
+ h ^= h >>> 20 ^ h >>> 12
+ h ^= h >>> 7 ^ h >>> 4
+
+ // this is finding h % table.length, but takes advantage of the
+ // fact that table length is a power of 2,
+ // if you don't do bit flipping in your head, if table.length
+ // is binary 100000.. (with n 0s) then table.length - 1
+ // is 1111.. with n 1's.
+ // In other words this masks on the last n bits in the hash
+ h & (table.length - 1)
+ }
+
+ /**
+ * remove a single entry from a linked list in a given bucket
+ */
+ private[this] def remove(bucket: Int, prevEntry: Entry[A], entry: Entry[A]) {
+ prevEntry match {
+ case null => table(bucket) = entry.tail
+ case _ => prevEntry.tail = entry.tail
+ }
+ count -= 1
+ }
+
+ /**
+ * remove entries associated with elements that have been gc'ed
+ */
+ private[this] def removeStaleEntries() {
+ def poll(): Entry[A] = queue.poll().asInstanceOf[Entry[A]]
+
+ @tailrec
+ def queueLoop {
+ val stale = poll()
+ if (stale != null) {
+ val bucket = bucketFor(stale.hash)
+
+ @tailrec
+ def linkedListLoop(prevEntry: Entry[A], entry: Entry[A]): Unit = if (stale eq entry) remove(bucket, prevEntry, entry)
+ else if (entry != null) linkedListLoop(entry, entry.tail)
+
+ linkedListLoop(null, table(bucket))
+
+ queueLoop
+ }
+ }
+
+ queueLoop
+ }
+
+ /**
+ * Double the size of the internal table
+ */
+ private[this] def resize() {
+ val oldTable = table
+ table = new Array[Entry[A]](oldTable.size * 2)
+ threshhold = computeThreshHold
+
+ @tailrec
+ def tableLoop(oldBucket: Int): Unit = if (oldBucket < oldTable.size) {
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): Unit = entry match {
+ case null => ()
+ case _ => {
+ val bucket = bucketFor(entry.hash)
+ val oldNext = entry.tail
+ entry.tail = table(bucket)
+ table(bucket) = entry
+ linkedListLoop(oldNext)
+ }
+ }
+ linkedListLoop(oldTable(oldBucket))
+
+ tableLoop(oldBucket + 1)
+ }
+ tableLoop(0)
+ }
+
+ // from scala.reflect.internal.Set, find an element or null if it isn't contained
+ override def findEntry(elem: A): A = elem match {
+ case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
+ case _ => {
+ removeStaleEntries()
+ val hash = elem.hashCode
+ val bucket = bucketFor(hash)
+
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): A = entry match {
+ case null => null.asInstanceOf[A]
+ case _ => {
+ val entryElem = entry.get
+ if (elem == entryElem) entryElem
+ else linkedListLoop(entry.tail)
+ }
+ }
+
+ linkedListLoop(table(bucket))
+ }
+ }
+ // add an element to this set unless it's already in there and return the element
+ def findEntryOrUpdate(elem: A): A = elem match {
+ case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
+ case _ => {
+ removeStaleEntries()
+ val hash = elem.hashCode
+ val bucket = bucketFor(hash)
+ val oldHead = table(bucket)
+
+ def add() = {
+ table(bucket) = new Entry(elem, hash, oldHead, queue)
+ count += 1
+ if (count > threshhold) resize()
+ elem
+ }
+
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): A = entry match {
+ case null => add()
+ case _ => {
+ val entryElem = entry.get
+ if (elem == entryElem) entryElem
+ else linkedListLoop(entry.tail)
+ }
+ }
+
+ linkedListLoop(oldHead)
+ }
+ }
+
+ // add an element to this set unless it's already in there and return this set
+ override def +(elem: A): this.type = elem match {
+ case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
+ case _ => {
+ removeStaleEntries()
+ val hash = elem.hashCode
+ val bucket = bucketFor(hash)
+ val oldHead = table(bucket)
+
+ def add() {
+ table(bucket) = new Entry(elem, hash, oldHead, queue)
+ count += 1
+ if (count > threshhold) resize()
+ }
+
+ @tailrec
+ def linkedListLoop(entry: Entry[A]): Unit = entry match {
+ case null => add()
+ case _ if (elem == entry.get) => ()
+ case _ => linkedListLoop(entry.tail)
+ }
+
+ linkedListLoop(oldHead)
+ this
+ }
+ }
+
+ def +=(elem: A) = this + elem
+
+ // from scala.reflect.internal.Set
+ override def addEntry(x: A) { this += x }
+
+ // remove an element from this set and return this set
+ override def -(elem: A): this.type = elem match {
+ case null => this
+ case _ => {
+ removeStaleEntries()
+ val bucket = bucketFor(elem.hashCode)
+
+
+
+ @tailrec
+ def linkedListLoop(prevEntry: Entry[A], entry: Entry[A]): Unit = entry match {
+ case null => ()
+ case _ if (elem == entry.get) => remove(bucket, prevEntry, entry)
+ case _ => linkedListLoop(entry, entry.tail)
+ }
+
+ linkedListLoop(null, table(bucket))
+ this
+ }
+ }
+
+ def -=(elem: A) = this - elem
+
+ // empty this set
+ override def clear(): Unit = {
+ table = new Array[Entry[A]](table.size)
+ threshhold = computeThreshHold
+ count = 0
+
+ // drain the queue - doesn't do anything because we're throwing away all the values anyway
+ @tailrec def queueLoop(): Unit = if (queue.poll() != null) queueLoop()
+ queueLoop()
+ }
+
+ // true if this set is empty
+ override def empty: This = new WeakHashSet[A](initialCapacity, loadFactor)
+
+ // the number of elements in this set
+ override def size: Int = {
+ removeStaleEntries()
+ count
+ }
+
+ override def apply(x: A): Boolean = this contains x
+
+ override def foreach[U](f: A => U): Unit = iterator foreach f
+
+ override def toList(): List[A] = iterator.toList
+
+ // Iterator over all the elements in this set in no particular order
+ override def iterator: Iterator[A] = {
+ removeStaleEntries()
+
+ new Iterator[A] {
+
+ /**
+ * the bucket currently being examined. Initially it's set past the last bucket and will be decremented
+ */
+ private[this] var currentBucket: Int = table.size
+
+ /**
+ * the entry that was last examined
+ */
+ private[this] var entry: Entry[A] = null
+
+ /**
+ * the element that will be the result of the next call to next()
+ */
+ private[this] var lookaheadelement: A = null.asInstanceOf[A]
+
+ @tailrec
+ def hasNext: Boolean = {
+ while (entry == null && currentBucket > 0) {
+ currentBucket -= 1
+ entry = table(currentBucket)
+ }
+
+ if (entry == null) false
+ else {
+ lookaheadelement = entry.get
+ if (lookaheadelement == null) {
+ // element null means the weakref has been cleared since we last did a removeStaleEntries(), move to the next entry
+ entry = entry.tail
+ hasNext
+ } else {
+ true
+ }
+ }
+ }
+
+ def next(): A = if (lookaheadelement == null)
+ throw new IndexOutOfBoundsException("next on an empty iterator")
+ else {
+ val result = lookaheadelement
+ lookaheadelement = null.asInstanceOf[A]
+ entry = entry.tail
+ result
+ }
+ }
+ }
+
+ /**
+ * Diagnostic information about the internals of this set. Not normally
+ * needed by ordinary code, but may be useful for diagnosing performance problems
+ */
+ private[util] class Diagnostics {
+ /**
+ * Verify that the internal structure of this hash set is fully consistent.
+ * Throws an assertion error on any problem. In order for it to be reliable
+ * the entries must be stable. If any are garbage collected during validation
+ * then an assertion may inappropriately fire.
+ */
+ def fullyValidate {
+ var computedCount = 0
+ var bucket = 0
+ while (bucket < table.size) {
+ var entry = table(bucket)
+ while (entry != null) {
+ assert(entry.get != null, s"$entry had a null value indicated that gc activity was happening during diagnostic validation or that a null value was inserted")
+ computedCount += 1
+ val cachedHash = entry.hash
+ val realHash = entry.get.hashCode
+ assert(cachedHash == realHash, s"for $entry cached hash was $cachedHash but should have been $realHash")
+ val computedBucket = bucketFor(realHash)
+ assert(computedBucket == bucket, s"for $entry the computed bucket was $computedBucket but should have been $bucket")
+
+ entry = entry.tail
+ }
+
+ bucket += 1
+ }
+
+ assert(computedCount == count, s"The computed count was $computedCount but should have been $count")
+ }
+
+ /**
+ * Produces a diagnostic dump of the table that underlies this hash set.
+ */
+ def dump = table.deep
+
+ /**
+ * Number of buckets that hold collisions. Useful for diagnosing performance issues.
+ */
+ def collisionBucketsCount: Int =
+ (table filter (entry => entry != null && entry.tail != null)).size
+
+ /**
+ * Number of buckets that are occupied in this hash table.
+ */
+ def fullBucketsCount: Int =
+ (table filter (entry => entry != null)).size
+
+ /**
+ * Number of buckets in the table
+ */
+ def bucketsCount: Int = table.size
+
+ /**
+ * Number of buckets that don't hold anything
+ */
+ def emptyBucketsCount = bucketsCount - fullBucketsCount
+
+ /**
+ * Number of elements that are in collision. Useful for diagnosing performance issues.
+ */
+ def collisionsCount = size - (fullBucketsCount - collisionBucketsCount)
+
+ /**
+ * A map from a count of elements to the number of buckets with that count
+ */
+ def elementCountDistribution = table map linkedListSize groupBy identity map {case (size, list) => (size, list.size)}
+
+ private def linkedListSize(entry: Entry[A]) = {
+ var e = entry
+ var count = 0
+ while (e != null) {
+ count += 1
+ e = e.tail
+ }
+ count
+ }
+ }
+
+ private[util] def diagnostics = new Diagnostics
+}
+
+/**
+ * Companion object for WeakHashSet
+ */
+object WeakHashSet {
+ /**
+ * A single entry in a WeakHashSet. It's a WeakReference plus a cached hash code and
+ * a link to the next Entry in the same bucket
+ */
+ private class Entry[A](element: A, val hash:Int, var tail: Entry[A], queue: ReferenceQueue[A]) extends WeakReference[A](element, queue)
+
+ val defaultInitialCapacity = 16
+ val defaultLoadFactor = .75
+
+ def apply[A <: AnyRef](initialCapacity: Int = WeakHashSet.defaultInitialCapacity, loadFactor: Double = WeakHashSet.defaultLoadFactor) = new WeakHashSet[A](initialCapacity, loadFactor)
+}
diff --git a/src/reflect/scala/reflect/internal/util/package.scala b/src/reflect/scala/reflect/internal/util/package.scala
new file mode 100644
index 0000000..6d77235
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/package.scala
@@ -0,0 +1,9 @@
+package scala.reflect
+package internal
+
+package object util {
+ /**
+ * Adds the `sm` String interpolator to a [[scala.StringContext]].
+ */
+ implicit class StringContextStripMarginOps(val stringContext: StringContext) extends StripMarginInterpolator
+}
diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala
new file mode 100644
index 0000000..15befb6
--- /dev/null
+++ b/src/reflect/scala/reflect/io/AbstractFile.scala
@@ -0,0 +1,264 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+
+package scala.reflect
+package io
+
+import java.io.{ FileOutputStream, IOException, InputStream, OutputStream, BufferedOutputStream }
+import java.io.{ File => JFile }
+import java.net.URL
+import scala.collection.mutable.ArrayBuffer
+
+/**
+ * An abstraction over files for use in the reflection/compiler libraries.
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ *
+ * @author Philippe Altherr
+ * @version 1.0, 23/03/2004
+ */
+object AbstractFile {
+ /** Returns "getFile(new File(path))". */
+ def getFile(path: String): AbstractFile = getFile(File(path))
+ def getFile(path: Path): AbstractFile = getFile(path.toFile)
+
+ /**
+ * If the specified File exists and is a regular file, returns an
+ * abstract regular file backed by it. Otherwise, returns <code>null</code>.
+ */
+ def getFile(file: File): AbstractFile =
+ if (file.isFile) new PlainFile(file) else null
+
+ /** Returns "getDirectory(new File(path))". */
+ def getDirectory(path: Path): AbstractFile = getDirectory(path.toFile)
+
+ /**
+ * If the specified File exists and is either a directory or a
+ * readable zip or jar archive, returns an abstract directory
+ * backed by it. Otherwise, returns <code>null</code>.
+ *
+ * @param file ...
+ * @return ...
+ */
+ def getDirectory(file: File): AbstractFile =
+ if (file.isDirectory) new PlainFile(file)
+ else if (file.isFile && Path.isExtensionJarOrZip(file.jfile)) ZipArchive fromFile file
+ else null
+
+ /**
+ * If the specified URL exists and is a readable zip or jar archive,
+ * returns an abstract directory backed by it. Otherwise, returns
+ * <code>null</code>.
+ *
+ * @param file ...
+ * @return ...
+ */
+ def getURL(url: URL): AbstractFile = {
+ if (url == null || !Path.isExtensionJarOrZip(url.getPath)) null
+ else ZipArchive fromURL url
+ }
+}
+
+/**
+ * <p>
+ * This class and its children serve to unify handling of files and
+ * directories. These files and directories may or may not have some
+ * real counter part within the file system. For example, some file
+ * handles reference files within a zip archive or virtual ones
+ * that exist only in memory.
+ * </p>
+ * <p>
+ * Every abstract file has a path (i.e. a full name) and a name
+ * (i.e. a short name) and may be backed by some real File. There are
+ * two different kinds of abstract files: regular files and
+ * directories. Regular files may be read and have a last modification
+ * time. Directories may list their content and look for subfiles with
+ * a specified name or path and of a specified kind.
+ * </p>
+ * <p>
+ * The interface does <b>not</b> allow to access the content.
+ * The class <code>symtab.classfile.AbstractFileReader</code> accesses
+ * bytes, knowing that the character set of classfiles is UTF-8. For
+ * all other cases, the class <code>SourceFile</code> is used, which honors
+ * <code>global.settings.encoding.value</code>.
+ * </p>
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+abstract class AbstractFile extends Iterable[AbstractFile] {
+
+ /** Returns the name of this abstract file. */
+ def name: String
+
+ /** Returns the path of this abstract file. */
+ def path: String
+
+ /** Returns the path of this abstract file in a canonical form. */
+ def canonicalPath: String = if (file == null) path else file.getCanonicalPath
+
+ /** Checks extension case insensitively. */
+ def hasExtension(other: String) = extension == other.toLowerCase
+ private lazy val extension: String = Path.extension(name)
+
+ /** The absolute file, if this is a relative file. */
+ def absolute: AbstractFile
+
+ /** Returns the containing directory of this abstract file */
+ def container : AbstractFile
+
+ /** Returns the underlying File if any and null otherwise. */
+ def file: JFile
+
+ /** An underlying source, if known. Mostly, a zip/jar file. */
+ def underlyingSource: Option[AbstractFile] = None
+
+ /** Does this abstract file denote an existing file? */
+ def exists: Boolean = (file eq null) || file.exists
+
+ /** Does this abstract file represent something which can contain classfiles? */
+ def isClassContainer = isDirectory || (file != null && (extension == "jar" || extension == "zip"))
+
+ /** Create a file on disk, if one does not exist already. */
+ def create(): Unit
+
+ /** Delete the underlying file or directory (recursively). */
+ def delete(): Unit
+
+ /** Is this abstract file a directory? */
+ def isDirectory: Boolean
+
+ /** Returns the time that this abstract file was last modified. */
+ def lastModified: Long
+
+ /** returns an input stream so the file can be read */
+ def input: InputStream
+
+ /** Returns an output stream for writing the file */
+ def output: OutputStream
+
+ /** Returns a buffered output stream for writing the file - defaults to out */
+ def bufferedOutput: BufferedOutputStream = new BufferedOutputStream(output)
+
+ /** size of this file if it is a concrete file. */
+ def sizeOption: Option[Int] = None
+
+ def toURL: URL = if (file == null) null else file.toURI.toURL
+
+ /** Returns contents of file (if applicable) in a Char array.
+ * warning: use <code>Global.getSourceFile()</code> to use the proper
+ * encoding when converting to the char array.
+ */
+ @throws(classOf[IOException])
+ def toCharArray = new String(toByteArray).toCharArray
+
+ /** Returns contents of file (if applicable) in a byte array.
+ */
+ @throws(classOf[IOException])
+ def toByteArray: Array[Byte] = {
+ val in = input
+ var rest = sizeOption.getOrElse(0)
+ val arr = new Array[Byte](rest)
+ while (rest > 0) {
+ val res = in.read(arr, arr.length - rest, rest)
+ if (res == -1)
+ throw new IOException("read error")
+ rest -= res
+ }
+ in.close()
+ arr
+ }
+
+ /** Returns all abstract subfiles of this abstract directory. */
+ def iterator: Iterator[AbstractFile]
+
+ /** Returns the abstract file in this abstract directory with the specified
+ * name. If there is no such file, returns <code>null</code>. The argument
+ * <code>directory</code> tells whether to look for a directory or
+ * a regular file.
+ */
+ def lookupName(name: String, directory: Boolean): AbstractFile
+
+ /** Returns an abstract file with the given name. It does not
+ * check that it exists.
+ */
+ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile
+
+ /** Returns the abstract file in this abstract directory with the specified
+ * path relative to it, If there is no such file, returns null. The argument
+ * <code>directory</code> tells whether to look for a directory or a regular
+ * file.
+ *
+ * @param path ...
+ * @param directory ...
+ * @return ...
+ */
+ def lookupPath(path: String, directory: Boolean): AbstractFile = {
+ lookup((f, p, dir) => f.lookupName(p, dir), path, directory)
+ }
+
+ /** Return an abstract file that does not check that `path` denotes
+ * an existing file.
+ */
+ def lookupPathUnchecked(path: String, directory: Boolean): AbstractFile = {
+ lookup((f, p, dir) => f.lookupNameUnchecked(p, dir), path, directory)
+ }
+
+ private def lookup(getFile: (AbstractFile, String, Boolean) => AbstractFile,
+ path0: String,
+ directory: Boolean): AbstractFile = {
+ val separator = java.io.File.separatorChar
+ // trim trailing '/'s
+ val path: String = if (path0.last == separator) path0 dropRight 1 else path0
+ val length = path.length()
+ assert(length > 0 && !(path.last == separator), path)
+ var file = this
+ var start = 0
+ while (true) {
+ val index = path.indexOf(separator, start)
+ assert(index < 0 || start < index, ((path, directory, start, index)))
+ val name = path.substring(start, if (index < 0) length else index)
+ file = getFile(file, name, if (index < 0) directory else true)
+ if ((file eq null) || index < 0) return file
+ start = index + 1
+ }
+ file
+ }
+
+ private def fileOrSubdirectoryNamed(name: String, isDir: Boolean): AbstractFile = {
+ val lookup = lookupName(name, isDir)
+ if (lookup != null) lookup
+ else {
+ val jfile = new JFile(file, name)
+ if (isDir) jfile.mkdirs() else jfile.createNewFile()
+ new PlainFile(jfile)
+ }
+ }
+
+ /**
+ * Get the file in this directory with the given name,
+ * creating an empty file if it does not already exist.
+ */
+ def fileNamed(name: String): AbstractFile = {
+ assert(isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
+ fileOrSubdirectoryNamed(name, false)
+ }
+
+ /**
+ * Get the subdirectory with a given name, creating it if it
+ * does not already exist.
+ */
+ def subdirectoryNamed(name: String): AbstractFile = {
+ assert (isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
+ fileOrSubdirectoryNamed(name, true)
+ }
+
+ protected def unsupported(): Nothing = unsupported(null)
+ protected def unsupported(msg: String): Nothing = throw new UnsupportedOperationException(msg)
+
+ /** Returns the path of this abstract file. */
+ override def toString() = path
+
+}
diff --git a/src/reflect/scala/reflect/io/Directory.scala b/src/reflect/scala/reflect/io/Directory.scala
new file mode 100644
index 0000000..c040d1e
--- /dev/null
+++ b/src/reflect/scala/reflect/io/Directory.scala
@@ -0,0 +1,79 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.reflect
+package io
+
+import java.io.{ File => JFile }
+/**
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+object Directory {
+ import scala.util.Properties.{ tmpDir, userHome, userDir }
+
+ private def normalizePath(s: String) = Some(apply(Path(s).normalize))
+ def Current: Option[Directory] = if (userDir == "") None else normalizePath(userDir)
+ def Home: Option[Directory] = if (userHome == "") None else normalizePath(userHome)
+ def TmpDir: Option[Directory] = if (tmpDir == "") None else normalizePath(tmpDir)
+
+ def apply(path: Path): Directory = path.toDirectory
+
+ // Like File.makeTemp but creates a directory instead
+ def makeTemp(prefix: String = Path.randomPrefix, suffix: String = null, dir: JFile = null): Directory = {
+ val path = File.makeTemp(prefix, suffix, dir)
+ path.delete()
+ path.createDirectory()
+ }
+}
+import Path._
+
+/** An abstraction for directories.
+ *
+ * @author Paul Phillips
+ * @since 2.8
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+class Directory(jfile: JFile) extends Path(jfile) {
+ override def toAbsolute: Directory = if (isAbsolute) this else super.toAbsolute.toDirectory
+ override def toDirectory: Directory = this
+ override def toFile: File = new File(jfile)
+ override def isValid = jfile.isDirectory() || !jfile.exists()
+ override def normalize: Directory = super.normalize.toDirectory
+
+ /** An iterator over the contents of this directory.
+ */
+ def list: Iterator[Path] =
+ jfile.listFiles match {
+ case null => Iterator.empty
+ case xs => xs.iterator map Path.apply
+ }
+
+ def dirs: Iterator[Directory] = list collect { case x: Directory => x }
+ def files: Iterator[File] = list collect { case x: File => x }
+
+ override def walkFilter(cond: Path => Boolean): Iterator[Path] =
+ list filter cond flatMap (_ walkFilter cond)
+
+ def deepDirs: Iterator[Directory] = Path.onlyDirs(deepList())
+ def deepFiles: Iterator[File] = Path.onlyFiles(deepList())
+
+ /** If optional depth argument is not given, will recurse
+ * until it runs out of contents.
+ */
+ def deepList(depth: Int = -1): Iterator[Path] =
+ if (depth < 0) list ++ (dirs flatMap (_ deepList (depth)))
+ else if (depth == 0) Iterator.empty
+ else list ++ (dirs flatMap (_ deepList (depth - 1)))
+
+ /** An iterator over the directories underneath this directory,
+ * to the (optionally) given depth.
+ */
+ def subdirs(depth: Int = 1): Iterator[Directory] =
+ deepList(depth) collect { case x: Directory => x }
+}
diff --git a/src/reflect/scala/reflect/io/File.scala b/src/reflect/scala/reflect/io/File.scala
new file mode 100644
index 0000000..736ba5d
--- /dev/null
+++ b/src/reflect/scala/reflect/io/File.scala
@@ -0,0 +1,197 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+package scala.reflect
+package io
+
+import java.io.{
+ FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
+ BufferedInputStream, BufferedOutputStream, IOException, PrintStream, PrintWriter, Closeable => JCloseable }
+import java.io.{ File => JFile }
+import java.nio.channels.{ Channel, FileChannel }
+import scala.io.Codec
+import scala.language.{reflectiveCalls, implicitConversions}
+/**
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+object File {
+ def pathSeparator = java.io.File.pathSeparator
+ def separator = java.io.File.separator
+
+ def apply(path: Path)(implicit codec: Codec) = new File(path.jfile)(codec)
+
+ // Create a temporary file, which will be deleted upon jvm exit.
+ def makeTemp(prefix: String = Path.randomPrefix, suffix: String = null, dir: JFile = null) = {
+ val jfile = java.io.File.createTempFile(prefix, suffix, dir)
+ jfile.deleteOnExit()
+ apply(jfile)
+ }
+
+ type HasClose = { def close(): Unit }
+
+ def closeQuietly(target: HasClose) {
+ try target.close() catch { case e: IOException => }
+ }
+ def closeQuietly(target: JCloseable) {
+ try target.close() catch { case e: IOException => }
+ }
+
+ // this is a workaround for http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6503430
+ // we are using a static initializer to statically initialize a java class so we don't
+ // trigger java.lang.InternalErrors later when using it concurrently. We ignore all
+ // the exceptions so as not to cause spurious failures when no write access is available,
+ // e.g. google app engine.
+ //
+ // XXX need to put this behind a setting.
+ //
+ // try {
+ // import Streamable.closing
+ // val tmp = java.io.File.createTempFile("bug6503430", null, null)
+ // try closing(new FileInputStream(tmp)) { in =>
+ // val inc = in.getChannel()
+ // closing(new FileOutputStream(tmp, true)) { out =>
+ // out.getChannel().transferFrom(inc, 0, 0)
+ // }
+ // }
+ // finally tmp.delete()
+ // }
+ // catch {
+ // case _: IllegalArgumentException | _: IllegalStateException | _: IOException | _: SecurityException => ()
+ // }
+}
+import File._
+import Path._
+
+/** An abstraction for files. For character data, a Codec
+ * can be supplied at either creation time or when a method
+ * involving character data is called (with the latter taking
+ * precedence if supplied.) If neither is available, the value
+ * of scala.io.Codec.default is used.
+ *
+ * @author Paul Phillips
+ * @since 2.8
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) with Streamable.Chars {
+ override val creationCodec = constructorCodec
+ def withCodec(codec: Codec): File = new File(jfile)(codec)
+
+ override def addExtension(ext: String): File = super.addExtension(ext).toFile
+ override def toAbsolute: File = if (isAbsolute) this else super.toAbsolute.toFile
+ override def toDirectory: Directory = new Directory(jfile)
+ override def toFile: File = this
+ override def normalize: File = super.normalize.toFile
+ override def isValid = jfile.isFile() || !jfile.exists()
+ override def length = super[Path].length
+ override def walkFilter(cond: Path => Boolean): Iterator[Path] =
+ if (cond(this)) Iterator.single(this) else Iterator.empty
+
+ /** Obtains an InputStream. */
+ def inputStream() = new FileInputStream(jfile)
+
+ /** Obtains a OutputStream. */
+ def outputStream(append: Boolean = false) = new FileOutputStream(jfile, append)
+ def bufferedOutput(append: Boolean = false) = new BufferedOutputStream(outputStream(append))
+ def printStream(append: Boolean = false) = new PrintStream(outputStream(append), true)
+
+ /** Obtains an OutputStreamWriter wrapped around a FileOutputStream.
+ * This should behave like a less broken version of java.io.FileWriter,
+ * in that unlike the java version you can specify the encoding.
+ */
+ def writer(): OutputStreamWriter = writer(false)
+ def writer(append: Boolean): OutputStreamWriter = writer(append, creationCodec)
+ def writer(append: Boolean, codec: Codec): OutputStreamWriter =
+ new OutputStreamWriter(outputStream(append), codec.charSet)
+
+ /** Wraps a BufferedWriter around the result of writer().
+ */
+ def bufferedWriter(): BufferedWriter = bufferedWriter(false)
+ def bufferedWriter(append: Boolean): BufferedWriter = bufferedWriter(append, creationCodec)
+ def bufferedWriter(append: Boolean, codec: Codec): BufferedWriter =
+ new BufferedWriter(writer(append, codec))
+
+ def printWriter(): PrintWriter = new PrintWriter(bufferedWriter(), true)
+ def printWriter(append: Boolean): PrintWriter = new PrintWriter(bufferedWriter(append), true)
+
+ /** Creates a new file and writes all the Strings to it. */
+ def writeAll(strings: String*): Unit = {
+ val out = bufferedWriter()
+ try strings foreach (out write _)
+ finally out.close()
+ }
+
+ def writeBytes(bytes: Array[Byte]): Unit = {
+ val out = bufferedOutput()
+ try out write bytes
+ finally out.close()
+ }
+
+ def appendAll(strings: String*): Unit = {
+ val out = bufferedWriter(append = true)
+ try strings foreach (out write _)
+ finally out.close()
+ }
+
+ /** Calls println on each string (so it adds a newline in the PrintWriter fashion.) */
+ def printlnAll(strings: String*): Unit = {
+ val out = printWriter()
+ try strings foreach (out println _)
+ finally out.close()
+ }
+
+ def safeSlurp(): Option[String] =
+ try Some(slurp())
+ catch { case _: IOException => None }
+
+ def copyTo(destPath: Path, preserveFileDate: Boolean = false): Boolean = {
+ val CHUNK = 1024 * 1024 * 16 // 16 MB
+ val dest = destPath.toFile
+ if (!isValid) fail("Source %s is not a valid file." format name)
+ if (this.normalize == dest.normalize) fail("Source and destination are the same.")
+ if (!dest.parent.exists) fail("Destination cannot be created.")
+ if (dest.exists && !dest.canWrite) fail("Destination exists but is not writable.")
+ if (dest.isDirectory) fail("Destination exists but is a directory.")
+
+ lazy val in_s = inputStream()
+ lazy val out_s = dest.outputStream()
+ lazy val in = in_s.getChannel()
+ lazy val out = out_s.getChannel()
+
+ try {
+ val size = in.size()
+ var pos, count = 0L
+ while (pos < size) {
+ count = (size - pos) min CHUNK
+ pos += out.transferFrom(in, pos, count)
+ }
+ }
+ finally List[HasClose](out, out_s, in, in_s) foreach closeQuietly
+
+ if (this.length != dest.length)
+ fail("Failed to completely copy %s to %s".format(name, dest.name))
+
+ if (preserveFileDate)
+ dest.lastModified = this.lastModified
+
+ true
+ }
+
+ /** Reflection since we're into the java 6+ API.
+ */
+ def setExecutable(executable: Boolean, ownerOnly: Boolean = true): Boolean = {
+ type JBoolean = java.lang.Boolean
+ val method =
+ try classOf[JFile].getMethod("setExecutable", classOf[Boolean], classOf[Boolean])
+ catch { case _: NoSuchMethodException => return false }
+
+ try method.invoke(jfile, executable: JBoolean, ownerOnly: JBoolean).asInstanceOf[JBoolean].booleanValue
+ catch { case _: Exception => false }
+ }
+}
diff --git a/src/reflect/scala/reflect/io/FileOperationException.scala b/src/reflect/scala/reflect/io/FileOperationException.scala
new file mode 100644
index 0000000..13a1322
--- /dev/null
+++ b/src/reflect/scala/reflect/io/FileOperationException.scala
@@ -0,0 +1,13 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+package scala.reflect
+package io
+/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
+case class FileOperationException(msg: String) extends RuntimeException(msg)
diff --git a/src/reflect/scala/reflect/io/NoAbstractFile.scala b/src/reflect/scala/reflect/io/NoAbstractFile.scala
new file mode 100644
index 0000000..8c88d3a
--- /dev/null
+++ b/src/reflect/scala/reflect/io/NoAbstractFile.scala
@@ -0,0 +1,33 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.reflect
+package io
+
+import java.io.InputStream
+import java.io.{ File => JFile }
+
+/** A distinguished object so you can avoid both null
+ * and Option.
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+object NoAbstractFile extends AbstractFile {
+ def absolute: AbstractFile = this
+ def container: AbstractFile = this
+ def create(): Unit = ???
+ def delete(): Unit = ???
+ def file: JFile = null
+ def input: InputStream = null
+ def isDirectory: Boolean = false
+ def iterator: Iterator[AbstractFile] = Iterator.empty
+ def lastModified: Long = 0L
+ def lookupName(name: String, directory: Boolean): AbstractFile = null
+ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = null
+ def name: String = ""
+ def output: java.io.OutputStream = null
+ def path: String = ""
+ override def toByteArray = Array[Byte]()
+}
diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala
new file mode 100644
index 0000000..36fdc04
--- /dev/null
+++ b/src/reflect/scala/reflect/io/Path.scala
@@ -0,0 +1,289 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.reflect
+package io
+
+import java.io.{
+ FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
+ BufferedInputStream, BufferedOutputStream, RandomAccessFile }
+import java.io.{ File => JFile }
+import java.net.{ URI, URL }
+import scala.util.Random.alphanumeric
+import scala.language.implicitConversions
+
+/** An abstraction for filesystem paths. The differences between
+ * Path, File, and Directory are primarily to communicate intent.
+ * Since the filesystem can change at any time, there is no way to
+ * reliably associate Files only with files and so on. Any Path
+ * can be converted to a File or Directory (and thus gain access to
+ * the additional entity specific methods) by calling toFile or
+ * toDirectory, which has no effect on the filesystem.
+ *
+ * Also available are createFile and createDirectory, which attempt
+ * to create the path in question.
+ *
+ * @author Paul Phillips
+ * @since 2.8
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+object Path {
+ def isExtensionJarOrZip(jfile: JFile): Boolean = isExtensionJarOrZip(jfile.getName)
+ def isExtensionJarOrZip(name: String): Boolean = {
+ val ext = extension(name)
+ ext == "jar" || ext == "zip"
+ }
+ def extension(name: String): String = {
+ var i = name.length - 1
+ while (i >= 0 && name.charAt(i) != '.')
+ i -= 1
+
+ if (i < 0) ""
+ else name.substring(i + 1).toLowerCase
+ }
+
+ // not certain these won't be problematic, but looks good so far
+ implicit def string2path(s: String): Path = apply(s)
+ implicit def jfile2path(jfile: JFile): Path = apply(jfile)
+
+ // java 7 style, we don't use it yet
+ // object AccessMode extends Enumeration {
+ // val EXECUTE, READ, WRITE = Value
+ // }
+ // def checkAccess(modes: AccessMode*): Boolean = {
+ // modes foreach {
+ // case EXECUTE => throw new Exception("Unsupported") // can't check in java 5
+ // case READ => if (!jfile.canRead()) return false
+ // case WRITE => if (!jfile.canWrite()) return false
+ // }
+ // true
+ // }
+
+ def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs filter (_.isDirectory) map (_.toDirectory)
+ def onlyDirs(xs: List[Path]): List[Directory] = xs filter (_.isDirectory) map (_.toDirectory)
+ def onlyFiles(xs: Iterator[Path]): Iterator[File] = xs filter (_.isFile) map (_.toFile)
+ def onlyFiles(xs: List[Path]): List[File] = xs filter (_.isFile) map (_.toFile)
+
+ def roots: List[Path] = java.io.File.listRoots().toList map Path.apply
+
+ def apply(segments: Seq[String]): Path = apply(segments mkString java.io.File.separator)
+ def apply(path: String): Path = apply(new JFile(path))
+ def apply(jfile: JFile): Path =
+ if (jfile.isFile) new File(jfile)
+ else if (jfile.isDirectory) new Directory(jfile)
+ else new Path(jfile)
+
+ /** Avoiding any shell/path issues by only using alphanumerics. */
+ private[io] def randomPrefix = alphanumeric take 6 mkString ""
+ private[io] def fail(msg: String) = throw FileOperationException(msg)
+}
+import Path._
+
+/** The Path constructor is private so we can enforce some
+ * semantics regarding how a Path might relate to the world.
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+class Path private[io] (val jfile: JFile) {
+ val separator = java.io.File.separatorChar
+ val separatorStr = java.io.File.separator
+
+ // Validation: this verifies that the type of this object and the
+ // contents of the filesystem are in agreement. All objects are
+ // valid except File objects whose path points to a directory and
+ // Directory objects whose path points to a file.
+ def isValid: Boolean = true
+
+ // conversions
+ def toFile: File = new File(jfile)
+ def toDirectory: Directory = new Directory(jfile)
+ def toAbsolute: Path = if (isAbsolute) this else Path(jfile.getAbsolutePath())
+ def toCanonical: Path = Path(jfile.getCanonicalPath())
+ def toURI: URI = jfile.toURI()
+ def toURL: URL = toURI.toURL()
+ /** If this path is absolute, returns it: otherwise, returns an absolute
+ * path made up of root / this.
+ */
+ def toAbsoluteWithRoot(root: Path) = if (isAbsolute) this else root.toAbsolute / this
+
+ /** Creates a new Path with the specified path appended. Assumes
+ * the type of the new component implies the type of the result.
+ */
+ def /(child: Path): Path = if (isEmpty) child else new Path(new JFile(jfile, child.path))
+ def /(child: Directory): Directory = /(child: Path).toDirectory
+ def /(child: File): File = /(child: Path).toFile
+
+ /** If this path is a container, recursively iterate over its contents.
+ * The supplied condition is a filter which is applied to each element,
+ * with that branch of the tree being closed off if it is true. So for
+ * example if the condition is true for some subdirectory, nothing
+ * under that directory will be in the Iterator; but otherwise each
+ * file and subdirectory underneath it will appear.
+ */
+ def walkFilter(cond: Path => Boolean): Iterator[Path] =
+ if (isFile) toFile walkFilter cond
+ else if (isDirectory) toDirectory walkFilter cond
+ else Iterator.empty
+
+ /** Equivalent to walkFilter(_ => false).
+ */
+ def walk: Iterator[Path] = walkFilter(_ => true)
+
+ // identity
+ def name: String = jfile.getName()
+ def path: String = jfile.getPath()
+ def normalize: Path = Path(jfile.getAbsolutePath())
+ def isRootPath: Boolean = roots exists (_ isSame this)
+
+ def resolve(other: Path) = if (other.isAbsolute || isEmpty) other else /(other)
+ def relativize(other: Path) = {
+ assert(isAbsolute == other.isAbsolute, "Paths not of same type: "+this+", "+other)
+
+ def createRelativePath(baseSegs: List[String], otherSegs: List[String]) : String = {
+ (baseSegs, otherSegs) match {
+ case (b :: bs, o :: os) if b == o => createRelativePath(bs, os)
+ case (bs, os) => ((".."+separator)*bs.length)+os.mkString(separatorStr)
+ }
+ }
+
+ Path(createRelativePath(segments, other.segments))
+ }
+
+ // derived from identity
+ def root: Option[Path] = roots find (this startsWith _)
+ def segments: List[String] = (path split separator).toList filterNot (_.length == 0)
+ /**
+ * @return The path of the parent directory, or root if path is already root
+ */
+ def parent: Directory = path match {
+ case "" | "." => Directory("..")
+ case _ =>
+ // the only solution <-- a comment which could have used elaboration
+ if (segments.nonEmpty && segments.last == "..")
+ (path / "..").toDirectory
+ else jfile.getParent match {
+ case null =>
+ if (isAbsolute) toDirectory // it should be a root. BTW, don't need to worry about relative pathed root
+ else Directory(".") // a dir under pwd
+ case x =>
+ Directory(x)
+ }
+ }
+ def parents: List[Directory] = {
+ val p = parent
+ if (p isSame this) Nil else p :: p.parents
+ }
+ // if name ends with an extension (e.g. "foo.jpg") returns the extension ("jpg"), otherwise ""
+ def extension: String = {
+ var i = name.length - 1
+ while (i >= 0 && name.charAt(i) != '.')
+ i -= 1
+
+ if (i < 0) ""
+ else name.substring(i + 1)
+ }
+ // def extension: String = (name lastIndexOf '.') match {
+ // case -1 => ""
+ // case idx => name drop (idx + 1)
+ // }
+ // compares against extensions in a CASE INSENSITIVE way.
+ def hasExtension(ext: String, exts: String*) = {
+ val lower = extension.toLowerCase
+ ext.toLowerCase == lower || exts.exists(_.toLowerCase == lower)
+ }
+ // returns the filename without the extension.
+ def stripExtension: String = name stripSuffix ("." + extension)
+ // returns the Path with the extension.
+ def addExtension(ext: String): Path = Path(path + "." + ext)
+ // changes the existing extension out for a new one, or adds it
+ // if the current path has none.
+ def changeExtension(ext: String): Path = (
+ if (extension == "") addExtension(ext)
+ else Path(path.stripSuffix(extension) + ext)
+ )
+
+ // conditionally execute
+ def ifFile[T](f: File => T): Option[T] = if (isFile) Some(f(toFile)) else None
+ def ifDirectory[T](f: Directory => T): Option[T] = if (isDirectory) Some(f(toDirectory)) else None
+
+ // Boolean tests
+ def canRead = jfile.canRead()
+ def canWrite = jfile.canWrite()
+ def exists = jfile.exists()
+ def notExists = try !jfile.exists() catch { case ex: SecurityException => false }
+
+ def isFile = jfile.isFile()
+ def isDirectory = jfile.isDirectory()
+ def isAbsolute = jfile.isAbsolute()
+ def isHidden = jfile.isHidden()
+ def isEmpty = path.length == 0
+
+ // Information
+ def lastModified = jfile.lastModified()
+ def lastModified_=(time: Long) = jfile setLastModified time // should use setXXX function?
+ def length = jfile.length()
+
+ // Boolean path comparisons
+ def endsWith(other: Path) = segments endsWith other.segments
+ def startsWith(other: Path) = segments startsWith other.segments
+ def isSame(other: Path) = toCanonical == other.toCanonical
+ def isFresher(other: Path) = lastModified > other.lastModified
+
+ // creations
+ def createDirectory(force: Boolean = true, failIfExists: Boolean = false): Directory = {
+ val res = if (force) jfile.mkdirs() else jfile.mkdir()
+ if (!res && failIfExists && exists) fail("Directory '%s' already exists." format name)
+ else if (isDirectory) toDirectory
+ else new Directory(jfile)
+ }
+ def createFile(failIfExists: Boolean = false): File = {
+ val res = jfile.createNewFile()
+ if (!res && failIfExists && exists) fail("File '%s' already exists." format name)
+ else if (isFile) toFile
+ else new File(jfile)
+ }
+
+ // deletions
+ def delete() = jfile.delete()
+ def deleteIfExists() = if (jfile.exists()) delete() else false
+
+ /** Deletes the path recursively. Returns false on failure.
+ * Use with caution!
+ */
+ def deleteRecursively(): Boolean = deleteRecursively(jfile)
+ private def deleteRecursively(f: JFile): Boolean = {
+ if (f.isDirectory) f.listFiles match {
+ case null =>
+ case xs => xs foreach deleteRecursively
+ }
+ f.delete()
+ }
+
+ def truncate() =
+ isFile && {
+ val raf = new RandomAccessFile(jfile, "rw")
+ raf setLength 0
+ raf.close()
+ length == 0
+ }
+
+ def touch(modTime: Long = System.currentTimeMillis) = {
+ createFile()
+ if (isFile)
+ lastModified = modTime
+ }
+
+ // todo
+ // def copyTo(target: Path, options ...): Boolean
+ // def moveTo(target: Path, options ...): Boolean
+
+ override def toString() = path
+ override def equals(other: Any) = other match {
+ case x: Path => path == x.path
+ case _ => false
+ }
+ override def hashCode() = path.hashCode()
+}
diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala
new file mode 100644
index 0000000..82b0568
--- /dev/null
+++ b/src/reflect/scala/reflect/io/PlainFile.scala
@@ -0,0 +1,104 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+
+package scala.reflect
+package io
+
+import java.io.{ FileInputStream, FileOutputStream, IOException }
+import PartialFunction._
+/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
+object PlainFile {
+ /**
+ * If the specified File exists, returns an abstract file backed
+ * by it. Otherwise, returns null.
+ */
+ def fromPath(file: Path): PlainFile =
+ if (file.isDirectory) new PlainDirectory(file.toDirectory)
+ else if (file.isFile) new PlainFile(file)
+ else null
+}
+/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
+class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) {
+ override def isDirectory = true
+ override def iterator = givenPath.list filter (_.exists) map (x => new PlainFile(x))
+ override def delete(): Unit = givenPath.deleteRecursively()
+}
+
+/** This class implements an abstract file backed by a File.
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+class PlainFile(val givenPath: Path) extends AbstractFile {
+ assert(path ne null)
+
+ val file = givenPath.jfile
+ override def underlyingSource = Some(this)
+
+ private val fpath = givenPath.toAbsolute
+
+ /** Returns the name of this abstract file. */
+ def name = givenPath.name
+
+ /** Returns the path of this abstract file. */
+ def path = givenPath.path
+
+ /** The absolute file. */
+ def absolute = new PlainFile(givenPath.toAbsolute)
+
+ override def container: AbstractFile = new PlainFile(givenPath.parent)
+ override def input = givenPath.toFile.inputStream()
+ override def output = givenPath.toFile.outputStream()
+ override def sizeOption = Some(givenPath.length.toInt)
+
+ override def toString = path
+ override def hashCode(): Int = fpath.hashCode
+ override def equals(that: Any): Boolean = that match {
+ case x: PlainFile => fpath == x.fpath
+ case _ => false
+ }
+
+ /** Is this abstract file a directory? */
+ def isDirectory: Boolean = givenPath.isDirectory
+
+ /** Returns the time that this abstract file was last modified. */
+ def lastModified: Long = givenPath.lastModified
+
+ /** Returns all abstract subfiles of this abstract directory. */
+ def iterator: Iterator[AbstractFile] = {
+ if (!isDirectory) Iterator.empty
+ else givenPath.toDirectory.list filter (_.exists) map (new PlainFile(_))
+ }
+
+ /**
+ * Returns the abstract file in this abstract directory with the
+ * specified name. If there is no such file, returns null. The
+ * argument "directory" tells whether to look for a directory or
+ * a regular file.
+ *
+ * @param name ...
+ * @param directory ...
+ * @return ...
+ */
+ def lookupName(name: String, directory: Boolean): AbstractFile = {
+ val child = givenPath / name
+ if ((child.isDirectory && directory) || (child.isFile && !directory)) new PlainFile(child)
+ else null
+ }
+
+ /** Creates the underlying file, if it does not already exist. */
+ def create(): Unit = if (!exists) givenPath.createFile()
+
+ /** Delete the underlying file or directory (recursively). */
+ def delete(): Unit =
+ if (givenPath.isFile) givenPath.delete()
+ else if (givenPath.isDirectory) givenPath.toDirectory.deleteRecursively()
+
+ /** Returns a plain file with the given name. It does not
+ * check that it exists.
+ */
+ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile =
+ new PlainFile(givenPath / name)
+}
diff --git a/src/reflect/scala/reflect/io/Streamable.scala b/src/reflect/scala/reflect/io/Streamable.scala
new file mode 100644
index 0000000..61ec8a4
--- /dev/null
+++ b/src/reflect/scala/reflect/io/Streamable.scala
@@ -0,0 +1,127 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.reflect
+package io
+
+import java.net.{ URI, URL }
+import java.io.{ BufferedInputStream, InputStream, PrintStream }
+import java.io.{ BufferedReader, InputStreamReader, Closeable => JCloseable }
+import scala.io.{ Codec, BufferedSource, Source }
+import scala.collection.mutable.ArrayBuffer
+import Path.fail
+
+/** Traits for objects which can be represented as Streams.
+ *
+ * @author Paul Phillips
+ * @since 2.8
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+object Streamable {
+ /** Traits which can be viewed as a sequence of bytes. Source types
+ * which know their length should override def length: Long for more
+ * efficient method implementations.
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+ trait Bytes {
+ def inputStream(): InputStream
+ def length: Long = -1
+
+ def bufferedInput() = new BufferedInputStream(inputStream())
+ def bytes(): Iterator[Byte] = bytesAsInts() map (_.toByte)
+ def bytesAsInts(): Iterator[Int] = {
+ val in = bufferedInput()
+ Iterator continually in.read() takeWhile (_ != -1)
+ }
+
+ /** This method aspires to be the fastest way to read
+ * a stream of known length into memory.
+ */
+ def toByteArray(): Array[Byte] = {
+ // if we don't know the length, fall back on relative inefficiency
+ if (length == -1L)
+ return (new ArrayBuffer[Byte]() ++= bytes()).toArray
+
+ val arr = new Array[Byte](length.toInt)
+ val len = arr.length
+ lazy val in = bufferedInput()
+ var offset = 0
+
+ def loop() {
+ if (offset < len) {
+ val read = in.read(arr, offset, len - offset)
+ if (read >= 0) {
+ offset += read
+ loop()
+ }
+ }
+ }
+ try loop()
+ finally in.close()
+
+ if (offset == arr.length) arr
+ else fail("Could not read entire source (%d of %d bytes)".format(offset, len))
+ }
+ }
+
+ /** For objects which can be viewed as Chars.
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+ trait Chars extends Bytes {
+ /** Calls to methods requiring byte<->char transformations should be offered
+ * in a form which allows specifying the codec. When it is not specified,
+ * the one discovered at creation time will be used, which will always find the
+ * one in scala.io.Codec if no other is available. This can be overridden
+ * to use a different default.
+ */
+ def creationCodec: Codec = implicitly[Codec]
+
+ def chars(): BufferedSource = chars(creationCodec)
+ def chars(codec: Codec): BufferedSource = Source.fromInputStream(inputStream())(codec)
+
+ def lines(): Iterator[String] = lines(creationCodec)
+ def lines(codec: Codec): Iterator[String] = chars(codec).getLines()
+
+ /** Obtains an InputStreamReader wrapped around a FileInputStream.
+ */
+ def reader(): InputStreamReader = reader(creationCodec)
+ def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream, codec.charSet)
+
+ /** Wraps a BufferedReader around the result of reader().
+ */
+ def bufferedReader(): BufferedReader = bufferedReader(creationCodec)
+ def bufferedReader(codec: Codec) = new BufferedReader(reader(codec))
+
+ /** Creates a BufferedReader and applies the closure, automatically closing it on completion.
+ */
+ def applyReader[T](f: BufferedReader => T): T = {
+ val in = bufferedReader()
+ try f(in)
+ finally in.close()
+ }
+
+ /** Convenience function to import entire file into a String.
+ */
+ def slurp(): String = slurp(creationCodec)
+ def slurp(codec: Codec) = chars(codec).mkString
+ }
+
+ /** Call a function on something Closeable, finally closing it. */
+ def closing[T <: JCloseable, U](stream: T)(f: T => U): U =
+ try f(stream)
+ finally stream.close()
+
+ def bytes(is: => InputStream): Array[Byte] =
+ (new Bytes { def inputStream() = is }).toByteArray
+
+ def slurp(is: => InputStream)(implicit codec: Codec): String =
+ new Chars { def inputStream() = is } slurp codec
+
+ def slurp(url: URL)(implicit codec: Codec): String =
+ slurp(url.openStream())
+}
diff --git a/src/reflect/scala/reflect/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala
new file mode 100644
index 0000000..78713c2
--- /dev/null
+++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala
@@ -0,0 +1,72 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ */
+
+package scala.reflect
+package io
+
+import scala.collection.mutable
+
+/**
+ * An in-memory directory.
+ *
+ * @author Lex Spoon
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+class VirtualDirectory(val name: String, maybeContainer: Option[VirtualDirectory])
+extends AbstractFile {
+ def path: String =
+ maybeContainer match {
+ case None => name
+ case Some(parent) => parent.path+'/'+ name
+ }
+
+ def absolute = this
+
+ def container = maybeContainer.get
+ def isDirectory = true
+ var lastModified: Long = System.currentTimeMillis
+
+ override def file = null
+ override def input = sys.error("directories cannot be read")
+ override def output = sys.error("directories cannot be written")
+
+ /** Creating a virtual directory is unsupported. */
+ def create() { unsupported }
+
+ /** Delete the underlying file or directory (recursively). */
+ def delete() { unsupported }
+
+ /** Returns an abstract file with the given name. It does not
+ * check that it exists.
+ */
+ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported
+
+ private val files = mutable.Map.empty[String, AbstractFile]
+
+ // the toList is so that the directory may continue to be
+ // modified while its elements are iterated
+ def iterator = files.values.toList.iterator
+
+ override def lookupName(name: String, directory: Boolean): AbstractFile =
+ (files get name filter (_.isDirectory == directory)).orNull
+
+ override def fileNamed(name: String): AbstractFile =
+ Option(lookupName(name, false)) getOrElse {
+ val newFile = new VirtualFile(name, path+'/'+name)
+ files(name) = newFile
+ newFile
+ }
+
+ override def subdirectoryNamed(name: String): AbstractFile =
+ Option(lookupName(name, true)) getOrElse {
+ val dir = new VirtualDirectory(name, Some(this))
+ files(name) = dir
+ dir
+ }
+
+ def clear() {
+ files.clear();
+ }
+}
diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala
new file mode 100644
index 0000000..95f4429
--- /dev/null
+++ b/src/reflect/scala/reflect/io/VirtualFile.scala
@@ -0,0 +1,103 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+
+package scala.reflect
+package io
+
+import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream }
+import java.io.{ File => JFile }
+
+/** This class implements an in-memory file.
+ *
+ * @author Philippe Altherr
+ * @version 1.0, 23/03/2004
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+class VirtualFile(val name: String, override val path: String) extends AbstractFile {
+ /**
+ * Initializes this instance with the specified name and an
+ * identical path.
+ *
+ * @param name the name of the virtual file to be created
+ * @return the created virtual file
+ */
+ def this(name: String) = this(name, name)
+
+ override def hashCode = path.hashCode
+ override def equals(that: Any) = that match {
+ case x: VirtualFile => x.path == path
+ case _ => false
+ }
+
+ //########################################################################
+ // Private data
+ private var content = Array.emptyByteArray
+
+ //########################################################################
+ // Public Methods
+ def absolute = this
+
+ /** Returns null. */
+ final def file: JFile = null
+
+ override def sizeOption: Option[Int] = Some(content.size)
+
+ def input : InputStream = new ByteArrayInputStream(content);
+
+ override def output: OutputStream = {
+ new ByteArrayOutputStream() {
+ override def close() {
+ super.close()
+ content = toByteArray()
+ }
+ }
+ }
+
+ def container: AbstractFile = NoAbstractFile
+
+ /** Is this abstract file a directory? */
+ def isDirectory: Boolean = false
+
+ /** Returns the time that this abstract file was last modified. */
+ private var _lastModified: Long = 0
+ def lastModified: Long = _lastModified
+ def lastModified_=(x: Long) = _lastModified = x
+
+ /** Returns all abstract subfiles of this abstract directory. */
+ def iterator: Iterator[AbstractFile] = {
+ assert(isDirectory, "not a directory '" + this + "'")
+ Iterator.empty
+ }
+
+ /** Creating an in-memory file on disk is unsupported. */
+ def create() { unsupported }
+
+ /** Delete the underlying file or directory (recursively). */
+ def delete() { unsupported }
+
+ /**
+ * Returns the abstract file in this abstract directory with the
+ * specified name. If there is no such file, returns null. The
+ * argument "directory" tells whether to look for a directory or
+ * a regular file.
+ *
+ * @param name ...
+ * @param directory ...
+ * @return ...
+ */
+ def lookupName(name: String, directory: Boolean): AbstractFile = {
+ assert(isDirectory, "not a directory '" + this + "'")
+ null
+ }
+
+ /** Returns an abstract file with the given name. It does not
+ * check that it exists.
+ */
+ def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
+
+ //########################################################################
+}
diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala
new file mode 100644
index 0000000..3b57721
--- /dev/null
+++ b/src/reflect/scala/reflect/io/ZipArchive.scala
@@ -0,0 +1,234 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.reflect
+package io
+
+import java.net.URL
+import java.io.{ IOException, InputStream, ByteArrayInputStream }
+import java.io.{ File => JFile }
+import java.util.zip.{ ZipEntry, ZipFile, ZipInputStream }
+import scala.collection.{ immutable, mutable }
+import scala.annotation.tailrec
+
+/** An abstraction for zip files and streams. Everything is written the way
+ * it is for performance: we come through here a lot on every run. Be careful
+ * about changing it.
+ *
+ * @author Philippe Altherr (original version)
+ * @author Paul Phillips (this one)
+ * @version 2.0,
+ *
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
+ */
+object ZipArchive {
+ def fromPath(path: String): FileZipArchive = fromFile(new JFile(path))
+ def fromPath(path: Path): FileZipArchive = fromFile(path.toFile)
+
+ /**
+ * @param file a File
+ * @return A ZipArchive if `file` is a readable zip file, otherwise null.
+ */
+ def fromFile(file: File): FileZipArchive = fromFile(file.jfile)
+ def fromFile(file: JFile): FileZipArchive =
+ try { new FileZipArchive(file) }
+ catch { case _: IOException => null }
+
+ /**
+ * @param url the url of a zip file
+ * @return A ZipArchive backed by the given url.
+ */
+ def fromURL(url: URL): URLZipArchive = new URLZipArchive(url)
+ def fromURL(url: String): URLZipArchive = fromURL(new URL(url))
+
+ private def dirName(path: String) = splitPath(path, true)
+ private def baseName(path: String) = splitPath(path, false)
+ private def splitPath(path0: String, front: Boolean): String = {
+ val isDir = path0.charAt(path0.length - 1) == '/'
+ val path = if (isDir) path0.substring(0, path0.length - 1) else path0
+ val idx = path.lastIndexOf('/')
+
+ if (idx < 0)
+ if (front) "/"
+ else path
+ else
+ if (front) path.substring(0, idx + 1)
+ else path.substring(idx + 1)
+ }
+}
+import ZipArchive._
+/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
+abstract class ZipArchive(override val file: JFile) extends AbstractFile with Equals {
+ self =>
+
+ override def underlyingSource = Some(this)
+ def isDirectory = true
+ def lookupName(name: String, directory: Boolean) = unsupported
+ def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
+ def create() = unsupported
+ def delete() = unsupported
+ def output = unsupported
+ def container = unsupported
+ def absolute = unsupported
+
+ private def walkIterator(its: Iterator[AbstractFile]): Iterator[AbstractFile] = {
+ its flatMap { f =>
+ if (f.isDirectory) walkIterator(f.iterator)
+ else Iterator(f)
+ }
+ }
+ def deepIterator = walkIterator(iterator)
+ /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
+ sealed abstract class Entry(path: String) extends VirtualFile(baseName(path), path) {
+ // have to keep this name for compat with sbt's compiler-interface
+ def getArchive: ZipFile = null
+ override def underlyingSource = Some(self)
+ override def toString = self.path + "(" + path + ")"
+ }
+ /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
+ class DirEntry(path: String) extends Entry(path) {
+ val entries = mutable.HashMap[String, Entry]()
+
+ override def isDirectory = true
+ override def iterator: Iterator[Entry] = entries.valuesIterator
+ override def lookupName(name: String, directory: Boolean): Entry = {
+ if (directory) entries(name + "/")
+ else entries(name)
+ }
+ }
+
+ private def ensureDir(dirs: mutable.Map[String, DirEntry], path: String, zipEntry: ZipEntry): DirEntry =
+ //OPT inlined from getOrElseUpdate; saves ~50K closures on test run.
+ // was:
+ // dirs.getOrElseUpdate(path, {
+ // val parent = ensureDir(dirs, dirName(path), null)
+ // val dir = new DirEntry(path)
+ // parent.entries(baseName(path)) = dir
+ // dir
+ // })
+ dirs get path match {
+ case Some(v) => v
+ case None =>
+ val parent = ensureDir(dirs, dirName(path), null)
+ val dir = new DirEntry(path)
+ parent.entries(baseName(path)) = dir
+ dirs(path) = dir
+ dir
+ }
+
+ protected def getDir(dirs: mutable.Map[String, DirEntry], entry: ZipEntry): DirEntry = {
+ if (entry.isDirectory) ensureDir(dirs, entry.getName, entry)
+ else ensureDir(dirs, dirName(entry.getName), null)
+ }
+}
+/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
+final class FileZipArchive(file: JFile) extends ZipArchive(file) {
+ def iterator: Iterator[Entry] = {
+ val zipFile = new ZipFile(file)
+ val root = new DirEntry("/")
+ val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
+ val enum = zipFile.entries()
+
+ while (enum.hasMoreElements) {
+ val zipEntry = enum.nextElement
+ val dir = getDir(dirs, zipEntry)
+ if (zipEntry.isDirectory) dir
+ else {
+ class FileEntry() extends Entry(zipEntry.getName) {
+ override def getArchive = zipFile
+ override def lastModified = zipEntry.getTime()
+ override def input = getArchive getInputStream zipEntry
+ override def sizeOption = Some(zipEntry.getSize().toInt)
+ }
+ val f = new FileEntry()
+ dir.entries(f.name) = f
+ }
+ }
+
+ try root.iterator
+ finally dirs.clear()
+ }
+
+ def name = file.getName
+ def path = file.getPath
+ def input = File(file).inputStream()
+ def lastModified = file.lastModified
+
+ override def sizeOption = Some(file.length.toInt)
+ override def canEqual(other: Any) = other.isInstanceOf[FileZipArchive]
+ override def hashCode() = file.hashCode
+ override def equals(that: Any) = that match {
+ case x: FileZipArchive => file.getAbsoluteFile == x.file.getAbsoluteFile
+ case _ => false
+ }
+}
+/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
+final class URLZipArchive(val url: URL) extends ZipArchive(null) {
+ def iterator: Iterator[Entry] = {
+ val root = new DirEntry("/")
+ val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
+ val in = new ZipInputStream(new ByteArrayInputStream(Streamable.bytes(input)))
+
+ @tailrec def loop() {
+ val zipEntry = in.getNextEntry()
+ class EmptyFileEntry() extends Entry(zipEntry.getName) {
+ override def toByteArray: Array[Byte] = null
+ override def sizeOption = Some(0)
+ }
+ class FileEntry() extends Entry(zipEntry.getName) {
+ override val toByteArray: Array[Byte] = {
+ val len = zipEntry.getSize().toInt
+ val arr = if (len == 0) Array.emptyByteArray else new Array[Byte](len)
+ var offset = 0
+
+ def loop() {
+ if (offset < len) {
+ val read = in.read(arr, offset, len - offset)
+ if (read >= 0) {
+ offset += read
+ loop()
+ }
+ }
+ }
+ loop()
+
+ if (offset == arr.length) arr
+ else throw new IOException("Input stream truncated: read %d of %d bytes".format(offset, len))
+ }
+ override def sizeOption = Some(zipEntry.getSize().toInt)
+ }
+
+ if (zipEntry != null) {
+ val dir = getDir(dirs, zipEntry)
+ if (zipEntry.isDirectory)
+ dir
+ else {
+ val f = if (zipEntry.getSize() == 0) new EmptyFileEntry() else new FileEntry()
+ dir.entries(f.name) = f
+ }
+ in.closeEntry()
+ loop()
+ }
+ }
+
+ loop()
+ try root.iterator
+ finally dirs.clear()
+ }
+
+ def name = url.getFile()
+ def path = url.getPath()
+ def input = url.openStream()
+ def lastModified =
+ try url.openConnection().getLastModified()
+ catch { case _: IOException => 0 }
+
+ override def canEqual(other: Any) = other.isInstanceOf[URLZipArchive]
+ override def hashCode() = url.hashCode
+ override def equals(that: Any) = that match {
+ case x: URLZipArchive => url == x.url
+ case _ => false
+ }
+}
diff --git a/src/reflect/scala/reflect/macros/Aliases.scala b/src/reflect/scala/reflect/macros/Aliases.scala
new file mode 100644
index 0000000..92d76f4
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Aliases.scala
@@ -0,0 +1,112 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A slice of [[scala.reflect.macros.Context the Scala macros context]] that defines shorthands for the
+ * most frequently used types and functions of the underlying compiler universe.
+ */
+trait Aliases {
+ self: Context =>
+
+ /** The type of symbols representing declarations. */
+ type Symbol = universe.Symbol
+
+ /** The type of Scala types, and also Scala type signatures.
+ * (No difference is internally made between the two).
+ */
+ type Type = universe.Type
+
+ /** The abstract type of names. */
+ type Name = universe.Name
+
+ /** The abstract type of names representing terms. */
+ type TermName = universe.TermName
+
+ /** The abstract type of names representing types. */
+ type TypeName = universe.TypeName
+
+ /** The type of Scala abstract syntax trees. */
+ type Tree = universe.Tree
+
+ /** Defines a universe-specific notion of positions. */
+ type Position = universe.Position
+
+ /** The base type of all scopes. */
+ type Scope = universe.Scope
+
+ /** The type of tree modifiers. */
+ type Modifiers = universe.Modifiers
+
+ /** The type of compilation runs. */
+ type Run = universe.Run
+
+ /** The type of compilation units. */
+ type CompilationUnit = universe.CompilationUnit
+
+ /** Expr wraps an abstract syntax tree and tags it with its type. */
+ type Expr[+T] = universe.Expr[T]
+
+ /** Constructor/Extractor for `Expr`. */
+ val Expr = universe.Expr
+
+ /** A shorthand to create an expr.
+ *
+ * Unlike the conventional expr factory, which requires a [[scala.reflect.api.TreeCreator]],
+ this one accepts a regular tree, but the resulting exprs are incapable of being migrated
+ * to other universes/mirrors (the functionality normally not needed for macros, since there is
+ * only one compile-time universe and only one compile-time mirror).
+ */
+ def Expr[T: WeakTypeTag](tree: Tree): Expr[T]
+
+ /** The type of weak type tags. */
+ type WeakTypeTag[T] = universe.WeakTypeTag[T]
+
+ /** The type of type tags. */
+ type TypeTag[T] = universe.TypeTag[T]
+
+ /** Constructor/Extractor for `WeakTypeTag`. */
+ val WeakTypeTag = universe.WeakTypeTag
+
+ /** Constructor/Extractor for `TypeTag`. */
+ val TypeTag = universe.TypeTag
+
+ /** A shorthand to create a weak type tag.
+ *
+ * Unlike the conventional type tag factory, which requires a [[scala.reflect.api.TypeCreator]],
+ this one accepts a regular type, but the resulting type tags are incapable of being migrated
+ * to other universes/mirrors (the functionality normally not needed for macros, since there is
+ * only one compile-time universe and only one compile-time mirror).
+ */
+ def WeakTypeTag[T](tpe: Type): WeakTypeTag[T]
+
+ /** A shorthand to create a type tag.
+ *
+ * Unlike the conventional type tag factory, which requires a [[scala.reflect.api.TypeCreator]],
+ * this one accepts a regular type, but the resulting type tags are unable of being migrated
+ * to other universes/mirrors (the functionality normally not needed for macros, since there is
+ * only one compile-time universe and only one compile-time mirror).
+ */
+ def TypeTag[T](tpe: Type): TypeTag[T]
+
+ /**
+ * Shortcut for `implicitly[WeakTypeTag[T]]`
+ */
+ def weakTypeTag[T](implicit attag: WeakTypeTag[T]) = attag
+
+ /**
+ * Shortcut for `implicitly[TypeTag[T]]`
+ */
+ def typeTag[T](implicit ttag: TypeTag[T]) = ttag
+
+ /**
+ * Shortcut for `implicitly[WeakTypeTag[T]].tpe`
+ */
+ def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
+
+ /**
+ * Shortcut for `implicitly[TypeTag[T]].tpe`
+ */
+ def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
+}
diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala
new file mode 100644
index 0000000..007df3b
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Attachments.scala
@@ -0,0 +1,64 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * Attachments provide a way to associate custom metadata with symbols and trees.
+ *
+ * Along with `symbol` and `tpe`, which represent core metadata of trees, each tree
+ * carries the `attachments` field that can store other metadata: compiler-defined (e.g. positions) or user-defined.
+ * Same story is true for symbols, which also have extensible metadata by the virtue
+ * of the same `attachments` field.
+ *
+ * Typically attachments just store a [[scala.reflect.api.Position]], but they can be extended to
+ * encompass arbitrary payloads. Payloads are stored in type-indexed slots, which can be read with `get[T]` and written
+ * with `update[T]` and `remove[T]`.
+ *
+ * This API doesn't have much use in the runtime reflection API (the [[scala.reflect.api]] package), but it might be of help
+ * for macro writers, providing a way to coordinate multiple macros operating on the same code. Therefore the `attachments`
+ * field is only declared in trees and symbols belonging to [[scala.reflect.macros.Universe]].
+ */
+abstract class Attachments { self =>
+
+ /** The position type of this attachment */
+ type Pos >: Null
+
+ /** The underlying position */
+ def pos: Pos
+
+ /** Creates a copy of this attachment with the position replaced by `newPos` */
+ def withPos(newPos: Pos): Attachments { type Pos = self.Pos }
+
+ /** The underlying payload with the guarantee that no two elements have the same type. */
+ def all: Set[Any] = Set.empty
+
+ private def matchesTag[T: ClassTag](datum: Any) =
+ classTag[T].runtimeClass == datum.getClass
+
+ /** An underlying payload of the given class type `T`. */
+ def get[T: ClassTag]: Option[T] =
+ (all filter matchesTag[T]).headOption.asInstanceOf[Option[T]]
+
+ /** Creates a copy of this attachment with the payload slot of T added/updated with the provided value.
+ * Replaces an existing payload of the same type, if exists.
+ */
+ def update[T: ClassTag](attachment: T): Attachments { type Pos = self.Pos } =
+ new NonemptyAttachments[Pos](this.pos, remove[T].all + attachment)
+
+ /** Creates a copy of this attachment with the payload of the given class type `T` removed. */
+ def remove[T: ClassTag]: Attachments { type Pos = self.Pos } = {
+ val newAll = all filterNot matchesTag[T]
+ if (newAll.isEmpty) pos.asInstanceOf[Attachments { type Pos = self.Pos }]
+ else new NonemptyAttachments[Pos](this.pos, newAll)
+ }
+}
+
+// SI-7018: This used to be an inner class of `Attachments`, but that led to a memory leak in the
+// IDE via $outer pointers.
+// Forward compatibility note: This class used to be Attachments$NonemptyAttachments.
+// However it's private, therefore it transcends the compatibility policy for 2.10.x.
+private final class NonemptyAttachments[P >: Null](override val pos: P, override val all: Set[Any]) extends Attachments {
+ type Pos = P
+ def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
+}
diff --git a/src/reflect/scala/reflect/macros/Context.scala b/src/reflect/scala/reflect/macros/Context.scala
new file mode 100644
index 0000000..aa1c1db
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Context.scala
@@ -0,0 +1,90 @@
+package scala.reflect
+package macros
+
+// todo. introduce context hierarchy
+// the most lightweight context should just expose the stuff from the SIP
+// the full context should include all traits from scala.reflect.macros (and probably reside in scala-compiler.jar)
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * The Scala macros context.
+ *
+ * See [[scala.reflect.macros.package the overview page]] for a description of how macros work. This documentation
+ * entry provides information on the API available to macro writers.
+ *
+ * A macro context wraps a compiler universe exposed in `universe` and having type [[scala.reflect.macros.Universe]].
+ * This type is a refinement over the generic reflection API provided in [[scala.reflect.api.Universe]]. The
+ * extended Universe provides mutability for reflection artifacts (e.g. macros can change types of compiler trees,
+ * add annotation to symbols representing definitions, etc) and exposes some internal compiler functionality
+ * such as `Symbol.deSkolemize` or `Tree.attachments`.
+ *
+ * Another fundamental part of a macro context is `macroApplication`, which provides access to the tree undergoing
+ * macro expansion. Parts of this tree can be found in arguments of the corresponding macro implementations and
+ * in `prefix`, but `macroApplication` gives the full picture.
+ *
+ * Other than that, macro contexts provide facilities for typechecking, exploring the compiler's symbol table and
+ * enclosing trees and compilation units, evaluating trees, logging warnings/errors and much more.
+ * Refer to the documentation of top-level traits in this package to learn the details.
+ */
+trait Context extends Aliases
+ with Enclosures
+ with Names
+ with Reifiers
+ with FrontEnds
+ with Infrastructure
+ with Typers
+ with Parsers
+ with Evals
+ with ExprUtils {
+
+ /** The compile-time universe. */
+ val universe: Universe
+
+ /** The mirror of the compile-time universe. */
+ val mirror: universe.Mirror
+
+ /** The type of the prefix tree from which the macro is selected.
+ * See the documentation entry for `prefix` for an example.
+ */
+ type PrefixType
+
+ /** The prefix tree from which the macro is selected.
+ *
+ * For example, for a macro `filter` defined as an instance method on a collection `Coll`,
+ * `prefix` represents an equivalent of `this` for normal instance methods:
+ *
+ * {{{
+ * scala> class Coll[T] {
+ * | def filter(p: T => Boolean): Coll[T] = macro M.filter[T]
+ * | }; object M {
+ * | def filter[T](c: Context { type PrefixType = Coll[T] })
+ * | (p: c.Expr[T => Boolean]): c.Expr[Coll[T]] =
+ * | {
+ * | println(c.prefix.tree)
+ * | c.prefix
+ * | }
+ * | }
+ * defined class Coll
+ * defined module Macros
+ *
+ * scala> new Coll[Int]().filter(_ % 2 == 0)
+ * new Coll[Int]()
+ * res0: Coll[Int] = ...
+ *
+ * scala> val x = new Coll[String]()
+ * x: Coll[String] = ...
+ *
+ * scala> x.filter(_ != "")
+ * \$line11.\$read.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.\$iw.x
+ * res1 @ 35563b4b: x.type = ...
+ * }}}
+ *
+ * Note how the value of `prefix` changes depending on the qualifier of the macro call
+ * (i.e. the expression that is at the left-hand side of the dot).
+ *
+ * Another noteworthy thing about the snippet above is the `Context { type PrefixType = Coll[T] }`
+ * type that is used to stress that the macro implementation works with prefixes of type `Coll[T]`.
+ */
+ val prefix: Expr[PrefixType]
+}
diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala
new file mode 100644
index 0000000..a4ad71c
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Enclosures.scala
@@ -0,0 +1,66 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A slice of [[scala.reflect.macros.Context the Scala macros context]] that exposes
+ * enclosing trees (method, class, compilation unit and currently compiled application),
+ * the enclosing position of the macro expansion, as well as macros and implicits
+ * that are currently in-flight.
+ */
+trait Enclosures {
+ self: Context =>
+
+ /** The tree that undergoes macro expansion.
+ * Can be useful to get an offset or a range position of the entire tree being processed.
+ */
+ val macroApplication: Tree
+
+ /** Contexts that represent macros in-flight, including the current one. Very much like a stack trace, but for macros only.
+ * Can be useful for interoperating with other macros and for imposing compiler-friendly limits on macro expansion.
+ *
+ * Is also priceless for emitting sane error messages for macros that are called by other macros on synthetic (i.e. position-less) trees.
+ * In that dire case navigate the ``enclosingMacros'' stack, and it will most likely contain at least one macro with a position-ful macro application.
+ * See ``enclosingPosition'' for a default implementation of this logic.
+ *
+ * Unlike `openMacros`, this is a val, which means that it gets initialized when the context is created
+ * and always stays the same regardless of whatever happens during macro expansion.
+ */
+ val enclosingMacros: List[Context]
+
+ /** Information about one of the currently considered implicit candidates.
+ * Candidates are used in plural form, because implicit parameters may themselves have implicit parameters,
+ * hence implicit searches can recursively trigger other implicit searches.
+ *
+ * Can be useful to get information about an application with an implicit parameter that is materialized during current macro expansion.
+ * If we're in an implicit macro being expanded, it's included in this list.
+ *
+ * Unlike `openImplicits`, this is a val, which means that it gets initialized when the context is created
+ * and always stays the same regardless of whatever happens during macro expansion.
+ */
+ val enclosingImplicits: List[(Type, Tree)]
+
+ /** Tries to guess a position for the enclosing application.
+ * But that is simple, right? Just dereference ``pos'' of ``macroApplication''? Not really.
+ * If we're in a synthetic macro expansion (no positions), we must do our best to infer the position of something that triggered this expansion.
+ * Surprisingly, quite often we can do this by navigating the ``enclosingMacros'' stack.
+ */
+ val enclosingPosition: Position
+
+ /** Tree that corresponds to the enclosing method, or EmptyTree if not applicable.
+ */
+ val enclosingMethod: Tree
+
+ /** Tree that corresponds to the enclosing class, or EmptyTree if not applicable.
+ */
+ val enclosingClass: Tree
+
+ /** Compilation unit that contains this macro application.
+ */
+ val enclosingUnit: CompilationUnit
+
+ /** Compilation run that contains this macro application.
+ */
+ val enclosingRun: Run
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/macros/Evals.scala b/src/reflect/scala/reflect/macros/Evals.scala
new file mode 100644
index 0000000..37680c2
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Evals.scala
@@ -0,0 +1,57 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A slice of [[scala.reflect.macros.Context the Scala macros context]] that provides
+ * a facility to evaluate trees.
+ */
+trait Evals {
+ self: Context =>
+
+ /** Takes a typed wrapper for a tree of type `T` and evaluates it to a value of type `T`.
+ *
+ * Can be used to perform compile-time computations on macro arguments to the extent
+ * permitted by the shape of the arguments.
+ *
+ * Known issues: because of [[https://issues.scala-lang.org/browse/SI-5748 https://issues.scala-lang.org/browse/SI-5748]]
+ * trees being evaluated first need to undergo `resetAllAttrs`. Resetting symbols and types
+ * mutates the tree in place, therefore the conventional approach is to `duplicate` the tree first.
+ *
+ * {{{
+ * scala> def impl(c: Context)(x: c.Expr[String]) = {
+ * | val x1 = c.Expr[String](c.resetAllAttrs(x.tree.duplicate))
+ * | println(s"compile-time value is: \${c.eval(x1)}")
+ * | x
+ * | }
+ * impl: (c: Context)(x: c.Expr[String])c.Expr[String]
+ *
+ * scala> def test(x: String) = macro impl
+ * test: (x: String)String
+ *
+ * scala> test("x")
+ * compile-time value is: x
+ * res0: String = x
+ *
+ * scala> test("x" + "y")
+ * compile-time value is: xy
+ * res1: String = xy
+ *
+ * scala> val x = "x"
+ * x: String = x
+ *
+ * scala> test(x + "y")
+ * compile-time value is: xy
+ * res2: String = xy
+ *
+ * scala> { val x = "x"; test(x + "y") }
+ * error: exception during macro expansion:
+ * scala.tools.reflect.ToolBoxError: reflective compilation failed
+ * }}}
+ *
+ * Note that in the last case evaluation has failed, because the argument of a macro
+ * refers to a runtime value `x`, which is unknown at compile time.
+ */
+ def eval[T](expr: Expr[T]): T
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/macros/ExprUtils.scala b/src/reflect/scala/reflect/macros/ExprUtils.scala
new file mode 100644
index 0000000..458cde9
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/ExprUtils.scala
@@ -0,0 +1,51 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A slice of [[scala.reflect.macros.Context the Scala macros context]] that defines shorthands for the
+ * most common `Expr`-creating functions.
+ */
+trait ExprUtils {
+ self: Context =>
+
+ /** Shorthand for `Literal(Constant(null))` in the underlying `universe`. */
+ def literalNull: Expr[Null]
+
+ /** Shorthand for `Literal(Constant(()))` in the underlying `universe`. */
+ def literalUnit: Expr[Unit]
+
+ /** Shorthand for `Literal(Constant(true))` in the underlying `universe`. */
+ def literalTrue: Expr[Boolean]
+
+ /** Shorthand for `Literal(Constant(false))` in the underlying `universe`. */
+ def literalFalse: Expr[Boolean]
+
+ /** Shorthand for `Literal(Constant(x: Boolean))` in the underlying `universe`. */
+ def literal(x: Boolean): Expr[Boolean]
+
+ /** Shorthand for `Literal(Constant(x: Byte))` in the underlying `universe`. */
+ def literal(x: Byte): Expr[Byte]
+
+ /** Shorthand for `Literal(Constant(x: Short))` in the underlying `universe`. */
+ def literal(x: Short): Expr[Short]
+
+ /** Shorthand for `Literal(Constant(x: Int))` in the underlying `universe`. */
+ def literal(x: Int): Expr[Int]
+
+ /** Shorthand for `Literal(Constant(x: Long))` in the underlying `universe`. */
+ def literal(x: Long): Expr[Long]
+
+ /** Shorthand for `Literal(Constant(x: Float))` in the underlying `universe`. */
+ def literal(x: Float): Expr[Float]
+
+ /** Shorthand for `Literal(Constant(x: Double))` in the underlying `universe`. */
+ def literal(x: Double): Expr[Double]
+
+ /** Shorthand for `Literal(Constant(x: String))` in the underlying `universe`. */
+ def literal(x: String): Expr[String]
+
+ /** Shorthand for `Literal(Constant(x: Char))` in the underlying `universe`. */
+ def literal(x: Char): Expr[Char]
+}
diff --git a/src/reflect/scala/reflect/macros/FrontEnds.scala b/src/reflect/scala/reflect/macros/FrontEnds.scala
new file mode 100644
index 0000000..67b2408
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/FrontEnds.scala
@@ -0,0 +1,47 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A slice of [[scala.reflect.macros.Context the Scala macros context]] that
+ * provides facilities to communicate with the compiler's front end
+ * (emit warnings, errors and other sorts of messages).
+ */
+trait FrontEnds {
+ self: Context =>
+
+ /** For sending a message which should not be labeled as a warning/error,
+ * but also shouldn't require -verbose to be visible.
+ * Use `enclosingPosition` if you're in doubt what position to pass to `pos`.
+ */
+ def echo(pos: Position, msg: String): Unit
+
+ /** Emits an informational message, suppressed unless `-verbose` or `force=true`.
+ * Use `enclosingPosition` if you're in doubt what position to pass to `pos`.
+ */
+ def info(pos: Position, msg: String, force: Boolean): Unit
+
+ /** Does the compilation session have any warnings?
+ */
+ def hasWarnings: Boolean
+
+ /** Emits a warning.
+ * Use `enclosingPosition` if you're in doubt what position to pass to `pos`.
+ */
+ def warning(pos: Position, msg: String): Unit
+
+ /** Does the compilation session have any errors?
+ */
+ def hasErrors: Boolean
+
+ /** Emits a compilation error.
+ * Use `enclosingPosition` if you're in doubt what position to pass to `pos`.
+ */
+ def error(pos: Position, msg: String): Unit
+
+ /** Abruptly terminates current macro expansion leaving a note about what happened.
+ * Use `enclosingPosition` if you're in doubt what position to pass to `pos`.
+ */
+ def abort(pos: Position, msg: String): Nothing
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/macros/Infrastructure.scala b/src/reflect/scala/reflect/macros/Infrastructure.scala
new file mode 100644
index 0000000..99706e8
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Infrastructure.scala
@@ -0,0 +1,25 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A slice of [[scala.reflect.macros.Context the Scala macros context]] that
+ * provides facilities to communicate with the compiler's infrastructure.
+ */
+trait Infrastructure {
+ self: Context =>
+
+ /** Exposes macro-specific settings as a list of strings.
+ * These settings are passed to the compiler via the "-Xmacro-settings:setting1,setting2...,settingN" command-line option.
+ */
+ def settings: List[String]
+
+ /** Exposes current compiler settings as a list of options.
+ * Use `scalac -help`, `scalac -X` and `scalac -Y` to learn about currently supported options.
+ */
+ def compilerSettings: List[String]
+
+ /** Exposes current classpath. */
+ def classPath: List[java.net.URL]
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/macros/Names.scala b/src/reflect/scala/reflect/macros/Names.scala
new file mode 100644
index 0000000..8bbaa5f
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Names.scala
@@ -0,0 +1,23 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A slice of [[scala.reflect.macros.Context the Scala macros context]] that
+ * provides functions that generate unique names.
+ */
+trait Names {
+ self: Context =>
+
+ /** Creates a unique string. */
+ def fresh(): String
+
+ /** Creates a unique string having a given prefix. */
+ def fresh(name: String): String
+
+ /** Creates a unique name having a given name as a prefix and
+ * having the same flavor (term name or type name) as the given name.
+ */
+ def fresh[NameType <: Name](name: NameType): NameType
+}
diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala
new file mode 100644
index 0000000..93a7637
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Parsers.scala
@@ -0,0 +1,22 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A slice of [[scala.reflect.macros.Context the Scala macros context]] that
+ * exposes functions to parse strings with Scala code into trees.
+ */
+trait Parsers {
+ self: Context =>
+
+ /** Parses a string with a Scala expression into an abstract syntax tree.
+ * Only works for expressions, i.e. parsing a package declaration will fail.
+ * @throws [[scala.reflect.macros.ParseException]]
+ */
+ def parse(code: String): Tree
+}
+
+/** Indicates an error during [[scala.reflect.macros.Parsers#parse]].
+ */
+case class ParseException(val pos: scala.reflect.api.Position, val msg: String) extends Exception(msg)
diff --git a/src/reflect/scala/reflect/macros/Reifiers.scala b/src/reflect/scala/reflect/macros/Reifiers.scala
new file mode 100644
index 0000000..3db7b9a
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Reifiers.scala
@@ -0,0 +1,95 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A slice of [[scala.reflect.macros.Context the Scala macros context]] that
+ * exposes functions to save reflection artifacts for runtime.
+ */
+trait Reifiers {
+ self: Context =>
+
+ /** Given a tree, generate a tree that when compiled and executed produces the original tree.
+ * For more information and examples see the documentation for ``Universe.reify''.
+ *
+ * The produced tree will be bound to the specified ``universe'' and ``mirror''.
+ * Possible values for ``universe'' include ``universe.treeBuild.mkRuntimeUniverseRef''.
+ * Possible values for ``mirror'' include ``EmptyTree'' (in that case the reifier will automatically pick an appropriate mirror).
+ *
+ * This function is deeply connected to ``Universe.reify'', a macro that reifies arbitrary expressions into runtime trees.
+ * They do very similar things (``Universe.reify'' calls ``Context.reifyTree'' to implement itself), but they operate on different metalevels (see below).
+ *
+ * Let's study the differences between ``Context.reifyTree'' and ``Universe.reify'' on an example of using them inside a ``fooMacro'' macro:
+ *
+ * * Since reify itself is a macro, it will be executed when fooMacro is being compiled (metalevel -1)
+ * and will produce a tree that when evaluated during macro expansion of fooMacro (metalevel 0) will recreate the input tree.
+ *
+ * This provides a facility analogous to quasi-quoting. Writing "reify{ expr }" will generate an AST that represents expr.
+ * Afterwards this AST (or its parts) can be used to construct the return value of fooMacro.
+ *
+ * * reifyTree is evaluated during macro expansion (metalevel 0)
+ * and will produce a tree that when evaluated during the runtime of the program (metalevel 1) will recreate the input tree.
+ *
+ * This provides a way to retain certain trees from macro expansion time to be inspected later, in the runtime.
+ * For example, DSL authors may find it useful to capture DSL snippets into ASTs that are then processed at runtime in a domain-specific way.
+ *
+ * Also note the difference between universes of the runtime trees produced by two reifies:
+ *
+ * * The result of compiling and running the result of reify will be bound to the Universe that called reify.
+ * This is possible because it's a macro, so it can generate whatever code it wishes.
+ *
+ * * The result of compiling and running the result of reifyTree will be the ``prefix'' that needs to be passed explicitly.
+ * This happens because the Universe of the evaluated result is from a different metalevel than the Context that called reify.
+ *
+ * Typical usage of this function is to retain some of the trees received/created by a macro
+ * into the form that can be inspected (via pattern matching) or compiled/run (by a reflective ToolBox) during the runtime.
+ */
+ def reifyTree(universe: Tree, mirror: Tree, tree: Tree): Tree
+
+ /** Given a type, generate a tree that when compiled and executed produces the original type.
+ * The produced tree will be bound to the specified ``universe'' and ``mirror''.
+ * For more information and examples see the documentation for ``Context.reifyTree'' and ``Universe.reify''.
+ */
+ def reifyType(universe: Tree, mirror: Tree, tpe: Type, concrete: Boolean = false): Tree
+
+ /** Given a type, generate a tree that when compiled and executed produces the runtime class of the original type.
+ * If ``concrete'' is true, then this function will bail on types, which refer to abstract types (like `ClassTag` does).
+ */
+ def reifyRuntimeClass(tpe: Type, concrete: Boolean = true): Tree
+
+ /** Given a type, generate a tree that when compiled and executed produces the runtime class of the enclosing class or module.
+ * Returns `EmptyTree` if there does not exist an enclosing class or module.
+ */
+ def reifyEnclosingRuntimeClass: Tree
+
+ /** Undoes reification of a tree.
+ *
+ * This reversion doesn't simply restore the original tree (that would lose the context of reification),
+ * but does something more involved that conforms to the following laws:
+ *
+ * 1) unreifyTree(reifyTree(tree)) != tree // unreified tree is tree + saved context
+ * // in current implementation, the result of unreify is opaque
+ * // i.e. there's no possibility to inspect underlying tree/context
+ *
+ * 2) reifyTree(unreifyTree(reifyTree(tree))) == reifyTree(tree) // the result of reifying a tree in its original context equals to
+ * // the result of reifying a tree along with its saved context
+ *
+ * 3) compileAndEval(unreifyTree(reifyTree(tree))) ~ compileAndEval(tree) // at runtime original and unreified trees are behaviorally equivalent
+ */
+ def unreifyTree(tree: Tree): Tree
+}
+
+// made these guys non path-dependent, otherwise exception handling quickly becomes a mess
+
+/** Indicates an expected error during one of the `reifyXXX` methods in [[scala.reflect.macros.Reifiers]].
+ * Such errors represent one of the standard ways for reification to go wrong, e.g.
+ * an attempt to create a `TypeTag` from a weak type.
+ */
+case class ReificationException(val pos: scala.reflect.api.Position, val msg: String) extends Exception(msg)
+
+/** Indicates an unexpected error during one of the `reifyXXX` methods in [[scala.reflect.macros.Reifiers]].
+ * Such errors wrap random crashes in reification logic and are distinguished from expected [[scala.reflect.macros.ReificationException]]s
+ * so that the latter can be reported as compilation errors, while the former manifest themselves as compiler crashes.
+ */
+case class UnexpectedReificationException(val pos: scala.reflect.api.Position, val msg: String, val cause: Throwable = null) extends Exception(msg, cause)
diff --git a/src/reflect/scala/reflect/macros/TreeBuilder.scala b/src/reflect/scala/reflect/macros/TreeBuilder.scala
new file mode 100644
index 0000000..204dc40
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/TreeBuilder.scala
@@ -0,0 +1,72 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A helper available in [[scala.reflect.macros.Universe]] that defines shorthands for the
+ * most common tree-creating functions.
+ */
+abstract class TreeBuilder {
+ val global: Universe
+
+ import global._
+ import definitions._
+
+ /** Builds a reference to value whose type is given stable prefix.
+ * The type must be suitable for this. For example, it
+ * must not be a TypeRef pointing to an abstract type variable.
+ */
+ def mkAttributedQualifier(tpe: Type): Tree
+
+ /** Builds a reference to value whose type is given stable prefix.
+ * If the type is unsuitable, e.g. it is a TypeRef for an
+ * abstract type variable, then an Ident will be made using
+ * termSym as the Ident's symbol. In that case, termSym must
+ * not be NoSymbol.
+ */
+ def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree
+
+ /** Builds a typed reference to given symbol with given stable prefix. */
+ def mkAttributedRef(pre: Type, sym: Symbol): Tree
+
+ /** Builds a typed reference to given symbol. */
+ def mkAttributedRef(sym: Symbol): Tree
+
+ /** Builds a typed This reference to given symbol. */
+ def mkAttributedThis(sym: Symbol): Tree
+
+ /** Builds a typed Ident with an underlying symbol. */
+ def mkAttributedIdent(sym: Symbol): Tree
+
+ /** Builds a typed Select with an underlying symbol. */
+ def mkAttributedSelect(qual: Tree, sym: Symbol): Tree
+
+ /** A creator for method calls, e.g. fn[T1, T2, ...](v1, v2, ...)
+ * There are a number of variations.
+ *
+ * @param receiver symbol of the method receiver
+ * @param methodName name of the method to call
+ * @param targs type arguments (if Nil, no TypeApply node will be generated)
+ * @param args value arguments
+ * @return the newly created trees.
+ */
+ def mkMethodCall(receiver: Symbol, methodName: Name, targs: List[Type], args: List[Tree]): Tree
+
+ def mkMethodCall(method: Symbol, targs: List[Type], args: List[Tree]): Tree
+
+ def mkMethodCall(method: Symbol, args: List[Tree]): Tree
+
+ def mkMethodCall(target: Tree, args: List[Tree]): Tree
+
+ def mkMethodCall(receiver: Symbol, methodName: Name, args: List[Tree]): Tree
+
+ def mkMethodCall(receiver: Tree, method: Symbol, targs: List[Type], args: List[Tree]): Tree
+
+ def mkMethodCall(target: Tree, targs: List[Type], args: List[Tree]): Tree
+
+ def mkNullaryCall(method: Symbol, targs: List[Type]): Tree
+
+ /** A tree that refers to the runtime reflexive universe, ``scala.reflect.runtime.universe''. */
+ def mkRuntimeUniverseRef: Tree
+}
diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala
new file mode 100644
index 0000000..d36636a
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Typers.scala
@@ -0,0 +1,95 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A slice of [[scala.reflect.macros.Context the Scala macros context]] that
+ * partially exposes the type checker to macro writers.
+ */
+trait Typers {
+ self: Context =>
+
+ import universe._
+
+ /** Contexts that represent macros in-flight, including the current one. Very much like a stack trace, but for macros only.
+ * Can be useful for interoperating with other macros and for imposing compiler-friendly limits on macro expansion.
+ *
+ * Is also priceless for emitting sane error messages for macros that are called by other macros on synthetic (i.e. position-less) trees.
+ * In that dire case navigate the `openMacros` stack, and it will most likely contain at least one macro with a position-ful macro application.
+ * See `enclosingPosition` for a default implementation of this logic.
+ *
+ * Unlike `enclosingMacros`, this is a def, which means that it gets recalculated on every invocation,
+ * so it might change depending on what is going on during macro expansion.
+ */
+ def openMacros: List[Context]
+
+ /** Information about one of the currently considered implicit candidates.
+ * Candidates are used in plural form, because implicit parameters may themselves have implicit parameters,
+ * hence implicit searches can recursively trigger other implicit searches.
+ *
+ * Can be useful to get information about an application with an implicit parameter that is materialized during current macro expansion.
+ * If we're in an implicit macro being expanded, it's included in this list.
+ *
+ * Unlike `enclosingImplicits`, this is a def, which means that it gets recalculated on every invocation,
+ * so it might change depending on what is going on during macro expansion.
+ */
+ def openImplicits: List[(Type, Tree)]
+
+ /** Typechecks the provided tree against the expected type `pt` in the macro callsite context.
+ *
+ * If `silent` is false, `TypecheckException` will be thrown in case of a typecheck error.
+ * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs.
+ * Such errors don't vanish and can be inspected by turning on -Ymacro-debug-verbose.
+ * Unlike in `inferImplicitValue` and `inferImplicitView`, `silent` is false by default.
+ *
+ * Typechecking can be steered with the following optional parameters:
+ * `withImplicitViewsDisabled` recursively prohibits implicit views (though, implicit vals will still be looked up and filled in), default value is false
+ * `withMacrosDisabled` recursively prohibits macro expansions and macro-based implicits, default value is false
+ *
+ * @throws [[scala.reflect.macros.TypecheckException]]
+ */
+ def typeCheck(tree: Tree, pt: Type = WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree
+
+ /** Infers an implicit value of the expected type `pt` in the macro callsite context.
+ * Optional `pos` parameter provides a position that will be associated with the implicit search.
+ *
+ * If `silent` is false, `TypecheckException` will be thrown in case of an inference error.
+ * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs.
+ * Such errors don't vanish and can be inspected by turning on -Xlog-implicits.
+ * Unlike in `typeCheck`, `silent` is true by default.
+ *
+ * @throws [[scala.reflect.macros.TypecheckException]]
+ */
+ def inferImplicitValue(pt: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree
+
+ /** Infers an implicit view from the provided tree `tree` of the type `from` to the type `to` in the macro callsite context.
+ * Optional `pos` parameter provides a position that will be associated with the implicit search.
+ *
+ * If `silent` is false, `TypecheckException` will be thrown in case of an inference error.
+ * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs.
+ * Such errors don't vanish and can be inspected by turning on -Xlog-implicits.
+ * Unlike in `typeCheck`, `silent` is true by default.
+ *
+ * @throws [[scala.reflect.macros.TypecheckException]]
+ */
+ def inferImplicitView(tree: Tree, from: Type, to: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree
+
+ /** Recursively resets symbols and types in a given tree.
+ *
+ * Note that this does not revert the tree to its pre-typer shape.
+ * For more info, read up https://issues.scala-lang.org/browse/SI-5464.
+ */
+ def resetAllAttrs(tree: Tree): Tree
+
+ /** Recursively resets locally defined symbols and types in a given tree.
+ *
+ * Note that this does not revert the tree to its pre-typer shape.
+ * For more info, read up https://issues.scala-lang.org/browse/SI-5464.
+ */
+ def resetLocalAttrs(tree: Tree): Tree
+}
+
+/** Indicates an error during one of the methods in [[scala.reflect.macros.Typers]].
+ */
+case class TypecheckException(val pos: scala.reflect.api.Position, val msg: String) extends Exception(msg)
diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala
new file mode 100644
index 0000000..4e76f7c
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Universe.scala
@@ -0,0 +1,241 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * The refinement of [[scala.reflect.api.Universe]] for the use by macro writers.
+ *
+ * This universe provides mutability for reflection artifacts (e.g. macros can change types of compiler trees,
+ * add annotation to symbols representing definitions, etc) and exposes some internal compiler functionality
+ * such as `Symbol.deSkolemize` or `Tree.attachments`.
+ * @groupname Macros Macro Specific Additions
+ * @groupprio Macros -1
+ *
+ * @contentDiagram hideNodes "*Api"
+ */
+abstract class Universe extends scala.reflect.api.Universe {
+
+ /** A factory that encapsulates common tree-building functions.
+ * @group Macros
+ */
+ val treeBuild: TreeBuilder { val global: Universe.this.type }
+
+ /** The API of reflection artifacts that support [[scala.reflect.macros.Attachments]].
+ * These artifacts are trees and symbols.
+ * @group Macros
+ */
+ trait AttachableApi {
+ /** The attachment of the reflection artifact. */
+ def attachments: Attachments { type Pos = Position }
+
+ /** Updates the attachment with the payload slot of T added/updated with the provided value.
+ * Replaces an existing payload of the same type, if exists.
+ * Returns the reflection artifact itself.
+ */
+ def updateAttachment[T: ClassTag](attachment: T): AttachableApi.this.type
+
+ /** Update the attachment with the payload of the given class type `T` removed.
+ * Returns the reflection artifact itself.
+ */
+ def removeAttachment[T: ClassTag]: AttachableApi.this.type
+ }
+
+ // Symbol extensions ---------------------------------------------------------------
+
+ /** The `Symbol` API is extended for macros: See [[SymbolContextApi]] for details.
+ *
+ * @group Macros
+ */
+ override type Symbol >: Null <: SymbolContextApi
+
+ /** The extended API of symbols that's supported in macro context universes
+ * @group API
+ */
+ trait SymbolContextApi extends SymbolApi with AttachableApi { self: Symbol =>
+
+ /** If this symbol is a skolem, its corresponding type parameter, otherwise the symbol itself.
+ *
+ * [[https://groups.google.com/forum/#!msg/scala-internals/0j8laVNTQsI/kRXMF_c8bGsJ To quote Martin Odersky]],
+ * skolems are synthetic type "constants" that are copies of existentially bound or universally
+ * bound type variables. E.g. if one is inside the right-hand side of a method:
+ *
+ * {{{
+ * def foo[T](x: T) = ... foo[List[T]]....
+ * }}}
+ *
+ * the skolem named `T` refers to the unknown type instance of `T` when `foo` is called. It needs to be different
+ * from the type parameter because in a recursive call as in the `foo[List[T]]` above the type parameter gets
+ * substituted with `List[T]`, but the ''type skolem'' stays what it is.
+ *
+ * The other form of skolem is an ''existential skolem''. Say one has a function
+ *
+ * {{{
+ * def bar(xs: List[T] forSome { type T }) = xs.head
+ * }}}
+ *
+ * then each occurrence of `xs` on the right will have type `List[T']` where `T'` is a fresh copy of `T`.
+ */
+ def deSkolemize: Symbol
+
+ /** The position of this symbol. */
+ def pos: Position
+
+ /** Sets the `typeSignature` of the symbol. */
+ def setTypeSignature(tpe: Type): Symbol
+
+ /** Sets the `annotations` of the symbol. */
+ def setAnnotations(annots: Annotation*): Symbol
+
+ /** Sets the `name` of the symbol. */
+ def setName(name: Name): Symbol
+
+ /** Sets the `privateWithin` of the symbol. */
+ def setPrivateWithin(sym: Symbol): Symbol
+ }
+
+ // Tree extensions ---------------------------------------------------------------
+
+ /** The `Tree` API is extended for macros: See [[TreeContextApi]] for details.
+ *
+ * @group Macros
+ */
+ override type Tree >: Null <: TreeContextApi
+
+ /** The extended API of trees that's supported in macro context universes
+ * @group API
+ */
+ trait TreeContextApi extends TreeApi with AttachableApi { self: Tree =>
+
+ /** Sets the `pos` of the tree. Returns `Unit`. */
+ def pos_=(pos: Position): Unit
+
+ /** Sets the `pos` of the tree. Returns the tree itself. */
+ def setPos(newpos: Position): Tree
+
+ /** Sets the `tpe` of the tree. Returns `Unit`. */
+ def tpe_=(t: Type): Unit
+
+ /** Sets the `tpe` of the tree. Returns the tree itself. */
+ def setType(tp: Type): Tree
+
+ /** Like `setType`, but if this is a previously empty TypeTree that
+ * fact is remembered so that resetAllAttrs will snap back.
+ *
+ * \@PP: Attempting to elaborate on the above, I find: If defineType
+ * is called on a TypeTree whose type field is null or NoType,
+ * this is recorded as "wasEmpty = true". That value is used in
+ * ResetAttrsTraverser, which nulls out the type field of TypeTrees
+ * for which wasEmpty is true, leaving the others alone.
+ *
+ * resetAllAttrs is used in situations where some speculative
+ * typing of a tree takes place, fails, and the tree needs to be
+ * returned to its former state to try again. So according to me:
+ * using `defineType` instead of `setType` is how you communicate
+ * that the type being set does not depend on any previous state,
+ * and therefore should be abandoned if the current line of type
+ * inquiry doesn't work out.
+ */
+ def defineType(tp: Type): Tree
+
+ /** Sets the `symbol` of the tree. Returns `Unit`. */
+ def symbol_=(sym: Symbol): Unit
+
+ /** Sets the `symbol` of the tree. Returns the tree itself. */
+ def setSymbol(sym: Symbol): Tree
+ }
+
+ /** @inheritdoc */
+ override type SymTree >: Null <: Tree with SymTreeContextApi
+
+ /** The extended API of sym trees that's supported in macro context universes
+ * @group API
+ */
+ trait SymTreeContextApi extends SymTreeApi { this: SymTree =>
+ /** Sets the `symbol` field of the sym tree. */
+ var symbol: Symbol
+ }
+
+ /** @inheritdoc */
+ override type TypeTree >: Null <: TypTree with TypeTreeContextApi
+
+ /** The extended API of sym trees that's supported in macro context universes
+ * @group API
+ */
+ trait TypeTreeContextApi extends TypeTreeApi { this: TypeTree =>
+ /** Sets the `original` field of the type tree. */
+ def setOriginal(tree: Tree): this.type
+ }
+
+ /** @inheritdoc */
+ override type Ident >: Null <: RefTree with IdentContextApi
+
+ /** The extended API of idents that's supported in macro context universes
+ * @group API
+ */
+ trait IdentContextApi extends IdentApi { this: Ident =>
+ /** Was this ident created from a backquoted identifier? */
+ def isBackquoted: Boolean
+ }
+
+ /** Mark a variable as captured; i.e. force boxing in a *Ref type.
+ * @group Macros
+ */
+ def captureVariable(vble: Symbol): Unit
+
+ /** Mark given identifier as a reference to a captured variable itself
+ * suppressing dereferencing with the `elem` field.
+ * @group Macros
+ */
+ def referenceCapturedVariable(vble: Symbol): Tree
+
+ /** Convert type of a captured variable to *Ref type.
+ * @group Macros
+ */
+ def capturedVariableType(vble: Symbol): Type
+
+ /** The type of compilation runs.
+ * @template
+ * @group Macros
+ */
+ type Run <: RunContextApi
+
+ /** Compilation run uniquely identifies current invocation of the compiler
+ * (e.g. can be used to implement per-run caches for macros) and provides access to units of work
+ * of the invocation (currently processed unit of work and the list of all units).
+ * @group API
+ */
+ trait RunContextApi {
+ /** Currently processed unit of work (a real or a virtual file). */
+ def currentUnit: CompilationUnit
+
+ /** All units of work comprising this compilation run. */
+ def units: Iterator[CompilationUnit]
+ }
+
+ /** The type of compilation units.
+ * @template
+ * @group Macros
+ */
+ type CompilationUnit <: CompilationUnitContextApi
+
+ /** Compilation unit describes a unit of work of the compilation run.
+ * It provides such information as file name, textual representation of the unit and the underlying AST.
+ * @group API
+ */
+ trait CompilationUnitContextApi {
+ /** Source file corresponding to this compilation unit.
+ *
+ * Exposes information about the file as a part of a real or virtual file system
+ * along with the contents of that file.
+ *
+ * The return type is `scala.reflect.io.AbstractFile`, which belongs to an experimental part of Scala reflection.
+ * It should not be used unless you know what you are doing. In subsequent releases, this API will be refined
+ * and exposed as a part of scala.reflect.api.
+ */
+ def source: scala.reflect.internal.util.SourceFile
+
+ /** The AST that corresponds to this compilation unit. */
+ def body: Tree
+ }
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/macros/package.scala b/src/reflect/scala/reflect/macros/package.scala
new file mode 100644
index 0000000..21d189b
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/package.scala
@@ -0,0 +1,15 @@
+package scala.reflect
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * The base package for Scala macros.
+ *
+ * Macros are functions that are called by the compiler during compilation.
+ * Within these functions the programmer has access to compiler APIs exposed in [[scala.reflect.macros.Context]].
+ * For example, it is possible to generate, analyze and typecheck code.
+ *
+ * See the [[http://docs.scala-lang.org/overviews/macros.html Macros Guide]] on how to get started with Scala macros.
+ */
+package object macros {
+}
\ No newline at end of file
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
new file mode 100644
index 0000000..6fdb238
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -0,0 +1,1315 @@
+package scala.reflect
+package runtime
+
+import scala.ref.WeakReference
+import scala.collection.mutable.WeakHashMap
+
+import java.lang.{Class => jClass, Package => jPackage}
+import java.lang.reflect.{
+ Method => jMethod, Constructor => jConstructor, Modifier => jModifier, Field => jField,
+ Member => jMember, Type => jType, TypeVariable => jTypeVariable, Array => jArray,
+ GenericDeclaration, GenericArrayType, ParameterizedType, WildcardType, AnnotatedElement }
+import java.lang.annotation.{Annotation => jAnnotation}
+import java.io.IOException
+import internal.MissingRequirementError
+import internal.pickling.ByteCodecs
+import internal.ClassfileConstants._
+import internal.pickling.UnPickler
+import scala.collection.mutable.{ HashMap, ListBuffer }
+import internal.Flags._
+//import scala.tools.nsc.util.ScalaClassLoader
+//import scala.tools.nsc.util.ScalaClassLoader._
+import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance, scalacShouldntLoadClass}
+import scala.language.existentials
+import scala.runtime.{ScalaRunTime, BoxesRunTime}
+import scala.reflect.internal.util.Collections._
+
+private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { thisUniverse: SymbolTable =>
+
+ private lazy val mirrors = new WeakHashMap[ClassLoader, WeakReference[JavaMirror]]()
+
+ private def createMirror(owner: Symbol, cl: ClassLoader): Mirror = {
+ val jm = new JavaMirror(owner, cl)
+ mirrors(cl) = new WeakReference(jm)
+ jm.init()
+ jm
+ }
+
+ override type RuntimeClass = java.lang.Class[_]
+
+ override type Mirror = JavaMirror
+
+ override lazy val rootMirror: Mirror = createMirror(NoSymbol, rootClassLoader)
+
+ // overriden by ReflectGlobal
+ def rootClassLoader: ClassLoader = this.getClass.getClassLoader
+
+ trait JavaClassCompleter extends FlagAssigningCompleter
+
+ def init() = {
+ definitions.AnyValClass // force it.
+
+ // establish root association to avoid cyclic dependency errors later
+ rootMirror.classToScala(classOf[java.lang.Object]).initialize
+
+ // println("initializing definitions")
+ definitions.init()
+ }
+
+ def runtimeMirror(cl: ClassLoader): Mirror = mirrors get cl match {
+ case Some(WeakReference(m)) => m
+ case _ => createMirror(rootMirror.RootClass, cl)
+ }
+
+ /** The API of a mirror for a reflective universe */
+ class JavaMirror(owner: Symbol,
+ /** Class loader that is a mastermind behind the reflexive mirror */
+ val classLoader: ClassLoader
+ ) extends Roots(owner) with super.JavaMirror { thisMirror =>
+
+ val universe: thisUniverse.type = thisUniverse
+
+ import definitions._
+
+ /** The lazy type for root.
+ */
+ override lazy val rootLoader = new LazyType with FlagAgnosticCompleter {
+ override def complete(sym: Symbol) = sym setInfo new LazyPackageType
+ }
+
+ // reflective mirrors can't know the exhaustive list of available packages
+ // (that's because compiler mirrors are based on directories and reflective mirrors are based on classloaders,
+ // and unlike directories classloaders might make up stuff on the fly)
+ // hence we need to be optimistic and create packages out of thin air
+ // the same thing is done by the `missingHook` below
+ override def staticPackage(fullname: String): ModuleSymbol =
+ try super.staticPackage(fullname)
+ catch {
+ case _: MissingRequirementError =>
+ makeScalaPackage(fullname)
+ }
+
+// ----------- Caching ------------------------------------------------------------------
+
+ private val classCache = new TwoWayCache[jClass[_], ClassSymbol]
+ private val packageCache = new TwoWayCache[Package, ModuleSymbol]
+ private val methodCache = new TwoWayCache[jMethod, MethodSymbol]
+ private val constructorCache = new TwoWayCache[jConstructor[_], MethodSymbol]
+ private val fieldCache = new TwoWayCache[jField, TermSymbol]
+ private val tparamCache = new TwoWayCache[jTypeVariable[_ <: GenericDeclaration], TypeSymbol]
+
+ private[runtime] def toScala[J: HasJavaClass, S](cache: TwoWayCache[J, S], key: J)(body: (JavaMirror, J) => S): S =
+ cache.toScala(key){
+ val jclazz = implicitly[HasJavaClass[J]] getClazz key
+ body(mirrorDefining(jclazz), key)
+ }
+
+ private implicit val classHasJavaClass: HasJavaClass[jClass[_]] =
+ new HasJavaClass(identity)
+ private implicit val methHasJavaClass: HasJavaClass[jMethod]
+ = new HasJavaClass(_.getDeclaringClass)
+ private implicit val fieldHasJavaClass: HasJavaClass[jField] =
+ new HasJavaClass(_.getDeclaringClass)
+ private implicit val constrHasJavaClass: HasJavaClass[jConstructor[_]] =
+ new HasJavaClass(_.getDeclaringClass)
+ private implicit val tparamHasJavaClass: HasJavaClass[jTypeVariable[_ <: GenericDeclaration]] =
+ new HasJavaClass ( (tparam: jTypeVariable[_ <: GenericDeclaration]) => {
+ tparam.getGenericDeclaration match {
+ case jclazz: jClass[_] => jclazz
+ case jmeth: jMethod => jmeth.getDeclaringClass
+ case jconstr: jConstructor[_] => jconstr.getDeclaringClass
+ }
+ })
+
+// ----------- Implementations of mirror operations and classes -------------------
+
+ private def ErrorInnerClass(sym: Symbol) = throw new ScalaReflectionException(s"$sym is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror")
+ private def ErrorInnerModule(sym: Symbol) = throw new ScalaReflectionException(s"$sym is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror")
+ private def ErrorStaticClass(sym: Symbol) = throw new ScalaReflectionException(s"$sym is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror")
+ private def ErrorStaticModule(sym: Symbol) = throw new ScalaReflectionException(s"$sym is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror")
+ private def ErrorNotMember(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a member of $owner, you provided ${sym.kindString} ${sym.fullName}")
+ private def ErrorNotField(sym: Symbol) = throw new ScalaReflectionException(s"expected a field or an accessor method symbol, you provided $sym")
+ private def ErrorNonExistentField(sym: Symbol) = throw new ScalaReflectionException(
+ sm"""Scala field ${sym.name} isn't represented as a Java field, neither it has a Java accessor method
+ |note that private parameters of class constructors don't get mapped onto fields and/or accessors,
+ |unless they are used outside of their declaring constructors.""")
+ @deprecated("corresponding check has been removed from FieldMirror.set, this method is also being phased out", "2.11.0")
+ private def ErrorSetImmutableField(sym: Symbol) = throw new ScalaReflectionException(s"cannot set an immutable field ${sym.name}")
+ private def ErrorNotConstructor(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a constructor of $owner, you provided $sym")
+ private def ErrorFree(member: Symbol, freeType: Symbol) = throw new ScalaReflectionException(s"cannot reflect ${member.kindString} ${member.name}, because it's a member of a weak type ${freeType.name}")
+
+ /** Helper functions for extracting typed values from a (Class[_], Any)
+ * representing an annotation argument.
+ */
+ private object toAnnotArg {
+ val StringClass = classOf[String]
+ val ClassClass = classOf[jClass[_]]
+ object PrimitiveClass { def unapply(x: jClass[_]) = x.isPrimitive }
+ object EnumClass { def unapply(x: jClass[_]) = x.isEnum }
+ object ArrayClass { def unapply(x: jClass[_]) = x.isArray }
+ object AnnotationClass { def unapply(x: jClass[_]) = x.isAnnotation }
+
+ object ConstantArg {
+ def enumToSymbol(enum: Enum[_]): Symbol = {
+ val staticPartOfEnum = classToScala(enum.getClass).companionSymbol
+ staticPartOfEnum.typeSignature.declaration(enum.name: TermName)
+ }
+
+ def unapply(schemaAndValue: (jClass[_], Any)): Option[Any] = schemaAndValue match {
+ case (StringClass | PrimitiveClass(), value) => Some(value)
+ case (ClassClass, value: jClass[_]) => Some(classToScala(value).toType)
+ case (EnumClass(), value: Enum[_]) => Some(enumToSymbol(value))
+ case _ => None
+ }
+ }
+ def apply(schemaAndValue: (jClass[_], Any)): ClassfileAnnotArg = schemaAndValue match {
+ case ConstantArg(value) => LiteralAnnotArg(Constant(value))
+ case (clazz @ ArrayClass(), value: Array[_]) => ArrayAnnotArg(value map (x => apply(ScalaRunTime.arrayElementClass(clazz) -> x)))
+ case (AnnotationClass(), value: jAnnotation) => NestedAnnotArg(JavaAnnotationProxy(value))
+ case _ => UnmappableAnnotArg
+ }
+ }
+ private case class JavaAnnotationProxy(jann: jAnnotation) extends AnnotationInfo {
+ override val atp: Type = classToScala(jann.annotationType).toType
+ override val args: List[Tree] = Nil
+ override def original: Tree = EmptyTree
+ override def setOriginal(t: Tree): this.type = throw new Exception("setOriginal inapplicable for " + this)
+ override def pos: Position = NoPosition
+ override def setPos(pos: Position): this.type = throw new Exception("setPos inapplicable for " + this)
+ override def toString = completeAnnotationToString(this)
+
+ // todo. find out the exact order of assocs as they are written in the class file
+ // currently I'm simply sorting the methods to guarantee stability of the output
+ override lazy val assocs: List[(Name, ClassfileAnnotArg)] = (
+ jann.annotationType.getDeclaredMethods.sortBy(_.getName).toList map (m =>
+ (m.getName: TermName) -> toAnnotArg(m.getReturnType -> m.invoke(jann))
+ )
+ )
+ }
+
+ def reflect[T: ClassTag](obj: T): InstanceMirror = new JavaInstanceMirror(obj)
+
+ def reflectClass(cls: ClassSymbol): ClassMirror = {
+ if (!cls.isStatic) ErrorInnerClass(cls)
+ new JavaClassMirror(null, cls)
+ }
+
+ def reflectModule(mod: ModuleSymbol): ModuleMirror = {
+ if (!mod.isStatic) ErrorInnerModule(mod)
+ new JavaModuleMirror(null, mod)
+ }
+
+ def runtimeClass(tpe: Type): RuntimeClass = typeToJavaClass(tpe)
+
+ def runtimeClass(cls: ClassSymbol): RuntimeClass = classToJava(cls)
+
+ def classSymbol(rtcls: RuntimeClass): ClassSymbol = classToScala(rtcls)
+
+ def moduleSymbol(rtcls: RuntimeClass): ModuleSymbol = classToScala(rtcls).companionModule.asModule
+
+ private def ensuringNotFree(sym: Symbol)(body: => Any) {
+ val freeType = sym.ownerChain find (_.isFreeType)
+ freeType match {
+ case Some(freeType) => ErrorFree(sym, freeType)
+ case _ => body
+ }
+ }
+
+ private def checkMemberOf(sym: Symbol, owner: ClassSymbol) {
+ if (sym.owner == AnyClass || sym.owner == AnyRefClass || sym.owner == ObjectClass) {
+ // do nothing
+ } else if (sym.owner == AnyValClass) {
+ if (!owner.isPrimitiveValueClass && !owner.isDerivedValueClass) ErrorNotMember(sym, owner)
+ } else {
+ ensuringNotFree(sym) {
+ if (!(owner.info.baseClasses contains sym.owner)) ErrorNotMember(sym, owner)
+ }
+ }
+ }
+
+ private def checkConstructorOf(sym: Symbol, owner: ClassSymbol) {
+ if (!sym.isClassConstructor) ErrorNotConstructor(sym, owner)
+ ensuringNotFree(sym) {
+ if (!owner.info.decls.toList.contains(sym)) ErrorNotConstructor(sym, owner)
+ }
+ }
+
+ private def preciseClass[T: ClassTag](instance: T) = {
+ val staticClazz = classTag[T].runtimeClass
+ val dynamicClazz = instance.getClass
+ if (staticClazz.isPrimitive) staticClazz else dynamicClazz
+ }
+
+ private class JavaInstanceMirror[T: ClassTag](val instance: T)
+ extends InstanceMirror {
+ def symbol = thisMirror.classSymbol(preciseClass(instance))
+ def reflectField(field: TermSymbol): FieldMirror = {
+ checkMemberOf(field, symbol)
+ if ((field.isMethod && !field.isAccessor) || field.isModule) ErrorNotField(field)
+ val name =
+ if (field.isGetter) nme.getterToLocal(field.name)
+ else if (field.isSetter) nme.getterToLocal(nme.setterToGetter(field.name))
+ else field.name
+ val field1 = (field.owner.info decl name).asTerm
+ try fieldToJava(field1)
+ catch {
+ case _: NoSuchFieldException => ErrorNonExistentField(field1)
+ }
+ new JavaFieldMirror(instance, field1)
+ }
+ def reflectMethod(method: MethodSymbol): MethodMirror = {
+ checkMemberOf(method, symbol)
+ mkJavaMethodMirror(instance, method)
+ }
+ def reflectClass(cls: ClassSymbol): ClassMirror = {
+ if (cls.isStatic) ErrorStaticClass(cls)
+ checkMemberOf(cls, symbol)
+ new JavaClassMirror(instance.asInstanceOf[AnyRef], cls)
+ }
+ def reflectModule(mod: ModuleSymbol): ModuleMirror = {
+ if (mod.isStatic) ErrorStaticModule(mod)
+ checkMemberOf(mod, symbol)
+ new JavaModuleMirror(instance.asInstanceOf[AnyRef], mod)
+ }
+ override def toString = s"instance mirror for $instance"
+ }
+
+ private class JavaFieldMirror(val receiver: Any, val symbol: TermSymbol)
+ extends FieldMirror {
+ lazy val jfield = {
+ val jfield = fieldToJava(symbol)
+ if (!jfield.isAccessible) jfield.setAccessible(true)
+ jfield
+ }
+ def get = jfield.get(receiver)
+ def set(value: Any) = {
+ // it appears useful to be able to set values of vals, therefore I'm disabling this check
+ // if (!symbol.isMutable) ErrorSetImmutableField(symbol)
+ jfield.set(receiver, value)
+ }
+ // this dummy method is necessary to prevent the optimizer from stripping off ErrorSetImmutableField
+ // which would break binary compatibility with 2.10.0
+ private def dummy(symbol: Symbol) = ErrorSetImmutableField(symbol)
+ override def toString = s"field mirror for ${symbol.fullName} (bound to $receiver)"
+ }
+
+ private def showMethodSig(symbol: MethodSymbol): String = {
+ var sig = s"${symbol.fullName}"
+ if (symbol.typeParams.nonEmpty) {
+ def showTparam(tparam: Symbol) =
+ tparam.typeSignature match {
+ case tpe @ TypeBounds(_, _) => s"${tparam.name}$tpe"
+ case _ => tparam.name
+ }
+ def showTparams(tparams: List[Symbol]) = "[" + (tparams map showTparam mkString ", ") + "]"
+ sig += showTparams(symbol.typeParams)
+ }
+ if (symbol.paramss.nonEmpty) {
+ def showParam(param: Symbol) = s"${param.name}: ${param.typeSignature}"
+ def showParams(params: List[Symbol]) = {
+ val s_mods = if (params.nonEmpty && params(0).hasFlag(IMPLICIT)) "implicit " else ""
+ val s_params = params map showParam mkString ", "
+ "(" + s_mods + s_params + ")"
+ }
+ def showParamss(paramss: List[List[Symbol]]) = paramss map showParams mkString ""
+ sig += showParamss(symbol.paramss)
+ }
+ sig += s": ${symbol.returnType}"
+ sig
+ }
+
+ // the "symbol == Any_getClass || symbol == Object_getClass" test doesn't cut it
+ // because both AnyVal and its primitive descendants define their own getClass methods
+ private def isGetClass(meth: MethodSymbol) = meth.name.toString == "getClass" && meth.paramss.flatten.isEmpty
+ private def isStringConcat(meth: MethodSymbol) = meth == String_+ || (meth.owner.isPrimitiveValueClass && meth.returnType =:= StringClass.toType)
+ lazy val bytecodelessMethodOwners = Set[Symbol](AnyClass, AnyValClass, AnyRefClass, ObjectClass, ArrayClass) ++ ScalaPrimitiveValueClasses
+ lazy val bytecodefulObjectMethods = Set[Symbol](Object_clone, Object_equals, Object_finalize, Object_hashCode, Object_toString,
+ Object_notify, Object_notifyAll) ++ ObjectClass.info.member(nme.wait_).asTerm.alternatives.map(_.asMethod)
+ private def isBytecodelessMethod(meth: MethodSymbol): Boolean = {
+ if (isGetClass(meth) || isStringConcat(meth) || meth.owner.isPrimitiveValueClass || meth == Predef_classOf || meth.isTermMacro) return true
+ bytecodelessMethodOwners(meth.owner) && !bytecodefulObjectMethods(meth)
+ }
+
+ // unlike other mirrors, method mirrors are created by a factory
+ // that's because we want to have decent performance
+ // therefore we move special cases into separate subclasses
+ // rather than have them on a hot path them in a unified implementation of the `apply` method
+ private def mkJavaMethodMirror[T: ClassTag](receiver: T, symbol: MethodSymbol): JavaMethodMirror = {
+ if (isBytecodelessMethod(symbol)) new JavaBytecodelessMethodMirror(receiver, symbol)
+ else if (symbol.paramss.flatten exists (p => isByNameParamType(p.info))) new JavaByNameMethodMirror(receiver, symbol)
+ else new JavaVanillaMethodMirror(receiver, symbol)
+ }
+
+ private abstract class JavaMethodMirror(val symbol: MethodSymbol)
+ extends MethodMirror {
+ lazy val jmeth = {
+ val jmeth = methodToJava(symbol)
+ if (!jmeth.isAccessible) jmeth.setAccessible(true)
+ jmeth
+ }
+
+ def jinvoke(jmeth: jMethod, receiver: Any, args: Seq[Any]): Any = {
+ val result = jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*)
+ if (jmeth.getReturnType == java.lang.Void.TYPE) ()
+ else result
+ }
+
+ override def toString = s"method mirror for ${showMethodSig(symbol)} (bound to $receiver)"
+ }
+
+ private class JavaVanillaMethodMirror(val receiver: Any, symbol: MethodSymbol)
+ extends JavaMethodMirror(symbol) {
+ def apply(args: Any*): Any = jinvoke(jmeth, receiver, args)
+ }
+
+ private class JavaByNameMethodMirror(val receiver: Any, symbol: MethodSymbol)
+ extends JavaMethodMirror(symbol) {
+ def apply(args: Any*): Any = {
+ val transformed = map2(args.toList, symbol.paramss.flatten)((arg, param) => if (isByNameParamType(param.info)) () => arg else arg)
+ jinvoke(jmeth, receiver, transformed)
+ }
+ }
+
+ // Mirror for methods that have no bytecode counterpart (methods of Any/AnyRef/Array,
+ // primitive value class methods, String.+, Predef.classOf, term macros):
+ // invocation is emulated by dispatching on the symbol rather than calling
+ // through java.lang.reflect.
+ private class JavaBytecodelessMethodMirror[T: ClassTag](val receiver: T, symbol: MethodSymbol)
+ extends JavaMethodMirror(symbol) {
+ def apply(args: Any*): Any = {
+ // checking type conformance is too much of a hassle, so we don't do it here
+ // actually it's not even necessary, because we manually dispatch arguments below
+ val params = symbol.paramss.flatten
+ val perfectMatch = args.length == params.length
+ // todo. this doesn't account for multiple vararg parameter lists
+ // however those aren't supported by the mirror API: https://issues.scala-lang.org/browse/SI-6182
+ // hence I leave this code as is, to be fixed when the corresponding bug is fixed
+ val varargMatch = args.length >= params.length - 1 && isVarArgsList(params)
+ if (!perfectMatch && !varargMatch) {
+ val n_arguments = if (isVarArgsList(params)) s"${params.length - 1} or more" else s"${params.length}"
+ // fix: was a `var`, but it is never reassigned — `val` states the intent
+ val s_arguments = if (params.length == 1 && !isVarArgsList(params)) "argument" else "arguments"
+ throw new ScalaReflectionException(s"${showMethodSig(symbol)} takes $n_arguments $s_arguments")
+ }
+
+ // boxed views of the receiver and arguments for the emulation below
+ def objReceiver = receiver.asInstanceOf[AnyRef]
+ def objArg0 = args(0).asInstanceOf[AnyRef]
+ def objArgs = args.asInstanceOf[Seq[AnyRef]]
+ def fail(msg: String) = throw new ScalaReflectionException(msg + ", it cannot be invoked with mirrors")
+
+ // primitive value class methods are routed to the matching static helper in BoxesRunTime
+ def invokePrimitiveMethod = {
+ val jmeths = classOf[BoxesRunTime].getDeclaredMethods.filter(_.getName == nme.primitiveMethodName(symbol.name).toString)
+ assert(jmeths.length == 1, jmeths.toList)
+ jinvoke(jmeths.head, null, objReceiver +: objArgs)
+ }
+
+ symbol match {
+ case Any_== | Object_== => ScalaRunTime.inlinedEquals(objReceiver, objArg0)
+ case Any_!= | Object_!= => !ScalaRunTime.inlinedEquals(objReceiver, objArg0)
+ case Any_## | Object_## => ScalaRunTime.hash(objReceiver)
+ case Any_equals => receiver.equals(objArg0)
+ case Any_hashCode => receiver.hashCode
+ case Any_toString => receiver.toString
+ case Object_eq => objReceiver eq objArg0
+ case Object_ne => objReceiver ne objArg0
+ case Object_synchronized => objReceiver.synchronized(objArg0)
+ case sym if isGetClass(sym) => preciseClass(receiver)
+ case Any_asInstanceOf => fail("Any.asInstanceOf requires a type argument")
+ case Any_isInstanceOf => fail("Any.isInstanceOf requires a type argument")
+ case Object_asInstanceOf => fail("AnyRef.$asInstanceOf is an internal method")
+ case Object_isInstanceOf => fail("AnyRef.$isInstanceOf is an internal method")
+ case Array_length => ScalaRunTime.array_length(objReceiver)
+ case Array_apply => ScalaRunTime.array_apply(objReceiver, args(0).asInstanceOf[Int])
+ case Array_update => ScalaRunTime.array_update(objReceiver, args(0).asInstanceOf[Int], args(1))
+ case Array_clone => ScalaRunTime.array_clone(objReceiver)
+ case sym if isStringConcat(sym) => receiver.toString + objArg0
+ case sym if sym.owner.isPrimitiveValueClass => invokePrimitiveMethod
+ case sym if sym == Predef_classOf => fail("Predef.classOf is a compile-time function")
+ case sym if sym.isTermMacro => fail(s"${symbol.fullName} is a macro, i.e. a compile-time function")
+ case _ => abort(s"unsupported symbol $symbol when invoking $this")
+ }
+ }
+ }
+
+ // Mirror for constructors. `outer`, when non-null, is the enclosing instance of an
+ // inner class and is prepended to the argument list (Java inner-class calling convention).
+ private class JavaConstructorMirror(val outer: AnyRef, val symbol: MethodSymbol)
+ extends MethodMirror {
+ override val receiver = outer
+ // resolved lazily; setAccessible(true) bypasses Java access checks
+ lazy val jconstr = {
+ val jconstr = constructorToJava(symbol)
+ if (!jconstr.isAccessible) jconstr.setAccessible(true)
+ jconstr
+ }
+ def apply(args: Any*): Any = {
+ // arrays have no real constructor; direct users to ClassTag.newArray instead
+ if (symbol.owner == ArrayClass)
+ throw new ScalaReflectionException("Cannot instantiate arrays with mirrors. Consider using `scala.reflect.ClassTag(<class of element>).newArray(<length>)` instead")
+
+ val effectiveArgs =
+ if (outer == null) args.asInstanceOf[Seq[AnyRef]]
+ else outer +: args.asInstanceOf[Seq[AnyRef]]
+ jconstr.newInstance(effectiveArgs: _*)
+ }
+ override def toString = s"constructor mirror for ${showMethodSig(symbol)} (bound to $outer)"
+ }
+
+ // Common base of class and module mirrors: ties a symbol to its Java erasure
+ // and derives the Scala signature type from the erased Java class.
+ private abstract class JavaTemplateMirror
+ extends TemplateMirror {
+ def outer: AnyRef
+ def erasure: ClassSymbol
+ lazy val signature = typeToScala(classToJava(erasure))
+ }
+
+ // Mirror for a class: exposes constructor reflection; `outer` is the enclosing
+ // instance for inner classes (null for top-level classes).
+ private class JavaClassMirror(val outer: AnyRef, val symbol: ClassSymbol)
+ extends JavaTemplateMirror with ClassMirror {
+ def erasure = symbol
+ def isStatic = false
+ def reflectConstructor(constructor: MethodSymbol) = {
+ checkConstructorOf(constructor, symbol)
+ new JavaConstructorMirror(outer, constructor)
+ }
+ override def toString = s"class mirror for ${symbol.fullName} (bound to $outer)"
+ }
+
+ // Mirror for a module (object): `instance` fetches the singleton, choosing the
+ // lookup strategy by where the module lives (top-level, static nested, or inner).
+ private class JavaModuleMirror(val outer: AnyRef, val symbol: ModuleSymbol)
+ extends JavaTemplateMirror with ModuleMirror {
+ def erasure = symbol.moduleClass.asClass
+ def isStatic = true
+ def instance = {
+ if (symbol.owner.isPackageClass)
+ staticSingletonInstance(classLoader, symbol.fullName)
+ else
+ if (outer == null) staticSingletonInstance(classToJava(symbol.moduleClass.asClass))
+ else innerSingletonInstance(outer, symbol.name)
+ }
+ override def toString = s"module mirror for ${symbol.fullName} (bound to $outer)"
+ }
+
+// -------------------- Java to Scala -----------------------------------
+
+ /** Does method `meth` erase to Java method `jmeth`?
+ * This is true if the Java method type is the same as the Scala method type after performing
+ * all Scala-specific transformations in InfoTransformers. (to be done)
+ */
+ private def erasesTo(meth: Symbol, jmeth: jMethod): Boolean = {
+ val mtpe = transformedType(meth)
+ (mtpe.paramTypes map runtimeClass) == jmeth.getParameterTypes.toList &&
+ runtimeClass(mtpe.resultType) == jmeth.getReturnType
+ }
+
+ /** Constructor overload of the above: parameter types must match, and the erased
+ * result type must be the constructed (declaring) class itself. */
+ private def erasesTo(meth: Symbol, jconstr: jConstructor[_]): Boolean = {
+ val mtpe = transformedType(meth)
+ (mtpe.paramTypes map runtimeClass) == jconstr.getParameterTypes.toList &&
+ runtimeClass(mtpe.resultType) == jconstr.getDeclaringClass
+ }
+
+ /** Load and initialize the Java class with fully qualified name `path`
+ * in this mirror's class loader. Throws if the class is absent. */
+ def javaClass(path: String): jClass[_] =
+ Class.forName(path, true, classLoader)
+
+ /** Does `path` correspond to a Java class with that fully qualified name in the current class loader? */
+ def tryJavaClass(path: String): Option[jClass[_]] =
+ try {
+ Some(javaClass(path))
+ } catch {
+ case (_: ClassNotFoundException) | (_: NoClassDefFoundError) | (_: IncompatibleClassChangeError) =>
+ None
+ }
+
+ /** The mirror that corresponds to the class loader that originally defined the given Java class. */
+ def mirrorDefining(jclazz: jClass[_]): JavaMirror = {
+ val cl = jclazz.getClassLoader
+ if (cl == this.classLoader) this else runtimeMirror(cl)
+ }
+
+ // the unpickler used below to decode ScalaSignature bytes into symbol infos
+ private object unpickler extends UnPickler {
+ val global: thisUniverse.type = thisUniverse
+ }
+
+ /** Generate types for a top-level Scala class and its companion object
+ * from the pickled information stored in a corresponding Java class.
+ * Falls back to a Java-reflection-based completer when no pickle is present.
+ * @param clazz The top-level Scala class for which info is unpickled
+ * @param module The top-level Scala companion object for which info is unpickled
+ * @param jclazz The Java class which contains the unpickled information in a
+ * ScalaSignature or ScalaLongSignature annotation.
+ */
+ def unpickleClass(clazz: Symbol, module: Symbol, jclazz: jClass[_]): Unit = {
+ def markAbsent(tpe: Type) = setAllInfos(clazz, module, tpe)
+ // mark both symbols erroneous and surface a MissingRequirementError with context
+ def handleError(ex: Exception) = {
+ markAbsent(ErrorType)
+ if (settings.debug.value) ex.printStackTrace()
+ val msg = ex.getMessage()
+ MissingRequirementError.signal(
+ (if (msg eq null) "reflection error while loading " + clazz.name
+ else "error while loading " + clazz.name) + ", " + msg)
+ }
+ // don't use classOf[scala.reflect.ScalaSignature] here, because it will use getClass.getClassLoader, not mirror's classLoader
+ // don't use asInstanceOf either because of the same reason (lol, I cannot believe I fell for it)
+ // don't use structural types to simplify reflective invocations because of the same reason
+ def loadAnnotation(name: String): Option[java.lang.annotation.Annotation] =
+ tryJavaClass(name) flatMap { annotClass =>
+ val anns = jclazz.getAnnotations
+ val result = anns find (_.annotationType == annotClass)
+ // same-named annotation from a different loader means the mirror's loader is unrelated
+ if (result.isEmpty && (anns exists (_.annotationType.getName == name)))
+ throw new ClassNotFoundException(
+ sm"""Mirror classloader mismatch: $jclazz (loaded by ${ReflectionUtils.show(jclazz.getClassLoader)})
+ |is unrelated to the mirror's classloader: (${ReflectionUtils.show(classLoader)})""")
+ result
+ }
+ // reflectively read the annotation's `bytes` element (String or Array[String])
+ def loadBytes[T: ClassTag](name: String): Option[T] =
+ loadAnnotation(name) map { ssig =>
+ val bytesMethod = ssig.annotationType.getMethod("bytes")
+ bytesMethod.invoke(ssig).asInstanceOf[T]
+ }
+
+ try {
+ markAbsent(NoType)
+ loadBytes[String]("scala.reflect.ScalaSignature") match {
+ case Some(ssig) =>
+ info(s"unpickling Scala $clazz and $module, owner = ${clazz.owner}")
+ val bytes = ssig.getBytes
+ val len = ByteCodecs.decode(bytes)
+ unpickler.unpickle(bytes take len, 0, clazz, module, jclazz.getName)
+ case None =>
+ loadBytes[Array[String]]("scala.reflect.ScalaLongSignature") match {
+ case Some(slsig) =>
+ info(s"unpickling Scala $clazz and $module with long Scala signature")
+ val encoded = slsig flatMap (_.getBytes)
+ val len = ByteCodecs.decode(encoded)
+ val decoded = encoded.take(len)
+ unpickler.unpickle(decoded, 0, clazz, module, jclazz.getName)
+ case None =>
+ // class does not have a Scala signature; it's a Java class
+ info("translating reflection info for Java " + jclazz) //debug
+ initClassModule(clazz, module, new FromJavaClassCompleter(clazz, module, jclazz))
+ }
+ }
+ } catch {
+ case ex: MissingRequirementError =>
+ handleError(ex)
+ case ex: IOException =>
+ handleError(ex)
+ }
+ }
+
+ /**
+ * A fresh Scala type parameter that corresponds to a Java type variable.
+ * The association between Scala type parameter and Java type variable is entered in the cache.
+ * @param jtvar The Java type variable
+ * @return a type-parameter symbol whose bounds are computed lazily by TypeParamCompleter
+ */
+ private def createTypeParameter(jtvar: jTypeVariable[_ <: GenericDeclaration]): TypeSymbol = {
+ val tparam = sOwner(jtvar).newTypeParameter(newTypeName(jtvar.getName))
+ .setInfo(new TypeParamCompleter(jtvar))
+ tparamCache enter (jtvar, tparam)
+ tparam
+ }
+
+ /**
+ * A completer that fills in the type of a Scala type parameter from the bounds of a Java type variable.
+ * Java only has upper bounds, so the result is TypeBounds.upper of the glb of all bounds.
+ * @param jtvar The Java type variable
+ */
+ private class TypeParamCompleter(jtvar: jTypeVariable[_ <: GenericDeclaration]) extends LazyType with FlagAgnosticCompleter {
+ override def load(sym: Symbol) = complete(sym)
+ override def complete(sym: Symbol) = {
+ sym setInfo TypeBounds.upper(glb(jtvar.getBounds.toList map typeToScala map objToAny))
+ }
+ }
+
+ /**
+ * Copy all annotations of Java annotated element `jann` over to Scala symbol `sym`.
+ * Also creates `@throws` annotations if necessary.
+ * Pre: `sym` is already initialized with a concrete type.
+ * Note: If `sym` is a method or constructor, its parameter annotations are copied as well.
+ */
+ private def copyAnnotations(sym: Symbol, jann: AnnotatedElement) {
+ sym setAnnotations (jann.getAnnotations map JavaAnnotationProxy).toList
+ // SI-7065: we're not using getGenericExceptionTypes here to be consistent with ClassfileParser
+ val jexTpes = jann match {
+ case jm: jMethod => jm.getExceptionTypes.toList
+ case jconstr: jConstructor[_] => jconstr.getExceptionTypes.toList
+ case _ => Nil
+ }
+ jexTpes foreach (jexTpe => sym.addThrowsAnnotation(classSymbol(jexTpe)))
+ }
+
+ /**
+ * A completer that fills in the types of a Scala class and its companion object
+ * by copying corresponding type info from a Java class. This completer is used
+ * to reflect classes in Scala that do not have a Scala pickle info, be it
+ * because they are local classes or have been compiled from Java sources.
+ * @param clazz The Scala class for which info is copied
+ * @param module The Scala companion object for which info is copied
+ * @param jclazz The Java class
+ */
+ private class FromJavaClassCompleter(clazz: Symbol, module: Symbol, jclazz: jClass[_]) extends LazyType with JavaClassCompleter with FlagAssigningCompleter {
+
+ /** used to avoid cycles while initializing classes */
+ private var parentsLevel = 0
+ // member-entering thunks deferred until all parents are resolved (see completeRest)
+ private var pendingLoadActions: List[() => Unit] = Nil
+ private val relatedSymbols = clazz +: (if (module != NoSymbol) List(module, module.moduleClass) else Nil)
+
+ // first phase: assign flags, annotations, and a LazyPolyType carrying the type params
+ override def load(sym: Symbol): Unit = {
+ debugInfo("completing from Java " + sym + "/" + clazz.fullName)//debug
+ assert(sym == clazz || (module != NoSymbol && (sym == module || sym == module.moduleClass)), sym)
+ val flags = toScalaClassFlags(jclazz.getModifiers)
+ clazz setFlag (flags | JAVA)
+ if (module != NoSymbol) {
+ module setFlag (flags & PRIVATE | JAVA)
+ module.moduleClass setFlag (flags & PRIVATE | JAVA)
+ }
+
+ relatedSymbols foreach (importPrivateWithinFromJavaFlags(_, jclazz.getModifiers))
+ copyAnnotations(clazz, jclazz)
+ // to do: annotations to set also for module?
+
+ clazz setInfo new LazyPolyType(jclazz.getTypeParameters.toList map createTypeParameter)
+ if (module != NoSymbol) {
+ module setInfo module.moduleClass.tpe
+ module.moduleClass setInfo new LazyPolyType(List())
+ }
+ }
+
+ override def complete(sym: Symbol): Unit = {
+ load(sym)
+ completeRest()
+ }
+
+ // second phase: resolve parents and enter all members; synchronized on the universe
+ def completeRest(): Unit = thisUniverse.synchronized {
+ val tparams = clazz.rawInfo.typeParams
+
+ // parentsLevel guards against cycles: member entry below is deferred while > 0
+ val parents = try {
+ parentsLevel += 1
+ val jsuperclazz = jclazz.getGenericSuperclass
+ val ifaces = jclazz.getGenericInterfaces.toList map typeToScala
+ val isAnnotation = (jclazz.getModifiers & JAVA_ACC_ANNOTATION) != 0
+ if (isAnnotation) AnnotationClass.tpe :: ClassfileAnnotationClass.tpe :: ifaces
+ else (if (jsuperclazz == null) AnyClass.tpe else typeToScala(jsuperclazz)) :: ifaces
+ } finally {
+ parentsLevel -= 1
+ }
+ clazz setInfo GenPolyType(tparams, new ClassInfoType(parents, newScope, clazz))
+ if (module != NoSymbol) {
+ module.moduleClass setInfo new ClassInfoType(List(), newScope, module.moduleClass)
+ }
+
+ // static members go into the companion's module class, instance members into the class
+ def enter(sym: Symbol, mods: Int) =
+ (if (jModifier.isStatic(mods)) module.moduleClass else clazz).info.decls enter sym
+
+ def enterEmptyCtorIfNecessary(): Unit = {
+ if (jclazz.getConstructors.isEmpty)
+ clazz.info.decls.enter(clazz.newClassConstructor(NoPosition))
+ }
+
+ for (jinner <- jclazz.getDeclaredClasses) {
+ jclassAsScala(jinner) // inner class is entered as a side-effect
+ // no need to call enter explicitly
+ }
+
+ pendingLoadActions = { () =>
+
+ for (jfield <- jclazz.getDeclaredFields)
+ enter(jfieldAsScala(jfield), jfield.getModifiers)
+
+ for (jmeth <- jclazz.getDeclaredMethods)
+ enter(jmethodAsScala(jmeth), jmeth.getModifiers)
+
+ for (jconstr <- jclazz.getConstructors)
+ enter(jconstrAsScala(jconstr), jconstr.getModifiers)
+
+ enterEmptyCtorIfNecessary()
+
+ } :: pendingLoadActions
+
+ // only the outermost completion run drains the deferred member-entering actions
+ if (parentsLevel == 0) {
+ while (!pendingLoadActions.isEmpty) {
+ val item = pendingLoadActions.head
+ pendingLoadActions = pendingLoadActions.tail
+ item()
+ }
+ }
+ }
+
+ // placeholder type that remembers type params and completes members on demand
+ class LazyPolyType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
+ override def complete(sym: Symbol) {
+ completeRest()
+ }
+ }
+ }
+
+ /**
+ * If Java modifiers `mods` contain STATIC, return the module class
+ * of the companion module of `clazz`, otherwise the class `clazz` itself.
+ */
+ private def followStatic(clazz: Symbol, mods: Int) =
+ if (jModifier.isStatic(mods)) clazz.companionModule.moduleClass else clazz
+
+ /** Methods which need to be treated with care
+ * because they either are getSimpleName or call getSimpleName:
+ *
+ * public String getSimpleName()
+ * public boolean isAnonymousClass()
+ * public boolean isLocalClass()
+ * public String getCanonicalName()
+ *
+ * A typical manifestation:
+ *
+ * // java.lang.Error: sOwner(class Test$A$1) has failed
+ * // Caused by: java.lang.InternalError: Malformed class name
+ * // at java.lang.Class.getSimpleName(Class.java:1133)
+ * // at java.lang.Class.isAnonymousClass(Class.java:1188)
+ * // at java.lang.Class.isLocalClass(Class.java:1199)
+ * // (see t5256c.scala for more details)
+ *
+ * TODO - find all such calls and wrap them.
+ * TODO - create mechanism to avoid the recurrence of unwrapped calls.
+ */
+ implicit class RichClass(jclazz: jClass[_]) {
+ // `jclazz.isLocalClass` doesn't work because of problems with `getSimpleName`
+ // hence we have to approximate by removing the `isAnonymousClass` check
+// def isLocalClass0: Boolean = jclazz.isLocalClass
+ // approximation: a class is "local" iff it has an enclosing method or constructor
+ def isLocalClass0: Boolean = jclazz.getEnclosingMethod != null || jclazz.getEnclosingConstructor != null
+ }
+
+ /**
+ * The Scala owner of the Scala class corresponding to the Java class `jclazz`.
+ * Dispatches on the kind of class: member, local, primitive/array, or top-level.
+ */
+ private def sOwner(jclazz: jClass[_]): Symbol =
+ if (jclazz.isMemberClass) {
+ val jEnclosingClass = jclazz.getEnclosingClass
+ val sEnclosingClass = classToScala(jEnclosingClass)
+ followStatic(sEnclosingClass, jclazz.getModifiers)
+ } else if (jclazz.isLocalClass0) {
+ // local classes are owned by their enclosing method or constructor
+ val jEnclosingMethod = jclazz.getEnclosingMethod
+ if (jEnclosingMethod != null) {
+ methodToScala(jEnclosingMethod)
+ } else {
+ val jEnclosingConstructor = jclazz.getEnclosingConstructor
+ constructorToScala(jEnclosingConstructor)
+ }
+ } else if (jclazz.isPrimitive || jclazz.isArray) {
+ ScalaPackageClass
+ } else if (jclazz.getPackage != null) {
+ val jPackage = jclazz.getPackage
+ packageToScala(jPackage).moduleClass
+ } else {
+ // @eb: a weird classloader might return a null package for something with a non-empty package name
+ // for example, http://groups.google.com/group/scala-internals/browse_thread/thread/7be09ff8f67a1e5c
+ // in that case we could invoke packageNameToScala(jPackageName) and, probably, be okay
+ // however, I think, it's better to blow up, since weirdness of the class loader might bite us elsewhere
+ // [martin] I think it's better to be forgiving here. Restoring packageNameToScala.
+ val jPackageName = jclazz.getName take jclazz.getName.lastIndexOf('.')
+ packageNameToScala(jPackageName).moduleClass
+ }
+
+ /**
+ * The Scala owner of the Scala symbol corresponding to the Java member `jmember`
+ */
+ private def sOwner(jmember: jMember): Symbol = {
+ followStatic(classToScala(jmember.getDeclaringClass), jmember.getModifiers)
+ }
+
+ /**
+ * The Scala owner of the Scala type parameter corresponding to the Java type variable `jtvar`
+ */
+ private def sOwner(jtvar: jTypeVariable[_ <: GenericDeclaration]): Symbol =
+ genericDeclarationToScala(jtvar.getGenericDeclaration)
+
+ /**
+ * Find declarations or definition in class `clazz` that maps to a Java
+ * entity with name `jname`. Because of name-mangling, this is more difficult
+ * than a simple name-based lookup via `decl`. If `decl` fails, members
+ * that start with the given name are searched instead.
+ * Multiple approximate matches are wrapped into an overloaded symbol.
+ */
+ private def lookup(clazz: Symbol, jname: String): Symbol = {
+ // exact name, or the expanded name of a private member
+ def approximateMatch(sym: Symbol, jstr: String): Boolean =
+ (sym.name.toString == jstr) ||
+ sym.isPrivate && nme.expandedName(sym.name.toTermName, sym.owner).toString == jstr
+
+ clazz.info.decl(newTermName(jname)) orElse {
+ (clazz.info.decls.iterator filter (approximateMatch(_, jname))).toList match {
+ case List() => NoSymbol
+ case List(sym) => sym
+ case alts => clazz.newOverloaded(alts.head.tpe.prefix, alts)
+ }
+ }
+ }
+
+ /**
+ * The Scala method corresponding to given Java method.
+ * @param jmeth The Java method
+ * @return A Scala method object that corresponds to `jmeth`.
+ */
+ def methodToScala(jmeth: jMethod): MethodSymbol =
+ toScala(methodCache, jmeth)(_ methodToScala1 _)
+
+ // cache-miss path: look the method up by erased signature in its Scala owner,
+ // falling back to wrapping the raw Java reflection info
+ private def methodToScala1(jmeth: jMethod): MethodSymbol = {
+ val jOwner = jmeth.getDeclaringClass
+ val preOwner = classToScala(jOwner)
+ val owner = followStatic(preOwner, jmeth.getModifiers)
+ (lookup(owner, jmeth.getName) suchThat (erasesTo(_, jmeth)) orElse jmethodAsScala(jmeth))
+ .asMethod
+ }
+
+ /**
+ * The Scala constructor corresponding to given Java constructor.
+ * @param jconstr The Java constructor
+ * @return A Scala method object that corresponds to `jconstr`.
+ */
+ def constructorToScala(jconstr: jConstructor[_]): MethodSymbol =
+ toScala(constructorCache, jconstr)(_ constructorToScala1 _)
+
+ // cache-miss path: erased-signature lookup with a Java-reflection fallback
+ private def constructorToScala1(jconstr: jConstructor[_]): MethodSymbol = {
+ val owner = followStatic(classToScala(jconstr.getDeclaringClass), jconstr.getModifiers)
+ (lookup(owner, jconstr.getName) suchThat (erasesTo(_, jconstr)) orElse jconstrAsScala(jconstr))
+ .asMethod
+ }
+
+ /**
+ * The Scala field corresponding to given Java field.
+ * @param jfield The Java field
+ * @return A Scala field object that corresponds to `jfield`.
+ * // ??? should we return the getter instead?
+ */
+ def fieldToScala(jfield: jField): TermSymbol =
+ toScala(fieldCache, jfield)(_ fieldToScala1 _)
+
+ // cache-miss path: name lookup excluding methods, with a Java-reflection fallback
+ private def fieldToScala1(jfield: jField): TermSymbol = {
+ val owner = followStatic(classToScala(jfield.getDeclaringClass), jfield.getModifiers)
+ (lookup(owner, jfield.getName) suchThat (!_.isMethod) orElse jfieldAsScala(jfield)).asTerm
+ }
+
+ /**
+ * The Scala package corresponding to given Java package
+ */
+ def packageToScala(jpkg: jPackage): ModuleSymbol = packageCache.toScala(jpkg) {
+ makeScalaPackage(jpkg.getName)
+ }
+
+ /**
+ * The Scala package with given fully qualified name.
+ * The empty string maps to the empty package; a name with no loaded
+ * java.lang.Package counterpart is synthesized via makeScalaPackage.
+ */
+ def packageNameToScala(fullname: String): ModuleSymbol = {
+ if (fullname == "") EmptyPackage
+ else {
+ val jpkg = jPackage.getPackage(fullname)
+ if (jpkg != null) packageToScala(jpkg) else makeScalaPackage(fullname)
+ }
+ }
+
+ /**
+ * The Scala package with given fully qualified name. Unlike `packageNameToScala`,
+ * this one bypasses the cache. Recursively creates any missing owner packages.
+ */
+ private[JavaMirrors] def makeScalaPackage(fullname: String): ModuleSymbol = {
+ val split = fullname lastIndexOf '.'
+ val ownerModule: ModuleSymbol =
+ if (split > 0) packageNameToScala(fullname take split) else this.RootPackage
+ val owner = ownerModule.moduleClass
+ val name = (fullname: TermName) drop split + 1
+ val opkg = owner.info decl name
+ if (opkg.isPackage)
+ opkg.asModule
+ else if (opkg == NoSymbol) {
+ // create and enter a fresh package symbol whose members are loaded lazily
+ val pkg = owner.newPackage(name)
+ pkg.moduleClass setInfo new LazyPackageType
+ pkg setInfoAndEnter pkg.moduleClass.tpe
+ info("made Scala "+pkg)
+ pkg
+ } else
+ throw new ReflectError(opkg+" is not a package")
+ }
+
+ // Compute the unqualified Scala name of `jclazz` by stripping the enclosing
+ // class prefix (plus the `$` module suffix when the owner is an object)
+ // and any leading package path from the Java binary name.
+ private def scalaSimpleName(jclazz: jClass[_]): TypeName = {
+ val owner = sOwner(jclazz)
+ val enclosingClass = jclazz.getEnclosingClass
+ var prefix = if (enclosingClass != null) enclosingClass.getName else ""
+ val isObject = owner.isModuleClass && !owner.isPackageClass
+ if (isObject && !prefix.endsWith(nme.MODULE_SUFFIX_STRING)) prefix += nme.MODULE_SUFFIX_STRING
+ assert(jclazz.getName.startsWith(prefix))
+ var name = jclazz.getName.substring(prefix.length)
+ name = name.substring(name.lastIndexOf(".") + 1)
+ newTypeName(name)
+ }
+
+ /**
+ * The Scala class that corresponds to a given Java class.
+ * @param jclazz The Java class
+ * @return A Scala class symbol that reflects all elements of the Java class,
+ * in the form they appear in the Scala pickling info, or, if that is
+ * not available, wrapped from the Java reflection info.
+ */
+ def classToScala(jclazz: jClass[_]): ClassSymbol =
+ toScala(classCache, jclazz)(_ classToScala1 _)
+
+ // cache-miss path: maps scala.runtime.Nothing$/Null$ to their magic symbols,
+ // otherwise resolves by owner + demangled simple name
+ private def classToScala1(jclazz: jClass[_]): ClassSymbol = {
+ val jname = newTypeName(jclazz.getName)
+ if (jname == fulltpnme.RuntimeNothing) NothingClass
+ else if (jname == fulltpnme.RuntimeNull) NullClass
+ else {
+ val owner = sOwner(jclazz)
+ val simpleName = scalaSimpleName(jclazz)
+
+ def lookupClass = {
+ // retries without a leading name-join char so `$Inner` also resolves as `Inner`
+ def coreLookup(name: Name): Symbol =
+ owner.info.decl(name) orElse {
+ if (name.startsWith(nme.NAME_JOIN_STRING)) coreLookup(name drop 1) else NoSymbol
+ }
+ if (nme.isModuleName(simpleName))
+ coreLookup(nme.stripModuleSuffix(simpleName).toTermName) map (_.moduleClass)
+ else
+ coreLookup(simpleName)
+ }
+
+ val cls =
+ if (jclazz.isMemberClass && !nme.isImplClassName(jname))
+ lookupClass
+ else if (jclazz.isLocalClass0 || scalacShouldntLoadClass(jname))
+ // local classes and implementation classes not preserved by unpickling - treat as Java
+ //
+ // upd. but only if they cannot be loaded as top-level classes
+ // otherwise we may mistake mangled symbolic names for mangled nested names
+ //
+ // in case when a Java binary name can be treated both as a top-level class and as a nested class
+ // (as described in http://groups.google.com/group/scala-internals/browse_thread/thread/10855403bbf04298)
+ // we check for a top-level class first
+ // this is totally correct, because a top-level class and a nested class with the same name cannot coexist
+ // so it's either one or another, but not both - therefore we always load $-bearing classes correctly
+ lookupClass orElse jclassAsScala(jclazz)
+ else if (jclazz.isArray)
+ ArrayClass
+ else
+ javaTypeToValueClass(jclazz) orElse lookupClass
+
+ assert (cls.isType,
+ sm"""${if (cls == NoSymbol) "not a type: symbol" else "no symbol could be"}
+ | loaded from $jclazz in $owner with name $simpleName and classloader $classLoader""")
+
+ cls.asClass
+ }
+ }
+
+ /**
+ * The Scala type parameter that corresponds to a given Java type parameter.
+ * @param jparam The Java type parameter
+ * @return A Scala type parameter symbol that has the same owner and name as the Java type parameter
+ */
+ def typeParamToScala(jparam: jTypeVariable[_ <: GenericDeclaration]): TypeSymbol =
+ toScala(tparamCache, jparam)(_ typeParamToScala1 _)
+
+ // cache-miss path: find the same-named type param on the owner's PolyType
+ // NOTE(review): the match is non-exhaustive — a non-polymorphic owner would
+ // throw a MatchError; presumably unreachable since jparam implies type params
+ private def typeParamToScala1(jparam: jTypeVariable[_ <: GenericDeclaration]): TypeSymbol = {
+ val owner = genericDeclarationToScala(jparam.getGenericDeclaration)
+ owner.info match {
+ case PolyType(tparams, _) => tparams.find(_.name.toString == jparam.getName).get.asType
+ }
+ }
+
+ /**
+ * The Scala symbol that corresponds to a given Java generic declaration (class, method, or constructor)
+ */
+ def genericDeclarationToScala(jdecl: GenericDeclaration): Symbol = jdecl match {
+ case jclazz: jClass[_] => classToScala(jclazz)
+ case jmeth: jMethod => methodToScala(jmeth)
+ case jconstr: jConstructor[_] => constructorToScala(jconstr)
+ }
+
+ /**
+ * Given some Java type arguments, a corresponding list of Scala types, plus potentially
+ * some existentially bound type variables that represent wildcard arguments.
+ * Each `?`-wildcard becomes a fresh existential whose bounds come from the wildcard.
+ */
+ private def targsToScala(owner: Symbol, args: List[jType]): (List[Type], List[TypeSymbol]) = {
+ val tparams = new ListBuffer[TypeSymbol]
+ def targToScala(arg: jType): Type = arg match {
+ case jwild: WildcardType =>
+ val tparam = owner.newExistential(newTypeName("T$" + tparams.length))
+ .setInfo(TypeBounds(
+ lub(jwild.getLowerBounds.toList map typeToScala),
+ glb(jwild.getUpperBounds.toList map typeToScala map objToAny)))
+ tparams += tparam
+ typeRef(NoPrefix, tparam, List())
+ case _ =>
+ typeToScala(arg)
+ }
+ (args map targToScala, tparams.toList)
+ }
+
+ /**
+ * The Scala type that corresponds to given Java type.
+ * Handles raw classes, arrays, parameterized types (wildcards become
+ * existentials) and type variables.
+ */
+ def typeToScala(jtpe: jType): Type = jtpe match {
+ case jclazz: jClass[_] =>
+ if (jclazz.isArray)
+ arrayType(typeToScala(jclazz.getComponentType))
+ else {
+ // a bare jClass is a raw type; convert it to an existential over its type params
+ val clazz = classToScala(jclazz)
+ rawToExistential(typeRef(clazz.owner.thisType, clazz, List()))
+ }
+ case japplied: ParameterizedType =>
+ // http://stackoverflow.com/questions/5767122/parameterizedtype-getrawtype-returns-j-l-r-type-not-class
+ val sym = classToScala(japplied.getRawType.asInstanceOf[jClass[_]])
+ val pre = sym.owner.thisType
+ val args0 = japplied.getActualTypeArguments
+ val (args, bounds) = targsToScala(pre.typeSymbol, args0.toList)
+ newExistentialType(bounds, typeRef(pre, sym, args))
+ case jarr: GenericArrayType =>
+ arrayType(typeToScala(jarr.getGenericComponentType))
+ case jtvar: jTypeVariable[_] =>
+ val tparam = typeParamToScala(jtvar)
+ typeRef(NoPrefix, tparam, List())
+ }
+
+ /**
+ * The Scala class that corresponds to given Java class without taking
+ * Scala pickling info into account.
+ * @param jclazz The Java class
+ * @return A Scala class symbol that wraps all reflection info of `jclazz`
+ */
+ private def jclassAsScala(jclazz: jClass[_]): Symbol = {
+ val clazz = sOwner(jclazz) // sOwner called outside of closure for binary compatibility
+ toScala(classCache, jclazz){ (mirror, jclazz) =>
+ mirror.jclassAsScala(jclazz, clazz)
+ }
+ }
+
+ // creates the class/module symbol pair with a FromJavaClassCompleter attached
+ private def jclassAsScala(jclazz: jClass[_], owner: Symbol): ClassSymbol = {
+ val name = scalaSimpleName(jclazz)
+ val completer = (clazz: Symbol, module: Symbol) => new FromJavaClassCompleter(clazz, module, jclazz)
+ val (clazz, _) = createClassModule(owner, name, completer)
+ clazz
+ }
+
+ /**
+ * The Scala field that corresponds to given Java field without taking
+ * Scala pickling info into account.
+ * @param jfield The Java field
+ * @return A Scala value symbol that wraps all reflection info of `jfield`
+ */
+ private def jfieldAsScala(jfield: jField): TermSymbol =
+ toScala(fieldCache, jfield)(_ jfieldAsScala1 _)
+
+ // cache-miss path; the symbol is entered into the cache before annotations
+ // are copied so cyclic references resolve
+ private def jfieldAsScala1(jfield: jField): TermSymbol = {
+ val field = sOwner(jfield)
+ .newValue(newTermName(jfield.getName), NoPosition, toScalaFieldFlags(jfield.getModifiers))
+ .setInfo(typeToScala(jfield.getGenericType))
+ fieldCache enter (jfield, field)
+ importPrivateWithinFromJavaFlags(field, jfield.getModifiers)
+ copyAnnotations(field, jfield)
+ field
+ }
+
+ // assign `meth` a (possibly polymorphic) method type with fresh synthetic value params;
+ // java.lang.Object param types are widened to Any via objToAny
+ private def setMethType(meth: Symbol, tparams: List[Symbol], paramtpes: List[Type], restpe: Type) = {
+ meth setInfo GenPolyType(tparams, MethodType(meth.owner.newSyntheticValueParams(paramtpes map objToAny), restpe))
+ }
+
+ /**
+ * The Scala method that corresponds to given Java method without taking
+ * Scala pickling info into account.
+ * @param jmeth The Java method
+ * @return A Scala method symbol that wraps all reflection info of `jmethod`
+ */
+ private def jmethodAsScala(jmeth: jMethod): MethodSymbol =
+ toScala(methodCache, jmeth)(_ jmethodAsScala1 _)
+
+ // cache-miss path; the symbol is entered into the cache before its type is
+ // computed so recursive references resolve
+ private def jmethodAsScala1(jmeth: jMethod): MethodSymbol = {
+ val clazz = sOwner(jmeth)
+ val meth = clazz.newMethod(newTermName(jmeth.getName), NoPosition, toScalaMethodFlags(jmeth.getModifiers))
+ methodCache enter (jmeth, meth)
+ val tparams = jmeth.getTypeParameters.toList map createTypeParameter
+ val paramtpes = jmeth.getGenericParameterTypes.toList map typeToScala
+ val resulttpe = typeToScala(jmeth.getGenericReturnType)
+ setMethType(meth, tparams, paramtpes, resulttpe)
+ importPrivateWithinFromJavaFlags(meth, jmeth.getModifiers)
+ copyAnnotations(meth, jmeth)
+ // ACC_VARARGS: rewrite the trailing Array[T] parameter to T* in the Scala view
+ if ((jmeth.getModifiers & JAVA_ACC_VARARGS) != 0) meth.setInfo(arrayToRepeated(meth.info))
+ meth
+ }
+
+ /**
+ * The Scala constructor that corresponds to given Java constructor without taking
+ * Scala pickling info into account.
+ * @param jconstr The Java constructor
+ * @return A Scala constructor symbol that wraps all reflection info of `jconstr`
+ */
+ private def jconstrAsScala(jconstr: jConstructor[_]): MethodSymbol =
+ toScala(constructorCache, jconstr)(_ jconstrAsScala1 _)
+
+ // cache-miss path; the symbol is entered into the cache before its type is
+ // computed so recursive references resolve
+ private def jconstrAsScala1(jconstr: jConstructor[_]): MethodSymbol = {
+ // [Martin] Note: I know there's a lot of duplication wrt jmethodAsScala, but don't think it's worth it to factor this out.
+ val clazz = sOwner(jconstr)
+ val constr = clazz.newConstructor(NoPosition, toScalaMethodFlags(jconstr.getModifiers))
+ constructorCache enter (jconstr, constr)
+ val tparams = jconstr.getTypeParameters.toList map createTypeParameter
+ val paramtpes = jconstr.getGenericParameterTypes.toList map typeToScala
+ // fix: a preceding setMethType(constr, tparams, paramtpes, clazz.tpe) call was dead work —
+ // its setInfo result was immediately overwritten by the setInfo below, so it is removed
+ constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe))
+ importPrivateWithinFromJavaFlags(constr, jconstr.getModifiers)
+ copyAnnotations(constr, jconstr)
+ constr
+ }
+
+// -------------------- Scala to Java -----------------------------------
+
+ /** Optionally, the Java package corresponding to a given Scala package, or None if no such Java package exists.
+ * Note: java.lang.Package.getPackage only sees packages already loaded by the current loader.
+ * @param pkg The Scala package
+ */
+ def packageToJavaOption(pkg: ModuleSymbol): Option[jPackage] = packageCache.toJavaOption(pkg) {
+ Option(jPackage.getPackage(pkg.fullName.toString))
+ }
+
+ /** The Java class corresponding to given Scala class.
+ * Note: This only works for
+ * - top-level classes
+ * - Scala classes that were generated via jclassToScala
+ * - classes that have a class owner that has a corresponding Java class
+ * @throws A `ClassNotFoundException` for all Scala classes not in one of these categories.
+ */
+ @throws(classOf[ClassNotFoundException])
+ def classToJava(clazz: ClassSymbol): jClass[_] = classCache.toJava(clazz) {
+ def noClass = throw new ClassNotFoundException("no Java class corresponding to "+clazz+" found")
+ //println("classToJava "+clazz+" "+clazz.owner+" "+clazz.owner.isPackageClass)//debug
+ if (clazz.isPrimitiveValueClass)
+ valueClassToJavaType(clazz)
+ else if (clazz == ArrayClass)
+ noClass
+ else if (clazz.owner.isPackageClass)
+ javaClass(clazz.javaClassName)
+ else if (clazz.owner.isClass) {
+ val childOfClass = !clazz.owner.isModuleClass
+ val childOfTopLevel = clazz.owner.owner.isPackageClass
+ val childOfTopLevelObject = clazz.owner.isModuleClass && childOfTopLevel
+
+ // suggested in https://issues.scala-lang.org/browse/SI-4023?focusedCommentId=54759#comment-54759
+ var ownerClazz = classToJava(clazz.owner.asClass)
+ if (childOfTopLevelObject) ownerClazz = Class.forName(ownerClazz.getName stripSuffix "$", true, ownerClazz.getClassLoader)
+ val ownerChildren = ownerClazz.getDeclaredClasses
+
+ var fullNameOfJavaClass = ownerClazz.getName
+ if (childOfClass || childOfTopLevel) fullNameOfJavaClass += "$"
+ fullNameOfJavaClass += clazz.name
+
+ // compactify (see SI-7779)
+ fullNameOfJavaClass = fullNameOfJavaClass match {
+ case PackageAndClassPattern(pack, clazzName) =>
+ // in a package
+ pack + compactifyName(clazzName)
+ case _ =>
+ // in the empty package
+ compactifyName(fullNameOfJavaClass)
+ }
+
+ if (clazz.isModuleClass) fullNameOfJavaClass += "$"
+
+ // println(s"ownerChildren = ${ownerChildren.toList}")
+ // println(s"fullNameOfJavaClass = $fullNameOfJavaClass")
+ ownerChildren.find(_.getName == fullNameOfJavaClass).getOrElse(noClass)
+ } else
+ noClass
+ }
+
+ private val PackageAndClassPattern = """(.*\.)(.*)$""".r
+
+ private def expandedName(sym: Symbol): String =
+ if (sym.isPrivate) nme.expandedName(sym.name.toTermName, sym.owner).toString
+ else sym.name.toString
+
+ /** The Java field corresponding to a given Scala field.
+ * @param meth The Scala field.
+ */
+ def fieldToJava(fld: TermSymbol): jField = fieldCache.toJava(fld) {
+ val jclazz = classToJava(fld.owner.asClass)
+ val jname = nme.dropLocalSuffix(fld.name).toString
+ try jclazz getDeclaredField jname
+ catch {
+ case ex: NoSuchFieldException => jclazz getDeclaredField expandedName(fld)
+ }
+ }
+
+ /** The Java method corresponding to a given Scala method.
+ * @param meth The Scala method
+ */
+ def methodToJava(meth: MethodSymbol): jMethod = methodCache.toJava(meth) {
+ val jclazz = classToJava(meth.owner.asClass)
+ val paramClasses = transformedType(meth).paramTypes map typeToJavaClass
+ val jname = nme.dropLocalSuffix(meth.name).toString
+ try jclazz getDeclaredMethod (jname, paramClasses: _*)
+ catch {
+ case ex: NoSuchMethodException =>
+ jclazz getDeclaredMethod (expandedName(meth), paramClasses: _*)
+ }
+ }
+
+ /** The Java constructor corresponding to a given Scala constructor.
+ * @param constr The Scala constructor
+ */
+ def constructorToJava(constr: MethodSymbol): jConstructor[_] = constructorCache.toJava(constr) {
+ val jclazz = classToJava(constr.owner.asClass)
+ val paramClasses = transformedType(constr).paramTypes map typeToJavaClass
+ val effectiveParamClasses =
+ if (!constr.owner.owner.isStaticOwner) jclazz.getEnclosingClass +: paramClasses
+ else paramClasses
+ jclazz getConstructor (effectiveParamClasses: _*)
+ }
+
+ private def jArrayClass(elemClazz: jClass[_]): jClass[_] = {
+ jArray.newInstance(elemClazz, 0).getClass
+ }
+
+ /** The Java class that corresponds to given Scala type.
+ * Pre: Scala type is already transformed to Java level.
+ */
+ def typeToJavaClass(tpe: Type): jClass[_] = tpe match {
+ case ExistentialType(_, rtpe) => typeToJavaClass(rtpe)
+ case TypeRef(_, ArrayClass, List(elemtpe)) => jArrayClass(typeToJavaClass(elemtpe))
+ case TypeRef(_, sym: ClassSymbol, _) => classToJava(sym.asClass)
+ case tpe @ TypeRef(_, sym: AliasTypeSymbol, _) => typeToJavaClass(tpe.dealias)
+ case SingleType(_, sym: ModuleSymbol) => classToJava(sym.moduleClass.asClass)
+ case _ => throw new NoClassDefFoundError("no Java class corresponding to "+tpe+" found")
+ }
+ }
+
+ /** Assert that packages have package scopes */
+ override def validateClassInfo(tp: ClassInfoType) {
+ assert(!tp.typeSymbol.isPackageClass || tp.decls.isInstanceOf[PackageScope])
+ }
+
+ override def newPackageScope(pkgClass: Symbol) = new PackageScope(pkgClass)
+
+ override def scopeTransform(owner: Symbol)(op: => Scope): Scope =
+ if (owner.isPackageClass) owner.info.decls else op
+
+ override def mirrorThatLoaded(sym: Symbol): Mirror = sym.enclosingRootClass match {
+ case root: RootSymbol => root.mirror
+ case _ => abort(s"${sym}.enclosingRootClass = ${sym.enclosingRootClass}, which is not a RootSymbol")
+ }
+
+ private lazy val syntheticCoreClasses: Map[(String, Name), Symbol] = {
+ def mapEntry(sym: Symbol): ((String, Name), Symbol) = (sym.owner.fullName, sym.name) -> sym
+ Map() ++ (definitions.syntheticCoreClasses map mapEntry)
+ }
+
+ /** 1. If `owner` is a package class (but not the empty package) and `name` is a term name, make a new package
+ * <owner>.<name>, otherwise return NoSymbol.
+ * Exception: If owner is root and a java class with given name exists, create symbol in empty package instead
+ * 2. If `owner` is the scala package and `name` designates a phantom class, return
+ * the corresponding class symbol and enter it into this mirror's ScalaPackage.
+ */
+ override def missingHook(owner: Symbol, name: Name): Symbol = {
+ if (owner.hasPackageFlag) {
+ val mirror = mirrorThatLoaded(owner)
+ // todo. this makes toolbox tests pass, but it's a mere workaround for SI-5865
+// assert((owner.info decl name) == NoSymbol, s"already exists: $owner . $name")
+ if (owner.isRootSymbol && mirror.tryJavaClass(name.toString).isDefined)
+ return mirror.EmptyPackageClass.info decl name
+ if (name.isTermName && !owner.isEmptyPackageClass)
+ return mirror.makeScalaPackage(
+ if (owner.isRootSymbol) name.toString else owner.fullName+"."+name)
+ syntheticCoreClasses get (owner.fullName, name) match {
+ case Some(tsym) =>
+ // synthetic core classes are only present in root mirrors
+ // because Definitions.scala, which initializes and enters them, only affects rootMirror
+ // therefore we need to enter them manually for non-root mirrors
+ if (mirror ne thisUniverse.rootMirror) owner.info.decls enter tsym
+ return tsym
+ case None =>
+ }
+ }
+ info("*** missing: "+name+"/"+name.isTermName+"/"+owner+"/"+owner.hasPackageFlag+"/"+owner.info.decls.getClass)
+ super.missingHook(owner, name)
+ }
+}
+
+private[reflect] class ReflectError(msg: String) extends java.lang.Error(msg)
+
+private[reflect] class HasJavaClass[J](val getClazz: J => java.lang.Class[_])
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
new file mode 100644
index 0000000..1b69ca4
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -0,0 +1,28 @@
+package scala.reflect
+package runtime
+
+import internal.{SomePhase, NoPhase, Phase, TreeGen}
+
+/** An implementation of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders.
+ *
+ * Should not be instantiated directly, use [[scala.reflect.runtime.universe]] instead.
+ *
+ * @contentDiagram hideNodes "*Api" "*Extractor"
+ */
+class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.SymbolTable { self =>
+
+ def picklerPhase = SomePhase
+
+ lazy val settings = new Settings
+ def forInteractive = false
+ def forScaladoc = false
+
+ def log(msg: => AnyRef): Unit = if (settings.debug.value) println(" [] "+msg)
+
+ type TreeCopier = InternalTreeCopierOps
+ def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
+ def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
+
+ init()
+}
+
diff --git a/src/reflect/scala/reflect/runtime/ReflectSetup.scala b/src/reflect/scala/reflect/runtime/ReflectSetup.scala
new file mode 100644
index 0000000..6e28fc8
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/ReflectSetup.scala
@@ -0,0 +1,12 @@
+package scala.reflect
+package runtime
+
+import internal.{SomePhase, NoPhase, Phase, TreeGen}
+
+/** A helper trait to initialize things that need to be set before JavaMirrors and other
+ * reflect specific traits are initialized */
+private[runtime] trait ReflectSetup extends internal.SymbolTable {
+ override val phaseWithId: Array[Phase] = Array(NoPhase, SomePhase)
+ override val currentRunId = 1 // fake a run id so that it is different from NoRunId
+ phase = SomePhase // set to a phase different from NoPhase
+}
diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
new file mode 100644
index 0000000..ffed3cc
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -0,0 +1,85 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.reflect.runtime
+
+import java.lang.{Class => jClass}
+import java.lang.reflect.{ Method, InvocationTargetException, UndeclaredThrowableException }
+
+/** A few java-reflection oriented utility functions useful during reflection bootstrapping.
+ */
+private[scala] object ReflectionUtils {
+ // Unwraps some chained exceptions which arise during reflective calls.
+ def unwrapThrowable(x: Throwable): Throwable = x match {
+ case _: InvocationTargetException | // thrown by reflectively invoked method or constructor
+ _: ExceptionInInitializerError | // thrown when running a static initializer (e.g. a scala module constructor)
+ _: UndeclaredThrowableException | // invocation on a proxy instance if its invocation handler's `invoke` throws an exception
+ _: ClassNotFoundException | // no definition for a class instantiated by name
+ _: NoClassDefFoundError // the definition existed when the executing class was compiled, but can no longer be found
+ if x.getCause != null =>
+ unwrapThrowable(x.getCause)
+ case _ => x
+ }
+ // Transforms an exception handler into one which will only receive the unwrapped
+ // exceptions (for the values of wrap covered in unwrapThrowable.)
+ def unwrapHandler[T](pf: PartialFunction[Throwable, T]): PartialFunction[Throwable, T] = {
+ case ex if pf isDefinedAt unwrapThrowable(ex) => pf(unwrapThrowable(ex))
+ }
+
+ def show(cl: ClassLoader): String = {
+ import scala.language.reflectiveCalls
+
+ def isAbstractFileClassLoader(clazz: Class[_]): Boolean = {
+ if (clazz == null) return false
+ if (clazz.getName == "scala.tools.nsc.interpreter.AbstractFileClassLoader") return true
+ return isAbstractFileClassLoader(clazz.getSuperclass)
+ }
+ def inferClasspath(cl: ClassLoader): String = cl match {
+ case cl: java.net.URLClassLoader =>
+ (cl.getURLs mkString ",")
+ case cl if cl != null && isAbstractFileClassLoader(cl.getClass) =>
+ cl.asInstanceOf[{val root: scala.reflect.io.AbstractFile}].root.canonicalPath
+ case null =>
+ val loadBootCp = (flavor: String) => scala.util.Properties.propOrNone(flavor + ".boot.class.path")
+ loadBootCp("sun") orElse loadBootCp("java") getOrElse "<unknown>"
+ case _ =>
+ "<unknown>"
+ }
+ cl match {
+ case cl if cl != null =>
+ "%s of type %s with classpath [%s] and parent being %s".format(cl, cl.getClass, inferClasspath(cl), show(cl.getParent))
+ case null =>
+ "primordial classloader with boot classpath [%s]".format(inferClasspath(cl))
+ }
+ }
+
+ def staticSingletonInstance(cl: ClassLoader, className: String): AnyRef = {
+ val name = if (className endsWith "$") className else className + "$"
+ val clazz = java.lang.Class.forName(name, true, cl)
+ staticSingletonInstance(clazz)
+ }
+
+ def staticSingletonInstance(clazz: Class[_]): AnyRef = clazz getField "MODULE$" get null
+
+ def innerSingletonInstance(outer: AnyRef, className: String): AnyRef = {
+ val accessorName = if (className endsWith "$") className.substring(0, className.length - 1) else className
+ def singletonAccessor(clazz: Class[_]): Option[Method] =
+ if (clazz == null) None
+ else {
+ val declaredAccessor = clazz.getDeclaredMethods.filter(_.getName == accessorName).headOption
+ declaredAccessor orElse singletonAccessor(clazz.getSuperclass)
+ }
+
+ val accessor = singletonAccessor(outer.getClass) getOrElse { throw new NoSuchMethodException(s"${outer.getClass.getName}.$accessorName") }
+ accessor setAccessible true
+ accessor invoke outer
+ }
+
+ def isTraitImplementation(fileName: String) = fileName endsWith "$class.class"
+
+ def scalacShouldntLoadClassfile(fileName: String) = isTraitImplementation(fileName)
+
+ def scalacShouldntLoadClass(name: scala.reflect.internal.SymbolTable#Name) = scalacShouldntLoadClassfile(name + ".class")
+}
diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala
new file mode 100644
index 0000000..0e0cf3f
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/Settings.scala
@@ -0,0 +1,49 @@
+package scala.reflect
+package runtime
+
+import scala.reflect.internal.settings.MutableSettings
+
+/** The Settings class for runtime reflection.
+ * This should be refined, so that settings are settable via command
+ * line options or properties.
+ */
+private[reflect] class Settings extends MutableSettings {
+
+ trait Setting extends SettingValue { }
+
+ class BooleanSetting(x: Boolean) extends Setting {
+ type T = Boolean
+ protected var v: Boolean = x
+ override def value: Boolean = v
+ }
+
+ class IntSetting(x: Int) extends Setting {
+ type T = Int
+ protected var v: Int = x
+ override def value: Int = v
+ }
+
+ class MultiStringSetting(xs: List[String]) extends Setting {
+ type T = List[String]
+ protected var v: List[String] = xs
+ override def value: List[String] = v
+ }
+
+ val Xexperimental = new BooleanSetting(false)
+ val XfullLubs = new BooleanSetting(false)
+ val XnoPatmatAnalysis = new BooleanSetting(false)
+ val XoldPatmat = new BooleanSetting(false)
+ val Xprintpos = new BooleanSetting(false)
+ val Ynotnull = new BooleanSetting(false)
+ val Yshowsymkinds = new BooleanSetting(false)
+ val debug = new BooleanSetting(false)
+ val deepCloning = new BooleanSetting(false)
+ val explaintypes = new BooleanSetting(false)
+ val overrideObjects = new BooleanSetting(false)
+ val printtypes = new BooleanSetting(false)
+ val uniqid = new BooleanSetting(false)
+ val verbose = new BooleanSetting(false)
+
+ val Yrecursion = new IntSetting(0)
+ val maxClassfileName = new IntSetting(255)
+}
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
new file mode 100644
index 0000000..b895092
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -0,0 +1,145 @@
+package scala.reflect
+package runtime
+
+import internal.Flags
+import java.lang.{Class => jClass, Package => jPackage}
+import scala.collection.mutable
+import scala.reflect.runtime.ReflectionUtils.scalacShouldntLoadClass
+
+private[reflect] trait SymbolLoaders { self: SymbolTable =>
+
+ /** The standard completer for top-level classes
+ * @param clazz The top-level class
+ * @param module The companion object of `clazz`
+ * Calling `complete` on this type will assign the infos of `clazz` and `module`
+ * by unpickling information from the corresponding Java class. If no Java class
+ * is found, a package is created instead.
+ */
+ class TopClassCompleter(clazz: Symbol, module: Symbol) extends SymLoader with FlagAssigningCompleter {
+// def makePackage() {
+// println("wrong guess; making package "+clazz)
+// val ptpe = newPackageType(module.moduleClass)
+// for (sym <- List(clazz, module, module.moduleClass)) {
+// sym setFlag Flags.PACKAGE
+// sym setInfo ptpe
+// }
+// }
+
+ override def complete(sym: Symbol) = {
+ debugInfo("completing "+sym+"/"+clazz.fullName)
+ assert(sym == clazz || sym == module || sym == module.moduleClass)
+// try {
+ atPhaseNotLaterThan(picklerPhase) {
+ val loadingMirror = mirrorThatLoaded(sym)
+ val javaClass = loadingMirror.javaClass(clazz.javaClassName)
+ loadingMirror.unpickleClass(clazz, module, javaClass)
+// } catch {
+// case ex: ClassNotFoundException => makePackage()
+// case ex: NoClassDefFoundError => makePackage()
+ // Note: We catch NoClassDefFoundError because there are situations
+ // where a package and a class have the same name except for capitalization.
+ // It seems in this case the class is loaded even if capitalization differs
+ // but then a NoClassDefFound error is issued with a ("wrong name: ...")
+ // reason. (I guess this is a concession to Windows).
+ // The present behavior is a bit too forgiving, in that it masks
+ // all class load errors, not just wrong name errors. We should try
+ // to be more discriminating. To get on the right track simply delete
+ // the clause above and load a collection class such as collection.Iterable.
+ // You'll see an error that class `parallel` has the wrong name.
+// }
+ }
+ }
+ override def load(sym: Symbol) = complete(sym)
+ }
+
+ /** Create a class and a companion object, enter in enclosing scope,
+ * and initialize with a lazy type completer.
+ * @param owner The owner of the newly created class and object
+ * @param name The simple name of the newly created class
+ * @param completer The completer to be used to set the info of the class and the module
+ */
+ protected def createClassModule(owner: Symbol, name: TypeName, completer: (Symbol, Symbol) => LazyType) = {
+ assert(!(name.toString endsWith "[]"), name)
+ val clazz = owner.newClass(name)
+ val module = owner.newModule(name.toTermName)
+ // without this check test/files/run/t5256g and test/files/run/t5256h will crash
+ // todo. reflection meeting verdict: need to enter the symbols into the first symbol in the owner chain that has a non-empty scope
+ if (owner.info.decls != EmptyScope) {
+ owner.info.decls enter clazz
+ owner.info.decls enter module
+ }
+ initClassModule(clazz, module, completer(clazz, module))
+ (clazz, module)
+ }
+
+ protected def setAllInfos(clazz: Symbol, module: Symbol, info: Type) = {
+ List(clazz, module, module.moduleClass) foreach (_ setInfo info)
+ }
+
+ protected def initClassModule(clazz: Symbol, module: Symbol, completer: LazyType) =
+ setAllInfos(clazz, module, completer)
+
+ /** The type completer for packages.
+ */
+ class LazyPackageType extends LazyType with FlagAgnosticCompleter {
+ override def complete(sym: Symbol) {
+ assert(sym.isPackageClass)
+ sym setInfo new ClassInfoType(List(), new PackageScope(sym), sym)
+ // override def safeToString = pkgClass.toString
+ openPackageModule(sym)
+ }
+ }
+
+ class PackageScope(pkgClass: Symbol) extends Scope(initFingerPrints = -1L) // disable fingerprinting as we do not know entries beforehand
+ with SynchronizedScope {
+ assert(pkgClass.isType)
+ // disable fingerprinting as we do not know entries beforehand
+ private val negatives = mutable.Set[Name]() // Syncnote: Performance only, so need not be protected.
+ override def lookupEntry(name: Name): ScopeEntry = {
+ val e = super.lookupEntry(name)
+ if (e != null)
+ e
+ else if (scalacShouldntLoadClass(name) || (negatives contains name))
+ null
+ else {
+ val path =
+ if (pkgClass.isEmptyPackageClass) name.toString
+ else pkgClass.fullName + "." + name
+ val currentMirror = mirrorThatLoaded(pkgClass)
+ currentMirror.tryJavaClass(path) match {
+ case Some(cls) =>
+ val loadingMirror = currentMirror.mirrorDefining(cls)
+ val (clazz, module) =
+ if (loadingMirror eq currentMirror) {
+ createClassModule(pkgClass, name.toTypeName, new TopClassCompleter(_, _))
+ } else {
+ val origOwner = loadingMirror.packageNameToScala(pkgClass.fullName)
+ val clazz = origOwner.info decl name.toTypeName
+ val module = origOwner.info decl name.toTermName
+ assert(clazz != NoSymbol)
+ assert(module != NoSymbol)
+ pkgClass.info.decls enter clazz
+ pkgClass.info.decls enter module
+ (clazz, module)
+ }
+ debugInfo(s"created $module/${module.moduleClass} in $pkgClass")
+ lookupEntry(name)
+ case none =>
+ debugInfo("*** not found : "+path)
+ negatives += name
+ null
+ }
+ }
+ }
+ }
+
+ /** Assert that packages have package scopes */
+ override def validateClassInfo(tp: ClassInfoType) {
+ assert(!tp.typeSymbol.isPackageClass || tp.decls.isInstanceOf[PackageScope])
+ }
+
+ override def newPackageScope(pkgClass: Symbol) = new PackageScope(pkgClass)
+
+ override def scopeTransform(owner: Symbol)(op: => Scope): Scope =
+ if (owner.isPackageClass) owner.info.decls else op
+}
diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala
new file mode 100644
index 0000000..5c08e9a
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala
@@ -0,0 +1,45 @@
+package scala.reflect
+package runtime
+
+import scala.reflect.internal.Flags._
+
+/**
+ * This symbol table trait fills in the definitions so that class information is obtained by refection.
+ * It can be used either from a reflexive universe (class scala.reflect.runtime.JavaUniverse), or else from
+ * a runtime compiler that uses reflection to get a class information (class scala.tools.reflect.ReflectGlobal)
+ */
+private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors with SymbolLoaders with SynchronizedOps {
+
+ def info(msg: => String) =
+ if (settings.verbose.value) println("[reflect-compiler] "+msg)
+
+ def debugInfo(msg: => String) =
+ if (settings.debug.value) info(msg)
+
+ /** Declares that this is a runtime reflection universe.
+ *
+ * This means that we can make certain assumptions to optimize the universe.
+ * For example, we may auto-initialize symbols on flag and annotation requests
+ * (see `shouldTriggerCompleter` below for more details).
+ *
+ * On the other hand, this also means that usage scenarios of the universe
+ * will differ from the conventional ones. For example, we have to do additional cleanup
+ * in order to prevent memory leaks: http://groups.google.com/group/scala-internals/browse_thread/thread/eabcf3d406dab8b2.
+ */
+ override def isCompilerUniverse = false
+
+ /** Unlike compiler universes, reflective universes can auto-initialize symbols on flag requests.
+ *
+ * scalac wasn't designed with such auto-initialization in mind, and quite often it makes assumptions
+ * that flag requests won't cause initialization. Therefore enabling auto-init leads to cyclic errors.
+ * We could probably fix those, but at the moment it's too risky.
+ *
+ * Reflective universes share codebase with scalac, but their surface is much smaller, which means less assumptions.
+ * These assumptions are taken care of in this overriden `shouldTriggerCompleter` method.
+ */
+ override protected def shouldTriggerCompleter(symbol: Symbol, completer: Type, isFlagRelated: Boolean, mask: Long) =
+ completer match {
+ case _: TopClassCompleter | _: JavaClassCompleter => !isFlagRelated || (mask & TopLevelPickledFlags) != 0
+ case _ => super.shouldTriggerCompleter(symbol, completer, isFlagRelated, mask)
+ }
+}
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
new file mode 100644
index 0000000..7b280e5
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
@@ -0,0 +1,52 @@
+package scala.reflect
+package runtime
+
+// SI-6240: test thread-safety, make trees synchronized as well
+private[reflect] trait SynchronizedOps extends internal.SymbolTable
+ with SynchronizedSymbols
+ with SynchronizedTypes { self: SymbolTable =>
+
+// Names
+
+ private lazy val nameLock = new Object
+
+ override def newTermName(s: String): TermName = nameLock.synchronized { super.newTermName(s) }
+ override def newTypeName(s: String): TypeName = nameLock.synchronized { super.newTypeName(s) }
+
+// BaseTypeSeqs
+
+ override protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
+ new BaseTypeSeq(parents, elems) with SynchronizedBaseTypeSeq
+
+ trait SynchronizedBaseTypeSeq extends BaseTypeSeq {
+ override def apply(i: Int): Type = synchronized { super.apply(i) }
+ override def rawElem(i: Int) = synchronized { super.rawElem(i) }
+ override def typeSymbol(i: Int): Symbol = synchronized { super.typeSymbol(i) }
+ override def toList: List[Type] = synchronized { super.toList }
+ override def copy(head: Type, offset: Int): BaseTypeSeq = synchronized { super.copy(head, offset) }
+ override def map(f: Type => Type): BaseTypeSeq = synchronized { super.map(f) }
+ override def exists(p: Type => Boolean): Boolean = synchronized { super.exists(p) }
+ override lazy val maxDepth = synchronized { maxDepthOfElems }
+ override def toString = synchronized { super.toString }
+
+ override def lateMap(f: Type => Type): BaseTypeSeq = new MappedBaseTypeSeq(this, f) with SynchronizedBaseTypeSeq
+ }
+
+// Scopes
+
+ override def newScope = new Scope() with SynchronizedScope
+ override def newNestedScope(outer: Scope): Scope = new Scope(outer) with SynchronizedScope
+
+ trait SynchronizedScope extends Scope {
+ override def isEmpty: Boolean = synchronized { super.isEmpty }
+ override def size: Int = synchronized { super.size }
+ override def enter[T <: Symbol](sym: T): T = synchronized { super.enter(sym) }
+ override def rehash(sym: Symbol, newname: Name) = synchronized { super.rehash(sym, newname) }
+ override def unlink(e: ScopeEntry) = synchronized { super.unlink(e) }
+ override def unlink(sym: Symbol) = synchronized { super.unlink(sym) }
+ override def lookupAll(name: Name) = synchronized { super.lookupAll(name) }
+ override def lookupEntry(name: Name) = synchronized { super.lookupEntry(name) }
+ override def lookupNextEntry(entry: ScopeEntry) = synchronized { super.lookupNextEntry(entry) }
+ override def toList: List[Symbol] = synchronized { super.toList }
+ }
+}
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
new file mode 100644
index 0000000..00f6952
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -0,0 +1,140 @@
+package scala.reflect
+package runtime
+
+import scala.reflect.io.AbstractFile
+
+private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
+
+ override protected def nextId() = synchronized { super.nextId() }
+
+ override protected def freshExistentialName(suffix: String) =
+ synchronized { super.freshExistentialName(suffix) }
+
+ // Set the fields which point companions at one another. Returns the module.
+ override def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol =
+ synchronized { super.connectModuleToClass(m, moduleClass) }
+
+ override def newFreeTermSymbol(name: TermName, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
+ new FreeTermSymbol(name, value, origin) with SynchronizedTermSymbol initFlags flags
+
+ override def newFreeTypeSymbol(name: TypeName, flags: Long = 0L, origin: String = null): FreeTypeSymbol =
+ new FreeTypeSymbol(name, origin) with SynchronizedTypeSymbol initFlags flags
+
+ override protected def makeNoSymbol: NoSymbol = new NoSymbol with SynchronizedSymbol
+
+ trait SynchronizedSymbol extends Symbol {
+
+ override def rawflags = synchronized { super.rawflags }
+ override def rawflags_=(x: Long) = synchronized { super.rawflags_=(x) }
+
+ override def rawowner = synchronized { super.rawowner }
+ override def owner_=(owner: Symbol) = synchronized { super.owner_=(owner) }
+
+ override def validTo = synchronized { super.validTo }
+ override def validTo_=(x: Period) = synchronized { super.validTo_=(x) }
+
+ override def pos = synchronized { super.pos }
+ override def setPos(pos: Position): this.type = { synchronized { super.setPos(pos) }; this }
+
+ override def privateWithin = synchronized { super.privateWithin }
+ override def privateWithin_=(sym: Symbol) = synchronized { super.privateWithin_=(sym) }
+
+ override def info = synchronized { super.info }
+ override def info_=(info: Type) = synchronized { super.info_=(info) }
+ override def updateInfo(info: Type): Symbol = synchronized { super.updateInfo(info) }
+ override def rawInfo: Type = synchronized { super.rawInfo }
+
+ override def typeParams: List[Symbol] = synchronized { super.typeParams }
+
+ override def reset(completer: Type): this.type = synchronized { super.reset(completer) }
+
+ override def infosString: String = synchronized { super.infosString }
+
+ override def annotations: List[AnnotationInfo] = synchronized { super.annotations }
+ override def setAnnotations(annots: List[AnnotationInfo]): this.type = { synchronized { super.setAnnotations(annots) }; this }
+
+
+// ------ creators -------------------------------------------------------------------
+
+ override protected def createAbstractTypeSymbol(name: TypeName, pos: Position, newFlags: Long): AbstractTypeSymbol =
+ new AbstractTypeSymbol(this, pos, name) with SynchronizedTypeSymbol initFlags newFlags
+
+ override protected def createAliasTypeSymbol(name: TypeName, pos: Position, newFlags: Long): AliasTypeSymbol =
+ new AliasTypeSymbol(this, pos, name) with SynchronizedTypeSymbol initFlags newFlags
+
+ override protected def createTypeSkolemSymbol(name: TypeName, origin: AnyRef, pos: Position, newFlags: Long): TypeSkolem =
+ new TypeSkolem(this, pos, name, origin) with SynchronizedTypeSymbol initFlags newFlags
+
+ override protected def createClassSymbol(name: TypeName, pos: Position, newFlags: Long): ClassSymbol =
+ new ClassSymbol(this, pos, name) with SynchronizedClassSymbol initFlags newFlags
+
+ override protected def createModuleClassSymbol(name: TypeName, pos: Position, newFlags: Long): ModuleClassSymbol =
+ new ModuleClassSymbol(this, pos, name) with SynchronizedModuleClassSymbol initFlags newFlags
+
+ override protected def createPackageClassSymbol(name: TypeName, pos: Position, newFlags: Long): PackageClassSymbol =
+ new PackageClassSymbol(this, pos, name) with SynchronizedModuleClassSymbol initFlags newFlags
+
+ override protected def createRefinementClassSymbol(pos: Position, newFlags: Long): RefinementClassSymbol =
+ new RefinementClassSymbol(this, pos) with SynchronizedClassSymbol initFlags newFlags
+
+ override protected def createImplClassSymbol(name: TypeName, pos: Position, newFlags: Long): ClassSymbol =
+ new ClassSymbol(this, pos, name) with ImplClassSymbol with SynchronizedClassSymbol initFlags newFlags
+
+ override protected def createPackageObjectClassSymbol(pos: Position, newFlags: Long): PackageObjectClassSymbol =
+ new PackageObjectClassSymbol(this, pos) with SynchronizedClassSymbol initFlags newFlags
+
+ override protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol =
+ new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
+
+ override protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol =
+ new MethodSymbol(this, pos, name) with SynchronizedMethodSymbol initFlags newFlags
+
+ override protected def createModuleSymbol(name: TermName, pos: Position, newFlags: Long): ModuleSymbol =
+ new ModuleSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
+
+ override protected def createPackageSymbol(name: TermName, pos: Position, newFlags: Long): ModuleSymbol = createModuleSymbol(name, pos, newFlags)
+
+ // TODO
+ // override protected def createValueParameterSymbol(name: TermName, pos: Position, newFlags: Long)
+ // override protected def createValueMemberSymbol(name: TermName, pos: Position, newFlags: Long)
+ }
+
+// ------- subclasses ---------------------------------------------------------------------
+
+ trait SynchronizedTermSymbol extends TermSymbol with SynchronizedSymbol {
+ override def name_=(x: Name) = synchronized { super.name_=(x) }
+ override def rawname = synchronized { super.rawname }
+ override def referenced: Symbol = synchronized { super.referenced }
+ override def referenced_=(x: Symbol) = synchronized { super.referenced_=(x) }
+ }
+
+ trait SynchronizedMethodSymbol extends MethodSymbol with SynchronizedTermSymbol {
+ override def typeAsMemberOf(pre: Type): Type = synchronized { super.typeAsMemberOf(pre) }
+ override def paramss: List[List[Symbol]] = synchronized { super.paramss }
+ override def returnType: Type = synchronized { super.returnType }
+ }
+
+ trait SynchronizedTypeSymbol extends TypeSymbol with SynchronizedSymbol {
+ override def name_=(x: Name) = synchronized { super.name_=(x) }
+ override def rawname = synchronized { super.rawname }
+ override def typeConstructor: Type = synchronized { super.typeConstructor }
+ override def tpe: Type = synchronized { super.tpe }
+ }
+
+ trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol {
+ override def associatedFile = synchronized { super.associatedFile }
+ override def associatedFile_=(f: AbstractFile) = synchronized { super.associatedFile_=(f) }
+ override def thisSym: Symbol = synchronized { super.thisSym }
+ override def thisType: Type = synchronized { super.thisType }
+ override def typeOfThis: Type = synchronized { super.typeOfThis }
+ override def typeOfThis_=(tp: Type) = synchronized { super.typeOfThis_=(tp) }
+ override def children = synchronized { super.children }
+ override def addChild(sym: Symbol) = synchronized { super.addChild(sym) }
+ }
+
+ trait SynchronizedModuleClassSymbol extends ModuleClassSymbol with SynchronizedClassSymbol {
+ override def sourceModule = synchronized { super.sourceModule }
+ override def implicitMembers: Scope = synchronized { super.implicitMembers }
+ }
+}
+
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
new file mode 100644
index 0000000..a3e7c28
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
@@ -0,0 +1,106 @@
+package scala.reflect
+package runtime
+
+import scala.collection.mutable.WeakHashMap
+import java.lang.ref.WeakReference
+
+/** This trait overrides methods in reflect.internal, bracketing
+ * them in synchronized { ... } to make them thread-safe
+ */
+private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTable =>
+
+ // No sharing of map objects:
+ override protected def commonOwnerMap = new CommonOwnerMap
+
+ private object uniqueLock
+
+ private val uniques = WeakHashMap[Type, WeakReference[Type]]()
+ override def unique[T <: Type](tp: T): T = uniqueLock.synchronized {
+ // we need to have weak uniques for runtime reflection
+ // because unlike the normal compiler universe, reflective universe isn't organized in runs
+ // therefore perRunCaches can grow infinitely large
+ //
+ // despite that toolbox universes are decorated, toolboxes are compilers,
+ // i.e. they have their caches cleaned up automatically on per-run basis,
+ // therefore they should use vanilla uniques, which are faster
+ if (!isCompilerUniverse) {
+ val inCache = uniques get tp
+ val result = if (inCache.isDefined) inCache.get.get else null
+ if (result ne null) result.asInstanceOf[T]
+ else {
+ uniques(tp) = new WeakReference(tp)
+ tp
+ }
+ } else {
+ super.unique(tp)
+ }
+ }
+
+ class SynchronizedUndoLog extends UndoLog {
+ private val actualLock = new java.util.concurrent.locks.ReentrantLock
+
+ final override def lock(): Unit = actualLock.lock()
+ final override def unlock(): Unit = actualLock.unlock()
+ }
+
+ override protected def newUndoLog = new SynchronizedUndoLog
+
+ override protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) =
+ synchronized { super.baseTypeOfNonClassTypeRef(tpe, clazz) }
+
+ private object subsametypeLock
+
+ override def isSameType(tp1: Type, tp2: Type): Boolean =
+ subsametypeLock.synchronized { super.isSameType(tp1, tp2) }
+
+ override def isDifferentType(tp1: Type, tp2: Type): Boolean =
+ subsametypeLock.synchronized { super.isDifferentType(tp1, tp2) }
+
+ override def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean =
+ subsametypeLock.synchronized { super.isSubType(tp1, tp2, depth) }
+
+ private object lubglbLock
+
+ override def glb(ts: List[Type]): Type =
+ lubglbLock.synchronized { super.glb(ts) }
+
+ override def lub(ts: List[Type]): Type =
+ lubglbLock.synchronized { super.lub(ts) }
+
+ private object indentLock
+
+ override protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
+ indentLock.synchronized { super.explain(op, p, tp1, arg2) }
+ }
+
+ private object toStringLock
+
+ override protected def typeToString(tpe: Type): String =
+ toStringLock.synchronized(super.typeToString(tpe))
+
+ /* The idea of caches is as follows.
+ * When in reflective mode, a cache is either null, or one sentinel
+ * value representing undefined or the final defined
+ * value. Hence, we can ask in non-synchronized mode whether the cache field
+ * is non null and different from the sentinel (if a sentinel exists).
+ * If that's true, the cache value is current.
+ * Otherwise we arrive in one of the defined... methods listed below
+ * which go through all steps in synchronized mode.
+ */
+
+ override protected def defineUnderlyingOfSingleType(tpe: SingleType) =
+ tpe.synchronized { super.defineUnderlyingOfSingleType(tpe) }
+
+ override protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) =
+ tpe.synchronized { super.defineBaseTypeSeqOfCompoundType(tpe) }
+
+ override protected def defineBaseClassesOfCompoundType(tpe: CompoundType) =
+ tpe.synchronized { super.defineBaseClassesOfCompoundType(tpe) }
+
+ override protected def defineParentsOfTypeRef(tpe: TypeRef) =
+ tpe.synchronized { super.defineParentsOfTypeRef(tpe) }
+
+ override protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) =
+ tpe.synchronized { super.defineBaseTypeSeqOfTypeRef(tpe) }
+
+}
diff --git a/src/reflect/scala/reflect/runtime/TwoWayCache.scala b/src/reflect/scala/reflect/runtime/TwoWayCache.scala
new file mode 100644
index 0000000..05debcb
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/TwoWayCache.scala
@@ -0,0 +1,66 @@
+package scala.reflect
+package runtime
+
+import scala.collection.mutable.WeakHashMap
+import java.lang.ref.WeakReference
+
+/** A cache that maintains a bijection between Java reflection type `J`
+ * and Scala reflection type `S`.
+ *
+ * The cache is two-way weak (i.e. is powered by weak references),
+ * so that neither Java artifacts prevent Scala artifacts from being garbage collected,
+ * nor the other way around.
+ */
+private[runtime] class TwoWayCache[J, S] {
+
+ private val toScalaMap = new WeakHashMap[J, WeakReference[S]]
+ private val toJavaMap = new WeakHashMap[S, WeakReference[J]]
+
+ def enter(j: J, s: S) = synchronized {
+ // debugInfo("cached: "+j+"/"+s)
+ toScalaMap(j) = new WeakReference(s)
+ toJavaMap(s) = new WeakReference(j)
+ }
+
+ private object SomeRef {
+ def unapply[T](optRef: Option[WeakReference[T]]): Option[T] =
+ if (optRef.nonEmpty) {
+ val result = optRef.get.get
+ if (result != null) Some(result) else None
+ } else None
+ }
+
+ def toScala(key: J)(body: => S): S = synchronized {
+ toScalaMap get key match {
+ case SomeRef(v) =>
+ v
+ case _ =>
+ val result = body
+ enter(key, result)
+ result
+ }
+ }
+
+ def toJava(key: S)(body: => J): J = synchronized {
+ toJavaMap get key match {
+ case SomeRef(v) =>
+ v
+ case _ =>
+ val result = body
+ enter(result, key)
+ result
+ }
+ }
+
+ def toJavaOption(key: S)(body: => Option[J]): Option[J] = synchronized {
+ toJavaMap get key match {
+ case SomeRef(v) =>
+ Some(v)
+ case _ =>
+ val result = body
+ for (value <- result) enter(value, key)
+ result
+ }
+ }
+}
+
diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala
new file mode 100644
index 0000000..b97913d
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/package.scala
@@ -0,0 +1,37 @@
+package scala.reflect
+
+/** Entry points into runtime reflection.
+ * See [[scala.reflect.api.package the overview page]] for details on how to use them.
+ */
+package object runtime {
+
+ /** The entry point into Scala runtime reflection.
+ *
+ * To use Scala runtime reflection, simply use or import `scala.reflect.runtime.universe._`
+ *
+ * See [[scala.reflect.api.Universe]] or the
+ * [[http://docs.scala-lang.org/overviews/reflection/environment-universes-mirrors.html Reflection Guide: Universes]]
+ * for more details.
+ */
+ lazy val universe: api.JavaUniverse = new runtime.JavaUniverse
+
+ /** The runtime reflection mirror that corresponds to the current lexical context.
+ * It's typically equivalent to `universe.runtimeMirror(getClass.getClassLoader)` invoked at the call site.
+ */
+ // implementation hardwired to the `currentMirror` method below
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`
+ def currentMirror: universe.Mirror = ??? // macro
+}
+
+package runtime {
+ private[scala] object Macros {
+ def currentMirror(c: scala.reflect.macros.Context): c.Expr[universe.Mirror] = {
+ import c.universe._
+ val runtimeClass = c.reifyEnclosingRuntimeClass
+ if (runtimeClass.isEmpty) c.abort(c.enclosingPosition, "call site does not have an enclosing class")
+ val runtimeUniverse = Select(Select(Select(Ident(newTermName("scala")), newTermName("reflect")), newTermName("runtime")), newTermName("universe"))
+ val currentMirror = Apply(Select(runtimeUniverse, newTermName("runtimeMirror")), List(Select(runtimeClass, newTermName("getClassLoader"))))
+ c.Expr[Nothing](currentMirror)(c.WeakTypeTag.Nothing)
+ }
+ }
+}
diff --git a/src/scalacheck/org/scalacheck/Arbitrary.scala b/src/scalacheck/org/scalacheck/Arbitrary.scala
index 14d2b9b..8c43cda 100644
--- a/src/scalacheck/org/scalacheck/Arbitrary.scala
+++ b/src/scalacheck/org/scalacheck/Arbitrary.scala
@@ -1,15 +1,16 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2010 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
import util.{FreqMap,Buildable}
+import scala.reflect.ClassTag
sealed abstract class Arbitrary[T] {
val arbitrary: Gen[T]
@@ -115,7 +116,10 @@ object Arbitrary {
/** Arbitrary instance of Char */
implicit lazy val arbChar: Arbitrary[Char] = Arbitrary(
- Gen.choose(Char.MinValue, Char.MaxValue)
+ Gen.frequency(
+ (0xD800-Char.MinValue, Gen.choose(Char.MinValue,0xD800-1)),
+ (Char.MaxValue-0xDFFF, Gen.choose(0xDFFF+1,Char.MaxValue))
+ )
)
/** Arbitrary instance of Byte */
@@ -174,9 +178,10 @@ object Arbitrary {
import java.math.MathContext._
val mcGen = oneOf(UNLIMITED, DECIMAL32, DECIMAL64, DECIMAL128)
val bdGen = for {
- mc <- mcGen
- scale <- arbInt.arbitrary
x <- arbBigInt.arbitrary
+ mc <- mcGen
+ limit <- value(if(mc == UNLIMITED) 0 else math.max(x.abs.toString.length - mc.getPrecision, 0))
+ scale <- Gen.chooseNum(Int.MinValue + limit , Int.MaxValue)
} yield BigDecimal(x, scale, mc)
Arbitrary(bdGen)
}
@@ -193,23 +198,37 @@ object Arbitrary {
}
/** Generates an arbitrary property */
- implicit lazy val arbProp: Arbitrary[Prop] =
+ implicit lazy val arbProp: Arbitrary[Prop] = {
+ import Prop._
+ val undecidedOrPassed = forAll { b: Boolean =>
+ b ==> true
+ }
Arbitrary(frequency(
- (5, Prop.proved),
- (4, Prop.falsified),
- (2, Prop.undecided),
- (1, Prop.exception(null))
+ (4, falsified),
+ (4, passed),
+ (3, proved),
+ (3, undecidedOrPassed),
+ (2, undecided),
+ (1, exception(null))
))
+ }
/** Arbitrary instance of test params */
implicit lazy val arbTestParams: Arbitrary[Test.Params] =
Arbitrary(for {
- minSuccTests <- choose(10,150)
- maxDiscTests <- choose(100,500)
+ minSuccTests <- choose(10,200)
+ maxDiscardRatio <- choose(0.2f,10f)
minSize <- choose(0,500)
sizeDiff <- choose(0,500)
maxSize <- choose(minSize, minSize + sizeDiff)
- } yield Test.Params(minSuccTests,maxDiscTests,minSize,maxSize))
+ ws <- choose(1,4)
+ } yield Test.Params(
+ minSuccessfulTests = minSuccTests,
+ maxDiscardRatio = maxDiscardRatio,
+ minSize = minSize,
+ maxSize = maxSize,
+ workers = ws
+ ))
/** Arbitrary instance of gen params */
implicit lazy val arbGenParams: Arbitrary[Gen.Params] =
@@ -259,7 +278,7 @@ object Arbitrary {
): Arbitrary[C[T]] = Arbitrary(containerOf[C,T](arbitrary[T]))
/** Arbitrary instance of any array. */
- implicit def arbArray[T](implicit a: Arbitrary[T], c: ClassManifest[T]
+ implicit def arbArray[T](implicit a: Arbitrary[T], c: ClassTag[T]
): Arbitrary[Array[T]] = Arbitrary(containerOf[Array,T](arbitrary[T]))
diff --git a/src/scalacheck/org/scalacheck/Arg.scala b/src/scalacheck/org/scalacheck/Arg.scala
index 99657db..8959211 100644
--- a/src/scalacheck/org/scalacheck/Arg.scala
+++ b/src/scalacheck/org/scalacheck/Arg.scala
@@ -1,11 +1,11 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2010 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala
index 952dd15..88ef8ae 100644
--- a/src/scalacheck/org/scalacheck/Commands.scala
+++ b/src/scalacheck/org/scalacheck/Commands.scala
@@ -1,11 +1,11 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2010 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -27,7 +27,7 @@ trait Commands extends Prop {
class Binding(private val key: State) {
def get: Any = bindings.find(_._1 eq key) match {
case None => sys.error("No value bound")
- case Some(x) => x
+ case Some(x) => x._2
}
}
@@ -46,13 +46,6 @@ trait Commands extends Prop {
def run(s: State): Any
def nextState(s: State): State
- /** @deprecated Use <code>preConditions += ...</code> instead. */
- @deprecated("Use 'preConditions += ...' instead.")
- def preCondition_=(f: State => Boolean) = {
- preConditions.clear
- preConditions += f
- }
-
/** Returns all preconditions merged into a single function */
def preCondition: (State => Boolean) = s => preConditions.toList.forall(_.apply(s))
@@ -60,21 +53,7 @@ trait Commands extends Prop {
* takes the current abstract state as parameter and returns a boolean
* that says if the precondition is fulfilled or not. You can add several
* conditions to the precondition list */
- val preConditions = new collection.mutable.ListBuffer[State => Boolean]
-
- /** @deprecated Use <code>postConditions += ...</code> instead. */
- @deprecated("Use 'postConditions += ...' instead.")
- def postCondition_=(f: (State,Any) => Prop) = {
- postConditions.clear
- postConditions += ((s0,s1,r) => f(s0,r))
- }
-
- /** @deprecated Use <code>postConditions += ...</code> instead. */
- @deprecated("Use 'postConditions += ...' instead.")
- def postCondition_=(f: (State,State,Any) => Prop) = {
- postConditions.clear
- postConditions += f
- }
+ val preConditions = new scala.collection.mutable.ListBuffer[State => Boolean]
/** Returns all postconditions merged into a single function */
def postCondition: (State,State,Any) => Prop = (s0,s1,r) => all(postConditions.map(_.apply(s0,s1,r)): _*)
@@ -86,7 +65,7 @@ trait Commands extends Prop {
* method. The postcondition function should return a Boolean (or
* a Prop instance) that says if the condition holds or not. You can add several
* conditions to the postConditions list. */
- val postConditions = new collection.mutable.ListBuffer[(State,State,Any) => Prop]
+ val postConditions = new scala.collection.mutable.ListBuffer[(State,State,Any) => Prop]
}
/** A command that binds its result for later use */
@@ -138,18 +117,16 @@ trait Commands extends Prop {
private def runCommands(cmds: Cmds): Prop = cmds match {
case Cmds(Nil, _) => proved
case Cmds(c::cs, s::ss) =>
- c.postCondition(s,c.nextState(s),c.run(s)) && runCommands(Cmds(cs,ss))
+ c.postCondition(s,c.nextState(s),c.run_(s)) && runCommands(Cmds(cs,ss))
case _ => sys.error("Should not be here")
}
private def commandsProp: Prop = {
-
def shrinkCmds(cmds: Cmds) = cmds match { case Cmds(cs,_) =>
shrink(cs)(shrinkContainer).flatMap(cs => validCmds(initialState(), cs).toList)
}
forAllShrink(genCmds label "COMMANDS", shrinkCmds)(runCommands _)
-
}
def apply(p: Prop.Params) = commandsProp(p)
diff --git a/src/scalacheck/org/scalacheck/ConsoleReporter.scala b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
index a94d327..93f1dc2 100644
--- a/src/scalacheck/org/scalacheck/ConsoleReporter.scala
+++ b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
@@ -1,11 +1,11 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2010 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -16,28 +16,16 @@ class ConsoleReporter(val verbosity: Int) extends Test.TestCallback {
private val prettyPrms = Params(verbosity)
- override def onPropEval(name: String, w: Int, s: Int, d: Int) =
+ override def onTestResult(name: String, res: Test.Result) = {
if(verbosity > 0) {
if(name == "") {
- if(d == 0) printf("\rPassed %s tests\r", s)
- else printf("\rPassed %s tests; %s discarded\r", s, d)
+ val s = (if(res.passed) "+ " else "! ") + pretty(res, prettyPrms)
+ printf("\r%s\n", format(s, "", "", 75))
} else {
- if(d == 0) printf("\r %s: Passed %s tests\r", name, s)
- else printf("\r %s: Passed %s tests; %s discarded\r", name, s, d)
+ val s = (if(res.passed) "+ " else "! ") + name + ": " +
+ pretty(res, prettyPrms)
+ printf("\r%s\n", format(s, "", "", 75))
}
- Console.flush
- }
-
- override def onTestResult(name: String, res: Test.Result) = {
- if(name == "") {
- print(List.fill(78)(' ').mkString)
- val s = (if(res.passed) "+ " else "! ") + pretty(res, prettyPrms)
- printf("\r%s\n", format(s, "", "", 75))
- } else {
- print(List.fill(78)(' ').mkString)
- val s = (if(res.passed) "+ " else "! ") + name + ": " +
- pretty(res, prettyPrms)
- printf("\r%s\n", format(s, "", "", 75))
}
}
@@ -49,31 +37,6 @@ object ConsoleReporter {
* the given verbosity */
def apply(verbosity: Int = 0) = new ConsoleReporter(verbosity)
- @deprecated("(v1.8)")
- def propReport(s: Int, d: Int) = {
- if(d == 0) printf("\rPassed %s tests\r", s)
- else printf("\rPassed %s tests; %s discarded\r", s, d)
- Console.flush
- }
-
- @deprecated("(v1.8)")
- def propReport(pName: String, s: Int, d: Int) = {
- if(d == 0) printf("\r %s: Passed %s tests\r", pName, s)
- else printf("\r %s: Passed %s tests; %s discarded\r", pName, s, d)
- Console.flush
- }
-
- @deprecated("(v1.8)")
- def testReport(res: Test.Result) = {
- print(List.fill(78)(' ').mkString)
- val s = (if(res.passed) "+ " else "! ") + pretty(res, Params(0))
- printf("\r%s\n", format(s, "", "", 75))
- res
- }
-
- @deprecated("(v1.8)")
- def testStatsEx(res: Test.Result): Unit = testStatsEx("", res)
-
def testStatsEx(msg: String, res: Test.Result) = {
lazy val m = if(msg.length == 0) "" else msg + ": "
res.status match {
diff --git a/src/scalacheck/org/scalacheck/Gen.scala b/src/scalacheck/org/scalacheck/Gen.scala
index ca1dae0..64bb61c 100644
--- a/src/scalacheck/org/scalacheck/Gen.scala
+++ b/src/scalacheck/org/scalacheck/Gen.scala
@@ -1,11 +1,11 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2010 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -59,6 +59,12 @@ object Choose {
}
}
+case class FiniteGenRes[+T](
+ r: T
+)
+
+sealed trait FiniteGen[+T] extends Gen[FiniteGenRes[T]]
+
/** Class that represents a generator. */
sealed trait Gen[+T] {
@@ -150,13 +156,6 @@ sealed trait Gen[+T] {
/** Returns a new property that holds if and only if both this
* and the given generator generates the same result, or both
- * generators generate no result.
- * @deprecated Use <code>==</code> instead */
- @deprecated("Use == instead")
- def ===[U](g: Gen[U]): Prop = this == g
-
- /** Returns a new property that holds if and only if both this
- * and the given generator generates the same result, or both
* generators generate no result. */
def ==[U](g: Gen[U]) = Prop(prms =>
(this(prms.genPrms), g(prms.genPrms)) match {
@@ -207,12 +206,7 @@ object Gen {
def choose(l: Long, h: Long): Long = {
val d = h-l
if (d < 0) throw new IllegalArgumentException("Invalid range")
- else if (d == 0) l
- else {
- val r = math.abs(rng.nextLong)
- val a = if(r == Long.MinValue) 1 else 0
- l + (math.abs(r+a) % d) + a
- }
+ else l + math.abs(rng.nextLong % (d+1))
}
/** @throws IllegalArgumentException if l is greater than h, or if
@@ -226,11 +220,6 @@ object Gen {
}
}
- /* Default generator parameters
- * @deprecated Use <code>Gen.Params()</code> instead */
- @deprecated("Use Gen.Params() instead")
- val defaultParams = Params()
-
/* Generator factory method */
def apply[T](g: Gen.Params => Option[T]) = new Gen[T] {
def apply(p: Gen.Params) = g(p)
@@ -315,20 +304,6 @@ object Gen {
x <- if(i == 0) g1 else if(i == 1) g2 else gs(i-2)
} yield x
- /** Chooses one of the given values, with a weighted random distribution.
- * @deprecated Use <code>frequency</code> with constant generators
- * instead. */
- @deprecated("Use 'frequency' with constant generators instead.")
- def elementsFreq[T](vs: (Int, T)*): Gen[T] =
- frequency(vs.map { case (w,v) => (w, value(v)) } : _*)
-
- /** A generator that returns a random element from a list
- * @deprecated Use <code>oneOf</code> with constant generators instead. */
- @deprecated("Use 'oneOf' with constant generators instead.")
- def elements[T](xs: T*): Gen[T] = if(xs.isEmpty) fail else for {
- i <- choose(0,xs.length-1)
- } yield xs(i)
-
//// List Generators ////
@@ -373,12 +348,6 @@ object Gen {
* <code>containerOfN[List,T](n,g)</code>. */
def listOfN[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g)
- /** Generates a list of the given length. This method is equal to calling
- * <code>containerOfN[List,T](n,g)</code>.
- * @deprecated Use the method <code>listOfN</code> instead. */
- @deprecated("Use 'listOfN' instead.")
- def vectorOf[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g)
-
/** A generator that picks a random number of elements from a list */
def someOf[T](l: Iterable[T]) = choose(0,l.size) flatMap (pick(_,l))
@@ -443,16 +412,6 @@ object Gen {
//// Number Generators ////
- /* Generates positive integers
- * @deprecated Use <code>posNum[Int]code> instead */
- @deprecated("Use posNum[Int] instead")
- def posInt: Gen[Int] = sized(max => choose(1, max))
-
- /* Generates negative integers
- * @deprecated Use <code>negNum[Int]code> instead */
- @deprecated("Use negNum[Int] instead")
- def negInt: Gen[Int] = sized(max => choose(-max, -1))
-
/** Generates positive numbers of uniform distribution, with an
* upper bound of the generation size parameter. */
def posNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = {
@@ -484,4 +443,89 @@ object Gen {
)
frequency(allGens: _*)
}
+
+ /** Takes a function and returns a generator that generates arbitrary
+ * results of that function by feeding it with arbitrarily generated input
+ * parameters. */
+ def resultOf[T,R](f: T => R)(implicit a: Arbitrary[T]): Gen[R] =
+ arbitrary[T] map f
+
+ /** Takes a function and returns a generator that generates arbitrary
+ * results of that function by feeding it with arbitrarily generated input
+ * parameters. */
+ def resultOf[T1,T2,R](f: (T1,T2) => R)(implicit
+ a1: Arbitrary[T1], a2: Arbitrary[T2]
+ ): Gen[R] = arbitrary[T1] flatMap { t => resultOf(f(t, _:T2)) }
+
+ /** Takes a function and returns a generator that generates arbitrary
+ * results of that function by feeding it with arbitrarily generated input
+ * parameters. */
+ def resultOf[T1,T2,T3,R](f: (T1,T2,T3) => R)(implicit
+ a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3]
+ ): Gen[R] = arbitrary[T1] flatMap { t => resultOf(f(t, _:T2, _:T3)) }
+
+ /** Takes a function and returns a generator that generates arbitrary
+ * results of that function by feeding it with arbitrarily generated input
+ * parameters. */
+ def resultOf[T1,T2,T3,T4,R](f: (T1,T2,T3,T4) => R)(implicit
+ a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4]
+ ): Gen[R] = arbitrary[T1] flatMap {
+ t => resultOf(f(t, _:T2, _:T3, _:T4))
+ }
+
+ /** Takes a function and returns a generator that generates arbitrary
+ * results of that function by feeding it with arbitrarily generated input
+ * parameters. */
+ def resultOf[T1,T2,T3,T4,T5,R](f: (T1,T2,T3,T4,T5) => R)(implicit
+ a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
+ a5: Arbitrary[T5]
+ ): Gen[R] = arbitrary[T1] flatMap {
+ t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5))
+ }
+
+ /** Takes a function and returns a generator that generates arbitrary
+ * results of that function by feeding it with arbitrarily generated input
+ * parameters. */
+ def resultOf[T1,T2,T3,T4,T5,T6,R](
+ f: (T1,T2,T3,T4,T5,T6) => R)(implicit
+ a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3],
+ a4: Arbitrary[T4], a5: Arbitrary[T5], a6: Arbitrary[T6]
+ ): Gen[R] = arbitrary[T1] flatMap {
+ t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6))
+ }
+
+ /** Takes a function and returns a generator that generates arbitrary
+ * results of that function by feeding it with arbitrarily generated input
+ * parameters. */
+ def resultOf[T1,T2,T3,T4,T5,T6,T7,R](
+ f: (T1,T2,T3,T4,T5,T6,T7) => R)(implicit
+ a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3],
+ a4: Arbitrary[T4], a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7]
+ ): Gen[R] = arbitrary[T1] flatMap {
+ t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7))
+ }
+
+ /** Takes a function and returns a generator that generates arbitrary
+ * results of that function by feeding it with arbitrarily generated input
+ * parameters. */
+ def resultOf[T1,T2,T3,T4,T5,T6,T7,T8,R](
+ f: (T1,T2,T3,T4,T5,T6,T7,T8) => R)(implicit
+ a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
+ a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8]
+ ): Gen[R] = arbitrary[T1] flatMap {
+ t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8))
+ }
+
+ /** Takes a function and returns a generator that generates arbitrary
+ * results of that function by feeding it with arbitrarily generated input
+ * parameters. */
+ def resultOf[T1,T2,T3,T4,T5,T6,T7,T8,T9,R](
+ f: (T1,T2,T3,T4,T5,T6,T7,T8,T9) => R)(implicit
+ a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
+ a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8],
+ a9: Arbitrary[T9]
+ ): Gen[R] = arbitrary[T1] flatMap {
+ t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8, _:T9))
+ }
+
}
diff --git a/src/scalacheck/org/scalacheck/Pretty.scala b/src/scalacheck/org/scalacheck/Pretty.scala
index d3945a1..eeb5936 100644
--- a/src/scalacheck/org/scalacheck/Pretty.scala
+++ b/src/scalacheck/org/scalacheck/Pretty.scala
@@ -1,11 +1,11 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2010 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -49,6 +49,8 @@ object Pretty {
implicit def prettyAny(t: Any) = Pretty { p => t.toString }
+ implicit def prettyString(t: String) = Pretty { p => "\""++t++"\"" }
+
implicit def prettyList(l: List[Any]) = Pretty { p =>
l.map("\""+_+"\"").mkString("List(", ", ", ")")
}
@@ -59,7 +61,10 @@ object Pretty {
getClassName+"."+getMethodName + "("+getFileName+":"+getLineNumber+")"
}
- val strs2 = if(prms.verbosity > 0) strs else strs.take(5)
+ val strs2 =
+ if(prms.verbosity <= 0) Array[String]()
+ else if(prms.verbosity <= 1) strs.take(5)
+ else strs
e.getClass.getName + ": " + e.getMessage / strs2.mkString("\n")
}
@@ -69,7 +74,7 @@ object Pretty {
for((a,i) <- args.zipWithIndex) yield {
val l = if(a.label == "") "ARG_"+i else a.label
val s =
- if(a.shrinks == 0) ""
+ if(a.shrinks == 0 || prms.verbosity <= 1) ""
else " (orig arg: "+a.prettyOrigArg(prms)+")"
"> "+l+": "+a.prettyArg(prms)+""+s
@@ -91,7 +96,7 @@ object Pretty {
}
implicit def prettyTestRes(res: Test.Result) = Pretty { prms =>
- def labels(ls: collection.immutable.Set[String]) =
+ def labels(ls: scala.collection.immutable.Set[String]) =
if(ls.isEmpty) ""
else "> Labels of failing property: " / ls.mkString("\n")
val s = res.status match {
@@ -106,9 +111,17 @@ object Pretty {
"Exception raised on property evaluation."/labels(l)/pretty(args,prms)/
"> Exception: "+pretty(e,prms)
case Test.GenException(e) =>
- "Exception raised on argument generation."/"> Stack trace: "/pretty(e,prms)
+ "Exception raised on argument generation."/
+ "> Exception: "+pretty(e,prms)
}
- s/pretty(res.freqMap,prms)
+ val t = if(prms.verbosity <= 1) "" else "Elapsed time: "+prettyTime(res.time)
+ s/t/pretty(res.freqMap,prms)
}
+ def prettyTime(millis: Long): String = {
+ val min = millis/(60*1000)
+ val sec = (millis-(60*1000*min)) / 1000d
+ if(min <= 0) "%.3f sec ".format(sec)
+ else "%d min %.3f sec ".format(min, sec)
+ }
}
diff --git a/src/scalacheck/org/scalacheck/Prop.scala b/src/scalacheck/org/scalacheck/Prop.scala
index 199a668..dfd85a8 100644
--- a/src/scalacheck/org/scalacheck/Prop.scala
+++ b/src/scalacheck/org/scalacheck/Prop.scala
@@ -1,16 +1,17 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2010 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
import util.{FreqMap,Buildable}
import scala.collection._
+import scala.annotation.tailrec
/** A property is a generator that generates a property result */
trait Prop {
@@ -39,17 +40,35 @@ trait Prop {
/** Convenience method that checks this property and reports the
* result on the console. If you need to get the results from the test use
* the <code>check</code> methods in <code>Test</code> instead. */
- def check(): Unit = check(Test.Params())
+ def check: Unit = check(Test.Params())
- /** Convenience method that makes it possible to use a this property
- * as an application that checks itself on execution */
- def main(args: Array[String]): Unit =
+ /** The logic for main, separated out to make it easier to
+ * avoid System.exit calls. Returns exit code.
+ */
+ def mainRunner(args: Array[String]): Int = {
Test.cmdLineParser.parseParams(args) match {
- case Success(params, _) => Test.check(params, this)
+ case Success(params, _) =>
+ if (Test.check(params, this).passed) 0
+ else 1
case e: NoSuccess =>
println("Incorrect options:"+"\n"+e+"\n")
Test.cmdLineParser.printHelp
+ -1
}
+ }
+
+ /** Whether main should call System.exit with an exit code.
+ * Defaults to true; override to change.
+ */
+ def mainCallsExit = false
+
+ /** Convenience method that makes it possible to use this property
+ * as an application that checks itself on execution */
+ def main(args: Array[String]): Unit = {
+ val code = mainRunner(args)
+ if (mainCallsExit)
+ System exit code
+ }
/** Returns a new property that holds if and only if both this
* and the given property hold. If one of the properties doesn't
@@ -84,15 +103,6 @@ trait Prop {
}
}
- /** Returns a new property that holds if and only if both this
- * and the given property generates a result with the exact
- * same status. Note that this means that if one of the properties is
- * proved, and the other one passed, then the resulting property
- * will fail.
- * @deprecated Use <code>==</code> instead */
- @deprecated("Use == instead.")
- def ===(p: Prop): Prop = this == p
-
override def toString = "Prop"
/** Put a label on the property to make test reports clearer */
@@ -183,7 +193,7 @@ object Prop {
case (_,Undecided) => r
case (_,Proof) => merge(this, r, this.status)
- case (Proof,_) => merge(this, r, this.status)
+ case (Proof,_) => merge(this, r, r.status)
case (True,True) => merge(this, r, True)
}
@@ -318,13 +328,12 @@ object Prop {
def =?[T](x: T, y: T)(implicit pp: T => Pretty): Prop = ?=(y, x)
/** A property that depends on the generator size */
- def sizedProp(f: Int => Prop): Prop = Prop(prms => f(prms.genPrms.size)(prms))
-
- /** Implication
- * @deprecated Use the implication operator of the Prop class instead
- */
- @deprecated("Use the implication operator of the Prop class instead")
- def ==>(b: => Boolean, p: => Prop): Prop = (b: Prop) ==> p
+ def sizedProp(f: Int => Prop): Prop = Prop { prms =>
+ // provedToTrue since if the property is proved for
+ // one size, it shouldn't be regarded as proved for
+ // all sizes.
+ provedToTrue(f(prms.genPrms.size)(prms))
+ }
/** Implication with several conditions */
def imply[T](x: T, f: PartialFunction[T,Prop]): Prop =
@@ -738,4 +747,17 @@ object Prop {
a8: Arbitrary[A8], s8: Shrink[A8], pp8: A8 => Pretty
): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7, _:A8)))
+ /** Ensures that the property expression passed in completes within the given space of time. */
+ def within(maximumMs: Long)(wrappedProp: => Prop): Prop = new Prop {
+ @tailrec private def attempt(prms: Params, endTime: Long): Result = {
+ val result = wrappedProp.apply(prms)
+ if (System.currentTimeMillis > endTime) {
+ (if (result.failure) result else Result(False)).label("Timeout")
+ } else {
+ if (result.success) result
+ else attempt(prms, endTime)
+ }
+ }
+ def apply(prms: Params) = attempt(prms, System.currentTimeMillis + maximumMs)
+ }
}
diff --git a/src/scalacheck/org/scalacheck/Properties.scala b/src/scalacheck/org/scalacheck/Properties.scala
index 7fceb4b..2605923 100644
--- a/src/scalacheck/org/scalacheck/Properties.scala
+++ b/src/scalacheck/org/scalacheck/Properties.scala
@@ -1,11 +1,11 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2010 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -51,17 +51,23 @@ class Properties(val name: String) extends Prop {
/** Convenience method that checks the properties and reports the
* result on the console. If you need to get the results from the test use
* the <code>check</code> methods in <code>Test</code> instead. */
- override def check(): Unit = check(Test.Params())
+ override def check: Unit = check(Test.Params())
- /** Convenience method that makes it possible to use a this instance
- * as an application that checks itself on execution */
- override def main(args: Array[String]): Unit =
+ /** The logic for main, separated out to make it easier to
+ * avoid System.exit calls. Returns exit code.
+ */
+ override def mainRunner(args: Array[String]): Int = {
Test.cmdLineParser.parseParams(args) match {
- case Success(params, _) => Test.checkProperties(params, this)
+ case Success(params, _) =>
+ val res = Test.checkProperties(params, this)
+ val failed = res.filter(!_._2.passed).size
+ failed
case e: NoSuccess =>
println("Incorrect options:"+"\n"+e+"\n")
Test.cmdLineParser.printHelp
+ -1
}
+ }
/** Adds all properties from another property collection to this one. */
def include(ps: Properties) = for((n,p) <- ps.properties) property(n) = p
diff --git a/src/scalacheck/org/scalacheck/Shrink.scala b/src/scalacheck/org/scalacheck/Shrink.scala
index 70ab5f6..ae15bd9 100644
--- a/src/scalacheck/org/scalacheck/Shrink.scala
+++ b/src/scalacheck/org/scalacheck/Shrink.scala
@@ -1,11 +1,11 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2010 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
diff --git a/src/scalacheck/org/scalacheck/Test.scala b/src/scalacheck/org/scalacheck/Test.scala
index 3261862..4368184 100644
--- a/src/scalacheck/org/scalacheck/Test.scala
+++ b/src/scalacheck/org/scalacheck/Test.scala
@@ -1,11 +1,11 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2010 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck
@@ -19,16 +19,22 @@ object Test {
/** Test parameters */
case class Params(
minSuccessfulTests: Int = 100,
- maxDiscardedTests: Int = 500,
+
+ /** @deprecated Use maxDiscardRatio instead. */
+ @deprecated("Use maxDiscardRatio instead.", "1.10")
+ maxDiscardedTests: Int = -1,
+
minSize: Int = 0,
maxSize: Int = Gen.Params().size,
rng: java.util.Random = Gen.Params().rng,
workers: Int = 1,
- testCallback: TestCallback = new TestCallback {}
+ testCallback: TestCallback = new TestCallback {},
+ maxDiscardRatio: Float = 5,
+ customClassLoader: Option[ClassLoader] = None
)
/** Test statistics */
- case class Result(status: Status, succeeded: Int, discarded: Int, freqMap: FM) {
+ case class Result(status: Status, succeeded: Int, discarded: Int, freqMap: FM, time: Long = 0) {
def passed = status match {
case Passed => true
case Proved(_) => true
@@ -90,7 +96,7 @@ object Test {
import prms._
if(
minSuccessfulTests <= 0 ||
- maxDiscardedTests < 0 ||
+ maxDiscardRatio <= 0 ||
minSize < 0 ||
maxSize < minSize ||
workers <= 0
@@ -106,12 +112,13 @@ object Test {
val names = Set("minSuccessfulTests", "s")
val help = "Number of tests that must succeed in order to pass a property"
}
- object OptMaxDiscarded extends IntOpt {
- val default = Test.Params().maxDiscardedTests
- val names = Set("maxDiscardedTests", "d")
+ object OptMaxDiscardRatio extends FloatOpt {
+ val default = Test.Params().maxDiscardRatio
+ val names = Set("maxDiscardRatio", "r")
val help =
- "Number of tests that can be discarded before ScalaCheck stops " +
- "testing a property"
+ "The maximum ratio between discarded and succeeded tests " +
+ "allowed before ScalaCheck stops testing a property. At " +
+ "least minSuccessfulTests will always be tested, though."
}
object OptMinSize extends IntOpt {
val default = Test.Params().minSize
@@ -135,46 +142,54 @@ object Test {
}
val opts = Set[Opt[_]](
- OptMinSuccess, OptMaxDiscarded, OptMinSize,
+ OptMinSuccess, OptMaxDiscardRatio, OptMinSize,
OptMaxSize, OptWorkers, OptVerbosity
)
def parseParams(args: Array[String]) = parseArgs(args) {
optMap => Test.Params(
- optMap(OptMinSuccess),
- optMap(OptMaxDiscarded),
- optMap(OptMinSize),
- optMap(OptMaxSize),
- Test.Params().rng,
- optMap(OptWorkers),
- ConsoleReporter(optMap(OptVerbosity))
+ minSuccessfulTests = optMap(OptMinSuccess),
+ maxDiscardRatio = optMap(OptMaxDiscardRatio),
+ minSize = optMap(OptMinSize),
+ maxSize = optMap(OptMaxSize),
+ rng = Test.Params().rng,
+ workers = optMap(OptWorkers),
+ testCallback = ConsoleReporter(optMap(OptVerbosity))
)
}
}
/** Tests a property with the given testing parameters, and returns
* the test results. */
- def check(prms: Params, p: Prop): Result = {
+ def check(params: Params, p: Prop): Result = {
+
+ // maxDiscardedTests is deprecated, but if someone
+ // uses it let it override maxDiscardRatio
+ val mdr =
+ if(params.maxDiscardedTests < 0) params.maxDiscardRatio
+ else (params.maxDiscardedTests: Float)/(params.minSuccessfulTests: Float)
+ val prms = params.copy( maxDiscardRatio = mdr)
+
import prms._
- import actors.Futures.future
- //import scala.concurrent.ops.future
+ import scala.actors.Futures.future
assertParams(prms)
if(workers > 1)
assert(!p.isInstanceOf[Commands], "Commands cannot be checked multi-threaded")
- val iterations = minSuccessfulTests / workers
- val sizeStep = (maxSize-minSize) / (minSuccessfulTests: Float)
+ val iterations = math.ceil(minSuccessfulTests / (workers: Double))
+ val sizeStep = (maxSize-minSize) / (iterations*workers)
var stop = false
- def worker(workerdIdx: Int) = future {
- var n = 0
- var d = 0
- var size = workerdIdx*sizeStep
+ def worker(workerIdx: Int) = future {
+ params.customClassLoader.map(Thread.currentThread.setContextClassLoader(_))
+ var n = 0 // passed tests
+ var d = 0 // discarded tests
var res: Result = null
var fm = FreqMap.empty[immutable.Set[Any]]
while(!stop && res == null && n < iterations) {
- val propPrms = Prop.Params(Gen.Params(size.round, prms.rng), fm)
+ val size = (minSize: Double) + (sizeStep * (workerIdx + (workers*(n+d))))
+ val propPrms = Prop.Params(Gen.Params(size.round.toInt, prms.rng), fm)
secure(p(propPrms)) match {
case Right(e) => res =
Result(GenException(e), n, d, FreqMap.empty[immutable.Set[Any]])
@@ -185,43 +200,58 @@ object Test {
propRes.status match {
case Prop.Undecided =>
d += 1
- testCallback.onPropEval("", workerdIdx, n, d)
- if(d >= maxDiscardedTests) res = Result(Exhausted, n, d, fm)
+ testCallback.onPropEval("", workerIdx, n, d)
+ // The below condition is kind of hacky. We have to have
+ // some margin, otherwise workers might stop testing too
+ // early because they have been exhausted, but the overall
+ // test has not.
+ if (n+d > minSuccessfulTests && 1+workers*maxDiscardRatio*n < d)
+ res = Result(Exhausted, n, d, fm)
case Prop.True =>
n += 1
- testCallback.onPropEval("", workerdIdx, n, d)
+ testCallback.onPropEval("", workerIdx, n, d)
case Prop.Proof =>
n += 1
res = Result(Proved(propRes.args), n, d, fm)
- case Prop.False => res =
- Result(Failed(propRes.args, propRes.labels), n, d, fm)
- case Prop.Exception(e) => res =
- Result(PropException(propRes.args, e, propRes.labels), n, d, fm)
+ stop = true
+ case Prop.False =>
+ res = Result(Failed(propRes.args,propRes.labels), n, d, fm)
+ stop = true
+ case Prop.Exception(e) =>
+ res = Result(PropException(propRes.args,e,propRes.labels), n, d, fm)
+ stop = true
}
}
- size += sizeStep
}
- if(res != null) stop = true
- else res = Result(Passed, n, d, fm)
- res
+ if (res == null) {
+ if (maxDiscardRatio*n > d) Result(Passed, n, d, fm)
+ else Result(Exhausted, n, d, fm)
+ } else res
}
- def mergeResults(r1: () => Result, r2: () => Result) = r1() match {
- case Result(Passed, s1, d1, fm1) => r2() match {
- case Result(Passed, s2, d2, fm2) if d1+d2 >= maxDiscardedTests =>
- () => Result(Exhausted, s1+s2, d1+d2, fm1++fm2)
- case Result(st, s2, d2, fm2) =>
- () => Result(st, s1+s2, d1+d2, fm1++fm2)
+ def mergeResults(r1: () => Result, r2: () => Result) = {
+ val Result(st1, s1, d1, fm1, _) = r1()
+ val Result(st2, s2, d2, fm2, _) = r2()
+ if (st1 != Passed && st1 != Exhausted)
+ () => Result(st1, s1+s2, d1+d2, fm1++fm2, 0)
+ else if (st2 != Passed && st2 != Exhausted)
+ () => Result(st2, s1+s2, d1+d2, fm1++fm2, 0)
+ else {
+ if (s1+s2 >= minSuccessfulTests && maxDiscardRatio*(s1+s2) >= (d1+d2))
+ () => Result(Passed, s1+s2, d1+d2, fm1++fm2, 0)
+ else
+ () => Result(Exhausted, s1+s2, d1+d2, fm1++fm2, 0)
}
- case r => () => r
}
+ val start = System.currentTimeMillis
val results = for(i <- 0 until workers) yield worker(i)
val r = results.reduceLeft(mergeResults)()
stop = true
results foreach (_.apply())
- prms.testCallback.onTestResult("", r)
- r
+ val timedRes = r.copy(time = System.currentTimeMillis-start)
+ prms.testCallback.onTestResult("", timedRes)
+ timedRes
}
def checkProperties(prms: Params, ps: Properties): Seq[(String,Result)] =
@@ -236,78 +266,4 @@ object Test {
(name,res)
}
-
- // Deprecated methods //
-
- /** Default testing parameters
- * @deprecated Use <code>Test.Params()</code> instead */
- @deprecated("Use Test.Params() instead")
- val defaultParams = Params()
-
- /** Property evaluation callback. Takes number of passed and
- * discarded tests, respectively */
- @deprecated("(v1.8)")
- type PropEvalCallback = (Int,Int) => Unit
-
- /** Property evaluation callback. Takes property name, and number of passed
- * and discarded tests, respectively */
- @deprecated("(v1.8)")
- type NamedPropEvalCallback = (String,Int,Int) => Unit
-
- /** Test callback. Takes property name, and test results. */
- @deprecated("(v1.8)")
- type TestResCallback = (String,Result) => Unit
-
- /** @deprecated (v1.8) Use <code>check(prms.copy(testCallback = myCallback), p)</code> instead. */
- @deprecated("(v1.8) Use check(prms.copy(testCallback = myCallback), p) instead")
- def check(prms: Params, p: Prop, propCallb: PropEvalCallback): Result = {
- val testCallback = new TestCallback {
- override def onPropEval(n: String, t: Int, s: Int, d: Int) = propCallb(s,d)
- }
- check(prms copy (testCallback = testCallback), p)
- }
-
- /** Tests a property and prints results to the console. The
- * <code>maxDiscarded</code> parameter specifies how many
- * discarded tests that should be allowed before ScalaCheck
- * @deprecated (v1.8) Use <code>check(Params(maxDiscardedTests = n, testCallback = ConsoleReporter()), p)</code> instead. */
- @deprecated("(v1.8) Use check(Params(maxDiscardedTests = n, testCallback = ConsoleReporter()), p) instead.")
- def check(p: Prop, maxDiscarded: Int): Result =
- check(Params(maxDiscardedTests = maxDiscarded, testCallback = ConsoleReporter()), p)
-
- /** Tests a property and prints results to the console
- * @deprecated (v1.8) Use <code>check(Params(testCallback = ConsoleReporter()), p)</code> instead. */
- @deprecated("(v1.8) Use check(Params(testCallback = ConsoleReporter()), p) instead.")
- def check(p: Prop): Result = check(Params(testCallback = ConsoleReporter()), p)
-
- /** Tests all properties with the given testing parameters, and returns
- * the test results. <code>f</code> is a function which is called each
- * time a property is evaluted. <code>g</code> is a function called each
- * time a property has been fully tested.
- * @deprecated (v1.8) Use <code>checkProperties(prms.copy(testCallback = myCallback), ps)</code> instead. */
- @deprecated("(v1.8) Use checkProperties(prms.copy(testCallback = myCallback), ps) instead.")
- def checkProperties(ps: Properties, prms: Params,
- propCallb: NamedPropEvalCallback, testCallb: TestResCallback
- ): Seq[(String,Result)] = {
- val testCallback = new TestCallback {
- override def onPropEval(n: String, t: Int, s: Int, d: Int) = propCallb(n,s,d)
- override def onTestResult(n: String, r: Result) = testCallb(n,r)
- }
- checkProperties(prms copy (testCallback = testCallback), ps)
- }
-
- /** Tests all properties with the given testing parameters, and returns
- * the test results.
- * @deprecated (v1.8) Use checkProperties(prms, ps) instead */
- @deprecated("(v1.8) Use checkProperties(prms, ps) instead")
- def checkProperties(ps: Properties, prms: Params): Seq[(String,Result)] =
- checkProperties(ps, prms, (n,s,d) => (), (n,s) => ())
-
- /** Tests all properties with default testing parameters, and returns
- * the test results. The results are also printed on the console during
- * testing.
- * @deprecated (v1.8) Use <code>checkProperties(Params(), ps)</code> instead. */
- @deprecated("(v1.8) Use checkProperties(Params(), ps) instead.")
- def checkProperties(ps: Properties): Seq[(String,Result)] =
- checkProperties(Params(), ps)
}
diff --git a/src/scalacheck/org/scalacheck/util/Buildable.scala b/src/scalacheck/org/scalacheck/util/Buildable.scala
index a41448e..221b8a6 100644
--- a/src/scalacheck/org/scalacheck/util/Buildable.scala
+++ b/src/scalacheck/org/scalacheck/util/Buildable.scala
@@ -1,15 +1,16 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2010 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util
import scala.collection._
+import scala.reflect.ClassTag
trait Buildable[T,C[_]] {
def builder: mutable.Builder[T,C[T]]
@@ -30,7 +31,7 @@ object Buildable {
def builder = (new mutable.ListBuffer[T]).mapResult(_.toStream)
}
- implicit def buildableArray[T](implicit cm: ClassManifest[T]) =
+ implicit def buildableArray[T](implicit cm: ClassTag[T]) =
new Buildable[T,Array] {
def builder = mutable.ArrayBuilder.make[T]
}
diff --git a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
index 5fb572a..4683c34 100644
--- a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
+++ b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
@@ -1,11 +1,11 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2010 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util
@@ -26,10 +26,11 @@ trait CmdLineParser extends Parsers {
}
trait Flag extends Opt[Unit]
trait IntOpt extends Opt[Int]
+ trait FloatOpt extends Opt[Float]
trait StrOpt extends Opt[String]
class OptMap {
- private val opts = new collection.mutable.HashMap[Opt[_], Any]
+ private val opts = new scala.collection.mutable.HashMap[Opt[_], Any]
def apply(flag: Flag): Boolean = opts.contains(flag)
def apply[T](opt: Opt[T]): T = opts.get(opt) match {
case None => opt.default
@@ -68,11 +69,17 @@ trait CmdLineParser extends Parsers {
case s if s != null && s.length > 0 && s.forall(_.isDigit) => s.toInt
})
+ private val floatVal: Parser[Float] = accept("float", {
+ case s if s != null && s.matches("[0987654321]+\\.?[0987654321]*")
+ => s.toFloat
+ })
+
private case class OptVal[T](o: Opt[T], v: T)
private val optVal: Parser[OptVal[Any]] = opt into {
case o: Flag => success(OptVal(o, ()))
case o: IntOpt => intVal ^^ (v => OptVal(o, v))
+ case o: FloatOpt => floatVal ^^ (v => OptVal(o, v))
case o: StrOpt => strVal ^^ (v => OptVal(o, v))
}
diff --git a/src/scalacheck/org/scalacheck/util/FreqMap.scala b/src/scalacheck/org/scalacheck/util/FreqMap.scala
index 11cdb3e..c7474d3 100644
--- a/src/scalacheck/org/scalacheck/util/FreqMap.scala
+++ b/src/scalacheck/org/scalacheck/util/FreqMap.scala
@@ -1,11 +1,11 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2010 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util
diff --git a/src/scalacheck/org/scalacheck/util/StdRand.scala b/src/scalacheck/org/scalacheck/util/StdRand.scala
index ccdbbea..317b0cc 100644
--- a/src/scalacheck/org/scalacheck/util/StdRand.scala
+++ b/src/scalacheck/org/scalacheck/util/StdRand.scala
@@ -1,11 +1,11 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2010 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
-\*-------------------------------------------------------------------------*/
+\*------------------------------------------------------------------------ */
package org.scalacheck.util
diff --git a/src/scalap/scala/tools/scalap/Arguments.scala b/src/scalap/scala/tools/scalap/Arguments.scala
index 37f6cd8..a151e30 100644
--- a/src/scalap/scala/tools/scalap/Arguments.scala
+++ b/src/scalap/scala/tools/scalap/Arguments.scala
@@ -1,6 +1,6 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2011, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
@@ -48,7 +48,7 @@ object Arguments {
def parseBinding(str: String, separator: Char): (String, String) = (str indexOf separator) match {
case -1 => argumentError("missing '" + separator + "' in binding '" + str + "'") ; Pair("", "")
- case idx => Pair(str take idx trim, str drop (idx + 1) trim)
+ case idx => Pair((str take idx).trim, (str drop (idx + 1)).trim)
}
def parse(args: Array[String]): Arguments = {
diff --git a/src/scalap/scala/tools/scalap/ByteArrayReader.scala b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
index 73220d1..bb00162 100644
--- a/src/scalap/scala/tools/scalap/ByteArrayReader.scala
+++ b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
@@ -1,6 +1,6 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2011, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
@@ -72,7 +72,7 @@ class ByteArrayReader(content: Array[Byte]) {
/** read an UTF8 encoded string
*/
def nextUTF8(len: Int): String = {
- val cs = scala.io.Codec.fromUTF8(buf.slice(bp, bp + len))
+ val cs = scala.io.Codec.fromUTF8(buf, bp, len)
bp += len
new String(cs)
}
diff --git a/src/scalap/scala/tools/scalap/Classfile.scala b/src/scalap/scala/tools/scalap/Classfile.scala
index ac3ee23..8082b6b 100644
--- a/src/scalap/scala/tools/scalap/Classfile.scala
+++ b/src/scalap/scala/tools/scalap/Classfile.scala
@@ -1,6 +1,6 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2011, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/
**
*/
diff --git a/src/scalap/scala/tools/scalap/Classfiles.scala b/src/scalap/scala/tools/scalap/Classfiles.scala
index 72b3824..9295dd7 100644
--- a/src/scalap/scala/tools/scalap/Classfiles.scala
+++ b/src/scalap/scala/tools/scalap/Classfiles.scala
@@ -1,6 +1,6 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2011, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
@@ -41,31 +41,5 @@ object Classfiles {
CONSTANT_INTFMETHODREF -> "InterfaceMethod",
CONSTANT_NAMEANDTYPE -> "NameAndType"
)
-
- final val BAD_ATTR = 0x00000
- final val SOURCEFILE_ATTR = 0x00001
- final val SYNTHETIC_ATTR = 0x00002
- final val DEPRECATED_ATTR = 0x00004
- final val CODE_ATTR = 0x00008
- final val EXCEPTIONS_ATTR = 0x00010
- final val CONSTANT_VALUE_ATTR = 0x00020
- final val LINE_NUM_TABLE_ATTR = 0x00040
- final val LOCAL_VAR_TABLE_ATTR = 0x00080
- final val INNERCLASSES_ATTR = 0x08000
- final val META_ATTR = 0x10000
- final val SCALA_ATTR = 0x20000
-
- final val SOURCEFILE_N = "SourceFile"
- final val SYNTHETIC_N = "Synthetic"
- final val DEPRECATED_N = "Deprecated"
- final val CODE_N = "Code"
- final val EXCEPTIONS_N = "Exceptions"
- final val CONSTANT_VALUE_N = "ConstantValue"
- final val LINE_NUM_TABLE_N = "LineNumberTable"
- final val LOCAL_VAR_TABLE_N = "LocalVariableTable"
- final val INNERCLASSES_N = "InnerClasses"
- final val META_N = "JacoMeta"
- final val SCALA_N = "ScalaSignature"
- final val CONSTR_N = "<init>"
}
diff --git a/src/scalap/scala/tools/scalap/CodeWriter.scala b/src/scalap/scala/tools/scalap/CodeWriter.scala
index d895ce3..8254c2d 100644
--- a/src/scalap/scala/tools/scalap/CodeWriter.scala
+++ b/src/scalap/scala/tools/scalap/CodeWriter.scala
@@ -1,6 +1,6 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2011, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
@@ -13,7 +13,7 @@ import java.io._
class CodeWriter(writer: Writer) {
- private val nl = compat.Platform.EOL
+ private val nl = scala.compat.Platform.EOL
private var step = " "
private var level = 0
private var align = false
@@ -58,7 +58,7 @@ class CodeWriter(writer: Writer) {
try {
writer.write(nl)
} catch {
- case e => sys.error("IO error")
+ case e: Exception => sys.error("IO error")
}
line = align
align = true
@@ -127,7 +127,7 @@ class CodeWriter(writer: Writer) {
line = false
this
} catch {
- case e => sys.error("IO error")
+ case e: Exception => sys.error("IO error")
}
override def toString(): String = writer.toString()
diff --git a/src/scalap/scala/tools/scalap/Decode.scala b/src/scalap/scala/tools/scalap/Decode.scala
index 8160417..76ce3f4 100644
--- a/src/scalap/scala/tools/scalap/Decode.scala
+++ b/src/scalap/scala/tools/scalap/Decode.scala
@@ -1,6 +1,6 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2011, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
@@ -11,8 +11,8 @@ package scala.tools.scalap
import scala.tools.scalap.scalax.rules.scalasig._
import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.nsc.util.ScalaClassLoader.getSystemLoader
-import scala.reflect.generic.ByteCodecs
+import scala.tools.nsc.util.ScalaClassLoader.appLoader
+import scala.reflect.internal.pickling.ByteCodecs
import ClassFileParser.{ ConstValueIndex, Annotation }
import Main.{ SCALA_SIG, SCALA_SIG_ANNOTATION, BYTES_VALUE }
@@ -31,7 +31,7 @@ object Decode {
/** Return the classfile bytes representing the scala sig classfile attribute.
* This has been obsoleted by the switch to annotations.
*/
- def scalaSigBytes(name: String): Option[Array[Byte]] = scalaSigBytes(name, getSystemLoader())
+ def scalaSigBytes(name: String): Option[Array[Byte]] = scalaSigBytes(name, appLoader)
def scalaSigBytes(name: String, classLoader: ScalaClassLoader): Option[Array[Byte]] = {
val bytes = classLoader.classBytes(name)
val reader = new ByteArrayReader(bytes)
@@ -41,7 +41,7 @@ object Decode {
/** Return the bytes representing the annotation
*/
- def scalaSigAnnotationBytes(name: String): Option[Array[Byte]] = scalaSigAnnotationBytes(name, getSystemLoader())
+ def scalaSigAnnotationBytes(name: String): Option[Array[Byte]] = scalaSigAnnotationBytes(name, appLoader)
def scalaSigAnnotationBytes(name: String, classLoader: ScalaClassLoader): Option[Array[Byte]] = {
val bytes = classLoader.classBytes(name)
val byteCode = ByteCode(bytes)
@@ -49,7 +49,7 @@ object Decode {
import classFile._
classFile annotation SCALA_SIG_ANNOTATION map { case Annotation(_, els) =>
- val bytesElem = els find (x => constant(x.elementNameIndex) == BYTES_VALUE) get
+ val bytesElem = els find (x => constant(x.elementNameIndex) == BYTES_VALUE) getOrElse null
val _bytes = bytesElem.elementValue match { case ConstValueIndex(x) => constantWrapped(x) }
val bytes = _bytes.asInstanceOf[StringBytesPair].bytes
val length = ByteCodecs.decode(bytes)
@@ -67,7 +67,7 @@ object Decode {
}
for {
- clazz <- getSystemLoader.tryToLoadClass[AnyRef](outer)
+ clazz <- appLoader.tryToLoadClass[AnyRef](outer)
ssig <- ScalaSigParser.parse(clazz)
}
yield {
@@ -81,7 +81,7 @@ object Decode {
xs.toList map (_.name dropRight 1)
}
- (ssig.symbols collect f).flatten toList
+ (ssig.symbols collect f).flatten.toList
}
}
@@ -89,7 +89,7 @@ object Decode {
*/
private[scala] def typeAliases(pkg: String) = {
for {
- clazz <- getSystemLoader.tryToLoadClass[AnyRef](pkg + ".package")
+ clazz <- appLoader.tryToLoadClass[AnyRef](pkg + ".package")
ssig <- ScalaSigParser.parse(clazz)
}
yield {
diff --git a/src/scalap/scala/tools/scalap/JavaWriter.scala b/src/scalap/scala/tools/scalap/JavaWriter.scala
index db9d6c5..d64c54a 100644
--- a/src/scalap/scala/tools/scalap/JavaWriter.scala
+++ b/src/scalap/scala/tools/scalap/JavaWriter.scala
@@ -1,6 +1,6 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2011, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
@@ -9,7 +9,7 @@
package scala.tools.scalap
import java.io._
-
+import scala.reflect.NameTransformer
class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer) {
@@ -32,22 +32,22 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer
}
def nameToClass(str: String): String = {
- val res = Names.decode(str.replace('/', '.'))
+ val res = NameTransformer.decode(str.replace('/', '.'))
if (res == "java.lang.Object") "scala.Any" else res
}
def nameToClass0(str: String) = {
- val res = Names.decode(str.replace('/', '.'))
+ val res = NameTransformer.decode(str.replace('/', '.'))
if (res == "java.lang.Object") "scala.AnyRef" else res
}
def nameToSimpleClass(str: String) =
- Names.decode(str.substring(str.lastIndexOf('/') + 1))
+ NameTransformer.decode(str.substring(str.lastIndexOf('/') + 1))
def nameToPackage(str: String) = {
val inx = str.lastIndexOf('/')
val name = if (inx == -1) str else str.substring(0, inx).replace('/', '.')
- Names.decode(name)
+ NameTransformer.decode(name)
}
def sigToType(str: String): String =
@@ -119,42 +119,21 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer
def printField(flags: Int, name: Int, tpe: Int, attribs: List[cf.Attribute]) {
print(flagsToStr(false, flags))
if ((flags & 0x0010) != 0)
- print("val " + Names.decode(getName(name)))
+ print("val " + NameTransformer.decode(getName(name)))
else
- print("final var " + Names.decode(getName(name)))
+ print("final var " + NameTransformer.decode(getName(name)))
print(": " + getType(tpe) + ";").newline
}
def printMethod(flags: Int, name: Int, tpe: Int, attribs: List[cf.Attribute]) {
if (getName(name) == "<init>")
print(flagsToStr(false, flags))
- attribs find {
- case cf.Attribute(name, _) => getName(name) == "JacoMeta"
- } match {
- case Some(cf.Attribute(_, data)) =>
- val mp = new MetaParser(getName(
- ((data(0) & 0xff) << 8) + (data(1) & 0xff)).trim())
- mp.parse match {
- case None =>
- if (getName(name) == "<init>") {
- print("def this" + getType(tpe) + ";").newline
- } else {
- print("def " + Names.decode(getName(name)))
- print(getType(tpe) + ";").newline
- }
- case Some(str) =>
- if (getName(name) == "<init>")
- print("def this" + str + ";").newline
- else
- print("def " + Names.decode(getName(name)) + str + ";").newline
- }
- case None =>
- if (getName(name) == "<init>") {
- print("def this" + getType(tpe) + ";").newline
- } else {
- print("def " + Names.decode(getName(name)))
- print(getType(tpe) + ";").newline
- }
+ if (getName(name) == "<init>") {
+ print("def this" + getType(tpe) + ";").newline
+ }
+ else {
+ print("def " + NameTransformer.decode(getName(name)))
+ print(getType(tpe) + ";").newline
}
attribs find {
case cf.Attribute(name, _) => getName(name) == "Exceptions"
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 24f26e2..90f8cb8 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -1,6 +1,6 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2011, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
@@ -8,11 +8,12 @@
package scala.tools.scalap
import java.io.{ PrintStream, OutputStreamWriter, ByteArrayOutputStream }
+import scala.reflect.NameTransformer
import scalax.rules.scalasig._
-import tools.nsc.util.{ ClassPath, JavaClassPath }
-import tools.util.PathResolver
+import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
+import scala.tools.util.PathResolver
import ClassPath.DefaultJavaContext
-import tools.nsc.io.{ PlainFile, AbstractFile }
+import scala.tools.nsc.io.{ PlainFile, AbstractFile }
/**The main object used to execute scalap on the command-line.
*
@@ -96,9 +97,14 @@ class Main {
*/
def process(args: Arguments, path: ClassPath[AbstractFile])(classname: String): Unit = {
// find the classfile
- val encName = Names.encode(
- if (classname == "scala.AnyRef") "java.lang.Object"
- else classname)
+ val encName = classname match {
+ case "scala.AnyRef" => "java.lang.Object"
+ case _ =>
+ // we have to encode every fragment of a name separately, otherwise the NameTransformer
+ // will encode using unicode escaping dot separators as well
+ // we can afford allocations because this is not a performance critical code
+ classname.split('.').map(NameTransformer.encode).mkString(".")
+ }
val cls = path.findClass(encName)
if (cls.isDefined && cls.get.binary.isDefined) {
val cfile = cls.get.binary.get
@@ -131,9 +137,9 @@ class Main {
def asClasspathString = ""
val context = DefaultJavaContext
- val classes = IndexedSeq[ClassRep]()
- val packages = IndexedSeq[ClassPath[AbstractFile]]()
- val sourcepaths = IndexedSeq[AbstractFile]()
+ val classes = IndexedSeq()
+ val packages = IndexedSeq()
+ val sourcepaths = IndexedSeq()
}
}
@@ -178,7 +184,7 @@ object Main extends Main {
val cparg = List("-classpath", "-cp") map (arguments getArgument _) reduceLeft (_ orElse _)
val path = cparg match {
case Some(cp) => new JavaClassPath(DefaultJavaContext.classesInExpandedPath(cp), DefaultJavaContext)
- case _ => PathResolver.fromPathString("")
+ case _ => PathResolver.fromPathString(".") // include '.' in the default classpath SI-6669
}
// print the classpath if output is verbose
if (verbose)
diff --git a/src/scalap/scala/tools/scalap/MetaParser.scala b/src/scalap/scala/tools/scalap/MetaParser.scala
index ca9bd93..00678ab 100644
--- a/src/scalap/scala/tools/scalap/MetaParser.scala
+++ b/src/scalap/scala/tools/scalap/MetaParser.scala
@@ -1,6 +1,6 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2011, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
@@ -61,7 +61,7 @@ class MetaParser(meta: String) {
else
None
} catch {
- case _ => None
+ case _: Exception => None
}
} else
None;
diff --git a/src/scalap/scala/tools/scalap/Names.scala b/src/scalap/scala/tools/scalap/Names.scala
deleted file mode 100644
index 1d66b31..0000000
--- a/src/scalap/scala/tools/scalap/Names.scala
+++ /dev/null
@@ -1,96 +0,0 @@
-/* ___ ____ ___ __ ___ ___
-** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2011, LAMP/EPFL
-** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
-**
-*/
-
-
-package scala.tools.scalap
-
-
-object Names {
-
- val operatorName = new Array[String](128)
- operatorName('$') = "$"
- operatorName('~') = "$tilde"
- operatorName('=') = "$eq"
- operatorName('<') = "$less"
- operatorName('>') = "$greater"
- operatorName('!') = "$bang"
- operatorName('#') = "$hash"
- operatorName('%') = "$percent"
- operatorName('^') = "$up"
- operatorName('&') = "$amp"
- operatorName('|') = "$bar"
- operatorName('*') = "$times"
- operatorName('/') = "$div"
- operatorName('\\') = "$bslash"
- operatorName('+') = "$plus"
- operatorName('-') = "$minus"
- operatorName(':') = "$colon"
-
- /** Replace operator symbols by corresponding "$op_name" in names.
- */
- def encode(name: String): String = {
- var i = 0
- val len = name.length()
- val res = new StringBuffer()
- while (i < len) {
- val c = name.charAt(i)
- if (c < 128) {
- val nop = operatorName(c)
- if (nop == null)
- res.append(c)
- else
- res.append(nop)
- } else
- res.append(c)
- i = i + 1
- }
- res.toString()
- }
-
- /** Replace "$op_name" by corresponding operator symbols in names.
- */
- def decode(name: String): String = {
- var i = 0
- val len = name.length()
- val res = new StringBuffer()
- while (i < len) {
- val c = name.charAt(i)
- if (c == '$') {
- var j = len
- while (j > i) {
- val prefix = name.substring(i, j)
- val c = lookup(prefix)
- if (c != null) {
- i = j
- res.append(c)
- } else
- j = j - 1
- }
- } else {
- i = i + 1
- res.append(c)
- }
- }
- res.toString()
- }
-
- /** Looks up the array entry for the operator name.
- */
- def lookup(string: String): String = {
- var i = 0
- var res: String = null
- while (i < 128) {
- if (string.equals(operatorName(i))) {
- res = String.valueOf(i.asInstanceOf[Char])
- i = 128
- }
- i = i + 1
- }
- res
- }
-
-}
diff --git a/src/scalap/scala/tools/scalap/Properties.scala b/src/scalap/scala/tools/scalap/Properties.scala
index 90e7a6b..8f9a9d8 100644
--- a/src/scalap/scala/tools/scalap/Properties.scala
+++ b/src/scalap/scala/tools/scalap/Properties.scala
@@ -1,6 +1,6 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2011, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala b/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala
index 827c2df..b4ce8ca 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala
@@ -14,7 +14,7 @@ package scala.tools.scalap
package scalax
package rules
-import scala.collection.mutable.HashMap
+import scala.collection.mutable
trait MemoisableRules extends Rules {
def memo[In <: Memoisable, Out, A, X](key : AnyRef)(toRule : => In => Result[Out, A, X]) = {
@@ -38,7 +38,7 @@ object DefaultMemoisable {
}
trait DefaultMemoisable extends Memoisable {
- protected val map = new HashMap[AnyRef, Any]
+ protected val map = new mutable.HashMap[AnyRef, Any]
def memo[A](key : AnyRef, a : => A) = {
map.getOrElseUpdate(key, compute(key, a)).asInstanceOf[A]
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
index 43f9c20..7092620 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
@@ -130,7 +130,7 @@ trait StateRules {
def rep(in : S, t : T) : Result[S, T, X] = {
if (finished(t)) Success(in, t)
else rule(in) match {
- case Success(out, f) => rep(out, f(t))
+ case Success(out, f) => rep(out, f(t)) // SI-5189 f.asInstanceOf[T => T]
case Failure => Failure
case Error(x) => Error(x)
}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
index 34f52a1..51a789e 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
@@ -82,7 +82,7 @@ class SeqRule[S, +A, +X](rule : Rule[S, S, A, X]) {
/** Repeats this rule num times */
def times(num : Int) : Rule[S, S, Seq[A], X] = from[S] {
- val result = new collection.mutable.ArraySeq[A](num)
+ val result = new scala.collection.mutable.ArraySeq[A](num)
// more compact using HoF but written this way so it's tail-recursive
def rep(i : Int, in : S) : Result[S, Seq[A], X] = {
if (i == num) Success(in, result)
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/package.scala b/src/scalap/scala/tools/scalap/scalax/rules/package.scala
new file mode 100644
index 0000000..b1cc18f
--- /dev/null
+++ b/src/scalap/scala/tools/scalap/scalax/rules/package.scala
@@ -0,0 +1,9 @@
+package scala.tools.scalap
+package scalax
+
+package object rules {
+ implicit lazy val higherKinds = scala.language.higherKinds
+ implicit lazy val postfixOps = scala.language.postfixOps
+ implicit lazy val implicitConversions = scala.language.implicitConversions
+ implicit lazy val reflectiveCalls = scala.language.reflectiveCalls
+}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
index d53d8e1..1a4b345 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
@@ -65,8 +65,11 @@ class ByteCode(val bytes : Array[Byte], val pos : Int, val length : Int) {
* stores and array of bytes for the decompiler
*/
def fromUTF8StringAndBytes = {
- val chunk: Array[Byte] = bytes drop pos take length
- StringBytesPair(io.Codec.fromUTF8(chunk).mkString, chunk)
+ val chunk: Array[Byte] = new Array[Byte](length)
+ System.arraycopy(bytes, pos, chunk, 0, length)
+ val str = new String(io.Codec.fromUTF8(bytes, pos, length))
+
+ StringBytesPair(str, chunk)
}
def byte(i : Int) = bytes(pos) & 0xFF
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
index 995c629..aa5acbb 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
@@ -1,6 +1,6 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2011, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
@@ -12,7 +12,7 @@ package rules
package scalasig
import ClassFileParser.{ ConstValueIndex, Annotation }
-import scala.reflect.generic.ByteCodecs
+import scala.reflect.internal.pickling.ByteCodecs
object ScalaSigParser {
import Main.{ SCALA_SIG, SCALA_SIG_ANNOTATION, BYTES_VALUE }
@@ -264,7 +264,7 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
48 -~ typeRef ~ (symbolRef*) ^~^ ExistentialType) as "type"
lazy val literal = oneOf(
- 24 -^ (),
+ 24 -^ (()),
25 -~ longValue ^^ (_ != 0L),
26 -~ longValue ^^ (_.toByte),
27 -~ longValue ^^ (_.toShort),
@@ -350,5 +350,5 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
* AnnotArg = Tree | Constant
* ConstAnnotArg = Constant | AnnotInfo | AnnotArgArray
*
- * len is remaining length after `len'.
+ * len is remaining length after `len`.
*/
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
index df78bad..cfe615a 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
@@ -1,6 +1,6 @@
/* ___ ____ ___ __ ___ ___
** / _// __// _ | / / / _ | / _ \ Scala classfile decoder
-** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2011, LAMP/EPFL
+** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL
** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/
**
*/
@@ -13,9 +13,8 @@ package scalasig
import java.io.{PrintStream, ByteArrayOutputStream}
import java.util.regex.Pattern
-
import scala.tools.scalap.scalax.util.StringUtil
-import reflect.NameTransformer
+import scala.reflect.NameTransformer
import java.lang.String
class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
@@ -157,7 +156,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
val printer = new ScalaSigPrinter(stream, printPrivates)
printer.printMethodType(m.infoType, false)(())
baos.toString
- case None =>
+ case _ =>
""
}
}
diff --git a/src/swing/scala/swing/AbstractButton.scala b/src/swing/scala/swing/AbstractButton.scala
index 6d106e8..fd84d6f 100644
--- a/src/swing/scala/swing/AbstractButton.scala
+++ b/src/swing/scala/swing/AbstractButton.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/Action.scala b/src/swing/scala/swing/Action.scala
index 9deea0f..8740f63 100644
--- a/src/swing/scala/swing/Action.scala
+++ b/src/swing/scala/swing/Action.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -135,7 +135,7 @@ abstract class Action(title0: String) {
def accelerator: Option[KeyStroke] =
toOption(peer.getValue(javax.swing.Action.ACCELERATOR_KEY))
def accelerator_=(k: Option[KeyStroke]) {
- peer.putValue(javax.swing.Action.ACCELERATOR_KEY, k orNull)
+ peer.putValue(javax.swing.Action.ACCELERATOR_KEY, k.orNull)
}
/**
diff --git a/src/swing/scala/swing/Alignment.scala b/src/swing/scala/swing/Alignment.scala
index a58f704..b49e89d 100644
--- a/src/swing/scala/swing/Alignment.scala
+++ b/src/swing/scala/swing/Alignment.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/Applet.scala b/src/swing/scala/swing/Applet.scala
index 0f0380e..b8ba4ea 100644
--- a/src/swing/scala/swing/Applet.scala
+++ b/src/swing/scala/swing/Applet.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/BorderPanel.scala b/src/swing/scala/swing/BorderPanel.scala
index f7875ce..75bb721 100644
--- a/src/swing/scala/swing/BorderPanel.scala
+++ b/src/swing/scala/swing/BorderPanel.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/BoxPanel.scala b/src/swing/scala/swing/BoxPanel.scala
index 0d9041f..f5859a8 100644
--- a/src/swing/scala/swing/BoxPanel.scala
+++ b/src/swing/scala/swing/BoxPanel.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/BufferWrapper.scala b/src/swing/scala/swing/BufferWrapper.scala
index 04fb25f..38230ba 100644
--- a/src/swing/scala/swing/BufferWrapper.scala
+++ b/src/swing/scala/swing/BufferWrapper.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/Button.scala b/src/swing/scala/swing/Button.scala
index 1dd3388..f10d49d 100644
--- a/src/swing/scala/swing/Button.scala
+++ b/src/swing/scala/swing/Button.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/ButtonGroup.scala b/src/swing/scala/swing/ButtonGroup.scala
index bfecea0..2075df7 100644
--- a/src/swing/scala/swing/ButtonGroup.scala
+++ b/src/swing/scala/swing/ButtonGroup.scala
@@ -1,19 +1,16 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.swing
import event._
import javax.swing.{AbstractButton => JAbstractButton,Icon}
-import scala.collection._
-import scala.collection.mutable.Buffer
+import scala.collection.{ mutable, immutable }
/**
* A button mutex. At most one of its associated buttons is selected
diff --git a/src/swing/scala/swing/CheckBox.scala b/src/swing/scala/swing/CheckBox.scala
index 64d371a..7287c95 100644
--- a/src/swing/scala/swing/CheckBox.scala
+++ b/src/swing/scala/swing/CheckBox.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/ComboBox.scala b/src/swing/scala/swing/ComboBox.scala
index 75f9810..5b70f6f 100644
--- a/src/swing/scala/swing/ComboBox.scala
+++ b/src/swing/scala/swing/ComboBox.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.swing
import event._
@@ -205,6 +203,6 @@ class ComboBox[A](items: Seq[A]) extends Component with Publisher {
def prototypeDisplayValue: Option[A] = toOption[A](peer.getPrototypeDisplayValue)
def prototypeDisplayValue_=(v: Option[A]) {
- peer.setPrototypeDisplayValue(v map toAnyRef orNull)
+ peer.setPrototypeDisplayValue((v map toAnyRef).orNull)
}
}
diff --git a/src/swing/scala/swing/Component.scala b/src/swing/scala/swing/Component.scala
index 5eaff35..b7dd856 100644
--- a/src/swing/scala/swing/Component.scala
+++ b/src/swing/scala/swing/Component.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.swing
import event._
@@ -119,11 +117,6 @@ abstract class Component extends UIElement {
def verify(c: javax.swing.JComponent) = v(UIElement.cachedWrapper[Component](c))
})
}*/
-
-
-
- @deprecated("Use mouse instead", "2.8.0") lazy val Mouse = mouse
-
/**
* Contains publishers for various mouse events. They are separated for
* efficiency reasons.
@@ -132,10 +125,10 @@ abstract class Component extends UIElement {
/**
* Publishes clicks, presses and releases.
*/
- val clicks: Publisher = new Publisher {
- peer.addMouseListener(new MouseListener {
- def mouseEntered(e: java.awt.event.MouseEvent) { }
- def mouseExited(e: java.awt.event.MouseEvent) { }
+ val clicks: Publisher = new LazyPublisher {
+ lazy val l = new MouseListener {
+ def mouseEntered(e: java.awt.event.MouseEvent) {}
+ def mouseExited(e: java.awt.event.MouseEvent) {}
def mouseClicked(e: java.awt.event.MouseEvent) {
publish(new MouseClicked(e))
}
@@ -145,13 +138,16 @@ abstract class Component extends UIElement {
def mouseReleased(e: java.awt.event.MouseEvent) {
publish(new MouseReleased(e))
}
- })
+ }
+
+ def onFirstSubscribe() = peer.addMouseListener(l)
+ def onLastUnsubscribe() = peer.removeMouseListener(l)
}
/**
* Publishes enters, exits, moves, and drags.
*/
- val moves: Publisher = new Publisher {
- peer.addMouseListener(new MouseListener {
+ val moves: Publisher = new LazyPublisher {
+ lazy val mouseListener = new MouseListener {
def mouseEntered(e: java.awt.event.MouseEvent) {
publish(new MouseEntered(e))
}
@@ -159,17 +155,26 @@ abstract class Component extends UIElement {
publish(new MouseExited(e))
}
def mouseClicked(e: java.awt.event.MouseEvent) {}
- def mousePressed(e: java.awt.event.MouseEvent) { }
- def mouseReleased(e: java.awt.event.MouseEvent) { }
- })
- peer.addMouseMotionListener(new MouseMotionListener {
+ def mousePressed(e: java.awt.event.MouseEvent) {}
+ def mouseReleased(e: java.awt.event.MouseEvent) {}
+ }
+
+ lazy val mouseMotionListener = new MouseMotionListener {
def mouseMoved(e: java.awt.event.MouseEvent) {
publish(new MouseMoved(e))
}
def mouseDragged(e: java.awt.event.MouseEvent) {
publish(new MouseDragged(e))
}
- })
+ }
+ def onFirstSubscribe() {
+ peer.addMouseListener(mouseListener)
+ peer.addMouseMotionListener(mouseMotionListener)
+ }
+ def onLastUnsubscribe() {
+ peer.removeMouseListener(mouseListener)
+ peer.removeMouseMotionListener(mouseMotionListener)
+ }
}
/**
* Publishes mouse wheel moves.
@@ -178,9 +183,10 @@ abstract class Component extends UIElement {
// We need to subscribe lazily and unsubscribe, since components in scroll panes capture
// mouse wheel events if there is a listener installed. See ticket #1442.
lazy val l = new MouseWheelListener {
- def mouseWheelMoved(e: java.awt.event.MouseWheelEvent) {
- publish(new MouseWheelMoved(e)) }
+ def mouseWheelMoved(e: java.awt.event.MouseWheelEvent) {
+ publish(new MouseWheelMoved(e))
}
+ }
def onFirstSubscribe() = peer.addMouseWheelListener(l)
def onLastUnsubscribe() = peer.removeMouseWheelListener(l)
}
diff --git a/src/swing/scala/swing/Container.scala b/src/swing/scala/swing/Container.scala
index b39bfe0..24889f0 100644
--- a/src/swing/scala/swing/Container.scala
+++ b/src/swing/scala/swing/Container.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/EditorPane.scala b/src/swing/scala/swing/EditorPane.scala
index 15d415a..b8c506d 100644
--- a/src/swing/scala/swing/EditorPane.scala
+++ b/src/swing/scala/swing/EditorPane.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/FileChooser.scala b/src/swing/scala/swing/FileChooser.scala
index 0459af3..e731c67 100644
--- a/src/swing/scala/swing/FileChooser.scala
+++ b/src/swing/scala/swing/FileChooser.scala
@@ -1,13 +1,11 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.swing
import java.io.File
@@ -16,9 +14,9 @@ import javax.swing.filechooser._
object FileChooser {
/**
- * The result of a file dialog. The precise meaning of the <code>Approve</code>
- * result depends on the specific dialog type. Could be "save" or "open" for
- * example.
+ * The result of a file dialog. The precise meaning of the `Approve`
+ * result depends on the specific dialog type. Could be `"save"` or
+ * `"open"` for instance.
*/
object Result extends Enumeration {
val Cancel = Value(JFileChooser.CANCEL_OPTION)
@@ -39,7 +37,7 @@ object FileChooser {
/**
* Used to open file dialogs.
*
- * @see javax.swing.JFileChooser
+ * @see [[javax.swing.JFileChooser]]
*/
class FileChooser(dir: File) {
import FileChooser._
@@ -66,7 +64,7 @@ class FileChooser(dir: File) {
def fileSelectionMode: SelectionMode.Value = SelectionMode(peer.getFileSelectionMode)
def fileSelectionMode_=(s: SelectionMode.Value) { peer.setFileSelectionMode(s.id) }
def fileFilter: FileFilter = peer.getFileFilter
- def fileFilter_=(f: FileFilter) { peer.setFileFilter(f) }
+ def fileFilter_=(f: FileFilter) { peer setFileFilter f }
def selectedFile: File = peer.getSelectedFile
def selectedFile_=(file: File) { peer.setSelectedFile(file) }
diff --git a/src/swing/scala/swing/FlowPanel.scala b/src/swing/scala/swing/FlowPanel.scala
index 330cb77..feeb3d4 100644
--- a/src/swing/scala/swing/FlowPanel.scala
+++ b/src/swing/scala/swing/FlowPanel.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/Font.scala.disabled b/src/swing/scala/swing/Font.scala.disabled
index 6eebd66..9e21eb8 100644
--- a/src/swing/scala/swing/Font.scala.disabled
+++ b/src/swing/scala/swing/Font.scala.disabled
@@ -1,36 +1,36 @@
package scala.swing
-/*object Font {
- def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile)
- def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream)
+/*object Font {
+ def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile)
+ def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream)
def decode(str: String) = java.awt.Font.decode(str)
-
+
/* TODO: finish implementation
/**
* See [java.awt.Font.getFont].
*/
- def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) =
+ def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) =
java.awt.Font.getFont(ImmutableMapWrapper(attributes))
-
+
import java.{util => ju}
- private case class ImmutableMapWrapper[A, B](underlying : Map[A, B])(m : ClassManifest[A]) extends ju.AbstractMap[A, B] {
+ private case class ImmutableMapWrapper[A, B](underlying : Map[A, B])(t : ClassTag[A]) extends ju.AbstractMap[A, B] {
self =>
override def size = underlying.size
- override def put(k : A, v : B) =
+ override def put(k : A, v : B) =
throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
- override def remove(k : AnyRef) =
+ override def remove(k : AnyRef) =
throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
-
+
override def entrySet : ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
def size = self.size
def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
val ui = underlying.iterator
var prev : Option[A] = None
-
+
def hasNext = ui.hasNext
-
+
def next = {
val (k, v) = ui.next
prev = Some(k)
@@ -44,7 +44,7 @@ package scala.swing
}
}
}
-
+
def remove = prev match {
case Some(k) => val v = self.remove(k.asInstanceOf[AnyRef]) ; prev = None ; v
case _ => throw new IllegalStateException("next must be called at least once before remove")
@@ -53,7 +53,7 @@ package scala.swing
}
}
*/
-
+
/**
* See [java.awt.Font.getFont].
*/
@@ -62,9 +62,9 @@ package scala.swing
* See [java.awt.Font.getFont].
*/
def get(nm: String, font: Font) = java.awt.Font.getFont(nm, font)
-
+
def Insets(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
def Rectangle(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
def Point(x: Int, y: Int) = new Point(x, y)
- def Dimension(x: Int, y: Int) = new Dimension(x, y)
+ def Dimension(x: Int, y: Int) = new Dimension(x, y)
}*/
\ No newline at end of file
diff --git a/src/swing/scala/swing/FormattedTextField.scala b/src/swing/scala/swing/FormattedTextField.scala
index 11e7b97..311ff42 100644
--- a/src/swing/scala/swing/FormattedTextField.scala
+++ b/src/swing/scala/swing/FormattedTextField.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/GUIApplication.scala b/src/swing/scala/swing/GUIApplication.scala
deleted file mode 100644
index 8c63115..0000000
--- a/src/swing/scala/swing/GUIApplication.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.swing
-
-import event.Event
-import javax.swing._
-
-/**
- * Convenience class with utility methods for GUI applications.
- */
- at deprecated("Use SwingApplication instead", "2.8.0") class GUIApplication {
-
- /**
- * Called before the GUI is created. Override to customize.
- */
- def init() {}
-
- /**
- * Initializes the framework and runs the given program.
- */
- def run(prog: => Unit) = Swing.onEDT { init(); prog }
-}
diff --git a/src/swing/scala/swing/GridBagPanel.scala b/src/swing/scala/swing/GridBagPanel.scala
index d5b2472..7d181af 100644
--- a/src/swing/scala/swing/GridBagPanel.scala
+++ b/src/swing/scala/swing/GridBagPanel.scala
@@ -1,18 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.swing
import java.awt.{GridBagConstraints, GridBagLayout}
-
object GridBagPanel {
object Fill extends Enumeration {
val None = Value(GridBagConstraints.NONE)
diff --git a/src/swing/scala/swing/GridPanel.scala b/src/swing/scala/swing/GridPanel.scala
index 53d9f92..d41f9e1 100644
--- a/src/swing/scala/swing/GridPanel.scala
+++ b/src/swing/scala/swing/GridPanel.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/Label.scala b/src/swing/scala/swing/Label.scala
index 72a4ea6..65d43cb 100644
--- a/src/swing/scala/swing/Label.scala
+++ b/src/swing/scala/swing/Label.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/LayoutContainer.scala b/src/swing/scala/swing/LayoutContainer.scala
index 0e4a1af..37d3514 100644
--- a/src/swing/scala/swing/LayoutContainer.scala
+++ b/src/swing/scala/swing/LayoutContainer.scala
@@ -1,27 +1,24 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.swing
import javax.swing.JComponent
-import scala.collection.mutable.Map
+import scala.collection.mutable
-/** <p>
- * A container that associates layout constraints of member type
- * <code>Constraints</code> with its children. See <code>GridBagPanel</code>
- * for an example container with custom constraints.
- * </p>
+/** A container that associates layout constraints of member type
+ * `Constraints` with its children.
+ *
+ * See `GridBagPanel` for an example container with custom constraints.
*
* @note [Java Swing] In scala.swing, panels and layout managers are
- * combined into subclasses of this base class. This approach allows for typed
- * component constraints.
+ * combined into subclasses of this base class. This approach allows for
+ * typed component constraints.
*/
trait LayoutContainer extends Container.Wrapper {
/**
@@ -57,14 +54,13 @@ trait LayoutContainer extends Container.Wrapper {
*
* also ensures that myComponent is properly added to this container.
*/
- def layout: Map[Component, Constraints] = new Map[Component, Constraints] {
+ def layout: mutable.Map[Component, Constraints] = new mutable.Map[Component, Constraints] {
def -= (c: Component): this.type = { _contents -= c; this }
def += (cl: (Component, Constraints)): this.type = { update(cl._1, cl._2); this }
override def update (c: Component, l: Constraints) {
val (v, msg) = areValid(l)
if (!v) throw new IllegalArgumentException(msg)
add(c, l)
- this
}
def get(c: Component) = Option(constraintsFor(c))
override def size = peer.getComponentCount
diff --git a/src/swing/scala/swing/ListView.scala b/src/swing/scala/swing/ListView.scala
index c5a5075..40639aa 100644
--- a/src/swing/scala/swing/ListView.scala
+++ b/src/swing/scala/swing/ListView.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -195,24 +195,13 @@ class ListView[A] extends Component {
object indices extends Indices(peer.getSelectedIndices) {
def -=(n: Int): this.type = { peer.removeSelectionInterval(n,n); this }
def +=(n: Int): this.type = { peer.addSelectionInterval(n,n); this }
- @deprecated("Use ListView.selection.leadIndex", "2.8.0")
- def leadIndex: Int = peer.getSelectionModel.getLeadSelectionIndex
- @deprecated("Use ListView.selection.anchorIndex", "2.8.0")
- def anchorIndex: Int = peer.getSelectionModel.getAnchorSelectionIndex
}
- @deprecated("Use ListView.selectIndices", "2.8.0")
- def selectIndices(ind: Int*) = peer.setSelectedIndices(ind.toArray)
-
/**
* The currently selected items.
*/
object items extends scala.collection.SeqProxy[A] {
def self = peer.getSelectedValues.map(_.asInstanceOf[A])
- @deprecated("Use ListView.selection.leadIndex", "2.8.0")
- def leadIndex: Int = peer.getSelectionModel.getLeadSelectionIndex
- @deprecated("Use ListView.selection.anchorIndex", "2.8.0")
- def anchorIndex: Int = peer.getSelectionModel.getAnchorSelectionIndex
}
def intervalMode: IntervalMode.Value = IntervalMode(peer.getSelectionModel.getSelectionMode)
@@ -239,6 +228,11 @@ class ListView[A] extends Component {
def prototypeCellValue: A = peer.getPrototypeCellValue.asInstanceOf[A]
def prototypeCellValue_=(a: A) { peer.setPrototypeCellValue(a) }
+ def visibleRowCount = peer.getVisibleRowCount
+ def visibleRowCount_=(n: Int) = peer.setVisibleRowCount(n)
+
+ def ensureIndexIsVisible(idx: Int) = peer.ensureIndexIsVisible(idx)
+
def selectionForeground: Color = peer.getSelectionForeground
def selectionForeground_=(c: Color) = peer.setSelectionForeground(c)
def selectionBackground: Color = peer.getSelectionBackground
diff --git a/src/swing/scala/swing/MainFrame.scala b/src/swing/scala/swing/MainFrame.scala
index e8b276e..85ce075 100644
--- a/src/swing/scala/swing/MainFrame.scala
+++ b/src/swing/scala/swing/MainFrame.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -16,6 +16,6 @@ import event._
* A frame that can be used for main application windows. Shuts down the
* framework and quits the application when closed.
*/
-class MainFrame extends Frame {
+class MainFrame(gc: java.awt.GraphicsConfiguration = null) extends Frame(gc) {
override def closeOperation() { sys.exit(0) }
}
diff --git a/src/swing/scala/swing/Menu.scala b/src/swing/scala/swing/Menu.scala
index dc4922f..38b1787 100644
--- a/src/swing/scala/swing/Menu.scala
+++ b/src/swing/scala/swing/Menu.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/Orientable.scala b/src/swing/scala/swing/Orientable.scala
index 71e7973..a73bafb 100644
--- a/src/swing/scala/swing/Orientable.scala
+++ b/src/swing/scala/swing/Orientable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/Orientation.scala b/src/swing/scala/swing/Orientation.scala
index c1274e2..ad616ec 100644
--- a/src/swing/scala/swing/Orientation.scala
+++ b/src/swing/scala/swing/Orientation.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/Oriented.scala b/src/swing/scala/swing/Oriented.scala
index 081d5e3..7996d21 100644
--- a/src/swing/scala/swing/Oriented.scala
+++ b/src/swing/scala/swing/Oriented.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/Panel.scala b/src/swing/scala/swing/Panel.scala
index b54be1a..89ad4d3 100644
--- a/src/swing/scala/swing/Panel.scala
+++ b/src/swing/scala/swing/Panel.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/PasswordField.scala b/src/swing/scala/swing/PasswordField.scala
index 7575dea..d2fdd0d 100644
--- a/src/swing/scala/swing/PasswordField.scala
+++ b/src/swing/scala/swing/PasswordField.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/ProgressBar.scala b/src/swing/scala/swing/ProgressBar.scala
index 2b53206..33dd716 100644
--- a/src/swing/scala/swing/ProgressBar.scala
+++ b/src/swing/scala/swing/ProgressBar.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/Publisher.scala b/src/swing/scala/swing/Publisher.scala
index 3c3e99f..578ef71 100644
--- a/src/swing/scala/swing/Publisher.scala
+++ b/src/swing/scala/swing/Publisher.scala
@@ -1,17 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.swing
-import scala.collection._
-import scala.collection.mutable.{Buffer, HashSet, Set}
+import scala.collection.mutable
+import mutable.Buffer
import event.Event
/** <p>
@@ -33,7 +31,7 @@ trait Publisher extends Reactor {
protected val listeners = new RefSet[Reaction] {
import scala.ref._
- val underlying = new HashSet[Reference[Reaction]]
+ val underlying = new mutable.HashSet[Reference[Reaction]]
protected def Ref(a: Reaction) = a match {
case a: StronglyReferenced => new StrongReference[Reaction](a) with super.Ref[Reaction]
case _ => new WeakReference[Reaction](a, referenceQueue) with super.Ref[Reaction]
@@ -46,7 +44,7 @@ trait Publisher extends Reactor {
/**
* Notify all registered reactions.
*/
- def publish(e: Event) { for (l <- listeners) l(e) }
+ def publish(e: Event) { for (l <- listeners) if (l.isDefinedAt(e)) l(e) }
listenTo(this)
}
@@ -164,8 +162,8 @@ abstract class RefBuffer[A <: AnyRef] extends Buffer[A] with SingleRefCollection
protected[this] def removeReference(ref: Reference[A]) { underlying -= ref }
}
-private[swing] abstract class RefSet[A <: AnyRef] extends Set[A] with SingleRefCollection[A] { self =>
- protected val underlying: Set[Reference[A]]
+private[swing] abstract class RefSet[A <: AnyRef] extends mutable.Set[A] with SingleRefCollection[A] { self =>
+ protected val underlying: mutable.Set[Reference[A]]
def -=(el: A): this.type = { underlying -= Ref(el); purgeReferences(); this }
def +=(el: A): this.type = { purgeReferences(); underlying += Ref(el); this }
diff --git a/src/swing/scala/swing/RadioButton.scala b/src/swing/scala/swing/RadioButton.scala
index 6909f52..64f8b23 100644
--- a/src/swing/scala/swing/RadioButton.scala
+++ b/src/swing/scala/swing/RadioButton.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/Reactions.scala b/src/swing/scala/swing/Reactions.scala
index 43818ad..d8a62aa 100644
--- a/src/swing/scala/swing/Reactions.scala
+++ b/src/swing/scala/swing/Reactions.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/Reactor.scala b/src/swing/scala/swing/Reactor.scala
index 3d045a9..8f74831 100644
--- a/src/swing/scala/swing/Reactor.scala
+++ b/src/swing/scala/swing/Reactor.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/RichWindow.scala b/src/swing/scala/swing/RichWindow.scala
index 721172f..a60cdd3 100644
--- a/src/swing/scala/swing/RichWindow.scala
+++ b/src/swing/scala/swing/RichWindow.scala
@@ -1,16 +1,14 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.swing
-import java.awt.{Window => AWTWindow, Frame => AWTFrame}
+import java.awt.{ Window => AWTWindow, Frame => AWTFrame }
import javax.swing._
import Swing._
@@ -52,15 +50,15 @@ sealed trait RichWindow extends Window {
* The menu bar of this frame or `NoMenuBar` if no menu bar is set.
*/
def menuBar: MenuBar = {
- val m = UIElement.cachedWrapper[MenuBar](peer.getJMenuBar)
- if (m != null) m else MenuBar.NoMenuBar
+ val m = UIElement.cachedWrapper[MenuBar](peer.getJMenuBar)
+ if (m != null) m else MenuBar.NoMenuBar
}
/**
* Set the current menu bar of this frame. Pass `NoMenuBar` if this frame
* should not show a menu bar.
*/
def menuBar_=(m: MenuBar) =
- peer.setJMenuBar(if(m == MenuBar.NoMenuBar) null else m.peer)
+ peer.setJMenuBar(if (m == MenuBar.NoMenuBar) null else m.peer)
def resizable_=(b: Boolean) { peer.setResizable(b) }
def resizable = peer.isResizable
@@ -74,23 +72,15 @@ sealed trait RichWindow extends Window {
*
* @see javax.swing.JFrame
*/
-class Frame extends RichWindow {
- override lazy val peer: JFrame with InterfaceMixin = new JFrame with InterfaceMixin with SuperMixin
-
- protected trait SuperMixin extends JFrame {
- override protected def processWindowEvent(e: java.awt.event.WindowEvent) {
- super.processWindowEvent(e)
- if (e.getID() == java.awt.event.WindowEvent.WINDOW_CLOSING)
- closeOperation()
- }
- }
+class Frame(gc: java.awt.GraphicsConfiguration = null) extends RichWindow {
+ override lazy val peer: JFrame with InterfaceMixin = new JFrame(gc) with InterfaceMixin with SuperMixin
def iconify() { peer.setExtendedState(peer.getExtendedState | AWTFrame.ICONIFIED) }
def uniconify() { peer.setExtendedState(peer.getExtendedState & ~AWTFrame.ICONIFIED) }
- def iconified() { (peer.getExtendedState & AWTFrame.ICONIFIED) != 0 }
+ def iconified: Boolean = (peer.getExtendedState & AWTFrame.ICONIFIED) != 0
def maximize() { peer.setExtendedState(peer.getExtendedState | AWTFrame.MAXIMIZED_BOTH) }
def unmaximize() { peer.setExtendedState(peer.getExtendedState & ~AWTFrame.MAXIMIZED_BOTH) }
- def maximized() { (peer.getExtendedState & AWTFrame.MAXIMIZED_BOTH) != 0 }
+ def maximized: Boolean = (peer.getExtendedState & AWTFrame.MAXIMIZED_BOTH) != 0
def iconImage: Image = peer.getIconImage
def iconImage_=(i: Image) { peer.setIconImage(i) }
@@ -142,8 +132,8 @@ object Dialog {
optionType: Options.Value = Options.YesNo,
messageType: Message.Value = Message.Question,
icon: Icon = EmptyIcon): Result.Value =
- Result(JOptionPane.showConfirmDialog(nullPeer(parent), message, title,
- optionType.id, messageType.id, Swing.wrapIcon(icon)))
+ Result(JOptionPane.showConfirmDialog(nullPeer(parent), message, title,
+ optionType.id, messageType.id, Swing.wrapIcon(icon)))
def showOptions(parent: Component = null,
message: Any,
@@ -154,8 +144,8 @@ object Dialog {
entries: Seq[Any],
initial: Int): Result.Value = {
val r = JOptionPane.showOptionDialog(nullPeer(parent), message, title,
- optionType.id, messageType.id, Swing.wrapIcon(icon),
- entries map toAnyRef toArray, entries(initial))
+ optionType.id, messageType.id, Swing.wrapIcon(icon),
+ (entries map toAnyRef).toArray, entries(initial))
Result(r)
}
@@ -167,10 +157,10 @@ object Dialog {
entries: Seq[A] = Nil,
initial: A): Option[A] = {
val e = if (entries.isEmpty) null
- else entries map toAnyRef toArray
+ else (entries map toAnyRef).toArray
val r = JOptionPane.showInputDialog(nullPeer(parent), message, title,
- messageType.id, Swing.wrapIcon(icon),
- e, initial)
+ messageType.id, Swing.wrapIcon(icon),
+ e, initial)
toOption[A](r)
}
@@ -179,8 +169,8 @@ object Dialog {
title: String = uiString("OptionPane.messageDialogTitle"),
messageType: Message.Value = Message.Info,
icon: Icon = EmptyIcon) {
- JOptionPane.showMessageDialog(nullPeer(parent), message, title,
- messageType.id, Swing.wrapIcon(icon))
+ JOptionPane.showMessageDialog(nullPeer(parent), message, title,
+ messageType.id, Swing.wrapIcon(icon))
}
}
@@ -189,12 +179,12 @@ object Dialog {
*
* @see javax.swing.JDialog
*/
-class Dialog(owner: Window) extends RichWindow {
+class Dialog(owner: Window, gc: java.awt.GraphicsConfiguration = null) extends RichWindow {
override lazy val peer: JDialog with InterfaceMixin =
- if (owner == null) new JDialog with InterfaceMixin
+ if (owner == null) new JDialog with InterfaceMixin with SuperMixin
else owner match {
- case f: Frame => new JDialog(f.peer) with InterfaceMixin
- case d: Dialog => new JDialog(d.peer) with InterfaceMixin
+ case f: Frame => new JDialog(f.peer, "", false, gc) with InterfaceMixin with SuperMixin
+ case d: Dialog => new JDialog(d.peer, "", false, gc) with InterfaceMixin with SuperMixin
}
def this() = this(null)
diff --git a/src/swing/scala/swing/RootPanel.scala b/src/swing/scala/swing/RootPanel.scala
index 413f514..7e4882d 100644
--- a/src/swing/scala/swing/RootPanel.scala
+++ b/src/swing/scala/swing/RootPanel.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/ScrollBar.scala b/src/swing/scala/swing/ScrollBar.scala
index 81ce16b..6a1acdc 100644
--- a/src/swing/scala/swing/ScrollBar.scala
+++ b/src/swing/scala/swing/ScrollBar.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/ScrollPane.scala b/src/swing/scala/swing/ScrollPane.scala
index 5c18bb1..afd6cf2 100644
--- a/src/swing/scala/swing/ScrollPane.scala
+++ b/src/swing/scala/swing/ScrollPane.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -65,17 +65,17 @@ class ScrollPane extends Component with Container {
def rowHeaderView: Option[Component] =
Option(peer.getRowHeader.getView) map UIElement.cachedWrapper[Component]
def rowHeaderView_=(c: Component) = peer.setRowHeaderView(c.peer)
- def rowHeaderView_=(c: Option[Component]) = peer.setRowHeaderView(c map (_.peer) orNull)
+ def rowHeaderView_=(c: Option[Component]) = peer.setRowHeaderView(c.map(_.peer).orNull)
def columnHeaderView: Option[Component] =
Option(peer.getColumnHeader.getView) map UIElement.cachedWrapper[Component]
def columnHeaderView_=(c: Component) = peer.setColumnHeaderView(c.peer)
- def columnHeaderView_=(c: Option[Component]) = peer.setColumnHeaderView(c map (_.peer) orNull)
+ def columnHeaderView_=(c: Option[Component]) = peer.setColumnHeaderView(c.map(_.peer).orNull)
def viewportView: Option[Component] =
Option(peer.getViewport.getView) map UIElement.cachedWrapper[Component]
def viewportView_=(c: Component) = peer.setViewportView(c.peer)
- def viewportView_=(c: Option[Component]) = peer.setViewportView(c map (_.peer) orNull)
+ def viewportView_=(c: Option[Component]) = peer.setViewportView(c.map(_.peer).orNull)
def verticalScrollBarPolicy = BarPolicy.wrap(peer.getVerticalScrollBarPolicy)
def verticalScrollBarPolicy_=(p: BarPolicy.Value) = peer.setVerticalScrollBarPolicy(p.verticalPeer)
diff --git a/src/swing/scala/swing/Scrollable.scala b/src/swing/scala/swing/Scrollable.scala
index 555ba30..1253ac8 100644
--- a/src/swing/scala/swing/Scrollable.scala
+++ b/src/swing/scala/swing/Scrollable.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/Separator.scala b/src/swing/scala/swing/Separator.scala
index 3c61ac5..32d209d 100644
--- a/src/swing/scala/swing/Separator.scala
+++ b/src/swing/scala/swing/Separator.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/SequentialContainer.scala b/src/swing/scala/swing/SequentialContainer.scala
index 969aef0..5f32b08 100644
--- a/src/swing/scala/swing/SequentialContainer.scala
+++ b/src/swing/scala/swing/SequentialContainer.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/SimpleGUIApplication.scala b/src/swing/scala/swing/SimpleGUIApplication.scala
deleted file mode 100644
index 74d5d5e..0000000
--- a/src/swing/scala/swing/SimpleGUIApplication.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.swing
-
-import javax.swing._
-
-/**
- * Extend this class for most simple UI applications. Clients need to implement the
- * <code>top</code> method. Framework initialization is done by this class.
- *
- * In order to conform to Swing's threading policy, never implement top or any additional
- * member that created Swing components as a value unless component creation happens on
- * the EDT (see Swing.onEDT and Swing.onEDTWait). Lazy values are okay for the same reason
- * if they are initialized on the EDT always.
- */
- at deprecated("Use SimpleSwingApplication instead", "2.8.0") abstract class SimpleGUIApplication extends GUIApplication {
-
- /**
- * A GUI application's version of the main method. Called by the default
- * main method implementation provided by this class.
- * Implement to return the top-level frame of this application.
- */
- def top: Frame
-
- /**
- * Calls top, packs the frame, and displays it.
- */
- def main(args: Array[String]) = run {
- val t = top
- t.pack()
- t.visible = true
- }
-
- def resourceFromClassloader(path: String): java.net.URL =
- this.getClass.getResource(path)
-
- def resourceFromUserDirectory(path: String): java.io.File =
- new java.io.File(util.Properties.userDir, path)
-}
diff --git a/src/swing/scala/swing/SimpleSwingApplication.scala b/src/swing/scala/swing/SimpleSwingApplication.scala
index 786c7b4..cd0f2be 100644
--- a/src/swing/scala/swing/SimpleSwingApplication.scala
+++ b/src/swing/scala/swing/SimpleSwingApplication.scala
@@ -1,8 +1,27 @@
package scala.swing
+/**
+ * Extend this class for most simple UI applications. Clients need to
+ * implement the `top` method. Framework initialization is done by this class.
+ *
+ * In order to conform to Swing's threading policy, never implement top or any
+ * additional member that created Swing components as a value unless component
+ * creation happens on the EDT (see `Swing.onEDT` and `Swing.onEDTWait`).
+ * Lazy values are okay for the same reason if they are initialized on the EDT
+ * always.
+ */
abstract class SimpleSwingApplication extends SwingApplication {
+
+ /**
+ * A GUI application's version of the main method. Called by the default
+ * main method implementation provided by this class.
+ * Implement to return the top-level frame of this application.
+ */
def top: Frame
+ /**
+ * Calls `top`, packs the frame, and displays it.
+ */
override def startup(args: Array[String]) {
val t = top
if (t.size == new Dimension(0,0)) t.pack()
diff --git a/src/swing/scala/swing/Slider.scala b/src/swing/scala/swing/Slider.scala
index 4fa9fcf..e329c31 100644
--- a/src/swing/scala/swing/Slider.scala
+++ b/src/swing/scala/swing/Slider.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/SplitPane.scala b/src/swing/scala/swing/SplitPane.scala
index 9d1b228..dd4f290 100644
--- a/src/swing/scala/swing/SplitPane.scala
+++ b/src/swing/scala/swing/SplitPane.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -28,16 +28,16 @@ class SplitPane(o: Orientation.Value, left: Component, right: Component) extends
def contents: Seq[Component] = List(leftComponent, rightComponent)
def contents_=(left: Component, right: Component) {
- peer.setLeftComponent(left.peer)
- peer.setRightComponent(right.peer)
+ peer.setLeftComponent(nullPeer(left))
+ peer.setRightComponent(nullPeer(right))
}
def topComponent: Component =
UIElement.cachedWrapper[Component](peer.getTopComponent.asInstanceOf[javax.swing.JComponent])
- def topComponent_=(c: Component) { peer.setTopComponent(c.peer) }
+ def topComponent_=(c: Component) { peer.setTopComponent(nullPeer(c)) }
def bottomComponent: Component =
UIElement.cachedWrapper[Component](peer.getBottomComponent.asInstanceOf[javax.swing.JComponent])
- def bottomComponent_=(c: Component) { peer.setBottomComponent(c.peer) }
+ def bottomComponent_=(c: Component) { peer.setBottomComponent(nullPeer(c)) }
def leftComponent: Component = topComponent
def leftComponent_=(c: Component) { topComponent = c }
diff --git a/src/swing/scala/swing/Swing.scala b/src/swing/scala/swing/Swing.scala
index 519ccba..cd5bbf2 100644
--- a/src/swing/scala/swing/Swing.scala
+++ b/src/swing/scala/swing/Swing.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -15,6 +15,7 @@ import javax.swing.event._
import javax.swing.border._
import javax.swing.{JComponent, Icon, BorderFactory, SwingUtilities}
+
/**
* Helpers for this package.
*/
diff --git a/src/swing/scala/swing/SwingActor.scala b/src/swing/scala/swing/SwingActor.scala
index bd400de..035e979 100644
--- a/src/swing/scala/swing/SwingActor.scala
+++ b/src/swing/scala/swing/SwingActor.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,26 +10,6 @@
package scala.swing
-import scala.actors._
-
// Dummy to keep ant from recompiling on every run.
+ at deprecated("Will be removed in 2.11.0", "2.10.1")
trait SwingActor { }
-
-/*object SwingActor {
- /**
- * Similar to Actor.actor, but creates an instance of a SwingActor.
- */
- def apply(body: => Unit): Actor =
- new SwingActor { def act() = body }.start()
-}
-
-/**
- * An actor that runs on the Swing event dispatching thread (EDT).
- */
-abstract class SwingActor extends Actor {
- override val scheduler = new SchedulerAdapter {
- def execute(op: =>Unit) = Swing onEDT op
- def onTerminate(a: Actor)(op: => Unit) {}
- def terminated(a: Actor) {}
- }
-}*/
diff --git a/src/swing/scala/swing/SwingApplication.scala b/src/swing/scala/swing/SwingApplication.scala
index 6da3ab2..214001f 100644
--- a/src/swing/scala/swing/SwingApplication.scala
+++ b/src/swing/scala/swing/SwingApplication.scala
@@ -1,9 +1,17 @@
package scala.swing
+/** Convenience class with utility methods for GUI applications. */
abstract class SwingApplication extends Reactor {
+
+ /** Initializes the application and runs the given program. */
def main(args: Array[String]) = Swing.onEDT { startup(args) }
+ /** Called before the GUI is created. Override to customize. */
def startup(args: Array[String])
+
+ /** Finalizes the application by calling `shutdown` and exits.*/
def quit() { shutdown(); sys.exit(0) }
+
+ /** Called before the application is exited. Override to customize. */
def shutdown() {}
}
diff --git a/src/swing/scala/swing/SwingWorker.scala b/src/swing/scala/swing/SwingWorker.scala
index 0e514e3..f4eeb58 100644
--- a/src/swing/scala/swing/SwingWorker.scala
+++ b/src/swing/scala/swing/SwingWorker.scala
@@ -2,10 +2,12 @@ package scala.swing
import scala.actors._
+ at deprecated("Will be removed in 2.11.0", "2.10.1")
object SwingWorker {
}
+ at deprecated("Depends on the deprecated package scala.actors. Will be removed in 2.11.0", "2.10.1")
abstract class SwingWorker extends Actor {
def queue() {
@@ -18,4 +20,4 @@ abstract class SwingWorker extends Actor {
private var _cancelled = false
def cancelled: Boolean = _cancelled
def cancelled_=(b: Boolean) { _cancelled = b }
-}
\ No newline at end of file
+}
diff --git a/src/swing/scala/swing/TabbedPane.scala b/src/swing/scala/swing/TabbedPane.scala
index 1c4a2b2..3380505 100644
--- a/src/swing/scala/swing/TabbedPane.scala
+++ b/src/swing/scala/swing/TabbedPane.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -110,6 +110,9 @@ class TabbedPane extends Component with Publisher {
/**
* Possible values are Left, Right, Top, Bottom.
*/
+ def tabPlacement_=(b: Alignment.Value) { peer.setTabPlacement(b.id) }
+
+ @deprecated("Use tabPlacement_=() instead.", "2.9.1")
def tabPlacement(b: Alignment.Value) { peer.setTabPlacement(b.id) }
/**
diff --git a/src/swing/scala/swing/Table.scala b/src/swing/scala/swing/Table.scala
index 0cbef30..45053f0 100644
--- a/src/swing/scala/swing/Table.scala
+++ b/src/swing/scala/swing/Table.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -276,6 +276,11 @@ class Table extends Component with Scrollable.Wrapper {
*/
def apply(row: Int, column: Int): Any = model.getValueAt(row, viewToModelColumn(column))
+ // TODO: this is Java 6 stuff
+ // def apply(row: Int, column: Int): Any = model.getValueAt(viewToModelRow(row), viewToModelColumn(column))
+ //def viewToModelRow(idx: Int) = peer.convertRowIndexToModel(idx)
+ //def modelToViewRow(idx: Int) = peer.convertRowIndexToView(idx)
+
def viewToModelColumn(idx: Int) = peer.convertColumnIndexToModel(idx)
def modelToViewColumn(idx: Int) = peer.convertColumnIndexToView(idx)
diff --git a/src/swing/scala/swing/TextArea.scala b/src/swing/scala/swing/TextArea.scala
index d1ae462..01bf115 100644
--- a/src/swing/scala/swing/TextArea.scala
+++ b/src/swing/scala/swing/TextArea.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/TextComponent.scala b/src/swing/scala/swing/TextComponent.scala
index 765fdb1..48c03a5 100644
--- a/src/swing/scala/swing/TextComponent.scala
+++ b/src/swing/scala/swing/TextComponent.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/TextField.scala b/src/swing/scala/swing/TextField.scala
index 789a8f4..a28e8f8 100644
--- a/src/swing/scala/swing/TextField.scala
+++ b/src/swing/scala/swing/TextField.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/ToggleButton.scala b/src/swing/scala/swing/ToggleButton.scala
index 5833b11..3d3d0b9 100644
--- a/src/swing/scala/swing/ToggleButton.scala
+++ b/src/swing/scala/swing/ToggleButton.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/UIElement.scala b/src/swing/scala/swing/UIElement.scala
index 9c5120a..16b8738 100644
--- a/src/swing/scala/swing/UIElement.scala
+++ b/src/swing/scala/swing/UIElement.scala
@@ -1,18 +1,15 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
-
package scala.swing
import java.awt.Cursor
import event._
-import scala.collection.mutable.HashMap
import scala.ref._
import java.util.WeakHashMap
@@ -40,15 +37,15 @@ object UIElement {
case c: javax.swing.JComponent => c.getClientProperty(ClientKey)
case _ => wrapperCache.get(c)
}
- try { w.asInstanceOf[C] } catch { case _ => null }
+ try { w.asInstanceOf[C] } catch { case _: Exception => null }
}
/**
* Returns a wrapper for a given Java Swing peer. If there is a
* compatible wrapper in use, this method will return it.
*
- * `wrap` methods in companion objects of subclasses of UIElement have the
- * same behavior, except that they return more specific wrappers.
+ * `wrap` methods in companion objects of subclasses of `UIElement` have
+ * the same behavior, except that they return more specific wrappers.
*/
def wrap(c: java.awt.Component): UIElement = {
val w = cachedWrapper[UIElement](c)
@@ -60,7 +57,7 @@ object UIElement {
/**
* The base trait of all user interface elements. Subclasses belong to one
* of two groups: top-level elements such as windows and dialogs, or
- * <code>Component</code>s.
+ * `Component`s.
*
* @note [Java Swing] This trait does not have an exact counterpart in
* Java Swing. The peer is of type java.awt.Component since this is the
@@ -81,27 +78,24 @@ trait UIElement extends Proxy with LazyPublisher {
UIElement.cache(this)
def foreground: Color = peer.getForeground
- def foreground_=(c: Color) = peer.setForeground(c)
+ def foreground_=(c: Color) = peer setForeground c
def background: Color = peer.getBackground
- def background_=(c: Color) = peer.setBackground(c)
+ def background_=(c: Color) = peer setBackground c
def minimumSize = peer.getMinimumSize
- def minimumSize_=(x: Dimension) = peer.setMinimumSize(x)
+ def minimumSize_=(x: Dimension) = peer setMinimumSize x
def maximumSize = peer.getMaximumSize
- def maximumSize_=(x: Dimension) = peer.setMaximumSize(x)
+ def maximumSize_=(x: Dimension) = peer setMaximumSize x
def preferredSize = peer.getPreferredSize
- def preferredSize_=(x: Dimension) = peer.setPreferredSize(x)
+ def preferredSize_=(x: Dimension) = peer setPreferredSize x
def font: Font = peer.getFont
- def font_=(f: Font) = peer.setFont(f)
+ def font_=(f: Font) = peer setFont f
def locationOnScreen = peer.getLocationOnScreen
def location = peer.getLocation
def bounds = peer.getBounds
def size = peer.getSize
- @deprecated("Explicit size assignment for UIElements is not supported anymore. " +
- "Use a layout manager or subclass Window.", "2.8.0")
- def size_=(dim: Dimension) = peer.setSize(dim)
def locale = peer.getLocale
def toolkit = peer.getToolkit
diff --git a/src/swing/scala/swing/Window.scala b/src/swing/scala/swing/Window.scala
index 71982a9..5bdb50e 100644
--- a/src/swing/scala/swing/Window.scala
+++ b/src/swing/scala/swing/Window.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -27,6 +27,14 @@ abstract class Window extends UIElement with RootPanel with Publisher { outer =>
protected trait InterfaceMixin extends javax.swing.RootPaneContainer
+ protected trait SuperMixin extends AWTWindow {
+ override protected def processWindowEvent(e: java.awt.event.WindowEvent) {
+ super.processWindowEvent(e)
+ if (e.getID() == java.awt.event.WindowEvent.WINDOW_CLOSING)
+ closeOperation()
+ }
+ }
+
/**
* This method is called when the window is closing, after all other window
* event listeners have been processed.
@@ -43,7 +51,7 @@ abstract class Window extends UIElement with RootPanel with Publisher { outer =>
peer.getRootPane.setDefaultButton(b.peer)
}
def defaultButton_=(b: Option[Button]) {
- peer.getRootPane.setDefaultButton(b map (_.peer) orNull)
+ peer.getRootPane.setDefaultButton(b.map(_.peer).orNull)
}
def dispose() { peer.dispose() }
@@ -53,7 +61,7 @@ abstract class Window extends UIElement with RootPanel with Publisher { outer =>
def setLocationRelativeTo(c: UIElement) { peer.setLocationRelativeTo(c.peer) }
def centerOnScreen() { peer.setLocationRelativeTo(null) }
def location_=(p: Point) { peer.setLocation(p) }
- override def size_=(size: Dimension) { peer.setSize(size) }
+ def size_=(size: Dimension) { peer.setSize(size) }
def bounds_=(rect: Rectangle) { peer.setBounds(rect) }
def owner: Window = UIElement.cachedWrapper[Window](peer.getOwner)
diff --git a/src/swing/scala/swing/event/ActionEvent.scala b/src/swing/scala/swing/event/ActionEvent.scala
index 6b291be..7b2de43 100644
--- a/src/swing/scala/swing/event/ActionEvent.scala
+++ b/src/swing/scala/swing/event/ActionEvent.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/AdjustingEvent.scala b/src/swing/scala/swing/event/AdjustingEvent.scala
index b9469bc..a4b7d29 100644
--- a/src/swing/scala/swing/event/AdjustingEvent.scala
+++ b/src/swing/scala/swing/event/AdjustingEvent.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -11,13 +11,12 @@
package scala.swing
package event
-/** <p>
- * An event that indicates some editing operation that can be still in
- * progress.<br/>
- * Example: dragging a slider creates a number of <code>AdjustmentEvents</code>
- * with <code>adjusting == true</code> until the user finally releases the
- * mouse button.
- * </p>
+/** An event that indicates some editing operation that can be still
+ * in progress.
+ *
+ * Example: dragging a slider creates a number of `AdjustmentEvents`
+ * with `adjusting == '''true'''` until the user finally releases the
+ * mouse button.
*/
trait AdjustingEvent extends ComponentEvent {
def adjusting: Boolean
diff --git a/src/swing/scala/swing/event/BackgroundChanged.scala b/src/swing/scala/swing/event/BackgroundChanged.scala
index 7ed5b96..bdd67f9 100644
--- a/src/swing/scala/swing/event/BackgroundChanged.scala
+++ b/src/swing/scala/swing/event/BackgroundChanged.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/ButtonClicked.scala b/src/swing/scala/swing/event/ButtonClicked.scala
index 9585f37..d022019 100644
--- a/src/swing/scala/swing/event/ButtonClicked.scala
+++ b/src/swing/scala/swing/event/ButtonClicked.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/CaretUpdate.scala b/src/swing/scala/swing/event/CaretUpdate.scala
index 6d186a4..2821175 100644
--- a/src/swing/scala/swing/event/CaretUpdate.scala
+++ b/src/swing/scala/swing/event/CaretUpdate.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/ComponentEvent.scala b/src/swing/scala/swing/event/ComponentEvent.scala
index d8cff46..701b962 100644
--- a/src/swing/scala/swing/event/ComponentEvent.scala
+++ b/src/swing/scala/swing/event/ComponentEvent.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -14,12 +14,3 @@ package event
trait ComponentEvent extends UIEvent {
val source: Component
}
-
- at deprecated("Use UIElementMoved instead.", "2.8.0")
-case class ComponentMoved(source: Component) extends ComponentEvent
- at deprecated("Use UIElementResized instead.", "2.8.0")
-case class ComponentResized(source: Component) extends ComponentEvent
- at deprecated("Use UIElementShown instead.", "2.8.0")
-case class ComponentShown(source: Component) extends ComponentEvent
- at deprecated("Use UIElementHidden instead.", "2.8.0")
-case class ComponentHidden(source: Component) extends ComponentEvent
diff --git a/src/swing/scala/swing/event/ContainerEvent.scala b/src/swing/scala/swing/event/ContainerEvent.scala
index 3671e21..46f3768 100644
--- a/src/swing/scala/swing/event/ContainerEvent.scala
+++ b/src/swing/scala/swing/event/ContainerEvent.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/EditDone.scala b/src/swing/scala/swing/event/EditDone.scala
index 1017465..9d38234 100644
--- a/src/swing/scala/swing/event/EditDone.scala
+++ b/src/swing/scala/swing/event/EditDone.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/Event.scala b/src/swing/scala/swing/event/Event.scala
index d6f096b..fd11356 100644
--- a/src/swing/scala/swing/event/Event.scala
+++ b/src/swing/scala/swing/event/Event.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/FocusEvent.scala b/src/swing/scala/swing/event/FocusEvent.scala
index 3d1c0f4..5c29d8f 100644
--- a/src/swing/scala/swing/event/FocusEvent.scala
+++ b/src/swing/scala/swing/event/FocusEvent.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/FontChanged.scala b/src/swing/scala/swing/event/FontChanged.scala
index e3f46e4..ca936e1 100644
--- a/src/swing/scala/swing/event/FontChanged.scala
+++ b/src/swing/scala/swing/event/FontChanged.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/ForegroundChanged.scala b/src/swing/scala/swing/event/ForegroundChanged.scala
index 9b45834..42b45aa 100644
--- a/src/swing/scala/swing/event/ForegroundChanged.scala
+++ b/src/swing/scala/swing/event/ForegroundChanged.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/InputEvent.scala b/src/swing/scala/swing/event/InputEvent.scala
index 2df0bbc..b515b01 100644
--- a/src/swing/scala/swing/event/InputEvent.scala
+++ b/src/swing/scala/swing/event/InputEvent.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/Key.scala b/src/swing/scala/swing/event/Key.scala
index e26acf7..5e9e0cb 100644
--- a/src/swing/scala/swing/event/Key.scala
+++ b/src/swing/scala/swing/event/Key.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/KeyEvent.scala b/src/swing/scala/swing/event/KeyEvent.scala
index 6251d2d..1345c77 100644
--- a/src/swing/scala/swing/event/KeyEvent.scala
+++ b/src/swing/scala/swing/event/KeyEvent.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/ListEvent.scala b/src/swing/scala/swing/event/ListEvent.scala
index 9f8fcf7..bdb769d 100644
--- a/src/swing/scala/swing/event/ListEvent.scala
+++ b/src/swing/scala/swing/event/ListEvent.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -28,7 +28,7 @@ object ListChanged {
class ListChanged[A](override val source: ListView[A]) extends ListChange(source)
object ListElementsAdded {
- def unapply[A](e: ListElementsAdded[A]) = Some(e.source, e.range)
+ def unapply[A](e: ListElementsAdded[A]) = Some((e.source, e.range))
def apply[A](source: ListView[A], range: Range) = new ListElementsAdded(source, range)
}
@@ -36,7 +36,7 @@ class ListElementsAdded[A](override val source: ListView[A], val range: Range)
extends ListChange(source)
object ListElementsRemoved {
- def unapply[A](e: ListElementsRemoved[A]) = Some(e.source, e.range)
+ def unapply[A](e: ListElementsRemoved[A]) = Some((e.source, e.range))
def apply[A](source: ListView[A], range: Range) = new ListElementsRemoved(source, range)
}
class ListElementsRemoved[A](override val source: ListView[A], val range: Range)
diff --git a/src/swing/scala/swing/event/MouseEvent.scala b/src/swing/scala/swing/event/MouseEvent.scala
index bc139c4..8629d71 100644
--- a/src/swing/scala/swing/event/MouseEvent.scala
+++ b/src/swing/scala/swing/event/MouseEvent.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/SelectionEvent.scala b/src/swing/scala/swing/event/SelectionEvent.scala
index 38eb2ea..39d6a13 100644
--- a/src/swing/scala/swing/event/SelectionEvent.scala
+++ b/src/swing/scala/swing/event/SelectionEvent.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/TableEvent.scala b/src/swing/scala/swing/event/TableEvent.scala
index 9900566..c420ea2 100644
--- a/src/swing/scala/swing/event/TableEvent.scala
+++ b/src/swing/scala/swing/event/TableEvent.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/UIEvent.scala b/src/swing/scala/swing/event/UIEvent.scala
index 0960b10..a4644b0 100644
--- a/src/swing/scala/swing/event/UIEvent.scala
+++ b/src/swing/scala/swing/event/UIEvent.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/ValueChanged.scala b/src/swing/scala/swing/event/ValueChanged.scala
index 4ce1ffa..ef08085 100644
--- a/src/swing/scala/swing/event/ValueChanged.scala
+++ b/src/swing/scala/swing/event/ValueChanged.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/WindowActivated.scala b/src/swing/scala/swing/event/WindowActivated.scala
index 755c76f..1473242 100644
--- a/src/swing/scala/swing/event/WindowActivated.scala
+++ b/src/swing/scala/swing/event/WindowActivated.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/WindowClosing.scala b/src/swing/scala/swing/event/WindowClosing.scala
index 47ba4fc..3c64aeb 100644
--- a/src/swing/scala/swing/event/WindowClosing.scala
+++ b/src/swing/scala/swing/event/WindowClosing.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/WindowDeactivated.scala b/src/swing/scala/swing/event/WindowDeactivated.scala
index 96ccb2a..f0eec57 100644
--- a/src/swing/scala/swing/event/WindowDeactivated.scala
+++ b/src/swing/scala/swing/event/WindowDeactivated.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/WindowDeiconified.scala b/src/swing/scala/swing/event/WindowDeiconified.scala
index 659b5dc..6e07f85 100644
--- a/src/swing/scala/swing/event/WindowDeiconified.scala
+++ b/src/swing/scala/swing/event/WindowDeiconified.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/WindowEvent.scala b/src/swing/scala/swing/event/WindowEvent.scala
index 6653094..b8ca329 100644
--- a/src/swing/scala/swing/event/WindowEvent.scala
+++ b/src/swing/scala/swing/event/WindowEvent.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/WindowIconified.scala b/src/swing/scala/swing/event/WindowIconified.scala
index 64c8c61..3b5139f 100644
--- a/src/swing/scala/swing/event/WindowIconified.scala
+++ b/src/swing/scala/swing/event/WindowIconified.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/event/WindowOpened.scala b/src/swing/scala/swing/event/WindowOpened.scala
index 7b812f5..f5854ed 100644
--- a/src/swing/scala/swing/event/WindowOpened.scala
+++ b/src/swing/scala/swing/event/WindowOpened.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/model/Matrix.scala b/src/swing/scala/swing/model/Matrix.scala
index dd116c4..664d44a 100644
--- a/src/swing/scala/swing/model/Matrix.scala
+++ b/src/swing/scala/swing/model/Matrix.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/swing/scala/swing/package.scala b/src/swing/scala/swing/package.scala
index d5095f0..4549766 100644
--- a/src/swing/scala/swing/package.scala
+++ b/src/swing/scala/swing/package.scala
@@ -14,6 +14,9 @@ package object swing {
type Image = java.awt.Image
type Font = java.awt.Font
+ implicit lazy val reflectiveCalls = scala.language.reflectiveCalls
+ implicit lazy val implicitConversions = scala.language.implicitConversions
+
private[swing] def ifNull[A](o: Object, a: A): A = if(o eq null) a else o.asInstanceOf[A]
private[swing] def toOption[A](o: Object): Option[A] = if(o eq null) None else Some(o.asInstanceOf[A])
private[swing] def toAnyRef(x: Any): AnyRef = x.asInstanceOf[AnyRef]
diff --git a/src/swing/scala/swing/test/ButtonApp.scala b/src/swing/scala/swing/test/ButtonApp.scala
deleted file mode 100644
index dcf567d..0000000
--- a/src/swing/scala/swing/test/ButtonApp.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-package scala.swing
-package test
-
-import java.awt.Dimension
-
-import swing._
-import swing.event._
-
-object ButtonApp extends SimpleSwingApplication {
- def top = new MainFrame {
- title = "My Frame"
- contents = new GridPanel(2, 2) {
- hGap = 3
- vGap = 3
- contents += new Button {
- text = "Press Me!"
- reactions += {
- case ButtonClicked(_) => text = "Hello Scala"
- }
- }
- }
- size = new Dimension(300, 80)
- }
-}
-
diff --git a/src/swing/scala/swing/test/CelsiusConverter.scala b/src/swing/scala/swing/test/CelsiusConverter.scala
deleted file mode 100644
index 4ead632..0000000
--- a/src/swing/scala/swing/test/CelsiusConverter.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-import event._
-
-/** A GUI app to convert celsius to centigrade
- */
-object CelsiusConverter extends SimpleSwingApplication {
- def top = new MainFrame {
- title = "Convert Celsius to Fahrenheit"
- val tempCelsius = new TextField
- val celsiusLabel = new Label {
- text = "Celsius"
- border = Swing.EmptyBorder(5, 5, 5, 5)
- }
- val convertButton = new Button {
- text = "Convert"//new javax.swing.ImageIcon("c:\\workspace\\gui\\images\\convert.gif")
- //border = Border.Empty(5, 5, 5, 5)
- }
- val fahrenheitLabel = new Label {
- text = "Fahrenheit "
- border = Swing.EmptyBorder(5, 5, 5, 5)
- listenTo(convertButton, tempCelsius)
-
- def convert() {
- val c = Integer.parseInt(tempCelsius.text)
- val f = c * 9 / 5 + 32
- text = "<html><font color = red>"+f+"</font> Fahrenheit</html>"
- }
-
- reactions += {
- case ButtonClicked(_) | EditDone(_) => convert()
- }
- }
- contents = new GridPanel(2,2) {
- contents.append(tempCelsius, celsiusLabel, convertButton, fahrenheitLabel)
- border = Swing.EmptyBorder(10, 10, 10, 10)
- }
- //defaultButton = Some(convertButton)
- }
-}
-
diff --git a/src/swing/scala/swing/test/CelsiusConverter2.scala b/src/swing/scala/swing/test/CelsiusConverter2.scala
deleted file mode 100644
index 5ce1b15..0000000
--- a/src/swing/scala/swing/test/CelsiusConverter2.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-import event._
-
-object CelsiusConverter2 extends SimpleSwingApplication {
- def newField = new TextField {
- text = "0"
- columns = 5
- horizontalAlignment = Alignment.Right
- }
- val celsius = newField
- val fahrenheit = newField
-
- listenTo(fahrenheit, celsius)
- reactions += {
- case EditDone(`fahrenheit`) =>
- val f = Integer.parseInt(fahrenheit.text)
- val c = (f - 32) * 5 / 9
- celsius.text = c.toString
- case EditDone(`celsius`) =>
- val c = Integer.parseInt(celsius.text)
- val f = c * 9 / 5 + 32
- fahrenheit.text = f.toString
- }
-
- lazy val ui = new FlowPanel(celsius, new Label(" Celsius = "),
- fahrenheit, new Label(" Fahrenheit")) {
- border = Swing.EmptyBorder(15, 10, 10, 10)
- }
- def top = new MainFrame {
- title = "Convert Celsius / Fahrenheit"
- contents = ui
- }
-}
-
diff --git a/src/swing/scala/swing/test/ComboBoxes.scala b/src/swing/scala/swing/test/ComboBoxes.scala
deleted file mode 100644
index cf1a70d..0000000
--- a/src/swing/scala/swing/test/ComboBoxes.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-import event._
-import java.util.Date
-import java.awt.Color
-import java.text.SimpleDateFormat
-import javax.swing.{Icon, ImageIcon}
-
-/**
- * Demonstrates how to use combo boxes and custom item renderers.
- *
- * TODO: clean up layout
- */
-object ComboBoxes extends SimpleSwingApplication {
- import ComboBox._
- lazy val ui = new FlowPanel {
- contents += new ComboBox(List(1,2,3,4))
-
- val patterns = List("dd MMMMM yyyy",
- "dd.MM.yy",
- "MM/dd/yy",
- "yyyy.MM.dd G 'at' hh:mm:ss z",
- "EEE, MMM d, ''yy",
- "h:mm a",
- "H:mm:ss:SSS",
- "K:mm a,z",
- "yyyy.MMMMM.dd GGG hh:mm aaa")
- val dateBox = new ComboBox(patterns) { makeEditable() }
- contents += dateBox
- val field = new TextField(20) { editable = false }
- contents += field
-
- reactions += {
- case SelectionChanged(`dateBox`) => reformat()
- }
- listenTo(dateBox.selection)
-
- def reformat() {
- try {
- val today = new Date
- val formatter = new SimpleDateFormat(dateBox.selection.item)
- val dateString = formatter.format(today)
- field.foreground = Color.black
- field.text = dateString
- } catch {
- case e: IllegalArgumentException =>
- field.foreground = Color.red
- field.text = "Error: " + e.getMessage
- }
- }
-
-
- val icons = try {
- List(new ImageIcon(resourceFromClassloader("images/margarita1.jpg")),
- new ImageIcon(resourceFromClassloader("images/margarita2.jpg")),
- new ImageIcon(resourceFromClassloader("images/rose.jpg")),
- new ImageIcon(resourceFromClassloader("images/banana.jpg")))
- } catch {
- case _ =>
- println("Couldn't load images for combo box")
- List(Swing.EmptyIcon)
- }
-
- val iconBox = new ComboBox(icons) {
- renderer = new ListView.AbstractRenderer[Icon, Label](new Label) {
- def configure(list: ListView[_], isSelected: Boolean, focused: Boolean, icon: Icon, index: Int) {
- component.icon = icon
- component.xAlignment = Alignment.Center
- if(isSelected) {
- component.border = Swing.LineBorder(list.selectionBackground, 3)
- } else {
- component.border = Swing.EmptyBorder(3)
- }
- }
- }
- }
- contents += iconBox
- }
-
- def top = new MainFrame {
- title = "ComboBoxes Demo"
- contents = ui
- }
-}
-
diff --git a/src/swing/scala/swing/test/CountButton.scala b/src/swing/scala/swing/test/CountButton.scala
deleted file mode 100644
index 373db78..0000000
--- a/src/swing/scala/swing/test/CountButton.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package scala.swing
-package test
-
-import scala.swing._
-import scala.swing.event._
-
-object CountButton extends SimpleSwingApplication {
- def top = new MainFrame {
- title = "My Frame"
- contents = new GridPanel(2, 2) {
- hGap = 3
- vGap = 3
- val button = new Button {
- text = "Press Me!"
- }
- contents += button
- val label = new Label {
- text = "No button clicks registered"
- }
- contents += label
-
- listenTo(button)
- var nclicks = 0
- reactions += {
- case ButtonClicked(b) =>
- nclicks += 1
- label.text = "Number of button clicks: "+nclicks
- }
- }
- }
-}
diff --git a/src/swing/scala/swing/test/Dialogs.scala b/src/swing/scala/swing/test/Dialogs.scala
deleted file mode 100644
index 14fa2fe..0000000
--- a/src/swing/scala/swing/test/Dialogs.scala
+++ /dev/null
@@ -1,177 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-import swing.event._
-
-object Dialogs extends SimpleSwingApplication {
- import TabbedPane._
-
- lazy val label = new Label("No Result yet")
- lazy val tabs = new TabbedPane {
- pages += new Page("File", new GridBagPanel { grid =>
- import GridBagPanel._
- val buttonText = new TextField("Click Me")
-
- val c = new Constraints
- c.fill = Fill.Horizontal
- c.grid = (1,1)
-
- val chooser = new FileChooser
- layout(new Button(Action("Open") {
- chooser.showOpenDialog(grid)
- })) = c
-
- c.grid = (1,2)
- layout(new Button(Action("Save") {
- chooser.showSaveDialog(grid)
- })) = c
-
- c.grid = (1,3)
- layout(new Button(Action("Custom") {
- chooser.showDialog(grid, buttonText.text)
- })) = c
-
- c.grid = (2,3)
- layout(new Label(" with Text ")) = c
-
- c.grid = (3,3)
- c.ipadx = 50
- layout(buttonText) = c
-
- border = Swing.EmptyBorder(5, 5, 5, 5)
- })
- pages += new Page("Simple Modal Dialogs", new BorderPanel {
- import BorderPanel._
- val mutex = new ButtonGroup
- val ok = new RadioButton("OK (in the L&F's words)")
- val ynlf = new RadioButton("Yes/No (in the L&F's words)")
- val ynp = new RadioButton("Yes/No (in the programmer's words)")
- val yncp = new RadioButton("Yes/No/Cancel (in the programmer's words)")
- val radios = List(ok, ynlf, ynp, yncp)
- mutex.buttons ++= radios
- mutex.select(ok)
- val buttons = new BoxPanel(Orientation.Vertical) {
- contents ++= radios
- }
- layout(buttons) = Position.North
- layout(new Button(Action("Show It!") {
- import Dialog._
- mutex.selected.get match {
- case `ok` =>
- showMessage(buttons, "Eggs aren't supposed to be green.")
- case `ynlf` =>
- label.text = showConfirmation(buttons,
- "Would you like green eggs and ham?",
- "An Inane Question") match {
- case Result.Yes => "Ewww!"
- case Result.No => "Me neither!"
- case _ => "Come on -- tell me!"
- }
- case `ynp` =>
- val options = List("Yes, please",
- "No, thanks",
- "No eggs, no ham!")
- label.text = showOptions(buttons,
- "Would you like some green eggs to go with that ham?",
- "A Silly Question",
- entries = options,
- initial = 2) match {
- case Result.Yes => "You're kidding!"
- case Result.No => "I don't like them, either."
- case _ => "Come on -- 'fess up!"
- }
- case `yncp` =>
- val options = List("Yes, please",
- "No, thanks",
- "No eggs, no ham!")
- label.text = showOptions(buttons,
- message = "Would you like some green eggs to go with that ham?",
- title = "A Silly Question",
- entries = options,
- initial = 2) match {
- case Result.Yes => "Here you go: green eggs and ham!"
- case Result.No => "OK, just the ham, then."
- case Result.Cancel => "Well, I'm certainly not going to eat them!"
- case _ => "Please tell me what you want!"
- }
- }
- })) = Position.South
- })
- pages += new Page("More Dialogs", new BorderPanel {
- import BorderPanel._
- val mutex = new ButtonGroup
- val pick = new RadioButton("Pick one of several choices")
- val enter = new RadioButton("Enter some text")
- val custom = new RadioButton("Custom")
- val customUndec = new RadioButton("Custom undecorated")
- val custom2 = new RadioButton("2 custom dialogs")
- val radios = List(pick, enter, custom, customUndec, custom2)
- mutex.buttons ++= radios
- mutex.select(pick)
- val buttons = new BoxPanel(Orientation.Vertical) {
- contents ++= radios
- }
- layout(buttons) = Position.North
- layout(new Button(Action("Show It!") {
- import Dialog._
- mutex.selected.get match {
- case `pick` =>
- val possibilities = List("ham", "spam", "yam")
- val s = showInput(buttons,
- "Complete the sentence:\n\"Green eggs and...\"",
- "Customized Dialog",
- Message.Plain,
- Swing.EmptyIcon,
- possibilities, "ham")
-
- //If a string was returned, say so.
- label.text = if ((s != None) && (s.get.length > 0))
- "Green eggs and... " + s.get + "!"
- else
- "Come on, finish the sentence!"
- case `enter` =>
- val s = showInput(buttons,
- "Complete the sentence:\n\"Green eggs and...\"",
- "Customized Dialog",
- Message.Plain,
- Swing.EmptyIcon,
- Nil, "ham")
-
- //If a string was returned, say so.
- label.text = if ((s != None) && (s.get.length > 0))
- "Green eggs and... " + s.get + "!"
- else
- "Come on, finish the sentence!"
- case `custom` =>
- val dialog = new Dialog(top)
- dialog.open()
- dialog.contents = Button("Close Me!") { dialog.close() }
- case `customUndec` =>
- val dialog = new Dialog with RichWindow.Undecorated
- dialog.open()
- dialog.contents = Button("Close Me!") { dialog.close() }
- case `custom2` =>
- val d1 = new Dialog
- val d2 = new Dialog(d1)
- d1.open()
- d2.open()
- d1.contents = Button("Close Me! I am the owner and will automatically close the other one") { d1.close() }
- d2.contents = Button("Close Me!") { d2.close() }
- }
- })) = Position.South
- })
- }
-
- lazy val ui: Panel = new BorderPanel {
- layout(tabs) = BorderPanel.Position.Center
- layout(label) = BorderPanel.Position.South
- }
-
-
- lazy val top = new MainFrame {
- title = "Dialog Demo"
- contents = ui
- }
-}
-
diff --git a/src/swing/scala/swing/test/GridBagDemo.scala b/src/swing/scala/swing/test/GridBagDemo.scala
deleted file mode 100644
index ebb538f..0000000
--- a/src/swing/scala/swing/test/GridBagDemo.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-import swing.event._
-import GridBagPanel._
-import java.awt.Insets
-
-object GridBagDemo extends SimpleSwingApplication {
- lazy val ui = new GridBagPanel {
- val c = new Constraints
- val shouldFill = true
- if (shouldFill) {
- c.fill = Fill.Horizontal
- }
-
- val button1 = new Button("Button 1")
-
- c.weightx = 0.5
-
- c.fill = Fill.Horizontal
- c.gridx = 0;
- c.gridy = 0;
- layout(button1) = c
-
- val button2 = new Button("Button 2")
- c.fill = Fill.Horizontal
- c.weightx = 0.5;
- c.gridx = 1;
- c.gridy = 0;
- layout(button2) = c
-
- val button3 = new Button("Button 3")
- c.fill = Fill.Horizontal
- c.weightx = 0.5;
- c.gridx = 2;
- c.gridy = 0;
- layout(button3) = c
-
- val button4 = new Button("Long-Named Button 4")
- c.fill = Fill.Horizontal
- c.ipady = 40; //make this component tall
- c.weightx = 0.0;
- c.gridwidth = 3;
- c.gridx = 0;
- c.gridy = 1;
- layout(button4) = c
-
- val button5 = new Button("5")
- c.fill = Fill.Horizontal
- c.ipady = 0; //reset to default
- c.weighty = 1.0; //request any extra vertical space
- c.anchor = Anchor.PageEnd
- c.insets = new Insets(10,0,0,0); //top padding
- c.gridx = 1; //aligned with button 2
- c.gridwidth = 2; //2 columns wide
- c.gridy = 2; //third row
- layout(button5) = c
- }
-
- def top = new MainFrame {
- title = "GridBag Demo"
- contents = ui
- }
-}
diff --git a/src/swing/scala/swing/test/HelloWorld.scala b/src/swing/scala/swing/test/HelloWorld.scala
deleted file mode 100644
index 6014a14..0000000
--- a/src/swing/scala/swing/test/HelloWorld.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-
-/**
- * A simple swing demo.
- */
-object HelloWorld extends SimpleSwingApplication {
- def top = new MainFrame {
- title = "Hello, World!"
- contents = new Button("Click Me!")
- }
-}
\ No newline at end of file
diff --git a/src/swing/scala/swing/test/LabelTest.scala b/src/swing/scala/swing/test/LabelTest.scala
deleted file mode 100644
index 47eedb8..0000000
--- a/src/swing/scala/swing/test/LabelTest.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-package scala.swing
-package test
-
-import scala.swing._
-import scala.swing.event._
-
-object LabelTest extends SimpleSwingApplication {
- def top = new MainFrame{
- contents = new Label {
- text = "Hello"
- import java.awt.event._
- listenTo(mouse.clicks)
- reactions += {
- case MousePressed(_,_,_,_,_) =>
- println("Mouse pressed2")
- }
- }
- }
-}
-
diff --git a/src/swing/scala/swing/test/LinePainting.scala b/src/swing/scala/swing/test/LinePainting.scala
deleted file mode 100644
index 78a94db..0000000
--- a/src/swing/scala/swing/test/LinePainting.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-package scala.swing
-package test
-import scala.swing.Swing._
-import scala.swing.{MainFrame, Panel, SimpleGUIApplication}
-import scala.swing.event._
-import java.awt.{Color, Dimension, Graphics, Graphics2D, Point, geom}
-
-/**
- * Dragging the mouse draws a simple graph
- *
- * @author Frank Teubler, Ingo Maier
- */
-object LinePainting extends SimpleSwingApplication {
- lazy val ui = new Panel {
- background = Color.white
- preferredSize = (200,200)
-
- focusable = true
- listenTo(mouse.clicks, mouse.moves, keys)
-
- reactions += {
- case e: MousePressed =>
- moveTo(e.point)
- requestFocusInWindow()
- case e: MouseDragged => lineTo(e.point)
- case e: MouseReleased => lineTo(e.point)
- case KeyTyped(_,'c',_,_) =>
- path = new geom.GeneralPath
- repaint()
- case _: FocusLost => repaint()
- }
-
- /* records the dragging */
- var path = new geom.GeneralPath
-
- def lineTo(p: Point) { path.lineTo(p.x, p.y); repaint() }
- def moveTo(p: Point) { path.moveTo(p.x, p.y); repaint() }
-
- override def paintComponent(g: Graphics2D) = {
- super.paintComponent(g)
- g.setColor(new Color(100,100,100))
- g.drawString("Press left mouse button and drag to paint." +
- (if(hasFocus) " Press 'c' to clear." else ""), 10, size.height-10)
- g.setColor(Color.black)
- g.draw(path)
- }
- }
-
- def top = new MainFrame {
- title = "Simple Line Painting Demo"
- contents = ui
- }
-}
diff --git a/src/swing/scala/swing/test/ListViewDemo.scala b/src/swing/scala/swing/test/ListViewDemo.scala
deleted file mode 100644
index 2b8c8c0..0000000
--- a/src/swing/scala/swing/test/ListViewDemo.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-package scala.swing
-package test
-
-object ListViewDemo extends SimpleSwingApplication {
- def top = new MainFrame {
- case class City(name: String, country: String, population: Int, capital: Boolean)
- val items = List(City("Lausanne", "Switzerland", 129273, false),
- City("Paris", "France", 2203817, true),
- City("New York", "USA", 8363710 , false),
- City("Berlin", "Germany", 3416300, true),
- City("Tokio", "Japan", 12787981, true))
- import ListView._
- contents = new FlowPanel(new ScrollPane(new ListView(items) {
- renderer = Renderer(_.name)
- }))
- //new ScrollPane(new Table(items)))
- }
-}
diff --git a/src/swing/scala/swing/test/SimpleApplet.scala b/src/swing/scala/swing/test/SimpleApplet.scala
deleted file mode 100644
index d5f17f8..0000000
--- a/src/swing/scala/swing/test/SimpleApplet.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-package scala.swing
-package test
-
-import event._
-
-class SimpleApplet extends Applet {
- object ui extends UI with Reactor {
- def init() = {
- val button = new Button("Press here!")
- val text = new TextArea("Java Version: " + util.Properties.javaVersion + "\n")
- listenTo(button)
- reactions += {
- case ButtonClicked(_) => text.text += "Button Pressed!\n"
- case _ =>
- }
- contents = new BoxPanel(Orientation.Vertical) { contents.append(button, text) }
- }
- }
-}
diff --git a/src/swing/scala/swing/test/SwingApp.scala b/src/swing/scala/swing/test/SwingApp.scala
deleted file mode 100644
index b47d778..0000000
--- a/src/swing/scala/swing/test/SwingApp.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-import swing.event._
-
-object SwingApp extends SimpleSwingApplication {
- def top = new MainFrame {
- title = "SwingApp"
- var numclicks = 0
- object label extends Label {
- val prefix = "Number of button clicks: "
- text = prefix + "0 "
- listenTo(button)
- reactions += {
- case ButtonClicked(button) =>
- numclicks = numclicks + 1
- text = prefix + numclicks
- }
- }
- object button extends Button {
- text = "I am a button"
- }
- contents = new FlowPanel {
- contents.append(button, label)
- border = Swing.EmptyBorder(5, 5, 5, 5)
- }
- }
-}
-
diff --git a/src/swing/scala/swing/test/TableSelection.scala b/src/swing/scala/swing/test/TableSelection.scala
deleted file mode 100644
index bbfef80..0000000
--- a/src/swing/scala/swing/test/TableSelection.scala
+++ /dev/null
@@ -1,97 +0,0 @@
-package scala.swing
-package test
-
-import java.awt.Dimension
-import swing.event._
-
-object TableSelection extends SimpleSwingApplication {
- val model = Array(List("Mary", "Campione", "Snowboarding", 5, false).toArray,
- List("Alison", "Huml", "Rowing", 5, false).toArray,
- List("Kathy", "Walrath", "Knitting", 5, false).toArray,
- List("Sharon", "Zakhour", "Speed reading", 5, false).toArray,
- List("Philip", "Milne", "Pool", 5, false).toArray)
- /*val model = Array.tabulate(10000) { i =>
- List("Mary", "Campione", "Snowboarding", i, false).toArray
- }*/
-
- lazy val ui = new BoxPanel(Orientation.Vertical) {
- val table = new Table(model, Array("First Name", "Last Name", "Sport", "# of Years", "Vegetarian")) {
- preferredViewportSize = new Dimension(500, 70)
- }
- //1.6:table.fillsViewportHeight = true
- listenTo(table.selection)
-
- contents += new ScrollPane(table)
- contents += new Label("Selection Mode")
-
- def radio(mutex: ButtonGroup, text: String): RadioButton = {
- val b = new RadioButton(text)
- listenTo(b)
- mutex.buttons += b
- contents += b
- b
- }
-
- val intervalMutex = new ButtonGroup
- val multiInterval = radio(intervalMutex, "Multiple Interval Selection")
- val elementInterval = radio(intervalMutex, "Single Selection")
- val singleInterval = radio(intervalMutex, "Single Interval Selection")
- intervalMutex.select(multiInterval)
-
- contents += new Label("Selection Options")
- val elemMutex = new ButtonGroup
- val rowSelection = radio(elemMutex, "Row Selection")
- val columnSelection = radio(elemMutex, "Column Selection")
- val cellSelection = radio(elemMutex, "Cell Selection")
- elemMutex.select(rowSelection)
-
- val output = new TextArea(5, 40) { editable = false }
- contents += new ScrollPane(output)
-
- def outputSelection() {
- output.append("Lead: " + table.selection.rows.leadIndex + "," +
- table.selection.columns.leadIndex + ". ")
- output.append("Rows:")
- for (c <- table.selection.rows) output.append(" " + c)
- output.append(". Columns:")
- for (c <- table.selection.columns) output.append(" " + c)
- output.append(".\n")
- }
-
- reactions += {
- case ButtonClicked(`multiInterval`) =>
- table.selection.intervalMode = Table.IntervalMode.MultiInterval
- if (cellSelection.selected) {
- elemMutex.select(rowSelection)
- table.selection.elementMode = Table.ElementMode.None
- }
- cellSelection.enabled = false
- case ButtonClicked(`elementInterval`) =>
- table.selection.intervalMode = Table.IntervalMode.Single
- cellSelection.enabled = true
- case ButtonClicked(`singleInterval`) =>
- table.selection.intervalMode = Table.IntervalMode.SingleInterval
- cellSelection.enabled = true
- case ButtonClicked(`rowSelection`) =>
- if (rowSelection.selected)
- table.selection.elementMode = Table.ElementMode.Row
- case ButtonClicked(`columnSelection`) =>
- if (columnSelection.selected)
- table.selection.elementMode = Table.ElementMode.Column
- case ButtonClicked(`cellSelection`) =>
- if (cellSelection.selected)
- table.selection.elementMode = Table.ElementMode.Cell
- case TableRowsSelected(_, range, false) =>
- output.append("Rows selected, changes: " + range + "\n")
- outputSelection()
- case TableColumnsSelected(_, range, false) =>
- output.append("Columns selected, changes " + range + "\n")
- outputSelection()
- }
- }
-
- def top = new MainFrame {
- title = "Table Selection"
- contents = ui
- }
-}
diff --git a/src/swing/scala/swing/test/UIDemo.scala b/src/swing/scala/swing/test/UIDemo.scala
deleted file mode 100644
index 9207c82..0000000
--- a/src/swing/scala/swing/test/UIDemo.scala
+++ /dev/null
@@ -1,148 +0,0 @@
-package scala.swing
-package test
-
-import swing._
-import event._
-import Swing._
-import ListView._
-
-object UIDemo extends SimpleSwingApplication {
- def top = new MainFrame {
- title = "Scala Swing Demo"
-
- /*
- * Create a menu bar with a couple of menus and menu items and
- * set the result as this frame's menu bar.
- */
- menuBar = new MenuBar {
- contents += new Menu("A Menu") {
- contents += new MenuItem("An item")
- contents += new MenuItem(Action("An action item") {
- println("Action '"+ title +"' invoked")
- })
- contents += new Separator
- contents += new CheckMenuItem("Check me")
- contents += new CheckMenuItem("Me too!")
- contents += new Separator
- val a = new RadioMenuItem("a")
- val b = new RadioMenuItem("b")
- val c = new RadioMenuItem("c")
- val mutex = new ButtonGroup(a,b,c)
- contents ++= mutex.buttons
- }
- contents += new Menu("Empty Menu")
- }
-
- /*
- * The root component in this frame is a panel with a border layout.
- */
- contents = new BorderPanel {
- import BorderPanel.Position._
-
- var reactLive = false
-
- val tabs = new TabbedPane {
- import TabbedPane._
- val buttons = new FlowPanel {
- border = Swing.EmptyBorder(5,5,5,5)
-
- contents += new BoxPanel(Orientation.Vertical) {
- border = CompoundBorder(TitledBorder(EtchedBorder, "Radio Buttons"), EmptyBorder(5,5,5,10))
- val a = new RadioButton("Green Vegetables")
- val b = new RadioButton("Red Meat")
- val c = new RadioButton("White Tofu")
- val mutex = new ButtonGroup(a,b,c)
- contents ++= mutex.buttons
- }
- contents += new BoxPanel(Orientation.Vertical) {
- border = CompoundBorder(TitledBorder(EtchedBorder, "Check Boxes"), EmptyBorder(5,5,5,10))
- val paintLabels = new CheckBox("Paint Labels")
- val paintTicks = new CheckBox("Paint Ticks")
- val snapTicks = new CheckBox("Snap To Ticks")
- val live = new CheckBox("Live")
- contents.append(paintLabels, paintTicks, snapTicks, live)
- listenTo(paintLabels, paintTicks, snapTicks, live)
- reactions += {
- case ButtonClicked(`paintLabels`) =>
- slider.paintLabels = paintLabels.selected
- case ButtonClicked(`paintTicks`) =>
- slider.paintTicks = paintTicks.selected
- case ButtonClicked(`snapTicks`) =>
- slider.snapToTicks = snapTicks.selected
- case ButtonClicked(`live`) =>
- reactLive = live.selected
- }
- }
- contents += new Button(Action("Center Frame") { centerOnScreen() })
- }
- pages += new Page("Buttons", buttons)
- pages += new Page("GridBag", GridBagDemo.ui)
- pages += new Page("Converter", CelsiusConverter2.ui)
- pages += new Page("Tables", TableSelection.ui)
- pages += new Page("Dialogs", Dialogs.ui)
- pages += new Page("Combo Boxes", ComboBoxes.ui)
- pages += new Page("Split Panes",
- new SplitPane(Orientation.Vertical, new Button("Hello"), new Button("World")) {
- continuousLayout = true
- })
-
- val password = new FlowPanel {
- contents += new Label("Enter your secret password here ")
- val field = new PasswordField(10)
- contents += field
- val label = new Label(field.text)
- contents += label
- listenTo(field)
- reactions += {
- case EditDone(`field`) => label.text = field.password.mkString
- }
- }
-
- pages += new Page("Password", password)
- pages += new Page("Painting", LinePainting.ui)
- //pages += new Page("Text Editor", TextEditor.ui)
- }
-
- val list = new ListView(tabs.pages) {
- selectIndices(0)
- selection.intervalMode = ListView.IntervalMode.Single
- renderer = ListView.Renderer(_.title)
- }
- val center = new SplitPane(Orientation.Vertical, new ScrollPane(list), tabs) {
- oneTouchExpandable = true
- continuousLayout = true
- }
- layout(center) = Center
-
- /*
- * This slider is used above, so we need lazy initialization semantics.
- * Objects or lazy vals are the way to go, but objects give us better
- * type inference at times.
- */
- object slider extends Slider {
- min = 0
- value = tabs.selection.index
- max = tabs.pages.size-1
- majorTickSpacing = 1
- }
- layout(slider) = South
-
- /*
- * Establish connection between the tab pane, slider, and list view.
- */
- listenTo(slider)
- listenTo(tabs.selection)
- listenTo(list.selection)
- reactions += {
- case ValueChanged(`slider`) =>
- if(!slider.adjusting || reactLive) tabs.selection.index = slider.value
- case SelectionChanged(`tabs`) =>
- slider.value = tabs.selection.index
- list.selectIndices(tabs.selection.index)
- case SelectionChanged(`list`) =>
- if (list.selection.items.length == 1)
- tabs.selection.page = list.selection.items(0)
- }
- }
- }
-}
\ No newline at end of file
diff --git a/src/swing/scala/swing/test/images/banana.jpg b/src/swing/scala/swing/test/images/banana.jpg
deleted file mode 100644
index 62267a4..0000000
Binary files a/src/swing/scala/swing/test/images/banana.jpg and /dev/null differ
diff --git a/src/swing/scala/swing/test/images/margarita1.jpg b/src/swing/scala/swing/test/images/margarita1.jpg
deleted file mode 100644
index d315f7c..0000000
Binary files a/src/swing/scala/swing/test/images/margarita1.jpg and /dev/null differ
diff --git a/src/swing/scala/swing/test/images/margarita2.jpg b/src/swing/scala/swing/test/images/margarita2.jpg
deleted file mode 100644
index c8b076e..0000000
Binary files a/src/swing/scala/swing/test/images/margarita2.jpg and /dev/null differ
diff --git a/src/swing/scala/swing/test/images/rose.jpg b/src/swing/scala/swing/test/images/rose.jpg
deleted file mode 100644
index d4a2b58..0000000
Binary files a/src/swing/scala/swing/test/images/rose.jpg and /dev/null differ
diff --git a/src/yourkit/scala/tools/util/YourkitProfiling.scala b/src/yourkit/scala/tools/util/YourkitProfiling.scala
deleted file mode 100644
index fb07eb1..0000000
--- a/src/yourkit/scala/tools/util/YourkitProfiling.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-package scala.tools
-package util
-
-import com.yourkit.api._
-import com.yourkit.runtime._
-import nsc.io._
-
-class YourkitProfiling extends Profiling {
- @volatile private var active = false
- @volatile private var freq: Option[Int] = None
- lazy val controller = new Controller
-
- def defaultFreq = 100
- def allocationFreq = freq
- def setAllocationFreq(x: Int) = freq = if (x <= 0) None else Some(x)
-
- def startRecordingAllocations() = {
- controller.startAllocationRecording(true, freq getOrElse defaultFreq, false, 0)
- }
- def stopRecordingAllocations() = {
- controller.stopAllocationRecording()
- }
-
- def startProfiling(): Unit = {
- if (isActive)
- return
-
- active = true
- daemonize(true) {
- try {
- controller.startCPUProfiling(ProfilingModes.CPU_SAMPLING, Controller.DEFAULT_FILTERS)
- if (freq.isDefined)
- startRecordingAllocations()
- }
- catch {
- case _: PresentableException => () // if it's already running, no big deal
- }
- }
- }
-
- def captureSnapshot() = {
- daemonize(true)(controller.captureSnapshot(ProfilingModes.SNAPSHOT_WITH_HEAP))
- }
-
- def stopProfiling() = {
- try {
- if (freq.isDefined)
- stopRecordingAllocations()
-
- controller.stopCPUProfiling()
- }
- catch {
- case _: PresentableException => () // if it's already running, no big deal
- }
- finally active = false
- }
-
- def advanceGeneration(desc: String) {
- controller.advanceGeneration(desc)
- }
-
- def isActive = active
-}
diff --git a/starr.number b/starr.number
new file mode 100644
index 0000000..8f1f615
--- /dev/null
+++ b/starr.number
@@ -0,0 +1,2 @@
+starr.version=2.10.3
+starr.use.released=1
\ No newline at end of file
diff --git a/test/ant/test-basic/build.xml b/test/ant/test-basic/build.xml
new file mode 100644
index 0000000..acc2108
--- /dev/null
+++ b/test/ant/test-basic/build.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project name="test-simple" default="compile">
+ <description>Super simple test for Scala</description>
+
+ <target name="init">
+ <!-- Define project CLASSPATH. -->
+ <property name="base.dir" value="../../.."/>
+ <property name="pack.dir" value="${base.dir}/build/pack/"/>
+ <property name="build.dir" value="classes"/>
+ <property name="src.dir" value="src"/>
+ <property name="jvmargs" value=""/>
+ <path id="scala.classpath">
+ <fileset dir="${pack.dir}/lib/"> <include name="*.jar" /> </fileset>
+ </path>
+
+ <!-- Define scala compiler, scaladoc, etc command -->
+ <taskdef resource="scala/tools/ant/antlib.xml">
+ <classpath refid="scala.classpath" />
+ </taskdef>
+ </target>
+
+ <target name="compile" depends="init">
+ <mkdir dir="${build.dir}"/>
+
+ <scalac srcdir="${src.dir}" destdir="${build.dir}"
+ classpathref="scala.classpath" fork="true" target="jvm-1.5"
+ deprecation="no" addparams="-no-specialization"
+ jvmargs="${jvmargs} -XX:+UseConcMarkSweepGC">
+ <include name="**/*.scala"/>
+ </scalac>
+ </target>
+</project>
diff --git a/test/files/jvm/bug680.check b/test/ant/test-basic/src/test-1.scala
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/ant/test-basic/src/test-1.scala
diff --git a/test/benchmarking/AVL-insert-random.scala b/test/benchmarking/AVL-insert-random.scala
new file mode 100644
index 0000000..7299e33
--- /dev/null
+++ b/test/benchmarking/AVL-insert-random.scala
@@ -0,0 +1,67 @@
+package scala.collection
+
+
+
+
+
+class Dummy(val a: Int) extends math.Ordered[Dummy] {
+ def compare(other: Dummy) = this.a - other.a
+ override def toString = a.toString
+}
+
+
+object RandomGlobal {
+ val sz = 500000
+ val data = util.Random.shuffle((0 until sz) map { new Dummy(_) }) toArray;
+}
+
+
+import RandomGlobal._
+
+
+object RandomAVL extends testing.Benchmark {
+
+ def run() {
+ val avl = new collection.mutable.TreeSet[Dummy]
+
+ var i = 0
+ while (i < sz) {
+ val elem = data(i)
+ avl += elem
+ i += 1
+ }
+ }
+
+}
+
+
+object RandomImmutableTreeSet extends testing.Benchmark {
+
+ def run() {
+ var tree = new collection.immutable.TreeSet[Dummy]
+
+ var i = 0
+ while (i < sz) {
+ val elem = data(i)
+ tree += elem
+ i += 1
+ }
+ }
+
+}
+
+
+object RandomJavaTreeSet extends testing.Benchmark {
+
+ def run() {
+ val tree = new java.util.TreeSet[Dummy]
+
+ var i = 0
+ while (i < sz) {
+ val elem = data(i)
+ tree add elem
+ i += 1
+ }
+ }
+
+}
diff --git a/test/benchmarking/AVL-insert.scala b/test/benchmarking/AVL-insert.scala
new file mode 100644
index 0000000..4f3ab39
--- /dev/null
+++ b/test/benchmarking/AVL-insert.scala
@@ -0,0 +1,67 @@
+package scala.collection
+
+
+
+
+
+class Dummy(val a: Int) extends math.Ordered[Dummy] {
+ def compare(other: Dummy) = this.a - other.a
+ override def toString = a.toString
+}
+
+
+object Global {
+ val sz = 500000
+ val data = (0 until sz) map { new Dummy(_) } toArray
+}
+
+
+import Global._
+
+
+object AVL extends testing.Benchmark {
+
+ def run() {
+ val avl = new collection.mutable.TreeSet[Dummy]
+
+ var i = 0
+ while (i < sz) {
+ val elem = data(i)
+ avl += elem
+ i += 1
+ }
+ }
+
+}
+
+
+object ImmutableTreeSet extends testing.Benchmark {
+
+ def run() {
+ var tree = new collection.immutable.TreeSet[Dummy]
+
+ var i = 0
+ while (i < sz) {
+ val elem = data(i)
+ tree += elem
+ i += 1
+ }
+ }
+
+}
+
+
+object JavaTreeSet extends testing.Benchmark {
+
+ def run() {
+ val tree = new java.util.TreeSet[Dummy]
+
+ var i = 0
+ while (i < sz) {
+ val elem = data(i)
+ tree add elem
+ i += 1
+ }
+ }
+
+}
diff --git a/test/benchmarking/ParCtrie-bfs.scala b/test/benchmarking/ParCtrie-bfs.scala
new file mode 100644
index 0000000..59149ff
--- /dev/null
+++ b/test/benchmarking/ParCtrie-bfs.scala
@@ -0,0 +1,73 @@
+
+
+
+
+
+import collection.parallel.mutable.ParCtrie
+
+
+object Bfs extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ val par = sys.props("par").toInt
+
+ type Node = (Int, Int);
+ type Parent = (Int, Int);
+
+ def up(n: Node) = (n._1, n._2 - 1);
+ def down(n: Node) = (n._1, n._2 + 1);
+ def left(n: Node) = (n._1 - 1, n._2);
+ def right(n: Node) = (n._1 + 1, n._2);
+
+ // create a map and a target
+ val target = (length / 2, length / 2);
+ val map = Array.tabulate(length, length)((x, y) => (x % 3) != 0 || (y % 3) != 0 || (x, y) == target)
+ def onMap(n: Node) = n._1 >= 0 && n._1 < length && n._2 >= 0 && n._2 < length
+
+ // open and closed lists
+ val open = ParCtrie[Node, Parent]()
+ val closed = ParCtrie[Node, Parent]()
+
+ collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
+
+ override def setUp() {
+ open.clear()
+ closed.clear()
+
+ // a couple of starting positions
+ open((0, 0)) = null
+ open((length - 1, length - 1)) = null
+ open((0, length - 1)) = null
+ open((length - 1, 0)) = null
+ }
+
+ def run() = {
+ // greedy bfs path search
+ while (open.nonEmpty && !open.contains(target)) {
+ for ((node, parent) <- open) {
+ def expand(next: Node) {
+ if (onMap(next) && map(next._1)(next._2) && !closed.contains(next) && !open.contains(next)) {
+ open(next) = node
+ }
+ }
+ expand(up(node))
+ expand(down(node))
+ expand(left(node))
+ expand(right(node))
+ closed(node) = parent
+ open.remove(node)
+ }
+ }
+ }
+
+ override def tearDown() {
+ // print path
+ var pathnode = open(target)
+ while (closed.contains(pathnode)) {
+ print(pathnode + "->")
+ pathnode = closed(pathnode)
+ }
+ println()
+ }
+
+}
+
diff --git a/test/benchmarking/ParCtrie-map.scala b/test/benchmarking/ParCtrie-map.scala
new file mode 100644
index 0000000..f1d2f56
--- /dev/null
+++ b/test/benchmarking/ParCtrie-map.scala
@@ -0,0 +1,21 @@
+
+
+
+import collection.parallel.mutable.ParCtrie
+
+
+
+object Map extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ val par = sys.props("par").toInt
+ val parctrie = ParCtrie((0 until length) zip (0 until length): _*)
+
+ parctrie.tasksupport = new collection.parallel.ForkJoinTaskSupport(new scala.concurrent.forkjoin.ForkJoinPool(par))
+
+ def run = {
+ parctrie map {
+ kv => kv
+ }
+ }
+}
+
diff --git a/test/benchmarking/ParCtrie-nums.scala b/test/benchmarking/ParCtrie-nums.scala
new file mode 100644
index 0000000..76d1966
--- /dev/null
+++ b/test/benchmarking/ParCtrie-nums.scala
@@ -0,0 +1,39 @@
+
+
+
+
+
+import collection.parallel.mutable.ParCtrie
+
+
+case class Entry(num: Double) {
+ var sqrt = num
+}
+
+
+object Nums extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ val par = sys.props("par").toInt
+ var entries: Seq[Entry] = null
+ var results: ParCtrie[Double, Entry] = null
+
+ collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
+
+ override def setUp() {
+ entries = (1 until length) map { num => Entry(num.toDouble) }
+ results = ParCtrie()
+ for (e <- entries) results += ((e.num, e))
+ }
+
+ def run() = {
+ while (results.nonEmpty) {
+ for ((num, e) <- results) {
+ val nsqrt = 0.5 * (e.sqrt + e.num / e.sqrt)
+ if (math.abs(nsqrt - e.sqrt) < 0.01) {
+ results.remove(num)
+ } else e.sqrt = nsqrt
+ }
+ }
+ }
+}
+
diff --git a/test/benchmarking/ParCtrie-size.scala b/test/benchmarking/ParCtrie-size.scala
new file mode 100644
index 0000000..3f47dc2
--- /dev/null
+++ b/test/benchmarking/ParCtrie-size.scala
@@ -0,0 +1,37 @@
+
+
+
+
+import collection.parallel.mutable.ParTrieMap
+
+
+
+object Size extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ val par = sys.props("par").toInt
+ var parctrie = ParTrieMap((0 until length) zip (0 until length): _*)
+
+ //collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par)
+ val ts = new collection.parallel.ForkJoinTaskSupport(new concurrent.forkjoin.ForkJoinPool(par))
+ parctrie.tasksupport = ts
+
+ def run = {
+ parctrie.size
+ }
+
+ var iteration = 0
+
+ override def tearDown() {
+ iteration += 1
+ if (iteration % 4 == 0) parctrie = ParTrieMap((0 until length) zip (0 until length): _*)
+ parctrie.tasksupport = ts
+ }
+
+}
+
+
+
+
+
+
+
diff --git a/test/benchmarking/ParHashMap.scala b/test/benchmarking/ParHashMap.scala
new file mode 100644
index 0000000..33a378f
--- /dev/null
+++ b/test/benchmarking/ParHashMap.scala
@@ -0,0 +1,33 @@
+
+
+
+import collection.parallel.mutable.ParHashMap
+
+
+
+object Map extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ val par = sys.props("par").toInt
+ val phm = ParHashMap((0 until length) zip (0 until length): _*)
+
+ phm.tasksupport = new collection.parallel.ForkJoinTaskSupport(new scala.concurrent.forkjoin.ForkJoinPool(par))
+
+ def run = {
+ phm map {
+ kv => kv
+ }
+ }
+}
+
+
+object MapSeq extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ val hm = collection.mutable.HashMap((0 until length) zip (0 until length): _*)
+
+ def run = {
+ hm map {
+ kv => kv
+ }
+ }
+}
+
diff --git a/test/benchmarking/ParVector-reduce.scala b/test/benchmarking/ParVector-reduce.scala
new file mode 100644
index 0000000..2b4594e
--- /dev/null
+++ b/test/benchmarking/ParVector-reduce.scala
@@ -0,0 +1,33 @@
+
+
+
+import collection.parallel.immutable.ParVector
+
+
+
+object Reduce extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ val par = sys.props("par").toInt
+ val parvector = ParVector((0 until length): _*)
+
+ parvector.tasksupport = new collection.parallel.ForkJoinTaskSupport(new scala.concurrent.forkjoin.ForkJoinPool(par))
+
+ def run = {
+ parvector reduce {
+ (a, b) => a + b
+ }
+ }
+}
+
+
+object ReduceSeq extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ val vector = collection.immutable.Vector((0 until length): _*)
+
+ def run = {
+ vector reduce {
+ (a, b) => a + b
+ }
+ }
+}
+
diff --git a/test/benchmarking/TreeSetInsert.scala b/test/benchmarking/TreeSetInsert.scala
new file mode 100644
index 0000000..23444aa
--- /dev/null
+++ b/test/benchmarking/TreeSetInsert.scala
@@ -0,0 +1,70 @@
+
+object TreeSetInsert {
+
+ def main(args: Array[String]): Unit = {
+ val n = 500000
+ JavaUtilTS.main(args)
+ MutableTS.main(args)
+ ImmutableTS.main(args)
+ }
+}
+
+class Dummy(val a: Int) extends math.Ordered[Dummy] {
+ def compare(other: Dummy) = this.a - other.a
+
+ override def toString = a.toString
+ }
+
+
+object JavaUtilTS extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
+ var t: java.util.TreeSet[Dummy] = null
+
+ def run = {
+ t = new java.util.TreeSet[Dummy]()
+
+ var i = 0
+ while (i < length) {
+ val elem = data(i)
+ t add elem
+ i += 1
+ }
+ }
+}
+
+
+object MutableTS extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
+ var t: collection.mutable.TreeSet[Dummy] = null
+
+ def run = {
+ t = collection.mutable.TreeSet[Dummy]()
+
+ var i = 0
+ while (i < length) {
+ val elem = data(i)
+ t += elem
+ i += 1
+ }
+ }
+}
+
+
+object ImmutableTS extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
+ var t: collection.immutable.TreeSet[Dummy] = null
+
+ def run = {
+ t = collection.immutable.TreeSet[Dummy]()
+
+ var i = 0
+ while (i < length) {
+ val elem = data(i)
+ t += elem
+ i += 1
+ }
+ }
+}
diff --git a/test/benchmarking/TreeSetInsertRandom.scala b/test/benchmarking/TreeSetInsertRandom.scala
new file mode 100644
index 0000000..7f18254
--- /dev/null
+++ b/test/benchmarking/TreeSetInsertRandom.scala
@@ -0,0 +1,65 @@
+
+object TreeSetInsertRandom {
+
+ def main(args: Array[String]): Unit = {
+ val n = 500000
+ new JavaUtilTS(n).main(args)
+ new MutableTS(n).main(args)
+ new ImmutableTS(n).main(args)
+ }
+}
+
+class Dummy(val a: Int) extends math.Ordered[Dummy] {
+ def compare(other: Dummy) = this.a - other.a
+
+ override def toString = a.toString
+ }
+
+
+class JavaUtilTS(val length: Int) extends testing.Benchmark {
+ var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray
+ var t: java.util.TreeSet[Dummy] = null
+
+ def run = {
+ t = new java.util.TreeSet[Dummy]()
+
+ var i = 0
+ while (i < length) {
+ val elem = data(i)
+ t add elem
+ i += 1
+ }
+ }
+}
+
+class MutableTS(val length: Int) extends testing.Benchmark {
+ var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray
+ var t: collection.mutable.TreeSet[Dummy] = null
+
+ def run = {
+ t = collection.mutable.TreeSet[Dummy]()
+
+ var i = 0
+ while (i < length) {
+ val elem = data(i)
+ t += elem
+ i += 1
+ }
+ }
+}
+
+class ImmutableTS(val length: Int) extends testing.Benchmark {
+ var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray
+ var t: collection.immutable.TreeSet[Dummy] = null
+
+ def run = {
+ t = collection.immutable.TreeSet[Dummy]()
+
+ var i = 0
+ while (i < length) {
+ val elem = data(i)
+ t += elem
+ i += 1
+ }
+ }
+}
diff --git a/test/benchmarking/TreeSetIterator.scala b/test/benchmarking/TreeSetIterator.scala
new file mode 100644
index 0000000..08c20e8
--- /dev/null
+++ b/test/benchmarking/TreeSetIterator.scala
@@ -0,0 +1,69 @@
+
+object TreeSetIterator {
+
+ def main(args: Array[String]): Unit = {
+ val n = 500000
+ JavaUtilTS.main(args)
+ MutableTS.main(args)
+ ImmutableTS.main(args)
+ }
+}
+
+class Dummy(val a: Int) extends math.Ordered[Dummy] {
+ def compare(other: Dummy) = this.a - other.a
+
+ override def toString = a.toString
+ }
+
+
+object JavaUtilTS extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
+ var t: java.util.TreeSet[Dummy] = null
+
+ def run = {
+ t = new java.util.TreeSet[Dummy]()
+ data foreach { a => t add a }
+
+ var i: Dummy = null
+ var it = t.iterator
+ while (it.hasNext) {
+ i = it.next
+ }
+ i
+ }
+}
+
+object MutableTS extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
+ var t: collection.mutable.TreeSet[Dummy] = null
+
+ def run = {
+ t = collection.mutable.TreeSet[Dummy](data: _*)
+
+ var i: Dummy = null
+ var it = t.iterator
+ while (it.hasNext) {
+ i = it.next
+ }
+ i
+ }
+}
+
+object ImmutableTS extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
+ var t: collection.immutable.TreeSet[Dummy] = null
+
+ def run = {
+ t = collection.immutable.TreeSet[Dummy](data: _*)
+
+ var i: Dummy = null
+ var it = t.iterator
+ while (it.hasNext) {
+ i = it.next
+ }
+ i
+ }
+}
diff --git a/test/benchmarking/TreeSetRemove.scala b/test/benchmarking/TreeSetRemove.scala
new file mode 100644
index 0000000..f84066f
--- /dev/null
+++ b/test/benchmarking/TreeSetRemove.scala
@@ -0,0 +1,69 @@
+
+object TreeSetRemove {
+
+ def main(args: Array[String]): Unit = {
+ val n = 500000
+ JavaUtilTS.main(args)
+ MutableTS.main(args)
+ ImmutableTS.main(args)
+ }
+}
+
+class Dummy(val a: Int) extends math.Ordered[Dummy] {
+ def compare(other: Dummy) = this.a - other.a
+
+ override def toString = a.toString
+ }
+
+
+object JavaUtilTS extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
+ var t: java.util.TreeSet[Dummy] = null
+
+ def run = {
+ t = new java.util.TreeSet[Dummy]()
+ data foreach { a => t add a }
+
+ var i = 0
+ while (i < length) {
+ val elem = data(i)
+ t remove elem
+ i += 1
+ }
+ }
+}
+
+object MutableTS extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
+ var t: collection.mutable.TreeSet[Dummy] = null
+
+ def run = {
+ t = collection.mutable.TreeSet[Dummy](data: _*)
+
+ var i = 0
+ while (i < length) {
+ val elem = data(i)
+ t -= elem
+ i += 1
+ }
+ }
+}
+
+object ImmutableTS extends testing.Benchmark {
+ val length = sys.props("length").toInt
+ var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray
+ var t: collection.immutable.TreeSet[Dummy] = null
+
+ def run = {
+ t = collection.immutable.TreeSet[Dummy](data: _*)
+
+ var i = 0
+ while (i < length) {
+ val elem = data(i)
+ t -= elem
+ i += 1
+ }
+ }
+}
diff --git a/test/benchmarking/TreeSetRemoveRandom.scala b/test/benchmarking/TreeSetRemoveRandom.scala
new file mode 100644
index 0000000..4d31167
--- /dev/null
+++ b/test/benchmarking/TreeSetRemoveRandom.scala
@@ -0,0 +1,66 @@
+
+object TreeSetRemoveRandom {
+
+ def main(args: Array[String]): Unit = {
+ val n = 500000
+ new JavaUtilTS(n).main(args)
+ new MutableTS(n).main(args)
+ new ImmutableTS(n).main(args)
+ }
+}
+
+class Dummy(val a: Int) extends math.Ordered[Dummy] {
+ def compare(other: Dummy) = this.a - other.a
+
+ override def toString = a.toString
+ }
+
+
+class JavaUtilTS(val length: Int) extends testing.Benchmark {
+ var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray
+ var t: java.util.TreeSet[Dummy] = null
+
+ def run = {
+ t = new java.util.TreeSet[Dummy]()
+ data foreach { a => t add a }
+
+ var i = 0
+ while (i < length) {
+ val elem = data(i)
+ t remove elem
+ i += 1
+ }
+ }
+}
+
+class MutableTS(val length: Int) extends testing.Benchmark {
+ var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray
+ var t: collection.mutable.TreeSet[Dummy] = null
+
+ def run = {
+ t = collection.mutable.TreeSet[Dummy](data: _*)
+
+ var i = 0
+ while (i < length) {
+ val elem = data(i)
+ t -= elem
+ i += 1
+ }
+ }
+}
+
+class ImmutableTS(val length: Int) extends testing.Benchmark {
+ var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray
+ var t: collection.immutable.TreeSet[Dummy] = null
+
+ def run = {
+ t = collection.immutable.TreeSet[Dummy](data: _*)
+
+ var i = 0
+ while (i < length) {
+ val elem = data(i)
+ t -= elem
+ i += 1
+ }
+ }
+}
diff --git a/test/benchmarking/t6726-patmat-analysis.scala b/test/benchmarking/t6726-patmat-analysis.scala
new file mode 100644
index 0000000..bcb7f6c
--- /dev/null
+++ b/test/benchmarking/t6726-patmat-analysis.scala
@@ -0,0 +1,4005 @@
+trait Foo{
+abstract class Base
+case class Dummy0(x: Int) extends Base
+case class Dummy1(x: Int) extends Base
+case class Dummy2(x: Int) extends Base
+case class Dummy3(x: Int) extends Base
+case class Dummy4(x: Int) extends Base
+case class Dummy5(x: Int) extends Base
+case class Dummy6(x: Int) extends Base
+case class Dummy7(x: Int) extends Base
+case class Dummy8(x: Int) extends Base
+case class Dummy9(x: Int) extends Base
+case class Dummy10(x: Int) extends Base
+case class Dummy11(x: Int) extends Base
+case class Dummy12(x: Int) extends Base
+case class Dummy13(x: Int) extends Base
+case class Dummy14(x: Int) extends Base
+case class Dummy15(x: Int) extends Base
+case class Dummy16(x: Int) extends Base
+case class Dummy17(x: Int) extends Base
+case class Dummy18(x: Int) extends Base
+case class Dummy19(x: Int) extends Base
+case class Dummy20(x: Int) extends Base
+case class Dummy21(x: Int) extends Base
+case class Dummy22(x: Int) extends Base
+case class Dummy23(x: Int) extends Base
+case class Dummy24(x: Int) extends Base
+case class Dummy25(x: Int) extends Base
+case class Dummy26(x: Int) extends Base
+case class Dummy27(x: Int) extends Base
+case class Dummy28(x: Int) extends Base
+case class Dummy29(x: Int) extends Base
+case class Dummy30(x: Int) extends Base
+case class Dummy31(x: Int) extends Base
+case class Dummy32(x: Int) extends Base
+case class Dummy33(x: Int) extends Base
+case class Dummy34(x: Int) extends Base
+case class Dummy35(x: Int) extends Base
+case class Dummy36(x: Int) extends Base
+case class Dummy37(x: Int) extends Base
+case class Dummy38(x: Int) extends Base
+case class Dummy39(x: Int) extends Base
+case class Dummy40(x: Int) extends Base
+case class Dummy41(x: Int) extends Base
+case class Dummy42(x: Int) extends Base
+case class Dummy43(x: Int) extends Base
+case class Dummy44(x: Int) extends Base
+case class Dummy45(x: Int) extends Base
+case class Dummy46(x: Int) extends Base
+case class Dummy47(x: Int) extends Base
+case class Dummy48(x: Int) extends Base
+case class Dummy49(x: Int) extends Base
+case class Dummy50(x: Int) extends Base
+case class Dummy51(x: Int) extends Base
+case class Dummy52(x: Int) extends Base
+case class Dummy53(x: Int) extends Base
+case class Dummy54(x: Int) extends Base
+case class Dummy55(x: Int) extends Base
+case class Dummy56(x: Int) extends Base
+case class Dummy57(x: Int) extends Base
+case class Dummy58(x: Int) extends Base
+case class Dummy59(x: Int) extends Base
+case class Dummy60(x: Int) extends Base
+case class Dummy61(x: Int) extends Base
+case class Dummy62(x: Int) extends Base
+case class Dummy63(x: Int) extends Base
+case class Dummy64(x: Int) extends Base
+case class Dummy65(x: Int) extends Base
+case class Dummy66(x: Int) extends Base
+case class Dummy67(x: Int) extends Base
+case class Dummy68(x: Int) extends Base
+case class Dummy69(x: Int) extends Base
+case class Dummy70(x: Int) extends Base
+case class Dummy71(x: Int) extends Base
+case class Dummy72(x: Int) extends Base
+case class Dummy73(x: Int) extends Base
+case class Dummy74(x: Int) extends Base
+case class Dummy75(x: Int) extends Base
+case class Dummy76(x: Int) extends Base
+case class Dummy77(x: Int) extends Base
+case class Dummy78(x: Int) extends Base
+case class Dummy79(x: Int) extends Base
+case class Dummy80(x: Int) extends Base
+case class Dummy81(x: Int) extends Base
+case class Dummy82(x: Int) extends Base
+case class Dummy83(x: Int) extends Base
+case class Dummy84(x: Int) extends Base
+case class Dummy85(x: Int) extends Base
+case class Dummy86(x: Int) extends Base
+case class Dummy87(x: Int) extends Base
+case class Dummy88(x: Int) extends Base
+case class Dummy89(x: Int) extends Base
+case class Dummy90(x: Int) extends Base
+case class Dummy91(x: Int) extends Base
+case class Dummy92(x: Int) extends Base
+case class Dummy93(x: Int) extends Base
+case class Dummy94(x: Int) extends Base
+case class Dummy95(x: Int) extends Base
+case class Dummy96(x: Int) extends Base
+case class Dummy97(x: Int) extends Base
+case class Dummy98(x: Int) extends Base
+case class Dummy99(x: Int) extends Base
+case class Dummy100(x: Int) extends Base
+case class Dummy101(x: Int) extends Base
+case class Dummy102(x: Int) extends Base
+case class Dummy103(x: Int) extends Base
+case class Dummy104(x: Int) extends Base
+case class Dummy105(x: Int) extends Base
+case class Dummy106(x: Int) extends Base
+case class Dummy107(x: Int) extends Base
+case class Dummy108(x: Int) extends Base
+case class Dummy109(x: Int) extends Base
+case class Dummy110(x: Int) extends Base
+case class Dummy111(x: Int) extends Base
+case class Dummy112(x: Int) extends Base
+case class Dummy113(x: Int) extends Base
+case class Dummy114(x: Int) extends Base
+case class Dummy115(x: Int) extends Base
+case class Dummy116(x: Int) extends Base
+case class Dummy117(x: Int) extends Base
+case class Dummy118(x: Int) extends Base
+case class Dummy119(x: Int) extends Base
+case class Dummy120(x: Int) extends Base
+case class Dummy121(x: Int) extends Base
+case class Dummy122(x: Int) extends Base
+case class Dummy123(x: Int) extends Base
+case class Dummy124(x: Int) extends Base
+case class Dummy125(x: Int) extends Base
+case class Dummy126(x: Int) extends Base
+case class Dummy127(x: Int) extends Base
+case class Dummy128(x: Int) extends Base
+case class Dummy129(x: Int) extends Base
+case class Dummy130(x: Int) extends Base
+case class Dummy131(x: Int) extends Base
+case class Dummy132(x: Int) extends Base
+case class Dummy133(x: Int) extends Base
+case class Dummy134(x: Int) extends Base
+case class Dummy135(x: Int) extends Base
+case class Dummy136(x: Int) extends Base
+case class Dummy137(x: Int) extends Base
+case class Dummy138(x: Int) extends Base
+case class Dummy139(x: Int) extends Base
+case class Dummy140(x: Int) extends Base
+case class Dummy141(x: Int) extends Base
+case class Dummy142(x: Int) extends Base
+case class Dummy143(x: Int) extends Base
+case class Dummy144(x: Int) extends Base
+case class Dummy145(x: Int) extends Base
+case class Dummy146(x: Int) extends Base
+case class Dummy147(x: Int) extends Base
+case class Dummy148(x: Int) extends Base
+case class Dummy149(x: Int) extends Base
+case class Dummy150(x: Int) extends Base
+case class Dummy151(x: Int) extends Base
+case class Dummy152(x: Int) extends Base
+case class Dummy153(x: Int) extends Base
+case class Dummy154(x: Int) extends Base
+case class Dummy155(x: Int) extends Base
+case class Dummy156(x: Int) extends Base
+case class Dummy157(x: Int) extends Base
+case class Dummy158(x: Int) extends Base
+case class Dummy159(x: Int) extends Base
+case class Dummy160(x: Int) extends Base
+case class Dummy161(x: Int) extends Base
+case class Dummy162(x: Int) extends Base
+case class Dummy163(x: Int) extends Base
+case class Dummy164(x: Int) extends Base
+case class Dummy165(x: Int) extends Base
+case class Dummy166(x: Int) extends Base
+case class Dummy167(x: Int) extends Base
+case class Dummy168(x: Int) extends Base
+case class Dummy169(x: Int) extends Base
+case class Dummy170(x: Int) extends Base
+case class Dummy171(x: Int) extends Base
+case class Dummy172(x: Int) extends Base
+case class Dummy173(x: Int) extends Base
+case class Dummy174(x: Int) extends Base
+case class Dummy175(x: Int) extends Base
+case class Dummy176(x: Int) extends Base
+case class Dummy177(x: Int) extends Base
+case class Dummy178(x: Int) extends Base
+case class Dummy179(x: Int) extends Base
+case class Dummy180(x: Int) extends Base
+case class Dummy181(x: Int) extends Base
+case class Dummy182(x: Int) extends Base
+case class Dummy183(x: Int) extends Base
+case class Dummy184(x: Int) extends Base
+case class Dummy185(x: Int) extends Base
+case class Dummy186(x: Int) extends Base
+case class Dummy187(x: Int) extends Base
+case class Dummy188(x: Int) extends Base
+case class Dummy189(x: Int) extends Base
+case class Dummy190(x: Int) extends Base
+case class Dummy191(x: Int) extends Base
+case class Dummy192(x: Int) extends Base
+case class Dummy193(x: Int) extends Base
+case class Dummy194(x: Int) extends Base
+case class Dummy195(x: Int) extends Base
+case class Dummy196(x: Int) extends Base
+case class Dummy197(x: Int) extends Base
+case class Dummy198(x: Int) extends Base
+case class Dummy199(x: Int) extends Base
+case class Dummy200(x: Int) extends Base
+case class Dummy201(x: Int) extends Base
+case class Dummy202(x: Int) extends Base
+case class Dummy203(x: Int) extends Base
+case class Dummy204(x: Int) extends Base
+case class Dummy205(x: Int) extends Base
+case class Dummy206(x: Int) extends Base
+case class Dummy207(x: Int) extends Base
+case class Dummy208(x: Int) extends Base
+case class Dummy209(x: Int) extends Base
+case class Dummy210(x: Int) extends Base
+case class Dummy211(x: Int) extends Base
+case class Dummy212(x: Int) extends Base
+case class Dummy213(x: Int) extends Base
+case class Dummy214(x: Int) extends Base
+case class Dummy215(x: Int) extends Base
+case class Dummy216(x: Int) extends Base
+case class Dummy217(x: Int) extends Base
+case class Dummy218(x: Int) extends Base
+case class Dummy219(x: Int) extends Base
+case class Dummy220(x: Int) extends Base
+case class Dummy221(x: Int) extends Base
+case class Dummy222(x: Int) extends Base
+case class Dummy223(x: Int) extends Base
+case class Dummy224(x: Int) extends Base
+case class Dummy225(x: Int) extends Base
+case class Dummy226(x: Int) extends Base
+case class Dummy227(x: Int) extends Base
+case class Dummy228(x: Int) extends Base
+case class Dummy229(x: Int) extends Base
+case class Dummy230(x: Int) extends Base
+case class Dummy231(x: Int) extends Base
+case class Dummy232(x: Int) extends Base
+case class Dummy233(x: Int) extends Base
+case class Dummy234(x: Int) extends Base
+case class Dummy235(x: Int) extends Base
+case class Dummy236(x: Int) extends Base
+case class Dummy237(x: Int) extends Base
+case class Dummy238(x: Int) extends Base
+case class Dummy239(x: Int) extends Base
+case class Dummy240(x: Int) extends Base
+case class Dummy241(x: Int) extends Base
+case class Dummy242(x: Int) extends Base
+case class Dummy243(x: Int) extends Base
+case class Dummy244(x: Int) extends Base
+case class Dummy245(x: Int) extends Base
+case class Dummy246(x: Int) extends Base
+case class Dummy247(x: Int) extends Base
+case class Dummy248(x: Int) extends Base
+case class Dummy249(x: Int) extends Base
+case class Dummy250(x: Int) extends Base
+case class Dummy251(x: Int) extends Base
+case class Dummy252(x: Int) extends Base
+case class Dummy253(x: Int) extends Base
+case class Dummy254(x: Int) extends Base
+case class Dummy255(x: Int) extends Base
+case class Dummy256(x: Int) extends Base
+case class Dummy257(x: Int) extends Base
+case class Dummy258(x: Int) extends Base
+case class Dummy259(x: Int) extends Base
+case class Dummy260(x: Int) extends Base
+case class Dummy261(x: Int) extends Base
+case class Dummy262(x: Int) extends Base
+case class Dummy263(x: Int) extends Base
+case class Dummy264(x: Int) extends Base
+case class Dummy265(x: Int) extends Base
+case class Dummy266(x: Int) extends Base
+case class Dummy267(x: Int) extends Base
+case class Dummy268(x: Int) extends Base
+case class Dummy269(x: Int) extends Base
+case class Dummy270(x: Int) extends Base
+case class Dummy271(x: Int) extends Base
+case class Dummy272(x: Int) extends Base
+case class Dummy273(x: Int) extends Base
+case class Dummy274(x: Int) extends Base
+case class Dummy275(x: Int) extends Base
+case class Dummy276(x: Int) extends Base
+case class Dummy277(x: Int) extends Base
+case class Dummy278(x: Int) extends Base
+case class Dummy279(x: Int) extends Base
+case class Dummy280(x: Int) extends Base
+case class Dummy281(x: Int) extends Base
+case class Dummy282(x: Int) extends Base
+case class Dummy283(x: Int) extends Base
+case class Dummy284(x: Int) extends Base
+case class Dummy285(x: Int) extends Base
+case class Dummy286(x: Int) extends Base
+case class Dummy287(x: Int) extends Base
+case class Dummy288(x: Int) extends Base
+case class Dummy289(x: Int) extends Base
+case class Dummy290(x: Int) extends Base
+case class Dummy291(x: Int) extends Base
+case class Dummy292(x: Int) extends Base
+case class Dummy293(x: Int) extends Base
+case class Dummy294(x: Int) extends Base
+case class Dummy295(x: Int) extends Base
+case class Dummy296(x: Int) extends Base
+case class Dummy297(x: Int) extends Base
+case class Dummy298(x: Int) extends Base
+case class Dummy299(x: Int) extends Base
+case class Dummy300(x: Int) extends Base
+case class Dummy301(x: Int) extends Base
+case class Dummy302(x: Int) extends Base
+case class Dummy303(x: Int) extends Base
+case class Dummy304(x: Int) extends Base
+case class Dummy305(x: Int) extends Base
+case class Dummy306(x: Int) extends Base
+case class Dummy307(x: Int) extends Base
+case class Dummy308(x: Int) extends Base
+case class Dummy309(x: Int) extends Base
+case class Dummy310(x: Int) extends Base
+case class Dummy311(x: Int) extends Base
+case class Dummy312(x: Int) extends Base
+case class Dummy313(x: Int) extends Base
+case class Dummy314(x: Int) extends Base
+case class Dummy315(x: Int) extends Base
+case class Dummy316(x: Int) extends Base
+case class Dummy317(x: Int) extends Base
+case class Dummy318(x: Int) extends Base
+case class Dummy319(x: Int) extends Base
+case class Dummy320(x: Int) extends Base
+case class Dummy321(x: Int) extends Base
+case class Dummy322(x: Int) extends Base
+case class Dummy323(x: Int) extends Base
+case class Dummy324(x: Int) extends Base
+case class Dummy325(x: Int) extends Base
+case class Dummy326(x: Int) extends Base
+case class Dummy327(x: Int) extends Base
+case class Dummy328(x: Int) extends Base
+case class Dummy329(x: Int) extends Base
+case class Dummy330(x: Int) extends Base
+case class Dummy331(x: Int) extends Base
+case class Dummy332(x: Int) extends Base
+case class Dummy333(x: Int) extends Base
+case class Dummy334(x: Int) extends Base
+case class Dummy335(x: Int) extends Base
+case class Dummy336(x: Int) extends Base
+case class Dummy337(x: Int) extends Base
+case class Dummy338(x: Int) extends Base
+case class Dummy339(x: Int) extends Base
+case class Dummy340(x: Int) extends Base
+case class Dummy341(x: Int) extends Base
+case class Dummy342(x: Int) extends Base
+case class Dummy343(x: Int) extends Base
+case class Dummy344(x: Int) extends Base
+case class Dummy345(x: Int) extends Base
+case class Dummy346(x: Int) extends Base
+case class Dummy347(x: Int) extends Base
+case class Dummy348(x: Int) extends Base
+case class Dummy349(x: Int) extends Base
+case class Dummy350(x: Int) extends Base
+case class Dummy351(x: Int) extends Base
+case class Dummy352(x: Int) extends Base
+case class Dummy353(x: Int) extends Base
+case class Dummy354(x: Int) extends Base
+case class Dummy355(x: Int) extends Base
+case class Dummy356(x: Int) extends Base
+case class Dummy357(x: Int) extends Base
+case class Dummy358(x: Int) extends Base
+case class Dummy359(x: Int) extends Base
+case class Dummy360(x: Int) extends Base
+case class Dummy361(x: Int) extends Base
+case class Dummy362(x: Int) extends Base
+case class Dummy363(x: Int) extends Base
+case class Dummy364(x: Int) extends Base
+case class Dummy365(x: Int) extends Base
+case class Dummy366(x: Int) extends Base
+case class Dummy367(x: Int) extends Base
+case class Dummy368(x: Int) extends Base
+case class Dummy369(x: Int) extends Base
+case class Dummy370(x: Int) extends Base
+case class Dummy371(x: Int) extends Base
+case class Dummy372(x: Int) extends Base
+case class Dummy373(x: Int) extends Base
+case class Dummy374(x: Int) extends Base
+case class Dummy375(x: Int) extends Base
+case class Dummy376(x: Int) extends Base
+case class Dummy377(x: Int) extends Base
+case class Dummy378(x: Int) extends Base
+case class Dummy379(x: Int) extends Base
+case class Dummy380(x: Int) extends Base
+case class Dummy381(x: Int) extends Base
+case class Dummy382(x: Int) extends Base
+case class Dummy383(x: Int) extends Base
+case class Dummy384(x: Int) extends Base
+case class Dummy385(x: Int) extends Base
+case class Dummy386(x: Int) extends Base
+case class Dummy387(x: Int) extends Base
+case class Dummy388(x: Int) extends Base
+case class Dummy389(x: Int) extends Base
+case class Dummy390(x: Int) extends Base
+case class Dummy391(x: Int) extends Base
+case class Dummy392(x: Int) extends Base
+case class Dummy393(x: Int) extends Base
+case class Dummy394(x: Int) extends Base
+case class Dummy395(x: Int) extends Base
+case class Dummy396(x: Int) extends Base
+case class Dummy397(x: Int) extends Base
+case class Dummy398(x: Int) extends Base
+case class Dummy399(x: Int) extends Base
+case class Dummy400(x: Int) extends Base
+case class Dummy401(x: Int) extends Base
+case class Dummy402(x: Int) extends Base
+case class Dummy403(x: Int) extends Base
+case class Dummy404(x: Int) extends Base
+case class Dummy405(x: Int) extends Base
+case class Dummy406(x: Int) extends Base
+case class Dummy407(x: Int) extends Base
+case class Dummy408(x: Int) extends Base
+case class Dummy409(x: Int) extends Base
+case class Dummy410(x: Int) extends Base
+case class Dummy411(x: Int) extends Base
+case class Dummy412(x: Int) extends Base
+case class Dummy413(x: Int) extends Base
+case class Dummy414(x: Int) extends Base
+case class Dummy415(x: Int) extends Base
+case class Dummy416(x: Int) extends Base
+case class Dummy417(x: Int) extends Base
+case class Dummy418(x: Int) extends Base
+case class Dummy419(x: Int) extends Base
+case class Dummy420(x: Int) extends Base
+case class Dummy421(x: Int) extends Base
+case class Dummy422(x: Int) extends Base
+case class Dummy423(x: Int) extends Base
+case class Dummy424(x: Int) extends Base
+case class Dummy425(x: Int) extends Base
+case class Dummy426(x: Int) extends Base
+case class Dummy427(x: Int) extends Base
+case class Dummy428(x: Int) extends Base
+case class Dummy429(x: Int) extends Base
+case class Dummy430(x: Int) extends Base
+case class Dummy431(x: Int) extends Base
+case class Dummy432(x: Int) extends Base
+case class Dummy433(x: Int) extends Base
+case class Dummy434(x: Int) extends Base
+case class Dummy435(x: Int) extends Base
+case class Dummy436(x: Int) extends Base
+case class Dummy437(x: Int) extends Base
+case class Dummy438(x: Int) extends Base
+case class Dummy439(x: Int) extends Base
+case class Dummy440(x: Int) extends Base
+case class Dummy441(x: Int) extends Base
+case class Dummy442(x: Int) extends Base
+case class Dummy443(x: Int) extends Base
+case class Dummy444(x: Int) extends Base
+case class Dummy445(x: Int) extends Base
+case class Dummy446(x: Int) extends Base
+case class Dummy447(x: Int) extends Base
+case class Dummy448(x: Int) extends Base
+case class Dummy449(x: Int) extends Base
+case class Dummy450(x: Int) extends Base
+case class Dummy451(x: Int) extends Base
+case class Dummy452(x: Int) extends Base
+case class Dummy453(x: Int) extends Base
+case class Dummy454(x: Int) extends Base
+case class Dummy455(x: Int) extends Base
+case class Dummy456(x: Int) extends Base
+case class Dummy457(x: Int) extends Base
+case class Dummy458(x: Int) extends Base
+case class Dummy459(x: Int) extends Base
+case class Dummy460(x: Int) extends Base
+case class Dummy461(x: Int) extends Base
+case class Dummy462(x: Int) extends Base
+case class Dummy463(x: Int) extends Base
+case class Dummy464(x: Int) extends Base
+case class Dummy465(x: Int) extends Base
+case class Dummy466(x: Int) extends Base
+case class Dummy467(x: Int) extends Base
+case class Dummy468(x: Int) extends Base
+case class Dummy469(x: Int) extends Base
+case class Dummy470(x: Int) extends Base
+case class Dummy471(x: Int) extends Base
+case class Dummy472(x: Int) extends Base
+case class Dummy473(x: Int) extends Base
+case class Dummy474(x: Int) extends Base
+case class Dummy475(x: Int) extends Base
+case class Dummy476(x: Int) extends Base
+case class Dummy477(x: Int) extends Base
+case class Dummy478(x: Int) extends Base
+case class Dummy479(x: Int) extends Base
+case class Dummy480(x: Int) extends Base
+case class Dummy481(x: Int) extends Base
+case class Dummy482(x: Int) extends Base
+case class Dummy483(x: Int) extends Base
+case class Dummy484(x: Int) extends Base
+case class Dummy485(x: Int) extends Base
+case class Dummy486(x: Int) extends Base
+case class Dummy487(x: Int) extends Base
+case class Dummy488(x: Int) extends Base
+case class Dummy489(x: Int) extends Base
+case class Dummy490(x: Int) extends Base
+case class Dummy491(x: Int) extends Base
+case class Dummy492(x: Int) extends Base
+case class Dummy493(x: Int) extends Base
+case class Dummy494(x: Int) extends Base
+case class Dummy495(x: Int) extends Base
+case class Dummy496(x: Int) extends Base
+case class Dummy497(x: Int) extends Base
+case class Dummy498(x: Int) extends Base
+case class Dummy499(x: Int) extends Base
+case class Dummy500(x: Int) extends Base
+case class Dummy501(x: Int) extends Base
+case class Dummy502(x: Int) extends Base
+case class Dummy503(x: Int) extends Base
+case class Dummy504(x: Int) extends Base
+case class Dummy505(x: Int) extends Base
+case class Dummy506(x: Int) extends Base
+case class Dummy507(x: Int) extends Base
+case class Dummy508(x: Int) extends Base
+case class Dummy509(x: Int) extends Base
+case class Dummy510(x: Int) extends Base
+case class Dummy511(x: Int) extends Base
+case class Dummy512(x: Int) extends Base
+case class Dummy513(x: Int) extends Base
+case class Dummy514(x: Int) extends Base
+case class Dummy515(x: Int) extends Base
+case class Dummy516(x: Int) extends Base
+case class Dummy517(x: Int) extends Base
+case class Dummy518(x: Int) extends Base
+case class Dummy519(x: Int) extends Base
+case class Dummy520(x: Int) extends Base
+case class Dummy521(x: Int) extends Base
+case class Dummy522(x: Int) extends Base
+case class Dummy523(x: Int) extends Base
+case class Dummy524(x: Int) extends Base
+case class Dummy525(x: Int) extends Base
+case class Dummy526(x: Int) extends Base
+case class Dummy527(x: Int) extends Base
+case class Dummy528(x: Int) extends Base
+case class Dummy529(x: Int) extends Base
+case class Dummy530(x: Int) extends Base
+case class Dummy531(x: Int) extends Base
+case class Dummy532(x: Int) extends Base
+case class Dummy533(x: Int) extends Base
+case class Dummy534(x: Int) extends Base
+case class Dummy535(x: Int) extends Base
+case class Dummy536(x: Int) extends Base
+case class Dummy537(x: Int) extends Base
+case class Dummy538(x: Int) extends Base
+case class Dummy539(x: Int) extends Base
+case class Dummy540(x: Int) extends Base
+case class Dummy541(x: Int) extends Base
+case class Dummy542(x: Int) extends Base
+case class Dummy543(x: Int) extends Base
+case class Dummy544(x: Int) extends Base
+case class Dummy545(x: Int) extends Base
+case class Dummy546(x: Int) extends Base
+case class Dummy547(x: Int) extends Base
+case class Dummy548(x: Int) extends Base
+case class Dummy549(x: Int) extends Base
+case class Dummy550(x: Int) extends Base
+case class Dummy551(x: Int) extends Base
+case class Dummy552(x: Int) extends Base
+case class Dummy553(x: Int) extends Base
+case class Dummy554(x: Int) extends Base
+case class Dummy555(x: Int) extends Base
+case class Dummy556(x: Int) extends Base
+case class Dummy557(x: Int) extends Base
+case class Dummy558(x: Int) extends Base
+case class Dummy559(x: Int) extends Base
+case class Dummy560(x: Int) extends Base
+case class Dummy561(x: Int) extends Base
+case class Dummy562(x: Int) extends Base
+case class Dummy563(x: Int) extends Base
+case class Dummy564(x: Int) extends Base
+case class Dummy565(x: Int) extends Base
+case class Dummy566(x: Int) extends Base
+case class Dummy567(x: Int) extends Base
+case class Dummy568(x: Int) extends Base
+case class Dummy569(x: Int) extends Base
+case class Dummy570(x: Int) extends Base
+case class Dummy571(x: Int) extends Base
+case class Dummy572(x: Int) extends Base
+case class Dummy573(x: Int) extends Base
+case class Dummy574(x: Int) extends Base
+case class Dummy575(x: Int) extends Base
+case class Dummy576(x: Int) extends Base
+case class Dummy577(x: Int) extends Base
+case class Dummy578(x: Int) extends Base
+case class Dummy579(x: Int) extends Base
+case class Dummy580(x: Int) extends Base
+case class Dummy581(x: Int) extends Base
+case class Dummy582(x: Int) extends Base
+case class Dummy583(x: Int) extends Base
+case class Dummy584(x: Int) extends Base
+case class Dummy585(x: Int) extends Base
+case class Dummy586(x: Int) extends Base
+case class Dummy587(x: Int) extends Base
+case class Dummy588(x: Int) extends Base
+case class Dummy589(x: Int) extends Base
+case class Dummy590(x: Int) extends Base
+case class Dummy591(x: Int) extends Base
+case class Dummy592(x: Int) extends Base
+case class Dummy593(x: Int) extends Base
+case class Dummy594(x: Int) extends Base
+case class Dummy595(x: Int) extends Base
+case class Dummy596(x: Int) extends Base
+case class Dummy597(x: Int) extends Base
+case class Dummy598(x: Int) extends Base
+case class Dummy599(x: Int) extends Base
+case class Dummy600(x: Int) extends Base
+case class Dummy601(x: Int) extends Base
+case class Dummy602(x: Int) extends Base
+case class Dummy603(x: Int) extends Base
+case class Dummy604(x: Int) extends Base
+case class Dummy605(x: Int) extends Base
+case class Dummy606(x: Int) extends Base
+case class Dummy607(x: Int) extends Base
+case class Dummy608(x: Int) extends Base
+case class Dummy609(x: Int) extends Base
+case class Dummy610(x: Int) extends Base
+case class Dummy611(x: Int) extends Base
+case class Dummy612(x: Int) extends Base
+case class Dummy613(x: Int) extends Base
+case class Dummy614(x: Int) extends Base
+case class Dummy615(x: Int) extends Base
+case class Dummy616(x: Int) extends Base
+case class Dummy617(x: Int) extends Base
+case class Dummy618(x: Int) extends Base
+case class Dummy619(x: Int) extends Base
+case class Dummy620(x: Int) extends Base
+case class Dummy621(x: Int) extends Base
+case class Dummy622(x: Int) extends Base
+case class Dummy623(x: Int) extends Base
+case class Dummy624(x: Int) extends Base
+case class Dummy625(x: Int) extends Base
+case class Dummy626(x: Int) extends Base
+case class Dummy627(x: Int) extends Base
+case class Dummy628(x: Int) extends Base
+case class Dummy629(x: Int) extends Base
+case class Dummy630(x: Int) extends Base
+case class Dummy631(x: Int) extends Base
+case class Dummy632(x: Int) extends Base
+case class Dummy633(x: Int) extends Base
+case class Dummy634(x: Int) extends Base
+case class Dummy635(x: Int) extends Base
+case class Dummy636(x: Int) extends Base
+case class Dummy637(x: Int) extends Base
+case class Dummy638(x: Int) extends Base
+case class Dummy639(x: Int) extends Base
+case class Dummy640(x: Int) extends Base
+case class Dummy641(x: Int) extends Base
+case class Dummy642(x: Int) extends Base
+case class Dummy643(x: Int) extends Base
+case class Dummy644(x: Int) extends Base
+case class Dummy645(x: Int) extends Base
+case class Dummy646(x: Int) extends Base
+case class Dummy647(x: Int) extends Base
+case class Dummy648(x: Int) extends Base
+case class Dummy649(x: Int) extends Base
+case class Dummy650(x: Int) extends Base
+case class Dummy651(x: Int) extends Base
+case class Dummy652(x: Int) extends Base
+case class Dummy653(x: Int) extends Base
+case class Dummy654(x: Int) extends Base
+case class Dummy655(x: Int) extends Base
+case class Dummy656(x: Int) extends Base
+case class Dummy657(x: Int) extends Base
+case class Dummy658(x: Int) extends Base
+case class Dummy659(x: Int) extends Base
+case class Dummy660(x: Int) extends Base
+case class Dummy661(x: Int) extends Base
+case class Dummy662(x: Int) extends Base
+case class Dummy663(x: Int) extends Base
+case class Dummy664(x: Int) extends Base
+case class Dummy665(x: Int) extends Base
+case class Dummy666(x: Int) extends Base
+case class Dummy667(x: Int) extends Base
+case class Dummy668(x: Int) extends Base
+case class Dummy669(x: Int) extends Base
+case class Dummy670(x: Int) extends Base
+case class Dummy671(x: Int) extends Base
+case class Dummy672(x: Int) extends Base
+case class Dummy673(x: Int) extends Base
+case class Dummy674(x: Int) extends Base
+case class Dummy675(x: Int) extends Base
+case class Dummy676(x: Int) extends Base
+case class Dummy677(x: Int) extends Base
+case class Dummy678(x: Int) extends Base
+case class Dummy679(x: Int) extends Base
+case class Dummy680(x: Int) extends Base
+case class Dummy681(x: Int) extends Base
+case class Dummy682(x: Int) extends Base
+case class Dummy683(x: Int) extends Base
+case class Dummy684(x: Int) extends Base
+case class Dummy685(x: Int) extends Base
+case class Dummy686(x: Int) extends Base
+case class Dummy687(x: Int) extends Base
+case class Dummy688(x: Int) extends Base
+case class Dummy689(x: Int) extends Base
+case class Dummy690(x: Int) extends Base
+case class Dummy691(x: Int) extends Base
+case class Dummy692(x: Int) extends Base
+case class Dummy693(x: Int) extends Base
+case class Dummy694(x: Int) extends Base
+case class Dummy695(x: Int) extends Base
+case class Dummy696(x: Int) extends Base
+case class Dummy697(x: Int) extends Base
+case class Dummy698(x: Int) extends Base
+case class Dummy699(x: Int) extends Base
+case class Dummy700(x: Int) extends Base
+case class Dummy701(x: Int) extends Base
+case class Dummy702(x: Int) extends Base
+case class Dummy703(x: Int) extends Base
+case class Dummy704(x: Int) extends Base
+case class Dummy705(x: Int) extends Base
+case class Dummy706(x: Int) extends Base
+case class Dummy707(x: Int) extends Base
+case class Dummy708(x: Int) extends Base
+case class Dummy709(x: Int) extends Base
+case class Dummy710(x: Int) extends Base
+case class Dummy711(x: Int) extends Base
+case class Dummy712(x: Int) extends Base
+case class Dummy713(x: Int) extends Base
+case class Dummy714(x: Int) extends Base
+case class Dummy715(x: Int) extends Base
+case class Dummy716(x: Int) extends Base
+case class Dummy717(x: Int) extends Base
+case class Dummy718(x: Int) extends Base
+case class Dummy719(x: Int) extends Base
+case class Dummy720(x: Int) extends Base
+case class Dummy721(x: Int) extends Base
+case class Dummy722(x: Int) extends Base
+case class Dummy723(x: Int) extends Base
+case class Dummy724(x: Int) extends Base
+case class Dummy725(x: Int) extends Base
+case class Dummy726(x: Int) extends Base
+case class Dummy727(x: Int) extends Base
+case class Dummy728(x: Int) extends Base
+case class Dummy729(x: Int) extends Base
+case class Dummy730(x: Int) extends Base
+case class Dummy731(x: Int) extends Base
+case class Dummy732(x: Int) extends Base
+case class Dummy733(x: Int) extends Base
+case class Dummy734(x: Int) extends Base
+case class Dummy735(x: Int) extends Base
+case class Dummy736(x: Int) extends Base
+case class Dummy737(x: Int) extends Base
+case class Dummy738(x: Int) extends Base
+case class Dummy739(x: Int) extends Base
+case class Dummy740(x: Int) extends Base
+case class Dummy741(x: Int) extends Base
+case class Dummy742(x: Int) extends Base
+case class Dummy743(x: Int) extends Base
+case class Dummy744(x: Int) extends Base
+case class Dummy745(x: Int) extends Base
+case class Dummy746(x: Int) extends Base
+case class Dummy747(x: Int) extends Base
+case class Dummy748(x: Int) extends Base
+case class Dummy749(x: Int) extends Base
+case class Dummy750(x: Int) extends Base
+case class Dummy751(x: Int) extends Base
+case class Dummy752(x: Int) extends Base
+case class Dummy753(x: Int) extends Base
+case class Dummy754(x: Int) extends Base
+case class Dummy755(x: Int) extends Base
+case class Dummy756(x: Int) extends Base
+case class Dummy757(x: Int) extends Base
+case class Dummy758(x: Int) extends Base
+case class Dummy759(x: Int) extends Base
+case class Dummy760(x: Int) extends Base
+case class Dummy761(x: Int) extends Base
+case class Dummy762(x: Int) extends Base
+case class Dummy763(x: Int) extends Base
+case class Dummy764(x: Int) extends Base
+case class Dummy765(x: Int) extends Base
+case class Dummy766(x: Int) extends Base
+case class Dummy767(x: Int) extends Base
+case class Dummy768(x: Int) extends Base
+case class Dummy769(x: Int) extends Base
+case class Dummy770(x: Int) extends Base
+case class Dummy771(x: Int) extends Base
+case class Dummy772(x: Int) extends Base
+case class Dummy773(x: Int) extends Base
+case class Dummy774(x: Int) extends Base
+case class Dummy775(x: Int) extends Base
+case class Dummy776(x: Int) extends Base
+case class Dummy777(x: Int) extends Base
+case class Dummy778(x: Int) extends Base
+case class Dummy779(x: Int) extends Base
+case class Dummy780(x: Int) extends Base
+case class Dummy781(x: Int) extends Base
+case class Dummy782(x: Int) extends Base
+case class Dummy783(x: Int) extends Base
+case class Dummy784(x: Int) extends Base
+case class Dummy785(x: Int) extends Base
+case class Dummy786(x: Int) extends Base
+case class Dummy787(x: Int) extends Base
+case class Dummy788(x: Int) extends Base
+case class Dummy789(x: Int) extends Base
+case class Dummy790(x: Int) extends Base
+case class Dummy791(x: Int) extends Base
+case class Dummy792(x: Int) extends Base
+case class Dummy793(x: Int) extends Base
+case class Dummy794(x: Int) extends Base
+case class Dummy795(x: Int) extends Base
+case class Dummy796(x: Int) extends Base
+case class Dummy797(x: Int) extends Base
+case class Dummy798(x: Int) extends Base
+case class Dummy799(x: Int) extends Base
+case class Dummy800(x: Int) extends Base
+case class Dummy801(x: Int) extends Base
+case class Dummy802(x: Int) extends Base
+case class Dummy803(x: Int) extends Base
+case class Dummy804(x: Int) extends Base
+case class Dummy805(x: Int) extends Base
+case class Dummy806(x: Int) extends Base
+case class Dummy807(x: Int) extends Base
+case class Dummy808(x: Int) extends Base
+case class Dummy809(x: Int) extends Base
+case class Dummy810(x: Int) extends Base
+case class Dummy811(x: Int) extends Base
+case class Dummy812(x: Int) extends Base
+case class Dummy813(x: Int) extends Base
+case class Dummy814(x: Int) extends Base
+case class Dummy815(x: Int) extends Base
+case class Dummy816(x: Int) extends Base
+case class Dummy817(x: Int) extends Base
+case class Dummy818(x: Int) extends Base
+case class Dummy819(x: Int) extends Base
+case class Dummy820(x: Int) extends Base
+case class Dummy821(x: Int) extends Base
+case class Dummy822(x: Int) extends Base
+case class Dummy823(x: Int) extends Base
+case class Dummy824(x: Int) extends Base
+case class Dummy825(x: Int) extends Base
+case class Dummy826(x: Int) extends Base
+case class Dummy827(x: Int) extends Base
+case class Dummy828(x: Int) extends Base
+case class Dummy829(x: Int) extends Base
+case class Dummy830(x: Int) extends Base
+case class Dummy831(x: Int) extends Base
+case class Dummy832(x: Int) extends Base
+case class Dummy833(x: Int) extends Base
+case class Dummy834(x: Int) extends Base
+case class Dummy835(x: Int) extends Base
+case class Dummy836(x: Int) extends Base
+case class Dummy837(x: Int) extends Base
+case class Dummy838(x: Int) extends Base
+case class Dummy839(x: Int) extends Base
+case class Dummy840(x: Int) extends Base
+case class Dummy841(x: Int) extends Base
+case class Dummy842(x: Int) extends Base
+case class Dummy843(x: Int) extends Base
+case class Dummy844(x: Int) extends Base
+case class Dummy845(x: Int) extends Base
+case class Dummy846(x: Int) extends Base
+case class Dummy847(x: Int) extends Base
+case class Dummy848(x: Int) extends Base
+case class Dummy849(x: Int) extends Base
+case class Dummy850(x: Int) extends Base
+case class Dummy851(x: Int) extends Base
+case class Dummy852(x: Int) extends Base
+case class Dummy853(x: Int) extends Base
+case class Dummy854(x: Int) extends Base
+case class Dummy855(x: Int) extends Base
+case class Dummy856(x: Int) extends Base
+case class Dummy857(x: Int) extends Base
+case class Dummy858(x: Int) extends Base
+case class Dummy859(x: Int) extends Base
+case class Dummy860(x: Int) extends Base
+case class Dummy861(x: Int) extends Base
+case class Dummy862(x: Int) extends Base
+case class Dummy863(x: Int) extends Base
+case class Dummy864(x: Int) extends Base
+case class Dummy865(x: Int) extends Base
+case class Dummy866(x: Int) extends Base
+case class Dummy867(x: Int) extends Base
+case class Dummy868(x: Int) extends Base
+case class Dummy869(x: Int) extends Base
+case class Dummy870(x: Int) extends Base
+case class Dummy871(x: Int) extends Base
+case class Dummy872(x: Int) extends Base
+case class Dummy873(x: Int) extends Base
+case class Dummy874(x: Int) extends Base
+case class Dummy875(x: Int) extends Base
+case class Dummy876(x: Int) extends Base
+case class Dummy877(x: Int) extends Base
+case class Dummy878(x: Int) extends Base
+case class Dummy879(x: Int) extends Base
+case class Dummy880(x: Int) extends Base
+case class Dummy881(x: Int) extends Base
+case class Dummy882(x: Int) extends Base
+case class Dummy883(x: Int) extends Base
+case class Dummy884(x: Int) extends Base
+case class Dummy885(x: Int) extends Base
+case class Dummy886(x: Int) extends Base
+case class Dummy887(x: Int) extends Base
+case class Dummy888(x: Int) extends Base
+case class Dummy889(x: Int) extends Base
+case class Dummy890(x: Int) extends Base
+case class Dummy891(x: Int) extends Base
+case class Dummy892(x: Int) extends Base
+case class Dummy893(x: Int) extends Base
+case class Dummy894(x: Int) extends Base
+case class Dummy895(x: Int) extends Base
+case class Dummy896(x: Int) extends Base
+case class Dummy897(x: Int) extends Base
+case class Dummy898(x: Int) extends Base
+case class Dummy899(x: Int) extends Base
+case class Dummy900(x: Int) extends Base
+case class Dummy901(x: Int) extends Base
+case class Dummy902(x: Int) extends Base
+case class Dummy903(x: Int) extends Base
+case class Dummy904(x: Int) extends Base
+case class Dummy905(x: Int) extends Base
+case class Dummy906(x: Int) extends Base
+case class Dummy907(x: Int) extends Base
+case class Dummy908(x: Int) extends Base
+case class Dummy909(x: Int) extends Base
+case class Dummy910(x: Int) extends Base
+case class Dummy911(x: Int) extends Base
+case class Dummy912(x: Int) extends Base
+case class Dummy913(x: Int) extends Base
+case class Dummy914(x: Int) extends Base
+case class Dummy915(x: Int) extends Base
+case class Dummy916(x: Int) extends Base
+case class Dummy917(x: Int) extends Base
+case class Dummy918(x: Int) extends Base
+case class Dummy919(x: Int) extends Base
+case class Dummy920(x: Int) extends Base
+case class Dummy921(x: Int) extends Base
+case class Dummy922(x: Int) extends Base
+case class Dummy923(x: Int) extends Base
+case class Dummy924(x: Int) extends Base
+case class Dummy925(x: Int) extends Base
+case class Dummy926(x: Int) extends Base
+case class Dummy927(x: Int) extends Base
+case class Dummy928(x: Int) extends Base
+case class Dummy929(x: Int) extends Base
+case class Dummy930(x: Int) extends Base
+case class Dummy931(x: Int) extends Base
+case class Dummy932(x: Int) extends Base
+case class Dummy933(x: Int) extends Base
+case class Dummy934(x: Int) extends Base
+case class Dummy935(x: Int) extends Base
+case class Dummy936(x: Int) extends Base
+case class Dummy937(x: Int) extends Base
+case class Dummy938(x: Int) extends Base
+case class Dummy939(x: Int) extends Base
+case class Dummy940(x: Int) extends Base
+case class Dummy941(x: Int) extends Base
+case class Dummy942(x: Int) extends Base
+case class Dummy943(x: Int) extends Base
+case class Dummy944(x: Int) extends Base
+case class Dummy945(x: Int) extends Base
+case class Dummy946(x: Int) extends Base
+case class Dummy947(x: Int) extends Base
+case class Dummy948(x: Int) extends Base
+case class Dummy949(x: Int) extends Base
+case class Dummy950(x: Int) extends Base
+case class Dummy951(x: Int) extends Base
+case class Dummy952(x: Int) extends Base
+case class Dummy953(x: Int) extends Base
+case class Dummy954(x: Int) extends Base
+case class Dummy955(x: Int) extends Base
+case class Dummy956(x: Int) extends Base
+case class Dummy957(x: Int) extends Base
+case class Dummy958(x: Int) extends Base
+case class Dummy959(x: Int) extends Base
+case class Dummy960(x: Int) extends Base
+case class Dummy961(x: Int) extends Base
+case class Dummy962(x: Int) extends Base
+case class Dummy963(x: Int) extends Base
+case class Dummy964(x: Int) extends Base
+case class Dummy965(x: Int) extends Base
+case class Dummy966(x: Int) extends Base
+case class Dummy967(x: Int) extends Base
+case class Dummy968(x: Int) extends Base
+case class Dummy969(x: Int) extends Base
+case class Dummy970(x: Int) extends Base
+case class Dummy971(x: Int) extends Base
+case class Dummy972(x: Int) extends Base
+case class Dummy973(x: Int) extends Base
+case class Dummy974(x: Int) extends Base
+case class Dummy975(x: Int) extends Base
+case class Dummy976(x: Int) extends Base
+case class Dummy977(x: Int) extends Base
+case class Dummy978(x: Int) extends Base
+case class Dummy979(x: Int) extends Base
+case class Dummy980(x: Int) extends Base
+case class Dummy981(x: Int) extends Base
+case class Dummy982(x: Int) extends Base
+case class Dummy983(x: Int) extends Base
+case class Dummy984(x: Int) extends Base
+case class Dummy985(x: Int) extends Base
+case class Dummy986(x: Int) extends Base
+case class Dummy987(x: Int) extends Base
+case class Dummy988(x: Int) extends Base
+case class Dummy989(x: Int) extends Base
+case class Dummy990(x: Int) extends Base
+case class Dummy991(x: Int) extends Base
+case class Dummy992(x: Int) extends Base
+case class Dummy993(x: Int) extends Base
+case class Dummy994(x: Int) extends Base
+case class Dummy995(x: Int) extends Base
+case class Dummy996(x: Int) extends Base
+case class Dummy997(x: Int) extends Base
+case class Dummy998(x: Int) extends Base
+case class Dummy999(x: Int) extends Base
+case class Dummy1000(x: Int) extends Base
+case class Dummy1001(x: Int) extends Base
+case class Dummy1002(x: Int) extends Base
+case class Dummy1003(x: Int) extends Base
+case class Dummy1004(x: Int) extends Base
+case class Dummy1005(x: Int) extends Base
+case class Dummy1006(x: Int) extends Base
+case class Dummy1007(x: Int) extends Base
+case class Dummy1008(x: Int) extends Base
+case class Dummy1009(x: Int) extends Base
+case class Dummy1010(x: Int) extends Base
+case class Dummy1011(x: Int) extends Base
+case class Dummy1012(x: Int) extends Base
+case class Dummy1013(x: Int) extends Base
+case class Dummy1014(x: Int) extends Base
+case class Dummy1015(x: Int) extends Base
+case class Dummy1016(x: Int) extends Base
+case class Dummy1017(x: Int) extends Base
+case class Dummy1018(x: Int) extends Base
+case class Dummy1019(x: Int) extends Base
+case class Dummy1020(x: Int) extends Base
+case class Dummy1021(x: Int) extends Base
+case class Dummy1022(x: Int) extends Base
+case class Dummy1023(x: Int) extends Base
+case class Dummy1024(x: Int) extends Base
+case class Dummy1025(x: Int) extends Base
+case class Dummy1026(x: Int) extends Base
+case class Dummy1027(x: Int) extends Base
+case class Dummy1028(x: Int) extends Base
+case class Dummy1029(x: Int) extends Base
+case class Dummy1030(x: Int) extends Base
+case class Dummy1031(x: Int) extends Base
+case class Dummy1032(x: Int) extends Base
+case class Dummy1033(x: Int) extends Base
+case class Dummy1034(x: Int) extends Base
+case class Dummy1035(x: Int) extends Base
+case class Dummy1036(x: Int) extends Base
+case class Dummy1037(x: Int) extends Base
+case class Dummy1038(x: Int) extends Base
+case class Dummy1039(x: Int) extends Base
+case class Dummy1040(x: Int) extends Base
+case class Dummy1041(x: Int) extends Base
+case class Dummy1042(x: Int) extends Base
+case class Dummy1043(x: Int) extends Base
+case class Dummy1044(x: Int) extends Base
+case class Dummy1045(x: Int) extends Base
+case class Dummy1046(x: Int) extends Base
+case class Dummy1047(x: Int) extends Base
+case class Dummy1048(x: Int) extends Base
+case class Dummy1049(x: Int) extends Base
+case class Dummy1050(x: Int) extends Base
+case class Dummy1051(x: Int) extends Base
+case class Dummy1052(x: Int) extends Base
+case class Dummy1053(x: Int) extends Base
+case class Dummy1054(x: Int) extends Base
+case class Dummy1055(x: Int) extends Base
+case class Dummy1056(x: Int) extends Base
+case class Dummy1057(x: Int) extends Base
+case class Dummy1058(x: Int) extends Base
+case class Dummy1059(x: Int) extends Base
+case class Dummy1060(x: Int) extends Base
+case class Dummy1061(x: Int) extends Base
+case class Dummy1062(x: Int) extends Base
+case class Dummy1063(x: Int) extends Base
+case class Dummy1064(x: Int) extends Base
+case class Dummy1065(x: Int) extends Base
+case class Dummy1066(x: Int) extends Base
+case class Dummy1067(x: Int) extends Base
+case class Dummy1068(x: Int) extends Base
+case class Dummy1069(x: Int) extends Base
+case class Dummy1070(x: Int) extends Base
+case class Dummy1071(x: Int) extends Base
+case class Dummy1072(x: Int) extends Base
+case class Dummy1073(x: Int) extends Base
+case class Dummy1074(x: Int) extends Base
+case class Dummy1075(x: Int) extends Base
+case class Dummy1076(x: Int) extends Base
+case class Dummy1077(x: Int) extends Base
+case class Dummy1078(x: Int) extends Base
+case class Dummy1079(x: Int) extends Base
+case class Dummy1080(x: Int) extends Base
+case class Dummy1081(x: Int) extends Base
+case class Dummy1082(x: Int) extends Base
+case class Dummy1083(x: Int) extends Base
+case class Dummy1084(x: Int) extends Base
+case class Dummy1085(x: Int) extends Base
+case class Dummy1086(x: Int) extends Base
+case class Dummy1087(x: Int) extends Base
+case class Dummy1088(x: Int) extends Base
+case class Dummy1089(x: Int) extends Base
+case class Dummy1090(x: Int) extends Base
+case class Dummy1091(x: Int) extends Base
+case class Dummy1092(x: Int) extends Base
+case class Dummy1093(x: Int) extends Base
+case class Dummy1094(x: Int) extends Base
+case class Dummy1095(x: Int) extends Base
+case class Dummy1096(x: Int) extends Base
+case class Dummy1097(x: Int) extends Base
+case class Dummy1098(x: Int) extends Base
+case class Dummy1099(x: Int) extends Base
+case class Dummy1100(x: Int) extends Base
+case class Dummy1101(x: Int) extends Base
+case class Dummy1102(x: Int) extends Base
+case class Dummy1103(x: Int) extends Base
+case class Dummy1104(x: Int) extends Base
+case class Dummy1105(x: Int) extends Base
+case class Dummy1106(x: Int) extends Base
+case class Dummy1107(x: Int) extends Base
+case class Dummy1108(x: Int) extends Base
+case class Dummy1109(x: Int) extends Base
+case class Dummy1110(x: Int) extends Base
+case class Dummy1111(x: Int) extends Base
+case class Dummy1112(x: Int) extends Base
+case class Dummy1113(x: Int) extends Base
+case class Dummy1114(x: Int) extends Base
+case class Dummy1115(x: Int) extends Base
+case class Dummy1116(x: Int) extends Base
+case class Dummy1117(x: Int) extends Base
+case class Dummy1118(x: Int) extends Base
+case class Dummy1119(x: Int) extends Base
+case class Dummy1120(x: Int) extends Base
+case class Dummy1121(x: Int) extends Base
+case class Dummy1122(x: Int) extends Base
+case class Dummy1123(x: Int) extends Base
+case class Dummy1124(x: Int) extends Base
+case class Dummy1125(x: Int) extends Base
+case class Dummy1126(x: Int) extends Base
+case class Dummy1127(x: Int) extends Base
+case class Dummy1128(x: Int) extends Base
+case class Dummy1129(x: Int) extends Base
+case class Dummy1130(x: Int) extends Base
+case class Dummy1131(x: Int) extends Base
+case class Dummy1132(x: Int) extends Base
+case class Dummy1133(x: Int) extends Base
+case class Dummy1134(x: Int) extends Base
+case class Dummy1135(x: Int) extends Base
+case class Dummy1136(x: Int) extends Base
+case class Dummy1137(x: Int) extends Base
+case class Dummy1138(x: Int) extends Base
+case class Dummy1139(x: Int) extends Base
+case class Dummy1140(x: Int) extends Base
+case class Dummy1141(x: Int) extends Base
+case class Dummy1142(x: Int) extends Base
+case class Dummy1143(x: Int) extends Base
+case class Dummy1144(x: Int) extends Base
+case class Dummy1145(x: Int) extends Base
+case class Dummy1146(x: Int) extends Base
+case class Dummy1147(x: Int) extends Base
+case class Dummy1148(x: Int) extends Base
+case class Dummy1149(x: Int) extends Base
+case class Dummy1150(x: Int) extends Base
+case class Dummy1151(x: Int) extends Base
+case class Dummy1152(x: Int) extends Base
+case class Dummy1153(x: Int) extends Base
+case class Dummy1154(x: Int) extends Base
+case class Dummy1155(x: Int) extends Base
+case class Dummy1156(x: Int) extends Base
+case class Dummy1157(x: Int) extends Base
+case class Dummy1158(x: Int) extends Base
+case class Dummy1159(x: Int) extends Base
+case class Dummy1160(x: Int) extends Base
+case class Dummy1161(x: Int) extends Base
+case class Dummy1162(x: Int) extends Base
+case class Dummy1163(x: Int) extends Base
+case class Dummy1164(x: Int) extends Base
+case class Dummy1165(x: Int) extends Base
+case class Dummy1166(x: Int) extends Base
+case class Dummy1167(x: Int) extends Base
+case class Dummy1168(x: Int) extends Base
+case class Dummy1169(x: Int) extends Base
+case class Dummy1170(x: Int) extends Base
+case class Dummy1171(x: Int) extends Base
+case class Dummy1172(x: Int) extends Base
+case class Dummy1173(x: Int) extends Base
+case class Dummy1174(x: Int) extends Base
+case class Dummy1175(x: Int) extends Base
+case class Dummy1176(x: Int) extends Base
+case class Dummy1177(x: Int) extends Base
+case class Dummy1178(x: Int) extends Base
+case class Dummy1179(x: Int) extends Base
+case class Dummy1180(x: Int) extends Base
+case class Dummy1181(x: Int) extends Base
+case class Dummy1182(x: Int) extends Base
+case class Dummy1183(x: Int) extends Base
+case class Dummy1184(x: Int) extends Base
+case class Dummy1185(x: Int) extends Base
+case class Dummy1186(x: Int) extends Base
+case class Dummy1187(x: Int) extends Base
+case class Dummy1188(x: Int) extends Base
+case class Dummy1189(x: Int) extends Base
+case class Dummy1190(x: Int) extends Base
+case class Dummy1191(x: Int) extends Base
+case class Dummy1192(x: Int) extends Base
+case class Dummy1193(x: Int) extends Base
+case class Dummy1194(x: Int) extends Base
+case class Dummy1195(x: Int) extends Base
+case class Dummy1196(x: Int) extends Base
+case class Dummy1197(x: Int) extends Base
+case class Dummy1198(x: Int) extends Base
+case class Dummy1199(x: Int) extends Base
+case class Dummy1200(x: Int) extends Base
+case class Dummy1201(x: Int) extends Base
+case class Dummy1202(x: Int) extends Base
+case class Dummy1203(x: Int) extends Base
+case class Dummy1204(x: Int) extends Base
+case class Dummy1205(x: Int) extends Base
+case class Dummy1206(x: Int) extends Base
+case class Dummy1207(x: Int) extends Base
+case class Dummy1208(x: Int) extends Base
+case class Dummy1209(x: Int) extends Base
+case class Dummy1210(x: Int) extends Base
+case class Dummy1211(x: Int) extends Base
+case class Dummy1212(x: Int) extends Base
+case class Dummy1213(x: Int) extends Base
+case class Dummy1214(x: Int) extends Base
+case class Dummy1215(x: Int) extends Base
+case class Dummy1216(x: Int) extends Base
+case class Dummy1217(x: Int) extends Base
+case class Dummy1218(x: Int) extends Base
+case class Dummy1219(x: Int) extends Base
+case class Dummy1220(x: Int) extends Base
+case class Dummy1221(x: Int) extends Base
+case class Dummy1222(x: Int) extends Base
+case class Dummy1223(x: Int) extends Base
+case class Dummy1224(x: Int) extends Base
+case class Dummy1225(x: Int) extends Base
+case class Dummy1226(x: Int) extends Base
+case class Dummy1227(x: Int) extends Base
+case class Dummy1228(x: Int) extends Base
+case class Dummy1229(x: Int) extends Base
+case class Dummy1230(x: Int) extends Base
+case class Dummy1231(x: Int) extends Base
+case class Dummy1232(x: Int) extends Base
+case class Dummy1233(x: Int) extends Base
+case class Dummy1234(x: Int) extends Base
+case class Dummy1235(x: Int) extends Base
+case class Dummy1236(x: Int) extends Base
+case class Dummy1237(x: Int) extends Base
+case class Dummy1238(x: Int) extends Base
+case class Dummy1239(x: Int) extends Base
+case class Dummy1240(x: Int) extends Base
+case class Dummy1241(x: Int) extends Base
+case class Dummy1242(x: Int) extends Base
+case class Dummy1243(x: Int) extends Base
+case class Dummy1244(x: Int) extends Base
+case class Dummy1245(x: Int) extends Base
+case class Dummy1246(x: Int) extends Base
+case class Dummy1247(x: Int) extends Base
+case class Dummy1248(x: Int) extends Base
+case class Dummy1249(x: Int) extends Base
+case class Dummy1250(x: Int) extends Base
+case class Dummy1251(x: Int) extends Base
+case class Dummy1252(x: Int) extends Base
+case class Dummy1253(x: Int) extends Base
+case class Dummy1254(x: Int) extends Base
+case class Dummy1255(x: Int) extends Base
+case class Dummy1256(x: Int) extends Base
+case class Dummy1257(x: Int) extends Base
+case class Dummy1258(x: Int) extends Base
+case class Dummy1259(x: Int) extends Base
+case class Dummy1260(x: Int) extends Base
+case class Dummy1261(x: Int) extends Base
+case class Dummy1262(x: Int) extends Base
+case class Dummy1263(x: Int) extends Base
+case class Dummy1264(x: Int) extends Base
+case class Dummy1265(x: Int) extends Base
+case class Dummy1266(x: Int) extends Base
+case class Dummy1267(x: Int) extends Base
+case class Dummy1268(x: Int) extends Base
+case class Dummy1269(x: Int) extends Base
+case class Dummy1270(x: Int) extends Base
+case class Dummy1271(x: Int) extends Base
+case class Dummy1272(x: Int) extends Base
+case class Dummy1273(x: Int) extends Base
+case class Dummy1274(x: Int) extends Base
+case class Dummy1275(x: Int) extends Base
+case class Dummy1276(x: Int) extends Base
+case class Dummy1277(x: Int) extends Base
+case class Dummy1278(x: Int) extends Base
+case class Dummy1279(x: Int) extends Base
+case class Dummy1280(x: Int) extends Base
+case class Dummy1281(x: Int) extends Base
+case class Dummy1282(x: Int) extends Base
+case class Dummy1283(x: Int) extends Base
+case class Dummy1284(x: Int) extends Base
+case class Dummy1285(x: Int) extends Base
+case class Dummy1286(x: Int) extends Base
+case class Dummy1287(x: Int) extends Base
+case class Dummy1288(x: Int) extends Base
+case class Dummy1289(x: Int) extends Base
+case class Dummy1290(x: Int) extends Base
+case class Dummy1291(x: Int) extends Base
+case class Dummy1292(x: Int) extends Base
+case class Dummy1293(x: Int) extends Base
+case class Dummy1294(x: Int) extends Base
+case class Dummy1295(x: Int) extends Base
+case class Dummy1296(x: Int) extends Base
+case class Dummy1297(x: Int) extends Base
+case class Dummy1298(x: Int) extends Base
+case class Dummy1299(x: Int) extends Base
+case class Dummy1300(x: Int) extends Base
+case class Dummy1301(x: Int) extends Base
+case class Dummy1302(x: Int) extends Base
+case class Dummy1303(x: Int) extends Base
+case class Dummy1304(x: Int) extends Base
+case class Dummy1305(x: Int) extends Base
+case class Dummy1306(x: Int) extends Base
+case class Dummy1307(x: Int) extends Base
+case class Dummy1308(x: Int) extends Base
+case class Dummy1309(x: Int) extends Base
+case class Dummy1310(x: Int) extends Base
+case class Dummy1311(x: Int) extends Base
+case class Dummy1312(x: Int) extends Base
+case class Dummy1313(x: Int) extends Base
+case class Dummy1314(x: Int) extends Base
+case class Dummy1315(x: Int) extends Base
+case class Dummy1316(x: Int) extends Base
+case class Dummy1317(x: Int) extends Base
+case class Dummy1318(x: Int) extends Base
+case class Dummy1319(x: Int) extends Base
+case class Dummy1320(x: Int) extends Base
+case class Dummy1321(x: Int) extends Base
+case class Dummy1322(x: Int) extends Base
+case class Dummy1323(x: Int) extends Base
+case class Dummy1324(x: Int) extends Base
+case class Dummy1325(x: Int) extends Base
+case class Dummy1326(x: Int) extends Base
+case class Dummy1327(x: Int) extends Base
+case class Dummy1328(x: Int) extends Base
+case class Dummy1329(x: Int) extends Base
+case class Dummy1330(x: Int) extends Base
+case class Dummy1331(x: Int) extends Base
+case class Dummy1332(x: Int) extends Base
+case class Dummy1333(x: Int) extends Base
+case class Dummy1334(x: Int) extends Base
+case class Dummy1335(x: Int) extends Base
+case class Dummy1336(x: Int) extends Base
+case class Dummy1337(x: Int) extends Base
+case class Dummy1338(x: Int) extends Base
+case class Dummy1339(x: Int) extends Base
+case class Dummy1340(x: Int) extends Base
+case class Dummy1341(x: Int) extends Base
+case class Dummy1342(x: Int) extends Base
+case class Dummy1343(x: Int) extends Base
+case class Dummy1344(x: Int) extends Base
+case class Dummy1345(x: Int) extends Base
+case class Dummy1346(x: Int) extends Base
+case class Dummy1347(x: Int) extends Base
+case class Dummy1348(x: Int) extends Base
+case class Dummy1349(x: Int) extends Base
+case class Dummy1350(x: Int) extends Base
+case class Dummy1351(x: Int) extends Base
+case class Dummy1352(x: Int) extends Base
+case class Dummy1353(x: Int) extends Base
+case class Dummy1354(x: Int) extends Base
+case class Dummy1355(x: Int) extends Base
+case class Dummy1356(x: Int) extends Base
+case class Dummy1357(x: Int) extends Base
+case class Dummy1358(x: Int) extends Base
+case class Dummy1359(x: Int) extends Base
+case class Dummy1360(x: Int) extends Base
+case class Dummy1361(x: Int) extends Base
+case class Dummy1362(x: Int) extends Base
+case class Dummy1363(x: Int) extends Base
+case class Dummy1364(x: Int) extends Base
+case class Dummy1365(x: Int) extends Base
+case class Dummy1366(x: Int) extends Base
+case class Dummy1367(x: Int) extends Base
+case class Dummy1368(x: Int) extends Base
+case class Dummy1369(x: Int) extends Base
+case class Dummy1370(x: Int) extends Base
+case class Dummy1371(x: Int) extends Base
+case class Dummy1372(x: Int) extends Base
+case class Dummy1373(x: Int) extends Base
+case class Dummy1374(x: Int) extends Base
+case class Dummy1375(x: Int) extends Base
+case class Dummy1376(x: Int) extends Base
+case class Dummy1377(x: Int) extends Base
+case class Dummy1378(x: Int) extends Base
+case class Dummy1379(x: Int) extends Base
+case class Dummy1380(x: Int) extends Base
+case class Dummy1381(x: Int) extends Base
+case class Dummy1382(x: Int) extends Base
+case class Dummy1383(x: Int) extends Base
+case class Dummy1384(x: Int) extends Base
+case class Dummy1385(x: Int) extends Base
+case class Dummy1386(x: Int) extends Base
+case class Dummy1387(x: Int) extends Base
+case class Dummy1388(x: Int) extends Base
+case class Dummy1389(x: Int) extends Base
+case class Dummy1390(x: Int) extends Base
+case class Dummy1391(x: Int) extends Base
+case class Dummy1392(x: Int) extends Base
+case class Dummy1393(x: Int) extends Base
+case class Dummy1394(x: Int) extends Base
+case class Dummy1395(x: Int) extends Base
+case class Dummy1396(x: Int) extends Base
+case class Dummy1397(x: Int) extends Base
+case class Dummy1398(x: Int) extends Base
+case class Dummy1399(x: Int) extends Base
+case class Dummy1400(x: Int) extends Base
+case class Dummy1401(x: Int) extends Base
+case class Dummy1402(x: Int) extends Base
+case class Dummy1403(x: Int) extends Base
+case class Dummy1404(x: Int) extends Base
+case class Dummy1405(x: Int) extends Base
+case class Dummy1406(x: Int) extends Base
+case class Dummy1407(x: Int) extends Base
+case class Dummy1408(x: Int) extends Base
+case class Dummy1409(x: Int) extends Base
+case class Dummy1410(x: Int) extends Base
+case class Dummy1411(x: Int) extends Base
+case class Dummy1412(x: Int) extends Base
+case class Dummy1413(x: Int) extends Base
+case class Dummy1414(x: Int) extends Base
+case class Dummy1415(x: Int) extends Base
+case class Dummy1416(x: Int) extends Base
+case class Dummy1417(x: Int) extends Base
+case class Dummy1418(x: Int) extends Base
+case class Dummy1419(x: Int) extends Base
+case class Dummy1420(x: Int) extends Base
+case class Dummy1421(x: Int) extends Base
+case class Dummy1422(x: Int) extends Base
+case class Dummy1423(x: Int) extends Base
+case class Dummy1424(x: Int) extends Base
+case class Dummy1425(x: Int) extends Base
+case class Dummy1426(x: Int) extends Base
+case class Dummy1427(x: Int) extends Base
+case class Dummy1428(x: Int) extends Base
+case class Dummy1429(x: Int) extends Base
+case class Dummy1430(x: Int) extends Base
+case class Dummy1431(x: Int) extends Base
+case class Dummy1432(x: Int) extends Base
+case class Dummy1433(x: Int) extends Base
+case class Dummy1434(x: Int) extends Base
+case class Dummy1435(x: Int) extends Base
+case class Dummy1436(x: Int) extends Base
+case class Dummy1437(x: Int) extends Base
+case class Dummy1438(x: Int) extends Base
+case class Dummy1439(x: Int) extends Base
+case class Dummy1440(x: Int) extends Base
+case class Dummy1441(x: Int) extends Base
+case class Dummy1442(x: Int) extends Base
+case class Dummy1443(x: Int) extends Base
+case class Dummy1444(x: Int) extends Base
+case class Dummy1445(x: Int) extends Base
+case class Dummy1446(x: Int) extends Base
+case class Dummy1447(x: Int) extends Base
+case class Dummy1448(x: Int) extends Base
+case class Dummy1449(x: Int) extends Base
+case class Dummy1450(x: Int) extends Base
+case class Dummy1451(x: Int) extends Base
+case class Dummy1452(x: Int) extends Base
+case class Dummy1453(x: Int) extends Base
+case class Dummy1454(x: Int) extends Base
+case class Dummy1455(x: Int) extends Base
+case class Dummy1456(x: Int) extends Base
+case class Dummy1457(x: Int) extends Base
+case class Dummy1458(x: Int) extends Base
+case class Dummy1459(x: Int) extends Base
+case class Dummy1460(x: Int) extends Base
+case class Dummy1461(x: Int) extends Base
+case class Dummy1462(x: Int) extends Base
+case class Dummy1463(x: Int) extends Base
+case class Dummy1464(x: Int) extends Base
+case class Dummy1465(x: Int) extends Base
+case class Dummy1466(x: Int) extends Base
+case class Dummy1467(x: Int) extends Base
+case class Dummy1468(x: Int) extends Base
+case class Dummy1469(x: Int) extends Base
+case class Dummy1470(x: Int) extends Base
+case class Dummy1471(x: Int) extends Base
+case class Dummy1472(x: Int) extends Base
+case class Dummy1473(x: Int) extends Base
+case class Dummy1474(x: Int) extends Base
+case class Dummy1475(x: Int) extends Base
+case class Dummy1476(x: Int) extends Base
+case class Dummy1477(x: Int) extends Base
+case class Dummy1478(x: Int) extends Base
+case class Dummy1479(x: Int) extends Base
+case class Dummy1480(x: Int) extends Base
+case class Dummy1481(x: Int) extends Base
+case class Dummy1482(x: Int) extends Base
+case class Dummy1483(x: Int) extends Base
+case class Dummy1484(x: Int) extends Base
+case class Dummy1485(x: Int) extends Base
+case class Dummy1486(x: Int) extends Base
+case class Dummy1487(x: Int) extends Base
+case class Dummy1488(x: Int) extends Base
+case class Dummy1489(x: Int) extends Base
+case class Dummy1490(x: Int) extends Base
+case class Dummy1491(x: Int) extends Base
+case class Dummy1492(x: Int) extends Base
+case class Dummy1493(x: Int) extends Base
+case class Dummy1494(x: Int) extends Base
+case class Dummy1495(x: Int) extends Base
+case class Dummy1496(x: Int) extends Base
+case class Dummy1497(x: Int) extends Base
+case class Dummy1498(x: Int) extends Base
+case class Dummy1499(x: Int) extends Base
+case class Dummy1500(x: Int) extends Base
+case class Dummy1501(x: Int) extends Base
+case class Dummy1502(x: Int) extends Base
+case class Dummy1503(x: Int) extends Base
+case class Dummy1504(x: Int) extends Base
+case class Dummy1505(x: Int) extends Base
+case class Dummy1506(x: Int) extends Base
+case class Dummy1507(x: Int) extends Base
+case class Dummy1508(x: Int) extends Base
+case class Dummy1509(x: Int) extends Base
+case class Dummy1510(x: Int) extends Base
+case class Dummy1511(x: Int) extends Base
+case class Dummy1512(x: Int) extends Base
+case class Dummy1513(x: Int) extends Base
+case class Dummy1514(x: Int) extends Base
+case class Dummy1515(x: Int) extends Base
+case class Dummy1516(x: Int) extends Base
+case class Dummy1517(x: Int) extends Base
+case class Dummy1518(x: Int) extends Base
+case class Dummy1519(x: Int) extends Base
+case class Dummy1520(x: Int) extends Base
+case class Dummy1521(x: Int) extends Base
+case class Dummy1522(x: Int) extends Base
+case class Dummy1523(x: Int) extends Base
+case class Dummy1524(x: Int) extends Base
+case class Dummy1525(x: Int) extends Base
+case class Dummy1526(x: Int) extends Base
+case class Dummy1527(x: Int) extends Base
+case class Dummy1528(x: Int) extends Base
+case class Dummy1529(x: Int) extends Base
+case class Dummy1530(x: Int) extends Base
+case class Dummy1531(x: Int) extends Base
+case class Dummy1532(x: Int) extends Base
+case class Dummy1533(x: Int) extends Base
+case class Dummy1534(x: Int) extends Base
+case class Dummy1535(x: Int) extends Base
+case class Dummy1536(x: Int) extends Base
+case class Dummy1537(x: Int) extends Base
+case class Dummy1538(x: Int) extends Base
+case class Dummy1539(x: Int) extends Base
+case class Dummy1540(x: Int) extends Base
+case class Dummy1541(x: Int) extends Base
+case class Dummy1542(x: Int) extends Base
+case class Dummy1543(x: Int) extends Base
+case class Dummy1544(x: Int) extends Base
+case class Dummy1545(x: Int) extends Base
+case class Dummy1546(x: Int) extends Base
+case class Dummy1547(x: Int) extends Base
+case class Dummy1548(x: Int) extends Base
+case class Dummy1549(x: Int) extends Base
+case class Dummy1550(x: Int) extends Base
+case class Dummy1551(x: Int) extends Base
+case class Dummy1552(x: Int) extends Base
+case class Dummy1553(x: Int) extends Base
+case class Dummy1554(x: Int) extends Base
+case class Dummy1555(x: Int) extends Base
+case class Dummy1556(x: Int) extends Base
+case class Dummy1557(x: Int) extends Base
+case class Dummy1558(x: Int) extends Base
+case class Dummy1559(x: Int) extends Base
+case class Dummy1560(x: Int) extends Base
+case class Dummy1561(x: Int) extends Base
+case class Dummy1562(x: Int) extends Base
+case class Dummy1563(x: Int) extends Base
+case class Dummy1564(x: Int) extends Base
+case class Dummy1565(x: Int) extends Base
+case class Dummy1566(x: Int) extends Base
+case class Dummy1567(x: Int) extends Base
+case class Dummy1568(x: Int) extends Base
+case class Dummy1569(x: Int) extends Base
+case class Dummy1570(x: Int) extends Base
+case class Dummy1571(x: Int) extends Base
+case class Dummy1572(x: Int) extends Base
+case class Dummy1573(x: Int) extends Base
+case class Dummy1574(x: Int) extends Base
+case class Dummy1575(x: Int) extends Base
+case class Dummy1576(x: Int) extends Base
+case class Dummy1577(x: Int) extends Base
+case class Dummy1578(x: Int) extends Base
+case class Dummy1579(x: Int) extends Base
+case class Dummy1580(x: Int) extends Base
+case class Dummy1581(x: Int) extends Base
+case class Dummy1582(x: Int) extends Base
+case class Dummy1583(x: Int) extends Base
+case class Dummy1584(x: Int) extends Base
+case class Dummy1585(x: Int) extends Base
+case class Dummy1586(x: Int) extends Base
+case class Dummy1587(x: Int) extends Base
+case class Dummy1588(x: Int) extends Base
+case class Dummy1589(x: Int) extends Base
+case class Dummy1590(x: Int) extends Base
+case class Dummy1591(x: Int) extends Base
+case class Dummy1592(x: Int) extends Base
+case class Dummy1593(x: Int) extends Base
+case class Dummy1594(x: Int) extends Base
+case class Dummy1595(x: Int) extends Base
+case class Dummy1596(x: Int) extends Base
+case class Dummy1597(x: Int) extends Base
+case class Dummy1598(x: Int) extends Base
+case class Dummy1599(x: Int) extends Base
+case class Dummy1600(x: Int) extends Base
+case class Dummy1601(x: Int) extends Base
+case class Dummy1602(x: Int) extends Base
+case class Dummy1603(x: Int) extends Base
+case class Dummy1604(x: Int) extends Base
+case class Dummy1605(x: Int) extends Base
+case class Dummy1606(x: Int) extends Base
+case class Dummy1607(x: Int) extends Base
+case class Dummy1608(x: Int) extends Base
+case class Dummy1609(x: Int) extends Base
+case class Dummy1610(x: Int) extends Base
+case class Dummy1611(x: Int) extends Base
+case class Dummy1612(x: Int) extends Base
+case class Dummy1613(x: Int) extends Base
+case class Dummy1614(x: Int) extends Base
+case class Dummy1615(x: Int) extends Base
+case class Dummy1616(x: Int) extends Base
+case class Dummy1617(x: Int) extends Base
+case class Dummy1618(x: Int) extends Base
+case class Dummy1619(x: Int) extends Base
+case class Dummy1620(x: Int) extends Base
+case class Dummy1621(x: Int) extends Base
+case class Dummy1622(x: Int) extends Base
+case class Dummy1623(x: Int) extends Base
+case class Dummy1624(x: Int) extends Base
+case class Dummy1625(x: Int) extends Base
+case class Dummy1626(x: Int) extends Base
+case class Dummy1627(x: Int) extends Base
+case class Dummy1628(x: Int) extends Base
+case class Dummy1629(x: Int) extends Base
+case class Dummy1630(x: Int) extends Base
+case class Dummy1631(x: Int) extends Base
+case class Dummy1632(x: Int) extends Base
+case class Dummy1633(x: Int) extends Base
+case class Dummy1634(x: Int) extends Base
+case class Dummy1635(x: Int) extends Base
+case class Dummy1636(x: Int) extends Base
+case class Dummy1637(x: Int) extends Base
+case class Dummy1638(x: Int) extends Base
+case class Dummy1639(x: Int) extends Base
+case class Dummy1640(x: Int) extends Base
+case class Dummy1641(x: Int) extends Base
+case class Dummy1642(x: Int) extends Base
+case class Dummy1643(x: Int) extends Base
+case class Dummy1644(x: Int) extends Base
+case class Dummy1645(x: Int) extends Base
+case class Dummy1646(x: Int) extends Base
+case class Dummy1647(x: Int) extends Base
+case class Dummy1648(x: Int) extends Base
+case class Dummy1649(x: Int) extends Base
+case class Dummy1650(x: Int) extends Base
+case class Dummy1651(x: Int) extends Base
+case class Dummy1652(x: Int) extends Base
+case class Dummy1653(x: Int) extends Base
+case class Dummy1654(x: Int) extends Base
+case class Dummy1655(x: Int) extends Base
+case class Dummy1656(x: Int) extends Base
+case class Dummy1657(x: Int) extends Base
+case class Dummy1658(x: Int) extends Base
+case class Dummy1659(x: Int) extends Base
+case class Dummy1660(x: Int) extends Base
+case class Dummy1661(x: Int) extends Base
+case class Dummy1662(x: Int) extends Base
+case class Dummy1663(x: Int) extends Base
+case class Dummy1664(x: Int) extends Base
+case class Dummy1665(x: Int) extends Base
+case class Dummy1666(x: Int) extends Base
+case class Dummy1667(x: Int) extends Base
+case class Dummy1668(x: Int) extends Base
+case class Dummy1669(x: Int) extends Base
+case class Dummy1670(x: Int) extends Base
+case class Dummy1671(x: Int) extends Base
+case class Dummy1672(x: Int) extends Base
+case class Dummy1673(x: Int) extends Base
+case class Dummy1674(x: Int) extends Base
+case class Dummy1675(x: Int) extends Base
+case class Dummy1676(x: Int) extends Base
+case class Dummy1677(x: Int) extends Base
+case class Dummy1678(x: Int) extends Base
+case class Dummy1679(x: Int) extends Base
+case class Dummy1680(x: Int) extends Base
+case class Dummy1681(x: Int) extends Base
+case class Dummy1682(x: Int) extends Base
+case class Dummy1683(x: Int) extends Base
+case class Dummy1684(x: Int) extends Base
+case class Dummy1685(x: Int) extends Base
+case class Dummy1686(x: Int) extends Base
+case class Dummy1687(x: Int) extends Base
+case class Dummy1688(x: Int) extends Base
+case class Dummy1689(x: Int) extends Base
+case class Dummy1690(x: Int) extends Base
+case class Dummy1691(x: Int) extends Base
+case class Dummy1692(x: Int) extends Base
+case class Dummy1693(x: Int) extends Base
+case class Dummy1694(x: Int) extends Base
+case class Dummy1695(x: Int) extends Base
+case class Dummy1696(x: Int) extends Base
+case class Dummy1697(x: Int) extends Base
+case class Dummy1698(x: Int) extends Base
+case class Dummy1699(x: Int) extends Base
+case class Dummy1700(x: Int) extends Base
+case class Dummy1701(x: Int) extends Base
+case class Dummy1702(x: Int) extends Base
+case class Dummy1703(x: Int) extends Base
+case class Dummy1704(x: Int) extends Base
+case class Dummy1705(x: Int) extends Base
+case class Dummy1706(x: Int) extends Base
+case class Dummy1707(x: Int) extends Base
+case class Dummy1708(x: Int) extends Base
+case class Dummy1709(x: Int) extends Base
+case class Dummy1710(x: Int) extends Base
+case class Dummy1711(x: Int) extends Base
+case class Dummy1712(x: Int) extends Base
+case class Dummy1713(x: Int) extends Base
+case class Dummy1714(x: Int) extends Base
+case class Dummy1715(x: Int) extends Base
+case class Dummy1716(x: Int) extends Base
+case class Dummy1717(x: Int) extends Base
+case class Dummy1718(x: Int) extends Base
+case class Dummy1719(x: Int) extends Base
+case class Dummy1720(x: Int) extends Base
+case class Dummy1721(x: Int) extends Base
+case class Dummy1722(x: Int) extends Base
+case class Dummy1723(x: Int) extends Base
+case class Dummy1724(x: Int) extends Base
+case class Dummy1725(x: Int) extends Base
+case class Dummy1726(x: Int) extends Base
+case class Dummy1727(x: Int) extends Base
+case class Dummy1728(x: Int) extends Base
+case class Dummy1729(x: Int) extends Base
+case class Dummy1730(x: Int) extends Base
+case class Dummy1731(x: Int) extends Base
+case class Dummy1732(x: Int) extends Base
+case class Dummy1733(x: Int) extends Base
+case class Dummy1734(x: Int) extends Base
+case class Dummy1735(x: Int) extends Base
+case class Dummy1736(x: Int) extends Base
+case class Dummy1737(x: Int) extends Base
+case class Dummy1738(x: Int) extends Base
+case class Dummy1739(x: Int) extends Base
+case class Dummy1740(x: Int) extends Base
+case class Dummy1741(x: Int) extends Base
+case class Dummy1742(x: Int) extends Base
+case class Dummy1743(x: Int) extends Base
+case class Dummy1744(x: Int) extends Base
+case class Dummy1745(x: Int) extends Base
+case class Dummy1746(x: Int) extends Base
+case class Dummy1747(x: Int) extends Base
+case class Dummy1748(x: Int) extends Base
+case class Dummy1749(x: Int) extends Base
+case class Dummy1750(x: Int) extends Base
+case class Dummy1751(x: Int) extends Base
+case class Dummy1752(x: Int) extends Base
+case class Dummy1753(x: Int) extends Base
+case class Dummy1754(x: Int) extends Base
+case class Dummy1755(x: Int) extends Base
+case class Dummy1756(x: Int) extends Base
+case class Dummy1757(x: Int) extends Base
+case class Dummy1758(x: Int) extends Base
+case class Dummy1759(x: Int) extends Base
+case class Dummy1760(x: Int) extends Base
+case class Dummy1761(x: Int) extends Base
+case class Dummy1762(x: Int) extends Base
+case class Dummy1763(x: Int) extends Base
+case class Dummy1764(x: Int) extends Base
+case class Dummy1765(x: Int) extends Base
+case class Dummy1766(x: Int) extends Base
+case class Dummy1767(x: Int) extends Base
+case class Dummy1768(x: Int) extends Base
+case class Dummy1769(x: Int) extends Base
+case class Dummy1770(x: Int) extends Base
+case class Dummy1771(x: Int) extends Base
+case class Dummy1772(x: Int) extends Base
+case class Dummy1773(x: Int) extends Base
+case class Dummy1774(x: Int) extends Base
+case class Dummy1775(x: Int) extends Base
+case class Dummy1776(x: Int) extends Base
+case class Dummy1777(x: Int) extends Base
+case class Dummy1778(x: Int) extends Base
+case class Dummy1779(x: Int) extends Base
+case class Dummy1780(x: Int) extends Base
+case class Dummy1781(x: Int) extends Base
+case class Dummy1782(x: Int) extends Base
+case class Dummy1783(x: Int) extends Base
+case class Dummy1784(x: Int) extends Base
+case class Dummy1785(x: Int) extends Base
+case class Dummy1786(x: Int) extends Base
+case class Dummy1787(x: Int) extends Base
+case class Dummy1788(x: Int) extends Base
+case class Dummy1789(x: Int) extends Base
+case class Dummy1790(x: Int) extends Base
+case class Dummy1791(x: Int) extends Base
+case class Dummy1792(x: Int) extends Base
+case class Dummy1793(x: Int) extends Base
+case class Dummy1794(x: Int) extends Base
+case class Dummy1795(x: Int) extends Base
+case class Dummy1796(x: Int) extends Base
+case class Dummy1797(x: Int) extends Base
+case class Dummy1798(x: Int) extends Base
+case class Dummy1799(x: Int) extends Base
+case class Dummy1800(x: Int) extends Base
+case class Dummy1801(x: Int) extends Base
+case class Dummy1802(x: Int) extends Base
+case class Dummy1803(x: Int) extends Base
+case class Dummy1804(x: Int) extends Base
+case class Dummy1805(x: Int) extends Base
+case class Dummy1806(x: Int) extends Base
+case class Dummy1807(x: Int) extends Base
+case class Dummy1808(x: Int) extends Base
+case class Dummy1809(x: Int) extends Base
+case class Dummy1810(x: Int) extends Base
+case class Dummy1811(x: Int) extends Base
+case class Dummy1812(x: Int) extends Base
+case class Dummy1813(x: Int) extends Base
+case class Dummy1814(x: Int) extends Base
+case class Dummy1815(x: Int) extends Base
+case class Dummy1816(x: Int) extends Base
+case class Dummy1817(x: Int) extends Base
+case class Dummy1818(x: Int) extends Base
+case class Dummy1819(x: Int) extends Base
+case class Dummy1820(x: Int) extends Base
+case class Dummy1821(x: Int) extends Base
+case class Dummy1822(x: Int) extends Base
+case class Dummy1823(x: Int) extends Base
+case class Dummy1824(x: Int) extends Base
+case class Dummy1825(x: Int) extends Base
+case class Dummy1826(x: Int) extends Base
+case class Dummy1827(x: Int) extends Base
+case class Dummy1828(x: Int) extends Base
+case class Dummy1829(x: Int) extends Base
+case class Dummy1830(x: Int) extends Base
+case class Dummy1831(x: Int) extends Base
+case class Dummy1832(x: Int) extends Base
+case class Dummy1833(x: Int) extends Base
+case class Dummy1834(x: Int) extends Base
+case class Dummy1835(x: Int) extends Base
+case class Dummy1836(x: Int) extends Base
+case class Dummy1837(x: Int) extends Base
+case class Dummy1838(x: Int) extends Base
+case class Dummy1839(x: Int) extends Base
+case class Dummy1840(x: Int) extends Base
+case class Dummy1841(x: Int) extends Base
+case class Dummy1842(x: Int) extends Base
+case class Dummy1843(x: Int) extends Base
+case class Dummy1844(x: Int) extends Base
+case class Dummy1845(x: Int) extends Base
+case class Dummy1846(x: Int) extends Base
+case class Dummy1847(x: Int) extends Base
+case class Dummy1848(x: Int) extends Base
+case class Dummy1849(x: Int) extends Base
+case class Dummy1850(x: Int) extends Base
+case class Dummy1851(x: Int) extends Base
+case class Dummy1852(x: Int) extends Base
+case class Dummy1853(x: Int) extends Base
+case class Dummy1854(x: Int) extends Base
+case class Dummy1855(x: Int) extends Base
+case class Dummy1856(x: Int) extends Base
+case class Dummy1857(x: Int) extends Base
+case class Dummy1858(x: Int) extends Base
+case class Dummy1859(x: Int) extends Base
+case class Dummy1860(x: Int) extends Base
+case class Dummy1861(x: Int) extends Base
+case class Dummy1862(x: Int) extends Base
+case class Dummy1863(x: Int) extends Base
+case class Dummy1864(x: Int) extends Base
+case class Dummy1865(x: Int) extends Base
+case class Dummy1866(x: Int) extends Base
+case class Dummy1867(x: Int) extends Base
+case class Dummy1868(x: Int) extends Base
+case class Dummy1869(x: Int) extends Base
+case class Dummy1870(x: Int) extends Base
+case class Dummy1871(x: Int) extends Base
+case class Dummy1872(x: Int) extends Base
+case class Dummy1873(x: Int) extends Base
+case class Dummy1874(x: Int) extends Base
+case class Dummy1875(x: Int) extends Base
+case class Dummy1876(x: Int) extends Base
+case class Dummy1877(x: Int) extends Base
+case class Dummy1878(x: Int) extends Base
+case class Dummy1879(x: Int) extends Base
+case class Dummy1880(x: Int) extends Base
+case class Dummy1881(x: Int) extends Base
+case class Dummy1882(x: Int) extends Base
+case class Dummy1883(x: Int) extends Base
+case class Dummy1884(x: Int) extends Base
+case class Dummy1885(x: Int) extends Base
+case class Dummy1886(x: Int) extends Base
+case class Dummy1887(x: Int) extends Base
+case class Dummy1888(x: Int) extends Base
+case class Dummy1889(x: Int) extends Base
+case class Dummy1890(x: Int) extends Base
+case class Dummy1891(x: Int) extends Base
+case class Dummy1892(x: Int) extends Base
+case class Dummy1893(x: Int) extends Base
+case class Dummy1894(x: Int) extends Base
+case class Dummy1895(x: Int) extends Base
+case class Dummy1896(x: Int) extends Base
+case class Dummy1897(x: Int) extends Base
+case class Dummy1898(x: Int) extends Base
+case class Dummy1899(x: Int) extends Base
+case class Dummy1900(x: Int) extends Base
+case class Dummy1901(x: Int) extends Base
+case class Dummy1902(x: Int) extends Base
+case class Dummy1903(x: Int) extends Base
+case class Dummy1904(x: Int) extends Base
+case class Dummy1905(x: Int) extends Base
+case class Dummy1906(x: Int) extends Base
+case class Dummy1907(x: Int) extends Base
+case class Dummy1908(x: Int) extends Base
+case class Dummy1909(x: Int) extends Base
+case class Dummy1910(x: Int) extends Base
+case class Dummy1911(x: Int) extends Base
+case class Dummy1912(x: Int) extends Base
+case class Dummy1913(x: Int) extends Base
+case class Dummy1914(x: Int) extends Base
+case class Dummy1915(x: Int) extends Base
+case class Dummy1916(x: Int) extends Base
+case class Dummy1917(x: Int) extends Base
+case class Dummy1918(x: Int) extends Base
+case class Dummy1919(x: Int) extends Base
+case class Dummy1920(x: Int) extends Base
+case class Dummy1921(x: Int) extends Base
+case class Dummy1922(x: Int) extends Base
+case class Dummy1923(x: Int) extends Base
+case class Dummy1924(x: Int) extends Base
+case class Dummy1925(x: Int) extends Base
+case class Dummy1926(x: Int) extends Base
+case class Dummy1927(x: Int) extends Base
+case class Dummy1928(x: Int) extends Base
+case class Dummy1929(x: Int) extends Base
+case class Dummy1930(x: Int) extends Base
+case class Dummy1931(x: Int) extends Base
+case class Dummy1932(x: Int) extends Base
+case class Dummy1933(x: Int) extends Base
+case class Dummy1934(x: Int) extends Base
+case class Dummy1935(x: Int) extends Base
+case class Dummy1936(x: Int) extends Base
+case class Dummy1937(x: Int) extends Base
+case class Dummy1938(x: Int) extends Base
+case class Dummy1939(x: Int) extends Base
+case class Dummy1940(x: Int) extends Base
+case class Dummy1941(x: Int) extends Base
+case class Dummy1942(x: Int) extends Base
+case class Dummy1943(x: Int) extends Base
+case class Dummy1944(x: Int) extends Base
+case class Dummy1945(x: Int) extends Base
+case class Dummy1946(x: Int) extends Base
+case class Dummy1947(x: Int) extends Base
+case class Dummy1948(x: Int) extends Base
+case class Dummy1949(x: Int) extends Base
+case class Dummy1950(x: Int) extends Base
+case class Dummy1951(x: Int) extends Base
+case class Dummy1952(x: Int) extends Base
+case class Dummy1953(x: Int) extends Base
+case class Dummy1954(x: Int) extends Base
+case class Dummy1955(x: Int) extends Base
+case class Dummy1956(x: Int) extends Base
+case class Dummy1957(x: Int) extends Base
+case class Dummy1958(x: Int) extends Base
+case class Dummy1959(x: Int) extends Base
+case class Dummy1960(x: Int) extends Base
+case class Dummy1961(x: Int) extends Base
+case class Dummy1962(x: Int) extends Base
+case class Dummy1963(x: Int) extends Base
+case class Dummy1964(x: Int) extends Base
+case class Dummy1965(x: Int) extends Base
+case class Dummy1966(x: Int) extends Base
+case class Dummy1967(x: Int) extends Base
+case class Dummy1968(x: Int) extends Base
+case class Dummy1969(x: Int) extends Base
+case class Dummy1970(x: Int) extends Base
+case class Dummy1971(x: Int) extends Base
+case class Dummy1972(x: Int) extends Base
+case class Dummy1973(x: Int) extends Base
+case class Dummy1974(x: Int) extends Base
+case class Dummy1975(x: Int) extends Base
+case class Dummy1976(x: Int) extends Base
+case class Dummy1977(x: Int) extends Base
+case class Dummy1978(x: Int) extends Base
+case class Dummy1979(x: Int) extends Base
+case class Dummy1980(x: Int) extends Base
+case class Dummy1981(x: Int) extends Base
+case class Dummy1982(x: Int) extends Base
+case class Dummy1983(x: Int) extends Base
+case class Dummy1984(x: Int) extends Base
+case class Dummy1985(x: Int) extends Base
+case class Dummy1986(x: Int) extends Base
+case class Dummy1987(x: Int) extends Base
+case class Dummy1988(x: Int) extends Base
+case class Dummy1989(x: Int) extends Base
+case class Dummy1990(x: Int) extends Base
+case class Dummy1991(x: Int) extends Base
+case class Dummy1992(x: Int) extends Base
+case class Dummy1993(x: Int) extends Base
+case class Dummy1994(x: Int) extends Base
+case class Dummy1995(x: Int) extends Base
+case class Dummy1996(x: Int) extends Base
+case class Dummy1997(x: Int) extends Base
+case class Dummy1998(x: Int) extends Base
+case class Dummy1999(x: Int) extends Base
+def test(y: Base) = y match {
+ case Dummy0(p) => p
+ case Dummy1(p) => p
+ case Dummy2(p) => p
+ case Dummy3(p) => p
+ case Dummy4(p) => p
+ case Dummy5(p) => p
+ case Dummy6(p) => p
+ case Dummy7(p) => p
+ case Dummy8(p) => p
+ case Dummy9(p) => p
+ case Dummy10(p) => p
+ case Dummy11(p) => p
+ case Dummy12(p) => p
+ case Dummy13(p) => p
+ case Dummy14(p) => p
+ case Dummy15(p) => p
+ case Dummy16(p) => p
+ case Dummy17(p) => p
+ case Dummy18(p) => p
+ case Dummy19(p) => p
+ case Dummy20(p) => p
+ case Dummy21(p) => p
+ case Dummy22(p) => p
+ case Dummy23(p) => p
+ case Dummy24(p) => p
+ case Dummy25(p) => p
+ case Dummy26(p) => p
+ case Dummy27(p) => p
+ case Dummy28(p) => p
+ case Dummy29(p) => p
+ case Dummy30(p) => p
+ case Dummy31(p) => p
+ case Dummy32(p) => p
+ case Dummy33(p) => p
+ case Dummy34(p) => p
+ case Dummy35(p) => p
+ case Dummy36(p) => p
+ case Dummy37(p) => p
+ case Dummy38(p) => p
+ case Dummy39(p) => p
+ case Dummy40(p) => p
+ case Dummy41(p) => p
+ case Dummy42(p) => p
+ case Dummy43(p) => p
+ case Dummy44(p) => p
+ case Dummy45(p) => p
+ case Dummy46(p) => p
+ case Dummy47(p) => p
+ case Dummy48(p) => p
+ case Dummy49(p) => p
+ case Dummy50(p) => p
+ case Dummy51(p) => p
+ case Dummy52(p) => p
+ case Dummy53(p) => p
+ case Dummy54(p) => p
+ case Dummy55(p) => p
+ case Dummy56(p) => p
+ case Dummy57(p) => p
+ case Dummy58(p) => p
+ case Dummy59(p) => p
+ case Dummy60(p) => p
+ case Dummy61(p) => p
+ case Dummy62(p) => p
+ case Dummy63(p) => p
+ case Dummy64(p) => p
+ case Dummy65(p) => p
+ case Dummy66(p) => p
+ case Dummy67(p) => p
+ case Dummy68(p) => p
+ case Dummy69(p) => p
+ case Dummy70(p) => p
+ case Dummy71(p) => p
+ case Dummy72(p) => p
+ case Dummy73(p) => p
+ case Dummy74(p) => p
+ case Dummy75(p) => p
+ case Dummy76(p) => p
+ case Dummy77(p) => p
+ case Dummy78(p) => p
+ case Dummy79(p) => p
+ case Dummy80(p) => p
+ case Dummy81(p) => p
+ case Dummy82(p) => p
+ case Dummy83(p) => p
+ case Dummy84(p) => p
+ case Dummy85(p) => p
+ case Dummy86(p) => p
+ case Dummy87(p) => p
+ case Dummy88(p) => p
+ case Dummy89(p) => p
+ case Dummy90(p) => p
+ case Dummy91(p) => p
+ case Dummy92(p) => p
+ case Dummy93(p) => p
+ case Dummy94(p) => p
+ case Dummy95(p) => p
+ case Dummy96(p) => p
+ case Dummy97(p) => p
+ case Dummy98(p) => p
+ case Dummy99(p) => p
+ case Dummy100(p) => p
+ case Dummy101(p) => p
+ case Dummy102(p) => p
+ case Dummy103(p) => p
+ case Dummy104(p) => p
+ case Dummy105(p) => p
+ case Dummy106(p) => p
+ case Dummy107(p) => p
+ case Dummy108(p) => p
+ case Dummy109(p) => p
+ case Dummy110(p) => p
+ case Dummy111(p) => p
+ case Dummy112(p) => p
+ case Dummy113(p) => p
+ case Dummy114(p) => p
+ case Dummy115(p) => p
+ case Dummy116(p) => p
+ case Dummy117(p) => p
+ case Dummy118(p) => p
+ case Dummy119(p) => p
+ case Dummy120(p) => p
+ case Dummy121(p) => p
+ case Dummy122(p) => p
+ case Dummy123(p) => p
+ case Dummy124(p) => p
+ case Dummy125(p) => p
+ case Dummy126(p) => p
+ case Dummy127(p) => p
+ case Dummy128(p) => p
+ case Dummy129(p) => p
+ case Dummy130(p) => p
+ case Dummy131(p) => p
+ case Dummy132(p) => p
+ case Dummy133(p) => p
+ case Dummy134(p) => p
+ case Dummy135(p) => p
+ case Dummy136(p) => p
+ case Dummy137(p) => p
+ case Dummy138(p) => p
+ case Dummy139(p) => p
+ case Dummy140(p) => p
+ case Dummy141(p) => p
+ case Dummy142(p) => p
+ case Dummy143(p) => p
+ case Dummy144(p) => p
+ case Dummy145(p) => p
+ case Dummy146(p) => p
+ case Dummy147(p) => p
+ case Dummy148(p) => p
+ case Dummy149(p) => p
+ case Dummy150(p) => p
+ case Dummy151(p) => p
+ case Dummy152(p) => p
+ case Dummy153(p) => p
+ case Dummy154(p) => p
+ case Dummy155(p) => p
+ case Dummy156(p) => p
+ case Dummy157(p) => p
+ case Dummy158(p) => p
+ case Dummy159(p) => p
+ case Dummy160(p) => p
+ case Dummy161(p) => p
+ case Dummy162(p) => p
+ case Dummy163(p) => p
+ case Dummy164(p) => p
+ case Dummy165(p) => p
+ case Dummy166(p) => p
+ case Dummy167(p) => p
+ case Dummy168(p) => p
+ case Dummy169(p) => p
+ case Dummy170(p) => p
+ case Dummy171(p) => p
+ case Dummy172(p) => p
+ case Dummy173(p) => p
+ case Dummy174(p) => p
+ case Dummy175(p) => p
+ case Dummy176(p) => p
+ case Dummy177(p) => p
+ case Dummy178(p) => p
+ case Dummy179(p) => p
+ case Dummy180(p) => p
+ case Dummy181(p) => p
+ case Dummy182(p) => p
+ case Dummy183(p) => p
+ case Dummy184(p) => p
+ case Dummy185(p) => p
+ case Dummy186(p) => p
+ case Dummy187(p) => p
+ case Dummy188(p) => p
+ case Dummy189(p) => p
+ case Dummy190(p) => p
+ case Dummy191(p) => p
+ case Dummy192(p) => p
+ case Dummy193(p) => p
+ case Dummy194(p) => p
+ case Dummy195(p) => p
+ case Dummy196(p) => p
+ case Dummy197(p) => p
+ case Dummy198(p) => p
+ case Dummy199(p) => p
+ case Dummy200(p) => p
+ case Dummy201(p) => p
+ case Dummy202(p) => p
+ case Dummy203(p) => p
+ case Dummy204(p) => p
+ case Dummy205(p) => p
+ case Dummy206(p) => p
+ case Dummy207(p) => p
+ case Dummy208(p) => p
+ case Dummy209(p) => p
+ case Dummy210(p) => p
+ case Dummy211(p) => p
+ case Dummy212(p) => p
+ case Dummy213(p) => p
+ case Dummy214(p) => p
+ case Dummy215(p) => p
+ case Dummy216(p) => p
+ case Dummy217(p) => p
+ case Dummy218(p) => p
+ case Dummy219(p) => p
+ case Dummy220(p) => p
+ case Dummy221(p) => p
+ case Dummy222(p) => p
+ case Dummy223(p) => p
+ case Dummy224(p) => p
+ case Dummy225(p) => p
+ case Dummy226(p) => p
+ case Dummy227(p) => p
+ case Dummy228(p) => p
+ case Dummy229(p) => p
+ case Dummy230(p) => p
+ case Dummy231(p) => p
+ case Dummy232(p) => p
+ case Dummy233(p) => p
+ case Dummy234(p) => p
+ case Dummy235(p) => p
+ case Dummy236(p) => p
+ case Dummy237(p) => p
+ case Dummy238(p) => p
+ case Dummy239(p) => p
+ case Dummy240(p) => p
+ case Dummy241(p) => p
+ case Dummy242(p) => p
+ case Dummy243(p) => p
+ case Dummy244(p) => p
+ case Dummy245(p) => p
+ case Dummy246(p) => p
+ case Dummy247(p) => p
+ case Dummy248(p) => p
+ case Dummy249(p) => p
+ case Dummy250(p) => p
+ case Dummy251(p) => p
+ case Dummy252(p) => p
+ case Dummy253(p) => p
+ case Dummy254(p) => p
+ case Dummy255(p) => p
+ case Dummy256(p) => p
+ case Dummy257(p) => p
+ case Dummy258(p) => p
+ case Dummy259(p) => p
+ case Dummy260(p) => p
+ case Dummy261(p) => p
+ case Dummy262(p) => p
+ case Dummy263(p) => p
+ case Dummy264(p) => p
+ case Dummy265(p) => p
+ case Dummy266(p) => p
+ case Dummy267(p) => p
+ case Dummy268(p) => p
+ case Dummy269(p) => p
+ case Dummy270(p) => p
+ case Dummy271(p) => p
+ case Dummy272(p) => p
+ case Dummy273(p) => p
+ case Dummy274(p) => p
+ case Dummy275(p) => p
+ case Dummy276(p) => p
+ case Dummy277(p) => p
+ case Dummy278(p) => p
+ case Dummy279(p) => p
+ case Dummy280(p) => p
+ case Dummy281(p) => p
+ case Dummy282(p) => p
+ case Dummy283(p) => p
+ case Dummy284(p) => p
+ case Dummy285(p) => p
+ case Dummy286(p) => p
+ case Dummy287(p) => p
+ case Dummy288(p) => p
+ case Dummy289(p) => p
+ case Dummy290(p) => p
+ case Dummy291(p) => p
+ case Dummy292(p) => p
+ case Dummy293(p) => p
+ case Dummy294(p) => p
+ case Dummy295(p) => p
+ case Dummy296(p) => p
+ case Dummy297(p) => p
+ case Dummy298(p) => p
+ case Dummy299(p) => p
+ case Dummy300(p) => p
+ case Dummy301(p) => p
+ case Dummy302(p) => p
+ case Dummy303(p) => p
+ case Dummy304(p) => p
+ case Dummy305(p) => p
+ case Dummy306(p) => p
+ case Dummy307(p) => p
+ case Dummy308(p) => p
+ case Dummy309(p) => p
+ case Dummy310(p) => p
+ case Dummy311(p) => p
+ case Dummy312(p) => p
+ case Dummy313(p) => p
+ case Dummy314(p) => p
+ case Dummy315(p) => p
+ case Dummy316(p) => p
+ case Dummy317(p) => p
+ case Dummy318(p) => p
+ case Dummy319(p) => p
+ case Dummy320(p) => p
+ case Dummy321(p) => p
+ case Dummy322(p) => p
+ case Dummy323(p) => p
+ case Dummy324(p) => p
+ case Dummy325(p) => p
+ case Dummy326(p) => p
+ case Dummy327(p) => p
+ case Dummy328(p) => p
+ case Dummy329(p) => p
+ case Dummy330(p) => p
+ case Dummy331(p) => p
+ case Dummy332(p) => p
+ case Dummy333(p) => p
+ case Dummy334(p) => p
+ case Dummy335(p) => p
+ case Dummy336(p) => p
+ case Dummy337(p) => p
+ case Dummy338(p) => p
+ case Dummy339(p) => p
+ case Dummy340(p) => p
+ case Dummy341(p) => p
+ case Dummy342(p) => p
+ case Dummy343(p) => p
+ case Dummy344(p) => p
+ case Dummy345(p) => p
+ case Dummy346(p) => p
+ case Dummy347(p) => p
+ case Dummy348(p) => p
+ case Dummy349(p) => p
+ case Dummy350(p) => p
+ case Dummy351(p) => p
+ case Dummy352(p) => p
+ case Dummy353(p) => p
+ case Dummy354(p) => p
+ case Dummy355(p) => p
+ case Dummy356(p) => p
+ case Dummy357(p) => p
+ case Dummy358(p) => p
+ case Dummy359(p) => p
+ case Dummy360(p) => p
+ case Dummy361(p) => p
+ case Dummy362(p) => p
+ case Dummy363(p) => p
+ case Dummy364(p) => p
+ case Dummy365(p) => p
+ case Dummy366(p) => p
+ case Dummy367(p) => p
+ case Dummy368(p) => p
+ case Dummy369(p) => p
+ case Dummy370(p) => p
+ case Dummy371(p) => p
+ case Dummy372(p) => p
+ case Dummy373(p) => p
+ case Dummy374(p) => p
+ case Dummy375(p) => p
+ case Dummy376(p) => p
+ case Dummy377(p) => p
+ case Dummy378(p) => p
+ case Dummy379(p) => p
+ case Dummy380(p) => p
+ case Dummy381(p) => p
+ case Dummy382(p) => p
+ case Dummy383(p) => p
+ case Dummy384(p) => p
+ case Dummy385(p) => p
+ case Dummy386(p) => p
+ case Dummy387(p) => p
+ case Dummy388(p) => p
+ case Dummy389(p) => p
+ case Dummy390(p) => p
+ case Dummy391(p) => p
+ case Dummy392(p) => p
+ case Dummy393(p) => p
+ case Dummy394(p) => p
+ case Dummy395(p) => p
+ case Dummy396(p) => p
+ case Dummy397(p) => p
+ case Dummy398(p) => p
+ case Dummy399(p) => p
+ case Dummy400(p) => p
+ case Dummy401(p) => p
+ case Dummy402(p) => p
+ case Dummy403(p) => p
+ case Dummy404(p) => p
+ case Dummy405(p) => p
+ case Dummy406(p) => p
+ case Dummy407(p) => p
+ case Dummy408(p) => p
+ case Dummy409(p) => p
+ case Dummy410(p) => p
+ case Dummy411(p) => p
+ case Dummy412(p) => p
+ case Dummy413(p) => p
+ case Dummy414(p) => p
+ case Dummy415(p) => p
+ case Dummy416(p) => p
+ case Dummy417(p) => p
+ case Dummy418(p) => p
+ case Dummy419(p) => p
+ case Dummy420(p) => p
+ case Dummy421(p) => p
+ case Dummy422(p) => p
+ case Dummy423(p) => p
+ case Dummy424(p) => p
+ case Dummy425(p) => p
+ case Dummy426(p) => p
+ case Dummy427(p) => p
+ case Dummy428(p) => p
+ case Dummy429(p) => p
+ case Dummy430(p) => p
+ case Dummy431(p) => p
+ case Dummy432(p) => p
+ case Dummy433(p) => p
+ case Dummy434(p) => p
+ case Dummy435(p) => p
+ case Dummy436(p) => p
+ case Dummy437(p) => p
+ case Dummy438(p) => p
+ case Dummy439(p) => p
+ case Dummy440(p) => p
+ case Dummy441(p) => p
+ case Dummy442(p) => p
+ case Dummy443(p) => p
+ case Dummy444(p) => p
+ case Dummy445(p) => p
+ case Dummy446(p) => p
+ case Dummy447(p) => p
+ case Dummy448(p) => p
+ case Dummy449(p) => p
+ case Dummy450(p) => p
+ case Dummy451(p) => p
+ case Dummy452(p) => p
+ case Dummy453(p) => p
+ case Dummy454(p) => p
+ case Dummy455(p) => p
+ case Dummy456(p) => p
+ case Dummy457(p) => p
+ case Dummy458(p) => p
+ case Dummy459(p) => p
+ case Dummy460(p) => p
+ case Dummy461(p) => p
+ case Dummy462(p) => p
+ case Dummy463(p) => p
+ case Dummy464(p) => p
+ case Dummy465(p) => p
+ case Dummy466(p) => p
+ case Dummy467(p) => p
+ case Dummy468(p) => p
+ case Dummy469(p) => p
+ case Dummy470(p) => p
+ case Dummy471(p) => p
+ case Dummy472(p) => p
+ case Dummy473(p) => p
+ case Dummy474(p) => p
+ case Dummy475(p) => p
+ case Dummy476(p) => p
+ case Dummy477(p) => p
+ case Dummy478(p) => p
+ case Dummy479(p) => p
+ case Dummy480(p) => p
+ case Dummy481(p) => p
+ case Dummy482(p) => p
+ case Dummy483(p) => p
+ case Dummy484(p) => p
+ case Dummy485(p) => p
+ case Dummy486(p) => p
+ case Dummy487(p) => p
+ case Dummy488(p) => p
+ case Dummy489(p) => p
+ case Dummy490(p) => p
+ case Dummy491(p) => p
+ case Dummy492(p) => p
+ case Dummy493(p) => p
+ case Dummy494(p) => p
+ case Dummy495(p) => p
+ case Dummy496(p) => p
+ case Dummy497(p) => p
+ case Dummy498(p) => p
+ case Dummy499(p) => p
+ case Dummy500(p) => p
+ case Dummy501(p) => p
+ case Dummy502(p) => p
+ case Dummy503(p) => p
+ case Dummy504(p) => p
+ case Dummy505(p) => p
+ case Dummy506(p) => p
+ case Dummy507(p) => p
+ case Dummy508(p) => p
+ case Dummy509(p) => p
+ case Dummy510(p) => p
+ case Dummy511(p) => p
+ case Dummy512(p) => p
+ case Dummy513(p) => p
+ case Dummy514(p) => p
+ case Dummy515(p) => p
+ case Dummy516(p) => p
+ case Dummy517(p) => p
+ case Dummy518(p) => p
+ case Dummy519(p) => p
+ case Dummy520(p) => p
+ case Dummy521(p) => p
+ case Dummy522(p) => p
+ case Dummy523(p) => p
+ case Dummy524(p) => p
+ case Dummy525(p) => p
+ case Dummy526(p) => p
+ case Dummy527(p) => p
+ case Dummy528(p) => p
+ case Dummy529(p) => p
+ case Dummy530(p) => p
+ case Dummy531(p) => p
+ case Dummy532(p) => p
+ case Dummy533(p) => p
+ case Dummy534(p) => p
+ case Dummy535(p) => p
+ case Dummy536(p) => p
+ case Dummy537(p) => p
+ case Dummy538(p) => p
+ case Dummy539(p) => p
+ case Dummy540(p) => p
+ case Dummy541(p) => p
+ case Dummy542(p) => p
+ case Dummy543(p) => p
+ case Dummy544(p) => p
+ case Dummy545(p) => p
+ case Dummy546(p) => p
+ case Dummy547(p) => p
+ case Dummy548(p) => p
+ case Dummy549(p) => p
+ case Dummy550(p) => p
+ case Dummy551(p) => p
+ case Dummy552(p) => p
+ case Dummy553(p) => p
+ case Dummy554(p) => p
+ case Dummy555(p) => p
+ case Dummy556(p) => p
+ case Dummy557(p) => p
+ case Dummy558(p) => p
+ case Dummy559(p) => p
+ case Dummy560(p) => p
+ case Dummy561(p) => p
+ case Dummy562(p) => p
+ case Dummy563(p) => p
+ case Dummy564(p) => p
+ case Dummy565(p) => p
+ case Dummy566(p) => p
+ case Dummy567(p) => p
+ case Dummy568(p) => p
+ case Dummy569(p) => p
+ case Dummy570(p) => p
+ case Dummy571(p) => p
+ case Dummy572(p) => p
+ case Dummy573(p) => p
+ case Dummy574(p) => p
+ case Dummy575(p) => p
+ case Dummy576(p) => p
+ case Dummy577(p) => p
+ case Dummy578(p) => p
+ case Dummy579(p) => p
+ case Dummy580(p) => p
+ case Dummy581(p) => p
+ case Dummy582(p) => p
+ case Dummy583(p) => p
+ case Dummy584(p) => p
+ case Dummy585(p) => p
+ case Dummy586(p) => p
+ case Dummy587(p) => p
+ case Dummy588(p) => p
+ case Dummy589(p) => p
+ case Dummy590(p) => p
+ case Dummy591(p) => p
+ case Dummy592(p) => p
+ case Dummy593(p) => p
+ case Dummy594(p) => p
+ case Dummy595(p) => p
+ case Dummy596(p) => p
+ case Dummy597(p) => p
+ case Dummy598(p) => p
+ case Dummy599(p) => p
+ case Dummy600(p) => p
+ case Dummy601(p) => p
+ case Dummy602(p) => p
+ case Dummy603(p) => p
+ case Dummy604(p) => p
+ case Dummy605(p) => p
+ case Dummy606(p) => p
+ case Dummy607(p) => p
+ case Dummy608(p) => p
+ case Dummy609(p) => p
+ case Dummy610(p) => p
+ case Dummy611(p) => p
+ case Dummy612(p) => p
+ case Dummy613(p) => p
+ case Dummy614(p) => p
+ case Dummy615(p) => p
+ case Dummy616(p) => p
+ case Dummy617(p) => p
+ case Dummy618(p) => p
+ case Dummy619(p) => p
+ case Dummy620(p) => p
+ case Dummy621(p) => p
+ case Dummy622(p) => p
+ case Dummy623(p) => p
+ case Dummy624(p) => p
+ case Dummy625(p) => p
+ case Dummy626(p) => p
+ case Dummy627(p) => p
+ case Dummy628(p) => p
+ case Dummy629(p) => p
+ case Dummy630(p) => p
+ case Dummy631(p) => p
+ case Dummy632(p) => p
+ case Dummy633(p) => p
+ case Dummy634(p) => p
+ case Dummy635(p) => p
+ case Dummy636(p) => p
+ case Dummy637(p) => p
+ case Dummy638(p) => p
+ case Dummy639(p) => p
+ case Dummy640(p) => p
+ case Dummy641(p) => p
+ case Dummy642(p) => p
+ case Dummy643(p) => p
+ case Dummy644(p) => p
+ case Dummy645(p) => p
+ case Dummy646(p) => p
+ case Dummy647(p) => p
+ case Dummy648(p) => p
+ case Dummy649(p) => p
+ case Dummy650(p) => p
+ case Dummy651(p) => p
+ case Dummy652(p) => p
+ case Dummy653(p) => p
+ case Dummy654(p) => p
+ case Dummy655(p) => p
+ case Dummy656(p) => p
+ case Dummy657(p) => p
+ case Dummy658(p) => p
+ case Dummy659(p) => p
+ case Dummy660(p) => p
+ case Dummy661(p) => p
+ case Dummy662(p) => p
+ case Dummy663(p) => p
+ case Dummy664(p) => p
+ case Dummy665(p) => p
+ case Dummy666(p) => p
+ case Dummy667(p) => p
+ case Dummy668(p) => p
+ case Dummy669(p) => p
+ case Dummy670(p) => p
+ case Dummy671(p) => p
+ case Dummy672(p) => p
+ case Dummy673(p) => p
+ case Dummy674(p) => p
+ case Dummy675(p) => p
+ case Dummy676(p) => p
+ case Dummy677(p) => p
+ case Dummy678(p) => p
+ case Dummy679(p) => p
+ case Dummy680(p) => p
+ case Dummy681(p) => p
+ case Dummy682(p) => p
+ case Dummy683(p) => p
+ case Dummy684(p) => p
+ case Dummy685(p) => p
+ case Dummy686(p) => p
+ case Dummy687(p) => p
+ case Dummy688(p) => p
+ case Dummy689(p) => p
+ case Dummy690(p) => p
+ case Dummy691(p) => p
+ case Dummy692(p) => p
+ case Dummy693(p) => p
+ case Dummy694(p) => p
+ case Dummy695(p) => p
+ case Dummy696(p) => p
+ case Dummy697(p) => p
+ case Dummy698(p) => p
+ case Dummy699(p) => p
+ case Dummy700(p) => p
+ case Dummy701(p) => p
+ case Dummy702(p) => p
+ case Dummy703(p) => p
+ case Dummy704(p) => p
+ case Dummy705(p) => p
+ case Dummy706(p) => p
+ case Dummy707(p) => p
+ case Dummy708(p) => p
+ case Dummy709(p) => p
+ case Dummy710(p) => p
+ case Dummy711(p) => p
+ case Dummy712(p) => p
+ case Dummy713(p) => p
+ case Dummy714(p) => p
+ case Dummy715(p) => p
+ case Dummy716(p) => p
+ case Dummy717(p) => p
+ case Dummy718(p) => p
+ case Dummy719(p) => p
+ case Dummy720(p) => p
+ case Dummy721(p) => p
+ case Dummy722(p) => p
+ case Dummy723(p) => p
+ case Dummy724(p) => p
+ case Dummy725(p) => p
+ case Dummy726(p) => p
+ case Dummy727(p) => p
+ case Dummy728(p) => p
+ case Dummy729(p) => p
+ case Dummy730(p) => p
+ case Dummy731(p) => p
+ case Dummy732(p) => p
+ case Dummy733(p) => p
+ case Dummy734(p) => p
+ case Dummy735(p) => p
+ case Dummy736(p) => p
+ case Dummy737(p) => p
+ case Dummy738(p) => p
+ case Dummy739(p) => p
+ case Dummy740(p) => p
+ case Dummy741(p) => p
+ case Dummy742(p) => p
+ case Dummy743(p) => p
+ case Dummy744(p) => p
+ case Dummy745(p) => p
+ case Dummy746(p) => p
+ case Dummy747(p) => p
+ case Dummy748(p) => p
+ case Dummy749(p) => p
+ case Dummy750(p) => p
+ case Dummy751(p) => p
+ case Dummy752(p) => p
+ case Dummy753(p) => p
+ case Dummy754(p) => p
+ case Dummy755(p) => p
+ case Dummy756(p) => p
+ case Dummy757(p) => p
+ case Dummy758(p) => p
+ case Dummy759(p) => p
+ case Dummy760(p) => p
+ case Dummy761(p) => p
+ case Dummy762(p) => p
+ case Dummy763(p) => p
+ case Dummy764(p) => p
+ case Dummy765(p) => p
+ case Dummy766(p) => p
+ case Dummy767(p) => p
+ case Dummy768(p) => p
+ case Dummy769(p) => p
+ case Dummy770(p) => p
+ case Dummy771(p) => p
+ case Dummy772(p) => p
+ case Dummy773(p) => p
+ case Dummy774(p) => p
+ case Dummy775(p) => p
+ case Dummy776(p) => p
+ case Dummy777(p) => p
+ case Dummy778(p) => p
+ case Dummy779(p) => p
+ case Dummy780(p) => p
+ case Dummy781(p) => p
+ case Dummy782(p) => p
+ case Dummy783(p) => p
+ case Dummy784(p) => p
+ case Dummy785(p) => p
+ case Dummy786(p) => p
+ case Dummy787(p) => p
+ case Dummy788(p) => p
+ case Dummy789(p) => p
+ case Dummy790(p) => p
+ case Dummy791(p) => p
+ case Dummy792(p) => p
+ case Dummy793(p) => p
+ case Dummy794(p) => p
+ case Dummy795(p) => p
+ case Dummy796(p) => p
+ case Dummy797(p) => p
+ case Dummy798(p) => p
+ case Dummy799(p) => p
+ case Dummy800(p) => p
+ case Dummy801(p) => p
+ case Dummy802(p) => p
+ case Dummy803(p) => p
+ case Dummy804(p) => p
+ case Dummy805(p) => p
+ case Dummy806(p) => p
+ case Dummy807(p) => p
+ case Dummy808(p) => p
+ case Dummy809(p) => p
+ case Dummy810(p) => p
+ case Dummy811(p) => p
+ case Dummy812(p) => p
+ case Dummy813(p) => p
+ case Dummy814(p) => p
+ case Dummy815(p) => p
+ case Dummy816(p) => p
+ case Dummy817(p) => p
+ case Dummy818(p) => p
+ case Dummy819(p) => p
+ case Dummy820(p) => p
+ case Dummy821(p) => p
+ case Dummy822(p) => p
+ case Dummy823(p) => p
+ case Dummy824(p) => p
+ case Dummy825(p) => p
+ case Dummy826(p) => p
+ case Dummy827(p) => p
+ case Dummy828(p) => p
+ case Dummy829(p) => p
+ case Dummy830(p) => p
+ case Dummy831(p) => p
+ case Dummy832(p) => p
+ case Dummy833(p) => p
+ case Dummy834(p) => p
+ case Dummy835(p) => p
+ case Dummy836(p) => p
+ case Dummy837(p) => p
+ case Dummy838(p) => p
+ case Dummy839(p) => p
+ case Dummy840(p) => p
+ case Dummy841(p) => p
+ case Dummy842(p) => p
+ case Dummy843(p) => p
+ case Dummy844(p) => p
+ case Dummy845(p) => p
+ case Dummy846(p) => p
+ case Dummy847(p) => p
+ case Dummy848(p) => p
+ case Dummy849(p) => p
+ case Dummy850(p) => p
+ case Dummy851(p) => p
+ case Dummy852(p) => p
+ case Dummy853(p) => p
+ case Dummy854(p) => p
+ case Dummy855(p) => p
+ case Dummy856(p) => p
+ case Dummy857(p) => p
+ case Dummy858(p) => p
+ case Dummy859(p) => p
+ case Dummy860(p) => p
+ case Dummy861(p) => p
+ case Dummy862(p) => p
+ case Dummy863(p) => p
+ case Dummy864(p) => p
+ case Dummy865(p) => p
+ case Dummy866(p) => p
+ case Dummy867(p) => p
+ case Dummy868(p) => p
+ case Dummy869(p) => p
+ case Dummy870(p) => p
+ case Dummy871(p) => p
+ case Dummy872(p) => p
+ case Dummy873(p) => p
+ case Dummy874(p) => p
+ case Dummy875(p) => p
+ case Dummy876(p) => p
+ case Dummy877(p) => p
+ case Dummy878(p) => p
+ case Dummy879(p) => p
+ case Dummy880(p) => p
+ case Dummy881(p) => p
+ case Dummy882(p) => p
+ case Dummy883(p) => p
+ case Dummy884(p) => p
+ case Dummy885(p) => p
+ case Dummy886(p) => p
+ case Dummy887(p) => p
+ case Dummy888(p) => p
+ case Dummy889(p) => p
+ case Dummy890(p) => p
+ case Dummy891(p) => p
+ case Dummy892(p) => p
+ case Dummy893(p) => p
+ case Dummy894(p) => p
+ case Dummy895(p) => p
+ case Dummy896(p) => p
+ case Dummy897(p) => p
+ case Dummy898(p) => p
+ case Dummy899(p) => p
+ case Dummy900(p) => p
+ case Dummy901(p) => p
+ case Dummy902(p) => p
+ case Dummy903(p) => p
+ case Dummy904(p) => p
+ case Dummy905(p) => p
+ case Dummy906(p) => p
+ case Dummy907(p) => p
+ case Dummy908(p) => p
+ case Dummy909(p) => p
+ case Dummy910(p) => p
+ case Dummy911(p) => p
+ case Dummy912(p) => p
+ case Dummy913(p) => p
+ case Dummy914(p) => p
+ case Dummy915(p) => p
+ case Dummy916(p) => p
+ case Dummy917(p) => p
+ case Dummy918(p) => p
+ case Dummy919(p) => p
+ case Dummy920(p) => p
+ case Dummy921(p) => p
+ case Dummy922(p) => p
+ case Dummy923(p) => p
+ case Dummy924(p) => p
+ case Dummy925(p) => p
+ case Dummy926(p) => p
+ case Dummy927(p) => p
+ case Dummy928(p) => p
+ case Dummy929(p) => p
+ case Dummy930(p) => p
+ case Dummy931(p) => p
+ case Dummy932(p) => p
+ case Dummy933(p) => p
+ case Dummy934(p) => p
+ case Dummy935(p) => p
+ case Dummy936(p) => p
+ case Dummy937(p) => p
+ case Dummy938(p) => p
+ case Dummy939(p) => p
+ case Dummy940(p) => p
+ case Dummy941(p) => p
+ case Dummy942(p) => p
+ case Dummy943(p) => p
+ case Dummy944(p) => p
+ case Dummy945(p) => p
+ case Dummy946(p) => p
+ case Dummy947(p) => p
+ case Dummy948(p) => p
+ case Dummy949(p) => p
+ case Dummy950(p) => p
+ case Dummy951(p) => p
+ case Dummy952(p) => p
+ case Dummy953(p) => p
+ case Dummy954(p) => p
+ case Dummy955(p) => p
+ case Dummy956(p) => p
+ case Dummy957(p) => p
+ case Dummy958(p) => p
+ case Dummy959(p) => p
+ case Dummy960(p) => p
+ case Dummy961(p) => p
+ case Dummy962(p) => p
+ case Dummy963(p) => p
+ case Dummy964(p) => p
+ case Dummy965(p) => p
+ case Dummy966(p) => p
+ case Dummy967(p) => p
+ case Dummy968(p) => p
+ case Dummy969(p) => p
+ case Dummy970(p) => p
+ case Dummy971(p) => p
+ case Dummy972(p) => p
+ case Dummy973(p) => p
+ case Dummy974(p) => p
+ case Dummy975(p) => p
+ case Dummy976(p) => p
+ case Dummy977(p) => p
+ case Dummy978(p) => p
+ case Dummy979(p) => p
+ case Dummy980(p) => p
+ case Dummy981(p) => p
+ case Dummy982(p) => p
+ case Dummy983(p) => p
+ case Dummy984(p) => p
+ case Dummy985(p) => p
+ case Dummy986(p) => p
+ case Dummy987(p) => p
+ case Dummy988(p) => p
+ case Dummy989(p) => p
+ case Dummy990(p) => p
+ case Dummy991(p) => p
+ case Dummy992(p) => p
+ case Dummy993(p) => p
+ case Dummy994(p) => p
+ case Dummy995(p) => p
+ case Dummy996(p) => p
+ case Dummy997(p) => p
+ case Dummy998(p) => p
+ case Dummy999(p) => p
+ case Dummy1000(p) => p
+ case Dummy1001(p) => p
+ case Dummy1002(p) => p
+ case Dummy1003(p) => p
+ case Dummy1004(p) => p
+ case Dummy1005(p) => p
+ case Dummy1006(p) => p
+ case Dummy1007(p) => p
+ case Dummy1008(p) => p
+ case Dummy1009(p) => p
+ case Dummy1010(p) => p
+ case Dummy1011(p) => p
+ case Dummy1012(p) => p
+ case Dummy1013(p) => p
+ case Dummy1014(p) => p
+ case Dummy1015(p) => p
+ case Dummy1016(p) => p
+ case Dummy1017(p) => p
+ case Dummy1018(p) => p
+ case Dummy1019(p) => p
+ case Dummy1020(p) => p
+ case Dummy1021(p) => p
+ case Dummy1022(p) => p
+ case Dummy1023(p) => p
+ case Dummy1024(p) => p
+ case Dummy1025(p) => p
+ case Dummy1026(p) => p
+ case Dummy1027(p) => p
+ case Dummy1028(p) => p
+ case Dummy1029(p) => p
+ case Dummy1030(p) => p
+ case Dummy1031(p) => p
+ case Dummy1032(p) => p
+ case Dummy1033(p) => p
+ case Dummy1034(p) => p
+ case Dummy1035(p) => p
+ case Dummy1036(p) => p
+ case Dummy1037(p) => p
+ case Dummy1038(p) => p
+ case Dummy1039(p) => p
+ case Dummy1040(p) => p
+ case Dummy1041(p) => p
+ case Dummy1042(p) => p
+ case Dummy1043(p) => p
+ case Dummy1044(p) => p
+ case Dummy1045(p) => p
+ case Dummy1046(p) => p
+ case Dummy1047(p) => p
+ case Dummy1048(p) => p
+ case Dummy1049(p) => p
+ case Dummy1050(p) => p
+ case Dummy1051(p) => p
+ case Dummy1052(p) => p
+ case Dummy1053(p) => p
+ case Dummy1054(p) => p
+ case Dummy1055(p) => p
+ case Dummy1056(p) => p
+ case Dummy1057(p) => p
+ case Dummy1058(p) => p
+ case Dummy1059(p) => p
+ case Dummy1060(p) => p
+ case Dummy1061(p) => p
+ case Dummy1062(p) => p
+ case Dummy1063(p) => p
+ case Dummy1064(p) => p
+ case Dummy1065(p) => p
+ case Dummy1066(p) => p
+ case Dummy1067(p) => p
+ case Dummy1068(p) => p
+ case Dummy1069(p) => p
+ case Dummy1070(p) => p
+ case Dummy1071(p) => p
+ case Dummy1072(p) => p
+ case Dummy1073(p) => p
+ case Dummy1074(p) => p
+ case Dummy1075(p) => p
+ case Dummy1076(p) => p
+ case Dummy1077(p) => p
+ case Dummy1078(p) => p
+ case Dummy1079(p) => p
+ case Dummy1080(p) => p
+ case Dummy1081(p) => p
+ case Dummy1082(p) => p
+ case Dummy1083(p) => p
+ case Dummy1084(p) => p
+ case Dummy1085(p) => p
+ case Dummy1086(p) => p
+ case Dummy1087(p) => p
+ case Dummy1088(p) => p
+ case Dummy1089(p) => p
+ case Dummy1090(p) => p
+ case Dummy1091(p) => p
+ case Dummy1092(p) => p
+ case Dummy1093(p) => p
+ case Dummy1094(p) => p
+ case Dummy1095(p) => p
+ case Dummy1096(p) => p
+ case Dummy1097(p) => p
+ case Dummy1098(p) => p
+ case Dummy1099(p) => p
+ case Dummy1100(p) => p
+ case Dummy1101(p) => p
+ case Dummy1102(p) => p
+ case Dummy1103(p) => p
+ case Dummy1104(p) => p
+ case Dummy1105(p) => p
+ case Dummy1106(p) => p
+ case Dummy1107(p) => p
+ case Dummy1108(p) => p
+ case Dummy1109(p) => p
+ case Dummy1110(p) => p
+ case Dummy1111(p) => p
+ case Dummy1112(p) => p
+ case Dummy1113(p) => p
+ case Dummy1114(p) => p
+ case Dummy1115(p) => p
+ case Dummy1116(p) => p
+ case Dummy1117(p) => p
+ case Dummy1118(p) => p
+ case Dummy1119(p) => p
+ case Dummy1120(p) => p
+ case Dummy1121(p) => p
+ case Dummy1122(p) => p
+ case Dummy1123(p) => p
+ case Dummy1124(p) => p
+ case Dummy1125(p) => p
+ case Dummy1126(p) => p
+ case Dummy1127(p) => p
+ case Dummy1128(p) => p
+ case Dummy1129(p) => p
+ case Dummy1130(p) => p
+ case Dummy1131(p) => p
+ case Dummy1132(p) => p
+ case Dummy1133(p) => p
+ case Dummy1134(p) => p
+ case Dummy1135(p) => p
+ case Dummy1136(p) => p
+ case Dummy1137(p) => p
+ case Dummy1138(p) => p
+ case Dummy1139(p) => p
+ case Dummy1140(p) => p
+ case Dummy1141(p) => p
+ case Dummy1142(p) => p
+ case Dummy1143(p) => p
+ case Dummy1144(p) => p
+ case Dummy1145(p) => p
+ case Dummy1146(p) => p
+ case Dummy1147(p) => p
+ case Dummy1148(p) => p
+ case Dummy1149(p) => p
+ case Dummy1150(p) => p
+ case Dummy1151(p) => p
+ case Dummy1152(p) => p
+ case Dummy1153(p) => p
+ case Dummy1154(p) => p
+ case Dummy1155(p) => p
+ case Dummy1156(p) => p
+ case Dummy1157(p) => p
+ case Dummy1158(p) => p
+ case Dummy1159(p) => p
+ case Dummy1160(p) => p
+ case Dummy1161(p) => p
+ case Dummy1162(p) => p
+ case Dummy1163(p) => p
+ case Dummy1164(p) => p
+ case Dummy1165(p) => p
+ case Dummy1166(p) => p
+ case Dummy1167(p) => p
+ case Dummy1168(p) => p
+ case Dummy1169(p) => p
+ case Dummy1170(p) => p
+ case Dummy1171(p) => p
+ case Dummy1172(p) => p
+ case Dummy1173(p) => p
+ case Dummy1174(p) => p
+ case Dummy1175(p) => p
+ case Dummy1176(p) => p
+ case Dummy1177(p) => p
+ case Dummy1178(p) => p
+ case Dummy1179(p) => p
+ case Dummy1180(p) => p
+ case Dummy1181(p) => p
+ case Dummy1182(p) => p
+ case Dummy1183(p) => p
+ case Dummy1184(p) => p
+ case Dummy1185(p) => p
+ case Dummy1186(p) => p
+ case Dummy1187(p) => p
+ case Dummy1188(p) => p
+ case Dummy1189(p) => p
+ case Dummy1190(p) => p
+ case Dummy1191(p) => p
+ case Dummy1192(p) => p
+ case Dummy1193(p) => p
+ case Dummy1194(p) => p
+ case Dummy1195(p) => p
+ case Dummy1196(p) => p
+ case Dummy1197(p) => p
+ case Dummy1198(p) => p
+ case Dummy1199(p) => p
+ case Dummy1200(p) => p
+ case Dummy1201(p) => p
+ case Dummy1202(p) => p
+ case Dummy1203(p) => p
+ case Dummy1204(p) => p
+ case Dummy1205(p) => p
+ case Dummy1206(p) => p
+ case Dummy1207(p) => p
+ case Dummy1208(p) => p
+ case Dummy1209(p) => p
+ case Dummy1210(p) => p
+ case Dummy1211(p) => p
+ case Dummy1212(p) => p
+ case Dummy1213(p) => p
+ case Dummy1214(p) => p
+ case Dummy1215(p) => p
+ case Dummy1216(p) => p
+ case Dummy1217(p) => p
+ case Dummy1218(p) => p
+ case Dummy1219(p) => p
+ case Dummy1220(p) => p
+ case Dummy1221(p) => p
+ case Dummy1222(p) => p
+ case Dummy1223(p) => p
+ case Dummy1224(p) => p
+ case Dummy1225(p) => p
+ case Dummy1226(p) => p
+ case Dummy1227(p) => p
+ case Dummy1228(p) => p
+ case Dummy1229(p) => p
+ case Dummy1230(p) => p
+ case Dummy1231(p) => p
+ case Dummy1232(p) => p
+ case Dummy1233(p) => p
+ case Dummy1234(p) => p
+ case Dummy1235(p) => p
+ case Dummy1236(p) => p
+ case Dummy1237(p) => p
+ case Dummy1238(p) => p
+ case Dummy1239(p) => p
+ case Dummy1240(p) => p
+ case Dummy1241(p) => p
+ case Dummy1242(p) => p
+ case Dummy1243(p) => p
+ case Dummy1244(p) => p
+ case Dummy1245(p) => p
+ case Dummy1246(p) => p
+ case Dummy1247(p) => p
+ case Dummy1248(p) => p
+ case Dummy1249(p) => p
+ case Dummy1250(p) => p
+ case Dummy1251(p) => p
+ case Dummy1252(p) => p
+ case Dummy1253(p) => p
+ case Dummy1254(p) => p
+ case Dummy1255(p) => p
+ case Dummy1256(p) => p
+ case Dummy1257(p) => p
+ case Dummy1258(p) => p
+ case Dummy1259(p) => p
+ case Dummy1260(p) => p
+ case Dummy1261(p) => p
+ case Dummy1262(p) => p
+ case Dummy1263(p) => p
+ case Dummy1264(p) => p
+ case Dummy1265(p) => p
+ case Dummy1266(p) => p
+ case Dummy1267(p) => p
+ case Dummy1268(p) => p
+ case Dummy1269(p) => p
+ case Dummy1270(p) => p
+ case Dummy1271(p) => p
+ case Dummy1272(p) => p
+ case Dummy1273(p) => p
+ case Dummy1274(p) => p
+ case Dummy1275(p) => p
+ case Dummy1276(p) => p
+ case Dummy1277(p) => p
+ case Dummy1278(p) => p
+ case Dummy1279(p) => p
+ case Dummy1280(p) => p
+ case Dummy1281(p) => p
+ case Dummy1282(p) => p
+ case Dummy1283(p) => p
+ case Dummy1284(p) => p
+ case Dummy1285(p) => p
+ case Dummy1286(p) => p
+ case Dummy1287(p) => p
+ case Dummy1288(p) => p
+ case Dummy1289(p) => p
+ case Dummy1290(p) => p
+ case Dummy1291(p) => p
+ case Dummy1292(p) => p
+ case Dummy1293(p) => p
+ case Dummy1294(p) => p
+ case Dummy1295(p) => p
+ case Dummy1296(p) => p
+ case Dummy1297(p) => p
+ case Dummy1298(p) => p
+ case Dummy1299(p) => p
+ case Dummy1300(p) => p
+ case Dummy1301(p) => p
+ case Dummy1302(p) => p
+ case Dummy1303(p) => p
+ case Dummy1304(p) => p
+ case Dummy1305(p) => p
+ case Dummy1306(p) => p
+ case Dummy1307(p) => p
+ case Dummy1308(p) => p
+ case Dummy1309(p) => p
+ case Dummy1310(p) => p
+ case Dummy1311(p) => p
+ case Dummy1312(p) => p
+ case Dummy1313(p) => p
+ case Dummy1314(p) => p
+ case Dummy1315(p) => p
+ case Dummy1316(p) => p
+ case Dummy1317(p) => p
+ case Dummy1318(p) => p
+ case Dummy1319(p) => p
+ case Dummy1320(p) => p
+ case Dummy1321(p) => p
+ case Dummy1322(p) => p
+ case Dummy1323(p) => p
+ case Dummy1324(p) => p
+ case Dummy1325(p) => p
+ case Dummy1326(p) => p
+ case Dummy1327(p) => p
+ case Dummy1328(p) => p
+ case Dummy1329(p) => p
+ case Dummy1330(p) => p
+ case Dummy1331(p) => p
+ case Dummy1332(p) => p
+ case Dummy1333(p) => p
+ case Dummy1334(p) => p
+ case Dummy1335(p) => p
+ case Dummy1336(p) => p
+ case Dummy1337(p) => p
+ case Dummy1338(p) => p
+ case Dummy1339(p) => p
+ case Dummy1340(p) => p
+ case Dummy1341(p) => p
+ case Dummy1342(p) => p
+ case Dummy1343(p) => p
+ case Dummy1344(p) => p
+ case Dummy1345(p) => p
+ case Dummy1346(p) => p
+ case Dummy1347(p) => p
+ case Dummy1348(p) => p
+ case Dummy1349(p) => p
+ case Dummy1350(p) => p
+ case Dummy1351(p) => p
+ case Dummy1352(p) => p
+ case Dummy1353(p) => p
+ case Dummy1354(p) => p
+ case Dummy1355(p) => p
+ case Dummy1356(p) => p
+ case Dummy1357(p) => p
+ case Dummy1358(p) => p
+ case Dummy1359(p) => p
+ case Dummy1360(p) => p
+ case Dummy1361(p) => p
+ case Dummy1362(p) => p
+ case Dummy1363(p) => p
+ case Dummy1364(p) => p
+ case Dummy1365(p) => p
+ case Dummy1366(p) => p
+ case Dummy1367(p) => p
+ case Dummy1368(p) => p
+ case Dummy1369(p) => p
+ case Dummy1370(p) => p
+ case Dummy1371(p) => p
+ case Dummy1372(p) => p
+ case Dummy1373(p) => p
+ case Dummy1374(p) => p
+ case Dummy1375(p) => p
+ case Dummy1376(p) => p
+ case Dummy1377(p) => p
+ case Dummy1378(p) => p
+ case Dummy1379(p) => p
+ case Dummy1380(p) => p
+ case Dummy1381(p) => p
+ case Dummy1382(p) => p
+ case Dummy1383(p) => p
+ case Dummy1384(p) => p
+ case Dummy1385(p) => p
+ case Dummy1386(p) => p
+ case Dummy1387(p) => p
+ case Dummy1388(p) => p
+ case Dummy1389(p) => p
+ case Dummy1390(p) => p
+ case Dummy1391(p) => p
+ case Dummy1392(p) => p
+ case Dummy1393(p) => p
+ case Dummy1394(p) => p
+ case Dummy1395(p) => p
+ case Dummy1396(p) => p
+ case Dummy1397(p) => p
+ case Dummy1398(p) => p
+ case Dummy1399(p) => p
+ case Dummy1400(p) => p
+ case Dummy1401(p) => p
+ case Dummy1402(p) => p
+ case Dummy1403(p) => p
+ case Dummy1404(p) => p
+ case Dummy1405(p) => p
+ case Dummy1406(p) => p
+ case Dummy1407(p) => p
+ case Dummy1408(p) => p
+ case Dummy1409(p) => p
+ case Dummy1410(p) => p
+ case Dummy1411(p) => p
+ case Dummy1412(p) => p
+ case Dummy1413(p) => p
+ case Dummy1414(p) => p
+ case Dummy1415(p) => p
+ case Dummy1416(p) => p
+ case Dummy1417(p) => p
+ case Dummy1418(p) => p
+ case Dummy1419(p) => p
+ case Dummy1420(p) => p
+ case Dummy1421(p) => p
+ case Dummy1422(p) => p
+ case Dummy1423(p) => p
+ case Dummy1424(p) => p
+ case Dummy1425(p) => p
+ case Dummy1426(p) => p
+ case Dummy1427(p) => p
+ case Dummy1428(p) => p
+ case Dummy1429(p) => p
+ case Dummy1430(p) => p
+ case Dummy1431(p) => p
+ case Dummy1432(p) => p
+ case Dummy1433(p) => p
+ case Dummy1434(p) => p
+ case Dummy1435(p) => p
+ case Dummy1436(p) => p
+ case Dummy1437(p) => p
+ case Dummy1438(p) => p
+ case Dummy1439(p) => p
+ case Dummy1440(p) => p
+ case Dummy1441(p) => p
+ case Dummy1442(p) => p
+ case Dummy1443(p) => p
+ case Dummy1444(p) => p
+ case Dummy1445(p) => p
+ case Dummy1446(p) => p
+ case Dummy1447(p) => p
+ case Dummy1448(p) => p
+ case Dummy1449(p) => p
+ case Dummy1450(p) => p
+ case Dummy1451(p) => p
+ case Dummy1452(p) => p
+ case Dummy1453(p) => p
+ case Dummy1454(p) => p
+ case Dummy1455(p) => p
+ case Dummy1456(p) => p
+ case Dummy1457(p) => p
+ case Dummy1458(p) => p
+ case Dummy1459(p) => p
+ case Dummy1460(p) => p
+ case Dummy1461(p) => p
+ case Dummy1462(p) => p
+ case Dummy1463(p) => p
+ case Dummy1464(p) => p
+ case Dummy1465(p) => p
+ case Dummy1466(p) => p
+ case Dummy1467(p) => p
+ case Dummy1468(p) => p
+ case Dummy1469(p) => p
+ case Dummy1470(p) => p
+ case Dummy1471(p) => p
+ case Dummy1472(p) => p
+ case Dummy1473(p) => p
+ case Dummy1474(p) => p
+ case Dummy1475(p) => p
+ case Dummy1476(p) => p
+ case Dummy1477(p) => p
+ case Dummy1478(p) => p
+ case Dummy1479(p) => p
+ case Dummy1480(p) => p
+ case Dummy1481(p) => p
+ case Dummy1482(p) => p
+ case Dummy1483(p) => p
+ case Dummy1484(p) => p
+ case Dummy1485(p) => p
+ case Dummy1486(p) => p
+ case Dummy1487(p) => p
+ case Dummy1488(p) => p
+ case Dummy1489(p) => p
+ case Dummy1490(p) => p
+ case Dummy1491(p) => p
+ case Dummy1492(p) => p
+ case Dummy1493(p) => p
+ case Dummy1494(p) => p
+ case Dummy1495(p) => p
+ case Dummy1496(p) => p
+ case Dummy1497(p) => p
+ case Dummy1498(p) => p
+ case Dummy1499(p) => p
+ case Dummy1500(p) => p
+ case Dummy1501(p) => p
+ case Dummy1502(p) => p
+ case Dummy1503(p) => p
+ case Dummy1504(p) => p
+ case Dummy1505(p) => p
+ case Dummy1506(p) => p
+ case Dummy1507(p) => p
+ case Dummy1508(p) => p
+ case Dummy1509(p) => p
+ case Dummy1510(p) => p
+ case Dummy1511(p) => p
+ case Dummy1512(p) => p
+ case Dummy1513(p) => p
+ case Dummy1514(p) => p
+ case Dummy1515(p) => p
+ case Dummy1516(p) => p
+ case Dummy1517(p) => p
+ case Dummy1518(p) => p
+ case Dummy1519(p) => p
+ case Dummy1520(p) => p
+ case Dummy1521(p) => p
+ case Dummy1522(p) => p
+ case Dummy1523(p) => p
+ case Dummy1524(p) => p
+ case Dummy1525(p) => p
+ case Dummy1526(p) => p
+ case Dummy1527(p) => p
+ case Dummy1528(p) => p
+ case Dummy1529(p) => p
+ case Dummy1530(p) => p
+ case Dummy1531(p) => p
+ case Dummy1532(p) => p
+ case Dummy1533(p) => p
+ case Dummy1534(p) => p
+ case Dummy1535(p) => p
+ case Dummy1536(p) => p
+ case Dummy1537(p) => p
+ case Dummy1538(p) => p
+ case Dummy1539(p) => p
+ case Dummy1540(p) => p
+ case Dummy1541(p) => p
+ case Dummy1542(p) => p
+ case Dummy1543(p) => p
+ case Dummy1544(p) => p
+ case Dummy1545(p) => p
+ case Dummy1546(p) => p
+ case Dummy1547(p) => p
+ case Dummy1548(p) => p
+ case Dummy1549(p) => p
+ case Dummy1550(p) => p
+ case Dummy1551(p) => p
+ case Dummy1552(p) => p
+ case Dummy1553(p) => p
+ case Dummy1554(p) => p
+ case Dummy1555(p) => p
+ case Dummy1556(p) => p
+ case Dummy1557(p) => p
+ case Dummy1558(p) => p
+ case Dummy1559(p) => p
+ case Dummy1560(p) => p
+ case Dummy1561(p) => p
+ case Dummy1562(p) => p
+ case Dummy1563(p) => p
+ case Dummy1564(p) => p
+ case Dummy1565(p) => p
+ case Dummy1566(p) => p
+ case Dummy1567(p) => p
+ case Dummy1568(p) => p
+ case Dummy1569(p) => p
+ case Dummy1570(p) => p
+ case Dummy1571(p) => p
+ case Dummy1572(p) => p
+ case Dummy1573(p) => p
+ case Dummy1574(p) => p
+ case Dummy1575(p) => p
+ case Dummy1576(p) => p
+ case Dummy1577(p) => p
+ case Dummy1578(p) => p
+ case Dummy1579(p) => p
+ case Dummy1580(p) => p
+ case Dummy1581(p) => p
+ case Dummy1582(p) => p
+ case Dummy1583(p) => p
+ case Dummy1584(p) => p
+ case Dummy1585(p) => p
+ case Dummy1586(p) => p
+ case Dummy1587(p) => p
+ case Dummy1588(p) => p
+ case Dummy1589(p) => p
+ case Dummy1590(p) => p
+ case Dummy1591(p) => p
+ case Dummy1592(p) => p
+ case Dummy1593(p) => p
+ case Dummy1594(p) => p
+ case Dummy1595(p) => p
+ case Dummy1596(p) => p
+ case Dummy1597(p) => p
+ case Dummy1598(p) => p
+ case Dummy1599(p) => p
+ case Dummy1600(p) => p
+ case Dummy1601(p) => p
+ case Dummy1602(p) => p
+ case Dummy1603(p) => p
+ case Dummy1604(p) => p
+ case Dummy1605(p) => p
+ case Dummy1606(p) => p
+ case Dummy1607(p) => p
+ case Dummy1608(p) => p
+ case Dummy1609(p) => p
+ case Dummy1610(p) => p
+ case Dummy1611(p) => p
+ case Dummy1612(p) => p
+ case Dummy1613(p) => p
+ case Dummy1614(p) => p
+ case Dummy1615(p) => p
+ case Dummy1616(p) => p
+ case Dummy1617(p) => p
+ case Dummy1618(p) => p
+ case Dummy1619(p) => p
+ case Dummy1620(p) => p
+ case Dummy1621(p) => p
+ case Dummy1622(p) => p
+ case Dummy1623(p) => p
+ case Dummy1624(p) => p
+ case Dummy1625(p) => p
+ case Dummy1626(p) => p
+ case Dummy1627(p) => p
+ case Dummy1628(p) => p
+ case Dummy1629(p) => p
+ case Dummy1630(p) => p
+ case Dummy1631(p) => p
+ case Dummy1632(p) => p
+ case Dummy1633(p) => p
+ case Dummy1634(p) => p
+ case Dummy1635(p) => p
+ case Dummy1636(p) => p
+ case Dummy1637(p) => p
+ case Dummy1638(p) => p
+ case Dummy1639(p) => p
+ case Dummy1640(p) => p
+ case Dummy1641(p) => p
+ case Dummy1642(p) => p
+ case Dummy1643(p) => p
+ case Dummy1644(p) => p
+ case Dummy1645(p) => p
+ case Dummy1646(p) => p
+ case Dummy1647(p) => p
+ case Dummy1648(p) => p
+ case Dummy1649(p) => p
+ case Dummy1650(p) => p
+ case Dummy1651(p) => p
+ case Dummy1652(p) => p
+ case Dummy1653(p) => p
+ case Dummy1654(p) => p
+ case Dummy1655(p) => p
+ case Dummy1656(p) => p
+ case Dummy1657(p) => p
+ case Dummy1658(p) => p
+ case Dummy1659(p) => p
+ case Dummy1660(p) => p
+ case Dummy1661(p) => p
+ case Dummy1662(p) => p
+ case Dummy1663(p) => p
+ case Dummy1664(p) => p
+ case Dummy1665(p) => p
+ case Dummy1666(p) => p
+ case Dummy1667(p) => p
+ case Dummy1668(p) => p
+ case Dummy1669(p) => p
+ case Dummy1670(p) => p
+ case Dummy1671(p) => p
+ case Dummy1672(p) => p
+ case Dummy1673(p) => p
+ case Dummy1674(p) => p
+ case Dummy1675(p) => p
+ case Dummy1676(p) => p
+ case Dummy1677(p) => p
+ case Dummy1678(p) => p
+ case Dummy1679(p) => p
+ case Dummy1680(p) => p
+ case Dummy1681(p) => p
+ case Dummy1682(p) => p
+ case Dummy1683(p) => p
+ case Dummy1684(p) => p
+ case Dummy1685(p) => p
+ case Dummy1686(p) => p
+ case Dummy1687(p) => p
+ case Dummy1688(p) => p
+ case Dummy1689(p) => p
+ case Dummy1690(p) => p
+ case Dummy1691(p) => p
+ case Dummy1692(p) => p
+ case Dummy1693(p) => p
+ case Dummy1694(p) => p
+ case Dummy1695(p) => p
+ case Dummy1696(p) => p
+ case Dummy1697(p) => p
+ case Dummy1698(p) => p
+ case Dummy1699(p) => p
+ case Dummy1700(p) => p
+ case Dummy1701(p) => p
+ case Dummy1702(p) => p
+ case Dummy1703(p) => p
+ case Dummy1704(p) => p
+ case Dummy1705(p) => p
+ case Dummy1706(p) => p
+ case Dummy1707(p) => p
+ case Dummy1708(p) => p
+ case Dummy1709(p) => p
+ case Dummy1710(p) => p
+ case Dummy1711(p) => p
+ case Dummy1712(p) => p
+ case Dummy1713(p) => p
+ case Dummy1714(p) => p
+ case Dummy1715(p) => p
+ case Dummy1716(p) => p
+ case Dummy1717(p) => p
+ case Dummy1718(p) => p
+ case Dummy1719(p) => p
+ case Dummy1720(p) => p
+ case Dummy1721(p) => p
+ case Dummy1722(p) => p
+ case Dummy1723(p) => p
+ case Dummy1724(p) => p
+ case Dummy1725(p) => p
+ case Dummy1726(p) => p
+ case Dummy1727(p) => p
+ case Dummy1728(p) => p
+ case Dummy1729(p) => p
+ case Dummy1730(p) => p
+ case Dummy1731(p) => p
+ case Dummy1732(p) => p
+ case Dummy1733(p) => p
+ case Dummy1734(p) => p
+ case Dummy1735(p) => p
+ case Dummy1736(p) => p
+ case Dummy1737(p) => p
+ case Dummy1738(p) => p
+ case Dummy1739(p) => p
+ case Dummy1740(p) => p
+ case Dummy1741(p) => p
+ case Dummy1742(p) => p
+ case Dummy1743(p) => p
+ case Dummy1744(p) => p
+ case Dummy1745(p) => p
+ case Dummy1746(p) => p
+ case Dummy1747(p) => p
+ case Dummy1748(p) => p
+ case Dummy1749(p) => p
+ case Dummy1750(p) => p
+ case Dummy1751(p) => p
+ case Dummy1752(p) => p
+ case Dummy1753(p) => p
+ case Dummy1754(p) => p
+ case Dummy1755(p) => p
+ case Dummy1756(p) => p
+ case Dummy1757(p) => p
+ case Dummy1758(p) => p
+ case Dummy1759(p) => p
+ case Dummy1760(p) => p
+ case Dummy1761(p) => p
+ case Dummy1762(p) => p
+ case Dummy1763(p) => p
+ case Dummy1764(p) => p
+ case Dummy1765(p) => p
+ case Dummy1766(p) => p
+ case Dummy1767(p) => p
+ case Dummy1768(p) => p
+ case Dummy1769(p) => p
+ case Dummy1770(p) => p
+ case Dummy1771(p) => p
+ case Dummy1772(p) => p
+ case Dummy1773(p) => p
+ case Dummy1774(p) => p
+ case Dummy1775(p) => p
+ case Dummy1776(p) => p
+ case Dummy1777(p) => p
+ case Dummy1778(p) => p
+ case Dummy1779(p) => p
+ case Dummy1780(p) => p
+ case Dummy1781(p) => p
+ case Dummy1782(p) => p
+ case Dummy1783(p) => p
+ case Dummy1784(p) => p
+ case Dummy1785(p) => p
+ case Dummy1786(p) => p
+ case Dummy1787(p) => p
+ case Dummy1788(p) => p
+ case Dummy1789(p) => p
+ case Dummy1790(p) => p
+ case Dummy1791(p) => p
+ case Dummy1792(p) => p
+ case Dummy1793(p) => p
+ case Dummy1794(p) => p
+ case Dummy1795(p) => p
+ case Dummy1796(p) => p
+ case Dummy1797(p) => p
+ case Dummy1798(p) => p
+ case Dummy1799(p) => p
+ case Dummy1800(p) => p
+ case Dummy1801(p) => p
+ case Dummy1802(p) => p
+ case Dummy1803(p) => p
+ case Dummy1804(p) => p
+ case Dummy1805(p) => p
+ case Dummy1806(p) => p
+ case Dummy1807(p) => p
+ case Dummy1808(p) => p
+ case Dummy1809(p) => p
+ case Dummy1810(p) => p
+ case Dummy1811(p) => p
+ case Dummy1812(p) => p
+ case Dummy1813(p) => p
+ case Dummy1814(p) => p
+ case Dummy1815(p) => p
+ case Dummy1816(p) => p
+ case Dummy1817(p) => p
+ case Dummy1818(p) => p
+ case Dummy1819(p) => p
+ case Dummy1820(p) => p
+ case Dummy1821(p) => p
+ case Dummy1822(p) => p
+ case Dummy1823(p) => p
+ case Dummy1824(p) => p
+ case Dummy1825(p) => p
+ case Dummy1826(p) => p
+ case Dummy1827(p) => p
+ case Dummy1828(p) => p
+ case Dummy1829(p) => p
+ case Dummy1830(p) => p
+ case Dummy1831(p) => p
+ case Dummy1832(p) => p
+ case Dummy1833(p) => p
+ case Dummy1834(p) => p
+ case Dummy1835(p) => p
+ case Dummy1836(p) => p
+ case Dummy1837(p) => p
+ case Dummy1838(p) => p
+ case Dummy1839(p) => p
+ case Dummy1840(p) => p
+ case Dummy1841(p) => p
+ case Dummy1842(p) => p
+ case Dummy1843(p) => p
+ case Dummy1844(p) => p
+ case Dummy1845(p) => p
+ case Dummy1846(p) => p
+ case Dummy1847(p) => p
+ case Dummy1848(p) => p
+ case Dummy1849(p) => p
+ case Dummy1850(p) => p
+ case Dummy1851(p) => p
+ case Dummy1852(p) => p
+ case Dummy1853(p) => p
+ case Dummy1854(p) => p
+ case Dummy1855(p) => p
+ case Dummy1856(p) => p
+ case Dummy1857(p) => p
+ case Dummy1858(p) => p
+ case Dummy1859(p) => p
+ case Dummy1860(p) => p
+ case Dummy1861(p) => p
+ case Dummy1862(p) => p
+ case Dummy1863(p) => p
+ case Dummy1864(p) => p
+ case Dummy1865(p) => p
+ case Dummy1866(p) => p
+ case Dummy1867(p) => p
+ case Dummy1868(p) => p
+ case Dummy1869(p) => p
+ case Dummy1870(p) => p
+ case Dummy1871(p) => p
+ case Dummy1872(p) => p
+ case Dummy1873(p) => p
+ case Dummy1874(p) => p
+ case Dummy1875(p) => p
+ case Dummy1876(p) => p
+ case Dummy1877(p) => p
+ case Dummy1878(p) => p
+ case Dummy1879(p) => p
+ case Dummy1880(p) => p
+ case Dummy1881(p) => p
+ case Dummy1882(p) => p
+ case Dummy1883(p) => p
+ case Dummy1884(p) => p
+ case Dummy1885(p) => p
+ case Dummy1886(p) => p
+ case Dummy1887(p) => p
+ case Dummy1888(p) => p
+ case Dummy1889(p) => p
+ case Dummy1890(p) => p
+ case Dummy1891(p) => p
+ case Dummy1892(p) => p
+ case Dummy1893(p) => p
+ case Dummy1894(p) => p
+ case Dummy1895(p) => p
+ case Dummy1896(p) => p
+ case Dummy1897(p) => p
+ case Dummy1898(p) => p
+ case Dummy1899(p) => p
+ case Dummy1900(p) => p
+ case Dummy1901(p) => p
+ case Dummy1902(p) => p
+ case Dummy1903(p) => p
+ case Dummy1904(p) => p
+ case Dummy1905(p) => p
+ case Dummy1906(p) => p
+ case Dummy1907(p) => p
+ case Dummy1908(p) => p
+ case Dummy1909(p) => p
+ case Dummy1910(p) => p
+ case Dummy1911(p) => p
+ case Dummy1912(p) => p
+ case Dummy1913(p) => p
+ case Dummy1914(p) => p
+ case Dummy1915(p) => p
+ case Dummy1916(p) => p
+ case Dummy1917(p) => p
+ case Dummy1918(p) => p
+ case Dummy1919(p) => p
+ case Dummy1920(p) => p
+ case Dummy1921(p) => p
+ case Dummy1922(p) => p
+ case Dummy1923(p) => p
+ case Dummy1924(p) => p
+ case Dummy1925(p) => p
+ case Dummy1926(p) => p
+ case Dummy1927(p) => p
+ case Dummy1928(p) => p
+ case Dummy1929(p) => p
+ case Dummy1930(p) => p
+ case Dummy1931(p) => p
+ case Dummy1932(p) => p
+ case Dummy1933(p) => p
+ case Dummy1934(p) => p
+ case Dummy1935(p) => p
+ case Dummy1936(p) => p
+ case Dummy1937(p) => p
+ case Dummy1938(p) => p
+ case Dummy1939(p) => p
+ case Dummy1940(p) => p
+ case Dummy1941(p) => p
+ case Dummy1942(p) => p
+ case Dummy1943(p) => p
+ case Dummy1944(p) => p
+ case Dummy1945(p) => p
+ case Dummy1946(p) => p
+ case Dummy1947(p) => p
+ case Dummy1948(p) => p
+ case Dummy1949(p) => p
+ case Dummy1950(p) => p
+ case Dummy1951(p) => p
+ case Dummy1952(p) => p
+ case Dummy1953(p) => p
+ case Dummy1954(p) => p
+ case Dummy1955(p) => p
+ case Dummy1956(p) => p
+ case Dummy1957(p) => p
+ case Dummy1958(p) => p
+ case Dummy1959(p) => p
+ case Dummy1960(p) => p
+ case Dummy1961(p) => p
+ case Dummy1962(p) => p
+ case Dummy1963(p) => p
+ case Dummy1964(p) => p
+ case Dummy1965(p) => p
+ case Dummy1966(p) => p
+ case Dummy1967(p) => p
+ case Dummy1968(p) => p
+ case Dummy1969(p) => p
+ case Dummy1970(p) => p
+ case Dummy1971(p) => p
+ case Dummy1972(p) => p
+ case Dummy1973(p) => p
+ case Dummy1974(p) => p
+ case Dummy1975(p) => p
+ case Dummy1976(p) => p
+ case Dummy1977(p) => p
+ case Dummy1978(p) => p
+ case Dummy1979(p) => p
+ case Dummy1980(p) => p
+ case Dummy1981(p) => p
+ case Dummy1982(p) => p
+ case Dummy1983(p) => p
+ case Dummy1984(p) => p
+ case Dummy1985(p) => p
+ case Dummy1986(p) => p
+ case Dummy1987(p) => p
+ case Dummy1988(p) => p
+ case Dummy1989(p) => p
+ case Dummy1990(p) => p
+ case Dummy1991(p) => p
+ case Dummy1992(p) => p
+ case Dummy1993(p) => p
+ case Dummy1994(p) => p
+ case Dummy1995(p) => p
+ case Dummy1996(p) => p
+ case Dummy1997(p) => p
+ case Dummy1998(p) => p
+ case Dummy1999(p) => p
+}
+}
diff --git a/test/benchmarks/bench b/test/benchmarks/bench
old mode 100644
new mode 100755
diff --git a/test/benchmarks/src/scala/collection/immutable/range-bench.scala b/test/benchmarks/src/scala/collection/immutable/range-bench.scala
new file mode 100644
index 0000000..e167ff0
--- /dev/null
+++ b/test/benchmarks/src/scala/collection/immutable/range-bench.scala
@@ -0,0 +1,61 @@
+package scala.collection.immutable
+package benchmarks
+
+object RangeTest {
+ // not inlined any more, needs investigation
+ //
+ // class XXS {
+ // private val array = Array.range(0, 100)
+ // def tst = { var sum = 0; for (i <- 0 until array.length) sum += array(i); sum }
+ // }
+
+ var x: Int = 0
+
+ def foreachSum(max: Int): Int = {
+ var sum = 0
+ 1 to max foreach (sum += _)
+ sum
+ }
+ def whileSum(max: Int) = {
+ var sum = 0
+ var num = 1
+ while (num <= max) {
+ sum += num
+ num += 1
+ }
+ sum
+ }
+
+ def show(max: Int, foreachNanos: Long, whileNanos: Long) {
+ val winner = if (foreachNanos < whileNanos) "foreachSum" else "whileSum"
+ val ratio = if (foreachNanos < whileNanos) foreachNanos.toDouble / whileNanos else whileNanos.toDouble / foreachNanos
+ println("1 to %d:, %12s wins, %.3f: foreach %.3f while %.3f".format(
+ max, winner, ratio,
+ foreachNanos.toDouble / 1000000L,
+ whileNanos.toDouble / 1000000L)
+ )
+ }
+
+ def run(max: Int) = {
+ val foreachFirst = util.Random.nextBoolean
+ val t1 = System.nanoTime
+ x = if (foreachFirst) foreachSum(max) else whileSum(max)
+ val t2 = System.nanoTime
+ x = if (foreachFirst) whileSum(max) else foreachSum(max)
+ val t3 = System.nanoTime
+
+ val foreachNanos = if (foreachFirst) t2 - t1 else t3 - t2
+ val whileNanos = if (foreachFirst) t3 - t2 else t2 - t1
+ show(max, foreachNanos, whileNanos)
+ }
+
+ def main(args: Array[String]): Unit = {
+ var max = if (args.isEmpty) 100 else args(0).toInt
+ while (max > 0) {
+ run(max)
+ run(max)
+ run(max)
+ max += (max / 7)
+ }
+ }
+}
diff --git a/test/benchmarks/src/scala/collection/mutable/hashtable-bench.scala b/test/benchmarks/src/scala/collection/mutable/hashtable-bench.scala
new file mode 100644
index 0000000..c01e7cb
--- /dev/null
+++ b/test/benchmarks/src/scala/collection/mutable/hashtable-bench.scala
@@ -0,0 +1,61 @@
+import scala.collection.mutable.HashMap
+
+object Test {
+ var dummy: Long = 0L
+ var _foreach: Long = 0L
+ var _iterator: Long = 0L
+
+ def numbers: Seq[Int] = 1 to 1000000
+ val map: HashMap[Int, Int] = HashMap(numbers zip numbers: _*)
+
+ @inline final def timed(body: => Unit): Long = {
+ val start = System.nanoTime
+ body
+ System.nanoTime - start
+ }
+
+ def go(xs: Iterable[Int], reps: Int) = {
+ _foreach = 0L
+ _iterator = 0L
+
+ 0 until reps foreach { _ =>
+ _foreach += timed(xs foreach (dummy += _))
+ _iterator += timed(xs.iterator foreach (dummy += _))
+ }
+
+ " foreach avg " + (_foreach / reps) + "\n iterator avg " + (_iterator / reps) + "\n"
+ }
+
+ def go2(xs: collection.Map[Int, Int], reps: Int) = {
+ _foreach = 0L
+ _iterator = 0L
+
+ def incDummy(nums: (Int, Int)) = {
+ dummy += nums._1
+ dummy -= nums._2
+ }
+
+ 0 until reps foreach { _ =>
+ _foreach += timed(xs foreach incDummy)
+ _iterator += timed(xs.iterator foreach incDummy)
+ }
+
+ " foreach avg " + (_foreach / reps) + "\n iterator avg " + (_iterator / reps) + "\n"
+ }
+
+ def main(args: Array[String]): Unit = {
+ println("map.keys:")
+ go(map.keys, 10) // warm
+ println(go(map.keys, 10))
+
+ println("map.values:")
+ go(map.values, 10) // warm
+ println(go(map.values, 10))
+
+ println("map:")
+ go2(map, 10) // warm
+ println(go2(map, 10))
+
+ println("// pay me no mind ... " + dummy)
+ }
+}
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala
index e6feb59..22d2107 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/arrays/Resetting.scala
@@ -4,19 +4,19 @@ package scala.collection.parallel.benchmarks.arrays
import scala.collection.parallel.benchmarks._
-abstract class Resetting[T: Manifest](elemcreate: Int => T, sz: Int, p: Int, what: String)
+abstract class Resetting[T: ClassTag](elemcreate: Int => T, sz: Int, p: Int, what: String)
extends Bench {
val size = sz
val parallelism = p
val runWhat = what
-
+
var anyarray: Array[Any] = null
var castarray: AnyRef = null
var gencastarray: Array[T] = null
var manifarray: Array[T] = null
-
+
reset
-
+
def reset = what match {
case "any" =>
anyarray = new Array[Any](sz)
diff --git a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
index 5f902ff..6d5b189 100644
--- a/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
+++ b/test/benchmarks/src/scala/collection/parallel/benchmarks/parallel_array/MatrixMultiplication.scala
@@ -16,54 +16,54 @@ class MatrixMultiplication(sz: Int, p: Int, what: String)
extends Resettable(sz, p, what, new Cont(_), new Array[Any](_), classOf[Cont]) {
def companion = MatrixMultiplication
collection.parallel.tasksupport.environment = forkjoinpool
-
+
val a = Matrix.unit[Int](sz)
val b = Matrix.unit[Int](sz)
var c = new Matrix[Int](sz)
-
+
def runpar = c = a * b //{ c.assignProduct(a, b) } //; println("--------"); c.output }
def runseq = throw new UnsupportedOperationException
def comparisonMap = collection.Map()
-
- class Matrix[T](n: Int)(implicit num: Numeric[T], man: Manifest[T]) {
+
+ class Matrix[T](n: Int)(implicit num: Numeric[T], tag: ClassTag[T]) {
val array = new Array[T](n * n)
-
+
def apply(y: Int, x: Int) = array(y * n + x)
-
+
def update(y: Int, x: Int, elem: T) = array(y * n + x) = elem
-
+
def *(b: Matrix[T]) = {
val m = new Matrix[T](n)
m.assignProduct(this, b)
m
}
-
+
def assignProduct(a: Matrix[T], b: Matrix[T]) = {
val range = ParRange(0, n * n, 1, false)
for (i <- range) this(i / n, i % n) = calcProduct(a, b, i / n, i % n);
}
-
+
private def calcProduct(a: Matrix[T], b: Matrix[T], y: Int, x: Int): T = {
import num._
var sum = zero
for (i <- 0 until n) sum += a(y, i) * b(i, x)
sum
}
-
+
def output = for (y <- 0 until n) {
for (x <- 0 until n) print(this(y, x))
println
}
}
-
+
object Matrix {
- def unit[T](n: Int)(implicit num: Numeric[T], man: Manifest[T]) = {
+ def unit[T](n: Int)(implicit num: Numeric[T], tag: ClassTag[T]) = {
val m = new Matrix[T](n)
for (i <- 0 until n) m(i, i) = num.one
m
}
}
-
+
}
diff --git a/test/benchmarks/src/scala/util/HashSpeedTest.scala b/test/benchmarks/src/scala/util/HashSpeedTest.scala
index 1cc0b73..a4d310e 100644
--- a/test/benchmarks/src/scala/util/HashSpeedTest.scala
+++ b/test/benchmarks/src/scala/util/HashSpeedTest.scala
@@ -1,65 +1,71 @@
object HashSpeedTest {
- import System.{nanoTime => now}
+
+ import System.{ nanoTime => now }
def time[A](f: => A) = {
val t0 = now
val ans = f
(ans, now - t0)
}
+
def ptime[A](f: => A) = {
- val (ans,dt) = time(f)
- printf("Elapsed: %.3f\n",dt*1e-9)
+ val (ans, dt) = time(f)
+ printf("Elapsed: %.3f\n", dt * 1e-9)
ans
}
object HashHist {
var enabled = true
- val counts = new collection.mutable.HashMap[Int,Int]
- def add (i: Int) { if (enabled) counts(i) = counts.get(i).getOrElse(0)+1 }
+ val counts = new collection.mutable.HashMap[Int, Int]
+ def add(i: Int) { if (enabled) counts(i) = counts.get(i).getOrElse(0) + 1 }
def resultAndReset = {
var s = 0L
var o = 0L
var m = 0
counts.valuesIterator.foreach(i => {
s += i
- if (i>0) o += 1
- if (i>m) m = i
+ if (i > 0) o += 1
+ if (i > m) m = i
})
counts.clear
- (s,o,m)
+ (s, o, m)
}
}
- def report(s: String,res: (Long,Long,Int)) {
- println("Hash quality of "+s)
- printf(" %5.2f%% of entries are collisions\n",100*(res._1 - res._2).toDouble/res._1)
- printf(" Max of %d entries mapped to the same value\n",res._3)
+ def report(s: String, res: (Long, Long, Int)) {
+ println("Hash quality of " + s)
+ printf(" %5.2f%% of entries are collisions\n", 100 * (res._1 - res._2).toDouble / res._1)
+ printf(" Max of %d entries mapped to the same value\n", res._3)
}
// If you have MurmurHash3 installed, uncomment below (and in main)
- import scala.util.{MurmurHash3 => MH3}
+ import scala.util.{ MurmurHash3 => MH3 }
+
val justCountString: String => Unit = str => {
- var s,i = 0
+ var s, i = 0
while (i < str.length) { s += str.charAt(i); i += 1 }
HashHist.add(s)
}
+
val defaultHashString: String => Unit = str => HashHist.add(str.hashCode)
+
val murmurHashString: String => Unit = str => HashHist.add(MH3.stringHash(str))
+
def makeCharStrings = {
val a = new Array[Byte](4)
val buffer = new collection.mutable.ArrayBuffer[String]
var i: Int = 'A'
while (i <= 'Z') {
- a(0) = (i&0xFF).toByte
+ a(0) = (i & 0xFF).toByte
var j: Int = 'a'
while (j <= 'z') {
- a(1) = (j&0xFF).toByte
+ a(1) = (j & 0xFF).toByte
var k: Int = 'A'
while (k <= 'z') {
- a(2) = (k&0xFF).toByte
+ a(2) = (k & 0xFF).toByte
var l: Int = 'A'
while (l <= 'z') {
- a(3) = (l&0xFF).toByte
+ a(3) = (l & 0xFF).toByte
buffer += new String(a)
l += 1
}
@@ -71,6 +77,7 @@ object HashSpeedTest {
}
buffer.toArray
}
+
def hashCharStrings(ss: Array[String], hash: String => Unit) {
var i = 0
while (i < ss.length) {
@@ -78,19 +85,21 @@ object HashSpeedTest {
i += 1
}
}
-
+
def justCountList: List[List[Int]] => Unit = lli => {
var s = 0
lli.foreach(_.foreach(s += _))
HashHist.add(s)
}
+
def defaultHashList: List[List[Int]] => Unit = lli => HashHist.add(lli.hashCode)
+
def makeBinaryLists = {
def singleLists(depth: Int): List[List[Int]] = {
if (depth <= 0) List(Nil)
else {
- val set = singleLists(depth-1)
- val longest = set filter (_.length == depth-1)
+ val set = singleLists(depth - 1)
+ val longest = set filter (_.length == depth - 1)
set ::: (longest.map(0 :: _)) ::: (longest.map(1 :: _))
}
}
@@ -123,6 +132,7 @@ object HashSpeedTest {
}
buffer.toArray
}
+
def hashBinaryLists(ls: Array[List[List[Int]]], hash: List[List[Int]] => Unit) {
var i = 0
while (i < ls.length) {
@@ -136,17 +146,20 @@ object HashSpeedTest {
si.foreach(s += _)
HashHist.add(s)
}
+
def defaultHashSets: Set[Int] => Unit = si => HashHist.add(si.hashCode)
+
def makeIntSets = {
def sets(depth: Int): List[Set[Int]] = {
if (depth <= 0) List(Set.empty[Int])
else {
- val set = sets(depth-1)
+ val set = sets(depth - 1)
set ::: set.map(_ + depth)
}
}
sets(20).toArray
}
+
def hashIntSets(ss: Array[Set[Int]], hash: Set[Int] => Unit) {
var i = 0
while (i < ss.length) {
@@ -156,36 +169,37 @@ object HashSpeedTest {
}
def defaultHashTuples: (Product with Serializable) => Unit = p => HashHist.add(p.hashCode)
+
def makeNestedTuples = {
val basic = Array(
- (0,0),
- (0,1),
- (1,0),
- (1,1),
- (0,0,0),
- (0,0,1),
- (0,1,0),
- (1,0,0),
- (0,0,0,0),
- (0,0,0,0,0),
- (false,false),
- (true,false),
- (false,true),
- (true,true),
- (0.7,true,"fish"),
- ((),true,'c',400,9.2,"galactic")
- )
+ (0, 0),
+ (0, 1),
+ (1, 0),
+ (1, 1),
+ (0, 0, 0),
+ (0, 0, 1),
+ (0, 1, 0),
+ (1, 0, 0),
+ (0, 0, 0, 0),
+ (0, 0, 0, 0, 0),
+ (false, false),
+ (true, false),
+ (false, true),
+ (true, true),
+ (0.7, true, "fish"),
+ ((), true, 'c', 400, 9.2, "galactic"))
basic ++
- (for (i <- basic; j <- basic) yield (i,j)) ++
- (for (i <- basic; j <- basic; k <- basic) yield (i,j,k)) ++
- (for (i <- basic; j <- basic; k <- basic) yield ((i,j),k)) ++
- (for (i <- basic; j <- basic; k <- basic) yield (i,(j,k))) ++
- (for (i <- basic; j <- basic; k <- basic; l <- basic) yield (i,j,k,l)) ++
- (for (i <- basic; j <- basic; k <- basic; l <- basic) yield ((i,j),(k,l))) ++
- (for (i <- basic; j <- basic; k <- basic; l <- basic) yield (i,(j,k,l))) ++
- (for (i <- basic; j <- basic; k <- basic; l <- basic; m <- basic) yield (i,j,k,l,m)) ++
- (for (i <- basic; j <- basic; k <- basic; l <- basic; m <- basic) yield (i,(j,(k,(l,m)))))
+ (for (i <- basic; j <- basic) yield (i, j)) ++
+ (for (i <- basic; j <- basic; k <- basic) yield (i, j, k)) ++
+ (for (i <- basic; j <- basic; k <- basic) yield ((i, j), k)) ++
+ (for (i <- basic; j <- basic; k <- basic) yield (i, (j, k))) ++
+ (for (i <- basic; j <- basic; k <- basic; l <- basic) yield (i, j, k, l)) ++
+ (for (i <- basic; j <- basic; k <- basic; l <- basic) yield ((i, j), (k, l))) ++
+ (for (i <- basic; j <- basic; k <- basic; l <- basic) yield (i, (j, k, l))) ++
+ (for (i <- basic; j <- basic; k <- basic; l <- basic; m <- basic) yield (i, j, k, l, m)) ++
+ (for (i <- basic; j <- basic; k <- basic; l <- basic; m <- basic) yield (i, (j, (k, (l, m)))))
}
+
def hashNestedTuples(ts: Array[Product with Serializable], hash: (Product with Serializable) => Unit) {
var i = 0
while (i < ts.length) {
@@ -194,21 +208,21 @@ object HashSpeedTest {
}
}
- def findSpeed[A](n: Int, h: (Array[A],A=>Unit)=>Unit, aa: Array[A], f: A=>Unit) = {
- (time { for (i <- 1 to n) { h(aa,f) } }._2, aa.length.toLong*n)
+ def findSpeed[A](n: Int, h: (Array[A], A => Unit) => Unit, aa: Array[A], f: A => Unit) = {
+ (time { for (i <- 1 to n) { h(aa, f) } }._2, aa.length.toLong * n)
}
- def reportSpeed[A](repeats: Int, xs: List[(String, ()=>(Long,Long))]) {
- val tn = Array.fill(xs.length)((0L,0L))
+ def reportSpeed[A](repeats: Int, xs: List[(String, () => (Long, Long))]) {
+ val tn = Array.fill(xs.length)((0L, 0L))
for (j <- 1 to repeats) {
- for ((l,i) <- xs zipWithIndex) {
+ for ((l, i) <- xs zipWithIndex) {
val x = l._2()
tn(i) = (tn(i)._1 + x._1, tn(i)._2 + x._2)
}
}
- for (((t,n),(title,_)) <- (tn zip xs)) {
- val rate = (n*1e-6)/(t*1e-9)
- printf("Hash rate for %s: %4.2f million/second\n",title,rate)
+ for (((t, n), (title, _)) <- (tn zip xs)) {
+ val rate = (n * 1e-6) / (t * 1e-9)
+ printf("Hash rate for %s: %4.2f million/second\n", title, rate)
}
}
@@ -218,24 +232,22 @@ object HashSpeedTest {
val nt = makeNestedTuples
// Uncomment the following for string stats if MurmurHash3 available
val cs = makeCharStrings
- report("Java String hash for strings",{ hashCharStrings(cs,defaultHashString); HashHist.resultAndReset })
- report("MurmurHash3 for strings",{ hashCharStrings(cs,murmurHashString); HashHist.resultAndReset })
+ report("Java String hash for strings", { hashCharStrings(cs, defaultHashString); HashHist.resultAndReset })
+ report("MurmurHash3 for strings", { hashCharStrings(cs, murmurHashString); HashHist.resultAndReset })
HashHist.enabled = false
reportSpeed(3, List(
- ("Java string hash", () => findSpeed[String](30, (x, y) => hashCharStrings(x, y),cs,defaultHashString)),
- ("MurmurHash3 string hash", () => findSpeed[String](30,(x, y) => hashCharStrings(x, y),cs,murmurHashString))
- ))
+ ("Java string hash", () => findSpeed[String](30, (x, y) => hashCharStrings(x, y), cs, defaultHashString)),
+ ("MurmurHash3 string hash", () => findSpeed[String](30, (x, y) => hashCharStrings(x, y), cs, murmurHashString))))
// reportSpeed("Java string hash",30,hashCharStrings.tupled,cs,defaultHashString)
// reportSpeed("MurmurHash3 string hash",30,hashCharStrings.tupled,cs,murmurHashString)
HashHist.enabled = true
- report("lists of binary int lists",{ hashBinaryLists(bl,defaultHashList); HashHist.resultAndReset })
- report("small integer sets",{ hashIntSets(is,defaultHashSets); HashHist.resultAndReset })
- report("small nested tuples",{ hashNestedTuples(nt,defaultHashTuples); HashHist.resultAndReset })
+ report("lists of binary int lists", { hashBinaryLists(bl, defaultHashList); HashHist.resultAndReset })
+ report("small integer sets", { hashIntSets(is, defaultHashSets); HashHist.resultAndReset })
+ report("small nested tuples", { hashNestedTuples(nt, defaultHashTuples); HashHist.resultAndReset })
HashHist.enabled = false
- reportSpeed(3,List(
- ("lists of lists of binary ints", () => findSpeed(20,hashBinaryLists,bl,defaultHashList)),
- ("small integer sets", () => findSpeed(10,hashIntSets,is,defaultHashSets)),
- ("small nested tuples", () => findSpeed(5,hashNestedTuples,nt,defaultHashTuples))
- ))
+ reportSpeed(3, List(
+ ("lists of lists of binary ints", () => findSpeed(20, hashBinaryLists, bl, defaultHashList)),
+ ("small integer sets", () => findSpeed(10, hashIntSets, is, defaultHashSets)),
+ ("small nested tuples", () => findSpeed(5, hashNestedTuples, nt, defaultHashTuples))))
}
}
diff --git a/test/files/jvm/bug680.check b/test/debug/OBSOLETE
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/debug/OBSOLETE
diff --git a/test/files/jvm/bug680.check b/test/debug/buildmanager/.gitignore
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/debug/buildmanager/.gitignore
diff --git a/test/files/jvm/bug680.check b/test/debug/jvm/.gitignore
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/debug/jvm/.gitignore
diff --git a/test/files/jvm/bug680.check b/test/debug/neg/.gitignore
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/debug/neg/.gitignore
diff --git a/test/files/jvm/bug680.check b/test/debug/pos/.gitignore
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/debug/pos/.gitignore
diff --git a/test/files/jvm/bug680.check b/test/debug/res/.gitignore
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/debug/res/.gitignore
diff --git a/test/files/jvm/bug680.check b/test/debug/run/.gitignore
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/debug/run/.gitignore
diff --git a/test/files/jvm/bug680.check b/test/debug/scalacheck/.gitignore
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/debug/scalacheck/.gitignore
diff --git a/test/files/jvm/bug680.check b/test/debug/scalap/.gitignore
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/debug/scalap/.gitignore
diff --git a/test/files/jvm/bug680.check b/test/debug/shootout/.gitignore
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/debug/shootout/.gitignore
diff --git a/test/disabled-windows/script/loadAndExecute/lAndE1.scala b/test/disabled-windows/script/loadAndExecute/lAndE1.scala
old mode 100644
new mode 100755
diff --git a/test/disabled-windows/script/loadAndExecute/lAndE2.scala b/test/disabled-windows/script/loadAndExecute/lAndE2.scala
old mode 100644
new mode 100755
diff --git a/test/disabled-windows/script/loadAndExecute/loadAndExecute.scala b/test/disabled-windows/script/loadAndExecute/loadAndExecute.scala
old mode 100644
new mode 100755
diff --git a/test/disabled-windows/script/utf8.bat b/test/disabled-windows/script/utf8.bat
old mode 100644
new mode 100755
index 8bc5c88..73c7256
--- a/test/disabled-windows/script/utf8.bat
+++ b/test/disabled-windows/script/utf8.bat
@@ -1,11 +1,11 @@
-::#!
-:: utf8 - <description>.
-
- at echo off
-call scala -nocompdaemon %0 %*
-goto :eof
-::!#
-
-/*Comment Комментарий*/
-Console.println("QWERTY");
-Console.println("ЙЦУКЕН");
+::#!
+:: utf8 - <description>.
+
+ at echo off
+call scala -nocompdaemon %0 %*
+goto :eof
+::!#
+
+/*Comment Комментарий*/
+Console.println("QWERTY");
+Console.println("ЙЦУКЕН");
diff --git a/test/disabled-windows/script/utf8.scala b/test/disabled-windows/script/utf8.scala
old mode 100644
new mode 100755
diff --git a/test/files/buildmanager/t2652/A.scala b/test/disabled/buildmanager/t2652/A.scala
similarity index 100%
rename from test/files/buildmanager/t2652/A.scala
rename to test/disabled/buildmanager/t2652/A.scala
diff --git a/test/files/buildmanager/t2652/B.scala b/test/disabled/buildmanager/t2652/B.scala
similarity index 100%
rename from test/files/buildmanager/t2652/B.scala
rename to test/disabled/buildmanager/t2652/B.scala
diff --git a/test/files/buildmanager/t2652/t2652.changes/A2.scala b/test/disabled/buildmanager/t2652/t2652.changes/A2.scala
similarity index 100%
rename from test/files/buildmanager/t2652/t2652.changes/A2.scala
rename to test/disabled/buildmanager/t2652/t2652.changes/A2.scala
diff --git a/test/disabled/buildmanager/t2652/t2652.check b/test/disabled/buildmanager/t2652/t2652.check
new file mode 100644
index 0000000..071281c
--- /dev/null
+++ b/test/disabled/buildmanager/t2652/t2652.check
@@ -0,0 +1,9 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(Added(Definition(A.x$mBc$sp)), Added(Definition(A.x$mCc$sp)), Added(Definition(A.x$mDc$sp)), Added(Definition(A.x$mFc$sp)), Added(Definition(A.x$mIc$sp)), Added(Definition(A.x$mJc$sp)), Added(Definition(A.x$mSc$sp)), Added(Definition(A.x$mVc$sp)), Added(Definition(A.x$mZc$sp)), Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: <method> <triedcooking>]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: <method> <triedcooking>]]
+compiling Set(B.scala)
+Changes: Map(object B -> List())
diff --git a/test/files/buildmanager/t2652/t2652.test b/test/disabled/buildmanager/t2652/t2652.test
similarity index 100%
rename from test/files/buildmanager/t2652/t2652.test
rename to test/disabled/buildmanager/t2652/t2652.test
diff --git a/test/files/continuations-neg/infer0.check b/test/disabled/continuations-neg/infer0.check
similarity index 100%
rename from test/files/continuations-neg/infer0.check
rename to test/disabled/continuations-neg/infer0.check
diff --git a/test/disabled/continuations-neg/infer0.scala b/test/disabled/continuations-neg/infer0.scala
new file mode 100644
index 0000000..6d97d75
--- /dev/null
+++ b/test/disabled/continuations-neg/infer0.scala
@@ -0,0 +1,12 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+ def test(x: => Int @cpsParam[String,Int]) = 7
+
+ def main(args: Array[String]) {
+ test(8)
+ }
+}
diff --git a/test/disabled/jvm/concurrent-future.check b/test/disabled/jvm/concurrent-future.check
new file mode 100644
index 0000000..715ac90
--- /dev/null
+++ b/test/disabled/jvm/concurrent-future.check
@@ -0,0 +1,14 @@
+test1: hai world
+test1: kthxbye
+test2: hai world
+test2: awsum thx
+test2: kthxbye
+test3: hai world
+test4: hai world
+test4: kthxbye
+test5: hai world
+test5: kthxbye
+test6: hai world
+test6: kthxbye
+test7: hai world
+test7: kthxbye
diff --git a/test/disabled/jvm/concurrent-future.scala b/test/disabled/jvm/concurrent-future.scala
new file mode 100644
index 0000000..eda0542
--- /dev/null
+++ b/test/disabled/jvm/concurrent-future.scala
@@ -0,0 +1,122 @@
+
+
+
+import scala.concurrent._
+
+
+
+object Test extends App {
+
+ def once(body: (() => Unit) => Unit) {
+ val sv = new SyncVar[Boolean]
+ body(() => sv put true)
+ sv.take()
+ }
+
+ def output(num: Int, msg: String) {
+ println("test" + num + ": " + msg)
+ }
+
+ def testOnSuccess(): Unit = once {
+ done =>
+ val f = future {
+ output(1, "hai world")
+ }
+ f onSuccess { case _ =>
+ output(1, "kthxbye")
+ done()
+ }
+ }
+
+ def testOnSuccessWhenCompleted(): Unit = once {
+ done =>
+ val f = future {
+ output(2, "hai world")
+ }
+ f onSuccess { case _ =>
+ output(2, "awsum thx")
+ f onSuccess { case _ =>
+ output(2, "kthxbye")
+ done()
+ }
+ }
+ }
+
+ def testOnSuccessWhenFailed(): Unit = once {
+ done =>
+ val f = future[Unit] {
+ output(3, "hai world")
+ done()
+ throw new Exception
+ }
+ f onSuccess { case _ =>
+ output(3, "onoes")
+ }
+ }
+
+ def testOnFailure(): Unit = once {
+ done =>
+ val f = future[Unit] {
+ output(4, "hai world")
+ throw new Exception
+ }
+ f onSuccess { case _ =>
+ output(4, "onoes")
+ done()
+ }
+ f onFailure { case _ =>
+ output(4, "kthxbye")
+ done()
+ }
+ }
+
+ def testOnFailureWhenSpecialThrowable(num: Int, cause: Throwable): Unit = once {
+ done =>
+ val f = future[Unit] {
+ output(num, "hai world")
+ throw cause
+ }
+ f onSuccess { case _ =>
+ output(num, "onoes")
+ done()
+ }
+ f onFailure {
+ case e: ExecutionException if (e.getCause == cause) =>
+ output(num, "kthxbye")
+ done()
+ case _ =>
+ output(num, "onoes")
+ done()
+ }
+ }
+
+ // def testOnFailureWhenFutureTimeoutException(): Unit = once {
+ // done =>
+ // val f = future[Unit] {
+ // output(8, "hai world")
+ // throw new FutureTimeoutException(null)
+ // }
+ // f onSuccess { case _ =>
+ // output(8, "onoes")
+ // done()
+ // }
+ // f onFailure {
+ // case e: FutureTimeoutException =>
+ // output(8, "im in yr loop")
+ // done()
+ // case other =>
+ // output(8, "onoes: " + other)
+ // done()
+ // }
+ // }
+
+ testOnSuccess()
+ testOnSuccessWhenCompleted()
+ testOnSuccessWhenFailed()
+ testOnFailure()
+ testOnFailureWhenSpecialThrowable(5, new Error)
+ testOnFailureWhenSpecialThrowable(6, new scala.util.control.ControlThrowable { })
+ testOnFailureWhenSpecialThrowable(7, new InterruptedException)
+ // testOnFailureWhenFutureTimeoutException()
+
+}
diff --git a/test/disabled/lib/annotations.jar b/test/disabled/lib/annotations.jar
deleted file mode 100644
index 59fa4b7..0000000
Binary files a/test/disabled/lib/annotations.jar and /dev/null differ
diff --git a/test/disabled/lib/annotations.jar.desired.sha1 b/test/disabled/lib/annotations.jar.desired.sha1
deleted file mode 100644
index 2b4292d..0000000
--- a/test/disabled/lib/annotations.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-02fe2ed93766323a13f22c7a7e2ecdcd84259b6c ?annotations.jar
diff --git a/test/disabled/lib/enums.jar b/test/disabled/lib/enums.jar
deleted file mode 100644
index f661d13..0000000
Binary files a/test/disabled/lib/enums.jar and /dev/null differ
diff --git a/test/disabled/lib/enums.jar.desired.sha1 b/test/disabled/lib/enums.jar.desired.sha1
deleted file mode 100644
index 46cd8e9..0000000
--- a/test/disabled/lib/enums.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-981392dbd1f727b152cd1c908c5fce60ad9d07f7 ?enums.jar
diff --git a/test/disabled/lib/genericNest.jar b/test/disabled/lib/genericNest.jar
deleted file mode 100644
index bc08c1e..0000000
Binary files a/test/disabled/lib/genericNest.jar and /dev/null differ
diff --git a/test/disabled/lib/genericNest.jar.desired.sha1 b/test/disabled/lib/genericNest.jar.desired.sha1
deleted file mode 100644
index e932126..0000000
--- a/test/disabled/lib/genericNest.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b1ec8a095cec4902b3609d74d274c04365c59c04 ?genericNest.jar
diff --git a/test/disabled/lib/methvsfield.jar b/test/disabled/lib/methvsfield.jar
deleted file mode 100644
index f266071..0000000
Binary files a/test/disabled/lib/methvsfield.jar and /dev/null differ
diff --git a/test/disabled/lib/methvsfield.jar.desired.sha1 b/test/disabled/lib/methvsfield.jar.desired.sha1
deleted file mode 100644
index 8c01532..0000000
--- a/test/disabled/lib/methvsfield.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-be8454d5e7751b063ade201c225dcedefd252775 ?methvsfield.jar
diff --git a/test/disabled/lib/nest.jar b/test/disabled/lib/nest.jar
deleted file mode 100644
index 4eda4be..0000000
Binary files a/test/disabled/lib/nest.jar and /dev/null differ
diff --git a/test/disabled/lib/nest.jar.desired.sha1 b/test/disabled/lib/nest.jar.desired.sha1
deleted file mode 100644
index 674ca79..0000000
--- a/test/disabled/lib/nest.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-cd33e0a0ea249eb42363a2f8ba531186345ff68c ?nest.jar
diff --git a/test/disabled/lib/scalacheck.jar b/test/disabled/lib/scalacheck.jar
deleted file mode 100644
index 10712ad..0000000
Binary files a/test/disabled/lib/scalacheck.jar and /dev/null differ
diff --git a/test/disabled/lib/scalacheck.jar.desired.sha1 b/test/disabled/lib/scalacheck.jar.desired.sha1
deleted file mode 100644
index cdbdc53..0000000
--- a/test/disabled/lib/scalacheck.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-77dca656258fe983ec64461860ab1ca0f7e2fd65 ?scalacheck.jar
diff --git a/test/disabled/neg/abstract-report3.check b/test/disabled/neg/abstract-report3.check
new file mode 100644
index 0000000..ac3f4ab
--- /dev/null
+++ b/test/disabled/neg/abstract-report3.check
@@ -0,0 +1,39 @@
+abstract-report3.scala:1: error: class Foo needs to be abstract, since:
+it has 25 unimplemented members.
+/** As seen from class Foo, the missing signatures are as follows.
+ * For convenience, these are usable as stub implementations.
+ */
+ // Members declared in java.util.concurrent.BlockingQueue
+ def add(x$1: T): Boolean = ???
+ def contains(x$1: Any): Boolean = ???
+ def drainTo(x$1: java.util.Collection[_ >: T],x$2: Int): Int = ???
+ def drainTo(x$1: java.util.Collection[_ >: T]): Int = ???
+ def offer(x$1: T,x$2: Long,x$3: java.util.concurrent.TimeUnit): Boolean = ???
+ def offer(x$1: T): Boolean = ???
+ def poll(x$1: Long,x$2: java.util.concurrent.TimeUnit): T = ???
+ def put(x$1: T): Unit = ???
+ def remainingCapacity(): Int = ???
+ def remove(x$1: Any): Boolean = ???
+ def take(): T = ???
+
+ // Members declared in java.util.Collection
+ def addAll(x$1: java.util.Collection[_ <: T]): Boolean = ???
+ def clear(): Unit = ???
+ def containsAll(x$1: java.util.Collection[_]): Boolean = ???
+ def isEmpty(): Boolean = ???
+ def iterator(): java.util.Iterator[T] = ???
+ def removeAll(x$1: java.util.Collection[_]): Boolean = ???
+ def retainAll(x$1: java.util.Collection[_]): Boolean = ???
+ def size(): Int = ???
+ def toArray[T](x$1: Array[T with Object]): Array[T with Object] = ???
+ def toArray(): Array[Object] = ???
+
+ // Members declared in java.util.Queue
+ def element(): T = ???
+ def peek(): T = ???
+ def poll(): T = ???
+ def remove(): T = ???
+
+class Foo[T] extends java.util.concurrent.BlockingQueue[T] { }
+ ^
+one error found
diff --git a/test/disabled/neg/abstract-report3.scala b/test/disabled/neg/abstract-report3.scala
new file mode 100644
index 0000000..d3cce86
--- /dev/null
+++ b/test/disabled/neg/abstract-report3.scala
@@ -0,0 +1 @@
+class Foo[T] extends java.util.concurrent.BlockingQueue[T] { }
\ No newline at end of file
diff --git a/test/files/pos/annotDepMethType.flags b/test/disabled/pos/caseclass-parents.flags
similarity index 100%
copy from test/files/pos/annotDepMethType.flags
copy to test/disabled/pos/caseclass-parents.flags
diff --git a/test/disabled/pos/caseclass-parents.scala b/test/disabled/pos/caseclass-parents.scala
new file mode 100644
index 0000000..d4bc521
--- /dev/null
+++ b/test/disabled/pos/caseclass-parents.scala
@@ -0,0 +1,11 @@
+case class Foo() extends Serializable
+case object Bar extends Serializable
+
+case class Bippy[T, U](x: T, y: U) extends Product2[T, U] { }
+
+case class Bounded[T <: util.Random, U <: util.Random](x: T, y: U) { }
+
+class A {
+ def f(x: Bounded[_, _]) = x.productIterator foreach g
+ def g(rand: util.Random) = ()
+}
\ No newline at end of file
diff --git a/test/files/pos/annotDepMethType.flags b/test/disabled/pos/caseclass-productN.flags
similarity index 100%
copy from test/files/pos/annotDepMethType.flags
copy to test/disabled/pos/caseclass-productN.flags
diff --git a/test/disabled/pos/caseclass-productN.scala b/test/disabled/pos/caseclass-productN.scala
new file mode 100644
index 0000000..e217785
--- /dev/null
+++ b/test/disabled/pos/caseclass-productN.scala
@@ -0,0 +1,20 @@
+object Test {
+ class A
+ class B extends A
+ class C extends B
+
+ case class Bippy[T](x: Int, y: List[T], z: T) { }
+ case class Bippy2[T](x: Int, y: List[T], z: T) { }
+
+ def bippies = List(
+ Bippy(5, List(new C), new B),
+ Bippy2(5, List(new B), new C)
+ )
+
+ def bmethod(x: B) = ()
+
+ def main(args: Array[String]): Unit = {
+ bippies flatMap (_._2) foreach bmethod
+ bippies map (_._3) foreach bmethod
+ }
+}
diff --git a/test/disabled/pos/t1545.scala b/test/disabled/pos/t1545.scala
new file mode 100755
index 0000000..4c5908b
--- /dev/null
+++ b/test/disabled/pos/t1545.scala
@@ -0,0 +1,18 @@
+// According to the spec this code should not be legal.
+// Disabling for now.
+object Main extends App {
+
+ case class Foo (field : Option[String])
+
+ val x : PartialFunction[Foo,Int] =
+ {
+ c => c.field match {
+ case Some (s) => 42
+ case None => 99
+ }
+ }
+
+ println (x (Foo (None))) // prints 99
+ println (x (Foo (Some ("foo")))) // prints 42
+
+}
diff --git a/test/files/pos/bug1737/A.java b/test/disabled/pos/t1737/A.java
similarity index 100%
rename from test/files/pos/bug1737/A.java
rename to test/disabled/pos/t1737/A.java
diff --git a/test/files/pos/bug1737/B.java b/test/disabled/pos/t1737/B.java
similarity index 100%
rename from test/files/pos/bug1737/B.java
rename to test/disabled/pos/t1737/B.java
diff --git a/test/files/pos/bug1737/c.scala b/test/disabled/pos/t1737/c.scala
similarity index 100%
rename from test/files/pos/bug1737/c.scala
rename to test/disabled/pos/t1737/c.scala
diff --git a/test/disabled/pos/bug2919.scala b/test/disabled/pos/t2919.scala
similarity index 100%
rename from test/disabled/pos/bug2919.scala
rename to test/disabled/pos/t2919.scala
diff --git a/test/disabled/presentation/akka.check b/test/disabled/presentation/akka.check
new file mode 100644
index 0000000..5105d85
--- /dev/null
+++ b/test/disabled/presentation/akka.check
@@ -0,0 +1,492 @@
+reload: Actor.scala, ActorRef.scala, ActorRegistry.scala, Actors.java, Address.scala, AkkaException.scala, AkkaLoader.scala, Bootable.scala, BootableActorLoaderService.scala, BoundedBlockingQueue.scala, Config.scala, ConfigParser.scala, Configuration.scala, Configurator.scala, Crypt.scala, DataFlow.scala, Dispatchers.scala, Duration.scala, EventHandler.scala, ExecutorBasedEventDrivenDispatcher.scala, ExecutorBasedEventDrivenWorkStealingDispatcher.scala, FSM.scala, Future.scala, HashCode. [...]
+
+askTypeCompletion at pi.scala(52,59)
+================================================================================
+[response] aksTypeCompletion at (52,59)
+retrieved 0 members
+
+================================================================================
+
+askTypeCompletion at pi.scala(55,25)
+================================================================================
+[response] aksTypeCompletion at (55,25)
+retrieved 45 members
+`class Broadcastakka.routing.Routing.Broadcast`
+`method !=(x$1: Any)Boolean`
+`method !=(x$1: AnyRef)Boolean`
+`method ##()Int`
+`method +(other: String)String`
+`method ->[B](y: B)(akka.routing.Routing.type, B)`
+`method ==(x$1: Any)Boolean`
+`method ==(x$1: AnyRef)Boolean`
+`method asInstanceOf[T0]=> T0`
+`method clone()Object`
+`method dispatcherActor(routing: akka.routing.Routing.PF[Any,akka.actor.ActorRef])akka.actor.ActorRef`
+`method dispatcherActor(routing: akka.routing.Routing.PF[Any,akka.actor.ActorRef], msgTransformer: Any => Any)akka.actor.ActorRef`
+`method ensuring(cond: Boolean)akka.routing.Routing.type`
+`method ensuring(cond: Boolean, msg: => Any)akka.routing.Routing.type`
+`method ensuring(cond: akka.routing.Routing.type => Boolean)akka.routing.Routing.type`
+`method ensuring(cond: akka.routing.Routing.type => Boolean, msg: => Any)akka.routing.Routing.type`
+`method eq(x$1: AnyRef)Boolean`
+`method equals(x$1: Any)Boolean`
+`method filter[A, B](filter: akka.routing.Routing.PF[A,Unit], filtered: akka.routing.Routing.PF[A,B])akka.routing.Routing.PF[A,B]`
+`method finalize()Unit`
+`method formatted(fmtstr: String)String`
+`method hashCode()Int`
+`method intercept[A, B](interceptor: A => Unit, interceptee: akka.routing.Routing.PF[A,B])akka.routing.Routing.PF[A,B]`
+`method isInstanceOf[T0]=> Boolean`
+`method loadBalancerActor(actors: => akka.routing.InfiniteIterator[akka.actor.ActorRef])akka.actor.ActorRef`
+`method loggerActor(actorToLog: akka.actor.ActorRef, logger: Any => Unit)akka.actor.ActorRef`
+`method ne(x$1: AnyRef)Boolean`
+`method notify()Unit`
+`method notifyAll()Unit`
+`method synchronized[T0](x$1: T0)T0`
+`method toString()String`
+`method wait()Unit`
+`method wait(x$1: Long)Unit`
+`method wait(x$1: Long, x$2: Int)Unit`
+`method x=> akka.routing.Routing.type`
+`method →[B](y: B)(akka.routing.Routing.type, B)`
+`trait RoutingMessageakka.routing.Routing.RoutingMessage`
+`type PFakka.routing.Routing.PF`
+`value __leftOfArrowakka.routing.Routing.type`
+`value __resultOfEnsuringakka.routing.Routing.type`
+`value selfAny`
+================================================================================
+
+askTypeCompletion at pi.scala(55,73)
+================================================================================
+[response] aksTypeCompletion at (55,73)
+retrieved 131 members
+`method !!(implicit sender: Option[akka.actor.ActorRef])akka.dispatch.Future[T]`
+`method !!(message: Any, timeout: Long)(implicit sender: Option[akka.actor.ActorRef])Option[Any]`
+`method !(message: Any)(implicit sender: Option[akka.actor.ActorRef])Unit`
+`method !=(x$1: Any)Boolean`
+`method !=(x$1: AnyRef)Boolean`
+`method ##()Int`
+`method +(other: String)String`
+`method ->[B](y: B)(akka.actor.ActorRef, B)`
+`method ==(x$1: Any)Boolean`
+`method ==(x$1: AnyRef)Boolean`
+`method actor=> akka.actor.Actor`
+`method actorClass=> Class[_ <: akka.actor.Actor]`
+`method actorClassName=> String`
+`method actorInstance=> java.util.concurrent.atomic.AtomicReference[akka.actor.Actor]`
+`method asInstanceOf[T0]=> T0`
+`method channel=> akka.actor.Channel[Any]`
+`method clone()Object`
+`method compareTo(other: akka.actor.ActorRef)Int`
+`method dispatcher=> akka.dispatch.MessageDispatcher`
+`method dispatcher_=(md: akka.dispatch.MessageDispatcher)Unit`
+`method ensuring(cond: Boolean)akka.actor.ActorRef`
+`method ensuring(cond: Boolean, msg: => Any)akka.actor.ActorRef`
+`method ensuring(cond: akka.actor.ActorRef => Boolean)akka.actor.ActorRef`
+`method ensuring(cond: akka.actor.ActorRef => Boolean, msg: => Any)akka.actor.ActorRef`
+`method eq(x$1: AnyRef)Boolean`
+`method equals(that: Any)Boolean`
+`method exit()Unit`
+`method finalize()Unit`
+`method formatted(fmtstr: String)String`
+`method forward(message: Any)(implicit sender: Some[akka.actor.ActorRef])Any`
+`method forward(message: AnyRef, sender: akka.actor.ActorRef)Unit`
+`method getActorClass()Class[_ <: akka.actor.Actor]`
+`method getActorClassName()String`
+`method getChannel=> akka.actor.Channel[Any]`
+`method getDispatcher()akka.dispatch.MessageDispatcher`
+`method getFaultHandler()akka.config.Supervision.FaultHandlingStrategy`
+`method getHomeAddress()java.net.InetSocketAddress`
+`method getId()String`
+`method getLifeCycle()akka.config.Supervision.LifeCycle`
+`method getLinkedActors()java.util.Map[akka.actor.Uuid,akka.actor.ActorRef]`
+`method getMailboxSize()Int`
+`method getReceiveTimeout()Option[Long]`
+`method getSender()Option[akka.actor.ActorRef]`
+`method getSenderFuture()Option[akka.dispatch.CompletableFuture[Any]]`
+`method getSupervisor()akka.actor.ActorRef`
+`method getTimeout()Long`
+`method getUuid()akka.actor.Uuid`
+`method handleTrapExit(dead: akka.actor.ActorRef, reason: Throwable)Unit`
+`method hashCode()Int`
+`method homeAddress=> Option[java.net.InetSocketAddress]`
+`method id=> String`
+`method id_=(id: String)Unit`
+`method invoke(messageHandle: akka.dispatch.MessageInvocation)Unit`
+`method isBeingRestarted=> Boolean`
+`method isDefinedAt(message: Any)Boolean`
+`method isInstanceOf[T0]=> Boolean`
+`method isRunning=> Boolean`
+`method isShutdown=> Boolean`
+`method isUnstarted=> Boolean`
+`method link(actorRef: akka.actor.ActorRef)Unit`
+`method linkedActors=> java.util.Map[akka.actor.Uuid,akka.actor.ActorRef]`
+`method mailbox=> AnyRef`
+`method mailboxSize=> Int`
+`method mailbox_=(value: AnyRef)AnyRef`
+`method ne(x$1: AnyRef)Boolean`
+`method notify()Unit`
+`method notifyAll()Unit`
+`method postMessageToMailbox(message: Any, senderOption: Option[akka.actor.ActorRef])Unit`
+`method postMessageToMailboxAndCreateFutureResultWithTimeout[T](message: Any, timeout: Long, senderOption: Option[akka.actor.ActorRef], senderFuture: Option[akka.dispatch.CompletableFuture[T]])akka.dispatch.CompletableFuture[T]`
+`method registerSupervisorAsRemoteActor=> Option[akka.actor.Uuid]`
+`method reply(message: Any)Unit`
+`method replySafe(message: AnyRef)Boolean`
+`method replyUnsafe(message: AnyRef)Unit`
+`method reply_?(message: Any)Boolean`
+`method restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int])Unit`
+`method restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int])Unit`
+`method sendOneWay(message: AnyRef)Unit`
+`method sendOneWay(message: AnyRef, sender: akka.actor.ActorRef)Unit`
+`method sendRequestReply(message: AnyRef)AnyRef`
+`method sendRequestReply(message: AnyRef, sender: akka.actor.ActorRef)AnyRef`
+`method sendRequestReply(message: AnyRef, timeout: Long, sender: akka.actor.ActorRef)AnyRef`
+`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef)akka.dispatch.Future[T]`
+`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef, sender: akka.actor.ActorRef)akka.dispatch.Future[T]`
+`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef, timeout: Long, sender: akka.actor.ActorRef)akka.dispatch.Future[T]`
+`method sender=> Option[akka.actor.ActorRef]`
+`method senderFuture()Option[akka.dispatch.CompletableFuture[Any]]`
+`method setDispatcher(dispatcher: akka.dispatch.MessageDispatcher)Unit`
+`method setFaultHandler(handler: akka.config.Supervision.FaultHandlingStrategy)Unit`
+`method setId(x$1: String)Unit`
+`method setLifeCycle(lifeCycle: akka.config.Supervision.LifeCycle)Unit`
+`method setReceiveTimeout(timeout: Long)Unit`
+`method setTimeout(x$1: Long)Unit`
+`method spawn(clazz: Class[_ <: akka.actor.Actor])akka.actor.ActorRef`
+`method spawnLink(clazz: Class[_ <: akka.actor.Actor])akka.actor.ActorRef`
+`method spawnLinkRemote(clazz: Class[_ <: akka.actor.Actor], hostname: String, port: Int, timeout: Long)akka.actor.ActorRef`
+`method spawnLinkRemote[T <: akka.actor.Actor](hostname: String, port: Int, timeout: Long)(implicit evidence$4: ClassTag[T])akka.actor.ActorRef`
+`method spawnLink[T <: akka.actor.Actor](implicit evidence$3: ClassTag[T])akka.actor.ActorRef`
+`method spawnRemote(clazz: Class[_ <: akka.actor.Actor], hostname: String, port: Int, timeout: Long)akka.actor.ActorRef`
+`method spawnRemote[T <: akka.actor.Actor](hostname: String, port: Int, timeout: Long)(implicit evidence$2: ClassTag[T])akka.actor.ActorRef`
+`method spawn[T <: akka.actor.Actor](implicit evidence$1: ClassTag[T])akka.actor.ActorRef`
+`method start()akka.actor.ActorRef`
+`method startLink(actorRef: akka.actor.ActorRef)Unit`
+`method stop()Unit`
+`method supervisor=> Option[akka.actor.ActorRef]`
+`method supervisor_=(sup: Option[akka.actor.ActorRef])Unit`
+`method synchronized[T0](x$1: T0)T0`
+`method toString()String`
+`method unlink(actorRef: akka.actor.ActorRef)Unit`
+`method uuid=> akka.actor.Uuid`
+`method uuid_=(uid: akka.actor.Uuid)Unit`
+`method wait()Unit`
+`method wait(x$1: Long)Unit`
+`method wait(x$1: Long, x$2: Int)Unit`
+`method x=> akka.actor.ActorRef`
+`method →[B](y: B)(akka.actor.ActorRef, B)`
+`value __leftOfArrowakka.actor.ActorRef`
+`value __resultOfEnsuringakka.actor.ActorRef`
+`value selfAny`
+`variable _statusakka.actor.ActorRefInternals.StatusType`
+`variable _uuidakka.actor.Uuid`
+`variable currentMessageakka.dispatch.MessageInvocation`
+`variable faultHandlerakka.config.Supervision.FaultHandlingStrategy`
+`variable hotswapscala.collection.immutable.Stack[PartialFunction[Any,Unit]]`
+`variable idString`
+`variable lifeCycleakka.config.Supervision.LifeCycle`
+`variable receiveTimeoutOption[Long]`
+`variable timeoutLong`
+================================================================================
+
+askTypeCompletion at pi.scala(65,15)
+================================================================================
+[response] aksTypeCompletion at (65,15)
+retrieved 131 members
+`method !!(implicit sender: Option[akka.actor.ActorRef])akka.dispatch.Future[T]`
+`method !!(message: Any, timeout: Long)(implicit sender: Option[akka.actor.ActorRef])Option[Any]`
+`method !(message: Any)(implicit sender: Option[akka.actor.ActorRef])Unit`
+`method !=(x$1: Any)Boolean`
+`method !=(x$1: AnyRef)Boolean`
+`method ##()Int`
+`method +(other: String)String`
+`method ->[B](y: B)(akka.actor.ActorRef, B)`
+`method ==(x$1: Any)Boolean`
+`method ==(x$1: AnyRef)Boolean`
+`method actor=> akka.actor.Actor`
+`method actorClass=> Class[_ <: akka.actor.Actor]`
+`method actorClassName=> String`
+`method actorInstance=> java.util.concurrent.atomic.AtomicReference[akka.actor.Actor]`
+`method asInstanceOf[T0]=> T0`
+`method channel=> akka.actor.Channel[Any]`
+`method clone()Object`
+`method compareTo(other: akka.actor.ActorRef)Int`
+`method dispatcher=> akka.dispatch.MessageDispatcher`
+`method dispatcher_=(md: akka.dispatch.MessageDispatcher)Unit`
+`method ensuring(cond: Boolean)akka.actor.ActorRef`
+`method ensuring(cond: Boolean, msg: => Any)akka.actor.ActorRef`
+`method ensuring(cond: akka.actor.ActorRef => Boolean)akka.actor.ActorRef`
+`method ensuring(cond: akka.actor.ActorRef => Boolean, msg: => Any)akka.actor.ActorRef`
+`method eq(x$1: AnyRef)Boolean`
+`method equals(that: Any)Boolean`
+`method exit()Unit`
+`method finalize()Unit`
+`method formatted(fmtstr: String)String`
+`method forward(message: Any)(implicit sender: Some[akka.actor.ActorRef])Any`
+`method forward(message: AnyRef, sender: akka.actor.ActorRef)Unit`
+`method getActorClass()Class[_ <: akka.actor.Actor]`
+`method getActorClassName()String`
+`method getChannel=> akka.actor.Channel[Any]`
+`method getDispatcher()akka.dispatch.MessageDispatcher`
+`method getFaultHandler()akka.config.Supervision.FaultHandlingStrategy`
+`method getHomeAddress()java.net.InetSocketAddress`
+`method getId()String`
+`method getLifeCycle()akka.config.Supervision.LifeCycle`
+`method getLinkedActors()java.util.Map[akka.actor.Uuid,akka.actor.ActorRef]`
+`method getMailboxSize()Int`
+`method getReceiveTimeout()Option[Long]`
+`method getSender()Option[akka.actor.ActorRef]`
+`method getSenderFuture()Option[akka.dispatch.CompletableFuture[Any]]`
+`method getSupervisor()akka.actor.ActorRef`
+`method getTimeout()Long`
+`method getUuid()akka.actor.Uuid`
+`method handleTrapExit(dead: akka.actor.ActorRef, reason: Throwable)Unit`
+`method hashCode()Int`
+`method homeAddress=> Option[java.net.InetSocketAddress]`
+`method id=> String`
+`method id_=(id: String)Unit`
+`method invoke(messageHandle: akka.dispatch.MessageInvocation)Unit`
+`method isBeingRestarted=> Boolean`
+`method isDefinedAt(message: Any)Boolean`
+`method isInstanceOf[T0]=> Boolean`
+`method isRunning=> Boolean`
+`method isShutdown=> Boolean`
+`method isUnstarted=> Boolean`
+`method link(actorRef: akka.actor.ActorRef)Unit`
+`method linkedActors=> java.util.Map[akka.actor.Uuid,akka.actor.ActorRef]`
+`method mailbox=> AnyRef`
+`method mailboxSize=> Int`
+`method mailbox_=(value: AnyRef)AnyRef`
+`method ne(x$1: AnyRef)Boolean`
+`method notify()Unit`
+`method notifyAll()Unit`
+`method postMessageToMailbox(message: Any, senderOption: Option[akka.actor.ActorRef])Unit`
+`method postMessageToMailboxAndCreateFutureResultWithTimeout[T](message: Any, timeout: Long, senderOption: Option[akka.actor.ActorRef], senderFuture: Option[akka.dispatch.CompletableFuture[T]])akka.dispatch.CompletableFuture[T]`
+`method registerSupervisorAsRemoteActor=> Option[akka.actor.Uuid]`
+`method reply(message: Any)Unit`
+`method replySafe(message: AnyRef)Boolean`
+`method replyUnsafe(message: AnyRef)Unit`
+`method reply_?(message: Any)Boolean`
+`method restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int])Unit`
+`method restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int])Unit`
+`method sendOneWay(message: AnyRef)Unit`
+`method sendOneWay(message: AnyRef, sender: akka.actor.ActorRef)Unit`
+`method sendRequestReply(message: AnyRef)AnyRef`
+`method sendRequestReply(message: AnyRef, sender: akka.actor.ActorRef)AnyRef`
+`method sendRequestReply(message: AnyRef, timeout: Long, sender: akka.actor.ActorRef)AnyRef`
+`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef)akka.dispatch.Future[T]`
+`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef, sender: akka.actor.ActorRef)akka.dispatch.Future[T]`
+`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef, timeout: Long, sender: akka.actor.ActorRef)akka.dispatch.Future[T]`
+`method sender=> Option[akka.actor.ActorRef]`
+`method senderFuture()Option[akka.dispatch.CompletableFuture[Any]]`
+`method setDispatcher(dispatcher: akka.dispatch.MessageDispatcher)Unit`
+`method setFaultHandler(handler: akka.config.Supervision.FaultHandlingStrategy)Unit`
+`method setId(x$1: String)Unit`
+`method setLifeCycle(lifeCycle: akka.config.Supervision.LifeCycle)Unit`
+`method setReceiveTimeout(timeout: Long)Unit`
+`method setTimeout(x$1: Long)Unit`
+`method spawn(clazz: Class[_ <: akka.actor.Actor])akka.actor.ActorRef`
+`method spawnLink(clazz: Class[_ <: akka.actor.Actor])akka.actor.ActorRef`
+`method spawnLinkRemote(clazz: Class[_ <: akka.actor.Actor], hostname: String, port: Int, timeout: Long)akka.actor.ActorRef`
+`method spawnLinkRemote[T <: akka.actor.Actor](hostname: String, port: Int, timeout: Long)(implicit evidence$4: ClassTag[T])akka.actor.ActorRef`
+`method spawnLink[T <: akka.actor.Actor](implicit evidence$3: ClassTag[T])akka.actor.ActorRef`
+`method spawnRemote(clazz: Class[_ <: akka.actor.Actor], hostname: String, port: Int, timeout: Long)akka.actor.ActorRef`
+`method spawnRemote[T <: akka.actor.Actor](hostname: String, port: Int, timeout: Long)(implicit evidence$2: ClassTag[T])akka.actor.ActorRef`
+`method spawn[T <: akka.actor.Actor](implicit evidence$1: ClassTag[T])akka.actor.ActorRef`
+`method start()akka.actor.ActorRef`
+`method startLink(actorRef: akka.actor.ActorRef)Unit`
+`method stop()Unit`
+`method supervisor=> Option[akka.actor.ActorRef]`
+`method supervisor_=(sup: Option[akka.actor.ActorRef])Unit`
+`method synchronized[T0](x$1: T0)T0`
+`method toString()String`
+`method unlink(actorRef: akka.actor.ActorRef)Unit`
+`method uuid=> akka.actor.Uuid`
+`method uuid_=(uid: akka.actor.Uuid)Unit`
+`method wait()Unit`
+`method wait(x$1: Long)Unit`
+`method wait(x$1: Long, x$2: Int)Unit`
+`method x=> akka.actor.ActorRef`
+`method →[B](y: B)(akka.actor.ActorRef, B)`
+`value __leftOfArrowakka.actor.ActorRef`
+`value __resultOfEnsuringakka.actor.ActorRef`
+`value selfAny`
+`variable _statusakka.actor.ActorRefInternals.StatusType`
+`variable _uuidakka.actor.Uuid`
+`variable currentMessageakka.dispatch.MessageInvocation`
+`variable faultHandlerakka.config.Supervision.FaultHandlingStrategy`
+`variable hotswapscala.collection.immutable.Stack[PartialFunction[Any,Unit]]`
+`variable idString`
+`variable lifeCycleakka.config.Supervision.LifeCycle`
+`variable receiveTimeoutOption[Long]`
+`variable timeoutLong`
+================================================================================
+
+askTypeCompletion at pi.scala(74,46)
+================================================================================
+[response] aksTypeCompletion at (74,46)
+retrieved 131 members
+`method !!(implicit sender: Option[akka.actor.ActorRef])akka.dispatch.Future[T]`
+`method !!(message: Any, timeout: Long)(implicit sender: Option[akka.actor.ActorRef])Option[Any]`
+`method !(message: Any)(implicit sender: Option[akka.actor.ActorRef])Unit`
+`method !=(x$1: Any)Boolean`
+`method !=(x$1: AnyRef)Boolean`
+`method ##()Int`
+`method +(other: String)String`
+`method ->[B](y: B)(akka.actor.ScalaActorRef, B)`
+`method ==(x$1: Any)Boolean`
+`method ==(x$1: AnyRef)Boolean`
+`method actor=> akka.actor.Actor`
+`method actorClass=> Class[_ <: akka.actor.Actor]`
+`method actorClassName=> String`
+`method actorInstance=> java.util.concurrent.atomic.AtomicReference[akka.actor.Actor]`
+`method asInstanceOf[T0]=> T0`
+`method channel=> akka.actor.Channel[Any]`
+`method clone()Object`
+`method compareTo(other: akka.actor.ActorRef)Int`
+`method dispatcher=> akka.dispatch.MessageDispatcher`
+`method dispatcher_=(md: akka.dispatch.MessageDispatcher)Unit`
+`method ensuring(cond: Boolean)akka.actor.ScalaActorRef`
+`method ensuring(cond: Boolean, msg: => Any)akka.actor.ScalaActorRef`
+`method ensuring(cond: akka.actor.ScalaActorRef => Boolean)akka.actor.ScalaActorRef`
+`method ensuring(cond: akka.actor.ScalaActorRef => Boolean, msg: => Any)akka.actor.ScalaActorRef`
+`method eq(x$1: AnyRef)Boolean`
+`method equals(x$1: Any)Boolean`
+`method exit()Unit`
+`method finalize()Unit`
+`method formatted(fmtstr: String)String`
+`method forward(message: Any)(implicit sender: Some[akka.actor.ActorRef])Any`
+`method forward(message: AnyRef, sender: akka.actor.ActorRef)Unit`
+`method getActorClass()Class[_ <: akka.actor.Actor]`
+`method getActorClassName()String`
+`method getChannel=> akka.actor.Channel[Any]`
+`method getDispatcher()akka.dispatch.MessageDispatcher`
+`method getFaultHandler()akka.config.Supervision.FaultHandlingStrategy`
+`method getHomeAddress()java.net.InetSocketAddress`
+`method getId()String`
+`method getLifeCycle()akka.config.Supervision.LifeCycle`
+`method getLinkedActors()java.util.Map[akka.actor.Uuid,akka.actor.ActorRef]`
+`method getMailboxSize()Int`
+`method getReceiveTimeout()Option[Long]`
+`method getSender()Option[akka.actor.ActorRef]`
+`method getSenderFuture()Option[akka.dispatch.CompletableFuture[Any]]`
+`method getSupervisor()akka.actor.ActorRef`
+`method getTimeout()Long`
+`method getUuid()akka.actor.Uuid`
+`method handleTrapExit(dead: akka.actor.ActorRef, reason: Throwable)Unit`
+`method hashCode()Int`
+`method homeAddress=> Option[java.net.InetSocketAddress]`
+`method id=> String`
+`method id_=(id: String)Unit`
+`method invoke(messageHandle: akka.dispatch.MessageInvocation)Unit`
+`method isBeingRestarted=> Boolean`
+`method isDefinedAt(message: Any)Boolean`
+`method isInstanceOf[T0]=> Boolean`
+`method isRunning=> Boolean`
+`method isShutdown=> Boolean`
+`method isUnstarted=> Boolean`
+`method link(actorRef: akka.actor.ActorRef)Unit`
+`method linkedActors=> java.util.Map[akka.actor.Uuid,akka.actor.ActorRef]`
+`method mailbox=> AnyRef`
+`method mailboxSize=> Int`
+`method mailbox_=(value: AnyRef)AnyRef`
+`method ne(x$1: AnyRef)Boolean`
+`method notify()Unit`
+`method notifyAll()Unit`
+`method postMessageToMailbox(message: Any, senderOption: Option[akka.actor.ActorRef])Unit`
+`method postMessageToMailboxAndCreateFutureResultWithTimeout[T](message: Any, timeout: Long, senderOption: Option[akka.actor.ActorRef], senderFuture: Option[akka.dispatch.CompletableFuture[T]])akka.dispatch.CompletableFuture[T]`
+`method registerSupervisorAsRemoteActor=> Option[akka.actor.Uuid]`
+`method reply(message: Any)Unit`
+`method replySafe(message: AnyRef)Boolean`
+`method replyUnsafe(message: AnyRef)Unit`
+`method reply_?(message: Any)Boolean`
+`method restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int])Unit`
+`method restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int])Unit`
+`method sendOneWay(message: AnyRef)Unit`
+`method sendOneWay(message: AnyRef, sender: akka.actor.ActorRef)Unit`
+`method sendRequestReply(message: AnyRef)AnyRef`
+`method sendRequestReply(message: AnyRef, sender: akka.actor.ActorRef)AnyRef`
+`method sendRequestReply(message: AnyRef, timeout: Long, sender: akka.actor.ActorRef)AnyRef`
+`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef)akka.dispatch.Future[T]`
+`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef, sender: akka.actor.ActorRef)akka.dispatch.Future[T]`
+`method sendRequestReplyFuture[T <: AnyRef](message: AnyRef, timeout: Long, sender: akka.actor.ActorRef)akka.dispatch.Future[T]`
+`method sender=> Option[akka.actor.ActorRef]`
+`method senderFuture()Option[akka.dispatch.CompletableFuture[Any]]`
+`method setDispatcher(dispatcher: akka.dispatch.MessageDispatcher)Unit`
+`method setFaultHandler(x$1: akka.config.Supervision.FaultHandlingStrategy)Unit`
+`method setId(x$1: String)Unit`
+`method setLifeCycle(x$1: akka.config.Supervision.LifeCycle)Unit`
+`method setReceiveTimeout(timeout: Long)Unit`
+`method setTimeout(x$1: Long)Unit`
+`method spawn(clazz: Class[_ <: akka.actor.Actor])akka.actor.ActorRef`
+`method spawnLink(clazz: Class[_ <: akka.actor.Actor])akka.actor.ActorRef`
+`method spawnLinkRemote(clazz: Class[_ <: akka.actor.Actor], hostname: String, port: Int, timeout: Long)akka.actor.ActorRef`
+`method spawnLinkRemote[T <: akka.actor.Actor](hostname: String, port: Int, timeout: Long)(implicit evidence$4: ClassTag[T])akka.actor.ActorRef`
+`method spawnLink[T <: akka.actor.Actor](implicit evidence$3: ClassTag[T])akka.actor.ActorRef`
+`method spawnRemote(clazz: Class[_ <: akka.actor.Actor], hostname: String, port: Int, timeout: Long)akka.actor.ActorRef`
+`method spawnRemote[T <: akka.actor.Actor](hostname: String, port: Int, timeout: Long)(implicit evidence$2: ClassTag[T])akka.actor.ActorRef`
+`method spawn[T <: akka.actor.Actor](implicit evidence$1: ClassTag[T])akka.actor.ActorRef`
+`method start()akka.actor.ActorRef`
+`method startLink(actorRef: akka.actor.ActorRef)Unit`
+`method stop()Unit`
+`method supervisor=> Option[akka.actor.ActorRef]`
+`method supervisor_=(sup: Option[akka.actor.ActorRef])Unit`
+`method synchronized[T0](x$1: T0)T0`
+`method toString()String`
+`method unlink(actorRef: akka.actor.ActorRef)Unit`
+`method uuid=> akka.actor.Uuid`
+`method uuid_=(uid: akka.actor.Uuid)Unit`
+`method wait()Unit`
+`method wait(x$1: Long)Unit`
+`method wait(x$1: Long, x$2: Int)Unit`
+`method x=> akka.actor.ScalaActorRef`
+`method →[B](y: B)(akka.actor.ScalaActorRef, B)`
+`value __leftOfArrowakka.actor.ScalaActorRef`
+`value __resultOfEnsuringakka.actor.ScalaActorRef`
+`value selfAny`
+`variable _statusakka.actor.ActorRefInternals.StatusType`
+`variable _uuidakka.actor.Uuid`
+`variable currentMessageakka.dispatch.MessageInvocation`
+`variable faultHandlerakka.config.Supervision.FaultHandlingStrategy`
+`variable hotswapscala.collection.immutable.Stack[PartialFunction[Any,Unit]]`
+`variable idString`
+`variable lifeCycleakka.config.Supervision.LifeCycle`
+`variable receiveTimeoutOption[Long]`
+`variable timeoutLong`
+================================================================================
+
+askType at pi.scala(34,16)
+================================================================================
+[response] askTypeAt at (34,16)
+def receive: PartialFunction[Any,Unit] = ((x0$3: Any) => x0$3 match {
+ case (start: Int, nrOfElements: Int)Pi.Work((start @ _), (nrOfElements @ _)) => Worker.this.self.reply(Pi.this.Result.apply(Worker.this.calculatePiFor(start, nrOfElements)))
+})
+================================================================================
+
+askHyperlinkPos for `calculate` at (11,11) pi.scala
+================================================================================
+[response] found askHyperlinkPos for `calculate` at (93,7) pi.scala
+================================================================================
+
+askHyperlinkPos for `PiMessage` at (17,41) pi.scala
+================================================================================
+[response] found askHyperlinkPos for `PiMessage` at (16,16) pi.scala
+================================================================================
+
+askHyperlinkPos for `Actor` at (24,28) pi.scala
+================================================================================
+[response] found askHyperlinkPos for `Actor` at (289,7) Actor.scala
+================================================================================
+
+askHyperlinkPos for `reply` at (36,18) pi.scala
+================================================================================
+[response] found askHyperlinkPos for `reply` at (1382,7) ActorRef.scala
+================================================================================
+
+askHyperlinkPos for `nrOfResults` at (73,19) pi.scala
+================================================================================
+[response] found askHyperlinkPos for `nrOfResults` at (48,9) pi.scala
+================================================================================
+
+askHyperlinkPos for `latch` at (86,11) pi.scala
+================================================================================
+[response] found askHyperlinkPos for `latch` at (44,61) pi.scala
+================================================================================
diff --git a/test/disabled/presentation/akka.flags b/test/disabled/presentation/akka.flags
new file mode 100644
index 0000000..56d026a
--- /dev/null
+++ b/test/disabled/presentation/akka.flags
@@ -0,0 +1,18 @@
+# This file contains command line options that are passed to the presentation compiler
+# Lines starting with # are stripped, and you can split arguments on several lines.
+
+# The -bootclasspath option is treated specially by the test framework: if it's not specified
+# in this file, the presentation compiler will pick up the scala-library/compiler that's on the
+# java classpath used to run this test (usually build/pack)
+
+# Any option can be passed this way, like presentation debug
+# -Ypresentation-debug -Ypresentation-verbose
+
+# the classpath is relative to the current working directory. That means it depends where you're
+# running partest from. Run it from the root scala checkout for these files to resolve correctly
+# (by default when running 'ant test', or 'test/partest'). Paths use Unix separators, the test
+# framework translates them to the platform dependent representation.
+# -bootclasspath lib/scala-compiler.jar:lib/scala-library.jar:lib/fjbg.jar
+
+# the following line would test using the quick compiler
+# -bootclasspath build/quick/classes/compiler:build/quick/classes/library:lib/fjbg.jar
diff --git a/test/disabled/presentation/akka/Runner.scala b/test/disabled/presentation/akka/Runner.scala
new file mode 100644
index 0000000..14a6aa8
--- /dev/null
+++ b/test/disabled/presentation/akka/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest
diff --git a/test/disabled/presentation/akka/src/akka/AkkaException.scala b/test/disabled/presentation/akka/src/akka/AkkaException.scala
new file mode 100644
index 0000000..155a7a1
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/AkkaException.scala
@@ -0,0 +1,40 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka
+
+import akka.actor.newUuid
+import java.net.{ InetAddress, UnknownHostException }
+
+/**
+ * Akka base Exception. Each Exception gets:
+ * <ul>
+ * <li>a uuid for tracking purposes</li>
+ * <li>toString that includes exception name, message, uuid, and the stacktrace</li>
+ * </ul>
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+class AkkaException(message: String = "", cause: Throwable = null) extends RuntimeException(message, cause) with Serializable {
+ val uuid = "%s_%s".format(AkkaException.hostname, newUuid)
+
+ override lazy val toString =
+ "%s: %s\n[%s]\n%s".format(getClass.getName, message, uuid, stackTraceToString)
+
+ def stackTraceToString = {
+ val trace = getStackTrace
+ val sb = new StringBuffer
+ for (i ← 0 until trace.length)
+ sb.append("\tat %s\n" format trace(i))
+ sb.toString
+ }
+}
+
+object AkkaException {
+ val hostname = try {
+ InetAddress.getLocalHost.getHostName
+ } catch {
+ case e: UnknownHostException => "unknown"
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/actor/Actor.scala b/test/disabled/presentation/akka/src/akka/actor/Actor.scala
new file mode 100644
index 0000000..b9bc51b
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/actor/Actor.scala
@@ -0,0 +1,503 @@
+/** Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.actor
+
+import akka.dispatch._
+import akka.config.Config._
+import akka.util.Helpers.{ narrow, narrowSilently }
+import akka.util.ListenerManagement
+import akka.AkkaException
+
+import scala.beans.BeanProperty
+import akka.util.{ ReflectiveAccess, Duration }
+import akka.remoteinterface.RemoteSupport
+import akka.japi.{ Creator, Procedure }
+import java.lang.reflect.InvocationTargetException
+
+/** Life-cycle messages for the Actors
+ */
+sealed trait LifeCycleMessage extends Serializable
+
+/* Marker trait to show which Messages are automatically handled by Akka */
+sealed trait AutoReceivedMessage { self: LifeCycleMessage => }
+
+case class HotSwap(code: ActorRef => Actor.Receive, discardOld: Boolean = true)
+ extends AutoReceivedMessage with LifeCycleMessage {
+
+ /** Java API
+ */
+ def this(code: akka.japi.Function[ActorRef, Procedure[Any]], discardOld: Boolean) =
+ this((self: ActorRef) => {
+ val behavior = code(self)
+ val result: Actor.Receive = { case msg => behavior(msg) }
+ result
+ }, discardOld)
+
+ /** Java API with default non-stacking behavior
+ */
+ def this(code: akka.japi.Function[ActorRef, Procedure[Any]]) = this(code, true)
+}
+
+case object RevertHotSwap extends AutoReceivedMessage with LifeCycleMessage
+
+case class Restart(reason: Throwable) extends AutoReceivedMessage with LifeCycleMessage
+
+case class Exit(dead: ActorRef, killer: Throwable) extends AutoReceivedMessage with LifeCycleMessage
+
+case class Link(child: ActorRef) extends AutoReceivedMessage with LifeCycleMessage
+
+case class Unlink(child: ActorRef) extends AutoReceivedMessage with LifeCycleMessage
+
+case class UnlinkAndStop(child: ActorRef) extends AutoReceivedMessage with LifeCycleMessage
+
+case object PoisonPill extends AutoReceivedMessage with LifeCycleMessage
+
+case object Kill extends AutoReceivedMessage with LifeCycleMessage
+
+case object ReceiveTimeout extends LifeCycleMessage
+
+case class MaximumNumberOfRestartsWithinTimeRangeReached(
+ @BeanProperty val victim: ActorRef,
+ @BeanProperty val maxNrOfRetries: Option[Int],
+ @BeanProperty val withinTimeRange: Option[Int],
+ @BeanProperty val lastExceptionCausingRestart: Throwable) extends LifeCycleMessage
+
+// Exceptions for Actors
+class ActorStartException private[akka] (message: String, cause: Throwable = null) extends AkkaException(message, cause)
+class IllegalActorStateException private[akka] (message: String, cause: Throwable = null) extends AkkaException(message, cause)
+class ActorKilledException private[akka] (message: String, cause: Throwable = null) extends AkkaException(message, cause)
+class ActorInitializationException private[akka] (message: String, cause: Throwable = null) extends AkkaException(message, cause)
+class ActorTimeoutException private[akka] (message: String, cause: Throwable = null) extends AkkaException(message, cause)
+class InvalidMessageException private[akka] (message: String, cause: Throwable = null) extends AkkaException(message, cause)
+
+/** This message is thrown by default when an Actors behavior doesn't match a message
+ */
+case class UnhandledMessageException(msg: Any, ref: ActorRef) extends Exception {
+ override def getMessage() = "Actor %s does not handle [%s]".format(ref, msg)
+ override def fillInStackTrace() = this //Don't waste cycles generating stack trace
+}
+
+/** Actor factory module with factory methods for creating various kinds of Actors.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+object Actor extends ListenerManagement {
+
+ /** Add shutdown cleanups
+ */
+ private[akka] lazy val shutdownHook = {
+ val hook = new Runnable {
+ override def run {
+ // Clear Thread.subclassAudits
+ val tf = classOf[java.lang.Thread].getDeclaredField("subclassAudits")
+ tf.setAccessible(true)
+ val subclassAudits = tf.get(null).asInstanceOf[java.util.Map[_, _]]
+ subclassAudits synchronized { subclassAudits.clear }
+ }
+ }
+ Runtime.getRuntime.addShutdownHook(new Thread(hook))
+ hook
+ }
+
+ val registry = new ActorRegistry
+
+ lazy val remote: RemoteSupport = {
+ ReflectiveAccess
+ .Remote
+ .defaultRemoteSupport
+ .map(_())
+ .getOrElse(throw new UnsupportedOperationException("You need to have akka-remote.jar on classpath"))
+ }
+
+ private[akka] val TIMEOUT = Duration(config.getInt("akka.actor.timeout", 5), TIME_UNIT).toMillis
+ private[akka] val SERIALIZE_MESSAGES = config.getBool("akka.actor.serialize-messages", false)
+
+ /** A Receive is a convenience type that defines actor message behavior currently modeled as
+ * a PartialFunction[Any, Unit].
+ */
+ type Receive = PartialFunction[Any, Unit]
+
+ private[actor] val actorRefInCreation = new ThreadLocal[Option[ActorRef]] {
+ override def initialValue = None
+ }
+
+ /** Creates an ActorRef out of the Actor with type T.
+ * <pre>
+ * import Actor._
+ * val actor = actorOf[MyActor]
+ * actor.start()
+ * actor ! message
+ * actor.stop()
+ * </pre>
+ * You can create and start the actor in one statement like this:
+ * <pre>
+ * val actor = actorOf[MyActor].start()
+ * </pre>
+ */
+ def actorOf[T <: Actor: ClassTag]: ActorRef = actorOf(classTag[T].erasure.asInstanceOf[Class[_ <: Actor]])
+
+ /** Creates an ActorRef out of the Actor of the specified Class.
+ * <pre>
+ * import Actor._
+ * val actor = actorOf(classOf[MyActor])
+ * actor.start()
+ * actor ! message
+ * actor.stop()
+ * </pre>
+ * You can create and start the actor in one statement like this:
+ * <pre>
+ * val actor = actorOf(classOf[MyActor]).start()
+ * </pre>
+ */
+ def actorOf(clazz: Class[_ <: Actor]): ActorRef = new LocalActorRef(() => {
+ import ReflectiveAccess.{ createInstance, noParams, noArgs }
+ createInstance[Actor](clazz.asInstanceOf[Class[_]], noParams, noArgs) match {
+ case Right(actor) => actor
+ case Left(exception) =>
+ val cause = exception match {
+ case i: InvocationTargetException => i.getTargetException
+ case _ => exception
+ }
+
+ throw new ActorInitializationException(
+ "Could not instantiate Actor of " + clazz +
+ "\nMake sure Actor is NOT defined inside a class/trait," +
+ "\nif so put it outside the class/trait, f.e. in a companion object," +
+ "\nOR try to change: 'actorOf[MyActor]' to 'actorOf(new MyActor)'.", cause)
+ }
+
+ }, None)
+
+ /** Creates an ActorRef out of the Actor. Allows you to pass in a factory function
+ * that creates the Actor. Please note that this function can be invoked multiple
+ * times if for example the Actor is supervised and needs to be restarted.
+ * <p/>
+ * This function should <b>NOT</b> be used for remote actors.
+ * <pre>
+ * import Actor._
+ * val actor = actorOf(new MyActor)
+ * actor.start()
+ * actor ! message
+ * actor.stop()
+ * </pre>
+ * You can create and start the actor in one statement like this:
+ * <pre>
+ * val actor = actorOf(new MyActor).start()
+ * </pre>
+ */
+ def actorOf(factory: => Actor): ActorRef = new LocalActorRef(() => factory, None)
+
+ /** Creates an ActorRef out of the Actor. Allows you to pass in a factory (Creator<Actor>)
+ * that creates the Actor. Please note that this function can be invoked multiple
+ * times if for example the Actor is supervised and needs to be restarted.
+ * <p/>
+ * This function should <b>NOT</b> be used for remote actors.
+ * JAVA API
+ */
+ def actorOf(creator: Creator[Actor]): ActorRef = new LocalActorRef(() => creator.create, None)
+
+ /** Use to spawn out a block of code in an event-driven actor. Will shut actor down when
+ * the block has been executed.
+ * <p/>
+ * NOTE: If used from within an Actor then has to be qualified with 'Actor.spawn' since
+ * there is a method 'spawn[ActorType]' in the Actor trait already.
+ * Example:
+ * <pre>
+ * import Actor.{spawn}
+ *
+ * spawn {
+ * ... // do stuff
+ * }
+ * </pre>
+ */
+ def spawn(body: => Unit)(implicit dispatcher: MessageDispatcher = Dispatchers.defaultGlobalDispatcher): Unit = {
+ case object Spawn
+ actorOf(new Actor() {
+ self.dispatcher = dispatcher
+ def receive = {
+ case Spawn => try { body } finally { self.stop() }
+ }
+ }).start() ! Spawn
+ }
+
+ /** Implicitly converts the given Option[Any] to a AnyOptionAsTypedOption which offers the method <code>as[T]</code>
+ * to convert an Option[Any] to an Option[T].
+ */
+ implicit def toAnyOptionAsTypedOption(anyOption: Option[Any]) = new AnyOptionAsTypedOption(anyOption)
+
+ /** Implicitly converts the given Future[_] to a AnyOptionAsTypedOption which offers the method <code>as[T]</code>
+ * to convert an Option[Any] to an Option[T].
+ * This means that the following code is equivalent:
+ * (actor !! "foo").as[Int] (Deprecated)
+ * and
+ * (actor !!! "foo").as[Int] (Recommended)
+ */
+ implicit def futureToAnyOptionAsTypedOption(anyFuture: Future[_]) = new AnyOptionAsTypedOption({
+ try { anyFuture.await } catch { case t: FutureTimeoutException => }
+ anyFuture.resultOrException
+ })
+}
+
+/** Actor base trait that should be extended by or mixed to create an Actor with the semantics of the 'Actor Model':
+ * <a href="http://en.wikipedia.org/wiki/Actor_model">http://en.wikipedia.org/wiki/Actor_model</a>
+ * <p/>
+ * An actor has a well-defined (non-cyclic) life-cycle.
+ * <pre>
+ * => NEW (newly created actor) - can't receive messages (yet)
+ * => STARTED (when 'start' is invoked) - can receive messages
+ * => SHUT DOWN (when 'exit' is invoked) - can't do anything
+ * </pre>
+ *
+ * <p/>
+ * The Actor's API is available in the 'self' member variable.
+ *
+ * <p/>
+ * Here you find functions like:
+ * - !, !!, !!! and forward
+ * - link, unlink, startLink, spawnLink etc
+ * - makeRemote etc.
+ * - start, stop
+ * - etc.
+ *
+ * <p/>
+ * Here you also find fields like
+ * - dispatcher = ...
+ * - id = ...
+ * - lifeCycle = ...
+ * - faultHandler = ...
+ * - trapExit = ...
+ * - etc.
+ *
+ * <p/>
+ * This means that to use them you have to prefix them with 'self', like this: <tt>self ! Message</tt>
+ *
+ * However, for convenience you can import these functions and fields like below, which will allow you do
+ * drop the 'self' prefix:
+ * <pre>
+ * class MyActor extends Actor {
+ * import self._
+ * id = ...
+ * dispatcher = ...
+ * spawnLink[OtherActor]
+ * ...
+ * }
+ * </pre>
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+trait Actor {
+
+ /** Type alias because traits cannot have companion objects.
+ */
+ type Receive = Actor.Receive
+
+ /*
+ * Some[ActorRef] representation of the 'self' ActorRef reference.
+ * <p/>
+ * Mainly for internal use, functions as the implicit sender references when invoking
+ * the 'forward' function.
+ */
+ @transient
+ implicit val someSelf: Some[ActorRef] = {
+ val optRef = Actor.actorRefInCreation.get
+ if (optRef.isEmpty) throw new ActorInitializationException(
+ "ActorRef for instance of actor [" + getClass.getName + "] is not in scope." +
+ "\n\tYou can not create an instance of an actor explicitly using 'new MyActor'." +
+ "\n\tYou have to use one of the factory methods in the 'Actor' object to create a new actor." +
+ "\n\tEither use:" +
+ "\n\t\t'val actor = Actor.actorOf[MyActor]', or" +
+ "\n\t\t'val actor = Actor.actorOf(new MyActor(..))'")
+ Actor.actorRefInCreation.set(None)
+ optRef.asInstanceOf[Some[ActorRef]].get.id = getClass.getName //FIXME: Is this needed?
+ optRef.asInstanceOf[Some[ActorRef]]
+ }
+
+ /*
+ * Option[ActorRef] representation of the 'self' ActorRef reference.
+ * <p/>
+ * Mainly for internal use, functions as the implicit sender references when invoking
+ * one of the message send functions ('!', '!!' and '!!!').
+ */
+ implicit def optionSelf: Option[ActorRef] = someSelf
+
+ /** The 'self' field holds the ActorRef for this actor.
+ * <p/>
+ * Can be used to send messages to itself:
+ * <pre>
+ * self ! message
+ * </pre>
+ * Here you also find most of the Actor API.
+ * <p/>
+ * For example fields like:
+ * <pre>
+ * self.dispatcher = ...
+ * self.trapExit = ...
+ * self.faultHandler = ...
+ * self.lifeCycle = ...
+ * self.sender
+ * </pre>
+ * <p/>
+ * Here you also find methods like:
+ * <pre>
+ * self.reply(..)
+ * self.link(..)
+ * self.unlink(..)
+ * self.start(..)
+ * self.stop(..)
+ * </pre>
+ */
+ @transient
+ val self: ScalaActorRef = someSelf.get
+
+ /** User overridable callback/setting.
+ * <p/>
+ * Partial function implementing the actor logic.
+ * To be implemented by concrete actor class.
+ * <p/>
+ * Example code:
+ * <pre>
+ * def receive = {
+ * case Ping =>
+ * println("got a 'Ping' message")
+ * self.reply("pong")
+ *
+ * case OneWay =>
+ * println("got a 'OneWay' message")
+ *
+ * case unknown =>
+ * println("unknown message: " + unknown)
+ * }
+ * </pre>
+ */
+ protected def receive: Receive
+
+ /** User overridable callback.
+ * <p/>
+ * Is called when an Actor is started by invoking 'actor.start()'.
+ */
+ def preStart() {}
+
+ /** User overridable callback.
+ * <p/>
+ * Is called when 'actor.stop()' is invoked.
+ */
+ def postStop() {}
+
+ /** User overridable callback.
+ * <p/>
+ * Is called on a crashed Actor right BEFORE it is restarted to allow clean up of resources before Actor is terminated.
+ */
+ def preRestart(reason: Throwable) {}
+
+ /** User overridable callback.
+ * <p/>
+ * Is called right AFTER restart on the newly created Actor to allow reinitialization after an Actor crash.
+ */
+ def postRestart(reason: Throwable) {}
+
+ /** User overridable callback.
+ * <p/>
+ * Is called when a message isn't handled by the current behavior of the actor
+ * by default it throws an UnhandledMessageException
+ */
+ def unhandled(msg: Any) {
+ throw new UnhandledMessageException(msg, self)
+ }
+
+ /** Is the actor able to handle the message passed in as arguments?
+ */
+ def isDefinedAt(message: Any): Boolean = {
+ val behaviorStack = self.hotswap
+ message match { //Same logic as apply(msg) but without the unhandled catch-all
+ case l: AutoReceivedMessage => true
+ case msg if behaviorStack.nonEmpty &&
+ behaviorStack.head.isDefinedAt(msg) => true
+ case msg if behaviorStack.isEmpty &&
+ processingBehavior.isDefinedAt(msg) => true
+ case _ => false
+ }
+ }
+
+ /** Changes the Actor's behavior to become the new 'Receive' (PartialFunction[Any, Unit]) handler.
+ * Puts the behavior on top of the hotswap stack.
+ * If "discardOld" is true, an unbecome will be issued prior to pushing the new behavior to the stack
+ */
+ def become(behavior: Receive, discardOld: Boolean = true) {
+ if (discardOld) unbecome()
+ self.hotswap = self.hotswap.push(behavior)
+ }
+
+ /** Reverts the Actor behavior to the previous one in the hotswap stack.
+ */
+ def unbecome(): Unit = {
+ val h = self.hotswap
+ if (h.nonEmpty) self.hotswap = h.pop
+ }
+
+ // =========================================
+ // ==== INTERNAL IMPLEMENTATION DETAILS ====
+ // =========================================
+
+ private[akka] final def apply(msg: Any) = {
+ if (msg.isInstanceOf[AnyRef] && (msg.asInstanceOf[AnyRef] eq null))
+ throw new InvalidMessageException("Message from [" + self.sender + "] to [" + self.toString + "] is null")
+ val behaviorStack = self.hotswap
+ msg match {
+ case l: AutoReceivedMessage => autoReceiveMessage(l)
+ case msg if behaviorStack.nonEmpty &&
+ behaviorStack.head.isDefinedAt(msg) => behaviorStack.head.apply(msg)
+ case msg if behaviorStack.isEmpty &&
+ processingBehavior.isDefinedAt(msg) => processingBehavior.apply(msg)
+ case unknown => unhandled(unknown) //This is the only line that differs from processingbehavior
+ }
+ }
+
+ private final def autoReceiveMessage(msg: AutoReceivedMessage): Unit = msg match {
+ case HotSwap(code, discardOld) => become(code(self), discardOld)
+ case RevertHotSwap => unbecome()
+ case Exit(dead, reason) => self.handleTrapExit(dead, reason)
+ case Link(child) => self.link(child)
+ case Unlink(child) => self.unlink(child)
+ case UnlinkAndStop(child) => self.unlink(child); child.stop()
+ case Restart(reason) => throw reason
+ case Kill => throw new ActorKilledException("Kill")
+ case PoisonPill =>
+ val f = self.senderFuture
+ self.stop()
+ if (f.isDefined) f.get.completeWithException(new ActorKilledException("PoisonPill"))
+ }
+
+ private lazy val processingBehavior = receive //ProcessingBehavior is the original behavior
+}
+
+private[actor] class AnyOptionAsTypedOption(anyOption: Option[Any]) {
+
+ /** Convenience helper to cast the given Option of Any to an Option of the given type. Will throw a ClassCastException
+ * if the actual type is not assignable from the given one.
+ */
+ def as[T]: Option[T] = narrow[T](anyOption)
+
+ /** Convenience helper to cast the given Option of Any to an Option of the given type. Will swallow a possible
+ * ClassCastException and return None in that case.
+ */
+ def asSilently[T: ClassTag]: Option[T] = narrowSilently[T](anyOption)
+}
+
+/** Marker interface for proxyable actors (such as typed actor).
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+trait Proxyable {
+ private[actor] def swapProxiedActor(newInstance: Actor)
+}
+
+/** Represents the different Actor types.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+sealed trait ActorType
+object ActorType {
+ case object ScalaActor extends ActorType
+ case object TypedActor extends ActorType
+}
diff --git a/test/disabled/presentation/akka/src/akka/actor/ActorRef.scala b/test/disabled/presentation/akka/src/akka/actor/ActorRef.scala
new file mode 100644
index 0000000..da0b630
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/actor/ActorRef.scala
@@ -0,0 +1,1433 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.actor
+
+import akka.event.EventHandler
+import akka.dispatch._
+import akka.config.Supervision._
+import akka.util._
+import ReflectiveAccess._
+
+import java.net.InetSocketAddress
+import java.util.concurrent.atomic.AtomicReference
+import java.util.concurrent.{ ScheduledFuture, ConcurrentHashMap, TimeUnit }
+import java.util.{ Map => JMap }
+
+import scala.beans.BeanProperty
+import scala.collection.immutable.Stack
+import scala.annotation.tailrec
+
+private[akka] object ActorRefInternals {
+
+ /**
+ * LifeCycles for ActorRefs.
+ */
+ private[akka] sealed trait StatusType
+ object UNSTARTED extends StatusType
+ object RUNNING extends StatusType
+ object BEING_RESTARTED extends StatusType
+ object SHUTDOWN extends StatusType
+}
+
+/**
+ * Abstraction for unification of sender and senderFuture for later reply.
+ * Can be stored away and used at a later point in time.
+ */
+abstract class Channel[T] {
+
+ /**
+ * Scala API. <p/>
+ * Sends the specified message to the channel.
+ */
+ def !(msg: T): Unit
+
+ /**
+ * Java API. <p/>
+ * Sends the specified message to the channel.
+ */
+ def sendOneWay(msg: T): Unit = this.!(msg)
+}
+
+/**
+ * ActorRef is an immutable and serializable handle to an Actor.
+ * <p/>
+ * Create an ActorRef for an Actor by using the factory method on the Actor object.
+ * <p/>
+ * Here is an example on how to create an actor with a default constructor.
+ * <pre>
+ * import Actor._
+ *
+ * val actor = actorOf[MyActor]
+ * actor.start()
+ * actor ! message
+ * actor.stop()
+ * </pre>
+ *
+ * You can also create and start actors like this:
+ * <pre>
+ * val actor = actorOf[MyActor].start()
+ * </pre>
+ *
+ * Here is an example on how to create an actor with a non-default constructor.
+ * <pre>
+ * import Actor._
+ *
+ * val actor = actorOf(new MyActor(...))
+ * actor.start()
+ * actor ! message
+ * actor.stop()
+ * </pre>
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+trait ActorRef extends ActorRefShared with java.lang.Comparable[ActorRef] { scalaRef: ScalaActorRef =>
+ // Only mutable for RemoteServer in order to maintain identity across nodes
+ @volatile
+ protected[akka] var _uuid = newUuid
+ @volatile
+ protected[this] var _status: ActorRefInternals.StatusType = ActorRefInternals.UNSTARTED
+
+ /**
+ * User overridable callback/setting.
+ * <p/>
+ * Identifier for actor, does not have to be a unique one. Default is the 'uuid'.
+ * <p/>
+ * This field is used for logging, AspectRegistry.actorsFor(id), identifier for remote
+ * actor in RemoteServer etc.But also as the identifier for persistence, which means
+ * that you can use a custom name to be able to retrieve the "correct" persisted state
+ * upon restart, remote restart etc.
+ */
+ @BeanProperty
+ @volatile
+ var id: String = _uuid.toString
+
+ /**
+ * User overridable callback/setting.
+ * <p/>
+ * Defines the default timeout for '!!' and '!!!' invocations,
+ * e.g. the timeout for the future returned by the call to '!!' and '!!!'.
+ */
+ @deprecated("Will be replaced by implicit-scoped timeout on all methods that needs it, will default to timeout specified in config", "1.1")
+ @BeanProperty
+ @volatile
+ var timeout: Long = Actor.TIMEOUT
+
+ /**
+ * User overridable callback/setting.
+ * <p/>
+ * Defines the default timeout for an initial receive invocation.
+ * When specified, the receive function should be able to handle a 'ReceiveTimeout' message.
+ */
+ @volatile
+ var receiveTimeout: Option[Long] = None
+
+ /**
+ * Akka Java API. <p/>
+ * Defines the default timeout for an initial receive invocation.
+ * When specified, the receive function should be able to handle a 'ReceiveTimeout' message.
+ */
+ def setReceiveTimeout(timeout: Long) = this.receiveTimeout = Some(timeout)
+ def getReceiveTimeout(): Option[Long] = receiveTimeout
+
+ /**
+ * Akka Java API. <p/>
+ * A faultHandler defines what should be done when a linked actor signals an error.
+ * <p/>
+ * Can be one of:
+ * <pre>
+ * getContext().setFaultHandler(new AllForOneStrategy(new Class[]{Throwable.class},maxNrOfRetries, withinTimeRange));
+ * </pre>
+ * Or:
+ * <pre>
+ * getContext().setFaultHandler(new OneForOneStrategy(new Class[]{Throwable.class},maxNrOfRetries, withinTimeRange));
+ * </pre>
+ */
+ def setFaultHandler(handler: FaultHandlingStrategy)
+ def getFaultHandler(): FaultHandlingStrategy
+
+ /**
+ * Akka Java API. <p/>
+ * A lifeCycle defines whether the actor will be stopped on error (Temporary) or if it can be restarted (Permanent)
+ * <p/>
+ * Can be one of:
+ *
+ * import static akka.config.Supervision.*;
+ * <pre>
+ * getContext().setLifeCycle(permanent());
+ * </pre>
+ * Or:
+ * <pre>
+ * getContext().setLifeCycle(temporary());
+ * </pre>
+ */
+ def setLifeCycle(lifeCycle: LifeCycle): Unit
+ def getLifeCycle(): LifeCycle
+
+ /**
+ * Akka Java API. <p/>
+ * The default dispatcher is the <tt>Dispatchers.globalExecutorBasedEventDrivenDispatcher</tt>.
+ * This means that all actors will share the same event-driven executor based dispatcher.
+ * <p/>
+ * You can override it so it fits the specific use-case that the actor is used for.
+ * See the <tt>akka.dispatch.Dispatchers</tt> class for the different
+ * dispatchers available.
+ * <p/>
+ * The default is also that all actors that are created and spawned from within this actor
+ * is sharing the same dispatcher as its creator.
+ */
+ def setDispatcher(dispatcher: MessageDispatcher) = this.dispatcher = dispatcher
+ def getDispatcher(): MessageDispatcher = dispatcher
+
+ /**
+ * Returns on which node this actor lives if None it lives in the local ActorRegistry
+ */
+ @deprecated("Remoting will become fully transparent in the future", "1.1")
+ def homeAddress: Option[InetSocketAddress]
+
+ /**
+ * Java API. <p/>
+ */
+ @deprecated("Remoting will become fully transparent in the future", "1.1")
+ def getHomeAddress(): InetSocketAddress = homeAddress getOrElse null
+
+ /**
+ * Holds the hot swapped partial function.
+ */
+ @volatile
+ protected[akka] var hotswap = Stack[PartialFunction[Any, Unit]]()
+
+ /**
+ * This is a reference to the message currently being processed by the actor
+ */
+ @volatile
+ protected[akka] var currentMessage: MessageInvocation = null
+
+ /**
+ * Comparison only takes uuid into account.
+ */
+ def compareTo(other: ActorRef) = this.uuid compareTo other.uuid
+
+ /**
+ * Returns the uuid for the actor.
+ */
+ def getUuid() = _uuid
+ def uuid = _uuid
+
+ /**
+ * Akka Java API. <p/>
+ * The reference sender Actor of the last received message.
+ * Is defined if the message was sent from another Actor, else None.
+ */
+ def getSender(): Option[ActorRef] = sender
+
+ /**
+ * Akka Java API. <p/>
+ * The reference sender future of the last received message.
+ * Is defined if the message was sent with sent with '!!' or '!!!', else None.
+ */
+ def getSenderFuture(): Option[CompletableFuture[Any]] = senderFuture
+
+ /**
+ * Is the actor being restarted?
+ */
+ def isBeingRestarted: Boolean = _status == ActorRefInternals.BEING_RESTARTED
+
+ /**
+ * Is the actor running?
+ */
+ def isRunning: Boolean = _status match {
+ case ActorRefInternals.BEING_RESTARTED | ActorRefInternals.RUNNING => true
+ case _ => false
+ }
+
+ /**
+ * Is the actor shut down?
+ */
+ def isShutdown: Boolean = _status == ActorRefInternals.SHUTDOWN
+
+ /**
+ * Is the actor ever started?
+ */
+ def isUnstarted: Boolean = _status == ActorRefInternals.UNSTARTED
+
+ /**
+ * Is the actor able to handle the message passed in as arguments?
+ */
+ @deprecated("Will be removed without replacement, it's just not reliable in the face of `become` and `unbecome`", "1.1")
+ def isDefinedAt(message: Any): Boolean = actor.isDefinedAt(message)
+
+ /**
+ * Only for internal use. UUID is effectively final.
+ */
+ protected[akka] def uuid_=(uid: Uuid) = _uuid = uid
+
+ /**
+ * Akka Java API. <p/>
+ * Sends a one-way asynchronous message. E.g. fire-and-forget semantics.
+ * <p/>
+ * <pre>
+ * actor.sendOneWay(message);
+ * </pre>
+ * <p/>
+ */
+ def sendOneWay(message: AnyRef): Unit = sendOneWay(message, null)
+
+ /**
+ * Akka Java API. <p/>
+ * Sends a one-way asynchronous message. E.g. fire-and-forget semantics.
+ * <p/>
+ * Allows you to pass along the sender of the message.
+ * <p/>
+ * <pre>
+ * actor.sendOneWay(message, context);
+ * </pre>
+ * <p/>
+ */
+ def sendOneWay(message: AnyRef, sender: ActorRef): Unit = this.!(message)(Option(sender))
+
+ /**
+ * Akka Java API. <p/>
+ * @see sendRequestReply(message: AnyRef, timeout: Long, sender: ActorRef)
+ * Uses the default timeout of the Actor (setTimeout()) and omits the sender reference
+ */
+ def sendRequestReply(message: AnyRef): AnyRef = sendRequestReply(message, timeout, null)
+
+ /**
+ * Akka Java API. <p/>
+ * @see sendRequestReply(message: AnyRef, timeout: Long, sender: ActorRef)
+ * Uses the default timeout of the Actor (setTimeout())
+ */
+ def sendRequestReply(message: AnyRef, sender: ActorRef): AnyRef = sendRequestReply(message, timeout, sender)
+
+ /**
+ * Akka Java API. <p/>
+ * Sends a message asynchronously and waits on a future for a reply message under the hood.
+ * <p/>
+ * It waits on the reply either until it receives it or until the timeout expires
+ * (which will throw an ActorTimeoutException). E.g. send-and-receive-eventually semantics.
+ * <p/>
+ * <b>NOTE:</b>
+ * Use this method with care. In most cases it is better to use 'sendOneWay' together with 'getContext().getSender()' to
+ * implement request/response message exchanges.
+ * <p/>
+ * If you are sending messages using <code>sendRequestReply</code> then you <b>have to</b> use <code>getContext().reply(..)</code>
+ * to send a reply message to the original sender. If not then the sender will block until the timeout expires.
+ */
+ def sendRequestReply(message: AnyRef, timeout: Long, sender: ActorRef): AnyRef = {
+ !!(message, timeout)(Option(sender)).getOrElse(throw new ActorTimeoutException(
+ "Message [" + message +
+ "]\n\tsent to [" + actorClassName +
+ "]\n\tfrom [" + (if (sender ne null) sender.actorClassName else "nowhere") +
+ "]\n\twith timeout [" + timeout +
+ "]\n\ttimed out."))
+ .asInstanceOf[AnyRef]
+ }
+
+ /**
+ * Akka Java API. <p/>
+ * @see sendRequestReplyFuture(message: AnyRef, sender: ActorRef): Future[_]
+ * Uses the Actors default timeout (setTimeout()) and omits the sender
+ */
+ def sendRequestReplyFuture[T <: AnyRef](message: AnyRef): Future[T] = sendRequestReplyFuture(message, timeout, null).asInstanceOf[Future[T]]
+
+ /**
+ * Akka Java API. <p/>
+ * @see sendRequestReplyFuture(message: AnyRef, sender: ActorRef): Future[_]
+ * Uses the Actors default timeout (setTimeout())
+ */
+ def sendRequestReplyFuture[T <: AnyRef](message: AnyRef, sender: ActorRef): Future[T] = sendRequestReplyFuture(message, timeout, sender).asInstanceOf[Future[T]]
+
+ /**
+ * Akka Java API. <p/>
+ * Sends a message asynchronously returns a future holding the eventual reply message.
+ * <p/>
+ * <b>NOTE:</b>
+ * Use this method with care. In most cases it is better to use 'sendOneWay' together with the 'getContext().getSender()' to
+ * implement request/response message exchanges.
+ * <p/>
+ * If you are sending messages using <code>sendRequestReplyFuture</code> then you <b>have to</b> use <code>getContext().reply(..)</code>
+ * to send a reply message to the original sender. If not then the sender will block until the timeout expires.
+ */
+ def sendRequestReplyFuture[T <: AnyRef](message: AnyRef, timeout: Long, sender: ActorRef): Future[T] = !!!(message, timeout)(Option(sender)).asInstanceOf[Future[T]]
+
+ /**
+ * Akka Java API. <p/>
+ * Forwards the message specified to this actor and preserves the original sender of the message
+ */
+ def forward(message: AnyRef, sender: ActorRef): Unit =
+ if (sender eq null) throw new IllegalArgumentException("The 'sender' argument to 'forward' can't be null")
+ else forward(message)(Some(sender))
+
+ /**
+ * Akka Java API. <p/>
+ * Use <code>getContext().replyUnsafe(..)</code> to reply with a message to the original sender of the message currently
+ * being processed.
+ * <p/>
+ * Throws an IllegalStateException if unable to determine what to reply to.
+ */
+ def replyUnsafe(message: AnyRef) = reply(message)
+
+ /**
+ * Akka Java API. <p/>
+ * Use <code>getContext().replySafe(..)</code> to reply with a message to the original sender of the message currently
+ * being processed.
+ * <p/>
+ * Returns true if reply was sent, and false if unable to determine what to reply to.
+ */
+ def replySafe(message: AnyRef): Boolean = reply_?(message)
+
+ /**
+ * Returns the class for the Actor instance that is managed by the ActorRef.
+ */
+ @deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
+ def actorClass: Class[_ <: Actor]
+
+ /**
+ * Akka Java API. <p/>
+ * Returns the class for the Actor instance that is managed by the ActorRef.
+ */
+ @deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
+ def getActorClass(): Class[_ <: Actor] = actorClass
+
+ /**
+ * Returns the class name for the Actor instance that is managed by the ActorRef.
+ */
+ @deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
+ def actorClassName: String
+
+ /**
+ * Akka Java API. <p/>
+ * Returns the class name for the Actor instance that is managed by the ActorRef.
+ */
+ @deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
+ def getActorClassName(): String = actorClassName
+
+ /**
+ * Sets the dispatcher for this actor. Needs to be invoked before the actor is started.
+ */
+ def dispatcher_=(md: MessageDispatcher): Unit
+
+ /**
+ * Get the dispatcher for this actor.
+ */
+ def dispatcher: MessageDispatcher
+
+ /**
+ * Starts up the actor and its message queue.
+ */
+ def start(): ActorRef
+
+ /**
+ * Shuts down the actor its dispatcher and message queue.
+ * Alias for 'stop'.
+ */
+ def exit() = stop()
+
+ /**
+ * Shuts down the actor its dispatcher and message queue.
+ */
+ def stop(): Unit
+
+ /**
+ * Links an other actor to this actor. Links are unidirectional and means that a the linking actor will
+ * receive a notification if the linked actor has crashed.
+ * <p/>
+ * If the 'trapExit' member field of the 'faultHandler' has been set to at contain at least one exception class then it will
+ * 'trap' these exceptions and automatically restart the linked actors according to the restart strategy
+ * defined by the 'faultHandler'.
+ */
+ def link(actorRef: ActorRef): Unit
+
+ /**
+ * Unlink the actor.
+ */
+ def unlink(actorRef: ActorRef): Unit
+
+ /**
+ * Atomically start and link an actor.
+ */
+ def startLink(actorRef: ActorRef): Unit
+
+ /**
+ * Atomically create (from actor class) and start an actor.
+ * <p/>
+ * To be invoked from within the actor itself.
+ */
+ @deprecated("Will be removed after 1.1, use Actor.actorOf instead", "1.1")
+ def spawn(clazz: Class[_ <: Actor]): ActorRef
+
+ /**
+ * Atomically create (from actor class), make it remote and start an actor.
+ * <p/>
+ * To be invoked from within the actor itself.
+ */
+ @deprecated("Will be removed after 1.1, client managed actors will be removed", "1.1")
+ def spawnRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long): ActorRef
+
+ /**
+ * Atomically create (from actor class), link and start an actor.
+ * <p/>
+ * To be invoked from within the actor itself.
+ */
+ @deprecated("Will be removed after 1.1, use Actor.remote.actorOf instead and then link on success", "1.1")
+ def spawnLink(clazz: Class[_ <: Actor]): ActorRef
+
+ /**
+ * Atomically create (from actor class), make it remote, link and start an actor.
+ * <p/>
+ * To be invoked from within the actor itself.
+ */
+ @deprecated("Will be removed after 1.1, client managed actors will be removed", "1.1")
+ def spawnLinkRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long): ActorRef
+
+ /**
+ * Returns the mailbox size.
+ */
+ def mailboxSize = dispatcher.mailboxSize(this)
+
+ /**
+ * Akka Java API. <p/>
+ * Returns the mailbox size.
+ */
+ def getMailboxSize(): Int = mailboxSize
+
+ /**
+ * Returns the supervisor, if there is one.
+ */
+ def supervisor: Option[ActorRef]
+
+ /**
+ * Akka Java API. <p/>
+ * Returns the supervisor, if there is one.
+ */
+ def getSupervisor(): ActorRef = supervisor getOrElse null
+
+ /**
+ * Returns an unmodifiable Java Map containing the linked actors,
+ * please note that the backing map is thread-safe but not immutable
+ */
+ def linkedActors: JMap[Uuid, ActorRef]
+
+ /**
+ * Java API. <p/>
+ * Returns an unmodifiable Java Map containing the linked actors,
+ * please note that the backing map is thread-safe but not immutable
+ */
+ def getLinkedActors(): JMap[Uuid, ActorRef] = linkedActors
+
+ /**
+ * Abstraction for unification of sender and senderFuture for later reply
+ */
+ def channel: Channel[Any] = {
+ if (senderFuture.isDefined) {
+ new Channel[Any] {
+ val future = senderFuture.get
+ def !(msg: Any) = future completeWithResult msg
+ }
+ } else if (sender.isDefined) {
+ val someSelf = Some(this)
+ new Channel[Any] {
+ val client = sender.get
+ def !(msg: Any) = client.!(msg)(someSelf)
+ }
+ } else throw new IllegalActorStateException("No channel available")
+ }
+
+ /**
+ * Java API. <p/>
+ * Abstraction for unification of sender and senderFuture for later reply
+ */
+ def getChannel: Channel[Any] = channel
+
+ protected[akka] def invoke(messageHandle: MessageInvocation): Unit
+
+ protected[akka] def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit
+
+ protected[akka] def postMessageToMailboxAndCreateFutureResultWithTimeout[T](
+ message: Any,
+ timeout: Long,
+ senderOption: Option[ActorRef],
+ senderFuture: Option[CompletableFuture[T]]): CompletableFuture[T]
+
+ protected[akka] def actorInstance: AtomicReference[Actor]
+
+ protected[akka] def actor: Actor = actorInstance.get
+
+ protected[akka] def supervisor_=(sup: Option[ActorRef]): Unit
+
+ protected[akka] def mailbox: AnyRef
+ protected[akka] def mailbox_=(value: AnyRef): AnyRef
+
+ protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable): Unit
+
+ protected[akka] def restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit
+
+ protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit
+
+ protected[akka] def registerSupervisorAsRemoteActor: Option[Uuid]
+
+ override def hashCode: Int = HashCode.hash(HashCode.SEED, uuid)
+
+ override def equals(that: Any): Boolean = {
+ that.isInstanceOf[ActorRef] &&
+ that.asInstanceOf[ActorRef].uuid == uuid
+ }
+
+ override def toString = "Actor[" + id + ":" + uuid + "]"
+}
+
+/**
+ * Local (serializable) ActorRef that is used when referencing the Actor on its "home" node.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+class LocalActorRef private[akka] (
+ private[this] val actorFactory: () => Actor,
+ val homeAddress: Option[InetSocketAddress],
+ val clientManaged: Boolean = false)
+ extends ActorRef with ScalaActorRef {
+ protected[akka] val guard = new ReentrantGuard
+
+ @volatile
+ protected[akka] var _futureTimeout: Option[ScheduledFuture[AnyRef]] = None
+ @volatile
+ private[akka] lazy val _linkedActors = new ConcurrentHashMap[Uuid, ActorRef]
+ @volatile
+ private[akka] var _supervisor: Option[ActorRef] = None
+ @volatile
+ private var maxNrOfRetriesCount: Int = 0
+ @volatile
+ private var restartsWithinTimeRangeTimestamp: Long = 0L
+ @volatile
+ private var _mailbox: AnyRef = _
+ @volatile
+ private[akka] var _dispatcher: MessageDispatcher = Dispatchers.defaultGlobalDispatcher
+
+ protected[akka] val actorInstance = guard.withGuard { new AtomicReference[Actor](newActor) }
+
+ //If it was started inside "newActor", initialize it
+ if (isRunning) initializeActorInstance
+
+ // used only for deserialization
+ private[akka] def this(
+ __uuid: Uuid,
+ __id: String,
+ __timeout: Long,
+ __receiveTimeout: Option[Long],
+ __lifeCycle: LifeCycle,
+ __supervisor: Option[ActorRef],
+ __hotswap: Stack[PartialFunction[Any, Unit]],
+ __factory: () => Actor,
+ __homeAddress: Option[InetSocketAddress]) = {
+ this(__factory, __homeAddress)
+ _uuid = __uuid
+ id = __id
+ timeout = __timeout
+ receiveTimeout = __receiveTimeout
+ lifeCycle = __lifeCycle
+ _supervisor = __supervisor
+ hotswap = __hotswap
+ setActorSelfFields(actor, this)
+ start
+ }
+
+ /**
+ * Returns whether this actor ref is client-managed remote or not
+ */
+ private[akka] final def isClientManaged_? = clientManaged && homeAddress.isDefined && isRemotingEnabled
+
+ // ========= PUBLIC FUNCTIONS =========
+
+ /**
+ * Returns the class for the Actor instance that is managed by the ActorRef.
+ */
+ @deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
+ def actorClass: Class[_ <: Actor] = actor.getClass.asInstanceOf[Class[_ <: Actor]]
+
+ /**
+ * Returns the class name for the Actor instance that is managed by the ActorRef.
+ */
+ @deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
+ def actorClassName: String = actorClass.getName
+
+ /**
+ * Sets the dispatcher for this actor. Needs to be invoked before the actor is started.
+ */
+ def dispatcher_=(md: MessageDispatcher): Unit = guard.withGuard {
+ if (!isBeingRestarted) {
+ if (!isRunning) _dispatcher = md
+ else throw new ActorInitializationException(
+ "Can not swap dispatcher for " + toString + " after it has been started")
+ }
+ }
+
+ /**
+ * Get the dispatcher for this actor.
+ */
+ def dispatcher: MessageDispatcher = _dispatcher
+
+ /**
+ * Starts up the actor and its message queue.
+ */
+ def start(): ActorRef = guard.withGuard {
+ if (isShutdown) throw new ActorStartException(
+ "Can't restart an actor that has been shut down with 'stop' or 'exit'")
+ if (!isRunning) {
+ dispatcher.attach(this)
+
+ _status = ActorRefInternals.RUNNING
+
+ // If we are not currently creating this ActorRef instance
+ if ((actorInstance ne null) && (actorInstance.get ne null))
+ initializeActorInstance
+
+ if (isClientManaged_?)
+ Actor.remote.registerClientManagedActor(homeAddress.get.getAddress.getHostAddress, homeAddress.get.getPort, uuid)
+
+ checkReceiveTimeout //Schedule the initial Receive timeout
+ }
+ this
+ }
+
+ /**
+ * Shuts down the actor its dispatcher and message queue.
+ */
+ def stop() = guard.withGuard {
+ if (isRunning) {
+ receiveTimeout = None
+ cancelReceiveTimeout
+ dispatcher.detach(this)
+ _status = ActorRefInternals.SHUTDOWN
+ try {
+ actor.postStop
+ } finally {
+ currentMessage = null
+ Actor.registry.unregister(this)
+ if (isRemotingEnabled) {
+ if (isClientManaged_?)
+ Actor.remote.unregisterClientManagedActor(homeAddress.get.getAddress.getHostAddress, homeAddress.get.getPort, uuid)
+ Actor.remote.unregister(this)
+ }
+ setActorSelfFields(actorInstance.get, null)
+ }
+ } //else if (isBeingRestarted) throw new ActorKilledException("Actor [" + toString + "] is being restarted.")
+ }
+
+ /**
+ * Links an other actor to this actor. Links are unidirectional and means that a the linking actor will
+ * receive a notification if the linked actor has crashed.
+ * <p/>
+ * If the 'trapExit' member field of the 'faultHandler' has been set to at contain at least one exception class then it will
+ * 'trap' these exceptions and automatically restart the linked actors according to the restart strategy
+ * defined by the 'faultHandler'.
+ * <p/>
+ * To be invoked from within the actor itself.
+ */
+ def link(actorRef: ActorRef): Unit = guard.withGuard {
+ val actorRefSupervisor = actorRef.supervisor
+ val hasSupervisorAlready = actorRefSupervisor.isDefined
+ if (hasSupervisorAlready && actorRefSupervisor.get.uuid == uuid) return // we already supervise this guy
+ else if (hasSupervisorAlready) throw new IllegalActorStateException(
+ "Actor can only have one supervisor [" + actorRef + "], e.g. link(actor) fails")
+ else {
+ _linkedActors.put(actorRef.uuid, actorRef)
+ actorRef.supervisor = Some(this)
+ }
+ }
+
+ /**
+ * Unlink the actor.
+ * <p/>
+ * To be invoked from within the actor itself.
+ */
+ def unlink(actorRef: ActorRef) = guard.withGuard {
+ if (_linkedActors.remove(actorRef.uuid) eq null)
+ throw new IllegalActorStateException("Actor [" + actorRef + "] is not a linked actor, can't unlink")
+
+ actorRef.supervisor = None
+ }
+
+ /**
+ * Atomically start and link an actor.
+ * <p/>
+ * To be invoked from within the actor itself.
+ */
+ def startLink(actorRef: ActorRef): Unit = guard.withGuard {
+ link(actorRef)
+ actorRef.start()
+ }
+
+ /**
+ * Atomically create (from actor class) and start an actor.
+ * <p/>
+ * To be invoked from within the actor itself.
+ */
+ def spawn(clazz: Class[_ <: Actor]): ActorRef =
+ Actor.actorOf(clazz).start()
+
+ /**
+ * Atomically create (from actor class), start and make an actor remote.
+ * <p/>
+ * To be invoked from within the actor itself.
+ */
+ def spawnRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long = Actor.TIMEOUT): ActorRef = {
+ ensureRemotingEnabled
+ val ref = Actor.remote.actorOf(clazz, hostname, port)
+ ref.timeout = timeout
+ ref.start()
+ }
+
+ /**
+ * Atomically create (from actor class), start and link an actor.
+ * <p/>
+ * To be invoked from within the actor itself.
+ */
+ def spawnLink(clazz: Class[_ <: Actor]): ActorRef = {
+ val actor = spawn(clazz)
+ link(actor)
+ actor.start()
+ actor
+ }
+
+ /**
+ * Atomically create (from actor class), start, link and make an actor remote.
+ * <p/>
+ * To be invoked from within the actor itself.
+ */
+ def spawnLinkRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long = Actor.TIMEOUT): ActorRef = {
+ ensureRemotingEnabled
+ val actor = Actor.remote.actorOf(clazz, hostname, port)
+ actor.timeout = timeout
+ link(actor)
+ actor.start()
+ actor
+ }
+
+ /**
+ * Returns the mailbox.
+ */
+ def mailbox: AnyRef = _mailbox
+
+ protected[akka] def mailbox_=(value: AnyRef): AnyRef = { _mailbox = value; value }
+
+ /**
+ * Returns the supervisor, if there is one.
+ */
+ def supervisor: Option[ActorRef] = _supervisor
+
+ // ========= AKKA PROTECTED FUNCTIONS =========
+
+ protected[akka] def supervisor_=(sup: Option[ActorRef]): Unit = _supervisor = sup
+
+ protected[akka] def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit =
+ if (isClientManaged_?) {
+ Actor.remote.send[Any](
+ message, senderOption, None, homeAddress.get, timeout, true, this, None, ActorType.ScalaActor, None)
+ } else
+ dispatcher dispatchMessage new MessageInvocation(this, message, senderOption, None)
+
+ protected[akka] def postMessageToMailboxAndCreateFutureResultWithTimeout[T](
+ message: Any,
+ timeout: Long,
+ senderOption: Option[ActorRef],
+ senderFuture: Option[CompletableFuture[T]]): CompletableFuture[T] = {
+ if (isClientManaged_?) {
+ val future = Actor.remote.send[T](
+ message, senderOption, senderFuture, homeAddress.get, timeout, false, this, None, ActorType.ScalaActor, None)
+ if (future.isDefined) future.get
+ else throw new IllegalActorStateException("Expected a future from remote call to actor " + toString)
+ } else {
+ val future = if (senderFuture.isDefined) senderFuture else Some(new DefaultCompletableFuture[T](timeout))
+ dispatcher dispatchMessage new MessageInvocation(
+ this, message, senderOption, future.asInstanceOf[Some[CompletableFuture[Any]]])
+ future.get
+ }
+ }
+
+ /**
+ * Callback for the dispatcher. This is the single entry point to the user Actor implementation.
+ */
+ protected[akka] def invoke(messageHandle: MessageInvocation): Unit = {
+ guard.lock.lock
+ try {
+ if (!isShutdown) {
+ currentMessage = messageHandle
+ try {
+ try {
+ cancelReceiveTimeout // FIXME: leave this here?
+ actor(messageHandle.message)
+ currentMessage = null // reset current message after successful invocation
+ } catch {
+ case e: InterruptedException =>
+ currentMessage = null // received message while actor is shutting down, ignore
+ case e =>
+ handleExceptionInDispatch(e, messageHandle.message)
+ }
+ finally {
+ checkReceiveTimeout // Reschedule receive timeout
+ }
+ } catch {
+ case e =>
+ EventHandler.error(e, this, messageHandle.message.toString)
+ throw e
+ }
+ }
+ } finally { guard.lock.unlock }
+ }
+
+ protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable) {
+ faultHandler match {
+ case AllForOneStrategy(trapExit, maxRetries, within) if trapExit.exists(_.isAssignableFrom(reason.getClass)) =>
+ restartLinkedActors(reason, maxRetries, within)
+
+ case OneForOneStrategy(trapExit, maxRetries, within) if trapExit.exists(_.isAssignableFrom(reason.getClass)) =>
+ dead.restart(reason, maxRetries, within)
+
+ case _ =>
+ if (_supervisor.isDefined) notifySupervisorWithMessage(Exit(this, reason))
+ else dead.stop()
+ }
+ }
+
+ private def requestRestartPermission(maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Boolean = {
+ val denied = if (maxNrOfRetries.isEmpty && withinTimeRange.isEmpty) { //Immortal
+ false
+ } else if (withinTimeRange.isEmpty) { // restrict number of restarts
+ maxNrOfRetriesCount += 1 //Increment number of retries
+ maxNrOfRetriesCount > maxNrOfRetries.get
+ } else { // cannot restart more than N within M timerange
+ maxNrOfRetriesCount += 1 //Increment number of retries
+ val windowStart = restartsWithinTimeRangeTimestamp
+ val now = System.currentTimeMillis
+ val retries = maxNrOfRetriesCount
+ //We are within the time window if it isn't the first restart, or if the window hasn't closed
+ val insideWindow = if (windowStart == 0) false
+ else (now - windowStart) <= withinTimeRange.get
+
+ //The actor is dead if it dies X times within the window of restart
+ val unrestartable = insideWindow && retries > maxNrOfRetries.getOrElse(1)
+
+ if (windowStart == 0 || !insideWindow) //(Re-)set the start of the window
+ restartsWithinTimeRangeTimestamp = now
+
+ if (windowStart != 0 && !insideWindow) //Reset number of restarts if window has expired
+ maxNrOfRetriesCount = 1
+
+ unrestartable
+ }
+
+ denied == false //If we weren't denied, we have a go
+ }
+
+ protected[akka] def restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]) {
+ def performRestart() {
+ val failedActor = actorInstance.get
+
+ failedActor match {
+ case p: Proxyable =>
+ failedActor.preRestart(reason)
+ failedActor.postRestart(reason)
+ case _ =>
+ failedActor.preRestart(reason)
+ val freshActor = newActor
+ setActorSelfFields(failedActor, null) // Only null out the references if we could instantiate the new actor
+ actorInstance.set(freshActor) // Assign it here so if preStart fails, we can null out the sef-refs next call
+ freshActor.preStart
+ freshActor.postRestart(reason)
+ }
+ }
+
+ def tooManyRestarts() {
+ _supervisor.foreach { sup =>
+ // can supervisor handle the notification?
+ val notification = MaximumNumberOfRestartsWithinTimeRangeReached(this, maxNrOfRetries, withinTimeRange, reason)
+ if (sup.isDefinedAt(notification)) notifySupervisorWithMessage(notification)
+ }
+ stop
+ }
+
+ @tailrec
+ def attemptRestart() {
+ val success = if (requestRestartPermission(maxNrOfRetries, withinTimeRange)) {
+ guard.withGuard[Boolean] {
+ _status = ActorRefInternals.BEING_RESTARTED
+
+ lifeCycle match {
+ case Temporary =>
+ shutDownTemporaryActor(this)
+ true
+
+ case _ => // either permanent or none where default is permanent
+ val success = try {
+ performRestart()
+ true
+ } catch {
+ case e =>
+ EventHandler.error(e, this, "Exception in restart of Actor [%s]".format(toString))
+ false // an error or exception here should trigger a retry
+ }
+ finally {
+ currentMessage = null
+ }
+ if (success) {
+ _status = ActorRefInternals.RUNNING
+ dispatcher.resume(this)
+ restartLinkedActors(reason, maxNrOfRetries, withinTimeRange)
+ }
+ success
+ }
+ }
+ } else {
+ tooManyRestarts()
+ true // done
+ }
+
+ if (success) () // alles gut
+ else attemptRestart()
+ }
+
+ attemptRestart() // recur
+ }
+
+ protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]) = {
+ val i = _linkedActors.values.iterator
+ while (i.hasNext) {
+ val actorRef = i.next
+ actorRef.lifeCycle match {
+ // either permanent or none where default is permanent
+ case Temporary => shutDownTemporaryActor(actorRef)
+ case _ => actorRef.restart(reason, maxNrOfRetries, withinTimeRange)
+ }
+ }
+ }
+
+ protected[akka] def registerSupervisorAsRemoteActor: Option[Uuid] = guard.withGuard {
+ ensureRemotingEnabled
+ if (_supervisor.isDefined) {
+ if (homeAddress.isDefined) Actor.remote.registerSupervisorForActor(this)
+ Some(_supervisor.get.uuid)
+ } else None
+ }
+
+ def linkedActors: JMap[Uuid, ActorRef] = java.util.Collections.unmodifiableMap(_linkedActors)
+
+ // ========= PRIVATE FUNCTIONS =========
+
+ private[this] def newActor: Actor = {
+ try {
+ Actor.actorRefInCreation.set(Some(this))
+ val a = actorFactory()
+ if (a eq null) throw new ActorInitializationException("Actor instance passed to ActorRef can not be 'null'")
+ a
+ } finally {
+ Actor.actorRefInCreation.set(None)
+ }
+ }
+
+ private def shutDownTemporaryActor(temporaryActor: ActorRef) {
+ temporaryActor.stop()
+ _linkedActors.remove(temporaryActor.uuid) // remove the temporary actor
+ // if last temporary actor is gone, then unlink me from supervisor
+ if (_linkedActors.isEmpty) notifySupervisorWithMessage(UnlinkAndStop(this))
+ true
+ }
+
+ private def handleExceptionInDispatch(reason: Throwable, message: Any) = {
+ EventHandler.error(reason, this, message.toString)
+
+ //Prevent any further messages to be processed until the actor has been restarted
+ dispatcher.suspend(this)
+
+ senderFuture.foreach(_.completeWithException(reason))
+
+ if (supervisor.isDefined) notifySupervisorWithMessage(Exit(this, reason))
+ else {
+ lifeCycle match {
+ case Temporary => shutDownTemporaryActor(this)
+ case _ => dispatcher.resume(this) //Resume processing for this actor
+ }
+ }
+ }
+
+ private def notifySupervisorWithMessage(notification: LifeCycleMessage) = {
+ // FIXME to fix supervisor restart of remote actor for oneway calls, inject a supervisor proxy that can send notification back to client
+ _supervisor.foreach { sup =>
+ if (sup.isShutdown) { // if supervisor is shut down, game over for all linked actors
+ //Scoped stop all linked actors, to avoid leaking the 'i' val
+ {
+ val i = _linkedActors.values.iterator
+ while (i.hasNext) {
+ i.next.stop()
+ i.remove
+ }
+ }
+ //Stop the actor itself
+ stop
+ } else sup ! notification // else notify supervisor
+ }
+ }
+
+ private def setActorSelfFields(actor: Actor, value: ActorRef) {
+
+ @tailrec
+ def lookupAndSetSelfFields(clazz: Class[_], actor: Actor, value: ActorRef): Boolean = {
+ val success = try {
+ val selfField = clazz.getDeclaredField("self")
+ val someSelfField = clazz.getDeclaredField("someSelf")
+ selfField.setAccessible(true)
+ someSelfField.setAccessible(true)
+ selfField.set(actor, value)
+ someSelfField.set(actor, if (value ne null) Some(value) else null)
+ true
+ } catch {
+ case e: NoSuchFieldException => false
+ }
+
+ if (success) true
+ else {
+ val parent = clazz.getSuperclass
+ if (parent eq null)
+ throw new IllegalActorStateException(toString + " is not an Actor since it have not mixed in the 'Actor' trait")
+ lookupAndSetSelfFields(parent, actor, value)
+ }
+ }
+
+ lookupAndSetSelfFields(actor.getClass, actor, value)
+ }
+
+ private def initializeActorInstance = {
+ actor.preStart // run actor preStart
+ Actor.registry.register(this)
+ }
+
+ protected[akka] def checkReceiveTimeout = {
+ cancelReceiveTimeout
+ if (receiveTimeout.isDefined && dispatcher.mailboxSize(this) <= 0) { //Only reschedule if desired and there are currently no more messages to be processed
+ _futureTimeout = Some(Scheduler.scheduleOnce(this, ReceiveTimeout, receiveTimeout.get, TimeUnit.MILLISECONDS))
+ }
+ }
+
+ protected[akka] def cancelReceiveTimeout = {
+ if (_futureTimeout.isDefined) {
+ _futureTimeout.get.cancel(true)
+ _futureTimeout = None
+ }
+ }
+}
+
+/**
+ * System messages for RemoteActorRef.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+object RemoteActorSystemMessage {
+ val Stop = "RemoteActorRef:stop".intern
+}
+
+/**
+ * Remote ActorRef that is used when referencing the Actor on a different node than its "home" node.
+ * This reference is network-aware (remembers its origin) and immutable.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+private[akka] case class RemoteActorRef private[akka] (
+ classOrServiceName: String,
+ val actorClassName: String,
+ val hostname: String,
+ val port: Int,
+ _timeout: Long,
+ loader: Option[ClassLoader],
+ val actorType: ActorType = ActorType.ScalaActor)
+ extends ActorRef with ScalaActorRef {
+
+ ensureRemotingEnabled
+
+ val homeAddress = Some(new InetSocketAddress(hostname, port))
+
+ //protected def clientManaged = classOrServiceName.isEmpty //If no class or service name, it's client managed
+ id = classOrServiceName
+ //id = classOrServiceName.getOrElse("uuid:" + uuid) //If we're a server-managed we want to have classOrServiceName as id, or else, we're a client-managed and we want to have our uuid as id
+
+ timeout = _timeout
+
+ start
+
+ def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit =
+ Actor.remote.send[Any](message, senderOption, None, homeAddress.get, timeout, true, this, None, actorType, loader)
+
+ def postMessageToMailboxAndCreateFutureResultWithTimeout[T](
+ message: Any,
+ timeout: Long,
+ senderOption: Option[ActorRef],
+ senderFuture: Option[CompletableFuture[T]]): CompletableFuture[T] = {
+ val future = Actor.remote.send[T](
+ message, senderOption, senderFuture,
+ homeAddress.get, timeout,
+ false, this, None,
+ actorType, loader)
+ if (future.isDefined) future.get
+ else throw new IllegalActorStateException("Expected a future from remote call to actor " + toString)
+ }
+
+ def start: ActorRef = synchronized {
+ _status = ActorRefInternals.RUNNING
+ this
+ }
+
+ def stop: Unit = synchronized {
+ if (_status == ActorRefInternals.RUNNING) {
+ _status = ActorRefInternals.SHUTDOWN
+ postMessageToMailbox(RemoteActorSystemMessage.Stop, None)
+ }
+ }
+
+ protected[akka] def registerSupervisorAsRemoteActor: Option[Uuid] = None
+
+ // ==== NOT SUPPORTED ====
+ @deprecated("Will be removed without replacement, doesn't make any sense to have in the face of `become` and `unbecome`", "1.1")
+ def actorClass: Class[_ <: Actor] = unsupported
+ def dispatcher_=(md: MessageDispatcher): Unit = unsupported
+ def dispatcher: MessageDispatcher = unsupported
+ def link(actorRef: ActorRef): Unit = unsupported
+ def unlink(actorRef: ActorRef): Unit = unsupported
+ def startLink(actorRef: ActorRef): Unit = unsupported
+ def spawn(clazz: Class[_ <: Actor]): ActorRef = unsupported
+ def spawnRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long): ActorRef = unsupported
+ def spawnLink(clazz: Class[_ <: Actor]): ActorRef = unsupported
+ def spawnLinkRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long): ActorRef = unsupported
+ def supervisor: Option[ActorRef] = unsupported
+ def linkedActors: JMap[Uuid, ActorRef] = unsupported
+ protected[akka] def mailbox: AnyRef = unsupported
+ protected[akka] def mailbox_=(value: AnyRef): AnyRef = unsupported
+ protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable): Unit = unsupported
+ protected[akka] def restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit = unsupported
+ protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit = unsupported
+ protected[akka] def invoke(messageHandle: MessageInvocation): Unit = unsupported
+ protected[akka] def supervisor_=(sup: Option[ActorRef]): Unit = unsupported
+ protected[akka] def actorInstance: AtomicReference[Actor] = unsupported
+ private def unsupported = throw new UnsupportedOperationException("Not supported for RemoteActorRef")
+}
+
+/**
+ * This trait represents the common (external) methods for all ActorRefs
+ * Needed because implicit conversions aren't applied when instance imports are used
+ *
+ * i.e.
+ * var self: ScalaActorRef = ...
+ * import self._
+ * //can't call ActorRef methods here unless they are declared in a common
+ * //superclass, which ActorRefShared is.
+ */
+trait ActorRefShared {
+ /**
+ * Returns the uuid for the actor.
+ */
+ def uuid: Uuid
+}
+
+/**
+ * This trait represents the Scala Actor API
+ * There are implicit conversions in ../actor/Implicits.scala
+ * from ActorRef -> ScalaActorRef and back
+ */
+trait ScalaActorRef extends ActorRefShared { ref: ActorRef =>
+
+ /**
+ * Identifier for actor, does not have to be a unique one. Default is the 'uuid'.
+ * <p/>
+ * This field is used for logging, AspectRegistry.actorsFor(id), identifier for remote
+ * actor in RemoteServer etc.But also as the identifier for persistence, which means
+ * that you can use a custom name to be able to retrieve the "correct" persisted state
+ * upon restart, remote restart etc.
+ */
+ def id: String
+
+ def id_=(id: String): Unit
+
+ /**
+ * User overridable callback/setting.
+ * <p/>
+ * Defines the life-cycle for a supervised actor.
+ */
+ @volatile
+ @BeanProperty
+ var lifeCycle: LifeCycle = UndefinedLifeCycle
+
+ /**
+ * User overridable callback/setting.
+ * <p/>
+ * Don't forget to supply a List of exception types to intercept (trapExit)
+ * <p/>
+ * Can be one of:
+ * <pre>
+ * faultHandler = AllForOneStrategy(trapExit = List(classOf[Exception]), maxNrOfRetries, withinTimeRange)
+ * </pre>
+ * Or:
+ * <pre>
+ * faultHandler = OneForOneStrategy(trapExit = List(classOf[Exception]), maxNrOfRetries, withinTimeRange)
+ * </pre>
+ */
+ @volatile
+ @BeanProperty
+ var faultHandler: FaultHandlingStrategy = NoFaultHandlingStrategy
+
+ /**
+ * The reference sender Actor of the last received message.
+ * Is defined if the message was sent from another Actor, else None.
+ */
+ def sender: Option[ActorRef] = {
+ val msg = currentMessage
+ if (msg eq null) None
+ else msg.sender
+ }
+
+ /**
+ * The reference sender future of the last received message.
+ * Is defined if the message was sent with sent with '!!' or '!!!', else None.
+ */
+ def senderFuture(): Option[CompletableFuture[Any]] = {
+ val msg = currentMessage
+ if (msg eq null) None
+ else msg.senderFuture
+ }
+
+ /**
+ * Sends a one-way asynchronous message. E.g. fire-and-forget semantics.
+ * <p/>
+ *
+ * If invoked from within an actor then the actor reference is implicitly passed on as the implicit 'sender' argument.
+ * <p/>
+ *
+ * This actor 'sender' reference is then available in the receiving actor in the 'sender' member variable,
+ * if invoked from within an Actor. If not then no sender is available.
+ * <pre>
+ * actor ! message
+ * </pre>
+ * <p/>
+ */
+ def !(message: Any)(implicit sender: Option[ActorRef] = None): Unit = {
+ if (isRunning) postMessageToMailbox(message, sender)
+ else throw new ActorInitializationException(
+ "Actor has not been started, you need to invoke 'actor.start()' before using it")
+ }
+
+ /**
+ * Sends a message asynchronously and waits on a future for a reply message.
+ * <p/>
+ * It waits on the reply either until it receives it (in the form of <code>Some(replyMessage)</code>)
+ * or until the timeout expires (which will return None). E.g. send-and-receive-eventually semantics.
+ * <p/>
+ * <b>NOTE:</b>
+ * Use this method with care. In most cases it is better to use '!' together with the 'sender' member field to
+ * implement request/response message exchanges.
+ * If you are sending messages using <code>!!</code> then you <b>have to</b> use <code>self.reply(..)</code>
+ * to send a reply message to the original sender. If not then the sender will block until the timeout expires.
+ */
+ def !!(message: Any, timeout: Long = this.timeout)(implicit sender: Option[ActorRef] = None): Option[Any] = {
+ if (isRunning) {
+ val future = postMessageToMailboxAndCreateFutureResultWithTimeout[Any](message, timeout, sender, None)
+ val isMessageJoinPoint = if (isTypedActorEnabled) TypedActorModule.resolveFutureIfMessageIsJoinPoint(message, future)
+ else false
+ try {
+ future.await
+ } catch {
+ case e: FutureTimeoutException =>
+ if (isMessageJoinPoint) {
+ EventHandler.error(e, this, e.getMessage)
+ throw e
+ } else None
+ }
+ future.resultOrException
+ } else throw new ActorInitializationException(
+ "Actor has not been started, you need to invoke 'actor.start()' before using it")
+ }
+
+ /**
+ * Sends a message asynchronously returns a future holding the eventual reply message.
+ * <p/>
+ * <b>NOTE:</b>
+ * Use this method with care. In most cases it is better to use '!' together with the 'sender' member field to
+ * implement request/response message exchanges.
+ * If you are sending messages using <code>!!!</code> then you <b>have to</b> use <code>self.reply(..)</code>
+ * to send a reply message to the original sender. If not then the sender will block until the timeout expires.
+ */
+ def !!(implicit sender: Option[ActorRef] = None): Future[T] = {
+ if (isRunning) postMessageToMailboxAndCreateFutureResultWithTimeout[T](message, timeout, sender, None)
+ else throw new ActorInitializationException(
+ "Actor has not been started, you need to invoke 'actor.start()' before using it")
+ }
+
+ /**
+ * Forwards the message and passes the original sender actor as the sender.
+ * <p/>
+ * Works with '!', '!!' and '!!!'.
+ */
+ def forward(message: Any)(implicit sender: Some[ActorRef]) = {
+ if (isRunning) {
+ if (sender.get.senderFuture.isDefined)
+ postMessageToMailboxAndCreateFutureResultWithTimeout(message, timeout, sender.get.sender, sender.get.senderFuture)
+ else
+ postMessageToMailbox(message, sender.get.sender)
+ } else throw new ActorInitializationException("Actor has not been started, you need to invoke 'actor.start()' before using it")
+ }
+
+ /**
+ * Use <code>self.reply(..)</code> to reply with a message to the original sender of the message currently
+ * being processed.
+ * <p/>
+ * Throws an IllegalStateException if unable to determine what to reply to.
+ */
+ def reply(message: Any) = if (!reply_?(message)) throw new IllegalActorStateException(
+ "\n\tNo sender in scope, can't reply. " +
+ "\n\tYou have probably: " +
+ "\n\t\t1. Sent a message to an Actor from an instance that is NOT an Actor." +
+ "\n\t\t2. Invoked a method on an TypedActor from an instance NOT an TypedActor." +
+ "\n\tElse you might want to use 'reply_?' which returns Boolean(true) if succes and Boolean(false) if no sender in scope")
+
+ /**
+ * Use <code>reply_?(..)</code> to reply with a message to the original sender of the message currently
+ * being processed.
+ * <p/>
+ * Returns true if reply was sent, and false if unable to determine what to reply to.
+ */
+ def reply_?(message: Any): Boolean = {
+ if (senderFuture.isDefined) {
+ senderFuture.get completeWithResult message
+ true
+ } else if (sender.isDefined) {
+ //TODO: optimize away this allocation, perhaps by having implicit self: Option[ActorRef] in signature
+ sender.get.!(message)(Some(this))
+ true
+ } else false
+ }
+
+ /**
+ * Atomically create (from actor class) and start an actor.
+ */
+ def spawn[T <: Actor: ClassTag]: ActorRef =
+ spawn(classTag[T].erasure.asInstanceOf[Class[_ <: Actor]])
+
+ /**
+ * Atomically create (from actor class), start and make an actor remote.
+ */
+ def spawnRemote[T <: Actor: ClassTag](hostname: String, port: Int, timeout: Long): ActorRef = {
+ ensureRemotingEnabled
+ spawnRemote(classTag[T].erasure.asInstanceOf[Class[_ <: Actor]], hostname, port, timeout)
+ }
+
+ /**
+ * Atomically create (from actor class), start and link an actor.
+ */
+ def spawnLink[T <: Actor: ClassTag]: ActorRef =
+ spawnLink(classTag[T].erasure.asInstanceOf[Class[_ <: Actor]])
+
+ /**
+ * Atomically create (from actor class), start, link and make an actor remote.
+ */
+ def spawnLinkRemote[T <: Actor: ClassTag](hostname: String, port: Int, timeout: Long): ActorRef = {
+ ensureRemotingEnabled
+ spawnLinkRemote(classTag[T].erasure.asInstanceOf[Class[_ <: Actor]], hostname, port, timeout)
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/actor/ActorRegistry.scala b/test/disabled/presentation/akka/src/akka/actor/ActorRegistry.scala
new file mode 100644
index 0000000..5d649fc
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/actor/ActorRegistry.scala
@@ -0,0 +1,389 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.actor
+
+import scala.collection.mutable.{ ListBuffer, Map }
+import scala.reflect.ArrayTag
+
+import java.util.concurrent.{ ConcurrentSkipListSet, ConcurrentHashMap }
+import java.util.{ Set => JSet }
+
+import annotation.tailrec
+import akka.util.ReflectiveAccess._
+import akka.util.{ ReflectiveAccess, ReadWriteGuard, ListenerManagement }
+
+/**
+ * Base trait for ActorRegistry events, allows listen to when an actor is added and removed from the ActorRegistry.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+sealed trait ActorRegistryEvent
+case class ActorRegistered(actor: ActorRef) extends ActorRegistryEvent
+case class ActorUnregistered(actor: ActorRef) extends ActorRegistryEvent
+
+/**
+ * Registry holding all Actor instances in the whole system.
+ * Mapped by:
+ * <ul>
+ * <li>the Actor's UUID</li>
+ * <li>the Actor's id field (which can be set by user-code)</li>
+ * <li>the Actor's class</li>
+ * <li>all Actors that are subtypes of a specific type</li>
+ * <ul>
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+
+final class ActorRegistry private[actor] () extends ListenerManagement {
+
+ private val actorsByUUID = new ConcurrentHashMap[Uuid, ActorRef]
+ private val actorsById = new Index[String, ActorRef]
+ private val guard = new ReadWriteGuard
+
+ /**
+ * Returns all actors in the system.
+ */
+ def actors: Array[ActorRef] = filter(_ => true)
+
+ /**
+ * Returns the number of actors in the system.
+ */
+ def size: Int = actorsByUUID.size
+
+ /**
+ * Invokes a function for all actors.
+ */
+ def foreach(f: (ActorRef) => Unit) = {
+ val elements = actorsByUUID.elements
+ while (elements.hasMoreElements) f(elements.nextElement)
+ }
+
+ /**
+ * Invokes the function on all known actors until it returns Some
+ * Returns None if the function never returns Some
+ */
+ def find[T](f: PartialFunction[ActorRef, T]): Option[T] = {
+ val elements = actorsByUUID.elements
+ while (elements.hasMoreElements) {
+ val element = elements.nextElement
+ if (f isDefinedAt element) return Some(f(element))
+ }
+ None
+ }
+
+ /**
+ * Finds all actors that are subtypes of the class passed in as the ClassTag argument and supporting passed message.
+ */
+ def actorsFor[T <: Actor](message: Any)(implicit classTag: ClassTag[T]): Array[ActorRef] =
+ filter(a => classTag.erasure.isAssignableFrom(a.actor.getClass) && a.isDefinedAt(message))
+
+ /**
+ * Finds all actors that satisfy a predicate.
+ */
+ def filter(p: ActorRef => Boolean): Array[ActorRef] = {
+ val all = new ListBuffer[ActorRef]
+ val elements = actorsByUUID.elements
+ while (elements.hasMoreElements) {
+ val actorId = elements.nextElement
+ if (p(actorId)) all += actorId
+ }
+ all.toArray
+ }
+
+ /**
+ * Finds all actors that are subtypes of the class passed in as the ClassTag argument.
+ */
+ def actorsFor[T <: Actor](implicit classTag: ClassTag[T]): Array[ActorRef] =
+ actorsFor[T](classTag.erasure.asInstanceOf[Class[T]])
+
+ /**
+ * Finds any actor that matches T. Very expensive, traverses ALL alive actors.
+ */
+ def actorFor[T <: Actor](implicit classTag: ClassTag[T]): Option[ActorRef] =
+ find({ case a: ActorRef if classTag.erasure.isAssignableFrom(a.actor.getClass) => a })
+
+ /**
+ * Finds all actors of type or sub-type specified by the class passed in as the Class argument.
+ */
+ def actorsFor[T <: Actor](clazz: Class[T]): Array[ActorRef] =
+ filter(a => clazz.isAssignableFrom(a.actor.getClass))
+
+ /**
+ * Finds all actors that has a specific id.
+ */
+ def actorsFor(id: String): Array[ActorRef] = actorsById values id
+
+ /**
+ * Finds the actor that has a specific UUID.
+ */
+ def actorFor(uuid: Uuid): Option[ActorRef] = Option(actorsByUUID get uuid)
+
+ /**
+ * Returns all typed actors in the system.
+ */
+ def typedActors: Array[AnyRef] = filterTypedActors(_ => true)
+
+ /**
+ * Invokes a function for all typed actors.
+ */
+ def foreachTypedActor(f: (AnyRef) => Unit) = {
+ TypedActorModule.ensureEnabled
+ val elements = actorsByUUID.elements
+ while (elements.hasMoreElements) {
+ val proxy = typedActorFor(elements.nextElement)
+ if (proxy.isDefined) f(proxy.get)
+ }
+ }
+
+ /**
+ * Invokes the function on all known typed actors until it returns Some
+ * Returns None if the function never returns Some
+ */
+ def findTypedActor[T](f: PartialFunction[AnyRef, T]): Option[T] = {
+ TypedActorModule.ensureEnabled
+ val elements = actorsByUUID.elements
+ while (elements.hasMoreElements) {
+ val proxy = typedActorFor(elements.nextElement)
+ if (proxy.isDefined && (f isDefinedAt proxy)) return Some(f(proxy))
+ }
+ None
+ }
+
+ /**
+ * Finds all typed actors that satisfy a predicate.
+ */
+ def filterTypedActors(p: AnyRef => Boolean): Array[AnyRef] = {
+ TypedActorModule.ensureEnabled
+ val all = new ListBuffer[AnyRef]
+ val elements = actorsByUUID.elements
+ while (elements.hasMoreElements) {
+ val proxy = typedActorFor(elements.nextElement)
+ if (proxy.isDefined && p(proxy.get)) all += proxy.get
+ }
+ all.toArray
+ }
+
+ /**
+ * Finds all typed actors that are subtypes of the class passed in as the ClassTag argument.
+ */
+ def typedActorsFor[T <: AnyRef](implicit classTag: ClassTag[T]): Array[AnyRef] = {
+ TypedActorModule.ensureEnabled
+ typedActorsFor[T](classTag.erasure.asInstanceOf[Class[T]])
+ }
+
+ /**
+ * Finds any typed actor that matches T.
+ */
+ def typedActorFor[T <: AnyRef](implicit classTag: ClassTag[T]): Option[AnyRef] = {
+ TypedActorModule.ensureEnabled
+ def predicate(proxy: AnyRef): Boolean = {
+ val actorRef = TypedActorModule.typedActorObjectInstance.get.actorFor(proxy)
+ actorRef.isDefined && classTag.erasure.isAssignableFrom(actorRef.get.actor.getClass)
+ }
+ findTypedActor({ case a: Some[AnyRef] if predicate(a.get) => a })
+ }
+
+ /**
+ * Finds all typed actors of type or sub-type specified by the class passed in as the Class argument.
+ */
+ def typedActorsFor[T <: AnyRef](clazz: Class[T]): Array[AnyRef] = {
+ TypedActorModule.ensureEnabled
+ def predicate(proxy: AnyRef): Boolean = {
+ val actorRef = TypedActorModule.typedActorObjectInstance.get.actorFor(proxy)
+ actorRef.isDefined && clazz.isAssignableFrom(actorRef.get.actor.getClass)
+ }
+ filterTypedActors(predicate)
+ }
+
+ /**
+ * Finds all typed actors that have a specific id.
+ */
+ def typedActorsFor(id: String): Array[AnyRef] = {
+ TypedActorModule.ensureEnabled
+ val actorRefs = actorsById values id
+ actorRefs.flatMap(typedActorFor(_))
+ }
+
+ /**
+ * Finds the typed actor that has a specific UUID.
+ */
+ def typedActorFor(uuid: Uuid): Option[AnyRef] = {
+ TypedActorModule.ensureEnabled
+ val actorRef = actorsByUUID get uuid
+ if (actorRef eq null) None
+ else typedActorFor(actorRef)
+ }
+
+ /**
+ * Get the typed actor proxy for a given typed actor ref.
+ */
+ private def typedActorFor(actorRef: ActorRef): Option[AnyRef] = {
+ TypedActorModule.typedActorObjectInstance.get.proxyFor(actorRef)
+ }
+
+ /**
+ * Registers an actor in the ActorRegistry.
+ */
+ private[akka] def register(actor: ActorRef) {
+ val id = actor.id
+ val uuid = actor.uuid
+
+ actorsById.put(id, actor)
+ actorsByUUID.put(uuid, actor)
+
+ // notify listeners
+ notifyListeners(ActorRegistered(actor))
+ }
+
+ /**
+ * Unregisters an actor in the ActorRegistry.
+ */
+ private[akka] def unregister(actor: ActorRef) {
+ val id = actor.id
+ val uuid = actor.uuid
+
+ actorsByUUID remove uuid
+ actorsById.remove(id, actor)
+
+ // notify listeners
+ notifyListeners(ActorUnregistered(actor))
+ }
+
+ /**
+ * Shuts down and unregisters all actors in the system.
+ */
+ def shutdownAll() {
+ if (TypedActorModule.isEnabled) {
+ val elements = actorsByUUID.elements
+ while (elements.hasMoreElements) {
+ val actorRef = elements.nextElement
+ val proxy = typedActorFor(actorRef)
+ if (proxy.isDefined) TypedActorModule.typedActorObjectInstance.get.stop(proxy.get)
+ else actorRef.stop()
+ }
+ } else foreach(_.stop())
+ if (Remote.isEnabled) {
+ Actor.remote.clear //TODO: REVISIT: Should this be here?
+ }
+ actorsByUUID.clear
+ actorsById.clear
+ }
+}
+
+/**
+ * An implementation of a ConcurrentMultiMap
+ * Adds/remove is serialized over the specified key
+ * Reads are fully concurrent <-- el-cheapo
+ *
+ * @author Viktor Klang
+ */
+class Index[K <: AnyRef, V <: AnyRef: ArrayTag] {
+ private val Naught = Array[V]() //Nil for Arrays
+ private val container = new ConcurrentHashMap[K, JSet[V]]
+ private val emptySet = new ConcurrentSkipListSet[V]
+
+ /**
+ * Associates the value of type V with the key of type K
+ * @return true if the value didn't exist for the key previously, and false otherwise
+ */
+ def put(key: K, value: V): Boolean = {
+ //Tailrecursive spin-locking put
+ @tailrec
+ def spinPut(k: K, v: V): Boolean = {
+ var retry = false
+ var added = false
+ val set = container get k
+
+ if (set ne null) {
+ set.synchronized {
+ if (set.isEmpty) retry = true //IF the set is empty then it has been removed, so signal retry
+ else { //Else add the value to the set and signal that retry is not needed
+ added = set add v
+ retry = false
+ }
+ }
+ } else {
+ val newSet = new ConcurrentSkipListSet[V]
+ newSet add v
+
+ // Parry for two simultaneous putIfAbsent(id,newSet)
+ val oldSet = container.putIfAbsent(k, newSet)
+ if (oldSet ne null) {
+ oldSet.synchronized {
+ if (oldSet.isEmpty) retry = true //IF the set is empty then it has been removed, so signal retry
+ else { //Else try to add the value to the set and signal that retry is not needed
+ added = oldSet add v
+ retry = false
+ }
+ }
+ } else added = true
+ }
+
+ if (retry) spinPut(k, v)
+ else added
+ }
+
+ spinPut(key, value)
+ }
+
+ /**
+ * @return a _new_ array of all existing values for the given key at the time of the call
+ */
+ def values(key: K): Array[V] = {
+ val set: JSet[V] = container get key
+ val result = if (set ne null) set toArray Naught else Naught
+ result.asInstanceOf[Array[V]]
+ }
+
+ /**
+ * @return Some(value) for the first matching value where the supplied function returns true for the given key,
+ * if no matches it returns None
+ */
+ def findValue(key: K)(f: (V) => Boolean): Option[V] = {
+ import scala.collection.JavaConversions._
+ val set = container get key
+ if (set ne null) set.iterator.find(f)
+ else None
+ }
+
+ /**
+ * Applies the supplied function to all keys and their values
+ */
+ def foreach(fun: (K, V) => Unit) {
+ import scala.collection.JavaConversions._
+ container.entrySet foreach { (e) =>
+ e.getValue.foreach(fun(e.getKey, _))
+ }
+ }
+
+ /**
+ * Disassociates the value of type V from the key of type K
+ * @return true if the value was disassociated from the key and false if it wasn't previously associated with the key
+ */
+ def remove(key: K, value: V): Boolean = {
+ val set = container get key
+
+ if (set ne null) {
+ set.synchronized {
+ if (set.remove(value)) { //If we can remove the value
+ if (set.isEmpty) //and the set becomes empty
+ container.remove(key, emptySet) //We try to remove the key if it's mapped to an empty set
+
+ true //Remove succeeded
+ } else false //Remove failed
+ }
+ } else false //Remove failed
+ }
+
+ /**
+ * @return true if the underlying containers is empty, may report false negatives when the last remove is underway
+ */
+ def isEmpty: Boolean = container.isEmpty
+
+ /**
+ * Removes all keys and all values
+ */
+ def clear = foreach { case (k, v) => remove(k, v) }
+}
diff --git a/test/disabled/presentation/akka/src/akka/actor/Actors.java b/test/disabled/presentation/akka/src/akka/actor/Actors.java
new file mode 100644
index 0000000..a5ec9f3
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/actor/Actors.java
@@ -0,0 +1,108 @@
+package akka.actor;
+
+import akka.japi.Creator;
+import akka.remoteinterface.RemoteSupport;
+
+/**
+ * JAVA API for
+ * - creating actors,
+ * - creating remote actors,
+ * - locating actors
+ */
+public class Actors {
+ /**
+ *
+ * @return The actor registry
+ */
+ public static ActorRegistry registry() {
+ return Actor$.MODULE$.registry();
+ }
+
+ /**
+ *
+ * @return
+ * @throws UnsupportedOperationException If remoting isn't configured
+ * @throws ModuleNotAvailableException If the class for the remote support cannot be loaded
+ */
+ public static RemoteSupport remote() {
+ return Actor$.MODULE$.remote();
+ }
+
+ /**
+ * NOTE: Use this convenience method with care, do NOT make it possible to get a reference to the
+ * UntypedActor instance directly, but only through its 'ActorRef' wrapper reference.
+ * <p/>
+ * Creates an ActorRef out of the Actor. Allows you to pass in the instance for the UntypedActor.
+ * Only use this method when you need to pass in constructor arguments into the 'UntypedActor'.
+ * <p/>
+ * You use it by implementing the UntypedActorFactory interface.
+ * Example in Java:
+ * <pre>
+ * ActorRef actor = Actors.actorOf(new UntypedActorFactory() {
+ * public UntypedActor create() {
+ * return new MyUntypedActor("service:name", 5);
+ * }
+ * });
+ * actor.start();
+ * actor.sendOneWay(message, context);
+ * actor.stop();
+ * </pre>
+ */
+ public static ActorRef actorOf(final Creator<Actor> factory) {
+ return Actor$.MODULE$.actorOf(factory);
+ }
+
+ /**
+ * Creates an ActorRef out of the Actor type represented by the class provided.
+ * Example in Java:
+ * <pre>
+ * ActorRef actor = Actors.actorOf(MyUntypedActor.class);
+ * actor.start();
+ * actor.sendOneWay(message, context);
+ * actor.stop();
+ * </pre>
+ * You can create and start the actor in one statement like this:
+ * <pre>
+ * val actor = Actors.actorOf(MyActor.class).start();
+ * </pre>
+ */
+ public static ActorRef actorOf(final Class<? extends Actor> type) {
+ return Actor$.MODULE$.actorOf(type);
+ }
+
+ /**
+ * The message that is sent when an Actor gets a receive timeout.
+ * <pre>
+ * if( message == receiveTimeout() ) {
+ * //Timed out
+ * }
+ * </pre>
+ * @return the single instance of ReceiveTimeout
+ */
+ public final static ReceiveTimeout$ receiveTimeout() {
+ return ReceiveTimeout$.MODULE$;
+ }
+
+ /**
+ * The message that when sent to an Actor kills it by throwing an exception.
+ * <pre>
+ * actor.sendOneWay(kill());
+ * </pre>
+ * @return the single instance of Kill
+ */
+ public final static Kill$ kill() {
+ return Kill$.MODULE$;
+ }
+
+
+ /**
+ * The message that when sent to an Actor shuts it down by calling 'stop'.
+ * <pre>
+ * actor.sendOneWay(poisonPill());
+ * </pre>
+ * @return the single instance of PoisonPill
+ */
+ public final static PoisonPill$ poisonPill() {
+ return PoisonPill$.MODULE$;
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/actor/BootableActorLoaderService.scala b/test/disabled/presentation/akka/src/akka/actor/BootableActorLoaderService.scala
new file mode 100644
index 0000000..a54fca9
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/actor/BootableActorLoaderService.scala
@@ -0,0 +1,60 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.actor
+
+import java.io.File
+import java.net.{ URL, URLClassLoader }
+import java.util.jar.JarFile
+
+import akka.util.{ Bootable }
+import akka.config.Config._
+
+/**
+ * Handles all modules in the deploy directory (load and unload)
+ */
+trait BootableActorLoaderService extends Bootable {
+
+ val BOOT_CLASSES = config.getList("akka.boot")
+ lazy val applicationLoader: Option[ClassLoader] = createApplicationClassLoader
+
+ protected def createApplicationClassLoader: Option[ClassLoader] = Some({
+ if (HOME.isDefined) {
+ val DEPLOY = HOME.get + "/deploy"
+ val DEPLOY_DIR = new File(DEPLOY)
+ if (!DEPLOY_DIR.exists) {
+ System.exit(-1)
+ }
+ val filesToDeploy = DEPLOY_DIR.listFiles.toArray.toList
+ .asInstanceOf[List[File]].filter(_.getName.endsWith(".jar"))
+ var dependencyJars: List[URL] = Nil
+ filesToDeploy.map { file =>
+ val jarFile = new JarFile(file)
+ val en = jarFile.entries
+ while (en.hasMoreElements) {
+ val name = en.nextElement.getName
+ if (name.endsWith(".jar")) dependencyJars ::= new File(
+ String.format("jar:file:%s!/%s", jarFile.getName, name)).toURI.toURL
+ }
+ }
+ val toDeploy = filesToDeploy.map(_.toURI.toURL)
+ val allJars = toDeploy ::: dependencyJars
+
+ new URLClassLoader(allJars.toArray, Thread.currentThread.getContextClassLoader)
+ } else Thread.currentThread.getContextClassLoader
+ })
+
+ abstract override def onLoad = {
+ super.onLoad
+
+ for (loader ← applicationLoader; clazz ← BOOT_CLASSES) {
+ loader.loadClass(clazz).newInstance
+ }
+ }
+
+ abstract override def onUnload = {
+ super.onUnload
+ Actor.registry.shutdownAll()
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/actor/FSM.scala b/test/disabled/presentation/akka/src/akka/actor/FSM.scala
new file mode 100644
index 0000000..d9cd9a9
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/actor/FSM.scala
@@ -0,0 +1,527 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+package akka.actor
+
+import akka.util._
+import akka.event.EventHandler
+
+import scala.collection.mutable
+import java.util.concurrent.ScheduledFuture
+
+object FSM {
+
+ object NullFunction extends PartialFunction[Any, Nothing] {
+ def isDefinedAt(o: Any) = false
+ def apply(o: Any) = sys.error("undefined")
+ }
+
+ case class CurrentState[S](fsmRef: ActorRef, state: S)
+ case class Transition[S](fsmRef: ActorRef, from: S, to: S)
+ case class SubscribeTransitionCallBack(actorRef: ActorRef)
+ case class UnsubscribeTransitionCallBack(actorRef: ActorRef)
+
+ sealed trait Reason
+ case object Normal extends Reason
+ case object Shutdown extends Reason
+ case class Failure(cause: Any) extends Reason
+
+ case object StateTimeout
+ case class TimeoutMarker(generation: Long)
+
+ case class Timer(name: String, msg: AnyRef, repeat: Boolean, generation: Int) {
+ private var ref: Option[ScheduledFuture[AnyRef]] = _
+
+ def schedule(actor: ActorRef, timeout: Duration) {
+ if (repeat) {
+ ref = Some(Scheduler.schedule(actor, this, timeout.length, timeout.length, timeout.unit))
+ } else {
+ ref = Some(Scheduler.scheduleOnce(actor, this, timeout.length, timeout.unit))
+ }
+ }
+
+ def cancel {
+ if (ref.isDefined) {
+ ref.get.cancel(true)
+ ref = None
+ }
+ }
+ }
+
+ /*
+ * This extractor is just convenience for matching a (S, S) pair, including a
+ * reminder what the new state is.
+ */
+ object -> {
+ def unapply[S](in: (S, S)) = Some(in)
+ }
+
+ /*
+ * With these implicits in scope, you can write "5 seconds" anywhere a
+ * Duration or Option[Duration] is expected. This is conveniently true
+ * for derived classes.
+ */
+ implicit def d2od(d: Duration): Option[Duration] = Some(d)
+}
+
+/**
+ * Finite State Machine actor trait. Use as follows:
+ *
+ * <pre>
+ * object A {
+ * trait State
+ * case class One extends State
+ * case class Two extends State
+ *
+ * case class Data(i : Int)
+ * }
+ *
+ * class A extends Actor with FSM[A.State, A.Data] {
+ * import A._
+ *
+ * startWith(One, Data(42))
+ * when(One) {
+ * case Event(SomeMsg, Data(x)) => ...
+ * case Ev(SomeMsg) => ... // convenience when data not needed
+ * }
+ * when(Two, stateTimeout = 5 seconds) { ... }
+ * initialize
+ * }
+ * </pre>
+ *
+ * Within the partial function the following values are returned for effecting
+ * state transitions:
+ *
+ * - <code>stay</code> for staying in the same state
+ * - <code>stay using Data(...)</code> for staying in the same state, but with
+ * different data
+ * - <code>stay forMax 5.millis</code> for staying with a state timeout; can be
+ * combined with <code>using</code>
+ * - <code>goto(...)</code> for changing into a different state; also supports
+ * <code>using</code> and <code>forMax</code>
+ * - <code>stop</code> for terminating this FSM actor
+ *
+ * Each of the above also supports the method <code>replying(AnyRef)</code> for
+ * sending a reply before changing state.
+ *
+ * While changing state, custom handlers may be invoked which are registered
+ * using <code>onTransition</code>. This is meant to enable concentrating
+ * different concerns in different places; you may choose to use
+ * <code>when</code> for describing the properties of a state, including of
+ * course initiating transitions, but you can describe the transitions using
+ * <code>onTransition</code> to avoid having to duplicate that code among
+ * multiple paths which lead to a transition:
+ *
+ * <pre>
+ * onTransition {
+ * case Active -> _ => cancelTimer("activeTimer")
+ * }
+ * </pre>
+ *
+ * Multiple such blocks are supported and all of them will be called, not only
+ * the first matching one.
+ *
+ * Another feature is that other actors may subscribe for transition events by
+ * sending a <code>SubscribeTransitionCallback</code> message to this actor;
+ * use <code>UnsubscribeTransitionCallback</code> before stopping the other
+ * actor.
+ *
+ * State timeouts set an upper bound to the time which may pass before another
+ * message is received in the current state. If no external message is
+ * available, then upon expiry of the timeout a StateTimeout message is sent.
+ * Note that this message will only be received in the state for which the
+ * timeout was set and that any message received will cancel the timeout
+ * (possibly to be started again by the next transition).
+ *
+ * Another feature is the ability to install and cancel single-shot as well as
+ * repeated timers which arrange for the sending of a user-specified message:
+ *
+ * <pre>
+ * setTimer("tock", TockMsg, 1 second, true) // repeating
+ * setTimer("lifetime", TerminateMsg, 1 hour, false) // single-shot
+ * cancelTimer("tock")
+ * timerActive_? ("tock")
+ * </pre>
+ */
+trait FSM[S, D] extends ListenerManagement {
+ this: Actor =>
+
+ import FSM._
+
+ type StateFunction = scala.PartialFunction[Event[D], State]
+ type Timeout = Option[Duration]
+ type TransitionHandler = PartialFunction[(S, S), Unit]
+
+ /**
+ * ****************************************
+ * DSL
+ * ****************************************
+ */
+
+ /**
+ * Insert a new StateFunction at the end of the processing chain for the
+ * given state. If the stateTimeout parameter is set, entering this state
+ * without a differing explicit timeout setting will trigger a StateTimeout
+ * event; the same is true when using #stay.
+ *
+ * @param stateName designator for the state
+ * @param stateTimeout default state timeout for this state
+ * @param stateFunction partial function describing response to input
+ */
+ protected final def when(stateName: S, stateTimeout: Timeout = None)(stateFunction: StateFunction) = {
+ register(stateName, stateFunction, stateTimeout)
+ }
+
+ /**
+ * Set initial state. Call this method from the constructor before the #initialize method.
+ *
+ * @param stateName initial state designator
+ * @param stateData initial state data
+ * @param timeout state timeout for the initial state, overriding the default timeout for that state
+ */
+ protected final def startWith(stateName: S,
+ stateData: D,
+ timeout: Timeout = None) = {
+ currentState = State(stateName, stateData, timeout)
+ }
+
+ /**
+ * Produce transition to other state. Return this from a state function in
+ * order to effect the transition.
+ *
+ * @param nextStateName state designator for the next state
+ * @return state transition descriptor
+ */
+ protected final def goto(nextStateName: S): State = {
+ State(nextStateName, currentState.stateData)
+ }
+
+ /**
+ * Produce "empty" transition descriptor. Return this from a state function
+ * when no state change is to be effected.
+ *
+ * @return descriptor for staying in current state
+ */
+ protected final def stay(): State = {
+ // cannot directly use currentState because of the timeout field
+ goto(currentState.stateName)
+ }
+
+ /**
+ * Produce change descriptor to stop this FSM actor with reason "Normal".
+ */
+ protected final def stop(): State = {
+ stop(Normal)
+ }
+
+ /**
+ * Produce change descriptor to stop this FSM actor including specified reason.
+ */
+ protected final def stop(reason: Reason): State = {
+ stop(reason, currentState.stateData)
+ }
+
+ /**
+ * Produce change descriptor to stop this FSM actor including specified reason.
+ */
+ protected final def stop(reason: Reason, stateData: D): State = {
+ stay using stateData withStopReason (reason)
+ }
+
+ /**
+ * Schedule named timer to deliver message after given delay, possibly repeating.
+ * @param name identifier to be used with cancelTimer()
+ * @param msg message to be delivered
+ * @param timeout delay of first message delivery and between subsequent messages
+ * @param repeat send once if false, scheduleAtFixedRate if true
+ * @return current state descriptor
+ */
+ protected final def setTimer(name: String, msg: AnyRef, timeout: Duration, repeat: Boolean): State = {
+ if (timers contains name) {
+ timers(name).cancel
+ }
+ val timer = Timer(name, msg, repeat, timerGen.next)
+ timer.schedule(self, timeout)
+ timers(name) = timer
+ stay
+ }
+
+ /**
+ * Cancel named timer, ensuring that the message is not subsequently delivered (no race).
+ * @param name of the timer to cancel
+ */
+ protected final def cancelTimer(name: String) = {
+ if (timers contains name) {
+ timers(name).cancel
+ timers -= name
+ }
+ }
+
+ /**
+ * Inquire whether the named timer is still active. Returns true unless the
+ * timer does not exist, has previously been canceled or if it was a
+ * single-shot timer whose message was already received.
+ */
+ protected final def timerActive_?(name: String) = timers contains name
+
+ /**
+ * Set state timeout explicitly. This method can safely be used from within a
+ * state handler.
+ */
+ protected final def setStateTimeout(state: S, timeout: Timeout) {
+ stateTimeouts(state) = timeout
+ }
+
+ /**
+ * Set handler which is called upon each state transition, i.e. not when
+ * staying in the same state. This may use the pair extractor defined in the
+ * FSM companion object like so:
+ *
+ * <pre>
+ * onTransition {
+ * case Old -> New => doSomething
+ * }
+ * </pre>
+ *
+ * It is also possible to supply a 2-ary function object:
+ *
+ * <pre>
+ * onTransition(handler _)
+ *
+ * private def handler(from: S, to: S) { ... }
+ * </pre>
+ *
+ * The underscore is unfortunately necessary to enable the nicer syntax shown
+ * above (it uses the implicit conversion total2pf under the hood).
+ *
+ * <b>Multiple handlers may be installed, and every one of them will be
+ * called, not only the first one matching.</b>
+ */
+ protected final def onTransition(transitionHandler: TransitionHandler) {
+ transitionEvent :+= transitionHandler
+ }
+
+ /**
+ * Convenience wrapper for using a total function instead of a partial
+ * function literal. To be used with onTransition.
+ */
+ implicit protected final def total2pf(transitionHandler: (S, S) => Unit) =
+ new PartialFunction[(S, S), Unit] {
+ def isDefinedAt(in: (S, S)) = true
+ def apply(in: (S, S)) { transitionHandler(in._1, in._2) }
+ }
+
+ /**
+ * Set handler which is called upon termination of this FSM actor.
+ */
+ protected final def onTermination(terminationHandler: PartialFunction[StopEvent[S, D], Unit]) = {
+ terminateEvent = terminationHandler
+ }
+
+ /**
+ * Set handler which is called upon reception of unhandled messages.
+ */
+ protected final def whenUnhandled(stateFunction: StateFunction) = {
+ handleEvent = stateFunction orElse handleEventDefault
+ }
+
+ /**
+ * Verify existence of initial state and setup timers. This should be the
+ * last call within the constructor.
+ */
+ def initialize {
+ makeTransition(currentState)
+ }
+
+ /**
+ * ****************************************************************
+ * PRIVATE IMPLEMENTATION DETAILS
+ * ****************************************************************
+ */
+
+ /*
+ * FSM State data and current timeout handling
+ */
+ private var currentState: State = _
+ private var timeoutFuture: Option[ScheduledFuture[AnyRef]] = None
+ private var generation: Long = 0L
+
+ /*
+ * Timer handling
+ */
+ private val timers = mutable.Map[String, Timer]()
+ private val timerGen = Iterator from 0
+
+ /*
+ * State definitions
+ */
+ private val stateFunctions = mutable.Map[S, StateFunction]()
+ private val stateTimeouts = mutable.Map[S, Timeout]()
+
+ private def register(name: S, function: StateFunction, timeout: Timeout) {
+ if (stateFunctions contains name) {
+ stateFunctions(name) = stateFunctions(name) orElse function
+ stateTimeouts(name) = timeout orElse stateTimeouts(name)
+ } else {
+ stateFunctions(name) = function
+ stateTimeouts(name) = timeout
+ }
+ }
+
+ /*
+ * unhandled event handler
+ */
+ private val handleEventDefault: StateFunction = {
+ case Event(value, stateData) =>
+ stay
+ }
+ private var handleEvent: StateFunction = handleEventDefault
+
+ /*
+ * termination handling
+ */
+ private var terminateEvent: PartialFunction[StopEvent[S, D], Unit] = {
+ case StopEvent(Failure(cause), _, _) =>
+ case StopEvent(reason, _, _) =>
+ }
+
+ /*
+ * transition handling
+ */
+ private var transitionEvent: List[TransitionHandler] = Nil
+ private def handleTransition(prev: S, next: S) {
+ val tuple = (prev, next)
+ for (te ← transitionEvent) { if (te.isDefinedAt(tuple)) te(tuple) }
+ }
+
+ // ListenerManagement shall not start() or stop() listener actors
+ override protected val manageLifeCycleOfListeners = false
+
+ /**
+ * *******************************************
+ * Main actor receive() method
+ * *******************************************
+ */
+ override final protected def receive: Receive = {
+ case TimeoutMarker(gen) =>
+ if (generation == gen) {
+ processEvent(StateTimeout)
+ }
+ case t at Timer(name, msg, repeat, generation) =>
+ if ((timers contains name) && (timers(name).generation == generation)) {
+ processEvent(msg)
+ if (!repeat) {
+ timers -= name
+ }
+ }
+ case SubscribeTransitionCallBack(actorRef) =>
+ addListener(actorRef)
+ // send current state back as reference point
+ try {
+ actorRef ! CurrentState(self, currentState.stateName)
+ } catch {
+ case e: ActorInitializationException =>
+ EventHandler.warning(this, "trying to register not running listener")
+ }
+ case UnsubscribeTransitionCallBack(actorRef) =>
+ removeListener(actorRef)
+ case value => {
+ if (timeoutFuture.isDefined) {
+ timeoutFuture.get.cancel(true)
+ timeoutFuture = None
+ }
+ generation += 1
+ processEvent(value)
+ }
+ }
+
+ private def processEvent(value: Any) = {
+ val event = Event(value, currentState.stateData)
+ val stateFunc = stateFunctions(currentState.stateName)
+ val nextState = if (stateFunc isDefinedAt event) {
+ stateFunc(event)
+ } else {
+ // handleEventDefault ensures that this is always defined
+ handleEvent(event)
+ }
+ nextState.stopReason match {
+ case Some(reason) => terminate(reason)
+ case None => makeTransition(nextState)
+ }
+ }
+
+ private def makeTransition(nextState: State) = {
+ if (!stateFunctions.contains(nextState.stateName)) {
+ terminate(Failure("Next state %s does not exist".format(nextState.stateName)))
+ } else {
+ if (currentState.stateName != nextState.stateName) {
+ handleTransition(currentState.stateName, nextState.stateName)
+ notifyListeners(Transition(self, currentState.stateName, nextState.stateName))
+ }
+ applyState(nextState)
+ }
+ }
+
+ private def applyState(nextState: State) = {
+ currentState = nextState
+ val timeout = if (currentState.timeout.isDefined) currentState.timeout else stateTimeouts(currentState.stateName)
+ if (timeout.isDefined) {
+ val t = timeout.get
+ if (t.finite_? && t.length >= 0) {
+ timeoutFuture = Some(Scheduler.scheduleOnce(self, TimeoutMarker(generation), t.length, t.unit))
+ }
+ }
+ }
+
+ private def terminate(reason: Reason) = {
+ terminateEvent.apply(StopEvent(reason, currentState.stateName, currentState.stateData))
+ self.stop()
+ }
+
+ case class Event[D](event: Any, stateData: D)
+ object Ev {
+ def unapply[D](e: Event[D]): Option[Any] = Some(e.event)
+ }
+
+ case class State(stateName: S, stateData: D, timeout: Timeout = None) {
+
+ /**
+ * Modify state transition descriptor to include a state timeout for the
+ * next state. This timeout overrides any default timeout set for the next
+ * state.
+ */
+ def forMax(timeout: Duration): State = {
+ copy(timeout = Some(timeout))
+ }
+
+ /**
+ * Send reply to sender of the current message, if available.
+ *
+ * @return this state transition descriptor
+ */
+ def replying(replyValue: Any): State = {
+ self.sender match {
+ case Some(sender) => sender ! replyValue
+ case None =>
+ }
+ this
+ }
+
+ /**
+ * Modify state transition descriptor with new state data. The data will be
+ * set when transitioning to the new state.
+ */
+ def using(nextStateDate: D): State = {
+ copy(stateData = nextStateDate)
+ }
+
+ private[akka] var stopReason: Option[Reason] = None
+
+ private[akka] def withStopReason(reason: Reason): State = {
+ stopReason = Some(reason)
+ this
+ }
+ }
+
+ case class StopEvent[S, D](reason: Reason, currentState: S, stateData: D)
+}
diff --git a/test/disabled/presentation/akka/src/akka/actor/Scheduler.scala b/test/disabled/presentation/akka/src/akka/actor/Scheduler.scala
new file mode 100644
index 0000000..128584f
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/actor/Scheduler.scala
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2007 WorldWide Conferencing, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Rework of David Pollak's ActorPing class in the Lift Project
+ * which is licensed under the Apache 2 License.
+ */
+package akka.actor
+
+import scala.collection.JavaConversions
+
+import java.util.concurrent._
+
+import akka.event.EventHandler
+import akka.AkkaException
+
+object Scheduler {
+ import Actor._
+
+ case class SchedulerException(msg: String, e: Throwable) extends RuntimeException(msg, e)
+
+ @volatile
+ private var service = Executors.newSingleThreadScheduledExecutor(SchedulerThreadFactory)
+
+ /**
+ * Schedules to send the specified message to the receiver after initialDelay and then repeated after delay
+ */
+ def schedule(receiver: ActorRef, message: AnyRef, initialDelay: Long, delay: Long, timeUnit: TimeUnit): ScheduledFuture[AnyRef] = {
+ try {
+ service.scheduleAtFixedRate(
+ new Runnable { def run = receiver ! message },
+ initialDelay, delay, timeUnit).asInstanceOf[ScheduledFuture[AnyRef]]
+ } catch {
+ case e: Exception =>
+ val error = SchedulerException(message + " could not be scheduled on " + receiver, e)
+ EventHandler.error(error, this, "%s @ %s".format(receiver, message))
+ throw error
+ }
+ }
+
+ /**
+ * Schedules to run specified function to the receiver after initialDelay and then repeated after delay,
+ * avoid blocking operations since this is executed in the schedulers thread
+ */
+ def schedule(f: () => Unit, initialDelay: Long, delay: Long, timeUnit: TimeUnit): ScheduledFuture[AnyRef] =
+ schedule(new Runnable { def run = f() }, initialDelay, delay, timeUnit)
+
+ /**
+ * Schedules to run specified runnable to the receiver after initialDelay and then repeated after delay,
+ * avoid blocking operations since this is executed in the schedulers thread
+ */
+ def schedule(runnable: Runnable, initialDelay: Long, delay: Long, timeUnit: TimeUnit): ScheduledFuture[AnyRef] = {
+ try {
+ service.scheduleAtFixedRate(runnable, initialDelay, delay, timeUnit).asInstanceOf[ScheduledFuture[AnyRef]]
+ } catch {
+ case e: Exception =>
+ val error = SchedulerException("Failed to schedule a Runnable", e)
+ EventHandler.error(error, this, error.getMessage)
+ throw error
+ }
+ }
+
+ /**
+ * Schedules to send the specified message to the receiver after delay
+ */
+ def scheduleOnce(receiver: ActorRef, message: AnyRef, delay: Long, timeUnit: TimeUnit): ScheduledFuture[AnyRef] = {
+ try {
+ service.schedule(
+ new Runnable { def run = receiver ! message },
+ delay, timeUnit).asInstanceOf[ScheduledFuture[AnyRef]]
+ } catch {
+ case e: Exception =>
+ val error = SchedulerException(message + " could not be scheduleOnce'd on " + receiver, e)
+ EventHandler.error(e, this, receiver + " @ " + message)
+ throw error
+ }
+ }
+
+ /**
+ * Schedules a function to be run after delay,
+ * avoid blocking operations since the runnable is executed in the schedulers thread
+ */
+ def scheduleOnce(f: () => Unit, delay: Long, timeUnit: TimeUnit): ScheduledFuture[AnyRef] =
+ scheduleOnce(new Runnable { def run = f() }, delay, timeUnit)
+
+ /**
+ * Schedules a runnable to be run after delay,
+ * avoid blocking operations since the runnable is executed in the schedulers thread
+ */
+ def scheduleOnce(runnable: Runnable, delay: Long, timeUnit: TimeUnit): ScheduledFuture[AnyRef] = {
+ try {
+ service.schedule(runnable, delay, timeUnit).asInstanceOf[ScheduledFuture[AnyRef]]
+ } catch {
+ case e: Exception =>
+ val error = SchedulerException("Failed to scheduleOnce a Runnable", e)
+ EventHandler.error(e, this, error.getMessage)
+ throw error
+ }
+ }
+
+ def shutdown() {
+ synchronized {
+ service.shutdown()
+ }
+ }
+
+ def restart() {
+ synchronized {
+ shutdown()
+ service = Executors.newSingleThreadScheduledExecutor(SchedulerThreadFactory)
+ }
+ }
+}
+
+private object SchedulerThreadFactory extends ThreadFactory {
+ private var count = 0
+ val threadFactory = Executors.defaultThreadFactory()
+
+ def newThread(r: Runnable): Thread = {
+ val thread = threadFactory.newThread(r)
+ thread.setName("akka:scheduler-" + count)
+ thread.setDaemon(true)
+ thread
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/actor/Supervisor.scala b/test/disabled/presentation/akka/src/akka/actor/Supervisor.scala
new file mode 100644
index 0000000..4a1309f
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/actor/Supervisor.scala
@@ -0,0 +1,176 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.actor
+
+import akka.AkkaException
+import akka.util._
+import ReflectiveAccess._
+import Actor._
+
+import java.util.concurrent.{ CopyOnWriteArrayList, ConcurrentHashMap }
+import java.net.InetSocketAddress
+import akka.config.Supervision._
+
+class SupervisorException private[akka] (message: String, cause: Throwable = null) extends AkkaException(message, cause)
+
+/**
+ * Factory object for creating supervisors declarative. It creates instances of the 'Supervisor' class.
+ * These are not actors, if you need a supervisor that is an Actor then you have to use the 'SupervisorActor'
+ * factory object.
+ * <p/>
+ *
+ * Here is a sample on how to use it:
+ * <pre>
+ * val supervisor = Supervisor(
+ * SupervisorConfig(
+ * RestartStrategy(OneForOne, 3, 10, List(classOf[Exception]),
+ * Supervise(
+ * myFirstActor,
+ * Permanent) ::
+ * Supervise(
+ * mySecondActor,
+ * Permanent) ::
+ * Nil))
+ * </pre>
+ *
+ * You dynamically link and unlink child children using the 'link' and 'unlink' methods.
+ * <pre>
+ * supervisor.link(child)
+ * supervisor.unlink(child)
+ * </pre>
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+object Supervisor {
+ def apply(config: SupervisorConfig): Supervisor = SupervisorFactory(config).newInstance.start
+}
+
+/**
+ * Use this factory instead of the Supervisor factory object if you want to control
+ * instantiation and starting of the Supervisor, if not then it is easier and better
+ * to use the Supervisor factory object.
+ * <p>
+ * Example usage:
+ * <pre>
+ * val factory = SupervisorFactory(
+ * SupervisorConfig(
+ * RestartStrategy(OneForOne, 3, 10, List(classOf[Exception]),
+ * Supervise(
+ * myFirstActor,
+ * Permanent) ::
+ * Supervise(
+ * mySecondActor,
+ * Permanent) ::
+ * Nil))
+ * </pre>
+ *
+ * Then create a new Supervisor tree with the concrete Services we have defined.
+ *
+ * <pre>
+ * val supervisor = factory.newInstance
+ * supervisor.start // start up all managed servers
+ * </pre>
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+case class SupervisorFactory(val config: SupervisorConfig) {
+
+ def newInstance: Supervisor = newInstanceFor(config)
+
+ def newInstanceFor(config: SupervisorConfig): Supervisor = {
+ val supervisor = new Supervisor(config.restartStrategy, config.maxRestartsHandler)
+ supervisor.configure(config)
+ supervisor.start
+ supervisor
+ }
+}
+
+/**
+ * <b>NOTE:</b>
+ * <p/>
+ * The supervisor class is only used for the configuration system when configuring supervisor
+ * hierarchies declaratively. Should not be used as part of the regular programming API. Instead
+ * wire the children together using 'link', 'spawnLink' etc. and set the 'trapExit' flag in the
+ * children that should trap error signals and trigger restart.
+ * <p/>
+ * See the ScalaDoc for the SupervisorFactory for an example on how to declaratively wire up children.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+sealed class Supervisor(handler: FaultHandlingStrategy, maxRestartsHandler: (ActorRef, MaximumNumberOfRestartsWithinTimeRangeReached) => Unit) {
+ import Supervisor._
+
+ private val _childActors = new ConcurrentHashMap[String, List[ActorRef]]
+ private val _childSupervisors = new CopyOnWriteArrayList[Supervisor]
+
+ private[akka] val supervisor = actorOf(new SupervisorActor(handler, maxRestartsHandler)).start()
+
+ def uuid = supervisor.uuid
+
+ def start: Supervisor = {
+ this
+ }
+
+ def shutdown(): Unit = supervisor.stop()
+
+ def link(child: ActorRef) = supervisor.link(child)
+
+ def unlink(child: ActorRef) = supervisor.unlink(child)
+
+ def children: List[ActorRef] =
+ _childActors.values.toArray.toList.asInstanceOf[List[List[ActorRef]]].flatten
+
+ def childSupervisors: List[Supervisor] =
+ _childActors.values.toArray.toList.asInstanceOf[List[Supervisor]]
+
+ def configure(config: SupervisorConfig): Unit = config match {
+ case SupervisorConfig(_, servers, _) =>
+
+ servers.map(server =>
+ server match {
+ case Supervise(actorRef, lifeCycle, registerAsRemoteService) =>
+ actorRef.start()
+ val className = actorRef.actor.getClass.getName
+ val currentActors = {
+ val list = _childActors.get(className)
+ if (list eq null) List[ActorRef]()
+ else list
+ }
+ _childActors.put(className, actorRef :: currentActors)
+ actorRef.lifeCycle = lifeCycle
+ supervisor.link(actorRef)
+ if (registerAsRemoteService)
+ Actor.remote.register(actorRef)
+ case supervisorConfig at SupervisorConfig(_, _, _) => // recursive supervisor configuration
+ val childSupervisor = Supervisor(supervisorConfig)
+ supervisor.link(childSupervisor.supervisor)
+ _childSupervisors.add(childSupervisor)
+ })
+ }
+}
+
+/**
+ * For internal use only.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+final class SupervisorActor private[akka] (handler: FaultHandlingStrategy, maxRestartsHandler: (ActorRef, MaximumNumberOfRestartsWithinTimeRangeReached) => Unit) extends Actor {
+ self.faultHandler = handler
+
+ override def postStop(): Unit = {
+ val i = self.linkedActors.values.iterator
+ while (i.hasNext) {
+ val ref = i.next
+ ref.stop()
+ self.unlink(ref)
+ }
+ }
+
+ def receive = {
+ case max at MaximumNumberOfRestartsWithinTimeRangeReached(_, _, _, _) => maxRestartsHandler(self, max)
+ case unknown => throw new SupervisorException(
+ "SupervisorActor can not respond to messages.\n\tUnknown message [" + unknown + "]")
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/actor/UntypedActor.scala b/test/disabled/presentation/akka/src/akka/actor/UntypedActor.scala
new file mode 100644
index 0000000..cbc43f2
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/actor/UntypedActor.scala
@@ -0,0 +1,134 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.actor
+
+import akka.japi.{ Creator, Procedure }
+
+/**
+ * Subclass this abstract class to create a MDB-style untyped actor.
+ * <p/>
+ * This class is meant to be used from Java.
+ * <p/>
+ * Here is an example on how to create and use an UntypedActor:
+ * <pre>
+ * public class SampleUntypedActor extends UntypedActor {
+ * public void onReceive(Object message) throws Exception {
+ * if (message instanceof String) {
+ * String msg = (String)message;
+ *
+ * if (msg.equals("UseReply")) {
+ * // Reply to original sender of message using the 'replyUnsafe' method
+ * getContext().replyUnsafe(msg + ":" + getContext().getUuid());
+ *
+ * } else if (msg.equals("UseSender") && getContext().getSender().isDefined()) {
+ * // Reply to original sender of message using the sender reference
+ * // also passing along my own reference (the context)
+ * getContext().getSender().get().sendOneWay(msg, context);
+ *
+ * } else if (msg.equals("UseSenderFuture") && getContext().getSenderFuture().isDefined()) {
+ * // Reply to original sender of message using the sender future reference
+ * getContext().getSenderFuture().get().completeWithResult(msg);
+ *
+ * } else if (msg.equals("SendToSelf")) {
+ * // Send message to the actor itself recursively
+ * getContext().sendOneWay(msg)
+ *
+ * } else if (msg.equals("ForwardMessage")) {
+ * // Retrieve an actor from the ActorRegistry by ID and get an ActorRef back
+ * ActorRef actorRef = Actor.registry.actorsFor("some-actor-id").head();
+ *
+ * } else throw new IllegalArgumentException("Unknown message: " + message);
+ * } else throw new IllegalArgumentException("Unknown message: " + message);
+ * }
+ *
+ * public static void main(String[] args) {
+ * ActorRef actor = Actors.actorOf(SampleUntypedActor.class);
+ * actor.start();
+ * actor.sendOneWay("SendToSelf");
+ * actor.stop();
+ * }
+ * }
+ * </pre>
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+abstract class UntypedActor extends Actor {
+
+ /**
+ * To be implemented by concrete UntypedActor. Defines the message handler.
+ */
+ @throws(classOf[Exception])
+ def onReceive(message: Any): Unit
+
+ /**
+ * Returns the 'self' reference with the API.
+ */
+ def getContext(): ActorRef = self
+
+ /**
+ * Returns the 'self' reference with the API.
+ */
+ def context(): ActorRef = self
+
+ /**
+ * Java API for become
+ */
+ def become(behavior: Procedure[Any]): Unit = become(behavior, false)
+
+ /*
+ * Java API for become with optional discardOld
+ */
+ def become(behavior: Procedure[Any], discardOld: Boolean): Unit =
+ super.become({ case msg => behavior.apply(msg) }, discardOld)
+
+ /**
+ * User overridable callback.
+ * <p/>
+ * Is called when an Actor is started by invoking 'actor.start()'.
+ */
+ override def preStart() {}
+
+ /**
+ * User overridable callback.
+ * <p/>
+ * Is called when 'actor.stop()' is invoked.
+ */
+ override def postStop() {}
+
+ /**
+ * User overridable callback.
+ * <p/>
+ * Is called on a crashed Actor right BEFORE it is restarted to allow clean up of resources before Actor is terminated.
+ */
+ override def preRestart(reason: Throwable) {}
+
+ /**
+ * User overridable callback.
+ * <p/>
+ * Is called right AFTER restart on the newly created Actor to allow reinitialization after an Actor crash.
+ */
+ override def postRestart(reason: Throwable) {}
+
+ /**
+ * User overridable callback.
+ * <p/>
+ * Is called when a message isn't handled by the current behavior of the actor
+ * by default it throws an UnhandledMessageException
+ */
+ override def unhandled(msg: Any) {
+ throw new UnhandledMessageException(msg, self)
+ }
+
+ final protected def receive = {
+ case msg => onReceive(msg)
+ }
+}
+
+/**
+ * Factory closure for an UntypedActor, to be used with 'Actors.actorOf(factory)'.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+trait UntypedActorFactory extends Creator[Actor]
diff --git a/test/disabled/presentation/akka/src/akka/actor/package.scala b/test/disabled/presentation/akka/src/akka/actor/package.scala
new file mode 100644
index 0000000..fbeeed4
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/actor/package.scala
@@ -0,0 +1,23 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka
+
+import actor.{ ScalaActorRef, ActorRef }
+
+package object actor {
+ implicit def actorRef2Scala(ref: ActorRef): ScalaActorRef =
+ ref.asInstanceOf[ScalaActorRef]
+
+ implicit def scala2ActorRef(ref: ScalaActorRef): ActorRef =
+ ref.asInstanceOf[ActorRef]
+
+ type Uuid = com.eaio.uuid.UUID
+
+ def newUuid(): Uuid = new Uuid()
+
+ def uuidFrom(time: Long, clockSeqAndNode: Long): Uuid = new Uuid(time, clockSeqAndNode)
+
+ def uuidFrom(uuid: String): Uuid = new Uuid(uuid)
+}
diff --git a/test/disabled/presentation/akka/src/akka/config/Config.scala b/test/disabled/presentation/akka/src/akka/config/Config.scala
new file mode 100644
index 0000000..6578c66
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/config/Config.scala
@@ -0,0 +1,93 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.config
+
+import akka.AkkaException
+
+class ConfigurationException(message: String, cause: Throwable = null) extends AkkaException(message, cause)
+class ModuleNotAvailableException(message: String, cause: Throwable = null) extends AkkaException(message, cause)
+
+/**
+ * Loads up the configuration (from the akka.conf file).
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+object Config {
+ val VERSION = "1.1.3"
+
+ val HOME = {
+ val envHome = System.getenv("AKKA_HOME") match {
+ case null | "" | "." => None
+ case value => Some(value)
+ }
+
+ val systemHome = System.getProperty("akka.home") match {
+ case null | "" => None
+ case value => Some(value)
+ }
+
+ envHome orElse systemHome
+ }
+
+ val config: Configuration = try {
+ val confName = {
+ val envConf = System.getenv("AKKA_MODE") match {
+ case null | "" => None
+ case value => Some(value)
+ }
+
+ val systemConf = System.getProperty("akka.mode") match {
+ case null | "" => None
+ case value => Some(value)
+ }
+
+ (envConf orElse systemConf).map("akka." + _ + ".conf").getOrElse("akka.conf")
+ }
+
+ val newInstance =
+ if (System.getProperty("akka.config", "") != "") {
+ val configFile = System.getProperty("akka.config", "")
+ println("Loading config from -Dakka.config=" + configFile)
+ Configuration.fromFile(configFile)
+ } else if (getClass.getClassLoader.getResource(confName) ne null) {
+ println("Loading config [" + confName + "] from the application classpath.")
+ Configuration.fromResource(confName, getClass.getClassLoader)
+ } else if (HOME.isDefined) {
+ val configFile = HOME.get + "/config/" + confName
+ println("AKKA_HOME is defined as [" + HOME.get + "], loading config from [" + configFile + "].")
+ Configuration.fromFile(configFile)
+ } else {
+ println(
+ "\nCan't load '" + confName + "'." +
+ "\nOne of the three ways of locating the '" + confName + "' file needs to be defined:" +
+ "\n\t1. Define the '-Dakka.config=...' system property option." +
+ "\n\t2. Put the '" + confName + "' file on the classpath." +
+ "\n\t3. Define 'AKKA_HOME' environment variable pointing to the root of the Akka distribution." +
+ "\nI have no way of finding the '" + confName + "' configuration file." +
+ "\nUsing default values everywhere.")
+ Configuration.fromString("akka {}") // default empty config
+ }
+
+ val configVersion = newInstance.getString("akka.version", VERSION)
+ if (configVersion != VERSION)
+ throw new ConfigurationException(
+ "Akka JAR version [" + VERSION + "] is different than the provided config version [" + configVersion + "]")
+
+ newInstance
+ } catch {
+ case e =>
+ System.err.println("Couldn't parse config, fatal error.")
+ e.printStackTrace(System.err)
+ System.exit(-1)
+ throw e
+ }
+
+ val CONFIG_VERSION = config.getString("akka.version", VERSION)
+
+ val TIME_UNIT = config.getString("akka.time-unit", "seconds")
+
+ val startTime = System.currentTimeMillis
+ def uptime = (System.currentTimeMillis - startTime) / 1000
+}
diff --git a/test/disabled/presentation/akka/src/akka/config/ConfigParser.scala b/test/disabled/presentation/akka/src/akka/config/ConfigParser.scala
new file mode 100644
index 0000000..73fac5e
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/config/ConfigParser.scala
@@ -0,0 +1,74 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ *
+ * Based on Configgy by Robey Pointer.
+ * Copyright 2009 Robey Pointer <robeypointer at gmail.com>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+
+package akka.config
+
+import scala.collection.mutable
+import scala.util.parsing.combinator._
+
+class ConfigParser(var prefix: String = "", map: mutable.Map[String, Any] = mutable.Map.empty[String, Any], importer: Importer) extends RegexParsers {
+ val sections = mutable.Stack[String]()
+
+ def createPrefix = {
+ prefix = if (sections.isEmpty) "" else sections.toList.reverse.mkString("", ".", ".")
+ }
+
+ override val whiteSpace = """(\s+|#[^\n]*\n)+""".r
+
+ // tokens
+
+ val numberToken: Parser[String] = """-?\d+(\.\d+)?""".r
+ val stringToken: Parser[String] = ("\"" + """([^\\\"]|\\[^ux]|\\\n|\\u[0-9a-fA-F]{4}|\\x[0-9a-fA-F]{2})*""" + "\"").r
+ val booleanToken: Parser[String] = "(true|on|false|off)".r
+ val identToken: Parser[String] = """([\da-zA-Z_][-\w]*)(\.[a-zA-Z_][-\w]*)*""".r
+ val assignToken: Parser[String] = "=".r
+ val sectionToken: Parser[String] = """[a-zA-Z][-\w]*""".r
+
+ // values
+
+ def value: Parser[Any] = number | string | list | boolean
+ def number = numberToken
+ def string = stringToken ^^ { s => s.substring(1, s.length - 1) }
+ def list = "[" ~> repsep(string | numberToken, opt(",")) <~ (opt(",") ~ "]")
+ def boolean = booleanToken
+
+ // parser
+
+ def root = rep(includeFile | assignment | sectionOpen | sectionClose)
+
+ def includeFile = "include" ~> string ^^ {
+ case filename: String =>
+ new ConfigParser(prefix, map, importer) parse importer.importFile(filename)
+ }
+
+ def assignment = identToken ~ assignToken ~ value ^^ {
+ case k ~ a ~ v => map(prefix + k) = v
+ }
+
+ def sectionOpen = sectionToken <~ "{" ^^ { name =>
+ sections push name
+ createPrefix
+ }
+
+ def sectionClose = "}" ^^ { _ =>
+ if (sections.isEmpty) {
+ failure("dangling close tag")
+ } else {
+ sections.pop
+ createPrefix
+ }
+ }
+
+ def parse(in: String): Map[String, Any] = {
+ parseAll(root, in) match {
+ case Success(result, _) => map.toMap
+ case x at Failure(msg, _) => throw new ConfigurationException(x.toString)
+ case x at Error(msg, _) => throw new ConfigurationException(x.toString)
+ }
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/config/Configuration.scala b/test/disabled/presentation/akka/src/akka/config/Configuration.scala
new file mode 100644
index 0000000..81c32fc
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/config/Configuration.scala
@@ -0,0 +1,166 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ *
+ * Based on Configgy by Robey Pointer.
+ * Copyright 2009 Robey Pointer <robeypointer at gmail.com>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+
+package akka.config
+
+import java.io.File
+
+object Configuration {
+ val DefaultPath = new File(".").getCanonicalPath
+ val DefaultImporter = new FilesystemImporter(DefaultPath)
+
+ def load(data: String, importer: Importer = DefaultImporter): Configuration = {
+ val parser = new ConfigParser(importer = importer)
+ new Configuration(parser parse data)
+ }
+
+ def fromFile(filename: String, importer: Importer): Configuration = {
+ load(importer.importFile(filename), importer)
+ }
+
+ def fromFile(path: String, filename: String): Configuration = {
+ val importer = new FilesystemImporter(path)
+ fromFile(filename, importer)
+ }
+
+ def fromFile(filename: String): Configuration = {
+ val n = filename.lastIndexOf('/')
+ if (n < 0) {
+ fromFile(DefaultPath, filename)
+ } else {
+ fromFile(filename.substring(0, n), filename.substring(n + 1))
+ }
+ }
+
+ def fromResource(filename: String): Configuration = {
+ fromResource(filename, ClassLoader.getSystemClassLoader)
+ }
+
+ def fromResource(filename: String, classLoader: ClassLoader): Configuration = {
+ val importer = new ResourceImporter(classLoader)
+ fromFile(filename, importer)
+ }
+
+ def fromMap(map: Map[String, Any]) = {
+ new Configuration(map)
+ }
+
+ def fromString(data: String): Configuration = {
+ load(data)
+ }
+}
+
+class Configuration(val map: Map[String, Any]) {
+ private val trueValues = Set("true", "on")
+ private val falseValues = Set("false", "off")
+
+ def contains(key: String): Boolean = map contains key
+
+ def keys: Iterable[String] = map.keys
+
+ def getAny(key: String): Option[Any] = {
+ try {
+ Some(map(key))
+ } catch {
+ case _ => None
+ }
+ }
+
+ def getAny(key: String, defaultValue: Any): Any = getAny(key).getOrElse(defaultValue)
+
+ def getSeqAny(key: String): Seq[Any] = {
+ try {
+ map(key).asInstanceOf[Seq[Any]]
+ } catch {
+ case _ => Seq.empty[Any]
+ }
+ }
+
+ def getString(key: String): Option[String] = map.get(key).map(_.toString)
+
+ def getString(key: String, defaultValue: String): String = getString(key).getOrElse(defaultValue)
+
+ def getList(key: String): Seq[String] = {
+ try {
+ map(key).asInstanceOf[Seq[String]]
+ } catch {
+ case _ => Seq.empty[String]
+ }
+ }
+
+ def getInt(key: String): Option[Int] = {
+ try {
+ Some(map(key).toString.toInt)
+ } catch {
+ case _ => None
+ }
+ }
+
+ def getInt(key: String, defaultValue: Int): Int = getInt(key).getOrElse(defaultValue)
+
+ def getLong(key: String): Option[Long] = {
+ try {
+ Some(map(key).toString.toLong)
+ } catch {
+ case _ => None
+ }
+ }
+
+ def getLong(key: String, defaultValue: Long): Long = getLong(key).getOrElse(defaultValue)
+
+ def getFloat(key: String): Option[Float] = {
+ try {
+ Some(map(key).toString.toFloat)
+ } catch {
+ case _ => None
+ }
+ }
+
+ def getFloat(key: String, defaultValue: Float): Float = getFloat(key).getOrElse(defaultValue)
+
+ def getDouble(key: String): Option[Double] = {
+ try {
+ Some(map(key).toString.toDouble)
+ } catch {
+ case _ => None
+ }
+ }
+
+ def getDouble(key: String, defaultValue: Double): Double = getDouble(key).getOrElse(defaultValue)
+
+ def getBoolean(key: String): Option[Boolean] = {
+ getString(key) flatMap { s =>
+ val isTrue = trueValues.contains(s)
+ if (!isTrue && !falseValues.contains(s)) None
+ else Some(isTrue)
+ }
+ }
+
+ def getBoolean(key: String, defaultValue: Boolean): Boolean = getBool(key).getOrElse(defaultValue)
+
+ def getBool(key: String): Option[Boolean] = getBoolean(key)
+
+ def getBool(key: String, defaultValue: Boolean): Boolean = getBoolean(key, defaultValue)
+
+ def apply(key: String): String = getString(key) match {
+ case None => throw new ConfigurationException("undefined config: " + key)
+ case Some(v) => v
+ }
+
+ def apply(key: String, defaultValue: String) = getString(key, defaultValue)
+ def apply(key: String, defaultValue: Int) = getInt(key, defaultValue)
+ def apply(key: String, defaultValue: Long) = getLong(key, defaultValue)
+ def apply(key: String, defaultValue: Boolean) = getBool(key, defaultValue)
+
+ def getSection(name: String): Option[Configuration] = {
+ val l = name.length + 1
+ val m = map.collect { case (k, v) if k.startsWith(name) => (k.substring(l), v) }
+ if (m.isEmpty) None
+ else Some(new Configuration(m))
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/config/Configurator.scala b/test/disabled/presentation/akka/src/akka/config/Configurator.scala
new file mode 100644
index 0000000..2818339
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/config/Configurator.scala
@@ -0,0 +1,21 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.config
+
+import akka.config.Supervision.{ SuperviseTypedActor, FaultHandlingStrategy }
+
+private[akka] trait TypedActorConfiguratorBase {
+ def getExternalDependency[T](clazz: Class[T]): T
+
+ def configure(restartStrategy: FaultHandlingStrategy, components: List[SuperviseTypedActor]): TypedActorConfiguratorBase
+
+ def inject: TypedActorConfiguratorBase
+
+ def supervise: TypedActorConfiguratorBase
+
+ def reset
+
+ def stop
+}
diff --git a/test/disabled/presentation/akka/src/akka/config/Importer.scala b/test/disabled/presentation/akka/src/akka/config/Importer.scala
new file mode 100644
index 0000000..eebda1d
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/config/Importer.scala
@@ -0,0 +1,64 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ *
+ * Based on Configgy by Robey Pointer.
+ * Copyright 2009 Robey Pointer <robeypointer at gmail.com>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ */
+
+package akka.config
+
+import java.io.{ BufferedReader, File, FileInputStream, InputStream, InputStreamReader }
+
+/**
+ * An interface for finding config files and reading them into strings for
+ * parsing. This is used to handle `include` directives in config files.
+ */
+trait Importer {
+
+ def importFile(filename: String): String
+
+ private val BUFFER_SIZE = 8192
+
+ protected def streamToString(in: InputStream): String = {
+ try {
+ val reader = new BufferedReader(new InputStreamReader(in, "UTF-8"))
+ val buffer = new Array[Char](BUFFER_SIZE)
+ val sb = new StringBuilder
+ var n = 0
+ while (n >= 0) {
+ n = reader.read(buffer, 0, buffer.length)
+ if (n >= 0) {
+ sb.appendAll(buffer, 0, n)
+ }
+ }
+ in.close()
+ sb.toString
+ } catch {
+ case x => throw new ConfigurationException(x.toString)
+ }
+ }
+}
+
+/**
+ * An Importer that looks for imported config files in the filesystem.
+ * This is the default importer.
+ */
+class FilesystemImporter(val baseDir: String) extends Importer {
+ def importFile(filename: String): String = {
+ val f = new File(filename)
+ val file = if (f.isAbsolute) f else new File(baseDir, filename)
+ streamToString(new FileInputStream(file))
+ }
+}
+
+/**
+ * An Importer that looks for imported config files in the java resources
+ * of the system class loader (usually the jar used to launch this app).
+ */
+class ResourceImporter(classLoader: ClassLoader) extends Importer {
+ def importFile(filename: String): String = {
+ val stream = classLoader.getResourceAsStream(filename)
+ streamToString(stream)
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/config/SupervisionConfig.scala b/test/disabled/presentation/akka/src/akka/config/SupervisionConfig.scala
new file mode 100644
index 0000000..40f61f6
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/config/SupervisionConfig.scala
@@ -0,0 +1,134 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.config
+
+import akka.dispatch.MessageDispatcher
+import akka.actor.{ MaximumNumberOfRestartsWithinTimeRangeReached, ActorRef }
+import akka.japi.{ Procedure2, Procedure }
+
+case class RemoteAddress(val hostname: String, val port: Int)
+
+/**
+ * Configuration classes - not to be used as messages.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+object Supervision {
+ sealed abstract class ConfigElement
+
+ abstract class Server extends ConfigElement
+ sealed abstract class LifeCycle extends ConfigElement
+ sealed abstract class FaultHandlingStrategy(val trapExit: List[Class[_ <: Throwable]]) extends ConfigElement
+
+ case class SupervisorConfig(restartStrategy: FaultHandlingStrategy, worker: List[Server], maxRestartsHandler: (ActorRef, MaximumNumberOfRestartsWithinTimeRangeReached) => Unit = { (aRef, max) => () }) extends Server {
+ //Java API
+ def this(restartStrategy: FaultHandlingStrategy, worker: Array[Server]) = this(restartStrategy, worker.toList)
+ def this(restartStrategy: FaultHandlingStrategy, worker: Array[Server], restartHandler: Procedure2[ActorRef, MaximumNumberOfRestartsWithinTimeRangeReached]) = this(restartStrategy, worker.toList, { (aRef, max) => restartHandler.apply(aRef, max) })
+ }
+
+ class Supervise(val actorRef: ActorRef, val lifeCycle: LifeCycle, val registerAsRemoteService: Boolean = false) extends Server {
+ //Java API
+ def this(actorRef: ActorRef, lifeCycle: LifeCycle) =
+ this(actorRef, lifeCycle, false)
+ }
+
+ object Supervise {
+ def apply(actorRef: ActorRef, lifeCycle: LifeCycle, registerAsRemoteService: Boolean = false) = new Supervise(actorRef, lifeCycle, registerAsRemoteService)
+ def apply(actorRef: ActorRef, lifeCycle: LifeCycle) = new Supervise(actorRef, lifeCycle, false)
+ def unapply(supervise: Supervise) = Some((supervise.actorRef, supervise.lifeCycle, supervise.registerAsRemoteService))
+ }
+
+ object AllForOneStrategy {
+ def apply(trapExit: List[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int): AllForOneStrategy =
+ new AllForOneStrategy(trapExit,
+ if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
+ }
+
+ case class AllForOneStrategy(override val trapExit: List[Class[_ <: Throwable]],
+ maxNrOfRetries: Option[Int] = None,
+ withinTimeRange: Option[Int] = None) extends FaultHandlingStrategy(trapExit) {
+ def this(trapExit: List[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int) =
+ this(trapExit,
+ if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
+
+ def this(trapExit: Array[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int) =
+ this(trapExit.toList,
+ if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
+
+ def this(trapExit: java.util.List[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int) =
+ this(trapExit.toArray.toList.asInstanceOf[List[Class[_ <: Throwable]]],
+ if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
+ }
+
+ object OneForOneStrategy {
+ def apply(trapExit: List[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int): OneForOneStrategy =
+ new OneForOneStrategy(trapExit,
+ if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
+ }
+
+ case class OneForOneStrategy(override val trapExit: List[Class[_ <: Throwable]],
+ maxNrOfRetries: Option[Int] = None,
+ withinTimeRange: Option[Int] = None) extends FaultHandlingStrategy(trapExit) {
+ def this(trapExit: List[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int) =
+ this(trapExit,
+ if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
+
+ def this(trapExit: Array[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int) =
+ this(trapExit.toList,
+ if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
+
+ def this(trapExit: java.util.List[Class[_ <: Throwable]], maxNrOfRetries: Int, withinTimeRange: Int) =
+ this(trapExit.toArray.toList.asInstanceOf[List[Class[_ <: Throwable]]],
+ if (maxNrOfRetries < 0) None else Some(maxNrOfRetries), if (withinTimeRange < 0) None else Some(withinTimeRange))
+ }
+
+ case object NoFaultHandlingStrategy extends FaultHandlingStrategy(Nil)
+
+ //Scala API
+ case object Permanent extends LifeCycle
+ case object Temporary extends LifeCycle
+ case object UndefinedLifeCycle extends LifeCycle
+
+ //Java API (& Scala if you fancy)
+ def permanent(): LifeCycle = Permanent
+ def temporary(): LifeCycle = Temporary
+ def undefinedLifeCycle(): LifeCycle = UndefinedLifeCycle
+
+ //Java API
+ def noFaultHandlingStrategy = NoFaultHandlingStrategy
+
+ case class SuperviseTypedActor(_intf: Class[_],
+ val target: Class[_],
+ val lifeCycle: LifeCycle,
+ val timeout: Long,
+ _dispatcher: MessageDispatcher, // optional
+ _remoteAddress: RemoteAddress // optional
+ ) extends Server {
+ val intf: Option[Class[_]] = Option(_intf)
+ val dispatcher: Option[MessageDispatcher] = Option(_dispatcher)
+ val remoteAddress: Option[RemoteAddress] = Option(_remoteAddress)
+
+ def this(target: Class[_], lifeCycle: LifeCycle, timeout: Long) =
+ this(null: Class[_], target, lifeCycle, timeout, null: MessageDispatcher, null: RemoteAddress)
+
+ def this(intf: Class[_], target: Class[_], lifeCycle: LifeCycle, timeout: Long) =
+ this(intf, target, lifeCycle, timeout, null: MessageDispatcher, null: RemoteAddress)
+
+ def this(intf: Class[_], target: Class[_], lifeCycle: LifeCycle, timeout: Long, dispatcher: MessageDispatcher) =
+ this(intf, target, lifeCycle, timeout, dispatcher, null: RemoteAddress)
+
+ def this(target: Class[_], lifeCycle: LifeCycle, timeout: Long, dispatcher: MessageDispatcher) =
+ this(null: Class[_], target, lifeCycle, timeout, dispatcher, null: RemoteAddress)
+
+ def this(intf: Class[_], target: Class[_], lifeCycle: LifeCycle, timeout: Long, remoteAddress: RemoteAddress) =
+ this(intf, target, lifeCycle, timeout, null: MessageDispatcher, remoteAddress)
+
+ def this(target: Class[_], lifeCycle: LifeCycle, timeout: Long, remoteAddress: RemoteAddress) =
+ this(null: Class[_], target, lifeCycle, timeout, null: MessageDispatcher, remoteAddress)
+
+ def this(target: Class[_], lifeCycle: LifeCycle, timeout: Long, dispatcher: MessageDispatcher, remoteAddress: RemoteAddress) =
+ this(null: Class[_], target, lifeCycle, timeout, dispatcher, remoteAddress)
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/dataflow/DataFlow.scala b/test/disabled/presentation/akka/src/akka/dataflow/DataFlow.scala
new file mode 100644
index 0000000..bca7936
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/dataflow/DataFlow.scala
@@ -0,0 +1,165 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.dataflow
+
+import java.util.concurrent.atomic.AtomicReference
+import java.util.concurrent.{ ConcurrentLinkedQueue, LinkedBlockingQueue }
+
+import akka.event.EventHandler
+import akka.actor.{ Actor, ActorRef }
+import akka.actor.Actor._
+import akka.dispatch.CompletableFuture
+import akka.AkkaException
+import akka.japi.{ Function, Effect }
+
+/**
+ * Implements Oz-style dataflow (single assignment) variables.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+object DataFlow {
+ object Start
+ object Exit
+
+ class DataFlowVariableException(message: String, cause: Throwable = null) extends AkkaException(message, cause)
+
+ /**
+ * Executes the supplied thunk in another thread.
+ */
+ def thread(body: => Unit): Unit = spawn(body)
+
+ /**
+ * JavaAPI.
+ * Executes the supplied Effect in another thread.
+ */
+ def thread(body: Effect): Unit = spawn(body.apply)
+
+ /**
+ * Executes the supplied function in another thread.
+ */
+ def thread[A <: AnyRef, R <: AnyRef](body: A => R) =
+ actorOf(new ReactiveEventBasedThread(body)).start()
+
+ /**
+ * JavaAPI.
+ * Executes the supplied Function in another thread.
+ */
+ def thread[A <: AnyRef, R <: AnyRef](body: Function[A, R]) =
+ actorOf(new ReactiveEventBasedThread(body.apply)).start()
+
+ private class ReactiveEventBasedThread[A <: AnyRef, T <: AnyRef](body: A => T)
+ extends Actor {
+ def receive = {
+ case Exit => self.stop()
+ case message => self.reply(body(message.asInstanceOf[A]))
+ }
+ }
+
+ private object DataFlowVariable {
+ private sealed abstract class DataFlowVariableMessage
+ private case class Set[T <: Any](value: T) extends DataFlowVariableMessage
+ private object Get extends DataFlowVariableMessage
+ }
+
+ /**
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+ @deprecated("Superceeded by Future and CompletableFuture as of 1.1", "1.1")
+ sealed class DataFlowVariable[T <: Any](timeoutMs: Long) {
+ import DataFlowVariable._
+
+ def this() = this(1000 * 60)
+
+ private val value = new AtomicReference[Option[T]](None)
+ private val blockedReaders = new ConcurrentLinkedQueue[ActorRef]
+
+ private class In[T <: Any](dataFlow: DataFlowVariable[T]) extends Actor {
+ self.timeout = timeoutMs
+ def receive = {
+ case s at Set(v) =>
+ if (dataFlow.value.compareAndSet(None, Some(v.asInstanceOf[T]))) {
+ while (dataFlow.blockedReaders.peek ne null)
+ dataFlow.blockedReaders.poll ! s
+ } else throw new DataFlowVariableException(
+ "Attempt to change data flow variable (from [" + dataFlow.value.get + "] to [" + v + "])")
+ case Exit => self.stop()
+ }
+ }
+
+ private class Out[T <: Any](dataFlow: DataFlowVariable[T]) extends Actor {
+ self.timeout = timeoutMs
+ private var readerFuture: Option[CompletableFuture[Any]] = None
+ def receive = {
+ case Get => dataFlow.value.get match {
+ case Some(value) => self reply value
+ case None => readerFuture = self.senderFuture
+ }
+ case Set(v: T) => readerFuture.map(_ completeWithResult v)
+ case Exit => self.stop()
+ }
+ }
+
+ private[this] val in = actorOf(new In(this)).start()
+
+ /**
+ * Sets the value of this variable (if unset) with the value of the supplied variable.
+ */
+ def <<(ref: DataFlowVariable[T]) {
+ if (this.value.get.isEmpty) in ! Set(ref())
+ else throw new DataFlowVariableException(
+ "Attempt to change data flow variable (from [" + this.value.get + "] to [" + ref() + "])")
+ }
+
+ /**
+ * JavaAPI.
+ * Sets the value of this variable (if unset) with the value of the supplied variable.
+ */
+ def set(ref: DataFlowVariable[T]) { this << ref }
+
+ /**
+ * Sets the value of this variable (if unset).
+ */
+ def <<(value: T) {
+ if (this.value.get.isEmpty) in ! Set(value)
+ else throw new DataFlowVariableException(
+ "Attempt to change data flow variable (from [" + this.value.get + "] to [" + value + "])")
+ }
+
+ /**
+ * JavaAPI.
+ * Sets the value of this variable (if unset) with the value of the supplied variable.
+ */
+ def set(value: T) { this << value }
+
+ /**
+ * Retrieves the value of variable, throws a DataFlowVariableException if it times out.
+ */
+ def get(): T = this()
+
+ /**
+ * Retrieves the value of variable, throws a DataFlowVariableException if it times out.
+ */
+ def apply(): T = {
+ value.get getOrElse {
+ val out = actorOf(new Out(this)).start()
+
+ val result = try {
+ blockedReaders offer out
+ (out !! Get).as[T]
+ } catch {
+ case e: Exception =>
+ EventHandler.error(e, this, e.getMessage)
+ out ! Exit
+ throw e
+ }
+
+ result.getOrElse(throw new DataFlowVariableException(
+ "Timed out (after " + timeoutMs + " milliseconds) while waiting for result"))
+ }
+ }
+
+ def shutdown() { in ! Exit }
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala b/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala
new file mode 100644
index 0000000..7dd1bf6
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/dispatch/Dispatchers.scala
@@ -0,0 +1,227 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.dispatch
+
+import akka.actor.{ Actor, ActorRef }
+import akka.actor.newUuid
+import akka.config.Config._
+import akka.util.{ Duration, ReflectiveAccess }
+
+import akka.config.Configuration
+
+import java.util.concurrent.TimeUnit
+
+/**
+ * Scala API. Dispatcher factory.
+ * <p/>
+ * Example usage:
+ * <pre/>
+ * val dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher("name")
+ * dispatcher
+ * .withNewThreadPoolWithLinkedBlockingQueueWithCapacity(100)
+ * .setCorePoolSize(16)
+ * .setMaxPoolSize(128)
+ * .setKeepAliveTimeInMillis(60000)
+ * .setRejectionPolicy(new CallerRunsPolicy)
+ * .build
+ * </pre>
+ * <p/>
+ * Java API. Dispatcher factory.
+ * <p/>
+ * Example usage:
+ * <pre/>
+ * MessageDispatcher dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher("name");
+ * dispatcher
+ * .withNewThreadPoolWithLinkedBlockingQueueWithCapacity(100)
+ * .setCorePoolSize(16)
+ * .setMaxPoolSize(128)
+ * .setKeepAliveTimeInMillis(60000)
+ * .setRejectionPolicy(new CallerRunsPolicy())
+ * .build();
+ * </pre>
+ * <p/>
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+object Dispatchers {
+ val THROUGHPUT = config.getInt("akka.actor.throughput", 5)
+ val DEFAULT_SHUTDOWN_TIMEOUT = config.getLong("akka.actor.dispatcher-shutdown-timeout").
+ map(time => Duration(time, TIME_UNIT)).
+ getOrElse(Duration(1000, TimeUnit.MILLISECONDS))
+ val MAILBOX_CAPACITY = config.getInt("akka.actor.default-dispatcher.mailbox-capacity", -1)
+ val MAILBOX_PUSH_TIME_OUT = Duration(config.getInt("akka.actor.default-dispatcher.mailbox-push-timeout-time", 10), TIME_UNIT)
+ val THROUGHPUT_DEADLINE_TIME = Duration(config.getInt("akka.actor.throughput-deadline-time", -1), TIME_UNIT)
+ val THROUGHPUT_DEADLINE_TIME_MILLIS = THROUGHPUT_DEADLINE_TIME.toMillis.toInt
+ val MAILBOX_TYPE: MailboxType = if (MAILBOX_CAPACITY < 1) UnboundedMailbox() else BoundedMailbox()
+
+ lazy val defaultGlobalDispatcher = {
+ config.getSection("akka.actor.default-dispatcher").flatMap(from).getOrElse(globalExecutorBasedEventDrivenDispatcher)
+ }
+
+ object globalExecutorBasedEventDrivenDispatcher extends ExecutorBasedEventDrivenDispatcher("global", THROUGHPUT, THROUGHPUT_DEADLINE_TIME_MILLIS, MAILBOX_TYPE)
+
+ /**
+ * Creates a thread based dispatcher serving a single actor through the same single thread.
+ * Uses the default timeout
+ * <p/>
+ * E.g. each actor consumes its own thread.
+ */
+ def newThreadBasedDispatcher(actor: ActorRef) = new ThreadBasedDispatcher(actor)
+
+ /**
+ * Creates a thread based dispatcher serving a single actor through the same single thread.
+ * Uses the default timeout
+ * If capacity is negative, it's Integer.MAX_VALUE
+ * <p/>
+ * E.g. each actor consumes its own thread.
+ */
+ def newThreadBasedDispatcher(actor: ActorRef, mailboxCapacity: Int) = new ThreadBasedDispatcher(actor, mailboxCapacity)
+
+ /**
+ * Creates a thread based dispatcher serving a single actor through the same single thread.
+ * If capacity is negative, it's Integer.MAX_VALUE
+ * <p/>
+ * E.g. each actor consumes its own thread.
+ */
+ def newThreadBasedDispatcher(actor: ActorRef, mailboxCapacity: Int, pushTimeOut: Duration) =
+ new ThreadBasedDispatcher(actor, mailboxCapacity, pushTimeOut)
+
+ /**
+ * Creates an executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool.
+ * <p/>
+ * Has a fluent builder interface for configuring its semantics.
+ */
+ def newExecutorBasedEventDrivenDispatcher(name: String) =
+ ThreadPoolConfigDispatcherBuilder(config => new ExecutorBasedEventDrivenDispatcher(name, config), ThreadPoolConfig())
+
+ /**
+ * Creates an executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool.
+ * <p/>
+ * Has a fluent builder interface for configuring its semantics.
+ */
+ def newExecutorBasedEventDrivenDispatcher(name: String, throughput: Int, mailboxType: MailboxType) =
+ ThreadPoolConfigDispatcherBuilder(config =>
+ new ExecutorBasedEventDrivenDispatcher(name, throughput, THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType, config), ThreadPoolConfig())
+
+ /**
+ * Creates an executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool.
+ * <p/>
+ * Has a fluent builder interface for configuring its semantics.
+ */
+ def newExecutorBasedEventDrivenDispatcher(name: String, throughput: Int, throughputDeadlineMs: Int, mailboxType: MailboxType) =
+ ThreadPoolConfigDispatcherBuilder(config =>
+ new ExecutorBasedEventDrivenDispatcher(name, throughput, throughputDeadlineMs, mailboxType, config), ThreadPoolConfig())
+
+ /**
+ * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+ * <p/>
+ * Has a fluent builder interface for configuring its semantics.
+ */
+ def newExecutorBasedEventDrivenWorkStealingDispatcher(name: String) =
+ ThreadPoolConfigDispatcherBuilder(config => new ExecutorBasedEventDrivenWorkStealingDispatcher(name, config), ThreadPoolConfig())
+
+ /**
+ * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+ * <p/>
+ * Has a fluent builder interface for configuring its semantics.
+ */
+ def newExecutorBasedEventDrivenWorkStealingDispatcher(name: String, throughput: Int) =
+ ThreadPoolConfigDispatcherBuilder(config =>
+ new ExecutorBasedEventDrivenWorkStealingDispatcher(name, throughput, THROUGHPUT_DEADLINE_TIME_MILLIS, MAILBOX_TYPE, config), ThreadPoolConfig())
+
+ /**
+ * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+ * <p/>
+ * Has a fluent builder interface for configuring its semantics.
+ */
+ def newExecutorBasedEventDrivenWorkStealingDispatcher(name: String, throughput: Int, mailboxType: MailboxType) =
+ ThreadPoolConfigDispatcherBuilder(config =>
+ new ExecutorBasedEventDrivenWorkStealingDispatcher(name, throughput, THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType, config), ThreadPoolConfig())
+
+ /**
+ * Creates an executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+ * <p/>
+ * Has a fluent builder interface for configuring its semantics.
+ */
+ def newExecutorBasedEventDrivenWorkStealingDispatcher(name: String, throughput: Int, throughputDeadlineMs: Int, mailboxType: MailboxType) =
+ ThreadPoolConfigDispatcherBuilder(config =>
+ new ExecutorBasedEventDrivenWorkStealingDispatcher(name, throughput, throughputDeadlineMs, mailboxType, config), ThreadPoolConfig())
+ /**
+ * Utility function that tries to load the specified dispatcher config from the akka.conf
+ * or else use the supplied default dispatcher
+ */
+ def fromConfig(key: String, default: => MessageDispatcher = defaultGlobalDispatcher): MessageDispatcher =
+ config getSection key flatMap from getOrElse default
+
+ /*
+ * Creates or obtains a dispatcher from a ConfigMap according to the format below
+ *
+ * default-dispatcher {
+ * type = "GlobalExecutorBasedEventDriven" # Must be one of the following, all "Global*" are non-configurable
+ * # (ExecutorBasedEventDrivenWorkStealing), ExecutorBasedEventDriven,
+ * # GlobalExecutorBasedEventDriven
+ * # A FQCN to a class inheriting MessageDispatcherConfigurator with a no-arg visible constructor
+ * keep-alive-time = 60 # Keep alive time for threads
+ * core-pool-size-factor = 1.0 # No of core threads ... ceil(available processors * factor)
+ * max-pool-size-factor = 4.0 # Max no of threads ... ceil(available processors * factor)
+ * executor-bounds = -1 # Makes the Executor bounded, -1 is unbounded
+ * allow-core-timeout = on # Allow core threads to time out
+ * rejection-policy = "caller-runs" # abort, caller-runs, discard-oldest, discard
+ * throughput = 5 # Throughput for ExecutorBasedEventDrivenDispatcher
+ * }
+ * ex: from(config.getConfigMap(identifier).get)
+ *
+ * Gotcha: Only configures the dispatcher if possible
+ * Returns: None if "type" isn't specified in the config
+ * Throws: IllegalArgumentException if the value of "type" is not valid
+ * IllegalArgumentException if it cannot
+ */
+ def from(cfg: Configuration): Option[MessageDispatcher] = {
+ cfg.getString("type") map {
+ case "ExecutorBasedEventDriven" => new ExecutorBasedEventDrivenDispatcherConfigurator()
+ case "ExecutorBasedEventDrivenWorkStealing" => new ExecutorBasedEventDrivenWorkStealingDispatcherConfigurator()
+ case "GlobalExecutorBasedEventDriven" => GlobalExecutorBasedEventDrivenDispatcherConfigurator
+ case fqn =>
+ ReflectiveAccess.getClassFor[MessageDispatcherConfigurator](fqn) match {
+ case r: Right[_, Class[MessageDispatcherConfigurator]] =>
+ ReflectiveAccess.createInstance[MessageDispatcherConfigurator](r.b, Array[Class[_]](), Array[AnyRef]()) match {
+ case r: Right[Exception, MessageDispatcherConfigurator] => r.b
+ case l: Left[Exception, MessageDispatcherConfigurator] =>
+ throw new IllegalArgumentException("Cannot instantiate MessageDispatcherConfigurator type [%s], make sure it has a default no-args constructor" format fqn, l.a)
+ }
+ case l: Left[Exception, _] =>
+ throw new IllegalArgumentException("Unknown MessageDispatcherConfigurator type [%s]" format fqn, l.a)
+ }
+ } map {
+ _ configure cfg
+ }
+ }
+}
+
+object GlobalExecutorBasedEventDrivenDispatcherConfigurator extends MessageDispatcherConfigurator {
+ def configure(config: Configuration): MessageDispatcher = Dispatchers.globalExecutorBasedEventDrivenDispatcher
+}
+
+class ExecutorBasedEventDrivenDispatcherConfigurator extends MessageDispatcherConfigurator {
+ def configure(config: Configuration): MessageDispatcher = {
+ configureThreadPool(config, threadPoolConfig => new ExecutorBasedEventDrivenDispatcher(
+ config.getString("name", newUuid.toString),
+ config.getInt("throughput", Dispatchers.THROUGHPUT),
+ config.getInt("throughput-deadline-time", Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS),
+ mailboxType(config),
+ threadPoolConfig)).build
+ }
+}
+
+class ExecutorBasedEventDrivenWorkStealingDispatcherConfigurator extends MessageDispatcherConfigurator {
+ def configure(config: Configuration): MessageDispatcher = {
+ configureThreadPool(config, threadPoolConfig => new ExecutorBasedEventDrivenWorkStealingDispatcher(
+ config.getString("name", newUuid.toString),
+ config.getInt("throughput", Dispatchers.THROUGHPUT),
+ config.getInt("throughput-deadline-time", Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS),
+ mailboxType(config),
+ threadPoolConfig)).build
+ }
+}
\ No newline at end of file
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/ExecutorBasedEventDrivenDispatcher.scala b/test/disabled/presentation/akka/src/akka/dispatch/ExecutorBasedEventDrivenDispatcher.scala
new file mode 100644
index 0000000..bc3f29a
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/dispatch/ExecutorBasedEventDrivenDispatcher.scala
@@ -0,0 +1,305 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.dispatch
+
+import akka.event.EventHandler
+import akka.actor.{ ActorRef, IllegalActorStateException }
+import akka.util.{ ReflectiveAccess, Switch }
+
+import java.util.Queue
+import java.util.concurrent.atomic.AtomicReference
+import java.util.concurrent.{ TimeUnit, ExecutorService, RejectedExecutionException, ConcurrentLinkedQueue, LinkedBlockingQueue }
+
+/**
+ * Default settings are:
+ * <pre>
+ * - withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity
+ * - NR_START_THREADS = 16
+ * - NR_MAX_THREADS = 128
+ * - KEEP_ALIVE_TIME = 60000L // one minute
+ * </pre>
+ * <p/>
+ *
+ * The dispatcher has a fluent builder interface to build up a thread pool to suit your use-case.
+ * There is a default thread pool defined but make use of the builder if you need it. Here are some examples.
+ * <p/>
+ *
+ * Scala API.
+ * <p/>
+ * Example usage:
+ * <pre>
+ * val dispatcher = new ExecutorBasedEventDrivenDispatcher("name")
+ * dispatcher
+ * .withNewThreadPoolWithBoundedBlockingQueue(100)
+ * .setCorePoolSize(16)
+ * .setMaxPoolSize(128)
+ * .setKeepAliveTimeInMillis(60000)
+ * .setRejectionPolicy(new CallerRunsPolicy)
+ * .buildThreadPool
+ * </pre>
+ * <p/>
+ *
+ * Java API.
+ * <p/>
+ * Example usage:
+ * <pre>
+ * ExecutorBasedEventDrivenDispatcher dispatcher = new ExecutorBasedEventDrivenDispatcher("name");
+ * dispatcher
+ * .withNewThreadPoolWithBoundedBlockingQueue(100)
+ * .setCorePoolSize(16)
+ * .setMaxPoolSize(128)
+ * .setKeepAliveTimeInMillis(60000)
+ * .setRejectionPolicy(new CallerRunsPolicy())
+ * .buildThreadPool();
+ * </pre>
+ * <p/>
+ *
+ * But the preferred way of creating dispatchers is to use
+ * the {@link akka.dispatch.Dispatchers} factory object.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ * @param throughput positive integer indicates the dispatcher will only process so many messages at a time from the
+ * mailbox, without checking the mailboxes of other actors. Zero or negative means the dispatcher
+ * always continues until the mailbox is empty.
+ * Larger values (or zero or negative) increase throughput, smaller values increase fairness
+ */
+class ExecutorBasedEventDrivenDispatcher(
+ _name: String,
+ val throughput: Int = Dispatchers.THROUGHPUT,
+ val throughputDeadlineTime: Int = Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS,
+ val mailboxType: MailboxType = Dispatchers.MAILBOX_TYPE,
+ val config: ThreadPoolConfig = ThreadPoolConfig())
+ extends MessageDispatcher {
+
+ def this(_name: String, throughput: Int, throughputDeadlineTime: Int, mailboxType: MailboxType) =
+ this(_name, throughput, throughputDeadlineTime, mailboxType, ThreadPoolConfig()) // Needed for Java API usage
+
+ def this(_name: String, throughput: Int, mailboxType: MailboxType) =
+ this(_name, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType) // Needed for Java API usage
+
+ def this(_name: String, throughput: Int) =
+ this(_name, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
+
+ def this(_name: String, _config: ThreadPoolConfig) =
+ this(_name, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE, _config)
+
+ def this(_name: String) =
+ this(_name, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
+
+ val name = "akka:event-driven:dispatcher:" + _name
+
+ private[akka] val threadFactory = new MonitorableThreadFactory(name)
+ private[akka] val executorService = new AtomicReference[ExecutorService](config.createLazyExecutorService(threadFactory))
+
+ private[akka] def dispatch(invocation: MessageInvocation) = {
+ val mbox = getMailbox(invocation.receiver)
+ mbox enqueue invocation
+ registerForExecution(mbox)
+ }
+
+ private[akka] def executeFuture(invocation: FutureInvocation[_]): Unit = if (active.isOn) {
+ try executorService.get() execute invocation
+ catch {
+ case e: RejectedExecutionException =>
+ EventHandler.warning(this, e.toString)
+ throw e
+ }
+ }
+
+ /**
+ * @return the mailbox associated with the actor
+ */
+ protected def getMailbox(receiver: ActorRef) = receiver.mailbox.asInstanceOf[MessageQueue with ExecutableMailbox]
+
+ override def mailboxSize(actorRef: ActorRef) = getMailbox(actorRef).size
+
+ def createMailbox(actorRef: ActorRef): AnyRef = mailboxType match {
+ case b: UnboundedMailbox =>
+ new ConcurrentLinkedQueue[MessageInvocation] with MessageQueue with ExecutableMailbox {
+ @inline
+ final def dispatcher = ExecutorBasedEventDrivenDispatcher.this
+ @inline
+ final def enqueue(m: MessageInvocation) = this.add(m)
+ @inline
+ final def dequeue(): MessageInvocation = this.poll()
+ }
+ case b: BoundedMailbox =>
+ new DefaultBoundedMessageQueue(b.capacity, b.pushTimeOut) with ExecutableMailbox {
+ @inline
+ final def dispatcher = ExecutorBasedEventDrivenDispatcher.this
+ }
+ }
+
+ private[akka] def start {}
+
+ private[akka] def shutdown {
+ val old = executorService.getAndSet(config.createLazyExecutorService(threadFactory))
+ if (old ne null) {
+ old.shutdownNow()
+ }
+ }
+
+ private[akka] def registerForExecution(mbox: MessageQueue with ExecutableMailbox): Unit = {
+ if (mbox.dispatcherLock.tryLock()) {
+ if (active.isOn && !mbox.suspended.locked) { //If the dispatcher is active and the actor not suspended
+ try {
+ executorService.get() execute mbox
+ } catch {
+ case e: RejectedExecutionException =>
+ EventHandler.warning(this, e.toString)
+ mbox.dispatcherLock.unlock()
+ throw e
+ }
+ } else {
+ mbox.dispatcherLock.unlock() //If the dispatcher isn't active or if the actor is suspended, unlock the dispatcher lock
+ }
+ }
+ }
+
+ private[akka] def reRegisterForExecution(mbox: MessageQueue with ExecutableMailbox): Unit =
+ registerForExecution(mbox)
+
+ override val toString = getClass.getSimpleName + "[" + name + "]"
+
+ def suspend(actorRef: ActorRef) {
+ getMailbox(actorRef).suspended.tryLock
+ }
+
+ def resume(actorRef: ActorRef) {
+ val mbox = getMailbox(actorRef)
+ mbox.suspended.tryUnlock
+ reRegisterForExecution(mbox)
+ }
+}
+
+/**
+ * This is the behavior of an ExecutorBasedEventDrivenDispatchers mailbox.
+ */
+trait ExecutableMailbox extends Runnable { self: MessageQueue =>
+
+ def dispatcher: ExecutorBasedEventDrivenDispatcher
+
+ final def run = {
+ try {
+ processMailbox()
+ } catch {
+ case ie: InterruptedException =>
+ }
+ finally {
+ dispatcherLock.unlock()
+ }
+ if (!self.isEmpty)
+ dispatcher.reRegisterForExecution(this)
+ }
+
+ /**
+ * Process the messages in the mailbox
+ *
+ * @return true if the processing finished before the mailbox was empty, due to the throughput constraint
+ */
+ final def processMailbox() {
+ if (!self.suspended.locked) {
+ var nextMessage = self.dequeue
+ if (nextMessage ne null) { //If we have a message
+ if (dispatcher.throughput <= 1) //If we only run one message per process
+ nextMessage.invoke //Just run it
+ else { //But otherwise, if we are throttled, we need to do some book-keeping
+ var processedMessages = 0
+ val isDeadlineEnabled = dispatcher.throughputDeadlineTime > 0
+ val deadlineNs = if (isDeadlineEnabled) System.nanoTime + TimeUnit.MILLISECONDS.toNanos(dispatcher.throughputDeadlineTime)
+ else 0
+ do {
+ nextMessage.invoke
+ nextMessage =
+ if (self.suspended.locked) {
+ null // If we are suspended, abort
+ } else { // If we aren't suspended, we need to make sure we're not overstepping our boundaries
+ processedMessages += 1
+ if ((processedMessages >= dispatcher.throughput) || (isDeadlineEnabled && System.nanoTime >= deadlineNs)) // If we're throttled, break out
+ null //We reached our boundaries, abort
+ else self.dequeue //Dequeue the next message
+ }
+ } while (nextMessage ne null)
+ }
+ }
+ }
+ }
+}
+
+object PriorityGenerator {
+ /**
+ * Creates a PriorityGenerator that uses the supplied function as priority generator
+ */
+ def apply(priorityFunction: Any => Int): PriorityGenerator = new PriorityGenerator {
+ def gen(message: Any): Int = priorityFunction(message)
+ }
+}
+
+/**
+ * A PriorityGenerator is a convenience API to create a Comparator that orders the messages of a
+ * PriorityExecutorBasedEventDrivenDispatcher
+ */
+abstract class PriorityGenerator extends java.util.Comparator[MessageInvocation] {
+ def gen(message: Any): Int
+
+ final def compare(thisMessage: MessageInvocation, thatMessage: MessageInvocation): Int =
+ gen(thisMessage.message) - gen(thatMessage.message)
+}
+
+/**
+ * A version of ExecutorBasedEventDrivenDispatcher that gives all actors registered to it a priority mailbox,
+ * prioritized according to the supplied comparator.
+ *
+ * The dispatcher will process the messages with the _lowest_ priority first.
+ */
+class PriorityExecutorBasedEventDrivenDispatcher(
+ name: String,
+ val comparator: java.util.Comparator[MessageInvocation],
+ throughput: Int = Dispatchers.THROUGHPUT,
+ throughputDeadlineTime: Int = Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS,
+ mailboxType: MailboxType = Dispatchers.MAILBOX_TYPE,
+ config: ThreadPoolConfig = ThreadPoolConfig()) extends ExecutorBasedEventDrivenDispatcher(name, throughput, throughputDeadlineTime, mailboxType, config) with PriorityMailbox {
+
+ def this(name: String, comparator: java.util.Comparator[MessageInvocation], throughput: Int, throughputDeadlineTime: Int, mailboxType: MailboxType) =
+ this(name, comparator, throughput, throughputDeadlineTime, mailboxType, ThreadPoolConfig()) // Needed for Java API usage
+
+ def this(name: String, comparator: java.util.Comparator[MessageInvocation], throughput: Int, mailboxType: MailboxType) =
+ this(name, comparator, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType) // Needed for Java API usage
+
+ def this(name: String, comparator: java.util.Comparator[MessageInvocation], throughput: Int) =
+ this(name, comparator, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
+
+ def this(name: String, comparator: java.util.Comparator[MessageInvocation], config: ThreadPoolConfig) =
+ this(name, comparator, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE, config)
+
+ def this(name: String, comparator: java.util.Comparator[MessageInvocation]) =
+ this(name, comparator, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
+}
+
+/**
+ * Can be used to give an ExecutorBasedEventDrivenDispatcher's actors priority-enabled mailboxes
+ *
+ * Usage:
+ * new ExecutorBasedEventDrivenDispatcher(...) with PriorityMailbox {
+ * val comparator = ...comparator that determines mailbox priority ordering...
+ * }
+ */
+trait PriorityMailbox { self: ExecutorBasedEventDrivenDispatcher =>
+ def comparator: java.util.Comparator[MessageInvocation]
+
+ override def createMailbox(actorRef: ActorRef): AnyRef = self.mailboxType match {
+ case b: UnboundedMailbox =>
+ new UnboundedPriorityMessageQueue(comparator) with ExecutableMailbox {
+ @inline
+ final def dispatcher = self
+ }
+
+ case b: BoundedMailbox =>
+ new BoundedPriorityMessageQueue(b.capacity, b.pushTimeOut, comparator) with ExecutableMailbox {
+ @inline
+ final def dispatcher = self
+ }
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala b/test/disabled/presentation/akka/src/akka/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala
new file mode 100644
index 0000000..4cba8ee
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala
@@ -0,0 +1,165 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.dispatch
+
+import akka.actor.{ ActorRef, Actor, IllegalActorStateException }
+import akka.util.{ ReflectiveAccess, Switch }
+
+import java.util.Queue
+import java.util.concurrent.atomic.{ AtomicReference, AtomicInteger }
+import java.util.concurrent.{ TimeUnit, ExecutorService, RejectedExecutionException, ConcurrentLinkedQueue, LinkedBlockingQueue }
+import util.DynamicVariable
+
+/**
+ * An executor based event driven dispatcher which will try to redistribute work from busy actors to idle actors. It is assumed
+ * that all actors using the same instance of this dispatcher can process all messages that have been sent to one of the actors. I.e. the
+ * actors belong to a pool of actors, and to the client there is no guarantee about which actor instance actually processes a given message.
+ * <p/>
+ * Although the technique used in this implementation is commonly known as "work stealing", the actual implementation is probably
+ * best described as "work donating" because the actor from which work is being stolen takes the initiative.
+ * <p/>
+ * The preferred way of creating dispatchers is to use
+ * the {@link akka.dispatch.Dispatchers} factory object.
+ *
+ * @see akka.dispatch.ExecutorBasedEventDrivenWorkStealingDispatcher
+ * @see akka.dispatch.Dispatchers
+ *
+ * @author Viktor Klang
+ */
+class ExecutorBasedEventDrivenWorkStealingDispatcher(
+ _name: String,
+ throughput: Int = Dispatchers.THROUGHPUT,
+ throughputDeadlineTime: Int = Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS,
+ mailboxType: MailboxType = Dispatchers.MAILBOX_TYPE,
+ config: ThreadPoolConfig = ThreadPoolConfig())
+ extends ExecutorBasedEventDrivenDispatcher(_name, throughput, throughputDeadlineTime, mailboxType, config) {
+
+ def this(_name: String, throughput: Int, throughputDeadlineTime: Int, mailboxType: MailboxType) =
+ this(_name, throughput, throughputDeadlineTime, mailboxType, ThreadPoolConfig()) // Needed for Java API usage
+
+ def this(_name: String, throughput: Int, mailboxType: MailboxType) =
+ this(_name, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType) // Needed for Java API usage
+
+ def this(_name: String, throughput: Int) =
+ this(_name, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
+
+ def this(_name: String, _config: ThreadPoolConfig) =
+ this(_name, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE, _config)
+
+ def this(_name: String, memberType: Class[_ <: Actor]) =
+ this(_name, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
+
+ def this(_name: String, mailboxType: MailboxType) =
+ this(_name, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType) // Needed for Java API usage
+
+ @volatile
+ private var actorType: Option[Class[_]] = None
+ @volatile
+ private var members = Vector[ActorRef]()
+ private val donationInProgress = new DynamicVariable(false)
+
+ private[akka] override def register(actorRef: ActorRef) = {
+ //Verify actor type conformity
+ actorType match {
+ case None => actorType = Some(actorRef.actor.getClass)
+ case Some(aType) =>
+ if (aType != actorRef.actor.getClass)
+ throw new IllegalActorStateException(String.format(
+ "Can't register actor %s in a work stealing dispatcher which already knows actors of type %s",
+ actorRef, aType))
+ }
+
+ synchronized { members :+= actorRef } //Update members
+ super.register(actorRef)
+ }
+
+ private[akka] override def unregister(actorRef: ActorRef) = {
+ synchronized { members = members.filterNot(actorRef eq) } //Update members
+ super.unregister(actorRef)
+ }
+
+ override private[akka] def dispatch(invocation: MessageInvocation) = {
+ val mbox = getMailbox(invocation.receiver)
+ if (donationInProgress.value == false && (!mbox.isEmpty || mbox.dispatcherLock.locked) && attemptDonationOf(invocation, mbox)) {
+ //We were busy and we got to donate the message to some other lucky guy, we're done here
+ } else {
+ mbox enqueue invocation
+ registerForExecution(mbox)
+ }
+ }
+
+ override private[akka] def reRegisterForExecution(mbox: MessageQueue with ExecutableMailbox): Unit = {
+ try {
+ donationInProgress.value = true
+ while (donateFrom(mbox)) {} //When we reregister, first donate messages to another actor
+ } finally { donationInProgress.value = false }
+
+ if (!mbox.isEmpty) //If we still have messages left to process, reschedule for execution
+ super.reRegisterForExecution(mbox)
+ }
+
+ /**
+ * Returns true if it successfully donated a message
+ */
+ protected def donateFrom(donorMbox: MessageQueue with ExecutableMailbox): Boolean = {
+ val actors = members // copy to prevent concurrent modifications having any impact
+
+ // we risk picking a thief that is unregistered from the dispatcher in the meantime, but that typically means
+ // the dispatcher is being shut down...
+ // Start index is seeded by the current time
+ doFindDonorRecipient(donorMbox, actors, (System.currentTimeMillis % actors.size).asInstanceOf[Int]) match {
+ case null => false
+ case recipient => donate(donorMbox.dequeue, recipient)
+ }
+ }
+
+ /**
+ * Returns true if the donation succeeded or false otherwise
+ */
+ protected def attemptDonationOf(message: MessageInvocation, donorMbox: MessageQueue with ExecutableMailbox): Boolean = try {
+ donationInProgress.value = true
+ val actors = members // copy to prevent concurrent modifications having any impact
+ doFindDonorRecipient(donorMbox, actors, System.identityHashCode(message) % actors.size) match {
+ case null => false
+ case recipient => donate(message, recipient)
+ }
+ } finally { donationInProgress.value = false }
+
+ /**
+ * Rewrites the message and adds that message to the recipients mailbox
+ * returns true if the message is non-null
+ */
+ protected def donate(organ: MessageInvocation, recipient: ActorRef): Boolean = {
+ if (organ ne null) {
+ if (organ.senderFuture.isDefined) recipient.postMessageToMailboxAndCreateFutureResultWithTimeout[Any](
+ organ.message, recipient.timeout, organ.sender, organ.senderFuture)
+ else if (organ.sender.isDefined) recipient.postMessageToMailbox(organ.message, organ.sender)
+ else recipient.postMessageToMailbox(organ.message, None)
+ true
+ } else false
+ }
+
+ /**
+ * Returns an available recipient for the message, if any
+ */
+ protected def doFindDonorRecipient(donorMbox: MessageQueue with ExecutableMailbox, potentialRecipients: Vector[ActorRef], startIndex: Int): ActorRef = {
+ val prSz = potentialRecipients.size
+ var i = 0
+ var recipient: ActorRef = null
+
+ while ((i < prSz) && (recipient eq null)) {
+ val actor = potentialRecipients((i + startIndex) % prSz) //Wrap-around, one full lap
+ val mbox = getMailbox(actor)
+
+ if ((mbox ne donorMbox) && mbox.isEmpty) { //Don't donate to yourself
+ recipient = actor //Found!
+ }
+
+ i += 1
+ }
+
+ recipient // nothing found, reuse same start index next time
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/MailboxHandling.scala b/test/disabled/presentation/akka/src/akka/dispatch/MailboxHandling.scala
new file mode 100644
index 0000000..4c00577
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/dispatch/MailboxHandling.scala
@@ -0,0 +1,68 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.dispatch
+
+import akka.AkkaException
+
+import java.util.{ Comparator, PriorityQueue }
+import java.util.concurrent._
+import akka.util._
+
+class MessageQueueAppendFailedException(message: String, cause: Throwable = null) extends AkkaException(message, cause)
+
+/**
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+trait MessageQueue {
+ val dispatcherLock = new SimpleLock
+ val suspended = new SimpleLock
+ def enqueue(handle: MessageInvocation)
+ def dequeue(): MessageInvocation
+ def size: Int
+ def isEmpty: Boolean
+}
+
+/**
+ * Mailbox configuration.
+ */
+sealed trait MailboxType
+
+case class UnboundedMailbox() extends MailboxType
+case class BoundedMailbox(
+ val capacity: Int = { if (Dispatchers.MAILBOX_CAPACITY < 0) Int.MaxValue else Dispatchers.MAILBOX_CAPACITY },
+ val pushTimeOut: Duration = Dispatchers.MAILBOX_PUSH_TIME_OUT) extends MailboxType {
+ if (capacity < 0) throw new IllegalArgumentException("The capacity for BoundedMailbox can not be negative")
+ if (pushTimeOut eq null) throw new IllegalArgumentException("The push time-out for BoundedMailbox can not be null")
+}
+
+trait UnboundedMessageQueueSemantics extends MessageQueue { self: BlockingQueue[MessageInvocation] =>
+ @inline
+ final def enqueue(handle: MessageInvocation): Unit = this add handle
+ @inline
+ final def dequeue(): MessageInvocation = this.poll()
+}
+
+trait BoundedMessageQueueSemantics extends MessageQueue { self: BlockingQueue[MessageInvocation] =>
+ def pushTimeOut: Duration
+
+ final def enqueue(handle: MessageInvocation) {
+ if (pushTimeOut.length > 0) {
+ this.offer(handle, pushTimeOut.length, pushTimeOut.unit) || {
+ throw new MessageQueueAppendFailedException("Couldn't enqueue message " + handle + " to " + toString)
+ }
+ } else this put handle
+ }
+
+ @inline
+ final def dequeue(): MessageInvocation = this.poll()
+}
+
+class DefaultUnboundedMessageQueue extends LinkedBlockingQueue[MessageInvocation] with UnboundedMessageQueueSemantics
+
+class DefaultBoundedMessageQueue(capacity: Int, val pushTimeOut: Duration) extends LinkedBlockingQueue[MessageInvocation](capacity) with BoundedMessageQueueSemantics
+
+class UnboundedPriorityMessageQueue(cmp: Comparator[MessageInvocation]) extends PriorityBlockingQueue[MessageInvocation](11, cmp) with UnboundedMessageQueueSemantics
+
+class BoundedPriorityMessageQueue(capacity: Int, val pushTimeOut: Duration, cmp: Comparator[MessageInvocation]) extends BoundedBlockingQueue[MessageInvocation](capacity, new PriorityQueue[MessageInvocation](11, cmp)) with BoundedMessageQueueSemantics
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/MessageHandling.scala b/test/disabled/presentation/akka/src/akka/dispatch/MessageHandling.scala
new file mode 100644
index 0000000..20887c3
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/dispatch/MessageHandling.scala
@@ -0,0 +1,260 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.dispatch
+
+import java.util.concurrent._
+import java.util.concurrent.atomic.AtomicLong
+import akka.event.EventHandler
+import akka.config.Configuration
+import akka.config.Config.TIME_UNIT
+import akka.util.{ Duration, Switch, ReentrantGuard }
+import java.util.concurrent.ThreadPoolExecutor.{ AbortPolicy, CallerRunsPolicy, DiscardOldestPolicy, DiscardPolicy }
+import akka.actor._
+
+/**
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+final case class MessageInvocation(val receiver: ActorRef,
+ val message: Any,
+ val sender: Option[ActorRef],
+ val senderFuture: Option[CompletableFuture[Any]]) {
+ if (receiver eq null) throw new IllegalArgumentException("Receiver can't be null")
+
+ def invoke = try {
+ receiver.invoke(this)
+ } catch {
+ case e: NullPointerException => throw new ActorInitializationException(
+ "Don't call 'self ! message' in the Actor's constructor (in Scala this means in the body of the class).")
+ }
+}
+
+final case class FutureInvocation[T](future: CompletableFuture[T], function: () => T, cleanup: () => Unit) extends Runnable {
+ def run = {
+ future complete (try {
+ Right(function())
+ } catch {
+ case e =>
+ EventHandler.error(e, this, e.getMessage)
+ Left(e)
+ }
+ finally {
+ cleanup()
+ })
+ }
+}
+
+object MessageDispatcher {
+ val UNSCHEDULED = 0
+ val SCHEDULED = 1
+ val RESCHEDULED = 2
+
+ implicit def defaultGlobalDispatcher = Dispatchers.defaultGlobalDispatcher
+}
+
+/**
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+trait MessageDispatcher {
+ import MessageDispatcher._
+
+ protected val uuids = new ConcurrentSkipListSet[Uuid]
+ protected val futures = new AtomicLong(0L)
+ protected val guard = new ReentrantGuard
+ protected val active = new Switch(false)
+
+ private var shutdownSchedule = UNSCHEDULED //This can be non-volatile since it is protected by guard withGuard
+
+ /**
+ * Creates and returns a mailbox for the given actor.
+ */
+ private[akka] def createMailbox(actorRef: ActorRef): AnyRef
+
+ /**
+ * Attaches the specified actorRef to this dispatcher
+ */
+ final def attach(actorRef: ActorRef): Unit = guard withGuard {
+ register(actorRef)
+ }
+
+ /**
+ * Detaches the specified actorRef from this dispatcher
+ */
+ final def detach(actorRef: ActorRef): Unit = guard withGuard {
+ unregister(actorRef)
+ }
+
+ private[akka] final def dispatchMessage(invocation: MessageInvocation): Unit = dispatch(invocation)
+
+ private[akka] final def dispatchFuture[T](block: () => T, timeout: Long): Future[T] = {
+ futures.getAndIncrement()
+ try {
+ val future = new DefaultCompletableFuture[T](timeout)
+
+ if (active.isOff)
+ guard withGuard { active.switchOn { start } }
+
+ executeFuture(FutureInvocation[T](future, block, futureCleanup))
+ future
+ } catch {
+ case e =>
+ futures.decrementAndGet
+ throw e
+ }
+ }
+
+ private val futureCleanup: () => Unit =
+ () => if (futures.decrementAndGet() == 0) {
+ guard withGuard {
+ if (futures.get == 0 && uuids.isEmpty) {
+ shutdownSchedule match {
+ case UNSCHEDULED =>
+ shutdownSchedule = SCHEDULED
+ Scheduler.scheduleOnce(shutdownAction, timeoutMs, TimeUnit.MILLISECONDS)
+ case SCHEDULED =>
+ shutdownSchedule = RESCHEDULED
+ case RESCHEDULED => //Already marked for reschedule
+ }
+ }
+ }
+ }
+
+ private[akka] def register(actorRef: ActorRef) {
+ if (actorRef.mailbox eq null)
+ actorRef.mailbox = createMailbox(actorRef)
+
+ uuids add actorRef.uuid
+ if (active.isOff) {
+ active.switchOn {
+ start
+ }
+ }
+ }
+
+ private[akka] def unregister(actorRef: ActorRef) = {
+ if (uuids remove actorRef.uuid) {
+ actorRef.mailbox = null
+ if (uuids.isEmpty && futures.get == 0) {
+ shutdownSchedule match {
+ case UNSCHEDULED =>
+ shutdownSchedule = SCHEDULED
+ Scheduler.scheduleOnce(shutdownAction, timeoutMs, TimeUnit.MILLISECONDS)
+ case SCHEDULED =>
+ shutdownSchedule = RESCHEDULED
+ case RESCHEDULED => //Already marked for reschedule
+ }
+ }
+ }
+ }
+
+ /**
+ * Traverses the list of actors (uuids) currently being attached to this dispatcher and stops those actors
+ */
+ def stopAllAttachedActors {
+ val i = uuids.iterator
+ while (i.hasNext()) {
+ val uuid = i.next()
+ Actor.registry.actorFor(uuid) match {
+ case Some(actor) => actor.stop()
+ case None => {}
+ }
+ }
+ }
+
+ private val shutdownAction = new Runnable {
+ def run = guard withGuard {
+ shutdownSchedule match {
+ case RESCHEDULED =>
+ shutdownSchedule = SCHEDULED
+ Scheduler.scheduleOnce(this, timeoutMs, TimeUnit.MILLISECONDS)
+ case SCHEDULED =>
+ if (uuids.isEmpty && futures.get == 0) {
+ active switchOff {
+ shutdown // shut down when the dispatcher's reference count is zero
+ }
+ }
+ shutdownSchedule = UNSCHEDULED
+ case UNSCHEDULED => //Do nothing
+ }
+ }
+ }
+
+ /**
+ * When the dispatcher no longer has any actors registered, how long will it wait until it shuts itself down, in Ms
+ * defaulting to your akka configs "akka.actor.dispatcher-shutdown-timeout" or otherwise, 1 Second
+ */
+ private[akka] def timeoutMs: Long = Dispatchers.DEFAULT_SHUTDOWN_TIMEOUT.toMillis
+
+ /**
+ * After the call to this method, the dispatcher mustn't begin any new message processing for the specified reference
+ */
+ def suspend(actorRef: ActorRef): Unit
+
+ /*
+ * After the call to this method, the dispatcher must begin any new message processing for the specified reference
+ */
+ def resume(actorRef: ActorRef): Unit
+
+ /**
+ * Will be called when the dispatcher is to queue an invocation for execution
+ */
+ private[akka] def dispatch(invocation: MessageInvocation): Unit
+
+ private[akka] def executeFuture(invocation: FutureInvocation[_]): Unit
+
+ /**
+ * Called one time every time an actor is attached to this dispatcher and this dispatcher was previously shutdown
+ */
+ private[akka] def start(): Unit
+
+ /**
+ * Called one time every time an actor is detached from this dispatcher and this dispatcher has no actors left attached
+ */
+ private[akka] def shutdown(): Unit
+
+ /**
+ * Returns the size of the mailbox for the specified actor
+ */
+ def mailboxSize(actorRef: ActorRef): Int
+
+ /**
+ * Returns the amount of futures queued for execution
+ */
+ def pendingFutures: Long = futures.get
+}
+
+/**
+ * Trait to be used for hooking in new dispatchers into Dispatchers.fromConfig
+ */
+abstract class MessageDispatcherConfigurator {
+ /**
+ * Returns an instance of MessageDispatcher given a Configuration
+ */
+ def configure(config: Configuration): MessageDispatcher
+
+ def mailboxType(config: Configuration): MailboxType = {
+ val capacity = config.getInt("mailbox-capacity", Dispatchers.MAILBOX_CAPACITY)
+ if (capacity < 1) UnboundedMailbox()
+ else BoundedMailbox(capacity, Duration(config.getInt("mailbox-push-timeout-time", Dispatchers.MAILBOX_PUSH_TIME_OUT.toMillis.toInt), TIME_UNIT))
+ }
+
+ def configureThreadPool(config: Configuration, createDispatcher: => (ThreadPoolConfig) => MessageDispatcher): ThreadPoolConfigDispatcherBuilder = {
+ import ThreadPoolConfigDispatcherBuilder.conf_?
+
+ //Apply the following options to the config if they are present in the config
+ ThreadPoolConfigDispatcherBuilder(createDispatcher, ThreadPoolConfig()).configure(
+ conf_?(config getInt "keep-alive-time")(time => _.setKeepAliveTime(Duration(time, TIME_UNIT))),
+ conf_?(config getDouble "core-pool-size-factor")(factor => _.setCorePoolSizeFromFactor(factor)),
+ conf_?(config getDouble "max-pool-size-factor")(factor => _.setMaxPoolSizeFromFactor(factor)),
+ conf_?(config getInt "executor-bounds")(bounds => _.setExecutorBounds(bounds)),
+ conf_?(config getBool "allow-core-timeout")(allow => _.setAllowCoreThreadTimeout(allow)),
+ conf_?(config getString "rejection-policy" map {
+ case "abort" => new AbortPolicy()
+ case "caller-runs" => new CallerRunsPolicy()
+ case "discard-oldest" => new DiscardOldestPolicy()
+ case "discard" => new DiscardPolicy()
+ case x => throw new IllegalArgumentException("[%s] is not a valid rejectionPolicy!" format x)
+ })(policy => _.setRejectionPolicy(policy)))
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/ThreadBasedDispatcher.scala b/test/disabled/presentation/akka/src/akka/dispatch/ThreadBasedDispatcher.scala
new file mode 100644
index 0000000..3169c70
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/dispatch/ThreadBasedDispatcher.scala
@@ -0,0 +1,52 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.dispatch
+
+import akka.actor.{ Actor, ActorRef }
+import akka.config.Config.config
+import akka.util.Duration
+
+import java.util.Queue
+import java.util.concurrent.{ ConcurrentLinkedQueue, BlockingQueue, TimeUnit, LinkedBlockingQueue }
+import akka.actor
+import java.util.concurrent.atomic.AtomicReference
+
+/**
+ * Dedicates a unique thread for each actor passed in as reference. Served through its messageQueue.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+class ThreadBasedDispatcher(_actor: ActorRef, _mailboxType: MailboxType)
+ extends ExecutorBasedEventDrivenDispatcher(
+ _actor.uuid.toString, Dispatchers.THROUGHPUT, -1, _mailboxType, ThreadBasedDispatcher.oneThread) {
+
+ private[akka] val owner = new AtomicReference[ActorRef](_actor)
+
+ def this(actor: ActorRef) =
+ this(actor, UnboundedMailbox()) // For Java API
+
+ def this(actor: ActorRef, capacity: Int) =
+ this(actor, BoundedMailbox(capacity)) //For Java API
+
+ def this(actor: ActorRef, capacity: Int, pushTimeOut: Duration) = //For Java API
+ this(actor, BoundedMailbox(capacity, pushTimeOut))
+
+ override def register(actorRef: ActorRef) = {
+ val actor = owner.get()
+ if ((actor ne null) && actorRef != actor) throw new IllegalArgumentException("Cannot register to anyone but " + actor)
+ owner.compareAndSet(null, actorRef) //Register if unregistered
+ super.register(actorRef)
+ }
+
+ override def unregister(actorRef: ActorRef) = {
+ super.unregister(actorRef)
+ owner.compareAndSet(actorRef, null) //Unregister (prevent memory leak)
+ }
+}
+
+object ThreadBasedDispatcher {
+ val oneThread: ThreadPoolConfig = ThreadPoolConfig(allowCorePoolTimeout = true, corePoolSize = 1, maxPoolSize = 1)
+}
+
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/ThreadPoolBuilder.scala b/test/disabled/presentation/akka/src/akka/dispatch/ThreadPoolBuilder.scala
new file mode 100644
index 0000000..e847610
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/dispatch/ThreadPoolBuilder.scala
@@ -0,0 +1,259 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.dispatch
+
+import java.util.Collection
+import java.util.concurrent._
+import atomic.{ AtomicLong, AtomicInteger }
+import ThreadPoolExecutor.CallerRunsPolicy
+
+import akka.util.Duration
+import akka.event.EventHandler
+
+object ThreadPoolConfig {
+ type Bounds = Int
+ type FlowHandler = Either[RejectedExecutionHandler, Bounds]
+ type QueueFactory = () => BlockingQueue[Runnable]
+
+ val defaultAllowCoreThreadTimeout: Boolean = false
+ val defaultCorePoolSize: Int = 16
+ val defaultMaxPoolSize: Int = 128
+ val defaultTimeout: Duration = Duration(60000L, TimeUnit.MILLISECONDS)
+ def defaultFlowHandler: FlowHandler = flowHandler(new CallerRunsPolicy)
+
+ def flowHandler(rejectionHandler: RejectedExecutionHandler): FlowHandler = Left(rejectionHandler)
+ def flowHandler(bounds: Int): FlowHandler = Right(bounds)
+
+ def fixedPoolSize(size: Int): Int = size
+ def scaledPoolSize(multiplier: Double): Int =
+ (Runtime.getRuntime.availableProcessors * multiplier).ceil.toInt
+
+ def arrayBlockingQueue(capacity: Int, fair: Boolean): QueueFactory =
+ () => new ArrayBlockingQueue[Runnable](capacity, fair)
+
+ def synchronousQueue(fair: Boolean): QueueFactory =
+ () => new SynchronousQueue[Runnable](fair)
+
+ def linkedBlockingQueue(): QueueFactory =
+ () => new LinkedBlockingQueue[Runnable]()
+
+ def linkedBlockingQueue(capacity: Int): QueueFactory =
+ () => new LinkedBlockingQueue[Runnable](capacity)
+
+ def reusableQueue(queue: BlockingQueue[Runnable]): QueueFactory =
+ () => queue
+
+ def reusableQueue(queueFactory: QueueFactory): QueueFactory = {
+ val queue = queueFactory()
+ () => queue
+ }
+}
+
+case class ThreadPoolConfig(allowCorePoolTimeout: Boolean = ThreadPoolConfig.defaultAllowCoreThreadTimeout,
+ corePoolSize: Int = ThreadPoolConfig.defaultCorePoolSize,
+ maxPoolSize: Int = ThreadPoolConfig.defaultMaxPoolSize,
+ threadTimeout: Duration = ThreadPoolConfig.defaultTimeout,
+ flowHandler: ThreadPoolConfig.FlowHandler = ThreadPoolConfig.defaultFlowHandler,
+ queueFactory: ThreadPoolConfig.QueueFactory = ThreadPoolConfig.linkedBlockingQueue()) {
+
+ final def createLazyExecutorService(threadFactory: ThreadFactory): ExecutorService =
+ new LazyExecutorServiceWrapper(createExecutorService(threadFactory))
+
+ final def createExecutorService(threadFactory: ThreadFactory): ExecutorService = {
+ flowHandler match {
+ case Left(rejectHandler) =>
+ val service = new ThreadPoolExecutor(corePoolSize, maxPoolSize, threadTimeout.length, threadTimeout.unit, queueFactory(), threadFactory, rejectHandler)
+ service.allowCoreThreadTimeOut(allowCorePoolTimeout)
+ service
+ case Right(bounds) =>
+ val service = new ThreadPoolExecutor(corePoolSize, maxPoolSize, threadTimeout.length, threadTimeout.unit, queueFactory(), threadFactory)
+ service.allowCoreThreadTimeOut(allowCorePoolTimeout)
+ new BoundedExecutorDecorator(service, bounds)
+ }
+ }
+}
+
+trait DispatcherBuilder {
+ def build: MessageDispatcher
+}
+
+object ThreadPoolConfigDispatcherBuilder {
+ def conf_?[T](opt: Option[T])(fun: (T) => ThreadPoolConfigDispatcherBuilder => ThreadPoolConfigDispatcherBuilder): Option[(ThreadPoolConfigDispatcherBuilder) => ThreadPoolConfigDispatcherBuilder] = opt map fun
+}
+
+case class ThreadPoolConfigDispatcherBuilder(dispatcherFactory: (ThreadPoolConfig) => MessageDispatcher, config: ThreadPoolConfig) extends DispatcherBuilder {
+ import ThreadPoolConfig._
+ def build = dispatcherFactory(config)
+
+ //TODO remove this, for backwards compat only
+ @deprecated("Use .build instead", "1.1")
+ def buildThreadPool = build
+
+ def withNewBoundedThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity(bounds: Int): ThreadPoolConfigDispatcherBuilder =
+ this.copy(config = config.copy(flowHandler = flowHandler(bounds), queueFactory = linkedBlockingQueue()))
+
+ def withNewThreadPoolWithCustomBlockingQueue(newQueueFactory: QueueFactory): ThreadPoolConfigDispatcherBuilder =
+ this.copy(config = config.copy(flowHandler = defaultFlowHandler, queueFactory = newQueueFactory))
+
+ def withNewThreadPoolWithCustomBlockingQueue(queue: BlockingQueue[Runnable]): ThreadPoolConfigDispatcherBuilder =
+ withNewThreadPoolWithCustomBlockingQueue(reusableQueue(queue))
+
+ def withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity: ThreadPoolConfigDispatcherBuilder =
+ this.copy(config = config.copy(queueFactory = linkedBlockingQueue(), flowHandler = defaultFlowHandler))
+
+ def withNewThreadPoolWithLinkedBlockingQueueWithCapacity(capacity: Int): ThreadPoolConfigDispatcherBuilder =
+ this.copy(config = config.copy(queueFactory = linkedBlockingQueue(capacity), flowHandler = defaultFlowHandler))
+
+ def withNewThreadPoolWithSynchronousQueueWithFairness(fair: Boolean): ThreadPoolConfigDispatcherBuilder =
+ this.copy(config = config.copy(queueFactory = synchronousQueue(fair), flowHandler = defaultFlowHandler))
+
+ def withNewThreadPoolWithArrayBlockingQueueWithCapacityAndFairness(capacity: Int, fair: Boolean): ThreadPoolConfigDispatcherBuilder =
+ this.copy(config = config.copy(queueFactory = arrayBlockingQueue(capacity, fair), flowHandler = defaultFlowHandler))
+
+ def setCorePoolSize(size: Int): ThreadPoolConfigDispatcherBuilder =
+ this.copy(config = config.copy(corePoolSize = size))
+
+ def setMaxPoolSize(size: Int): ThreadPoolConfigDispatcherBuilder =
+ this.copy(config = config.copy(maxPoolSize = size))
+
+ def setCorePoolSizeFromFactor(multiplier: Double): ThreadPoolConfigDispatcherBuilder =
+ setCorePoolSize(scaledPoolSize(multiplier))
+
+ def setMaxPoolSizeFromFactor(multiplier: Double): ThreadPoolConfigDispatcherBuilder =
+ setMaxPoolSize(scaledPoolSize(multiplier))
+
+ def setExecutorBounds(bounds: Int): ThreadPoolConfigDispatcherBuilder =
+ this.copy(config = config.copy(flowHandler = flowHandler(bounds)))
+
+ def setKeepAliveTimeInMillis(time: Long): ThreadPoolConfigDispatcherBuilder =
+ setKeepAliveTime(Duration(time, TimeUnit.MILLISECONDS))
+
+ def setKeepAliveTime(time: Duration): ThreadPoolConfigDispatcherBuilder =
+ this.copy(config = config.copy(threadTimeout = time))
+
+ def setRejectionPolicy(policy: RejectedExecutionHandler): ThreadPoolConfigDispatcherBuilder =
+ setFlowHandler(flowHandler(policy))
+
+ def setFlowHandler(newFlowHandler: FlowHandler): ThreadPoolConfigDispatcherBuilder =
+ this.copy(config = config.copy(flowHandler = newFlowHandler))
+
+ def setAllowCoreThreadTimeout(allow: Boolean): ThreadPoolConfigDispatcherBuilder =
+ this.copy(config = config.copy(allowCorePoolTimeout = allow))
+
+ def configure(fs: Option[Function[ThreadPoolConfigDispatcherBuilder, ThreadPoolConfigDispatcherBuilder]]*): ThreadPoolConfigDispatcherBuilder = fs.foldLeft(this)((c, f) => f.map(_(c)).getOrElse(c))
+}
+
+/**
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+class MonitorableThreadFactory(val name: String) extends ThreadFactory {
+ protected val counter = new AtomicLong
+
+ def newThread(runnable: Runnable) = new MonitorableThread(runnable, name)
+}
+
+/**
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+object MonitorableThread {
+ val DEFAULT_NAME = "MonitorableThread"
+
+ // FIXME use MonitorableThread.created and MonitorableThread.alive in monitoring
+ val created = new AtomicInteger
+ val alive = new AtomicInteger
+}
+
+/**
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+class MonitorableThread(runnable: Runnable, name: String)
+ extends Thread(runnable, name + "-" + MonitorableThread.created.incrementAndGet) {
+
+ setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
+ def uncaughtException(thread: Thread, cause: Throwable) = {}
+ })
+
+ override def run = {
+ try {
+ MonitorableThread.alive.incrementAndGet
+ super.run
+ } finally {
+ MonitorableThread.alive.decrementAndGet
+ }
+ }
+}
+
+/**
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+class BoundedExecutorDecorator(val executor: ExecutorService, bound: Int) extends ExecutorServiceDelegate {
+ protected val semaphore = new Semaphore(bound)
+
+ override def execute(command: Runnable) = {
+ semaphore.acquire
+ try {
+ executor.execute(new Runnable() {
+ def run = {
+ try {
+ command.run
+ } finally {
+ semaphore.release
+ }
+ }
+ })
+ } catch {
+ case e: RejectedExecutionException =>
+ EventHandler.warning(this, e.toString)
+ semaphore.release
+ case e: Throwable =>
+ EventHandler.error(e, this, e.getMessage)
+ throw e
+ }
+ }
+}
+
+trait ExecutorServiceDelegate extends ExecutorService {
+
+ def executor: ExecutorService
+
+ def execute(command: Runnable) = executor.execute(command)
+
+ def shutdown() { executor.shutdown() }
+
+ def shutdownNow() = executor.shutdownNow()
+
+ def isShutdown = executor.isShutdown
+
+ def isTerminated = executor.isTerminated
+
+ def awaitTermination(l: Long, timeUnit: TimeUnit) = executor.awaitTermination(l, timeUnit)
+
+ def submit[T](callable: Callable[T]) = executor.submit(callable)
+
+ def submit[T](runnable: Runnable, t: T) = executor.submit(runnable, t)
+
+ def submit(runnable: Runnable) = executor.submit(runnable)
+
+ def invokeAll[T](callables: Collection[_ <: Callable[T]]) = executor.invokeAll(callables)
+
+ def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = executor.invokeAll(callables, l, timeUnit)
+
+ def invokeAny[T](callables: Collection[_ <: Callable[T]]) = executor.invokeAny(callables)
+
+ def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = executor.invokeAny(callables, l, timeUnit)
+}
+
+trait LazyExecutorService extends ExecutorServiceDelegate {
+
+ def createExecutor: ExecutorService
+
+ lazy val executor = {
+ createExecutor
+ }
+}
+
+class LazyExecutorServiceWrapper(executorFactory: => ExecutorService) extends LazyExecutorService {
+ def createExecutor = executorFactory
+}
diff --git a/test/disabled/presentation/akka/src/akka/event/EventHandler.scala b/test/disabled/presentation/akka/src/akka/event/EventHandler.scala
new file mode 100644
index 0000000..af2fee6
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/event/EventHandler.scala
@@ -0,0 +1,235 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.event
+
+import akka.actor._
+import akka.config.Config._
+import akka.config.ConfigurationException
+import akka.util.{ ListenerManagement, ReflectiveAccess }
+import akka.AkkaException
+
+/**
+ * Event handler.
+ * <p/>
+ * Create, add and remove a listener:
+ * <pre>
+ * val eventHandlerListener = Actor.actorOf(new Actor {
+ * self.dispatcher = EventHandler.EventHandlerDispatcher
+ *
+ * def receive = {
+ * case EventHandler.Error(cause, instance, message) => ...
+ * case EventHandler.Warning(instance, message) => ...
+ * case EventHandler.Info(instance, message) => ...
+ * case EventHandler.Debug(instance, message) => ...
+ * case genericEvent => ...
+ * }
+ * })
+ *
+ * EventHandler.addListener(eventHandlerListener)
+ * ...
+ * EventHandler.removeListener(eventHandlerListener)
+ * </pre>
+ * <p/>
+ * However best is probably to register the listener in the 'akka.conf'
+ * configuration file.
+ * <p/>
+ * Log an error event:
+ * <pre>
+ * EventHandler.notify(EventHandler.Error(exception, this, message))
+ * </pre>
+ * Or use the direct methods (better performance):
+ * <pre>
+ * EventHandler.error(exception, this, message)
+ * </pre>
+ *
+ * Shut down the EventHandler:
+ * <pre>
+ * EventHandler.shutdown()
+ * </pre>
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+object EventHandler extends ListenerManagement {
+ import java.io.{ StringWriter, PrintWriter }
+ import java.text.DateFormat
+ import java.util.Date
+ import akka.dispatch.Dispatchers
+
+ val ErrorLevel = 1
+ val WarningLevel = 2
+ val InfoLevel = 3
+ val DebugLevel = 4
+
+ sealed trait Event {
+ @transient
+ val thread: Thread = Thread.currentThread
+ val level: Int
+ }
+ case class Error(cause: Throwable, instance: AnyRef, message: Any = "") extends Event {
+ override val level = ErrorLevel
+ }
+ case class Warning(instance: AnyRef, message: Any = "") extends Event {
+ override val level = WarningLevel
+ }
+ case class Info(instance: AnyRef, message: Any = "") extends Event {
+ override val level = InfoLevel
+ }
+ case class Debug(instance: AnyRef, message: Any = "") extends Event {
+ override val level = DebugLevel
+ }
+
+ val error = "[ERROR] [%s] [%s] [%s] %s\n%s".intern
+ val warning = "[WARN] [%s] [%s] [%s] %s".intern
+ val info = "[INFO] [%s] [%s] [%s] %s".intern
+ val debug = "[DEBUG] [%s] [%s] [%s] %s".intern
+ val generic = "[GENERIC] [%s] [%s]".intern
+ val ID = "event:handler".intern
+
+ class EventHandlerException extends AkkaException
+
+ lazy val EventHandlerDispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher(ID).build
+
+ val level: Int = config.getString("akka.event-handler-level", "INFO") match {
+ case "ERROR" => ErrorLevel
+ case "WARNING" => WarningLevel
+ case "INFO" => InfoLevel
+ case "DEBUG" => DebugLevel
+ case unknown => throw new ConfigurationException(
+ "Configuration option 'akka.event-handler-level' is invalid [" + unknown + "]")
+ }
+
+ /**
+ * Shuts down all event handler listeners including the event handle dispatcher.
+ */
+ def shutdown() {
+ foreachListener(_.stop())
+ EventHandlerDispatcher.shutdown()
+ }
+
+ def notify(event: Any) {
+ if (event.isInstanceOf[Event]) {
+ if (level >= event.asInstanceOf[Event].level) notifyListeners(event)
+ } else
+ notifyListeners(event)
+ }
+
+ def notify[T <: Event: ClassTag](event: => T) {
+ if (level >= levelFor(classTag[T].erasure.asInstanceOf[Class[_ <: Event]])) notifyListeners(event)
+ }
+
+ def error(cause: Throwable, instance: AnyRef, message: => String) {
+ if (level >= ErrorLevel) notifyListeners(Error(cause, instance, message))
+ }
+
+ def error(cause: Throwable, instance: AnyRef, message: Any) {
+ if (level >= ErrorLevel) notifyListeners(Error(cause, instance, message))
+ }
+
+ def error(instance: AnyRef, message: => String) {
+ if (level >= ErrorLevel) notifyListeners(Error(new EventHandlerException, instance, message))
+ }
+
+ def error(instance: AnyRef, message: Any) {
+ if (level >= ErrorLevel) notifyListeners(Error(new EventHandlerException, instance, message))
+ }
+
+ def warning(instance: AnyRef, message: => String) {
+ if (level >= WarningLevel) notifyListeners(Warning(instance, message))
+ }
+
+ def warning(instance: AnyRef, message: Any) {
+ if (level >= WarningLevel) notifyListeners(Warning(instance, message))
+ }
+
+ def info(instance: AnyRef, message: => String) {
+ if (level >= InfoLevel) notifyListeners(Info(instance, message))
+ }
+
+ def info(instance: AnyRef, message: Any) {
+ if (level >= InfoLevel) notifyListeners(Info(instance, message))
+ }
+
+ def debug(instance: AnyRef, message: => String) {
+ if (level >= DebugLevel) notifyListeners(Debug(instance, message))
+ }
+
+ def debug(instance: AnyRef, message: Any) {
+ if (level >= DebugLevel) notifyListeners(Debug(instance, message))
+ }
+
+ def isInfoEnabled = level >= InfoLevel
+
+ def isDebugEnabled = level >= DebugLevel
+
+ def formattedTimestamp = DateFormat.getInstance.format(new Date)
+
+ def stackTraceFor(e: Throwable) = {
+ val sw = new StringWriter
+ val pw = new PrintWriter(sw)
+ e.printStackTrace(pw)
+ sw.toString
+ }
+
+ private def levelFor(eventClass: Class[_ <: Event]) = {
+ if (classOf[Error].isAssignableFrom(eventClass)) ErrorLevel
+ else if (classOf[Warning].isAssignableFrom(eventClass)) WarningLevel
+ else if (classOf[Info].isAssignableFrom(eventClass)) InfoLevel
+ else if (classOf[Debug].isAssignableFrom(eventClass)) DebugLevel
+ else DebugLevel
+ }
+
+ class DefaultListener extends Actor {
+ self.id = ID
+ self.dispatcher = EventHandlerDispatcher
+
+ def receive = {
+ case event at Error(cause, instance, message) =>
+ println(error.format(
+ formattedTimestamp,
+ event.thread.getName,
+ instance.getClass.getSimpleName,
+ message,
+ stackTraceFor(cause)))
+ case event at Warning(instance, message) =>
+ println(warning.format(
+ formattedTimestamp,
+ event.thread.getName,
+ instance.getClass.getSimpleName,
+ message))
+ case event at Info(instance, message) =>
+ println(info.format(
+ formattedTimestamp,
+ event.thread.getName,
+ instance.getClass.getSimpleName,
+ message))
+ case event at Debug(instance, message) =>
+ println(debug.format(
+ formattedTimestamp,
+ event.thread.getName,
+ instance.getClass.getSimpleName,
+ message))
+ case event =>
+ println(generic.format(formattedTimestamp, event.toString))
+ }
+ }
+
+ val defaultListeners = config.getList("akka.event-handlers") match {
+ case Nil => "akka.event.EventHandler$DefaultListener" :: Nil
+ case listeners => listeners
+ }
+ defaultListeners foreach { listenerName =>
+ try {
+ ReflectiveAccess.getClassFor[Actor](listenerName) match {
+ case r: Right[_, Class[Actor]] => addListener(Actor.actorOf(r.b).start())
+ case l: Left[Exception, _] => throw l.a
+ }
+ } catch {
+ case e: Exception =>
+ throw new ConfigurationException(
+ "Event Handler specified in config can't be loaded [" + listenerName +
+ "] due to [" + e.toString + "]", e)
+ }
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/event/JavaEventHandler.java b/test/disabled/presentation/akka/src/akka/event/JavaEventHandler.java
new file mode 100644
index 0000000..7e6e2d4
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/event/JavaEventHandler.java
@@ -0,0 +1,35 @@
+package akka.event;
+
+
+import akka.actor.ActorRef;
+
+/**
+ * Java API for Akka EventHandler
+ */
+
+public class JavaEventHandler {
+
+
+ public static void notify(Object message){
+ EventHandler$.MODULE$.notify(message);
+ }
+
+ public static void debug(ActorRef instance, Object message){
+ EventHandler$.MODULE$.debug(instance, message);
+ }
+
+ public static void info(ActorRef instance, Object message){
+ EventHandler$.MODULE$.info(instance,message);
+ }
+
+ public static void warning(ActorRef instance, Object message){
+ EventHandler$.MODULE$.warning(instance,message);
+ }
+
+ public static void error(ActorRef instance, Object message){
+ EventHandler$.MODULE$.error(instance,message);
+ }
+
+}
+
+
diff --git a/test/disabled/presentation/akka/src/akka/japi/JavaAPI.scala b/test/disabled/presentation/akka/src/akka/japi/JavaAPI.scala
new file mode 100644
index 0000000..f5c4ccd
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/japi/JavaAPI.scala
@@ -0,0 +1,108 @@
+package akka.japi
+
+/**
+ * A Function interface. Used to create first-class-functions in Java (sort of).
+ */
+trait Function[T, R] {
+ def apply(param: T): R
+}
+
+/**
+ * A Function interface. Used to create 2-arg first-class-functions in Java (sort of).
+ */
+trait Function2[T1, T2, R] {
+ def apply(arg1: T1, arg2: T2): R
+}
+
+/**
+ * A Procedure is like a Function, but it doesn't produce a return value
+ */
+trait Procedure[T] {
+ def apply(param: T): Unit
+}
+
+/**
+ * A Procedure is like a Function, but it doesn't produce a return value
+ */
+trait Procedure2[T1, T2] {
+ def apply(param: T1, param2: T2): Unit
+}
+
+/**
+ * An executable piece of code that takes no parameters and doesn't return any value.
+ */
+trait SideEffect {
+ def apply: Unit
+}
+
+/**
+ * An executable piece of code that takes no parameters and doesn't return any value.
+ */
+trait Effect {
+ def apply: Unit
+}
+
+/**
+ * A constructor/factory, takes no parameters but creates a new value of type T every call
+ *
+ */
+trait Creator[T] {
+ def create: T
+}
+
+/**
+ * This class represents optional values. Instances of <code>Option</code>
+ * are either instances of case class <code>Some</code> or it is case
+ * object <code>None</code>.
+ * <p>
+ * Java API
+ */
+sealed abstract class Option[A] extends java.lang.Iterable[A] {
+ import scala.collection.JavaConversions._
+
+ def get: A
+ def isEmpty: Boolean
+ def isDefined = !isEmpty
+ def asScala: scala.Option[A]
+ def iterator = if (isEmpty) Iterator.empty else Iterator.single(get)
+}
+
+object Option {
+ /**
+ * <code>Option</code> factory that creates <code>Some</code>
+ */
+ def some[A](v: A): Option[A] = Some(v)
+
+ /**
+ * <code>Option</code> factory that creates <code>None</code>
+ */
+ def none[A] = None.asInstanceOf[Option[A]]
+
+ /**
+ * <code>Option</code> factory that creates <code>None</code> if
+ * <code>v</code> is <code>null</code>, <code>Some(v)</code> otherwise.
+ */
+ def option[A](v: A): Option[A] = if (v == null) none else some(v)
+
+ /**
+ * Class <code>Some[A]</code> represents existing values of type
+ * <code>A</code>.
+ */
+ final case class Some[A](v: A) extends Option[A] {
+ def get = v
+ def isEmpty = false
+ def asScala = scala.Some(v)
+ }
+
+ /**
+ * This case object represents non-existent values.
+ */
+ private case object None extends Option[Nothing] {
+ def get = throw new NoSuchElementException("None.get")
+ def isEmpty = true
+ def asScala = scala.None
+ }
+
+ implicit def java2ScalaOption[A](o: Option[A]): scala.Option[A] = o.asScala
+ implicit def scala2JavaOption[A](o: scala.Option[A]): Option[A] = if (o.isDefined) some(o.get) else none
+}
diff --git a/test/disabled/presentation/akka/src/akka/remoteinterface/RemoteEventHandler.scala b/test/disabled/presentation/akka/src/akka/remoteinterface/RemoteEventHandler.scala
new file mode 100644
index 0000000..1c75618
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/remoteinterface/RemoteEventHandler.scala
@@ -0,0 +1,43 @@
+package akka.remoteinterface
+
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+import akka.actor.Actor
+import akka.event.EventHandler
+
+/**
+ * Remote client and server event listener that pipes the events to the standard Akka EventHandler.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+class RemoteEventHandler extends Actor {
+ import EventHandler._
+
+ self.id = ID
+ self.dispatcher = EventHandlerDispatcher
+
+ def receive = {
+
+ // client
+ case RemoteClientError(cause, client, address) => EventHandler.error(cause, client, "RemoteClientError - Address[%s]" format address.toString)
+ case RemoteClientWriteFailed(request, cause, client, address) => EventHandler.error(cause, client, "RemoteClientWriteFailed - Request[%s] Address[%s]".format(request, address.toString))
+ case RemoteClientDisconnected(client, address) => EventHandler.info(client, "RemoteClientDisconnected - Address[%s]" format address.toString)
+ case RemoteClientConnected(client, address) => EventHandler.info(client, "RemoteClientConnected - Address[%s]" format address.toString)
+ case RemoteClientStarted(client, address) => EventHandler.info(client, "RemoteClientStarted - Address[%s]" format address.toString)
+ case RemoteClientShutdown(client, address) => EventHandler.info(client, "RemoteClientShutdown - Address[%s]" format address.toString)
+
+ // server
+ case RemoteServerError(cause, server) => EventHandler.error(cause, server, "RemoteServerError")
+ case RemoteServerWriteFailed(request, cause, server, clientAddress) => EventHandler.error(cause, server, "RemoteServerWriteFailed - Request[%s] Address[%s]" format (request, clientAddress.toString))
+ case RemoteServerStarted(server) => EventHandler.info(server, "RemoteServerStarted")
+ case RemoteServerShutdown(server) => EventHandler.info(server, "RemoteServerShutdown")
+ case RemoteServerClientConnected(server, clientAddress) => EventHandler.info(server, "RemoteServerClientConnected - Address[%s]" format clientAddress.toString)
+ case RemoteServerClientDisconnected(server, clientAddress) => EventHandler.info(server, "RemoteServerClientDisconnected - Address[%s]" format clientAddress.toString)
+ case RemoteServerClientClosed(server, clientAddress) => EventHandler.info(server, "RemoteServerClientClosed - Address[%s]" format clientAddress.toString)
+
+ case _ => //ignore other
+ }
+}
+
diff --git a/test/disabled/presentation/akka/src/akka/remoteinterface/RemoteInterface.scala b/test/disabled/presentation/akka/src/akka/remoteinterface/RemoteInterface.scala
new file mode 100644
index 0000000..5219c49
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/remoteinterface/RemoteInterface.scala
@@ -0,0 +1,493 @@
+/**
+ * Copyright (C) 2009-2010 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.remoteinterface
+
+import akka.japi.Creator
+import akka.actor._
+import akka.util._
+import akka.dispatch.CompletableFuture
+import akka.AkkaException
+
+import scala.beans.BeanProperty
+
+import java.net.InetSocketAddress
+import java.util.concurrent.ConcurrentHashMap
+import java.io.{ PrintWriter, PrintStream }
+import java.lang.reflect.InvocationTargetException
+
+trait RemoteModule {
+ val UUID_PREFIX = "uuid:".intern
+
+ def optimizeLocalScoped_?(): Boolean //Apply optimizations for remote operations in local scope
+ protected[akka] def notifyListeners(message: => Any): Unit
+
+ private[akka] def actors: ConcurrentHashMap[String, ActorRef]
+ private[akka] def actorsByUuid: ConcurrentHashMap[String, ActorRef]
+ private[akka] def actorsFactories: ConcurrentHashMap[String, () => ActorRef]
+ private[akka] def typedActors: ConcurrentHashMap[String, AnyRef]
+ private[akka] def typedActorsByUuid: ConcurrentHashMap[String, AnyRef]
+ private[akka] def typedActorsFactories: ConcurrentHashMap[String, () => AnyRef]
+
+ /** Lookup methods **/
+
+ private[akka] def findActorById(id: String): ActorRef = actors.get(id)
+
+ private[akka] def findActorByUuid(uuid: String): ActorRef = actorsByUuid.get(uuid)
+
+ private[akka] def findActorFactory(id: String): () => ActorRef = actorsFactories.get(id)
+
+ private[akka] def findTypedActorById(id: String): AnyRef = typedActors.get(id)
+
+ private[akka] def findTypedActorFactory(id: String): () => AnyRef = typedActorsFactories.get(id)
+
+ private[akka] def findTypedActorByUuid(uuid: String): AnyRef = typedActorsByUuid.get(uuid)
+
+ private[akka] def findActorByIdOrUuid(id: String, uuid: String): ActorRef = {
+ var actorRefOrNull = if (id.startsWith(UUID_PREFIX)) findActorByUuid(id.substring(UUID_PREFIX.length))
+ else findActorById(id)
+ if (actorRefOrNull eq null) actorRefOrNull = findActorByUuid(uuid)
+ actorRefOrNull
+ }
+
+ private[akka] def findTypedActorByIdOrUuid(id: String, uuid: String): AnyRef = {
+ var actorRefOrNull = if (id.startsWith(UUID_PREFIX)) findTypedActorByUuid(id.substring(UUID_PREFIX.length))
+ else findTypedActorById(id)
+ if (actorRefOrNull eq null) actorRefOrNull = findTypedActorByUuid(uuid)
+ actorRefOrNull
+ }
+}
+
+/**
+ * Life-cycle events for RemoteClient.
+ */
+sealed trait RemoteClientLifeCycleEvent
+case class RemoteClientError(
+ @BeanProperty cause: Throwable,
+ @BeanProperty client: RemoteClientModule,
+ @BeanProperty remoteAddress: InetSocketAddress) extends RemoteClientLifeCycleEvent
+case class RemoteClientDisconnected(
+ @BeanProperty client: RemoteClientModule,
+ @BeanProperty remoteAddress: InetSocketAddress) extends RemoteClientLifeCycleEvent
+case class RemoteClientConnected(
+ @BeanProperty client: RemoteClientModule,
+ @BeanProperty remoteAddress: InetSocketAddress) extends RemoteClientLifeCycleEvent
+case class RemoteClientStarted(
+ @BeanProperty client: RemoteClientModule,
+ @BeanProperty remoteAddress: InetSocketAddress) extends RemoteClientLifeCycleEvent
+case class RemoteClientShutdown(
+ @BeanProperty client: RemoteClientModule,
+ @BeanProperty remoteAddress: InetSocketAddress) extends RemoteClientLifeCycleEvent
+case class RemoteClientWriteFailed(
+ @BeanProperty request: AnyRef,
+ @BeanProperty cause: Throwable,
+ @BeanProperty client: RemoteClientModule,
+ @BeanProperty remoteAddress: InetSocketAddress) extends RemoteClientLifeCycleEvent
+
+/**
+ * Life-cycle events for RemoteServer.
+ */
+sealed trait RemoteServerLifeCycleEvent
+case class RemoteServerStarted(
+ @BeanProperty val server: RemoteServerModule) extends RemoteServerLifeCycleEvent
+case class RemoteServerShutdown(
+ @BeanProperty val server: RemoteServerModule) extends RemoteServerLifeCycleEvent
+case class RemoteServerError(
+ @BeanProperty val cause: Throwable,
+ @BeanProperty val server: RemoteServerModule) extends RemoteServerLifeCycleEvent
+case class RemoteServerClientConnected(
+ @BeanProperty val server: RemoteServerModule,
+ @BeanProperty val clientAddress: Option[InetSocketAddress]) extends RemoteServerLifeCycleEvent
+case class RemoteServerClientDisconnected(
+ @BeanProperty val server: RemoteServerModule,
+ @BeanProperty val clientAddress: Option[InetSocketAddress]) extends RemoteServerLifeCycleEvent
+case class RemoteServerClientClosed(
+ @BeanProperty val server: RemoteServerModule,
+ @BeanProperty val clientAddress: Option[InetSocketAddress]) extends RemoteServerLifeCycleEvent
+case class RemoteServerWriteFailed(
+ @BeanProperty request: AnyRef,
+ @BeanProperty cause: Throwable,
+ @BeanProperty server: RemoteServerModule,
+ @BeanProperty clientAddress: Option[InetSocketAddress]) extends RemoteServerLifeCycleEvent
+
+/**
+ * Thrown for example when trying to send a message using a RemoteClient that is either not started or shut down.
+ */
+class RemoteClientException private[akka] (
+ message: String,
+ @BeanProperty val client: RemoteClientModule,
+ val remoteAddress: InetSocketAddress, cause: Throwable = null) extends AkkaException(message, cause)
+
+/**
+ * Thrown when the remote server actor dispatching fails for some reason.
+ */
+class RemoteServerException private[akka] (message: String) extends AkkaException(message)
+
+/**
+ * Thrown when a remote exception sent over the wire cannot be loaded and instantiated
+ */
+case class CannotInstantiateRemoteExceptionDueToRemoteProtocolParsingErrorException private[akka] (cause: Throwable, originalClassName: String, originalMessage: String)
+ extends AkkaException("\nParsingError[%s]\nOriginalException[%s]\nOriginalMessage[%s]"
+ .format(cause.toString, originalClassName, originalMessage)) {
+ override def printStackTrace = cause.printStackTrace
+ override def printStackTrace(printStream: PrintStream) = cause.printStackTrace(printStream)
+ override def printStackTrace(printWriter: PrintWriter) = cause.printStackTrace(printWriter)
+}
+
+abstract class RemoteSupport extends ListenerManagement with RemoteServerModule with RemoteClientModule {
+
+ lazy val eventHandler: ActorRef = {
+ val handler = Actor.actorOf[RemoteEventHandler].start()
+ // add the remote client and server listener that pipes the events to the event handler system
+ addListener(handler)
+ handler
+ }
+
+ def shutdown() {
+ eventHandler.stop()
+ removeListener(eventHandler)
+ this.shutdownClientModule()
+ this.shutdownServerModule()
+ clear
+ }
+
+ /**
+ * Creates a Client-managed ActorRef out of the Actor of the specified Class.
+ * If the supplied host and port is identical of the configured local node, it will be a local actor
+ * <pre>
+ * import Actor._
+ * val actor = actorOf(classOf[MyActor],"www.akka.io", 2552)
+ * actor.start()
+ * actor ! message
+ * actor.stop()
+ * </pre>
+ * You can create and start the actor in one statement like this:
+ * <pre>
+ * val actor = actorOf(classOf[MyActor],"www.akka.io", 2552).start()
+ * </pre>
+ */
+ @deprecated("Will be removed after 1.1", "1.1")
+ def actorOf(factory: => Actor, host: String, port: Int): ActorRef =
+ Actor.remote.clientManagedActorOf(() => factory, host, port)
+
+ /**
+ * Creates a Client-managed ActorRef out of the Actor of the specified Class.
+ * If the supplied host and port is identical of the configured local node, it will be a local actor
+ * <pre>
+ * import Actor._
+ * val actor = actorOf(classOf[MyActor],"www.akka.io",2552)
+ * actor.start()
+ * actor ! message
+ * actor.stop()
+ * </pre>
+ * You can create and start the actor in one statement like this:
+ * <pre>
+ * val actor = actorOf(classOf[MyActor],"www.akka.io",2552).start()
+ * </pre>
+ */
+ @deprecated("Will be removed after 1.1", "1.1")
+ def actorOf(clazz: Class[_ <: Actor], host: String, port: Int): ActorRef =
+ clientManagedActorOf(() => createActorFromClass(clazz), host, port)
+
+ /**
+ * Creates a Client-managed ActorRef out of the Actor of the specified Class.
+ * If the supplied host and port is identical of the configured local node, it will be a local actor
+ * <pre>
+ * import Actor._
+ * val actor = actorOf[MyActor]("www.akka.io",2552)
+ * actor.start()
+ * actor ! message
+ * actor.stop()
+ * </pre>
+ * You can create and start the actor in one statement like this:
+ * <pre>
+ * val actor = actorOf[MyActor]("www.akka.io",2552).start()
+ * </pre>
+ */
+ @deprecated("Will be removed after 1.1", "1.1")
+ def actorOf[T <: Actor: ClassTag](host: String, port: Int): ActorRef =
+ clientManagedActorOf(() => createActorFromClass(classTag[T].erasure), host, port)
+
+ protected def createActorFromClass(clazz: Class[_]): Actor = {
+ import ReflectiveAccess.{ createInstance, noParams, noArgs }
+ createInstance[Actor](clazz, noParams, noArgs) match {
+ case Right(actor) => actor
+ case Left(exception) =>
+ val cause = exception match {
+ case i: InvocationTargetException => i.getTargetException
+ case _ => exception
+ }
+
+ throw new ActorInitializationException(
+ "Could not instantiate Actor of " + clazz +
+ "\nMake sure Actor is NOT defined inside a class/trait," +
+ "\nif so put it outside the class/trait, f.e. in a companion object," +
+ "\nOR try to change: 'actorOf[MyActor]' to 'actorOf(new MyActor)'.", cause)
+ }
+ }
+
+ protected override def manageLifeCycleOfListeners = false
+ protected[akka] override def notifyListeners(message: => Any): Unit = super.notifyListeners(message)
+
+ private[akka] val actors = new ConcurrentHashMap[String, ActorRef]
+ private[akka] val actorsByUuid = new ConcurrentHashMap[String, ActorRef]
+ private[akka] val actorsFactories = new ConcurrentHashMap[String, () => ActorRef]
+ private[akka] val typedActors = new ConcurrentHashMap[String, AnyRef]
+ private[akka] val typedActorsByUuid = new ConcurrentHashMap[String, AnyRef]
+ private[akka] val typedActorsFactories = new ConcurrentHashMap[String, () => AnyRef]
+
+ def clear {
+ actors.clear
+ actorsByUuid.clear
+ typedActors.clear
+ typedActorsByUuid.clear
+ actorsFactories.clear
+ typedActorsFactories.clear
+ }
+}
+
+/**
+ * This is the interface for the RemoteServer functionality, it's used in Actor.remote
+ */
+trait RemoteServerModule extends RemoteModule {
+ protected val guard = new ReentrantGuard
+
+ /**
+ * Signals whether the server is up and running or not
+ */
+ def isRunning: Boolean
+
+ /**
+ * Gets the name of the server instance
+ */
+ def name: String
+
+ /**
+ * Gets the address of the server instance
+ */
+ def address: InetSocketAddress
+
+ /**
+ * Starts the server up
+ */
+ def start(): RemoteServerModule =
+ start(ReflectiveAccess.Remote.configDefaultAddress.getAddress.getHostAddress,
+ ReflectiveAccess.Remote.configDefaultAddress.getPort,
+ None)
+
+ /**
+ * Starts the server up
+ */
+ def start(loader: ClassLoader): RemoteServerModule =
+ start(ReflectiveAccess.Remote.configDefaultAddress.getAddress.getHostAddress,
+ ReflectiveAccess.Remote.configDefaultAddress.getPort,
+ Option(loader))
+
+ /**
+ * Starts the server up
+ */
+ def start(host: String, port: Int): RemoteServerModule =
+ start(host, port, None)
+
+ /**
+ * Starts the server up
+ */
+ def start(host: String, port: Int, loader: ClassLoader): RemoteServerModule =
+ start(host, port, Option(loader))
+
+ /**
+ * Starts the server up
+ */
+ def start(host: String, port: Int, loader: Option[ClassLoader]): RemoteServerModule
+
+ /**
+ * Shuts the server down
+ */
+ def shutdownServerModule(): Unit
+
+ /**
+ * Register typed actor by interface name.
+ */
+ def registerTypedActor(intfClass: Class[_], typedActor: AnyRef): Unit = registerTypedActor(intfClass.getName, typedActor)
+
+ /**
+ * Register remote typed actor by a specific id.
+ * @param id custom actor id
+ * @param typedActor typed actor to register
+ */
+ def registerTypedActor(id: String, typedActor: AnyRef): Unit
+
+ /**
+ * Register typed actor by interface name.
+ */
+ def registerTypedPerSessionActor(intfClass: Class[_], factory: => AnyRef): Unit = registerTypedActor(intfClass.getName, factory)
+
+ /**
+ * Register typed actor by interface name.
+ * Java API
+ */
+ def registerTypedPerSessionActor(intfClass: Class[_], factory: Creator[AnyRef]): Unit = registerTypedActor(intfClass.getName, factory)
+
+ /**
+ * Register remote typed actor by a specific id.
+ * @param id custom actor id
+ * @param typedActor typed actor to register
+ */
+ def registerTypedPerSessionActor(id: String, factory: => AnyRef): Unit
+
+ /**
+ * Register remote typed actor by a specific id.
+ * @param id custom actor id
+ * @param typedActor typed actor to register
+ * Java API
+ */
+ def registerTypedPerSessionActor(id: String, factory: Creator[AnyRef]): Unit = registerTypedPerSessionActor(id, factory.create)
+
+ /**
+ * Register Remote Actor by the Actor's 'id' field. It starts the Actor if it is not started already.
+ */
+ def register(actorRef: ActorRef): Unit = register(actorRef.id, actorRef)
+
+ /**
+ * Register Remote Actor by the Actor's uuid field. It starts the Actor if it is not started already.
+ */
+ def registerByUuid(actorRef: ActorRef): Unit
+
+ /**
+ * Register Remote Actor by a specific 'id' passed as argument. The actor is registered by UUID rather than ID
+ * when prefixing the handle with the “uuid:” protocol.
+ * <p/>
+ * NOTE: If you use this method to register your remote actor then you must unregister the actor by this ID yourself.
+ */
+ def register(id: String, actorRef: ActorRef): Unit
+
+ /**
+ * Register Remote Session Actor by a specific 'id' passed as argument.
+ * <p/>
+ * NOTE: If you use this method to register your remote actor then you must unregister the actor by this ID yourself.
+ */
+ def registerPerSession(id: String, factory: => ActorRef): Unit
+
+ /**
+ * Register Remote Session Actor by a specific 'id' passed as argument.
+ * <p/>
+ * NOTE: If you use this method to register your remote actor then you must unregister the actor by this ID yourself.
+ * Java API
+ */
+ def registerPerSession(id: String, factory: Creator[ActorRef]): Unit = registerPerSession(id, factory.create)
+
+ /**
+ * Unregister Remote Actor that is registered using its 'id' field (not custom ID).
+ */
+ def unregister(actorRef: ActorRef): Unit
+
+ /**
+ * Unregister Remote Actor by specific 'id'.
+ * <p/>
+ * NOTE: You need to call this method if you have registered an actor by a custom ID.
+ */
+ def unregister(id: String): Unit
+
+ /**
+ * Unregister Remote Actor by specific 'id'.
+ * <p/>
+ * NOTE: You need to call this method if you have registered an actor by a custom ID.
+ */
+ def unregisterPerSession(id: String): Unit
+
+ /**
+ * Unregister Remote Typed Actor by specific 'id'.
+ * <p/>
+ * NOTE: You need to call this method if you have registered an actor by a custom ID.
+ */
+ def unregisterTypedActor(id: String): Unit
+
+ /**
+ * Unregister Remote Typed Actor by specific 'id'.
+ * <p/>
+ * NOTE: You need to call this method if you have registered an actor by a custom ID.
+ */
+ def unregisterTypedPerSessionActor(id: String): Unit
+}
+
+trait RemoteClientModule extends RemoteModule { self: RemoteModule =>
+
+ def actorFor(classNameOrServiceId: String, hostname: String, port: Int): ActorRef =
+ actorFor(classNameOrServiceId, classNameOrServiceId, Actor.TIMEOUT, hostname, port, None)
+
+ def actorFor(classNameOrServiceId: String, hostname: String, port: Int, loader: ClassLoader): ActorRef =
+ actorFor(classNameOrServiceId, classNameOrServiceId, Actor.TIMEOUT, hostname, port, Some(loader))
+
+ def actorFor(serviceId: String, className: String, hostname: String, port: Int): ActorRef =
+ actorFor(serviceId, className, Actor.TIMEOUT, hostname, port, None)
+
+ def actorFor(serviceId: String, className: String, hostname: String, port: Int, loader: ClassLoader): ActorRef =
+ actorFor(serviceId, className, Actor.TIMEOUT, hostname, port, Some(loader))
+
+ def actorFor(classNameOrServiceId: String, timeout: Long, hostname: String, port: Int): ActorRef =
+ actorFor(classNameOrServiceId, classNameOrServiceId, timeout, hostname, port, None)
+
+ def actorFor(classNameOrServiceId: String, timeout: Long, hostname: String, port: Int, loader: ClassLoader): ActorRef =
+ actorFor(classNameOrServiceId, classNameOrServiceId, timeout, hostname, port, Some(loader))
+
+ def actorFor(serviceId: String, className: String, timeout: Long, hostname: String, port: Int): ActorRef =
+ actorFor(serviceId, className, timeout, hostname, port, None)
+
+ def typedActorFor[T](intfClass: Class[T], serviceIdOrClassName: String, hostname: String, port: Int): T =
+ typedActorFor(intfClass, serviceIdOrClassName, serviceIdOrClassName, Actor.TIMEOUT, hostname, port, None)
+
+ def typedActorFor[T](intfClass: Class[T], serviceIdOrClassName: String, timeout: Long, hostname: String, port: Int): T =
+ typedActorFor(intfClass, serviceIdOrClassName, serviceIdOrClassName, timeout, hostname, port, None)
+
+ def typedActorFor[T](intfClass: Class[T], serviceIdOrClassName: String, timeout: Long, hostname: String, port: Int, loader: ClassLoader): T =
+ typedActorFor(intfClass, serviceIdOrClassName, serviceIdOrClassName, timeout, hostname, port, Some(loader))
+
+ def typedActorFor[T](intfClass: Class[T], serviceId: String, implClassName: String, timeout: Long, hostname: String, port: Int, loader: ClassLoader): T =
+ typedActorFor(intfClass, serviceId, implClassName, timeout, hostname, port, Some(loader))
+
+ @deprecated("Will be removed after 1.1", "1.1")
+ def clientManagedActorOf(factory: () => Actor, host: String, port: Int): ActorRef
+
+ /**
+ * Clean-up all open connections.
+ */
+ def shutdownClientModule(): Unit
+
+ /**
+ * Shuts down a specific client connected to the supplied remote address returns true if successful
+ */
+ def shutdownClientConnection(address: InetSocketAddress): Boolean
+
+ /**
+ * Restarts a specific client connected to the supplied remote address, but only if the client is not shut down
+ */
+ def restartClientConnection(address: InetSocketAddress): Boolean
+
+ /** Methods that needs to be implemented by a transport **/
+
+ protected[akka] def typedActorFor[T](intfClass: Class[T], serviceId: String, implClassName: String, timeout: Long, host: String, port: Int, loader: Option[ClassLoader]): T
+
+ protected[akka] def actorFor(serviceId: String, className: String, timeout: Long, hostname: String, port: Int, loader: Option[ClassLoader]): ActorRef
+
+ protected[akka] def send[T](message: Any,
+ senderOption: Option[ActorRef],
+ senderFuture: Option[CompletableFuture[T]],
+ remoteAddress: InetSocketAddress,
+ timeout: Long,
+ isOneWay: Boolean,
+ actorRef: ActorRef,
+ typedActorInfo: Option[Tuple2[String, String]],
+ actorType: ActorType,
+ loader: Option[ClassLoader]): Option[CompletableFuture[T]]
+
+ private[akka] def registerSupervisorForActor(actorRef: ActorRef): ActorRef
+
+ private[akka] def deregisterSupervisorForActor(actorRef: ActorRef): ActorRef
+
+ @deprecated("Will be removed after 1.1", "1.1")
+ private[akka] def registerClientManagedActor(hostname: String, port: Int, uuid: Uuid): Unit
+
+ @deprecated("Will be removed after 1.1", "1.1")
+ private[akka] def unregisterClientManagedActor(hostname: String, port: Int, uuid: Uuid): Unit
+}
diff --git a/test/disabled/presentation/akka/src/akka/routing/Iterators.scala b/test/disabled/presentation/akka/src/akka/routing/Iterators.scala
new file mode 100644
index 0000000..315e7be
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/routing/Iterators.scala
@@ -0,0 +1,49 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.routing
+
+import akka.actor.ActorRef
+import scala.collection.JavaConversions._
+import scala.collection.immutable.Seq
+
+/**
+ * An Iterator that is either always empty or yields an infinite number of Ts.
+ */
+trait InfiniteIterator[T] extends Iterator[T] {
+ val items: Seq[T]
+}
+
+/**
+ * CyclicIterator is a round-robin style InfiniteIterator that cycles the supplied List.
+ */
+case class CyclicIterator[T](val items: Seq[T]) extends InfiniteIterator[T] {
+ def this(items: java.util.List[T]) = this(items.toList)
+
+ @volatile
+ private[this] var current: Seq[T] = items
+
+ def hasNext = items != Nil
+
+ def next = {
+ val nc = if (current == Nil) items else current
+ current = nc.tail
+ nc.head
+ }
+
+ override def exists(f: T => Boolean): Boolean = items.exists(f)
+}
+
+/**
+ * This InfiniteIterator always returns the Actor that has the currently smallest mailbox
+ * useful for work-stealing.
+ */
+case class SmallestMailboxFirstIterator(val items: Seq[ActorRef]) extends InfiniteIterator[ActorRef] {
+ def this(items: java.util.List[ActorRef]) = this(items.toList)
+ def hasNext = items != Nil
+
+ def next = items.reduceLeft((a1, a2) => if (a1.mailboxSize < a2.mailboxSize) a1 else a2)
+
+ override def exists(f: ActorRef => Boolean): Boolean = items.exists(f)
+}
diff --git a/test/disabled/presentation/akka/src/akka/routing/Listeners.scala b/test/disabled/presentation/akka/src/akka/routing/Listeners.scala
new file mode 100644
index 0000000..04f6c12
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/routing/Listeners.scala
@@ -0,0 +1,37 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.routing
+
+import akka.actor.{ Actor, ActorRef }
+import java.util.concurrent.ConcurrentSkipListSet
+import scala.collection.JavaConversions._
+
+sealed trait ListenerMessage
+case class Listen(listener: ActorRef) extends ListenerMessage
+case class Deafen(listener: ActorRef) extends ListenerMessage
+case class WithListeners(f: (ActorRef) => Unit) extends ListenerMessage
+
+/**
+ * Listeners is a generic trait to implement listening capability on an Actor.
+ * <p/>
+ * Use the <code>gossip(msg)</code> method to have it sent to the listeners.
+ * <p/>
+ * Send <code>Listen(self)</code> to start listening.
+ * <p/>
+ * Send <code>Deafen(self)</code> to stop listening.
+ * <p/>
+ * Send <code>WithListeners(fun)</code> to traverse the current listeners.
+ */
+trait Listeners { self: Actor =>
+ private val listeners = new ConcurrentSkipListSet[ActorRef]
+
+ protected def listenerManagement: Receive = {
+ case Listen(l) => listeners add l
+ case Deafen(l) => listeners remove l
+ case WithListeners(f) => listeners foreach f
+ }
+
+ protected def gossip(msg: Any) = listeners foreach (_ ! msg)
+}
diff --git a/test/disabled/presentation/akka/src/akka/routing/Pool.scala b/test/disabled/presentation/akka/src/akka/routing/Pool.scala
new file mode 100644
index 0000000..d972bb8
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/routing/Pool.scala
@@ -0,0 +1,292 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.routing
+
+import akka.actor.{ Actor, ActorRef, PoisonPill }
+import java.util.concurrent.TimeUnit
+
+/**
+ * Actor pooling
+ *
+ * An actor pool is an message router for a set of delegate actors. The pool is an actor itself.
+ * There are a handful of basic concepts that need to be understood when working with and defining your pool.
+ *
+ * Selectors - A selector is a trait that determines how and how many pooled actors will receive an incoming message.
+ * Capacitors - A capacitor is a trait that influences the size of pool. There are effectively two types.
+ * The first determines the size itself - either fixed or bounded.
+ * The second determines how to adjust of the pool according to some internal pressure characteristic.
+ * Filters - A filter can be used to refine the raw pressure value returned from a capacitor.
+ *
+ * It should be pointed out that all actors in the pool are treated as essentially equivalent. This is not to say
+ * that one couldn't instance different classes within the pool, only that the pool, when selecting and routing,
+ * will not take any type information into consideration.
+ *
+ * @author Garrick Evans
+ */
+
+object ActorPool {
+ case object Stat
+ case class Stats(size: Int)
+}
+
+/**
+ * Defines the nature of an actor pool.
+ */
+trait ActorPool {
+ def instance(): ActorRef //Question, Instance of what?
+ def capacity(delegates: Seq[ActorRef]): Int //Question, What is the semantics of this return value?
+ def select(delegates: Seq[ActorRef]): Tuple2[Iterator[ActorRef], Int] //Question, Why does select return this instead of an ordered Set?
+}
+
+/**
+ * A default implementation of a pool, on each message to route,
+ * - checks the current capacity and adjusts accordingly if needed
+ * - routes the incoming message to a selection set of delegate actors
+ */
+trait DefaultActorPool extends ActorPool { this: Actor =>
+ import ActorPool._
+ import collection.mutable.LinkedList
+ import akka.actor.MaximumNumberOfRestartsWithinTimeRangeReached
+
+ protected var _delegates = Vector[ActorRef]()
+ private var _lastCapacityChange = 0
+ private var _lastSelectorCount = 0
+
+ override def postStop() = _delegates foreach { delegate =>
+ try {
+ delegate ! PoisonPill
+ } catch { case e: Exception => } //Ignore any exceptions here
+ }
+
+ protected def _route(): Receive = {
+ // for testing...
+ case Stat =>
+ self reply_? Stats(_delegates length)
+ case max: MaximumNumberOfRestartsWithinTimeRangeReached =>
+ _delegates = _delegates filterNot { _.uuid == max.victim.uuid }
+ case msg =>
+ resizeIfAppropriate()
+
+ select(_delegates) match {
+ case (selectedDelegates, count) =>
+ _lastSelectorCount = count
+ selectedDelegates foreach { _ forward msg } //Should we really send the same message to several actors?
+ }
+ }
+
+ private def resizeIfAppropriate() {
+ val requestedCapacity = capacity(_delegates)
+ val newDelegates = requestedCapacity match {
+ case qty if qty > 0 =>
+ _delegates ++ {
+ for (i ← 0 until requestedCapacity) yield {
+ val delegate = instance()
+ self startLink delegate
+ delegate
+ }
+ }
+ case qty if qty < 0 =>
+ _delegates.splitAt(_delegates.length + requestedCapacity) match {
+ case (keep, abandon) =>
+ abandon foreach { _ ! PoisonPill }
+ keep
+ }
+ case _ => _delegates //No change
+ }
+
+ _lastCapacityChange = requestedCapacity
+ _delegates = newDelegates
+ }
+}
+
+/**
+ * Selectors
+ * These traits define how, when a message needs to be routed, delegate(s) are chosen from the pool
+ */
+
+/**
+ * Returns the set of delegates with the least amount of message backlog.
+ */
+trait SmallestMailboxSelector {
+ def selectionCount: Int
+ def partialFill: Boolean
+
+ def select(delegates: Seq[ActorRef]): Tuple2[Iterator[ActorRef], Int] = {
+ var set: Seq[ActorRef] = Nil
+ var take = if (partialFill) math.min(selectionCount, delegates.length) else selectionCount
+
+ while (take > 0) {
+ set = delegates.sortWith(_.mailboxSize < _.mailboxSize).take(take) ++ set //Question, doesn't this risk selecting the same actor multiple times?
+ take -= set.size
+ }
+
+ (set.iterator, set.size)
+ }
+}
+
+/**
+ * Returns the set of delegates that occur sequentially 'after' the last delegate from the previous selection
+ */
+trait RoundRobinSelector {
+ private var _last: Int = -1;
+
+ def selectionCount: Int
+ def partialFill: Boolean
+
+ def select(delegates: Seq[ActorRef]): Tuple2[Iterator[ActorRef], Int] = {
+ val length = delegates.length
+ val take = if (partialFill) math.min(selectionCount, length)
+ else selectionCount
+
+ val set =
+ for (i ← 0 until take) yield {
+ _last = (_last + 1) % length
+ delegates(_last)
+ }
+
+ (set.iterator, set.size)
+ }
+}
+
+/**
+ * Capacitors
+ * These traits define how to alter the size of the pool
+ */
+
+/**
+ * Ensures a fixed number of delegates in the pool
+ */
+trait FixedSizeCapacitor {
+ def limit: Int
+ def capacity(delegates: Seq[ActorRef]): Int = (limit - delegates.size) max 0
+}
+
+/**
+ * Constrains the pool capacity to a bounded range
+ */
+trait BoundedCapacitor {
+ def lowerBound: Int
+ def upperBound: Int
+
+ def capacity(delegates: Seq[ActorRef]): Int = {
+ val current = delegates length
+ val delta = _eval(delegates)
+ val proposed = current + delta
+
+ if (proposed < lowerBound) delta + (lowerBound - proposed)
+ else if (proposed > upperBound) delta - (proposed - upperBound)
+ else delta
+ }
+
+ protected def _eval(delegates: Seq[ActorRef]): Int
+}
+
+/**
+ * Returns the number of delegates required to manage the current message backlogs
+ */
+trait MailboxPressureCapacitor {
+ def pressureThreshold: Int
+ def pressure(delegates: Seq[ActorRef]): Int =
+ delegates count { _.mailboxSize > pressureThreshold }
+}
+
+/**
+ * Returns the number of delegates required to respond to the number of pending futures
+ */
+trait ActiveFuturesPressureCapacitor {
+ def pressure(delegates: Seq[ActorRef]): Int =
+ delegates count { _.senderFuture.isDefined }
+}
+
+/**
+ */
+trait CapacityStrategy {
+ import ActorPool._
+
+ def pressure(delegates: Seq[ActorRef]): Int
+ def filter(pressure: Int, capacity: Int): Int
+
+ protected def _eval(delegates: Seq[ActorRef]): Int = filter(pressure(delegates), delegates.size)
+}
+
+trait FixedCapacityStrategy extends FixedSizeCapacitor
+trait BoundedCapacityStrategy extends CapacityStrategy with BoundedCapacitor
+
+/**
+ * Filters
+ * These traits refine the raw pressure reading into a more appropriate capacity delta.
+ */
+
+/**
+ * The basic filter trait that composes ramp-up and back-off subfiltering.
+ */
+trait Filter {
+ def rampup(pressure: Int, capacity: Int): Int
+ def backoff(pressure: Int, capacity: Int): Int
+
+ // pass through both filters just to be sure any internal counters
+ // are updated consistently. ramping up is always + and backing off
+ // is always - and each should return 0 otherwise...
+ def filter(pressure: Int, capacity: Int): Int =
+ rampup(pressure, capacity) + backoff(pressure, capacity)
+}
+
+trait BasicFilter extends Filter with BasicRampup with BasicBackoff
+
+/**
+ * Filter performs steady incremental growth using only the basic ramp-up subfilter
+ */
+trait BasicNoBackoffFilter extends BasicRampup {
+ def filter(pressure: Int, capacity: Int): Int = rampup(pressure, capacity)
+}
+
+/**
+ * Basic incremental growth as a percentage of the current pool capacity
+ */
+trait BasicRampup {
+ def rampupRate: Double
+
+ def rampup(pressure: Int, capacity: Int): Int =
+ if (pressure < capacity) 0 else math.ceil(rampupRate * capacity) toInt
+}
+
+/**
+ * Basic decrement as a percentage of the current pool capacity
+ */
+trait BasicBackoff {
+ def backoffThreshold: Double
+ def backoffRate: Double
+
+ def backoff(pressure: Int, capacity: Int): Int =
+ if (capacity > 0 && pressure / capacity < backoffThreshold) math.ceil(-1.0 * backoffRate * capacity) toInt else 0
+}
+/**
+ * This filter tracks the average pressure over the lifetime of the pool (or since last reset) and
+ * will begin to reduce capacity once this value drops below the provided threshold. The number of
+ * delegates to cull from the pool is determined by some scaling factor (the backoffRate) multiplied
+ * by the difference in capacity and pressure.
+ */
+trait RunningMeanBackoff {
+ def backoffThreshold: Double
+ def backoffRate: Double
+
+ private var _pressure: Double = 0.0
+ private var _capacity: Double = 0.0
+
+ def backoff(pressure: Int, capacity: Int): Int = {
+ _pressure += pressure
+ _capacity += capacity
+
+ if (capacity > 0 && pressure / capacity < backoffThreshold
+ && _capacity > 0 && _pressure / _capacity < backoffThreshold) //Why does the entire clause need to be true?
+ math.floor(-1.0 * backoffRate * (capacity - pressure)).toInt
+ else 0
+ }
+
+ def backoffReset {
+ _pressure = 0.0
+ _capacity = 0.0
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/routing/Routers.scala b/test/disabled/presentation/akka/src/akka/routing/Routers.scala
new file mode 100644
index 0000000..a4c34c5
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/routing/Routers.scala
@@ -0,0 +1,87 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.routing
+
+import akka.actor.{ UntypedActor, Actor, ActorRef }
+
+/**
+ * A Dispatcher is a trait whose purpose is to route incoming messages to actors.
+ */
+trait Dispatcher { this: Actor =>
+
+ protected def transform(msg: Any): Any = msg
+
+ protected def routes: PartialFunction[Any, ActorRef]
+
+ protected def broadcast(message: Any) {}
+
+ protected def dispatch: Receive = {
+ case Routing.Broadcast(message) =>
+ broadcast(message)
+ case a if routes.isDefinedAt(a) =>
+ if (isSenderDefined) routes(a).forward(transform(a))(someSelf)
+ else routes(a).!(transform(a))(None)
+ }
+
+ def receive = dispatch
+
+ private def isSenderDefined = self.senderFuture.isDefined || self.sender.isDefined
+}
+
+/**
+ * An UntypedDispatcher is an abstract class whose purpose is to route incoming messages to actors.
+ */
+abstract class UntypedDispatcher extends UntypedActor {
+ protected def transform(msg: Any): Any = msg
+
+ protected def route(msg: Any): ActorRef
+
+ protected def broadcast(message: Any) {}
+
+ private def isSenderDefined = self.senderFuture.isDefined || self.sender.isDefined
+
+ @throws(classOf[Exception])
+ def onReceive(msg: Any): Unit = {
+ if (msg.isInstanceOf[Routing.Broadcast]) broadcast(msg.asInstanceOf[Routing.Broadcast].message)
+ else {
+ val r = route(msg)
+ if (r eq null) throw new IllegalStateException("No route for " + msg + " defined!")
+ if (isSenderDefined) r.forward(transform(msg))(someSelf)
+ else r.!(transform(msg))(None)
+ }
+ }
+}
+
+/**
+ * A LoadBalancer is a specialized kind of Dispatcher, that is supplied an InfiniteIterator of targets
+ * to dispatch incoming messages to.
+ */
+trait LoadBalancer extends Dispatcher { self: Actor =>
+ protected def seq: InfiniteIterator[ActorRef]
+
+ protected def routes = {
+ case x if seq.hasNext => seq.next
+ }
+
+ override def broadcast(message: Any) = seq.items.foreach(_ ! message)
+
+ override def isDefinedAt(msg: Any) = seq.exists(_.isDefinedAt(msg))
+}
+
+/**
+ * A UntypedLoadBalancer is a specialized kind of UntypedDispatcher, that is supplied an InfiniteIterator of targets
+ * to dispatch incoming messages to.
+ */
+abstract class UntypedLoadBalancer extends UntypedDispatcher {
+ protected def seq: InfiniteIterator[ActorRef]
+
+ protected def route(msg: Any) =
+ if (seq.hasNext) seq.next
+ else null
+
+ override def broadcast(message: Any) = seq.items.foreach(_ ! message)
+
+ override def isDefinedAt(msg: Any) = seq.exists(_.isDefinedAt(msg))
+}
diff --git a/test/disabled/presentation/akka/src/akka/routing/Routing.scala b/test/disabled/presentation/akka/src/akka/routing/Routing.scala
new file mode 100644
index 0000000..befc124
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/routing/Routing.scala
@@ -0,0 +1,64 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.routing
+
+import akka.actor.{ Actor, ActorRef }
+import akka.actor.Actor._
+
+object Routing {
+
+ sealed trait RoutingMessage
+ case class Broadcast(message: Any) extends RoutingMessage
+
+ type PF[A, B] = PartialFunction[A, B]
+
+ /**
+ * Creates a new PartialFunction whose isDefinedAt is a combination
+ * of the two parameters, and whose apply is first to call filter.apply
+ * and then filtered.apply.
+ */
+ def filter[A, B](filter: PF[A, Unit], filtered: PF[A, B]): PF[A, B] = {
+ case a: A if filtered.isDefinedAt(a) && filter.isDefinedAt(a) =>
+ filter(a)
+ filtered(a)
+ }
+
+ /**
+ * Interceptor is a filter(x,y) where x.isDefinedAt is considered to be always true.
+ */
+ def intercept[A, B](interceptor: (A) => Unit, interceptee: PF[A, B]): PF[A, B] =
+ filter({ case a if a.isInstanceOf[A] => interceptor(a) }, interceptee)
+
+ /**
+ * Creates a LoadBalancer from the thunk-supplied InfiniteIterator.
+ */
+ def loadBalancerActor(actors: => InfiniteIterator[ActorRef]): ActorRef =
+ actorOf(new Actor with LoadBalancer {
+ val seq = actors
+ }).start()
+
+ /**
+ * Creates a Dispatcher given a routing and a message-transforming function.
+ */
+ def dispatcherActor(routing: PF[Any, ActorRef], msgTransformer: (Any) => Any): ActorRef =
+ actorOf(new Actor with Dispatcher {
+ override def transform(msg: Any) = msgTransformer(msg)
+ def routes = routing
+ }).start()
+
+ /**
+ * Creates a Dispatcher given a routing.
+ */
+ def dispatcherActor(routing: PF[Any, ActorRef]): ActorRef = actorOf(new Actor with Dispatcher {
+ def routes = routing
+ }).start()
+
+ /**
+ * Creates an actor that pipes all incoming messages to
+ * both another actor and through the supplied function
+ */
+ def loggerActor(actorToLog: ActorRef, logger: (Any) => Unit): ActorRef =
+ dispatcherActor({ case _ => actorToLog }, logger)
+}
diff --git a/test/disabled/presentation/akka/src/akka/util/Address.scala b/test/disabled/presentation/akka/src/akka/util/Address.scala
new file mode 100644
index 0000000..65b5c0a
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/util/Address.scala
@@ -0,0 +1,29 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+package akka.util
+
+import java.net.InetSocketAddress
+
+object Address {
+ def apply(hostname: String, port: Int) = new Address(hostname, port)
+ def apply(inetAddress: InetSocketAddress): Address = inetAddress match {
+ case null => null
+ case inet => new Address(inet.getAddress.getHostAddress, inet.getPort)
+ }
+}
+
+class Address(val hostname: String, val port: Int) {
+ override val hashCode: Int = {
+ var result = HashCode.SEED
+ result = HashCode.hash(result, hostname)
+ result = HashCode.hash(result, port)
+ result
+ }
+
+ override def equals(that: Any): Boolean = {
+ that.isInstanceOf[Address] &&
+ that.asInstanceOf[Address].hostname == hostname &&
+ that.asInstanceOf[Address].port == port
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/util/AkkaLoader.scala b/test/disabled/presentation/akka/src/akka/util/AkkaLoader.scala
new file mode 100644
index 0000000..cb246f2
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/util/AkkaLoader.scala
@@ -0,0 +1,94 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.util
+
+import akka.config.Config
+import akka.actor.Actor
+
+/*
+ * This class is responsible for booting up a stack of bundles and then shutting them down
+ */
+class AkkaLoader {
+ private val hasBooted = new Switch(false)
+
+ @volatile
+ private var _bundles: Option[Bootable] = None
+
+ def bundles = _bundles;
+
+ /*
+ * Boot initializes the specified bundles
+ */
+ def boot(withBanner: Boolean, b: Bootable): Unit = hasBooted switchOn {
+ if (withBanner) printBanner()
+ println("Starting Akka...")
+ b.onLoad
+ Thread.currentThread.setContextClassLoader(getClass.getClassLoader)
+ _bundles = Some(b)
+ println("Akka started successfully")
+ }
+
+ /*
+ * Shutdown, well, shuts down the bundles used in boot
+ */
+ def shutdown() {
+ hasBooted switchOff {
+ println("Shutting down Akka...")
+ _bundles.foreach(_.onUnload)
+ _bundles = None
+ Actor.shutdownHook.run
+ println("Akka succesfully shut down")
+ }
+ }
+
+ private def printBanner() {
+ println("""
+==============================================================================
+
+ ZZ:
+ ZZZZ
+ ZZZZZZ
+ ZZZ' ZZZ
+ ~7 7ZZ' ZZZ
+ :ZZZ: IZZ' ZZZ
+ ,OZZZZ.~ZZ? ZZZ
+ ZZZZ' 'ZZZ$ ZZZ
+ . $ZZZ ~ZZ$ ZZZ
+ .=Z?. .ZZZO ~ZZ7 OZZ
+ .ZZZZ7..:ZZZ~ 7ZZZ ZZZ~
+ .$ZZZ$Z+.ZZZZ ZZZ: ZZZ$
+ .,ZZZZ?' =ZZO= .OZZ 'ZZZ
+ .$ZZZZ+ .ZZZZ IZZZ ZZZ$
+ .ZZZZZ' .ZZZZ' .ZZZ$ ?ZZZ
+ .ZZZZZZ' .OZZZ? ?ZZZ 'ZZZ$
+ .?ZZZZZZ' .ZZZZ? .ZZZ? 'ZZZO
+ .+ZZZZZZ?' .7ZZZZ' .ZZZZ :ZZZZ
+ .ZZZZZZ$' .?ZZZZZ' .~ZZZZ 'ZZZZ.
+
+
+ NNNNN $NNNN+
+ NNNNN $NNNN+
+ NNNNN $NNNN+
+ NNNNN $NNNN+
+ NNNNN $NNNN+
+ =NNNNNNNNND$ NNNNN DDDDDD: $NNNN+ DDDDDN NDDNNNNNNNN,
+ NNNNNNNNNNNNND NNNNN DNNNNN $NNNN+ 8NNNNN= :NNNNNNNNNNNNNN
+ NNNNN$ DNNNNN NNNNN $NNNNN~ $NNNN+ NNNNNN NNNNN, :NNNNN+
+ ?DN~ NNNNN NNNNN MNNNNN $NNNN+:NNNNN7 $ND =NNNNN
+ DNNNNN NNNNNDNNNN$ $NNNNDNNNNN :DNNNNN
+ ZNDNNNNNNNNND NNNNNNNNNND, $NNNNNNNNNNN DNDNNNNNNNNNN
+ NNNNNNNDDINNNNN NNNNNNNNNNND $NNNNNNNNNNND ONNNNNNND8+NNNNN
+ :NNNND NNNNN NNNNNN DNNNN, $NNNNNO 7NNNND NNNNNO :NNNNN
+ DNNNN NNNNN NNNNN DNNNN $NNNN+ 8NNNNN NNNNN $NNNNN
+ DNNNNO NNNNNN NNNNN NNNNN $NNNN+ NNNNN$ NNNND, ,NNNNND
+ NNNNNNDDNNNNNNNN NNNNN =NNNNN $NNNN+ DNNNN? DNNNNNNDNNNNNNNND
+ NNNNNNNNN NNNN$ NNNNN 8NNNND $NNNN+ NNNNN= ,DNNNNNNND NNNNN$
+
+==============================================================================
+ Running version %s
+==============================================================================
+""".format(Config.VERSION))
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/util/Bootable.scala b/test/disabled/presentation/akka/src/akka/util/Bootable.scala
new file mode 100644
index 0000000..d07643e
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/util/Bootable.scala
@@ -0,0 +1,10 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.util
+
+trait Bootable {
+ def onLoad() {}
+ def onUnload() {}
+}
diff --git a/test/disabled/presentation/akka/src/akka/util/BoundedBlockingQueue.scala b/test/disabled/presentation/akka/src/akka/util/BoundedBlockingQueue.scala
new file mode 100644
index 0000000..f8deda7
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/util/BoundedBlockingQueue.scala
@@ -0,0 +1,326 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.util
+
+import java.util.concurrent.locks.ReentrantLock
+import java.util.concurrent.{ TimeUnit, BlockingQueue }
+import java.util.{ AbstractQueue, Queue, Collection, Iterator }
+
+class BoundedBlockingQueue[E <: AnyRef](
+ val maxCapacity: Int, private val backing: Queue[E]) extends AbstractQueue[E] with BlockingQueue[E] {
+
+ backing match {
+ case null => throw new IllegalArgumentException("Backing Queue may not be null")
+ case b: BlockingQueue[_] =>
+ require(maxCapacity > 0)
+ require(b.size() == 0)
+ require(b.remainingCapacity >= maxCapacity)
+ case b: Queue[_] =>
+ require(b.size() == 0)
+ require(maxCapacity > 0)
+ }
+
+ protected val lock = new ReentrantLock(false)
+
+ private val notEmpty = lock.newCondition()
+ private val notFull = lock.newCondition()
+
+ def put(e: E): Unit = { //Blocks until not full
+ if (e eq null) throw new NullPointerException
+ lock.lock()
+ try {
+ while (backing.size() == maxCapacity)
+ notFull.await()
+ require(backing.offer(e))
+ notEmpty.signal()
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ def take(): E = { //Blocks until not empty
+ lock.lockInterruptibly()
+ try {
+ while (backing.size() == 0)
+ notEmpty.await()
+ val e = backing.poll()
+ require(e ne null)
+ notFull.signal()
+ e
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ def offer(e: E): Boolean = { //Tries to do it immediately, if fail return false
+ if (e eq null) throw new NullPointerException
+ lock.lock()
+ try {
+ if (backing.size() == maxCapacity) false
+ else {
+ require(backing.offer(e)) //Should never fail
+ notEmpty.signal()
+ true
+ }
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ def offer(e: E, timeout: Long, unit: TimeUnit): Boolean = { //Tries to do it within the timeout, return false if fail
+ if (e eq null) throw new NullPointerException
+ var nanos = unit.toNanos(timeout)
+ lock.lockInterruptibly()
+ try {
+ while (backing.size() == maxCapacity) {
+ if (nanos <= 0)
+ return false
+ else
+ nanos = notFull.awaitNanos(nanos)
+ }
+ require(backing.offer(e)) //Should never fail
+ notEmpty.signal()
+ true
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ def poll(timeout: Long, unit: TimeUnit): E = { //Tries to do it within the timeout, returns null if fail
+ var nanos = unit.toNanos(timeout)
+ lock.lockInterruptibly()
+ try {
+ var result: E = null.asInstanceOf[E]
+ var hasResult = false
+ while (!hasResult) {
+ hasResult = backing.poll() match {
+ case null if nanos <= 0 =>
+ result = null.asInstanceOf[E]
+ true
+ case null =>
+ try {
+ nanos = notEmpty.awaitNanos(nanos)
+ } catch {
+ case ie: InterruptedException =>
+ notEmpty.signal()
+ throw ie
+ }
+ false
+ case e =>
+ notFull.signal()
+ result = e
+ true
+ }
+ }
+ result
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ def poll(): E = { //Tries to remove the head of the queue immediately, if fail, return null
+ lock.lock()
+ try {
+ backing.poll() match {
+ case null => null.asInstanceOf[E]
+ case e =>
+ notFull.signal()
+ e
+ }
+ } finally {
+ lock.unlock
+ }
+ }
+
+ override def remove(e: AnyRef): Boolean = { //Tries to do it immediately, if fail, return false
+ if (e eq null) throw new NullPointerException
+ lock.lock()
+ try {
+ if (backing remove e) {
+ notFull.signal()
+ true
+ } else false
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ override def contains(e: AnyRef): Boolean = {
+ if (e eq null) throw new NullPointerException
+ lock.lock()
+ try {
+ backing contains e
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ override def clear(): Unit = {
+ lock.lock()
+ try {
+ backing.clear
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ def remainingCapacity(): Int = {
+ lock.lock()
+ try {
+ maxCapacity - backing.size()
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ def size(): Int = {
+ lock.lock()
+ try {
+ backing.size()
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ def peek(): E = {
+ lock.lock()
+ try {
+ backing.peek()
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ def drainTo(c: Collection[_ >: E]): Int = drainTo(c, Int.MaxValue)
+
+ def drainTo(c: Collection[_ >: E], maxElements: Int): Int = {
+ if (c eq null) throw new NullPointerException
+ if (c eq this) throw new IllegalArgumentException
+ if (maxElements <= 0) 0
+ else {
+ lock.lock()
+ try {
+ var n = 0
+ var e: E = null.asInstanceOf[E]
+ while (n < maxElements) {
+ backing.poll() match {
+ case null => return n
+ case e =>
+ c add e
+ n += 1
+ }
+ }
+ n
+ } finally {
+ lock.unlock()
+ }
+ }
+ }
+
+ override def containsAll(c: Collection[_]): Boolean = {
+ lock.lock()
+ try {
+ backing containsAll c
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ override def removeAll(c: Collection[_]): Boolean = {
+ lock.lock()
+ try {
+ if (backing.removeAll(c)) {
+ val sz = backing.size()
+ if (sz < maxCapacity) notFull.signal()
+ if (sz > 0) notEmpty.signal() //FIXME needed?
+ true
+ } else false
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ override def retainAll(c: Collection[_]): Boolean = {
+ lock.lock()
+ try {
+ if (backing.retainAll(c)) {
+ val sz = backing.size()
+ if (sz < maxCapacity) notFull.signal() //FIXME needed?
+ if (sz > 0) notEmpty.signal()
+ true
+ } else false
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ def iterator(): Iterator[E] = {
+ lock.lock
+ try {
+ val elements = backing.toArray
+ new Iterator[E] {
+ var at = 0
+ var last = -1
+
+ def hasNext(): Boolean = at < elements.length
+
+ def next(): E = {
+ if (at >= elements.length) throw new NoSuchElementException
+ last = at
+ at += 1
+ elements(last).asInstanceOf[E]
+ }
+
+ def remove(): Unit = {
+ if (last < 0) throw new IllegalStateException
+ val target = elements(last)
+ last = -1 //To avoid 2 subsequent removes without a next in between
+ lock.lock()
+ try {
+ val i = backing.iterator()
+ while (i.hasNext) {
+ if (i.next eq target) {
+ i.remove()
+ notFull.signal()
+ return ()
+ }
+ }
+ } finally {
+ lock.unlock()
+ }
+ }
+ }
+ } finally {
+ lock.unlock
+ }
+ }
+
+ override def toArray(): Array[AnyRef] = {
+ lock.lock()
+ try {
+ backing.toArray
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ override def isEmpty(): Boolean = {
+ lock.lock()
+ try {
+ backing.isEmpty()
+ } finally {
+ lock.unlock()
+ }
+ }
+
+ override def toArray[X](a: Array[X with AnyRef]) = {
+ lock.lock()
+ try {
+ backing.toArray[X](a)
+ } finally {
+ lock.unlock()
+ }
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/util/Crypt.scala b/test/disabled/presentation/akka/src/akka/util/Crypt.scala
new file mode 100644
index 0000000..3ce2d55
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/util/Crypt.scala
@@ -0,0 +1,44 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.util
+
+import java.security.{ MessageDigest, SecureRandom }
+
+/**
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+object Crypt {
+ val hex = "0123456789ABCDEF"
+ val lineSeparator = System.getProperty("line.separator")
+
+ lazy val random = SecureRandom.getInstance("SHA1PRNG")
+
+ def md5(text: String): String = md5(unifyLineSeparator(text).getBytes("ASCII"))
+
+ def md5(bytes: Array[Byte]): String = digest(bytes, MessageDigest.getInstance("MD5"))
+
+ def sha1(text: String): String = sha1(unifyLineSeparator(text).getBytes("ASCII"))
+
+ def sha1(bytes: Array[Byte]): String = digest(bytes, MessageDigest.getInstance("SHA1"))
+
+ def generateSecureCookie: String = {
+ val bytes = Array.fill(32)(0.byteValue)
+ random.nextBytes(bytes)
+ sha1(bytes)
+ }
+
+ def digest(bytes: Array[Byte], md: MessageDigest): String = {
+ md.update(bytes)
+ hexify(md.digest)
+ }
+
+ def hexify(bytes: Array[Byte]): String = {
+ val builder = new StringBuilder
+ bytes.foreach { byte => builder.append(hex.charAt((byte & 0xF) >> 4)).append(hex.charAt(byte & 0xF)) }
+ builder.toString
+ }
+
+ private def unifyLineSeparator(text: String): String = text.replaceAll(lineSeparator, "\n")
+}
diff --git a/test/disabled/presentation/akka/src/akka/util/Duration.scala b/test/disabled/presentation/akka/src/akka/util/Duration.scala
new file mode 100644
index 0000000..316cb86
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/util/Duration.scala
@@ -0,0 +1,437 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.util
+
+import java.util.concurrent.TimeUnit
+import TimeUnit._
+import java.lang.{ Long => JLong, Double => JDouble }
+
+object Duration {
+ def apply(length: Long, unit: TimeUnit): Duration = new FiniteDuration(length, unit)
+ def apply(length: Double, unit: TimeUnit): Duration = fromNanos(unit.toNanos(1) * length)
+ def apply(length: Long, unit: String): Duration = new FiniteDuration(length, timeUnit(unit))
+
+ def fromNanos(nanos: Long): Duration = {
+ if (nanos % 86400000000000L == 0) {
+ Duration(nanos / 86400000000000L, DAYS)
+ } else if (nanos % 3600000000000L == 0) {
+ Duration(nanos / 3600000000000L, HOURS)
+ } else if (nanos % 60000000000L == 0) {
+ Duration(nanos / 60000000000L, MINUTES)
+ } else if (nanos % 1000000000L == 0) {
+ Duration(nanos / 1000000000L, SECONDS)
+ } else if (nanos % 1000000L == 0) {
+ Duration(nanos / 1000000L, MILLISECONDS)
+ } else if (nanos % 1000L == 0) {
+ Duration(nanos / 1000L, MICROSECONDS)
+ } else {
+ Duration(nanos, NANOSECONDS)
+ }
+ }
+
+ def fromNanos(nanos: Double): Duration = fromNanos((nanos + 0.5).asInstanceOf[Long])
+
+ /**
+ * Construct a Duration by parsing a String. In case of a format error, a
+ * RuntimeException is thrown. See `unapply(String)` for more information.
+ */
+ def apply(s: String): Duration = unapply(s) getOrElse sys.error("format error")
+
+ /**
+ * Deconstruct a Duration into length and unit if it is finite.
+ */
+ def unapply(d: Duration): Option[(Long, TimeUnit)] = {
+ if (d.finite_?) {
+ Some((d.length, d.unit))
+ } else {
+ None
+ }
+ }
+
+ private val RE = ("""^\s*(\d+(?:\.\d+)?)\s*""" + // length part
+ "(?:" + // units are distinguished in separate match groups
+ "(d|day|days)|" +
+ "(h|hour|hours)|" +
+ "(min|minute|minutes)|" +
+ "(s|sec|second|seconds)|" +
+ "(ms|milli|millis|millisecond|milliseconds)|" +
+ "(µs|micro|micros|microsecond|microseconds)|" +
+ "(ns|nano|nanos|nanosecond|nanoseconds)" +
+ """)\s*$""").r // close the non-capturing group
+ private val REinf = """^\s*Inf\s*$""".r
+ private val REminf = """^\s*(?:-\s*|Minus)Inf\s*""".r
+
+ /**
+ * Parse String, return None if no match. Format is `"<length><unit>"`, where
+ * whitespace is allowed before, between and after the parts. Infinities are
+ * designated by `"Inf"` and `"-Inf"` or `"MinusInf"`.
+ */
+ def unapply(s: String): Option[Duration] = s match {
+ case RE(length, d, h, m, s, ms, mus, ns) =>
+ if (d ne null) Some(Duration(JDouble.parseDouble(length), DAYS)) else if (h ne null) Some(Duration(JDouble.parseDouble(length), HOURS)) else if (m ne null) Some(Duration(JDouble.parseDouble(length), MINUTES)) else if (s ne null) Some(Duration(JDouble.parseDouble(length), SECONDS)) else if (ms ne null) Some(Duration(JDouble.parseDouble(length), MILLISECONDS)) else if (mus ne null) Some(Duration(JDouble.parseDouble(length), MICROSECONDS)) else if (ns ne null) Some(Duration(JDouble.pa [...]
+ sys.error("made some error in regex (should not be possible)")
+ case REinf() => Some(Inf)
+ case REminf() => Some(MinusInf)
+ case _ => None
+ }
+
+ /**
+ * Parse TimeUnit from string representation.
+ */
+ def timeUnit(unit: String) = unit.toLowerCase match {
+ case "d" | "day" | "days" => DAYS
+ case "h" | "hour" | "hours" => HOURS
+ case "min" | "minute" | "minutes" => MINUTES
+ case "s" | "sec" | "second" | "seconds" => SECONDS
+ case "ms" | "milli" | "millis" | "millisecond" | "milliseconds" => MILLISECONDS
+ case "µs" | "micro" | "micros" | "microsecond" | "microseconds" => MICROSECONDS
+ case "ns" | "nano" | "nanos" | "nanosecond" | "nanoseconds" => NANOSECONDS
+ }
+
+ val Zero: Duration = new FiniteDuration(0, NANOSECONDS)
+
+ trait Infinite {
+ this: Duration =>
+
+ override def equals(other: Any) = false
+
+ def +(other: Duration): Duration =
+ other match {
+ case _: this.type => this
+ case _: Infinite => throw new IllegalArgumentException("illegal addition of infinities")
+ case _ => this
+ }
+ def -(other: Duration): Duration =
+ other match {
+ case _: this.type => throw new IllegalArgumentException("illegal subtraction of infinities")
+ case _ => this
+ }
+ def *(factor: Double): Duration = this
+ def /(factor: Double): Duration = this
+ def /(other: Duration): Double =
+ other match {
+ case _: Infinite => throw new IllegalArgumentException("illegal division of infinities")
+ // maybe questionable but pragmatic: Inf / 0 => Inf
+ case x => Double.PositiveInfinity * (if ((this > Zero) ^ (other >= Zero)) -1 else 1)
+ }
+
+ def finite_? = false
+
+ def length: Long = throw new IllegalArgumentException("length not allowed on infinite Durations")
+ def unit: TimeUnit = throw new IllegalArgumentException("unit not allowed on infinite Durations")
+ def toNanos: Long = throw new IllegalArgumentException("toNanos not allowed on infinite Durations")
+ def toMicros: Long = throw new IllegalArgumentException("toMicros not allowed on infinite Durations")
+ def toMillis: Long = throw new IllegalArgumentException("toMillis not allowed on infinite Durations")
+ def toSeconds: Long = throw new IllegalArgumentException("toSeconds not allowed on infinite Durations")
+ def toMinutes: Long = throw new IllegalArgumentException("toMinutes not allowed on infinite Durations")
+ def toHours: Long = throw new IllegalArgumentException("toHours not allowed on infinite Durations")
+ def toDays: Long = throw new IllegalArgumentException("toDays not allowed on infinite Durations")
+ def toUnit(unit: TimeUnit): Double = throw new IllegalArgumentException("toUnit not allowed on infinite Durations")
+
+ def printHMS = toString
+ }
+
+ /**
+ * Infinite duration: greater than any other and not equal to any other,
+ * including itself.
+ */
+ val Inf: Duration = new Duration with Infinite {
+ override def toString = "Duration.Inf"
+ def >(other: Duration) = true
+ def >=(other: Duration) = true
+ def <(other: Duration) = false
+ def <=(other: Duration) = false
+ def unary_- : Duration = MinusInf
+ }
+
+ /**
+ * Infinite negative duration: lesser than any other and not equal to any other,
+ * including itself.
+ */
+ val MinusInf: Duration = new Duration with Infinite {
+ override def toString = "Duration.MinusInf"
+ def >(other: Duration) = false
+ def >=(other: Duration) = false
+ def <(other: Duration) = true
+ def <=(other: Duration) = true
+ def unary_- : Duration = Inf
+ }
+
+ // Java Factories
+ def create(length: Long, unit: TimeUnit): Duration = apply(length, unit)
+ def create(length: Double, unit: TimeUnit): Duration = apply(length, unit)
+ def create(length: Long, unit: String): Duration = apply(length, unit)
+ def parse(s: String): Duration = unapply(s).get
+}
+
+/**
+ * Utility for working with java.util.concurrent.TimeUnit durations.
+ *
+ * <p/>
+ * Examples of usage from Java:
+ * <pre>
+ * import akka.util.FiniteDuration;
+ * import java.util.concurrent.TimeUnit;
+ *
+ * Duration duration = new FiniteDuration(100, MILLISECONDS);
+ * Duration duration = new FiniteDuration(5, "seconds");
+ *
+ * duration.toNanos();
+ * </pre>
+ *
+ * <p/>
+ * Examples of usage from Scala:
+ * <pre>
+ * import akka.util.Duration
+ * import java.util.concurrent.TimeUnit
+ *
+ * val duration = Duration(100, MILLISECONDS)
+ * val duration = Duration(100, "millis")
+ *
+ * duration.toNanos
+ * duration < 1.second
+ * duration <= Duration.Inf
+ * </pre>
+ *
+ * <p/>
+ * Implicits are also provided for Int, Long and Double. Example usage:
+ * <pre>
+ * import akka.util.duration._
+ *
+ * val duration = 100 millis
+ * </pre>
+ *
+ * Extractors, parsing and arithmetic are also included:
+ * <pre>
+ * val d = Duration("1.2 µs")
+ * val Duration(length, unit) = 5 millis
+ * val d2 = d * 2.5
+ * val d3 = d2 + 1.millisecond
+ * </pre>
+ */
+abstract class Duration {
+ def length: Long
+ def unit: TimeUnit
+ def toNanos: Long
+ def toMicros: Long
+ def toMillis: Long
+ def toSeconds: Long
+ def toMinutes: Long
+ def toHours: Long
+ def toDays: Long
+ def toUnit(unit: TimeUnit): Double
+ def printHMS: String
+ def <(other: Duration): Boolean
+ def <=(other: Duration): Boolean
+ def >(other: Duration): Boolean
+ def >=(other: Duration): Boolean
+ def +(other: Duration): Duration
+ def -(other: Duration): Duration
+ def *(factor: Double): Duration
+ def /(factor: Double): Duration
+ def /(other: Duration): Double
+ def unary_- : Duration
+ def finite_? : Boolean
+
+ // Java API
+ def lt(other: Duration) = this < other
+ def lteq(other: Duration) = this <= other
+ def gt(other: Duration) = this > other
+ def gteq(other: Duration) = this >= other
+ def plus(other: Duration) = this + other
+ def minus(other: Duration) = this - other
+ def mul(factor: Double) = this * factor
+ def div(factor: Double) = this / factor
+ def div(other: Duration) = this / other
+ def neg() = -this
+ def isFinite() = finite_?
+}
+
+class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
+ import Duration._
+
+ def this(length: Long, unit: String) = this(length, Duration.timeUnit(unit))
+
+ def toNanos = unit.toNanos(length)
+ def toMicros = unit.toMicros(length)
+ def toMillis = unit.toMillis(length)
+ def toSeconds = unit.toSeconds(length)
+ def toMinutes = unit.toMinutes(length)
+ def toHours = unit.toHours(length)
+ def toDays = unit.toDays(length)
+ def toUnit(u: TimeUnit) = long2double(toNanos) / NANOSECONDS.convert(1, u)
+
+ override def toString = this match {
+ case Duration(1, DAYS) => "1 day"
+ case Duration(x, DAYS) => x + " days"
+ case Duration(1, HOURS) => "1 hour"
+ case Duration(x, HOURS) => x + " hours"
+ case Duration(1, MINUTES) => "1 minute"
+ case Duration(x, MINUTES) => x + " minutes"
+ case Duration(1, SECONDS) => "1 second"
+ case Duration(x, SECONDS) => x + " seconds"
+ case Duration(1, MILLISECONDS) => "1 millisecond"
+ case Duration(x, MILLISECONDS) => x + " milliseconds"
+ case Duration(1, MICROSECONDS) => "1 microsecond"
+ case Duration(x, MICROSECONDS) => x + " microseconds"
+ case Duration(1, NANOSECONDS) => "1 nanosecond"
+ case Duration(x, NANOSECONDS) => x + " nanoseconds"
+ }
+
+ def printHMS = "%02d:%02d:%06.3f".format(toHours, toMinutes % 60, toMillis / 1000. % 60)
+
+ def <(other: Duration) = {
+ if (other.finite_?) {
+ toNanos < other.asInstanceOf[FiniteDuration].toNanos
+ } else {
+ other > this
+ }
+ }
+
+ def <=(other: Duration) = {
+ if (other.finite_?) {
+ toNanos <= other.asInstanceOf[FiniteDuration].toNanos
+ } else {
+ other >= this
+ }
+ }
+
+ def >(other: Duration) = {
+ if (other.finite_?) {
+ toNanos > other.asInstanceOf[FiniteDuration].toNanos
+ } else {
+ other < this
+ }
+ }
+
+ def >=(other: Duration) = {
+ if (other.finite_?) {
+ toNanos >= other.asInstanceOf[FiniteDuration].toNanos
+ } else {
+ other <= this
+ }
+ }
+
+ def +(other: Duration) = {
+ if (!other.finite_?) {
+ other
+ } else {
+ val nanos = toNanos + other.asInstanceOf[FiniteDuration].toNanos
+ fromNanos(nanos)
+ }
+ }
+
+ def -(other: Duration) = {
+ if (!other.finite_?) {
+ other
+ } else {
+ val nanos = toNanos - other.asInstanceOf[FiniteDuration].toNanos
+ fromNanos(nanos)
+ }
+ }
+
+ def *(factor: Double) = fromNanos(long2double(toNanos) * factor)
+
+ def /(factor: Double) = fromNanos(long2double(toNanos) / factor)
+
+ def /(other: Duration) = if (other.finite_?) long2double(toNanos) / other.toNanos else 0
+
+ def unary_- = Duration(-length, unit)
+
+ def finite_? = true
+
+ override def equals(other: Any) =
+ other.isInstanceOf[FiniteDuration] &&
+ toNanos == other.asInstanceOf[FiniteDuration].toNanos
+
+ override def hashCode = toNanos.asInstanceOf[Int]
+}
+
+class DurationInt(n: Int) {
+ def nanoseconds = Duration(n, NANOSECONDS)
+ def nanos = Duration(n, NANOSECONDS)
+ def nanosecond = Duration(n, NANOSECONDS)
+ def nano = Duration(n, NANOSECONDS)
+
+ def microseconds = Duration(n, MICROSECONDS)
+ def micros = Duration(n, MICROSECONDS)
+ def microsecond = Duration(n, MICROSECONDS)
+ def micro = Duration(n, MICROSECONDS)
+
+ def milliseconds = Duration(n, MILLISECONDS)
+ def millis = Duration(n, MILLISECONDS)
+ def millisecond = Duration(n, MILLISECONDS)
+ def milli = Duration(n, MILLISECONDS)
+
+ def seconds = Duration(n, SECONDS)
+ def second = Duration(n, SECONDS)
+
+ def minutes = Duration(n, MINUTES)
+ def minute = Duration(n, MINUTES)
+
+ def hours = Duration(n, HOURS)
+ def hour = Duration(n, HOURS)
+
+ def days = Duration(n, DAYS)
+ def day = Duration(n, DAYS)
+}
+
+class DurationLong(n: Long) {
+ def nanoseconds = Duration(n, NANOSECONDS)
+ def nanos = Duration(n, NANOSECONDS)
+ def nanosecond = Duration(n, NANOSECONDS)
+ def nano = Duration(n, NANOSECONDS)
+
+ def microseconds = Duration(n, MICROSECONDS)
+ def micros = Duration(n, MICROSECONDS)
+ def microsecond = Duration(n, MICROSECONDS)
+ def micro = Duration(n, MICROSECONDS)
+
+ def milliseconds = Duration(n, MILLISECONDS)
+ def millis = Duration(n, MILLISECONDS)
+ def millisecond = Duration(n, MILLISECONDS)
+ def milli = Duration(n, MILLISECONDS)
+
+ def seconds = Duration(n, SECONDS)
+ def second = Duration(n, SECONDS)
+
+ def minutes = Duration(n, MINUTES)
+ def minute = Duration(n, MINUTES)
+
+ def hours = Duration(n, HOURS)
+ def hour = Duration(n, HOURS)
+
+ def days = Duration(n, DAYS)
+ def day = Duration(n, DAYS)
+}
+
+class DurationDouble(d: Double) {
+ def nanoseconds = Duration(d, NANOSECONDS)
+ def nanos = Duration(d, NANOSECONDS)
+ def nanosecond = Duration(d, NANOSECONDS)
+ def nano = Duration(d, NANOSECONDS)
+
+ def microseconds = Duration(d, MICROSECONDS)
+ def micros = Duration(d, MICROSECONDS)
+ def microsecond = Duration(d, MICROSECONDS)
+ def micro = Duration(d, MICROSECONDS)
+
+ def milliseconds = Duration(d, MILLISECONDS)
+ def millis = Duration(d, MILLISECONDS)
+ def millisecond = Duration(d, MILLISECONDS)
+ def milli = Duration(d, MILLISECONDS)
+
+ def seconds = Duration(d, SECONDS)
+ def second = Duration(d, SECONDS)
+
+ def minutes = Duration(d, MINUTES)
+ def minute = Duration(d, MINUTES)
+
+ def hours = Duration(d, HOURS)
+ def hour = Duration(d, HOURS)
+
+ def days = Duration(d, DAYS)
+ def day = Duration(d, DAYS)
+}
diff --git a/test/disabled/presentation/akka/src/akka/util/HashCode.scala b/test/disabled/presentation/akka/src/akka/util/HashCode.scala
new file mode 100644
index 0000000..d015f12
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/util/HashCode.scala
@@ -0,0 +1,57 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.util
+
+import java.lang.reflect.{ Array => JArray }
+import java.lang.{ Float => JFloat, Double => JDouble }
+
+/**
+ * Set of methods which allow easy implementation of <code>hashCode</code>.
+ *
+ * Example:
+ * <pre>
+ * override def hashCode: Int = {
+ * var result = HashCode.SEED
+ * //collect the contributions of various fields
+ * result = HashCode.hash(result, fPrimitive)
+ * result = HashCode.hash(result, fObject)
+ * result = HashCode.hash(result, fArray)
+ * result
+ * }
+ * </pre>
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+object HashCode {
+ val SEED = 23
+
+ def hash(seed: Int, any: Any): Int = any match {
+ case value: Boolean => hash(seed, value)
+ case value: Char => hash(seed, value)
+ case value: Short => hash(seed, value)
+ case value: Int => hash(seed, value)
+ case value: Long => hash(seed, value)
+ case value: Float => hash(seed, value)
+ case value: Double => hash(seed, value)
+ case value: Byte => hash(seed, value)
+ case value: AnyRef =>
+ var result = seed
+ if (value eq null) result = hash(result, 0)
+ else if (!isArray(value)) result = hash(result, value.hashCode())
+ else for (id ← 0 until JArray.getLength(value)) result = hash(result, JArray.get(value, id)) // is an array
+ result
+ }
+ def hash(seed: Int, value: Boolean): Int = firstTerm(seed) + (if (value) 1 else 0)
+ def hash(seed: Int, value: Char): Int = firstTerm(seed) + value.asInstanceOf[Int]
+ def hash(seed: Int, value: Int): Int = firstTerm(seed) + value
+ def hash(seed: Int, value: Long): Int = firstTerm(seed) + (value ^ (value >>> 32)).asInstanceOf[Int]
+ def hash(seed: Int, value: Float): Int = hash(seed, JFloat.floatToIntBits(value))
+ def hash(seed: Int, value: Double): Int = hash(seed, JDouble.doubleToLongBits(value))
+
+ private def firstTerm(seed: Int): Int = PRIME * seed
+ private def isArray(anyRef: AnyRef): Boolean = anyRef.getClass.isArray
+ private val PRIME = 37
+}
+
diff --git a/test/disabled/presentation/akka/src/akka/util/Helpers.scala b/test/disabled/presentation/akka/src/akka/util/Helpers.scala
new file mode 100644
index 0000000..0ff4540
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/util/Helpers.scala
@@ -0,0 +1,99 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.util
+
+/**
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+object Helpers {
+
+ implicit def null2Option[T](t: T): Option[T] = Option(t)
+
+ def intToBytes(value: Int): Array[Byte] = {
+ val bytes = new Array[Byte](4)
+ bytes(0) = (value >>> 24).asInstanceOf[Byte]
+ bytes(1) = (value >>> 16).asInstanceOf[Byte]
+ bytes(2) = (value >>> 8).asInstanceOf[Byte]
+ bytes(3) = value.asInstanceOf[Byte]
+ bytes
+ }
+
+ def bytesToInt(bytes: Array[Byte], offset: Int): Int = {
+ (0 until 4).foldLeft(0)((value, index) => value + ((bytes(index + offset) & 0x000000FF) << ((4 - 1 - index) * 8)))
+ }
+
+ /**
+ * Convenience helper to cast the given Option of Any to an Option of the given type. Will throw a ClassCastException
+ * if the actual type is not assignable from the given one.
+ */
+ def narrow[T](o: Option[Any]): Option[T] = {
+ require((o ne null), "Option to be narrowed must not be null!")
+ o.asInstanceOf[Option[T]]
+ }
+
+ /**
+ * Convenience helper to cast the given Option of Any to an Option of the given type. Will swallow a possible
+ * ClassCastException and return None in that case.
+ */
+ def narrowSilently[T: ClassTag](o: Option[Any]): Option[T] =
+ try {
+ narrow(o)
+ } catch {
+ case e: ClassCastException =>
+ None
+ }
+
+ /**
+ * Reference that can hold either a typed value or an exception.
+ *
+ * Usage:
+ * <pre>
+ * scala> ResultOrError(1)
+ * res0: ResultOrError[Int] = ResultOrError at a96606
+ *
+ * scala> res0()
+ * res1: Int = 1
+ *
+ * scala> res0() = 3
+ *
+ * scala> res0()
+ * res3: Int = 3
+ *
+ * scala> res0() = { println("Hello world"); 3}
+ * Hello world
+ *
+ * scala> res0()
+ * res5: Int = 3
+ *
+ * scala> res0() = error("Lets see what happens here...")
+ *
+ * scala> res0()
+ * java.lang.RuntimeException: Lets see what happens here...
+ * at ResultOrError.apply(Helper.scala:11)
+ * at .<init>(<console>:6)
+ * at .<clinit>(<console>)
+ * at Re...
+ * </pre>
+ */
+ class ResultOrError[R](result: R) {
+ private[this] var contents: Either[R, Throwable] = Left(result)
+
+ def update(value: => R) = {
+ contents = try {
+ Left(value)
+ } catch {
+ case (error: Throwable) => Right(error)
+ }
+ }
+
+ def apply() = contents match {
+ case Left(result) => result
+ case Right(error) => throw error.fillInStackTrace
+ }
+ }
+ object ResultOrError {
+ def apply[R](result: R) = new ResultOrError(result)
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/util/ListenerManagement.scala b/test/disabled/presentation/akka/src/akka/util/ListenerManagement.scala
new file mode 100644
index 0000000..863e905
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/util/ListenerManagement.scala
@@ -0,0 +1,81 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.util
+
+import java.util.concurrent.ConcurrentSkipListSet
+import akka.actor.{ ActorInitializationException, ActorRef }
+
+/**
+ * A manager for listener actors. Intended for mixin by observables.
+ *
+ * @author Martin Krasser
+ */
+trait ListenerManagement {
+
+ private val listeners = new ConcurrentSkipListSet[ActorRef]
+
+ /**
+ * Specifies whether listeners should be started when added and stopped when removed or not
+ */
+ protected def manageLifeCycleOfListeners: Boolean = true
+
+ /**
+ * Adds the <code>listener</code> this registry's listener list.
+ * The <code>listener</code> is started by this method if manageLifeCycleOfListeners yields true.
+ */
+ def addListener(listener: ActorRef) {
+ if (manageLifeCycleOfListeners) listener.start()
+ listeners add listener
+ }
+
+ /**
+ * Removes the <code>listener</code> this registry's listener list.
+ * The <code>listener</code> is stopped by this method if manageLifeCycleOfListeners yields true.
+ */
+ def removeListener(listener: ActorRef) {
+ listeners remove listener
+ if (manageLifeCycleOfListeners) listener.stop()
+ }
+
+ /*
+ * Returns whether there are any listeners currently
+ */
+ def hasListeners: Boolean = !listeners.isEmpty
+
+ /**
+ * Checks if a specific listener is registered. ActorInitializationException leads to removal of listener if that
+ * one isShutdown.
+ */
+ def hasListener(listener: ActorRef): Boolean = listeners.contains(listener)
+
+ protected[akka] def notifyListeners(message: => Any) {
+ if (hasListeners) {
+ val msg = message
+ val iterator = listeners.iterator
+ while (iterator.hasNext) {
+ val listener = iterator.next
+ // Uncomment if those exceptions are so frequent as to bottleneck
+ // if (listener.isShutdown) iterator.remove() else
+ try {
+ listener ! msg
+ } catch {
+ case e: ActorInitializationException =>
+ if (listener.isShutdown) iterator.remove()
+ }
+ }
+ }
+ }
+
+ /**
+ * Execute <code>f</code> with each listener as argument. ActorInitializationException is not handled.
+ */
+ protected[akka] def foreachListener(f: (ActorRef) => Unit) {
+ val iterator = listeners.iterator
+ while (iterator.hasNext) {
+ val listener = iterator.next
+ if (listener.isRunning) f(listener)
+ }
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/util/LockUtil.scala b/test/disabled/presentation/akka/src/akka/util/LockUtil.scala
new file mode 100644
index 0000000..4aaefad
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/util/LockUtil.scala
@@ -0,0 +1,197 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.util
+
+import java.util.concurrent.locks.{ ReentrantReadWriteLock, ReentrantLock }
+import java.util.concurrent.atomic.{ AtomicBoolean }
+import akka.event.EventHandler
+
+/**
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+final class ReentrantGuard {
+ val lock = new ReentrantLock
+
+ final def withGuard[T](body: => T): T = {
+ lock.lock
+ try {
+ body
+ } finally {
+ lock.unlock
+ }
+ }
+
+ final def tryWithGuard[T](body: => T): T = {
+ while (!lock.tryLock) { Thread.sleep(10) } // wait on the monitor to be unlocked
+ try {
+ body
+ } finally {
+ lock.unlock
+ }
+ }
+}
+
+/**
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+class ReadWriteGuard {
+ private val rwl = new ReentrantReadWriteLock
+ val readLock = rwl.readLock
+ val writeLock = rwl.writeLock
+
+ def withWriteGuard[T](body: => T): T = {
+ writeLock.lock
+ try {
+ body
+ } finally {
+ writeLock.unlock
+ }
+ }
+
+ def withReadGuard[T](body: => T): T = {
+ readLock.lock
+ try {
+ body
+ } finally {
+ readLock.unlock
+ }
+ }
+}
+
+/**
+ * A very simple lock that uses CCAS (Compare Compare-And-Swap)
+ * Does not keep track of the owner and isn't Reentrant, so don't nest and try to stick to the if*-methods
+ */
+class SimpleLock {
+ val acquired = new AtomicBoolean(false)
+
+ def ifPossible(perform: () => Unit): Boolean = {
+ if (tryLock()) {
+ try {
+ perform
+ } finally {
+ unlock()
+ }
+ true
+ } else false
+ }
+
+ def ifPossibleYield[T](perform: () => T): Option[T] = {
+ if (tryLock()) {
+ try {
+ Some(perform())
+ } finally {
+ unlock()
+ }
+ } else None
+ }
+
+ def ifPossibleApply[T, R](value: T)(function: (T) => R): Option[R] = {
+ if (tryLock()) {
+ try {
+ Some(function(value))
+ } finally {
+ unlock()
+ }
+ } else None
+ }
+
+ def tryLock() = {
+ if (acquired.get) false
+ else acquired.compareAndSet(false, true)
+ }
+
+ def tryUnlock() = {
+ acquired.compareAndSet(true, false)
+ }
+
+ def locked = acquired.get
+
+ def unlock() {
+ acquired.set(false)
+ }
+}
+
+/**
+ * An atomic switch that can be either on or off
+ */
+class Switch(startAsOn: Boolean = false) {
+ private val switch = new AtomicBoolean(startAsOn)
+
+ protected def transcend(from: Boolean, action: => Unit): Boolean = synchronized {
+ if (switch.compareAndSet(from, !from)) {
+ try {
+ action
+ } catch {
+ case e: Throwable =>
+ EventHandler.error(e, this, e.getMessage)
+ switch.compareAndSet(!from, from) // revert status
+ throw e
+ }
+ true
+ } else false
+ }
+
+ def switchOff(action: => Unit): Boolean = transcend(from = true, action)
+ def switchOn(action: => Unit): Boolean = transcend(from = false, action)
+
+ def switchOff: Boolean = synchronized { switch.compareAndSet(true, false) }
+ def switchOn: Boolean = synchronized { switch.compareAndSet(false, true) }
+
+ def ifOnYield[T](action: => T): Option[T] = {
+ if (switch.get) Some(action)
+ else None
+ }
+
+ def ifOffYield[T](action: => T): Option[T] = {
+ if (!switch.get) Some(action)
+ else None
+ }
+
+ def ifOn(action: => Unit): Boolean = {
+ if (switch.get) {
+ action
+ true
+ } else false
+ }
+
+ def ifOff(action: => Unit): Boolean = {
+ if (!switch.get) {
+ action
+ true
+ } else false
+ }
+
+ def whileOnYield[T](action: => T): Option[T] = synchronized {
+ if (switch.get) Some(action)
+ else None
+ }
+
+ def whileOffYield[T](action: => T): Option[T] = synchronized {
+ if (!switch.get) Some(action)
+ else None
+ }
+
+ def whileOn(action: => Unit): Boolean = synchronized {
+ if (switch.get) {
+ action
+ true
+ } else false
+ }
+
+ def whileOff(action: => Unit): Boolean = synchronized {
+ if (switch.get) {
+ action
+ true
+ } else false
+ }
+
+ def ifElseYield[T](on: => T)(off: => T) = synchronized {
+ if (switch.get) on else off
+ }
+
+ def isOn = switch.get
+ def isOff = !isOn
+}
diff --git a/test/disabled/presentation/akka/src/akka/util/ReflectiveAccess.scala b/test/disabled/presentation/akka/src/akka/util/ReflectiveAccess.scala
new file mode 100644
index 0000000..f38d1f9
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/util/ReflectiveAccess.scala
@@ -0,0 +1,232 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.util
+
+import akka.dispatch.{ Future, CompletableFuture, MessageInvocation }
+import akka.config.{ Config, ModuleNotAvailableException }
+
+import java.net.InetSocketAddress
+import akka.remoteinterface.RemoteSupport
+import akka.actor._
+import akka.event.EventHandler
+
+/**
+ * Helper class for reflective access to different modules in order to allow optional loading of modules.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+object ReflectiveAccess {
+
+ val loader = getClass.getClassLoader
+
+ def isRemotingEnabled = Remote.isEnabled
+ lazy val isTypedActorEnabled = TypedActorModule.isEnabled
+
+ def ensureRemotingEnabled = Remote.ensureEnabled
+ def ensureTypedActorEnabled = TypedActorModule.ensureEnabled
+
+ /**
+ * Reflective access to the RemoteClient module.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+ object Remote {
+ val TRANSPORT = Config.config.getString("akka.remote.layer", "akka.remote.netty.NettyRemoteSupport")
+
+ private[akka] val configDefaultAddress =
+ new InetSocketAddress(Config.config.getString("akka.remote.server.hostname", "localhost"),
+ Config.config.getInt("akka.remote.server.port", 2552))
+
+ lazy val isEnabled = remoteSupportClass.isDefined
+
+ def ensureEnabled = if (!isEnabled) {
+ val e = new ModuleNotAvailableException("Can't load the remoting module, make sure that akka-remote.jar is on the classpath")
+ EventHandler.debug(this, e.toString)
+ throw e
+ }
+ val remoteSupportClass = getClassFor[RemoteSupport](TRANSPORT) match {
+ case Right(value) => Some(value)
+ case Left(exception) =>
+ EventHandler.debug(this, exception.toString)
+ None
+ }
+
+ protected[akka] val defaultRemoteSupport: Option[() => RemoteSupport] =
+ remoteSupportClass map { remoteClass =>
+ () => createInstance[RemoteSupport](
+ remoteClass,
+ Array[Class[_]](),
+ Array[AnyRef]()) match {
+ case Right(value) => value
+ case Left(exception) =>
+ val e = new ModuleNotAvailableException(
+ "Can't instantiate [%s] - make sure that akka-remote.jar is on the classpath".format(remoteClass.getName), exception)
+ EventHandler.debug(this, e.toString)
+ throw e
+ }
+ }
+ }
+
+ /**
+ * Reflective access to the TypedActors module.
+ *
+ * @author <a href="http://jonasboner.com">Jonas Bonér</a>
+ */
+ object TypedActorModule {
+
+ type TypedActorObject = {
+ def isJoinPoint(message: Any): Boolean
+ def isJoinPointAndOneWay(message: Any): Boolean
+ def actorFor(proxy: AnyRef): Option[ActorRef]
+ def proxyFor(actorRef: ActorRef): Option[AnyRef]
+ def stop(anyRef: AnyRef): Unit
+ }
+
+ lazy val isEnabled = typedActorObjectInstance.isDefined
+
+ def ensureEnabled = if (!isTypedActorEnabled) throw new ModuleNotAvailableException(
+ "Can't load the typed actor module, make sure that akka-typed-actor.jar is on the classpath")
+
+ val typedActorObjectInstance: Option[TypedActorObject] =
+ getObjectFor[TypedActorObject]("akka.actor.TypedActor$") match {
+ case Right(value) => Some(value)
+ case Left(exception) =>
+ EventHandler.debug(this, exception.toString)
+ None
+ }
+
+ def resolveFutureIfMessageIsJoinPoint(message: Any, future: Future[_]): Boolean = {
+ ensureEnabled
+ if (typedActorObjectInstance.get.isJoinPointAndOneWay(message)) {
+ future.asInstanceOf[CompletableFuture[Option[_]]].completeWithResult(None)
+ }
+ typedActorObjectInstance.get.isJoinPoint(message)
+ }
+ }
+
+ object AkkaCloudModule {
+
+ type Mailbox = {
+ def enqueue(message: MessageInvocation)
+ def dequeue: MessageInvocation
+ }
+
+ type Serializer = {
+ def toBinary(obj: AnyRef): Array[Byte]
+ def fromBinary(bytes: Array[Byte], clazz: Option[Class[_]]): AnyRef
+ }
+
+ lazy val isEnabled = clusterObjectInstance.isDefined
+
+ val clusterObjectInstance: Option[AnyRef] =
+ getObjectFor[AnyRef]("akka.cloud.cluster.Cluster$") match {
+ case Right(value) => Some(value)
+ case Left(exception) =>
+ EventHandler.debug(this, exception.toString)
+ None
+ }
+
+ val serializerClass: Option[Class[_]] =
+ getClassFor("akka.serialization.Serializer") match {
+ case Right(value) => Some(value)
+ case Left(exception) =>
+ EventHandler.debug(this, exception.toString)
+ None
+ }
+
+ def ensureEnabled = if (!isEnabled) throw new ModuleNotAvailableException(
+ "Feature is only available in Akka Cloud")
+ }
+
+ val noParams = Array[Class[_]]()
+ val noArgs = Array[AnyRef]()
+
+ def createInstance[T](clazz: Class[_],
+ params: Array[Class[_]],
+ args: Array[AnyRef]): Either[Exception, T] = try {
+ assert(clazz ne null)
+ assert(params ne null)
+ assert(args ne null)
+ val ctor = clazz.getDeclaredConstructor(params: _*)
+ ctor.setAccessible(true)
+ Right(ctor.newInstance(args: _*).asInstanceOf[T])
+ } catch {
+ case e: Exception => Left(e)
+ }
+
+ def createInstance[T](fqn: String,
+ params: Array[Class[_]],
+ args: Array[AnyRef],
+ classloader: ClassLoader = loader): Either[Exception, T] = try {
+ assert(params ne null)
+ assert(args ne null)
+ getClassFor(fqn) match {
+ case Right(value) =>
+ val ctor = value.getDeclaredConstructor(params: _*)
+ ctor.setAccessible(true)
+ Right(ctor.newInstance(args: _*).asInstanceOf[T])
+ case Left(exception) => Left(exception) //We could just cast this to Either[Exception, T] but it's ugly
+ }
+ } catch {
+ case e: Exception =>
+ Left(e)
+ }
+
+ //Obtains a reference to fqn.MODULE$
+ def getObjectFor[T](fqn: String, classloader: ClassLoader = loader): Either[Exception, T] = try {
+ getClassFor(fqn) match {
+ case Right(value) =>
+ val instance = value.getDeclaredField("MODULE$")
+ instance.setAccessible(true)
+ val obj = instance.get(null)
+ if (obj eq null) Left(new NullPointerException) else Right(obj.asInstanceOf[T])
+ case Left(exception) => Left(exception) //We could just cast this to Either[Exception, T] but it's ugly
+ }
+ } catch {
+ case e: Exception =>
+ Left(e)
+ }
+
+ def getClassFor[T](fqn: String, classloader: ClassLoader = loader): Either[Exception, Class[T]] = try {
+ assert(fqn ne null)
+
+ // First, use the specified CL
+ val first = try {
+ Right(classloader.loadClass(fqn).asInstanceOf[Class[T]])
+ } catch {
+ case c: ClassNotFoundException => Left(c)
+ }
+
+ if (first.isRight) first
+ else {
+ // Second option is to use the ContextClassLoader
+ val second = try {
+ Right(Thread.currentThread.getContextClassLoader.loadClass(fqn).asInstanceOf[Class[T]])
+ } catch {
+ case c: ClassNotFoundException => Left(c)
+ }
+
+ if (second.isRight) second
+ else {
+ val third = try {
+ if (classloader ne loader) Right(loader.loadClass(fqn).asInstanceOf[Class[T]]) else Left(null) //Horrid
+ } catch {
+ case c: ClassNotFoundException => Left(c)
+ }
+
+ if (third.isRight) third
+ else {
+ try {
+ Right(Class.forName(fqn).asInstanceOf[Class[T]]) // Last option is Class.forName
+ } catch {
+ case c: ClassNotFoundException => Left(c)
+ }
+ }
+ }
+ }
+ } catch {
+ case e: Exception => Left(e)
+ }
+}
diff --git a/test/disabled/presentation/akka/src/akka/util/package.scala b/test/disabled/presentation/akka/src/akka/util/package.scala
new file mode 100644
index 0000000..26a2492
--- /dev/null
+++ b/test/disabled/presentation/akka/src/akka/util/package.scala
@@ -0,0 +1,27 @@
+/**
+ * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
+ */
+
+package akka.util
+
+import java.util.concurrent.TimeUnit
+
+package object duration {
+ implicit def intToDurationInt(n: Int) = new DurationInt(n)
+ implicit def longToDurationLong(n: Long) = new DurationLong(n)
+ implicit def doubleToDurationDouble(d: Double) = new DurationDouble(d)
+
+ implicit def pairIntToDuration(p: (Int, TimeUnit)) = Duration(p._1, p._2)
+ implicit def pairLongToDuration(p: (Long, TimeUnit)) = Duration(p._1, p._2)
+ implicit def durationToPair(d: Duration) = (d.length, d.unit)
+
+ implicit def intMult(i: Int) = new {
+ def *(d: Duration) = d * i
+ }
+ implicit def longMult(l: Long) = new {
+ def *(d: Duration) = d * l
+ }
+ implicit def doubleMult(f: Double) = new {
+ def *(d: Duration) = d * f
+ }
+}
diff --git a/test/disabled/presentation/akka/src/com/eaio/util/lang/Hex.java b/test/disabled/presentation/akka/src/com/eaio/util/lang/Hex.java
new file mode 100644
index 0000000..7794059
--- /dev/null
+++ b/test/disabled/presentation/akka/src/com/eaio/util/lang/Hex.java
@@ -0,0 +1,215 @@
+/*
+ * Hex.java
+ *
+ * Created 04.07.2003.
+ *
+ * eaio: UUID - an implementation of the UUID specification Copyright (c) 2003-2009 Johann Burkard (jb at eaio.com)
+ * http://eaio.com.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
+ * documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
+ * permit persons to whom the Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
+ * Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+ * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+ * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+ * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ *
+ */
+package com.eaio.util.lang;
+
+import java.io.IOException;
+
+/**
+ * Number-to-hexadecimal and hexadecimal-to-number conversions.
+ *
+ * @see <a href="http://johannburkard.de/software/uuid/">UUID</a>
+ * @author <a href="mailto:jb at eaio.com">Johann Burkard</a>
+ * @version $Id: Hex.java 1888 2009-03-15 12:43:24Z johann $
+ */
+public final class Hex {
+
+ /**
+ * No instances needed.
+ */
+ private Hex() {
+ super();
+ }
+
+ private static final char[] DIGITS = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e',
+ 'f' };
+
+ /**
+ * Turns a <code>short</code> into hex octets.
+ *
+ * @param a the {@link Appendable}, may not be <code>null</code>
+ * @param in the integer
+ * @return {@link Appendable}
+ */
+ public static Appendable append(Appendable a, short in) {
+ return append(a, (long) in, 4);
+ }
+
+ /**
+ * Turns a <code>short</code> into hex octets.
+ *
+ * @param a the {@link Appendable}, may not be <code>null</code>
+ * @param in the integer
+ * @param length the number of octets to produce
+ * @return {@link Appendable}
+ */
+ public static Appendable append(Appendable a, short in, int length) {
+ return append(a, (long) in, length);
+ }
+
+ /**
+ * Turns an <code>int</code> into hex octets.
+ *
+ * @param a the {@link Appendable}, may not be <code>null</code>
+ * @param in the integer
+ * @return {@link Appendable}
+ */
+ public static Appendable append(Appendable a, int in) {
+ return append(a, (long) in, 8);
+ }
+
+ /**
+ * Turns an <code>int</code> into hex octets.
+ *
+ * @param a the {@link Appendable}, may not be <code>null</code>
+ * @param in the integer
+ * @param length the number of octets to produce
+ * @return {@link Appendable}
+ */
+ public static Appendable append(Appendable a, int in, int length) {
+ return append(a, (long) in, length);
+ }
+
+ /**
+ * Turns a <code>long</code> into hex octets.
+ *
+ * @param a the {@link Appendable}, may not be <code>null</code>
+ * @param in the long
+ * @return {@link Appendable}
+ */
+ public static Appendable append(Appendable a, long in) {
+ return append(a, in, 16);
+ }
+
+ /**
+ * Turns a <code>long</code> into hex octets.
+ *
+ * @param a the {@link Appendable}, may not be <code>null</code>
+ * @param in the long
+ * @param length the number of octets to produce
+ * @return {@link Appendable}
+ */
+ public static Appendable append(Appendable a, long in, int length) {
+ try {
+ int lim = (length << 2) - 4;
+ while (lim >= 0) {
+ a.append(DIGITS[(byte) (in >> lim) & 0x0f]);
+ lim -= 4;
+ }
+ }
+ catch (IOException ex) {
+ // Bla
+ }
+ return a;
+ }
+
+ /**
+ * Turns a <code>byte</code> array into hex octets.
+ *
+ * @param a the {@link Appendable}, may not be <code>null</code>
+ * @param bytes the <code>byte</code> array
+ * @return {@link Appendable}
+ */
+ public static Appendable append(Appendable a, byte[] bytes) {
+ try {
+ for (byte b : bytes) {
+ a.append(DIGITS[(byte) ((b & 0xF0) >> 4)]);
+ a.append(DIGITS[(byte) (b & 0x0F)]);
+ }
+ }
+ catch (IOException ex) {
+ // Bla
+ }
+ return a;
+ }
+
+ /**
+ * Parses a <code>long</code> from a hex encoded number. This method will skip all characters that are not 0-9,
+ * A-F and a-f.
+ * <p>
+ * Returns 0 if the {@link CharSequence} does not contain any interesting characters.
+ *
+ * @param s the {@link CharSequence} to extract a <code>long</code> from, may not be <code>null</code>
+ * @return a <code>long</code>
+ * @throws NullPointerException if the {@link CharSequence} is <code>null</code>
+ */
+ public static long parseLong(CharSequence s) {
+ long out = 0;
+ byte shifts = 0;
+ char c;
+ for (int i = 0; i < s.length() && shifts < 16; i++) {
+ c = s.charAt(i);
+ if ((c > 47) && (c < 58)) {
+ ++shifts;
+ out <<= 4;
+ out |= c - 48;
+ }
+ else if ((c > 64) && (c < 71)) {
+ ++shifts;
+ out <<= 4;
+ out |= c - 55;
+ }
+ else if ((c > 96) && (c < 103)) {
+ ++shifts;
+ out <<= 4;
+ out |= c - 87;
+ }
+ }
+ return out;
+ }
+
+ /**
+ * Parses a <code>short</code> from a hex encoded number. This method will skip all characters that are not 0-9,
+ * A-F and a-f.
+ * <p>
+ * Returns 0 if the {@link CharSequence} does not contain any interesting characters.
+ *
+ * @param s the {@link CharSequence} to extract a <code>short</code> from, may not be <code>null</code>
+ * @return a <code>short</code>
+ * @throws NullPointerException if the {@link CharSequence} is <code>null</code>
+ */
+ public static short parseShort(String s) {
+ short out = 0;
+ byte shifts = 0;
+ char c;
+ for (int i = 0; i < s.length() && shifts < 4; i++) {
+ c = s.charAt(i);
+ if ((c > 47) && (c < 58)) {
+ ++shifts;
+ out <<= 4;
+ out |= c - 48;
+ }
+ else if ((c > 64) && (c < 71)) {
+ ++shifts;
+ out <<= 4;
+ out |= c - 55;
+ }
+ else if ((c > 96) && (c < 103)) {
+ ++shifts;
+ out <<= 4;
+ out |= c - 87;
+ }
+ }
+ return out;
+ }
+
+}
diff --git a/test/disabled/presentation/akka/src/com/eaio/uuid/MACAddressParser.java b/test/disabled/presentation/akka/src/com/eaio/uuid/MACAddressParser.java
new file mode 100644
index 0000000..c077147
--- /dev/null
+++ b/test/disabled/presentation/akka/src/com/eaio/uuid/MACAddressParser.java
@@ -0,0 +1,116 @@
+/*
+ * MACAddressParserTest.java
+ *
+ * Created 30.01.2006.
+ *
+ * eaio: UUID - an implementation of the UUID specification
+ * Copyright (c) 2003-2009 Johann Burkard (jb at eaio.com) http://eaio.com.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included
+ * in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+ * NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+ * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+ * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+ * USE OR OTHER DEALINGS IN THE SOFTWARE.
+ *
+ */
+package com.eaio.uuid;
+
+/**
+ * The MAC address parser attempts to find the following patterns:
+ * <ul>
+ * <li>.{1,2}:.{1,2}:.{1,2}:.{1,2}:.{1,2}:.{1,2}</li>
+ * <li>.{1,2}-.{1,2}-.{1,2}-.{1,2}-.{1,2}-.{1,2}</li>
+ * </ul>
+ *
+ * @see <a href="http://johannburkard.de/software/uuid/">UUID</a>
+ * @author <a href="mailto:jb at eaio.com">Johann Burkard</a>
+ * @version $Id: MACAddressParser.java 1888 2009-03-15 12:43:24Z johann $
+ */
+class MACAddressParser {
+
+ /**
+ * No instances needed.
+ */
+ private MACAddressParser() {
+ super();
+ }
+
+ /**
+ * Attempts to find a pattern in the given String.
+ *
+ * @param in the String, may not be <code>null</code>
+ * @return the substring that matches this pattern or <code>null</code>
+ */
+ static String parse(String in) {
+
+ String out = in;
+
+ // lanscan
+
+ int hexStart = out.indexOf("0x");
+ if (hexStart != -1 && out.indexOf("ETHER") != -1) {
+ int hexEnd = out.indexOf(' ', hexStart);
+ if (hexEnd > hexStart + 2) {
+ out = out.substring(hexStart, hexEnd);
+ }
+ }
+
+ else {
+
+ int octets = 0;
+ int lastIndex, old, end;
+
+ if (out.indexOf('-') > -1) {
+ out = out.replace('-', ':');
+ }
+
+ lastIndex = out.lastIndexOf(':');
+
+ if (lastIndex > out.length() - 2) {
+ out = null;
+ }
+ else {
+
+ end = Math.min(out.length(), lastIndex + 3);
+
+ ++octets;
+ old = lastIndex;
+ while (octets != 5 && lastIndex != -1 && lastIndex > 1) {
+ lastIndex = out.lastIndexOf(':', --lastIndex);
+ if (old - lastIndex == 3 || old - lastIndex == 2) {
+ ++octets;
+ old = lastIndex;
+ }
+ }
+
+ if (octets == 5 && lastIndex > 1) {
+ out = out.substring(lastIndex - 2, end).trim();
+ }
+ else {
+ out = null;
+ }
+
+ }
+
+ }
+
+ if (out != null && out.startsWith("0x")) {
+ out = out.substring(2);
+ }
+
+ return out;
+ }
+
+}
diff --git a/test/disabled/presentation/akka/src/com/eaio/uuid/UUID.java b/test/disabled/presentation/akka/src/com/eaio/uuid/UUID.java
new file mode 100644
index 0000000..6c49bcd
--- /dev/null
+++ b/test/disabled/presentation/akka/src/com/eaio/uuid/UUID.java
@@ -0,0 +1,311 @@
+/*
+ * UUID.java
+ *
+ * Created 07.02.2003
+ *
+ * eaio: UUID - an implementation of the UUID specification
+ * Copyright (c) 2003-2009 Johann Burkard (jb at eaio.com) http://eaio.com.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included
+ * in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+ * NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+ * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+ * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+ * USE OR OTHER DEALINGS IN THE SOFTWARE.
+ *
+ */
+package com.eaio.uuid;
+
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.io.Serializable;
+
+import org.omg.CORBA.portable.IDLEntity;
+
+import com.eaio.util.lang.Hex;
+
+/**
+ * Creates UUIDs according to the DCE Universal Token Identifier specification.
+ * <p>
+ * All you need to know:
+ * <pre>
+ * UUID u = new UUID();
+ * </pre>
+ *
+ * @see <a href="http://www.opengroup.org/onlinepubs/9629399/apdxa.htm">
+ * http://www.opengroup.org/onlinepubs/9629399/apdxa.htm
+ * </a>
+ * @see <a href="http://www.uddi.org/pubs/draft-leach-uuids-guids-01.txt">
+ * http://www.uddi.org/pubs/draft-leach-uuids-guids-01.txt
+ * </a>
+ * @see <a href="http://johannburkard.de/software/uuid/">UUID</a>
+ * @author <a href="mailto:jb at eaio.de">Johann Burkard</a>
+ * @version $Id: UUID.java 1888 2009-03-15 12:43:24Z johann $
+ */
+public class UUID implements Comparable<UUID>, Serializable, Cloneable,
+ IDLEntity {
+
+ /**
+ * Hasn't ever changed between versions.
+ */
+ static final long serialVersionUID = 7435962790062944603L;
+
+ /**
+ * The time field of the UUID.
+ *
+ * @serial
+ */
+ public long time;
+
+ /**
+ * The clock sequence and node field of the UUID.
+ *
+ * @serial
+ */
+ public long clockSeqAndNode;
+
+ /**
+ * Constructor for UUID. Constructs a new, unique UUID.
+ *
+ * @see UUIDGen#newTime()
+ * @see UUIDGen#getClockSeqAndNode()
+ */
+ public UUID() {
+ this(UUIDGen.newTime(), UUIDGen.getClockSeqAndNode());
+ }
+
+ /**
+ * Constructor for UUID. Constructs a UUID from two <code>long</code> values.
+ *
+ * @param time the upper 64 bits
+ * @param clockSeqAndNode the lower 64 bits
+ */
+ public UUID(long time, long clockSeqAndNode) {
+ this.time = time;
+ this.clockSeqAndNode = clockSeqAndNode;
+ }
+
+ /**
+ * Copy constructor for UUID. Values of the given UUID are copied.
+ *
+ * @param u the UUID, may not be <code>null</code>
+ */
+ public UUID(UUID u) {
+ this(u.time, u.clockSeqAndNode);
+ }
+
+ /**
+ * Parses a textual representation of a UUID.
+ * <p>
+ * No validation is performed. If the {@link CharSequence} is shorter than 36 characters,
+ * {@link ArrayIndexOutOfBoundsException}s will be thrown.
+ *
+ * @param s the {@link CharSequence}, may not be <code>null</code>
+ */
+ public UUID(CharSequence s) {
+ this(Hex.parseLong(s.subSequence(0, 18)), Hex.parseLong(s.subSequence(
+ 19, 36)));
+ }
+
+ /**
+ * Compares this UUID to another Object. Throws a {@link ClassCastException} if
+ * the other Object is not an instance of the UUID class. Returns a value
+ * smaller than zero if the other UUID is "larger" than this UUID and a value
+ * larger than zero if the other UUID is "smaller" than this UUID.
+ *
+ * @param t the other UUID, may not be <code>null</code>
+ * @return a value < 0, 0 or a value > 0
+ * @see java.lang.Comparable#compareTo(java.lang.Object)
+ * @throws ClassCastException
+ */
+ public int compareTo(UUID t) {
+ if (this == t) {
+ return 0;
+ }
+ if (time > t.time) {
+ return 1;
+ }
+ if (time < t.time) {
+ return -1;
+ }
+ if (clockSeqAndNode > t.clockSeqAndNode) {
+ return 1;
+ }
+ if (clockSeqAndNode < t.clockSeqAndNode) {
+ return -1;
+ }
+ return 0;
+ }
+
+ /**
+ * Tweaked Serialization routine.
+ *
+ * @param out the ObjectOutputStream
+ * @throws IOException
+ */
+ private void writeObject(ObjectOutputStream out) throws IOException {
+ out.writeLong(time);
+ out.writeLong(clockSeqAndNode);
+ }
+
+ /**
+ * Tweaked Serialization routine.
+ *
+ * @param in the ObjectInputStream
+ * @throws IOException
+ */
+ private void readObject(ObjectInputStream in) throws IOException {
+ time = in.readLong();
+ clockSeqAndNode = in.readLong();
+ }
+
+ /**
+ * Returns this UUID as a String.
+ *
+ * @return a String, never <code>null</code>
+ * @see java.lang.Object#toString()
+ * @see #toAppendable(Appendable)
+ */
+ @Override
+ public final String toString() {
+ return toAppendable(null).toString();
+ }
+
+ /**
+ * Appends a String representation of this to the given {@link StringBuffer} or
+ * creates a new one if none is given.
+ *
+ * @param in the StringBuffer to append to, may be <code>null</code>
+ * @return a StringBuffer, never <code>null</code>
+ * @see #toAppendable(Appendable)
+ */
+ public StringBuffer toStringBuffer(StringBuffer in) {
+ StringBuffer out = in;
+ if (out == null) {
+ out = new StringBuffer(36);
+ }
+ else {
+ out.ensureCapacity(out.length() + 36);
+ }
+ return (StringBuffer) toAppendable(out);
+ }
+
+ /**
+ * Appends a String representation of this object to the given {@link Appendable} object.
+ * <p>
+ * For reasons I'll probably never understand, Sun has decided to have a number of I/O classes implement
+ * Appendable which forced them to destroy an otherwise nice and simple interface with {@link IOException}s.
+ * <p>
+ * I decided to ignore any possible IOExceptions in this method.
+ *
+ * @param a the Appendable object, may be <code>null</code>
+ * @return an Appendable object, defaults to a {@link StringBuilder} if <code>a</code> is <code>null</code>
+ */
+ public Appendable toAppendable(Appendable a) {
+ Appendable out = a;
+ if (out == null) {
+ out = new StringBuilder(36);
+ }
+ try {
+ Hex.append(out, (int) (time >> 32)).append('-');
+ Hex.append(out, (short) (time >> 16)).append('-');
+ Hex.append(out, (short) time).append('-');
+ Hex.append(out, (short) (clockSeqAndNode >> 48)).append('-');
+ Hex.append(out, clockSeqAndNode, 12);
+ }
+ catch (IOException ex) {
+ // What were they thinking?
+ }
+ return out;
+ }
+
+ /**
+ * Returns a hash code of this UUID. The hash code is calculated by XOR'ing the
+ * upper 32 bits of the time and clockSeqAndNode fields and the lower 32 bits of
+ * the time and clockSeqAndNode fields.
+ *
+ * @return an <code>int</code> representing the hash code
+ * @see java.lang.Object#hashCode()
+ */
+ @Override
+ public int hashCode() {
+ return (int) ((time >> 32) ^ time ^ (clockSeqAndNode >> 32) ^ clockSeqAndNode);
+ }
+
+ /**
+ * Clones this UUID.
+ *
+ * @return a new UUID with identical values, never <code>null</code>
+ */
+ @Override
+ public Object clone() {
+ try {
+ return super.clone();
+ }
+ catch (CloneNotSupportedException ex) {
+ // One of Sun's most epic fails.
+ return null;
+ }
+ }
+
+ /**
+ * Returns the time field of the UUID (upper 64 bits).
+ *
+ * @return the time field
+ */
+ public final long getTime() {
+ return time;
+ }
+
+ /**
+ * Returns the clock and node field of the UUID (lower 64 bits).
+ *
+ * @return the clockSeqAndNode field
+ */
+ public final long getClockSeqAndNode() {
+ return clockSeqAndNode;
+ }
+
+ /**
+ * Compares two Objects for equality.
+ *
+ * @see java.lang.Object#equals(Object)
+ * @param obj the Object to compare this UUID with, may be <code>null</code>
+ * @return <code>true</code> if the other Object is equal to this UUID,
+ * <code>false</code> if not
+ */
+ @Override
+ public boolean equals(Object obj) {
+ if (!(obj instanceof UUID)) {
+ return false;
+ }
+ return compareTo((UUID) obj) == 0;
+ }
+
+ /**
+ * Returns the nil UUID (a UUID whose values are both set to zero).
+ * <p>
+ * Starting with version 2.0, this method does return a new UUID instance every
+ * time it is called. Earlier versions returned one instance. This has now been
+ * changed because this UUID has public, non-final instance fields. Returning a
+ * new instance is therefore more safe.
+ *
+ * @return a nil UUID, never <code>null</code>
+ */
+ public static UUID nilUUID() {
+ return new UUID(0, 0);
+ }
+
+}
diff --git a/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDGen.java b/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDGen.java
new file mode 100644
index 0000000..7b63f65
--- /dev/null
+++ b/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDGen.java
@@ -0,0 +1,364 @@
+/*
+ * UUIDGen.java
+ *
+ * Created on 09.08.2003.
+ *
+ * eaio: UUID - an implementation of the UUID specification
+ * Copyright (c) 2003-2009 Johann Burkard (jb at eaio.com) http://eaio.com.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included
+ * in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+ * NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+ * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+ * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+ * USE OR OTHER DEALINGS IN THE SOFTWARE.
+ *
+ */
+package com.eaio.uuid;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.InetAddress;
+import java.net.InterfaceAddress;
+import java.net.NetworkInterface;
+import java.net.SocketException;
+import java.net.UnknownHostException;
+import java.util.Enumeration;
+
+import com.eaio.util.lang.Hex;
+
+/**
+ * This class contains methods to generate UUID fields. These methods have been
+ * refactored out of {@link com.eaio.uuid.UUID}.
+ * <p>
+ * Starting with version 2, this implementation tries to obtain the MAC address
+ * of the network card. Under Microsoft Windows, the <code>ifconfig</code>
+ * command is used which may pop up a command window in Java Virtual Machines
+ * prior to 1.4 once this class is initialized. The command window is closed
+ * automatically.
+ * <p>
+ * The MAC address code has been tested extensively in Microsoft Windows,
+ * Linux, Solaris 8, HP-UX 11, but should work in MacOS X and BSDs, too.
+ * <p>
+ * If you use JDK 6 or later, the code in {@link InterfaceAddress} will be used.
+ *
+ * @see <a href="http://johannburkard.de/software/uuid/">UUID</a>
+ * @author <a href="mailto:jb at eaio.de">Johann Burkard</a>
+ * @version $Id: UUIDGen.java 2914 2010-04-23 11:35:00Z johann $
+ * @see com.eaio.uuid.UUID
+ */
+public final class UUIDGen {
+
+ /**
+ * No instances needed.
+ */
+ private UUIDGen() {
+ super();
+ }
+
+ /**
+ * The last time value. Used to remove duplicate UUIDs.
+ */
+ private static long lastTime = Long.MIN_VALUE;
+
+ /**
+ * The cached MAC address.
+ */
+ private static String macAddress = null;
+
+ /**
+ * The current clock and node value.
+ */
+ private static long clockSeqAndNode = 0x8000000000000000L;
+
+ static {
+
+ try {
+ Class.forName("java.net.InterfaceAddress");
+ macAddress = Class.forName(
+ "com.eaio.uuid.UUIDGen$HardwareAddressLookup").newInstance().toString();
+ }
+ catch (ExceptionInInitializerError err) {
+ // Ignored.
+ }
+ catch (ClassNotFoundException ex) {
+ // Ignored.
+ }
+ catch (LinkageError err) {
+ // Ignored.
+ }
+ catch (IllegalAccessException ex) {
+ // Ignored.
+ }
+ catch (InstantiationException ex) {
+ // Ignored.
+ }
+ catch (SecurityException ex) {
+ // Ignored.
+ }
+
+ if (macAddress == null) {
+
+ Process p = null;
+ BufferedReader in = null;
+
+ try {
+ String osname = System.getProperty("os.name", "");
+
+ if (osname.startsWith("Windows")) {
+ p = Runtime.getRuntime().exec(
+ new String[] { "ipconfig", "/all" }, null);
+ }
+ // Solaris code must appear before the generic code
+ else if (osname.startsWith("Solaris")
+ || osname.startsWith("SunOS")) {
+ String hostName = getFirstLineOfCommand(
+ "uname", "-n" );
+ if (hostName != null) {
+ p = Runtime.getRuntime().exec(
+ new String[] { "/usr/sbin/arp", hostName },
+ null);
+ }
+ }
+ else if (new File("/usr/sbin/lanscan").exists()) {
+ p = Runtime.getRuntime().exec(
+ new String[] { "/usr/sbin/lanscan" }, null);
+ }
+ else if (new File("/sbin/ifconfig").exists()) {
+ p = Runtime.getRuntime().exec(
+ new String[] { "/sbin/ifconfig", "-a" }, null);
+ }
+
+ if (p != null) {
+ in = new BufferedReader(new InputStreamReader(
+ p.getInputStream()), 128);
+ String l = null;
+ while ((l = in.readLine()) != null) {
+ macAddress = MACAddressParser.parse(l);
+ if (macAddress != null
+ && Hex.parseShort(macAddress) != 0xff) {
+ break;
+ }
+ }
+ }
+
+ }
+ catch (SecurityException ex) {
+ // Ignore it.
+ }
+ catch (IOException ex) {
+ // Ignore it.
+ }
+ finally {
+ if (p != null) {
+ if (in != null) {
+ try {
+ in.close();
+ }
+ catch (IOException ex) {
+ // Ignore it.
+ }
+ }
+ try {
+ p.getErrorStream().close();
+ }
+ catch (IOException ex) {
+ // Ignore it.
+ }
+ try {
+ p.getOutputStream().close();
+ }
+ catch (IOException ex) {
+ // Ignore it.
+ }
+ p.destroy();
+ }
+ }
+
+ }
+
+ if (macAddress != null) {
+ clockSeqAndNode |= Hex.parseLong(macAddress);
+ }
+ else {
+ try {
+ byte[] local = InetAddress.getLocalHost().getAddress();
+ clockSeqAndNode |= (local[0] << 24) & 0xFF000000L;
+ clockSeqAndNode |= (local[1] << 16) & 0xFF0000;
+ clockSeqAndNode |= (local[2] << 8) & 0xFF00;
+ clockSeqAndNode |= local[3] & 0xFF;
+ }
+ catch (UnknownHostException ex) {
+ clockSeqAndNode |= (long) (Math.random() * 0x7FFFFFFF);
+ }
+ }
+
+ // Skip the clock sequence generation process and use random instead.
+
+ clockSeqAndNode |= (long) (Math.random() * 0x3FFF) << 48;
+
+ }
+
+ /**
+ * Returns the current clockSeqAndNode value.
+ *
+ * @return the clockSeqAndNode value
+ * @see UUID#getClockSeqAndNode()
+ */
+ public static long getClockSeqAndNode() {
+ return clockSeqAndNode;
+ }
+
+ /**
+ * Generates a new time field. Each time field is unique and larger than the
+ * previously generated time field.
+ *
+ * @return a new time value
+ * @see UUID#getTime()
+ */
+ public static long newTime() {
+ return createTime(System.currentTimeMillis());
+ }
+
+ /**
+ * Creates a new time field from the given timestamp. Note that even identical
+ * values of <code>currentTimeMillis</code> will produce different time fields.
+ *
+ * @param currentTimeMillis the timestamp
+ * @return a new time value
+ * @see UUID#getTime()
+ */
+ public static synchronized long createTime(long currentTimeMillis) {
+
+ long time;
+
+ // UTC time
+
+ long timeMillis = (currentTimeMillis * 10000) + 0x01B21DD213814000L;
+
+ if (timeMillis > lastTime) {
+ lastTime = timeMillis;
+ }
+ else {
+ timeMillis = ++lastTime;
+ }
+
+ // time low
+
+ time = timeMillis << 32;
+
+ // time mid
+
+ time |= (timeMillis & 0xFFFF00000000L) >> 16;
+
+ // time hi and version
+
+ time |= 0x1000 | ((timeMillis >> 48) & 0x0FFF); // version 1
+
+ return time;
+
+ }
+
+ /**
+ * Returns the MAC address. Not guaranteed to return anything.
+ *
+ * @return the MAC address, may be <code>null</code>
+ */
+ public static String getMACAddress() {
+ return macAddress;
+ }
+
+ /**
+ * Returns the first line of the shell command.
+ *
+ * @param commands the commands to run
+ * @return the first line of the command
+ * @throws IOException
+ */
+ static String getFirstLineOfCommand(String... commands) throws IOException {
+
+ Process p = null;
+ BufferedReader reader = null;
+
+ try {
+ p = Runtime.getRuntime().exec(commands);
+ reader = new BufferedReader(new InputStreamReader(
+ p.getInputStream()), 128);
+
+ return reader.readLine();
+ }
+ finally {
+ if (p != null) {
+ if (reader != null) {
+ try {
+ reader.close();
+ }
+ catch (IOException ex) {
+ // Ignore it.
+ }
+ }
+ try {
+ p.getErrorStream().close();
+ }
+ catch (IOException ex) {
+ // Ignore it.
+ }
+ try {
+ p.getOutputStream().close();
+ }
+ catch (IOException ex) {
+ // Ignore it.
+ }
+ p.destroy();
+ }
+ }
+
+ }
+
+ /**
+ * Scans MAC addresses for good ones.
+ */
+ static class HardwareAddressLookup {
+
+ /**
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ String out = null;
+ try {
+ Enumeration<NetworkInterface> ifs = NetworkInterface.getNetworkInterfaces();
+ if (ifs != null) {
+ while (ifs.hasMoreElements()) {
+ NetworkInterface iface = ifs.nextElement();
+ byte[] hardware = iface.getHardwareAddress();
+ if (hardware != null && hardware.length == 6
+ && hardware[1] != (byte) 0xff) {
+ out = Hex.append(new StringBuilder(36), hardware).toString();
+ break;
+ }
+ }
+ }
+ }
+ catch (SocketException ex) {
+ // Ignore it.
+ }
+ return out;
+ }
+
+ }
+
+}
diff --git a/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDHelper.java b/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDHelper.java
new file mode 100644
index 0000000..7abbe85
--- /dev/null
+++ b/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDHelper.java
@@ -0,0 +1,86 @@
+package com.eaio.uuid;
+
+
+/**
+* com/eaio/uuid/UUIDHelper.java .
+* Generated by the IDL-to-Java compiler (portable), version "3.1"
+* from uuid.idl
+* Sonntag, 7. März 2004 21.35 Uhr CET
+*/
+
+
+/**
+ * The UUID struct.
+ */
+abstract public class UUIDHelper
+{
+ private static String _id = "IDL:com/eaio/uuid/UUID:1.0";
+
+ public static void insert (org.omg.CORBA.Any a, com.eaio.uuid.UUID that)
+ {
+ org.omg.CORBA.portable.OutputStream out = a.create_output_stream ();
+ a.type (type ());
+ write (out, that);
+ a.read_value (out.create_input_stream (), type ());
+ }
+
+ public static com.eaio.uuid.UUID extract (org.omg.CORBA.Any a)
+ {
+ return read (a.create_input_stream ());
+ }
+
+ private static org.omg.CORBA.TypeCode __typeCode = null;
+ private static boolean __active = false;
+ synchronized public static org.omg.CORBA.TypeCode type ()
+ {
+ if (__typeCode == null)
+ {
+ synchronized (org.omg.CORBA.TypeCode.class)
+ {
+ if (__typeCode == null)
+ {
+ if (__active)
+ {
+ return org.omg.CORBA.ORB.init().create_recursive_tc ( _id );
+ }
+ __active = true;
+ org.omg.CORBA.StructMember[] _members0 = new org.omg.CORBA.StructMember [2];
+ org.omg.CORBA.TypeCode _tcOf_members0 = null;
+ _tcOf_members0 = org.omg.CORBA.ORB.init ().get_primitive_tc (org.omg.CORBA.TCKind.tk_longlong);
+ _members0[0] = new org.omg.CORBA.StructMember (
+ "time",
+ _tcOf_members0,
+ null);
+ _tcOf_members0 = org.omg.CORBA.ORB.init ().get_primitive_tc (org.omg.CORBA.TCKind.tk_longlong);
+ _members0[1] = new org.omg.CORBA.StructMember (
+ "clockSeqAndNode",
+ _tcOf_members0,
+ null);
+ __typeCode = org.omg.CORBA.ORB.init ().create_struct_tc (com.eaio.uuid.UUIDHelper.id (), "UUID", _members0);
+ __active = false;
+ }
+ }
+ }
+ return __typeCode;
+ }
+
+ public static String id ()
+ {
+ return _id;
+ }
+
+ public static com.eaio.uuid.UUID read (org.omg.CORBA.portable.InputStream istream)
+ {
+ com.eaio.uuid.UUID value = new com.eaio.uuid.UUID ();
+ value.time = istream.read_longlong ();
+ value.clockSeqAndNode = istream.read_longlong ();
+ return value;
+ }
+
+ public static void write (org.omg.CORBA.portable.OutputStream ostream, com.eaio.uuid.UUID value)
+ {
+ ostream.write_longlong (value.time);
+ ostream.write_longlong (value.clockSeqAndNode);
+ }
+
+}
diff --git a/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDHolder.java b/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDHolder.java
new file mode 100644
index 0000000..d5531f5
--- /dev/null
+++ b/test/disabled/presentation/akka/src/com/eaio/uuid/UUIDHolder.java
@@ -0,0 +1,42 @@
+package com.eaio.uuid;
+
+/**
+* com/eaio/uuid/UUIDHolder.java .
+* Generated by the IDL-to-Java compiler (portable), version "3.1"
+* from uuid.idl
+* Sonntag, 7. März 2004 21.35 Uhr CET
+*/
+
+
+/**
+ * The UUID struct.
+ */
+public final class UUIDHolder implements org.omg.CORBA.portable.Streamable
+{
+ public com.eaio.uuid.UUID value = null;
+
+ public UUIDHolder ()
+ {
+ }
+
+ public UUIDHolder (com.eaio.uuid.UUID initialValue)
+ {
+ value = initialValue;
+ }
+
+ public void _read (org.omg.CORBA.portable.InputStream i)
+ {
+ value = com.eaio.uuid.UUIDHelper.read (i);
+ }
+
+ public void _write (org.omg.CORBA.portable.OutputStream o)
+ {
+ com.eaio.uuid.UUIDHelper.write (o, value);
+ }
+
+ public org.omg.CORBA.TypeCode _type ()
+ {
+ return com.eaio.uuid.UUIDHelper.type ();
+ }
+
+}
diff --git a/test/disabled/presentation/doc.check b/test/disabled/presentation/doc.check
new file mode 100644
index 0000000..5a3ff13
--- /dev/null
+++ b/test/disabled/presentation/doc.check
@@ -0,0 +1 @@
+reload: Base.scala, Class.scala, Derived.scala
diff --git a/test/disabled/presentation/doc/doc.scala b/test/disabled/presentation/doc/doc.scala
new file mode 100755
index 0000000..371b825
--- /dev/null
+++ b/test/disabled/presentation/doc/doc.scala
@@ -0,0 +1,139 @@
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.base._
+import scala.tools.nsc.doc.base.comment._
+import scala.tools.nsc.interactive._
+import scala.tools.nsc.interactive.tests._
+import scala.tools.nsc.util._
+
+object Test extends InteractiveTest {
+ val tags = Seq(
+ "@example `\"abb\".permutations = Iterator(abb, bab, bba)`",
+ "@version 1.0, 09/07/2012",
+ "@since 2.10",
+ "@todo this is unsafe!",
+ "@note Don't inherit!",
+ "@see something else"
+ )
+
+ val names = Seq("Class", "Def", "Val", "Var", "AbstracType", "TypeAlias", "Trait", "InnerClass")
+ val bareText =
+ """abstract class %s {
+ | def %s = ""
+ | val %s = ""
+ | var %s: String = _
+ | type %s
+ | type %s = String
+ | class %s
+ |}
+ |trait %s""".stripMargin.format(names: _*)
+
+ def docComment(nTags: Int) = "/**\n%s*/".format(tags.take(nTags).mkString("\n"))
+
+ def text(name: String, nTags: Int) = {
+ val nameIndex = bareText.indexOf(name)
+ val (pre, post) = bareText.splitAt(nameIndex)
+ val crIndex = pre.lastIndexOf("\n")
+ val (prepre, prepost) = pre.splitAt(crIndex)
+ prepre + docComment(nTags) + prepost + post
+ }
+
+
+
+ override lazy val compiler = {
+ prepareSettings(settings)
+ new Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase {
+ val global: this.type = this
+ def chooseLink(links: List[LinkTo]): LinkTo = links.head
+ def internalLink(sym: Symbol, site: Symbol) = None
+ def toString(link: LinkTo) = link.toString
+ def warnNoLink = false
+ def findExternalLink(sym: Symbol, name: String) = None
+
+ override def forScaladoc = true
+
+ def getComment(sym: Symbol, source: SourceFile, fragments: List[(Symbol,SourceFile)]): Option[Comment] = {
+ val docResponse = new Response[(String, String, Position)]
+ askDocComment(sym, source, sym.owner, fragments, docResponse)
+ docResponse.get.left.toOption flatMap {
+ case (expanded, raw, pos) =>
+ if (expanded.isEmpty)
+ None
+ else
+ Some(ask { () => parseAtSymbol(expanded, raw, pos, Some(sym.owner)) })
+ }
+ }
+ }
+ }
+
+ override def runDefaultTests() {
+ import compiler._
+ def findSource(name: String) = sourceFiles.find(_.file.name == name).get
+
+ val className = names.head
+ for (name <- names;
+ i <- 1 to tags.length) {
+ val newText = text(name, i)
+ val source = findSource("Class.scala")
+ val batch = new BatchSourceFile(source.file, newText.toCharArray)
+ val reloadResponse = new Response[Unit]
+ compiler.askReload(List(batch), reloadResponse)
+ reloadResponse.get.left.toOption match {
+ case None =>
+ println("Couldn't reload")
+ case Some(_) =>
+ val parseResponse = new Response[Tree]
+ askParsedEntered(batch, true, parseResponse)
+ parseResponse.get.left.toOption match {
+ case None =>
+ println("Couldn't parse")
+ case Some(_) =>
+ val sym = compiler.ask { () =>
+ val toplevel = definitions.EmptyPackage.info.decl(newTypeName(name))
+ if (toplevel eq NoSymbol) {
+ val clazz = definitions.EmptyPackage.info.decl(newTypeName(className))
+
+ val term = clazz.info.decl(newTermName(name))
+ if (term eq NoSymbol) clazz.info.decl(newTypeName(name)) else
+ if (term.isAccessor) term.accessed else term
+ } else toplevel
+ }
+
+ getComment(sym, batch, (sym,batch)::Nil) match {
+ case None => println(s"Got no doc comment for $name")
+ case Some(comment) =>
+ import comment._
+ def cnt(bodies: Iterable[Body]) = bodies.size
+ val actual = cnt(example) + cnt(version) + cnt(since) + cnt(todo) + cnt(note) + cnt(see)
+ if (actual != i)
+ println(s"Got docComment with $actual tags instead of $i, file text:\n$newText")
+ }
+ }
+ }
+ }
+
+ // Check inter-classes documentation one-time retrieved ok.
+ val baseSource = findSource("Base.scala")
+ val derivedSource = findSource("Derived.scala")
+ def existsText(where: Any, text: String): Boolean = where match {
+ case `text` => true
+ case s: Seq[_] => s exists (existsText(_, text))
+ case p: Product => p.productIterator exists (existsText(_, text))
+ }
+ val (derived, base) = compiler.ask { () =>
+ val derived = definitions.RootPackage.info.decl(newTermName("p")).info.decl(newTypeName("Derived"))
+ (derived, derived.ancestors(0))
+ }
+ val cmt1 = getComment(derived, derivedSource, (base, baseSource)::(derived, derivedSource)::Nil)
+ if (!existsText(cmt1, "Derived comment."))
+ println("Unexpected Derived class comment:"+cmt1)
+
+ val (fooDerived, fooBase) = compiler.ask { () =>
+ val decl = derived.tpe.decl(newTermName("foo"))
+ (decl, decl.allOverriddenSymbols(0))
+ }
+
+ val cmt2 = getComment(fooDerived, derivedSource, (fooBase, baseSource)::(fooDerived, derivedSource)::Nil)
+ if (!existsText(cmt2, "Base method has documentation."))
+ println("Unexpected foo method comment:"+cmt2)
+ }
+}
diff --git a/test/disabled/presentation/doc/src/Class.scala b/test/disabled/presentation/doc/src/Class.scala
new file mode 100755
index 0000000..a974bd6
--- /dev/null
+++ b/test/disabled/presentation/doc/src/Class.scala
@@ -0,0 +1 @@
+object Class
\ No newline at end of file
diff --git a/test/disabled/presentation/doc/src/p/Base.scala b/test/disabled/presentation/doc/src/p/Base.scala
new file mode 100755
index 0000000..9031de3
--- /dev/null
+++ b/test/disabled/presentation/doc/src/p/Base.scala
@@ -0,0 +1,11 @@
+package p
+
+/**
+ * @define BaseComment $BaseVar comment.
+ */
+trait Base {
+ /**
+ * Base method has documentation.
+ */
+ def foo: String
+}
diff --git a/test/disabled/presentation/doc/src/p/Derived.scala b/test/disabled/presentation/doc/src/p/Derived.scala
new file mode 100755
index 0000000..1a9c9a2
--- /dev/null
+++ b/test/disabled/presentation/doc/src/p/Derived.scala
@@ -0,0 +1,9 @@
+package p
+
+/**
+ * $BaseComment
+ * @define BaseVar Derived
+ */
+class Derived extends Base {
+ def foo = ""
+}
diff --git a/test/files/jvm/bug680.check b/test/disabled/presentation/ide-bug-1000450.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/disabled/presentation/ide-bug-1000450.check
diff --git a/test/disabled/presentation/ide-bug-1000450/Runner.scala b/test/disabled/presentation/ide-bug-1000450/Runner.scala
new file mode 100644
index 0000000..7c16a57
--- /dev/null
+++ b/test/disabled/presentation/ide-bug-1000450/Runner.scala
@@ -0,0 +1,6 @@
+import scala.tools.nsc.interactive.tests._
+
+import scala.tools.nsc.interactive.Response
+import scala.tools.nsc.util.Position
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/disabled/presentation/ide-bug-1000450/src/Ranges.scala b/test/disabled/presentation/ide-bug-1000450/src/Ranges.scala
new file mode 100644
index 0000000..2295d06
--- /dev/null
+++ b/test/disabled/presentation/ide-bug-1000450/src/Ranges.scala
@@ -0,0 +1,5 @@
+object Ranges {
+ // (1 to 10). // (1) this works as expected
+
+ (1 to 10).toS /*!*/ // (2) this fails
+}
\ No newline at end of file
diff --git a/test/disabled/presentation/ide-bug-1000508.check b/test/disabled/presentation/ide-bug-1000508.check
new file mode 100644
index 0000000..5f4d74f
--- /dev/null
+++ b/test/disabled/presentation/ide-bug-1000508.check
@@ -0,0 +1,163 @@
+reload: Foo.scala
+
+askTypeCompletion at Foo.scala(2,4)
+================================================================================
+[response] aksTypeCompletion at (2,4)
+retrieved 163 members
+`lazy value numFractional[Double]`
+`method !=(x$1: Any)Boolean`
+`method !=(x$1: AnyRef)Boolean`
+`method !=(x: Byte)Boolean`
+`method !=(x: Char)Boolean`
+`method !=(x: Double)Boolean`
+`method !=(x: Float)Boolean`
+`method !=(x: Int)Boolean`
+`method !=(x: Long)Boolean`
+`method !=(x: Short)Boolean`
+`method ##()Int`
+`method %(x: Byte)Double`
+`method %(x: Char)Double`
+`method %(x: Double)Double`
+`method %(x: Float)Double`
+`method %(x: Int)Double`
+`method %(x: Long)Double`
+`method %(x: Short)Double`
+`method *(x: Byte)Double`
+`method *(x: Char)Double`
+`method *(x: Double)Double`
+`method *(x: Float)Double`
+`method *(x: Int)Double`
+`method *(x: Long)Double`
+`method *(x: Short)Double`
+`method +(x: Byte)Double`
+`method +(x: Char)Double`
+`method +(x: Double)Double`
+`method +(x: Float)Double`
+`method +(x: Int)Double`
+`method +(x: Long)Double`
+`method +(x: Short)Double`
+`method +(x: String)String`
+`method -(x: Byte)Double`
+`method -(x: Char)Double`
+`method -(x: Double)Double`
+`method -(x: Float)Double`
+`method -(x: Int)Double`
+`method -(x: Long)Double`
+`method -(x: Short)Double`
+`method ->[B](y: B)(Double, B)`
+`method /(x: Byte)Double`
+`method /(x: Char)Double`
+`method /(x: Double)Double`
+`method /(x: Float)Double`
+`method /(x: Int)Double`
+`method /(x: Long)Double`
+`method /(x: Short)Double`
+`method <(x: Byte)Boolean`
+`method <(x: Char)Boolean`
+`method <(x: Double)Boolean`
+`method <(x: Float)Boolean`
+`method <(x: Int)Boolean`
+`method <(x: Long)Boolean`
+`method <(x: Short)Boolean`
+`method <=(x: Byte)Boolean`
+`method <=(x: Char)Boolean`
+`method <=(x: Double)Boolean`
+`method <=(x: Float)Boolean`
+`method <=(x: Int)Boolean`
+`method <=(x: Long)Boolean`
+`method <=(x: Short)Boolean`
+`method ==(x$1: Any)Boolean`
+`method ==(x$1: AnyRef)Boolean`
+`method ==(x: Byte)Boolean`
+`method ==(x: Char)Boolean`
+`method ==(x: Double)Boolean`
+`method ==(x: Float)Boolean`
+`method ==(x: Int)Boolean`
+`method ==(x: Long)Boolean`
+`method ==(x: Short)Boolean`
+`method >(x: Byte)Boolean`
+`method >(x: Char)Boolean`
+`method >(x: Double)Boolean`
+`method >(x: Float)Boolean`
+`method >(x: Int)Boolean`
+`method >(x: Long)Boolean`
+`method >(x: Short)Boolean`
+`method >=(x: Byte)Boolean`
+`method >=(x: Char)Boolean`
+`method >=(x: Double)Boolean`
+`method >=(x: Float)Boolean`
+`method >=(x: Int)Boolean`
+`method >=(x: Long)Boolean`
+`method >=(x: Short)Boolean`
+`method abs=> Double`
+`method asInstanceOf[T0]=> T0`
+`method byteValue()Byte`
+`method ceil=> Double`
+`method clone()java.lang.Object`
+`method compare(y: Double)Int`
+`method compareTo(that: Double)Int`
+`method compareTo(x$1: java.lang.Double)Int`
+`method doubleValue()Double`
+`method ensuring(cond: Boolean)Double`
+`method ensuring(cond: Boolean, msg: => Any)Double`
+`method ensuring(cond: Double => Boolean)Double`
+`method ensuring(cond: Double => Boolean, msg: => Any)Double`
+`method eq(x$1: AnyRef)Boolean`
+`method equals(x$1: Any)Boolean`
+`method finalize()Unit`
+`method floatValue()Float`
+`method floor=> Double`
+`method formatted(fmtstr: String)String`
+`method hashCode()Int`
+`method intValue()Int`
+`method isInfinite()Boolean`
+`method isInfinity=> Boolean`
+`method isInstanceOf[T0]=> Boolean`
+`method isNaN()Boolean`
+`method isNegInfinity=> Boolean`
+`method isPosInfinity=> Boolean`
+`method isValidByte=> Boolean`
+`method isValidChar=> Boolean`
+`method isValidInt=> Boolean`
+`method isValidShort=> Boolean`
+`method isWhole()Boolean`
+`method longValue()Long`
+`method max(that: Double)Double`
+`method min(that: Double)Double`
+`method ne(x$1: AnyRef)Boolean`
+`method notify()Unit`
+`method notifyAll()Unit`
+`method round=> Long`
+`method shortValue()Short`
+`method signum=> Int`
+`method synchronized[T0](x$1: T0)T0`
+`method to(end: Double)Range.Partial[Double,scala.collection.immutable.NumericRange[Double]]`
+`method to(end: Double, step: Double)scala.collection.immutable.NumericRange.Inclusive[Double]`
+`method toByte=> Byte`
+`method toChar=> Char`
+`method toDegrees=> Double`
+`method toDouble=> Double`
+`method toFloat=> Float`
+`method toInt=> Int`
+`method toLong=> Long`
+`method toRadians=> Double`
+`method toShort=> Short`
+`method toString()java.lang.String`
+`method unary_+=> Double`
+`method unary_-=> Double`
+`method underlying()AnyRef`
+`method unifiedPrimitiveEquals(x: Any)Boolean`
+`method unifiedPrimitiveHashcode()Int`
+`method until(end: Double)Range.Partial[Double,scala.collection.immutable.NumericRange[Double]]`
+`method until(end: Double, step: Double)scala.collection.immutable.NumericRange.Exclusive[Double]`
+`method wait()Unit`
+`method wait(x$1: Long)Unit`
+`method wait(x$1: Long, x$2: Int)Unit`
+`method →[B](y: B)(Double, B)`
+`type ResultWithoutStepRange.Partial[Double,scala.collection.immutable.NumericRange[Double]]`
+`value integralNumNumeric.DoubleAsIfIntegral.type`
+`value ordOrdering[Double]`
+`value selfAny`
+`value selfDouble`
+`value xDouble`
+================================================================================
diff --git a/test/disabled/presentation/ide-bug-1000508/Runner.scala b/test/disabled/presentation/ide-bug-1000508/Runner.scala
new file mode 100644
index 0000000..1ef3cf9
--- /dev/null
+++ b/test/disabled/presentation/ide-bug-1000508/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/disabled/presentation/ide-bug-1000508/src/Foo.scala b/test/disabled/presentation/ide-bug-1000508/src/Foo.scala
new file mode 100644
index 0000000..cb5d9ad
--- /dev/null
+++ b/test/disabled/presentation/ide-bug-1000508/src/Foo.scala
@@ -0,0 +1,3 @@
+object Foo {
+ 1./*!*/
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/disabled/presentation/ide-bug-1000545.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/disabled/presentation/ide-bug-1000545.check
diff --git a/test/disabled/presentation/ide-bug-1000545/Runner.scala b/test/disabled/presentation/ide-bug-1000545/Runner.scala
new file mode 100644
index 0000000..1ef3cf9
--- /dev/null
+++ b/test/disabled/presentation/ide-bug-1000545/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/disabled/presentation/ide-bug-1000545/src/CompletionFails.scala b/test/disabled/presentation/ide-bug-1000545/src/CompletionFails.scala
new file mode 100644
index 0000000..917fd43
--- /dev/null
+++ b/test/disabled/presentation/ide-bug-1000545/src/CompletionFails.scala
@@ -0,0 +1,25 @@
+/**
+ * Presentation compiler returns the wrong answer for this test.
+ *
+ * Below is the current result of running this test:
+ *
+ * Mircos-iMac:test mirco$ ./partest files/presentation/ticket-1000545 --show-log | sed 's/< //'
+ * Testing individual files
+ * testing: [...]/files/presentation/ticket-1000545 [FAILED]
+ * 1,8d0
+ * reload: CompletionFails.scala
+ *
+ * askTypeCompletion at CompletionFails.scala(2,19)
+ * ================================================================================
+ * [response] aksTypeCompletion at (2,19)
+ * retrieved 1 members
+ * TypeMember(method <clinit>,()Unit,false,false,<none>)
+ * ================================================================================
+ *
+ * 1 of 1 tests failed (elapsed time: 00:00:05)
+ *
+ * @note The expected result was the list of static methods for class @see java.io.Console
+ */
+object CompletionFails {
+ java.io.Console. /*!*/
+}
\ No newline at end of file
diff --git a/test/disabled/presentation/ide-t1000620.check b/test/disabled/presentation/ide-t1000620.check
new file mode 100644
index 0000000..3518d7e
--- /dev/null
+++ b/test/disabled/presentation/ide-t1000620.check
@@ -0,0 +1,37 @@
+reload: A.scala, B.scala
+
+askTypeCompletion at B.scala(6,6)
+================================================================================
+[response] aksTypeCompletion at (6,6)
+retrieved 36 members
+`method !=(x$1: Any)Boolean`
+`method !=(x$1: AnyRef)Boolean`
+`method ##()Int`
+`method +(other: String)java.lang.String`
+`method ->[B](y: B)(a.A, B)`
+`method ==(x$1: Any)Boolean`
+`method ==(x$1: AnyRef)Boolean`
+`method asInstanceOf[T0]=> T0`
+`method clone()java.lang.Object`
+`method ensuring(cond: Boolean)a.A`
+`method ensuring(cond: Boolean, msg: => Any)a.A`
+`method ensuring(cond: a.A => Boolean)a.A`
+`method ensuring(cond: a.A => Boolean, msg: => Any)a.A`
+`method eq(x$1: AnyRef)Boolean`
+`method equals(x$1: Any)Boolean`
+`method finalize()Unit`
+`method formatted(fmtstr: String)String`
+`method hashCode()Int`
+`method isInstanceOf[T0]=> Boolean`
+`method ne(x$1: AnyRef)Boolean`
+`method notify()Unit`
+`method notifyAll()Unit`
+`method synchronized[T0](x$1: T0)T0`
+`method toString()java.lang.String`
+`method wait()Unit`
+`method wait(x$1: Long)Unit`
+`method wait(x$1: Long, x$2: Int)Unit`
+`method →[B](y: B)(a.A, B)`
+`value selfAny`
+`value xa.A`
+================================================================================
diff --git a/test/disabled/presentation/ide-t1000620/Runner.scala b/test/disabled/presentation/ide-t1000620/Runner.scala
new file mode 100644
index 0000000..1ef3cf9
--- /dev/null
+++ b/test/disabled/presentation/ide-t1000620/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/disabled/presentation/ide-t1000620/src/a/A.scala b/test/disabled/presentation/ide-t1000620/src/a/A.scala
new file mode 100644
index 0000000..42a9b34
--- /dev/null
+++ b/test/disabled/presentation/ide-t1000620/src/a/A.scala
@@ -0,0 +1,5 @@
+package a
+class A {
+ private var a= 0
+ protected var b= 0
+}
diff --git a/test/disabled/presentation/ide-t1000620/src/b/B.scala b/test/disabled/presentation/ide-t1000620/src/b/B.scala
new file mode 100644
index 0000000..b579f97
--- /dev/null
+++ b/test/disabled/presentation/ide-t1000620/src/b/B.scala
@@ -0,0 +1,8 @@
+package b
+import a.A
+class B {
+ def main(args: Array[String]) {
+ val a = new A()
+ a./*!*/
+ }
+}
diff --git a/test/disabled/presentation/shutdown-deadlock.check b/test/disabled/presentation/shutdown-deadlock.check
new file mode 100644
index 0000000..ddcb4ff
--- /dev/null
+++ b/test/disabled/presentation/shutdown-deadlock.check
@@ -0,0 +1,3 @@
+reload: arrays.scala
+reload: arrays.scala
+No timeouts
diff --git a/test/disabled/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala b/test/disabled/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala
new file mode 100644
index 0000000..cef9d2a
--- /dev/null
+++ b/test/disabled/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala
@@ -0,0 +1,45 @@
+import scala.tools.nsc.interactive._
+import tests._
+
+object Test extends InteractiveTest {
+ val Reps = 30
+ import compiler._
+
+ def askSomething(): Response[Tree] = {
+ // println("*")
+ Thread.sleep(50)
+ ask { compiler.askStructure(true)(sourceFiles.head, _) }
+ }
+
+ def fireAsks() {
+ val jobs1 = for (i <- 1 until Reps) yield {
+ if (i % 10 == 0) {
+ askReload(sourceFiles)
+ }
+ askSomething
+ }
+
+ for ((j, i) <- jobs1.zipWithIndex) {
+ j.get(40000) match {
+ case None =>
+ println(i + ": TIMEOUT")
+ exit(1) // no need to delay the test any longer
+ case r =>
+ }
+ }
+ compiler.askShutdown()
+
+ println("No timeouts")
+ }
+
+ override def main(args: Array[String]) {
+ new Thread("Asking") {
+ override def run() {
+ fireAsks()
+ }
+ }.start()
+
+ Thread.sleep(800)
+ compiler.askShutdown()
+ }
+}
\ No newline at end of file
diff --git a/test/disabled/presentation/shutdown-deadlock/src/arrays.scala b/test/disabled/presentation/shutdown-deadlock/src/arrays.scala
new file mode 100644
index 0000000..ecebc78
--- /dev/null
+++ b/test/disabled/presentation/shutdown-deadlock/src/arrays.scala
@@ -0,0 +1,937 @@
+//############################################################################
+// Arrays
+//############################################################################
+
+//############################################################################
+
+object Test {
+
+ //##########################################################################
+ // Types
+
+ type Strings = List[String]
+ type Map = scala.collection.Map[Int, Any]
+ type HashMap = scala.collection.mutable.HashMap[Int, Any]
+ type TreeMap = scala.collection.immutable.TreeMap[Int, Any]
+
+ //##########################################################################
+ // Identity Functions
+
+ def id_Ta_T[T <: Any ](x: T): T = x;
+ def id_Tr_T[T <: AnyRef ](x: T): T = x;
+ def id_To_T[T <: Object ](x: T): T = x;
+
+ def id_Ta_a[T <: Any ](x: T): Any = x;
+ def id_Tr_a[T <: AnyRef ](x: T): Any = x;
+ def id_To_a[T <: Object ](x: T): Any = x;
+
+ def id_Tr_r[T <: AnyRef ](x: T): AnyRef = x;
+ def id_To_r[T <: Object ](x: T): AnyRef = x;
+
+ def id_To_o[T <: Object ](x: T): Object = x;
+
+ def id_TSa_T [S <: Any , T <: Array[S]](x: T): T = x;
+ def id_TSv_T [S <: AnyVal , T <: Array[S]](x: T): T = x;
+ def id_TSr_T [S <: AnyRef , T <: Array[S]](x: T): T = x;
+ def id_TSo_T [S <: Object , T <: Array[S]](x: T): T = x;
+ def id_TSm_T [S <: Map , T <: Array[S]](x: T): T = x;
+ def id_TSn_T [S <: Strings, T <: Array[S]](x: T): T = x;
+
+ def id_TSa_Ss[S <: Any , T <: Array[S]](x: T): Array[S] = x;
+ def id_TSv_Ss[S <: AnyVal , T <: Array[S]](x: T): Array[S] = x;
+ def id_TSr_Ss[S <: AnyRef , T <: Array[S]](x: T): Array[S] = x;
+ def id_TSo_Ss[S <: Object , T <: Array[S]](x: T): Array[S] = x;
+ def id_TSm_Ss[S <: Map , T <: Array[S]](x: T): Array[S] = x;
+ def id_TSn_Ss[S <: Strings, T <: Array[S]](x: T): Array[S] = x;
+
+ def id_TSa_a [S <: Any , T <: Array[S]](x: T): Any = x;
+ def id_TSv_a [S <: AnyVal , T <: Array[S]](x: T): Any = x;
+ def id_TSr_a [S <: AnyRef , T <: Array[S]](x: T): Any = x;
+ def id_TSo_a [S <: Object , T <: Array[S]](x: T): Any = x;
+ def id_TSm_a [S <: Map , T <: Array[S]](x: T): Any = x;
+ def id_TSn_a [S <: Strings, T <: Array[S]](x: T): Any = x;
+
+ def id_TSa_r [S <: Any , T <: Array[S]](x: T): AnyRef = x;
+ def id_TSv_r [S <: AnyVal , T <: Array[S]](x: T): AnyRef = x;
+ def id_TSr_r [S <: AnyRef , T <: Array[S]](x: T): AnyRef = x;
+ def id_TSo_r [S <: Object , T <: Array[S]](x: T): AnyRef = x;
+ def id_TSm_r [S <: Map , T <: Array[S]](x: T): AnyRef = x;
+ def id_TSn_r [S <: Strings, T <: Array[S]](x: T): AnyRef = x;
+
+ def id_TSa_o [S <: Any , T <: Array[S]](x: T): Object = x;
+ def id_TSv_o [S <: AnyVal , T <: Array[S]](x: T): Object = x;
+ def id_TSr_o [S <: AnyRef , T <: Array[S]](x: T): Object = x;
+ def id_TSo_o [S <: Object , T <: Array[S]](x: T): Object = x;
+ def id_TSm_o [S <: Map , T <: Array[S]](x: T): Object = x;
+ def id_TSn_o [S <: Strings, T <: Array[S]](x: T): Object = x;
+
+ def id_Sas_Ss[S <: Any ](xs: Array[S]): Array[S] = xs;
+ def id_Svs_Ss[S <: AnyVal ](xs: Array[S]): Array[S] = xs;
+ def id_Srs_Ss[S <: AnyRef ](xs: Array[S]): Array[S] = xs;
+ def id_Sos_Ss[S <: Object ](xs: Array[S]): Array[S] = xs;
+ def id_Sms_Ss[S <: Map ](xs: Array[S]): Array[S] = xs;
+ def id_Sns_Ss[S <: Strings](xs: Array[S]): Array[S] = xs;
+
+ def id_Sas_a [S <: Any ](xs: Array[S]): Any = xs;
+ def id_Svs_a [S <: AnyVal ](xs: Array[S]): Any = xs;
+ def id_Srs_a [S <: AnyRef ](xs: Array[S]): Any = xs;
+ def id_Sos_a [S <: Object ](xs: Array[S]): Any = xs;
+ def id_Sms_a [S <: Map ](xs: Array[S]): Any = xs;
+ def id_Sns_a [S <: Strings](xs: Array[S]): Any = xs;
+
+ def id_Sas_r [S <: Any ](xs: Array[S]): AnyRef = xs;
+ def id_Svs_r [S <: AnyVal ](xs: Array[S]): AnyRef = xs;
+ def id_Srs_r [S <: AnyRef ](xs: Array[S]): AnyRef = xs;
+ def id_Sos_r [S <: Object ](xs: Array[S]): AnyRef = xs;
+ def id_Sms_r [S <: Map ](xs: Array[S]): AnyRef = xs;
+ def id_Sns_r [S <: Strings](xs: Array[S]): AnyRef = xs;
+
+ def id_Sas_o [S <: Any ](xs: Array[S]): Object = xs;
+ def id_Svs_o [S <: AnyVal ](xs: Array[S]): Object = xs;
+ def id_Srs_o [S <: AnyRef ](xs: Array[S]): Object = xs;
+ def id_Sos_o [S <: Object ](xs: Array[S]): Object = xs;
+ def id_Sms_o [S <: Map ](xs: Array[S]): Object = xs;
+ def id_Sns_o [S <: Strings](xs: Array[S]): Object = xs;
+
+ //##########################################################################
+ // Generic Checks
+
+ type Check[T] = Array[T] => Unit;
+
+ var checks: Int = 0;
+
+ def check(test0: Boolean, actual: Any, expected: Any) {
+ val test1: Boolean = actual == expected;
+ if (!test0 || !test1) {
+ val s0 = if (test0) "ok" else "KO";
+ val s1 = if (test1) "ok" else "KO";
+ val s2 = actual.toString();
+ val s3 = expected.toString();
+ error(s0 + " - " + s1 + ": " + s2 + " != " + s3);
+ }
+ checks += 1
+ }
+
+ def check_Ta[T <: Any ](xs: Array[T], l: Int, x0: T, c: Check[T]) {
+ check(xs.length == l, xs.length, l);
+ check(xs(0) == x0, xs(0), x0);
+ c(xs);
+ }
+
+ def check_Tv[T <: AnyVal ](xs: Array[T], l: Int, x0: T, c: Check[T]) {
+ check(xs.length == l, xs.length, l);
+ check(xs(0) == x0, xs(0), x0);
+ check_Ta(xs, l, x0, c);
+ c(xs);
+ }
+
+ def check_Tr[T <: AnyRef ](xs: Array[T], l: Int, x0: T, c: Check[T]) {
+ check(xs.length == l, xs.length, l);
+ check(xs(0) == x0, xs(0), x0);
+ check_Ta(xs, l, x0, c);
+ c(xs);
+ }
+
+ def check_To[T <: Object ](xs: Array[T], l: Int, x0: T, c: Check[T]) {
+ check(xs.length == l, xs.length, l);
+ check(xs(0) == x0, xs(0), x0);
+ check_Ta(xs, l, x0, c);
+ check_Tr(xs, l, x0, c);
+ c(xs);
+ }
+
+ def check_Tm[T <: Map ](xs: Array[T], l: Int, x0: T, c: Check[T]) {
+ check(xs.length == l, xs.length, l)
+ check(xs(0) == x0, xs(0), x0)
+ check_Ta(xs, l, x0, c)
+ check_Tr(xs, l, x0, c)
+ check_To(xs, l, x0, c)
+ c(xs)
+ }
+
+ def check_Tn[T <: Strings](xs: Array[T], l: Int, x0: T, c: Check[T]) {
+ check(xs.length == l, xs.length, l)
+ check(xs(0) == x0, xs(0), x0)
+ check_Ta(xs, l, x0, c)
+ check_Tr(xs, l, x0, c)
+ check_To(xs, l, x0, c)
+ c(xs)
+ }
+
+ def checkT2368() {
+ val arr = Array(1, 2, 3)
+ arr(0) += 1
+ assert(arr(0) == 2)
+ }
+
+ //##########################################################################
+ // Values
+
+ val u0: Unit = ();
+ val u1: Unit = ();
+
+ val z0: Boolean = false;
+ val z1: Boolean = true;
+
+ val b0: Byte = Byte.MinValue;
+ val b1: Byte = 1;
+ val b2: Byte = Byte.MaxValue;
+
+ val s0: Short = Short.MinValue;
+ val s1: Short = 2;
+ val s2: Short = Short.MaxValue;
+
+ val c0: Char = Char.MinValue;
+ val c1: Char = '3';
+ val c2: Char = Char.MaxValue;
+
+ val i0: Int = Int.MinValue;
+ val i1: Int = 4;
+ val i2: Int = Int.MinValue;
+
+ val l0: Long = Long.MinValue;
+ val l1: Int = 5;
+ val l2: Long = Long.MaxValue;
+
+ val f0: Float = Float.MinValue;
+ val f1: Int = 6;
+ val f2: Float = Float.MaxValue;
+
+ val d0: Double = Double.MinValue;
+ val d1: Int = 7;
+ val d2: Double = Double.MaxValue;
+
+ val a0: Unit = ();
+ val a1: Boolean = false;
+ val a2: Int = 0;
+ val a3: Null = null;
+ val a4: String = "a-z";
+ val a5: Symbol = 'token;
+ val a6: HashMap = new HashMap();
+ val a7: TreeMap = scala.collection.immutable.TreeMap.empty[Int, Any];
+ val a8: Strings = List("a", "z");
+
+ val v0: Unit = ();
+ val v1: Boolean = false;
+ val v2: Int = 0;
+ val v3: Long = l2;
+ val v4: Float = f2;
+ val v5: Double = d2;
+
+ val r0: Null = a3;
+ val r1: String = a4;
+ val r2: Symbol = a5;
+ val r3: HashMap = a6;
+ val r4: TreeMap = a7;
+ val r5: Strings = a8;
+
+ val o0: Null = r0;
+ val o1: String = r1;
+ val o2: Symbol = r2;
+ val o3: HashMap = r3;
+ val o4: TreeMap = r4;
+ val o5: Strings = r5;
+
+ val m0: Null = r0;
+ val m1: HashMap = r3;
+ val m2: TreeMap = r4;
+
+ val n0: Null = r0;
+ val n1: Strings = r5;
+ val n2: Nil.type= Nil;
+
+ //##########################################################################
+ // Specific Checks
+
+ def ucheck(xs: Array[Unit ]): Unit = {
+ check(xs.length == 2, xs.length, 2);
+ check(xs(0) == u0, xs(0), u0);
+ check(xs(1) == u1, xs(1), u1);
+ }
+
+ def zcheck(xs: Array[Boolean]): Unit = {
+ check(xs.length == 2, xs.length, 2);
+ check(xs(0) == z0, xs(0), z0);
+ check(xs(1) == z1, xs(1), z1);
+ }
+
+ def bcheck(xs: Array[Byte ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == b0, xs(0), b0);
+ check(xs(1) == b1, xs(1), b1);
+ check(xs(2) == b2, xs(2), b2);
+ }
+
+ def scheck(xs: Array[Short ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == s0, xs(0), s0);
+ check(xs(1) == s1, xs(1), s1);
+ check(xs(2) == s2, xs(2), s2);
+ }
+
+ def ccheck(xs: Array[Char ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == c0, xs(0), c0);
+ check(xs(1) == c1, xs(1), c1);
+ check(xs(2) == c2, xs(2), c2);
+ }
+
+ def icheck(xs: Array[Int ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == i0, xs(0), i0);
+ check(xs(1) == i1, xs(1), i1);
+ check(xs(2) == i2, xs(2), i2);
+ }
+
+ def lcheck(xs: Array[Long ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == l0, xs(0), l0);
+ check(xs(1) == l1, xs(1), l1: Long); // !!! : Long
+ check(xs(2) == l2, xs(2), l2);
+ }
+
+ def fcheck(xs: Array[Float ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == f0, xs(0), f0);
+ check(xs(1) == f1, xs(1), f1: Float); // !!! : Float
+ check(xs(2) == f2, xs(2), f2);
+ }
+
+ def dcheck(xs: Array[Double ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == d0, xs(0), d0);
+ check(xs(1) == d1, xs(1), d1: Double); // !!! : Double
+ check(xs(2) == d2, xs(2), d2);
+ }
+
+ def rcheck(xs: Array[AnyRef ]): Unit = {
+ check(xs.length == 6, xs.length, 6);
+ check(xs(0) == r0, xs(0), r0);
+ check(xs(1) == r1, xs(1), r1);
+ check(xs(2) == r2, xs(2), r2);
+ check(xs(3) == r3, xs(3), r3);
+ check(xs(4) == r4, xs(4), r4);
+ check(xs(5) == r5, xs(5), r5);
+ }
+
+ def ocheck(xs: Array[Object ]): Unit = {
+ check(xs.length == 6, xs.length, 6);
+ check(xs(0) == o0, xs(0), o0);
+ check(xs(1) == o1, xs(1), o1);
+ check(xs(2) == o2, xs(2), o2);
+ check(xs(3) == o3, xs(3), o3);
+ check(xs(4) == o4, xs(4), o4);
+ check(xs(5) == o5, xs(5), o5);
+ }
+
+ def mcheck(xs: Array[Map ]): Unit = {
+ check(xs.length == 3, xs.length, 3);
+ check(xs(0) == m0, xs(0), m0);
+ check(xs(1) == m1, xs(1), m1);
+ check(xs(2) == m2, xs(2), m2);
+ }
+
+ def ncheck(xs: Array[Strings]) {
+ check(xs.length == 3, xs.length, 3)
+ check(xs(0) == n0, xs(0), n0)
+ check(xs(1) == n1, xs(1), n1)
+ check(xs(2) == n2, xs(2), n2)
+ }
+
+ //##########################################################################
+ // Miscellaneous checks
+
+ def checkZip {
+ val zipped = Array("a", "b", "c").zip(Array(1, 2))
+ val expected = Array(("a",1), ("b",2))
+ check(zipped sameElements expected, zipped.toList, expected.toList)
+ }
+
+ def checkConcat { // ticket #713
+ val x1 = Array.concat(Array(1, 2), Array(3, 4))
+ val y1 = Array(1, 2, 3, 4)
+ check(x1 sameElements y1, x1.toList, y1.toList)
+ }
+
+ //##########################################################################
+ // Arrays
+
+ val uarray: Array[Unit ] = Array(u0, u1);
+ val zarray: Array[Boolean] = Array(z0, z1);
+ val barray: Array[Byte ] = Array(b0, b1, b2);
+ val sarray: Array[Short ] = Array(s0, s1, s2);
+ val carray: Array[Char ] = Array(c0, c1, c2);
+ val iarray: Array[Int ] = Array(i0, i1, i2);
+ val larray: Array[Long ] = Array(l0, l1, l2);
+ val farray: Array[Float ] = Array(f0, f1, f2);
+ val darray: Array[Double ] = Array(d0, d1, d2);
+ val rarray: Array[AnyRef ] = Array(r0, r1, r2, r4, r4, r5);
+ val oarray: Array[Object ] = Array(o0, o1, o2, o4, o4, o5);
+ val marray: Array[Map ] = Array(m0, m1, m2);
+ val narray: Array[Strings] = Array(n0, n1, n2);
+
+ //##########################################################################
+ // Main
+
+ def main(args: Array[String]): Unit = {
+
+ //######################################################################
+
+ ucheck(uarray);
+ zcheck(zarray);
+ bcheck(barray);
+ scheck(sarray);
+ ccheck(carray);
+ icheck(iarray);
+ lcheck(larray);
+ fcheck(farray);
+ dcheck(darray);
+ rcheck(rarray);
+ ocheck(oarray);
+ mcheck(marray);
+ ncheck(narray);
+
+ //######################################################################
+
+ ucheck(id_Ta_T(uarray));
+ zcheck(id_Ta_T(zarray));
+ bcheck(id_Ta_T(barray));
+ scheck(id_Ta_T(sarray));
+ ccheck(id_Ta_T(carray));
+ icheck(id_Ta_T(iarray));
+ lcheck(id_Ta_T(larray));
+ fcheck(id_Ta_T(farray));
+ dcheck(id_Ta_T(darray));
+ rcheck(id_Ta_T(rarray));
+ ocheck(id_Ta_T(oarray));
+ mcheck(id_Ta_T(marray));
+ ncheck(id_Ta_T(narray));
+
+ ucheck(id_Tr_T(uarray));
+ zcheck(id_Tr_T(zarray));
+ bcheck(id_Tr_T(barray));
+ scheck(id_Tr_T(sarray));
+ ccheck(id_Tr_T(carray));
+ icheck(id_Tr_T(iarray));
+ lcheck(id_Tr_T(larray));
+ fcheck(id_Tr_T(farray));
+ dcheck(id_Tr_T(darray));
+ rcheck(id_Tr_T(rarray));
+ ocheck(id_Tr_T(oarray));
+ mcheck(id_Tr_T(marray));
+ ncheck(id_Tr_T(narray));
+
+ ucheck(id_To_T(uarray));
+ zcheck(id_To_T(zarray));
+ bcheck(id_To_T(barray));
+ scheck(id_To_T(sarray));
+ ccheck(id_To_T(carray));
+ icheck(id_To_T(iarray));
+ lcheck(id_To_T(larray));
+ fcheck(id_To_T(farray));
+ dcheck(id_To_T(darray));
+ rcheck(id_To_T(rarray));
+ ocheck(id_To_T(oarray));
+ mcheck(id_To_T(marray));
+ ncheck(id_To_T(narray));
+
+ ucheck(id_Ta_a(uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_Ta_a(zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_Ta_a(barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_Ta_a(sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_Ta_a(carray).asInstanceOf[Array[Char ]]);
+ icheck(id_Ta_a(iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_Ta_a(larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_Ta_a(farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_Ta_a(darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_Ta_a(rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_Ta_a(oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_Ta_a(marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_Ta_a(narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_Tr_a(uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_Tr_a(zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_Tr_a(barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_Tr_a(sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_Tr_a(carray).asInstanceOf[Array[Char ]]);
+ icheck(id_Tr_a(iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_Tr_a(larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_Tr_a(farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_Tr_a(darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_Tr_a(rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_Tr_a(oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_Tr_a(marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_Tr_a(narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_To_a(uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_To_a(zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_To_a(barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_To_a(sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_To_a(carray).asInstanceOf[Array[Char ]]);
+ icheck(id_To_a(iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_To_a(larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_To_a(farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_To_a(darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_To_a(rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_To_a(oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_To_a(marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_To_a(narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_Tr_r(uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_Tr_r(zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_Tr_r(barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_Tr_r(sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_Tr_r(carray).asInstanceOf[Array[Char ]]);
+ icheck(id_Tr_r(iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_Tr_r(larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_Tr_r(farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_Tr_r(darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_Tr_r(rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_Tr_r(oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_Tr_r(marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_Tr_r(narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_To_r(uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_To_r(zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_To_r(barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_To_r(sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_To_r(carray).asInstanceOf[Array[Char ]]);
+ icheck(id_To_r(iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_To_r(larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_To_r(farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_To_r(darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_To_r(rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_To_r(oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_To_r(marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_To_r(narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_To_o(uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_To_o(zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_To_o(barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_To_o(sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_To_o(carray).asInstanceOf[Array[Char ]]);
+ icheck(id_To_o(iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_To_o(larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_To_o(farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_To_o(darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_To_o(rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_To_o(oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_To_o(marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_To_o(narray).asInstanceOf[Array[Strings]]);
+
+ //######################################################################
+
+ ucheck(id_TSa_T [Unit , Array[Unit ]](uarray));
+ zcheck(id_TSa_T [Boolean, Array[Boolean]](zarray));
+ bcheck(id_TSa_T [Byte , Array[Byte ]](barray));
+ scheck(id_TSa_T [Short , Array[Short ]](sarray));
+ ccheck(id_TSa_T [Char , Array[Char ]](carray));
+ icheck(id_TSa_T [Int , Array[Int ]](iarray));
+ lcheck(id_TSa_T [Long , Array[Long ]](larray));
+ fcheck(id_TSa_T [Float , Array[Float ]](farray));
+ dcheck(id_TSa_T [Double , Array[Double ]](darray));
+ rcheck(id_TSa_T [AnyRef , Array[AnyRef ]](rarray));
+ ocheck(id_TSa_T [Object , Array[Object ]](oarray));
+ mcheck(id_TSa_T [Map , Array[Map ]](marray));
+ ncheck(id_TSa_T [Strings, Array[Strings]](narray));
+
+ ucheck(id_TSv_T [Unit , Array[Unit ]](uarray));
+ zcheck(id_TSv_T [Boolean, Array[Boolean]](zarray));
+ bcheck(id_TSv_T [Byte , Array[Byte ]](barray));
+ scheck(id_TSv_T [Short , Array[Short ]](sarray));
+ ccheck(id_TSv_T [Char , Array[Char ]](carray));
+ icheck(id_TSv_T [Int , Array[Int ]](iarray));
+ lcheck(id_TSv_T [Long , Array[Long ]](larray));
+ fcheck(id_TSv_T [Float , Array[Float ]](farray));
+ dcheck(id_TSv_T [Double , Array[Double ]](darray));
+
+ rcheck(id_TSr_T [AnyRef , Array[AnyRef ]](rarray));
+ ocheck(id_TSr_T [Object , Array[Object ]](oarray));
+ mcheck(id_TSr_T [Map , Array[Map ]](marray));
+ ncheck(id_TSr_T [Strings, Array[Strings]](narray));
+
+ rcheck(id_TSo_T [AnyRef , Array[AnyRef ]](rarray));
+ ocheck(id_TSo_T [Object , Array[Object ]](oarray));
+ mcheck(id_TSo_T [Map , Array[Map ]](marray));
+ ncheck(id_TSo_T [Strings, Array[Strings]](narray));
+
+ mcheck(id_TSm_T [Map , Array[Map ]](marray));
+
+ ncheck(id_TSn_T [Strings, Array[Strings]](narray));
+
+ //######################################################################
+
+ ucheck(id_TSa_Ss[Unit , Array[Unit ]](uarray));
+ zcheck(id_TSa_Ss[Boolean, Array[Boolean]](zarray));
+ bcheck(id_TSa_Ss[Byte , Array[Byte ]](barray));
+ scheck(id_TSa_Ss[Short , Array[Short ]](sarray));
+ ccheck(id_TSa_Ss[Char , Array[Char ]](carray));
+ icheck(id_TSa_Ss[Int , Array[Int ]](iarray));
+ lcheck(id_TSa_Ss[Long , Array[Long ]](larray));
+ fcheck(id_TSa_Ss[Float , Array[Float ]](farray));
+ dcheck(id_TSa_Ss[Double , Array[Double ]](darray));
+ rcheck(id_TSa_Ss[AnyRef , Array[AnyRef ]](rarray));
+ ocheck(id_TSa_Ss[Object , Array[Object ]](oarray));
+ mcheck(id_TSa_Ss[Map , Array[Map ]](marray));
+ ncheck(id_TSa_Ss[Strings, Array[Strings]](narray));
+
+ ucheck(id_TSv_Ss[Unit , Array[Unit ]](uarray));
+ zcheck(id_TSv_Ss[Boolean, Array[Boolean]](zarray));
+ bcheck(id_TSv_Ss[Byte , Array[Byte ]](barray));
+ scheck(id_TSv_Ss[Short , Array[Short ]](sarray));
+ ccheck(id_TSv_Ss[Char , Array[Char ]](carray));
+ icheck(id_TSv_Ss[Int , Array[Int ]](iarray));
+ lcheck(id_TSv_Ss[Long , Array[Long ]](larray));
+ fcheck(id_TSv_Ss[Float , Array[Float ]](farray));
+ dcheck(id_TSv_Ss[Double , Array[Double ]](darray));
+
+ rcheck(id_TSr_Ss[AnyRef , Array[AnyRef ]](rarray));
+ ocheck(id_TSr_Ss[Object , Array[Object ]](oarray));
+ mcheck(id_TSr_Ss[Map , Array[Map ]](marray));
+ ncheck(id_TSr_Ss[Strings, Array[Strings]](narray));
+
+ rcheck(id_TSo_Ss[AnyRef , Array[AnyRef ]](rarray));
+ ocheck(id_TSo_Ss[Object , Array[Object ]](oarray));
+ mcheck(id_TSo_Ss[Map , Array[Map ]](marray));
+ ncheck(id_TSo_Ss[Strings, Array[Strings]](narray));
+
+ mcheck(id_TSm_Ss[Map , Array[Map ]](marray));
+
+ ncheck(id_TSn_Ss[Strings, Array[Strings]](narray));
+
+ //######################################################################
+
+ ucheck(id_TSa_a [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSa_a [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSa_a [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSa_a [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSa_a [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSa_a [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSa_a [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSa_a [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSa_a [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_TSa_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSa_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSa_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSa_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_TSv_a [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSv_a [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSv_a [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSv_a [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSv_a [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSv_a [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSv_a [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSv_a [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSv_a [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+
+ rcheck(id_TSr_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSr_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSr_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSr_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ rcheck(id_TSo_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSo_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSo_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSo_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ mcheck(id_TSm_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+
+ ncheck(id_TSn_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ //######################################################################
+
+ ucheck(id_TSa_r [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSa_r [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSa_r [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSa_r [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSa_r [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSa_r [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSa_r [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSa_r [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSa_r [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_TSa_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSa_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSa_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSa_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_TSv_r [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSv_r [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSv_r [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSv_r [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSv_r [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSv_r [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSv_r [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSv_r [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSv_r [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+
+ rcheck(id_TSr_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSr_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSr_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSr_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ rcheck(id_TSo_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSo_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSo_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSo_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ mcheck(id_TSm_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+
+ ncheck(id_TSn_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ //######################################################################
+
+ ucheck(id_TSa_o [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSa_o [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSa_o [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSa_o [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSa_o [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSa_o [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSa_o [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSa_o [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSa_o [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_TSa_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSa_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSa_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSa_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_TSv_o [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSv_o [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSv_o [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSv_o [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSv_o [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSv_o [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSv_o [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSv_o [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSv_o [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+
+ rcheck(id_TSr_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSr_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSr_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSr_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ rcheck(id_TSo_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSo_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSo_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSo_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ mcheck(id_TSm_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+
+ ncheck(id_TSn_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ //######################################################################
+
+ ucheck(id_Sas_Ss[Unit ](uarray));
+ zcheck(id_Sas_Ss[Boolean](zarray));
+ bcheck(id_Sas_Ss[Byte ](barray));
+ scheck(id_Sas_Ss[Short ](sarray));
+ ccheck(id_Sas_Ss[Char ](carray));
+ icheck(id_Sas_Ss[Int ](iarray));
+ lcheck(id_Sas_Ss[Long ](larray));
+ fcheck(id_Sas_Ss[Float ](farray));
+ dcheck(id_Sas_Ss[Double ](darray));
+ rcheck(id_Sas_Ss[AnyRef ](rarray));
+ ocheck(id_Sas_Ss[Object ](oarray));
+ mcheck(id_Sas_Ss[Map ](marray));
+ ncheck(id_Sas_Ss[Strings](narray));
+
+ ucheck(id_Svs_Ss[Unit ](uarray));
+ zcheck(id_Svs_Ss[Boolean](zarray));
+ bcheck(id_Svs_Ss[Byte ](barray));
+ scheck(id_Svs_Ss[Short ](sarray));
+ ccheck(id_Svs_Ss[Char ](carray));
+ icheck(id_Svs_Ss[Int ](iarray));
+ lcheck(id_Svs_Ss[Long ](larray));
+ fcheck(id_Svs_Ss[Float ](farray));
+ dcheck(id_Svs_Ss[Double ](darray));
+
+ rcheck(id_Srs_Ss[AnyRef ](rarray));
+ ocheck(id_Srs_Ss[Object ](oarray));
+ mcheck(id_Srs_Ss[Map ](marray));
+ ncheck(id_Srs_Ss[Strings](narray));
+
+ rcheck(id_Sos_Ss[AnyRef ](rarray));
+ ocheck(id_Sos_Ss[Object ](oarray));
+ mcheck(id_Sos_Ss[Map ](marray));
+ ncheck(id_Sos_Ss[Strings](narray));
+
+ mcheck(id_Sms_Ss[Map ](marray));
+
+ ncheck(id_Sns_Ss[Strings](narray));
+
+ //######################################################################
+
+ ucheck(id_TSa_a [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSa_a [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSa_a [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSa_a [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSa_a [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSa_a [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSa_a [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSa_a [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSa_a [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_TSa_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSa_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSa_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSa_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_TSv_a [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSv_a [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSv_a [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSv_a [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSv_a [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSv_a [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSv_a [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSv_a [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSv_a [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+
+ rcheck(id_TSr_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSr_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSr_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSr_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ rcheck(id_TSo_a [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSo_a [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSo_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSo_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ mcheck(id_TSm_a [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+
+ ncheck(id_TSn_a [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ //######################################################################
+
+ ucheck(id_TSa_r [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSa_r [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSa_r [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSa_r [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSa_r [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSa_r [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSa_r [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSa_r [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSa_r [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_TSa_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSa_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSa_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSa_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_TSv_r [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSv_r [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSv_r [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSv_r [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSv_r [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSv_r [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSv_r [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSv_r [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSv_r [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+
+ rcheck(id_TSr_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSr_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSr_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSr_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ rcheck(id_TSo_r [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSo_r [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSo_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSo_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ mcheck(id_TSm_r [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+
+ ncheck(id_TSn_r [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ //######################################################################
+
+ ucheck(id_TSa_o [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSa_o [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSa_o [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSa_o [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSa_o [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSa_o [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSa_o [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSa_o [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSa_o [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+ rcheck(id_TSa_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSa_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSa_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSa_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ ucheck(id_TSv_o [Unit , Array[Unit ]](uarray).asInstanceOf[Array[Unit ]]);
+ zcheck(id_TSv_o [Boolean, Array[Boolean]](zarray).asInstanceOf[Array[Boolean]]);
+ bcheck(id_TSv_o [Byte , Array[Byte ]](barray).asInstanceOf[Array[Byte ]]);
+ scheck(id_TSv_o [Short , Array[Short ]](sarray).asInstanceOf[Array[Short ]]);
+ ccheck(id_TSv_o [Char , Array[Char ]](carray).asInstanceOf[Array[Char ]]);
+ icheck(id_TSv_o [Int , Array[Int ]](iarray).asInstanceOf[Array[Int ]]);
+ lcheck(id_TSv_o [Long , Array[Long ]](larray).asInstanceOf[Array[Long ]]);
+ fcheck(id_TSv_o [Float , Array[Float ]](farray).asInstanceOf[Array[Float ]]);
+ dcheck(id_TSv_o [Double , Array[Double ]](darray).asInstanceOf[Array[Double ]]);
+
+ rcheck(id_TSr_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSr_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSr_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSr_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ rcheck(id_TSo_o [AnyRef , Array[AnyRef ]](rarray).asInstanceOf[Array[AnyRef ]]);
+ ocheck(id_TSo_o [Object , Array[Object ]](oarray).asInstanceOf[Array[Object ]]);
+ mcheck(id_TSo_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+ ncheck(id_TSo_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ mcheck(id_TSm_o [Map , Array[Map ]](marray).asInstanceOf[Array[Map ]]);
+
+ ncheck(id_TSn_o [Strings, Array[Strings]](narray).asInstanceOf[Array[Strings]]);
+
+ //######################################################################
+
+ check_Ta(uarray, 2, u0, ucheck)
+ check_Ta(zarray, 2, z0, zcheck)
+ check_Ta(barray, 3, b0, bcheck)
+ check_Ta(sarray, 3, s0, scheck)
+ check_Ta(carray, 3, c0, ccheck)
+ check_Ta(iarray, 3, i0, icheck)
+ check_Ta(larray, 3, l0, lcheck)
+ check_Ta(farray, 3, f0, fcheck)
+ check_Ta(darray, 3, d0, dcheck)
+ check_Ta(rarray, 6, r0, rcheck)
+ check_Ta(oarray, 6, o0, ocheck)
+ check_Ta(marray, 3, m0, mcheck)
+ check_Ta(narray, 3, n0, ncheck)
+
+ check_Tv(uarray, 2, u0, ucheck)
+ check_Tv(zarray, 2, z0, zcheck)
+ check_Tv(barray, 3, b0, bcheck)
+ check_Tv(sarray, 3, s0, scheck)
+ check_Tv(carray, 3, c0, ccheck)
+ check_Tv(iarray, 3, i0, icheck)
+ check_Tv(larray, 3, l0, lcheck)
+ check_Tv(farray, 3, f0, fcheck)
+ check_Tv(darray, 3, d0, dcheck)
+
+ check_Tr(rarray, 6, r0, rcheck)
+ check_Tr(oarray, 6, o0, ocheck)
+ check_Tr(marray, 3, m0, mcheck)
+ check_Tr(narray, 3, n0, ncheck)
+
+ check_To(rarray, 6, r0, rcheck)
+ check_To(oarray, 6, o0, ocheck)
+ check_To(marray, 3, m0, mcheck)
+ check_To(narray, 3, n0, ncheck)
+
+ check_Tm(marray, 3, m0, mcheck)
+
+ check_Tn(narray, 3, n0, ncheck)
+
+ //######################################################################
+
+ checkZip
+ checkConcat
+ checkT2368()
+
+ //######################################################################
+
+ println("checks: " + checks)
+
+ //######################################################################
+ }
+
+ //##########################################################################
+}
+
diff --git a/test/disabled/presentation/simple-tests.check b/test/disabled/presentation/simple-tests.check
index b90dfce..cdb80ed 100644
--- a/test/disabled/presentation/simple-tests.check
+++ b/test/disabled/presentation/simple-tests.check
@@ -244,7 +244,7 @@ TypeMember(method disable,(s: Tester.this.settings.Setting)scala.collection.muta
TypeMember(value disable,Tester.this.settings.MultiStringSetting,false,true,<none>)
TypeMember(value elidebelow,Tester.this.settings.IntSetting,false,true,<none>)
TypeMember(method embeddedDefaults,(loader: java.lang.ClassLoader)Unit,true,true,<none>)
-TypeMember(method embeddedDefaults,[T](implicit evidence$1: Manifest[T])Unit,true,true,<none>)
+TypeMember(method embeddedDefaults,[T](implicit evidence$1: ClassTag[T])Unit,true,true,<none>)
TypeMember(value encoding,Tester.this.settings.StringSetting,false,true,<none>)
TypeMember(method ensuring,(cond: (scala.tools.nsc.Settings) => Boolean,msg: => Any)scala.tools.nsc.Settings,true,false,method any2Ensuring)
TypeMember(method ensuring,(cond: (scala.tools.nsc.Settings) => Boolean)scala.tools.nsc.Settings,true,false,method any2Ensuring)
diff --git a/test/disabled/presentation/timeofday.check b/test/disabled/presentation/timeofday.check
new file mode 100644
index 0000000..2a09d0b
--- /dev/null
+++ b/test/disabled/presentation/timeofday.check
@@ -0,0 +1,100 @@
+reload: timeofday.scala
+
+askTypeCompletion at timeofday.scala(26,33)
+================================================================================
+[response] aksTypeCompletion at (26,33)
+retrieved 45 members
+`method !=(x$1: Any)Boolean`
+`method !=(x$1: AnyRef)Boolean`
+`method ##()Int`
+`method +(other: String)String`
+`method ->[B](y: B)(timeofday.TimeOfDayVar, B)`
+`method ==(x$1: Any)Boolean`
+`method ==(x$1: AnyRef)Boolean`
+`method asInstanceOf[T0]=> T0`
+`method clone()Object`
+`method ensuring(cond: Boolean)timeofday.TimeOfDayVar`
+`method ensuring(cond: Boolean, msg: => Any)timeofday.TimeOfDayVar`
+`method ensuring(cond: timeofday.TimeOfDayVar => Boolean)timeofday.TimeOfDayVar`
+`method ensuring(cond: timeofday.TimeOfDayVar => Boolean, msg: => Any)timeofday.TimeOfDayVar`
+`method eq(x$1: AnyRef)Boolean`
+`method equals(x$1: Any)Boolean`
+`method finalize()Unit`
+`method formatted(fmtstr: String)String`
+`method hashCode()Int`
+`method hours=> Int`
+`method hours_=(h: Int)Unit`
+`method isInstanceOf[T0]=> Boolean`
+`method minutes=> Int`
+`method minutes_=(m: Int)Unit`
+`method ne(x$1: AnyRef)Boolean`
+`method notify()Unit`
+`method notifyAll()Unit`
+`method seconds=> Int`
+`method seconds_=(s: Int)Unit`
+`method synchronized[T0](x$1: T0)T0`
+`method toString()String`
+`method wait()Unit`
+`method wait(x$1: Long)Unit`
+`method wait(x$1: Long, x$2: Int)Unit`
+`method x=> timeofday.TimeOfDayVar`
+`method →[B](y: B)(timeofday.TimeOfDayVar, B)`
+`value __leftOfArrowtimeofday.TimeOfDayVar`
+`value __resultOfEnsuringtimeofday.TimeOfDayVar`
+`value selfAny`
+`variable hInt`
+`variable mInt`
+`variable sInt`
+================================================================================
+
+askTypeCompletion at timeofday.scala(32,19)
+================================================================================
+[response] aksTypeCompletion at (32,19)
+retrieved 45 members
+`method !=(x$1: Any)Boolean`
+`method !=(x$1: AnyRef)Boolean`
+`method ##()Int`
+`method +(other: String)String`
+`method ->[B](y: B)(timeofday.TimeOfDayVar, B)`
+`method ==(x$1: Any)Boolean`
+`method ==(x$1: AnyRef)Boolean`
+`method asInstanceOf[T0]=> T0`
+`method clone()Object`
+`method ensuring(cond: Boolean)timeofday.TimeOfDayVar`
+`method ensuring(cond: Boolean, msg: => Any)timeofday.TimeOfDayVar`
+`method ensuring(cond: timeofday.TimeOfDayVar => Boolean)timeofday.TimeOfDayVar`
+`method ensuring(cond: timeofday.TimeOfDayVar => Boolean, msg: => Any)timeofday.TimeOfDayVar`
+`method eq(x$1: AnyRef)Boolean`
+`method equals(x$1: Any)Boolean`
+`method finalize()Unit`
+`method formatted(fmtstr: String)String`
+`method hashCode()Int`
+`method hours=> Int`
+`method hours_=(h: Int)Unit`
+`method isInstanceOf[T0]=> Boolean`
+`method minutes=> Int`
+`method minutes_=(m: Int)Unit`
+`method ne(x$1: AnyRef)Boolean`
+`method notify()Unit`
+`method notifyAll()Unit`
+`method seconds=> Int`
+`method seconds_=(s: Int)Unit`
+`method synchronized[T0](x$1: T0)T0`
+`method toString()String`
+`method wait()Unit`
+`method wait(x$1: Long)Unit`
+`method wait(x$1: Long, x$2: Int)Unit`
+`method x=> timeofday.TimeOfDayVar`
+`method →[B](y: B)(timeofday.TimeOfDayVar, B)`
+`value __leftOfArrowtimeofday.TimeOfDayVar`
+`value __resultOfEnsuringtimeofday.TimeOfDayVar`
+`value selfAny`
+`variable hInt`
+`variable mInt`
+`variable sInt`
+================================================================================
+
+askHyperlinkPos for `hours` at (33,11) timeofday.scala
+================================================================================
+[response] found askHyperlinkPos for `hours` at (10,9) timeofday.scala
+================================================================================
diff --git a/test/disabled/presentation/timeofday/Runner.scala b/test/disabled/presentation/timeofday/Runner.scala
new file mode 100644
index 0000000..1c03e3d
--- /dev/null
+++ b/test/disabled/presentation/timeofday/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
diff --git a/test/disabled/presentation/timeofday/src/timeofday.scala b/test/disabled/presentation/timeofday/src/timeofday.scala
new file mode 100644
index 0000000..d635509
--- /dev/null
+++ b/test/disabled/presentation/timeofday/src/timeofday.scala
@@ -0,0 +1,35 @@
+object timeofday {
+ class DateError extends Exception
+
+ /** Simulating properties in Scala
+ * (example 4.2.1 in ScalaReference.pdf)
+ */
+ class TimeOfDayVar {
+ private var h, m, s: Int = 0
+
+ def hours = h
+
+ /** A method 'ident_=' is a setter for 'ident'. 'code.ident = ...' will
+ * be translated to a call to 'ident_='
+ */
+ def hours_= (h: Int) =
+ if (0 <= h && h < 24) this.h = h
+ else throw new DateError()
+
+ def minutes = m
+ def minutes_= (m: Int) =
+ if (0 <= m && m < 60) this.m = m
+ else throw new DateError()
+
+ def seconds = s
+ def seconds_= (s: Int) =
+ if (0 <= s && s < 60) this./*!*/s = s
+ else throw new DateError()
+ }
+
+ def main(args: Array[String]) {
+ val d = new TimeOfDayVar
+ d.hours = 8; d./*!*/minutes = 30; d.seconds = 0
+ d.hours/*#*/ = 25 // throws a DateError exception
+ }
+}
\ No newline at end of file
diff --git a/test/disabled/properties.check b/test/disabled/properties.check
new file mode 100644
index 0000000..a721d49
--- /dev/null
+++ b/test/disabled/properties.check
@@ -0,0 +1,158 @@
+reload: properties.scala
+
+askTypeCompletion at properties.scala(29,33)
+================================================================================
+[response] aksTypeCompletion at (29,33)
+retrieved 50 members
+`method !=(x$1: Any)Boolean`
+`method !=(x$1: AnyRef)Boolean`
+`method ##()Int`
+`method +(other: String)String`
+`method ->[B](y: B)(properties.Property[String], B)`
+`method ==(x$1: Any)Boolean`
+`method ==(x$1: AnyRef)Boolean`
+`method apply()String`
+`method asInstanceOf[T0]=> T0`
+`method canEqual(that: Any)Boolean`
+`method clone()Object`
+`method ensuring(cond: Boolean)properties.Property[String]`
+`method ensuring(cond: Boolean, msg: => Any)properties.Property[String]`
+`method ensuring(cond: properties.Property[String] => Boolean)properties.Property[String]`
+`method ensuring(cond: properties.Property[String] => Boolean, msg: => Any)properties.Property[String]`
+`method eq(x$1: AnyRef)Boolean`
+`method equals(x$1: Any)Boolean`
+`method finalize()Unit`
+`method formatted(fmtstr: String)String`
+`method get(newGetter: String => String)properties.Property[String]`
+`method hashCode()Int`
+`method isInstanceOf[T0]=> Boolean`
+`method ne(x$1: AnyRef)Boolean`
+`method notify()Unit`
+`method notifyAll()Unit`
+`method productArity=> Int`
+`method productElement(n: Int)Any`
+`method productIterator=> Iterator[Any]`
+`method productPrefix=> String`
+`method set(newSetter: String => String)properties.Property[String]`
+`method synchronized[T0](x$1: T0)T0`
+`method toString()String`
+`method update(newValue: String)Unit`
+`method wait()Unit`
+`method wait(x$1: Long)Unit`
+`method wait(x$1: Long, x$2: Int)Unit`
+`method x=> properties.Property[String]`
+`method →[B](y: B)(properties.Property[String], B)`
+`value __leftOfArrowproperties.Property[String]`
+`value __resultOfEnsuringproperties.Property[String]`
+`value initString`
+`value selfAny`
+`variable getterString => String`
+`variable setterString => String`
+`variable valueString`
+================================================================================
+
+askTypeCompletion at properties.scala(29,67)
+================================================================================
+[response] aksTypeCompletion at (29,67)
+retrieved 50 members
+`method !=(x$1: Any)Boolean`
+`method !=(x$1: AnyRef)Boolean`
+`method ##()Int`
+`method +(other: String)String`
+`method ->[B](y: B)(properties.Property[String], B)`
+`method ==(x$1: Any)Boolean`
+`method ==(x$1: AnyRef)Boolean`
+`method apply()String`
+`method asInstanceOf[T0]=> T0`
+`method canEqual(that: Any)Boolean`
+`method clone()Object`
+`method ensuring(cond: Boolean)properties.Property[String]`
+`method ensuring(cond: Boolean, msg: => Any)properties.Property[String]`
+`method ensuring(cond: properties.Property[String] => Boolean)properties.Property[String]`
+`method ensuring(cond: properties.Property[String] => Boolean, msg: => Any)properties.Property[String]`
+`method eq(x$1: AnyRef)Boolean`
+`method equals(x$1: Any)Boolean`
+`method finalize()Unit`
+`method formatted(fmtstr: String)String`
+`method get(newGetter: String => String)properties.Property[String]`
+`method hashCode()Int`
+`method isInstanceOf[T0]=> Boolean`
+`method ne(x$1: AnyRef)Boolean`
+`method notify()Unit`
+`method notifyAll()Unit`
+`method productArity=> Int`
+`method productElement(n: Int)Any`
+`method productIterator=> Iterator[Any]`
+`method productPrefix=> String`
+`method set(newSetter: String => String)properties.Property[String]`
+`method synchronized[T0](x$1: T0)T0`
+`method toString()String`
+`method update(newValue: String)Unit`
+`method wait()Unit`
+`method wait(x$1: Long)Unit`
+`method wait(x$1: Long, x$2: Int)Unit`
+`method x=> properties.Property[String]`
+`method →[B](y: B)(properties.Property[String], B)`
+`value __leftOfArrowproperties.Property[String]`
+`value __resultOfEnsuringproperties.Property[String]`
+`value initString`
+`value selfAny`
+`variable getterString => String`
+`variable setterString => String`
+`variable valueString`
+================================================================================
+
+askTypeCompletion at properties.scala(45,10)
+================================================================================
+[response] aksTypeCompletion at (45,10)
+retrieved 38 members
+`method !=(x$1: Any)Boolean`
+`method !=(x$1: AnyRef)Boolean`
+`method ##()Int`
+`method +(other: String)String`
+`method ->[B](y: B)(properties.User, B)`
+`method ==(x$1: Any)Boolean`
+`method ==(x$1: AnyRef)Boolean`
+`method asInstanceOf[T0]=> T0`
+`method clone()Object`
+`method ensuring(cond: Boolean)properties.User`
+`method ensuring(cond: Boolean, msg: => Any)properties.User`
+`method ensuring(cond: properties.User => Boolean)properties.User`
+`method ensuring(cond: properties.User => Boolean, msg: => Any)properties.User`
+`method eq(x$1: AnyRef)Boolean`
+`method equals(x$1: Any)Boolean`
+`method finalize()Unit`
+`method formatted(fmtstr: String)String`
+`method hashCode()Int`
+`method isInstanceOf[T0]=> Boolean`
+`method ne(x$1: AnyRef)Boolean`
+`method notify()Unit`
+`method notifyAll()Unit`
+`method synchronized[T0](x$1: T0)T0`
+`method toString()String`
+`method wait()Unit`
+`method wait(x$1: Long)Unit`
+`method wait(x$1: Long, x$2: Int)Unit`
+`method x=> properties.User`
+`method →[B](y: B)(properties.User, B)`
+`value __leftOfArrowproperties.User`
+`value __resultOfEnsuringproperties.User`
+`value firstnameproperties.Property[String]`
+`value lastnameproperties.Property[String]`
+`value selfAny`
+================================================================================
+
+askType at properties.scala(18,28)
+================================================================================
+[response] askTypeAt at (18,28)
+def update(newValue: T): Unit = Property.this.value_=(Property.this.setter.apply(newValue))
+================================================================================
+
+askType at properties.scala(21,31)
+================================================================================
+[response] askTypeAt at (21,31)
+def get(newGetter: T => T): properties.Property[T] = {
+ Property.this.getter_=(newGetter);
+ this
+}
+================================================================================
diff --git a/test/disabled/properties/Runner.scala b/test/disabled/properties/Runner.scala
new file mode 100644
index 0000000..1ef3cf9
--- /dev/null
+++ b/test/disabled/properties/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/disabled/properties/src/properties.scala b/test/disabled/properties/src/properties.scala
new file mode 100644
index 0000000..35b6a92
--- /dev/null
+++ b/test/disabled/properties/src/properties.scala
@@ -0,0 +1,54 @@
+/** Illustrate the use of custom 'apply/update' methods. */
+object properties {
+
+ /** A mutable property whose getter and setter may be customized. */
+ case class Property[T](init: T) {
+ private var value: T = init
+
+ /** The getter function, defaults to identity. */
+ private var setter: T => T = identity[T]
+
+ /** The setter function, defaults to identity. */
+ private var getter: T => T = identity[T]
+
+ /** Retrive the value held in this property. */
+ def apply(): T = getter(value)
+
+ /** Update the value held in this property, through the setter. */
+ def update(newValue: T) /*?*/ = value = setter(newValue)
+
+ /** Change the getter. */
+ def get(newGetter: T => T) /*?*/ = { getter = newGetter; this }
+
+ /** Change the setter */
+ def set(newSetter: T => T) = { setter = newSetter; this }
+ }
+
+ class User {
+ // Create a property with custom getter and setter
+ val firstname = Property("")./*!*/get { v => v.toUpperCase() }./*!*/set { v => "Mr. " + v }
+ val lastname = Property("<noname>")
+
+ /** Scala provides syntactic sugar for calling 'apply'. Simply
+ * adding a list of arguments between parenthesis (in this case,
+ * an empty list) is translated to a call to 'apply' with those
+ * arguments.
+ */
+ override def toString() = firstname() + " " + lastname()
+ }
+
+ def main(args: Array[String]) {
+ val user1 = new User
+
+ // Syntactic sugar for 'update': an assignment is translated to a
+ // call to method 'update'
+ user1./*!*/firstname() = "Robert"
+
+ val user2 = new User
+ user2.firstname() = "bob"
+ user2.lastname() = "KUZ"
+
+ println("user1: " + user1)
+ println("user2: " + user2)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/lisp.check b/test/disabled/run/lisp.check
similarity index 100%
rename from test/files/run/lisp.check
rename to test/disabled/run/lisp.check
diff --git a/test/files/run/lisp.scala b/test/disabled/run/lisp.scala
similarity index 100%
rename from test/files/run/lisp.scala
rename to test/disabled/run/lisp.scala
diff --git a/test/files/run/syncchannel.check b/test/disabled/run/syncchannel.check
similarity index 100%
copy from test/files/run/syncchannel.check
copy to test/disabled/run/syncchannel.check
diff --git a/test/files/run/syncchannel.scala b/test/disabled/run/syncchannel.scala
similarity index 100%
rename from test/files/run/syncchannel.scala
rename to test/disabled/run/syncchannel.scala
diff --git a/test/files/run/t2886.scala b/test/disabled/run/t2886.scala
similarity index 100%
copy from test/files/run/t2886.scala
copy to test/disabled/run/t2886.scala
diff --git a/test/disabled/run/bug4279.scala b/test/disabled/run/t4279.scala
similarity index 100%
rename from test/disabled/run/bug4279.scala
rename to test/disabled/run/t4279.scala
diff --git a/test/disabled/run/t4532.check b/test/disabled/run/t4532.check
new file mode 100644
index 0000000..47a9809
--- /dev/null
+++ b/test/disabled/run/t4532.check
@@ -0,0 +1,15 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> object Bippy { class Dingus ; object Bop }
+defined module Bippy
+
+scala> :javap Bippy.Dingus
+Compiled from "<console>"public class Bippy$Dingus extends java.lang.Object implements scala.ScalaObject{ public Bippy$Dingus();}
+scala> :javap Bippy.Bop
+Compiled from "<console>"public final class Bippy$Bop$ extends java.lang.Object implements scala.ScalaObject{ public static final Bippy$Bop$ MODULE$; public static {}; public Bippy$Bop$();}
+scala>
+
+scala>
diff --git a/test/disabled/run/t4532.scala b/test/disabled/run/t4532.scala
new file mode 100644
index 0000000..0dabd2d
--- /dev/null
+++ b/test/disabled/run/t4532.scala
@@ -0,0 +1,34 @@
+import scala.tools.partest.ReplTest
+import scala.tools.util.Javap
+
+object Test extends ReplTest {
+
+ // ugh, windows
+ def expectedOutput =
+"""Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> object Bippy { class Dingus ; object Bop }
+defined module Bippy
+
+scala> :javap Bippy.Dingus
+Compiled from "<console>"public class Bippy$Dingus extends java.lang.Object implements scala.ScalaObject{ public Bippy$Dingus();}
+scala> :javap Bippy.Bop
+Compiled from "<console>"public final class Bippy$Bop$ extends java.lang.Object implements scala.ScalaObject{ public static final Bippy$Bop$ MODULE$; public static {}; public Bippy$Bop$();}
+scala>
+
+scala>
+"""
+
+ override def eval() =
+ if (Javap.isAvailable()) super.eval()
+ else expectedOutput.lines
+
+ def code = """
+ |object Bippy { class Dingus ; object Bop }
+ |:javap Bippy.Dingus
+ |:javap Bippy.Bop
+ """.stripMargin
+}
diff --git a/test/disabled/run/t6026.check b/test/disabled/run/t6026.check
new file mode 100644
index 0000000..779bb3a
--- /dev/null
+++ b/test/disabled/run/t6026.check
@@ -0,0 +1,9 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> class Foo
+defined class Foo
+
+scala> :javap Foo
+Compiled from "<console>"public class Foo extends java.lang.Object{ public Foo();}
+scala>
diff --git a/test/disabled/run/t6026.scala b/test/disabled/run/t6026.scala
new file mode 100644
index 0000000..bee27bc
--- /dev/null
+++ b/test/disabled/run/t6026.scala
@@ -0,0 +1,9 @@
+
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def code =
+"""|class Foo
+ |:javap Foo
+ |""".stripMargin
+}
diff --git a/test/disabled/run/t6987.check b/test/disabled/run/t6987.check
new file mode 100644
index 0000000..86fc96c
--- /dev/null
+++ b/test/disabled/run/t6987.check
@@ -0,0 +1 @@
+got successful verbose results!
diff --git a/test/disabled/run/t6987.scala b/test/disabled/run/t6987.scala
new file mode 100644
index 0000000..37e91d6
--- /dev/null
+++ b/test/disabled/run/t6987.scala
@@ -0,0 +1,43 @@
+import java.io._
+import tools.nsc.{CompileClient, CompileServer}
+import java.util.concurrent.{CountDownLatch, TimeUnit}
+
+object Test extends App {
+ val startupLatch = new CountDownLatch(1)
+ // we have to explicitly launch our server because when the client launches a server it uses
+ // the "scala" shell command meaning whatever version of scala (and whatever version of libraries)
+ // happens to be in the path gets used
+ val t = new Thread(new Runnable {
+ def run() = {
+ CompileServer.execute(() => startupLatch.countDown(), Array[String]())
+ }
+ })
+ t setDaemon true
+ t.start()
+ if (!startupLatch.await(2, TimeUnit.MINUTES))
+ sys error "Timeout waiting for server to start"
+
+ val baos = new ByteArrayOutputStream()
+ val ps = new PrintStream(baos)
+
+ val success = (scala.Console withOut ps) {
+ // shut down the server via the client using the verbose flag
+ CompileClient.process(Array("-shutdown", "-verbose"))
+ }
+
+ // now make sure we got success and a verbose result
+ val msg = baos.toString()
+
+ if (success) {
+ if (msg contains "Settings after normalizing paths") {
+ println("got successful verbose results!")
+ } else {
+ println("did not get the string expected, full results were:")
+ println(msg)
+ }
+ } else {
+ println("got a failure. Full results were:")
+ println(msg)
+ }
+ scala.Console.flush
+}
diff --git a/test/disabled/scalacheck/redblack.scala b/test/disabled/scalacheck/redblack.scala
deleted file mode 100644
index 301d332..0000000
--- a/test/disabled/scalacheck/redblack.scala
+++ /dev/null
@@ -1,157 +0,0 @@
-import org.scalacheck._
-import Prop._
-import Gen._
-
-/*
-Properties of a Red & Black Tree:
-
-A node is either red or black.
-The root is black. (This rule is used in some definitions and not others. Since the
-root can always be changed from red to black but not necessarily vice-versa this
-rule has little effect on analysis.)
-All leaves are black.
-Both children of every red node are black.
-Every simple path from a given node to any of its descendant leaves contains the same number of black nodes.
-*/
-
-abstract class RedBlackTest extends Properties("RedBlack") {
- object RedBlackTest extends scala.collection.immutable.RedBlack[Int] {
- def isSmaller(x: Int, y: Int) = x < y
- }
-
- import RedBlackTest._
-
- def rootIsBlack[A](t: Tree[A]) = t.isBlack
-
- def areAllLeavesBlack[A](t: Tree[A]): Boolean = t match {
- case Empty => t.isBlack
- case ne: NonEmpty[_] => List(ne.left, ne.right) forall areAllLeavesBlack
- }
-
- def areRedNodeChildrenBlack[A](t: Tree[A]): Boolean = t match {
- case RedTree(_, _, left, right) => List(left, right) forall (t => t.isBlack && areRedNodeChildrenBlack(t))
- case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack
- case Empty => true
- }
-
- def blackNodesToLeaves[A](t: Tree[A]): List[Int] = t match {
- case Empty => List(1)
- case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1)
- case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves
- }
-
- def areBlackNodesToLeavesEqual[A](t: Tree[A]): Boolean = t match {
- case Empty => true
- case ne: NonEmpty[_] =>
- (
- blackNodesToLeaves(ne).removeDuplicates.size == 1
- && areBlackNodesToLeavesEqual(ne.left)
- && areBlackNodesToLeavesEqual(ne.right)
- )
- }
-
- def orderIsPreserved[A](t: Tree[A]): Boolean = t match {
- case Empty => true
- case ne: NonEmpty[_] =>
- (
- (ne.left.iterator map (_._1) forall (isSmaller(_, ne.key)))
- && (ne.right.iterator map (_._1) forall (isSmaller(ne.key, _)))
- && (List(ne.left, ne.right) forall orderIsPreserved)
- )
- }
-
- def setup(l: List[Int], invariant: Tree[Unit] => Boolean): (Boolean, Tree[Unit])
-
- def listNoRepetitions(size: Int) = for {
- s <- Gen.choose(1, size)
- l <- Gen.listOfN(size, Gen.choose(0, Int.MaxValue)) suchThat (l => l.size == l.removeDuplicates.size)
- } yield l
- def listFewRepetitions(size: Int) = for {
- s <- Gen.choose(1, size)
- l <- Gen.listOfN(s, Gen.choose(0, size * 4)) suchThat (l => l.size != l.removeDuplicates.size)
- } yield l
- def listManyRepetitions(size: Int) = for {
- s <- Gen.choose(1, size)
- l <- Gen.listOfN(s, Gen.choose(0, size)) suchThat (l => l.size != l.removeDuplicates.size)
- } yield l
- def listEvenRepetitions(size: Int) = listFewRepetitions(size) map (x =>
- scala.util.Random.shuffle(x zip x flatMap { case (a, b) => List(a, b) })
- )
-
- // Arbitrarily weighted list distribution types
- val seqType: Gen[Int => Gen[List[Int]]]
-
- def myGen(sized: Int) = for {
- size <- Gen.choose(0, sized)
- seq <- seqType
- list <- seq(size)
- } yield list
-
- property("root is black") = forAll(myGen(10)) { l =>
- setup(l, rootIsBlack)._1 :| setup(l, rootIsBlack)._2.toString
- }
- property("all leaves are black") = forAll(myGen(50)) { l =>
- setup(l, areAllLeavesBlack)._1 :| setup(l, areAllLeavesBlack)._2.toString
- }
- property("children of red nodes are black") = forAll(myGen(50)) { l =>
- setup(l, areRedNodeChildrenBlack)._1 :| setup(l, areRedNodeChildrenBlack)._2.toString
- }
- property("Every path from a node to its descendant leaves contains the same number of black nodes") = forAll(myGen(50)) { l =>
- setup(l, areBlackNodesToLeavesEqual)._1 :| setup(l, areBlackNodesToLeavesEqual)._2.toString
- }
- property("Ordering of keys is preserved") = forAll(myGen(50)) { l =>
- setup(l, orderIsPreserved)._1 :| setup(l, orderIsPreserved)._2.toString
- }
-}
-
-object TestInsertion extends RedBlackTest {
- import RedBlackTest._
- override val seqType = Gen.frequency(
- (1, listNoRepetitions _),
- (1, listManyRepetitions _)
- )
-
- property("update adds elements") = forAll(myGen(50)) { l =>
- val tree = l.foldLeft(Empty: Tree[Unit])((acc, n) => acc update (n, ()))
- forAll(Gen.pick(1, l)) ( n => !(tree lookup n.head isEmpty) :| "Tree: "+tree+" N: "+n.head )
- }
-
- override def setup(l: List[Int], invariant: Tree[Unit] => Boolean) = l.foldLeft((true, Empty: Tree[Unit])) {
- case ((true, acc), n) =>
- val newRoot = acc update (n, ())
- (invariant(newRoot), newRoot)
- case (failed, _) => failed
- }
-}
-
-object TestDeletion extends RedBlackTest {
- import RedBlackTest._
- override val seqType = Gen.frequency(
- (2, listFewRepetitions _),
- (3, listManyRepetitions _),
- (1, listEvenRepetitions _)
- )
-
- property("delete removes elements") = forAll(myGen(50)) { l =>
- val tree = l.foldLeft(Empty: Tree[Unit])((acc, n) => acc update (n, ()))
- forAll(Gen.choose(1, l.size)) { numberOfElementsToRemove =>
- forAll(Gen.pick(numberOfElementsToRemove, l)) { elementsToRemove =>
- val newTree = elementsToRemove.foldLeft(tree)((acc, n) => acc delete n)
- (elementsToRemove forall (n => newTree lookup n isEmpty)) :| "Tree: "+tree+"New Tree: "+newTree+" Elements to Remove: "+elementsToRemove
- }
- }
- }
-
- override def setup(l: List[Int], invariant: Tree[Unit] => Boolean) = l.foldLeft((true, Empty: Tree[Unit])) {
- case ((true, acc), n) =>
- val newRoot = if (acc lookup n isEmpty) acc update (n, ()) else acc delete n
- (invariant(newRoot), newRoot)
- case (failed, _) => failed
- }
-}
-
-object Test extends Properties("RedBlack") {
- include(TestInsertion)
- include(TestDeletion)
-}
-
diff --git a/test/disabled/script/fact.bat b/test/disabled/script/fact.bat
old mode 100644
new mode 100755
index bee0ba2..6f02b62
--- a/test/disabled/script/fact.bat
+++ b/test/disabled/script/fact.bat
@@ -1,17 +1,17 @@
-::#!
-:: fact - A simple Scala batch file that prints out the factorial
-:: of the argument specified on the command line.
-
- at echo off
-call scala -nocompdaemon %0 %*
-goto :eof
-::!#
-
-
-val x = argv(0).toInt
-
-def fact(x: Int):Int =
- if(x==0) 1
- else x*fact(x-1)
-
-Console.println("fact(" + x + ") = " + fact(x))
+::#!
+:: fact - A simple Scala batch file that prints out the factorial
+:: of the argument specified on the command line.
+
+ at echo off
+call scala -nocompdaemon %0 %*
+goto :eof
+::!#
+
+
+val x = argv(0).toInt
+
+def fact(x: Int):Int =
+ if(x==0) 1
+ else x*fact(x-1)
+
+Console.println("fact(" + x + ") = " + fact(x))
diff --git a/test/disabled/script/fact.scala b/test/disabled/script/fact.scala
old mode 100644
new mode 100755
diff --git a/test/disabled/script/second.bat b/test/disabled/script/second.bat
old mode 100644
new mode 100755
index 0d70859..222372d
--- a/test/disabled/script/second.bat
+++ b/test/disabled/script/second.bat
@@ -1,3 +1,3 @@
- at echo off
-
-scala -nocompdaemon -e "println(\"My second argument is \" + args(1))" arg1 arg2
+ at echo off
+
+scala -nocompdaemon -e "println(\"My second argument is \" + args(1))" arg1 arg2
diff --git a/test/disabled/script/second.scala b/test/disabled/script/second.scala
old mode 100644
new mode 100755
diff --git a/test/disabled/script/t1015.bat b/test/disabled/script/t1015.bat
old mode 100644
new mode 100755
index 7475313..4eddc80
--- a/test/disabled/script/t1015.bat
+++ b/test/disabled/script/t1015.bat
@@ -1,12 +1,12 @@
-::#!
-:: t1015 - <description>.
-
- at echo off
-call scala -nocompdaemon %0 %*
-goto :eof
-::!#
-
-case class Test(one : Int, two : Int)
-object Test{
- def apply(one : Int): Test = Test(one, 2);
-}
+::#!
+:: t1015 - <description>.
+
+ at echo off
+call scala -nocompdaemon %0 %*
+goto :eof
+::!#
+
+case class Test(one : Int, two : Int)
+object Test{
+ def apply(one : Int): Test = Test(one, 2);
+}
diff --git a/test/disabled/script/t1015.scala b/test/disabled/script/t1015.scala
old mode 100644
new mode 100755
diff --git a/test/disabled/script/t1017.bat b/test/disabled/script/t1017.bat
old mode 100644
new mode 100755
index 369dbd2..0df49c6
--- a/test/disabled/script/t1017.bat
+++ b/test/disabled/script/t1017.bat
@@ -1,15 +1,15 @@
-::#!
-::# t1017 - <description>.
-
- at echo off
-call scala -nocompdaemon %0 %*
-goto :eof
-::!#
-
-def foo = {
- bar
-}
-
-var x = 1
-
-def bar = 1
+::#!
+::# t1017 - <description>.
+
+ at echo off
+call scala -nocompdaemon %0 %*
+goto :eof
+::!#
+
+def foo = {
+ bar
+}
+
+var x = 1
+
+def bar = 1
diff --git a/test/disabled/script/t1017.scala b/test/disabled/script/t1017.scala
old mode 100644
new mode 100755
diff --git a/test/files/gitignore.SAMPLE b/test/files/.gitignore
similarity index 100%
rename from test/files/gitignore.SAMPLE
rename to test/files/.gitignore
diff --git a/test/files/android/HelloAndroid.scala b/test/files/android/HelloAndroid.scala
deleted file mode 100644
index 9fd145a..0000000
--- a/test/files/android/HelloAndroid.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-//package examples.hello3
-
-import android.app.Activity
-import android.os.Bundle
-import android.widget.TextView
-
-//class HelloAndroid extends Activity {
-class Test extends Activity {
- /** Called when the activity is first created. */
- override def onCreate(icicle: Bundle) {
- super.onCreate(icicle)
- val tv = new TextView(this)
- tv setText "Hello, Android (Scala)"
- setContentView(tv)
- }
-}
diff --git a/test/files/android/HelloAndroid.xml b/test/files/android/HelloAndroid.xml
deleted file mode 100644
index 41907b1..0000000
--- a/test/files/android/HelloAndroid.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-<manifest xmlns:android="http://schemas.android.com/apk/res/android"
- package="">
- <application>
- <activity class="HelloAndroid" android:label="HelloAndroid">
- <intent-filter>
- <action android:value="android.intent.action.MAIN" />
- <category android:value="android.intent.category.LAUNCHER" />
- </intent-filter>
- </activity>
- </application>
-</manifest>
diff --git a/test/files/ant/README b/test/files/ant/README
new file mode 100644
index 0000000..8cd8745
--- /dev/null
+++ b/test/files/ant/README
@@ -0,0 +1,42 @@
+README
+======
+
+Test cases in directory test/files/ant/ are executed by invoking an
+Ant script whose name ends with "build.xml" (eg. "fsc001-build.xml").
+
+The Scala Ant tasks fsc/scalac/scaladoc are instantiated from various
+binaries (quick/pack/latest/installed) and are executed with different
+combinations of Ant attributes/elements:
+
+ +---------------------------+--------------------------+
+ | Attributes | Nested elements |
+------------+---------------------------+--------------------------+
+fsc001 | srcdir,classpath (1) | compilerarg |
+fsc002 | srcref,classpathref (1) | compilerarg |
+fsc003 | (2) | compilerarg,src,include |
+------------+---------------------------+--------------------------+
+scalac001 | srcdir,classpath (1) | |
+scalac002 | srcref,classpathref (1) | |
+scalac003 | (2) | src,include |
+scalac004 | deprecation,unchecked (3) | |
+------------+---------------------------+--------------------------+
+scaladoc | srcdir,classpathref | |
+------------+---------------------------+--------------------------+
+
+Other attributes:
+(1) includes,destdir
+(2) destdir,classpathref
+(3) srcdir,includes,destdir,classpath
+
+
+The above test cases can also be run from the command prompt using one of
+the following shell commands:
+
+1) For quick/pack/latest binaries (-Dbinary=quick|pack|latest)
+
+$ ant -Dbinary=quick -Dproject.dir=$HOME/workspace/scala -f scalac001-build.xml
+
+2) For installed binaries (-Dbinary=installed)
+
+$ ant -Dbinary=installed -Dinstalled.dir=/opt/scala -f scalac001-build.xml
+
diff --git a/test/files/ant/fsc-build.xml b/test/files/ant/fsc-build.xml
deleted file mode 100644
index 9323be1..0000000
--- a/test/files/ant/fsc-build.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="fsc" default="run" basedir=".">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <fsc
- srcdir="${source.dir}"
- includes="**/${ant.project.name}*.scala"
- deprecation="yes" unchecked="yes"
- destdir="${build.dir}"
- classpathref="build.classpath"
- />
- <dirname property="log.dir" file="${build.dir}"/>
- <echo level="verbose" message="log.dir=${log.dir}"/>
- <replace
- file="${log.dir}/${ant.project.name}-ant.log"
- token="${log.dir}"
- value="[...]/files/ant"
- />
- </target>
-
-</project>
-
diff --git a/test/files/ant/fsc.check b/test/files/ant/fsc.check
deleted file mode 100644
index d6c4f90..0000000
--- a/test/files/ant/fsc.check
+++ /dev/null
@@ -1,13 +0,0 @@
-Buildfile: [...]/files/ant/fsc-build.xml
-
-quick.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/fsc-ant.obj
- [fsc] Compiling 1 source file to [...]/files/ant/fsc-ant.obj
diff --git a/test/files/ant/fsc001-build.check b/test/files/ant/fsc001-build.check
new file mode 100644
index 0000000..b5141f5
--- /dev/null
+++ b/test/files/ant/fsc001-build.check
@@ -0,0 +1,14 @@
+
+quick.init:
+
+pack.init:
+
+latest.init:
+
+installed.init:
+
+init:
+
+build:
+ [mkdir] Created dir: [...]/files/ant/fsc001-ant.obj
+ [fsc] Compiling 1 source file to [...]/files/ant/fsc001-ant.obj
diff --git a/test/files/ant/fsc001-build.xml b/test/files/ant/fsc001-build.xml
new file mode 100644
index 0000000..0130f36
--- /dev/null
+++ b/test/files/ant/fsc001-build.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project name="fsc001" default="run">
+
+ <import file="${basedir}/imported.xml"/>
+
+<!-- ===========================================================================
+BUILD
+============================================================================ -->
+
+ <target name="build" depends="init">
+ <echo level="verbose" message="build.dir=${build.dir}"/>
+ <mkdir dir="${build.dir}"/>
+ <pathconvert property="classpath" refid="build.classpath"/>
+ <fsc
+ srcdir="${source.dir}"
+ includes="**/${ant.project.name}*.scala"
+ destdir="${build.dir}"
+ classpath="${classpath}">
+ </fsc>
+ <echo level="verbose" message="log.file=${log.file}"/>
+ <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
+ </target>
+
+</project>
+
diff --git a/test/files/ant/fsc001.scala b/test/files/ant/fsc001.scala
new file mode 100644
index 0000000..6ede598
--- /dev/null
+++ b/test/files/ant/fsc001.scala
@@ -0,0 +1,7 @@
+package test
+
+object Main {
+ def main(args: Array[String]) {
+ println(args mkString " ")
+ }
+}
diff --git a/test/files/ant/fsc002-build.check b/test/files/ant/fsc002-build.check
new file mode 100644
index 0000000..0c9c30d
--- /dev/null
+++ b/test/files/ant/fsc002-build.check
@@ -0,0 +1,14 @@
+
+quick.init:
+
+pack.init:
+
+latest.init:
+
+installed.init:
+
+init:
+
+build:
+ [mkdir] Created dir: [...]/files/ant/fsc002-ant.obj
+ [fsc] Compiling 1 source file to [...]/files/ant/fsc002-ant.obj
diff --git a/test/files/ant/fsc002-build.xml b/test/files/ant/fsc002-build.xml
new file mode 100644
index 0000000..db91070
--- /dev/null
+++ b/test/files/ant/fsc002-build.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project name="fsc002" default="run">
+
+ <import file="${basedir}/imported.xml"/>
+
+<!-- ===========================================================================
+BUILD
+============================================================================ -->
+
+ <target name="build" depends="init">
+ <echo level="verbose" message="build.dir=${build.dir}"/>
+ <mkdir dir="${build.dir}"/>
+ <path id="source.ref">
+ <pathelement location="${source.dir}"/>
+ </path>
+ <fsc
+ srcref="source.ref"
+ includes="**/${ant.project.name}*.scala"
+ destdir="${build.dir}"
+ classpathref="build.classpath">
+ </fsc>
+ <echo level="verbose" message="log.file=${log.file}"/>
+ <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
+ </target>
+
+</project>
+
diff --git a/test/files/ant/fsc.scala b/test/files/ant/fsc002.scala
similarity index 100%
rename from test/files/ant/fsc.scala
rename to test/files/ant/fsc002.scala
diff --git a/test/files/ant/fsc003-build.check b/test/files/ant/fsc003-build.check
new file mode 100644
index 0000000..c8c9ed8
--- /dev/null
+++ b/test/files/ant/fsc003-build.check
@@ -0,0 +1,14 @@
+
+quick.init:
+
+pack.init:
+
+latest.init:
+
+installed.init:
+
+init:
+
+build:
+ [mkdir] Created dir: [...]/files/ant/fsc003-ant.obj
+ [fsc] Compiling 1 source file to [...]/files/ant/fsc003-ant.obj
diff --git a/test/files/ant/fsc003-build.xml b/test/files/ant/fsc003-build.xml
new file mode 100644
index 0000000..5f71770
--- /dev/null
+++ b/test/files/ant/fsc003-build.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project name="fsc003" default="run">
+
+ <import file="${basedir}/imported.xml"/>
+
+<!-- ===========================================================================
+BUILD
+============================================================================ -->
+
+ <target name="build" depends="init">
+ <echo level="verbose" message="build.dir=${build.dir}"/>
+ <mkdir dir="${build.dir}"/>
+ <fsc
+ destdir="${build.dir}"
+ classpathref="build.classpath">
+ <src path="${source.dir}"/>
+ <include name="**/${ant.project.name}*.scala"/>
+ </fsc>
+ <echo level="verbose" message="log.file=${log.file}"/>
+ <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
+ </target>
+
+</project>
+
diff --git a/test/files/ant/fsc003.scala b/test/files/ant/fsc003.scala
new file mode 100644
index 0000000..6ede598
--- /dev/null
+++ b/test/files/ant/fsc003.scala
@@ -0,0 +1,7 @@
+package test
+
+object Main {
+ def main(args: Array[String]) {
+ println(args mkString " ")
+ }
+}
diff --git a/test/files/ant/imported.xml b/test/files/ant/imported.xml
index 9e2a3ff..5a4dfc3 100644
--- a/test/files/ant/imported.xml
+++ b/test/files/ant/imported.xml
@@ -1,6 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
-<project name="imported" default="run" basedir=".">
+<project name="imported">
+
+ <!-- This file is imported by the main Ant script. -->
<!-- Prevents system classpath from being used -->
<property name="build.sysclasspath" value="ignore"/>
@@ -11,11 +13,19 @@ PROPERTIES
<property name="source.dir" value="${basedir}"/>
- <property file="./build.properties"/>
+ <property file="${basedir}/build.properties"/>
+
+ <property name="build.dir" location="${source.dir}/${ant.project.name}-ant.obj"/>
+ <property name="log.dir" location="${source.dir}"/>
+ <property name="log.file" value="${log.dir}/${ant.project.name}-build-ant.log"/>
+ <property name="project.dir" value="../../.."/>
<condition property="quick.binary">
<equals arg1="${binary}" arg2="quick"/>
</condition>
+ <condition property="pack.binary">
+ <equals arg1="${binary}" arg2="pack"/>
+ </condition>
<condition property="latest.binary">
<equals arg1="${binary}" arg2="latest"/>
</condition>
@@ -23,16 +33,15 @@ PROPERTIES
<equals arg1="${binary}" arg2="installed"/>
</condition>
- <fail message="Property 'binary' must be set to either 'quick', 'lastest' or 'installed'.">
+ <fail message="Property 'binary' must be set to either 'quick', 'pack', 'latest' or 'installed'.">
<condition><not><or>
<isset property="quick.binary"/>
+ <isset property="pack.binary"/>
<isset property="latest.binary"/>
<isset property="installed.binary"/>
</or></not></condition>
</fail>
<echo level="verbose" message="binary=${binary}"/>
-
- <property name="build.dir" value="${java.io.tmpdir}/classes-${user.name}"/>
<echo level="verbose" message="build.dir=${build.dir}"/>
<!-- ===========================================================================
@@ -40,25 +49,32 @@ INITIALISATION
============================================================================ -->
<target name="quick.init" if="quick.binary">
- <property name="quick.dir" value="../../../build/quick"/>
- <available file="${quick.dir}" property="quick.present"/>
- <fail
- message="Quick build could not be found."
- unless="quick.present"
- />
+ <property name="quick.dir" value="${project.dir}/build/quick"/>
+ <fail message="Quick build could not be found.">
+ <condition><not><available file="${quick.dir}"/></not></condition>
+ </fail>
<property name="scala.dir" value="${quick.dir}"/>
- <property name="scala-library.lib" value="${scala.dir}/lib/library/"/>
- <property name="scala-compiler.lib" value="${scala.dir}/lib/compiler/"/>
- <property name="fjbg.lib" value="${scala.dir}/lib/fjbg.jar"/>
+ <property name="scala-library.lib" value="${scala.dir}/classes/library/"/>
+ <property name="scala-compiler.lib" value="${scala.dir}/classes/compiler/"/>
+ <property name="fjbg.lib" value="${project.dir}/lib/fjbg.jar"/>
+ </target>
+
+ <target name="pack.init" if="pack.binary">
+ <property name="pack.dir" value="${project.dir}/build/pack"/>
+ <fail message="Pack build could not be found.">
+ <condition><not><available file="${pack.dir}"/></not></condition>
+ </fail>
+ <property name="scala.dir" value="${pack.dir}"/>
+ <property name="scala-library.lib" value="${scala.dir}/lib/scala-library.jar"/>
+ <property name="scala-compiler.lib" value="${scala.dir}/lib/scala-compiler.jar"/>
+ <property name="fjbg.lib" value=""/>
</target>
<target name="latest.init" if="latest.binary">
- <property name="latest.dir" value="../../../dists/latest"/>
- <available file="${latest.dir}" property="latest.present"/>
- <fail
- message="Latest build could not be found."
- unless="latest.present"
- />
+ <property name="latest.dir" value="${project.dir}/dists/latest"/>
+ <fail message="Latest build could not be found.">
+ <condition><not><available file="${latest.dir}"/></not></condition>
+ </fail>
<property name="scala.dir" value="${latest.dir}"/>
<property name="scala-library.lib" value="${scala.dir}/lib/scala-library.jar"/>
<property name="scala-compiler.lib" value="${scala.dir}/lib/scala-compiler.jar"/>
@@ -66,55 +82,52 @@ INITIALISATION
</target>
<target name="installed.init" if="installed.binary">
- <property name="installed.dir" value="/home/linuxsoft/apps/scala/share/scala"/>
- <available file="${installed.dir}" property="installed.present"/>
- <fail
- message="Installed distribution could not be found."
- unless="installed.present"
- />
+ <property name="installed.dir" value="/opt/scala"/>
+ <fail message="Installed distribution could not be found.">
+ <condition><not><available file="${installed.dir}"/></not></condition>
+ </fail>
<property name="scala.dir" value="${installed.dir}"/>
<property name="scala-library.lib" value="${scala.dir}/lib/scala-library.jar"/>
<property name="scala-compiler.lib" value="${scala.dir}/lib/scala-compiler.jar"/>
<property name="fjbg.lib" value=""/>
</target>
- <target name="init" depends="quick.init, latest.init, installed.init">
+ <target name="init" depends="quick.init, pack.init, latest.init, installed.init">
<echo level="verbose" message="scala.dir=${scala.dir}"/>
- <fail message="Scala library '${scala-library.lib}' is not available">
+
+ <path id="scala.classpath">
+ <pathelement location="${scala-library.lib}"/>
+ <pathelement location="${scala-compiler.lib}"/>
+ <pathelement location="${fjbg.lib}"/> <!-- only present for 'quick' -->
+ </path>
+
+ <fail message="Scala library '${scala-library.lib}' or '${scala-compiler.lib}' is missing/broken">
<condition><not><and>
<available classname="scala.Predef"
- classpath="${scala-library.lib}"/>
- <available classname="scala.List"
- classpath="${scala-library.lib}"/>
+ classpathref="scala.classpath"/>
+ <available classname="scala.Option"
+ classpathref="scala.classpath"/>
<available classname="scala.runtime.ObjectRef"
- classpath="${scala-library.lib}"/>
- </and></not></condition>
- </fail>
- <fail message="Scala library '${scala-compiler.lib}' is not available">
- <condition><not><and>
+ classpathref="scala.classpath"/>
<available classname="scala.tools.ant.Scalac"
- classpath="${scala-compiler.lib}"/>
+ classpathref="scala.classpath"/>
<available classname="scala.tools.nsc.Main"
- classpath="${scala-compiler.lib}"/>
+ classpathref="scala.classpath"/>
<available classname="scala.tools.util.StringOps"
- classpath="${scala-compiler.lib}"/>
+ classpathref="scala.classpath"/>
</and></not></condition>
</fail>
- <path id="scala.classpath">
- <pathelement location="${scala-library.lib}"/>
- <pathelement location="${scala-compiler.lib}"/>
- <pathelement location="${fjbg.lib}"/> <!-- only present for 'quick' -->
- </path>
- <taskdef resource="scala/tools/ant/antlib.xml">
- <classpath>
- <path refid="scala.classpath"/>
- </classpath>
- </taskdef>
+ <taskdef resource="scala/tools/ant/antlib.xml" classpathref="scala.classpath"/>
+
<path id="build.classpath">
<!--<pathelement location="${scala-actors.lib}"/>-->
<pathelement location="${scala-library.lib}"/>
<pathelement location="${build.dir}"/>
</path>
+
+ <!-- make sure the log file exists when the Ant build scripts -->
+ <!-- are run manually from the command prompt -->
+ <touch file="${log.file}"/>
</target>
<!-- ===========================================================================
diff --git a/test/files/ant/scalac-build.xml b/test/files/ant/scalac-build.xml
deleted file mode 100644
index 0276124..0000000
--- a/test/files/ant/scalac-build.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="scalac" default="run" basedir=".">
-
- <import file="${basedir}/imported.xml"/>
-
-<!-- ===========================================================================
-BUILD
-============================================================================ -->
-
- <target name="build" depends="init">
- <echo level="verbose" message="build.dir=${build.dir}"/>
- <mkdir dir="${build.dir}"/>
- <scalac
- srcdir="${source.dir}"
- includes="**/${ant.project.name}*.scala"
- deprecation="yes" unchecked="yes"
- destdir="${build.dir}"
- classpathref="build.classpath"
- />
- <dirname property="log.dir" file="${build.dir}"/>
- <echo level="verbose" message="log.dir=${log.dir}"/>
- <replace
- file="${log.dir}/${ant.project.name}-ant.log"
- token="${log.dir}"
- value="[...]/files/ant"
- />
- </target>
-
-</project>
-
diff --git a/test/files/ant/scalac.check b/test/files/ant/scalac.check
deleted file mode 100644
index c7bd156..0000000
--- a/test/files/ant/scalac.check
+++ /dev/null
@@ -1,13 +0,0 @@
-Buildfile: [...]/files/ant/scalac-build.xml
-
-quick.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/scalac-ant.obj
- [scalac] Compiling 1 source file to [...]/files/ant/scalac-ant.obj
diff --git a/test/files/ant/scalac001-build.check b/test/files/ant/scalac001-build.check
new file mode 100644
index 0000000..05a43ba
--- /dev/null
+++ b/test/files/ant/scalac001-build.check
@@ -0,0 +1,14 @@
+
+quick.init:
+
+pack.init:
+
+latest.init:
+
+installed.init:
+
+init:
+
+build:
+ [mkdir] Created dir: [...]/files/ant/scalac001-ant.obj
+ [scalac] Compiling 1 source file to [...]/files/ant/scalac001-ant.obj
diff --git a/test/files/ant/scalac001-build.xml b/test/files/ant/scalac001-build.xml
new file mode 100644
index 0000000..4ec7fc8
--- /dev/null
+++ b/test/files/ant/scalac001-build.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project name="scalac001" default="run">
+
+ <import file="${basedir}/imported.xml"/>
+
+<!-- ===========================================================================
+BUILD
+============================================================================ -->
+
+ <target name="build" depends="init">
+ <echo level="verbose" message="build.dir=${build.dir}"/>
+ <mkdir dir="${build.dir}"/>
+ <pathconvert property="classpath" refid="build.classpath"/>
+ <scalac
+ srcdir="${source.dir}"
+ includes="**/${ant.project.name}*.scala"
+ destdir="${build.dir}"
+ classpath="${classpath}"
+ />
+ <echo level="verbose" message="log.file=${log.file}"/>
+ <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
+ </target>
+
+</project>
+
diff --git a/test/files/ant/scalac.scala b/test/files/ant/scalac001.scala
similarity index 100%
rename from test/files/ant/scalac.scala
rename to test/files/ant/scalac001.scala
diff --git a/test/files/ant/scalac002-build.check b/test/files/ant/scalac002-build.check
new file mode 100644
index 0000000..e7b3670
--- /dev/null
+++ b/test/files/ant/scalac002-build.check
@@ -0,0 +1,14 @@
+
+quick.init:
+
+pack.init:
+
+latest.init:
+
+installed.init:
+
+init:
+
+build:
+ [mkdir] Created dir: [...]/files/ant/scalac002-ant.obj
+ [scalac] Compiling 1 source file to [...]/files/ant/scalac002-ant.obj
diff --git a/test/files/ant/scalac002-build.xml b/test/files/ant/scalac002-build.xml
new file mode 100644
index 0000000..07628af
--- /dev/null
+++ b/test/files/ant/scalac002-build.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project name="scalac002" default="run">
+
+ <import file="${basedir}/imported.xml"/>
+
+<!-- ===========================================================================
+BUILD
+============================================================================ -->
+
+ <target name="build" depends="init">
+ <echo level="verbose" message="build.dir=${build.dir}"/>
+ <mkdir dir="${build.dir}"/>
+ <path id="source.ref">
+ <pathelement location="${source.dir}"/>
+ </path>
+ <scalac
+ srcref="source.ref"
+ includes="**/${ant.project.name}*.scala"
+ destdir="${build.dir}"
+ classpathref="build.classpath"
+ />
+ <echo level="verbose" message="log.file=${log.file}"/>
+ <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
+ </target>
+
+</project>
+
diff --git a/test/files/ant/scalac002.scala b/test/files/ant/scalac002.scala
new file mode 100644
index 0000000..6ede598
--- /dev/null
+++ b/test/files/ant/scalac002.scala
@@ -0,0 +1,7 @@
+package test
+
+object Main {
+ def main(args: Array[String]) {
+ println(args mkString " ")
+ }
+}
diff --git a/test/files/ant/scalac003-build.check b/test/files/ant/scalac003-build.check
new file mode 100644
index 0000000..7b0d336
--- /dev/null
+++ b/test/files/ant/scalac003-build.check
@@ -0,0 +1,14 @@
+
+quick.init:
+
+pack.init:
+
+latest.init:
+
+installed.init:
+
+init:
+
+build:
+ [mkdir] Created dir: [...]/files/ant/scalac003-ant.obj
+ [scalac] Compiling 1 source file to [...]/files/ant/scalac003-ant.obj
diff --git a/test/files/ant/scalac003-build.xml b/test/files/ant/scalac003-build.xml
new file mode 100644
index 0000000..1d70aa1
--- /dev/null
+++ b/test/files/ant/scalac003-build.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project name="scalac003" default="run">
+
+ <import file="${basedir}/imported.xml"/>
+
+<!-- ===========================================================================
+BUILD
+============================================================================ -->
+
+ <target name="build" depends="init">
+ <echo level="verbose" message="build.dir=${build.dir}"/>
+ <mkdir dir="${build.dir}"/>
+ <scalac
+ destdir="${build.dir}"
+ classpathref="build.classpath">
+ <src path="${source.dir}"/>
+ <include name="**/${ant.project.name}*.scala"/>
+ </scalac>
+ <echo level="verbose" message="log.file=${log.file}"/>
+ <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
+ </target>
+
+</project>
+
diff --git a/test/files/ant/scalac003.scala b/test/files/ant/scalac003.scala
new file mode 100644
index 0000000..6ede598
--- /dev/null
+++ b/test/files/ant/scalac003.scala
@@ -0,0 +1,7 @@
+package test
+
+object Main {
+ def main(args: Array[String]) {
+ println(args mkString " ")
+ }
+}
diff --git a/test/files/ant/scalac004-build.check b/test/files/ant/scalac004-build.check
new file mode 100644
index 0000000..ffe9e8c
--- /dev/null
+++ b/test/files/ant/scalac004-build.check
@@ -0,0 +1,24 @@
+
+quick.init:
+
+pack.init:
+
+latest.init:
+
+installed.init:
+
+init:
+
+build:
+ [mkdir] Created dir: [...]/files/ant/scalac004-ant.obj
+ [scalac] Compiling 1 source file to [...]/files/ant/scalac004-ant.obj
+ [scalac] [...]/files/ant/scalac004.scala:9: warning: method exit in object Predef is deprecated: Use sys.exit(status) instead
+ [scalac] Predef.exit(0) //deprecated in 2.9.0
+ [scalac] ^
+ [scalac] [...]/files/ant/scalac004.scala:6: warning: match is not exhaustive!
+ [scalac] missing combination Nil
+ [scalac]
+ [scalac] xs match { //(xs: @unchecked) match {
+ [scalac] ^
+ [scalac] two warnings found
+ [scalac] Compile succeeded with 2 warnings; see the compiler output for details.
diff --git a/test/files/ant/scalac004-build.xml b/test/files/ant/scalac004-build.xml
new file mode 100644
index 0000000..66c19a3
--- /dev/null
+++ b/test/files/ant/scalac004-build.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project name="scalac004" default="run">
+
+ <import file="${basedir}/imported.xml"/>
+
+<!-- ===========================================================================
+BUILD
+============================================================================ -->
+
+ <target name="build" depends="init">
+ <echo level="verbose" message="build.dir=${build.dir}"/>
+ <mkdir dir="${build.dir}"/>
+ <scalac
+ deprecation="yes" unchecked="yes"
+ srcdir="${source.dir}"
+ includes="**/${ant.project.name}*.scala"
+ destdir="${build.dir}"
+ classpathref="build.classpath"
+ />
+ <echo level="verbose" message="log.file=${log.file}"/>
+ <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
+ </target>
+
+</project>
+
diff --git a/test/files/ant/scalac004.scala b/test/files/ant/scalac004.scala
new file mode 100644
index 0000000..66b2ba7
--- /dev/null
+++ b/test/files/ant/scalac004.scala
@@ -0,0 +1,11 @@
+package test
+
+object Main {
+ def main(args: Array[String]) {
+ val xs = List(1, 2, 3, 4)
+ xs match { //(xs: @unchecked) match {
+ case x::xs => println(x)
+ }
+ Predef.exit(0) //deprecated in 2.9.0
+ }
+}
diff --git a/test/files/ant/scaladoc-build.check b/test/files/ant/scaladoc-build.check
new file mode 100644
index 0000000..1c82456
--- /dev/null
+++ b/test/files/ant/scaladoc-build.check
@@ -0,0 +1,15 @@
+
+quick.init:
+
+pack.init:
+
+latest.init:
+
+installed.init:
+
+init:
+
+build:
+ [mkdir] Created dir: [...]/files/ant/scaladoc-ant.obj
+ [scaladoc] Documenting 1 source file to [...]/files/ant/scaladoc-ant.obj
+ [scaladoc] model contains 3 documentable templates
diff --git a/test/files/ant/scaladoc-build.xml b/test/files/ant/scaladoc-build.xml
index 839c7bd..fb4dc6f 100644
--- a/test/files/ant/scaladoc-build.xml
+++ b/test/files/ant/scaladoc-build.xml
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
-<project name="scaladoc" default="run" basedir=".">
+<project name="scaladoc" default="run">
<import file="${basedir}/imported.xml"/>
@@ -18,13 +18,8 @@ BUILD
destdir="${build.dir}"
classpathref="build.classpath"
/>
- <dirname property="log.dir" file="${build.dir}"/>
- <echo level="verbose" message="log.dir=${log.dir}"/>
- <replace
- file="${log.dir}/${ant.project.name}-ant.log"
- token="${log.dir}"
- value="[...]/files/ant"
- />
+ <echo level="verbose" message="log.file=${log.file}"/>
+ <replace file="${log.file}" token="${log.dir}" value="[...]/files/ant"/>
</target>
</project>
diff --git a/test/files/ant/scaladoc.check b/test/files/ant/scaladoc.check
deleted file mode 100644
index a404edc..0000000
--- a/test/files/ant/scaladoc.check
+++ /dev/null
@@ -1,13 +0,0 @@
-Buildfile: [...]/files/ant/scaladoc-build.xml
-
-quick.init:
-
-latest.init:
-
-installed.init:
-
-init:
-
-build:
- [mkdir] Created dir: [...]/files/ant/scaladoc-ant.obj
- [scaladoc] Documenting 1 source file to [...]/files/ant/scaladoc-ant.obj
diff --git a/test/files/ant/scaladoc.scala b/test/files/ant/scaladoc.scala
index 47131da..6ede598 100644
--- a/test/files/ant/scaladoc.scala
+++ b/test/files/ant/scaladoc.scala
@@ -1,6 +1,7 @@
package test
object Main {
- def main(args: Array[String]): Unit =
- Console.println(args.toList)
+ def main(args: Array[String]) {
+ println(args mkString " ")
+ }
}
diff --git a/test/files/bench/equality/eq.scala b/test/files/bench/equality/eq.scala
old mode 100644
new mode 100755
index 4e57a81..8ac5b5e
--- a/test/files/bench/equality/eq.scala
+++ b/test/files/bench/equality/eq.scala
@@ -1,34 +1,34 @@
-object eq extends testing.Benchmark {
-
- def eqtest[T](creator: Int => T, n: Int): Int = {
- val elems = Array.tabulate[AnyRef](n)(i => creator(i % 2).asInstanceOf[AnyRef])
-
- var sum = 0
- var i = 0
- while (i < n) {
- var j = 0
- while (j < n) {
- if (elems(i) eq elems(j)) sum += 1
- j += 1
- }
- i += 1
- }
- sum
- }
-
- val obj1 = new Object
- val obj2 = new Object
-
- def run() {
- var sum = 0
- sum += eqtest(x => if (x == 0) obj1 else obj2, 2000)
- sum += eqtest(x => x, 1000)
- sum += eqtest(x => x.toChar, 550)
- sum += eqtest(x => x.toByte, 550)
- sum += eqtest(x => x.toLong, 550)
- sum += eqtest(x => x.toShort, 100)
- sum += eqtest(x => x.toFloat, 100)
- sum += eqtest(x => x.toDouble, 100)
- assert(sum == 2958950)
- }
-}
+object eq extends testing.Benchmark {
+
+ def eqtest[T](creator: Int => T, n: Int): Int = {
+ val elems = Array.tabulate[AnyRef](n)(i => creator(i % 2).asInstanceOf[AnyRef])
+
+ var sum = 0
+ var i = 0
+ while (i < n) {
+ var j = 0
+ while (j < n) {
+ if (elems(i) eq elems(j)) sum += 1
+ j += 1
+ }
+ i += 1
+ }
+ sum
+ }
+
+ val obj1 = new Object
+ val obj2 = new Object
+
+ def run() {
+ var sum = 0
+ sum += eqtest(x => if (x == 0) obj1 else obj2, 2000)
+ sum += eqtest(x => x, 1000)
+ sum += eqtest(x => x.toChar, 550)
+ sum += eqtest(x => x.toByte, 550)
+ sum += eqtest(x => x.toLong, 550)
+ sum += eqtest(x => x.toShort, 100)
+ sum += eqtest(x => x.toFloat, 100)
+ sum += eqtest(x => x.toDouble, 100)
+ assert(sum == 2958950)
+ }
+}
diff --git a/test/files/bench/equality/eqeq.scala b/test/files/bench/equality/eqeq.scala
old mode 100644
new mode 100755
index e1fda69..afccece
--- a/test/files/bench/equality/eqeq.scala
+++ b/test/files/bench/equality/eqeq.scala
@@ -1,46 +1,46 @@
-/** benchmark for testing equality.
- * Mix: == between non-numbers ith Object.equals as equality: 66%
- * 50% of these are tests where eq is true.
- * == between boxed integers: 17%
- * == between boxed characters: 5%
- * == between boxed bytes: 5%
- * == between boxed longs: 5%
- * == between boxed shorts: < 1%
- * == between boxed floats: < 1%
- * == between boxed doubles: < 1%
- * In all cases 50% of the tests return true.
- */
-object eqeq extends testing.Benchmark {
-
- def eqeqtest[T](creator: Int => T, n: Int): Int = {
- val elems = Array.tabulate[AnyRef](n)(i => creator(i % 2).asInstanceOf[AnyRef])
-
- var sum = 0
- var i = 0
- while (i < n) {
- var j = 0
- while (j < n) {
- if (elems(i) == elems(j)) sum += 1
- j += 1
- }
- i += 1
- }
- sum
- }
-
- val obj1 = new Object
- val obj2 = new Object
-
- def run() {
- var sum = 0
- sum += eqeqtest(x => if (x == 0) obj1 else obj2, 2000)
- sum += eqeqtest(x => x, 1000)
- sum += eqeqtest(x => x.toChar, 550)
- sum += eqeqtest(x => x.toByte, 550)
- sum += eqeqtest(x => x.toLong, 550)
- sum += eqeqtest(x => x.toShort, 100)
- sum += eqeqtest(x => x.toFloat, 100)
- sum += eqeqtest(x => x.toDouble, 100)
- assert(sum == 2968750)
- }
-}
+/** benchmark for testing equality.
+ * Mix: == between non-numbers ith Object.equals as equality: 66%
+ * 50% of these are tests where eq is true.
+ * == between boxed integers: 17%
+ * == between boxed characters: 5%
+ * == between boxed bytes: 5%
+ * == between boxed longs: 5%
+ * == between boxed shorts: < 1%
+ * == between boxed floats: < 1%
+ * == between boxed doubles: < 1%
+ * In all cases 50% of the tests return true.
+ */
+object eqeq extends testing.Benchmark {
+
+ def eqeqtest[T](creator: Int => T, n: Int): Int = {
+ val elems = Array.tabulate[AnyRef](n)(i => creator(i % 2).asInstanceOf[AnyRef])
+
+ var sum = 0
+ var i = 0
+ while (i < n) {
+ var j = 0
+ while (j < n) {
+ if (elems(i) == elems(j)) sum += 1
+ j += 1
+ }
+ i += 1
+ }
+ sum
+ }
+
+ val obj1 = new Object
+ val obj2 = new Object
+
+ def run() {
+ var sum = 0
+ sum += eqeqtest(x => if (x == 0) obj1 else obj2, 2000)
+ sum += eqeqtest(x => x, 1000)
+ sum += eqeqtest(x => x.toChar, 550)
+ sum += eqeqtest(x => x.toByte, 550)
+ sum += eqeqtest(x => x.toLong, 550)
+ sum += eqeqtest(x => x.toShort, 100)
+ sum += eqeqtest(x => x.toFloat, 100)
+ sum += eqeqtest(x => x.toDouble, 100)
+ assert(sum == 2968750)
+ }
+}
diff --git a/test/files/buildmanager/overloaded_1/A.scala b/test/files/buildmanager/overloaded_1/A.scala
deleted file mode 100644
index 33b63b8..0000000
--- a/test/files/buildmanager/overloaded_1/A.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-trait As {
- trait C extends D {
- override def foo = this /// Shouldn't cause the change
- override def foo(act: List[D]) = this
- }
-
- abstract class D{
- def foo: D = this
- def foo(act: List[D]) = this
- }
-}
diff --git a/test/files/buildmanager/t2556_1/t2556_1.check b/test/files/buildmanager/t2556_1/t2556_1.check
index dc9437f..2e501c8 100644
--- a/test/files/buildmanager/t2556_1/t2556_1.check
+++ b/test/files/buildmanager/t2556_1/t2556_1.check
@@ -3,10 +3,10 @@ compiling Set(A.scala, B.scala)
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Definition(A.x))[method x changed from (i: Int)java.lang.String to (i: java.lang.String)java.lang.String flags: <method>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)java.lang.String to (i: java.lang.String)java.lang.String flags: <method>]]
+Changes: Map(class A -> List(Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: <method>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: <method>]]
compiling Set(B.scala)
-B.scala:2: error: overriding method x in class A of type (i: String)java.lang.String;
+B.scala:2: error: overriding method x in class A of type (i: String)String;
method x needs `override' modifier
def x(s: String) = s+"5"
^
diff --git a/test/files/buildmanager/t2556_2/t2556_2.check b/test/files/buildmanager/t2556_2/t2556_2.check
index a4d6724..cae4f72 100644
--- a/test/files/buildmanager/t2556_2/t2556_2.check
+++ b/test/files/buildmanager/t2556_2/t2556_2.check
@@ -3,11 +3,11 @@ compiling Set(A.scala, B.scala, C.scala)
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Definition(A.x))[method x changed from (i: Int)java.lang.String to (i: java.lang.String)java.lang.String flags: <method>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)java.lang.String to (i: java.lang.String)java.lang.String flags: <method>]]
-invalidate C.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)java.lang.String to (i: java.lang.String)java.lang.String flags: <method>]]
+Changes: Map(class A -> List(Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: <method>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: <method>]]
+invalidate C.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)String to (i: String)String flags: <method>]]
compiling Set(B.scala, C.scala)
-C.scala:2: error: overriding method x in class A of type (i: String)java.lang.String;
+C.scala:2: error: overriding method x in class A of type (i: String)String;
method x needs `override' modifier
def x(s: String) = s+"5"
^
diff --git a/test/files/buildmanager/t2556_3/t2556_3.check b/test/files/buildmanager/t2556_3/t2556_3.check
index 01dfa79..34f90f7 100644
--- a/test/files/buildmanager/t2556_3/t2556_3.check
+++ b/test/files/buildmanager/t2556_3/t2556_3.check
@@ -3,16 +3,16 @@ compiling Set(A.scala, B.scala, C.scala)
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(class A -> List(), class B -> List(Changed(Class(B))[List((A,java.lang.Object), (ScalaObject,ScalaObject))]))
-invalidate C.scala because parents have changed [Changed(Class(B))[List((A,java.lang.Object), (ScalaObject,ScalaObject))]]
+Changes: Map(class A -> List(), class B -> List(Changed(Class(B))[List((A,Object))]))
+invalidate C.scala because parents have changed [Changed(Class(B))[List((A,Object))]]
invalidate B.scala because it references invalid (no longer inherited) definition [ParentChanged(Class(C))]
compiling Set(B.scala, C.scala)
B.scala:3: error: type mismatch;
found : C
- required: ?{val x: ?}
+ required: ?{def x: ?}
Note that implicit conversions are not applicable because they are ambiguous:
both method any2Ensuring in object Predef of type [A](x: A)Ensuring[A]
and method any2ArrowAssoc in object Predef of type [A](x: A)ArrowAssoc[A]
- are possible conversion functions from C to ?{val x: ?}
+ are possible conversion functions from C to ?{def x: ?}
println( (new C).x )
^
diff --git a/test/files/buildmanager/t2557/t2557.check b/test/files/buildmanager/t2557/t2557.check
index f51e801..736ef36 100644
--- a/test/files/buildmanager/t2557/t2557.check
+++ b/test/files/buildmanager/t2557/t2557.check
@@ -3,8 +3,8 @@ compiling Set(A.scala, B.scala, C.scala, D.scala, E.scala, F.scala)
Changes: Map()
builder > D.scala
compiling Set(D.scala)
-Changes: Map(trait D -> List(Changed(Class(D))[List((java.lang.Object,java.lang.Object), (C,B), (B,C))]))
-invalidate E.scala because parents have changed [Changed(Class(D))[List((java.lang.Object,java.lang.Object), (C,B), (B,C))]]
-invalidate F.scala because parents have changed [Changed(Class(D))[List((java.lang.Object,java.lang.Object), (C,B), (B,C))]]
+Changes: Map(trait D -> List(Changed(Class(D))[List((Object,Object), (C,B), (B,C))]))
+invalidate E.scala because parents have changed [Changed(Class(D))[List((Object,Object), (C,B), (B,C))]]
+invalidate F.scala because parents have changed [Changed(Class(D))[List((Object,Object), (C,B), (B,C))]]
compiling Set(E.scala, F.scala)
Changes: Map(object F -> List(), trait E -> List())
diff --git a/test/files/buildmanager/t2559/D.scala b/test/files/buildmanager/t2559/D.scala
index 906b69a..62dc542 100644
--- a/test/files/buildmanager/t2559/D.scala
+++ b/test/files/buildmanager/t2559/D.scala
@@ -1,8 +1,4 @@
object D {
- def x(a: A) =
- a match {
- case _: B => ()
- case _: C => ()
- }
+ def x(a: A) = if (a.isInstanceOf[B] || a.isInstanceOf[C]) ()
}
diff --git a/test/files/buildmanager/t2559/t2559.check b/test/files/buildmanager/t2559/t2559.check
index 752278f..4d43838 100644
--- a/test/files/buildmanager/t2559/t2559.check
+++ b/test/files/buildmanager/t2559/t2559.check
@@ -6,9 +6,4 @@ compiling Set(A.scala)
Changes: Map(class B -> List(), class C -> List(), class E -> List(Changed(Class(A))[class E extends a sealed trait A]), trait A -> List())
invalidate D.scala because it references changed class [Changed(Class(A))[class E extends a sealed trait A]]
compiling Set(D.scala)
-D.scala:3: warning: match is not exhaustive!
-missing combination E
-
- a match {
- ^
Changes: Map(object D -> List())
diff --git a/test/files/buildmanager/t2562/t2562.check b/test/files/buildmanager/t2562/t2562.check
index 813d273..74575f2 100644
--- a/test/files/buildmanager/t2562/t2562.check
+++ b/test/files/buildmanager/t2562/t2562.check
@@ -3,10 +3,10 @@ compiling Set(A.scala, B.scala)
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(object A -> List(Changed(Definition(A.x3))[method x3 changed from ()Int to ()java.lang.String flags: <method>]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.x3))[method x3 changed from ()Int to ()java.lang.String flags: <method>]]
+Changes: Map(object A -> List(Changed(Definition(A.x3))[method x3 changed from ()Int to ()String flags: <method> <triedcooking>]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.x3))[method x3 changed from ()Int to ()String flags: <method> <triedcooking>]]
compiling Set(B.scala)
-Changes: Map(object B -> List(Changed(Definition(B.x2))[method x2 changed from ()Int to ()java.lang.String flags: <method>]))
-invalidate A.scala because it references changed definition [Changed(Definition(B.x2))[method x2 changed from ()Int to ()java.lang.String flags: <method>]]
+Changes: Map(object B -> List(Changed(Definition(B.x2))[method x2 changed from ()Int to ()String flags: <method> <triedcooking>]))
+invalidate A.scala because it references changed definition [Changed(Definition(B.x2))[method x2 changed from ()Int to ()String flags: <method> <triedcooking>]]
compiling Set(A.scala, B.scala)
-Changes: Map(object A -> List(Changed(Definition(A.x0))[method x0 changed from ()Int to ()java.lang.String flags: <method>], Changed(Definition(A.x1))[method x1 changed from ()Int to ()java.lang.String flags: <method>], Changed(Definition(A.x2))[method x2 changed from ()Int to ()java.lang.String flags: <method>]), object B -> List(Changed(Definition(B.x0))[method x0 changed from ()Int to ()java.lang.String flags: <method>], Changed(Definition(B.x1))[method x1 changed from ()Int to ()java [...]
+Changes: Map(object A -> List(Changed(Definition(A.x0))[method x0 changed from ()Int to ()String flags: <method>], Changed(Definition(A.x1))[method x1 changed from ()Int to ()String flags: <method> <triedcooking>], Changed(Definition(A.x2))[method x2 changed from ()Int to ()String flags: <method> <triedcooking>]), object B -> List(Changed(Definition(B.x0))[method x0 changed from ()Int to ()String flags: <method>], Changed(Definition(B.x1))[method x1 changed from ()Int to ()String flags: [...]
diff --git a/test/files/buildmanager/t2649/t2649.check b/test/files/buildmanager/t2649/t2649.check
index 390f284..d0f41f3 100644
--- a/test/files/buildmanager/t2649/t2649.check
+++ b/test/files/buildmanager/t2649/t2649.check
@@ -3,7 +3,7 @@ compiling Set(A.scala, B.scala)
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (zz: Int, yy: Int)Int to (yy: Int, zz: Int)Int flags: <method>]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (zz: Int, yy: Int)Int to (yy: Int, zz: Int)Int flags: <method>]]
+Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (zz: Int, yy: Int)Int to (yy: Int, zz: Int)Int flags: <method> <triedcooking>]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (zz: Int, yy: Int)Int to (yy: Int, zz: Int)Int flags: <method> <triedcooking>]]
compiling Set(B.scala)
Changes: Map(object B -> List())
diff --git a/test/files/buildmanager/t2650_1/t2650_1.check b/test/files/buildmanager/t2650_1/t2650_1.check
index ecddb33..2f9dd12 100644
--- a/test/files/buildmanager/t2650_1/t2650_1.check
+++ b/test/files/buildmanager/t2650_1/t2650_1.check
@@ -1,5 +1,6 @@
builder > A.scala B.scala
compiling Set(A.scala, B.scala)
+warning: there were 1 feature warning(s); re-run with -feature for details
Changes: Map()
builder > A.scala
compiling Set(A.scala)
diff --git a/test/files/buildmanager/t2650_2/t2650_2.check b/test/files/buildmanager/t2650_2/t2650_2.check
index 7ab72fb..53a0287 100644
--- a/test/files/buildmanager/t2650_2/t2650_2.check
+++ b/test/files/buildmanager/t2650_2/t2650_2.check
@@ -8,6 +8,7 @@ invalidate B.scala because inherited method changed [Changed(Definition(A.S))[ty
compiling Set(B.scala)
B.scala:3: error: type mismatch;
found : B.this.S
+ (which expands to) Long
required: Int
def y: Int = x
^
diff --git a/test/files/buildmanager/t2650_3/t2650_3.check b/test/files/buildmanager/t2650_3/t2650_3.check
index 27be2f5..5c6326d 100644
--- a/test/files/buildmanager/t2650_3/t2650_3.check
+++ b/test/files/buildmanager/t2650_3/t2650_3.check
@@ -8,6 +8,7 @@ invalidate B.scala because it references changed definition [Changed(Definition(
compiling Set(B.scala)
B.scala:2: error: type mismatch;
found : a.T
+ (which expands to) Long
required: Int
def x(a: A): Int = a.x
^
diff --git a/test/files/buildmanager/t2650_4/t2650_4.check b/test/files/buildmanager/t2650_4/t2650_4.check
index ba092d0..a4aeadd 100644
--- a/test/files/buildmanager/t2650_4/t2650_4.check
+++ b/test/files/buildmanager/t2650_4/t2650_4.check
@@ -8,6 +8,7 @@ invalidate B.scala because it references changed definition [Changed(Definition(
compiling Set(B.scala)
B.scala:2: error: type mismatch;
found : a.T2
+ (which expands to) Long
required: Int
def x(a: A): Int = a.x
^
diff --git a/test/files/buildmanager/t2651_3/t2651_3.check b/test/files/buildmanager/t2651_3/t2651_3.check
index d4bac19..2a60e3d 100644
--- a/test/files/buildmanager/t2651_3/t2651_3.check
+++ b/test/files/buildmanager/t2651_3/t2651_3.check
@@ -3,4 +3,4 @@ compiling Set(A.scala)
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()S flags: <deferred> <method>]))
+Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()S flags: <method> <deferred>]))
diff --git a/test/files/buildmanager/t2651_4/t2651_4.check b/test/files/buildmanager/t2651_4/t2651_4.check
index c4ce382..74e5d8f 100644
--- a/test/files/buildmanager/t2651_4/t2651_4.check
+++ b/test/files/buildmanager/t2651_4/t2651_4.check
@@ -3,8 +3,8 @@ compiling Set(A.scala, B.scala)
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()T flags: <deferred> <method>], Changed(Definition(A.y))[method y changed from (a: T)Unit to (a: T)Unit flags: <deferred> <method>], Changed(Definition(A.z))[method z changed from [B <: T]()Unit to [B <: T]()Unit flags: <deferred> <method>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from ()T to ()T flags: <deferred> <method>]]
+Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()T flags: <method> <deferred> <triedcooking>], Changed(Definition(A.y))[method y changed from (a: T)Unit to (a: T)Unit flags: <method> <deferred>], Changed(Definition(A.z))[method z changed from [B <: T]()Unit to [B <: T]()Unit flags: <method> <deferred>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from ()T to ()T flags: <method> <deferred> <triedcooking>]]
compiling Set(B.scala)
B.scala:2: error: type mismatch;
found : Int(3)
diff --git a/test/files/buildmanager/t2652/t2652.check b/test/files/buildmanager/t2652/t2652.check
deleted file mode 100644
index f517f9e..0000000
--- a/test/files/buildmanager/t2652/t2652.check
+++ /dev/null
@@ -1,9 +0,0 @@
-builder > A.scala B.scala
-compiling Set(A.scala, B.scala)
-Changes: Map()
-builder > A.scala
-compiling Set(A.scala)
-Changes: Map(class A -> List(Added(Definition(A.x$mBc$sp)), Added(Definition(A.x$mCc$sp)), Added(Definition(A.x$mDc$sp)), Added(Definition(A.x$mFc$sp)), Added(Definition(A.x$mIc$sp)), Added(Definition(A.x$mJc$sp)), Added(Definition(A.x$mSc$sp)), Added(Definition(A.x$mVc$sp)), Added(Definition(A.x$mZc$sp)), Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: <method>]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: <method>]]
-compiling Set(B.scala)
-Changes: Map(object B -> List())
diff --git a/test/files/buildmanager/t2653/t2653.check b/test/files/buildmanager/t2653/t2653.check
index 6a4fc0e..3678152 100644
--- a/test/files/buildmanager/t2653/t2653.check
+++ b/test/files/buildmanager/t2653/t2653.check
@@ -3,7 +3,7 @@ compiling Set(A.scala, B.scala)
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Class(A))[ tparams: List((type T,type T))], Changed(Definition(A.<init>))[constructor A changed from ()A[T] to ()A[T] flags: <method>]))
+Changes: Map(class A -> List(Changed(Class(A))[ tparams: List((type T,type T))], Changed(Definition(A.<init>))[constructor A changed from ()A[T] to ()A[T] flags: <method> <triedcooking>]))
invalidate B.scala because it references changed class [Changed(Class(A))[ tparams: List((type T,type T))]]
compiling Set(B.scala)
B.scala:2: error: type mismatch;
diff --git a/test/files/buildmanager/t2655/t2655.check b/test/files/buildmanager/t2655/t2655.check
index a4a071e..41ce65a 100644
--- a/test/files/buildmanager/t2655/t2655.check
+++ b/test/files/buildmanager/t2655/t2655.check
@@ -3,11 +3,11 @@ compiling Set(A.scala, B.scala)
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: <method>]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: <method>]]
+Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: <method> <triedcooking>]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: <method> <triedcooking>]]
compiling Set(B.scala)
B.scala:2: error: type mismatch;
- found : java.lang.String("3")
+ found : String("3")
required: () => String
val x = A.x("3")
^
diff --git a/test/files/buildmanager/t2657/t2657.check b/test/files/buildmanager/t2657/t2657.check
index 9713f66..7bff078 100644
--- a/test/files/buildmanager/t2657/t2657.check
+++ b/test/files/buildmanager/t2657/t2657.check
@@ -1,10 +1,11 @@
builder > A.scala B.scala
compiling Set(A.scala, B.scala)
+warning: there were 1 feature warning(s); re-run with -feature for details
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Definition(A.y))[method y changed from (i: Int)java.lang.String to (i: Int)java.lang.String flags: implicit <method>]))
-invalidate B.scala because inherited method changed [Changed(Definition(A.y))[method y changed from (i: Int)java.lang.String to (i: Int)java.lang.String flags: implicit <method>]]
+Changes: Map(class A -> List(Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: <method> implicit <triedcooking>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: <method> implicit <triedcooking>]]
compiling Set(B.scala)
B.scala:2: error: type mismatch;
found : Int(3)
diff --git a/test/files/buildmanager/t2789/t2789.check b/test/files/buildmanager/t2789/t2789.check
index 78c5119..066561a 100644
--- a/test/files/buildmanager/t2789/t2789.check
+++ b/test/files/buildmanager/t2789/t2789.check
@@ -3,8 +3,8 @@ compiling Set(A.scala, B.scala)
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(class A -> List(Changed(Definition(A.e))[method e changed from ()E to ()E flags: implicit <method>]), class E -> List())
-invalidate B.scala because inherited method changed [Changed(Definition(A.e))[method e changed from ()E to ()E flags: implicit <method>]]
+Changes: Map(class A -> List(Changed(Definition(A.e))[method e changed from ()E to ()E flags: <method> implicit <triedcooking>]), class E -> List())
+invalidate B.scala because inherited method changed [Changed(Definition(A.e))[method e changed from ()E to ()E flags: <method> implicit <triedcooking>]]
compiling Set(B.scala)
B.scala:2: error: could not find implicit value for parameter y: E
val y = x(3)
diff --git a/test/files/buildmanager/t2790/t2790.check b/test/files/buildmanager/t2790/t2790.check
index 9d37cce..13d61da 100644
--- a/test/files/buildmanager/t2790/t2790.check
+++ b/test/files/buildmanager/t2790/t2790.check
@@ -3,12 +3,11 @@ compiling Set(A.scala, B.scala)
Changes: Map()
builder > A.scala
compiling Set(A.scala)
-Changes: Map(object A -> List(Added(Definition(A.x)), Changed(Definition(A.x))[value x changed from (f: java.lang.String, g: Int)Int to (f: java.lang.String, g: Int)Int <and> (f: Int, g: Int)Int flags: <method>]))
-invalidate B.scala because it references changed definition [Changed(Definition(A.x))[value x changed from (f: java.lang.String, g: Int)Int to (f: java.lang.String, g: Int)Int <and> (f: Int, g: Int)Int flags: <method>]]
+Changes: Map(object A -> List(Added(Definition(A.x)), Changed(Definition(A.x))[value x changed from (f: String, g: Int)Int to (f: String, g: Int)Int <and> (f: Int, g: Int)Int flags: <method>]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.x))[value x changed from (f: String, g: Int)Int to (f: String, g: Int)Int <and> (f: Int, g: Int)Int flags: <method>]]
compiling Set(B.scala)
B.scala:2: error: type mismatch;
found : Int(5)
required: String
-Error occurred in an application involving default arguments.
val y = A.x(5)
^
diff --git a/test/files/buildmanager/t2792/t2792.check b/test/files/buildmanager/t2792/t2792.check
index 68e14c6..00a2b83 100644
--- a/test/files/buildmanager/t2792/t2792.check
+++ b/test/files/buildmanager/t2792/t2792.check
@@ -9,3 +9,6 @@ compiling Set(A2.scala)
A2.scala:2: error: stable identifier required, but A.x found.
import A.x.y
^
+A2.scala:3: error: not found: value y
+ val z = y
+ ^
diff --git a/test/files/codelib/code.jar.desired.sha1 b/test/files/codelib/code.jar.desired.sha1
new file mode 100644
index 0000000..c4cc74c
--- /dev/null
+++ b/test/files/codelib/code.jar.desired.sha1
@@ -0,0 +1 @@
+e737b123d31eede5594ceda07caafed1673ec472 *code.jar
diff --git a/test/files/continuations-neg/function2.check b/test/files/continuations-neg/function2.check
index 4833057..4b1a622 100644
--- a/test/files/continuations-neg/function2.check
+++ b/test/files/continuations-neg/function2.check
@@ -1,6 +1,6 @@
function2.scala:11: error: type mismatch;
found : () => Int
- required: () => Int @util.continuations.package.cps[Int]
+ required: () => Int @scala.util.continuations.cpsParam[Int,Int]
val g: () => Int @cps[Int] = f
^
one error found
diff --git a/test/files/continuations-neg/infer0.scala b/test/files/continuations-neg/infer0.scala
deleted file mode 100644
index 9cf69c5..0000000
--- a/test/files/continuations-neg/infer0.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-// $Id$
-
-import scala.util.continuations._
-
-
-object Test {
-
- def test(x: => Int @cpsParam[String,Int]) = 7
-
- def main(args: Array[String]): Any = {
- test(8)
- }
-
-}
\ No newline at end of file
diff --git a/test/files/continuations-neg/lazy.check b/test/files/continuations-neg/lazy.check
index b8c6887..3c46054 100644
--- a/test/files/continuations-neg/lazy.check
+++ b/test/files/continuations-neg/lazy.check
@@ -1,6 +1,4 @@
-lazy.scala:5: error: type mismatch;
- found : Unit @scala.util.continuations.cpsParam[Unit,Unit]
- required: Unit
- def foo() = {
- ^
+lazy.scala:6: error: implementation restriction: cps annotations not allowed on lazy value definitions
+ lazy val x = shift((k:Unit=>Unit)=>k())
+ ^
one error found
diff --git a/test/files/continuations-neg/t1929.check b/test/files/continuations-neg/t1929.check
index f42c3a1..b04a5b9 100644
--- a/test/files/continuations-neg/t1929.check
+++ b/test/files/continuations-neg/t1929.check
@@ -1,6 +1,6 @@
t1929.scala:8: error: type mismatch;
- found : Int @scala.util.continuations.cpsParam[String,java.lang.String] @scala.util.continuations.cpsSynth
- required: Int @scala.util.continuations.cpsParam[Int,java.lang.String]
+ found : Int @scala.util.continuations.cpsParam[String,String] @scala.util.continuations.cpsSynth
+ required: Int @scala.util.continuations.cpsParam[Int,String]
reset {
^
one error found
diff --git a/test/files/continuations-neg/t3628.check b/test/files/continuations-neg/t3628.check
new file mode 100644
index 0000000..6e39055
--- /dev/null
+++ b/test/files/continuations-neg/t3628.check
@@ -0,0 +1,4 @@
+t3628.scala:4: error: not found: type Actor
+ val impl: Actor = actor {
+ ^
+one error found
diff --git a/test/pending/continuations-neg/t3628.scala b/test/files/continuations-neg/t3628.scala
similarity index 100%
rename from test/pending/continuations-neg/t3628.scala
rename to test/files/continuations-neg/t3628.scala
diff --git a/test/files/continuations-neg/t3718.check b/test/files/continuations-neg/t3718.check
new file mode 100644
index 0000000..659104c
--- /dev/null
+++ b/test/files/continuations-neg/t3718.check
@@ -0,0 +1,4 @@
+t3718.scala:2: error: cannot cps-transform malformed (possibly in shift/reset placement) expression
+ scala.util.continuations.reset((_: Any).##)
+ ^
+one error found
diff --git a/test/files/continuations-neg/t3718.scala b/test/files/continuations-neg/t3718.scala
new file mode 100644
index 0000000..a0fcb9d
--- /dev/null
+++ b/test/files/continuations-neg/t3718.scala
@@ -0,0 +1,3 @@
+object Test {
+ scala.util.continuations.reset((_: Any).##)
+}
diff --git a/test/files/continuations-neg/t5314-missing-result-type.check b/test/files/continuations-neg/t5314-missing-result-type.check
new file mode 100644
index 0000000..341e580
--- /dev/null
+++ b/test/files/continuations-neg/t5314-missing-result-type.check
@@ -0,0 +1,4 @@
+t5314-missing-result-type.scala:6: error: method bar has return statement; needs result type
+ def bar(x:Int) = return foo(x)
+ ^
+one error found
diff --git a/test/files/continuations-neg/t5314-missing-result-type.scala b/test/files/continuations-neg/t5314-missing-result-type.scala
new file mode 100644
index 0000000..d7c5043
--- /dev/null
+++ b/test/files/continuations-neg/t5314-missing-result-type.scala
@@ -0,0 +1,13 @@
+import scala.util.continuations._
+
+object Test extends App {
+ def foo(x:Int): Int @cps[Int] = x
+
+ def bar(x:Int) = return foo(x)
+
+ reset {
+ val res = bar(8)
+ println(res)
+ res
+ }
+}
diff --git a/test/files/continuations-neg/t5314-npe.check b/test/files/continuations-neg/t5314-npe.check
new file mode 100644
index 0000000..b5f024a
--- /dev/null
+++ b/test/files/continuations-neg/t5314-npe.check
@@ -0,0 +1,4 @@
+t5314-npe.scala:2: error: method bar has return statement; needs result type
+ def bar(x:Int) = { return x; x } // NPE
+ ^
+one error found
diff --git a/test/files/continuations-neg/t5314-npe.scala b/test/files/continuations-neg/t5314-npe.scala
new file mode 100644
index 0000000..2b5966e
--- /dev/null
+++ b/test/files/continuations-neg/t5314-npe.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ def bar(x:Int) = { return x; x } // NPE
+}
diff --git a/test/files/continuations-neg/t5314-return-reset.check b/test/files/continuations-neg/t5314-return-reset.check
new file mode 100644
index 0000000..4c817ae
--- /dev/null
+++ b/test/files/continuations-neg/t5314-return-reset.check
@@ -0,0 +1,4 @@
+t5314-return-reset.scala:14: error: return expression not allowed, since method calls CPS method
+ if (rnd.nextInt(100) > 50) return 5 // not allowed, since method is calling `reset`
+ ^
+one error found
diff --git a/test/files/continuations-neg/t5314-return-reset.scala b/test/files/continuations-neg/t5314-return-reset.scala
new file mode 100644
index 0000000..df9d58e
--- /dev/null
+++ b/test/files/continuations-neg/t5314-return-reset.scala
@@ -0,0 +1,21 @@
+import scala.util.continuations._
+import scala.util.Random
+
+object Test extends App {
+ val rnd = new Random
+
+ def foo(x: Int): Int @cps[Int] = shift { k => k(x) }
+
+ def bar(x: Int): Int @cps[Int] = return foo(x)
+
+ def caller(): Int = {
+ val v: Int = reset {
+ val res: Int = bar(8)
+ if (rnd.nextInt(100) > 50) return 5 // not allowed, since method is calling `reset`
+ 42
+ }
+ v
+ }
+
+ caller()
+}
diff --git a/test/files/continuations-neg/t5314-type-error.check b/test/files/continuations-neg/t5314-type-error.check
new file mode 100644
index 0000000..e66c9d8
--- /dev/null
+++ b/test/files/continuations-neg/t5314-type-error.check
@@ -0,0 +1,6 @@
+t5314-type-error.scala:7: error: type mismatch;
+ found : Int @scala.util.continuations.cpsParam[Int,Int]
+ required: Int @scala.util.continuations.cpsParam[String,String]
+ def bar(x:Int): Int @cps[String] = return foo(x)
+ ^
+one error found
diff --git a/test/files/continuations-neg/t5314-type-error.scala b/test/files/continuations-neg/t5314-type-error.scala
new file mode 100644
index 0000000..e36ce6c
--- /dev/null
+++ b/test/files/continuations-neg/t5314-type-error.scala
@@ -0,0 +1,17 @@
+import scala.util.continuations._
+
+object Test extends App {
+ def foo(x:Int): Int @cps[Int] = shift { k => k(x) }
+
+ // should be a type error
+ def bar(x:Int): Int @cps[String] = return foo(x)
+
+ def caller(): Unit = {
+ val v: String = reset {
+ val res: Int = bar(8)
+ "hello"
+ }
+ }
+
+ caller()
+}
diff --git a/test/files/continuations-neg/trycatch2.scala b/test/files/continuations-neg/trycatch2.scala
index d614191..d329a3b 100644
--- a/test/files/continuations-neg/trycatch2.scala
+++ b/test/files/continuations-neg/trycatch2.scala
@@ -12,7 +12,7 @@ object Test {
fatal[Int]
cpsIntStringInt
} catch {
- case ex =>
+ case ex: Throwable =>
cpsIntStringInt
}
@@ -20,7 +20,7 @@ object Test {
fatal[Int]
cpsIntStringInt
} catch {
- case ex =>
+ case ex: Throwable =>
cpsIntStringInt
}
@@ -30,4 +30,4 @@ object Test {
println(reset { foo2; "3" })
}
-}
\ No newline at end of file
+}
diff --git a/test/files/continuations-run/basics.check b/test/files/continuations-run/basics.check
old mode 100644
new mode 100755
diff --git a/test/files/continuations-run/basics.scala b/test/files/continuations-run/basics.scala
old mode 100644
new mode 100755
diff --git a/test/files/continuations-run/match2.scala b/test/files/continuations-run/match2.scala
index 8b0fb94..5092ce3 100644
--- a/test/files/continuations-run/match2.scala
+++ b/test/files/continuations-run/match2.scala
@@ -18,7 +18,7 @@ object Test {
}
- def main(args: Array[String]): Any = {
+ def main(args: Array[String]): Unit = {
println(reset(test1()))
println(reset(test2()))
}
diff --git a/test/files/continuations-run/shift-pct.check b/test/files/continuations-run/shift-pct.check
new file mode 100644
index 0000000..fb190e7
--- /dev/null
+++ b/test/files/continuations-run/shift-pct.check
@@ -0,0 +1,25 @@
+d = 1, d2 = 1.0, pct = 1.000
+d = 2, d2 = 4.0, pct = 0.500
+d = 3, d2 = 9.0, pct = 0.333
+d = 4, d2 = 16.0, pct = 0.250
+d = 5, d2 = 25.0, pct = 0.200
+d = 6, d2 = 36.0, pct = 0.167
+d = 7, d2 = 49.0, pct = 0.143
+d = 8, d2 = 64.0, pct = 0.125
+d = 9, d2 = 81.0, pct = 0.111
+d = 10, d2 = 100.0, pct = 0.100
+d = 11, d2 = 121.0, pct = 0.091
+d = 12, d2 = 144.0, pct = 0.083
+d = 13, d2 = 169.0, pct = 0.077
+d = 14, d2 = 196.0, pct = 0.071
+d = 15, d2 = 225.0, pct = 0.067
+d = 16, d2 = 256.0, pct = 0.063
+d = 17, d2 = 289.0, pct = 0.059
+d = 18, d2 = 324.0, pct = 0.056
+d = 19, d2 = 361.0, pct = 0.053
+d = 20, d2 = 400.0, pct = 0.050
+d = 21, d2 = 441.0, pct = 0.048
+d = 22, d2 = 484.0, pct = 0.045
+d = 23, d2 = 529.0, pct = 0.043
+d = 24, d2 = 576.0, pct = 0.042
+d = 25, d2 = 625.0, pct = 0.040
diff --git a/test/files/continuations-run/shift-pct.scala b/test/files/continuations-run/shift-pct.scala
new file mode 100644
index 0000000..7ef9922
--- /dev/null
+++ b/test/files/continuations-run/shift-pct.scala
@@ -0,0 +1,30 @@
+import scala.util.continuations._
+
+object Test {
+ abstract class IfReturnRepro {
+ def s1: Double @cpsParam[Any, Unit]
+ def s2: Double @cpsParam[Any, Unit]
+
+ def p(i: Int): Double @cpsParam[Unit, Any] = {
+ val px = s1
+ val pct = if (px > 100) px else px / s2
+ println("pct = %.3f".format(pct))
+ pct
+ }
+ }
+
+ def main(args: Array[String]) : Unit = {
+ var d: Double = 0d
+ def d2 = d * d
+
+ val irr = new IfReturnRepro {
+ def s1 = shift(f => f(d))
+ def s2 = shift(f => f(d2))
+ }
+ 1 to 25 foreach { i =>
+ d = i
+ print("d = " + i + ", d2 = " + d2 + ", ")
+ run(irr p i)
+ }
+ }
+}
diff --git a/test/files/continuations-run/t5314-2.check b/test/files/continuations-run/t5314-2.check
new file mode 100644
index 0000000..35b3c93
--- /dev/null
+++ b/test/files/continuations-run/t5314-2.check
@@ -0,0 +1,5 @@
+8
+hi
+8
+from try
+8
diff --git a/test/files/continuations-run/t5314-2.scala b/test/files/continuations-run/t5314-2.scala
new file mode 100644
index 0000000..8a896de
--- /dev/null
+++ b/test/files/continuations-run/t5314-2.scala
@@ -0,0 +1,44 @@
+import scala.util.continuations._
+
+class ReturnRepro {
+ def s1: Int @cps[Any] = shift { k => k(5) }
+ def caller = reset { println(p(3)) }
+ def caller2 = reset { println(p2(3)) }
+ def caller3 = reset { println(p3(3)) }
+
+ def p(i: Int): Int @cps[Any] = {
+ val v= s1 + 3
+ return v
+ }
+
+ def p2(i: Int): Int @cps[Any] = {
+ val v = s1 + 3
+ if (v > 0) {
+ println("hi")
+ return v
+ } else {
+ println("hi")
+ return 8
+ }
+ }
+
+ def p3(i: Int): Int @cps[Any] = {
+ val v = s1 + 3
+ try {
+ println("from try")
+ return v
+ } catch {
+ case e: Exception =>
+ println("from catch")
+ return 7
+ }
+ }
+
+}
+
+object Test extends App {
+ val repro = new ReturnRepro
+ repro.caller
+ repro.caller2
+ repro.caller3
+}
diff --git a/test/files/continuations-run/t5314-3.check b/test/files/continuations-run/t5314-3.check
new file mode 100644
index 0000000..71489f0
--- /dev/null
+++ b/test/files/continuations-run/t5314-3.check
@@ -0,0 +1,4 @@
+enter return expr
+8
+hi
+8
diff --git a/test/files/continuations-run/t5314-3.scala b/test/files/continuations-run/t5314-3.scala
new file mode 100644
index 0000000..62c547f
--- /dev/null
+++ b/test/files/continuations-run/t5314-3.scala
@@ -0,0 +1,27 @@
+import scala.util.continuations._
+
+class ReturnRepro {
+ def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) }
+ def caller = reset { println(p(3)) }
+ def caller2 = reset { println(p2(3)) }
+
+ def p(i: Int): Int @cpsParam[Unit, Any] = {
+ val v= s1 + 3
+ return { println("enter return expr"); v }
+ }
+
+ def p2(i: Int): Int @cpsParam[Unit, Any] = {
+ val v = s1 + 3
+ if (v > 0) {
+ return { println("hi"); v }
+ } else {
+ return { println("hi"); 8 }
+ }
+ }
+}
+
+object Test extends App {
+ val repro = new ReturnRepro
+ repro.caller
+ repro.caller2
+}
diff --git a/test/files/continuations-run/t5314-with-if.check b/test/files/continuations-run/t5314-with-if.check
new file mode 100644
index 0000000..7f8f011
--- /dev/null
+++ b/test/files/continuations-run/t5314-with-if.check
@@ -0,0 +1 @@
+7
diff --git a/test/files/continuations-run/t5314-with-if.scala b/test/files/continuations-run/t5314-with-if.scala
new file mode 100644
index 0000000..5840199
--- /dev/null
+++ b/test/files/continuations-run/t5314-with-if.scala
@@ -0,0 +1,17 @@
+import scala.util.continuations._
+
+object Test extends App {
+
+ def foo(x:Int): Int @cps[Int] = 7
+
+ def bar(x:Int): Int @cps[Int] = {
+ val v = foo(x)
+ if (v > 0)
+ return v
+ else
+ return 10
+ }
+
+ println(reset { bar(10) })
+
+}
diff --git a/test/files/continuations-run/t5314.check b/test/files/continuations-run/t5314.check
new file mode 100644
index 0000000..4b35d8e
--- /dev/null
+++ b/test/files/continuations-run/t5314.check
@@ -0,0 +1,8 @@
+7
+7
+7
+8
+8
+hi
+8
+8
diff --git a/test/files/continuations-run/t5314.scala b/test/files/continuations-run/t5314.scala
new file mode 100644
index 0000000..d611016
--- /dev/null
+++ b/test/files/continuations-run/t5314.scala
@@ -0,0 +1,52 @@
+import scala.util.continuations._
+
+class ReturnRepro {
+ def s1: Int @cpsParam[Any, Unit] = shift { k => k(5) }
+ def caller = reset { println(p(3)) }
+ def caller2 = reset { println(p2(3)) }
+
+ def p(i: Int): Int @cpsParam[Unit, Any] = {
+ val v= s1 + 3
+ return v
+ }
+
+ def p2(i: Int): Int @cpsParam[Unit, Any] = {
+ val v = s1 + 3
+ if (v > 0) {
+ println("hi")
+ return v
+ } else {
+ println("hi")
+ return 8
+ }
+ }
+}
+
+object Test extends App {
+ def foo(x:Int): Int @cps[Int] = shift { k => k(x) }
+
+ def bar(x:Int): Int @cps[Int] = return foo(x)
+
+ def nocps(x: Int): Int = { return x; x }
+
+ def foo2(x:Int): Int @cps[Int] = 7
+ def bar2(x:Int): Int @cps[Int] = { foo2(x); return 7 }
+ def bar3(x:Int): Int @cps[Int] = { foo2(x); if (x == 7) return 7 else return foo2(x) }
+ def bar4(x:Int): Int @cps[Int] = { foo2(x); if (x == 7) return 7 else foo2(x) }
+ def bar5(x:Int): Int @cps[Int] = { foo2(x); if (x == 7) return 7 else 8 }
+ println(reset { bar2(10) })
+ println(reset { bar3(10) })
+ println(reset { bar4(10) })
+ println(reset { bar5(10) })
+
+ /* original test case */
+ val repro = new ReturnRepro
+ repro.caller
+ repro.caller2
+
+ reset {
+ val res = bar(8)
+ println(res)
+ res
+ }
+}
diff --git a/test/files/continuations-run/t5472.check b/test/files/continuations-run/t5472.check
new file mode 100644
index 0000000..d42e80c
--- /dev/null
+++ b/test/files/continuations-run/t5472.check
@@ -0,0 +1 @@
+List(23, 23)
diff --git a/test/files/continuations-run/t5472.scala b/test/files/continuations-run/t5472.scala
new file mode 100644
index 0000000..3e3c76b
--- /dev/null
+++ b/test/files/continuations-run/t5472.scala
@@ -0,0 +1,90 @@
+import scala.annotation._
+import scala.util.continuations._
+import java.util.concurrent.atomic._
+
+object Test {
+ def main(args: Array[String]) {
+ val map = Map("foo" -> 1, "bar" -> 2)
+ reset {
+ val mapped =
+ for {
+ (location, accessors) <- new ContinuationizedParallelIterable(map)
+ } yield {
+ shiftUnit0[Int, Unit](23)
+ }
+ println(mapped.toList)
+ }
+ }
+}
+
+final class ContinuationizedParallelIterable[+A](protected val underline: Iterable[A]) {
+ def toList = underline.toList.sortBy(_.toString)
+
+ final def filter(p: A => Boolean @suspendable): ContinuationizedParallelIterable[A] @suspendable =
+ shift(
+ new AtomicInteger(1) with ((ContinuationizedParallelIterable[A] => Unit) => Unit) {
+ private val results = new AtomicReference[List[A]](Nil)
+
+ @tailrec
+ private def add(element: A) {
+ val old = results.get
+ if (!results.compareAndSet(old, element :: old)) {
+ add(element)
+ }
+ }
+
+ override final def apply(continue: ContinuationizedParallelIterable[A] => Unit) {
+ for (element <- underline) {
+ super.incrementAndGet()
+ reset {
+ val pass = p(element)
+ if (pass) {
+ add(element)
+ }
+ if (super.decrementAndGet() == 0) {
+ continue(new ContinuationizedParallelIterable(results.get))
+ }
+ }
+ }
+ if (super.decrementAndGet() == 0) {
+ continue(new ContinuationizedParallelIterable(results.get))
+ }
+ }
+ })
+
+ final def foreach[U](f: A => U @suspendable): Unit @suspendable =
+ shift(
+ new AtomicInteger(1) with ((Unit => Unit) => Unit) {
+ override final def apply(continue: Unit => Unit) {
+ for (element <- underline) {
+ super.incrementAndGet()
+ reset {
+ f(element)
+ if (super.decrementAndGet() == 0) {
+ continue()
+ }
+ }
+ }
+ if (super.decrementAndGet() == 0) {
+ continue()
+ }
+ }
+ })
+
+ final def map[B: Manifest](f: A => B @suspendable): ContinuationizedParallelIterable[B] @suspendable =
+ shift(
+ new AtomicInteger(underline.size) with ((ContinuationizedParallelIterable[B] => Unit) => Unit) {
+ override final def apply(continue: ContinuationizedParallelIterable[B] => Unit) {
+ val results = new Array[B](super.get)
+ for ((element, i) <- underline.view zipWithIndex) {
+ reset {
+ val result = f(element)
+ results(i) = result
+ if (super.decrementAndGet() == 0) {
+ continue(new ContinuationizedParallelIterable(results))
+ }
+ }
+ }
+ }
+ })
+}
diff --git a/test/files/jvm/bug680.check b/test/files/continuations-run/z1673.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/continuations-run/z1673.check
diff --git a/test/files/continuations-run/z1673.scala b/test/files/continuations-run/z1673.scala
new file mode 100644
index 0000000..716b374
--- /dev/null
+++ b/test/files/continuations-run/z1673.scala
@@ -0,0 +1,31 @@
+import scala.util.continuations._
+
+class MatchRepro {
+ def s: String @cps[Any] = shift { k => k("foo") }
+
+ def p = {
+ val k = s
+ s match { case lit0 => }
+ }
+
+ def q = {
+ val k = s
+ k match { case lit1 => }
+ }
+
+ def r = {
+ s match { case "FOO" => }
+ }
+
+ def t = {
+ val k = s
+ k match { case "FOO" => }
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val m = new MatchRepro
+ ()
+ }
+}
diff --git a/test/files/detach-run/actor/Client.scala b/test/files/detach-run/actor/Client.scala
index 9451b3a..12573e2 100644
--- a/test/files/detach-run/actor/Client.scala
+++ b/test/files/detach-run/actor/Client.scala
@@ -8,9 +8,9 @@ import scala.remoting._, Debug._
object Foo {
def trace(msg: String) { info("[Foo.trace] "+msg)}
-}
+}
object Client {
- val yInstVal: Int = 10
+ val yInstVal: Int = 10
var yInstVar: Int = 99
object Bar {
def trace(msg: String) { info("[Bar.trace] "+msg) }
@@ -19,10 +19,10 @@ object Client {
init(args)
actor {
val server = select(Node(host, port), 'Server)
- val zLocVal: Int = 1000
+ val zLocVal: Int = 1000
var zLocVar: Int = 9998
- server ! detach(
- (x: Int) => {
+ server ! detach(
+ (x: Int) => {
println("yInstVal = "+yInstVal)
this.trace("yInstVar = "+yInstVar)
Bar.trace("zLocVal = "+zLocVal)
@@ -30,17 +30,17 @@ object Client {
zLocVar += 2
System.out.println("zLocVal = "+zLocVal)
Debug.info("zLocVar = "+zLocVar)
- x + yInstVal + yInstVar + zLocVal + zLocVar
- })
+ x + yInstVal + yInstVar + zLocVal + zLocVar
+ })
react {
case result: Int =>
println("result received: " + result)
- Predef.exit(0)
+ Predef.exit(0)
}
}
}
private def trace(msg: String) { info("[Client.trace] "+msg) }
-}
+}
object ClientHelper {
private var _host = "127.0.0.1"
diff --git a/test/files/detach-run/actor/Server.scala b/test/files/detach-run/actor/Server.scala
index 558b4a2..b56d22f 100644
--- a/test/files/detach-run/actor/Server.scala
+++ b/test/files/detach-run/actor/Server.scala
@@ -4,12 +4,12 @@
import scala.actors.Actor._
import scala.actors.remote.RemoteActor._
-
+
object Server extends ServerConsole {
private def computation(f: Int => Int): Int = {
//some time-consuming task
f(2)
- }
+ }
def main(args: Array[String]) {
actor {
classLoader = serverClassLoader
@@ -21,7 +21,7 @@ object Server extends ServerConsole {
val result = computation(f)
sender ! result
}
- }
- }
+ }
+ }
}
}
diff --git a/test/files/detach-run/basic/Client.scala b/test/files/detach-run/basic/Client.scala
index cb39d22..f8eddb0 100644
--- a/test/files/detach-run/basic/Client.scala
+++ b/test/files/detach-run/basic/Client.scala
@@ -7,9 +7,9 @@ import scala.remoting._, Debug._
object Foo {
def trace(s: String) { info("[Foo.trace] "+s)}
-}
-object Client {
- val yInstVal: Int = 10
+}
+object Client {
+ val yInstVal: Int = 10
var yInstVar: Int = 99
object Bar {
def trace(s: String) { info("[Bar.trace] "+s) }
@@ -17,10 +17,10 @@ object Client {
def main(args: Array[String]) {
init(args)
val server = new Channel(host, port)
- val zLocVal: Int = 1000
- var zLocVar: Int = 9998
- server ! detach(
- (x: Int) => {
+ val zLocVal: Int = 1000
+ var zLocVar: Int = 9998
+ server ! detach(
+ (x: Int) => {
println("yInstVal = "+yInstVal)
this.trace("yInstVar = "+yInstVar)
Bar.trace("zLocVal = "+zLocVal)
@@ -28,13 +28,13 @@ object Client {
zLocVar += 2
System.out.println("zLocVal = "+zLocVal)
Debug.info("zLocVar = "+zLocVar)
- x + yInstVal + yInstVar + zLocVal + zLocVar
- })
- val result = server.receiveInt
- println("result received: " + result)
+ x + yInstVal + yInstVar + zLocVal + zLocVar
+ })
+ val result = server.receiveInt
+ println("result received: " + result)
}
- private def trace(s: String) { info("[Client.trace] "+s) }
-}
+ private def trace(s: String) { info("[Client.trace] "+s) }
+}
object ClientHelper {
private var _host = "127.0.0.1"
diff --git a/test/files/detach-run/basic/Server.scala b/test/files/detach-run/basic/Server.scala
index 4fce23c..f8aa02a 100644
--- a/test/files/detach-run/basic/Server.scala
+++ b/test/files/detach-run/basic/Server.scala
@@ -1,22 +1,22 @@
/*
* @author Stephane Micheloud
- */
+ */
import scala.remoting.ServerChannel
-
+
object Server extends ServerConsole {
private def computation(f: Int => Int): Int = {
//some time-consuming task
f(2)
- }
- def main(args: Array[String]) {
+ }
+ def main(args: Array[String]) {
val server = new ServerChannel(args(0).toInt)
loop {
val client = server.accept
val f = client.receive[Int => Int]
val result = computation(f)
- client ! result
+ client ! result
}
- server.close()
+ server.close()
}
-}
+}
diff --git a/test/files/detach-run/basic/ServerConsole.scala b/test/files/detach-run/basic/ServerConsole.scala
index 859cc1b..65b81c0 100644
--- a/test/files/detach-run/basic/ServerConsole.scala
+++ b/test/files/detach-run/basic/ServerConsole.scala
@@ -2,20 +2,20 @@
* @author Stephane Micheloud
*/
-import java.io._
+import java.io._
import scala.compat.Platform.currentTime
-import scala.remoting.Debug, Debug._
+import scala.remoting.Debug, Debug._
trait ServerConsole extends Thread {
private val startTime = currentTime
-
- start()
+
+ start()
private var isTerminated = false
def terminate() { isTerminated = true }
-
+
protected def loop(block: => Unit) {
while (!isTerminated) {
try {
@@ -36,30 +36,30 @@ trait ServerConsole extends Thread {
override def run() {
val in = new BufferedReader(new InputStreamReader(System.in))
- var quit = false
- while (!quit) {
+ var quit = false
+ while (!quit) {
val args = getArgs(in)
- if (args contains "quit")
+ if (args contains "quit")
quit = true
if (args contains "cls") {
println(ERASE_SCREEN)
println(CURSOR_HOME)
- }
- if (args contains "warning")
- Debug.level = Level.WARNING
- if (args contains "info")
- Debug.level = Level.INFO
- if (args contains "silent")
- Debug.level = Level.SILENT
+ }
+ if (args contains "warning")
+ Debug.level = Level.WARNING
+ if (args contains "info")
+ Debug.level = Level.INFO
+ if (args contains "silent")
+ Debug.level = Level.SILENT
}
- terminate()
- println("Server exited ("+mkTimeString(currentTime - startTime)+")")
+ terminate()
+ println("Server exited ("+mkTimeString(currentTime - startTime)+")")
exit(0)
-
- }
-
- protected def trace(msg: String) {
- Debug.info("[ServerConsole.trace] "+msg)
+
+ }
+
+ protected def trace(msg: String) {
+ Debug.info("[ServerConsole.trace] "+msg)
}
private def getArgs(in: BufferedReader): List[String] = {
diff --git a/test/files/disabled/A.scala b/test/files/disabled/A.scala
new file mode 100644
index 0000000..c070faf
--- /dev/null
+++ b/test/files/disabled/A.scala
@@ -0,0 +1,11 @@
+trait As {
+ trait C extends D {
+ override def foo = this /// Shouldn't cause the change
+ override def foo(act: List[D]) = this
+ }
+
+ abstract class D{
+ def foo: D = this
+ def foo(act: List[D]) = this
+ }
+}
diff --git a/test/files/buildmanager/overloaded_1/overloaded_1.check b/test/files/disabled/overloaded_1.check
similarity index 100%
rename from test/files/buildmanager/overloaded_1/overloaded_1.check
rename to test/files/disabled/overloaded_1.check
diff --git a/test/files/buildmanager/overloaded_1/overloaded_1.test b/test/files/disabled/overloaded_1.test
similarity index 100%
rename from test/files/buildmanager/overloaded_1/overloaded_1.test
rename to test/files/disabled/overloaded_1.test
diff --git a/test/files/disabled/run/t4602.scala b/test/files/disabled/run/t4602.scala
new file mode 100644
index 0000000..73ba231
--- /dev/null
+++ b/test/files/disabled/run/t4602.scala
@@ -0,0 +1,57 @@
+import java.io.{File, FileOutputStream, BufferedOutputStream, FileWriter, ByteArrayOutputStream, PrintStream}
+import tools.nsc.{CompileClient, CompileServer}
+import java.util.concurrent.{CountDownLatch, TimeUnit}
+
+object Test extends App {
+ val startupLatch = new CountDownLatch(1)
+ // we have to explicitly launch our server because when the client launches a server it uses
+ // the "scala" shell command meaning whatever version of scala (and whatever version of libraries)
+ // happens to be in the path gets used
+ val t = new Thread(new Runnable {
+ def run() = {
+ CompileServer.execute(() => startupLatch.countDown(), Array[String]())
+ }
+ })
+ t setDaemon true
+ t.start()
+ if (!startupLatch.await(2, TimeUnit.MINUTES))
+ sys error "Timeout waiting for server to start"
+
+ val baos = new ByteArrayOutputStream()
+ val ps = new PrintStream(baos)
+
+ val outdir = scala.reflect.io.Directory(sys.props("partest.output"))
+
+ val dirNameAndPath = (1 to 2).toList map {number =>
+ val name = s"Hello${number}"
+ val dir = outdir / number.toString
+ (dir, name, dir / s"${name}.scala")
+ }
+
+ dirNameAndPath foreach {case (dir, name, path) =>
+ dir.createDirectory()
+ val file = path.jfile
+ val out = new FileWriter(file)
+ try
+ out.write(s"object ${name}\n")
+ finally
+ out.close
+ }
+
+ val success = (scala.Console withOut ps) {
+ dirNameAndPath foreach {case (path, name, _) =>
+ CompileClient.process(Array("-verbose", "-current-dir", path.toString, s"${name}.scala"))
+ }
+
+ CompileClient.process(Array("-shutdown"))
+ }
+
+ // now make sure we got success and the correct normalized paths
+ val msg = baos.toString()
+
+ assert(success, s"got a failure. Full results were: \n${msg}")
+ dirNameAndPath foreach {case (_, _, path) =>
+ val expected = s"Input files after normalizing paths: ${path}"
+ assert(msg contains expected, s"could not find '${expected}' in output. Full results were: \n${msg}")
+ }
+}
diff --git a/test/files/buildmanager/t4245/A.scala b/test/files/disabled/t4245/A.scala
similarity index 100%
rename from test/files/buildmanager/t4245/A.scala
rename to test/files/disabled/t4245/A.scala
diff --git a/test/files/buildmanager/t4245/t4245.check b/test/files/disabled/t4245/t4245.check
similarity index 100%
rename from test/files/buildmanager/t4245/t4245.check
rename to test/files/disabled/t4245/t4245.check
diff --git a/test/files/buildmanager/t4245/t4245.test b/test/files/disabled/t4245/t4245.test
similarity index 100%
rename from test/files/buildmanager/t4245/t4245.test
rename to test/files/disabled/t4245/t4245.test
diff --git a/test/files/disabled/t7020.check b/test/files/disabled/t7020.check
new file mode 100644
index 0000000..a869b12
--- /dev/null
+++ b/test/files/disabled/t7020.check
@@ -0,0 +1,17 @@
+t7020.scala:3: error: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List(_, _)
+ List(5) match {
+ ^
+t7020.scala:10: error: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List(_, _)
+ List(5) match {
+ ^
+t7020.scala:17: error: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List(_, _)
+ List(5) match {
+ ^
+t7020.scala:24: error: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List(_, _)
+ List(5) match {
+ ^
+four errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/disabled/t7020.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/disabled/t7020.flags
diff --git a/test/files/disabled/t7020.scala b/test/files/disabled/t7020.scala
new file mode 100644
index 0000000..cc5421b
--- /dev/null
+++ b/test/files/disabled/t7020.scala
@@ -0,0 +1,30 @@
+object Test {
+ // warning was non-deterministic
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+}
diff --git a/test/files/instrumented/InstrumentationTest.check b/test/files/instrumented/InstrumentationTest.check
new file mode 100644
index 0000000..f0f4475
--- /dev/null
+++ b/test/files/instrumented/InstrumentationTest.check
@@ -0,0 +1,8 @@
+true
+Method call statistics:
+ 1 Foo1.<init>()V
+ 1 Foo1.someMethod()I
+ 1 instrumented/Foo2.<init>()V
+ 1 instrumented/Foo2.someMethod()I
+ 1 scala/Predef$.println(Ljava/lang/Object;)V
+ 1 scala/runtime/BoxesRunTime.boxToBoolean(Z)Ljava/lang/Boolean;
diff --git a/test/files/instrumented/InstrumentationTest.scala b/test/files/instrumented/InstrumentationTest.scala
new file mode 100644
index 0000000..0e53f80
--- /dev/null
+++ b/test/files/instrumented/InstrumentationTest.scala
@@ -0,0 +1,30 @@
+import scala.tools.partest.instrumented.Instrumentation._
+
+/** We check if classes put in empty package are properly instrumented */
+class Foo1 {
+ def someMethod = 0
+}
+
+/** We check if classes put in `instrumented` package are properly instrumented */
+package instrumented {
+ class Foo2 {
+ def someMethod = 0
+ }
+}
+
+/** Tests if instrumentation itself works correctly */
+object Test {
+ def main(args: Array[String]) {
+ // force predef initialization before profiling
+ Predef
+ startProfiling()
+ val foo1 = new Foo1
+ foo1.someMethod
+ val foo2 = new instrumented.Foo2
+ foo2.someMethod
+ // should box the boolean
+ println(true)
+ stopProfiling()
+ printStatistics()
+ }
+}
diff --git a/test/files/instrumented/README b/test/files/instrumented/README
new file mode 100644
index 0000000..32d0ef2
--- /dev/null
+++ b/test/files/instrumented/README
@@ -0,0 +1,15 @@
+Tests in `instrumented` directory are executed the same way as in `run` but
+they have additional byte-code instrumentation performed for profiling. You
+should put your tests in `instrumented` directory if you are interested in
+method call counts. Examples include tests for specialization (you want to
+count boxing and unboxing method calls) or high-level tests for optimizer
+where you are interested if methods are successfuly inlined (so they should
+not be called at runtime) or closures are eliminated (so no constructors
+of closures are called).
+
+Check `scala.tools.partest.instrumented.Instrumentation` to learn how to
+use the instrumentation infrastructure.
+
+The instrumentation itself is achieved by attaching a Java agent to the forked
+VM process that injects calls to profiler. Check
+`scala.tools.partest.instrumented.Instrumentation`.
diff --git a/test/files/instrumented/inline-in-constructors.check b/test/files/instrumented/inline-in-constructors.check
new file mode 100644
index 0000000..c6c9ae4
--- /dev/null
+++ b/test/files/instrumented/inline-in-constructors.check
@@ -0,0 +1,3 @@
+Method call statistics:
+ 1 instrumented/Bar.<init>(Z)V
+ 1 instrumented/Foo.<init>(I)V
diff --git a/test/files/instrumented/inline-in-constructors.flags b/test/files/instrumented/inline-in-constructors.flags
new file mode 100644
index 0000000..c9b68d7
--- /dev/null
+++ b/test/files/instrumented/inline-in-constructors.flags
@@ -0,0 +1 @@
+-optimise
diff --git a/test/files/instrumented/inline-in-constructors/assert_1.scala b/test/files/instrumented/inline-in-constructors/assert_1.scala
new file mode 100644
index 0000000..a03757b
--- /dev/null
+++ b/test/files/instrumented/inline-in-constructors/assert_1.scala
@@ -0,0 +1,13 @@
+package instrumented
+
+object MyPredef {
+ @inline
+ final def assert(assertion: Boolean, message: => Any) {
+ if (!assertion)
+ throw new java.lang.AssertionError("assertion failed: " + message)
+ }
+}
+
+class Foo(x: Int) {
+ MyPredef.assert(x > 0, "not positive: " + x)
+}
diff --git a/test/files/instrumented/inline-in-constructors/bar_2.scala b/test/files/instrumented/inline-in-constructors/bar_2.scala
new file mode 100644
index 0000000..418dac5
--- /dev/null
+++ b/test/files/instrumented/inline-in-constructors/bar_2.scala
@@ -0,0 +1,7 @@
+package instrumented
+
+/** Class that uses assert compiled in previous compiler run so we check if
+ inlining in constructors works across different compilation runs */
+class Bar(x: Boolean) {
+ MyPredef.assert(x, "not true: " + x)
+}
diff --git a/test/files/instrumented/inline-in-constructors/test_3.scala b/test/files/instrumented/inline-in-constructors/test_3.scala
new file mode 100644
index 0000000..c4d4cc5
--- /dev/null
+++ b/test/files/instrumented/inline-in-constructors/test_3.scala
@@ -0,0 +1,15 @@
+import scala.tools.partest.instrumented.Instrumentation._
+import instrumented._
+
+object Test {
+ def main(args: Array[String]) {
+ // force predef initialization before profiling
+ Predef
+ MyPredef
+ startProfiling()
+ val a = new Foo(2)
+ val b = new Bar(true)
+ stopProfiling()
+ printStatistics()
+ }
+}
diff --git a/test/files/instrumented/t6611.check b/test/files/instrumented/t6611.check
new file mode 100644
index 0000000..5cd691e
--- /dev/null
+++ b/test/files/instrumented/t6611.check
@@ -0,0 +1 @@
+Method call statistics:
diff --git a/test/files/instrumented/t6611.scala b/test/files/instrumented/t6611.scala
new file mode 100644
index 0000000..4c52f8a
--- /dev/null
+++ b/test/files/instrumented/t6611.scala
@@ -0,0 +1,35 @@
+import scala.tools.partest.instrumented.Instrumentation._
+
+object Test {
+ def main(args: Array[String]) {
+ startProfiling()
+
+ // tests optimization in Cleanup for varargs reference arrays
+ Array("")
+
+
+ Array(true)
+ Array(true, false)
+ Array(1: Byte)
+ Array(1: Byte, 2: Byte)
+ Array(1: Short)
+ Array(1: Short, 2: Short)
+ Array(1)
+ Array(1, 2)
+ Array(1L)
+ Array(1L, 2L)
+ Array(1d)
+ Array(1d, 2d)
+ Array(1f)
+ Array(1f, 2f)
+
+ /* Not currently optimized:
+ Array[Int](1, 2) etc
+ Array(())
+ Array((), ())
+ */
+
+ stopProfiling()
+ printStatistics()
+ }
+}
diff --git a/test/files/jvm/annotations.check b/test/files/jvm/annotations.check
index e307f89..a8dc5ec 100644
--- a/test/files/jvm/annotations.check
+++ b/test/files/jvm/annotations.check
@@ -29,8 +29,20 @@ public Test4$Foo8(int)
private int Test4$Foo9.z
@test.SourceAnnotation(mails={bill.gates at bloodsuckers.com}, value=http://eppli.com)
+private int Test4$Foo9.z2
+
+ at test.SourceAnnotation(mails={bill.gates at bloodsuckers.com}, value=http://eppli.com)
+private int Test4$Foo9.z3
+
+ at test.SourceAnnotation(mails={bill.gates at bloodsuckers.com}, value=http://eppli.com)
public int Test4$Foo9.getZ()
+ at test.SourceAnnotation(mails={bill.gates at bloodsuckers.com}, value=http://eppli.com)
+public int Test4$Foo9.getZ2()
+
+ at test.SourceAnnotation(mails={bill.gates at bloodsuckers.com}, value=http://eppli.com)
+public int Test4$Foo9.getZ3()
+
@test.SourceAnnotation(mails={bill.gates at bloodsuckers.com}, value=http://apple.com)
public int Test4$Foo9.x()
diff --git a/test/files/jvm/annotations.scala b/test/files/jvm/annotations.scala
index f32eb62..77a45fa 100644
--- a/test/files/jvm/annotations.scala
+++ b/test/files/jvm/annotations.scala
@@ -94,23 +94,29 @@ object Test4 {
}
class Foo8(@SourceAnnotation("constructor val") val n: Int) {}
class Foo9 {
- import scala.annotation.target._
- import scala.reflect.BeanProperty
+ import scala.annotation.meta._
+ import scala.beans.BeanProperty
@(SourceAnnotation @getter)("http://apple.com") val x = 0
@BeanProperty @(SourceAnnotation @beanSetter)("http://uppla.com") var y = 0
type myAnn = SourceAnnotation @beanGetter @field
@BeanProperty @myAnn("http://eppli.com") var z = 0
+
+ type myAnn2[T] = SourceAnnotation @beanGetter @field
+ @BeanProperty @myAnn2[String]("http://eppli.com") var z2 = 0
+
+ type myAnn3[CC[_]] = SourceAnnotation @beanGetter @field
+ @BeanProperty @myAnn3[List]("http://eppli.com") var z3 = 0
}
class Foo10(@SourceAnnotation("on param 1") val name: String)
- class Foo11(@(SourceAnnotation @scala.annotation.target.field)("on param 2") val name: String)
- class Foo12(@(SourceAnnotation @scala.annotation.target.setter)("on param 3") var name: String)
+ class Foo11(@(SourceAnnotation @scala.annotation.meta.field)("on param 2") val name: String)
+ class Foo12(@(SourceAnnotation @scala.annotation.meta.setter)("on param 3") var name: String)
def run {
import java.lang.annotation.Annotation
import java.lang.reflect.AnnotatedElement
def printSourceAnnotation(a: Annotation) {
val ann = a.asInstanceOf[SourceAnnotation]
- println("@test.SourceAnnotation(mails=" + ann.mails.deepMkString("{", ",", "}") +
+ println("@test.SourceAnnotation(mails=" + ann.mails.deep.mkString("{", ",", "}") +
", value=" + ann.value + ")")
}
def printSourceAnnotations(target: AnnotatedElement) {
@@ -157,7 +163,7 @@ object Test4 {
}
object Test5 {
- import scala.reflect.BeanProperty
+ import scala.beans.BeanProperty
import java.lang.Integer
class Count {
@@ -182,8 +188,8 @@ object Test5 {
}
object Test6 {
- import scala.reflect.BeanProperty
- import scala.reflect.BooleanBeanProperty
+ import scala.beans.BeanProperty
+ import scala.beans.BooleanBeanProperty
class C(@BeanProperty var text: String)
class D(@BooleanBeanProperty var prop: Boolean) {
@BeanProperty val m: Int = if (prop) 1 else 2
@@ -193,7 +199,9 @@ object Test6 {
val c = new C("bob")
c.setText("dylan")
println(c.getText())
- if (new D(true).isProp()) {
+ val d = new D(true)
+ d.setProp(false)
+ if (!d.isProp()) {
println(new D(false).getM())
}
}
diff --git a/test/files/jvm/backendBugUnapply.scala b/test/files/jvm/backendBugUnapply.scala
index 204072f..45ee6f7 100644
--- a/test/files/jvm/backendBugUnapply.scala
+++ b/test/files/jvm/backendBugUnapply.scala
@@ -1,14 +1,11 @@
object Test {
- import scala.xml.{Node,HasKeyValue}
+ import scala.xml.{Node,UnprefixedAttribute}
- def domatch(x:Node): Node = {
- val hasBar = new HasKeyValue("bar")
-
+ def domatch(x:Node) =
x match {
- case Node("foo", hasBar(z), _*) => z
+ case Node("foo", UnprefixedAttribute("bar", z, _), _*) => z
case _ => null
}
- }
def main(args: Array[String]): Unit = {
println(domatch(<foo bar="baz"><hi/></foo>))
diff --git a/test/files/jvm/bigints.check b/test/files/jvm/bigints.check
new file mode 100644
index 0000000..7952a04
--- /dev/null
+++ b/test/files/jvm/bigints.check
@@ -0,0 +1,9 @@
+int_add_bigint = (3,3)
+int_sub_bigint = (-1,-1)
+int_mul_bigint = (4,4)
+z <= 3 = true
+3 < z = false
+z <= 3 = true
+3 < z = false
+c > MAX_LONG = false
+c <= MAX_LONG = true
diff --git a/test/files/jvm/bigints.scala b/test/files/jvm/bigints.scala
index 7a79787..f0d05f8 100644
--- a/test/files/jvm/bigints.scala
+++ b/test/files/jvm/bigints.scala
@@ -1,41 +1,32 @@
-//############################################################################
-// BigInt, BigDecimal
-//############################################################################
-
-//############################################################################
-
-import testing.SUnit._
-
-/** Test the Scala implementation of class <code>scala.BigDecimal</code>.
+/** Test the Scala implementation of classes <code>scala.BigInt</code>
+* and <code>scala.BigDecimal</code>.
*
* @author Stephane Micheloud
*/
-object Test extends TestConsoleMain {
- def suite = new TestSuite(
- Test_BigInt,
- Test_BigDecimal
- )
+object Test {
+ def main(args: Array[String]) {
+ Test_BigInt.runTest()
+ Test_BigDecimal.runTest()
+ }
}
-object Test_BigInt extends TestCase("BigInt") with Assert {
- override def enableStackTrace = false
- override def runTest {
+object Test_BigInt {
+ def runTest() {
import BigInt._
val x: BigInt = 1
val y = x + 1
val z = 1 + y
- assertEquals("int_add_bigint", 1+y, y+1)
- assertEquals("int_sub_bigint", 1-y, -(y-1))
- assertEquals("int_mul_bigint", 2*x*y, y*x*2)
- assertTrue("z_<=_3", z <= 3)
- assertFalse("3_<_z", 3 < z)
+ println("int_add_bigint = " + (1+y, y+1))
+ println("int_sub_bigint = " + (1-y,-(y-1)))
+ println("int_mul_bigint = " + (2*x*y, y*x*2))
+ println("z <= 3 = " + (z <= 3))
+ println("3 < z = " + (3 < z))
}
}
-object Test_BigDecimal extends TestCase("BigDecimal") with Assert {
- override def enableStackTrace = false
- override def runTest {
+object Test_BigDecimal {
+ def runTest() {
import scala.BigDecimal, BigDecimal._
val xi: BigDecimal = 1
@@ -47,14 +38,14 @@ object Test_BigDecimal extends TestCase("BigDecimal") with Assert {
val x: BigDecimal = 1
val y = x + 1
val z = 1 + y
- assertTrue("z_<=_3", z <= 3)
- assertFalse("3_<_z", 3 < z)
+ println("z <= 3 = " + (z <= 3))
+ println("3 < z = " + (3 < z))
- val a: BigDecimal= Math.MAX_LONG
+ val a: BigDecimal= Long.MaxValue
val b: BigDecimal = 1
val c = a - b
- assertFalse("c_>_MAX_LONG", c > Math.MAX_LONG)
- assertTrue("c_<=_MAX_LONG", c <= Math.MAX_LONG)
+ println("c > MAX_LONG = " + (c > Long.MaxValue))
+ println("c <= MAX_LONG = " + (c <= Long.MaxValue))
}
}
diff --git a/test/files/jvm/bytecode-test-example.check b/test/files/jvm/bytecode-test-example.check
new file mode 100644
index 0000000..0cfbf08
--- /dev/null
+++ b/test/files/jvm/bytecode-test-example.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/jvm/bytecode-test-example/Foo_1.scala b/test/files/jvm/bytecode-test-example/Foo_1.scala
new file mode 100644
index 0000000..4f679d1
--- /dev/null
+++ b/test/files/jvm/bytecode-test-example/Foo_1.scala
@@ -0,0 +1,9 @@
+class Foo_1 {
+ def foo(x: AnyRef): Int = {
+ val bool = x == null
+ if (x != null)
+ 1
+ else
+ 0
+ }
+}
diff --git a/test/files/jvm/bytecode-test-example/Test.scala b/test/files/jvm/bytecode-test-example/Test.scala
new file mode 100644
index 0000000..d668059
--- /dev/null
+++ b/test/files/jvm/bytecode-test-example/Test.scala
@@ -0,0 +1,32 @@
+import scala.tools.partest.BytecodeTest
+
+import scala.tools.nsc.util.JavaClassPath
+import java.io.InputStream
+import scala.tools.asm
+import asm.ClassReader
+import asm.tree.{ClassNode, InsnList}
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("Foo_1")
+ val methodNode = getMethod(classNode, "foo")
+ println(countNullChecks(methodNode.instructions))
+ }
+
+ def countNullChecks(insnList: InsnList): Int = {
+ /** Is given instruction a null check?
+ * NOTE
+ * This will detect direct null compparsion as in
+ * if (x == null) ...
+ * and not indirect as in
+ * val foo = null
+ * if (x == foo) ...
+ */
+ def isNullCheck(node: asm.tree.AbstractInsnNode): Boolean = {
+ val opcode = node.getOpcode
+ (opcode == asm.Opcodes.IFNULL) || (opcode == asm.Opcodes.IFNONNULL)
+ }
+ insnList.iterator.asScala.count(isNullCheck)
+ }
+}
diff --git a/test/files/jvm/deprecation.cmds b/test/files/jvm/deprecation.cmds
deleted file mode 100644
index 4c0f73c..0000000
--- a/test/files/jvm/deprecation.cmds
+++ /dev/null
@@ -1,3 +0,0 @@
-javac Defs.java
-scalac Test_1.scala
-javac Use_2.java
diff --git a/test/files/jvm/deprecation/Test_1.scala b/test/files/jvm/deprecation/Test_1.scala
index 5bd87b8..b68a40c 100644
--- a/test/files/jvm/deprecation/Test_1.scala
+++ b/test/files/jvm/deprecation/Test_1.scala
@@ -8,9 +8,9 @@ class Test {
val w = i.buz()
}
- @deprecated("no longer!") class Inner {
- @deprecated("uncool") def f: Int = 1
- @deprecated("this one as well!") var g = -1
+ @deprecated("no longer!", "") class Inner {
+ @deprecated("uncool", "") def f: Int = 1
+ @deprecated("this one as well!", "") var g = -1
}
}
diff --git a/test/files/jvm/duration-java.check b/test/files/jvm/duration-java.check
new file mode 100644
index 0000000..49d06fb
--- /dev/null
+++ b/test/files/jvm/duration-java.check
@@ -0,0 +1,364 @@
+ 0.0 nanoseconds => 0 days
+ 1.0 nanoseconds => 1 nanosecond
+ 7.0 nanoseconds => 7 nanoseconds
+ 10.0 nanoseconds => 10 nanoseconds
+ 12.0 nanoseconds => 12 nanoseconds
+ 24.0 nanoseconds => 24 nanoseconds
+ 30.0 nanoseconds => 30 nanoseconds
+ 49.0 nanoseconds => 49 nanoseconds
+ 60.0 nanoseconds => 60 nanoseconds
+ 70.0 nanoseconds => 70 nanoseconds
+ 84.0 nanoseconds => 84 nanoseconds
+ 100.0 nanoseconds => 100 nanoseconds
+ 120.0 nanoseconds => 120 nanoseconds
+ 144.0 nanoseconds => 144 nanoseconds
+ 168.0 nanoseconds => 168 nanoseconds
+ 210.0 nanoseconds => 210 nanoseconds
+ 240.0 nanoseconds => 240 nanoseconds
+ 288.0 nanoseconds => 288 nanoseconds
+ 300.0 nanoseconds => 300 nanoseconds
+ 360.0 nanoseconds => 360 nanoseconds
+ 420.0 nanoseconds => 420 nanoseconds
+ 576.0 nanoseconds => 576 nanoseconds
+ 600.0 nanoseconds => 600 nanoseconds
+ 700.0 nanoseconds => 700 nanoseconds
+ 720.0 nanoseconds => 720 nanoseconds
+ 900.0 nanoseconds => 900 nanoseconds
+ 1000.0 nanoseconds => 1 microsecond
+ 1200.0 nanoseconds => 1200 nanoseconds
+ 1440.0 nanoseconds => 1440 nanoseconds
+ 1800.0 nanoseconds => 1800 nanoseconds
+ 2400.0 nanoseconds => 2400 nanoseconds
+ 3000.0 nanoseconds => 3 microseconds
+ 3600.0 nanoseconds => 3600 nanoseconds
+ 6000.0 nanoseconds => 6 microseconds
+ 7000.0 nanoseconds => 7 microseconds
+ 10000.0 nanoseconds => 10 microseconds
+ 12000.0 nanoseconds => 12 microseconds
+ 24000.0 nanoseconds => 24 microseconds
+ 30000.0 nanoseconds => 30 microseconds
+ 60000.0 nanoseconds => 60 microseconds
+ 100000.0 nanoseconds => 100 microseconds
+ 1000000.0 nanoseconds => 1 millisecond
+ 7000000.0 nanoseconds => 7 milliseconds
+ 1.0E7 nanoseconds => 10 milliseconds
+ 1.2E7 nanoseconds => 12 milliseconds
+ 2.4E7 nanoseconds => 24 milliseconds
+ 3.0E7 nanoseconds => 30 milliseconds
+ 6.0E7 nanoseconds => 60 milliseconds
+ 1.0E8 nanoseconds => 100 milliseconds
+ 1.0E9 nanoseconds => 1 second
+ 1.0E12 nanoseconds => 1000 seconds
+ 0.0 microseconds => 0 days
+ 1.0 microseconds => 1 microsecond
+ 7.0 microseconds => 7 microseconds
+ 10.0 microseconds => 10 microseconds
+ 12.0 microseconds => 12 microseconds
+ 24.0 microseconds => 24 microseconds
+ 30.0 microseconds => 30 microseconds
+ 49.0 microseconds => 49 microseconds
+ 60.0 microseconds => 60 microseconds
+ 70.0 microseconds => 70 microseconds
+ 84.0 microseconds => 84 microseconds
+ 100.0 microseconds => 100 microseconds
+ 120.0 microseconds => 120 microseconds
+ 144.0 microseconds => 144 microseconds
+ 168.0 microseconds => 168 microseconds
+ 210.0 microseconds => 210 microseconds
+ 240.0 microseconds => 240 microseconds
+ 288.0 microseconds => 288 microseconds
+ 300.0 microseconds => 300 microseconds
+ 360.0 microseconds => 360 microseconds
+ 420.0 microseconds => 420 microseconds
+ 576.0 microseconds => 576 microseconds
+ 600.0 microseconds => 600 microseconds
+ 700.0 microseconds => 700 microseconds
+ 720.0 microseconds => 720 microseconds
+ 900.0 microseconds => 900 microseconds
+ 1000.0 microseconds => 1 millisecond
+ 1200.0 microseconds => 1200 microseconds
+ 1440.0 microseconds => 1440 microseconds
+ 1800.0 microseconds => 1800 microseconds
+ 2400.0 microseconds => 2400 microseconds
+ 3000.0 microseconds => 3 milliseconds
+ 3600.0 microseconds => 3600 microseconds
+ 6000.0 microseconds => 6 milliseconds
+ 7000.0 microseconds => 7 milliseconds
+ 10000.0 microseconds => 10 milliseconds
+ 12000.0 microseconds => 12 milliseconds
+ 24000.0 microseconds => 24 milliseconds
+ 30000.0 microseconds => 30 milliseconds
+ 60000.0 microseconds => 60 milliseconds
+ 100000.0 microseconds => 100 milliseconds
+ 1000000.0 microseconds => 1 second
+ 7000000.0 microseconds => 7 seconds
+ 1.0E7 microseconds => 10 seconds
+ 1.2E7 microseconds => 12 seconds
+ 2.4E7 microseconds => 24 seconds
+ 3.0E7 microseconds => 30 seconds
+ 6.0E7 microseconds => 1 minute
+ 1.0E8 microseconds => 100 seconds
+ 1.0E9 microseconds => 1000 seconds
+ 1.0E12 microseconds => 1000000 seconds
+ 0.0 milliseconds => 0 days
+ 1.0 milliseconds => 1 millisecond
+ 7.0 milliseconds => 7 milliseconds
+ 10.0 milliseconds => 10 milliseconds
+ 12.0 milliseconds => 12 milliseconds
+ 24.0 milliseconds => 24 milliseconds
+ 30.0 milliseconds => 30 milliseconds
+ 49.0 milliseconds => 49 milliseconds
+ 60.0 milliseconds => 60 milliseconds
+ 70.0 milliseconds => 70 milliseconds
+ 84.0 milliseconds => 84 milliseconds
+ 100.0 milliseconds => 100 milliseconds
+ 120.0 milliseconds => 120 milliseconds
+ 144.0 milliseconds => 144 milliseconds
+ 168.0 milliseconds => 168 milliseconds
+ 210.0 milliseconds => 210 milliseconds
+ 240.0 milliseconds => 240 milliseconds
+ 288.0 milliseconds => 288 milliseconds
+ 300.0 milliseconds => 300 milliseconds
+ 360.0 milliseconds => 360 milliseconds
+ 420.0 milliseconds => 420 milliseconds
+ 576.0 milliseconds => 576 milliseconds
+ 600.0 milliseconds => 600 milliseconds
+ 700.0 milliseconds => 700 milliseconds
+ 720.0 milliseconds => 720 milliseconds
+ 900.0 milliseconds => 900 milliseconds
+ 1000.0 milliseconds => 1 second
+ 1200.0 milliseconds => 1200 milliseconds
+ 1440.0 milliseconds => 1440 milliseconds
+ 1800.0 milliseconds => 1800 milliseconds
+ 2400.0 milliseconds => 2400 milliseconds
+ 3000.0 milliseconds => 3 seconds
+ 3600.0 milliseconds => 3600 milliseconds
+ 6000.0 milliseconds => 6 seconds
+ 7000.0 milliseconds => 7 seconds
+ 10000.0 milliseconds => 10 seconds
+ 12000.0 milliseconds => 12 seconds
+ 24000.0 milliseconds => 24 seconds
+ 30000.0 milliseconds => 30 seconds
+ 60000.0 milliseconds => 1 minute
+ 100000.0 milliseconds => 100 seconds
+ 1000000.0 milliseconds => 1000 seconds
+ 7000000.0 milliseconds => 7000 seconds
+ 1.0E7 milliseconds => 10000 seconds
+ 1.2E7 milliseconds => 200 minutes
+ 2.4E7 milliseconds => 400 minutes
+ 3.0E7 milliseconds => 500 minutes
+ 6.0E7 milliseconds => 1000 minutes
+ 1.0E8 milliseconds => 100000 seconds
+ 1.0E9 milliseconds => 1000000 seconds
+ 1.0E12 milliseconds => 1000000000 seconds
+ 0.0 seconds => 0 days
+ 1.0 seconds => 1 second
+ 7.0 seconds => 7 seconds
+ 10.0 seconds => 10 seconds
+ 12.0 seconds => 12 seconds
+ 24.0 seconds => 24 seconds
+ 30.0 seconds => 30 seconds
+ 49.0 seconds => 49 seconds
+ 60.0 seconds => 1 minute
+ 70.0 seconds => 70 seconds
+ 84.0 seconds => 84 seconds
+ 100.0 seconds => 100 seconds
+ 120.0 seconds => 2 minutes
+ 144.0 seconds => 144 seconds
+ 168.0 seconds => 168 seconds
+ 210.0 seconds => 210 seconds
+ 240.0 seconds => 4 minutes
+ 288.0 seconds => 288 seconds
+ 300.0 seconds => 5 minutes
+ 360.0 seconds => 6 minutes
+ 420.0 seconds => 7 minutes
+ 576.0 seconds => 576 seconds
+ 600.0 seconds => 10 minutes
+ 700.0 seconds => 700 seconds
+ 720.0 seconds => 12 minutes
+ 900.0 seconds => 15 minutes
+ 1000.0 seconds => 1000 seconds
+ 1200.0 seconds => 20 minutes
+ 1440.0 seconds => 24 minutes
+ 1800.0 seconds => 30 minutes
+ 2400.0 seconds => 40 minutes
+ 3000.0 seconds => 50 minutes
+ 3600.0 seconds => 1 hour
+ 6000.0 seconds => 100 minutes
+ 7000.0 seconds => 7000 seconds
+ 10000.0 seconds => 10000 seconds
+ 12000.0 seconds => 200 minutes
+ 24000.0 seconds => 400 minutes
+ 30000.0 seconds => 500 minutes
+ 60000.0 seconds => 1000 minutes
+ 100000.0 seconds => 100000 seconds
+ 1000000.0 seconds => 1000000 seconds
+ 7000000.0 seconds => 7000000 seconds
+ 1.0E7 seconds => 10000000 seconds
+ 1.2E7 seconds => 200000 minutes
+ 2.4E7 seconds => 400000 minutes
+ 3.0E7 seconds => 500000 minutes
+ 6.0E7 seconds => 1000000 minutes
+ 1.0E8 seconds => 100000000 seconds
+ 1.0E9 seconds => 1000000000 seconds
+ 1.0E12 seconds => class java.lang.IllegalArgumentException
+ 0.0 minutes => 0 days
+ 1.0 minutes => 1 minute
+ 7.0 minutes => 7 minutes
+ 10.0 minutes => 10 minutes
+ 12.0 minutes => 12 minutes
+ 24.0 minutes => 24 minutes
+ 30.0 minutes => 30 minutes
+ 49.0 minutes => 49 minutes
+ 60.0 minutes => 1 hour
+ 70.0 minutes => 70 minutes
+ 84.0 minutes => 84 minutes
+ 100.0 minutes => 100 minutes
+ 120.0 minutes => 2 hours
+ 144.0 minutes => 144 minutes
+ 168.0 minutes => 168 minutes
+ 210.0 minutes => 210 minutes
+ 240.0 minutes => 4 hours
+ 288.0 minutes => 288 minutes
+ 300.0 minutes => 5 hours
+ 360.0 minutes => 6 hours
+ 420.0 minutes => 7 hours
+ 576.0 minutes => 576 minutes
+ 600.0 minutes => 10 hours
+ 700.0 minutes => 700 minutes
+ 720.0 minutes => 12 hours
+ 900.0 minutes => 15 hours
+ 1000.0 minutes => 1000 minutes
+ 1200.0 minutes => 20 hours
+ 1440.0 minutes => 1 day
+ 1800.0 minutes => 30 hours
+ 2400.0 minutes => 40 hours
+ 3000.0 minutes => 50 hours
+ 3600.0 minutes => 60 hours
+ 6000.0 minutes => 100 hours
+ 7000.0 minutes => 7000 minutes
+ 10000.0 minutes => 10000 minutes
+ 12000.0 minutes => 200 hours
+ 24000.0 minutes => 400 hours
+ 30000.0 minutes => 500 hours
+ 60000.0 minutes => 1000 hours
+ 100000.0 minutes => 100000 minutes
+ 1000000.0 minutes => 1000000 minutes
+ 7000000.0 minutes => 7000000 minutes
+ 1.0E7 minutes => 10000000 minutes
+ 1.2E7 minutes => 200000 hours
+ 2.4E7 minutes => 400000 hours
+ 3.0E7 minutes => 500000 hours
+ 6.0E7 minutes => 1000000 hours
+ 1.0E8 minutes => 100000000 minutes
+ 1.0E9 minutes => class java.lang.IllegalArgumentException
+ 1.0E12 minutes => class java.lang.IllegalArgumentException
+ 0.0 hours => 0 days
+ 1.0 hours => 1 hour
+ 7.0 hours => 7 hours
+ 10.0 hours => 10 hours
+ 12.0 hours => 12 hours
+ 24.0 hours => 1 day
+ 30.0 hours => 30 hours
+ 49.0 hours => 49 hours
+ 60.0 hours => 60 hours
+ 70.0 hours => 70 hours
+ 84.0 hours => 84 hours
+ 100.0 hours => 100 hours
+ 120.0 hours => 5 days
+ 144.0 hours => 6 days
+ 168.0 hours => 7 days
+ 210.0 hours => 210 hours
+ 240.0 hours => 10 days
+ 288.0 hours => 12 days
+ 300.0 hours => 300 hours
+ 360.0 hours => 15 days
+ 420.0 hours => 420 hours
+ 576.0 hours => 24 days
+ 600.0 hours => 25 days
+ 700.0 hours => 700 hours
+ 720.0 hours => 30 days
+ 900.0 hours => 900 hours
+ 1000.0 hours => 1000 hours
+ 1200.0 hours => 50 days
+ 1440.0 hours => 60 days
+ 1800.0 hours => 75 days
+ 2400.0 hours => 100 days
+ 3000.0 hours => 125 days
+ 3600.0 hours => 150 days
+ 6000.0 hours => 250 days
+ 7000.0 hours => 7000 hours
+ 10000.0 hours => 10000 hours
+ 12000.0 hours => 500 days
+ 24000.0 hours => 1000 days
+ 30000.0 hours => 1250 days
+ 60000.0 hours => 2500 days
+ 100000.0 hours => 100000 hours
+ 1000000.0 hours => 1000000 hours
+ 7000000.0 hours => class java.lang.IllegalArgumentException
+ 1.0E7 hours => class java.lang.IllegalArgumentException
+ 1.2E7 hours => class java.lang.IllegalArgumentException
+ 2.4E7 hours => class java.lang.IllegalArgumentException
+ 3.0E7 hours => class java.lang.IllegalArgumentException
+ 6.0E7 hours => class java.lang.IllegalArgumentException
+ 1.0E8 hours => class java.lang.IllegalArgumentException
+ 1.0E9 hours => class java.lang.IllegalArgumentException
+ 1.0E12 hours => class java.lang.IllegalArgumentException
+ 0.0 days => 0 days
+ 1.0 days => 1 day
+ 7.0 days => 7 days
+ 10.0 days => 10 days
+ 12.0 days => 12 days
+ 24.0 days => 24 days
+ 30.0 days => 30 days
+ 49.0 days => 49 days
+ 60.0 days => 60 days
+ 70.0 days => 70 days
+ 84.0 days => 84 days
+ 100.0 days => 100 days
+ 120.0 days => 120 days
+ 144.0 days => 144 days
+ 168.0 days => 168 days
+ 210.0 days => 210 days
+ 240.0 days => 240 days
+ 288.0 days => 288 days
+ 300.0 days => 300 days
+ 360.0 days => 360 days
+ 420.0 days => 420 days
+ 576.0 days => 576 days
+ 600.0 days => 600 days
+ 700.0 days => 700 days
+ 720.0 days => 720 days
+ 900.0 days => 900 days
+ 1000.0 days => 1000 days
+ 1200.0 days => 1200 days
+ 1440.0 days => 1440 days
+ 1800.0 days => 1800 days
+ 2400.0 days => 2400 days
+ 3000.0 days => 3000 days
+ 3600.0 days => 3600 days
+ 6000.0 days => 6000 days
+ 7000.0 days => 7000 days
+ 10000.0 days => 10000 days
+ 12000.0 days => 12000 days
+ 24000.0 days => 24000 days
+ 30000.0 days => 30000 days
+ 60000.0 days => 60000 days
+ 100000.0 days => 100000 days
+ 1000000.0 days => class java.lang.IllegalArgumentException
+ 7000000.0 days => class java.lang.IllegalArgumentException
+ 1.0E7 days => class java.lang.IllegalArgumentException
+ 1.2E7 days => class java.lang.IllegalArgumentException
+ 2.4E7 days => class java.lang.IllegalArgumentException
+ 3.0E7 days => class java.lang.IllegalArgumentException
+ 6.0E7 days => class java.lang.IllegalArgumentException
+ 1.0E8 days => class java.lang.IllegalArgumentException
+ 1.0E9 days => class java.lang.IllegalArgumentException
+ 1.0E12 days => class java.lang.IllegalArgumentException
+10000000000000001 nanoseconds => 10000000000000001 nanoseconds
+10000000000000002 nanoseconds => 10000000000000002 nanoseconds
+ Inf => Duration.Inf
+ -Inf => Duration.MinusInf
+ +Inf => Duration.Inf
+ PlusInf => Duration.Inf
+ MinusInf => Duration.MinusInf
diff --git a/test/files/jvm/duration-java/Test.java b/test/files/jvm/duration-java/Test.java
new file mode 100644
index 0000000..94f3f83
--- /dev/null
+++ b/test/files/jvm/duration-java/Test.java
@@ -0,0 +1,46 @@
+import scala.concurrent.duration.Duration;
+import java.util.*;
+import java.util.concurrent.TimeUnit;
+import static java.util.concurrent.TimeUnit.*;
+
+public class Test {
+ public static List<Double> inputs = Arrays.asList(0d, 1d, 7d, 10d, 12d, 24d, 30d, 60d, 100d, 1000d, 1e6);
+ public static List<Double> makeNumbers() {
+ ArrayList<Double> xs = new ArrayList<Double>();
+ for (Double n1: inputs) {
+ for (Double n2: inputs) {
+ Double n = n1 * n2;
+ if (!xs.contains(n))
+ xs.add(n);
+ }
+ }
+ Double[] arr = xs.toArray(new Double[0]);
+ Arrays.sort(arr);
+ return Arrays.asList(arr);
+ }
+
+ public static void p(Object x) {
+ System.out.println(x);
+ }
+ public static void main(String[] args) {
+ for (TimeUnit t : TimeUnit.values()) {
+ for (Double n: makeNumbers()) {
+ String s = "" + n + " " + t.toString().toLowerCase();
+ String result;
+ try {
+ Duration d = Duration.create(n, t);
+ result = d.toString();
+ } catch(Exception e) {
+ result = e.getClass().toString();
+ }
+ p(String.format("%25s => %s", s, result));
+ }
+ }
+ for (String s: new String[] {"10000000000000001 nanoseconds", "10000000000000002 nanoseconds"})
+ p(String.format("%25s => %s", s, Duration.create(s)));
+ for (String s: Arrays.asList("Inf", "-Inf", "+Inf", "PlusInf", "MinusInf")) {
+ Duration d = Duration.create(s);
+ p(String.format("%25s => %s", s, d));
+ }
+ }
+}
diff --git a/test/files/jvm/duration-tck.scala b/test/files/jvm/duration-tck.scala
new file mode 100644
index 0000000..b257344
--- /dev/null
+++ b/test/files/jvm/duration-tck.scala
@@ -0,0 +1,198 @@
+/**
+ * Copyright (C) 2012 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+import scala.concurrent.duration._
+import scala.reflect._
+import scala.tools.partest.TestUtil.intercept
+
+object Test extends App {
+
+ implicit class Assert(val left: Any) extends AnyVal {
+ import Duration.Undefined
+ def mustBe(right: Any) = right match {
+ case r: Double if r.isNaN => assert(left.asInstanceOf[Double].isNaN, s"$left was not NaN")
+ case r: Double if r == 0 && r.compareTo(0) == -1 => assert(left == 0 && left.asInstanceOf[Double].compareTo(0) == -1, s"$left was not -0.0")
+ case Undefined => assert(left.asInstanceOf[AnyRef] eq Undefined, s"$left was not Undefined")
+ case _ => assert(left == right, s"$left was not equal to $right")
+ }
+ }
+
+ val zero = 0 seconds
+ val one = 1 second
+ val two = one + one
+ val three = 3 * one
+ val inf = Duration.Inf
+ val minf = Duration.MinusInf
+ val undef = Duration.Undefined
+ val inputs = List(zero, one, inf, minf, undef)
+ val nan = Double.NaN
+
+ // test field ops
+ one.isFinite mustBe true
+ 0 * one mustBe zero
+ 2 * one mustBe two
+ three - two mustBe one
+ three / 3 mustBe one
+ two / one mustBe 2
+ one + zero mustBe one
+ one / 1000000 mustBe 1.micro
+
+
+ // test infinities
+
+ inf.isFinite mustBe false
+ minf.isFinite mustBe false
+
+ inf mustBe inf
+ minf mustBe minf
+ -inf mustBe minf
+ -minf mustBe inf
+
+ minf + inf mustBe undef
+ inf - inf mustBe undef
+ inf + minf mustBe undef
+ minf - minf mustBe undef
+
+ inf + inf mustBe inf
+ inf - minf mustBe inf
+ minf - inf mustBe minf
+ minf + minf mustBe minf
+
+ inf.compareTo(inf) mustBe 0
+ inf.compareTo(one) mustBe 1
+ inf.compareTo(minf) mustBe 1
+ minf.compareTo(minf) mustBe 0
+ minf.compareTo(one) mustBe -1
+ minf.compareTo(inf) mustBe -1
+
+ assert(inf != minf)
+ assert(minf != inf)
+ assert(one != inf)
+ assert(minf != one)
+
+ inf mustBe (minf * -1d)
+ inf mustBe (minf / -1d)
+
+ one / inf mustBe 0d
+ -one / inf mustBe -0d
+ one / minf mustBe -0d
+ -one / minf mustBe 0d
+
+ inputs filterNot (_.isFinite) foreach (x => x / zero mustBe x.toUnit(DAYS))
+ inputs filterNot (_.isFinite) foreach (_ * 0d mustBe undef)
+ inputs filterNot (_.isFinite) foreach (_ * -0d mustBe undef)
+ inputs filterNot (_.isFinite) foreach (x => x * Double.PositiveInfinity mustBe x)
+ inputs filterNot (_.isFinite) foreach (x => x * Double.NegativeInfinity mustBe -x)
+
+ inf.toUnit(SECONDS) mustBe Double.PositiveInfinity
+ minf.toUnit(MINUTES) mustBe Double.NegativeInfinity
+ Duration.fromNanos(Double.PositiveInfinity) mustBe inf
+ Duration.fromNanos(Double.NegativeInfinity) mustBe minf
+
+
+ // test undefined & NaN
+
+ undef.isFinite mustBe false
+ -undef mustBe undef
+ assert(undef != undef)
+ assert(undef eq undef)
+
+ inputs foreach (_ + undef mustBe undef)
+ inputs foreach (_ - undef mustBe undef)
+ inputs foreach (_ / undef mustBe nan)
+ inputs foreach (_ / nan mustBe undef)
+ inputs foreach (_ * nan mustBe undef)
+ inputs foreach (undef + _ mustBe undef)
+ inputs foreach (undef - _ mustBe undef)
+ inputs foreach (undef / _ mustBe nan)
+ undef / 1 mustBe undef
+ undef / nan mustBe undef
+ undef * 1 mustBe undef
+ undef * nan mustBe undef
+ inputs foreach (x => x / zero mustBe x.toUnit(SECONDS) / 0d)
+ inputs foreach (x => x / 0d mustBe Duration.fromNanos(x.toUnit(NANOSECONDS) / 0d))
+ inputs foreach (x => x / -0d mustBe Duration.fromNanos(x.toUnit(NANOSECONDS) / -0d))
+
+ inputs filterNot (_ eq undef) foreach (_ compareTo undef mustBe -1)
+ inputs filterNot (_ eq undef) foreach (undef compareTo _ mustBe 1)
+ undef compare undef mustBe 0
+
+ undef.toUnit(DAYS) mustBe nan
+ Duration.fromNanos(nan) mustBe undef
+
+
+ // test overflow protection
+ for (unit ← Seq(DAYS, HOURS, MINUTES, SECONDS, MILLISECONDS, MICROSECONDS, NANOSECONDS)) {
+ val x = unit.convert(Long.MaxValue, NANOSECONDS)
+ val dur = Duration(x, unit)
+ val mdur = Duration(-x, unit)
+ -mdur mustBe (dur)
+ intercept[IllegalArgumentException] { Duration(x + 10000000d, unit) }
+ intercept[IllegalArgumentException] { Duration(-x - 10000000d, unit) }
+ if (unit != NANOSECONDS) {
+ intercept[IllegalArgumentException] { Duration(x + 1, unit) }
+ intercept[IllegalArgumentException] { Duration(-x - 1, unit) }
+ }
+ intercept[IllegalArgumentException] { dur + 1.day }
+ intercept[IllegalArgumentException] { mdur - 1.day }
+ intercept[IllegalArgumentException] { dur * 1.1 }
+ intercept[IllegalArgumentException] { mdur * 1.1 }
+ intercept[IllegalArgumentException] { dur * 2.1 }
+ intercept[IllegalArgumentException] { mdur * 2.1 }
+ intercept[IllegalArgumentException] { dur / 0.9 }
+ intercept[IllegalArgumentException] { mdur / 0.9 }
+ intercept[IllegalArgumentException] { dur / 0.4 }
+ intercept[IllegalArgumentException] { mdur / 0.4 }
+ Duration(x + unit.toString.toLowerCase)
+ Duration("-" + x + unit.toString.toLowerCase)
+ intercept[IllegalArgumentException] { Duration("%.0f".format(x + 10000000d) + unit.toString.toLowerCase) }
+ intercept[IllegalArgumentException] { Duration("-%.0f".format(x + 10000000d) + unit.toString.toLowerCase) }
+ }
+ intercept[IllegalArgumentException] { Duration.fromNanos(1e20) }
+ intercept[IllegalArgumentException] { Duration.fromNanos(-1e20) }
+
+
+ // test precision
+ 1.second + 1.millisecond mustBe 1001.milliseconds
+ 100000.days + 1.nanosecond mustBe 8640000000000000001L.nanoseconds
+ 1.5.seconds.toSeconds mustBe 1
+ (-1.5).seconds.toSeconds mustBe -1
+
+
+ // test unit stability
+ 1000.millis.unit mustBe MILLISECONDS
+ (1000.millis + 0.days).unit mustBe MILLISECONDS
+ 1.second.unit mustBe SECONDS
+ (1.second + 1.millisecond).unit mustBe MILLISECONDS
+
+
+ // test Deadline
+ val dead = 2.seconds.fromNow
+ val dead2 = 2 seconds fromNow
+
+ { val l = dead.timeLeft; assert(l > 1.second, s"$l <= 1.second") }
+ { val l = dead2.timeLeft; assert(l > 1.second, s"$l <= 1.second") }
+
+ Thread.sleep(1.second.toMillis)
+
+ // unfortunately it can happen that the sleep() returns early without throwing
+ { val l = dead.timeLeft; assert(l <= 1100.millis, s"$l > 1100.millis") }
+ { val l = dead2.timeLeft; assert(l <= 1100.millis, s"$l > 1100.millis") }
+
+
+ // test integer mul/div
+ 500.millis * 2 mustBe 1.second
+ (500.millis * 2).unit mustBe MILLISECONDS
+ 1.second / 2 mustBe 500.millis
+ (1.second / 2).unit mustBe MILLISECONDS
+
+
+ // check statically retaining finite-ness
+ val finiteDuration: FiniteDuration = 1.second * 2 / 3 mul 5 div 4 plus 3.seconds minus 1.millisecond min 1.second max 1.second
+ val finite2: FiniteDuration = 2 * 1.second + 3L * 2.seconds
+ finite2 mustBe 8.seconds
+ ((2 seconds fromNow).timeLeft: FiniteDuration) < 4.seconds mustBe true
+ val finite3: FiniteDuration = 3.5 seconds span
+
+}
diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala
new file mode 100644
index 0000000..ddd819c
--- /dev/null
+++ b/test/files/jvm/future-spec/FutureTests.scala
@@ -0,0 +1,553 @@
+
+
+
+import scala.concurrent._
+import scala.concurrent.duration._
+import scala.concurrent.duration.Duration.Inf
+import scala.collection._
+import scala.runtime.NonLocalReturnControl
+import scala.util.{Try,Success,Failure}
+
+
+
+object FutureTests extends MinimalScalaTest {
+
+ /* some utils */
+
+ def testAsync(s: String)(implicit ec: ExecutionContext): Future[String] = s match {
+ case "Hello" => future { "World" }
+ case "Failure" => Future.failed(new RuntimeException("Expected exception; to test fault-tolerance"))
+ case "NoReply" => Promise[String]().future
+ }
+
+ val defaultTimeout = 5 seconds
+
+ /* future specification */
+
+ "A future with custom ExecutionContext" should {
+ "shouldHandleThrowables" in {
+ val ms = new mutable.HashSet[Throwable] with mutable.SynchronizedSet[Throwable]
+ implicit val ec = scala.concurrent.ExecutionContext.fromExecutor(new scala.concurrent.forkjoin.ForkJoinPool(), {
+ t =>
+ ms += t
+ })
+
+ class ThrowableTest(m: String) extends Throwable(m)
+
+ val f1 = future[Any] {
+ throw new ThrowableTest("test")
+ }
+
+ intercept[ThrowableTest] {
+ Await.result(f1, defaultTimeout)
+ }
+
+ val latch = new TestLatch
+ val f2 = future {
+ Await.ready(latch, 5 seconds)
+ "success"
+ }
+ val f3 = f2 map { s => s.toUpperCase }
+
+ f2 foreach { _ => throw new ThrowableTest("dispatcher foreach") }
+ f2 onSuccess { case _ => throw new ThrowableTest("dispatcher receive") }
+
+ latch.open()
+
+ Await.result(f2, defaultTimeout) mustBe ("success")
+
+ f2 foreach { _ => throw new ThrowableTest("current thread foreach") }
+ f2 onSuccess { case _ => throw new ThrowableTest("current thread receive") }
+
+ Await.result(f3, defaultTimeout) mustBe ("SUCCESS")
+
+ val waiting = future {
+ Thread.sleep(1000)
+ }
+ Await.ready(waiting, 2000 millis)
+
+ ms.size mustBe (4)
+ //FIXME should check
+ }
+ }
+
+ "The Future companion object" should {
+ "call ExecutionContext.prepare on apply" in {
+ val p = Promise[Boolean]()
+ val ec = new ExecutionContext {
+ val delegate = ExecutionContext.global
+ override def prepare(): ExecutionContext = {
+ p.success(true)
+ delegate.prepare
+ }
+ override def execute(r: Runnable) = delegate.execute(r)
+ override def reportFailure(t: Throwable): Unit = delegate.reportFailure(t)
+ }
+
+ val f = Future("foo")(ec)
+ Await.result(f, defaultTimeout) mustBe ("foo")
+ Await.result(p.future, defaultTimeout) mustBe (true)
+ }
+ }
+
+ "The default ExecutionContext" should {
+ "report uncaught exceptions" in {
+ val p = Promise[Throwable]()
+ val logThrowable: Throwable => Unit = p.trySuccess(_)
+ val ec: ExecutionContext = ExecutionContext.fromExecutor(null, logThrowable)
+
+ val t = new NotImplementedError("foo")
+ val f = Future(throw t)(ec)
+ Await.result(p.future, 2.seconds) mustBe t
+ }
+ }
+
+ "A future with global ExecutionContext" should {
+ import ExecutionContext.Implicits._
+
+ "compose with for-comprehensions" in {
+ def async(x: Int) = future { (x * 2).toString }
+ val future0 = future[Any] {
+ "five!".length
+ }
+
+ val future1 = for {
+ a <- future0.mapTo[Int] // returns 5
+ b <- async(a) // returns "10"
+ c <- async(7) // returns "14"
+ } yield b + "-" + c
+
+ val future2 = for {
+ a <- future0.mapTo[Int]
+ b <- (future { (a * 2).toString }).mapTo[Int]
+ c <- future { (7 * 2).toString }
+ } yield b + "-" + c
+
+ Await.result(future1, defaultTimeout) mustBe ("10-14")
+ assert(checkType(future1, manifest[String]))
+ intercept[ClassCastException] { Await.result(future2, defaultTimeout) }
+ }
+
+ "support pattern matching within a for-comprehension" in {
+ case class Req[T](req: T)
+ case class Res[T](res: T)
+ def async[T](req: Req[T]) = req match {
+ case Req(s: String) => future { Res(s.length) }
+ case Req(i: Int) => future { Res((i * 2).toString) }
+ }
+
+ val future1 = for {
+ Res(a: Int) <- async(Req("Hello"))
+ Res(b: String) <- async(Req(a))
+ Res(c: String) <- async(Req(7))
+ } yield b + "-" + c
+
+ val future2 = for {
+ Res(a: Int) <- async(Req("Hello"))
+ Res(b: Int) <- async(Req(a))
+ Res(c: Int) <- async(Req(7))
+ } yield b + "-" + c
+
+ Await.result(future1, defaultTimeout) mustBe ("10-14")
+ intercept[NoSuchElementException] { Await.result(future2, defaultTimeout) }
+ }
+
+ "recover from exceptions" in {
+ val future1 = Future(5)
+ val future2 = future1 map (_ / 0)
+ val future3 = future2 map (_.toString)
+
+ val future4 = future1 recover {
+ case e: ArithmeticException => 0
+ } map (_.toString)
+
+ val future5 = future2 recover {
+ case e: ArithmeticException => 0
+ } map (_.toString)
+
+ val future6 = future2 recover {
+ case e: MatchError => 0
+ } map (_.toString)
+
+ val future7 = future3 recover {
+ case e: ArithmeticException => "You got ERROR"
+ }
+
+ val future8 = testAsync("Failure")
+ val future9 = testAsync("Failure") recover {
+ case e: RuntimeException => "FAIL!"
+ }
+ val future10 = testAsync("Hello") recover {
+ case e: RuntimeException => "FAIL!"
+ }
+ val future11 = testAsync("Failure") recover {
+ case _ => "Oops!"
+ }
+
+ Await.result(future1, defaultTimeout) mustBe (5)
+ intercept[ArithmeticException] { Await.result(future2, defaultTimeout) }
+ intercept[ArithmeticException] { Await.result(future3, defaultTimeout) }
+ Await.result(future4, defaultTimeout) mustBe ("5")
+ Await.result(future5, defaultTimeout) mustBe ("0")
+ intercept[ArithmeticException] { Await.result(future6, defaultTimeout) }
+ Await.result(future7, defaultTimeout) mustBe ("You got ERROR")
+ intercept[RuntimeException] { Await.result(future8, defaultTimeout) }
+ Await.result(future9, defaultTimeout) mustBe ("FAIL!")
+ Await.result(future10, defaultTimeout) mustBe ("World")
+ Await.result(future11, defaultTimeout) mustBe ("Oops!")
+ }
+
+ "recoverWith from exceptions" in {
+ val o = new IllegalStateException("original")
+ val r = new IllegalStateException("recovered")
+
+ intercept[IllegalStateException] {
+ val failed = Future.failed[String](o) recoverWith {
+ case _ if false == true => Future.successful("yay!")
+ }
+ Await.result(failed, defaultTimeout)
+ } mustBe (o)
+
+ val recovered = Future.failed[String](o) recoverWith {
+ case _ => Future.successful("yay!")
+ }
+ Await.result(recovered, defaultTimeout) mustBe ("yay!")
+
+ intercept[IllegalStateException] {
+ val refailed = Future.failed[String](o) recoverWith {
+ case _ => Future.failed[String](r)
+ }
+ Await.result(refailed, defaultTimeout)
+ } mustBe (r)
+ }
+
+ "andThen like a boss" in {
+ val q = new java.util.concurrent.LinkedBlockingQueue[Int]
+ for (i <- 1 to 1000) {
+ val chained = future {
+ q.add(1); 3
+ } andThen {
+ case _ => q.add(2)
+ } andThen {
+ case Success(0) => q.add(Int.MaxValue)
+ } andThen {
+ case _ => q.add(3);
+ }
+ Await.result(chained, defaultTimeout) mustBe (3)
+ q.poll() mustBe (1)
+ q.poll() mustBe (2)
+ q.poll() mustBe (3)
+ q.clear()
+ }
+ }
+
+ "firstCompletedOf" in {
+ def futures = Vector.fill[Future[Int]](10) {
+ Promise[Int]().future
+ } :+ Future.successful[Int](5)
+
+ Await.result(Future.firstCompletedOf(futures), defaultTimeout) mustBe (5)
+ Await.result(Future.firstCompletedOf(futures.iterator), defaultTimeout) mustBe (5)
+ }
+
+ "find" in {
+ val futures = for (i <- 1 to 10) yield future {
+ i
+ }
+
+ val result = Future.find[Int](futures)(_ == 3)
+ Await.result(result, defaultTimeout) mustBe (Some(3))
+
+ val notFound = Future.find[Int](futures.iterator)(_ == 11)
+ Await.result(notFound, defaultTimeout) mustBe (None)
+ }
+
+ "zip" in {
+ val timeout = 10000 millis
+ val f = new IllegalStateException("test")
+ intercept[IllegalStateException] {
+ val failed = Future.failed[String](f) zip Future.successful("foo")
+ Await.result(failed, timeout)
+ } mustBe (f)
+
+ intercept[IllegalStateException] {
+ val failed = Future.successful("foo") zip Future.failed[String](f)
+ Await.result(failed, timeout)
+ } mustBe (f)
+
+ intercept[IllegalStateException] {
+ val failed = Future.failed[String](f) zip Future.failed[String](f)
+ Await.result(failed, timeout)
+ } mustBe (f)
+
+ val successful = Future.successful("foo") zip Future.successful("foo")
+ Await.result(successful, timeout) mustBe (("foo", "foo"))
+ }
+
+ "fold" in {
+ val timeout = 10000 millis
+ def async(add: Int, wait: Int) = future {
+ Thread.sleep(wait)
+ add
+ }
+
+ val futures = (0 to 9) map {
+ idx => async(idx, idx * 20)
+ }
+ val folded = Future.fold(futures)(0)(_ + _)
+ Await.result(folded, timeout) mustBe (45)
+
+ val futuresit = (0 to 9) map {
+ idx => async(idx, idx * 20)
+ }
+ val foldedit = Future.fold(futures)(0)(_ + _)
+ Await.result(foldedit, timeout) mustBe (45)
+ }
+
+ "fold by composing" in {
+ val timeout = 10000 millis
+ def async(add: Int, wait: Int) = future {
+ Thread.sleep(wait)
+ add
+ }
+ def futures = (0 to 9) map {
+ idx => async(idx, idx * 20)
+ }
+ val folded = futures.foldLeft(Future(0)) {
+ case (fr, fa) => for (r <- fr; a <- fa) yield (r + a)
+ }
+ Await.result(folded, timeout) mustBe (45)
+ }
+
+ "fold with an exception" in {
+ val timeout = 10000 millis
+ def async(add: Int, wait: Int) = future {
+ Thread.sleep(wait)
+ if (add == 6) throw new IllegalArgumentException("shouldFoldResultsWithException: expected")
+ add
+ }
+ def futures = (0 to 9) map {
+ idx => async(idx, idx * 10)
+ }
+ val folded = Future.fold(futures)(0)(_ + _)
+ intercept[IllegalArgumentException] {
+ Await.result(folded, timeout)
+ }.getMessage mustBe ("shouldFoldResultsWithException: expected")
+ }
+
+ "fold mutable zeroes safely" in {
+ import scala.collection.mutable.ArrayBuffer
+ def test(testNumber: Int) {
+ val fs = (0 to 1000) map (i => Future(i))
+ val f = Future.fold(fs)(ArrayBuffer.empty[AnyRef]) {
+ case (l, i) if i % 2 == 0 => l += i.asInstanceOf[AnyRef]
+ case (l, _) => l
+ }
+ val result = Await.result(f.mapTo[ArrayBuffer[Int]], 10000 millis).sum
+
+ assert(result == 250500)
+ }
+
+ (1 to 100) foreach test //Make sure it tries to provoke the problem
+ }
+
+ "return zero value if folding empty list" in {
+ val zero = Future.fold(List[Future[Int]]())(0)(_ + _)
+ Await.result(zero, defaultTimeout) mustBe (0)
+ }
+
+ "shouldReduceResults" in {
+ def async(idx: Int) = future {
+ Thread.sleep(idx * 20)
+ idx
+ }
+ val timeout = 10000 millis
+
+ val futures = (0 to 9) map { async }
+ val reduced = Future.reduce(futures)(_ + _)
+ Await.result(reduced, timeout) mustBe (45)
+
+ val futuresit = (0 to 9) map { async }
+ val reducedit = Future.reduce(futuresit)(_ + _)
+ Await.result(reducedit, timeout) mustBe (45)
+ }
+
+ "shouldReduceResultsWithException" in {
+ def async(add: Int, wait: Int) = future {
+ Thread.sleep(wait)
+ if (add == 6) throw new IllegalArgumentException("shouldFoldResultsWithException: expected")
+ else add
+ }
+ val timeout = 10000 millis
+ def futures = (1 to 10) map {
+ idx => async(idx, idx * 10)
+ }
+ val failed = Future.reduce(futures)(_ + _)
+ intercept[IllegalArgumentException] {
+ Await.result(failed, timeout)
+ }.getMessage mustBe ("shouldFoldResultsWithException: expected")
+ }
+
+ "shouldReduceThrowNSEEOnEmptyInput" in {
+ intercept[java.util.NoSuchElementException] {
+ val emptyreduced = Future.reduce(List[Future[Int]]())(_ + _)
+ Await.result(emptyreduced, defaultTimeout)
+ }
+ }
+
+ "shouldTraverseFutures" in {
+ object counter {
+ var count = -1
+ def incAndGet() = counter.synchronized {
+ count += 2
+ count
+ }
+ }
+
+ val oddFutures = List.fill(100)(future { counter.incAndGet() }).iterator
+ val traversed = Future.sequence(oddFutures)
+ Await.result(traversed, defaultTimeout).sum mustBe (10000)
+
+ val list = (1 to 100).toList
+ val traversedList = Future.traverse(list)(x => Future(x * 2 - 1))
+ Await.result(traversedList, defaultTimeout).sum mustBe (10000)
+
+ val iterator = (1 to 100).toList.iterator
+ val traversedIterator = Future.traverse(iterator)(x => Future(x * 2 - 1))
+ Await.result(traversedIterator, defaultTimeout).sum mustBe (10000)
+ }
+
+ "shouldBlockUntilResult" in {
+ val latch = new TestLatch
+
+ val f = future {
+ Await.ready(latch, 5 seconds)
+ 5
+ }
+ val f2 = future {
+ val res = Await.result(f, Inf)
+ res + 9
+ }
+
+ intercept[TimeoutException] {
+ Await.ready(f2, 100 millis)
+ }
+
+ latch.open()
+
+ Await.result(f2, defaultTimeout) mustBe (14)
+
+ val f3 = future {
+ Thread.sleep(100)
+ 5
+ }
+
+ intercept[TimeoutException] {
+ Await.ready(f3, 0 millis)
+ }
+ }
+
+ "run callbacks async" in {
+ val latch = Vector.fill(10)(new TestLatch)
+
+ val f1 = future {
+ latch(0).open()
+ Await.ready(latch(1), TestLatch.DefaultTimeout)
+ "Hello"
+ }
+ val f2 = f1 map {
+ s =>
+ latch(2).open()
+ Await.ready(latch(3), TestLatch.DefaultTimeout)
+ s.length
+ }
+ for (_ <- f2) latch(4).open()
+
+ Await.ready(latch(0), TestLatch.DefaultTimeout)
+
+ f1.isCompleted mustBe (false)
+ f2.isCompleted mustBe (false)
+
+ latch(1).open()
+ Await.ready(latch(2), TestLatch.DefaultTimeout)
+
+ f1.isCompleted mustBe (true)
+ f2.isCompleted mustBe (false)
+
+ val f3 = f1 map {
+ s =>
+ latch(5).open()
+ Await.ready(latch(6), TestLatch.DefaultTimeout)
+ s.length * 2
+ }
+ for (_ <- f3) latch(3).open()
+
+ Await.ready(latch(5), TestLatch.DefaultTimeout)
+
+ f3.isCompleted mustBe (false)
+
+ latch(6).open()
+ Await.ready(latch(4), TestLatch.DefaultTimeout)
+
+ f2.isCompleted mustBe (true)
+ f3.isCompleted mustBe (true)
+
+ val p1 = Promise[String]()
+ val f4 = p1.future map {
+ s =>
+ latch(7).open()
+ Await.ready(latch(8), TestLatch.DefaultTimeout)
+ s.length
+ }
+ for (_ <- f4) latch(9).open()
+
+ p1.future.isCompleted mustBe (false)
+ f4.isCompleted mustBe (false)
+
+ p1 complete Success("Hello")
+
+ Await.ready(latch(7), TestLatch.DefaultTimeout)
+
+ p1.future.isCompleted mustBe (true)
+ f4.isCompleted mustBe (false)
+
+ latch(8).open()
+ Await.ready(latch(9), TestLatch.DefaultTimeout)
+
+ Await.ready(f4, defaultTimeout).isCompleted mustBe (true)
+ }
+
+ "should not deadlock with nested await (ticket 1313)" in {
+ val simple = Future() map {
+ _ =>
+ val unit = Future(())
+ val umap = unit map { _ => () }
+ Await.result(umap, Inf)
+ }
+ Await.ready(simple, Inf).isCompleted mustBe (true)
+
+ val l1, l2 = new TestLatch
+ val complex = Future() map {
+ _ =>
+ blocking {
+ val nested = Future(())
+ for (_ <- nested) l1.open()
+ Await.ready(l1, TestLatch.DefaultTimeout) // make sure nested is completed
+ for (_ <- nested) l2.open()
+ Await.ready(l2, TestLatch.DefaultTimeout)
+ }
+ }
+ Await.ready(complex, defaultTimeout).isCompleted mustBe (true)
+ }
+
+ "should not throw when Await.ready" in {
+ val expected = try Success(5 / 0) catch { case a: ArithmeticException => Failure(a) }
+ val f = future(5).map(_ / 0)
+ Await.ready(f, defaultTimeout).value.get.toString mustBe expected.toString
+ }
+
+ }
+
+}
+
+
diff --git a/test/files/jvm/future-spec/PromiseTests.scala b/test/files/jvm/future-spec/PromiseTests.scala
new file mode 100644
index 0000000..48f9466
--- /dev/null
+++ b/test/files/jvm/future-spec/PromiseTests.scala
@@ -0,0 +1,246 @@
+
+
+
+import scala.concurrent._
+import scala.concurrent.duration._
+import scala.concurrent.duration.Duration.Inf
+import scala.collection._
+import scala.runtime.NonLocalReturnControl
+import scala.util.{Try,Success,Failure}
+
+
+object PromiseTests extends MinimalScalaTest {
+ import ExecutionContext.Implicits._
+
+ val defaultTimeout = Inf
+
+ /* promise specification */
+
+ "An empty Promise" should {
+
+ "not be completed" in {
+ val p = Promise()
+ p.future.isCompleted mustBe (false)
+ p.isCompleted mustBe (false)
+ }
+
+ "have no value" in {
+ val p = Promise()
+ p.future.value mustBe (None)
+ p.isCompleted mustBe (false)
+ }
+
+ "return supplied value on timeout" in {
+ val failure = Promise.failed[String](new RuntimeException("br0ken")).future
+ val otherFailure = Promise.failed[String](new RuntimeException("last")).future
+ val empty = Promise[String]().future
+ val timedOut = Promise.successful[String]("Timedout").future
+
+ Await.result(failure fallbackTo timedOut, defaultTimeout) mustBe ("Timedout")
+ Await.result(timedOut fallbackTo empty, defaultTimeout) mustBe ("Timedout")
+ Await.result(otherFailure fallbackTo failure fallbackTo timedOut, defaultTimeout) mustBe ("Timedout")
+ intercept[RuntimeException] {
+ Await.result(failure fallbackTo otherFailure, defaultTimeout)
+ }.getMessage mustBe ("br0ken")
+ }
+
+ }
+
+ "A successful Promise" should {
+ val result = "test value"
+ val promise = Promise[String]().complete(Success(result))
+ promise.isCompleted mustBe (true)
+ futureWithResult(_(promise.future, result))
+ }
+
+ "A failed Promise" should {
+ val message = "Expected Exception"
+ val promise = Promise[String]().complete(Failure(new RuntimeException(message)))
+ promise.isCompleted mustBe (true)
+ futureWithException[RuntimeException](_(promise.future, message))
+ }
+
+ "An interrupted Promise" should {
+ val message = "Boxed InterruptedException"
+ val future = Promise[String]().complete(Failure(new InterruptedException(message))).future
+ futureWithException[ExecutionException](_(future, message))
+ }
+
+ "A NonLocalReturnControl failed Promise" should {
+ val result = "test value"
+ val future = Promise[String]().complete(Failure(new NonLocalReturnControl[String]("test", result))).future
+ futureWithResult(_(future, result))
+ }
+
+ def futureWithResult(f: ((Future[Any], Any) => Unit) => Unit) {
+
+ "be completed" in { f((future, _) => future.isCompleted mustBe (true)) }
+
+ "contain a value" in { f((future, result) => future.value mustBe (Some(Success(result)))) }
+
+ "return when ready with 'Await.ready'" in { f((future, result) => Await.ready(future, defaultTimeout).isCompleted mustBe (true)) }
+
+ "return result with 'Await.result'" in { f((future, result) => Await.result(future, defaultTimeout) mustBe (result)) }
+
+ "not timeout" in { f((future, _) => Await.ready(future, 0 millis)) }
+
+ "filter result" in {
+ f {
+ (future, result) =>
+ Await.result((future filter (_ => true)), defaultTimeout) mustBe (result)
+ intercept[NoSuchElementException] {
+ Await.result((future filter (_ => false)), defaultTimeout)
+ }
+ }
+ }
+
+ "transform result with map" in { f((future, result) => Await.result((future map (_.toString.length)), defaultTimeout) mustBe (result.toString.length)) }
+
+ "compose result with flatMap" in {
+ f { (future, result) =>
+ val r = for (r <- future; p <- Promise.successful("foo").future) yield r.toString + p
+ Await.result(r, defaultTimeout) mustBe (result.toString + "foo")
+ }
+ }
+
+ "perform action with foreach" in {
+ f {
+ (future, result) =>
+ val p = Promise[Any]()
+ future foreach p.success
+ Await.result(p.future, defaultTimeout) mustBe (result)
+ }
+ }
+
+ "zip properly" in {
+ f {
+ (future, result) =>
+ Await.result(future zip Promise.successful("foo").future, defaultTimeout) mustBe ((result, "foo"))
+ intercept[RuntimeException] {
+ Await.result(future zip Promise.failed(new RuntimeException("ohnoes")).future, defaultTimeout)
+ }.getMessage mustBe ("ohnoes")
+ }
+ }
+
+ "not recover from exception" in { f((future, result) => Await.result(future.recover({ case _ => "pigdog" }), defaultTimeout) mustBe (result)) }
+
+ "perform action on result" in {
+ f {
+ (future, result) =>
+ val p = Promise[Any]()
+ future.onSuccess { case x => p.success(x) }
+ Await.result(p.future, defaultTimeout) mustBe (result)
+ }
+ }
+
+ "not project a failure" in {
+ f {
+ (future, result) =>
+ intercept[NoSuchElementException] {
+ Await.result(future.failed, defaultTimeout)
+ }.getMessage mustBe ("Future.failed not completed with a throwable.")
+ }
+ }
+
+ "cast using mapTo" in {
+ f {
+ (future, result) =>
+ Await.result(future.mapTo[Boolean].recover({ case _: ClassCastException ⇒ false }), defaultTimeout) mustBe (false)
+ }
+ }
+
+ }
+
+ def futureWithException[E <: Throwable: Manifest](f: ((Future[Any], String) => Unit) => Unit) {
+
+ "be completed" in {
+ f((future, _) => future.isCompleted mustBe (true))
+ }
+
+ "contain a value" in {
+ f((future, message) => {
+ future.value.get.failed.get.getMessage mustBe (message)
+ })
+ }
+
+ "throw not throw exception with 'Await.ready'" in {
+ f {
+ (future, message) => Await.ready(future, defaultTimeout).isCompleted mustBe (true)
+ }
+ }
+
+ "throw exception with 'Await.result'" in {
+ f {
+ (future, message) =>
+ intercept[E] {
+ Await.result(future, defaultTimeout)
+ }.getMessage mustBe (message)
+ }
+ }
+
+ "retain exception with filter" in {
+ f {
+ (future, message) =>
+ intercept[E] { Await.result(future filter (_ => true), defaultTimeout) }.getMessage mustBe (message)
+ intercept[E] { Await.result(future filter (_ => false), defaultTimeout) }.getMessage mustBe (message)
+ }
+ }
+
+ "retain exception with map" in {
+ f {
+ (future, message) =>
+ intercept[E] { Await.result(future map (_.toString.length), defaultTimeout) }.getMessage mustBe (message)
+ }
+ }
+
+ "retain exception with flatMap" in {
+ f {
+ (future, message) =>
+ intercept[E] { Await.result(future flatMap (_ => Promise.successful("foo").future), defaultTimeout) }.getMessage mustBe (message)
+ }
+ }
+
+ "zip properly" in {
+ f {
+ (future, message) =>
+ intercept[E] {
+ Await.result(future zip Promise.successful("foo").future, defaultTimeout)
+ }.getMessage mustBe (message)
+ }
+ }
+
+ "recover from exception" in {
+ f {
+ (future, message) =>
+ Await.result(future.recover({ case e if e.getMessage == message ⇒ "pigdog" }), defaultTimeout) mustBe ("pigdog")
+ }
+ }
+
+ "project a failure" in {
+ f((future, message) => Await.result(future.failed, defaultTimeout).getMessage mustBe (message))
+ }
+
+ "perform action on exception" in {
+ f {
+ (future, message) =>
+ val p = Promise[Any]()
+ future.onFailure { case _ => p.success(message) }
+ Await.result(p.future, defaultTimeout) mustBe (message)
+ }
+ }
+
+ "always cast successfully using mapTo" in {
+ f {
+ (future, message) =>
+ intercept[E] { Await.result(future.mapTo[java.lang.Thread], defaultTimeout) }.getMessage mustBe (message)
+ }
+ }
+ }
+}
+
+
+
+
+
+
+
diff --git a/test/files/jvm/future-spec/TryTests.scala b/test/files/jvm/future-spec/TryTests.scala
new file mode 100644
index 0000000..5d1b9b8
--- /dev/null
+++ b/test/files/jvm/future-spec/TryTests.scala
@@ -0,0 +1,130 @@
+// This is a port of the com.twitter.util Try spec.
+// --
+// It lives in the future-spec directory simply because it requires a specs-like
+// DSL which has already been minimally implemented for the future spec tests.
+
+import scala.util.{Try,Success,Failure}
+
+object TryTests extends MinimalScalaTest {
+ class MyException extends Exception
+ val e = new Exception("this is an exception")
+
+ "Try()" should {
+ "catch exceptions and lift into the Try type" in {
+ Try[Int](1) mustEqual Success(1)
+ Try[Int] { throw e } mustEqual Failure(e)
+ }
+ }
+
+ "Try" should {
+ "recoverWith" in {
+ val myException = new MyException
+ Success(1) recoverWith { case _ => Success(2) } mustEqual Success(1)
+ Failure(e) recoverWith { case _ => Success(2) } mustEqual Success(2)
+ Failure(e) recoverWith { case _ => Failure(e) } mustEqual Failure(e)
+ }
+
+ "getOrElse" in {
+ Success(1) getOrElse 2 mustEqual 1
+ Failure(e) getOrElse 2 mustEqual 2
+ }
+
+ "orElse" in {
+ Success(1) orElse Success(2) mustEqual Success(1)
+ Failure(e) orElse Success(2) mustEqual Success(2)
+ }
+
+ "map" in {
+ "when there is no exception" in {
+ Success(1) map(1+) mustEqual Success(2)
+ Failure[Int](e) map(1+) mustEqual Failure(e)
+ }
+
+ "when there is an exception" in {
+ Success(1) map(_ => throw e) mustEqual Failure(e)
+
+ val e2 = new Exception
+ Failure[Int](e) map(_ => throw e2) mustEqual Failure(e)
+ }
+ "when there is a fatal exception" in {
+ val e3 = new ThreadDeath
+ intercept[ThreadDeath] {
+ Success(1) map (_ => throw e3)
+ }
+ }
+ }
+
+ "flatMap" in {
+ "when there is no exception" in {
+ Success(1) flatMap(x => Success(1 + x)) mustEqual Success(2)
+ Failure[Int](e) flatMap(x => Success(1 + x)) mustEqual Failure(e)
+ }
+
+ "when there is an exception" in {
+ Success(1).flatMap[Int](_ => throw e) mustEqual Failure(e)
+
+ val e2 = new Exception
+ Failure[Int](e).flatMap[Int](_ => throw e2) mustEqual Failure(e)
+ }
+ "when there is a fatal exception" in {
+ val e3 = new ThreadDeath
+ intercept[ThreadDeath] {
+ Success(1).flatMap[Int](_ => throw e3)
+ }
+ }
+ }
+
+ "flatten" in {
+ "is a Success(Success)" in {
+ Success(Success(1)).flatten mustEqual Success(1)
+ }
+
+ "is a Success(Failure)" in {
+ val e = new Exception
+ Success(Failure(e)).flatten mustEqual Failure(e)
+ }
+
+ "is a Throw" in {
+ val e = new Exception
+ Failure[Try[Int]](e).flatten mustEqual Failure(e)
+ }
+ }
+
+ "for" in {
+ "with no Failure values" in {
+ val result = for {
+ i <- Success(1)
+ j <- Success(1)
+ } yield (i + j)
+ result mustEqual Success(2)
+ }
+
+ "with Failure values" in {
+ "throws before" in {
+ val result = for {
+ i <- Failure[Int](e)
+ j <- Success(1)
+ } yield (i + j)
+ result mustEqual Failure(e)
+ }
+
+ "throws after" in {
+ val result = for {
+ i <- Success(1)
+ j <- Failure[Int](e)
+ } yield (i + j)
+ result mustEqual Failure(e)
+ }
+
+ "returns the FIRST Failure" in {
+ val e2 = new Exception
+ val result = for {
+ i <- Failure[Int](e)
+ j <- Failure[Int](e2)
+ } yield (i + j)
+ result mustEqual Failure(e)
+ }
+ }
+ }
+ }
+}
diff --git a/test/files/jvm/future-spec/main.scala b/test/files/jvm/future-spec/main.scala
new file mode 100644
index 0000000..90048cc
--- /dev/null
+++ b/test/files/jvm/future-spec/main.scala
@@ -0,0 +1,110 @@
+
+
+
+import scala.collection._
+import scala.concurrent._
+import scala.concurrent.duration.Duration
+import java.util.concurrent.{ TimeoutException, CountDownLatch, TimeUnit }
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ FutureTests.check()
+ PromiseTests.check()
+ TryTests.check()
+ }
+
+}
+
+
+trait Output {
+ val buffer = new StringBuilder
+
+ def bufferPrintln(a: Any) = buffer.synchronized {
+ buffer.append(a.toString + "\n")
+ }
+}
+
+
+trait MinimalScalaTest extends Output {
+
+ val throwables = mutable.ArrayBuffer[Throwable]()
+
+ def check() {
+ if (throwables.nonEmpty) println(buffer.toString)
+ }
+
+ implicit def stringops(s: String) = new {
+
+ def should[U](snippets: =>U) = {
+ bufferPrintln(s + " should:")
+ snippets
+ }
+
+ def in[U](snippet: =>U) = {
+ try {
+ bufferPrintln("- " + s)
+ snippet
+ bufferPrintln("[OK] Test passed.")
+ } catch {
+ case e: Throwable =>
+ bufferPrintln("[FAILED] " + e)
+ bufferPrintln(e.getStackTrace().mkString("\n"))
+ throwables += e
+ }
+ }
+
+ }
+
+ implicit def objectops(obj: Any) = new {
+
+ def mustBe(other: Any) = assert(obj == other, obj + " is not " + other)
+ def mustEqual(other: Any) = mustBe(other)
+
+ }
+
+ def intercept[T <: Throwable: Manifest](body: =>Any): T = {
+ try {
+ body
+ throw new Exception("Exception of type %s was not thrown".format(manifest[T]))
+ } catch {
+ case t: Throwable =>
+ if (manifest[T].erasure != t.getClass) throw t
+ else t.asInstanceOf[T]
+ }
+ }
+
+ def checkType[T: Manifest, S](in: Future[T], refmanifest: Manifest[S]): Boolean = manifest[T] == refmanifest
+}
+
+
+object TestLatch {
+ val DefaultTimeout = Duration(5, TimeUnit.SECONDS)
+
+ def apply(count: Int = 1) = new TestLatch(count)
+}
+
+
+class TestLatch(count: Int = 1) extends Awaitable[Unit] {
+ private var latch = new CountDownLatch(count)
+
+ def countDown() = latch.countDown()
+ def isOpen: Boolean = latch.getCount == 0
+ def open() = while (!isOpen) countDown()
+ def reset() = latch = new CountDownLatch(count)
+
+ @throws(classOf[TimeoutException])
+ def ready(atMost: Duration)(implicit permit: CanAwait) = {
+ val opened = latch.await(atMost.toNanos, TimeUnit.NANOSECONDS)
+ if (!opened) throw new TimeoutException("Timeout of %s." format (atMost.toString))
+ this
+ }
+
+ @throws(classOf[Exception])
+ def result(atMost: Duration)(implicit permit: CanAwait): Unit = {
+ ready(atMost)
+ }
+
+}
+
diff --git a/test/files/jvm/inner.scala b/test/files/jvm/inner.scala
index 156d9e6..009d52e 100644
--- a/test/files/jvm/inner.scala
+++ b/test/files/jvm/inner.scala
@@ -71,15 +71,15 @@ object Scalatest {
val tmpfile = new FileWriter(tmpfilename)
tmpfile.write(src)
tmpfile.close
- exec(javac + " -d " + outputdir + " -classpath " + classpath + " " + tmpfilename)
+ exec(javac, "-d", outputdir, "-classpath", classpath, tmpfilename)
}
def java(cname: String) =
- exec(javacmd + " -cp " + classpath + " " + cname)
+ exec(javacmd, "-cp", classpath, cname)
/** Execute cmd, wait for the process to end and pipe it's output to stdout */
- private def exec(cmd: String) {
- val proc = Runtime.getRuntime().exec(cmd)
+ private def exec(args: String*) {
+ val proc = Runtime.getRuntime().exec(args.toArray)
val inp = new BufferedReader(new InputStreamReader(proc.getInputStream))
val errp = new BufferedReader(new InputStreamReader(proc.getErrorStream))
proc.waitFor()
diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check
index 42a8ae8..96b57c7 100644
--- a/test/files/jvm/interpreter.check
+++ b/test/files/jvm/interpreter.check
@@ -35,8 +35,9 @@ four: anotherint = 4
scala> val bogus: anotherint = "hello"
<console>:8: error: type mismatch;
- found : java.lang.String("hello")
+ found : String("hello")
required: anotherint
+ (which expands to) Int
val bogus: anotherint = "hello"
^
@@ -72,8 +73,8 @@ fish: S = fish
scala> // Test that arrays pretty print nicely.
-scala> val arr = Array("What's", "up", "doc?")
-arr: Array[java.lang.String] = Array(What's, up, doc?)
+scala> val arr = Array("What's", "up", "doc?")
+arr: Array[String] = Array(What's, up, doc?)
scala> // Test that arrays pretty print nicely, even when we give them type Any
@@ -96,6 +97,7 @@ scala> case class Bar(n: Int)
defined class Bar
scala> implicit def foo2bar(foo: Foo) = Bar(foo.n)
+warning: there were 1 feature warning(s); re-run with -feature for details
foo2bar: (foo: Foo)Bar
scala> val bar: Bar = Foo(3)
@@ -263,12 +265,13 @@ scala> xs filter (_ == 2)
res4: Array[_] = Array(2)
scala> xs map (_ => "abc")
-res5: Array[java.lang.String] = Array(abc, abc)
+res5: Array[String] = Array(abc, abc)
scala> xs map (x => x)
-res6: scala.collection.mutable.ArraySeq[_] = ArraySeq(1, 2)
+res6: Array[_] = Array(1, 2)
scala> xs map (x => (x, x))
+warning: there were 1 feature warning(s); re-run with -feature for details
res7: Array[(_$1, _$1)] forSome { type _$1 } = Array((1,1), (2,2))
scala>
@@ -300,7 +303,7 @@ scala> <a>
/></a>
res8: scala.xml.Elem =
<a>
-<b c="c" d="dd"></b></a>
+<b c="c" d="dd"/></a>
scala>
@@ -312,9 +315,8 @@ scala> /*
*/
*/
-scala>
-scala>
+You typed two blank lines. Starting a new command.
scala> // multi-line string
@@ -322,7 +324,7 @@ scala> """
hello
there
"""
-res9: java.lang.String =
+res12: String =
"
hello
there
@@ -337,10 +339,10 @@ You typed two blank lines. Starting a new command.
scala> // defining and using quoted names should work (ticket #323)
-scala> def `match` = 1
+scala> def `match` = 1
match: Int
-scala> val x = `match`
+scala> val x = `match`
x: Int = 1
scala>
@@ -368,7 +370,7 @@ scala>
scala>
plusOne: (x: Int)Int
res0: Int = 6
-res1: java.lang.String = after reset
+res0: String = after reset
<console>:8: error: not found: value plusOne
plusOne(5) // should be undefined now
^
diff --git a/test/files/jvm/interpreter.scala b/test/files/jvm/interpreter.scala
index f0bc8b5..f45eb03 100644
--- a/test/files/jvm/interpreter.scala
+++ b/test/files/jvm/interpreter.scala
@@ -2,6 +2,7 @@ import scala.tools.nsc._
import scala.tools.partest.ReplTest
object Test extends ReplTest {
+ override def extraSettings = "-deprecation -Xoldpatmat"
def code = <code>
// basics
3+4
@@ -29,7 +30,7 @@ val atom = new scala.xml.Atom()
class S(override val toString : String)
val fish = new S("fish")
// Test that arrays pretty print nicely.
-val arr = Array("What's", "up", "doc?")
+val arr = Array("What's", "up", "doc?")
// Test that arrays pretty print nicely, even when we give them type Any
val arrInt : Any = Array(1,2,3)
// Test that nested arrays are pretty-printed correctly
@@ -132,8 +133,8 @@ there
// defining and using quoted names should work (ticket #323)
-def `match` = 1
-val x = `match`
+def `match` = 1
+val x = `match`
// multiple classes defined on one line
sealed class Exp; class Fact extends Exp; class Term extends Exp
@@ -146,13 +147,13 @@ def f(e: Exp) = e match {{ // non-exhaustive warning here
def appendix() = {
val settings = new Settings
settings.classpath.value = sys.props("java.class.path")
- val interp = new Interpreter(settings)
+ val interp = new interpreter.IMain(settings)
interp.interpret("def plusOne(x: Int) = x + 1")
interp.interpret("plusOne(5)")
interp.reset()
interp.interpret("\"after reset\"")
interp.interpret("plusOne(5) // should be undefined now")
}
-
+
appendix()
}
diff --git a/test/files/jvm/libnatives.jnilib b/test/files/jvm/libnatives.jnilib
old mode 100644
new mode 100755
diff --git a/test/files/jvm/manifests-new.check b/test/files/jvm/manifests-new.check
new file mode 100644
index 0000000..a1ff949
--- /dev/null
+++ b/test/files/jvm/manifests-new.check
@@ -0,0 +1,58 @@
+x=(), t=TypeTag[Unit], k=TypeRef, s=class Unit
+x=true, t=TypeTag[Boolean], k=TypeRef, s=class Boolean
+x=a, t=TypeTag[Char], k=TypeRef, s=class Char
+x=1, t=TypeTag[Int], k=TypeRef, s=class Int
+x=abc, t=TypeTag[java.lang.String], k=TypeRef, s=class String
+x='abc, t=TypeTag[Symbol], k=TypeRef, s=class Symbol
+
+x=List(()), t=TypeTag[List[Unit]], k=TypeRef, s=class List
+x=List(true), t=TypeTag[List[Boolean]], k=TypeRef, s=class List
+x=List(1), t=TypeTag[List[Int]], k=TypeRef, s=class List
+x=List(abc), t=TypeTag[List[java.lang.String]], k=TypeRef, s=class List
+x=List('abc), t=TypeTag[List[Symbol]], k=TypeRef, s=class List
+
+x=[Z, t=TypeTag[Array[Boolean]], k=TypeRef, s=class Array
+x=[C, t=TypeTag[Array[Char]], k=TypeRef, s=class Array
+x=[I, t=TypeTag[Array[Int]], k=TypeRef, s=class Array
+x=[Ljava.lang.String;, t=TypeTag[Array[java.lang.String]], k=TypeRef, s=class Array
+x=[Lscala.Symbol;, t=TypeTag[Array[Symbol]], k=TypeRef, s=class Array
+
+x=((),()), t=TypeTag[(Unit, Unit)], k=TypeRef, s=class Tuple2
+x=(true,false), t=TypeTag[(Boolean, Boolean)], k=TypeRef, s=class Tuple2
+x=(1,2), t=TypeTag[(Int, Int)], k=TypeRef, s=class Tuple2
+x=(abc,xyz), t=TypeTag[(java.lang.String, java.lang.String)], k=TypeRef, s=class Tuple2
+x=('abc,'xyz), t=TypeTag[(Symbol, Symbol)], k=TypeRef, s=class Tuple2
+
+x=Test$, t=TypeTag[Test.type], k=SingleType, s=object Test
+x=scala.collection.immutable.List$, t=TypeTag[scala.collection.immutable.List.type], k=SingleType, s=object List
+
+x=Foo, t=TypeTag[Foo[Int]], k=TypeRef, s=class Foo
+x=Foo, t=TypeTag[Foo[List[Int]]], k=TypeRef, s=class Foo
+x=Foo, t=TypeTag[Foo[Foo[Int]]], k=TypeRef, s=class Foo
+x=Foo, t=TypeTag[Foo[List[Foo[Int]]]], k=TypeRef, s=class Foo
+
+x=Test1$$anon$1, t=TypeTag[Bar[String]], k=RefinedType, s=<local Test1>
+x=Test1$$anon$2, t=TypeTag[Bar[String]], k=RefinedType, s=<local Test1>
+
+()=()
+true=true
+a=a
+1=1
+'abc='abc
+
+List(())=List(())
+List(true)=List(true)
+List('abc)=List('abc)
+
+Array()=Array()
+Array(true)=Array(true)
+Array(a)=Array(a)
+Array(1)=Array(1)
+
+((),())=((),())
+(true,false)=(true,false)
+
+List(List(1), List(2))=List(List(1), List(2))
+
+Array(Array(1), Array(2))=Array(Array(1), Array(2))
+
diff --git a/test/files/jvm/manifests-new.scala b/test/files/jvm/manifests-new.scala
new file mode 100644
index 0000000..f730be6
--- /dev/null
+++ b/test/files/jvm/manifests-new.scala
@@ -0,0 +1,111 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ Test1
+ Test2
+}
+
+class Foo[T](x: T)
+trait Bar[T] { def f: T }
+
+object Test1 extends TestUtil {
+ print(())
+ print(true)
+ print('a')
+ print(1)
+ print("abc")
+ print('abc)
+ println()
+
+ print(List(()))
+ print(List(true))
+ print(List(1))
+ print(List("abc"))
+ print(List('abc))
+ println()
+
+ //print(Array(())) //Illegal class name "[V" in class file Test$
+ print(Array(true))
+ print(Array('a'))
+ print(Array(1))
+ print(Array("abc"))
+ print(Array('abc))
+ println()
+
+ print(((), ()))
+ print((true, false))
+ print((1, 2))
+ print(("abc", "xyz"))
+ print(('abc, 'xyz))
+ println()
+
+ print(Test)
+ print(List)
+ println()
+
+ print(new Foo(2))
+ print(new Foo(List(2)))
+ print(new Foo(new Foo(2)))
+ print(new Foo(List(new Foo(2))))
+ println()
+
+ print(new Bar[String] { def f = "abc" });
+ {print(new Bar[String] { def f = "abc" })}
+ println()
+}
+
+object Test2 {
+ import scala.util.Marshal._
+ println("()="+load[Unit](dump(())))
+ println("true="+load[Boolean](dump(true)))
+ println("a="+load[Char](dump('a')))
+ println("1="+load[Int](dump(1)))
+ println("'abc="+load[scala.Symbol](dump('abc)))
+ println()
+
+ println("List(())="+load[List[Unit]](dump(List(()))))
+ println("List(true)="+load[List[Boolean]](dump(List(true))))
+ println("List('abc)="+load[List[scala.Symbol]](dump(List('abc))))
+ println()
+
+ def loadArray[T](x: Array[Byte])(implicit t: reflect.ClassTag[Array[T]]) =
+ load[Array[T]](x)(t).deep.toString
+ println("Array()="+loadArray[Int](dump(Array(): Array[Int])))
+ println("Array(true)="+loadArray[Boolean](dump(Array(true))))
+ println("Array(a)="+loadArray[Char](dump(Array('a'))))
+ println("Array(1)="+loadArray[Int](dump(Array(1))))
+ println()
+
+ println("((),())="+load[(Unit, Unit)](dump(((), ()))))
+ println("(true,false)="+load[(Boolean, Boolean)](dump((true, false))))
+ println()
+
+ println("List(List(1), List(2))="+load[List[List[Int]]](dump(List(List(1), List(2)))))
+ println()
+
+ println("Array(Array(1), Array(2))="+loadArray[Array[Int]](dump(Array(Array(1), Array(2)))))
+ println()
+}
+
+trait TestUtil {
+ import java.io._
+ def write[A](o: A): Array[Byte] = {
+ val ba = new ByteArrayOutputStream(512)
+ val out = new ObjectOutputStream(ba)
+ out.writeObject(o)
+ out.close()
+ ba.toByteArray()
+ }
+ def read[A](buffer: Array[Byte]): A = {
+ val in = new ObjectInputStream(new ByteArrayInputStream(buffer))
+ in.readObject().asInstanceOf[A]
+ }
+ def print[T](x: T)(implicit t: TypeTag[T]) {
+ // todo. type tags are not yet serializable
+// val t1: TypeTag[T] = read(write(t))
+ val t1: TypeTag[T] = t
+ val x1 = x.toString.replaceAll("@[0-9a-z]+$", "")
+ println("x="+x1+", t="+t1+", k="+t1.tpe.asInstanceOf[Product].productPrefix+", s="+t1.tpe.typeSymbol.toString)
+ }
+}
\ No newline at end of file
diff --git a/test/files/jvm/manifests.check b/test/files/jvm/manifests-old.check
similarity index 100%
rename from test/files/jvm/manifests.check
rename to test/files/jvm/manifests-old.check
diff --git a/test/files/jvm/manifests-old.scala b/test/files/jvm/manifests-old.scala
new file mode 100644
index 0000000..241966f
--- /dev/null
+++ b/test/files/jvm/manifests-old.scala
@@ -0,0 +1,109 @@
+object Test extends App {
+ Test1
+ Test2
+}
+
+class Foo[T](x: T)
+trait Bar[T] { def f: T }
+
+object Test1 extends TestUtil {
+ print(())
+ print(true)
+ print('a')
+ print(1)
+ print("abc")
+ print('abc)
+ println()
+
+ print(List(()))
+ print(List(true))
+ print(List(1))
+ print(List("abc"))
+ print(List('abc))
+ println()
+
+ //print(Array(())) //Illegal class name "[V" in class file Test$
+ print(Array(true))
+ print(Array('a'))
+ print(Array(1))
+ print(Array("abc"))
+ print(Array('abc))
+ println()
+
+ print(((), ()))
+ print((true, false))
+ print((1, 2))
+ print(("abc", "xyz"))
+ print(('abc, 'xyz))
+ println()
+
+ // Disabled: should these work? changing the inference for objects from
+ // "object Test" to "Test.type" drags in a singleton manifest which for
+ // some reason leads to serialization failure.
+ // print(Test)
+ // print(List)
+ println()
+
+ print(new Foo(2))
+ print(new Foo(List(2)))
+ print(new Foo(new Foo(2)))
+ print(new Foo(List(new Foo(2))))
+ println()
+
+ print(new Bar[String] { def f = "abc" })
+ println()
+}
+
+object Test2 {
+ import scala.util.Marshal._
+ println("()="+load[Unit](dump(())))
+ println("true="+load[Boolean](dump(true)))
+ println("a="+load[Char](dump('a')))
+ println("1="+load[Int](dump(1)))
+ println("'abc="+load[Symbol](dump('abc)))
+ println()
+
+ println("List(())="+load[List[Unit]](dump(List(()))))
+ println("List(true)="+load[List[Boolean]](dump(List(true))))
+ println("List('abc)="+load[List[Symbol]](dump(List('abc))))
+ println()
+
+ def loadArray[T](x: Array[Byte])(implicit m: reflect.Manifest[Array[T]]) =
+ load[Array[T]](x)(m).deep.toString
+ println("Array()="+loadArray[Int](dump(Array(): Array[Int])))
+ println("Array(true)="+loadArray[Boolean](dump(Array(true))))
+ println("Array(a)="+loadArray[Char](dump(Array('a'))))
+ println("Array(1)="+loadArray[Int](dump(Array(1))))
+ println()
+
+ println("((),())="+load[(Unit, Unit)](dump(((), ()))))
+ println("(true,false)="+load[(Boolean, Boolean)](dump((true, false))))
+ println()
+
+ println("List(List(1), List(2))="+load[List[List[Int]]](dump(List(List(1), List(2)))))
+ println()
+
+ println("Array(Array(1), Array(2))="+loadArray[Array[Int]](dump(Array(Array(1), Array(2)))))
+ println()
+}
+
+trait TestUtil {
+ import java.io._
+ def write[A](o: A): Array[Byte] = {
+ val ba = new ByteArrayOutputStream(512)
+ val out = new ObjectOutputStream(ba)
+ out.writeObject(o)
+ out.close()
+ ba.toByteArray()
+ }
+ def read[A](buffer: Array[Byte]): A = {
+ val in = new ObjectInputStream(new ByteArrayInputStream(buffer))
+ in.readObject().asInstanceOf[A]
+ }
+ import scala.reflect._
+ def print[T](x: T)(implicit m: Manifest[T]) {
+ val m1: Manifest[T] = read(write(m))
+ val x1 = x.toString.replaceAll("@[0-9a-z]+$", "")
+ println("x="+x1+", m="+m1)
+ }
+}
diff --git a/test/files/jvm/manifests.scala b/test/files/jvm/manifests.scala
deleted file mode 100644
index 8b6c00f..0000000
--- a/test/files/jvm/manifests.scala
+++ /dev/null
@@ -1,119 +0,0 @@
-object Test extends App {
- Test1
- Test2
- //Test3 // Java 1.5+ only
-}
-
-class Foo[T](x: T)
-trait Bar[T] { def f: T }
-
-object Test1 extends TestUtil {
- print(())
- print(true)
- print('a')
- print(1)
- print("abc")
- print('abc)
- println()
-
- print(List(()))
- print(List(true))
- print(List(1))
- print(List("abc"))
- print(List('abc))
- println()
-
- //print(Array(())) //Illegal class name "[V" in class file Test$
- print(Array(true))
- print(Array('a'))
- print(Array(1))
- print(Array("abc"))
- print(Array('abc))
- println()
-
- print(((), ()))
- print((true, false))
- print((1, 2))
- print(("abc", "xyz"))
- print(('abc, 'xyz))
- println()
-
- // Disabled: should these work? changing the inference for objects from
- // "object Test" to "Test.type" drags in a singleton manifest which for
- // some reason leads to serialization failure.
- // print(Test)
- // print(List)
- println()
-
- print(new Foo(2))
- print(new Foo(List(2)))
- print(new Foo(new Foo(2)))
- print(new Foo(List(new Foo(2))))
- println()
-
- print(new Bar[String] { def f = "abc" })
- println()
-}
-
-object Test2 {
- import scala.util.Marshal._
- println("()="+load[Unit](dump(())))
- println("true="+load[Boolean](dump(true)))
- println("a="+load[Char](dump('a')))
- println("1="+load[Int](dump(1)))
- println("'abc="+load[Symbol](dump('abc)))
- println()
-
- println("List(())="+load[List[Unit]](dump(List(()))))
- println("List(true)="+load[List[Boolean]](dump(List(true))))
- println("List('abc)="+load[List[Symbol]](dump(List('abc))))
- println()
-
- def loadArray[T](x: Array[Byte])(implicit m: reflect.Manifest[Array[T]]) =
- load[Array[T]](x)(m).deepToString
- println("Array()="+loadArray[Int](dump(Array(): Array[Int])))
- println("Array(true)="+loadArray[Boolean](dump(Array(true))))
- println("Array(a)="+loadArray[Char](dump(Array('a'))))
- println("Array(1)="+loadArray[Int](dump(Array(1))))
- println()
-
- println("((),())="+load[(Unit, Unit)](dump(((), ()))))
- println("(true,false)="+load[(Boolean, Boolean)](dump((true, false))))
- println()
-
- println("List(List(1), List(2))="+load[List[List[Int]]](dump(List(List(1), List(2)))))
- println()
-
- println("Array(Array(1), Array(2))="+loadArray[Array[Int]](dump(Array(Array(1), Array(2)))))
- println()
-}
-
-object Test3 extends TestUtil {
- import scala.reflect.Manifest._
- val ct1 = classType(classOf[Char])
- val ct2 = classType(classOf[List[_]], ct1)
- print(ct1)
- //print(ct2) // ??? x=scala.List[char], m=scala.reflect.Manifest[scala.runtime.Nothing$]
- println()
-}
-
-trait TestUtil {
- import java.io._
- def write[A](o: A): Array[Byte] = {
- val ba = new ByteArrayOutputStream(512)
- val out = new ObjectOutputStream(ba)
- out.writeObject(o)
- out.close()
- ba.toByteArray()
- }
- def read[A](buffer: Array[Byte]): A = {
- val in = new ObjectInputStream(new ByteArrayInputStream(buffer))
- in.readObject().asInstanceOf[A]
- }
- import scala.reflect._
- def print[T](x: T)(implicit m: Manifest[T]) {
- val m1: Manifest[T] = read(write(m))
- val x1 = x.toString.replaceAll("@[0-9a-z]+$", "")
- println("x="+x1+", m="+m1)
- }
-}
diff --git a/test/files/jvm/mkLibNatives.bat b/test/files/jvm/mkLibNatives.bat
old mode 100644
new mode 100755
index e11b6ee..623039b
--- a/test/files/jvm/mkLibNatives.bat
+++ b/test/files/jvm/mkLibNatives.bat
@@ -1,70 +1,70 @@
- at echo off
-
-rem ##########################################################################
-rem # Author : Stephane Micheloud
-rem ##########################################################################
-
-rem # For building the -64.dll, you need: Visual C++ Express, Microsoft SDK (to
-rem # get the 64bit compiler / libraries), adapt this script.
-
-rem ##########################################################################
-rem # variables
-
-if "%OS%"=="Windows_NT" @setlocal
-
-rem debug switches are: off=0, on=1
-set DEBUG=0
-set STDOUT=NUL
-if %DEBUG%==1 set STDOUT=CON
-
-set CLASS_NAME=Test$
-set CLASS_DIR=.
-
-set OBJ_NAME=natives
-set LIB_NAME=natives-32
-
-if "%JAVA_HOME%"=="" goto error1
-if "%VSINSTALLDIR%"=="" goto error2
-
-set JAVAH=%JAVA_HOME%\bin\javah
-set JAVAH_OPTIONS=-jni -force -classpath %CLASS_DIR% -o %OBJ_NAME%.h
-
-set CC=%VSINSTALLDIR%\vc\bin\cl
-set CC_OPTIONS=/nologo /c
-set CC_INCLUDES=-I%VSINSTALLDIR%\vc\include -I%JAVA_HOME%\include -I%JAVA_HOME%\include\win32
-
-set LNK_OPTIONS=/nologo /MT /LD
-
-rem variable LIB is used by the C++ linker to find libcmt.lib, ..
-set LIB=%VSINSTALLDIR%\vc\lib
-
-rem ##########################################################################
-rem # commands
-
-del /s/q *.obj *.exp *.lib *.dll 1>%STDOUT%
-
-if %DEBUG%==1 echo %JAVAH% %JAVAH_OPTIONS% %CLASS_NAME%
-%JAVAH% %JAVAH_OPTIONS% %CLASS_NAME%
-
-if %DEBUG%==1 echo %CC% %CC_OPTIONS% %CC_INCLUDES% /Fo%OBJ_NAME%.obj natives.c
-%CC% %CC_OPTIONS% %CC_INCLUDES% /Fo%OBJ_NAME%.obj natives.c 1>%STDOUT%
-
-if %DEBUG%==1 echo %CC% %LNK_OPTIONS% /Fe%LIB_NAME%.dll %OBJ_NAME%.obj
-%CC% %LNK_OPTIONS% /Fe%LIB_NAME%.dll %OBJ_NAME%.obj 1>%STDOUT%
-
-goto end
-
-rem ##########################################################################
-rem # subroutines
-
-:error1
-echo ERROR: environment variable JAVA_HOME is undefined. It should point to your JDK installation.
-goto end
-
-:error2
-echo ERROR: environment variable VSINSTALLDIR is undefined. It should point to your MS Visual Studio installation.
-goto end
-
-:end
-if "%OS%"=="Windows_NT" @endlocal
-
+ at echo off
+
+rem ##########################################################################
+rem # Author : Stephane Micheloud
+rem ##########################################################################
+
+rem # For building the -64.dll, you need: Visual C++ Express, Microsoft SDK (to
+rem # get the 64bit compiler / libraries), adapt this script.
+
+rem ##########################################################################
+rem # variables
+
+if "%OS%"=="Windows_NT" @setlocal
+
+rem debug switches are: off=0, on=1
+set DEBUG=0
+set STDOUT=NUL
+if %DEBUG%==1 set STDOUT=CON
+
+set CLASS_NAME=Test$
+set CLASS_DIR=.
+
+set OBJ_NAME=natives
+set LIB_NAME=natives-32
+
+if "%JAVA_HOME%"=="" goto error1
+if "%VSINSTALLDIR%"=="" goto error2
+
+set JAVAH=%JAVA_HOME%\bin\javah
+set JAVAH_OPTIONS=-jni -force -classpath %CLASS_DIR% -o %OBJ_NAME%.h
+
+set CC=%VSINSTALLDIR%\vc\bin\cl
+set CC_OPTIONS=/nologo /c
+set CC_INCLUDES=-I%VSINSTALLDIR%\vc\include -I%JAVA_HOME%\include -I%JAVA_HOME%\include\win32
+
+set LNK_OPTIONS=/nologo /MT /LD
+
+rem variable LIB is used by the C++ linker to find libcmt.lib, ..
+set LIB=%VSINSTALLDIR%\vc\lib
+
+rem ##########################################################################
+rem # commands
+
+del /s/q *.obj *.exp *.lib *.dll 1>%STDOUT%
+
+if %DEBUG%==1 echo %JAVAH% %JAVAH_OPTIONS% %CLASS_NAME%
+%JAVAH% %JAVAH_OPTIONS% %CLASS_NAME%
+
+if %DEBUG%==1 echo %CC% %CC_OPTIONS% %CC_INCLUDES% /Fo%OBJ_NAME%.obj natives.c
+%CC% %CC_OPTIONS% %CC_INCLUDES% /Fo%OBJ_NAME%.obj natives.c 1>%STDOUT%
+
+if %DEBUG%==1 echo %CC% %LNK_OPTIONS% /Fe%LIB_NAME%.dll %OBJ_NAME%.obj
+%CC% %LNK_OPTIONS% /Fe%LIB_NAME%.dll %OBJ_NAME%.obj 1>%STDOUT%
+
+goto end
+
+rem ##########################################################################
+rem # subroutines
+
+:error1
+echo ERROR: environment variable JAVA_HOME is undefined. It should point to your JDK installation.
+goto end
+
+:error2
+echo ERROR: environment variable VSINSTALLDIR is undefined. It should point to your MS Visual Studio installation.
+goto end
+
+:end
+if "%OS%"=="Windows_NT" @endlocal
+exit /b %errorlevel%
diff --git a/test/files/jvm/mkLibNatives.sh b/test/files/jvm/mkLibNatives.sh
old mode 100644
new mode 100755
diff --git a/test/files/jvm/non-fatal-tests.scala b/test/files/jvm/non-fatal-tests.scala
new file mode 100644
index 0000000..471a9d2
--- /dev/null
+++ b/test/files/jvm/non-fatal-tests.scala
@@ -0,0 +1,47 @@
+import scala.util.control.NonFatal
+
+trait NonFatalTests {
+
+ //NonFatals
+ val nonFatals: Seq[Throwable] =
+ Seq(new StackOverflowError,
+ new RuntimeException,
+ new Exception,
+ new Throwable)
+
+ //Fatals
+ val fatals: Seq[Throwable] =
+ Seq(new InterruptedException,
+ new OutOfMemoryError,
+ new LinkageError,
+ new VirtualMachineError {},
+ new Throwable with scala.util.control.ControlThrowable,
+ new NotImplementedError)
+
+ def testFatalsUsingApply(): Unit = {
+ fatals foreach { t => assert(NonFatal(t) == false) }
+ }
+
+ def testNonFatalsUsingApply(): Unit = {
+ nonFatals foreach { t => assert(NonFatal(t) == true) }
+ }
+
+ def testFatalsUsingUnapply(): Unit = {
+ fatals foreach { t => assert(NonFatal.unapply(t).isEmpty) }
+ }
+
+ def testNonFatalsUsingUnapply(): Unit = {
+ nonFatals foreach { t => assert(NonFatal.unapply(t).isDefined) }
+ }
+
+ testFatalsUsingApply()
+ testNonFatalsUsingApply()
+ testFatalsUsingUnapply()
+ testNonFatalsUsingUnapply()
+}
+
+object Test
+extends App
+with NonFatalTests {
+ System.exit(0)
+}
\ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_ignore_underscore.check b/test/files/jvm/patmat_opt_ignore_underscore.check
new file mode 100644
index 0000000..43f53ab
--- /dev/null
+++ b/test/files/jvm/patmat_opt_ignore_underscore.check
@@ -0,0 +1 @@
+bytecode identical
diff --git a/test/files/jvm/patmat_opt_ignore_underscore.flags b/test/files/jvm/patmat_opt_ignore_underscore.flags
new file mode 100644
index 0000000..1182725
--- /dev/null
+++ b/test/files/jvm/patmat_opt_ignore_underscore.flags
@@ -0,0 +1 @@
+-optimize
\ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala b/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala
new file mode 100644
index 0000000..fa36393
--- /dev/null
+++ b/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala
@@ -0,0 +1,29 @@
+// this class's bytecode, compiled under -optimize is analyzed by the test
+// method a's bytecode should be identical to method b's bytecode
+// this is not the best test for shielding against regressing on this particular issue,
+// but it sets the stage for checking the bytecode emitted by the pattern matcher and
+// comparing it to manually tuned code using if/then/else etc.
+class SameBytecode {
+ case class Foo(x: Any, y: String)
+
+ def a =
+ Foo(1, "a") match {
+ case Foo(_: String, y) => y
+ }
+
+ // this method's body holds the tree that should be generated by the pattern matcher for method a (-Xprint:patmat)
+ // the test checks that bytecode for a and b is identical (modulo line numbers)
+ // we can't diff trees as they are quite different (patmat uses jumps to labels that cannot be expressed in source, for example)
+ // note that the actual tree is quite bad: we do an unnecessary null check, isInstanceOf and local val (x3)
+ // some of these will be fixed soon (the initial null check is for the scrutinee, which is harder to fix in patmat)
+ def b: String = {
+ val x1 = Foo(1, "a")
+ if (x1.ne(null)) {
+ if (x1.x.isInstanceOf[String]) {
+ return x1.y
+ }
+ }
+
+ throw new MatchError(x1)
+ }
+}
\ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_ignore_underscore/test.scala b/test/files/jvm/patmat_opt_ignore_underscore/test.scala
new file mode 100644
index 0000000..6179101
--- /dev/null
+++ b/test/files/jvm/patmat_opt_ignore_underscore/test.scala
@@ -0,0 +1,15 @@
+import scala.tools.partest.BytecodeTest
+
+import scala.tools.nsc.util.JavaClassPath
+import java.io.InputStream
+import scala.tools.asm
+import asm.ClassReader
+import asm.tree.{ClassNode, InsnList}
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("SameBytecode")
+ sameBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"))
+ }
+}
diff --git a/test/files/jvm/patmat_opt_no_nullcheck.check b/test/files/jvm/patmat_opt_no_nullcheck.check
new file mode 100644
index 0000000..43f53ab
--- /dev/null
+++ b/test/files/jvm/patmat_opt_no_nullcheck.check
@@ -0,0 +1 @@
+bytecode identical
diff --git a/test/files/jvm/patmat_opt_no_nullcheck.flags b/test/files/jvm/patmat_opt_no_nullcheck.flags
new file mode 100644
index 0000000..1182725
--- /dev/null
+++ b/test/files/jvm/patmat_opt_no_nullcheck.flags
@@ -0,0 +1 @@
+-optimize
\ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala b/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala
new file mode 100644
index 0000000..3a594c4
--- /dev/null
+++ b/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala
@@ -0,0 +1,24 @@
+// this class's bytecode, compiled under -optimize is analyzed by the test
+// method a's bytecode should be identical to method b's bytecode
+case class Foo(x: Any)
+
+class SameBytecode {
+ def a =
+ (Foo(1): Any) match {
+ case Foo(_: String) =>
+ }
+
+ // there's no null check
+ def b: Unit = {
+ val x1: Any = Foo(1)
+ if (x1.isInstanceOf[Foo]) {
+ val x3 = x1.asInstanceOf[Foo]
+ if (x3.x.isInstanceOf[String]) {
+ val x = ()
+ return
+ }
+ }
+
+ throw new MatchError(x1)
+ }
+}
\ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_no_nullcheck/test.scala b/test/files/jvm/patmat_opt_no_nullcheck/test.scala
new file mode 100644
index 0000000..2927e76
--- /dev/null
+++ b/test/files/jvm/patmat_opt_no_nullcheck/test.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.BytecodeTest
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("SameBytecode")
+ sameBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"))
+ }
+}
diff --git a/test/files/jvm/patmat_opt_primitive_typetest.check b/test/files/jvm/patmat_opt_primitive_typetest.check
new file mode 100644
index 0000000..43f53ab
--- /dev/null
+++ b/test/files/jvm/patmat_opt_primitive_typetest.check
@@ -0,0 +1 @@
+bytecode identical
diff --git a/test/files/jvm/patmat_opt_primitive_typetest.flags b/test/files/jvm/patmat_opt_primitive_typetest.flags
new file mode 100644
index 0000000..49d036a
--- /dev/null
+++ b/test/files/jvm/patmat_opt_primitive_typetest.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala b/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala
new file mode 100644
index 0000000..e5db6c4
--- /dev/null
+++ b/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala
@@ -0,0 +1,24 @@
+// this class's bytecode, compiled under -optimize is analyzed by the test
+// method a's bytecode should be identical to method b's bytecode
+class SameBytecode {
+ case class Foo(x: Int, y: String)
+
+ def a =
+ Foo(1, "a") match {
+ case Foo(_: Int, y) => y
+ }
+
+ // this method's body holds the tree that should be generated by the pattern matcher for method a (-Xprint:patmat)
+ // the test checks that bytecode for a and b is identical (modulo line numbers)
+ // we can't diff trees as they are quite different (patmat uses jumps to labels that cannot be expressed in source, for example)
+ // note that the actual tree is quite bad: we do an unnecessary null check, and local val (x3)
+ // some of these will be fixed soon (the initial null check is for the scrutinee, which is harder to fix in patmat)
+ def b: String = {
+ val x1 = Foo(1, "a")
+ if (x1.ne(null)) {
+ return x1.y
+ }
+
+ throw new MatchError(x1)
+ }
+}
\ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_primitive_typetest/test.scala b/test/files/jvm/patmat_opt_primitive_typetest/test.scala
new file mode 100644
index 0000000..2927e76
--- /dev/null
+++ b/test/files/jvm/patmat_opt_primitive_typetest/test.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.BytecodeTest
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("SameBytecode")
+ sameBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"))
+ }
+}
diff --git a/test/files/jvm/protectedacc.scala b/test/files/jvm/protectedacc.scala
index 525725f..f213e0d 100644
--- a/test/files/jvm/protectedacc.scala
+++ b/test/files/jvm/protectedacc.scala
@@ -82,7 +82,7 @@ package p {
Console.println("meth1(1) = " + meth1(1));
Console.println("meth1(1.0) = " + meth1(1.0));
// test accesses from closures
- for (val x <- 1 until 3)
+ for (x <- 1 until 3)
Console.println("meth2(1)(1) = " + meth2(1)("prefix: "));
Console.println("meth3 = " + meth3.getClass);
diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala
new file mode 100644
index 0000000..a306a7d
--- /dev/null
+++ b/test/files/jvm/scala-concurrent-tck.scala
@@ -0,0 +1,780 @@
+import scala.concurrent.{
+ Future,
+ Promise,
+ TimeoutException,
+ SyncVar,
+ ExecutionException,
+ ExecutionContext,
+ CanAwait,
+ Await
+}
+import scala.concurrent.{ future, promise, blocking }
+import scala.util.{ Try, Success, Failure }
+import scala.concurrent.duration.Duration
+import scala.reflect.{ classTag, ClassTag }
+import scala.tools.partest.TestUtil.intercept
+
+trait TestBase {
+ trait Done { def apply(proof: => Boolean): Unit }
+ def once(body: Done => Unit) {
+ import java.util.concurrent.{ LinkedBlockingQueue, TimeUnit }
+ val q = new LinkedBlockingQueue[Try[Boolean]]
+ body(new Done {
+ def apply(proof: => Boolean): Unit = q offer Try(proof)
+ })
+ assert(q.poll(2000, TimeUnit.MILLISECONDS).get)
+ // Check that we don't get more than one completion
+ assert(q.poll(50, TimeUnit.MILLISECONDS) eq null)
+ }
+}
+
+
+trait FutureCallbacks extends TestBase {
+ import ExecutionContext.Implicits._
+
+ def testOnSuccess(): Unit = once {
+ done =>
+ var x = 0
+ val f = future { x = 1 }
+ f onSuccess { case _ => done(x == 1) }
+ }
+
+ def testOnSuccessWhenCompleted(): Unit = once {
+ done =>
+ var x = 0
+ val f = future { x = 1 }
+ f onSuccess {
+ case _ if x == 1 =>
+ x = 2
+ f onSuccess { case _ => done(x == 2) }
+ }
+ }
+
+ def testOnSuccessWhenFailed(): Unit = once {
+ done =>
+ val f = future[Unit] { throw new Exception }
+ f onSuccess { case _ => done(false) }
+ f onFailure { case _ => done(true) }
+ }
+
+ def testOnFailure(): Unit = once {
+ done =>
+ val f = future[Unit] { throw new Exception }
+ f onSuccess { case _ => done(false) }
+ f onFailure { case _ => done(true) }
+ }
+
+ def testOnFailureWhenSpecialThrowable(num: Int, cause: Throwable): Unit = once {
+ done =>
+ val f = future[Unit] { throw cause }
+ f onSuccess { case _ => done(false) }
+ f onFailure {
+ case e: ExecutionException if e.getCause == cause => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testOnFailureWhenTimeoutException(): Unit = once {
+ done =>
+ val f = future[Unit] { throw new TimeoutException() }
+ f onSuccess { case _ => done(false) }
+ f onFailure {
+ case e: TimeoutException => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testThatNestedCallbacksDoNotYieldStackOverflow(): Unit = {
+ val promise = Promise[Int]
+ (0 to 10000).map(Future(_)).foldLeft(promise.future)((f1, f2) => f2.flatMap(i => f1))
+ promise.success(-1)
+ }
+
+ testOnSuccess()
+ testOnSuccessWhenCompleted()
+ testOnSuccessWhenFailed()
+ testOnFailure()
+ testOnFailureWhenSpecialThrowable(5, new Error)
+ // testOnFailureWhenSpecialThrowable(6, new scala.util.control.ControlThrowable { })
+ //TODO: this test is currently problematic, because NonFatal does not match InterruptedException
+ //testOnFailureWhenSpecialThrowable(7, new InterruptedException)
+ testThatNestedCallbacksDoNotYieldStackOverflow()
+ testOnFailureWhenTimeoutException()
+}
+
+
+trait FutureCombinators extends TestBase {
+ import ExecutionContext.Implicits._
+
+ def testMapSuccess(): Unit = once {
+ done =>
+ val f = future { 5 }
+ val g = f map { x => "result: " + x }
+ g onSuccess { case s => done(s == "result: 5") }
+ g onFailure { case _ => done(false) }
+ }
+
+ def testMapFailure(): Unit = once {
+ done =>
+ val f = future[Unit] { throw new Exception("exception message") }
+ val g = f map { x => "result: " + x }
+ g onSuccess { case _ => done(false) }
+ g onFailure { case t => done(t.getMessage() == "exception message") }
+ }
+
+ def testMapSuccessPF(): Unit = once {
+ done =>
+ val f = future { 5 }
+ val g = f map { case r => "result: " + r }
+ g onSuccess { case s => done(s == "result: 5") }
+ g onFailure { case _ => done(false) }
+ }
+
+ def testTransformSuccess(): Unit = once {
+ done =>
+ val f = future { 5 }
+ val g = f.transform(r => "result: " + r, identity)
+ g onSuccess { case s => done(s == "result: 5") }
+ g onFailure { case _ => done(false) }
+ }
+
+ def testTransformSuccessPF(): Unit = once {
+ done =>
+ val f = future { 5 }
+ val g = f.transform( { case r => "result: " + r }, identity)
+ g onSuccess { case s => done(s == "result: 5") }
+ g onFailure { case _ => done(false) }
+ }
+
+def testTransformFailure(): Unit = once {
+ done =>
+ val transformed = new Exception("transformed")
+ val f = future { throw new Exception("expected") }
+ val g = f.transform(identity, _ => transformed)
+ g onSuccess { case _ => done(false) }
+ g onFailure { case e => done(e eq transformed) }
+ }
+
+ def testTransformFailurePF(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ val transformed = new Exception("transformed")
+ val f = future[Unit] { throw e }
+ val g = f.transform(identity, { case `e` => transformed })
+ g onSuccess { case _ => done(false) }
+ g onFailure { case e => done(e eq transformed) }
+ }
+
+ def testFoldFailure(): Unit = once {
+ done =>
+ val f = future[Unit] { throw new Exception("expected") }
+ val g = f.transform(r => "result: " + r, identity)
+ g onSuccess { case _ => done(false) }
+ g onFailure { case t => done(t.getMessage() == "expected") }
+ }
+
+ def testFlatMapSuccess(): Unit = once {
+ done =>
+ val f = future { 5 }
+ val g = f flatMap { _ => future { 10 } }
+ g onSuccess { case x => done(x == 10) }
+ g onFailure { case _ => done(false) }
+ }
+
+ def testFlatMapFailure(): Unit = once {
+ done =>
+ val f = future[Unit] { throw new Exception("expected") }
+ val g = f flatMap { _ => future { 10 } }
+ g onSuccess { case _ => done(false) }
+ g onFailure { case t => done(t.getMessage() == "expected") }
+ }
+
+ def testFilterSuccess(): Unit = once {
+ done =>
+ val f = future { 4 }
+ val g = f filter { _ % 2 == 0 }
+ g onSuccess { case x: Int => done(x == 4) }
+ g onFailure { case _ => done(false) }
+ }
+
+ def testFilterFailure(): Unit = once {
+ done =>
+ val f = future { 4 }
+ val g = f filter { _ % 2 == 1 }
+ g onSuccess { case x: Int => done(false) }
+ g onFailure {
+ case e: NoSuchElementException => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testCollectSuccess(): Unit = once {
+ done =>
+ val f = future { -5 }
+ val g = f collect { case x if x < 0 => -x }
+ g onSuccess { case x: Int => done(x == 5) }
+ g onFailure { case _ => done(false) }
+ }
+
+ def testCollectFailure(): Unit = once {
+ done =>
+ val f = future { -5 }
+ val g = f collect { case x if x > 0 => x * 2 }
+ g onSuccess { case _ => done(false) }
+ g onFailure {
+ case e: NoSuchElementException => done(true)
+ case _ => done(false)
+ }
+ }
+
+ /* TODO: Test for NonFatal in collect (more of a regression test at this point).
+ */
+
+ def testForeachSuccess(): Unit = once {
+ done =>
+ val p = promise[Int]()
+ val f = future[Int] { 5 }
+ f foreach { x => p.success(x * 2) }
+ val g = p.future
+
+ g.onSuccess { case res: Int => done(res == 10) }
+ g.onFailure { case _ => done(false) }
+ }
+
+ def testForeachFailure(): Unit = once {
+ done =>
+ val p = promise[Int]()
+ val f = future[Int] { throw new Exception }
+ f foreach { x => p.success(x * 2) }
+ f onFailure { case _ => p.failure(new Exception) }
+ val g = p.future
+
+ g.onSuccess { case _ => done(false) }
+ g.onFailure { case _ => done(true) }
+ }
+
+ def testRecoverSuccess(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ } recover {
+ case re: RuntimeException =>
+ "recovered" }
+ f onSuccess { case x => done(x == "recovered") }
+ f onFailure { case any => done(false) }
+ }
+
+ def testRecoverFailure(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ } recover {
+ case te: TimeoutException => "timeout"
+ }
+ f onSuccess { case _ => done(false) }
+ f onFailure { case any => done(any == cause) }
+ }
+
+ def testRecoverWithSuccess(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ } recoverWith {
+ case re: RuntimeException =>
+ future { "recovered" }
+ }
+ f onSuccess { case x => done(x == "recovered") }
+ f onFailure { case any => done(false) }
+ }
+
+ def testRecoverWithFailure(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future {
+ throw cause
+ } recoverWith {
+ case te: TimeoutException =>
+ future { "timeout" }
+ }
+ f onSuccess { case x => done(false) }
+ f onFailure { case any => done(any == cause) }
+ }
+
+ def testZipSuccess(): Unit = once {
+ done =>
+ val f = future { 5 }
+ val g = future { 6 }
+ val h = f zip g
+ h onSuccess { case (l: Int, r: Int) => done(l+r == 11) }
+ h onFailure { case _ => done(false) }
+ }
+
+ def testZipFailureLeft(): Unit = once {
+ done =>
+ val cause = new Exception("expected")
+ val f = future { throw cause }
+ val g = future { 6 }
+ val h = f zip g
+ h onSuccess { case _ => done(false) }
+ h onFailure { case e: Exception => done(e.getMessage == "expected") }
+ }
+
+ def testZipFailureRight(): Unit = once {
+ done =>
+ val cause = new Exception("expected")
+ val f = future { 5 }
+ val g = future { throw cause }
+ val h = f zip g
+ h onSuccess { case _ => done(false) }
+ h onFailure { case e: Exception => done(e.getMessage == "expected") }
+ }
+
+ def testFallbackTo(): Unit = once {
+ done =>
+ val f = future { sys.error("failed") }
+ val g = future { 5 }
+ val h = f fallbackTo g
+ h onSuccess { case x: Int => done(x == 5) }
+ h onFailure { case _ => done(false) }
+ }
+
+ def testFallbackToFailure(): Unit = once {
+ done =>
+ val cause = new Exception
+ val f = future { throw cause }
+ val g = future { sys.error("failed") }
+ val h = f fallbackTo g
+
+ h onSuccess { case _ => done(false) }
+ h onFailure { case e => done(e eq cause) }
+ }
+
+ testMapSuccess()
+ testMapFailure()
+ testFlatMapSuccess()
+ testFlatMapFailure()
+ testFilterSuccess()
+ testFilterFailure()
+ testCollectSuccess()
+ testCollectFailure()
+ testForeachSuccess()
+ testForeachFailure()
+ testRecoverSuccess()
+ testRecoverFailure()
+ testRecoverWithSuccess()
+ testRecoverWithFailure()
+ testZipSuccess()
+ testZipFailureLeft()
+ testZipFailureRight()
+ testFallbackTo()
+ testFallbackToFailure()
+ testTransformSuccess()
+ testTransformSuccessPF()
+}
+
+
+trait FutureProjections extends TestBase {
+ import ExecutionContext.Implicits._
+
+ def testFailedFailureOnComplete(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future { throw cause }
+ f.failed onComplete {
+ case Success(t) => done(t == cause)
+ case Failure(t) => done(false)
+ }
+ }
+
+ def testFailedFailureOnSuccess(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future { throw cause }
+ f.failed onSuccess { case t => done(t == cause) }
+ }
+
+ def testFailedSuccessOnComplete(): Unit = once {
+ done =>
+ val f = future { 0 }
+ f.failed onComplete {
+ case Failure(_: NoSuchElementException) => done(true)
+ case _ => done(false)
+ }
+ }
+
+ def testFailedSuccessOnFailure(): Unit = once {
+ done =>
+ val f = future { 0 }
+ f.failed onFailure {
+ case e: NoSuchElementException => done(true)
+ case _ => done(false)
+ }
+ f.failed onSuccess { case _ => done(false) }
+ }
+
+ def testFailedFailureAwait(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future { throw cause }
+ done(Await.result(f.failed, Duration(500, "ms")) == cause)
+ }
+
+ def testFailedSuccessAwait(): Unit = once {
+ done =>
+ val f = future { 0 }
+ try {
+ Await.result(f.failed, Duration(500, "ms"))
+ done(false)
+ } catch {
+ case nsee: NoSuchElementException => done(true)
+ case _: Throwable => done(false)
+ }
+ }
+
+ def testAwaitPositiveDuration(): Unit = once { done =>
+ val p = Promise[Int]()
+ val f = p.future
+ future {
+ intercept[IllegalArgumentException] { Await.ready(f, Duration.Undefined) }
+ p.success(0)
+ Await.ready(f, Duration.Zero)
+ Await.ready(f, Duration(500, "ms"))
+ Await.ready(f, Duration.Inf)
+ done(true)
+ } onFailure { case x => done(throw x) }
+ }
+
+ def testAwaitNegativeDuration(): Unit = once { done =>
+ val f = Promise().future
+ future {
+ intercept[TimeoutException] { Await.ready(f, Duration.Zero) }
+ intercept[TimeoutException] { Await.ready(f, Duration.MinusInf) }
+ intercept[TimeoutException] { Await.ready(f, Duration(-500, "ms")) }
+ done(true)
+ } onFailure { case x => done(throw x) }
+ }
+
+ testFailedFailureOnComplete()
+ testFailedFailureOnSuccess()
+ testFailedSuccessOnComplete()
+ testFailedSuccessOnFailure()
+ testFailedFailureAwait()
+ testFailedSuccessAwait()
+ testAwaitPositiveDuration()
+ testAwaitNegativeDuration()
+}
+
+
+trait Blocking extends TestBase {
+ import ExecutionContext.Implicits._
+
+ def testAwaitSuccess(): Unit = once {
+ done =>
+ val f = future { 0 }
+ done(Await.result(f, Duration(500, "ms")) == 0)
+ }
+
+ def testAwaitFailure(): Unit = once {
+ done =>
+ val cause = new RuntimeException
+ val f = future { throw cause }
+ try {
+ Await.result(f, Duration(500, "ms"))
+ done(false)
+ } catch {
+ case t: Throwable => done(t == cause)
+ }
+ }
+
+ def testFQCNForAwaitAPI(): Unit = once {
+ done =>
+ done(classOf[CanAwait].getName == "scala.concurrent.CanAwait" &&
+ Await.getClass.getName == "scala.concurrent.Await")
+ }
+
+ testAwaitSuccess()
+ testAwaitFailure()
+ testFQCNForAwaitAPI()
+}
+
+trait BlockContexts extends TestBase {
+ import ExecutionContext.Implicits._
+ import scala.concurrent.{ Await, Awaitable, BlockContext }
+
+ private def getBlockContext(body: => BlockContext): BlockContext = {
+ Await.result(Future { body }, Duration(500, "ms"))
+ }
+
+ // test outside of an ExecutionContext
+ def testDefaultOutsideFuture(): Unit = {
+ val bc = BlockContext.current
+ assert(bc.getClass.getName.contains("DefaultBlockContext"))
+ }
+
+ // test BlockContext in our default ExecutionContext
+ def testDefaultFJP(): Unit = {
+ val bc = getBlockContext(BlockContext.current)
+ assert(bc.isInstanceOf[scala.concurrent.forkjoin.ForkJoinWorkerThread])
+ }
+
+ // test BlockContext inside BlockContext.withBlockContext
+ def testPushCustom(): Unit = {
+ val orig = BlockContext.current
+ val customBC = new BlockContext() {
+ override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = orig.blockOn(thunk)
+ }
+
+ val bc = getBlockContext({
+ BlockContext.withBlockContext(customBC) {
+ BlockContext.current
+ }
+ })
+
+ assert(bc eq customBC)
+ }
+
+ // test BlockContext after a BlockContext.push
+ def testPopCustom(): Unit = {
+ val orig = BlockContext.current
+ val customBC = new BlockContext() {
+ override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = orig.blockOn(thunk)
+ }
+
+ val bc = getBlockContext({
+ BlockContext.withBlockContext(customBC) {}
+ BlockContext.current
+ })
+
+ assert(bc ne customBC)
+ }
+
+ testDefaultOutsideFuture()
+ testDefaultFJP()
+ testPushCustom()
+ testPopCustom()
+}
+
+trait Promises extends TestBase {
+ import ExecutionContext.Implicits._
+
+ def testSuccess(): Unit = once {
+ done =>
+ val p = promise[Int]()
+ val f = p.future
+
+ f onSuccess { case x => done(x == 5) }
+ f onFailure { case any => done(false) }
+
+ p.success(5)
+ }
+
+ def testFailure(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ val p = promise[Int]()
+ val f = p.future
+
+ f onSuccess { case x => done(false) }
+ f onFailure { case any => done(any eq e) }
+
+ p.failure(e)
+ }
+
+ testSuccess()
+ testFailure()
+}
+
+
+trait Exceptions extends TestBase {
+ import ExecutionContext.Implicits._
+
+}
+
+trait CustomExecutionContext extends TestBase {
+ import scala.concurrent.{ ExecutionContext, Awaitable }
+
+ def defaultEC = ExecutionContext.global
+
+ val inEC = new java.lang.ThreadLocal[Int]() {
+ override def initialValue = 0
+ }
+
+ def enterEC() = inEC.set(inEC.get + 1)
+ def leaveEC() = inEC.set(inEC.get - 1)
+ def assertEC() = assert(inEC.get > 0)
+ def assertNoEC() = assert(inEC.get == 0)
+
+ class CountingExecutionContext extends ExecutionContext {
+ val _count = new java.util.concurrent.atomic.AtomicInteger(0)
+ def count = _count.get
+
+ def delegate = ExecutionContext.global
+
+ override def execute(runnable: Runnable) = {
+ _count.incrementAndGet()
+ val wrapper = new Runnable() {
+ override def run() = {
+ enterEC()
+ try {
+ runnable.run()
+ } finally {
+ leaveEC()
+ }
+ }
+ }
+ delegate.execute(wrapper)
+ }
+
+ override def reportFailure(t: Throwable): Unit = {
+ System.err.println("Failure: " + t.getClass.getSimpleName + ": " + t.getMessage)
+ delegate.reportFailure(t)
+ }
+ }
+
+ def countExecs(block: (ExecutionContext) => Unit): Int = {
+ val context = new CountingExecutionContext()
+ block(context)
+ context.count
+ }
+
+ def testOnSuccessCustomEC(): Unit = {
+ val count = countExecs { implicit ec =>
+ blocking {
+ once { done =>
+ val f = future(assertNoEC())(defaultEC)
+ f onSuccess {
+ case _ =>
+ assertEC()
+ done(true)
+ }
+ assertNoEC()
+ }
+ }
+ }
+
+ // should be onSuccess, but not future body
+ assert(count == 1)
+ }
+
+ def testKeptPromiseCustomEC(): Unit = {
+ val count = countExecs { implicit ec =>
+ blocking {
+ once { done =>
+ val f = Promise.successful(10).future
+ f onSuccess {
+ case _ =>
+ assertEC()
+ done(true)
+ }
+ }
+ }
+ }
+
+ // should be onSuccess called once in proper EC
+ assert(count == 1)
+ }
+
+ def testCallbackChainCustomEC(): Unit = {
+ val count = countExecs { implicit ec =>
+ blocking {
+ once { done =>
+ assertNoEC()
+ val addOne = { x: Int => assertEC(); x + 1 }
+ val f = Promise.successful(10).future
+ f.map(addOne).filter { x =>
+ assertEC()
+ x == 11
+ } flatMap { x =>
+ Promise.successful(x + 1).future.map(addOne).map(addOne)
+ } onComplete {
+ case Failure(t) =>
+ done(throw new AssertionError("error in test: " + t.getMessage, t))
+ case Success(x) =>
+ assertEC()
+ done(x == 14)
+ }
+ assertNoEC()
+ }
+ }
+ }
+
+ // the count is not defined (other than >=1)
+ // due to the batching optimizations.
+ assert(count >= 1)
+ }
+
+ testOnSuccessCustomEC()
+ testKeptPromiseCustomEC()
+ testCallbackChainCustomEC()
+}
+
+trait ExecutionContextPrepare extends TestBase {
+ val theLocal = new ThreadLocal[String] {
+ override protected def initialValue(): String = ""
+ }
+
+ class PreparingExecutionContext extends ExecutionContext {
+ def delegate = ExecutionContext.global
+
+ override def execute(runnable: Runnable): Unit =
+ delegate.execute(runnable)
+
+ override def prepare(): ExecutionContext = {
+ // save object stored in ThreadLocal storage
+ val localData = theLocal.get
+ new PreparingExecutionContext {
+ override def execute(runnable: Runnable): Unit = {
+ val wrapper = new Runnable {
+ override def run(): Unit = {
+ // now we're on the new thread
+ // put localData into theLocal
+ theLocal.set(localData)
+ runnable.run()
+ }
+ }
+ delegate.execute(wrapper)
+ }
+ }
+ }
+
+ override def reportFailure(t: Throwable): Unit =
+ delegate.reportFailure(t)
+ }
+
+ implicit val ec = new PreparingExecutionContext
+
+ def testOnComplete(): Unit = once {
+ done =>
+ theLocal.set("secret")
+ val fut = future { 42 }
+ fut onComplete { case _ => done(theLocal.get == "secret") }
+ }
+
+ def testMap(): Unit = once {
+ done =>
+ theLocal.set("secret2")
+ val fut = future { 42 }
+ fut map { x => done(theLocal.get == "secret2") }
+ }
+
+ testOnComplete()
+ testMap()
+}
+
+object Test
+extends App
+with FutureCallbacks
+with FutureCombinators
+with FutureProjections
+with Promises
+with BlockContexts
+with Exceptions
+with CustomExecutionContext
+with ExecutionContextPrepare
+{
+ System.exit(0)
+}
+
diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check
new file mode 100644
index 0000000..f886cfe
--- /dev/null
+++ b/test/files/jvm/serialization-new.check
@@ -0,0 +1,337 @@
+a1 = Array[1,2,3]
+_a1 = Array[1,2,3]
+arrayEquals(a1, _a1): true
+
+e1 = Left(1)
+_e1 = Left(1)
+e1 eq _e1: false, _e1 eq e1: false
+e1 equals _e1: true, _e1 equals e1: true
+
+x7 = RoundingMode
+y7 = RoundingMode
+x7 eq y7: true, y7 eq x7: true
+x7 equals y7: true, y7 equals x7: true
+
+x8 = WeekDay
+y8 = WeekDay
+x8 eq y8: true, y8 eq x8: true
+x8 equals y8: true, y8 equals x8: true
+
+x9 = UP
+y9 = UP
+x9 eq y9: true, y9 eq x9: true
+x9 equals y9: true, y9 equals x9: true
+
+x10 = Monday
+y10 = Monday
+x10 eq y10: true, y10 eq x10: true
+x10 equals y10: true, y10 equals x10: true
+
+x9 eq x10: false, x10 eq x9: false
+x9 equals x10: false, x10 equals x9: false
+x9 eq y10: false, y10 eq x9: false
+x9 equals y10: false, y10 equals x9: false
+
+f1 = <na>
+_f1 = <na>
+f1(2): 4, _f1(2): 4
+
+xs0 = List(1, 2, 3)
+_xs0 = List(1, 2, 3)
+xs0 eq _xs0: false, _xs0 eq xs0: false
+xs0 equals _xs0: true, _xs0 equals xs0: true
+
+xs1 = List()
+_xs1 = List()
+xs1 eq _xs1: true, _xs1 eq xs1: true
+
+o1 = None
+_o1 = None
+o1 eq _o1: true, _o1 eq o1: true
+
+o2 = Some(1)
+_o2 = Some(1)
+o2 eq _o2: false, _o2 eq o2: false
+o2 equals _o2: true, _o2 equals o2: true
+
+s1 = 'hello
+_s1 = 'hello
+s1 eq _s1: true, _s1 eq s1: true
+s1 equals _s1: true, _s1 equals s1: true
+
+t1 = (BannerLimit,12345)
+_t1 = (BannerLimit,12345)
+t1 eq _t1: false, _t1 eq t1: false
+t1 equals _t1: true, _t1 equals t1: true
+
+x = BitSet(1, 2)
+y = BitSet(1, 2)
+x equals y: true, y equals x: true
+
+x = BitSet(2, 3)
+y = BitSet(2, 3)
+x equals y: true, y equals x: true
+
+x = Map(1 -> A, 2 -> B, 3 -> C)
+y = Map(1 -> A, 2 -> B, 3 -> C)
+x equals y: true, y equals x: true
+
+x = Set(1, 2)
+y = Set(1, 2)
+x equals y: true, y equals x: true
+
+x = List((buffers,20), (layers,2), (title,3))
+y = List((buffers,20), (layers,2), (title,3))
+x equals y: true, y equals x: true
+
+x = Map(buffers -> 20, layers -> 2, title -> 3)
+y = Map(buffers -> 20, layers -> 2, title -> 3)
+x equals y: true, y equals x: true
+
+x = ListSet(5, 3)
+y = ListSet(5, 3)
+x equals y: true, y equals x: true
+
+x = Queue(a, b, c)
+y = Queue(a, b, c)
+x equals y: true, y equals x: true
+
+x = Range(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+y = Range(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+x equals y: true, y equals x: true
+
+x = NumericRange(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+y = NumericRange(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+x equals y: true, y equals x: true
+
+x = Map(1 -> A, 2 -> B, 3 -> C)
+y = Map(1 -> A, 2 -> B, 3 -> C)
+x equals y: true, y equals x: true
+
+x = TreeSet(1, 2, 3)
+y = TreeSet(1, 2, 3)
+x equals y: true, y equals x: true
+
+x = Stack(c, b, a)
+y = Stack(c, b, a)
+x equals y: true, y equals x: true
+
+x = Stream(0, ?)
+y = Stream(0, ?)
+x equals y: true, y equals x: true
+
+x = Map(42 -> FortyTwo)
+y = Map(42 -> FortyTwo)
+x equals y: true, y equals x: true
+
+x = TreeSet(0, 2)
+y = TreeSet(0, 2)
+x equals y: true, y equals x: true
+
+x = Vector('a, 'b, 'c)
+y = Vector('a, 'b, 'c)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer(one, two)
+y = ArrayBuffer(one, two)
+x equals y: true, y equals x: true
+
+x = ArrayBuilder.ofLong
+y = ArrayBuilder.ofLong
+x equals y: true, y equals x: true
+
+x = ArrayBuilder.ofFloat
+y = ArrayBuilder.ofFloat
+x equals y: true, y equals x: true
+
+x = ArraySeq(1, 2, 3)
+y = ArraySeq(1, 2, 3)
+x equals y: true, y equals x: true
+
+x = ArrayStack(3, 2, 20)
+y = ArrayStack(3, 2, 20)
+x equals y: true, y equals x: true
+
+x = BitSet(0, 8, 9)
+y = BitSet(0, 8, 9)
+x equals y: true, y equals x: true
+
+x = Map(A -> 1, C -> 3, B -> 2)
+y = Map(A -> 1, C -> 3, B -> 2)
+x equals y: true, y equals x: true
+
+x = Set(buffers, title, layers)
+y = Set(buffers, title, layers)
+x equals y: true, y equals x: true
+
+x = History()
+y = History()
+x equals y: true, y equals x: true
+
+x = Map(Linked -> 1, Hash -> 2, Map -> 3)
+y = Map(Linked -> 1, Hash -> 2, Map -> 3)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+y = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+x equals y: true, y equals x: true
+
+x = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+y = List((Linked,1), (Hash,2), (Map,3))
+x equals y: true, y equals x: true
+
+x = Set(layers, buffers, title)
+y = Set(layers, buffers, title)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer(layers, buffers, title)
+y = ArrayBuffer(layers, buffers, title)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer(layers, buffers, title)
+y = List(layers, buffers, title)
+x equals y: true, y equals x: true
+
+x = ListBuffer(white, black)
+y = ListBuffer(white, black)
+x equals y: true, y equals x: true
+
+x = Queue(20, 2, 3)
+y = Queue(20, 2, 3)
+x equals y: true, y equals x: true
+
+x = Stack(3, 2, 20)
+y = Stack(3, 2, 20)
+x equals y: true, y equals x: true
+
+x = abc
+y = abc
+x equals y: true, y equals x: true
+
+x = WrappedArray(1, 2, 3)
+y = WrappedArray(1, 2, 3)
+x equals y: true, y equals x: true
+
+x = TreeSet(1, 2, 3)
+y = TreeSet(1, 2, 3)
+x equals y: true, y equals x: true
+
+x = TrieMap(1 -> one, 2 -> two, 3 -> three)
+y = TrieMap(1 -> one, 2 -> two, 3 -> three)
+x equals y: true, y equals x: true
+
+x = xml:src="hello"
+y = xml:src="hello"
+x equals y: true, y equals x: true
+
+x = <title></title>
+y = <title></title>
+x equals y: true, y equals x: true
+
+x = <html><title>title</title><body></body></html>
+y = <html><title>title</title><body></body></html>
+x equals y: true, y equals x: true
+
+x = <html>
+ <body>
+ <table cellpadding="2" cellspacing="0">
+ <tr>
+ <th>Last Name</th>
+ <th>First Name</th>
+ </tr>
+ <tr>
+ <td> Tom </td>
+ <td> 20 </td>
+ </tr><tr>
+ <td> Bob </td>
+ <td> 22 </td>
+ </tr><tr>
+ <td> James </td>
+ <td> 19 </td>
+ </tr>
+ </table>
+ </body>
+ </html>
+y = <html>
+ <body>
+ <table cellpadding="2" cellspacing="0">
+ <tr>
+ <th>Last Name</th>
+ <th>First Name</th>
+ </tr>
+ <tr>
+ <td> Tom </td>
+ <td> 20 </td>
+ </tr><tr>
+ <td> Bob </td>
+ <td> 22 </td>
+ </tr><tr>
+ <td> James </td>
+ <td> 19 </td>
+ </tr>
+ </table>
+ </body>
+ </html>
+x equals y: true, y equals x: true
+
+x = Tim
+y = Tim
+x equals y: true, y equals x: true
+
+x = Bob
+y = Bob
+x equals y: true, y equals x: true
+
+x = John
+y = John
+x equals y: true, y equals x: true
+
+x = Bill
+y = Bill
+x equals y: true, y equals x: true
+
+x = Paul
+y = Paul
+x equals y: true, y equals x: true
+
+1
+2
+1
+2
+
+x = UnrolledBuffer(one, two)
+y = UnrolledBuffer(one, two)
+x equals y: true, y equals x: true
+
+x = ParArray(abc, def, etc)
+y = ParArray(abc, def, etc)
+x equals y: true, y equals x: true
+
+x = ParHashMap(2 -> 4, 1 -> 2)
+y = ParHashMap(2 -> 4, 1 -> 2)
+x equals y: true, y equals x: true
+
+x = ParTrieMap(1 -> 2, 2 -> 4)
+y = ParTrieMap(1 -> 2, 2 -> 4)
+x equals y: true, y equals x: true
+
+x = ParHashSet(1, 2, 3)
+y = ParHashSet(1, 2, 3)
+x equals y: true, y equals x: true
+
+x = ParRange(0, 1, 2, 3, 4)
+y = ParRange(0, 1, 2, 3, 4)
+x equals y: true, y equals x: true
+
+x = ParRange(0, 1, 2, 3)
+y = ParRange(0, 1, 2, 3)
+x equals y: true, y equals x: true
+
+x = ParMap(5 -> 1, 10 -> 2)
+y = ParMap(5 -> 1, 10 -> 2)
+x equals y: true, y equals x: true
+
+x = ParSet(two, one)
+y = ParSet(two, one)
+x equals y: true, y equals x: true
+
diff --git a/test/files/jvm/serialization-new.scala b/test/files/jvm/serialization-new.scala
new file mode 100644
index 0000000..1522fc8
--- /dev/null
+++ b/test/files/jvm/serialization-new.scala
@@ -0,0 +1,671 @@
+//############################################################################
+// Serialization
+//############################################################################
+
+object Serialize {
+ @throws(classOf[java.io.IOException])
+ def write[A](o: A): Array[Byte] = {
+ val ba = new java.io.ByteArrayOutputStream(512)
+ val out = new java.io.ObjectOutputStream(ba)
+ out.writeObject(o)
+ out.close()
+ ba.toByteArray()
+ }
+ @throws(classOf[java.io.IOException])
+ @throws(classOf[ClassNotFoundException])
+ def read[A](buffer: Array[Byte]): A = {
+ val in =
+ new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(buffer))
+ in.readObject().asInstanceOf[A]
+ }
+ def check[A, B](x: A, y: B) {
+ println("x = " + x)
+ println("y = " + y)
+ println("x equals y: " + (x equals y) + ", y equals x: " + (y equals x))
+ assert((x equals y) && (y equals x))
+ println()
+ }
+}
+import Serialize._
+
+//############################################################################
+// Test classes in package "scala"
+
+object Test1_scala {
+
+ private def arrayToString[A](arr: Array[A]): String =
+ arr.mkString("Array[",",","]")
+
+ private def arrayEquals[A, B](a1: Array[A], a2: Array[B]): Boolean =
+ (a1.length == a2.length) &&
+ (Iterator.range(0, a1.length) forall { i => a1(i) == a2(i) })
+
+ object WeekDay extends Enumeration {
+ type WeekDay = Value
+ val Monday, Tuesday, Wednesday, Thusday, Friday, Saturday, Sunday = Value
+ }
+ import WeekDay._, BigDecimal._, RoundingMode._
+
+ // in alphabetic order
+ try {
+ // Array
+ val a1 = Array(1, 2, 3)
+ val _a1: Array[Int] = read(write(a1))
+ println("a1 = " + arrayToString(a1))
+ println("_a1 = " + arrayToString(_a1))
+ println("arrayEquals(a1, _a1): " + arrayEquals(a1, _a1))
+ println()
+
+ // Either
+ val e1 = Left(1)
+ val _e1: Either[Int, String] = read(write(e1))
+ println("e1 = " + e1)
+ println("_e1 = " + _e1)
+ println("e1 eq _e1: " + (e1 eq _e1) + ", _e1 eq e1: " + (_e1 eq e1))
+ println("e1 equals _e1: " + (e1 equals _e1) + ", _e1 equals e1: " + (_e1 equals e1))
+ println()
+
+ // Enumeration
+ val x7 = BigDecimal.RoundingMode
+ val y7: RoundingMode.type = read(write(x7))
+ println("x7 = " + x7)
+ println("y7 = " + y7)
+ println("x7 eq y7: " + (x7 eq y7) + ", y7 eq x7: " + (y7 eq x7))
+ println("x7 equals y7: " + (x7 equals y7) + ", y7 equals x7: " + (y7 equals x7))
+ println()
+
+ val x8 = WeekDay
+ val y8: WeekDay.type = read(write(x8))
+ println("x8 = " + x8)
+ println("y8 = " + y8)
+ println("x8 eq y8: " + (x8 eq y8) + ", y8 eq x8: " + (y8 eq x8))
+ println("x8 equals y8: " + (x8 equals y8) + ", y8 equals x8: " + (y8 equals x8))
+ println()
+
+ val x9 = UP
+ val y9: RoundingMode = read(write(x9))
+ println("x9 = " + x9)
+ println("y9 = " + y9)
+ println("x9 eq y9: " + (x9 eq y9) + ", y9 eq x9: " + (y9 eq x9))
+ println("x9 equals y9: " + (x9 equals y9) + ", y9 equals x9: " + (y9 equals x9))
+ println()
+
+ val x10 = Monday
+ val y10: WeekDay = read(write(x10))
+ println("x10 = " + x10)
+ println("y10 = " + y10)
+ println("x10 eq y10: " + (x10 eq y10) + ", y10 eq x10: " + (y10 eq x10))
+ println("x10 equals y10: " + (x10 equals y10) + ", y10 equals x10: " + (y10 equals x10))
+ println()
+
+ println("x9 eq x10: " + (x9 eq x10) + ", x10 eq x9: " + (x10 eq x9))
+ println("x9 equals x10: " + (x9 equals x10) + ", x10 equals x9: " + (x10 equals x9))
+ println("x9 eq y10: " + (x9 eq y10) + ", y10 eq x9: " + (y10 eq x9))
+ println("x9 equals y10: " + (x9 equals y10) + ", y10 equals x9: " + (y10 equals x9))
+ println()
+
+ // Function
+ val f1 = { x: Int => 2 * x }
+ val _f1: Function[Int, Int] = read(write(f1))
+ println("f1 = <na>")
+ println("_f1 = <na>")
+ println("f1(2): " + f1(2) + ", _f1(2): " + _f1(2))
+ println()
+
+ // List
+ val xs0 = List(1, 2, 3)
+ val _xs0: List[Int] = read(write(xs0))
+ println("xs0 = " + xs0)
+ println("_xs0 = " + _xs0)
+ println("xs0 eq _xs0: " + (xs0 eq _xs0) + ", _xs0 eq xs0: " + (_xs0 eq xs0))
+ println("xs0 equals _xs0: " + (xs0 equals _xs0) + ", _xs0 equals xs0: " + (_xs0 equals xs0))
+ println()
+
+ val xs1 = Nil
+ val _xs1: List[Nothing] = read(write(xs1))
+ println("xs1 = " + xs1)
+ println("_xs1 = " + _xs1)
+ println("xs1 eq _xs1: " + (xs1 eq _xs1) + ", _xs1 eq xs1: " + (_xs1 eq xs1))
+ println()
+
+ // Option
+ val o1 = None
+ val _o1: Option[Nothing] = read(write(o1))
+ println("o1 = " + o1)
+ println("_o1 = " + _o1)
+ println("o1 eq _o1: " + (o1 eq _o1) + ", _o1 eq o1: " + (_o1 eq o1))
+ println()
+
+ val o2 = Some(1)
+ val _o2: Option[Int] = read(write(o2))
+ println("o2 = " + o2)
+ println("_o2 = " + _o2)
+ println("o2 eq _o2: " + (o2 eq _o2) + ", _o2 eq o2: " + (_o2 eq o2))
+ println("o2 equals _o2: " + (o2 equals _o2) + ", _o2 equals o2: " + (_o2 equals o2))
+ println()
+/*
+ // Responder
+ val r1 = Responder.constant("xyz")
+ val _r1: Responder[String] = read(write(r1))
+ check(r1, _r1)
+*/
+ // Symbol
+ val s1 = 'hello
+ val _s1: Symbol = read(write(s1))
+ println("s1 = " + s1)
+ println("_s1 = " + _s1)
+ println("s1 eq _s1: " + (s1 eq _s1) + ", _s1 eq s1: " + (_s1 eq s1))
+ println("s1 equals _s1: " + (s1 equals _s1) + ", _s1 equals s1: " + (_s1 equals s1))
+ println()
+
+ // Tuple
+ val t1 = ("BannerLimit", 12345)
+ val _t1: (String, Int) = read(write(t1))
+ println("t1 = " + t1)
+ println("_t1 = " + _t1)
+ println("t1 eq _t1: " + (t1 eq _t1) + ", _t1 eq t1: " + (_t1 eq t1))
+ println("t1 equals _t1: " + (t1 equals _t1) + ", _t1 equals t1: " + (_t1 equals t1))
+ println()
+ }
+ catch {
+ case e: Exception =>
+ println("Error in Test1_scala: " + e)
+ throw e
+ }
+}
+
+//############################################################################
+// Test classes in package "scala.collection.immutable"
+
+object Test2_immutable {
+ import scala.collection.immutable.{
+ BitSet, HashMap, HashSet, ListMap, ListSet, Queue, Range, SortedMap,
+ SortedSet, Stack, Stream, TreeMap, TreeSet, Vector}
+
+ // in alphabetic order
+ try {
+ // BitSet
+ val bs1 = BitSet.empty + 1 + 2
+ val _bs1: BitSet = read(write(bs1))
+ check(bs1, _bs1)
+
+ val bs2 = {
+ val bs = new collection.mutable.BitSet()
+ bs += 2; bs += 3
+ bs.toImmutable
+ }
+ val _bs2: BitSet = read(write(bs2))
+ check(bs2, _bs2)
+
+ // HashMap
+ val hm1 = new HashMap[Int, String] + (1 -> "A", 2 -> "B", 3 -> "C")
+ val _hm1: HashMap[Int, String] = read(write(hm1))
+ check(hm1, _hm1)
+
+ // HashSet
+ val hs1 = new HashSet[Int] + 1 + 2
+ val _hs1: HashSet[Int] = read(write(hs1))
+ check(hs1, _hs1)
+
+ // List
+ val xs1 = List(("buffers", 20), ("layers", 2), ("title", 3))
+ val _xs1: List[(String, Int)] = read(write(xs1))
+ check(xs1, _xs1)
+
+ // ListMap
+ val lm1 = new ListMap[String, Int] + ("buffers" -> 20, "layers" -> 2, "title" -> 3)
+ val _lm1: ListMap[String, Int] = read(write(lm1))
+ check(lm1, _lm1)
+
+ // ListSet
+ val ls1 = new ListSet[Int] + 3 + 5
+ val _ls1: ListSet[Int] = read(write(ls1))
+ check(ls1, _ls1)
+
+ // Queue
+ val q1 = Queue("a", "b", "c")
+ val _q1: Queue[String] = read(write(q1))
+ check(q1, _q1)
+
+ // Range
+ val r1 = 0 until 10
+ val _r1: Range = read(write(r1))
+ check(r1, _r1)
+
+ val r2 = Range.Long(0L, 10L, 1)
+ val _r2: r2.type = read(write(r2))
+ check(r2, _r2)
+
+ // SortedMap
+ val sm1 = SortedMap.empty[Int, String] + (2 -> "B", 3 -> "C", 1 -> "A")
+ val _sm1: SortedMap[Int, String] = read(write(sm1))
+ check(sm1, _sm1)
+
+ // SortedSet
+ val ss1 = SortedSet.empty[Int] + 2 + 3 + 1
+ val _ss1: SortedSet[Int] = read(write(ss1))
+ check(ss1, _ss1)
+
+ // Stack
+ val s1 = new Stack().push("a", "b", "c")
+ val _s1: Stack[String] = read(write(s1))
+ check(s1, _s1)
+
+ // Stream
+ val st1 = Stream.range(0, 10)
+ val _st1: Stream[Int] = read(write(st1))
+ check(st1, _st1)
+
+ // TreeMap
+ val tm1 = new TreeMap[Int, String] + (42 -> "FortyTwo")
+ val _tm1: TreeMap[Int, String] = read(write(tm1))
+ check(tm1, _tm1)
+
+ // TreeSet
+ val ts1 = new TreeSet[Int]() + 2 + 0
+ val _ts1: TreeSet[Int] = read(write(ts1))
+ check(ts1, _ts1)
+
+ // Vector
+ val v1 = Vector('a, 'b, 'c)
+ val _v1: Vector[Symbol] = read(write(v1))
+ check(v1, _v1)
+ }
+ catch {
+ case e: Exception =>
+ println("Error in Test2_immutable: " + e)
+ throw e
+ }
+}
+
+//############################################################################
+// Test classes in package "scala.collection.mutable"
+
+object Test3_mutable {
+ import scala.reflect.ClassTag
+ import scala.collection.mutable.{
+ ArrayBuffer, ArrayBuilder, ArraySeq, ArrayStack, BitSet, DoubleLinkedList,
+ HashMap, HashSet, History, LinkedHashMap, LinkedHashSet, LinkedList, ListBuffer,
+ Publisher, Queue, Stack, StringBuilder, WrappedArray, TreeSet}
+ import scala.collection.concurrent.TrieMap
+
+ // in alphabetic order
+ try {
+ // ArrayBuffer
+ val ab1 = new ArrayBuffer[String]
+ ab1 ++= List("one", "two")
+ val _ab1: ArrayBuffer[String] = read(write(ab1))
+ check(ab1, _ab1)
+
+ // ArrayBuilder
+ val abu1 = ArrayBuilder.make[Long]
+ val _abu1: ArrayBuilder[ClassTag[Long]] = read(write(abu1))
+ check(abu1, _abu1)
+
+ val abu2 = ArrayBuilder.make[Float]
+ val _abu2: ArrayBuilder[ClassTag[Float]] = read(write(abu2))
+ check(abu2, _abu2)
+
+ // ArraySeq
+ val aq1 = ArraySeq(1, 2, 3)
+ val _aq1: ArraySeq[Int] = read(write(aq1))
+ check(aq1, _aq1)
+
+ // ArrayStack
+ val as1 = new ArrayStack[Int]
+ as1 ++= List(20, 2, 3).iterator
+ val _as1: ArrayStack[Int] = read(write(as1))
+ check(as1, _as1)
+
+ // BitSet
+ val bs1 = new BitSet()
+ bs1 += 0
+ bs1 += 8
+ bs1 += 9
+ val _bs1: BitSet = read(write(bs1))
+ check(bs1, _bs1)
+/*
+ // DoubleLinkedList
+ val dl1 = new DoubleLinkedList[Int](2, null)
+ dl1.append(new DoubleLinkedList(3, null))
+ val _dl1: DoubleLinkedList[Int] = read(write(dl1))
+ check(dl1, _dl1)
+*/
+ // HashMap
+ val hm1 = new HashMap[String, Int]
+ hm1 ++= List(("A", 1), ("B", 2), ("C", 3)).iterator
+ val _hm1: HashMap[String, Int] = read(write(hm1))
+ check(hm1, _hm1)
+
+ // HashSet
+ val hs1 = new HashSet[String]
+ hs1 ++= List("layers", "buffers", "title").iterator
+ val _hs1: HashSet[String] = read(write(hs1))
+ check(hs1, _hs1)
+
+ val h1 = new History[String, Int]
+ val _h1: History[String, Int] = read(write(h1))
+ check(h1, _h1)
+
+ // LinkedHashMap
+ { val lhm1 = new LinkedHashMap[String, Int]
+ val list = List(("Linked", 1), ("Hash", 2), ("Map", 3))
+ lhm1 ++= list.iterator
+ val _lhm1: LinkedHashMap[String, Int] = read(write(lhm1))
+ check(lhm1, _lhm1)
+ check(lhm1.toSeq, _lhm1.toSeq) // check elements order
+ check(lhm1.toSeq, list) // check elements order
+ }
+
+ // LinkedHashSet
+ { val lhs1 = new LinkedHashSet[String]
+ val list = List("layers", "buffers", "title")
+ lhs1 ++= list.iterator
+ val _lhs1: LinkedHashSet[String] = read(write(lhs1))
+ check(lhs1, _lhs1)
+ check(lhs1.toSeq, _lhs1.toSeq) // check elements order
+ check(lhs1.toSeq, list) // check elements order
+ }
+/*
+ // LinkedList
+ val ll1 = new LinkedList[Int](2, null)
+ ll1.append(new LinkedList(3, null))
+ val _ll1: LinkedList[Int] = read(write(ll1))
+ check(ll1, _ll1)
+*/
+ // ListBuffer
+ val lb1 = new ListBuffer[String]
+ lb1 ++= List("white", "black")
+ val _lb1: ListBuffer[String] = read(write(lb1))
+ check(lb1, _lb1)
+
+ // Queue
+ val q1 = new Queue[Int]
+ q1 ++= List(20, 2, 3).iterator
+ val _q1: Queue[Int] = read(write(q1))
+ check(q1, _q1)
+
+ // Stack
+ val s1 = new Stack[Int]
+ s1 pushAll q1
+ val _s1: Stack[Int] = read(write(s1))
+ check(s1, _s1)
+
+ // StringBuilder
+ val sb1 = new StringBuilder
+ sb1 append "abc"
+ val _sb1: StringBuilder = read(write(sb1))
+ check(sb1, _sb1)
+
+ // WrappedArray
+ val wa1 = WrappedArray.make(Array(1, 2, 3))
+ val _wa1: WrappedArray[Int] = read(write(wa1))
+ check(wa1, _wa1)
+
+ // TreeSet
+ val ts1 = TreeSet[Int]() ++= Array(1, 2, 3)
+ val _ts1: TreeSet[Int] = read(write(ts1))
+ check(ts1, _ts1)
+
+ // concurrent.TrieMap
+ val ct1 = TrieMap[Int, String]() ++= Array(1 -> "one", 2 -> "two", 3 -> "three")
+ val _ct1: TrieMap[Int, String] = read(write(ct1))
+ check(ct1, _ct1)
+ }
+ catch {
+ case e: Exception =>
+ println("Error in Test3_mutable: " + e)
+ throw e
+ }
+}
+
+
+//############################################################################
+// Test classes in package "scala.xml"
+
+object Test4_xml {
+ import scala.xml.{Attribute, Document, Elem, Null, PrefixedAttribute, Text}
+
+ case class Person(name: String, age: Int)
+
+ try {
+ // Attribute
+ val a1 = new PrefixedAttribute("xml", "src", Text("hello"), Null)
+ val _a1: Attribute = read(write(a1))
+ check(a1, _a1)
+
+ // Document
+ val d1 = new Document
+ d1.docElem = <title></title>
+ d1.encoding = Some("UTF-8")
+ val _d1: Document = read(write(d1))
+ check(d1, _d1)
+
+ // Elem
+ val e1 = <html><title>title</title><body></body></html>;
+ val _e1: Elem = read(write(e1))
+ check(e1, _e1)
+
+ class AddressBook(a: Person*) {
+ private val people: List[Person] = a.toList
+ def toXHTML =
+ <table cellpadding="2" cellspacing="0">
+ <tr>
+ <th>Last Name</th>
+ <th>First Name</th>
+ </tr>
+ { for (p <- people) yield
+ <tr>
+ <td> { p.name } </td>
+ <td> { p.age.toString() } </td>
+ </tr> }
+ </table>;
+ }
+
+ val people = new AddressBook(
+ Person("Tom", 20),
+ Person("Bob", 22),
+ Person("James", 19))
+
+ val e2 =
+ <html>
+ <body>
+ { people.toXHTML }
+ </body>
+ </html>;
+ val _e2: Elem = read(write(e2))
+ check(e2, _e2)
+ }
+ catch {
+ case e: Exception =>
+ println("Error in Test4_xml: " + e)
+ throw e
+ }
+}
+
+//############################################################################
+// Test user-defined classes WITHOUT nesting
+
+class Person(_name: String) extends Serializable {
+ private var name = _name
+ override def toString() = name
+ override def equals(that: Any): Boolean =
+ that.isInstanceOf[Person] &&
+ (name == that.asInstanceOf[Person].name)
+}
+
+class Employee(_name: String) extends Serializable {
+ private var name = _name
+ override def toString() = name
+}
+
+object bob extends Employee("Bob")
+
+object Test5 {
+ val x1 = new Person("Tim")
+ val x2 = bob
+
+ try {
+ val y1: Person = read(write(x1))
+ val y2: Employee = read(write(x2))
+
+ check(x1, y1)
+ check(x2, y2)
+ }
+ catch {
+ case e: Exception =>
+ println("Error in Test5: " + e)
+ }
+}
+
+//############################################################################
+// Test user-defined classes WITH nesting
+
+object Test6 {
+ object bill extends Employee("Bill") {
+ val x = paul
+ }
+ object paul extends Person("Paul") {
+ val x = 4 // bill; => StackOverflowException !!!
+ }
+ val x1 = new Person("John")
+ val x2 = bill
+ val x3 = paul
+
+ try {
+ val y1: Person = read(write(x1))
+ val y2: Employee = read(write(x2))
+ val y3: Person = read(write(x3))
+
+ check(x1, y1)
+ check(x2, y2)
+ check(x3, y3)
+ }
+ catch {
+ case e: Exception =>
+ println("Error in Test6: " + e)
+ }
+}
+
+//############################################################################
+// Nested objects cannot get readresolve automatically because after deserialization
+// they would be null (they are treated as lazy vals)
+class Outer extends Serializable {
+ object Inner extends Serializable
+}
+
+object Test7 {
+ val x = new Outer
+ x.Inner // initialize
+ val y:Outer = read(write(x))
+ if (y.Inner == null)
+ println("Inner object is null")
+}
+
+// Verify that transient lazy vals don't get serialized
+class WithTransient extends Serializable {
+ @transient lazy val a1 = 1
+ @transient private lazy val a2 = 2
+ @transient object B extends Serializable
+ @transient private object C extends Serializable
+
+ def test = {
+ println(a1)
+ println(a2)
+ if (B == null || C == null)
+ println("Transient nested object failed to serialize properly")
+ }
+}
+
+object Test8 {
+ val x = new WithTransient
+ x.test
+ try {
+ val y:WithTransient = read(write(x))
+ y.test
+ }
+ catch {
+ case e: Exception =>
+ println("Error in Test8: " + e)
+ }
+}
+
+//############################################################################
+// Test code
+
+object Test {
+ def main(args: Array[String]) {
+ Test1_scala
+ Test2_immutable
+ Test3_mutable
+ Test4_xml
+ Test5
+ Test6
+ Test7
+ Test8
+ Test9_parallel
+ }
+}
+
+//############################################################################
+
+
+//############################################################################
+// Test classes in package "scala.collection.parallel" and subpackages
+object Test9_parallel {
+ import scala.collection.parallel._
+
+ try {
+ println()
+
+ // UnrolledBuffer
+ val ub = new collection.mutable.UnrolledBuffer[String]
+ ub ++= List("one", "two")
+ val _ub: collection.mutable.UnrolledBuffer[String] = read(write(ub))
+ check(ub, _ub)
+
+ // mutable.ParArray
+ val pa = mutable.ParArray("abc", "def", "etc")
+ val _pa: mutable.ParArray[String] = read(write(pa))
+ check(pa, _pa)
+
+ // mutable.ParHashMap
+ val mpm = mutable.ParHashMap(1 -> 2, 2 -> 4)
+ val _mpm: mutable.ParHashMap[Int, Int] = read(write(mpm))
+ check(mpm, _mpm)
+
+ // mutable.ParTrieMap
+ val mpc = mutable.ParTrieMap(1 -> 2, 2 -> 4)
+ val _mpc: mutable.ParTrieMap[Int, Int] = read(write(mpc))
+ check(mpc, _mpc)
+
+ // mutable.ParHashSet
+ val mps = mutable.ParHashSet(1, 2, 3)
+ val _mps: mutable.ParHashSet[Int] = read(write(mps))
+ check(mps, _mps)
+
+ // immutable.ParRange
+ val pr1 = immutable.ParRange(0, 4, 1, true)
+ val _pr1: immutable.ParRange = read(write(pr1))
+ check(pr1, _pr1)
+
+ val pr2 = immutable.ParRange(0, 4, 1, false)
+ val _pr2: immutable.ParRange = read(write(pr2))
+ check(pr2, _pr2)
+
+ // immutable.ParHashMap
+ val ipm = immutable.ParHashMap(5 -> 1, 10 -> 2)
+ val _ipm: immutable.ParHashMap[Int, Int] = read(write(ipm))
+ check(ipm, _ipm)
+
+ // immutable.ParHashSet
+ val ips = immutable.ParHashSet("one", "two")
+ val _ips: immutable.ParHashSet[String] = read(write(ips))
+ check(ips, _ips)
+
+ } catch {
+ case e: Exception =>
+ println("Error in Test5_parallel: " + e)
+ throw e
+ }
+}
\ No newline at end of file
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index 0717de2..f886cfe 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -2,11 +2,6 @@ a1 = Array[1,2,3]
_a1 = Array[1,2,3]
arrayEquals(a1, _a1): true
-c1 = Cell(a)
-_c1 = Cell(a)
-c1 eq _c1: false, _c1 eq c1: false
-c1 equals _c1: true, _c1 equals c1: true
-
e1 = Left(1)
_e1 = Left(1)
e1 eq _e1: false, _e1 eq e1: false
@@ -161,18 +156,42 @@ x = BitSet(0, 8, 9)
y = BitSet(0, 8, 9)
x equals y: true, y equals x: true
-x = Map(C -> 3, B -> 2, A -> 1)
-y = Map(C -> 3, A -> 1, B -> 2)
+x = Map(A -> 1, C -> 3, B -> 2)
+y = Map(A -> 1, C -> 3, B -> 2)
x equals y: true, y equals x: true
-x = Set(layers, title, buffers)
-y = Set(layers, title, buffers)
+x = Set(buffers, title, layers)
+y = Set(buffers, title, layers)
x equals y: true, y equals x: true
x = History()
y = History()
x equals y: true, y equals x: true
+x = Map(Linked -> 1, Hash -> 2, Map -> 3)
+y = Map(Linked -> 1, Hash -> 2, Map -> 3)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+y = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+x equals y: true, y equals x: true
+
+x = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+y = List((Linked,1), (Hash,2), (Map,3))
+x equals y: true, y equals x: true
+
+x = Set(layers, buffers, title)
+y = Set(layers, buffers, title)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer(layers, buffers, title)
+y = ArrayBuffer(layers, buffers, title)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer(layers, buffers, title)
+y = List(layers, buffers, title)
+x equals y: true, y equals x: true
+
x = ListBuffer(white, black)
y = ListBuffer(white, black)
x equals y: true, y equals x: true
@@ -193,6 +212,14 @@ x = WrappedArray(1, 2, 3)
y = WrappedArray(1, 2, 3)
x equals y: true, y equals x: true
+x = TreeSet(1, 2, 3)
+y = TreeSet(1, 2, 3)
+x equals y: true, y equals x: true
+
+x = TrieMap(1 -> one, 2 -> two, 3 -> three)
+y = TrieMap(1 -> one, 2 -> two, 3 -> three)
+x equals y: true, y equals x: true
+
x = xml:src="hello"
y = xml:src="hello"
x equals y: true, y equals x: true
@@ -207,7 +234,7 @@ x equals y: true, y equals x: true
x = <html>
<body>
- <table cellspacing="0" cellpadding="2">
+ <table cellpadding="2" cellspacing="0">
<tr>
<th>Last Name</th>
<th>First Name</th>
@@ -227,7 +254,7 @@ x = <html>
</html>
y = <html>
<body>
- <table cellspacing="0" cellpadding="2">
+ <table cellpadding="2" cellspacing="0">
<tr>
<th>Last Name</th>
<th>First Name</th>
@@ -280,12 +307,16 @@ x = ParArray(abc, def, etc)
y = ParArray(abc, def, etc)
x equals y: true, y equals x: true
-x = ParHashMap(1 -> 2, 2 -> 4)
-y = ParHashMap(1 -> 2, 2 -> 4)
+x = ParHashMap(2 -> 4, 1 -> 2)
+y = ParHashMap(2 -> 4, 1 -> 2)
+x equals y: true, y equals x: true
+
+x = ParTrieMap(1 -> 2, 2 -> 4)
+y = ParTrieMap(1 -> 2, 2 -> 4)
x equals y: true, y equals x: true
-x = ParHashSet(2, 1, 3)
-y = ParHashSet(2, 1, 3)
+x = ParHashSet(1, 2, 3)
+y = ParHashSet(1, 2, 3)
x equals y: true, y equals x: true
x = ParRange(0, 1, 2, 3, 4)
diff --git a/test/files/jvm/serialization.scala b/test/files/jvm/serialization.scala
index eab0691..34b6493 100644
--- a/test/files/jvm/serialization.scala
+++ b/test/files/jvm/serialization.scala
@@ -56,15 +56,6 @@ object Test1_scala {
println("arrayEquals(a1, _a1): " + arrayEquals(a1, _a1))
println()
- // Cell
- val c1 = new Cell('a')
- val _c1: Cell[Char] = read(write(c1))
- println("c1 = " + c1)
- println("_c1 = " + _c1)
- println("c1 eq _c1: " + (c1 eq _c1) + ", _c1 eq c1: " + (_c1 eq c1))
- println("c1 equals _c1: " + (c1 equals _c1) + ", _c1 equals c1: " + (_c1 equals c1))
- println()
-
// Either
val e1 = Left(1)
val _e1: Either[Int, String] = read(write(e1))
@@ -294,8 +285,9 @@ object Test3_mutable {
import scala.reflect.ClassManifest
import scala.collection.mutable.{
ArrayBuffer, ArrayBuilder, ArraySeq, ArrayStack, BitSet, DoubleLinkedList,
- HashMap, HashSet, History, LinkedList, ListBuffer, Publisher, Queue,
- Stack, StringBuilder, WrappedArray}
+ HashMap, HashSet, History, LinkedHashMap, LinkedHashSet, LinkedList, ListBuffer,
+ Publisher, Queue, Stack, StringBuilder, WrappedArray, TreeSet}
+ import scala.collection.concurrent.TrieMap
// in alphabetic order
try {
@@ -354,6 +346,26 @@ object Test3_mutable {
val h1 = new History[String, Int]
val _h1: History[String, Int] = read(write(h1))
check(h1, _h1)
+
+ // LinkedHashMap
+ { val lhm1 = new LinkedHashMap[String, Int]
+ val list = List(("Linked", 1), ("Hash", 2), ("Map", 3))
+ lhm1 ++= list.iterator
+ val _lhm1: LinkedHashMap[String, Int] = read(write(lhm1))
+ check(lhm1, _lhm1)
+ check(lhm1.toSeq, _lhm1.toSeq) // check elements order
+ check(lhm1.toSeq, list) // check elements order
+ }
+
+ // LinkedHashSet
+ { val lhs1 = new LinkedHashSet[String]
+ val list = List("layers", "buffers", "title")
+ lhs1 ++= list.iterator
+ val _lhs1: LinkedHashSet[String] = read(write(lhs1))
+ check(lhs1, _lhs1)
+ check(lhs1.toSeq, _lhs1.toSeq) // check elements order
+ check(lhs1.toSeq, list) // check elements order
+ }
/*
// LinkedList
val ll1 = new LinkedList[Int](2, null)
@@ -389,6 +401,16 @@ object Test3_mutable {
val wa1 = WrappedArray.make(Array(1, 2, 3))
val _wa1: WrappedArray[Int] = read(write(wa1))
check(wa1, _wa1)
+
+ // TreeSet
+ val ts1 = TreeSet[Int]() ++= Array(1, 2, 3)
+ val _ts1: TreeSet[Int] = read(write(ts1))
+ check(ts1, _ts1)
+
+ // concurrent.TrieMap
+ val ct1 = TrieMap[Int, String]() ++= Array(1 -> "one", 2 -> "two", 3 -> "three")
+ val _ct1: TrieMap[Int, String] = read(write(ct1))
+ check(ct1, _ct1)
}
catch {
case e: Exception =>
@@ -535,29 +557,22 @@ class Outer extends Serializable {
object Test7 {
val x = new Outer
x.Inner // initialize
- try {
- val y:Outer = read(write(x))
- if (y.Inner == null)
- println("Inner object is null")
- }
- catch {
- case e: Exception =>
- println("Error in Test7: " + e)
- }
-
+ val y:Outer = read(write(x))
+ if (y.Inner == null)
+ println("Inner object is null")
}
-
// Verify that transient lazy vals don't get serialized
class WithTransient extends Serializable {
@transient lazy val a1 = 1
@transient private lazy val a2 = 2
@transient object B extends Serializable
+ @transient private object C extends Serializable
def test = {
println(a1)
println(a2)
- if (B == null)
+ if (B == null || C == null)
println("Transient nested object failed to serialize properly")
}
}
@@ -619,6 +634,11 @@ object Test9_parallel {
val _mpm: mutable.ParHashMap[Int, Int] = read(write(mpm))
check(mpm, _mpm)
+ // mutable.ParTrieMap
+ val mpc = mutable.ParTrieMap(1 -> 2, 2 -> 4)
+ val _mpc: mutable.ParTrieMap[Int, Int] = read(write(mpc))
+ check(mpc, _mpc)
+
// mutable.ParHashSet
val mps = mutable.ParHashSet(1, 2, 3)
val _mps: mutable.ParHashSet[Int] = read(write(mps))
diff --git a/test/files/jvm/si5471.check b/test/files/jvm/si5471.check
new file mode 100644
index 0000000..bb101b6
--- /dev/null
+++ b/test/files/jvm/si5471.check
@@ -0,0 +1,2 @@
+true
+true
diff --git a/test/files/jvm/si5471.scala b/test/files/jvm/si5471.scala
new file mode 100644
index 0000000..2c8c420
--- /dev/null
+++ b/test/files/jvm/si5471.scala
@@ -0,0 +1,17 @@
+
+object Test {
+
+ def main(args: Array[String]) {
+ import scala.math.Numeric
+ import scala.math.Numeric.Implicits._
+
+ val b = BigInt(Long.MaxValue) + 1
+
+ def dbl[N :Numeric](n: N) = n.toDouble
+ def flt[N :Numeric](n: N) = n.toFloat
+
+ println(dbl(b) == b.toDouble)
+ println(flt(b) == b.toFloat)
+ }
+
+}
diff --git a/test/files/jvm/signum.scala b/test/files/jvm/signum.scala
new file mode 100644
index 0000000..76602a6
--- /dev/null
+++ b/test/files/jvm/signum.scala
@@ -0,0 +1,21 @@
+object Test {
+ def main(args: Array[String]) {
+ assert(math.signum(Long.MaxValue) == 1L)
+ assert(math.signum(1L) == 1L)
+ assert(math.signum(0L) == 0L)
+ assert(math.signum(-1L) == -1L)
+ assert(math.signum(Long.MinValue) == -1L)
+
+ assert(math.signum(Int.MaxValue) == 1)
+ assert(math.signum(1) == 1)
+ assert(math.signum(0) == 0)
+ assert(math.signum(-1) == -1)
+ assert(math.signum(Int.MinValue) == -1)
+
+ assert(java.lang.Float.floatToIntBits(math.signum(0f)) == 0x0)
+ assert(java.lang.Float.floatToIntBits(math.signum(-0f)) == 0x80000000)
+
+ assert(java.lang.Double.doubleToLongBits(math.signum(0d)) == 0x0L)
+ assert(java.lang.Double.doubleToLongBits(math.signum(-0d)) == 0x8000000000000000L)
+ }
+}
diff --git a/test/files/jvm/stringbuilder.check b/test/files/jvm/stringbuilder.check
new file mode 100644
index 0000000..c9b4499
--- /dev/null
+++ b/test/files/jvm/stringbuilder.check
@@ -0,0 +1,17 @@
+s0 equals j0 = false
+s0.toString equals j0.toString = true
+s1.toString equals j1.toString = true
+j2=cba, s2=cba
+s2.toString equals j2.toString = true
+j3=aba, s3=aba
+s3.toString equals j3.toString = true
+s0.toString equals j0.toString = true
+s1.toString equals j1.toString = true
+j0=-1988a1trueabc, s0=-1988a1trueabc
+s0.toString equals j0.toString = true
+j1=xyz012###, s1=xyz012###
+s1.toString equals j1.toString = true
+j1=2, s1=2
+s1 == j1 = true
+j2=8, s2=8
+s2 == j2 = true
diff --git a/test/files/jvm/stringbuilder.scala b/test/files/jvm/stringbuilder.scala
index 8f73cfa..54951d6 100644
--- a/test/files/jvm/stringbuilder.scala
+++ b/test/files/jvm/stringbuilder.scala
@@ -1,98 +1,88 @@
-import testing.SUnit._
-
/** Test the Scala implementation of class <code>scala.StringBuilder</code>.
*
* @author Stephane Micheloud
*/
-object Test extends TestConsoleMain {
- def suite = new TestSuite(
- Test1, //ctor, reverse
- Test2, //append
- Test3, //insert
- Test4 //indexOf, lastIndexOf
- )
+object Test {
+ def main(args: Array[String]) {
+ Test1.run() //ctor, reverse
+ Test2.run() //append
+ Test3.run() //insert
+ Test4.run() //indexOf, lastIndexOf
+ }
}
-object Test1 extends TestCase("ctor") with Assert {
- override def enableStackTrace = false
- override def runTest {
- val j0 = new java.lang.StringBuilder("abc") // Java 1.5+
+object Test1 {
+ def run() {
+ val j0 = new java.lang.StringBuilder("abc")
val s0 = new StringBuilder("abc")
- assertEquals("s0 equals j0", false, s0 equals j0)
- assertEquals("s0.toString equals j0.toString", true, s0.toString equals j0.toString)
+ println("s0 equals j0 = " + (s0 equals j0))
+ println("s0.toString equals j0.toString = " + (s0.toString equals j0.toString))
val str = """
Scala is a general purpose programming language designed to express common programming patterns in a concise, elegant, and type-safe way. It smoothly integrates features of object-oriented and functional languages. It is also fully interoperable with Java."""
val j1 = new java.lang.StringBuilder(100) append str
val s1 = new java.lang.StringBuilder(100) append str
- assertEquals("s1.toString equals j1.toString", true, s1.toString equals j1.toString)
+ println("s1.toString equals j1.toString = " + (s1.toString equals j1.toString))
val j2 = j0 reverse
val s2 = s0 reverse;
- //println("j2="+j2+", s2="+s2)//debug
- assertEquals("s2.toString equals j2.toString", true, s2.toString equals j2.toString)
+ println("j2="+j2+", s2="+s2)
+ println("s2.toString equals j2.toString = " + (s2.toString equals j2.toString))
val j3 = j2; j3 setCharAt (0, j3 charAt 2)
val s3 = s2; s3(0) = s3(2)
- //println("j3="+j3+", s3="+s3)//debug
- assertEquals("s3.toString equals j3.toString", true, s3.toString equals j3.toString)
+ println("j3="+j3+", s3="+s3)
+ println("s3.toString equals j3.toString = " + (s3.toString equals j3.toString))
}
}
-object Test2 extends TestCase("append") with Assert {
- override def enableStackTrace = false
- override def runTest {
- val j0 = new java.lang.StringBuilder("abc") // Java 1.5+
+object Test2 {
+ def run() {
+ val j0 = new java.lang.StringBuilder("abc")
val s0 = new StringBuilder("abc")
-
j0 append true append (1.toByte) append 'a' append 9 append -1L append 1.2e-10f append -2.1e+100d
s0 append true append (1.toByte) append 'a' append 9 append -1L append 1.2e-10f append -2.1e+100d
+ println("s0.toString equals j0.toString = " + (s0.toString equals j0.toString))
- assertEquals("s0.toString equals j0.toString", true, s0.toString equals j0.toString)
-
- val j1 = new java.lang.StringBuilder // Java 1.5+
+ val j1 = new java.lang.StringBuilder
val s1 = new StringBuilder
j1 append "###" append Array('0', '1', '2') append "xyz".subSequence(0, 3)
s1 append "###" appendAll Array('0', '1', '2') appendAll List('x', 'y', 'z')
- assertEquals("s1.toString equals j1.toString", true, s1.toString equals j1.toString)
+ println("s1.toString equals j1.toString = " + (s1.toString equals j1.toString))
}
}
-object Test3 extends TestCase("insert") with Assert {
- override def enableStackTrace = false
- override def runTest {
- val j0 = new java.lang.StringBuilder("abc") // Java 1.5+
+object Test3 {
+ def run() {
+ val j0 = new java.lang.StringBuilder("abc")
val s0 = new StringBuilder("abc")
-
j0 insert (0, true) insert (0, 1.toByte) insert (0, 'a') insert (0, 88.toShort) insert (0, 9) insert (0, -1L)
s0 insert (0, true) insert (0, 1.toByte) insert (0, 'a') insert (0, 88.toShort) insert (0, 9) insert (0, -1L)
- //println("j0="+j0+", s0="+s0)//debug
- assertEquals("s0.toString equals j0.toString", true, s0.toString equals j0.toString)
+ println("j0="+j0+", s0="+s0)
+ println("s0.toString equals j0.toString = " + (s0.toString equals j0.toString))
- val j1 = new java.lang.StringBuilder // Java 1.5+
+ val j1 = new java.lang.StringBuilder
val s1 = new StringBuilder
j1 insert (0, "###") insert (0, Array('0', '1', '2')) insert (0, "xyz".subSequence(0, 3))
s1 insert (0, "###") insertAll (0, Array('0', '1', '2')) insertAll (0, List('x', 'y', 'z'))
- //println("j1="+j1+", s1="+s1)//debug
- assertEquals("s1.toString equals j1.toString", true, s1.toString equals j1.toString)
-
+ println("j1="+j1+", s1="+s1)
+ println("s1.toString equals j1.toString = " + (s1.toString equals j1.toString))
}
}
-object Test4 extends TestCase("indefOf") with Assert {
- override def enableStackTrace = false
- override def runTest {
+object Test4 {
+ def run() {
val j0 = new java.lang.StringBuilder("abc") // Java 1.5+
val s0 = new StringBuilder("abc")
val j1 = j0 indexOf("c")
val s1 = s0 indexOf("c")
- //println("j1="+j1+", s1="+s1)//debug
- assertEquals("s1 == j1", true, s1 == j1)
+ println("j1="+j1+", s1="+s1)
+ println("s1 == j1 = " + (s1 == j1))
val j2 = j0 append "123abc" lastIndexOf("c")
val s2 = s0 append "123abc" lastIndexOf("c")
- //println("j2="+j2+", s2="+s2)//debug
- assertEquals("s2 == j2", true, s2 == j2)
+ println("j2="+j2+", s2="+s2)
+ println("s2 == j2 = " + (s2 == j2))
}
}
diff --git a/test/files/jvm/t0632.check b/test/files/jvm/t0632.check
old mode 100644
new mode 100755
index 3185410..681bc9d
--- a/test/files/jvm/t0632.check
+++ b/test/files/jvm/t0632.check
@@ -1,12 +1,12 @@
-<foo x="&"></foo>
-<foo x="&"></foo>
-<foo x="&"></foo>
-<foo x="&"></foo>
-<foo x="&"></foo>
-<foo x="&"></foo>
-<foo x="&"></foo>
-<foo x="&"></foo>
-<foo x="&&"></foo>
-<foo x="&&"></foo>
-<foo x="&&"></foo>
-<foo x="&&"></foo>
+<foo x="&"/>
+<foo x="&"/>
+<foo x="&"/>
+<foo x="&"/>
+<foo x="&"/>
+<foo x="&"/>
+<foo x="&"/>
+<foo x="&"/>
+<foo x="&&"/>
+<foo x="&&"/>
+<foo x="&&"/>
+<foo x="&&"/>
diff --git a/test/files/jvm/t1118.check b/test/files/jvm/t1118.check
new file mode 100755
index 0000000..d676b41
--- /dev/null
+++ b/test/files/jvm/t1118.check
@@ -0,0 +1,11 @@
+
+<hi/> <!-- literal short -->
+<there></there> <!-- literal long -->
+<guys who="you all"></guys> <!-- literal long with attribute-->
+<hows it="going"/> <!-- literal short with attribute -->
+<this>is pretty cool</this> <!-- literal not empty -->
+
+<emptiness></emptiness> <!--programmatic long-->
+<vide/> <!--programmatic short-->
+<elem attr="value"/> <!--programmatic short with attribute-->
+<elem2 attr2="value2"></elem2> <!--programmatic long with attribute-->
diff --git a/test/files/jvm/t1118.scala b/test/files/jvm/t1118.scala
new file mode 100755
index 0000000..3c86547
--- /dev/null
+++ b/test/files/jvm/t1118.scala
@@ -0,0 +1,21 @@
+import scala.xml._
+
+object Test {
+ def main(args: Array[String]) {
+ println(<xml:group>
+<hi/> <!-- literal short -->
+<there></there> <!-- literal long -->
+<guys who="you all"></guys> <!-- literal long with attribute-->
+<hows it="going"/> <!-- literal short with attribute -->
+<this>is pretty cool</this> <!-- literal not empty -->
+</xml:group>)
+
+ println(Elem(null, "emptiness", Null, TopScope, false) ++ Text(" ") ++ Comment("programmatic long"))
+
+ println(Elem(null, "vide", Null, TopScope, true) ++ Text(" ") ++ Comment("programmatic short"))
+
+ println(Elem(null, "elem", Attribute("attr", Text("value"), Null), TopScope, true) ++ Text(" ") ++ Comment ("programmatic short with attribute"))
+
+ println(Elem(null, "elem2", Attribute("attr2", Text("value2"), Null), TopScope, false) ++ Text(" ") ++ Comment ("programmatic long with attribute"))
+ }
+}
\ No newline at end of file
diff --git a/test/files/jvm/t1652.check b/test/files/jvm/t1652.check
deleted file mode 100644
index dfa480c..0000000
--- a/test/files/jvm/t1652.check
+++ /dev/null
@@ -1,2 +0,0 @@
-OK1
-OK2
diff --git a/test/files/jvm/t2104.scala b/test/files/jvm/t2104.scala
index e672f02..655d74c 100644
--- a/test/files/jvm/t2104.scala
+++ b/test/files/jvm/t2104.scala
@@ -28,7 +28,7 @@ object Test
tp
}
- implicit val name="bug2104"
+ implicit val name="t2104"
val chars=List('\n','\r','a')
type Cs = List[Char]
diff --git a/test/files/jvm/bug560bis.check b/test/files/jvm/t560bis.check
similarity index 100%
rename from test/files/jvm/bug560bis.check
rename to test/files/jvm/t560bis.check
diff --git a/test/files/jvm/bug560bis.scala b/test/files/jvm/t560bis.scala
similarity index 100%
rename from test/files/jvm/bug560bis.scala
rename to test/files/jvm/t560bis.scala
diff --git a/test/files/jvm/t6172.scala b/test/files/jvm/t6172.scala
new file mode 100644
index 0000000..7fa0489
--- /dev/null
+++ b/test/files/jvm/t6172.scala
@@ -0,0 +1,3005 @@
+trait Foo1 {
+trait A1
+trait A2
+trait A3
+trait A4
+trait A5
+trait A6
+trait A7
+trait A8
+trait A9
+trait A10
+trait A11
+trait A12
+trait A13
+trait A14
+trait A15
+trait A16
+trait A17
+trait A18
+trait A19
+trait A20
+trait A21
+trait A22
+trait A23
+trait A24
+trait A25
+trait A26
+trait A27
+trait A28
+trait A29
+trait A30
+trait A31
+trait A32
+trait A33
+trait A34
+trait A35
+trait A36
+trait A37
+trait A38
+trait A39
+trait A40
+trait A41
+trait A42
+trait A43
+trait A44
+trait A45
+trait A46
+trait A47
+trait A48
+trait A49
+trait A50
+trait A51
+trait A52
+trait A53
+trait A54
+trait A55
+trait A56
+trait A57
+trait A58
+trait A59
+trait A60
+trait A61
+trait A62
+trait A63
+trait A64
+trait A65
+trait A66
+trait A67
+trait A68
+trait A69
+trait A70
+trait A71
+trait A72
+trait A73
+trait A74
+trait A75
+trait A76
+trait A77
+trait A78
+trait A79
+trait A80
+trait A81
+trait A82
+trait A83
+trait A84
+trait A85
+trait A86
+trait A87
+trait A88
+trait A89
+trait A90
+trait A91
+trait A92
+trait A93
+trait A94
+trait A95
+trait A96
+trait A97
+trait A98
+trait A99
+trait A100
+trait A101
+trait A102
+trait A103
+trait A104
+trait A105
+trait A106
+trait A107
+trait A108
+trait A109
+trait A110
+trait A111
+trait A112
+trait A113
+trait A114
+trait A115
+trait A116
+trait A117
+trait A118
+trait A119
+trait A120
+trait A121
+trait A122
+trait A123
+trait A124
+trait A125
+trait A126
+trait A127
+trait A128
+trait A129
+trait A130
+trait A131
+trait A132
+trait A133
+trait A134
+trait A135
+trait A136
+trait A137
+trait A138
+trait A139
+trait A140
+trait A141
+trait A142
+trait A143
+trait A144
+trait A145
+trait A146
+trait A147
+trait A148
+trait A149
+trait A150
+trait A151
+trait A152
+trait A153
+trait A154
+trait A155
+trait A156
+trait A157
+trait A158
+trait A159
+trait A160
+trait A161
+trait A162
+trait A163
+trait A164
+trait A165
+trait A166
+trait A167
+trait A168
+trait A169
+trait A170
+trait A171
+trait A172
+trait A173
+trait A174
+trait A175
+trait A176
+trait A177
+trait A178
+trait A179
+trait A180
+trait A181
+trait A182
+trait A183
+trait A184
+trait A185
+trait A186
+trait A187
+trait A188
+trait A189
+trait A190
+trait A191
+trait A192
+trait A193
+trait A194
+trait A195
+trait A196
+trait A197
+trait A198
+trait A199
+trait A200
+trait A201
+trait A202
+trait A203
+trait A204
+trait A205
+trait A206
+trait A207
+trait A208
+trait A209
+trait A210
+trait A211
+trait A212
+trait A213
+trait A214
+trait A215
+trait A216
+trait A217
+trait A218
+trait A219
+trait A220
+trait A221
+trait A222
+trait A223
+trait A224
+trait A225
+trait A226
+trait A227
+trait A228
+trait A229
+trait A230
+trait A231
+trait A232
+trait A233
+trait A234
+trait A235
+trait A236
+trait A237
+trait A238
+trait A239
+trait A240
+trait A241
+trait A242
+trait A243
+trait A244
+trait A245
+trait A246
+trait A247
+trait A248
+trait A249
+trait A250
+trait A251
+trait A252
+trait A253
+trait A254
+trait A255
+trait A256
+trait A257
+trait A258
+trait A259
+trait A260
+trait A261
+trait A262
+trait A263
+trait A264
+trait A265
+trait A266
+trait A267
+trait A268
+trait A269
+trait A270
+trait A271
+trait A272
+trait A273
+trait A274
+trait A275
+trait A276
+trait A277
+trait A278
+trait A279
+trait A280
+trait A281
+trait A282
+trait A283
+trait A284
+trait A285
+trait A286
+trait A287
+trait A288
+trait A289
+trait A290
+trait A291
+trait A292
+trait A293
+trait A294
+trait A295
+trait A296
+trait A297
+trait A298
+trait A299
+trait A300
+trait A301
+trait A302
+trait A303
+trait A304
+trait A305
+trait A306
+trait A307
+trait A308
+trait A309
+trait A310
+trait A311
+trait A312
+trait A313
+trait A314
+trait A315
+trait A316
+trait A317
+trait A318
+trait A319
+trait A320
+trait A321
+trait A322
+trait A323
+trait A324
+trait A325
+trait A326
+trait A327
+trait A328
+trait A329
+trait A330
+trait A331
+trait A332
+trait A333
+trait A334
+trait A335
+trait A336
+trait A337
+trait A338
+trait A339
+trait A340
+trait A341
+trait A342
+trait A343
+trait A344
+trait A345
+trait A346
+trait A347
+trait A348
+trait A349
+trait A350
+trait A351
+trait A352
+trait A353
+trait A354
+trait A355
+trait A356
+trait A357
+trait A358
+trait A359
+trait A360
+trait A361
+trait A362
+trait A363
+trait A364
+trait A365
+trait A366
+trait A367
+trait A368
+trait A369
+trait A370
+trait A371
+trait A372
+trait A373
+trait A374
+trait A375
+trait A376
+trait A377
+trait A378
+trait A379
+trait A380
+trait A381
+trait A382
+trait A383
+trait A384
+trait A385
+trait A386
+trait A387
+trait A388
+trait A389
+trait A390
+trait A391
+trait A392
+trait A393
+trait A394
+trait A395
+trait A396
+trait A397
+trait A398
+trait A399
+trait A400
+trait A401
+trait A402
+trait A403
+trait A404
+trait A405
+trait A406
+trait A407
+trait A408
+trait A409
+trait A410
+trait A411
+trait A412
+trait A413
+trait A414
+trait A415
+trait A416
+trait A417
+trait A418
+trait A419
+trait A420
+trait A421
+trait A422
+trait A423
+trait A424
+trait A425
+trait A426
+trait A427
+trait A428
+trait A429
+trait A430
+trait A431
+trait A432
+trait A433
+trait A434
+trait A435
+trait A436
+trait A437
+trait A438
+trait A439
+trait A440
+trait A441
+trait A442
+trait A443
+trait A444
+trait A445
+trait A446
+trait A447
+trait A448
+trait A449
+trait A450
+trait A451
+trait A452
+trait A453
+trait A454
+trait A455
+trait A456
+trait A457
+trait A458
+trait A459
+trait A460
+trait A461
+trait A462
+trait A463
+trait A464
+trait A465
+trait A466
+trait A467
+trait A468
+trait A469
+trait A470
+trait A471
+trait A472
+trait A473
+trait A474
+trait A475
+trait A476
+trait A477
+trait A478
+trait A479
+trait A480
+trait A481
+trait A482
+trait A483
+trait A484
+trait A485
+trait A486
+trait A487
+trait A488
+trait A489
+trait A490
+trait A491
+trait A492
+trait A493
+trait A494
+trait A495
+trait A496
+trait A497
+trait A498
+trait A499
+trait A500
+trait A501
+trait A502
+trait A503
+trait A504
+trait A505
+trait A506
+trait A507
+trait A508
+trait A509
+trait A510
+trait A511
+trait A512
+trait A513
+trait A514
+trait A515
+trait A516
+trait A517
+trait A518
+trait A519
+trait A520
+trait A521
+trait A522
+trait A523
+trait A524
+trait A525
+trait A526
+trait A527
+trait A528
+trait A529
+trait A530
+trait A531
+trait A532
+trait A533
+trait A534
+trait A535
+trait A536
+trait A537
+trait A538
+trait A539
+trait A540
+trait A541
+trait A542
+trait A543
+trait A544
+trait A545
+trait A546
+trait A547
+trait A548
+trait A549
+trait A550
+trait A551
+trait A552
+trait A553
+trait A554
+trait A555
+trait A556
+trait A557
+trait A558
+trait A559
+trait A560
+trait A561
+trait A562
+trait A563
+trait A564
+trait A565
+trait A566
+trait A567
+trait A568
+trait A569
+trait A570
+trait A571
+trait A572
+trait A573
+trait A574
+trait A575
+trait A576
+trait A577
+trait A578
+trait A579
+trait A580
+trait A581
+trait A582
+trait A583
+trait A584
+trait A585
+trait A586
+trait A587
+trait A588
+trait A589
+trait A590
+trait A591
+trait A592
+trait A593
+trait A594
+trait A595
+trait A596
+trait A597
+trait A598
+trait A599
+trait A600
+trait A601
+trait A602
+trait A603
+trait A604
+trait A605
+trait A606
+trait A607
+trait A608
+trait A609
+trait A610
+trait A611
+trait A612
+trait A613
+trait A614
+trait A615
+trait A616
+trait A617
+trait A618
+trait A619
+trait A620
+trait A621
+trait A622
+trait A623
+trait A624
+trait A625
+trait A626
+trait A627
+trait A628
+trait A629
+trait A630
+trait A631
+trait A632
+trait A633
+trait A634
+trait A635
+trait A636
+trait A637
+trait A638
+trait A639
+trait A640
+trait A641
+trait A642
+trait A643
+trait A644
+trait A645
+trait A646
+trait A647
+trait A648
+trait A649
+trait A650
+trait A651
+trait A652
+trait A653
+trait A654
+trait A655
+trait A656
+trait A657
+trait A658
+trait A659
+trait A660
+trait A661
+trait A662
+trait A663
+trait A664
+trait A665
+trait A666
+trait A667
+trait A668
+trait A669
+trait A670
+trait A671
+trait A672
+trait A673
+trait A674
+trait A675
+trait A676
+trait A677
+trait A678
+trait A679
+trait A680
+trait A681
+trait A682
+trait A683
+trait A684
+trait A685
+trait A686
+trait A687
+trait A688
+trait A689
+trait A690
+trait A691
+trait A692
+trait A693
+trait A694
+trait A695
+trait A696
+trait A697
+trait A698
+trait A699
+trait A700
+trait A701
+trait A702
+trait A703
+trait A704
+trait A705
+trait A706
+trait A707
+trait A708
+trait A709
+trait A710
+trait A711
+trait A712
+trait A713
+trait A714
+trait A715
+trait A716
+trait A717
+trait A718
+trait A719
+trait A720
+trait A721
+trait A722
+trait A723
+trait A724
+trait A725
+trait A726
+trait A727
+trait A728
+trait A729
+trait A730
+trait A731
+trait A732
+trait A733
+trait A734
+trait A735
+trait A736
+trait A737
+trait A738
+trait A739
+trait A740
+trait A741
+trait A742
+trait A743
+trait A744
+trait A745
+trait A746
+trait A747
+trait A748
+trait A749
+trait A750
+trait A751
+trait A752
+trait A753
+trait A754
+trait A755
+trait A756
+trait A757
+trait A758
+trait A759
+trait A760
+trait A761
+trait A762
+trait A763
+trait A764
+trait A765
+trait A766
+trait A767
+trait A768
+trait A769
+trait A770
+trait A771
+trait A772
+trait A773
+trait A774
+trait A775
+trait A776
+trait A777
+trait A778
+trait A779
+trait A780
+trait A781
+trait A782
+trait A783
+trait A784
+trait A785
+trait A786
+trait A787
+trait A788
+trait A789
+trait A790
+trait A791
+trait A792
+trait A793
+trait A794
+trait A795
+trait A796
+trait A797
+trait A798
+trait A799
+trait A800
+trait A801
+trait A802
+trait A803
+trait A804
+trait A805
+trait A806
+trait A807
+trait A808
+trait A809
+trait A810
+trait A811
+trait A812
+trait A813
+trait A814
+trait A815
+trait A816
+trait A817
+trait A818
+trait A819
+trait A820
+trait A821
+trait A822
+trait A823
+trait A824
+trait A825
+trait A826
+trait A827
+trait A828
+trait A829
+trait A830
+trait A831
+trait A832
+trait A833
+trait A834
+trait A835
+trait A836
+trait A837
+trait A838
+trait A839
+trait A840
+trait A841
+trait A842
+trait A843
+trait A844
+trait A845
+trait A846
+trait A847
+trait A848
+trait A849
+trait A850
+trait A851
+trait A852
+trait A853
+trait A854
+trait A855
+trait A856
+trait A857
+trait A858
+trait A859
+trait A860
+trait A861
+trait A862
+trait A863
+trait A864
+trait A865
+trait A866
+trait A867
+trait A868
+trait A869
+trait A870
+trait A871
+trait A872
+trait A873
+trait A874
+trait A875
+trait A876
+trait A877
+trait A878
+trait A879
+trait A880
+trait A881
+trait A882
+trait A883
+trait A884
+trait A885
+trait A886
+trait A887
+trait A888
+trait A889
+trait A890
+trait A891
+trait A892
+trait A893
+trait A894
+trait A895
+trait A896
+trait A897
+trait A898
+trait A899
+trait A900
+trait A901
+trait A902
+trait A903
+trait A904
+trait A905
+trait A906
+trait A907
+trait A908
+trait A909
+trait A910
+trait A911
+trait A912
+trait A913
+trait A914
+trait A915
+trait A916
+trait A917
+trait A918
+trait A919
+trait A920
+trait A921
+trait A922
+trait A923
+trait A924
+trait A925
+trait A926
+trait A927
+trait A928
+trait A929
+trait A930
+trait A931
+trait A932
+trait A933
+trait A934
+trait A935
+trait A936
+trait A937
+trait A938
+trait A939
+trait A940
+trait A941
+trait A942
+trait A943
+trait A944
+trait A945
+trait A946
+trait A947
+trait A948
+trait A949
+trait A950
+trait A951
+trait A952
+trait A953
+trait A954
+trait A955
+trait A956
+trait A957
+trait A958
+trait A959
+trait A960
+trait A961
+trait A962
+trait A963
+trait A964
+trait A965
+trait A966
+trait A967
+trait A968
+trait A969
+trait A970
+trait A971
+trait A972
+trait A973
+trait A974
+trait A975
+trait A976
+trait A977
+trait A978
+trait A979
+trait A980
+trait A981
+trait A982
+trait A983
+trait A984
+trait A985
+trait A986
+trait A987
+trait A988
+trait A989
+trait A990
+trait A991
+trait A992
+trait A993
+trait A994
+trait A995
+trait A996
+trait A997
+trait A998
+trait A999
+trait A1000
+trait A1001
+trait A1002
+trait A1003
+trait A1004
+trait A1005
+trait A1006
+trait A1007
+trait A1008
+trait A1009
+trait A1010
+trait A1011
+trait A1012
+trait A1013
+trait A1014
+trait A1015
+trait A1016
+trait A1017
+trait A1018
+trait A1019
+trait A1020
+trait A1021
+trait A1022
+trait A1023
+trait A1024
+trait A1025
+trait A1026
+trait A1027
+trait A1028
+trait A1029
+trait A1030
+trait A1031
+trait A1032
+trait A1033
+trait A1034
+trait A1035
+trait A1036
+trait A1037
+trait A1038
+trait A1039
+trait A1040
+trait A1041
+trait A1042
+trait A1043
+trait A1044
+trait A1045
+trait A1046
+trait A1047
+trait A1048
+trait A1049
+trait A1050
+trait A1051
+trait A1052
+trait A1053
+trait A1054
+trait A1055
+trait A1056
+trait A1057
+trait A1058
+trait A1059
+trait A1060
+trait A1061
+trait A1062
+trait A1063
+trait A1064
+trait A1065
+trait A1066
+trait A1067
+trait A1068
+trait A1069
+trait A1070
+trait A1071
+trait A1072
+trait A1073
+trait A1074
+trait A1075
+trait A1076
+trait A1077
+trait A1078
+trait A1079
+trait A1080
+trait A1081
+trait A1082
+trait A1083
+trait A1084
+trait A1085
+trait A1086
+trait A1087
+trait A1088
+trait A1089
+trait A1090
+trait A1091
+trait A1092
+trait A1093
+trait A1094
+trait A1095
+trait A1096
+trait A1097
+trait A1098
+trait A1099
+trait A1100
+trait A1101
+trait A1102
+trait A1103
+trait A1104
+trait A1105
+trait A1106
+trait A1107
+trait A1108
+trait A1109
+trait A1110
+trait A1111
+trait A1112
+trait A1113
+trait A1114
+trait A1115
+trait A1116
+trait A1117
+trait A1118
+trait A1119
+trait A1120
+trait A1121
+trait A1122
+trait A1123
+trait A1124
+trait A1125
+trait A1126
+trait A1127
+trait A1128
+trait A1129
+trait A1130
+trait A1131
+trait A1132
+trait A1133
+trait A1134
+trait A1135
+trait A1136
+trait A1137
+trait A1138
+trait A1139
+trait A1140
+trait A1141
+trait A1142
+trait A1143
+trait A1144
+trait A1145
+trait A1146
+trait A1147
+trait A1148
+trait A1149
+trait A1150
+trait A1151
+trait A1152
+trait A1153
+trait A1154
+trait A1155
+trait A1156
+trait A1157
+trait A1158
+trait A1159
+trait A1160
+trait A1161
+trait A1162
+trait A1163
+trait A1164
+trait A1165
+trait A1166
+trait A1167
+trait A1168
+trait A1169
+trait A1170
+trait A1171
+trait A1172
+trait A1173
+trait A1174
+trait A1175
+trait A1176
+trait A1177
+trait A1178
+trait A1179
+trait A1180
+trait A1181
+trait A1182
+trait A1183
+trait A1184
+trait A1185
+trait A1186
+trait A1187
+trait A1188
+trait A1189
+trait A1190
+trait A1191
+trait A1192
+trait A1193
+trait A1194
+trait A1195
+trait A1196
+trait A1197
+trait A1198
+trait A1199
+trait A1200
+trait A1201
+trait A1202
+trait A1203
+trait A1204
+trait A1205
+trait A1206
+trait A1207
+trait A1208
+trait A1209
+trait A1210
+trait A1211
+trait A1212
+trait A1213
+trait A1214
+trait A1215
+trait A1216
+trait A1217
+trait A1218
+trait A1219
+trait A1220
+trait A1221
+trait A1222
+trait A1223
+trait A1224
+trait A1225
+trait A1226
+trait A1227
+trait A1228
+trait A1229
+trait A1230
+trait A1231
+trait A1232
+trait A1233
+trait A1234
+trait A1235
+trait A1236
+trait A1237
+trait A1238
+trait A1239
+trait A1240
+trait A1241
+trait A1242
+trait A1243
+trait A1244
+trait A1245
+trait A1246
+trait A1247
+trait A1248
+trait A1249
+trait A1250
+trait A1251
+trait A1252
+trait A1253
+trait A1254
+trait A1255
+trait A1256
+trait A1257
+trait A1258
+trait A1259
+trait A1260
+trait A1261
+trait A1262
+trait A1263
+trait A1264
+trait A1265
+trait A1266
+trait A1267
+trait A1268
+trait A1269
+trait A1270
+trait A1271
+trait A1272
+trait A1273
+trait A1274
+trait A1275
+trait A1276
+trait A1277
+trait A1278
+trait A1279
+trait A1280
+trait A1281
+trait A1282
+trait A1283
+trait A1284
+trait A1285
+trait A1286
+trait A1287
+trait A1288
+trait A1289
+trait A1290
+trait A1291
+trait A1292
+trait A1293
+trait A1294
+trait A1295
+trait A1296
+trait A1297
+trait A1298
+trait A1299
+trait A1300
+trait A1301
+trait A1302
+trait A1303
+trait A1304
+trait A1305
+trait A1306
+trait A1307
+trait A1308
+trait A1309
+trait A1310
+trait A1311
+trait A1312
+trait A1313
+trait A1314
+trait A1315
+trait A1316
+trait A1317
+trait A1318
+trait A1319
+trait A1320
+trait A1321
+trait A1322
+trait A1323
+trait A1324
+trait A1325
+trait A1326
+trait A1327
+trait A1328
+trait A1329
+trait A1330
+trait A1331
+trait A1332
+trait A1333
+trait A1334
+trait A1335
+trait A1336
+trait A1337
+trait A1338
+trait A1339
+trait A1340
+trait A1341
+trait A1342
+trait A1343
+trait A1344
+trait A1345
+trait A1346
+trait A1347
+trait A1348
+trait A1349
+trait A1350
+trait A1351
+trait A1352
+trait A1353
+trait A1354
+trait A1355
+trait A1356
+trait A1357
+trait A1358
+trait A1359
+trait A1360
+trait A1361
+trait A1362
+trait A1363
+trait A1364
+trait A1365
+trait A1366
+trait A1367
+trait A1368
+trait A1369
+trait A1370
+trait A1371
+trait A1372
+trait A1373
+trait A1374
+trait A1375
+trait A1376
+trait A1377
+trait A1378
+trait A1379
+trait A1380
+trait A1381
+trait A1382
+trait A1383
+trait A1384
+trait A1385
+trait A1386
+trait A1387
+trait A1388
+trait A1389
+trait A1390
+trait A1391
+trait A1392
+trait A1393
+trait A1394
+trait A1395
+trait A1396
+trait A1397
+trait A1398
+trait A1399
+trait A1400
+trait A1401
+trait A1402
+trait A1403
+trait A1404
+trait A1405
+trait A1406
+trait A1407
+trait A1408
+trait A1409
+trait A1410
+trait A1411
+trait A1412
+trait A1413
+trait A1414
+trait A1415
+trait A1416
+trait A1417
+trait A1418
+trait A1419
+trait A1420
+trait A1421
+trait A1422
+trait A1423
+trait A1424
+trait A1425
+trait A1426
+trait A1427
+trait A1428
+trait A1429
+trait A1430
+trait A1431
+trait A1432
+trait A1433
+trait A1434
+trait A1435
+trait A1436
+trait A1437
+trait A1438
+trait A1439
+trait A1440
+trait A1441
+trait A1442
+trait A1443
+trait A1444
+trait A1445
+trait A1446
+trait A1447
+trait A1448
+trait A1449
+trait A1450
+trait A1451
+trait A1452
+trait A1453
+trait A1454
+trait A1455
+trait A1456
+trait A1457
+trait A1458
+trait A1459
+trait A1460
+trait A1461
+trait A1462
+trait A1463
+trait A1464
+trait A1465
+trait A1466
+trait A1467
+trait A1468
+trait A1469
+trait A1470
+trait A1471
+trait A1472
+trait A1473
+trait A1474
+trait A1475
+trait A1476
+trait A1477
+trait A1478
+trait A1479
+trait A1480
+trait A1481
+trait A1482
+trait A1483
+trait A1484
+trait A1485
+trait A1486
+trait A1487
+trait A1488
+trait A1489
+trait A1490
+trait A1491
+trait A1492
+trait A1493
+trait A1494
+trait A1495
+trait A1496
+trait A1497
+trait A1498
+trait A1499
+trait A1500
+trait A1501
+trait A1502
+trait A1503
+trait A1504
+trait A1505
+trait A1506
+trait A1507
+trait A1508
+trait A1509
+trait A1510
+trait A1511
+trait A1512
+trait A1513
+trait A1514
+trait A1515
+trait A1516
+trait A1517
+trait A1518
+trait A1519
+trait A1520
+trait A1521
+trait A1522
+trait A1523
+trait A1524
+trait A1525
+trait A1526
+trait A1527
+trait A1528
+trait A1529
+trait A1530
+trait A1531
+trait A1532
+trait A1533
+trait A1534
+trait A1535
+trait A1536
+trait A1537
+trait A1538
+trait A1539
+trait A1540
+trait A1541
+trait A1542
+trait A1543
+trait A1544
+trait A1545
+trait A1546
+trait A1547
+trait A1548
+trait A1549
+trait A1550
+trait A1551
+trait A1552
+trait A1553
+trait A1554
+trait A1555
+trait A1556
+trait A1557
+trait A1558
+trait A1559
+trait A1560
+trait A1561
+trait A1562
+trait A1563
+trait A1564
+trait A1565
+trait A1566
+trait A1567
+trait A1568
+trait A1569
+trait A1570
+trait A1571
+trait A1572
+trait A1573
+trait A1574
+trait A1575
+trait A1576
+trait A1577
+trait A1578
+trait A1579
+trait A1580
+trait A1581
+trait A1582
+trait A1583
+trait A1584
+trait A1585
+trait A1586
+trait A1587
+trait A1588
+trait A1589
+trait A1590
+trait A1591
+trait A1592
+trait A1593
+trait A1594
+trait A1595
+trait A1596
+trait A1597
+trait A1598
+trait A1599
+trait A1600
+trait A1601
+trait A1602
+trait A1603
+trait A1604
+trait A1605
+trait A1606
+trait A1607
+trait A1608
+trait A1609
+trait A1610
+trait A1611
+trait A1612
+trait A1613
+trait A1614
+trait A1615
+trait A1616
+trait A1617
+trait A1618
+trait A1619
+trait A1620
+trait A1621
+trait A1622
+trait A1623
+trait A1624
+trait A1625
+trait A1626
+trait A1627
+trait A1628
+trait A1629
+trait A1630
+trait A1631
+trait A1632
+trait A1633
+trait A1634
+trait A1635
+trait A1636
+trait A1637
+trait A1638
+trait A1639
+trait A1640
+trait A1641
+trait A1642
+trait A1643
+trait A1644
+trait A1645
+trait A1646
+trait A1647
+trait A1648
+trait A1649
+trait A1650
+trait A1651
+trait A1652
+trait A1653
+trait A1654
+trait A1655
+trait A1656
+trait A1657
+trait A1658
+trait A1659
+trait A1660
+trait A1661
+trait A1662
+trait A1663
+trait A1664
+trait A1665
+trait A1666
+trait A1667
+trait A1668
+trait A1669
+trait A1670
+trait A1671
+trait A1672
+trait A1673
+trait A1674
+trait A1675
+trait A1676
+trait A1677
+trait A1678
+trait A1679
+trait A1680
+trait A1681
+trait A1682
+trait A1683
+trait A1684
+trait A1685
+trait A1686
+trait A1687
+trait A1688
+trait A1689
+trait A1690
+trait A1691
+trait A1692
+trait A1693
+trait A1694
+trait A1695
+trait A1696
+trait A1697
+trait A1698
+trait A1699
+trait A1700
+trait A1701
+trait A1702
+trait A1703
+trait A1704
+trait A1705
+trait A1706
+trait A1707
+trait A1708
+trait A1709
+trait A1710
+trait A1711
+trait A1712
+trait A1713
+trait A1714
+trait A1715
+trait A1716
+trait A1717
+trait A1718
+trait A1719
+trait A1720
+trait A1721
+trait A1722
+trait A1723
+trait A1724
+trait A1725
+trait A1726
+trait A1727
+trait A1728
+trait A1729
+trait A1730
+trait A1731
+trait A1732
+trait A1733
+trait A1734
+trait A1735
+trait A1736
+trait A1737
+trait A1738
+trait A1739
+trait A1740
+trait A1741
+trait A1742
+trait A1743
+trait A1744
+trait A1745
+trait A1746
+trait A1747
+trait A1748
+trait A1749
+trait A1750
+trait A1751
+trait A1752
+trait A1753
+trait A1754
+trait A1755
+trait A1756
+trait A1757
+trait A1758
+trait A1759
+trait A1760
+trait A1761
+trait A1762
+trait A1763
+trait A1764
+trait A1765
+trait A1766
+trait A1767
+trait A1768
+trait A1769
+trait A1770
+trait A1771
+trait A1772
+trait A1773
+trait A1774
+trait A1775
+trait A1776
+trait A1777
+trait A1778
+trait A1779
+trait A1780
+trait A1781
+trait A1782
+trait A1783
+trait A1784
+trait A1785
+trait A1786
+trait A1787
+trait A1788
+trait A1789
+trait A1790
+trait A1791
+trait A1792
+trait A1793
+trait A1794
+trait A1795
+trait A1796
+trait A1797
+trait A1798
+trait A1799
+trait A1800
+trait A1801
+trait A1802
+trait A1803
+trait A1804
+trait A1805
+trait A1806
+trait A1807
+trait A1808
+trait A1809
+trait A1810
+trait A1811
+trait A1812
+trait A1813
+trait A1814
+trait A1815
+trait A1816
+trait A1817
+trait A1818
+trait A1819
+trait A1820
+trait A1821
+trait A1822
+trait A1823
+trait A1824
+trait A1825
+trait A1826
+trait A1827
+trait A1828
+trait A1829
+trait A1830
+trait A1831
+trait A1832
+trait A1833
+trait A1834
+trait A1835
+trait A1836
+trait A1837
+trait A1838
+trait A1839
+trait A1840
+trait A1841
+trait A1842
+trait A1843
+trait A1844
+trait A1845
+trait A1846
+trait A1847
+trait A1848
+trait A1849
+trait A1850
+trait A1851
+trait A1852
+trait A1853
+trait A1854
+trait A1855
+trait A1856
+trait A1857
+trait A1858
+trait A1859
+trait A1860
+trait A1861
+trait A1862
+trait A1863
+trait A1864
+trait A1865
+trait A1866
+trait A1867
+trait A1868
+trait A1869
+trait A1870
+trait A1871
+trait A1872
+trait A1873
+trait A1874
+trait A1875
+trait A1876
+trait A1877
+trait A1878
+trait A1879
+trait A1880
+trait A1881
+trait A1882
+trait A1883
+trait A1884
+trait A1885
+trait A1886
+trait A1887
+trait A1888
+trait A1889
+trait A1890
+trait A1891
+trait A1892
+trait A1893
+trait A1894
+trait A1895
+trait A1896
+trait A1897
+trait A1898
+trait A1899
+trait A1900
+trait A1901
+trait A1902
+trait A1903
+trait A1904
+trait A1905
+trait A1906
+trait A1907
+trait A1908
+trait A1909
+trait A1910
+trait A1911
+trait A1912
+trait A1913
+trait A1914
+trait A1915
+trait A1916
+trait A1917
+trait A1918
+trait A1919
+trait A1920
+trait A1921
+trait A1922
+trait A1923
+trait A1924
+trait A1925
+trait A1926
+trait A1927
+trait A1928
+trait A1929
+trait A1930
+trait A1931
+trait A1932
+trait A1933
+trait A1934
+trait A1935
+trait A1936
+trait A1937
+trait A1938
+trait A1939
+trait A1940
+trait A1941
+trait A1942
+trait A1943
+trait A1944
+trait A1945
+trait A1946
+trait A1947
+trait A1948
+trait A1949
+trait A1950
+trait A1951
+trait A1952
+trait A1953
+trait A1954
+trait A1955
+trait A1956
+trait A1957
+trait A1958
+trait A1959
+trait A1960
+trait A1961
+trait A1962
+trait A1963
+trait A1964
+trait A1965
+trait A1966
+trait A1967
+trait A1968
+trait A1969
+trait A1970
+trait A1971
+trait A1972
+trait A1973
+trait A1974
+trait A1975
+trait A1976
+trait A1977
+trait A1978
+trait A1979
+trait A1980
+trait A1981
+trait A1982
+trait A1983
+trait A1984
+trait A1985
+trait A1986
+trait A1987
+trait A1988
+trait A1989
+trait A1990
+trait A1991
+trait A1992
+trait A1993
+trait A1994
+trait A1995
+trait A1996
+trait A1997
+trait A1998
+trait A1999
+trait A2000
+trait A2001
+trait A2002
+trait A2003
+trait A2004
+trait A2005
+trait A2006
+trait A2007
+trait A2008
+trait A2009
+trait A2010
+trait A2011
+trait A2012
+trait A2013
+trait A2014
+trait A2015
+trait A2016
+trait A2017
+trait A2018
+trait A2019
+trait A2020
+trait A2021
+trait A2022
+trait A2023
+trait A2024
+trait A2025
+trait A2026
+trait A2027
+trait A2028
+trait A2029
+trait A2030
+trait A2031
+trait A2032
+trait A2033
+trait A2034
+trait A2035
+trait A2036
+trait A2037
+trait A2038
+trait A2039
+trait A2040
+trait A2041
+trait A2042
+trait A2043
+trait A2044
+trait A2045
+trait A2046
+trait A2047
+trait A2048
+trait A2049
+trait A2050
+trait A2051
+trait A2052
+trait A2053
+trait A2054
+trait A2055
+trait A2056
+trait A2057
+trait A2058
+trait A2059
+trait A2060
+trait A2061
+trait A2062
+trait A2063
+trait A2064
+trait A2065
+trait A2066
+trait A2067
+trait A2068
+trait A2069
+trait A2070
+trait A2071
+trait A2072
+trait A2073
+trait A2074
+trait A2075
+trait A2076
+trait A2077
+trait A2078
+trait A2079
+trait A2080
+trait A2081
+trait A2082
+trait A2083
+trait A2084
+trait A2085
+trait A2086
+trait A2087
+trait A2088
+trait A2089
+trait A2090
+trait A2091
+trait A2092
+trait A2093
+trait A2094
+trait A2095
+trait A2096
+trait A2097
+trait A2098
+trait A2099
+trait A2100
+trait A2101
+trait A2102
+trait A2103
+trait A2104
+trait A2105
+trait A2106
+trait A2107
+trait A2108
+trait A2109
+trait A2110
+trait A2111
+trait A2112
+trait A2113
+trait A2114
+trait A2115
+trait A2116
+trait A2117
+trait A2118
+trait A2119
+trait A2120
+trait A2121
+trait A2122
+trait A2123
+trait A2124
+trait A2125
+trait A2126
+trait A2127
+trait A2128
+trait A2129
+trait A2130
+trait A2131
+trait A2132
+trait A2133
+trait A2134
+trait A2135
+trait A2136
+trait A2137
+trait A2138
+trait A2139
+trait A2140
+trait A2141
+trait A2142
+trait A2143
+trait A2144
+trait A2145
+trait A2146
+trait A2147
+trait A2148
+trait A2149
+trait A2150
+trait A2151
+trait A2152
+trait A2153
+trait A2154
+trait A2155
+trait A2156
+trait A2157
+trait A2158
+trait A2159
+trait A2160
+trait A2161
+trait A2162
+trait A2163
+trait A2164
+trait A2165
+trait A2166
+trait A2167
+trait A2168
+trait A2169
+trait A2170
+trait A2171
+trait A2172
+trait A2173
+trait A2174
+trait A2175
+trait A2176
+trait A2177
+trait A2178
+trait A2179
+trait A2180
+trait A2181
+trait A2182
+trait A2183
+trait A2184
+trait A2185
+trait A2186
+trait A2187
+trait A2188
+trait A2189
+trait A2190
+trait A2191
+trait A2192
+trait A2193
+trait A2194
+trait A2195
+trait A2196
+trait A2197
+trait A2198
+trait A2199
+trait A2200
+trait A2201
+trait A2202
+trait A2203
+trait A2204
+trait A2205
+trait A2206
+trait A2207
+trait A2208
+trait A2209
+trait A2210
+trait A2211
+trait A2212
+trait A2213
+trait A2214
+trait A2215
+trait A2216
+trait A2217
+trait A2218
+trait A2219
+trait A2220
+trait A2221
+trait A2222
+trait A2223
+trait A2224
+trait A2225
+trait A2226
+trait A2227
+trait A2228
+trait A2229
+trait A2230
+trait A2231
+trait A2232
+trait A2233
+trait A2234
+trait A2235
+trait A2236
+trait A2237
+trait A2238
+trait A2239
+trait A2240
+trait A2241
+trait A2242
+trait A2243
+trait A2244
+trait A2245
+trait A2246
+trait A2247
+trait A2248
+trait A2249
+trait A2250
+trait A2251
+trait A2252
+trait A2253
+trait A2254
+trait A2255
+trait A2256
+trait A2257
+trait A2258
+trait A2259
+trait A2260
+trait A2261
+trait A2262
+trait A2263
+trait A2264
+trait A2265
+trait A2266
+trait A2267
+trait A2268
+trait A2269
+trait A2270
+trait A2271
+trait A2272
+trait A2273
+trait A2274
+trait A2275
+trait A2276
+trait A2277
+trait A2278
+trait A2279
+trait A2280
+trait A2281
+trait A2282
+trait A2283
+trait A2284
+trait A2285
+trait A2286
+trait A2287
+trait A2288
+trait A2289
+trait A2290
+trait A2291
+trait A2292
+trait A2293
+trait A2294
+trait A2295
+trait A2296
+trait A2297
+trait A2298
+trait A2299
+trait A2300
+trait A2301
+trait A2302
+trait A2303
+trait A2304
+trait A2305
+trait A2306
+trait A2307
+trait A2308
+trait A2309
+trait A2310
+trait A2311
+trait A2312
+trait A2313
+trait A2314
+trait A2315
+trait A2316
+trait A2317
+trait A2318
+trait A2319
+trait A2320
+trait A2321
+trait A2322
+trait A2323
+trait A2324
+trait A2325
+trait A2326
+trait A2327
+trait A2328
+trait A2329
+trait A2330
+trait A2331
+trait A2332
+trait A2333
+trait A2334
+trait A2335
+trait A2336
+trait A2337
+trait A2338
+trait A2339
+trait A2340
+trait A2341
+trait A2342
+trait A2343
+trait A2344
+trait A2345
+trait A2346
+trait A2347
+trait A2348
+trait A2349
+trait A2350
+trait A2351
+trait A2352
+trait A2353
+trait A2354
+trait A2355
+trait A2356
+trait A2357
+trait A2358
+trait A2359
+trait A2360
+trait A2361
+trait A2362
+trait A2363
+trait A2364
+trait A2365
+trait A2366
+trait A2367
+trait A2368
+trait A2369
+trait A2370
+trait A2371
+trait A2372
+trait A2373
+trait A2374
+trait A2375
+trait A2376
+trait A2377
+trait A2378
+trait A2379
+trait A2380
+trait A2381
+trait A2382
+trait A2383
+trait A2384
+trait A2385
+trait A2386
+trait A2387
+trait A2388
+trait A2389
+trait A2390
+trait A2391
+trait A2392
+trait A2393
+trait A2394
+trait A2395
+trait A2396
+trait A2397
+trait A2398
+trait A2399
+trait A2400
+trait A2401
+trait A2402
+trait A2403
+trait A2404
+trait A2405
+trait A2406
+trait A2407
+trait A2408
+trait A2409
+trait A2410
+trait A2411
+trait A2412
+trait A2413
+trait A2414
+trait A2415
+trait A2416
+trait A2417
+trait A2418
+trait A2419
+trait A2420
+trait A2421
+trait A2422
+trait A2423
+trait A2424
+trait A2425
+trait A2426
+trait A2427
+trait A2428
+trait A2429
+trait A2430
+trait A2431
+trait A2432
+trait A2433
+trait A2434
+trait A2435
+trait A2436
+trait A2437
+trait A2438
+trait A2439
+trait A2440
+trait A2441
+trait A2442
+trait A2443
+trait A2444
+trait A2445
+trait A2446
+trait A2447
+trait A2448
+trait A2449
+trait A2450
+trait A2451
+trait A2452
+trait A2453
+trait A2454
+trait A2455
+trait A2456
+trait A2457
+trait A2458
+trait A2459
+trait A2460
+trait A2461
+trait A2462
+trait A2463
+trait A2464
+trait A2465
+trait A2466
+trait A2467
+trait A2468
+trait A2469
+trait A2470
+trait A2471
+trait A2472
+trait A2473
+trait A2474
+trait A2475
+trait A2476
+trait A2477
+trait A2478
+trait A2479
+trait A2480
+trait A2481
+trait A2482
+trait A2483
+trait A2484
+trait A2485
+trait A2486
+trait A2487
+trait A2488
+trait A2489
+trait A2490
+trait A2491
+trait A2492
+trait A2493
+trait A2494
+trait A2495
+trait A2496
+trait A2497
+trait A2498
+trait A2499
+trait A2500
+trait A2501
+trait A2502
+trait A2503
+trait A2504
+trait A2505
+trait A2506
+trait A2507
+trait A2508
+trait A2509
+trait A2510
+trait A2511
+trait A2512
+trait A2513
+trait A2514
+trait A2515
+trait A2516
+trait A2517
+trait A2518
+trait A2519
+trait A2520
+trait A2521
+trait A2522
+trait A2523
+trait A2524
+trait A2525
+trait A2526
+trait A2527
+trait A2528
+trait A2529
+trait A2530
+trait A2531
+trait A2532
+trait A2533
+trait A2534
+trait A2535
+trait A2536
+trait A2537
+trait A2538
+trait A2539
+trait A2540
+trait A2541
+trait A2542
+trait A2543
+trait A2544
+trait A2545
+trait A2546
+trait A2547
+trait A2548
+trait A2549
+trait A2550
+trait A2551
+trait A2552
+trait A2553
+trait A2554
+trait A2555
+trait A2556
+trait A2557
+trait A2558
+trait A2559
+trait A2560
+trait A2561
+trait A2562
+trait A2563
+trait A2564
+trait A2565
+trait A2566
+trait A2567
+trait A2568
+trait A2569
+trait A2570
+trait A2571
+trait A2572
+trait A2573
+trait A2574
+trait A2575
+trait A2576
+trait A2577
+trait A2578
+trait A2579
+trait A2580
+trait A2581
+trait A2582
+trait A2583
+trait A2584
+trait A2585
+trait A2586
+trait A2587
+trait A2588
+trait A2589
+trait A2590
+trait A2591
+trait A2592
+trait A2593
+trait A2594
+trait A2595
+trait A2596
+trait A2597
+trait A2598
+trait A2599
+trait A2600
+trait A2601
+trait A2602
+trait A2603
+trait A2604
+trait A2605
+trait A2606
+trait A2607
+trait A2608
+trait A2609
+trait A2610
+trait A2611
+trait A2612
+trait A2613
+trait A2614
+trait A2615
+trait A2616
+trait A2617
+trait A2618
+trait A2619
+trait A2620
+trait A2621
+trait A2622
+trait A2623
+trait A2624
+trait A2625
+trait A2626
+trait A2627
+trait A2628
+trait A2629
+trait A2630
+trait A2631
+trait A2632
+trait A2633
+trait A2634
+trait A2635
+trait A2636
+trait A2637
+trait A2638
+trait A2639
+trait A2640
+trait A2641
+trait A2642
+trait A2643
+trait A2644
+trait A2645
+trait A2646
+trait A2647
+trait A2648
+trait A2649
+trait A2650
+trait A2651
+trait A2652
+trait A2653
+trait A2654
+trait A2655
+trait A2656
+trait A2657
+trait A2658
+trait A2659
+trait A2660
+trait A2661
+trait A2662
+trait A2663
+trait A2664
+trait A2665
+trait A2666
+trait A2667
+trait A2668
+trait A2669
+trait A2670
+trait A2671
+trait A2672
+trait A2673
+trait A2674
+trait A2675
+trait A2676
+trait A2677
+trait A2678
+trait A2679
+trait A2680
+trait A2681
+trait A2682
+trait A2683
+trait A2684
+trait A2685
+trait A2686
+trait A2687
+trait A2688
+trait A2689
+trait A2690
+trait A2691
+trait A2692
+trait A2693
+trait A2694
+trait A2695
+trait A2696
+trait A2697
+trait A2698
+trait A2699
+trait A2700
+trait A2701
+trait A2702
+trait A2703
+trait A2704
+trait A2705
+trait A2706
+trait A2707
+trait A2708
+trait A2709
+trait A2710
+trait A2711
+trait A2712
+trait A2713
+trait A2714
+trait A2715
+trait A2716
+trait A2717
+trait A2718
+trait A2719
+trait A2720
+trait A2721
+trait A2722
+trait A2723
+trait A2724
+trait A2725
+trait A2726
+trait A2727
+trait A2728
+trait A2729
+trait A2730
+trait A2731
+trait A2732
+trait A2733
+trait A2734
+trait A2735
+trait A2736
+trait A2737
+trait A2738
+trait A2739
+trait A2740
+trait A2741
+trait A2742
+trait A2743
+trait A2744
+trait A2745
+trait A2746
+trait A2747
+trait A2748
+trait A2749
+trait A2750
+trait A2751
+trait A2752
+trait A2753
+trait A2754
+trait A2755
+trait A2756
+trait A2757
+trait A2758
+trait A2759
+trait A2760
+trait A2761
+trait A2762
+trait A2763
+trait A2764
+trait A2765
+trait A2766
+trait A2767
+trait A2768
+trait A2769
+trait A2770
+trait A2771
+trait A2772
+trait A2773
+trait A2774
+trait A2775
+trait A2776
+trait A2777
+trait A2778
+trait A2779
+trait A2780
+trait A2781
+trait A2782
+trait A2783
+trait A2784
+trait A2785
+trait A2786
+trait A2787
+trait A2788
+trait A2789
+trait A2790
+trait A2791
+trait A2792
+trait A2793
+trait A2794
+trait A2795
+trait A2796
+trait A2797
+trait A2798
+trait A2799
+trait A2800
+trait A2801
+trait A2802
+trait A2803
+trait A2804
+trait A2805
+trait A2806
+trait A2807
+trait A2808
+trait A2809
+trait A2810
+trait A2811
+trait A2812
+trait A2813
+trait A2814
+trait A2815
+trait A2816
+trait A2817
+trait A2818
+trait A2819
+trait A2820
+trait A2821
+trait A2822
+trait A2823
+trait A2824
+trait A2825
+trait A2826
+trait A2827
+trait A2828
+trait A2829
+trait A2830
+trait A2831
+trait A2832
+trait A2833
+trait A2834
+trait A2835
+trait A2836
+trait A2837
+trait A2838
+trait A2839
+trait A2840
+trait A2841
+trait A2842
+trait A2843
+trait A2844
+trait A2845
+trait A2846
+trait A2847
+trait A2848
+trait A2849
+trait A2850
+trait A2851
+trait A2852
+trait A2853
+trait A2854
+trait A2855
+trait A2856
+trait A2857
+trait A2858
+trait A2859
+trait A2860
+trait A2861
+trait A2862
+trait A2863
+trait A2864
+trait A2865
+trait A2866
+trait A2867
+trait A2868
+trait A2869
+trait A2870
+trait A2871
+trait A2872
+trait A2873
+trait A2874
+trait A2875
+trait A2876
+trait A2877
+trait A2878
+trait A2879
+trait A2880
+trait A2881
+trait A2882
+trait A2883
+trait A2884
+trait A2885
+trait A2886
+trait A2887
+trait A2888
+trait A2889
+trait A2890
+trait A2891
+trait A2892
+trait A2893
+trait A2894
+trait A2895
+trait A2896
+trait A2897
+trait A2898
+trait A2899
+trait A2900
+trait A2901
+trait A2902
+trait A2903
+trait A2904
+trait A2905
+trait A2906
+trait A2907
+trait A2908
+trait A2909
+trait A2910
+trait A2911
+trait A2912
+trait A2913
+trait A2914
+trait A2915
+trait A2916
+trait A2917
+trait A2918
+trait A2919
+trait A2920
+trait A2921
+trait A2922
+trait A2923
+trait A2924
+trait A2925
+trait A2926
+trait A2927
+trait A2928
+trait A2929
+trait A2930
+trait A2931
+trait A2932
+trait A2933
+trait A2934
+trait A2935
+trait A2936
+trait A2937
+trait A2938
+trait A2939
+trait A2940
+trait A2941
+trait A2942
+trait A2943
+trait A2944
+trait A2945
+trait A2946
+trait A2947
+trait A2948
+trait A2949
+trait A2950
+trait A2951
+trait A2952
+trait A2953
+trait A2954
+trait A2955
+trait A2956
+trait A2957
+trait A2958
+trait A2959
+trait A2960
+trait A2961
+trait A2962
+trait A2963
+trait A2964
+trait A2965
+trait A2966
+trait A2967
+trait A2968
+trait A2969
+trait A2970
+trait A2971
+trait A2972
+trait A2973
+trait A2974
+trait A2975
+trait A2976
+trait A2977
+trait A2978
+trait A2979
+trait A2980
+trait A2981
+trait A2982
+trait A2983
+trait A2984
+trait A2985
+trait A2986
+trait A2987
+trait A2988
+trait A2989
+trait A2990
+trait A2991
+trait A2992
+trait A2993
+trait A2994
+trait A2995
+trait A2996
+trait A2997
+trait A2998
+trait A2999
+trait A3000
+}
+object Test extends App {
+ new Foo1{}
+}
diff --git a/test/files/jvm/bug676.check b/test/files/jvm/t676.check
similarity index 100%
rename from test/files/jvm/bug676.check
rename to test/files/jvm/t676.check
diff --git a/test/files/jvm/bug676.scala b/test/files/jvm/t676.scala
similarity index 100%
rename from test/files/jvm/bug676.scala
rename to test/files/jvm/t676.scala
diff --git a/test/files/jvm/bug680.check b/test/files/jvm/t680.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/jvm/t680.check
diff --git a/test/files/jvm/bug680.scala b/test/files/jvm/t680.scala
similarity index 100%
rename from test/files/jvm/bug680.scala
rename to test/files/jvm/t680.scala
diff --git a/test/files/jvm/t6941.check b/test/files/jvm/t6941.check
new file mode 100644
index 0000000..43f53ab
--- /dev/null
+++ b/test/files/jvm/t6941.check
@@ -0,0 +1 @@
+bytecode identical
diff --git a/test/files/jvm/t6941.flags b/test/files/jvm/t6941.flags
new file mode 100644
index 0000000..49d036a
--- /dev/null
+++ b/test/files/jvm/t6941.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/jvm/t6941/Analyzed_1.scala b/test/files/jvm/t6941/Analyzed_1.scala
new file mode 100644
index 0000000..549abd5
--- /dev/null
+++ b/test/files/jvm/t6941/Analyzed_1.scala
@@ -0,0 +1,11 @@
+// this class's bytecode, compiled under -optimize is analyzed by the test
+// method a's bytecode should be identical to method b's bytecode
+class SameBytecode {
+ def a(xs: List[Int]) = xs match {
+ case x :: _ => x
+ }
+
+ def b(xs: List[Int]) = xs match {
+ case xs: ::[Int] => xs.hd$1
+ }
+}
\ No newline at end of file
diff --git a/test/files/jvm/t6941/test.scala b/test/files/jvm/t6941/test.scala
new file mode 100644
index 0000000..248617f
--- /dev/null
+++ b/test/files/jvm/t6941/test.scala
@@ -0,0 +1,15 @@
+import scala.tools.partest.BytecodeTest
+
+import scala.tools.nsc.util.JavaClassPath
+import java.io.InputStream
+import scala.tools.asm
+import asm.ClassReader
+import asm.tree.{ClassNode, InsnList}
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("SameBytecode")
+ similarBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"), equalsModuloVar)
+ }
+}
diff --git a/test/files/jvm/t7146.check b/test/files/jvm/t7146.check
new file mode 100644
index 0000000..7c76040
--- /dev/null
+++ b/test/files/jvm/t7146.check
@@ -0,0 +1,5 @@
+should be scala.concurrent.impl.ExecutionContextImpl == true
+should be scala.concurrent.forkjoin.ForkJoinPool == true
+should have non-null UncaughtExceptionHandler == true
+should be a scala.concurrent.impl.ExecutionContextImpl UncaughtExceptionHandler == true
+should just print out on uncaught == true
diff --git a/test/files/jvm/t7146.scala b/test/files/jvm/t7146.scala
new file mode 100644
index 0000000..2bd03d6
--- /dev/null
+++ b/test/files/jvm/t7146.scala
@@ -0,0 +1,23 @@
+import java.util.concurrent.Executor
+import scala.concurrent._
+import scala.util.control.NoStackTrace
+
+object Test {
+ def main(args: Array[String]) {
+ println("should be scala.concurrent.impl.ExecutionContextImpl == " +
+ ExecutionContext.global.toString.startsWith("scala.concurrent.impl.ExecutionContextImpl"))
+ val i = ExecutionContext.global.asInstanceOf[{ def executor: Executor }]
+ println("should be scala.concurrent.forkjoin.ForkJoinPool == " +
+ i.executor.toString.startsWith("scala.concurrent.forkjoin.ForkJoinPool"))
+ val u = i.executor.
+ asInstanceOf[{ def getUncaughtExceptionHandler: Thread.UncaughtExceptionHandler }].
+ getUncaughtExceptionHandler
+ println("should have non-null UncaughtExceptionHandler == " + (u ne null))
+ println("should be a scala.concurrent.impl.ExecutionContextImpl UncaughtExceptionHandler == " +
+ u.toString.startsWith("scala.concurrent.impl.ExecutionContextImpl"))
+ print("should just print out on uncaught == ")
+ u.uncaughtException(Thread.currentThread, new Throwable {
+ override def printStackTrace() { println("true") }
+ })
+ }
+}
diff --git a/test/files/jvm/t7253.check b/test/files/jvm/t7253.check
new file mode 100644
index 0000000..43f53ab
--- /dev/null
+++ b/test/files/jvm/t7253.check
@@ -0,0 +1 @@
+bytecode identical
diff --git a/test/files/jvm/t7253/Base_1.scala b/test/files/jvm/t7253/Base_1.scala
new file mode 100644
index 0000000..a531ebb
--- /dev/null
+++ b/test/files/jvm/t7253/Base_1.scala
@@ -0,0 +1,5 @@
+trait A { def f(): Int }
+trait B1 extends A
+abstract class B2 extends A
+class B3 extends A { def f(): Int = 1 }
+class B4 extends B3
diff --git a/test/files/jvm/t7253/JavaClient_1.java b/test/files/jvm/t7253/JavaClient_1.java
new file mode 100644
index 0000000..43475de
--- /dev/null
+++ b/test/files/jvm/t7253/JavaClient_1.java
@@ -0,0 +1,9 @@
+public class JavaClient_1 {
+ int foo() {
+ ((A) null).f();
+ ((B1) null).f();
+ ((B2) null).f();
+ ((B3) null).f();
+ return ((B4) null).f();
+ }
+}
diff --git a/test/files/jvm/t7253/ScalaClient_1.scala b/test/files/jvm/t7253/ScalaClient_1.scala
new file mode 100644
index 0000000..d244b32
--- /dev/null
+++ b/test/files/jvm/t7253/ScalaClient_1.scala
@@ -0,0 +1,9 @@
+class ScalaClient_1 {
+ def foo() = {
+ (null: A).f()
+ (null: B1).f()
+ (null: B2).f()
+ (null: B3).f()
+ (null: B4).f()
+ }
+}
diff --git a/test/files/jvm/t7253/test.scala b/test/files/jvm/t7253/test.scala
new file mode 100644
index 0000000..7fe08e8
--- /dev/null
+++ b/test/files/jvm/t7253/test.scala
@@ -0,0 +1,28 @@
+import scala.tools.partest.BytecodeTest
+
+import scala.tools.nsc.util.JavaClassPath
+import java.io.InputStream
+import scala.tools.asm
+import asm.ClassReader
+import asm.tree.{ClassNode, InsnList}
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ import instructions._
+
+ def show: Unit = {
+ val instrBaseSeqs = Seq("ScalaClient_1", "JavaClient_1") map (name => instructions.fromMethod(getMethod(loadClassNode(name), "foo")))
+ val instrSeqs = instrBaseSeqs map (_ filter isInvoke)
+ cmpInstructions(instrSeqs(0), instrSeqs(1))
+ }
+
+ def cmpInstructions(isa: List[Instruction], isb: List[Instruction]) = {
+ if (isa == isb) println("bytecode identical")
+ else diffInstructions(isa, isb)
+ }
+
+ def isInvoke(node: Instruction): Boolean = {
+ val opcode = node.opcode
+ (opcode == "INVOKEVIRTUAL") || (opcode == "INVOKEINTERFACE")
+ }
+}
diff --git a/test/files/jvm/throws-annot-from-java.check b/test/files/jvm/throws-annot-from-java.check
new file mode 100644
index 0000000..be3ba41
--- /dev/null
+++ b/test/files/jvm/throws-annot-from-java.check
@@ -0,0 +1,47 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> :power
+** Power User mode enabled - BEEP WHIR GYVE **
+** :phase has been set to 'typer'. **
+** scala.tools.nsc._ has been imported **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
+
+scala> :paste
+// Entering paste mode (ctrl-D to finish)
+
+{
+ val clazz = rootMirror.getClassByName(newTermName("test.ThrowsDeclaration_2"));
+ {
+ val method = clazz.info.member(newTermName("foo"))
+ val throwsAnn = method.annotations.head
+ val atp = throwsAnn.atp
+ println("foo")
+ println("atp.typeParams.isEmpty: " + atp.typeParams.isEmpty)
+ println(throwsAnn)
+ }
+ println
+
+ {
+ val method = clazz.info.member(newTermName("bar"))
+ val throwsAnn = method.annotations.head
+ val Literal(const) = throwsAnn.args.head
+ val tp = const.typeValue
+ println("bar")
+ println("tp.typeParams.isEmpty: " + tp.typeParams.isEmpty)
+ println(throwsAnn)
+ }
+}
+
+// Exiting paste mode, now interpreting.
+
+foo
+atp.typeParams.isEmpty: true
+throws[IllegalStateException](classOf[java.lang.IllegalStateException])
+
+bar
+tp.typeParams.isEmpty: true
+throws[test.PolymorphicException[_]](classOf[test.PolymorphicException])
+
+scala>
diff --git a/test/files/jvm/throws-annot-from-java/PolymorphicException_1.scala b/test/files/jvm/throws-annot-from-java/PolymorphicException_1.scala
new file mode 100644
index 0000000..58fa536
--- /dev/null
+++ b/test/files/jvm/throws-annot-from-java/PolymorphicException_1.scala
@@ -0,0 +1,3 @@
+package test
+
+class PolymorphicException[T] extends Exception
diff --git a/test/files/jvm/throws-annot-from-java/Test_3.scala b/test/files/jvm/throws-annot-from-java/Test_3.scala
new file mode 100644
index 0000000..de1d984
--- /dev/null
+++ b/test/files/jvm/throws-annot-from-java/Test_3.scala
@@ -0,0 +1,29 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """:power
+:paste
+{
+ val clazz = rootMirror.getClassByName(newTermName("test.ThrowsDeclaration_2"));
+ {
+ val method = clazz.info.member(newTermName("foo"))
+ val throwsAnn = method.annotations.head
+ val atp = throwsAnn.atp
+ println("foo")
+ println("atp.typeParams.isEmpty: " + atp.typeParams.isEmpty)
+ println(throwsAnn)
+ }
+ println
+
+ {
+ val method = clazz.info.member(newTermName("bar"))
+ val throwsAnn = method.annotations.head
+ val Literal(const) = throwsAnn.args.head
+ val tp = const.typeValue
+ println("bar")
+ println("tp.typeParams.isEmpty: " + tp.typeParams.isEmpty)
+ println(throwsAnn)
+ }
+}
+"""
+}
diff --git a/test/files/jvm/throws-annot-from-java/ThrowsDeclaration_2.java b/test/files/jvm/throws-annot-from-java/ThrowsDeclaration_2.java
new file mode 100644
index 0000000..3708fe6
--- /dev/null
+++ b/test/files/jvm/throws-annot-from-java/ThrowsDeclaration_2.java
@@ -0,0 +1,6 @@
+package test;
+
+public class ThrowsDeclaration_2 {
+ public void foo() throws IllegalStateException {};
+ public void bar() throws PolymorphicException {};
+}
diff --git a/test/files/jvm/try-type-tests.scala b/test/files/jvm/try-type-tests.scala
new file mode 100644
index 0000000..17811f6
--- /dev/null
+++ b/test/files/jvm/try-type-tests.scala
@@ -0,0 +1,144 @@
+import scala.util.{Try, Success, Failure}
+
+// tests the basic combinators on Try
+trait TryStandard {
+
+ def testForeachSuccess(): Unit = {
+ val t = Success(1)
+ var res = 0
+ t.foreach(x => res = x * 10)
+ assert(res == 10)
+ }
+
+ def testForeachFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ t.foreach(x => assert(false))
+ }
+
+ def testFlatMapSuccess(): Unit = {
+ val t = Success(1)
+ val n = t.flatMap(x => Try(x * 10))
+ assert(n.get == 10)
+ }
+
+ def testFlatMapFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ val n = t.flatMap{ x => assert(false); Try() }
+ }
+
+ def testMapSuccess(): Unit = {
+ val t = Success(1)
+ val n = t.map(x => x * 10)
+ assert(n.get == 10)
+ }
+
+ def testMapFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ val n = t.map(x => assert(false))
+ }
+
+ def testFilterSuccessTrue(): Unit = {
+ val t = Success(1)
+ val n = t.filter(x => x > 0)
+ assert(n.get == 1)
+ }
+
+ def testFilterSuccessFalse(): Unit = {
+ val t = Success(1)
+ val n = t.filter(x => x < 0)
+ n match {
+ case Success(v) => assert(false)
+ case Failure(e: NoSuchElementException) => assert(true)
+ }
+ }
+
+ def testFilterFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ val n = t.filter{ x => assert(false); true }
+ }
+
+ def testRescueSuccess(): Unit = {
+ val t = Success(1)
+ t.recoverWith{ case x => assert(false); Try() }
+ }
+
+ def testRescueFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ val n = t.recoverWith{ case x => Try(1) }
+ assert(n.get == 1)
+ }
+
+ def testRecoverSuccess(): Unit = {
+ val t = Success(1)
+ t.recover{ case x => assert(false); 99 }
+ }
+
+ def testRecoverFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ val n = t.recover{ case x => 1 }
+ assert(n.get == 1)
+ }
+
+ def testFlattenSuccess(): Unit = {
+ val f = Failure(new Exception("foo"))
+ val t = Success(f)
+ assert(t.flatten == f)
+ }
+
+ def testFailedSuccess(): Unit = {
+ val t = Success(1)
+ val n = t.failed
+ n match {
+ case Failure(e: UnsupportedOperationException) => assert(true)
+ case _ => assert(false)
+ }
+ }
+
+ def testFailedFailure(): Unit = {
+ val t = Failure(new Exception("foo"))
+ val n = t.failed
+ n match {
+ case Success(e: Exception) => assert(true)
+ case _ => assert(false)
+ }
+ }
+
+ def testSuccessTransform(): Unit = {
+ val s = Success(1)
+ val succ = (x: Int) => Success(x * 10)
+ val fail = (x: Throwable) => Success(0)
+ assert(s.transform(succ, fail).get == 10)
+ }
+
+ def testFailureTransform(): Unit = {
+ val f = Failure(new Exception("foo"))
+ val succ = (x: Int) => Success(x * 10)
+ val fail = (x: Throwable) => Success(0)
+ assert(f.transform(succ, fail).get == 0)
+ }
+
+ testForeachSuccess()
+ testForeachFailure()
+ testFlatMapSuccess()
+ testFlatMapFailure()
+ testMapSuccess()
+ testMapFailure()
+ testFilterSuccessTrue()
+ testFilterSuccessFalse()
+ testFilterFailure()
+ testRescueSuccess()
+ testRescueFailure()
+ testRecoverSuccess()
+ testRecoverFailure()
+ testFlattenSuccess()
+ testFailedSuccess()
+ testFailedFailure()
+ testSuccessTransform()
+ testFailureTransform()
+}
+
+object Test
+extends App
+with TryStandard {
+ System.exit(0)
+}
\ No newline at end of file
diff --git a/test/files/jvm/typerep.scala b/test/files/jvm/typerep.scala
index 49a216c..3befc7f 100644
--- a/test/files/jvm/typerep.scala
+++ b/test/files/jvm/typerep.scala
@@ -161,7 +161,7 @@ object TypeRep {
}).asInstanceOf[TypeRep[Option[A]]]
def getType[A](x: List[A])(implicit rep: TypeRep[A]): TypeRep[List[A]] = (x match {
- case h :: t => ListRep(getType(h))
+ case h :: t => ListRep(rep)
case Nil => NilRep
}).asInstanceOf[TypeRep[List[A]]]
diff --git a/test/files/jvm/unittest_io_Jvm.check b/test/files/jvm/unittest_io_Jvm.check
new file mode 100644
index 0000000..d6e855f
--- /dev/null
+++ b/test/files/jvm/unittest_io_Jvm.check
@@ -0,0 +1,6 @@
+lines.size = 5
+
+This is a file
+it is split on several lines.
+
+isn't it?
diff --git a/test/files/jvm/unittest_io_Jvm.scala b/test/files/jvm/unittest_io_Jvm.scala
index 42c793f..1484774 100644
--- a/test/files/jvm/unittest_io_Jvm.scala
+++ b/test/files/jvm/unittest_io_Jvm.scala
@@ -1,24 +1,15 @@
-import scala.testing.SUnit._
import scala.io.Source
-object Test extends TestConsoleMain {
-
- def suite = new TestSuite(
- new ReadlinesTest
- )
-
- class ReadlinesTest extends TestCase("scala.io.Source method getLines()") {
-
- val src = Source.fromString("""
-This is a file
-it is split on several lines.
-
-isn't it?
-""")
- def runTest() = assertEquals("wrong number of lines",src.getLines.toList.length,5) // five new lines in there
- //for (line <- src.getLines) {
- // Console.print(line)
- //}
+object Test {
+ def main(args: Array[String]) {
+ val lines = Source.fromString(
+ """|
+ |This is a file
+ |it is split on several lines.
+ |
+ |isn't it?
+ |""".stripMargin).getLines.toList
+ println("lines.size = " + lines.size)
+ lines.foreach(println)
}
-
}
diff --git a/test/files/jvm/unittest_xml.scala b/test/files/jvm/unittest_xml.scala
index 1c36e74..106334e 100644
--- a/test/files/jvm/unittest_xml.scala
+++ b/test/files/jvm/unittest_xml.scala
@@ -1,23 +1,25 @@
+import scala.xml.{ MetaData, Null, Utility, PrefixedAttribute, UnprefixedAttribute }
object Test {
- import scala.testing.SUnit._
- import scala.xml.{MetaData, Null, Utility, PrefixedAttribute, UnprefixedAttribute }
+ def main(args:Array[String]) = {
+ MetaDataTest.run()
+ UtilityTest.run()
+ }
- class MetaDataTest extends TestCase("scala.xml.MetaData") with Assert {
+ object MetaDataTest {
- import scala.xml.{HasKeyValue, TopScope, NamespaceBinding, Node, Atom, Text }
+ import scala.xml.{ TopScope, NamespaceBinding, Node, Atom, Text }
- def domatch(x:Node): Node = {
- val hasBar = new HasKeyValue("bar")
- x match {
- //case Node("foo", hasBar(z), _*) => z
- case Node("foo", md, _*) if !hasBar.unapplySeq(md).isEmpty =>
+ def domatch(x:Node): Node = {
+ x match {
+ case Node("foo", md @ UnprefixedAttribute(_, value, _), _*) if !value.isEmpty =>
md("bar")(0)
- case _ => new Atom(3)
- }
- }
- override def runTest = {
+ case _ => new Atom(3)
+ }
+ }
+
+ def run() {
var x: MetaData = Null
var s: NamespaceBinding = TopScope
@@ -25,85 +27,75 @@ object Test {
// testing method def apply(uri:String, scp:NamespaceBinding, k:String): Seq[Node]
// def apply(k:String): Seq[Node]
- assertEquals("absent element (prefixed) 1", null, x("za://foo.com", s, "bar" ))
- assertEquals("absent element (unprefix) 1", null, x("bar"))
+ assert(null == x("za://foo.com", s, "bar" ), "absent element (prefixed) 1")
+ assert(null == x("bar"), "absent element (unprefix) 1")
- assertEquals("absent element (prefixed) 2", None, x.get("za://foo.com", s, "bar" ))
- assertEquals("absent element (unprefix) 2", None, x.get("bar"))
+ assert(None == x.get("za://foo.com", s, "bar" ), "absent element (prefixed) 2")
+ assert(None == x.get("bar"), "absent element (unprefix) 2")
x = new PrefixedAttribute("zo","bar", new Atom(42), x)
s = new NamespaceBinding("zo","za://foo.com",s)
- assertEquals("present element (prefixed) 3", new Atom(42), x("za://foo.com", s, "bar" ))
- assertEquals("present element (unprefix) 3", null, x("bar"))
+ assert(new Atom(42) == x("za://foo.com", s, "bar" ), "present element (prefixed) 3")
+ assert(null == x("bar"), "present element (unprefix) 3")
- assertEquals("present element (prefixed) 4", Some(new Atom(42)), x.get("za://foo.com", s, "bar" ))
- assertEquals("present element (unprefix) 4", None, x.get("bar"))
+ assert(Some(new Atom(42)) == x.get("za://foo.com", s, "bar" ), "present element (prefixed) 4")
+ assert(None == x.get("bar"), "present element (unprefix) 4")
x = new UnprefixedAttribute("bar","meaning", x)
- assertEquals("present element (prefixed) 5", null, x(null, s, "bar" ))
- assertEquals("present element (unprefix) 5", Text("meaning"), x("bar"))
+ assert(null == x(null, s, "bar"), "present element (prefixed) 5")
+ assert(Text("meaning") == x("bar"), "present element (unprefix) 5")
- assertEquals("present element (prefixed) 6", None, x.get(null, s, "bar" ))
- assertEquals("present element (unprefix) 6", Some(Text("meaning")), x.get("bar"))
+ assert(None == x.get(null, s, "bar" ), "present element (prefixed) 6")
+ assert(Some(Text("meaning")) == x.get("bar"), "present element (unprefix) 6")
- val z = <foo bar="gar"/>
- val z2 = <foo/>
+ val z = <foo bar="gar"/>
+ val z2 = <foo/>
- assertEquals("attribute extractor 1", Text("gar"), domatch(z))
- assertEquals("attribute extractor 2", new Atom(3), domatch(z2))
+ assert(Text("gar") == domatch(z), "attribute extractor 1")
+ assert(new Atom(3) == domatch(z2), "attribute extractor 2")
}
}
- class UtilityTest extends TestCase("scala.xml.Utility") with Assert {
- def runTest() = {
- assertTrue(Utility.isNameStart('b'))
- assertFalse(Utility.isNameStart(':'))
-
-
- val x = <foo>
- <toomuchws/>
- </foo>
+ object UtilityTest {
+ def run() {
+ assert(Utility.isNameStart('b'))
+ assert(!Utility.isNameStart(':'))
- val y = xml.Utility.trim(x)
-
- assertEquals("trim 1 ", 1, y match { case <foo><toomuchws/></foo> => 1 })
+ val x = <foo>
+ <toomuchws/>
+ </foo>
- val x2 = <foo>
- <toomuchws> a b b a </toomuchws>
- </foo>
+ val y = xml.Utility.trim(x)
- val y2 = xml.Utility.trim(x2)
+ assert(1 == (y match { case <foo><toomuchws/></foo> => 1 }), "trim 1")
- assertEquals("trim 2 ", 2, y2 match { case <foo><toomuchws>a b b a</toomuchws></foo> => 2 })
+ val x2 = <foo>
+ <toomuchws> a b b a </toomuchws>
+ </foo>
+ val y2 = xml.Utility.trim(x2)
- val z = <bar>''</bar>
- val z1 = z.toString
+ assert(2 == (y2 match { case <foo><toomuchws>a b b a</toomuchws></foo> => 2 }), "trim 2")
- assertEquals("apos unescaped", "<bar>''</bar>", z1)
+ val z = <bar>''</bar>
+ val z1 = z.toString
- val q = xml.Utility.sort(<a g='3' j='2' oo='2' a='2'/>)
- assertEquals("sort attrib"+xml.Utility.sort(q.attributes).toString, " a=\"2\" g=\"3\" j=\"2\" oo=\"2\"", xml.Utility.sort(q.attributes).toString)
- val pp = new xml.PrettyPrinter(80,5)
- assertEquals("pretty print sorted attrib:"+pp.format(q), "<a a=\"2\" g=\"3\" j=\"2\" oo=\"2\"></a>", pp.format(q))
+ assert("<bar>''</bar>" == z1, "apos unescaped")
- <hi>
- <there/>
- <guys/>
- </hi>.hashCode // Bug #777
- }
- }
+ val q = xml.Utility.sort(<a g='3' j='2' oo='2' a='2'/>)
+ assert(" a=\"2\" g=\"3\" j=\"2\" oo=\"2\"" == xml.Utility.sort(q.attributes).toString)
- def main(args:Array[String]) = {
- val ts = new TestSuite(
- new MetaDataTest,
- new UtilityTest
- )
- val tr = new TestResult()
- ts.run(tr)
- tr.failures foreach Console.println
+ val pp = new xml.PrettyPrinter(80,5)
+ assert("<a a=\"2\" g=\"3\" j=\"2\" oo=\"2\"/>" == pp.format(q))
+
+ <hi>
+ <there/>
+ <guys/>
+ </hi>.hashCode // Bug #777
+ }
}
+
}
diff --git a/test/files/jvm/xml01.check b/test/files/jvm/xml01.check
old mode 100644
new mode 100755
index 5e82e9a..d78e6df
--- a/test/files/jvm/xml01.check
+++ b/test/files/jvm/xml01.check
@@ -3,6 +3,6 @@ xpath \
xpath \\ DESCENDANTS
<book><author>Peter Buneman</author><author>Dan Suciu</author><title>Data on ze web</title></book>
-- group nodes
-<f><a></a><b></b><c></c></f>
-<a></a><f><a></a><b></b><c></c></f><a></a><b></b><c></c>
+<f><a/><b/><c/></f>
+<a/><f><a/><b/><c/></f><a/><b/><c/>
attribute value normalization
diff --git a/test/files/jvm/xml01.scala b/test/files/jvm/xml01.scala
index 5234518..2fab650 100644
--- a/test/files/jvm/xml01.scala
+++ b/test/files/jvm/xml01.scala
@@ -1,12 +1,10 @@
import java.io.StringReader
import org.xml.sax.InputSource
-import scala.testing.SUnit._
import scala.util.logging._
import scala.xml._
-
-object Test extends App with Assert {
+object Test extends App {
val e: scala.xml.MetaData = Null //Node.NoAttributes
val sc: scala.xml.NamespaceBinding = TopScope
@@ -26,45 +24,33 @@ object Test extends App with Assert {
}
println("equality")
- assertEqualsXML(c, parsedxml11)
- assertEqualsXML(parsedxml1, parsedxml11)
- assertSameElementsXML(List(parsedxml1), List(parsedxml11))
- assertSameElementsXML(Array(parsedxml1).toList, List(parsedxml11))
+ assert(c == parsedxml11)
+ assert(parsedxml1 == parsedxml11)
+ assert(List(parsedxml1) sameElements List(parsedxml11))
+ assert(Array(parsedxml1).toList sameElements List(parsedxml11))
val x2 = "<book><author>Peter Buneman</author><author>Dan Suciu</author><title>Data on ze web</title></book>";
val i = new InputSource(new StringReader(x2))
val x2p = XML.load(i)
- assertEqualsXML(x2p, Elem(null, "book" , e, sc,
- Elem(null, "author", e, sc,Text("Peter Buneman")),
- Elem(null, "author", e, sc,Text("Dan Suciu")),
- Elem(null, "title" , e, sc,Text("Data on ze web"))));
+ assert(x2p == Elem(null, "book" , e, sc,
+ Elem(null, "author", e, sc,Text("Peter Buneman")),
+ Elem(null, "author", e, sc,Text("Dan Suciu")),
+ Elem(null, "title" , e, sc,Text("Data on ze web"))))
val xmlFile2 = "<bib><book><author>Peter Buneman</author><author>Dan Suciu</author><title>Data on ze web</title></book><book><author>John Mitchell</author><title>Foundations of Programming Languages</title></book></bib>";
val isrc2 = new InputSource(new StringReader(xmlFile2))
val parsedxml2 = XML.load(isrc2)
- // xmlFile2/book -> book,book
println("xpath \\")
+ assert(parsedxml1 \ "_" sameElements List(Elem(null,"world", e, sc)))
- assertSameElementsXML(parsedxml1 \ "_" , List(Elem(null,"world", e, sc)))
-
- assertSameElementsXML(parsedxml1 \ "world", List(Elem(null,"world", e, sc)))
-
-/*
- Console.println( parsedxml2 \ "_" );
- Console.println( (parsedxml2 \ "_" ).iterator);
- for( val i <- (parsedxml2 \ "_" ).iterator) {
- Console.println( i );
- };
- */
+ assert(parsedxml1 \ "world" sameElements List(Elem(null,"world", e, sc)))
- assertSameElementsXML(
- parsedxml2 \ "_" ,
-
- List(
+ assert(
+ (parsedxml2 \ "_") sameElements List(
Elem(null,"book", e, sc,
Elem(null,"author", e, sc, Text("Peter Buneman")),
Elem(null,"author", e, sc, Text("Dan Suciu")),
@@ -72,13 +58,11 @@ object Test extends App with Assert {
Elem(null,"book",e,sc,
Elem(null,"author",e,sc,Text("John Mitchell")),
Elem(null,"title",e,sc,Text("Foundations of Programming Languages"))))
- );
- assertEquals( (parsedxml2 \ "author").length, 0 );
+ )
+ assert((parsedxml2 \ "author").isEmpty)
- assertSameElementsXML(
- parsedxml2 \ "book",
-
- List(
+ assert(
+ (parsedxml2 \ "book") sameElements List(
Elem(null,"book",e,sc,
Elem(null,"author", e, sc, Text("Peter Buneman")),
Elem(null,"author", e, sc, Text("Dan Suciu")),
@@ -87,69 +71,51 @@ object Test extends App with Assert {
Elem(null,"author", e, sc, Text("John Mitchell")),
Elem(null,"title" , e, sc, Text("Foundations of Programming Languages")))
)
- );
-
- assertSameElementsXML(
-
- parsedxml2 \ "_" \ "_",
+ )
- List(
+ assert(
+ (parsedxml2 \ "_" \ "_") sameElements List(
Elem(null,"author", e, sc, Text("Peter Buneman")),
Elem(null,"author", e, sc, Text("Dan Suciu")),
Elem(null,"title" , e, sc, Text("Data on ze web")),
Elem(null,"author", e, sc, Text("John Mitchell")),
Elem(null,"title" , e, sc, Text("Foundations of Programming Languages"))
)
- );
+ )
- assertSameElementsXML(
-
- parsedxml2 \ "_" \ "author",
-
- List(
+ assert(
+ (parsedxml2 \ "_" \ "author") sameElements List(
Elem(null,"author", e, sc, Text("Peter Buneman")),
Elem(null,"author", e, sc, Text("Dan Suciu")),
Elem(null,"author", e, sc, Text("John Mitchell"))
)
-
- );
+ )
- assertSameElementsXML( (parsedxml2 \ "_" \ "_" \ "author"), List() );
+ assert((parsedxml2 \ "_" \ "_" \ "author").isEmpty)
Console.println("xpath \\\\ DESCENDANTS");
- assertSameElementsXML(
-
- parsedxml2 \\ "author",
-
- List(
+ assert(
+ (parsedxml2 \\ "author") sameElements List(
Elem(null,"author", e, sc, Text("Peter Buneman")),
Elem(null,"author", e, sc, Text("Dan Suciu")),
Elem(null,"author", e, sc, Text("John Mitchell"))
)
+ )
- );
-
-
- assertSameElementsXML(
-
- parsedxml2 \\ "title",
-
- List(
+ assert(
+ (parsedxml2 \\ "title") sameElements List(
Elem(null,"title", e, sc, Text("Data on ze web")),
Elem(null,"title", e, sc, Text("Foundations of Programming Languages")))
- );
+ )
println(
(parsedxml2 \\ "book" ){ n:Node => (n \ "title") xml_== "Data on ze web" }
- );
-
- assertEqualsXML(
+ )
- (new NodeSeq { val theSeq = List( parsedxml2 ) }) \\ "_",
-
- List(
+ assert(
+ ((new NodeSeq { val theSeq = List( parsedxml2 ) }) \\ "_") sameElements List(
Elem(null,"bib",e,sc,
Elem(null,"book",e,sc,
Elem(null, "author", e, sc, Text("Peter Buneman")),
@@ -163,64 +129,53 @@ object Test extends App with Assert {
Elem(null,"author",e,sc,Text("Dan Suciu")),
Elem(null,"title",e,sc,Text("Data on ze web"))),
Elem(null,"author",e,sc,Text("Peter Buneman")),
- //Text("Peter Buneman"),
Elem(null,"author",e,sc,Text("Dan Suciu")),
- //Text("Dan Suciu"),
Elem(null,"title",e,sc,Text("Data on ze web")),
- //Text("Data on ze web"),
Elem(null,"book",e,sc,
Elem(null,"author",e,sc,Text("John Mitchell")),
Elem(null,"title",e,sc,Text("Foundations of Programming Languages"))),
Elem(null,"author",e,sc,Text("John Mitchell")),
- //Text("John Mitchell"),
Elem(null,"title",e,sc,Text("Foundations of Programming Languages"))
- //Text("Foundations of Programming Languages")
)
- );
-
- // test group node
- Console println "-- group nodes"
- val zx1: Node = Group { <a/><b/><c/> }
- val zy1 = <f>{zx1}</f>
- Console println zy1.toString()
+ )
- val zx2: Node = Group { List(<a/>,zy1,zx1) }
- Console println zx2.toString()
+ // test group node
+ Console println "-- group nodes"
+ val zx1: Node = Group { <a/><b/><c/> }
+ val zy1 = <f>{zx1}</f>
+ Console println zy1.toString()
- val zz1 = <xml:group><a/><b/><c/></xml:group>
+ val zx2: Node = Group { List(<a/>,zy1,zx1) }
+ Console println zx2.toString()
- assertTrue(zx1 xml_== zz1)
- assertTrue(zz1.length == 3)
+ val zz1 = <xml:group><a/><b/><c/></xml:group>
- // unparsed
+ assert(zx1 xml_== zz1)
+ assert(zz1.length == 3)
- // val uup = <xml:unparsed>&<<>""^%@$!#</xml:unparsed>
- // assertTrue(uup == "&<<>\"\"^%@$!#")
- // test unicode escapes backslash u
+ // unparsed
println("attribute value normalization")
val xmlAttrValueNorm = "<personne id='p0003' nom='Şahingöz' />";
- {
- val isrcA = new InputSource( new StringReader(xmlAttrValueNorm) );
- val parsedxmlA = XML.load(isrcA);
- val c = (parsedxmlA \ "@nom").text.charAt(0);
- //Console.println("char '"+c+"' \u015e");
- assertTrue(c == '\u015e');
- }
- // buraq: if the following test fails with 'character x not allowed', it is
- // related to the mutable variable in a closures in MarkupParser.parsecharref
- {
- val isr = scala.io.Source.fromString(xmlAttrValueNorm);
- val pxmlB = scala.xml.parsing.ConstructingParser.fromSource(isr,false);
- val parsedxmlB = pxmlB.element(TopScope);
- val c = (parsedxmlB \ "@nom").text.charAt(0);
- //Console.println("char '"+c+"' \u015e");
- assertTrue(c == '\u015e');
- }
+ {
+ val isrcA = new InputSource( new StringReader(xmlAttrValueNorm) );
+ val parsedxmlA = XML.load(isrcA);
+ val c = (parsedxmlA \ "@nom").text.charAt(0);
+ assert(c == '\u015e');
+ }
+ // buraq: if the following test fails with 'character x not allowed', it is
+ // related to the mutable variable in a closures in MarkupParser.parsecharref
+ {
+ val isr = scala.io.Source.fromString(xmlAttrValueNorm);
+ val pxmlB = scala.xml.parsing.ConstructingParser.fromSource(isr,false);
+ val parsedxmlB = pxmlB.element(TopScope);
+ val c = (parsedxmlB \ "@nom").text.charAt(0);
+ assert(c == '\u015e');
+ }
// #60 test by round trip
val p = scala.xml.parsing.ConstructingParser.fromSource(scala.io.Source.fromString("<foo bar:attr='&'/>"),true)
val n = p.element(new scala.xml.NamespaceBinding("bar","BAR",scala.xml.TopScope))(0)
- assertFalse( n.attributes.get("BAR", n, "attr").isEmpty)
+ assert( n.attributes.get("BAR", n, "attr").nonEmpty)
}
diff --git a/test/files/jvm/bug680.check b/test/files/jvm/xml02.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/jvm/xml02.check
diff --git a/test/files/jvm/xml02.scala b/test/files/jvm/xml02.scala
index 02aabf3..b830a0e 100644
--- a/test/files/jvm/xml02.scala
+++ b/test/files/jvm/xml02.scala
@@ -1,6 +1,11 @@
-import testing.SUnit._
+object Test {
-object Test extends TestConsoleMain {
+ def main(args: Array[String]) {
+ XmlEx.run()
+ XmlEy.run()
+ XmlPat.run()
+ DodgyNamespace.run()
+ }
import scala.xml.{NodeSeq, Utility}
import NodeSeq.seqToNodeSeq
@@ -15,38 +20,38 @@ object Test extends TestConsoleMain {
val bx = <hello foo="bar&x"></hello>
- object XmlEx extends TestCase("attributes") with Assert {
+ object XmlEx {
- override def runTest = {
- assertTrue("@one", (ax \ "@foo") xml_== "bar") // uses NodeSeq.view!
- assertTrue("@two", (ax \ "@foo") xml_== xml.Text("bar")) // dto.
- assertTrue("@three", (bx \ "@foo") xml_== "bar&x") // dto.
- assertTrue ("@four", (bx \ "@foo") xml_sameElements List(xml.Text("bar&x")))
- assertEquals("@five", "<hello foo=\"bar&x\"></hello>", bx.toString)
+ def run() {
+ assert((ax \ "@foo") xml_== "bar") // uses NodeSeq.view!
+ assert((ax \ "@foo") xml_== xml.Text("bar")) // dto.
+ assert((bx \ "@foo") xml_== "bar&x") // dto.
+ assert((bx \ "@foo") xml_sameElements List(xml.Text("bar&x")))
+ assert("<hello foo=\"bar&x\"></hello>" == bx.toString)
}
}
- object XmlEy extends TestCase("attributes with namespace") with Assert {
- override def runTest = {
+ object XmlEy {
+ def run() {
val z = ax \ "@{the namespace from outer space}foo"
- assertTrue("@six", (ax \ "@{the namespace from outer space}foo") xml_== "baz")
- assertTrue("@eight", (cx \ "@{the namespace from outer space}foo") xml_== "baz")
+ assert((ax \ "@{the namespace from outer space}foo") xml_== "baz")
+ assert((cx \ "@{the namespace from outer space}foo") xml_== "baz")
try {
ax \ "@"
- assertTrue("wrong1", false)
+ assert(false)
} catch {
case _: IllegalArgumentException =>
}
try {
ax \ "@{"
- assertTrue("wrong2", false)
+ assert(false)
} catch {
case _: IllegalArgumentException =>
}
try {
ax \ "@{}"
- assertTrue("wrong3", false)
+ assert(false)
} catch {
case _: IllegalArgumentException =>
}
@@ -54,25 +59,20 @@ object Test extends TestConsoleMain {
}
}
- object XmlPat extends TestCase("patterns") with Assert {
- override def runTest = {
- assertTrue(<hello/> match { case <hello/> => true; case _ => false; })
- assertTrue(<x:ga xmlns:x="z"/> match { case <x:ga/> => true; case _ => false; });
- assertTrue(Utility.trim(cx) match { case n @ <hello>crazy text world</hello> if (n \ "@foo") xml_== "bar" => true; })
- assertTrue(Utility.trim(cx) match { case n @ <z:hello>crazy text world</z:hello> if (n \ "@foo") xml_== "bar" => true; })
+ object XmlPat {
+ def run() {
+ assert(<hello/> match { case <hello/> => true; case _ => false; })
+ assert(<x:ga xmlns:x="z"/> match { case <x:ga/> => true; case _ => false; });
+ assert(Utility.trim(cx) match { case n @ <hello>crazy text world</hello> if (n \ "@foo") xml_== "bar" => true; })
+ assert(Utility.trim(cx) match { case n @ <z:hello>crazy text world</z:hello> if (n \ "@foo") xml_== "bar" => true; })
}
}
- object DodgyNamespace extends TestCase("DodgyNamespace") with Assert {
- override def runTest = {
+ object DodgyNamespace {
+ def run() {
val x = <flog xmlns:ee="http://ee.com"><foo xmlns:dog="http://dog.com"><dog:cat/></foo></flog>
- assertTrue(x.toString.matches(".*xmlns:dog=\"http://dog.com\".*"));
+ assert(x.toString.matches(".*xmlns:dog=\"http://dog.com\".*"));
}
}
- def suite = new TestSuite(
- XmlEx,
- XmlEy,
- XmlPat,
- DodgyNamespace
- )
+
}
diff --git a/test/files/jvm/xml03syntax.check b/test/files/jvm/xml03syntax.check
old mode 100644
new mode 100755
index fd1e10c..edcdbdd
--- a/test/files/jvm/xml03syntax.check
+++ b/test/files/jvm/xml03syntax.check
@@ -1,9 +1,18 @@
+true
+true
+true
<hello>world</hello>
+true
<hello>1.5</hello>
+true
<hello>5</hello>
+true
<hello>true</hello>
+true
<hello>5</hello>
+true
<hello>27</hello>
+true
<hello>1 2 3 4</hello>
1
2
@@ -13,5 +22,5 @@
2
4
-node=<elem key="<b>hello</b>"></elem>, key=Some(<b>hello</b>)
-node=<elem ></elem>, key=None
+node=<elem key="<b>hello</b>"/>, key=Some(<b>hello</b>)
+node=<elem/>, key=None
diff --git a/test/files/jvm/xml03syntax.scala b/test/files/jvm/xml03syntax.scala
index 2fee024..2c93f7c 100644
--- a/test/files/jvm/xml03syntax.scala
+++ b/test/files/jvm/xml03syntax.scala
@@ -1,7 +1,6 @@
-import scala.testing.SUnit._
import scala.xml._
-object Test extends AnyRef with Assert {
+object Test {
private def handle[A](x: Node): A = {
println(x)
@@ -9,15 +8,15 @@ object Test extends AnyRef with Assert {
}
def main(args: Array[String]) {
- test1
- test2
- test3
+ test1()
+ test2()
+ test3()
}
- private def test1 {
+ private def test1() {
val xNull = <hello>{null}</hello> // these used to be Atom(unit), changed to empty children
- assertSameElements(xNull.child, Nil)
+ println(xNull.child sameElements Nil)
val x0 = <hello>{}</hello> // these used to be Atom(unit), changed to empty children
val x00 = <hello>{ }</hello> // dto.
@@ -25,29 +24,29 @@ object Test extends AnyRef with Assert {
val xa = <hello>{ "world" }</hello>
- assertSameElements(x0.child, Nil)
- assertSameElements(x00.child, Nil)
- assertEquals(handle[String](xa), "world")
+ println(x0.child sameElements Nil)
+ println(x00.child sameElements Nil)
+ println(handle[String](xa) == "world")
val xb = <hello>{ 1.5 }</hello>
- assertEquals(handle[Double](xb), 1.5)
+ println(handle[Double](xb) == 1.5)
val xc = <hello>{ 5 }</hello>
- assertEquals(handle[Int](xc), 5)
+ println(handle[Int](xc) == 5)
val xd = <hello>{ true }</hello>
- assertEquals(handle[Boolean](xd), true)
+ println(handle[Boolean](xd) == true)
val xe = <hello>{ 5:Short }</hello>
- assertEquals(handle[Short](xe), 5:Short)
+ println(handle[Short](xe) == (5:Short))
val xf = <hello>{ val x = 27; x }</hello>
- assertEquals(handle[Int](xf), 27)
+ println(handle[Int](xf) == 27)
val xg = <hello>{ List(1,2,3,4) }</hello>
@@ -68,7 +67,7 @@ object Test extends AnyRef with Assert {
/** see SVN r13821 (emir): support for <elem key={x:Option[Seq[Node]]} />,
* so that Options can be used for optional attributes.
*/
- private def test2 {
+ private def test2() {
val x1: Option[Seq[Node]] = Some(<b>hello</b>)
val n1 = <elem key={x1} />;
println("node="+n1+", key="+n1.attribute("key"))
@@ -78,7 +77,7 @@ object Test extends AnyRef with Assert {
println("node="+n2+", key="+n2.attribute("key"))
}
- private def test3 {
+ private def test3() {
// this demonstrates how to handle entities
val s = io.Source.fromString("<a> </a>")
object parser extends xml.parsing.ConstructingParser(s, false /*ignore ws*/) {
diff --git a/test/files/jvm/xml04embed.check b/test/files/jvm/xml04embed.check
index e69de29..e71e645 100644
--- a/test/files/jvm/xml04embed.check
+++ b/test/files/jvm/xml04embed.check
@@ -0,0 +1,3 @@
+{
+}
+{}{}{}
diff --git a/test/files/jvm/xml04embed.scala b/test/files/jvm/xml04embed.scala
index 249e8ce..fa453e4 100644
--- a/test/files/jvm/xml04embed.scala
+++ b/test/files/jvm/xml04embed.scala
@@ -1,15 +1,10 @@
-import scala.testing.SUnit._
-
-object Test extends AnyRef with Assert {
+object Test {
def main(args: Array[String]) {
val ya = <x>{{</x>
- assertEquals(ya.text, "{")
-
+ println(ya.text)
val ua = <x>}}</x>
- assertEquals(ua.text, "}")
-
+ println(ua.text)
val za = <x>{{}}{{}}{{}}</x>
- assertEquals(za.text, "{}{}{}")
-
+ println(za.text)
}
}
diff --git a/test/files/jvm/xml05.check b/test/files/jvm/xml05.check
new file mode 100644
index 0000000..8d3e803
--- /dev/null
+++ b/test/files/jvm/xml05.check
@@ -0,0 +1,11 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> <city name="San José"/>
+res0: scala.xml.Elem = <city name="San José"/>
+
+scala>
+
+scala>
diff --git a/test/files/jvm/xml05.scala b/test/files/jvm/xml05.scala
new file mode 100644
index 0000000..52ae255
--- /dev/null
+++ b/test/files/jvm/xml05.scala
@@ -0,0 +1,7 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+<city name="San José"/>
+ """
+}
\ No newline at end of file
diff --git a/test/files/jvm/xmlattr.check b/test/files/jvm/xmlattr.check
index e69de29..a87420d 100644
--- a/test/files/jvm/xmlattr.check
+++ b/test/files/jvm/xmlattr.check
@@ -0,0 +1,18 @@
+true
+true
+true
+true
+true
+true
+removal of duplicates for unprefixed attributes in append = 1
+true
+true
+true
+true
+true
+true
+true
+true
+true
+<b x="&"/>
+<b x="&"/>
diff --git a/test/files/jvm/xmlattr.scala b/test/files/jvm/xmlattr.scala
index 4dda843..d214642 100644
--- a/test/files/jvm/xmlattr.scala
+++ b/test/files/jvm/xmlattr.scala
@@ -1,60 +1,63 @@
-import testing.SUnit.{Assert, TestCase, TestConsoleMain, TestSuite}
-import xml.{NodeSeq, Null, Text, UnprefixedAttribute}
+import xml.{ NodeSeq, Null, Text, UnprefixedAttribute }
-object Test extends TestConsoleMain {
- def suite = new TestSuite(UnprefixedAttributeTest, AttributeWithOptionTest)
+object Test {
+
+ def main(args: Array[String]) {
+ UnprefixedAttributeTest()
+ AttributeWithOptionTest()
+ AttributeOutputTest()
+ }
- object UnprefixedAttributeTest extends TestCase("UnprefixedAttribute") with Assert {
- override def runTest {
- var x = new UnprefixedAttribute("foo","bar", Null)
-
- // always assertX(expected, actual)
- assertEquals(Some(Text("bar")), x.get("foo"));
- assertEquals(Text("bar"), x("foo"))
- assertEquals(None, x.get("no_foo"))
- assertEquals(null, x("no_foo"))
+ object UnprefixedAttributeTest {
+ def apply() {
+ val x = new UnprefixedAttribute("foo","bar", Null)
+ println(Some(Text("bar")) == x.get("foo"))
+ println(Text("bar") == x("foo"))
+ println(None == x.get("no_foo"))
+ println(null == x("no_foo"))
val y = x.remove("foo")
- assertEquals(Null, y)
+ println(Null == y)
val z = new UnprefixedAttribute("foo", null:NodeSeq, x)
- assertEquals(None, z.get("foo"))
+ println(None == z.get("foo"))
var appended = x append x append x append x
var len = 0; while (appended ne Null) {
appended = appended.next
len = len + 1
}
- assertEquals("removal of duplicates for unprefixed attributes in append", 1, len)
+ println("removal of duplicates for unprefixed attributes in append = " + len)
}
}
- object AttributeWithOptionTest extends TestCase("AttributeWithOption") with Assert {
- override def runTest {
- var x = new UnprefixedAttribute("foo", Some(Text("bar")), Null)
+ object AttributeWithOptionTest {
+ def apply() {
+ val x = new UnprefixedAttribute("foo", Some(Text("bar")), Null)
- assertEquals(Some(Text("bar")), x.get("foo"));
- assertEquals(Text("bar"), x("foo"))
- assertEquals(None, x.get("no_foo"))
- assertEquals(null, x("no_foo"))
+ println(Some(Text("bar")) == x.get("foo"))
+ println(Text("bar") == x("foo"))
+ println(None == x.get("no_foo"))
+ println(null == x("no_foo"))
val attr1 = Some(Text("foo value"))
val attr2 = None
val y = <b foo={attr1} bar={attr2} />
- assertEquals(Some(Text("foo value")), y.attributes.get("foo"));
- assertEquals(Text("foo value"), y.attributes("foo"))
- assertEquals(None, y.attributes.get("bar"))
- assertEquals(null, y.attributes("bar"))
+ println(Some(Text("foo value")) == y.attributes.get("foo"));
+ println(Text("foo value") == y.attributes("foo"))
+ println(None == y.attributes.get("bar"))
+ println(null == y.attributes("bar"))
val z = new UnprefixedAttribute("foo", None, x)
- assertEquals(None, z.get("foo")) // None
+ println(None == z.get("foo"))
}
}
- object AttributeOutputTest extends TestCase("AttributeOutput") with Assert {
- override def runTest {
- assertEquals(<b x="&"/>.toString, "<b x=\"&\"></b>")
- assertEquals( <b x={"&"}/>.toString, "<b x=\"&\"></b>")
+ object AttributeOutputTest {
+ def apply() {
+ println(<b x="&"/>)
+ println(<b x={"&"}/>)
}
}
+
}
diff --git a/test/files/jvm/xmlstuff.scala b/test/files/jvm/xmlstuff.scala
index 3508070..45234c7 100644
--- a/test/files/jvm/xmlstuff.scala
+++ b/test/files/jvm/xmlstuff.scala
@@ -1,10 +1,8 @@
import java.io.StringReader
import org.xml.sax.InputSource
-
-import scala.testing.SUnit.Assert
import scala.xml.{Node, NodeSeq, Elem, Text, XML}
-object Test extends AnyRef with Assert {
+object Test {
/** returns true if exception was thrown */
def catcher(att: Function1[Unit, scala.xml.MetaData]): Boolean = {
@@ -21,193 +19,163 @@ object Test extends AnyRef with Assert {
def main(args: Array[String]) {
- //val e: scala.xml.MetaData = null; //Node.NoAttributes;
- //val sc: scala.xml.NamespaceBinding = null;
-
- // ------------------------------------------ tests for class NodeSeq
-
- /**
- println("checking wellformed attributes");
- {
- import scala.xml.{ UnprefixedAttribute, Null }
- assertTrue(catcher {x:Unit => new UnprefixedAttribute("key", "<", Null)}); // < illegal
- assertTrue(catcher(x:Unit => new UnprefixedAttribute("key", "&", Null))); // & illegal
- assertTrue(catcher(x:Unit => new UnprefixedAttribute("key", "a&a", Null))); // & illegal
- assertTrue(catcher(x:Unit => new UnprefixedAttribute("key", "a&a;&", Null))); // 2nd &
-
- assertFalse(catcher(x:Unit => new UnprefixedAttribute("key", "a&a; <<", Null)));
- }
-*/
-
-/*
-checking wellformed attributes
-< not allowed in attribute value
-passed ok
-malformed entity reference in attribute value [&]
-passed ok
-malformed entity reference in attribute value [a&a]
-passed ok
-malformed entity reference in attribute value [a&a;&]
-passed ok
-passed ok
-*/
-
- println("NodeSeq")
+ println("NodeSeq")
val p = <foo>
- <bar gt='ga' value="3"/>
- <baz bazValue="8"/>
- <bar value="5" gi='go'/>
- </foo>;
-
- val pelems_1 = for( val x <- p \ "bar"; val y <- p \ "baz" ) yield {
+ <bar gt='ga' value="3"/>
+ <baz bazValue="8"/>
+ <bar value="5" gi='go'/>
+ </foo>;
+
+ val pelems_1 = for (x <- p \ "bar"; y <- p \ "baz" ) yield {
Text(x.attributes("value").toString + y.attributes("bazValue").toString+ "!")
};
val pelems_2 = new NodeSeq { val theSeq = List(Text("38!"),Text("58!")) };
- assertSameElementsXML(pelems_1, pelems_2)
+ assert(pelems_1 sameElements pelems_2)
- assertEqualsXML(p \\ "@bazValue", Text("8"))
+ assert(Text("8") sameElements (p \\ "@bazValue"))
val books =
- <bks>
- <book><title>Blabla</title></book>
- <book><title>Blubabla</title></book>
- <book><title>Baaaaaaalabla</title></book>
+ <bks>
+ <book><title>Blabla</title></book>
+ <book><title>Blubabla</title></book>
+ <book><title>Baaaaaaalabla</title></book>
</bks>;
- val reviews =
- <reviews>
- <entry><title>Blabla</title>
- <remarks>
- Hallo Welt.
- </remarks>
+ val reviews =
+ <reviews>
+ <entry><title>Blabla</title>
+ <remarks>
+ Hallo Welt.
+ </remarks>
+ </entry>
+ <entry><title>Blubabla</title>
+ <remarks>
+ Hello Blu
+ </remarks>
+ </entry>
+ <entry><title>Blubabla</title>
+ <remarks>
+ rem 2
+ </remarks>
</entry>
- <entry><title>Blubabla</title>
- <remarks>
- Hello Blu
- </remarks>
- </entry>
- <entry><title>Blubabla</title>
- <remarks>
- rem 2
- </remarks>
- </entry>
</reviews>;
- println( new scala.xml.PrettyPrinter(80, 5).formatNodes (
- for (t <- books \\ "title";
- r <- reviews \\ "entry"
- if (r \ "title") xml_== t) yield
- <result>
- { t }
- { r \ "remarks" }
- </result>
- ));
-
- // example
- println(
- for (t @ <book><title>Blabla</title></book> <- new NodeSeq { val theSeq = books.child }.toList)
- yield t
- );
-val phoneBook =
- <phonebook>
- <descr>
- This is the <b>phonebook</b> of the
- <a href="http://acme.org">ACME</a> corporation.
- </descr>
- <entry>
- <name>John</name>
- <phone where="work"> +41 21 693 68 67</phone>
- <phone where="mobile">+41 79 602 23 23</phone>
- </entry>
+ println( new scala.xml.PrettyPrinter(80, 5).formatNodes (
+ for (t <- books \\ "title";
+ r <- reviews \\ "entry"
+ if (r \ "title") xml_== t) yield
+ <result>
+ { t }
+ { r \ "remarks" }
+ </result>
+ ));
+
+ // example
+ println(
+ for (t @ <book><title>Blabla</title></book> <- new NodeSeq { val theSeq = books.child }.toList)
+ yield t
+ );
+ val phoneBook =
+ <phonebook>
+ <descr>
+ This is the <b>phonebook</b> of the
+ <a href="http://acme.org">ACME</a> corporation.
+ </descr>
+ <entry>
+ <name>John</name>
+ <phone where="work"> +41 21 693 68 67</phone>
+ <phone where="mobile">+41 79 602 23 23</phone>
+ </entry>
</phonebook>;
-val addrBook =
- <addrbook>
- <descr>
- This is the <b>addressbook</b> of the
- <a href="http://acme.org">ACME</a> corporation.
- </descr>
- <entry>
- <name>John</name>
- <street> Elm Street</street>
- <city>Dolphin City</city>
- </entry>
+ val addrBook =
+ <addrbook>
+ <descr>
+ This is the <b>addressbook</b> of the
+ <a href="http://acme.org">ACME</a> corporation.
+ </descr>
+ <entry>
+ <name>John</name>
+ <street> Elm Street</street>
+ <city>Dolphin City</city>
+ </entry>
</addrbook>;
- println( new scala.xml.PrettyPrinter(80, 5).formatNodes (
- for (t <- addrBook \\ "entry";
- r <- phoneBook \\ "entry"
- if (t \ "name") xml_== (r \ "name")) yield
- <result>
- { t.child }
- { r \ "phone" }
- </result>
- ));
-
-
- /* namespaces */
- // begin tmp
- println("namespaces")
- val cuckoo = <cuckoo xmlns="http://cuckoo.com">
+ println( new scala.xml.PrettyPrinter(80, 5).formatNodes (
+ for (t <- addrBook \\ "entry";
+ r <- phoneBook \\ "entry"
+ if (t \ "name") xml_== (r \ "name")) yield
+ <result>
+ { t.child }
+ { r \ "phone" }
+ </result>
+ ));
+
+
+ /* namespaces */
+ // begin tmp
+ println("namespaces")
+ val cuckoo = <cuckoo xmlns="http://cuckoo.com">
<foo/>
<bar/>
- </cuckoo>;
- assertEquals(cuckoo.namespace, "http://cuckoo.com")
- for (n <- cuckoo \ "_" ) {
- //println("n = "+n);
- //println("n.prefix = "+n.prefix);
- //.println("n.scope = "+n.scope);
- assertEquals( n.namespace, "http://cuckoo.com")
- }
+ </cuckoo>;
+ assert(cuckoo.namespace == "http://cuckoo.com")
+ for (n <- cuckoo \ "_" ) {
+ //println("n = "+n);
+ //println("n.prefix = "+n.prefix);
+ //.println("n.scope = "+n.scope);
+ assert( n.namespace == "http://cuckoo.com")
+ }
- println("validation - elements")
- val vtor = new scala.xml.dtd.ElementValidator();
- {
- import scala.xml.dtd.ELEMENTS
- import scala.xml.dtd.ContentModel._
- vtor.setContentModel(
- ELEMENTS(
- Sequ(
- Letter(ElemName("bar")),
- Star(Letter(ElemName("baz"))) )));
+ println("validation - elements")
+ val vtor = new scala.xml.dtd.ElementValidator();
+ {
+ import scala.xml.dtd.ELEMENTS
+ import scala.xml.dtd.ContentModel._
+ vtor.setContentModel(
+ ELEMENTS(
+ Sequ(
+ Letter(ElemName("bar")),
+ Star(Letter(ElemName("baz"))) )));
- }
- assertEquals(vtor( <foo><bar/><baz/><baz/></foo> ), true);
- {
- import scala.xml.dtd.MIXED
- import scala.xml.dtd.ContentModel._
-
- vtor.setContentModel(
- MIXED(
- Alt(Letter(ElemName("bar")),
- Letter(ElemName("baz")),
- Letter(ElemName("bal")))));
- }
+ }
+ assert(vtor( <foo><bar/><baz/><baz/></foo> ))
- assertEquals(vtor(<foo><bar/><baz/><baz/></foo> ), true)
- assertEquals(vtor(<foo>ab<bar/>cd<baz/>ed<baz/>gh</foo> ), true)
- assertEquals(vtor(<foo> <ugha/> <bugha/> </foo> ), false)
-
- println("validation - attributes")
- vtor.setContentModel(null)
- vtor.setMetaData(List())
- assertEquals(vtor( <foo bar="hello"/> ), false)
-
- {
- import scala.xml.dtd._
- vtor setMetaData List(AttrDecl("bar", "CDATA", IMPLIED))
- }
- assertEquals(vtor(<foo href="http://foo.com" bar="hello"/>), false)
- assertEquals(vtor(<foo bar="hello"/>), true)
+ {
+ import scala.xml.dtd.MIXED
+ import scala.xml.dtd.ContentModel._
+
+ vtor.setContentModel(
+ MIXED(
+ Alt(Letter(ElemName("bar")),
+ Letter(ElemName("baz")),
+ Letter(ElemName("bal")))));
+ }
- {
- import scala.xml.dtd._
- vtor.setMetaData(List(AttrDecl("bar","CDATA",REQUIRED)))
- }
- assertEquals( vtor( <foo href="http://foo.com" /> ), false )
- assertEquals( vtor( <foo bar="http://foo.com" /> ), true )
-
+ assert(vtor(<foo><bar/><baz/><baz/></foo> ))
+ assert(vtor(<foo>ab<bar/>cd<baz/>ed<baz/>gh</foo> ))
+ assert(!vtor(<foo> <ugha/> <bugha/> </foo> ))
+
+ println("validation - attributes")
+ vtor.setContentModel(null)
+ vtor.setMetaData(List())
+ assert(!vtor( <foo bar="hello"/> ))
+
+ {
+ import scala.xml.dtd._
+ vtor setMetaData List(AttrDecl("bar", "CDATA", IMPLIED))
+ }
+ assert(!vtor(<foo href="http://foo.com" bar="hello"/>))
+ assert(vtor(<foo bar="hello"/>))
+
+ {
+ import scala.xml.dtd._
+ vtor.setMetaData(List(AttrDecl("bar","CDATA",REQUIRED)))
+ }
+ assert(!vtor( <foo href="http://foo.com" /> ))
+ assert( vtor( <foo bar="http://foo.com" /> ))
+
}
}
diff --git a/test/files/lib/annotations.jar b/test/files/lib/annotations.jar
deleted file mode 100644
index 59fa4b7..0000000
Binary files a/test/files/lib/annotations.jar and /dev/null differ
diff --git a/test/files/lib/annotations.jar.desired.sha1 b/test/files/lib/annotations.jar.desired.sha1
index 2b4292d..ff7bc94 100644
--- a/test/files/lib/annotations.jar.desired.sha1
+++ b/test/files/lib/annotations.jar.desired.sha1
@@ -1 +1 @@
-02fe2ed93766323a13f22c7a7e2ecdcd84259b6c ?annotations.jar
+02fe2ed93766323a13f22c7a7e2ecdcd84259b6c *annotations.jar
diff --git a/test/files/lib/enums.jar b/test/files/lib/enums.jar
deleted file mode 100644
index f661d13..0000000
Binary files a/test/files/lib/enums.jar and /dev/null differ
diff --git a/test/files/lib/enums.jar.desired.sha1 b/test/files/lib/enums.jar.desired.sha1
index 46cd8e9..040dff4 100644
--- a/test/files/lib/enums.jar.desired.sha1
+++ b/test/files/lib/enums.jar.desired.sha1
@@ -1 +1 @@
-981392dbd1f727b152cd1c908c5fce60ad9d07f7 ?enums.jar
+981392dbd1f727b152cd1c908c5fce60ad9d07f7 *enums.jar
diff --git a/test/files/lib/genericNest.jar b/test/files/lib/genericNest.jar
deleted file mode 100644
index bc08c1e..0000000
Binary files a/test/files/lib/genericNest.jar and /dev/null differ
diff --git a/test/files/lib/genericNest.jar.desired.sha1 b/test/files/lib/genericNest.jar.desired.sha1
index e932126..77e4fec 100644
--- a/test/files/lib/genericNest.jar.desired.sha1
+++ b/test/files/lib/genericNest.jar.desired.sha1
@@ -1 +1 @@
-b1ec8a095cec4902b3609d74d274c04365c59c04 ?genericNest.jar
+b1ec8a095cec4902b3609d74d274c04365c59c04 *genericNest.jar
diff --git a/test/files/lib/methvsfield.jar b/test/files/lib/methvsfield.jar
deleted file mode 100644
index f266071..0000000
Binary files a/test/files/lib/methvsfield.jar and /dev/null differ
diff --git a/test/files/lib/methvsfield.jar.desired.sha1 b/test/files/lib/methvsfield.jar.desired.sha1
index 8c01532..6655f45 100644
--- a/test/files/lib/methvsfield.jar.desired.sha1
+++ b/test/files/lib/methvsfield.jar.desired.sha1
@@ -1 +1 @@
-be8454d5e7751b063ade201c225dcedefd252775 ?methvsfield.jar
+be8454d5e7751b063ade201c225dcedefd252775 *methvsfield.jar
diff --git a/test/files/lib/nest.jar b/test/files/lib/nest.jar
deleted file mode 100644
index 4eda4be..0000000
Binary files a/test/files/lib/nest.jar and /dev/null differ
diff --git a/test/files/lib/nest.jar.desired.sha1 b/test/files/lib/nest.jar.desired.sha1
index 674ca79..056e7ad 100644
--- a/test/files/lib/nest.jar.desired.sha1
+++ b/test/files/lib/nest.jar.desired.sha1
@@ -1 +1 @@
-cd33e0a0ea249eb42363a2f8ba531186345ff68c ?nest.jar
+cd33e0a0ea249eb42363a2f8ba531186345ff68c *nest.jar
diff --git a/test/files/lib/scalacheck.jar b/test/files/lib/scalacheck.jar
deleted file mode 100644
index 10712ad..0000000
Binary files a/test/files/lib/scalacheck.jar and /dev/null differ
diff --git a/test/files/lib/scalacheck.jar.desired.sha1 b/test/files/lib/scalacheck.jar.desired.sha1
index cdbdc53..2f15402 100644
--- a/test/files/lib/scalacheck.jar.desired.sha1
+++ b/test/files/lib/scalacheck.jar.desired.sha1
@@ -1 +1 @@
-77dca656258fe983ec64461860ab1ca0f7e2fd65 ?scalacheck.jar
+b6f4dbb29f0c2ec1eba682414f60d52fea84f703 *scalacheck.jar
diff --git a/test/files/neg/abstract-concrete-methods.check b/test/files/neg/abstract-concrete-methods.check
new file mode 100644
index 0000000..e128f77
--- /dev/null
+++ b/test/files/neg/abstract-concrete-methods.check
@@ -0,0 +1,5 @@
+abstract-concrete-methods.scala:7: error: class Outer2 needs to be abstract, since method score in trait Outer of type (i: Outer2#Inner)Double is not defined
+(Note that This#Inner does not match Outer2#Inner: class Inner in class Outer2 is a subclass of trait Inner in trait Outer, but method parameter types must match exactly.)
+class Outer2 extends Outer[Outer2] {
+ ^
+one error found
diff --git a/test/files/neg/abstract-concrete-methods.scala b/test/files/neg/abstract-concrete-methods.scala
new file mode 100644
index 0000000..7f1aea0
--- /dev/null
+++ b/test/files/neg/abstract-concrete-methods.scala
@@ -0,0 +1,10 @@
+trait Outer[This <: Outer[This]] {
+ self: This =>
+
+ trait Inner
+ def score(i: This#Inner): Double
+}
+class Outer2 extends Outer[Outer2] {
+ class Inner extends super.Inner
+ def score(i: Outer2#Inner) = 0.0
+}
diff --git a/test/files/neg/abstract-report.check b/test/files/neg/abstract-report.check
new file mode 100644
index 0000000..bd550f3
--- /dev/null
+++ b/test/files/neg/abstract-report.check
@@ -0,0 +1,24 @@
+abstract-report.scala:1: error: class Unimplemented needs to be abstract, since:
+it has 12 unimplemented members.
+/** As seen from class Unimplemented, the missing signatures are as follows.
+ * For convenience, these are usable as stub implementations.
+ */
+ // Members declared in scala.collection.GenTraversableOnce
+ def isTraversableAgain: Boolean = ???
+ def toIterator: Iterator[String] = ???
+ def toStream: Stream[String] = ???
+
+ // Members declared in scala.collection.TraversableOnce
+ def copyToArray[B >: String](xs: Array[B],start: Int,len: Int): Unit = ???
+ def exists(p: String => Boolean): Boolean = ???
+ def find(p: String => Boolean): Option[String] = ???
+ def forall(p: String => Boolean): Boolean = ???
+ def foreach[U](f: String => U): Unit = ???
+ def hasDefiniteSize: Boolean = ???
+ def isEmpty: Boolean = ???
+ def seq: scala.collection.TraversableOnce[String] = ???
+ def toTraversable: Traversable[String] = ???
+
+class Unimplemented extends TraversableOnce[String] { }
+ ^
+one error found
diff --git a/test/files/neg/abstract-report.scala b/test/files/neg/abstract-report.scala
new file mode 100644
index 0000000..538e093
--- /dev/null
+++ b/test/files/neg/abstract-report.scala
@@ -0,0 +1 @@
+class Unimplemented extends TraversableOnce[String] { }
\ No newline at end of file
diff --git a/test/files/neg/abstract-report2.check b/test/files/neg/abstract-report2.check
new file mode 100644
index 0000000..35a99bd
--- /dev/null
+++ b/test/files/neg/abstract-report2.check
@@ -0,0 +1,103 @@
+abstract-report2.scala:3: error: class Foo needs to be abstract, since:
+it has 13 unimplemented members.
+/** As seen from class Foo, the missing signatures are as follows.
+ * For convenience, these are usable as stub implementations.
+ */
+ def add(x$1: Int): Boolean = ???
+ def addAll(x$1: java.util.Collection[_ <: Int]): Boolean = ???
+ def clear(): Unit = ???
+ def contains(x$1: Any): Boolean = ???
+ def containsAll(x$1: java.util.Collection[_]): Boolean = ???
+ def isEmpty(): Boolean = ???
+ def iterator(): java.util.Iterator[Int] = ???
+ def remove(x$1: Any): Boolean = ???
+ def removeAll(x$1: java.util.Collection[_]): Boolean = ???
+ def retainAll(x$1: java.util.Collection[_]): Boolean = ???
+ def size(): Int = ???
+ def toArray[T](x$1: Array[T with Object]): Array[T with Object] = ???
+ def toArray(): Array[Object] = ???
+
+class Foo extends Collection[Int]
+ ^
+abstract-report2.scala:5: error: class Bar needs to be abstract, since:
+it has 13 unimplemented members.
+/** As seen from class Bar, the missing signatures are as follows.
+ * For convenience, these are usable as stub implementations.
+ */
+ def add(x$1: List[_ <: String]): Boolean = ???
+ def addAll(x$1: java.util.Collection[_ <: List[_ <: String]]): Boolean = ???
+ def clear(): Unit = ???
+ def contains(x$1: Any): Boolean = ???
+ def containsAll(x$1: java.util.Collection[_]): Boolean = ???
+ def isEmpty(): Boolean = ???
+ def iterator(): java.util.Iterator[List[_ <: String]] = ???
+ def remove(x$1: Any): Boolean = ???
+ def removeAll(x$1: java.util.Collection[_]): Boolean = ???
+ def retainAll(x$1: java.util.Collection[_]): Boolean = ???
+ def size(): Int = ???
+ def toArray[T](x$1: Array[T with Object]): Array[T with Object] = ???
+ def toArray(): Array[Object] = ???
+
+class Bar extends Collection[List[_ <: String]]
+ ^
+abstract-report2.scala:7: error: class Baz needs to be abstract, since:
+it has 13 unimplemented members.
+/** As seen from class Baz, the missing signatures are as follows.
+ * For convenience, these are usable as stub implementations.
+ */
+ def add(x$1: T): Boolean = ???
+ def addAll(x$1: java.util.Collection[_ <: T]): Boolean = ???
+ def clear(): Unit = ???
+ def contains(x$1: Any): Boolean = ???
+ def containsAll(x$1: java.util.Collection[_]): Boolean = ???
+ def isEmpty(): Boolean = ???
+ def iterator(): java.util.Iterator[T] = ???
+ def remove(x$1: Any): Boolean = ???
+ def removeAll(x$1: java.util.Collection[_]): Boolean = ???
+ def retainAll(x$1: java.util.Collection[_]): Boolean = ???
+ def size(): Int = ???
+ def toArray[T](x$1: Array[T with Object]): Array[T with Object] = ???
+ def toArray(): Array[Object] = ???
+
+class Baz[T] extends Collection[T]
+ ^
+abstract-report2.scala:11: error: class Dingus needs to be abstract, since:
+it has 24 unimplemented members.
+/** As seen from class Dingus, the missing signatures are as follows.
+ * For convenience, these are usable as stub implementations.
+ */
+ // Members declared in java.util.Collection
+ def add(x$1: String): Boolean = ???
+ def addAll(x$1: java.util.Collection[_ <: String]): Boolean = ???
+ def clear(): Unit = ???
+ def contains(x$1: Any): Boolean = ???
+ def containsAll(x$1: java.util.Collection[_]): Boolean = ???
+ def iterator(): java.util.Iterator[String] = ???
+ def remove(x$1: Any): Boolean = ???
+ def removeAll(x$1: java.util.Collection[_]): Boolean = ???
+ def retainAll(x$1: java.util.Collection[_]): Boolean = ???
+ def toArray[T](x$1: Array[T with Object]): Array[T with Object] = ???
+ def toArray(): Array[Object] = ???
+
+ // Members declared in scala.collection.GenTraversableOnce
+ def isTraversableAgain: Boolean = ???
+ def toIterator: Iterator[(Set[Int], String)] = ???
+ def toStream: Stream[(Set[Int], String)] = ???
+
+ // Members declared in scala.math.Ordering
+ def compare(x: List[Int],y: List[Int]): Int = ???
+
+ // Members declared in scala.collection.TraversableOnce
+ def copyToArray[B >: (Set[Int], String)](xs: Array[B],start: Int,len: Int): Unit = ???
+ def exists(p: ((Set[Int], String)) => Boolean): Boolean = ???
+ def find(p: ((Set[Int], String)) => Boolean): Option[(Set[Int], String)] = ???
+ def forall(p: ((Set[Int], String)) => Boolean): Boolean = ???
+ def foreach[U](f: ((Set[Int], String)) => U): Unit = ???
+ def hasDefiniteSize: Boolean = ???
+ def isEmpty: Boolean = ???
+ def seq: scala.collection.TraversableOnce[(Set[Int], String)] = ???
+ def toTraversable: Traversable[(Set[Int], String)] = ???
+
+class Dingus extends Bippy[String, Set[Int], List[Int]]
+ ^
+four errors found
diff --git a/test/files/neg/abstract-report2.scala b/test/files/neg/abstract-report2.scala
new file mode 100644
index 0000000..b6327b0
--- /dev/null
+++ b/test/files/neg/abstract-report2.scala
@@ -0,0 +1,11 @@
+import java.util.Collection
+
+class Foo extends Collection[Int]
+
+class Bar extends Collection[List[_ <: String]]
+
+class Baz[T] extends Collection[T]
+
+trait Bippy[T1, T2, T3] extends Collection[T1] with TraversableOnce[(T2, String)] with Ordering[T3]
+
+class Dingus extends Bippy[String, Set[Int], List[Int]]
\ No newline at end of file
diff --git a/test/files/neg/abstraction-from-volatile-type-error.check b/test/files/neg/abstraction-from-volatile-type-error.check
new file mode 100644
index 0000000..34ba055
--- /dev/null
+++ b/test/files/neg/abstraction-from-volatile-type-error.check
@@ -0,0 +1,4 @@
+abstraction-from-volatile-type-error.scala:9: error: illegal abstraction from value with volatile type a.Tv
+ val tv : a.Tv
+ ^
+one error found
diff --git a/test/files/neg/abstraction-from-volatile-type-error.scala b/test/files/neg/abstraction-from-volatile-type-error.scala
new file mode 100644
index 0000000..5afcb3e
--- /dev/null
+++ b/test/files/neg/abstraction-from-volatile-type-error.scala
@@ -0,0 +1,11 @@
+class A {
+ type T
+ type Tv = AnyRef with T
+}
+
+object Test {
+ type B = a.type forSome {
+ val a : A
+ val tv : a.Tv
+ }
+}
diff --git a/test/files/neg/ambiguous-float-dots.check b/test/files/neg/ambiguous-float-dots.check
new file mode 100644
index 0000000..6c21056
--- /dev/null
+++ b/test/files/neg/ambiguous-float-dots.check
@@ -0,0 +1,16 @@
+ambiguous-float-dots.scala:2: error: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
+ val x0 = 5.
+ ^
+ambiguous-float-dots.scala:6: error: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
+ val x1 = 5.f
+ ^
+ambiguous-float-dots.scala:7: error: Treating numbers with a leading zero as octal is deprecated.
+ val y0 = 055
+ ^
+ambiguous-float-dots.scala:11: error: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
+ 1.+(2)
+ ^
+ambiguous-float-dots.scala:12: error: This lexical syntax is deprecated. From scala 2.11, a dot will only be considered part of a number if it is immediately followed by a digit.
+ 1. + 2
+ ^
+5 errors found
diff --git a/test/files/neg/ambiguous-float-dots.flags b/test/files/neg/ambiguous-float-dots.flags
new file mode 100644
index 0000000..65faf53
--- /dev/null
+++ b/test/files/neg/ambiguous-float-dots.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -deprecation
\ No newline at end of file
diff --git a/test/files/neg/ambiguous-float-dots.scala b/test/files/neg/ambiguous-float-dots.scala
new file mode 100644
index 0000000..87e948d
--- /dev/null
+++ b/test/files/neg/ambiguous-float-dots.scala
@@ -0,0 +1,14 @@
+class A {
+ val x0 = 5.
+}
+
+class B {
+ val x1 = 5.f
+ val y0 = 055
+}
+
+class D {
+ 1.+(2)
+ 1. + 2
+ 1 + 2
+}
diff --git a/test/files/neg/ambiguous-float-dots2.check b/test/files/neg/ambiguous-float-dots2.check
new file mode 100644
index 0000000..8919d2c
--- /dev/null
+++ b/test/files/neg/ambiguous-float-dots2.check
@@ -0,0 +1,10 @@
+ambiguous-float-dots2.scala:7: error: Non-zero numbers may not have a leading zero.
+ val y0 = 055
+ ^
+ambiguous-float-dots2.scala:3: error: identifier expected but '}' found.
+}
+^
+ambiguous-float-dots2.scala:12: error: ';' expected but integer literal found.
+ 1. + 2
+ ^
+three errors found
diff --git a/test/files/neg/ambiguous-float-dots2.flags b/test/files/neg/ambiguous-float-dots2.flags
new file mode 100644
index 0000000..112fc72
--- /dev/null
+++ b/test/files/neg/ambiguous-float-dots2.flags
@@ -0,0 +1 @@
+-Xfuture
\ No newline at end of file
diff --git a/test/files/neg/ambiguous-float-dots2.scala b/test/files/neg/ambiguous-float-dots2.scala
new file mode 100644
index 0000000..87e948d
--- /dev/null
+++ b/test/files/neg/ambiguous-float-dots2.scala
@@ -0,0 +1,14 @@
+class A {
+ val x0 = 5.
+}
+
+class B {
+ val x1 = 5.f
+ val y0 = 055
+}
+
+class D {
+ 1.+(2)
+ 1. + 2
+ 1 + 2
+}
diff --git a/test/files/neg/annot-nonconst.check b/test/files/neg/annot-nonconst.check
index e4166e0..b43e58a 100644
--- a/test/files/neg/annot-nonconst.check
+++ b/test/files/neg/annot-nonconst.check
@@ -1,12 +1,12 @@
annot-nonconst.scala:1: warning: Implementation restriction: subclassing Classfile does not
make your annotation visible at runtime. If that is what
you want, you must write the annotation class in Java.
-class Length(value: Int) extends ClassfileAnnotation
+class Length(value: Int) extends annotation.ClassfileAnnotation
^
annot-nonconst.scala:2: warning: Implementation restriction: subclassing Classfile does not
make your annotation visible at runtime. If that is what
you want, you must write the annotation class in Java.
-class Ann2(value: String) extends ClassfileAnnotation
+class Ann2(value: String) extends annotation.ClassfileAnnotation
^
annot-nonconst.scala:6: error: annotation argument needs to be a constant; found: n
@Length(n) def foo = "foo"
diff --git a/test/files/neg/annot-nonconst.scala b/test/files/neg/annot-nonconst.scala
index 69bb60d..1b5856f 100644
--- a/test/files/neg/annot-nonconst.scala
+++ b/test/files/neg/annot-nonconst.scala
@@ -1,5 +1,5 @@
-class Length(value: Int) extends ClassfileAnnotation
-class Ann2(value: String) extends ClassfileAnnotation
+class Length(value: Int) extends annotation.ClassfileAnnotation
+class Ann2(value: String) extends annotation.ClassfileAnnotation
object Test {
def n = 15
diff --git a/test/files/neg/any-vs-anyref.check b/test/files/neg/any-vs-anyref.check
new file mode 100644
index 0000000..63c4853
--- /dev/null
+++ b/test/files/neg/any-vs-anyref.check
@@ -0,0 +1,64 @@
+any-vs-anyref.scala:6: error: type mismatch;
+ found : a.type (with underlying type A)
+ required: AnyRef
+Note that A is bounded only by Equals, which means AnyRef is not a known parent.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo1[A <: Product](a: A) = { type X = a.type }
+ ^
+any-vs-anyref.scala:7: error: type mismatch;
+ found : a.type (with underlying type A)
+ required: AnyRef
+Note that A is bounded only by Product, Quux, which means AnyRef is not a known parent.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo2[A <: Product with Quux](a: A) = { type X = a.type }
+ ^
+any-vs-anyref.scala:8: error: type mismatch;
+ found : a.type (with underlying type Product)
+ required: AnyRef
+Note that Product extends Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo3(a: Product) = { type X = a.type }
+ ^
+any-vs-anyref.scala:9: error: type mismatch;
+ found : Product with Quux
+ required: AnyRef
+Note that the parents of this type (Product, Quux) extend Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo4(a: Product with Quux) = { type X = a.type }
+ ^
+any-vs-anyref.scala:10: error: value eq is not a member of Quux with Product
+Note that the parents of this type (Quux, Product) extend Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo5(x: Quux with Product) = (x eq "abc") && ("abc" eq x)
+ ^
+any-vs-anyref.scala:11: error: value eq is not a member of Quux with Product{def f: Int}
+Note that the parents of this type (Quux, Product) extend Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo6(x: Quux with Product { def f: Int }) = (x eq "abc") && ("abc" eq x)
+ ^
+any-vs-anyref.scala:12: error: type mismatch;
+ found : Quux with Product{def eq(other: String): Boolean}
+ required: AnyRef
+Note that the parents of this type (Quux, Product) extend Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo7(x: Quux with Product { def eq(other: String): Boolean }) = (x eq "abc") && ("abc" eq x)
+ ^
+any-vs-anyref.scala:22: error: value eq is not a member of Bippy
+Note that Bippy extends Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def bad1(x: Bippy, y: Bippy) = x eq y
+ ^
+any-vs-anyref.scala:27: error: type mismatch;
+ found : Quux{def g(x: String): String}
+ required: Quux{def g(x: Int): Int}
+ f(new Quux { def g(x: String) = x })
+ ^
+9 errors found
diff --git a/test/files/neg/any-vs-anyref.scala b/test/files/neg/any-vs-anyref.scala
new file mode 100644
index 0000000..8d237fb
--- /dev/null
+++ b/test/files/neg/any-vs-anyref.scala
@@ -0,0 +1,29 @@
+trait Quux extends Any
+trait QuuxRef extends AnyRef
+final class Bippy(val x: Any) extends AnyVal with Quux
+
+object Foo {
+ def foo1[A <: Product](a: A) = { type X = a.type }
+ def foo2[A <: Product with Quux](a: A) = { type X = a.type }
+ def foo3(a: Product) = { type X = a.type }
+ def foo4(a: Product with Quux) = { type X = a.type }
+ def foo5(x: Quux with Product) = (x eq "abc") && ("abc" eq x)
+ def foo6(x: Quux with Product { def f: Int }) = (x eq "abc") && ("abc" eq x)
+ def foo7(x: Quux with Product { def eq(other: String): Boolean }) = (x eq "abc") && ("abc" eq x)
+
+ def ok1[A <: QuuxRef](a: A) = { type X = a.type }
+ def ok2[A <: Product with QuuxRef](a: A) = { type X = a.type }
+ def ok3(a: QuuxRef) = { type X = a.type }
+ def ok4(a: Product with QuuxRef) = { type X = a.type }
+ def ok5(x: QuuxRef with Product) = (x eq "abc") && ("abc" eq x)
+ def ok6(x: QuuxRef with Product { def f: Int }) = (x eq "abc") && ("abc" eq x)
+ def ok7(x: QuuxRef { def eq(other: String): Boolean }) = (x eq "abc") && ("abc" eq x)
+
+ def bad1(x: Bippy, y: Bippy) = x eq y
+}
+
+object Bar {
+ def f(x: Quux { def g(x: Int): Int }): Int = x g 5
+ f(new Quux { def g(x: String) = x })
+ f(new Quux { def g(x: Int) = x })
+}
diff --git a/test/files/neg/anytrait.check b/test/files/neg/anytrait.check
new file mode 100644
index 0000000..fabe74d
--- /dev/null
+++ b/test/files/neg/anytrait.check
@@ -0,0 +1,7 @@
+anytrait.scala:3: error: field definition is not allowed in universal trait extending from class Any
+ var x = 1
+ ^
+anytrait.scala:5: error: this statement is not allowed in universal trait extending from class Any
+ { x += 1 }
+ ^
+two errors found
diff --git a/test/files/neg/anytrait.scala b/test/files/neg/anytrait.scala
new file mode 100644
index 0000000..1501486
--- /dev/null
+++ b/test/files/neg/anytrait.scala
@@ -0,0 +1,10 @@
+trait T extends Any {
+
+ var x = 1
+
+ { x += 1 }
+
+ type T = Int
+
+ val y: T
+}
diff --git a/test/files/neg/anyval-anyref-parent.check b/test/files/neg/anyval-anyref-parent.check
new file mode 100644
index 0000000..fe20e5d
--- /dev/null
+++ b/test/files/neg/anyval-anyref-parent.check
@@ -0,0 +1,23 @@
+anyval-anyref-parent.scala:2: error: only classes (not traits) are allowed to extend AnyVal
+trait Foo2 extends AnyVal // fail
+ ^
+anyval-anyref-parent.scala:5: error: Any does not have a constructor
+class Bar1 extends Any // fail
+ ^
+anyval-anyref-parent.scala:6: error: value class needs to have exactly one public val parameter
+class Bar2(x: Int) extends AnyVal // fail
+ ^
+anyval-anyref-parent.scala:10: error: illegal inheritance; superclass Any
+ is not a subclass of the superclass Object
+ of the mixin trait Immutable
+trait Foo4 extends Any with Immutable // fail
+ ^
+anyval-anyref-parent.scala:11: error: illegal inheritance; superclass AnyVal
+ is not a subclass of the superclass Object
+ of the mixin trait Immutable
+trait Foo5 extends AnyVal with Immutable // fail
+ ^
+anyval-anyref-parent.scala:11: error: only classes (not traits) are allowed to extend AnyVal
+trait Foo5 extends AnyVal with Immutable // fail
+ ^
+6 errors found
diff --git a/test/files/neg/anyval-anyref-parent.scala b/test/files/neg/anyval-anyref-parent.scala
new file mode 100644
index 0000000..f927992
--- /dev/null
+++ b/test/files/neg/anyval-anyref-parent.scala
@@ -0,0 +1,12 @@
+trait Foo1 extends Any
+trait Foo2 extends AnyVal // fail
+trait Foo3 extends AnyRef
+
+class Bar1 extends Any // fail
+class Bar2(x: Int) extends AnyVal // fail
+class Bar3(val x: Int) extends AnyVal // fail
+class Bar4 extends AnyRef
+
+trait Foo4 extends Any with Immutable // fail
+trait Foo5 extends AnyVal with Immutable // fail
+trait Foo6 extends AnyRef with Immutable
diff --git a/test/files/neg/anyval-sealed.check b/test/files/neg/anyval-sealed.check
deleted file mode 100644
index 48a457b..0000000
--- a/test/files/neg/anyval-sealed.check
+++ /dev/null
@@ -1,12 +0,0 @@
-anyval-sealed.scala:2: error: match is not exhaustive!
-missing combination Byte
-missing combination Char
-missing combination Double
-missing combination Float
-missing combination Long
-missing combination Short
-missing combination Unit
-
- def f(x: AnyVal) = x match {
- ^
-one error found
diff --git a/test/files/neg/anyval-sealed.scala b/test/files/neg/anyval-sealed.scala
deleted file mode 100644
index 232a183..0000000
--- a/test/files/neg/anyval-sealed.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-class A {
- def f(x: AnyVal) = x match {
- case _: Boolean => 1
- case _: Int => 2
- }
-}
\ No newline at end of file
diff --git a/test/files/neg/applydynamic_sip.check b/test/files/neg/applydynamic_sip.check
new file mode 100644
index 0000000..b508583
--- /dev/null
+++ b/test/files/neg/applydynamic_sip.check
@@ -0,0 +1,58 @@
+applydynamic_sip.scala:7: error: applyDynamic does not support passing a vararg parameter
+ qual.sel(a, a2: _*)
+ ^
+applydynamic_sip.scala:8: error: applyDynamicNamed does not support passing a vararg parameter
+ qual.sel(arg = a, a2: _*)
+ ^
+applydynamic_sip.scala:9: error: applyDynamicNamed does not support passing a vararg parameter
+ qual.sel(arg, arg2 = "a2", a2: _*)
+ ^
+applydynamic_sip.scala:18: error: type mismatch;
+ found : String("sel")
+ required: Int
+error after rewriting to Test.this.bad1.selectDynamic("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad1.sel
+ ^
+applydynamic_sip.scala:19: error: type mismatch;
+ found : String("sel")
+ required: Int
+error after rewriting to Test.this.bad1.applyDynamic("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad1.sel(1)
+ ^
+applydynamic_sip.scala:20: error: type mismatch;
+ found : String("sel")
+ required: Int
+error after rewriting to Test.this.bad1.applyDynamicNamed("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad1.sel(a = 1)
+ ^
+applydynamic_sip.scala:21: error: type mismatch;
+ found : String("sel")
+ required: Int
+error after rewriting to Test.this.bad1.updateDynamic("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad1.sel = 1
+ ^
+applydynamic_sip.scala:29: error: Int does not take parameters
+error after rewriting to Test.this.bad2.selectDynamic("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad2.sel
+ ^
+applydynamic_sip.scala:30: error: Int does not take parameters
+error after rewriting to Test.this.bad2.applyDynamic("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad2.sel(1)
+ ^
+applydynamic_sip.scala:31: error: Int does not take parameters
+error after rewriting to Test.this.bad2.applyDynamicNamed("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad2.sel(a = 1)
+ ^
+applydynamic_sip.scala:32: error: Int does not take parameters
+error after rewriting to Test.this.bad2.updateDynamic("sel")
+possible cause: maybe a wrong Dynamic method signature?
+ bad2.sel = 1
+ ^
+11 errors found
diff --git a/test/files/neg/applydynamic_sip.flags b/test/files/neg/applydynamic_sip.flags
new file mode 100644
index 0000000..1141f97
--- /dev/null
+++ b/test/files/neg/applydynamic_sip.flags
@@ -0,0 +1 @@
+-language:dynamics
diff --git a/test/files/neg/applydynamic_sip.scala b/test/files/neg/applydynamic_sip.scala
new file mode 100644
index 0000000..ee4432e
--- /dev/null
+++ b/test/files/neg/applydynamic_sip.scala
@@ -0,0 +1,33 @@
+object Test extends App {
+ val qual: Dynamic = ???
+ val expr = "expr"
+ val a = "a"
+ val a2 = "a2"
+
+ qual.sel(a, a2: _*)
+ qual.sel(arg = a, a2: _*)
+ qual.sel(arg, arg2 = "a2", a2: _*)
+
+ val bad1 = new Dynamic {
+ def selectDynamic(n: Int) = n
+ def applyDynamic(n: Int) = n
+ def applyDynamicNamed(n: Int) = n
+ def updateDynamic(n: Int) = n
+
+ }
+ bad1.sel
+ bad1.sel(1)
+ bad1.sel(a = 1)
+ bad1.sel = 1
+
+ val bad2 = new Dynamic {
+ def selectDynamic = 1
+ def applyDynamic = 1
+ def applyDynamicNamed = 1
+ def updateDynamic = 1
+ }
+ bad2.sel
+ bad2.sel(1)
+ bad2.sel(a = 1)
+ bad2.sel = 1
+}
diff --git a/test/files/neg/array-not-seq.check b/test/files/neg/array-not-seq.check
index c16ecda..a3a639e 100644
--- a/test/files/neg/array-not-seq.check
+++ b/test/files/neg/array-not-seq.check
@@ -1,7 +1,13 @@
array-not-seq.scala:2: error: An Array will no longer match as Seq[_].
def f1(x: Any) = x.isInstanceOf[Seq[_]]
^
-error: An Array will no longer match as Seq[_].
-error: An Array will no longer match as Seq[_].
-error: An Array will no longer match as Seq[_].
+array-not-seq.scala:4: error: An Array will no longer match as Seq[_].
+ case _: Seq[_] => true
+ ^
+array-not-seq.scala:16: error: An Array will no longer match as Seq[_].
+ case (Some(_: Seq[_]), Nil, _) => 1
+ ^
+array-not-seq.scala:17: error: An Array will no longer match as Seq[_].
+ case (None, List(_: List[_], _), _) => 2
+ ^
four errors found
diff --git a/test/files/neg/bug0418.check b/test/files/neg/bug0418.check
deleted file mode 100644
index 08bd24b..0000000
--- a/test/files/neg/bug0418.check
+++ /dev/null
@@ -1,7 +0,0 @@
-bug0418.scala:2: error: not found: value Foo12340771
- null match { case Foo12340771.Bar(x) => x }
- ^
-bug0418.scala:2: error: not found: value x
- null match { case Foo12340771.Bar(x) => x }
- ^
-two errors found
diff --git a/test/files/neg/bug1010.check b/test/files/neg/bug1010.check
deleted file mode 100644
index 3cf9738..0000000
--- a/test/files/neg/bug1010.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug1010.scala:14: error: type mismatch;
- found : MailBox#Message
- required: _3.in.Message where val _3: Actor
- unstable.send(msg) // in.Message becomes unstable.Message, but that's ok since Message is a concrete type member
- ^
-one error found
diff --git a/test/files/neg/bug1011.check b/test/files/neg/bug1011.check
deleted file mode 100644
index 7de6776..0000000
--- a/test/files/neg/bug1011.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug1011.scala:8: error: not found: value entity
- <dl><code>{Text(entity)}</code>
- ^
-one error found
diff --git a/test/files/neg/bug1017.check b/test/files/neg/bug1017.check
deleted file mode 100644
index ad88a10..0000000
--- a/test/files/neg/bug1017.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug1017.scala:3: error: not found: value foo
-<x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x>{ foo }</x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x>
- ^
-one error found
diff --git a/test/files/neg/bug1038.check b/test/files/neg/bug1038.check
deleted file mode 100644
index e09cbbc..0000000
--- a/test/files/neg/bug1038.check
+++ /dev/null
@@ -1,5 +0,0 @@
-bug1038.scala:4: error: not enough arguments for constructor X: (x: Int)X.
-Unspecified value parameter x.
- val a = new X
- ^
-one error found
diff --git a/test/files/neg/bug1041.check b/test/files/neg/bug1041.check
deleted file mode 100644
index c366364..0000000
--- a/test/files/neg/bug1041.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug1041.scala:3: error: type mismatch;
- found : Int(1)
- required: List[Int]
- case 1 => 4
- ^
-one error found
diff --git a/test/files/neg/bug1106.check b/test/files/neg/bug1106.check
deleted file mode 100644
index 3fa595a..0000000
--- a/test/files/neg/bug1106.check
+++ /dev/null
@@ -1,7 +0,0 @@
-bug1106.scala:2: error: expected class or object definition
-val p = new Par[String]
-^
-bug1106.scala:5: error: expected class or object definition
-new Foo[p.type](p) // crashes compiler
-^
-two errors found
diff --git a/test/files/neg/bug1112.check b/test/files/neg/bug1112.check
deleted file mode 100644
index d94dba9..0000000
--- a/test/files/neg/bug1112.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug1112.scala:12: error: too many arguments for method call: (p: Int)(f: => () => Unit)Unit
- call(0,() => System.out.println("here we are"))
- ^
-one error found
diff --git a/test/files/neg/bug112706A.check b/test/files/neg/bug112706A.check
deleted file mode 100644
index 8fb5bfc..0000000
--- a/test/files/neg/bug112706A.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug112706A.scala:5: error: constructor cannot be instantiated to expected type;
- found : (T1, T2)
- required: java.lang.String
- case Tuple2(node,_) =>
- ^
-one error found
diff --git a/test/files/neg/bug1181.check b/test/files/neg/bug1181.check
deleted file mode 100644
index a196a70..0000000
--- a/test/files/neg/bug1181.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug1181.scala:9: error: missing parameter type
- _ => buildMap(map.updated(keyList.head, valueList.head), keyList.tail, valueList.tail)
- ^
-one error found
diff --git a/test/files/neg/bug1183.check b/test/files/neg/bug1183.check
deleted file mode 100644
index ebbf8ac..0000000
--- a/test/files/neg/bug1183.check
+++ /dev/null
@@ -1,17 +0,0 @@
-bug1183.scala:10: error: name clash: class Foo defines object Baz
-and its companion object Foo also defines class Baz
- object Baz
- ^
-bug1183.scala:11: error: name clash: class Foo defines class Bam
-and its companion object Foo also defines object Bam
- class Bam
- ^
-bug1183.scala:12: error: name clash: class Foo defines object Bar
-and its companion object Foo also defines class Bar
- object Bar
- ^
-bug1183.scala:13: error: name clash: class Foo defines class Bar
-and its companion object Foo also defines class Bar
- case class Bar(i:Int)
- ^
-four errors found
diff --git a/test/files/neg/bug1183.scala b/test/files/neg/bug1183.scala
deleted file mode 100644
index ee9385f..0000000
--- a/test/files/neg/bug1183.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-import scala.testing.SUnit._
-
-object Test extends TestConsoleMain {
-
- def suite = new TestSuite(
- new Test717
- )
-
- class Foo(j:Int) {
- object Baz
- class Bam
- object Bar
- case class Bar(i:Int)
- }
-
-
- class Test717 extends TestCase("#717 test path of case classes") {
- val foo1 = new Foo(1)
-
- override def runTest() = {
- val res = (foo1.Bar(2):Any) match {
- case foo1.Bar(2) => true // (1)
- }
- assertTrue("ok", res);
- }
- }
-
- // (2)
- object Foo {
- class Bar(val x : String)
- class Baz
- object Bam
- object Bar
-
- def unapply(s : String) : Option[Bar] = Some(new Bar(s))
- }
-
-}
diff --git a/test/files/neg/bug1224.check b/test/files/neg/bug1224.check
deleted file mode 100644
index 515f823..0000000
--- a/test/files/neg/bug1224.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug1224.scala:4: error: illegal cyclic reference involving type T
- type T >: C[T] <: C[C[T]]
- ^
-one error found
diff --git a/test/files/neg/bug1241.check b/test/files/neg/bug1241.check
deleted file mode 100644
index 48c86ca..0000000
--- a/test/files/neg/bug1241.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug1241.scala:5: error: class type required but AnyRef{def hello(): Unit} found
- val x4 = new T { def hello() { println("4") } } // error!
- ^
-one error found
diff --git a/test/files/neg/bug1275.check b/test/files/neg/bug1275.check
deleted file mode 100644
index 6ee8365..0000000
--- a/test/files/neg/bug1275.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug1275.scala:11: error: type mismatch;
- found : xs.MyType[a]
- required: s
- = xs f
- ^
-one error found
diff --git a/test/files/neg/bug1286.check b/test/files/neg/bug1286.check
deleted file mode 100644
index 734964e..0000000
--- a/test/files/neg/bug1286.check
+++ /dev/null
@@ -1,7 +0,0 @@
-a.scala:1: error: Companions 'object Foo' and 'trait Foo' must be defined in same file
-trait Foo {
- ^
-b.scala:1: error: Companions 'trait Foo' and 'object Foo' must be defined in same file
-object Foo extends Foo {
- ^
-two errors found
diff --git a/test/files/neg/bug1364.check b/test/files/neg/bug1364.check
deleted file mode 100644
index bb5ca90..0000000
--- a/test/files/neg/bug1364.check
+++ /dev/null
@@ -1,5 +0,0 @@
-bug1364.scala:9: error: overriding type T in trait A with bounds >: Nothing <: AnyRef{type S[-U]};
- type T has incompatible type
- type T = { type S[U] = U }
- ^
-one error found
diff --git a/test/files/neg/bug1523.check b/test/files/neg/bug1523.check
deleted file mode 100644
index 96d052f..0000000
--- a/test/files/neg/bug1523.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug1523.scala:4: error: too many arguments for method bug: (x: Any)Any
- def go() = bug("a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a")
- ^
-one error found
diff --git a/test/files/neg/bug1623.check b/test/files/neg/bug1623.check
deleted file mode 100644
index cfc2b53..0000000
--- a/test/files/neg/bug1623.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug1623.scala:11: error: class BImpl cannot be instantiated because it does not conform to its self-type test.BImpl with test.A
- val b = new BImpl
- ^
-one error found
diff --git a/test/files/neg/bug1838.check b/test/files/neg/bug1838.check
deleted file mode 100644
index 0c08bfe..0000000
--- a/test/files/neg/bug1838.check
+++ /dev/null
@@ -1,7 +0,0 @@
-bug1838.scala:6: error: `sealed' modifier can be used only for classes
- sealed val v = 0
- ^
-bug1838.scala:5: error: `sealed' modifier can be used only for classes
- sealed def f = 0
- ^
-two errors found
diff --git a/test/files/neg/bug1845.check b/test/files/neg/bug1845.check
deleted file mode 100644
index 164f3f6..0000000
--- a/test/files/neg/bug1845.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug1845.scala:9: error: illegal cyclic reference involving value <import>
- val lexical = new StdLexical
- ^
-one error found
diff --git a/test/files/neg/bug1845.scala b/test/files/neg/bug1845.scala
deleted file mode 100644
index ceb43a0..0000000
--- a/test/files/neg/bug1845.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.util.parsing.combinator.syntactical.TokenParsers
-import scala.util.parsing.combinator.lexical.StdLexical
-import scala.util.parsing.syntax.StdTokens
-
-class MyTokenParsers extends TokenParsers {
- import lexical._
- type Tokens = StdTokens
- type Elem = lexical.Token
- val lexical = new StdLexical
-}
diff --git a/test/files/neg/bug1872.check b/test/files/neg/bug1872.check
deleted file mode 100644
index 9f1af33..0000000
--- a/test/files/neg/bug1872.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug1872.scala:3: error: isInstanceOf cannot test if value types are references.
- def f(x: Int) = x.isInstanceOf[util.Random]
- ^
-one error found
diff --git a/test/files/neg/bug1878.check b/test/files/neg/bug1878.check
deleted file mode 100644
index f760781..0000000
--- a/test/files/neg/bug1878.check
+++ /dev/null
@@ -1,15 +0,0 @@
-bug1878.scala:3: error: _* may only come last
- val err1 = "" match { case Seq(f @ _*, ',') => f }
- ^
-bug1878.scala:3: error: scrutinee is incompatible with pattern type;
- found : Seq[A]
- required: java.lang.String
- val err1 = "" match { case Seq(f @ _*, ',') => f }
- ^
-bug1878.scala:9: error: _* may only come last
- val List(List(_*, arg2), _) = List(List(1,2,3), List(4,5,6))
- ^
-bug1878.scala:13: error: _* may only come last
- case <p> { _* } </p> =>
- ^
-four errors found
diff --git a/test/files/neg/bug1878.scala b/test/files/neg/bug1878.scala
deleted file mode 100644
index 5278fbb..0000000
--- a/test/files/neg/bug1878.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-object Test extends App {
- // illegal
- val err1 = "" match { case Seq(f @ _*, ',') => f }
-
- // no error
- val List(List(arg1, _*), _) = List(List(1,2,3), List(4,5,6))
-
- // illegal
- val List(List(_*, arg2), _) = List(List(1,2,3), List(4,5,6))
-
- // illegal - bug #1764
- null match {
- case <p> { _* } </p> =>
- }
-}
diff --git a/test/files/neg/bug1909b.check b/test/files/neg/bug1909b.check
deleted file mode 100644
index e5de5fe..0000000
--- a/test/files/neg/bug1909b.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug1909b.scala:4: error: this can be used only in a class, object, or template
- def bar() = this.z + 5
- ^
-one error found
diff --git a/test/files/neg/bug1960.check b/test/files/neg/bug1960.check
deleted file mode 100644
index 8e19f31..0000000
--- a/test/files/neg/bug1960.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug1960.scala:5: error: parameter 'p' requires field but conflicts with p in 'TBase'
-class Aclass (p: Int) extends TBase { def g() { f(p) } }
- ^
-one error found
diff --git a/test/files/neg/bug200.check b/test/files/neg/bug200.check
deleted file mode 100644
index d367006..0000000
--- a/test/files/neg/bug200.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug200.scala:7: error: method foo is defined twice
- def foo: Int;
- ^
-one error found
diff --git a/test/files/neg/bug2102.check b/test/files/neg/bug2102.check
deleted file mode 100644
index 7478fcf..0000000
--- a/test/files/neg/bug2102.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug2102.scala:2: error: type mismatch;
- found : java.util.Iterator[Int]
- required: scala.collection.Iterator[_]
- val x: Iterator[_] = new java.util.ArrayList[Int]().iterator
- ^
-one error found
diff --git a/test/files/neg/bug2144.check b/test/files/neg/bug2144.check
deleted file mode 100644
index 989bddc..0000000
--- a/test/files/neg/bug2144.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug2144.scala:2: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
- def foo[A](a: A) = new { def bar(x: A): A = x }
- ^
-one error found
diff --git a/test/files/neg/bug2148.check b/test/files/neg/bug2148.check
deleted file mode 100644
index 22be424..0000000
--- a/test/files/neg/bug2148.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug2148.scala:9: error: type A is not a stable prefix
- val b = new A with A#A1
- ^
-one error found
diff --git a/test/files/neg/bug2206.check b/test/files/neg/bug2206.check
deleted file mode 100644
index 3deb4d9..0000000
--- a/test/files/neg/bug2206.check
+++ /dev/null
@@ -1,5 +0,0 @@
-bug2206.scala:10: error: value f is not a member of o.A
- Note: implicit method ax is not applicable here because it comes after the application point and it lacks an explicit result type
- a.f()
- ^
-one error found
diff --git a/test/files/neg/bug2213.check b/test/files/neg/bug2213.check
deleted file mode 100644
index b24f7dc..0000000
--- a/test/files/neg/bug2213.check
+++ /dev/null
@@ -1,15 +0,0 @@
-bug2213.scala:9: error: class C needs to be abstract, since:
-value y in class A of type Int is not defined
-value x in class A of type Int is not defined
-method g in class A of type => Int is not defined
-method f in class A of type => Int is not defined
-class C extends A {}
- ^
-bug2213.scala:11: error: object creation impossible, since:
-value y in class A of type Int is not defined
-value x in class A of type Int is not defined
-method g in class A of type => Int is not defined
-method f in class A of type => Int is not defined
-object Q extends A { }
- ^
-two errors found
diff --git a/test/files/neg/bug2275a.check b/test/files/neg/bug2275a.check
deleted file mode 100644
index 892a903..0000000
--- a/test/files/neg/bug2275a.check
+++ /dev/null
@@ -1,13 +0,0 @@
-bug2275a.scala:4: error: in XML literal: in XML content, please use '}}' to express '}'
- }else{
- ^
-bug2275a.scala:3: error: I encountered a '}' where I didn't expect one, maybe this tag isn't closed <br>
- <br>
- ^
-bug2275a.scala:4: error: ';' expected but 'else' found.
- }else{
- ^
-bug2275a.scala:7: error: '}' expected but eof found.
-}
- ^
-four errors found
diff --git a/test/files/neg/bug2275b.check b/test/files/neg/bug2275b.check
deleted file mode 100644
index 0db3eb8..0000000
--- a/test/files/neg/bug2275b.check
+++ /dev/null
@@ -1,10 +0,0 @@
-bug2275b.scala:2: error: in XML literal: in XML content, please use '}}' to express '}'
- {<br>}xx
- ^
-bug2275b.scala:2: error: I encountered a '}' where I didn't expect one, maybe this tag isn't closed <br>
- {<br>}xx
- ^
-bug2275b.scala:3: error: '}' expected but eof found.
-}
- ^
-three errors found
diff --git a/test/files/neg/bug2441.check b/test/files/neg/bug2441.check
deleted file mode 100644
index 2c82e6a..0000000
--- a/test/files/neg/bug2441.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug2441.scala:12: error: private class Y escapes its defining scope as part of type Some[B.Y]
- override def f = Some(new B.Y)
- ^
-one error found
diff --git a/test/files/neg/bug276.check b/test/files/neg/bug276.check
deleted file mode 100644
index edc4ec5..0000000
--- a/test/files/neg/bug276.check
+++ /dev/null
@@ -1,5 +0,0 @@
-bug276.scala:6: error: overriding type Bar in class Foo, which equals (Int, Int);
- class Bar cannot be used here - classes and objects can only override abstract types
- class Bar
- ^
-one error found
diff --git a/test/files/neg/bug278.check b/test/files/neg/bug278.check
deleted file mode 100644
index b1041b7..0000000
--- a/test/files/neg/bug278.check
+++ /dev/null
@@ -1,10 +0,0 @@
-bug278.scala:5: error: overloaded method value a with alternatives:
- => C.this.A => Unit <and>
- => () => Unit
- does not take type parameters
- a[A]
- ^
-bug278.scala:4: error: method a is defined twice
- def a = (p:A) => ()
- ^
-two errors found
diff --git a/test/files/neg/bug278.scala b/test/files/neg/bug278.scala
deleted file mode 100644
index 16ffe10..0000000
--- a/test/files/neg/bug278.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-class C {
- class A
- def a = () => ()
- def a = (p:A) => ()
- a[A]
-}
diff --git a/test/files/neg/bug284.check b/test/files/neg/bug284.check
deleted file mode 100644
index 821727b..0000000
--- a/test/files/neg/bug284.check
+++ /dev/null
@@ -1,8 +0,0 @@
-bug284.scala:2: warning: Detected apparent refinement of Unit; are you missing an '=' sign?
- def f1(a: T): Unit { }
- ^
-bug284.scala:5: error: Unmatched closing brace '}' ignored here
- }
- ^
-one warning found
-one error found
diff --git a/test/files/neg/bug3209.check b/test/files/neg/bug3209.check
deleted file mode 100644
index fa50f4c..0000000
--- a/test/files/neg/bug3209.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug3209.scala:2: error: expected start of definition
-package test
-^
-one error found
diff --git a/test/files/neg/bug343.check b/test/files/neg/bug343.check
deleted file mode 100644
index 9b6af98..0000000
--- a/test/files/neg/bug343.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug343.scala:5: error: private class Foo escapes its defining scope as part of type C.this.Foo
- def get:Foo = new Foo();
- ^
-one error found
diff --git a/test/files/neg/bug3631.check b/test/files/neg/bug3631.check
deleted file mode 100644
index 12d94aa..0000000
--- a/test/files/neg/bug3631.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug3631.scala:3: error: Implementation restriction: case classes cannot have more than 22 parameters.
-case class X23(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int, x23: Int) { }
- ^
-one error found
diff --git a/test/files/neg/bug3683a.check b/test/files/neg/bug3683a.check
deleted file mode 100644
index a1c5b9c..0000000
--- a/test/files/neg/bug3683a.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug3683a.scala:14: error: match is not exhaustive!
-missing combination XX
-
- w match {
- ^
-one error found
diff --git a/test/files/neg/bug3683b.check b/test/files/neg/bug3683b.check
deleted file mode 100644
index 2637f62..0000000
--- a/test/files/neg/bug3683b.check
+++ /dev/null
@@ -1,8 +0,0 @@
-bug3683b.scala:15: error: constructor cannot be instantiated to expected type;
- found : X
- required: W[Bar]
-Note: Foo >: Bar (and X <: W[Foo]), but trait W is invariant in type T.
-You may wish to define T as -T instead. (SLS 4.5)
- case X() => 1
- ^
-one error found
diff --git a/test/files/neg/bug3714-neg.check b/test/files/neg/bug3714-neg.check
deleted file mode 100644
index fab6623..0000000
--- a/test/files/neg/bug3714-neg.check
+++ /dev/null
@@ -1,13 +0,0 @@
-bug3714-neg.scala:17: error: value break in class BreakImpl cannot be accessed in BreakImpl
- Access to protected value break not permitted because
- enclosing class object Test is not a subclass of
- class BreakImpl where target is defined
- case b: BreakImpl => b.break
- ^
-bug3714-neg.scala:25: error: value break in class BreakImpl cannot be accessed in BreakImpl
- Access to protected value break not permitted because
- enclosing class object Test is not a subclass of
- class BreakImpl where target is defined
- case b: BreakImpl => b.break
- ^
-two errors found
diff --git a/test/files/neg/bug3714-neg.scala b/test/files/neg/bug3714-neg.scala
deleted file mode 100644
index 77394b5..0000000
--- a/test/files/neg/bug3714-neg.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-// this is a slight negative twist on run/bug3714.scala.
-trait Break {
- protected val break: Int;
-}
-
-class BreakImpl(protected val break: Int) extends Break { }
-object BreakImpl {
- def apply(x: Int): Break = new BreakImpl(x)
- def unapply(x: Any) = x match {
- case x: BreakImpl => Some(x.break)
- case _ => None
- }
-}
-
-object Test {
- def f1(x: Break) = x match {
- case b: BreakImpl => b.break
- case b => -1
- }
- def f2(x: Break) = x match {
- case BreakImpl(x) => x
- case _ => -1
- }
- def f3(x: Any) = x match {
- case b: BreakImpl => b.break
- case b => -1
- }
- def f4(x: Any) = x match {
- case BreakImpl(x) => x
- case _ => -1
- }
-
- def main(args: Array[String]) {
- val break = BreakImpl(22)
- assert(f1(break) == 22)
- assert(f2(break) == 22)
- assert(f3(break) == 22)
- assert(f4(break) == 22)
- }
-}
-
diff --git a/test/files/neg/bug3736.check b/test/files/neg/bug3736.check
deleted file mode 100644
index cc222d1..0000000
--- a/test/files/neg/bug3736.check
+++ /dev/null
@@ -1,16 +0,0 @@
-bug3736.scala:4: error: super not allowed here: use this.isInstanceOf instead
- def f2 = super.isInstanceOf[String]
- ^
-bug3736.scala:5: error: super not allowed here: use this.asInstanceOf instead
- def f3 = super.asInstanceOf[AnyRef]
- ^
-bug3736.scala:6: error: super not allowed here: use this.== instead
- def f4 = super.==(new AnyRef)
- ^
-bug3736.scala:7: error: super not allowed here: use this.!= instead
- def f5 = super.!=(new AnyRef)
- ^
-bug3736.scala:8: error: super not allowed here: use this.## instead
- def f6 = super.##
- ^
-5 errors found
diff --git a/test/files/neg/bug3909.check b/test/files/neg/bug3909.check
deleted file mode 100644
index af79579..0000000
--- a/test/files/neg/bug3909.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug3909.scala:1: error: in object DO, multiple overloaded alternatives of m1 define default arguments
-object DO {
- ^
-one error found
diff --git a/test/files/neg/bug391.check b/test/files/neg/bug391.check
deleted file mode 100644
index 18f36a5..0000000
--- a/test/files/neg/bug391.check
+++ /dev/null
@@ -1,13 +0,0 @@
-bug391.scala:2: error: identifier expected but 'def' found.
- def fun1(def x: Int): Int = x; // the "def x" is illegal
- ^
-bug391.scala:4: error: ':' expected but '}' found.
-}
-^
-bug391.scala:6: error: identifier expected but 'def' found.
-class E(def x: Int); // the "def x" is illegal
- ^
-bug391.scala:6: error: ':' expected but eof found.
-class E(def x: Int); // the "def x" is illegal
- ^
-four errors found
diff --git a/test/files/neg/bug3913.check b/test/files/neg/bug3913.check
deleted file mode 100644
index e6df31f..0000000
--- a/test/files/neg/bug3913.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug3913.scala:2: error: super constructor cannot be passed a self reference unless parameter is declared by-name
-object LimboStage extends Stage( Set( LimboStage ))
- ^
-one error found
diff --git a/test/files/neg/bug4069.check b/test/files/neg/bug4069.check
deleted file mode 100644
index d3aa8f3..0000000
--- a/test/files/neg/bug4069.check
+++ /dev/null
@@ -1,16 +0,0 @@
-bug4069.scala:7: error: unexpected end of input: possible missing '}' in XML block
- case 2 =>
- ^
-bug4069.scala:6: error: Missing closing brace `}' assumed here
- </div>
- ^
-bug4069.scala:9: error: in XML literal: in XML content, please use '}}' to express '}'
- }
- ^
-bug4069.scala:4: error: I encountered a '}' where I didn't expect one, maybe this tag isn't closed <div>
- <div>
- ^
-bug4069.scala:10: error: '}' expected but eof found.
-}
-^
-5 errors found
diff --git a/test/files/neg/bug409.check b/test/files/neg/bug409.check
deleted file mode 100644
index 25e5a41..0000000
--- a/test/files/neg/bug409.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug409.scala:6: error: traits or objects may not have parameters
-class Toto extends Expr with Case1(12);
- ^
-one error found
diff --git a/test/files/neg/bug412.check b/test/files/neg/bug412.check
deleted file mode 100644
index f25ad6f..0000000
--- a/test/files/neg/bug412.check
+++ /dev/null
@@ -1,5 +0,0 @@
-bug412.scala:11: error: stable identifier required, but A.this.c found.
- Note that value c is not stable because its type, A.this.CX with A.this.C2, is volatile.
- def castA(x: c.T): T2 = x;
- ^
-one error found
diff --git a/test/files/neg/bug414.check b/test/files/neg/bug414.check
deleted file mode 100644
index ec23e26..0000000
--- a/test/files/neg/bug414.check
+++ /dev/null
@@ -1,11 +0,0 @@
-bug414.scala:5: error: pattern type is incompatible with expected type;
- found : object Empty
- required: IntMap[a]
- case Empty =>
- ^
-bug414.scala:7: error: type mismatch;
- found : Unit
- required: a
- case _ =>
- ^
-two errors found
diff --git a/test/files/neg/bug4158.check b/test/files/neg/bug4158.check
deleted file mode 100644
index 0d9873d..0000000
--- a/test/files/neg/bug4158.check
+++ /dev/null
@@ -1,19 +0,0 @@
-bug4158.scala:3: error: type mismatch;
- found : Null(null)
- required: Int
-Note that implicit conversions are not applicable because they are ambiguous:
- both method Integer2intNullConflict in class LowPriorityImplicits of type (x: Null)Int
- and method Integer2int in object Predef of type (x: java.lang.Integer)Int
- are possible conversion functions from Null(null) to Int
- var y = null: Int
- ^
-bug4158.scala:2: error: type mismatch;
- found : Null(null)
- required: Int
-Note that implicit conversions are not applicable because they are ambiguous:
- both method Integer2intNullConflict in class LowPriorityImplicits of type (x: Null)Int
- and method Integer2int in object Predef of type (x: java.lang.Integer)Int
- are possible conversion functions from Null(null) to Int
- var x: Int = null
- ^
-two errors found
diff --git a/test/files/neg/bug4166.check b/test/files/neg/bug4166.check
deleted file mode 100644
index 938dad1..0000000
--- a/test/files/neg/bug4166.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug4166.scala:3: error: super constructor arguments cannot reference unconstructed `this`
-class Demo extends Base(new { Demo.this }) {
- ^
-one error found
diff --git a/test/files/neg/bug4166.scala b/test/files/neg/bug4166.scala
deleted file mode 100644
index c20796c..0000000
--- a/test/files/neg/bug4166.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-class Base(a: Any)
-
-class Demo extends Base(new { Demo.this }) {
- val x: Any = ()
-}
-
-
-class Demo2 extends Base(new { this }) {
- val x: Any = ()
-}
-
diff --git a/test/files/neg/bug4174.check b/test/files/neg/bug4174.check
deleted file mode 100644
index 4881c00..0000000
--- a/test/files/neg/bug4174.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug4174.scala:7: error: method bar overrides nothing
- foo(new C { override def bar = 1 })
- ^
-one error found
diff --git a/test/files/neg/bug418.check b/test/files/neg/bug418.check
deleted file mode 100644
index e7580be..0000000
--- a/test/files/neg/bug418.check
+++ /dev/null
@@ -1,7 +0,0 @@
-bug418.scala:2: error: not found: value Foo12340771
- null match { case Foo12340771.Bar(x) => x }
- ^
-bug418.scala:2: error: not found: value x
- null match { case Foo12340771.Bar(x) => x }
- ^
-two errors found
diff --git a/test/files/neg/bug4196.check b/test/files/neg/bug4196.check
deleted file mode 100644
index 0905d2e..0000000
--- a/test/files/neg/bug4196.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug4196.scala:5: error: Some[String] does not take parameters
- }.apply("first param") ("spurious param")
- ^
-one error found
diff --git a/test/files/neg/bug421.check b/test/files/neg/bug421.check
deleted file mode 100644
index 71daba2..0000000
--- a/test/files/neg/bug421.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug421.scala:5: error: star patterns must correspond with varargs parameters
- case Bar("foo",_*) => error("huh?");
- ^
-one error found
diff --git a/test/files/neg/bug4217.check b/test/files/neg/bug4217.check
deleted file mode 100644
index 4de9201..0000000
--- a/test/files/neg/bug4217.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug4217.scala:2: error: 'case' expected but '}' found.
- 42 match { }
- ^
-one error found
diff --git a/test/files/neg/bug4221.check b/test/files/neg/bug4221.check
deleted file mode 100644
index 471332e..0000000
--- a/test/files/neg/bug4221.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug4221.scala:8: error: type mismatch;
- found : Unit
- required: Wrapper[S]
- def wrap[S <: Cl#Sub[S]](v: S): Wrapper[S] = {
- ^
-one error found
diff --git a/test/files/neg/bug4302.check b/test/files/neg/bug4302.check
deleted file mode 100644
index ca33748..0000000
--- a/test/files/neg/bug4302.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug4302.scala:2: error: abstract type T in type T is unchecked since it is eliminated by erasure
- def hasMatch[T](x: AnyRef) = x.isInstanceOf[T]
- ^
-one error found
diff --git a/test/files/neg/bug4419.check b/test/files/neg/bug4419.check
deleted file mode 100644
index 488be9a..0000000
--- a/test/files/neg/bug4419.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug4419.scala:2: error: forward reference extends over definition of value b
- { val b = a; val a = 1 }
- ^
-one error found
diff --git a/test/files/neg/bug4419.scala b/test/files/neg/bug4419.scala
deleted file mode 100644
index 38a34be..0000000
--- a/test/files/neg/bug4419.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class A {
- { val b = a; val a = 1 }
-}
\ No newline at end of file
diff --git a/test/files/neg/bug452.check b/test/files/neg/bug452.check
deleted file mode 100644
index ac23ebc..0000000
--- a/test/files/neg/bug452.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug452.scala:3: error: type mismatch;
- found : Test.type (with underlying type object Test)
- required: Test.Foo
- def this() = this(this);
- ^
-one error found
diff --git a/test/files/neg/bug4584.check b/test/files/neg/bug4584.check
deleted file mode 100644
index 0008d9e..0000000
--- a/test/files/neg/bug4584.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug4584.scala:1: error: incomplete unicode escape
-class A { val \u2
- ^
-one error found
diff --git a/test/files/neg/bug464-neg.check b/test/files/neg/bug464-neg.check
deleted file mode 100644
index 7921c5a..0000000
--- a/test/files/neg/bug464-neg.check
+++ /dev/null
@@ -1,16 +0,0 @@
-bug464-neg.scala:7: error: not found: value f1
- f1()
- ^
-bug464-neg.scala:8: error: method f1 in class A cannot be accessed in A with ScalaObject
- super.f1()
- ^
-bug464-neg.scala:9: error: value f2 is not a member of B
- def otherb(b2: B) = b2.f2()
- ^
-bug464-neg.scala:10: error: method f3 in class A cannot be accessed in B
- f3()
- ^
-bug464-neg.scala:11: error: method f3 in class A cannot be accessed in A with ScalaObject
- super.f3()
- ^
-5 errors found
diff --git a/test/files/neg/bug4727.check b/test/files/neg/bug4727.check
deleted file mode 100644
index cac35b1..0000000
--- a/test/files/neg/bug4727.check
+++ /dev/null
@@ -1,11 +0,0 @@
-bug4727.scala:5: error: type mismatch;
- found : Null
- required: Int
-Note that implicit conversions are not applicable because they are ambiguous:
- both method Integer2intNullConflict in class LowPriorityImplicits of type (x: Null)Int
- and method Integer2int in object Predef of type (x: java.lang.Integer)Int
- are possible conversion functions from Null to Int
-Error occurred in an application involving default arguments.
- new C[Int]
- ^
-one error found
diff --git a/test/files/neg/bug473.check b/test/files/neg/bug473.check
deleted file mode 100644
index 7e3c4a4..0000000
--- a/test/files/neg/bug473.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug473.scala:3: error: super constructor cannot be passed a self reference unless parameter is declared by-name
-case object Voop extends Foo(Voop)
- ^
-one error found
diff --git a/test/files/neg/bug500.check b/test/files/neg/bug500.check
deleted file mode 100644
index ff4443c..0000000
--- a/test/files/neg/bug500.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug500.scala:3: error: lower bound X does not conform to upper bound Y
- type T >: X <: Y;
- ^
-one error found
diff --git a/test/files/neg/bug501.check b/test/files/neg/bug501.check
deleted file mode 100644
index 112e49a..0000000
--- a/test/files/neg/bug501.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug501.scala:3: error: lower bound X does not conform to upper bound Y
- abstract class I { type T >: X <: Y; }
- ^
-one error found
diff --git a/test/files/neg/bug510.check b/test/files/neg/bug510.check
deleted file mode 100644
index 14b715e..0000000
--- a/test/files/neg/bug510.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug510.scala:19: error: cyclic aliasing or subtyping involving type T
- def g(t: e.T): Unit = {
- ^
-one error found
diff --git a/test/files/neg/bug512.check b/test/files/neg/bug512.check
deleted file mode 100644
index f3d0e18..0000000
--- a/test/files/neg/bug512.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug512.scala:3: error: not found: value something
- val xxx = something ||
- ^
-one error found
diff --git a/test/files/neg/bug515.check b/test/files/neg/bug515.check
deleted file mode 100644
index a38c8b6..0000000
--- a/test/files/neg/bug515.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug515.scala:7: error: type mismatch;
- found : java.lang.String
- required: Test.Truc
- val parent: Truc = file.getMachin
- ^
-one error found
diff --git a/test/files/neg/bug520.check b/test/files/neg/bug520.check
deleted file mode 100644
index 258f811..0000000
--- a/test/files/neg/bug520.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug520.scala:8: error: overloaded method verifyKeyword needs result type
- verifyKeyword("", source, pos);
- ^
-one error found
diff --git a/test/files/neg/bug521.check b/test/files/neg/bug521.check
deleted file mode 100644
index d8e9268..0000000
--- a/test/files/neg/bug521.check
+++ /dev/null
@@ -1,15 +0,0 @@
-bug521.scala:10: error: class PlainFile needs to be abstract, since method path in class AbstractFile of type => String is not defined
-class PlainFile(val file : File) extends AbstractFile {}
- ^
-bug521.scala:13: error: overriding value file in class PlainFile of type java.io.File;
- value file needs `override' modifier
-final class ZipArchive(val file : File, archive : ZipFile) extends PlainFile(file) {
- ^
-bug521.scala:13: error: class ZipArchive needs to be abstract, since method path in class AbstractFile of type => String is not defined
-final class ZipArchive(val file : File, archive : ZipFile) extends PlainFile(file) {
- ^
-bug521.scala:15: error: overriding value path in class VirtualFile of type String;
- method path needs to be a stable, immutable value
- override def path = "";
- ^
-four errors found
diff --git a/test/files/neg/bug545.check b/test/files/neg/bug545.check
deleted file mode 100644
index d184f90..0000000
--- a/test/files/neg/bug545.check
+++ /dev/null
@@ -1,7 +0,0 @@
-bug545.scala:4: error: value blah is not a member of Test.Foo
- val x = foo.blah match {
- ^
-bug545.scala:5: error: recursive value x needs type
- case List(x) => x
- ^
-two errors found
diff --git a/test/files/neg/bug550.check b/test/files/neg/bug550.check
deleted file mode 100644
index e1650b7..0000000
--- a/test/files/neg/bug550.check
+++ /dev/null
@@ -1,7 +0,0 @@
-bug550.scala:6: error: type List takes type parameters
- def sum[a](xs: List)(implicit m: Monoid[a]): a =
- ^
-bug550.scala:8: error: could not find implicit value for parameter m: Monoid[a]
- sum(List(1,2,3))
- ^
-two errors found
diff --git a/test/files/neg/bug556.check b/test/files/neg/bug556.check
deleted file mode 100644
index 26aa2f1..0000000
--- a/test/files/neg/bug556.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug556.scala:3: error: wrong number of parameters; expected = 1
- def g:Int = f((x,y)=>x)
- ^
-one error found
diff --git a/test/files/neg/bug558.check b/test/files/neg/bug558.check
deleted file mode 100644
index 061f64f..0000000
--- a/test/files/neg/bug558.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug558.scala:13: error: value file is not a member of NewModel.this.RootURL
- final val source = top.file;
- ^
-one error found
diff --git a/test/files/neg/bug562.check b/test/files/neg/bug562.check
deleted file mode 100644
index 638d758..0000000
--- a/test/files/neg/bug562.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug562.scala:10: error: super may be not be used on value y
- override val y = super.y;
- ^
-one error found
diff --git a/test/files/neg/bug563.check b/test/files/neg/bug563.check
deleted file mode 100644
index c10f504..0000000
--- a/test/files/neg/bug563.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug563.scala:6: error: missing parameter type
- map(n,ptr => new Cell(ptr.elem));
- ^
-one error found
diff --git a/test/files/neg/bug563.scala b/test/files/neg/bug563.scala
deleted file mode 100644
index 3261491..0000000
--- a/test/files/neg/bug563.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object Test {
- def map[A,R](a : List[A], f : A => R) : List[R] = a.map(f);
-
- def split(sn : Iterable[List[Cell[Int]]]) : Unit =
- for (n <- sn)
- map(n,ptr => new Cell(ptr.elem));
-}
diff --git a/test/files/neg/bug565.check b/test/files/neg/bug565.check
deleted file mode 100644
index 47a1ebd..0000000
--- a/test/files/neg/bug565.check
+++ /dev/null
@@ -1,5 +0,0 @@
-bug565.scala:2: error: only classes can have declared but undefined members
-(Note that variables need to be initialized to be defined)
- var s0: String
- ^
-one error found
diff --git a/test/files/neg/bug576.check b/test/files/neg/bug576.check
deleted file mode 100644
index b496cca..0000000
--- a/test/files/neg/bug576.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug576.scala:14: error: overloaded method insert needs result type
- if (true) sibling.insert(node);
- ^
-one error found
diff --git a/test/files/neg/bug585.check b/test/files/neg/bug585.check
deleted file mode 100644
index 4f4c5bf..0000000
--- a/test/files/neg/bug585.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug585.scala:1: error: unclosed comment
-/*
-^
-one error found
diff --git a/test/files/neg/bug588.check b/test/files/neg/bug588.check
deleted file mode 100644
index 8c01ac5..0000000
--- a/test/files/neg/bug588.check
+++ /dev/null
@@ -1,13 +0,0 @@
-bug588.scala:3: error: double definition:
-method visit:(f: Int => String)Boolean and
-method visit:(f: Int => Unit)Boolean at line 2
-have same type after erasure: (f: Function1)Boolean
- def visit(f: Int => String): Boolean
- ^
-bug588.scala:10: error: double definition:
-method f:(brac: Test.this.TypeB)Unit and
-method f:(node: Test.this.TypeA)Unit at line 9
-have same type after erasure: (brac: Test#TraitA)Unit
- def f(brac : TypeB) : Unit;
- ^
-two errors found
diff --git a/test/files/neg/bug591.check b/test/files/neg/bug591.check
deleted file mode 100644
index 2c2e08d..0000000
--- a/test/files/neg/bug591.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug591.scala:38: error: method input_= is defined twice
- def input_=(in : Input) = {}
- ^
-one error found
diff --git a/test/files/neg/bug593.check b/test/files/neg/bug593.check
deleted file mode 100644
index f71affe..0000000
--- a/test/files/neg/bug593.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug593.scala:1: error: traits or objects may not have parameters
-trait Wrapper[T](x : T) {
- ^
-one error found
diff --git a/test/files/neg/bug608.check b/test/files/neg/bug608.check
deleted file mode 100644
index a8e32e4..0000000
--- a/test/files/neg/bug608.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug608.scala:16: error: type mismatch;
- found : hs{type a = ha}
- required: hs{type s = hs; type a = ha}
- = g(f(x).bimap(id))
- ^
-one error found
diff --git a/test/files/neg/bug630.check b/test/files/neg/bug630.check
deleted file mode 100644
index 739d214..0000000
--- a/test/files/neg/bug630.check
+++ /dev/null
@@ -1,5 +0,0 @@
-bug630.scala:20: error: overriding value foo in trait Bar of type Req2;
- object foo has incompatible type
- object foo extends Req1
- ^
-one error found
diff --git a/test/files/neg/bug631.check b/test/files/neg/bug631.check
deleted file mode 100644
index 0650c70..0000000
--- a/test/files/neg/bug631.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug631.scala:1: error: `implicit' modifier cannot be used for top-level objects
-implicit object Test {
- ^
-one error found
diff --git a/test/files/neg/bug633.check b/test/files/neg/bug633.check
deleted file mode 100644
index 90f2f42..0000000
--- a/test/files/neg/bug633.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug633.scala:3: error: not found: type ListBuffer
- def t(a : ListBuffer[String]) = {
- ^
-one error found
diff --git a/test/files/neg/bug639.check b/test/files/neg/bug639.check
deleted file mode 100644
index 30a9351..0000000
--- a/test/files/neg/bug639.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug639.scala:3: error: not found: object a
-import a._
- ^
-one error found
diff --git a/test/files/neg/bug649.check b/test/files/neg/bug649.check
deleted file mode 100644
index 2a1533d..0000000
--- a/test/files/neg/bug649.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug649.scala:3: error: overloaded method foo needs result type
- def foo[A] = foo[A]
- ^
-one error found
diff --git a/test/files/neg/bug650.check b/test/files/neg/bug650.check
deleted file mode 100644
index 1120d68..0000000
--- a/test/files/neg/bug650.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug650.scala:4: error: missing type arguments
-trait Test2 extends LinkedList;
- ^
-one error found
diff --git a/test/files/neg/bug663.check b/test/files/neg/bug663.check
deleted file mode 100644
index 6ae7198..0000000
--- a/test/files/neg/bug663.check
+++ /dev/null
@@ -1,7 +0,0 @@
-bug663.scala:11: error: name clash between defined and inherited member:
-method asMatch:(m: Test.this.Node)Any and
-method asMatch:(node: Test.this.Matchable)Any in trait MatchableImpl
-have same type after erasure: (m: test.Test#NodeImpl)java.lang.Object
- def asMatch(m : Node) : Any = {
- ^
-one error found
diff --git a/test/files/neg/bug664.check b/test/files/neg/bug664.check
deleted file mode 100644
index c29f9b6..0000000
--- a/test/files/neg/bug664.check
+++ /dev/null
@@ -1,7 +0,0 @@
-bug664.scala:4: error: type Foo is not a member of test.Test with ScalaObject
- trait Foo extends super.Foo {
- ^
-bug664.scala:5: error: type Bar is not a member of AnyRef with ScalaObject
- trait Bar extends super.Bar;
- ^
-two errors found
diff --git a/test/files/neg/bug667.check b/test/files/neg/bug667.check
deleted file mode 100644
index 704e317..0000000
--- a/test/files/neg/bug667.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug667.scala:8: error: class Ni inherits itself
- class Ni extends super.Ni with Ni;
- ^
-one error found
diff --git a/test/files/neg/bug668.check b/test/files/neg/bug668.check
deleted file mode 100644
index 10be0c8..0000000
--- a/test/files/neg/bug668.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug668.scala:1: error: type Iterable takes type parameters
-class Test extends Iterable
- ^
-one error found
diff --git a/test/files/neg/bug677.check b/test/files/neg/bug677.check
deleted file mode 100644
index 5b0bb27..0000000
--- a/test/files/neg/bug677.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug677.scala:2: error: type mismatch;
- found : () => Int
- required: Nothing
- val zx: Nothing = {() => 4}
- ^
-one error found
diff --git a/test/files/neg/bug691.check b/test/files/neg/bug691.check
deleted file mode 100644
index 47ac8c3..0000000
--- a/test/files/neg/bug691.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug691.scala:27: error: ambiguous parent class qualifier
- trait TiC extends super[Arrow].Ti2 with super[AssignArrow].Ti1;
- ^
-one error found
diff --git a/test/files/neg/bug692.check b/test/files/neg/bug692.check
deleted file mode 100644
index 9e96027..0000000
--- a/test/files/neg/bug692.check
+++ /dev/null
@@ -1,24 +0,0 @@
-bug692.scala:3: error: not found: type T
- trait Type[T0] extends Type0[T];
- ^
-bug692.scala:10: error: class Foo takes type parameters
- case class FooType() extends ClassType[Foo,AnyRef](ObjectType());
- ^
-bug692.scala:13: error: class Foo takes type parameters
- case class BarType[T3 <: Foo](tpeT : RefType[T3]) extends ClassType[Bar[T3],Foo](FooType);
- ^
-bug692.scala:13: error: class Foo takes type parameters
- case class BarType[T3 <: Foo](tpeT : RefType[T3]) extends ClassType[Bar[T3],Foo](FooType);
- ^
-bug692.scala:14: error: class Foo takes type parameters
- implicit def typeOfBar[T4 <: Foo](implicit elem : RefType[T4]) : RefType[Bar[T4]] =
- ^
-bug692.scala:15: error: type mismatch;
- found : test3.this.BarType[T4]
- required: test3.this.RefType[test3.this.Bar[T4]]
- BarType(elem);
- ^
-bug692.scala:19: error: class Foo takes type parameters
- class Bar[A <: Foo](implicit tpeA : Type[A]) extends Foo;
- ^
-7 errors found
diff --git a/test/files/neg/bug693.check b/test/files/neg/bug693.check
deleted file mode 100644
index d341d02..0000000
--- a/test/files/neg/bug693.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug693.scala:4: error: x is already defined as value x
- val x : Int = 10;
- ^
-one error found
diff --git a/test/files/neg/bug696.check b/test/files/neg/bug696.check
deleted file mode 100644
index 1ca740c..0000000
--- a/test/files/neg/bug696.check
+++ /dev/null
@@ -1,5 +0,0 @@
-bug696.scala:4: error: diverging implicit expansion for type TypeUtil0.Type[Any]
-starting with method WithType in object TypeUtil0
- as[Any](null);
- ^
-one error found
diff --git a/test/files/neg/bug700.check b/test/files/neg/bug700.check
deleted file mode 100644
index 5c28540..0000000
--- a/test/files/neg/bug700.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug700.scala:6: error: method foobar in trait Foo is accessed from super. It may not be abstract unless it is overridden by a member declared `abstract' and `override'
- def foobar: Unit = super.foobar
- ^
-one error found
diff --git a/test/files/neg/bug708.check b/test/files/neg/bug708.check
deleted file mode 100644
index cfeb01c..0000000
--- a/test/files/neg/bug708.check
+++ /dev/null
@@ -1,5 +0,0 @@
-bug708.scala:8: error: overriding type S in trait X with bounds >: Nothing <: A.this.T;
- type S has incompatible type
- override private[A] type S = Any;
- ^
-one error found
diff --git a/test/files/neg/bug712.check b/test/files/neg/bug712.check
deleted file mode 100644
index 532eb4a..0000000
--- a/test/files/neg/bug712.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug712.scala:10: error: value self is not a member of B.this.ParentImpl
- implicit def coerce(p : ParentImpl) = p.self;
- ^
-one error found
diff --git a/test/files/neg/bug715.check b/test/files/neg/bug715.check
deleted file mode 100644
index 8ea1ddb..0000000
--- a/test/files/neg/bug715.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug715.scala:12: error: method chilren in trait NodeImpl is accessed from super. It may not be abstract unless it is overridden by a member declared `abstract' and `override'
- override def children = super.chilren;
- ^
-one error found
diff --git a/test/files/neg/bug729.check b/test/files/neg/bug729.check
deleted file mode 100644
index 0b38b9b..0000000
--- a/test/files/neg/bug729.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug729.scala:20: error: type mismatch;
- found : ScalaParserAutoEdit.this.NodeImpl(in trait Parser)
- required: ScalaParserAutoEdit.this.NodeImpl(in trait ScalaParserAutoEdit)
- val yyy : NodeImpl = link.from;
- ^
-one error found
diff --git a/test/files/neg/bug752.check b/test/files/neg/bug752.check
deleted file mode 100644
index 9262f38..0000000
--- a/test/files/neg/bug752.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug752.scala:6: error: type mismatch;
- found : String => Unit
- required: Int => Unit
- f(g _)
- ^
-one error found
diff --git a/test/files/neg/bug765.check b/test/files/neg/bug765.check
deleted file mode 100644
index 02bd111..0000000
--- a/test/files/neg/bug765.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug765.scala:3: error: not found: type Bar123
- val bar = new Bar123
- ^
-one error found
diff --git a/test/files/neg/bug766.check b/test/files/neg/bug766.check
deleted file mode 100644
index d259db6..0000000
--- a/test/files/neg/bug766.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug766.scala:5: error: not found: value badIdentifier
- val p = badIdentifier
- ^
-one error found
diff --git a/test/files/neg/bug779.check b/test/files/neg/bug779.check
deleted file mode 100644
index 9419519..0000000
--- a/test/files/neg/bug779.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug779.scala:6: error: method ast has return statement; needs result type
- override def ast = return null
- ^
-one error found
diff --git a/test/files/neg/bug783.check b/test/files/neg/bug783.check
deleted file mode 100644
index bf82edc..0000000
--- a/test/files/neg/bug783.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug783.scala:12: error: type mismatch;
- found : Contexts.this.Global#Template
- required: Contexts.this.global.Template
- globalInit0.Template(10, 20);
- ^
-one error found
diff --git a/test/files/neg/bug798.check b/test/files/neg/bug798.check
deleted file mode 100644
index 5859e1e..0000000
--- a/test/files/neg/bug798.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug798.scala:2: error: cyclic aliasing or subtyping involving type Bracks
-trait Test[Bracks <: Bracks] {
- ^
-one error found
diff --git a/test/files/neg/bug800.check b/test/files/neg/bug800.check
deleted file mode 100644
index 00ca020..0000000
--- a/test/files/neg/bug800.check
+++ /dev/null
@@ -1,13 +0,0 @@
-bug800.scala:4: error: qualification is already defined as value qualification
- val qualification = false;
- ^
-bug800.scala:8: error: method qualification is defined twice
- val qualification = false;
- ^
-bug800.scala:12: error: value qualification is defined twice
- var qualification = false;
- ^
-bug800.scala:16: error: method qualification is defined twice
- var qualification = false;
- ^
-four errors found
diff --git a/test/files/neg/bug835.check b/test/files/neg/bug835.check
deleted file mode 100644
index 79ea97b..0000000
--- a/test/files/neg/bug835.check
+++ /dev/null
@@ -1,9 +0,0 @@
-bug835.scala:2: error: no `: _*' annotation allowed here
-(such annotations are only allowed in arguments to *-parameters)
- Console.println(List(List(1, 2, 3) : _*, List(4, 5, 6) : _*))
- ^
-bug835.scala:2: error: no `: _*' annotation allowed here
-(such annotations are only allowed in arguments to *-parameters)
- Console.println(List(List(1, 2, 3) : _*, List(4, 5, 6) : _*))
- ^
-two errors found
diff --git a/test/files/neg/bug836.check b/test/files/neg/bug836.check
deleted file mode 100644
index 4949e2c..0000000
--- a/test/files/neg/bug836.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug836.scala:9: error: type mismatch;
- found : Any
- required: A.this.S
- val some: S = any // compiles => type X is set to scala.Any
- ^
-one error found
diff --git a/test/files/neg/bug845.check b/test/files/neg/bug845.check
deleted file mode 100644
index a5b92e2..0000000
--- a/test/files/neg/bug845.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug845.scala:4: error: only classes can have declared but undefined members
- type Bar;
- ^
-one error found
diff --git a/test/files/neg/bug846.check b/test/files/neg/bug846.check
deleted file mode 100644
index 9a17238..0000000
--- a/test/files/neg/bug846.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug846.scala:9: error: type mismatch;
- found : Null(null)
- required: B
- if (a != null) f(a) else null
- ^
-one error found
diff --git a/test/files/neg/bug856.check b/test/files/neg/bug856.check
deleted file mode 100644
index 168855d..0000000
--- a/test/files/neg/bug856.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug856.scala:3: error: class ComplexRect needs to be abstract, since:
-method _2 in trait Product2 of type => Double is not defined
-method canEqual in trait Equals of type (that: Any)Boolean is not defined
-class ComplexRect(val _1:Double, _2:Double) extends Complex {
- ^
-one error found
diff --git a/test/files/neg/bug875.check b/test/files/neg/bug875.check
deleted file mode 100644
index 16a9822..0000000
--- a/test/files/neg/bug875.check
+++ /dev/null
@@ -1,17 +0,0 @@
-bug875.scala:3: error: no `: _*' annotation allowed here
-(such annotations are only allowed in arguments to *-parameters)
- val ys = List(1, 2, 3, xs: _*)
- ^
-bug875.scala:6: error: no `: _*' annotation allowed here
-(such annotations are only allowed in arguments to *-parameters)
- mkList1(xs: _*)
- ^
-bug875.scala:15: error: no `: _*' annotation allowed here
-(such annotations are only allowed in arguments to *-parameters)
- f(true, 1, xs: _*)
- ^
-bug875.scala:16: error: no `: _*' annotation allowed here
-(such annotations are only allowed in arguments to *-parameters)
- g(1, xs:_*)
- ^
-four errors found
diff --git a/test/files/neg/bug876.check b/test/files/neg/bug876.check
deleted file mode 100644
index 8552242..0000000
--- a/test/files/neg/bug876.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug876.scala:25: error: too many arguments for method apply: (key: AssertionError.A)manager.B in trait MapLike
- assert(manager.map(A2) == List(manager.map(A2, A1)))
- ^
-one error found
diff --git a/test/files/neg/bug877.check b/test/files/neg/bug877.check
deleted file mode 100644
index ddd1154..0000000
--- a/test/files/neg/bug877.check
+++ /dev/null
@@ -1,7 +0,0 @@
-bug877.scala:3: error: Invalid literal number
-trait Foo extends A(22A, Bug!) {}
- ^
-bug877.scala:3: error: parents of traits may not have parameters
-trait Foo extends A(22A, Bug!) {}
- ^
-two errors found
diff --git a/test/files/neg/bug882.check b/test/files/neg/bug882.check
deleted file mode 100644
index 4e3e6d0..0000000
--- a/test/files/neg/bug882.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug882.scala:2: error: traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'
-trait SortedSet[A <% Ordered[A]] {
- ^
-one error found
diff --git a/test/files/neg/bug900.check b/test/files/neg/bug900.check
deleted file mode 100644
index 95f7155..0000000
--- a/test/files/neg/bug900.check
+++ /dev/null
@@ -1,10 +0,0 @@
-bug900.scala:4: error: type mismatch;
- found : Foo.this.x.type (with underlying type Foo.this.bar)
- required: AnyRef
-Note that implicit conversions are not applicable because they are ambiguous:
- both method any2Ensuring in object Predef of type [A](x: A)Ensuring[A]
- and method any2ArrowAssoc in object Predef of type [A](x: A)ArrowAssoc[A]
- are possible conversion functions from Foo.this.x.type to AnyRef
- def break(): x.type
- ^
-one error found
diff --git a/test/files/neg/bug908.check b/test/files/neg/bug908.check
deleted file mode 100644
index c73f1d7..0000000
--- a/test/files/neg/bug908.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug908.scala:8: error: not found: value makeA
- this(makeA)
- ^
-one error found
diff --git a/test/files/neg/bug909.check b/test/files/neg/bug909.check
deleted file mode 100644
index 6dbe8aa..0000000
--- a/test/files/neg/bug909.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug909.scala:6: error: type mismatch;
- found : java.lang.String("Hello")
- required: Int
- case Foo("Hello") =>
- ^
-one error found
diff --git a/test/files/neg/bug910.check b/test/files/neg/bug910.check
deleted file mode 100644
index 1a845db..0000000
--- a/test/files/neg/bug910.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug910.scala:4: error: type mismatch;
- found : Seq[Char]
- required: Seq[Int]
- val y: Seq[Int] = rest
- ^
-one error found
diff --git a/test/files/neg/bug935.check b/test/files/neg/bug935.check
deleted file mode 100644
index 6a4129e..0000000
--- a/test/files/neg/bug935.check
+++ /dev/null
@@ -1,7 +0,0 @@
-bug935.scala:7: error: type arguments [Test3.B] do not conform to class E's type parameter bounds [T <: String]
- @E[B](new B) val b = "hi"
- ^
-bug935.scala:13: error: type arguments [Test4.B] do not conform to class E's type parameter bounds [T <: String]
- val b: String @E[B](new B) = "hi"
- ^
-two errors found
diff --git a/test/files/neg/bug944.check b/test/files/neg/bug944.check
deleted file mode 100644
index d45b968..0000000
--- a/test/files/neg/bug944.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug944.scala:5: error: implementation restricts functions to 22 parameters
- a23:Int) => 1
- ^
-one error found
diff --git a/test/files/neg/bug960.check b/test/files/neg/bug960.check
deleted file mode 100644
index ebfc3c6..0000000
--- a/test/files/neg/bug960.check
+++ /dev/null
@@ -1,10 +0,0 @@
-bug960.scala:18: error: ambiguous reference to overloaded definition,
-both method unapply in object List of type [a](xs: List[a])Option[Null]
-and method unapply in object List of type [a](xs: List[a])Option[(a, List[a])]
-match argument types (List[a])
- case List(x, xs) => 7
- ^
-bug960.scala:12: error: method unapply is defined twice
- def unapply[a](xs: List[a]): Option[Null] = xs match {
- ^
-two errors found
diff --git a/test/files/neg/bug960.scala b/test/files/neg/bug960.scala
deleted file mode 100644
index 5101cf8..0000000
--- a/test/files/neg/bug960.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-sealed abstract class List[+a]
-private case object Nil extends List[Nothing]
-private final case class Cons[+a](head: a, tail: List[a])
-extends List[a]
-
-object List {
- def unapply[a](xs: List[a]): Option[(a, List[a])] = xs match {
- case Nil => None
- case Cons(x, xs) => Some(x, xs)
- }
-
- def unapply[a](xs: List[a]): Option[Null] = xs match {
- case Nil => Some(null)
- case Cons(_, _) => None
- }
-
- def foo[a](xs: List[a]) = xs match {
- case List(x, xs) => 7
- }
-}
diff --git a/test/files/neg/bug961.check b/test/files/neg/bug961.check
deleted file mode 100644
index 0231a3e..0000000
--- a/test/files/neg/bug961.check
+++ /dev/null
@@ -1,4 +0,0 @@
-bug961.scala:11: error: object Temp.B does not take parameters
- B() match {
- ^
-one error found
diff --git a/test/files/neg/bug987.check b/test/files/neg/bug987.check
deleted file mode 100644
index 4cad622..0000000
--- a/test/files/neg/bug987.check
+++ /dev/null
@@ -1,19 +0,0 @@
-bug987.scala:15: error: illegal inheritance;
- class E inherits different type instances of trait B:
-B[D] and B[C]
-class E extends D
- ^
-bug987.scala:20: error: illegal inheritance;
- class F inherits different type instances of trait B:
-B[D] and B[C]
-class F extends D
- ^
-bug987.scala:25: error: illegal inheritance;
- class D inherits different type instances of trait B:
-B[D] and B[C]
-abstract class D extends C with B[D] {}
- ^
-bug987.scala:25: error: type arguments [D] do not conform to trait B's type parameter bounds [T <: B[T]]
-abstract class D extends C with B[D] {}
- ^
-four errors found
diff --git a/test/files/neg/bug997.check b/test/files/neg/bug997.check
deleted file mode 100644
index 44e3832..0000000
--- a/test/files/neg/bug997.check
+++ /dev/null
@@ -1,13 +0,0 @@
-bug997.scala:7: error: wrong number of arguments for object Foo
-"x" match { case Foo(a) => Console.println(a) }
- ^
-bug997.scala:7: error: not found: value a
-"x" match { case Foo(a) => Console.println(a) }
- ^
-bug997.scala:13: error: wrong number of arguments for object Foo
-"x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
- ^
-bug997.scala:13: error: not found: value a
-"x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
- ^
-four errors found
diff --git a/test/files/neg/bug997.scala b/test/files/neg/bug997.scala
deleted file mode 100644
index 42b4617..0000000
--- a/test/files/neg/bug997.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-// An extractor with 2 results
-object Foo { def unapply(x : String) = Some(Pair(x, x)) }
-
-object Test extends App {
-
-// Prints 'x'; ought not to compile (or maybe a should be the Pair?).
-"x" match { case Foo(a) => Console.println(a) }
-
-// Prints '(x,x)' as expected.
-"x" match { case Foo(a, b) => Console.println((a,b)) }
-
-// Gives confusing error 'not found: value c'.
-"x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
-
-}
diff --git a/test/files/neg/case-collision.check b/test/files/neg/case-collision.check
new file mode 100644
index 0000000..4edc6f1
--- /dev/null
+++ b/test/files/neg/case-collision.check
@@ -0,0 +1,10 @@
+case-collision.scala:5: error: Class foo.BIPPY differs only in case from foo.Bippy. Such classes will overwrite one another on case-insensitive filesystems.
+class BIPPY
+ ^
+case-collision.scala:11: error: Class foo.HyRaX$ differs only in case from foo.Hyrax$. Such classes will overwrite one another on case-insensitive filesystems.
+object HyRaX
+ ^
+case-collision.scala:8: error: Class foo.DINGO$ differs only in case from foo.Dingo$. Such classes will overwrite one another on case-insensitive filesystems.
+object DINGO
+ ^
+three errors found
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/neg/case-collision.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/neg/case-collision.flags
diff --git a/test/files/neg/case-collision.scala b/test/files/neg/case-collision.scala
new file mode 100644
index 0000000..241169a
--- /dev/null
+++ b/test/files/neg/case-collision.scala
@@ -0,0 +1,11 @@
+package foo
+
+class Bippy
+
+class BIPPY
+
+object Dingo
+object DINGO
+
+case class Hyrax()
+object HyRaX
diff --git a/test/files/neg/caseinherit.check b/test/files/neg/caseinherit.check
index 5630da2..09327a4 100644
--- a/test/files/neg/caseinherit.check
+++ b/test/files/neg/caseinherit.check
@@ -1,10 +1,10 @@
-caseinherit.scala:2: error: case class `class B' has case ancestor `class A'. Case-to-case inheritance has potentially dangerous bugs which are unlikely to be fixed. You are strongly encouraged to instead use extractors to pattern match on non-leaf nodes.
-case class B(y: Int) extends A(y)
- ^
-caseinherit.scala:3: error: case class `object Bippy' has case ancestor `class A'. Case-to-case inheritance has potentially dangerous bugs which are unlikely to be fixed. You are strongly encouraged to instead use extractors to pattern match on non-leaf nodes.
-case object Bippy extends A(55)
- ^
-caseinherit.scala:6: error: case class `class Dingus' has case ancestor `class A'. Case-to-case inheritance has potentially dangerous bugs which are unlikely to be fixed. You are strongly encouraged to instead use extractors to pattern match on non-leaf nodes.
-case class Dingus(y: Int) extends Innocent
- ^
+caseinherit.scala:3: error: case class B has case ancestor foo.A, but case-to-case inheritance is prohibited. To overcome this limitation, use extractors to pattern match on non-leaf nodes.
+ case class B(y: Int) extends A(y)
+ ^
+caseinherit.scala:4: error: case object Bippy has case ancestor foo.A, but case-to-case inheritance is prohibited. To overcome this limitation, use extractors to pattern match on non-leaf nodes.
+ case object Bippy extends A(55)
+ ^
+caseinherit.scala:11: error: case class Dingus has case ancestor foo.A, but case-to-case inheritance is prohibited. To overcome this limitation, use extractors to pattern match on non-leaf nodes.
+ case class Dingus(y: Int) extends Innocent
+ ^
three errors found
diff --git a/test/files/neg/caseinherit.scala b/test/files/neg/caseinherit.scala
index fdac97c..5c8da13 100644
--- a/test/files/neg/caseinherit.scala
+++ b/test/files/neg/caseinherit.scala
@@ -1,6 +1,13 @@
-case class A(x: Int)
-case class B(y: Int) extends A(y)
-case object Bippy extends A(55)
+package foo {
+ case class A(x: Int)
+ case class B(y: Int) extends A(y)
+ case object Bippy extends A(55)
+}
+import foo._
-class Innocent extends A(5)
-case class Dingus(y: Int) extends Innocent
\ No newline at end of file
+package bar {
+ class Blameless(x: Int)
+ class Innocent extends A(5)
+ case class Dingus(y: Int) extends Innocent
+ case object Hungle extends Blameless(5)
+}
\ No newline at end of file
diff --git a/test/files/neg/catch-all.check b/test/files/neg/catch-all.check
new file mode 100644
index 0000000..62f895c
--- /dev/null
+++ b/test/files/neg/catch-all.check
@@ -0,0 +1,10 @@
+catch-all.scala:2: error: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning.
+ try { "warn" } catch { case _ => }
+ ^
+catch-all.scala:4: error: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
+ try { "warn" } catch { case x => }
+ ^
+catch-all.scala:6: error: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning.
+ try { "warn" } catch { case _: RuntimeException => ; case x => }
+ ^
+three errors found
diff --git a/test/files/neg/bug3683a.flags b/test/files/neg/catch-all.flags
similarity index 100%
rename from test/files/neg/bug3683a.flags
rename to test/files/neg/catch-all.flags
diff --git a/test/files/neg/catch-all.scala b/test/files/neg/catch-all.scala
new file mode 100644
index 0000000..c05be77
--- /dev/null
+++ b/test/files/neg/catch-all.scala
@@ -0,0 +1,31 @@
+object CatchAll {
+ try { "warn" } catch { case _ => }
+
+ try { "warn" } catch { case x => }
+
+ try { "warn" } catch { case _: RuntimeException => ; case x => }
+
+ val t = T
+
+ try { "okay" } catch { case T => }
+
+ try { "okay" } catch { case `t` => }
+
+ try { "okay" } catch { case x @ T => }
+
+ try { "okay" } catch { case x @ `t` => }
+
+ try { "okay" } catch { case _: Throwable => }
+
+ try { "okay" } catch { case _: Exception => }
+
+ try { "okay" } catch { case okay: Throwable => }
+
+ try { "okay" } catch { case okay: Exception => }
+
+ try { "okay" } catch { case _ if "".isEmpty => }
+
+ "okay" match { case _ => "" }
+}
+
+object T extends Throwable
diff --git a/test/files/neg/checksensible.check b/test/files/neg/checksensible.check
index 9977739..d785179 100644
--- a/test/files/neg/checksensible.check
+++ b/test/files/neg/checksensible.check
@@ -25,67 +25,76 @@ checksensible.scala:26: error: comparing values of types Unit and Int using `=='
checksensible.scala:27: error: comparing values of types Int and Unit using `==' will always yield false
0 == (c = 1)
^
-checksensible.scala:29: error: comparing values of types Int and java.lang.String using `==' will always yield false
+checksensible.scala:29: error: comparing values of types Int and String using `==' will always yield false
1 == "abc"
^
-checksensible.scala:34: error: comparing a fresh object using `==' will always yield false
+checksensible.scala:33: error: comparing values of types Some[Int] and Int using `==' will always yield false
+ Some(1) == 1 // as above
+ ^
+checksensible.scala:38: error: comparing a fresh object using `==' will always yield false
new AnyRef == 1
^
-checksensible.scala:37: error: comparing values of types Int and java.lang.Boolean using `==' will always yield false
+checksensible.scala:41: error: comparing values of types Int and Boolean using `==' will always yield false
1 == (new java.lang.Boolean(true))
^
-checksensible.scala:39: error: comparing values of types Int and Boolean using `!=' will always yield true
+checksensible.scala:43: error: comparing values of types Int and Boolean using `!=' will always yield true
1 != true
^
-checksensible.scala:40: error: comparing values of types Unit and Boolean using `==' will always yield false
+checksensible.scala:44: error: comparing values of types Unit and Boolean using `==' will always yield false
() == true
^
-checksensible.scala:41: error: comparing values of types Unit and Unit using `==' will always yield true
+checksensible.scala:45: error: comparing values of types Unit and Unit using `==' will always yield true
() == ()
^
-checksensible.scala:42: error: comparing values of types Unit and Unit using `==' will always yield true
+checksensible.scala:46: error: comparing values of types Unit and Unit using `==' will always yield true
() == println
^
-checksensible.scala:44: error: comparing values of types Int and Unit using `!=' will always yield true
+checksensible.scala:47: error: comparing values of types Unit and scala.runtime.BoxedUnit using `==' will always yield true
+ () == scala.runtime.BoxedUnit.UNIT // these should warn for always being true/false
+ ^
+checksensible.scala:48: error: comparing values of types scala.runtime.BoxedUnit and Unit using `!=' will always yield false
+ scala.runtime.BoxedUnit.UNIT != ()
+ ^
+checksensible.scala:51: error: comparing values of types Int and Unit using `!=' will always yield true
(1 != println)
^
-checksensible.scala:45: error: comparing values of types Int and Symbol using `!=' will always yield true
+checksensible.scala:52: error: comparing values of types Int and Symbol using `!=' will always yield true
(1 != 'sym)
^
-checksensible.scala:51: error: comparing a fresh object using `==' will always yield false
+checksensible.scala:58: error: comparing a fresh object using `==' will always yield false
((x: Int) => x + 1) == null
^
-checksensible.scala:52: error: comparing a fresh object using `==' will always yield false
+checksensible.scala:59: error: comparing a fresh object using `==' will always yield false
Bep == ((_: Int) + 1)
^
-checksensible.scala:54: error: comparing a fresh object using `==' will always yield false
+checksensible.scala:61: error: comparing a fresh object using `==' will always yield false
new Object == new Object
^
-checksensible.scala:55: error: comparing a fresh object using `==' will always yield false
+checksensible.scala:62: error: comparing a fresh object using `==' will always yield false
new Object == "abc"
^
-checksensible.scala:56: error: comparing a fresh object using `!=' will always yield true
+checksensible.scala:63: error: comparing a fresh object using `!=' will always yield true
new Exception() != new Exception()
^
-checksensible.scala:59: error: comparing values of types Int and Null using `==' will always yield false
+checksensible.scala:66: error: comparing values of types Int and Null using `==' will always yield false
if (foo.length == null) "plante" else "plante pas"
^
-checksensible.scala:64: error: comparing values of types Bip and Bop using `==' will always yield false
+checksensible.scala:71: error: comparing values of types Bip and Bop using `==' will always yield false
(x1 == x2)
^
-checksensible.scala:74: error: comparing values of types EqEqRefTest.this.C3 and EqEqRefTest.this.Z1 using `==' will always yield false
+checksensible.scala:81: error: comparing values of types EqEqRefTest.this.C3 and EqEqRefTest.this.Z1 using `==' will always yield false
c3 == z1
^
-checksensible.scala:75: error: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `==' will always yield false
+checksensible.scala:82: error: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `==' will always yield false
z1 == c3
^
-checksensible.scala:76: error: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `!=' will always yield true
+checksensible.scala:83: error: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `!=' will always yield true
z1 != c3
^
-checksensible.scala:77: error: comparing values of types EqEqRefTest.this.C3 and java.lang.String using `!=' will always yield true
+checksensible.scala:84: error: comparing values of types EqEqRefTest.this.C3 and String using `!=' will always yield true
c3 != "abc"
^
-checksensible.scala:88: error: comparing values of types Unit and Int using `!=' will always yield true
+checksensible.scala:95: error: comparing values of types Unit and Int using `!=' will always yield true
while ((c = in.read) != -1)
^
-30 errors found
+33 errors found
diff --git a/test/files/neg/checksensible.scala b/test/files/neg/checksensible.scala
index 491cd22..27ee908 100644
--- a/test/files/neg/checksensible.scala
+++ b/test/files/neg/checksensible.scala
@@ -19,7 +19,7 @@ class RefEqTest {
null eq new AnyRef
}
-// 11 warnings
+// 13 warnings
class EqEqValTest {
var c = 0
@@ -29,7 +29,11 @@ class EqEqValTest {
1 == "abc"
1 == ("abc": Any) // doesn't warn because an Any may be a boxed Int
1 == (1: Any) // as above
- "abc" == 1 // doesn't generally warn since String defines an equals method, but can chatty warn
+ "abc" == 1 // warns because the lub of String and Int is Any
+ Some(1) == 1 // as above
+
+ true == new java.lang.Boolean(true) // none of these should warn
+ new java.lang.Boolean(true) == true
new AnyRef == 1
1 == new AnyRef // doesn't warn because it could be...
@@ -40,6 +44,9 @@ class EqEqValTest {
() == true
() == ()
() == println
+ () == scala.runtime.BoxedUnit.UNIT // these should warn for always being true/false
+ scala.runtime.BoxedUnit.UNIT != ()
+ (scala.runtime.BoxedUnit.UNIT: java.io.Serializable) != () // shouldn't warn
(1 != println)
(1 != 'sym)
diff --git a/test/files/neg/classmanifests_new_deprecations.check b/test/files/neg/classmanifests_new_deprecations.check
new file mode 100644
index 0000000..4ad4a12
--- /dev/null
+++ b/test/files/neg/classmanifests_new_deprecations.check
@@ -0,0 +1,25 @@
+classmanifests_new_deprecations.scala:2: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+ def cm1[T: ClassManifest] = ???
+ ^
+classmanifests_new_deprecations.scala:3: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+ def cm2[T](implicit evidence$1: ClassManifest[T]) = ???
+ ^
+classmanifests_new_deprecations.scala:4: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+ val cm3: ClassManifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:6: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+ def rcm1[T: scala.reflect.ClassManifest] = ???
+ ^
+classmanifests_new_deprecations.scala:7: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+ def rcm2[T](implicit evidence$1: scala.reflect.ClassManifest[T]) = ???
+ ^
+classmanifests_new_deprecations.scala:8: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+ val rcm3: scala.reflect.ClassManifest[Int] = null
+ ^
+classmanifests_new_deprecations.scala:10: error: type ClassManifest in object Predef is deprecated: Use scala.reflect.ClassTag instead
+ type CM[T] = ClassManifest[T]
+ ^
+classmanifests_new_deprecations.scala:15: error: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
+ type RCM[T] = scala.reflect.ClassManifest[T]
+ ^
+8 errors found
diff --git a/test/files/neg/classmanifests_new_deprecations.flags b/test/files/neg/classmanifests_new_deprecations.flags
new file mode 100644
index 0000000..c6bfaf1
--- /dev/null
+++ b/test/files/neg/classmanifests_new_deprecations.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
diff --git a/test/files/neg/classmanifests_new_deprecations.scala b/test/files/neg/classmanifests_new_deprecations.scala
new file mode 100644
index 0000000..563a0bc
--- /dev/null
+++ b/test/files/neg/classmanifests_new_deprecations.scala
@@ -0,0 +1,37 @@
+object Test extends App {
+ def cm1[T: ClassManifest] = ???
+ def cm2[T](implicit evidence$1: ClassManifest[T]) = ???
+ val cm3: ClassManifest[Int] = null
+
+ def rcm1[T: scala.reflect.ClassManifest] = ???
+ def rcm2[T](implicit evidence$1: scala.reflect.ClassManifest[T]) = ???
+ val rcm3: scala.reflect.ClassManifest[Int] = null
+
+ type CM[T] = ClassManifest[T]
+ def acm1[T: CM] = ???
+ def acm2[T](implicit evidence$1: CM[T]) = ???
+ val acm3: CM[Int] = null
+
+ type RCM[T] = scala.reflect.ClassManifest[T]
+ def arcm1[T: RCM] = ???
+ def arcm2[T](implicit evidence$1: RCM[T]) = ???
+ val arcm3: RCM[Int] = null
+
+ def m1[T: Manifest] = ???
+ def m2[T](implicit evidence$1: Manifest[T]) = ???
+ val m3: Manifest[Int] = null
+
+ def rm1[T: scala.reflect.Manifest] = ???
+ def rm2[T](implicit evidence$1: scala.reflect.Manifest[T]) = ???
+ val rm3: scala.reflect.Manifest[Int] = null
+
+ type M[T] = Manifest[T]
+ def am1[T: M] = ???
+ def am2[T](implicit evidence$1: M[T]) = ???
+ val am3: M[Int] = null
+
+ type RM[T] = scala.reflect.Manifest[T]
+ def arm1[T: RM] = ???
+ def arm2[T](implicit evidence$1: RM[T]) = ???
+ val arm3: RM[Int] = null
+}
\ No newline at end of file
diff --git a/test/files/neg/classtags_contextbound_a.check b/test/files/neg/classtags_contextbound_a.check
new file mode 100644
index 0000000..5edb7f9
--- /dev/null
+++ b/test/files/neg/classtags_contextbound_a.check
@@ -0,0 +1,4 @@
+classtags_contextbound_a.scala:2: error: No ClassTag available for T
+ def foo[T] = Array[T]()
+ ^
+one error found
diff --git a/test/files/neg/classtags_contextbound_a.scala b/test/files/neg/classtags_contextbound_a.scala
new file mode 100644
index 0000000..d18beda
--- /dev/null
+++ b/test/files/neg/classtags_contextbound_a.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ def foo[T] = Array[T]()
+ println(foo[Int].getClass)
+}
\ No newline at end of file
diff --git a/test/files/neg/classtags_contextbound_b.check b/test/files/neg/classtags_contextbound_b.check
new file mode 100644
index 0000000..e17ab8b
--- /dev/null
+++ b/test/files/neg/classtags_contextbound_b.check
@@ -0,0 +1,4 @@
+classtags_contextbound_b.scala:5: error: No ClassTag available for T
+ def foo[T] = mkArray[T]
+ ^
+one error found
diff --git a/test/files/neg/classtags_contextbound_b.scala b/test/files/neg/classtags_contextbound_b.scala
new file mode 100644
index 0000000..a189f9a
--- /dev/null
+++ b/test/files/neg/classtags_contextbound_b.scala
@@ -0,0 +1,7 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ def mkArray[T: ClassTag] = Array[T]()
+ def foo[T] = mkArray[T]
+ println(foo[Int].getClass)
+}
\ No newline at end of file
diff --git a/test/files/neg/classtags_contextbound_c.check b/test/files/neg/classtags_contextbound_c.check
new file mode 100644
index 0000000..e8666f7
--- /dev/null
+++ b/test/files/neg/classtags_contextbound_c.check
@@ -0,0 +1,4 @@
+classtags_contextbound_c.scala:4: error: No ClassTag available for T
+ def mkArray[T] = Array[T]()
+ ^
+one error found
diff --git a/test/files/neg/classtags_contextbound_c.scala b/test/files/neg/classtags_contextbound_c.scala
new file mode 100644
index 0000000..54c616c
--- /dev/null
+++ b/test/files/neg/classtags_contextbound_c.scala
@@ -0,0 +1,7 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ def mkArray[T] = Array[T]()
+ def foo[T: ClassTag] = mkArray[T]
+ println(foo[Int].getClass)
+}
\ No newline at end of file
diff --git a/test/files/neg/classtags_dont_use_typetags.check b/test/files/neg/classtags_dont_use_typetags.check
new file mode 100644
index 0000000..4f728d2
--- /dev/null
+++ b/test/files/neg/classtags_dont_use_typetags.check
@@ -0,0 +1,4 @@
+classtags_dont_use_typetags.scala:4: error: No ClassTag available for T
+ def foo[T: TypeTag] = Array[T]()
+ ^
+one error found
diff --git a/test/files/neg/classtags_dont_use_typetags.scala b/test/files/neg/classtags_dont_use_typetags.scala
new file mode 100644
index 0000000..2eb842b
--- /dev/null
+++ b/test/files/neg/classtags_dont_use_typetags.scala
@@ -0,0 +1,5 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def foo[T: TypeTag] = Array[T]()
+}
\ No newline at end of file
diff --git a/test/files/neg/constructor-prefix-error.check b/test/files/neg/constructor-prefix-error.check
new file mode 100644
index 0000000..87e9488
--- /dev/null
+++ b/test/files/neg/constructor-prefix-error.check
@@ -0,0 +1,4 @@
+constructor-prefix-error.scala:6: error: Outer is not a legal prefix for a constructor
+ val x = new Outer#Inner
+ ^
+one error found
diff --git a/test/files/neg/constructor-prefix-error.scala b/test/files/neg/constructor-prefix-error.scala
new file mode 100644
index 0000000..c2accea
--- /dev/null
+++ b/test/files/neg/constructor-prefix-error.scala
@@ -0,0 +1,7 @@
+class Outer {
+ class Inner
+}
+
+object Test {
+ val x = new Outer#Inner
+}
diff --git a/test/files/neg/cyclics-import.check b/test/files/neg/cyclics-import.check
new file mode 100644
index 0000000..ef355fa
--- /dev/null
+++ b/test/files/neg/cyclics-import.check
@@ -0,0 +1,15 @@
+cyclics-import.scala:1: error: encountered unrecoverable cycle resolving import.
+Note: this is often due in part to a class depending on a definition nested within its companion.
+If applicable, you may wish to try moving some members into another object.
+import User.UserStatus._
+ ^
+cyclics-import.scala:12: error: not found: type Value
+ type UserStatus = Value
+ ^
+cyclics-import.scala:14: error: not found: value Value
+ val Active = Value("1")
+ ^
+cyclics-import.scala:15: error: not found: value Value
+ val Disabled = Value("2")
+ ^
+four errors found
diff --git a/test/files/neg/cyclics-import.scala b/test/files/neg/cyclics-import.scala
new file mode 100644
index 0000000..7b510b5
--- /dev/null
+++ b/test/files/neg/cyclics-import.scala
@@ -0,0 +1,17 @@
+import User.UserStatus._
+
+class User {
+ var id: Int = 0
+ var email: String = null
+ var password: String = null
+ var userStatus: UserStatus = null
+}
+
+object User {
+ object UserStatus extends Enumeration {
+ type UserStatus = Value
+
+ val Active = Value("1")
+ val Disabled = Value("2")
+ }
+}
diff --git a/test/pending/neg/dbldef.check b/test/files/neg/dbldef.check
similarity index 100%
rename from test/pending/neg/dbldef.check
rename to test/files/neg/dbldef.check
diff --git a/test/pending/neg/dbldef.scala b/test/files/neg/dbldef.scala
similarity index 100%
rename from test/pending/neg/dbldef.scala
rename to test/files/neg/dbldef.scala
diff --git a/test/files/neg/deadline-inf-illegal.check b/test/files/neg/deadline-inf-illegal.check
new file mode 100644
index 0000000..530d2b2
--- /dev/null
+++ b/test/files/neg/deadline-inf-illegal.check
@@ -0,0 +1,15 @@
+deadline-inf-illegal.scala:5: error: value fromNow is not a member of scala.concurrent.duration.Duration
+ d.fromNow
+ ^
+deadline-inf-illegal.scala:6: error: type mismatch;
+ found : scala.concurrent.duration.Duration
+ required: scala.concurrent.duration.FiniteDuration
+ Deadline.now + d
+ ^
+deadline-inf-illegal.scala:7: error: overloaded method value - with alternatives:
+ (other: scala.concurrent.duration.Deadline)scala.concurrent.duration.FiniteDuration <and>
+ (other: scala.concurrent.duration.FiniteDuration)scala.concurrent.duration.Deadline
+ cannot be applied to (scala.concurrent.duration.Duration)
+ Deadline.now - d
+ ^
+three errors found
diff --git a/test/files/neg/deadline-inf-illegal.scala b/test/files/neg/deadline-inf-illegal.scala
new file mode 100644
index 0000000..942cea7
--- /dev/null
+++ b/test/files/neg/deadline-inf-illegal.scala
@@ -0,0 +1,8 @@
+import concurrent.duration.{ Deadline, Duration }
+
+class T {
+ val d: Duration = Duration.Zero
+ d.fromNow
+ Deadline.now + d
+ Deadline.now - d
+}
diff --git a/test/files/neg/delayed-init-ref.check b/test/files/neg/delayed-init-ref.check
new file mode 100644
index 0000000..42ccabe
--- /dev/null
+++ b/test/files/neg/delayed-init-ref.check
@@ -0,0 +1,10 @@
+delayed-init-ref.scala:17: error: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value
+ println(O.vall) // warn
+ ^
+delayed-init-ref.scala:19: error: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value
+ println(vall) // warn
+ ^
+delayed-init-ref.scala:40: error: Selecting value foo from trait UserContext, which extends scala.DelayedInit, is likely to yield an uninitialized value
+ println({locally(()); this}.foo) // warn (spurious, but we can't discriminate)
+ ^
+three errors found
diff --git a/test/files/neg/delayed-init-ref.flags b/test/files/neg/delayed-init-ref.flags
new file mode 100644
index 0000000..7949c2a
--- /dev/null
+++ b/test/files/neg/delayed-init-ref.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings
diff --git a/test/files/neg/delayed-init-ref.scala b/test/files/neg/delayed-init-ref.scala
new file mode 100644
index 0000000..f2aa804
--- /dev/null
+++ b/test/files/neg/delayed-init-ref.scala
@@ -0,0 +1,42 @@
+trait T {
+ val traitVal = ""
+}
+
+object O extends App with T {
+ val vall = ""
+ lazy val lazyy = ""
+ def deff = ""
+
+ println(vall) // no warn
+ new {
+ println(vall) // no warn
+ }
+}
+
+object Client {
+ println(O.vall) // warn
+ import O.vall
+ println(vall) // warn
+
+ println(O.lazyy) // no warn
+ println(O.deff) // no warn
+ println(O.traitVal) // no warn
+}
+
+// Delayed init usage pattern from Specs2
+// See: https://groups.google.com/d/msg/scala-sips/wP6dL8nIAQs/ogjoPE-MSVAJ
+trait Before extends DelayedInit {
+ def before()
+ override def delayedInit(x: => Unit): Unit = { before; x }
+}
+object Spec {
+ trait UserContext extends Before {
+ def before() = ()
+ val foo = "foo"
+ }
+ new UserContext {
+ println(foo) // no warn
+ println(this.foo) // no warn
+ println({locally(()); this}.foo) // warn (spurious, but we can't discriminate)
+ }
+}
diff --git a/test/files/neg/depmet_1.flags b/test/files/neg/depmet_1.flags
deleted file mode 100644
index 1c26b24..0000000
--- a/test/files/neg/depmet_1.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/neg/error_dependentMethodTpeConversionToFunction.check b/test/files/neg/error_dependentMethodTpeConversionToFunction.check
new file mode 100644
index 0000000..3496a55
--- /dev/null
+++ b/test/files/neg/error_dependentMethodTpeConversionToFunction.check
@@ -0,0 +1,4 @@
+error_dependentMethodTpeConversionToFunction.scala:4: error: method with dependent type (x: AnyRef)x.type cannot be converted to function value
+ val x: Any => Any = foo
+ ^
+one error found
diff --git a/test/files/neg/error_dependentMethodTpeConversionToFunction.scala b/test/files/neg/error_dependentMethodTpeConversionToFunction.scala
new file mode 100644
index 0000000..22649e5
--- /dev/null
+++ b/test/files/neg/error_dependentMethodTpeConversionToFunction.scala
@@ -0,0 +1,5 @@
+// test DependentMethodTpeConversionToFunctionError
+object Test {
+ def foo(x: AnyRef): x.type = x
+ val x: Any => Any = foo
+}
\ No newline at end of file
diff --git a/test/files/neg/error_tooManyArgsPattern.check b/test/files/neg/error_tooManyArgsPattern.check
new file mode 100644
index 0000000..ee401ad
--- /dev/null
+++ b/test/files/neg/error_tooManyArgsPattern.check
@@ -0,0 +1,4 @@
+error_tooManyArgsPattern.scala:3: error: too many arguments for unapply pattern, maximum = 22
+ case List(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22, x23) => 7
+ ^
+one error found
diff --git a/test/files/neg/error_tooManyArgsPattern.scala b/test/files/neg/error_tooManyArgsPattern.scala
new file mode 100644
index 0000000..d55ba61
--- /dev/null
+++ b/test/files/neg/error_tooManyArgsPattern.scala
@@ -0,0 +1,5 @@
+object Test {
+ def test(xs: Any) = xs match { // test error message TooManyArgsPatternError
+ case List(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22, x23) => 7
+ }
+}
diff --git a/test/files/neg/eta-expand-star.check b/test/files/neg/eta-expand-star.check
new file mode 100644
index 0000000..6765d50
--- /dev/null
+++ b/test/files/neg/eta-expand-star.check
@@ -0,0 +1,4 @@
+eta-expand-star.scala:6: error: too many arguments for method apply: (v1: Seq[T])Unit in trait Function1
+ g(1, 2)
+ ^
+one error found
diff --git a/test/files/neg/eta-expand-star.scala b/test/files/neg/eta-expand-star.scala
new file mode 100644
index 0000000..5749692
--- /dev/null
+++ b/test/files/neg/eta-expand-star.scala
@@ -0,0 +1,8 @@
+object Test {
+ def f[T](xs: T*): Unit = ()
+ def g[T] = f[T] _
+
+ def main(args: Array[String]): Unit = {
+ g(1, 2)
+ }
+}
diff --git a/test/files/neg/exhausting.check b/test/files/neg/exhausting.check
index 0bef21e..0f0d13c 100644
--- a/test/files/neg/exhausting.check
+++ b/test/files/neg/exhausting.check
@@ -1,29 +1,25 @@
-exhausting.scala:20: error: match is not exhaustive!
-missing combination * Nil
-
+exhausting.scala:21: error: match may not be exhaustive.
+It would fail on the following input: List(_, _, _)
def fail1[T](xs: List[T]) = xs match {
^
-exhausting.scala:24: error: match is not exhaustive!
-missing combination Nil
-
+exhausting.scala:27: error: match may not be exhaustive.
+It would fail on the following input: Nil
def fail2[T](xs: List[T]) = xs match {
^
-exhausting.scala:27: error: match is not exhaustive!
-missing combination Bar3
-
+exhausting.scala:32: error: match may not be exhaustive.
+It would fail on the following input: List((x: Int forSome x not in (1, 2)))
+ def fail3a(xs: List[Int]) = xs match {
+ ^
+exhausting.scala:39: error: match may not be exhaustive.
+It would fail on the following input: Bar3
def fail3[T](x: Foo[T]) = x match {
^
-exhausting.scala:31: error: match is not exhaustive!
-missing combination Bar2 Bar2
-
+exhausting.scala:47: error: match may not be exhaustive.
+It would fail on the following inputs: (Bar1, Bar2), (Bar1, Bar3), (Bar2, Bar1), (Bar2, Bar2)
def fail4[T <: AnyRef](xx: (Foo[T], Foo[T])) = xx match {
^
-exhausting.scala:36: error: match is not exhaustive!
-missing combination Bar1 Bar2
-missing combination Bar1 Bar3
-missing combination Bar2 Bar1
-missing combination Bar2 Bar2
-
+exhausting.scala:56: error: match may not be exhaustive.
+It would fail on the following inputs: (Bar1, Bar2), (Bar1, Bar3), (Bar2, Bar1), (Bar2, Bar2)
def fail5[T](xx: (Foo[T], Foo[T])) = xx match {
^
-5 errors found
+6 errors found
diff --git a/test/files/neg/exhausting.flags b/test/files/neg/exhausting.flags
index e8fb65d..85d8eb2 100644
--- a/test/files/neg/exhausting.flags
+++ b/test/files/neg/exhausting.flags
@@ -1 +1 @@
--Xfatal-warnings
\ No newline at end of file
+-Xfatal-warnings
diff --git a/test/files/neg/exhausting.scala b/test/files/neg/exhausting.scala
index 14b0569..5554ee2 100644
--- a/test/files/neg/exhausting.scala
+++ b/test/files/neg/exhausting.scala
@@ -16,30 +16,46 @@ object Test {
def ex3[T](xx: (Foo[T], Foo[T])) = xx match {
case (_: Foo[_], _: Foo[_]) => ()
}
-
+
+ // fails for: ::(_, ::(_, ::(_, _)))
def fail1[T](xs: List[T]) = xs match {
case Nil => "ok"
case x :: y :: Nil => "ok"
}
+
+ // fails for: Nil
def fail2[T](xs: List[T]) = xs match {
case _ :: _ => "ok"
}
+
+ // fails for: ::(<not in (2, 1)>, _)
+ def fail3a(xs: List[Int]) = xs match {
+ case 1 :: _ =>
+ case 2 :: _ =>
+ case Nil =>
+ }
+
+ // fails for: Bar3
def fail3[T](x: Foo[T]) = x match {
case Bar1 => "ok"
case Bar2 => "ok"
}
+ // fails for: (Bar1, Bar2)
+ // fails for: (Bar1, Bar3)
+ // fails for: (Bar2, Bar2)
+ // fails for: (Bar2, Bar1)
def fail4[T <: AnyRef](xx: (Foo[T], Foo[T])) = xx match {
case (Bar1, Bar1) => ()
case (Bar2, Bar3) => ()
case (Bar3, _) => ()
}
+ // fails for: (Bar1, Bar2)
+ // fails for: (Bar1, Bar3)
+ // fails for: (Bar2, Bar1)
+ // fails for: (Bar2, Bar2)
def fail5[T](xx: (Foo[T], Foo[T])) = xx match {
case (Bar1, Bar1) => ()
case (Bar2, Bar3) => ()
case (Bar3, _) => ()
}
-
- def main(args: Array[String]): Unit = {
-
- }
}
diff --git a/test/files/neg/finitary-error.check b/test/files/neg/finitary-error.check
new file mode 100644
index 0000000..7bc9205
--- /dev/null
+++ b/test/files/neg/finitary-error.check
@@ -0,0 +1,4 @@
+finitary-error.scala:3: error: class graph is not finitary because type parameter T is expansively recursive
+trait C[T] extends A[C[B[T]]]
+ ^
+one error found
diff --git a/test/files/neg/finitary-error.scala b/test/files/neg/finitary-error.scala
new file mode 100644
index 0000000..a48fcdc
--- /dev/null
+++ b/test/files/neg/finitary-error.scala
@@ -0,0 +1,3 @@
+trait A[T]
+trait B[T]
+trait C[T] extends A[C[B[T]]]
diff --git a/test/files/neg/for-comprehension-old.check b/test/files/neg/for-comprehension-old.check
new file mode 100644
index 0000000..1ecaf12
--- /dev/null
+++ b/test/files/neg/for-comprehension-old.check
@@ -0,0 +1,26 @@
+for-comprehension-old.scala:3: warning: val keyword in for comprehension is deprecated
+ for (x <- 1 to 5 ; val y = x) yield x+y // fail
+ ^
+for-comprehension-old.scala:5: warning: val keyword in for comprehension is deprecated
+ for (val x <- 1 to 5 ; val y = x) yield x+y // fail
+ ^
+for-comprehension-old.scala:8: warning: val keyword in for comprehension is deprecated
+ for (z <- 1 to 2 ; x <- 1 to 5 ; val y = x) yield x+y // fail
+ ^
+for-comprehension-old.scala:10: warning: val keyword in for comprehension is deprecated
+ for (z <- 1 to 2 ; val x <- 1 to 5 ; val y = x) yield x+y // fail
+ ^
+for-comprehension-old.scala:4: error: val in for comprehension must be followed by assignment
+ for (val x <- 1 to 5 ; y = x) yield x+y // fail
+ ^
+for-comprehension-old.scala:5: error: val in for comprehension must be followed by assignment
+ for (val x <- 1 to 5 ; val y = x) yield x+y // fail
+ ^
+for-comprehension-old.scala:9: error: val in for comprehension must be followed by assignment
+ for (z <- 1 to 2 ; val x <- 1 to 5 ; y = x) yield x+y // fail
+ ^
+for-comprehension-old.scala:10: error: val in for comprehension must be followed by assignment
+ for (z <- 1 to 2 ; val x <- 1 to 5 ; val y = x) yield x+y // fail
+ ^
+four warnings found
+four errors found
diff --git a/test/files/neg/for-comprehension-old.flags b/test/files/neg/for-comprehension-old.flags
new file mode 100644
index 0000000..dcc59eb
--- /dev/null
+++ b/test/files/neg/for-comprehension-old.flags
@@ -0,0 +1 @@
+-deprecation
diff --git a/test/files/neg/for-comprehension-old.scala b/test/files/neg/for-comprehension-old.scala
new file mode 100644
index 0000000..10ae363
--- /dev/null
+++ b/test/files/neg/for-comprehension-old.scala
@@ -0,0 +1,11 @@
+class A {
+ for (x <- 1 to 5 ; y = x) yield x+y // ok
+ for (x <- 1 to 5 ; val y = x) yield x+y // fail
+ for (val x <- 1 to 5 ; y = x) yield x+y // fail
+ for (val x <- 1 to 5 ; val y = x) yield x+y // fail
+
+ for (z <- 1 to 2 ; x <- 1 to 5 ; y = x) yield x+y // ok
+ for (z <- 1 to 2 ; x <- 1 to 5 ; val y = x) yield x+y // fail
+ for (z <- 1 to 2 ; val x <- 1 to 5 ; y = x) yield x+y // fail
+ for (z <- 1 to 2 ; val x <- 1 to 5 ; val y = x) yield x+y // fail
+}
diff --git a/test/files/neg/found-req-variance.check b/test/files/neg/found-req-variance.check
index 828e40a..cc26458 100644
--- a/test/files/neg/found-req-variance.check
+++ b/test/files/neg/found-req-variance.check
@@ -163,15 +163,15 @@ You may wish to define A as +A instead. (SLS 4.5)
^
found-req-variance.scala:100: error: type mismatch;
found : Set[String]
- required: Set[java.lang.CharSequence]
-Note: String <: java.lang.CharSequence, but trait Set is invariant in type A.
-You may wish to investigate a wildcard type such as `_ <: java.lang.CharSequence`. (SLS 3.2.10)
+ required: Set[CharSequence]
+Note: String <: CharSequence, but trait Set is invariant in type A.
+You may wish to investigate a wildcard type such as `_ <: CharSequence`. (SLS 3.2.10)
foo(s)
^
found-req-variance.scala:104: error: type mismatch;
found : Misc.Trippy[String,String,String]
- required: Misc.Trippy[java.lang.Object,java.lang.Object,java.lang.Object]
-Note: String <: java.lang.Object, but class Trippy is invariant in type T2.
+ required: Misc.Trippy[Object,Object,Object]
+Note: String <: Object, but class Trippy is invariant in type T2.
You may wish to define T2 as +T2 instead. (SLS 4.5)
def g1 = Set[Trippy[AnyRef, AnyRef, AnyRef]]() + new Trippy[String, String, String]
^
diff --git a/test/files/neg/gadts1.check b/test/files/neg/gadts1.check
index 44d2b11..a5e3e0d 100644
--- a/test/files/neg/gadts1.check
+++ b/test/files/neg/gadts1.check
@@ -3,9 +3,9 @@ gadts1.scala:15: error: type mismatch;
required: a
case NumTerm(n) => c.x = Double(1.0)
^
-gadts1.scala:20: error: class Cell of type Test.Cell does not take type parameters.
+gadts1.scala:20: error: Test.Cell[a] does not take parameters
case Cell[a](x: Int) => c.x = 5
- ^
+ ^
gadts1.scala:20: error: type mismatch;
found : Int(5)
required: a
diff --git a/test/files/neg/hk-bad-bounds.check b/test/files/neg/hk-bad-bounds.check
new file mode 100644
index 0000000..d629399
--- /dev/null
+++ b/test/files/neg/hk-bad-bounds.check
@@ -0,0 +1,4 @@
+hk-bad-bounds.scala:4: error: type arguments [Set] do not conform to class SeqFactory's type parameter bounds [CC[X] <: Seq[X] with scala.collection.generic.GenericTraversableTemplate[X,CC]]
+ def f(x: Boolean) = if (x) (null: SeqFactory[List]) else (null: SeqFactory[Set])
+ ^
+one error found
diff --git a/test/files/neg/hk-bad-bounds.scala b/test/files/neg/hk-bad-bounds.scala
new file mode 100644
index 0000000..0ed0b4c
--- /dev/null
+++ b/test/files/neg/hk-bad-bounds.scala
@@ -0,0 +1,5 @@
+import collection.generic.SeqFactory
+
+class A {
+ def f(x: Boolean) = if (x) (null: SeqFactory[List]) else (null: SeqFactory[Set])
+}
diff --git a/test/files/neg/implicit-shadow.check b/test/files/neg/implicit-shadow.check
new file mode 100644
index 0000000..042fca8
--- /dev/null
+++ b/test/files/neg/implicit-shadow.check
@@ -0,0 +1,11 @@
+implicit-shadow.scala:4: <i2s: error> is not a valid implicit value for Int(1) => ?{def isEmpty: ?} because:
+reference to i2s is ambiguous;
+it is imported twice in the same scope by
+import C._
+and import B._
+ 1.isEmpty
+ ^
+implicit-shadow.scala:4: error: value isEmpty is not a member of Int
+ 1.isEmpty
+ ^
+one error found
diff --git a/test/files/neg/implicit-shadow.flags b/test/files/neg/implicit-shadow.flags
new file mode 100644
index 0000000..44842a9
--- /dev/null
+++ b/test/files/neg/implicit-shadow.flags
@@ -0,0 +1 @@
+-Xlog-implicits
diff --git a/test/files/neg/implicit-shadow.scala b/test/files/neg/implicit-shadow.scala
new file mode 100644
index 0000000..ffd34b6
--- /dev/null
+++ b/test/files/neg/implicit-shadow.scala
@@ -0,0 +1,13 @@
+object Test {
+ import B._, C._
+
+ 1.isEmpty
+}
+
+trait A {
+ implicit def i2s(i: Int): String = ""
+}
+
+object B extends A
+
+object C extends A
\ No newline at end of file
diff --git a/test/files/neg/implicits.check b/test/files/neg/implicits.check
index f792313..6d61f7f 100644
--- a/test/files/neg/implicits.check
+++ b/test/files/neg/implicits.check
@@ -1,5 +1,5 @@
implicits.scala:38: error: type mismatch;
- found : test2.HSome[java.lang.String,test2.HMap]
+ found : test2.HSome[String,test2.HMap]
required: Int
foo(set)
^
@@ -8,4 +8,7 @@ implicits.scala:46: error: type mismatch;
required: List[Mxml]
children.toList.flatMap ( e => {
^
-two errors found
+implicits.scala:66: error: could not find implicit value for parameter x: Nothing
+ foo {
+ ^
+three errors found
diff --git a/test/files/neg/implicits.scala b/test/files/neg/implicits.scala
index e908fb0..878d3a7 100644
--- a/test/files/neg/implicits.scala
+++ b/test/files/neg/implicits.scala
@@ -56,3 +56,19 @@ class Mxml {
}
}
+
+// SI-5316
+class Test3 {
+ def foo(p: => Any)(implicit x: Nothing): Unit = ()
+
+ object X
+
+ foo {
+ val a = 0
+
+ {
+ import X._
+ a
+ }
+ }
+}
diff --git a/test/files/neg/interop_abstypetags_arenot_classmanifests.check b/test/files/neg/interop_abstypetags_arenot_classmanifests.check
new file mode 100644
index 0000000..d15e333
--- /dev/null
+++ b/test/files/neg/interop_abstypetags_arenot_classmanifests.check
@@ -0,0 +1,4 @@
+interop_abstypetags_arenot_classmanifests.scala:5: error: No ClassManifest available for T.
+ println(classManifest[T])
+ ^
+one error found
diff --git a/test/files/neg/interop_abstypetags_arenot_classmanifests.scala b/test/files/neg/interop_abstypetags_arenot_classmanifests.scala
new file mode 100644
index 0000000..5d88c90
--- /dev/null
+++ b/test/files/neg/interop_abstypetags_arenot_classmanifests.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def weakTypeTagIsnotClassManifest[T: WeakTypeTag] = {
+ println(classManifest[T])
+ }
+
+ weakTypeTagIsnotClassManifest[Int]
+ weakTypeTagIsnotClassManifest[String]
+ weakTypeTagIsnotClassManifest[Array[Int]]
+}
\ No newline at end of file
diff --git a/test/files/neg/interop_abstypetags_arenot_classtags.check b/test/files/neg/interop_abstypetags_arenot_classtags.check
new file mode 100644
index 0000000..3aa7a50
--- /dev/null
+++ b/test/files/neg/interop_abstypetags_arenot_classtags.check
@@ -0,0 +1,4 @@
+interop_abstypetags_arenot_classtags.scala:6: error: No ClassTag available for T
+ println(classTag[T])
+ ^
+one error found
diff --git a/test/files/neg/interop_abstypetags_arenot_classtags.scala b/test/files/neg/interop_abstypetags_arenot_classtags.scala
new file mode 100644
index 0000000..de1f865
--- /dev/null
+++ b/test/files/neg/interop_abstypetags_arenot_classtags.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ def weakTypeTagIsnotClassTag[T: WeakTypeTag] = {
+ println(classTag[T])
+ }
+
+ weakTypeTagIsnotClassTag[Int]
+ weakTypeTagIsnotClassTag[String]
+ weakTypeTagIsnotClassTag[Array[Int]]
+}
\ No newline at end of file
diff --git a/test/files/neg/interop_abstypetags_arenot_manifests.check b/test/files/neg/interop_abstypetags_arenot_manifests.check
new file mode 100644
index 0000000..5916b68
--- /dev/null
+++ b/test/files/neg/interop_abstypetags_arenot_manifests.check
@@ -0,0 +1,4 @@
+interop_abstypetags_arenot_manifests.scala:5: error: No Manifest available for T.
+ println(manifest[T])
+ ^
+one error found
diff --git a/test/files/neg/interop_abstypetags_arenot_manifests.scala b/test/files/neg/interop_abstypetags_arenot_manifests.scala
new file mode 100644
index 0000000..1ca3673
--- /dev/null
+++ b/test/files/neg/interop_abstypetags_arenot_manifests.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def weakTypeTagIsnotManifest[T: WeakTypeTag] = {
+ println(manifest[T])
+ }
+
+ weakTypeTagIsnotManifest[Int]
+ weakTypeTagIsnotManifest[String]
+ weakTypeTagIsnotManifest[Array[Int]]
+}
\ No newline at end of file
diff --git a/test/files/neg/interop_classmanifests_arenot_typetags.check b/test/files/neg/interop_classmanifests_arenot_typetags.check
new file mode 100644
index 0000000..db8e579
--- /dev/null
+++ b/test/files/neg/interop_classmanifests_arenot_typetags.check
@@ -0,0 +1,4 @@
+interop_classmanifests_arenot_typetags.scala:5: error: No TypeTag available for T
+ println(implicitly[TypeTag[T]])
+ ^
+one error found
diff --git a/test/files/neg/interop_classmanifests_arenot_typetags.scala b/test/files/neg/interop_classmanifests_arenot_typetags.scala
new file mode 100644
index 0000000..29d03a8
--- /dev/null
+++ b/test/files/neg/interop_classmanifests_arenot_typetags.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def classManifestIsnotTypeTag[T: ClassManifest] = {
+ println(implicitly[TypeTag[T]])
+ }
+
+ classManifestIsnotTypeTag[Int]
+ classManifestIsnotTypeTag[String]
+ classManifestIsnotTypeTag[Array[Int]]
+}
\ No newline at end of file
diff --git a/test/files/neg/interop_classtags_arenot_manifests.check b/test/files/neg/interop_classtags_arenot_manifests.check
new file mode 100644
index 0000000..fa805b5
--- /dev/null
+++ b/test/files/neg/interop_classtags_arenot_manifests.check
@@ -0,0 +1,4 @@
+interop_classtags_arenot_manifests.scala:5: error: No Manifest available for T.
+ println(manifest[T])
+ ^
+one error found
diff --git a/test/files/neg/interop_classtags_arenot_manifests.scala b/test/files/neg/interop_classtags_arenot_manifests.scala
new file mode 100644
index 0000000..391143c
--- /dev/null
+++ b/test/files/neg/interop_classtags_arenot_manifests.scala
@@ -0,0 +1,11 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ def classTagIsnotManifest[T: ClassTag] = {
+ println(manifest[T])
+ }
+
+ classTagIsnotManifest[Int]
+ classTagIsnotManifest[String]
+ classTagIsnotManifest[Array[Int]]
+}
\ No newline at end of file
diff --git a/test/files/neg/interop_typetags_arenot_classmanifests.check b/test/files/neg/interop_typetags_arenot_classmanifests.check
new file mode 100644
index 0000000..88fb164
--- /dev/null
+++ b/test/files/neg/interop_typetags_arenot_classmanifests.check
@@ -0,0 +1,4 @@
+interop_typetags_arenot_classmanifests.scala:5: error: No ClassManifest available for T.
+ println(classManifest[T])
+ ^
+one error found
diff --git a/test/files/neg/interop_typetags_arenot_classmanifests.scala b/test/files/neg/interop_typetags_arenot_classmanifests.scala
new file mode 100644
index 0000000..d07f472
--- /dev/null
+++ b/test/files/neg/interop_typetags_arenot_classmanifests.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def typeTagIsnotClassManifest[T: TypeTag] = {
+ println(classManifest[T])
+ }
+
+ typeTagIsnotClassManifest[Int]
+ typeTagIsnotClassManifest[String]
+ typeTagIsnotClassManifest[Array[Int]]
+}
\ No newline at end of file
diff --git a/test/files/neg/interop_typetags_arenot_classtags.check b/test/files/neg/interop_typetags_arenot_classtags.check
new file mode 100644
index 0000000..1d1fb15
--- /dev/null
+++ b/test/files/neg/interop_typetags_arenot_classtags.check
@@ -0,0 +1,4 @@
+interop_typetags_arenot_classtags.scala:6: error: No ClassTag available for T
+ println(classTag[T])
+ ^
+one error found
diff --git a/test/files/neg/interop_typetags_arenot_classtags.scala b/test/files/neg/interop_typetags_arenot_classtags.scala
new file mode 100644
index 0000000..072c12a
--- /dev/null
+++ b/test/files/neg/interop_typetags_arenot_classtags.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ def typeTagIsnotClassTag[T: TypeTag] = {
+ println(classTag[T])
+ }
+
+ typeTagIsnotClassTag[Int]
+ typeTagIsnotClassTag[String]
+ typeTagIsnotClassTag[Array[Int]]
+}
\ No newline at end of file
diff --git a/test/files/neg/interop_typetags_without_classtags_arenot_manifests.check b/test/files/neg/interop_typetags_without_classtags_arenot_manifests.check
new file mode 100644
index 0000000..ba744a8
--- /dev/null
+++ b/test/files/neg/interop_typetags_without_classtags_arenot_manifests.check
@@ -0,0 +1,6 @@
+interop_typetags_without_classtags_arenot_manifests.scala:6: error: to create a manifest here, it is necessary to interoperate with the type tag `evidence$1` in scope.
+however typetag -> manifest conversion requires a class tag for the corresponding type to be present.
+to proceed add a class tag to the type `T` (e.g. by introducing a context bound) and recompile.
+ println(manifest[T])
+ ^
+one error found
diff --git a/test/files/neg/interop_typetags_without_classtags_arenot_manifests.scala b/test/files/neg/interop_typetags_without_classtags_arenot_manifests.scala
new file mode 100644
index 0000000..0d48ae5
--- /dev/null
+++ b/test/files/neg/interop_typetags_without_classtags_arenot_manifests.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.ClassTag
+
+object Test extends App {
+ def typeTagWithoutClassTagIsnotManifest[T: TypeTag] = {
+ println(manifest[T])
+ }
+
+ typeTagWithoutClassTagIsnotManifest[Int]
+ typeTagWithoutClassTagIsnotManifest[String]
+ typeTagWithoutClassTagIsnotManifest[Array[Int]]
+}
\ No newline at end of file
diff --git a/test/files/neg/javaConversions-2.10-ambiguity.check b/test/files/neg/javaConversions-2.10-ambiguity.check
new file mode 100644
index 0000000..c064a22
--- /dev/null
+++ b/test/files/neg/javaConversions-2.10-ambiguity.check
@@ -0,0 +1,6 @@
+javaConversions-2.10-ambiguity.scala:8: error: type mismatch;
+ found : scala.collection.concurrent.Map[String,String]
+ required: scala.collection.mutable.ConcurrentMap[String,String]
+ assertType[mutable.ConcurrentMap[String, String]](a)
+ ^
+one error found
diff --git a/test/files/neg/javaConversions-2.10-ambiguity.scala b/test/files/neg/javaConversions-2.10-ambiguity.scala
new file mode 100644
index 0000000..e856846
--- /dev/null
+++ b/test/files/neg/javaConversions-2.10-ambiguity.scala
@@ -0,0 +1,10 @@
+import collection.{JavaConversions, mutable, concurrent}
+import JavaConversions._
+import java.util.concurrent.{ConcurrentHashMap => CHM}
+
+object Bar {
+ def assertType[T](t: T) = t
+ val a = new CHM[String, String]() += (("", ""))
+ assertType[mutable.ConcurrentMap[String, String]](a)
+}
+// vim: set et:
diff --git a/test/files/neg/logImplicits.check b/test/files/neg/logImplicits.check
new file mode 100644
index 0000000..54afc6f
--- /dev/null
+++ b/test/files/neg/logImplicits.check
@@ -0,0 +1,19 @@
+logImplicits.scala:2: applied implicit conversion from xs.type to ?{def size: ?} = implicit def byteArrayOps(xs: Array[Byte]): scala.collection.mutable.ArrayOps[Byte]
+ def f(xs: Array[Byte]) = xs.size
+ ^
+logImplicits.scala:7: applied implicit conversion from String("abc") to ?{def map: ?} = implicit def augmentString(x: String): scala.collection.immutable.StringOps
+ def f = "abc" map (_ + 1)
+ ^
+logImplicits.scala:15: inferred view from String("abc") to Int = C.this.convert:(p: String("abc"))Int
+ math.max(122, x: Int)
+ ^
+logImplicits.scala:19: applied implicit conversion from Int(1) to ?{def ->: ?} = implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A]
+ def f = (1 -> 2) + "c"
+ ^
+logImplicits.scala:19: applied implicit conversion from (Int, Int) to ?{def +: ?} = implicit def any2stringadd(x: Any): scala.runtime.StringAdd
+ def f = (1 -> 2) + "c"
+ ^
+logImplicits.scala:22: error: class Un needs to be abstract, since method unimplemented is not defined
+class Un {
+ ^
+one error found
diff --git a/test/files/neg/logImplicits.flags b/test/files/neg/logImplicits.flags
new file mode 100644
index 0000000..97e5ae9
--- /dev/null
+++ b/test/files/neg/logImplicits.flags
@@ -0,0 +1 @@
+-Xlog-implicit-conversions
\ No newline at end of file
diff --git a/test/files/neg/logImplicits.scala b/test/files/neg/logImplicits.scala
new file mode 100644
index 0000000..fb5dd8a
--- /dev/null
+++ b/test/files/neg/logImplicits.scala
@@ -0,0 +1,25 @@
+class A {
+ def f(xs: Array[Byte]) = xs.size
+ def g(xs: Array[Byte]) = xs.length
+}
+
+class B {
+ def f = "abc" map (_ + 1)
+}
+
+object C {
+ final val x = "abc"
+
+ implicit def convert(p: x.type): Int = 123
+
+ math.max(122, x: Int)
+}
+
+class D {
+ def f = (1 -> 2) + "c"
+}
+
+class Un {
+ // forcing post-typer failure, since we're only interested in the output from the above
+ def unimplemented: Int
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-abort.check b/test/files/neg/macro-abort.check
new file mode 100644
index 0000000..1e58add
--- /dev/null
+++ b/test/files/neg/macro-abort.check
@@ -0,0 +1,4 @@
+Test_2.scala:2: error: aborted
+ Macros.abort
+ ^
+one error found
diff --git a/test/files/neg/macro-abort/Macros_1.scala b/test/files/neg/macro-abort/Macros_1.scala
new file mode 100644
index 0000000..676c112
--- /dev/null
+++ b/test/files/neg/macro-abort/Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = {
+ c.abort(c.enclosingPosition, "aborted")
+ }
+ def abort = macro impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-abort/Test_2.scala b/test/files/neg/macro-abort/Test_2.scala
new file mode 100644
index 0000000..1d0a7a2
--- /dev/null
+++ b/test/files/neg/macro-abort/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.abort
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-basic-mamdmi.check b/test/files/neg/macro-basic-mamdmi.check
new file mode 100644
index 0000000..c7b58d7
--- /dev/null
+++ b/test/files/neg/macro-basic-mamdmi.check
@@ -0,0 +1,4 @@
+Impls_Macros_Test_1.scala:36: error: macro implementation not found: foo (the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
+ println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+ ^
+one error found
diff --git a/test/files/neg/macro-basic-mamdmi.flags b/test/files/neg/macro-basic-mamdmi.flags
new file mode 100644
index 0000000..5e5dd6c
--- /dev/null
+++ b/test/files/neg/macro-basic-mamdmi.flags
@@ -0,0 +1 @@
+-language:experimental.macros
diff --git a/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala b/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala
new file mode 100644
index 0000000..908438c
--- /dev/null
+++ b/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala
@@ -0,0 +1,37 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+ c.Expr[Int](body)
+ }
+
+ def bar(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
+ c.Expr[Int](body)
+ }
+
+ def quux(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+ c.Expr[Int](body)
+ }
+}
+
+object Macros {
+ object Shmacros {
+ def foo(x: Int): Int = macro Impls.foo
+ }
+ def bar(x: Int): Int = macro Impls.bar
+}
+
+class Macros {
+ def quux(x: Int): Int = macro Impls.quux
+}
+
+object Test extends App {
+ import Macros.Shmacros._
+ println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-cyclic.check b/test/files/neg/macro-cyclic.check
new file mode 100644
index 0000000..7978ec6
--- /dev/null
+++ b/test/files/neg/macro-cyclic.check
@@ -0,0 +1,4 @@
+Impls_Macros_1.scala:5: error: could not find implicit value for parameter e: SourceLocation
+ c.universe.reify { implicitly[SourceLocation] }
+ ^
+one error found
diff --git a/test/files/neg/macro-cyclic.flags b/test/files/neg/macro-cyclic.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-cyclic.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-cyclic/Impls_Macros_1.scala b/test/files/neg/macro-cyclic/Impls_Macros_1.scala
new file mode 100644
index 0000000..ac9b793
--- /dev/null
+++ b/test/files/neg/macro-cyclic/Impls_Macros_1.scala
@@ -0,0 +1,25 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = {
+ c.universe.reify { implicitly[SourceLocation] }
+ }
+
+ implicit def sourceLocation: SourceLocation1 = macro impl
+}
+
+trait SourceLocation {
+ /** Source location of the outermost call */
+ val outer: SourceLocation
+
+ /** The name of the source file */
+ val fileName: String
+
+ /** The line number */
+ val line: Int
+
+ /** The character offset */
+ val charOffset: Int
+}
+
+case class SourceLocation1(val outer: SourceLocation, val fileName: String, val line: Int, val charOffset: Int) extends SourceLocation
diff --git a/test/files/neg/macro-deprecate-idents.check b/test/files/neg/macro-deprecate-idents.check
new file mode 100644
index 0000000..22b667c
--- /dev/null
+++ b/test/files/neg/macro-deprecate-idents.check
@@ -0,0 +1,52 @@
+macro-deprecate-idents.scala:2: error: macro is now a reserved word; usage as an identifier is deprecated
+ val macro = ???
+ ^
+macro-deprecate-idents.scala:6: error: macro is now a reserved word; usage as an identifier is deprecated
+ var macro = ???
+ ^
+macro-deprecate-idents.scala:10: error: macro is now a reserved word; usage as an identifier is deprecated
+ type macro = Int
+ ^
+macro-deprecate-idents.scala:14: error: macro is now a reserved word; usage as an identifier is deprecated
+ class macro
+ ^
+macro-deprecate-idents.scala:18: error: macro is now a reserved word; usage as an identifier is deprecated
+ class macro
+ ^
+macro-deprecate-idents.scala:22: error: macro is now a reserved word; usage as an identifier is deprecated
+ object macro
+ ^
+macro-deprecate-idents.scala:26: error: macro is now a reserved word; usage as an identifier is deprecated
+ object macro
+ ^
+macro-deprecate-idents.scala:30: error: macro is now a reserved word; usage as an identifier is deprecated
+ trait macro
+ ^
+macro-deprecate-idents.scala:34: error: macro is now a reserved word; usage as an identifier is deprecated
+ trait macro
+ ^
+macro-deprecate-idents.scala:37: error: macro is now a reserved word; usage as an identifier is deprecated
+package macro {
+ ^
+macro-deprecate-idents.scala:38: error: macro is now a reserved word; usage as an identifier is deprecated
+ package macro.bar {
+ ^
+macro-deprecate-idents.scala:43: error: macro is now a reserved word; usage as an identifier is deprecated
+ package macro.foo {
+ ^
+macro-deprecate-idents.scala:48: error: macro is now a reserved word; usage as an identifier is deprecated
+ val Some(macro) = Some(42)
+ ^
+macro-deprecate-idents.scala:49: error: macro is now a reserved word; usage as an identifier is deprecated
+ macro match {
+ ^
+macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is deprecated
+ case macro => println(macro)
+ ^
+macro-deprecate-idents.scala:50: error: macro is now a reserved word; usage as an identifier is deprecated
+ case macro => println(macro)
+ ^
+macro-deprecate-idents.scala:55: error: macro is now a reserved word; usage as an identifier is deprecated
+ def macro = 2
+ ^
+17 errors found
diff --git a/test/files/neg/macro-deprecate-idents.flags b/test/files/neg/macro-deprecate-idents.flags
new file mode 100644
index 0000000..c6bfaf1
--- /dev/null
+++ b/test/files/neg/macro-deprecate-idents.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings
diff --git a/test/files/neg/macro-deprecate-idents.scala b/test/files/neg/macro-deprecate-idents.scala
new file mode 100644
index 0000000..23c398e
--- /dev/null
+++ b/test/files/neg/macro-deprecate-idents.scala
@@ -0,0 +1,56 @@
+object Test1 {
+ val macro = ???
+}
+
+object Test2 {
+ var macro = ???
+}
+
+object Test3 {
+ type macro = Int
+}
+
+package test4 {
+ class macro
+}
+
+object Test5 {
+ class macro
+}
+
+package test6 {
+ object macro
+}
+
+object Test7 {
+ object macro
+}
+
+package test8 {
+ trait macro
+}
+
+object Test9 {
+ trait macro
+}
+
+package macro {
+ package macro.bar {
+ }
+}
+
+package foo {
+ package macro.foo {
+ }
+}
+
+object Test12 {
+ val Some(macro) = Some(42)
+ macro match {
+ case macro => println(macro)
+ }
+}
+
+object Test13 {
+ def macro = 2
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-divergence-controlled.check b/test/files/neg/macro-divergence-controlled.check
new file mode 100644
index 0000000..4876f7c
--- /dev/null
+++ b/test/files/neg/macro-divergence-controlled.check
@@ -0,0 +1,4 @@
+Test_2.scala:2: error: could not find implicit value for parameter e: Complex[Foo]
+ println(implicitly[Complex[Foo]])
+ ^
+one error found
diff --git a/test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala b/test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala
new file mode 100644
index 0000000..cdea310
--- /dev/null
+++ b/test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+trait Complex[T]
+
+class Foo(val foo: Foo)
+
+object Complex {
+ def impl[T: c.WeakTypeTag](c: Context): c.Expr[Complex[T]] = {
+ import c.universe._
+ val tpe = weakTypeOf[T]
+ for (f <- tpe.declarations.collect{case f: TermSymbol if f.isParamAccessor && !f.isMethod => f}) {
+ val trecur = appliedType(typeOf[Complex[_]], List(f.typeSignature))
+ if (c.openImplicits.tail.exists(ic => ic._1 =:= trecur)) c.abort(c.enclosingPosition, "diverging implicit expansion. reported by a macro!")
+ val recur = c.inferImplicitValue(trecur, silent = true)
+ if (recur == EmptyTree) c.abort(c.enclosingPosition, s"couldn't synthesize $trecur")
+ }
+ c.literalNull
+ }
+
+ implicit object ComplexString extends Complex[String]
+ implicit def genComplex[T]: Complex[T] = macro impl[T]
+}
diff --git a/test/files/neg/macro-divergence-controlled/Test_2.scala b/test/files/neg/macro-divergence-controlled/Test_2.scala
new file mode 100644
index 0000000..dcc4593
--- /dev/null
+++ b/test/files/neg/macro-divergence-controlled/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println(implicitly[Complex[Foo]])
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-exception.check b/test/files/neg/macro-exception.check
new file mode 100644
index 0000000..cee8b32
--- /dev/null
+++ b/test/files/neg/macro-exception.check
@@ -0,0 +1,7 @@
+Test_2.scala:2: error: exception during macro expansion:
+java.lang.Exception
+ at Macros$.impl(Macros_1.scala:6)
+
+ Macros.exception
+ ^
+one error found
diff --git a/test/files/neg/macro-exception/Macros_1.scala b/test/files/neg/macro-exception/Macros_1.scala
new file mode 100644
index 0000000..60e4020
--- /dev/null
+++ b/test/files/neg/macro-exception/Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = {
+ throw new Exception()
+ }
+ def exception = macro impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-exception/Test_2.scala b/test/files/neg/macro-exception/Test_2.scala
new file mode 100644
index 0000000..d82b21f
--- /dev/null
+++ b/test/files/neg/macro-exception/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.exception
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-false-deprecation-warning.check b/test/files/neg/macro-false-deprecation-warning.check
new file mode 100644
index 0000000..7d56505
--- /dev/null
+++ b/test/files/neg/macro-false-deprecation-warning.check
@@ -0,0 +1,4 @@
+Impls_Macros_1.scala:5: error: illegal start of simple expression
+}
+^
+one error found
diff --git a/test/files/neg/macro-false-deprecation-warning.flags b/test/files/neg/macro-false-deprecation-warning.flags
new file mode 100644
index 0000000..59af162
--- /dev/null
+++ b/test/files/neg/macro-false-deprecation-warning.flags
@@ -0,0 +1 @@
+-language:experimental.macros -deprecation
\ No newline at end of file
diff --git a/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala b/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala
new file mode 100644
index 0000000..6dc2ea1
--- /dev/null
+++ b/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.macros.Context
+
+object Helper {
+ def unapplySeq[T](x: List[T]): Option[Seq[T]] =
+}
+
+object Macros {
+ def impl[T: c.WeakTypeTag](c: Context)(x: c.Expr[List[T]]) = {
+ c.universe.reify(Helper.unapplySeq(x.splice))
+ }
+
+ object UnapplyMacro {
+ def unapplySeq[T](x: List[T]): Option[Seq[T]] = macro impl[T]
+ }
+}
diff --git a/test/files/neg/macro-invalidimpl-a.check b/test/files/neg/macro-invalidimpl-a.check
new file mode 100644
index 0000000..7f11f3b
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-a.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:3: error: macro implementation must be in statically accessible object
+ def foo(x: Any) = macro impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-a.flags b/test/files/neg/macro-invalidimpl-a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-a/Impls_1.scala b/test/files/neg/macro-invalidimpl-a/Impls_1.scala
new file mode 100644
index 0000000..cfa1218
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-a/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+class Impls {
+ def foo(c: Ctx)(x: c.Expr[Any]) = ???
+}
diff --git a/test/files/neg/macro-invalidimpl-a/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-a/Macros_Test_2.scala
new file mode 100644
index 0000000..2220dda
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-a/Macros_Test_2.scala
@@ -0,0 +1,9 @@
+object Macros {
+ val impls = new Impls
+ def foo(x: Any) = macro impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ foo(42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-b.check b/test/files/neg/macro-invalidimpl-b.check
new file mode 100644
index 0000000..7f11f3b
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-b.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:3: error: macro implementation must be in statically accessible object
+ def foo(x: Any) = macro impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-b.flags b/test/files/neg/macro-invalidimpl-b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-b/Impls_1.scala b/test/files/neg/macro-invalidimpl-b/Impls_1.scala
new file mode 100644
index 0000000..4467021
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-b/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Any]) = ???
+}
diff --git a/test/files/neg/macro-invalidimpl-b/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-b/Macros_Test_2.scala
new file mode 100644
index 0000000..81e4083
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-b/Macros_Test_2.scala
@@ -0,0 +1,9 @@
+object Macros {
+ val impls = Impls
+ def foo(x: Any) = macro impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ foo(42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-c.check b/test/files/neg/macro-invalidimpl-c.check
new file mode 100644
index 0000000..9e0181c
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-c.check
@@ -0,0 +1,4 @@
+Impls_Macros_1.scala:8: error: macro implementation must be in statically accessible object
+ def foo(x: Any) = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-c.flags b/test/files/neg/macro-invalidimpl-c.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-c.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-c/Impls_Macros_1.scala b/test/files/neg/macro-invalidimpl-c/Impls_Macros_1.scala
new file mode 100644
index 0000000..67a0eb3
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-c/Impls_Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+class Macros {
+ object Impls {
+ def foo(c: Ctx)(x: c.Expr[Any]) = ???
+ }
+
+ def foo(x: Any) = macro Impls.foo
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-c/Test_2.scala b/test/files/neg/macro-invalidimpl-c/Test_2.scala
new file mode 100644
index 0000000..e75a8ba
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-c/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ new Macros().foo(42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-d.check b/test/files/neg/macro-invalidimpl-d.check
new file mode 100644
index 0000000..76a5ba9
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-d.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:2: error: macro implementation must be in statically accessible object
+ def foo(x: Any) = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-d.flags b/test/files/neg/macro-invalidimpl-d.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-d.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-d/Impls_1.scala b/test/files/neg/macro-invalidimpl-d/Impls_1.scala
new file mode 100644
index 0000000..e0819c9
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-d/Impls_1.scala
@@ -0,0 +1,7 @@
+import scala.reflect.macros.{Context => Ctx}
+
+trait MacroHelpers {
+ object Impls {
+ def foo(c: Ctx)(x: c.Expr[Any]) = x
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-d/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-d/Macros_Test_2.scala
new file mode 100644
index 0000000..067ab1d
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-d/Macros_Test_2.scala
@@ -0,0 +1,7 @@
+class Macros extends MacroHelpers {
+ def foo(x: Any) = macro Impls.foo
+}
+
+object Test extends App {
+ println(new Macros().foo(42))
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-e.check b/test/files/neg/macro-invalidimpl-e.check
new file mode 100644
index 0000000..e0910b2
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-e.check
@@ -0,0 +1,13 @@
+Macros_Test_2.scala:2: error: ambiguous reference to overloaded definition,
+both method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing
+and method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any])Nothing
+match expected type ?
+ def foo(x: Any) = macro Impls.foo
+ ^
+Macros_Test_2.scala:3: error: ambiguous reference to overloaded definition,
+both method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any], y: c.Expr[Any])Nothing
+and method foo in object Impls of type (c: scala.reflect.macros.Context)(x: c.Expr[Any])Nothing
+match expected type ?
+ def foo(x: Any, y: Any) = macro Impls.foo
+ ^
+two errors found
diff --git a/test/files/neg/macro-invalidimpl-e.flags b/test/files/neg/macro-invalidimpl-e.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-e.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-e/Impls_1.scala b/test/files/neg/macro-invalidimpl-e/Impls_1.scala
new file mode 100644
index 0000000..fd40119
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-e/Impls_1.scala
@@ -0,0 +1,6 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Any]) = ???
+ def foo(c: Ctx)(x: c.Expr[Any], y: c.Expr[Any]) = ???
+}
diff --git a/test/files/neg/macro-invalidimpl-e/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-e/Macros_Test_2.scala
new file mode 100644
index 0000000..6edde08
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-e/Macros_Test_2.scala
@@ -0,0 +1,9 @@
+object Macros {
+ def foo(x: Any) = macro Impls.foo
+ def foo(x: Any, y: Any) = macro Impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ foo(42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-f.check b/test/files/neg/macro-invalidimpl-f.check
new file mode 100644
index 0000000..8820e05
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-f.check
@@ -0,0 +1,7 @@
+Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.Context)(): c.Expr[Unit]
+ found : (c: scala.reflect.macros.Context): c.Expr[Unit]
+number of parameter sections differ
+ def bar1() = macro Impls.fooNullary
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-f.flags b/test/files/neg/macro-invalidimpl-f.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-f.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-f/Impls_1.scala b/test/files/neg/macro-invalidimpl-f/Impls_1.scala
new file mode 100644
index 0000000..334ee71
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-f/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def fooNullary(c: Ctx) = {
+ import c.universe._
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works"))))
+ c.Expr[Unit](body)
+ }
+
+ def fooEmpty(c: Ctx)() = fooNullary(c)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-f/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-f/Macros_Test_2.scala
new file mode 100644
index 0000000..493edf1
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-f/Macros_Test_2.scala
@@ -0,0 +1,9 @@
+object Macros {
+ def bar1() = macro Impls.fooNullary
+}
+
+object Test extends App {
+ Macros.bar1
+ Macros.bar1()
+ println("kkthxbai")
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-g.check b/test/files/neg/macro-invalidimpl-g.check
new file mode 100644
index 0000000..c063803
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-g.check
@@ -0,0 +1,7 @@
+Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Unit]
+ found : (c: scala.reflect.macros.Context)(): c.Expr[Unit]
+number of parameter sections differ
+ def foo1 = macro Impls.fooEmpty
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-g.flags b/test/files/neg/macro-invalidimpl-g.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-g.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-g/Impls_1.scala b/test/files/neg/macro-invalidimpl-g/Impls_1.scala
new file mode 100644
index 0000000..334ee71
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-g/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def fooNullary(c: Ctx) = {
+ import c.universe._
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works"))))
+ c.Expr[Unit](body)
+ }
+
+ def fooEmpty(c: Ctx)() = fooNullary(c)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-g/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-g/Macros_Test_2.scala
new file mode 100644
index 0000000..5561db9
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-g/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo1 = macro Impls.fooEmpty
+}
+
+object Test extends App {
+ Macros.foo1
+ println("kkthxbai")
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-h.check b/test/files/neg/macro-invalidimpl-h.check
new file mode 100644
index 0000000..ea76e1a
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-h.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:2: error: type arguments [String] do not conform to method foo's type parameter bounds [U <: Int]
+ def foo = macro Impls.foo[String]
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-h.flags b/test/files/neg/macro-invalidimpl-h.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-h.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-h/Impls_1.scala b/test/files/neg/macro-invalidimpl-h/Impls_1.scala
new file mode 100644
index 0000000..427fd3d
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-h/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U <: Int](c: Ctx) = ???
+}
diff --git a/test/files/neg/macro-invalidimpl-h/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-h/Macros_Test_2.scala
new file mode 100644
index 0000000..218c7ae
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-h/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo = macro Impls.foo[String]
+}
+
+object Test extends App {
+ import Macros._
+ foo
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-i.check b/test/files/neg/macro-invalidimpl-i.check
new file mode 100644
index 0000000..846ed8d
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-i.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:4: error: macro implementation must be public
+ def foo = macro Impls.impl
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidimpl-i.flags b/test/files/neg/macro-invalidimpl-i.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-i.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-i/Impls_1.scala b/test/files/neg/macro-invalidimpl-i/Impls_1.scala
new file mode 100644
index 0000000..c35d8ab
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-i/Impls_1.scala
@@ -0,0 +1,7 @@
+package foo
+
+import scala.reflect.macros.Context
+
+object Impls {
+ private[foo] def impl(c: Context) = ???
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidimpl-i/Macros_Test_2.scala b/test/files/neg/macro-invalidimpl-i/Macros_Test_2.scala
new file mode 100644
index 0000000..fb129c7
--- /dev/null
+++ b/test/files/neg/macro-invalidimpl-i/Macros_Test_2.scala
@@ -0,0 +1,5 @@
+package foo
+
+object Test extends App {
+ def foo = macro Impls.impl
+}
diff --git a/test/files/neg/macro-invalidret-nontree.check b/test/files/neg/macro-invalidret-nontree.check
new file mode 100644
index 0000000..74e6f33
--- /dev/null
+++ b/test/files/neg/macro-invalidret-nontree.check
@@ -0,0 +1,7 @@
+Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context): Int
+type mismatch for return type: Int does not conform to c.Expr[Any]
+ def foo = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidret-nontree.flags b/test/files/neg/macro-invalidret-nontree.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidret-nontree.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidret-nontree/Impls_1.scala b/test/files/neg/macro-invalidret-nontree/Impls_1.scala
new file mode 100644
index 0000000..ef19b1b
--- /dev/null
+++ b/test/files/neg/macro-invalidret-nontree/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = 2
+}
diff --git a/test/files/neg/macro-invalidret-nontree/Macros_Test_2.scala b/test/files/neg/macro-invalidret-nontree/Macros_Test_2.scala
new file mode 100644
index 0000000..96a8de2
--- /dev/null
+++ b/test/files/neg/macro-invalidret-nontree/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo = macro Impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ foo
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidret-nonuniversetree.check b/test/files/neg/macro-invalidret-nonuniversetree.check
new file mode 100644
index 0000000..81c4114
--- /dev/null
+++ b/test/files/neg/macro-invalidret-nonuniversetree.check
@@ -0,0 +1,7 @@
+Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context): reflect.runtime.universe.Literal
+type mismatch for return type: reflect.runtime.universe.Literal does not conform to c.Expr[Any]
+ def foo = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidret-nonuniversetree.flags b/test/files/neg/macro-invalidret-nonuniversetree.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidret-nonuniversetree.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala b/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
new file mode 100644
index 0000000..f98376a
--- /dev/null
+++ b/test/files/neg/macro-invalidret-nonuniversetree/Impls_1.scala
@@ -0,0 +1,6 @@
+import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.runtime.{universe => ru}
+
+object Impls {
+ def foo(c: Ctx) = ru.Literal(ru.Constant(42))
+}
diff --git a/test/files/neg/macro-invalidret-nonuniversetree/Macros_Test_2.scala b/test/files/neg/macro-invalidret-nonuniversetree/Macros_Test_2.scala
new file mode 100644
index 0000000..96a8de2
--- /dev/null
+++ b/test/files/neg/macro-invalidret-nonuniversetree/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo = macro Impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ foo
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape-a.check b/test/files/neg/macro-invalidshape-a.check
new file mode 100644
index 0000000..f38a908
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-a.check
@@ -0,0 +1,5 @@
+Macros_Test_2.scala:2: error: macro body has wrong shape:
+ required: macro [<implementation object>].<method name>[[<type args>]]
+ def foo(x: Any) = macro 2
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidshape-a.flags b/test/files/neg/macro-invalidshape-a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape-a/Impls_1.scala b/test/files/neg/macro-invalidshape-a/Impls_1.scala
new file mode 100644
index 0000000..4467021
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-a/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Any]) = ???
+}
diff --git a/test/files/neg/macro-invalidshape-a/Macros_Test_2.scala b/test/files/neg/macro-invalidshape-a/Macros_Test_2.scala
new file mode 100644
index 0000000..ffff17d
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-a/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo(x: Any) = macro 2
+}
+
+object Test extends App {
+ import Macros._
+ foo(42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape-b.check b/test/files/neg/macro-invalidshape-b.check
new file mode 100644
index 0000000..976685c
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-b.check
@@ -0,0 +1,5 @@
+Macros_Test_2.scala:2: error: macro body has wrong shape:
+ required: macro [<implementation object>].<method name>[[<type args>]]
+ def foo(x: Any) = macro Impls.foo(null)(null)
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidshape-b.flags b/test/files/neg/macro-invalidshape-b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape-b/Impls_1.scala b/test/files/neg/macro-invalidshape-b/Impls_1.scala
new file mode 100644
index 0000000..4467021
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-b/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Any]) = ???
+}
diff --git a/test/files/neg/macro-invalidshape-b/Macros_Test_2.scala b/test/files/neg/macro-invalidshape-b/Macros_Test_2.scala
new file mode 100644
index 0000000..b67cd32
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-b/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo(x: Any) = macro Impls.foo(null)(null)
+}
+
+object Test extends App {
+ import Macros._
+ foo(42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape-c.check b/test/files/neg/macro-invalidshape-c.check
new file mode 100644
index 0000000..0b2e9cf
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-c.check
@@ -0,0 +1,9 @@
+Macros_Test_2.scala:2: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ def foo(x: Any) = macro {2; Impls.foo}
+ ^
+Macros_Test_2.scala:2: error: missing arguments for method foo in object Impls;
+follow this method with `_' if you want to treat it as a partially applied function
+ def foo(x: Any) = macro {2; Impls.foo}
+ ^
+one warning found
+one error found
diff --git a/test/files/neg/macro-invalidshape-c.flags b/test/files/neg/macro-invalidshape-c.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-c.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape-c/Impls_1.scala b/test/files/neg/macro-invalidshape-c/Impls_1.scala
new file mode 100644
index 0000000..4467021
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-c/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Any]) = ???
+}
diff --git a/test/files/neg/macro-invalidshape-c/Macros_Test_2.scala b/test/files/neg/macro-invalidshape-c/Macros_Test_2.scala
new file mode 100644
index 0000000..552c371
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-c/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo(x: Any) = macro {2; Impls.foo}
+}
+
+object Test extends App {
+ import Macros._
+ foo(42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidshape-d.check b/test/files/neg/macro-invalidshape-d.check
new file mode 100644
index 0000000..e43a2ca
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-d.check
@@ -0,0 +1,8 @@
+Macros_Test_2.scala:2: warning: macro is now a reserved word; usage as an identifier is deprecated
+ def foo(x: Any) = {2; macro Impls.foo}
+ ^
+Macros_Test_2.scala:2: error: ';' expected but '.' found.
+ def foo(x: Any) = {2; macro Impls.foo}
+ ^
+one warning found
+one error found
diff --git a/test/files/neg/macro-invalidshape-d.flags b/test/files/neg/macro-invalidshape-d.flags
new file mode 100644
index 0000000..83b7265
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-d.flags
@@ -0,0 +1 @@
+-deprecation -language:experimental.macros
diff --git a/test/files/neg/macro-invalidshape-d/Impls_1.scala b/test/files/neg/macro-invalidshape-d/Impls_1.scala
new file mode 100644
index 0000000..4467021
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-d/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Any]) = ???
+}
diff --git a/test/files/neg/macro-invalidshape-d/Macros_Test_2.scala b/test/files/neg/macro-invalidshape-d/Macros_Test_2.scala
new file mode 100644
index 0000000..bacd9a6
--- /dev/null
+++ b/test/files/neg/macro-invalidshape-d/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo(x: Any) = {2; macro Impls.foo}
+}
+
+object Test extends App {
+ import Macros._
+ foo(42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-context-bounds.check b/test/files/neg/macro-invalidsig-context-bounds.check
new file mode 100644
index 0000000..cbb2b06
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-context-bounds.check
@@ -0,0 +1,4 @@
+Macros_Test_1.scala:2: error: macro implementations cannot have implicit parameters other than WeakTypeTag evidences
+ def foo[U] = macro Impls.foo[U]
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-context-bounds.flags b/test/files/neg/macro-invalidsig-context-bounds.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-context-bounds.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-context-bounds/Impls_1.scala b/test/files/neg/macro-invalidsig-context-bounds/Impls_1.scala
new file mode 100644
index 0000000..c066c48
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-context-bounds/Impls_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U: c.WeakTypeTag: Numeric](c: Ctx) = {
+ import c.universe._
+ Literal(Constant(42))
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-context-bounds/Macros_Test_1.scala b/test/files/neg/macro-invalidsig-context-bounds/Macros_Test_1.scala
new file mode 100644
index 0000000..5b4602f
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-context-bounds/Macros_Test_1.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo[U] = macro Impls.foo[U]
+}
+
+object Test extends App {
+ import Macros._
+ println(foo[String])
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-badargc.check b/test/files/neg/macro-invalidsig-ctx-badargc.check
new file mode 100644
index 0000000..7e8bcba
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-badargc.check
@@ -0,0 +1,7 @@
+Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : : Nothing
+number of parameter sections differ
+ def foo = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-ctx-badargc.flags b/test/files/neg/macro-invalidsig-ctx-badargc.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-badargc.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-badargc/Impls_1.scala b/test/files/neg/macro-invalidsig-ctx-badargc/Impls_1.scala
new file mode 100644
index 0000000..4d5d291
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-badargc/Impls_1.scala
@@ -0,0 +1,3 @@
+object Impls {
+ def foo = ???
+}
diff --git a/test/files/neg/macro-invalidsig-ctx-badargc/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-ctx-badargc/Macros_Test_2.scala
new file mode 100644
index 0000000..96a8de2
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-badargc/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo = macro Impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ foo
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-badtype.check b/test/files/neg/macro-invalidsig-ctx-badtype.check
new file mode 100644
index 0000000..837ec3e
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-badtype.check
@@ -0,0 +1,7 @@
+Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (c: scala.reflect.api.Universe): Nothing
+type mismatch for parameter c: scala.reflect.macros.Context does not conform to scala.reflect.api.Universe
+ def foo = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-ctx-badtype.flags b/test/files/neg/macro-invalidsig-ctx-badtype.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-badtype.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-badtype/Impls_1.scala b/test/files/neg/macro-invalidsig-ctx-badtype/Impls_1.scala
new file mode 100644
index 0000000..cf1a4cf
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-badtype/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.api.{Universe => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = ???
+}
diff --git a/test/files/neg/macro-invalidsig-ctx-badtype/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-ctx-badtype/Macros_Test_2.scala
new file mode 100644
index 0000000..96a8de2
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-badtype/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo = macro Impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ foo
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-badvarargs.check b/test/files/neg/macro-invalidsig-ctx-badvarargs.check
new file mode 100644
index 0000000..a96421a
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-badvarargs.check
@@ -0,0 +1,7 @@
+Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (cs: scala.reflect.macros.Context*): Nothing
+types incompatible for parameter cs: corresponding is not a vararg parameter
+ def foo = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-ctx-badvarargs.flags b/test/files/neg/macro-invalidsig-ctx-badvarargs.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-badvarargs.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-badvarargs/Impls_1.scala b/test/files/neg/macro-invalidsig-ctx-badvarargs/Impls_1.scala
new file mode 100644
index 0000000..c4ed8be
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-badvarargs/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(cs: Ctx*) = ???
+}
diff --git a/test/files/neg/macro-invalidsig-ctx-badvarargs/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-ctx-badvarargs/Macros_Test_2.scala
new file mode 100644
index 0000000..96a8de2
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-badvarargs/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo = macro Impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ foo
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-noctx.check b/test/files/neg/macro-invalidsig-ctx-noctx.check
new file mode 100644
index 0000000..fd3632e
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-noctx.check
@@ -0,0 +1,7 @@
+Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Any]): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context): Nothing
+number of parameter sections differ
+ def foo(x: Any) = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-ctx-noctx.flags b/test/files/neg/macro-invalidsig-ctx-noctx.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-noctx.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-ctx-noctx/Impls_1.scala b/test/files/neg/macro-invalidsig-ctx-noctx/Impls_1.scala
new file mode 100644
index 0000000..6904cfb
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-noctx/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = ???
+}
diff --git a/test/files/neg/macro-invalidsig-ctx-noctx/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-ctx-noctx/Macros_Test_2.scala
new file mode 100644
index 0000000..e053cf9
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-ctx-noctx/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo(x: Any) = macro Impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ foo(42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-implicit-params.check b/test/files/neg/macro-invalidsig-implicit-params.check
new file mode 100644
index 0000000..900098f
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-implicit-params.check
@@ -0,0 +1,4 @@
+Impls_Macros_1.scala:18: error: macro implementations cannot have implicit parameters other than WeakTypeTag evidences
+ def foo_targs[U](x: Int) = macro Impls.foo_targs[T, U]
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-implicit-params.flags b/test/files/neg/macro-invalidsig-implicit-params.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-implicit-params.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala
new file mode 100644
index 0000000..7a72934
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-implicit-params/Impls_Macros_1.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo_targs[T, U: c.WeakTypeTag](c: Ctx)(implicit x: c.Expr[Int]) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val body = Block(List(
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("invoking foo_targs...")))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("type of prefix is: " + prefix.staticType)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("U is: " + implicitly[c.WeakTypeTag[U]].tpe))))),
+ Literal(Constant(())))
+ c.Expr[Unit](body)
+ }
+}
+
+class Macros[T] {
+ def foo_targs[U](x: Int) = macro Impls.foo_targs[T, U]
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-implicit-params/Test_2.scala b/test/files/neg/macro-invalidsig-implicit-params/Test_2.scala
new file mode 100644
index 0000000..90e850d
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-implicit-params/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println("foo_targs:")
+ new Macros[Int]().foo_targs[String](42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-badargc.check b/test/files/neg/macro-invalidsig-params-badargc.check
new file mode 100644
index 0000000..bb26b24
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-badargc.check
@@ -0,0 +1,7 @@
+Impls_Macros_1.scala:8: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): Nothing
+parameter lists have different length, found extra parameter y: c.Expr[Int]
+ def foo(x: Int) = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-params-badargc.flags b/test/files/neg/macro-invalidsig-params-badargc.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-badargc.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-badargc/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-params-badargc/Impls_Macros_1.scala
new file mode 100644
index 0000000..ae16612
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-badargc/Impls_Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int], y: c.Expr[Int]) = ???
+}
+
+object Macros {
+ def foo(x: Int) = macro Impls.foo
+}
diff --git a/test/files/neg/macro-invalidsig-params-badargc/Test_2.scala b/test/files/neg/macro-invalidsig-params-badargc/Test_2.scala
new file mode 100644
index 0000000..cbd6232
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-badargc/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros._
+ foo(42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-badtype.check b/test/files/neg/macro-invalidsig-params-badtype.check
new file mode 100644
index 0000000..8227614
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-badtype.check
@@ -0,0 +1,7 @@
+Impls_Macros_1.scala:8: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(x: c.universe.Tree): Nothing
+type mismatch for parameter x: c.Expr[Int] does not conform to c.universe.Tree
+ def foo(x: Int) = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-params-badtype.flags b/test/files/neg/macro-invalidsig-params-badtype.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-badtype.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala
new file mode 100644
index 0000000..ab90b85
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-badtype/Impls_Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.universe.Tree) = ???
+}
+
+object Macros {
+ def foo(x: Int) = macro Impls.foo
+}
diff --git a/test/files/neg/macro-invalidsig-params-badtype/Test_2.scala b/test/files/neg/macro-invalidsig-params-badtype/Test_2.scala
new file mode 100644
index 0000000..cbd6232
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-badtype/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros._
+ foo(42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-badvarargs.check b/test/files/neg/macro-invalidsig-params-badvarargs.check
new file mode 100644
index 0000000..cb4d2d9
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-badvarargs.check
@@ -0,0 +1,7 @@
+Impls_Macros_1.scala:8: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(xs: c.Expr[Int]*): Nothing
+parameter lists have different length, required extra parameter y: c.Expr[Int]
+ def foo(x: Int, y: Int) = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-params-badvarargs.flags b/test/files/neg/macro-invalidsig-params-badvarargs.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-badvarargs.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-badvarargs/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-params-badvarargs/Impls_Macros_1.scala
new file mode 100644
index 0000000..b4c75ad
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-badvarargs/Impls_Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(xs: c.Expr[Int]*) = ???
+}
+
+object Macros {
+ def foo(x: Int, y: Int) = macro Impls.foo
+}
diff --git a/test/files/neg/macro-invalidsig-params-badvarargs/Test_2.scala b/test/files/neg/macro-invalidsig-params-badvarargs/Test_2.scala
new file mode 100644
index 0000000..fa50ac4
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-badvarargs/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros._
+ foo(42, 100)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-namemismatch.check b/test/files/neg/macro-invalidsig-params-namemismatch.check
new file mode 100644
index 0000000..82612a9
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-namemismatch.check
@@ -0,0 +1,7 @@
+Impls_Macros_1.scala:8: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(y: c.Expr[Int], x: c.Expr[Int]): Nothing
+parameter names differ: x != y
+ def foo(x: Int, y: Int) = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-params-namemismatch.flags b/test/files/neg/macro-invalidsig-params-namemismatch.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-namemismatch.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-params-namemismatch/Impls_Macros_1.scala b/test/files/neg/macro-invalidsig-params-namemismatch/Impls_Macros_1.scala
new file mode 100644
index 0000000..c7cf0b0
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-namemismatch/Impls_Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(y: c.Expr[Int], x: c.Expr[Int]) = ???
+}
+
+object Macros {
+ def foo(x: Int, y: Int) = macro Impls.foo
+}
diff --git a/test/files/neg/macro-invalidsig-params-namemismatch/Test_2.scala b/test/files/neg/macro-invalidsig-params-namemismatch/Test_2.scala
new file mode 100644
index 0000000..fa50ac4
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-params-namemismatch/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros._
+ foo(42, 100)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-badtype.check b/test/files/neg/macro-invalidsig-tparams-badtype.check
new file mode 100644
index 0000000..273d011
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-badtype.check
@@ -0,0 +1,7 @@
+Macros_Test_2.scala:2: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ found : (c: scala.reflect.macros.Context)(U: c.universe.Type): Nothing
+number of parameter sections differ
+ def foo[U] = macro Impls.foo[U]
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-badtype.flags b/test/files/neg/macro-invalidsig-tparams-badtype.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-badtype.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-badtype/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-badtype/Impls_1.scala
new file mode 100644
index 0000000..dbeca17
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-badtype/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U](c: Ctx)(U: c.universe.Type) = ???
+}
diff --git a/test/files/neg/macro-invalidsig-tparams-badtype/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-badtype/Macros_Test_2.scala
new file mode 100644
index 0000000..a82e813
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-badtype/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo[U] = macro Impls.foo[U]
+}
+
+object Test extends App {
+ import Macros._
+ foo[Int]
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-a.check b/test/files/neg/macro-invalidsig-tparams-bounds-a.check
new file mode 100644
index 0000000..b6248a1
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-bounds-a.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:2: error: type arguments [U] do not conform to method foo's type parameter bounds [U <: String]
+ def foo[U] = macro Impls.foo[U]
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-a.flags b/test/files/neg/macro-invalidsig-tparams-bounds-a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-bounds-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-a/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-bounds-a/Impls_1.scala
new file mode 100644
index 0000000..89020de
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-bounds-a/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U <: String](c: Ctx) = ???
+}
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-a/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-bounds-a/Macros_Test_2.scala
new file mode 100644
index 0000000..a82e813
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-bounds-a/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo[U] = macro Impls.foo[U]
+}
+
+object Test extends App {
+ import Macros._
+ foo[Int]
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-b.check b/test/files/neg/macro-invalidsig-tparams-bounds-b.check
new file mode 100644
index 0000000..74eb522
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-bounds-b.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:2: error: type arguments [U] do not conform to method foo's type parameter bounds [U <: String]
+ def foo[U <: Int] = macro Impls.foo[U]
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-b.flags b/test/files/neg/macro-invalidsig-tparams-bounds-b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-bounds-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-b/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-bounds-b/Impls_1.scala
new file mode 100644
index 0000000..89020de
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-bounds-b/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U <: String](c: Ctx) = ???
+}
diff --git a/test/files/neg/macro-invalidsig-tparams-bounds-b/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-bounds-b/Macros_Test_2.scala
new file mode 100644
index 0000000..eed6369
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-bounds-b/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo[U <: Int] = macro Impls.foo[U]
+}
+
+object Test extends App {
+ import Macros._
+ foo[Int]
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-a.check b/test/files/neg/macro-invalidsig-tparams-notparams-a.check
new file mode 100644
index 0000000..c731259
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-a.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:2: error: macro implementation reference has too few type arguments for method foo: [U](c: scala.reflect.macros.Context)(implicit evidence$1: c.WeakTypeTag[U])Nothing
+ def foo = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-a.flags b/test/files/neg/macro-invalidsig-tparams-notparams-a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-a/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-notparams-a/Impls_1.scala
new file mode 100644
index 0000000..f8b3c92
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-a/Impls_1.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U: c.WeakTypeTag](c: Ctx) = ???
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-a/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-notparams-a/Macros_Test_2.scala
new file mode 100644
index 0000000..96a8de2
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-a/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo = macro Impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ foo
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-b.check b/test/files/neg/macro-invalidsig-tparams-notparams-b.check
new file mode 100644
index 0000000..e3d4505
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-b.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:3: error: macro implementation reference has too few type arguments for method foo: [T, U, V](c: scala.reflect.macros.Context)(implicit evidence$1: c.WeakTypeTag[T], implicit evidence$2: c.WeakTypeTag[U], implicit V: c.WeakTypeTag[V])c.Expr[Unit]
+ def foo[V] = macro Impls.foo
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-b.flags b/test/files/neg/macro-invalidsig-tparams-notparams-b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-b/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-notparams-b/Impls_1.scala
new file mode 100644
index 0000000..baf3aab
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-b/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[T: c.WeakTypeTag, U: c.WeakTypeTag, V](c: Ctx)(implicit V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+ println(implicitly[c.WeakTypeTag[T]])
+ println(implicitly[c.WeakTypeTag[U]])
+ println(V)
+ c.literalUnit
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-b/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-notparams-b/Macros_Test_2.scala
new file mode 100644
index 0000000..7d02bf6
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-b/Macros_Test_2.scala
@@ -0,0 +1,11 @@
+class D[T] {
+ class C[U] {
+ def foo[V] = macro Impls.foo
+ }
+}
+
+object Test extends App {
+ val outer1 = new D[Int]
+ val outer2 = new outer1.C[String]
+ outer2.foo[Boolean]
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-c.check b/test/files/neg/macro-invalidsig-tparams-notparams-c.check
new file mode 100644
index 0000000..0be0b6f
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-c.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:3: error: wrong number of type parameters for method foo: [T, U, V](c: scala.reflect.macros.Context)(implicit evidence$1: c.WeakTypeTag[T], implicit evidence$2: c.WeakTypeTag[U], implicit V: c.WeakTypeTag[V])c.Expr[Unit]
+ def foo[V] = macro Impls.foo[V]
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-c.flags b/test/files/neg/macro-invalidsig-tparams-notparams-c.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-c.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-c/Impls_1.scala b/test/files/neg/macro-invalidsig-tparams-notparams-c/Impls_1.scala
new file mode 100644
index 0000000..44b4ed6
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-c/Impls_1.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[T: c.WeakTypeTag, U: c.WeakTypeTag, V](c: Ctx)(implicit V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+ import c.universe._
+ println(implicitly[c.WeakTypeTag[T]])
+ println(implicitly[c.WeakTypeTag[U]])
+ println(V)
+ c.literalUnit
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidsig-tparams-notparams-c/Macros_Test_2.scala b/test/files/neg/macro-invalidsig-tparams-notparams-c/Macros_Test_2.scala
new file mode 100644
index 0000000..109e142
--- /dev/null
+++ b/test/files/neg/macro-invalidsig-tparams-notparams-c/Macros_Test_2.scala
@@ -0,0 +1,11 @@
+class D[T] {
+ class C[U] {
+ def foo[V] = macro Impls.foo[V]
+ }
+}
+
+object Test extends App {
+ val outer1 = new D[Int]
+ val outer2 = new outer1.C[String]
+ outer2.foo[Boolean]
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-badargs.check b/test/files/neg/macro-invalidusage-badargs.check
new file mode 100644
index 0000000..294cfd0
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badargs.check
@@ -0,0 +1,6 @@
+Macros_Test_2.scala:7: error: type mismatch;
+ found : String("42")
+ required: Int
+ val s: String = foo("42")
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidusage-badargs.flags b/test/files/neg/macro-invalidusage-badargs.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badargs.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-badargs/Impls_1.scala b/test/files/neg/macro-invalidusage-badargs/Impls_1.scala
new file mode 100644
index 0000000..52c9f9c
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badargs/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int]) = x
+}
diff --git a/test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala
new file mode 100644
index 0000000..a6af1bb
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badargs/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo(x: Int) = macro Impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ val s: String = foo("42")
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-badbounds-a.check b/test/files/neg/macro-invalidusage-badbounds-a.check
new file mode 100644
index 0000000..277f407
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badbounds-a.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:7: error: type arguments [Int] do not conform to macro method foo's type parameter bounds [U <: String]
+ foo[Int]
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidusage-badbounds-a.flags b/test/files/neg/macro-invalidusage-badbounds-a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badbounds-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-badbounds-a/Impls_1.scala b/test/files/neg/macro-invalidusage-badbounds-a/Impls_1.scala
new file mode 100644
index 0000000..6ee71a3
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badbounds-a/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U <: String](c: Ctx) = c.literalUnit
+}
diff --git a/test/files/neg/macro-invalidusage-badbounds-a/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-badbounds-a/Macros_Test_2.scala
new file mode 100644
index 0000000..3139599
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badbounds-a/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo[U <: String] = macro Impls.foo[U]
+}
+
+object Test extends App {
+ import Macros._
+ foo[Int]
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-badtargs.check b/test/files/neg/macro-invalidusage-badtargs.check
new file mode 100644
index 0000000..73801ab
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badtargs.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:7: error: macro method foo: (x: Int)Int does not take type parameters.
+ val s: String = foo[String](42)
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidusage-badtargs.flags b/test/files/neg/macro-invalidusage-badtargs.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badtargs.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-badtargs/Impls_1.scala b/test/files/neg/macro-invalidusage-badtargs/Impls_1.scala
new file mode 100644
index 0000000..52c9f9c
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badtargs/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int]) = x
+}
diff --git a/test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala
new file mode 100644
index 0000000..c54093b
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-badtargs/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo(x: Int) = macro Impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ val s: String = foo[String](42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-methodvaluesyntax.check b/test/files/neg/macro-invalidusage-methodvaluesyntax.check
new file mode 100644
index 0000000..10046b2
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-methodvaluesyntax.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:6: error: macros cannot be eta-expanded
+ val firstClassFoo = Macros.foo _
+ ^
+one error found
diff --git a/test/files/neg/macro-invalidusage-methodvaluesyntax.flags b/test/files/neg/macro-invalidusage-methodvaluesyntax.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-methodvaluesyntax.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala b/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala
new file mode 100644
index 0000000..8d7fdf3
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.universe._
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works"))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-methodvaluesyntax/Macros_Test_2.scala b/test/files/neg/macro-invalidusage-methodvaluesyntax/Macros_Test_2.scala
new file mode 100644
index 0000000..343cec9
--- /dev/null
+++ b/test/files/neg/macro-invalidusage-methodvaluesyntax/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo = macro Impls.foo
+}
+
+object Test extends App {
+ val firstClassFoo = Macros.foo _
+ firstClassFoo
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-noexpand.check b/test/files/neg/macro-noexpand.check
new file mode 100644
index 0000000..2c176a9
--- /dev/null
+++ b/test/files/neg/macro-noexpand.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:7: error: not found: value x
+ foo(x)
+ ^
+one error found
diff --git a/test/files/neg/macro-noexpand.flags b/test/files/neg/macro-noexpand.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-noexpand.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-noexpand/Impls_1.scala b/test/files/neg/macro-noexpand/Impls_1.scala
new file mode 100644
index 0000000..4467021
--- /dev/null
+++ b/test/files/neg/macro-noexpand/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Any]) = ???
+}
diff --git a/test/files/neg/macro-noexpand/Macros_Test_2.scala b/test/files/neg/macro-noexpand/Macros_Test_2.scala
new file mode 100644
index 0000000..e783e2b
--- /dev/null
+++ b/test/files/neg/macro-noexpand/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo(x: Any) = macro Impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ foo(x)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-nontypeablebody.check b/test/files/neg/macro-nontypeablebody.check
new file mode 100644
index 0000000..9f5831a
--- /dev/null
+++ b/test/files/neg/macro-nontypeablebody.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:2: error: value foo2 is not a member of object Impls
+ def foo(x: Any) = macro Impls.foo2
+ ^
+one error found
diff --git a/test/files/neg/macro-nontypeablebody.flags b/test/files/neg/macro-nontypeablebody.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-nontypeablebody.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-nontypeablebody/Impls_1.scala b/test/files/neg/macro-nontypeablebody/Impls_1.scala
new file mode 100644
index 0000000..4467021
--- /dev/null
+++ b/test/files/neg/macro-nontypeablebody/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Any]) = ???
+}
diff --git a/test/files/neg/macro-nontypeablebody/Macros_Test_2.scala b/test/files/neg/macro-nontypeablebody/Macros_Test_2.scala
new file mode 100644
index 0000000..2031893
--- /dev/null
+++ b/test/files/neg/macro-nontypeablebody/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo(x: Any) = macro Impls.foo2
+}
+
+object Test extends App {
+ import Macros._
+ foo(42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-a.check b/test/files/neg/macro-override-macro-overrides-abstract-method-a.check
new file mode 100644
index 0000000..895e0dc
--- /dev/null
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-a.check
@@ -0,0 +1,5 @@
+Impls_Macros_1.scala:12: error: overriding method foo in trait Foo of type (x: Int)Int;
+ macro method foo cannot override an abstract method
+ def foo(x: Int) = macro Impls.impl
+ ^
+one error found
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-a.flags b/test/files/neg/macro-override-macro-overrides-abstract-method-a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-a/Impls_Macros_1.scala b/test/files/neg/macro-override-macro-overrides-abstract-method-a/Impls_Macros_1.scala
new file mode 100644
index 0000000..e43264f
--- /dev/null
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-a/Impls_Macros_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def impl(c: Ctx)(x: c.Expr[Int]) = x
+}
+
+trait Foo {
+ def foo(x: Int): Int
+}
+
+object Macros extends Foo {
+ def foo(x: Int) = macro Impls.impl
+}
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-a/Test_2.scala b/test/files/neg/macro-override-macro-overrides-abstract-method-a/Test_2.scala
new file mode 100644
index 0000000..7e3357e
--- /dev/null
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-a/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ val designator: Macros.type = Macros
+ designator.foo(42)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-b.check b/test/files/neg/macro-override-macro-overrides-abstract-method-b.check
new file mode 100644
index 0000000..cde3dbd
--- /dev/null
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-b.check
@@ -0,0 +1,11 @@
+Test_2.scala:3: error: anonymous class $anon inherits conflicting members:
+ macro method t in trait C of type ()Unit and
+ method t in trait A of type ()Unit
+(Note: this can be resolved by declaring an override in anonymous class $anon.)
+ val c2 = new C with A {}
+ ^
+Test_2.scala:5: error: overriding macro method t in trait C of type ()Unit;
+ method t cannot override a macro
+ val c4 = new C with A { override def t(): Unit = () }
+ ^
+two errors found
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-b.flags b/test/files/neg/macro-override-macro-overrides-abstract-method-b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-b/Impls_Macros_1.scala b/test/files/neg/macro-override-macro-overrides-abstract-method-b/Impls_Macros_1.scala
new file mode 100644
index 0000000..f5b2555
--- /dev/null
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-b/Impls_Macros_1.scala
@@ -0,0 +1,8 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+trait T { def t(): Unit }
+trait A { def t(): Unit = () }
+
+object Macro { def t(c: Context)(): c.Expr[Unit] = c.universe.reify(()) }
+trait C extends T { self: A => override def t(): Unit = macro Macro.t }
diff --git a/test/files/neg/macro-override-macro-overrides-abstract-method-b/Test_2.scala b/test/files/neg/macro-override-macro-overrides-abstract-method-b/Test_2.scala
new file mode 100644
index 0000000..9b4c8e3
--- /dev/null
+++ b/test/files/neg/macro-override-macro-overrides-abstract-method-b/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ val c1 = new A with C {}
+ val c2 = new C with A {}
+ val c3 = new C with A { override def t(): Unit = macro Macro.t }
+ val c4 = new C with A { override def t(): Unit = () }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-override-method-overrides-macro.check b/test/files/neg/macro-override-method-overrides-macro.check
new file mode 100644
index 0000000..66dc11b
--- /dev/null
+++ b/test/files/neg/macro-override-method-overrides-macro.check
@@ -0,0 +1,5 @@
+Macros_Test_2.scala:8: error: overriding macro method foo in class B of type (x: String)Unit;
+ method foo cannot override a macro
+ override def foo(x: String) = println("fooDString")
+ ^
+one error found
diff --git a/test/files/neg/macro-override-method-overrides-macro.flags b/test/files/neg/macro-override-method-overrides-macro.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/macro-override-method-overrides-macro.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala b/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala
new file mode 100644
index 0000000..ec93dd4
--- /dev/null
+++ b/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def impl(c: Ctx)(tag: String, x: c.Expr[_]) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(tag)), Literal(Constant(prefix.toString)), x.tree))
+ c.Expr[Unit](body)
+ }
+
+ def fooBString(c: Ctx)(x: c.Expr[_]) = impl(c)("fooBString", x)
+ def fooBInt(c: Ctx)(x: c.Expr[_]) = impl(c)("fooBInt", x)
+ def fooDInt(c: Ctx)(x: c.Expr[_]) = impl(c)("fooDInt", x)
+ def fooZString(c: Ctx)(x: c.Expr[_]) = impl(c)("fooZString", x)
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-override-method-overrides-macro/Macros_Test_2.scala b/test/files/neg/macro-override-method-overrides-macro/Macros_Test_2.scala
new file mode 100644
index 0000000..36821b0
--- /dev/null
+++ b/test/files/neg/macro-override-method-overrides-macro/Macros_Test_2.scala
@@ -0,0 +1,15 @@
+class B {
+ def foo(x: String) = macro Impls.fooBString
+ def foo(x: Int) = macro Impls.fooBInt
+ def foo(x: Boolean) = println("fooBBoolean")
+}
+
+class D extends B {
+ override def foo(x: String) = println("fooDString")
+ override def foo(x: Int) = macro Impls.fooDInt
+}
+
+class Z extends D {
+ override def foo(x: String) = macro Impls.fooZString
+ override def foo(x: Boolean) = println("fooZBoolean")
+}
diff --git a/test/files/neg/macro-qmarkqmarkqmark.check b/test/files/neg/macro-qmarkqmarkqmark.check
new file mode 100644
index 0000000..afd49e7
--- /dev/null
+++ b/test/files/neg/macro-qmarkqmarkqmark.check
@@ -0,0 +1,13 @@
+macro-qmarkqmarkqmark.scala:5: error: macro implementation is missing
+ foo1
+ ^
+macro-qmarkqmarkqmark.scala:8: error: macros cannot be partially applied
+ foo2
+ ^
+macro-qmarkqmarkqmark.scala:9: error: macro implementation is missing
+ foo2(1)
+ ^
+macro-qmarkqmarkqmark.scala:12: error: macro implementation is missing
+ foo3[Int]
+ ^
+four errors found
diff --git a/test/files/neg/macro-qmarkqmarkqmark.scala b/test/files/neg/macro-qmarkqmarkqmark.scala
new file mode 100644
index 0000000..c8d8550
--- /dev/null
+++ b/test/files/neg/macro-qmarkqmarkqmark.scala
@@ -0,0 +1,13 @@
+import language.experimental.macros
+
+object Macros {
+ def foo1 = macro ???
+ foo1
+
+ def foo2(x: Int) = macro ???
+ foo2
+ foo2(1)
+
+ def foo3[T] = macro ???
+ foo3[Int]
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-reify-typetag-hktypeparams-notags.check b/test/files/neg/macro-reify-typetag-hktypeparams-notags.check
new file mode 100644
index 0000000..44efaae
--- /dev/null
+++ b/test/files/neg/macro-reify-typetag-hktypeparams-notags.check
@@ -0,0 +1,7 @@
+Test.scala:5: error: No TypeTag available for C[T]
+ println(implicitly[TypeTag[C[T]]])
+ ^
+Test.scala:6: error: No TypeTag available for List[C[T]]
+ println(implicitly[TypeTag[List[C[T]]]])
+ ^
+two errors found
diff --git a/test/files/neg/macro-reify-typetag-hktypeparams-notags/Test.scala b/test/files/neg/macro-reify-typetag-hktypeparams-notags/Test.scala
new file mode 100644
index 0000000..c7b1ced
--- /dev/null
+++ b/test/files/neg/macro-reify-typetag-hktypeparams-notags/Test.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def fooNoTypeTagHK[C[_], T] = {
+ println(implicitly[TypeTag[C[T]]])
+ println(implicitly[TypeTag[List[C[T]]]])
+ }
+ fooNoTypeTagHK[List, Int]
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-reify-typetag-typeparams-notags.check b/test/files/neg/macro-reify-typetag-typeparams-notags.check
new file mode 100644
index 0000000..7c67b02
--- /dev/null
+++ b/test/files/neg/macro-reify-typetag-typeparams-notags.check
@@ -0,0 +1,7 @@
+Test.scala:5: error: No TypeTag available for T
+ println(implicitly[TypeTag[T]])
+ ^
+Test.scala:6: error: No TypeTag available for List[T]
+ println(implicitly[TypeTag[List[T]]])
+ ^
+two errors found
diff --git a/test/files/neg/macro-reify-typetag-typeparams-notags/Test.scala b/test/files/neg/macro-reify-typetag-typeparams-notags/Test.scala
new file mode 100644
index 0000000..6d849cd
--- /dev/null
+++ b/test/files/neg/macro-reify-typetag-typeparams-notags/Test.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def fooNoTypeTag[T] = {
+ println(implicitly[TypeTag[T]])
+ println(implicitly[TypeTag[List[T]]])
+ }
+ fooNoTypeTag[Int]
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-reify-typetag-useabstypetag.check b/test/files/neg/macro-reify-typetag-useabstypetag.check
new file mode 100644
index 0000000..7c67b02
--- /dev/null
+++ b/test/files/neg/macro-reify-typetag-useabstypetag.check
@@ -0,0 +1,7 @@
+Test.scala:5: error: No TypeTag available for T
+ println(implicitly[TypeTag[T]])
+ ^
+Test.scala:6: error: No TypeTag available for List[T]
+ println(implicitly[TypeTag[List[T]]])
+ ^
+two errors found
diff --git a/test/files/neg/macro-reify-typetag-useabstypetag/Test.scala b/test/files/neg/macro-reify-typetag-useabstypetag/Test.scala
new file mode 100644
index 0000000..1e7fcb3
--- /dev/null
+++ b/test/files/neg/macro-reify-typetag-useabstypetag/Test.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def fooTypeTag[T: WeakTypeTag] = {
+ println(implicitly[TypeTag[T]])
+ println(implicitly[TypeTag[List[T]]])
+ }
+ fooTypeTag[Int]
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-without-xmacros-a.check b/test/files/neg/macro-without-xmacros-a.check
new file mode 100644
index 0000000..ec194be
--- /dev/null
+++ b/test/files/neg/macro-without-xmacros-a.check
@@ -0,0 +1,17 @@
+Macros_2.scala:5: error: macro definition needs to be enabled
+by making the implicit value scala.language.experimental.macros visible.
+This can be achieved by adding the import clause 'import scala.language.experimental.macros'
+or by setting the compiler option -language:experimental.macros.
+See the Scala docs for value scala.language.experimental.macros for a discussion
+why the feature needs to be explicitly enabled.
+ def foo(x: Int): Int = macro foo_impl
+ ^
+Macros_2.scala:7: error: macro definition needs to be enabled
+by making the implicit value scala.language.experimental.macros visible.
+ def bar(x: Int): Int = macro bar_impl
+ ^
+Macros_2.scala:11: error: macro definition needs to be enabled
+by making the implicit value scala.language.experimental.macros visible.
+ def quux(x: Int): Int = macro quux_impl
+ ^
+three errors found
diff --git a/test/files/neg/macro-without-xmacros-a/Impls_1.scala b/test/files/neg/macro-without-xmacros-a/Impls_1.scala
new file mode 100644
index 0000000..8976f8e
--- /dev/null
+++ b/test/files/neg/macro-without-xmacros-a/Impls_1.scala
@@ -0,0 +1,18 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1)))))
+ }
+
+ def bar_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2)))))
+ }
+
+ def quux_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3)))))
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-without-xmacros-a/Macros_2.scala b/test/files/neg/macro-without-xmacros-a/Macros_2.scala
new file mode 100644
index 0000000..62f9dcf
--- /dev/null
+++ b/test/files/neg/macro-without-xmacros-a/Macros_2.scala
@@ -0,0 +1,12 @@
+import Impls._
+
+object Macros {
+ object Shmacros {
+ def foo(x: Int): Int = macro foo_impl
+ }
+ def bar(x: Int): Int = macro bar_impl
+}
+
+class Macros {
+ def quux(x: Int): Int = macro quux_impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-without-xmacros-a/Test_3.scala b/test/files/neg/macro-without-xmacros-a/Test_3.scala
new file mode 100644
index 0000000..e9a10e2
--- /dev/null
+++ b/test/files/neg/macro-without-xmacros-a/Test_3.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros.Shmacros._
+ println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-without-xmacros-b.check b/test/files/neg/macro-without-xmacros-b.check
new file mode 100644
index 0000000..c97850f
--- /dev/null
+++ b/test/files/neg/macro-without-xmacros-b.check
@@ -0,0 +1,17 @@
+Macros_2.scala:3: error: macro definition needs to be enabled
+by making the implicit value scala.language.experimental.macros visible.
+This can be achieved by adding the import clause 'import scala.language.experimental.macros'
+or by setting the compiler option -language:experimental.macros.
+See the Scala docs for value scala.language.experimental.macros for a discussion
+why the feature needs to be explicitly enabled.
+ def foo(x: Int): Int = macro Impls.foo_impl
+ ^
+Macros_2.scala:5: error: macro definition needs to be enabled
+by making the implicit value scala.language.experimental.macros visible.
+ def bar(x: Int): Int = macro Impls.bar_impl
+ ^
+Macros_2.scala:9: error: macro definition needs to be enabled
+by making the implicit value scala.language.experimental.macros visible.
+ def quux(x: Int): Int = macro Impls.quux_impl
+ ^
+three errors found
diff --git a/test/files/neg/macro-without-xmacros-b/Impls_1.scala b/test/files/neg/macro-without-xmacros-b/Impls_1.scala
new file mode 100644
index 0000000..8976f8e
--- /dev/null
+++ b/test/files/neg/macro-without-xmacros-b/Impls_1.scala
@@ -0,0 +1,18 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1)))))
+ }
+
+ def bar_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2)))))
+ }
+
+ def quux_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3)))))
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-without-xmacros-b/Macros_2.scala b/test/files/neg/macro-without-xmacros-b/Macros_2.scala
new file mode 100644
index 0000000..de7080c
--- /dev/null
+++ b/test/files/neg/macro-without-xmacros-b/Macros_2.scala
@@ -0,0 +1,10 @@
+object Macros {
+ object Shmacros {
+ def foo(x: Int): Int = macro Impls.foo_impl
+ }
+ def bar(x: Int): Int = macro Impls.bar_impl
+}
+
+class Macros {
+ def quux(x: Int): Int = macro Impls.quux_impl
+}
\ No newline at end of file
diff --git a/test/files/neg/macro-without-xmacros-b/Test_3.scala b/test/files/neg/macro-without-xmacros-b/Test_3.scala
new file mode 100644
index 0000000..e9a10e2
--- /dev/null
+++ b/test/files/neg/macro-without-xmacros-b/Test_3.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros.Shmacros._
+ println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+}
\ No newline at end of file
diff --git a/test/files/neg/main1.check b/test/files/neg/main1.check
new file mode 100644
index 0000000..1a7a13e
--- /dev/null
+++ b/test/files/neg/main1.check
@@ -0,0 +1,26 @@
+main1.scala:3: error: Foo has a main method with parameter type Array[String], but foo1.Foo will not be a runnable program.
+ Reason: companion is a trait, which means no static forwarder can be generated.
+
+ object Foo { // companion is trait
+ ^
+main1.scala:10: error: Foo has a main method with parameter type Array[String], but foo2.Foo will not be a runnable program.
+ Reason: companion contains its own main method, which means no static forwarder can be generated.
+
+ object Foo { // companion has its own main
+ ^
+main1.scala:22: error: Foo has a main method with parameter type Array[String], but foo3.Foo will not be a runnable program.
+ Reason: companion contains its own main method (implementation restriction: no main is allowed, regardless of signature), which means no static forwarder can be generated.
+
+ object Foo { // Companion contains main, but not an interfering main.
+ ^
+main1.scala:31: error: Foo has a main method with parameter type Array[String], but foo4.Foo will not be a runnable program.
+ Reason: companion contains its own main method, which means no static forwarder can be generated.
+
+ object Foo extends Foo { // Inherits main from the class
+ ^
+main1.scala:39: error: Foo has a main method with parameter type Array[String], but foo5.Foo will not be a runnable program.
+ Reason: companion contains its own main method, which means no static forwarder can be generated.
+
+ object Foo extends Foo { // Overrides main from the class
+ ^
+5 errors found
diff --git a/test/files/run/bug4317.flags b/test/files/neg/main1.flags
similarity index 100%
rename from test/files/run/bug4317.flags
rename to test/files/neg/main1.flags
diff --git a/test/files/neg/main1.scala b/test/files/neg/main1.scala
new file mode 100644
index 0000000..2b5551a
--- /dev/null
+++ b/test/files/neg/main1.scala
@@ -0,0 +1,45 @@
+// negatives
+package foo1 {
+ object Foo { // companion is trait
+ def main(args: Array[String]): Unit = ()
+ }
+ trait Foo
+}
+
+package foo2 {
+ object Foo { // companion has its own main
+ def main(args: Array[String]): Unit = ()
+ }
+ class Foo {
+ def main(args: Array[String]): Unit = ()
+ }
+}
+
+// these should all be made to work, but are negatives for now
+// because forwarders need more work.
+
+package foo3 {
+ object Foo { // Companion contains main, but not an interfering main.
+ def main(args: Array[String]): Unit = ()
+ }
+ class Foo {
+ def main(args: Int): Unit = ()
+ }
+}
+
+package foo4 {
+ object Foo extends Foo { // Inherits main from the class
+ }
+ class Foo {
+ def main(args: Array[String]): Unit = ()
+ }
+}
+
+package foo5 {
+ object Foo extends Foo { // Overrides main from the class
+ override def main(args: Array[String]): Unit = ()
+ }
+ class Foo {
+ def main(args: Array[String]): Unit = ()
+ }
+}
diff --git a/test/files/neg/migration28.check b/test/files/neg/migration28.check
index c22c901..d7dfacf 100644
--- a/test/files/neg/migration28.check
+++ b/test/files/neg/migration28.check
@@ -1,8 +1,5 @@
-migration28.scala:5: error: method ++= in class Stack is deprecated: use pushAll
- s ++= List(1,2,3)
- ^
-migration28.scala:7: error: method foreach in class Stack has changed semantics in version 2.8.0:
-`foreach` traverses in FIFO order.
- s foreach (_ => ())
- ^
-two errors found
+migration28.scala:4: error: method scanRight in trait TraversableLike has changed semantics in version 2.9.0:
+The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.
+ List(1,2,3,4,5).scanRight(0)(_+_)
+ ^
+one error found
diff --git a/test/files/neg/migration28.scala b/test/files/neg/migration28.scala
index 346c1a6..ba73eea 100644
--- a/test/files/neg/migration28.scala
+++ b/test/files/neg/migration28.scala
@@ -1,10 +1,7 @@
object Test {
import scala.collection.mutable._
- val s = new Stack[Int]
- s ++= List(1,2,3)
- s map (_ + 1)
- s foreach (_ => ())
+ List(1,2,3,4,5).scanRight(0)(_+_)
def main(args: Array[String]): Unit = {
diff --git a/test/files/neg/multi-array.check b/test/files/neg/multi-array.check
index f8432a7..511caa1 100644
--- a/test/files/neg/multi-array.check
+++ b/test/files/neg/multi-array.check
@@ -1,8 +1,4 @@
-multi-array.scala:6: warning: new Array(...) with multiple dimensions has been deprecated; use Array.ofDim(...) instead
+multi-array.scala:7: error: too many arguments for constructor Array: (_length: Int)Array[T]
val a: Array[Int] = new Array(10, 10)
^
-multi-array.scala:6: error: too many dimensions for array creation
- val a: Array[Int] = new Array(10, 10)
- ^
-one warning found
one error found
diff --git a/test/files/neg/multi-array.scala b/test/files/neg/multi-array.scala
index 993a1c0..b04e0fa 100644
--- a/test/files/neg/multi-array.scala
+++ b/test/files/neg/multi-array.scala
@@ -1,7 +1,14 @@
-/** Check that a multi-dimensional array can't be created
- * when the wrong number of arguments w.r.t. to the array's
- * type is given.
+/** Multi-dimensional array creation with `new` was removed in 2.10.
+ * The replacement Array.ofDim[Int](10,10) makes the original mistake
+ * which was tested here impossible.
+ * This test will fail now because the constructor doesn't exist anymore.
*/
class Foo {
val a: Array[Int] = new Array(10, 10)
}
+
+//Before removal of constructor non-unary Array constructors:
+/** Check that a multi-dimensional array can't be created
+ * when the wrong number of arguments w.r.t. to the array's
+ * type is given.
+ */
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index d2bd2d1..ea7c323 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -3,15 +3,14 @@ Unspecified value parameter b.
val fac = Fact(1)(2, 3)
^
names-defaults-neg.scala:5: error: type mismatch;
- found : java.lang.String("#")
+ found : String("#")
required: Int
test1(b = 2, a = "#")
^
names-defaults-neg.scala:8: error: positional after named argument.
test1(b = "(*", 23)
^
-names-defaults-neg.scala:13: error: reference to x is ambiguous; it is both, a parameter
-name of the method and the name of a variable currently in scope.
+names-defaults-neg.scala:13: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
test2(x = 1)
^
names-defaults-neg.scala:15: error: not found: value c
@@ -26,66 +25,65 @@ names-defaults-neg.scala:17: error: not found: value m
names-defaults-neg.scala:18: error: not found: value m
test7 { m = 1 } // no named arguments in argument block
^
-names-defaults-neg.scala:19: error: reference to x is ambiguous; it is both, a parameter
-name of the method and the name of a variable currently in scope.
+names-defaults-neg.scala:19: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
test8(x = 1)
^
-names-defaults-neg.scala:22: error: parameter specified twice: a
+names-defaults-neg.scala:22: error: parameter 'a' is already specified at parameter position 1
test1(1, a = 2)
^
-names-defaults-neg.scala:23: error: parameter specified twice: b
+names-defaults-neg.scala:23: error: parameter 'b' is already specified at parameter position 1
test1(b = 1, b = "2")
^
names-defaults-neg.scala:26: error: Int does not take parameters
test3(b = 3, a = 1)(3)
^
names-defaults-neg.scala:35: error: ambiguous reference to overloaded definition,
-both method f in object t1 of type (b: String, a: Int)java.lang.String
-and method f in object t1 of type (a: Int, b: String)java.lang.String
-match argument types (b: java.lang.String,a: Int)
+both method f in object t1 of type (b: String, a: Int)String
+and method f in object t1 of type (a: Int, b: String)String
+match argument types (b: String,a: Int)
t1.f(b = "dkljf", a = 1)
^
names-defaults-neg.scala:42: error: ambiguous reference to overloaded definition,
-both method f in object t3 of type (a2: Int)(b: Int)java.lang.String
-and method f in object t3 of type (a1: Int)java.lang.String
+both method f in object t3 of type (a2: Int)(b: Int)String
+and method f in object t3 of type (a1: Int)String
match argument types (Int)
t3.f(1)
^
names-defaults-neg.scala:43: error: ambiguous reference to overloaded definition,
-both method f in object t3 of type (a2: Int)(b: Int)java.lang.String
-and method f in object t3 of type (a1: Int)java.lang.String
+both method f in object t3 of type (a2: Int)(b: Int)String
+and method f in object t3 of type (a1: Int)String
match argument types (Int)
t3.f(1)(2)
^
names-defaults-neg.scala:49: error: ambiguous reference to overloaded definition,
-both method g in object t7 of type (a: B)java.lang.String
-and method g in object t7 of type (a: C, b: Int*)java.lang.String
+both method g in object t7 of type (a: B)String
+and method g in object t7 of type (a: C, b: Int*)String
match argument types (C)
t7.g(new C()) // ambigous reference
^
-names-defaults-neg.scala:53: error: parameter specified twice: b
+names-defaults-neg.scala:53: error: parameter 'b' is already specified at parameter position 2
test5(a = 1, b = "dkjl", b = "dkj")
^
-names-defaults-neg.scala:54: error: parameter specified twice: b
+names-defaults-neg.scala:54: error: parameter 'b' is already specified at parameter position 2
test5(1, "2", b = 3)
^
names-defaults-neg.scala:55: error: when using named arguments, the vararg parameter has to be specified exactly once
test5(b = "dlkj")
^
names-defaults-neg.scala:61: error: ambiguous reference to overloaded definition,
-both method f in object t8 of type (b: String, a: Int)java.lang.String
-and method f in object t8 of type (a: Int, b: java.lang.Object)java.lang.String
-match argument types (a: Int,b: java.lang.String) and expected result type Any
+both method f in object t8 of type (b: String, a: Int)String
+and method f in object t8 of type (a: Int, b: Object)String
+match argument types (a: Int,b: String) and expected result type Any
println(t8.f(a = 0, b = "1")) // ambigous reference
^
-names-defaults-neg.scala:69: error: wrong number of arguments for <none>: (x: Int, y: String)A1
+names-defaults-neg.scala:69: error: wrong number of arguments for pattern A1(x: Int,y: String)
A1() match { case A1(_) => () }
^
names-defaults-neg.scala:76: error: no type parameters for method test4: (x: T[T[List[T[X forSome { type X }]]]])T[T[List[T[X forSome { type X }]]]] exist so that it can be applied to arguments (List[Int])
--- because ---
argument expression's type is not compatible with formal parameter type;
found : List[Int]
- required: ?T[?T[List[?T[X forSome { type X }]]]]
+ required: ?T
Error occurred in an application involving default arguments.
test4()
^
@@ -112,20 +110,19 @@ names-defaults-neg.scala:91: error: deprecated parameter name a has to be distin
names-defaults-neg.scala:93: warning: the parameter name y has been deprecated. Use b instead.
deprNam3(y = 10, b = 2)
^
-names-defaults-neg.scala:93: error: parameter specified twice: b
+names-defaults-neg.scala:93: error: parameter 'b' is already specified at parameter position 1
deprNam3(y = 10, b = 2)
^
names-defaults-neg.scala:98: error: unknown parameter name: m
f3818(y = 1, m = 1)
^
-names-defaults-neg.scala:131: error: reference to var2 is ambiguous; it is both, a parameter
-name of the method and the name of a variable currently in scope.
+names-defaults-neg.scala:131: error: reference to var2 is ambiguous; it is both a method parameter and a variable in scope.
delay(var2 = 40)
^
names-defaults-neg.scala:134: error: missing parameter type for expanded function ((x$1) => a = x$1)
val taf2: Int => Unit = testAnnFun(a = _, b = get("+"))
^
-names-defaults-neg.scala:135: error: parameter specified twice: a
+names-defaults-neg.scala:135: error: parameter 'a' is already specified at parameter position 1
val taf3 = testAnnFun(b = _: String, a = get(8))
^
names-defaults-neg.scala:136: error: wrong number of parameters; expected = 2
@@ -137,25 +134,35 @@ names-defaults-neg.scala:144: error: variable definition needs type because 'x'
names-defaults-neg.scala:147: error: variable definition needs type because 'x' is used as a named argument in its body.
object t6 { var x = t.f(x = 1) }
^
+names-defaults-neg.scala:147: warning: type-checking the invocation of method f checks if the named argument expression 'x = ...' is a valid assignment
+in the current scope. The resulting type inference error (see above) can be fixed by providing an explicit type in the local definition for x.
+ object t6 { var x = t.f(x = 1) }
+ ^
names-defaults-neg.scala:150: error: variable definition needs type because 'x' is used as a named argument in its body.
class t9 { var x = t.f(x = 1) }
^
+names-defaults-neg.scala:150: warning: type-checking the invocation of method f checks if the named argument expression 'x = ...' is a valid assignment
+in the current scope. The resulting type inference error (see above) can be fixed by providing an explicit type in the local definition for x.
+ class t9 { var x = t.f(x = 1) }
+ ^
names-defaults-neg.scala:164: error: variable definition needs type because 'x' is used as a named argument in its body.
def u3 { var x = u.f(x = 1) }
^
names-defaults-neg.scala:167: error: variable definition needs type because 'x' is used as a named argument in its body.
def u6 { var x = u.f(x = "32") }
^
-names-defaults-neg.scala:170: error: reference to x is ambiguous; it is both, a parameter
-name of the method and the name of a variable currently in scope.
+names-defaults-neg.scala:170: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
def u9 { var x: Int = u.f(x = 1) }
^
names-defaults-neg.scala:177: error: variable definition needs type because 'x' is used as a named argument in its body.
class u15 { var x = u.f(x = 1) }
^
-names-defaults-neg.scala:180: error: reference to x is ambiguous; it is both, a parameter
-name of the method and the name of a variable currently in scope.
+names-defaults-neg.scala:177: warning: type-checking the invocation of method f checks if the named argument expression 'x = ...' is a valid assignment
+in the current scope. The resulting type inference error (see above) can be fixed by providing an explicit type in the local definition for x.
+ class u15 { var x = u.f(x = 1) }
+ ^
+names-defaults-neg.scala:180: error: reference to x is ambiguous; it is both a method parameter and a variable in scope.
class u18 { var x: Int = u.f(x = 1) }
^
-one warning found
+four warnings found
41 errors found
diff --git a/test/files/neg/names-defaults-neg.flags b/test/files/neg/names-defaults-neg.flags
new file mode 100644
index 0000000..dcc59eb
--- /dev/null
+++ b/test/files/neg/names-defaults-neg.flags
@@ -0,0 +1 @@
+-deprecation
diff --git a/test/files/neg/nested-fn-print.check b/test/files/neg/nested-fn-print.check
index e6c3204..ea27855 100644
--- a/test/files/neg/nested-fn-print.check
+++ b/test/files/neg/nested-fn-print.check
@@ -3,17 +3,17 @@ nested-fn-print.scala:4: error: only classes can have declared but undefined mem
var x3: Int => Double
^
nested-fn-print.scala:7: error: type mismatch;
- found : java.lang.String("a")
+ found : String("a")
required: Int => (Float => Double)
x1 = "a"
^
nested-fn-print.scala:8: error: type mismatch;
- found : java.lang.String("b")
+ found : String("b")
required: (Int => Float) => Double
x2 = "b"
^
nested-fn-print.scala:9: error: type mismatch;
- found : java.lang.String("c")
+ found : String("c")
required: Int => Double
x3 = "c"
^
diff --git a/test/files/neg/newpat_unreachable.check b/test/files/neg/newpat_unreachable.check
new file mode 100644
index 0000000..08453ca
--- /dev/null
+++ b/test/files/neg/newpat_unreachable.check
@@ -0,0 +1,27 @@
+newpat_unreachable.scala:6: error: patterns after a variable pattern cannot match (SLS 8.1.1)
+If you intended to match against parameter b of method contrivedExample, you must use backticks, like: case `b` =>
+ case b => println("matched b")
+ ^
+newpat_unreachable.scala:7: error: unreachable code due to variable pattern 'b' on line 6
+If you intended to match against parameter c of method contrivedExample, you must use backticks, like: case `c` =>
+ case c => println("matched c")
+ ^
+newpat_unreachable.scala:8: error: unreachable code due to variable pattern 'b' on line 6
+If you intended to match against value d in class A, you must use backticks, like: case `d` =>
+ case d => println("matched d")
+ ^
+newpat_unreachable.scala:9: error: unreachable code due to variable pattern 'b' on line 6
+ case _ => println("matched neither")
+ ^
+newpat_unreachable.scala:22: error: patterns after a variable pattern cannot match (SLS 8.1.1)
+If you intended to match against parameter b of method g, you must use backticks, like: case `b` =>
+ case b => 1
+ ^
+newpat_unreachable.scala:23: error: unreachable code due to variable pattern 'b' on line 22
+If you intended to match against parameter c of method h, you must use backticks, like: case `c` =>
+ case c => 2
+ ^
+newpat_unreachable.scala:24: error: unreachable code due to variable pattern 'b' on line 22
+ case _ => 3
+ ^
+7 errors found
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/neg/newpat_unreachable.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/neg/newpat_unreachable.flags
diff --git a/test/files/neg/newpat_unreachable.scala b/test/files/neg/newpat_unreachable.scala
new file mode 100644
index 0000000..c9cc85c
--- /dev/null
+++ b/test/files/neg/newpat_unreachable.scala
@@ -0,0 +1,29 @@
+object Test {
+ class A {
+ val d = 55
+
+ def contrivedExample[A, B, C](a: A, b: B, c: C): Unit = a match {
+ case b => println("matched b")
+ case c => println("matched c")
+ case d => println("matched d")
+ case _ => println("matched neither")
+ }
+
+ def correctExample[A, B, C](a: A, b: B, c: C): Unit = a match {
+ case `b` => println("matched b")
+ case `c` => println("matched c")
+ case `d` => println("matched d")
+ case _ => println("matched neither")
+ }
+
+ def f[A](a: A) = {
+ def g[B](b: B) = {
+ def h[C](c: C) = a match {
+ case b => 1
+ case c => 2
+ case _ => 3
+ }
+ }
+ }
+ }
+}
diff --git a/test/files/neg/no-implicit-to-anyref.check b/test/files/neg/no-implicit-to-anyref.check
new file mode 100644
index 0000000..d94b57a
--- /dev/null
+++ b/test/files/neg/no-implicit-to-anyref.check
@@ -0,0 +1,28 @@
+no-implicit-to-anyref.scala:11: error: type mismatch;
+ found : Int(1)
+ required: AnyRef
+Note: an implicit exists from scala.Int => java.lang.Integer, but
+methods inherited from Object are rendered ambiguous. This is to avoid
+a blanket implicit which would convert any scala.Int to any AnyRef.
+You may wish to use a type ascription: `x: java.lang.Integer`.
+ 1: AnyRef
+ ^
+no-implicit-to-anyref.scala:17: error: type mismatch;
+ found : Any
+ required: AnyRef
+ (null: Any): AnyRef
+ ^
+no-implicit-to-anyref.scala:21: error: type mismatch;
+ found : AnyVal
+ required: AnyRef
+ (0: AnyVal): AnyRef
+ ^
+no-implicit-to-anyref.scala:27: error: type mismatch;
+ found : Test.AV
+ required: AnyRef
+Note that AV extends Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ new AV(0): AnyRef
+ ^
+four errors found
diff --git a/test/files/neg/no-implicit-to-anyref.scala b/test/files/neg/no-implicit-to-anyref.scala
new file mode 100644
index 0000000..3e3d373
--- /dev/null
+++ b/test/files/neg/no-implicit-to-anyref.scala
@@ -0,0 +1,29 @@
+// Checks that the state of standard implicits in Predef._ and scala._
+// doesn't allow us to unambiguously and implicitly convert AnyVal
+// and subtypes to AnyRef.
+//
+// In the days before value classes, this was precariously held be
+// the competing implicits Any => StringAdd and Any => StringFormat.
+// Since then, these have both become value classes, but seeing as
+// this happened simultaneously, we're still okay.
+object Test {
+ locally {
+ 1: AnyRef
+ }
+
+ locally {
+ // before this test case was added and ContextErrors was tweaked, this
+ // emitted: "Note that Any extends Any, not AnyRef."
+ (null: Any): AnyRef
+ }
+
+ locally {
+ (0: AnyVal): AnyRef
+ }
+
+ class AV(val a: Int) extends AnyVal
+
+ locally {
+ new AV(0): AnyRef
+ }
+}
diff --git a/test/files/neg/no-predef.check b/test/files/neg/no-predef.check
new file mode 100644
index 0000000..a63d8c5
--- /dev/null
+++ b/test/files/neg/no-predef.check
@@ -0,0 +1,14 @@
+no-predef.scala:2: error: type mismatch;
+ found : scala.Long(5L)
+ required: java.lang.Long
+ def f1 = 5L: java.lang.Long
+ ^
+no-predef.scala:3: error: type mismatch;
+ found : java.lang.Long
+ required: scala.Long
+ def f2 = new java.lang.Long(5) : Long
+ ^
+no-predef.scala:4: error: value map is not a member of String
+ def f3 = "abc" map (_ + 1)
+ ^
+three errors found
diff --git a/test/files/neg/no-predef.flags b/test/files/neg/no-predef.flags
new file mode 100644
index 0000000..3abc2d5
--- /dev/null
+++ b/test/files/neg/no-predef.flags
@@ -0,0 +1 @@
+-Yno-predef
\ No newline at end of file
diff --git a/test/files/neg/no-predef.scala b/test/files/neg/no-predef.scala
new file mode 100644
index 0000000..8466c79
--- /dev/null
+++ b/test/files/neg/no-predef.scala
@@ -0,0 +1,5 @@
+class NoPredef {
+ def f1 = 5L: java.lang.Long
+ def f2 = new java.lang.Long(5) : Long
+ def f3 = "abc" map (_ + 1)
+}
\ No newline at end of file
diff --git a/test/files/neg/not-possible-cause.check b/test/files/neg/not-possible-cause.check
new file mode 100644
index 0000000..5c09fa1
--- /dev/null
+++ b/test/files/neg/not-possible-cause.check
@@ -0,0 +1,9 @@
+not-possible-cause.scala:2: error: type mismatch;
+ found : a.type (with underlying type A)
+ required: AnyRef
+Note that A is bounded only by Equals, which means AnyRef is not a known parent.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def foo[A <: Product](a: A) { type X = a.type }
+ ^
+one error found
diff --git a/test/files/neg/not-possible-cause.scala b/test/files/neg/not-possible-cause.scala
new file mode 100644
index 0000000..83ec24d
--- /dev/null
+++ b/test/files/neg/not-possible-cause.scala
@@ -0,0 +1,3 @@
+object Foo {
+ def foo[A <: Product](a: A) { type X = a.type }
+}
diff --git a/test/files/neg/object-not-a-value.check b/test/files/neg/object-not-a-value.check
new file mode 100644
index 0000000..613210f
--- /dev/null
+++ b/test/files/neg/object-not-a-value.check
@@ -0,0 +1,4 @@
+object-not-a-value.scala:5: error: object java.util.List is not a value
+ List(1) map (_ + 1)
+ ^
+one error found
diff --git a/test/files/neg/object-not-a-value.scala b/test/files/neg/object-not-a-value.scala
new file mode 100644
index 0000000..2f894a3
--- /dev/null
+++ b/test/files/neg/object-not-a-value.scala
@@ -0,0 +1,7 @@
+object Test {
+ import java.util._
+
+ def main(args: Array[String]): Unit = {
+ List(1) map (_ + 1)
+ }
+}
diff --git a/test/files/neg/overloaded-implicit.flags b/test/files/neg/overloaded-implicit.flags
index 85d8eb2..7949c2a 100644
--- a/test/files/neg/overloaded-implicit.flags
+++ b/test/files/neg/overloaded-implicit.flags
@@ -1 +1 @@
--Xfatal-warnings
+-Xlint -Xfatal-warnings
diff --git a/test/files/neg/overloaded-unapply.check b/test/files/neg/overloaded-unapply.check
new file mode 100644
index 0000000..68a826b
--- /dev/null
+++ b/test/files/neg/overloaded-unapply.check
@@ -0,0 +1,14 @@
+overloaded-unapply.scala:18: error: ambiguous reference to overloaded definition,
+both method unapply in object List of type [a](xs: List[a])Option[Null]
+and method unapply in object List of type [a](xs: List[a])Option[(a, List[a])]
+match argument types (List[a])
+ case List(x, xs) => 7
+ ^
+overloaded-unapply.scala:22: error: cannot resolve overloaded unapply
+ case List(x, xs) => 7
+ ^
+overloaded-unapply.scala:12: error: method unapply is defined twice
+ conflicting symbols both originated in file 'overloaded-unapply.scala'
+ def unapply[a](xs: List[a]): Option[Null] = xs match {
+ ^
+three errors found
diff --git a/test/files/neg/overloaded-unapply.scala b/test/files/neg/overloaded-unapply.scala
new file mode 100644
index 0000000..3690962
--- /dev/null
+++ b/test/files/neg/overloaded-unapply.scala
@@ -0,0 +1,24 @@
+sealed abstract class List[+a]
+private case object Nil extends List[Nothing]
+private final case class Cons[+a](head: a, tail: List[a])
+extends List[a]
+
+object List {
+ def unapply[a](xs: List[a]): Option[(a, List[a])] = xs match {
+ case Nil => None
+ case Cons(x, xs) => Some(x, xs)
+ }
+
+ def unapply[a](xs: List[a]): Option[Null] = xs match {
+ case Nil => Some(null)
+ case Cons(_, _) => None
+ }
+
+ def foo[a](xs: List[a]) = xs match {
+ case List(x, xs) => 7
+ }
+
+ def bar(xs: Any) = xs match { // test error message OverloadedUnapplyError
+ case List(x, xs) => 7
+ }
+}
diff --git a/test/files/neg/override-object-flag.check b/test/files/neg/override-object-flag.check
new file mode 100644
index 0000000..3441651
--- /dev/null
+++ b/test/files/neg/override-object-flag.check
@@ -0,0 +1,5 @@
+override-object-flag.scala:3: error: overriding object Foo in trait A;
+ object Foo cannot override final member
+trait B extends A { override object Foo }
+ ^
+one error found
diff --git a/test/files/neg/override-object-flag.scala b/test/files/neg/override-object-flag.scala
new file mode 100644
index 0000000..74d00dd
--- /dev/null
+++ b/test/files/neg/override-object-flag.scala
@@ -0,0 +1,3 @@
+// no flag enabling it, fail
+trait A { object Foo }
+trait B extends A { override object Foo }
diff --git a/test/files/neg/override-object-no.check b/test/files/neg/override-object-no.check
new file mode 100644
index 0000000..9cfda80
--- /dev/null
+++ b/test/files/neg/override-object-no.check
@@ -0,0 +1,23 @@
+override-object-no.scala:14: error: overriding object Bar in trait Foo with object Bar in trait Foo2:
+an overriding object must conform to the overridden object's class bound;
+ found : case1.Bippy
+ required: case1.Bippy with case1.Bippo
+ override object Bar extends Bippy { // err
+ ^
+override-object-no.scala:21: error: overriding object Bar in trait Quux1 with object Bar in trait Quux2:
+an overriding object must conform to the overridden object's class bound;
+ found : AnyRef{def g: String}
+ required: AnyRef{def g: Int}
+ trait Quux2 extends Quux1 { override object Bar { def g = "abc" } } // err
+ ^
+override-object-no.scala:25: error: overriding object Bar in trait Quux3;
+ object Bar cannot override final member
+ trait Quux4 extends Quux3 { override object Bar } // err
+ ^
+override-object-no.scala:43: error: overriding object A in class Foo with object A in class P2:
+an overriding object must conform to the overridden object's class bound;
+ found : case2.Bar[List[String]]
+ required: case2.Bar[Traversable[String]]
+ override object A extends Bar[List[String]] // err
+ ^
+four errors found
diff --git a/test/files/neg/override-object-no.flags b/test/files/neg/override-object-no.flags
new file mode 100644
index 0000000..22e9a95
--- /dev/null
+++ b/test/files/neg/override-object-no.flags
@@ -0,0 +1 @@
+-Yoverride-objects
\ No newline at end of file
diff --git a/test/files/neg/override-object-no.scala b/test/files/neg/override-object-no.scala
new file mode 100644
index 0000000..45961e4
--- /dev/null
+++ b/test/files/neg/override-object-no.scala
@@ -0,0 +1,45 @@
+// See also pos/override-object-yes.scala
+
+package case1 {
+ // Missing interface in overriding object
+ class Bippy { def f = 1 }
+ trait Bippo
+
+ trait Foo {
+ object Bar extends Bippy with Bippo { override def f = 2 }
+ def f(x: Bippo)
+ def g = f(Bar)
+ }
+ trait Foo2 extends Foo {
+ override object Bar extends Bippy { // err
+ override def f = 3
+ }
+ }
+
+ // type mismatch in member
+ trait Quux1 { object Bar { def g = 55 } }
+ trait Quux2 extends Quux1 { override object Bar { def g = "abc" } } // err
+
+ // still can't override final objects!
+ trait Quux3 { final object Bar { } }
+ trait Quux4 extends Quux3 { override object Bar } // err
+}
+
+// type parameter as-seen-from business
+package case2 {
+ // invariance (see pos for the covariant case)
+ class Bar[T]
+
+ class Foo[T] {
+ object A extends Bar[T]
+ }
+
+ class Baz[S] extends Foo[S] {
+ override object A extends Bar[S]
+ }
+
+ class P1 extends Foo[Traversable[String]]
+ class P2 extends P1 {
+ override object A extends Bar[List[String]] // err
+ }
+}
diff --git a/test/files/neg/override.check b/test/files/neg/override.check
index 0336fb2..8be98bf 100644
--- a/test/files/neg/override.check
+++ b/test/files/neg/override.check
@@ -1,5 +1,5 @@
override.scala:9: error: overriding type T in trait A with bounds >: Int <: Int;
type T in trait B with bounds >: String <: String has incompatible type
- lazy val x : A with B = x
- ^
+ lazy val x : A with B = {println(""); x}
+ ^
one error found
diff --git a/test/files/neg/override.scala b/test/files/neg/override.scala
old mode 100644
new mode 100755
index 3e589b5..7975516
--- a/test/files/neg/override.scala
+++ b/test/files/neg/override.scala
@@ -6,7 +6,7 @@ trait X {
trait Y extends X {
trait B { type T >: String <: String }
- lazy val x : A with B = x
+ lazy val x : A with B = {println(""); x}
n = "foo"
}
diff --git a/test/files/neg/package-ob-case.check b/test/files/neg/package-ob-case.check
new file mode 100644
index 0000000..e6b2f85
--- /dev/null
+++ b/test/files/neg/package-ob-case.check
@@ -0,0 +1,5 @@
+package-ob-case.scala:3: error: it is not recommended to define classes/objects inside of package objects.
+If possible, define class X in package foo instead.
+ case class X(z: Int) { }
+ ^
+one error found
diff --git a/test/files/neg/package-ob-case.flags b/test/files/neg/package-ob-case.flags
new file mode 100644
index 0000000..6c1dd10
--- /dev/null
+++ b/test/files/neg/package-ob-case.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xlint
\ No newline at end of file
diff --git a/test/files/neg/package-ob-case.scala b/test/files/neg/package-ob-case.scala
new file mode 100644
index 0000000..91a1fb7
--- /dev/null
+++ b/test/files/neg/package-ob-case.scala
@@ -0,0 +1,5 @@
+package foo {
+ package object foo {
+ case class X(z: Int) { }
+ }
+}
diff --git a/test/files/neg/parent-inherited-twice-error.check b/test/files/neg/parent-inherited-twice-error.check
new file mode 100644
index 0000000..521a6c1
--- /dev/null
+++ b/test/files/neg/parent-inherited-twice-error.check
@@ -0,0 +1,7 @@
+parent-inherited-twice-error.scala:2: error: trait A is inherited twice
+class B extends A with A
+ ^
+parent-inherited-twice-error.scala:2: error: trait A is inherited twice
+class B extends A with A
+ ^
+two errors found
diff --git a/test/files/neg/parent-inherited-twice-error.scala b/test/files/neg/parent-inherited-twice-error.scala
new file mode 100644
index 0000000..7b433b9
--- /dev/null
+++ b/test/files/neg/parent-inherited-twice-error.scala
@@ -0,0 +1,2 @@
+trait A
+class B extends A with A
diff --git a/test/files/neg/pat_unreachable.check b/test/files/neg/pat_unreachable.check
index 4e1463d..c5706b7 100644
--- a/test/files/neg/pat_unreachable.check
+++ b/test/files/neg/pat_unreachable.check
@@ -4,4 +4,10 @@ pat_unreachable.scala:5: error: unreachable code
pat_unreachable.scala:9: error: unreachable code
case Seq(x, y) => List(x, y)
^
-two errors found
+pat_unreachable.scala:23: error: unreachable code
+ case c => println("matched c")
+ ^
+pat_unreachable.scala:24: error: unreachable code
+ case _ => println("matched neither")
+ ^
+four errors found
diff --git a/test/files/neg/pat_unreachable.flags b/test/files/neg/pat_unreachable.flags
new file mode 100644
index 0000000..cb8324a
--- /dev/null
+++ b/test/files/neg/pat_unreachable.flags
@@ -0,0 +1 @@
+-Xoldpatmat
\ No newline at end of file
diff --git a/test/files/neg/pat_unreachable.scala b/test/files/neg/pat_unreachable.scala
index fc0fd41..1f402e5 100644
--- a/test/files/neg/pat_unreachable.scala
+++ b/test/files/neg/pat_unreachable.scala
@@ -8,7 +8,7 @@ object Test extends App {
case Seq(x, y, _*) => x::y::Nil
case Seq(x, y) => List(x, y)
}
-
+
def not_unreachable(xs:Seq[Char]) = xs match {
case Seq(x, y, _*) => x::y::Nil
case Seq(x) => List(x)
@@ -17,4 +17,10 @@ object Test extends App {
case Seq(x, y) => x::y::Nil
case Seq(x, y, z, _*) => List(x,y)
}
+
+ def contrivedExample[A, B, C](a: A, b: B, c: C): Unit = a match {
+ case b => println("matched b")
+ case c => println("matched c")
+ case _ => println("matched neither")
+ }
}
diff --git a/test/files/neg/patmat-type-check.check b/test/files/neg/patmat-type-check.check
index 8f81ced..721217c 100644
--- a/test/files/neg/patmat-type-check.check
+++ b/test/files/neg/patmat-type-check.check
@@ -1,6 +1,15 @@
+patmat-type-check.scala:11: warning: fruitless type test: a value of type Test.Bop4[T] cannot also be a Seq[A]
+ def s3[T](x: Bop4[T]) = x match { case Seq('b', 'o', 'b') => true }
+ ^
+patmat-type-check.scala:15: warning: fruitless type test: a value of type Test.Bop5[_$1,T1,T2] cannot also be a Seq[A]
+ def s4[T1, T2](x: Bop5[_, T1, T2]) = x match { case Seq('b', 'o', 'b') => true }
+ ^
+patmat-type-check.scala:19: warning: fruitless type test: a value of type Test.Bop3[T] cannot also be a Seq[A]
+ def f4[T](x: Bop3[T]) = x match { case Seq('b', 'o', 'b') => true }
+ ^
patmat-type-check.scala:22: error: scrutinee is incompatible with pattern type;
found : Seq[A]
- required: java.lang.String
+ required: String
def f1 = "bob".reverse match { case Seq('b', 'o', 'b') => true } // fail
^
patmat-type-check.scala:23: error: scrutinee is incompatible with pattern type;
@@ -18,4 +27,5 @@ patmat-type-check.scala:30: error: scrutinee is incompatible with pattern type;
required: Test.Bop3[Char]
def f4[T](x: Bop3[Char]) = x match { case Seq('b', 'o', 'b') => true } // fail
^
+three warnings found
four errors found
diff --git a/test/files/neg/patmatexhaust.check b/test/files/neg/patmatexhaust.check
index 8aa9238..4556e66 100644
--- a/test/files/neg/patmatexhaust.check
+++ b/test/files/neg/patmatexhaust.check
@@ -1,38 +1,40 @@
-patmatexhaust.scala:7: error: match is not exhaustive!
-missing combination Baz
-
+patmatexhaust.scala:7: error: match may not be exhaustive.
+It would fail on the following input: Baz
def ma1(x:Foo) = x match {
^
-patmatexhaust.scala:11: error: match is not exhaustive!
-missing combination Bar
-
+patmatexhaust.scala:11: error: match may not be exhaustive.
+It would fail on the following input: Bar(_)
def ma2(x:Foo) = x match {
^
-patmatexhaust.scala:23: error: match is not exhaustive!
-missing combination Kult Kult
-missing combination Qult Qult
-
+patmatexhaust.scala:23: error: match may not be exhaustive.
+It would fail on the following inputs: (Kult(_), Kult(_)), (Qult(), Qult())
def ma3(x:Mult) = (x,x) match { // not exhaustive
^
-patmatexhaust.scala:49: error: match is not exhaustive!
-missing combination Gp
-missing combination Gu
-
+patmatexhaust.scala:49: error: match may not be exhaustive.
+It would fail on the following inputs: Gp(), Gu
def ma4(x:Deep) = x match { // missing cases: Gu, Gp
^
-patmatexhaust.scala:53: error: match is not exhaustive!
-missing combination Gp
-
- def ma5(x:Deep) = x match { // Gp
+patmatexhaust.scala:55: error: unreachable code
+ case _ if 1 == 0 =>
+ ^
+patmatexhaust.scala:53: error: match may not be exhaustive.
+It would fail on the following input: Gp()
+ def ma5(x:Deep) = x match {
^
-patmatexhaust.scala:59: error: match is not exhaustive!
-missing combination Nil
-
- def ma6() = List(1,2) match { // give up
- ^
-patmatexhaust.scala:75: error: match is not exhaustive!
-missing combination B
-
+patmatexhaust.scala:75: error: match may not be exhaustive.
+It would fail on the following input: B()
def ma9(x: B) = x match {
^
-7 errors found
+patmatexhaust.scala:100: error: match may not be exhaustive.
+It would fail on the following input: C1()
+ def ma10(x: C) = x match { // not exhaustive: C1 is not sealed.
+ ^
+patmatexhaust.scala:114: error: match may not be exhaustive.
+It would fail on the following inputs: D1, D2()
+ def ma10(x: C) = x match { // not exhaustive: C1 has subclasses.
+ ^
+patmatexhaust.scala:126: error: match may not be exhaustive.
+It would fail on the following input: C1()
+ def ma10(x: C) = x match { // not exhaustive: C1 is not abstract.
+ ^
+10 errors found
diff --git a/test/files/neg/patmatexhaust.flags b/test/files/neg/patmatexhaust.flags
index e8fb65d..85d8eb2 100644
--- a/test/files/neg/patmatexhaust.flags
+++ b/test/files/neg/patmatexhaust.flags
@@ -1 +1 @@
--Xfatal-warnings
\ No newline at end of file
+-Xfatal-warnings
diff --git a/test/files/neg/patmatexhaust.scala b/test/files/neg/patmatexhaust.scala
index b9622b5..ceb960e 100644
--- a/test/files/neg/patmatexhaust.scala
+++ b/test/files/neg/patmatexhaust.scala
@@ -50,7 +50,7 @@ class TestSealedExhaustive { // compile only
case Ga =>
}
- def ma5(x:Deep) = x match { // Gp
+ def ma5(x:Deep) = x match {
case Gu =>
case _ if 1 == 0 =>
case Ga =>
@@ -76,14 +76,56 @@ class TestSealedExhaustive { // compile only
case B1() => true // missing B, which is not abstract so must be included
case B2 => true
}
- sealed abstract class C
- abstract class C1 extends C
- object C2 extends C
- case object C6 extends C
- class C3 extends C1
- case class C4() extends C3
- def ma10(x: C) = x match { // exhaustive
- case C4() => true
- case C2 | C6 => true
+
+ object ob1 {
+ sealed abstract class C
+ sealed abstract class C1 extends C
+ object C2 extends C
+ case class C3() extends C
+ case object C4 extends C
+
+ def ma10(x: C) = x match { // exhaustive: abstract sealed C1 is dead end.
+ case C3() => true
+ case C2 | C4 => true
+ }
+ }
+
+ object ob2 {
+ sealed abstract class C
+ abstract class C1 extends C
+ object C2 extends C
+ case class C3() extends C
+ case object C4 extends C
+
+ def ma10(x: C) = x match { // not exhaustive: C1 is not sealed.
+ case C3() => true
+ case C2 | C4 => true
+ }
+ }
+ object ob3 {
+ sealed abstract class C
+ sealed abstract class C1 extends C
+ object D1 extends C1
+ case class D2() extends C1
+ object C2 extends C
+ case class C3() extends C
+ case object C4 extends C
+
+ def ma10(x: C) = x match { // not exhaustive: C1 has subclasses.
+ case C3() => true
+ case C2 | C4 => true
+ }
+ }
+ object ob4 {
+ sealed abstract class C
+ sealed class C1 extends C
+ object C2 extends C
+ case class C3() extends C
+ case object C4 extends C
+
+ def ma10(x: C) = x match { // not exhaustive: C1 is not abstract.
+ case C3() => true
+ case C2 | C4 => true
+ }
}
}
diff --git a/test/files/neg/permanent-blindness.check b/test/files/neg/permanent-blindness.check
new file mode 100644
index 0000000..18b4543
--- /dev/null
+++ b/test/files/neg/permanent-blindness.check
@@ -0,0 +1,10 @@
+permanent-blindness.scala:10: error: imported `Bippy' is permanently hidden by definition of class Bippy in package bar
+ import foo.{ Bippy, Bop, Dingus }
+ ^
+permanent-blindness.scala:10: error: imported `Bop' is permanently hidden by definition of object Bop in package bar
+ import foo.{ Bippy, Bop, Dingus }
+ ^
+permanent-blindness.scala:10: error: imported `Dingus' is permanently hidden by definition of object Dingus in package bar
+ import foo.{ Bippy, Bop, Dingus }
+ ^
+three errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/permanent-blindness.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/permanent-blindness.flags
diff --git a/test/files/neg/permanent-blindness.scala b/test/files/neg/permanent-blindness.scala
new file mode 100644
index 0000000..b6d09d6
--- /dev/null
+++ b/test/files/neg/permanent-blindness.scala
@@ -0,0 +1,22 @@
+package foo {
+ class Bippy
+ object Bop {
+ def fff = 5
+ }
+ object Dingus
+}
+
+package bar {
+ import foo.{ Bippy, Bop, Dingus }
+
+ class Bippy
+ object Bop
+ object Dingus
+
+
+ class Ding {
+ def fff = 5
+
+ def g = new Bippy
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/primitive-sigs-1.check b/test/files/neg/primitive-sigs-1.check
index befb821..77dc457 100644
--- a/test/files/neg/primitive-sigs-1.check
+++ b/test/files/neg/primitive-sigs-1.check
@@ -1,6 +1,6 @@
-A_3.scala:3: error: type mismatch;
+B.scala:3: error: type mismatch;
found : Bippy
- required: AC[java.lang.Integer]
- J_2.f(new Bippy())
- ^
+ required: AC[Integer]
+ J.f(new Bippy())
+ ^
one error found
diff --git a/test/files/neg/primitive-sigs-1/A_1.scala b/test/files/neg/primitive-sigs-1/A.scala
similarity index 100%
rename from test/files/neg/primitive-sigs-1/A_1.scala
rename to test/files/neg/primitive-sigs-1/A.scala
diff --git a/test/files/neg/primitive-sigs-1/A_3.scala b/test/files/neg/primitive-sigs-1/A_3.scala
deleted file mode 100644
index dec617a..0000000
--- a/test/files/neg/primitive-sigs-1/A_3.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object Test {
- def main(args: Array[String]): Unit = {
- J_2.f(new Bippy())
- }
-}
diff --git a/test/files/neg/primitive-sigs-1/B.scala b/test/files/neg/primitive-sigs-1/B.scala
new file mode 100644
index 0000000..0958bcd
--- /dev/null
+++ b/test/files/neg/primitive-sigs-1/B.scala
@@ -0,0 +1,5 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ J.f(new Bippy())
+ }
+}
diff --git a/test/files/neg/primitive-sigs-1/J.java b/test/files/neg/primitive-sigs-1/J.java
new file mode 100644
index 0000000..2e43b83
--- /dev/null
+++ b/test/files/neg/primitive-sigs-1/J.java
@@ -0,0 +1,8 @@
+// java: often the java or scala compiler will save us from
+// the untruth in the signature, but not always.
+public class J {
+ public static Integer f(AC<Integer> x) { return x.f(); }
+ public static void main(String[] args) {
+ f(new Bippy());
+ }
+}
diff --git a/test/files/neg/primitive-sigs-1/J_2.java b/test/files/neg/primitive-sigs-1/J_2.java
deleted file mode 100644
index b416bef..0000000
--- a/test/files/neg/primitive-sigs-1/J_2.java
+++ /dev/null
@@ -1,8 +0,0 @@
-// java: often the java or scala compiler will save us from
-// the untruth in the signature, but not always.
-public class J_2 {
- public static Integer f(AC<Integer> x) { return x.f(); }
- public static void main(String[] args) {
- f(new Bippy());
- }
-}
diff --git a/test/files/neg/protected-constructors.check b/test/files/neg/protected-constructors.check
index d6b9221..f137158 100644
--- a/test/files/neg/protected-constructors.check
+++ b/test/files/neg/protected-constructors.check
@@ -3,23 +3,23 @@ protected-constructors.scala:17: error: too many arguments for constructor Foo1:
^
protected-constructors.scala:18: error: constructor Foo2 in class Foo2 cannot be accessed in object P
Access to protected constructor Foo2 not permitted because
- enclosing class object P in package hungus is not a subclass of
+ enclosing object P in package hungus is not a subclass of
class Foo2 in package dingus where target is defined
val foo2 = new Foo2("abc")
^
protected-constructors.scala:19: error: class Foo3 in object Ding cannot be accessed in object dingus.Ding
Access to protected class Foo3 not permitted because
- enclosing class object P in package hungus is not a subclass of
+ enclosing object P in package hungus is not a subclass of
object Ding in package dingus where target is defined
val foo3 = new Ding.Foo3("abc")
^
protected-constructors.scala:15: error: class Foo3 in object Ding cannot be accessed in object dingus.Ding
Access to protected class Foo3 not permitted because
- enclosing class object P in package hungus is not a subclass of
+ enclosing object P in package hungus is not a subclass of
object Ding in package dingus where target is defined
class Bar3 extends Ding.Foo3("abc")
^
-protected-constructors.scala:15: error: too many arguments for constructor Object: ()java.lang.Object
+protected-constructors.scala:15: error: too many arguments for constructor Object: ()Object
class Bar3 extends Ding.Foo3("abc")
^
5 errors found
diff --git a/test/files/neg/protected-static-fail.check b/test/files/neg/protected-static-fail.check
new file mode 100644
index 0000000..e149bc0
--- /dev/null
+++ b/test/files/neg/protected-static-fail.check
@@ -0,0 +1,16 @@
+S.scala:5: error: method f in object J cannot be accessed in object bippy.J
+ J.f()
+ ^
+S.scala:6: error: method f1 in object S1 cannot be accessed in object bippy.S1
+ Access to protected method f1 not permitted because
+ enclosing object Test in package bippy is not a subclass of
+ object S1 in package bippy where target is defined
+ S1.f1()
+ ^
+S.scala:8: error: method f2 in class S2 cannot be accessed in bippy.S2
+ Access to protected method f2 not permitted because
+ enclosing object Test in package bippy is not a subclass of
+ class S2 in package bippy where target is defined
+ x.f2()
+ ^
+three errors found
diff --git a/test/files/neg/protected-static-fail/J.java b/test/files/neg/protected-static-fail/J.java
new file mode 100644
index 0000000..c340800
--- /dev/null
+++ b/test/files/neg/protected-static-fail/J.java
@@ -0,0 +1,7 @@
+package bippy;
+
+public class J {
+ private static String f() {
+ return "hi mom";
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/protected-static-fail/S.scala b/test/files/neg/protected-static-fail/S.scala
new file mode 100644
index 0000000..f9dd89d
--- /dev/null
+++ b/test/files/neg/protected-static-fail/S.scala
@@ -0,0 +1,10 @@
+package bippy
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ J.f()
+ S1.f1()
+ val x = new S2
+ x.f2()
+ }
+}
diff --git a/test/files/neg/protected-static-fail/S0.scala b/test/files/neg/protected-static-fail/S0.scala
new file mode 100644
index 0000000..1a3d192
--- /dev/null
+++ b/test/files/neg/protected-static-fail/S0.scala
@@ -0,0 +1,9 @@
+package bippy
+
+object S1 {
+ protected def f1() = "hi mom"
+}
+
+class S2 {
+ protected def f2() = "hi mom"
+}
\ No newline at end of file
diff --git a/test/files/neg/qualifying-class-error-1.check b/test/files/neg/qualifying-class-error-1.check
new file mode 100644
index 0000000..c70db9b
--- /dev/null
+++ b/test/files/neg/qualifying-class-error-1.check
@@ -0,0 +1,4 @@
+qualifying-class-error-1.scala:2: error: this can be used only in a class, object, or template
+class B extends A(this.getClass.getName.length)
+ ^
+one error found
diff --git a/test/files/neg/qualifying-class-error-1.scala b/test/files/neg/qualifying-class-error-1.scala
new file mode 100644
index 0000000..09152fe
--- /dev/null
+++ b/test/files/neg/qualifying-class-error-1.scala
@@ -0,0 +1,2 @@
+class A(val i:Int)
+class B extends A(this.getClass.getName.length)
diff --git a/test/files/neg/qualifying-class-error-2.check b/test/files/neg/qualifying-class-error-2.check
new file mode 100644
index 0000000..50c2759
--- /dev/null
+++ b/test/files/neg/qualifying-class-error-2.check
@@ -0,0 +1,4 @@
+qualifying-class-error-2.scala:9: error: A is not an enclosing class
+ protected[A] def f() {}
+ ^
+one error found
diff --git a/test/files/neg/qualifying-class-error-2.scala b/test/files/neg/qualifying-class-error-2.scala
new file mode 100644
index 0000000..d3aa866
--- /dev/null
+++ b/test/files/neg/qualifying-class-error-2.scala
@@ -0,0 +1,11 @@
+package A {
+ trait X {
+ protected[A] def f()
+ }
+}
+
+package B {
+ class Y extends A.X {
+ protected[A] def f() {}
+ }
+}
diff --git a/test/files/neg/reassignment.check b/test/files/neg/reassignment.check
new file mode 100644
index 0000000..f0effd1
--- /dev/null
+++ b/test/files/neg/reassignment.check
@@ -0,0 +1,13 @@
+reassignment.scala:2: error: not found: value x
+ x = 5
+ ^
+reassignment.scala:3: error: not found: value y
+ y := 45
+ ^
+reassignment.scala:4: error: not found: value y
+ y += 45
+ ^
+reassignment.scala:6: error: reassignment to val
+ z = 51
+ ^
+four errors found
diff --git a/test/files/neg/reassignment.scala b/test/files/neg/reassignment.scala
new file mode 100644
index 0000000..e31eefb
--- /dev/null
+++ b/test/files/neg/reassignment.scala
@@ -0,0 +1,7 @@
+class A {
+ x = 5
+ y := 45
+ y += 45
+ val z = 50
+ z = 51
+}
\ No newline at end of file
diff --git a/test/files/neg/reflection-names-neg.check b/test/files/neg/reflection-names-neg.check
new file mode 100644
index 0000000..a56a19e
--- /dev/null
+++ b/test/files/neg/reflection-names-neg.check
@@ -0,0 +1,10 @@
+reflection-names-neg.scala:5: error: type mismatch;
+ found : String("abc")
+ required: reflect.runtime.universe.Name
+Note that implicit conversions are not applicable because they are ambiguous:
+ both method stringToTermName in trait Names of type (s: String)reflect.runtime.universe.TermName
+ and method stringToTypeName in trait Names of type (s: String)reflect.runtime.universe.TypeName
+ are possible conversion functions from String("abc") to reflect.runtime.universe.Name
+ val x2 = ("abc": Name) drop 1 // error
+ ^
+one error found
diff --git a/test/files/neg/reflection-names-neg.scala b/test/files/neg/reflection-names-neg.scala
new file mode 100644
index 0000000..7283d16
--- /dev/null
+++ b/test/files/neg/reflection-names-neg.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Test {
+ val x1 = "abc" drop 1 // "bc": String
+ val x2 = ("abc": Name) drop 1 // error
+}
diff --git a/test/files/neg/reify_ann2b.check b/test/files/neg/reify_ann2b.check
new file mode 100644
index 0000000..d32beda
--- /dev/null
+++ b/test/files/neg/reify_ann2b.check
@@ -0,0 +1,4 @@
+reify_ann2b.scala:9: error: inner classes cannot be classfile annotations
+ class ann(bar: String) extends annotation.ClassfileAnnotation
+ ^
+one error found
diff --git a/test/files/neg/reify_ann2b.scala b/test/files/neg/reify_ann2b.scala
new file mode 100644
index 0000000..72d8c61
--- /dev/null
+++ b/test/files/neg/reify_ann2b.scala
@@ -0,0 +1,28 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ // test 1: reify
+ val tree = reify{
+ class ann(bar: String) extends annotation.ClassfileAnnotation
+
+ @ann(bar="1a") @ann(bar="1b") class C[@ann(bar="2a") @ann(bar="2b") T](@ann(bar="3a") @ann(bar="3b") x: T @ann(bar="4a") @ann(bar="4b")) {
+ @ann(bar="5a") @ann(bar="5b") def f(x: Int @ann(bar="6a") @ann(bar="6b")) = {
+ @ann(bar="7a") @ann(bar="7b") val r = (x + 3): @ann(bar="8a") @ann(bar="8b")
+ val s = 4: Int @ann(bar="9a") @ann(bar="9b")
+ r + s
+ }
+ }
+ }.tree
+ println(tree.toString)
+
+ // test 2: import and typecheck
+ val toolbox = cm.mkToolBox()
+ val ttree = toolbox.typeCheck(tree)
+ println(ttree.toString)
+
+ // test 3: import and compile
+ toolbox.eval(tree)
+}
\ No newline at end of file
diff --git a/test/files/neg/reify_metalevel_breach_+0_refers_to_1.check b/test/files/neg/reify_metalevel_breach_+0_refers_to_1.check
new file mode 100644
index 0000000..75b7555
--- /dev/null
+++ b/test/files/neg/reify_metalevel_breach_+0_refers_to_1.check
@@ -0,0 +1,7 @@
+reify_metalevel_breach_+0_refers_to_1.scala:10: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
+cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
+if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
+import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
+ inner.splice
+ ^
+one error found
diff --git a/test/files/neg/reify_metalevel_breach_+0_refers_to_1.scala b/test/files/neg/reify_metalevel_breach_+0_refers_to_1.scala
new file mode 100644
index 0000000..e4d1edf
--- /dev/null
+++ b/test/files/neg/reify_metalevel_breach_+0_refers_to_1.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = reify{
+ val x = 2
+ val inner = reify{x}
+ inner.splice
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.check b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.check
new file mode 100644
index 0000000..ca5556d
--- /dev/null
+++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.check
@@ -0,0 +1,7 @@
+reify_metalevel_breach_-1_refers_to_0_a.scala:9: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
+cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
+if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
+import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
+ val code = reify{outer.splice.splice}
+ ^
+one error found
diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.scala b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.scala
new file mode 100644
index 0000000..7397441
--- /dev/null
+++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_a.scala
@@ -0,0 +1,14 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val x = 2
+ val outer = reify{reify{x}}
+ val code = reify{outer.splice.splice}
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.check b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.check
new file mode 100644
index 0000000..e34cb1a
--- /dev/null
+++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.check
@@ -0,0 +1,7 @@
+reify_metalevel_breach_-1_refers_to_0_b.scala:12: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
+cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
+if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
+import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
+ }.splice
+ ^
+one error found
diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.scala b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.scala
new file mode 100644
index 0000000..4f27a44
--- /dev/null
+++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_0_b.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val x = 2
+ val code = reify{
+ {
+ val inner = reify{reify{x}}
+ inner.splice
+ }.splice
+ }
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_1.check b/test/files/neg/reify_metalevel_breach_-1_refers_to_1.check
new file mode 100644
index 0000000..90b0e8d
--- /dev/null
+++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_1.check
@@ -0,0 +1,7 @@
+reify_metalevel_breach_-1_refers_to_1.scala:10: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
+cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
+if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
+import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
+ inner.splice.splice
+ ^
+one error found
diff --git a/test/files/neg/reify_metalevel_breach_-1_refers_to_1.scala b/test/files/neg/reify_metalevel_breach_-1_refers_to_1.scala
new file mode 100644
index 0000000..2f63730
--- /dev/null
+++ b/test/files/neg/reify_metalevel_breach_-1_refers_to_1.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = reify{
+ val x = 2
+ val inner = reify{reify{x}}
+ inner.splice.splice
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/neg/reify_nested_inner_refers_to_local.check b/test/files/neg/reify_nested_inner_refers_to_local.check
new file mode 100644
index 0000000..68689b1
--- /dev/null
+++ b/test/files/neg/reify_nested_inner_refers_to_local.check
@@ -0,0 +1,7 @@
+reify_nested_inner_refers_to_local.scala:9: error: the splice cannot be resolved statically, which means there is a cross-stage evaluation involved.
+cross-stage evaluations need to be invoked explicitly, so we're showing you this error.
+if you're sure this is not an oversight, add scala-compiler.jar to the classpath,
+import `scala.tools.reflect.Eval` and call `<your expr>.eval` instead.
+ reify{x}.splice
+ ^
+one error found
diff --git a/test/files/neg/reify_nested_inner_refers_to_local.scala b/test/files/neg/reify_nested_inner_refers_to_local.scala
new file mode 100644
index 0000000..75ed1bf
--- /dev/null
+++ b/test/files/neg/reify_nested_inner_refers_to_local.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = reify{
+ val x = 2
+ reify{x}.splice
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/neg/saferJavaConversions.check b/test/files/neg/saferJavaConversions.check
new file mode 100644
index 0000000..0e53d2c
--- /dev/null
+++ b/test/files/neg/saferJavaConversions.check
@@ -0,0 +1,6 @@
+saferJavaConversions.scala:13: error: type mismatch;
+ found : String("a")
+ required: Foo
+ val v = map.get("a") // now this is a type error
+ ^
+one error found
diff --git a/test/files/neg/saferJavaConversions.scala b/test/files/neg/saferJavaConversions.scala
new file mode 100644
index 0000000..f061120
--- /dev/null
+++ b/test/files/neg/saferJavaConversions.scala
@@ -0,0 +1,20 @@
+
+case class Foo(s: String)
+
+object Test {
+ def f1 = {
+ import scala.collection.JavaConversions._
+ val map: Map[Foo, String] = Map(Foo("a") -> "a", Foo("b") -> "b")
+ val v = map.get("a") // should be a type error, actually returns null
+ }
+ def f2 = {
+ import scala.collection.convert.wrapAsScala._
+ val map: Map[Foo, String] = Map(Foo("a") -> "a", Foo("b") -> "b")
+ val v = map.get("a") // now this is a type error
+ }
+ def f3 = {
+ import scala.collection.convert.wrapAsJava._
+ val map: Map[Foo, String] = Map(Foo("a") -> "a", Foo("b") -> "b")
+ val v = map.get("a")
+ }
+}
diff --git a/test/files/neg/scopes.check b/test/files/neg/scopes.check
index 2f2eaa7..f8e8c37 100644
--- a/test/files/neg/scopes.check
+++ b/test/files/neg/scopes.check
@@ -7,6 +7,9 @@ scopes.scala:5: error: x is already defined as value x
scopes.scala:8: error: y is already defined as value y
val y: Float = .0f
^
+scopes.scala:6: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ {
+ ^
scopes.scala:11: error: x is already defined as value x
def f1(x: Int, x: Float) = x
^
@@ -19,4 +22,5 @@ scopes.scala:13: error: x is already defined as value x
scopes.scala:15: error: x is already defined as value x
case x::x => x
^
+one warning found
7 errors found
diff --git a/test/files/neg/sealed-java-enums.check b/test/files/neg/sealed-java-enums.check
index 9a4bd42..20d00c8 100644
--- a/test/files/neg/sealed-java-enums.check
+++ b/test/files/neg/sealed-java-enums.check
@@ -1,8 +1,5 @@
-sealed-java-enums.scala:5: error: match is not exhaustive!
-missing combination BLOCKED
-missing combination TERMINATED
-missing combination TIMED_WAITING
-
+sealed-java-enums.scala:5: error: match may not be exhaustive.
+It would fail on the following inputs: BLOCKED, TERMINATED, TIMED_WAITING
def f(state: State) = state match {
^
one error found
diff --git a/test/files/neg/sensitive2.check b/test/files/neg/sensitive2.check
new file mode 100644
index 0000000..19152fe
--- /dev/null
+++ b/test/files/neg/sensitive2.check
@@ -0,0 +1,10 @@
+sensitive2.scala:6: error: type mismatch;
+ found : String("abc")
+ required: Test.Foo[_]
+Note that implicit conversions are not applicable because they are ambiguous:
+ both method foo1 in object Test of type [A](a: A)Test.Foo[A]
+ and method foo2 in object Test of type (a: Any)Test.Foo[String]
+ are possible conversion functions from String("abc") to Test.Foo[_]
+ val a: Foo[_] = "abc"
+ ^
+one error found
diff --git a/test/files/neg/sensitive2.scala b/test/files/neg/sensitive2.scala
new file mode 100644
index 0000000..92b91be
--- /dev/null
+++ b/test/files/neg/sensitive2.scala
@@ -0,0 +1,8 @@
+object Test {
+ class Foo[A](z: A)
+ implicit def foo1[A](a: A): Foo[A] = new Foo(a)
+ implicit def foo2(a: Any): Foo[String] = new Foo("123")
+
+ val a: Foo[_] = "abc"
+
+}
\ No newline at end of file
diff --git a/test/files/neg/specification-scopes.check b/test/files/neg/specification-scopes.check
new file mode 100644
index 0000000..7af9842
--- /dev/null
+++ b/test/files/neg/specification-scopes.check
@@ -0,0 +1,12 @@
+P_2.scala:14: error: reference to x is ambiguous;
+it is both defined in object C and imported subsequently by
+import Q.X._
+ println("L14: "+x) // reference to 'x' is ambiguous here
+ ^
+P_2.scala:19: error: reference to y is ambiguous;
+it is imported twice in the same scope by
+import P.X._
+and import X.y
+ println("L19: "+y) // reference to 'y' is ambiguous here
+ ^
+two errors found
diff --git a/test/files/neg/specification-scopes/P_1.scala b/test/files/neg/specification-scopes/P_1.scala
new file mode 100644
index 0000000..3b11f11
--- /dev/null
+++ b/test/files/neg/specification-scopes/P_1.scala
@@ -0,0 +1,6 @@
+package P {
+ object X { val x = 1; val y = 2; }
+}
+package Q {
+ object X { val x = true; val y = "" }
+}
diff --git a/test/files/neg/specification-scopes/P_2.scala b/test/files/neg/specification-scopes/P_2.scala
new file mode 100644
index 0000000..d59f82e
--- /dev/null
+++ b/test/files/neg/specification-scopes/P_2.scala
@@ -0,0 +1,21 @@
+package P { // 'X' bound by package clause
+ import Console._ // 'println' bound by wildcard import
+ object A {
+ println("L4: "+X) // 'X' refers to 'P.X' here
+ object B {
+ import Q._ // 'X' bound by wildcard import
+ println("L7: "+X) // 'X' refers to 'Q.X' here
+ import X._ // 'x' and 'y' bound by wildcard import
+ println("L8: "+x) // 'x' refers to 'Q.X.x' here
+ object C {
+ val x = 3 // 'x' bound by local definition
+ println("L12: "+x); // 'x' refers to constant '3' here
+ { import Q.X._ // 'x' and 'y' bound by wildcard
+ println("L14: "+x) // reference to 'x' is ambiguous here
+ import X.y // 'y' bound by explicit import
+ println("L16: "+y); // 'y' refers to 'Q.X.y' here
+ { val x = "abc" // 'x' bound by local definition
+ import P.X._ // 'x' and 'y' bound by wildcard
+ println("L19: "+y) // reference to 'y' is ambiguous here
+ println("L20: "+x) // 'x' refers to string ''abc'' here
+}}}}}}
diff --git a/test/files/neg/stmt-expr-discard.check b/test/files/neg/stmt-expr-discard.check
new file mode 100644
index 0000000..2d6420a
--- /dev/null
+++ b/test/files/neg/stmt-expr-discard.check
@@ -0,0 +1,7 @@
+stmt-expr-discard.scala:3: error: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ + 2
+ ^
+stmt-expr-discard.scala:4: error: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ - 4
+ ^
+two errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/stmt-expr-discard.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/stmt-expr-discard.flags
diff --git a/test/files/neg/stmt-expr-discard.scala b/test/files/neg/stmt-expr-discard.scala
new file mode 100644
index 0000000..e60c854
--- /dev/null
+++ b/test/files/neg/stmt-expr-discard.scala
@@ -0,0 +1,5 @@
+class A {
+ def f = 1
+ + 2
+ - 4
+}
diff --git a/test/files/neg/stringinterpolation_macro-neg.check b/test/files/neg/stringinterpolation_macro-neg.check
new file mode 100644
index 0000000..457f497
--- /dev/null
+++ b/test/files/neg/stringinterpolation_macro-neg.check
@@ -0,0 +1,70 @@
+stringinterpolation_macro-neg.scala:8: error: too few parts
+ new StringContext().f()
+ ^
+stringinterpolation_macro-neg.scala:9: error: too few arguments for interpolated string
+ new StringContext("", " is ", "%2d years old").f(s)
+ ^
+stringinterpolation_macro-neg.scala:10: error: too many arguments for interpolated string
+ new StringContext("", " is ", "%2d years old").f(s, d, d)
+ ^
+stringinterpolation_macro-neg.scala:11: error: too few arguments for interpolated string
+ new StringContext("", "").f()
+ ^
+stringinterpolation_macro-neg.scala:14: error: type mismatch;
+ found : String
+ required: Boolean
+ f"$s%b"
+ ^
+stringinterpolation_macro-neg.scala:15: error: type mismatch;
+ found : String
+ required: Char
+ f"$s%c"
+ ^
+stringinterpolation_macro-neg.scala:16: error: type mismatch;
+ found : Double
+ required: Char
+ f"$f%c"
+ ^
+stringinterpolation_macro-neg.scala:17: error: type mismatch;
+ found : String
+ required: Int
+ f"$s%x"
+ ^
+stringinterpolation_macro-neg.scala:18: error: type mismatch;
+ found : Boolean
+ required: Int
+ f"$b%d"
+ ^
+stringinterpolation_macro-neg.scala:19: error: type mismatch;
+ found : String
+ required: Int
+ f"$s%d"
+ ^
+stringinterpolation_macro-neg.scala:20: error: type mismatch;
+ found : Double
+ required: Int
+ f"$f%o"
+ ^
+stringinterpolation_macro-neg.scala:21: error: type mismatch;
+ found : String
+ required: Double
+ f"$s%e"
+ ^
+stringinterpolation_macro-neg.scala:22: error: type mismatch;
+ found : Boolean
+ required: Double
+ f"$b%f"
+ ^
+stringinterpolation_macro-neg.scala:27: error: type mismatch;
+ found : String
+ required: Int
+Note that implicit conversions are not applicable because they are ambiguous:
+ both value strToInt2 of type String => Int
+ and value strToInt1 of type String => Int
+ are possible conversion functions from String to Int
+ f"$s%d"
+ ^
+stringinterpolation_macro-neg.scala:30: error: illegal conversion character
+ f"$s%i"
+ ^
+15 errors found
diff --git a/test/files/neg/stringinterpolation_macro-neg.scala b/test/files/neg/stringinterpolation_macro-neg.scala
new file mode 100644
index 0000000..ac9d97d
--- /dev/null
+++ b/test/files/neg/stringinterpolation_macro-neg.scala
@@ -0,0 +1,31 @@
+object Test extends App {
+ val s = "Scala"
+ val d = 8
+ val b = false
+ val f = 3.14159
+
+ // 1) number of arguments
+ new StringContext().f()
+ new StringContext("", " is ", "%2d years old").f(s)
+ new StringContext("", " is ", "%2d years old").f(s, d, d)
+ new StringContext("", "").f()
+
+ // 2) Interpolation mismatches
+ f"$s%b"
+ f"$s%c"
+ f"$f%c"
+ f"$s%x"
+ f"$b%d"
+ f"$s%d"
+ f"$f%o"
+ f"$s%e"
+ f"$b%f"
+
+ {
+ implicit val strToInt1 = (s: String) => 1
+ implicit val strToInt2 = (s: String) => 2
+ f"$s%d"
+ }
+
+ f"$s%i"
+}
diff --git a/test/files/neg/structural.check b/test/files/neg/structural.check
index 100b989..5b2f352 100644
--- a/test/files/neg/structural.check
+++ b/test/files/neg/structural.check
@@ -1,31 +1,28 @@
structural.scala:47: error: Parameter type in structural refinement may not refer to the type of that refinement (self type)
val s1 = new { def f(p: this.type): Unit = () }
- ^
-structural.scala:19: error: illegal dependent method type
- def f9[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: Object): D }) = x.m[Tata](null) //fail
- ^
+ ^
structural.scala:10: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
def f1[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: A): Object; val x: A }) = x.m[Tata](x.x) //fail
- ^
+ ^
structural.scala:11: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
def f2[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: B): Object; val x: B }) = x.m[Tata](x.x) //fail
- ^
+ ^
structural.scala:12: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
def f3[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: C): Object; val x: C }) = x.m[Tata](x.x) //fail
- ^
+ ^
structural.scala:13: error: Parameter type in structural refinement may not refer to a type member of that refinement
def f4[C <: Object](x: Object{ type D <: Object; def m[E >: Null <: Object](x: D): Object; val x: D }) = x.m[Tata](x.x) //fail
- ^
+ ^
structural.scala:42: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
type Summable[T] = { def +(v : T) : T }
- ^
+ ^
structural.scala:46: error: Parameter type in structural refinement may not refer to the type of that refinement (self type)
type S1 = { def f(p: this.type): Unit }
- ^
+ ^
structural.scala:49: error: Parameter type in structural refinement may not refer to a type member of that refinement
type S2 = { type T; def f(p: T): Unit }
- ^
+ ^
structural.scala:52: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
def s3[U >: Null <: Object](p: { def f(p: U): Unit; def u: U }) = ()
- ^
-10 errors found
+ ^
+9 errors found
diff --git a/test/files/neg/suggest-similar.check b/test/files/neg/suggest-similar.check
new file mode 100644
index 0000000..057aa8b
--- /dev/null
+++ b/test/files/neg/suggest-similar.check
@@ -0,0 +1,10 @@
+suggest-similar.scala:8: error: not found: value flippitx
+ flippitx = 123
+ ^
+suggest-similar.scala:9: error: not found: value identiyt
+ Nil map identiyt
+ ^
+suggest-similar.scala:10: error: not found: type Bingus
+ new Bingus
+ ^
+three errors found
diff --git a/test/files/neg/suggest-similar.scala b/test/files/neg/suggest-similar.scala
new file mode 100644
index 0000000..ff32747
--- /dev/null
+++ b/test/files/neg/suggest-similar.scala
@@ -0,0 +1,11 @@
+class Dingus
+object Dingus {
+ var flippity = 1
+}
+import Dingus._
+
+class A {
+ flippitx = 123
+ Nil map identiyt
+ new Bingus
+}
diff --git a/test/files/neg/switch.check b/test/files/neg/switch.check
index 7212c1a..e4730b6 100644
--- a/test/files/neg/switch.check
+++ b/test/files/neg/switch.check
@@ -1,10 +1,7 @@
-switch.scala:28: error: could not emit switch for @switch annotated match
- def fail1(c: Char) = (c: @switch) match {
- ^
switch.scala:38: error: could not emit switch for @switch annotated match
def fail2(c: Char) = (c: @switch @unchecked) match {
- ^
+ ^
switch.scala:45: error: could not emit switch for @switch annotated match
def fail3(c: Char) = (c: @unchecked @switch) match {
- ^
-three errors found
+ ^
+two errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/switch.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/switch.flags
diff --git a/test/files/neg/switch.scala b/test/files/neg/switch.scala
index a3dfd86..198583f 100644
--- a/test/files/neg/switch.scala
+++ b/test/files/neg/switch.scala
@@ -24,8 +24,8 @@ object Main {
case _ => false
}
- // has a guard
- def fail1(c: Char) = (c: @switch) match {
+ // has a guard, but since SI-5830 that's ok
+ def succ_guard(c: Char) = (c: @switch) match {
case 'A' | 'B' | 'C' => true
case x if x == 'A' => true
case _ => false
diff --git a/test/files/neg/t0152.check b/test/files/neg/t0152.check
index 84f78dc..a7909bf 100644
--- a/test/files/neg/t0152.check
+++ b/test/files/neg/t0152.check
@@ -1,6 +1,6 @@
t0152.scala:10: error: illegal inheritance;
object boom inherits different type instances of class Value:
-Value[Int] and Value[java.lang.String]
+Value[Int] and Value[String]
object boom extends Value[java.lang.String]("foo") with PlusOne
^
one error found
diff --git a/test/files/neg/t0418.check b/test/files/neg/t0418.check
new file mode 100644
index 0000000..4e9ad2f
--- /dev/null
+++ b/test/files/neg/t0418.check
@@ -0,0 +1,7 @@
+t0418.scala:2: error: not found: value Foo12340771
+ null match { case Foo12340771.Bar(x) => x }
+ ^
+t0418.scala:2: error: not found: value x
+ null match { case Foo12340771.Bar(x) => x }
+ ^
+two errors found
diff --git a/test/files/neg/bug0418.scala b/test/files/neg/t0418.scala
similarity index 100%
rename from test/files/neg/bug0418.scala
rename to test/files/neg/t0418.scala
diff --git a/test/files/neg/t0565.check b/test/files/neg/t0565.check
index c5a64d0..98e61a2 100644
--- a/test/files/neg/t0565.check
+++ b/test/files/neg/t0565.check
@@ -1,4 +1,4 @@
t0565.scala:8: error: Parameter type in structural refinement may not refer to a type member of that refinement
def z (w : T) : T } =
- ^
+ ^
one error found
diff --git a/test/files/neg/t0673.check b/test/files/neg/t0673.check
index cc67e99..fd27afc 100644
--- a/test/files/neg/t0673.check
+++ b/test/files/neg/t0673.check
@@ -1,4 +1,4 @@
-Test.scala:2: error: object InnerClass is not a value
+Test.scala:2: error: object JavaClass.InnerClass is not a value
val x = JavaClass.InnerClass
^
one error found
diff --git a/test/files/neg/t0764.check b/test/files/neg/t0764.check
index 9f0cedc..e14c770 100644
--- a/test/files/neg/t0764.check
+++ b/test/files/neg/t0764.check
@@ -1,5 +1,5 @@
t0764.scala:13: error: type mismatch;
- found : java.lang.Object with Node{type T = _1.type} where val _1: Node{type T = NextType}
+ found : Node{type T = _1.type} where val _1: Node{type T = NextType}
required: Node{type T = Main.this.AType}
new Main[AType]( (value: AType).prepend )
^
diff --git a/test/files/neg/t0816.check b/test/files/neg/t0816.check
new file mode 100644
index 0000000..48f37c1
--- /dev/null
+++ b/test/files/neg/t0816.check
@@ -0,0 +1,4 @@
+t0816.scala:5: error: case class Ctest has case ancestor Btest, but case-to-case inheritance is prohibited. To overcome this limitation, use extractors to pattern match on non-leaf nodes.
+case class Ctest(override val data: String) extends Btest(data, true)
+ ^
+one error found
diff --git a/test/files/pos/t0816.scala b/test/files/neg/t0816.scala
similarity index 100%
rename from test/files/pos/t0816.scala
rename to test/files/neg/t0816.scala
diff --git a/test/files/neg/t0903.check b/test/files/neg/t0903.check
index db4cd94..2dd05cd 100644
--- a/test/files/neg/t0903.check
+++ b/test/files/neg/t0903.check
@@ -1,4 +1,4 @@
-t0903.scala:3: error: reassignment to val
+t0903.scala:3: error: value += is not a member of Int
x += 1
^
t0903.scala:4: error: reassignment to val
diff --git a/test/files/neg/t1010.check b/test/files/neg/t1010.check
new file mode 100644
index 0000000..2cc8f9d
--- /dev/null
+++ b/test/files/neg/t1010.check
@@ -0,0 +1,6 @@
+t1010.scala:14: error: type mismatch;
+ found : MailBox#Message
+ required: _3.in.Message where val _3: Actor
+ unstable.send(msg) // in.Message becomes unstable.Message, but that's ok since Message is a concrete type member
+ ^
+one error found
diff --git a/test/files/neg/bug1010.scala b/test/files/neg/t1010.scala
similarity index 100%
rename from test/files/neg/bug1010.scala
rename to test/files/neg/t1010.scala
diff --git a/test/files/neg/t1011.check b/test/files/neg/t1011.check
new file mode 100644
index 0000000..d9c8123
--- /dev/null
+++ b/test/files/neg/t1011.check
@@ -0,0 +1,4 @@
+t1011.scala:8: error: not found: value entity
+ <dl><code>{Text(entity)}</code>
+ ^
+one error found
diff --git a/test/files/neg/bug1011.scala b/test/files/neg/t1011.scala
similarity index 100%
rename from test/files/neg/bug1011.scala
rename to test/files/neg/t1011.scala
diff --git a/test/files/neg/t1017.check b/test/files/neg/t1017.check
new file mode 100644
index 0000000..52101c7
--- /dev/null
+++ b/test/files/neg/t1017.check
@@ -0,0 +1,4 @@
+t1017.scala:3: error: not found: value foo
+<x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x>{ foo }</x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x>
+ ^
+one error found
diff --git a/test/files/neg/bug1017.scala b/test/files/neg/t1017.scala
similarity index 100%
rename from test/files/neg/bug1017.scala
rename to test/files/neg/t1017.scala
diff --git a/test/files/neg/t1038.check b/test/files/neg/t1038.check
new file mode 100644
index 0000000..b191b89
--- /dev/null
+++ b/test/files/neg/t1038.check
@@ -0,0 +1,5 @@
+t1038.scala:4: error: not enough arguments for constructor X: (x: Int)X.
+Unspecified value parameter x.
+ val a = new X
+ ^
+one error found
diff --git a/test/files/neg/bug1038.scala b/test/files/neg/t1038.scala
similarity index 100%
rename from test/files/neg/bug1038.scala
rename to test/files/neg/t1038.scala
diff --git a/test/files/neg/t1041.check b/test/files/neg/t1041.check
new file mode 100644
index 0000000..d82f3a8
--- /dev/null
+++ b/test/files/neg/t1041.check
@@ -0,0 +1,6 @@
+t1041.scala:3: error: type mismatch;
+ found : Int(1)
+ required: List[Int]
+ case 1 => 4
+ ^
+one error found
diff --git a/test/files/neg/bug1041.scala b/test/files/neg/t1041.scala
similarity index 100%
rename from test/files/neg/bug1041.scala
rename to test/files/neg/t1041.scala
diff --git a/test/files/neg/t1106.check b/test/files/neg/t1106.check
new file mode 100644
index 0000000..f81d0c6
--- /dev/null
+++ b/test/files/neg/t1106.check
@@ -0,0 +1,7 @@
+t1106.scala:2: error: expected class or object definition
+val p = new Par[String]
+^
+t1106.scala:5: error: expected class or object definition
+new Foo[p.type](p) // crashes compiler
+^
+two errors found
diff --git a/test/files/neg/bug1106.scala b/test/files/neg/t1106.scala
similarity index 100%
rename from test/files/neg/bug1106.scala
rename to test/files/neg/t1106.scala
diff --git a/test/files/neg/t1112.check b/test/files/neg/t1112.check
new file mode 100644
index 0000000..e69be3e
--- /dev/null
+++ b/test/files/neg/t1112.check
@@ -0,0 +1,4 @@
+t1112.scala:12: error: too many arguments for method call: (p: Int)(f: => () => Unit)Unit
+ call(0,() => System.out.println("here we are"))
+ ^
+one error found
diff --git a/test/files/neg/bug1112.scala b/test/files/neg/t1112.scala
similarity index 100%
rename from test/files/neg/bug1112.scala
rename to test/files/neg/t1112.scala
diff --git a/test/files/neg/t112706A.check b/test/files/neg/t112706A.check
new file mode 100644
index 0000000..30d0c3e
--- /dev/null
+++ b/test/files/neg/t112706A.check
@@ -0,0 +1,6 @@
+t112706A.scala:5: error: constructor cannot be instantiated to expected type;
+ found : (T1, T2)
+ required: String
+ case Tuple2(node,_) =>
+ ^
+one error found
diff --git a/test/files/neg/bug112706A.scala b/test/files/neg/t112706A.scala
similarity index 100%
rename from test/files/neg/bug112706A.scala
rename to test/files/neg/t112706A.scala
diff --git a/test/files/neg/t1181.check b/test/files/neg/t1181.check
new file mode 100644
index 0000000..3724752
--- /dev/null
+++ b/test/files/neg/t1181.check
@@ -0,0 +1,8 @@
+t1181.scala:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ case (Nil, Nil) => map
+ ^
+t1181.scala:9: error: missing parameter type
+ _ => buildMap(map.updated(keyList.head, valueList.head), keyList.tail, valueList.tail)
+ ^
+one warning found
+one error found
diff --git a/test/files/neg/bug1181.scala b/test/files/neg/t1181.scala
similarity index 100%
rename from test/files/neg/bug1181.scala
rename to test/files/neg/t1181.scala
diff --git a/test/files/neg/t1183.check b/test/files/neg/t1183.check
new file mode 100644
index 0000000..c402829
--- /dev/null
+++ b/test/files/neg/t1183.check
@@ -0,0 +1,17 @@
+t1183.scala:6: error: name clash: class Foo defines object Baz
+and its companion object Foo also defines class Baz
+ object Baz
+ ^
+t1183.scala:7: error: name clash: class Foo defines class Bam
+and its companion object Foo also defines object Bam
+ class Bam
+ ^
+t1183.scala:8: error: name clash: class Foo defines object Bar
+and its companion object Foo also defines class Bar
+ object Bar
+ ^
+t1183.scala:9: error: name clash: class Foo defines class Bar
+and its companion object Foo also defines class Bar
+ case class Bar(i:Int)
+ ^
+four errors found
diff --git a/test/files/neg/t1183.scala b/test/files/neg/t1183.scala
new file mode 100644
index 0000000..024c4ab
--- /dev/null
+++ b/test/files/neg/t1183.scala
@@ -0,0 +1,34 @@
+// bug 1183 from in the old tracker, not in Trac
+
+object Test {
+
+ class Foo(j:Int) {
+ object Baz
+ class Bam
+ object Bar
+ case class Bar(i:Int)
+ }
+
+
+ class Test717 {
+ val foo1 = new Foo(1)
+
+ def runTest() = {
+ val res = (foo1.Bar(2):Any) match {
+ case foo1.Bar(2) => true // (1)
+ }
+ require(res)
+ }
+ }
+
+ // (2)
+ object Foo {
+ class Bar(val x : String)
+ class Baz
+ object Bam
+ object Bar
+
+ def unapply(s : String) : Option[Bar] = Some(new Bar(s))
+ }
+
+}
diff --git a/test/files/neg/t1224.check b/test/files/neg/t1224.check
new file mode 100644
index 0000000..fb61275
--- /dev/null
+++ b/test/files/neg/t1224.check
@@ -0,0 +1,4 @@
+t1224.scala:4: error: illegal cyclic reference involving type T
+ type T >: C[T] <: C[C[T]]
+ ^
+one error found
diff --git a/test/files/neg/bug1224.scala b/test/files/neg/t1224.scala
similarity index 100%
rename from test/files/neg/bug1224.scala
rename to test/files/neg/t1224.scala
diff --git a/test/files/neg/t1241.check b/test/files/neg/t1241.check
new file mode 100644
index 0000000..e1ccf41
--- /dev/null
+++ b/test/files/neg/t1241.check
@@ -0,0 +1,4 @@
+t1241.scala:5: error: class type required but AnyRef{def hello(): Unit} found
+ val x4 = new T { def hello() { println("4") } } // error!
+ ^
+one error found
diff --git a/test/files/neg/bug1241.scala b/test/files/neg/t1241.scala
similarity index 100%
rename from test/files/neg/bug1241.scala
rename to test/files/neg/t1241.scala
diff --git a/test/files/neg/t1275.check b/test/files/neg/t1275.check
new file mode 100644
index 0000000..a930e25
--- /dev/null
+++ b/test/files/neg/t1275.check
@@ -0,0 +1,6 @@
+t1275.scala:11: error: type mismatch;
+ found : xs.MyType[a]
+ required: s
+ = xs f
+ ^
+one error found
diff --git a/test/files/neg/bug1275.scala b/test/files/neg/t1275.scala
similarity index 100%
rename from test/files/neg/bug1275.scala
rename to test/files/neg/t1275.scala
diff --git a/test/files/neg/t1286.check b/test/files/neg/t1286.check
new file mode 100644
index 0000000..9127096
--- /dev/null
+++ b/test/files/neg/t1286.check
@@ -0,0 +1,5 @@
+b.scala:1: error: Companions 'trait Foo' and 'object Foo' must be defined in same file:
+ Found in t1286/a.scala and t1286/b.scala
+object Foo extends Foo {
+ ^
+one error found
diff --git a/test/files/neg/bug1286/a.scala b/test/files/neg/t1286/a.scala
similarity index 100%
rename from test/files/neg/bug1286/a.scala
rename to test/files/neg/t1286/a.scala
diff --git a/test/files/neg/bug1286/b.scala b/test/files/neg/t1286/b.scala
similarity index 100%
rename from test/files/neg/bug1286/b.scala
rename to test/files/neg/t1286/b.scala
diff --git a/test/files/neg/t1364.check b/test/files/neg/t1364.check
new file mode 100644
index 0000000..cb8803a
--- /dev/null
+++ b/test/files/neg/t1364.check
@@ -0,0 +1,5 @@
+t1364.scala:9: error: overriding type T in trait A with bounds <: AnyRef{type S[-U]};
+ type T has incompatible type
+ type T = { type S[U] = U }
+ ^
+one error found
diff --git a/test/files/neg/bug1364.scala b/test/files/neg/t1364.scala
similarity index 100%
rename from test/files/neg/bug1364.scala
rename to test/files/neg/t1364.scala
diff --git a/test/files/neg/t1422.check b/test/files/neg/t1422.check
index 5931fcb..362d7ef 100644
--- a/test/files/neg/t1422.check
+++ b/test/files/neg/t1422.check
@@ -1,4 +1,7 @@
t1422.scala:1: error: private[this] not allowed for case class parameters
-case class A(private[this] val foo:String)
+case class A(private[this] val foo:String) { }
^
-one error found
+t1422.scala:1: error: value foo in class A cannot be accessed in A
+case class A(private[this] val foo:String) { }
+ ^
+two errors found
diff --git a/test/files/neg/t1422.scala b/test/files/neg/t1422.scala
index 751f05a..af30824 100644
--- a/test/files/neg/t1422.scala
+++ b/test/files/neg/t1422.scala
@@ -1 +1 @@
-case class A(private[this] val foo:String)
+case class A(private[this] val foo:String) { }
diff --git a/test/files/neg/t1431.check b/test/files/neg/t1431.check
new file mode 100644
index 0000000..a17ba73
--- /dev/null
+++ b/test/files/neg/t1431.check
@@ -0,0 +1,4 @@
+t1431.scala:8: error: class type required but X#Factory found
+ def fun[X<:MyTrait with Singleton]() = new X#Factory().value
+ ^
+one error found
diff --git a/test/files/neg/t1431.scala b/test/files/neg/t1431.scala
new file mode 100644
index 0000000..aff1dbc
--- /dev/null
+++ b/test/files/neg/t1431.scala
@@ -0,0 +1,10 @@
+object Bug_New {
+ trait MyTrait {
+ type Alpha
+ def the_value : Alpha
+ class Factory() {def value : Alpha = the_value}
+ }
+
+ def fun[X<:MyTrait with Singleton]() = new X#Factory().value
+}
+
diff --git a/test/files/neg/t1432.check b/test/files/neg/t1432.check
new file mode 100644
index 0000000..180cb05
--- /dev/null
+++ b/test/files/neg/t1432.check
@@ -0,0 +1,6 @@
+t1432.scala:10: error: type mismatch;
+ found : (Int, Bug_NoUnique.Wrap[Bug_NoUnique.Wrap[Unit]] => Double)
+ required: (Int, Unit => Double)
+ def test(x : TypeCon[Wrap[Unit]]) : TypeCon[Unit] = wrap(x)
+ ^
+one error found
diff --git a/test/files/neg/t1432.scala b/test/files/neg/t1432.scala
new file mode 100644
index 0000000..638f365
--- /dev/null
+++ b/test/files/neg/t1432.scala
@@ -0,0 +1,12 @@
+object Bug_NoUnique {
+
+ type TypeCon[Env] = (Int, Env=>Double)
+
+ case class Wrap[E](parent:E) {}
+
+ def wrap[E,A,Y](v : (A,E=>Y)) : (A,Wrap[E]=>Y) =
+ throw new Error("Body here")
+
+ def test(x : TypeCon[Wrap[Unit]]) : TypeCon[Unit] = wrap(x)
+}
+
diff --git a/test/files/neg/t1477.check b/test/files/neg/t1477.check
index e497637..72bffa3 100644
--- a/test/files/neg/t1477.check
+++ b/test/files/neg/t1477.check
@@ -1,4 +1,4 @@
-t1477.scala:13: error: overriding type V in trait C with bounds >: Nothing <: Middle.this.D;
+t1477.scala:13: error: overriding type V in trait C with bounds <: Middle.this.D;
type V is a volatile type; cannot override a type with non-volatile upper bound
type V <: (D with U)
^
diff --git a/test/files/neg/t1523.check b/test/files/neg/t1523.check
new file mode 100644
index 0000000..d2489f2
--- /dev/null
+++ b/test/files/neg/t1523.check
@@ -0,0 +1,4 @@
+t1523.scala:4: error: too many arguments for method bug: (x: Any)Any
+ def go() = bug("a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a")
+ ^
+one error found
diff --git a/test/files/neg/bug1523.scala b/test/files/neg/t1523.scala
similarity index 100%
rename from test/files/neg/bug1523.scala
rename to test/files/neg/t1523.scala
diff --git a/test/files/neg/bug1548.check b/test/files/neg/t1548.check
similarity index 100%
rename from test/files/neg/bug1548.check
rename to test/files/neg/t1548.check
diff --git a/test/files/neg/bug1548/J.java b/test/files/neg/t1548/J.java
similarity index 100%
rename from test/files/neg/bug1548/J.java
rename to test/files/neg/t1548/J.java
diff --git a/test/files/neg/bug1548/S.scala b/test/files/neg/t1548/S.scala
similarity index 100%
rename from test/files/neg/bug1548/S.scala
rename to test/files/neg/t1548/S.scala
diff --git a/test/files/neg/t1623.check b/test/files/neg/t1623.check
new file mode 100644
index 0000000..251039a
--- /dev/null
+++ b/test/files/neg/t1623.check
@@ -0,0 +1,4 @@
+t1623.scala:11: error: class BImpl cannot be instantiated because it does not conform to its self-type test.BImpl with test.A
+ val b = new BImpl
+ ^
+one error found
diff --git a/test/files/neg/bug1623.scala b/test/files/neg/t1623.scala
similarity index 100%
rename from test/files/neg/bug1623.scala
rename to test/files/neg/t1623.scala
diff --git a/test/files/neg/t1672b.check b/test/files/neg/t1672b.check
new file mode 100644
index 0000000..60ccf77
--- /dev/null
+++ b/test/files/neg/t1672b.check
@@ -0,0 +1,16 @@
+t1672b.scala:3: error: could not optimize @tailrec annotated method bar: it contains a recursive call not in tail position
+ def bar : Nothing = {
+ ^
+t1672b.scala:14: error: could not optimize @tailrec annotated method baz: it contains a recursive call not in tail position
+ def baz : Nothing = {
+ ^
+t1672b.scala:29: error: could not optimize @tailrec annotated method boz: it contains a recursive call not in tail position
+ case _: Throwable => boz; ???
+ ^
+t1672b.scala:34: error: could not optimize @tailrec annotated method bez: it contains a recursive call not in tail position
+ def bez : Nothing = {
+ ^
+t1672b.scala:46: error: could not optimize @tailrec annotated method bar: it contains a recursive call not in tail position
+ else 1 + (try {
+ ^
+5 errors found
diff --git a/test/files/neg/t1672b.scala b/test/files/neg/t1672b.scala
new file mode 100644
index 0000000..0ccdd03
--- /dev/null
+++ b/test/files/neg/t1672b.scala
@@ -0,0 +1,52 @@
+object Test {
+ @annotation.tailrec
+ def bar : Nothing = {
+ try {
+ throw new RuntimeException
+ } catch {
+ case _: Throwable => bar
+ } finally {
+ bar
+ }
+ }
+
+ @annotation.tailrec
+ def baz : Nothing = {
+ try {
+ throw new RuntimeException
+ } catch {
+ case _: Throwable => baz
+ } finally {
+ ???
+ }
+ }
+
+ @annotation.tailrec
+ def boz : Nothing = {
+ try {
+ throw new RuntimeException
+ } catch {
+ case _: Throwable => boz; ???
+ }
+ }
+
+ @annotation.tailrec
+ def bez : Nothing = {
+ try {
+ bez
+ } finally {
+ ???
+ }
+ }
+
+ // the `liftedTree` local method will prevent a tail call here.
+ @annotation.tailrec
+ def bar(i : Int) : Int = {
+ if (i == 0) 0
+ else 1 + (try {
+ throw new RuntimeException
+ } catch {
+ case _: Throwable => bar(i - 1)
+ })
+ }
+}
diff --git a/test/files/neg/t1701.check b/test/files/neg/t1701.check
index 782b690..d603e62 100644
--- a/test/files/neg/t1701.check
+++ b/test/files/neg/t1701.check
@@ -1,4 +1,4 @@
-t1701.scala:1: error: java.lang.Cloneable does not take type parameters
+t1701.scala:1: error: Cloneable does not take type parameters
class A extends java.lang.Cloneable[String, Option, Int]
^
one error found
diff --git a/test/files/neg/t1838.check b/test/files/neg/t1838.check
new file mode 100644
index 0000000..a476158
--- /dev/null
+++ b/test/files/neg/t1838.check
@@ -0,0 +1,7 @@
+t1838.scala:6: error: `sealed' modifier can be used only for classes
+ sealed val v = 0
+ ^
+t1838.scala:5: error: `sealed' modifier can be used only for classes
+ sealed def f = 0
+ ^
+two errors found
diff --git a/test/files/neg/bug1838.scala b/test/files/neg/t1838.scala
similarity index 100%
rename from test/files/neg/bug1838.scala
rename to test/files/neg/t1838.scala
diff --git a/test/files/neg/t1845.check b/test/files/neg/t1845.check
new file mode 100644
index 0000000..a6c82f5
--- /dev/null
+++ b/test/files/neg/t1845.check
@@ -0,0 +1,6 @@
+t1845.scala:6: error: encountered unrecoverable cycle resolving import.
+Note: this is often due in part to a class depending on a definition nested within its companion.
+If applicable, you may wish to try moving some members into another object.
+ import lexical._
+ ^
+one error found
diff --git a/test/files/neg/t1845.scala b/test/files/neg/t1845.scala
new file mode 100644
index 0000000..dab448b
--- /dev/null
+++ b/test/files/neg/t1845.scala
@@ -0,0 +1,10 @@
+import scala.util.parsing.combinator.syntactical.TokenParsers
+import scala.util.parsing.combinator.lexical.StdLexical
+import scala.util.parsing.combinator.token._
+
+class MyTokenParsers extends TokenParsers {
+ import lexical._
+ type Tokens = StdTokens
+ type Elem = lexical.Token
+ val lexical = new StdLexical
+}
diff --git a/test/files/neg/t1872.check b/test/files/neg/t1872.check
new file mode 100644
index 0000000..c5dc2a8
--- /dev/null
+++ b/test/files/neg/t1872.check
@@ -0,0 +1,8 @@
+t1872.scala:3: warning: fruitless type test: a value of type Int cannot also be a scala.util.Random
+ def f(x: Int) = x.isInstanceOf[util.Random]
+ ^
+t1872.scala:3: error: isInstanceOf cannot test if value types are references.
+ def f(x: Int) = x.isInstanceOf[util.Random]
+ ^
+one warning found
+one error found
diff --git a/test/files/neg/bug1872.scala b/test/files/neg/t1872.scala
similarity index 100%
rename from test/files/neg/bug1872.scala
rename to test/files/neg/t1872.scala
diff --git a/test/files/neg/t1878-typer.check b/test/files/neg/t1878-typer.check
new file mode 100644
index 0000000..e3a20d0
--- /dev/null
+++ b/test/files/neg/t1878-typer.check
@@ -0,0 +1,4 @@
+t1878-typer.scala:4: error: _* may only come last
+ case <p> { _* } </p> =>
+ ^
+one error found
diff --git a/test/files/neg/t1878-typer.scala b/test/files/neg/t1878-typer.scala
new file mode 100644
index 0000000..1eb0cb7
--- /dev/null
+++ b/test/files/neg/t1878-typer.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ // illegal - bug #1764
+ null match {
+ case <p> { _* } </p> =>
+ }
+}
diff --git a/test/files/neg/t1878.check b/test/files/neg/t1878.check
new file mode 100644
index 0000000..ac2071c
--- /dev/null
+++ b/test/files/neg/t1878.check
@@ -0,0 +1,7 @@
+t1878.scala:3: error: bad use of _* (a sequence pattern must be the last pattern)
+ val err1 = "" match { case Seq(f @ _*, ',') => f }
+ ^
+t1878.scala:9: error: bad use of _* (a sequence pattern must be the last pattern)
+ val List(List(_*, arg2), _) = List(List(1,2,3), List(4,5,6))
+ ^
+two errors found
diff --git a/test/files/neg/t1878.scala b/test/files/neg/t1878.scala
new file mode 100644
index 0000000..99fee48
--- /dev/null
+++ b/test/files/neg/t1878.scala
@@ -0,0 +1,17 @@
+object Test extends App {
+ // illegal
+ val err1 = "" match { case Seq(f @ _*, ',') => f }
+
+ // no error
+ val List(List(arg1, _*), _) = List(List(1,2,3), List(4,5,6))
+
+ // illegal
+ val List(List(_*, arg2), _) = List(List(1,2,3), List(4,5,6))
+
+ /* see t1878-typer.scala
+ // illegal - bug #1764
+ null match {
+ case <p> { _* } </p> =>
+ }
+ */
+}
diff --git a/test/files/neg/t1909b.check b/test/files/neg/t1909b.check
new file mode 100644
index 0000000..9a68364
--- /dev/null
+++ b/test/files/neg/t1909b.check
@@ -0,0 +1,4 @@
+t1909b.scala:4: error: this can be used only in a class, object, or template
+ def bar() = this.z + 5
+ ^
+one error found
diff --git a/test/files/neg/bug1909b.scala b/test/files/neg/t1909b.scala
similarity index 100%
rename from test/files/neg/bug1909b.scala
rename to test/files/neg/t1909b.scala
diff --git a/test/files/neg/t1960.check b/test/files/neg/t1960.check
new file mode 100644
index 0000000..5238141
--- /dev/null
+++ b/test/files/neg/t1960.check
@@ -0,0 +1,4 @@
+t1960.scala:5: error: parameter 'p' requires field but conflicts with method p in trait TBase
+class Aclass (p: Int) extends TBase { def g() { f(p) } }
+ ^
+one error found
diff --git a/test/files/neg/bug1960.scala b/test/files/neg/t1960.scala
similarity index 100%
rename from test/files/neg/bug1960.scala
rename to test/files/neg/t1960.scala
diff --git a/test/files/neg/t200.check b/test/files/neg/t200.check
new file mode 100644
index 0000000..b6b1a32
--- /dev/null
+++ b/test/files/neg/t200.check
@@ -0,0 +1,5 @@
+t200.scala:7: error: method foo is defined twice
+ conflicting symbols both originated in file 't200.scala'
+ def foo: Int;
+ ^
+one error found
diff --git a/test/files/neg/bug200.scala b/test/files/neg/t200.scala
similarity index 100%
rename from test/files/neg/bug200.scala
rename to test/files/neg/t200.scala
diff --git a/test/files/neg/t2070.check b/test/files/neg/t2070.check
index bd04940..ef1d08f 100644
--- a/test/files/neg/t2070.check
+++ b/test/files/neg/t2070.check
@@ -1,5 +1,6 @@
t2070.scala:8: error: The kind of trait T does not conform to the expected kind of type T[X] in trait A.
-t2070.B.T's type parameters do not match type T's expected parameters: type X (in object B) has one type parameter, but type X (in trait A) has none
+t2070.B.T's type parameters do not match type T's expected parameters:
+type X (in object B) has one type parameter, but type X (in trait A) has none
trait T[X[_]]
^
one error found
diff --git a/test/files/neg/t2078.check b/test/files/neg/t2078.check
index 1b79c19..00bb323 100644
--- a/test/files/neg/t2078.check
+++ b/test/files/neg/t2078.check
@@ -1,4 +1,4 @@
-t2078.scala:2: error: contravariant type S occurs in covariant position in type => java.lang.Object{val x: S} of value f
+t2078.scala:2: error: contravariant type S occurs in covariant position in type => AnyRef{val x: S} of value f
val f = new { val x = y }
^
one error found
diff --git a/test/files/neg/t2078.scala b/test/files/neg/t2078.scala
index 03eaa7e..342ba08 100644
--- a/test/files/neg/t2078.scala
+++ b/test/files/neg/t2078.scala
@@ -5,5 +5,5 @@ class A[-S](y : S) {
object Test extends App {
val a = new A(1)
val b = a : A[Nothing]
- b.f.x
+ println(b.f.x)
}
diff --git a/test/files/neg/t2102.check b/test/files/neg/t2102.check
new file mode 100644
index 0000000..b4f91a5
--- /dev/null
+++ b/test/files/neg/t2102.check
@@ -0,0 +1,6 @@
+t2102.scala:2: error: type mismatch;
+ found : java.util.Iterator[Int]
+ required: scala.collection.Iterator[_]
+ val x: Iterator[_] = new java.util.ArrayList[Int]().iterator
+ ^
+one error found
diff --git a/test/files/neg/bug2102.scala b/test/files/neg/t2102.scala
similarity index 100%
rename from test/files/neg/bug2102.scala
rename to test/files/neg/t2102.scala
diff --git a/test/files/neg/t2144.check b/test/files/neg/t2144.check
new file mode 100644
index 0000000..670e188
--- /dev/null
+++ b/test/files/neg/t2144.check
@@ -0,0 +1,4 @@
+t2144.scala:2: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
+ def foo[A](a: A) = new { def bar(x: A): A = x }
+ ^
+one error found
diff --git a/test/files/neg/bug2144.scala b/test/files/neg/t2144.scala
similarity index 100%
rename from test/files/neg/bug2144.scala
rename to test/files/neg/t2144.scala
diff --git a/test/files/neg/t2148.check b/test/files/neg/t2148.check
new file mode 100644
index 0000000..5113b48
--- /dev/null
+++ b/test/files/neg/t2148.check
@@ -0,0 +1,4 @@
+t2148.scala:9: error: type A is not a stable prefix
+ val b = new A with A#A1
+ ^
+one error found
diff --git a/test/files/neg/bug2148.scala b/test/files/neg/t2148.scala
similarity index 100%
rename from test/files/neg/bug2148.scala
rename to test/files/neg/t2148.scala
diff --git a/test/files/neg/t2206.check b/test/files/neg/t2206.check
new file mode 100644
index 0000000..766f35d
--- /dev/null
+++ b/test/files/neg/t2206.check
@@ -0,0 +1,5 @@
+t2206.scala:10: error: value f is not a member of o.A
+ Note: implicit method ax is not applicable here because it comes after the application point and it lacks an explicit result type
+ a.f()
+ ^
+one error found
diff --git a/test/files/neg/bug2206.scala b/test/files/neg/t2206.scala
similarity index 100%
rename from test/files/neg/bug2206.scala
rename to test/files/neg/t2206.scala
diff --git a/test/files/neg/t2208.check b/test/files/neg/t2208.check
index a97b20c..64bb3a7 100644
--- a/test/files/neg/t2208.check
+++ b/test/files/neg/t2208.check
@@ -1,4 +1,4 @@
t2208.scala:7: error: type arguments [Any] do not conform to type Alias's type parameter bounds [X <: Test.A]
class C extends Alias[Any] // not ok, normalisation should check bounds before expanding Alias
^
-one error found
\ No newline at end of file
+one error found
diff --git a/test/files/neg/t2213.check b/test/files/neg/t2213.check
new file mode 100644
index 0000000..9fb3bb2
--- /dev/null
+++ b/test/files/neg/t2213.check
@@ -0,0 +1,25 @@
+t2213.scala:9: error: class C needs to be abstract, since:
+it has 4 unimplemented members.
+/** As seen from class C, the missing signatures are as follows.
+ * For convenience, these are usable as stub implementations.
+ */
+ def f: Int = ???
+ def g: Int = ???
+ val x: Int = ???
+ val y: Int = ???
+
+class C extends A {}
+ ^
+t2213.scala:11: error: object creation impossible, since:
+it has 4 unimplemented members.
+/** As seen from object Q, the missing signatures are as follows.
+ * For convenience, these are usable as stub implementations.
+ */
+ def f: Int = ???
+ def g: Int = ???
+ val x: Int = ???
+ val y: Int = ???
+
+object Q extends A { }
+ ^
+two errors found
diff --git a/test/files/neg/bug2213.scala b/test/files/neg/t2213.scala
similarity index 100%
rename from test/files/neg/bug2213.scala
rename to test/files/neg/t2213.scala
diff --git a/test/files/neg/t2275a.check b/test/files/neg/t2275a.check
new file mode 100644
index 0000000..cd3c868
--- /dev/null
+++ b/test/files/neg/t2275a.check
@@ -0,0 +1,13 @@
+t2275a.scala:4: error: in XML literal: in XML content, please use '}}' to express '}'
+ }else{
+ ^
+t2275a.scala:3: error: I encountered a '}' where I didn't expect one, maybe this tag isn't closed <br>
+ <br>
+ ^
+t2275a.scala:4: error: ';' expected but 'else' found.
+ }else{
+ ^
+t2275a.scala:7: error: '}' expected but eof found.
+}
+ ^
+four errors found
diff --git a/test/files/neg/bug2275a.scala b/test/files/neg/t2275a.scala
similarity index 100%
rename from test/files/neg/bug2275a.scala
rename to test/files/neg/t2275a.scala
diff --git a/test/files/neg/t2275b.check b/test/files/neg/t2275b.check
new file mode 100644
index 0000000..43e34cc
--- /dev/null
+++ b/test/files/neg/t2275b.check
@@ -0,0 +1,10 @@
+t2275b.scala:2: error: in XML literal: in XML content, please use '}}' to express '}'
+ {<br>}xx
+ ^
+t2275b.scala:2: error: I encountered a '}' where I didn't expect one, maybe this tag isn't closed <br>
+ {<br>}xx
+ ^
+t2275b.scala:3: error: '}' expected but eof found.
+}
+ ^
+three errors found
diff --git a/test/files/neg/bug2275b.scala b/test/files/neg/t2275b.scala
similarity index 100%
rename from test/files/neg/bug2275b.scala
rename to test/files/neg/t2275b.scala
diff --git a/test/files/neg/t2296a.check b/test/files/neg/t2296a.check
new file mode 100644
index 0000000..863b861
--- /dev/null
+++ b/test/files/neg/t2296a.check
@@ -0,0 +1,5 @@
+S.scala:6: error: Implementation restriction: trait S accesses protected method foo inside a concrete trait method.
+Add an accessor in a class extending class J as a workaround.
+ foo()
+ ^
+one error found
diff --git a/test/files/neg/t2296a/J.java b/test/files/neg/t2296a/J.java
new file mode 100644
index 0000000..78ff3e9
--- /dev/null
+++ b/test/files/neg/t2296a/J.java
@@ -0,0 +1,7 @@
+package j;
+
+public class J {
+ protected void foo() {
+ System.out.println("J.foo()");
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t2296a/S.scala b/test/files/neg/t2296a/S.scala
new file mode 100644
index 0000000..532d038
--- /dev/null
+++ b/test/files/neg/t2296a/S.scala
@@ -0,0 +1,18 @@
+package s {
+ import j.J
+
+ trait S extends J {
+ def bar() {
+ foo()
+ }
+ }
+
+ class SC extends J with S
+}
+
+object Test {
+ def main(args : Array[String]) {
+ (new s.SC).bar()
+ (new s.S { }).bar()
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t2296b.check b/test/files/neg/t2296b.check
new file mode 100644
index 0000000..07cc54d
--- /dev/null
+++ b/test/files/neg/t2296b.check
@@ -0,0 +1,5 @@
+S_2.scala:6: error: Implementation restriction: trait S accesses protected method foo inside a concrete trait method.
+Add an accessor in a class extending class J_1 as a workaround.
+ foo()
+ ^
+one error found
diff --git a/test/files/neg/t2296b/J_1.java b/test/files/neg/t2296b/J_1.java
new file mode 100644
index 0000000..4c91d47
--- /dev/null
+++ b/test/files/neg/t2296b/J_1.java
@@ -0,0 +1,7 @@
+package j;
+
+public class J_1 {
+ protected void foo() {
+ System.out.println("J.foo()");
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t2296b/S_2.scala b/test/files/neg/t2296b/S_2.scala
new file mode 100644
index 0000000..6cdb0cf
--- /dev/null
+++ b/test/files/neg/t2296b/S_2.scala
@@ -0,0 +1,18 @@
+package s {
+ import j.J_1
+
+ trait S extends J_1 {
+ def bar() {
+ foo()
+ }
+ }
+
+ class SC extends J_1 with S
+}
+
+object Test {
+ def main(args : Array[String]) {
+ (new s.SC).bar()
+ (new s.S { }).bar()
+ }
+}
diff --git a/test/files/neg/t2336.check b/test/files/neg/t2336.check
index 9837174..28acd4d 100644
--- a/test/files/neg/t2336.check
+++ b/test/files/neg/t2336.check
@@ -1,4 +1,4 @@
-t2336.scala:6: error: type Foo[Int] is not a stable prefix
+t2336.scala:6: error: Foo[Int] is not a legal prefix for a constructor
new Foo[Int]#Bar(0)
^
one error found
diff --git a/test/files/neg/t2336.scala b/test/files/neg/t2336.scala
old mode 100644
new mode 100755
diff --git a/test/files/neg/t2386.check b/test/files/neg/t2386.check
deleted file mode 100644
index 2caa46c..0000000
--- a/test/files/neg/t2386.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t2386.scala:2: error: could not find implicit value for evidence parameter of type scala.reflect.ClassManifest[Array[_ >: java.lang.String with Int]]
- val a = Array(Array(1, 2), Array("a","b"))
- ^
-one error found
diff --git a/test/files/neg/t2386.scala b/test/files/neg/t2386.scala
deleted file mode 100644
index 56146cc..0000000
--- a/test/files/neg/t2386.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test {
- val a = Array(Array(1, 2), Array("a","b"))
-}
diff --git a/test/files/neg/t2388.check b/test/files/neg/t2388.check
new file mode 100644
index 0000000..3f97608
--- /dev/null
+++ b/test/files/neg/t2388.check
@@ -0,0 +1,4 @@
+t2388.scala:2: error: recursive method search needs result type
+ val searchField = new AnyRef { search() }
+ ^
+one error found
diff --git a/test/files/neg/t2388.scala b/test/files/neg/t2388.scala
new file mode 100644
index 0000000..3634f34
--- /dev/null
+++ b/test/files/neg/t2388.scala
@@ -0,0 +1,4 @@
+class Foo {
+ val searchField = new AnyRef { search() }
+ def search() = searchField
+}
diff --git a/test/files/neg/t2405.check b/test/files/neg/t2405.check
new file mode 100644
index 0000000..78360bc
--- /dev/null
+++ b/test/files/neg/t2405.check
@@ -0,0 +1,8 @@
+t2405.scala:6: warning: imported `y' is permanently hidden by definition of method y
+ import A.{x => y}
+ ^
+t2405.scala:8: error: could not find implicit value for parameter e: Int
+ implicitly[Int]
+ ^
+one warning found
+one error found
diff --git a/test/files/neg/t2405.scala b/test/files/neg/t2405.scala
new file mode 100644
index 0000000..6982285
--- /dev/null
+++ b/test/files/neg/t2405.scala
@@ -0,0 +1,10 @@
+object A { implicit val x: Int = 1 }
+
+// Expecting shadowing #1
+object Test2 {
+ {
+ import A.{x => y}
+ def y: Int = 0
+ implicitly[Int]
+ }
+}
diff --git a/test/files/neg/t2441.check b/test/files/neg/t2441.check
new file mode 100644
index 0000000..6eaacd8
--- /dev/null
+++ b/test/files/neg/t2441.check
@@ -0,0 +1,4 @@
+t2441.scala:12: error: private class Y escapes its defining scope as part of type Some[B.Y]
+ override def f = Some(new B.Y)
+ ^
+one error found
diff --git a/test/files/neg/bug2441.scala b/test/files/neg/t2441.scala
similarity index 100%
rename from test/files/neg/bug2441.scala
rename to test/files/neg/t2441.scala
diff --git a/test/files/neg/t2442.check b/test/files/neg/t2442.check
new file mode 100644
index 0000000..714816f
--- /dev/null
+++ b/test/files/neg/t2442.check
@@ -0,0 +1,9 @@
+t2442.scala:4: error: match may not be exhaustive.
+It would fail on the following input: THREE
+ def f(e: MyEnum) = e match {
+ ^
+t2442.scala:11: error: match may not be exhaustive.
+It would fail on the following input: BLUE
+ def g(e: MySecondEnum) = e match {
+ ^
+two errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/t2442.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/t2442.flags
diff --git a/test/files/neg/t2442/MyEnum.java b/test/files/neg/t2442/MyEnum.java
new file mode 100644
index 0000000..3ffbbb3
--- /dev/null
+++ b/test/files/neg/t2442/MyEnum.java
@@ -0,0 +1,3 @@
+public enum MyEnum {
+ ONE, TWO, THREE;
+}
\ No newline at end of file
diff --git a/test/files/neg/t2442/MySecondEnum.java b/test/files/neg/t2442/MySecondEnum.java
new file mode 100644
index 0000000..0f84128
--- /dev/null
+++ b/test/files/neg/t2442/MySecondEnum.java
@@ -0,0 +1,6 @@
+public enum MySecondEnum {
+ RED(1), BLUE(2) { public void foo() {} };
+ MySecondEnum(int i) {}
+
+ public void foo() {}
+}
\ No newline at end of file
diff --git a/test/files/neg/t2442/t2442.scala b/test/files/neg/t2442/t2442.scala
new file mode 100644
index 0000000..b0a0f3c
--- /dev/null
+++ b/test/files/neg/t2442/t2442.scala
@@ -0,0 +1,15 @@
+class Test {
+ import MyEnum._
+
+ def f(e: MyEnum) = e match {
+ case ONE => println("one")
+ case TWO => println("two")
+ // missing case --> exhaustivity warning!
+ }
+
+ import MySecondEnum._
+ def g(e: MySecondEnum) = e match {
+ case RED => println("red")
+ // missing case --> exhaustivity warning!
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t2488.check b/test/files/neg/t2488.check
new file mode 100644
index 0000000..170dbf8
--- /dev/null
+++ b/test/files/neg/t2488.check
@@ -0,0 +1,31 @@
+t2488.scala:7: error: overloaded method value f with alternatives:
+ ()Int <and>
+ (a: Int,b: Int)Int
+ cannot be applied to (b: Int, Int)
+ println(c.f(b = 2, 2))
+ ^
+t2488.scala:8: error: overloaded method value f with alternatives:
+ ()Int <and>
+ (a: Int,b: Int)Int
+ cannot be applied to (a: Int, c: Int)
+ println(c.f(a = 2, c = 2))
+ ^
+t2488.scala:9: error: overloaded method value f with alternatives:
+ ()Int <and>
+ (a: Int,b: Int)Int
+ cannot be applied to (Int, c: Int)
+ println(c.f(2, c = 2))
+ ^
+t2488.scala:10: error: overloaded method value f with alternatives:
+ ()Int <and>
+ (a: Int,b: Int)Int
+ cannot be applied to (c: Int, Int)
+ println(c.f(c = 2, 2))
+ ^
+t2488.scala:11: error: overloaded method value f with alternatives:
+ ()Int <and>
+ (a: Int,b: Int)Int
+ cannot be applied to (Int)
+ println(c.f(2))
+ ^
+5 errors found
diff --git a/test/files/neg/t2488.scala b/test/files/neg/t2488.scala
new file mode 100644
index 0000000..8db052e
--- /dev/null
+++ b/test/files/neg/t2488.scala
@@ -0,0 +1,12 @@
+class C {
+ def f(a:Int, b:Int) = 1
+ def f() = 2
+}
+object Test extends App {
+ val c = new C()
+ println(c.f(b = 2, 2))
+ println(c.f(a = 2, c = 2))
+ println(c.f(2, c = 2))
+ println(c.f(c = 2, 2))
+ println(c.f(2))
+}
diff --git a/test/files/neg/t2494.scala b/test/files/neg/t2494.scala
old mode 100644
new mode 100755
diff --git a/test/files/neg/t2641.check b/test/files/neg/t2641.check
index 771624e..909f4f0 100644
--- a/test/files/neg/t2641.check
+++ b/test/files/neg/t2641.check
@@ -1,35 +1,15 @@
-t2641.scala:19: error: illegal cyclic reference involving trait ManagedSeq
+t2641.scala:18: error: wrong number of type arguments for ManagedSeq, should be 2
with TraversableViewLike[A, ManagedSeqStrict[A], ManagedSeq[A]]
^
-t2641.scala:17: error: illegal inheritance;
+t2641.scala:16: error: illegal inheritance;
self-type ManagedSeq does not conform to ManagedSeqStrict[A]'s selftype ManagedSeqStrict[A]
extends ManagedSeqStrict[A]
^
-t2641.scala:18: error: illegal inheritance;
+t2641.scala:17: error: illegal inheritance;
self-type ManagedSeq does not conform to scala.collection.TraversableView[A,ManagedSeqStrict[A]]'s selftype scala.collection.TraversableView[A,ManagedSeqStrict[A]]
with TraversableView[A, ManagedSeqStrict[A]]
^
-t2641.scala:17: error: illegal inheritance;
- self-type ManagedSeq does not conform to ScalaObject's selftype ScalaObject
- extends ManagedSeqStrict[A]
- ^
-t2641.scala:25: error: something is wrong (wrong class file?): trait ManagedSeq with type parameters [A,Coll] gets applied to arguments [], phase = typer
- trait Transformed[+B] extends ManagedSeq[B, Coll] with super.Transformed[B]
- ^
-t2641.scala:27: error: something is wrong (wrong class file?): trait ManagedSeq with type parameters [A,Coll] gets applied to arguments [], phase = namer
- trait Sliced extends Transformed[A] with super.Sliced {
- ^
-t2641.scala:27: error: illegal inheritance; superclass Any
- is not a subclass of the superclass ManagedSeqStrict
- of the mixin trait Transformed
- trait Sliced extends Transformed[A] with super.Sliced {
- ^
-t2641.scala:27: error: illegal inheritance; superclass Any
- is not a subclass of the superclass Object
- of the mixin trait Sliced
- trait Sliced extends Transformed[A] with super.Sliced {
- ^
-t2641.scala:28: error: value managedIterator is not a member of ManagedSeq
+t2641.scala:27: error: value managedIterator is not a member of ManagedSeq
override def managedIterator = self.managedIterator slice (from, until)
^
-9 errors found
+four errors found
diff --git a/test/files/neg/t2641.scala b/test/files/neg/t2641.scala
index 68a4ca3..626d5d7 100644
--- a/test/files/neg/t2641.scala
+++ b/test/files/neg/t2641.scala
@@ -9,8 +9,7 @@ abstract class ManagedSeqStrict[+A]
{
override def companion: GenericCompanion[ManagedSeqStrict] = null
- override def foreach[U](f: A => U): Unit =
- null
+ override def foreach[U](f: A => U): Unit = ()
}
trait ManagedSeq[+A, +Coll]
diff --git a/test/files/neg/t276.check b/test/files/neg/t276.check
new file mode 100644
index 0000000..b241953
--- /dev/null
+++ b/test/files/neg/t276.check
@@ -0,0 +1,5 @@
+t276.scala:6: error: overriding type Bar in class Foo, which equals (Int, Int);
+ class Bar cannot be used here - classes can only override abstract types
+ class Bar
+ ^
+one error found
diff --git a/test/files/neg/bug276.scala b/test/files/neg/t276.scala
similarity index 100%
rename from test/files/neg/bug276.scala
rename to test/files/neg/t276.scala
diff --git a/test/files/neg/t2773.check b/test/files/neg/t2773.check
index 6e88762..a5ffb5f 100644
--- a/test/files/neg/t2773.check
+++ b/test/files/neg/t2773.check
@@ -1,4 +1,4 @@
-t2773.scala:5: error: x is not a member of c
+t2773.scala:5: error: value x is not a member of C
import c.x
^
t2773.scala:6: error: not found: value x
diff --git a/test/files/neg/t2773.scala b/test/files/neg/t2773.scala
old mode 100644
new mode 100755
diff --git a/test/files/neg/t2775.check b/test/files/neg/t2775.check
index a30d35f..934a970 100644
--- a/test/files/neg/t2775.check
+++ b/test/files/neg/t2775.check
@@ -1,4 +1,4 @@
-t2775.scala:1: error: cannot find class manifest for element type B.this.T
+t2775.scala:1: error: cannot find class tag for element type B.this.T
trait B[S] { type T = S; val c = new Array[T](1) }
^
one error found
diff --git a/test/files/neg/t2779.check b/test/files/neg/t2779.check
index 4f94a78..0ab4c50 100644
--- a/test/files/neg/t2779.check
+++ b/test/files/neg/t2779.check
@@ -1,4 +1,5 @@
t2779.scala:16: error: method f is defined twice
+ conflicting symbols both originated in file 't2779.scala'
override def f = List(M1)
^
one error found
diff --git a/test/files/neg/t2779.scala b/test/files/neg/t2779.scala
old mode 100644
new mode 100755
diff --git a/test/files/neg/t278.check b/test/files/neg/t278.check
new file mode 100644
index 0000000..405f7d2
--- /dev/null
+++ b/test/files/neg/t278.check
@@ -0,0 +1,11 @@
+t278.scala:5: error: overloaded method value a with alternatives:
+ => C.this.A => Unit <and>
+ => () => Unit
+ does not take type parameters
+ println(a[A])
+ ^
+t278.scala:4: error: method a is defined twice
+ conflicting symbols both originated in file 't278.scala'
+ def a = (p:A) => ()
+ ^
+two errors found
diff --git a/test/files/neg/t278.scala b/test/files/neg/t278.scala
new file mode 100644
index 0000000..39a711b
--- /dev/null
+++ b/test/files/neg/t278.scala
@@ -0,0 +1,6 @@
+class C {
+ class A
+ def a = () => ()
+ def a = (p:A) => ()
+ println(a[A])
+}
diff --git a/test/files/neg/t2796.check b/test/files/neg/t2796.check
new file mode 100644
index 0000000..aeb1849
--- /dev/null
+++ b/test/files/neg/t2796.check
@@ -0,0 +1,4 @@
+t2796.scala:7: error: Implementation restriction: early definitions in traits are not initialized before the super class is initialized.
+ val abstractVal = "T1.abstractVal" // warn
+ ^
+one error found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/t2796.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/t2796.flags
diff --git a/test/files/neg/t2796.scala b/test/files/neg/t2796.scala
new file mode 100644
index 0000000..3bcc9df
--- /dev/null
+++ b/test/files/neg/t2796.scala
@@ -0,0 +1,28 @@
+trait Base {
+ val abstractVal: String
+ final val useAbstractVal = abstractVal
+}
+
+trait T1 extends {
+ val abstractVal = "T1.abstractVal" // warn
+} with Base
+
+trait T2 extends {
+ type X = Int // okay
+} with Base
+
+
+class C1 extends {
+ val abstractVal = "C1.abstractVal" // okay
+} with Base
+
+object Test {
+ def main(args: Array[String]) {
+ assert(new C1 ().useAbstractVal == "C1.abstractVal")
+ // This currently fails. a more ambitious approach to this ticket would add $earlyinit$
+ // to traits and call it from the right places in the right order.
+ //
+ // For now, we'll just issue a warning.
+ assert(new T1 {}.useAbstractVal == "T1.abstractVal")
+ }
+}
diff --git a/test/files/neg/t284.check b/test/files/neg/t284.check
new file mode 100644
index 0000000..37801af
--- /dev/null
+++ b/test/files/neg/t284.check
@@ -0,0 +1,8 @@
+t284.scala:2: warning: Detected apparent refinement of Unit; are you missing an '=' sign?
+ def f1(a: T): Unit { }
+ ^
+t284.scala:5: error: Unmatched closing brace '}' ignored here
+ }
+ ^
+one warning found
+one error found
diff --git a/test/files/neg/bug284.scala b/test/files/neg/t284.scala
similarity index 100%
rename from test/files/neg/bug284.scala
rename to test/files/neg/t284.scala
diff --git a/test/files/neg/t2870.check b/test/files/neg/t2870.check
index 6577577..99522ec 100644
--- a/test/files/neg/t2870.check
+++ b/test/files/neg/t2870.check
@@ -1,7 +1,9 @@
t2870.scala:1: error: not found: type Jar
class Jars(jar: Jar)
^
-t2870.scala:6: error: illegal cyclic reference involving value <import>
- val scala = fromClasspathString(javaClassPath)
- ^
+t2870.scala:4: error: encountered unrecoverable cycle resolving import.
+Note: this is often due in part to a class depending on a definition nested within its companion.
+If applicable, you may wish to try moving some members into another object.
+ import scala.util.Properties.javaClassPath
+ ^
two errors found
diff --git a/test/files/neg/t2870.scala b/test/files/neg/t2870.scala
old mode 100644
new mode 100755
diff --git a/test/files/neg/t2910.check b/test/files/neg/t2910.check
index ff19012..44bf199 100644
--- a/test/files/neg/t2910.check
+++ b/test/files/neg/t2910.check
@@ -10,7 +10,7 @@ t2910.scala:16: error: forward reference extends over definition of value z
t2910.scala:30: error: forward reference extends over definition of value x
lazy val f: Int = x
^
-t2910.scala:34: error: forward reference extends over definition of variable x
+t2910.scala:35: error: forward reference extends over definition of variable x
lazy val f: Int = g
^
5 errors found
diff --git a/test/files/neg/t2910.scala b/test/files/neg/t2910.scala
index aafeb59..fa51038 100644
--- a/test/files/neg/t2910.scala
+++ b/test/files/neg/t2910.scala
@@ -29,6 +29,7 @@ object Test {
{
lazy val f: Int = x
val x: Int = f
+ println(x)
}
{
lazy val f: Int = g
diff --git a/test/files/neg/t2918.check b/test/files/neg/t2918.check
index e67f24e..aae3045 100644
--- a/test/files/neg/t2918.check
+++ b/test/files/neg/t2918.check
@@ -1,7 +1,10 @@
+t2918.scala:2: error: illegal cyclic reference involving type A
+ def g[X, A[X] <: A[X]](x: A[X]) = x
+ ^
t2918.scala:2: error: cyclic aliasing or subtyping involving type A
- def g[X, A[X] <: A[X]](x: A[X]) = x
+ def g[X, A[X] <: A[X]](x: A[X]) = x
^
t2918.scala:2: error: A does not take type parameters
- def g[X, A[X] <: A[X]](x: A[X]) = x
+ def g[X, A[X] <: A[X]](x: A[X]) = x
^
-two errors found
+three errors found
diff --git a/test/files/neg/t2918.scala b/test/files/neg/t2918.scala
old mode 100644
new mode 100755
index 03477cc..ff2be39
--- a/test/files/neg/t2918.scala
+++ b/test/files/neg/t2918.scala
@@ -1,3 +1,3 @@
object Test {
- def g[X, A[X] <: A[X]](x: A[X]) = x
+ def g[X, A[X] <: A[X]](x: A[X]) = x
}
diff --git a/test/files/neg/t2968.check b/test/files/neg/t2968.check
new file mode 100644
index 0000000..5d2387f
--- /dev/null
+++ b/test/files/neg/t2968.check
@@ -0,0 +1,10 @@
+t2968.scala:8: error: Missing closing brace `}' assumed here
+} // missing brace
+^
+t2968.scala:17: error: Missing closing brace `}' assumed here
+} // missing brace
+^
+t2968.scala:26: error: Missing closing brace `}' assumed here
+} // missing brace
+^
+three errors found
diff --git a/test/files/neg/t2968.scala b/test/files/neg/t2968.scala
new file mode 100644
index 0000000..41c3a79
--- /dev/null
+++ b/test/files/neg/t2968.scala
@@ -0,0 +1,26 @@
+object t1 {
+ case object Const {
+ }
+
+ class Var
+ {
+
+} // missing brace
+
+object t2 {
+ case class Const() {
+ }
+
+ class Var
+ {
+
+} // missing brace
+
+object t3 {
+ final case class Const() {
+ }
+
+ class Var
+ {
+
+} // missing brace
diff --git a/test/files/neg/t2968b.check b/test/files/neg/t2968b.check
new file mode 100644
index 0000000..36d25a2
--- /dev/null
+++ b/test/files/neg/t2968b.check
@@ -0,0 +1,4 @@
+t2968b.scala:7: error: '}' expected but eof found.
+// missing brace
+ ^
+one error found
diff --git a/test/files/neg/t2968b.scala b/test/files/neg/t2968b.scala
new file mode 100644
index 0000000..422b618
--- /dev/null
+++ b/test/files/neg/t2968b.scala
@@ -0,0 +1,7 @@
+case class Const()
+{
+}
+
+class Var
+{
+// missing brace
diff --git a/test/files/neg/t3006.check b/test/files/neg/t3006.check
index 9a90d32..2447eeb 100644
--- a/test/files/neg/t3006.check
+++ b/test/files/neg/t3006.check
@@ -1,5 +1,5 @@
t3006.scala:8: error: type mismatch;
- found : java.lang.String("H")
+ found : String("H")
required: Int
println(A(3) + "H")
^
diff --git a/test/files/neg/t3006.scala b/test/files/neg/t3006.scala
old mode 100644
new mode 100755
diff --git a/test/files/neg/t3015.check b/test/files/neg/t3015.check
index 32809b0..6948392 100644
--- a/test/files/neg/t3015.check
+++ b/test/files/neg/t3015.check
@@ -1,11 +1,6 @@
t3015.scala:7: error: scrutinee is incompatible with pattern type;
- found : _$1 where type _$1
- required: java.lang.String
+ found : _$1
+ required: String
val b(foo) = "foo"
^
-t3015.scala:7: error: type mismatch;
- found : _$1(in value foo) where type _$1(in value foo) <: java.lang.String
- required: (some other)_$1(in value foo) where type (some other)_$1(in value foo)
- val b(foo) = "foo"
- ^
-two errors found
+one error found
diff --git a/test/files/neg/t3098.check b/test/files/neg/t3098.check
new file mode 100644
index 0000000..8582974
--- /dev/null
+++ b/test/files/neg/t3098.check
@@ -0,0 +1,5 @@
+b.scala:3: error: match may not be exhaustive.
+It would fail on the following input: (_ : C)
+ def f = (null: T) match {
+ ^
+one error found
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/neg/t3098.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/neg/t3098.flags
diff --git a/test/files/neg/t3098/a.scala b/test/files/neg/t3098/a.scala
new file mode 100644
index 0000000..57a103c
--- /dev/null
+++ b/test/files/neg/t3098/a.scala
@@ -0,0 +1,6 @@
+// Traits.scala
+sealed trait T
+
+trait A extends T
+trait B extends T
+trait C extends T
diff --git a/test/files/neg/t3098/b.scala b/test/files/neg/t3098/b.scala
new file mode 100644
index 0000000..84a1f9f
--- /dev/null
+++ b/test/files/neg/t3098/b.scala
@@ -0,0 +1,8 @@
+// Test.scala
+object Test {
+ def f = (null: T) match {
+ case _: A => println("A")
+ case _: B => println("B")
+ // no C
+ }
+}
diff --git a/test/files/neg/t3115.check b/test/files/neg/t3115.check
deleted file mode 100644
index c128ff5..0000000
--- a/test/files/neg/t3115.check
+++ /dev/null
@@ -1,12 +0,0 @@
-t3115.scala:6: error: object Math in object sc is deprecated:
- println(sc.Math)
- ^
-t3115.scala:7: error: object Math is deprecated: use the scala.math package object instead.
-(Example package object usage: scala.math.Pi )
- println(scala.Math)
- ^
-t3115.scala:8: error: object Math is deprecated: use the scala.math package object instead.
-(Example package object usage: scala.math.Pi )
- scala.Math.Pi
- ^
-three errors found
diff --git a/test/files/neg/t3115.scala b/test/files/neg/t3115.scala
deleted file mode 100644
index 3888085..0000000
--- a/test/files/neg/t3115.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object sc {
- @deprecated("", "2.8.0") object Math
-}
-
-object Test {
- println(sc.Math)
- println(scala.Math)
- scala.Math.Pi
-}
diff --git a/test/files/neg/t3189.check b/test/files/neg/t3189.check
new file mode 100644
index 0000000..3913c52
--- /dev/null
+++ b/test/files/neg/t3189.check
@@ -0,0 +1,4 @@
+t3189.scala:2: error: use _* to match a sequence
+ val Array(a,b*) = ("": Any)
+ ^
+one error found
diff --git a/test/pending/neg/bug3189.scala b/test/files/neg/t3189.scala
similarity index 100%
rename from test/pending/neg/bug3189.scala
rename to test/files/neg/t3189.scala
diff --git a/test/files/neg/t3209.check b/test/files/neg/t3209.check
new file mode 100644
index 0000000..c5a6b1d
--- /dev/null
+++ b/test/files/neg/t3209.check
@@ -0,0 +1,4 @@
+t3209.scala:2: error: expected start of definition
+package test
+^
+one error found
diff --git a/test/files/neg/bug3209.scala b/test/files/neg/t3209.scala
similarity index 100%
rename from test/files/neg/bug3209.scala
rename to test/files/neg/t3209.scala
diff --git a/test/files/neg/t3224.scala b/test/files/neg/t3224.scala
old mode 100644
new mode 100755
diff --git a/test/files/neg/t3234.check b/test/files/neg/t3234.check
new file mode 100644
index 0000000..4339950
--- /dev/null
+++ b/test/files/neg/t3234.check
@@ -0,0 +1,2 @@
+error: there were 1 inliner warning(s); re-run with -Yinline-warnings for details
+one error found
diff --git a/test/files/pos/bug3234.flags b/test/files/neg/t3234.flags
similarity index 100%
rename from test/files/pos/bug3234.flags
rename to test/files/neg/t3234.flags
diff --git a/test/files/pos/bug3234.scala b/test/files/neg/t3234.scala
similarity index 100%
rename from test/files/pos/bug3234.scala
rename to test/files/neg/t3234.scala
diff --git a/test/files/neg/t3240.check b/test/files/neg/t3240.check
new file mode 100644
index 0000000..efae682
--- /dev/null
+++ b/test/files/neg/t3240.check
@@ -0,0 +1,4 @@
+t3240.scala:3: error: only classes can have declared but undefined members
+ type t
+ ^
+one error found
diff --git a/test/files/neg/t3240.scala b/test/files/neg/t3240.scala
new file mode 100644
index 0000000..cf197a4
--- /dev/null
+++ b/test/files/neg/t3240.scala
@@ -0,0 +1,8 @@
+class A {
+ val foo = new {
+ type t
+ def apply(a: Option[t], defVal: Any) = {
+ a.getOrElse(defVal).asInstanceOf[t]
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t3275.check b/test/files/neg/t3275.check
new file mode 100644
index 0000000..117c792
--- /dev/null
+++ b/test/files/neg/t3275.check
@@ -0,0 +1,4 @@
+t3275.scala:2: error: @tailrec annotated method contains no recursive calls
+ @annotation.tailrec def foo() = 5
+ ^
+one error found
diff --git a/test/files/neg/t3275.scala b/test/files/neg/t3275.scala
new file mode 100644
index 0000000..18e38a1
--- /dev/null
+++ b/test/files/neg/t3275.scala
@@ -0,0 +1,3 @@
+object Test {
+ @annotation.tailrec def foo() = 5
+}
diff --git a/test/files/neg/t3392.check b/test/files/neg/t3392.check
new file mode 100644
index 0000000..842d63e
--- /dev/null
+++ b/test/files/neg/t3392.check
@@ -0,0 +1,4 @@
+t3392.scala:9: error: not found: value x
+ case x at A(x/*<-- refers to the pattern that includes this comment*/.Ex(42)) =>
+ ^
+one error found
diff --git a/test/files/neg/t3392.scala b/test/files/neg/t3392.scala
new file mode 100644
index 0000000..655c2e8
--- /dev/null
+++ b/test/files/neg/t3392.scala
@@ -0,0 +1,11 @@
+object Test {
+ case class A(a: Int) {
+ object Ex {
+ def unapply(i: Int): Option[Int] = Some(i)
+ }
+ }
+
+ A(42) match {
+ case x at A(x/*<-- refers to the pattern that includes this comment*/.Ex(42)) =>
+ }
+}
diff --git a/test/files/neg/t343.check b/test/files/neg/t343.check
new file mode 100644
index 0000000..d310b79
--- /dev/null
+++ b/test/files/neg/t343.check
@@ -0,0 +1,4 @@
+t343.scala:5: error: private class Foo escapes its defining scope as part of type C.this.Foo
+ def get:Foo = new Foo();
+ ^
+one error found
diff --git a/test/files/neg/bug343.scala b/test/files/neg/t343.scala
similarity index 100%
rename from test/files/neg/bug343.scala
rename to test/files/neg/t343.scala
diff --git a/test/files/neg/t3481.check b/test/files/neg/t3481.check
new file mode 100644
index 0000000..debe072
--- /dev/null
+++ b/test/files/neg/t3481.check
@@ -0,0 +1,29 @@
+t3481.scala:5: error: type mismatch;
+ found : String("hello")
+ required: _$1
+ f[A[Int]]("hello")
+ ^
+t3481.scala:11: error: type mismatch;
+ found : _$2
+ required: b.T
+ (which expands to) _$2
+ def f[T <: B[_]](a: T#T, b: T) = b.m(a)
+ ^
+t3481.scala:12: error: type mismatch;
+ found : String("Hello")
+ required: _$2
+ f("Hello", new B[Int])
+ ^
+t3481.scala:18: error: type mismatch;
+ found : String("Hello")
+ required: t3481.ex3.b.T2
+ (which expands to) _$3
+ b.m("Hello")
+ ^
+t3481.scala:25: error: type mismatch;
+ found : String("Hello")
+ required: t3481.ex4.Test.b.T2
+ (which expands to) _$4
+ b.m("Hello")
+ ^
+5 errors found
diff --git a/test/files/neg/t3481.scala b/test/files/neg/t3481.scala
new file mode 100644
index 0000000..f4b781e
--- /dev/null
+++ b/test/files/neg/t3481.scala
@@ -0,0 +1,28 @@
+object t3481 {
+ object ex1 {
+ trait A[T] { type B = T }
+ def f[T <: A[_]](a: T#B) = 1
+ f[A[Int]]("hello")
+ }
+
+ object ex2 {
+ trait A { type T; def m(t: T) = t.toString }
+ class B[T2] extends A { type T = T2 }
+ def f[T <: B[_]](a: T#T, b: T) = b.m(a)
+ f("Hello", new B[Int])
+ }
+
+ object ex3 {
+ class B[T] { type T2 = T; def m(t: T2) = t.toString }
+ val b: B[_] = new B[Int]
+ b.m("Hello")
+ }
+
+ object ex4 {
+ abstract class B[T] { type T2 = T; def m(t: T2): Any }
+ object Test {
+ val b: B[_] = sys.error("")
+ b.m("Hello")
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t3507-old.check b/test/files/neg/t3507-old.check
new file mode 100644
index 0000000..b3ac404
--- /dev/null
+++ b/test/files/neg/t3507-old.check
@@ -0,0 +1,4 @@
+t3507-old.scala:13: error: No Manifest available for _1.b.c.type.
+ mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier
+ ^
+one error found
diff --git a/test/files/neg/t3507.scala b/test/files/neg/t3507-old.scala
similarity index 100%
rename from test/files/neg/t3507.scala
rename to test/files/neg/t3507-old.scala
diff --git a/test/files/neg/t3507.check b/test/files/neg/t3507.check
deleted file mode 100644
index 8e538e4..0000000
--- a/test/files/neg/t3507.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t3507.scala:13: error: No Manifest available for _1.b.c.type.
- mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier
- ^
-one error found
diff --git a/test/files/neg/t3614.check b/test/files/neg/t3614.check
new file mode 100644
index 0000000..81628ef
--- /dev/null
+++ b/test/files/neg/t3614.check
@@ -0,0 +1,4 @@
+t3614.scala:2: error: only declarations allowed here
+ def v = new ({ def a=0 })
+ ^
+one error found
diff --git a/test/files/neg/t3614.scala b/test/files/neg/t3614.scala
new file mode 100644
index 0000000..5b02cdf
--- /dev/null
+++ b/test/files/neg/t3614.scala
@@ -0,0 +1,3 @@
+object t3614 {
+ def v = new ({ def a=0 })
+}
\ No newline at end of file
diff --git a/test/files/neg/t3631.check b/test/files/neg/t3631.check
new file mode 100644
index 0000000..6d8feca
--- /dev/null
+++ b/test/files/neg/t3631.check
@@ -0,0 +1,4 @@
+t3631.scala:3: error: Implementation restriction: case classes cannot have more than 22 parameters.
+case class X23(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int, x23: Int) { }
+ ^
+one error found
diff --git a/test/files/neg/bug3631.scala b/test/files/neg/t3631.scala
similarity index 100%
rename from test/files/neg/bug3631.scala
rename to test/files/neg/t3631.scala
diff --git a/test/files/neg/t3683a.check b/test/files/neg/t3683a.check
new file mode 100644
index 0000000..3de3ad7
--- /dev/null
+++ b/test/files/neg/t3683a.check
@@ -0,0 +1,5 @@
+t3683a.scala:14: error: match may not be exhaustive.
+It would fail on the following input: XX()
+ w match {
+ ^
+one error found
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/neg/t3683a.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/neg/t3683a.flags
diff --git a/test/files/neg/bug3683a.scala b/test/files/neg/t3683a.scala
similarity index 100%
rename from test/files/neg/bug3683a.scala
rename to test/files/neg/t3683a.scala
diff --git a/test/files/neg/t3683b.check b/test/files/neg/t3683b.check
new file mode 100644
index 0000000..6e33692
--- /dev/null
+++ b/test/files/neg/t3683b.check
@@ -0,0 +1,8 @@
+t3683b.scala:15: error: constructor cannot be instantiated to expected type;
+ found : X
+ required: W[Bar]
+Note: Foo >: Bar (and X <: W[Foo]), but trait W is invariant in type T.
+You may wish to define T as -T instead. (SLS 4.5)
+ case X() => 1
+ ^
+one error found
diff --git a/test/files/neg/bug3683b.scala b/test/files/neg/t3683b.scala
similarity index 100%
rename from test/files/neg/bug3683b.scala
rename to test/files/neg/t3683b.scala
diff --git a/test/files/neg/t3691.check b/test/files/neg/t3691.check
index 1b548cc..6a7e130 100644
--- a/test/files/neg/t3691.check
+++ b/test/files/neg/t3691.check
@@ -1,15 +1,15 @@
t3691.scala:4: error: type mismatch;
- found : java.lang.Object with Test.A[String]
+ found : Test.A[String]
required: AnyRef{type A[x]}
val b = (new A[String]{}): { type A[x] } // not ok
^
t3691.scala:5: error: type mismatch;
- found : java.lang.Object with Test.A[String]
+ found : Test.A[String]
required: AnyRef{type A}
val c = (new A[String]{}): { type A } // not ok
^
t3691.scala:7: error: type mismatch;
- found : java.lang.Object{type A = String}
+ found : AnyRef{type A = String}
required: AnyRef{type A[X]}
val x = (new { type A = String }): { type A[X] } // not ok
^
diff --git a/test/files/neg/t3692-new.check b/test/files/neg/t3692-new.check
new file mode 100644
index 0000000..5aa991c
--- /dev/null
+++ b/test/files/neg/t3692-new.check
@@ -0,0 +1,14 @@
+t3692-new.scala:14: warning: non-variable type argument Int in type pattern Map[Int,Int] is unchecked since it is eliminated by erasure
+ case m0: Map[Int, Int] => new java.util.HashMap[Integer, Integer]
+ ^
+t3692-new.scala:15: warning: non-variable type argument Int in type pattern Map[Int,V] is unchecked since it is eliminated by erasure
+ case m1: Map[Int, V] => new java.util.HashMap[Integer, V]
+ ^
+t3692-new.scala:16: warning: non-variable type argument Int in type pattern Map[T,Int] is unchecked since it is eliminated by erasure
+ case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
+ ^
+t3692-new.scala:16: error: unreachable code
+ case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
+ ^
+three warnings found
+one error found
diff --git a/test/files/neg/t3692-new.flags b/test/files/neg/t3692-new.flags
new file mode 100644
index 0000000..cb8324a
--- /dev/null
+++ b/test/files/neg/t3692-new.flags
@@ -0,0 +1 @@
+-Xoldpatmat
\ No newline at end of file
diff --git a/test/files/neg/t3692-new.scala b/test/files/neg/t3692-new.scala
new file mode 100644
index 0000000..cebdcea
--- /dev/null
+++ b/test/files/neg/t3692-new.scala
@@ -0,0 +1,20 @@
+import scala.reflect.{ClassTag, classTag}
+import java.lang.Integer
+
+object Tester {
+ def main(args: Array[String]) = {
+ val map = Map("John" -> 1, "Josh" -> 2)
+ new Tester().toJavaMap(map)
+ }
+}
+
+class Tester {
+ private final def toJavaMap[T, V](map: Map[T, V])(implicit m1: ClassTag[T], m2: ClassTag[V]): java.util.Map[_, _] = {
+ map match {
+ case m0: Map[Int, Int] => new java.util.HashMap[Integer, Integer]
+ case m1: Map[Int, V] => new java.util.HashMap[Integer, V]
+ case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
+ case _ => new java.util.HashMap[T, V]
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t3692-old.check b/test/files/neg/t3692-old.check
new file mode 100644
index 0000000..9f3ae51
--- /dev/null
+++ b/test/files/neg/t3692-old.check
@@ -0,0 +1,14 @@
+t3692-old.scala:13: warning: non-variable type argument Int in type pattern Map[Int,Int] is unchecked since it is eliminated by erasure
+ case m0: Map[Int, Int] => new java.util.HashMap[Integer, Integer]
+ ^
+t3692-old.scala:14: warning: non-variable type argument Int in type pattern Map[Int,V] is unchecked since it is eliminated by erasure
+ case m1: Map[Int, V] => new java.util.HashMap[Integer, V]
+ ^
+t3692-old.scala:15: warning: non-variable type argument Int in type pattern Map[T,Int] is unchecked since it is eliminated by erasure
+ case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
+ ^
+t3692-old.scala:15: error: unreachable code
+ case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
+ ^
+three warnings found
+one error found
diff --git a/test/files/neg/t3692-old.flags b/test/files/neg/t3692-old.flags
new file mode 100644
index 0000000..cb8324a
--- /dev/null
+++ b/test/files/neg/t3692-old.flags
@@ -0,0 +1 @@
+-Xoldpatmat
\ No newline at end of file
diff --git a/test/files/neg/t3692.scala b/test/files/neg/t3692-old.scala
similarity index 100%
rename from test/files/neg/t3692.scala
rename to test/files/neg/t3692-old.scala
diff --git a/test/files/neg/t3692.check b/test/files/neg/t3692.check
deleted file mode 100644
index 96ddd2a..0000000
--- a/test/files/neg/t3692.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t3692.scala:15: error: unreachable code
- case m2: Map[T, Int] => new java.util.HashMap[T, Integer]
- ^
-one error found
diff --git a/test/files/neg/t3714-neg.check b/test/files/neg/t3714-neg.check
new file mode 100644
index 0000000..2db0655
--- /dev/null
+++ b/test/files/neg/t3714-neg.check
@@ -0,0 +1,13 @@
+t3714-neg.scala:17: error: value break in class BreakImpl cannot be accessed in BreakImpl
+ Access to protected value break not permitted because
+ enclosing object Test is not a subclass of
+ class BreakImpl where target is defined
+ case b: BreakImpl => b.break
+ ^
+t3714-neg.scala:25: error: value break in class BreakImpl cannot be accessed in BreakImpl
+ Access to protected value break not permitted because
+ enclosing object Test is not a subclass of
+ class BreakImpl where target is defined
+ case b: BreakImpl => b.break
+ ^
+two errors found
diff --git a/test/files/neg/t3714-neg.scala b/test/files/neg/t3714-neg.scala
new file mode 100644
index 0000000..4b56f93
--- /dev/null
+++ b/test/files/neg/t3714-neg.scala
@@ -0,0 +1,41 @@
+// this is a slight negative twist on run/t3714.scala.
+trait Break {
+ protected val break: Int;
+}
+
+class BreakImpl(protected val break: Int) extends Break { }
+object BreakImpl {
+ def apply(x: Int): Break = new BreakImpl(x)
+ def unapply(x: Any) = x match {
+ case x: BreakImpl => Some(x.break)
+ case _ => None
+ }
+}
+
+object Test {
+ def f1(x: Break) = x match {
+ case b: BreakImpl => b.break
+ case b => -1
+ }
+ def f2(x: Break) = x match {
+ case BreakImpl(x) => x
+ case _ => -1
+ }
+ def f3(x: Any) = x match {
+ case b: BreakImpl => b.break
+ case b => -1
+ }
+ def f4(x: Any) = x match {
+ case BreakImpl(x) => x
+ case _ => -1
+ }
+
+ def main(args: Array[String]) {
+ val break = BreakImpl(22)
+ assert(f1(break) == 22)
+ assert(f2(break) == 22)
+ assert(f3(break) == 22)
+ assert(f4(break) == 22)
+ }
+}
+
diff --git a/test/files/neg/t3736.check b/test/files/neg/t3736.check
new file mode 100644
index 0000000..7a20f6c
--- /dev/null
+++ b/test/files/neg/t3736.check
@@ -0,0 +1,16 @@
+t3736.scala:4: error: super not allowed here: use this.isInstanceOf instead
+ def f2 = super.isInstanceOf[String]
+ ^
+t3736.scala:5: error: super not allowed here: use this.asInstanceOf instead
+ def f3 = super.asInstanceOf[AnyRef]
+ ^
+t3736.scala:6: error: super not allowed here: use this.== instead
+ def f4 = super.==(new AnyRef)
+ ^
+t3736.scala:7: error: super not allowed here: use this.!= instead
+ def f5 = super.!=(new AnyRef)
+ ^
+t3736.scala:8: error: super not allowed here: use this.## instead
+ def f6 = super.##
+ ^
+5 errors found
diff --git a/test/files/neg/bug3736.scala b/test/files/neg/t3736.scala
similarity index 100%
rename from test/files/neg/bug3736.scala
rename to test/files/neg/t3736.scala
diff --git a/test/files/neg/t3761-overload-byname.check b/test/files/neg/t3761-overload-byname.check
new file mode 100644
index 0000000..ae7d21d
--- /dev/null
+++ b/test/files/neg/t3761-overload-byname.check
@@ -0,0 +1,13 @@
+t3761-overload-byname.scala:9: error: ambiguous reference to overloaded definition,
+both method m1 in object t of type (x: => Int, s: Object)Int
+and method m1 in object t of type (x: => AnyVal, s: String)Int
+match argument types (Int,String)
+ m1(1, "")
+ ^
+t3761-overload-byname.scala:11: error: ambiguous reference to overloaded definition,
+both method m2 in object t of type (x: => Int, s: Object)Int
+and method m2 in object t of type (x: => Any, s: String)Int
+match argument types (Int,String)
+ m2(1, "")
+ ^
+two errors found
diff --git a/test/files/neg/t3761-overload-byname.scala b/test/files/neg/t3761-overload-byname.scala
new file mode 100644
index 0000000..5b9a381
--- /dev/null
+++ b/test/files/neg/t3761-overload-byname.scala
@@ -0,0 +1,13 @@
+object t {
+ def m1(x: => AnyVal, s: String) = 0
+ def m1(x: => Int, s: Object) = 1
+
+ def m2(x: => Any, s: String) = 0
+ def m2(x: => Int, s: Object) = 1
+
+
+ m1(1, "")
+ m1(1d, "")
+ m2(1, "")
+ m2("", "")
+}
diff --git a/test/files/neg/t3773.check b/test/files/neg/t3773.check
deleted file mode 100644
index 29a3e14..0000000
--- a/test/files/neg/t3773.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t3773.scala:3: error: method elements in trait IterableLike is deprecated: use `iterator' instead
- for ((v, t) <- m.elements) ()
- ^
-one error found
diff --git a/test/files/neg/t3773.flags b/test/files/neg/t3773.flags
deleted file mode 100644
index d1b831e..0000000
--- a/test/files/neg/t3773.flags
+++ /dev/null
@@ -1 +0,0 @@
--deprecation -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t3773.scala b/test/files/neg/t3773.scala
deleted file mode 100644
index 3b92ed2..0000000
--- a/test/files/neg/t3773.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object t {
- val m = Map(1 -> "one")
- for ((v, t) <- m.elements) ()
-}
-
diff --git a/test/files/neg/t3816.check b/test/files/neg/t3816.check
index 3658e76..40621f8 100644
--- a/test/files/neg/t3816.check
+++ b/test/files/neg/t3816.check
@@ -1,4 +1,4 @@
-t3816.scala:30: error: stable identifier required, but syncID found.
+t3816.scala:30: error: stable identifier required, but `syncID` found.
case Some( `syncID` ) =>
^
t3816.scala:38: error: stable identifier required, but Test.this.foo found.
diff --git a/test/files/neg/t3836.check b/test/files/neg/t3836.check
new file mode 100644
index 0000000..ff2fc36
--- /dev/null
+++ b/test/files/neg/t3836.check
@@ -0,0 +1,13 @@
+t3836.scala:17: error: reference to IOException is ambiguous;
+it is imported twice in the same scope by
+import foo.bar._
+and import java.io._
+ def f = new IOException // genuinely different
+ ^
+t3836.scala:26: error: reference to Bippy is ambiguous;
+it is imported twice in the same scope by
+import baz._
+and import bar._
+ def f: Bippy[Int] = ???
+ ^
+two errors found
diff --git a/test/files/neg/t3836.scala b/test/files/neg/t3836.scala
new file mode 100644
index 0000000..a68f6e1
--- /dev/null
+++ b/test/files/neg/t3836.scala
@@ -0,0 +1,28 @@
+package foo
+
+package object bar {
+ type IOException = Object
+ type Bippy[T] = List[T]
+}
+
+package object baz {
+ type Bippy[+T] = List[T]
+}
+
+package baz {
+ import java.io._
+ import foo.bar._
+
+ object Test {
+ def f = new IOException // genuinely different
+ }
+}
+
+package baz2 {
+ import bar._
+ import baz._
+
+ object Test2 {
+ def f: Bippy[Int] = ???
+ }
+}
diff --git a/test/files/neg/t3854.check b/test/files/neg/t3854.check
new file mode 100644
index 0000000..c478481
--- /dev/null
+++ b/test/files/neg/t3854.check
@@ -0,0 +1,5 @@
+t3854.scala:1: error: class Bar needs to be abstract, since method foo in trait Foo of type [G[_]](implicit n: N[G,F])X[F] is not defined
+(Note that N[G,F] does not match M[G])
+class Bar[F[_]] extends Foo[F] {
+ ^
+one error found
diff --git a/test/files/neg/t3854.scala b/test/files/neg/t3854.scala
new file mode 100644
index 0000000..e8db76c
--- /dev/null
+++ b/test/files/neg/t3854.scala
@@ -0,0 +1,15 @@
+class Bar[F[_]] extends Foo[F] {
+ def foo[G[_[_], _]](implicit M: M[G]): X[({type λ[α] = G[F, α] })#λ] = null
+}
+// vim: set ts=4 sw=4 et:
+
+trait M[F[_[_], _]]
+trait N[F[_], G[_]]
+
+trait X[F[_]] {
+ def apply[A]: F[A]
+}
+
+trait Foo[F[_]] {
+ def foo[G[_]](implicit n: N[G, F]): X[F]
+}
diff --git a/test/files/neg/t3873.flags b/test/files/neg/t3873.flags
deleted file mode 100644
index 1c26b24..0000000
--- a/test/files/neg/t3873.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/neg/t3909.check b/test/files/neg/t3909.check
new file mode 100644
index 0000000..7da0195
--- /dev/null
+++ b/test/files/neg/t3909.check
@@ -0,0 +1,4 @@
+t3909.scala:1: error: in object DO, multiple overloaded alternatives of m1 define default arguments
+object DO {
+ ^
+one error found
diff --git a/test/files/neg/bug3909.scala b/test/files/neg/t3909.scala
similarity index 100%
rename from test/files/neg/bug3909.scala
rename to test/files/neg/t3909.scala
diff --git a/test/files/neg/t391.check b/test/files/neg/t391.check
new file mode 100644
index 0000000..879d9af
--- /dev/null
+++ b/test/files/neg/t391.check
@@ -0,0 +1,13 @@
+t391.scala:2: error: identifier expected but 'def' found.
+ def fun1(def x: Int): Int = x; // the "def x" is illegal
+ ^
+t391.scala:4: error: ':' expected but '}' found.
+}
+^
+t391.scala:6: error: identifier expected but 'def' found.
+class E(def x: Int); // the "def x" is illegal
+ ^
+t391.scala:6: error: ':' expected but eof found.
+class E(def x: Int); // the "def x" is illegal
+ ^
+four errors found
diff --git a/test/files/neg/bug391.scala b/test/files/neg/t391.scala
similarity index 100%
rename from test/files/neg/bug391.scala
rename to test/files/neg/t391.scala
diff --git a/test/files/neg/t3913.check b/test/files/neg/t3913.check
new file mode 100644
index 0000000..d85e5c5
--- /dev/null
+++ b/test/files/neg/t3913.check
@@ -0,0 +1,4 @@
+t3913.scala:2: error: super constructor cannot be passed a self reference unless parameter is declared by-name
+object LimboStage extends Stage( Set( LimboStage ))
+ ^
+one error found
diff --git a/test/files/neg/bug3913.scala b/test/files/neg/t3913.scala
similarity index 100%
rename from test/files/neg/bug3913.scala
rename to test/files/neg/t3913.scala
diff --git a/test/files/neg/t3934.check b/test/files/neg/t3934.check
index ebc6dfb..405ed2e 100644
--- a/test/files/neg/t3934.check
+++ b/test/files/neg/t3934.check
@@ -1,6 +1,6 @@
t3934.scala:15: error: method f2 in class J cannot be accessed in test.J
Access to protected method f2 not permitted because
- enclosing class class S1 in package nest is not a subclass of
+ enclosing class S1 in package nest is not a subclass of
class J in package test where target is defined
def g2(x: J) = x.f2()
^
diff --git a/test/files/neg/t3987.check b/test/files/neg/t3987.check
index d72e2d4..a9f7912 100644
--- a/test/files/neg/t3987.check
+++ b/test/files/neg/t3987.check
@@ -1,6 +1,7 @@
t3987.scala:11: error: type mismatch;
found : Gox
required: Test.GoxZed
+ (which expands to) t#Zed forSome { type t <: Gox }
val y: GoxZed = x
^
one error found
diff --git a/test/files/neg/t3995.check b/test/files/neg/t3995.check
new file mode 100644
index 0000000..00ecf4c
--- /dev/null
+++ b/test/files/neg/t3995.check
@@ -0,0 +1,6 @@
+t3995.scala:31: error: type mismatch;
+ found : String("")
+ required: _1.F0 where val _1: Lift
+ (new Lift).apply("")
+ ^
+one error found
diff --git a/test/files/neg/t3995.scala b/test/files/neg/t3995.scala
new file mode 100644
index 0000000..b03617a
--- /dev/null
+++ b/test/files/neg/t3995.scala
@@ -0,0 +1,32 @@
+class Lift {
+ def apply(f: F0) {}
+
+ class F0
+ object F0 {
+ implicit def f2f0(fn: String): F0 = ???
+ }
+}
+
+object Test {
+ val l = new Lift
+ val f = ""
+
+ "": l.F0 // okay
+
+ l.apply("") // okay
+
+ {
+ val l = new Lift
+ l.apply("") // okay
+ }
+
+ // fails trying to mkAttributedQualifier for pre = Skolem(_1 <: Lift with Singletom).F0
+ // should this even have shown up in `companionImplicitMap`? It says that:
+ //
+ // "@return For those parts that refer to classes with companion objects that
+ // can be accessed with unambiguous stable prefixes, the implicits infos
+ // which are members of these companion objects."
+ //
+ // The skolem is stable, but it doen't seem much good to us
+ (new Lift).apply("")
+}
diff --git a/test/files/neg/t4044.check b/test/files/neg/t4044.check
index 75dcf63..0e1ea4f 100644
--- a/test/files/neg/t4044.check
+++ b/test/files/neg/t4044.check
@@ -1,16 +1,14 @@
t4044.scala:9: error: AnyRef takes no type parameters, expected: one
M[AnyRef] // error, (AnyRef :: *) not kind-conformant to (N :: * -> * -> *)
^
-t4044.scala:9: error: kinds of the type arguments (<error>) do not conform to the expected kinds of the type parameters (type N).
-<error>'s type parameters do not match type N's expected parameters: <none> has no type parameters, but type N has one
- M[AnyRef] // error, (AnyRef :: *) not kind-conformant to (N :: * -> * -> *)
- ^
t4044.scala:11: error: kinds of the type arguments (Test.A) do not conform to the expected kinds of the type parameters (type N).
-Test.A's type parameters do not match type N's expected parameters: type _ has no type parameters, but type O has one
+Test.A's type parameters do not match type N's expected parameters:
+type _ has no type parameters, but type O has one
M[A] // error, (A :: (* -> *) not kind-conformant to (N :: * -> * -> *)
^
t4044.scala:15: error: kinds of the type arguments (Test.C) do not conform to the expected kinds of the type parameters (type N).
-Test.C's type parameters do not match type N's expected parameters: type _ has one type parameter, but type _ has none
+Test.C's type parameters do not match type N's expected parameters:
+type _ has one type parameter, but type _ has none
M[C] // error, (C :: (* -> * -> * -> *) not kind-conformant to (N :: * -> * -> *)
^
-four errors found
+three errors found
diff --git a/test/files/neg/t4069.check b/test/files/neg/t4069.check
new file mode 100644
index 0000000..08e937b
--- /dev/null
+++ b/test/files/neg/t4069.check
@@ -0,0 +1,16 @@
+t4069.scala:7: error: unexpected end of input: possible missing '}' in XML block
+ case 2 =>
+ ^
+t4069.scala:6: error: Missing closing brace `}' assumed here
+ </div>
+ ^
+t4069.scala:9: error: in XML literal: in XML content, please use '}}' to express '}'
+ }
+ ^
+t4069.scala:4: error: I encountered a '}' where I didn't expect one, maybe this tag isn't closed <div>
+ <div>
+ ^
+t4069.scala:10: error: '}' expected but eof found.
+}
+ ^
+5 errors found
diff --git a/test/files/neg/bug4069.scala b/test/files/neg/t4069.scala
similarity index 100%
rename from test/files/neg/bug4069.scala
rename to test/files/neg/t4069.scala
diff --git a/test/files/neg/t409.check b/test/files/neg/t409.check
new file mode 100644
index 0000000..433d64d
--- /dev/null
+++ b/test/files/neg/t409.check
@@ -0,0 +1,4 @@
+t409.scala:6: error: traits or objects may not have parameters
+class Toto extends Expr with Case1(12);
+ ^
+one error found
diff --git a/test/files/neg/bug409.scala b/test/files/neg/t409.scala
similarity index 100%
rename from test/files/neg/bug409.scala
rename to test/files/neg/t409.scala
diff --git a/test/files/neg/t4098.check b/test/files/neg/t4098.check
new file mode 100644
index 0000000..7d69cf1
--- /dev/null
+++ b/test/files/neg/t4098.check
@@ -0,0 +1,13 @@
+t4098.scala:3: error: forward reference not allowed from self constructor invocation
+ this(b)
+ ^
+t4098.scala:8: error: forward reference not allowed from self constructor invocation
+ this(b)
+ ^
+t4098.scala:13: error: forward reference not allowed from self constructor invocation
+ this(b)
+ ^
+t4098.scala:18: error: forward reference not allowed from self constructor invocation
+ this(b)
+ ^
+four errors found
diff --git a/test/files/neg/t4098.scala b/test/files/neg/t4098.scala
new file mode 100644
index 0000000..744d619
--- /dev/null
+++ b/test/files/neg/t4098.scala
@@ -0,0 +1,22 @@
+class A(a: Any) {
+ def this() = {
+ this(b)
+ def b = new {}
+ }
+
+ def this(x: Int) = {
+ this(b)
+ lazy val b = new {}
+ }
+
+ def this(x: Int, y: Int) = {
+ this(b)
+ val b = new {}
+ }
+
+ def this(x: Int, y: Int, z: Int) = {
+ this(b)
+ println(".")
+ def b = new {}
+ }
+}
diff --git a/test/files/neg/t412.check b/test/files/neg/t412.check
new file mode 100644
index 0000000..9cb467e
--- /dev/null
+++ b/test/files/neg/t412.check
@@ -0,0 +1,5 @@
+t412.scala:11: error: stable identifier required, but A.this.c found.
+ Note that value c is not stable because its type, A.this.CX with A.this.C2, is volatile.
+ def castA(x: c.T): T2 = x;
+ ^
+one error found
diff --git a/test/files/neg/bug412.scala b/test/files/neg/t412.scala
similarity index 100%
rename from test/files/neg/bug412.scala
rename to test/files/neg/t412.scala
diff --git a/test/files/neg/t4134.check b/test/files/neg/t4134.check
new file mode 100644
index 0000000..35a1820
--- /dev/null
+++ b/test/files/neg/t4134.check
@@ -0,0 +1,4 @@
+t4134.scala:22: error: Member method f of mixin trait T2 is missing a concrete super implementation.
+class Konkret extends T3
+ ^
+one error found
diff --git a/test/files/neg/t4134.scala b/test/files/neg/t4134.scala
new file mode 100644
index 0000000..678e480
--- /dev/null
+++ b/test/files/neg/t4134.scala
@@ -0,0 +1,30 @@
+
+
+
+trait T1 {
+ def f: String
+}
+
+trait T2 extends T1 {
+ abstract override def f: String = "goo"
+ def something = super.f // So the "abstract override" is needed
+}
+
+trait Q1 {
+ def f: String = "bippy"
+}
+
+//trait T3 extends Q1 with T2 {
+trait T3 extends T2 with Q1 {
+ abstract override def f: String = super[Q1].f + " " + super[T2].f + " hoo"
+}
+
+class Konkret extends T3
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val k = new Konkret
+ println(k.f)
+ println(k.something)
+ }
+}
diff --git a/test/files/neg/t414.check b/test/files/neg/t414.check
new file mode 100644
index 0000000..30211ee
--- /dev/null
+++ b/test/files/neg/t414.check
@@ -0,0 +1,12 @@
+t414.scala:5: error: pattern type is incompatible with expected type;
+ found : Empty.type
+ required: IntMap[a]
+Note: if you intended to match against the class, try `case Empty()`
+ case Empty =>
+ ^
+t414.scala:7: error: type mismatch;
+ found : Unit
+ required: a
+ case _ =>
+ ^
+two errors found
diff --git a/test/files/neg/bug414.scala b/test/files/neg/t414.scala
similarity index 100%
rename from test/files/neg/bug414.scala
rename to test/files/neg/t414.scala
diff --git a/test/files/neg/t4158.check b/test/files/neg/t4158.check
new file mode 100644
index 0000000..3ee2627
--- /dev/null
+++ b/test/files/neg/t4158.check
@@ -0,0 +1,19 @@
+t4158.scala:3: error: type mismatch;
+ found : Null(null)
+ required: Int
+Note that implicit conversions are not applicable because they are ambiguous:
+ both method Integer2intNullConflict in class LowPriorityImplicits of type (x: Null)Int
+ and method Integer2int in object Predef of type (x: Integer)Int
+ are possible conversion functions from Null(null) to Int
+ var y = null: Int
+ ^
+t4158.scala:2: error: type mismatch;
+ found : Null(null)
+ required: Int
+Note that implicit conversions are not applicable because they are ambiguous:
+ both method Integer2intNullConflict in class LowPriorityImplicits of type (x: Null)Int
+ and method Integer2int in object Predef of type (x: Integer)Int
+ are possible conversion functions from Null(null) to Int
+ var x: Int = null
+ ^
+two errors found
diff --git a/test/files/neg/bug4158.scala b/test/files/neg/t4158.scala
similarity index 100%
rename from test/files/neg/bug4158.scala
rename to test/files/neg/t4158.scala
diff --git a/test/files/neg/t4163.check b/test/files/neg/t4163.check
index d275117..47bc78d 100644
--- a/test/files/neg/t4163.check
+++ b/test/files/neg/t4163.check
@@ -1,7 +1,7 @@
t4163.scala:4: error: '<-' expected but '=' found.
- x = 3
- ^
+ x = 3
+ ^
t4163.scala:5: error: illegal start of simple expression
- y <- 0 to 100
+ y <- 0 to 100
^
two errors found
diff --git a/test/files/neg/t4163.scala b/test/files/neg/t4163.scala
index 25ce552..4468673 100644
--- a/test/files/neg/t4163.scala
+++ b/test/files/neg/t4163.scala
@@ -1,8 +1,8 @@
class Bug {
val z = (
- for {
- x = 3
- y <- 0 to 100
- } yield y
- ).toArray
+ for {
+ x = 3
+ y <- 0 to 100
+ } yield y
+ ).toArray
}
diff --git a/test/files/neg/t4166.check b/test/files/neg/t4166.check
new file mode 100644
index 0000000..10b77d8
--- /dev/null
+++ b/test/files/neg/t4166.check
@@ -0,0 +1,4 @@
+t4166.scala:3: error: super constructor arguments cannot reference unconstructed `this`
+class Demo extends Base(new { Demo.this.toString }) {
+ ^
+one error found
diff --git a/test/files/neg/t4166.scala b/test/files/neg/t4166.scala
new file mode 100644
index 0000000..a2ee067
--- /dev/null
+++ b/test/files/neg/t4166.scala
@@ -0,0 +1,11 @@
+class Base(a: Any)
+
+class Demo extends Base(new { Demo.this.toString }) {
+ val x: Any = ()
+}
+
+
+class Demo2 extends Base(new { this.toString }) {
+ val x: Any = ()
+}
+
diff --git a/test/files/neg/t4174.check b/test/files/neg/t4174.check
new file mode 100644
index 0000000..914fcff
--- /dev/null
+++ b/test/files/neg/t4174.check
@@ -0,0 +1,4 @@
+t4174.scala:7: error: method bar overrides nothing
+ foo(new C { override def bar = 1 })
+ ^
+one error found
diff --git a/test/files/neg/bug4174.scala b/test/files/neg/t4174.scala
similarity index 100%
rename from test/files/neg/bug4174.scala
rename to test/files/neg/t4174.scala
diff --git a/test/files/neg/t418.check b/test/files/neg/t418.check
new file mode 100644
index 0000000..1489547
--- /dev/null
+++ b/test/files/neg/t418.check
@@ -0,0 +1,7 @@
+t418.scala:2: error: not found: value Foo12340771
+ null match { case Foo12340771.Bar(x) => x }
+ ^
+t418.scala:2: error: not found: value x
+ null match { case Foo12340771.Bar(x) => x }
+ ^
+two errors found
diff --git a/test/files/neg/bug418.scala b/test/files/neg/t418.scala
similarity index 100%
rename from test/files/neg/bug418.scala
rename to test/files/neg/t418.scala
diff --git a/test/files/neg/t4196.check b/test/files/neg/t4196.check
new file mode 100644
index 0000000..a058681
--- /dev/null
+++ b/test/files/neg/t4196.check
@@ -0,0 +1,4 @@
+t4196.scala:5: error: Some[String] does not take parameters
+ }.apply("first param") ("spurious param")
+ ^
+one error found
diff --git a/test/files/neg/bug4196.scala b/test/files/neg/t4196.scala
similarity index 100%
rename from test/files/neg/bug4196.scala
rename to test/files/neg/t4196.scala
diff --git a/test/files/neg/t421.check b/test/files/neg/t421.check
new file mode 100644
index 0000000..e81df52
--- /dev/null
+++ b/test/files/neg/t421.check
@@ -0,0 +1,4 @@
+t421.scala:5: error: star patterns must correspond with varargs parameters
+ case Bar("foo",_*) => error("huh?");
+ ^
+one error found
diff --git a/test/files/neg/bug421.scala b/test/files/neg/t421.scala
similarity index 100%
rename from test/files/neg/bug421.scala
rename to test/files/neg/t421.scala
diff --git a/test/files/neg/t4217.check b/test/files/neg/t4217.check
new file mode 100644
index 0000000..e8cd5fd
--- /dev/null
+++ b/test/files/neg/t4217.check
@@ -0,0 +1,4 @@
+t4217.scala:2: error: 'case' expected but '}' found.
+ 42 match { }
+ ^
+one error found
diff --git a/test/files/neg/bug4217.scala b/test/files/neg/t4217.scala
similarity index 100%
rename from test/files/neg/bug4217.scala
rename to test/files/neg/t4217.scala
diff --git a/test/files/neg/t4221.check b/test/files/neg/t4221.check
new file mode 100644
index 0000000..46c2d10
--- /dev/null
+++ b/test/files/neg/t4221.check
@@ -0,0 +1,6 @@
+t4221.scala:8: error: type mismatch;
+ found : Unit
+ required: Wrapper[S]
+ def wrap[S <: Cl#Sub[S]](v: S): Wrapper[S] = {
+ ^
+one error found
diff --git a/test/files/neg/bug4221.scala b/test/files/neg/t4221.scala
similarity index 100%
rename from test/files/neg/bug4221.scala
rename to test/files/neg/t4221.scala
diff --git a/test/files/neg/t425.check b/test/files/neg/t425.check
new file mode 100644
index 0000000..77ea0c5
--- /dev/null
+++ b/test/files/neg/t425.check
@@ -0,0 +1,4 @@
+t425.scala:3: error: case class B has case ancestor Temp.A, but case-to-case inheritance is prohibited. To overcome this limitation, use extractors to pattern match on non-leaf nodes.
+ case class B(override val x: Int, y: Double) extends A(x)
+ ^
+one error found
diff --git a/test/files/pos/t425.scala b/test/files/neg/t425.scala
similarity index 100%
rename from test/files/pos/t425.scala
rename to test/files/neg/t425.scala
diff --git a/test/files/neg/t4270.check b/test/files/neg/t4270.check
new file mode 100644
index 0000000..cfe0a93
--- /dev/null
+++ b/test/files/neg/t4270.check
@@ -0,0 +1,4 @@
+t4270.scala:5: error: could not find implicit value for parameter e: Int
+ implicitly[Int]
+ ^
+one error found
diff --git a/test/files/neg/t4270.scala b/test/files/neg/t4270.scala
new file mode 100644
index 0000000..2c7c71d
--- /dev/null
+++ b/test/files/neg/t4270.scala
@@ -0,0 +1,6 @@
+object Test1 {
+ object A { implicit val x: Int = 1 }
+ import A.x
+ def x: Int = 0
+ implicitly[Int]
+}
diff --git a/test/files/neg/t4271.check b/test/files/neg/t4271.check
new file mode 100644
index 0000000..91d9fbc
--- /dev/null
+++ b/test/files/neg/t4271.check
@@ -0,0 +1,10 @@
+t4271.scala:9: error: value to is not a member of Int
+ 3 to 5
+ ^
+t4271.scala:10: error: value ensuring is not a member of Int
+ 5 ensuring true
+ ^
+t4271.scala:11: error: value -> is not a member of Int
+ 3 -> 5
+ ^
+three errors found
diff --git a/test/files/neg/t4271.scala b/test/files/neg/t4271.scala
new file mode 100644
index 0000000..50526c8
--- /dev/null
+++ b/test/files/neg/t4271.scala
@@ -0,0 +1,12 @@
+object foo {
+ object Donotuseme
+ implicit def any2Ensuring[A](x: A) = Donotuseme
+ implicit def doubleWrapper(x: Int) = Donotuseme
+ implicit def floatWrapper(x: Int) = Donotuseme
+ implicit def intWrapper(x: Int) = Donotuseme
+ implicit def longWrapper(x: Int) = Donotuseme
+ implicit def any2ArrowAssoc[A](x: A) = Donotuseme
+ 3 to 5
+ 5 ensuring true
+ 3 -> 5
+}
diff --git a/test/files/neg/t4283b.check b/test/files/neg/t4283b.check
new file mode 100644
index 0000000..30d03a3
--- /dev/null
+++ b/test/files/neg/t4283b.check
@@ -0,0 +1,4 @@
+Test.scala:2: error: Unable to access method f in class AbstractFoo with a super reference.
+ override def f(): Int = super.f()
+ ^
+one error found
diff --git a/test/files/neg/t4283b/AbstractFoo.java b/test/files/neg/t4283b/AbstractFoo.java
new file mode 100644
index 0000000..7abcd5e
--- /dev/null
+++ b/test/files/neg/t4283b/AbstractFoo.java
@@ -0,0 +1,5 @@
+package test;
+
+/* package private */ class AbstractFoo {
+ public int f() { return 2; }
+}
diff --git a/test/pending/run/t4283/ScalaBipp.scala b/test/files/neg/t4283b/ScalaBipp.scala
similarity index 100%
copy from test/pending/run/t4283/ScalaBipp.scala
copy to test/files/neg/t4283b/ScalaBipp.scala
diff --git a/test/files/neg/t4283b/Test.scala b/test/files/neg/t4283b/Test.scala
new file mode 100644
index 0000000..0dc5636
--- /dev/null
+++ b/test/files/neg/t4283b/Test.scala
@@ -0,0 +1,3 @@
+object Derived extends test.ScalaBipp {
+ override def f(): Int = super.f()
+}
diff --git a/test/files/neg/t4302.check b/test/files/neg/t4302.check
new file mode 100644
index 0000000..450d28b
--- /dev/null
+++ b/test/files/neg/t4302.check
@@ -0,0 +1,4 @@
+t4302.scala:2: error: abstract type T is unchecked since it is eliminated by erasure
+ def hasMatch[T](x: AnyRef) = x.isInstanceOf[T]
+ ^
+one error found
diff --git a/test/files/neg/bug4302.flags b/test/files/neg/t4302.flags
similarity index 100%
copy from test/files/neg/bug4302.flags
copy to test/files/neg/t4302.flags
diff --git a/test/files/neg/bug4302.scala b/test/files/neg/t4302.scala
similarity index 100%
rename from test/files/neg/bug4302.scala
rename to test/files/neg/t4302.scala
diff --git a/test/files/neg/t4417.check b/test/files/neg/t4417.check
new file mode 100644
index 0000000..4e3f6c0
--- /dev/null
+++ b/test/files/neg/t4417.check
@@ -0,0 +1,7 @@
+t4417.scala:11: error: constructor Pixel$mcD$sp in class Pixel$mcD$sp cannot be accessed in object Pixel
+ Access to protected constructor Pixel$mcD$sp not permitted because
+ enclosing object Pixel is not a subclass of
+ class Pixel$mcD$sp where target is defined
+ def apply(v: Double): Pixel1d = new Pixel1d(v)
+ ^
+one error found
diff --git a/test/files/neg/t4417.scala b/test/files/neg/t4417.scala
new file mode 100644
index 0000000..7f104e5
--- /dev/null
+++ b/test/files/neg/t4417.scala
@@ -0,0 +1,17 @@
+
+
+
+
+class Pixel[@specialized T] protected (var v: T)
+
+
+object Pixel {
+ type Pixel1d = Pixel[Double]
+
+ def apply(v: Double): Pixel1d = new Pixel1d(v)
+}
+
+
+
+
+
diff --git a/test/files/neg/t4419.check b/test/files/neg/t4419.check
new file mode 100644
index 0000000..a53e0c9
--- /dev/null
+++ b/test/files/neg/t4419.check
@@ -0,0 +1,4 @@
+t4419.scala:2: error: forward reference extends over definition of value b
+ { val b = a; val a = 1 ; println(a) }
+ ^
+one error found
diff --git a/test/files/neg/t4419.scala b/test/files/neg/t4419.scala
new file mode 100644
index 0000000..5dc86d3
--- /dev/null
+++ b/test/files/neg/t4419.scala
@@ -0,0 +1,3 @@
+class A {
+ { val b = a; val a = 1 ; println(a) }
+}
\ No newline at end of file
diff --git a/test/files/neg/t4425.check b/test/files/neg/t4425.check
new file mode 100644
index 0000000..0f2fe6f
--- /dev/null
+++ b/test/files/neg/t4425.check
@@ -0,0 +1,4 @@
+t4425.scala:3: error: isInstanceOf cannot test if value types are references.
+ 42 match { case _ X _ => () }
+ ^
+one error found
diff --git a/test/files/neg/t4425.flags b/test/files/neg/t4425.flags
new file mode 100644
index 0000000..1182725
--- /dev/null
+++ b/test/files/neg/t4425.flags
@@ -0,0 +1 @@
+-optimize
\ No newline at end of file
diff --git a/test/files/neg/t4425.scala b/test/files/neg/t4425.scala
new file mode 100644
index 0000000..d8cc692
--- /dev/null
+++ b/test/files/neg/t4425.scala
@@ -0,0 +1,4 @@
+object Foo {
+ object X { def unapply(x : Int)(y : Option[Int] = None) = None }
+ 42 match { case _ X _ => () }
+}
diff --git a/test/files/neg/t4440.check b/test/files/neg/t4440.check
new file mode 100644
index 0000000..2861dc3
--- /dev/null
+++ b/test/files/neg/t4440.check
@@ -0,0 +1,13 @@
+t4440.scala:12: error: The outer reference in this type test cannot be checked at run time.
+ case _: b.Inner => println("b")
+ ^
+t4440.scala:13: error: The outer reference in this type test cannot be checked at run time.
+ case _: a.Inner => println("a") // this is the case we want
+ ^
+t4440.scala:16: error: The outer reference in this type test cannot be checked at run time.
+ case _: a.Inner => println("a")
+ ^
+t4440.scala:17: error: The outer reference in this type test cannot be checked at run time.
+ case _: b.Inner => println("b") // this is the case we want
+ ^
+four errors found
diff --git a/test/files/pos/bug1439.flags b/test/files/neg/t4440.flags
similarity index 100%
rename from test/files/pos/bug1439.flags
rename to test/files/neg/t4440.flags
diff --git a/test/files/neg/t4440.scala b/test/files/neg/t4440.scala
new file mode 100644
index 0000000..383b141
--- /dev/null
+++ b/test/files/neg/t4440.scala
@@ -0,0 +1,19 @@
+// constructors used to drop outer fields when they were not accessed
+// however, how can you know (respecting separate compilation) that they're not accessed!?
+class Outer { final class Inner }
+
+// the matches below require Inner's outer pointer
+// until SI-4440 is fixed properly, we can't make this a run test
+// in principle, the output should be "a\nb", but without outer checks it's "b\na"
+object Test extends App {
+ val a = new Outer
+ val b = new Outer
+ (new a.Inner: Any) match {
+ case _: b.Inner => println("b")
+ case _: a.Inner => println("a") // this is the case we want
+ }
+ (new b.Inner: Any) match {
+ case _: a.Inner => println("a")
+ case _: b.Inner => println("b") // this is the case we want
+ }
+}
diff --git a/test/files/neg/t4515.check b/test/files/neg/t4515.check
new file mode 100644
index 0000000..a60d162
--- /dev/null
+++ b/test/files/neg/t4515.check
@@ -0,0 +1,6 @@
+t4515.scala:37: error: type mismatch;
+ found : _0(in value $anonfun) where type _0(in value $anonfun)
+ required: (some other)_0(in value $anonfun)
+ handler.onEvent(target, ctx.getEvent, node, ctx)
+ ^
+one error found
diff --git a/test/files/neg/t4515.scala b/test/files/neg/t4515.scala
new file mode 100644
index 0000000..63049f2
--- /dev/null
+++ b/test/files/neg/t4515.scala
@@ -0,0 +1,41 @@
+import scala.collection.mutable.HashMap
+
+object Main {
+ trait Target { }
+
+ trait PushEventContext[EventType] {
+ def getEvent: EventType
+ }
+ trait PushNode[EventType] { }
+ trait DerivedPushNode[EventType] extends PushNode[EventType] { }
+
+ trait HandlerBase[EventType] {
+ def onEvent(target: Target,
+ event: EventType,
+ node: PushNode[EventType],
+ ctx: PushEventContext[EventType]): Unit
+ }
+ val handlers = new HashMap[DerivedPushNode[_], HandlerBase[_]]
+
+ object TimerPushService {
+ private val INSTANCE: TimerPushService = new TimerPushService
+ def get: TimerPushService = INSTANCE
+ }
+
+ class TimerPushService {
+ def add[EventType](node: DerivedPushNode[EventType],
+ context: PushEventContext[EventType]): Unit = {}
+
+ def pollEvents[EventType](node: DerivedPushNode[EventType]): List[PushEventContext[EventType]] =
+ Nil
+ }
+
+ def onTimer(target: Target) {
+ val pushService = TimerPushService.get
+ for ((node, handler) <- handlers) {
+ for (ctx <- pushService.pollEvents(node)) {
+ handler.onEvent(target, ctx.getEvent, node, ctx)
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t452.check b/test/files/neg/t452.check
new file mode 100644
index 0000000..aac6630
--- /dev/null
+++ b/test/files/neg/t452.check
@@ -0,0 +1,6 @@
+t452.scala:3: error: type mismatch;
+ found : Test.type
+ required: Test.Foo
+ def this() = this(this);
+ ^
+one error found
diff --git a/test/files/neg/bug452.scala b/test/files/neg/t452.scala
similarity index 100%
rename from test/files/neg/bug452.scala
rename to test/files/neg/t452.scala
diff --git a/test/files/neg/t4537.check b/test/files/neg/t4537.check
new file mode 100644
index 0000000..931bcd0
--- /dev/null
+++ b/test/files/neg/t4537.check
@@ -0,0 +1,4 @@
+c.scala:7: error: object Settings in package a cannot be accessed in package a
+ println(Settings.Y)
+ ^
+one error found
diff --git a/test/files/neg/t4537/a.scala b/test/files/neg/t4537/a.scala
new file mode 100644
index 0000000..65e183c
--- /dev/null
+++ b/test/files/neg/t4537/a.scala
@@ -0,0 +1,5 @@
+package a
+
+private[a] object Settings {
+ val X = 0
+}
\ No newline at end of file
diff --git a/test/files/neg/t4537/b.scala b/test/files/neg/t4537/b.scala
new file mode 100644
index 0000000..bb9dd4e
--- /dev/null
+++ b/test/files/neg/t4537/b.scala
@@ -0,0 +1,5 @@
+package b
+
+object Settings {
+ val Y = 0
+}
\ No newline at end of file
diff --git a/test/files/neg/t4537/c.scala b/test/files/neg/t4537/c.scala
new file mode 100644
index 0000000..3795991
--- /dev/null
+++ b/test/files/neg/t4537/c.scala
@@ -0,0 +1,8 @@
+package b
+package c
+
+import a._
+
+object Test {
+ println(Settings.Y)
+}
\ No newline at end of file
diff --git a/test/files/neg/t4541.check b/test/files/neg/t4541.check
new file mode 100644
index 0000000..7bd8ff7
--- /dev/null
+++ b/test/files/neg/t4541.check
@@ -0,0 +1,7 @@
+t4541.scala:11: error: variable data in class Sparse cannot be accessed in Sparse[Int]
+ Access to protected method data not permitted because
+ prefix type Sparse[Int] does not conform to
+ class Sparse$mcI$sp where the access take place
+ that.data
+ ^
+one error found
diff --git a/test/files/neg/t4541.scala b/test/files/neg/t4541.scala
new file mode 100644
index 0000000..744af1c
--- /dev/null
+++ b/test/files/neg/t4541.scala
@@ -0,0 +1,16 @@
+
+
+
+
+
+
+ at SerialVersionUID(1L)
+final class Sparse[@specialized(Int) T](d: Array[T]) extends Serializable {
+ protected var data: Array[T] = d
+ def set(that: Sparse[T]) = {
+ that.data
+ }
+}
+
+
+
diff --git a/test/files/neg/t4541b.check b/test/files/neg/t4541b.check
new file mode 100644
index 0000000..8a52fd9
--- /dev/null
+++ b/test/files/neg/t4541b.check
@@ -0,0 +1,7 @@
+t4541b.scala:13: error: variable data in class SparseArray cannot be accessed in SparseArray[Int]
+ Access to protected method data not permitted because
+ prefix type SparseArray[Int] does not conform to
+ class SparseArray$mcI$sp where the access take place
+ use(that.data.clone)
+ ^
+one error found
diff --git a/test/files/neg/t4541b.scala b/test/files/neg/t4541b.scala
new file mode 100644
index 0000000..7a21ffc
--- /dev/null
+++ b/test/files/neg/t4541b.scala
@@ -0,0 +1,16 @@
+
+
+
+
+
+ at SerialVersionUID(1L)
+final class SparseArray[@specialized(Int) T](private var data: Array[T]) extends Serializable {
+ def use(inData: Array[T]) = {
+ data = inData;
+ }
+
+ def set(that: SparseArray[T]) = {
+ use(that.data.clone)
+ }
+}
+
diff --git a/test/files/neg/t4568.check b/test/files/neg/t4568.check
new file mode 100644
index 0000000..f94d699
--- /dev/null
+++ b/test/files/neg/t4568.check
@@ -0,0 +1,4 @@
+t4568.scala:8: error: recursive method isSubListOf needs result type
+ case h :: t => y.contains(h) && (t.isSubListOf(y.drop(y.indexOf(h) + 1)))
+ ^
+one error found
diff --git a/test/files/neg/t4568.scala b/test/files/neg/t4568.scala
new file mode 100644
index 0000000..8067759
--- /dev/null
+++ b/test/files/neg/t4568.scala
@@ -0,0 +1,13 @@
+object SubList {
+ implicit def sublistable[A](x: List[A]) = new SubListable(x)
+
+ class SubListable[A](x: List[A]) {
+ def isSubListOf(y: List[A]) = {
+ x match {
+ case Nil => true
+ case h :: t => y.contains(h) && (t.isSubListOf(y.drop(y.indexOf(h) + 1)))
+ }
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/test/files/neg/t4584.check b/test/files/neg/t4584.check
new file mode 100644
index 0000000..419f570
--- /dev/null
+++ b/test/files/neg/t4584.check
@@ -0,0 +1,7 @@
+t4584.scala:1: error: error in unicode escape
+class A { val /u2
+ ^
+t4584.scala:1: error: illegal character '/uffff'
+class A { val /u2
+ ^
+two errors found
diff --git a/test/files/neg/bug4584.scala b/test/files/neg/t4584.scala
similarity index 100%
rename from test/files/neg/bug4584.scala
rename to test/files/neg/t4584.scala
diff --git a/test/files/neg/t464-neg.check b/test/files/neg/t464-neg.check
new file mode 100644
index 0000000..e822e7f
--- /dev/null
+++ b/test/files/neg/t464-neg.check
@@ -0,0 +1,16 @@
+t464-neg.scala:7: error: not found: value f1
+ f1()
+ ^
+t464-neg.scala:8: error: method f1 in class A cannot be accessed in A
+ super.f1()
+ ^
+t464-neg.scala:9: error: value f2 is not a member of B
+ def otherb(b2: B) = b2.f2()
+ ^
+t464-neg.scala:10: error: method f3 in class A cannot be accessed in B
+ f3()
+ ^
+t464-neg.scala:11: error: method f3 in class A cannot be accessed in A
+ super.f3()
+ ^
+5 errors found
diff --git a/test/files/neg/bug464-neg.scala b/test/files/neg/t464-neg.scala
similarity index 100%
rename from test/files/neg/bug464-neg.scala
rename to test/files/neg/t464-neg.scala
diff --git a/test/files/neg/t4691_exhaust_extractor.check b/test/files/neg/t4691_exhaust_extractor.check
new file mode 100644
index 0000000..cd12e56
--- /dev/null
+++ b/test/files/neg/t4691_exhaust_extractor.check
@@ -0,0 +1,13 @@
+t4691_exhaust_extractor.scala:17: error: match may not be exhaustive.
+It would fail on the following input: Bar3()
+ def f1(x: Foo) = x match {
+ ^
+t4691_exhaust_extractor.scala:23: error: match may not be exhaustive.
+It would fail on the following input: Bar3()
+ def f2(x: Foo) = x match {
+ ^
+t4691_exhaust_extractor.scala:29: error: match may not be exhaustive.
+It would fail on the following input: Bar3()
+ def f3(x: Foo) = x match {
+ ^
+three errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/t4691_exhaust_extractor.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/t4691_exhaust_extractor.flags
diff --git a/test/files/neg/t4691_exhaust_extractor.scala b/test/files/neg/t4691_exhaust_extractor.scala
new file mode 100644
index 0000000..c68c33d
--- /dev/null
+++ b/test/files/neg/t4691_exhaust_extractor.scala
@@ -0,0 +1,33 @@
+sealed trait Foo
+class Bar1 extends Foo
+class Bar2 extends Foo
+class Bar3 extends Foo
+
+// these extractors are known to always succeed as they return a Some
+object Baz1 {
+ def unapply(x: Bar1): Some[Int] = Some(1)
+}
+object Baz2 {
+ def unapply(x: Bar2): Some[Int] = Some(2)
+}
+
+
+object Test {
+ // warning: missing Bar3
+ def f1(x: Foo) = x match {
+ case _: Bar1 => 1
+ case _: Bar2 => 2
+ }
+
+ // warning: missing Bar3
+ def f2(x: Foo) = x match {
+ case _: Bar1 => 1
+ case Baz2(x) => x
+ }
+
+ // warning: missing Bar3
+ def f3(x: Foo) = x match {
+ case Baz1(x) => x
+ case Baz2(x) => x
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t4727.check b/test/files/neg/t4727.check
new file mode 100644
index 0000000..8a4536f
--- /dev/null
+++ b/test/files/neg/t4727.check
@@ -0,0 +1,11 @@
+t4727.scala:5: error: type mismatch;
+ found : Null
+ required: Int
+Note that implicit conversions are not applicable because they are ambiguous:
+ both method Integer2intNullConflict in class LowPriorityImplicits of type (x: Null)Int
+ and method Integer2int in object Predef of type (x: Integer)Int
+ are possible conversion functions from Null to Int
+Error occurred in an application involving default arguments.
+ new C[Int]
+ ^
+one error found
diff --git a/test/files/neg/bug4727.scala b/test/files/neg/t4727.scala
similarity index 100%
rename from test/files/neg/bug4727.scala
rename to test/files/neg/t4727.scala
diff --git a/test/files/neg/t473.check b/test/files/neg/t473.check
new file mode 100644
index 0000000..a14222c
--- /dev/null
+++ b/test/files/neg/t473.check
@@ -0,0 +1,4 @@
+t473.scala:3: error: super constructor cannot be passed a self reference unless parameter is declared by-name
+case object Voop extends Foo(Voop)
+ ^
+one error found
diff --git a/test/files/neg/bug473.scala b/test/files/neg/t473.scala
similarity index 100%
rename from test/files/neg/bug473.scala
rename to test/files/neg/t473.scala
diff --git a/test/files/neg/t4749.check b/test/files/neg/t4749.check
new file mode 100644
index 0000000..93ad393
--- /dev/null
+++ b/test/files/neg/t4749.check
@@ -0,0 +1,28 @@
+t4749.scala:2: error: Fail1 has a main method with parameter type Array[String], but bippy.Fail1 will not be a runnable program.
+ Reason: main method must have exact signature (Array[String])Unit
+ object Fail1 {
+ ^
+t4749.scala:6: error: Fail2 has a main method with parameter type Array[String], but bippy.Fail2 will not be a runnable program.
+ Reason: main methods cannot be generic.
+ object Fail2 {
+ ^
+t4749.scala:13: error: Fail3 has a main method with parameter type Array[String], but bippy.Fail3 will not be a runnable program.
+ Reason: main methods cannot refer to type parameters or abstract types.
+ object Fail3 extends Bippy[Unit] { }
+ ^
+t4749.scala:16: error: Fail4 has a main method with parameter type Array[String], but bippy.Fail4 will not be a runnable program.
+ Reason: companion is a trait, which means no static forwarder can be generated.
+
+ object Fail4 {
+ ^
+t4749.scala:21: error: Fail5 has a main method with parameter type Array[String], but bippy.Fail5 will not be a runnable program.
+ Reason: companion contains its own main method, which means no static forwarder can be generated.
+
+ object Fail5 extends Fail5 { }
+ ^
+t4749.scala:26: error: Fail6 has a main method with parameter type Array[String], but bippy.Fail6 will not be a runnable program.
+ Reason: companion contains its own main method (implementation restriction: no main is allowed, regardless of signature), which means no static forwarder can be generated.
+
+ object Fail6 {
+ ^
+6 errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/t4749.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/t4749.flags
diff --git a/test/files/neg/t4749.scala b/test/files/neg/t4749.scala
new file mode 100644
index 0000000..0973c36
--- /dev/null
+++ b/test/files/neg/t4749.scala
@@ -0,0 +1,44 @@
+package bippy {
+ object Fail1 {
+ def main(args: Array[String]): Any = ()
+ }
+
+ object Fail2 {
+ def main[T](args: Array[String]): T = null.asInstanceOf[T]
+ }
+
+ abstract class Bippy[T] {
+ def main(args: Array[String]): T = null.asInstanceOf[T]
+ }
+ object Fail3 extends Bippy[Unit] { }
+
+
+ object Fail4 {
+ def main(args: Array[String]): Unit = ()
+ }
+ trait Fail4 { }
+
+ object Fail5 extends Fail5 { }
+ class Fail5 {
+ def main(args: Array[String]): Unit = ()
+ }
+
+ object Fail6 {
+ def main(args: Array[String]): Unit = ()
+ }
+ class Fail6 {
+ def main = "bippy"
+ }
+
+ object Win1 {
+ def main(args: Array[String]): Unit = ()
+ }
+ object Win2 extends Bippy[Unit] {
+ override def main(args: Array[String]): Unit = ()
+ }
+ trait WinBippy[T] {
+ def main(args: Array[String]): T = null.asInstanceOf[T]
+ }
+ object Win3 extends WinBippy[Unit] { }
+}
+
diff --git a/test/files/neg/t4762.check b/test/files/neg/t4762.check
new file mode 100644
index 0000000..5e67f20
--- /dev/null
+++ b/test/files/neg/t4762.check
@@ -0,0 +1,7 @@
+t4762.scala:15: error: private[this] value x in class B shadows mutable x inherited from class A. Changes to x will not be visible within class B - you may want to give them distinct names.
+ /* (99,99) */ (this.x, this.y),
+ ^
+t4762.scala:48: error: private[this] value x in class Derived shadows mutable x inherited from class Base. Changes to x will not be visible within class Derived - you may want to give them distinct names.
+ class Derived( x : Int ) extends Base( x ) { override def toString = x.toString }
+ ^
+two errors found
diff --git a/test/files/neg/t4762.flags b/test/files/neg/t4762.flags
new file mode 100644
index 0000000..e93641e
--- /dev/null
+++ b/test/files/neg/t4762.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t4762.scala b/test/files/neg/t4762.scala
new file mode 100644
index 0000000..8757f4a
--- /dev/null
+++ b/test/files/neg/t4762.scala
@@ -0,0 +1,51 @@
+// https://issues.scala-lang.org/browse/SI-4762
+
+// In A, x and y are -1.
+class A(var x: Int) {
+ val y: Int = -1
+}
+
+// In B, x and y are 99 and private[this], implicitly so
+// in the case of x.
+class B(x: Int) extends A(-1) {
+ private[this] def y: Int = 99
+
+ // Three distinct results.
+ def f = List(
+ /* (99,99) */ (this.x, this.y),
+ /* (-1,99) */ ((this: B).x, (this: B).y),
+ /* (-1,-1) */ ((this: A).x, (this: A).y)
+ )
+
+ // The 99s tell us we are reading the private[this]
+ // data of a different instance.
+ def g(b: B) = List(
+ /* (-1,99) */ (b.x, b.y),
+ /* (-1,99) */ ((b: B).x, (b: B).y),
+ /* (-1,-1) */ ((b: A).x, (b: A).y)
+ )
+}
+
+object Test {
+ def f(x: A) = /* -2 */ x.x + x.y
+ def g1(x: B) = /* -2 */ (x: A).x + (x: A).y
+ def g2(x: B) = (x: B).x + (x: B).y
+ // java.lang.IllegalAccessError: tried to access method B.y()I from class Test$
+
+ def main(args: Array[String]): Unit = {
+ val b = new B(99)
+ b.f foreach println
+ b.g(new B(99)) foreach println
+
+ println(f(b))
+ println(g1(b))
+ println(g2(b))
+ }
+}
+
+class bug4762 {
+ class Base( var x : Int ) { def increment() { x = x + 1 } }
+ class Derived( x : Int ) extends Base( x ) { override def toString = x.toString }
+
+ val derived = new Derived( 1 )
+}
diff --git a/test/files/neg/t4818.check b/test/files/neg/t4818.check
new file mode 100644
index 0000000..a5e15e4
--- /dev/null
+++ b/test/files/neg/t4818.check
@@ -0,0 +1,6 @@
+t4818.scala:4: error: type mismatch;
+ found : Int(5)
+ required: Nothing
+ def f(x: Any) = x match { case Fn(f) => f(5) }
+ ^
+one error found
diff --git a/test/files/neg/t4818.scala b/test/files/neg/t4818.scala
new file mode 100644
index 0000000..faae229
--- /dev/null
+++ b/test/files/neg/t4818.scala
@@ -0,0 +1,7 @@
+object Test {
+ case class Fn[A, B](f: A => B)
+
+ def f(x: Any) = x match { case Fn(f) => f(5) }
+
+ Fn((x: String) => x)
+}
diff --git a/test/files/neg/t4831.check b/test/files/neg/t4831.check
new file mode 100644
index 0000000..3b8b836
--- /dev/null
+++ b/test/files/neg/t4831.check
@@ -0,0 +1,7 @@
+t4831.scala:10: error: reference to b is ambiguous;
+it is imported twice in the same scope by
+import O.b
+and import O.{a=>b}
+ println(b)
+ ^
+one error found
diff --git a/test/files/neg/t4831.scala b/test/files/neg/t4831.scala
new file mode 100644
index 0000000..82346ec
--- /dev/null
+++ b/test/files/neg/t4831.scala
@@ -0,0 +1,11 @@
+object O {
+ val a = 0
+ val b = 1
+}
+
+import O.{a => b}
+import O.b
+
+object test {
+ println(b)
+}
diff --git a/test/files/neg/t4842.check b/test/files/neg/t4842.check
new file mode 100644
index 0000000..b53bbdb
--- /dev/null
+++ b/test/files/neg/t4842.check
@@ -0,0 +1,7 @@
+t4842.scala:2: error: self constructor arguments cannot reference unconstructed `this`
+ def this(x: Int) = this(new { println(Foo.this)}) // error
+ ^
+t4842.scala:6: error: self constructor arguments cannot reference unconstructed `this`
+ def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error
+ ^
+two errors found
diff --git a/test/files/neg/t4842.scala b/test/files/neg/t4842.scala
new file mode 100644
index 0000000..c6244ef
--- /dev/null
+++ b/test/files/neg/t4842.scala
@@ -0,0 +1,7 @@
+class Foo (x: AnyRef) {
+ def this(x: Int) = this(new { println(Foo.this)}) // error
+}
+
+class TypeArg[X](val x: X)(a: AnyRef) {
+ def this() = { this(???)(new { println(TypeArg.this.x) } ); println("next") } // error
+}
diff --git a/test/files/neg/t4851.check b/test/files/neg/t4851.check
new file mode 100644
index 0000000..9633fdf
--- /dev/null
+++ b/test/files/neg/t4851.check
@@ -0,0 +1,49 @@
+S.scala:2: error: Adapting argument list by inserting (): leaky (Object-receiving) target makes this especially dangerous.
+ signature: J(x: Any): J
+ given arguments: <none>
+ after adaptation: new J((): Unit)
+ val x1 = new J
+ ^
+S.scala:3: error: Adapting argument list by inserting (): leaky (Object-receiving) target makes this especially dangerous.
+ signature: J(x: Any): J
+ given arguments: <none>
+ after adaptation: new J((): Unit)
+ val x2 = new J()
+ ^
+S.scala:4: error: Adapting argument list by creating a 5-tuple: this may not be what you want.
+ signature: J(x: Any): J
+ given arguments: 1, 2, 3, 4, 5
+ after adaptation: new J((1, 2, 3, 4, 5): (Int, Int, Int, Int, Int))
+ val x3 = new J(1, 2, 3, 4, 5)
+ ^
+S.scala:6: error: Adapting argument list by creating a 3-tuple: this may not be what you want.
+ signature: Some.apply[A](x: A): Some[A]
+ given arguments: 1, 2, 3
+ after adaptation: Some((1, 2, 3): (Int, Int, Int))
+ val y1 = Some(1, 2, 3)
+ ^
+S.scala:7: error: Adapting argument list by creating a 3-tuple: this may not be what you want.
+ signature: Some(x: A): Some[A]
+ given arguments: 1, 2, 3
+ after adaptation: new Some((1, 2, 3): (Int, Int, Int))
+ val y2 = new Some(1, 2, 3)
+ ^
+S.scala:9: error: Adapting argument list by inserting (): this is unlikely to be what you want.
+ signature: J2[T](x: T): J2[T]
+ given arguments: <none>
+ after adaptation: new J2((): Unit)
+ val z1 = new J2
+ ^
+S.scala:10: error: Adapting argument list by inserting (): this is unlikely to be what you want.
+ signature: J2[T](x: T): J2[T]
+ given arguments: <none>
+ after adaptation: new J2((): Unit)
+ val z2 = new J2()
+ ^
+S.scala:14: error: Adapting argument list by creating a 3-tuple: this may not be what you want.
+ signature: Test.anyId(a: Any): Any
+ given arguments: 1, 2, 3
+ after adaptation: Test.anyId((1, 2, 3): (Int, Int, Int))
+ val w1 = anyId(1, 2 ,3)
+ ^
+8 errors found
diff --git a/test/files/neg/t4851.flags b/test/files/neg/t4851.flags
new file mode 100644
index 0000000..0545cb8
--- /dev/null
+++ b/test/files/neg/t4851.flags
@@ -0,0 +1 @@
+-Ywarn-adapted-args -Xfatal-warnings
diff --git a/test/files/neg/t4851/J.java b/test/files/neg/t4851/J.java
new file mode 100644
index 0000000..dbf8b82
--- /dev/null
+++ b/test/files/neg/t4851/J.java
@@ -0,0 +1,15 @@
+public class J {
+ Object x;
+
+ public J(Object x) {
+ this.x = x;
+ }
+
+ public J(int x1, int x2, int x3, int x4, int x5, int x6) {
+ this.x = null;
+ }
+
+ public String toString() {
+ return "J:" + x.getClass();
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t4851/J2.java b/test/files/neg/t4851/J2.java
new file mode 100644
index 0000000..c3a7231
--- /dev/null
+++ b/test/files/neg/t4851/J2.java
@@ -0,0 +1,11 @@
+public class J2<T> {
+ T x;
+
+ public <T> J(T x) {
+ this.x = x;
+ }
+
+ public String toString() {
+ return "J2:" + x.getClass();
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t4851/S.scala b/test/files/neg/t4851/S.scala
new file mode 100644
index 0000000..0a442ac
--- /dev/null
+++ b/test/files/neg/t4851/S.scala
@@ -0,0 +1,28 @@
+object Test {
+ val x1 = new J
+ val x2 = new J()
+ val x3 = new J(1, 2, 3, 4, 5)
+
+ val y1 = Some(1, 2, 3)
+ val y2 = new Some(1, 2, 3)
+
+ val z1 = new J2
+ val z2 = new J2()
+ val z3 = new J2(())
+
+ def anyId(a: Any) = a
+ val w1 = anyId(1, 2 ,3)
+
+ def main(args: Array[String]): Unit = {
+ println(x1)
+ println(x2)
+ println(x3)
+ println(y1)
+
+ println(z1)
+ println(z2)
+ println(z3)
+
+ println(w1)
+ }
+}
diff --git a/test/files/neg/t4877.check b/test/files/neg/t4877.check
new file mode 100644
index 0000000..5a2413c
--- /dev/null
+++ b/test/files/neg/t4877.check
@@ -0,0 +1,22 @@
+t4877.scala:4: error: type mismatch;
+ found : AnyRef{def bar: Int}
+ required: AnyRef{def bar: String}
+ def foo: AnyRef { def bar: String } = new AnyRef { def bar = 42 }
+ ^
+t4877.scala:6: error: type mismatch;
+ found : AnyRef{def bar(x: Int): String}
+ required: AnyRef{def bar(x: Int): Int}
+ def foo3: AnyRef { def bar(x: Int): Int } = new AnyRef { def bar(x: Int) = "abc" }
+ ^
+t4877.scala:7: error: type mismatch;
+ found : C{def bar(x: Int): Int}
+ required: C{def bar(x: Int): Int; def quux(x: Int): Int}
+ def foo4: C { def bar(x: Int): Int ; def quux(x: Int): Int } = new C { def bar(x: Int) = 5 }
+ ^
+t4877.scala:17: error: type mismatch;
+ found : AnyRef{type Mom = String; def bar(x: Int): Int; def bippy(): List[Int]}
+ required: B.this.Bippy
+ (which expands to) AnyRef{type Mom; def bar(x: Int): this.Mom; def bippy(): List[this.Mom]}
+ val x: Bippy = new AnyRef {
+ ^
+four errors found
diff --git a/test/files/neg/t4877.flags b/test/files/neg/t4877.flags
new file mode 100644
index 0000000..7ccd561
--- /dev/null
+++ b/test/files/neg/t4877.flags
@@ -0,0 +1 @@
+-Xlint
\ No newline at end of file
diff --git a/test/files/neg/t4877.scala b/test/files/neg/t4877.scala
new file mode 100644
index 0000000..9cad156
--- /dev/null
+++ b/test/files/neg/t4877.scala
@@ -0,0 +1,22 @@
+trait C { }
+
+class A {
+ def foo: AnyRef { def bar: String } = new AnyRef { def bar = 42 }
+ def foo2: AnyRef { def bar: String } = new AnyRef { def bar = "abc" }
+ def foo3: AnyRef { def bar(x: Int): Int } = new AnyRef { def bar(x: Int) = "abc" }
+ def foo4: C { def bar(x: Int): Int ; def quux(x: Int): Int } = new C { def bar(x: Int) = 5 }
+}
+
+class B {
+ type Bippy = {
+ type Mom
+ def bar(x: Int): Mom
+ def bippy(): List[Mom]
+ }
+
+ val x: Bippy = new AnyRef {
+ type Mom = String
+ def bar(x: Int) = 55
+ def bippy() = List(bar(55))
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t4879.check b/test/files/neg/t4879.check
new file mode 100644
index 0000000..c7edd58
--- /dev/null
+++ b/test/files/neg/t4879.check
@@ -0,0 +1,13 @@
+t4879.scala:6: error: pattern type is incompatible with expected type;
+ found : C.type
+ required: C
+Note: if you intended to match against the class, try `case C(_)`
+ case C => true
+ ^
+t4879.scala:10: error: pattern type is incompatible with expected type;
+ found : D.type
+ required: D[T,U,V]
+Note: if you intended to match against the class, try `case D(_,_,_)`
+ case D => true
+ ^
+two errors found
diff --git a/test/files/neg/t4879.scala b/test/files/neg/t4879.scala
new file mode 100644
index 0000000..7d6561e
--- /dev/null
+++ b/test/files/neg/t4879.scala
@@ -0,0 +1,15 @@
+case class C(d: Double) { }
+case class D[T, U, V](bingo: Int, donkey: String, private val vegas: Set[A])(jehovah: Int) { }
+
+class A {
+ def f = (new C(5)) match {
+ case C => true
+ case _ => false
+ }
+ def g[T, U, V](x: D[T, U, V]) = x match {
+ case D => true
+ case _ => false
+ }
+}
+
+
diff --git a/test/files/neg/t4882.check b/test/files/neg/t4882.check
new file mode 100644
index 0000000..0aafc82
--- /dev/null
+++ b/test/files/neg/t4882.check
@@ -0,0 +1,4 @@
+t4882.scala:2: error: `implicit' modifier not allowed for constructors
+ implicit def this(a: String) = this(a.toInt)
+ ^
+one error found
diff --git a/test/files/neg/t4882.scala b/test/files/neg/t4882.scala
new file mode 100644
index 0000000..4e58ef7
--- /dev/null
+++ b/test/files/neg/t4882.scala
@@ -0,0 +1,3 @@
+class Foo(value: Int) {
+ implicit def this(a: String) = this(a.toInt)
+}
diff --git a/test/files/neg/t4928.check b/test/files/neg/t4928.check
new file mode 100644
index 0000000..06d4f22
--- /dev/null
+++ b/test/files/neg/t4928.check
@@ -0,0 +1,5 @@
+t4928.scala:3: error: parameter 'a' is already specified at parameter position 1
+Note that that 'z' is not a parameter name of the invoked method.
+ f(z = 0, a = 1)
+ ^
+one error found
diff --git a/test/files/neg/t4928.scala b/test/files/neg/t4928.scala
new file mode 100644
index 0000000..17a5980
--- /dev/null
+++ b/test/files/neg/t4928.scala
@@ -0,0 +1,4 @@
+class C {
+ def f(a: Int, b: Int = 0) = 0
+ f(z = 0, a = 1)
+}
diff --git a/test/files/neg/t4987.check b/test/files/neg/t4987.check
new file mode 100644
index 0000000..8d7344d
--- /dev/null
+++ b/test/files/neg/t4987.check
@@ -0,0 +1,4 @@
+t4987.scala:2: error: constructor Foo2 in class Foo2 cannot be accessed in object Bar2
+object Bar2 { new Foo2(0, 0) }
+ ^
+one error found
diff --git a/test/files/neg/t4987.scala b/test/files/neg/t4987.scala
new file mode 100644
index 0000000..e55acd4
--- /dev/null
+++ b/test/files/neg/t4987.scala
@@ -0,0 +1,2 @@
+class Foo2 private (a: Int, b: Int)
+object Bar2 { new Foo2(0, 0) }
diff --git a/test/files/neg/t4989.check b/test/files/neg/t4989.check
new file mode 100644
index 0000000..814507f
--- /dev/null
+++ b/test/files/neg/t4989.check
@@ -0,0 +1,7 @@
+t4989.scala:14: error: method print in class A cannot be directly accessed from class C because class B redeclares it as abstract
+ override def print(): String = super.print() // should be an error
+ ^
+t4989.scala:18: error: method print in class A cannot be directly accessed from trait T because class B redeclares it as abstract
+ override def print(): String = super.print() // should be an error
+ ^
+two errors found
diff --git a/test/files/neg/t4989.scala b/test/files/neg/t4989.scala
new file mode 100644
index 0000000..e7ff80e
--- /dev/null
+++ b/test/files/neg/t4989.scala
@@ -0,0 +1,68 @@
+abstract class A0 {
+ def print(): String
+}
+
+class A extends A0 {
+ def print(): String = "A"
+}
+
+abstract class B extends A {
+ def print() : String
+}
+
+class C extends B {
+ override def print(): String = super.print() // should be an error
+}
+
+trait T extends B {
+ override def print(): String = super.print() // should be an error
+}
+
+class D extends A {
+ override def print(): String = super.print() // okay
+}
+
+
+// it's okay do this when trait are in the mix, as the
+// suitable super accessor methods are used.
+object ConcreteMethodAndIntermediaryAreTraits {
+ trait T1 {
+ def print(): String = ""
+ }
+
+ trait T2 extends T1 {
+ def print(): String
+ }
+
+ class C3 extends T2 {
+ def print(): String = super.print() // okay
+ }
+}
+
+object IntermediaryIsTrait {
+ class T1 {
+ def print(): String = ""
+ }
+
+ trait T2 extends T1 {
+ def print(): String
+ }
+
+ class C3 extends T2 {
+ override def print(): String = super.print() // okay
+ }
+}
+
+object ConcreteMethodIsTrait {
+ trait T1 {
+ def print(): String = ""
+ }
+
+ abstract class T2 extends T1 {
+ def print(): String
+ }
+
+ class C3 extends T2 {
+ override def print(): String = super.print() // okay
+ }
+}
diff --git a/test/files/neg/t500.check b/test/files/neg/t500.check
new file mode 100644
index 0000000..b3f5c85
--- /dev/null
+++ b/test/files/neg/t500.check
@@ -0,0 +1,4 @@
+t500.scala:3: error: lower bound X does not conform to upper bound Y
+ type T >: X <: Y;
+ ^
+one error found
diff --git a/test/files/neg/bug500.scala b/test/files/neg/t500.scala
similarity index 100%
rename from test/files/neg/bug500.scala
rename to test/files/neg/t500.scala
diff --git a/test/files/neg/t501.check b/test/files/neg/t501.check
new file mode 100644
index 0000000..3e3bf39
--- /dev/null
+++ b/test/files/neg/t501.check
@@ -0,0 +1,4 @@
+t501.scala:3: error: lower bound X does not conform to upper bound Y
+ abstract class I { type T >: X <: Y; }
+ ^
+one error found
diff --git a/test/files/neg/bug501.scala b/test/files/neg/t501.scala
similarity index 100%
rename from test/files/neg/bug501.scala
rename to test/files/neg/t501.scala
diff --git a/test/files/neg/t5031.check b/test/files/neg/t5031.check
new file mode 100644
index 0000000..2f1090c
--- /dev/null
+++ b/test/files/neg/t5031.check
@@ -0,0 +1,5 @@
+package.scala:2: error: Companions 'class Test' and 'object Test' must be defined in same file:
+ Found in t5031/package.scala and t5031/Id.scala
+ class Test
+ ^
+one error found
diff --git a/test/files/neg/t5031/Id.scala b/test/files/neg/t5031/Id.scala
new file mode 100644
index 0000000..2f0db00
--- /dev/null
+++ b/test/files/neg/t5031/Id.scala
@@ -0,0 +1,4 @@
+package t5031
+
+object Test
+
diff --git a/test/files/neg/t5031/package.scala b/test/files/neg/t5031/package.scala
new file mode 100644
index 0000000..17b6322
--- /dev/null
+++ b/test/files/neg/t5031/package.scala
@@ -0,0 +1,3 @@
+package object t5031 {
+ class Test
+}
diff --git a/test/files/neg/t5031b.check b/test/files/neg/t5031b.check
new file mode 100644
index 0000000..3bc2284
--- /dev/null
+++ b/test/files/neg/t5031b.check
@@ -0,0 +1,5 @@
+b.scala:3: error: Companions 'class Bippy' and 'object Bippy' must be defined in same file:
+ Found in t5031b/a.scala and t5031b/b.scala
+object Bippy
+ ^
+one error found
diff --git a/test/files/neg/t5031b/a.scala b/test/files/neg/t5031b/a.scala
new file mode 100644
index 0000000..0ab9aa9
--- /dev/null
+++ b/test/files/neg/t5031b/a.scala
@@ -0,0 +1,3 @@
+package foo
+
+class Bippy
diff --git a/test/files/neg/t5031b/b.scala b/test/files/neg/t5031b/b.scala
new file mode 100644
index 0000000..bdef237
--- /dev/null
+++ b/test/files/neg/t5031b/b.scala
@@ -0,0 +1,3 @@
+package foo
+
+object Bippy
diff --git a/test/files/neg/t5044.check b/test/files/neg/t5044.check
new file mode 100644
index 0000000..197da2a
--- /dev/null
+++ b/test/files/neg/t5044.check
@@ -0,0 +1,9 @@
+t5044.scala:7: error: recursive value a needs type
+ val id = m(a)
+ ^
+t5044.scala:6: warning: type-checking the invocation of method foo checks if the named argument expression 'id = ...' is a valid assignment
+in the current scope. The resulting type inference error (see above) can be fixed by providing an explicit type in the local definition for id.
+ val a = foo(id = 1)
+ ^
+one warning found
+one error found
diff --git a/test/files/neg/t5044.scala b/test/files/neg/t5044.scala
new file mode 100644
index 0000000..2663ec1
--- /dev/null
+++ b/test/files/neg/t5044.scala
@@ -0,0 +1,9 @@
+class T {
+ def foo[T](id: T) = 0
+ def m(a: Int) = 0
+
+ def f {
+ val a = foo(id = 1)
+ val id = m(a)
+ }
+}
diff --git a/test/files/neg/t5060.check b/test/files/neg/t5060.check
new file mode 100644
index 0000000..09b2d9a
--- /dev/null
+++ b/test/files/neg/t5060.check
@@ -0,0 +1,7 @@
+t5060.scala:2: error: covariant type T occurs in contravariant position in type => AnyRef{def contains(x: T): Unit} of value foo0
+ val foo0 = {
+ ^
+t5060.scala:6: error: covariant type T occurs in contravariant position in type => AnyRef{def contains(x: T): Unit} of method foo1
+ def foo1 = {
+ ^
+two errors found
diff --git a/test/files/neg/t5060.scala b/test/files/neg/t5060.scala
new file mode 100644
index 0000000..4d934a9
--- /dev/null
+++ b/test/files/neg/t5060.scala
@@ -0,0 +1,19 @@
+class A[+T] {
+ val foo0 = {
+ class AsVariantAsIWantToBe { def contains(x: T) = () }
+ new AsVariantAsIWantToBe
+ }
+ def foo1 = {
+ class VarianceIsTheSpiceOfTypes { def contains(x: T) = () }
+ new VarianceIsTheSpiceOfTypes
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val xs: A[String] = new A[String]
+ println(xs.foo0 contains "abc")
+ println((xs: A[Any]).foo0 contains 5)
+ // java.lang.NoSuchMethodException: A$AsVariantAsIWantToBe$1.contains(java.lang.String)
+ }
+}
diff --git a/test/files/neg/t5063.check b/test/files/neg/t5063.check
new file mode 100644
index 0000000..c6e553c
--- /dev/null
+++ b/test/files/neg/t5063.check
@@ -0,0 +1,4 @@
+t5063.scala:2: error: value + is not a member of AnyRef
+ super.+("")
+ ^
+one error found
diff --git a/test/files/neg/t5063.scala b/test/files/neg/t5063.scala
new file mode 100644
index 0000000..5b34b53
--- /dev/null
+++ b/test/files/neg/t5063.scala
@@ -0,0 +1,3 @@
+class A {
+ super.+("")
+}
diff --git a/test/files/neg/t5078.check b/test/files/neg/t5078.check
new file mode 100644
index 0000000..8f66445
--- /dev/null
+++ b/test/files/neg/t5078.check
@@ -0,0 +1,13 @@
+t5078.scala:7: error: an unapply method must accept a single argument.
+ val Foo(x1) = 1
+ ^
+t5078.scala:7: error: recursive value x1 needs type
+ val Foo(x1) = 1
+ ^
+t5078.scala:8: error: an unapply method must accept a single argument.
+ val Foo2(y2) = 2
+ ^
+t5078.scala:8: error: recursive value y2 needs type
+ val Foo2(y2) = 2
+ ^
+four errors found
diff --git a/test/files/neg/t5078.scala b/test/files/neg/t5078.scala
new file mode 100644
index 0000000..2e727e7
--- /dev/null
+++ b/test/files/neg/t5078.scala
@@ -0,0 +1,11 @@
+object Foo { def unapply: Option[Int] = Some(42) }
+object Foo2 { def unapply(): Option[Int] = Some(42) }
+
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val Foo(x1) = 1
+ val Foo2(y2) = 2
+ ()
+ }
+}
diff --git a/test/files/neg/t5093.check b/test/files/neg/t5093.check
new file mode 100644
index 0000000..daba460
--- /dev/null
+++ b/test/files/neg/t5093.check
@@ -0,0 +1,10 @@
+t5093.scala:2: error: illegal cyclic reference involving type C
+ def f[C[X] <: C[X]](l: C[_]) = l.x
+ ^
+t5093.scala:2: error: cyclic aliasing or subtyping involving type C
+ def f[C[X] <: C[X]](l: C[_]) = l.x
+ ^
+t5093.scala:2: error: C does not take type parameters
+ def f[C[X] <: C[X]](l: C[_]) = l.x
+ ^
+three errors found
diff --git a/test/files/neg/t5093.scala b/test/files/neg/t5093.scala
new file mode 100644
index 0000000..9cde364
--- /dev/null
+++ b/test/files/neg/t5093.scala
@@ -0,0 +1,3 @@
+class T {
+ def f[C[X] <: C[X]](l: C[_]) = l.x
+}
diff --git a/test/files/neg/t510.check b/test/files/neg/t510.check
new file mode 100644
index 0000000..355a6cd
--- /dev/null
+++ b/test/files/neg/t510.check
@@ -0,0 +1,4 @@
+t510.scala:19: error: cyclic aliasing or subtyping involving type T
+ def g(t: e.T): Unit = {
+ ^
+one error found
diff --git a/test/files/neg/bug510.scala b/test/files/neg/t510.scala
similarity index 100%
rename from test/files/neg/bug510.scala
rename to test/files/neg/t510.scala
diff --git a/test/files/neg/t5106.check b/test/files/neg/t5106.check
new file mode 100644
index 0000000..ac16041
--- /dev/null
+++ b/test/files/neg/t5106.check
@@ -0,0 +1,11 @@
+t5106.scala:3: error: type mismatch;
+ found : Int(4)
+ required: String
+ val (n, l): (String, Int) = (4, "")
+ ^
+t5106.scala:3: error: type mismatch;
+ found : String("")
+ required: Int
+ val (n, l): (String, Int) = (4, "")
+ ^
+two errors found
diff --git a/test/files/neg/t5106.scala b/test/files/neg/t5106.scala
new file mode 100644
index 0000000..419b430
--- /dev/null
+++ b/test/files/neg/t5106.scala
@@ -0,0 +1,5 @@
+class A {
+ def f {
+ val (n, l): (String, Int) = (4, "")
+ }
+}
diff --git a/test/files/neg/t512.check b/test/files/neg/t512.check
new file mode 100644
index 0000000..814e65e
--- /dev/null
+++ b/test/files/neg/t512.check
@@ -0,0 +1,4 @@
+t512.scala:3: error: not found: value something
+ val xxx = something ||
+ ^
+one error found
diff --git a/test/files/neg/bug512.scala b/test/files/neg/t512.scala
similarity index 100%
rename from test/files/neg/bug512.scala
rename to test/files/neg/t512.scala
diff --git a/test/files/neg/t5120.check b/test/files/neg/t5120.check
new file mode 100644
index 0000000..34d4ebd
--- /dev/null
+++ b/test/files/neg/t5120.check
@@ -0,0 +1,12 @@
+t5120.scala:11: error: type mismatch;
+ found : Object
+ required: _1
+ List(str, other) foreach (_.x1 = new AnyRef)
+ ^
+t5120.scala:25: error: type mismatch;
+ found : Thread
+ required: h.T
+ (which expands to) _2
+ List(str, num).foreach(h => h.f1 = new Thread())
+ ^
+two errors found
diff --git a/test/files/neg/t5120.scala b/test/files/neg/t5120.scala
new file mode 100644
index 0000000..c7063b7
--- /dev/null
+++ b/test/files/neg/t5120.scala
@@ -0,0 +1,29 @@
+class Cell[T](x0: T) {
+ type U = T
+ var x1: U = x0
+}
+
+object Test {
+ val str: Cell[String] = new Cell("a")
+ val other: Cell[Int] = new Cell(0)
+
+ def main(args: Array[String]): Unit = {
+ List(str, other) foreach (_.x1 = new AnyRef)
+ str.x1.length
+ }
+}
+// another way demonstrating the same underlying problem, as reported by roman kalukiewicz
+
+class Holder[_T](_f1 : _T, _f2 : _T) {
+ type T = _T
+ var f1 : T = _f1
+ var f2 : T = _f2
+}
+object Test2 {
+ val str = new Holder("t1", "t2")
+ val num = new Holder(1, 2)
+ List(str, num).foreach(h => h.f1 = new Thread())
+ def main(args: Array[String]) {
+ println(str.f1)
+ }
+}
diff --git a/test/files/neg/t5148.check b/test/files/neg/t5148.check
new file mode 100644
index 0000000..25107c4
--- /dev/null
+++ b/test/files/neg/t5148.check
@@ -0,0 +1,9 @@
+error: bad symbolic reference. A signature in Imports.class refers to term global
+in class scala.tools.nsc.interpreter.IMain which is not available.
+It may be completely missing from the current classpath, or the version on
+the classpath might be incompatible with the version used when compiling Imports.class.
+error: bad symbolic reference. A signature in Imports.class refers to term memberHandlers
+in class scala.tools.nsc.interpreter.IMain which is not available.
+It may be completely missing from the current classpath, or the version on
+the classpath might be incompatible with the version used when compiling Imports.class.
+two errors found
diff --git a/test/files/neg/t5148.scala b/test/files/neg/t5148.scala
new file mode 100644
index 0000000..fca64e5
--- /dev/null
+++ b/test/files/neg/t5148.scala
@@ -0,0 +1,4 @@
+package scala.tools.nsc
+package interpreter
+
+class IMain extends Imports
diff --git a/test/files/neg/t515.check b/test/files/neg/t515.check
new file mode 100644
index 0000000..47d2d30
--- /dev/null
+++ b/test/files/neg/t515.check
@@ -0,0 +1,6 @@
+t515.scala:7: error: type mismatch;
+ found : String
+ required: Test.Truc
+ val parent: Truc = file.getMachin
+ ^
+one error found
diff --git a/test/files/neg/bug515.scala b/test/files/neg/t515.scala
similarity index 100%
rename from test/files/neg/bug515.scala
rename to test/files/neg/t515.scala
diff --git a/test/files/neg/t5152.check b/test/files/neg/t5152.check
new file mode 100644
index 0000000..fd510db
--- /dev/null
+++ b/test/files/neg/t5152.check
@@ -0,0 +1,11 @@
+t5152.scala:7: error: kinds of the type arguments (Test.B) do not conform to the expected kinds of the type parameters (type E) in class A.
+Test.B's type parameters do not match type E's expected parameters:
+type E has one type parameter, but type _ has none
+ class B[E[_]] extends A[B] { } // B is depth 2 but A requires 1
+ ^
+t5152.scala:11: error: kinds of the type arguments (Test.B1) do not conform to the expected kinds of the type parameters (type E) in class A1.
+Test.B1's type parameters do not match type E's expected parameters:
+type _ has no type parameters, but type G has one
+ class B1[E[_]] extends A1[B1] // B1 is depth 2 but A1 requires 3
+ ^
+two errors found
diff --git a/test/files/neg/t5152.scala b/test/files/neg/t5152.scala
new file mode 100644
index 0000000..5efc76a
--- /dev/null
+++ b/test/files/neg/t5152.scala
@@ -0,0 +1,17 @@
+object Test {
+ new C
+ new C1
+ new C2
+
+ class A[E[_]] { }
+ class B[E[_]] extends A[B] { } // B is depth 2 but A requires 1
+ class C extends B { }
+
+ class A1[E[F[G[_]]]] { }
+ class B1[E[_]] extends A1[B1] // B1 is depth 2 but A1 requires 3
+ class C1 extends B1 { }
+
+ class A2[E[_]] { }
+ class B2[E] extends A2[B2] { } // this one is correct
+ class C2 extends B2 { }
+}
diff --git a/test/files/neg/t5189.check b/test/files/neg/t5189.check
new file mode 100644
index 0000000..7762f46
--- /dev/null
+++ b/test/files/neg/t5189.check
@@ -0,0 +1,6 @@
+t5189.scala:3: error: type mismatch;
+ found : Nothing => Any
+ required: Any => Any
+ def f(x: Any): Any => Any = x match { case Foo(bar) => bar }
+ ^
+one error found
\ No newline at end of file
diff --git a/test/files/neg/t5189.scala b/test/files/neg/t5189.scala
new file mode 100644
index 0000000..19e8e74
--- /dev/null
+++ b/test/files/neg/t5189.scala
@@ -0,0 +1,5 @@
+class TestNeg1 {
+ case class Foo[T, U](f: T => U)
+ def f(x: Any): Any => Any = x match { case Foo(bar) => bar }
+ // uh-oh, Any => Any should be Nothing => Any.
+}
diff --git a/test/files/neg/t5189_inferred.check b/test/files/neg/t5189_inferred.check
new file mode 100644
index 0000000..9cc5dcc
--- /dev/null
+++ b/test/files/neg/t5189_inferred.check
@@ -0,0 +1,6 @@
+t5189_inferred.scala:7: error: type mismatch;
+ found : scala.collection.immutable.Nil.type
+ required: ?A1 where type ?A1
+ f(Invariant(arr): Covariant[Any])(0) = Nil
+ ^
+one error found
diff --git a/test/files/neg/t5189_inferred.scala b/test/files/neg/t5189_inferred.scala
new file mode 100644
index 0000000..e4e8765
--- /dev/null
+++ b/test/files/neg/t5189_inferred.scala
@@ -0,0 +1,8 @@
+trait Covariant[+A]
+case class Invariant[A](xs: Array[A]) extends Covariant[A]
+
+class Test {
+ val arr = Array("abc")
+ def f[A](v: Covariant[A]) /*inferred!*/ = v match { case Invariant(xs) => xs }
+ f(Invariant(arr): Covariant[Any])(0) = Nil
+}
\ No newline at end of file
diff --git a/test/files/neg/t5189b.check b/test/files/neg/t5189b.check
new file mode 100644
index 0000000..46996e9
--- /dev/null
+++ b/test/files/neg/t5189b.check
@@ -0,0 +1,11 @@
+t5189b.scala:38: error: type mismatch;
+ found : TestNeg.Wrapped[?T7] where type ?T7 <: T (this is a GADT skolem)
+ required: TestNeg.Wrapped[T]
+Note: ?T7 <: T, but class Wrapped is invariant in type W.
+You may wish to define W as +W instead. (SLS 4.5)
+ case Wrapper/*[_ <: T ]*/(wrapped) => wrapped // : Wrapped[_ <: T], which is a subtype of Wrapped[T] if and only if Wrapped is covariant in its type parameter
+ ^
+t5189b.scala:51: error: value foo is not a member of type parameter T
+ case Some(xs) => xs.foo // the error message should not refer to a skolem (testing extrapolation)
+ ^
+two errors found
diff --git a/test/files/neg/t5189b.scala b/test/files/neg/t5189b.scala
new file mode 100644
index 0000000..7c1871d
--- /dev/null
+++ b/test/files/neg/t5189b.scala
@@ -0,0 +1,80 @@
+class TestPos {
+ class AbsWrapperCov[+A]
+ case class Wrapper[B](x: B) extends AbsWrapperCov[B]
+
+ def unwrap[T](x: AbsWrapperCov[T]): T = x match {
+ case Wrapper/*[_ <: T ]*/(x) => x // _ <: T, which is a subtype of T
+ }
+
+ def unwrapOption[T](x: Option[T]): T = x match {
+ case Some(xs) => xs
+ }
+
+
+ case class Down[+T](x: T)
+ case class Up[-T](f: T => Unit)
+
+ def f1[T](x1: Down[T])(x2: Up[T]) = ((x1, x2)) match {
+ case (Down(x), Up(f)) => f(x)
+ }
+}
+
+
+object TestNeg extends App {
+ class AbsWrapperCov[+A]
+ case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B]
+
+ /*
+ when inferring Wrapper's type parameter B from x's type AbsWrapperCov[T],
+ we must take into account that x's actual type is AbsWrapperCov[Tactual] forSome {type Tactual <: T}
+ as AbsWrapperCov is covariant in A -- in other words, we must not assume we know T exactly, all we know is its upper bound
+
+ since method application is the only way to generate this slack between run-time and compile-time types,
+ we'll simply replace the skolems that represent method type parameters as seen from the method's body by
+ other skolems that are (upper/lower)-bounded by the type-parameter skolems
+ (depending on whether the skolem appears in a covariant/contravariant position)
+ */
+ def unwrap[T](x: AbsWrapperCov[T]): Wrapped[T] = x match {
+ case Wrapper/*[_ <: T ]*/(wrapped) => wrapped // : Wrapped[_ <: T], which is a subtype of Wrapped[T] if and only if Wrapped is covariant in its type parameter
+ }
+
+ class Wrapped[W](var cell: W) // must be invariant (to trigger the bug)
+
+ // class A { def imNotAB = println("notB")}
+ // class B
+ //
+ // val w = new Wrapped(new A)
+ // unwrap[Any](Wrapper(w)).cell = new B
+ // w.cell.imNotAB
+
+ def unwrapOption[T](x: Option[T]): T = x match {
+ case Some(xs) => xs.foo // the error message should not refer to a skolem (testing extrapolation)
+ }
+
+}
+
+// class TestPos1 {
+// class Base[T]
+// case class C[T](x: T) extends Base[T]
+// def foo[T](b: Base[T]): T = b match { case C(x) => x }
+//
+// case class Span[K <: Ordered[K]](low: Option[K], high: Option[K]) extends Function1[K, Boolean] {
+// override def equals(x$1: Any): Boolean = x$1 match {
+// case Span((low$0 @ _), (high$0 @ _)) if low$0.equals(low).$amp$amp(high$0.equals(high)) => true
+// case _ => false
+// }
+// def apply(k: K): Boolean = this match {
+// case Span(Some(low), Some(high)) => (k >= low && k <= high)
+// case Span(Some(low), None) => (k >= low)
+// case Span(None, Some(high)) => (k <= high)
+// case _ => false
+// }
+// }
+// }
+//
+// class TestNeg1 {
+// case class Foo[T, U](f: T => U)
+// def f(x: Any): Any => Any = x match { case Foo(bar) => bar }
+// // uh-oh, Any => Any should be Nothing => Any.
+// }
+
diff --git a/test/files/neg/t520.check b/test/files/neg/t520.check
new file mode 100644
index 0000000..0035f89
--- /dev/null
+++ b/test/files/neg/t520.check
@@ -0,0 +1,4 @@
+t520.scala:8: error: overloaded method verifyKeyword needs result type
+ verifyKeyword("", source, pos);
+ ^
+one error found
diff --git a/test/files/neg/bug520.scala b/test/files/neg/t520.scala
similarity index 100%
rename from test/files/neg/bug520.scala
rename to test/files/neg/t520.scala
diff --git a/test/files/neg/t521.check b/test/files/neg/t521.check
new file mode 100644
index 0000000..a100195
--- /dev/null
+++ b/test/files/neg/t521.check
@@ -0,0 +1,15 @@
+t521.scala:10: error: class PlainFile needs to be abstract, since method path in class AbstractFile of type => String is not defined
+class PlainFile(val file : File) extends AbstractFile {}
+ ^
+t521.scala:13: error: overriding value file in class PlainFile of type java.io.File;
+ value file needs `override' modifier
+final class ZipArchive(val file : File, archive : ZipFile) extends PlainFile(file) {
+ ^
+t521.scala:13: error: class ZipArchive needs to be abstract, since method path in class AbstractFile of type => String is not defined
+final class ZipArchive(val file : File, archive : ZipFile) extends PlainFile(file) {
+ ^
+t521.scala:15: error: overriding value path in class VirtualFile of type String;
+ method path needs to be a stable, immutable value
+ override def path = "";
+ ^
+four errors found
diff --git a/test/files/neg/bug521.scala b/test/files/neg/t521.scala
similarity index 100%
rename from test/files/neg/bug521.scala
rename to test/files/neg/t521.scala
diff --git a/test/files/neg/t5318.check b/test/files/neg/t5318.check
new file mode 100644
index 0000000..d6a3a57
--- /dev/null
+++ b/test/files/neg/t5318.check
@@ -0,0 +1,5 @@
+t5318.scala:7: error: diverging implicit expansion for type CompilerHang.this.TC[F]
+starting with method tc in class CompilerHang
+ breakage // type checker doesn't terminate, should report inference failure
+ ^
+one error found
diff --git a/test/files/neg/t5318.scala b/test/files/neg/t5318.scala
new file mode 100644
index 0000000..8009c66
--- /dev/null
+++ b/test/files/neg/t5318.scala
@@ -0,0 +1,8 @@
+class CompilerHang {
+ trait TC[M[_]]
+ trait S[A]
+
+ implicit def tc[M[_]](implicit M0: TC[M]): TC[S] = null
+ def breakage[F[_] : TC] = 0
+ breakage // type checker doesn't terminate, should report inference failure
+}
\ No newline at end of file
diff --git a/test/files/neg/t5318b.check b/test/files/neg/t5318b.check
new file mode 100644
index 0000000..47a10d6
--- /dev/null
+++ b/test/files/neg/t5318b.check
@@ -0,0 +1,5 @@
+t5318b.scala:7: error: diverging implicit expansion for type DivergingImplicitReported.this.TC[F]
+starting with method tc in class DivergingImplicitReported
+ breakage // correct: diverging implicit expansion
+ ^
+one error found
\ No newline at end of file
diff --git a/test/files/neg/t5318b.scala b/test/files/neg/t5318b.scala
new file mode 100644
index 0000000..123f8b4
--- /dev/null
+++ b/test/files/neg/t5318b.scala
@@ -0,0 +1,8 @@
+class DivergingImplicitReported {
+ trait TC[M]
+ trait S
+
+ implicit def tc[M](implicit M0: TC[M]): TC[S] = null
+ def breakage[F: TC] = 0
+ breakage // correct: diverging implicit expansion
+}
\ No newline at end of file
diff --git a/test/files/neg/t5318c.check b/test/files/neg/t5318c.check
new file mode 100644
index 0000000..594539b
--- /dev/null
+++ b/test/files/neg/t5318c.check
@@ -0,0 +1,5 @@
+t5318c.scala:13: error: diverging implicit expansion for type CompilerHang.this.TC[F]
+starting with method tc in class CompilerHang
+ breakage // type checker doesn't terminate, should report inference failure
+ ^
+one error found
diff --git a/test/files/neg/t5318c.scala b/test/files/neg/t5318c.scala
new file mode 100644
index 0000000..477a987
--- /dev/null
+++ b/test/files/neg/t5318c.scala
@@ -0,0 +1,14 @@
+class CompilerHang {
+ trait TC[M[_]]
+ trait S[A]
+
+ class C[M[_]] {
+ type TCM = TC[M]
+ }
+
+ // A nefarious implicit, to motivate the removal of `&& sym.owner.isTerm` from
+ // `isFreeTypeParamNoSkolem`.
+ implicit def tc[x[_], CC[x[_]] <: C[x]](implicit M0: CC[x]#TCM): CC[x]#TCM = null
+ def breakage[F[_] : TC] = 0
+ breakage // type checker doesn't terminate, should report inference failure
+}
diff --git a/test/files/neg/t5340.check b/test/files/neg/t5340.check
new file mode 100644
index 0000000..2de1929
--- /dev/null
+++ b/test/files/neg/t5340.check
@@ -0,0 +1,6 @@
+t5340.scala:17: error: type mismatch;
+ found : MyApp.r.E
+ required: MyApp.s.E
+ println(b: s.E)
+ ^
+one error found
diff --git a/test/files/neg/t5340.scala b/test/files/neg/t5340.scala
new file mode 100644
index 0000000..b283f13
--- /dev/null
+++ b/test/files/neg/t5340.scala
@@ -0,0 +1,29 @@
+class Poly {
+ class E
+ object E {
+ implicit def conv(value: Any): E = sys.error("")
+ }
+}
+
+object MyApp {
+ val r: Poly = sys.error("")
+ val s: Poly = sys.error("")
+ val b: r.E = sys.error("")
+
+ // okay
+ s.E.conv(b): s.E
+
+ // compilation fails with error below
+ println(b: s.E)
+
+ // amb prefix: MyApp.s.type#class E MyApp.r.type#class E
+ // amb prefix: MyApp.s.type#class E MyApp.r.type#class E
+ // ../test/pending/run/t5310.scala:17: error: type mismatch;
+ // found : MyApp.r.E
+ // required: MyApp.s.E
+ // println(b: s.E)
+ // ^
+
+ // The type error is as expected, but the `amb prefix` should be logged,
+ // rather than printed to standard out.
+}
diff --git a/test/files/neg/t5352.check b/test/files/neg/t5352.check
new file mode 100644
index 0000000..d24b0e8
--- /dev/null
+++ b/test/files/neg/t5352.check
@@ -0,0 +1,13 @@
+t5352.scala:11: error: type mismatch;
+ found : boop.Bar
+ required: boop.BarF
+ (which expands to) AnyRef{def f(): Int}
+ x = xs.head
+ ^
+t5352.scala:14: error: method f in class Bar1 cannot be accessed in boop.Bar1
+ Access to protected method f not permitted because
+ enclosing object boop is not a subclass of
+ class Bar1 in object boop where target is defined
+ (new Bar1).f
+ ^
+two errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/t5352.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/t5352.flags
diff --git a/test/files/neg/t5352.scala b/test/files/neg/t5352.scala
new file mode 100644
index 0000000..6ee41f5
--- /dev/null
+++ b/test/files/neg/t5352.scala
@@ -0,0 +1,15 @@
+object boop {
+ abstract class Bar { protected def f(): Any }
+ class Bar1 extends Bar { protected def f(): Int = 5 }
+ class Bar2 extends Bar { protected def f(): Int = 5 }
+
+ val xs = List(new Bar1, new Bar2)
+
+ type BarF = { def f(): Int }
+
+ var x: BarF = _
+ x = xs.head
+ x.f
+
+ (new Bar1).f
+}
diff --git a/test/files/neg/t5354.check b/test/files/neg/t5354.check
new file mode 100644
index 0000000..e47cecb
--- /dev/null
+++ b/test/files/neg/t5354.check
@@ -0,0 +1,7 @@
+t5354.scala:9: error: ambiguous implicit values:
+ both method x123 in package foo of type => foo.Bippy
+ and method z of type => foo.Bippy
+ match expected type foo.Bippy
+ implicitly[Bippy]
+ ^
+one error found
diff --git a/test/files/neg/t5354.scala b/test/files/neg/t5354.scala
new file mode 100644
index 0000000..99b5650
--- /dev/null
+++ b/test/files/neg/t5354.scala
@@ -0,0 +1,15 @@
+package object foo {
+ implicit def x123: Bippy = new Bippy("x")
+}
+package foo {
+ class Bippy(override val toString: String){ }
+ class Dingus {
+ def f1 = {
+ implicit def z: Bippy = new Bippy("z")
+ implicitly[Bippy]
+ }
+ }
+ object Test extends App {
+ println(new Dingus().f1)
+ }
+}
diff --git a/test/files/neg/t5357.check b/test/files/neg/t5357.check
new file mode 100644
index 0000000..3385559
--- /dev/null
+++ b/test/files/neg/t5357.check
@@ -0,0 +1,4 @@
+t5357.scala:5: error: Pattern variables must start with a lower-case letter. (SLS 8.1.1.)
+ case A: N => 1
+ ^
+one error found
diff --git a/test/files/neg/t5357.scala b/test/files/neg/t5357.scala
new file mode 100644
index 0000000..369a556
--- /dev/null
+++ b/test/files/neg/t5357.scala
@@ -0,0 +1,9 @@
+trait M
+
+case class N() extends M {
+ def mytest(x: M) = x match {
+ case A: N => 1
+ case _ => 0
+ }
+}
+
diff --git a/test/files/neg/t5358.check b/test/files/neg/t5358.check
new file mode 100644
index 0000000..59e83bb
--- /dev/null
+++ b/test/files/neg/t5358.check
@@ -0,0 +1,7 @@
+t5358.scala:3: error: class C inherits conflicting members:
+ method hi in trait A of type => String and
+ method hi in trait B of type => String
+(Note: this can be resolved by declaring an override in class C.)
+class C extends A with B
+ ^
+one error found
diff --git a/test/files/neg/t5358.scala b/test/files/neg/t5358.scala
new file mode 100644
index 0000000..13d827e
--- /dev/null
+++ b/test/files/neg/t5358.scala
@@ -0,0 +1,4 @@
+trait A { def hi = "A" }
+trait B { def hi = "B" }
+class C extends A with B
+
diff --git a/test/files/neg/t5361.check b/test/files/neg/t5361.check
new file mode 100644
index 0000000..d7fee87
--- /dev/null
+++ b/test/files/neg/t5361.check
@@ -0,0 +1,4 @@
+t5361.scala:2: error: only declarations allowed here
+ val x : { val self = this } = new { self => }
+ ^
+one error found
diff --git a/test/files/neg/t5361.scala b/test/files/neg/t5361.scala
new file mode 100644
index 0000000..1705c09
--- /dev/null
+++ b/test/files/neg/t5361.scala
@@ -0,0 +1,3 @@
+class A {
+ val x : { val self = this } = new { self => }
+}
diff --git a/test/files/neg/t5376.check b/test/files/neg/t5376.check
new file mode 100644
index 0000000..0376163
--- /dev/null
+++ b/test/files/neg/t5376.check
@@ -0,0 +1,11 @@
+t5376.scala:12: error: type mismatch;
+ found : String("a")
+ required: Int
+ "a": Int
+ ^
+t5376.scala:22: error: type mismatch;
+ found : String("a")
+ required: Int
+ "a": Int
+ ^
+two errors found
diff --git a/test/files/neg/t5376.scala b/test/files/neg/t5376.scala
new file mode 100644
index 0000000..8da3868
--- /dev/null
+++ b/test/files/neg/t5376.scala
@@ -0,0 +1,24 @@
+object Test {
+ object O1 { implicit def f(s: String): Int = 1 }
+ object O2 { implicit def f(s: String): Int = 2 }
+ object O3 { def f(s: String): Int = 3 }
+
+ // Import two implicits with the same name in the same scope.
+ def m1 = {
+ import O1._
+ import O2._
+
+ // Implicit usage compiles.
+ "a": Int
+ }
+
+ // Import one implict and one non-implicit method with the
+ // same name in the same scope.
+ def m2 = {
+ import O1._
+ import O3._
+
+ // Implicit usage compiles.
+ "a": Int
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t5378.check b/test/files/neg/t5378.check
new file mode 100644
index 0000000..c146008
--- /dev/null
+++ b/test/files/neg/t5378.check
@@ -0,0 +1,31 @@
+t5378.scala:7: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ def contains = new { def apply[T1 <: T](value: T1) = ??? }
+ ^
+t5378.scala:8: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ def contains1 = new { def apply[T1 <: A1](value: T1) = ??? }
+ ^
+t5378.scala:9: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ def contains2 = new { def apply[T1 <: A2](value: T1) = ??? }
+ ^
+t5378.scala:15: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ new Bippy { def apply[T1 <: T](value: T1) = ??? }
+ ^
+t5378.scala:16: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ new Bippy { def apply[T1 <: B1](value: T1) = ??? }
+ ^
+t5378.scala:17: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ new Bippy { def apply[T1 <: B2](value: T1) = ??? }
+ ^
+t5378.scala:21: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ def apply1[T1 <: B3](value: T1) = ???
+ ^
+t5378.scala:23: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
+ def apply3(value: B3) = ???
+ ^
+t5378.scala:28: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
+ def apply1(s: String)(x: Int)(value: T) = ???
+ ^
+t5378.scala:29: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ def apply2[T1 <: T](s: String)(x: Int)(value: T1) = ???
+ ^
+10 errors found
diff --git a/test/files/neg/t5378.scala b/test/files/neg/t5378.scala
new file mode 100644
index 0000000..fa6afa0
--- /dev/null
+++ b/test/files/neg/t5378.scala
@@ -0,0 +1,54 @@
+import scala.language.reflectiveCalls
+
+class Coll[+T] {
+ type A1 <: T
+ type A2 <: A1
+
+ def contains = new { def apply[T1 <: T](value: T1) = ??? }
+ def contains1 = new { def apply[T1 <: A1](value: T1) = ??? }
+ def contains2 = new { def apply[T1 <: A2](value: T1) = ??? }
+ def contains3 = {
+ trait Bippy {
+ type B1 <: T
+ type B2 <: B1
+ }
+ new Bippy { def apply[T1 <: T](value: T1) = ??? }
+ new Bippy { def apply[T1 <: B1](value: T1) = ??? }
+ new Bippy { def apply[T1 <: B2](value: T1) = ??? }
+ new Bippy {
+ type B3 = B2
+ type B4 = List[B2]
+ def apply1[T1 <: B3](value: T1) = ???
+ def apply2[T1 <: B4](value: T1) = ???
+ def apply3(value: B3) = ???
+ def apply4(value: B4) = value.head
+ }
+ }
+ def contains4 = new {
+ def apply1(s: String)(x: Int)(value: T) = ???
+ def apply2[T1 <: T](s: String)(x: Int)(value: T1) = ???
+ }
+ def containsOk = {
+ trait Bippy {
+ type B1 <: AnyRef
+ type B2 <: B1
+ }
+ new Bippy { def apply[T1 <: AnyRef](value: T1) = ??? }
+ new Bippy { type B1 = String ; def apply[T1 <: B1](value: T1) = ??? }
+ new Bippy { type B2 = String ; def apply[T1 <: B2](value: T1) = ??? }
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val xs = new Coll[List[String]]
+ val ys: Coll[Traversable[String]] = xs
+
+ println(ys contains Nil)
+ // java.lang.NoSuchMethodException: Coll$$anon$1.apply(scala.collection.Traversable)
+ // at java.lang.Class.getMethod(Class.java:1605)
+ // at Test$.reflMethod$Method1(a.scala:14)
+ // at Test$.main(a.scala:14)
+ // at Test.main(a.scala)
+ }
+}
diff --git a/test/files/neg/t5390.check b/test/files/neg/t5390.check
new file mode 100644
index 0000000..6a0129b
--- /dev/null
+++ b/test/files/neg/t5390.check
@@ -0,0 +1,4 @@
+t5390.scala:7: error: forward reference extends over definition of value b
+ val b = a.B("")
+ ^
+one error found
diff --git a/test/files/neg/t5390.scala b/test/files/neg/t5390.scala
new file mode 100644
index 0000000..dd628f8
--- /dev/null
+++ b/test/files/neg/t5390.scala
@@ -0,0 +1,10 @@
+class A {
+ object B { def apply(s: String) = 0}
+}
+
+object X {
+ def foo {
+ val b = a.B("")
+ val a = new A
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t5390b.check b/test/files/neg/t5390b.check
new file mode 100644
index 0000000..cbf8faf
--- /dev/null
+++ b/test/files/neg/t5390b.check
@@ -0,0 +1,4 @@
+t5390b.scala:7: error: forward reference extends over definition of value b
+ val b = a.B("")
+ ^
+one error found
diff --git a/test/files/neg/t5390b.scala b/test/files/neg/t5390b.scala
new file mode 100644
index 0000000..c3373b8
--- /dev/null
+++ b/test/files/neg/t5390b.scala
@@ -0,0 +1,10 @@
+class A {
+ case class B(s: String)
+}
+
+object X {
+ def foo {
+ val b = a.B("")
+ val a = new A
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t5390c.check b/test/files/neg/t5390c.check
new file mode 100644
index 0000000..f8a794d
--- /dev/null
+++ b/test/files/neg/t5390c.check
@@ -0,0 +1,4 @@
+t5390c.scala:7: error: forward reference extends over definition of value b
+ val b = new a.B("")
+ ^
+one error found
diff --git a/test/files/neg/t5390c.scala b/test/files/neg/t5390c.scala
new file mode 100644
index 0000000..6b11576
--- /dev/null
+++ b/test/files/neg/t5390c.scala
@@ -0,0 +1,10 @@
+class A {
+ case class B(s: String)
+}
+
+object X {
+ def foo {
+ val b = new a.B("")
+ val a = new A
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t5390d.check b/test/files/neg/t5390d.check
new file mode 100644
index 0000000..daa2914
--- /dev/null
+++ b/test/files/neg/t5390d.check
@@ -0,0 +1,4 @@
+t5390d.scala:7: error: forward reference extends over definition of value b
+ val b = a.B.toString
+ ^
+one error found
diff --git a/test/files/neg/t5390d.scala b/test/files/neg/t5390d.scala
new file mode 100644
index 0000000..7a2671b
--- /dev/null
+++ b/test/files/neg/t5390d.scala
@@ -0,0 +1,10 @@
+class A {
+ case class B(s: String)
+}
+
+object X {
+ def foo {
+ val b = a.B.toString
+ val a = new A
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t5426.check b/test/files/neg/t5426.check
new file mode 100644
index 0000000..d9e192d
--- /dev/null
+++ b/test/files/neg/t5426.check
@@ -0,0 +1,13 @@
+t5426.scala:2: error: comparing values of types Some[Int] and Int using `==' will always yield false
+ def f1 = Some(5) == 5
+ ^
+t5426.scala:3: error: comparing values of types Int and Some[Int] using `==' will always yield false
+ def f2 = 5 == Some(5)
+ ^
+t5426.scala:8: error: comparing values of types Int and Some[Int] using `==' will always yield false
+ (x1 == x2)
+ ^
+t5426.scala:9: error: comparing values of types Some[Int] and Int using `==' will always yield false
+ (x2 == x1)
+ ^
+four errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/t5426.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/t5426.flags
diff --git a/test/files/neg/t5426.scala b/test/files/neg/t5426.scala
new file mode 100644
index 0000000..f2fb5cc
--- /dev/null
+++ b/test/files/neg/t5426.scala
@@ -0,0 +1,10 @@
+class A {
+ def f1 = Some(5) == 5
+ def f2 = 5 == Some(5)
+
+ val x1 = 5
+ val x2 = Some(5)
+
+ (x1 == x2)
+ (x2 == x1)
+}
diff --git a/test/files/neg/t5429.check b/test/files/neg/t5429.check
new file mode 100644
index 0000000..4350696
--- /dev/null
+++ b/test/files/neg/t5429.check
@@ -0,0 +1,142 @@
+t5429.scala:20: error: overriding value value in class A of type Int;
+ object value needs `override' modifier
+ object value // fail
+ ^
+t5429.scala:21: error: overriding lazy value lazyvalue in class A of type Int;
+ object lazyvalue needs `override' modifier
+ object lazyvalue // fail
+ ^
+t5429.scala:22: error: overriding method nullary in class A of type => Int;
+ object nullary needs `override' modifier
+ object nullary // fail
+ ^
+t5429.scala:23: error: overriding method emptyArg in class A of type ()Int;
+ object emptyArg needs `override' modifier
+ object emptyArg // fail
+ ^
+t5429.scala:27: error: overriding value value in class A0 of type Any;
+ object value needs `override' modifier
+ object value // fail
+ ^
+t5429.scala:28: error: overriding lazy value lazyvalue in class A0 of type Any;
+ object lazyvalue needs `override' modifier
+ object lazyvalue // fail
+ ^
+t5429.scala:29: error: overriding method nullary in class A0 of type => Any;
+ object nullary needs `override' modifier
+ object nullary // fail
+ ^
+t5429.scala:30: error: overriding method emptyArg in class A0 of type ()Any;
+ object emptyArg needs `override' modifier
+ object emptyArg // fail
+ ^
+t5429.scala:35: error: overriding value value in class A of type Int;
+ object value has incompatible type
+ override object value // fail
+ ^
+t5429.scala:36: error: overriding lazy value lazyvalue in class A of type Int;
+ object lazyvalue must be declared lazy to override a concrete lazy value
+ override object lazyvalue // fail
+ ^
+t5429.scala:37: error: overriding method nullary in class A of type => Int;
+ object nullary has incompatible type
+ override object nullary // fail
+ ^
+t5429.scala:38: error: overriding method emptyArg in class A of type ()Int;
+ object emptyArg has incompatible type
+ override object emptyArg // fail
+ ^
+t5429.scala:39: error: object oneArg overrides nothing.
+Note: the super classes of class C contain the following, non final members named oneArg:
+def oneArg(x: String): Int
+ override object oneArg // fail
+ ^
+t5429.scala:43: error: overriding lazy value lazyvalue in class A0 of type Any;
+ object lazyvalue must be declared lazy to override a concrete lazy value
+ override object lazyvalue // !!! this fails, but should succeed (lazy over lazy)
+ ^
+t5429.scala:46: error: object oneArg overrides nothing.
+Note: the super classes of class C0 contain the following, non final members named oneArg:
+def oneArg(x: String): Any
+ override object oneArg // fail
+ ^
+t5429.scala:50: error: overriding value value in class A of type Int;
+ value value needs `override' modifier
+ val value = 0 // fail
+ ^
+t5429.scala:51: error: overriding lazy value lazyvalue in class A of type Int;
+ value lazyvalue needs `override' modifier
+ val lazyvalue = 0 // fail
+ ^
+t5429.scala:52: error: overriding method nullary in class A of type => Int;
+ value nullary needs `override' modifier
+ val nullary = 5 // fail
+ ^
+t5429.scala:53: error: overriding method emptyArg in class A of type ()Int;
+ value emptyArg needs `override' modifier
+ val emptyArg = 10 // fail
+ ^
+t5429.scala:58: error: overriding lazy value lazyvalue in class A0 of type Any;
+ value lazyvalue must be declared lazy to override a concrete lazy value
+ override val lazyvalue = 0 // fail (non-lazy)
+ ^
+t5429.scala:61: error: value oneArg overrides nothing.
+Note: the super classes of class D0 contain the following, non final members named oneArg:
+def oneArg(x: String): Any
+ override val oneArg = 15 // fail
+ ^
+t5429.scala:65: error: overriding value value in class A of type Int;
+ method value needs `override' modifier
+ def value = 0 // fail
+ ^
+t5429.scala:66: error: overriding lazy value lazyvalue in class A of type Int;
+ method lazyvalue needs `override' modifier
+ def lazyvalue = 2 // fail
+ ^
+t5429.scala:67: error: overriding method nullary in class A of type => Int;
+ method nullary needs `override' modifier
+ def nullary = 5 // fail
+ ^
+t5429.scala:68: error: overriding method emptyArg in class A of type ()Int;
+ method emptyArg needs `override' modifier
+ def emptyArg = 10 // fail
+ ^
+t5429.scala:72: error: overriding value value in class A0 of type Any;
+ method value needs to be a stable, immutable value
+ override def value = 0 // fail
+ ^
+t5429.scala:73: error: overriding lazy value lazyvalue in class A0 of type Any;
+ method lazyvalue needs to be a stable, immutable value
+ override def lazyvalue = 2 // fail
+ ^
+t5429.scala:76: error: method oneArg overrides nothing.
+Note: the super classes of class E0 contain the following, non final members named oneArg:
+def oneArg(x: String): Any
+ override def oneArg = 15 // fail
+ ^
+t5429.scala:80: error: overriding value value in class A of type Int;
+ lazy value value needs `override' modifier
+ lazy val value = 0 // fail
+ ^
+t5429.scala:81: error: overriding lazy value lazyvalue in class A of type Int;
+ lazy value lazyvalue needs `override' modifier
+ lazy val lazyvalue = 2 // fail
+ ^
+t5429.scala:82: error: overriding method nullary in class A of type => Int;
+ lazy value nullary needs `override' modifier
+ lazy val nullary = 5 // fail
+ ^
+t5429.scala:83: error: overriding method emptyArg in class A of type ()Int;
+ lazy value emptyArg needs `override' modifier
+ lazy val emptyArg = 10 // fail
+ ^
+t5429.scala:87: error: overriding value value in class A0 of type Any;
+ lazy value value cannot override a concrete non-lazy value
+ override lazy val value = 0 // fail (strict over lazy)
+ ^
+t5429.scala:91: error: value oneArg overrides nothing.
+Note: the super classes of class F0 contain the following, non final members named oneArg:
+def oneArg(x: String): Any
+ override lazy val oneArg = 15 // fail
+ ^
+34 errors found
diff --git a/test/files/neg/t5429.scala b/test/files/neg/t5429.scala
new file mode 100644
index 0000000..1cd4dcd
--- /dev/null
+++ b/test/files/neg/t5429.scala
@@ -0,0 +1,93 @@
+// /scala/trac/5429/a.scala
+// Wed Feb 1 08:05:27 PST 2012
+
+class A {
+ val value = 0
+ lazy val lazyvalue = 2
+ def nullary = 5
+ def emptyArg() = 10
+ def oneArg(x: String) = 15
+}
+class A0 {
+ val value: Any = 0
+ lazy val lazyvalue: Any = 2
+ def nullary: Any = 5
+ def emptyArg(): Any = 10
+ def oneArg(x: String): Any = 15
+}
+
+class B extends A {
+ object value // fail
+ object lazyvalue // fail
+ object nullary // fail
+ object emptyArg // fail
+ object oneArg // overload
+}
+class B0 extends A0 {
+ object value // fail
+ object lazyvalue // fail
+ object nullary // fail
+ object emptyArg // fail
+ object oneArg // overload
+}
+
+class C extends A {
+ override object value // fail
+ override object lazyvalue // fail
+ override object nullary // fail
+ override object emptyArg // fail
+ override object oneArg // fail
+}
+class C0 extends A0 {
+ override object value // !!! this succeeds, but should fail (lazy over strict)
+ override object lazyvalue // !!! this fails, but should succeed (lazy over lazy)
+ override object nullary // override
+ override object emptyArg // override
+ override object oneArg // fail
+}
+
+class D extends A {
+ val value = 0 // fail
+ val lazyvalue = 0 // fail
+ val nullary = 5 // fail
+ val emptyArg = 10 // fail
+ val oneArg = 15 // overload
+}
+class D0 extends A0 {
+ override val value = 0 // override
+ override val lazyvalue = 0 // fail (non-lazy)
+ override val nullary = 5 // override
+ override val emptyArg = 10 // override
+ override val oneArg = 15 // fail
+}
+
+class E extends A {
+ def value = 0 // fail
+ def lazyvalue = 2 // fail
+ def nullary = 5 // fail
+ def emptyArg = 10 // fail
+ def oneArg = 15 // overload
+}
+class E0 extends A0 {
+ override def value = 0 // fail
+ override def lazyvalue = 2 // fail
+ override def nullary = 5 // override
+ override def emptyArg = 10 // override
+ override def oneArg = 15 // fail
+}
+
+class F extends A {
+ lazy val value = 0 // fail
+ lazy val lazyvalue = 2 // fail
+ lazy val nullary = 5 // fail
+ lazy val emptyArg = 10 // fail
+ lazy val oneArg = 15 // overload
+}
+class F0 extends A0 {
+ override lazy val value = 0 // fail (strict over lazy)
+ override lazy val lazyvalue = 2 // override (lazy over lazy)
+ override lazy val nullary = 5 // override
+ override lazy val emptyArg = 10 // override
+ override lazy val oneArg = 15 // fail
+}
+
diff --git a/test/files/neg/t5440.check b/test/files/neg/t5440.check
new file mode 100644
index 0000000..a862350
--- /dev/null
+++ b/test/files/neg/t5440.check
@@ -0,0 +1,5 @@
+t5440.scala:3: error: match may not be exhaustive.
+It would fail on the following inputs: (List(_), Nil), (Nil, List(_))
+ (list1, list2) match {
+ ^
+one error found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/t5440.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/t5440.flags
diff --git a/test/files/neg/t5440.scala b/test/files/neg/t5440.scala
new file mode 100644
index 0000000..d9cf5d6
--- /dev/null
+++ b/test/files/neg/t5440.scala
@@ -0,0 +1,7 @@
+object Test {
+ def merge(list1: List[Long], list2: List[Long]): Boolean =
+ (list1, list2) match {
+ case (hd1::_, hd2::_) => true
+ case (Nil, Nil) => true
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t545.check b/test/files/neg/t545.check
new file mode 100644
index 0000000..8ebbf9b
--- /dev/null
+++ b/test/files/neg/t545.check
@@ -0,0 +1,7 @@
+t545.scala:4: error: value blah is not a member of Test.Foo
+ val x = foo.blah match {
+ ^
+t545.scala:5: error: recursive value x needs type
+ case List(x) => x
+ ^
+two errors found
diff --git a/test/files/neg/bug545.scala b/test/files/neg/t545.scala
similarity index 100%
rename from test/files/neg/bug545.scala
rename to test/files/neg/t545.scala
diff --git a/test/files/neg/t5452-new.check b/test/files/neg/t5452-new.check
new file mode 100644
index 0000000..1850a70
--- /dev/null
+++ b/test/files/neg/t5452-new.check
@@ -0,0 +1,8 @@
+t5452-new.scala:30: error: overloaded method value apply with alternatives:
+ ()Queryable[CoffeesTable] <and>
+ (t: Tree)(implicit evidence$2: scala.reflect.ClassTag[CoffeesTable])Nothing <and>
+ (implicit evidence$1: scala.reflect.ClassTag[CoffeesTable])Nothing
+ cannot be applied to (Queryable[CoffeesTable])
+ Queryable[CoffeesTable]( q.treeFilter(null) )
+ ^
+one error found
diff --git a/test/files/neg/t5452-new.scala b/test/files/neg/t5452-new.scala
new file mode 100644
index 0000000..b74b155
--- /dev/null
+++ b/test/files/neg/t5452-new.scala
@@ -0,0 +1,31 @@
+import scala.reflect.{ClassTag, classTag}
+
+// /scala/trac/5452/a.scala
+// Mon Feb 13 22:52:36 PST 2012
+
+// import scala.reflect.runtime.universe._
+
+trait Tree
+
+object Bip {
+ def ??? = sys.error("")
+}
+import Bip._
+
+case class Queryable[T]() {
+ def treeFilter( t:Tree ) : Queryable[T] = ???
+}
+
+object Queryable {
+ def apply[T:ClassTag] = ???
+ def apply[T:ClassTag]( t:Tree ) = ???
+}
+
+trait CoffeesTable{
+ def sales : Int
+}
+
+object Test extends App{
+ val q = new Queryable[CoffeesTable]
+ Queryable[CoffeesTable]( q.treeFilter(null) )
+}
\ No newline at end of file
diff --git a/test/files/neg/t5452-old.check b/test/files/neg/t5452-old.check
new file mode 100644
index 0000000..1860c98
--- /dev/null
+++ b/test/files/neg/t5452-old.check
@@ -0,0 +1,8 @@
+t5452-old.scala:28: error: overloaded method value apply with alternatives:
+ ()Queryable[CoffeesTable] <and>
+ (t: Tree)(implicit evidence$2: Manifest[CoffeesTable])Nothing <and>
+ (implicit evidence$1: Manifest[CoffeesTable])Nothing
+ cannot be applied to (Queryable[CoffeesTable])
+ Queryable[CoffeesTable]( q.treeFilter(null) )
+ ^
+one error found
diff --git a/test/files/neg/t5452-old.scala b/test/files/neg/t5452-old.scala
new file mode 100644
index 0000000..4f6dcbb
--- /dev/null
+++ b/test/files/neg/t5452-old.scala
@@ -0,0 +1,29 @@
+// /scala/trac/5452/a.scala
+// Mon Feb 13 22:52:36 PST 2012
+
+// import scala.reflect.runtime.universe._
+
+trait Tree
+
+object Bip {
+ def ??? = sys.error("")
+}
+import Bip._
+
+case class Queryable[T]() {
+ def treeFilter( t:Tree ) : Queryable[T] = ???
+}
+
+object Queryable {
+ def apply[T:Manifest] = ???
+ def apply[T:Manifest]( t:Tree ) = ???
+}
+
+trait CoffeesTable{
+ def sales : Int
+}
+
+object Test extends App{
+ val q = new Queryable[CoffeesTable]
+ Queryable[CoffeesTable]( q.treeFilter(null) )
+}
diff --git a/test/files/neg/t5455.check b/test/files/neg/t5455.check
new file mode 100644
index 0000000..788daf9
--- /dev/null
+++ b/test/files/neg/t5455.check
@@ -0,0 +1,4 @@
+t5455.scala:4: error: lazy vals are not tailcall transformed
+ @annotation.tailrec final lazy val bar: Thing[Int] = {
+ ^
+one error found
diff --git a/test/files/neg/t5455.scala b/test/files/neg/t5455.scala
new file mode 100644
index 0000000..22d6c44
--- /dev/null
+++ b/test/files/neg/t5455.scala
@@ -0,0 +1,16 @@
+trait Test {
+ def root: Test
+
+ @annotation.tailrec final lazy val bar: Thing[Int] = {
+ if (this eq root)
+ Thing(() => System.identityHashCode(bar))
+ else
+ root.bar
+ }
+
+ def f = bar.f()
+}
+
+case class Thing[A](f: () => A) {
+ override def toString = "" + f()
+}
diff --git a/test/files/neg/t5493.check b/test/files/neg/t5493.check
new file mode 100644
index 0000000..78b1536
--- /dev/null
+++ b/test/files/neg/t5493.check
@@ -0,0 +1,4 @@
+t5493.scala:2: error: not found: value iDontExist
+ def meh(xs: Any): Any = xs :: iDontExist :: Nil
+ ^
+one error found
diff --git a/test/files/neg/t5493.scala b/test/files/neg/t5493.scala
new file mode 100644
index 0000000..459cf53
--- /dev/null
+++ b/test/files/neg/t5493.scala
@@ -0,0 +1,3 @@
+object Test {
+ def meh(xs: Any): Any = xs :: iDontExist :: Nil
+}
diff --git a/test/files/neg/t5497.check b/test/files/neg/t5497.check
new file mode 100644
index 0000000..fef6d38
--- /dev/null
+++ b/test/files/neg/t5497.check
@@ -0,0 +1,4 @@
+t5497.scala:3: error: not found: value sq
+ case other => println(null.asInstanceOf[sq.Filter].tableName)
+ ^
+one error found
diff --git a/test/files/neg/t5497.scala b/test/files/neg/t5497.scala
new file mode 100644
index 0000000..40d47de
--- /dev/null
+++ b/test/files/neg/t5497.scala
@@ -0,0 +1,5 @@
+object TestQueryable extends App{
+ ({
+ case other => println(null.asInstanceOf[sq.Filter].tableName)
+ } : Any => Unit)(null)
+}
diff --git a/test/files/neg/t550.check b/test/files/neg/t550.check
new file mode 100644
index 0000000..da862e1
--- /dev/null
+++ b/test/files/neg/t550.check
@@ -0,0 +1,7 @@
+t550.scala:6: error: type List takes type parameters
+ def sum[a](xs: List)(implicit m: Monoid[a]): a =
+ ^
+t550.scala:8: error: could not find implicit value for parameter m: Monoid[a]
+ sum(List(1,2,3))
+ ^
+two errors found
diff --git a/test/files/neg/bug550.scala b/test/files/neg/t550.scala
similarity index 100%
rename from test/files/neg/bug550.scala
rename to test/files/neg/t550.scala
diff --git a/test/files/neg/t5510.check b/test/files/neg/t5510.check
new file mode 100644
index 0000000..322a2f5
--- /dev/null
+++ b/test/files/neg/t5510.check
@@ -0,0 +1,19 @@
+t5510.scala:2: error: unclosed string literal
+ val s1 = s"xxx
+ ^
+t5510.scala:3: error: unclosed string literal
+ val s2 = s"xxx $x
+ ^
+t5510.scala:4: error: unclosed string literal
+ val s3 = s"xxx $$
+ ^
+t5510.scala:5: error: unclosed string literal
+ val s4 = ""s"
+ ^
+t5510.scala:6: error: unclosed multi-line string literal
+ val s5 = ""s""" $s1 $s2 s"
+ ^
+t5510.scala:7: error: unclosed multi-line string literal
+}
+ ^
+6 errors found
diff --git a/test/files/neg/t5510.scala b/test/files/neg/t5510.scala
new file mode 100644
index 0000000..12630eb
--- /dev/null
+++ b/test/files/neg/t5510.scala
@@ -0,0 +1,7 @@
+object Test {
+ val s1 = s"xxx
+ val s2 = s"xxx $x
+ val s3 = s"xxx $$
+ val s4 = ""s"
+ val s5 = ""s""" $s1 $s2 s"
+}
diff --git a/test/files/neg/t5529.check b/test/files/neg/t5529.check
new file mode 100644
index 0000000..5d2175f
--- /dev/null
+++ b/test/files/neg/t5529.check
@@ -0,0 +1,10 @@
+t5529.scala:12: error: File is already defined as class File
+ type File
+ ^
+t5529.scala:10: error: class type required but test.Test.File found
+ sealed class Dir extends File { }
+ ^
+t5529.scala:10: error: test.Test.File does not have a constructor
+ sealed class Dir extends File { }
+ ^
+three errors found
diff --git a/test/files/neg/t5529.scala b/test/files/neg/t5529.scala
new file mode 100644
index 0000000..033009a
--- /dev/null
+++ b/test/files/neg/t5529.scala
@@ -0,0 +1,13 @@
+// /scala/trac/5529/a.scala
+// Tue Feb 28 13:11:28 PST 2012
+
+package test;
+
+object Test {
+ sealed class File {
+ val i = 1
+ }
+ sealed class Dir extends File { }
+
+ type File
+}
diff --git a/test/files/neg/t5543.check b/test/files/neg/t5543.check
new file mode 100644
index 0000000..b61de0f
--- /dev/null
+++ b/test/files/neg/t5543.check
@@ -0,0 +1,10 @@
+t5543.scala:3: error: not found: type T
+ def this(x: T) { this() }
+ ^
+t5543.scala:11: error: not found: value x
+ def this(a: Int, b: Int = x) {
+ ^
+t5543.scala:18: error: not found: value x
+ def this(a: Int = x) { this() }
+ ^
+three errors found
diff --git a/test/files/neg/t5543.scala b/test/files/neg/t5543.scala
new file mode 100644
index 0000000..4e03e6e
--- /dev/null
+++ b/test/files/neg/t5543.scala
@@ -0,0 +1,19 @@
+class C1 {
+ type T
+ def this(x: T) { this() }
+}
+
+class C1a[T] {
+ def this(x: T) { this() } // works, no error here
+}
+
+class C2(x: Int) {
+ def this(a: Int, b: Int = x) {
+ this(b)
+ }
+}
+
+class C3 {
+ val x = 0
+ def this(a: Int = x) { this() }
+}
diff --git a/test/files/neg/t5544.check b/test/files/neg/t5544.check
new file mode 100644
index 0000000..d411393
--- /dev/null
+++ b/test/files/neg/t5544.check
@@ -0,0 +1,4 @@
+Test_2.scala:2: error: value baz is not a member of object Api
+ Api.baz
+ ^
+one error found
diff --git a/test/files/neg/t5544/Api_1.scala b/test/files/neg/t5544/Api_1.scala
new file mode 100644
index 0000000..77637f4
--- /dev/null
+++ b/test/files/neg/t5544/Api_1.scala
@@ -0,0 +1,8 @@
+import scala.annotation.StaticAnnotation
+
+class ann(val bar: Any) extends StaticAnnotation
+
+object Api {
+ @ann({def baz = "baz!!"})
+ def foo = println("foo")
+}
diff --git a/test/files/neg/t5544/Test_2.scala b/test/files/neg/t5544/Test_2.scala
new file mode 100644
index 0000000..4c8c99c
--- /dev/null
+++ b/test/files/neg/t5544/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Api.baz
+}
diff --git a/test/files/neg/t5553_1.check b/test/files/neg/t5553_1.check
new file mode 100644
index 0000000..afd6489
--- /dev/null
+++ b/test/files/neg/t5553_1.check
@@ -0,0 +1,54 @@
+t5553_1.scala:18: error: ambiguous reference to overloaded definition,
+both method apply in object Foo1 of type (z: String)Base[T]
+and method apply in object Foo1 of type (a: Int)Base[T]
+match expected type ?
+ def test1[T] = Foo1[T]
+ ^
+t5553_1.scala:19: error: type mismatch;
+ found : [T](z: String)Base[T] <and> (a: Int)Base[T]
+ required: Int
+ def test2[T]: Int = Foo1[T]
+ ^
+t5553_1.scala:20: error: type mismatch;
+ found : [T(in method apply)](z: String)Base[T(in method apply)] <and> (a: Int)Base[T(in method apply)]
+ required: Base[T(in method test3)]
+ def test3[T]: Base[T] = Foo1[T]
+ ^
+t5553_1.scala:24: error: ambiguous reference to overloaded definition,
+both method apply in object Foo2 of type (z: String)Base[T]
+and method apply in object Foo2 of type (a: Int)Base[T]
+match expected type ?
+ def test4[T] = Foo2[T]
+ ^
+t5553_1.scala:25: error: type mismatch;
+ found : [T](z: String)Base[T] <and> (a: Int)Base[T]
+ required: Int
+ def test5[T]: Int = Foo2[T]
+ ^
+t5553_1.scala:26: error: type mismatch;
+ found : [T(in method apply)](z: String)Base[T(in method apply)] <and> (a: Int)Base[T(in method apply)]
+ required: Base[T(in method test6)]
+ def test6[T]: Base[T] = Foo2[T]
+ ^
+t5553_1.scala:30: error: ambiguous reference to overloaded definition,
+both method apply in object Foo3 of type (z: String)String
+and method apply in object Foo3 of type (a: Int)Base[T]
+match expected type ?
+ def test7[T] = Foo3[T]
+ ^
+t5553_1.scala:31: error: type mismatch;
+ found : [T](z: String)String <and> (a: Int)Base[T]
+ required: String
+ def test8[T]: String = Foo3[T]
+ ^
+t5553_1.scala:32: error: type mismatch;
+ found : [T](z: String)String <and> (a: Int)Base[T]
+ required: Int
+ def test9[T]: Int = Foo3[T]
+ ^
+t5553_1.scala:33: error: type mismatch;
+ found : [T(in method apply)](z: String)String <and> (a: Int)Base[T(in method apply)]
+ required: Base[T(in method test10)]
+ def test10[T]: Base[T] = Foo3[T]
+ ^
+10 errors found
diff --git a/test/files/neg/t5553_1.scala b/test/files/neg/t5553_1.scala
new file mode 100644
index 0000000..32d61ec
--- /dev/null
+++ b/test/files/neg/t5553_1.scala
@@ -0,0 +1,34 @@
+class Base[T]
+
+object Foo1 {
+ def apply[T](a: Int): Base[T] = new Base[T]
+ def apply[T](z: String): Base[T] = new Base[T]
+}
+
+object Foo2 {
+ def apply[T](a: Int): Base[T] = new Base[T]
+ def apply[T](z: String="abc"): Base[T] = new Base[T]
+}
+
+object Foo3 {
+ def apply[T](a: Int): Base[T] = new Base[T]
+ def apply[T](z: String="abc"): String = z
+}
+object Test {
+ def test1[T] = Foo1[T]
+ def test2[T]: Int = Foo1[T]
+ def test3[T]: Base[T] = Foo1[T]
+}
+
+object Test2 {
+ def test4[T] = Foo2[T]
+ def test5[T]: Int = Foo2[T]
+ def test6[T]: Base[T] = Foo2[T]
+}
+
+object Test3{
+ def test7[T] = Foo3[T]
+ def test8[T]: String = Foo3[T]
+ def test9[T]: Int = Foo3[T]
+ def test10[T]: Base[T] = Foo3[T]
+}
diff --git a/test/files/neg/t5553_2.check b/test/files/neg/t5553_2.check
new file mode 100644
index 0000000..599fdb0
--- /dev/null
+++ b/test/files/neg/t5553_2.check
@@ -0,0 +1,50 @@
+t5553_2.scala:27: error: type mismatch;
+ found : Base[T]
+ required: Int
+ def test4[T]: Int = Foo1[T](1)
+ ^
+t5553_2.scala:34: error: type mismatch;
+ found : String
+ required: Base[T]
+ def test7[T]: Base[T] = Foo2[T]
+ ^
+t5553_2.scala:35: error: type mismatch;
+ found : String
+ required: Int
+ def test8[T]: Int = Foo2[T]
+ ^
+t5553_2.scala:40: error: type mismatch;
+ found : String
+ required: Int
+ def test9[T]: Int = Foo3[T]
+ ^
+t5553_2.scala:41: error: type mismatch;
+ found : String
+ required: Base[T]
+ def test10[T]: Base[T] = Foo3[T]
+ ^
+t5553_2.scala:47: error: could not find implicit value for parameter z: String
+ def test13[T]: Int = Foo3[T]
+ ^
+t5553_2.scala:48: error: could not find implicit value for parameter z: String
+ def test14[T]: Base[T] = Foo3[T]
+ ^
+t5553_2.scala:49: error: could not find implicit value for parameter z: String
+ def test15[T]: String = Foo3[T]
+ ^
+t5553_2.scala:50: error: could not find implicit value for parameter z: String
+ def test16[T] = Foo3[T]
+ ^
+t5553_2.scala:54: error: ambiguous reference to overloaded definition,
+both method apply in object Foo4 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo4 of type (x: Int)Base[T]
+match argument types (Int)
+ def test17[T] = Foo4[T](1)
+ ^
+t5553_2.scala:55: error: ambiguous reference to overloaded definition,
+both method apply in object Foo4 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo4 of type (x: Int)Base[T]
+match argument types (Int) and expected result type Base[T]
+ def test18[T]: Base[T] = Foo4[T](1)
+ ^
+11 errors found
diff --git a/test/files/neg/t5553_2.scala b/test/files/neg/t5553_2.scala
new file mode 100644
index 0000000..16958ae
--- /dev/null
+++ b/test/files/neg/t5553_2.scala
@@ -0,0 +1,59 @@
+class Base[T]
+
+object Foo1 {
+ def apply[T](x: Int): Base[T] = new Base[T]
+ def apply[T](x: Int, z: String="abc"): String = z
+}
+
+object Foo2 {
+ def apply[T](a: Int): Base[T] = new Base[T]
+ def apply[T]: String = "abc"
+}
+
+object Foo3 {
+ def apply[T](x: Int): Base[T] = new Base[T]
+ def apply[T](implicit z: String): String = z
+}
+
+object Foo4 {
+ def apply[T](x: Int): Base[T] = new Base[T]
+ def apply[T](x: Int)(implicit z: String): Base[T] = new Base[T]
+}
+
+object Test1 {
+ def test1[T] = Foo1[T](1)
+ def test2[T]: String = Foo1[T](1)
+ def test3[T]: Base[T] = Foo1[T](1)
+ def test4[T]: Int = Foo1[T](1)
+
+}
+
+object Test2 {
+ def test5[T] = Foo2[T]
+ def test6[T]: String = Foo2[T]
+ def test7[T]: Base[T] = Foo2[T]
+ def test8[T]: Int = Foo2[T]
+}
+
+object Test3 {
+ implicit val v: String = "abc"
+ def test9[T]: Int = Foo3[T]
+ def test10[T]: Base[T] = Foo3[T]
+ def test11[T]: String = Foo3[T]
+ def test12[T] = Foo3[T]
+}
+
+object Test4 {
+ def test13[T]: Int = Foo3[T]
+ def test14[T]: Base[T] = Foo3[T]
+ def test15[T]: String = Foo3[T]
+ def test16[T] = Foo3[T]
+}
+
+object Test5 {
+ def test17[T] = Foo4[T](1)
+ def test18[T]: Base[T] = Foo4[T](1)
+ //def test19[T]: String = Foo4[T](1) // #5554
+}
+
+
diff --git a/test/files/neg/t5554.check b/test/files/neg/t5554.check
new file mode 100644
index 0000000..8f657fd
--- /dev/null
+++ b/test/files/neg/t5554.check
@@ -0,0 +1,67 @@
+t5554.scala:14: error: ambiguous reference to overloaded definition,
+both method apply in object Foo1 of type (x: Int)(implicit z: String)String
+and method apply in object Foo1 of type (x: Int)Base[T]
+match argument types (Int)
+ def test1[T]: Int = Foo1[T](1)
+ ^
+t5554.scala:16: error: ambiguous reference to overloaded definition,
+both method apply in object Foo1 of type (x: Int)(implicit z: String)String
+and method apply in object Foo1 of type (x: Int)Base[T]
+match argument types (Int)
+ def test3[T]: String = Foo1[T](1)
+ ^
+t5554.scala:17: error: ambiguous reference to overloaded definition,
+both method apply in object Foo1 of type (x: Int)(implicit z: String)String
+and method apply in object Foo1 of type (x: Int)Base[T]
+match argument types (Int)
+ def test4[T] = Foo1[T](1)
+ ^
+t5554.scala:22: error: ambiguous reference to overloaded definition,
+both method apply in object Foo1 of type (x: Int)(implicit z: String)String
+and method apply in object Foo1 of type (x: Int)Base[T]
+match argument types (Int)
+ def test5[T]: Int = Foo1[T](1)
+ ^
+t5554.scala:25: error: ambiguous reference to overloaded definition,
+both method apply in object Foo1 of type (x: Int)(implicit z: String)String
+and method apply in object Foo1 of type (x: Int)Base[T]
+match argument types (Int)
+ def test8[T] = Foo1[T](1)
+ ^
+t5554.scala:29: error: ambiguous reference to overloaded definition,
+both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo2 of type (x: Int)Base[T]
+match argument types (Int)
+ def test9[T]: String = Foo2[T](1)
+ ^
+t5554.scala:30: error: ambiguous reference to overloaded definition,
+both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo2 of type (x: Int)Base[T]
+match argument types (Int) and expected result type Base[T]
+ def test10[T]: Base[T] = Foo2[T](1)
+ ^
+t5554.scala:31: error: ambiguous reference to overloaded definition,
+both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo2 of type (x: Int)Base[T]
+match argument types (Int)
+ def test11[T] = Foo2[T](1)
+ ^
+t5554.scala:36: error: ambiguous reference to overloaded definition,
+both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo2 of type (x: Int)Base[T]
+match argument types (Int)
+ def test12[T]: String = Foo2[T](1)
+ ^
+t5554.scala:37: error: ambiguous reference to overloaded definition,
+both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo2 of type (x: Int)Base[T]
+match argument types (Int) and expected result type Base[T]
+ def test13[T]: Base[T] = Foo2[T](1)
+ ^
+t5554.scala:38: error: ambiguous reference to overloaded definition,
+both method apply in object Foo2 of type (x: Int)(implicit z: String)Base[T]
+and method apply in object Foo2 of type (x: Int)Base[T]
+match argument types (Int)
+ def test14[T] = Foo2[T](1)
+ ^
+11 errors found
diff --git a/test/files/neg/t5554.scala b/test/files/neg/t5554.scala
new file mode 100644
index 0000000..d279abe
--- /dev/null
+++ b/test/files/neg/t5554.scala
@@ -0,0 +1,39 @@
+class Base[T]
+
+object Foo1 {
+ def apply[T](x: Int): Base[T] = new Base[T]
+ def apply[T](x: Int)(implicit z: String): String = z
+}
+
+object Foo2 {
+ def apply[T](x: Int): Base[T] = new Base[T]
+ def apply[T](x: Int)(implicit z: String): Base[T] = new Base[T]
+}
+
+object Test1 {
+ def test1[T]: Int = Foo1[T](1)
+ def test2[T]: Base[T] = Foo1[T](1)
+ def test3[T]: String = Foo1[T](1)
+ def test4[T] = Foo1[T](1)
+}
+
+object Test2 {
+ implicit val v: String = "foo"
+ def test5[T]: Int = Foo1[T](1)
+ def test6[T]: Base[T] = Foo1[T](1)
+ def test7[T]: String = Foo1[T](1)
+ def test8[T] = Foo1[T](1)
+}
+
+object Test3 {
+ def test9[T]: String = Foo2[T](1)
+ def test10[T]: Base[T] = Foo2[T](1)
+ def test11[T] = Foo2[T](1)
+}
+
+object Test4 {
+ implicit val v: String = "foo"
+ def test12[T]: String = Foo2[T](1)
+ def test13[T]: Base[T] = Foo2[T](1)
+ def test14[T] = Foo2[T](1)
+}
diff --git a/test/files/neg/t556.check b/test/files/neg/t556.check
new file mode 100644
index 0000000..c278e13
--- /dev/null
+++ b/test/files/neg/t556.check
@@ -0,0 +1,4 @@
+t556.scala:3: error: wrong number of parameters; expected = 1
+ def g:Int = f((x,y)=>x)
+ ^
+one error found
diff --git a/test/files/neg/bug556.scala b/test/files/neg/t556.scala
similarity index 100%
rename from test/files/neg/bug556.scala
rename to test/files/neg/t556.scala
diff --git a/test/files/neg/t5564.check b/test/files/neg/t5564.check
new file mode 100644
index 0000000..e7e13cc
--- /dev/null
+++ b/test/files/neg/t5564.check
@@ -0,0 +1,4 @@
+t5564.scala:8: error: inferred type arguments [A] do not conform to method bar's type parameter bounds [B >: A <: C]
+ def bar[B >: A <: C]: T = throw new Exception
+ ^
+one error found
diff --git a/test/files/neg/t5564.scala b/test/files/neg/t5564.scala
new file mode 100644
index 0000000..663cf88
--- /dev/null
+++ b/test/files/neg/t5564.scala
@@ -0,0 +1,9 @@
+
+
+
+trait C
+
+
+class Foo[@specialized(Int) T, A] {
+ def bar[B >: A <: C]: T = throw new Exception
+}
diff --git a/test/files/neg/t5572.check b/test/files/neg/t5572.check
new file mode 100644
index 0000000..7b1e290
--- /dev/null
+++ b/test/files/neg/t5572.check
@@ -0,0 +1,11 @@
+t5572.scala:16: error: type mismatch;
+ found : B
+ required: A
+ Z.transf(a, b) match {
+ ^
+t5572.scala:18: error: type mismatch;
+ found : A
+ required: B
+ run(sth, b)
+ ^
+two errors found
diff --git a/test/files/neg/t5572.scala b/test/files/neg/t5572.scala
new file mode 100644
index 0000000..2da1209
--- /dev/null
+++ b/test/files/neg/t5572.scala
@@ -0,0 +1,23 @@
+class A
+class B
+
+trait X
+
+object Z {
+ def transf(a: A, b: B): X = null
+}
+
+class Test {
+
+ def bar(): (A, B)
+
+ def foo {
+ val (b, a) = bar()
+ Z.transf(a, b) match {
+ case sth =>
+ run(sth, b)
+ }
+ }
+
+ def run(x: X, z: B): Unit = ()
+}
diff --git a/test/files/neg/t5578.check b/test/files/neg/t5578.check
new file mode 100644
index 0000000..d803adb
--- /dev/null
+++ b/test/files/neg/t5578.check
@@ -0,0 +1,4 @@
+t5578.scala:33: error: No Manifest available for T.
+ def plus[T: Numeric](x: Rep[T], y: Rep[T]): Rep[T] = Plus[T](x,y)
+ ^
+one error found
diff --git a/test/files/neg/t5578.scala b/test/files/neg/t5578.scala
new file mode 100644
index 0000000..ce72f32
--- /dev/null
+++ b/test/files/neg/t5578.scala
@@ -0,0 +1,39 @@
+trait Base {
+ type Rep[T]
+}
+
+trait Expressions {
+ // constants/symbols (atomic)
+ abstract class Exp[T]
+ // ...
+ case class Sym[T](n: Int) extends Exp[T]
+
+ // operations (composite, defined in subtraits)
+ abstract class Def[T]
+
+ // additional members for managing encountered definitions
+ def findOrCreateDefinition[T](rhs: Def[T]): Sym[T]
+ implicit def toExp[T:Manifest](d: Def[T]): Exp[T] = findOrCreateDefinition(d)
+}
+
+trait BaseExp extends Base with Expressions {
+ type Rep[T] = Exp[T]
+
+ def findOrCreateDefinition[T](rhs: Def[T]): Sym[T] = null // stub
+}
+
+trait NumericOps extends Base {
+ def plus[T](x: Rep[T], y: Rep[T]): Rep[T]
+}
+
+trait NumericOpsExp extends BaseExp {
+ case class Plus[T:Numeric](x: Rep[T], y: Rep[T])
+ extends Def[T]
+
+ def plus[T: Numeric](x: Rep[T], y: Rep[T]): Rep[T] = Plus[T](x,y)
+
+ // Possible solutions:
+// def plus[T: Numeric: Manifest](x: Rep[T], y: Rep[T]): Rep[T] = Plus[T](x, y)
+// def plus[T](x: Rep[T], y: Rep[T])(implicit num: Numeric[T], man: Manifest[T]): Rep[T] = Plus(x,y)
+
+}
diff --git a/test/files/neg/t558.check b/test/files/neg/t558.check
new file mode 100644
index 0000000..f33ddc4
--- /dev/null
+++ b/test/files/neg/t558.check
@@ -0,0 +1,4 @@
+t558.scala:13: error: value file is not a member of NewModel.this.RootURL
+ final val source = top.file;
+ ^
+one error found
diff --git a/test/files/neg/bug558.scala b/test/files/neg/t558.scala
similarity index 100%
rename from test/files/neg/bug558.scala
rename to test/files/neg/t558.scala
diff --git a/test/files/neg/t5580a.check b/test/files/neg/t5580a.check
new file mode 100644
index 0000000..50a3185
--- /dev/null
+++ b/test/files/neg/t5580a.check
@@ -0,0 +1,6 @@
+t5580a.scala:9: error: polymorphic expression cannot be instantiated to expected type;
+ found : [A]scala.collection.mutable.Set[A]
+ required: scala.collection.mutable.Map[bar,scala.collection.mutable.Set[bar]]
+ if (map.get(tmp).isEmpty) map.put(tmp,collection.mutable.Set())
+ ^
+one error found
diff --git a/test/files/neg/t5580a.scala b/test/files/neg/t5580a.scala
new file mode 100644
index 0000000..742f0e8
--- /dev/null
+++ b/test/files/neg/t5580a.scala
@@ -0,0 +1,11 @@
+import scala.collection.mutable.WeakHashMap
+
+class bar{ }
+class foo{
+ val map = WeakHashMap[AnyRef, collection.mutable.Map[bar, collection.mutable.Set[bar]]]()
+
+ def test={
+ val tmp:bar=null
+ if (map.get(tmp).isEmpty) map.put(tmp,collection.mutable.Set())
+ }
+}
diff --git a/test/files/neg/t5589neg.flags b/test/files/neg/t5589neg.flags
new file mode 100644
index 0000000..dcc59eb
--- /dev/null
+++ b/test/files/neg/t5589neg.flags
@@ -0,0 +1 @@
+-deprecation
diff --git a/test/files/neg/t5589neg2.check b/test/files/neg/t5589neg2.check
new file mode 100644
index 0000000..6af4955
--- /dev/null
+++ b/test/files/neg/t5589neg2.check
@@ -0,0 +1,9 @@
+t5589neg2.scala:7: error: constructor cannot be instantiated to expected type;
+ found : (T1, T2)
+ required: String
+ for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok
+ ^
+t5589neg2.scala:7: error: not found: value d
+ for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok
+ ^
+two errors found
diff --git a/test/files/neg/t5617.check b/test/files/neg/t5617.check
new file mode 100644
index 0000000..79cc3a1
--- /dev/null
+++ b/test/files/neg/t5617.check
@@ -0,0 +1,8 @@
+t5617.scala:12: error: method foo overrides nothing.
+Note: the super classes of trait C contain the following, non final members named foo:
+def foo(u: Unit): Int
+def foo(x: Boolean): Int
+def foo(i: Int)(b: String): Int
+ override def foo(s: String): Int
+ ^
+one error found
diff --git a/test/files/neg/t5617.scala b/test/files/neg/t5617.scala
new file mode 100644
index 0000000..41541b5
--- /dev/null
+++ b/test/files/neg/t5617.scala
@@ -0,0 +1,14 @@
+trait A {
+ def foo(i: Int)(b: String): Int
+ def foo(u: Unit): Int // not reported
+ def foo(x: Float): Int // not reported
+}
+trait B[X] extends A {
+ def foo(x: X): Int
+ def foo(u: Unit): Int
+ final def foo(x: Float): Int = 0 // not reported
+}
+trait C extends B[Boolean] {
+ override def foo(s: String): Int
+ val foo = 0 // not reported
+}
diff --git a/test/files/neg/t562.check b/test/files/neg/t562.check
new file mode 100644
index 0000000..8c38236
--- /dev/null
+++ b/test/files/neg/t562.check
@@ -0,0 +1,4 @@
+t562.scala:10: error: super may be not be used on value y
+ override val y = super.y;
+ ^
+one error found
diff --git a/test/files/neg/bug562.scala b/test/files/neg/t562.scala
similarity index 100%
rename from test/files/neg/bug562.scala
rename to test/files/neg/t562.scala
diff --git a/test/files/neg/t563.check b/test/files/neg/t563.check
new file mode 100644
index 0000000..1431c85
--- /dev/null
+++ b/test/files/neg/t563.check
@@ -0,0 +1,4 @@
+t563.scala:6: error: missing parameter type
+ map(n,ptr => Option(ptr.get));
+ ^
+one error found
diff --git a/test/files/neg/t563.scala b/test/files/neg/t563.scala
new file mode 100644
index 0000000..d367e2a
--- /dev/null
+++ b/test/files/neg/t563.scala
@@ -0,0 +1,7 @@
+object Test {
+ def map[A,R](a : List[A], f : A => R) : List[R] = a.map(f);
+
+ def split(sn : Iterable[List[Option[Int]]]) : Unit =
+ for (n <- sn)
+ map(n,ptr => Option(ptr.get));
+}
diff --git a/test/files/neg/t565.check b/test/files/neg/t565.check
new file mode 100644
index 0000000..136cc94
--- /dev/null
+++ b/test/files/neg/t565.check
@@ -0,0 +1,5 @@
+t565.scala:2: error: only classes can have declared but undefined members
+(Note that variables need to be initialized to be defined)
+ var s0: String
+ ^
+one error found
diff --git a/test/files/neg/bug565.scala b/test/files/neg/t565.scala
similarity index 100%
rename from test/files/neg/bug565.scala
rename to test/files/neg/t565.scala
diff --git a/test/files/neg/t5663-badwarneq.check b/test/files/neg/t5663-badwarneq.check
new file mode 100644
index 0000000..242be8d
--- /dev/null
+++ b/test/files/neg/t5663-badwarneq.check
@@ -0,0 +1,40 @@
+t5663-badwarneq.scala:47: error: comparing case class values of types Some[Int] and None.type using `==' will always yield false
+ println(new Some(1) == None) // Should complain on type, was: spuriously complains on fresh object
+ ^
+t5663-badwarneq.scala:48: error: comparing case class values of types Some[Int] and Thing using `==' will always yield false
+ println(Some(1) == new Thing(1)) // Should complain on type, was: spuriously complains on fresh object
+ ^
+t5663-badwarneq.scala:56: error: ThingOne and Thingy are unrelated: they will most likely never compare equal
+ println(t1 == t2) // true, but apparently unrelated, a compromise warning
+ ^
+t5663-badwarneq.scala:57: error: ThingThree and Thingy are unrelated: they will most likely never compare equal
+ println(t4 == t2) // true, complains because ThingThree is final and Thingy not a subclass, stronger claim than unrelated
+ ^
+t5663-badwarneq.scala:60: error: comparing case class values of types ThingTwo and Some[Int] using `==' will always yield false
+ println(t3 == Some(1)) // false, warn on different cases
+ ^
+t5663-badwarneq.scala:61: error: comparing values of types ThingOne and Cousin using `==' will always yield false
+ println(t1 == c) // should warn
+ ^
+t5663-badwarneq.scala:69: error: comparing case class values of types Simple and SimpleSibling.type using `==' will always yield false
+ println(new Simple() == SimpleSibling) // like Some(1) == None, but needn't be final case
+ ^
+t5663-badwarneq.scala:72: error: ValueClass1 and Int are unrelated: they will never compare equal
+ println(new ValueClass1(5) == 5) // bad
+ ^
+t5663-badwarneq.scala:74: error: comparing values of types Int and ValueClass1 using `==' will always yield false
+ println(5 == new ValueClass1(5)) // bad
+ ^
+t5663-badwarneq.scala:78: error: ValueClass2[String] and String are unrelated: they will never compare equal
+ println(new ValueClass2("abc") == "abc") // bad
+ ^
+t5663-badwarneq.scala:79: error: ValueClass2[Int] and ValueClass1 are unrelated: they will never compare equal
+ println(new ValueClass2(5) == new ValueClass1(5)) // bad - different value classes
+ ^
+t5663-badwarneq.scala:81: error: comparing values of types ValueClass3 and ValueClass2[Int] using `==' will always yield false
+ println(ValueClass3(5) == new ValueClass2(5)) // bad
+ ^
+t5663-badwarneq.scala:82: error: comparing values of types ValueClass3 and Int using `==' will always yield false
+ println(ValueClass3(5) == 5) // bad
+ ^
+13 errors found
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/neg/t5663-badwarneq.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/neg/t5663-badwarneq.flags
diff --git a/test/files/neg/t5663-badwarneq.scala b/test/files/neg/t5663-badwarneq.scala
new file mode 100644
index 0000000..3c03769
--- /dev/null
+++ b/test/files/neg/t5663-badwarneq.scala
@@ -0,0 +1,94 @@
+
+// alias
+trait Thingy
+
+class Gramps
+
+// sibling classes that extend a case class
+case class Thing(i: Int) extends Gramps
+class ThingOne(x:Int) extends Thing(x)
+class ThingTwo(y:Int) extends Thing(y) with Thingy
+final class ThingThree(z:Int) extends Thing(z)
+
+// not case cousin
+class Cousin extends Gramps
+
+class SimpleParent
+case class Simple() extends SimpleParent
+case object SimpleSibling extends SimpleParent
+
+// value classes
+final class ValueClass1(val value: Int) extends AnyVal
+final class ValueClass2[T](val value: T) extends AnyVal
+final case class ValueClass3(val value: Int) extends AnyVal
+
+/* It's not possible to run partest without -deprecation.
+ * Since detecting the warnings requires a neg test with
+ * -Xfatal-warnings, and deprecation terminates the compile,
+ * we'll just comment out the nasty part. The point was
+ * just to show there's nothing special about a trait
+ * that extends a case class, which is only permitted
+ * (deprecatingly) by omitting the parens.
+ *
+// common ancestor is something else
+class AnyThing
+case class SomeThing extends AnyThing // deprecation
+class OtherThing extends AnyThing
+
+// how you inherit caseness doesn't matter
+trait InThing extends SomeThing
+class MyThing extends InThing
+*/
+
+object Test {
+ def main(a: Array[String]) {
+ // nothing to do with Gavin
+ println(new Some(1) == new Some(1)) // OK, true
+ println(new Some(1) == None) // Should complain on type, was: spuriously complains on fresh object
+ println(Some(1) == new Thing(1)) // Should complain on type, was: spuriously complains on fresh object
+
+ val t1 = new ThingOne(11)
+ val t2: Thingy = new ThingTwo(11)
+ val t3 = new ThingTwo(11)
+ val t4 = new ThingThree(11)
+ val c = new Cousin
+
+ println(t1 == t2) // true, but apparently unrelated, a compromise warning
+ println(t4 == t2) // true, complains because ThingThree is final and Thingy not a subclass, stronger claim than unrelated
+ println(t2 == t3) // OK, two Thingy
+ println(t3 == t2) // ditto with case receiver
+ println(t3 == Some(1)) // false, warn on different cases
+ println(t1 == c) // should warn
+
+ // don't warn on fresh cases
+ println(new ThingOne(11) == t1) // OK, was: two cases not warnable on trunk
+ println(new ThingTwo(11) == t2) // true, was: spuriously complains on fresh object
+ println(new ThingOne(11) == t3) // two cases not warnable on trunk
+ println(new ThingTwo(11) == t3) // ditto
+
+ println(new Simple() == SimpleSibling) // like Some(1) == None, but needn't be final case
+
+ println(new ValueClass1(5) == new ValueClass1(5)) // ok
+ println(new ValueClass1(5) == 5) // bad
+ println(new ValueClass1(5) == (5: Any)) // ok, have to let it through
+ println(5 == new ValueClass1(5)) // bad
+ println((5: Any) == new ValueClass1(5) == (5: Any)) // ok
+
+ println(new ValueClass2("abc") == new ValueClass2("abc")) // ok
+ println(new ValueClass2("abc") == "abc") // bad
+ println(new ValueClass2(5) == new ValueClass1(5)) // bad - different value classes
+ println(ValueClass3(5) == new ValueClass3(5)) // ok
+ println(ValueClass3(5) == new ValueClass2(5)) // bad
+ println(ValueClass3(5) == 5) // bad
+
+ /*
+ val mine = new MyThing
+ val some = new SomeThing
+ val other = new OtherThing
+ println(mine == some) // OK, two Something
+ println(some == mine)
+ println(mine == other) // OK, two Anything?
+ println(mine == t1) // false
+ */
+ }
+}
diff --git a/test/files/neg/t5666.check b/test/files/neg/t5666.check
new file mode 100644
index 0000000..1c71479
--- /dev/null
+++ b/test/files/neg/t5666.check
@@ -0,0 +1,37 @@
+t5666.scala:2: error: class Any is abstract; cannot be instantiated
+ new Any
+ ^
+t5666.scala:3: error: class AnyVal is abstract; cannot be instantiated
+ new AnyVal
+ ^
+t5666.scala:4: error: class Double is abstract; cannot be instantiated
+ new Double
+ ^
+t5666.scala:5: error: class Float is abstract; cannot be instantiated
+ new Float
+ ^
+t5666.scala:6: error: class Long is abstract; cannot be instantiated
+ new Long
+ ^
+t5666.scala:7: error: class Int is abstract; cannot be instantiated
+ new Int
+ ^
+t5666.scala:8: error: class Char is abstract; cannot be instantiated
+ new Char
+ ^
+t5666.scala:9: error: class Short is abstract; cannot be instantiated
+ new Short
+ ^
+t5666.scala:10: error: class Byte is abstract; cannot be instantiated
+ new Byte
+ ^
+t5666.scala:11: error: class Boolean is abstract; cannot be instantiated
+ new Boolean
+ ^
+t5666.scala:12: error: class Unit is abstract; cannot be instantiated
+ new Unit
+ ^
+t5666.scala:13: error: class Nothing is abstract; cannot be instantiated
+ new Nothing
+ ^
+12 errors found
diff --git a/test/files/neg/t5666.scala b/test/files/neg/t5666.scala
new file mode 100644
index 0000000..ffaeaac
--- /dev/null
+++ b/test/files/neg/t5666.scala
@@ -0,0 +1,14 @@
+object t5666 {
+ new Any
+ new AnyVal
+ new Double
+ new Float
+ new Long
+ new Int
+ new Char
+ new Short
+ new Byte
+ new Boolean
+ new Unit
+ new Nothing
+}
\ No newline at end of file
diff --git a/test/files/neg/t5675.check b/test/files/neg/t5675.check
new file mode 100644
index 0000000..da608a2
--- /dev/null
+++ b/test/files/neg/t5675.check
@@ -0,0 +1,2 @@
+error: there were 1 feature warning(s); re-run with -feature for details
+one error found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/t5675.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/t5675.flags
diff --git a/test/files/neg/t5675.scala b/test/files/neg/t5675.scala
new file mode 100644
index 0000000..238ed0f
--- /dev/null
+++ b/test/files/neg/t5675.scala
@@ -0,0 +1,7 @@
+class PostFix {
+ val list = List(1, 2, 3)
+ def main(args: Array[String]) {
+ val a = list filter (2 !=)
+ val b = list filter (2 != _)
+ }
+}
diff --git a/test/files/neg/t5683.check b/test/files/neg/t5683.check
new file mode 100644
index 0000000..7c0e501
--- /dev/null
+++ b/test/files/neg/t5683.check
@@ -0,0 +1,16 @@
+t5683.scala:12: error: inferred kinds of the type arguments (Object,Int) do not conform to the expected kinds of the type parameters (type M,type B).
+Object's type parameters do not match type M's expected parameters:
+class Object has no type parameters, but type M has one
+ val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] }
+ ^
+t5683.scala:12: error: type mismatch;
+ found : Int => Test.W[String,Int]
+ required: Int => M[B]
+ val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] }
+ ^
+t5683.scala:12: error: type mismatch;
+ found : Test.K[M,Int,B]
+ required: Test.K[Test.StringW,Int,Int]
+ val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] }
+ ^
+three errors found
diff --git a/test/files/neg/t5683.scala b/test/files/neg/t5683.scala
new file mode 100644
index 0000000..05ab035
--- /dev/null
+++ b/test/files/neg/t5683.scala
@@ -0,0 +1,23 @@
+object Test {
+ trait NT[X]
+ trait W[W, A] extends NT[Int]
+ type StringW[T] = W[String, T]
+ trait K[M[_], A, B]
+
+ def k[M[_], B](f: Int => M[B]): K[M, Int, B] = null
+
+ val okay1: K[StringW,Int,Int] = k{ (y: Int) => null: StringW[Int] }
+ val okay2 = k[StringW,Int]{ (y: Int) => null: W[String, Int] }
+
+ val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] }
+
+ // remove `extends NT[Int]`, and the last line gives an inference error
+ // rather than a crash.
+ // test/files/pos/t5683.scala:12: error: no type parameters for method k: (f: Int => M[B])Test.K[M,Int,B] exist so that it can be applied to arguments (Int => Test.W[String,Int])
+ // --- because ---
+ // argument expression's type is not compatible with formal parameter type;
+ // found : Int => Test.W[String,Int]
+ // required: Int => ?M[?B]
+ // val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] }
+ // ^
+}
diff --git a/test/files/neg/t5687.check b/test/files/neg/t5687.check
new file mode 100644
index 0000000..5096077
--- /dev/null
+++ b/test/files/neg/t5687.check
@@ -0,0 +1,8 @@
+t5687.scala:4: error: type arguments [T] do not conform to class Template's type parameter bounds [T <: AnyRef]
+ type Repr[T]<:Template[T]
+ ^
+t5687.scala:20: error: overriding type Repr in class Template with bounds[T] <: Template[T];
+ type Repr has incompatible type
+ type Repr = CurveTemplate[T]
+ ^
+two errors found
diff --git a/test/files/neg/t5687.scala b/test/files/neg/t5687.scala
new file mode 100644
index 0000000..90a9ae2
--- /dev/null
+++ b/test/files/neg/t5687.scala
@@ -0,0 +1,55 @@
+abstract class Template[T <: AnyRef](private val t: T) {
+
+// type Repr[A<:AnyRef]<:Template[T]
+ type Repr[T]<:Template[T]
+
+ def access1(timeout: Int): Repr[T] = this.asInstanceOf[Repr[T]]
+ def access2: Repr[T] = this.asInstanceOf[Repr[T]]
+ val access3: Repr[T] = this.asInstanceOf[Repr[T]]
+ def access4(v: Repr[T]): Repr[T] = this.asInstanceOf[Repr[T]]
+ def access5(x: X): Repr[T] = this.asInstanceOf[Repr[T]]
+ def access5(x: Y): Repr[T] = this.asInstanceOf[Repr[T]]
+
+ def withReadModifiers(readModifiers:Int): Repr[T] = this.asInstanceOf[Repr[T]]
+}
+
+class Curve
+
+class CurveTemplate [T <: Curve](t: T) extends Template(t) {
+// type Repr[A<: AnyRef] = CurveTemplate[T]
+ type Repr = CurveTemplate[T]
+}
+
+class Base
+class X extends Base
+class Y extends Base
+
+
+object Example {
+ def test1() {
+ new CurveTemplate(new Curve).access1(10)
+
+ new CurveTemplate(new Curve).access2
+
+ new CurveTemplate(new Curve).access3
+
+ new CurveTemplate(new Curve).access4(null)
+
+ new CurveTemplate(new Curve).access5(new X)
+
+ ()
+
+ }
+
+ def test2() {
+ new CurveTemplate(new Curve).access1(10).withReadModifiers(1)
+
+ new CurveTemplate(new Curve).access2.withReadModifiers(1)
+
+ new CurveTemplate(new Curve).access3.withReadModifiers(1)
+
+ new CurveTemplate(new Curve).access4(null).withReadModifiers(1)
+
+ new CurveTemplate(new Curve).access5(new X).withReadModifiers(1)
+ }
+}
diff --git a/test/files/neg/t5689.check b/test/files/neg/t5689.check
new file mode 100644
index 0000000..e497e3b
--- /dev/null
+++ b/test/files/neg/t5689.check
@@ -0,0 +1,7 @@
+t5689.scala:4: error: macro implementation has incompatible shape:
+ required: (c: scala.reflect.macros.Context)(i: c.Expr[Double]): c.Expr[String]
+ found : (c: scala.reflect.macros.Context)(i: c.Expr[Double]): c.Expr[Int]
+type mismatch for return type: c.Expr[Int] does not conform to c.Expr[String]
+ def returnsString(i: Double): String = macro returnsIntImpl
+ ^
+one error found
diff --git a/test/files/neg/t5689.flags b/test/files/neg/t5689.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/t5689.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/t5689.scala b/test/files/neg/t5689.scala
new file mode 100644
index 0000000..3266039
--- /dev/null
+++ b/test/files/neg/t5689.scala
@@ -0,0 +1,6 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def returnsString(i: Double): String = macro returnsIntImpl
+ def returnsIntImpl(c: Context)(i: c.Expr[Double]): c.Expr[Int] = ???
+}
diff --git a/test/files/neg/t5696.check b/test/files/neg/t5696.check
new file mode 100644
index 0000000..72b7781
--- /dev/null
+++ b/test/files/neg/t5696.check
@@ -0,0 +1,19 @@
+t5696.scala:6: error: too many argument lists for constructor invocation
+ new G(1)(2) {}
+ ^
+t5696.scala:14: error: too many argument lists for constructor invocation
+ new G()(2) {}
+ ^
+t5696.scala:22: error: too many argument lists for constructor invocation
+ new G[Int]()(2) {}
+ ^
+t5696.scala:30: error: too many argument lists for constructor invocation
+ new G[Int]()(2)(3) {}
+ ^
+t5696.scala:38: error: too many argument lists for constructor invocation
+ new G[Int]()()(2) {}
+ ^
+t5696.scala:46: error: too many argument lists for constructor invocation
+ object x extends G(1)(2) {}
+ ^
+6 errors found
diff --git a/test/files/neg/t5696.scala b/test/files/neg/t5696.scala
new file mode 100644
index 0000000..051e3a0
--- /dev/null
+++ b/test/files/neg/t5696.scala
@@ -0,0 +1,47 @@
+class TestApply1 {
+ class G(y: Int) {
+ def apply(x: Int) = 1
+ }
+
+ new G(1)(2) {}
+}
+
+class TestApply2 {
+ class G {
+ def apply(x: Int) = 1
+ }
+
+ new G()(2) {}
+}
+
+class TestApply3 {
+ class G[X] {
+ def apply(x: Int) = 1
+ }
+
+ new G[Int]()(2) {}
+}
+
+class TestApply4 {
+ class G[X] {
+ def apply(x: Int)(y: Int) = 1
+ }
+
+ new G[Int]()(2)(3) {}
+}
+
+class TestApply5 {
+ class G[X]()() {
+ def apply(x: Int) = 1
+ }
+
+ new G[Int]()()(2) {}
+}
+
+class TestApply6 {
+ class G(y: Int) {
+ def apply(x: Int) = 1
+ }
+
+ object x extends G(1)(2) {}
+}
diff --git a/test/files/neg/t5702-neg-bad-and-wild.check b/test/files/neg/t5702-neg-bad-and-wild.check
new file mode 100644
index 0000000..eae81ad
--- /dev/null
+++ b/test/files/neg/t5702-neg-bad-and-wild.check
@@ -0,0 +1,28 @@
+t5702-neg-bad-and-wild.scala:10: error: bad use of _* (a sequence pattern must be the last pattern)
+ case List(1, _*,) => // bad use of _* (a sequence pattern must be the last pattern)
+ ^
+t5702-neg-bad-and-wild.scala:10: error: illegal start of simple pattern
+ case List(1, _*,) => // bad use of _* (a sequence pattern must be the last pattern)
+ ^
+t5702-neg-bad-and-wild.scala:12: error: illegal start of simple pattern
+ case List(1, _*3,) => // illegal start of simple pattern
+ ^
+t5702-neg-bad-and-wild.scala:14: error: use _* to match a sequence
+ case List(1, x*) => // use _* to match a sequence
+ ^
+t5702-neg-bad-and-wild.scala:15: error: trailing * is not a valid pattern
+ case List(x*, 1) => // trailing * is not a valid pattern
+ ^
+t5702-neg-bad-and-wild.scala:16: error: trailing * is not a valid pattern
+ case (1, x*) => // trailing * is not a valid pattern
+ ^
+t5702-neg-bad-and-wild.scala:17: error: bad use of _* (sequence pattern not allowed)
+ case (1, x at _*) => // bad use of _* (sequence pattern not allowed)
+ ^
+t5702-neg-bad-and-wild.scala:23: error: bad use of _* (a sequence pattern must be the last pattern)
+ val K(ns @ _*, x) = k // bad use of _* (a sequence pattern must be the last pattern)
+ ^
+t5702-neg-bad-and-wild.scala:24: error: bad use of _* (sequence pattern not allowed)
+ val (b, _ * ) = Pair(5,6) // bad use of _* (sequence pattern not allowed)
+ ^
+9 errors found
diff --git a/test/files/neg/t5702-neg-bad-and-wild.scala b/test/files/neg/t5702-neg-bad-and-wild.scala
new file mode 100644
index 0000000..3833a00
--- /dev/null
+++ b/test/files/neg/t5702-neg-bad-and-wild.scala
@@ -0,0 +1,29 @@
+
+object Test {
+ case class K(i: Int)
+
+ def main(args: Array[String]) {
+ val k = new K(9)
+ val is = List(1,2,3)
+
+ is match {
+ case List(1, _*,) => // bad use of _* (a sequence pattern must be the last pattern)
+ // illegal start of simple pattern
+ case List(1, _*3,) => // illegal start of simple pattern
+ //case List(1, _*3:) => // poor recovery by parens
+ case List(1, x*) => // use _* to match a sequence
+ case List(x*, 1) => // trailing * is not a valid pattern
+ case (1, x*) => // trailing * is not a valid pattern
+ case (1, x at _*) => // bad use of _* (sequence pattern not allowed)
+ }
+
+// good syntax, bad semantics, detected by typer
+//gowild.scala:14: error: star patterns must correspond with varargs parameters
+ val K(is @ _*) = k
+ val K(ns @ _*, x) = k // bad use of _* (a sequence pattern must be the last pattern)
+ val (b, _ * ) = Pair(5,6) // bad use of _* (sequence pattern not allowed)
+// no longer complains
+//bad-and-wild.scala:15: error: ')' expected but '}' found.
+ }
+}
+
diff --git a/test/files/neg/t5702-neg-bad-brace.check b/test/files/neg/t5702-neg-bad-brace.check
new file mode 100644
index 0000000..503f7d9
--- /dev/null
+++ b/test/files/neg/t5702-neg-bad-brace.check
@@ -0,0 +1,10 @@
+t5702-neg-bad-brace.scala:14: error: Unmatched closing brace '}' ignored here
+ case List(1, _*} =>
+ ^
+t5702-neg-bad-brace.scala:14: error: illegal start of simple pattern
+ case List(1, _*} =>
+ ^
+t5702-neg-bad-brace.scala:15: error: ')' expected but '}' found.
+ }
+ ^
+three errors found
diff --git a/test/files/neg/t5702-neg-bad-brace.scala b/test/files/neg/t5702-neg-bad-brace.scala
new file mode 100644
index 0000000..16a341c
--- /dev/null
+++ b/test/files/neg/t5702-neg-bad-brace.scala
@@ -0,0 +1,17 @@
+
+object Test {
+
+ def main(args: Array[String]) {
+ val is = List(1,2,3)
+
+ is match {
+// the erroneous brace is ignored, so we can't halt on it.
+// maybe brace healing can detect overlapping unmatched (...}
+// In this case, the fix emits an extra error:
+// t5702-neg-bad-brace.scala:10: error: Unmatched closing brace '}' ignored here
+// t5702-neg-bad-brace.scala:10: error: illegal start of simple pattern (i.e., =>)
+// t5702-neg-bad-brace.scala:11: error: ')' expected but '}' found.
+ case List(1, _*} =>
+ }
+ }
+}
diff --git a/test/files/neg/t5702-neg-bad-xbrace.check b/test/files/neg/t5702-neg-bad-xbrace.check
new file mode 100644
index 0000000..d88638a
--- /dev/null
+++ b/test/files/neg/t5702-neg-bad-xbrace.check
@@ -0,0 +1,7 @@
+t5702-neg-bad-xbrace.scala:19: error: bad brace or paren after _*
+ case <year>{_*)}</year> => y
+ ^
+t5702-neg-bad-xbrace.scala:28: error: bad brace or paren after _*
+ val <top>{a, z at _*)}</top> = xml
+ ^
+two errors found
diff --git a/test/files/neg/t5702-neg-bad-xbrace.scala b/test/files/neg/t5702-neg-bad-xbrace.scala
new file mode 100644
index 0000000..64bbdb1
--- /dev/null
+++ b/test/files/neg/t5702-neg-bad-xbrace.scala
@@ -0,0 +1,31 @@
+
+object Test {
+ def main(args: Array[String]) {
+ /* PiS example, minus a brace
+ val yearMade = 1965
+ val old =
+ <a>{ if (yearMade < 2000) <old>yearMade}</old>
+ else xml.NodeSeq.Empty } </a>
+ println(old)
+ */
+
+ // bad brace or paren after _*
+ // actually, we know it's a bad paren...
+ // we skip it because not in a context looking for rparen
+ val xyear = <year>1965</year>
+ val ancient =
+ <b>{
+ val when = xyear match {
+ case <year>{_*)}</year> => y
+ case _ => "2035"
+ }
+ <old>{when}</old>
+ }</b>
+ println(ancient)
+
+ val xml = <top><a>apple</a><b>boy</b><c>child</c></top>
+ // bad brace or paren after _*
+ val <top>{a, z at _*)}</top> = xml
+ println("A for "+ a +", ending with "+ z)
+ }
+}
diff --git a/test/files/neg/t5702-neg-ugly-xbrace.check b/test/files/neg/t5702-neg-ugly-xbrace.check
new file mode 100644
index 0000000..7d80bbf
--- /dev/null
+++ b/test/files/neg/t5702-neg-ugly-xbrace.check
@@ -0,0 +1,19 @@
+t5702-neg-ugly-xbrace.scala:11: error: bad brace or paren after _*
+ val <top>{a, z at _*)</top> = xml
+ ^
+t5702-neg-ugly-xbrace.scala:12: error: Missing closing brace `}' assumed here
+ println("A for "+ a +", ending with "+ z)
+ ^
+t5702-neg-ugly-xbrace.scala:13: error: in XML literal: in XML content, please use '}}' to express '}'
+ }
+ ^
+t5702-neg-ugly-xbrace.scala:11: error: I encountered a '}' where I didn't expect one, maybe this tag isn't closed <top>
+ val <top>{a, z at _*)</top> = xml
+ ^
+t5702-neg-ugly-xbrace.scala:14: error: illegal start of simple pattern
+}
+^
+t5702-neg-ugly-xbrace.scala:14: error: '}' expected but eof found.
+}
+ ^
+6 errors found
diff --git a/test/files/neg/t5702-neg-ugly-xbrace.scala b/test/files/neg/t5702-neg-ugly-xbrace.scala
new file mode 100644
index 0000000..0ff7bfa
--- /dev/null
+++ b/test/files/neg/t5702-neg-ugly-xbrace.scala
@@ -0,0 +1,14 @@
+
+object Test {
+ def main(args: Array[String]) {
+
+ val xml = <top><a>apple</a><b>boy</b><c>child</c></top>
+ // This is the more likely typo, and the uglier parse.
+ // We could turn it into a } if } does not follow (to
+ // avoid handing }} back to xml) but that is quite ad hoc.
+ // Assuming } for ) after _* would not be not outlandish.
+ // bad brace or paren after _*
+ val <top>{a, z at _*)</top> = xml
+ println("A for "+ a +", ending with "+ z)
+ }
+}
diff --git a/test/files/neg/t5728.check b/test/files/neg/t5728.check
new file mode 100644
index 0000000..14f9c42
--- /dev/null
+++ b/test/files/neg/t5728.check
@@ -0,0 +1,4 @@
+t5728.scala:3: error: implicit classes must accept exactly one primary constructor parameter
+ implicit class Foo
+ ^
+one error found
diff --git a/test/files/neg/t5728.scala b/test/files/neg/t5728.scala
new file mode 100644
index 0000000..99337d0
--- /dev/null
+++ b/test/files/neg/t5728.scala
@@ -0,0 +1,7 @@
+object Test {
+
+ implicit class Foo
+
+ implicit def Foo = new Foo
+
+}
diff --git a/test/files/neg/t5735.check b/test/files/neg/t5735.check
new file mode 100644
index 0000000..f6e0028
--- /dev/null
+++ b/test/files/neg/t5735.check
@@ -0,0 +1,6 @@
+t5735.scala:6: error: type mismatch;
+ found : (x: Int)Int <and> => String
+ required: Int
+ val z: Int = a
+ ^
+one error found
diff --git a/test/files/neg/t5735.scala b/test/files/neg/t5735.scala
new file mode 100644
index 0000000..fde71ff
--- /dev/null
+++ b/test/files/neg/t5735.scala
@@ -0,0 +1,7 @@
+abstract class Base {
+ def a: String = "one"
+}
+class Clazz extends Base {
+ def a(x: Int): Int = 2
+ val z: Int = a
+}
diff --git a/test/files/neg/t5753.check b/test/files/neg/t5753.check
new file mode 100644
index 0000000..76602de
--- /dev/null
+++ b/test/files/neg/t5753.check
@@ -0,0 +1,4 @@
+Test_2.scala:9: error: macro implementation not found: foo (the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
+ println(foo(42))
+ ^
+one error found
diff --git a/test/files/neg/t5753.flags b/test/files/neg/t5753.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/neg/t5753.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/neg/t5753/Impls_Macros_1.scala b/test/files/neg/t5753/Impls_Macros_1.scala
new file mode 100644
index 0000000..1d9c264
--- /dev/null
+++ b/test/files/neg/t5753/Impls_Macros_1.scala
@@ -0,0 +1,6 @@
+import scala.reflect.macros.{Context => Ctx}
+
+trait Impls {
+def impl(c: Ctx)(x: c.Expr[Any]) = x
+}
+
diff --git a/test/files/neg/t5753/Test_2.scala b/test/files/neg/t5753/Test_2.scala
new file mode 100644
index 0000000..2369b18
--- /dev/null
+++ b/test/files/neg/t5753/Test_2.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Macros extends Impls {
+ def foo(x: Any) = macro impl
+}
+
+object Test extends App {
+ import Macros._
+ println(foo(42))
+}
+
diff --git a/test/files/neg/t576.check b/test/files/neg/t576.check
new file mode 100644
index 0000000..7105c92
--- /dev/null
+++ b/test/files/neg/t576.check
@@ -0,0 +1,4 @@
+t576.scala:14: error: overloaded method insert needs result type
+ if (true) sibling.insert(node);
+ ^
+one error found
diff --git a/test/files/neg/bug576.scala b/test/files/neg/t576.scala
similarity index 100%
rename from test/files/neg/bug576.scala
rename to test/files/neg/t576.scala
diff --git a/test/files/neg/t5760-pkgobj-warn.check b/test/files/neg/t5760-pkgobj-warn.check
new file mode 100644
index 0000000..a89398c
--- /dev/null
+++ b/test/files/neg/t5760-pkgobj-warn.check
@@ -0,0 +1,4 @@
+stalepkg_2.scala:6: error: Foo is already defined as class Foo in package object stalepkg
+ class Foo
+ ^
+one error found
diff --git a/test/files/neg/t5760-pkgobj-warn/stalepkg_1.scala b/test/files/neg/t5760-pkgobj-warn/stalepkg_1.scala
new file mode 100644
index 0000000..ed4b731
--- /dev/null
+++ b/test/files/neg/t5760-pkgobj-warn/stalepkg_1.scala
@@ -0,0 +1,11 @@
+
+package object stalepkg {
+ class Foo
+}
+
+package stalepkg {
+ object Test {
+ def main(args: Array[String]) {
+ }
+ }
+}
diff --git a/test/files/neg/t5760-pkgobj-warn/stalepkg_2.scala b/test/files/neg/t5760-pkgobj-warn/stalepkg_2.scala
new file mode 100644
index 0000000..9abcdba
--- /dev/null
+++ b/test/files/neg/t5760-pkgobj-warn/stalepkg_2.scala
@@ -0,0 +1,11 @@
+
+package object stalepkg {
+}
+
+package stalepkg {
+ class Foo
+ object Test {
+ def main(args: Array[String]) {
+ }
+ }
+}
diff --git a/test/files/neg/t5761.check b/test/files/neg/t5761.check
new file mode 100644
index 0000000..89d766f
--- /dev/null
+++ b/test/files/neg/t5761.check
@@ -0,0 +1,16 @@
+t5761.scala:4: error: not enough arguments for constructor D: (x: Int)D[Int].
+Unspecified value parameter x.
+ println(new D[Int]{}) // crash
+ ^
+t5761.scala:8: error: not enough arguments for constructor D: (x: Int)D[Int].
+Unspecified value parameter x.
+ println(new D[Int]()) // no crash
+ ^
+t5761.scala:9: error: not enough arguments for constructor D: (x: Int)D[Int].
+Unspecified value parameter x.
+ println(new D[Int]{}) // crash
+ ^
+t5761.scala:13: error: not found: type Tread
+ new Tread("sth") { }.run()
+ ^
+four errors found
diff --git a/test/files/neg/t5761.scala b/test/files/neg/t5761.scala
new file mode 100644
index 0000000..040c4eb
--- /dev/null
+++ b/test/files/neg/t5761.scala
@@ -0,0 +1,16 @@
+class D[-A](x: A) { }
+
+object Test1 {
+ println(new D[Int]{}) // crash
+}
+
+object Test2 {
+ println(new D[Int]()) // no crash
+ println(new D[Int]{}) // crash
+}
+
+object Test3 {
+ new Tread("sth") { }.run()
+}
+
+
diff --git a/test/files/neg/t5762.check b/test/files/neg/t5762.check
new file mode 100644
index 0000000..1006403
--- /dev/null
+++ b/test/files/neg/t5762.check
@@ -0,0 +1,13 @@
+t5762.scala:6: error: non-variable type argument Int in type pattern D[Int] is unchecked since it is eliminated by erasure
+ case _: D[Int] if bippy => 1
+ ^
+t5762.scala:7: error: non-variable type argument String in type pattern D[String] is unchecked since it is eliminated by erasure
+ case _: D[String] => 2
+ ^
+t5762.scala:20: error: non-variable type argument D[Int] in type pattern D[D[Int]] is unchecked since it is eliminated by erasure
+ case _: D[D[Int]] if bippy => 1
+ ^
+t5762.scala:21: error: non-variable type argument D[String] in type pattern D[D[String]] is unchecked since it is eliminated by erasure
+ case _: D[D[String]] => 2
+ ^
+four errors found
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/neg/t5762.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/neg/t5762.flags
diff --git a/test/files/neg/t5762.scala b/test/files/neg/t5762.scala
new file mode 100644
index 0000000..fb73552
--- /dev/null
+++ b/test/files/neg/t5762.scala
@@ -0,0 +1,24 @@
+class D[-A]
+
+object Test {
+ var bippy: Boolean = true
+ def f1(x: D[Int with String]) = x match {
+ case _: D[Int] if bippy => 1
+ case _: D[String] => 2
+ }
+ // Correctly warns:
+ //
+ // a.scala:5: warning: non variable type-argument Int in type pattern D[Int] is unchecked since it is eliminated by erasure
+ // case _: D[Int] => 1
+ // ^
+ // a.scala:6: warning: non variable type-argument String in type pattern D[String] is unchecked since it is eliminated by erasure
+ // case _: D[String] => 2
+ // ^
+ // two warnings found
+
+ def f2(x: D[D[Int] with D[String]]) = x match {
+ case _: D[D[Int]] if bippy => 1
+ case _: D[D[String]] => 2
+ }
+ // No warnings!
+}
diff --git a/test/files/neg/t5799.check b/test/files/neg/t5799.check
new file mode 100644
index 0000000..3b43d06
--- /dev/null
+++ b/test/files/neg/t5799.check
@@ -0,0 +1,4 @@
+t5799.scala:2: error: secondary constructor is not allowed in value class
+ def this(s: String) = this(s.toDouble)
+ ^
+one error found
diff --git a/test/files/neg/t5799.scala b/test/files/neg/t5799.scala
new file mode 100644
index 0000000..9bd6ab7
--- /dev/null
+++ b/test/files/neg/t5799.scala
@@ -0,0 +1,8 @@
+class Foo(val bar: Double) extends AnyVal {
+ def this(s: String) = this(s.toDouble)
+}
+object Test {
+ def main(args: Array[String]): Unit =
+ new Foo("")
+ }
+
diff --git a/test/files/neg/t5801.check b/test/files/neg/t5801.check
new file mode 100644
index 0000000..abf8e6e
--- /dev/null
+++ b/test/files/neg/t5801.check
@@ -0,0 +1,22 @@
+t5801.scala:1: error: object sth is not a member of package scala
+import scala.sth
+ ^
+t5801.scala:4: error: not found: value sth
+ def foo(a: Int)(implicit b: sth.Sth): Unit = {}
+ ^
+t5801.scala:7: error: not found: value sth
+ def bar(x: Int)(implicit y: Int): sth.Sth = null
+ ^
+t5801.scala:8: error: could not find implicit value for parameter y: Int
+ bar(1)
+ ^
+t5801.scala:10: error: not found: value sth
+ def meh(x: Int)(implicit a: sth.Sth, b: Int): Unit = {}
+ ^
+t5801.scala:13: error: not found: value sth
+ def meh2(x: Int)(implicit b: Int, a: sth.Sth): Unit = {}
+ ^
+t5801.scala:14: error: could not find implicit value for parameter b: Int
+ meh2(1)
+ ^
+7 errors found
diff --git a/test/files/neg/t5801.scala b/test/files/neg/t5801.scala
new file mode 100644
index 0000000..d452222
--- /dev/null
+++ b/test/files/neg/t5801.scala
@@ -0,0 +1,16 @@
+import scala.sth
+
+object Test extends App {
+ def foo(a: Int)(implicit b: sth.Sth): Unit = {}
+ foo(1)
+
+ def bar(x: Int)(implicit y: Int): sth.Sth = null
+ bar(1)
+
+ def meh(x: Int)(implicit a: sth.Sth, b: Int): Unit = {}
+ meh(1)
+
+ def meh2(x: Int)(implicit b: Int, a: sth.Sth): Unit = {}
+ meh2(1)
+}
+
diff --git a/test/files/neg/t5803.check b/test/files/neg/t5803.check
new file mode 100644
index 0000000..6a2de2e
--- /dev/null
+++ b/test/files/neg/t5803.check
@@ -0,0 +1,4 @@
+t5803.scala:3: error: could not find implicit value for parameter ev: Nothing
+ new Foo(): String
+ ^
+one error found
diff --git a/test/files/neg/t5803.scala b/test/files/neg/t5803.scala
new file mode 100644
index 0000000..f818272
--- /dev/null
+++ b/test/files/neg/t5803.scala
@@ -0,0 +1,4 @@
+object Test {
+ class Foo()(implicit ev: Nothing)
+ new Foo(): String
+}
diff --git a/test/files/neg/t5821.check b/test/files/neg/t5821.check
new file mode 100644
index 0000000..f9c0060
--- /dev/null
+++ b/test/files/neg/t5821.check
@@ -0,0 +1,4 @@
+t5821.scala:1: error: not found: object SthImportant
+import SthImportant._
+ ^
+one error found
diff --git a/test/files/neg/t5821.scala b/test/files/neg/t5821.scala
new file mode 100644
index 0000000..4af0a2b
--- /dev/null
+++ b/test/files/neg/t5821.scala
@@ -0,0 +1,8 @@
+import SthImportant._
+
+class Bar
+
+class Foo2 {
+ type Sth = Array[Bar]
+ def foo(xs: Sth): Bar = if ((xs eq null) || (xs.length == 0)) null else xs(0)
+}
diff --git a/test/files/neg/t5830.check b/test/files/neg/t5830.check
new file mode 100644
index 0000000..726fac2
--- /dev/null
+++ b/test/files/neg/t5830.check
@@ -0,0 +1,7 @@
+t5830.scala:6: error: unreachable code
+ case 'a' => println("b") // unreachable
+ ^
+t5830.scala:4: error: could not emit switch for @switch annotated match
+ def unreachable(ch: Char) = (ch: @switch) match {
+ ^
+two errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/t5830.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/t5830.flags
diff --git a/test/files/neg/t5830.scala b/test/files/neg/t5830.scala
new file mode 100644
index 0000000..c2df3de
--- /dev/null
+++ b/test/files/neg/t5830.scala
@@ -0,0 +1,9 @@
+import scala.annotation.switch
+
+class Test {
+ def unreachable(ch: Char) = (ch: @switch) match {
+ case 'a' => println("b") // ok
+ case 'a' => println("b") // unreachable
+ case 'c' =>
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t5839.check b/test/files/neg/t5839.check
new file mode 100644
index 0000000..d4b125b
--- /dev/null
+++ b/test/files/neg/t5839.check
@@ -0,0 +1,6 @@
+t5839.scala:5: error: type mismatch;
+ found : (x: String => String)Int <and> (x: Int)Int
+ required: Int => String
+ val x: String = goo(foo _)
+ ^
+one error found
diff --git a/test/files/neg/t5839.scala b/test/files/neg/t5839.scala
new file mode 100644
index 0000000..d3a5d4b
--- /dev/null
+++ b/test/files/neg/t5839.scala
@@ -0,0 +1,7 @@
+object Test {
+ def goo[T](x: Int => T): T = x(1)
+ implicit def f(x: Int): String = ""
+ def foo(x: Int): Int = x + 1
+ val x: String = goo(foo _)
+ def foo(x: String => String) = 1
+}
diff --git a/test/files/neg/t5845.check b/test/files/neg/t5845.check
new file mode 100644
index 0000000..8c6100d
--- /dev/null
+++ b/test/files/neg/t5845.check
@@ -0,0 +1,7 @@
+t5845.scala:9: error: value +++ is not a member of Int
+ println(5 +++ 5)
+ ^
+t5845.scala:15: error: value +++ is not a member of Int
+ println(5 +++ 5)
+ ^
+two errors found
diff --git a/test/files/neg/t5845.scala b/test/files/neg/t5845.scala
new file mode 100644
index 0000000..823c722
--- /dev/null
+++ b/test/files/neg/t5845.scala
@@ -0,0 +1,16 @@
+class Num[T] {
+ def mkOps = new Ops
+ class Ops { def +++(rhs: T) = () }
+}
+
+class A {
+ implicit def infixOps[T, CC[X] <: Num[X]](lhs: T)(implicit num: CC[T]) = num.mkOps
+ implicit val n1 = new Num[Int] { }
+ println(5 +++ 5)
+}
+
+class B {
+ implicit def infixOps[T, CC[X] <: Num[X]](lhs: T)(implicit num: CC[T]) : CC[T]#Ops = num.mkOps
+ implicit val n1 = new Num[Int] {}
+ println(5 +++ 5)
+}
diff --git a/test/files/neg/t585.check b/test/files/neg/t585.check
new file mode 100644
index 0000000..d332ac5
--- /dev/null
+++ b/test/files/neg/t585.check
@@ -0,0 +1,4 @@
+t585.scala:1: error: unclosed comment
+/*
+^
+one error found
diff --git a/test/files/neg/bug585.scala b/test/files/neg/t585.scala
similarity index 100%
rename from test/files/neg/bug585.scala
rename to test/files/neg/t585.scala
diff --git a/test/files/neg/t5856.check b/test/files/neg/t5856.check
new file mode 100644
index 0000000..08a61bd
--- /dev/null
+++ b/test/files/neg/t5856.check
@@ -0,0 +1,31 @@
+t5856.scala:10: error: invalid string interpolation: `$$', `$'ident or `$'BlockExpr expected
+ val s9 = s"$"
+ ^
+t5856.scala:10: error: unclosed string literal
+ val s9 = s"$"
+ ^
+t5856.scala:2: error: error in interpolated string: identifier or block expected
+ val s1 = s"$null"
+ ^
+t5856.scala:3: error: error in interpolated string: identifier or block expected
+ val s2 = s"$false"
+ ^
+t5856.scala:4: error: error in interpolated string: identifier or block expected
+ val s3 = s"$true"
+ ^
+t5856.scala:5: error: error in interpolated string: identifier or block expected
+ val s4 = s"$yield"
+ ^
+t5856.scala:6: error: error in interpolated string: identifier or block expected
+ val s5 = s"$return"
+ ^
+t5856.scala:7: error: error in interpolated string: identifier or block expected
+ val s6 = s"$new"
+ ^
+t5856.scala:8: error: error in interpolated string: identifier or block expected
+ val s7 = s"$s1 $null $super"
+ ^
+t5856.scala:9: error: error in interpolated string: identifier or block expected
+ val s8 = s"$super"
+ ^
+10 errors found
diff --git a/test/files/neg/t5856.scala b/test/files/neg/t5856.scala
new file mode 100644
index 0000000..2ceee59
--- /dev/null
+++ b/test/files/neg/t5856.scala
@@ -0,0 +1,11 @@
+object Test {
+ val s1 = s"$null"
+ val s2 = s"$false"
+ val s3 = s"$true"
+ val s4 = s"$yield"
+ val s5 = s"$return"
+ val s6 = s"$new"
+ val s7 = s"$s1 $null $super"
+ val s8 = s"$super"
+ val s9 = s"$"
+}
\ No newline at end of file
diff --git a/test/files/neg/t5878.check b/test/files/neg/t5878.check
new file mode 100644
index 0000000..c60c465
--- /dev/null
+++ b/test/files/neg/t5878.check
@@ -0,0 +1,13 @@
+t5878.scala:1: error: value class may not wrap another user-defined value class
+case class Foo(x: Bar) extends AnyVal
+ ^
+t5878.scala:2: error: value class may not wrap another user-defined value class
+case class Bar(x: Foo) extends AnyVal
+ ^
+t5878.scala:4: error: value class may not wrap another user-defined value class
+class Foo1(val x: Bar1) extends AnyVal
+ ^
+t5878.scala:5: error: value class may not wrap another user-defined value class
+class Bar1(val x: Foo1) extends AnyVal
+ ^
+four errors found
diff --git a/test/files/neg/t5878.scala b/test/files/neg/t5878.scala
new file mode 100644
index 0000000..b4e3362
--- /dev/null
+++ b/test/files/neg/t5878.scala
@@ -0,0 +1,6 @@
+case class Foo(x: Bar) extends AnyVal
+case class Bar(x: Foo) extends AnyVal
+
+class Foo1(val x: Bar1) extends AnyVal
+class Bar1(val x: Foo1) extends AnyVal
+
diff --git a/test/files/neg/t588.check b/test/files/neg/t588.check
new file mode 100644
index 0000000..f8b5516
--- /dev/null
+++ b/test/files/neg/t588.check
@@ -0,0 +1,13 @@
+t588.scala:3: error: double definition:
+method visit:(f: Int => String)Boolean and
+method visit:(f: Int => Unit)Boolean at line 2
+have same type after erasure: (f: Function1)Boolean
+ def visit(f: Int => String): Boolean
+ ^
+t588.scala:10: error: double definition:
+method f:(brac: Test.this.TypeB)Unit and
+method f:(node: Test.this.TypeA)Unit at line 9
+have same type after erasure: (brac: Test#TraitA)Unit
+ def f(brac : TypeB) : Unit;
+ ^
+two errors found
diff --git a/test/files/neg/bug588.scala b/test/files/neg/t588.scala
similarity index 100%
rename from test/files/neg/bug588.scala
rename to test/files/neg/t588.scala
diff --git a/test/files/neg/t5882.check b/test/files/neg/t5882.check
new file mode 100644
index 0000000..e0958e1
--- /dev/null
+++ b/test/files/neg/t5882.check
@@ -0,0 +1,9 @@
+t5882.scala:4: error: implementation restriction: nested class is not allowed in value class
+This restriction is planned to be removed in subsequent releases.
+ case class Scope()
+ ^
+t5882.scala:5: error: implementation restriction: nested object is not allowed in value class
+This restriction is planned to be removed in subsequent releases.
+ object Bar
+ ^
+two errors found
diff --git a/test/files/neg/t5882.scala b/test/files/neg/t5882.scala
new file mode 100644
index 0000000..3a55abd
--- /dev/null
+++ b/test/files/neg/t5882.scala
@@ -0,0 +1,6 @@
+// SIP-15 was changed to allow nested classes. See run/t5882.scala
+
+class NodeOps(val n: Any) extends AnyVal {
+ case class Scope()
+ object Bar
+}
diff --git a/test/files/neg/t5892.check b/test/files/neg/t5892.check
new file mode 100644
index 0000000..839bf9d
--- /dev/null
+++ b/test/files/neg/t5892.check
@@ -0,0 +1,17 @@
+t5892.scala:5: error: type mismatch;
+ found : Boolean(false)
+ required: String
+class C[@annot(false) X] {
+ ^
+t5892.scala:9: error: not found: value b2s
+class D[@annot(b2s(false)) X] {
+ ^
+t5892.scala:13: error: type mismatch;
+ found : Boolean(false)
+ required: String
+ at annot(false) class E {
+ ^
+t5892.scala:17: error: not found: value b2s
+ at annot(b2s(false)) class F {
+ ^
+four errors found
diff --git a/test/files/neg/t5892.scala b/test/files/neg/t5892.scala
new file mode 100644
index 0000000..5e3b2f3
--- /dev/null
+++ b/test/files/neg/t5892.scala
@@ -0,0 +1,25 @@
+import language.implicitConversions
+
+class annot(a: String) extends annotation.StaticAnnotation
+
+class C[@annot(false) X] {
+ implicit def b2s(b: Boolean): String = ""
+}
+
+class D[@annot(b2s(false)) X] {
+ implicit def b2s(b: Boolean): String = ""
+}
+
+ at annot(false) class E {
+ implicit def b2s(b: Boolean): String = ""
+}
+
+ at annot(b2s(false)) class F {
+ implicit def b2s(b: Boolean): String = ""
+}
+
+object T {
+ implicit def b2s(b: Boolean): String = ""
+ @annot(false) val x = 0
+ @annot(b2s(false)) val y = 0
+}
diff --git a/test/files/neg/t591.check b/test/files/neg/t591.check
new file mode 100644
index 0000000..d33f6d7
--- /dev/null
+++ b/test/files/neg/t591.check
@@ -0,0 +1,5 @@
+t591.scala:38: error: method input_= is defined twice
+ conflicting symbols both originated in file 't591.scala'
+ def input_=(in : Input) = {}
+ ^
+one error found
diff --git a/test/files/neg/bug591.scala b/test/files/neg/t591.scala
similarity index 100%
rename from test/files/neg/bug591.scala
rename to test/files/neg/t591.scala
diff --git a/test/files/neg/t593.check b/test/files/neg/t593.check
new file mode 100644
index 0000000..c1aeab8
--- /dev/null
+++ b/test/files/neg/t593.check
@@ -0,0 +1,4 @@
+t593.scala:1: error: traits or objects may not have parameters
+trait Wrapper[T](x : T) {
+ ^
+one error found
diff --git a/test/files/neg/bug593.scala b/test/files/neg/t593.scala
similarity index 100%
rename from test/files/neg/bug593.scala
rename to test/files/neg/t593.scala
diff --git a/test/files/neg/t5956.check b/test/files/neg/t5956.check
new file mode 100644
index 0000000..6641dac
--- /dev/null
+++ b/test/files/neg/t5956.check
@@ -0,0 +1,20 @@
+t5956.scala:1: warning: case classes without a parameter list have been deprecated;
+use either case objects or case classes with `()' as parameter list.
+object O { case class C[T]; class C }
+ ^
+t5956.scala:2: warning: case classes without a parameter list have been deprecated;
+use either case objects or case classes with `()' as parameter list.
+object T { case class C[T]; case class C }
+ ^
+t5956.scala:2: warning: case classes without a parameter list have been deprecated;
+use either case objects or case classes with `()' as parameter list.
+object T { case class C[T]; case class C }
+ ^
+t5956.scala:1: error: C is already defined as case class C
+object O { case class C[T]; class C }
+ ^
+t5956.scala:2: error: C is already defined as case class C
+object T { case class C[T]; case class C }
+ ^
+three warnings found
+two errors found
diff --git a/test/files/neg/t5956.flags b/test/files/neg/t5956.flags
new file mode 100644
index 0000000..dcc59eb
--- /dev/null
+++ b/test/files/neg/t5956.flags
@@ -0,0 +1 @@
+-deprecation
diff --git a/test/files/neg/t5956.scala b/test/files/neg/t5956.scala
new file mode 100644
index 0000000..d985fa9
--- /dev/null
+++ b/test/files/neg/t5956.scala
@@ -0,0 +1,2 @@
+object O { case class C[T]; class C }
+object T { case class C[T]; case class C }
diff --git a/test/files/neg/t5969.check b/test/files/neg/t5969.check
new file mode 100644
index 0000000..9d8ac9a
--- /dev/null
+++ b/test/files/neg/t5969.check
@@ -0,0 +1,7 @@
+t5969.scala:9: error: overloaded method value g with alternatives:
+ (x: C2)String <and>
+ (x: C1)String
+ cannot be applied to (String)
+ if (false) List(g(x)) else List[C1]() map g
+ ^
+one error found
diff --git a/test/files/neg/t5969.scala b/test/files/neg/t5969.scala
new file mode 100644
index 0000000..62f87fd
--- /dev/null
+++ b/test/files/neg/t5969.scala
@@ -0,0 +1,11 @@
+class C1
+class C2
+class A {
+ def f(x: Any) = x
+ def g(x: C1): String = "A"
+ def g(x: C2): String = "B"
+
+ def crash() = f(List[String]() flatMap { x =>
+ if (false) List(g(x)) else List[C1]() map g
+ })
+}
diff --git a/test/files/neg/t6011.check b/test/files/neg/t6011.check
new file mode 100644
index 0000000..5b5a861
--- /dev/null
+++ b/test/files/neg/t6011.check
@@ -0,0 +1,10 @@
+t6011.scala:4: error: unreachable code
+ case 'a' | 'c' => 1 // unreachable
+ ^
+t6011.scala:10: error: unreachable code
+ case 'b' | 'a' => 1 // unreachable
+ ^
+t6011.scala:8: error: could not emit switch for @switch annotated match
+ def f2(ch: Char): Any = (ch: @annotation.switch) match {
+ ^
+three errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/t6011.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/t6011.flags
diff --git a/test/files/neg/t6011.scala b/test/files/neg/t6011.scala
new file mode 100644
index 0000000..a36cca7
--- /dev/null
+++ b/test/files/neg/t6011.scala
@@ -0,0 +1,23 @@
+object Test {
+ def f(ch: Char): Any = ch match {
+ case 'a' => 1
+ case 'a' | 'c' => 1 // unreachable
+ }
+
+ // won't be compiled to a switch since it has an unreachable (duplicate) case
+ def f2(ch: Char): Any = (ch: @annotation.switch) match {
+ case 'a' | 'b' => 1
+ case 'b' | 'a' => 1 // unreachable
+ case _ =>
+ }
+
+ // s'all good
+ def f3(ch: Char): Any = (ch: @annotation.switch) match {
+ case 'a' | 'b' if (true: Boolean) => 1
+ case 'b' | 'a' => 1 // ok
+ case _ => // need third case to check switch annotation (two-case switches are always okay to compile to if-then-else)
+ }
+
+
+ def main(args: Array[String]): Unit = f('a')
+}
\ No newline at end of file
diff --git a/test/files/neg/t6013.check b/test/files/neg/t6013.check
new file mode 100644
index 0000000..502da99
--- /dev/null
+++ b/test/files/neg/t6013.check
@@ -0,0 +1,7 @@
+DerivedScala.scala:4: error: class C needs to be abstract, since there is a deferred declaration of method foo in class B of type => Int which is not implemented in a subclass
+class C extends B
+ ^
+DerivedScala.scala:7: error: class DerivedScala needs to be abstract, since there is a deferred declaration of method foo in class Abstract of type ()Boolean which is not implemented in a subclass
+class DerivedScala extends Abstract
+ ^
+two errors found
diff --git a/test/files/neg/t6013/Abstract.java b/test/files/neg/t6013/Abstract.java
new file mode 100644
index 0000000..c0ef046
--- /dev/null
+++ b/test/files/neg/t6013/Abstract.java
@@ -0,0 +1,7 @@
+public abstract class Abstract extends Base {
+ // overrides Base#bar under the erasure model
+ public void bar(java.util.List<java.lang.Integer> foo) { return; }
+
+ // must force re-implementation in derived classes
+ public abstract boolean foo();
+}
diff --git a/test/files/neg/t6013/Base.java b/test/files/neg/t6013/Base.java
new file mode 100644
index 0000000..b73d7fd
--- /dev/null
+++ b/test/files/neg/t6013/Base.java
@@ -0,0 +1,10 @@
+abstract public class Base {
+ // This must considered to be overridden by Abstract#foo based
+ // on the erased signatures. This special case is handled by
+ // `javaErasedOverridingSym` in `RefChecks`.
+ public abstract void bar(java.util.List<java.lang.String> foo) { return; }
+
+ // But, a concrete method in a Java superclass must not excuse
+ // a deferred method in the Java subclass!
+ public boolean foo() { return true; }
+}
diff --git a/test/files/neg/t6013/DerivedScala.scala b/test/files/neg/t6013/DerivedScala.scala
new file mode 100644
index 0000000..fc0c55d
--- /dev/null
+++ b/test/files/neg/t6013/DerivedScala.scala
@@ -0,0 +1,7 @@
+// Scala extending Scala (this case was working fine before this bug.)
+class A { def foo: Int = 0 }
+abstract class B extends A { def foo: Int }
+class C extends B
+
+// Scala extending Java
+class DerivedScala extends Abstract
diff --git a/test/files/neg/t6040.check b/test/files/neg/t6040.check
new file mode 100644
index 0000000..16c90ed
--- /dev/null
+++ b/test/files/neg/t6040.check
@@ -0,0 +1,9 @@
+t6040.scala:1: error: extension of type scala.Dynamic needs to be enabled
+by making the implicit value scala.language.dynamics visible.
+This can be achieved by adding the import clause 'import scala.language.dynamics'
+or by setting the compiler option -language:dynamics.
+See the Scala docs for value scala.language.dynamics for a discussion
+why the feature needs to be explicitly enabled.
+class X extends Dynamic
+ ^
+one error found
diff --git a/test/files/neg/t6040.scala b/test/files/neg/t6040.scala
new file mode 100644
index 0000000..b8f7dab
--- /dev/null
+++ b/test/files/neg/t6040.scala
@@ -0,0 +1 @@
+class X extends Dynamic
\ No newline at end of file
diff --git a/test/files/neg/t6042.check b/test/files/neg/t6042.check
new file mode 100644
index 0000000..221f06e
--- /dev/null
+++ b/test/files/neg/t6042.check
@@ -0,0 +1,4 @@
+t6042.scala:7: error: illegal type selection from volatile type a.OpSemExp (with upper bound LazyExp[a.OpSemExp] with _$1)
+ def foo[AA <: LazyExp[_]](a: AA): a.OpSemExp#Val = ??? // a.OpSemExp is volatile, because of `with This`
+ ^
+one error found
diff --git a/test/files/neg/t6042.scala b/test/files/neg/t6042.scala
new file mode 100644
index 0000000..5a123d1
--- /dev/null
+++ b/test/files/neg/t6042.scala
@@ -0,0 +1,8 @@
+trait LazyExp[+This <: LazyExp[This]] { this: This =>
+ type OpSemExp <: LazyExp[OpSemExp] with This
+ type Val
+}
+
+object Test {
+ def foo[AA <: LazyExp[_]](a: AA): a.OpSemExp#Val = ??? // a.OpSemExp is volatile, because of `with This`
+}
diff --git a/test/files/neg/t6048.check b/test/files/neg/t6048.check
new file mode 100644
index 0000000..5bdf2ec
--- /dev/null
+++ b/test/files/neg/t6048.check
@@ -0,0 +1,13 @@
+t6048.scala:3: error: unreachable code
+ case _ if false => x // unreachable
+ ^
+t6048.scala:8: error: unreachable code
+ case _ if false => x // unreachable
+ ^
+t6048.scala:13: error: patterns after a variable pattern cannot match (SLS 8.1.1)
+ case _ => x
+ ^
+t6048.scala:14: error: unreachable code due to variable pattern on line 13
+ case 5 if true => x // unreachable
+ ^
+four errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/t6048.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/t6048.flags
diff --git a/test/files/neg/t6048.scala b/test/files/neg/t6048.scala
new file mode 100644
index 0000000..803e651
--- /dev/null
+++ b/test/files/neg/t6048.scala
@@ -0,0 +1,22 @@
+class A {
+ def f1(x: Int) = x match {
+ case _ if false => x // unreachable
+ case 5 => x
+ }
+
+ def f2(x: Int) = x match {
+ case _ if false => x // unreachable
+ case 5 if true => x
+ }
+
+ def f3(x: Int) = x match {
+ case _ => x
+ case 5 if true => x // unreachable
+ }
+
+ def test1(x: Int) = x match {
+ case c if c < 0 => 0
+ case 1 => 1
+ case _ => 2
+ }
+}
diff --git a/test/files/neg/t6074.check b/test/files/neg/t6074.check
new file mode 100644
index 0000000..38670e5
--- /dev/null
+++ b/test/files/neg/t6074.check
@@ -0,0 +1,4 @@
+t6074.scala:5: error: constructor A in class A cannot be accessed in object T
+ def t = new A()
+ ^
+one error found
diff --git a/test/files/neg/t6074.scala b/test/files/neg/t6074.scala
new file mode 100644
index 0000000..8c14f00
--- /dev/null
+++ b/test/files/neg/t6074.scala
@@ -0,0 +1,6 @@
+class A private () { }
+class B { }
+object T {
+ implicit def a2b(a: A): B = null
+ def t = new A()
+}
diff --git a/test/files/neg/t608.check b/test/files/neg/t608.check
new file mode 100644
index 0000000..5c7f49d
--- /dev/null
+++ b/test/files/neg/t608.check
@@ -0,0 +1,6 @@
+t608.scala:16: error: type mismatch;
+ found : hs{type a = ha}
+ required: hs{type s = hs; type a = ha}
+ = g(f(x).bimap(id))
+ ^
+one error found
diff --git a/test/files/neg/bug608.scala b/test/files/neg/t608.scala
similarity index 100%
rename from test/files/neg/bug608.scala
rename to test/files/neg/t608.scala
diff --git a/test/files/neg/t6082.check b/test/files/neg/t6082.check
new file mode 100644
index 0000000..b68de5c
--- /dev/null
+++ b/test/files/neg/t6082.check
@@ -0,0 +1,13 @@
+t6082.scala:1: warning: Implementation restriction: subclassing Classfile does not
+make your annotation visible at runtime. If that is what
+you want, you must write the annotation class in Java.
+class annot(notValue: String) extends annotation.ClassfileAnnotation
+ ^
+t6082.scala:2: error: classfile annotation arguments have to be supplied as named arguments
+ at annot("") class C
+ ^
+t6082.scala:2: error: annotation annot is missing argument notValue
+ at annot("") class C
+ ^
+one warning found
+two errors found
diff --git a/test/files/neg/t6082.scala b/test/files/neg/t6082.scala
new file mode 100644
index 0000000..30de91a
--- /dev/null
+++ b/test/files/neg/t6082.scala
@@ -0,0 +1,2 @@
+class annot(notValue: String) extends annotation.ClassfileAnnotation
+ at annot("") class C
\ No newline at end of file
diff --git a/test/files/neg/t6138.check b/test/files/neg/t6138.check
new file mode 100644
index 0000000..8fd9978
--- /dev/null
+++ b/test/files/neg/t6138.check
@@ -0,0 +1,7 @@
+t6138.scala:4: error: ambiguous reference to overloaded definition,
+both method getClass in object definitions of type (s: Int)Any
+and method getClass in object definitions of type (s: String)Any
+match argument types (Nothing)
+ getClass(???): String
+ ^
+one error found
diff --git a/test/files/neg/t6138.scala b/test/files/neg/t6138.scala
new file mode 100644
index 0000000..2f45a46
--- /dev/null
+++ b/test/files/neg/t6138.scala
@@ -0,0 +1,5 @@
+object definitions {
+ def getClass(s: String): Any = ???
+ def getClass(s: Int): Any = ???
+ getClass(???): String
+}
diff --git a/test/files/neg/t6162-inheritance.check b/test/files/neg/t6162-inheritance.check
new file mode 100644
index 0000000..a7d3cc3
--- /dev/null
+++ b/test/files/neg/t6162-inheritance.check
@@ -0,0 +1,10 @@
+t6162-inheritance.scala:6: error: inheritance from class Foo in package t6126 is deprecated: `Foo` will be made final in a future version.
+class SubFoo extends Foo
+ ^
+t6162-inheritance.scala:11: error: inheritance from trait T in package t6126 is deprecated
+object SubT extends T
+ ^
+t6162-inheritance.scala:17: error: inheritance from trait S in package t6126 is deprecated
+ new S {
+ ^
+three errors found
diff --git a/test/files/neg/t6162-inheritance.flags b/test/files/neg/t6162-inheritance.flags
new file mode 100644
index 0000000..65faf53
--- /dev/null
+++ b/test/files/neg/t6162-inheritance.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -deprecation
\ No newline at end of file
diff --git a/test/files/neg/t6162-inheritance.scala b/test/files/neg/t6162-inheritance.scala
new file mode 100644
index 0000000..7b47b92
--- /dev/null
+++ b/test/files/neg/t6162-inheritance.scala
@@ -0,0 +1,19 @@
+package scala.t6126
+
+ at deprecatedInheritance("`Foo` will be made final in a future version.", "2.10.0")
+class Foo
+
+class SubFoo extends Foo
+
+ at deprecatedInheritance()
+trait T
+
+object SubT extends T
+
+ at deprecatedInheritance()
+trait S
+
+object O {
+ new S {
+ }
+}
diff --git a/test/files/neg/t6162-overriding.check b/test/files/neg/t6162-overriding.check
new file mode 100644
index 0000000..e774888
--- /dev/null
+++ b/test/files/neg/t6162-overriding.check
@@ -0,0 +1,7 @@
+t6162-overriding.scala:14: error: overriding method bar in class Bar is deprecated: `bar` will be made private in a future version.
+ override def bar = 43
+ ^
+t6162-overriding.scala:15: error: overriding method baz in class Bar is deprecated
+ override def baz = 43
+ ^
+two errors found
diff --git a/test/files/neg/t6162-overriding.flags b/test/files/neg/t6162-overriding.flags
new file mode 100644
index 0000000..65faf53
--- /dev/null
+++ b/test/files/neg/t6162-overriding.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -deprecation
\ No newline at end of file
diff --git a/test/files/neg/t6162-overriding.scala b/test/files/neg/t6162-overriding.scala
new file mode 100644
index 0000000..4cab0c2
--- /dev/null
+++ b/test/files/neg/t6162-overriding.scala
@@ -0,0 +1,17 @@
+package scala.t6162
+
+class Bar {
+ @deprecatedOverriding("`bar` will be made private in a future version.", "2.10.0")
+ def bar = 42
+
+ @deprecatedOverriding()
+ def baz = 42
+
+ def baz(a: Any) = 0
+}
+
+class SubBar extends Bar {
+ override def bar = 43
+ override def baz = 43
+ override def baz(a: Any) = 43 // okay
+}
diff --git a/test/files/neg/t6214.check b/test/files/neg/t6214.check
new file mode 100644
index 0000000..6349a3e
--- /dev/null
+++ b/test/files/neg/t6214.check
@@ -0,0 +1,4 @@
+t6214.scala:5: error: missing parameter type
+ m { s => case class Foo() }
+ ^
+one error found
diff --git a/test/files/neg/t6214.scala b/test/files/neg/t6214.scala
new file mode 100644
index 0000000..734acda
--- /dev/null
+++ b/test/files/neg/t6214.scala
@@ -0,0 +1,7 @@
+object Test {
+ def m(f: String => Unit) = 0
+ def m(f: Int => Unit) = 0
+ def foo {
+ m { s => case class Foo() }
+ }
+}
diff --git a/test/files/neg/t6227.check b/test/files/neg/t6227.check
new file mode 100644
index 0000000..5e3c636
--- /dev/null
+++ b/test/files/neg/t6227.check
@@ -0,0 +1,4 @@
+t6227.scala:2: error: illegal combination of modifiers: implicit and case for: class IntOps
+ implicit case class IntOps( i: Int ) {
+ ^
+one error found
diff --git a/test/files/neg/t6227.scala b/test/files/neg/t6227.scala
new file mode 100644
index 0000000..4641683
--- /dev/null
+++ b/test/files/neg/t6227.scala
@@ -0,0 +1,6 @@
+object Test {
+ implicit case class IntOps( i: Int ) {
+ def twice = i * 2
+ }
+}
+
diff --git a/test/files/neg/t6231.check b/test/files/neg/t6231.check
new file mode 100644
index 0000000..b27961d
--- /dev/null
+++ b/test/files/neg/t6231.check
@@ -0,0 +1,6 @@
+t6231.scala:4: error: Implementation restriction: local trait Bug$X$1 is unable to automatically capture the
+free variable value ev$1 on behalf of anonymous class anonfun$qux$1. You can manually assign it to a val inside the trait,
+and refer that that val in anonymous class anonfun$qux$1. For more details, see SI-6231.
+ def qux = { () => ev }
+ ^
+one error found
diff --git a/test/files/neg/t6231.scala b/test/files/neg/t6231.scala
new file mode 100644
index 0000000..1e5b4e0
--- /dev/null
+++ b/test/files/neg/t6231.scala
@@ -0,0 +1,15 @@
+object Bug {
+ def bar(ev: Any) = {
+ trait X {
+ def qux = { () => ev }
+ }
+ new X {}.qux()
+
+ // workaround
+ trait Y {
+ val ev2 = ev // manually capture `ev` so that `ev2` is added to the trait interface.
+ def qux = { () => ev2 }
+ }
+ }
+}
+
diff --git a/test/files/neg/t6258.check b/test/files/neg/t6258.check
new file mode 100644
index 0000000..73363d8
--- /dev/null
+++ b/test/files/neg/t6258.check
@@ -0,0 +1,16 @@
+t6258.scala:2: error: missing parameter type for expanded function
+The argument types of an anonymous function must be fully known. (SLS 8.5)
+Expected type was: PartialFunction[?, Int]
+ val f : PartialFunction[_, Int] = { case a : Int => a } // undefined param
+ ^
+t6258.scala:5: error: missing parameter type for expanded function
+The argument types of an anonymous function must be fully known. (SLS 8.5)
+Expected type was: PartialFunction[?,Int]
+ foo { case a : Int => a } // undefined param
+ ^
+t6258.scala:22: error: missing parameter type for expanded function
+The argument types of an anonymous function must be fully known. (SLS 8.5)
+Expected type was: PartialFunction[?,Any]
+ bar[M[Any]] (foo { // undefined param
+ ^
+three errors found
diff --git a/test/files/neg/t6258.scala b/test/files/neg/t6258.scala
new file mode 100644
index 0000000..5046a47
--- /dev/null
+++ b/test/files/neg/t6258.scala
@@ -0,0 +1,25 @@
+object Test {
+ val f : PartialFunction[_, Int] = { case a : Int => a } // undefined param
+
+ def foo[A](pf: PartialFunction[A, Int]) {};
+ foo { case a : Int => a } // undefined param
+
+ val g : PartialFunction[Int, _] = { case a : Int => a } // okay
+}
+
+
+// Another variation, seen in the wild with Specs2.
+class X {
+ trait Matcher[-T]
+
+ def bar[T](m: Matcher[T]) = null
+ def bar[T](i: Int) = null
+
+ def foo[T](p: PartialFunction[T, Any]): Matcher[T] = null
+
+ case class M[X](a: X)
+
+ bar[M[Any]] (foo { // undefined param
+ case M(_) => null
+ })
+}
diff --git a/test/files/neg/t6260.check b/test/files/neg/t6260.check
new file mode 100644
index 0000000..46e9bd1
--- /dev/null
+++ b/test/files/neg/t6260.check
@@ -0,0 +1,13 @@
+t6260.scala:3: error: bridge generated for member method apply: (bx: Box[X])Box[Y] in anonymous class $anonfun
+which overrides method apply: (v1: T1)R in trait Function1
+clashes with definition of the member itself;
+both have erased type (v1: Object)Object
+ ((bx: Box[X]) => new Box(f(bx.x)))(this)
+ ^
+t6260.scala:8: error: bridge generated for member method apply: (bx: Box[X])Box[Y] in anonymous class $anonfun
+which overrides method apply: (v1: T1)R in trait Function1
+clashes with definition of the member itself;
+both have erased type (v1: Object)Object
+ ((bx: Box[X]) => new Box(f(bx.x)))(self)
+ ^
+two errors found
diff --git a/test/files/neg/t6260.scala b/test/files/neg/t6260.scala
new file mode 100644
index 0000000..93b5448
--- /dev/null
+++ b/test/files/neg/t6260.scala
@@ -0,0 +1,17 @@
+class Box[X](val x: X) extends AnyVal {
+ def map[Y](f: X => Y): Box[Y] =
+ ((bx: Box[X]) => new Box(f(bx.x)))(this)
+}
+
+object Test {
+ def map2[X, Y](self: Box[X], f: X => Y): Box[Y] =
+ ((bx: Box[X]) => new Box(f(bx.x)))(self)
+
+ def main(args: Array[String]) {
+ val f = (x: Int) => x + 1
+ val g = (x: String) => x + x
+
+ map2(new Box(42), f)
+ new Box("abc") map g
+ }
+}
diff --git a/test/files/neg/t6263.check b/test/files/neg/t6263.check
new file mode 100644
index 0000000..9e9c7c6
--- /dev/null
+++ b/test/files/neg/t6263.check
@@ -0,0 +1,9 @@
+t6263.scala:5: error: type mismatch;
+ found : A.this.c.type (with underlying type C)
+ required: AnyRef
+Note that C extends Any, not AnyRef.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ type t = c.type
+ ^
+one error found
diff --git a/test/files/neg/t6263.scala b/test/files/neg/t6263.scala
new file mode 100644
index 0000000..6575185
--- /dev/null
+++ b/test/files/neg/t6263.scala
@@ -0,0 +1,6 @@
+class C(val a: Any) extends AnyVal
+class A {
+ implicit def c2AnyRef(c: C): AnyRef = new {}
+ val c = new C(0)
+ type t = c.type
+}
diff --git a/test/files/neg/t6264.check b/test/files/neg/t6264.check
new file mode 100644
index 0000000..438be4c
--- /dev/null
+++ b/test/files/neg/t6264.check
@@ -0,0 +1,4 @@
+t6264.scala:3: error: non-variable type argument Tuple1[_] in type Tuple2[_, Tuple1[_]] is unchecked since it is eliminated by erasure
+ x.isInstanceOf[Tuple2[_, Tuple1[_]]]
+ ^
+one error found
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/neg/t6264.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/neg/t6264.flags
diff --git a/test/files/neg/t6264.scala b/test/files/neg/t6264.scala
new file mode 100644
index 0000000..dc3b727
--- /dev/null
+++ b/test/files/neg/t6264.scala
@@ -0,0 +1,6 @@
+class Foo {
+ def foo(x: AnyRef): Unit = {
+ x.isInstanceOf[Tuple2[_, Tuple1[_]]]
+ ()
+ }
+}
diff --git a/test/files/neg/t6276.check b/test/files/neg/t6276.check
new file mode 100644
index 0000000..0b3dfa5
--- /dev/null
+++ b/test/files/neg/t6276.check
@@ -0,0 +1,19 @@
+t6276.scala:4: error: method a in class C does nothing other than call itself recursively
+ def a: Any = a // warn
+ ^
+t6276.scala:5: error: value b in class C does nothing other than call itself recursively
+ val b: Any = b // warn
+ ^
+t6276.scala:7: error: method c in class C does nothing other than call itself recursively
+ def c: Any = this.c // warn
+ ^
+t6276.scala:8: error: method d in class C does nothing other than call itself recursively
+ def d: Any = C.this.d // warn
+ ^
+t6276.scala:13: error: method a does nothing other than call itself recursively
+ def a: Any = a // warn
+ ^
+t6276.scala:22: error: method a does nothing other than call itself recursively
+ def a = a // warn
+ ^
+6 errors found
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/neg/t6276.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/neg/t6276.flags
diff --git a/test/files/neg/t6276.scala b/test/files/neg/t6276.scala
new file mode 100644
index 0000000..bd0a473
--- /dev/null
+++ b/test/files/neg/t6276.scala
@@ -0,0 +1,44 @@
+object Test {
+ def foo(a: Int, b: Int, c: Int) {
+ class C {
+ def a: Any = a // warn
+ val b: Any = b // warn
+
+ def c: Any = this.c // warn
+ def d: Any = C.this.d // warn
+ }
+
+ def method {
+ // method local
+ def a: Any = a // warn
+ }
+
+ trait T {
+ def a: Any
+ }
+
+ new T {
+ // inherited return type
+ def a = a // warn
+ }
+
+ // no warnings below
+ new {
+ def a: Any = {println(""); a}
+ val b: Any = {println(""); b}
+ def c(i: Int): Any = c(i - 0)
+ }
+
+ class D {
+ def other: D = null
+ def foo: Any = other.foo
+ }
+
+ class E {
+ def foo: Any = 0
+ class D extends E {
+ override def foo: Any = E.this.foo
+ }
+ }
+ }
+}
diff --git a/test/files/neg/t6283.check b/test/files/neg/t6283.check
new file mode 100644
index 0000000..69e417e
--- /dev/null
+++ b/test/files/neg/t6283.check
@@ -0,0 +1,4 @@
+t6283.scala:1: error: `abstract' modifier cannot be used with value classes
+abstract class Funky(val i: Int) extends AnyVal
+ ^
+one error found
diff --git a/test/files/neg/t6283.scala b/test/files/neg/t6283.scala
new file mode 100644
index 0000000..d41eb18
--- /dev/null
+++ b/test/files/neg/t6283.scala
@@ -0,0 +1 @@
+abstract class Funky(val i: Int) extends AnyVal
diff --git a/test/files/neg/t630.check b/test/files/neg/t630.check
new file mode 100644
index 0000000..0814ef0
--- /dev/null
+++ b/test/files/neg/t630.check
@@ -0,0 +1,5 @@
+t630.scala:20: error: overriding value foo in trait Bar of type Req2;
+ object foo has incompatible type
+ object foo extends Req1
+ ^
+one error found
diff --git a/test/files/neg/bug630.scala b/test/files/neg/t630.scala
similarity index 100%
rename from test/files/neg/bug630.scala
rename to test/files/neg/t630.scala
diff --git a/test/files/neg/t631.check b/test/files/neg/t631.check
new file mode 100644
index 0000000..3759565
--- /dev/null
+++ b/test/files/neg/t631.check
@@ -0,0 +1,4 @@
+t631.scala:1: error: `implicit' modifier cannot be used for top-level objects
+implicit object Test {
+ ^
+one error found
diff --git a/test/files/neg/bug631.scala b/test/files/neg/t631.scala
similarity index 100%
rename from test/files/neg/bug631.scala
rename to test/files/neg/t631.scala
diff --git a/test/files/neg/t6323a.check b/test/files/neg/t6323a.check
new file mode 100644
index 0000000..4d682e5
--- /dev/null
+++ b/test/files/neg/t6323a.check
@@ -0,0 +1,15 @@
+t6323a.scala:10: materializing requested scala.reflect.type.ClassTag[Test] using `package`.this.materializeClassTag[Test]()
+ val lookAtMe = m.reflect(Test("a",List(5)))
+ ^
+t6323a.scala:11: materializing requested reflect.runtime.universe.type.TypeTag[Test] using `package`.this.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe)
+ val value = u.typeOf[Test]
+ ^
+t6323a.scala:11: `package`.this.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe) is not a valid implicit value for reflect.runtime.universe.TypeTag[Test] because:
+failed to typecheck the materialized tag:
+cannot create a TypeTag referring to local class Test.Test: use WeakTypeTag instead
+ val value = u.typeOf[Test]
+ ^
+t6323a.scala:11: error: No TypeTag available for Test
+ val value = u.typeOf[Test]
+ ^
+one error found
diff --git a/test/files/neg/t6323a.flags b/test/files/neg/t6323a.flags
new file mode 100644
index 0000000..4c6cdb7
--- /dev/null
+++ b/test/files/neg/t6323a.flags
@@ -0,0 +1 @@
+-Xlog-implicits
\ No newline at end of file
diff --git a/test/files/neg/t6323a.scala b/test/files/neg/t6323a.scala
new file mode 100644
index 0000000..a203167
--- /dev/null
+++ b/test/files/neg/t6323a.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => m}
+import scala.reflect.runtime.{universe => u}
+
+object Test extends App {
+ locally {
+ try {
+ case class Test(a:String,b:List[Int])
+
+ val lookAtMe = m.reflect(Test("a",List(5)))
+ val value = u.typeOf[Test]
+ val members = value.members
+ val member = value.members.filter(_.name.encoded == "a")
+ val aAccessor = lookAtMe.reflectMethod(member.head.asMethod)
+ val thisShouldBeA = aAccessor.apply()
+ println(thisShouldBeA)
+ } catch {
+ case ScalaReflectionException(msg) => println(msg)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t633.check b/test/files/neg/t633.check
new file mode 100644
index 0000000..d69d3be
--- /dev/null
+++ b/test/files/neg/t633.check
@@ -0,0 +1,4 @@
+t633.scala:3: error: not found: type ListBuffer
+ def t(a : ListBuffer[String]) = {
+ ^
+one error found
diff --git a/test/files/neg/bug633.scala b/test/files/neg/t633.scala
similarity index 100%
rename from test/files/neg/bug633.scala
rename to test/files/neg/t633.scala
diff --git a/test/files/neg/t6335.check b/test/files/neg/t6335.check
new file mode 100644
index 0000000..1727a05
--- /dev/null
+++ b/test/files/neg/t6335.check
@@ -0,0 +1,9 @@
+t6335.scala:6: error: method Z is defined twice
+ conflicting symbols both originated in file 't6335.scala'
+ implicit class Z[A](val i: A) { def zz = i }
+ ^
+t6335.scala:3: error: method X is defined twice
+ conflicting symbols both originated in file 't6335.scala'
+ implicit class X(val x: Int) { def xx = x }
+ ^
+two errors found
diff --git a/test/files/neg/t6335.scala b/test/files/neg/t6335.scala
new file mode 100644
index 0000000..5c41e81
--- /dev/null
+++ b/test/files/neg/t6335.scala
@@ -0,0 +1,7 @@
+object ImplicitClass {
+ def X(i: Int) {}
+ implicit class X(val x: Int) { def xx = x }
+
+ def Z[A](i: A) {}
+ implicit class Z[A](val i: A) { def zz = i }
+}
\ No newline at end of file
diff --git a/test/files/neg/t6336.check b/test/files/neg/t6336.check
new file mode 100644
index 0000000..f70a5f7
--- /dev/null
+++ b/test/files/neg/t6336.check
@@ -0,0 +1,7 @@
+t6336.scala:3: error: Parameter type in structural refinement may not refer to a user-defined value class
+ val a = new { def y[T](x: X[T]) = x.i }
+ ^
+t6336.scala:4: error: Result type in structural refinement may not refer to a user-defined value class
+ val b = new { def y[T](x: T): X[T] = new X(2) }
+ ^
+two errors found
diff --git a/test/files/neg/t6336.scala b/test/files/neg/t6336.scala
new file mode 100644
index 0000000..b1d61f4
--- /dev/null
+++ b/test/files/neg/t6336.scala
@@ -0,0 +1,12 @@
+object D {
+ def main(args: Array[String]) {
+ val a = new { def y[T](x: X[T]) = x.i }
+ val b = new { def y[T](x: T): X[T] = new X(2) }
+ val x = new X(3)
+ val t = a.y(x)
+ println(t)
+ }
+}
+
+class X[T](val i: Int) extends AnyVal
+
diff --git a/test/files/neg/t6337.check b/test/files/neg/t6337.check
new file mode 100644
index 0000000..8448f71
--- /dev/null
+++ b/test/files/neg/t6337.check
@@ -0,0 +1,7 @@
+t6337.scala:10: error: value class may not wrap another user-defined value class
+class X[T](val i: XX[T]) extends AnyVal
+ ^
+t6337.scala:20: error: value class may not wrap another user-defined value class
+class X1[T](val i: XX1[T]) extends AnyVal
+ ^
+two errors found
diff --git a/test/files/neg/t6337.scala b/test/files/neg/t6337.scala
new file mode 100644
index 0000000..c3858f8
--- /dev/null
+++ b/test/files/neg/t6337.scala
@@ -0,0 +1,21 @@
+object C {
+
+ def main(args: Array[String]) = {
+ val x = new X(new XX(3))
+ println(x.i.x + 9)
+ }
+
+}
+
+class X[T](val i: XX[T]) extends AnyVal
+class XX[T](val x: T) extends AnyVal
+
+object C1 {
+ def main(args: Array[String]) {
+ val x = new X1(new XX1(Some(3)))
+ println(x.i.x.get + 9)
+ }
+}
+
+class X1[T](val i: XX1[T]) extends AnyVal
+class XX1[T](val x: Option[T]) extends AnyVal
diff --git a/test/files/neg/t6340.check b/test/files/neg/t6340.check
new file mode 100644
index 0000000..f18b8c3
--- /dev/null
+++ b/test/files/neg/t6340.check
@@ -0,0 +1,10 @@
+t6340.scala:11: error: value D is not a member of object Foo
+ import Foo.{ A, B, C, D, E, X, Y, Z }
+ ^
+t6340.scala:16: error: not found: type D
+ val d = new D
+ ^
+t6340.scala:17: error: not found: type W
+ val w = new W
+ ^
+three errors found
diff --git a/test/files/neg/t6340.scala b/test/files/neg/t6340.scala
new file mode 100644
index 0000000..8934d5c
--- /dev/null
+++ b/test/files/neg/t6340.scala
@@ -0,0 +1,21 @@
+object Foo {
+ class A
+ class B
+ class C
+ class X
+ class Y
+ class Z
+}
+
+object Test {
+ import Foo.{ A, B, C, D, E, X, Y, Z }
+
+ val a = new A
+ val b = new B
+ val c = new C
+ val d = new D
+ val w = new W
+ val x = new X
+ val y = new Y
+ val z = new Z
+}
diff --git a/test/files/neg/t6357.check b/test/files/neg/t6357.check
new file mode 100644
index 0000000..a534d14
--- /dev/null
+++ b/test/files/neg/t6357.check
@@ -0,0 +1,4 @@
+t6357.scala:3: error: value class may not be a local class
+ final class Y(val j: Int) extends AnyVal
+ ^
+one error found
diff --git a/test/files/neg/t6357.scala b/test/files/neg/t6357.scala
new file mode 100644
index 0000000..47f5629
--- /dev/null
+++ b/test/files/neg/t6357.scala
@@ -0,0 +1,6 @@
+object K {
+ def q = {
+ final class Y(val j: Int) extends AnyVal
+ 3
+ }
+}
diff --git a/test/files/neg/t6359.check b/test/files/neg/t6359.check
new file mode 100644
index 0000000..5bcdc57
--- /dev/null
+++ b/test/files/neg/t6359.check
@@ -0,0 +1,9 @@
+t6359.scala:3: error: implementation restriction: nested object is not allowed in value class
+This restriction is planned to be removed in subsequent releases.
+ object X
+ ^
+t6359.scala:4: error: implementation restriction: nested class is not allowed in value class
+This restriction is planned to be removed in subsequent releases.
+ class Y
+ ^
+two errors found
diff --git a/test/files/neg/t6359.scala b/test/files/neg/t6359.scala
new file mode 100644
index 0000000..96550fd
--- /dev/null
+++ b/test/files/neg/t6359.scala
@@ -0,0 +1,8 @@
+class M(val t: Int) extends AnyVal {
+ def lazyString = {
+ object X
+ class Y
+
+ () => {X; new Y}
+ }
+}
diff --git a/test/files/neg/t6385.check b/test/files/neg/t6385.check
new file mode 100644
index 0000000..93e51e8
--- /dev/null
+++ b/test/files/neg/t6385.check
@@ -0,0 +1,7 @@
+t6385.scala:12: error: bridge generated for member method x: ()C[T] in class C
+which overrides method x: ()C[T] in trait AA
+clashes with definition of the member itself;
+both have erased type ()Object
+ def x = this
+ ^
+one error found
diff --git a/test/files/neg/t6385.scala b/test/files/neg/t6385.scala
new file mode 100644
index 0000000..cec58ee
--- /dev/null
+++ b/test/files/neg/t6385.scala
@@ -0,0 +1,13 @@
+object N {
+ def main(args: Array[String]) {
+ val y: AA[Int] = C(2)
+ val c: Int = y.x.y
+ println(c)
+ }
+}
+trait AA[T] extends Any {
+ def x: C[T]
+}
+case class C[T](val y: T) extends AnyVal with AA[T] {
+ def x = this
+}
diff --git a/test/files/neg/t639.check b/test/files/neg/t639.check
new file mode 100644
index 0000000..6d41d87
--- /dev/null
+++ b/test/files/neg/t639.check
@@ -0,0 +1,7 @@
+t639.scala:3: error: not found: object a
+import a._
+ ^
+t639.scala:5: error: not found: type B
+ at B
+ ^
+two errors found
diff --git a/test/files/neg/bug639.scala b/test/files/neg/t639.scala
similarity index 100%
rename from test/files/neg/bug639.scala
rename to test/files/neg/t639.scala
diff --git a/test/files/neg/t6436.check b/test/files/neg/t6436.check
new file mode 100644
index 0000000..5cee6fb
--- /dev/null
+++ b/test/files/neg/t6436.check
@@ -0,0 +1,10 @@
+t6436.scala:8: error: type mismatch;
+ found : StringContext
+ required: ?{def q: ?}
+Note that implicit conversions are not applicable because they are ambiguous:
+ both method foo1 in object quasiquotes of type (ctx: StringContext)AnyRef{def q: Nothing}
+ and method foo2 in object quasiquotes of type (ctx: StringContext)AnyRef{def q: Nothing}
+ are possible conversion functions from StringContext to ?{def q: ?}
+ println(q"a")
+ ^
+one error found
diff --git a/test/files/neg/t6436.scala b/test/files/neg/t6436.scala
new file mode 100644
index 0000000..2c40502
--- /dev/null
+++ b/test/files/neg/t6436.scala
@@ -0,0 +1,9 @@
+object quasiquotes {
+ implicit def foo1(ctx: StringContext) = new { def q = ??? }
+ implicit def foo2(ctx: StringContext) = new { def q = ??? }
+}
+
+object Test extends App {
+ import quasiquotes._
+ println(q"a")
+}
diff --git a/test/files/neg/t6436b.check b/test/files/neg/t6436b.check
new file mode 100644
index 0000000..21ab972
--- /dev/null
+++ b/test/files/neg/t6436b.check
@@ -0,0 +1,10 @@
+t6436b.scala:8: error: type mismatch;
+ found : StringContext
+ required: ?{def q: ?}
+Note that implicit conversions are not applicable because they are ambiguous:
+ both method foo1 in object quasiquotes of type (ctx: StringContext)AnyRef{def q: Nothing}
+ and method foo2 in object quasiquotes of type (ctx: StringContext)AnyRef{def q: Nothing}
+ are possible conversion functions from StringContext to ?{def q: ?}
+ println(StringContext("a").q())
+ ^
+one error found
diff --git a/test/files/neg/t6436b.scala b/test/files/neg/t6436b.scala
new file mode 100644
index 0000000..8023329
--- /dev/null
+++ b/test/files/neg/t6436b.scala
@@ -0,0 +1,9 @@
+object quasiquotes {
+ implicit def foo1(ctx: StringContext) = new { def q = ??? }
+ implicit def foo2(ctx: StringContext) = new { def q = ??? }
+}
+
+object Test extends App {
+ import quasiquotes._
+ println(StringContext("a").q())
+}
diff --git a/test/files/neg/t6443c.check b/test/files/neg/t6443c.check
new file mode 100644
index 0000000..7cf8d23
--- /dev/null
+++ b/test/files/neg/t6443c.check
@@ -0,0 +1,7 @@
+t6443c.scala:16: error: double definition:
+method foo:(d: B.D)(a: Any)(d2: d.type)Unit and
+method foo:(d: B.D)(a: Any, d2: d.type)Unit at line 11
+have same type after erasure: (d: B.D, a: Object, d2: B.D)Unit
+ def foo(d: D)(a: Any)(d2: d.type): Unit = ()
+ ^
+one error found
diff --git a/test/files/neg/t6443c.scala b/test/files/neg/t6443c.scala
new file mode 100644
index 0000000..817224e
--- /dev/null
+++ b/test/files/neg/t6443c.scala
@@ -0,0 +1,21 @@
+trait A {
+ type D >: Null <: C
+ def foo(d: D)(a: Any, d2: d.type): Unit
+ trait C {
+ def bar: Unit = foo(null)(null, null)
+ }
+}
+object B extends A {
+ class D extends C
+
+ def foo(d: D)(a: Any, d2: d.type): Unit = () // Bridge method required here!
+
+ // No bridge method should be added, but we'll be happy enough if
+ // the "same type after erasure" error kicks in before the duplicated
+ // bridge causes a problem.
+ def foo(d: D)(a: Any)(d2: d.type): Unit = ()
+}
+
+object Test extends App {
+ new B.D().bar
+}
diff --git a/test/files/neg/t6483.check b/test/files/neg/t6483.check
new file mode 100644
index 0000000..66e3507
--- /dev/null
+++ b/test/files/neg/t6483.check
@@ -0,0 +1,9 @@
+t6483.scala:7: error: implementation restriction: qualified super reference is not allowed in value class
+This restriction is planned to be removed in subsequent releases.
+ override def foo = super[T].foo // error
+ ^
+t6483.scala:20: error: implementation restriction: nested class is not allowed in value class
+This restriction is planned to be removed in subsequent releases.
+ class Inner extends T {
+ ^
+two errors found
diff --git a/test/files/neg/t6483.scala b/test/files/neg/t6483.scala
new file mode 100644
index 0000000..bd99f68
--- /dev/null
+++ b/test/files/neg/t6483.scala
@@ -0,0 +1,24 @@
+trait T extends Any {
+ def foo = 1
+ type X
+}
+
+class C1(val a: Any) extends AnyVal with T {
+ override def foo = super[T].foo // error
+}
+
+class C2(val a: Int) extends AnyVal with T {
+ override def foo = super.foo + a // okay
+}
+
+class C3(val a: Int) extends AnyVal with T {
+ override def foo = C3.super.foo + a // okay
+}
+
+class C4(val a: Int) extends AnyVal with T {
+ def foo {
+ class Inner extends T {
+ override def foo = super[T].foo + a // no (direct) error, other than that a nested class is currently illegal.
+ }
+ }
+}
diff --git a/test/files/neg/t649.check b/test/files/neg/t649.check
new file mode 100644
index 0000000..5a270d4
--- /dev/null
+++ b/test/files/neg/t649.check
@@ -0,0 +1,4 @@
+t649.scala:3: error: overloaded method foo needs result type
+ def foo[A] = foo[A]
+ ^
+one error found
diff --git a/test/files/neg/bug649.scala b/test/files/neg/t649.scala
similarity index 100%
rename from test/files/neg/bug649.scala
rename to test/files/neg/t649.scala
diff --git a/test/files/neg/t650.check b/test/files/neg/t650.check
new file mode 100644
index 0000000..320ae66
--- /dev/null
+++ b/test/files/neg/t650.check
@@ -0,0 +1,4 @@
+t650.scala:4: error: missing type arguments
+trait Test2 extends LinkedList;
+ ^
+one error found
diff --git a/test/files/neg/bug650.scala b/test/files/neg/t650.scala
similarity index 100%
rename from test/files/neg/bug650.scala
rename to test/files/neg/t650.scala
diff --git a/test/files/neg/t6526.check b/test/files/neg/t6526.check
new file mode 100644
index 0000000..606c18c
--- /dev/null
+++ b/test/files/neg/t6526.check
@@ -0,0 +1,16 @@
+t6526.scala:8: error: could not optimize @tailrec annotated method inner: it contains a recursive call not in tail position
+ @tailrec def inner(i: Int): Int = 1 + inner(i)
+ ^
+t6526.scala:14: error: could not optimize @tailrec annotated method inner: it contains a recursive call not in tail position
+ @tailrec def inner(i: Int): Int = 1 + inner(i)
+ ^
+t6526.scala:20: error: could not optimize @tailrec annotated method inner: it contains a recursive call not in tail position
+ @tailrec def inner(i: Int): Int = 1 + inner(i)
+ ^
+t6526.scala:30: error: could not optimize @tailrec annotated method inner: it contains a recursive call not in tail position
+ @tailrec def inner(i: Int): Int = 1 + inner(i)
+ ^
+t6526.scala:39: error: could not optimize @tailrec annotated method inner: it contains a recursive call not in tail position
+ def inner(i: Int): Int = 1 + inner(i)
+ ^
+5 errors found
diff --git a/test/files/neg/t6526.scala b/test/files/neg/t6526.scala
new file mode 100644
index 0000000..0bc249a
--- /dev/null
+++ b/test/files/neg/t6526.scala
@@ -0,0 +1,41 @@
+import scala.annotation.tailrec
+
+class TailRec {
+ def bar(f: => Any) = ""
+
+ // transform the qualifier of a Select
+ bar {
+ @tailrec def inner(i: Int): Int = 1 + inner(i)
+ inner(0)
+ }.length
+
+ // transform the body of a function
+ () => {
+ @tailrec def inner(i: Int): Int = 1 + inner(i)
+ inner(0)
+ }
+
+ // transform the qualifier of a Select
+ {
+ @tailrec def inner(i: Int): Int = 1 + inner(i)
+ inner(0)
+ ""
+ }.length
+
+ // The receiver of a tail recursive call must itself be transformed
+ object X {
+ @tailrec // okay, all other annotated methods should fail.
+ def foo: Any = {
+ {
+ @tailrec def inner(i: Int): Int = 1 + inner(i)
+ inner(0)
+ this
+ }.foo
+ }
+ }
+
+ Some(new AnyRef) map { phooie =>
+ @tailrec
+ def inner(i: Int): Int = 1 + inner(i)
+ } getOrElse 42
+}
diff --git a/test/files/neg/t6534.check b/test/files/neg/t6534.check
new file mode 100644
index 0000000..52e70cf
--- /dev/null
+++ b/test/files/neg/t6534.check
@@ -0,0 +1,17 @@
+t6534.scala:4: warning: Implementation of equals inherited from trait Foo overridden in class Bippy1 to enforce value class semantics
+class Bippy1(val x: Int) extends AnyVal with Foo { } // warn
+ ^
+t6534.scala:5: warning: Implementation of hashCode inherited from trait Ding overridden in class Bippy2 to enforce value class semantics
+class Bippy2(val x: Int) extends AnyVal with Ding { } // warn
+ ^
+t6534.scala:6: error: redefinition of equals method. See SIP-15, criterion 4. is not allowed in value class
+class Bippy3(val x: Int) extends AnyVal { override def equals(x: Any) = false } // error
+ ^
+t6534.scala:7: error: redefinition of hashCode method. See SIP-15, criterion 4. is not allowed in value class
+class Bippy4(val x: Int) extends AnyVal { override def hashCode = -1 } // error
+ ^
+t6534.scala:9: error: redefinition of equals method. See SIP-15, criterion 4. is not allowed in value class
+case class Bippy6(val x: Int) extends AnyVal { override def productPrefix = "Dingo" ; override def equals(x: Any) = false } // error
+ ^
+two warnings found
+three errors found
diff --git a/test/files/neg/t6534.flags b/test/files/neg/t6534.flags
new file mode 100644
index 0000000..1008b0a
--- /dev/null
+++ b/test/files/neg/t6534.flags
@@ -0,0 +1 @@
+-Xlint
diff --git a/test/files/neg/t6534.scala b/test/files/neg/t6534.scala
new file mode 100644
index 0000000..de588b6
--- /dev/null
+++ b/test/files/neg/t6534.scala
@@ -0,0 +1,10 @@
+trait Foo extends Any { override def equals(x: Any) = false }
+trait Ding extends Any { override def hashCode = -1 }
+
+class Bippy1(val x: Int) extends AnyVal with Foo { } // warn
+class Bippy2(val x: Int) extends AnyVal with Ding { } // warn
+class Bippy3(val x: Int) extends AnyVal { override def equals(x: Any) = false } // error
+class Bippy4(val x: Int) extends AnyVal { override def hashCode = -1 } // error
+case class Bippy5(val x: Int) extends AnyVal { override def productPrefix = "Dingo" } // nothing
+case class Bippy6(val x: Int) extends AnyVal { override def productPrefix = "Dingo" ; override def equals(x: Any) = false } // error
+
diff --git a/test/files/neg/t6535.check b/test/files/neg/t6535.check
new file mode 100644
index 0000000..1225ea7
--- /dev/null
+++ b/test/files/neg/t6535.check
@@ -0,0 +1,6 @@
+t6535.scala:2: error: encountered unrecoverable cycle resolving import.
+Note: this is often due in part to a class depending on a definition nested within its companion.
+If applicable, you may wish to try moving some members into another object.
+ import Bs.B._
+ ^
+one error found
diff --git a/test/files/neg/t6535.scala b/test/files/neg/t6535.scala
new file mode 100644
index 0000000..30a7503
--- /dev/null
+++ b/test/files/neg/t6535.scala
@@ -0,0 +1,15 @@
+object As {
+ import Bs.B._
+
+ object A
+ extends scala.AnyRef // needed for the cycle;
+ // replacing with a locally defined closs doesn't
+ // hit the locked import and hence doesn't cycle.
+}
+
+object Bs {
+ import As.A._
+
+ object B
+ extends scala.AnyRef // scala.Immutable, ...
+}
diff --git a/test/files/neg/t6539.check b/test/files/neg/t6539.check
new file mode 100644
index 0000000..b647636
--- /dev/null
+++ b/test/files/neg/t6539.check
@@ -0,0 +1,10 @@
+Test_2.scala:2: error: cto may only be used as an argument to m
+ M.cto // error
+ ^
+Test_2.scala:3: error: cto may only be used as an argument to m
+ M.m(M.cto, ()) // error
+ ^
+Test_2.scala:5: error: cto may only be used as an argument to m
+ M.cto // error
+ ^
+three errors found
diff --git a/test/files/neg/t6539/Macro_1.scala b/test/files/neg/t6539/Macro_1.scala
new file mode 100644
index 0000000..4f7d289
--- /dev/null
+++ b/test/files/neg/t6539/Macro_1.scala
@@ -0,0 +1,10 @@
+import language.experimental.macros
+import reflect.macros.Context
+
+object M {
+ def m(a: Any, b: Any): Any = macro mImpl
+ def mImpl(c: Context)(a: c.Expr[Any], b: c.Expr[Any]) = a
+
+ @reflect.internal.annotations.compileTimeOnly("cto may only be used as an argument to " + "m")
+ def cto = 0
+}
diff --git a/test/files/neg/t6539/Test_2.scala b/test/files/neg/t6539/Test_2.scala
new file mode 100644
index 0000000..26f4504
--- /dev/null
+++ b/test/files/neg/t6539/Test_2.scala
@@ -0,0 +1,12 @@
+object Test {
+ M.cto // error
+ M.m(M.cto, ()) // error
+ M.m((), M.cto) // okay
+ M.cto // error
+
+ locally {
+ val expr = scala.reflect.runtime.universe.reify(2)
+ val splice = expr.splice
+ val value = expr.value
+ }
+}
diff --git a/test/files/neg/t6558.check b/test/files/neg/t6558.check
new file mode 100644
index 0000000..6ad3cec
--- /dev/null
+++ b/test/files/neg/t6558.check
@@ -0,0 +1,10 @@
+t6558.scala:4: error: not found: type classs
+ @classs
+ ^
+t6558.scala:7: error: not found: type typeparam
+ class D[@typeparam T]
+ ^
+t6558.scala:10: error: not found: type valueparam
+ @valueparam x: Any
+ ^
+three errors found
diff --git a/test/files/neg/t6558.scala b/test/files/neg/t6558.scala
new file mode 100644
index 0000000..b4304ff
--- /dev/null
+++ b/test/files/neg/t6558.scala
@@ -0,0 +1,12 @@
+class AnnotNotFound {
+ def foo(a: Any) = ()
+
+ @classs
+ class C
+
+ class D[@typeparam T]
+
+ class E(
+ @valueparam x: Any
+ )
+}
diff --git a/test/files/neg/t6558b.check b/test/files/neg/t6558b.check
new file mode 100644
index 0000000..cfa384f
--- /dev/null
+++ b/test/files/neg/t6558b.check
@@ -0,0 +1,7 @@
+t6558b.scala:5: error: not found: type inargument
+ @inargument
+ ^
+t6558b.scala:11: error: not found: type infunction
+ @infunction
+ ^
+two errors found
diff --git a/test/files/neg/t6558b.scala b/test/files/neg/t6558b.scala
new file mode 100644
index 0000000..2aa06f6
--- /dev/null
+++ b/test/files/neg/t6558b.scala
@@ -0,0 +1,15 @@
+class AnnotNotFound {
+ def foo(a: Any) = ()
+
+ foo {
+ @inargument
+ def foo = 0
+ foo
+ }
+
+ () => {
+ @infunction
+ def foo = 0
+ ()
+ }
+}
diff --git a/test/files/neg/t6563.check b/test/files/neg/t6563.check
new file mode 100644
index 0000000..75dca15
--- /dev/null
+++ b/test/files/neg/t6563.check
@@ -0,0 +1,4 @@
+t6563.scala:4: error: not found: value e
+ e("f")
+ ^
+one error found
diff --git a/test/files/neg/t6563.scala b/test/files/neg/t6563.scala
new file mode 100644
index 0000000..b0077b6
--- /dev/null
+++ b/test/files/neg/t6563.scala
@@ -0,0 +1,8 @@
+class A{
+ def b(c: => Unit){}
+ b{
+ e("f")
+ new G()(){}
+ }
+}
+class G(h:String="i")()
diff --git a/test/files/neg/t6567.check b/test/files/neg/t6567.check
new file mode 100644
index 0000000..4c513e6
--- /dev/null
+++ b/test/files/neg/t6567.check
@@ -0,0 +1,7 @@
+t6567.scala:8: error: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply.
+ Option[B](a)
+ ^
+t6567.scala:10: error: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply.
+ val b: Option[B] = Option(a)
+ ^
+two errors found
diff --git a/test/files/neg/t6567.flags b/test/files/neg/t6567.flags
new file mode 100644
index 0000000..e93641e
--- /dev/null
+++ b/test/files/neg/t6567.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t6567.scala b/test/files/neg/t6567.scala
new file mode 100644
index 0000000..650e5e3
--- /dev/null
+++ b/test/files/neg/t6567.scala
@@ -0,0 +1,11 @@
+class A
+class B
+
+object Test {
+ val a: A = null
+ implicit def a2b(a: A) = new B
+
+ Option[B](a)
+
+ val b: Option[B] = Option(a)
+}
diff --git a/test/files/neg/t6597.check b/test/files/neg/t6597.check
new file mode 100644
index 0000000..1d52519
--- /dev/null
+++ b/test/files/neg/t6597.check
@@ -0,0 +1,4 @@
+t6597.scala:3: error: illegal combination of modifiers: implicit and case for: class Quux
+ implicit case class Quux(value: Int) extends AnyVal with T
+ ^
+one error found
diff --git a/test/files/neg/t6597.scala b/test/files/neg/t6597.scala
new file mode 100644
index 0000000..dde53bc
--- /dev/null
+++ b/test/files/neg/t6597.scala
@@ -0,0 +1,5 @@
+object Test {
+ trait T extends Any
+ implicit case class Quux(value: Int) extends AnyVal with T
+ object Quux
+}
diff --git a/test/files/neg/t663.check b/test/files/neg/t663.check
new file mode 100644
index 0000000..40161fb
--- /dev/null
+++ b/test/files/neg/t663.check
@@ -0,0 +1,7 @@
+t663.scala:11: error: name clash between defined and inherited member:
+method asMatch:(m: Test.this.Node)Any and
+method asMatch:(node: Test.this.Matchable)Any in trait MatchableImpl
+have same type after erasure: (m: test.Test#NodeImpl)Object
+ def asMatch(m : Node) : Any = {
+ ^
+one error found
diff --git a/test/files/neg/bug663.scala b/test/files/neg/t663.scala
similarity index 100%
rename from test/files/neg/bug663.scala
rename to test/files/neg/t663.scala
diff --git a/test/files/neg/t664.check b/test/files/neg/t664.check
new file mode 100644
index 0000000..cbdf53d
--- /dev/null
+++ b/test/files/neg/t664.check
@@ -0,0 +1,7 @@
+t664.scala:4: error: type Foo is not a member of test.Test
+ trait Foo extends super.Foo {
+ ^
+t664.scala:5: error: type Bar is not a member of AnyRef
+ trait Bar extends super.Bar;
+ ^
+two errors found
diff --git a/test/files/neg/bug664.scala b/test/files/neg/t664.scala
similarity index 100%
rename from test/files/neg/bug664.scala
rename to test/files/neg/t664.scala
diff --git a/test/files/neg/t6663.check b/test/files/neg/t6663.check
new file mode 100644
index 0000000..aa4faa4
--- /dev/null
+++ b/test/files/neg/t6663.check
@@ -0,0 +1,6 @@
+t6663.scala:16: error: type mismatch;
+ found : String
+ required: Int
+ var v = new C(42).foo[String].get :Int
+ ^
+one error found
diff --git a/test/files/neg/t6663.scala b/test/files/neg/t6663.scala
new file mode 100644
index 0000000..4a358df
--- /dev/null
+++ b/test/files/neg/t6663.scala
@@ -0,0 +1,19 @@
+import language.dynamics
+
+class C(v: Any) extends Dynamic {
+ def selectDynamic[T](n: String): Option[T] = Option(v.asInstanceOf[T])
+ def applyDynamic[T](n: String)(): Option[T] = Option(v.asInstanceOf[T])
+}
+
+object Test extends App {
+ // this should be converted to
+ // C(42).selectDynamic[String]("foo").get
+ // causing a compile error.
+
+ // but, before fixing SI-6663, became
+ // C(42).selectDynamic("foo").get, ignoring
+ // the [String] type parameter
+ var v = new C(42).foo[String].get :Int
+ println(v)
+}
+
diff --git a/test/files/neg/t6666.check b/test/files/neg/t6666.check
new file mode 100644
index 0000000..6337d4c
--- /dev/null
+++ b/test/files/neg/t6666.check
@@ -0,0 +1,37 @@
+t6666.scala:23: error: Implementation restriction: access of method x$2 in object O1 from anonymous class 2, would require illegal premature access to object O1
+ F.byname(x)
+ ^
+t6666.scala:30: error: Implementation restriction: access of value x$3 in object O2 from anonymous class 3, would require illegal premature access to object O2
+ F.byname(x)
+ ^
+t6666.scala:37: error: Implementation restriction: access of method x$4 in object O3 from anonymous class 4, would require illegal premature access to object O3
+ F.hof(() => x)
+ ^
+t6666.scala:50: error: Implementation restriction: access of method x$6 in class C1 from anonymous class 7, would require illegal premature access to the unconstructed `this` of class C1
+ F.byname(x)
+ ^
+t6666.scala:54: error: Implementation restriction: access of value x$7 in class C2 from anonymous class 8, would require illegal premature access to the unconstructed `this` of class C2
+ F.byname(x)
+ ^
+t6666.scala:58: error: Implementation restriction: access of method x$8 in class C3 from anonymous class 9, would require illegal premature access to the unconstructed `this` of class C3
+ F.hof(() => x)
+ ^
+t6666.scala:62: error: Implementation restriction: access of method x$9 in class C4 from object Nested$4, would require illegal premature access to the unconstructed `this` of class C4
+ object Nested { def xx = x}
+ ^
+t6666.scala:76: error: Implementation restriction: access of method x$11 in class C11 from anonymous class 12, would require illegal premature access to the unconstructed `this` of class C11
+ F.byname(x)
+ ^
+t6666.scala:95: error: Implementation restriction: access of method x$12 in class C13 from anonymous class 13, would require illegal premature access to the unconstructed `this` of class C13
+ F.hof(() => x)
+ ^
+t6666.scala:104: error: Implementation restriction: access of method x$13 in class C14 from object Nested$5, would require illegal premature access to the unconstructed `this` of class C14
+ object Nested { def xx = x}
+ ^
+t6666.scala:112: error: Implementation restriction: access of method foo$1 in class COuter from class CInner$1, would require illegal premature access to the unconstructed `this` of class COuter
+ class CInner extends C({foo})
+ ^
+t6666.scala:118: error: Implementation restriction: access of method x$14 in class CEarly from object Nested$6, would require illegal premature access to the unconstructed `this` of class CEarly
+ object Nested { def xx = x}
+ ^
+12 errors found
diff --git a/test/files/neg/t6666.scala b/test/files/neg/t6666.scala
new file mode 100644
index 0000000..1919ea3
--- /dev/null
+++ b/test/files/neg/t6666.scala
@@ -0,0 +1,121 @@
+class C(a: Any)
+object F {
+ def byname(a: => Any) = println(a)
+ def hof(a: () => Any) = println(a())
+}
+
+class COkay extends C(0) {
+ def this(a: Any) {
+ this()
+ def x = "".toString
+ F.byname(x)
+ }
+}
+
+//
+// The thunk's apply method accesses the MODULE$
+// field before it is set.
+//
+// 0: getstatic #23; //Field O1$.MODULE$:LO1$;
+// 3: invokevirtual #26; //Method O1$.O1$$x$1:()Ljava/lang/String;
+object O1 extends C({
+ def x = "".toString
+ F.byname(x)
+})
+
+// java.lang.NullPointerException
+// at O2$$anonfun$$init$$1.apply(<console>:11)
+object O2 extends C({
+ lazy val x = "".toString
+ F.byname(x)
+})
+
+// java.lang.NullPointerException
+// at O3$$anonfun$$init$$1.apply(<console>:11)
+object O3 extends C({
+ def x = "".toString
+ F.hof(() => x)
+})
+
+// Okay, the nested classes don't get an outer pointer passed,
+// just an extra param for `x: String`.
+object O6 extends C({
+ val x = "".toString
+ F.byname(x); F.hof(() => x); (new { val xx = x }.xx)
+})
+
+
+class C1 extends C({
+ def x = "".toString
+ F.byname(x)
+})
+class C2 extends C({
+ lazy val x = "".toString
+ F.byname(x)
+})
+class C3 extends C({
+ def x = "".toString
+ F.hof(() => x)
+})
+class C4 extends C({
+ def x = "".toString
+ object Nested { def xx = x}
+ Nested.xx
+})
+
+// okay, for same reason as O6
+class C6 extends C({
+ val x = "".toString
+ F.byname(x); F.hof(() => x); (new { val xx = x }.xx)
+})
+
+class C11(a: Any) {
+ def this() = {
+ this({
+ def x = "".toString
+ F.byname(x)
+ })
+ }
+}
+
+// Crashes earlier in lazyVals.
+// class C12(a: Any) {
+// def this() = {
+// this({
+// lazy val x = "".toString
+// F.byname(x)
+// })
+// }
+// }
+
+class C13(a: Any) {
+ def this() = {
+ this({
+ def x = "".toString
+ F.hof(() => x)
+ })
+ }
+}
+
+class C14(a: Any) {
+ def this() = {
+ this({
+ def x = "".toString
+ object Nested { def xx = x}
+ Nested.xx
+ })
+ }
+}
+
+class COuter extends C({
+ def foo = 0
+ class CInner extends C({foo})
+})
+
+
+class CEarly(a: Any) extends {
+ val early = {def x = "".toString
+ object Nested { def xx = x}
+ Nested.xx
+ }
+} with AnyRef
\ No newline at end of file
diff --git a/test/files/neg/t6666b.check b/test/files/neg/t6666b.check
new file mode 100644
index 0000000..c3ffc7c
--- /dev/null
+++ b/test/files/neg/t6666b.check
@@ -0,0 +1,7 @@
+t6666b.scala:11: error: Implementation restriction: access of method x$1 in class C5 from object Nested$3, would require illegal premature access to the unconstructed `this` of class C5
+ object Nested { def xx = x}
+ ^
+t6666b.scala:22: error: Implementation restriction: access of method x$2 in class C15 from object Nested$4, would require illegal premature access to the unconstructed `this` of class C15
+ object Nested { def xx = x}
+ ^
+two errors found
diff --git a/test/files/neg/t6666b.scala b/test/files/neg/t6666b.scala
new file mode 100644
index 0000000..205ded7
--- /dev/null
+++ b/test/files/neg/t6666b.scala
@@ -0,0 +1,27 @@
+class C(a: Any)
+object F {
+ def byname(a: => Any) = println(a)
+ def hof(a: () => Any) = println(a())
+}
+
+
+class C5 extends C({
+ def x = "".toString
+ val y = {
+ object Nested { def xx = x}
+ Nested.xx
+ }
+})
+
+
+class C15(a: Any) {
+ def this() = {
+ this({
+ def x = "".toString
+ val y = {
+ object Nested { def xx = x}
+ Nested.xx
+ }
+ })
+ }
+}
diff --git a/test/files/neg/t6666c.check b/test/files/neg/t6666c.check
new file mode 100644
index 0000000..8fb9f4b
--- /dev/null
+++ b/test/files/neg/t6666c.check
@@ -0,0 +1,10 @@
+t6666c.scala:2: error: Implementation restriction: access of method x$1 in class D from object X$4, would require illegal premature access to the unconstructed `this` of class D
+class D extends C({def x = 0; object X { x }})
+ ^
+t6666c.scala:5: error: Implementation restriction: access of method x$2 in class D1 from object X$5, would require illegal premature access to the unconstructed `this` of class D1
+class D1 extends C1({def x = 0; () => {object X { x }}})
+ ^
+t6666c.scala:8: error: Implementation restriction: access of method x$3 from object X$6, would require illegal premature access to the unconstructed `this` of anonymous class 2
+class D2 extends C2({def x = 0; object X { x }})
+ ^
+three errors found
diff --git a/test/files/neg/t6666c.scala b/test/files/neg/t6666c.scala
new file mode 100644
index 0000000..76cc358
--- /dev/null
+++ b/test/files/neg/t6666c.scala
@@ -0,0 +1,8 @@
+class C(a: Any)
+class D extends C({def x = 0; object X { x }})
+
+class C1(a: () => Any)
+class D1 extends C1({def x = 0; () => {object X { x }}})
+
+class C2(a: => Any)
+class D2 extends C2({def x = 0; object X { x }})
diff --git a/test/files/neg/t6666d.check b/test/files/neg/t6666d.check
new file mode 100644
index 0000000..b4785f0
--- /dev/null
+++ b/test/files/neg/t6666d.check
@@ -0,0 +1,4 @@
+t6666d.scala:7: error: Implementation restriction: access of object TreeOrd$1 from object TreeOrd$2, would require illegal premature access to the unconstructed `this` of class Test
+ implicit object TreeOrd extends Ordering[K](){
+ ^
+one error found
diff --git a/test/files/neg/t6666d.scala b/test/files/neg/t6666d.scala
new file mode 100644
index 0000000..49a688f
--- /dev/null
+++ b/test/files/neg/t6666d.scala
@@ -0,0 +1,18 @@
+
+import scala.collection.immutable.TreeMap
+import scala.math.Ordering
+
+class Test[K](param:TreeMap[K,Int]){
+ def this() = this({
+ implicit object TreeOrd extends Ordering[K](){
+ def compare(a: K, b: K) = {
+ -1
+ }
+ }
+ new TreeMap[K, Int]()
+ })
+}
+
+object Test extends App {
+ new Test()
+}
diff --git a/test/files/neg/t6666e.check b/test/files/neg/t6666e.check
new file mode 100644
index 0000000..9fcc3ab
--- /dev/null
+++ b/test/files/neg/t6666e.check
@@ -0,0 +1,4 @@
+t6666e.scala:8: error: Implementation restriction: anonymous class $anonfun requires premature access to class Crash.
+ this(Nil.collect{case x =>})
+ ^
+one error found
diff --git a/test/files/neg/t6666e.scala b/test/files/neg/t6666e.scala
new file mode 100644
index 0000000..120a587
--- /dev/null
+++ b/test/files/neg/t6666e.scala
@@ -0,0 +1,9 @@
+
+import scala.collection.immutable.TreeMap
+import scala.math.Ordering
+
+
+class Crash(a: Any) {
+ def this() =
+ this(Nil.collect{case x =>})
+}
diff --git a/test/files/neg/t6667.check b/test/files/neg/t6667.check
new file mode 100644
index 0000000..b04251d
--- /dev/null
+++ b/test/files/neg/t6667.check
@@ -0,0 +1,14 @@
+t6667.scala:8: error: Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667.
+ambiguous implicit values:
+ both value inScope1 in object Test of type => C
+ and value inScope2 in object Test of type => C
+ match expected type C
+ implicitly[C]: Unit // C.companion was used; whereas the ambiguity should abort the implicit search.
+ ^
+t6667.scala:9: error: ambiguous implicit values:
+ both value inScope1 in object Test of type => C
+ and value inScope2 in object Test of type => C
+ match expected type C
+ implicitly[C] // ambiguity reported, rather than falling back to C.companion
+ ^
+two errors found
diff --git a/test/files/neg/t6667.flags b/test/files/neg/t6667.flags
new file mode 100644
index 0000000..6c1dd10
--- /dev/null
+++ b/test/files/neg/t6667.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xlint
\ No newline at end of file
diff --git a/test/files/neg/t6667.scala b/test/files/neg/t6667.scala
new file mode 100644
index 0000000..fb857eb
--- /dev/null
+++ b/test/files/neg/t6667.scala
@@ -0,0 +1,10 @@
+class C
+object C {
+ implicit def companion = new C
+}
+
+object Test {
+ implicit val inScope1, inScope2 = new C
+ implicitly[C]: Unit // C.companion was used; whereas the ambiguity should abort the implicit search.
+ implicitly[C] // ambiguity reported, rather than falling back to C.companion
+}
diff --git a/test/files/neg/t6667b.check b/test/files/neg/t6667b.check
new file mode 100644
index 0000000..5d56e77
--- /dev/null
+++ b/test/files/neg/t6667b.check
@@ -0,0 +1,14 @@
+t6667b.scala:16: error: ambiguous implicit values:
+ both value a in object Test of type => Test.Box
+ and value b of type Test.Box
+ match expected type Test.Box
+ new Test()
+ ^
+t6667b.scala:19: error: Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667.
+ambiguous implicit values:
+ both value a in object Test of type => Test.Box
+ and value b of type Test.Box
+ match expected type Test.Box
+ new Test()
+ ^
+two errors found
diff --git a/test/files/neg/t6667b.flags b/test/files/neg/t6667b.flags
new file mode 100644
index 0000000..6c1dd10
--- /dev/null
+++ b/test/files/neg/t6667b.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xlint
\ No newline at end of file
diff --git a/test/files/neg/t6667b.scala b/test/files/neg/t6667b.scala
new file mode 100644
index 0000000..4e64e1a
--- /dev/null
+++ b/test/files/neg/t6667b.scala
@@ -0,0 +1,25 @@
+object Test {
+ abstract class Box {
+ val value: Int
+ }
+
+ implicit val a: Box = new Box {
+ val value= 1
+ }
+
+ def main(args: Array[String]) {
+ implicit val b: Box= new Box {
+ val value= 2
+ }
+
+ new Object {
+ new Test()
+ }
+ // compare with:
+ new Test()
+ }
+}
+
+class Test()(implicit x: Test.Box) {
+ println(x.value)
+}
diff --git a/test/files/neg/t667.check b/test/files/neg/t667.check
new file mode 100644
index 0000000..d4367bc
--- /dev/null
+++ b/test/files/neg/t667.check
@@ -0,0 +1,4 @@
+t667.scala:8: error: class Ni inherits itself
+ class Ni extends super.Ni with Ni;
+ ^
+one error found
diff --git a/test/files/neg/bug667.scala b/test/files/neg/t667.scala
similarity index 100%
rename from test/files/neg/bug667.scala
rename to test/files/neg/t667.scala
diff --git a/test/files/neg/t6675-old-patmat.check b/test/files/neg/t6675-old-patmat.check
new file mode 100644
index 0000000..bc3920d
--- /dev/null
+++ b/test/files/neg/t6675-old-patmat.check
@@ -0,0 +1,4 @@
+t6675-old-patmat.scala:10: error: extractor pattern binds a single value to a Product3 of type (Int, Int, Int)
+ "" match { case X(b) => b } // should warn under -Xlint. Not an error because of SI-6111
+ ^
+one error found
diff --git a/test/files/neg/t6675-old-patmat.flags b/test/files/neg/t6675-old-patmat.flags
new file mode 100644
index 0000000..604de64
--- /dev/null
+++ b/test/files/neg/t6675-old-patmat.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings -Xoldpatmat
\ No newline at end of file
diff --git a/test/files/neg/t6675-old-patmat.scala b/test/files/neg/t6675-old-patmat.scala
new file mode 100644
index 0000000..4d500b7
--- /dev/null
+++ b/test/files/neg/t6675-old-patmat.scala
@@ -0,0 +1,13 @@
+object X {
+ def unapply(s: String): Option[(Int,Int,Int)] = Some((1,2,3))
+}
+
+object Y {
+ def unapplySeq(s: String): Option[Seq[(Int,Int,Int)]] = Some(Seq((1,2,3)))
+}
+
+object Test {
+ "" match { case X(b) => b } // should warn under -Xlint. Not an error because of SI-6111
+
+ "" match { case Y(b) => b } // no warning
+}
diff --git a/test/files/neg/t6675.check b/test/files/neg/t6675.check
new file mode 100644
index 0000000..7b271de
--- /dev/null
+++ b/test/files/neg/t6675.check
@@ -0,0 +1,4 @@
+t6675.scala:10: error: extractor pattern binds a single value to a Product3 of type (Int, Int, Int)
+ "" match { case X(b) => b } // should warn under -Xlint. Not an error because of SI-6111
+ ^
+one error found
diff --git a/test/files/neg/t6675.flags b/test/files/neg/t6675.flags
new file mode 100644
index 0000000..e93641e
--- /dev/null
+++ b/test/files/neg/t6675.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t6675.scala b/test/files/neg/t6675.scala
new file mode 100644
index 0000000..4d500b7
--- /dev/null
+++ b/test/files/neg/t6675.scala
@@ -0,0 +1,13 @@
+object X {
+ def unapply(s: String): Option[(Int,Int,Int)] = Some((1,2,3))
+}
+
+object Y {
+ def unapplySeq(s: String): Option[Seq[(Int,Int,Int)]] = Some(Seq((1,2,3)))
+}
+
+object Test {
+ "" match { case X(b) => b } // should warn under -Xlint. Not an error because of SI-6111
+
+ "" match { case Y(b) => b } // no warning
+}
diff --git a/test/files/neg/t668.check b/test/files/neg/t668.check
new file mode 100644
index 0000000..b057ca7
--- /dev/null
+++ b/test/files/neg/t668.check
@@ -0,0 +1,4 @@
+t668.scala:1: error: type Iterable takes type parameters
+class Test extends Iterable
+ ^
+one error found
diff --git a/test/files/neg/bug668.scala b/test/files/neg/t668.scala
similarity index 100%
rename from test/files/neg/bug668.scala
rename to test/files/neg/t668.scala
diff --git a/test/files/neg/t6728.check b/test/files/neg/t6728.check
new file mode 100644
index 0000000..d853d6f
--- /dev/null
+++ b/test/files/neg/t6728.check
@@ -0,0 +1,4 @@
+t6728.scala:4: error: '(' expected but '}' found.
+ }
+ ^
+one error found
diff --git a/test/files/neg/t6728.scala b/test/files/neg/t6728.scala
new file mode 100644
index 0000000..ba0b1a0
--- /dev/null
+++ b/test/files/neg/t6728.scala
@@ -0,0 +1,5 @@
+object X {
+ while(true) {
+ for
+ }
+}
diff --git a/test/files/neg/t6758.check b/test/files/neg/t6758.check
new file mode 100644
index 0000000..2cdd6b8
--- /dev/null
+++ b/test/files/neg/t6758.check
@@ -0,0 +1,28 @@
+t6758.scala:5: error: not found: type inargument
+ @inargument
+ ^
+t6758.scala:11: error: not found: type infunction
+ @infunction
+ ^
+t6758.scala:18: error: not found: type nested
+ @nested
+ ^
+t6758.scala:25: error: not found: type param
+ def func(@param x: Int): Int = 0
+ ^
+t6758.scala:28: error: not found: type typealias
+ @typealias
+ ^
+t6758.scala:32: error: not found: type classs
+ @classs
+ ^
+t6758.scala:35: error: not found: type module
+ @module
+ ^
+t6758.scala:38: error: not found: type typeparam
+ class D[@typeparam T]
+ ^
+t6758.scala:41: error: not found: type valueparam
+ @valueparam x: Any
+ ^
+9 errors found
diff --git a/test/files/neg/t6758.scala b/test/files/neg/t6758.scala
new file mode 100644
index 0000000..acf333b
--- /dev/null
+++ b/test/files/neg/t6758.scala
@@ -0,0 +1,43 @@
+class AnnotNotFound {
+ def foo(a: Any) = ()
+
+ foo {
+ @inargument
+ def foo = 0
+ foo
+ }
+
+ () => {
+ @infunction
+ def foo = 0
+ ()
+ }
+
+ () => {
+ val bar: Int = {
+ @nested
+ val bar2: Int = 2
+ 2
+ }
+ ()
+ }
+
+ def func(@param x: Int): Int = 0
+
+ abstract class A {
+ @typealias
+ type B = Int
+ }
+
+ @classs
+ class C
+
+ @module
+ object D
+
+ class D[@typeparam T]
+
+ class E(
+ @valueparam x: Any
+ )
+}
diff --git a/test/files/neg/t677.check b/test/files/neg/t677.check
new file mode 100644
index 0000000..122830a
--- /dev/null
+++ b/test/files/neg/t677.check
@@ -0,0 +1,6 @@
+t677.scala:2: error: type mismatch;
+ found : () => Int
+ required: Nothing
+ val zx: Nothing = {() => 4}
+ ^
+one error found
diff --git a/test/files/neg/bug677.scala b/test/files/neg/t677.scala
similarity index 100%
rename from test/files/neg/bug677.scala
rename to test/files/neg/t677.scala
diff --git a/test/files/neg/t6771b.check b/test/files/neg/t6771b.check
new file mode 100644
index 0000000..ba99e91
--- /dev/null
+++ b/test/files/neg/t6771b.check
@@ -0,0 +1,6 @@
+t6771b.scala:14: error: type mismatch;
+ found : x.type (with underlying type String)
+ required: Test.a.type
+ b = b match { case x => x }
+ ^
+one error found
diff --git a/test/files/neg/t6771b.scala b/test/files/neg/t6771b.scala
new file mode 100644
index 0000000..78f11f7
--- /dev/null
+++ b/test/files/neg/t6771b.scala
@@ -0,0 +1,16 @@
+// Currently, the pattern matcher widens the type of the
+// scrutinee, so this doesn't typecheck. This test just
+// confirms this behaviour, although it would be an improvement
+// to change this and make this a `pos` test.
+//
+// But, to the intrepid hacker who works on this, a few notes:
+// You'll have to look into places in the pattern matcher that
+// call `dealias`, and see if they need to be `dealiasWiden`.
+// For example, if `checkableType` used only `dealias`, `pos/t6671.scala`
+// would fail.
+object Test {
+ val a = ""; var b: a.type = a
+
+ b = b match { case x => x }
+}
+
diff --git a/test/files/neg/t6788.check b/test/files/neg/t6788.check
new file mode 100644
index 0000000..96a6f8b
--- /dev/null
+++ b/test/files/neg/t6788.check
@@ -0,0 +1,5 @@
+t6788.scala:6: error: not found: value foo
+Error occurred in an application involving default arguments.
+ s.copy(b = foo)
+ ^
+one error found
diff --git a/test/files/neg/t6788.scala b/test/files/neg/t6788.scala
new file mode 100644
index 0000000..77949ed
--- /dev/null
+++ b/test/files/neg/t6788.scala
@@ -0,0 +1,7 @@
+case class B[T](b: T, a: List[Int]) // need two args, B must be polymorphic
+
+class A {
+ var s: B[Int] = _ // has to be a var
+
+ s.copy(b = foo)
+}
diff --git a/test/files/neg/t6795.check b/test/files/neg/t6795.check
new file mode 100644
index 0000000..88ef3e9
--- /dev/null
+++ b/test/files/neg/t6795.check
@@ -0,0 +1,4 @@
+t6795.scala:3: error: `abstract override' modifier not allowed for type members
+trait T1 extends T { abstract override type U = Int }
+ ^
+one error found
diff --git a/test/files/neg/t6795.scala b/test/files/neg/t6795.scala
new file mode 100644
index 0000000..a523c89
--- /dev/null
+++ b/test/files/neg/t6795.scala
@@ -0,0 +1,3 @@
+trait T { type U }
+// "abstract override" shouldn't be allowed on types
+trait T1 extends T { abstract override type U = Int }
diff --git a/test/files/neg/t6829.check b/test/files/neg/t6829.check
new file mode 100644
index 0000000..7c3c66e
--- /dev/null
+++ b/test/files/neg/t6829.check
@@ -0,0 +1,36 @@
+t6829.scala:35: error: type mismatch;
+ found : AgentSimulation.this.state.type (with underlying type G#State)
+ required: _9.State
+ lazy val actions: Map[G#Agent,G#Action] = agents.map(a => a -> a.chooseAction(state)).toMap
+ ^
+t6829.scala:45: error: trait AgentSimulation takes type parameters
+ pastHistory: List[G#State] = Nil) extends AgentSimulation
+ ^
+t6829.scala:47: error: class LearningSimulation takes type parameters
+ lazy val step: LearningSimulation = {
+ ^
+t6829.scala:49: error: not found: value actions
+ val (s,a,s2) = (state,actions(agent),nextState)
+ ^
+t6829.scala:49: error: not found: value nextState
+ val (s,a,s2) = (state,actions(agent),nextState)
+ ^
+t6829.scala:50: error: type mismatch;
+ found : s.type (with underlying type Any)
+ required: _53.State where val _53: G
+ val r = rewards(agent).r(s,a,s2)
+ ^
+t6829.scala:51: error: type mismatch;
+ found : s.type (with underlying type Any)
+ required: _50.State
+ agent.learn(s,a,s2,r): G#Agent
+ ^
+t6829.scala:53: error: not found: value nextState
+Error occurred in an application involving default arguments.
+ copy(agents = updatedAgents, state = nextState, pastHistory = currentHistory)
+ ^
+t6829.scala:53: error: not found: value currentHistory
+Error occurred in an application involving default arguments.
+ copy(agents = updatedAgents, state = nextState, pastHistory = currentHistory)
+ ^
+9 errors found
diff --git a/test/files/neg/t6829.scala b/test/files/neg/t6829.scala
new file mode 100644
index 0000000..7cbe3c9
--- /dev/null
+++ b/test/files/neg/t6829.scala
@@ -0,0 +1,64 @@
+package bugs
+
+/**
+ * Created with IntelliJ IDEA.
+ * User: arya
+ * Date: 12/18/12
+ * Time: 4:17 PM
+ * To change this template use File | Settings | File Templates.
+ */
+object currenttype2 {
+
+ type Reward = Double
+
+ trait AbstractAgent[State,Action] {
+ type A = AbstractAgent[State,Action]
+ def chooseAction(s: State): Action
+ def startEpisode: A = this
+ def learn(s1: State, a: Action, s2: State, r: Reward): A
+ }
+
+ case class RewardFunction[State,Action](r: (State,Action,State) => Reward)
+
+ trait Rules[G<:GameDomain] {
+ def simulate(state: G#State, agentActions: List[(G#Agent,G#Action)]): G#State
+ }
+
+ trait AgentSimulation[G<:GameDomain] {
+ val agents: List[G#Agent]
+ val state: G#State
+ val rewards: Map[G#Agent,G#Rewards]
+ val rules: Rules[G]
+ val pastHistory: List[G#State]
+ lazy val currentHistory = state :: pastHistory
+
+ lazy val actions: Map[G#Agent,G#Action] = agents.map(a => a -> a.chooseAction(state)).toMap
+ lazy val nextState: G#State = rules.simulate(state, actions.toList)
+
+ def step: AgentSimulation[G]
+ }
+
+ case class LearningSimulation[G<:GameDomain](agents: List[G#Agent],
+ state: G#State,
+ rewards: Map[G#Agent,G#Rewards],
+ rules: Rules[G],
+ pastHistory: List[G#State] = Nil) extends AgentSimulation
+ {
+ lazy val step: LearningSimulation = {
+ val updatedAgents: List[G#Agent] = agents map { agent =>
+ val (s,a,s2) = (state,actions(agent),nextState)
+ val r = rewards(agent).r(s,a,s2)
+ agent.learn(s,a,s2,r): G#Agent
+ }
+ copy(agents = updatedAgents, state = nextState, pastHistory = currentHistory)
+ }
+ }
+
+ trait GameDomain {
+ domain =>
+ type State
+ type Action
+ type Agent = AbstractAgent[State, Action] // agent supertype
+ type Rewards = RewardFunction[State,Action]
+ }
+ }
diff --git a/test/files/neg/t6902.check b/test/files/neg/t6902.check
new file mode 100644
index 0000000..8ad7fd3
--- /dev/null
+++ b/test/files/neg/t6902.check
@@ -0,0 +1,10 @@
+t6902.scala:4: error: unreachable code
+ case Some(b) => 3 // no warning was emitted
+ ^
+t6902.scala:9: error: unreachable code
+ case Some(b) => 3 // no warning was emitted
+ ^
+t6902.scala:21: error: unreachable code
+ case 1 => 3 // crash
+ ^
+three errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/t6902.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/t6902.flags
diff --git a/test/files/neg/t6902.scala b/test/files/neg/t6902.scala
new file mode 100644
index 0000000..ce5ff8b
--- /dev/null
+++ b/test/files/neg/t6902.scala
@@ -0,0 +1,23 @@
+object Test {
+ Some(Some(1)) collect {
+ case Some(a) => 2
+ case Some(b) => 3 // no warning was emitted
+ }
+
+ (Some(1): @ unchecked) match {
+ case Some(a) => 2
+ case Some(b) => 3 // no warning was emitted
+ }
+
+ // A variation of SI-6011, which eluded the fix
+ // in 2.10.0.
+ //
+ // duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.
+ // at scala.reflect.internal.SymbolTable.abort(SymbolTable.scala:50)
+ // at scala.tools.nsc.Global.abort(Global.scala:249)
+ // at scala.tools.nsc.backend.jvm.GenASM$JPlainBuilder$jcode$.emitSWITCH(GenASM.scala:1850)
+ ((1: Byte): @unchecked @annotation.switch) match {
+ case 1 => 2
+ case 1 => 3 // crash
+ }
+}
diff --git a/test/files/neg/t691.check b/test/files/neg/t691.check
new file mode 100644
index 0000000..77ff7b1
--- /dev/null
+++ b/test/files/neg/t691.check
@@ -0,0 +1,4 @@
+t691.scala:27: error: ambiguous parent class qualifier
+ trait TiC extends super[Arrow].Ti2 with super[AssignArrow].Ti1;
+ ^
+one error found
diff --git a/test/files/neg/bug691.scala b/test/files/neg/t691.scala
similarity index 100%
rename from test/files/neg/bug691.scala
rename to test/files/neg/t691.scala
diff --git a/test/files/neg/t6912.check b/test/files/neg/t6912.check
new file mode 100644
index 0000000..137b651
--- /dev/null
+++ b/test/files/neg/t6912.check
@@ -0,0 +1,4 @@
+t6912.scala:8: error: not found: type Xxxx
+ def test[T]: Xxxx = Foo1[T]
+ ^
+one error found
diff --git a/test/files/neg/t6912.scala b/test/files/neg/t6912.scala
new file mode 100644
index 0000000..f2540ee
--- /dev/null
+++ b/test/files/neg/t6912.scala
@@ -0,0 +1,9 @@
+object Foo1 {
+ def apply[T](a: Int = 0): Nothing = sys.error("")
+ def apply[T](z: String = ""): Nothing = sys.error("")
+}
+
+object Test {
+ // Triggered a cycle in Typers#adapt
+ def test[T]: Xxxx = Foo1[T]
+}
diff --git a/test/files/neg/t692.check b/test/files/neg/t692.check
new file mode 100644
index 0000000..4149366
--- /dev/null
+++ b/test/files/neg/t692.check
@@ -0,0 +1,19 @@
+t692.scala:3: error: not found: type T
+ trait Type[T0] extends Type0[T];
+ ^
+t692.scala:10: error: class Foo takes type parameters
+ case class FooType() extends ClassType[Foo,AnyRef](ObjectType());
+ ^
+t692.scala:13: error: class Foo takes type parameters
+ case class BarType[T3 <: Foo](tpeT : RefType[T3]) extends ClassType[Bar[T3],Foo](FooType);
+ ^
+t692.scala:13: error: class Foo takes type parameters
+ case class BarType[T3 <: Foo](tpeT : RefType[T3]) extends ClassType[Bar[T3],Foo](FooType);
+ ^
+t692.scala:14: error: class Foo takes type parameters
+ implicit def typeOfBar[T4 <: Foo](implicit elem : RefType[T4]) : RefType[Bar[T4]] =
+ ^
+t692.scala:19: error: class Foo takes type parameters
+ class Bar[A <: Foo](implicit tpeA : Type[A]) extends Foo;
+ ^
+6 errors found
diff --git a/test/files/neg/bug692.scala b/test/files/neg/t692.scala
similarity index 100%
rename from test/files/neg/bug692.scala
rename to test/files/neg/t692.scala
diff --git a/test/files/neg/t6928.check b/test/files/neg/t6928.check
new file mode 100644
index 0000000..28b8e38
--- /dev/null
+++ b/test/files/neg/t6928.check
@@ -0,0 +1,7 @@
+t6928.scala:2: error: super constructor cannot be passed a self reference unless parameter is declared by-name
+object B extends A(B)
+ ^
+t6928.scala:3: error: super constructor cannot be passed a self reference unless parameter is declared by-name
+object C extends A(null, null, C)
+ ^
+two errors found
diff --git a/test/files/neg/t6928.scala b/test/files/neg/t6928.scala
new file mode 100644
index 0000000..84bdcde
--- /dev/null
+++ b/test/files/neg/t6928.scala
@@ -0,0 +1,10 @@
+abstract class A( val someAs: A* )
+object B extends A(B)
+object C extends A(null, null, C)
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(B.someAs)
+ println(C.someAs)
+ }
+}
diff --git a/test/files/neg/t693.check b/test/files/neg/t693.check
new file mode 100644
index 0000000..62325b5
--- /dev/null
+++ b/test/files/neg/t693.check
@@ -0,0 +1,4 @@
+t693.scala:4: error: x is already defined as value x
+ val x : Int = 10;
+ ^
+one error found
diff --git a/test/files/neg/bug693.scala b/test/files/neg/t693.scala
similarity index 100%
rename from test/files/neg/bug693.scala
rename to test/files/neg/t693.scala
diff --git a/test/files/neg/t6952.check b/test/files/neg/t6952.check
new file mode 100644
index 0000000..1a591d0
--- /dev/null
+++ b/test/files/neg/t6952.check
@@ -0,0 +1,13 @@
+t6952.scala:2: error: extension of type scala.Dynamic needs to be enabled
+by making the implicit value scala.language.dynamics visible.
+This can be achieved by adding the import clause 'import scala.language.dynamics'
+or by setting the compiler option -language:dynamics.
+See the Scala docs for value scala.language.dynamics for a discussion
+why the feature needs to be explicitly enabled.
+trait B extends Dynamic
+ ^
+t6952.scala:3: error: extension of type scala.Dynamic needs to be enabled
+by making the implicit value scala.language.dynamics visible.
+trait C extends A with Dynamic
+ ^
+two errors found
diff --git a/test/files/neg/t6952.scala b/test/files/neg/t6952.scala
new file mode 100644
index 0000000..257ea3b
--- /dev/null
+++ b/test/files/neg/t6952.scala
@@ -0,0 +1,4 @@
+trait A
+trait B extends Dynamic
+trait C extends A with Dynamic
+trait D extends B
diff --git a/test/files/neg/t6963a.check b/test/files/neg/t6963a.check
new file mode 100644
index 0000000..159896f
--- /dev/null
+++ b/test/files/neg/t6963a.check
@@ -0,0 +1,5 @@
+t6963a.scala:4: error: method scanRight in trait TraversableLike has changed semantics in version 2.9.0:
+The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.
+ List(1,2,3,4,5).scanRight(0)(_+_)
+ ^
+one error found
diff --git a/test/files/neg/t6963a.flags b/test/files/neg/t6963a.flags
new file mode 100644
index 0000000..4c61ed9
--- /dev/null
+++ b/test/files/neg/t6963a.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xmigration:2.7
diff --git a/test/files/neg/t6963a.scala b/test/files/neg/t6963a.scala
new file mode 100644
index 0000000..b3366b2
--- /dev/null
+++ b/test/files/neg/t6963a.scala
@@ -0,0 +1,5 @@
+object Test {
+ import scala.collection.mutable._
+
+ List(1,2,3,4,5).scanRight(0)(_+_)
+}
diff --git a/test/files/neg/t6963b.check b/test/files/neg/t6963b.check
new file mode 100644
index 0000000..7e205a4
--- /dev/null
+++ b/test/files/neg/t6963b.check
@@ -0,0 +1,13 @@
+t6963b.scala:2: error: An Array will no longer match as Seq[_].
+ def f1(x: Any) = x.isInstanceOf[Seq[_]]
+ ^
+t6963b.scala:4: error: An Array will no longer match as Seq[_].
+ case _: Seq[_] => true
+ ^
+t6963b.scala:16: error: An Array will no longer match as Seq[_].
+ case (Some(_: Seq[_]), Nil, _) => 1
+ ^
+t6963b.scala:17: error: An Array will no longer match as Seq[_].
+ case (None, List(_: List[_], _), _) => 2
+ ^
+four errors found
diff --git a/test/files/neg/t6963b.flags b/test/files/neg/t6963b.flags
new file mode 100644
index 0000000..83caa2b
--- /dev/null
+++ b/test/files/neg/t6963b.flags
@@ -0,0 +1 @@
+-Xmigration:2.7 -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t6963b.scala b/test/files/neg/t6963b.scala
new file mode 100644
index 0000000..3cfa8f0
--- /dev/null
+++ b/test/files/neg/t6963b.scala
@@ -0,0 +1,20 @@
+object Test {
+ def f1(x: Any) = x.isInstanceOf[Seq[_]]
+ def f2(x: Any) = x match {
+ case _: Seq[_] => true
+ case _ => false
+ }
+
+ def f3(x: Any) = x match {
+ case _: Array[_] => true
+ case _ => false
+ }
+
+ def f4(x: Any) = x.isInstanceOf[Traversable[_]]
+
+ def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match {
+ case (Some(_: Seq[_]), Nil, _) => 1
+ case (None, List(_: List[_], _), _) => 2
+ case _ => 3
+ }
+}
diff --git a/test/files/neg/t696a.check b/test/files/neg/t696a.check
new file mode 100644
index 0000000..490fc1a
--- /dev/null
+++ b/test/files/neg/t696a.check
@@ -0,0 +1,5 @@
+t696a.scala:4: error: diverging implicit expansion for type TypeUtil0.Type[Any]
+starting with method WithType in object TypeUtil0
+ as[Any](null);
+ ^
+one error found
diff --git a/test/files/jvm/bug680.check b/test/files/neg/t696a.flags
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/neg/t696a.flags
diff --git a/test/files/neg/bug696.scala b/test/files/neg/t696a.scala
similarity index 100%
rename from test/files/neg/bug696.scala
rename to test/files/neg/t696a.scala
diff --git a/test/files/neg/t696b.check b/test/files/neg/t696b.check
new file mode 100644
index 0000000..fcdb544
--- /dev/null
+++ b/test/files/neg/t696b.check
@@ -0,0 +1,9 @@
+t696b.scala:5: error: diverging implicit expansion for type TypeUtil0.Type[Any]
+starting with method WithType in object TypeUtil0
+ as[Any](null)
+ ^
+t696b.scala:6: error: diverging implicit expansion for type TypeUtil0.Type[X]
+starting with method WithType in object TypeUtil0
+ def foo[X]() = as[X](null)
+ ^
+two errors found
diff --git a/test/files/neg/t696b.flags b/test/files/neg/t696b.flags
new file mode 100644
index 0000000..d564f2b
--- /dev/null
+++ b/test/files/neg/t696b.flags
@@ -0,0 +1 @@
+-Xdivergence211
\ No newline at end of file
diff --git a/test/files/neg/t696b.scala b/test/files/neg/t696b.scala
new file mode 100644
index 0000000..ca76f7e
--- /dev/null
+++ b/test/files/neg/t696b.scala
@@ -0,0 +1,7 @@
+object TypeUtil0 {
+ trait Type[+T]
+ implicit def WithType[S,T](implicit tpeS : Type[S], tpeT : Type[T]) : Type[S with T] = null
+ def as[T](x : Any)(implicit tpe : Type[T]) = null
+ as[Any](null)
+ def foo[X]() = as[X](null)
+}
diff --git a/test/files/neg/t700.check b/test/files/neg/t700.check
new file mode 100644
index 0000000..4c0a2e5
--- /dev/null
+++ b/test/files/neg/t700.check
@@ -0,0 +1,4 @@
+t700.scala:6: error: method foobar in trait Foo is accessed from super. It may not be abstract unless it is overridden by a member declared `abstract' and `override'
+ def foobar: Unit = super.foobar
+ ^
+one error found
diff --git a/test/files/neg/bug700.scala b/test/files/neg/t700.scala
similarity index 100%
rename from test/files/neg/bug700.scala
rename to test/files/neg/t700.scala
diff --git a/test/files/neg/t708.check b/test/files/neg/t708.check
new file mode 100644
index 0000000..4983aab
--- /dev/null
+++ b/test/files/neg/t708.check
@@ -0,0 +1,5 @@
+t708.scala:8: error: overriding type S in trait X with bounds <: A.this.T;
+ type S has incompatible type
+ override private[A] type S = Any;
+ ^
+one error found
diff --git a/test/files/neg/bug708.scala b/test/files/neg/t708.scala
similarity index 100%
rename from test/files/neg/bug708.scala
rename to test/files/neg/t708.scala
diff --git a/test/files/neg/t712.check b/test/files/neg/t712.check
new file mode 100644
index 0000000..6819dc0
--- /dev/null
+++ b/test/files/neg/t712.check
@@ -0,0 +1,4 @@
+t712.scala:10: error: value self is not a member of B.this.ParentImpl
+ implicit def coerce(p : ParentImpl) = p.self;
+ ^
+one error found
diff --git a/test/files/neg/bug712.scala b/test/files/neg/t712.scala
similarity index 100%
rename from test/files/neg/bug712.scala
rename to test/files/neg/t712.scala
diff --git a/test/files/neg/t715.check b/test/files/neg/t715.check
new file mode 100644
index 0000000..2c01047
--- /dev/null
+++ b/test/files/neg/t715.check
@@ -0,0 +1,4 @@
+t715.scala:12: error: method chilren in trait NodeImpl is accessed from super. It may not be abstract unless it is overridden by a member declared `abstract' and `override'
+ override def children = super.chilren;
+ ^
+one error found
diff --git a/test/files/neg/bug715.scala b/test/files/neg/t715.scala
similarity index 100%
rename from test/files/neg/bug715.scala
rename to test/files/neg/t715.scala
diff --git a/test/files/neg/t7166.check b/test/files/neg/t7166.check
new file mode 100644
index 0000000..c87198c
--- /dev/null
+++ b/test/files/neg/t7166.check
@@ -0,0 +1,4 @@
+Test_2.scala:2: error: silent = true does work!
+ println(implicitly[Complex[Foo]])
+ ^
+one error found
diff --git a/test/files/neg/t7166/Impls_Macros_1.scala b/test/files/neg/t7166/Impls_Macros_1.scala
new file mode 100644
index 0000000..62a1565
--- /dev/null
+++ b/test/files/neg/t7166/Impls_Macros_1.scala
@@ -0,0 +1,26 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+trait Complex[T]
+
+class Foo
+
+object Complex {
+ def impl[T: c.WeakTypeTag](c: Context): c.Expr[Complex[T]] = {
+ import c.universe._
+ def shout(msg: String) = {
+ val cannotShutMeUp = c.asInstanceOf[scala.reflect.macros.runtime.Context].universe.currentRun.currentUnit.error _
+ cannotShutMeUp(c.enclosingPosition.asInstanceOf[scala.reflect.internal.util.Position], msg)
+ }
+ try {
+ val complexOfT = appliedType(typeOf[Complex[_]], List(weakTypeOf[T]))
+ val infiniteRecursion = c.inferImplicitValue(complexOfT, silent = true)
+ shout("silent = true does work!")
+ } catch {
+ case ex: Exception => shout(ex.toString)
+ }
+ c.literalNull
+ }
+
+ implicit def genComplex[T]: Complex[T] = macro impl[T]
+}
diff --git a/test/files/neg/t7166/Test_2.scala b/test/files/neg/t7166/Test_2.scala
new file mode 100644
index 0000000..dcc4593
--- /dev/null
+++ b/test/files/neg/t7166/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println(implicitly[Complex[Foo]])
+}
\ No newline at end of file
diff --git a/test/files/neg/t7171.check b/test/files/neg/t7171.check
new file mode 100644
index 0000000..8bdf081
--- /dev/null
+++ b/test/files/neg/t7171.check
@@ -0,0 +1,7 @@
+t7171.scala:2: error: The outer reference in this type test cannot be checked at run time.
+ final case class A()
+ ^
+t7171.scala:9: error: The outer reference in this type test cannot be checked at run time.
+ case _: A => true; case _ => false
+ ^
+two errors found
diff --git a/test/files/neg/t7171.flags b/test/files/neg/t7171.flags
new file mode 100644
index 0000000..464cc20
--- /dev/null
+++ b/test/files/neg/t7171.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -unchecked
\ No newline at end of file
diff --git a/test/files/neg/t7171.scala b/test/files/neg/t7171.scala
new file mode 100644
index 0000000..534b207
--- /dev/null
+++ b/test/files/neg/t7171.scala
@@ -0,0 +1,11 @@
+trait T {
+ final case class A()
+
+ // Was:
+ // error: scrutinee is incompatible with pattern type;
+ // found : T.this.A
+ // required: T#A
+ def foo(a: T#A) = a match {
+ case _: A => true; case _ => false
+ }
+}
diff --git a/test/files/neg/t7171b.check b/test/files/neg/t7171b.check
new file mode 100644
index 0000000..bd6b2bc
--- /dev/null
+++ b/test/files/neg/t7171b.check
@@ -0,0 +1,10 @@
+t7171b.scala:2: error: The outer reference in this type test cannot be checked at run time.
+ final case class A()
+ ^
+t7171b.scala:8: error: The outer reference in this type test cannot be checked at run time.
+ case _: A => true; case _ => false
+ ^
+t7171b.scala:13: error: The outer reference in this type test cannot be checked at run time.
+ case _: A => true; case _ => false
+ ^
+three errors found
diff --git a/test/files/neg/t7171b.flags b/test/files/neg/t7171b.flags
new file mode 100644
index 0000000..464cc20
--- /dev/null
+++ b/test/files/neg/t7171b.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -unchecked
\ No newline at end of file
diff --git a/test/files/neg/t7171b.scala b/test/files/neg/t7171b.scala
new file mode 100644
index 0000000..53c7787
--- /dev/null
+++ b/test/files/neg/t7171b.scala
@@ -0,0 +1,15 @@
+trait T {
+ final case class A()
+}
+
+final class U extends T {
+ // this match should also not be deemed impossible
+ def foo(a: U#A) = a match {
+ case _: A => true; case _ => false
+ }
+
+ // this match should also not be deemed impossible
+ def bar(a: T#A) = a match {
+ case _: A => true; case _ => false
+ }
+}
diff --git a/test/files/neg/t7185.check b/test/files/neg/t7185.check
new file mode 100644
index 0000000..46f2cc7
--- /dev/null
+++ b/test/files/neg/t7185.check
@@ -0,0 +1,7 @@
+t7185.scala:2: error: overloaded method value apply with alternatives:
+ (f: scala.xml.Node => Boolean)scala.xml.NodeSeq <and>
+ (i: Int)scala.xml.Node
+ cannot be applied to ()
+ <e></e>()
+ ^
+one error found
diff --git a/test/files/neg/t7185.scala b/test/files/neg/t7185.scala
new file mode 100644
index 0000000..2f9284b
--- /dev/null
+++ b/test/files/neg/t7185.scala
@@ -0,0 +1,3 @@
+object Test {
+ <e></e>()
+}
diff --git a/test/files/neg/t7235.check b/test/files/neg/t7235.check
new file mode 100644
index 0000000..357a3df
--- /dev/null
+++ b/test/files/neg/t7235.check
@@ -0,0 +1,4 @@
+t7235.scala:9: error: implementation restriction: cannot reify refinement type trees with non-empty bodies
+ val Block(List(ValDef(_, _, tpt: CompoundTypeTree, _)), _) = reify{ val x: C { def x: Int } = ??? }.tree
+ ^
+one error found
diff --git a/test/files/neg/t7235.scala b/test/files/neg/t7235.scala
new file mode 100644
index 0000000..cfebad3
--- /dev/null
+++ b/test/files/neg/t7235.scala
@@ -0,0 +1,14 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+class C
+
+object Test extends App {
+ val Block(List(ValDef(_, _, tpt: CompoundTypeTree, _)), _) = reify{ val x: C { def x: Int } = ??? }.tree
+ println(tpt)
+ println(tpt.templ.parents)
+ println(tpt.templ.self)
+ println(tpt.templ.body)
+}
diff --git a/test/files/neg/t7238.check b/test/files/neg/t7238.check
new file mode 100644
index 0000000..b87f83f
--- /dev/null
+++ b/test/files/neg/t7238.check
@@ -0,0 +1,6 @@
+t7238.scala:6: error: type mismatch;
+ found : Seq[Any]
+ required: Seq[String]
+ c.c()(Seq[Any](): _*)
+ ^
+one error found
diff --git a/test/files/neg/t7238.scala b/test/files/neg/t7238.scala
new file mode 100644
index 0000000..d42dc8d
--- /dev/null
+++ b/test/files/neg/t7238.scala
@@ -0,0 +1,7 @@
+trait Main {
+ trait C {
+ def c(x: Any = 0)(bs: String*)
+ }
+ def c: C
+ c.c()(Seq[Any](): _*)
+}
diff --git a/test/files/neg/t7251.check b/test/files/neg/t7251.check
new file mode 100644
index 0000000..8df8984
--- /dev/null
+++ b/test/files/neg/t7251.check
@@ -0,0 +1,4 @@
+B_2.scala:5: error: object s.Outer$Triple$ is not a value
+ println( s.Outer$Triple$ )
+ ^
+one error found
diff --git a/test/files/neg/t7251/A_1.scala b/test/files/neg/t7251/A_1.scala
new file mode 100644
index 0000000..d05373e
--- /dev/null
+++ b/test/files/neg/t7251/A_1.scala
@@ -0,0 +1,10 @@
+package s
+
+object Outer {
+ type Triple[+A, +B, +C] = Tuple3[A, B, C]
+ object Triple {
+ def apply[A, B, C](x: A, y: B, z: C) = Tuple3(x, y, z)
+ def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x)
+ }
+}
+
diff --git a/test/files/neg/t7251/B_2.scala b/test/files/neg/t7251/B_2.scala
new file mode 100644
index 0000000..eb59b30
--- /dev/null
+++ b/test/files/neg/t7251/B_2.scala
@@ -0,0 +1,7 @@
+package s
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println( s.Outer$Triple$ )
+ }
+}
diff --git a/test/files/neg/t7259.check b/test/files/neg/t7259.check
new file mode 100644
index 0000000..0ad627f
--- /dev/null
+++ b/test/files/neg/t7259.check
@@ -0,0 +1,7 @@
+t7259.scala:1: error: not found: type xxxxx
+ at xxxxx // error: not found: type xxxx
+ ^
+t7259.scala:8: error: type xxxxx is not a member of package annotation
+ at annotation.xxxxx // error: not found: type scala
+ ^
+two errors found
diff --git a/test/files/neg/t7259.scala b/test/files/neg/t7259.scala
new file mode 100644
index 0000000..0fdfe18
--- /dev/null
+++ b/test/files/neg/t7259.scala
@@ -0,0 +1,9 @@
+ at xxxxx // error: not found: type xxxx
+class Ok
+
+//
+// This had the wrong error message in 2.9 and 2.10.
+//
+
+ at annotation.xxxxx // error: not found: type scala
+class WrongErrorMessage
diff --git a/test/files/neg/t7285.check b/test/files/neg/t7285.check
new file mode 100644
index 0000000..108f429
--- /dev/null
+++ b/test/files/neg/t7285.check
@@ -0,0 +1,13 @@
+t7285.scala:15: error: match may not be exhaustive.
+It would fail on the following input: (Up, Down)
+ (d1, d2) match {
+ ^
+t7285.scala:33: error: match may not be exhaustive.
+It would fail on the following input: Down
+ (d1) match {
+ ^
+t7285.scala:51: error: match may not be exhaustive.
+It would fail on the following input: (Up, Down)
+ (d1, d2) match {
+ ^
+three errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/t7285.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/t7285.flags
diff --git a/test/files/neg/t7285.scala b/test/files/neg/t7285.scala
new file mode 100644
index 0000000..14121d9
--- /dev/null
+++ b/test/files/neg/t7285.scala
@@ -0,0 +1,55 @@
+sealed abstract class Base
+
+
+object Test1 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base {
+ }
+
+ case object Up extends Base {
+ }
+
+ (d1: Base, d2: Base) =>
+ (d1, d2) match {
+ case (Up, Up) | (Down, Down) => false
+ case (Down, Up) => true
+ }
+ }
+}
+
+object Test2 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base {
+ }
+
+ case object Up extends Base {
+ }
+
+ (d1: Base, d2: Base) =>
+ (d1) match {
+ case Test2.Base.Up => false
+ }
+ }
+}
+
+
+object Test4 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base
+
+ case object Up extends Base
+ }
+
+ import Test4.Base._
+ (d1: Base, d2: Base) =>
+ (d1, d2) match {
+ case (Up, Up) | (Down, Down) => false
+ case (Down, Test4.Base.Up) => true
+ }
+}
diff --git a/test/files/neg/t7289.check b/test/files/neg/t7289.check
new file mode 100644
index 0000000..e4aeebb
--- /dev/null
+++ b/test/files/neg/t7289.check
@@ -0,0 +1,4 @@
+t7289.scala:8: error: could not find implicit value for parameter e: Test.Schtroumpf[scala.collection.immutable.Nil.type]
+ implicitly[Schtroumpf[Nil.type]]
+ ^
+one error found
diff --git a/test/files/neg/t7289.scala b/test/files/neg/t7289.scala
new file mode 100644
index 0000000..f4ed3da
--- /dev/null
+++ b/test/files/neg/t7289.scala
@@ -0,0 +1,39 @@
+object Test extends App {
+ trait Schtroumpf[T]
+
+ implicit def schtroumpf[T, U <: Coll[T], Coll[X] <: Traversable[X]]
+ (implicit minorSchtroumpf: Schtroumpf[T]): Schtroumpf[U] = ???
+
+ implicit val qoo: Schtroumpf[Int] = new Schtroumpf[Int]{}
+ implicitly[Schtroumpf[Nil.type]]
+}
+
+/*
+info1 = {scala.tools.nsc.typechecker.Implicits$ImplicitInfo at 3468}"qoo: => Test.Schtroumpf[Int]"
+info2 = {scala.tools.nsc.typechecker.Implicits$ImplicitInfo at 3469}"schtroumpf: [T, U <: Coll[T], Coll[_] <: Traversable[_]](implicit minorSchtroumpf: Test.Schtroumpf[T])Test.Schtroumpf[U]"
+isStrictlyMoreSpecific(info1, info2)
+ isSubType(Test.Schtroumpf[Int], Test.Schtroumpf[U] forSome { T; U <: Coll[T]; Coll[_] <: Traversable[_] })
+ isAsSpecificValueType(Test.Schtroumpf[Int], Test.Schtroumpf[U], undef2 = List(type T, type U, type Coll))
+
+ val et: ExistentialType = Test.Schtroumpf[U] forSome { T; U <: Coll[T]; Coll[_] <: Traversable[_] }
+ val tp1 = Test.Schtroumpf[Int]
+ et.withTypeVars(isSubType(tp1, _, depth))
+ solve()
+ tvars = tList(=?Nothing, =?Int, =?=?Int)
+
+
+[ create] ?T ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ create] ?U ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ create] ?Coll ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ setInst] Nothing ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], T=Nothing )
+[ setInst] scala.collection.immutable.Nil.type( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], U=scala.collection.immutable.Nil.type )
+[ setInst] =?scala.collection.immutable.Nil.type( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], Coll==?scala.collection.immutable.Nil.type )
+[ create] ?T ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ setInst] Int ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], T=Int )
+[ create] ?T ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ create] ?U ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ create] ?Coll ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ setInst] Nothing ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], T=Nothing )
+[ setInst] Int ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], U=Int )
+[ setInst] =?Int ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], Coll==?Int )
+*/
\ No newline at end of file
diff --git a/test/files/neg/t7289_status_quo.check b/test/files/neg/t7289_status_quo.check
new file mode 100644
index 0000000..31c072e
--- /dev/null
+++ b/test/files/neg/t7289_status_quo.check
@@ -0,0 +1,22 @@
+t7289_status_quo.scala:9: error: could not find implicit value for parameter e: Test1.Ext[List[Int]]
+ implicitly[Ext[List[Int]]] // fails - not found
+ ^
+t7289_status_quo.scala:11: error: could not find implicit value for parameter e: Test1.Ext[List[List[List[Int]]]]
+ implicitly[Ext[List[List[List[Int]]]]] // fails - not found
+ ^
+t7289_status_quo.scala:15: error: ambiguous implicit values:
+ both method f in object Test1 of type [A, Coll <: CC[A], CC[X] <: Traversable[X]](implicit xi: Test1.Ext[A])Test1.Ext[Coll]
+ and value m in object Test1 of type => Test1.Ext[List[List[Int]]]
+ match expected type Test1.Ext[_ <: List[List[Int]]]
+ implicitly[Ext[_ <: List[List[Int]]]] // fails - ambiguous
+ ^
+t7289_status_quo.scala:20: error: could not find implicit value for parameter e: Test1.ExtCov[List[Int]]
+ implicitly[ExtCov[List[Int]]] // fails - not found
+ ^
+t7289_status_quo.scala:21: error: could not find implicit value for parameter e: Test1.ExtCov[List[List[Int]]]
+ implicitly[ExtCov[List[List[Int]]]] // fails - not found
+ ^
+t7289_status_quo.scala:22: error: could not find implicit value for parameter e: Test1.ExtCov[List[List[List[Int]]]]
+ implicitly[ExtCov[List[List[List[Int]]]]] // fails - not found
+ ^
+6 errors found
diff --git a/test/files/neg/t7289_status_quo.scala b/test/files/neg/t7289_status_quo.scala
new file mode 100644
index 0000000..3962142
--- /dev/null
+++ b/test/files/neg/t7289_status_quo.scala
@@ -0,0 +1,23 @@
+// record the status quo after this fix
+// not clear to @adriaanm why an upper-bounded existential in an invariant position
+// is different from putting that upper bound in a covariant position
+object Test1 {
+ trait Ext[T]
+ implicit def f[A, Coll <: CC[A], CC[X] <: Traversable[X]](implicit xi: Ext[A]): Ext[Coll] = ???
+ implicit val m: Ext[List[List[Int]]] = new Ext[List[List[Int]]]{}
+
+ implicitly[Ext[List[Int]]] // fails - not found
+ implicitly[Ext[List[List[Int]]]] // compiles
+ implicitly[Ext[List[List[List[Int]]]]] // fails - not found
+
+ // Making Ext[+T] should incur the same behavior as these. (so says @paulp)
+ implicitly[Ext[_ <: List[Int]]] // compiles
+ implicitly[Ext[_ <: List[List[Int]]]] // fails - ambiguous
+ implicitly[Ext[_ <: List[List[List[Int]]]]] // compiles
+
+ // But, we currently get:
+ trait ExtCov[+T]
+ implicitly[ExtCov[List[Int]]] // fails - not found
+ implicitly[ExtCov[List[List[Int]]]] // fails - not found
+ implicitly[ExtCov[List[List[List[Int]]]]] // fails - not found
+}
\ No newline at end of file
diff --git a/test/files/neg/t729.check b/test/files/neg/t729.check
new file mode 100644
index 0000000..fb858dc
--- /dev/null
+++ b/test/files/neg/t729.check
@@ -0,0 +1,6 @@
+t729.scala:20: error: type mismatch;
+ found : ScalaParserAutoEdit.this.NodeImpl(in trait Parser)
+ required: ScalaParserAutoEdit.this.NodeImpl(in trait ScalaParserAutoEdit)
+ val yyy : NodeImpl = link.from;
+ ^
+one error found
diff --git a/test/files/neg/bug729.scala b/test/files/neg/t729.scala
similarity index 100%
rename from test/files/neg/bug729.scala
rename to test/files/neg/t729.scala
diff --git a/test/files/neg/t7290.check b/test/files/neg/t7290.check
new file mode 100644
index 0000000..85bedba
--- /dev/null
+++ b/test/files/neg/t7290.check
@@ -0,0 +1,10 @@
+t7290.scala:4: error: Pattern contains duplicate alternatives: 0
+ case 0 | 0 => 0
+ ^
+t7290.scala:5: error: Pattern contains duplicate alternatives: 2, 3
+ case 2 | 2 | 2 | 3 | 2 | 3 => 0
+ ^
+t7290.scala:6: error: Pattern contains duplicate alternatives: 4
+ case 4 | (_ @ 4) => 0
+ ^
+three errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/t7290.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/t7290.flags
diff --git a/test/files/neg/t7290.scala b/test/files/neg/t7290.scala
new file mode 100644
index 0000000..b9db7f7
--- /dev/null
+++ b/test/files/neg/t7290.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+ val y = (0: Int) match {
+ case 1 => 1
+ case 0 | 0 => 0
+ case 2 | 2 | 2 | 3 | 2 | 3 => 0
+ case 4 | (_ @ 4) => 0
+ case _ => -1
+ }
+ assert(y == 0, y)
+}
diff --git a/test/files/neg/t7299.check b/test/files/neg/t7299.check
new file mode 100644
index 0000000..74340c4
--- /dev/null
+++ b/test/files/neg/t7299.check
@@ -0,0 +1,7 @@
+t7299.scala:4: error: implementation restricts functions to 22 parameters
+ val eta1 = f _
+ ^
+t7299.scala:5: error: implementation restricts functions to 22 parameters
+ val eta2 = g[Any] _
+ ^
+two errors found
diff --git a/test/files/neg/t7299.scala b/test/files/neg/t7299.scala
new file mode 100644
index 0000000..f3aae5c
--- /dev/null
+++ b/test/files/neg/t7299.scala
@@ -0,0 +1,6 @@
+object Test {
+ def f(a1: Int, a2: Int, a3: Int, a4: Int, a5: Int, a6: Int, a7: Int, a8: Int, a9: Int, a10: Int, a11: Int, a12: Int, a13: Int, a14: Int, a15: Int, a16: Int, a17: Int, a18: Int, a19: Int, a20: Int, a21: Int, a22: Int, a23: Int) = 0
+ def g[A](a1: Int, a2: Int, a3: Int, a4: Int, a5: Int, a6: Int, a7: Int, a8: Int, a9: Int, a10: Int, a11: Int, a12: Int, a13: Int, a14: Int, a15: Int, a16: Int, a17: Int, a18: Int, a19: Int, a20: Int, a21: Int, a22: Int, a23: Int) = 0
+ val eta1 = f _
+ val eta2 = g[Any] _
+}
diff --git a/test/files/neg/t7325.check b/test/files/neg/t7325.check
new file mode 100644
index 0000000..709ab6d
--- /dev/null
+++ b/test/files/neg/t7325.check
@@ -0,0 +1,19 @@
+t7325.scala:2: error: percent signs not directly following splicees must be escaped
+ println(f"%")
+ ^
+t7325.scala:4: error: percent signs not directly following splicees must be escaped
+ println(f"%%%")
+ ^
+t7325.scala:6: error: percent signs not directly following splicees must be escaped
+ println(f"%%%%%")
+ ^
+t7325.scala:16: error: wrong conversion string
+ println(f"${0}%")
+ ^
+t7325.scala:19: error: percent signs not directly following splicees must be escaped
+ println(f"${0}%%%d")
+ ^
+t7325.scala:21: error: percent signs not directly following splicees must be escaped
+ println(f"${0}%%%%%d")
+ ^
+6 errors found
diff --git a/test/files/neg/t7325.scala b/test/files/neg/t7325.scala
new file mode 100644
index 0000000..adfd8dd
--- /dev/null
+++ b/test/files/neg/t7325.scala
@@ -0,0 +1,25 @@
+object Test extends App {
+ println(f"%")
+ println(f"%%")
+ println(f"%%%")
+ println(f"%%%%")
+ println(f"%%%%%")
+ println(f"%%%%%%")
+
+ println(f"%%n")
+ println(f"%%%n")
+ println(f"%%%%n")
+ println(f"%%%%%n")
+ println(f"%%%%%%n")
+ println(f"%%%%%%%n")
+
+ println(f"${0}%")
+ println(f"${0}%d")
+ println(f"${0}%%d")
+ println(f"${0}%%%d")
+ println(f"${0}%%%%d")
+ println(f"${0}%%%%%d")
+
+ println(f"${0}%n")
+ println(f"${0}%d%n")
+}
\ No newline at end of file
diff --git a/test/files/neg/t7330.check b/test/files/neg/t7330.check
new file mode 100644
index 0000000..b96d656
--- /dev/null
+++ b/test/files/neg/t7330.check
@@ -0,0 +1,5 @@
+t7330.scala:4: error: pattern must be a value: Y[_]
+Note: if you intended to match against the class, try `case _: Y[_]`
+ 0 match { case Y[_] => }
+ ^
+one error found
diff --git a/test/files/neg/t7330.scala b/test/files/neg/t7330.scala
new file mode 100644
index 0000000..13a943a
--- /dev/null
+++ b/test/files/neg/t7330.scala
@@ -0,0 +1,5 @@
+class Y[T]
+class Test {
+ // TypeTree is not a valid tree for a pattern
+ 0 match { case Y[_] => }
+}
\ No newline at end of file
diff --git a/test/files/neg/t7369.check b/test/files/neg/t7369.check
new file mode 100644
index 0000000..4f101e1
--- /dev/null
+++ b/test/files/neg/t7369.check
@@ -0,0 +1,13 @@
+t7369.scala:6: error: unreachable code
+ case Tuple1(X) => // unreachable
+ ^
+t7369.scala:13: error: unreachable code
+ case Tuple1(true) => // unreachable
+ ^
+t7369.scala:31: error: unreachable code
+ case Tuple1(X) => // unreachable
+ ^
+t7369.scala:40: error: unreachable code
+ case Tuple1(null) => // unreachable
+ ^
+four errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/t7369.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/t7369.flags
diff --git a/test/files/neg/t7369.scala b/test/files/neg/t7369.scala
new file mode 100644
index 0000000..87ddfe9
--- /dev/null
+++ b/test/files/neg/t7369.scala
@@ -0,0 +1,43 @@
+object Test {
+ val X, Y = true
+ (null: Tuple1[Boolean]) match {
+ case Tuple1(X) =>
+ case Tuple1(Y) =>
+ case Tuple1(X) => // unreachable
+ case _ =>
+ }
+
+ (null: Tuple1[Boolean]) match {
+ case Tuple1(true) =>
+ case Tuple1(false) =>
+ case Tuple1(true) => // unreachable
+ case _ =>
+ }
+}
+
+
+sealed abstract class B;
+case object True extends B;
+case object False extends B;
+
+object Test2 {
+
+ val X: B = True
+ val Y: B = False
+
+ (null: Tuple1[B]) match {
+ case Tuple1(X) =>
+ case Tuple1(Y) =>
+ case Tuple1(X) => // unreachable
+ case _ =>
+ }
+}
+
+object Test3 {
+ (null: Tuple1[B]) match {
+ case Tuple1(null) =>
+ case Tuple1(True) =>
+ case Tuple1(null) => // unreachable
+ case _ =>
+ }
+}
diff --git a/test/files/neg/t7385.check b/test/files/neg/t7385.check
new file mode 100644
index 0000000..70d3c3f
--- /dev/null
+++ b/test/files/neg/t7385.check
@@ -0,0 +1,10 @@
+t7385.scala:2: error: '(' expected but identifier found.
+ do { println("bippy") } while i<10
+ ^
+t7385.scala:6: error: '(' expected but identifier found.
+ while i<10 { () }
+ ^
+t7385.scala:7: error: illegal start of simple expression
+}
+^
+three errors found
diff --git a/test/files/neg/t7385.scala b/test/files/neg/t7385.scala
new file mode 100644
index 0000000..a7f8010
--- /dev/null
+++ b/test/files/neg/t7385.scala
@@ -0,0 +1,7 @@
+object Bar {
+ do { println("bippy") } while i<10
+}
+
+object Foo {
+ while i<10 { () }
+}
diff --git a/test/files/neg/t7388.check b/test/files/neg/t7388.check
new file mode 100644
index 0000000..0a29e04
--- /dev/null
+++ b/test/files/neg/t7388.check
@@ -0,0 +1,4 @@
+t7388.scala:1: error: doesnotexist is not an enclosing class
+class Test private[doesnotexist]()
+ ^
+one error found
diff --git a/test/files/neg/t7388.scala b/test/files/neg/t7388.scala
new file mode 100644
index 0000000..9ce9ea1
--- /dev/null
+++ b/test/files/neg/t7388.scala
@@ -0,0 +1 @@
+class Test private[doesnotexist]()
diff --git a/test/files/neg/t742.check b/test/files/neg/t742.check
index f587948..d355715 100644
--- a/test/files/neg/t742.check
+++ b/test/files/neg/t742.check
@@ -1,5 +1,6 @@
t742.scala:5: error: kinds of the type arguments (Crash._1,Crash._2,Any) do not conform to the expected kinds of the type parameters (type m,type n,type z).
-Crash._1's type parameters do not match type m's expected parameters: type s1 has one type parameter, but type n has two
+Crash._1's type parameters do not match type m's expected parameters:
+type s1 has one type parameter, but type n has two
type p = mul[_1, _2, Any] // mul[_1, _1, Any] needs -Yrecursion
^
one error found
diff --git a/test/files/neg/t7473.check b/test/files/neg/t7473.check
new file mode 100644
index 0000000..bc8c29d
--- /dev/null
+++ b/test/files/neg/t7473.check
@@ -0,0 +1,7 @@
+t7473.scala:6: error: '<-' expected but '=' found.
+ (for (x = Option(i); if x == j) yield 42) toList
+ ^
+t7473.scala:6: error: illegal start of simple expression
+ (for (x = Option(i); if x == j) yield 42) toList
+ ^
+two errors found
diff --git a/test/files/neg/t7473.scala b/test/files/neg/t7473.scala
new file mode 100644
index 0000000..593231d
--- /dev/null
+++ b/test/files/neg/t7473.scala
@@ -0,0 +1,7 @@
+
+object Foo {
+ val i,j = 3
+ //for (x = Option(i); if x == j) yield 42 //t7473.scala:4: error: '<-' expected but '=' found.
+ // evil postfix!
+ (for (x = Option(i); if x == j) yield 42) toList
+}
diff --git a/test/files/neg/t750.check b/test/files/neg/t750.check
new file mode 100644
index 0000000..c17ca33
--- /dev/null
+++ b/test/files/neg/t750.check
@@ -0,0 +1,15 @@
+Test_2.scala:4: error: type mismatch;
+ found : Array[Int]
+ required: Array[? with Object]
+Note: Int >: ? with Object, but class Array is invariant in type T.
+You may wish to investigate a wildcard type such as `_ >: ? with Object`. (SLS 3.2.10)
+ AO_1.f(a)
+ ^
+Test_2.scala:5: error: type mismatch;
+ found : Array[Int]
+ required: Array[Int]
+Note: Int >: Int, but class Array is invariant in type T.
+You may wish to investigate a wildcard type such as `_ >: Int`. (SLS 3.2.10)
+ AO_1.f[Int](a)
+ ^
+two errors found
diff --git a/test/files/neg/t750/AO_1.java b/test/files/neg/t750/AO_1.java
new file mode 100644
index 0000000..4c7360e
--- /dev/null
+++ b/test/files/neg/t750/AO_1.java
@@ -0,0 +1,5 @@
+public class AO_1 {
+ public static <T> void f(T[] ar0) {
+ System.out.println(ar0);
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t750/Test_2.scala b/test/files/neg/t750/Test_2.scala
new file mode 100644
index 0000000..8097743
--- /dev/null
+++ b/test/files/neg/t750/Test_2.scala
@@ -0,0 +1,6 @@
+// t750
+object Test extends App {
+ val a = Array(1, 2, 3)
+ AO_1.f(a)
+ AO_1.f[Int](a)
+}
diff --git a/test/files/neg/t7507.check b/test/files/neg/t7507.check
new file mode 100644
index 0000000..d402869
--- /dev/null
+++ b/test/files/neg/t7507.check
@@ -0,0 +1,4 @@
+t7507.scala:6: error: value bippy in trait Cake cannot be accessed in Cake
+ locally(bippy)
+ ^
+one error found
diff --git a/test/files/neg/t7507.scala b/test/files/neg/t7507.scala
new file mode 100644
index 0000000..1b4756d
--- /dev/null
+++ b/test/files/neg/t7507.scala
@@ -0,0 +1,7 @@
+trait Cake extends Slice {
+ private[this] val bippy = ()
+}
+
+trait Slice { self: Cake =>
+ locally(bippy)
+}
diff --git a/test/files/neg/t7509.check b/test/files/neg/t7509.check
new file mode 100644
index 0000000..eaa6303
--- /dev/null
+++ b/test/files/neg/t7509.check
@@ -0,0 +1,12 @@
+t7509.scala:3: error: inferred type arguments [Int] do not conform to method crash's type parameter bounds [R <: AnyRef]
+ crash(42)
+ ^
+t7509.scala:3: error: type mismatch;
+ found : Int(42)
+ required: R
+ crash(42)
+ ^
+t7509.scala:3: error: could not find implicit value for parameter ev: R
+ crash(42)
+ ^
+three errors found
diff --git a/test/files/neg/t7509.scala b/test/files/neg/t7509.scala
new file mode 100644
index 0000000..3cba801
--- /dev/null
+++ b/test/files/neg/t7509.scala
@@ -0,0 +1,4 @@
+object NMWE {
+ def crash[R <: AnyRef](f: R)(implicit ev: R): Any = ???
+ crash(42)
+}
diff --git a/test/files/neg/t750b.check b/test/files/neg/t750b.check
new file mode 100644
index 0000000..72a2491
--- /dev/null
+++ b/test/files/neg/t750b.check
@@ -0,0 +1,15 @@
+Test.scala:4: error: type mismatch;
+ found : Array[Int]
+ required: Array[? with Object]
+Note: Int >: ? with Object, but class Array is invariant in type T.
+You may wish to investigate a wildcard type such as `_ >: ? with Object`. (SLS 3.2.10)
+ AO.f(a)
+ ^
+Test.scala:5: error: type mismatch;
+ found : Array[Int]
+ required: Array[Int]
+Note: Int >: Int, but class Array is invariant in type T.
+You may wish to investigate a wildcard type such as `_ >: Int`. (SLS 3.2.10)
+ AO.f[Int](a)
+ ^
+two errors found
diff --git a/test/files/neg/t750b/AO.java b/test/files/neg/t750b/AO.java
new file mode 100644
index 0000000..060baf9
--- /dev/null
+++ b/test/files/neg/t750b/AO.java
@@ -0,0 +1,5 @@
+public class AO {
+ public static <T> void f(T[] ar0) {
+ System.out.println(ar0);
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t750b/Test.scala b/test/files/neg/t750b/Test.scala
new file mode 100644
index 0000000..5f792a7
--- /dev/null
+++ b/test/files/neg/t750b/Test.scala
@@ -0,0 +1,6 @@
+// t750
+object Test extends App {
+ val a = Array(1, 2, 3)
+ AO.f(a)
+ AO.f[Int](a)
+}
diff --git a/test/files/neg/t7519-b.check b/test/files/neg/t7519-b.check
new file mode 100644
index 0000000..ad554b8
--- /dev/null
+++ b/test/files/neg/t7519-b.check
@@ -0,0 +1,4 @@
+Use_2.scala:6: error: No implicit view available from String => K.
+ val x: Q = ex.Mac.mac("asdf")
+ ^
+one error found
diff --git a/test/files/neg/t7519-b/Mac_1.scala b/test/files/neg/t7519-b/Mac_1.scala
new file mode 100644
index 0000000..55b583d
--- /dev/null
+++ b/test/files/neg/t7519-b/Mac_1.scala
@@ -0,0 +1,14 @@
+// get expected error message without package declaration
+package ex
+
+import scala.language.experimental.macros
+import scala.reflect.macros._
+
+object IW {
+ def foo(a: String): String = ???
+}
+object Mac {
+ def mac(s: String): String = macro macImpl
+ def macImpl(c: Context)(s: c.Expr[String]): c.Expr[String] =
+ c.universe.reify(IW.foo(s.splice))
+}
diff --git a/test/files/neg/t7519-b/Use_2.scala b/test/files/neg/t7519-b/Use_2.scala
new file mode 100644
index 0000000..413e40e
--- /dev/null
+++ b/test/files/neg/t7519-b/Use_2.scala
@@ -0,0 +1,8 @@
+trait Q
+trait K
+
+object Use {
+ implicit def cd[T](p: T)(implicit ev: T => K): Q = ???
+ val x: Q = ex.Mac.mac("asdf")
+}
+
diff --git a/test/files/neg/t7519.check b/test/files/neg/t7519.check
new file mode 100644
index 0000000..164d67f
--- /dev/null
+++ b/test/files/neg/t7519.check
@@ -0,0 +1,7 @@
+t7519.scala:5: error: could not find implicit value for parameter nada: Nothing
+ locally(0 : String) // was: "value conversion is not a member of C.this.C"
+ ^
+t7519.scala:15: error: could not find implicit value for parameter nada: Nothing
+ locally(0 : String) // was: "value conversion is not a member of U"
+ ^
+two errors found
diff --git a/test/files/neg/t7519.scala b/test/files/neg/t7519.scala
new file mode 100644
index 0000000..aea0f35
--- /dev/null
+++ b/test/files/neg/t7519.scala
@@ -0,0 +1,18 @@
+class C {
+ implicit def conversion(m: Int)(implicit nada: Nothing): String = ???
+
+ class C { // rename class to get correct error, can't find implicit: Nothing.
+ locally(0 : String) // was: "value conversion is not a member of C.this.C"
+ }
+}
+
+object Test2 {
+ trait T; trait U
+ new T {
+ implicit def conversion(m: Int)(implicit nada: Nothing): String = ???
+
+ new U { // nested anonymous classes also share a name.
+ locally(0 : String) // was: "value conversion is not a member of U"
+ }
+ }
+}
diff --git a/test/files/neg/t752.check b/test/files/neg/t752.check
new file mode 100644
index 0000000..a91bba4
--- /dev/null
+++ b/test/files/neg/t752.check
@@ -0,0 +1,6 @@
+t752.scala:6: error: type mismatch;
+ found : String => Unit
+ required: Int => Unit
+ f(g _)
+ ^
+one error found
diff --git a/test/files/neg/bug752.scala b/test/files/neg/t752.scala
similarity index 100%
rename from test/files/neg/bug752.scala
rename to test/files/neg/t752.scala
diff --git a/test/files/neg/t7636.check b/test/files/neg/t7636.check
new file mode 100644
index 0000000..f70d50b
--- /dev/null
+++ b/test/files/neg/t7636.check
@@ -0,0 +1,10 @@
+t7636.scala:3: error: illegal inheritance;
+ self-type Main.C does not conform to Main.ResultTable[_$3]'s selftype Main.ResultTable[_$3]
+ class C extends ResultTable(Left(5):Either[_,_])(5)
+ ^
+t7636.scala:3: error: type mismatch;
+ found : Either[_$2,_$3(in constructor C)] where type _$3(in constructor C), type _$2
+ required: Either[_, _$3(in object Main)] where type _$3(in object Main)
+ class C extends ResultTable(Left(5):Either[_,_])(5)
+ ^
+two errors found
diff --git a/test/files/neg/t7636.scala b/test/files/neg/t7636.scala
new file mode 100644
index 0000000..a7b1b90
--- /dev/null
+++ b/test/files/neg/t7636.scala
@@ -0,0 +1,7 @@
+object Main extends App{
+ class ResultTable[E]( query : Either[_,E] )( columns : Int )
+ class C extends ResultTable(Left(5):Either[_,_])(5)
+}
+// Inference of the existential type for the parent type argument
+// E still fails. That looks tricky to fix, see the comments in SI-7636.
+// But we at least prevent a cascading NPE.
\ No newline at end of file
diff --git a/test/files/neg/t765.check b/test/files/neg/t765.check
new file mode 100644
index 0000000..5a5f603
--- /dev/null
+++ b/test/files/neg/t765.check
@@ -0,0 +1,4 @@
+t765.scala:3: error: not found: type Bar123
+ val bar = new Bar123
+ ^
+one error found
diff --git a/test/files/neg/bug765.scala b/test/files/neg/t765.scala
similarity index 100%
rename from test/files/neg/bug765.scala
rename to test/files/neg/t765.scala
diff --git a/test/files/neg/t766.check b/test/files/neg/t766.check
new file mode 100644
index 0000000..92039ed
--- /dev/null
+++ b/test/files/neg/t766.check
@@ -0,0 +1,4 @@
+t766.scala:5: error: not found: value badIdentifier
+ val p = badIdentifier
+ ^
+one error found
diff --git a/test/files/neg/bug766.scala b/test/files/neg/t766.scala
similarity index 100%
rename from test/files/neg/bug766.scala
rename to test/files/neg/t766.scala
diff --git a/test/files/neg/t7694b.check b/test/files/neg/t7694b.check
new file mode 100644
index 0000000..ea3d773
--- /dev/null
+++ b/test/files/neg/t7694b.check
@@ -0,0 +1,7 @@
+t7694b.scala:8: error: type arguments [_3,_4] do not conform to trait L's type parameter bounds [A2,B2 <: A2]
+ def d = if (true) (null: L[A, A]) else (null: L[B, B])
+ ^
+t7694b.scala:9: error: type arguments [_1,_2] do not conform to trait L's type parameter bounds [A2,B2 <: A2]
+ val v = if (true) (null: L[A, A]) else (null: L[B, B])
+ ^
+two errors found
diff --git a/test/files/neg/t771.scala b/test/files/neg/t771.scala
old mode 100644
new mode 100755
diff --git a/test/files/neg/t7752.check b/test/files/neg/t7752.check
new file mode 100644
index 0000000..0a015d3
--- /dev/null
+++ b/test/files/neg/t7752.check
@@ -0,0 +1,27 @@
+t7752.scala:25: error: overloaded method value foo with alternatives:
+ [A](heading: String, rows: A*)(A,) <and>
+ [A, B](heading: (String, String), rows: (A, B)*)(A, B) <and>
+ [A, B, C](heading: (String, String, String), rows: (A, B, C)*)(A, B, C) <and>
+ [A, B, C, D](heading: (String, String, String, String), rows: (A, B, C, D)*)(A, B, C, D) <and>
+ [A, B, C, D, E](heading: (String, String, String, String, String), rows: (A, B, C, D, E)*)(A, B, C, D, E) <and>
+ [A, B, C, D, E, F](heading: (String, String, String, String, String, String), rows: (A, B, C, D, E, F)*)(A, B, C, D, E, F) <and>
+ [A, B, C, D, E, F, G](heading: (String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G)*)(A, B, C, D, E, F, G) <and>
+ [A, B, C, D, E, F, G, H](heading: (String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H)*)(A, B, C, D, E, F, G, H) <and>
+ [A, B, C, D, E, F, G, H, I](heading: (String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I)*)(A, B, C, D, E, F, G, H, I) <and>
+ [A, B, C, D, E, F, G, H, I, J](heading: (String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J)*)(A, B, C, D, E, F, G, H, I, J) <and>
+ [A, B, C, D, E, F, G, H, I, J, K](heading: (String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K)*)(A, B, C, D, E, F, G, H, I, J, K) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L](heading: (String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L)*)(A, B, C, D, E, F, G, H, I, J, K, L) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M)*)(A, B, C, D, E, F, G, H, I, J, K, L, M) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)
+ cannot be applied to (Int)
+ foo((1))
+ ^
+one error found
diff --git a/test/files/neg/t7752.scala b/test/files/neg/t7752.scala
new file mode 100644
index 0000000..40ba210
--- /dev/null
+++ b/test/files/neg/t7752.scala
@@ -0,0 +1,26 @@
+object Test {
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)*): Tuple22[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)*): Tuple21[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)*): Tuple20[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)*): Tuple19[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)*): Tuple18[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)*): Tuple17[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)*): Tuple16[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)*): Tuple15[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N)*): Tuple14[A,B,C,D,E,F,G,H,I,J,K,L,M,N] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M)*): Tuple13[A,B,C,D,E,F,G,H,I,J,K,L,M] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L](heading: (String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L)*): Tuple12[A,B,C,D,E,F,G,H,I,J,K,L] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K](heading: (String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K)*): Tuple11[A,B,C,D,E,F,G,H,I,J,K] = null
+ def foo[A, B, C, D, E, F, G, H, I, J](heading: (String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J)*): Tuple10[A,B,C,D,E,F,G,H,I,J] = null
+ def foo[A, B, C, D, E, F, G, H, I](heading: (String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I)*): Tuple9[A,B,C,D,E,F,G,H,I] = null
+ def foo[A, B, C, D, E, F, G, H](heading: (String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H)*): Tuple8[A,B,C,D,E,F,G,H] = null
+ def foo[A, B, C, D, E, F, G](heading: (String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G)*): Tuple7[A,B,C,D,E,F,G] = null
+ def foo[A, B, C, D, E, F](heading: (String, String, String, String, String, String), rows: (A, B, C, D, E, F)*): Tuple6[A,B,C,D,E,F] = null
+ def foo[A, B, C, D, E](heading: (String, String, String, String, String), rows: (A, B, C, D, E)*): Tuple5[A,B,C,D,E] = null
+ def foo[A, B, C, D](heading: (String, String, String, String), rows: (A, B, C, D)*): Tuple4[A,B,C,D] = null
+ def foo[A, B, C](heading: (String, String, String), rows: (A, B, C)*): Tuple3[A,B,C] = null
+ def foo[A, B](heading: (String, String), rows: (A, B)*): Tuple2[A,B] = null
+ def foo[A](heading: String, rows: A*): Tuple1[A] = null
+
+ foo((1))
+}
\ No newline at end of file
diff --git a/test/files/neg/t7783.check b/test/files/neg/t7783.check
new file mode 100644
index 0000000..f489b3c
--- /dev/null
+++ b/test/files/neg/t7783.check
@@ -0,0 +1,16 @@
+t7783.scala:1: error: type D in object O is deprecated:
+object O { class C; @deprecated("", "") type D = C; def foo: Seq[D] = Nil }
+ ^
+t7783.scala:11: error: type D in object O is deprecated:
+ type T = O.D
+ ^
+t7783.scala:12: error: type D in object O is deprecated:
+ locally(null: O.D)
+ ^
+t7783.scala:13: error: type D in object O is deprecated:
+ val x: O.D = null
+ ^
+t7783.scala:14: error: type D in object O is deprecated:
+ locally(null.asInstanceOf[O.D])
+ ^
+5 errors found
diff --git a/test/files/neg/t3115.flags b/test/files/neg/t7783.flags
similarity index 100%
rename from test/files/neg/t3115.flags
rename to test/files/neg/t7783.flags
diff --git a/test/files/neg/t7783.scala b/test/files/neg/t7783.scala
new file mode 100644
index 0000000..995b644
--- /dev/null
+++ b/test/files/neg/t7783.scala
@@ -0,0 +1,15 @@
+object O { class C; @deprecated("", "") type D = C; def foo: Seq[D] = Nil }
+
+object NoWarn {
+ O.foo // nowarn
+ O.foo +: Nil // nowarn
+ def bar(a: Any, b: Any) = () // nowarn
+ bar(b = O.foo, a = ()) // nowarn
+}
+
+object Warn {
+ type T = O.D
+ locally(null: O.D)
+ val x: O.D = null
+ locally(null.asInstanceOf[O.D])
+}
diff --git a/test/files/neg/t779.check b/test/files/neg/t779.check
new file mode 100644
index 0000000..65f463c
--- /dev/null
+++ b/test/files/neg/t779.check
@@ -0,0 +1,4 @@
+t779.scala:6: error: method ast has return statement; needs result type
+ override def ast = return null
+ ^
+one error found
diff --git a/test/files/neg/bug779.scala b/test/files/neg/t779.scala
similarity index 100%
rename from test/files/neg/bug779.scala
rename to test/files/neg/t779.scala
diff --git a/test/files/neg/t783.check b/test/files/neg/t783.check
new file mode 100644
index 0000000..37610a5
--- /dev/null
+++ b/test/files/neg/t783.check
@@ -0,0 +1,6 @@
+t783.scala:12: error: type mismatch;
+ found : Contexts.this.Global#Template
+ required: Contexts.this.global.Template
+ globalInit0.Template(10, 20);
+ ^
+one error found
diff --git a/test/files/neg/bug783.scala b/test/files/neg/t783.scala
similarity index 100%
rename from test/files/neg/bug783.scala
rename to test/files/neg/t783.scala
diff --git a/test/files/neg/t798.check b/test/files/neg/t798.check
new file mode 100644
index 0000000..b120f3a
--- /dev/null
+++ b/test/files/neg/t798.check
@@ -0,0 +1,4 @@
+t798.scala:2: error: cyclic aliasing or subtyping involving type Bracks
+trait Test[Bracks <: Bracks] {
+ ^
+one error found
diff --git a/test/files/neg/bug798.scala b/test/files/neg/t798.scala
similarity index 100%
rename from test/files/neg/bug798.scala
rename to test/files/neg/t798.scala
diff --git a/test/files/neg/t800.check b/test/files/neg/t800.check
new file mode 100644
index 0000000..8ba95fd
--- /dev/null
+++ b/test/files/neg/t800.check
@@ -0,0 +1,16 @@
+t800.scala:4: error: qualification is already defined as value qualification
+ val qualification = false;
+ ^
+t800.scala:8: error: method qualification is defined twice
+ conflicting symbols both originated in file 't800.scala'
+ val qualification = false;
+ ^
+t800.scala:12: error: value qualification is defined twice
+ conflicting symbols both originated in file 't800.scala'
+ var qualification = false;
+ ^
+t800.scala:16: error: method qualification is defined twice
+ conflicting symbols both originated in file 't800.scala'
+ var qualification = false;
+ ^
+four errors found
diff --git a/test/files/neg/bug800.scala b/test/files/neg/t800.scala
similarity index 100%
rename from test/files/neg/bug800.scala
rename to test/files/neg/t800.scala
diff --git a/test/files/neg/t8104a.check b/test/files/neg/t8104a.check
new file mode 100644
index 0000000..ef92c2e
--- /dev/null
+++ b/test/files/neg/t8104a.check
@@ -0,0 +1,4 @@
+Test_2.scala:19: error: could not find implicit value for parameter e: Generic.Aux[Test.C,(Int, Int)]
+ implicitly[Generic.Aux[C, (Int, Int)]]
+ ^
+one error found
diff --git a/test/files/neg/t8104a/Macros_1.scala b/test/files/neg/t8104a/Macros_1.scala
new file mode 100644
index 0000000..688d069
--- /dev/null
+++ b/test/files/neg/t8104a/Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl[T](c: Context)(implicit T: c.WeakTypeTag[T]) = {
+ import c.universe._
+ import Flag._
+ import definitions._
+ val fields = T.tpe.declarations.toList.collect{ case x: TermSymbol if x.isVal && x.isCaseAccessor => x }
+ val Repr = appliedType(TupleClass(fields.length).asType.toType, fields.map(_.typeSignature))
+ c.Expr(Block(
+ List(ClassDef(
+ Modifiers(FINAL),
+ newTypeName("$anon"),
+ List(),
+ Template(
+ List(AppliedTypeTree(Ident(newTypeName("Generic")), List(TypeTree(T.tpe)))),
+ emptyValDef,
+ List(
+ DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(())))),
+ TypeDef(Modifiers(), newTypeName("Repr"), List(), TypeTree(Repr)))))),
+ Apply(Select(New(Ident(newTypeName("$anon"))), nme.CONSTRUCTOR), List())))
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t8104a/Test_2.scala b/test/files/neg/t8104a/Test_2.scala
new file mode 100644
index 0000000..f601fc3
--- /dev/null
+++ b/test/files/neg/t8104a/Test_2.scala
@@ -0,0 +1,20 @@
+trait Generic[T] { type Repr }
+object Generic {
+ type Aux[T, Repr0] = Generic[T] { type Repr = Repr0 }
+ import scala.language.experimental.macros
+ implicit def materializeGeneric[T]: Generic[T] = macro Macros.impl[T]
+}
+
+object Test extends App {
+ case class C(x: Int, y: Int)
+
+ def reprify[T, Repr](x: T)(implicit generic: Generic.Aux[T, Repr]) = ???
+ reprify(C(40, 2))
+
+ // this is a compilation error at the moment as explained in SI-8104
+ // because matchesPt in implicit search says that depoly(<type of materializeGeneric>) isn't a subtype of Generic.Aux[C, (Int, Int)]
+ // which is rightfully so, because depoly only replaces type parameters, not type members with wildcard types
+ // however in the future we might want to relax the matchesPt check, so this might start compiling
+ // therefore, if you've broken this test, then you should be happy, because most likely you've just enabled an interesting use case!
+ implicitly[Generic.Aux[C, (Int, Int)]]
+}
diff --git a/test/files/neg/t8104b.check b/test/files/neg/t8104b.check
new file mode 100644
index 0000000..3214a13
--- /dev/null
+++ b/test/files/neg/t8104b.check
@@ -0,0 +1,4 @@
+Test_2.scala:16: error: could not find implicit value for parameter generic: Generic.Aux[Test.C,Repr]
+ reprify(C(40, 2))
+ ^
+one error found
diff --git a/test/files/neg/t8104b/Macros_1.scala b/test/files/neg/t8104b/Macros_1.scala
new file mode 100644
index 0000000..688d069
--- /dev/null
+++ b/test/files/neg/t8104b/Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl[T](c: Context)(implicit T: c.WeakTypeTag[T]) = {
+ import c.universe._
+ import Flag._
+ import definitions._
+ val fields = T.tpe.declarations.toList.collect{ case x: TermSymbol if x.isVal && x.isCaseAccessor => x }
+ val Repr = appliedType(TupleClass(fields.length).asType.toType, fields.map(_.typeSignature))
+ c.Expr(Block(
+ List(ClassDef(
+ Modifiers(FINAL),
+ newTypeName("$anon"),
+ List(),
+ Template(
+ List(AppliedTypeTree(Ident(newTypeName("Generic")), List(TypeTree(T.tpe)))),
+ emptyValDef,
+ List(
+ DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(())))),
+ TypeDef(Modifiers(), newTypeName("Repr"), List(), TypeTree(Repr)))))),
+ Apply(Select(New(Ident(newTypeName("$anon"))), nme.CONSTRUCTOR), List())))
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/t8104b/Test_2.scala b/test/files/neg/t8104b/Test_2.scala
new file mode 100644
index 0000000..a0d3594
--- /dev/null
+++ b/test/files/neg/t8104b/Test_2.scala
@@ -0,0 +1,24 @@
+trait Generic[T] { type Repr }
+object Generic {
+ type Aux[T, Repr0] = Generic[T] { type Repr = Repr0 }
+ import scala.language.experimental.macros
+ implicit def materializeGeneric[T, Repr]: Generic.Aux[T, Repr] = macro Macros.impl[T]
+}
+
+object Test extends App {
+ case class C(x: Int, y: Int)
+
+ // this doesn't work because of SI-7470
+ // well, in fact SI-7470 has been fixed: https://github.com/scala/scala/pull/2499
+ // it's just that the fix hasn't been backported to 2.10.x
+ // if you've made this compile, consider taking a look at the aforementioned pull request
+ def reprify[T, Repr](x: T)(implicit generic: Generic.Aux[T, Repr]) = ???
+ reprify(C(40, 2))
+
+ // this is a compilation error at the moment as explained in SI-8104
+ // because matchesPt in implicit search says that depoly(<type of materializeGeneric>) isn't a subtype of Generic.Aux[C, (Int, Int)]
+ // which is rightfully so, because depoly only replaces type parameters, not type members with wildcard types
+ // however in the future we might want to relax the matchesPt check, so this might start compiling
+ // therefore, if you've broken this test, then you should be happy, because most likely you've just enabled an interesting use case!
+ implicitly[Generic.Aux[C, (Int, Int)]]
+}
diff --git a/test/files/neg/t8146-non-finitary-2.check b/test/files/neg/t8146-non-finitary-2.check
new file mode 100644
index 0000000..8c2e143
--- /dev/null
+++ b/test/files/neg/t8146-non-finitary-2.check
@@ -0,0 +1,9 @@
+t8146-non-finitary-2.scala:5: error: class graph is not finitary because type parameter X is expansively recursive
+trait C[X] extends N[N[C[D[X]]]]
+ ^
+t8146-non-finitary-2.scala:7: error: type mismatch;
+ found : C[Int]
+ required: N[C[Int]]
+ def foo(c: C[Int]): N[C[Int]] = c
+ ^
+two errors found
diff --git a/test/files/neg/t8146-non-finitary-2.scala b/test/files/neg/t8146-non-finitary-2.scala
new file mode 100644
index 0000000..c12f5f8
--- /dev/null
+++ b/test/files/neg/t8146-non-finitary-2.scala
@@ -0,0 +1,8 @@
+// Example 3 from "On Decidability of Nominal Subtyping with Variance" (Pierce, Kennedy)
+// http://research.microsoft.com/pubs/64041/fool2007.pdf
+trait N[-Z]
+trait D[Y]
+trait C[X] extends N[N[C[D[X]]]]
+object Test {
+ def foo(c: C[Int]): N[C[Int]] = c
+}
diff --git a/test/files/neg/t8146-non-finitary.check b/test/files/neg/t8146-non-finitary.check
new file mode 100644
index 0000000..8363b75
--- /dev/null
+++ b/test/files/neg/t8146-non-finitary.check
@@ -0,0 +1,9 @@
+t8146-non-finitary.scala:4: error: class graph is not finitary because type parameter A is expansively recursive
+trait C[A] extends N[N[C[C[A]]]]
+ ^
+t8146-non-finitary.scala:6: error: type mismatch;
+ found : C[Int]
+ required: N[C[Int]]
+ def foo(c: C[Int]): N[C[Int]] = c
+ ^
+two errors found
diff --git a/test/files/neg/t8146-non-finitary.scala b/test/files/neg/t8146-non-finitary.scala
new file mode 100644
index 0000000..3d8a307
--- /dev/null
+++ b/test/files/neg/t8146-non-finitary.scala
@@ -0,0 +1,7 @@
+// Example 3 from "On Decidability of Nominal Subtyping with Variance" (Pierce, Kennedy)
+// http://research.microsoft.com/pubs/64041/fool2007.pdf
+trait N[-A]
+trait C[A] extends N[N[C[C[A]]]]
+object Test {
+ def foo(c: C[Int]): N[C[Int]] = c
+}
diff --git a/test/files/neg/t835.check b/test/files/neg/t835.check
new file mode 100644
index 0000000..6ad18d3
--- /dev/null
+++ b/test/files/neg/t835.check
@@ -0,0 +1,9 @@
+t835.scala:2: error: no `: _*' annotation allowed here
+(such annotations are only allowed in arguments to *-parameters)
+ Console.println(List(List(1, 2, 3) : _*, List(4, 5, 6) : _*))
+ ^
+t835.scala:2: error: no `: _*' annotation allowed here
+(such annotations are only allowed in arguments to *-parameters)
+ Console.println(List(List(1, 2, 3) : _*, List(4, 5, 6) : _*))
+ ^
+two errors found
diff --git a/test/files/neg/bug835.scala b/test/files/neg/t835.scala
similarity index 100%
rename from test/files/neg/bug835.scala
rename to test/files/neg/t835.scala
diff --git a/test/files/neg/t836.check b/test/files/neg/t836.check
new file mode 100644
index 0000000..cf2faf9
--- /dev/null
+++ b/test/files/neg/t836.check
@@ -0,0 +1,7 @@
+t836.scala:9: error: type mismatch;
+ found : Any
+ required: A.this.S
+ (which expands to) A.this.MyObj#S
+ val some: S = any // compiles => type X is set to scala.Any
+ ^
+one error found
diff --git a/test/files/neg/bug836.scala b/test/files/neg/t836.scala
similarity index 100%
rename from test/files/neg/bug836.scala
rename to test/files/neg/t836.scala
diff --git a/test/files/neg/t845.check b/test/files/neg/t845.check
new file mode 100644
index 0000000..07ed7e4
--- /dev/null
+++ b/test/files/neg/t845.check
@@ -0,0 +1,4 @@
+t845.scala:4: error: only classes can have declared but undefined members
+ type Bar;
+ ^
+one error found
diff --git a/test/files/neg/bug845.scala b/test/files/neg/t845.scala
similarity index 100%
rename from test/files/neg/bug845.scala
rename to test/files/neg/t845.scala
diff --git a/test/files/neg/t846.check b/test/files/neg/t846.check
new file mode 100644
index 0000000..242a800
--- /dev/null
+++ b/test/files/neg/t846.check
@@ -0,0 +1,6 @@
+t846.scala:9: error: type mismatch;
+ found : Null(null)
+ required: B
+ if (a != null) f(a) else null
+ ^
+one error found
diff --git a/test/files/neg/bug846.scala b/test/files/neg/t846.scala
similarity index 100%
rename from test/files/neg/bug846.scala
rename to test/files/neg/t846.scala
diff --git a/test/files/neg/t856.check b/test/files/neg/t856.check
new file mode 100644
index 0000000..02978e1
--- /dev/null
+++ b/test/files/neg/t856.check
@@ -0,0 +1,14 @@
+t856.scala:3: error: class ComplexRect needs to be abstract, since:
+it has 2 unimplemented members.
+/** As seen from class ComplexRect, the missing signatures are as follows.
+ * For convenience, these are usable as stub implementations.
+ */
+ // Members declared in scala.Equals
+ def canEqual(that: Any): Boolean = ???
+
+ // Members declared in scala.Product2
+ def _2: Double = ???
+
+class ComplexRect(val _1:Double, _2:Double) extends Complex {
+ ^
+one error found
diff --git a/test/files/neg/bug856.scala b/test/files/neg/t856.scala
similarity index 100%
rename from test/files/neg/bug856.scala
rename to test/files/neg/t856.scala
diff --git a/test/files/neg/t875.check b/test/files/neg/t875.check
new file mode 100644
index 0000000..406edcf
--- /dev/null
+++ b/test/files/neg/t875.check
@@ -0,0 +1,17 @@
+t875.scala:3: error: no `: _*' annotation allowed here
+(such annotations are only allowed in arguments to *-parameters)
+ val ys = List(1, 2, 3, xs: _*)
+ ^
+t875.scala:6: error: no `: _*' annotation allowed here
+(such annotations are only allowed in arguments to *-parameters)
+ mkList1(xs: _*)
+ ^
+t875.scala:15: error: no `: _*' annotation allowed here
+(such annotations are only allowed in arguments to *-parameters)
+ f(true, 1, xs: _*)
+ ^
+t875.scala:16: error: no `: _*' annotation allowed here
+(such annotations are only allowed in arguments to *-parameters)
+ g(1, xs:_*)
+ ^
+four errors found
diff --git a/test/files/neg/bug875.scala b/test/files/neg/t875.scala
similarity index 100%
rename from test/files/neg/bug875.scala
rename to test/files/neg/t875.scala
diff --git a/test/files/neg/t876.check b/test/files/neg/t876.check
new file mode 100644
index 0000000..04c5c8f
--- /dev/null
+++ b/test/files/neg/t876.check
@@ -0,0 +1,4 @@
+t876.scala:25: error: too many arguments for method apply: (key: AssertionError.A)manager.B in class HashMap
+ assert(manager.map(A2) == List(manager.map(A2, A1)))
+ ^
+one error found
diff --git a/test/files/neg/bug876.scala b/test/files/neg/t876.scala
similarity index 100%
rename from test/files/neg/bug876.scala
rename to test/files/neg/t876.scala
diff --git a/test/files/neg/t877.check b/test/files/neg/t877.check
new file mode 100644
index 0000000..5f25bd4
--- /dev/null
+++ b/test/files/neg/t877.check
@@ -0,0 +1,7 @@
+t877.scala:3: error: Invalid literal number
+trait Foo extends A(22A, Bug!) {}
+ ^
+t877.scala:3: error: parents of traits may not have parameters
+trait Foo extends A(22A, Bug!) {}
+ ^
+two errors found
diff --git a/test/files/neg/bug877.scala b/test/files/neg/t877.scala
similarity index 100%
rename from test/files/neg/bug877.scala
rename to test/files/neg/t877.scala
diff --git a/test/files/neg/t882.check b/test/files/neg/t882.check
new file mode 100644
index 0000000..a906778
--- /dev/null
+++ b/test/files/neg/t882.check
@@ -0,0 +1,4 @@
+t882.scala:2: error: traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'
+trait SortedSet[A <% Ordered[A]] {
+ ^
+one error found
diff --git a/test/files/neg/bug882.scala b/test/files/neg/t882.scala
similarity index 100%
rename from test/files/neg/bug882.scala
rename to test/files/neg/t882.scala
diff --git a/test/files/neg/t900.check b/test/files/neg/t900.check
new file mode 100644
index 0000000..6fe26a3
--- /dev/null
+++ b/test/files/neg/t900.check
@@ -0,0 +1,9 @@
+t900.scala:4: error: type mismatch;
+ found : Foo.this.x.type (with underlying type Foo.this.bar)
+ required: AnyRef
+Note that bar is unbounded, which means AnyRef is not a known parent.
+Such types can participate in value classes, but instances
+cannot appear in singleton types or in reference comparisons.
+ def break(): x.type
+ ^
+one error found
diff --git a/test/files/neg/bug900.scala b/test/files/neg/t900.scala
similarity index 100%
rename from test/files/neg/bug900.scala
rename to test/files/neg/t900.scala
diff --git a/test/files/neg/t908.check b/test/files/neg/t908.check
new file mode 100644
index 0000000..2c723a7
--- /dev/null
+++ b/test/files/neg/t908.check
@@ -0,0 +1,4 @@
+t908.scala:8: error: not found: value makeA
+ this(makeA)
+ ^
+one error found
diff --git a/test/files/neg/bug908.scala b/test/files/neg/t908.scala
similarity index 100%
rename from test/files/neg/bug908.scala
rename to test/files/neg/t908.scala
diff --git a/test/files/neg/t909.check b/test/files/neg/t909.check
new file mode 100644
index 0000000..e7a42bd
--- /dev/null
+++ b/test/files/neg/t909.check
@@ -0,0 +1,6 @@
+t909.scala:6: error: type mismatch;
+ found : String("Hello")
+ required: Int
+ case Foo("Hello") =>
+ ^
+one error found
diff --git a/test/files/neg/bug909.scala b/test/files/neg/t909.scala
similarity index 100%
rename from test/files/neg/bug909.scala
rename to test/files/neg/t909.scala
diff --git a/test/files/neg/t910.check b/test/files/neg/t910.check
new file mode 100644
index 0000000..45420f8
--- /dev/null
+++ b/test/files/neg/t910.check
@@ -0,0 +1,6 @@
+t910.scala:4: error: type mismatch;
+ found : Seq[Char]
+ required: Seq[Int]
+ val y: Seq[Int] = rest
+ ^
+one error found
diff --git a/test/files/neg/bug910.scala b/test/files/neg/t910.scala
similarity index 100%
rename from test/files/neg/bug910.scala
rename to test/files/neg/t910.scala
diff --git a/test/files/neg/t935.check b/test/files/neg/t935.check
new file mode 100644
index 0000000..8b73700
--- /dev/null
+++ b/test/files/neg/t935.check
@@ -0,0 +1,10 @@
+t935.scala:7: error: type arguments [Test3.B] do not conform to class E's type parameter bounds [T <: String]
+ @E[B](new B) val b = "hi"
+ ^
+t935.scala:13: error: type arguments [Test4.B] do not conform to class E's type parameter bounds [T <: String]
+ val b: String @E[B](new B) = "hi"
+ ^
+t935.scala:13: error: type arguments [Test4.B] do not conform to class E's type parameter bounds [T <: String]
+ val b: String @E[B](new B) = "hi"
+ ^
+three errors found
diff --git a/test/files/neg/bug935.scala b/test/files/neg/t935.scala
similarity index 100%
rename from test/files/neg/bug935.scala
rename to test/files/neg/t935.scala
diff --git a/test/files/neg/t944.check b/test/files/neg/t944.check
new file mode 100644
index 0000000..1fc0a12
--- /dev/null
+++ b/test/files/neg/t944.check
@@ -0,0 +1,4 @@
+t944.scala:5: error: implementation restricts functions to 22 parameters
+ a23:Int) => 1
+ ^
+one error found
diff --git a/test/files/neg/bug944.scala b/test/files/neg/t944.scala
similarity index 100%
rename from test/files/neg/bug944.scala
rename to test/files/neg/t944.scala
diff --git a/test/files/neg/t961.check b/test/files/neg/t961.check
new file mode 100644
index 0000000..14d39b0
--- /dev/null
+++ b/test/files/neg/t961.check
@@ -0,0 +1,4 @@
+t961.scala:11: error: Temp.B.type does not take parameters
+ B() match {
+ ^
+one error found
diff --git a/test/files/neg/bug961.scala b/test/files/neg/t961.scala
similarity index 100%
rename from test/files/neg/bug961.scala
rename to test/files/neg/t961.scala
diff --git a/test/files/neg/t963.check b/test/files/neg/t963.check
new file mode 100644
index 0000000..4dc202c
--- /dev/null
+++ b/test/files/neg/t963.check
@@ -0,0 +1,12 @@
+t963.scala:14: error: stable identifier required, but Test.this.y3.x found.
+ val w3 : y3.x.type = y3.x
+ ^
+t963.scala:17: error: stable identifier required, but Test.this.y4.x found.
+ val w4 : y4.x.type = y4.x
+ ^
+t963.scala:10: error: type mismatch;
+ found : AnyRef{def x: Integer}
+ required: AnyRef{val x: Integer}
+ val y2 : { val x : java.lang.Integer } = new { def x = new java.lang.Integer(r.nextInt) }
+ ^
+three errors found
diff --git a/test/files/neg/t963.scala b/test/files/neg/t963.scala
new file mode 100644
index 0000000..0cc2034
--- /dev/null
+++ b/test/files/neg/t963.scala
@@ -0,0 +1,18 @@
+import scala.util.Random
+
+// Only y1 (val/val) should actually compile.
+object Test {
+ val r = new Random()
+
+ val y1 : { val x : java.lang.Integer } = new { val x = new java.lang.Integer(r.nextInt) }
+ val w1 : y1.x.type = y1.x
+
+ val y2 : { val x : java.lang.Integer } = new { def x = new java.lang.Integer(r.nextInt) }
+ val w2 : y2.x.type = y2.x
+
+ val y3 : { def x : java.lang.Integer } = new { val x = new java.lang.Integer(r.nextInt) }
+ val w3 : y3.x.type = y3.x
+
+ val y4 : { def x : java.lang.Integer } = new { def x = new java.lang.Integer(r.nextInt) }
+ val w4 : y4.x.type = y4.x
+}
diff --git a/test/files/neg/t963b.check b/test/files/neg/t963b.check
new file mode 100644
index 0000000..9918a98
--- /dev/null
+++ b/test/files/neg/t963b.check
@@ -0,0 +1,6 @@
+t963b.scala:25: error: type mismatch;
+ found : B.type
+ required: AnyRef{val y: A}
+ B.f(B)
+ ^
+one error found
diff --git a/test/files/neg/t963b.scala b/test/files/neg/t963b.scala
new file mode 100644
index 0000000..b34aae8
--- /dev/null
+++ b/test/files/neg/t963b.scala
@@ -0,0 +1,26 @@
+// Soundness bug, at #963 and dup at #2079.
+trait A {
+ type T
+ var v : T
+}
+
+object B {
+ def f(x : { val y : A }) { x.y.v = x.y.v }
+
+ var a : A = _
+ var b : Boolean = false
+ def y : A = {
+ if(b) {
+ a = new A { type T = Int; var v = 1 }
+ a
+ } else {
+ a = new A { type T = String; var v = "" }
+ b = true
+ a
+ }
+ }
+}
+
+object Test extends App {
+ B.f(B)
+}
diff --git a/test/files/neg/t987.check b/test/files/neg/t987.check
new file mode 100644
index 0000000..90ab70b
--- /dev/null
+++ b/test/files/neg/t987.check
@@ -0,0 +1,19 @@
+t987.scala:15: error: illegal inheritance;
+ class E inherits different type instances of trait B:
+B[D] and B[C]
+class E extends D
+ ^
+t987.scala:20: error: illegal inheritance;
+ class F inherits different type instances of trait B:
+B[D] and B[C]
+class F extends D
+ ^
+t987.scala:25: error: illegal inheritance;
+ class D inherits different type instances of trait B:
+B[D] and B[C]
+abstract class D extends C with B[D] {}
+ ^
+t987.scala:25: error: type arguments [D] do not conform to trait B's type parameter bounds [T <: B[T]]
+abstract class D extends C with B[D] {}
+ ^
+four errors found
diff --git a/test/files/neg/bug987.scala b/test/files/neg/t987.scala
similarity index 100%
rename from test/files/neg/bug987.scala
rename to test/files/neg/t987.scala
diff --git a/test/files/neg/t997.check b/test/files/neg/t997.check
new file mode 100644
index 0000000..186095f
--- /dev/null
+++ b/test/files/neg/t997.check
@@ -0,0 +1,7 @@
+t997.scala:13: error: wrong number of arguments for object Foo
+"x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
+ ^
+t997.scala:13: error: not found: value a
+"x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
+ ^
+two errors found
diff --git a/test/files/neg/t997.scala b/test/files/neg/t997.scala
new file mode 100644
index 0000000..e8d10f4
--- /dev/null
+++ b/test/files/neg/t997.scala
@@ -0,0 +1,15 @@
+// An extractor with 2 results
+object Foo { def unapply(x : String) = Some(Pair(x, x)) }
+
+object Test extends App {
+
+// Prints '(x, x)'. Should compile as per SI-6111.
+"x" match { case Foo(a) => Console.println(a) }
+
+// Prints '(x,x)' as expected.
+"x" match { case Foo(a, b) => Console.println((a,b)) }
+
+// Gives confusing error 'not found: value c'.
+"x" match { case Foo(a, b, c) => Console.println((a,b,c)) }
+
+}
diff --git a/test/files/neg/tailrec-2.check b/test/files/neg/tailrec-2.check
index 4f763a1..d3432a7 100644
--- a/test/files/neg/tailrec-2.check
+++ b/test/files/neg/tailrec-2.check
@@ -1,4 +1,7 @@
-tailrec-2.scala:6: error: could not optimize @tailrec annotated method f: it contains a recursive call targetting a supertype
+tailrec-2.scala:8: error: could not optimize @tailrec annotated method f: it contains a recursive call targeting supertype Super[A]
@annotation.tailrec final def f[B >: A](mem: List[B]): List[B] = (null: Super[A]).f(mem)
^
-one error found
+tailrec-2.scala:9: error: @tailrec annotated method contains no recursive calls
+ @annotation.tailrec final def f1[B >: A](mem: List[B]): List[B] = this.g(mem)
+ ^
+two errors found
diff --git a/test/files/neg/tailrec-2.scala b/test/files/neg/tailrec-2.scala
index 342cd85..9eb3af2 100644
--- a/test/files/neg/tailrec-2.scala
+++ b/test/files/neg/tailrec-2.scala
@@ -1,9 +1,12 @@
sealed abstract class Super[+A] {
def f[B >: A](mem: List[B]) : List[B]
+ def g(mem: List[_]) = ???
}
// This one should fail, target is a supertype
class Bop1[+A](val element: A) extends Super[A] {
+
@annotation.tailrec final def f[B >: A](mem: List[B]): List[B] = (null: Super[A]).f(mem)
+ @annotation.tailrec final def f1[B >: A](mem: List[B]): List[B] = this.g(mem)
}
// These succeed
class Bop2[+A](val element: A) extends Super[A] {
diff --git a/test/files/neg/tailrec.check b/test/files/neg/tailrec.check
index ad92731..946d342 100644
--- a/test/files/neg/tailrec.check
+++ b/test/files/neg/tailrec.check
@@ -4,9 +4,9 @@ tailrec.scala:45: error: could not optimize @tailrec annotated method facfail: i
tailrec.scala:50: error: could not optimize @tailrec annotated method fail1: it is neither private nor final so can be overridden
@tailrec def fail1(x: Int): Int = fail1(x)
^
-tailrec.scala:55: error: could not optimize @tailrec annotated method fail2: it contains a recursive call not in tail position
- case x :: xs => x :: fail2[T](xs)
- ^
+tailrec.scala:53: error: could not optimize @tailrec annotated method fail2: it contains a recursive call not in tail position
+ @tailrec final def fail2[T](xs: List[T]): List[T] = xs match {
+ ^
tailrec.scala:59: error: could not optimize @tailrec annotated method fail3: it is called recursively with different type arguments
@tailrec final def fail3[T](x: Int): Int = fail3(x - 1)
^
diff --git a/test/files/neg/tcpoly_override.check b/test/files/neg/tcpoly_override.check
index 9552932..dbc3ff9 100644
--- a/test/files/neg/tcpoly_override.check
+++ b/test/files/neg/tcpoly_override.check
@@ -1,5 +1,6 @@
tcpoly_override.scala:9: error: The kind of type T does not conform to the expected kind of type T[_] in trait A.
-C.this.T's type parameters do not match type T's expected parameters: type T (in class C) has no type parameters, but type T (in trait A) has one
+C.this.T's type parameters do not match type T's expected parameters:
+type T (in class C) has no type parameters, but type T (in trait A) has one
type T = B // This compiles well (@M: ... but it shouldn't)
^
one error found
diff --git a/test/files/neg/tcpoly_override.scala b/test/files/neg/tcpoly_override.scala
index 7f64ad0..dd043b4 100644
--- a/test/files/neg/tcpoly_override.scala
+++ b/test/files/neg/tcpoly_override.scala
@@ -1,4 +1,4 @@
-// bug1231: reported by Vladimir Reshetnikov on 19 July 2007
+// t1231: reported by Vladimir Reshetnikov on 19 July 2007
trait A {
type T[_]
}
diff --git a/test/files/neg/tcpoly_ticket2101.check b/test/files/neg/tcpoly_ticket2101.check
index eac582e..ad0fd8b 100644
--- a/test/files/neg/tcpoly_ticket2101.check
+++ b/test/files/neg/tcpoly_ticket2101.check
@@ -1,4 +1,4 @@
tcpoly_ticket2101.scala:2: error: type arguments [T2,X] do not conform to class T's type parameter bounds [A[Y] <: T[A,B],B]
class T2[X] extends T[T2, X] // ill-typed
^
-one error found
\ No newline at end of file
+one error found
diff --git a/test/files/neg/tcpoly_typealias.check b/test/files/neg/tcpoly_typealias.check
index 670add2..4beac0e 100644
--- a/test/files/neg/tcpoly_typealias.check
+++ b/test/files/neg/tcpoly_typealias.check
@@ -1,13 +1,16 @@
tcpoly_typealias.scala:37: error: The kind of type m does not conform to the expected kind of type m[+x] in trait A.
-BInv.this.m's type parameters do not match type m's expected parameters: type x (in trait BInv) is invariant, but type x (in trait A) is declared covariant
+BInv.this.m's type parameters do not match type m's expected parameters:
+type x (in trait BInv) is invariant, but type x (in trait A) is declared covariant
type m[x] = FooCov[x] // error: invariant x in alias def
^
tcpoly_typealias.scala:41: error: The kind of type m does not conform to the expected kind of type m[+x] in trait A.
-BCon.this.m's type parameters do not match type m's expected parameters: type x (in trait BCon) is contravariant, but type x (in trait A) is declared covariant
+BCon.this.m's type parameters do not match type m's expected parameters:
+type x (in trait BCon) is contravariant, but type x (in trait A) is declared covariant
type m[-x] = FooCon[x] // error: contravariant x
^
tcpoly_typealias.scala:45: error: The kind of type m does not conform to the expected kind of type m[+x] in trait A.
-BBound.this.m's type parameters do not match type m's expected parameters: type x (in trait BBound)'s bounds >: Nothing <: String are stricter than type x (in trait A)'s declared bounds >: Nothing <: Any
+BBound.this.m's type parameters do not match type m's expected parameters:
+type x (in trait BBound)'s bounds <: String are stricter than type x (in trait A)'s declared bounds >: Nothing <: Any
type m[+x <: String] = FooBound[x] // error: x with stricter bound
^
three errors found
diff --git a/test/files/neg/tcpoly_variance.check b/test/files/neg/tcpoly_variance.check
index 0695fa0..c0dfcac 100644
--- a/test/files/neg/tcpoly_variance.check
+++ b/test/files/neg/tcpoly_variance.check
@@ -1,4 +1,4 @@
-tcpoly_variance.scala:6: error: overriding method str in class A of type => m[java.lang.Object];
+tcpoly_variance.scala:6: error: overriding method str in class A of type => m[Object];
method str has incompatible type
override def str: m[String] = sys.error("foo") // since x in m[x] is invariant, ! m[String] <: m[Object]
^
diff --git a/test/files/neg/tcpoly_variance_enforce.check b/test/files/neg/tcpoly_variance_enforce.check
index 44b5b2c..3299cc3 100644
--- a/test/files/neg/tcpoly_variance_enforce.check
+++ b/test/files/neg/tcpoly_variance_enforce.check
@@ -1,45 +1,57 @@
tcpoly_variance_enforce.scala:15: error: kinds of the type arguments (FooInvar) do not conform to the expected kinds of the type parameters (type m) in trait coll.
-FooInvar's type parameters do not match type m's expected parameters: type x (in class FooInvar) is invariant, but type x is declared covariant
+FooInvar's type parameters do not match type m's expected parameters:
+type x (in class FooInvar) is invariant, but type x is declared covariant
object fcollinv extends coll[FooInvar] // error
^
tcpoly_variance_enforce.scala:16: error: kinds of the type arguments (FooContra) do not conform to the expected kinds of the type parameters (type m) in trait coll.
-FooContra's type parameters do not match type m's expected parameters: type x (in class FooContra) is contravariant, but type x is declared covariant
+FooContra's type parameters do not match type m's expected parameters:
+type x (in class FooContra) is contravariant, but type x is declared covariant
object fcollcon extends coll[FooContra] // error
^
tcpoly_variance_enforce.scala:17: error: kinds of the type arguments (FooString) do not conform to the expected kinds of the type parameters (type m) in trait coll.
-FooString's type parameters do not match type m's expected parameters: type x (in class FooString)'s bounds >: Nothing <: String are stricter than type x's declared bounds >: Nothing <: Any
+FooString's type parameters do not match type m's expected parameters:
+type x (in class FooString)'s bounds <: String are stricter than type x's declared bounds >: Nothing <: Any
object fcollwb extends coll[FooString] // error
^
tcpoly_variance_enforce.scala:19: error: kinds of the type arguments (FooCov) do not conform to the expected kinds of the type parameters (type m) in trait coll2.
-FooCov's type parameters do not match type m's expected parameters: type x (in class FooCov) is covariant, but type x is declared contravariant
+FooCov's type parameters do not match type m's expected parameters:
+type x (in class FooCov) is covariant, but type x is declared contravariant
object fcoll2ok extends coll2[FooCov] // error
^
tcpoly_variance_enforce.scala:20: error: kinds of the type arguments (FooInvar) do not conform to the expected kinds of the type parameters (type m) in trait coll2.
-FooInvar's type parameters do not match type m's expected parameters: type x (in class FooInvar) is invariant, but type x is declared contravariant
+FooInvar's type parameters do not match type m's expected parameters:
+type x (in class FooInvar) is invariant, but type x is declared contravariant
object fcoll2inv extends coll2[FooInvar] // error
^
tcpoly_variance_enforce.scala:22: error: kinds of the type arguments (FooString) do not conform to the expected kinds of the type parameters (type m) in trait coll2.
-FooString's type parameters do not match type m's expected parameters: type x (in class FooString) is covariant, but type x is declared contravarianttype x (in class FooString)'s bounds >: Nothing <: String are stricter than type x's declared bounds >: Nothing <: Any
+FooString's type parameters do not match type m's expected parameters:
+type x (in class FooString) is covariant, but type x is declared contravariant
+type x (in class FooString)'s bounds <: String are stricter than type x's declared bounds >: Nothing <: Any
object fcoll2wb extends coll2[FooString] // error
^
tcpoly_variance_enforce.scala:27: error: kinds of the type arguments (FooString) do not conform to the expected kinds of the type parameters (type m) in trait coll3.
-FooString's type parameters do not match type m's expected parameters: type x (in class FooString)'s bounds >: Nothing <: String are stricter than type x's declared bounds >: Nothing <: Any
+FooString's type parameters do not match type m's expected parameters:
+type x (in class FooString)'s bounds <: String are stricter than type x's declared bounds >: Nothing <: Any
object fcoll3wb extends coll3[FooString] // error
^
tcpoly_variance_enforce.scala:30: error: kinds of the type arguments (FooString,Int) do not conform to the expected kinds of the type parameters (type m,type y) in trait coll4.
-FooString's type parameters do not match type m's expected parameters: type x (in class FooString)'s bounds >: Nothing <: String are stricter than type x's declared bounds >: Nothing <: y
+FooString's type parameters do not match type m's expected parameters:
+type x (in class FooString)'s bounds <: String are stricter than type x's declared bounds <: y
object fcoll4_1 extends coll4[FooString, Int] // error
^
tcpoly_variance_enforce.scala:31: error: kinds of the type arguments (FooString,Any) do not conform to the expected kinds of the type parameters (type m,type y) in trait coll4.
-FooString's type parameters do not match type m's expected parameters: type x (in class FooString)'s bounds >: Nothing <: String are stricter than type x's declared bounds >: Nothing <: y
+FooString's type parameters do not match type m's expected parameters:
+type x (in class FooString)'s bounds <: String are stricter than type x's declared bounds <: y
object fcoll4_2 extends coll4[FooString, Any] // error
^
tcpoly_variance_enforce.scala:37: error: kinds of the type arguments (FooInvar) do not conform to the expected kinds of the type parameters (type m) in trait coll.
-FooInvar's type parameters do not match type m's expected parameters: type x (in class FooInvar) is invariant, but type x is declared covariant
+FooInvar's type parameters do not match type m's expected parameters:
+type x (in class FooInvar) is invariant, but type x is declared covariant
def x: coll[FooInvar] = sys.error("foo") // error
^
tcpoly_variance_enforce.scala:38: error: kinds of the type arguments (FooContra) do not conform to the expected kinds of the type parameters (type m) in trait coll.
-FooContra's type parameters do not match type m's expected parameters: type x (in class FooContra) is contravariant, but type x is declared covariant
+FooContra's type parameters do not match type m's expected parameters:
+type x (in class FooContra) is contravariant, but type x is declared covariant
def y: coll[FooContra] = sys.error("foo") // error
^
11 errors found
diff --git a/test/files/neg/unchecked-abstract.check b/test/files/neg/unchecked-abstract.check
new file mode 100644
index 0000000..6e811dc
--- /dev/null
+++ b/test/files/neg/unchecked-abstract.check
@@ -0,0 +1,25 @@
+unchecked-abstract.scala:16: error: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Contravariant[H]])
+ ^
+unchecked-abstract.scala:21: error: abstract type H in type Contravariant[M.this.H] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Contravariant[H]])
+ ^
+unchecked-abstract.scala:27: error: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Invariant[T]])
+ ^
+unchecked-abstract.scala:28: error: abstract type L in type Invariant[M.this.L] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Invariant[L]])
+ ^
+unchecked-abstract.scala:31: error: abstract type H in type Invariant[M.this.H] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Invariant[H]])
+ ^
+unchecked-abstract.scala:33: error: abstract type L in type Invariant[M.this.L] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Invariant[L]])
+ ^
+unchecked-abstract.scala:36: error: abstract type H in type Invariant[M.this.H] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Invariant[H]])
+ ^
+unchecked-abstract.scala:37: error: abstract type T in type Invariant[M.this.T] is unchecked since it is eliminated by erasure
+ /* warn */ println(x.isInstanceOf[Invariant[T]])
+ ^
+8 errors found
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/neg/unchecked-abstract.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/neg/unchecked-abstract.flags
diff --git a/test/files/neg/unchecked-abstract.scala b/test/files/neg/unchecked-abstract.scala
new file mode 100644
index 0000000..23c8281
--- /dev/null
+++ b/test/files/neg/unchecked-abstract.scala
@@ -0,0 +1,93 @@
+trait Contravariant[-X]
+trait Invariant[X]
+trait Covariant[+X]
+
+abstract class M {
+ type H
+ type L <: H
+ type T >: L <: H
+
+ def h1(x: Contravariant[H]) = {
+ /* nowarn */ println(x.isInstanceOf[Contravariant[H]])
+ /* nowarn */ println(x.isInstanceOf[Contravariant[T]])
+ /* nowarn */ println(x.isInstanceOf[Contravariant[L]])
+ }
+ def h2(x: Contravariant[T]) = {
+ /* warn */ println(x.isInstanceOf[Contravariant[H]])
+ /* nowarn */ println(x.isInstanceOf[Contravariant[T]])
+ /* nowarn */ println(x.isInstanceOf[Contravariant[L]])
+ }
+ def h3(x: Contravariant[L]) = {
+ /* warn */ println(x.isInstanceOf[Contravariant[H]])
+ /* warn */ println(x.isInstanceOf[Contravariant[T]])
+ /* nowarn */ println(x.isInstanceOf[Contravariant[L]])
+ }
+ def h4(x: Invariant[H]) = {
+ /* nowarn */ println(x.isInstanceOf[Invariant[H]])
+ /* warn */ println(x.isInstanceOf[Invariant[T]])
+ /* warn */ println(x.isInstanceOf[Invariant[L]])
+ }
+ def h5(x: Invariant[T]) = {
+ /* warn */ println(x.isInstanceOf[Invariant[H]])
+ /* nowarn */ println(x.isInstanceOf[Invariant[T]])
+ /* warn */ println(x.isInstanceOf[Invariant[L]])
+ }
+ def h6(x: Invariant[L]) = {
+ /* warn */ println(x.isInstanceOf[Invariant[H]])
+ /* warn */ println(x.isInstanceOf[Invariant[T]])
+ /* nowarn */ println(x.isInstanceOf[Invariant[L]])
+ }
+ def h7(x: Covariant[H]) = {
+ /* nowarn */ println(x.isInstanceOf[Covariant[H]])
+ /* warn */ println(x.isInstanceOf[Covariant[T]])
+ /* warn */ println(x.isInstanceOf[Covariant[L]])
+ }
+ def h8(x: Covariant[T]) = {
+ /* nowarn */ println(x.isInstanceOf[Covariant[H]])
+ /* nowarn */ println(x.isInstanceOf[Covariant[T]])
+ /* warn */ println(x.isInstanceOf[Covariant[L]])
+ }
+ def h9(x: Covariant[L]) = {
+ /* nowarn */ println(x.isInstanceOf[Covariant[H]])
+ /* nowarn */ println(x.isInstanceOf[Covariant[T]])
+ /* nowarn */ println(x.isInstanceOf[Covariant[L]])
+ }
+}
+
+object Test extends M {
+ type H = Any
+ type T = Int
+ type L = Nothing
+
+ val conh = new Contravariant[H] { }
+ val cont = new Contravariant[T] { }
+ val conl = new Contravariant[L] { }
+
+ val invh = new Invariant[H] { }
+ val invt = new Invariant[T] { }
+ val invl = new Invariant[L] { }
+
+ val covh = new Covariant[H] { }
+ val covt = new Covariant[T] { }
+ val covl = new Covariant[L] { }
+
+ def main(args: Array[String]): Unit = {
+ h1(conh)
+ h2(conh)
+ h2(cont)
+ h3(conh)
+ h3(cont)
+ h3(conl)
+
+ h4(invh)
+ h5(invt)
+ h6(invl)
+
+ h7(covh)
+ h7(covt)
+ h7(covl)
+ h8(covt)
+ h8(covl)
+ h9(covl)
+ }
+}
diff --git a/test/files/neg/unchecked-impossible.check b/test/files/neg/unchecked-impossible.check
new file mode 100644
index 0000000..0ab371d
--- /dev/null
+++ b/test/files/neg/unchecked-impossible.check
@@ -0,0 +1,4 @@
+unchecked-impossible.scala:5: error: fruitless type test: a value of type T2[Int,Int] cannot also be a Seq[A]
+ case Seq(x) =>
+ ^
+one error found
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/neg/unchecked-impossible.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/neg/unchecked-impossible.flags
diff --git a/test/files/neg/unchecked-impossible.scala b/test/files/neg/unchecked-impossible.scala
new file mode 100644
index 0000000..985a2d0
--- /dev/null
+++ b/test/files/neg/unchecked-impossible.scala
@@ -0,0 +1,16 @@
+final case class T2[+A, +B](a: A, b: B)
+
+class A {
+ def f1 = T2(1, 2) match {
+ case Seq(x) =>
+ case _ =>
+ }
+ def f2 = T2(1, 2) match {
+ case _: T2[Int, Int] => /* nowarn */
+ case _ =>
+ }
+ def f3 = T2(1, 2) match {
+ case _: T2[_, Int] => /* nowarn */
+ case _ =>
+ }
+}
diff --git a/test/files/neg/unchecked-knowable.check b/test/files/neg/unchecked-knowable.check
new file mode 100644
index 0000000..d279427
--- /dev/null
+++ b/test/files/neg/unchecked-knowable.check
@@ -0,0 +1,7 @@
+unchecked-knowable.scala:18: error: fruitless type test: a value of type Bippy cannot also be a A1
+ /* warn */ (new Bippy).isInstanceOf[A1]
+ ^
+unchecked-knowable.scala:19: error: fruitless type test: a value of type Bippy cannot also be a B1
+ /* warn */ (new Bippy).isInstanceOf[B1]
+ ^
+two errors found
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/neg/unchecked-knowable.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/neg/unchecked-knowable.flags
diff --git a/test/files/neg/unchecked-knowable.scala b/test/files/neg/unchecked-knowable.scala
new file mode 100644
index 0000000..21624c4
--- /dev/null
+++ b/test/files/neg/unchecked-knowable.scala
@@ -0,0 +1,22 @@
+/** Knowable - only final leaves */
+sealed abstract class A1
+sealed abstract class A2 extends A1
+final class A3 extends A1
+final class A4 extends A2
+
+/** Unknowable */
+sealed abstract class B1
+sealed abstract class B2 extends B1
+sealed trait B2B extends B1
+final class B3 extends B1
+trait B4 extends B2
+
+class Bippy
+trait Dingus
+
+class A {
+ /* warn */ (new Bippy).isInstanceOf[A1]
+ /* warn */ (new Bippy).isInstanceOf[B1]
+ /* nowarn */ (null: Dingus).isInstanceOf[B1]
+ /* nowarn */ ((new Bippy): Any).isInstanceOf[A1]
+}
diff --git a/test/files/neg/unchecked-refinement.check b/test/files/neg/unchecked-refinement.check
new file mode 100644
index 0000000..d815174
--- /dev/null
+++ b/test/files/neg/unchecked-refinement.check
@@ -0,0 +1,13 @@
+unchecked-refinement.scala:17: error: abstract type U in type pattern Foo[U,U,V] is unchecked since it is eliminated by erasure
+ /* warn */ case _: Foo[U, U, V] if b => ()
+ ^
+unchecked-refinement.scala:19: error: non-variable type argument Any in type pattern Foo[Any,U,V] is unchecked since it is eliminated by erasure
+ /* warn */ case _: Foo[Any, U, V] if b => ()
+ ^
+unchecked-refinement.scala:23: error: a pattern match on a refinement type is unchecked
+ /* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy // this could/should do an instance check and not warn
+ ^
+unchecked-refinement.scala:24: error: a pattern match on a refinement type is unchecked
+ /* nowarn - todo */ case x: AnyRef { def size: Int } if b => x.size // this could/should do a static conformance test and not warn
+ ^
+four errors found
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/neg/unchecked-refinement.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/neg/unchecked-refinement.flags
diff --git a/test/files/neg/unchecked-refinement.scala b/test/files/neg/unchecked-refinement.scala
new file mode 100644
index 0000000..79ed7f1
--- /dev/null
+++ b/test/files/neg/unchecked-refinement.scala
@@ -0,0 +1,27 @@
+// a.scala
+// Thu Sep 27 09:42:16 PDT 2012
+
+trait Bar[-T1, T2, +T3] { }
+trait Foo[-T1, T2, +T3] extends Bar[T1, T2, T3]
+
+class A {
+ var b = true
+
+ def f1(x: Foo[Int, Int, Int]) = x match {
+ /* nowarn */ case _: Foo[Nothing, Int, Any] => true
+ }
+ def f2[T, U, V](x: Foo[T, U, V]) = x match {
+ /* nowarn */ case _: Foo[Nothing, U, Any] => true
+ }
+ def f3[T, U, V](x: Foo[T, U, V]) = x match {
+ /* warn */ case _: Foo[U, U, V] if b => ()
+ /* nowarn */ case _: Foo[Nothing, U, V] if b => ()
+ /* warn */ case _: Foo[Any, U, V] if b => ()
+ }
+
+ def f4(xs: List[Int]) = xs match {
+ /* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy // this could/should do an instance check and not warn
+ /* nowarn - todo */ case x: AnyRef { def size: Int } if b => x.size // this could/should do a static conformance test and not warn
+ /* nowarn */ case x: ((AnyRef { def size: Int }) @unchecked) if b => x.size
+ }
+}
diff --git a/test/files/neg/unchecked-suppress.check b/test/files/neg/unchecked-suppress.check
new file mode 100644
index 0000000..2e23d21
--- /dev/null
+++ b/test/files/neg/unchecked-suppress.check
@@ -0,0 +1,10 @@
+unchecked-suppress.scala:4: error: non-variable type argument Int in type pattern Set[Int] is unchecked since it is eliminated by erasure
+ case xs: Set[Int] => xs.head // unchecked
+ ^
+unchecked-suppress.scala:5: error: non-variable type argument String in type pattern Map[String @unchecked,String] is unchecked since it is eliminated by erasure
+ case xs: Map[String @unchecked, String] => xs.head // one unchecked, one okay
+ ^
+unchecked-suppress.scala:7: error: non-variable type argument Int in type pattern (Int, Int) => Int is unchecked since it is eliminated by erasure
+ case f: ((Int, Int) => Int) => // unchecked
+ ^
+three errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/unchecked-suppress.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/unchecked-suppress.flags
diff --git a/test/files/neg/unchecked-suppress.scala b/test/files/neg/unchecked-suppress.scala
new file mode 100644
index 0000000..7bd61a2
--- /dev/null
+++ b/test/files/neg/unchecked-suppress.scala
@@ -0,0 +1,10 @@
+class A {
+ def f(x: Any) = x match {
+ case xs: List[String @unchecked] => xs.head // okay
+ case xs: Set[Int] => xs.head // unchecked
+ case xs: Map[String @unchecked, String] => xs.head // one unchecked, one okay
+ case f: ((Int @unchecked) => (Int @unchecked)) => f(5) // okay
+ case f: ((Int, Int) => Int) => // unchecked
+ case _ => ""
+ }
+}
diff --git a/test/files/neg/unchecked.check b/test/files/neg/unchecked.check
new file mode 100644
index 0000000..2883b71
--- /dev/null
+++ b/test/files/neg/unchecked.check
@@ -0,0 +1,19 @@
+unchecked.scala:18: error: non-variable type argument String in type pattern Iterable[String] is unchecked since it is eliminated by erasure
+ case xs: Iterable[String] => xs.head // unchecked
+ ^
+unchecked.scala:22: error: non-variable type argument Any in type pattern Set[Any] is unchecked since it is eliminated by erasure
+ case xs: Set[Any] => xs.head // unchecked
+ ^
+unchecked.scala:26: error: non-variable type argument Any in type pattern Map[Any,Any] is unchecked since it is eliminated by erasure
+ case xs: Map[Any, Any] => xs.head // unchecked
+ ^
+unchecked.scala:35: error: non-variable type argument List[Nothing] in type pattern Test.Contra[List[Nothing]] is unchecked since it is eliminated by erasure
+ case xs: Contra[List[Nothing]] => xs.head // unchecked
+ ^
+unchecked.scala:50: error: non-variable type argument String in type pattern Test.Exp[String] is unchecked since it is eliminated by erasure
+ case ArrayApply(x: Exp[Array[T]], _, j: Exp[String]) => x // unchecked
+ ^
+unchecked.scala:55: error: non-variable type argument Array[T] in type pattern Test.Exp[Array[T]] is unchecked since it is eliminated by erasure
+ case ArrayApply(x: Exp[Array[T]], _, _) => x // unchecked
+ ^
+6 errors found
diff --git a/test/files/neg/unchecked.flags b/test/files/neg/unchecked.flags
new file mode 100644
index 0000000..464cc20
--- /dev/null
+++ b/test/files/neg/unchecked.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -unchecked
\ No newline at end of file
diff --git a/test/files/neg/unchecked.scala b/test/files/neg/unchecked.scala
new file mode 100644
index 0000000..e491b25
--- /dev/null
+++ b/test/files/neg/unchecked.scala
@@ -0,0 +1,74 @@
+import language.existentials
+
+object Test {
+ class Def[T]
+ class Exp[T]
+ class Contra[-T] { def head[T1 <: T] : T1 = ??? }
+ class Cov[+T] { }
+
+ case class ArrayApply[T](x: Exp[Array[T]], i: Exp[Int], j: Exp[_]) extends Def[T]
+
+ val IntArrayApply = ArrayApply[Int](new Exp[Array[Int]], new Exp[Int], new Exp[Int])
+
+ def f(x: Any) = x match {
+ case xs: Iterable[Any] => xs.head // okay
+ case _ => 0
+ }
+ def f2(x: Any) = x match {
+ case xs: Iterable[String] => xs.head // unchecked
+ case _ => 0
+ }
+ def f3(x: Any) = x match {
+ case xs: Set[Any] => xs.head // unchecked
+ case _ => 0
+ }
+ def f4(x: Any) = x match {
+ case xs: Map[Any, Any] => xs.head // unchecked
+ case _ => 0
+ }
+
+ def cf1(x: Any) = x match {
+ case xs: Contra[Nothing] => xs.head // okay
+ case _ => 0
+ }
+ def cf2(x: Any) = x match {
+ case xs: Contra[List[Nothing]] => xs.head // unchecked
+ case _ => 0
+ }
+
+ def co1(x: List[Cov[List[Int]]]) = x match {
+ case _: Seq[Cov[Seq[Any]]] => true // okay
+ case _ => false
+ }
+
+ def g[T](x: Def[T]) = x match {
+ case ArrayApply(x: Exp[Array[T]], i: Exp[Int], _) => x // okay
+ case _ => 0
+ }
+
+ def g2[T](x: Def[T]) = x match {
+ case ArrayApply(x: Exp[Array[T]], _, j: Exp[String]) => x // unchecked
+ case _ => 0
+ }
+
+ def g3[T](x: Any) = x match {
+ case ArrayApply(x: Exp[Array[T]], _, _) => x // unchecked
+ case _ => 0
+ }
+
+ def g4 = IntArrayApply match {
+ case ArrayApply(x: Exp[Array[Int]], _, _) => x // okay
+ case _ => ()
+ }
+ def g5[T](x: ArrayApply[Int]) = x match {
+ case ArrayApply(x: Exp[Array[Int]], _, _) => x // okay
+ case _ => 0
+ }
+
+ // Nope
+ //
+ // def g5 = IntArrayApply match {
+ // case ArrayApply(x: Exp[Array[String]], _, _) => x // nope
+ // case _ => ()
+ // }
+}
diff --git a/test/files/neg/unchecked2.check b/test/files/neg/unchecked2.check
new file mode 100644
index 0000000..68fdfa8
--- /dev/null
+++ b/test/files/neg/unchecked2.check
@@ -0,0 +1,43 @@
+unchecked2.scala:4: error: fruitless type test: a value of type Some[List[Int]] cannot also be a Option[List[String]] (but still might match its erasure)
+ /* warn */ Some(List(1)).isInstanceOf[Option[List[String]]]
+ ^
+unchecked2.scala:5: error: non-variable type argument Option[_] in type Option[Option[_]] is unchecked since it is eliminated by erasure
+ /* warn */ Some(123).isInstanceOf[Option[Option[_]]]
+ ^
+unchecked2.scala:6: error: fruitless type test: a value of type Some[Int] cannot also be a Option[String] (but still might match its erasure)
+ /* warn */ Some(123).isInstanceOf[Option[String]]
+ ^
+unchecked2.scala:7: error: fruitless type test: a value of type Some[Int] cannot also be a Option[List[String]] (but still might match its erasure)
+ /* warn */ Some(123).isInstanceOf[Option[List[String]]]
+ ^
+unchecked2.scala:8: error: fruitless type test: a value of type Some[Int] cannot also be a Option[List[Int => String]] (but still might match its erasure)
+ /* warn */ Some(123).isInstanceOf[Option[List[Int => String]]]
+ ^
+unchecked2.scala:9: error: fruitless type test: a value of type Some[Int] cannot also be a Option[(String, Double)] (but still might match its erasure)
+ /* warn */ Some(123).isInstanceOf[Option[(String, Double)]]
+ ^
+unchecked2.scala:10: error: fruitless type test: a value of type Some[Int] cannot also be a Option[String => Double] (but still might match its erasure)
+ /* warn */ Some(123).isInstanceOf[Option[String => Double]]
+ ^
+unchecked2.scala:14: error: non-variable type argument List[String] in type Option[List[String]] is unchecked since it is eliminated by erasure
+ /* warn */ (Some(List(1)): Any).isInstanceOf[Option[List[String]]]
+ ^
+unchecked2.scala:15: error: non-variable type argument Int in type Option[Int] is unchecked since it is eliminated by erasure
+ /* warn */ (Some(123): Any).isInstanceOf[Option[Int]]
+ ^
+unchecked2.scala:16: error: non-variable type argument String in type Option[String] is unchecked since it is eliminated by erasure
+ /* warn */ (Some(123): Any).isInstanceOf[Option[String]]
+ ^
+unchecked2.scala:17: error: non-variable type argument List[String] in type Option[List[String]] is unchecked since it is eliminated by erasure
+ /* warn */ (Some(123): Any).isInstanceOf[Option[List[String]]]
+ ^
+unchecked2.scala:18: error: non-variable type argument List[Int => String] in type Option[List[Int => String]] is unchecked since it is eliminated by erasure
+ /* warn */ (Some(123): Any).isInstanceOf[Option[List[Int => String]]]
+ ^
+unchecked2.scala:19: error: non-variable type argument (String, Double) in type Option[(String, Double)] is unchecked since it is eliminated by erasure
+ /* warn */ (Some(123): Any).isInstanceOf[Option[(String, Double)]]
+ ^
+unchecked2.scala:20: error: non-variable type argument String => Double in type Option[String => Double] is unchecked since it is eliminated by erasure
+ /* warn */ (Some(123): Any).isInstanceOf[Option[String => Double]]
+ ^
+14 errors found
diff --git a/test/files/pos/bug3097.flags b/test/files/neg/unchecked2.flags
similarity index 100%
copy from test/files/pos/bug3097.flags
copy to test/files/neg/unchecked2.flags
diff --git a/test/files/neg/unchecked2.scala b/test/files/neg/unchecked2.scala
new file mode 100644
index 0000000..616b05a
--- /dev/null
+++ b/test/files/neg/unchecked2.scala
@@ -0,0 +1,33 @@
+object Test {
+ // These warn because it can be statically shown they won't match.
+
+ /* warn */ Some(List(1)).isInstanceOf[Option[List[String]]]
+ /* warn */ Some(123).isInstanceOf[Option[Option[_]]]
+ /* warn */ Some(123).isInstanceOf[Option[String]]
+ /* warn */ Some(123).isInstanceOf[Option[List[String]]]
+ /* warn */ Some(123).isInstanceOf[Option[List[Int => String]]]
+ /* warn */ Some(123).isInstanceOf[Option[(String, Double)]]
+ /* warn */ Some(123).isInstanceOf[Option[String => Double]]
+
+ // These warn because you can't check at runtime.
+
+ /* warn */ (Some(List(1)): Any).isInstanceOf[Option[List[String]]]
+ /* warn */ (Some(123): Any).isInstanceOf[Option[Int]]
+ /* warn */ (Some(123): Any).isInstanceOf[Option[String]]
+ /* warn */ (Some(123): Any).isInstanceOf[Option[List[String]]]
+ /* warn */ (Some(123): Any).isInstanceOf[Option[List[Int => String]]]
+ /* warn */ (Some(123): Any).isInstanceOf[Option[(String, Double)]]
+ /* warn */ (Some(123): Any).isInstanceOf[Option[String => Double]]
+
+ // These don't warn.
+
+ /* nowarn */ Some(List(1)).isInstanceOf[Option[List[Int]]]
+ /* nowarn */ Some(123).isInstanceOf[Option[Int]]
+ /* nowarn */ Some(123).isInstanceOf[Some[Int]]
+ /* nowarn */ Some(123).isInstanceOf[AnyRef]
+
+ /* nowarn */ (Some(List(1)): Any).isInstanceOf[Option[_]]
+ /* nowarn */ (Some(123): Any).isInstanceOf[Option[_]]
+ /* nowarn */ (Some(123): Any).isInstanceOf[Some[_]]
+ /* nowarn */ (Some(123): Any).isInstanceOf[AnyRef]
+}
diff --git a/test/files/neg/unchecked3.check b/test/files/neg/unchecked3.check
new file mode 100644
index 0000000..f4f0c74
--- /dev/null
+++ b/test/files/neg/unchecked3.check
@@ -0,0 +1,37 @@
+unchecked3.scala:24: error: non-variable type argument Double in type pattern E1[Double] is unchecked since it is eliminated by erasure
+ /* warn */ def peerTypes2(x: B1[Int]) = x match { case _: E1[Double] => true }
+ ^
+unchecked3.scala:25: error: non-variable type argument Double in type pattern F1[Double] is unchecked since it is eliminated by erasure
+ /* warn */ def peerTypes3(x: B1[_]) = x match { case _: F1[Double] => true }
+ ^
+unchecked3.scala:28: error: non-variable type argument Int in type pattern A2[Int] is unchecked since it is eliminated by erasure
+ /* warn */ def twotypes1[T](x: B2[T, Int]) = x match { case _: A2[Int] => true }
+ ^
+unchecked3.scala:32: error: non-variable type argument Int in type pattern B2[_,Int] is unchecked since it is eliminated by erasure
+ /* warn */ def twotypes5[T](x: A2[T]) = x match { case _: B2[_, Int] => true }
+ ^
+unchecked3.scala:40: error: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure
+ /* warn */ case _: Array[List[String]] => ()
+ ^
+unchecked3.scala:43: error: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
+ /* warn */ case _: Array[Array[List[String]]] => ()
+ ^
+unchecked3.scala:50: error: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure
+ /* warn */ case _: Array[List[String]] => ()
+ ^
+unchecked3.scala:53: error: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
+ /* warn */ case _: Array[Array[List[String]]] => ()
+ ^
+unchecked3.scala:60: error: non-variable type argument String in type pattern Array[List[String]] is unchecked since it is eliminated by erasure
+ /* warn */ case _: Array[List[String]] => ()
+ ^
+unchecked3.scala:62: error: non-variable type argument Array[String] in type pattern Array[List[Array[String]]] is unchecked since it is eliminated by erasure
+ /* warn */ case _: Array[List[Array[String]]] => ()
+ ^
+unchecked3.scala:63: error: non-variable type argument String in type pattern Array[Array[List[String]]] is unchecked since it is eliminated by erasure
+ /* warn */ case _: Array[Array[List[String]]] => ()
+ ^
+unchecked3.scala:75: error: abstract type A in type pattern Set[Q.this.A] is unchecked since it is eliminated by erasure
+ /* warn */ case xs: Set[A] => xs.head
+ ^
+12 errors found
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/neg/unchecked3.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/neg/unchecked3.flags
diff --git a/test/files/neg/unchecked3.scala b/test/files/neg/unchecked3.scala
new file mode 100644
index 0000000..7b8c13e
--- /dev/null
+++ b/test/files/neg/unchecked3.scala
@@ -0,0 +1,83 @@
+sealed trait A2[T1]
+final class B2[T1, T2] extends A2[T1]
+
+sealed trait A[T]
+final class B[T] extends A[T]
+
+sealed trait A1[T]
+trait B1[T] extends A1[T]
+trait C1[T] extends A1[T]
+trait D1[T] extends A1[Int]
+trait E1[T] extends B1[Int]
+trait F1[T] extends B1[T]
+
+object MiscUnchecked {
+ /* nowarn */ def knownType1(x: A[Int]) = x match { case _: B[Int] if true => 1 }
+ /* nowarn */ def knownType2(x: B[Int]) = x match { case _: A[Int] if true => 1 }
+ /* nowarn */ def tparamLeakage1(x: Any) = x match { case Array() => 1 }
+ /* nowarn */ def tparamLeakage2(x: Any) = x match { case List() => 1 }
+
+ // E1[Double] implies B1[Int], but B1[Int] does not imply E1[Double], even if .isInstanceOf[E1[_]]
+ // F1[Int] implies B1[Int], and B1[Int] implies F1[Int]
+
+ /* nowarn */ def peerTypes1(x: B1[Int]) = x match { case _: C1[Int] => true }
+ /* warn */ def peerTypes2(x: B1[Int]) = x match { case _: E1[Double] => true }
+ /* warn */ def peerTypes3(x: B1[_]) = x match { case _: F1[Double] => true }
+ /* nowarn */ def peerTypes4(x: B1[Int]) = x match { case _: F1[Int] => true }
+
+ /* warn */ def twotypes1[T](x: B2[T, Int]) = x match { case _: A2[Int] => true }
+ /* nowarn */ def twotypes2[T](x: B2[Int, T]) = x match { case _: A2[Int] => true }
+ /* nowarn */ def twotypes3(x: A2[Int]) = x match { case _: B2[Int, _] => true }
+ /* nowarn */ def twotypes4[T](x: A2[T]) = x match { case _: B2[T, _] => true }
+ /* warn */ def twotypes5[T](x: A2[T]) = x match { case _: B2[_, Int] => true }
+}
+
+object Arrays {
+ def f1(x: Any) = x match {
+ /* nowarn */ case _: Array[Int] => ()
+ /* nowarn */ case _: Array[Boolean] => ()
+ /* nowarn */ case _: Array[String] => ()
+ /* warn */ case _: Array[List[String]] => ()
+ /* nowarn */ case _: Array[Array[String]] => ()
+ /* nowarn */ case _: Array[Array[Array[String]]] => ()
+ /* warn */ case _: Array[Array[List[String]]] => ()
+ }
+
+ def f2(x: Array[_]) = x match {
+ /* nowarn */ case _: Array[Int] => ()
+ /* nowarn */ case _: Array[Boolean] => ()
+ /* nowarn */ case _: Array[String] => ()
+ /* warn */ case _: Array[List[String]] => ()
+ /* nowarn */ case _: Array[Array[String]] => ()
+ /* nowarn */ case _: Array[Array[Array[String]]] => ()
+ /* warn */ case _: Array[Array[List[String]]] => ()
+ }
+
+ def f3[T](x: Array[T]) = x match {
+ /* nowarn */ case _: Array[Int] => ()
+ /* nowarn */ case _: Array[Boolean] => ()
+ /* nowarn */ case _: Array[String] => ()
+ /* warn */ case _: Array[List[String]] => ()
+ /* nowarn */ case _: Array[Array[String]] => ()
+ /* warn */ case _: Array[List[Array[String]]] => ()
+ /* warn */ case _: Array[Array[List[String]]] => ()
+ }
+}
+
+object Matching {
+ class Q {
+ type A
+ type B <: A
+
+ def f(xs: Traversable[B]) = xs match {
+ /* nowarn */ case xs: List[A] => xs.head
+ /* nowarn */ case xs: Seq[B] => xs.head
+ /* warn */ case xs: Set[A] => xs.head
+ }
+ def f2[T <: B](xs: Traversable[T]) = xs match {
+ /* nowarn */ case xs: List[B with T] => xs.head
+ /* nowarn */ case xs: Seq[A] => xs.head
+ /* nowarn */ case xs: Set[T] => xs.head
+ }
+ }
+}
diff --git a/test/files/neg/unicode-unterminated-quote.check b/test/files/neg/unicode-unterminated-quote.check
new file mode 100644
index 0000000..5085505
--- /dev/null
+++ b/test/files/neg/unicode-unterminated-quote.check
@@ -0,0 +1,7 @@
+unicode-unterminated-quote.scala:2: error: unclosed string literal
+ val x = /u0022
+ ^
+unicode-unterminated-quote.scala:2: error: '}' expected but eof found.
+ val x = /u0022
+ ^
+two errors found
diff --git a/test/files/neg/unicode-unterminated-quote.scala b/test/files/neg/unicode-unterminated-quote.scala
new file mode 100644
index 0000000..bb6eab6
--- /dev/null
+++ b/test/files/neg/unicode-unterminated-quote.scala
@@ -0,0 +1,2 @@
+class A {
+ val x = \u0022
\ No newline at end of file
diff --git a/test/files/neg/unit-returns-value.check b/test/files/neg/unit-returns-value.check
index 18368f4..ab458a3 100644
--- a/test/files/neg/unit-returns-value.check
+++ b/test/files/neg/unit-returns-value.check
@@ -1,4 +1,7 @@
+unit-returns-value.scala:4: error: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ if (b) return 5
+ ^
unit-returns-value.scala:4: error: enclosing method f has result type Unit: return value discarded
if (b) return 5
^
-one error found
+two errors found
diff --git a/test/files/neg/unit2anyref.check b/test/files/neg/unit2anyref.check
index 10fe186..6d11461 100644
--- a/test/files/neg/unit2anyref.check
+++ b/test/files/neg/unit2anyref.check
@@ -1,8 +1,6 @@
unit2anyref.scala:2: error: type mismatch;
found : Unit
required: AnyRef
-Note: Unit is not implicitly converted to AnyRef. You can safely
-pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so.
val x: AnyRef = () // this should not succeed.
^
one error found
diff --git a/test/files/neg/unreachablechar.flags b/test/files/neg/unreachablechar.flags
new file mode 100644
index 0000000..809e9ff
--- /dev/null
+++ b/test/files/neg/unreachablechar.flags
@@ -0,0 +1 @@
+ -Xoldpatmat
diff --git a/test/files/neg/valueclasses-doubledefs.check b/test/files/neg/valueclasses-doubledefs.check
new file mode 100644
index 0000000..556d7a0
--- /dev/null
+++ b/test/files/neg/valueclasses-doubledefs.check
@@ -0,0 +1,7 @@
+valueclasses-doubledefs.scala:5: error: double definition:
+method apply:(x: Meter)String and
+method apply:(x: Double)String at line 4
+have same type after erasure: (x: Double)String
+ def apply(x: Meter) = x.toString
+ ^
+one error found
diff --git a/test/files/neg/valueclasses-doubledefs.scala b/test/files/neg/valueclasses-doubledefs.scala
new file mode 100644
index 0000000..87bcf8f
--- /dev/null
+++ b/test/files/neg/valueclasses-doubledefs.scala
@@ -0,0 +1,6 @@
+class Meter(val x: Double) extends AnyVal
+
+class Foo {
+ def apply(x: Double) = x.toString
+ def apply(x: Meter) = x.toString
+}
diff --git a/test/files/neg/valueclasses-impl-restrictions.check b/test/files/neg/valueclasses-impl-restrictions.check
new file mode 100644
index 0000000..0af9173
--- /dev/null
+++ b/test/files/neg/valueclasses-impl-restrictions.check
@@ -0,0 +1,13 @@
+valueclasses-impl-restrictions.scala:3: error: implementation restriction: nested object is not allowed in value class
+This restriction is planned to be removed in subsequent releases.
+ object X
+ ^
+valueclasses-impl-restrictions.scala:9: error: implementation restriction: nested trait is not allowed in value class
+This restriction is planned to be removed in subsequent releases.
+ trait I2 {
+ ^
+valueclasses-impl-restrictions.scala:23: error: implementation restriction: nested class is not allowed in value class
+This restriction is planned to be removed in subsequent releases.
+ private[this] class I2(val q: String)
+ ^
+three errors found
diff --git a/test/files/neg/valueclasses-impl-restrictions.scala b/test/files/neg/valueclasses-impl-restrictions.scala
new file mode 100644
index 0000000..f0577a9
--- /dev/null
+++ b/test/files/neg/valueclasses-impl-restrictions.scala
@@ -0,0 +1,29 @@
+class M(val t: Int) extends AnyVal {
+ def lazyString = {
+ object X
+ () => X
+ }
+}
+
+class X1(val s: String) extends AnyVal {
+ trait I2 {
+ val q: String
+ def z = s + q
+ }
+
+ def y(x: X1) = {
+ val i2 = new I2 { val q = x.s } // allowed as of SI-7571
+ i2.z
+
+ { case x => x } : PartialFunction[Int, Int] // allowed
+ }
+}
+
+class X2(val s: String) extends AnyVal {
+ private[this] class I2(val q: String)
+
+ def y(i: Int) = {
+ val i2 = new I2(i.toString)
+ i2.q + s
+ }
+}
diff --git a/test/files/neg/valueclasses-pavlov.check b/test/files/neg/valueclasses-pavlov.check
new file mode 100644
index 0000000..031589e
--- /dev/null
+++ b/test/files/neg/valueclasses-pavlov.check
@@ -0,0 +1,7 @@
+valueclasses-pavlov.scala:8: error: double definition:
+method foo:(x: Box2)String and
+method foo:(x: String)String at line 7
+have same type after erasure: (x: String)String
+ def foo(x: Box2) = "foo(Box2): ok"
+ ^
+one error found
diff --git a/test/files/neg/valueclasses-pavlov.scala b/test/files/neg/valueclasses-pavlov.scala
new file mode 100644
index 0000000..a5858b2
--- /dev/null
+++ b/test/files/neg/valueclasses-pavlov.scala
@@ -0,0 +1,23 @@
+trait Foo[T <: AnyVal] extends Any {
+ def foo(x: String): String
+ def foo(x: T): String
+}
+
+class Box1(val value: String) extends AnyVal with Foo[Box2] {
+ def foo(x: String) = "foo(String): ok"
+ def foo(x: Box2) = "foo(Box2): ok"
+}
+
+class Box2(val value: String) extends AnyVal
+
+
+object test2a {
+
+ def main(args: Array[String]) {
+ val b1 = new Box1(null)
+ val b2 = new Box2(null)
+ val f: Foo[Box2] = b1
+ println(f.foo(""))
+ println(f.foo(b2))
+ }
+}
diff --git a/test/files/neg/valueclasses.check b/test/files/neg/valueclasses.check
new file mode 100644
index 0000000..3b82a83
--- /dev/null
+++ b/test/files/neg/valueclasses.check
@@ -0,0 +1,43 @@
+valueclasses.scala:3: error: only classes (not traits) are allowed to extend AnyVal
+trait T extends AnyVal // fail
+ ^
+valueclasses.scala:6: error: value class may not be a member of another class
+ class Bar(x: Int) extends AnyVal // fail
+ ^
+valueclasses.scala:8: error: value class may not be a local class
+ class Baz(x: Int) extends AnyVal // fail
+ ^
+valueclasses.scala:12: error: value class needs to have exactly one public val parameter
+class V1 extends AnyVal // fail
+ ^
+valueclasses.scala:14: error: value class needs to have a publicly accessible val parameter
+class V2(private[test] val x: Int) extends AnyVal // fail
+ ^
+valueclasses.scala:15: error: value class needs to have a publicly accessible val parameter
+class V3(protected[test] val x: Int) extends AnyVal // fail
+ ^
+valueclasses.scala:16: error: value class needs to have a publicly accessible val parameter
+class V4(protected val x: Int) extends AnyVal // fail
+ ^
+valueclasses.scala:17: error: value class needs to have a publicly accessible val parameter
+class V5(private val x: Int) extends AnyVal // fail
+ ^
+valueclasses.scala:19: error: value class needs to have exactly one public val parameter
+class V6(val x: Int, val y: String) extends AnyVal // fail
+ ^
+valueclasses.scala:20: error: field definition is not allowed in value class
+class V7(val x: Int, private[this] val y: String) extends AnyVal // fail
+ ^
+valueclasses.scala:21: error: value class needs to have exactly one public val parameter
+class V8(var x: Int) extends AnyVal // fail
+ ^
+valueclasses.scala:24: error: field definition is not allowed in value class
+ val y = x // fail
+ ^
+valueclasses.scala:29: error: type parameter of value class may not be specialized
+class V12[@specialized T, U](val x: (T, U)) extends AnyVal // fail
+ ^
+valueclasses.scala:31: error: value class needs to have exactly one public val parameter
+class V13(x: Int) extends AnyVal // fail
+ ^
+14 errors found
diff --git a/test/files/neg/valueclasses.scala b/test/files/neg/valueclasses.scala
new file mode 100644
index 0000000..7cac94a
--- /dev/null
+++ b/test/files/neg/valueclasses.scala
@@ -0,0 +1,31 @@
+package test
+
+trait T extends AnyVal // fail
+
+class Foo {
+ class Bar(x: Int) extends AnyVal // fail
+ def foo() {
+ class Baz(x: Int) extends AnyVal // fail
+ }
+}
+
+class V1 extends AnyVal // fail
+
+class V2(private[test] val x: Int) extends AnyVal // fail
+class V3(protected[test] val x: Int) extends AnyVal // fail
+class V4(protected val x: Int) extends AnyVal // fail
+class V5(private val x: Int) extends AnyVal // fail
+
+class V6(val x: Int, val y: String) extends AnyVal // fail
+class V7(val x: Int, private[this] val y: String) extends AnyVal // fail
+class V8(var x: Int) extends AnyVal // fail
+
+class V9(val x: Int) extends AnyVal {
+ val y = x // fail
+}
+
+class V10[T](val x: T) extends AnyVal // ok
+class V11[T](val x: List[T]) extends AnyVal // ok
+class V12[@specialized T, U](val x: (T, U)) extends AnyVal // fail
+
+class V13(x: Int) extends AnyVal // fail
diff --git a/test/files/neg/varargs.check b/test/files/neg/varargs.check
index 676a611..424e244 100644
--- a/test/files/neg/varargs.check
+++ b/test/files/neg/varargs.check
@@ -1,10 +1,10 @@
-varargs.scala:16: error: A method with a varargs annotation produces a forwarder method with the same signature (a: Int, b: Array[java.lang.String])Int as an existing method.
+varargs.scala:16: error: A method with a varargs annotation produces a forwarder method with the same signature (a: Int, b: Array[String])Int as an existing method.
@varargs def v1(a: Int, b: String*) = a + b.length
^
varargs.scala:19: error: A method without repeated parameters cannot be annotated with the `varargs` annotation.
@varargs def nov(a: Int) = 0
^
-varargs.scala:21: error: A method with a varargs annotation produces a forwarder method with the same signature (a: Int, b: Array[java.lang.String])Int as an existing method.
+varargs.scala:21: error: A method with a varargs annotation produces a forwarder method with the same signature (a: Int, b: Array[String])Int as an existing method.
@varargs def v2(a: Int, b: String*) = 0
^
three errors found
diff --git a/test/files/neg/variances.check b/test/files/neg/variances.check
index 4eaab56..0643e53 100644
--- a/test/files/neg/variances.check
+++ b/test/files/neg/variances.check
@@ -1,16 +1,19 @@
variances.scala:4: error: covariant type A occurs in contravariant position in type test.Vector[A] of value x
- def append(x: Vector[A]): Vector[A]
+ def append(x: Vector[A]): Vector[A]
^
-variances.scala:14: error: covariant type A occurs in contravariant position in type A of value a
- private[this] def setA(a : A) = this.a = a
- ^
-variances.scala:16: error: covariant type A occurs in invariant position in supertype test.C[A] with ScalaObject of object Baz
+variances.scala:18: error: covariant type A occurs in contravariant position in type A of value a
+ private def setA3(a : A) = this.a = a
+ ^
+variances.scala:19: error: covariant type A occurs in contravariant position in type A of value a
+ protected def setA4(a : A) = this.a = a
+ ^
+variances.scala:21: error: covariant type A occurs in invariant position in supertype test.C[A] of object Baz
object Baz extends C[A]
^
-variances.scala:63: error: covariant type A occurs in contravariant position in type => test.Covariant.T[A]{val m: A => A} of value x
+variances.scala:74: error: covariant type A occurs in contravariant position in type => test.Covariant.T[A]{val m: A => A} of value x
val x: T[A] {
^
-variances.scala:79: error: covariant type T occurs in contravariant position in type => test.TestAlias.B[C.this.A] of method foo
+variances.scala:90: error: covariant type T occurs in contravariant position in type => test.TestAlias.B[C.this.A] of method foo
def foo: B[A]
^
-5 errors found
+6 errors found
diff --git a/test/files/neg/variances.scala b/test/files/neg/variances.scala
index 57abba1..10ca111 100644
--- a/test/files/neg/variances.scala
+++ b/test/files/neg/variances.scala
@@ -1,7 +1,7 @@
package test
trait Vector[+A] {
- def append(x: Vector[A]): Vector[A]
+ def append(x: Vector[A]): Vector[A]
private[this] def append3(x: Vector[A]): Vector[A] = append(x)
}
@@ -11,32 +11,40 @@ object Covariant {
class Foo[+A] {
private[this] var a : A = _
def getA : A = a
- private[this] def setA(a : A) = this.a = a
-
+ // allowed
+ private[this] def setA1(a : A) = this.a = a
+ protected[this] def setA2(a : A) = this.a = a
+ // forbidden
+ private def setA3(a : A) = this.a = a
+ protected def setA4(a : A) = this.a = a
+
object Baz extends C[A]
trait Convert[B] {
def b2a(b : B) : A
- def doit(b : B) = setA(b2a(b))
+ def doit1(b : B) = setA1(b2a(b))
+ def doit2(b : B) = setA2(b2a(b))
+ def doit3(b : B) = setA3(b2a(b))
+ def doit4(b : B) = setA4(b2a(b))
}
}
class Foo2[+A] {
private[this] var a : A = _
def getA : A = a
- private[this] def setA(a : A) = this.a = a
-
+ private[this] def setA(a : A) = this.a = a
+
{
trait Convert[B] {
def b2a(b : B) : A
def doit(b : B) = setA(b2a(b))
}
- ()
+ println("")
}
}
class Foo3[+A] {
private[this] var a : A = _
def getA : A = a
- private[this] def setA(a : A) = this.a = a
-
+ private[this] def setA(a : A) = this.a = a
+
private[this] trait Convert[B] {
def b2a(b : B) : A
def doit(b : B) = setA(b2a(b))
@@ -54,7 +62,10 @@ object Covariant {
}
def main(args : Array[String]) {
val test = new Test
- test.c.doit(test.b)
+ test.c.doit1(test.b)
+ test.c.doit2(test.b)
+ test.c.doit3(test.b)
+ test.c.doit4(test.b)
val x : java.lang.Character = test.a.getA
Console.println("XXX " + x)
}
diff --git a/test/files/neg/virtpatmat_reach_null.check b/test/files/neg/virtpatmat_reach_null.check
new file mode 100644
index 0000000..595c8ec
--- /dev/null
+++ b/test/files/neg/virtpatmat_reach_null.check
@@ -0,0 +1,4 @@
+virtpatmat_reach_null.scala:13: error: unreachable code
+ case _ => // unreachable
+ ^
+one error found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/virtpatmat_reach_null.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/virtpatmat_reach_null.flags
diff --git a/test/files/neg/virtpatmat_reach_null.scala b/test/files/neg/virtpatmat_reach_null.scala
new file mode 100644
index 0000000..6314a5b
--- /dev/null
+++ b/test/files/neg/virtpatmat_reach_null.scala
@@ -0,0 +1,19 @@
+sealed abstract class Const {
+ final def excludes(other: Const) =
+ (this, other) match {
+ case (_, NullConst) =>
+ case (NullConst, _) =>
+ case (_: ValueConst, _: ValueConst) =>
+ case (_: ValueConst, _: TypeConst) =>
+ case (_: TypeConst, _: ValueConst) =>
+ case (_: TypeConst, _: TypeConst) =>
+ case (null, _) =>
+ case (_, null) =>
+ case null =>
+ case _ => // unreachable
+ }
+}
+
+sealed class TypeConst extends Const
+sealed class ValueConst extends Const
+case object NullConst extends Const
diff --git a/test/files/neg/virtpatmat_reach_sealed_unsealed.check b/test/files/neg/virtpatmat_reach_sealed_unsealed.check
new file mode 100644
index 0000000..10638ef
--- /dev/null
+++ b/test/files/neg/virtpatmat_reach_sealed_unsealed.check
@@ -0,0 +1,14 @@
+virtpatmat_reach_sealed_unsealed.scala:16: error: match may not be exhaustive.
+It would fail on the following input: false
+ (true: Boolean) match { case true => } // not exhaustive, but reachable
+ ^
+virtpatmat_reach_sealed_unsealed.scala:18: error: unreachable code
+ (true: Boolean) match { case true => case false => case _ => } // exhaustive, last case is unreachable
+ ^
+virtpatmat_reach_sealed_unsealed.scala:19: error: unreachable code
+ (true: Boolean) match { case true => case false => case _: Boolean => } // exhaustive, last case is unreachable
+ ^
+virtpatmat_reach_sealed_unsealed.scala:20: error: unreachable code
+ (true: Boolean) match { case true => case false => case _: Any => } // exhaustive, last case is unreachable
+ ^
+four errors found
diff --git a/test/files/neg/caseinherit.flags b/test/files/neg/virtpatmat_reach_sealed_unsealed.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/neg/virtpatmat_reach_sealed_unsealed.flags
diff --git a/test/files/neg/virtpatmat_reach_sealed_unsealed.scala b/test/files/neg/virtpatmat_reach_sealed_unsealed.scala
new file mode 100644
index 0000000..13911db
--- /dev/null
+++ b/test/files/neg/virtpatmat_reach_sealed_unsealed.scala
@@ -0,0 +1,21 @@
+sealed abstract class X
+sealed case class A(x: Int) extends X
+
+// test reachability on mixed sealed / non-sealed matches
+object Test extends App {
+ val B: X = A(0)
+ val C: X = A(1)
+
+ // all cases are reachable and the match is exhaustive
+ (C: X) match {
+ case B =>
+ case C =>
+ case A(_) =>
+ }
+
+ (true: Boolean) match { case true => } // not exhaustive, but reachable
+ (true: Boolean) match { case true => case false => } // exhaustive, reachable
+ (true: Boolean) match { case true => case false => case _ => } // exhaustive, last case is unreachable
+ (true: Boolean) match { case true => case false => case _: Boolean => } // exhaustive, last case is unreachable
+ (true: Boolean) match { case true => case false => case _: Any => } // exhaustive, last case is unreachable
+}
\ No newline at end of file
diff --git a/test/files/neg/virtpatmat_unreach_select.check b/test/files/neg/virtpatmat_unreach_select.check
new file mode 100644
index 0000000..3771971
--- /dev/null
+++ b/test/files/neg/virtpatmat_unreach_select.check
@@ -0,0 +1,4 @@
+virtpatmat_unreach_select.scala:10: error: unreachable code
+ case WARNING.id => // unreachable
+ ^
+one error found
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/neg/virtpatmat_unreach_select.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/neg/virtpatmat_unreach_select.flags
diff --git a/test/files/neg/virtpatmat_unreach_select.scala b/test/files/neg/virtpatmat_unreach_select.scala
new file mode 100644
index 0000000..c46ff15
--- /dev/null
+++ b/test/files/neg/virtpatmat_unreach_select.scala
@@ -0,0 +1,12 @@
+class Test {
+ object severity extends Enumeration
+ class Severity(val id: Int) extends severity.Value
+ val INFO = new Severity(0)
+ val WARNING = new Severity(1)
+
+ (0: Int) match {
+ case WARNING.id =>
+ case INFO.id => // reachable
+ case WARNING.id => // unreachable
+ }
+}
diff --git a/test/files/neg/wrong-args-for-none.check b/test/files/neg/wrong-args-for-none.check
new file mode 100644
index 0000000..d3b2d57
--- /dev/null
+++ b/test/files/neg/wrong-args-for-none.check
@@ -0,0 +1,4 @@
+wrong-args-for-none.scala:5: error: wrong number of arguments for pattern Test.Foo(x: Int,y: Int)
+ def f(x: Any) = x match { case Bar(Foo(5)) => }
+ ^
+one error found
diff --git a/test/files/neg/wrong-args-for-none.scala b/test/files/neg/wrong-args-for-none.scala
new file mode 100644
index 0000000..1caa478
--- /dev/null
+++ b/test/files/neg/wrong-args-for-none.scala
@@ -0,0 +1,6 @@
+object Test {
+ case class Foo(x: Int, y: Int)
+ case class Bar(x: AnyRef)
+
+ def f(x: Any) = x match { case Bar(Foo(5)) => }
+}
diff --git a/test/files/pos/CustomGlobal.scala b/test/files/pos/CustomGlobal.scala
new file mode 100644
index 0000000..30bf227
--- /dev/null
+++ b/test/files/pos/CustomGlobal.scala
@@ -0,0 +1,33 @@
+package custom
+
+import scala.tools.nsc._, reporters._, typechecker._
+
+/** Demonstration of a custom Global with a custom Typer,
+ * decoupled from trunk. Demonstration:
+ *
+{{{
+scalac -d . CustomGlobal.scala && scala -nc -Yglobal-class custom.CustomGlobal \
+ -e 'class Bippy(x: Int) ; def f = new Bippy(5)'
+
+I'm typing a Bippy! It's a ClassDef.
+I'm typing a Bippy! It's a Ident.
+I'm typing a Bippy! It's a DefDef.
+}}}
+ *
+ */
+class CustomGlobal(currentSettings: Settings, reporter: Reporter) extends Global(currentSettings, reporter) {
+ override lazy val analyzer = new {
+ val global: CustomGlobal.this.type = CustomGlobal.this
+ } with Analyzer {
+ override def newTyper(context: Context): Typer = new CustomTyper(context)
+
+ class CustomTyper(context : Context) extends Typer(context) {
+ override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+ if (tree.summaryString contains "Bippy")
+ println("I'm typing a Bippy! It's a " + tree.shortClass + ".")
+
+ super.typed(tree, mode, pt)
+ }
+ }
+ }
+}
diff --git a/test/files/pos/MailBox.scala b/test/files/pos/MailBox.scala
index 35b38f4..2a3f02d 100644
--- a/test/files/pos/MailBox.scala
+++ b/test/files/pos/MailBox.scala
@@ -1,8 +1,9 @@
package test;
-import scala.concurrent._;
+import scala.actors.TIMEOUT;
class MailBox {
+
private class LinkedList[a] {
var elem: a = _;
var next: LinkedList[a] = null;
diff --git a/test/files/pos/SI-4012-a.scala b/test/files/pos/SI-4012-a.scala
new file mode 100644
index 0000000..7fceeea
--- /dev/null
+++ b/test/files/pos/SI-4012-a.scala
@@ -0,0 +1,7 @@
+trait C1[+A] {
+ def head: A = sys.error("")
+}
+trait C2[@specialized +A] extends C1[A] {
+ override def head: A = super.head
+}
+class C3 extends C2[Char]
diff --git a/test/files/pos/SI-4012-b.scala b/test/files/pos/SI-4012-b.scala
new file mode 100644
index 0000000..6bc8592
--- /dev/null
+++ b/test/files/pos/SI-4012-b.scala
@@ -0,0 +1,15 @@
+trait Super[@specialized(Int) A] {
+ def superb = 0
+}
+
+object Sub extends Super[Int] {
+ // it is expected that super[Super].superb crashes, since
+ // specialization does parent class rewiring, and the super
+ // of Sub becomes Super$mcII$sp and not Super. But I consider
+ // this normal behavior -- if you want, I can modify duplicatiors
+ // to make this work, but I consider it's best to keep this
+ // let the user know Super is not the superclass anymore.
+ // super[Super].superb - Vlad
+ super.superb // okay
+ override def superb: Int = super.superb // okay
+}
diff --git a/test/files/pos/SI-5788.scala b/test/files/pos/SI-5788.scala
new file mode 100644
index 0000000..f292461
--- /dev/null
+++ b/test/files/pos/SI-5788.scala
@@ -0,0 +1,3 @@
+trait Foo[@specialized(Int) A] {
+ final def bar(a:A):A = bar(a)
+}
diff --git a/test/files/pos/SI-7060.flags b/test/files/pos/SI-7060.flags
new file mode 100644
index 0000000..c926ad6
--- /dev/null
+++ b/test/files/pos/SI-7060.flags
@@ -0,0 +1 @@
+-Yinline -Ydead-code
diff --git a/test/files/pos/SI-7060.scala b/test/files/pos/SI-7060.scala
new file mode 100644
index 0000000..c87620e
--- /dev/null
+++ b/test/files/pos/SI-7060.scala
@@ -0,0 +1,11 @@
+object Test {
+
+ @inline final def mbarray_apply_minibox(array: Any, tag: Byte): Long =
+ if (tag == 0) {
+ array.asInstanceOf[Array[Long]](0)
+ } else
+ array.asInstanceOf[Array[Byte]](0).toLong
+
+ def crash_method(): Unit =
+ mbarray_apply_minibox(null, 0)
+}
diff --git a/test/files/pos/SI-7100.scala b/test/files/pos/SI-7100.scala
new file mode 100644
index 0000000..7cb6356
--- /dev/null
+++ b/test/files/pos/SI-7100.scala
@@ -0,0 +1,6 @@
+class Buffer {
+ def f[@specialized(Int) T](): T = 0 match {
+ case 0 => 0.asInstanceOf[T]
+ case 1 => 0.asInstanceOf[T]
+ }
+}
diff --git a/test/files/pos/SI-7638.scala b/test/files/pos/SI-7638.scala
new file mode 100644
index 0000000..da16e0b
--- /dev/null
+++ b/test/files/pos/SI-7638.scala
@@ -0,0 +1,51 @@
+package miniboxing.tests.compile
+
+trait Ordering[@specialized(Int) A] {
+ def eqv(x: Array[A], y: Array[A]): Boolean = false
+}
+
+trait ArrayVectorOrder[@specialized(Int) A] extends Ordering[A] {
+ override def eqv(x: Array[A], y: Array[A]): Boolean = super.eqv(x, y)
+}
+
+object vectorOrder {
+ implicit def arrayOrder[@specialized(Int) A]() =
+ /*
+ * Before applying patch:
+ *
+ * while compiling: SI-7638.scala
+ * during phase: mixin
+ * library version: version 2.10.3-20130625-164027-d22e8d282c
+ * compiler version: version 2.10.3-20130627-153946-54cb6af7db
+ * reconstructed args:
+ *
+ * last tree to typer: TypeTree(class Array)
+ * symbol: class Array in package scala (flags: final)
+ * symbol definition: final class Array[T >: ? <: ?] extends Object
+ * tpe: Array[Int]
+ * symbol owners: class Array -> package scala
+ * context owners: anonymous class anon$1 -> package compile
+ *
+ * == Expanded type of tree ==
+ *
+ * TypeRef(
+ * TypeSymbol(final class Array[T >: ? <: ?] extends Object)
+ * args = List(TypeRef(TypeSymbol(final abstract class Int extends )))
+ * )
+ *
+ * unhandled exception while transforming SI-7638.scala
+ * error: uncaught exception during compilation: java.lang.UnsupportedOperationException
+ * error: java.lang.UnsupportedOperationException: tail of empty list
+ * at scala.collection.immutable.Nil$.tail(List.scala:339)
+ * at scala.collection.immutable.Nil$.tail(List.scala:334)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$rebindSuper$1.apply(Mixin.scala:123)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$rebindSuper$1.apply(Mixin.scala:122)
+ * at scala.reflect.internal.SymbolTable.atPhase(SymbolTable.scala:207)
+ * at scala.reflect.internal.SymbolTable.afterPhase(SymbolTable.scala:216)
+ * at scala.tools.nsc.Global.afterPickler(Global.scala:1104)
+ * at scala.tools.nsc.transform.Mixin.scala$tools$nsc$transform$Mixin$$rebindSuper(Mixin.scala:122)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$mixinTraitMembers$1$1.apply(Mixin.scala:339)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$mixinTraitMembers$1$1.apply(Mixin.scala:292)
+ */
+ new ArrayVectorOrder[A] { }
+}
diff --git a/test/files/pos/Transactions.scala b/test/files/pos/Transactions.scala
index 9b43883..525eff7 100644
--- a/test/files/pos/Transactions.scala
+++ b/test/files/pos/Transactions.scala
@@ -1,4 +1,4 @@
-package scala.concurrent
+package scala.concurrent1
class AbortException extends RuntimeException
diff --git a/test/files/pos/annot-inner.scala b/test/files/pos/annot-inner.scala
index f2ecb5d..9f155a5 100644
--- a/test/files/pos/annot-inner.scala
+++ b/test/files/pos/annot-inner.scala
@@ -1,5 +1,5 @@
object test {
- class annot extends Annotation
+ class annot extends scala.annotation.Annotation
def foo {
@annot def bar(i: Int): Int = i
diff --git a/test/files/pos/annotDepMethType.scala b/test/files/pos/annotDepMethType.scala
index b5e7cb9..079ca62 100644
--- a/test/files/pos/annotDepMethType.scala
+++ b/test/files/pos/annotDepMethType.scala
@@ -1,4 +1,4 @@
-case class pc(calls: Any*) extends TypeConstraint
+case class pc(calls: Any*) extends annotation.TypeConstraint
object Main {
class C0 { def baz: String = "" }
diff --git a/test/files/pos/annotated-original/C_2.scala b/test/files/pos/annotated-original/C_2.scala
new file mode 100644
index 0000000..36a09ff
--- /dev/null
+++ b/test/files/pos/annotated-original/C_2.scala
@@ -0,0 +1,7 @@
+object Bug {
+ M.m {
+ def s = ""
+ M.m(s): @unchecked // error: macro has not been expanded.
+ ???
+ }
+}
diff --git a/test/files/pos/annotated-original/M_1.scala b/test/files/pos/annotated-original/M_1.scala
new file mode 100644
index 0000000..01654e0
--- /dev/null
+++ b/test/files/pos/annotated-original/M_1.scala
@@ -0,0 +1,7 @@
+import language.experimental.macros
+import reflect.macros.Context
+
+object M {
+ def impl(c: Context)(a: c.Expr[Any]) = c.Expr[Any](c.resetLocalAttrs(a.tree))
+ def m(a: Any) = macro impl
+}
diff --git a/test/files/jvm/bug680.check b/test/files/pos/annotated-treecopy.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/pos/annotated-treecopy.check
diff --git a/test/files/pos/annotated-treecopy.flags b/test/files/pos/annotated-treecopy.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/pos/annotated-treecopy.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
new file mode 100644
index 0000000..d92fbca
--- /dev/null
+++ b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
@@ -0,0 +1,53 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+import collection.mutable.ListBuffer
+import collection.mutable.Stack
+
+object Macros {
+ trait TypedFunction {
+ def tree: scala.reflect.runtime.universe.Tree
+ val typeIn: String
+ val typeOut: String
+ }
+
+ def tree[T,U](f:Function1[T,U]): Function1[T,U] = macro tree_impl[T,U]
+
+ def tree_impl[T:c.WeakTypeTag,U:c.WeakTypeTag](c: Context)
+ (f:c.Expr[Function1[T,U]]): c.Expr[Function1[T,U]] = {
+ import c.universe._
+ val ttag = c.weakTypeTag[U]
+ f match {
+ case Expr(Function(List(ValDef(_,n,tp,_)),b)) =>
+ // normalize argument name
+ var b1 = new Transformer {
+ override def transform(tree: Tree): Tree = tree match {
+ case Ident(x) if (x==n) => Ident(newTermName("_arg"))
+ case tt @ TypeTree() if tt.original != null => TypeTree(tt.tpe) setOriginal transform(tt.original)
+ // without the fix to LazyTreeCopier.Annotated, we would need to uncomment the line below to make the macro work
+ // that's because the pattern match in the input expression gets expanded into Typed(<x>, TypeTree(<Int @unchecked>))
+ // with the original of the TypeTree being Annotated(<@unchecked>, Ident(<x>))
+ // then the macro tries to replace all Ident(<x>) trees with Ident(<_arg>), recurs into the original of the TypeTree, changes it,
+ // but leaves the <@unchecked> part untouched. this signals the misguided LazyTreeCopier that the Annotated tree hasn't been modified,
+ // so the original tree should be copied over and returned => crash when later <x: @unchecked> re-emerges from TypeTree.original
+ // case Annotated(annot, arg) => treeCopy.Annotated(tree, transform(annot).duplicate, transform(arg))
+ case _ => super.transform(tree)
+ }
+ }.transform(b)
+
+ val reifiedTree = c.reifyTree(treeBuild.mkRuntimeUniverseRef, EmptyTree, b1)
+ val reifiedExpr = c.Expr[scala.reflect.runtime.universe.Expr[T => U]](reifiedTree)
+ val template =
+ c.universe.reify(new (T => U) with TypedFunction {
+ override def toString = c.literal(tp+" => "+ttag.tpe+" { "+b1.toString+" } ").splice // DEBUG
+ def tree = reifiedExpr.splice.tree
+ val typeIn = c.literal(tp.toString).splice
+ val typeOut = c.literal(ttag.tpe.toString).splice
+ def apply(_arg: T): U = c.Expr[U](b1)(ttag.asInstanceOf[c.WeakTypeTag[U]]).splice
+ })
+ val untyped = c.resetLocalAttrs(template.tree)
+
+ c.Expr[T => U](untyped)
+ case _ => sys.error("Bad function type")
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/annotated-treecopy/Test_2.scala b/test/files/pos/annotated-treecopy/Test_2.scala
new file mode 100644
index 0000000..836e0d8
--- /dev/null
+++ b/test/files/pos/annotated-treecopy/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ import Macros._
+ // tree { (x:((Int,Int,Int),(Int,Int,Int))) => { val y=x; val ((r1,m1,c1),(r2,m2,c2))=y; (r1, m1 + m2 + r1 * c1 * c2, c2) } }
+ tree { (x:((Int,Int,Int),(Int,Int,Int))) => { val ((r1,m1,c1),(r2,m2,c2))=x; (r1, m1 + m2 + r1 * c1 * c2, c2) } }
+}
\ No newline at end of file
diff --git a/test/files/pos/annotations.scala b/test/files/pos/annotations.scala
index 0819379..706a715 100644
--- a/test/files/pos/annotations.scala
+++ b/test/files/pos/annotations.scala
@@ -1,5 +1,5 @@
-class ann(i: Int) extends Annotation
-class cfann(x: String) extends ClassfileAnnotation
+class ann(i: Int) extends scala.annotation.Annotation
+class cfann(x: String) extends annotation.ClassfileAnnotation
// annotations on abstract types
abstract class C1[@serializable @cloneable +T, U, V[_]]
@@ -25,7 +25,7 @@ object Test {
//bug #1214
val y = new (Integer @ann(0))(2)
- import scala.reflect.BeanProperty
+ import scala.beans.BeanProperty
// bug #637
trait S { def getField(): Int }
@@ -44,7 +44,7 @@ object Test {
// test forward references to getters / setters
class BeanPropertyTests {
- @scala.reflect.BeanProperty lazy val lv1 = 0
+ @scala.beans.BeanProperty lazy val lv1 = 0
def foo() {
val bp1 = new BeanPropertyTests1
@@ -58,13 +58,13 @@ class BeanPropertyTests {
bp1.setV2(100)
}
- @scala.reflect.BeanProperty var v1 = 0
+ @scala.beans.BeanProperty var v1 = 0
}
class BeanPropertyTests1 {
- @scala.reflect.BeanProperty lazy val lv2 = "0"
- @scala.reflect.BeanProperty var v2 = 0
+ @scala.beans.BeanProperty lazy val lv2 = "0"
+ @scala.beans.BeanProperty var v2 = 0
}
// test mixin of getters / setters, and implementing abstract
@@ -78,8 +78,8 @@ class C extends T with BeanF {
}
trait T {
- @scala.reflect.BeanProperty var f = "nei"
- @scala.reflect.BooleanBeanProperty var g = false
+ @scala.beans.BeanProperty var f = "nei"
+ @scala.beans.BooleanBeanProperty var g = false
}
trait BeanF {
@@ -91,9 +91,9 @@ trait BeanF {
}
-class Ann3(arr: Array[String]) extends ClassfileAnnotation
-class Ann4(i: Int) extends ClassfileAnnotation
-class Ann5(value: Class[_]) extends ClassfileAnnotation
+class Ann3(arr: Array[String]) extends annotation.ClassfileAnnotation
+class Ann4(i: Int) extends annotation.ClassfileAnnotation
+class Ann5(value: Class[_]) extends annotation.ClassfileAnnotation
object Test3 {
final val i = 1083
diff --git a/test/files/pos/arrays3.scala b/test/files/pos/arrays3.scala
new file mode 100644
index 0000000..f96be0b
--- /dev/null
+++ b/test/files/pos/arrays3.scala
@@ -0,0 +1,11 @@
+trait Foo {
+ type Repr <: String
+ def f2(x: Repr) = x.length
+}
+trait Fooz[Repr <: Array[_]] {
+ def f0(x: Repr) = x.length
+}
+
+trait Bar[Repr <: List[_]] extends Foo with Fooz[Array[Int]] {
+ def f1(x: Repr) = x.length
+}
diff --git a/test/files/jvm/bug680.check b/test/files/pos/attachments-typed-ident.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/pos/attachments-typed-ident.check
diff --git a/test/files/pos/attachments-typed-ident.flags b/test/files/pos/attachments-typed-ident.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/pos/attachments-typed-ident.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/pos/attachments-typed-ident/Impls_1.scala b/test/files/pos/attachments-typed-ident/Impls_1.scala
new file mode 100644
index 0000000..cc40893
--- /dev/null
+++ b/test/files/pos/attachments-typed-ident/Impls_1.scala
@@ -0,0 +1,17 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object MyAttachment
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ val ident = Ident(newTermName("bar")) updateAttachment MyAttachment
+ assert(ident.attachments.get[MyAttachment.type].isDefined, ident.attachments)
+ val typed = c.typeCheck(ident)
+ assert(typed.attachments.get[MyAttachment.type].isDefined, typed.attachments)
+ c.Expr[Int](typed)
+ }
+
+ def foo = macro impl
+}
\ No newline at end of file
diff --git a/test/files/pos/attachments-typed-ident/Macros_Test_2.scala b/test/files/pos/attachments-typed-ident/Macros_Test_2.scala
new file mode 100644
index 0000000..37065ea
--- /dev/null
+++ b/test/files/pos/attachments-typed-ident/Macros_Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ def bar = 2
+ Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/pos/attributes.scala b/test/files/pos/attributes.scala
index f3bbb4c..ec735d0 100644
--- a/test/files/pos/attributes.scala
+++ b/test/files/pos/attributes.scala
@@ -52,15 +52,15 @@ object O6 {
}
object myAttrs {
- class a1 extends scala.Annotation;
- class a2(x: Int) extends scala.Annotation;
- class a3(x: a1) extends scala.Annotation;
+ class a1 extends scala.annotation.Annotation;
+ class a2(x: Int) extends scala.annotation.Annotation;
+ class a3(x: a1) extends scala.annotation.Annotation;
}
-class a4(ns: Array[Int]) extends scala.Annotation;
+class a4(ns: Array[Int]) extends scala.annotation.Annotation;
object O7 {
- class a1 extends scala.Annotation;
- class a2(x: Int) extends scala.Annotation;
- class a3(x: a1) extends scala.Annotation;
+ class a1 extends scala.annotation.Annotation;
+ class a2(x: Int) extends scala.annotation.Annotation;
+ class a3(x: a1) extends scala.annotation.Annotation;
final val x = new a1;
@a1 class C1;
diff --git a/test/files/pos/bug0029.scala b/test/files/pos/bug0029.scala
deleted file mode 100644
index 0af45ab..0000000
--- a/test/files/pos/bug0029.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Main {
- def f[a]: List[List[a]] = for (val l1 <- Nil; val l2 <- Nil) yield l1
-}
diff --git a/test/files/pos/bug1000.scala b/test/files/pos/bug1000.scala
deleted file mode 100644
index 93f8201..0000000
--- a/test/files/pos/bug1000.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
- val xs = Array(1, 2, 3)
- Console.println(xs.filter(_ >= 0).length)
-}
diff --git a/test/files/pos/bug1001.scala b/test/files/pos/bug1001.scala
deleted file mode 100644
index 776a334..0000000
--- a/test/files/pos/bug1001.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-// I suspect the stack overflow is occurring when the compiler is determining the types for the following line at the end of the file:-
-// val data = List(N26,N25)
-
-abstract class A
-{
- // commenting out the following line (only) leads to successful compilation
- protected val data: List[A]
-}
-
-trait B[T <: B[T]] extends A { self: T => }
-
-abstract class C extends A
-{
- // commenting out the following line (only) leads to successful compilation
- protected val data: List[C]
-}
-
-abstract class D extends C with B[D] {}
-
-abstract class Ee extends C with B[Ee]
-{
-}
-
-
-object N1 extends D
-{
- val data = Nil
-}
-
-object N2 extends D
-{
- val data = Nil
-}
-
-object N5 extends D
-{
- val data = List(N1)
-}
-
-object N6 extends D
-{
- val data = List(N1)
-}
-
-object N8 extends D
-{
- val data = List(N1)
-}
-
-object N10 extends D
-{
- val data = Nil
-}
-
-object N13 extends D
-{
- val data = List(N2)
-}
-
-object N14 extends D
-{
- val data = List(N5,N10,N8)
-}
-
-object N15 extends D
-{
- val data = List(N14)
-}
-
-object N16 extends D
-{
- val data = List(N13,N6,N15)
-}
-
-object N17 extends D
-{
- val data = List(N16)
-}
-
-object N21 extends D
-{
- val data = List(N16)
-}
-
-object N22 extends D
-{
- val data = List(N17)
-}
-
-object N25 extends D
-{
- val data = List(N22)
-}
-
-object N26 extends Ee
-{
- val data = List(N21,N17)
-}
-
-// Commenting out the following object (only) leads to successful compilation
-object N31 extends Ee
-{
- // If we use List[C](N26,N25), we achieve successful compilation
- val data = List[C](N26,N25)
-}
diff --git a/test/files/pos/bug1014.scala b/test/files/pos/bug1014.scala
deleted file mode 100644
index 1ac87b2..0000000
--- a/test/files/pos/bug1014.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-import scala.xml.{NodeSeq, Elem}
-
-class EO extends App with Moo{
- def cat = <cat>dog</cat>
-
- implicit def nodeSeqToFlog(in: Elem): Flog = new Flog(in)
-}
-
-trait Moo {
- def cat: Flog
-}
-
-class Flog(val in: NodeSeq)
diff --git a/test/files/pos/bug1049.scala b/test/files/pos/bug1049.scala
deleted file mode 100644
index 3cc9d0c..0000000
--- a/test/files/pos/bug1049.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package bug1049
-
-abstract class Test {
- type T <: A
- class A { self: T => }
- class B extends A { self: T => }
-}
diff --git a/test/files/pos/bug1050.scala b/test/files/pos/bug1050.scala
deleted file mode 100644
index 1dfa20c..0000000
--- a/test/files/pos/bug1050.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-package bug1050
-
-abstract class A {
- type T <: scala.ScalaObject
- class A { this: T =>
- def b = 3
- def c = b
- b
- }
-}
diff --git a/test/files/pos/bug1070.scala b/test/files/pos/bug1070.scala
deleted file mode 100644
index 95b7718..0000000
--- a/test/files/pos/bug1070.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-import scala.reflect.BeanProperty;
-trait beanpropertytrait {
- @BeanProperty var myVariable: Long = -1l;
-}
diff --git a/test/files/pos/bug1203.scala b/test/files/pos/bug1203.scala
deleted file mode 100644
index 4938621..0000000
--- a/test/files/pos/bug1203.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-case class ant(t: String) extends Annotation
-object Test {
- def main(args: Array[String]): Unit = {
- val a: scala.xml.NodeSeq @ant("12") = Nil
- println(a)
- }
-}
diff --git a/test/files/pos/bug1279a.scala b/test/files/pos/bug1279a.scala
deleted file mode 100644
index 9212b58..0000000
--- a/test/files/pos/bug1279a.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-// see #13
-// providing the type parameter in the recursive call to all4Impl
-// avoids the problem
-
-
-// covariant linked list
-abstract class M
-{ self =>
-
- type T
- final type selfType = M {type T <: self.T}
- type actualSelfType >: self.type <: selfType
-
- def next: selfType
-
-
- // I don't understand why this doesn't compile, but that's a separate matter
- // error: method all2 cannot be accessed in M.this.selfType
- // because its instance type => Stream[M{type T <: M.this.selfType#T}]
- // contains a malformed type: M.this.selfType#T
- // def all2: Stream[M {type T <: self.T}] = Stream.cons(self: actualSelfType, next.all2)
-
-
- // compiles successfully
- // def all3: Stream[M {type T <: self.T}] = all3Impl(self: actualSelfType)
- // private def all3Impl(first: M {type T <: self.T}): Stream[M {type T <: self.T}] = Stream.cons(first, all3Impl(first.next))
-
-
-
- def all4: Stream[M {type T <: self.T}] = Unrelated.all4Impl[T](self: actualSelfType)
-}
-
-object Unrelated
-{
- // TODO!!! fix this bug for real, it compiles successfully, but weird types are inferred
- // def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl(first.next))
-
-// compiles successfully
- def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl[U](first.next))
-}
diff --git a/test/files/pos/bug1439.scala b/test/files/pos/bug1439.scala
deleted file mode 100644
index 68a7332..0000000
--- a/test/files/pos/bug1439.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-// no unchecked warnings
-class View[C[A]] { }
-
-object Test {
- null match {
- case v: View[_] =>
- }
-}
diff --git a/test/files/pos/bug1560.scala b/test/files/pos/bug1560.scala
deleted file mode 100644
index 4419ae0..0000000
--- a/test/files/pos/bug1560.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-object Test extends App {
- trait C[T] {
- def t: T
- }
-
- def b: Option[C[_]] = null
-
- def c = b match {
- case Some(b) => b.t
- }
-}
diff --git a/test/files/pos/bug2023.scala b/test/files/pos/bug2023.scala
deleted file mode 100644
index de3e848..0000000
--- a/test/files/pos/bug2023.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-trait C[A]
-
-object C {
- implicit def ipl[A](implicit from: A => Ordered[A]): C[A] = null
-}
-
-object P {
- def foo[A](i: A, j: A)(implicit c: C[A]): Unit = ()
-}
-
-class ImplicitChainTest {
- def testTrivial: Unit = {
- P.foo('0', '9')
- P.foo('0', '9')
- }
-}
diff --git a/test/files/pos/bug211.scala b/test/files/pos/bug211.scala
deleted file mode 100644
index e394f21..0000000
--- a/test/files/pos/bug211.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-trait A;
-trait B;
-class Foo extends A with B { self: A with B => }
-object Test extends App {
- new Foo();
- Console.println("bug211 completed");
-}
-
diff --git a/test/files/pos/bug2261.scala b/test/files/pos/bug2261.scala
deleted file mode 100644
index fcb9821..0000000
--- a/test/files/pos/bug2261.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test extends App {
- class Bob[T]
- implicit def foo2bar[T](xs: List[T]): Bob[T] = new Bob[T]
- var x: Bob[Int] = null
- x = List(1,2,3)
-}
diff --git a/test/files/pos/bug252.scala b/test/files/pos/bug252.scala
deleted file mode 100644
index b10811f..0000000
--- a/test/files/pos/bug252.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-abstract class Module {}
-
-abstract class T {
- type moduleType <: Module
- val module: moduleType
-}
-
-abstract class Base {
- type mType = Module
- type tType = T { type moduleType <: mType }
-}
-
-abstract class Derived extends Base {
- def f(inputs: List[tType]): Unit = {
- for (t <- inputs; val m = t.module) { }
- }
-}
diff --git a/test/files/pos/bug2619.scala b/test/files/pos/bug2619.scala
deleted file mode 100644
index 283d93b..0000000
--- a/test/files/pos/bug2619.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-abstract class F {
- final def apply(x: Int): AnyRef = null
-}
-abstract class AbstractModule {
- def as: List[AnyRef]
- def ms: List[AbstractModule]
- def fs: List[F] = Nil
- def rs(x: Int): List[AnyRef] = fs.map(_(x))
-}
-abstract class ModuleType1 extends AbstractModule {}
-abstract class ModuleType2 extends AbstractModule {}
-
-object ModuleAE extends ModuleType1 {
- def as = Nil
- def ms = Nil
-}
-object ModuleAF extends ModuleType2 {
- def as = Nil
- def ms = List(ModuleAE)
-}
-object ModuleAG extends ModuleType1 {
- def as = List("")
- def ms = Nil
-}
-object ModuleAI extends ModuleType1 {
- def as = Nil
- def ms = List(ModuleAE)
-}
-object ModuleAK extends ModuleType2 {
- def as = Nil
- def ms = List(ModuleAF)
-}
-object ModuleAL extends ModuleType1 {
- def as = Nil
- def ms = List(
- ModuleAG,
- ModuleAI
- )
-}
-object ModuleAM extends ModuleType1 {
- def as = Nil
- def ms = List(
- ModuleAL,
- ModuleAE
- ) ::: List(ModuleAK)
-}
-object ModuleBE extends ModuleType1 {
- def as = Nil
- def ms = Nil
-}
-object ModuleBF extends ModuleType2 {
- def as = Nil
- def ms = List(ModuleBE)
-}
-object ModuleBG extends ModuleType1 {
- def as = List("")
- def ms = Nil
-}
-object ModuleBI extends ModuleType1 {
- def as = Nil
- def ms = List(ModuleBE)
-}
-object ModuleBK extends ModuleType2 {
- def as = Nil
- def ms = List(ModuleBF)
-}
-object ModuleBL extends ModuleType1 {
- def as = Nil
- def ms = List(
- ModuleBG,
- ModuleBI
- )
-}
-object ModuleBM extends ModuleType1 {
- def as = Nil
- def ms = List(
- ModuleBL,
- ModuleBE
- ) ::: List(ModuleBK)
-}
diff --git a/test/files/pos/bug2691.scala b/test/files/pos/bug2691.scala
deleted file mode 100644
index d289605..0000000
--- a/test/files/pos/bug2691.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-object Breakdown {
- def unapplySeq(x: Int): Some[List[String]] = Some(List("", "there"))
-}
-object Test {
- 42 match {
- case Breakdown("") => // needed to trigger bug
- case Breakdown("foo") => // needed to trigger bug
- case Breakdown("", who) => println ("hello " + who)
- }
-}
diff --git a/test/files/pos/bug3097.scala b/test/files/pos/bug3097.scala
deleted file mode 100644
index a034b96..0000000
--- a/test/files/pos/bug3097.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-package seal
-
-sealed trait ISimpleValue
-
-sealed trait IListValue extends ISimpleValue {
- def items: List[IAtomicValue[_]]
-}
-sealed trait IAtomicValue[O] extends ISimpleValue {
- def data: O
-}
-
-sealed trait IAbstractDoubleValue[O] extends IAtomicValue[O] { }
-sealed trait IDoubleValue extends IAbstractDoubleValue[Double]
-
-case class ListValue(val items: List[IAtomicValue[_]]) extends IListValue
-class DoubleValue(val data: Double) extends IDoubleValue {
- def asDouble = data
-}
-
-object Test {
- /**
- * @param args the command line arguments
- */
- def main(args: Array[String]): Unit = {
- val v: ISimpleValue = new DoubleValue(1)
- v match {
- case m: IListValue => println("list")
- case a: IAtomicValue[_] => println("atomic")
- }
- }
-}
diff --git a/test/files/pos/bug3568.scala b/test/files/pos/bug3568.scala
deleted file mode 100644
index 950c165..0000000
--- a/test/files/pos/bug3568.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-import scala.annotation._
-import scala.annotation.unchecked._
-import scala.collection._
-
-
-package object buffer {
- val broken = new ArrayVec2() // commenting out this line causes the file to compile.
-
- val works = Class.forName("buffer.ArrayVec2").newInstance().asInstanceOf[ArrayVec2]
-}
-
-package buffer {
- object Main {
- // ArrayVec2 can be compiled, instantiated and used.
- def main(args: Array[String]) { println(works) }
- }
-
- trait ElemType { type Element; type Component <: ElemType }
- trait Float1 extends ElemType { type Element = Float; type Component = Float1}
- class Vec2 extends ElemType { type Element = Vec2; type Component = Float1 }
-
- abstract class BaseSeq[T <: ElemType, E]
- extends IndexedSeq[E] with IndexedSeqOptimized[E, IndexedSeq[E]] {
- def length = 1
- def apply(i: Int) :E
- }
-
- abstract class GenericSeq[T <: ElemType] extends BaseSeq[T, T#Element]
- trait DataArray[T <: ElemType] extends BaseSeq[T, T#Element]
- trait DataView[T <: ElemType] extends BaseSeq[T, T#Element]
- abstract class BaseFloat1 extends BaseSeq[Float1, Float]
-
- class ArrayFloat1 extends BaseFloat1 with DataArray[Float1] {
- def apply(i: Int) :Float = 0f
- }
-
- class ViewFloat1 extends BaseFloat1 with DataView[Float1] {
- def apply(i: Int) :Float = 0f
- }
-
- class ArrayVec2(val backingSeq: ArrayFloat1)
- extends GenericSeq[Vec2] with DataArray[Vec2] {
- def this() = this(new ArrayFloat1)
- def apply(i: Int) :Vec2 = null
- }
-}
\ No newline at end of file
diff --git a/test/files/pos/bug3688.scala b/test/files/pos/bug3688.scala
deleted file mode 100644
index e481db3..0000000
--- a/test/files/pos/bug3688.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-import collection.JavaConversions._
-import java.{ util => ju }
-
-object Test {
- def m[P <% ju.List[Int]](l: P) = 1
- m(List(1)) // bug: should compile
-}
\ No newline at end of file
diff --git a/test/files/pos/bug531.scala b/test/files/pos/bug531.scala
deleted file mode 100644
index 02763e0..0000000
--- a/test/files/pos/bug531.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-object Test extends App {
- import scala.reflect._;
- def titi = {
- var truc = 0
- val tata: Code[()=>Unit] = () => {
- truc = 6
- }
- ()
- }
-}
diff --git a/test/files/pos/bug532.scala b/test/files/pos/bug532.scala
deleted file mode 100644
index 32649b1..0000000
--- a/test/files/pos/bug532.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-object Test extends App {
- import scala.reflect._;
- def titi: Unit = {
- var truc = 0
- val tata: Code[()=>Unit] = () => {
- truc = truc + 6
- }
- ()
- }
-}
diff --git a/test/files/pos/bug715/meredith_1.scala b/test/files/pos/bug715/meredith_1.scala
deleted file mode 100644
index 3ed2e57..0000000
--- a/test/files/pos/bug715/meredith_1.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-package com.sap.dspace.model.othello;
-
-import scala.xml._
-
-trait XMLRenderer {
- type T <: {def getClass() : java.lang.Class[_]}
- val valueTypes =
- List(
- classOf[java.lang.Boolean],
- classOf[java.lang.Integer],
- classOf[java.lang.Float],
- classOf[java.lang.String]
- // more to come
- )
-
- def value2XML(
- value : Object,
- field : java.lang.reflect.Field,
- pojo : T
- ) : Node = {
- value match {
- case null => Text( "null" )
- case vUnmatched =>
- if (value.isInstanceOf[java.lang.Boolean])
- Text( value.asInstanceOf[java.lang.Boolean].toString )
- else if (value.isInstanceOf[java.lang.Integer])
- Text( value.asInstanceOf[java.lang.Integer].toString )
- else if (value.isInstanceOf[java.lang.Float])
- Text( value.asInstanceOf[java.lang.Float].toString )
- // else if (value.isInstanceOf[T])
- // pojo2XML( value.asInstanceOf[T] )
- else
- <unmatchedType>
- <theType>
- {vUnmatched.getClass.toString}
- </theType>
- <theValue>
- {vUnmatched.toString}
- </theValue>
- </unmatchedType>
- }
- }
-
- def field2XML(
- field : java.lang.reflect.Field,
- pojo : T
- ) : Elem = {
-
- val accessible = field.isAccessible;
- field.setAccessible( true );
- // BUGBUG lgm need to disambiguate on type and possibly make
- // recursive call to pojo2XML
- val fldValXML = value2XML( field.get( pojo ), field, pojo );
- field.setAccessible( accessible );
-
- Elem(
- null,
- field.getName,
- null,
- TopScope,
- fldValXML
- )
- }
-
- def pojo2XML( pojo : T ) : Elem = {
- val progeny =
- for (field <- pojo.getClass.getDeclaredFields)
- yield field2XML( field, pojo );
-
- Elem(
- null,
- pojo.getClass.getName,
- null,
- TopScope,
- progeny.asInstanceOf[Array[scala.xml.Node]] : _*
- )
- }
-}
-
-case class POJO2XMLRenderer( recurse : Boolean )
- extends XMLRenderer {
- type T = java.io.Serializable
- override def value2XML(
- value : Object,
- field : java.lang.reflect.Field,
- pojo : java.io.Serializable
- ) : Node = {
- if (recurse) super.value2XML( value, field, pojo )
- else Text( value + "" )
- }
-}
-
-object thePOJO2XMLRenderer extends POJO2XMLRenderer( true ) {
-}
-
-object Test extends Application {
- println(com.sap.dspace.model.othello.thePOJO2XMLRenderer)
-}
diff --git a/test/files/pos/bug839.scala b/test/files/pos/bug839.scala
deleted file mode 100644
index d845ed3..0000000
--- a/test/files/pos/bug839.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-// see pending/pos/bug112606A.scala
-package test;
-trait Test {
- trait Global {
- type Tree;
- def get : Tree;
- }
- trait TreeBuilder {
- val global : Global;
- def set(tree : global.Tree) = {}
- }
- val nsc : Global;
- trait FileImpl {
- object treeBuilder extends TreeBuilder {
- val global : nsc.type = nsc;
- }
- // OK
- treeBuilder.set(nsc.get);
- }
- val file0 : FileImpl;
- // OK
- file0.treeBuilder.set(nsc.get);
- def file : FileImpl;
- // type mismatch
- file.treeBuilder.set(nsc.get);
-}
diff --git a/test/files/pos/bug927.scala b/test/files/pos/bug927.scala
deleted file mode 100644
index 8f3cdac..0000000
--- a/test/files/pos/bug927.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-object Test {
-
- def sum(stream: Stream[Int]): Int =
- stream match {
- case Stream.Empty => 0
- case Stream.cons(hd, tl) => hd + sum(tl)
- }
- val str: Stream[Int] = Stream.fromIterator(List(1,2,3).iterator)
- assert(sum(str) == 6)
-
-}
diff --git a/test/files/pos/classtag-pos.flags b/test/files/pos/classtag-pos.flags
new file mode 100644
index 0000000..281f0a1
--- /dev/null
+++ b/test/files/pos/classtag-pos.flags
@@ -0,0 +1 @@
+-Yrangepos
diff --git a/test/files/pos/classtag-pos.scala b/test/files/pos/classtag-pos.scala
new file mode 100644
index 0000000..768d2e2
--- /dev/null
+++ b/test/files/pos/classtag-pos.scala
@@ -0,0 +1,5 @@
+import scala.reflect.runtime.universe._
+
+class A {
+ def f[T: TypeTag] = typeOf[T] match { case TypeRef(_, _, args) => args }
+}
diff --git a/test/files/pos/code.scala b/test/files/pos/code.scala
deleted file mode 100644
index 110e01c..0000000
--- a/test/files/pos/code.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class Test {
- val fun: reflect.Code[Int => Int] = x => x + 1;
-}
diff --git a/test/files/pos/collectGenericCC.scala b/test/files/pos/collectGenericCC.scala
index 7504752..8201c6a 100644
--- a/test/files/pos/collectGenericCC.scala
+++ b/test/files/pos/collectGenericCC.scala
@@ -1,10 +1,10 @@
-import scala.collection.generic._
+import scala.collection.generic.CanBuildFrom
import scala.collection._
object Test {
- def collect[A, Res](r: Traversable[A])(implicit bf: CanBuild[A, Res]) = {
- val b = bf()
- for (a <- r) b += a
+ def collect[A, Res](r: Traversable[A])(implicit bf: generic.CanBuild[A, Res]) = {
+ val b: collection.mutable.Builder[A, Res] = bf()
+ r foreach ((a: A) => b += a)
b.result
}
diff --git a/test/files/pos/contextbounds-implicits-new.scala b/test/files/pos/contextbounds-implicits-new.scala
new file mode 100644
index 0000000..327c4a9
--- /dev/null
+++ b/test/files/pos/contextbounds-implicits-new.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+
+/* Tests implicit parameters in the presence of context bounds.
+ * See Section 7.4 of the Scala Language Specification.
+ */
+class C {
+
+ def f[T: TypeTag, S: TypeTag](x: T, y: S)(implicit p: C) { }
+
+}
\ No newline at end of file
diff --git a/test/files/pos/contextbounds-implicits-old.scala b/test/files/pos/contextbounds-implicits-old.scala
new file mode 100644
index 0000000..f9113ee
--- /dev/null
+++ b/test/files/pos/contextbounds-implicits-old.scala
@@ -0,0 +1,8 @@
+/* Tests implicit parameters in the presence of context bounds.
+ * See Section 7.4 of the Scala Language Specification.
+ */
+class C {
+
+ def f[T: Manifest, S: Manifest](x: T, y: S)(implicit p: C) { }
+
+}
diff --git a/test/files/pos/depexists.scala b/test/files/pos/depexists.scala
index d539c84..dff1917 100644
--- a/test/files/pos/depexists.scala
+++ b/test/files/pos/depexists.scala
@@ -1,5 +1,5 @@
object depexists {
- val c: Cell[(a, b)] forSome { type a <: Number; type b <: (a, a) } = null
+ val c: Option[(a, b)] forSome { type a <: Number; type b <: (a, a) } = null
val d = c
}
diff --git a/test/files/pos/depmet_1_pos.flags b/test/files/pos/depmet_1_pos.flags
deleted file mode 100644
index 1c26b24..0000000
--- a/test/files/pos/depmet_1_pos.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_chaining_zw.flags b/test/files/pos/depmet_implicit_chaining_zw.flags
deleted file mode 100644
index 1c26b24..0000000
--- a/test/files/pos/depmet_implicit_chaining_zw.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_norm_ret.flags b/test/files/pos/depmet_implicit_norm_ret.flags
deleted file mode 100644
index 1c26b24..0000000
--- a/test/files/pos/depmet_implicit_norm_ret.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_oopsla_session.flags b/test/files/pos/depmet_implicit_oopsla_session.flags
deleted file mode 100644
index 1c26b24..0000000
--- a/test/files/pos/depmet_implicit_oopsla_session.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_oopsla_session_2.flags b/test/files/pos/depmet_implicit_oopsla_session_2.flags
deleted file mode 100644
index 1c26b24..0000000
--- a/test/files/pos/depmet_implicit_oopsla_session_2.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_oopsla_session_simpler.flags b/test/files/pos/depmet_implicit_oopsla_session_simpler.flags
deleted file mode 100644
index 1c26b24..0000000
--- a/test/files/pos/depmet_implicit_oopsla_session_simpler.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_oopsla_zipwith.flags b/test/files/pos/depmet_implicit_oopsla_zipwith.flags
deleted file mode 100644
index 1c26b24..0000000
--- a/test/files/pos/depmet_implicit_oopsla_zipwith.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/depmet_implicit_tpbetareduce.flags b/test/files/pos/depmet_implicit_tpbetareduce.flags
deleted file mode 100644
index 1c26b24..0000000
--- a/test/files/pos/depmet_implicit_tpbetareduce.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/exhaust_alternatives.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/exhaust_alternatives.flags
diff --git a/test/files/pos/exhaust_alternatives.scala b/test/files/pos/exhaust_alternatives.scala
new file mode 100644
index 0000000..cc81d0b
--- /dev/null
+++ b/test/files/pos/exhaust_alternatives.scala
@@ -0,0 +1,10 @@
+sealed abstract class X
+sealed case class A(x: Boolean) extends X
+case object B extends X
+
+object Test {
+ def test(x: X) = x match {
+ case A(true) =>
+ case A(false) | B =>
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/exhaustive_heuristics.scala b/test/files/pos/exhaustive_heuristics.scala
new file mode 100644
index 0000000..2979005
--- /dev/null
+++ b/test/files/pos/exhaustive_heuristics.scala
@@ -0,0 +1,26 @@
+// tests exhaustivity doesn't give warnings (due to its heuristic rewrites kicking in or it backing off)
+object Test {
+ // List() => Nil
+ List(1) match {
+ case List() =>
+ case x :: xs =>
+ }
+
+ // we don't look into guards
+ val turnOffChecks = true
+ List(1) match {
+ case _ if turnOffChecks =>
+ }
+
+ // we back off when there are any user-defined extractors
+ // in fact this is exhaustive, but we pretend we don't know since List's unapplySeq is not special to the compiler
+ // to compensate our ignorance, we back off
+ // well, in truth, we do rewrite List() to Nil, but otherwise we do nothing
+ // the full rewrite List(a, b) to a :: b :: Nil, for example is planned (but not sure it's a good idea)
+ List(true, false) match {
+ case List(_, _, _*) =>
+ case List(node, _*) =>
+ case Nil =>
+ }
+
+}
\ No newline at end of file
diff --git a/test/files/pos/existentials-harmful.scala b/test/files/pos/existentials-harmful.scala
new file mode 100644
index 0000000..8722852
--- /dev/null
+++ b/test/files/pos/existentials-harmful.scala
@@ -0,0 +1,54 @@
+// a.scala
+// Mon Jul 11 14:18:26 PDT 2011
+
+object ExistentialsConsideredHarmful {
+ class Animal(val name: String)
+ object Dog extends Animal("Dog")
+ object Sheep extends Animal("Sheep")
+
+ trait Tools[A] {
+ def shave(a: A): A
+ }
+ def tools[A](a: A): Tools[A] = null // dummy
+
+ case class TransportBox[A <: Animal](animal: A, tools: Tools[A]) {
+ def label: String = animal.name
+ }
+
+ // 1.
+ def carry[A <: Animal](box: TransportBox[A]): Unit = {
+ println(box.animal.name+" got carried away")
+ }
+
+ val aBox =
+ if (math.random < 0.5)
+ TransportBox(Dog, tools(Dog))
+ else
+ TransportBox(Sheep, tools(Sheep))
+
+ // 2.
+ //aBox.tools.shave(aBox.animal)
+
+ // Use pattern match to avoid opening the existential twice
+ aBox match {
+ case TransportBox(animal, tools) => tools.shave(animal)
+ }
+
+ abstract class BoxCarrier[R <: Animal](box: TransportBox[R]) {
+ def speed: Int
+
+ def talkToAnimal: Unit = println("The carrier says hello to"+box.animal.name)
+ }
+
+ // 3.
+ //val bc = new BoxCarrier(aBox) {
+
+ // Use pattern match to avoid opening the existential twice
+ // Type annotation on bc is required ... possible compiler bug?
+ // val bc : BoxCarrier[_ <: Animal] = aBox match {
+ val bc = aBox match {
+ case tb : TransportBox[a] => new BoxCarrier(tb) {
+ def speed: Int = 12
+ }
+ }
+}
diff --git a/test/files/pos/existentials.scala b/test/files/pos/existentials.scala
new file mode 100644
index 0000000..0adbc70
--- /dev/null
+++ b/test/files/pos/existentials.scala
@@ -0,0 +1,22 @@
+/** All of these should work, some don't yet.
+ * !!!
+ */
+class A {
+ def f() = { case class Bob(); Bob }
+
+ val quux0 = f()
+ def quux1 = f()
+ // lazy val quux2 = f()
+ // def quux3 = {
+ // lazy val quux3a = f()
+ // quux3a
+ // }
+
+ val bippy0 = f _
+ def bippy1 = f _
+ // lazy val bippy2 = f _
+ // val bippy3 = {
+ // lazy val bippy3a = f _
+ // bippy3a
+ // }
+}
diff --git a/test/files/pos/exponential-spec.scala b/test/files/pos/exponential-spec.scala
new file mode 100644
index 0000000..83aef58
--- /dev/null
+++ b/test/files/pos/exponential-spec.scala
@@ -0,0 +1,47 @@
+// a.scala
+// Sat Jun 30 19:51:17 PDT 2012
+
+trait Exp[T]
+
+object Test {
+ def f[T](exp: Exp[T]): Exp[T] = (
+ f[T] _
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T] // 4s
+ compose f[T] // 5s
+ compose f[T] // 5s
+ compose f[T] // 6s
+ compose f[T] // 7s
+ compose f[T] // 8s
+ compose f[T] // 11s
+ compose f[T] // 17s
+ compose f[T] // 29s
+ compose f[T] // 54s
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ compose f[T]
+ )(exp)
+}
diff --git a/test/files/pos/five-dot-f.flags b/test/files/pos/five-dot-f.flags
new file mode 100644
index 0000000..112fc72
--- /dev/null
+++ b/test/files/pos/five-dot-f.flags
@@ -0,0 +1 @@
+-Xfuture
\ No newline at end of file
diff --git a/test/files/pos/five-dot-f.scala b/test/files/pos/five-dot-f.scala
new file mode 100644
index 0000000..8a7f86e
--- /dev/null
+++ b/test/files/pos/five-dot-f.scala
@@ -0,0 +1,5 @@
+class C {
+ implicit def ffer(x: Int) = new { def f : Long = 123L }
+
+ val x1: Long = 5.f
+}
diff --git a/test/files/pos/gen-traversable-methods.scala b/test/files/pos/gen-traversable-methods.scala
new file mode 100644
index 0000000..2604a09
--- /dev/null
+++ b/test/files/pos/gen-traversable-methods.scala
@@ -0,0 +1,20 @@
+
+
+
+import collection._
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val gen: GenTraversable[Int] = List(1, 2, 3)
+ gen.head
+ gen.headOption
+ gen.tail
+ gen.last
+ gen.lastOption
+ gen.init
+ }
+
+}
diff --git a/test/files/pos/generic-sigs.scala b/test/files/pos/generic-sigs.scala
index 40ec044..b112766 100644
--- a/test/files/pos/generic-sigs.scala
+++ b/test/files/pos/generic-sigs.scala
@@ -1,3 +1,5 @@
+import language.existentials
+
object A {
def f1 = List(classOf[Int], classOf[String])
def f2 = List(classOf[String], classOf[Int])
@@ -15,4 +17,4 @@ object A {
class Boppy[+T1,-T2]
def g1 = new Boppy[t forSome { type t <: Int }, u forSome { type u <: String }]
-}
\ No newline at end of file
+}
diff --git a/test/files/pos/getClassType.scala b/test/files/pos/getClassType.scala
new file mode 100644
index 0000000..7482788
--- /dev/null
+++ b/test/files/pos/getClassType.scala
@@ -0,0 +1,16 @@
+trait IdlBase
+
+class IdlConcrete extends IdlBase
+
+class A {
+ // In general, this method should not need an instance to reflect on it, so
+ // take a Class[]
+ def reflect(clazz : Class[_ <: IdlBase]) = {
+ // Get a list of all its methods and build a hash keyed by method name
+ // for statistics recording.
+ }
+
+ // But I also really have an IdlConcrete generated by Spring here...
+ val idl = new IdlConcrete
+ reflect(idl.getClass)
+}
diff --git a/test/files/pos/hk-match/a.scala b/test/files/pos/hk-match/a.scala
new file mode 100644
index 0000000..7144068
--- /dev/null
+++ b/test/files/pos/hk-match/a.scala
@@ -0,0 +1,5 @@
+trait A {
+ type HKAlias[X] = List[X]
+
+ (null: Any) match { case f: Bippy[HKAlias] => f }
+}
diff --git a/test/files/pos/hk-match/b.scala b/test/files/pos/hk-match/b.scala
new file mode 100644
index 0000000..f7d21f6
--- /dev/null
+++ b/test/files/pos/hk-match/b.scala
@@ -0,0 +1 @@
+trait Bippy[E[X]]
diff --git a/test/files/pos/hkarray.flags b/test/files/pos/hkarray.flags
index e8fb65d..e745d8b 100644
--- a/test/files/pos/hkarray.flags
+++ b/test/files/pos/hkarray.flags
@@ -1 +1 @@
--Xfatal-warnings
\ No newline at end of file
+-Xfatal-warnings -language:higherKinds
\ No newline at end of file
diff --git a/test/files/pos/hkrange.scala b/test/files/pos/hkrange.scala
new file mode 100644
index 0000000..8d61167
--- /dev/null
+++ b/test/files/pos/hkrange.scala
@@ -0,0 +1,5 @@
+class A {
+ def f[CC[X] <: Traversable[X]](x: CC[Int]) = ()
+
+ f(1 to 5)
+}
diff --git a/test/files/pos/implicit-unwrap-tc.scala b/test/files/pos/implicit-unwrap-tc.scala
new file mode 100644
index 0000000..1afde26
--- /dev/null
+++ b/test/files/pos/implicit-unwrap-tc.scala
@@ -0,0 +1,10 @@
+trait NewType[X]
+
+object Test {
+ // change return type to Foo and it compiles.
+ implicit def Unwrap[X](n: NewType[X]): X = sys.error("")
+ class Foo(val a: Int)
+ def test(f: NewType[Foo]) = f.a
+}
+
+
diff --git a/test/files/pos/implicits-new.scala b/test/files/pos/implicits-new.scala
new file mode 100644
index 0000000..ffc3871
--- /dev/null
+++ b/test/files/pos/implicits-new.scala
@@ -0,0 +1,92 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.{ClassTag, classTag}
+
+// #1435
+object t1435 {
+ implicit def a(s:String):String = error("")
+ implicit def a(i:Int):String = error("")
+ implicit def b(i:Int):String = error("")
+}
+
+class C1435 {
+ val v:String = {
+ import t1435.a
+ 2
+ }
+}
+
+// #1492
+class C1492 {
+
+ class X
+
+ def foo(x: X => X) {}
+
+ foo ( implicit x => implicitly[X] )
+ foo { implicit x => implicitly[X] }
+}
+
+// #1579
+object Test1579 {
+ class Column
+ class Query[E](val value: E)
+ class Invoker(q: Any) { val foo = null }
+
+ implicit def unwrap[C](q: Query[C]) = q.value
+ implicit def invoker(q: Query[Column]) = new Invoker(q)
+
+ val q = new Query(new Column)
+ q.foo
+}
+// #1625
+object Test1625 {
+
+ class Wrapped(x:Any) {
+ def unwrap() = x
+ }
+
+ implicit def byName[A](x: =>A) = new Wrapped(x)
+
+ implicit def byVal[A](x: A) = x
+
+ def main(args: Array[String]) = {
+
+// val res:Wrapped = 7 // works
+
+ val res = 7.unwrap() // doesn't work
+
+ println("=> result: " + res)
+ }
+}
+
+object Test2188 {
+ implicit def toJavaList[A: ClassTag](t:collection.Seq[A]):java.util.List[A] = java.util.Arrays.asList(t.toArray:_*)
+
+ val x: java.util.List[String] = List("foo")
+}
+
+object TestNumericWidening {
+ val y = 1
+ val x: java.lang.Long = y
+}
+
+// #2709
+package foo2709 {
+ class A
+ class B
+
+ package object bar {
+ implicit def a2b(a: A): B = new B
+ }
+
+ package bar {
+ object test {
+ new A: B
+ }
+ }
+}
+
+// Problem with specs
+object specsProblem {
+ println(implicitly[TypeTag[Class[_]]])
+}
\ No newline at end of file
diff --git a/test/files/pos/implicits.scala b/test/files/pos/implicits-old.scala
similarity index 100%
rename from test/files/pos/implicits.scala
rename to test/files/pos/implicits-old.scala
diff --git a/test/files/pos/annotDepMethType.flags b/test/files/pos/infersingle.flags
similarity index 100%
copy from test/files/pos/annotDepMethType.flags
copy to test/files/pos/infersingle.flags
diff --git a/test/files/pos/infersingle.scala b/test/files/pos/infersingle.scala
new file mode 100644
index 0000000..6830fcd
--- /dev/null
+++ b/test/files/pos/infersingle.scala
@@ -0,0 +1,5 @@
+object Test {
+ def one[T](x: T): Option[T] = Some(x)
+ val x = "one"
+ val y: Option[x.type] = one(x)
+}
\ No newline at end of file
diff --git a/test/files/pos/inline-access-levels.flags b/test/files/pos/inline-access-levels.flags
new file mode 100644
index 0000000..882f40f
--- /dev/null
+++ b/test/files/pos/inline-access-levels.flags
@@ -0,0 +1 @@
+-optimise -Xfatal-warnings -Yinline-warnings
diff --git a/test/files/pos/inline-access-levels/A_1.scala b/test/files/pos/inline-access-levels/A_1.scala
new file mode 100644
index 0000000..479fe0f
--- /dev/null
+++ b/test/files/pos/inline-access-levels/A_1.scala
@@ -0,0 +1,10 @@
+package test
+
+object A {
+
+ private var x: Int = 0
+
+ @inline def actOnX(f: Int => Int) = {
+ x = f(x)
+ }
+}
diff --git a/test/files/pos/inline-access-levels/Test_2.scala b/test/files/pos/inline-access-levels/Test_2.scala
new file mode 100644
index 0000000..12c9eb5
--- /dev/null
+++ b/test/files/pos/inline-access-levels/Test_2.scala
@@ -0,0 +1,11 @@
+package test
+
+object Test {
+
+ def main(args: Array[String]) {
+
+ A.actOnX(_ + 1)
+
+ }
+
+}
diff --git a/test/files/pos/bug3420.flags b/test/files/pos/inliner2.flags
similarity index 100%
copy from test/files/pos/bug3420.flags
copy to test/files/pos/inliner2.flags
diff --git a/test/files/pos/inliner2.scala b/test/files/pos/inliner2.scala
new file mode 100644
index 0000000..fe231ec
--- /dev/null
+++ b/test/files/pos/inliner2.scala
@@ -0,0 +1,57 @@
+// This isn't actually testing much, because no warning is emitted in versions
+// before the fix which comes with this because the method isn't even considered
+// for inlining due to the bug.
+class A {
+ private var debug = false
+ @inline private def ifelse[T](cond: => Boolean, ifPart: => T, elsePart: => T): T =
+ if (cond) ifPart else elsePart
+
+ final def bob1() = ifelse(debug, 1, 2)
+ final def bob2() = if (debug) 1 else 2
+}
+// Cool:
+//
+// % ls -1 /tmp/2901/
+// A$$anonfun$bob1$1.class
+// A$$anonfun$bob1$2.class
+// A$$anonfun$bob1$3.class
+// A.class
+// % ls -1 /tmp/trunk
+// A.class
+//
+// Observations:
+//
+// (1) The inlined version accesses the field: the explicit one calls the accessor.
+// (2) The inlined version fails to eliminate boxing. With reference types it emits
+// an unneeded checkcast.
+// (3) The private var debug is mangled to A$$debug, but after inlining it is never accessed
+// from outside of the class and doesn't need mangling.
+// (4) We could forego emitting bytecode for ifelse entirely if it has been
+// inlined at all sites.
+//
+// Generated bytecode for the above:
+//
+// public final int bob1();
+// Code:
+// Stack=1, Locals=1, Args_size=1
+// 0: aload_0
+// 1: getfield #11; //Field A$$debug:Z
+// 4: ifeq 14
+// 7: iconst_1
+// 8: invokestatic #41; //Method scala/runtime/BoxesRunTime.boxToInteger:(I)Ljava/lang/Integer;
+// 11: goto 18
+// 14: iconst_2
+// 15: invokestatic #41; //Method scala/runtime/BoxesRunTime.boxToInteger:(I)Ljava/lang/Integer;
+// 18: invokestatic #45; //Method scala/runtime/BoxesRunTime.unboxToInt:(Ljava/lang/Object;)I
+// 21: ireturn
+//
+// public final int bob2();
+// Code:
+// Stack=1, Locals=1, Args_size=1
+// 0: aload_0
+// 1: invokevirtual #48; //Method A$$debug:()Z
+// 4: ifeq 11
+// 7: iconst_1
+// 8: goto 12
+// 11: iconst_2
+// 12: ireturn
diff --git a/test/files/pos/irrefutable.scala b/test/files/pos/irrefutable.scala
new file mode 100644
index 0000000..0a792b6
--- /dev/null
+++ b/test/files/pos/irrefutable.scala
@@ -0,0 +1,22 @@
+// The test which this should perform but does not
+// is that f1 is recognized as irrefutable and f2 is not
+// This can be recognized via the generated classes:
+//
+// A$$anonfun$f1$1.class
+// A$$anonfun$f2$1.class
+// A$$anonfun$f2$2.class
+//
+// The extra one in $f2$ is the filter.
+//
+// !!! Marking with exclamation points so maybe someday
+// this test will be finished.
+class A {
+ case class Foo[T](x: T)
+
+ def f1(xs: List[Foo[Int]]) = {
+ for (Foo(x: Int) <- xs) yield x
+ }
+ def f2(xs: List[Foo[Any]]) = {
+ for (Foo(x: Int) <- xs) yield x
+ }
+}
diff --git a/test/files/pos/isApplicableSafe.scala b/test/files/pos/isApplicableSafe.scala
new file mode 100644
index 0000000..591beb2
--- /dev/null
+++ b/test/files/pos/isApplicableSafe.scala
@@ -0,0 +1,8 @@
+class A {
+ // Any of Array[List[Symbol]], List[Array[Symbol]], or List[List[Symbol]] compile.
+ var xs: Array[Array[Symbol]] = _
+ var ys: Array[Map[Symbol, Set[Symbol]]] = _
+
+ xs = Array(Array())
+ ys = Array(Map(), Map())
+}
\ No newline at end of file
diff --git a/test/files/pos/javaConversions-2.10-regression.scala b/test/files/pos/javaConversions-2.10-regression.scala
new file mode 100644
index 0000000..e1b8101
--- /dev/null
+++ b/test/files/pos/javaConversions-2.10-regression.scala
@@ -0,0 +1,17 @@
+import collection.{JavaConversions, mutable, concurrent}
+import JavaConversions._
+import java.util.concurrent.{ConcurrentHashMap => CHM}
+
+object Foo {
+ def buildCache2_9_simple[K <: AnyRef, V <: AnyRef]: mutable.ConcurrentMap[K, V] =
+ asScalaConcurrentMap(new CHM())
+
+ def buildCache2_9_implicit[K <: AnyRef, V <: AnyRef]: mutable.ConcurrentMap[K, V] =
+ new CHM[K, V]()
+}
+
+object Bar {
+ def assertType[T](t: T) = t
+ val a = new CHM[String, String]() += (("", ""))
+ assertType[concurrent.Map[String, String]](a)
+}
diff --git a/test/files/pos/javaReadsSigs/fromjava.java b/test/files/pos/javaReadsSigs/fromjava.java
index 7813f0f..eca6396 100644
--- a/test/files/pos/javaReadsSigs/fromjava.java
+++ b/test/files/pos/javaReadsSigs/fromjava.java
@@ -22,11 +22,11 @@ class B { };
class Contra {
// Not an Ordering<Character>.
static Ordering<Object> charOrd = scala.math.Ordering.Char$.MODULE$;
-
+
public boolean useCharOrd() {
return charOrd.compare(new Object(), new Object()) == 0;
}
-
+
static Numeric<?> intNum = scala.math.Numeric.IntIsIntegral$.MODULE$;
}
diff --git a/test/files/pos/lexical.scala b/test/files/pos/lexical.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/liftcode_polymorphic.scala b/test/files/pos/liftcode_polymorphic.scala
new file mode 100644
index 0000000..8f537d2
--- /dev/null
+++ b/test/files/pos/liftcode_polymorphic.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+
+object Append extends Application {
+
+ def append[A](l1: List[A], l2: List[A]):List[A] =
+ l1 match {
+ case Nil => l2
+ case x::xs => x :: append(xs, l2)
+ }
+
+ println(reify(append _).tree)
+}
diff --git a/test/files/pos/list-extractor.scala b/test/files/pos/list-extractor.scala
new file mode 100644
index 0000000..79c622b
--- /dev/null
+++ b/test/files/pos/list-extractor.scala
@@ -0,0 +1,8 @@
+// This was fixed in r25277 but is enough different
+// from the case I was knowingly fixing, I'm throwing it
+// in there.
+object HasArgs {
+ def boop(params: List[List[_]]) = params match {
+ case List(List()) => 2
+ }
+}
diff --git a/test/pending/pos/local-objects.scala b/test/files/pos/local-objects.scala
similarity index 100%
rename from test/pending/pos/local-objects.scala
rename to test/files/pos/local-objects.scala
diff --git a/test/files/pos/lookupswitch.scala b/test/files/pos/lookupswitch.scala
new file mode 100644
index 0000000..33594c0
--- /dev/null
+++ b/test/files/pos/lookupswitch.scala
@@ -0,0 +1,37 @@
+// There's not a real test here, but on compilation the
+// switch should have the cases arranged in order from 1-30.
+class A {
+ def f(x: Int) = x match {
+ case 6 => "6"
+ case 18 => "18"
+ case 7 => "7"
+ case 2 => "2"
+ case 13 => "13"
+ case 11 => "11"
+ case 26 => "26"
+ case 27 => "27"
+ case 29 => "29"
+ case 25 => "25"
+ case 9 => "9"
+ case 17 => "17"
+ case 16 => "16"
+ case 1 => "1"
+ case 30 => "30"
+ case 15 => "15"
+ case 22 => "22"
+ case 19 => "19"
+ case 23 => "23"
+ case 8 => "8"
+ case 28 => "28"
+ case 5 => "5"
+ case 12 => "12"
+ case 10 => "10"
+ case 21 => "21"
+ case 24 => "24"
+ case 4 => "4"
+ case 14 => "14"
+ case 3 => "3"
+ case 20 => "20"
+ }
+}
+
\ No newline at end of file
diff --git a/test/files/pos/lub-dealias-widen.scala b/test/files/pos/lub-dealias-widen.scala
new file mode 100644
index 0000000..38854fb
--- /dev/null
+++ b/test/files/pos/lub-dealias-widen.scala
@@ -0,0 +1,34 @@
+import scala.language.higherKinds
+
+sealed trait Path {
+ type EncodeFunc
+ type Route[R] = List[String] => R
+
+ def >>(f: Route[Int]): Sitelet[EncodeFunc] = ???
+}
+
+case object PAny extends Path {
+ type EncodeFunc = List[String] => String
+}
+
+case class PLit[Next <: Path]() extends Path {
+ type EncodeFunc = Next#EncodeFunc
+}
+
+trait Sitelet[EncodeFunc] { self =>
+ def &[G <: H, H >: EncodeFunc](that: Sitelet[G]): Sitelet[H] = ???
+}
+
+object Test {
+ val r: Sitelet[Int => (Int => String)] = ???
+
+ val p2: PLit[PAny.type] = ???
+ val r2 /*: Sitelet[List[String] => String] */ // annotate type and it compiles with 2.10.0
+ = p2 >> { (xs: List[String]) => 0 }
+
+ // This works after https://github.com/scala/scala/commit/a06d31f6a
+ // Before: error: inferred type arguments [List[String] => String,List[String] => String]
+ // do not conform to method &'s type parameter bounds
+ // [G <: H,H >: Int => (Int => String)]
+ val s = r & r2
+}
\ No newline at end of file
diff --git a/test/files/pos/lubs.scala b/test/files/pos/lubs.scala
new file mode 100644
index 0000000..d7651f8
--- /dev/null
+++ b/test/files/pos/lubs.scala
@@ -0,0 +1,3 @@
+object Test {
+ List(new { def f = 1; def g = 1}, new { def f = 2}).map(_.f)
+}
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/macro-deprecate-dont-touch-backquotedidents.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/macro-deprecate-dont-touch-backquotedidents.flags
diff --git a/test/files/pos/macro-deprecate-dont-touch-backquotedidents.scala b/test/files/pos/macro-deprecate-dont-touch-backquotedidents.scala
new file mode 100644
index 0000000..69a7333
--- /dev/null
+++ b/test/files/pos/macro-deprecate-dont-touch-backquotedidents.scala
@@ -0,0 +1,56 @@
+object Test1 {
+ val `macro` = ???
+}
+
+object Test2 {
+ var `macro` = ???
+}
+
+object Test3 {
+ type `macro` = Int
+}
+
+package test4 {
+ class `macro`
+}
+
+object Test5 {
+ class `macro`
+}
+
+package test6 {
+ object `macro`
+}
+
+object Test7 {
+ object `macro`
+}
+
+package test8 {
+ trait `macro`
+}
+
+object Test9 {
+ trait `macro`
+}
+
+package `macro` {
+ package `macro`.bar {
+ }
+}
+
+package foo {
+ package `macro`.foo {
+ }
+}
+
+//object Test12 {
+// val Some(`macro`) = Some(42)
+// `macro` match {
+// case `macro` => println(`macro`)
+// }
+//}
+
+object Test13 {
+ def `macro` = 2
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/pos/macro-qmarkqmarkqmark.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/pos/macro-qmarkqmarkqmark.check
diff --git a/test/files/pos/macro-qmarkqmarkqmark.scala b/test/files/pos/macro-qmarkqmarkqmark.scala
new file mode 100644
index 0000000..a91e432
--- /dev/null
+++ b/test/files/pos/macro-qmarkqmarkqmark.scala
@@ -0,0 +1,7 @@
+import language.experimental.macros
+
+object Macros {
+ def foo1 = macro ???
+ def foo2(x: Int) = macro ???
+ def foo3[T] = macro ???
+}
\ No newline at end of file
diff --git a/test/files/pos/manifest1-new.scala b/test/files/pos/manifest1-new.scala
new file mode 100644
index 0000000..3907d78
--- /dev/null
+++ b/test/files/pos/manifest1-new.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+
+object Test {
+ def foo[T](x: T)(implicit m: TypeTag[T]) {
+ foo(List(x))
+ }
+ foo(1)
+ foo("abc")
+ foo(List(1, 2, 3))
+ val x: List[Int] with Ordered[List[Int]] = null
+ foo(x)
+ foo[x.type](x)
+ abstract class C { type T = String; val x: T }
+ val c = new C { val x = "abc" }
+ foo(c.x)
+ abstract class D { type T; implicit val m: TypeTag[T]; val x: T }
+ val stringm = implicitly[TypeTag[String]]
+ val d: D = new D { type T = String; val m = stringm; val x = "x" }
+ import d.m
+ foo(d.x)
+}
\ No newline at end of file
diff --git a/test/files/pos/manifest1.scala b/test/files/pos/manifest1-old.scala
similarity index 100%
rename from test/files/pos/manifest1.scala
rename to test/files/pos/manifest1-old.scala
diff --git a/test/files/pos/native-warning.flags b/test/files/pos/native-warning.flags
new file mode 100644
index 0000000..65faf53
--- /dev/null
+++ b/test/files/pos/native-warning.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -deprecation
\ No newline at end of file
diff --git a/test/files/pos/native-warning.scala b/test/files/pos/native-warning.scala
new file mode 100644
index 0000000..f721a57
--- /dev/null
+++ b/test/files/pos/native-warning.scala
@@ -0,0 +1,3 @@
+class A {
+ @native def setup(): Unit
+}
diff --git a/test/files/pos/bug3097.flags b/test/files/pos/nonlocal-unchecked.flags
similarity index 100%
rename from test/files/pos/bug3097.flags
rename to test/files/pos/nonlocal-unchecked.flags
diff --git a/test/files/pos/nonlocal-unchecked.scala b/test/files/pos/nonlocal-unchecked.scala
new file mode 100644
index 0000000..6bd3dc4
--- /dev/null
+++ b/test/files/pos/nonlocal-unchecked.scala
@@ -0,0 +1,6 @@
+class A {
+ def f: Boolean = {
+ val xs = Nil map (_ => return false)
+ true
+ }
+}
diff --git a/test/files/pos/nothing_manifest_disambig-new.scala b/test/files/pos/nothing_manifest_disambig-new.scala
new file mode 100644
index 0000000..ed3a9e8
--- /dev/null
+++ b/test/files/pos/nothing_manifest_disambig-new.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+
+object Test {
+ def mani[T: TypeTag](xs: T) = xs
+ mani(List())
+
+ def listElMani[T: TypeTag](xs: List[T]) = xs
+ listElMani(List())
+
+ def foo[A, C](m : C)(implicit ev: C <:< Traversable[A], mani: TypeTag[A]): (C, A, TypeTag[A]) = (m, m.head, mani)
+ foo(List(1,2,3))
+}
\ No newline at end of file
diff --git a/test/files/pos/nothing_manifest_disambig.scala b/test/files/pos/nothing_manifest_disambig-old.scala
similarity index 100%
rename from test/files/pos/nothing_manifest_disambig.scala
rename to test/files/pos/nothing_manifest_disambig-old.scala
diff --git a/test/files/pos/overloaded_extractor_and_regular_def.scala b/test/files/pos/overloaded_extractor_and_regular_def.scala
new file mode 100644
index 0000000..c8e7da5
--- /dev/null
+++ b/test/files/pos/overloaded_extractor_and_regular_def.scala
@@ -0,0 +1,32 @@
+trait TreesBase {
+ type Tree
+
+ type Apply <: Tree
+
+ val Apply: ApplyExtractor
+
+ abstract class ApplyExtractor {
+ def apply(x: Int): Apply
+ def unapply(apply: Apply): Option[Int]
+ }
+}
+
+trait TreesApi extends TreesBase {
+ def Apply(x: String)
+}
+
+class Universe extends TreesApi {
+ abstract class Tree
+ case class Apply(x: Int) extends Tree
+ object Apply extends ApplyExtractor
+ def Apply(x: String) = Apply(x.toInt)
+}
+
+object Test extends App {
+ def foo(tapi: TreesApi) {
+ import tapi._
+ def bar(tree: Tree) {
+ val Apply(x) = tree
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/override-object-yes.flags b/test/files/pos/override-object-yes.flags
new file mode 100644
index 0000000..22e9a95
--- /dev/null
+++ b/test/files/pos/override-object-yes.flags
@@ -0,0 +1 @@
+-Yoverride-objects
\ No newline at end of file
diff --git a/test/files/pos/override-object-yes.scala b/test/files/pos/override-object-yes.scala
new file mode 100644
index 0000000..b0563df
--- /dev/null
+++ b/test/files/pos/override-object-yes.scala
@@ -0,0 +1,40 @@
+package case1 {
+ class Bippy {
+ def f = 1
+ }
+
+ trait Foo {
+ object Bar extends Bippy {
+ override def f = 2
+ }
+ }
+
+ trait Foo2 extends Foo {
+ override object Bar extends Bippy {
+ override def f = 3
+ }
+ }
+
+ trait Foo3 {
+ object Bar {
+ def g: Traversable[Int] = Nil
+ }
+ }
+ trait Foo4 extends Foo3 {
+ override object Bar {
+ def g: List[Int] = Nil
+ }
+ }
+}
+
+package case2 {
+ class Bar[T]
+
+ class Foo[T] {
+ object A extends Bar[T]
+ }
+
+ class Baz[S] extends Foo[S] {
+ override object A extends Bar[S]
+ }
+}
diff --git a/test/files/pos/package-case.scala b/test/files/pos/package-case.scala
new file mode 100644
index 0000000..906f1eb
--- /dev/null
+++ b/test/files/pos/package-case.scala
@@ -0,0 +1,4 @@
+// a.scala
+// Sat Jul 16 00:34:36 EDT 2011
+
+package object io { case class TextReader() }
diff --git a/test/files/pos/package-implicit/ActorRef.scala b/test/files/pos/package-implicit/ActorRef.scala
new file mode 100644
index 0000000..e3f93c5
--- /dev/null
+++ b/test/files/pos/package-implicit/ActorRef.scala
@@ -0,0 +1,7 @@
+package t1000647.foo
+
+trait ActorRef {
+ def stop(): Unit = {}
+}
+
+trait ScalaActorRef { self: ActorRef => }
\ No newline at end of file
diff --git a/test/files/pos/package-implicit/DataFlow.scala b/test/files/pos/package-implicit/DataFlow.scala
new file mode 100644
index 0000000..d948280
--- /dev/null
+++ b/test/files/pos/package-implicit/DataFlow.scala
@@ -0,0 +1,7 @@
+package t1000647.bar
+
+import t1000647.foo.{ScalaActorRef}
+
+object DataFlow {
+ def foo(ref: ScalaActorRef) = ref.stop()
+}
diff --git a/test/files/pos/package-implicit/package.scala b/test/files/pos/package-implicit/package.scala
new file mode 100644
index 0000000..96c4b13
--- /dev/null
+++ b/test/files/pos/package-implicit/package.scala
@@ -0,0 +1,6 @@
+package t1000647
+
+package object foo {
+ implicit def scala2ActorRef(ref: ScalaActorRef): ActorRef =
+ ref.asInstanceOf[ActorRef]
+}
\ No newline at end of file
diff --git a/test/files/pos/packageobjs.scala b/test/files/pos/packageobjs.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/patmat.scala b/test/files/pos/patmat.scala
new file mode 100644
index 0000000..4e652b1
--- /dev/null
+++ b/test/files/pos/patmat.scala
@@ -0,0 +1,163 @@
+// these used to be in test/files/run/patmatnew.scala
+// the ticket numbers are from the old tracker, not Trac
+
+object ZipFun {
+ //just compilation
+ def zipFun[a, b](xs: List[a], ys: List[b]): List[Pair[a, b]] = (Pair(xs, ys): @unchecked) match {
+ // !!! case Pair(List(), _), Pair(_, List()) => List()
+ case (x :: xs1, y :: ys1) => (x, y) :: zipFun(xs1, ys1)
+ }
+}
+
+object Test1253 { // compile-only
+ def foo(t: (Int, String)) = t match {
+ case (1, "") => throw new Exception
+ case (r, _) => throw new Exception(r.toString)
+ }
+}
+
+object Foo1258 {
+ case object baz
+ def foo(bar: AnyRef) = {
+ val Baz = baz
+ bar match {
+ case Baz => ()
+ }
+ }
+}
+
+object t1261 {
+ sealed trait Elem
+ case class Foo() extends Elem
+ case class Bar() extends Elem
+ trait Row extends Elem
+ object Row {
+ def unapply(r: Row) = true
+
+ def f(elem: Elem) {
+ elem match {
+ case Bar() => ;
+ case Row() => ;
+ case Foo() => ; // used to give ERROR (unreachable code)
+ }
+ }
+ }
+}
+
+sealed abstract class Tree
+case class Node(l: Tree, v: Int, r: Tree) extends Tree
+case object EmptyTree extends Tree
+
+object Ticket335 { // compile-only
+ def runTest() {
+ (EmptyTree: Tree @unchecked) match {
+ case Node(_, v, _) if (v == 0) => 0
+ case EmptyTree => 2
+ }
+ }
+}
+
+object TestIfOpt { //compile-only "test EqualsPatternClass in combination with MixTypes opt, bug #1278"
+ trait Token {
+ val offset: Int
+ def matching: Option[Token]
+ }
+ def go(tok: Token) = (tok.matching: @unchecked) match {
+ case Some(other) if true => Some(other)
+ case _ if true => tok.matching match {
+ case Some(other) => Some(other)
+ case _ => None
+ }
+ }
+}
+
+object Go { // bug #1277 compile-only
+ trait Core { def next: Position = null }
+ trait Dir
+ val NEXT = new Dir {}
+
+ trait Position extends Core
+
+ (null: Core, null: Dir) match {
+ case (_, NEXT) if true => false // no matter whether NEXT test succeed, cannot throw column because of guard
+ case (at2: Position, dir) => true
+ }
+}
+
+trait Outer { // bug #1282 compile-only
+ object No
+ trait File {
+ (null: AnyRef) match {
+ case No => false
+ }
+ }
+}
+
+class Test806_818 { // #806, #811 compile only -- type of bind
+ // t811
+ trait Core {
+ trait NodeImpl
+ trait OtherImpl extends NodeImpl
+ trait DoubleQuoteImpl extends NodeImpl
+ def asDQ(node: OtherImpl) = node match {
+ case dq: DoubleQuoteImpl => dq
+ }
+ }
+
+ trait IfElseMatcher {
+ type Node <: NodeImpl
+ trait NodeImpl
+ trait IfImpl
+ private def coerceIf(node: Node) = node match {
+ case node: IfImpl => node // var node is of type Node with IfImpl!
+ case _ => null
+ }
+ }
+}
+
+object Ticket495bis {
+ def signum(x: Int): Int =
+ x match {
+ case 0 => 0
+ case _ if x < 0 => -1
+ case _ if x > 0 => 1
+ }
+ def pair_m(x: Int, y: Int) =
+ (x, y) match {
+ case (_, 0) => 0
+ case (-1, _) => -1
+ case (_, _) => 1
+ }
+}
+
+object Ticket522 {
+ class Term[X]
+ object App {
+ // i'm hidden
+ case class InternalApply[Y, Z](fun: Y => Z, arg: Y) extends Term[Z]
+
+ def apply[Y, Z](fun: Y => Z, arg: Y): Term[Z] =
+ new InternalApply[Y, Z](fun, arg)
+
+ def unapply[X](arg: Term[X]): Option[(Y => Z, Y)] forSome { type Y; type Z } =
+ arg match {
+ case i: InternalApply[y, z] => Some(i.fun, i.arg)
+ case _ => None
+ }
+ }
+
+ App({ x: Int => x }, 5) match {
+ case App(arg, a) =>
+ }
+}
+
+object Ticket710 {
+ def method {
+ sealed class Parent()
+ case object Child extends Parent()
+ val x: Parent = Child
+ x match {
+ case Child => ()
+ }
+ }
+}
diff --git a/test/files/pos/polymorphic-case-class.flags b/test/files/pos/polymorphic-case-class.flags
new file mode 100644
index 0000000..464cc20
--- /dev/null
+++ b/test/files/pos/polymorphic-case-class.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -unchecked
\ No newline at end of file
diff --git a/test/files/pos/polymorphic-case-class.scala b/test/files/pos/polymorphic-case-class.scala
new file mode 100644
index 0000000..5ed5eed
--- /dev/null
+++ b/test/files/pos/polymorphic-case-class.scala
@@ -0,0 +1,2 @@
+// no unchecked warnings
+case class Bippy[T, -U, +V](x: T, z: V) { }
diff --git a/test/files/pos/presuperContext.scala b/test/files/pos/presuperContext.scala
new file mode 100644
index 0000000..cc34263
--- /dev/null
+++ b/test/files/pos/presuperContext.scala
@@ -0,0 +1,13 @@
+class A {
+ class C extends { val x: A = this } with AnyRef
+}
+
+class B(x: Int)
+
+class D {
+ class C(x: Int) extends B({val test: D = this; x}) {
+ def this() {
+ this({val test: D = this; 1})
+ }
+ }
+}
diff --git a/test/files/pos/protected-static/J.java b/test/files/pos/protected-static/J.java
new file mode 100644
index 0000000..502dc2c
--- /dev/null
+++ b/test/files/pos/protected-static/J.java
@@ -0,0 +1,7 @@
+package bippy;
+
+public class J {
+ protected static String f() {
+ return "hi mom";
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/protected-static/JavaClass.java b/test/files/pos/protected-static/JavaClass.java
new file mode 100644
index 0000000..cd45a27
--- /dev/null
+++ b/test/files/pos/protected-static/JavaClass.java
@@ -0,0 +1,6 @@
+package bippy;
+
+public abstract class JavaClass {
+ protected static class Inner {}
+ protected abstract Inner getInner();
+}
diff --git a/test/files/pos/protected-static/S.scala b/test/files/pos/protected-static/S.scala
new file mode 100644
index 0000000..6446335
--- /dev/null
+++ b/test/files/pos/protected-static/S.scala
@@ -0,0 +1,7 @@
+package bippy
+
+object Test extends J {
+ def main(args: Array[String]): Unit = {
+ bippy.J.f()
+ }
+}
diff --git a/test/files/pos/protected-static/ScalaClass.scala b/test/files/pos/protected-static/ScalaClass.scala
new file mode 100644
index 0000000..11108b8
--- /dev/null
+++ b/test/files/pos/protected-static/ScalaClass.scala
@@ -0,0 +1,6 @@
+import bippy.JavaClass
+
+class Implementor extends JavaClass {
+ import JavaClass.Inner
+ def getInner: Inner = null
+}
diff --git a/test/files/pos/rangepos-anonapply.flags b/test/files/pos/rangepos-anonapply.flags
new file mode 100644
index 0000000..281f0a1
--- /dev/null
+++ b/test/files/pos/rangepos-anonapply.flags
@@ -0,0 +1 @@
+-Yrangepos
diff --git a/test/files/pos/rangepos-anonapply.scala b/test/files/pos/rangepos-anonapply.scala
new file mode 100644
index 0000000..2f3e4ad
--- /dev/null
+++ b/test/files/pos/rangepos-anonapply.scala
@@ -0,0 +1,9 @@
+class Test {
+ trait PropTraverser {
+ def apply(x: Int): Unit = {}
+ }
+
+ def gather(x: Int) {
+ (new PropTraverser {})(x)
+ }
+}
diff --git a/test/files/pos/rangepos-patmat.flags b/test/files/pos/rangepos-patmat.flags
new file mode 100644
index 0000000..281f0a1
--- /dev/null
+++ b/test/files/pos/rangepos-patmat.flags
@@ -0,0 +1 @@
+-Yrangepos
diff --git a/test/files/pos/rangepos-patmat.scala b/test/files/pos/rangepos-patmat.scala
new file mode 100644
index 0000000..98c842a
--- /dev/null
+++ b/test/files/pos/rangepos-patmat.scala
@@ -0,0 +1,4 @@
+class Foo {
+ def test: PartialFunction[Any, String] = { case _ => "ok" }
+
+}
diff --git a/test/files/pos/rangepos.flags b/test/files/pos/rangepos.flags
new file mode 100644
index 0000000..fcf951d
--- /dev/null
+++ b/test/files/pos/rangepos.flags
@@ -0,0 +1 @@
+-Yrangepos
\ No newline at end of file
diff --git a/test/files/pos/rangepos.scala b/test/files/pos/rangepos.scala
new file mode 100644
index 0000000..623b096
--- /dev/null
+++ b/test/files/pos/rangepos.scala
@@ -0,0 +1,5 @@
+class Foo(val x: Double) extends AnyVal { }
+
+object Pretty {
+ def f(s1: String) = new { def bar = 5 }
+}
diff --git a/test/files/pos/raw-map/J_1.java b/test/files/pos/raw-map/J_1.java
new file mode 100644
index 0000000..bd43bca
--- /dev/null
+++ b/test/files/pos/raw-map/J_1.java
@@ -0,0 +1,4 @@
+public class J_1 {
+ public void setRawType(java.util.Map x) {
+ }
+}
diff --git a/test/files/pos/raw-map/S_2.scala b/test/files/pos/raw-map/S_2.scala
new file mode 100644
index 0000000..de6c4ee
--- /dev/null
+++ b/test/files/pos/raw-map/S_2.scala
@@ -0,0 +1,6 @@
+class Foo {
+ def foo {
+ val x: J_1 = null
+ x.setRawType(new java.util.HashMap)
+ }
+}
diff --git a/test/files/pos/setter-not-implicit.flags b/test/files/pos/setter-not-implicit.flags
new file mode 100644
index 0000000..792c405
--- /dev/null
+++ b/test/files/pos/setter-not-implicit.flags
@@ -0,0 +1 @@
+-feature -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/setter-not-implicit.scala b/test/files/pos/setter-not-implicit.scala
new file mode 100644
index 0000000..9bfffc2
--- /dev/null
+++ b/test/files/pos/setter-not-implicit.scala
@@ -0,0 +1,3 @@
+object O {
+ implicit var x: Int = 0
+}
diff --git a/test/files/pos/spec-Function1.scala b/test/files/pos/spec-Function1.scala
index 7bdcd07..5b6af67 100644
--- a/test/files/pos/spec-Function1.scala
+++ b/test/files/pos/spec-Function1.scala
@@ -8,7 +8,7 @@
// generated by genprod on Wed Apr 23 10:06:16 CEST 2008 (with fancy comment) (with extra methods)
-package scala
+package scalabip
/** <p>
diff --git a/test/files/pos/spec-annotations.scala b/test/files/pos/spec-annotations.scala
index 215f00d..48281e5 100644
--- a/test/files/pos/spec-annotations.scala
+++ b/test/files/pos/spec-annotations.scala
@@ -1,4 +1,4 @@
-class ann(i: Int) extends Annotation
+class ann(i: Int) extends scala.annotation.Annotation
// annotations on abstract types
abstract class C1[@serializable @cloneable +T, U, V[_]]
@@ -23,7 +23,7 @@ object Test {
//bug #1214
val y = new (Integer @ann(0))(2)
- import scala.reflect.BeanProperty
+ import scala.beans.BeanProperty
// bug #637
trait S { def getField(): Int }
diff --git a/test/files/pos/spec-constr-new.scala b/test/files/pos/spec-constr-new.scala
new file mode 100644
index 0000000..7beff91
--- /dev/null
+++ b/test/files/pos/spec-constr-new.scala
@@ -0,0 +1,9 @@
+import scala.reflect.{ClassTag, classTag}
+
+class SparseArray2[@specialized(Int) T:ClassTag](val maxSize: Int, initialLength:Int = 3) {
+ private var data = new Array[T](initialLength);
+ private var index = new Array[Int](initialLength);
+
+ // comment out to compile correctly
+ data.length + 3;
+}
\ No newline at end of file
diff --git a/test/files/pos/spec-constr.scala b/test/files/pos/spec-constr-old.scala
similarity index 100%
rename from test/files/pos/spec-constr.scala
rename to test/files/pos/spec-constr-old.scala
diff --git a/test/files/pos/spec-doubledef-new.scala b/test/files/pos/spec-doubledef-new.scala
new file mode 100644
index 0000000..ad9c639
--- /dev/null
+++ b/test/files/pos/spec-doubledef-new.scala
@@ -0,0 +1,30 @@
+import scala.reflect.runtime.universe._
+
+object Test {
+ def fn[@specialized T, @specialized U](t : T => Int, u : U => Int) : T =
+ null.asInstanceOf[T]
+}
+
+trait A[@specialized(Int) T] {
+ var value: T
+ def getWith[@specialized(Int) Z](f: T => Z) = f(value)
+}
+
+class C extends A[Int] {
+ var value = 10
+ override def getWith[@specialized(Int) Z](f: Int => Z) = f(value)
+}
+
+abstract class B[T, @specialized(scala.Int) U : TypeTag, @specialized(scala.Int) V <% Ordered[V]] {
+ val u: U
+ val v: V
+
+ def f(t: T, v2: V): Pair[U, V] = {
+ val m: Array[U] = null
+ if (m.isEmpty) {
+ Pair(u, v)
+ } else {
+ Pair(u, v2)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/spec-doubledef.scala b/test/files/pos/spec-doubledef-old.scala
similarity index 100%
rename from test/files/pos/spec-doubledef.scala
rename to test/files/pos/spec-doubledef-old.scala
diff --git a/test/files/pos/spec-fields-new.scala b/test/files/pos/spec-fields-new.scala
new file mode 100644
index 0000000..de75b4b
--- /dev/null
+++ b/test/files/pos/spec-fields-new.scala
@@ -0,0 +1,12 @@
+import scala.reflect.{ClassTag, classTag}
+
+abstract class Foo[@specialized T: ClassTag, U <: Ordered[U]](x: T, size: Int) {
+ var y: T
+ var z: T = x
+
+ def initialSize = 16
+ val array = new Array[T](initialSize + size)
+
+ def getZ = z
+ def setZ(zz: T) = z = zz
+}
\ No newline at end of file
diff --git a/test/files/pos/spec-fields.scala b/test/files/pos/spec-fields-old.scala
similarity index 100%
rename from test/files/pos/spec-fields.scala
rename to test/files/pos/spec-fields-old.scala
diff --git a/test/files/pos/spec-funs.scala b/test/files/pos/spec-funs.scala
index 9acc505..611ec0e 100644
--- a/test/files/pos/spec-funs.scala
+++ b/test/files/pos/spec-funs.scala
@@ -54,10 +54,10 @@ final class ClosureTest {
}
}
-object TestInt extends testing.Benchmark {
+object TestInt extends scala.testing.Benchmark {
def run() = (new IntTest).run()
}
-object TestClosure extends testing.Benchmark {
+object TestClosure extends scala.testing.Benchmark {
def run() = (new ClosureTest).run()
}
diff --git a/test/files/pos/spec-groups.scala b/test/files/pos/spec-groups.scala
new file mode 100644
index 0000000..9b6359a
--- /dev/null
+++ b/test/files/pos/spec-groups.scala
@@ -0,0 +1,65 @@
+import Specializable._
+
+class A[@specialized(Primitives) T](x: T) {
+ def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y))
+ def f2[@specialized(Everything) U](x: T, y: U) = ((x, y))
+ def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y))
+ def f4[@specialized(Integral) U](x: T, y: U) = ((x, y))
+ def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y))
+ def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y))
+ def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y))
+}
+class B[@specialized(Everything) T] {
+ def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y))
+ def f2[@specialized(Everything) U](x: T, y: U) = ((x, y))
+ def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y))
+ def f4[@specialized(Integral) U](x: T, y: U) = ((x, y))
+ def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y))
+ def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y))
+ def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y))
+}
+class C[@specialized(Bits32AndUp) T] {
+ def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y))
+ def f2[@specialized(Everything) U](x: T, y: U) = ((x, y))
+ def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y))
+ def f4[@specialized(Integral) U](x: T, y: U) = ((x, y))
+ def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y))
+ def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y))
+ def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y))
+}
+class D[@specialized(Integral) T] {
+ def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y))
+ def f2[@specialized(Everything) U](x: T, y: U) = ((x, y))
+ def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y))
+ def f4[@specialized(Integral) U](x: T, y: U) = ((x, y))
+ def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y))
+ def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y))
+ def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y))
+}
+class E[@specialized(AllNumeric) T] {
+ def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y))
+ def f2[@specialized(Everything) U](x: T, y: U) = ((x, y))
+ def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y))
+ def f4[@specialized(Integral) U](x: T, y: U) = ((x, y))
+ def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y))
+ def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y))
+ def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y))
+}
+class F[@specialized(BestOfBreed) T] {
+ def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y))
+ def f2[@specialized(Everything) U](x: T, y: U) = ((x, y))
+ def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y))
+ def f4[@specialized(Integral) U](x: T, y: U) = ((x, y))
+ def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y))
+ def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y))
+ def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y))
+}
+class G[@specialized(Byte, Double, AnyRef) T] {
+ def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y))
+ def f2[@specialized(Everything) U](x: T, y: U) = ((x, y))
+ def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y))
+ def f4[@specialized(Integral) U](x: T, y: U) = ((x, y))
+ def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y))
+ def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y))
+ def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y))
+}
diff --git a/test/files/pos/spec-params-new.scala b/test/files/pos/spec-params-new.scala
new file mode 100644
index 0000000..959ce15
--- /dev/null
+++ b/test/files/pos/spec-params-new.scala
@@ -0,0 +1,34 @@
+import scala.reflect.{ClassTag, classTag}
+
+class Foo[@specialized A: ClassTag] {
+
+ // conflicting in bounds, expect a normalized member calling m
+ // and bridge + implementation in specialized subclasses
+ // and overloads here according to specialization on A
+ def m1[@specialized B <: A](x: B, y: A) =
+ goal(x)
+
+ // conflicting, unsolvable, expect a warning
+ def m2[@specialized B <: String](x: B) = x.concat("a")
+
+ // conflicting in bounds, no mention of other spec members
+ // expect an overload here plus implementation in
+ // compatible specialized subclasses
+ def m3[@specialized B >: A](x: B) = ()
+
+ // non-conflicting, expect a normalized overload implementation here
+ def m4[@specialized T, U <: Ordered[T]](x: T, y: U) = ()
+
+ // non-conflicting, expect a normalized overload implementation here
+ def m5[@specialized B](x: B) = x
+
+ // non-conflicting, expect a normalized implementation here
+ // and specialized implementations for all expansions in specialized subclasses
+ def m6[@specialized B](x: B, y: A) =
+ goal(y)
+
+ def goal(x: A) = {
+ val xs = new Array[A](1)
+ xs(0) = x
+ }
+}
diff --git a/test/files/pos/spec-params.scala b/test/files/pos/spec-params-old.scala
similarity index 100%
rename from test/files/pos/spec-params.scala
rename to test/files/pos/spec-params-old.scala
diff --git a/test/files/pos/spec-sparsearray-new.scala b/test/files/pos/spec-sparsearray-new.scala
new file mode 100644
index 0000000..7b3934c
--- /dev/null
+++ b/test/files/pos/spec-sparsearray-new.scala
@@ -0,0 +1,25 @@
+import scala.reflect.{ClassTag, classTag}
+import scala.collection.mutable.MapLike
+
+class SparseArray[@specialized(Int) T:ClassTag] extends collection.mutable.Map[Int,T] with collection.mutable.MapLike[Int,T,SparseArray[T]] {
+ override def get(x: Int) = {
+ val ind = findOffset(x)
+ if(ind < 0) None else Some(error("ignore"))
+ }
+
+ /**
+ * Returns the offset into index and data for the requested vector
+ * index. If the requested index is not found, the return value is
+ * negative and can be converted into an insertion point with -(rv+1).
+ */
+ private def findOffset(i : Int) : Int = {
+ error("impl doesn't matter")
+ }
+
+ override def apply(i : Int) : T = { error("ignore") }
+ override def update(i : Int, value : T) = error("ignore")
+ override def empty = new SparseArray[T]
+ def -=(ind: Int) = error("ignore")
+ def +=(kv: (Int,T)) = error("ignore")
+ override final def iterator = error("ignore")
+}
\ No newline at end of file
diff --git a/test/files/pos/spec-sparsearray.scala b/test/files/pos/spec-sparsearray-old.scala
similarity index 100%
rename from test/files/pos/spec-sparsearray.scala
rename to test/files/pos/spec-sparsearray-old.scala
diff --git a/test/files/pos/spec-t6286.scala b/test/files/pos/spec-t6286.scala
new file mode 100755
index 0000000..4d87998
--- /dev/null
+++ b/test/files/pos/spec-t6286.scala
@@ -0,0 +1,10 @@
+trait Foo[@specialized(Int) A] {
+ def fun[@specialized(Int) B](init: B)(f: (B, A) => B): B
+}
+
+class Bar(values: Array[Int]) extends Foo[Int] {
+ def fun[@specialized(Int) C](init: C)(f: (C, Int) => C): C = {
+ val arr = values
+ f(init, arr(0))
+ }
+}
diff --git a/test/files/pos/specialize10.scala b/test/files/pos/specialize10.scala
new file mode 100644
index 0000000..bbe197c
--- /dev/null
+++ b/test/files/pos/specialize10.scala
@@ -0,0 +1,7 @@
+trait Bippy[@specialized(
+ scala.Char, scala.Boolean, scala.Byte,
+ scala.Short, scala.Int, scala.Long,
+ scala.Float, scala.Double, scala.Unit,
+ scala.AnyRef) T] { }
+
+trait Bippy2[@specialized(Char, Boolean, Byte, Short, Int, Long, Float, Double, Unit, AnyRef) T] { }
diff --git a/test/files/pos/specializes-sym-crash.scala b/test/files/pos/specializes-sym-crash.scala
new file mode 100644
index 0000000..c46f435
--- /dev/null
+++ b/test/files/pos/specializes-sym-crash.scala
@@ -0,0 +1,26 @@
+import scala.collection._
+
+trait Foo[+A,
+ +Coll,
+ +This <: GenSeqView[A, Coll] with GenSeqViewLike[A, Coll, This]]
+extends GenSeq[A] with GenSeqLike[A, This] with GenIterableView[A, Coll] with GenIterableViewLike[A, Coll, This] {
+self =>
+
+ trait Transformed[+B] extends GenSeqView[B, Coll] with super.Transformed[B] {
+ def length: Int
+ def apply(idx: Int): B
+ override def toString = viewToString
+ }
+ trait Reversed extends Transformed[A] {
+ override def iterator: Iterator[A] = createReversedIterator
+ def length: Int = self.length
+ def apply(idx: Int): A = self.apply(length - 1 - idx)
+ final override protected[this] def viewIdentifier = "R"
+
+ private def createReversedIterator = {
+ var lst = List[A]()
+ for (elem <- self) lst ::= elem
+ lst.iterator
+ }
+ }
+}
diff --git a/test/files/pos/spurious-overload.scala b/test/files/pos/spurious-overload.scala
new file mode 100644
index 0000000..9767a44
--- /dev/null
+++ b/test/files/pos/spurious-overload.scala
@@ -0,0 +1,32 @@
+object Test extends App {
+ def foo(bar: Any) = bar
+
+ val code = foo{
+ object lazyLib {
+
+ def delay[A](value: => A): Susp[A] = new SuspImpl[A](value)
+
+ implicit def force[A](s: Susp[A]): A = s()
+
+ abstract class Susp[+A] extends Function0[A]
+
+ class SuspImpl[A](lazyValue: => A) extends Susp[A] {
+ private var maybeValue: Option[A] = None
+
+ override def apply() = maybeValue match {
+ case None =>
+ val value = lazyValue
+ maybeValue = Some(value)
+ value
+ case Some(value) =>
+ value
+ }
+ }
+ }
+
+ import lazyLib._
+
+ val s: Susp[Int] = delay { println("evaluating..."); 3 }
+ println("2 + s = " + (2 + s)) // implicit call to force()
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/strip-tvars-for-lubbasetypes.scala b/test/files/pos/strip-tvars-for-lubbasetypes.scala
new file mode 100644
index 0000000..2be8625
--- /dev/null
+++ b/test/files/pos/strip-tvars-for-lubbasetypes.scala
@@ -0,0 +1,25 @@
+object Test {
+
+ implicit final class EqualOps[T](val x: T) extends AnyVal {
+ def ===[T1, Ph >: T <: T1, Ph2 >: Ph <: T1](other: T1): Boolean = x == other
+ def !!: Boolean = x == other
+ }
+
+ class A
+ class B extends A
+ class C extends A
+
+ val a = new A
+ val b = new B
+ val c = new C
+
+ val x1 = a === b
+ val x2 = b === a
+ val x3 = b === c // error, infers Object{} for T1
+ val x4 = b.===[A, B, B](c)
+
+ val x5 = b !!! c // always compiled due to the order of Ph2 and Ph
+
+
+
+}
diff --git a/test/files/pos/switch-small.scala b/test/files/pos/switch-small.scala
new file mode 100644
index 0000000..9de9ca0
--- /dev/null
+++ b/test/files/pos/switch-small.scala
@@ -0,0 +1,8 @@
+import annotation._
+
+object Test {
+ def f(x: Int) = (x: @switch) match {
+ case 1 => 1
+ case _ => 2
+ }
+}
diff --git a/test/files/pos/bug0002.scala b/test/files/pos/t0002.scala
similarity index 100%
rename from test/files/pos/bug0002.scala
rename to test/files/pos/t0002.scala
diff --git a/test/files/pos/bug0017.scala b/test/files/pos/t0017.scala
similarity index 100%
rename from test/files/pos/bug0017.scala
rename to test/files/pos/t0017.scala
diff --git a/test/files/pos/bug0020.scala b/test/files/pos/t0020.scala
similarity index 100%
rename from test/files/pos/bug0020.scala
rename to test/files/pos/t0020.scala
diff --git a/test/files/pos/t0029.scala b/test/files/pos/t0029.scala
new file mode 100644
index 0000000..937b6d7
--- /dev/null
+++ b/test/files/pos/t0029.scala
@@ -0,0 +1,3 @@
+object Main {
+ def f[a]: List[List[a]] = for (l1 <- Nil; l2 <- Nil) yield l1
+}
diff --git a/test/files/pos/bug0030.scala b/test/files/pos/t0030.scala
similarity index 100%
rename from test/files/pos/bug0030.scala
rename to test/files/pos/t0030.scala
diff --git a/test/files/pos/bug0031.scala b/test/files/pos/t0031.scala
similarity index 100%
rename from test/files/pos/bug0031.scala
rename to test/files/pos/t0031.scala
diff --git a/test/files/pos/bug0032.scala b/test/files/pos/t0032.scala
similarity index 100%
rename from test/files/pos/bug0032.scala
rename to test/files/pos/t0032.scala
diff --git a/test/files/pos/bug0036.scala b/test/files/pos/t0036.scala
similarity index 100%
rename from test/files/pos/bug0036.scala
rename to test/files/pos/t0036.scala
diff --git a/test/files/pos/bug0039.scala b/test/files/pos/t0039.scala
similarity index 100%
rename from test/files/pos/bug0039.scala
rename to test/files/pos/t0039.scala
diff --git a/test/files/pos/bug0049.scala b/test/files/pos/t0049.scala
similarity index 100%
rename from test/files/pos/bug0049.scala
rename to test/files/pos/t0049.scala
diff --git a/test/files/pos/bug0053.scala b/test/files/pos/t0053.scala
similarity index 100%
rename from test/files/pos/bug0053.scala
rename to test/files/pos/t0053.scala
diff --git a/test/files/pos/bug0054.scala b/test/files/pos/t0054.scala
similarity index 100%
rename from test/files/pos/bug0054.scala
rename to test/files/pos/t0054.scala
diff --git a/test/files/pos/bug0061.scala b/test/files/pos/t0061.scala
similarity index 100%
rename from test/files/pos/bug0061.scala
rename to test/files/pos/t0061.scala
diff --git a/test/files/pos/bug0064.scala b/test/files/pos/t0064.scala
similarity index 100%
rename from test/files/pos/bug0064.scala
rename to test/files/pos/t0064.scala
diff --git a/test/files/pos/bug0066.scala b/test/files/pos/t0066.scala
similarity index 100%
rename from test/files/pos/bug0066.scala
rename to test/files/pos/t0066.scala
diff --git a/test/files/pos/bug0068.scala b/test/files/pos/t0068.scala
similarity index 100%
rename from test/files/pos/bug0068.scala
rename to test/files/pos/t0068.scala
diff --git a/test/files/pos/bug0069.scala b/test/files/pos/t0069.scala
similarity index 100%
rename from test/files/pos/bug0069.scala
rename to test/files/pos/t0069.scala
diff --git a/test/files/pos/bug0076.scala b/test/files/pos/t0076.scala
similarity index 100%
rename from test/files/pos/bug0076.scala
rename to test/files/pos/t0076.scala
diff --git a/test/files/pos/bug0081.scala b/test/files/pos/t0081.scala
similarity index 100%
rename from test/files/pos/bug0081.scala
rename to test/files/pos/t0081.scala
diff --git a/test/files/pos/bug0082.scala b/test/files/pos/t0082.scala
similarity index 100%
rename from test/files/pos/bug0082.scala
rename to test/files/pos/t0082.scala
diff --git a/test/files/pos/bug0085.scala b/test/files/pos/t0085.scala
similarity index 100%
rename from test/files/pos/bug0085.scala
rename to test/files/pos/t0085.scala
diff --git a/test/files/pos/bug0091.scala b/test/files/pos/t0091.scala
similarity index 100%
rename from test/files/pos/bug0091.scala
rename to test/files/pos/t0091.scala
diff --git a/test/files/pos/bug0093.scala b/test/files/pos/t0093.scala
similarity index 100%
rename from test/files/pos/bug0093.scala
rename to test/files/pos/t0093.scala
diff --git a/test/files/pos/bug0095.scala b/test/files/pos/t0095.scala
similarity index 100%
rename from test/files/pos/bug0095.scala
rename to test/files/pos/t0095.scala
diff --git a/test/files/pos/bug0123.scala b/test/files/pos/t0123.scala
similarity index 100%
rename from test/files/pos/bug0123.scala
rename to test/files/pos/t0123.scala
diff --git a/test/files/pos/bug0204.scala b/test/files/pos/t0204.scala
similarity index 100%
rename from test/files/pos/bug0204.scala
rename to test/files/pos/t0204.scala
diff --git a/test/files/pos/bug0304.scala b/test/files/pos/t0304.scala
similarity index 100%
rename from test/files/pos/bug0304.scala
rename to test/files/pos/t0304.scala
diff --git a/test/files/pos/bug0305.scala b/test/files/pos/t0305.scala
similarity index 100%
rename from test/files/pos/bug0305.scala
rename to test/files/pos/t0305.scala
diff --git a/test/files/pos/bug0422.scala b/test/files/pos/t0422.scala
similarity index 100%
rename from test/files/pos/bug0422.scala
rename to test/files/pos/t0422.scala
diff --git a/test/files/pos/bug0599.scala b/test/files/pos/t0599.scala
similarity index 100%
rename from test/files/pos/bug0599.scala
rename to test/files/pos/t0599.scala
diff --git a/test/files/pos/bug0646.scala b/test/files/pos/t0646.scala
similarity index 100%
rename from test/files/pos/bug0646.scala
rename to test/files/pos/t0646.scala
diff --git a/test/files/pos/t0651.scala b/test/files/pos/t0651.scala
deleted file mode 100644
index 52bef7e..0000000
--- a/test/files/pos/t0651.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Reflection1 extends App {
- case class Element(name: String)
- println(reflect.Code.lift({val e = Element("someName"); e}).tree)
-}
diff --git a/test/files/pos/t1000.scala b/test/files/pos/t1000.scala
index 31d7c7e..fabef94 100644
--- a/test/files/pos/t1000.scala
+++ b/test/files/pos/t1000.scala
@@ -1,3 +1,8 @@
object A {
println("""This a "raw" string ending with a "double quote"""")
}
+
+object Test extends App {
+ val xs = Array(1, 2, 3)
+ Console.println(xs.filter(_ >= 0).length)
+}
diff --git a/test/files/pos/t1001.scala b/test/files/pos/t1001.scala
index 88321e6..7a06bfa 100644
--- a/test/files/pos/t1001.scala
+++ b/test/files/pos/t1001.scala
@@ -1,6 +1,115 @@
+// was t1001.scala
class Foo;
object Overload{
val foo = classOf[Foo].getConstructors()(0)
foo.getDeclaringClass
}
+
+// was t1001.scala
+
+// I suspect the stack overflow is occurring when the compiler is determining the types for the following line at the end of the file:-
+// val data = List(N26,N25)
+
+abstract class A
+{
+ // commenting out the following line (only) leads to successful compilation
+ protected val data: List[A]
+}
+
+trait B[T <: B[T]] extends A { self: T => }
+
+abstract class C extends A
+{
+ // commenting out the following line (only) leads to successful compilation
+ protected val data: List[C]
+}
+
+abstract class D extends C with B[D] {}
+
+abstract class Ee extends C with B[Ee]
+{
+}
+
+
+object N1 extends D
+{
+ val data = Nil
+}
+
+object N2 extends D
+{
+ val data = Nil
+}
+
+object N5 extends D
+{
+ val data = List(N1)
+}
+
+object N6 extends D
+{
+ val data = List(N1)
+}
+
+object N8 extends D
+{
+ val data = List(N1)
+}
+
+object N10 extends D
+{
+ val data = Nil
+}
+
+object N13 extends D
+{
+ val data = List(N2)
+}
+
+object N14 extends D
+{
+ val data = List(N5,N10,N8)
+}
+
+object N15 extends D
+{
+ val data = List(N14)
+}
+
+object N16 extends D
+{
+ val data = List(N13,N6,N15)
+}
+
+object N17 extends D
+{
+ val data = List(N16)
+}
+
+object N21 extends D
+{
+ val data = List(N16)
+}
+
+object N22 extends D
+{
+ val data = List(N17)
+}
+
+object N25 extends D
+{
+ val data = List(N22)
+}
+
+object N26 extends Ee
+{
+ val data = List(N21,N17)
+}
+
+// Commenting out the following object (only) leads to successful compilation
+object N31 extends Ee
+{
+ // If we use List[C](N26,N25), we achieve successful compilation
+ val data = List[C](N26,N25)
+}
diff --git a/test/files/pos/bug1006.scala b/test/files/pos/t1006.scala
similarity index 100%
rename from test/files/pos/bug1006.scala
rename to test/files/pos/t1006.scala
diff --git a/test/files/pos/t1014.scala b/test/files/pos/t1014.scala
new file mode 100644
index 0000000..3fc10d1
--- /dev/null
+++ b/test/files/pos/t1014.scala
@@ -0,0 +1,15 @@
+import scala.xml.{NodeSeq, Elem}
+
+class EO extends App with Moo {
+ // return type is Flog, inherited from overridden method.
+ // implicit conversions are applied because expected type `pt` is `Flog` when `computeType(rhs, pt)`.
+ def cat = <cat>dog</cat>
+
+ implicit def nodeSeqToFlog(in: Elem): Flog = new Flog(in)
+}
+
+trait Moo {
+ def cat: Flog
+}
+
+class Flog(val in: NodeSeq)
diff --git a/test/files/pos/t1029/Test_1.scala b/test/files/pos/t1029/Test_1.scala
index e828087..d268c71 100644
--- a/test/files/pos/t1029/Test_1.scala
+++ b/test/files/pos/t1029/Test_1.scala
@@ -1,4 +1,4 @@
-class ann(a: Array[Int]) extends StaticAnnotation
+class ann(a: Array[Int]) extends annotation.StaticAnnotation
object Test1 {
// bug #1029
diff --git a/test/files/pos/bug1034.scala b/test/files/pos/t1034.scala
similarity index 100%
rename from test/files/pos/bug1034.scala
rename to test/files/pos/t1034.scala
diff --git a/test/files/pos/bug1048.scala b/test/files/pos/t1048.scala
similarity index 100%
rename from test/files/pos/bug1048.scala
rename to test/files/pos/t1048.scala
diff --git a/test/files/pos/t1049.scala b/test/files/pos/t1049.scala
new file mode 100644
index 0000000..61d99f9
--- /dev/null
+++ b/test/files/pos/t1049.scala
@@ -0,0 +1,7 @@
+package t1049
+
+abstract class Test {
+ type T <: A
+ class A { self: T => }
+ class B extends A { self: T => }
+}
diff --git a/test/files/pos/t1050.scala b/test/files/pos/t1050.scala
new file mode 100644
index 0000000..d34b0cf
--- /dev/null
+++ b/test/files/pos/t1050.scala
@@ -0,0 +1,10 @@
+package t1050
+
+abstract class A {
+ type T <: scala.AnyRef
+ class A { this: T =>
+ def b = 3
+ def c = b
+ b
+ }
+}
diff --git a/test/files/pos/bug1056.scala b/test/files/pos/t1056.scala
similarity index 100%
rename from test/files/pos/bug1056.scala
rename to test/files/pos/t1056.scala
diff --git a/test/files/pos/t1070.scala b/test/files/pos/t1070.scala
new file mode 100644
index 0000000..1622043
--- /dev/null
+++ b/test/files/pos/t1070.scala
@@ -0,0 +1,4 @@
+import scala.beans.BeanProperty;
+trait beanpropertytrait {
+ @BeanProperty var myVariable: Long = -1l;
+}
diff --git a/test/files/pos/bug1071.scala b/test/files/pos/t1071.scala
similarity index 100%
rename from test/files/pos/bug1071.scala
rename to test/files/pos/t1071.scala
diff --git a/test/files/pos/bug1075.scala b/test/files/pos/t1075.scala
similarity index 100%
rename from test/files/pos/bug1075.scala
rename to test/files/pos/t1075.scala
diff --git a/test/files/pos/bug1085.scala b/test/files/pos/t1085.scala
similarity index 100%
rename from test/files/pos/bug1085.scala
rename to test/files/pos/t1085.scala
diff --git a/test/files/pos/bug1090.scala b/test/files/pos/t1090.scala
similarity index 100%
rename from test/files/pos/bug1090.scala
rename to test/files/pos/t1090.scala
diff --git a/test/files/pos/bug1107.scala b/test/files/pos/t1107.scala
similarity index 100%
rename from test/files/pos/bug1107.scala
rename to test/files/pos/t1107.scala
diff --git a/test/files/pos/t1107/O.scala b/test/files/pos/t1107b/O.scala
similarity index 100%
rename from test/files/pos/t1107/O.scala
rename to test/files/pos/t1107b/O.scala
diff --git a/test/files/pos/t1107/T.scala b/test/files/pos/t1107b/T.scala
similarity index 100%
rename from test/files/pos/t1107/T.scala
rename to test/files/pos/t1107b/T.scala
diff --git a/test/files/pos/bug1119.scala b/test/files/pos/t1119.scala
similarity index 100%
rename from test/files/pos/bug1119.scala
rename to test/files/pos/t1119.scala
diff --git a/test/files/pos/bug1123.scala b/test/files/pos/t1123.scala
similarity index 100%
rename from test/files/pos/bug1123.scala
rename to test/files/pos/t1123.scala
diff --git a/test/files/pos/bug112606A.scala b/test/files/pos/t112606A.scala
similarity index 100%
rename from test/files/pos/bug112606A.scala
rename to test/files/pos/t112606A.scala
diff --git a/test/files/pos/t1133.scala b/test/files/pos/t1133.scala
new file mode 100644
index 0000000..4538de5
--- /dev/null
+++ b/test/files/pos/t1133.scala
@@ -0,0 +1,32 @@
+object Match
+{
+ def main(args: Array[String]) = {
+ args(0) match {
+ case Extractor1(Extractor2(Extractor3("dog", "dog", "dog"), x2, x3), b, c, Extractor3("b", "b", f), e) => println(e)
+ case Extractor3(Extractor2(Extractor1("a", "aa", "aaa", "aa", "a"), Extractor2("a", "aa", "aaa"), e), y, z) => println(e)
+ case Extractor2(Extractor3("a", "a", x), Extractor3("b", "b", y), Extractor3("c", "c", z)) => println(z)
+ case _ => println("fail")
+ }
+ }
+
+ object Extractor1 {
+ def unapply(x: Any) = x match {
+ case x: String => Some(x, x+x, x+x+x, x+x, x)
+ case _ => None
+ }
+ }
+
+ object Extractor2 {
+ def unapply(x: Any) = x match {
+ case x: String => Some(x, x+x, x+x+x)
+ case _ => None
+ }
+ }
+
+ object Extractor3 {
+ def unapply(x: Any) = x match {
+ case x: String => Some(x, x, x)
+ case _ => None
+ }
+ }
+}
diff --git a/test/files/pos/bug1136.scala b/test/files/pos/t1136.scala
similarity index 100%
rename from test/files/pos/bug1136.scala
rename to test/files/pos/t1136.scala
diff --git a/test/files/pos/bug115.scala b/test/files/pos/t115.scala
similarity index 100%
rename from test/files/pos/bug115.scala
rename to test/files/pos/t115.scala
diff --git a/test/files/pos/bug116.scala b/test/files/pos/t116.scala
similarity index 100%
rename from test/files/pos/bug116.scala
rename to test/files/pos/t116.scala
diff --git a/test/files/pos/bug1168.scala b/test/files/pos/t1168.scala
similarity index 100%
rename from test/files/pos/bug1168.scala
rename to test/files/pos/t1168.scala
diff --git a/test/files/pos/bug1185.scala b/test/files/pos/t1185.scala
similarity index 100%
rename from test/files/pos/bug1185.scala
rename to test/files/pos/t1185.scala
diff --git a/test/files/pos/bug119.scala b/test/files/pos/t119.scala
similarity index 100%
rename from test/files/pos/bug119.scala
rename to test/files/pos/t119.scala
diff --git a/test/files/pos/t1203.scala b/test/files/pos/t1203.scala
new file mode 100644
index 0000000..062ef93
--- /dev/null
+++ b/test/files/pos/t1203.scala
@@ -0,0 +1,7 @@
+case class ant(t: String) extends scala.annotation.Annotation
+object Test {
+ def main(args: Array[String]): Unit = {
+ val a: scala.xml.NodeSeq @ant("12") = Nil
+ println(a)
+ }
+}
diff --git a/test/files/pos/t1203/J.java b/test/files/pos/t1203b/J.java
similarity index 100%
rename from test/files/pos/t1203/J.java
rename to test/files/pos/t1203b/J.java
diff --git a/test/files/pos/t1203/S.scala b/test/files/pos/t1203b/S.scala
similarity index 100%
rename from test/files/pos/t1203/S.scala
rename to test/files/pos/t1203b/S.scala
diff --git a/test/files/pos/bug1208.scala b/test/files/pos/t1208.scala
similarity index 100%
rename from test/files/pos/bug1208.scala
rename to test/files/pos/t1208.scala
diff --git a/test/files/pos/bug121.scala b/test/files/pos/t121.scala
similarity index 100%
rename from test/files/pos/bug121.scala
rename to test/files/pos/t121.scala
diff --git a/test/files/pos/bug1210a.scala b/test/files/pos/t1210a.scala
similarity index 100%
rename from test/files/pos/bug1210a.scala
rename to test/files/pos/t1210a.scala
diff --git a/test/files/pos/bug122.scala b/test/files/pos/t122.scala
similarity index 100%
rename from test/files/pos/bug122.scala
rename to test/files/pos/t122.scala
diff --git a/test/files/pos/bug1237.scala b/test/files/pos/t1237.scala
similarity index 100%
rename from test/files/pos/bug1237.scala
rename to test/files/pos/t1237.scala
diff --git a/test/files/pos/bug124.scala b/test/files/pos/t124.scala
similarity index 100%
rename from test/files/pos/bug124.scala
rename to test/files/pos/t124.scala
diff --git a/test/files/pos/t1263/Test.java b/test/files/pos/t1263/Test.java
index 6ca88c2..1718a99 100644
--- a/test/files/pos/t1263/Test.java
+++ b/test/files/pos/t1263/Test.java
@@ -4,6 +4,7 @@ import java.rmi.RemoteException;
import test.Map;
+ at SuppressWarnings("unchecked")
public class Test implements Map<String, String> {
public Map.MapTo plus(String o) {
return null;
diff --git a/test/files/pos/bug1272.scala b/test/files/pos/t1272.scala
similarity index 100%
rename from test/files/pos/bug1272.scala
rename to test/files/pos/t1272.scala
diff --git a/test/files/pos/t1279a.scala b/test/files/pos/t1279a.scala
new file mode 100644
index 0000000..18b1e53
--- /dev/null
+++ b/test/files/pos/t1279a.scala
@@ -0,0 +1,39 @@
+// covariant linked list
+abstract class M {
+ self =>
+
+ type T
+ final type selfType = M {type T <: self.T}
+ type actualSelfType >: self.type <: selfType
+
+ def next: selfType
+
+ // I don't understand why this doesn't compile, but that's a separate matter
+ // error: method all2 cannot be accessed in M.this.selfType
+ // because its instance type => Stream[M{type T <: M.this.selfType#T}]
+ // contains a malformed type: M.this.selfType#T
+ def all2: Stream[M {type T <: self.T}] = Stream.cons(self: actualSelfType, next.all2)
+
+ // compiles successfully
+ def all3: Stream[M {type T <: self.T}] = all3Impl(self: actualSelfType)
+ private def all3Impl(first: M {type T <: self.T}): Stream[M {type T <: self.T}] = Stream.cons(first, all3Impl(first.next))
+
+ def all4: Stream[M {type T <: self.T}] = Unrelated.all4Impl[T](self: actualSelfType)
+}
+
+// TODO!!! fix this bug for real, it compiles successfully, but weird types are inferred
+object Unrelated {
+ // compiles successfully
+ def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl[U](first.next))
+
+ // should compile successfully without the [U], but:
+ // def all4ImplFail[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4ImplFail(first.next))
+ //
+ // test/files/pos/t1279a.scala:31: error: type mismatch;
+ // found : first.selfType
+ // (which expands to) M{type T <: first.T}
+ // required: M{type T <: Nothing}
+ // def all4ImplFail[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4ImplFail(first.next))
+ // ^
+ // one error found
+}
diff --git a/test/files/pos/bug1292.scala b/test/files/pos/t1292.scala
similarity index 100%
rename from test/files/pos/bug1292.scala
rename to test/files/pos/t1292.scala
diff --git a/test/files/pos/t1318.scala b/test/files/pos/t1318.scala
new file mode 100644
index 0000000..3fc6e30
--- /dev/null
+++ b/test/files/pos/t1318.scala
@@ -0,0 +1,31 @@
+abstract class F {
+ type mType <: M
+}
+
+abstract class M { self =>
+
+ type mType <: M
+
+ type fType = F {type mType >: self.mType }
+ def fs: List[fType]
+}
+
+abstract class A0 extends M {
+ type mType = A0
+ def fs: List[fType] = Nil
+}
+
+object A extends A0 {}
+
+abstract class B0 extends M {
+ type mType = B0
+ def fs: List[fType] = Nil
+}
+
+object B extends B0 {}
+
+object C {
+ def ab = List(A) ::: List(B)
+ // the following compiles successfully:
+ // def ab = List(A) ::: List[M](B)
+}
\ No newline at end of file
diff --git a/test/pending/pos/bug1357.scala b/test/files/pos/t1357.scala
similarity index 100%
rename from test/pending/pos/bug1357.scala
rename to test/files/pos/t1357.scala
diff --git a/test/files/pos/t1381-new.scala b/test/files/pos/t1381-new.scala
new file mode 100644
index 0000000..57e0f31
--- /dev/null
+++ b/test/files/pos/t1381-new.scala
@@ -0,0 +1,31 @@
+import scala.reflect.runtime.universe._
+
+class D[V <: Variable]
+
+class ID[V<:IV] extends D[V] {
+ type E = V#ValueType
+ def index(value:E) : Int = 0
+ // Comment this out to eliminate crash. Or see below
+ def index(values:E*) : Iterable[Int] = null
+}
+
+abstract class Variable {
+ type VT <: Variable
+ def d : D[VT] = null
+}
+
+abstract class PV[T](initval:T) extends Variable {
+ type VT <: PV[T]
+ type ValueType = T
+}
+
+trait IV extends Variable {
+ type ValueType
+}
+
+abstract class EV[T](initval:T) extends PV[T](initval) with IV {
+ type VT <: EV[T]
+ override def d : ID[VT] = null
+ // Comment this out to eliminate crash
+ protected var indx = d.index(initval)
+}
\ No newline at end of file
diff --git a/test/files/pos/bug1381.scala b/test/files/pos/t1381-old.scala
similarity index 100%
rename from test/files/pos/bug1381.scala
rename to test/files/pos/t1381-old.scala
diff --git a/test/files/pos/bug1385.scala b/test/files/pos/t1385.scala
similarity index 100%
rename from test/files/pos/bug1385.scala
rename to test/files/pos/t1385.scala
diff --git a/test/files/pos/t1439.flags b/test/files/pos/t1439.flags
new file mode 100644
index 0000000..1e70f5c
--- /dev/null
+++ b/test/files/pos/t1439.flags
@@ -0,0 +1 @@
+-unchecked -Xfatal-warnings -Xoldpatmat -language:higherKinds
diff --git a/test/files/pos/t1439.scala b/test/files/pos/t1439.scala
new file mode 100644
index 0000000..0efcc74
--- /dev/null
+++ b/test/files/pos/t1439.scala
@@ -0,0 +1,8 @@
+// no unchecked warnings
+class View[C[A]] { }
+
+object Test {
+ (null: Any) match {
+ case v: View[_] =>
+ }
+}
diff --git a/test/files/pos/t1459/AbstractBase.java b/test/files/pos/t1459/AbstractBase.java
old mode 100644
new mode 100755
diff --git a/test/files/pos/t1459/App.scala b/test/files/pos/t1459/App.scala
old mode 100644
new mode 100755
index 1152fcb..36e5022
--- a/test/files/pos/t1459/App.scala
+++ b/test/files/pos/t1459/App.scala
@@ -1,18 +1,18 @@
package foo
import base._
-object App extends Application {
+object App extends scala.App {
class Concrete extends AbstractBase {
override def doStuff(params:java.lang.String*): Unit = println("doStuff invoked")
}
val impl = new Concrete
- //succeeds
+ //succeeds
impl.doStuff(null)
val caller = new Caller
-
+
// fails with AbstractMethodError
caller.callDoStuff(impl)
}
diff --git a/test/files/pos/t1459/Caller.java b/test/files/pos/t1459/Caller.java
old mode 100644
new mode 100755
diff --git a/test/files/pos/bug151.scala b/test/files/pos/t151.scala
similarity index 100%
rename from test/files/pos/bug151.scala
rename to test/files/pos/t151.scala
diff --git a/test/files/pos/t1545.scala b/test/files/pos/t1545.scala
deleted file mode 100644
index 51df606..0000000
--- a/test/files/pos/t1545.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-object Main extends App {
-
- case class Foo (field : Option[String])
-
- val x : PartialFunction[Foo,Int] =
- {
- c => c.field match {
- case Some (s) => 42
- case None => 99
- }
- }
-
- println (x (Foo (None))) // prints 99
- println (x (Foo (Some ("foo")))) // prints 42
-
-}
diff --git a/test/files/pos/bug1565.scala b/test/files/pos/t1565.scala
similarity index 100%
rename from test/files/pos/bug1565.scala
rename to test/files/pos/t1565.scala
diff --git a/test/files/pos/t1569.flags b/test/files/pos/t1569.flags
deleted file mode 100644
index 1c26b24..0000000
--- a/test/files/pos/t1569.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ydependent-method-types
\ No newline at end of file
diff --git a/test/files/pos/bug159.scala b/test/files/pos/t159.scala
similarity index 100%
rename from test/files/pos/bug159.scala
rename to test/files/pos/t159.scala
diff --git a/test/files/pos/bug160.scala b/test/files/pos/t160.scala
similarity index 100%
rename from test/files/pos/bug160.scala
rename to test/files/pos/t160.scala
diff --git a/test/files/pos/bug1626.scala b/test/files/pos/t1626.scala
similarity index 100%
rename from test/files/pos/bug1626.scala
rename to test/files/pos/t1626.scala
diff --git a/test/files/pos/t1672.scala b/test/files/pos/t1672.scala
new file mode 100644
index 0000000..5ee6bb1
--- /dev/null
+++ b/test/files/pos/t1672.scala
@@ -0,0 +1,10 @@
+object Test {
+ @annotation.tailrec
+ def bar : Nothing = {
+ try {
+ throw new RuntimeException
+ } catch {
+ case _: Throwable => bar
+ }
+ }
+}
diff --git a/test/files/pos/t1722/Test.scala b/test/files/pos/t1722/Test.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/t1722/Top.scala b/test/files/pos/t1722/Top.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/bug175.scala b/test/files/pos/t175.scala
similarity index 100%
rename from test/files/pos/bug175.scala
rename to test/files/pos/t175.scala
diff --git a/test/files/pos/t1756.scala b/test/files/pos/t1756.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/bug177.scala b/test/files/pos/t177.scala
similarity index 100%
rename from test/files/pos/bug177.scala
rename to test/files/pos/t177.scala
diff --git a/test/files/pos/t1782/Test_1.scala b/test/files/pos/t1782/Test_1.scala
deleted file mode 100644
index e61ef8e..0000000
--- a/test/files/pos/t1782/Test_1.scala
+++ /dev/null
@@ -1,16 +0,0 @@
- at ImplementedBy(classOf[Provider])
-trait Service {
- def someMethod()
-}
-
-class Provider
- extends Service
-{
- // test enumeration java annotations
- @Ann(Days.Friday) def someMethod() = ()
-
- // #2103
- @scala.reflect.BeanProperty
- @Ann(value = Days.Sunday)
- val t2103 = "test"
-}
diff --git a/test/files/pos/t1785.scala b/test/files/pos/t1785.scala
new file mode 100644
index 0000000..0b1fafb
--- /dev/null
+++ b/test/files/pos/t1785.scala
@@ -0,0 +1,7 @@
+class t1785 {
+ def apply[T](x: Int) = 1
+}
+
+object test {
+ (new t1785)[Int](1)
+}
diff --git a/test/files/pos/t1803.flags b/test/files/pos/t1803.flags
new file mode 100644
index 0000000..d1a8244
--- /dev/null
+++ b/test/files/pos/t1803.flags
@@ -0,0 +1 @@
+-Yinfer-argument-types
\ No newline at end of file
diff --git a/test/files/pos/t1803.scala b/test/files/pos/t1803.scala
new file mode 100644
index 0000000..42f4e78
--- /dev/null
+++ b/test/files/pos/t1803.scala
@@ -0,0 +1,2 @@
+class A { def foo[A](a: A) = a }
+class B extends A { override def foo[A](b) = b }
diff --git a/test/files/pos/bug183.scala b/test/files/pos/t183.scala
similarity index 100%
rename from test/files/pos/bug183.scala
rename to test/files/pos/t183.scala
diff --git a/test/files/pos/t1832.scala b/test/files/pos/t1832.scala
new file mode 100644
index 0000000..c7b1ffb
--- /dev/null
+++ b/test/files/pos/t1832.scala
@@ -0,0 +1,8 @@
+trait Cloning {
+ trait Foo
+ def fn(g: Any => Unit): Foo
+
+ implicit def mkStar(i: Int) = new { def *(a: Foo): Foo = null }
+
+ val pool = 4 * fn { case ghostSYMBOL: Int => ghostSYMBOL * 2 }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug1843.scala b/test/files/pos/t1843.scala
similarity index 100%
rename from test/files/pos/bug1843.scala
rename to test/files/pos/t1843.scala
diff --git a/test/files/pos/bug1858.scala b/test/files/pos/t1858.scala
similarity index 100%
rename from test/files/pos/bug1858.scala
rename to test/files/pos/t1858.scala
diff --git a/test/files/pos/bug1909.scala b/test/files/pos/t1909.scala
similarity index 100%
rename from test/files/pos/bug1909.scala
rename to test/files/pos/t1909.scala
diff --git a/test/files/pos/bug1909b-pos.scala b/test/files/pos/t1909b-pos.scala
similarity index 100%
rename from test/files/pos/bug1909b-pos.scala
rename to test/files/pos/t1909b-pos.scala
diff --git a/test/files/pos/t1942/A_1.scala b/test/files/pos/t1942/A_1.scala
index 19a7575..4915b54 100644
--- a/test/files/pos/t1942/A_1.scala
+++ b/test/files/pos/t1942/A_1.scala
@@ -3,7 +3,7 @@ class A {
def foo(x: String) = 1
}
-class ann(x: Int) extends StaticAnnotation
+class ann(x: Int) extends annotation.StaticAnnotation
class t {
val a = new A
diff --git a/test/pending/pos/bug1957.scala b/test/files/pos/t1957.scala
similarity index 100%
rename from test/pending/pos/bug1957.scala
rename to test/files/pos/t1957.scala
diff --git a/test/files/pos/bug1974.scala b/test/files/pos/t1974.scala
similarity index 100%
rename from test/files/pos/bug1974.scala
rename to test/files/pos/t1974.scala
diff --git a/test/files/pos/t1987.scala b/test/files/pos/t1987a.scala
similarity index 100%
rename from test/files/pos/t1987.scala
rename to test/files/pos/t1987a.scala
diff --git a/test/files/pos/t1987b/a.scala b/test/files/pos/t1987b/a.scala
new file mode 100644
index 0000000..ff27044
--- /dev/null
+++ b/test/files/pos/t1987b/a.scala
@@ -0,0 +1,7 @@
+package bug
+
+// goes with t1987b.scala
+package object packageb {
+ def func(a: Int) = ()
+ def func(a: String) = ()
+}
diff --git a/test/files/pos/t1987b/b.scala b/test/files/pos/t1987b/b.scala
new file mode 100644
index 0000000..a469ca6
--- /dev/null
+++ b/test/files/pos/t1987b/b.scala
@@ -0,0 +1,10 @@
+// compile with t1987a.scala
+
+package bug.packageb
+// Note that the overloading works if instead of being in the package we import it:
+// replace the above line with import bug.packageb._
+
+class Client {
+ val x = func(1) // doesn't compile: type mismatch; found: Int(1) required: String
+ val y = func("1") // compiles
+}
diff --git a/test/files/pos/bug201.scala b/test/files/pos/t201.scala
similarity index 100%
rename from test/files/pos/bug201.scala
rename to test/files/pos/t201.scala
diff --git a/test/files/pos/bug2018.scala b/test/files/pos/t2018.scala
similarity index 100%
rename from test/files/pos/bug2018.scala
rename to test/files/pos/t2018.scala
diff --git a/test/files/pos/t2038.scala b/test/files/pos/t2038.scala
new file mode 100644
index 0000000..17b1a70
--- /dev/null
+++ b/test/files/pos/t2038.scala
@@ -0,0 +1,5 @@
+class Test {
+ List(Some(classOf[java.lang.Integer]), Some(classOf[Int])).map {
+ case Some(f: Class[_]) => f.cast(???)
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t2060.scala b/test/files/pos/t2060.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/bug2081.scala b/test/files/pos/t2081.scala
similarity index 100%
rename from test/files/pos/bug2081.scala
rename to test/files/pos/t2081.scala
diff --git a/test/files/pos/t2082.scala b/test/files/pos/t2082.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/bug2094.scala b/test/files/pos/t2094.scala
similarity index 100%
rename from test/files/pos/bug2094.scala
rename to test/files/pos/t2094.scala
diff --git a/test/files/pos/bug210.scala b/test/files/pos/t210.scala
similarity index 100%
rename from test/files/pos/bug210.scala
rename to test/files/pos/t210.scala
diff --git a/test/files/pos/t211.scala b/test/files/pos/t211.scala
new file mode 100644
index 0000000..d51c970
--- /dev/null
+++ b/test/files/pos/t211.scala
@@ -0,0 +1,8 @@
+trait A;
+trait B;
+class Foo extends A with B { self: A with B => }
+object Test extends App {
+ new Foo();
+ Console.println("t211 completed");
+}
+
diff --git a/test/files/pos/bug2119.scala b/test/files/pos/t2119.scala
similarity index 100%
rename from test/files/pos/bug2119.scala
rename to test/files/pos/t2119.scala
diff --git a/test/files/pos/bug2127.scala b/test/files/pos/t2127.scala
similarity index 100%
rename from test/files/pos/bug2127.scala
rename to test/files/pos/t2127.scala
diff --git a/test/files/pos/bug2130-1.scala b/test/files/pos/t2130-1.scala
similarity index 100%
rename from test/files/pos/bug2130-1.scala
rename to test/files/pos/t2130-1.scala
diff --git a/test/files/pos/bug2130-2.scala b/test/files/pos/t2130-2.scala
similarity index 100%
rename from test/files/pos/bug2130-2.scala
rename to test/files/pos/t2130-2.scala
diff --git a/test/files/pos/bug2168.scala b/test/files/pos/t2168.scala
similarity index 100%
rename from test/files/pos/bug2168.scala
rename to test/files/pos/t2168.scala
diff --git a/test/files/pos/bug3252.flags b/test/files/pos/t2171.flags
similarity index 100%
copy from test/files/pos/bug3252.flags
copy to test/files/pos/t2171.flags
diff --git a/test/files/pos/t2171.scala b/test/files/pos/t2171.scala
new file mode 100644
index 0000000..a5663c9
--- /dev/null
+++ b/test/files/pos/t2171.scala
@@ -0,0 +1,7 @@
+final object test {
+ def logIgnoredException(msg: => String) =
+ try 0 catch { case ex => println(msg) }
+
+ def main (args: Array[String]): Unit =
+ while (true) logIgnoredException ("...")
+}
diff --git a/test/files/pos/t2179.scala b/test/files/pos/t2179.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/bug2187-2.scala b/test/files/pos/t2187-2.scala
similarity index 100%
rename from test/files/pos/bug2187-2.scala
rename to test/files/pos/t2187-2.scala
diff --git a/test/files/pos/bug2187.scala b/test/files/pos/t2187.scala
similarity index 100%
rename from test/files/pos/bug2187.scala
rename to test/files/pos/t2187.scala
diff --git a/test/pending/pos/t2194.scala b/test/files/pos/t2194.scala
similarity index 100%
rename from test/pending/pos/t2194.scala
rename to test/files/pos/t2194.scala
diff --git a/test/files/pos/bug2260.scala b/test/files/pos/t2260.scala
similarity index 100%
rename from test/files/pos/bug2260.scala
rename to test/files/pos/t2260.scala
diff --git a/test/files/pos/t2281.scala b/test/files/pos/t2281.scala
new file mode 100644
index 0000000..3515d2e
--- /dev/null
+++ b/test/files/pos/t2281.scala
@@ -0,0 +1,41 @@
+import scala.collection.mutable.ArrayBuffer
+
+class A {
+ def f(x: Boolean) = if (x) <br/><br/> else <br/>
+}
+
+class B {
+ def splitSentences(text : String) : ArrayBuffer[String] = {
+ val outarr = new ArrayBuffer[String]
+ var outstr = new StringBuffer
+ var prevspace = false
+ val ctext = text.replaceAll("\n+","\n")
+ ctext foreach {c =>
+ outstr append c
+ if(c == '.' || c == '!' || c == '?' || c == '\n' || c == ':' || c == ';' || (prevspace && c == '-') ){
+ outarr += outstr.toString
+ outstr = new StringBuffer
+ }
+ if(c == '\n'){
+ outarr += "\n\n"
+ }
+ prevspace = c == ' '
+ }
+ if(outstr.length > 0){
+ outarr += outstr.toString
+ }
+ outarr
+ }
+
+ def spanForSentence(x : String,picktext : String) =
+ if(x == "\n\n"){
+ <br/><br/>
+ }else{
+ <span class='clicksentence' style={if(x == picktext) "background-color: yellow" else ""}>{x}</span>
+ }
+
+ def selectableSentences(text : String, picktext : String) = {
+ val sentences = splitSentences(text)
+ sentences.map(x => spanForSentence(x,picktext))
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug229.scala b/test/files/pos/t229.scala
similarity index 100%
rename from test/files/pos/bug229.scala
rename to test/files/pos/t229.scala
diff --git a/test/files/pos/bug2310.scala b/test/files/pos/t2310.scala
similarity index 100%
rename from test/files/pos/bug2310.scala
rename to test/files/pos/t2310.scala
diff --git a/test/files/pos/bug2399.scala b/test/files/pos/t2399.scala
similarity index 100%
rename from test/files/pos/bug2399.scala
rename to test/files/pos/t2399.scala
diff --git a/test/files/pos/t2405.scala b/test/files/pos/t2405.scala
new file mode 100644
index 0000000..224b2ce
--- /dev/null
+++ b/test/files/pos/t2405.scala
@@ -0,0 +1,23 @@
+object A { implicit val x: Int = 1 }
+
+// Problem as stated in the ticket.
+object Test1 {
+ import A.{x => y}
+ implicitly[Int]
+}
+
+// Testing for the absense of shadowing #1.
+object Test2 {
+ import A.{x => y}
+ val x = 2
+ implicitly[Int]
+}
+
+// Testing for the absense of shadowing #2.
+object Test3 {
+ {
+ import A.{x => y}
+ def x: Int = 0
+ implicitly[Int]
+ }
+}
diff --git a/test/files/pos/bug2409/J.java b/test/files/pos/t2409/J.java
similarity index 100%
rename from test/files/pos/bug2409/J.java
rename to test/files/pos/t2409/J.java
diff --git a/test/files/pos/bug2409/bug2409.scala b/test/files/pos/t2409/t2409.scala
similarity index 100%
rename from test/files/pos/bug2409/bug2409.scala
rename to test/files/pos/t2409/t2409.scala
diff --git a/test/files/pos/t2425.scala b/test/files/pos/t2425.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/t2429.scala b/test/files/pos/t2429.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/t2433/A.java b/test/files/pos/t2433/A.java
old mode 100644
new mode 100755
index 8ae23ff..340690c
--- a/test/files/pos/t2433/A.java
+++ b/test/files/pos/t2433/A.java
@@ -1,4 +1,4 @@
-class A223 extends B223.Inner {
- static class Inner {}
- void foo() {}
+class A223 extends B223.Inner {
+ static class Inner {}
+ void foo() {}
}
\ No newline at end of file
diff --git a/test/files/pos/t2433/B.java b/test/files/pos/t2433/B.java
old mode 100644
new mode 100755
index d0d5580..151dd71
--- a/test/files/pos/t2433/B.java
+++ b/test/files/pos/t2433/B.java
@@ -1,4 +1,4 @@
-class B223 {
- static class Inner {}
- void m(A223.Inner x) {}
+class B223 {
+ static class Inner {}
+ void m(A223.Inner x) {}
}
\ No newline at end of file
diff --git a/test/files/pos/t2433/Test.scala b/test/files/pos/t2433/Test.scala
old mode 100644
new mode 100755
index 0e07231..02fd89b
--- a/test/files/pos/t2433/Test.scala
+++ b/test/files/pos/t2433/Test.scala
@@ -1,3 +1,3 @@
-object Test {
- (new A223).foo()
-}
+object Test {
+ (new A223).foo()
+}
diff --git a/test/files/pos/t2435.scala b/test/files/pos/t2435.scala
new file mode 100644
index 0000000..2db931b
--- /dev/null
+++ b/test/files/pos/t2435.scala
@@ -0,0 +1,27 @@
+object Bug {
+ abstract class FChain {
+ type T
+
+ def chain(constant:String) =
+ new FConstant[this.type](constant, this) //removing [this.type], everything compiles
+ }
+
+ case class FConstant[E <: FChain](constant:String, tail:E) extends FChain {
+ type T = tail.T
+ }
+
+ object FNil extends FChain {
+ type T = Unit
+ }
+
+}
+
+object Test {
+ import Bug._
+ println("Compiles:")
+ val a1 = FNil.chain("a").chain("a")
+ val a2 = a1.chain("a")
+
+ println("\nDoesn't compile:")
+ val a = FNil.chain("a").chain("a").chain("a")
+}
diff --git a/test/files/pos/bug2441pos.scala b/test/files/pos/t2441pos.scala
similarity index 100%
rename from test/files/pos/bug2441pos.scala
rename to test/files/pos/t2441pos.scala
diff --git a/test/files/pos/bug245.scala b/test/files/pos/t245.scala
similarity index 100%
rename from test/files/pos/bug245.scala
rename to test/files/pos/t245.scala
diff --git a/test/files/pos/bug247.scala b/test/files/pos/t247.scala
similarity index 100%
rename from test/files/pos/bug247.scala
rename to test/files/pos/t247.scala
diff --git a/test/files/pos/t2484.scala b/test/files/pos/t2484.scala
old mode 100644
new mode 100755
index 20c51b0..7d1b7cb
--- a/test/files/pos/t2484.scala
+++ b/test/files/pos/t2484.scala
@@ -1,17 +1,17 @@
-class Admin extends javax.swing.JApplet {
- val jScrollPane = new javax.swing.JScrollPane (null, 0, 0)
- def bug2484: Unit = {
- scala.concurrent.ops.spawn {jScrollPane.synchronized {
- def someFunction () = {}
- //scala.concurrent.ops.spawn {someFunction ()}
- jScrollPane.addComponentListener (new java.awt.event.ComponentAdapter {override def componentShown (e: java.awt.event.ComponentEvent) = {
- someFunction (); jScrollPane.removeComponentListener (this)}})
- }}
- }
-}
-// t2630.scala
-object Test {
- def meh(xs: List[Any]) {
- xs map { x => (new AnyRef {}) }
- }
-}
+class Admin extends javax.swing.JApplet {
+ val jScrollPane = new javax.swing.JScrollPane (null, 0, 0)
+ def t2484: Unit = {
+ scala.concurrent.ops.spawn {jScrollPane.synchronized {
+ def someFunction () = {}
+ //scala.concurrent.ops.spawn {someFunction ()}
+ jScrollPane.addComponentListener (new java.awt.event.ComponentAdapter {override def componentShown (e: java.awt.event.ComponentEvent) = {
+ someFunction (); jScrollPane.removeComponentListener (this)}})
+ }}
+ }
+}
+// t2630.scala
+object Test {
+ def meh(xs: List[Any]) {
+ xs map { x => (new AnyRef {}) }
+ }
+}
diff --git a/test/files/pos/bug2486.scala b/test/files/pos/t2486.scala
similarity index 100%
rename from test/files/pos/bug2486.scala
rename to test/files/pos/t2486.scala
diff --git a/test/files/pos/t2504.scala b/test/files/pos/t2504.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/t252.scala b/test/files/pos/t252.scala
new file mode 100644
index 0000000..d51b551
--- /dev/null
+++ b/test/files/pos/t252.scala
@@ -0,0 +1,17 @@
+abstract class Module {}
+
+abstract class T {
+ type moduleType <: Module
+ val module: moduleType
+}
+
+abstract class Base {
+ type mType = Module
+ type tType = T { type moduleType <: mType }
+}
+
+abstract class Derived extends Base {
+ def f(inputs: List[tType]): Unit = {
+ for (t <- inputs; m = t.module) { }
+ }
+}
diff --git a/test/files/pos/t2545.scala b/test/files/pos/t2545.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/bug261-ab.scala b/test/files/pos/t261-ab.scala
similarity index 100%
rename from test/files/pos/bug261-ab.scala
rename to test/files/pos/t261-ab.scala
diff --git a/test/files/pos/bug261-ba.scala b/test/files/pos/t261-ba.scala
similarity index 100%
rename from test/files/pos/bug261-ba.scala
rename to test/files/pos/t261-ba.scala
diff --git a/test/files/pos/bug262.scala b/test/files/pos/t262.scala
similarity index 100%
rename from test/files/pos/bug262.scala
rename to test/files/pos/t262.scala
diff --git a/test/files/pos/t2635.scala b/test/files/pos/t2635.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/bug267.scala b/test/files/pos/t267.scala
similarity index 100%
rename from test/files/pos/bug267.scala
rename to test/files/pos/t267.scala
diff --git a/test/files/pos/t2683.scala b/test/files/pos/t2683.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/t2691.scala b/test/files/pos/t2691.scala
index 3c0d193..94012a8 100644
--- a/test/files/pos/t2691.scala
+++ b/test/files/pos/t2691.scala
@@ -6,4 +6,11 @@ object Test {
case Breakdown("") => // needed to trigger bug
case Breakdown("", who) => println ("hello " + who)
}
-}
\ No newline at end of file
+}
+object Test2 {
+ 42 match {
+ case Breakdown("") => // needed to trigger bug
+ case Breakdown("foo") => // needed to trigger bug
+ case Breakdown("", who) => println ("hello " + who)
+ }
+}
diff --git a/test/files/pos/bug2693.scala b/test/files/pos/t2693.scala
similarity index 100%
rename from test/files/pos/bug2693.scala
rename to test/files/pos/t2693.scala
diff --git a/test/files/pos/t2726/bug2726_2.scala b/test/files/pos/t2726/t2726_2.scala
similarity index 100%
rename from test/files/pos/t2726/bug2726_2.scala
rename to test/files/pos/t2726/t2726_2.scala
diff --git a/test/files/pos/t2764/Ann.java b/test/files/pos/t2764/Ann.java
new file mode 100644
index 0000000..184fc6e
--- /dev/null
+++ b/test/files/pos/t2764/Ann.java
@@ -0,0 +1,5 @@
+package bippy;
+
+public @interface Ann {
+ Enum value();
+}
diff --git a/test/files/pos/t2764/Enum.java b/test/files/pos/t2764/Enum.java
new file mode 100644
index 0000000..fe07559
--- /dev/null
+++ b/test/files/pos/t2764/Enum.java
@@ -0,0 +1,5 @@
+package bippy;
+
+public enum Enum {
+ VALUE;
+}
diff --git a/test/files/pos/t2764/Use.scala b/test/files/pos/t2764/Use.scala
new file mode 100644
index 0000000..8cf8102
--- /dev/null
+++ b/test/files/pos/t2764/Use.scala
@@ -0,0 +1,6 @@
+package bippy
+
+class Use {
+ @Ann(Enum.VALUE)
+ def foo {}
+}
diff --git a/test/files/pos/t2782.scala b/test/files/pos/t2782.scala
new file mode 100644
index 0000000..ab12aaf
--- /dev/null
+++ b/test/files/pos/t2782.scala
@@ -0,0 +1,18 @@
+import scala.{collection => sc}
+
+object Test {
+ trait Foo[T]
+
+ // Haven't managed to repro without using a CanBuild or CanBuildFrom implicit parameter
+ implicit def MapFoo[A, B, M[A, B] <: sc.Map[A,B]](implicit aFoo: Foo[A], bFoo: Foo[B], cb: sc.generic.CanBuild[(A, B), M[A, B]]) = new Foo[M[A,B]] {}
+ implicit object Tuple2IntIntFoo extends Foo[(Int, Int)] // no difference if this line is uncommented
+ implicit def Tuple2Foo[A, B] = new Foo[(A, B)] {} // nor this one
+
+ implicitly[Foo[(Int, Int)]]
+}
+
+class A {
+ def x[N[X] >: M[X], M[_], G](n: N[G], m: M[G]) = null
+
+ x(Some(3), Seq(2))
+}
diff --git a/test/files/pos/t2795-new.scala b/test/files/pos/t2795-new.scala
new file mode 100644
index 0000000..a6a5fdb
--- /dev/null
+++ b/test/files/pos/t2795-new.scala
@@ -0,0 +1,19 @@
+package t1
+
+import scala.reflect.{ClassTag, classTag}
+
+trait Element[T] {
+}
+
+trait Config {
+ type T <: Element[T]
+ implicit val m: ClassTag[T]
+ // XXX Following works fine:
+ // type T <: Element[_]
+}
+
+trait Transform { self: Config =>
+ def processBlock(block: Array[T]): Unit = {
+ var X = new Array[T](1)
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t2795-old.scala b/test/files/pos/t2795-old.scala
new file mode 100644
index 0000000..935cb1f
--- /dev/null
+++ b/test/files/pos/t2795-old.scala
@@ -0,0 +1,17 @@
+package t1
+
+trait Element[T] {
+}
+
+trait Config {
+ type T <: Element[T]
+ implicit val m: ClassManifest[T]
+ // XXX Following works fine:
+ // type T <: Element[_]
+}
+
+trait Transform { self: Config =>
+ def processBlock(block: Array[T]): Unit = {
+ var X = new Array[T](1)
+ }
+}
diff --git a/test/files/pos/t2795.scala b/test/files/pos/t2795.scala
deleted file mode 100644
index a4e1b7d..0000000
--- a/test/files/pos/t2795.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-package bug1
-
-trait Element[T] {
-}
-
-trait Config {
- type T <: Element[T]
- implicit val m: ClassManifest[T]
- // XXX Following works fine:
- // type T <: Element[_]
-}
-
-trait Transform { self: Config =>
- def processBlock(block: Array[T]): Unit = {
- var X = new Array[T](1)
- }
-}
diff --git a/test/files/pos/bug284-pos.scala b/test/files/pos/t284-pos.scala
similarity index 100%
rename from test/files/pos/bug284-pos.scala
rename to test/files/pos/t284-pos.scala
diff --git a/test/files/pos/t2868/pick_1.scala b/test/files/pos/t2868/pick_1.scala
deleted file mode 100644
index e91728e..0000000
--- a/test/files/pos/t2868/pick_1.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-class ann(s: String) extends StaticAnnotation
-class pick {
- final val s = "bang!"
- @ann("bang!") def foo = 1
- @Jann(str = "bang!", inn = new Nest(1), arr = Array(1, 2)) def bar = 2
- @Jann(str = "bang!", inn = new Nest(1), arr = Array(1, 2)) def baz = 3
-}
diff --git a/test/files/pos/bug287.scala b/test/files/pos/t287.scala
similarity index 100%
rename from test/files/pos/bug287.scala
rename to test/files/pos/t287.scala
diff --git a/test/files/pos/bug289.scala b/test/files/pos/t289.scala
similarity index 100%
rename from test/files/pos/bug289.scala
rename to test/files/pos/t289.scala
diff --git a/test/files/pos/t2913.scala b/test/files/pos/t2913.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/bug2939.scala b/test/files/pos/t2939.scala
similarity index 100%
rename from test/files/pos/bug2939.scala
rename to test/files/pos/t2939.scala
diff --git a/test/files/pos/bug2945.scala b/test/files/pos/t2945.scala
similarity index 100%
rename from test/files/pos/bug2945.scala
rename to test/files/pos/t2945.scala
diff --git a/test/files/pos/bug295.scala b/test/files/pos/t295.scala
similarity index 100%
rename from test/files/pos/bug295.scala
rename to test/files/pos/t295.scala
diff --git a/test/files/pos/t2956/t2956.scala b/test/files/pos/t2956/t2956.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/bug296.scala b/test/files/pos/t296.scala
similarity index 100%
rename from test/files/pos/bug296.scala
rename to test/files/pos/t296.scala
diff --git a/test/files/pos/bug3020.scala b/test/files/pos/t3020.scala
similarity index 100%
rename from test/files/pos/bug3020.scala
rename to test/files/pos/t3020.scala
diff --git a/test/files/pos/bug304.scala b/test/files/pos/t304.scala
similarity index 100%
rename from test/files/pos/bug304.scala
rename to test/files/pos/t304.scala
diff --git a/test/files/pos/bug3048.scala b/test/files/pos/t3048.scala
similarity index 100%
rename from test/files/pos/bug3048.scala
rename to test/files/pos/t3048.scala
diff --git a/test/files/pos/bug3106.scala b/test/files/pos/t3106.scala
similarity index 100%
rename from test/files/pos/bug3106.scala
rename to test/files/pos/t3106.scala
diff --git a/test/files/pos/t3120/J1.java b/test/files/pos/t3120/J1.java
new file mode 100644
index 0000000..12b23c1
--- /dev/null
+++ b/test/files/pos/t3120/J1.java
@@ -0,0 +1,4 @@
+class J1 {
+ public class Inner1 { }
+ public static class Inner2 { }
+}
diff --git a/test/files/pos/t3120/J2.java b/test/files/pos/t3120/J2.java
new file mode 100644
index 0000000..db6e859
--- /dev/null
+++ b/test/files/pos/t3120/J2.java
@@ -0,0 +1,4 @@
+public class J2 {
+ public void f1(J1.Inner1 p) { }
+ public void f2(J1.Inner2 p) { }
+}
diff --git a/test/files/pos/t3120/Q.java b/test/files/pos/t3120/Q.java
new file mode 100644
index 0000000..fe22693
--- /dev/null
+++ b/test/files/pos/t3120/Q.java
@@ -0,0 +1,3 @@
+public class Q {
+ public static void passInner(J1.Inner1 myInner) {}
+}
diff --git a/test/files/pos/t3120/Test.scala b/test/files/pos/t3120/Test.scala
new file mode 100644
index 0000000..c02146f
--- /dev/null
+++ b/test/files/pos/t3120/Test.scala
@@ -0,0 +1,3 @@
+object Test {
+ Q.passInner(null)
+}
diff --git a/test/files/pos/bug3136.scala b/test/files/pos/t3136.scala
similarity index 100%
rename from test/files/pos/bug3136.scala
rename to test/files/pos/t3136.scala
diff --git a/test/files/pos/bug3137.scala b/test/files/pos/t3137.scala
similarity index 100%
rename from test/files/pos/bug3137.scala
rename to test/files/pos/t3137.scala
diff --git a/test/files/pos/t3174.scala b/test/files/pos/t3174.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/t3174b.scala b/test/files/pos/t3174b.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/bug3175-pos.scala b/test/files/pos/t3175-pos.scala
similarity index 100%
rename from test/files/pos/bug3175-pos.scala
rename to test/files/pos/t3175-pos.scala
diff --git a/test/files/pos/bug318.scala b/test/files/pos/t318.scala
similarity index 100%
rename from test/files/pos/bug318.scala
rename to test/files/pos/t318.scala
diff --git a/test/files/pos/bug319.scala b/test/files/pos/t319.scala
similarity index 100%
rename from test/files/pos/bug319.scala
rename to test/files/pos/t319.scala
diff --git a/test/files/pos/bug3430.flags b/test/files/pos/t3252.flags
similarity index 100%
rename from test/files/pos/bug3430.flags
rename to test/files/pos/t3252.flags
diff --git a/test/files/pos/bug3252.scala b/test/files/pos/t3252.scala
similarity index 100%
rename from test/files/pos/bug3252.scala
rename to test/files/pos/t3252.scala
diff --git a/test/files/pos/t3272.scala b/test/files/pos/t3272.scala
new file mode 100644
index 0000000..8efd5de
--- /dev/null
+++ b/test/files/pos/t3272.scala
@@ -0,0 +1,8 @@
+trait A {
+ trait C[+T] {
+ protected[this] def f(t: T) {}
+ }
+ trait D[T] extends C[T] {
+ def g(t: T) { f(t) }
+ }
+}
diff --git a/test/files/pos/bug3278.scala b/test/files/pos/t3278.scala
similarity index 100%
rename from test/files/pos/bug3278.scala
rename to test/files/pos/t3278.scala
diff --git a/test/files/pos/bug3312.scala b/test/files/pos/t3312.scala
similarity index 100%
rename from test/files/pos/bug3312.scala
rename to test/files/pos/t3312.scala
diff --git a/test/files/pos/bug3343.scala b/test/files/pos/t3343.scala
similarity index 100%
rename from test/files/pos/bug3343.scala
rename to test/files/pos/t3343.scala
diff --git a/test/files/pos/t3363-new.scala b/test/files/pos/t3363-new.scala
new file mode 100644
index 0000000..e609f4d
--- /dev/null
+++ b/test/files/pos/t3363-new.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+
+object TestCase {
+
+ //now matter if you put (abstract) class or trait it will fail in all cases
+ trait MapOps[T]
+
+ //if fs was reduced to List (generic type with one parameter) then the code compiles
+ //if you inherit from MapOps[T] instead of MapOps[F] then code compiles fine
+ implicit def map2ops[T,F](fs: Map[T,F]) = new MapOps[F] {
+ //if you remove this line, then code compiles
+ lazy val m: TypeTag[T] = error("just something to make it compile")
+ def is(xs: List[T]) = List(xs)
+ }
+
+ def main(args: Array[String]) {
+ println(Map(1 -> "2") is List(2))
+ }
+
+ }
\ No newline at end of file
diff --git a/test/files/pos/t3363-old.scala b/test/files/pos/t3363-old.scala
new file mode 100644
index 0000000..bae5408
--- /dev/null
+++ b/test/files/pos/t3363-old.scala
@@ -0,0 +1,18 @@
+object TestCase {
+
+ //now matter if you put (abstract) class or trait it will fail in all cases
+ trait MapOps[T]
+
+ //if fs was reduced to List (generic type with one parameter) then the code compiles
+ //if you inherit from MapOps[T] instead of MapOps[F] then code compiles fine
+ implicit def map2ops[T,F](fs: Map[T,F]) = new MapOps[F] {
+ //if you remove this line, then code compiles
+ lazy val m: Manifest[T] = error("just something to make it compile")
+ def is(xs: List[T]) = List(xs)
+ }
+
+ def main(args: Array[String]) {
+ println(Map(1 -> "2") is List(2))
+ }
+
+ }
diff --git a/test/files/pos/t3363.scala b/test/files/pos/t3363.scala
deleted file mode 100644
index 302f8c4..0000000
--- a/test/files/pos/t3363.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-object TestCase {
-
- //now matter if you put (abstract) class or trait it will fail in all cases
- trait MapOps[T]
-
- //if fs was reduced to List (generic type with one parameter) then the code compiles
- //if you inherit from MapOps[T] instead of MapOps[F] then code compiles fine
- implicit def map2ops[T,F](fs: Map[T,F]) = new MapOps[F] {
- //if you remove this line, then code compiles
- lazy val m: Manifest[T] = error("just something to make it compile")
- def is(xs: List[T]) = List(xs)
- }
-
- def main(args: Array[String]) {
- println(Map(1 -> "2") is List(2))
- }
-
- }
diff --git a/test/files/pos/t3371.scala b/test/files/pos/t3371.scala
new file mode 100644
index 0000000..897cd9d
--- /dev/null
+++ b/test/files/pos/t3371.scala
@@ -0,0 +1,9 @@
+// that compiles
+class Test(myValue:String) { println(myValue) }
+
+// that compiles too
+trait Other { val otherValue = "" }
+class Test2(myValue:String) { self:Other => println(otherValue) }
+
+// that does not compile saying that myValue is not found
+class Test3(myValue:String) { self:Other => println(myValue) }
diff --git a/test/files/pos/bug3411.scala b/test/files/pos/t3411.scala
similarity index 100%
rename from test/files/pos/bug3411.scala
rename to test/files/pos/t3411.scala
diff --git a/test/files/pos/bug342.scala b/test/files/pos/t342.scala
similarity index 100%
rename from test/files/pos/bug342.scala
rename to test/files/pos/t342.scala
diff --git a/test/files/pos/bug3420.flags b/test/files/pos/t3420.flags
similarity index 100%
rename from test/files/pos/bug3420.flags
rename to test/files/pos/t3420.flags
diff --git a/test/files/pos/bug3420.scala b/test/files/pos/t3420.scala
similarity index 100%
rename from test/files/pos/bug3420.scala
rename to test/files/pos/t3420.scala
diff --git a/test/files/pos/bug4840.flags b/test/files/pos/t3430.flags
similarity index 100%
rename from test/files/pos/bug4840.flags
rename to test/files/pos/t3430.flags
diff --git a/test/files/pos/bug3430.scala b/test/files/pos/t3430.scala
similarity index 100%
rename from test/files/pos/bug3430.scala
rename to test/files/pos/t3430.scala
diff --git a/test/files/pos/bug344.scala b/test/files/pos/t344.scala
similarity index 100%
rename from test/files/pos/bug344.scala
rename to test/files/pos/t344.scala
diff --git a/test/files/pos/bug3440.scala b/test/files/pos/t3440.scala
similarity index 100%
rename from test/files/pos/bug3440.scala
rename to test/files/pos/t3440.scala
diff --git a/test/files/pos/bug3480.scala b/test/files/pos/t3480.scala
similarity index 100%
rename from test/files/pos/bug3480.scala
rename to test/files/pos/t3480.scala
diff --git a/test/files/pos/bug348plus.scala b/test/files/pos/t348plus.scala
similarity index 100%
rename from test/files/pos/bug348plus.scala
rename to test/files/pos/t348plus.scala
diff --git a/test/files/pos/bug3495.flags b/test/files/pos/t3495.flags
similarity index 100%
rename from test/files/pos/bug3495.flags
rename to test/files/pos/t3495.flags
diff --git a/test/files/pos/bug3495.scala b/test/files/pos/t3495.scala
similarity index 100%
rename from test/files/pos/bug3495.scala
rename to test/files/pos/t3495.scala
diff --git a/test/files/pos/t3498-new.scala b/test/files/pos/t3498-new.scala
new file mode 100644
index 0000000..eaf00cc
--- /dev/null
+++ b/test/files/pos/t3498-new.scala
@@ -0,0 +1,17 @@
+import scala.reflect.{ClassTag, classTag}
+
+abstract class A[T, @specialized(scala.Int) U : ClassTag] {
+ def f(state: T): Array[U]
+}
+
+abstract class B extends A[ Array[Byte], Int ] {
+ type T = Array[Byte]
+ type U = Int
+
+ val N = 0
+
+ def f(state: T): Array[U] =
+ {
+ new Array[U](N + state(N))
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug3498.scala b/test/files/pos/t3498-old.scala
similarity index 100%
rename from test/files/pos/bug3498.scala
rename to test/files/pos/t3498-old.scala
diff --git a/test/files/pos/bug3521/DoubleValue.java b/test/files/pos/t3521/DoubleValue.java
similarity index 100%
rename from test/files/pos/bug3521/DoubleValue.java
rename to test/files/pos/t3521/DoubleValue.java
diff --git a/test/files/pos/bug3521/a.scala b/test/files/pos/t3521/a.scala
similarity index 100%
rename from test/files/pos/bug3521/a.scala
rename to test/files/pos/t3521/a.scala
diff --git a/test/files/pos/bug3528.scala b/test/files/pos/t3528.scala
similarity index 100%
rename from test/files/pos/bug3528.scala
rename to test/files/pos/t3528.scala
diff --git a/test/files/pos/bug3534.scala b/test/files/pos/t3534.scala
similarity index 100%
rename from test/files/pos/bug3534.scala
rename to test/files/pos/t3534.scala
diff --git a/test/files/pos/3567/Foo.scala b/test/files/pos/t3567/Foo.scala
similarity index 100%
rename from test/files/pos/3567/Foo.scala
rename to test/files/pos/t3567/Foo.scala
diff --git a/test/files/pos/3567/Outer.java b/test/files/pos/t3567/Outer.java
similarity index 100%
rename from test/files/pos/3567/Outer.java
rename to test/files/pos/t3567/Outer.java
diff --git a/test/files/pos/t3568.scala b/test/files/pos/t3568.scala
old mode 100644
new mode 100755
diff --git a/test/files/pos/bug3570.scala b/test/files/pos/t3570.scala
similarity index 100%
rename from test/files/pos/bug3570.scala
rename to test/files/pos/t3570.scala
diff --git a/test/files/pos/t3577.scala b/test/files/pos/t3577.scala
new file mode 100644
index 0000000..80a280f
--- /dev/null
+++ b/test/files/pos/t3577.scala
@@ -0,0 +1,29 @@
+case class Check[A](val value: A)
+
+case class C2(checks: Check[_]*);
+
+object C {
+ def m(x : C2): Any = (null: Any) match {
+ case C2(_, rest @ _*) => {
+ rest.map(_.value)
+ }
+ }
+}
+
+///////////////////
+
+object Container {
+ trait Exp[+T]
+ abstract class FuncExp[-S, +T]
+
+ sealed abstract class FoundNode[T, Repr] {
+ def optimize[TupleT, U, That](parentNode: FlatMap[T, Repr, U, That]): Any
+ def optimize2[TupleT, U, That](parentNode: Any): Any
+ }
+
+ class FlatMap[T, Repr, U, That]
+
+ val Seq(fn: FoundNode[t, repr]) = Seq[FoundNode[_, _]]()
+ fn.optimize(null) // was: scala.MatchError: ? (of class BoundedWildcardType) @ Variances#varianceInType
+ fn.optimize2(null) // was: fatal error: bad type: ?(class scala.reflect.internal.Types$BoundedWildcardType) @ Pickle.putType
+}
diff --git a/test/files/pos/bug3578.scala b/test/files/pos/t3578.scala
similarity index 100%
rename from test/files/pos/bug3578.scala
rename to test/files/pos/t3578.scala
diff --git a/test/files/pos/bug359.scala b/test/files/pos/t359.scala
similarity index 100%
rename from test/files/pos/bug359.scala
rename to test/files/pos/t359.scala
diff --git a/test/files/pos/bug360.scala b/test/files/pos/t360.scala
similarity index 100%
rename from test/files/pos/bug360.scala
rename to test/files/pos/t360.scala
diff --git a/test/files/pos/bug361.scala b/test/files/pos/t361.scala
similarity index 100%
rename from test/files/pos/bug361.scala
rename to test/files/pos/t361.scala
diff --git a/test/files/pos/bug3636.scala b/test/files/pos/t3636.scala
similarity index 100%
rename from test/files/pos/bug3636.scala
rename to test/files/pos/t3636.scala
diff --git a/test/files/pos/bug3642/Tuppel_1.java b/test/files/pos/t3642/Tuppel_1.java
similarity index 100%
rename from test/files/pos/bug3642/Tuppel_1.java
rename to test/files/pos/t3642/Tuppel_1.java
diff --git a/test/files/pos/bug3642/bug3642_2.scala b/test/files/pos/t3642/t3642_2.scala
similarity index 100%
rename from test/files/pos/bug3642/bug3642_2.scala
rename to test/files/pos/t3642/t3642_2.scala
diff --git a/test/files/pos/bug3671.scala b/test/files/pos/t3671.scala
similarity index 100%
rename from test/files/pos/bug3671.scala
rename to test/files/pos/t3671.scala
diff --git a/test/files/pos/bug3688-redux.scala b/test/files/pos/t3688-redux.scala
similarity index 100%
rename from test/files/pos/bug3688-redux.scala
rename to test/files/pos/t3688-redux.scala
diff --git a/test/files/pos/t3688.scala b/test/files/pos/t3688.scala
index 0ac1cfe..d15e9d1 100644
--- a/test/files/pos/t3688.scala
+++ b/test/files/pos/t3688.scala
@@ -7,3 +7,8 @@ object Test {
implicitly[mutable.Map[Int, String] => ju.Dictionary[Int, String]]
}
+
+object Test2 {
+ def m[P <% ju.List[Int]](l: P) = 1
+ m(List(1)) // bug: should compile
+}
\ No newline at end of file
diff --git a/test/files/pos/bug372.scala b/test/files/pos/t372.scala
similarity index 100%
rename from test/files/pos/bug372.scala
rename to test/files/pos/t372.scala
diff --git a/test/files/pos/bug374.scala b/test/files/pos/t374.scala
similarity index 100%
rename from test/files/pos/bug374.scala
rename to test/files/pos/t374.scala
diff --git a/test/files/pos/t3800.scala b/test/files/pos/t3800.scala
index 796eb26..61dbeaf 100644
--- a/test/files/pos/t3800.scala
+++ b/test/files/pos/t3800.scala
@@ -1,4 +1,4 @@
-class meh extends StaticAnnotation
+class meh extends annotation.StaticAnnotation
class ALike[C]
abstract class AFactory[CC[x] <: ALike[CC[x]]] {
diff --git a/test/files/pos/t3836.scala b/test/files/pos/t3836.scala
new file mode 100644
index 0000000..840f171
--- /dev/null
+++ b/test/files/pos/t3836.scala
@@ -0,0 +1,14 @@
+package foo
+
+package object bar {
+ type IOException = java.io.IOException
+}
+
+package baz {
+ import java.io._
+ import foo.bar._
+
+ object Test {
+ def f = new IOException
+ }
+}
diff --git a/test/files/pos/t3856.scala b/test/files/pos/t3856.scala
index fd253a5..5ea4b84 100644
--- a/test/files/pos/t3856.scala
+++ b/test/files/pos/t3856.scala
@@ -2,6 +2,7 @@ case class C[T](x: T)
case class CS(xs: C[_]*)
+// t3856
object Test {
val x = CS(C(5), C("abc")) match { case CS(C(5), xs @ _*) => xs }
println(x)
diff --git a/test/files/pos/bug3861.scala b/test/files/pos/t3861.scala
similarity index 100%
rename from test/files/pos/bug3861.scala
rename to test/files/pos/t3861.scala
diff --git a/test/files/pos/t3880.scala b/test/files/pos/t3880.scala
new file mode 100644
index 0000000..b6f06c4
--- /dev/null
+++ b/test/files/pos/t3880.scala
@@ -0,0 +1,16 @@
+abstract class Bar[+B] {
+}
+abstract class C1[+B] extends Bar[B] {
+ private[this] def g(x: C1[B]): Unit = ()
+
+ // this method is fine: notice that it allows the call to g,
+ // which requires C1[B], even though we matched on C1[_].
+ // (That is good news.)
+ private[this] def f1(x: Bar[B]): Unit = x match {
+ case x: C1[_] => g(x)
+ }
+ // this one crashes.
+ private[this] def f2(x: Bar[B]): Unit = x match {
+ case x: C1[_] => f2(x)
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug3883.scala b/test/files/pos/t3883.scala
similarity index 100%
rename from test/files/pos/bug3883.scala
rename to test/files/pos/t3883.scala
diff --git a/test/files/pos/bug389.scala b/test/files/pos/t389.scala
similarity index 100%
rename from test/files/pos/bug389.scala
rename to test/files/pos/t389.scala
diff --git a/test/files/pos/t3898.scala b/test/files/pos/t3898.scala
new file mode 100644
index 0000000..075692e
--- /dev/null
+++ b/test/files/pos/t3898.scala
@@ -0,0 +1,6 @@
+trait Atomic[@specialized(Boolean) T] {
+ def x: T
+
+ def f(fn: T => T): Boolean = f(fn(x), true)
+ def f[R](a: T, b: R): R = b
+}
diff --git a/test/files/pos/bug3938/Parent.java b/test/files/pos/t3938/Parent.java
similarity index 100%
rename from test/files/pos/bug3938/Parent.java
rename to test/files/pos/t3938/Parent.java
diff --git a/test/files/pos/bug3938/UseParent.scala b/test/files/pos/t3938/UseParent.scala
similarity index 100%
rename from test/files/pos/bug3938/UseParent.scala
rename to test/files/pos/t3938/UseParent.scala
diff --git a/test/files/pos/t3951/Coll_1.scala b/test/files/pos/t3951/Coll_1.scala
index c2cc39a..556c848 100644
--- a/test/files/pos/t3951/Coll_1.scala
+++ b/test/files/pos/t3951/Coll_1.scala
@@ -15,7 +15,7 @@ sealed trait DynamicDocument extends Document {
class Coll extends StaticDocument
// similiar issue with annotations
-class ann[T] extends StaticAnnotation
+class ann[T] extends annotation.StaticAnnotation
trait StatDoc extends Doc {
@ann[StatFB]
diff --git a/test/files/pos/t3960.flags b/test/files/pos/t3960.flags
new file mode 100644
index 0000000..4449dbb
--- /dev/null
+++ b/test/files/pos/t3960.flags
@@ -0,0 +1 @@
+-Ycheck:typer
\ No newline at end of file
diff --git a/test/files/pos/t3960.scala b/test/files/pos/t3960.scala
new file mode 100644
index 0000000..5c658e9
--- /dev/null
+++ b/test/files/pos/t3960.scala
@@ -0,0 +1,7 @@
+class A {
+ class C[x]
+ val cs = new scala.collection.mutable.HashMap[C[_], Int]
+ def c: C[_] = sys.error("")
+ val eval: C[_] = c
+ cs(c) += 1
+}
diff --git a/test/files/pos/bug397.scala b/test/files/pos/t397.scala
similarity index 100%
rename from test/files/pos/bug397.scala
rename to test/files/pos/t397.scala
diff --git a/test/files/pos/bug3972.scala b/test/files/pos/t3972.scala
similarity index 100%
rename from test/files/pos/bug3972.scala
rename to test/files/pos/t3972.scala
diff --git a/test/files/pos/t3999/a_1.scala b/test/files/pos/t3999/a_1.scala
new file mode 100644
index 0000000..25366ee
--- /dev/null
+++ b/test/files/pos/t3999/a_1.scala
@@ -0,0 +1,9 @@
+package foo
+
+class Outside
+
+package object bar {
+ class Val(b: Boolean)
+ implicit def boolean2Val(b: Boolean) = new Val(b)
+ implicit def boolean2Outside(b: Boolean) = new Outside
+}
\ No newline at end of file
diff --git a/test/files/pos/t3999/b_2.scala b/test/files/pos/t3999/b_2.scala
new file mode 100644
index 0000000..1af82c8
--- /dev/null
+++ b/test/files/pos/t3999/b_2.scala
@@ -0,0 +1,7 @@
+package foo
+package bar
+
+class A {
+ val s: Val = false
+ val o: Outside = false
+}
\ No newline at end of file
diff --git a/test/files/pos/t3999b.scala b/test/files/pos/t3999b.scala
new file mode 100644
index 0000000..d3fe108
--- /dev/null
+++ b/test/files/pos/t3999b.scala
@@ -0,0 +1,20 @@
+object `package` {
+ trait Score { def toString : String }
+ trait Test[+T <: Score] { def apply(s : String) : T }
+
+ case class FT(f : Float) extends Score
+ implicit object FT extends Test[FT] { def apply(s : String) : FT = new FT(s.toFloat) }
+
+ case class IT(i : Int) extends Score
+ implicit object IT extends Test[IT] { def apply(s : String) : IT = new IT(s.toInt) }
+}
+
+class TT[+T <: Score](implicit val tb : Test[T]) {
+ def read(s : String) : T = tb(s)
+}
+
+object Tester {
+ val tt = new TT[FT]
+ val r = tt.read("1.0")
+ r.toString
+}
\ No newline at end of file
diff --git a/test/files/pos/bug4018.scala b/test/files/pos/t4018.scala
similarity index 100%
rename from test/files/pos/bug4018.scala
rename to test/files/pos/t4018.scala
diff --git a/test/files/pos/bug402.scala b/test/files/pos/t402.scala
similarity index 100%
rename from test/files/pos/bug402.scala
rename to test/files/pos/t402.scala
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/pos/t4020.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/pos/t4020.flags
diff --git a/test/files/pos/t4020.scala b/test/files/pos/t4020.scala
new file mode 100644
index 0000000..8a758d5
--- /dev/null
+++ b/test/files/pos/t4020.scala
@@ -0,0 +1,25 @@
+class A {
+ sealed trait Foo
+}
+
+object a1 extends A {
+ case class Foo1(i: Int) extends Foo
+}
+
+object a2 extends A {
+ case class Foo2(i: Int) extends Foo
+}
+
+class B {
+ def mthd(foo: a2.Foo) = {
+ foo match {
+ case a2.Foo2(i) => i
+
+ // Note: This case is impossible. In fact, scalac
+ // will (correctly) report an error if it is uncommented,
+ // but a warning if it is commented.
+
+ // case a1.Foo1(i) => i
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug404.scala b/test/files/pos/t404.scala
similarity index 100%
rename from test/files/pos/bug404.scala
rename to test/files/pos/t404.scala
diff --git a/test/files/pos/t4063.scala b/test/files/pos/t4063.scala
new file mode 100644
index 0000000..5e19c42
--- /dev/null
+++ b/test/files/pos/t4063.scala
@@ -0,0 +1,39 @@
+trait Parallel
+trait Parallelizable[+ParRepr <: Parallel]
+
+trait PIterableLike[+T, +Repr <: Parallel] extends Parallel with Parallelizable[PIterableLike[T, Repr]]
+
+trait PMap[K, V] extends PIterableLike[(K, V), PMap[K, V]]
+trait PSet[T] extends PIterableLike[T, PSet[T]]
+
+trait CIterableLike[+T, +Repr]
+
+trait CSet[T] extends CIterableLike[T, CSet[T]] with Parallelizable[PSet[T]]
+
+trait CMap[K, V] extends CIterableLike[(K, V), CMap[K, V]] with Parallelizable[PMap[K, V]]
+
+object Test {
+ var x = 0
+
+ def main() {
+ val map: CMap[Int, CSet[Int]] = new CMap[Int, CSet[Int]] {}
+ val set: CSet[Int] = new CSet[Int] {}
+
+ // should infer type argument
+ //map.synchronized[CIterableLike[Any, Any] with Parallelizable[PIterableLike[Any, Parallel with Parallelizable[Parallel]]]] {
+ // or:
+ //map.synchronized[CIterableLike[Any, Any] with Parallelizable[PIterableLike[Any, Parallel]]] {
+ // or, maybe it could also infer existential types:
+ //map.synchronized[CIterableLike[Any, _] with Parallelizable[PIterableLike[Any, _]]] {
+
+ map.synchronized {
+ if (x == 0) {
+ map
+ } else {
+ set
+ }
+ }
+
+ }
+}
+
diff --git a/test/files/pos/t4070.scala b/test/files/pos/t4070.scala
new file mode 100644
index 0000000..29c8d16
--- /dev/null
+++ b/test/files/pos/t4070.scala
@@ -0,0 +1,37 @@
+package a {
+ // method before classes
+ trait Foo {
+ def crash(x: Dingus[_]): Unit = x match { case m: Bippy[tv] => () }
+
+ class Dingus[T]
+ class Bippy[CC[X] <: Seq[X]]() extends Dingus[CC[Int]]
+ }
+}
+
+package b {
+ // classes before method
+ trait Foo {
+ class Dingus[T]
+ class Bippy[CC[X] <: Seq[X]]() extends Dingus[CC[Int]]
+
+ def crash(x: Dingus[_]): Unit = x match { case m: Bippy[tv] => () }
+ }
+}
+
+
+/*
+// With crash below the clasess:
+% scalac -Dscalac.debug.tvar ./a.scala
+[ create] ?_$1 ( In Foo#crash )
+[ setInst] tv[Int] ( In Foo#crash, _$1=tv[Int] )
+[ create] tv[Int] ( In Foo#crash )
+[ clone] tv[Int] ( Foo#crash )
+
+// With crash above the classes:
+% scalac -Dscalac.debug.tvar ./a.scala
+[ create] ?tv ( In Foo#crash )
+./a.scala:2: error: Invalid type application in TypeVar: List(), List(Int)
+ def crash(x: Dingus[_]): Unit = x match { case m: Bippy[tv] => () }
+ ^
+one error found
+*/
diff --git a/test/files/pos/t4070b.scala b/test/files/pos/t4070b.scala
new file mode 100644
index 0000000..36d03de
--- /dev/null
+++ b/test/files/pos/t4070b.scala
@@ -0,0 +1,35 @@
+package a {
+ abstract class DeliteOp[B]
+ abstract class DeliteCollection[A]
+ abstract class Exp[T] { def Type: T }
+
+ trait DeliteOpMap[A,B,C[X] <: DeliteCollection[X]] extends DeliteOp[C[B]] {
+ val in: Exp[C[A]]
+ val func: Exp[B]
+ val alloc: Exp[C[B]]
+ }
+
+ object Test {
+ def f(x: DeliteOp[_]) = x match {
+ case map: DeliteOpMap[_,_,_] => map.alloc.Type
+ }
+ }
+}
+
+package b {
+ object Test {
+ def f(x: DeliteOp[_]) = x match {
+ case map: DeliteOpMap[_,_,_] => map.alloc.Type
+ }
+ }
+
+ abstract class DeliteOp[B]
+ abstract class DeliteCollection[A]
+ abstract class Exp[T] { def Type: T }
+
+ trait DeliteOpMap[A,B,C[X] <: DeliteCollection[X]] extends DeliteOp[C[B]] {
+ val in: Exp[C[A]]
+ val func: Exp[B]
+ val alloc: Exp[C[B]]
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug415.scala b/test/files/pos/t415.scala
similarity index 100%
rename from test/files/pos/bug415.scala
rename to test/files/pos/t415.scala
diff --git a/test/files/pos/t4176.scala b/test/files/pos/t4176.scala
new file mode 100644
index 0000000..b4f1e70
--- /dev/null
+++ b/test/files/pos/t4176.scala
@@ -0,0 +1,6 @@
+// a.scala
+// Fri Jan 20 12:22:51 PST 2012
+
+class A(xs: Int*) { def getXs = xs }
+
+class B extends A { override def getXs = Nil }
diff --git a/test/files/pos/t4176b.scala b/test/files/pos/t4176b.scala
new file mode 100644
index 0000000..11914c5
--- /dev/null
+++ b/test/files/pos/t4176b.scala
@@ -0,0 +1,5 @@
+object Test {
+ def foo(a: String*) = a
+ val fooEta = foo _
+ (foo: Seq[String] => Seq[String])
+}
diff --git a/test/files/pos/bug4188.scala b/test/files/pos/t4188.scala
similarity index 100%
rename from test/files/pos/bug4188.scala
rename to test/files/pos/t4188.scala
diff --git a/test/files/pos/bug419.scala b/test/files/pos/t419.scala
similarity index 100%
rename from test/files/pos/bug419.scala
rename to test/files/pos/t419.scala
diff --git a/test/files/pos/bug422.scala b/test/files/pos/t422.scala
similarity index 100%
rename from test/files/pos/bug422.scala
rename to test/files/pos/t422.scala
diff --git a/test/files/pos/bug4220.scala b/test/files/pos/t4220.scala
similarity index 100%
rename from test/files/pos/bug4220.scala
rename to test/files/pos/t4220.scala
diff --git a/test/files/pos/bug4237.scala b/test/files/pos/t4237.scala
similarity index 100%
rename from test/files/pos/bug4237.scala
rename to test/files/pos/t4237.scala
diff --git a/test/files/pos/bug4269.scala b/test/files/pos/t4269.scala
similarity index 100%
rename from test/files/pos/bug4269.scala
rename to test/files/pos/t4269.scala
diff --git a/test/files/pos/t4273.scala b/test/files/pos/t4273.scala
new file mode 100644
index 0000000..9a942e8
--- /dev/null
+++ b/test/files/pos/t4273.scala
@@ -0,0 +1,8 @@
+class A {
+ implicit def compareComparables[T](x: T)(implicit ord: Ordering[T]) = new ord.Ops(x)
+
+ class Bippy
+ implicit val bippyOrdering = new Ordering[Bippy] { def compare(x: Bippy, y: Bippy) = util.Random.nextInt }
+
+ (new Bippy) < (new Bippy)
+}
\ No newline at end of file
diff --git a/test/files/pos/bug4275.scala b/test/files/pos/t4275.scala
similarity index 100%
rename from test/files/pos/bug4275.scala
rename to test/files/pos/t4275.scala
diff --git a/test/files/pos/bug430-feb09.scala b/test/files/pos/t430-feb09.scala
similarity index 100%
rename from test/files/pos/bug430-feb09.scala
rename to test/files/pos/t430-feb09.scala
diff --git a/test/files/pos/bug430.scala b/test/files/pos/t430.scala
similarity index 100%
rename from test/files/pos/bug430.scala
rename to test/files/pos/t430.scala
diff --git a/test/files/pos/bug4305.scala b/test/files/pos/t4305.scala
similarity index 100%
rename from test/files/pos/bug4305.scala
rename to test/files/pos/t4305.scala
diff --git a/test/files/pos/bug432.scala b/test/files/pos/t432.scala
similarity index 100%
rename from test/files/pos/bug432.scala
rename to test/files/pos/t432.scala
diff --git a/test/files/pos/t4336.scala b/test/files/pos/t4336.scala
new file mode 100644
index 0000000..e10d001
--- /dev/null
+++ b/test/files/pos/t4336.scala
@@ -0,0 +1,19 @@
+object Main {
+ class NonGeneric {}
+ class Generic[T] {}
+
+ class Composite {
+ def contains(setup : Composite => Unit) : Composite = this
+ }
+
+ def generic[T](parent: Composite): Generic[T] = new Generic[T]
+ def nonGeneric(parent: Composite): NonGeneric = new NonGeneric
+
+ new Composite().contains(
+ nonGeneric // should have type Composite => NonGeneric
+ )
+
+ new Composite().contains(
+ generic[Int] // should have type Composite => Generic[Int]
+ )
+}
diff --git a/test/files/pos/t4351.check b/test/files/pos/t4351.check
new file mode 100644
index 0000000..cb5d407
--- /dev/null
+++ b/test/files/pos/t4351.check
@@ -0,0 +1 @@
+runtime exception
diff --git a/test/files/pos/t4351.scala b/test/files/pos/t4351.scala
new file mode 100644
index 0000000..2d57588
--- /dev/null
+++ b/test/files/pos/t4351.scala
@@ -0,0 +1,20 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ try new BooleanPropImpl() value
+ catch {
+ case e: RuntimeException => println("runtime exception")
+ }
+ }
+}
+
+trait Prop[@specialized(Boolean) +T] {
+ def value: T
+}
+
+class PropImpl[+T] extends Prop[T] {
+ def value: T = scala.sys.error("")
+}
+
+trait BooleanProp extends Prop[Boolean]
+
+class BooleanPropImpl() extends PropImpl[Boolean] with BooleanProp
diff --git a/test/files/pos/bug439.scala b/test/files/pos/t439.scala
similarity index 100%
rename from test/files/pos/bug439.scala
rename to test/files/pos/t439.scala
diff --git a/test/files/pos/bug443.scala b/test/files/pos/t443.scala
similarity index 100%
rename from test/files/pos/bug443.scala
rename to test/files/pos/t443.scala
diff --git a/test/files/pos/t4430.scala b/test/files/pos/t4430.scala
new file mode 100644
index 0000000..746ecb2
--- /dev/null
+++ b/test/files/pos/t4430.scala
@@ -0,0 +1,11 @@
+class Crash {
+ def S(op: => Double) = 0
+ def A(a: Int, b: Int) = 0
+
+ val t = 0
+
+ val q = A(
+ b = S { val xxx = t ; 42 },
+ a = 0
+ )
+}
diff --git a/test/files/pos/t4494.flags b/test/files/pos/t4494.flags
new file mode 100644
index 0000000..281f0a1
--- /dev/null
+++ b/test/files/pos/t4494.flags
@@ -0,0 +1 @@
+-Yrangepos
diff --git a/test/files/pos/t4494.scala b/test/files/pos/t4494.scala
new file mode 100644
index 0000000..ef38a19
--- /dev/null
+++ b/test/files/pos/t4494.scala
@@ -0,0 +1,3 @@
+object A {
+ List(1)
+}
diff --git a/test/files/pos/bug4501.scala b/test/files/pos/t4501.scala
similarity index 100%
rename from test/files/pos/bug4501.scala
rename to test/files/pos/t4501.scala
diff --git a/test/files/pos/t4502.scala b/test/files/pos/t4502.scala
new file mode 100644
index 0000000..ed7d3d0
--- /dev/null
+++ b/test/files/pos/t4502.scala
@@ -0,0 +1,12 @@
+class T {
+ def send(o: Any, d: Int = 10) { }
+
+ def c(f: => Any) { }
+
+ def f() {
+ var a = this
+ a.send(
+ c(a.send(()))
+ )
+ }
+}
diff --git a/test/files/pos/t4524.scala b/test/files/pos/t4524.scala
new file mode 100644
index 0000000..4721a7d
--- /dev/null
+++ b/test/files/pos/t4524.scala
@@ -0,0 +1,9 @@
+object test {
+ import A._
+ class A(b: B = new A.B())
+ object A {
+ class B
+ new A()
+ }
+}
+
diff --git a/test/files/pos/t4545.scala b/test/files/pos/t4545.scala
new file mode 100644
index 0000000..8c7a323
--- /dev/null
+++ b/test/files/pos/t4545.scala
@@ -0,0 +1,14 @@
+object Test {
+ def f[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](table: Tuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T])(fun: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => Unit) {
+ }
+ def g[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](table: Tuple21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U])(fun: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => Unit) {
+ }
+
+ def g20 = f(
+ ( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1)
+ ) { case ((a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t)) => () }
+
+ def g21 = g(
+ (1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1)
+ ) { case ((a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u)) => () }
+}
diff --git a/test/files/pos/bug4553.scala b/test/files/pos/t4553.scala
old mode 100644
new mode 100755
similarity index 100%
rename from test/files/pos/bug4553.scala
rename to test/files/pos/t4553.scala
diff --git a/test/files/pos/t4579.flags b/test/files/pos/t4579.flags
new file mode 100644
index 0000000..1182725
--- /dev/null
+++ b/test/files/pos/t4579.flags
@@ -0,0 +1 @@
+-optimize
\ No newline at end of file
diff --git a/test/files/pos/t4579.scala b/test/files/pos/t4579.scala
new file mode 100644
index 0000000..8951ec0
--- /dev/null
+++ b/test/files/pos/t4579.scala
@@ -0,0 +1,518 @@
+//############################################################################
+// Lisp interpreter (revived as an optimizer test.)
+//############################################################################
+
+//############################################################################
+// Lisp Scanner
+
+class LispTokenizer(s: String) extends Iterator[String] {
+ private var i = 0;
+ private def isDelimiter(ch: Char) = ch <= ' ' || ch == '(' || ch == ')'
+ def hasNext: Boolean = {
+ while (i < s.length() && s.charAt(i) <= ' ') i += 1
+ i < s.length()
+ }
+ def next: String =
+ if (hasNext) {
+ val start = i
+ if (isDelimiter(s charAt i)) i += 1
+ else
+ do i = i + 1
+ while (!isDelimiter(s charAt i))
+ s.substring(start, i)
+ } else sys.error("premature end of string")
+}
+
+//############################################################################
+// Lisp Interface
+
+trait Lisp {
+ type Data
+
+ def string2lisp(s: String): Data
+ def lisp2string(s: Data): String
+
+ def evaluate(d: Data): Data
+ // !!! def evaluate(s: String): Data = evaluate(string2lisp(s))
+ def evaluate(s: String): Data
+}
+
+//############################################################################
+// Lisp Implementation Using Case Classes
+
+object LispCaseClasses extends Lisp {
+
+ import List.range
+
+ trait Data {
+ def elemsToString(): String = toString();
+ }
+ case class CONS(car: Data, cdr: Data) extends Data {
+ override def toString() = "(" + elemsToString() + ")";
+ override def elemsToString() = car.toString() + (cdr match {
+ case NIL() => ""
+ case _ => " " + cdr.elemsToString();
+ })
+ }
+ case class NIL() extends Data { // !!! use case object
+ override def toString() = "()";
+ }
+ case class SYM(name: String) extends Data {
+ override def toString() = name;
+ }
+ case class NUM(x: Int) extends Data {
+ override def toString() = x.toString();
+ }
+ case class STR(x: String) extends Data {
+ override def toString() = "\"" + x + "\"";
+ }
+ case class FUN(f: List[Data] => Data) extends Data {
+ override def toString() = "<fn>";
+ }
+
+ def list(): Data =
+ NIL();
+ def list(x0: Data): Data =
+ CONS(x0, NIL());
+ def list(x0: Data, x1: Data): Data =
+ CONS(x0, list(x1));
+ def list(x0: Data, x1: Data, x2: Data): Data =
+ CONS(x0, list(x1, x2));
+ def list(x0: Data, x1: Data, x2: Data, x3: Data): Data =
+ CONS(x0, list(x1, x2, x3));
+ def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data): Data =
+ CONS(x0, list(x1, x2, x3, x4));
+ def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data): Data =
+ CONS(x0, list(x1, x2, x3, x4, x5));
+ def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data,
+ x6: Data): Data =
+ CONS(x0, list(x1, x2, x3, x4, x5, x6));
+ def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data,
+ x6: Data, x7: Data): Data =
+ CONS(x0, list(x1, x2, x3, x4, x5, x6, x7));
+ def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data,
+ x6: Data, x7: Data, x8: Data): Data =
+ CONS(x0, list(x1, x2, x3, x4, x5, x6, x7, x8));
+ def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data,
+ x6: Data, x7: Data, x8: Data, x9: Data): Data =
+ CONS(x0, list(x1, x2, x3, x4, x5, x6, x7, x8, x9));
+
+ var curexp: Data = null
+ var trace: Boolean = false
+ var indent: Int = 0
+
+ def lispError[a](msg: String): a =
+ sys.error("error: " + msg + "\n" + curexp);
+
+ trait Environment {
+ def lookup(n: String): Data;
+ def extendRec(name: String, expr: Environment => Data) =
+ new Environment {
+ def lookup(n: String): Data =
+ if (n == name) expr(this) else Environment.this.lookup(n);
+ }
+ def extend(name: String, v: Data) = extendRec(name, (env1 => v));
+ }
+ val EmptyEnvironment = new Environment {
+ def lookup(n: String): Data = lispError("undefined: " + n);
+ }
+
+ def toList(x: Data): List[Data] = x match {
+ case NIL() => List()
+ case CONS(y, ys) => y :: toList(ys)
+ case _ => lispError("malformed list: " + x);
+ }
+
+ def toBoolean(x: Data) = x match {
+ case NUM(0) => false
+ case _ => true
+ }
+
+ def normalize(x: Data): Data = x match {
+ case CONS(SYM("def"),
+ CONS(CONS(SYM(name), args), CONS(body, CONS(expr, NIL())))) =>
+ normalize(list(SYM("def"),
+ SYM(name), list(SYM("lambda"), args, body), expr))
+ case CONS(SYM("cond"), CONS(CONS(SYM("else"), CONS(expr, NIL())),NIL())) =>
+ normalize(expr)
+ case CONS(SYM("cond"), CONS(CONS(test, CONS(expr, NIL())), rest)) =>
+ normalize(list(SYM("if"), test, expr, CONS(SYM("cond"), rest)))
+ case CONS(h, t) => CONS(normalize(h), normalize(t))
+ case _ => x
+ }
+
+ def eval(x: Data, env: Environment): Data = {
+ val prevexp = curexp;
+ curexp = x;
+ if (trace) {
+ for (x <- range(1, indent)) Console.print(" ");
+ Console.println("===> " + x);
+ indent = indent + 1;
+ }
+ val result = eval1(x, env);
+ if (trace) {
+ indent = indent - 1;
+ for (x <- range(1, indent)) Console.print(" ");
+ Console.println("<=== " + result);
+ }
+ curexp = prevexp;
+ result
+ }
+
+ def eval1(x: Data, env: Environment): Data = x match {
+ case SYM(name) =>
+ env lookup name
+ case CONS(SYM("def"), CONS(SYM(name), CONS(y, CONS(z, NIL())))) =>
+ eval(z, env.extendRec(name, (env1 => eval(y, env1))))
+ case CONS(SYM("val"), CONS(SYM(name), CONS(y, CONS(z, NIL())))) =>
+ eval(z, env.extend(name, eval(y, env)))
+ case CONS(SYM("lambda"), CONS(params, CONS(y, NIL()))) =>
+ mkLambda(params, y, env)
+ case CONS(SYM("if"), CONS(c, CONS(t, CONS(e, NIL())))) =>
+ if (toBoolean(eval(c, env))) eval(t, env) else eval(e, env)
+ case CONS(SYM("quote"), CONS(x, NIL())) =>
+ x
+ case CONS(y, xs) =>
+ apply(eval(y, env), toList(xs) map (x => eval(x, env)))
+ case NUM(_) => x
+ case STR(_) => x
+ case FUN(_) => x
+ case _ =>
+ lispError("illegal term")
+ }
+
+ def apply(fn: Data, args: List[Data]): Data = fn match {
+ case FUN(f) => f(args);
+ case _ => lispError("application of non-function: " + fn);
+ }
+
+ def mkLambda(params: Data, expr: Data, env: Environment): Data = {
+
+ def extendEnv(env: Environment,
+ ps: List[String], args: List[Data]): Environment =
+ Pair(ps, args) match {
+ case Pair(List(), List()) =>
+ env
+ case Pair(p :: ps1, arg :: args1) =>
+ extendEnv(env.extend(p, arg), ps1, args1)
+ case _ =>
+ lispError("wrong number of arguments")
+ }
+
+ val ps: List[String] = toList(params) map {
+ case SYM(name) => name
+ case _ => sys.error("illegal parameter list");
+ }
+
+ FUN(args => eval(expr, extendEnv(env, ps, args)))
+ }
+
+ val globalEnv = EmptyEnvironment
+ .extend("=", FUN({
+ case List(NUM(arg1),NUM(arg2)) => NUM(if (arg1 == arg2) 1 else 0)
+ case List(STR(arg1),STR(arg2)) => NUM(if (arg1 == arg2) 1 else 0)}))
+ .extend("+", FUN({
+ case List(NUM(arg1),NUM(arg2)) => NUM(arg1 + arg2)
+ case List(STR(arg1),STR(arg2)) => STR(arg1 + arg2)}))
+ .extend("-", FUN({
+ case List(NUM(arg1),NUM(arg2)) => NUM(arg1 - arg2)}))
+ .extend("*", FUN({
+ case List(NUM(arg1),NUM(arg2)) => NUM(arg1 * arg2)}))
+ .extend("/", FUN({
+ case List(NUM(arg1),NUM(arg2)) => NUM(arg1 / arg2)}))
+ .extend("car", FUN({
+ case List(CONS(x, xs)) => x}))
+ .extend("cdr", FUN({
+ case List(CONS(x, xs)) => xs}))
+ .extend("null?", FUN({
+ case List(NIL()) => NUM(1)
+ case _ => NUM(0)}))
+ .extend("cons", FUN({
+ case List(x, y) => CONS(x, y)}));
+
+ def evaluate(x: Data): Data = eval(normalize(x), globalEnv);
+ def evaluate(s: String): Data = evaluate(string2lisp(s));
+
+ def string2lisp(s: String): Data = {
+ val it = new LispTokenizer(s);
+ def parse(token: String): Data = {
+ if (token == "(") parseList
+ else if (token == ")") sys.error("unbalanced parentheses")
+ else if ('0' <= token.charAt(0) && token.charAt(0) <= '9')
+ NUM(token.toInt)
+ else if (token.charAt(0) == '\"' && token.charAt(token.length()-1)=='\"')
+ STR(token.substring(1,token.length() - 1))
+ else SYM(token)
+ }
+ def parseList: Data = {
+ val token = it.next;
+ if (token == ")") NIL() else CONS(parse(token), parseList)
+ }
+ parse(it.next)
+ }
+
+ def lisp2string(d: Data): String = d.toString();
+}
+
+//############################################################################
+// Lisp Implementation Using Any
+
+object LispAny extends Lisp {
+
+ import List._;
+
+ type Data = Any;
+
+ case class Lambda(f: List[Data] => Data);
+
+ var curexp: Data = null;
+ var trace: Boolean = false;
+ var indent: Int = 0;
+
+ def lispError[a](msg: String): a =
+ sys.error("error: " + msg + "\n" + curexp);
+
+ trait Environment {
+ def lookup(n: String): Data;
+ def extendRec(name: String, expr: Environment => Data) =
+ new Environment {
+ def lookup(n: String): Data =
+ if (n == name) expr(this) else Environment.this.lookup(n);
+ }
+ def extend(name: String, v: Data) = extendRec(name, (env1 => v));
+ }
+ val EmptyEnvironment = new Environment {
+ def lookup(n: String): Data = lispError("undefined: " + n);
+ }
+
+ def asList(x: Data): List[Data] = x match {
+ case y: List[_] => y
+ case _ => lispError("malformed list: " + x)
+ }
+
+ def asInt(x: Data): Int = x match {
+ case y: Int => y
+ case _ => lispError("not an integer: " + x)
+ }
+
+ def asString(x: Data): String = x match {
+ case y: String => y
+ case _ => lispError("not a string: " + x)
+ }
+
+ def asBoolean(x: Data): Boolean = x != 0
+
+ def normalize(x: Data): Data = x match {
+ case 'and :: x :: y :: Nil =>
+ normalize('if :: x :: y :: 0 :: Nil)
+ case 'or :: x :: y :: Nil =>
+ normalize('if :: x :: 1 :: y :: Nil)
+ case 'def :: (name :: args) :: body :: expr :: Nil =>
+ normalize('def :: name :: ('lambda :: args :: body :: Nil) :: expr :: Nil)
+ case 'cond :: ('else :: expr :: Nil) :: rest =>
+ normalize(expr);
+ case 'cond :: (test :: expr :: Nil) :: rest =>
+ normalize('if :: test :: expr :: ('cond :: rest) :: Nil)
+ case 'cond :: 'else :: expr :: Nil =>
+ normalize(expr)
+ case h :: t =>
+ normalize(h) :: asList(normalize(t))
+ case _ =>
+ x
+ }
+
+ def eval(x: Data, env: Environment): Data = {
+ val prevexp = curexp;
+ curexp = x;
+ if (trace) {
+ for (x <- range(1, indent)) Console.print(" ");
+ Console.println("===> " + x);
+ indent += 1;
+ }
+ val result = eval1(x, env);
+ if (trace) {
+ indent -= 1;
+ for (x <- range(1, indent)) Console.print(" ");
+ Console.println("<=== " + result);
+ }
+ curexp = prevexp;
+ result
+ }
+
+ def eval1(x: Data, env: Environment): Data = x match {
+ case Symbol(name) =>
+ env lookup name
+ case 'def :: Symbol(name) :: y :: z :: Nil =>
+ eval(z, env.extendRec(name, (env1 => eval(y, env1))))
+ case 'val :: Symbol(name) :: y :: z :: Nil =>
+ eval(z, env.extend(name, eval(y, env)))
+ case 'lambda :: params :: y :: Nil =>
+ mkLambda(params, y, env)
+ case 'if :: c :: y :: z :: Nil =>
+ if (asBoolean(eval(c, env))) eval(y, env) else eval(z, env)
+ case 'quote :: y :: Nil =>
+ y
+ case y :: z =>
+ apply(eval(y, env), z map (x => eval(x, env)))
+ case Lambda(_) => x
+ case y: String => x
+ case y: Int => x
+ case y => lispError("illegal term")
+ }
+
+ def lisp2string(x: Data): String = x match {
+ case Symbol(name) => name
+ case Nil => "()"
+ case y :: ys =>
+ def list2string(xs: List[Data]): String = xs match {
+ case List() => ""
+ case y :: ys => " " + lisp2string(y) + list2string(ys)
+ }
+ "(" + lisp2string(y) + list2string(ys) + ")"
+ case _ => if (x.isInstanceOf[String]) "\"" + x + "\""; else x.toString()
+ }
+
+ def apply(fn: Data, args: List[Data]): Data = fn match {
+ case Lambda(f) => f(args);
+ case _ => lispError("application of non-function: " + fn + " to " + args);
+ }
+
+ def mkLambda(params: Data, expr: Data, env: Environment): Data = {
+
+ def extendEnv(env: Environment,
+ ps: List[String], args: List[Data]): Environment =
+ Pair(ps, args) match {
+ case Pair(List(), List()) =>
+ env
+ case Pair(p :: ps1, arg :: args1) =>
+ extendEnv(env.extend(p, arg), ps1, args1)
+ case _ =>
+ lispError("wrong number of arguments")
+ }
+
+ val ps: List[String] = asList(params) map {
+ case Symbol(name) => name
+ case _ => sys.error("illegal parameter list");
+ }
+
+ Lambda(args => eval(expr, extendEnv(env, ps, args)))
+ }
+
+ val globalEnv = EmptyEnvironment
+ .extend("=", Lambda{
+ case List(arg1, arg2) => if(arg1 == arg2) 1 else 0})
+ .extend("+", Lambda{
+ case List(arg1: Int, arg2: Int) => arg1 + arg2
+ case List(arg1: String, arg2: String) => arg1 + arg2})
+ .extend("-", Lambda{
+ case List(arg1: Int, arg2: Int) => arg1 - arg2})
+ .extend("*", Lambda{
+ case List(arg1: Int, arg2: Int) => arg1 * arg2})
+ .extend("/", Lambda{
+ case List(arg1: Int, arg2: Int) => arg1 / arg2})
+ .extend("nil", Nil)
+ .extend("cons", Lambda{
+ case List(arg1, arg2) => arg1 :: asList(arg2)})
+ .extend("car", Lambda{
+ case List(x :: xs) => x})
+ .extend("cdr", Lambda{
+ case List(x :: xs) => xs})
+ .extend("null?", Lambda{
+ case List(Nil) => 1
+ case _ => 0});
+
+ def evaluate(x: Data): Data = eval(normalize(x), globalEnv);
+ def evaluate(s: String): Data = evaluate(string2lisp(s));
+
+ def string2lisp(s: String): Data = {
+ val it = new LispTokenizer(s);
+ def parse(token: String): Data = {
+ if (token == "(") parseList
+ else if (token == ")") sys.error("unbalanced parentheses")
+ //else if (Character.isDigit(token.charAt(0)))
+ else if (token.charAt(0).isDigit)
+ token.toInt
+ else if (token.charAt(0) == '\"' && token.charAt(token.length()-1)=='\"')
+ token.substring(1,token.length() - 1)
+ else Symbol(token)
+ }
+ def parseList: List[Data] = {
+ val token = it.next;
+ if (token == ")") Nil else parse(token) :: parseList
+ }
+ parse(it.next)
+ }
+}
+
+//############################################################################
+// List User
+
+class LispUser(lisp: Lisp) {
+
+ import lisp._;
+
+ def evaluate(s: String) = lisp2string(lisp.evaluate(s));
+
+ def run = {
+
+ Console.println(string2lisp("(lambda (x) (+ (* x x) 1))").asInstanceOf[AnyRef]);
+ Console.println(lisp2string(string2lisp("(lambda (x) (+ (* x x) 1))")));
+ Console.println;
+
+ Console.println("( '(1 2 3)) = " + evaluate(" (quote(1 2 3))"));
+ Console.println("(car '(1 2 3)) = " + evaluate("(car (quote(1 2 3)))"));
+ Console.println("(cdr '(1 2 3)) = " + evaluate("(cdr (quote(1 2 3)))"));
+ Console.println("(null? '(2 3)) = " + evaluate("(null? (quote(2 3)))"));
+ Console.println("(null? '()) = " + evaluate("(null? (quote()))"));
+ Console.println;
+
+ Console.println("faculty(10) = " + evaluate(
+ "(def (faculty n) " +
+ "(if (= n 0) " +
+ "1 " +
+ "(* n (faculty (- n 1)))) " +
+ "(faculty 10))"));
+ Console.println("faculty(10) = " + evaluate(
+ "(def (faculty n) " +
+ "(cond " +
+ "((= n 0) 1) " +
+ "(else (* n (faculty (- n 1))))) " +
+ "(faculty 10))"));
+ Console.println("foobar = " + evaluate(
+ "(def (foo n) " +
+ "(cond " +
+ "((= n 0) \"a\")" +
+ "((= n 1) \"b\")" +
+ "((= (/ n 2) 1) " +
+ "(cond " +
+ "((= n 2) \"c\")" +
+ "(else \"d\")))" +
+ "(else " +
+ "(def (bar m) " +
+ "(cond " +
+ "((= m 0) \"e\")" +
+ "((= m 1) \"f\")" +
+ "(else \"z\"))" +
+ "(bar (- n 4)))))" +
+ "(val nil (quote ())" +
+ "(val v1 (foo 0) " +
+ "(val v2 (+ (foo 1) (foo 2)) " +
+ "(val v3 (+ (+ (foo 3) (foo 4)) (foo 5)) " +
+ "(val v4 (foo 6) " +
+ "(cons v1 (cons v2 (cons v3 (cons v4 nil))))))))))"));
+ Console.println;
+ }
+}
+
+//############################################################################
+// Main
+
+object Test {
+ def main(args: Array[String]) {
+ new LispUser(LispCaseClasses).run;
+ new LispUser(LispAny).run;
+ ()
+ }
+}
+
+//############################################################################
diff --git a/test/files/pos/bug460.scala b/test/files/pos/t460.scala
similarity index 100%
rename from test/files/pos/bug460.scala
rename to test/files/pos/t460.scala
diff --git a/test/files/pos/bug4603/J.java b/test/files/pos/t4603/J.java
similarity index 100%
rename from test/files/pos/bug4603/J.java
rename to test/files/pos/t4603/J.java
diff --git a/test/files/pos/bug4603/S.scala b/test/files/pos/t4603/S.scala
similarity index 100%
rename from test/files/pos/bug4603/S.scala
rename to test/files/pos/t4603/S.scala
diff --git a/test/files/pos/bug464.scala b/test/files/pos/t464.scala
similarity index 100%
rename from test/files/pos/bug464.scala
rename to test/files/pos/t464.scala
diff --git a/test/files/pos/t4651.scala b/test/files/pos/t4651.scala
new file mode 100644
index 0000000..0612a8f
--- /dev/null
+++ b/test/files/pos/t4651.scala
@@ -0,0 +1,12 @@
+object Test {
+ def analyze(x: Any) = x match {
+ case s: String => println("It's a string: " + s)
+ case 1 => println("It's a one")
+ case (a: Int, b) => println("It's a pair of and int " + a +
+ " and something " + b)
+ case 1 :: 2 :: _ => println("It's a list starting with 1, 2")
+ case List(a, b, c) => println("It's a three-element list with " +
+ a + ", " + b + ", " + c)
+ case _ => println("It's something different")
+ }
+}
diff --git a/test/files/pos/t4716.scala b/test/files/pos/t4716.scala
new file mode 100644
index 0000000..d4bd55c
--- /dev/null
+++ b/test/files/pos/t4716.scala
@@ -0,0 +1,10 @@
+
+
+
+
+trait Bug2[@specialized(Int) +A] extends TraversableOnce[A] {
+ def ++[B >: A](that: TraversableOnce[B]) = {
+ lazy val it = that.toIterator
+ it
+ }
+}
diff --git a/test/files/pos/t4717.scala b/test/files/pos/t4717.scala
new file mode 100644
index 0000000..4acfe48
--- /dev/null
+++ b/test/files/pos/t4717.scala
@@ -0,0 +1,35 @@
+
+
+
+
+
+
+
+trait Bug1[@specialized(Boolean) A] extends TraversableOnce[A] {
+
+ def ++[B >: A](that: TraversableOnce[B]): Iterator[B] = new Iterator[B] {
+ lazy val it = that.toIterator
+ def hasNext = it.hasNext
+ def next = it.next
+ }
+
+}
+
+
+
+trait WorksFine[@specialized(Boolean) A] {
+ class SubBounds[B >: A] extends Bounds[B] {
+ lazy val it = ???
+ }
+ def x[B >: A]: Unit = new SubBounds[B]
+}
+
+
+trait Bounds[@specialized(Boolean) A] {
+ // okay without `>: A`
+ def x[B >: A]: Unit = new Bounds[B] {
+ lazy val it = ??? // def or val okay
+ }
+}
+
+
diff --git a/test/files/pos/bug4731.scala b/test/files/pos/t4731.scala
similarity index 100%
rename from test/files/pos/bug4731.scala
rename to test/files/pos/t4731.scala
diff --git a/test/files/pos/t4737/J_1.java b/test/files/pos/t4737/J_1.java
new file mode 100644
index 0000000..284afd6
--- /dev/null
+++ b/test/files/pos/t4737/J_1.java
@@ -0,0 +1,9 @@
+package j;
+
+public class J_1 {
+ protected class JavaInnerClass {
+ }
+ public void method(JavaInnerClass javaInnerclass) {
+ System.out.println("hello");
+ }
+}
diff --git a/test/files/pos/t4737/S_2.scala b/test/files/pos/t4737/S_2.scala
new file mode 100644
index 0000000..8598466
--- /dev/null
+++ b/test/files/pos/t4737/S_2.scala
@@ -0,0 +1,10 @@
+package s
+
+import j.J_1
+
+class ScalaSubClass extends J_1 {
+ override def method(javaInnerClass: J_1#JavaInnerClass) {
+ println("world")
+ }
+}
+
diff --git a/test/files/pos/bug4757/A_2.scala b/test/files/pos/t4757/A_2.scala
similarity index 100%
rename from test/files/pos/bug4757/A_2.scala
rename to test/files/pos/t4757/A_2.scala
diff --git a/test/files/pos/bug4757/B_3.scala b/test/files/pos/t4757/B_3.scala
similarity index 100%
rename from test/files/pos/bug4757/B_3.scala
rename to test/files/pos/t4757/B_3.scala
diff --git a/test/files/pos/bug4757/P_1.scala b/test/files/pos/t4757/P_1.scala
similarity index 100%
rename from test/files/pos/bug4757/P_1.scala
rename to test/files/pos/t4757/P_1.scala
diff --git a/test/files/pos/t4758.scala b/test/files/pos/t4758.scala
new file mode 100644
index 0000000..627dfd7
--- /dev/null
+++ b/test/files/pos/t4758.scala
@@ -0,0 +1,17 @@
+// /scala/trac/4758/a.scala
+// Fri Dec 2 13:41:54 PST 2011
+
+package bar {
+ // works
+ trait M[F[_]]
+ class S[X[_] <: M[X], A](val x:X[A])
+ object S {
+ def apply[X[_] <: M[X], A](x: X[A]): S[X, A] = new S[X, A](x)
+ def unapply[X[_] <: M[X], A](p: S[X, A]) = Some(p.x)
+ }
+}
+package foo {
+ // seemingly equivalent, doesn't work
+ trait M[F[_]]
+ case class S[X[_] <: M[X], A](x: X[A])
+}
diff --git a/test/files/pos/t4760.scala b/test/files/pos/t4760.scala
new file mode 100644
index 0000000..767e384
--- /dev/null
+++ b/test/files/pos/t4760.scala
@@ -0,0 +1,34 @@
+
+class Test {
+ // parses
+ def f1 = {
+ import scala._;
+ }
+ // b.scala:7: error: ';' expected but '}' found.
+ // }
+ // ^
+ // one error found
+ def f2 = {
+ import scala._
+ }
+ def f2b = {
+ import scala.collection.mutable.{ Map => MMap }
+ }
+ def f(): Unit = {
+ locally {
+ import scala.util.Properties.lineSeparator
+ }
+ }
+
+ // parses
+ def f3 = {
+ import scala._
+ 5
+ }
+ locally { (x: Int) =>
+ import scala.util._
+ }
+ 1 match {
+ case 1 => import scala.concurrent._
+ }
+}
diff --git a/test/files/pos/t4812.scala b/test/files/pos/t4812.scala
new file mode 100644
index 0000000..2a807ab
--- /dev/null
+++ b/test/files/pos/t4812.scala
@@ -0,0 +1,4 @@
+trait Test1 {
+ def m1(sym: Symbol = 'TestSym)
+ def m2(s: String = "TestString")
+}
diff --git a/test/files/pos/t4831.scala b/test/files/pos/t4831.scala
new file mode 100644
index 0000000..4800210
--- /dev/null
+++ b/test/files/pos/t4831.scala
@@ -0,0 +1,11 @@
+object O {
+ val a = 0
+}
+
+
+object test {
+ val O1: O.type = O
+ val O2: O.type = O
+ import O1.a, O2.a
+ println(a)
+}
diff --git a/test/files/run/bug2106.flags b/test/files/pos/t4840.flags
similarity index 100%
rename from test/files/run/bug2106.flags
rename to test/files/pos/t4840.flags
diff --git a/test/files/pos/bug4840.scala b/test/files/pos/t4840.scala
similarity index 100%
rename from test/files/pos/bug4840.scala
rename to test/files/pos/t4840.scala
diff --git a/test/files/pos/t4842.scala b/test/files/pos/t4842.scala
new file mode 100644
index 0000000..17ff684
--- /dev/null
+++ b/test/files/pos/t4842.scala
@@ -0,0 +1,26 @@
+class Foo (x: AnyRef) {
+ def this() = {
+ this(new { } ) // okay
+ }
+}
+
+
+class Blerg (x: AnyRef) {
+ def this() = {
+ this(new { class Bar { println(Bar.this); new { println(Bar.this) } }; new Bar } ) // okay
+ }
+}
+
+
+class Outer {
+ class Inner (x: AnyRef) {
+ def this() = {
+ this(new { class Bar { println(Bar.this); new { println(Bar.this) } }; new Bar } ) // okay
+ }
+
+ def this(x: Boolean) = {
+ this(new { println(Outer.this) } ) // okay
+ }
+ }
+}
+
diff --git a/test/files/pos/t4853.scala b/test/files/pos/t4853.scala
new file mode 100644
index 0000000..ed9b320
--- /dev/null
+++ b/test/files/pos/t4853.scala
@@ -0,0 +1,12 @@
+object Animal {
+ def main(args: Array[String]) { new Animal[Awake].goToSleep }
+}
+
+class Animal[A <: AwakeOrAsleep] {
+ def goToSleep[B >: A <: Awake]: Animal[Asleep] = new Animal[Asleep]
+ def wakeUp[B >: A <: Asleep]: Animal[Awake] = new Animal[Awake]
+}
+
+sealed trait AwakeOrAsleep
+trait Awake extends AwakeOrAsleep
+trait Asleep extends AwakeOrAsleep
diff --git a/test/files/pos/t4869.scala b/test/files/pos/t4869.scala
new file mode 100644
index 0000000..f84aa4e
--- /dev/null
+++ b/test/files/pos/t4869.scala
@@ -0,0 +1,8 @@
+// /scala/trac/4869/a.scala
+// Wed Jan 4 21:17:29 PST 2012
+
+class C[T]
+class A {
+ def f[T](x: T): C[_ <: T] = null
+ def g = List(1d) map f
+}
diff --git a/test/files/pos/t4910.scala b/test/files/pos/t4910.scala
new file mode 100644
index 0000000..c66fd52
--- /dev/null
+++ b/test/files/pos/t4910.scala
@@ -0,0 +1,6 @@
+class A {
+ implicit object foo
+ // it compiles if we uncomment this
+ // implicit val bar = foo
+ implicitly[foo.type]
+}
diff --git a/test/files/neg/bug4302.flags b/test/files/pos/t4911.flags
similarity index 100%
copy from test/files/neg/bug4302.flags
copy to test/files/pos/t4911.flags
diff --git a/test/files/pos/t4911.scala b/test/files/pos/t4911.scala
new file mode 100644
index 0000000..66c867a
--- /dev/null
+++ b/test/files/pos/t4911.scala
@@ -0,0 +1,16 @@
+import language._
+
+object Test {
+ class Foo[T](val x: T) ; object Foo { def unapply[T](x: Foo[T]) = Some(x.x) }
+ def f1[T](x: Foo[T]) = x match { case Foo(y) => y }
+ def f2[M[_], T](x: M[T]) = x match { case Foo(y) => y }
+
+ case class Bar[T](x: T)
+ def f3[T](x: Bar[T]) = x match { case Bar(y) => y }
+ def f4[M[_], T](x: M[T]) = x match { case Bar(y) => y }
+}
+//
+// ./b.scala:4: warning: non variable type-argument T in type pattern Test.Foo[T] is unchecked since it is eliminated by erasure
+// def f2[M[_], T](x: M[T]) = x match { case Foo(y) => y }
+// ^
+// one warning found
\ No newline at end of file
diff --git a/test/files/pos/t4938.scala b/test/files/pos/t4938.scala
new file mode 100644
index 0000000..6e41312
--- /dev/null
+++ b/test/files/pos/t4938.scala
@@ -0,0 +1,4 @@
+class A {
+ import scala.collection.mutable._
+ val xs = List(Set(), Seq())
+}
diff --git a/test/files/pos/t4957.scala b/test/files/pos/t4957.scala
new file mode 100644
index 0000000..7f037ee
--- /dev/null
+++ b/test/files/pos/t4957.scala
@@ -0,0 +1,89 @@
+// a.scala
+// Sat Oct 29 10:06:51 PDT 2011
+
+package simple
+
+import scala.{Double=>double, Int=>int}
+
+/**
+ * @author Christoph Radig
+ */
+
+trait Vector {
+
+ def xd: double
+ def yd: double
+}
+
+object Vector {
+
+ def apply(x: double, y: double) = Double(x, y)
+ def apply(x: int, y: int) = Int(x, y)
+
+ trait Companion[@specialized(double, int) T] {
+
+ type I <: Instance[T]
+
+ def apply(x: T, y: T): I // I (= this.type#I) or this.I?
+
+ lazy val zero: I = apply(numeric.zero, numeric.zero)
+
+ val numeric: Numeric[T]
+ }
+
+ trait Instance[@specialized(double, int) T] extends Vector {
+
+ type C <: Companion[T]
+ def companion: C
+
+ def numeric: Numeric[T] = companion.numeric
+
+ val x: T
+ val y: T
+
+ def xd = numeric.toDouble(x)
+ def yd = numeric.toDouble(y)
+
+ def + (that: C#I): C#I = companion(numeric.plus(this.x, that.x), numeric.plus(this.y, that.y))
+ def - (that: C#I): C#I = companion(numeric.minus(this.x, that.x), numeric.minus(this.y, that.y))
+
+ /**
+ * scalar multiplication
+ */
+ def * (scalar: T): C#I = companion(numeric.times(this.x, scalar), numeric.times(this.y, scalar))
+ }
+
+ object Double extends Companion[double] {
+
+ type I = Double
+
+ def apply(x: double, y: double) = new Double(x, y)
+
+ val numeric = Numeric.DoubleIsFractional
+ }
+
+ final class Double(val x: double, val y: double) extends Instance[double] {
+
+ type C = Double.type
+ def companion = Double
+
+ @inline override def xd = x
+ @inline override def yd = y
+ }
+
+
+ object Int extends Companion[int] {
+
+ type I = Int
+
+ def apply(x: int, y: int) = new Int(x, y)
+
+ val numeric = Numeric.IntIsIntegral
+ }
+
+ final class Int(val x: int, val y: int) extends Instance[int] {
+
+ type C = Int.type
+ def companion = Int
+ }
+}
diff --git a/test/files/pos/t4970.scala b/test/files/pos/t4970.scala
new file mode 100644
index 0000000..f2f284f
--- /dev/null
+++ b/test/files/pos/t4970.scala
@@ -0,0 +1,13 @@
+trait OuterClass[V <: OuterClass[V]#InnerClass] {
+ trait InnerClass {self: V =>
+ def method = ()
+ }
+}
+
+trait SubOuterClass[T <: SubOuterClass[T]#SubInnerClass] extends OuterClass[T] {
+ class SubInnerClass extends super.InnerClass {self: T => }
+}
+
+trait SubOuterClass2[T <: SubOuterClass2[T]#SubInnerClass2] extends OuterClass[T] {
+ class SubInnerClass2 extends super.InnerClass {self: InnerClass with T => }
+}
diff --git a/test/files/pos/t4975.scala b/test/files/pos/t4975.scala
new file mode 100644
index 0000000..12d889c
--- /dev/null
+++ b/test/files/pos/t4975.scala
@@ -0,0 +1,12 @@
+object ImplicitScope {
+ class A[T]
+
+ def foo {
+ trait B
+ object B {
+ implicit def ab = new A[B]
+ }
+
+ implicitly[A[B]] // Error
+ }
+}
diff --git a/test/files/pos/t5012.scala b/test/files/pos/t5012.scala
new file mode 100644
index 0000000..772b8f4
--- /dev/null
+++ b/test/files/pos/t5012.scala
@@ -0,0 +1,12 @@
+class D {
+ object p // (program point 1)
+}
+
+class C {
+ def m: D = {
+ if("abc".length == 0) {
+ object p // (program point 2)
+ }
+ null
+ }
+}
diff --git a/test/files/pos/t5020.scala b/test/files/pos/t5020.scala
new file mode 100644
index 0000000..06f7723
--- /dev/null
+++ b/test/files/pos/t5020.scala
@@ -0,0 +1,19 @@
+package a {
+ sealed trait GenericList[U, M[_ <: U]] {
+ type Transformed[N[MMA <: U]] <: GenericList[U, N]
+ }
+
+ trait GenericCons[U, M[_ <: U], T <: GenericList[U, M]] extends GenericList[U, M] {
+ type Transformed[N[MMB <: U]] = GenericCons[U, N, GenericList[U, M]#Transformed[N]]
+ }
+}
+
+package b {
+ sealed trait GenericList[L, M[_ >: L]] {
+ type Transformed[N[MMA >: L]] <: GenericList[L, N]
+ }
+
+ trait GenericCons[L, M[_ >: L], T <: GenericList[L, M]] extends GenericList[L, M] {
+ type Transformed[N[MMB >: L]] = GenericCons[L, N, T#Transformed[N]]
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t5029.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t5029.flags
diff --git a/test/files/pos/t5029.scala b/test/files/pos/t5029.scala
new file mode 100644
index 0000000..6f9a329
--- /dev/null
+++ b/test/files/pos/t5029.scala
@@ -0,0 +1,3 @@
+object Test {
+ (Vector(): Seq[_]) match { case List() => true; case Nil => false }
+}
\ No newline at end of file
diff --git a/test/files/pos/t5031/Id.scala b/test/files/pos/t5031/Id.scala
new file mode 100644
index 0000000..7bc3ebd
--- /dev/null
+++ b/test/files/pos/t5031/Id.scala
@@ -0,0 +1,4 @@
+package t5031
+
+object ID
+
diff --git a/test/files/pos/t5031/package.scala b/test/files/pos/t5031/package.scala
new file mode 100644
index 0000000..c02e69d
--- /dev/null
+++ b/test/files/pos/t5031/package.scala
@@ -0,0 +1,3 @@
+package object t5031 {
+ type ID = Int
+}
diff --git a/test/files/pos/t5031_2.scala b/test/files/pos/t5031_2.scala
new file mode 100644
index 0000000..ded3e82
--- /dev/null
+++ b/test/files/pos/t5031_2.scala
@@ -0,0 +1,7 @@
+package object t5031 {
+ class ID
+}
+
+package t5031 {
+ object ID
+}
diff --git a/test/files/pos/t5031_3/Foo_1.scala b/test/files/pos/t5031_3/Foo_1.scala
new file mode 100644
index 0000000..5934a6b
--- /dev/null
+++ b/test/files/pos/t5031_3/Foo_1.scala
@@ -0,0 +1,5 @@
+package foo.bar
+
+object Foo {
+ def bar = 42
+}
diff --git a/test/files/pos/t5031_3/Main_2.scala b/test/files/pos/t5031_3/Main_2.scala
new file mode 100644
index 0000000..2079460
--- /dev/null
+++ b/test/files/pos/t5031_3/Main_2.scala
@@ -0,0 +1,6 @@
+package org.example
+
+object Main extends App {
+ println(foo.bar.Foo.bar)
+}
+
diff --git a/test/files/pos/t5031_3/package.scala b/test/files/pos/t5031_3/package.scala
new file mode 100644
index 0000000..23fede7
--- /dev/null
+++ b/test/files/pos/t5031_3/package.scala
@@ -0,0 +1,6 @@
+package foo
+
+package object bar {
+ type Foo = Int => String
+}
+
diff --git a/test/files/pos/t5033.scala b/test/files/pos/t5033.scala
new file mode 100644
index 0000000..c4c3334
--- /dev/null
+++ b/test/files/pos/t5033.scala
@@ -0,0 +1,15 @@
+trait Eater {
+ type Food[T]
+}
+
+trait Fruit {
+ type Seed
+}
+
+trait PipExtractor {
+ def extract(a: Fruit)(b: Eater): b.Food[a.Seed]
+}
+
+trait LaserGuidedPipExtractor extends PipExtractor {
+ def extract(f: Fruit)(g: Eater): g.Food[f.Seed]
+}
\ No newline at end of file
diff --git a/test/files/pos/t5041.scala b/test/files/pos/t5041.scala
new file mode 100644
index 0000000..78a1b27
--- /dev/null
+++ b/test/files/pos/t5041.scala
@@ -0,0 +1,9 @@
+case class Token(text: String, startIndex: Int)
+
+object Comment {
+ def unapply(s: String): Option[Token] = None
+}
+
+object HiddenTokens {
+ "foo" match { case Comment(_) => }
+}
diff --git a/test/files/pos/t5071.scala b/test/files/pos/t5071.scala
new file mode 100644
index 0000000..44ad627
--- /dev/null
+++ b/test/files/pos/t5071.scala
@@ -0,0 +1,18 @@
+// abstract
+trait Foo[@specialized A, Repr] {
+ self: Repr =>
+}
+trait Bar[A] extends Foo[A, Object] { }
+class Baz extends Foo[Int, Baz] { }
+
+// concrete
+trait Bippy {
+ def f(x: Int) = 5
+}
+trait FooC[@specialized A] {
+ self: Bippy =>
+
+ f(10)
+}
+
+class BazC extends FooC[Int] with Bippy { }
diff --git a/test/files/pos/t5082.scala b/test/files/pos/t5082.scala
new file mode 100644
index 0000000..63eeda3
--- /dev/null
+++ b/test/files/pos/t5082.scala
@@ -0,0 +1,14 @@
+trait Something[T]
+object Test { class A }
+case class Test() extends Something[Test.A]
+
+object User {
+ val Test() = Test()
+}
+
+object Wrap {
+ trait Something[T]
+ object Test { class A }
+ case class Test(a: Int, b: Int)(c: String) extends Something[Test.A]
+ val Test(x, y) = Test(1, 2)(""); (x + y).toString
+}
diff --git a/test/files/pos/t5084.scala b/test/files/pos/t5084.scala
new file mode 100644
index 0000000..17d0a68
--- /dev/null
+++ b/test/files/pos/t5084.scala
@@ -0,0 +1,5 @@
+case class Search(tpe: Search.Value)
+
+object Search {
+ type Value = String
+}
diff --git a/test/files/pos/t5099.scala b/test/files/pos/t5099.scala
new file mode 100644
index 0000000..1781512
--- /dev/null
+++ b/test/files/pos/t5099.scala
@@ -0,0 +1,14 @@
+class LazyValVsFunctionType[a] {
+ val f: a => a = x => {
+ lazy val _x: a = throw new java.lang.Error("todo")
+ _x // error: type mismatch
+/*
+[error] found : a => => a
+[error] required: a => a
+[error] val f: a => a = x => {
+[error] ^
+[error] one error found
+*/
+ // _x: a // ok
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t5119.scala b/test/files/pos/t5119.scala
new file mode 100644
index 0000000..4a67244
--- /dev/null
+++ b/test/files/pos/t5119.scala
@@ -0,0 +1,13 @@
+import collection.mutable
+
+object Test {
+ class IMap0[K[_], V[_]](backing: Map[K[_], V[_]]) {
+ def mapSeparate[VL[_], VR[_]](f: V[_] => ({type l[T] = Either[VL[T], VR[T]]})#l[_] ) = {
+ backing.view.map { case (k,v) => f(v) match {
+ case Left(l) => Left((k, l))
+ case Right(r) => Right((k, r))
+ }
+ }
+ }
+ }
+}
diff --git a/test/files/pos/t5120.scala b/test/files/pos/t5120.scala
new file mode 100644
index 0000000..2c193d1
--- /dev/null
+++ b/test/files/pos/t5120.scala
@@ -0,0 +1,26 @@
+// An example extracted from SBT by Iulian
+// that showed that the previous fix to t5120
+// was too strict.
+class Test {
+ class ScopedKey[T]
+ class Value[T]
+
+ class Compiled[T](val settings: Seq[Pair[T]])
+
+ case class Pair[T](k: ScopedKey[T], v: ScopedKey[T])
+
+ def transform[T](x: T) = x
+
+ def test(compiledSettings: Seq[Compiled[_]]) = {
+ compiledSettings flatMap { cs => // cd: Compiled[_] in both versions
+ (cs.settings map { s => // cs.settings: Seq[Compiled[$1]] in trunk, Seq[Compiled[$1]] forSome $1 in 2.9.1
+ // s: Pair[$1] in trunk, Pair[$1] in 2.9.1
+ val t = transform(s.v) // t: ScopedKey[_] in trunk, ScopedKey[$1] in 2.9.1
+ foo(s.k, t)
+ t
+ }) : Seq[ScopedKey[_]]
+ }
+ }
+
+ def foo[T](x: ScopedKey[T], v: ScopedKey[T]) {}
+}
diff --git a/test/files/pos/t5127.scala b/test/files/pos/t5127.scala
new file mode 100644
index 0000000..e90b8d0
--- /dev/null
+++ b/test/files/pos/t5127.scala
@@ -0,0 +1,8 @@
+package foo {
+ trait Abstract1[C <: Abstract2[C]]
+ trait Abstract2[C <: Abstract2[C]] extends Abstract1[C]
+ class Parametrized1[T] extends Abstract1[Parametrized2[T]] {
+ def bar(a: AnyRef) { a match { case d: Parametrized1[_] => println("ok") } }
+ }
+ class Parametrized2[T] extends Parametrized1[T] with Abstract2[Parametrized2[T]]
+}
diff --git a/test/files/pos/t5130.scala b/test/files/pos/t5130.scala
new file mode 100644
index 0000000..676d3c7
--- /dev/null
+++ b/test/files/pos/t5130.scala
@@ -0,0 +1,46 @@
+import scala.language.reflectiveCalls
+
+class A {
+ this_a =>
+
+ def b = new B
+ class B { def a: this_a.type = this_a }
+}
+trait A2 { def c = () }
+
+object Test {
+ val v1 = new A { def c = () }
+ val v2 = new A with A2 { }
+ val v3: A { def c: Unit } = null
+ def d1 = new A { def c = () }
+ def d2 = new A with A2 { }
+ def d3: A { def c: Unit } = null
+ var x1 = new A { def c = () }
+ var x2 = new A with A2 { }
+ var x3: A { def c: Unit } = null
+
+ def main(args: Array[String]): Unit = {
+ val mv1 = new A { def c = () }
+ val mv2 = new A with A2 { }
+ val mv3: A { def c: Unit } = null
+ def md1 = new A { def c = () }
+ def md2 = new A with A2 { }
+ def md3: A { def c: Unit } = null
+
+ v1.b.a.c
+ v2.b.a.c
+ v3.b.a.c
+ d1.b.a.c
+ d2.b.a.c
+ d3.b.a.c
+ x1.b.a.c
+ x2.b.a.c
+ x3.b.a.c
+ mv1.b.a.c
+ mv2.b.a.c
+ mv3.b.a.c
+ md1.b.a.c
+ md2.b.a.c
+ md3.b.a.c
+ }
+}
diff --git a/test/files/pos/t5137.scala b/test/files/pos/t5137.scala
new file mode 100644
index 0000000..bb72cf3
--- /dev/null
+++ b/test/files/pos/t5137.scala
@@ -0,0 +1,17 @@
+object Test {
+
+ // okay
+ (1 * (List[BigInt]().map(((x0) => x0 match {
+ case x => x
+ })).sum))
+
+ // okay
+ ((1: BigInt) * (List[BigInt]().map({
+ case x => x
+ }).sum))
+
+ // fail
+ (1 * (List[BigInt]().map({
+ case x => x
+ }).sum))
+}
\ No newline at end of file
diff --git a/test/files/pos/bug514.scala b/test/files/pos/t514.scala
similarity index 100%
rename from test/files/pos/bug514.scala
rename to test/files/pos/t514.scala
diff --git a/test/files/pos/t5156.scala b/test/files/pos/t5156.scala
new file mode 100644
index 0000000..e7912ef
--- /dev/null
+++ b/test/files/pos/t5156.scala
@@ -0,0 +1,21 @@
+sealed trait HList
+final case class HCons[H, T <: HList](head : H, tail : T) extends HList
+case object HNil extends HList
+
+object HList {
+ type ::[H, T <: HList] = HCons[H, T]
+ type HNil = HNil.type
+
+ implicit def hlistOps[L <: HList](l : L) = new {
+ def ::[H](h : H) : H :: L = HCons(h, l)
+ def last(implicit last : Last[L]) {}
+ }
+
+ class Last[L <: HList]
+ implicit def hsingleLast[H] = new Last[H :: HNil]
+ implicit def hlistLast[H, T <: HList](implicit lt : Last[T]) = new Last[H :: T]
+
+ type III = Int :: Int :: Int :: HNil
+ val iii : III = 0 :: 0 :: 0 :: HNil
+ val l = iii.last
+}
diff --git a/test/files/pos/bug516.scala b/test/files/pos/t516.scala
similarity index 100%
rename from test/files/pos/bug516.scala
rename to test/files/pos/t516.scala
diff --git a/test/files/pos/t5165/TestAnnotation.java b/test/files/pos/t5165/TestAnnotation.java
new file mode 100644
index 0000000..90886b7
--- /dev/null
+++ b/test/files/pos/t5165/TestAnnotation.java
@@ -0,0 +1,11 @@
+import java.lang.annotation.*;
+
+ at Retention(RetentionPolicy.RUNTIME)
+public @interface TestAnnotation {
+ public enum TestEnumOne { A, B }
+ public enum TestEnumTwo { C, D }
+
+ public TestEnumOne one();
+ public TestEnumTwo two();
+ public String strVal();
+}
diff --git a/test/files/pos/t5165/TestObject.scala b/test/files/pos/t5165/TestObject.scala
new file mode 100644
index 0000000..eaf244e
--- /dev/null
+++ b/test/files/pos/t5165/TestObject.scala
@@ -0,0 +1,3 @@
+
+object TestObject extends TestTrait
+
diff --git a/test/files/pos/t5165/TestTrait.scala b/test/files/pos/t5165/TestTrait.scala
new file mode 100644
index 0000000..b317e6c
--- /dev/null
+++ b/test/files/pos/t5165/TestTrait.scala
@@ -0,0 +1,3 @@
+
+ at TestAnnotation(one=TestAnnotation.TestEnumOne.A, two=TestAnnotation.TestEnumTwo.C, strVal="something")
+trait TestTrait
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t5175.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t5175.flags
diff --git a/test/files/pos/t5175.scala b/test/files/pos/t5175.scala
new file mode 100644
index 0000000..e15cc3a
--- /dev/null
+++ b/test/files/pos/t5175.scala
@@ -0,0 +1,9 @@
+object Test {
+ def ==(p: Phase): Int = 0
+
+ def foo {
+ ==(new Phase())
+ }
+}
+
+class Phase
diff --git a/test/files/pos/t5178.scala b/test/files/pos/t5178.scala
new file mode 100644
index 0000000..26c008d
--- /dev/null
+++ b/test/files/pos/t5178.scala
@@ -0,0 +1,11 @@
+abstract class FileOps {
+ def withLock[R](start: Long = 0): Option[R]
+}
+
+trait DefaultFileOps {
+ self: DefaultPath =>
+
+ override def withLock[R](start: Long = 5): Option[R] = None
+}
+
+class DefaultPath extends FileOps with DefaultFileOps { }
diff --git a/test/files/pos/t5198.scala b/test/files/pos/t5198.scala
new file mode 100644
index 0000000..f403f77
--- /dev/null
+++ b/test/files/pos/t5198.scala
@@ -0,0 +1,15 @@
+package gaga
+
+
+
+
+
+trait Sys[Self <: Sys[Self]] {
+ type Tx
+}
+
+
+sealed trait AssocEntry[S <: Sys[S], @specialized(Int) A] {
+ def value: A
+ def value(implicit tx: S#Tx): A
+}
diff --git a/test/files/pos/t5210.scala b/test/files/pos/t5210.scala
new file mode 100644
index 0000000..e85037a
--- /dev/null
+++ b/test/files/pos/t5210.scala
@@ -0,0 +1,10 @@
+object WithOpTest {
+ trait WithOp extends Cloneable {
+ def f: this.type = this
+ def g1: this.type = f
+ def g2: this.type = {
+ val t = f
+ t
+ }
+ }
+}
diff --git a/test/files/pos/bug522.scala b/test/files/pos/t522.scala
similarity index 100%
rename from test/files/pos/bug522.scala
rename to test/files/pos/t522.scala
diff --git a/test/files/pos/t5223.scala b/test/files/pos/t5223.scala
new file mode 100644
index 0000000..0b2528e
--- /dev/null
+++ b/test/files/pos/t5223.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Foo extends App {
+ reify{def printf(format: String, args: Any*): String = null }
+ reify{def printf(format: String, args: Any*): String = ("abc": @cloneable)}
+}
\ No newline at end of file
diff --git a/test/files/pos/t5240.scala b/test/files/pos/t5240.scala
new file mode 100644
index 0000000..2db689c
--- /dev/null
+++ b/test/files/pos/t5240.scala
@@ -0,0 +1,11 @@
+
+
+
+
+
+
+package object foo {
+
+ var labels: Array[_ <: String] = null
+
+}
diff --git a/test/files/pos/t5245.scala b/test/files/pos/t5245.scala
new file mode 100644
index 0000000..763be9e
--- /dev/null
+++ b/test/files/pos/t5245.scala
@@ -0,0 +1,3 @@
+object Foo {
+ def bar = { var x = (); def foo() = x }
+}
\ No newline at end of file
diff --git a/test/files/pos/t5259.scala b/test/files/pos/t5259.scala
new file mode 100644
index 0000000..d33c4dd
--- /dev/null
+++ b/test/files/pos/t5259.scala
@@ -0,0 +1,21 @@
+class A[T]
+class B {
+ def m(a: A[this.type] = new A[this.type]) { }
+}
+
+class C {
+ def foo(a: Int, b: Int = 0) = 0
+ def foo() = 0
+}
+
+object Test {
+ def newB = new B
+ newB.m()
+
+ val stableB = new B
+ stableB.m()
+
+ def f {
+ println((new C).foo(0))
+ }
+}
diff --git a/test/files/pos/bug530.scala b/test/files/pos/t530.scala
similarity index 100%
rename from test/files/pos/bug530.scala
rename to test/files/pos/t530.scala
diff --git a/test/files/pos/t5305.scala b/test/files/pos/t5305.scala
new file mode 100644
index 0000000..c0237ca
--- /dev/null
+++ b/test/files/pos/t5305.scala
@@ -0,0 +1,13 @@
+object t5305 {
+ def in(a: Any) = {}
+
+ object O {
+ type F = Int
+ val v = ""
+ }
+
+ in {
+ import O.{F, v}
+ type x = {type l = (F, v.type)} // not found: type F
+ }
+}
diff --git a/test/files/pos/t531.scala b/test/files/pos/t531.scala
new file mode 100644
index 0000000..d53539f
--- /dev/null
+++ b/test/files/pos/t531.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def titi = {
+ var truc = 0
+ val tata = reify{() => {
+ truc = 6
+ }}
+ ()
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t5313.scala b/test/files/pos/t5313.scala
new file mode 100644
index 0000000..e77b73c
--- /dev/null
+++ b/test/files/pos/t5313.scala
@@ -0,0 +1,30 @@
+object DepBug {
+ class A {
+ class B
+ def mkB = new B
+ def m(b : B) = b
+ }
+
+ trait Dep {
+ val a : A
+ val b : a.B
+ }
+
+ val dep = new Dep {
+ val a = new A
+ val b = a.mkB
+ }
+
+ def useDep(d : Dep) {
+ import d._
+ a.m(b) // OK
+ }
+
+ {
+ import dep._
+ a.m(b) // OK with 2.9.1.final, error on trunk
+ }
+
+ dep.a.m(dep.b)
+
+}
diff --git a/test/files/pos/t5317.scala b/test/files/pos/t5317.scala
new file mode 100644
index 0000000..8c9c9d8
--- /dev/null
+++ b/test/files/pos/t5317.scala
@@ -0,0 +1,12 @@
+object Test {
+ trait S { type T; val x: AnyRef }
+ trait A extends S { type T <: A; val x: A = null }
+ trait B extends S { type T <: B; val x: B = null }
+
+ val a = new A{}
+ val b = new B{}
+ val y = if (true) a else b
+
+ // lub of y should allow for this
+ println(y.x.x)
+}
diff --git a/test/files/pos/t532.scala b/test/files/pos/t532.scala
new file mode 100644
index 0000000..7c33637
--- /dev/null
+++ b/test/files/pos/t532.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def titi: Unit = {
+ var truc = 0
+ val tata = reify{() => {
+ truc = truc + 6
+ }}
+ ()
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug533.scala b/test/files/pos/t533.scala
similarity index 100%
rename from test/files/pos/bug533.scala
rename to test/files/pos/t533.scala
diff --git a/test/files/pos/t5330.scala b/test/files/pos/t5330.scala
new file mode 100644
index 0000000..813acd4
--- /dev/null
+++ b/test/files/pos/t5330.scala
@@ -0,0 +1,22 @@
+trait FM[A] {
+ def map(f: A => Any)
+}
+
+trait M[A] extends FM[A] {
+ def map(f: A => Any)
+}
+
+trait N[A] extends FM[A]
+
+object test {
+ def kaboom(xs: M[_]) = xs map (x => ()) // missing parameter type.
+
+ def okay1[A](xs: M[A]) = xs map (x => ())
+ def okay2(xs: FM[_]) = xs map (x => ())
+ def okay3(xs: N[_]) = xs map (x => ())
+}
+
+class CC2(xs: List[_]) {
+ def f(x1: Any, x2: Any) = null
+ def g = xs map (x => f(x, x))
+}
diff --git a/test/files/pos/t5330b.scala b/test/files/pos/t5330b.scala
new file mode 100644
index 0000000..dbeb165
--- /dev/null
+++ b/test/files/pos/t5330b.scala
@@ -0,0 +1,6 @@
+abstract trait Base {
+ def foo: this.type
+};
+class Derived[T] extends Base {
+ def foo: Nothing = sys.error("!!!")
+}
diff --git a/test/files/pos/t5330c.scala b/test/files/pos/t5330c.scala
new file mode 100644
index 0000000..af31f3d
--- /dev/null
+++ b/test/files/pos/t5330c.scala
@@ -0,0 +1,5 @@
+object t5330c {
+ val s: Set[_ >: Char] = Set('A')
+ s forall ("ABC" contains _)
+ s.forall( c => "ABC".toSeq.contains( c ))
+}
diff --git a/test/files/pos/t5359.scala b/test/files/pos/t5359.scala
new file mode 100644
index 0000000..c22b2b1
--- /dev/null
+++ b/test/files/pos/t5359.scala
@@ -0,0 +1,17 @@
+// /scala/trac/5359/a.scala
+// Thu Jan 5 13:31:05 PST 2012
+
+object test {
+ trait Step[F[_]] {
+ // crash: typeConstructor inapplicable for <none>
+ this match {
+ case S1() =>
+ }
+ }
+ case class S1[F[_]]() extends Step[F]
+
+ // okay
+ (null: Step[Option]) match {
+ case S1() =>
+ }
+}
diff --git a/test/files/pos/t5384.scala b/test/files/pos/t5384.scala
new file mode 100644
index 0000000..4e297d5
--- /dev/null
+++ b/test/files/pos/t5384.scala
@@ -0,0 +1,7 @@
+class A(x: String, y: Int)(implicit o: String)
+class B1(implicit o: String) extends A(y = 5, x = "a")
+class B2(implicit o: String) extends A("a", 5)
+class B3(implicit o: String) extends A(y = 5, x = "a")(o)
+
+class AM[E: Manifest](val x: Unit = (), y: Unit)
+class BM[E: Manifest] extends AM[E](y = ())
diff --git a/test/files/pos/t5390.scala b/test/files/pos/t5390.scala
new file mode 100644
index 0000000..36febb6
--- /dev/null
+++ b/test/files/pos/t5390.scala
@@ -0,0 +1,11 @@
+class A {
+ case class B[A](s: String)
+}
+
+object X {
+ def foo {
+ val a = new A
+ val b = new a.B[c.type]("") // not a forward reference
+ val c = ""
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t5399.scala b/test/files/pos/t5399.scala
new file mode 100644
index 0000000..ebae7db
--- /dev/null
+++ b/test/files/pos/t5399.scala
@@ -0,0 +1,45 @@
+class Test {
+ class A[T]
+ class B[T](val a: A[T])
+
+ case class CaseClass[T](x: T)
+
+ def break(existB: B[_]) =
+ CaseClass(existB.a) match { case CaseClass(_) => }
+}
+
+class Foo {
+ trait Init[T]
+ class ScopedKey[T] extends Init[T]
+
+ trait Setting[T] {
+ val key: ScopedKey[T]
+ }
+
+ case class ScopedKey1[T](val foo: Init[T]) extends ScopedKey[T]
+
+ val scalaHome: Setting[Option[String]] = null
+ val scalaVersion: Setting[String] = null
+
+ def testPatternMatch(s: Setting[_]) {
+ s.key match {
+ case ScopedKey1(scalaHome.key | scalaVersion.key) => ()
+ }
+ }
+}
+
+class Test2 {
+ type AnyCyclic = Execute[Task]#CyclicException[_]
+
+ trait Task[T]
+
+ trait Execute[A[_] <: AnyRef] {
+ class CyclicException[T](val caller: A[T], val target: A[T])
+ }
+
+ def convertCyclic(c: AnyCyclic): String =
+ (c.caller, c.target) match {
+ case (caller: Task[_], target: Task[_]) => "bazinga!"
+ }
+}
+
diff --git a/test/files/pos/t5406.scala b/test/files/pos/t5406.scala
new file mode 100644
index 0000000..c2e42c0
--- /dev/null
+++ b/test/files/pos/t5406.scala
@@ -0,0 +1,4 @@
+object Wuffles { }
+object Test {
+ def f = (Some(Wuffles): Option[Wuffles.type]) match { case Some(Wuffles) => println("Woof"); case _ => println("Meow") }
+}
diff --git a/test/files/pos/t5444.scala b/test/files/pos/t5444.scala
new file mode 100644
index 0000000..df6b2ce
--- /dev/null
+++ b/test/files/pos/t5444.scala
@@ -0,0 +1,42 @@
+// /scala/trac/5444/a.scala
+// Mon Feb 13 21:01:45 PST 2012
+
+// Traits require identical names to reproduce.
+class Test {
+ def a() = {
+ trait T {
+ def x() = 1
+ }
+ trait U {
+ def x1() = 2
+ }
+ class Bippy extends T with U { def z() = x() + x1() }
+ new Bippy
+ }
+ def b() {
+ trait T {
+ def y() = 3
+ trait T2 {
+ def yy() = 10
+ }
+ }
+ trait U {
+ def y1() = 4
+ trait T3 {
+ def yy() = 11
+ }
+ }
+ class Bippy extends T with U { def z() = y() + y1() + (1 to (new T2 { }).yy()).map(_ + 1).sum }
+ (new Bippy).z()
+ }
+ def c() {
+ trait T {
+ def z() = 5
+ }
+ trait U {
+ def z1() = 6
+ }
+ (new Test with T with U).z1()
+ }
+}
+
diff --git a/test/files/pos/t5504/s_1.scala b/test/files/pos/t5504/s_1.scala
new file mode 100644
index 0000000..35cb2c8
--- /dev/null
+++ b/test/files/pos/t5504/s_1.scala
@@ -0,0 +1,4 @@
+// a.scala
+package object foo {
+ val m: List[_] = Nil
+}
diff --git a/test/files/pos/t5504/s_2.scala b/test/files/pos/t5504/s_2.scala
new file mode 100644
index 0000000..03eecf6
--- /dev/null
+++ b/test/files/pos/t5504/s_2.scala
@@ -0,0 +1,8 @@
+// b.scala
+package foo
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(foo.m)
+ }
+}
diff --git a/test/files/pos/t5534.scala b/test/files/pos/t5541.scala
similarity index 100%
rename from test/files/pos/t5534.scala
rename to test/files/pos/t5541.scala
diff --git a/test/files/pos/t5542.flags b/test/files/pos/t5542.flags
new file mode 100644
index 0000000..464cc20
--- /dev/null
+++ b/test/files/pos/t5542.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -unchecked
\ No newline at end of file
diff --git a/test/files/pos/t5542.scala b/test/files/pos/t5542.scala
new file mode 100644
index 0000000..80b8cef
--- /dev/null
+++ b/test/files/pos/t5542.scala
@@ -0,0 +1,3 @@
+class Test {
+ Option(3) match { case Some(n) => n; case None => 0 }
+}
\ No newline at end of file
diff --git a/test/files/pos/t5545/S_1.scala b/test/files/pos/t5545/S_1.scala
new file mode 100644
index 0000000..59ec1fd
--- /dev/null
+++ b/test/files/pos/t5545/S_1.scala
@@ -0,0 +1,4 @@
+trait F[@specialized(Int) T1, R] {
+ def f(v1: T1): R
+ def g = v1 => f(v1)
+}
diff --git a/test/files/pos/t5545/S_2.scala b/test/files/pos/t5545/S_2.scala
new file mode 100644
index 0000000..59ec1fd
--- /dev/null
+++ b/test/files/pos/t5545/S_2.scala
@@ -0,0 +1,4 @@
+trait F[@specialized(Int) T1, R] {
+ def f(v1: T1): R
+ def g = v1 => f(v1)
+}
diff --git a/test/files/pos/t5546.scala b/test/files/pos/t5546.scala
new file mode 100644
index 0000000..4b0b058
--- /dev/null
+++ b/test/files/pos/t5546.scala
@@ -0,0 +1 @@
+class A { def foo: Class[_ <: A] = getClass }
\ No newline at end of file
diff --git a/test/files/pos/t5580b.scala b/test/files/pos/t5580b.scala
new file mode 100644
index 0000000..d5a4a0a
--- /dev/null
+++ b/test/files/pos/t5580b.scala
@@ -0,0 +1,19 @@
+/** It's a pos test because it does indeed compile,
+ * not so much because I'm glad it does. Testing
+ * that error messages created and discarded during
+ * implicit search don't blow it up.
+ */
+
+import scala.collection.mutable.WeakHashMap
+import scala.collection.JavaConversions._
+
+class bar { }
+
+class foo {
+ val map = WeakHashMap[AnyRef, collection.mutable.Map[bar, collection.mutable.Set[bar]]]()
+
+ def test={
+ val tmp:bar=null
+ if (map.get(tmp).isEmpty) map.put(tmp,collection.mutable.Set())
+ }
+}
diff --git a/test/files/pos/t5604/ReplConfig.scala b/test/files/pos/t5604/ReplConfig.scala
new file mode 100644
index 0000000..8c589eb
--- /dev/null
+++ b/test/files/pos/t5604/ReplConfig.scala
@@ -0,0 +1,53 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2011 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import util.Exceptional.unwrap
+import util.stackTraceString
+
+trait ReplConfig {
+ lazy val replProps = new ReplProps
+
+ class TapMaker[T](x: T) {
+ def tapInfo(msg: => String): T = tap(x => replinfo(parens(x)))
+ def tapDebug(msg: => String): T = tap(x => repldbg(parens(x)))
+ def tapTrace(msg: => String): T = tap(x => repltrace(parens(x)))
+ def tap[U](f: T => U): T = {
+ f(x)
+ x
+ }
+ }
+
+ private def parens(x: Any) = "(" + x + ")"
+ private def echo(msg: => String) =
+ try Console println msg
+ catch { case x: AssertionError => Console.println("Assertion error printing debugging output: " + x) }
+
+ private[nsc] def repldbg(msg: => String) = if (isReplDebug) echo(msg)
+ private[nsc] def repltrace(msg: => String) = if (isReplTrace) echo(msg)
+ private[nsc] def replinfo(msg: => String) = if (isReplInfo) echo(msg)
+
+ private[nsc] def logAndDiscard[T](label: String, alt: => T): PartialFunction[Throwable, T] = {
+ case t =>
+ repldbg(label + ": " + unwrap(t))
+ repltrace(stackTraceString(unwrap(t)))
+ alt
+ }
+ private[nsc] def substituteAndLog[T](alt: => T)(body: => T): T =
+ substituteAndLog("" + alt, alt)(body)
+ private[nsc] def substituteAndLog[T](label: String, alt: => T)(body: => T): T = {
+ try body
+ catch logAndDiscard(label, alt)
+ }
+ private[nsc] def squashAndLog(label: String)(body: => Unit): Unit =
+ substituteAndLog(label, ())(body)
+
+ def isReplTrace: Boolean = replProps.trace
+ def isReplDebug: Boolean = replProps.debug || isReplTrace
+ def isReplInfo: Boolean = replProps.info || isReplDebug
+ def isReplPower: Boolean = replProps.power
+}
diff --git a/test/files/pos/t5604/ReplReporter.scala b/test/files/pos/t5604/ReplReporter.scala
new file mode 100644
index 0000000..130af99
--- /dev/null
+++ b/test/files/pos/t5604/ReplReporter.scala
@@ -0,0 +1,30 @@
+/* NSC -- new Scala compiler
+ * Copyright 2002-2011 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import reporters._
+import IMain._
+
+class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.in, new ReplStrippingWriter(intp)) {
+ override def printMessage(msg: String) {
+ // Avoiding deadlock if the compiler starts logging before
+ // the lazy val is complete.
+ if (intp.isInitializeComplete) {
+ if (intp.totalSilence) {
+ if (isReplTrace)
+ super.printMessage("[silent] " + msg)
+ }
+ else super.printMessage(msg)
+ }
+ else Console.println("[init] " + msg)
+ }
+
+ override def displayPrompt() {
+ if (intp.totalSilence) ()
+ else super.displayPrompt()
+ }
+}
diff --git a/test/files/pos/t5604b/T_1.scala b/test/files/pos/t5604b/T_1.scala
new file mode 100644
index 0000000..179dcb1
--- /dev/null
+++ b/test/files/pos/t5604b/T_1.scala
@@ -0,0 +1,6 @@
+// sandbox/t5604/T.scala
+package t6504
+
+trait T {
+ def foo: Boolean = false
+}
diff --git a/test/files/pos/t5604b/T_2.scala b/test/files/pos/t5604b/T_2.scala
new file mode 100644
index 0000000..179dcb1
--- /dev/null
+++ b/test/files/pos/t5604b/T_2.scala
@@ -0,0 +1,6 @@
+// sandbox/t5604/T.scala
+package t6504
+
+trait T {
+ def foo: Boolean = false
+}
diff --git a/test/files/pos/t5604b/Test_1.scala b/test/files/pos/t5604b/Test_1.scala
new file mode 100644
index 0000000..f7c58eb
--- /dev/null
+++ b/test/files/pos/t5604b/Test_1.scala
@@ -0,0 +1,7 @@
+// sandbox/t5604/Test.scala
+package t6504
+
+object Test {
+ def blerg1(a: Any): Any = if (foo) blerg1(0)
+ def blerg2(a: Any): Any = if (t6504.foo) blerg2(0)
+}
diff --git a/test/files/pos/t5604b/Test_2.scala b/test/files/pos/t5604b/Test_2.scala
new file mode 100644
index 0000000..f7c58eb
--- /dev/null
+++ b/test/files/pos/t5604b/Test_2.scala
@@ -0,0 +1,7 @@
+// sandbox/t5604/Test.scala
+package t6504
+
+object Test {
+ def blerg1(a: Any): Any = if (foo) blerg1(0)
+ def blerg2(a: Any): Any = if (t6504.foo) blerg2(0)
+}
diff --git a/test/files/pos/t5604b/pack_1.scala b/test/files/pos/t5604b/pack_1.scala
new file mode 100644
index 0000000..f50d568
--- /dev/null
+++ b/test/files/pos/t5604b/pack_1.scala
@@ -0,0 +1,5 @@
+// sandbox/t5604/pack.scala
+package t6504
+
+object `package` extends T {
+}
diff --git a/test/files/pos/t5626.scala b/test/files/pos/t5626.scala
new file mode 100644
index 0000000..c501dfb
--- /dev/null
+++ b/test/files/pos/t5626.scala
@@ -0,0 +1,12 @@
+class C {
+ val blob = {
+ new { case class Foo() }
+ }
+ val blub = {
+ class Inner { case class Foo() }
+ new Inner
+ }
+
+ val foo = blob.Foo()
+ val bar = blub.Foo()
+}
diff --git a/test/files/pos/t5644/BoxesRunTime.java b/test/files/pos/t5644/BoxesRunTime.java
new file mode 100644
index 0000000..241bf79
--- /dev/null
+++ b/test/files/pos/t5644/BoxesRunTime.java
@@ -0,0 +1,836 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.runtime;
+
+import java.io.*;
+import scala.math.ScalaNumber;
+
+/** An object (static class) that defines methods used for creating,
+ * reverting, and calculating with, boxed values. There are four classes
+ * of methods in this object:
+ * - Convenience boxing methods which call the static valueOf method
+ * on the boxed class, thus utilizing the JVM boxing cache.
+ * - Convenience unboxing methods returning default value on null.
+ * - The generalised comparison method to be used when an object may
+ * be a boxed value.
+ * - Standard value operators for boxed number and quasi-number values.
+ *
+ * @author Gilles Dubochet
+ * @author Martin Odersky
+ * @contributor Stepan Koltsov
+ * @version 2.0 */
+public final class BoxesRunTime
+{
+ private static final int CHAR = 0, BYTE = 1, SHORT = 2, INT = 3, LONG = 4, FLOAT = 5, DOUBLE = 6, OTHER = 7;
+
+ /** We don't need to return BYTE and SHORT, as everything which might
+ * care widens to INT.
+ */
+ private static int typeCode(Object a) {
+ if (a instanceof java.lang.Integer) return INT;
+ if (a instanceof java.lang.Double) return DOUBLE;
+ if (a instanceof java.lang.Long) return LONG;
+ if (a instanceof java.lang.Character) return CHAR;
+ if (a instanceof java.lang.Float) return FLOAT;
+ if ((a instanceof java.lang.Byte) || (a instanceof java.lang.Short)) return INT;
+ return OTHER;
+ }
+
+ private static int eqTypeCode(Number a) {
+ int code = typeCode(a);
+ if (code == CHAR)
+ return OTHER;
+ else
+ return code;
+ }
+
+ private static String boxDescription(Object a) {
+ return "" + a.getClass().getSimpleName() + "(" + a + ")";
+ }
+
+/* BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING */
+
+ public static java.lang.Boolean boxToBoolean(boolean b) {
+ return java.lang.Boolean.valueOf(b);
+ }
+
+ public static java.lang.Character boxToCharacter(char c) {
+ return java.lang.Character.valueOf(c);
+ }
+
+ public static java.lang.Byte boxToByte(byte b) {
+ return java.lang.Byte.valueOf(b);
+ }
+
+ public static java.lang.Short boxToShort(short s) {
+ return java.lang.Short.valueOf(s);
+ }
+
+ public static java.lang.Integer boxToInteger(int i) {
+ return java.lang.Integer.valueOf(i);
+ }
+
+ public static java.lang.Long boxToLong(long l) {
+ return java.lang.Long.valueOf(l);
+ }
+
+ public static java.lang.Float boxToFloat(float f) {
+ return java.lang.Float.valueOf(f);
+ }
+
+ public static java.lang.Double boxToDouble(double d) {
+ // System.out.println("box " + d);
+ // (new Throwable()).printStackTrace();
+ return java.lang.Double.valueOf(d);
+ }
+
+/* UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING */
+
+ public static boolean unboxToBoolean(Object b) {
+ return b == null ? false : ((java.lang.Boolean)b).booleanValue();
+ }
+
+ public static char unboxToChar(Object c) {
+ return c == null ? 0 : ((java.lang.Character)c).charValue();
+ }
+
+ public static byte unboxToByte(Object b) {
+ return b == null ? 0 : ((java.lang.Byte)b).byteValue();
+ }
+
+ public static short unboxToShort(Object s) {
+ return s == null ? 0 : ((java.lang.Short)s).shortValue();
+ }
+
+ public static int unboxToInt(Object i) {
+ return i == null ? 0 : ((java.lang.Integer)i).intValue();
+ }
+
+ public static long unboxToLong(Object l) {
+ return l == null ? 0 : ((java.lang.Long)l).longValue();
+ }
+
+ public static float unboxToFloat(Object f) {
+ return f == null ? 0.0f : ((java.lang.Float)f).floatValue();
+ }
+
+ public static double unboxToDouble(Object d) {
+ // System.out.println("unbox " + d);
+ return d == null ? 0.0d : ((java.lang.Double)d).doubleValue();
+ }
+
+ /* COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON */
+
+ public static boolean equals(Object x, Object y) {
+ if (x == y) return true;
+ return equals2(x, y);
+ }
+
+ /** Since all applicable logic has to be present in the equals method of a ScalaNumber
+ * in any case, we dispatch to it as soon as we spot one on either side.
+ */
+ public static boolean equals2(Object x, Object y) {
+ if (x instanceof java.lang.Number)
+ return equalsNumObject((java.lang.Number)x, y);
+ if (x instanceof java.lang.Character)
+ return equalsCharObject((java.lang.Character)x, y);
+ if (x == null)
+ return y == null;
+
+ return x.equals(y);
+ }
+
+ public static boolean equalsNumObject(java.lang.Number xn, Object y) {
+ if (y instanceof java.lang.Number)
+ return equalsNumNum(xn, (java.lang.Number)y);
+ if (y instanceof java.lang.Character)
+ return equalsNumChar(xn, (java.lang.Character)y);
+ if (xn == null)
+ return y == null;
+
+ return xn.equals(y);
+ }
+
+ public static boolean equalsNumNum(java.lang.Number xn, java.lang.Number yn) {
+ int xcode = eqTypeCode(xn);
+ int ycode = eqTypeCode(yn);
+ switch (ycode > xcode ? ycode : xcode) {
+ case INT:
+ return xn.intValue() == yn.intValue();
+ case LONG:
+ return xn.longValue() == yn.longValue();
+ case FLOAT:
+ return xn.floatValue() == yn.floatValue();
+ case DOUBLE:
+ return xn.doubleValue() == yn.doubleValue();
+ default:
+ if ((yn instanceof ScalaNumber) && !(xn instanceof ScalaNumber))
+ return yn.equals(xn);
+ }
+ if (xn == null)
+ return yn == null;
+
+ return xn.equals(yn);
+ }
+
+ public static boolean equalsCharObject(java.lang.Character xc, Object y) {
+ if (y instanceof java.lang.Character)
+ return xc.charValue() == ((java.lang.Character)y).charValue();
+ if (y instanceof java.lang.Number)
+ return equalsNumChar((java.lang.Number)y, xc);
+ if (xc == null)
+ return y == null;
+
+ return xc.equals(y);
+ }
+
+ private static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) {
+ if (yc == null)
+ return xn == null;
+
+ char ch = yc.charValue();
+ switch (eqTypeCode(xn)) {
+ case INT:
+ return xn.intValue() == ch;
+ case LONG:
+ return xn.longValue() == ch;
+ case FLOAT:
+ return xn.floatValue() == ch;
+ case DOUBLE:
+ return xn.doubleValue() == ch;
+ default:
+ return xn.equals(yc);
+ }
+ }
+
+ /** Hashcode algorithm is driven by the requirements imposed
+ * by primitive equality semantics, namely that equal objects
+ * have equal hashCodes. The first priority are the integral/char
+ * types, which already have the same hashCodes for the same
+ * values except for Long. So Long's hashCode is altered to
+ * conform to Int's for all values in Int's range.
+ *
+ * Float is problematic because it's far too small to hold
+ * all the Ints, so for instance Int.MaxValue.toFloat claims
+ * to be == to each of the largest 64 Ints. There is no way
+ * to preserve equals/hashCode alignment without compromising
+ * the hashCode distribution, so Floats are only guaranteed
+ * to have the same hashCode for whole Floats in the range
+ * Short.MinValue to Short.MaxValue (2^16 total.)
+ *
+ * Double has its hashCode altered to match the entire Int range,
+ * but is not guaranteed beyond that. (But could/should it be?
+ * The hashCode is only 32 bits so this is a more tractable
+ * issue than Float's, but it might be better simply to exclude it.)
+ *
+ * Note: BigInt and BigDecimal, being arbitrary precision, could
+ * be made consistent with all other types for the Int range, but
+ * as yet have not.
+ *
+ * Note: Among primitives, Float.NaN != Float.NaN, but the boxed
+ * verisons are equal. This still needs reconciliation.
+ */
+ public static int hashFromLong(java.lang.Long n) {
+ int iv = n.intValue();
+ if (iv == n.longValue()) return iv;
+ else return n.hashCode();
+ }
+ public static int hashFromDouble(java.lang.Double n) {
+ int iv = n.intValue();
+ double dv = n.doubleValue();
+ if (iv == dv) return iv;
+
+ long lv = n.longValue();
+ if (lv == dv) return java.lang.Long.valueOf(lv).hashCode();
+ else return n.hashCode();
+ }
+ public static int hashFromFloat(java.lang.Float n) {
+ int iv = n.intValue();
+ float fv = n.floatValue();
+ if (iv == fv) return iv;
+
+ long lv = n.longValue();
+ if (lv == fv) return java.lang.Long.valueOf(lv).hashCode();
+ else return n.hashCode();
+ }
+ public static int hashFromNumber(java.lang.Number n) {
+ if (n instanceof java.lang.Long) return hashFromLong((java.lang.Long)n);
+ else if (n instanceof java.lang.Double) return hashFromDouble((java.lang.Double)n);
+ else if (n instanceof java.lang.Float) return hashFromFloat((java.lang.Float)n);
+ else return n.hashCode();
+ }
+ public static int hashFromObject(Object a) {
+ if (a instanceof Number) return hashFromNumber((Number)a);
+ else return a.hashCode();
+ }
+
+ private static int unboxCharOrInt(Object arg1, int code) {
+ if (code == CHAR)
+ return ((java.lang.Character) arg1).charValue();
+ else
+ return ((java.lang.Number) arg1).intValue();
+ }
+ private static long unboxCharOrLong(Object arg1, int code) {
+ if (code == CHAR)
+ return ((java.lang.Character) arg1).charValue();
+ else
+ return ((java.lang.Number) arg1).longValue();
+ }
+ private static float unboxCharOrFloat(Object arg1, int code) {
+ if (code == CHAR)
+ return ((java.lang.Character) arg1).charValue();
+ else
+ return ((java.lang.Number) arg1).floatValue();
+ }
+ private static double unboxCharOrDouble(Object arg1, int code) {
+ if (code == CHAR)
+ return ((java.lang.Character) arg1).charValue();
+ else
+ return ((java.lang.Number) arg1).doubleValue();
+ }
+
+/* OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS */
+
+ /** arg1 + arg2 */
+ public static Object add(Object arg1, Object arg2) throws NoSuchMethodException {
+ int code1 = typeCode(arg1);
+ int code2 = typeCode(arg2);
+ int maxcode = (code1 < code2) ? code2 : code1;
+ if (maxcode <= INT) {
+ return boxToInteger(unboxCharOrInt(arg1, code1) + unboxCharOrInt(arg2, code2));
+ }
+ if (maxcode <= LONG) {
+ return boxToLong(unboxCharOrLong(arg1, code1) + unboxCharOrLong(arg2, code2));
+ }
+ if (maxcode <= FLOAT) {
+ return boxToFloat(unboxCharOrFloat(arg1, code1) + unboxCharOrFloat(arg2, code2));
+ }
+ if (maxcode <= DOUBLE) {
+ return boxToDouble(unboxCharOrDouble(arg1, code1) + unboxCharOrDouble(arg2, code2));
+ }
+ throw new NoSuchMethodException();
+ }
+
+ /** arg1 - arg2 */
+ public static Object subtract(Object arg1, Object arg2) throws NoSuchMethodException {
+ int code1 = typeCode(arg1);
+ int code2 = typeCode(arg2);
+ int maxcode = (code1 < code2) ? code2 : code1;
+ if (maxcode <= INT) {
+ return boxToInteger(unboxCharOrInt(arg1, code1) - unboxCharOrInt(arg2, code2));
+ }
+ if (maxcode <= LONG) {
+ return boxToLong(unboxCharOrLong(arg1, code1) - unboxCharOrLong(arg2, code2));
+ }
+ if (maxcode <= FLOAT) {
+ return boxToFloat(unboxCharOrFloat(arg1, code1) - unboxCharOrFloat(arg2, code2));
+ }
+ if (maxcode <= DOUBLE) {
+ return boxToDouble(unboxCharOrDouble(arg1, code1) - unboxCharOrDouble(arg2, code2));
+ }
+ throw new NoSuchMethodException();
+ }
+
+ /** arg1 * arg2 */
+ public static Object multiply(Object arg1, Object arg2) throws NoSuchMethodException {
+ int code1 = typeCode(arg1);
+ int code2 = typeCode(arg2);
+ int maxcode = (code1 < code2) ? code2 : code1;
+ if (maxcode <= INT) {
+ return boxToInteger(unboxCharOrInt(arg1, code1) * unboxCharOrInt(arg2, code2));
+ }
+ if (maxcode <= LONG) {
+ return boxToLong(unboxCharOrLong(arg1, code1) * unboxCharOrLong(arg2, code2));
+ }
+ if (maxcode <= FLOAT) {
+ return boxToFloat(unboxCharOrFloat(arg1, code1) * unboxCharOrFloat(arg2, code2));
+ }
+ if (maxcode <= DOUBLE) {
+ return boxToDouble(unboxCharOrDouble(arg1, code1) * unboxCharOrDouble(arg2, code2));
+ }
+ throw new NoSuchMethodException();
+ }
+
+ /** arg1 / arg2 */
+ public static Object divide(Object arg1, Object arg2) throws NoSuchMethodException {
+ int code1 = typeCode(arg1);
+ int code2 = typeCode(arg2);
+ int maxcode = (code1 < code2) ? code2 : code1;
+
+ if (maxcode <= INT)
+ return boxToInteger(unboxCharOrInt(arg1, code1) / unboxCharOrInt(arg2, code2));
+ if (maxcode <= LONG)
+ return boxToLong(unboxCharOrLong(arg1, code1) / unboxCharOrLong(arg2, code2));
+ if (maxcode <= FLOAT)
+ return boxToFloat(unboxCharOrFloat(arg1, code1) / unboxCharOrFloat(arg2, code2));
+ if (maxcode <= DOUBLE)
+ return boxToDouble(unboxCharOrDouble(arg1, code1) / unboxCharOrDouble(arg2, code2));
+
+ throw new NoSuchMethodException();
+ }
+
+ /** arg1 % arg2 */
+ public static Object takeModulo(Object arg1, Object arg2) throws NoSuchMethodException {
+ int code1 = typeCode(arg1);
+ int code2 = typeCode(arg2);
+ int maxcode = (code1 < code2) ? code2 : code1;
+
+ if (maxcode <= INT)
+ return boxToInteger(unboxCharOrInt(arg1, code1) % unboxCharOrInt(arg2, code2));
+ if (maxcode <= LONG)
+ return boxToLong(unboxCharOrLong(arg1, code1) % unboxCharOrLong(arg2, code2));
+ if (maxcode <= FLOAT)
+ return boxToFloat(unboxCharOrFloat(arg1, code1) % unboxCharOrFloat(arg2, code2));
+ if (maxcode <= DOUBLE)
+ return boxToDouble(unboxCharOrDouble(arg1, code1) % unboxCharOrDouble(arg2, code2));
+
+ throw new NoSuchMethodException();
+ }
+
+ /** arg1 >> arg2 */
+ public static Object shiftSignedRight(Object arg1, Object arg2) throws NoSuchMethodException {
+ int code1 = typeCode(arg1);
+ int code2 = typeCode(arg2);
+ if (code1 <= INT) {
+ int val1 = unboxCharOrInt(arg1, code1);
+ if (code2 <= INT) {
+ int val2 = unboxCharOrInt(arg2, code2);
+ return boxToInteger(val1 >> val2);
+ }
+ if (code2 <= LONG) {
+ long val2 = unboxCharOrLong(arg2, code2);
+ return boxToInteger(val1 >> val2);
+ }
+ }
+ if (code1 <= LONG) {
+ long val1 = unboxCharOrLong(arg1, code1);
+ if (code2 <= INT) {
+ int val2 = unboxCharOrInt(arg2, code2);
+ return boxToLong(val1 >> val2);
+ }
+ if (code2 <= LONG) {
+ long val2 = unboxCharOrLong(arg2, code2);
+ return boxToLong(val1 >> val2);
+ }
+ }
+ throw new NoSuchMethodException();
+ }
+
+ /** arg1 << arg2 */
+ public static Object shiftSignedLeft(Object arg1, Object arg2) throws NoSuchMethodException {
+ int code1 = typeCode(arg1);
+ int code2 = typeCode(arg2);
+ if (code1 <= INT) {
+ int val1 = unboxCharOrInt(arg1, code1);
+ if (code2 <= INT) {
+ int val2 = unboxCharOrInt(arg2, code2);
+ return boxToInteger(val1 << val2);
+ }
+ if (code2 <= LONG) {
+ long val2 = unboxCharOrLong(arg2, code2);
+ return boxToInteger(val1 << val2);
+ }
+ }
+ if (code1 <= LONG) {
+ long val1 = unboxCharOrLong(arg1, code1);
+ if (code2 <= INT) {
+ int val2 = unboxCharOrInt(arg2, code2);
+ return boxToLong(val1 << val2);
+ }
+ if (code2 <= LONG) {
+ long val2 = unboxCharOrLong(arg2, code2);
+ return boxToLong(val1 << val2);
+ }
+ }
+ throw new NoSuchMethodException();
+ }
+
+ /** arg1 >>> arg2 */
+ public static Object shiftLogicalRight(Object arg1, Object arg2) throws NoSuchMethodException {
+ int code1 = typeCode(arg1);
+ int code2 = typeCode(arg2);
+ if (code1 <= INT) {
+ int val1 = unboxCharOrInt(arg1, code1);
+ if (code2 <= INT) {
+ int val2 = unboxCharOrInt(arg2, code2);
+ return boxToInteger(val1 >>> val2);
+ }
+ if (code2 <= LONG) {
+ long val2 = unboxCharOrLong(arg2, code2);
+ return boxToInteger(val1 >>> val2);
+ }
+ }
+ if (code1 <= LONG) {
+ long val1 = unboxCharOrLong(arg1, code1);
+ if (code2 <= INT) {
+ int val2 = unboxCharOrInt(arg2, code2);
+ return boxToLong(val1 >>> val2);
+ }
+ if (code2 <= LONG) {
+ long val2 = unboxCharOrLong(arg2, code2);
+ return boxToLong(val1 >>> val2);
+ }
+ }
+ throw new NoSuchMethodException();
+ }
+
+ /** -arg */
+ public static Object negate(Object arg) throws NoSuchMethodException {
+ int code = typeCode(arg);
+ if (code <= INT) {
+ int val = unboxCharOrInt(arg, code);
+ return boxToInteger(-val);
+ }
+ if (code <= LONG) {
+ long val = unboxCharOrLong(arg, code);
+ return boxToLong(-val);
+ }
+ if (code <= FLOAT) {
+ float val = unboxCharOrFloat(arg, code);
+ return boxToFloat(-val);
+ }
+ if (code <= DOUBLE) {
+ double val = unboxCharOrDouble(arg, code);
+ return boxToDouble(-val);
+ }
+ throw new NoSuchMethodException();
+ }
+
+ /** +arg */
+ public static Object positive(Object arg) throws NoSuchMethodException {
+ int code = typeCode(arg);
+ if (code <= INT) {
+ return boxToInteger(+unboxCharOrInt(arg, code));
+ }
+ if (code <= LONG) {
+ return boxToLong(+unboxCharOrLong(arg, code));
+ }
+ if (code <= FLOAT) {
+ return boxToFloat(+unboxCharOrFloat(arg, code));
+ }
+ if (code <= DOUBLE) {
+ return boxToDouble(+unboxCharOrDouble(arg, code));
+ }
+ throw new NoSuchMethodException();
+ }
+
+ /** arg1 & arg2 */
+ public static Object takeAnd(Object arg1, Object arg2) throws NoSuchMethodException {
+ if ((arg1 instanceof Boolean) || (arg2 instanceof Boolean)) {
+ if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean))
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() & ((java.lang.Boolean) arg2).booleanValue());
+ else
+ throw new NoSuchMethodException();
+ }
+ int code1 = typeCode(arg1);
+ int code2 = typeCode(arg2);
+ int maxcode = (code1 < code2) ? code2 : code1;
+
+ if (maxcode <= INT)
+ return boxToInteger(unboxCharOrInt(arg1, code1) & unboxCharOrInt(arg2, code2));
+ if (maxcode <= LONG)
+ return boxToLong(unboxCharOrLong(arg1, code1) & unboxCharOrLong(arg2, code2));
+
+ throw new NoSuchMethodException();
+ }
+
+ /** arg1 | arg2 */
+ public static Object takeOr(Object arg1, Object arg2) throws NoSuchMethodException {
+ if ((arg1 instanceof Boolean) || (arg2 instanceof Boolean)) {
+ if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean))
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() & ((java.lang.Boolean) arg2).booleanValue());
+ else
+ throw new NoSuchMethodException();
+ }
+ int code1 = typeCode(arg1);
+ int code2 = typeCode(arg2);
+ int maxcode = (code1 < code2) ? code2 : code1;
+
+ if (maxcode <= INT)
+ return boxToInteger(unboxCharOrInt(arg1, code1) | unboxCharOrInt(arg2, code2));
+ if (maxcode <= LONG)
+ return boxToLong(unboxCharOrLong(arg1, code1) | unboxCharOrLong(arg2, code2));
+
+ throw new NoSuchMethodException();
+ }
+
+ /** arg1 ^ arg2 */
+ public static Object takeXor(Object arg1, Object arg2) throws NoSuchMethodException {
+ if ((arg1 instanceof Boolean) || (arg2 instanceof Boolean)) {
+ if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean))
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() & ((java.lang.Boolean) arg2).booleanValue());
+ else
+ throw new NoSuchMethodException();
+ }
+ int code1 = typeCode(arg1);
+ int code2 = typeCode(arg2);
+ int maxcode = (code1 < code2) ? code2 : code1;
+
+ if (maxcode <= INT)
+ return boxToInteger(unboxCharOrInt(arg1, code1) ^ unboxCharOrInt(arg2, code2));
+ if (maxcode <= LONG)
+ return boxToLong(unboxCharOrLong(arg1, code1) ^ unboxCharOrLong(arg2, code2));
+
+ throw new NoSuchMethodException();
+ }
+
+ /** arg1 && arg2 */
+ public static Object takeConditionalAnd(Object arg1, Object arg2) throws NoSuchMethodException {
+ if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean)) {
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() && ((java.lang.Boolean) arg2).booleanValue());
+ }
+ throw new NoSuchMethodException();
+ }
+
+ /** arg1 || arg2 */
+ public static Object takeConditionalOr(Object arg1, Object arg2) throws NoSuchMethodException {
+ if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean)) {
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() || ((java.lang.Boolean) arg2).booleanValue());
+ }
+ throw new NoSuchMethodException();
+ }
+
+ /** ~arg */
+ public static Object complement(Object arg) throws NoSuchMethodException {
+ int code = typeCode(arg);
+ if (code <= INT) {
+ return boxToInteger(~unboxCharOrInt(arg, code));
+ }
+ if (code <= LONG) {
+ return boxToLong(~unboxCharOrLong(arg, code));
+ }
+ throw new NoSuchMethodException();
+ }
+
+ /** !arg */
+ public static Object takeNot(Object arg) throws NoSuchMethodException {
+ if (arg instanceof Boolean) {
+ return boxToBoolean(!((java.lang.Boolean) arg).booleanValue());
+ }
+ throw new NoSuchMethodException();
+ }
+
+ public static Object testEqual(Object arg1, Object arg2) throws NoSuchMethodException {
+ return boxToBoolean(arg1 == arg2);
+ }
+
+ public static Object testNotEqual(Object arg1, Object arg2) throws NoSuchMethodException {
+ return boxToBoolean(arg1 != arg2);
+ }
+
+ public static Object testLessThan(Object arg1, Object arg2) throws NoSuchMethodException {
+ int code1 = typeCode(arg1);
+ int code2 = typeCode(arg2);
+ int maxcode = (code1 < code2) ? code2 : code1;
+ if (maxcode <= INT) {
+ int val1 = unboxCharOrInt(arg1, code1);
+ int val2 = unboxCharOrInt(arg2, code2);
+ return boxToBoolean(val1 < val2);
+ }
+ if (maxcode <= LONG) {
+ long val1 = unboxCharOrLong(arg1, code1);
+ long val2 = unboxCharOrLong(arg2, code2);
+ return boxToBoolean(val1 < val2);
+ }
+ if (maxcode <= FLOAT) {
+ float val1 = unboxCharOrFloat(arg1, code1);
+ float val2 = unboxCharOrFloat(arg2, code2);
+ return boxToBoolean(val1 < val2);
+ }
+ if (maxcode <= DOUBLE) {
+ double val1 = unboxCharOrDouble(arg1, code1);
+ double val2 = unboxCharOrDouble(arg2, code2);
+ return boxToBoolean(val1 < val2);
+ }
+ throw new NoSuchMethodException();
+ }
+
+ public static Object testLessOrEqualThan(Object arg1, Object arg2) throws NoSuchMethodException {
+ int code1 = typeCode(arg1);
+ int code2 = typeCode(arg2);
+ int maxcode = (code1 < code2) ? code2 : code1;
+ if (maxcode <= INT) {
+ int val1 = unboxCharOrInt(arg1, code1);
+ int val2 = unboxCharOrInt(arg2, code2);
+ return boxToBoolean(val1 <= val2);
+ }
+ if (maxcode <= LONG) {
+ long val1 = unboxCharOrLong(arg1, code1);
+ long val2 = unboxCharOrLong(arg2, code2);
+ return boxToBoolean(val1 <= val2);
+ }
+ if (maxcode <= FLOAT) {
+ float val1 = unboxCharOrFloat(arg1, code1);
+ float val2 = unboxCharOrFloat(arg2, code2);
+ return boxToBoolean(val1 <= val2);
+ }
+ if (maxcode <= DOUBLE) {
+ double val1 = unboxCharOrDouble(arg1, code1);
+ double val2 = unboxCharOrDouble(arg2, code2);
+ return boxToBoolean(val1 <= val2);
+ }
+ throw new NoSuchMethodException();
+ }
+
+ public static Object testGreaterOrEqualThan(Object arg1, Object arg2) throws NoSuchMethodException {
+ int code1 = typeCode(arg1);
+ int code2 = typeCode(arg2);
+ int maxcode = (code1 < code2) ? code2 : code1;
+ if (maxcode <= INT) {
+ int val1 = unboxCharOrInt(arg1, code1);
+ int val2 = unboxCharOrInt(arg2, code2);
+ return boxToBoolean(val1 >= val2);
+ }
+ if (maxcode <= LONG) {
+ long val1 = unboxCharOrLong(arg1, code1);
+ long val2 = unboxCharOrLong(arg2, code2);
+ return boxToBoolean(val1 >= val2);
+ }
+ if (maxcode <= FLOAT) {
+ float val1 = unboxCharOrFloat(arg1, code1);
+ float val2 = unboxCharOrFloat(arg2, code2);
+ return boxToBoolean(val1 >= val2);
+ }
+ if (maxcode <= DOUBLE) {
+ double val1 = unboxCharOrDouble(arg1, code1);
+ double val2 = unboxCharOrDouble(arg2, code2);
+ return boxToBoolean(val1 >= val2);
+ }
+ throw new NoSuchMethodException();
+ }
+
+ public static Object testGreaterThan(Object arg1, Object arg2) throws NoSuchMethodException {
+ int code1 = typeCode(arg1);
+ int code2 = typeCode(arg2);
+ int maxcode = (code1 < code2) ? code2 : code1;
+ if (maxcode <= INT) {
+ int val1 = unboxCharOrInt(arg1, code1);
+ int val2 = unboxCharOrInt(arg2, code2);
+ return boxToBoolean(val1 > val2);
+ }
+ if (maxcode <= LONG) {
+ long val1 = unboxCharOrLong(arg1, code1);
+ long val2 = unboxCharOrLong(arg2, code2);
+ return boxToBoolean(val1 > val2);
+ }
+ if (maxcode <= FLOAT) {
+ float val1 = unboxCharOrFloat(arg1, code1);
+ float val2 = unboxCharOrFloat(arg2, code2);
+ return boxToBoolean(val1 > val2);
+ }
+ if (maxcode <= DOUBLE) {
+ double val1 = unboxCharOrDouble(arg1, code1);
+ double val2 = unboxCharOrDouble(arg2, code2);
+ return boxToBoolean(val1 > val2);
+ }
+ throw new NoSuchMethodException();
+ }
+
+ public static boolean isBoxedNumberOrBoolean(Object arg) {
+ return (arg instanceof java.lang.Boolean) || isBoxedNumber(arg);
+ }
+ public static boolean isBoxedNumber(Object arg) {
+ return (
+ (arg instanceof java.lang.Integer)
+ || (arg instanceof java.lang.Long)
+ || (arg instanceof java.lang.Double)
+ || (arg instanceof java.lang.Float)
+ || (arg instanceof java.lang.Short)
+ || (arg instanceof java.lang.Character)
+ || (arg instanceof java.lang.Byte)
+ );
+ }
+
+ /** arg.toChar */
+ public static java.lang.Character toCharacter(Object arg) throws NoSuchMethodException {
+ if (arg instanceof java.lang.Integer) return boxToCharacter((char)unboxToInt(arg));
+ if (arg instanceof java.lang.Short) return boxToCharacter((char)unboxToShort(arg));
+ if (arg instanceof java.lang.Character) return (java.lang.Character)arg;
+ if (arg instanceof java.lang.Long) return boxToCharacter((char)unboxToLong(arg));
+ if (arg instanceof java.lang.Byte) return boxToCharacter((char)unboxToByte(arg));
+ if (arg instanceof java.lang.Float) return boxToCharacter((char)unboxToFloat(arg));
+ if (arg instanceof java.lang.Double) return boxToCharacter((char)unboxToDouble(arg));
+ throw new NoSuchMethodException();
+ }
+
+ /** arg.toByte */
+ public static java.lang.Byte toByte(Object arg) throws NoSuchMethodException {
+ if (arg instanceof java.lang.Integer) return boxToByte((byte)unboxToInt(arg));
+ if (arg instanceof java.lang.Character) return boxToByte((byte)unboxToChar(arg));
+ if (arg instanceof java.lang.Byte) return (java.lang.Byte)arg;
+ if (arg instanceof java.lang.Long) return boxToByte((byte)unboxToLong(arg));
+ if (arg instanceof java.lang.Short) return boxToByte((byte)unboxToShort(arg));
+ if (arg instanceof java.lang.Float) return boxToByte((byte)unboxToFloat(arg));
+ if (arg instanceof java.lang.Double) return boxToByte((byte)unboxToDouble(arg));
+ throw new NoSuchMethodException();
+ }
+
+ /** arg.toShort */
+ public static java.lang.Short toShort(Object arg) throws NoSuchMethodException {
+ if (arg instanceof java.lang.Integer) return boxToShort((short)unboxToInt(arg));
+ if (arg instanceof java.lang.Long) return boxToShort((short)unboxToLong(arg));
+ if (arg instanceof java.lang.Character) return boxToShort((short)unboxToChar(arg));
+ if (arg instanceof java.lang.Byte) return boxToShort((short)unboxToByte(arg));
+ if (arg instanceof java.lang.Short) return (java.lang.Short)arg;
+ if (arg instanceof java.lang.Float) return boxToShort((short)unboxToFloat(arg));
+ if (arg instanceof java.lang.Double) return boxToShort((short)unboxToDouble(arg));
+ throw new NoSuchMethodException();
+ }
+
+ /** arg.toInt */
+ public static java.lang.Integer toInteger(Object arg) throws NoSuchMethodException {
+ if (arg instanceof java.lang.Integer) return (java.lang.Integer)arg;
+ if (arg instanceof java.lang.Long) return boxToInteger((int)unboxToLong(arg));
+ if (arg instanceof java.lang.Double) return boxToInteger((int)unboxToDouble(arg));
+ if (arg instanceof java.lang.Float) return boxToInteger((int)unboxToFloat(arg));
+ if (arg instanceof java.lang.Character) return boxToInteger((int)unboxToChar(arg));
+ if (arg instanceof java.lang.Byte) return boxToInteger((int)unboxToByte(arg));
+ if (arg instanceof java.lang.Short) return boxToInteger((int)unboxToShort(arg));
+ throw new NoSuchMethodException();
+ }
+
+ /** arg.toLong */
+ public static java.lang.Long toLong(Object arg) throws NoSuchMethodException {
+ if (arg instanceof java.lang.Integer) return boxToLong((long)unboxToInt(arg));
+ if (arg instanceof java.lang.Double) return boxToLong((long)unboxToDouble(arg));
+ if (arg instanceof java.lang.Float) return boxToLong((long)unboxToFloat(arg));
+ if (arg instanceof java.lang.Long) return (java.lang.Long)arg;
+ if (arg instanceof java.lang.Character) return boxToLong((long)unboxToChar(arg));
+ if (arg instanceof java.lang.Byte) return boxToLong((long)unboxToByte(arg));
+ if (arg instanceof java.lang.Short) return boxToLong((long)unboxToShort(arg));
+ throw new NoSuchMethodException();
+ }
+
+ /** arg.toFloat */
+ public static java.lang.Float toFloat(Object arg) throws NoSuchMethodException {
+ if (arg instanceof java.lang.Integer) return boxToFloat((float)unboxToInt(arg));
+ if (arg instanceof java.lang.Long) return boxToFloat((float)unboxToLong(arg));
+ if (arg instanceof java.lang.Float) return (java.lang.Float)arg;
+ if (arg instanceof java.lang.Double) return boxToFloat((float)unboxToDouble(arg));
+ if (arg instanceof java.lang.Character) return boxToFloat((float)unboxToChar(arg));
+ if (arg instanceof java.lang.Byte) return boxToFloat((float)unboxToByte(arg));
+ if (arg instanceof java.lang.Short) return boxToFloat((float)unboxToShort(arg));
+ throw new NoSuchMethodException();
+ }
+
+ /** arg.toDouble */
+ public static java.lang.Double toDouble(Object arg) throws NoSuchMethodException {
+ if (arg instanceof java.lang.Integer) return boxToDouble((double)unboxToInt(arg));
+ if (arg instanceof java.lang.Float) return boxToDouble((double)unboxToFloat(arg));
+ if (arg instanceof java.lang.Double) return (java.lang.Double)arg;
+ if (arg instanceof java.lang.Long) return boxToDouble((double)unboxToLong(arg));
+ if (arg instanceof java.lang.Character) return boxToDouble((double)unboxToChar(arg));
+ if (arg instanceof java.lang.Byte) return boxToDouble((double)unboxToByte(arg));
+ if (arg instanceof java.lang.Short) return boxToDouble((double)unboxToShort(arg));
+ throw new NoSuchMethodException();
+ }
+
+}
diff --git a/test/files/pos/t5644/other.scala b/test/files/pos/t5644/other.scala
new file mode 100644
index 0000000..50388fd
--- /dev/null
+++ b/test/files/pos/t5644/other.scala
@@ -0,0 +1,3 @@
+class Foo {
+ List(1) exists(_ == (null: Any))
+}
diff --git a/test/files/pos/t5654.scala b/test/files/pos/t5654.scala
new file mode 100644
index 0000000..1f8d05b
--- /dev/null
+++ b/test/files/pos/t5654.scala
@@ -0,0 +1,13 @@
+class T(val a: Array[_])
+
+class U {
+ val a = Array(Array(1, 2), Array("a","b"))
+}
+
+class T1 { val a: Array[_] = Array(1) }
+
+case class Bomb(a: Array[_])
+case class Bomb2(a: Array[T] forSome { type T })
+class Okay1(a: Array[_])
+case class Okay2(s: Seq[_])
+
diff --git a/test/files/pos/bug566.scala b/test/files/pos/t566.scala
similarity index 100%
rename from test/files/pos/bug566.scala
rename to test/files/pos/t566.scala
diff --git a/test/files/pos/t5667.scala b/test/files/pos/t5667.scala
new file mode 100644
index 0000000..353eec9
--- /dev/null
+++ b/test/files/pos/t5667.scala
@@ -0,0 +1,4 @@
+object Main {
+ implicit class C(val s: String) extends AnyVal
+ implicit class C2(val s: String) extends AnyRef
+}
diff --git a/test/files/pos/t5692a.check b/test/files/pos/t5692a.check
new file mode 100644
index 0000000..7fbfb5d
--- /dev/null
+++ b/test/files/pos/t5692a.check
@@ -0,0 +1,4 @@
+Test_2.scala:2: error: this type parameter must be specified
+ def x = Macros.foo
+ ^
+one error found
diff --git a/test/files/pos/t5692a.flags b/test/files/pos/t5692a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/pos/t5692a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/pos/t5692a/Macros_1.scala b/test/files/pos/t5692a/Macros_1.scala
new file mode 100644
index 0000000..06b5a3d
--- /dev/null
+++ b/test/files/pos/t5692a/Macros_1.scala
@@ -0,0 +1,6 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl[T](c: Context) = c.literalUnit
+ def foo[T] = macro impl[T]
+}
\ No newline at end of file
diff --git a/test/files/pos/t5692a/Test_2.scala b/test/files/pos/t5692a/Test_2.scala
new file mode 100644
index 0000000..08d510c
--- /dev/null
+++ b/test/files/pos/t5692a/Test_2.scala
@@ -0,0 +1,3 @@
+class Test {
+ def x = Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/pos/t5692b.check b/test/files/pos/t5692b.check
new file mode 100644
index 0000000..1679682
--- /dev/null
+++ b/test/files/pos/t5692b.check
@@ -0,0 +1,4 @@
+Test_2.scala:2: error: these type parameters must be specified
+ def x = Macros.foo
+ ^
+one error found
diff --git a/test/files/pos/t5692b.flags b/test/files/pos/t5692b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/pos/t5692b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/pos/t5692b/Macros_1.scala b/test/files/pos/t5692b/Macros_1.scala
new file mode 100644
index 0000000..b28d19f
--- /dev/null
+++ b/test/files/pos/t5692b/Macros_1.scala
@@ -0,0 +1,6 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl[T, U](c: Context) = c.literalUnit
+ def foo[T, U] = macro impl[T, U]
+}
\ No newline at end of file
diff --git a/test/files/pos/t5692b/Test_2.scala b/test/files/pos/t5692b/Test_2.scala
new file mode 100644
index 0000000..08d510c
--- /dev/null
+++ b/test/files/pos/t5692b/Test_2.scala
@@ -0,0 +1,3 @@
+class Test {
+ def x = Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/pos/t5692c.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/pos/t5692c.check
diff --git a/test/files/pos/t5692c.scala b/test/files/pos/t5692c.scala
new file mode 100644
index 0000000..fa5f0b2
--- /dev/null
+++ b/test/files/pos/t5692c.scala
@@ -0,0 +1,4 @@
+class C {
+ def foo[T: scala.reflect.ClassTag](xs: T*): Array[T] = ???
+ foo()
+}
\ No newline at end of file
diff --git a/test/files/pos/t5702-pos-infix-star.scala b/test/files/pos/t5702-pos-infix-star.scala
new file mode 100644
index 0000000..756bcdd
--- /dev/null
+++ b/test/files/pos/t5702-pos-infix-star.scala
@@ -0,0 +1,15 @@
+
+object Test {
+ case class *(a: Int, b: Int)
+ type Star = *
+ case class P(a: Int, b: Star) // alias still required
+
+ def main(args: Array[String]) {
+ val v = new *(6,7)
+ val x * y = v
+ printf("%d,%d\n",x,y)
+ val p = P(5, v)
+ val P(a, b * c) = p
+ printf("%d,%d,%d\n",a,b,c)
+ }
+}
diff --git a/test/files/pos/t5703/Base.java b/test/files/pos/t5703/Base.java
new file mode 100644
index 0000000..fa75cc3
--- /dev/null
+++ b/test/files/pos/t5703/Base.java
@@ -0,0 +1,3 @@
+public abstract class Base<Params> {
+ public abstract void func(Params[] params);
+}
\ No newline at end of file
diff --git a/test/files/pos/t5703/Impl.scala b/test/files/pos/t5703/Impl.scala
new file mode 100644
index 0000000..ee22d8f
--- /dev/null
+++ b/test/files/pos/t5703/Impl.scala
@@ -0,0 +1,3 @@
+class Implementation extends Base[Object] {
+ def func(params: Array[Object]): Unit = {}
+}
\ No newline at end of file
diff --git a/test/files/pos/t5706.flags b/test/files/pos/t5706.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/pos/t5706.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/pos/t5706.scala b/test/files/pos/t5706.scala
new file mode 100644
index 0000000..20a8b25
--- /dev/null
+++ b/test/files/pos/t5706.scala
@@ -0,0 +1,10 @@
+import scala.reflect.macros.Context
+
+class Logger {
+ def error(message: String) = macro Impls.error
+}
+
+object Impls {
+ type LoggerContext = Context { type PrefixType = Logger }
+ def error(c: LoggerContext)(message: c.Expr[String]): c.Expr[Unit] = ???
+}
diff --git a/test/files/pos/t5720-ownerous.scala b/test/files/pos/t5720-ownerous.scala
new file mode 100644
index 0000000..ad4d4c1
--- /dev/null
+++ b/test/files/pos/t5720-ownerous.scala
@@ -0,0 +1,56 @@
+
+/*
+ * The block under qual$1 must be owned by it.
+ * In the sample bug, the first default arg generates x$4,
+ * the second default arg generates qual$1, hence the maximal
+ * minimization.
+ *
+ <method> <triedcooking> def model: C.this.M = {
+ val qual$1: C.this.M = scala.Option.apply[C.this.M]({
+ val x$1: lang.this.String("foo") = "foo";
+ val x$2: String = C.this.M.apply$default$2("foo");
+ C.this.M.apply("foo")(x$2)
+}).getOrElse[C.this.M]({
+ val x$3: lang.this.String("bar") = "bar";
+ val x$4: String = C.this.M.apply$default$2("bar");
+ C.this.M.apply("bar")(x$4)
+ });
+ val x$5: lang.this.String("baz") = "baz";
+ val x$6: String = qual$1.copy$default$2("baz");
+ qual$1.copy("baz")(x$6)
+ }
+ */
+class C {
+ case class M(currentUser: String = "anon")(val message: String = "empty")
+ val m = M("foo")()
+
+ // reported
+ //def model = Option(M("foo")()).getOrElse(M("bar")()).copy(currentUser = "")()
+
+ // the bug
+ def model = Option(m).getOrElse(M("bar")()).copy("baz")("empty")
+
+ // style points for this version
+ def modish = ((null: Option[M]) getOrElse new M()()).copy()("empty")
+
+ // various simplifications are too simple
+ case class N(currentUser: String = "anon")
+ val n = N("fun")
+ def nudel = Option(n).getOrElse(N()).copy()
+}
+
+object Test {
+ def main(args: Array[String]) {
+ val c = new C
+ println(c.model.currentUser)
+ println(c.model.message)
+ }
+}
+/*
+symbol value x$4$1 does not exist in badcopy.C.model
+at scala.reflect.internal.SymbolTable.abort(SymbolTable.scala:45)
+at scala.tools.nsc.Global.abort(Global.scala:202)
+at scala.tools.nsc.backend.icode.GenICode$ICodePhase.liftedTree2$1(GenICode.scala:998)
+at scala.tools.nsc.backend.icode.GenICode$ICodePhase.scala$tools$nsc$backend$icode$GenICode$ICodePhase$$genLoad(GenICode.scala:992)
+*/
+
diff --git a/test/files/pos/t5726.scala b/test/files/pos/t5726.scala
new file mode 100644
index 0000000..b28ebd8
--- /dev/null
+++ b/test/files/pos/t5726.scala
@@ -0,0 +1,17 @@
+import scala.language.dynamics
+
+class DynamicTest extends Dynamic {
+ def selectDynamic(name: String) = s"value of $name"
+ def updateDynamic(name: String)(value: Any) {
+ println(s"You have just updated property '$name' with value: $value")
+ }
+}
+
+object MyApp extends App {
+ def testing() {
+ val test = new DynamicTest
+ test.firstName = "John"
+ }
+
+ testing()
+}
diff --git a/test/files/pos/t5727.scala b/test/files/pos/t5727.scala
new file mode 100644
index 0000000..e091d82
--- /dev/null
+++ b/test/files/pos/t5727.scala
@@ -0,0 +1,31 @@
+
+/*
+ * We like operators, bar none.
+ */
+object Test {
+
+ trait SomeInfo
+ case object NoInfo extends SomeInfo
+
+ sealed abstract class Res[+T]
+ case object NotRes extends Res[Nothing]
+
+
+ abstract class Base[+T] {
+ def apply(f: String): Res[T]
+ // 'i' crashes the compiler, similarly if we use currying
+ //def |[U >: T](a: => Base[U], i: SomeInfo = NoInfo): Base[U] = null
+ def bar[U >: T](a: => Base[U], i: SomeInfo = NoInfo): Base[U] = null
+ }
+
+ implicit def fromStringToBase(a: String): Base[String] = new Base[String] { def apply(in: String) = NotRes }
+
+ // bug
+ //def Sample: Base[Any] = ( rep("foo" | "bar") | "sth")
+ def Sample: Base[Any] = ( rep("foo" bar "bar") bar "sth")
+
+ def rep[T](p: => Base[T]): Base[T] = null // whatever
+
+ def main(args: Array[String]) {
+ }
+}
diff --git a/test/files/pos/t5729.scala b/test/files/pos/t5729.scala
new file mode 100644
index 0000000..9fd9c9f
--- /dev/null
+++ b/test/files/pos/t5729.scala
@@ -0,0 +1,6 @@
+trait T[X]
+object Test {
+ def join(in: Seq[T[_]]): Int = ???
+ def join[S](in: Seq[T[S]]): String = ???
+ join(null: Seq[T[_]])
+}
\ No newline at end of file
diff --git a/test/files/pos/bug573.scala b/test/files/pos/t573.scala
similarity index 100%
rename from test/files/pos/bug573.scala
rename to test/files/pos/t573.scala
diff --git a/test/files/pos/t5738.scala b/test/files/pos/t5738.scala
new file mode 100644
index 0000000..b8755ed
--- /dev/null
+++ b/test/files/pos/t5738.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def f[T](a: T, b: T) = {
+ reify(a.toString + b)
+ reify(a + b.toString)
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t5742.scala b/test/files/pos/t5742.scala
new file mode 100644
index 0000000..3d3125b
--- /dev/null
+++ b/test/files/pos/t5742.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def foo[T](a: T) = reify {
+ val x1 = a
+ val x2 = reify(a)
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t5744/Macros_1.scala b/test/files/pos/t5744/Macros_1.scala
new file mode 100644
index 0000000..288a886
--- /dev/null
+++ b/test/files/pos/t5744/Macros_1.scala
@@ -0,0 +1,22 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macros {
+ def foo[U: Numeric](x: U) = macro foo_impl[U]
+ def bar[U: Numeric : Equiv, Y <% String](x: U)(implicit s: String) = macro bar_impl[U, Y]
+
+ def foo_impl[U](c: Context)(x: c.Expr[U])(numeric: c.Expr[Numeric[U]]) = {
+ import c.universe._
+ val plusOne = Apply(Select(numeric.tree, newTermName("plus")), List(x.tree, Literal(Constant(1))))
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(plusOne))
+ c.Expr[Unit](body)
+ }
+
+ def bar_impl[U, Y](c: Context)(x: c.Expr[U])(numeric: c.Expr[Numeric[U]], equiv: c.Expr[Equiv[U]], viewAsString: c.Expr[Y => String], s: c.Expr[String]) = {
+ import c.universe._
+ val plusOne = Apply(Select(numeric.tree, newTermName("plus")), List(x.tree, Literal(Constant(1))))
+ val plusLen = Apply(Select(numeric.tree, newTermName("plus")), List(plusOne, Select(s.tree, newTermName("length"))))
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(plusLen))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t5744/Test_2.scala b/test/files/pos/t5744/Test_2.scala
new file mode 100644
index 0000000..64b57e6
--- /dev/null
+++ b/test/files/pos/t5744/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import Macros._
+ foo(42)
+ implicit val s = ""
+ bar(43)
+}
\ No newline at end of file
diff --git a/test/files/pos/t5756.scala b/test/files/pos/t5756.scala
new file mode 100644
index 0000000..45960fa
--- /dev/null
+++ b/test/files/pos/t5756.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def tagme[T: TypeTag](x: T) = typeTag[T]
+ val foo = tagme{object Bar; Bar}
+}
\ No newline at end of file
diff --git a/test/files/pos/t5769.scala b/test/files/pos/t5769.scala
new file mode 100644
index 0000000..fdc46b6
--- /dev/null
+++ b/test/files/pos/t5769.scala
@@ -0,0 +1,9 @@
+// a.scala
+import scala.reflect.{ClassTag, classTag}
+
+class A {
+ type AI = Array[Int]
+
+ def f1 = classTag[Array[Int]]
+ def f2 = classTag[AI]
+}
\ No newline at end of file
diff --git a/test/files/pos/bug577.scala b/test/files/pos/t577.scala
similarity index 100%
rename from test/files/pos/bug577.scala
rename to test/files/pos/t577.scala
diff --git a/test/files/pos/t5777.scala b/test/files/pos/t5777.scala
new file mode 100644
index 0000000..24cea36
--- /dev/null
+++ b/test/files/pos/t5777.scala
@@ -0,0 +1,45 @@
+// /scala/trac/5777/a.scala
+// Wed May 9 08:44:57 PDT 2012
+
+trait Ring {
+ trait E
+}
+
+class Poly[C <: Ring](val ring: C) extends Ring
+// This definition of Poly triggers the same failure on *both* versions
+// class Poly(val ring: Ring) extends Ring
+
+object BigInt extends Ring
+
+object MyApp {
+ val r = new Poly(BigInt)
+
+ implicitly[r.ring.E <:< BigInt.E]
+
+ // fail on 2.10, works on 2.9.2
+ (null.asInstanceOf[BigInt.E] : r.ring.E)
+
+ // works on both versions
+ val r1 = new Poly[BigInt.type](BigInt)
+ (null.asInstanceOf[BigInt.E] : r1.ring.E)
+
+ // Oddly, -Xprint:typer reports that r and r1 have the same inferred type.
+ //
+ // private[this] val r: Poly[BigInt.type] = new Poly[BigInt.type](BigInt);
+ // <stable> <accessor> def r: Poly[BigInt.type] = MyApp.this.r;
+ // (null.asInstanceOf[BigInt.E]: MyApp.r.ring.E);
+ // private[this] val r1: Poly[BigInt.type] = new Poly[BigInt.type](BigInt);
+ // <stable> <accessor> def r1: Poly[BigInt.type] = MyApp.this.r1;
+ // (null.asInstanceOf[BigInt.E]: MyApp.r1.ring.E)
+
+ // diff typer-2.9.2.txt typer-2.10.txt
+ // ...
+ // ---
+ // > object MyApp extends scala.AnyRef {
+ // > def <init>(): MyApp.type = {
+ // > MyApp.super.<init>();
+ // 30c30
+ // < scala.this.Predef.implicitly[<:<[BigInt.E,MyApp.r.ring.E]](scala.this.Predef.conforms[BigInt.E]);
+ // ---
+ // > scala.this.Predef.implicitly[<:<[BigInt.E,MyApp.r.ring.E]]();
+}
diff --git a/test/files/pos/t5779-numeq-warn.scala b/test/files/pos/t5779-numeq-warn.scala
new file mode 100644
index 0000000..76ef297
--- /dev/null
+++ b/test/files/pos/t5779-numeq-warn.scala
@@ -0,0 +1,13 @@
+
+object Test {
+ def main(args: Array[String]) {
+ val d: Double = (BigInt(1) << 64).toDouble
+ val f: Float = d.toFloat
+ val n: java.lang.Number = d.toFloat
+ assert (d == f) // ok
+ assert (d == n) // was: comparing values of types Double and Number using `==' will always yield false
+ assert (n == d) // was: Number and Double are unrelated: they will most likely never compare equal
+ assert (f == n)
+ assert (n == f)
+ }
+}
diff --git a/test/files/pos/bug578.scala b/test/files/pos/t578.scala
similarity index 100%
rename from test/files/pos/bug578.scala
rename to test/files/pos/t578.scala
diff --git a/test/files/pos/t5796.scala b/test/files/pos/t5796.scala
new file mode 100644
index 0000000..d05350c
--- /dev/null
+++ b/test/files/pos/t5796.scala
@@ -0,0 +1,8 @@
+object Bug {
+ def foo() {
+ val v = {
+ lazy val s = 0
+ s
+ }
+ }
+}
diff --git a/test/files/pos/t5809.flags b/test/files/pos/t5809.flags
new file mode 100644
index 0000000..e93641e
--- /dev/null
+++ b/test/files/pos/t5809.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t5809.scala b/test/files/pos/t5809.scala
new file mode 100644
index 0000000..133e13c
--- /dev/null
+++ b/test/files/pos/t5809.scala
@@ -0,0 +1,5 @@
+package object foo {
+ implicit class PimpedInt(foo: Int) {
+ def bar = ???
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t5829.scala b/test/files/pos/t5829.scala
new file mode 100644
index 0000000..236045e
--- /dev/null
+++ b/test/files/pos/t5829.scala
@@ -0,0 +1,18 @@
+trait Universe {
+ type Tree
+
+ type SymTree <: Tree
+ type NameTree <: Tree
+ type RefTree <: SymTree with NameTree
+
+ type Ident <: RefTree
+ type Select <: RefTree
+}
+
+object Test extends App {
+ val universe: Universe = null
+ import universe._
+ def select: Select = ???
+ def ident: Ident = ???
+ List(select, ident)
+}
\ No newline at end of file
diff --git a/test/files/pos/t5846.scala b/test/files/pos/t5846.scala
new file mode 100644
index 0000000..b06f5ac
--- /dev/null
+++ b/test/files/pos/t5846.scala
@@ -0,0 +1,10 @@
+
+
+
+
+/** Return the most general sorted map type. */
+object Test extends App {
+
+ val empty: collection.SortedMap[String, String] = collection.SortedMap.empty[String, String]
+
+}
diff --git a/test/files/pos/t5853.scala b/test/files/pos/t5853.scala
new file mode 100644
index 0000000..21d8020
--- /dev/null
+++ b/test/files/pos/t5853.scala
@@ -0,0 +1,55 @@
+
+
+
+
+
+
+
+final class C(val x: Int) extends AnyVal {
+ def ppp[@specialized(Int) T](y: T) = ()
+}
+
+
+class Foo {
+ def f = new C(1) ppp 2
+}
+
+
+/* Original SI-5853 test-case. */
+
+object Bippy {
+ implicit final class C(val x: Int) extends AnyVal {
+ def +++[@specialized T](y: T) = ()
+ }
+ def f = 1 +++ 2
+}
+
+
+/* Few more examples. */
+
+final class C2(val x: Int) extends AnyVal {
+ def +++[@specialized(Int) T](y: T) = ()
+}
+
+
+class Foo2 {
+ def f = new C2(1) +++ 2
+}
+
+
+object Arrow {
+ implicit final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal {
+ @inline def ->>[B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
+ }
+
+ def foo = 1 ->> 2
+}
+
+
+object SpecArrow {
+ implicit final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal {
+ @inline def ->> [@specialized(Int) B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
+ }
+
+ def foo = 1 ->> 2
+}
diff --git a/test/files/pos/t5859.scala b/test/files/pos/t5859.scala
new file mode 100644
index 0000000..2a31e68
--- /dev/null
+++ b/test/files/pos/t5859.scala
@@ -0,0 +1,15 @@
+
+class A {
+ def f(xs: List[Int], ys: AnyRef*) = ()
+ def f(xs: AnyRef*) = ()
+
+ f()
+ f(List[AnyRef](): _*)
+ f(List(): _*)
+ f(Nil: _*)
+ f(Array(): _*)
+ f(Array[AnyRef](): _*)
+ f(List(1))
+ f(List(1), Nil: _*)
+ f(List(1), Array(): _*)
+}
diff --git a/test/files/pos/t5862.scala b/test/files/pos/t5862.scala
new file mode 100644
index 0000000..e3006dd
--- /dev/null
+++ b/test/files/pos/t5862.scala
@@ -0,0 +1,38 @@
+package test
+
+import java.io.DataOutput
+import java.io.DataInput
+
+/** Interface for writing outputs from a DoFn. */
+trait Emitter[A] {
+ def emit(value: A): Unit
+}
+
+/** A wrapper for a 'map' function tagged for a specific output channel. */
+abstract class TaggedMapper[A, K, V]
+ (val tags: Set[Int])
+ (implicit val mA: Manifest[A], val wtA: WireFormat[A],
+ val mK: Manifest[K], val wtK: WireFormat[K], val ordK: Ordering[K],
+ val mV: Manifest[V], val wtV: WireFormat[V])
+ extends Serializable {
+}
+
+/** Type-class for sending types across the Hadoop wire. */
+trait WireFormat[A]
+
+class MapReduceJob {
+ trait DataSource
+
+ import scala.collection.mutable.{ Set => MSet, Map => MMap }
+ private val mappers: MMap[DataSource, MSet[TaggedMapper[_, _, _]]] = MMap.empty
+
+ def addTaggedMapper[A, K, V](input: DataSource, m: TaggedMapper[A, K, V]): Unit = {
+ if (!mappers.contains(input))
+ mappers += (input -> MSet(m))
+ else
+ mappers(input) += m // : Unit
+
+ m.tags.foreach { tag =>
+ }
+ }
+}
diff --git a/test/files/pos/t5877.scala b/test/files/pos/t5877.scala
new file mode 100644
index 0000000..c7827df
--- /dev/null
+++ b/test/files/pos/t5877.scala
@@ -0,0 +1,14 @@
+package foo {
+ class Foo
+
+ object Test {
+ new Foo().huzzah
+ }
+}
+
+package object foo {
+ // Crasher: No synthetics for method PimpedFoo2: synthetics contains
+ implicit class PimpedFoo2(value: Foo) {
+ def huzzah = ""
+ }
+}
diff --git a/test/files/pos/t5877b.scala b/test/files/pos/t5877b.scala
new file mode 100644
index 0000000..6b8cbd4
--- /dev/null
+++ b/test/files/pos/t5877b.scala
@@ -0,0 +1,13 @@
+package foo
+
+class Foo
+
+object Test {
+ new Foo().huzzah
+}
+
+object `package` {
+ implicit class PimpedFoo2(value: Foo) {
+ def huzzah = ""
+ }
+}
diff --git a/test/files/pos/t5886.scala b/test/files/pos/t5886.scala
new file mode 100644
index 0000000..0661873
--- /dev/null
+++ b/test/files/pos/t5886.scala
@@ -0,0 +1,18 @@
+object A {
+ def f0[T](x: T): T = x
+ def f1[T](x: => T): T = x
+ def f2[T](x: () => T): T = x()
+
+ f0(this.getClass) // ok
+ f1(this.getClass)
+ f2(this.getClass) // ok
+
+ // a.scala:7: error: type mismatch;
+ // found : Class[_ <: A.type]
+ // required: Class[?0(in value x1)] where type ?0(in value x1) <: A.type
+ // Note: A.type >: ?0, but Java-defined class Class is invariant in type T.
+ // You may wish to investigate a wildcard type such as `_ >: ?0`. (SLS 3.2.10)
+ // val x1 = f1(this.getClass)
+ // ^
+ // one error found
+}
diff --git a/test/files/pos/t5892.scala b/test/files/pos/t5892.scala
new file mode 100644
index 0000000..241e598
--- /dev/null
+++ b/test/files/pos/t5892.scala
@@ -0,0 +1,5 @@
+class foo(a: String) extends annotation.StaticAnnotation
+object o {
+ implicit def i2s(i: Int) = ""
+ @foo(1: String) def blerg { }
+}
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t5897.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t5897.flags
diff --git a/test/files/pos/t5897.scala b/test/files/pos/t5897.scala
new file mode 100644
index 0000000..2e9751a
--- /dev/null
+++ b/test/files/pos/t5897.scala
@@ -0,0 +1,6 @@
+// no warning here
+// (strangely, if there's an unreachable code warning *anywhere in this compilation unit*,
+// the non-sensical warning goes away under -Xfatal-warnings)
+class Test {
+ () match { case () => }
+}
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t5899.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t5899.flags
diff --git a/test/files/pos/t5899.scala b/test/files/pos/t5899.scala
new file mode 100644
index 0000000..b16f1f8
--- /dev/null
+++ b/test/files/pos/t5899.scala
@@ -0,0 +1,19 @@
+import scala.tools.nsc._
+
+trait Foo {
+ val global: Global
+ import global.{Name, Symbol, nme}
+
+ case class Bippy(name: Name)
+
+ def f(x: Bippy, sym: Symbol): Int = {
+ // no warning (!) for
+ // val Stable = sym.name.toTermName
+
+ val Stable = sym.name
+ Bippy(Stable) match {
+ case Bippy(nme.WILDCARD) => 1
+ case Bippy(Stable) => 2 // should not be considered unreachable
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t5910.java b/test/files/pos/t5910.java
new file mode 100644
index 0000000..e007a1f
--- /dev/null
+++ b/test/files/pos/t5910.java
@@ -0,0 +1,2 @@
+class Foo {
+};;;;;;;
\ No newline at end of file
diff --git a/test/files/pos/bug592.scala b/test/files/pos/t592.scala
similarity index 100%
rename from test/files/pos/bug592.scala
rename to test/files/pos/t592.scala
diff --git a/test/files/pos/t5930.flags b/test/files/pos/t5930.flags
new file mode 100644
index 0000000..c7d406c
--- /dev/null
+++ b/test/files/pos/t5930.flags
@@ -0,0 +1 @@
+-Ywarn-dead-code -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t5930.scala b/test/files/pos/t5930.scala
new file mode 100644
index 0000000..de9d62c
--- /dev/null
+++ b/test/files/pos/t5930.scala
@@ -0,0 +1,4 @@
+// should not warn about dead code (`matchEnd(throw new MatchError)`)
+ class Test {
+ 0 match { case x: Int => }
+}
\ No newline at end of file
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t5932.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t5932.flags
diff --git a/test/files/pos/t5932.scala b/test/files/pos/t5932.scala
new file mode 100644
index 0000000..d824523
--- /dev/null
+++ b/test/files/pos/t5932.scala
@@ -0,0 +1,15 @@
+class A
+
+case object B extends A
+
+object Test {
+ val x1 = (B: A)
+
+ println(x1 == B) // no warning
+ println(B == x1) // no warning
+
+ val x2 = (B: A with Product)
+
+ println(x2 == B) // no warning
+ println(B == x2) // spurious warning: "always returns false"
+}
diff --git a/test/files/pos/bug595.scala b/test/files/pos/t595.scala
similarity index 100%
rename from test/files/pos/bug595.scala
rename to test/files/pos/t595.scala
diff --git a/test/files/pos/t5953.scala b/test/files/pos/t5953.scala
new file mode 100644
index 0000000..7ba035e
--- /dev/null
+++ b/test/files/pos/t5953.scala
@@ -0,0 +1,16 @@
+import scala.collection.{ mutable, immutable, generic, GenTraversableOnce }
+
+package object foo {
+ @inline implicit class TravOps[A, CC[A] <: GenTraversableOnce[A]](val coll: CC[A]) extends AnyVal {
+ def build[CC2[X]](implicit cbf: generic.CanBuildFrom[Nothing, A, CC2[A]]): CC2[A] = {
+ cbf() ++= coll.toIterator result
+ }
+ }
+}
+
+package foo {
+ object Test {
+ def f1[T](xs: Traversable[T]) = xs.to[immutable.Vector]
+ def f2[T](xs: Traversable[T]) = xs.build[immutable.Vector]
+ }
+}
diff --git a/test/files/pos/t5957/T_1.scala b/test/files/pos/t5957/T_1.scala
new file mode 100644
index 0000000..339dcbf
--- /dev/null
+++ b/test/files/pos/t5957/T_1.scala
@@ -0,0 +1,8 @@
+abstract class T {
+ // see: SI-6109
+ // def t1: Test$Bar
+ def t2: Test#Bar
+ // see: SI-6109
+ // def t3: Test$Baz
+ def t4: Test.Baz
+}
diff --git a/test/files/pos/t5957/Test.java b/test/files/pos/t5957/Test.java
new file mode 100644
index 0000000..4fbd257
--- /dev/null
+++ b/test/files/pos/t5957/Test.java
@@ -0,0 +1,11 @@
+public class Test {
+ public class Bar {
+ public Bar(int i) {
+ }
+ }
+
+ public static class Baz {
+ public Baz(int i) {
+ }
+ }
+}
diff --git a/test/files/pos/t5958.scala b/test/files/pos/t5958.scala
new file mode 100644
index 0000000..3b910f3
--- /dev/null
+++ b/test/files/pos/t5958.scala
@@ -0,0 +1,15 @@
+class Test {
+ def newComponent(u: Universe): u.Component = ???
+
+ class Universe { self =>
+ class Component
+
+ newComponent(this): this.Component // error, but should be fine since this is a stable reference
+ newComponent(self): self.Component // error, but should be fine since this is a stable reference
+ newComponent(self): this.Component // error, but should be fine since this is a stable reference
+ newComponent(this): self.Component // error, but should be fine since this is a stable reference
+
+ val u = this
+ newComponent(u): u.Component // ok
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug596.scala b/test/files/pos/t596.scala
similarity index 100%
rename from test/files/pos/bug596.scala
rename to test/files/pos/t596.scala
diff --git a/test/files/pos/t5967.scala b/test/files/pos/t5967.scala
new file mode 100644
index 0000000..eb9bd6d
--- /dev/null
+++ b/test/files/pos/t5967.scala
@@ -0,0 +1,6 @@
+object Test {
+ def f(a: Int*) = a match {
+ case 0 :: Nil => "List(0)! My favorite Seq!"
+ case _ => a.toString
+ }
+}
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t5968.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t5968.flags
diff --git a/test/files/pos/t5968.scala b/test/files/pos/t5968.scala
new file mode 100644
index 0000000..0093f84
--- /dev/null
+++ b/test/files/pos/t5968.scala
@@ -0,0 +1,8 @@
+object X {
+ def f(e: Either[Int, X.type]) = e match {
+ case Left(i) => i
+ case Right(X) => 0
+ // SI-5986 spurious exhaustivity warning here
+ }
+}
+
diff --git a/test/files/pos/bug599.scala b/test/files/pos/t599.scala
similarity index 100%
rename from test/files/pos/bug599.scala
rename to test/files/pos/t599.scala
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t6008.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t6008.flags
diff --git a/test/files/pos/t6008.scala b/test/files/pos/t6008.scala
new file mode 100644
index 0000000..84ae19b
--- /dev/null
+++ b/test/files/pos/t6008.scala
@@ -0,0 +1,12 @@
+// none of these should complain about exhaustivity
+class Test {
+ // It would fail on the following inputs: (_, false), (_, true)
+ def x(in: (Int, Boolean)) = in match { case (i: Int, b: Boolean) => 3 }
+
+ // There is no warning if the Int is ignored or bound without an explicit type:
+ def y(in: (Int, Boolean)) = in match { case (_, b: Boolean) => 3 }
+
+ // Keeping the explicit type for the Int but dropping the one for Boolean presents a spurious warning again:
+ // It would fail on the following input: (_, _)
+ def z(in: (Int, Boolean)) = in match { case (i: Int, b) => 3 }
+}
\ No newline at end of file
diff --git a/test/files/pos/t6014.scala b/test/files/pos/t6014.scala
new file mode 100644
index 0000000..46e03bb
--- /dev/null
+++ b/test/files/pos/t6014.scala
@@ -0,0 +1,13 @@
+object Test {
+ case class CC[T](key: T)
+ type Alias[T] = Seq[CC[T]]
+
+ def f(xs: Seq[CC[_]]) = xs map { case CC(x) => CC(x) } // ok
+ def g(xs: Alias[_]) = xs map { case CC(x) => CC(x) } // fails
+ // ./a.scala:11: error: missing parameter type for expanded function
+ // The argument types of an anonymous function must be fully known. (SLS 8.5)
+ // Expected type was: ?
+ // def g(xs: Alias[_]) = xs map { case CC(x) => CC(x) } // fails
+ // ^
+ // one error found
+}
\ No newline at end of file
diff --git a/test/files/pos/bug602.scala b/test/files/pos/t602.scala
similarity index 100%
rename from test/files/pos/bug602.scala
rename to test/files/pos/t602.scala
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t6022.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t6022.flags
diff --git a/test/files/pos/t6022.scala b/test/files/pos/t6022.scala
new file mode 100644
index 0000000..522c335
--- /dev/null
+++ b/test/files/pos/t6022.scala
@@ -0,0 +1,7 @@
+class Test {
+ (null: Any) match {
+ case x: AnyRef if false =>
+ case list: Option[_] =>
+ case product: Product => // change Product to String and it's all good
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t6022b.scala b/test/files/pos/t6022b.scala
new file mode 100644
index 0000000..6ceb928
--- /dev/null
+++ b/test/files/pos/t6022b.scala
@@ -0,0 +1,20 @@
+trait A
+trait B
+trait C
+trait AB extends B with A
+
+// two types are mutually exclusive if there is no equality symbol whose constant implies both
+object Test extends App {
+ def foo(x: Any) = x match {
+ case _ : C => println("C")
+ case _ : AB => println("AB")
+ case _ : (A with B) => println("AB'")
+ case _ : B => println("B")
+ case _ : A => println("A")
+ }
+
+ foo(new A {})
+ foo(new B {})
+ foo(new AB{})
+ foo(new C {})
+}
diff --git a/test/files/pos/t6028/t6028_1.scala b/test/files/pos/t6028/t6028_1.scala
new file mode 100644
index 0000000..6edb760
--- /dev/null
+++ b/test/files/pos/t6028/t6028_1.scala
@@ -0,0 +1,3 @@
+class C {
+ def foo(a: Int): Unit = () => a
+}
diff --git a/test/files/pos/t6028/t6028_2.scala b/test/files/pos/t6028/t6028_2.scala
new file mode 100644
index 0000000..f44048c
--- /dev/null
+++ b/test/files/pos/t6028/t6028_2.scala
@@ -0,0 +1,4 @@
+object Test {
+ // ensure that parameter names are untouched by lambdalift
+ new C().foo(a = 0)
+}
diff --git a/test/files/pos/t6029.scala b/test/files/pos/t6029.scala
new file mode 100644
index 0000000..8f1bbb4
--- /dev/null
+++ b/test/files/pos/t6029.scala
@@ -0,0 +1,3 @@
+final case class V[A](x: A) extends AnyVal {
+ def flatMap[B](f: A => V[B]) = if (true) this else f(x)
+}
diff --git a/test/files/pos/t6033.scala b/test/files/pos/t6033.scala
new file mode 100644
index 0000000..60142af
--- /dev/null
+++ b/test/files/pos/t6033.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ val b = new java.math.BigInteger("123")
+ val big1 = BigInt(b)
+ val big2: BigInt = b
+}
diff --git a/test/files/pos/t6034.scala b/test/files/pos/t6034.scala
new file mode 100644
index 0000000..3558d7f
--- /dev/null
+++ b/test/files/pos/t6034.scala
@@ -0,0 +1 @@
+final class OptPlus[+A](val x: A) extends AnyVal { }
diff --git a/test/files/pos/bug604.scala b/test/files/pos/t604.scala
similarity index 100%
rename from test/files/pos/bug604.scala
rename to test/files/pos/t604.scala
diff --git a/test/files/pos/t6040.scala b/test/files/pos/t6040.scala
new file mode 100644
index 0000000..9c00ecd
--- /dev/null
+++ b/test/files/pos/t6040.scala
@@ -0,0 +1,3 @@
+import language.dynamics
+
+class X extends Dynamic
\ No newline at end of file
diff --git a/test/files/pos/t6047.flags b/test/files/pos/t6047.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/pos/t6047.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/pos/t6047.scala b/test/files/pos/t6047.scala
new file mode 100644
index 0000000..bc5f856
--- /dev/null
+++ b/test/files/pos/t6047.scala
@@ -0,0 +1,20 @@
+import scala.reflect.macros.Context
+import java.io.InputStream
+
+object Macros {
+ def unpack[A](input: InputStream): A = macro unpack_impl[A]
+
+ def unpack_impl[A: c.WeakTypeTag](c: Context)(input: c.Expr[InputStream]): c.Expr[A] = {
+ import c.universe._
+
+ def unpackcode(tpe: c.Type): c.Expr[_] = {
+ if (tpe <:< implicitly[c.WeakTypeTag[Traversable[_]]].tpe) {
+
+ }
+ ???
+ }
+
+ unpackcode(implicitly[c.WeakTypeTag[A]].tpe)
+ ???
+ }
+ }
\ No newline at end of file
diff --git a/test/files/pos/bug607.scala b/test/files/pos/t607.scala
similarity index 100%
rename from test/files/pos/bug607.scala
rename to test/files/pos/t607.scala
diff --git a/test/files/pos/t6072.scala b/test/files/pos/t6072.scala
new file mode 100644
index 0000000..e25ebbf
--- /dev/null
+++ b/test/files/pos/t6072.scala
@@ -0,0 +1,3 @@
+class A {
+ object B { def eq(lvl: Int) = ??? }
+}
diff --git a/test/files/pos/t6084.scala b/test/files/pos/t6084.scala
new file mode 100644
index 0000000..1aa1fed
--- /dev/null
+++ b/test/files/pos/t6084.scala
@@ -0,0 +1,15 @@
+package object foo { type X[T, U] = (T => U) }
+
+package foo {
+ abstract class Foo[T, U](val d: T => U) extends (T => U) {
+ def f1(r: X[T, U]) = r match { case x: Foo[_,_] => x.d } // inferred ok
+ def f2(r: X[T, U]): (T => U) = r match { case x: Foo[_,_] => x.d } // dealiased ok
+ def f3(r: X[T, U]): X[T, U] = r match { case x: Foo[_,_] => x.d } // alias not ok
+
+ // x.d : foo.this.package.type.X[?scala.reflect.internal.Types$NoPrefix$?.T, ?scala.reflect.internal.Types$NoPrefix$?.U] ~>scala.this.Function1[?scala.reflect.internal.Types$NoPrefix$?.T, ?scala.reflect.internal.Types$NoPrefix$?.U]
+ // at scala.Predef$.assert(Predef.scala:170)
+ // at scala.tools.nsc.Global.assert(Global.scala:235)
+ // at scala.tools.nsc.ast.TreeGen.mkCast(TreeGen.scala:252)
+ // at scala.tools.nsc.typechecker.Typers$Typer.typedCase(Typers.scala:2263)
+ }
+}
diff --git a/test/files/pos/t6089b.scala b/test/files/pos/t6089b.scala
new file mode 100644
index 0000000..ff7ca15
--- /dev/null
+++ b/test/files/pos/t6089b.scala
@@ -0,0 +1,18 @@
+// this crazy code simply tries to nest pattern matches so that the last call is in a tricky-to-determine
+// tail position (my initial tightenign of tailpos detection for SI-6089 ruled this out)
+class BKTree {
+ @annotation.tailrec
+ final def -?-[AA](a: AA): Boolean = this match {
+ case BKTreeEmpty => false
+ case BKTreeNode(v) => {
+ val d = 1
+ d == 0 || ( Map(1 -> this,2 -> this,3 -> this) get d match {
+ case None => false
+ case Some(w) => w -?- a // can tail call here (since || is shortcutting)
+ })
+ }
+ }
+}
+
+object BKTreeEmpty extends BKTree
+case class BKTreeNode[A](v: A) extends BKTree
\ No newline at end of file
diff --git a/test/files/pos/t6091.flags b/test/files/pos/t6091.flags
new file mode 100644
index 0000000..954eaba
--- /dev/null
+++ b/test/files/pos/t6091.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xlint
diff --git a/test/files/pos/t6091.scala b/test/files/pos/t6091.scala
new file mode 100644
index 0000000..72e663e
--- /dev/null
+++ b/test/files/pos/t6091.scala
@@ -0,0 +1,10 @@
+object Foo { def eq(x:Int) = x }
+
+class X { def ==(other: String) = true }
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ Foo eq 1
+ new X == null
+ }
+}
diff --git a/test/files/pos/bug611.scala b/test/files/pos/t611.scala
similarity index 100%
rename from test/files/pos/bug611.scala
rename to test/files/pos/t611.scala
diff --git a/test/files/pos/t6117.scala b/test/files/pos/t6117.scala
new file mode 100644
index 0000000..6aca84f
--- /dev/null
+++ b/test/files/pos/t6117.scala
@@ -0,0 +1,19 @@
+package test
+
+trait ImportMe {
+ def foo(i: Int) = 1
+ def foo(s: String) = 2
+}
+
+class Test(val importMe: ImportMe) {
+ import importMe._
+ import importMe._
+
+ // A.scala:12: error: reference to foo is ambiguous;
+ // it is imported twice in the same scope by
+ // import importMe._
+ // and import importMe._
+ // println(foo(1))
+ // ^
+ println(foo(1))
+}
diff --git a/test/files/pos/bug613.scala b/test/files/pos/t613.scala
similarity index 100%
rename from test/files/pos/bug613.scala
rename to test/files/pos/t613.scala
diff --git a/test/files/pos/t6145.scala b/test/files/pos/t6145.scala
new file mode 100644
index 0000000..28334d4
--- /dev/null
+++ b/test/files/pos/t6145.scala
@@ -0,0 +1,11 @@
+object Test {
+ // the existential causes a cast and the cast makes searchClass not be in tail position
+ // can we get rid of the useless cast?
+ @annotation.tailrec
+ final def searchClass: Class[_] = {
+ "packageName" match {
+ case _ =>
+ searchClass
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t6146.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t6146.flags
diff --git a/test/files/pos/t6146.scala b/test/files/pos/t6146.scala
new file mode 100644
index 0000000..b5bde82
--- /dev/null
+++ b/test/files/pos/t6146.scala
@@ -0,0 +1,60 @@
+// No unreachable or exhaustiveness warnings, please.
+
+//
+// The reported bug
+//
+
+trait AxisCompanion {
+ sealed trait Format
+ object Format {
+ case object Decimal extends Format
+ case object Integer extends Format
+ // Gives an unrelated warning: The outer reference in this type test cannot be checked at run time.
+ //final case class Time( hours: Boolean = false, millis: Boolean = true ) extends Format
+ }
+}
+object Axis extends AxisCompanion
+class Axis {
+ import Axis._
+ def test( f: Format ) = f match {
+ case Format.Integer => "Int"
+ // case Format.Time( hours, millis ) => "Time"
+ case Format.Decimal => "Dec"
+ }
+}
+
+
+//
+// Some tricksier variations
+//
+
+trait T1[X] {
+ trait T2[Y] {
+ sealed trait Format
+ object Format {
+ case object Decimal extends Format
+ case object Integer extends Format
+ }
+ }
+}
+
+object O1 extends T1[Any] {
+ object O2 extends T2[Any] {
+
+ }
+}
+
+case object Shorty extends O1.O2.Format
+
+class Test1 {
+ import O1.O2._
+ val FI: Format.Integer.type = Format.Integer
+ def test( f: Format ) = {
+ val ff: f.type = f
+ ff match {
+ case FI => "Int"
+ case Format.Decimal => "Dec"
+ case Shorty => "Sho"
+ }
+ }
+}
diff --git a/test/files/pos/bug615.scala b/test/files/pos/t615.scala
similarity index 100%
rename from test/files/pos/bug615.scala
rename to test/files/pos/t615.scala
diff --git a/test/files/pos/t6157.flags b/test/files/pos/t6157.flags
new file mode 100644
index 0000000..0ebca3e
--- /dev/null
+++ b/test/files/pos/t6157.flags
@@ -0,0 +1 @@
+ -optimize
diff --git a/test/files/pos/t6157.scala b/test/files/pos/t6157.scala
new file mode 100644
index 0000000..7463989
--- /dev/null
+++ b/test/files/pos/t6157.scala
@@ -0,0 +1,25 @@
+// SI-6157 - Compiler crash on inlined function and -optimize option
+
+object Test {
+ def main(args: Array[String]) {
+ Console.println(
+ ErrorHandler.defaultIfIOException("String")("String")
+ )
+ }
+}
+
+import java.io.IOException
+
+object ErrorHandler {
+
+ @inline
+ def defaultIfIOException[T](default: => T)(closure: => T): T = {
+ try {
+ closure
+ } catch {
+ case e: IOException =>
+ default
+ }
+ }
+}
+
diff --git a/test/files/pos/bug616.scala b/test/files/pos/t616.scala
similarity index 100%
rename from test/files/pos/bug616.scala
rename to test/files/pos/t616.scala
diff --git a/test/files/pos/t6184.scala b/test/files/pos/t6184.scala
new file mode 100644
index 0000000..83a1306
--- /dev/null
+++ b/test/files/pos/t6184.scala
@@ -0,0 +1,7 @@
+trait Foo[TroubleSome] {
+ type T <: Foo[TroubleSome]
+
+ this match {
+ case e: Foo[_]#T => ???
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t6201.scala b/test/files/pos/t6201.scala
new file mode 100644
index 0000000..366c1f2
--- /dev/null
+++ b/test/files/pos/t6201.scala
@@ -0,0 +1,13 @@
+class Test {
+ class Foo1 {
+ def must(x: scala.xml.Elem) = ()
+ }
+
+ class Foo2 {
+ def must(x: Int) = ()
+ }
+ implicit def toFoo1(s: scala.xml.Elem) = new Foo1()
+ implicit def toFoo2(s: scala.xml.Elem) = new Foo2()
+
+ def is: Unit = { (<a>{"a"}</a>).must(<a>{"b"}</a>) }
+}
\ No newline at end of file
diff --git a/test/files/pos/t6204-a.scala b/test/files/pos/t6204-a.scala
new file mode 100644
index 0000000..bd8d5c4
--- /dev/null
+++ b/test/files/pos/t6204-a.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Bish {
+ def m {
+ object Bash {
+ typeOf[Option[_]]
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t6204-b.scala b/test/files/pos/t6204-b.scala
new file mode 100644
index 0000000..86094d1
--- /dev/null
+++ b/test/files/pos/t6204-b.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+
+object Bosh {
+ def Besh {
+ new {
+ val t = typeOf[Option[_]]
+ val x = t
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t6205.scala b/test/files/pos/t6205.scala
new file mode 100644
index 0000000..02d924f
--- /dev/null
+++ b/test/files/pos/t6205.scala
@@ -0,0 +1,18 @@
+// original code by reporter
+class A[T]
+class Test1 {
+ def x(backing: Map[A[_], Any]) =
+ for( (k: A[kt], v) <- backing)
+ yield (k: A[kt])
+}
+
+// this tests same thing as above, but independent of library classes,
+// earlier expansions eliminated as well as variance (everything's invariant)
+case class Holder[A](a: A)
+class Mapped[A] { def map[T](f: Holder[A] => T): Iterable[T] = ??? }
+class Test2 {
+ def works(backing: Mapped[A[_]]): Iterable[A[_]]
+ = backing.map(x =>
+ x match {case Holder(k: A[kt]) => (k: A[kt])}
+ )
+}
\ No newline at end of file
diff --git a/test/files/pos/t6208.scala b/test/files/pos/t6208.scala
new file mode 100644
index 0000000..dac5713
--- /dev/null
+++ b/test/files/pos/t6208.scala
@@ -0,0 +1,4 @@
+object Test {
+ val col = collection.mutable.Queue(1,2,3)
+ val WORK: collection.mutable.Queue[Int] = col filterNot (_ % 2 == 0)
+}
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t6210.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t6210.flags
diff --git a/test/files/pos/t6210.scala b/test/files/pos/t6210.scala
new file mode 100644
index 0000000..1ce8493
--- /dev/null
+++ b/test/files/pos/t6210.scala
@@ -0,0 +1,21 @@
+abstract sealed trait AST
+abstract sealed trait AExpr extends AST
+case class AAssign(name: String, v: AExpr) extends AExpr
+case class AConstBool(v: Boolean) extends AExpr
+
+trait Ty {}
+case class TInt() extends Ty
+case class TBool() extends Ty
+
+object Foo {
+ def checkExpr(ast: AExpr): Ty = {
+ var astTy:Ty = ast match {
+ case AAssign(nm: String, v:AExpr) => TBool()
+
+ case AConstBool(v: Boolean) => TBool()
+
+ case _ => throw new Exception(s"Unhandled case check(ast: ${ast.getClass})")
+ }
+ astTy
+ }
+}
diff --git a/test/files/pos/t6215.scala b/test/files/pos/t6215.scala
new file mode 100644
index 0000000..2f66892
--- /dev/null
+++ b/test/files/pos/t6215.scala
@@ -0,0 +1 @@
+class Foo(val v: String) extends AnyVal { private def len = v.length ; def f = len }
diff --git a/test/files/pos/t6225.scala b/test/files/pos/t6225.scala
new file mode 100644
index 0000000..d3d30d9
--- /dev/null
+++ b/test/files/pos/t6225.scala
@@ -0,0 +1,20 @@
+
+package library.x {
+ class X {
+ class Foo
+ implicit val foo: Foo = new Foo
+ }
+}
+package library {
+ package object y extends library.x.X
+}
+
+object ko {
+ import library.y.{Foo, foo}
+ implicitly[Foo]
+}
+
+object ko2 {
+ import library.y._
+ implicitly[Foo]
+}
diff --git a/test/files/pos/t6245/Base.java b/test/files/pos/t6245/Base.java
new file mode 100644
index 0000000..651ea08
--- /dev/null
+++ b/test/files/pos/t6245/Base.java
@@ -0,0 +1,5 @@
+package t1;
+
+public class Base {
+ protected Vis inner;
+}
diff --git a/test/files/pos/t6245/Foo.scala b/test/files/pos/t6245/Foo.scala
new file mode 100644
index 0000000..f5f997f
--- /dev/null
+++ b/test/files/pos/t6245/Foo.scala
@@ -0,0 +1,9 @@
+import t1.Vis
+
+abstract class Foo extends t1.Base {
+ trait Nested {
+ def crash() {
+ inner
+ }
+ }
+}
diff --git a/test/files/pos/t6245/Vis.java b/test/files/pos/t6245/Vis.java
new file mode 100644
index 0000000..4267f4e
--- /dev/null
+++ b/test/files/pos/t6245/Vis.java
@@ -0,0 +1,3 @@
+package t1;
+
+public class Vis { }
diff --git a/test/files/pos/t6274.scala b/test/files/pos/t6274.scala
new file mode 100644
index 0000000..cf769fc
--- /dev/null
+++ b/test/files/pos/t6274.scala
@@ -0,0 +1,13 @@
+trait Crash {
+
+ def foo(i: => Int) (j: Int): Int
+
+ def t = {
+ // var count = 0
+ foo {
+ var count = 0
+ count
+ } _
+ }
+
+}
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/pos/t6275.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/pos/t6275.flags
diff --git a/test/files/pos/t6275.scala b/test/files/pos/t6275.scala
new file mode 100644
index 0000000..6b5ec7d
--- /dev/null
+++ b/test/files/pos/t6275.scala
@@ -0,0 +1,11 @@
+
+sealed trait A[T]
+final class B[T] extends A[T]
+
+object ParsedAxis {
+ type BI = B[Int]
+
+ def f1(a: A[Int]) = a match { case b: B[Int] => 3 }
+ def f2(a: A[Int]) = a match { case b: BI => 3 }
+ def f3(a: A[Int]) = a match { case b: B[t] => 3 }
+}
diff --git a/test/files/pos/t6278-synth-def.scala b/test/files/pos/t6278-synth-def.scala
new file mode 100644
index 0000000..b8b660f
--- /dev/null
+++ b/test/files/pos/t6278-synth-def.scala
@@ -0,0 +1,30 @@
+
+package t6278
+
+import language.implicitConversions
+
+object test {
+ def ok() {
+ class Foo(val i: Int) {
+ def foo[A](body: =>A): A = body
+ }
+ implicit def toFoo(i: Int): Foo = new Foo(i)
+
+ val k = 1
+ k foo println("k?")
+ val j = 2
+ }
+ def nope() {
+ implicit class Foo(val i: Int) {
+ def foo[A](body: =>A): A = body
+ }
+
+ val k = 1
+ k foo println("k?")
+ //lazy
+ val j = 2
+ }
+ def main(args: Array[String]) {
+ ok(); nope()
+ }
+}
diff --git a/test/files/pos/bug628.scala b/test/files/pos/t628.scala
similarity index 100%
rename from test/files/pos/bug628.scala
rename to test/files/pos/t628.scala
diff --git a/test/files/pos/t6311.scala b/test/files/pos/t6311.scala
new file mode 100644
index 0000000..d27ad2f
--- /dev/null
+++ b/test/files/pos/t6311.scala
@@ -0,0 +1,5 @@
+class A {
+ def fooMinimal[T, Coll <: Traversable[T]](msg: String)(param1: Traversable[T])(param2: Coll): Traversable[T] = throw new Exception()
+
+ fooMinimal("")(List(1))(List(2))
+}
diff --git a/test/files/pos/t6335.scala b/test/files/pos/t6335.scala
new file mode 100644
index 0000000..50e3409
--- /dev/null
+++ b/test/files/pos/t6335.scala
@@ -0,0 +1,25 @@
+object E extends Z {
+ def X = 3
+ implicit class X(val i: Int) {
+ def xx = i
+ }
+
+ def Y(a: Any) = 0
+ object Y
+ implicit class Y(val i: String) { def yy = i }
+
+ implicit class Z(val i: Boolean) { def zz = i }
+}
+
+trait Z {
+ def Z = 0
+}
+
+object Test {
+ import E._
+ 0.xx
+
+ "".yy
+
+ true.zz
+}
diff --git a/test/files/pos/t6358.scala b/test/files/pos/t6358.scala
new file mode 100644
index 0000000..25539c8
--- /dev/null
+++ b/test/files/pos/t6358.scala
@@ -0,0 +1,6 @@
+class L(val t: Int) extends AnyVal {
+ def lazyString = {
+ lazy val x = t.toString
+ () => x
+ }
+}
diff --git a/test/files/pos/t6358_2.scala b/test/files/pos/t6358_2.scala
new file mode 100644
index 0000000..7c2beb6
--- /dev/null
+++ b/test/files/pos/t6358_2.scala
@@ -0,0 +1,6 @@
+class Y[T](val i: Option[T]) extends AnyVal {
+ def q: List[T] = {
+ lazy val e: List[T] = i.toList
+ e
+ }
+}
diff --git a/test/files/pos/t6367.scala b/test/files/pos/t6367.scala
new file mode 100644
index 0000000..1214be7
--- /dev/null
+++ b/test/files/pos/t6367.scala
@@ -0,0 +1,34 @@
+package play.api.libs.json.util
+
+trait FunctionalCanBuild[M[_]]{
+ def apply[A,B](ma:M[A], mb:M[B]):M[A ~ B]
+}
+
+trait Variant[M[_]]
+
+trait Functor[M[_]] extends Variant[M]{
+ def fmap[A,B](m:M[A], f: A => B): M[B]
+}
+
+case class ~[A,B](_1:A,_2:B)
+
+class FunctionalBuilder[M[_]](canBuild:FunctionalCanBuild[M]){
+ class CanBuild20[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20](
+ m1:M[A1 ~ A2 ~ A3 ~ A4 ~ A5 ~ A6 ~ A7 ~ A8 ~ A9 ~ A10 ~ A11 ~ A12 ~ A13 ~ A14 ~ A15 ~ A16 ~ A17 ~ A18 ~ A19],
+ m2:M[A20]
+ ) {
+
+ def ~[A21](m3:M[A21]) = new CanBuild21(canBuild(m1,m2),m3)
+
+ def apply[B](f: (A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20) => B)(implicit fu:Functor[M]): M[B] =
+ fu.fmap[A1 ~ A2 ~ A3 ~ A4 ~ A5 ~ A6 ~ A7 ~ A8 ~ A9 ~ A10 ~ A11 ~ A12 ~ A13 ~ A14 ~ A15 ~ A16 ~ A17 ~ A18 ~ A19 ~ A20, B](
+ canBuild(m1, m2),
+ { case a1 ~ a2 ~ a3 ~ a4 ~ a5 ~ a6 ~ a7 ~ a8 ~ a9 ~ a10 ~ a11 ~ a12 ~ a13 ~ a14 ~ a15 ~ a16 ~ a17 ~ a18 ~ a19 ~ a20 =>
+ f(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20) }
+ )
+ }
+
+ class CanBuild21[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21](m1:M[A1 ~ A2 ~ A3 ~ A4 ~ A5 ~ A6 ~ A7 ~ A8 ~ A9 ~ A10 ~ A11 ~ A12 ~ A13 ~ A14 ~ A15 ~ A16 ~ A17 ~ A18 ~ A19 ~ A20], m2:M[A21]){
+ }
+
+}
diff --git a/test/files/pos/t6386.scala b/test/files/pos/t6386.scala
new file mode 100644
index 0000000..85098a7
--- /dev/null
+++ b/test/files/pos/t6386.scala
@@ -0,0 +1,5 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ reify(manifest[Some[_]])
+}
\ No newline at end of file
diff --git a/test/files/pos/bug640.scala b/test/files/pos/t640.scala
similarity index 100%
rename from test/files/pos/bug640.scala
rename to test/files/pos/t640.scala
diff --git a/test/files/pos/t6479.scala b/test/files/pos/t6479.scala
new file mode 100644
index 0000000..c463bc5
--- /dev/null
+++ b/test/files/pos/t6479.scala
@@ -0,0 +1,56 @@
+object TailrecAfterTryCatch {
+
+ @annotation.tailrec
+ final def good1() {
+ 1 match {
+ case 2 => {
+ try {
+ // return
+ } catch {
+ case e: ClassNotFoundException =>
+ }
+ good1()
+ }
+ }
+ }
+
+ @annotation.tailrec
+ final def good2() {
+ //1 match {
+ // case 2 => {
+ try {
+ return
+ } catch {
+ case e: ClassNotFoundException =>
+ }
+ good2()
+ // }
+ //}
+ }
+
+ @annotation.tailrec
+ final def good3() {
+ val 1 = 2
+ try {
+ return
+ } catch {
+ case e: ClassNotFoundException =>
+ }
+ good3()
+ }
+
+ @annotation.tailrec
+ final def bad() {
+ 1 match {
+ case 2 => {
+ try {
+ return
+ } catch {
+ case e: ClassNotFoundException =>
+ }
+ bad()
+ }
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/test/files/pos/t6482.scala b/test/files/pos/t6482.scala
new file mode 100644
index 0000000..24ea38e
--- /dev/null
+++ b/test/files/pos/t6482.scala
@@ -0,0 +1,11 @@
+final class TraversableOnceOps[+A](val collection: TraversableOnce[A]) extends AnyVal {
+ def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] =
+ if (collection.isEmpty) None else Some(collection.reduceLeft[B](op))
+}
+// error: type arguments [B] do not conform to method reduceLeft's type parameter bounds [B >: A]
+// if (collection.isEmpty) None else Some(collection.reduceLeft[B](op))
+// ^
+
+class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal {
+ def baz[B >: A](x: B): List[B] = x :: xs
+}
diff --git a/test/files/pos/t6485a/Macros_1.scala b/test/files/pos/t6485a/Macros_1.scala
new file mode 100644
index 0000000..85c2d5d
--- /dev/null
+++ b/test/files/pos/t6485a/Macros_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def crash(c: Context): c.Expr[Unit] = c.universe.reify(())
+}
\ No newline at end of file
diff --git a/test/files/pos/t6485a/Test_2.scala b/test/files/pos/t6485a/Test_2.scala
new file mode 100644
index 0000000..54e260a
--- /dev/null
+++ b/test/files/pos/t6485a/Test_2.scala
@@ -0,0 +1,5 @@
+import scala.language.experimental.macros
+
+final class Ops[T](val x: T) extends AnyVal {
+ def f = macro Macros.crash
+}
diff --git a/test/files/pos/t6485b/Test.scala b/test/files/pos/t6485b/Test.scala
new file mode 100644
index 0000000..382df1c
--- /dev/null
+++ b/test/files/pos/t6485b/Test.scala
@@ -0,0 +1,10 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+
+final class Ops[T](val x: T) extends AnyVal {
+ def f = macro Macros.crash
+}
+
+object Macros {
+ def crash(c: Context): c.Expr[Unit] = c.universe.reify(())
+}
\ No newline at end of file
diff --git a/test/files/pos/t6499.scala b/test/files/pos/t6499.scala
new file mode 100644
index 0000000..db37657
--- /dev/null
+++ b/test/files/pos/t6499.scala
@@ -0,0 +1,3 @@
+object Test {
+ Map(): Map[_, Int] with Map[_, Int]
+}
diff --git a/test/files/pos/bug651.scala b/test/files/pos/t651.scala
similarity index 100%
rename from test/files/pos/bug651.scala
rename to test/files/pos/t651.scala
diff --git a/test/files/pos/t6514.scala b/test/files/pos/t6514.scala
new file mode 100644
index 0000000..7c58605
--- /dev/null
+++ b/test/files/pos/t6514.scala
@@ -0,0 +1,11 @@
+object Test {
+ def e(msg: String) = new Exception(msg)
+
+ // this code ain't dead.
+ def a(b: Boolean) = {
+ b match {
+ case true => throw e("true")
+ case false => throw e("false")
+ }
+ }
+}
diff --git a/test/files/pos/t6516.scala b/test/files/pos/t6516.scala
new file mode 100644
index 0000000..c004055
--- /dev/null
+++ b/test/files/pos/t6516.scala
@@ -0,0 +1,19 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+import scala.collection.TraversableLike
+
+// This one compiles
+object Test {
+ type Alias[T, CC[_]] = Context { type PrefixType = TraversableLike[T, CC[T]] }
+ def f() = macro f_impl
+ def f_impl(c: Alias[Int, List])() = ???
+}
+
+// This one doesn't
+object Test2 {
+ type Ctx = scala.reflect.macros.Context
+ type Alias[T, CC[_]] = Ctx { type PrefixType = TraversableLike[T, CC[T]] }
+
+ def f() = macro f_impl
+ def f_impl(c: Alias[Int, List])() = ???
+}
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/pos/t6537.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/pos/t6537.flags
diff --git a/test/files/pos/t6537.scala b/test/files/pos/t6537.scala
new file mode 100644
index 0000000..d0ca3ba
--- /dev/null
+++ b/test/files/pos/t6537.scala
@@ -0,0 +1,16 @@
+package tester
+
+object PatMatWarning {
+
+ sealed trait X
+ sealed trait Y
+
+ def f(x: X) = x match {
+ case _: Y => false
+ case _ => true
+ }
+
+ class X1 extends X
+ class Y1 extends Y
+ class Z1 extends X with Y
+}
diff --git a/test/files/pos/t6547.flags b/test/files/pos/t6547.flags
new file mode 100644
index 0000000..c9b68d7
--- /dev/null
+++ b/test/files/pos/t6547.flags
@@ -0,0 +1 @@
+-optimise
diff --git a/test/files/pos/t6547.scala b/test/files/pos/t6547.scala
new file mode 100644
index 0000000..53bd798
--- /dev/null
+++ b/test/files/pos/t6547.scala
@@ -0,0 +1,6 @@
+trait ConfigurableDefault[@specialized V] {
+ def fillArray(arr: Array[V], v: V) = (arr: Any) match {
+ case x: Array[Int] => null
+ case x: Array[Long] => v.asInstanceOf[Long]
+ }
+}
diff --git a/test/files/pos/t6551.scala b/test/files/pos/t6551.scala
new file mode 100644
index 0000000..8bb396a
--- /dev/null
+++ b/test/files/pos/t6551.scala
@@ -0,0 +1,13 @@
+import scala.language.dynamics
+
+object Test {
+ def main(args: Array[String]) {
+ class Lenser[T] extends Dynamic {
+ def selectDynamic(propName: String) = ???
+ }
+
+ def lens[T] = new Lenser[T]
+
+ val qq = lens[String]
+ }
+}
diff --git a/test/files/pos/t6552.scala b/test/files/pos/t6552.scala
new file mode 100644
index 0000000..98e686a
--- /dev/null
+++ b/test/files/pos/t6552.scala
@@ -0,0 +1,8 @@
+object Repros {
+ class Bar {}
+ class Baz(val myFoo: Foo) { }
+ trait Foo {
+ this: Bar =>
+ val thing = new Baz(this)
+ }
+}
diff --git a/test/files/pos/t6556.scala b/test/files/pos/t6556.scala
new file mode 100644
index 0000000..e1a6f49
--- /dev/null
+++ b/test/files/pos/t6556.scala
@@ -0,0 +1,32 @@
+package nl.ndervorst.commons.scalapimps
+
+trait Adapter[X] {self =>
+ type This = self.type
+ val adaptee: X
+ val adapt: This = self
+}
+
+object Adapter {
+ implicit def adaptee[Adaptee](adapter: Adapter[Adaptee]) = adapter.adaptee
+}
+
+
+
+object IterableW {
+ def zipMerge[E](it1: Iterable[E], it2: Iterable[E])(implicit o: Ordering[E]): Iterable[(Option[E], Option[E])] = null
+}
+
+
+class Series[X: Ordering, Y](val adaptee: Iterable[(X, Y)]) extends Adapter[Iterable[(X, Y)]] {
+ val order = implicitly[Ordering[X]]
+ def zipMerge(other: Series[X, Y]): Series[X, (Option[Y], Option[Y])] = IterableW.zipMerge(this, other)(new Ordering[(X, Y)] {
+ def compare(xy1: (X, Y), xy2: (X, Y)) = order.compare(xy1._1, xy2._1)
+ }).map {
+ case _ => null
+ }
+}
+
+
+object Series {
+ implicit def wrap[X: Ordering, Y](itble: Iterable[(X, Y)]): Series[X, Y] = new Series(itble)
+}
diff --git a/test/files/pos/t6562.scala b/test/files/pos/t6562.scala
new file mode 100644
index 0000000..eec7aa5
--- /dev/null
+++ b/test/files/pos/t6562.scala
@@ -0,0 +1,14 @@
+class Test {
+
+ @inline
+ def foo {
+ def it = new {}
+ (_: Any) => it
+ }
+
+ @inline
+ private def bar {
+ def it = new {}
+ (_: Any) => it
+ }
+}
diff --git a/test/files/pos/t6575a.scala b/test/files/pos/t6575a.scala
new file mode 100644
index 0000000..f128714
--- /dev/null
+++ b/test/files/pos/t6575a.scala
@@ -0,0 +1,15 @@
+trait X { def foo: PartialFunction[Int, Int] }
+
+trait Y extends X {
+ // Inferred type was AbstractPartialFunction[Int, Int] with Serializable
+ abstract override def foo = { case i => super.foo(i) * 2 }
+}
+trait Z extends X {
+ // ditto
+ abstract override def foo = { case i => super.foo(i) + 3 }
+}
+
+trait Comb extends Y with Z {
+ // ... which led to a type error here.
+ abstract override def foo: PartialFunction[Int, Int] = { case i => super.foo(i) - 2 }
+}
diff --git a/test/files/pos/t6575b.scala b/test/files/pos/t6575b.scala
new file mode 100644
index 0000000..d3e58b2
--- /dev/null
+++ b/test/files/pos/t6575b.scala
@@ -0,0 +1,17 @@
+// inferred types were okay here as Function nodes aren't
+// translated into anoymous subclasses of AbstractFunctionN
+// until after the typer.
+//
+// So this test is just confirmation.
+trait X { def foo: Function1[Int, Int] }
+
+trait Y extends X {
+ abstract override def foo = { case i => super.foo(i) * 2 }
+}
+trait Z extends X {
+ abstract override def foo = { case i => super.foo(i) + 3 }
+}
+
+trait Comb extends Y with Z {
+ abstract override def foo: Function1[Int, Int] = { case i => super.foo(i) - 2 }
+}
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/pos/t6595.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/pos/t6595.flags
diff --git a/test/files/pos/t6595.scala b/test/files/pos/t6595.scala
new file mode 100644
index 0000000..437c0bc
--- /dev/null
+++ b/test/files/pos/t6595.scala
@@ -0,0 +1,18 @@
+import scala.annotation.switch
+
+class Foo extends {
+ final val b0 = 5
+} with AnyRef {
+ final val b1 = 10
+
+ // Using the @switch annotation as a means of testing that the
+ // type inferred for b0 is Int(5) and not Int. Only in the former
+ // case can a switch be generated.
+ def f(p: Int) = (p: @switch) match {
+ case `b0` => 1
+ case `b1` => 2
+ case 15 => 3
+ case 20 => 4
+ case _ => 5
+ }
+}
diff --git a/test/files/pos/t6600.scala b/test/files/pos/t6600.scala
new file mode 100644
index 0000000..1e81378
--- /dev/null
+++ b/test/files/pos/t6600.scala
@@ -0,0 +1,8 @@
+final class Natural extends scala.math.ScalaNumber with scala.math.ScalaNumericConversions {
+ def intValue(): Int = 0
+ def longValue(): Long = 0L
+ def floatValue(): Float = 0.0F
+ def doubleValue(): Double = 0.0D
+ def isWhole(): Boolean = false
+ def underlying() = this
+}
diff --git a/test/files/pos/t6601/PrivateValueClass_1.scala b/test/files/pos/t6601/PrivateValueClass_1.scala
new file mode 100644
index 0000000..85c3687
--- /dev/null
+++ b/test/files/pos/t6601/PrivateValueClass_1.scala
@@ -0,0 +1 @@
+class V private (val a: Any) extends AnyVal
\ No newline at end of file
diff --git a/test/files/pos/t6601/UsePrivateValueClass_2.scala b/test/files/pos/t6601/UsePrivateValueClass_2.scala
new file mode 100644
index 0000000..461b839
--- /dev/null
+++ b/test/files/pos/t6601/UsePrivateValueClass_2.scala
@@ -0,0 +1,10 @@
+object Test {
+ // After the first attempt to make seprately compiled value
+ // classes respect the privacy of constructors, we got:
+ //
+ // exception when typing v.a().==(v.a())/class scala.reflect.internal.Trees$Apply
+ // constructor V in class V cannot be accessed in object Test in file test/files/pos/t6601/UsePrivateValueClass_2.scala
+ // scala.reflect.internal.Types$TypeError: constructor V in class V cannot be accessed in object Test
+ def foo(v: V) = v.a == v.a
+ def bar(v: V) = v == v
+}
diff --git a/test/files/pos/bug661.scala b/test/files/pos/t661.scala
similarity index 100%
rename from test/files/pos/bug661.scala
rename to test/files/pos/t661.scala
diff --git a/test/files/pos/t6624.scala b/test/files/pos/t6624.scala
new file mode 100644
index 0000000..1a92b92
--- /dev/null
+++ b/test/files/pos/t6624.scala
@@ -0,0 +1,28 @@
+sealed trait KList[+M[_]]
+
+case class KCons[M[_], +T <: KList[M]](
+ tail: T
+) extends KList[M]
+
+case class KNil[M[_]]() extends KList[M]
+
+object Test {
+ val klist: KCons[Option, KCons[Option, KCons[Option, KNil[Nothing]]]] = ???
+
+ // crashes with
+ // "Exception in thread "main" scala.reflect.internal.Types$TypeError: value _1 is not a member
+ // of KCons[Option,KCons[Option,KNil[Nothing]]]"
+ klist match {
+ case KCons(KCons(KCons(_))) =>
+ }
+
+ // fails with a similar message as an error, rather than a crash.
+ klist match {
+ case KCons(KCons(_)) =>
+ }
+
+ // succeeds
+ klist match {
+ case KCons(_) =>
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t6648.scala b/test/files/pos/t6648.scala
new file mode 100644
index 0000000..9593ebf
--- /dev/null
+++ b/test/files/pos/t6648.scala
@@ -0,0 +1,24 @@
+abstract class Node extends NodeSeq
+trait NodeSeq extends Seq[Node]
+object NodeSeq {
+ implicit def seqToNodeSeq(ns: Seq[Node]): NodeSeq = ???
+ def foo[B, That](f: Seq[B])(implicit bf: scala.collection.generic.CanBuildFrom[Seq[Int], B, That]): That = ???
+}
+
+class Transformer {
+ def apply(nodes: Any): Any = ???
+}
+
+object transformer1 extends Transformer {
+ // Adding explicit type arguments, or making the impilcit view
+ // seqToNodeSeq explicit avoids the crash
+ NodeSeq.foo {
+ // These both avoid the crash:
+ // val t = new Transformer {}; t.apply(null)
+ // new Transformer().apply(null)
+ new Transformer {}.apply(null)
+
+ null: NodeSeq
+ }: NodeSeq
+}
+
diff --git a/test/files/pos/t6651.scala b/test/files/pos/t6651.scala
new file mode 100644
index 0000000..55a3b74
--- /dev/null
+++ b/test/files/pos/t6651.scala
@@ -0,0 +1,33 @@
+class YouAreYourself[A <: AnyRef](val you: A) extends AnyVal {
+ def yourself: you.type = you
+}
+
+object Test {
+ val s = ""
+ val s1: s.type = new YouAreYourself[s.type](s).yourself
+}
+
+trait Path {
+ type Dep <: AnyRef
+}
+
+final class ValueClass[P <: Path](val path: P) extends AnyVal {
+ import path.Dep
+
+ def apply(dep: Dep)(d2: dep.type, foo: Int): (Dep, d2.type) = (d2, d2)
+
+ // This generates dodgy code; note `ValueClass.this`:
+ //
+ // final def bounds$extension[D >: Nothing <: ValueClass.this.path.Dep,
+ // P >: Nothing <: Path]
+ // ($this: ValueClass[P])
+ // (dep: D)
+ // (d2: dep.type, foo: Int): (D, d2.type) = scala.Tuple2.apply[D, d2.type](d2, d2);
+ //
+ // Nothing crashes down the line, but it certainly doesn't conform to best-practices.
+ //
+ // An better alternative would be to add a type parameter for the (singleton) type of
+ // the wrapped value.
+ def bounds[D <: Dep](dep: D)(d2: dep.type, foo: Int): (D, d2.type) = (d2, d2)
+}
+
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t6675.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t6675.flags
diff --git a/test/files/pos/t6675.scala b/test/files/pos/t6675.scala
new file mode 100644
index 0000000..f3bebea
--- /dev/null
+++ b/test/files/pos/t6675.scala
@@ -0,0 +1,20 @@
+object LeftOrRight {
+ def unapply[A](value: Either[A, A]): Option[A] = value match {
+ case scala.Left(x) => Some(x)
+ case scala.Right(x) => Some(x)
+ }
+}
+
+object Test {
+ (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match {
+ case LeftOrRight(pair @ (a, b)) => a // false -Xlint warning: "extractor pattern binds a single value to a Product2 of type (Int, Int)"
+ }
+
+ (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match {
+ case LeftOrRight((a, b)) => a // false -Xlint warning: "extractor pattern binds a single value to a Product2 of type (Int, Int)"
+ }
+
+ (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match {
+ case LeftOrRight(a, b) => a // false -Xlint warning: "extractor pattern binds a single value to a Product2 of type (Int, Int)"
+ }
+}
diff --git a/test/files/pos/t6712.scala b/test/files/pos/t6712.scala
new file mode 100644
index 0000000..3c96eb1
--- /dev/null
+++ b/test/files/pos/t6712.scala
@@ -0,0 +1,5 @@
+class H {
+ object O
+
+ def foo() { object O }
+}
diff --git a/test/files/pos/t6722.scala b/test/files/pos/t6722.scala
new file mode 100644
index 0000000..576746c
--- /dev/null
+++ b/test/files/pos/t6722.scala
@@ -0,0 +1,11 @@
+import scala.language.dynamics
+
+class Dyn extends Dynamic {
+ def selectDynamic(s: String): Dyn = new Dyn
+ def get[T]: T = null.asInstanceOf[T]
+}
+
+object Foo {
+ val dyn = new Dyn
+ dyn.foo.bar.baz.get[String]
+}
diff --git a/test/files/pos/bug675.scala b/test/files/pos/t675.scala
similarity index 100%
rename from test/files/pos/bug675.scala
rename to test/files/pos/t675.scala
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t6771.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t6771.flags
diff --git a/test/files/pos/t6771.scala b/test/files/pos/t6771.scala
new file mode 100644
index 0000000..0f0bd4e
--- /dev/null
+++ b/test/files/pos/t6771.scala
@@ -0,0 +1,9 @@
+object Test {
+ type Id[X] = X
+ val a: Id[Option[Int]] = None
+
+ a match {
+ case Some(x) => println(x)
+ case None =>
+ }
+}
diff --git a/test/files/pos/bug684.scala b/test/files/pos/t684.scala
similarity index 100%
rename from test/files/pos/bug684.scala
rename to test/files/pos/t684.scala
diff --git a/test/files/pos/t6846.scala b/test/files/pos/t6846.scala
new file mode 100644
index 0000000..0095664
--- /dev/null
+++ b/test/files/pos/t6846.scala
@@ -0,0 +1,28 @@
+object Test {
+ class Arb[_]
+ implicit def foo[M[_], A]: Arb[M[A]] = null
+ foo: Arb[List[Int]]
+ type ListInt = List[Int]
+ foo: Arb[ListInt]
+}
+
+object Test2 {
+ import scala.collection.immutable.List
+
+ class Carb[_]
+ implicit def narrow[N, M[_], A](x: Carb[M[A]])(implicit ev: N <:< M[A]): Carb[N] = null
+ implicit def bar[M[_], A]: Carb[M[A]] = null
+
+ type ListInt = List[Int]
+
+ val x: List[Int] = List(1)
+ val y: ListInt = List(1)
+
+ type ListSingletonX = x.type
+ type ListSingletonY = y.type
+
+ bar: Carb[List[Int]]
+ bar: Carb[ListInt]
+ bar: Carb[ListSingletonX]
+ bar: Carb[ListSingletonY]
+}
diff --git a/test/files/pos/t6891.flags b/test/files/pos/t6891.flags
new file mode 100644
index 0000000..fe04800
--- /dev/null
+++ b/test/files/pos/t6891.flags
@@ -0,0 +1 @@
+-Ycheck:extmethods -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t6891.scala b/test/files/pos/t6891.scala
new file mode 100644
index 0000000..bed2d0d
--- /dev/null
+++ b/test/files/pos/t6891.scala
@@ -0,0 +1,26 @@
+object O {
+ implicit class Foo[A](val value: String) extends AnyVal {
+ def bippy() = {
+ @annotation.tailrec def loop(x: A): Unit = loop(x)
+ ()
+ }
+
+ def boppy() = {
+ @annotation.tailrec def loop(x: value.type): Unit = loop(x)
+ ()
+ }
+
+ def beppy[C](c: => C) = {
+ () => c
+ @annotation.tailrec def loop(x: value.type): Unit = loop(x)
+ () => c
+ ()
+ }
+ }
+ // uncaught exception during compilation: Types$TypeError("type mismatch;
+ // found : A(in method bippy$extension)
+ // required: A(in class Foo)") @ scala.tools.nsc.typechecker.Contexts$Context.issueCommon(Contexts.scala:396)
+ // error: scala.reflect.internal.Types$TypeError: type mismatch;
+ // found : A(in method bippy$extension)
+ // required: A(in class Foo)
+}
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/pos/t6896.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/pos/t6896.flags
diff --git a/test/files/pos/t6896.scala b/test/files/pos/t6896.scala
new file mode 100644
index 0000000..ab527a8
--- /dev/null
+++ b/test/files/pos/t6896.scala
@@ -0,0 +1,7 @@
+object TooManyMains {
+ def main(args: Array[String]): Unit = {
+ println("Hello, World!")
+ }
+ def main(a: Int, b: Int) = ???
+ def main(s: String, n: String) = ???
+}
diff --git a/test/files/pos/t6897.scala b/test/files/pos/t6897.scala
new file mode 100644
index 0000000..a7a03a1
--- /dev/null
+++ b/test/files/pos/t6897.scala
@@ -0,0 +1,6 @@
+class A {
+ val html = (null: Any) match {
+ case 1 => <xml:group></xml:group>
+ case 2 => <p></p>
+ }
+}
diff --git a/test/files/pos/bug690.scala b/test/files/pos/t690.scala
similarity index 100%
rename from test/files/pos/bug690.scala
rename to test/files/pos/t690.scala
diff --git a/test/files/pos/t6921.scala b/test/files/pos/t6921.scala
new file mode 100644
index 0000000..36e70e5
--- /dev/null
+++ b/test/files/pos/t6921.scala
@@ -0,0 +1,11 @@
+class Message(messageType: String, reason: Option[String])
+
+class ReproForSI6921 {
+
+ private[this] var reason = ""
+
+ def decideElection = {
+ val explanation = None
+ new Message("", reason = explanation)
+ }
+}
diff --git a/test/files/pos/t6925.scala b/test/files/pos/t6925.scala
new file mode 100644
index 0000000..862a6e9
--- /dev/null
+++ b/test/files/pos/t6925.scala
@@ -0,0 +1,9 @@
+class Test {
+ def f[T](xs: Set[T]) /* no expected type to trigger inference */ =
+ xs collect { case x => x }
+
+ def g[T](xs: Set[T]): Set[T] = f[T](xs) // check that f's inferred type is Set[T]
+
+ // check that this type checks:
+ List(1).flatMap(n => Set(1).collect { case w => w })
+}
\ No newline at end of file
diff --git a/test/files/pos/t6925b.scala b/test/files/pos/t6925b.scala
new file mode 100644
index 0000000..ca25146
--- /dev/null
+++ b/test/files/pos/t6925b.scala
@@ -0,0 +1,18 @@
+// code *generated* by test/scaladoc/run/SI-5933.scala
+// duplicated here because it's related to SI-6925
+
+import language.higherKinds
+
+abstract class Base[M[_, _]] {
+ def foo[A, B]: M[(A, B), Any]
+}
+
+class Derived extends Base[PartialFunction] {
+ def foo[AA, BB] /*: PartialFunction[(A, B) => Any]*/ = { case (a, b) => (a: AA, b: BB) }
+}
+
+object Test {
+ lazy val lx = { println("hello"); 3 }
+ def test1(x: Int = lx) = ???
+ def test2(x: Int = lx match { case 0 => 1; case 3 => 4 }) = ???
+}
\ No newline at end of file
diff --git a/test/files/pos/bug694.scala b/test/files/pos/t694.scala
similarity index 100%
rename from test/files/pos/bug694.scala
rename to test/files/pos/t694.scala
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t6942.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t6942.flags
diff --git a/test/files/pos/t6942/Bar.java b/test/files/pos/t6942/Bar.java
new file mode 100644
index 0000000..592f62e
--- /dev/null
+++ b/test/files/pos/t6942/Bar.java
@@ -0,0 +1,235 @@
+package foo;
+
+public enum Bar {
+ ANGUILLA /*("US")*/,
+ ANTIGUA_AND_BARBUDA /*("US")*/,
+ ARGENTINA /*("US")*/,
+ ARUBA /*("US")*/,
+ BAHAMAS /*("US")*/,
+ BARBADOS /*("US")*/,
+ BELIZE /*("US")*/,
+ BERMUDA /*("US")*/,
+ BOLIVIA /*("US")*/,
+ BRAZIL /*("US")*/,
+ BRITISH_VIRGIN_ISLANDS /*("US")*/,
+ CANADA /*("US")*/,
+ CAYMAN_ISLANDS /*("US")*/,
+ CHILE /*("US")*/,
+ CHRISTMAS_ISLANDS /*("US")*/,
+ COCOS /*("US")*/,
+ COLOMBIA /*("US")*/,
+ COSTA_RICA /*("US")*/,
+ CUBA /*("US")*/,
+ DOMINICA /*("US")*/,
+ DOMINICAN_REPUBLIC /*("US")*/,
+ ECUADOR /*("US")*/,
+ EL_SALVADOR /*("US")*/,
+ FALKLAND_ISLANDS /*("US")*/,
+ GRENADA /*("US")*/,
+ GUADALOUPE /*("US")*/,
+ GUATEMALA /*("US")*/,
+ HAITI /*("US")*/,
+ HONDURAS /*("US")*/,
+ NETHERLANDS_ANTILLES /*("US")*/,
+ NICARAGUA /*("US")*/,
+ PANAMA /*("US")*/,
+ PARAGUAY /*("US")*/,
+ PERU /*("US")*/,
+ PUERTO_RICO /*("US")*/,
+ JAMAICA /*("US")*/,
+ MARTINIQUE /*("US")*/,
+ MEXICO /*("US")*/,
+ MONTSERRAT /*("US")*/,
+ ST_KITTS /*("US")*/,
+ ST_LUCIA /*("US")*/,
+ ST_VINCENT /*("US")*/,
+ SUPRA_NATIONAL /*("US")*/,
+ TRINIDAD /*("US")*/,
+ TURKS_AND_CAICOS /*("US")*/,
+ UNITED_STATES /*("US")*/,
+ URUGUAY /*("US")*/,
+ VENEZUELA /*("US")*/,
+ VIRGIN_ISLANDS /*("US")*/,
+
+ AUSTRALIA /*("AP")*/,
+ BANGLADESH /*("AP")*/,
+ BHUTAN /*("AP")*/,
+ CAMBODIA /*("AP")*/,
+ CHINA /*("AP")*/,
+ COOK_ISLANDS /*("AP")*/,
+ EAST_TIMOR /*("AP")*/,
+ FIJI /*("AP")*/,
+ GUAM /*("AP")*/,
+ HONG_KONG /*("AP")*/,
+ INDIA /*("AP")*/,
+ INDONESIA /*("AP")*/,
+ JAPAN /*("AP")*/,
+ KIRIBATI /*("AP")*/,
+ LAOS /*("AP")*/,
+ MACAU /*("AP")*/,
+ MALAYSIA /*("AP")*/,
+ MICRONESIA /*("AP")*/,
+ MONGOLIA /*("AP")*/,
+ MYANMAR /*("AP")*/,
+ NEPAL /*("AP")*/,
+ NEW_CALEDONIA /*("AP")*/,
+ NEW_ZEALAND /*("AP")*/,
+ NORFOLK_ISLAND /*("AP")*/,
+ NORTH_KOREA /*("AP")*/,
+ PAKISTAN /*("AP")*/,
+ PALAU /*("AP")*/,
+ PAPUA_NEW_GUINEA /*("AP")*/,
+ PHILIPPINES /*("AP")*/,
+ PITCAIRN_ISLANDS /*("AP")*/,
+ SAMOA /*("AP")*/,
+ WEST_SAMOA /*("AP")*/,
+ SINGAPORE /*("AP")*/,
+ SOUTH_KOREA /*("AP")*/,
+ SRI_LANKA /*("AP")*/,
+ TAIWAN /*("AP")*/,
+ THAILAND /*("AP")*/,
+ TOKELAU /*("AP")*/,
+ TONGA /*("AP")*/,
+ TUVALU /*("AP")*/,
+ VANUATU /*("AP")*/,
+ VIETNAM /*("AP")*/,
+
+ AFGHANISTAN /*("EU")*/,
+ ALBANIA /*("EU")*/,
+ ALGERIA /*("EU")*/,
+ ANDORRA /*("EU")*/,
+ ANGOLA /*("EU")*/,
+ ARMENIA /*("EU")*/,
+ AUSTRIA /*("EU")*/,
+ AZERBAIJAN /*("EU")*/,
+ BAHRAIN /*("EU")*/,
+ BELARUS /*("EU")*/,
+ BELGIUM /*("EU")*/,
+ BENIN /*("EU")*/,
+ BOSNIA_AND_HERZEGOVINA /*("EU")*/,
+ BOTSWANA /*("EU")*/,
+ BOUVET_ISLAND /*("EU")*/,
+ BRUNEI /*("EU")*/,
+ BULGARIA /*("EU")*/,
+ BURKINA_FASO /*("EU")*/,
+ BURUNDI /*("EU")*/,
+ CAMEROON /*("EU")*/,
+ CAPE_VERDE /*("EU")*/,
+ CHAD /*("EU")*/,
+ COMOROS /*("EU")*/,
+ CONGO /*("EU")*/,
+ CROATIA /*("EU")*/,
+ CYPRUS /*("EU")*/,
+ CZECH_REPUBLIC /*("EU")*/,
+ DR_CONGO /*("EU")*/,
+ DENMARK /*("EU")*/,
+ DJIBOUTI /*("EU")*/,
+ EGYPT /*("EU")*/,
+ EQUATORIAL_GUINEA /*("EU")*/,
+ ERITREA /*("EU")*/,
+ ESTONIA /*("EU")*/,
+ ETHIOPIA /*("EU")*/,
+ FAEROE_ISLANDS /*("EU")*/,
+ FINLAND /*("EU")*/,
+ FRANCE /*("EU")*/,
+ FRENCH_GUIANA /*("EU")*/,
+ GABON /*("EU")*/,
+ GAMBIA /*("EU")*/,
+ GEORGIA /*("EU")*/,
+ GERMANY /*("EU")*/,
+ GHANA /*("EU")*/,
+ GIBRALTAR /*("EU")*/,
+ GREAT_BRITAIN /*("EU")*/,
+ GREECE /*("EU")*/,
+ GREENLAND /*("EU")*/,
+ GUINEA /*("EU")*/,
+ GUINEA_BISSAU /*("EU")*/,
+ GUYANA /*("EU")*/,
+ HUNGARY /*("EU")*/,
+ ICELAND /*("EU")*/,
+ IRAN /*("EU")*/,
+ IRAQ /*("EU")*/,
+ IRELAND /*("EU")*/,
+ ISLE_OF_MAN /*("EU")*/,
+ ISRAEL /*("EU")*/,
+ ITALY /*("EU")*/,
+ IVORY_COAST /*("EU")*/,
+ JERSEY /*("EU")*/,
+ JORDAN /*("EU")*/,
+ KAZAKHSTAN /*("EU")*/,
+ KENYA /*("EU")*/,
+ KUWAIT /*("EU")*/,
+ KYRGYZSTAN /*("EU")*/,
+ LATVIA /*("EU")*/,
+ LEBANON /*("EU")*/,
+ LESOTHO /*("EU")*/,
+ LIBERIA /*("EU")*/,
+ LIBYA /*("EU")*/,
+ LIECHTENSTEIN /*("EU")*/,
+ LITHUANIA /*("EU")*/,
+ LUXEMBOURG /*("EU")*/,
+ MACEDONIA /*("EU")*/,
+ MADAGASCAR /*("EU")*/,
+ MALAWI /*("EU")*/,
+ MALDIVES /*("EU")*/,
+ MALI /*("EU")*/,
+ MALTA /*("EU")*/,
+ MARSHALL_ISLAND /*("EU")*/,
+ MAURITANIA /*("EU")*/,
+ MAURITIUS /*("EU")*/,
+ MAYOTTE /*("EU")*/,
+ MOLDOVA /*("EU")*/,
+ MONACO /*("EU")*/,
+ MOROCCO /*("EU")*/,
+ MOZAMBIQUE /*("EU")*/,
+ NAMIBIA /*("EU")*/,
+ NETHERLANDS /*("EU")*/,
+ NIGER_REPUBLIC /*("EU")*/,
+ NIGERIA /*("EU")*/,
+ NORWAY /*("EU")*/,
+ OMAN /*("EU")*/,
+ PALESTINE /*("EU")*/,
+ POLAND /*("EU")*/,
+ PORTUGAL /*("EU")*/,
+ QATAR /*("EU")*/,
+ REUNION /*("EU")*/,
+ ROMANIA /*("EU")*/,
+ RUSSIA /*("EU")*/,
+ RWANDA /*("EU")*/,
+ SAN_MARINO /*("EU")*/,
+ SAO_TOME /*("EU")*/,
+ SAUDI_ARABIA /*("EU")*/,
+ SENEGAL /*("EU")*/,
+ SERBIA /*("EU")*/,
+ SEYCHELLES /*("EU")*/,
+ SEIRRA_LEONE /*("EU")*/,
+ SLOVAKIA /*("EU")*/,
+ SLOVENIA /*("EU")*/,
+ SOMALIA /*("EU")*/,
+ SOUTH_AFRICA /*("EU")*/,
+ SPAIN /*("EU")*/,
+ ST_HELENA /*("EU")*/,
+ SUDAN /*("EU")*/,
+ SURINAME /*("EU")*/,
+ SVALBARD /*("EU")*/,
+ SWAZILAND /*("EU")*/,
+ SWEDEN /*("EU")*/,
+ SWITZERLAND /*("EU")*/,
+ SYRIA /*("EU")*/,
+ TAJIKSTAN /*("EU")*/,
+ TANZANIA /*("EU")*/,
+ TOGO /*("EU")*/,
+ TUNISIA /*("EU")*/,
+ TURKEY /*("EU")*/,
+ TURKMENISTAN /*("EU")*/,
+ UAE /*("EU")*/,
+ UGANDA /*("EU")*/,
+ UKRAINE /*("EU")*/,
+ UZBEKISTAN /*("EU")*/,
+ VATICAN_CITY /*("EU")*/,
+ WESTERN_SAHARA /*("EU")*/,
+ YEMEN /*("EU")*/,
+ ZAMBIA /*("EU")*/,
+ ZIMBABWE /*("EU")*/;
+
+}
\ No newline at end of file
diff --git a/test/files/pos/t6942/t6942.scala b/test/files/pos/t6942/t6942.scala
new file mode 100644
index 0000000..77963d2
--- /dev/null
+++ b/test/files/pos/t6942/t6942.scala
@@ -0,0 +1,64 @@
+// not a peep out of the pattern matcher's unreachability analysis
+// its budget should suffice for these simple matches (they do have a large search space)
+class Test {
+ import foo.Bar // a large enum
+ def exhaustUnreachabilitysStack_ENUM_STYLE = (null: Bar) match {
+ case Bar.BULGARIA =>
+ case _ =>
+ }
+
+ // lots of strings
+ def exhaustUnreachabilitysStack_StringStyle = "foo" match {
+ case "a" =>
+ case "b" =>
+ case "c" =>
+ case "d" =>
+ case "e" =>
+ case "f" =>
+ case "aa" =>
+ case "ba" =>
+ case "ca" =>
+ case "da" =>
+ case "ea" =>
+ case "f1a" =>
+ case "a1a" =>
+ case "b1a" =>
+ case "c1a" =>
+ case "d1a" =>
+ case "e1a" =>
+ case "f1a2" =>
+ case "f1a0" =>
+ case "a1a2" =>
+ case "b1a2" =>
+ case "c1a2" =>
+ case "d1a2" =>
+ case "e1a2" =>
+ case "f1a3" =>
+ case "_a" =>
+ case "_b" =>
+ case "_c" =>
+ case "_d" =>
+ case "_e" =>
+ case "_f" =>
+ case "_aa" =>
+ case "_ba" =>
+ case "_ca" =>
+ case "_da" =>
+ case "_ea" =>
+ case "_f1a" =>
+ case "_a1a" =>
+ case "_b1a" =>
+ case "_c1a" =>
+ case "_d1a" =>
+ case "_e1a" =>
+ case "_f1a0" =>
+ case "_f1a2" =>
+ case "_a1a2" =>
+ case "_b1a2" =>
+ case "_c1a2" =>
+ case "_d1a2" =>
+ case "_e1a2" =>
+ case "_f1a3" =>
+ case _ =>
+ }
+}
diff --git a/test/files/pos/t6963c.flags b/test/files/pos/t6963c.flags
new file mode 100644
index 0000000..4d6e049
--- /dev/null
+++ b/test/files/pos/t6963c.flags
@@ -0,0 +1 @@
+-Xmigration:2.9 -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t6963c.scala b/test/files/pos/t6963c.scala
new file mode 100644
index 0000000..0b6b5c7
--- /dev/null
+++ b/test/files/pos/t6963c.scala
@@ -0,0 +1,25 @@
+object Test {
+ def f1(x: Any) = x.isInstanceOf[Seq[_]]
+ def f2(x: Any) = x match {
+ case _: Seq[_] => true
+ case _ => false
+ }
+
+ def f3(x: Any) = x match {
+ case _: Array[_] => true
+ case _ => false
+ }
+
+ def f4(x: Any) = x.isInstanceOf[Traversable[_]]
+
+ def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match {
+ case (Some(_: Seq[_]), Nil, _) => 1
+ case (None, List(_: List[_], _), _) => 2
+ case _ => 3
+ }
+
+ def f5: Unit = {
+ import scala.collection.mutable._
+ List(1,2,3,4,5).scanRight(0)(_+_)
+ }
+}
diff --git a/test/files/pos/bug697.scala b/test/files/pos/t697.scala
similarity index 100%
rename from test/files/pos/bug697.scala
rename to test/files/pos/t697.scala
diff --git a/test/files/pos/t6976/Exts_1.scala b/test/files/pos/t6976/Exts_1.scala
new file mode 100644
index 0000000..9b3a69e
--- /dev/null
+++ b/test/files/pos/t6976/Exts_1.scala
@@ -0,0 +1,10 @@
+object Exts {
+ implicit class AnyExts[T](val o: T) extends AnyVal {
+ def moo = "moo!"
+ }
+}
+
+trait Exts {
+ import language.implicitConversions
+ implicit def AnyExts[T](o: T) = Exts.AnyExts(o)
+}
diff --git a/test/files/pos/t6976/ImplicitBug_1.scala b/test/files/pos/t6976/ImplicitBug_1.scala
new file mode 100644
index 0000000..c9031ba
--- /dev/null
+++ b/test/files/pos/t6976/ImplicitBug_1.scala
@@ -0,0 +1,27 @@
+// This one is weird and nasty. Not sure if this is scalac or sbt
+// (tried with 0.12 & 0.12.2-RC2) bug.
+//
+// A level of indirection is required to trigger this bug.
+// Exts seems to need to be defined in separate file.
+//
+// Steps to reproduce:
+// 1. sbt clean
+// 2. sbt run (it works)
+// 3. Comment A & uncomment B.
+// 4. sbt run (it fails)
+// 5. Switch it back & sbt run. It still fails.
+//
+// In this project sbt clean helps. However in a large project where this
+// bug was found compiler crashed even after doing sbt clean. The only
+// way to work around this was to reference Exts object explicitly (C) in
+// the source file using its implicit classes.
+
+// Lets suppose this is a mega-trait combining all sorts of helper
+// functionality.
+trait Support extends Exts
+
+object ImplicitsBug extends App with Support { // A
+// object ImplicitsBug extends App with Exts { // B
+ //Exts // C) this reference helped in the large project.
+ println(3.moo)
+}
diff --git a/test/files/pos/t6976/ImplicitBug_2.scala b/test/files/pos/t6976/ImplicitBug_2.scala
new file mode 100644
index 0000000..2fea5e299
--- /dev/null
+++ b/test/files/pos/t6976/ImplicitBug_2.scala
@@ -0,0 +1,7 @@
+trait Support extends Exts
+
+// object ImplicitsBug extends App with Support { // A
+object ImplicitsBug extends App with Exts { // B
+ //Exts // C) this reference helped in the large project.
+ println(3.moo)
+}
diff --git a/test/files/pos/bug698.scala b/test/files/pos/t698.scala
similarity index 100%
rename from test/files/pos/bug698.scala
rename to test/files/pos/t698.scala
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t6994.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t6994.flags
diff --git a/test/files/pos/t6994.scala b/test/files/pos/t6994.scala
new file mode 100644
index 0000000..d707196
--- /dev/null
+++ b/test/files/pos/t6994.scala
@@ -0,0 +1,8 @@
+object Test {
+ object NF {
+ def unapply(t: Throwable): Option[Throwable] = None
+ }
+ val x = (try { None } catch { case NF(ex) => None }) getOrElse 0
+ // Was emitting a spurious warning post typer:
+ // "This catches all Throwables. If this is really intended, use `case ex6 : Throwable` to clear this warning."
+}
diff --git a/test/files/pos/t7011.flags b/test/files/pos/t7011.flags
new file mode 100644
index 0000000..a4c1615
--- /dev/null
+++ b/test/files/pos/t7011.flags
@@ -0,0 +1 @@
+-Ydebug -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t7011.scala b/test/files/pos/t7011.scala
new file mode 100644
index 0000000..539f662
--- /dev/null
+++ b/test/files/pos/t7011.scala
@@ -0,0 +1,7 @@
+object bar {
+ def foo {
+ lazy val x = 42
+
+ {()=>x}
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t7014/ThreadSafety.java b/test/files/pos/t7014/ThreadSafety.java
new file mode 100644
index 0000000..ed50880
--- /dev/null
+++ b/test/files/pos/t7014/ThreadSafety.java
@@ -0,0 +1,9 @@
+package t7014;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+ at Retention(RetentionPolicy.RUNTIME) // must be exactly RUNTIME retention (those we parse)
+public @interface ThreadSafety {
+ ThreadSafetyLevel level();
+}
\ No newline at end of file
diff --git a/test/files/pos/t7014/ThreadSafetyLevel.java b/test/files/pos/t7014/ThreadSafetyLevel.java
new file mode 100644
index 0000000..4df1dc7
--- /dev/null
+++ b/test/files/pos/t7014/ThreadSafetyLevel.java
@@ -0,0 +1,8 @@
+package t7014; // package needed due to other bug in scalac's java parser
+
+// since we parse eagerly, we have not yet parsed the classfile when parsing the annotation,
+// and on doing so, fail to find a symbol for the COMPLETELY_THREADSAFE reference
+// from the annotation's argument to the enum's member
+// for now, let's just not crash -- should implement lazy completing at some point
+ at ThreadSafety(level=ThreadSafetyLevel.COMPLETELY_THREADSAFE)
+public enum ThreadSafetyLevel { COMPLETELY_THREADSAFE }
diff --git a/test/files/pos/t7014/t7014.scala b/test/files/pos/t7014/t7014.scala
new file mode 100644
index 0000000..faec4c7
--- /dev/null
+++ b/test/files/pos/t7014/t7014.scala
@@ -0,0 +1,4 @@
+package t7014
+
+import ThreadSafetyLevel.COMPLETELY_THREADSAFE // refer to annotation so it gets parsed
+
\ No newline at end of file
diff --git a/test/files/pos/t7022.scala b/test/files/pos/t7022.scala
new file mode 100644
index 0000000..0609e2d
--- /dev/null
+++ b/test/files/pos/t7022.scala
@@ -0,0 +1,9 @@
+class Catch[+T] {
+ def either[U >: T](body: => U): Either[Throwable, U] = ???
+}
+
+object Test {
+ implicit class RichCatch[T](val c: Catch[T]) extends AnyVal {
+ def validation[U >: T](u: => U): Either[Throwable, U] = c.either(u)
+ }
+}
diff --git a/test/files/pos/bug703.scala b/test/files/pos/t703.scala
similarity index 100%
rename from test/files/pos/bug703.scala
rename to test/files/pos/t703.scala
diff --git a/test/files/pos/t7033.scala b/test/files/pos/t7033.scala
new file mode 100644
index 0000000..a4d2566
--- /dev/null
+++ b/test/files/pos/t7033.scala
@@ -0,0 +1,15 @@
+import language.higherKinds
+object Wrap {
+ implicit class X[X](val a: X)
+
+ X[Int](0)
+}
+
+class Wrap {
+ implicit class Y[Y](val a: Y)
+ Y[Int](0)
+ implicit class Z[Z[_]](val a: Z[Wrap.this.Z[Z]])
+ Z[List](List(new Z[List](null)))
+}
+
+case class X[X](val a: X)
diff --git a/test/files/pos/t7035.scala b/test/files/pos/t7035.scala
new file mode 100644
index 0000000..f45bd0a
--- /dev/null
+++ b/test/files/pos/t7035.scala
@@ -0,0 +1,15 @@
+case class Y(final var x: Int, final private var y: String, final val z1: Boolean, final private val z2: Any) {
+
+ import Test.{y => someY}
+ List(someY.x: Int, someY.y: String, someY.z1: Boolean, someY.z2: Any)
+ someY.y = ""
+}
+
+object Test {
+ val y = Y(0, "", true, new {})
+ val unapp: Option[(Int, String, Boolean, Any)] = // was (Int, Boolean, String, Any) !!
+ Y.unapply(y)
+
+ val Y(a, b, c, d) = y
+ List(a: Int, b: String, c: Boolean, d: Any)
+}
diff --git a/test/files/pos/bug704.scala b/test/files/pos/t704.scala
similarity index 100%
rename from test/files/pos/bug704.scala
rename to test/files/pos/t704.scala
diff --git a/test/files/pos/t7091.scala b/test/files/pos/t7091.scala
new file mode 100644
index 0000000..72e81a2
--- /dev/null
+++ b/test/files/pos/t7091.scala
@@ -0,0 +1,7 @@
+package p1.p2
+
+protected[p2] class C(var x: Int = 0)
+
+protected[p2] trait T {
+ new C()
+}
diff --git a/test/files/pos/bug711.scala b/test/files/pos/t711.scala
similarity index 100%
rename from test/files/pos/bug711.scala
rename to test/files/pos/t711.scala
diff --git a/test/files/pos/t7126.scala b/test/files/pos/t7126.scala
new file mode 100644
index 0000000..6720511
--- /dev/null
+++ b/test/files/pos/t7126.scala
@@ -0,0 +1,11 @@
+import language._
+
+object Test {
+ type T = Any
+ boom(???): Option[T] // SOE
+ def boom[CC[U]](t : CC[T]): Option[CC[T]] = None
+
+ // okay
+ foo(???): Option[Any]
+ def foo[CC[U]](t : CC[Any]): Option[CC[Any]] = None
+}
\ No newline at end of file
diff --git a/test/files/pos/bug715.cmds b/test/files/pos/t715.cmds
similarity index 100%
rename from test/files/pos/bug715.cmds
rename to test/files/pos/t715.cmds
diff --git a/test/files/pos/t715/meredith_1.scala b/test/files/pos/t715/meredith_1.scala
new file mode 100644
index 0000000..8261b98
--- /dev/null
+++ b/test/files/pos/t715/meredith_1.scala
@@ -0,0 +1,98 @@
+package com.sap.dspace.model.othello;
+
+import scala.xml._
+
+trait XMLRenderer {
+ type T <: Any {def getClass() : java.lang.Class[_]}
+ val valueTypes =
+ List(
+ classOf[java.lang.Boolean],
+ classOf[java.lang.Integer],
+ classOf[java.lang.Float],
+ classOf[java.lang.String]
+ // more to come
+ )
+
+ def value2XML(
+ value : Object,
+ field : java.lang.reflect.Field,
+ pojo : T
+ ) : Node = {
+ value match {
+ case null => Text( "null" )
+ case vUnmatched =>
+ if (value.isInstanceOf[java.lang.Boolean])
+ Text( value.asInstanceOf[java.lang.Boolean].toString )
+ else if (value.isInstanceOf[java.lang.Integer])
+ Text( value.asInstanceOf[java.lang.Integer].toString )
+ else if (value.isInstanceOf[java.lang.Float])
+ Text( value.asInstanceOf[java.lang.Float].toString )
+ // else if (value.isInstanceOf[T])
+ // pojo2XML( value.asInstanceOf[T] )
+ else
+ <unmatchedType>
+ <theType>
+ {vUnmatched.getClass.toString}
+ </theType>
+ <theValue>
+ {vUnmatched.toString}
+ </theValue>
+ </unmatchedType>
+ }
+ }
+
+ def field2XML(
+ field : java.lang.reflect.Field,
+ pojo : T
+ ) : Elem = {
+
+ val accessible = field.isAccessible;
+ field.setAccessible( true );
+ // BUGBUG lgm need to disambiguate on type and possibly make
+ // recursive call to pojo2XML
+ val fldValXML = value2XML( field.get( pojo ), field, pojo );
+ field.setAccessible( accessible );
+
+ Elem(
+ null,
+ field.getName,
+ null,
+ TopScope,
+ fldValXML
+ )
+ }
+
+ def pojo2XML( pojo : T ) : Elem = {
+ val progeny =
+ for (field <- pojo.getClass.getDeclaredFields)
+ yield field2XML( field, pojo );
+
+ Elem(
+ null,
+ pojo.getClass.getName,
+ null,
+ TopScope,
+ progeny.asInstanceOf[Array[scala.xml.Node]] : _*
+ )
+ }
+}
+
+case class POJO2XMLRenderer( recurse : Boolean )
+ extends XMLRenderer {
+ type T = java.io.Serializable
+ override def value2XML(
+ value : Object,
+ field : java.lang.reflect.Field,
+ pojo : java.io.Serializable
+ ) : Node = {
+ if (recurse) super.value2XML( value, field, pojo )
+ else Text( value + "" )
+ }
+}
+
+object thePOJO2XMLRenderer extends POJO2XMLRenderer( true ) {
+}
+
+object Test extends Application {
+ println(com.sap.dspace.model.othello.thePOJO2XMLRenderer)
+}
diff --git a/test/files/pos/bug715/runner_2.scala b/test/files/pos/t715/runner_2.scala
similarity index 100%
rename from test/files/pos/bug715/runner_2.scala
rename to test/files/pos/t715/runner_2.scala
diff --git a/test/files/pos/t7180.scala b/test/files/pos/t7180.scala
new file mode 100644
index 0000000..15582f6
--- /dev/null
+++ b/test/files/pos/t7180.scala
@@ -0,0 +1,13 @@
+trait Higher[F[_]]
+
+trait Box[A]
+object Box {
+ implicit def HigherBox = new Higher[Box] {}
+}
+
+object Foo {
+ val box = implicitly[Higher[Box]] // compiles fine !!!
+
+ type Bar[A] = Box[A]
+ val bar = implicitly[Higher[Bar]] // <-- this doesn't compile in 2.10.1-RC1, but does in 2.10.0 !!!
+}
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t7183.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t7183.flags
diff --git a/test/files/pos/t7183.scala b/test/files/pos/t7183.scala
new file mode 100644
index 0000000..7647c16
--- /dev/null
+++ b/test/files/pos/t7183.scala
@@ -0,0 +1,13 @@
+class A
+object A {
+ def unapply(a: A): Some[A] = Some(a) // Change return type to Option[A] and the warning is gone
+}
+
+object Test {
+ for (A(a) <- List(new A)) yield a // spurious dead code warning.
+}
+
+// List(new A()).withFilter(((check$ifrefutable$2) => check$ifrefutable$2: @scala.unchecked match {
+// case A((a @ _)) => true
+// case _ => false // this is dead code, but it's compiler generated.
+// }))
diff --git a/test/files/pos/t7190.scala b/test/files/pos/t7190.scala
new file mode 100644
index 0000000..f7ccded
--- /dev/null
+++ b/test/files/pos/t7190.scala
@@ -0,0 +1,26 @@
+import scala.language.experimental.macros
+import scala.reflect.macros._
+
+trait A[T] {
+ def min[U >: T](implicit ord: Numeric[U]): T = macro A.min[T, U]
+}
+
+object A {
+ def min[T: c.WeakTypeTag, U >: T: c.WeakTypeTag](c: Context)(ord: c.Expr[Numeric[U]]): c.Expr[T] = {
+ c.universe.reify {
+ ord.splice.zero.asInstanceOf[T]
+ }
+ }
+}
+
+class B extends A[Int] {
+ override def min[U >: Int](implicit ord: Numeric[U]): Int = macro B.min[U]
+}
+
+object B {
+ def min[U >: Int: c.WeakTypeTag](c: Context)(ord: c.Expr[Numeric[U]]): c.Expr[Int] = {
+ c.universe.reify {
+ ord.splice.zero.asInstanceOf[Int]
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/bug720.scala b/test/files/pos/t720.scala
similarity index 100%
rename from test/files/pos/bug720.scala
rename to test/files/pos/t720.scala
diff --git a/test/files/pos/t7200b.scala b/test/files/pos/t7200b.scala
new file mode 100644
index 0000000..9d579c6
--- /dev/null
+++ b/test/files/pos/t7200b.scala
@@ -0,0 +1,50 @@
+import language.higherKinds
+
+trait T {
+ def t = 0
+}
+trait Foo {
+ def coflatMap[A <: T](f: A): A
+}
+
+object O extends Foo {
+ def coflatMap[A <: T](f: A) = {
+ val f2 = coflatMap(f) // inferred in 2.9.2 / 2.10.0 as [Nothing]
+ f2.t // so this does't type check.
+ f2
+ }
+}
+
+// Why? When a return type is inherited, the derived method
+// symbol first gets a preliminary type assigned, based on the
+// 1) method type of a unique matching super member
+// 2) viewed as a member type of the inheritor (to substitute,
+// e.g. class type parameters)
+// 3) substituted to replace the super-method's type parameters
+// with those of the inheritor
+// 4) dissected to take just the return type wrapped in thisMethodType().
+//
+// In Scala 2.10.0 and earlier, this preliminary method type
+//
+// 1) [A#11329 <: <empty>#3.this.T#7068](<param> f#11333: A#11329)A#11329
+// 2) [A#11329 <: <empty>#3.this.T#7068](<param> f#11333: A#11329)A#11329
+// 3) (<param> f#12556: A#11336)A#11336
+// 4) [A#11336 <: <empty>#3.this.T#7068](<param> f#12552: A#11337&0)A#11336
+//
+// The type #4 from the old version is problematic: the parameter is typed with
+// a skolem for the type parameter `A`. It won't be considered to match the
+// method it overrides, instead they are seen as being overloaded, and type inference
+// goes awry (Nothing is inferred as the type argument for the recursive call
+// to coflatMap.
+//
+// The Namers patch adds one step here: it subsitutes the type parameter symbols
+// for the skolems:
+//
+// https://github.com/scala/scala/commit/b74c33eb#L2R1014
+//
+// So we end up with a method symbol info:
+//
+// 5) [A#11336 <: <empty>#3.this.T#7068](<param> f#12505: A#11336)A#11336
+//
+// This *does* match the method in the super class, and type inference
+// chooses the correct type argument.
\ No newline at end of file
diff --git a/test/files/pos/t7226.scala b/test/files/pos/t7226.scala
new file mode 100644
index 0000000..06f0c95
--- /dev/null
+++ b/test/files/pos/t7226.scala
@@ -0,0 +1,26 @@
+trait HK {
+ type Rep[X]
+
+ // okay
+ def unzip2[A, B](ps: Rep[List[(A, B)]])
+ unzip2(null.asInstanceOf[Rep[List[(Int, String)]]])
+
+ // okay
+ def unzipHK[A, B, C[_]](ps: Rep[C[(A, B)]])
+ unzipHK(null.asInstanceOf[Rep[List[(Int, String)]]])
+
+ def unzipHKRet0[A, C[_]](ps: C[A]): C[Int]
+ def ls: List[String]
+ unzipHKRet0(ls)
+
+ // fail
+ def unzipHKRet[A, C[_]](ps: Rep[C[A]]): Rep[C[Int]]
+ def rls: Rep[List[String]]
+ unzipHKRet(rls)
+}
+
+trait HK1 {
+ type Rep[A]
+ def unzip1[A, B, C[_]](ps: Rep[C[(A, B)]]): (Rep[C[A]], Rep[C[B]])
+ def doUnzip1[A, B](ps: Rep[List[(A, B)]]) = unzip1(ps)
+}
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t7232.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t7232.flags
diff --git a/test/files/pos/t7232/Foo.java b/test/files/pos/t7232/Foo.java
new file mode 100644
index 0000000..3478301
--- /dev/null
+++ b/test/files/pos/t7232/Foo.java
@@ -0,0 +1,9 @@
+package pack;
+
+import java.util.List;
+
+public class Foo {
+ public static java.util.List okay() { throw new Error(); }
+
+ public static List wrong() { throw new Error(); }
+}
diff --git a/test/files/pos/t7232/List.java b/test/files/pos/t7232/List.java
new file mode 100644
index 0000000..e42c63a
--- /dev/null
+++ b/test/files/pos/t7232/List.java
@@ -0,0 +1,4 @@
+package pack;
+
+public class List {
+}
diff --git a/test/files/pos/t7232/Test.scala b/test/files/pos/t7232/Test.scala
new file mode 100644
index 0000000..49c3c12
--- /dev/null
+++ b/test/files/pos/t7232/Test.scala
@@ -0,0 +1,5 @@
+object Test {
+ import pack._
+ Foo.okay().size()
+ Foo.wrong().size()
+}
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t7232b.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t7232b.flags
diff --git a/test/files/pos/t7232b/Foo.java b/test/files/pos/t7232b/Foo.java
new file mode 100644
index 0000000..94f08d5
--- /dev/null
+++ b/test/files/pos/t7232b/Foo.java
@@ -0,0 +1,8 @@
+package pack;
+
+import java.util.*;
+
+public class Foo {
+ // should be pack.List.
+ public static List list() { throw new Error(); }
+}
diff --git a/test/files/pos/t7232b/List.java b/test/files/pos/t7232b/List.java
new file mode 100644
index 0000000..ce97715
--- /dev/null
+++ b/test/files/pos/t7232b/List.java
@@ -0,0 +1,5 @@
+package pack;
+
+public class List {
+ public void packList() {}
+}
diff --git a/test/files/pos/t7232b/Test.scala b/test/files/pos/t7232b/Test.scala
new file mode 100644
index 0000000..6377e26
--- /dev/null
+++ b/test/files/pos/t7232b/Test.scala
@@ -0,0 +1,5 @@
+object Test {
+ import pack._
+
+ Foo.list().packList()
+}
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t7232c.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t7232c.flags
diff --git a/test/files/pos/t7232c/Foo.java b/test/files/pos/t7232c/Foo.java
new file mode 100644
index 0000000..bbda09a
--- /dev/null
+++ b/test/files/pos/t7232c/Foo.java
@@ -0,0 +1,10 @@
+package pack;
+
+import java.util.List;
+
+public class Foo {
+ public static class List {
+ public void isInnerList() {}
+ }
+ public static List innerList() { throw new Error(); }
+}
diff --git a/test/files/pos/t7232c/Test.scala b/test/files/pos/t7232c/Test.scala
new file mode 100644
index 0000000..aa7c710
--- /dev/null
+++ b/test/files/pos/t7232c/Test.scala
@@ -0,0 +1,4 @@
+object Test {
+ import pack._
+ Foo.innerList().isInnerList()
+}
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t7232d.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t7232d.flags
diff --git a/test/files/pos/t7232d/Entry.java b/test/files/pos/t7232d/Entry.java
new file mode 100644
index 0000000..0cfb6fb
--- /dev/null
+++ b/test/files/pos/t7232d/Entry.java
@@ -0,0 +1,4 @@
+package pack;
+
+public class Entry {
+}
diff --git a/test/files/pos/t7232d/Foo.java b/test/files/pos/t7232d/Foo.java
new file mode 100644
index 0000000..df7114a
--- /dev/null
+++ b/test/files/pos/t7232d/Foo.java
@@ -0,0 +1,8 @@
+package pack;
+
+import java.util.Map.Entry;
+
+public class Foo {
+ public static Entry mapEntry() { throw new Error(); }
+ public static void javaTest() { mapEntry().getKey(); }
+}
diff --git a/test/files/pos/t7232d/Test.scala b/test/files/pos/t7232d/Test.scala
new file mode 100644
index 0000000..89a8063
--- /dev/null
+++ b/test/files/pos/t7232d/Test.scala
@@ -0,0 +1,4 @@
+object Test {
+ import pack._
+ Foo.mapEntry().getKey()
+}
diff --git a/test/files/pos/t7233.scala b/test/files/pos/t7233.scala
new file mode 100644
index 0000000..ae15c08
--- /dev/null
+++ b/test/files/pos/t7233.scala
@@ -0,0 +1,14 @@
+object Foo {
+ def bar(i: Int) = i
+
+ def ol(i: Int) = i
+ def ol(i: String) = i
+}
+object Test {
+ import Foo.{ bar => quux, toString => bar, ol => olRenamed}
+
+ val f1 = quux _
+ val f1Typed: (Int => Int) = f1
+
+ val f2: String => String = olRenamed _
+}
diff --git a/test/files/pos/t7233b.scala b/test/files/pos/t7233b.scala
new file mode 100644
index 0000000..927c7fc
--- /dev/null
+++ b/test/files/pos/t7233b.scala
@@ -0,0 +1,8 @@
+object Test {
+ // crash
+ def foo(a: Any) = { import a.{toString => toS}; toS }
+
+ // okay
+ def ok1(a: String) = { import a.{isInstanceOf => iio}; iio[String] }
+ def ok2(a: Int) = { import a.{toInt => ti}; ti }
+}
diff --git a/test/files/pos/t7239.scala b/test/files/pos/t7239.scala
new file mode 100644
index 0000000..16e9d00
--- /dev/null
+++ b/test/files/pos/t7239.scala
@@ -0,0 +1,38 @@
+object Test {
+ def BrokenMethod(): HasFilter[(Int, String)] = ???
+
+ trait HasFilter[B] {
+ def filter(p: B => Boolean) = ???
+ }
+
+ trait HasWithFilter {
+ def withFilter = ???
+ }
+
+ object addWithFilter {
+ trait NoImplicit
+ implicit def enrich(v: Any)
+ (implicit F0: NoImplicit): HasWithFilter = ???
+ }
+
+ BrokenMethod().withFilter(_ => true) // okay
+ BrokenMethod().filter(_ => true) // okay
+
+ locally {
+ import addWithFilter._
+ BrokenMethod().withFilter((_: (Int, String)) => true) // okay
+ }
+
+ locally {
+ import addWithFilter._
+ // adaptToMemberWithArgs sets the type of the tree `x`
+ // to ErrorType (while in silent mode, so the error is not
+ // reported. Later, when the fallback from `withFilter`
+ // to `filter` is attempted, the closure is taken to have
+ // have the type `<error> => Boolean`, which conforms to
+ // `(B => Boolean)`. Only later during pickling does the
+ // defensive check for erroneous types in the tree pick up
+ // the problem.
+ BrokenMethod().withFilter(x => true) // erroneous or inaccessible type.
+ }
+}
diff --git a/test/files/neg/caseinherit.flags b/test/files/pos/t7285a.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/pos/t7285a.flags
diff --git a/test/files/pos/t7285a.scala b/test/files/pos/t7285a.scala
new file mode 100644
index 0000000..34e79c7
--- /dev/null
+++ b/test/files/pos/t7285a.scala
@@ -0,0 +1,83 @@
+sealed abstract class Base
+
+object Test {
+ case object Up extends Base
+
+ def foo(d1: Base) =
+ d1 match {
+ case Up =>
+ }
+
+ // Sealed subtype: ModuleTypeRef <empty>.this.Test.Up.type
+ // Pattern: UniqueThisType Test.this.type
+}
+
+
+object Test1 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base {
+ }
+
+ case object Up extends Base {
+ }
+
+ (d1: Base, d2: Base) =>
+ (d1, d2) match {
+ case (Up, Up) | (Down, Down) => false
+ case (Down, Up) => true
+ case (Up, Down) => false
+ }
+ }
+}
+
+object Test2 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base {
+ }
+
+ case object Up extends Base {
+ }
+
+ (d1: Base, d2: Base) =>
+ (d1) match {
+ case Up | Down => false
+ }
+ }
+}
+
+object Test3 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base
+
+ (d1: Base, d2: Base) =>
+ (d1, d2) match {
+ case (Down, Down) => false
+ }
+ }
+}
+
+object Test4 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base {
+ }
+
+ case object Up extends Base {
+ }
+
+ }
+ import Test4.Base._
+ (d1: Base, d2: Base) =>
+ (d1, d2) match {
+ case (Up, Up) | (Down, Down) => false
+ case (Down, Test4.Base.Up) => true
+ case (Up, Down) => false
+ }
+}
diff --git a/test/files/pos/t7329.scala b/test/files/pos/t7329.scala
new file mode 100644
index 0000000..76bf1fb
--- /dev/null
+++ b/test/files/pos/t7329.scala
@@ -0,0 +1 @@
+class TwoParamSpecializedWithDefault[@specialized A, @specialized B](a: A, b: B = (??? : B))
\ No newline at end of file
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/pos/t7369.flags
similarity index 100%
copy from test/files/neg/anyval-sealed.flags
copy to test/files/pos/t7369.flags
diff --git a/test/files/pos/t7369.scala b/test/files/pos/t7369.scala
new file mode 100644
index 0000000..2f31c93
--- /dev/null
+++ b/test/files/pos/t7369.scala
@@ -0,0 +1,37 @@
+object Test {
+ val X, Y = true
+ (null: Tuple1[Boolean]) match {
+ case Tuple1(X) =>
+ case Tuple1(Y) => // unreachable
+ case _ =>
+ }
+}
+
+
+sealed abstract class B;
+case object True extends B;
+case object False extends B;
+
+object Test2 {
+
+ val X: B = True
+ val Y: B = False
+
+ (null: Tuple1[B]) match {
+ case Tuple1(X) =>
+ case Tuple1(Y) => // no warning
+ case _ =>
+ }
+}
+
+object Test3 {
+ val X, O = true
+ def classify(neighbourhood: (Boolean, Boolean, Boolean)): String = {
+ neighbourhood match {
+ case (X, X, X) => "middle"
+ case (X, X, O) => "right"
+ case (O, X, X) => "left"
+ case _ => throw new IllegalArgumentException("Invalid")
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t7377/Client_2.scala b/test/files/pos/t7377/Client_2.scala
new file mode 100644
index 0000000..5728956
--- /dev/null
+++ b/test/files/pos/t7377/Client_2.scala
@@ -0,0 +1,11 @@
+object Test {
+ M.noop(List(1) match { case Nil => 0; case (x::xs) => x })
+
+ case class Foo(a: Int)
+ val FooAlias: Foo.type = Foo
+ M.noop(Foo(0) match { case FooAlias(_) => 0 })
+
+ case class Bar()
+ val BarAlias: Bar.type = Bar
+ M.noop(Bar() match { case BarAlias() => 0 })
+}
diff --git a/test/files/pos/t7377/Macro_1.scala b/test/files/pos/t7377/Macro_1.scala
new file mode 100644
index 0000000..a0ec1d8
--- /dev/null
+++ b/test/files/pos/t7377/Macro_1.scala
@@ -0,0 +1,7 @@
+import language.experimental._
+import reflect.macros.Context
+
+object M {
+ def noopImpl[A](c: Context)(expr: c.Expr[A]): c.Expr[A] = c.Expr(c.typeCheck(c.resetLocalAttrs(expr.tree)))
+ def noop[A](expr: A): A = macro noopImpl[A]
+}
diff --git a/test/files/pos/t7377b.flags b/test/files/pos/t7377b.flags
new file mode 100644
index 0000000..cb8324a
--- /dev/null
+++ b/test/files/pos/t7377b.flags
@@ -0,0 +1 @@
+-Xoldpatmat
\ No newline at end of file
diff --git a/test/files/pos/t7377b.scala b/test/files/pos/t7377b.scala
new file mode 100644
index 0000000..aeee800
--- /dev/null
+++ b/test/files/pos/t7377b.scala
@@ -0,0 +1,13 @@
+object Test {
+ List(1) match { case Nil => 0; case (x::xs) => x }
+
+ case class Foo(a: Int)
+ val FooAlias: Foo.type = Foo
+ Foo(0) match { case FooAlias(_) => 0 }
+ Foo(0) match { case Foo(_) => 0 }
+
+ case class Bar()
+ val BarAlias: Bar.type = Bar
+ Bar() match { case BarAlias() => 0 }
+ Bar() match { case Bar() => 0 }
+}
diff --git a/test/files/pos/t7426.scala b/test/files/pos/t7426.scala
new file mode 100644
index 0000000..8e42ad1
--- /dev/null
+++ b/test/files/pos/t7426.scala
@@ -0,0 +1,3 @@
+class foo(x: Any) extends annotation.StaticAnnotation
+
+ at foo(new AnyRef { }) trait A
diff --git a/test/files/jvm/bug680.check b/test/files/pos/t7461.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/pos/t7461.check
diff --git a/test/files/pos/t7461/Macros_1.scala b/test/files/pos/t7461/Macros_1.scala
new file mode 100644
index 0000000..353dec6
--- /dev/null
+++ b/test/files/pos/t7461/Macros_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ val wut = c.typeCheck(Select(Literal(Constant(10)), newTermName("$minus")), silent = true)
+ // println(showRaw(wut, printIds = true, printTypes = true))
+ c.literalUnit
+ }
+
+ def foo = macro impl
+}
\ No newline at end of file
diff --git a/test/files/pos/t7461/Test_2.scala b/test/files/pos/t7461/Test_2.scala
new file mode 100644
index 0000000..3839659
--- /dev/null
+++ b/test/files/pos/t7461/Test_2.scala
@@ -0,0 +1,3 @@
+class C {
+ def foo = Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/pos/t7486-named.scala b/test/files/pos/t7486-named.scala
new file mode 100644
index 0000000..253293e
--- /dev/null
+++ b/test/files/pos/t7486-named.scala
@@ -0,0 +1,8 @@
+
+object Test {
+ def fold(empty: Any) = ()
+ implicit val notAnnotatedImplicit = new {
+ fold(empty = 0)
+ def empty[A]: Any = ???
+ }
+}
diff --git a/test/files/pos/t7486.scala b/test/files/pos/t7486.scala
new file mode 100644
index 0000000..6dd7f4c
--- /dev/null
+++ b/test/files/pos/t7486.scala
@@ -0,0 +1,8 @@
+object Test{
+ var locker = 0
+ // remove implicit, or change to `locker = locker + 1` to make it compile.
+ implicit val davyJones0 = {
+ locker += 0
+ 0
+ }
+}
diff --git a/test/files/pos/t7505.scala b/test/files/pos/t7505.scala
new file mode 100644
index 0000000..3e1e6ab
--- /dev/null
+++ b/test/files/pos/t7505.scala
@@ -0,0 +1,16 @@
+import scala.language.reflectiveCalls
+
+case class ContextProperty(value: Any) {
+ type HasToInt = { def toInt:Int }
+
+ def toInt: Int = value match {
+ case n: HasToInt => n.toInt
+ }
+}
+
+// was:
+// error:7: error during expansion of this match (this is a scalac bug).
+// The underlying error was: type mismatch;
+// found : Boolean(true)
+// required: AnyRef
+// def toInt: Int = value match {
\ No newline at end of file
diff --git a/test/files/pos/t7516/A_1.scala b/test/files/pos/t7516/A_1.scala
new file mode 100644
index 0000000..3bba199
--- /dev/null
+++ b/test/files/pos/t7516/A_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect._,macros._, scala.language.experimental.macros
+
+object A {
+ def impl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[List[T]] = {
+ val r = c.universe.reify { List(t.splice) }
+ c.Expr[List[T]]( c.resetLocalAttrs(r.tree) )
+ }
+ def demo[T](t: T): List[T] = macro impl[T]
+}
diff --git a/test/files/pos/t7516/B_2.scala b/test/files/pos/t7516/B_2.scala
new file mode 100644
index 0000000..1b8531b
--- /dev/null
+++ b/test/files/pos/t7516/B_2.scala
@@ -0,0 +1,4 @@
+object B {
+ final case class CV(p: Int = 3, g: Int = 2)
+ A.demo { val d = 4; CV(g = d); "a" }
+}
diff --git a/test/files/pos/t7517.scala b/test/files/pos/t7517.scala
new file mode 100644
index 0000000..7ce4c6b
--- /dev/null
+++ b/test/files/pos/t7517.scala
@@ -0,0 +1,22 @@
+trait Box[ K[A[x]] ]
+
+object Box {
+ // type constructor composition
+ sealed trait ∙[A[_], B[_]] { type l[T] = A[B[T]] }
+
+ // composes type constructors inside K
+ type SplitBox[K[A[x]], B[x]] = Box[ ({ type l[A[x]] = K[ (A ∙ B)#l] })#l ]
+
+ def split[ K[A[x]], B[x] ](base: Box[K]): SplitBox[K,B] = ???
+
+ class Composed[B[_], L[A[x]] ] {
+ val box: Box[L] = ???
+
+ type Split[ A[x] ] = L[ (A ∙ B)#l ]
+ val a: Box[Split] = Box.split(box)
+
+ //Either of these work:
+ val a1: Box[Split] = Box.split[L,B](box)
+ val a2: Box[ ({ type l[A[x]] = L[ (A ∙ B)#l ] })#l ] = Box.split(box)
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/t7532/A_1.java b/test/files/pos/t7532/A_1.java
new file mode 100644
index 0000000..1ade76c
--- /dev/null
+++ b/test/files/pos/t7532/A_1.java
@@ -0,0 +1,6 @@
+class R {
+ public class attr { // Will have the bytecode name `R$attr`, not to be confused with `R at tr`!
+ }
+ public static class attr1 {
+ }
+}
diff --git a/test/files/pos/t7532/B_2.scala b/test/files/pos/t7532/B_2.scala
new file mode 100644
index 0000000..ee7ce77
--- /dev/null
+++ b/test/files/pos/t7532/B_2.scala
@@ -0,0 +1,5 @@
+object Test {
+ val r = new R
+ new r.attr() // Was: error while loading attr, class file '.../t7532-pos.obj/R$attr.class' has location not matching its contents: contains class
+ new R.attr1
+}
\ No newline at end of file
diff --git a/test/files/pos/t7532b/A_1.scala b/test/files/pos/t7532b/A_1.scala
new file mode 100644
index 0000000..e8f9540
--- /dev/null
+++ b/test/files/pos/t7532b/A_1.scala
@@ -0,0 +1,7 @@
+package pack
+class R {
+ class attr // Will have the bytecode name `R$attr`, not to be confused with `R at tr`!
+ class `@`
+}
+
+class `@`
\ No newline at end of file
diff --git a/test/files/pos/t7532b/B_2.scala b/test/files/pos/t7532b/B_2.scala
new file mode 100644
index 0000000..1555a5d
--- /dev/null
+++ b/test/files/pos/t7532b/B_2.scala
@@ -0,0 +1,8 @@
+import pack._
+
+object Test {
+ val r = new R
+ new r.attr()
+ new r.`@`
+ new `@`
+}
\ No newline at end of file
diff --git a/test/files/pos/bug756.scala b/test/files/pos/t756.scala
similarity index 100%
rename from test/files/pos/bug756.scala
rename to test/files/pos/t756.scala
diff --git a/test/files/pos/bug757.scala b/test/files/pos/t757.scala
similarity index 100%
rename from test/files/pos/bug757.scala
rename to test/files/pos/t757.scala
diff --git a/test/files/pos/bug757a.scala b/test/files/pos/t757a.scala
similarity index 100%
rename from test/files/pos/bug757a.scala
rename to test/files/pos/t757a.scala
diff --git a/test/files/pos/bug758.scala b/test/files/pos/t758.scala
similarity index 100%
rename from test/files/pos/bug758.scala
rename to test/files/pos/t758.scala
diff --git a/test/files/pos/bug759.scala b/test/files/pos/t759.scala
similarity index 100%
rename from test/files/pos/bug759.scala
rename to test/files/pos/t759.scala
diff --git a/test/files/pos/bug762.scala b/test/files/pos/t762.scala
similarity index 100%
rename from test/files/pos/bug762.scala
rename to test/files/pos/t762.scala
diff --git a/test/files/pos/t7649.flags b/test/files/pos/t7649.flags
new file mode 100644
index 0000000..fcf951d
--- /dev/null
+++ b/test/files/pos/t7649.flags
@@ -0,0 +1 @@
+-Yrangepos
\ No newline at end of file
diff --git a/test/files/pos/t7649.scala b/test/files/pos/t7649.scala
new file mode 100644
index 0000000..a1b02f6
--- /dev/null
+++ b/test/files/pos/t7649.scala
@@ -0,0 +1,20 @@
+object Test {
+ val c: reflect.macros.Context = ???
+ import c.universe._
+ reify {
+ // The lookup of the implicit WeakTypeTag[Any]
+ // was triggering an unpositioned tree.
+ c.Expr[Any](Literal(Constant(0))).splice
+ }
+
+ import scala.reflect.ClassTag
+ def ct[A: ClassTag]: Expr[A] = ???
+ def tt[A: TypeTag]: Expr[A] = ???
+ def wtt[A: WeakTypeTag]: Expr[A] = ???
+
+ reify {
+ ct[String].splice
+ tt[String].splice
+ wtt[String].splice
+ }
+}
diff --git a/test/files/pos/bug767.scala b/test/files/pos/t767.scala
similarity index 100%
rename from test/files/pos/bug767.scala
rename to test/files/pos/t767.scala
diff --git a/test/files/pos/t7694.scala b/test/files/pos/t7694.scala
new file mode 100644
index 0000000..9852d5e
--- /dev/null
+++ b/test/files/pos/t7694.scala
@@ -0,0 +1,40 @@
+trait A
+trait B
+
+trait L[A2, B2 <: A2] {
+ def bar(a: Any, b: Any) = 0
+}
+
+object Lub {
+ // use named args transforms to include TypeTree(<lub.tpe>) in the AST before refchecks.
+ def foo(a: L[_, _], b: Any) = 0
+
+ foo(b = 0, a = if (true) (null: L[A, A]) else (null: L[B, B]))
+
+ (if (true) (null: L[A, A]) else (null: L[B, B])).bar(b = 0, a = 0)
+}
+
+/*
+The LUB ends up as:
+
+TypeRef(
+ TypeSymbol(
+ abstract trait L#7038[A2#7039, B2#7040 <: A2#7039] extends AnyRef#2197
+
+ )
+ args = List(
+ AbstractTypeRef(
+ AbstractType(
+ type _1#13680 >: A#7036 with B#7037 <: Object#1752
+ )
+ )
+ AbstractTypeRef(
+ AbstractType(
+ type _2#13681 >: A#7036 with B#7037 <: Object#1752
+ )
+ )
+ )
+)
+
+Note that type _2#13681 is *not* bound by _1#13680
+*/
diff --git a/test/files/pos/t7716.scala b/test/files/pos/t7716.scala
new file mode 100644
index 0000000..4011705
--- /dev/null
+++ b/test/files/pos/t7716.scala
@@ -0,0 +1,16 @@
+object Test {
+ def test: Unit = {
+ val e: java.lang.Enum[_] = java.util.concurrent.TimeUnit.SECONDS
+ e match { case x => println(x) }
+
+
+ trait TA[X <: CharSequence]
+ val ta: TA[_] = new TA[String] {}
+
+ ta match {
+ case _ => println("hi")
+ }
+
+ def f(ta: TA[_]) = ta match { case _ => "hi" }
+ }
+}
diff --git a/test/files/jvm/bug680.check b/test/files/pos/t7776.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/pos/t7776.check
diff --git a/test/files/pos/t7776.scala b/test/files/pos/t7776.scala
new file mode 100644
index 0000000..0340fac
--- /dev/null
+++ b/test/files/pos/t7776.scala
@@ -0,0 +1,12 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+
+class MacroErasure {
+ def app(f: Any => Any, x: Any): Any = macro MacroErasure.appMacro
+ def app[A](f: A => Any, x: Any): Any = macro MacroErasure.appMacroA[A]
+}
+
+object MacroErasure {
+ def appMacro(c: Context)(f: c.Expr[Any => Any], x: c.Expr[Any]): c.Expr[Any] = ???
+ def appMacroA[A](c: Context)(f: c.Expr[A => Any], x: c.Expr[Any])(implicit tt: c.WeakTypeTag[A]): c.Expr[Any] = ???
+}
\ No newline at end of file
diff --git a/test/files/pos/t7782.scala b/test/files/pos/t7782.scala
new file mode 100644
index 0000000..037bdad
--- /dev/null
+++ b/test/files/pos/t7782.scala
@@ -0,0 +1,25 @@
+package pack
+
+object Test {
+ import O.empty
+ empty // this will trigger completion of `test`
+ // with skolemizationLevel = 1
+}
+
+object O {
+ // order matters (!!!)
+
+ // this order breaks under 2.10.x
+ def empty[E]: C[E] = ???
+ def empty(implicit a: Any): Any = ???
+}
+
+abstract class C[E] {
+ def foo[BB](f: BB)
+ def test[B](f: B): Any = foo(f)
+ // error: no type parameters for method foo: (<param> f: BB)scala.this.Unit exist so that it can be applied to arguments (B&1)
+ // --- because ---
+ // argument expression's type is not compatible with formal parameter type;
+ // found : B&1
+ // required: ?BB
+}
diff --git a/test/files/pos/t7782b.scala b/test/files/pos/t7782b.scala
new file mode 100644
index 0000000..09da4a5
--- /dev/null
+++ b/test/files/pos/t7782b.scala
@@ -0,0 +1,25 @@
+package pack
+
+object Test {
+ import O.empty
+ empty // this will trigger completion of `test`
+ // with skolemizationLevel = 1
+}
+
+object O {
+ // order matters (!!!)
+
+ // this order breaks under 2.11.x
+ def empty(implicit a: Any): Any = ???
+ def empty[E]: C[E] = ???
+}
+
+abstract class C[E] {
+ def foo[BB](f: BB)
+ def test[B](f: B): Any = foo(f)
+ // error: no type parameters for method foo: (<param> f: BB)scala.this.Unit exist so that it can be applied to arguments (B&1)
+ // --- because ---
+ // argument expression's type is not compatible with formal parameter type;
+ // found : B&1
+ // required: ?BB
+}
diff --git a/test/files/pos/bug780.scala b/test/files/pos/t780.scala
similarity index 100%
rename from test/files/pos/bug780.scala
rename to test/files/pos/t780.scala
diff --git a/test/files/pos/t7815.scala b/test/files/pos/t7815.scala
new file mode 100644
index 0000000..12a434c
--- /dev/null
+++ b/test/files/pos/t7815.scala
@@ -0,0 +1,30 @@
+import language.higherKinds
+
+trait Foo[A <: AnyRef] {
+ type Repr
+ def f(a: A): Repr
+ def g(a: A): Option[Repr]
+
+ type M[X]
+ def m(a: A): M[a.type]
+
+ type Id[X] = X
+ def n(a: A): Id[(Repr, M[a.type])]
+
+}
+
+object Foo {
+ type Aux[A <: AnyRef, B] = Foo[A] { type Repr = B; type M[X] = Int }
+
+}
+
+object Main extends App {
+ def mapWithFoo[A <: AnyRef, B](as: List[A])(implicit foo: Foo.Aux[A, B]) = {
+ // Should be Eta expandable because the result type of `f` is not
+ // dependant on the value, it is just `B`.
+ as map foo.f
+ as map foo.g
+ as map foo.m
+ as map foo.n
+ }
+}
diff --git a/test/files/pos/t7818.scala b/test/files/pos/t7818.scala
new file mode 100644
index 0000000..77b99e7
--- /dev/null
+++ b/test/files/pos/t7818.scala
@@ -0,0 +1,10 @@
+class Observable1[+T](val asJava: JObservable[_ <: T]) extends AnyVal {
+ private def foo[X](a: JObservable[X]): JObservable[X] = ???
+ // was generating a type error as the type of the RHS included an existential
+ // skolem based on the class type parameter `T`, which did not conform
+ // to the typer parameter of the extension method into which the RHS is
+ // transplanted.
+ def synchronize: Observable1[T] = new Observable1(foo(asJava))
+}
+
+class JObservable[T]
diff --git a/test/files/pos/bug788.scala b/test/files/pos/t788.scala
similarity index 100%
rename from test/files/pos/bug788.scala
rename to test/files/pos/t788.scala
diff --git a/test/files/pos/bug789.scala b/test/files/pos/t789.scala
similarity index 100%
rename from test/files/pos/bug789.scala
rename to test/files/pos/t789.scala
diff --git a/test/files/pos/t7902.scala b/test/files/pos/t7902.scala
new file mode 100644
index 0000000..47c525c
--- /dev/null
+++ b/test/files/pos/t7902.scala
@@ -0,0 +1,17 @@
+import scala.language.higherKinds
+
+object Bug {
+ class Tag[W[M1[X1]]]
+
+ def ofType[W[M2[X2]]]: Tag[W] = ???
+ type InSeq [M3[X3]] = Some[M3[Any]]
+
+ // fail
+ val x = ofType[InSeq]
+
+ // okay
+ val y: Any = ofType[InSeq]
+ object T {
+ val z = ofType[InSeq]
+ }
+}
diff --git a/test/files/pos/bug796.scala b/test/files/pos/t796.scala
similarity index 100%
rename from test/files/pos/bug796.scala
rename to test/files/pos/t796.scala
diff --git a/test/files/pos/bug802.scala b/test/files/pos/t802.scala
similarity index 100%
rename from test/files/pos/bug802.scala
rename to test/files/pos/t802.scala
diff --git a/test/files/pos/bug803.scala b/test/files/pos/t803.scala
similarity index 100%
rename from test/files/pos/bug803.scala
rename to test/files/pos/t803.scala
diff --git a/test/files/pos/bug805.scala b/test/files/pos/t805.scala
similarity index 100%
rename from test/files/pos/bug805.scala
rename to test/files/pos/t805.scala
diff --git a/test/files/pos/t8060.scala b/test/files/pos/t8060.scala
new file mode 100644
index 0000000..90e014d
--- /dev/null
+++ b/test/files/pos/t8060.scala
@@ -0,0 +1,11 @@
+trait M[F[_]]
+
+trait P[A] {
+ type CC[X] = P[X]
+ def f(p: A => Boolean): M[CC]
+}
+
+trait Other {
+ // was infinite loop trying to dealias `x$1.CC`
+ def g[A](p: A => Boolean): P[A] => M[P] = _ f p
+}
diff --git a/test/files/pos/t8062.flags b/test/files/pos/t8062.flags
new file mode 100644
index 0000000..49d036a
--- /dev/null
+++ b/test/files/pos/t8062.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/pos/t8062/A_1.scala b/test/files/pos/t8062/A_1.scala
new file mode 100644
index 0000000..ca0411d
--- /dev/null
+++ b/test/files/pos/t8062/A_1.scala
@@ -0,0 +1,5 @@
+package warmup
+
+object Warmup {
+ def filter[A](p: Any => Boolean): Any = filter[Any](p)
+}
diff --git a/test/files/pos/t8062/B_2.scala b/test/files/pos/t8062/B_2.scala
new file mode 100644
index 0000000..f0a6761
--- /dev/null
+++ b/test/files/pos/t8062/B_2.scala
@@ -0,0 +1,3 @@
+object Test {
+ warmup.Warmup.filter[Any](x => false)
+}
diff --git a/test/files/pos/bug807.scala b/test/files/pos/t807.scala
similarity index 100%
rename from test/files/pos/bug807.scala
rename to test/files/pos/t807.scala
diff --git a/test/files/pos/t8111.scala b/test/files/pos/t8111.scala
new file mode 100644
index 0000000..0d63a16
--- /dev/null
+++ b/test/files/pos/t8111.scala
@@ -0,0 +1,24 @@
+trait T {
+
+ def crashy(ma: Any) {
+ // okay
+ val f1 = (u: Unit) => ma
+ foo(f1)()
+ foo((u: Unit) => ma)
+ foo(0, (u: Any) => ma) apply ()
+
+ // crash due to side effects on the onwer of the symbol in the
+ // qualifier or arguments of the application during an abandoned
+ // names/defaults transform. The code type checkes because of
+ // autp-tupling which promotes and empty parmater list to `(): Unit`
+ foo((u: Any) => ma)()
+
+ {{(u: Any) => ma}; this}.foo(0)()
+
+ foo({def foo = ma; 0})()
+
+ {def foo = ma; this}.foo(0)()
+ }
+
+ def foo(f: Any): Any => Any
+}
diff --git a/test/files/pos/bug812.scala b/test/files/pos/t812.scala
similarity index 100%
rename from test/files/pos/bug812.scala
rename to test/files/pos/t812.scala
diff --git a/test/files/pos/t8138.scala b/test/files/pos/t8138.scala
new file mode 100644
index 0000000..b980930
--- /dev/null
+++ b/test/files/pos/t8138.scala
@@ -0,0 +1,24 @@
+
+class U {
+ trait Transformer {
+ def transform(a: Tree): Tree = ???
+ }
+ trait Tree
+}
+
+object Test {
+ def m(u: U) = {
+ class C extends u.Transformer {
+ override def transform(t: u.Tree): u.Tree = {
+ null match {
+ case _ =>
+ // crashes in GenICode:
+ // error: Unknown type: <notype>, <notype> [class scala.reflect.internal.Types$NoType$, class scala.reflect.internal.Types$NoType$] TypeRef? false
+ (y: Any) => super.transform(???)
+ null
+ }
+ ???
+ }
+ }
+ }
+}
diff --git a/test/files/pos/t8146a.scala b/test/files/pos/t8146a.scala
new file mode 100644
index 0000000..e4eb8d3
--- /dev/null
+++ b/test/files/pos/t8146a.scala
@@ -0,0 +1,9 @@
+trait M[+A]
+
+object Test {
+ type Inty = Int
+ def t1(
+ x: M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[Int @unchecked]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]
+ ): M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[M[Inty @unchecked]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]
+ = x
+}
diff --git a/test/files/pos/t8146b.scala b/test/files/pos/t8146b.scala
new file mode 100644
index 0000000..dd031f6
--- /dev/null
+++ b/test/files/pos/t8146b.scala
@@ -0,0 +1,77 @@
+// non-deterministic type errors, non-termination.
+// seems to be due to inconsistent hashing/equality in SubTypePair
+
+import scala.language.{existentials, implicitConversions}
+import scala.annotation.unchecked.uncheckedVariance
+
+trait Column[T]
+
+// Turning this into a trait reduces (eliminates?) the likelihood of type errors (but not of non-termination)
+abstract class Shape[Level <: ShapeLevel, -Mixed_, Unpacked_, Packed_]
+
+trait ShapeLevel
+trait NestedShapeLevel extends ShapeLevel
+trait FlatShapeLevel extends NestedShapeLevel
+trait ColumnsShapeLevel extends FlatShapeLevel
+
+trait ProvenShape[U]
+
+object ProvenShape {
+ implicit def proveShapeOf[T, U](v: T)(implicit sh: Shape[_ <: FlatShapeLevel, T, U, _]): ProvenShape[U] = ???
+}
+
+sealed abstract class HList {
+ type Self <: HList
+ type :: [E] = HCons[E, Self]
+ final def :: [E](elem: E): :: [E] = ???
+}
+
+final class HCons[+H, +T <: HList](val head: H, val tail: T) extends HList {
+ type Self = HCons[H @uncheckedVariance, T @uncheckedVariance]
+}
+
+final object HNil extends HList {
+ type Self = HNil.type
+}
+
+// Success is more likely when not using these aliases
+object syntax {
+ type :: [+H, +T <: HList] = HCons[H, T]
+ type HNil = HNil.type
+}
+
+class HListBench {
+
+ import syntax._
+
+ implicit def columnShape[T, Level <: ShapeLevel]: Shape[Level, Column[T], T, Column[T]] = ???
+ implicit def provenShape[T, P](implicit shape: Shape[_ <: FlatShapeLevel, T, _, P]): Shape[FlatShapeLevel, ProvenShape[T], T, P] = ???
+ final class HListShape[Level <: ShapeLevel, M <: HList, U <: HList, P <: HList](val shapes: Seq[Shape[_ <: ShapeLevel, _, _, _]]) extends Shape[Level, M, U, P]
+ implicit def hnilShape[Level <: ShapeLevel] = new HListShape[Level, HNil.type, HNil.type, HNil.type](Nil)
+ implicit def hconsShape[Level <: ShapeLevel, M1, M2 <: HList, U1, U2 <: HList, P1, P2 <: HList]
+ (implicit s1: Shape[_ <: Level, M1, U1, P1], s2: HListShape[_ <: Level, M2, U2, P2]) =
+ new HListShape[Level, M1 :: M2, U1 :: U2, P1 :: P2](s1 +: s2.shapes)
+
+ trait A[T] {
+ def * : ProvenShape[T]
+ }
+
+ trait B extends A[
+ Int :: Int :: Int :: Int :: Int ::
+ Int :: Int :: Int :: Int :: Int ::
+ Int :: Int :: Int :: Int :: Int ::
+ Int :: Int :: Int :: Int :: Int ::
+ Int :: Int :: Int :: Int :: Int ::
+ Int :: Int :: HNil ] {
+
+ def c: Column[Int]
+
+ def * = c :: c :: c :: c :: c ::
+ c :: c :: c :: c :: c ::
+ c :: c :: c :: c :: c ::
+ c :: c :: c :: c :: c ::
+ c :: c :: c :: c :: c ::
+ c :: c :: HNil
+
+ }
+}
diff --git a/test/files/pos/t8152-performance.scala b/test/files/pos/t8152-performance.scala
new file mode 100644
index 0000000..b6d2ecd
--- /dev/null
+++ b/test/files/pos/t8152-performance.scala
@@ -0,0 +1,13 @@
+class HListBench {
+
+ class A[H, T]
+
+ type B[H, T] = A[H, T]
+
+ // was okay
+ type T1 = A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, A[Int, Nothing]]]]]]]]]]]]]]]]]]]]]]]]]]]]
+
+ // Took over a minute to validate variance in 2.10.3!
+ type T2 = B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, B[Int, Nothing]]]]]]]]]]]]]]]]]]]]]]]]]]]]
+
+}
\ No newline at end of file
diff --git a/test/files/pos/t839.scala b/test/files/pos/t839.scala
new file mode 100644
index 0000000..72f6ca0
--- /dev/null
+++ b/test/files/pos/t839.scala
@@ -0,0 +1,26 @@
+// see pending/pos/t112606A.scala
+package test;
+trait Test {
+ trait Global {
+ type Tree;
+ def get : Tree;
+ }
+ trait TreeBuilder {
+ val global : Global;
+ def set(tree : global.Tree) = {}
+ }
+ val nsc : Global;
+ trait FileImpl {
+ object treeBuilder extends TreeBuilder {
+ val global : nsc.type = nsc;
+ }
+ // OK
+ treeBuilder.set(nsc.get);
+ }
+ val file0 : FileImpl;
+ // OK
+ file0.treeBuilder.set(nsc.get);
+ def file : FileImpl;
+ // type mismatch
+ file.treeBuilder.set(nsc.get);
+}
diff --git a/test/files/pos/bug851.scala b/test/files/pos/t851.scala
similarity index 100%
rename from test/files/pos/bug851.scala
rename to test/files/pos/t851.scala
diff --git a/test/files/pos/bug873.scala b/test/files/pos/t873.scala
similarity index 100%
rename from test/files/pos/bug873.scala
rename to test/files/pos/t873.scala
diff --git a/test/files/pos/bug880.scala b/test/files/pos/t880.scala
similarity index 100%
rename from test/files/pos/bug880.scala
rename to test/files/pos/t880.scala
diff --git a/test/files/pos/bug892.scala b/test/files/pos/t892.scala
similarity index 100%
rename from test/files/pos/bug892.scala
rename to test/files/pos/t892.scala
diff --git a/test/files/pos/bug911.scala b/test/files/pos/t911.scala
similarity index 100%
rename from test/files/pos/bug911.scala
rename to test/files/pos/t911.scala
diff --git a/test/files/pos/t927.scala b/test/files/pos/t927.scala
new file mode 100644
index 0000000..534f355
--- /dev/null
+++ b/test/files/pos/t927.scala
@@ -0,0 +1,11 @@
+object Test {
+
+ def sum(stream: Stream[Int]): Int =
+ stream match {
+ case Stream.Empty => 0
+ case Stream.cons(hd, tl) => hd + sum(tl)
+ }
+ val str: Stream[Int] = List(1,2,3).iterator.toStream
+ assert(sum(str) == 6)
+
+}
diff --git a/test/files/pos/t942/Amount_1.java b/test/files/pos/t942/Amount_1.java
new file mode 100644
index 0000000..d9d37d1
--- /dev/null
+++ b/test/files/pos/t942/Amount_1.java
@@ -0,0 +1,5 @@
+import java.util.concurrent.Callable;
+
+public abstract class Amount_1<Q> extends Object
+ implements Callable<Amount_1<?>> {
+}
diff --git a/test/files/pos/t942/Test_2.scala b/test/files/pos/t942/Test_2.scala
new file mode 100644
index 0000000..3cc84da
--- /dev/null
+++ b/test/files/pos/t942/Test_2.scala
@@ -0,0 +1,3 @@
+abstract class Foo {
+ val x: Amount_1[Foo]
+}
diff --git a/test/files/pos/bug946.scala b/test/files/pos/t946.scala
similarity index 100%
rename from test/files/pos/bug946.scala
rename to test/files/pos/t946.scala
diff --git a/test/files/pos/ticket2251.scala b/test/files/pos/ticket2251.scala
index 7b6efb0..b3afee4 100644
--- a/test/files/pos/ticket2251.scala
+++ b/test/files/pos/ticket2251.scala
@@ -1,6 +1,6 @@
// Martin: I am not sure this is a solvable problem right now. I'll leave it in pending.
-// derived from pos/bug1001
+// derived from pos/t1001
class A
trait B[T <: B[T]] extends A
class C extends B[C]
diff --git a/test/files/run/bug4285.flags b/test/files/pos/trait-force-info.flags
similarity index 100%
rename from test/files/run/bug4285.flags
rename to test/files/pos/trait-force-info.flags
diff --git a/test/files/pos/trait-force-info.scala b/test/files/pos/trait-force-info.scala
new file mode 100644
index 0000000..e01d225
--- /dev/null
+++ b/test/files/pos/trait-force-info.scala
@@ -0,0 +1,18 @@
+/** This does NOT crash unless it's in the interactive package.
+ */
+
+package scala.tools.nsc
+package interactive
+
+trait MyContextTrees {
+ val self: Global
+ val NoContext = self.analyzer.NoContext
+}
+//
+// error: java.lang.AssertionError: assertion failed: trait Contexts.NoContext$ linkedModule: <none>List()
+// at scala.Predef$.assert(Predef.scala:160)
+// at scala.tools.nsc.symtab.classfile.ClassfileParser$innerClasses$.innerSymbol$1(ClassfileParser.scala:1211)
+// at scala.tools.nsc.symtab.classfile.ClassfileParser$innerClasses$.classSymbol(ClassfileParser.scala:1223)
+// at scala.tools.nsc.symtab.classfile.ClassfileParser.classNameToSymbol(ClassfileParser.scala:489)
+// at scala.tools.nsc.symtab.classfile.ClassfileParser.sig2type$1(ClassfileParser.scala:757)
+// at scala.tools.nsc.symtab.classfile.ClassfileParser.sig2type$1(ClassfileParser.scala:789)
diff --git a/test/files/pos/trait-parents.scala b/test/files/pos/trait-parents.scala
new file mode 100644
index 0000000..f6a2688
--- /dev/null
+++ b/test/files/pos/trait-parents.scala
@@ -0,0 +1,16 @@
+trait Bip extends Any
+trait Foo extends Any
+trait Bar extends AnyRef
+trait Quux
+
+object Test {
+ def f(x: Bip) = 1
+ def g1(x: Foo with Bip) = f(x)
+
+ def main(args: Array[String]): Unit = {
+ f(new Bip with Foo { })
+ f(new Foo with Bip { })
+ g1(new Bip with Foo { })
+ g1(new Foo with Bip { })
+ }
+}
diff --git a/test/files/pos/typetags.scala b/test/files/pos/typetags.scala
new file mode 100644
index 0000000..239a9b3
--- /dev/null
+++ b/test/files/pos/typetags.scala
@@ -0,0 +1,16 @@
+// TODO come up with a non-trivial universe different from ru
+// an rewrite this test, so that it makes sure that cross-universe implicit searches work
+//
+// import scala.reflect.{basis => rb}
+// import scala.reflect.runtime.{universe => ru}
+// object Test {
+// def main(args: Array[String]) {
+// def foo(implicit t: rb.TypeTag[List[Int]]) {
+// println(t)
+// val t2: ru.TypeTag[_] = t in ru.rootMirror
+// println(t2)
+// }
+// }
+// }
+
+object Test extends App
\ No newline at end of file
diff --git a/test/files/pos/unapplyComplex.scala b/test/files/pos/unapplyComplex.scala
index 7015834..148fcc1 100644
--- a/test/files/pos/unapplyComplex.scala
+++ b/test/files/pos/unapplyComplex.scala
@@ -14,14 +14,14 @@ object ComplexRect {
def unapply(z:Complex): Option[Complex] = {
if(z.isInstanceOf[ComplexRect]) Some(z) else z match {
case ComplexPolar(mod, arg) =>
- Some(new ComplexRect(mod*Math.cos(arg), mod*Math.sin(arg)))
+ Some(new ComplexRect(mod*math.cos(arg), mod*math.sin(arg)))
} } }
object ComplexPolar {
def unapply(z:Complex): Option[Complex] = {
if(z.isInstanceOf[ComplexPolar]) Some(z) else z match {
case ComplexRect(re,im) =>
- Some(new ComplexPolar(Math.sqrt(re*re + im*im), Math.atan(re/im)))
+ Some(new ComplexPolar(math.sqrt(re*re + im*im), math.atan(re/im)))
} } }
object Test {
@@ -31,7 +31,7 @@ object Test {
Console.println("mod"+mod+"arg"+arg)
}
val Komplex = ComplexRect
- new ComplexPolar(Math.sqrt(2),Math.Pi / 4.0) match {
+ new ComplexPolar(math.sqrt(2),math.Pi / 4.0) match {
case Komplex(re,im) => // z @ ???
Console.println("re"+re+" im"+im)
}
diff --git a/test/files/neg/bug4302.flags b/test/files/pos/unchecked-a.flags
similarity index 100%
rename from test/files/neg/bug4302.flags
rename to test/files/pos/unchecked-a.flags
diff --git a/test/files/pos/unchecked-a.scala b/test/files/pos/unchecked-a.scala
new file mode 100644
index 0000000..deceb91
--- /dev/null
+++ b/test/files/pos/unchecked-a.scala
@@ -0,0 +1,15 @@
+trait Y
+trait Z extends Y
+class X[+A <: Y]
+
+object Test {
+ def f1(x: X[_ <: Y]) = x match {
+ case _: X[Any] => // looks a little funny; `Any` is outside the bounds for `A`
+ }
+ def f2(x: X[_ <: Y]) = x match {
+ case _: X[Y] => // looks better, let's allow this (too)
+ }
+
+ // NonLocalReturnControl[_] warnings
+ def foo: Int = List(0).foldLeft(0){case _ => return 0}
+}
diff --git a/test/files/pos/value-class-override-no-spec.flags b/test/files/pos/value-class-override-no-spec.flags
new file mode 100644
index 0000000..a7e64e4
--- /dev/null
+++ b/test/files/pos/value-class-override-no-spec.flags
@@ -0,0 +1 @@
+-no-specialization
\ No newline at end of file
diff --git a/test/files/pos/value-class-override-no-spec.scala b/test/files/pos/value-class-override-no-spec.scala
new file mode 100644
index 0000000..79de5d9
--- /dev/null
+++ b/test/files/pos/value-class-override-no-spec.scala
@@ -0,0 +1,9 @@
+// There are two versions of this tests: one with and one without specialization.
+// The bug was only exposed *without* specialization.
+trait T extends Any {
+ def x: Any
+}
+
+final class StringOps(val repr0: String) extends AnyVal with T {
+ def x = ()
+}
diff --git a/test/files/pos/value-class-override-spec.scala b/test/files/pos/value-class-override-spec.scala
new file mode 100644
index 0000000..79de5d9
--- /dev/null
+++ b/test/files/pos/value-class-override-spec.scala
@@ -0,0 +1,9 @@
+// There are two versions of this tests: one with and one without specialization.
+// The bug was only exposed *without* specialization.
+trait T extends Any {
+ def x: Any
+}
+
+final class StringOps(val repr0: String) extends AnyVal with T {
+ def x = ()
+}
diff --git a/test/files/pos/virtpatmat_alts_subst.flags b/test/files/pos/virtpatmat_alts_subst.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/pos/virtpatmat_alts_subst.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/pos/virtpatmat_alts_subst.scala b/test/files/pos/virtpatmat_alts_subst.scala
new file mode 100644
index 0000000..e27c52f
--- /dev/null
+++ b/test/files/pos/virtpatmat_alts_subst.scala
@@ -0,0 +1,6 @@
+case class Foo(s: String) {
+ def appliedType(tycon: Any) =
+ tycon match {
+ case Foo(sym @ ("NothingClass" | "AnyClass")) => println(sym)
+ }
+}
diff --git a/test/files/jvm/bug680.check b/test/files/pos/virtpatmat_anonfun_for.flags
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/pos/virtpatmat_anonfun_for.flags
diff --git a/test/files/pos/virtpatmat_anonfun_for.scala b/test/files/pos/virtpatmat_anonfun_for.scala
new file mode 100644
index 0000000..8623cd9
--- /dev/null
+++ b/test/files/pos/virtpatmat_anonfun_for.scala
@@ -0,0 +1,8 @@
+trait Foo {
+ def bla = {
+ val tvs = "tvs"
+ Nil.foreach(x => x match {
+ case _ => println(tvs)
+ })
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/virtpatmat_binding_opt.flags b/test/files/pos/virtpatmat_binding_opt.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/pos/virtpatmat_binding_opt.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/pos/virtpatmat_binding_opt.scala b/test/files/pos/virtpatmat_binding_opt.scala
new file mode 100644
index 0000000..962e3d7
--- /dev/null
+++ b/test/files/pos/virtpatmat_binding_opt.scala
@@ -0,0 +1,11 @@
+class Test {
+ def combine = this match {
+ case that if that eq this => this // just return this
+ case that: Test2 =>
+ println(that)
+ this
+ case _ => error("meh")
+ }
+}
+
+class Test2 extends Test
\ No newline at end of file
diff --git a/test/files/pos/virtpatmat_castbinder.flags b/test/files/pos/virtpatmat_castbinder.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/pos/virtpatmat_castbinder.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/pos/virtpatmat_castbinder.scala b/test/files/pos/virtpatmat_castbinder.scala
new file mode 100644
index 0000000..53e937e
--- /dev/null
+++ b/test/files/pos/virtpatmat_castbinder.scala
@@ -0,0 +1,15 @@
+class IntMap[+V]
+case class Bin[+T](m: IntMap[T]) extends IntMap[T]
+case class Tip[+T](x: T) extends IntMap[T]
+
+trait IntMapIterator[V, T] {
+ def valueOf(tip: Tip[V]): T
+ def pop: IntMap[V]
+
+ def next: T =
+ pop match {
+ case Bin(t at Tip(_)) => {
+ valueOf(t)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/virtpatmat_exhaust.scala b/test/files/pos/virtpatmat_exhaust.scala
new file mode 100644
index 0000000..a2f47c8
--- /dev/null
+++ b/test/files/pos/virtpatmat_exhaust.scala
@@ -0,0 +1,24 @@
+sealed trait Option {}
+case class Choice(a: Option, b: Option) extends Option;
+case class Some(x: Boolean) extends Option;
+case object None extends Option;
+
+object test {
+
+// drop any case and it will report an error
+// note that booleans are taken into account
+ def f(opt: Option) = opt match {
+ case Choice(None, None) => 1;
+ case Choice(None, Some(_)) => 1;
+ case Choice(None, Choice(_, _)) => 1;
+ case Choice(Some(true), None) => 1;
+ case Choice(Some(false), None) => 1;
+ case Choice(Some(_), Some(_)) => 1;
+ case Choice(Some(_), Choice(_, _)) => 1;
+ case Choice(Choice(_, _), None) => 1;
+ case Choice(Choice(_, _), Some(_)) => 1;
+ case Choice(Choice(_, _), Choice(_, _)) => 1;
+ case Some(b) => 4;
+ case None => 5;
+ }
+}
diff --git a/test/files/neg/anyval-sealed.flags b/test/files/pos/virtpatmat_exhaust_unchecked.flags
similarity index 100%
rename from test/files/neg/anyval-sealed.flags
rename to test/files/pos/virtpatmat_exhaust_unchecked.flags
diff --git a/test/files/pos/virtpatmat_exhaust_unchecked.scala b/test/files/pos/virtpatmat_exhaust_unchecked.scala
new file mode 100644
index 0000000..641f2b4
--- /dev/null
+++ b/test/files/pos/virtpatmat_exhaust_unchecked.scala
@@ -0,0 +1,24 @@
+sealed trait Option {}
+case class Choice(a: Option, b: Option) extends Option;
+case class Some(x: Boolean) extends Option;
+case object None extends Option;
+
+object test {
+
+// drop any case and it will report an error
+// note that booleans are taken into account
+ def f(opt: Option) = (opt: @unchecked) match {
+ case Choice(None, None) => 1;
+ case Choice(None, Some(_)) => 1;
+ case Choice(None, Choice(_, _)) => 1;
+ case Choice(Some(true), None) => 1;
+ // case Choice(Some(false), None) => 1;
+ case Choice(Some(_), Some(_)) => 1;
+ case Choice(Some(_), Choice(_, _)) => 1;
+ case Choice(Choice(_, _), None) => 1;
+ case Choice(Choice(_, _), Some(_)) => 1;
+ case Choice(Choice(_, _), Choice(_, _)) => 1;
+ case Some(b) => 4;
+ case None => 5;
+ }
+}
diff --git a/test/files/pos/virtpatmat_exist1.flags b/test/files/pos/virtpatmat_exist1.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/pos/virtpatmat_exist1.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/pos/virtpatmat_exist1.scala b/test/files/pos/virtpatmat_exist1.scala
new file mode 100644
index 0000000..ccb9129
--- /dev/null
+++ b/test/files/pos/virtpatmat_exist1.scala
@@ -0,0 +1,24 @@
+import annotation.unchecked.{ uncheckedVariance=> uV }
+import scala.collection.immutable.{ListMap, HashMap, ListSet, HashSet}
+
+object Test {
+ class HashMapCollision1[A, +B](var hash: Int, var kvs: ListMap[A, B @uV]) extends HashMap[A, B @uV]
+ class HashSetCollision1[A](var hash: Int, var ks: ListSet[A]) extends HashSet[A]
+
+ def splitArray[T](ad: Array[Iterable[T]]): Any =
+ ad(0) match {
+ case _: HashMapCollision1[_, _] | _: HashSetCollision1[_] => null
+ }
+
+ // without type ascription for the one in the body of the last flatmap of each alternative, type inference borks on the existentials
+ // def splitArray[T >: Nothing <: Any](ad: Array[Iterable[T]]): Any = { import OptionMatching._
+ // runOrElse(ad.apply(0))(((x1: Iterable[T]) => (
+ // or(((x4: Iterable[T]) => one(null)),
+ // guard(x1.isInstanceOf[Iterable[T] with Test.HashMapCollision1[_,_]], x1.asInstanceOf[Iterable[T] with Test.HashMapCollision1[_,_]]).flatMap(((x2: Iterable[T] with Test.HashMapCollision1[_,_]) => one(x2))),
+ // guard(x1.isInstanceOf[Test.HashSetCollision1[_]], x1.asInstanceOf[Iterable[T] with Test.HashSetCollision1[_]]).flatMap(((x3: Iterable[T] with Test.HashSetCollision1[_]) => one(x3)))): Option[Any]).orElse(
+ // (zero: Option[Any])))
+ // )
+ // }
+
+}
+
diff --git a/test/files/pos/virtpatmat_exist2.flags b/test/files/pos/virtpatmat_exist2.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/pos/virtpatmat_exist2.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/pos/virtpatmat_exist2.scala b/test/files/pos/virtpatmat_exist2.scala
new file mode 100644
index 0000000..b0e4c66
--- /dev/null
+++ b/test/files/pos/virtpatmat_exist2.scala
@@ -0,0 +1,20 @@
+class ParseResult[+T]
+case class MemoEntry[+T](var r: Either[Nothing,ParseResult[_]])
+
+object Test {
+ def grow[T]: ParseResult[T] = (null: MemoEntry[T]) match {
+ case MemoEntry(Right(x: ParseResult[_])) => x.asInstanceOf[ParseResult[T]]
+ }
+
+ // what's the _$1 doing there?
+ // def grow[T >: Nothing <: Any]: ParseResult[T] = {
+ // import OptionMatching._
+ // runOrElse[MemoEntry[T], ParseResult[T]]((null: MemoEntry[T]))(((x1: MemoEntry[T]) =>
+ // (MemoEntry.unapply[T](x1).flatMap[ParseResult[T]](((x4: Either[Nothing,ParseResult[_]]) =>
+ // guard[Right[Nothing,ParseResult[_]]](x4.isInstanceOf[Right[Nothing,ParseResult[_]]], x4.asInstanceOf[Right[Nothing,ParseResult[_]]]).flatMap[ParseResult[T]](((cp3: Right[Nothing,ParseResult[_]]) =>
+ // scala.Right.unapply[Nothing, ParseResult[_]](cp3).flatMap[ParseResult[T]](((x5: ParseResult[_]) =>
+ // guard[ParseResult[_$1]](x5.ne(null), x5.asInstanceOf[ParseResult[_]]).flatMap[ParseResult[T]](((x6: ParseResult[_]) =>
+ // one[ParseResult[T]](x6.asInstanceOf[ParseResult[T]]))))))))): Option[ParseResult[T]]
+ // ).orElse[ParseResult[T]]((zero: Option[ParseResult[T]]))))
+ // }
+}
\ No newline at end of file
diff --git a/test/files/pos/virtpatmat_exist3.flags b/test/files/pos/virtpatmat_exist3.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/pos/virtpatmat_exist3.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/pos/virtpatmat_exist3.scala b/test/files/pos/virtpatmat_exist3.scala
new file mode 100644
index 0000000..c8f8738
--- /dev/null
+++ b/test/files/pos/virtpatmat_exist3.scala
@@ -0,0 +1,12 @@
+class ReferenceQueue[T] {
+ def wrapper(jref: ReferenceQueue[_]): ReferenceQueue[T] =
+ jref match {
+ case null => null
+ }
+
+ // def wrapper(jref: ReferenceQueue[_]): ReferenceQueue[T] = OptionMatching.runOrElse(jref)(((x1: ReferenceQueue[_]) =>
+ // (OptionMatching.guard(null.==(x1), x1.asInstanceOf[ReferenceQueue[_]]).flatMap(((x2: ReferenceQueue[_]) =>
+ // OptionMatching.one(null))): Option[ReferenceQueue[T]]).orElse(
+ // (OptionMatching.zero: Option[ReferenceQueue[T]])))
+ // )
+}
\ No newline at end of file
diff --git a/test/files/pos/virtpatmat_exist4.scala b/test/files/pos/virtpatmat_exist4.scala
new file mode 100644
index 0000000..a04d0e3
--- /dev/null
+++ b/test/files/pos/virtpatmat_exist4.scala
@@ -0,0 +1,35 @@
+trait Global {
+ trait Tree
+ trait Symbol { def foo: Boolean }
+}
+
+trait IMain { self: MemberHandlers =>
+ val global: Global
+ def handlers: List[MemberHandler]
+}
+
+trait MemberHandlers {
+ val intp: IMain
+ import intp.global._
+ sealed abstract class MemberHandler(val member: Tree) {
+ def importedSymbols: List[Symbol]
+ }
+}
+
+object Test {
+ var intp: IMain with MemberHandlers = null
+
+ val handlers = intp.handlers
+ handlers.filterNot(_.importedSymbols.isEmpty).zipWithIndex foreach {
+ case (handler, idx) =>
+ val (types, terms) = handler.importedSymbols partition (_.foo)
+ }
+}
+
+object Test2 {
+ type JClass = java.lang.Class[_]
+
+ def tvarString(bounds: List[AnyRef]) = {
+ bounds collect { case x: JClass => x }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/virtpatmat_exist_uncurry.scala b/test/files/pos/virtpatmat_exist_uncurry.scala
new file mode 100644
index 0000000..e017da6
--- /dev/null
+++ b/test/files/pos/virtpatmat_exist_uncurry.scala
@@ -0,0 +1,6 @@
+object Test {
+ trait Leaf[T] {
+ def collect[U](f: PartialFunction[Leaf[_], U]): List[U]
+ def leaves: List[Leaf[T]] = collect { case l: Leaf[T] => l }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/virtpatmat_gadt_array.flags b/test/files/pos/virtpatmat_gadt_array.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/pos/virtpatmat_gadt_array.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/pos/virtpatmat_gadt_array.scala b/test/files/pos/virtpatmat_gadt_array.scala
new file mode 100644
index 0000000..27e72aa
--- /dev/null
+++ b/test/files/pos/virtpatmat_gadt_array.scala
@@ -0,0 +1,15 @@
+import scala.collection.mutable._
+object Test {
+ def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = xs match {
+ case x: Array[AnyRef] => refArrayOps[AnyRef](x).asInstanceOf[ArrayOps[T]]
+ case null => null
+ }
+ // def genericArrayOps[T >: Nothing <: Any](xs: Array[T]): scala.collection.mutable.ArrayOps[T]
+ // = OptionMatching.runOrElse(xs)(((x1: Array[T]) =>
+ // ((OptionMatching.guard(x1.isInstanceOf[Array[AnyRef]], x1.asInstanceOf[Array[T] with Array[AnyRef]]).flatMap(((x2: Array[T] with Array[AnyRef]) =>
+ // OptionMatching.one(Test.this.refArrayOps[AnyRef](x2).asInstanceOf[scala.collection.mutable.ArrayOps[T]]))): Option[scala.collection.mutable.ArrayOps[T]]).orElse(
+ // (OptionMatching.guard(null.==(x1), x1.asInstanceOf[Array[T]]).flatMap(((x3: Array[T]) =>
+ // OptionMatching.one(null))): Option[scala.collection.mutable.ArrayOps[T]])): Option[scala.collection.mutable.ArrayOps[T]]).orElse((OptionMatching.zero: Option[scala.collection.mutable.ArrayOps[T]]))))
+
+ def refArrayOps[T <: AnyRef](xs: Array[T]): ArrayOps[T] = new ArrayOps.ofRef[T](xs)
+}
\ No newline at end of file
diff --git a/test/files/pos/virtpatmat_infer_single_1.flags b/test/files/pos/virtpatmat_infer_single_1.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/pos/virtpatmat_infer_single_1.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/pos/virtpatmat_infer_single_1.scala b/test/files/pos/virtpatmat_infer_single_1.scala
new file mode 100644
index 0000000..b42af95
--- /dev/null
+++ b/test/files/pos/virtpatmat_infer_single_1.scala
@@ -0,0 +1,7 @@
+case class TypeBounds(a: Type, b: Type)
+class Type {
+ def bounds: TypeBounds = bounds match {
+ case TypeBounds(_: this.type, _: this.type) => TypeBounds(this, this)
+ case oftp => oftp
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/virtpatmat_instof_valuetype.flags b/test/files/pos/virtpatmat_instof_valuetype.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/pos/virtpatmat_instof_valuetype.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/pos/virtpatmat_instof_valuetype.scala b/test/files/pos/virtpatmat_instof_valuetype.scala
new file mode 100644
index 0000000..1dda9bf
--- /dev/null
+++ b/test/files/pos/virtpatmat_instof_valuetype.scala
@@ -0,0 +1,8 @@
+case class Data(private val t: Option[String] = None, only: Boolean = false) {
+ def add(other: Data) = {
+ other match {
+ case Data(None, b) => ()
+ case Data(Some(_), b) => ()
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/virtpatmat_obj_in_case.flags b/test/files/pos/virtpatmat_obj_in_case.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/pos/virtpatmat_obj_in_case.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/pos/virtpatmat_obj_in_case.scala b/test/files/pos/virtpatmat_obj_in_case.scala
new file mode 100644
index 0000000..496de4c
--- /dev/null
+++ b/test/files/pos/virtpatmat_obj_in_case.scala
@@ -0,0 +1,5 @@
+class ObjInCase {
+ 0 match {
+ case _ => object o
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/virtpatmat_partialfun_nsdnho.scala b/test/files/pos/virtpatmat_partialfun_nsdnho.scala
new file mode 100644
index 0000000..f79e828
--- /dev/null
+++ b/test/files/pos/virtpatmat_partialfun_nsdnho.scala
@@ -0,0 +1,18 @@
+class Test {
+ // m.$minus(1)
+ // at scala.Predef$.assert(Predef.scala:185)
+ // at scala.tools.nsc.Global.assert(Global.scala:187)
+ // at scala.tools.nsc.typechecker.SuperAccessors$SuperAccTransformer.transform(SuperAccessors.scala:291)
+ val a: (Map[Int, Int] => (Any => Any)) = { m => { case _ => m - 1} }
+
+ // patmat-crash.scala:9: error: erroneous or inaccessible type
+ val b: (Int => (Any => Any)) = { m => { case _ => m } }
+
+ // no-symbol does not have an owner (this is a bug: scala version 2.10.0-20120420-170445-56c1f29250)
+ // at scala.reflect.internal.SymbolTable.abort(SymbolTable.scala:45)
+ // at scala.tools.nsc.Global.abort(Global.scala:202)
+ // at scala.reflect.internal.Symbols$NoSymbol.owner(Symbols.scala:3031)
+ // at scala.tools.nsc.typechecker.SuperAccessors$SuperAccTransformer.hostForAccessorOf(SuperAccessors.scala:474)
+ // at scala.tools.nsc.typechecker.SuperAccessors$SuperAccTransformer.needsProtectedAccessor(SuperAccessors.scala:457)
+ val c: (Int => (Any => Any)) = { m => { case _ => m.toInt } }
+}
\ No newline at end of file
diff --git a/test/files/pos/virtpatmat_reach_const.scala b/test/files/pos/virtpatmat_reach_const.scala
new file mode 100644
index 0000000..b55b7cb
--- /dev/null
+++ b/test/files/pos/virtpatmat_reach_const.scala
@@ -0,0 +1,11 @@
+// check the interaction between constants and type tests in creating the equality axioms
+object Test {
+ type Formula = List[String]
+ val TrueF: Formula = List()
+ def distribute(a: Formula, b: Formula) = (a, b) match {
+ case (TrueF, _) =>
+ case (_, TrueF) => // bug: considered unreachable
+ case (a :: Nil, b :: Nil) =>
+ case _ =>
+ }
+}
\ No newline at end of file
diff --git a/test/files/pos/xlint1.flags b/test/files/pos/xlint1.flags
new file mode 100644
index 0000000..7949c2a
--- /dev/null
+++ b/test/files/pos/xlint1.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings
diff --git a/test/files/pos/xlint1.scala b/test/files/pos/xlint1.scala
new file mode 100644
index 0000000..27936d8
--- /dev/null
+++ b/test/files/pos/xlint1.scala
@@ -0,0 +1,13 @@
+package object foo {
+ implicit class Bar[T](val x: T) extends AnyVal {
+ def bippy = 1
+ }
+}
+
+package foo {
+ object Baz {
+ def main(args: Array[String]): Unit = {
+ "abc".bippy
+ }
+ }
+}
diff --git a/test/files/pos/z1720.scala b/test/files/pos/z1720.scala
new file mode 100644
index 0000000..7394d42
--- /dev/null
+++ b/test/files/pos/z1720.scala
@@ -0,0 +1,16 @@
+package test
+
+class Thing {
+ def info: Info[this.type] = InfoRepository.getInfo(this)
+ def info2: Info[this.type] = {
+ def self: this.type = this
+ InfoRepository.getInfo(self)
+ }
+}
+
+trait Info[T]
+case class InfoImpl[T](thing: T) extends Info[T]
+
+object InfoRepository {
+ def getInfo(t: Thing): Info[t.type] = InfoImpl(t)
+}
diff --git a/test/files/pos/z1730.flags b/test/files/pos/z1730.flags
new file mode 100644
index 0000000..5319681
--- /dev/null
+++ b/test/files/pos/z1730.flags
@@ -0,0 +1 @@
+-Ycheck:all
\ No newline at end of file
diff --git a/test/files/pos/z1730.scala b/test/files/pos/z1730.scala
new file mode 100644
index 0000000..0c5875a
--- /dev/null
+++ b/test/files/pos/z1730.scala
@@ -0,0 +1,13 @@
+// /scala/trac/z1730/a.scala
+// Wed May 23 07:41:25 PDT 2012
+
+class X[R] {
+ def xx(value: => R, addTweak: Boolean = true) = 0
+}
+
+class Boo {
+ implicit def toX[R](v: R) : X[R] = null
+ def goo2 {
+ 3.xx(34)
+ }
+}
diff --git a/test/files/positions/Unpositioned1.scala b/test/files/positions/Unpositioned1.scala
index 7fc520e..174db90 100644
--- a/test/files/positions/Unpositioned1.scala
+++ b/test/files/positions/Unpositioned1.scala
@@ -1,3 +1,3 @@
object Unpositioned1 {
- for (a <- Some("foo") ; val b = true) {}
+ for (a <- Some("foo") ; b = true) {}
}
diff --git a/test/files/presentation/callcc-interpreter.check b/test/files/presentation/callcc-interpreter.check
new file mode 100644
index 0000000..9a92c40
--- /dev/null
+++ b/test/files/presentation/callcc-interpreter.check
@@ -0,0 +1,94 @@
+reload: CallccInterpreter.scala
+
+askTypeCompletion at CallccInterpreter.scala(51,38)
+================================================================================
+[response] askTypeCompletion at (51,38)
+retrieved 64 members
+[accessible: true] `class AddcallccInterpreter.Add`
+[accessible: true] `class AppcallccInterpreter.App`
+[accessible: true] `class CcccallccInterpreter.Ccc`
+[accessible: true] `class ConcallccInterpreter.Con`
+[accessible: true] `class FuncallccInterpreter.Fun`
+[accessible: true] `class LamcallccInterpreter.Lam`
+[accessible: true] `class McallccInterpreter.M`
+[accessible: true] `class NumcallccInterpreter.Num`
+[accessible: true] `class VarcallccInterpreter.Var`
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method +(other: String)String`
+[accessible: true] `method ->[B](y: B)(callccInterpreter.type, B)`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method add(a: callccInterpreter.Value, b: callccInterpreter.Value)callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with Serializable with callccInterpreter.Value]`
+[accessible: true] `method apply(a: callccInterpreter.Value, b: callccInterpreter.Value)callccInterpreter.M[callccInterpreter.Value]`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method callCC[A](h: (A => callccInterpreter.M[A]) => callccInterpreter.M[A])callccInterpreter.M[A]`
+[accessible: true] `method clone()Object`
+[accessible: true] `method ensuring(cond: Boolean)callccInterpreter.type`
+[accessible: true] `method ensuring(cond: Boolean, msg: => Any)callccInterpreter.type`
+[accessible: true] `method ensuring(cond: callccInterpreter.type => Boolean)callccInterpreter.type`
+[accessible: true] `method ensuring(cond: callccInterpreter.type => Boolean, msg: => Any)callccInterpreter.type`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method finalize()Unit`
+[accessible: true] `method formatted(fmtstr: String)String`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method id[A]=> A => A`
+[accessible: true] `method interp(t: callccInterpreter.Term, e: callccInterpreter.Environment)callccInterpreter.M[callccInterpreter.Value]`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method lookup(x: callccInterpreter.Name, e: callccInterpreter.Environment)callccInterpreter.M[callccInterpreter.Value]`
+[accessible: true] `method main(args: Array[String])Unit`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method showM(m: callccInterpreter.M[callccInterpreter.Value])String`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method test(t: callccInterpreter.Term)String`
+[accessible: true] `method toString()String`
+[accessible: true] `method unitM[A](a: A)callccInterpreter.M[A]`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `method x=> callccInterpreter.type`
+[accessible: true] `method →[B](y: B)(callccInterpreter.type, B)`
+[accessible: true] `object WrongcallccInterpreter.Wrong.type`
+[accessible: true] `trait TermcallccInterpreter.Term`
+[accessible: true] `trait ValuecallccInterpreter.Value`
+[accessible: true] `type AnswercallccInterpreter.Answer`
+[accessible: true] `type EnvironmentcallccInterpreter.Environment`
+[accessible: true] `type NamecallccInterpreter.Name`
+[accessible: true] `value __leftOfArrowcallccInterpreter.type`
+[accessible: true] `value __resultOfEnsuringcallccInterpreter.type`
+[accessible: true] `value selfAny`
+[accessible: true] `value term0callccInterpreter.App`
+[accessible: true] `value term1callccInterpreter.App`
+[accessible: true] `value term2callccInterpreter.Add`
+================================================================================
+
+askType at CallccInterpreter.scala(14,21)
+================================================================================
+[response] askTypeAt at (14,21)
+def unitM[A >: Nothing <: Any](a: A): callccInterpreter.M[A] = callccInterpreter.this.M.apply[A](((c: A => callccInterpreter.Answer) => c.apply(a)))
+================================================================================
+
+askType at CallccInterpreter.scala(16,12)
+================================================================================
+[response] askTypeAt at (16,12)
+def id[A >: Nothing <: Any]: A => A = ((x: A) => x)
+================================================================================
+
+askType at CallccInterpreter.scala(17,25)
+================================================================================
+[response] askTypeAt at (17,25)
+def showM(m: callccInterpreter.M[callccInterpreter.Value]): String = m.in.apply(callccInterpreter.this.id[callccInterpreter.Value]).toString()
+================================================================================
+
+askType at CallccInterpreter.scala(50,30)
+================================================================================
+[response] askTypeAt at (50,30)
+def add(a: callccInterpreter.Value, b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with Serializable with callccInterpreter.Value] = scala.this.Predef.Pair.apply[callccInterpreter.Value, callccInterpreter.Value](a, b) match {
+ case scala.this.Predef.Pair.unapply[callccInterpreter.Value, callccInterpreter.Value](<unapply-selector>) <unapply> ((n: Int)callccInterpreter.Num((m @ _)), (n: Int)callccInterpreter.Num((n @ _))) => this.unitM[callccInterpreter.Num](callccInterpreter.this.Num.apply(m.+(n)))
+ case _ => callccInterpreter.this.unitM[callccInterpreter.Wrong.type](callccInterpreter.this.Wrong)
+}
+================================================================================
diff --git a/test/files/presentation/callcc-interpreter/Runner.scala b/test/files/presentation/callcc-interpreter/Runner.scala
new file mode 100644
index 0000000..1ef3cf9
--- /dev/null
+++ b/test/files/presentation/callcc-interpreter/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/callcc-interpreter/src/CallccInterpreter.scala b/test/files/presentation/callcc-interpreter/src/CallccInterpreter.scala
new file mode 100644
index 0000000..0e96dfa
--- /dev/null
+++ b/test/files/presentation/callcc-interpreter/src/CallccInterpreter.scala
@@ -0,0 +1,86 @@
+object callccInterpreter {
+
+ type Answer = Value
+
+ /**
+ * A continuation monad.
+ */
+ case class M[A](in: (A => Answer) => Answer) {
+ def bind[B](k: A => M[B]) = M[B](c => in (a => k(a) in c))
+ def map[B](f: A => B): M[B] = bind(x => unitM(f(x)))
+ def flatMap[B](f: A => M[B]): M[B] = bind(f)
+ }
+
+ def unitM[A](a: A) /*?*/ = M[A](c => c(a))
+
+ def id[A] /*?*/ = (x: A) => x
+ def showM(m: M[Value]) /*?*/ = (m in id).toString()
+
+ def callCC[A](h: (A => M[A]) => M[A]) =
+ M[A](c => h(a => M[A](d => c(a))) in c)
+
+ type Name = String
+
+ trait Term
+ case class Var(x: Name) extends Term
+ case class Con(n: int) extends Term
+ case class Add(l: Term, r: Term) extends Term
+ case class Lam(x: Name, body: Term) extends Term
+ case class App(fun: Term, arg: Term) extends Term
+ case class Ccc(x: Name, t: Term) extends Term
+
+ trait Value
+ case object Wrong extends Value {
+ override def toString() = "wrong"
+ }
+ case class Num(n: Int) extends Value {
+ override def toString() = n.toString()
+ }
+ case class Fun(f: Value => M[Value]) extends Value {
+ override def toString() = "<function>"
+ }
+
+ type Environment = List[Pair[Name, Value]]
+
+ def lookup(x: Name, e: Environment): M[Value] = e match {
+ case List() => unitM(Wrong)
+ case Pair(y, b) :: e1 => if (x == y) unitM(b) else lookup(x, e1)
+ }
+
+ def add(a: Value, b: Value) /*?*/ = Pair(a, b) match {
+ case Pair(Num(m), Num(n)) => this./*!*/unitM(Num(m + n))
+ case _ => unitM(Wrong)
+ }
+
+ def apply(a: Value, b: Value): M[Value] = a match {
+ case Fun(k) => k(b)
+ case _ => unitM(Wrong)
+ }
+
+ def interp(t: Term, e: Environment): M[Value] = t match {
+ case Var(x) => lookup(x, e)
+ case Con(n) => unitM(Num(n))
+ case Add(l, r) => for (val a <- interp(l, e);
+ val b <- interp(r, e);
+ val c <- add(a, b))
+ yield c
+ case Lam(x, t) => unitM(Fun(a => interp(t, Pair(x, a) :: e)))
+ case App(f, t) => for (val a <- interp(f, e);
+ val b <- interp(t, e);
+ val c <- apply(a, b))
+ yield c
+ case Ccc(x, t) => callCC(k => interp(t, Pair(x, Fun(k)) :: e))
+ }
+
+ def test(t: Term): String = showM(interp(t, List()))
+
+ val term0 = App(Lam("x", Add(Var("x"), Var("x"))), Add(Con(10), Con(11)))
+ val term1 = App(Con(1), Con(2))
+ val term2 = Add(Con(1), Ccc("k", Add(Con(2), App(Var("k"), Con(4)))))
+
+ def main(args: Array[String]) {
+ println(test(term0))
+ println(test(term1))
+ println(test(term2))
+ }
+}
\ No newline at end of file
diff --git a/test/files/presentation/completion-implicit-chained.check b/test/files/presentation/completion-implicit-chained.check
new file mode 100644
index 0000000..24417cf
--- /dev/null
+++ b/test/files/presentation/completion-implicit-chained.check
@@ -0,0 +1,29 @@
+reload: Completions.scala
+
+askTypeCompletion at Completions.scala(11,16)
+================================================================================
+[response] askTypeCompletion at (11,16)
+retrieved 24 members
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method map(x: Int => Int)(implicit a: DummyImplicit)test.O.type`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method toString()String`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `value prefix123Int`
+[accessible: false] `method clone()Object`
+[accessible: false] `method finalize()Unit`
+================================================================================
diff --git a/test/files/presentation/completion-implicit-chained/Test.scala b/test/files/presentation/completion-implicit-chained/Test.scala
new file mode 100644
index 0000000..bec1131
--- /dev/null
+++ b/test/files/presentation/completion-implicit-chained/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/completion-implicit-chained/src/Completions.scala b/test/files/presentation/completion-implicit-chained/src/Completions.scala
new file mode 100644
index 0000000..67922df
--- /dev/null
+++ b/test/files/presentation/completion-implicit-chained/src/Completions.scala
@@ -0,0 +1,12 @@
+package test
+
+import scala.Predef.DummyImplicit // turn off other predef implicits for a cleaner .check file.
+
+object O {
+ def map(x: Int => Int)(implicit a: DummyImplicit): O.type = this
+ val prefix123 : Int = 0
+}
+
+class Foo {
+ O.map(x => x)./*!*/ // we want the presentation compiler to apply the implicit argument list.
+}
diff --git a/test/files/presentation/forgotten-ask.scala b/test/files/presentation/forgotten-ask.scala
new file mode 100644
index 0000000..358dd75
--- /dev/null
+++ b/test/files/presentation/forgotten-ask.scala
@@ -0,0 +1,33 @@
+import scala.tools.nsc.interactive._
+import tests._
+
+/** Test that no ask calls are left unanswered after a compiler has shut down. */
+object Test extends InteractiveTest {
+ import compiler._
+
+ def askItem(): Response[Unit] = {
+ compiler.askForResponse { () =>
+ Thread.sleep(100)
+ }
+ }
+
+ final val Timeout = 5000 //ms
+
+ override def main(args: Array[String]) {
+ val item1 = askItem()
+
+ compiler.askShutdown()
+
+ Thread.sleep(1000) // wait a bit, the compiler is shutting down
+ val item2 = askItem()
+
+ item1.get(Timeout) match {
+ case None => println("TIMEOUT")
+ case _ =>
+ }
+ item2.get(Timeout) match {
+ case None => println("TIMEOUT")
+ case _ =>
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/presentation/hyperlinks.check b/test/files/presentation/hyperlinks.check
new file mode 100644
index 0000000..1051b67
--- /dev/null
+++ b/test/files/presentation/hyperlinks.check
@@ -0,0 +1,181 @@
+reload: NameDefaultTests.scala, PatMatTests.scala, SuperTypes.scala
+
+askHyperlinkPos for `someOtherInt` at (14,24) NameDefaultTests.scala
+================================================================================
+[response] found askHyperlinkPos for `someOtherInt` at (12,9) NameDefaultTests.scala
+================================================================================
+
+askHyperlinkPos for `someString` at (14,45) NameDefaultTests.scala
+================================================================================
+[response] found askHyperlinkPos for `someString` at (3,7) NameDefaultTests.scala
+================================================================================
+
+askHyperlinkPos for `CaseOne` at (12,18) PatMatTests.scala
+================================================================================
+[response] found askHyperlinkPos for `CaseOne` at (5,12) PatMatTests.scala
+================================================================================
+
+askHyperlinkPos for `first` at (14,21) PatMatTests.scala
+================================================================================
+[response] found askHyperlinkPos for `first` at (12,29) PatMatTests.scala
+================================================================================
+
+askHyperlinkPos for `tmp` at (15,19) PatMatTests.scala
+================================================================================
+[response] found askHyperlinkPos for `tmp` at (13,13) PatMatTests.scala
+================================================================================
+
+askHyperlinkPos for `CaseTwo` at (17,18) PatMatTests.scala
+================================================================================
+[response] found askHyperlinkPos for `CaseTwo` at (6,12) PatMatTests.scala
+================================================================================
+
+askHyperlinkPos for `mystring` at (18,24) PatMatTests.scala
+================================================================================
+[response] found askHyperlinkPos for `mystring` at (17,25) PatMatTests.scala
+================================================================================
+
+askHyperlinkPos for `x` at (25,13) PatMatTests.scala
+================================================================================
+[response] found askHyperlinkPos for `x` at (23,10) PatMatTests.scala
+================================================================================
+
+askHyperlinkPos for `y` at (25,21) PatMatTests.scala
+================================================================================
+[response] found askHyperlinkPos for `y` at (23,13) PatMatTests.scala
+================================================================================
+
+askHyperlinkPos for `BadPos` at (10,26) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `BadPos` at (2,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `BadPos` at (11,26) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `BadPos` at (2,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `Trait` at (12,25) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `Trait` at (6,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `SubTrait` at (13,28) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `SubTrait` at (7,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `Trait` at (14,25) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `Trait` at (6,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `LateralTrait` at (14,48) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `LateralTrait` at (8,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `Base` at (15,24) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `Base` at (4,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `Trait` at (15,40) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `Trait` at (6,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `LateralTrait` at (15,63) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `LateralTrait` at (8,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PBase` at (19,29) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PBase` at (17,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PTrait` at (20,33) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PTrait` at (19,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PBase` at (21,36) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PBase` at (17,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PTrait` at (23,27) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PTrait` at (19,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PSubTrait` at (24,30) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PSubTrait` at (20,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PTrait` at (25,27) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PTrait` at (19,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PLateralTrait` at (25,56) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PLateralTrait` at (21,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PBase` at (26,26) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PBase` at (17,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PTrait` at (26,48) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PTrait` at (19,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PLateralTrait` at (26,77) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PLateralTrait` at (21,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `BadPos` at (28,23) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `BadPos` at (2,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PTrait` at (29,23) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PTrait` at (19,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PSubTrait` at (30,26) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PSubTrait` at (20,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PTrait` at (31,23) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PTrait` at (19,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PLateralTrait` at (31,52) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PLateralTrait` at (21,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PBase` at (32,22) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PBase` at (17,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PTrait` at (32,44) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PTrait` at (19,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PLateralTrait` at (32,73) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PLateralTrait` at (21,7) SuperTypes.scala
+================================================================================
diff --git a/test/files/presentation/hyperlinks/Runner.scala b/test/files/presentation/hyperlinks/Runner.scala
new file mode 100644
index 0000000..61da49a
--- /dev/null
+++ b/test/files/presentation/hyperlinks/Runner.scala
@@ -0,0 +1,11 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest {
+ override def runDefaultTests() {
+ // make sure typer is done.. the virtual pattern matcher might translate
+ // some trees and mess up positions. But we'll catch it red handed!
+ sourceFiles foreach (src => askLoadedTyped(src).get)
+ super.runDefaultTests()
+ }
+
+}
\ No newline at end of file
diff --git a/test/files/presentation/hyperlinks/src/NameDefaultTests.scala b/test/files/presentation/hyperlinks/src/NameDefaultTests.scala
new file mode 100644
index 0000000..b218040
--- /dev/null
+++ b/test/files/presentation/hyperlinks/src/NameDefaultTests.scala
@@ -0,0 +1,16 @@
+
+class NameDefaults {
+ val someString = "abc"
+ val someInt = 42
+
+ def foo(x: String, y: Int)(implicit logger: Int): Int = y
+
+ implicit val l = 42
+
+ def bar {
+ println()
+ val someOtherInt = 10
+
+ foo(y = someOtherInt/*#*/, x = someString/*#*/)
+ }
+}
diff --git a/test/files/presentation/hyperlinks/src/PatMatTests.scala b/test/files/presentation/hyperlinks/src/PatMatTests.scala
new file mode 100644
index 0000000..bbd0f2e
--- /dev/null
+++ b/test/files/presentation/hyperlinks/src/PatMatTests.scala
@@ -0,0 +1,28 @@
+package patmat
+
+abstract class BaseType
+
+case class CaseOne(x: Int, y: List[Int]) extends BaseType
+case class CaseTwo(str: String) extends BaseType
+
+class PatMatTests {
+
+ def foo(x: BaseType) {
+ x match {
+ case CaseOne/*#*/(10, first :: second :: Nil) =>
+ val tmp = 23
+ println(first/*#*/)
+ println(tmp/*#*/)
+
+ case CaseTwo/*#*/(mystring) =>
+ println(mystring/*#*/)
+ }
+ }
+
+ def multipleAssign() {
+ val (x, y) = ("abc", "def")
+
+ println(x/*#*/, y/*#*/)
+ }
+
+}
\ No newline at end of file
diff --git a/test/files/presentation/hyperlinks/src/SuperTypes.scala b/test/files/presentation/hyperlinks/src/SuperTypes.scala
new file mode 100644
index 0000000..15d1606
--- /dev/null
+++ b/test/files/presentation/hyperlinks/src/SuperTypes.scala
@@ -0,0 +1,32 @@
+/** This tests that hyperlinking works for super types. See SI-7224 */
+class BadPos[A](a: A)
+
+class Base
+
+trait Trait extends Base
+trait SubTrait extends Trait
+trait LateralTrait extends Base
+
+object obj1 extends BadPos/*#*/(new Object)
+object obj2 extends BadPos/*#*/[AnyRef](new Object)
+object obj3 extends Trait/*#*/
+object obj4 extends SubTrait/*#*/
+object obj5 extends Trait/*#*/ with LateralTrait/*#*/
+object obj6 extends Base/*#*/ with Trait/*#*/ with LateralTrait/*#*/
+
+class PBase[A]
+
+trait PTrait[A] extends PBase/*#*/[A]
+trait PSubTrait[A] extends PTrait/*#*/[A]
+trait PLateralTrait[A] extends PBase/*#*/[A]
+
+object pobj2 extends PTrait/*#*/[Int]
+object pobj3 extends PSubTrait/*#*/[Int]
+object pobj4 extends PTrait/*#*/[Int] with PLateralTrait/*#*/[Int]
+object pobj5 extends PBase/*#*/[Int] with PTrait/*#*/[Int] with PLateralTrait/*#*/[Int]
+
+class c1 extends BadPos/*#*/(new Object)
+class c2 extends PTrait/*#*/[Int]
+class c3 extends PSubTrait/*#*/[Int]
+class c4 extends PTrait/*#*/[Int] with PLateralTrait/*#*/[Int]
+class c5 extends PBase/*#*/[Int] with PTrait/*#*/[Int] with PLateralTrait/*#*/[Int]
diff --git a/test/files/presentation/ide-bug-1000349.check b/test/files/presentation/ide-bug-1000349.check
new file mode 100644
index 0000000..ada307d
--- /dev/null
+++ b/test/files/presentation/ide-bug-1000349.check
@@ -0,0 +1,40 @@
+reload: CompletionOnEmptyArgMethod.scala
+
+askTypeCompletion at CompletionOnEmptyArgMethod.scala(2,17)
+================================================================================
+[response] askTypeCompletion at (2,17)
+retrieved 37 members
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method +(other: String)String`
+[accessible: true] `method ->[B](y: B)(Foo, B)`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method clone()Object`
+[accessible: true] `method ensuring(cond: Boolean)Foo`
+[accessible: true] `method ensuring(cond: Boolean, msg: => Any)Foo`
+[accessible: true] `method ensuring(cond: Foo => Boolean)Foo`
+[accessible: true] `method ensuring(cond: Foo => Boolean, msg: => Any)Foo`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method finalize()Unit`
+[accessible: true] `method foo=> Foo`
+[accessible: true] `method formatted(fmtstr: String)String`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method toString()String`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `method x=> Foo`
+[accessible: true] `method →[B](y: B)(Foo, B)`
+[accessible: true] `value __leftOfArrowFoo`
+[accessible: true] `value __resultOfEnsuringFoo`
+[accessible: true] `value selfAny`
+================================================================================
diff --git a/test/files/presentation/ide-bug-1000349/Runner.scala b/test/files/presentation/ide-bug-1000349/Runner.scala
new file mode 100644
index 0000000..1ef3cf9
--- /dev/null
+++ b/test/files/presentation/ide-bug-1000349/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/ide-bug-1000349/src/CompletionOnEmptyArgMethod.scala b/test/files/presentation/ide-bug-1000349/src/CompletionOnEmptyArgMethod.scala
new file mode 100644
index 0000000..a3d8e8f
--- /dev/null
+++ b/test/files/presentation/ide-bug-1000349/src/CompletionOnEmptyArgMethod.scala
@@ -0,0 +1,7 @@
+object Foo {
+ new Foo().foo. /*!*/
+}
+
+class Foo {
+ def foo = this
+}
\ No newline at end of file
diff --git a/test/files/presentation/ide-bug-1000469.check b/test/files/presentation/ide-bug-1000469.check
new file mode 100644
index 0000000..cdc4e7d
--- /dev/null
+++ b/test/files/presentation/ide-bug-1000469.check
@@ -0,0 +1 @@
+reload: EventHandler.scala, JavaEventHandler.java
diff --git a/test/files/presentation/ide-bug-1000469/Runner.scala b/test/files/presentation/ide-bug-1000469/Runner.scala
new file mode 100644
index 0000000..1ef3cf9
--- /dev/null
+++ b/test/files/presentation/ide-bug-1000469/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/ide-bug-1000469/src/java/JavaEventHandler.java b/test/files/presentation/ide-bug-1000469/src/java/JavaEventHandler.java
new file mode 100644
index 0000000..010edef
--- /dev/null
+++ b/test/files/presentation/ide-bug-1000469/src/java/JavaEventHandler.java
@@ -0,0 +1,3 @@
+package java;
+
+import scala.EventHandler;
\ No newline at end of file
diff --git a/test/files/presentation/ide-bug-1000469/src/scala/EventHandler.scala b/test/files/presentation/ide-bug-1000469/src/scala/EventHandler.scala
new file mode 100644
index 0000000..02e836e
--- /dev/null
+++ b/test/files/presentation/ide-bug-1000469/src/scala/EventHandler.scala
@@ -0,0 +1,5 @@
+package scala
+
+class EventHandler {
+ @transient private val foo = 2
+}
\ No newline at end of file
diff --git a/test/files/presentation/ide-bug-1000475.check b/test/files/presentation/ide-bug-1000475.check
new file mode 100644
index 0000000..0790272
--- /dev/null
+++ b/test/files/presentation/ide-bug-1000475.check
@@ -0,0 +1,115 @@
+reload: Foo.scala
+
+askTypeCompletion at Foo.scala(3,7)
+================================================================================
+[response] askTypeCompletion at (3,7)
+retrieved 36 members
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method +(other: String)String`
+[accessible: true] `method ->[B](y: B)(Object, B)`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method ensuring(cond: Boolean)Object`
+[accessible: true] `method ensuring(cond: Boolean, msg: => Any)Object`
+[accessible: true] `method ensuring(cond: Object => Boolean)Object`
+[accessible: true] `method ensuring(cond: Object => Boolean, msg: => Any)Object`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method formatted(fmtstr: String)String`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method toString()String`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `method x=> Object`
+[accessible: true] `method →[B](y: B)(Object, B)`
+[accessible: true] `value __leftOfArrowObject`
+[accessible: true] `value __resultOfEnsuringObject`
+[accessible: true] `value selfAny`
+[accessible: false] `method clone()Object`
+[accessible: false] `method finalize()Unit`
+================================================================================
+
+askTypeCompletion at Foo.scala(6,10)
+================================================================================
+[response] askTypeCompletion at (6,10)
+retrieved 36 members
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method +(other: String)String`
+[accessible: true] `method ->[B](y: B)(Object, B)`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method ensuring(cond: Boolean)Object`
+[accessible: true] `method ensuring(cond: Boolean, msg: => Any)Object`
+[accessible: true] `method ensuring(cond: Object => Boolean)Object`
+[accessible: true] `method ensuring(cond: Object => Boolean, msg: => Any)Object`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method formatted(fmtstr: String)String`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method toString()String`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `method x=> Object`
+[accessible: true] `method →[B](y: B)(Object, B)`
+[accessible: true] `value __leftOfArrowObject`
+[accessible: true] `value __resultOfEnsuringObject`
+[accessible: true] `value selfAny`
+[accessible: false] `method clone()Object`
+[accessible: false] `method finalize()Unit`
+================================================================================
+
+askTypeCompletion at Foo.scala(7,7)
+================================================================================
+[response] askTypeCompletion at (7,7)
+retrieved 36 members
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method +(other: String)String`
+[accessible: true] `method ->[B](y: B)(Object, B)`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method ensuring(cond: Boolean)Object`
+[accessible: true] `method ensuring(cond: Boolean, msg: => Any)Object`
+[accessible: true] `method ensuring(cond: Object => Boolean)Object`
+[accessible: true] `method ensuring(cond: Object => Boolean, msg: => Any)Object`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method formatted(fmtstr: String)String`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method toString()String`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `method x=> Object`
+[accessible: true] `method →[B](y: B)(Object, B)`
+[accessible: true] `value __leftOfArrowObject`
+[accessible: true] `value __resultOfEnsuringObject`
+[accessible: true] `value selfAny`
+[accessible: false] `method clone()Object`
+[accessible: false] `method finalize()Unit`
+================================================================================
diff --git a/test/files/presentation/ide-bug-1000475/Runner.scala b/test/files/presentation/ide-bug-1000475/Runner.scala
new file mode 100644
index 0000000..1ef3cf9
--- /dev/null
+++ b/test/files/presentation/ide-bug-1000475/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/ide-bug-1000475/src/Foo.scala b/test/files/presentation/ide-bug-1000475/src/Foo.scala
new file mode 100644
index 0000000..b963bb7
--- /dev/null
+++ b/test/files/presentation/ide-bug-1000475/src/Foo.scala
@@ -0,0 +1,9 @@
+class Foo {
+ val v = new Object
+ v.toS/*!*/
+
+ val m = Map(1 -> new Object)
+ m(1).toS/*!*/
+ m(1)./*!*/
+ println()
+}
\ No newline at end of file
diff --git a/test/files/presentation/ide-bug-1000531.check b/test/files/presentation/ide-bug-1000531.check
new file mode 100644
index 0000000..a28ecb3
--- /dev/null
+++ b/test/files/presentation/ide-bug-1000531.check
@@ -0,0 +1,129 @@
+reload: CrashOnLoad.scala
+
+askTypeCompletion at CrashOnLoad.scala(6,12)
+================================================================================
+[response] askTypeCompletion at (6,12)
+retrieved 126 members
+[accessible: true] `class GroupedIteratorIterator[B]#GroupedIterator`
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method +(other: String)String`
+[accessible: true] `method ++[B >: B](that: => scala.collection.GenTraversableOnce[B])Iterator[B]`
+[accessible: true] `method ->[B](y: B)(java.util.Iterator[B], B)`
+[accessible: true] `method /:[B](z: B)(op: (B, B) => B)B`
+[accessible: true] `method /:\[A1 >: B](z: A1)(op: (A1, A1) => A1)A1`
+[accessible: true] `method :\[B](z: B)(op: (B, B) => B)B`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method addString(b: StringBuilder)StringBuilder`
+[accessible: true] `method addString(b: StringBuilder, sep: String)StringBuilder`
+[accessible: true] `method addString(b: StringBuilder, start: String, sep: String, end: String)StringBuilder`
+[accessible: true] `method aggregate[B](z: B)(seqop: (B, B) => B, combop: (B, B) => B)B`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method buffered=> scala.collection.BufferedIterator[B]`
+[accessible: true] `method collectFirst[B](pf: PartialFunction[B,B])Option[B]`
+[accessible: true] `method collect[B](pf: PartialFunction[B,B])Iterator[B]`
+[accessible: true] `method contains(elem: Any)Boolean`
+[accessible: true] `method copyToArray[B >: B](xs: Array[B])Unit`
+[accessible: true] `method copyToArray[B >: B](xs: Array[B], start: Int)Unit`
+[accessible: true] `method copyToArray[B >: B](xs: Array[B], start: Int, len: Int)Unit`
+[accessible: true] `method copyToBuffer[B >: B](dest: scala.collection.mutable.Buffer[B])Unit`
+[accessible: true] `method corresponds[B](that: scala.collection.GenTraversableOnce[B])(p: (B, B) => Boolean)Boolean`
+[accessible: true] `method count(p: B => Boolean)Int`
+[accessible: true] `method drop(n: Int)Iterator[B]`
+[accessible: true] `method dropWhile(p: B => Boolean)Iterator[B]`
+[accessible: true] `method duplicate=> (Iterator[B], Iterator[B])`
+[accessible: true] `method ensuring(cond: Boolean)java.util.Iterator[B]`
+[accessible: true] `method ensuring(cond: Boolean, msg: => Any)java.util.Iterator[B]`
+[accessible: true] `method ensuring(cond: java.util.Iterator[B] => Boolean)java.util.Iterator[B]`
+[accessible: true] `method ensuring(cond: java.util.Iterator[B] => Boolean, msg: => Any)java.util.Iterator[B]`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method exists(p: B => Boolean)Boolean`
+[accessible: true] `method filter(p: B => Boolean)Iterator[B]`
+[accessible: true] `method filterNot(p: B => Boolean)Iterator[B]`
+[accessible: true] `method find(p: B => Boolean)Option[B]`
+[accessible: true] `method flatMap[B](f: B => scala.collection.GenTraversableOnce[B])Iterator[B]`
+[accessible: true] `method foldLeft[B](z: B)(op: (B, B) => B)B`
+[accessible: true] `method foldRight[B](z: B)(op: (B, B) => B)B`
+[accessible: true] `method fold[A1 >: B](z: A1)(op: (A1, A1) => A1)A1`
+[accessible: true] `method forall(p: B => Boolean)Boolean`
+[accessible: true] `method foreach[U](f: B => U)Unit`
+[accessible: true] `method formatted(fmtstr: String)String`
+[accessible: true] `method grouped[B >: B](size: Int)Iterator[B]#GroupedIterator[B]`
+[accessible: true] `method hasDefiniteSize=> Boolean`
+[accessible: true] `method hasNext()Boolean`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method indexOf[B >: B](elem: B)Int`
+[accessible: true] `method indexWhere(p: B => Boolean)Int`
+[accessible: true] `method isEmpty=> Boolean`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method isTraversableAgain=> Boolean`
+[accessible: true] `method length=> Int`
+[accessible: true] `method map[B](f: B => B)Iterator[B]`
+[accessible: true] `method maxBy[B](f: B => B)(implicit cmp: Ordering[B])B`
+[accessible: true] `method max[B >: B](implicit cmp: Ordering[B])B`
+[accessible: true] `method minBy[B](f: B => B)(implicit cmp: Ordering[B])B`
+[accessible: true] `method min[B >: B](implicit cmp: Ordering[B])B`
+[accessible: true] `method mkString(sep: String)String`
+[accessible: true] `method mkString(start: String, sep: String, end: String)String`
+[accessible: true] `method mkString=> String`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method next()B`
+[accessible: true] `method nonEmpty=> Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method padTo[A1 >: B](len: Int, elem: A1)Iterator[A1]`
+[accessible: true] `method partition(p: B => Boolean)(Iterator[B], Iterator[B])`
+[accessible: true] `method patch[B >: B](from: Int, patchElems: Iterator[B], replaced: Int)Iterator[B]`
+[accessible: true] `method product[B >: B](implicit num: Numeric[B])B`
+[accessible: true] `method reduceLeftOption[B >: B](op: (B, B) => B)Option[B]`
+[accessible: true] `method reduceLeft[B >: B](op: (B, B) => B)B`
+[accessible: true] `method reduceOption[A1 >: B](op: (A1, A1) => A1)Option[A1]`
+[accessible: true] `method reduceRightOption[B >: B](op: (B, B) => B)Option[B]`
+[accessible: true] `method reduceRight[B >: B](op: (B, B) => B)B`
+[accessible: true] `method reduce[A1 >: B](op: (A1, A1) => A1)A1`
+[accessible: true] `method remove()Unit`
+[accessible: true] `method sameElements(that: Iterator[_])Boolean`
+[accessible: true] `method scanLeft[B](z: B)(op: (B, B) => B)Iterator[B]`
+[accessible: true] `method scanRight[B](z: B)(op: (B, B) => B)Iterator[B]`
+[accessible: true] `method seq=> Iterator[B]`
+[accessible: true] `method size=> Int`
+[accessible: true] `method slice(from: Int, until: Int)Iterator[B]`
+[accessible: true] `method sliding[B >: B](size: Int, step: Int)Iterator[B]#GroupedIterator[B]`
+[accessible: true] `method span(p: B => Boolean)(Iterator[B], Iterator[B])`
+[accessible: true] `method sum[B >: B](implicit num: Numeric[B])B`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method take(n: Int)Iterator[B]`
+[accessible: true] `method takeWhile(p: B => Boolean)Iterator[B]`
+[accessible: true] `method toArray[B >: B](implicit evidence$1: scala.reflect.ClassTag[B])Array[B]`
+[accessible: true] `method toBuffer[B >: B]=> scala.collection.mutable.Buffer[B]`
+[accessible: true] `method toIndexedSeq=> scala.collection.immutable.IndexedSeq[B]`
+[accessible: true] `method toIterable=> Iterable[B]`
+[accessible: true] `method toIterator=> Iterator[B]`
+[accessible: true] `method toList=> List[B]`
+[accessible: true] `method toMap[T, U](implicit ev: <:<[B,(T, U)])scala.collection.immutable.Map[T,U]`
+[accessible: true] `method toSeq=> Seq[B]`
+[accessible: true] `method toSet[B >: B]=> scala.collection.immutable.Set[B]`
+[accessible: true] `method toStream=> scala.collection.immutable.Stream[B]`
+[accessible: true] `method toString()String`
+[accessible: true] `method toTraversable=> Traversable[B]`
+[accessible: true] `method toVector=> Vector[B]`
+[accessible: true] `method to[Col[_]](implicit cbf: scala.collection.generic.CanBuildFrom[Nothing,B,Col[B]])Col[B]`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `method withFilter(p: B => Boolean)Iterator[B]`
+[accessible: true] `method x=> java.util.Iterator[B]`
+[accessible: true] `method zipAll[B, A1 >: B, B1 >: B](that: Iterator[B], thisElem: A1, thatElem: B1)Iterator[(A1, B1)]`
+[accessible: true] `method zipWithIndex=> Iterator[(B, Int)]`
+[accessible: true] `method zip[B](that: Iterator[B])Iterator[(B, B)]`
+[accessible: true] `method →[B](y: B)(java.util.Iterator[B], B)`
+[accessible: true] `value __leftOfArrowjava.util.Iterator[B]`
+[accessible: true] `value __resultOfEnsuringjava.util.Iterator[B]`
+[accessible: true] `value selfAny`
+[accessible: false] `method clone()Object`
+[accessible: false] `method finalize()Unit`
+[accessible: false] `method reversed=> List[B]`
+================================================================================
diff --git a/test/files/presentation/ide-bug-1000531/Runner.scala b/test/files/presentation/ide-bug-1000531/Runner.scala
new file mode 100644
index 0000000..1ef3cf9
--- /dev/null
+++ b/test/files/presentation/ide-bug-1000531/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala b/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala
new file mode 100644
index 0000000..21d39c8
--- /dev/null
+++ b/test/files/presentation/ide-bug-1000531/src/CrashOnLoad.scala
@@ -0,0 +1,7 @@
+/** When this files is opened within the IDE, a typing error is reported. */
+class A[B] extends java.lang.Iterable[B] {
+ import scala.collection.JavaConversions._
+ def iterator = Iterator.empty
+
+ iterator. /*!*/
+}
\ No newline at end of file
diff --git a/test/files/presentation/ide-t1000567.check b/test/files/presentation/ide-t1000567.check
new file mode 100644
index 0000000..6d62cb3
--- /dev/null
+++ b/test/files/presentation/ide-t1000567.check
@@ -0,0 +1 @@
+reload: a.scala, b.scala
diff --git a/test/files/presentation/ide-t1000567/Runner.scala b/test/files/presentation/ide-t1000567/Runner.scala
new file mode 100644
index 0000000..77a618b
--- /dev/null
+++ b/test/files/presentation/ide-t1000567/Runner.scala
@@ -0,0 +1,15 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+// also known as SI-5013
+
+object Test extends InteractiveTest {
+
+ override def runDefaultTests(): Unit = {
+ val a = sourceFiles.find(_.file.name == "a.scala").head
+ val b = sourceFiles.find(_.file.name == "b.scala").head
+ askLoadedTyped(a).get
+ askLoadedTyped(b).get
+ super.runDefaultTests()
+ }
+
+}
diff --git a/test/files/presentation/ide-t1000567/src/a/a.scala b/test/files/presentation/ide-t1000567/src/a/a.scala
new file mode 100644
index 0000000..ee21112
--- /dev/null
+++ b/test/files/presentation/ide-t1000567/src/a/a.scala
@@ -0,0 +1,5 @@
+package a
+
+class Foo {
+ protected[Foo] var x = 0
+}
diff --git a/test/files/presentation/ide-t1000567/src/b/b.scala b/test/files/presentation/ide-t1000567/src/b/b.scala
new file mode 100644
index 0000000..a0185b1
--- /dev/null
+++ b/test/files/presentation/ide-t1000567/src/b/b.scala
@@ -0,0 +1,5 @@
+package b
+
+class Bar extends a.Foo {
+ println(x)
+}
diff --git a/test/files/presentation/ide-t1000609.check b/test/files/presentation/ide-t1000609.check
new file mode 100644
index 0000000..1094dbd
--- /dev/null
+++ b/test/files/presentation/ide-t1000609.check
@@ -0,0 +1,6 @@
+reload: NoHyperlinking.scala
+
+askHyperlinkPos for `foo` at (7,10) NoHyperlinking.scala
+================================================================================
+[response] found askHyperlinkPos for `foo` at (2,7) NoHyperlinking.scala
+================================================================================
\ No newline at end of file
diff --git a/test/files/presentation/ide-t1000609/Runner.scala b/test/files/presentation/ide-t1000609/Runner.scala
new file mode 100644
index 0000000..1ef3cf9
--- /dev/null
+++ b/test/files/presentation/ide-t1000609/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/ide-t1000609/src/NoHyperlinking.scala b/test/files/presentation/ide-t1000609/src/NoHyperlinking.scala
new file mode 100644
index 0000000..d4bedaf
--- /dev/null
+++ b/test/files/presentation/ide-t1000609/src/NoHyperlinking.scala
@@ -0,0 +1,8 @@
+class Foo {
+ def foo(a: Int) = a
+}
+
+object Test {
+ val a = new Foo
+ a.foo() /*#*/
+}
\ No newline at end of file
diff --git a/test/files/presentation/ide-t1000976.check b/test/files/presentation/ide-t1000976.check
new file mode 100644
index 0000000..d58f86d
--- /dev/null
+++ b/test/files/presentation/ide-t1000976.check
@@ -0,0 +1 @@
+Test OK
\ No newline at end of file
diff --git a/test/files/presentation/ide-t1000976.flags b/test/files/presentation/ide-t1000976.flags
new file mode 100644
index 0000000..9a1a05a
--- /dev/null
+++ b/test/files/presentation/ide-t1000976.flags
@@ -0,0 +1 @@
+-sourcepath src
\ No newline at end of file
diff --git a/test/files/presentation/ide-t1000976/Test.scala b/test/files/presentation/ide-t1000976/Test.scala
new file mode 100644
index 0000000..722259d
--- /dev/null
+++ b/test/files/presentation/ide-t1000976/Test.scala
@@ -0,0 +1,30 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+import scala.reflect.internal.util.SourceFile
+import scala.tools.nsc.interactive.Response
+
+object Test extends InteractiveTest {
+ override def execute(): Unit = {
+ loadSourceAndWaitUntilTypechecked("A.scala")
+ val sourceB = loadSourceAndWaitUntilTypechecked("B.scala")
+ checkErrors(sourceB)
+ }
+
+ private def loadSourceAndWaitUntilTypechecked(sourceName: String): SourceFile = {
+ val sourceFile = sourceFiles.find(_.file.name == sourceName).head
+ compiler.askToDoFirst(sourceFile)
+ val res = new Response[Unit]
+ compiler.askReload(List(sourceFile), res)
+ res.get
+ askLoadedTyped(sourceFile).get
+ sourceFile
+ }
+
+ private def checkErrors(source: SourceFile): Unit = compiler.getUnitOf(source) match {
+ case Some(unit) =>
+ val problems = unit.problems.toList
+ if(problems.isEmpty) reporter.println("Test OK")
+ else problems.foreach(problem => reporter.println(problem.msg))
+
+ case None => reporter.println("No compilation unit found for " + source.file.name)
+ }
+}
diff --git a/test/files/presentation/ide-t1000976/src/a/A.scala b/test/files/presentation/ide-t1000976/src/a/A.scala
new file mode 100644
index 0000000..fcfef8b
--- /dev/null
+++ b/test/files/presentation/ide-t1000976/src/a/A.scala
@@ -0,0 +1,7 @@
+package a
+
+import d.D._
+
+object A {
+ Seq.empty[Byte].toArray.toSeq
+}
diff --git a/test/files/presentation/ide-t1000976/src/b/B.scala b/test/files/presentation/ide-t1000976/src/b/B.scala
new file mode 100644
index 0000000..628348c
--- /dev/null
+++ b/test/files/presentation/ide-t1000976/src/b/B.scala
@@ -0,0 +1,7 @@
+package b
+
+import c.C
+
+class B {
+ new C("")
+}
diff --git a/test/files/presentation/ide-t1000976/src/c/C.scala b/test/files/presentation/ide-t1000976/src/c/C.scala
new file mode 100644
index 0000000..cc23e3e
--- /dev/null
+++ b/test/files/presentation/ide-t1000976/src/c/C.scala
@@ -0,0 +1,3 @@
+package c
+
+class C(key: String = "", componentStates: String = "")
diff --git a/test/files/presentation/ide-t1000976/src/d/D.scala b/test/files/presentation/ide-t1000976/src/d/D.scala
new file mode 100644
index 0000000..d7a48f9
--- /dev/null
+++ b/test/files/presentation/ide-t1000976/src/d/D.scala
@@ -0,0 +1,7 @@
+package d
+
+import c.C
+
+object D {
+ implicit def c2s(c: C): String = ""
+}
diff --git a/test/files/presentation/ide-t1001326.check b/test/files/presentation/ide-t1001326.check
new file mode 100644
index 0000000..0ac15fa
--- /dev/null
+++ b/test/files/presentation/ide-t1001326.check
@@ -0,0 +1,4 @@
+Unique OK
+Unattributed OK
+NeverModify OK
+AlwaysParseTree OK
\ No newline at end of file
diff --git a/test/files/presentation/ide-t1001326/Test.scala b/test/files/presentation/ide-t1001326/Test.scala
new file mode 100644
index 0000000..3091da4
--- /dev/null
+++ b/test/files/presentation/ide-t1001326/Test.scala
@@ -0,0 +1,91 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+import scala.reflect.internal.util.SourceFile
+import scala.tools.nsc.interactive.Response
+
+object Test extends InteractiveTest {
+
+ override def execute(): Unit = {
+ val sf = sourceFiles.find(_.file.name == "A.scala").head
+ uniqueParseTree_t1001326(sf)
+ unattributedParseTree_t1001326(sf)
+ neverModifyParseTree_t1001326(sf)
+ shouldAlwaysReturnParseTree_t1001326(sf)
+ }
+
+ /**
+ * Asking twice for a parseTree on the same source should always return a new tree
+ */
+ private def uniqueParseTree_t1001326(sf: SourceFile) {
+ val parseTree1 = compiler.parseTree(sf)
+ val parseTree2 = compiler.parseTree(sf)
+ if (parseTree1 != parseTree2) {
+ reporter.println("Unique OK")
+ } else {
+ reporter.println("Unique FAILED")
+ }
+ }
+
+ /**
+ * A parseTree should never contain any symbols or types
+ */
+ private def unattributedParseTree_t1001326(sf: SourceFile) {
+ if (noSymbolsOrTypes(compiler.parseTree(sf))) {
+ reporter.println("Unattributed OK")
+ } else {
+ reporter.println("Unattributed FAILED")
+ }
+ }
+
+ /**
+ * Once you have obtained a parseTree it should never change
+ */
+ private def neverModifyParseTree_t1001326(sf: SourceFile) {
+ val parsedTree = compiler.parseTree(sf)
+ loadSourceAndWaitUntilTypechecked(sf)
+ if (noSymbolsOrTypes(parsedTree)) {
+ reporter.println("NeverModify OK")
+ } else {
+ reporter.println("NeverModify FAILED")
+ }
+ }
+
+ /**
+ * Should always return a parse tree
+ */
+ private def shouldAlwaysReturnParseTree_t1001326(sf: SourceFile) {
+ loadSourceAndWaitUntilTypechecked(sf)
+ if (noSymbolsOrTypes(compiler.parseTree(sf))) {
+ reporter.println("AlwaysParseTree OK")
+ } else {
+ reporter.println("AlwaysParseTree FAILED")
+ }
+ }
+
+ /**
+ * Load a source and block while it is type-checking.
+ */
+ private def loadSourceAndWaitUntilTypechecked(sf: SourceFile): Unit = {
+ compiler.askToDoFirst(sf)
+ val res = new Response[Unit]
+ compiler.askReload(List(sf), res)
+ res.get
+ askLoadedTyped(sf).get
+ }
+
+ /**
+ * Traverses a tree and makes sure that there are no types or symbols present in the tree with
+ * the exception of the symbol for the package 'scala'. This is because that symbol will be
+ * present in some of the nodes that the compiler generates.
+ */
+ private def noSymbolsOrTypes(tree: compiler.Tree): Boolean = {
+ tree.forAll { t =>
+ (t.symbol == null ||
+ t.symbol == compiler.NoSymbol ||
+ t.symbol == compiler.definitions.ScalaPackage // ignore the symbol for the scala package for now
+ ) && (
+ t.tpe == null ||
+ t.tpe == compiler.NoType)
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/test/files/presentation/ide-t1001326/src/a/A.scala b/test/files/presentation/ide-t1001326/src/a/A.scala
new file mode 100644
index 0000000..c82ca02
--- /dev/null
+++ b/test/files/presentation/ide-t1001326/src/a/A.scala
@@ -0,0 +1,5 @@
+package a
+
+class A {
+ def foo(s: String) = s + s
+}
\ No newline at end of file
diff --git a/test/files/presentation/ide-t1001388.check b/test/files/presentation/ide-t1001388.check
new file mode 100644
index 0000000..d58f86d
--- /dev/null
+++ b/test/files/presentation/ide-t1001388.check
@@ -0,0 +1 @@
+Test OK
\ No newline at end of file
diff --git a/test/files/presentation/ide-t1001388/Test.scala b/test/files/presentation/ide-t1001388/Test.scala
new file mode 100644
index 0000000..f6079cf
--- /dev/null
+++ b/test/files/presentation/ide-t1001388/Test.scala
@@ -0,0 +1,28 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+import scala.reflect.internal.util.SourceFile
+import scala.tools.nsc.interactive.Response
+
+object Test extends InteractiveTest {
+ override def execute(): Unit = {
+ val sourceA = loadSourceAndWaitUntilTypechecked("A.scala")
+ checkPresent(sourceA)
+ }
+
+ private def loadSourceAndWaitUntilTypechecked(sourceName: String): SourceFile = {
+ val sourceFile = sourceFiles.find(_.file.name == sourceName).head
+ askLoadedTyped(sourceFile).get
+ /* The response to `askLoadedType` may return before `interactive.Global.waitLoadedType`
+ * fully executes. Because this test expects `waitLoadedType` is fully executed before
+ * calling `checkPresent`, with the below no-op presentation compiler request we make
+ * sure this requirement is fulfilled.
+ */
+ compiler.askForResponse(() => ()).get
+ sourceFile
+ }
+
+ private def checkPresent(source: SourceFile): Unit = compiler.getUnitOf(source) match {
+ case Some(unit) => reporter.println("Compilation Unit for " + source.file.name + " still loaded after askLoadedTyped")
+
+ case None => reporter.println("Test OK")
+ }
+}
diff --git a/test/files/presentation/ide-t1001388/src/a/A.scala b/test/files/presentation/ide-t1001388/src/a/A.scala
new file mode 100644
index 0000000..be09097
--- /dev/null
+++ b/test/files/presentation/ide-t1001388/src/a/A.scala
@@ -0,0 +1,6 @@
+package a
+
+object A {
+ val tagString = "foo"
+ Seq.empty[Byte].toArray.toSeq
+}
diff --git a/test/files/presentation/implicit-member.check b/test/files/presentation/implicit-member.check
new file mode 100644
index 0000000..111d06d
--- /dev/null
+++ b/test/files/presentation/implicit-member.check
@@ -0,0 +1,42 @@
+reload: ImplicitMember.scala
+
+askTypeCompletion at ImplicitMember.scala(7,7)
+================================================================================
+[response] askTypeCompletion at (7,7)
+retrieved 39 members
+[accessible: true] `class AppliedImplicitImplicit.AppliedImplicit`
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method +(other: String)String`
+[accessible: true] `method ->[B](y: B)(Implicit.type, B)`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method AppliedImplicit[A](x: A)Implicit.AppliedImplicit[A]`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method clone()Object`
+[accessible: true] `method ensuring(cond: Boolean)Implicit.type`
+[accessible: true] `method ensuring(cond: Boolean, msg: => Any)Implicit.type`
+[accessible: true] `method ensuring(cond: Implicit.type => Boolean)Implicit.type`
+[accessible: true] `method ensuring(cond: Implicit.type => Boolean, msg: => Any)Implicit.type`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method finalize()Unit`
+[accessible: true] `method formatted(fmtstr: String)String`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method toString()String`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `method x=> Implicit.type`
+[accessible: true] `method →[B](y: B)(Implicit.type, B)`
+[accessible: true] `value __leftOfArrowImplicit.type`
+[accessible: true] `value __resultOfEnsuringImplicit.type`
+[accessible: true] `value selfAny`
+[accessible: true] `value xImplicit.type`
+================================================================================
diff --git a/test/files/presentation/implicit-member/Runner.scala b/test/files/presentation/implicit-member/Runner.scala
new file mode 100644
index 0000000..1c03e3d
--- /dev/null
+++ b/test/files/presentation/implicit-member/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
diff --git a/test/files/presentation/implicit-member/src/ImplicitMember.scala b/test/files/presentation/implicit-member/src/ImplicitMember.scala
new file mode 100644
index 0000000..06732f6
--- /dev/null
+++ b/test/files/presentation/implicit-member/src/ImplicitMember.scala
@@ -0,0 +1,8 @@
+object Implicit {
+
+ final class AppliedImplicit[A](val x: A)
+
+ implicit def AppliedImplicit[A](x: A): AppliedImplicit[A] = new AppliedImplicit(x)
+
+ this./*!*/x
+}
\ No newline at end of file
diff --git a/test/files/presentation/memory-leaks.check b/test/files/presentation/memory-leaks.check
new file mode 100644
index 0000000..86fb077
--- /dev/null
+++ b/test/files/presentation/memory-leaks.check
@@ -0,0 +1,54 @@
+reload: Trees.scala, Typers.scala, Types.scala
+reload: Trees.scala
+reload: Types.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+reload: Typers.scala
+No leaks detected.
diff --git a/test/files/presentation/memory-leaks/MemoryLeaksTest.scala b/test/files/presentation/memory-leaks/MemoryLeaksTest.scala
new file mode 100644
index 0000000..950569c
--- /dev/null
+++ b/test/files/presentation/memory-leaks/MemoryLeaksTest.scala
@@ -0,0 +1,126 @@
+import java.io.PrintWriter
+import java.io.FileOutputStream
+import java.util.Calendar
+
+import scala.tools.nsc.interactive.tests._
+import scala.tools.nsc.util._
+import scala.tools.nsc.io._
+import scala.tools.nsc.doc
+
+/** This test runs the presentation compiler on the Scala compiler project itself and records memory consumption.
+ *
+ * The test scenario is to open Typers, Trees and Types, then repeatedly add and remove one character
+ * in Typers.scala. Each step causes the parser, namer, and type checker to run.
+ *
+ * At each step we record the memory usage after the GC has run. At the end of the test,
+ * simple linear regression is used to compute the straight line that best fits the
+ * curve, and if the slope is higher than 1 (meaning a leak of 1MB/run), we fail the test.
+ *
+ * The Scala compiler sources are assumed to be under 'basedir/src/compiler'.
+ *
+ * The individual data points are saved under 'usedMem-<date>.txt', under the test project
+ * directory. Use the cool graph-it.R (https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core.tests/graph-it.R)
+ * script to see the memory curve for the given test run.
+ */
+object Test extends InteractiveTest {
+ final val mega = 1024 * 1024
+
+ override val withDocComments = true
+
+ override def execute(): Unit = memoryConsumptionTest()
+
+ def batchSource(name: String) =
+ new BatchSourceFile(AbstractFile.getFile(name))
+
+ def memoryConsumptionTest() {
+ val N = 50
+ val filename = "usedmem-%tF.txt".format(Calendar.getInstance.getTime)
+
+ val typerUnit = AbstractFile.getFile(baseDir.parent.parent.parent.parent / "src/compiler/scala/tools/nsc/typechecker/Typers.scala")
+ val typesUnit = AbstractFile.getFile(baseDir.parent.parent.parent.parent / "src/reflect/scala/reflect/internal/Types.scala")
+ val treesUnit = AbstractFile.getFile(baseDir.parent.parent.parent.parent / "src/reflect/scala/reflect/internal/Trees.scala")
+
+ askReload(Seq(new BatchSourceFile(typerUnit), new BatchSourceFile(typesUnit), new BatchSourceFile(treesUnit)))
+ typeCheckWith(treesUnit, new String(treesUnit.toCharArray))
+ typeCheckWith(typesUnit, new String(typesUnit.toCharArray))
+
+ val originalTyper = new String(typerUnit.toCharArray)
+
+ val (prefix, postfix) = originalTyper.splitAt(originalTyper.indexOf("import global._"))
+ val changedTyper = prefix + " a\n " + postfix
+
+ val usedMem = for (i <- 1 to N) yield {
+ val src = if (i % 2 == 0) originalTyper else changedTyper
+
+ val usedMem = withGC {
+ typeCheckWith(typerUnit, src)
+ }
+
+ usedMem / mega // report size in MB
+ }
+
+ //dumpDataToFile(filename, usedMem)
+ // drop the first two measurements, since the compiler needs some memory when initializing
+ val (a, b) = linearModel((3L to N).toSeq, usedMem.drop(2))
+ //println("LinearModel: constant: %.4f\tslope:%.4f".format(a, b))
+
+ if (b > 1.0)
+ println("Rate of memory consumption is alarming! %.4f MB/run".format(b))
+ else
+ println("No leaks detected.")
+ }
+
+ private def typeCheckWith(file: AbstractFile, src: String) = {
+ val sourceFile = new BatchSourceFile(file, src.toCharArray)
+ askReload(Seq(sourceFile))
+ askLoadedTyped(sourceFile).get // block until it's here
+ }
+
+ private def dumpDataToFile(filename: String, usedMem: Seq[Long]) {
+ val outputFile = new PrintWriter(new FileOutputStream(filename))
+ outputFile.println("\tusedMem")
+ for ((dataPoint, i) <- usedMem.zipWithIndex) {
+ outputFile.println("%d\t%d".format(i, dataPoint))
+ }
+ outputFile.close()
+ }
+
+
+ /** Return the linear model of these values, (a, b). First value is the constant factor,
+ * second value is the slope, i.e. `y = a + bx`
+ *
+ * The linear model of a set of points is a straight line that minimizes the square distance
+ * between the each point and the line.
+ *
+ * See: http://en.wikipedia.org/wiki/Simple_linear_regression
+ */
+ def linearModel(xs: Seq[Long], ys: Seq[Long]): (Double, Double) = {
+ require(xs.length == ys.length)
+
+ def mean(v: Seq[Long]): Double = v.sum.toDouble / v.length
+
+ val meanXs = mean(xs)
+ val meanYs = mean(ys)
+
+ val beta = (mean((xs, ys).zipped.map(_ * _)) - meanXs * meanYs) / (mean(xs.map(x => x * x)) - meanXs * meanXs)
+ val alfa = meanYs - beta * meanXs
+
+ (alfa, beta)
+ }
+
+ /** Run the given closure and return the amount of used memory at the end of its execution.
+ *
+ * Runs the GC before and after the execution of `f'.
+ */
+ def withGC(f: => Unit): Long = {
+ val r = Runtime.getRuntime
+ System.gc()
+
+ f;
+
+ System.gc()
+
+ r.totalMemory() - r.freeMemory()
+ }
+
+}
diff --git a/test/files/presentation/ping-pong.check b/test/files/presentation/ping-pong.check
new file mode 100644
index 0000000..f714c1b
--- /dev/null
+++ b/test/files/presentation/ping-pong.check
@@ -0,0 +1,102 @@
+reload: PingPong.scala
+
+askTypeCompletion at PingPong.scala(10,23)
+================================================================================
+[response] askTypeCompletion at (10,23)
+retrieved 40 members
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method +(other: String)String`
+[accessible: true] `method ->[B](y: B)(Pong, B)`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method ensuring(cond: Boolean)Pong`
+[accessible: true] `method ensuring(cond: Boolean, msg: => Any)Pong`
+[accessible: true] `method ensuring(cond: Pong => Boolean)Pong`
+[accessible: true] `method ensuring(cond: Pong => Boolean, msg: => Any)Pong`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method formatted(fmtstr: String)String`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method poke()Unit`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method toString()String`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `method x=> Pong`
+[accessible: true] `method →[B](y: B)(Pong, B)`
+[accessible: true] `value __leftOfArrowPong`
+[accessible: true] `value __resultOfEnsuringPong`
+[accessible: true] `value nameString`
+[accessible: true] `value selfAny`
+[accessible: false] `method clone()Object`
+[accessible: false] `method finalize()Unit`
+[accessible: false] `value pingPing`
+================================================================================
+
+askTypeCompletion at PingPong.scala(19,20)
+================================================================================
+[response] askTypeCompletion at (19,20)
+retrieved 40 members
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method +(other: String)String`
+[accessible: true] `method ->[B](y: B)(Ping, B)`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method ensuring(cond: Boolean)Ping`
+[accessible: true] `method ensuring(cond: Boolean, msg: => Any)Ping`
+[accessible: true] `method ensuring(cond: Ping => Boolean)Ping`
+[accessible: true] `method ensuring(cond: Ping => Boolean, msg: => Any)Ping`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method formatted(fmtstr: String)String`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method loop=> Unit`
+[accessible: true] `method name=> String`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method poke=> Unit`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method toString()String`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `method x=> Ping`
+[accessible: true] `method →[B](y: B)(Ping, B)`
+[accessible: true] `value __leftOfArrowPing`
+[accessible: true] `value __resultOfEnsuringPing`
+[accessible: true] `value pongPong`
+[accessible: true] `value selfAny`
+[accessible: false] `method clone()Object`
+[accessible: false] `method finalize()Unit`
+================================================================================
+
+askType at PingPong.scala(8,10)
+================================================================================
+[response] askTypeAt at (8,10)
+def loop: Unit = Ping.this.poke()
+================================================================================
+
+askType at PingPong.scala(10,10)
+================================================================================
+[response] askTypeAt at (10,10)
+def poke: Unit = Ping.this.pong.poke()
+================================================================================
+
+askType at PingPong.scala(17,10)
+================================================================================
+[response] askTypeAt at (17,10)
+private[this] val name: String = "pong"
+================================================================================
diff --git a/test/files/presentation/ping-pong/Runner.scala b/test/files/presentation/ping-pong/Runner.scala
new file mode 100644
index 0000000..1c03e3d
--- /dev/null
+++ b/test/files/presentation/ping-pong/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
diff --git a/test/files/presentation/ping-pong/src/PingPong.scala b/test/files/presentation/ping-pong/src/PingPong.scala
new file mode 100644
index 0000000..08bb4e3
--- /dev/null
+++ b/test/files/presentation/ping-pong/src/PingPong.scala
@@ -0,0 +1,22 @@
+
+class Ping {
+
+ val pong = new Pong(this)
+
+ def name = "ping"
+
+ def loop/*?*/ { poke() }
+
+ def poke/*?*/ { pong./*!*/poke() }
+
+ override def toString = name
+}
+
+class Pong(ping: Ping) {
+
+ val name/*?*/ = "pong"
+
+ def poke() { ping./*!*/poke() }
+
+ override def toString = name
+}
\ No newline at end of file
diff --git a/test/files/presentation/random.check b/test/files/presentation/random.check
new file mode 100644
index 0000000..fce4b69
--- /dev/null
+++ b/test/files/presentation/random.check
@@ -0,0 +1,27 @@
+reload: Random.scala
+
+askType at Random.scala(18,14)
+================================================================================
+[response] askTypeAt at (18,14)
+val filter: Int => Boolean = try {
+ java.this.lang.Integer.parseInt(args.apply(0)) match {
+ case 1 => ((x: Int) => x.%(2).!=(0))
+ case 2 => ((x: Int) => x.%(2).==(0))
+ case _ => ((x: Int) => x.!=(0))
+ }
+} catch {
+ case _ => ((x: Int) => x.<(100))
+}
+================================================================================
+
+askType at Random.scala(19,30)
+================================================================================
+[response] askTypeAt at (19,30)
+0
+================================================================================
+
+askType at Random.scala(26,12)
+================================================================================
+[response] askTypeAt at (26,12)
+_
+================================================================================
diff --git a/test/files/presentation/random/Runner.scala b/test/files/presentation/random/Runner.scala
new file mode 100644
index 0000000..1c03e3d
--- /dev/null
+++ b/test/files/presentation/random/Runner.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest
diff --git a/test/files/presentation/random/src/Random.scala b/test/files/presentation/random/src/Random.scala
new file mode 100644
index 0000000..4fff783
--- /dev/null
+++ b/test/files/presentation/random/src/Random.scala
@@ -0,0 +1,106 @@
+package examples
+
+import java.io._
+import java.net.{InetAddress,ServerSocket,Socket,SocketException}
+import java.util.Random
+
+/**
+ * Simple client/server application using Java sockets.
+ *
+ * The server simply generates random integer values and
+ * the clients provide a filter function to the server
+ * to get only values they interested in (eg. even or
+ * odd values, and so on).
+ */
+object randomclient {
+
+ def main(args: Array[String]) {
+ val filter/*?*/ = try {
+ Integer.parseInt(args(0)/*?*/) match {
+ case 1 => x: Int => x % 2 != 0
+ case 2 => x: Int => x % 2 == 0
+ case _ => x: Int => x != 0
+ }
+ }
+ catch {
+ case _/*?*/ => x: Int => x < 100
+ }
+
+ try {
+ val ia = InetAddress.getByName("localhost")
+ val socket = new Socket(ia, 9999)
+ val out = new ObjectOutputStream(
+ new DataOutputStream(socket.getOutputStream()))
+ val in = new DataInputStream(socket.getInputStream())
+
+ out.writeObject(filter)
+ out.flush()
+
+ while (true) {
+ val x = in.readInt()
+ println("x = " + x)
+ }
+ out.close()
+ in.close()
+ socket.close()
+ }
+ catch {
+ case e: IOException =>
+ e.printStackTrace()
+ }
+ }
+
+}
+
+object randomserver {
+
+ def main(args: Array[String]): Unit = {
+ try {
+ val listener = new ServerSocket(9999);
+ while (true)
+ new ServerThread(listener.accept()).start();
+ listener.close()
+ }
+ catch {
+ case e: IOException =>
+ System.err.println("Could not listen on port: 9999.");
+ System.exit(-1)
+ }
+ }
+
+}
+
+case class ServerThread(socket: Socket) extends Thread("ServerThread") {
+
+ override def run(): Unit = {
+ val rand = new Random(System.currentTimeMillis());
+ try {
+ val out = new DataOutputStream(socket.getOutputStream());
+ val in = new ObjectInputStream(
+ new DataInputStream(socket.getInputStream()));
+
+ val filter = in.readObject().asInstanceOf[Int => Boolean];
+
+ while (true) {
+ var succeeded = false;
+ do {
+ val x = rand.nextInt(1000);
+ succeeded = filter(x);
+ if (succeeded) out.writeInt(x)
+ } while (! succeeded);
+ Thread.sleep(100)
+ }
+
+ out.close();
+ in.close();
+ socket.close()
+ }
+ catch {
+ case e: SocketException =>
+ () // avoid stack trace when stopping a client with Ctrl-C
+ case e: IOException =>
+ e.printStackTrace();
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/test/files/presentation/recursive-ask.check b/test/files/presentation/recursive-ask.check
new file mode 100644
index 0000000..357d2cf
--- /dev/null
+++ b/test/files/presentation/recursive-ask.check
@@ -0,0 +1,4 @@
+[ outer] askForResponse
+[nested] askForResponse
+passed
+done
diff --git a/test/files/presentation/recursive-ask/RecursiveAsk.scala b/test/files/presentation/recursive-ask/RecursiveAsk.scala
new file mode 100644
index 0000000..b0e29b3
--- /dev/null
+++ b/test/files/presentation/recursive-ask/RecursiveAsk.scala
@@ -0,0 +1,20 @@
+import scala.tools.nsc.interactive.tests._
+
+object Test extends InteractiveTest {
+ override def execute(): Unit = recursiveAskForResponse()
+
+ def recursiveAskForResponse() {
+ val res0 = compiler.askForResponse( () => {
+ println("[ outer] askForResponse")
+ val res = compiler.askForResponse( () => { println("[nested] askForResponse") })
+ println (res.get(5000) match {
+ case Some(_) => "passed"
+ case None => "timeout"
+ })
+ })
+
+ res0.get
+
+ println("done")
+ }
+}
diff --git a/test/files/presentation/scope-completion-1.check b/test/files/presentation/scope-completion-1.check
new file mode 100644
index 0000000..93c6373
--- /dev/null
+++ b/test/files/presentation/scope-completion-1.check
@@ -0,0 +1,19 @@
+reload: Completions.scala
+
+askScopeCompletion at Completions.scala(6,2)
+================================================================================
+[response] askScopeCompletion at (6,2)
+retrieved 3 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `constructor Completion1()test.Completion1`
+[accessible: true] `object Completion2test.Completion2.type`
+================================================================================
+
+askScopeCompletion at Completions.scala(10,2)
+================================================================================
+[response] askScopeCompletion at (10,2)
+retrieved 3 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `constructor Completion2()test.Completion2.type`
+[accessible: true] `object Completion2test.Completion2.type`
+================================================================================
diff --git a/test/files/presentation/scope-completion-1/Test.scala b/test/files/presentation/scope-completion-1/Test.scala
new file mode 100644
index 0000000..bec1131
--- /dev/null
+++ b/test/files/presentation/scope-completion-1/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/scope-completion-1/src/Completions.scala b/test/files/presentation/scope-completion-1/src/Completions.scala
new file mode 100644
index 0000000..c4eea6b
--- /dev/null
+++ b/test/files/presentation/scope-completion-1/src/Completions.scala
@@ -0,0 +1,12 @@
+package test
+
+/* completion on empty class and object */
+
+class Completion1 {
+ /*_*/
+}
+
+object Completion2 {
+ /*_*/
+}
+
diff --git a/test/files/presentation/scope-completion-2.check b/test/files/presentation/scope-completion-2.check
new file mode 100644
index 0000000..462671d
--- /dev/null
+++ b/test/files/presentation/scope-completion-2.check
@@ -0,0 +1,33 @@
+reload: Completions.scala
+
+askScopeCompletion at Completions.scala(15,2)
+================================================================================
+[response] askScopeCompletion at (15,2)
+retrieved 10 members
+[accessible: true] `class Cc1Completion1.this.Cc1`
+[accessible: true] `class Co1test.Completion1.Co1`
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `constructor Completion1()test.Completion1`
+[accessible: true] `method fc1=> Int`
+[accessible: true] `method fo1=> Int`
+[accessible: true] `object Completion1test.Completion1.type`
+[accessible: true] `value ctest.Completion1`
+[accessible: true] `value vc1Int`
+[accessible: true] `value vo1Int`
+================================================================================
+
+askScopeCompletion at Completions.scala(29,2)
+================================================================================
+[response] askScopeCompletion at (29,2)
+retrieved 10 members
+[accessible: true] `class Cc1test.Completion1.c.Cc1`
+[accessible: true] `class Co1test.Completion1.Co1`
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `constructor Completion1()test.Completion1.type`
+[accessible: true] `method fc1=> Int`
+[accessible: true] `method fo1=> Int`
+[accessible: true] `object Completion1test.Completion1.type`
+[accessible: true] `value ctest.Completion1`
+[accessible: true] `value vc1Int`
+[accessible: true] `value vo1Int`
+================================================================================
diff --git a/test/files/presentation/scope-completion-2/Test.scala b/test/files/presentation/scope-completion-2/Test.scala
new file mode 100644
index 0000000..bec1131
--- /dev/null
+++ b/test/files/presentation/scope-completion-2/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/scope-completion-2/src/Completions.scala b/test/files/presentation/scope-completion-2/src/Completions.scala
new file mode 100644
index 0000000..f8d7cd6
--- /dev/null
+++ b/test/files/presentation/scope-completion-2/src/Completions.scala
@@ -0,0 +1,31 @@
+package test
+
+/* private elements are visible in the companion class/object */
+
+class Completion1 {
+
+ import Completion1._
+
+ private val vc1 = 0
+ private def fc1 = 0
+
+ private class Cc1 {
+ }
+
+ /*_*/
+}
+
+object Completion1 {
+
+ val c = new Completion1()
+ import c._
+
+ private val vo1 = 0
+ private def fo1 = 0
+
+ private class Co1 {
+ }
+
+ /*_*/
+}
+
diff --git a/test/files/presentation/scope-completion-3.check b/test/files/presentation/scope-completion-3.check
new file mode 100644
index 0000000..119fc1d
--- /dev/null
+++ b/test/files/presentation/scope-completion-3.check
@@ -0,0 +1,111 @@
+reload: Completions.scala
+
+askScopeCompletion at Completions.scala(75,2)
+================================================================================
+[response] askScopeCompletion at (75,2)
+retrieved 49 members
+[accessible: true] `class Base1test.Base1`
+[accessible: true] `class Cb1Completion1.this.Cb1`
+[accessible: true] `class Cc1Completion1.this.Cc1`
+[accessible: true] `class Cc2Completion1.this.Cc2`
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class Ct1Completion1.this.Ct1`
+[accessible: true] `constructor Completion1()test.Completion1`
+[accessible: true] `method fb1=> Int`
+[accessible: true] `method fb3=> Int`
+[accessible: true] `method fc1=> Int`
+[accessible: true] `method fc2=> Int`
+[accessible: true] `method ft1=> Int`
+[accessible: true] `method ft3=> Int`
+[accessible: true] `object Completion2test.Completion2.type`
+[accessible: true] `object Ob1Completion1.this.Ob1.type`
+[accessible: true] `object Oc1Completion1.this.Oc1.type`
+[accessible: true] `object Oc2Completion1.this.Oc2.type`
+[accessible: true] `object Ot1Completion1.this.Ot1.type`
+[accessible: true] `trait Trait1test.Trait1`
+[accessible: true] `type tb1Completion1.this.tb1`
+[accessible: true] `type tb3Completion1.this.tb3`
+[accessible: true] `type tc1Completion1.this.tc1`
+[accessible: true] `type tc2Completion1.this.tc2`
+[accessible: true] `type tt1Completion1.this.tt1`
+[accessible: true] `type tt3Completion1.this.tt3`
+[accessible: true] `value vb1Int`
+[accessible: true] `value vb3Int`
+[accessible: true] `value vc1Int`
+[accessible: true] `value vc2Int`
+[accessible: true] `value vt1Int`
+[accessible: true] `value vt3Int`
+[accessible: true] `variable rb1Int`
+[accessible: true] `variable rb3Int`
+[accessible: true] `variable rc1Int`
+[accessible: true] `variable rc2Int`
+[accessible: true] `variable rt1Int`
+[accessible: true] `variable rt3Int`
+[accessible: false] `class Cb2Completion1.this.Cb2`
+[accessible: false] `class Ct2Completion1.this.Ct2`
+[accessible: false] `method fb2=> Int`
+[accessible: false] `method ft2=> Int`
+[accessible: false] `object Ob2Completion1.this.Ob2.type`
+[accessible: false] `object Ot2Completion1.this.Ot2.type`
+[accessible: false] `type tb2Completion1.this.tb2`
+[accessible: false] `type tt2Completion1.this.tt2`
+[accessible: false] `value vb2Int`
+[accessible: false] `value vt2Int`
+[accessible: false] `variable rb2Int`
+[accessible: false] `variable rt2Int`
+================================================================================
+
+askScopeCompletion at Completions.scala(104,2)
+================================================================================
+[response] askScopeCompletion at (104,2)
+retrieved 49 members
+[accessible: true] `class Base1test.Base1`
+[accessible: true] `class Cb1test.Completion2.Cb1`
+[accessible: true] `class Co1test.Completion2.Co1`
+[accessible: true] `class Co2test.Completion2.Co2`
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class Ct1test.Completion2.Ct1`
+[accessible: true] `constructor Completion2()test.Completion2.type`
+[accessible: true] `method fb1=> Int`
+[accessible: true] `method fb3=> Int`
+[accessible: true] `method fo1=> Int`
+[accessible: true] `method fo2=> Int`
+[accessible: true] `method ft1=> Int`
+[accessible: true] `method ft3=> Int`
+[accessible: true] `object Completion2test.Completion2.type`
+[accessible: true] `object Ob1test.Completion2.Ob1.type`
+[accessible: true] `object Oo1test.Completion2.Oo1.type`
+[accessible: true] `object Oo2test.Completion2.Oo2.type`
+[accessible: true] `object Ot1test.Completion2.Ot1.type`
+[accessible: true] `trait Trait1test.Trait1`
+[accessible: true] `type tb1test.Completion2.tb1`
+[accessible: true] `type tb3test.Completion2.tb3`
+[accessible: true] `type to1test.Completion2.to1`
+[accessible: true] `type to2test.Completion2.to2`
+[accessible: true] `type tt1test.Completion2.tt1`
+[accessible: true] `type tt3test.Completion2.tt3`
+[accessible: true] `value vb1Int`
+[accessible: true] `value vb3Int`
+[accessible: true] `value vo1Int`
+[accessible: true] `value vo2Int`
+[accessible: true] `value vt1Int`
+[accessible: true] `value vt3Int`
+[accessible: true] `variable rb1Int`
+[accessible: true] `variable rb3Int`
+[accessible: true] `variable ro1Int`
+[accessible: true] `variable ro2Int`
+[accessible: true] `variable rt1Int`
+[accessible: true] `variable rt3Int`
+[accessible: false] `class Cb2test.Completion2.Cb2`
+[accessible: false] `class Ct2test.Completion2.Ct2`
+[accessible: false] `method fb2=> Int`
+[accessible: false] `method ft2=> Int`
+[accessible: false] `object Ob2test.Completion2.Ob2.type`
+[accessible: false] `object Ot2test.Completion2.Ot2.type`
+[accessible: false] `type tb2test.Completion2.tb2`
+[accessible: false] `type tt2test.Completion2.tt2`
+[accessible: false] `value vb2Int`
+[accessible: false] `value vt2Int`
+[accessible: false] `variable rb2Int`
+[accessible: false] `variable rt2Int`
+================================================================================
diff --git a/test/files/presentation/scope-completion-3/Test.scala b/test/files/presentation/scope-completion-3/Test.scala
new file mode 100644
index 0000000..bec1131
--- /dev/null
+++ b/test/files/presentation/scope-completion-3/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/scope-completion-3/src/Completions.scala b/test/files/presentation/scope-completion-3/src/Completions.scala
new file mode 100644
index 0000000..18cef1c
--- /dev/null
+++ b/test/files/presentation/scope-completion-3/src/Completions.scala
@@ -0,0 +1,106 @@
+package test
+
+/* check availability of members defined locally and in hierachy */
+
+abstract class Base1 {
+
+ type tb1 = Int
+ val vb1 = 0
+ var rb1 = 0
+ def fb1 = 0
+ class Cb1
+ object Ob1
+
+ private type tb2 = Int
+ private val vb2 = 0
+ private var rb2 = 0
+ private def fb2 = 0
+ private class Cb2
+ private object Ob2
+
+ type tb3
+ val vb3: Int
+ var rb3: Int
+ def fb3: Int
+}
+
+trait Trait1 {
+
+ type tt1 = Int
+ val vt1 = 0
+ var rt1 = 0
+ def ft1 = 0
+ class Ct1
+ object Ot1
+
+ private type tt2 = Int
+ private val vt2 = 0
+ private var rt2 = 0
+ private def ft2 = 0
+ private class Ct2
+ private object Ot2
+
+ type tt3
+ val vt3: Int
+ var rt3: Int
+ def ft3: Int
+}
+
+class Completion1 extends Base1 with Trait1 {
+
+ type tc1 = Int
+ val vc1 = 0
+ var rc1 = 0
+ def fc1 = 0
+ class Cc1
+ object Oc1
+
+ private type tc2 = Int
+ private val vc2 = 0
+ private var rc2 = 0
+ private def fc2 = 0
+ private class Cc2
+ private object Oc2
+
+ override type tb3 = Int
+ override val vb3 = 12
+ override var rb3 = 12
+ override def fb3 = 12
+
+ override type tt3 = Int
+ override val vt3 = 12
+ override var rt3 = 12
+ override def ft3 = 12
+
+ /*_*/
+}
+
+object Completion2 extends Base1 with Trait1 {
+
+ type to1 = Int
+ val vo1 = 0
+ var ro1 = 0
+ def fo1 = 0
+ class Co1
+ object Oo1
+
+ private type to2 = Int
+ private val vo2 = 0
+ private var ro2 = 0
+ private def fo2 = 0
+ private class Co2
+ private object Oo2
+
+ override type tb3 = Int
+ override val vb3 = 12
+ override var rb3 = 12
+ override def fb3 = 12
+
+ override type tt3 = Int
+ override val vt3 = 12
+ override var rt3 = 12
+ override def ft3 = 12
+
+ /*_*/
+}
+
diff --git a/test/files/presentation/scope-completion-4.check b/test/files/presentation/scope-completion-4.check
new file mode 100644
index 0000000..f6241cf
--- /dev/null
+++ b/test/files/presentation/scope-completion-4.check
@@ -0,0 +1,293 @@
+reload: Completions.scala
+
+askScopeCompletion at Completions.scala(12,8)
+================================================================================
+[response] askScopeCompletion at (12,8)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class fcfc`
+[accessible: true] `class ffcffc`
+[accessible: true] `constructor Completion1()test.Completion1`
+[accessible: true] `method f=> Unit`
+[accessible: true] `method ff=> Unit`
+[accessible: true] `method fff=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(15,6)
+================================================================================
+[response] askScopeCompletion at (15,6)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class fcfc`
+[accessible: true] `class ffcffc`
+[accessible: true] `constructor Completion1()test.Completion1`
+[accessible: true] `method f=> Unit`
+[accessible: true] `method ff=> Unit`
+[accessible: true] `method fff=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(18,8)
+================================================================================
+[response] askScopeCompletion at (18,8)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class fcfc`
+[accessible: true] `class ffcffc`
+[accessible: true] `constructor ffc()ffc`
+[accessible: true] `method f=> Unit`
+[accessible: true] `method ff=> Unit`
+[accessible: true] `method fff=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(21,6)
+================================================================================
+[response] askScopeCompletion at (21,6)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class fcfc`
+[accessible: true] `class ffcffc`
+[accessible: true] `constructor Completion1()test.Completion1`
+[accessible: true] `method f=> Unit`
+[accessible: true] `method ff=> Unit`
+[accessible: true] `method fff=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(24,4)
+================================================================================
+[response] askScopeCompletion at (24,4)
+retrieved 6 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class fcfc`
+[accessible: true] `constructor Completion1()test.Completion1`
+[accessible: true] `method f=> Unit`
+[accessible: true] `method ff=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(29,8)
+================================================================================
+[response] askScopeCompletion at (29,8)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class fccfc.this.fcc`
+[accessible: true] `class fcfc`
+[accessible: true] `constructor fc()fc`
+[accessible: true] `method f=> Unit`
+[accessible: true] `method fcf=> Unit`
+[accessible: true] `method ff=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(32,6)
+================================================================================
+[response] askScopeCompletion at (32,6)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class fccfc.this.fcc`
+[accessible: true] `class fcfc`
+[accessible: true] `constructor fc()fc`
+[accessible: true] `method f=> Unit`
+[accessible: true] `method fcf=> Unit`
+[accessible: true] `method ff=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(35,8)
+================================================================================
+[response] askScopeCompletion at (35,8)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class fccfc.this.fcc`
+[accessible: true] `class fcfc`
+[accessible: true] `constructor fcc()fc.this.fcc`
+[accessible: true] `method f=> Unit`
+[accessible: true] `method fcf=> Unit`
+[accessible: true] `method ff=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(38,6)
+================================================================================
+[response] askScopeCompletion at (38,6)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class fccfc.this.fcc`
+[accessible: true] `class fcfc`
+[accessible: true] `constructor fc()fc`
+[accessible: true] `method f=> Unit`
+[accessible: true] `method fcf=> Unit`
+[accessible: true] `method ff=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(41,4)
+================================================================================
+[response] askScopeCompletion at (41,4)
+retrieved 6 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class fcfc`
+[accessible: true] `constructor Completion1()test.Completion1`
+[accessible: true] `method f=> Unit`
+[accessible: true] `method ff=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(44,2)
+================================================================================
+[response] askScopeCompletion at (44,2)
+retrieved 4 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `constructor Completion1()test.Completion1`
+[accessible: true] `method f=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(51,8)
+================================================================================
+[response] askScopeCompletion at (51,8)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class ccc.this.cc`
+[accessible: true] `class ccccc.this.ccc`
+[accessible: true] `constructor ccc()cc.this.ccc`
+[accessible: true] `method ccf=> Unit`
+[accessible: true] `method cf=> Unit`
+[accessible: true] `method f=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(54,6)
+================================================================================
+[response] askScopeCompletion at (54,6)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class ccc.this.cc`
+[accessible: true] `class ccccc.this.ccc`
+[accessible: true] `constructor cc()c.this.cc`
+[accessible: true] `method ccf=> Unit`
+[accessible: true] `method cf=> Unit`
+[accessible: true] `method f=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(57,8)
+================================================================================
+[response] askScopeCompletion at (57,8)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class ccc.this.cc`
+[accessible: true] `class ccccc.this.ccc`
+[accessible: true] `constructor cc()c.this.cc`
+[accessible: true] `method ccf=> Unit`
+[accessible: true] `method cf=> Unit`
+[accessible: true] `method f=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(60,6)
+================================================================================
+[response] askScopeCompletion at (60,6)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class ccc.this.cc`
+[accessible: true] `class ccccc.this.ccc`
+[accessible: true] `constructor cc()c.this.cc`
+[accessible: true] `method ccf=> Unit`
+[accessible: true] `method cf=> Unit`
+[accessible: true] `method f=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(63,4)
+================================================================================
+[response] askScopeCompletion at (63,4)
+retrieved 6 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class ccc.this.cc`
+[accessible: true] `constructor c()Completion1.this.c`
+[accessible: true] `method cf=> Unit`
+[accessible: true] `method f=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(68,8)
+================================================================================
+[response] askScopeCompletion at (68,8)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class ccc.this.cc`
+[accessible: true] `class cfccfc`
+[accessible: true] `constructor cfc()cfc`
+[accessible: true] `method cf=> Unit`
+[accessible: true] `method cff=> Unit`
+[accessible: true] `method f=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(71,6)
+================================================================================
+[response] askScopeCompletion at (71,6)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class ccc.this.cc`
+[accessible: true] `class cfccfc`
+[accessible: true] `constructor c()Completion1.this.c`
+[accessible: true] `method cf=> Unit`
+[accessible: true] `method cff=> Unit`
+[accessible: true] `method f=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(74,8)
+================================================================================
+[response] askScopeCompletion at (74,8)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class ccc.this.cc`
+[accessible: true] `class cfccfc`
+[accessible: true] `constructor c()Completion1.this.c`
+[accessible: true] `method cf=> Unit`
+[accessible: true] `method cff=> Unit`
+[accessible: true] `method f=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(77,6)
+================================================================================
+[response] askScopeCompletion at (77,6)
+retrieved 8 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class ccc.this.cc`
+[accessible: true] `class cfccfc`
+[accessible: true] `constructor c()Completion1.this.c`
+[accessible: true] `method cf=> Unit`
+[accessible: true] `method cff=> Unit`
+[accessible: true] `method f=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(80,4)
+================================================================================
+[response] askScopeCompletion at (80,4)
+retrieved 6 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `class ccc.this.cc`
+[accessible: true] `constructor c()Completion1.this.c`
+[accessible: true] `method cf=> Unit`
+[accessible: true] `method f=> Unit`
+================================================================================
+
+askScopeCompletion at Completions.scala(83,2)
+================================================================================
+[response] askScopeCompletion at (83,2)
+retrieved 4 members
+[accessible: true] `class Completion1test.Completion1`
+[accessible: true] `class cCompletion1.this.c`
+[accessible: true] `constructor Completion1()test.Completion1`
+[accessible: true] `method f=> Unit`
+================================================================================
diff --git a/test/files/presentation/scope-completion-4/Test.scala b/test/files/presentation/scope-completion-4/Test.scala
new file mode 100644
index 0000000..bec1131
--- /dev/null
+++ b/test/files/presentation/scope-completion-4/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/scope-completion-4/src/Completions.scala b/test/files/presentation/scope-completion-4/src/Completions.scala
new file mode 100644
index 0000000..d113157
--- /dev/null
+++ b/test/files/presentation/scope-completion-4/src/Completions.scala
@@ -0,0 +1,84 @@
+package test
+
+/* check that members defined in sub-block are not visible*/
+
+class Completion1 {
+
+ def f {
+
+ def ff {
+
+ def fff {
+ /*_*/
+ }
+
+ /*_*/
+
+ class ffc {
+ /*_*/
+ }
+
+ /*_*/
+ }
+
+ /*_*/
+
+ class fc {
+
+ def fcf {
+ /*_*/
+ }
+
+ /*_*/
+
+ class fcc {
+ /*_*/
+ }
+
+ /*_*/
+ }
+
+ /*_*/
+ }
+
+ /*_*/
+
+ class c {
+
+ class cc {
+
+ class ccc {
+ /*_*/
+ }
+
+ /*_*/
+
+ def ccf {
+ /*_*/
+ }
+
+ /*_*/
+ }
+
+ /*_*/
+
+ def cf {
+
+ class cfc {
+ /*_*/
+ }
+
+ /*_*/
+
+ def cff {
+ /*_*/
+ }
+
+ /*_*/
+ }
+
+ /*_*/
+ }
+
+ /*_*/
+}
diff --git a/test/files/presentation/scope-completion-import.check b/test/files/presentation/scope-completion-import.check
new file mode 100644
index 0000000..33b498c
--- /dev/null
+++ b/test/files/presentation/scope-completion-import.check
@@ -0,0 +1,193 @@
+reload: Completions.scala
+
+askScopeCompletion at Completions.scala(23,4)
+================================================================================
+[response] askScopeCompletion at (23,4)
+retrieved 18 members
+[accessible: true] `class Ctest.C`
+[accessible: true] `class Foo_1test.Foo_1`
+[accessible: true] `class Foo_2test.Foo_2`
+[accessible: true] `class Foo_3test.Foo_3`
+[accessible: true] `class Footest.Foo`
+[accessible: true] `constructor Foo()test.Foo`
+[accessible: true] `method fCCC=> Int`
+[accessible: true] `method fOOO=> Int`
+[accessible: true] `object Otest.O.type`
+[accessible: true] `value otest.O.type`
+[accessible: true] `value vCCCInt`
+[accessible: true] `value vOOOInt`
+[accessible: true] `variable rCCCInt`
+[accessible: true] `variable rOOOInt`
+[accessible: false] `value pVCCCInt`
+[accessible: false] `value pVOOOInt`
+[accessible: false] `variable pRCCCInt`
+[accessible: false] `variable pROOOInt`
+================================================================================
+
+askScopeCompletion at Completions.scala(27,4)
+================================================================================
+[response] askScopeCompletion at (27,4)
+retrieved 17 members
+[accessible: true] `class Ctest.C`
+[accessible: true] `class Foo_1test.Foo_1`
+[accessible: true] `class Foo_2test.Foo_2`
+[accessible: true] `class Foo_3test.Foo_3`
+[accessible: true] `class Footest.Foo`
+[accessible: true] `constructor Foo()test.Foo`
+[accessible: true] `method fCCC=> Int`
+[accessible: true] `method fOOO=> Int`
+[accessible: true] `object Otest.O.type`
+[accessible: true] `value vCCCInt`
+[accessible: true] `value vOOOInt`
+[accessible: true] `variable rCCCInt`
+[accessible: true] `variable rOOOInt`
+[accessible: false] `value pVCCCInt`
+[accessible: false] `value pVOOOInt`
+[accessible: false] `variable pRCCCInt`
+[accessible: false] `variable pROOOInt`
+================================================================================
+
+askScopeCompletion at Completions.scala(32,4)
+================================================================================
+[response] askScopeCompletion at (32,4)
+retrieved 13 members
+[accessible: true] `class Ctest.C`
+[accessible: true] `class Foo_1test.Foo_1`
+[accessible: true] `class Foo_2test.Foo_2`
+[accessible: true] `class Foo_3test.Foo_3`
+[accessible: true] `class Footest.Foo`
+[accessible: true] `constructor Foo()test.Foo`
+[accessible: true] `method fCCC=> Int`
+[accessible: true] `object Otest.O.type`
+[accessible: true] `value ctest.C`
+[accessible: true] `value vCCCInt`
+[accessible: true] `variable rCCCInt`
+[accessible: false] `value pVCCCInt`
+[accessible: false] `variable pRCCCInt`
+================================================================================
+
+askScopeCompletion at Completions.scala(35,5)
+================================================================================
+[response] askScopeCompletion at (35,5)
+retrieved 8 members
+[accessible: true] `class Ctest.C`
+[accessible: true] `class Foo_1test.Foo_1`
+[accessible: true] `class Foo_2test.Foo_2`
+[accessible: true] `class Foo_3test.Foo_3`
+[accessible: true] `class Footest.Foo`
+[accessible: true] `constructor Foo()test.Foo`
+[accessible: true] `object Otest.O.type`
+[accessible: true] `value ctest.C`
+================================================================================
+
+askScopeCompletion at Completions.scala(38,5)
+================================================================================
+[response] askScopeCompletion at (38,5)
+retrieved 13 members
+[accessible: true] `class Ctest.C`
+[accessible: true] `class Foo_1test.Foo_1`
+[accessible: true] `class Foo_2test.Foo_2`
+[accessible: true] `class Foo_3test.Foo_3`
+[accessible: true] `class Footest.Foo`
+[accessible: true] `constructor Foo()test.Foo`
+[accessible: true] `method fCCC=> Int`
+[accessible: true] `object Otest.O.type`
+[accessible: true] `value ctest.C`
+[accessible: true] `value vCCCInt`
+[accessible: true] `variable rCCCInt`
+[accessible: false] `value pVCCCInt`
+[accessible: false] `variable pRCCCInt`
+================================================================================
+
+askScopeCompletion at Completions.scala(40,5)
+================================================================================
+[response] askScopeCompletion at (40,5)
+retrieved 18 members
+[accessible: true] `class Ctest.C`
+[accessible: true] `class Foo_1test.Foo_1`
+[accessible: true] `class Foo_2test.Foo_2`
+[accessible: true] `class Foo_3test.Foo_3`
+[accessible: true] `class Footest.Foo`
+[accessible: true] `constructor Foo()test.Foo`
+[accessible: true] `method fCCC=> Int`
+[accessible: true] `method fOOO=> Int`
+[accessible: true] `object Otest.O.type`
+[accessible: true] `value ctest.C`
+[accessible: true] `value vCCCInt`
+[accessible: true] `value vOOOInt`
+[accessible: true] `variable rCCCInt`
+[accessible: true] `variable rOOOInt`
+[accessible: false] `value pVCCCInt`
+[accessible: false] `value pVOOOInt`
+[accessible: false] `variable pRCCCInt`
+[accessible: false] `variable pROOOInt`
+================================================================================
+
+askScopeCompletion at Completions.scala(49,4)
+================================================================================
+[response] askScopeCompletion at (49,4)
+retrieved 18 members
+[accessible: true] `class Ctest.C`
+[accessible: true] `class Foo_1test.Foo_1`
+[accessible: true] `class Foo_2test.Foo_2`
+[accessible: true] `class Foo_3test.Foo_3`
+[accessible: true] `class Footest.Foo`
+[accessible: true] `constructor Foo_1()test.Foo_1`
+[accessible: true] `method bar=> Unit`
+[accessible: true] `method fCCC=> Int`
+[accessible: true] `method fOOO=> Int`
+[accessible: true] `object Otest.O.type`
+[accessible: true] `value vCCCInt`
+[accessible: true] `value vOOOInt`
+[accessible: true] `variable rCCCInt`
+[accessible: true] `variable rOOOInt`
+[accessible: false] `value pVCCCInt`
+[accessible: false] `value pVOOOInt`
+[accessible: false] `variable pRCCCInt`
+[accessible: false] `variable pROOOInt`
+================================================================================
+
+askScopeCompletion at Completions.scala(59,4)
+================================================================================
+[response] askScopeCompletion at (59,4)
+retrieved 19 members
+[accessible: true] `class Ctest.C`
+[accessible: true] `class Foo_1test.Foo_1`
+[accessible: true] `class Foo_2test.Foo_2`
+[accessible: true] `class Foo_3test.Foo_3`
+[accessible: true] `class Footest.Foo`
+[accessible: true] `constructor Foo_2()test.Foo_2`
+[accessible: true] `method bar=> Unit`
+[accessible: true] `method fCCC=> Int`
+[accessible: true] `method fOOO=> Int`
+[accessible: true] `object Otest.O.type`
+[accessible: true] `value otest.O.type`
+[accessible: true] `value vCCCInt`
+[accessible: true] `value vOOOInt`
+[accessible: true] `variable rCCCInt`
+[accessible: true] `variable rOOOInt`
+[accessible: false] `value pVCCCInt`
+[accessible: false] `value pVOOOInt`
+[accessible: false] `variable pRCCCInt`
+[accessible: false] `variable pROOOInt`
+================================================================================
+
+askScopeCompletion at Completions.scala(69,4)
+================================================================================
+[response] askScopeCompletion at (69,4)
+retrieved 14 members
+[accessible: true] `class Ctest.C`
+[accessible: true] `class Foo_1test.Foo_1`
+[accessible: true] `class Foo_2test.Foo_2`
+[accessible: true] `class Foo_3test.Foo_3`
+[accessible: true] `class Footest.Foo`
+[accessible: true] `constructor Foo_3()test.Foo_3`
+[accessible: true] `method bar=> Unit`
+[accessible: true] `method fCCC=> Int`
+[accessible: true] `object Otest.O.type`
+[accessible: true] `value ctest.C`
+[accessible: true] `value vCCCInt`
+[accessible: true] `variable rCCCInt`
+[accessible: false] `value pVCCCInt`
+[accessible: false] `variable pRCCCInt`
+================================================================================
diff --git a/test/files/presentation/scope-completion-import/Test.scala b/test/files/presentation/scope-completion-import/Test.scala
new file mode 100644
index 0000000..bec1131
--- /dev/null
+++ b/test/files/presentation/scope-completion-import/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/scope-completion-import/src/Completions.scala b/test/files/presentation/scope-completion-import/src/Completions.scala
new file mode 100644
index 0000000..d30aa0b
--- /dev/null
+++ b/test/files/presentation/scope-completion-import/src/Completions.scala
@@ -0,0 +1,72 @@
+package test
+
+class C {
+ val vCCC : Int = 0
+ var rCCC : Int = 0
+ private val pVCCC : Int = 0
+ private var pRCCC : Int = 0
+ def fCCC : Int = 0
+}
+
+object O extends C {
+ val vOOO : Int = 0
+ var rOOO : Int = 0
+ private val pVOOO : Int = 0
+ private var pROOO : Int = 0
+ def fOOO : Int = 0
+}
+
+class Foo {
+ {
+ val o = O
+ import o._
+ /*_*/
+ }
+ {
+ import O._
+ /*_*/
+ }
+ {
+ val c = new C
+ import c._
+ /*_*/
+ }
+ {
+ f/*_*/
+ val c = new C
+ import c._
+ f/*_*/
+ import O._
+ f/*_*/
+ }
+}
+
+class Foo_1 {
+
+ import O._
+
+ def bar {
+ /*_*/
+ }
+}
+
+class Foo_2 {
+
+ val o = O
+ import o._
+
+ def bar {
+ /*_*/
+ }
+}
+
+class Foo_3 {
+
+ val c = new C
+ import c._
+
+ def bar {
+ /*_*/
+ }
+}
+
diff --git a/test/files/presentation/t5708.check b/test/files/presentation/t5708.check
new file mode 100644
index 0000000..fe43f83
--- /dev/null
+++ b/test/files/presentation/t5708.check
@@ -0,0 +1,47 @@
+reload: Completions.scala
+
+askTypeCompletion at Completions.scala(17,9)
+================================================================================
+[response] askTypeCompletion at (17,9)
+retrieved 44 members
+[accessible: true] `lazy value fooInt`
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method +(other: String)String`
+[accessible: true] `method ->[B](y: B)(test.Compat.type, B)`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method ensuring(cond: Boolean)test.Compat.type`
+[accessible: true] `method ensuring(cond: Boolean, msg: => Any)test.Compat.type`
+[accessible: true] `method ensuring(cond: test.Compat.type => Boolean)test.Compat.type`
+[accessible: true] `method ensuring(cond: test.Compat.type => Boolean, msg: => Any)test.Compat.type`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method formatted(fmtstr: String)String`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method pkgPrivateM=> String`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method toString()String`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `method x=> test.Compat.type`
+[accessible: true] `method →[B](y: B)(test.Compat.type, B)`
+[accessible: true] `value CONST_STRINGString("constant")`
+[accessible: true] `value __leftOfArrowtest.Compat.type`
+[accessible: true] `value __resultOfEnsuringtest.Compat.type`
+[accessible: true] `value pkgPrivateVString`
+[accessible: true] `value selfAny`
+[accessible: false] `method clone()Object`
+[accessible: false] `method finalize()Unit`
+[accessible: false] `method privateM=> String`
+[accessible: false] `method protectedValM=> String`
+[accessible: false] `value privateVString`
+[accessible: false] `value protectedVString`
+================================================================================
diff --git a/test/files/presentation/t5708/Test.scala b/test/files/presentation/t5708/Test.scala
new file mode 100644
index 0000000..bec1131
--- /dev/null
+++ b/test/files/presentation/t5708/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/t5708/src/Completions.scala b/test/files/presentation/t5708/src/Completions.scala
new file mode 100644
index 0000000..1e9e5d5
--- /dev/null
+++ b/test/files/presentation/t5708/src/Completions.scala
@@ -0,0 +1,18 @@
+package test
+
+object Compat {
+ final val CONST_STRING = "constant"
+ lazy val foo = 4
+
+ private val privateV = ""
+ private[test] val pkgPrivateV = ""
+ protected val protectedV = ""
+
+ private def privateM = ""
+ private[test] def pkgPrivateM = ""
+ protected def protectedValM = ""
+}
+
+class Foo {
+ Compat./*!*/CONST_STRING // its 'accessible' flag is false
+}
diff --git a/test/files/presentation/t7548.check b/test/files/presentation/t7548.check
new file mode 100644
index 0000000..5bfb0d2
--- /dev/null
+++ b/test/files/presentation/t7548.check
@@ -0,0 +1 @@
+(x: Int)Unit
diff --git a/test/files/presentation/t7548/Test.scala b/test/files/presentation/t7548/Test.scala
new file mode 100644
index 0000000..94a6048
--- /dev/null
+++ b/test/files/presentation/t7548/Test.scala
@@ -0,0 +1,17 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest {
+ override protected def loadSources() { /* don't parse or typecheck sources */ }
+
+ import compiler._
+
+ override def runDefaultTests() {
+ val res = new Response[Tree]
+ val pos = compiler.rangePos(sourceFiles.head, 102,102,102)
+ compiler.askTypeAt(pos, res)
+ res.get match {
+ case Left(tree) => compiler.ask(() => reporter.println(tree.tpe))
+ case Right(ex) => reporter.println(ex)
+ }
+ }
+}
diff --git a/test/files/presentation/t7548/src/Foo.scala b/test/files/presentation/t7548/src/Foo.scala
new file mode 100644
index 0000000..cc997f6
--- /dev/null
+++ b/test/files/presentation/t7548/src/Foo.scala
@@ -0,0 +1,7 @@
+object Foo {
+ def foo(x: Int) = {}
+ def foo(x: String) = {}
+ def foo(x: Int, y: String) = {}
+
+ foo(2)
+}
\ No newline at end of file
diff --git a/test/files/presentation/t7548b.check b/test/files/presentation/t7548b.check
new file mode 100644
index 0000000..35445fe
--- /dev/null
+++ b/test/files/presentation/t7548b.check
@@ -0,0 +1 @@
+Foo.this.I2BI(Foo.this.I).+: (other: Foo.BI.type)Unit
diff --git a/test/files/presentation/t7548b/Test.scala b/test/files/presentation/t7548b/Test.scala
new file mode 100644
index 0000000..0c022df
--- /dev/null
+++ b/test/files/presentation/t7548b/Test.scala
@@ -0,0 +1,17 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest {
+ override protected def loadSources() { /* don't parse or typecheck sources */ }
+
+ import compiler._
+
+ override def runDefaultTests() {
+ val res = new Response[Tree]
+ val pos = compiler.rangePos(sourceFiles.head, 191, 191, 191) // +
+ compiler.askTypeAt(pos, res)
+ res.get match {
+ case Left(tree) => compiler.ask(() => reporter.println(s"$tree: ${tree.tpe}"))
+ case Right(ex) => reporter.println(ex)
+ }
+ }
+}
diff --git a/test/files/presentation/t7548b/src/Foo.scala b/test/files/presentation/t7548b/src/Foo.scala
new file mode 100644
index 0000000..5cf0a4e
--- /dev/null
+++ b/test/files/presentation/t7548b/src/Foo.scala
@@ -0,0 +1,12 @@
+import language._
+
+object Foo {
+ object I {
+ def +(other: I.type) : Unit = ()
+ }
+ object BI {
+ def +(other: BI.type): Unit = ()
+ }
+ implicit def I2BI(i: I.type): BI.type = BI
+ I.+(BI)
+}
diff --git a/test/files/presentation/t7915.check b/test/files/presentation/t7915.check
new file mode 100644
index 0000000..b18b4dd
--- /dev/null
+++ b/test/files/presentation/t7915.check
@@ -0,0 +1,11 @@
+reload: Foo.scala
+
+askHyperlinkPos for `Bar` at (7,11) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `Bar` at (1,7) Foo.scala
+================================================================================
+
+askHyperlinkPos for `bar` at (7,22) Foo.scala
+================================================================================
+[response] found askHyperlinkPos for `bar` at (2,7) Foo.scala
+================================================================================
diff --git a/test/files/presentation/t7915/Test.scala b/test/files/presentation/t7915/Test.scala
new file mode 100644
index 0000000..c2f89bd
--- /dev/null
+++ b/test/files/presentation/t7915/Test.scala
@@ -0,0 +1,8 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest {
+ override def runDefaultTests() {
+ sourceFiles foreach (src => askLoadedTyped(src).get)
+ super.runDefaultTests()
+ }
+}
diff --git a/test/files/presentation/t7915/src/Foo.scala b/test/files/presentation/t7915/src/Foo.scala
new file mode 100644
index 0000000..a4166ae
--- /dev/null
+++ b/test/files/presentation/t7915/src/Foo.scala
@@ -0,0 +1,9 @@
+class Bar {
+ def bar(b: Int = 2) {}
+}
+
+class Foo {
+ def foo() {
+ new Bar/*#*/().bar/*#*/()
+ }
+}
diff --git a/test/files/presentation/t8085.check b/test/files/presentation/t8085.check
new file mode 100644
index 0000000..79c1b2a
--- /dev/null
+++ b/test/files/presentation/t8085.check
@@ -0,0 +1,3 @@
+reload: NodeScalaSuite.scala
+open package module: package nodescala
+Test OK
diff --git a/test/files/presentation/t8085.flags b/test/files/presentation/t8085.flags
new file mode 100644
index 0000000..ec35b22
--- /dev/null
+++ b/test/files/presentation/t8085.flags
@@ -0,0 +1 @@
+-sourcepath src
diff --git a/test/files/presentation/t8085/Test.scala b/test/files/presentation/t8085/Test.scala
new file mode 100644
index 0000000..e46b7ab
--- /dev/null
+++ b/test/files/presentation/t8085/Test.scala
@@ -0,0 +1,27 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+import scala.reflect.internal.util.SourceFile
+import scala.tools.nsc.interactive.Response
+
+object Test extends InteractiveTest {
+
+ override def execute(): Unit = {
+ val src = loadSourceAndWaitUntilTypechecked("NodeScalaSuite.scala")
+ checkErrors(src)
+ }
+
+ private def loadSourceAndWaitUntilTypechecked(sourceName: String): SourceFile = {
+ val sourceFile = sourceFiles.find(_.file.name == sourceName).head
+ askReload(List(sourceFile)).get
+ askLoadedTyped(sourceFile).get
+ sourceFile
+ }
+
+ private def checkErrors(source: SourceFile): Unit = compiler.getUnitOf(source) match {
+ case Some(unit) =>
+ val problems = unit.problems.toList
+ if(problems.isEmpty) reporter.println("Test OK")
+ else problems.foreach(problem => reporter.println(problem.msg))
+
+ case None => reporter.println("No compilation unit found for " + source.file.name)
+ }
+}
diff --git a/test/files/presentation/t8085/src/nodescala/Foo.scala b/test/files/presentation/t8085/src/nodescala/Foo.scala
new file mode 100644
index 0000000..19efdb6
--- /dev/null
+++ b/test/files/presentation/t8085/src/nodescala/Foo.scala
@@ -0,0 +1,3 @@
+package nodescala
+
+class Foo
diff --git a/test/files/presentation/t8085/src/nodescala/NodeScalaSuite.scala b/test/files/presentation/t8085/src/nodescala/NodeScalaSuite.scala
new file mode 100644
index 0000000..45e43c7
--- /dev/null
+++ b/test/files/presentation/t8085/src/nodescala/NodeScalaSuite.scala
@@ -0,0 +1,10 @@
+package nodescala
+
+class NodeScalaSuite {
+ "".rich
+
+ // This is here only to prove that the presentation compiler is instantiated with the
+ // correct `sourcepath` value (if it wasn't, you would see a `not found: type Foo` in
+ // the test's output
+ println(new Foo())
+}
diff --git a/test/files/presentation/t8085/src/nodescala/package.scala b/test/files/presentation/t8085/src/nodescala/package.scala
new file mode 100644
index 0000000..26fb9f0
--- /dev/null
+++ b/test/files/presentation/t8085/src/nodescala/package.scala
@@ -0,0 +1,7 @@
+import scala.Some // <-- if you move the import *inside* the package object, then it all works fine!!
+
+package object nodescala {
+ implicit class StringOps(val f: String) {
+ def rich = 0
+ }
+}
diff --git a/test/files/presentation/t8085b.check b/test/files/presentation/t8085b.check
new file mode 100644
index 0000000..79c1b2a
--- /dev/null
+++ b/test/files/presentation/t8085b.check
@@ -0,0 +1,3 @@
+reload: NodeScalaSuite.scala
+open package module: package nodescala
+Test OK
diff --git a/test/files/presentation/t8085b.flags b/test/files/presentation/t8085b.flags
new file mode 100644
index 0000000..ec35b22
--- /dev/null
+++ b/test/files/presentation/t8085b.flags
@@ -0,0 +1 @@
+-sourcepath src
diff --git a/test/files/presentation/t8085b/Test.scala b/test/files/presentation/t8085b/Test.scala
new file mode 100644
index 0000000..e46b7ab
--- /dev/null
+++ b/test/files/presentation/t8085b/Test.scala
@@ -0,0 +1,27 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+import scala.reflect.internal.util.SourceFile
+import scala.tools.nsc.interactive.Response
+
+object Test extends InteractiveTest {
+
+ override def execute(): Unit = {
+ val src = loadSourceAndWaitUntilTypechecked("NodeScalaSuite.scala")
+ checkErrors(src)
+ }
+
+ private def loadSourceAndWaitUntilTypechecked(sourceName: String): SourceFile = {
+ val sourceFile = sourceFiles.find(_.file.name == sourceName).head
+ askReload(List(sourceFile)).get
+ askLoadedTyped(sourceFile).get
+ sourceFile
+ }
+
+ private def checkErrors(source: SourceFile): Unit = compiler.getUnitOf(source) match {
+ case Some(unit) =>
+ val problems = unit.problems.toList
+ if(problems.isEmpty) reporter.println("Test OK")
+ else problems.foreach(problem => reporter.println(problem.msg))
+
+ case None => reporter.println("No compilation unit found for " + source.file.name)
+ }
+}
diff --git a/test/files/presentation/t8085b/src/p1/nodescala/Foo.scala b/test/files/presentation/t8085b/src/p1/nodescala/Foo.scala
new file mode 100644
index 0000000..8ed1ada
--- /dev/null
+++ b/test/files/presentation/t8085b/src/p1/nodescala/Foo.scala
@@ -0,0 +1,4 @@
+package p1
+package nodescala
+
+class Foo
diff --git a/test/files/presentation/t8085b/src/p1/nodescala/NodeScalaSuite.scala b/test/files/presentation/t8085b/src/p1/nodescala/NodeScalaSuite.scala
new file mode 100644
index 0000000..f6da67b
--- /dev/null
+++ b/test/files/presentation/t8085b/src/p1/nodescala/NodeScalaSuite.scala
@@ -0,0 +1,11 @@
+package p1
+package nodescala
+
+class NodeScalaSuite {
+ "".rich
+
+ // This is here only to prove that the presentation compiler is instantiated with the
+ // correct `sourcepath` value (if it wasn't, you would see a `not found: type Foo` in
+ // the test's output
+ println(new Foo())
+}
diff --git a/test/files/presentation/t8085b/src/p1/nodescala/package.scala b/test/files/presentation/t8085b/src/p1/nodescala/package.scala
new file mode 100644
index 0000000..cc383f1
--- /dev/null
+++ b/test/files/presentation/t8085b/src/p1/nodescala/package.scala
@@ -0,0 +1,9 @@
+import scala.Some // <-- if you move the import *inside* the package object, then it all works fine!!
+
+package p1 {
+ package object nodescala {
+ implicit class StringOps(val f: String) {
+ def rich = 0
+ }
+ }
+}
diff --git a/test/files/presentation/visibility.check b/test/files/presentation/visibility.check
new file mode 100644
index 0000000..221e3fc
--- /dev/null
+++ b/test/files/presentation/visibility.check
@@ -0,0 +1,221 @@
+reload: Completions.scala
+
+askTypeCompletion at Completions.scala(14,12)
+================================================================================
+[response] askTypeCompletion at (14,12)
+retrieved 42 members
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method +(other: String)String`
+[accessible: true] `method ->[B](y: B)(accessibility.Foo, B)`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method clone()Object`
+[accessible: true] `method ensuring(cond: Boolean)accessibility.Foo`
+[accessible: true] `method ensuring(cond: Boolean, msg: => Any)accessibility.Foo`
+[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean)accessibility.Foo`
+[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean, msg: => Any)accessibility.Foo`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method finalize()Unit`
+[accessible: true] `method formatted(fmtstr: String)String`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method secretPrivate()Unit`
+[accessible: true] `method secretProtected()Unit`
+[accessible: true] `method secretProtectedInPackage()Unit`
+[accessible: true] `method secretPublic()Unit`
+[accessible: true] `method someTests(other: accessibility.Foo)Unit`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method toString()String`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `method x=> accessibility.Foo`
+[accessible: true] `method →[B](y: B)(accessibility.Foo, B)`
+[accessible: true] `value __leftOfArrowaccessibility.Foo`
+[accessible: true] `value __resultOfEnsuringaccessibility.Foo`
+[accessible: true] `value selfAny`
+[accessible: false] `method secretPrivateThis()Unit`
+================================================================================
+
+askTypeCompletion at Completions.scala(16,11)
+================================================================================
+[response] askTypeCompletion at (16,11)
+retrieved 42 members
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method +(other: String)String`
+[accessible: true] `method ->[B](y: B)(accessibility.Foo, B)`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method clone()Object`
+[accessible: true] `method ensuring(cond: Boolean)accessibility.Foo`
+[accessible: true] `method ensuring(cond: Boolean, msg: => Any)accessibility.Foo`
+[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean)accessibility.Foo`
+[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean, msg: => Any)accessibility.Foo`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method finalize()Unit`
+[accessible: true] `method formatted(fmtstr: String)String`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method secretPrivate()Unit`
+[accessible: true] `method secretPrivateThis()Unit`
+[accessible: true] `method secretProtected()Unit`
+[accessible: true] `method secretProtectedInPackage()Unit`
+[accessible: true] `method secretPublic()Unit`
+[accessible: true] `method someTests(other: accessibility.Foo)Unit`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method toString()String`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `method x=> accessibility.Foo`
+[accessible: true] `method →[B](y: B)(accessibility.Foo, B)`
+[accessible: true] `value __leftOfArrowaccessibility.Foo`
+[accessible: true] `value __resultOfEnsuringaccessibility.Foo`
+[accessible: true] `value selfAny`
+================================================================================
+
+askTypeCompletion at Completions.scala(22,11)
+================================================================================
+[response] askTypeCompletion at (22,11)
+retrieved 42 members
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method +(other: String)String`
+[accessible: true] `method ->[B](y: B)(accessibility.AccessibilityChecks, B)`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method clone()Object`
+[accessible: true] `method ensuring(cond: Boolean)accessibility.AccessibilityChecks`
+[accessible: true] `method ensuring(cond: Boolean, msg: => Any)accessibility.AccessibilityChecks`
+[accessible: true] `method ensuring(cond: accessibility.AccessibilityChecks => Boolean)accessibility.AccessibilityChecks`
+[accessible: true] `method ensuring(cond: accessibility.AccessibilityChecks => Boolean, msg: => Any)accessibility.AccessibilityChecks`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method finalize()Unit`
+[accessible: true] `method formatted(fmtstr: String)String`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method secretProtected()Unit`
+[accessible: true] `method secretProtectedInPackage()Unit`
+[accessible: true] `method secretPublic()Unit`
+[accessible: true] `method someTests(other: accessibility.Foo)Unit`
+[accessible: true] `method someTests=> Unit`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method toString()String`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `method x=> accessibility.AccessibilityChecks`
+[accessible: true] `method →[B](y: B)(accessibility.AccessibilityChecks, B)`
+[accessible: true] `value __leftOfArrowaccessibility.AccessibilityChecks`
+[accessible: true] `value __resultOfEnsuringaccessibility.AccessibilityChecks`
+[accessible: true] `value selfAny`
+[accessible: false] `method secretPrivate()Unit`
+================================================================================
+
+askTypeCompletion at Completions.scala(28,10)
+================================================================================
+[response] askTypeCompletion at (28,10)
+retrieved 42 members
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method +(other: String)String`
+[accessible: true] `method ->[B](y: B)(accessibility.Foo, B)`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method ensuring(cond: Boolean)accessibility.Foo`
+[accessible: true] `method ensuring(cond: Boolean, msg: => Any)accessibility.Foo`
+[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean)accessibility.Foo`
+[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean, msg: => Any)accessibility.Foo`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method formatted(fmtstr: String)String`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method secretProtectedInPackage()Unit`
+[accessible: true] `method secretPublic()Unit`
+[accessible: true] `method someTests(other: accessibility.Foo)Unit`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method toString()String`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `method x=> accessibility.Foo`
+[accessible: true] `method →[B](y: B)(accessibility.Foo, B)`
+[accessible: true] `value __leftOfArrowaccessibility.Foo`
+[accessible: true] `value __resultOfEnsuringaccessibility.Foo`
+[accessible: true] `value selfAny`
+[accessible: false] `method clone()Object`
+[accessible: false] `method finalize()Unit`
+[accessible: false] `method secretPrivate()Unit`
+[accessible: false] `method secretPrivateThis()Unit`
+[accessible: false] `method secretProtected()Unit`
+================================================================================
+
+askTypeCompletion at Completions.scala(37,8)
+================================================================================
+[response] askTypeCompletion at (37,8)
+retrieved 42 members
+[accessible: true] `method !=(x$1: Any)Boolean`
+[accessible: true] `method !=(x$1: AnyRef)Boolean`
+[accessible: true] `method ##()Int`
+[accessible: true] `method +(other: String)String`
+[accessible: true] `method ->[B](y: B)(accessibility.Foo, B)`
+[accessible: true] `method ==(x$1: Any)Boolean`
+[accessible: true] `method ==(x$1: AnyRef)Boolean`
+[accessible: true] `method asInstanceOf[T0]=> T0`
+[accessible: true] `method ensuring(cond: Boolean)accessibility.Foo`
+[accessible: true] `method ensuring(cond: Boolean, msg: => Any)accessibility.Foo`
+[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean)accessibility.Foo`
+[accessible: true] `method ensuring(cond: accessibility.Foo => Boolean, msg: => Any)accessibility.Foo`
+[accessible: true] `method eq(x$1: AnyRef)Boolean`
+[accessible: true] `method equals(x$1: Any)Boolean`
+[accessible: true] `method formatted(fmtstr: String)String`
+[accessible: true] `method hashCode()Int`
+[accessible: true] `method isInstanceOf[T0]=> Boolean`
+[accessible: true] `method ne(x$1: AnyRef)Boolean`
+[accessible: true] `method notify()Unit`
+[accessible: true] `method notifyAll()Unit`
+[accessible: true] `method secretPublic()Unit`
+[accessible: true] `method someTests(other: accessibility.Foo)Unit`
+[accessible: true] `method synchronized[T0](x$1: T0)T0`
+[accessible: true] `method toString()String`
+[accessible: true] `method wait()Unit`
+[accessible: true] `method wait(x$1: Long)Unit`
+[accessible: true] `method wait(x$1: Long, x$2: Int)Unit`
+[accessible: true] `method x=> accessibility.Foo`
+[accessible: true] `method →[B](y: B)(accessibility.Foo, B)`
+[accessible: true] `value __leftOfArrowaccessibility.Foo`
+[accessible: true] `value __resultOfEnsuringaccessibility.Foo`
+[accessible: true] `value selfAny`
+[accessible: false] `method clone()Object`
+[accessible: false] `method finalize()Unit`
+[accessible: false] `method secretPrivate()Unit`
+[accessible: false] `method secretPrivateThis()Unit`
+[accessible: false] `method secretProtected()Unit`
+[accessible: false] `method secretProtectedInPackage()Unit`
+================================================================================
diff --git a/test/files/presentation/visibility/Test.scala b/test/files/presentation/visibility/Test.scala
new file mode 100644
index 0000000..bec1131
--- /dev/null
+++ b/test/files/presentation/visibility/Test.scala
@@ -0,0 +1,3 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest
\ No newline at end of file
diff --git a/test/files/presentation/visibility/src/Completions.scala b/test/files/presentation/visibility/src/Completions.scala
new file mode 100644
index 0000000..098b98a
--- /dev/null
+++ b/test/files/presentation/visibility/src/Completions.scala
@@ -0,0 +1,40 @@
+package accessibility {
+
+ class Foo {
+ private def secretPrivate(): Unit = ()
+ private[this] def secretPrivateThis(): Unit = ()
+
+ protected def secretProtected(): Unit
+
+ protected[accessibility] def secretProtectedInPackage(): Unit
+
+ def secretPublic(): Unit
+
+ def someTests(other: Foo) {
+ other./*!*/secretPrivate // should be all but scretThis
+
+ this./*!*/secretProtected // should hit five completions
+ }
+ }
+
+ class AccessibilityChecks extends Foo {
+ def someTests {
+ this./*!*/ // should not list secretPrivate*
+ }
+ }
+
+ class UnrelatedClass {
+ def someTests(foo: Foo) {
+ foo./*!*/ // should list public and protected[accessiblity]
+ }
+ }
+
+}
+
+package other {
+ class SomeChecsk {
+ def foo(o: accessibility.Foo) {
+ o./*!*/ // should only match secretPublic
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/res/bug597.res b/test/files/res/bug597.res
deleted file mode 100644
index 0db355c..0000000
--- a/test/files/res/bug597.res
+++ /dev/null
@@ -1,2 +0,0 @@
-bug597/Test.scala
-bug597/Main.scala
diff --git a/test/files/res/bug687.check b/test/files/res/bug687.check
deleted file mode 100644
index a905c3f..0000000
--- a/test/files/res/bug687.check
+++ /dev/null
@@ -1,8 +0,0 @@
-
-nsc>
-nsc> bug687/QueryB.scala:3: error: name clash between defined and inherited member:
-method equals:(o: java.lang.Object)Boolean and
-method equals:(x$1: Any)Boolean in class Any
-have same type after erasure: (o: java.lang.Object)Boolean
- override def equals(o : Object) = false;
- ^
diff --git a/test/files/res/bug687.res b/test/files/res/bug687.res
deleted file mode 100644
index ba8ba76..0000000
--- a/test/files/res/bug687.res
+++ /dev/null
@@ -1,3 +0,0 @@
-bug687/QueryA.scala
-bug687/QueryB.scala
-bug687/QueryA.scala
diff --git a/test/files/res/bug687/QueryA.scala b/test/files/res/bug687/QueryA.scala
deleted file mode 100644
index 2899f7e..0000000
--- a/test/files/res/bug687/QueryA.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-package bug687;
-trait Query {
- override def equals(o : Any) = false;
-}
diff --git a/test/files/res/bug687/QueryB.scala b/test/files/res/bug687/QueryB.scala
deleted file mode 100644
index 3e1e0b7..0000000
--- a/test/files/res/bug687/QueryB.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-package bug687;
-trait Query {
- override def equals(o : Object) = false;
-}
diff --git a/test/files/res/bug722.res b/test/files/res/bug722.res
deleted file mode 100644
index dbc785c..0000000
--- a/test/files/res/bug722.res
+++ /dev/null
@@ -1,2 +0,0 @@
-bug722/Parser.scala bug722/IfElse.scala
-bug722/Parser.scala bug722/IfElse.scala
diff --git a/test/files/res/bug722/IfElse.scala b/test/files/res/bug722/IfElse.scala
deleted file mode 100644
index 1515ecf..0000000
--- a/test/files/res/bug722/IfElse.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-package bug722;
-trait IfEse extends ScanBased {
- object condition extends WhitespaceLink;
-}
diff --git a/test/files/res/bug722/Parser.scala b/test/files/res/bug722/Parser.scala
deleted file mode 100644
index c4b3037..0000000
--- a/test/files/res/bug722/Parser.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-
-package bug722;
-trait Parser {
- trait Link {
- def foo() = {}
- }
-}
-
diff --git a/test/files/res/bug722/ScanBased.scala b/test/files/res/bug722/ScanBased.scala
deleted file mode 100644
index 4793a04..0000000
--- a/test/files/res/bug722/ScanBased.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-package bug722;
-trait ScanBased extends Parser {
- trait AdjacentLink extends Link {
- override def foo() = super.foo;
- }
- trait WhitespaceLink extends AdjacentLink {
- override def foo() = super.foo;
- }
-}
-
diff --git a/test/files/res/bug735.res b/test/files/res/bug735.res
deleted file mode 100644
index d477e6f..0000000
--- a/test/files/res/bug735.res
+++ /dev/null
@@ -1,2 +0,0 @@
-bug735/ScalaTyper.scala
-bug735/ScalaTyper.scala
diff --git a/test/files/res/bug735/ScalaExpressions.scala b/test/files/res/bug735/ScalaExpressions.scala
deleted file mode 100644
index eb00f49..0000000
--- a/test/files/res/bug735/ScalaExpressions.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-package bug735;
-trait ScalaExpressions {
- trait ExpressionFactory {
- def foo = 10;
- def bar : Int;
- }
- val values : ValueFactory;
- trait ValueFactory extends ExpressionFactory {
- def bar = 42;
- }
-}
diff --git a/test/files/res/bug735/ScalaTyper.scala b/test/files/res/bug735/ScalaTyper.scala
deleted file mode 100644
index 75e7a18..0000000
--- a/test/files/res/bug735/ScalaTyper.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-
-package bug735;
-trait ScalaTyper extends ScalaExpressions {
- val values = new ValueFactory {}
-}
diff --git a/test/files/res/bug743.res b/test/files/res/bug743.res
deleted file mode 100644
index f1dd246..0000000
--- a/test/files/res/bug743.res
+++ /dev/null
@@ -1,2 +0,0 @@
-bug743/ParserXXX.scala
-bug743/BracesXXX.scala
diff --git a/test/files/res/bug743/BracesXXX.scala b/test/files/res/bug743/BracesXXX.scala
deleted file mode 100644
index 545618d..0000000
--- a/test/files/res/bug743/BracesXXX.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-package bug743;
-trait BracesXXX extends ParserXXX {
- trait Matchable extends IsLinked {
- def foo : NodeImpl = null;
- }
-}
diff --git a/test/files/res/bug743/ParserXXX.scala b/test/files/res/bug743/ParserXXX.scala
deleted file mode 100644
index 449dd71..0000000
--- a/test/files/res/bug743/ParserXXX.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-package bug743;
-trait ParserXXX {
- val foo = null;
- trait NodeImpl {
- trait Link extends ParserXXX.this.Link {
- val from = null;
- }
- }
- trait Link {
- val to0 = null;
- }
- trait IsLinked extends NodeImpl {
- trait Link extends super.Link;
- }
-}
diff --git a/test/files/res/bug785.res b/test/files/res/bug785.res
deleted file mode 100644
index 7e5da1c..0000000
--- a/test/files/res/bug785.res
+++ /dev/null
@@ -1,2 +0,0 @@
-bug785/ScalaTrees.scala
-bug785/ScalaTrees.scala
diff --git a/test/files/res/bug785/ScalaNewTyper.scala b/test/files/res/bug785/ScalaNewTyper.scala
deleted file mode 100644
index 2d1460b..0000000
--- a/test/files/res/bug785/ScalaNewTyper.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-package bug785;
-trait ScalaNewTyper {
- private var typed : String = null;
- trait HasSymbol {
- protected def foo() : Unit = {}
- }
- trait HasArgsTypeParametersImpl extends HasSymbol {
- private var argss : List[List[String]] = Nil;
- }
-}
diff --git a/test/files/res/bug785/ScalaTrees.scala b/test/files/res/bug785/ScalaTrees.scala
deleted file mode 100644
index ade58e6..0000000
--- a/test/files/res/bug785/ScalaTrees.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package bug785;
-trait ScalaTrees extends ScalaNewTyper {
- trait TraitClassImpl extends HasArgsTypeParametersImpl {
- argss(null);
- protected def argss(tree : String) : List[List[String]] = Nil;
- }
-}
diff --git a/test/files/res/bug831.res b/test/files/res/bug831.res
deleted file mode 100644
index 0cc6c39..0000000
--- a/test/files/res/bug831.res
+++ /dev/null
@@ -1,2 +0,0 @@
-bug831/NewScalaTestXXX.scala
-bug831/NewScalaTestXXX.scala
diff --git a/test/files/res/bug831/NewScalaParserXXX.scala b/test/files/res/bug831/NewScalaParserXXX.scala
deleted file mode 100644
index 48f78a7..0000000
--- a/test/files/res/bug831/NewScalaParserXXX.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-package bug831;
-
-trait ScalaNodeScannerXXX {
- type Node <: NodeImpl;
- trait NodeImpl { def self : Node; }
- type Unfixed <: Node with UnfixedImpl;
- trait UnfixedImpl extends NodeImpl { def self : Unfixed; }
-}
-//def f = { Console.println("hello"); 42; }
-//for (val ns <-n; val i <- 0.until(ns)) yield f;
-
-
-trait NewScalaScannerXXX extends ScalaNodeScannerXXX {
- type Unfixed <: Node with UnfixedImpl;
- trait UnfixedImpl extends super.UnfixedImpl with NodeImpl;
- type Statement <: Unfixed with StatementImpl;
- trait StatementImpl extends UnfixedImpl { def self : Statement; }
- type NewLine <: Statement with NewLineImpl;
- trait NewLineImpl extends StatementImpl {
- def self : NewLine;
- def isActive : Boolean = true;
- }
- object ArrowMode extends Enumeration { val Def, Case, Expr = Value }
-}
-
-trait ScalaPrecedenceXXX extends NewScalaScannerXXX {
- type NewLine <: Statement with NewLineImpl;
- trait NewLineImpl extends super.NewLineImpl with StatementImpl {
- def self : NewLine;
- override def isActive = super[NewLineImpl].isActive;
- }
-}
-trait NewScalaParserXXX extends NewScalaScannerXXX with ScalaPrecedenceXXX {
- type NewLine <: Statement with NewLineImpl;
- trait MyNewLine extends super[NewScalaScannerXXX].NewLineImpl;
- trait NewLineImpl extends MyNewLine with
- super[ScalaPrecedenceXXX].NewLineImpl with
- StatementImpl {
- def self : NewLine;
- override def isActive = super[MyNewLine].isActive;
- }
-}
-
diff --git a/test/files/res/bug831/NewScalaTestXXX.scala b/test/files/res/bug831/NewScalaTestXXX.scala
deleted file mode 100644
index a1dafa8..0000000
--- a/test/files/res/bug831/NewScalaTestXXX.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-package bug831;
-abstract class NewScalaTestXXX extends NewScalaParserXXX;
diff --git a/test/files/res/bug597.check b/test/files/res/t5167.check
similarity index 100%
copy from test/files/res/bug597.check
copy to test/files/res/t5167.check
diff --git a/test/files/res/t5167.res b/test/files/res/t5167.res
new file mode 100644
index 0000000..a485cbe
--- /dev/null
+++ b/test/files/res/t5167.res
@@ -0,0 +1,2 @@
+t5167/t5167_1.scala
+t5167/t5167_2.scala
\ No newline at end of file
diff --git a/test/files/res/t5167/t5167_1.scala b/test/files/res/t5167/t5167_1.scala
new file mode 100644
index 0000000..ed28243
--- /dev/null
+++ b/test/files/res/t5167/t5167_1.scala
@@ -0,0 +1,12 @@
+package compilerbug
+
+trait SadTrait {
+ def buggyMethod[T](argWithDefault1: Int = 0)(argWithDefault2: String = "default") {
+ for (i <- 0 to 1) {
+ val x = argWithDefault1
+ val y = argWithDefault2
+ }
+ }
+}
+
+object SadObject extends SadTrait
diff --git a/test/files/res/t5167/t5167_2.scala b/test/files/res/t5167/t5167_2.scala
new file mode 100644
index 0000000..5aa56ef
--- /dev/null
+++ b/test/files/res/t5167/t5167_2.scala
@@ -0,0 +1,7 @@
+package compilerbug
+
+class TestClass {
+ def repro() {
+ SadObject.buggyMethod[Int]()()
+ }
+}
diff --git a/test/files/res/bug722.check b/test/files/res/t5489.check
similarity index 100%
rename from test/files/res/bug722.check
rename to test/files/res/t5489.check
diff --git a/test/files/res/t5489.res b/test/files/res/t5489.res
new file mode 100644
index 0000000..5b787b7
--- /dev/null
+++ b/test/files/res/t5489.res
@@ -0,0 +1,2 @@
+t5489/t5489.scala
+t5489/t5489.scala
\ No newline at end of file
diff --git a/test/files/res/t5489/t5489.scala b/test/files/res/t5489/t5489.scala
new file mode 100644
index 0000000..f821a1a
--- /dev/null
+++ b/test/files/res/t5489/t5489.scala
@@ -0,0 +1,14 @@
+package repro
+
+trait HasString {
+ def blerg(): String
+}
+
+class CausesProblems {
+ def problems = (
+ if ("don't optimize me away!".length == 0)
+ new HasString { def blerg() = "wut" }
+ else
+ new HasString { def blerg() = "okay" }
+ ).blerg()
+}
diff --git a/test/files/res/bug735.check b/test/files/res/t597.check
similarity index 100%
rename from test/files/res/bug735.check
rename to test/files/res/t597.check
diff --git a/test/files/res/t597.res b/test/files/res/t597.res
new file mode 100644
index 0000000..1081b59
--- /dev/null
+++ b/test/files/res/t597.res
@@ -0,0 +1,2 @@
+t597/Test.scala
+t597/Main.scala
diff --git a/test/files/res/bug597/Main.scala b/test/files/res/t597/Main.scala
similarity index 100%
rename from test/files/res/bug597/Main.scala
rename to test/files/res/t597/Main.scala
diff --git a/test/files/res/bug597/Test.scala b/test/files/res/t597/Test.scala
similarity index 100%
rename from test/files/res/bug597/Test.scala
rename to test/files/res/t597/Test.scala
diff --git a/test/files/res/t687.check b/test/files/res/t687.check
new file mode 100644
index 0000000..b741b26
--- /dev/null
+++ b/test/files/res/t687.check
@@ -0,0 +1,8 @@
+
+nsc>
+nsc> t687/QueryB.scala:3: error: name clash between defined and inherited member:
+method equals:(o: Object)Boolean and
+method equals:(x$1: Any)Boolean in class Any
+have same type after erasure: (o: Object)Boolean
+ override def equals(o : Object) = false;
+ ^
diff --git a/test/files/res/t687.res b/test/files/res/t687.res
new file mode 100644
index 0000000..2222979
--- /dev/null
+++ b/test/files/res/t687.res
@@ -0,0 +1,3 @@
+t687/QueryA.scala
+t687/QueryB.scala
+t687/QueryA.scala
diff --git a/test/files/res/t687/QueryA.scala b/test/files/res/t687/QueryA.scala
new file mode 100644
index 0000000..72365c7
--- /dev/null
+++ b/test/files/res/t687/QueryA.scala
@@ -0,0 +1,4 @@
+package t687;
+trait Query {
+ override def equals(o : Any) = false;
+}
diff --git a/test/files/res/t687/QueryB.scala b/test/files/res/t687/QueryB.scala
new file mode 100644
index 0000000..8f6f2d9
--- /dev/null
+++ b/test/files/res/t687/QueryB.scala
@@ -0,0 +1,4 @@
+package t687;
+trait Query {
+ override def equals(o : Object) = false;
+}
diff --git a/test/files/res/bug743.check b/test/files/res/t722.check
similarity index 100%
rename from test/files/res/bug743.check
rename to test/files/res/t722.check
diff --git a/test/files/res/t722.res b/test/files/res/t722.res
new file mode 100644
index 0000000..e2873a5
--- /dev/null
+++ b/test/files/res/t722.res
@@ -0,0 +1,2 @@
+t722/Parser.scala t722/IfElse.scala
+t722/Parser.scala t722/IfElse.scala
diff --git a/test/files/res/t722/IfElse.scala b/test/files/res/t722/IfElse.scala
new file mode 100644
index 0000000..c0128de
--- /dev/null
+++ b/test/files/res/t722/IfElse.scala
@@ -0,0 +1,4 @@
+package t722;
+trait IfEse extends ScanBased {
+ object condition extends WhitespaceLink;
+}
diff --git a/test/files/res/t722/Parser.scala b/test/files/res/t722/Parser.scala
new file mode 100644
index 0000000..9f54358
--- /dev/null
+++ b/test/files/res/t722/Parser.scala
@@ -0,0 +1,8 @@
+
+package t722;
+trait Parser {
+ trait Link {
+ def foo() = {}
+ }
+}
+
diff --git a/test/files/res/t722/ScanBased.scala b/test/files/res/t722/ScanBased.scala
new file mode 100644
index 0000000..8e55b80
--- /dev/null
+++ b/test/files/res/t722/ScanBased.scala
@@ -0,0 +1,10 @@
+package t722;
+trait ScanBased extends Parser {
+ trait AdjacentLink extends Link {
+ override def foo() = super.foo;
+ }
+ trait WhitespaceLink extends AdjacentLink {
+ override def foo() = super.foo;
+ }
+}
+
diff --git a/test/files/res/bug785.check b/test/files/res/t735.check
similarity index 100%
rename from test/files/res/bug785.check
rename to test/files/res/t735.check
diff --git a/test/files/res/t735.res b/test/files/res/t735.res
new file mode 100644
index 0000000..6ef42ff
--- /dev/null
+++ b/test/files/res/t735.res
@@ -0,0 +1,2 @@
+t735/ScalaTyper.scala
+t735/ScalaTyper.scala
diff --git a/test/files/res/t735/ScalaExpressions.scala b/test/files/res/t735/ScalaExpressions.scala
new file mode 100644
index 0000000..605ad51
--- /dev/null
+++ b/test/files/res/t735/ScalaExpressions.scala
@@ -0,0 +1,11 @@
+package t735;
+trait ScalaExpressions {
+ trait ExpressionFactory {
+ def foo = 10;
+ def bar : Int;
+ }
+ val values : ValueFactory;
+ trait ValueFactory extends ExpressionFactory {
+ def bar = 42;
+ }
+}
diff --git a/test/files/res/t735/ScalaTyper.scala b/test/files/res/t735/ScalaTyper.scala
new file mode 100644
index 0000000..0369c5f
--- /dev/null
+++ b/test/files/res/t735/ScalaTyper.scala
@@ -0,0 +1,5 @@
+
+package t735;
+trait ScalaTyper extends ScalaExpressions {
+ val values = new ValueFactory {}
+}
diff --git a/test/files/res/bug831.check b/test/files/res/t743.check
similarity index 100%
rename from test/files/res/bug831.check
rename to test/files/res/t743.check
diff --git a/test/files/res/t743.res b/test/files/res/t743.res
new file mode 100644
index 0000000..7772a4b
--- /dev/null
+++ b/test/files/res/t743.res
@@ -0,0 +1,2 @@
+t743/ParserXXX.scala
+t743/BracesXXX.scala
diff --git a/test/files/res/t743/BracesXXX.scala b/test/files/res/t743/BracesXXX.scala
new file mode 100644
index 0000000..d3f6e28
--- /dev/null
+++ b/test/files/res/t743/BracesXXX.scala
@@ -0,0 +1,6 @@
+package t743;
+trait BracesXXX extends ParserXXX {
+ trait Matchable extends IsLinked {
+ def foo : NodeImpl = null;
+ }
+}
diff --git a/test/files/res/t743/ParserXXX.scala b/test/files/res/t743/ParserXXX.scala
new file mode 100644
index 0000000..fd584b9
--- /dev/null
+++ b/test/files/res/t743/ParserXXX.scala
@@ -0,0 +1,15 @@
+package t743;
+trait ParserXXX {
+ val foo = null;
+ trait NodeImpl {
+ trait Link extends ParserXXX.this.Link {
+ val from = null;
+ }
+ }
+ trait Link {
+ val to0 = null;
+ }
+ trait IsLinked extends NodeImpl {
+ trait Link extends super.Link;
+ }
+}
diff --git a/test/files/res/bug597.check b/test/files/res/t785.check
similarity index 100%
copy from test/files/res/bug597.check
copy to test/files/res/t785.check
diff --git a/test/files/res/t785.res b/test/files/res/t785.res
new file mode 100644
index 0000000..cfac559
--- /dev/null
+++ b/test/files/res/t785.res
@@ -0,0 +1,2 @@
+t785/ScalaTrees.scala
+t785/ScalaTrees.scala
diff --git a/test/files/res/t785/ScalaNewTyper.scala b/test/files/res/t785/ScalaNewTyper.scala
new file mode 100644
index 0000000..919e3b8
--- /dev/null
+++ b/test/files/res/t785/ScalaNewTyper.scala
@@ -0,0 +1,10 @@
+package t785;
+trait ScalaNewTyper {
+ private var typed : String = null;
+ trait HasSymbol {
+ protected def foo() : Unit = {}
+ }
+ trait HasArgsTypeParametersImpl extends HasSymbol {
+ private var argss : List[List[String]] = Nil;
+ }
+}
diff --git a/test/files/res/t785/ScalaTrees.scala b/test/files/res/t785/ScalaTrees.scala
new file mode 100644
index 0000000..fdf32b5
--- /dev/null
+++ b/test/files/res/t785/ScalaTrees.scala
@@ -0,0 +1,7 @@
+package t785;
+trait ScalaTrees extends ScalaNewTyper {
+ trait TraitClassImpl extends HasArgsTypeParametersImpl {
+ argss(null);
+ protected def argss(tree : String) : List[List[String]] = Nil;
+ }
+}
diff --git a/test/files/res/bug597.check b/test/files/res/t831.check
similarity index 100%
rename from test/files/res/bug597.check
rename to test/files/res/t831.check
diff --git a/test/files/res/t831.res b/test/files/res/t831.res
new file mode 100644
index 0000000..9b9bd69
--- /dev/null
+++ b/test/files/res/t831.res
@@ -0,0 +1,2 @@
+t831/NewScalaTestXXX.scala
+t831/NewScalaTestXXX.scala
diff --git a/test/files/res/t831/NewScalaParserXXX.scala b/test/files/res/t831/NewScalaParserXXX.scala
new file mode 100644
index 0000000..ed9b9d3
--- /dev/null
+++ b/test/files/res/t831/NewScalaParserXXX.scala
@@ -0,0 +1,43 @@
+package t831;
+
+trait ScalaNodeScannerXXX {
+ type Node <: NodeImpl;
+ trait NodeImpl { def self : Node; }
+ type Unfixed <: Node with UnfixedImpl;
+ trait UnfixedImpl extends NodeImpl { def self : Unfixed; }
+}
+//def f = { Console.println("hello"); 42; }
+//for (ns <-n; val i <- 0.until(ns)) yield f;
+
+
+trait NewScalaScannerXXX extends ScalaNodeScannerXXX {
+ type Unfixed <: Node with UnfixedImpl;
+ trait UnfixedImpl extends super.UnfixedImpl with NodeImpl;
+ type Statement <: Unfixed with StatementImpl;
+ trait StatementImpl extends UnfixedImpl { def self : Statement; }
+ type NewLine <: Statement with NewLineImpl;
+ trait NewLineImpl extends StatementImpl {
+ def self : NewLine;
+ def isActive : Boolean = true;
+ }
+ object ArrowMode extends Enumeration { val Def, Case, Expr = Value }
+}
+
+trait ScalaPrecedenceXXX extends NewScalaScannerXXX {
+ type NewLine <: Statement with NewLineImpl;
+ trait NewLineImpl extends super.NewLineImpl with StatementImpl {
+ def self : NewLine;
+ override def isActive = super[NewLineImpl].isActive;
+ }
+}
+trait NewScalaParserXXX extends NewScalaScannerXXX with ScalaPrecedenceXXX {
+ type NewLine <: Statement with NewLineImpl;
+ trait MyNewLine extends super[NewScalaScannerXXX].NewLineImpl;
+ trait NewLineImpl extends MyNewLine with
+ super[ScalaPrecedenceXXX].NewLineImpl with
+ StatementImpl {
+ def self : NewLine;
+ override def isActive = super[MyNewLine].isActive;
+ }
+}
+
diff --git a/test/files/res/t831/NewScalaTestXXX.scala b/test/files/res/t831/NewScalaTestXXX.scala
new file mode 100644
index 0000000..24d864f
--- /dev/null
+++ b/test/files/res/t831/NewScalaTestXXX.scala
@@ -0,0 +1,2 @@
+package t831;
+abstract class NewScalaTestXXX extends NewScalaParserXXX;
diff --git a/test/files/run/Course-2002-01-msil.check b/test/files/run/Course-2002-01-msil.check
deleted file mode 100644
index ca9d7ac..0000000
--- a/test/files/run/Course-2002-01-msil.check
+++ /dev/null
@@ -1,34 +0,0 @@
-232
-667
-11
-10
-62.8318
-62.8318
-62.8318
-4
-81
-256
-25
-1
-737
-1
-0
-1
-76
-1.41421568627451
-1.73214285714286
-2.00000009292229
-1.41421568627451
-1.73214285714286
-2.00000009292229
-1.41421568627451
-1.73214285714286
-2.00000009292229
-sqrt(2) = 1.41421356237469
-sqrt(2) = 1.41421356237469
-cbrt(2) = 1.25992105001777
-1
-1 1
-1 2 1
-1 3 3 1
-1 4 6 4 1
diff --git a/test/files/run/Course-2002-02-msil.check b/test/files/run/Course-2002-02-msil.check
deleted file mode 100644
index 12ac0b1..0000000
--- a/test/files/run/Course-2002-02-msil.check
+++ /dev/null
@@ -1,187 +0,0 @@
-7
-120
-
-10
-100
-2.08333333333333
-3025.76877140318
-pi = 3.16597927284322
-
-10
-100
-2.08333333333333
-3025.76877140318
-pi = 3.16597927284322
-
-10
-100
-2.08333333333333
-3025.76877140318
-pi = 3.16597927284322
-
-10
-100
-2.08333333333333
-3025.76877140318
-pi = 3.16597927284322
-
-10
-100
-2.08333333333333
-3025.76877140318
-pi = 3.16597927284322
-
-10
-100
-2.08333333333333
-3025.76877140318
-pi = 3.16597927284322
-
-10
-100
-2.08333333333333
-3025.76877140318
-pi = 3.16597927284322
-
-pi = 3.18110488557771
-pi = 3.18110488557771
-
-10
-100
-2.08333333333333
-3025.76877140318
-pi = 3.16597927284322
-pi = 3.18110488557771
-pi = 3.18110488557771
-
-1.5
-1.41666666666667
-1.41421568627451
-1.41421356237469
-sqrt(2) = 1.41421356237469
-
-1.5
-1.41666666666667
-1.41421568627451
-1.41421356237469
-sqrt(2) = 1.41421356237469
-
-1 + 2 + .. + 5 = 15
-1 * 2 * .. * 5 = 120
-
-1^2 + 2^2 + .. + 5^2 = 55
-1^2 * 2^2 * .. * 5^2 = 14400
-
-factorial(0) = 1
-factorial(1) = 1
-factorial(2) = 2
-factorial(3) = 6
-factorial(4) = 24
-factorial(5) = 120
-
-1 + 2 + .. + 5 = 15
-1 * 2 * .. * 5 = 120
-
-1^2 + 2^2 + .. + 5^2 = 55
-1^2 * 2^2 * .. * 5^2 = 14400
-
-factorial(0) = 1
-factorial(1) = 1
-factorial(2) = 2
-factorial(3) = 6
-factorial(4) = 24
-factorial(5) = 120
-
-1 + 2 + .. + 5 = 15
-1 * 2 * .. * 5 = 120
-
-1^2 + 2^2 + .. + 5^2 = 55
-1^2 * 2^2 * .. * 5^2 = 14400
-
-factorial(0) = 1
-factorial(1) = 1
-factorial(2) = 2
-factorial(3) = 6
-factorial(4) = 24
-factorial(5) = 120
-
-fib(0) = 0
-fib(1) = 1
-fib(2) = 1
-fib(3) = 2
-fib(4) = 3
-fib(5) = 5
-fib(6) = 8
-fib(7) = 13
-fib(8) = 21
-fib(9) = 34
-fib(0) = 0
-fib(1) = 1
-fib(2) = 1
-fib(3) = 2
-fib(4) = 3
-fib(5) = 5
-fib(6) = 8
-fib(7) = 13
-fib(8) = 21
-fib(9) = 34
-power(0,0) = 1
-power(0,1) = 0
-power(0,2) = 0
-power(0,3) = 0
-power(0,4) = 0
-power(0,5) = 0
-power(0,6) = 0
-power(0,7) = 0
-power(0,8) = 0
-
-power(1,0) = 1
-power(1,1) = 1
-power(1,2) = 1
-power(1,3) = 1
-power(1,4) = 1
-power(1,5) = 1
-power(1,6) = 1
-power(1,7) = 1
-power(1,8) = 1
-
-power(2,0) = 1
-power(2,1) = 2
-power(2,2) = 4
-power(2,3) = 8
-power(2,4) = 16
-power(2,5) = 32
-power(2,6) = 64
-power(2,7) = 128
-power(2,8) = 256
-
-power(3,0) = 1
-power(3,1) = 3
-power(3,2) = 9
-power(3,3) = 27
-power(3,4) = 81
-power(3,5) = 243
-power(3,6) = 729
-power(3,7) = 2187
-power(3,8) = 6561
-
-power(4,0) = 1
-power(4,1) = 4
-power(4,2) = 16
-power(4,3) = 64
-power(4,4) = 256
-power(4,5) = 1024
-power(4,6) = 4096
-power(4,7) = 16384
-power(4,8) = 65536
-
-power(5,0) = 1
-power(5,1) = 5
-power(5,2) = 25
-power(5,3) = 125
-power(5,4) = 625
-power(5,5) = 3125
-power(5,6) = 15625
-power(5,7) = 78125
-power(5,8) = 390625
-
diff --git a/test/files/run/Course-2002-03-msil.check b/test/files/run/Course-2002-03-msil.check
deleted file mode 100644
index 3556893..0000000
--- a/test/files/run/Course-2002-03-msil.check
+++ /dev/null
@@ -1,67 +0,0 @@
-1
-2
-1/2
-5/6
-
-1/3
-5/7
-3/2
-66/42
-
-1/3
-5/7
-3/2
-11/7
-
-11/7
-7/11
-11/7
-11/7
-
-13/36
-
-False
-True
-True
-False
-
-set0 = []
-set1 = [1]
-set2 = [1,2]
-set3 = [1,2,3]
-set4 = [1,2,3,4]
-
-set2 contains the following elements:
-1
-2
-
-set3 contains the following elements:
-1
-2
-3
-
-set4 contains the following elements:
-1
-2
-3
-4
-
-2 <- set2: True
-3 <- set2: False
-
-setx = [-10,-1,0,3,5,21]
-setx * 2 = [-20,-2,0,6,10,42]
-
-setx = [-10,-1,0,3,5,21]
-sety = [-9,-5,-1,0,3,7,8]
-setx & sety = [-1,0,3]
-sety & setx = [-1,0,3]
-setx > 0 = [3,5,21]
-sety > 0 = [3,7,8]
-setx & sety = [-1,0,3]
-sety & setx = [-1,0,3]
-
-1/1
-1/1
-1/1
-
diff --git a/test/files/run/Course-2002-04-msil.check b/test/files/run/Course-2002-04-msil.check
deleted file mode 100644
index fc6ad96..0000000
--- a/test/files/run/Course-2002-04-msil.check
+++ /dev/null
@@ -1,64 +0,0 @@
-list0 = List(6, 3, 1, 8, 7, 1, 2, 5, 8, 4, 3, 4, 8)
-list1 = List(1, 1, 2, 3, 3, 4, 4, 5, 6, 7, 8, 8, 8)
-list2 = List(1, 1, 2, 3, 3, 4, 4, 5, 6, 7, 8, 8, 8)
-list3 = List(1, 1, 2, 3, 3, 4, 4, 5, 6, 7, 8, 8, 8)
-list4 = List(1, 1, 2, 3, 3, 4, 4, 5, 6, 7, 8, 8, 8)
-list5 = List(8, 8, 8, 7, 6, 5, 4, 4, 3, 3, 2, 1, 1)
-list6 = List(8, 8, 8, 7, 6, 5, 4, 4, 3, 3, 2, 1, 1)
-
-list0: List() -> List()
-list1: List(0) -> List(0)
-list2: List(0, 1) -> List(0, 1)
-list3: List(1, 0) -> List(0, 1)
-list4: List(0, 1, 2) -> List(0, 1, 2)
-list5: List(1, 0, 2) -> List(0, 1, 2)
-list6: List(0, 1, 2) -> List(0, 1, 2)
-list7: List(1, 0, 2) -> List(0, 1, 2)
-list8: List(2, 0, 1) -> List(0, 1, 2)
-list9: List(2, 1, 0) -> List(0, 1, 2)
-listA: List(6, 3, 1, 8, 7, 1, 2, 5, 8, 4) -> List(1, 1, 2, 3, 4, 5, 6, 7, 8, 8)
-
-f(x) = 5x^3+7x^2+5x+9
-f(0) = 9
-f(1) = 26
-f(2) = 87
-f(3) = 222
-
-v1 = List(2, 3, 4)
-v2 = List(6, 7, 8)
-
-id = List(List(1, 0, 0), List(0, 1, 0), List(0, 0, 1))
-m1 = List(List(2, 0, 0), List(0, 2, 0), List(0, 0, 2))
-m2 = List(List(1, 2, 3), List(4, 5, 6), List(7, 8, 9))
-
-v1 * v1 = 29
-v1 * v2 = 65
-v2 * v1 = 65
-v1 * v2 = 65
-
-id * v1 = List(2, 3, 4)
-m1 * v1 = List(4, 6, 8)
-m2 * v1 = List(20, 47, 74)
-
-trn(id) = List(List(1, 0, 0), List(0, 1, 0), List(0, 0, 1))
-trn(m1) = List(List(2, 0, 0), List(0, 2, 0), List(0, 0, 2))
-trn(m2) = List(List(1, 4, 7), List(2, 5, 8), List(3, 6, 9))
-
-List(v1) * id = List(List(2, 3, 4))
-List(v1) * m1 = List(List(4, 6, 8))
-List(v1) * m2 = List(List(42, 51, 60))
-
-id * List(v1) = List(List(2, 3, 4), List(0, 0, 0), List(0, 0, 0))
-m1 * List(v1) = List(List(4, 6, 8), List(0, 0, 0), List(0, 0, 0))
-m2 * List(v1) = List(List(2, 3, 4), List(8, 12, 16), List(14, 21, 28))
-
-id * id = List(List(1, 0, 0), List(0, 1, 0), List(0, 0, 1))
-id * m1 = List(List(2, 0, 0), List(0, 2, 0), List(0, 0, 2))
-m1 * id = List(List(2, 0, 0), List(0, 2, 0), List(0, 0, 2))
-m1 * m1 = List(List(4, 0, 0), List(0, 4, 0), List(0, 0, 4))
-id * m2 = List(List(1, 2, 3), List(4, 5, 6), List(7, 8, 9))
-m2 * id = List(List(1, 2, 3), List(4, 5, 6), List(7, 8, 9))
-m1 * m2 = List(List(2, 4, 6), List(8, 10, 12), List(14, 16, 18))
-m2 * m1 = List(List(2, 4, 6), List(8, 10, 12), List(14, 16, 18))
-m2 * m2 = List(List(30, 36, 42), List(66, 81, 96), List(102, 126, 150))
-
diff --git a/test/files/run/Course-2002-08-msil.check b/test/files/run/Course-2002-08-msil.check
deleted file mode 100644
index c5b26c7..0000000
--- a/test/files/run/Course-2002-08-msil.check
+++ /dev/null
@@ -1,171 +0,0 @@
-x = abc
-count = 111
-x = hello
-count = 112
-
-account deposit 50 -> ()
-account withdraw 20 -> 30
-account withdraw 20 -> 10
-account withdraw 15 ->
-
-x deposit 30 -> ()
-y withdraw 20 ->
-
-x deposit 30 -> ()
-x withdraw 20 -> 10
-
-x deposit 30 -> ()
-y withdraw 20 -> 10
-
-2^0 = 1
-2^1 = 2
-2^2 = 4
-2^3 = 8
-
-2^0 = 1
-2^1 = 2
-2^2 = 4
-2^3 = 8
-
-1 2 3
-List(1,2,3)
-
-out 0 new-value = False
-*** simulation started ***
-out 1 new-value = True
-!0 = 1
-
-*** simulation started ***
-out 2 new-value = False
-!1 = 0
-
-out 2 new-value = False
-
-*** simulation started ***
-0 & 0 = 0
-
-*** simulation started ***
-0 & 1 = 0
-
-*** simulation started ***
-out 11 new-value = True
-out 11 new-value = False
-1 & 0 = 0
-
-*** simulation started ***
-out 14 new-value = True
-1 & 1 = 1
-
-out 14 new-value = False
-
-*** simulation started ***
-0 | 0 = 0
-
-*** simulation started ***
-out 24 new-value = True
-0 | 1 = 1
-
-*** simulation started ***
-1 | 0 = 1
-
-*** simulation started ***
-1 | 1 = 1
-
-sum 34 new-value = False
-carry 34 new-value = False
-
-*** simulation started ***
-0 + 0 = 0
-
-*** simulation started ***
-sum 47 new-value = True
-0 + 1 = 1
-
-*** simulation started ***
-carry 50 new-value = True
-carry 50 new-value = False
-sum 54 new-value = False
-sum 54 new-value = True
-1 + 0 = 1
-
-*** simulation started ***
-carry 57 new-value = True
-sum 61 new-value = False
-1 + 1 = 2
-
-sum 61 new-value = False
-carry 61 new-value = False
-
-*** simulation started ***
-0 + 0 + 0 = 0
-
-*** simulation started ***
-sum 82 new-value = True
-0 + 0 + 1 = 1
-
-*** simulation started ***
-sum 89 new-value = False
-carry 90 new-value = True
-sum 97 new-value = True
-carry 98 new-value = False
-0 + 1 + 0 = 1
-
-*** simulation started ***
-sum 113 new-value = False
-carry 114 new-value = True
-0 + 1 + 1 = 2
-
-*** simulation started ***
-sum 121 new-value = True
-carry 122 new-value = False
-sum 129 new-value = False
-sum 129 new-value = True
-1 + 0 + 0 = 1
-
-*** simulation started ***
-carry 137 new-value = True
-sum 144 new-value = False
-1 + 0 + 1 = 2
-
-*** simulation started ***
-carry 152 new-value = False
-sum 152 new-value = True
-sum 158 new-value = False
-carry 159 new-value = True
-1 + 1 + 0 = 2
-
-*** simulation started ***
-sum 173 new-value = True
-1 + 1 + 1 = 3
-
-in 0 new-value = False
-ctrl0 0 new-value = False
-ctrl1 0 new-value = False
-ctrl2 0 new-value = False
-out0 0 new-value = False
-out1 0 new-value = False
-out2 0 new-value = False
-out3 0 new-value = False
-out4 0 new-value = False
-out5 0 new-value = False
-out6 0 new-value = False
-out7 0 new-value = False
-in 0 new-value = True
-*** simulation started ***
-out0 10 new-value = True
-ctrl0 10 new-value = True
-*** simulation started ***
-out1 13 new-value = True
-out0 14 new-value = False
-ctrl1 14 new-value = True
-*** simulation started ***
-out3 20 new-value = True
-out1 21 new-value = False
-ctrl2 21 new-value = True
-*** simulation started ***
-out7 30 new-value = True
-out3 31 new-value = False
-ctrl0 31 new-value = False
-*** simulation started ***
-out7 34 new-value = False
-out6 35 new-value = True
diff --git a/test/files/run/Course-2002-08.scala b/test/files/run/Course-2002-08.scala
index 85b0bad..85a83e0 100644
--- a/test/files/run/Course-2002-08.scala
+++ b/test/files/run/Course-2002-08.scala
@@ -135,9 +135,9 @@ object M3 {
object M4 {
def test = {
- for (val i <- range(1, 4)) { Console.print(i + " ") };
+ for (i <- range(1, 4)) { Console.print(i + " ") };
Console.println;
- Console.println(for (val i <- range(1, 4)) yield i);
+ Console.println(for (i <- range(1, 4)) yield i);
Console.println;
}
}
@@ -561,14 +561,14 @@ class Main() extends CircuitSimulator() {
val outNum = 1 << n;
val in = new Wire();
- val ctrl = for (val x <- range(0,n)) yield { new Wire() };
- val out = for (val x <- range(0,outNum)) yield { new Wire() };
+ val ctrl = for (x <- range(0,n)) yield { new Wire() };
+ val out = for (x <- range(0,outNum)) yield { new Wire() };
demux(in, ctrl.reverse, out.reverse);
probe("in", in);
- for (val Pair(x,c) <- range(0,n) zip ctrl) { probe("ctrl" + x, c) }
- for (val Pair(x,o) <- range(0,outNum) zip out) { probe("out" + x, o) }
+ for (Pair(x,c) <- range(0,n) zip ctrl) { probe("ctrl" + x, c) }
+ for (Pair(x,o) <- range(0,outNum) zip out) { probe("out" + x, o) }
in.setSignal(true);
run;
diff --git a/test/files/run/Course-2002-09-msil.check b/test/files/run/Course-2002-09-msil.check
deleted file mode 100644
index c921361..0000000
--- a/test/files/run/Course-2002-09-msil.check
+++ /dev/null
@@ -1,50 +0,0 @@
-Probe: f = 32
-Probe: c = 0
-Probe: f = ?
-Probe: c = ?
-
-Probe: f = 212
-Probe: c = 100
-Probe: f = ?
-Probe: c = ?
-
-Probe: c = 0
-Probe: f = 32
-Probe: c = ?
-Probe: f = ?
-
-Probe: c = 100
-Probe: f = 212
-Probe: c = ?
-Probe: f = ?
-
-0 Celsius -> 32 Fahrenheits
-100 Celsius -> 212 Fahrenheits
-32 Fahrenheits -> 0 Celsius
-212 Fahrenheits -> 100 Celsius
-
-a = ?, b = ?, c = ? => ? * ? = ?
-a = 2, b = ?, c = ? => 2 * ? = ?
-a = ?, b = 3, c = ? => ? * 3 = ?
-a = ?, b = ?, c = 6 => ? * ? = 6
-a = 2, b = 3, c = ? => 2 * 3 = 6
-a = 2, b = ?, c = 6 => 2 * 3 = 6
-a = ?, b = 3, c = 6 => 2 * 3 = 6
-a = 2, b = 3, c = 6 => 2 * 3 = 6
-
-a = 0, b = ?, c = ? => 0 * ? = 0
-a = ?, b = 0, c = ? => ? * 0 = 0
-a = ?, b = ?, c = 0 => ? * ? = 0
-a = 0, b = 7, c = ? => 0 * 7 = 0
-a = 7, b = 0, c = ? => 7 * 0 = 0
-a = 0, b = 0, c = ? => 0 * 0 = 0
-a = 0, b = ?, c = 0 => 0 * ? = 0
-a = ?, b = 0, c = 0 => ? * 0 = 0
-a = 0, b = 7, c = 0 => 0 * 7 = 0
-a = 7, b = 0, c = 0 => 7 * 0 = 0
-a = 0, b = 0, c = 0 => 0 * 0 = 0
-
-a = 3, b = 4 => c = 5
-a = 3, c = 5 => b = 4
-b = 4, c = 5 => a = 3
-
diff --git a/test/files/run/Course-2002-10-msil.check b/test/files/run/Course-2002-10-msil.check
deleted file mode 100644
index bbd9414..0000000
--- a/test/files/run/Course-2002-10-msil.check
+++ /dev/null
@@ -1,46 +0,0 @@
-fib(0) = 0
-fib(1) = 1
-fib(2) = 1
-fib(3) = 2
-fib(4) = 3
-fib(5) = 5
-fib(6) = 8
-fib(7) = 13
-fib(8) = 21
-fib(9) = 34
-fib(10) = 55
-fib(11) = 89
-fib(12) = 144
-fib(13) = 233
-fib(14) = 377
-fib(15) = 610
-fib(16) = 987
-fib(17) = 1597
-fib(18) = 2584
-fib(19) = 4181
-
-pi(0) = 4 , 3.16666666666667 , 4
-pi(1) = 2.66666666666667 , 3.13333333333333 , 3.16666666666667
-pi(2) = 3.46666666666667 , 3.1452380952381 , 3.1421052631579
-pi(3) = 2.8952380952381 , 3.13968253968254 , 3.141599357319
-pi(4) = 3.33968253968254 , 3.14271284271284 , 3.14159271403378
-pi(5) = 2.97604617604618 , 3.14088134088134 , 3.14159265397529
-pi(6) = 3.28373848373848 , 3.14207181707182 , 3.14159265359118
-pi(7) = 3.01707181707182 , 3.14125482360776 , 3.14159265358978
-pi(8) = 3.25236593471888 , 3.1418396189294 , 3.14159265358979
-pi(9) = 3.0418396189294 , 3.1414067184965 , 3.14159265358979
-pi = 3.14159265358979 , 3.14159265358979 , 3.14159265358979
-
-ln(0) = 1 , 0.7 , 1
-ln(1) = 0.5 , 0.69047619047619 , 0.7
-ln(2) = 0.833333333333333 , 0.694444444444444 , 0.69327731092437
-ln(3) = 0.583333333333333 , 0.692424242424242 , 0.693148869332925
-ln(4) = 0.783333333333333 , 0.693589743589744 , 0.693147196073549
-ln(5) = 0.616666666666667 , 0.692857142857143 , 0.693147180663564
-ln(6) = 0.759523809523809 , 0.693347338935574 , 0.693147180560404
-ln(7) = 0.634523809523809 , 0.693003341687552 , 0.693147180559944
-ln(8) = 0.745634920634921 , 0.693253968253968 , 0.693147180559943
-ln(9) = 0.645634920634921 , 0.693065750674446 , 0.693147180559945
-ln = 0.693147180559945 , 0.693147180559945 , 0.693147180559945
-
-prime numbers: 2 3 5 7 11 13 17 19 23 29 31 37 41 43 47 53 59 61 67 71 73 79 83 89 97 101 103 107 109 113
diff --git a/test/files/run/Course-2002-10.scala b/test/files/run/Course-2002-10.scala
index e978bc8..4cfa1de 100644
--- a/test/files/run/Course-2002-10.scala
+++ b/test/files/run/Course-2002-10.scala
@@ -2,7 +2,7 @@
// Programmation IV - 2002 - Week 10
//############################################################################
-import Math.{Pi, log}
+import math.{Pi, log}
object M0 {
diff --git a/test/files/run/Course-2002-13.scala b/test/files/run/Course-2002-13.scala
index c016d41..c266af8 100644
--- a/test/files/run/Course-2002-13.scala
+++ b/test/files/run/Course-2002-13.scala
@@ -116,7 +116,7 @@ object Programs {
(lhs.tyvars ::: (rhs flatMap (t => t.tyvars))).distinct;
def newInstance = {
var s: Subst = List();
- for (val a <- tyvars) { s = Binding(a, newVar(a)) :: s }
+ for (a <- tyvars) { s = Binding(a, newVar(a)) :: s }
Clause(lhs map s, rhs map (t => t map s))
}
override def toString() =
@@ -141,9 +141,9 @@ object Programs {
if (solve1(qs, s).isEmpty) Stream.cons(s, Stream.empty)
else Stream.empty
case q :: query1 =>
- for (val clause <- list2stream(clauses);
- val s1 <- tryClause(clause.newInstance, q, s);
- val s2 <- solve1(query1, s1)) yield s2
+ for (clause <- list2stream(clauses);
+ s1 <- tryClause(clause.newInstance, q, s);
+ s2 <- solve1(query1, s1)) yield s2
}
def solve1(query: List[Term], s: Subst): Stream[Subst] = {
@@ -154,8 +154,7 @@ object Programs {
def tryClause(c: Clause, q: Term, s: Subst): Stream[Subst] = {
if (debug) Console.println("trying " + c);
- for (val s1 <- option2stream(unify(q, c.lhs, s));
- val s2 <- solve1(c.rhs, s1)) yield s2;
+ for (s1 <- option2stream(unify(q, c.lhs, s)); s2 <- solve1(c.rhs, s1)) yield s2;
}
solve1(query, List())
diff --git a/test/files/run/Meter.check b/test/files/run/Meter.check
new file mode 100644
index 0000000..b7e2eac
--- /dev/null
+++ b/test/files/run/Meter.check
@@ -0,0 +1,13 @@
+2.0
+4.0m
+false
+x.isInstanceOf[Meter]: true
+x.hashCode: 1072693248
+x == 1: false
+x == y: true
+a == b: true
+testing native arrays
+Array(1.0m, 2.0m)
+1.0m
+>>>1.0m<<< 1.0m
+>>>2.0m<<< 2.0m
diff --git a/test/files/run/Meter.scala b/test/files/run/Meter.scala
new file mode 100644
index 0000000..a10ad31
--- /dev/null
+++ b/test/files/run/Meter.scala
@@ -0,0 +1,109 @@
+package a {
+ abstract class BoxingConversions[Boxed, Unboxed] {
+ def box(x: Unboxed): Boxed
+ def unbox(x: Boxed): Unboxed
+ }
+
+ class Meter(val underlying: Double) extends AnyVal with _root_.b.Printable {
+ def + (other: Meter): Meter =
+ new Meter(this.underlying + other.underlying)
+ def / (other: Meter)(implicit dummy: Meter.MeterArg = null): Double = this.underlying / other.underlying
+ def / (factor: Double): Meter = new Meter(this.underlying / factor)
+ def < (other: Meter): Boolean = this.underlying < other.underlying
+ def toFoot: Foot = new Foot(this.underlying * 0.3048)
+ override def print = { Console.print(">>>"); super.print; proprint }
+ override def toString: String = underlying.toString+"m"
+ }
+
+ object Meter extends (Double => Meter) {
+
+ private[a] trait MeterArg
+
+ def apply(x: Double): Meter = new Meter(x)
+
+ implicit val boxings = new BoxingConversions[Meter, Double] {
+ def box(x: Double) = new Meter(x)
+ def unbox(m: Meter) = m.underlying
+ }
+ }
+
+ class Foot(val unbox: Double) extends AnyVal {
+ def + (other: Foot): Foot =
+ new Foot(this.unbox + other.unbox)
+ override def toString = unbox.toString+"ft"
+ }
+ object Foot {
+ implicit val boxings = new BoxingConversions[Foot, Double] {
+ def box(x: Double) = new Foot(x)
+ def unbox(m: Foot) = m.unbox
+ }
+ }
+
+}
+package b {
+ trait Printable extends Any {
+ def print: Unit = Console.print(this)
+ protected def proprint = Console.print("<<<")
+ }
+}
+import a._
+import _root_.b._
+object Test extends App {
+
+ {
+ val x: Meter = new Meter(1)
+ val a: Object = x.asInstanceOf[Object]
+ val y: Meter = a.asInstanceOf[Meter]
+
+ val u: Double = 1
+ val b: Object = u.asInstanceOf[Object]
+ val v: Double = b.asInstanceOf[Double]
+ }
+
+ val x = new Meter(1)
+ val y = x
+ println((x + x) / x)
+ println((x + x) / 0.5)
+ println((x < x).toString)
+ println("x.isInstanceOf[Meter]: "+x.isInstanceOf[Meter])
+
+
+ println("x.hashCode: "+x.hashCode)
+ println("x == 1: "+(x == 1))
+ println("x == y: "+(x == y))
+ assert(x.hashCode == (1.0).hashCode)
+
+ val a: Any = x
+ val b: Any = y
+ println("a == b: "+(a == b))
+
+ { println("testing native arrays")
+ val arr = Array(x, y + x)
+ println(arr.deep)
+ def foo[T <: Printable](x: Array[T]) {
+ for (i <- 0 until x.length) { x(i).print; println(" "+x(i)) }
+ }
+ val m = arr(0)
+ println(m)
+ foo(arr)
+ }
+ //
+ // { println("testing wrapped arrays")
+ // import collection.mutable.FlatArray
+ // val arr = FlatArray(x, y + x)
+ // println(arr)
+ // def foo(x: FlatArray[Meter]) {
+ // for (i <- 0 until x.length) { x(i).print; println(" "+x(i)) }
+ // }
+ // val m = arr(0)
+ // println(m)
+ // foo(arr)
+ // val ys: Seq[Meter] = arr map (_ + new Meter(1))
+ // println(ys)
+ // val zs = arr map (_ / Meter(1))
+ // println(zs)
+ // val fs = arr map (_.toFoot)
+ // println(fs)
+ // }
+
+}
diff --git a/test/files/run/MeterCaseClass.check b/test/files/run/MeterCaseClass.check
new file mode 100644
index 0000000..2528753
--- /dev/null
+++ b/test/files/run/MeterCaseClass.check
@@ -0,0 +1,13 @@
+2.0
+Meter(4.0)
+false
+x.isInstanceOf[Meter]: true
+x.hashCode: 1072693248
+x == 1: false
+x == y: true
+a == b: true
+testing native arrays
+Array(Meter(1.0), Meter(2.0))
+Meter(1.0)
+>>>Meter(1.0)<<< Meter(1.0)
+>>>Meter(2.0)<<< Meter(2.0)
diff --git a/test/files/run/MeterCaseClass.scala b/test/files/run/MeterCaseClass.scala
new file mode 100644
index 0000000..39d95c2
--- /dev/null
+++ b/test/files/run/MeterCaseClass.scala
@@ -0,0 +1,106 @@
+package a {
+ abstract class BoxingConversions[Boxed, Unboxed] {
+ def box(x: Unboxed): Boxed
+ def unbox(x: Boxed): Unboxed
+ }
+
+ case class Meter(underlying: Double) extends AnyVal with _root_.b.Printable {
+ def + (other: Meter): Meter =
+ new Meter(this.underlying + other.underlying)
+ def / (other: Meter)(implicit dummy: Meter.MeterArg = null): Double = this.underlying / other.underlying
+ def / (factor: Double): Meter = new Meter(this.underlying / factor)
+ def < (other: Meter): Boolean = this.underlying < other.underlying
+ def toFoot: Foot = new Foot(this.underlying * 0.3048)
+ override def print = { Console.print(">>>"); super.print; proprint }
+ }
+
+ object Meter extends (Double => Meter) {
+
+ private[a] trait MeterArg
+
+ implicit val boxings = new BoxingConversions[Meter, Double] {
+ def box(x: Double) = new Meter(x)
+ def unbox(m: Meter) = m.underlying
+ }
+ }
+
+ class Foot(val unbox: Double) extends AnyVal {
+ def + (other: Foot): Foot =
+ new Foot(this.unbox + other.unbox)
+ override def toString = unbox.toString+"ft"
+ }
+ object Foot {
+ implicit val boxings = new BoxingConversions[Foot, Double] {
+ def box(x: Double) = new Foot(x)
+ def unbox(m: Foot) = m.unbox
+ }
+ }
+
+}
+package b {
+ trait Printable extends Any {
+ def print: Unit = Console.print(this)
+ protected def proprint = Console.print("<<<")
+ }
+}
+import a._
+import _root_.b._
+object Test extends App {
+
+ {
+ val x: Meter = new Meter(1)
+ val a: Object = x.asInstanceOf[Object]
+ val y: Meter = a.asInstanceOf[Meter]
+
+ val u: Double = 1
+ val b: Object = u.asInstanceOf[Object]
+ val v: Double = b.asInstanceOf[Double]
+ }
+
+ val x = new Meter(1)
+ val y = x
+ println((x + x) / x)
+ println((x + x) / 0.5)
+ println((x < x).toString)
+ println("x.isInstanceOf[Meter]: "+x.isInstanceOf[Meter])
+
+
+ println("x.hashCode: "+x.hashCode)
+ println("x == 1: "+(x == 1))
+ println("x == y: "+(x == y))
+ assert(x.hashCode == (1.0).hashCode)
+
+ val a: Any = x
+ val b: Any = y
+ println("a == b: "+(a == b))
+
+ { println("testing native arrays")
+ val arr = Array(x, y + x)
+ println(arr.deep)
+ def foo[T <: Printable](x: Array[T]) {
+ for (i <- 0 until x.length) { x(i).print; println(" "+x(i)) }
+ }
+ val m = arr(0)
+ println(m)
+ foo(arr)
+ }
+ //
+ // { println("testing wrapped arrays")
+ // import collection.mutable.FlatArray
+ // val arr = FlatArray(x, y + x)
+ // println(arr)
+ // def foo(x: FlatArray[Meter]) {
+ // for (i <- 0 until x.length) { x(i).print; println(" "+x(i)) }
+ // }
+ // val m = arr(0)
+ // println(m)
+ // foo(arr)
+ // val ys: Seq[Meter] = arr map (_ + new Meter(1))
+ // println(ys)
+ // val zs = arr map (_ / Meter(1))
+ // println(zs)
+ // val fs = arr map (_.toFoot)
+ // println(fs)
+ // }
+
+}
diff --git a/test/files/run/MutableListTest.scala b/test/files/run/MutableListTest.scala
index 7043157..322a368 100644
--- a/test/files/run/MutableListTest.scala
+++ b/test/files/run/MutableListTest.scala
@@ -1,126 +1,126 @@
-
-
-
-import scala.collection.mutable.MutableList
-
-
-
-class ExtList[T] extends MutableList[T] {
- def printState {
- println("Length: " + len)
- println("Last elem: " + last0.elem)
- println("First elem: " + first0.elem)
- println("After first elem: " + first0.next.elem)
- println("After first: " + first0.next)
- println("Last: " + last0)
- }
-}
-
-object Test {
-
- def main(args: Array[String]) {
- testEmpty
- testAddElement
- testAddFewElements
- testAddMoreElements
- testTraversables
- }
-
- def testEmpty {
- val mlist = new MutableList[Int]
- assert(mlist.isEmpty)
- assert(mlist.get(0) == None)
- }
-
- def testAddElement {
- val mlist = new MutableList[Int]
- mlist += 17
- assert(mlist.nonEmpty)
- assert(mlist.length == 1)
- assert(mlist.head == 17)
- assert(mlist.last == 17)
- assert(mlist(0) == 17)
- assert(mlist.tail.isEmpty)
- assert(mlist.tail.length == 0)
- mlist(0) = 101
- assert(mlist(0) == 101)
- assert(mlist.toList == List(101))
- assert(mlist.tail.get(0) == None)
- assert((mlist.tail += 19).head == 19)
- assert(mlist.tail.length == 0)
- }
-
- def testAddFewElements {
- val mlist = new MutableList[Int]
- for (i <- 0 until 2) mlist += i
-// mlist.printState
- for (i <- 0 until 2) assert(mlist(i) == i)
- assert(mlist.length == 2)
- assert(mlist.nonEmpty)
- assert(mlist.tail.length == 1)
- assert(mlist.tail.tail.length == 0)
- assert(mlist.tail.tail.isEmpty)
- }
-
- def testAddMoreElements {
- val mlist = new MutableList[Int]
- for (i <- 0 until 10) mlist += i * i
- assert(mlist.nonEmpty)
- assert(mlist.length == 10)
- for (i <- 0 until 10) assert(mlist(i) == i * i)
- assert(mlist(5) == 5 * 5)
- assert(mlist(9) == 9 * 9)
- var sometail = mlist
- for (i <- 0 until 10) {
- assert(sometail.head == i * i)
- sometail = sometail.tail
- }
- mlist(5) = -25
- assert(mlist(5) == -25)
- mlist(0) = -1
- assert(mlist(0) == -1)
- mlist(9) = -81
- assert(mlist(9) == -81)
- assert(mlist(5) == -25)
- assert(mlist(0) == -1)
- assert(mlist.last == -81)
- mlist.clear
- assert(mlist.isEmpty)
- mlist += 1001
- assert(mlist.head == 1001)
- mlist += 9999
- assert(mlist.tail.head == 9999)
- assert(mlist.last == 9999)
- }
-
- def testTraversables {
- val mlist = new MutableList[Int]
- for (i <- 0 until 10) mlist += i * i
- var lst = mlist.drop(5)
- for (i <- 0 until 5) assert(lst(i) == (i + 5) * (i + 5))
- lst = lst.take(3)
- for (i <- 0 until 3) assert(lst(i) == (i + 5) * (i + 5))
- lst += 129
- assert(lst(3) == 129)
- assert(lst.last == 129)
- assert(lst.length == 4)
- lst += 7
- assert(lst.init.last == 129)
- assert(lst.length == 5)
- lst.clear
- assert(lst.length == 0)
- for (i <- 0 until 5) lst += i
- assert(lst.reduceLeft(_ + _) == 10)
- }
-
-}
-
-
-
-
-
-
-
-
-
-
+
+
+
+import scala.collection.mutable.MutableList
+
+
+
+class ExtList[T] extends MutableList[T] {
+ def printState {
+ println("Length: " + len)
+ println("Last elem: " + last0.elem)
+ println("First elem: " + first0.elem)
+ println("After first elem: " + first0.next.elem)
+ println("After first: " + first0.next)
+ println("Last: " + last0)
+ }
+}
+
+object Test {
+
+ def main(args: Array[String]) {
+ testEmpty
+ testAddElement
+ testAddFewElements
+ testAddMoreElements
+ testTraversables
+ }
+
+ def testEmpty {
+ val mlist = new MutableList[Int]
+ assert(mlist.isEmpty)
+ assert(mlist.get(0) == None)
+ }
+
+ def testAddElement {
+ val mlist = new MutableList[Int]
+ mlist += 17
+ assert(mlist.nonEmpty)
+ assert(mlist.length == 1)
+ assert(mlist.head == 17)
+ assert(mlist.last == 17)
+ assert(mlist(0) == 17)
+ assert(mlist.tail.isEmpty)
+ assert(mlist.tail.length == 0)
+ mlist(0) = 101
+ assert(mlist(0) == 101)
+ assert(mlist.toList == List(101))
+ assert(mlist.tail.get(0) == None)
+ assert((mlist.tail += 19).head == 19)
+ assert(mlist.tail.length == 0)
+ }
+
+ def testAddFewElements {
+ val mlist = new MutableList[Int]
+ for (i <- 0 until 2) mlist += i
+// mlist.printState
+ for (i <- 0 until 2) assert(mlist(i) == i)
+ assert(mlist.length == 2)
+ assert(mlist.nonEmpty)
+ assert(mlist.tail.length == 1)
+ assert(mlist.tail.tail.length == 0)
+ assert(mlist.tail.tail.isEmpty)
+ }
+
+ def testAddMoreElements {
+ val mlist = new MutableList[Int]
+ for (i <- 0 until 10) mlist += i * i
+ assert(mlist.nonEmpty)
+ assert(mlist.length == 10)
+ for (i <- 0 until 10) assert(mlist(i) == i * i)
+ assert(mlist(5) == 5 * 5)
+ assert(mlist(9) == 9 * 9)
+ var sometail = mlist
+ for (i <- 0 until 10) {
+ assert(sometail.head == i * i)
+ sometail = sometail.tail
+ }
+ mlist(5) = -25
+ assert(mlist(5) == -25)
+ mlist(0) = -1
+ assert(mlist(0) == -1)
+ mlist(9) = -81
+ assert(mlist(9) == -81)
+ assert(mlist(5) == -25)
+ assert(mlist(0) == -1)
+ assert(mlist.last == -81)
+ mlist.clear
+ assert(mlist.isEmpty)
+ mlist += 1001
+ assert(mlist.head == 1001)
+ mlist += 9999
+ assert(mlist.tail.head == 9999)
+ assert(mlist.last == 9999)
+ }
+
+ def testTraversables {
+ val mlist = new MutableList[Int]
+ for (i <- 0 until 10) mlist += i * i
+ var lst = mlist.drop(5)
+ for (i <- 0 until 5) assert(lst(i) == (i + 5) * (i + 5))
+ lst = lst.take(3)
+ for (i <- 0 until 3) assert(lst(i) == (i + 5) * (i + 5))
+ lst += 129
+ assert(lst(3) == 129)
+ assert(lst.last == 129)
+ assert(lst.length == 4)
+ lst += 7
+ assert(lst.init.last == 129)
+ assert(lst.length == 5)
+ lst.clear
+ assert(lst.length == 0)
+ for (i <- 0 until 5) lst += i
+ assert(lst.reduceLeft(_ + _) == 10)
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
diff --git a/test/files/run/Predef.readLine.check b/test/files/run/Predef.readLine.check
new file mode 100644
index 0000000..4fb2bc4
--- /dev/null
+++ b/test/files/run/Predef.readLine.check
@@ -0,0 +1,3 @@
+prompt
+fancy prompt
+immensely fancy prompt
\ No newline at end of file
diff --git a/test/files/run/Predef.readLine.scala b/test/files/run/Predef.readLine.scala
new file mode 100644
index 0000000..9f07936
--- /dev/null
+++ b/test/files/run/Predef.readLine.scala
@@ -0,0 +1,10 @@
+import java.io.StringReader
+
+object Test extends App {
+ Console.withIn(new StringReader("")) {
+ readLine()
+ readLine("prompt\n")
+ readLine("%s prompt\n", "fancy")
+ readLine("%s %s prompt\n", "immensely", "fancy")
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/QueueTest.scala b/test/files/run/QueueTest.scala
index 2f8ecae..859ce20 100644
--- a/test/files/run/QueueTest.scala
+++ b/test/files/run/QueueTest.scala
@@ -1,297 +1,297 @@
-
-
-import scala.collection.mutable.Queue
-
-
-
-
-class ExtQueue[T] extends Queue[T] {
- def printState {
- println("-------------------")
- println("Length: " + len)
- println("First: " + first0)
- println("First elem: " + first0.elem)
- println("After first: " + first0.next)
- }
-}
-
-object Test {
-
- def main(args: Array[String]) {
- testEmpty
- testEnqueue
- testTwoEnqueues
- testFewEnqueues
- testMoreEnqueues
- }
-
- def testEmpty {
- val queue = new Queue[Int]
-
- assert(queue.isEmpty)
- assert(queue.size == 0)
- assert(queue.length == 0)
- assert(queue.dequeueFirst(_ > 500) == None)
- assert(queue.dequeueAll(_ > 500).isEmpty)
-
- queue.clear
- assert(queue.isEmpty)
- assert(queue.size == 0)
- assert(queue.length == 0)
- assert(queue.dequeueFirst(_ > 500) == None)
- assert(queue.dequeueAll(_ > 500).isEmpty)
- }
-
- def testEnqueue {
- val queue = new Queue[Int]
-
- queue.enqueue(10)
- assert(queue.nonEmpty)
- assert(queue.size == 1)
- assert(queue.length == 1)
- assert(queue.head == 10)
- assert(queue(0) == 10)
- assert(queue.init.isEmpty)
- assert(queue.tail.isEmpty)
-
- queue.clear
- assert(queue.isEmpty)
- assert(queue.length == 0)
-
- queue.enqueue(11)
- assert(queue.nonEmpty)
- assert(queue.length == 1)
- assert(queue.head == 11)
- assert(queue.front == 11)
-
- val deq = queue.dequeue
- assert(deq == 11)
- assert(queue.isEmpty)
- assert(queue.length == 0)
-
- queue.enqueue(12)
- val pdopt = queue.dequeueFirst(_ > 999)
- assert(pdopt == None)
- assert(queue.nonEmpty && queue.length == 1)
-
- val somepd = queue.dequeueFirst(_ >= 1)
- assert(somepd == Some(12))
- assert(queue.isEmpty && queue.length == 0)
- }
-
- def testTwoEnqueues {
- val queue = new ExtQueue[Int]
- queue.enqueue(30)
- queue.enqueue(40)
-
- assert(queue.length == 2)
- assert(queue.size == 2)
- assert(queue.nonEmpty)
- assert(queue.front == 30)
-// queue.printState
-
- val all = queue.dequeueAll(_ > 20)
- assert(all.size == 2)
- assert(all.contains(30))
- assert(all.contains(40))
- assert(queue.size == 0)
- assert(queue.isEmpty)
- }
-
- def testFewEnqueues {
- val queue = new ExtQueue[Int]
- queue.enqueue(10)
- queue.enqueue(20)
-
- assert(queue.length == 2)
- assert(queue.nonEmpty)
- assert(queue.head == 10)
- assert(queue.last == 20)
- assert(queue.front == 10)
-// queue.printState
-
- val ten = queue.dequeue
- assert(ten == 10)
- assert(queue.length == 1)
-// queue.printState
-
- queue.enqueue(30)
-// queue.printState
- val gt25 = queue.dequeueFirst(_ > 25)
- assert(gt25 == Some(30))
- assert(queue.nonEmpty)
- assert(queue.length == 1)
- assert(queue.head == 20)
- assert(queue.front == 20)
-// queue.printState
-
- queue.enqueue(30)
-// queue.printState
- val lt25 = queue.dequeueFirst(_ < 25)
- assert(lt25 == Some(20))
- assert(queue.nonEmpty)
- assert(queue.length == 1)
-// queue.printState
-
- queue.enqueue(40)
-// queue.printState
- val all = queue.dequeueAll(_ > 20)
-// queue.printState
- assert(all.size == 2)
- assert(all.contains(30))
- assert(all.contains(40))
- assert(queue.isEmpty)
- assert(queue.length == 0)
-
- queue.enqueue(50)
- queue.enqueue(60)
-// queue.printState
- val allgt55 = queue.dequeueAll(_ > 55)
-// println(allgt55)
-// queue.printState
- assert(allgt55.size == 1)
- assert(allgt55.contains(60))
- assert(queue.length == 1)
-
- queue.enqueue(70)
- queue.enqueue(80)
-// queue.printState
- val alllt75 = queue.dequeueAll(_ < 75)
-// queue.printState
- assert(alllt75.size == 2)
- assert(alllt75.contains(70))
- assert(alllt75.contains(50))
- assert(queue.length == 1)
- assert(queue.head == 80)
- assert(queue.last == 80)
- assert(queue.front == 80)
- }
-
- def testMoreEnqueues {
- val queue = new ExtQueue[Int]
- for (i <- 0 until 10) queue.enqueue(i * 2)
-
- for (i <- 0 until 10) {
- val top = queue.dequeue
- assert(top == i * 2)
- assert(queue.length == 10 - i - 1)
- }
- assert(queue.isEmpty)
- assert(queue.length == 0)
-
- for (i <- 0 until 10) queue.enqueue(i * i)
- assert(queue.length == 10)
- assert(queue.nonEmpty)
-
- //queue.printState
- val gt5 = queue.dequeueAll(_ > 4)
- //queue.printState
- //println(gt5)
- assert(gt5.size == 7)
- assert(queue.length == 3)
- assert(queue.nonEmpty)
-
- queue.clear
- assert(queue.length == 0)
- assert(queue.isEmpty)
-
- for (i <- 0 until 10) queue.enqueue(i)
- assert(queue.length == 10)
-
- val even = queue.dequeueAll(_ % 2 == 0)
- assert(even.size == 5)
- assert(even.sameElements(List(0, 2, 4, 6, 8)))
- assert(queue.length == 5)
- assert(queue.head == 1)
- assert(queue.last == 9)
-
- val odd = queue.dequeueAll(_ %2 == 1)
- assert(odd.size == 5)
- assert(queue.length == 0)
- assert(queue.isEmpty)
- assert(odd.sameElements(List(1, 3, 5, 7, 9)))
-
- for (i <- 0 until 10) queue.enqueue(i * i)
- assert(queue.last == 81)
- assert(queue.head == 0)
- assert(queue.length == 10)
-
- val foddgt25 = queue.dequeueFirst(num => num > 25 && num % 2 == 1)
- assert(foddgt25 == Some(49))
- assert(queue.length == 9)
- assert(queue.nonEmpty)
-
- //queue.printState
- val lt30 = queue.dequeueAll(_ < 30)
- //println(lt30)
- //queue.printState
- assert(lt30.size == 6)
- assert(queue.length == 3)
-
- val fgt60 = queue.dequeueFirst(_ > 60)
- assert(fgt60 == Some(64))
- assert(queue.length == 2)
- assert(queue.head == 36)
- assert(queue.last == 81)
-
- val sgt60 = queue.dequeueFirst(_ > 60)
- assert(sgt60 == Some(81))
- assert(queue.length == 1)
- assert(queue.head == 36)
- assert(queue.last == 36)
-
- val nogt60 = queue.dequeueFirst(_ > 60)
- assert(nogt60 == None)
- assert(queue.length == 1)
- assert(queue.nonEmpty)
- assert(queue.head == 36)
-
- val gt0 = queue.dequeueFirst(_ > 0)
- assert(gt0 == Some(36))
- assert(queue.length == 0)
- assert(queue.isEmpty)
-
- for (i <- 0 until 4) queue.enqueue(i)
- val interv = queue.dequeueAll(n => n > 0 && n < 3)
- assert(interv.sameElements(List(1, 2)))
- assert(queue.length == 2)
- assert(queue.head == 0)
- assert(queue.last == 3)
-
- queue.dequeue
- assert(queue.head == 3)
-
- queue.enqueue(9)
- val three = queue.dequeueFirst(_ < 5)
- assert(three == Some(3))
- assert(queue.length == 1)
- assert(queue.head == 9)
-
- queue.clear
- for (i <- -100 until 100) queue.enqueue(i * i + i % 7 + 5)
- assert(queue.length == 200)
-
- val manyodds = queue.dequeueAll(_ % 2 == 1)
- assert((manyodds.size + queue.length) == 200)
-
- queue.dequeueAll(_ > -10000)
- assert(queue.isEmpty)
-
- for (i <- 0 until 100) queue.enqueue(i)
- val multof3 = queue.dequeueAll(_ % 3 == 0)
- assert(multof3.size == 34)
- assert(queue.size == 66)
-
- val n98 = queue.dequeueFirst(_ == 98)
- assert(n98 == Some(98))
- assert(queue.size == 65)
- assert(queue.last == 97)
- assert(queue.head == 1)
- // well... seems to work
- }
-
-}
-
-
-
-
+
+
+import scala.collection.mutable.Queue
+
+
+
+
+class ExtQueue[T] extends Queue[T] {
+ def printState {
+ println("-------------------")
+ println("Length: " + len)
+ println("First: " + first0)
+ println("First elem: " + first0.elem)
+ println("After first: " + first0.next)
+ }
+}
+
+object Test {
+
+ def main(args: Array[String]) {
+ testEmpty
+ testEnqueue
+ testTwoEnqueues
+ testFewEnqueues
+ testMoreEnqueues
+ }
+
+ def testEmpty {
+ val queue = new Queue[Int]
+
+ assert(queue.isEmpty)
+ assert(queue.size == 0)
+ assert(queue.length == 0)
+ assert(queue.dequeueFirst(_ > 500) == None)
+ assert(queue.dequeueAll(_ > 500).isEmpty)
+
+ queue.clear
+ assert(queue.isEmpty)
+ assert(queue.size == 0)
+ assert(queue.length == 0)
+ assert(queue.dequeueFirst(_ > 500) == None)
+ assert(queue.dequeueAll(_ > 500).isEmpty)
+ }
+
+ def testEnqueue {
+ val queue = new Queue[Int]
+
+ queue.enqueue(10)
+ assert(queue.nonEmpty)
+ assert(queue.size == 1)
+ assert(queue.length == 1)
+ assert(queue.head == 10)
+ assert(queue(0) == 10)
+ assert(queue.init.isEmpty)
+ assert(queue.tail.isEmpty)
+
+ queue.clear
+ assert(queue.isEmpty)
+ assert(queue.length == 0)
+
+ queue.enqueue(11)
+ assert(queue.nonEmpty)
+ assert(queue.length == 1)
+ assert(queue.head == 11)
+ assert(queue.front == 11)
+
+ val deq = queue.dequeue
+ assert(deq == 11)
+ assert(queue.isEmpty)
+ assert(queue.length == 0)
+
+ queue.enqueue(12)
+ val pdopt = queue.dequeueFirst(_ > 999)
+ assert(pdopt == None)
+ assert(queue.nonEmpty && queue.length == 1)
+
+ val somepd = queue.dequeueFirst(_ >= 1)
+ assert(somepd == Some(12))
+ assert(queue.isEmpty && queue.length == 0)
+ }
+
+ def testTwoEnqueues {
+ val queue = new ExtQueue[Int]
+ queue.enqueue(30)
+ queue.enqueue(40)
+
+ assert(queue.length == 2)
+ assert(queue.size == 2)
+ assert(queue.nonEmpty)
+ assert(queue.front == 30)
+// queue.printState
+
+ val all = queue.dequeueAll(_ > 20)
+ assert(all.size == 2)
+ assert(all.contains(30))
+ assert(all.contains(40))
+ assert(queue.size == 0)
+ assert(queue.isEmpty)
+ }
+
+ def testFewEnqueues {
+ val queue = new ExtQueue[Int]
+ queue.enqueue(10)
+ queue.enqueue(20)
+
+ assert(queue.length == 2)
+ assert(queue.nonEmpty)
+ assert(queue.head == 10)
+ assert(queue.last == 20)
+ assert(queue.front == 10)
+// queue.printState
+
+ val ten = queue.dequeue
+ assert(ten == 10)
+ assert(queue.length == 1)
+// queue.printState
+
+ queue.enqueue(30)
+// queue.printState
+ val gt25 = queue.dequeueFirst(_ > 25)
+ assert(gt25 == Some(30))
+ assert(queue.nonEmpty)
+ assert(queue.length == 1)
+ assert(queue.head == 20)
+ assert(queue.front == 20)
+// queue.printState
+
+ queue.enqueue(30)
+// queue.printState
+ val lt25 = queue.dequeueFirst(_ < 25)
+ assert(lt25 == Some(20))
+ assert(queue.nonEmpty)
+ assert(queue.length == 1)
+// queue.printState
+
+ queue.enqueue(40)
+// queue.printState
+ val all = queue.dequeueAll(_ > 20)
+// queue.printState
+ assert(all.size == 2)
+ assert(all.contains(30))
+ assert(all.contains(40))
+ assert(queue.isEmpty)
+ assert(queue.length == 0)
+
+ queue.enqueue(50)
+ queue.enqueue(60)
+// queue.printState
+ val allgt55 = queue.dequeueAll(_ > 55)
+// println(allgt55)
+// queue.printState
+ assert(allgt55.size == 1)
+ assert(allgt55.contains(60))
+ assert(queue.length == 1)
+
+ queue.enqueue(70)
+ queue.enqueue(80)
+// queue.printState
+ val alllt75 = queue.dequeueAll(_ < 75)
+// queue.printState
+ assert(alllt75.size == 2)
+ assert(alllt75.contains(70))
+ assert(alllt75.contains(50))
+ assert(queue.length == 1)
+ assert(queue.head == 80)
+ assert(queue.last == 80)
+ assert(queue.front == 80)
+ }
+
+ def testMoreEnqueues {
+ val queue = new ExtQueue[Int]
+ for (i <- 0 until 10) queue.enqueue(i * 2)
+
+ for (i <- 0 until 10) {
+ val top = queue.dequeue
+ assert(top == i * 2)
+ assert(queue.length == 10 - i - 1)
+ }
+ assert(queue.isEmpty)
+ assert(queue.length == 0)
+
+ for (i <- 0 until 10) queue.enqueue(i * i)
+ assert(queue.length == 10)
+ assert(queue.nonEmpty)
+
+ //queue.printState
+ val gt5 = queue.dequeueAll(_ > 4)
+ //queue.printState
+ //println(gt5)
+ assert(gt5.size == 7)
+ assert(queue.length == 3)
+ assert(queue.nonEmpty)
+
+ queue.clear
+ assert(queue.length == 0)
+ assert(queue.isEmpty)
+
+ for (i <- 0 until 10) queue.enqueue(i)
+ assert(queue.length == 10)
+
+ val even = queue.dequeueAll(_ % 2 == 0)
+ assert(even.size == 5)
+ assert(even.sameElements(List(0, 2, 4, 6, 8)))
+ assert(queue.length == 5)
+ assert(queue.head == 1)
+ assert(queue.last == 9)
+
+ val odd = queue.dequeueAll(_ %2 == 1)
+ assert(odd.size == 5)
+ assert(queue.length == 0)
+ assert(queue.isEmpty)
+ assert(odd.sameElements(List(1, 3, 5, 7, 9)))
+
+ for (i <- 0 until 10) queue.enqueue(i * i)
+ assert(queue.last == 81)
+ assert(queue.head == 0)
+ assert(queue.length == 10)
+
+ val foddgt25 = queue.dequeueFirst(num => num > 25 && num % 2 == 1)
+ assert(foddgt25 == Some(49))
+ assert(queue.length == 9)
+ assert(queue.nonEmpty)
+
+ //queue.printState
+ val lt30 = queue.dequeueAll(_ < 30)
+ //println(lt30)
+ //queue.printState
+ assert(lt30.size == 6)
+ assert(queue.length == 3)
+
+ val fgt60 = queue.dequeueFirst(_ > 60)
+ assert(fgt60 == Some(64))
+ assert(queue.length == 2)
+ assert(queue.head == 36)
+ assert(queue.last == 81)
+
+ val sgt60 = queue.dequeueFirst(_ > 60)
+ assert(sgt60 == Some(81))
+ assert(queue.length == 1)
+ assert(queue.head == 36)
+ assert(queue.last == 36)
+
+ val nogt60 = queue.dequeueFirst(_ > 60)
+ assert(nogt60 == None)
+ assert(queue.length == 1)
+ assert(queue.nonEmpty)
+ assert(queue.head == 36)
+
+ val gt0 = queue.dequeueFirst(_ > 0)
+ assert(gt0 == Some(36))
+ assert(queue.length == 0)
+ assert(queue.isEmpty)
+
+ for (i <- 0 until 4) queue.enqueue(i)
+ val interv = queue.dequeueAll(n => n > 0 && n < 3)
+ assert(interv.sameElements(List(1, 2)))
+ assert(queue.length == 2)
+ assert(queue.head == 0)
+ assert(queue.last == 3)
+
+ queue.dequeue
+ assert(queue.head == 3)
+
+ queue.enqueue(9)
+ val three = queue.dequeueFirst(_ < 5)
+ assert(three == Some(3))
+ assert(queue.length == 1)
+ assert(queue.head == 9)
+
+ queue.clear
+ for (i <- -100 until 100) queue.enqueue(i * i + i % 7 + 5)
+ assert(queue.length == 200)
+
+ val manyodds = queue.dequeueAll(_ % 2 == 1)
+ assert((manyodds.size + queue.length) == 200)
+
+ queue.dequeueAll(_ > -10000)
+ assert(queue.isEmpty)
+
+ for (i <- 0 until 100) queue.enqueue(i)
+ val multof3 = queue.dequeueAll(_ % 3 == 0)
+ assert(multof3.size == 34)
+ assert(queue.size == 66)
+
+ val n98 = queue.dequeueFirst(_ == 98)
+ assert(n98 == Some(98))
+ assert(queue.size == 65)
+ assert(queue.last == 97)
+ assert(queue.head == 1)
+ // well... seems to work
+ }
+
+}
+
+
+
+
diff --git a/test/files/run/SymbolsTest.scala b/test/files/run/SymbolsTest.scala
index cdd68ab..53caa5e 100644
--- a/test/files/run/SymbolsTest.scala
+++ b/test/files/run/SymbolsTest.scala
@@ -1,283 +1,283 @@
-
-
-
-
-class Slazz {
- val s1 = 'myFirstSymbol
- val s2 = 'mySecondSymbol
- def s3 = 'myThirdSymbol
- var s4: Symbol = null
-
- s4 = 'myFourthSymbol
-}
-
-class Base {
- val basesymbol = 'symbase
-}
-
-class Sub extends Base {
- val subsymbol = 'symsub
-}
-
-trait Signs {
- val ind = 'indication
- val trace = 'trace
-}
-
-trait Lazy1 {
- lazy val v1 = "lazy v1"
- lazy val s1 = 'lazySymbol1
-}
-
-trait Lazy2 {
- lazy val v2 = "lazy v2"
- lazy val s2 = 'lazySymbol2
-}
-
-trait Lazy3 {
- lazy val v3 = "lazy v3"
- lazy val s3 = 'lazySymbol3
-}
-
-object SingletonOfLazyness {
- lazy val lazysym = 'lazySymbol
- lazy val another = 'another
- lazy val lastone = 'lastone
-}
-
-/*
- * Tests symbols to see if they work correct.
- */
-object Test {
- class Inner {
- val simba = 'smba
- var mfs: Symbol = null
- mfs = Symbol("mfsa")
- }
-
- object InnerObject {
- val o1 = 'aaa
- val o2 = 'ddd
- }
-
- def aSymbol = 'myFirstSymbol
- val anotherSymbol = 'mySecondSymbol
-
- def main(args: Array[String]) {
- testLiterals
- testForLoop
- testInnerClasses
- testInnerObjects
- testWithHashMaps
- testLists
- testAnonymous
- testNestedObject
- testInheritance
- testTraits
- testLazyTraits
- testLazyObjects
- }
-
- def testLiterals {
- val scl = new Slazz
- assert(scl.s1 == aSymbol)
- assert(scl.s2 == anotherSymbol)
- assert(scl.s3 == 'myThirdSymbol)
- assert(scl.s4 == Symbol.apply("myFourthSymbol"))
- assert(scl.s1 == Symbol("myFirstSymbol"))
- }
-
- def testForLoop {
- for (i <- 0 until 100) List("Val" + i)
- }
-
- def testInnerClasses {
- val innerPower = new Inner
- assert(innerPower.simba == 'smba)
- assert(innerPower.mfs == 'mfsa)
- }
-
- def testInnerObjects {
- assert(InnerObject.o1 == 'aaa)
- assert(InnerObject.o2 == 'ddd)
- }
-
- def testWithHashMaps {
- val map = new collection.mutable.HashMap[Symbol, Symbol]
- map.put(InnerObject.o1, 'smba)
- map.put(InnerObject.o2, 'mfsa)
- map.put(Symbol("WeirdKey" + 1), Symbol("Weird" + "Val" + 1))
- assert(map('aaa) == 'smba)
- assert(map('ddd) == 'mfsa)
- assert(map('WeirdKey1) == Symbol("WeirdVal1"))
-
- map.clear
- for (i <- 0 until 100) map.put(Symbol("symKey" + i), Symbol("symVal" + i))
- assert(map(Symbol("symKey15")) == Symbol("symVal15"))
- assert(map('symKey22) == 'symVal22)
- assert(map('symKey73) == 'symVal73)
- assert(map('symKey56) == 'symVal56)
- assert(map('symKey91) == 'symVal91)
- }
-
- def testLists {
- var lst: List[Symbol] = Nil
- for (i <- 0 until 100) lst ::= Symbol("lsym" + (99 - i))
- assert(lst(0) == 'lsym0)
- assert(lst(10) == 'lsym10)
- assert(lst(30) == 'lsym30)
- assert(lst(40) == 'lsym40)
- assert(lst(65) == 'lsym65)
- assert(lst(90) == 'lsym90)
- }
-
- def testAnonymous { // TODO complaints classdef can't be found for some reason, runs fine in my case
- // val anon = () => {
- // val simba = 'smba
- // simba
- // }
- // val an2 = () => {
- // object nested {
- // val m = 'mfsa
- // }
- // nested.m
- // }
- // val an3 = () => {
- // object nested {
- // val f = () => {
- // 'layered
- // }
- // def gets = f()
- // }
- // nested.gets
- // }
- // val inner = new Inner
- // assert(anon() == inner.simba)
- // assert(anon().toString == "'smba")
- // assert(an2() == 'mfsa)
- // assert(an3() == Symbol("layered" + ""))
- }
-
- def testNestedObject {
- object nested {
- def sign = 'sign
- def insignia = 'insignia
- }
- assert(nested.sign == 'sign)
- assert(nested.insignia == 'insignia)
- assert(('insignia).toString == "'insignia")
- }
-
- def testInheritance {
- val base = new Base
- val sub = new Sub
- assert(base.basesymbol == 'symbase)
- assert(sub.subsymbol == 'symsub)
- assert(sub.basesymbol == 'symbase)
-
- val anon = new Sub {
- def subsubsymbol = 'symsubsub
- }
- assert(anon.subsubsymbol == 'symsubsub)
- assert(anon.subsymbol == 'symsub)
- assert(anon.basesymbol == 'symbase)
-
- object nested extends Sub {
- def objsymbol = 'symobj
- }
- assert(nested.objsymbol == 'symobj)
- assert(nested.subsymbol == 'symsub)
- assert(nested.basesymbol == 'symbase)
- assert(('symbase).toString == "'symbase")
- }
-
- def testTraits {
- val fromTrait = new AnyRef with Signs {
- def traitsymbol = 'traitSymbol
- }
-
- assert(fromTrait.traitsymbol == 'traitSymbol)
- assert(fromTrait.ind == 'indication)
- assert(fromTrait.trace == 'trace)
- assert(('trace).toString == "'trace")
-
- trait Compl {
- val s1 = 's1
- def s2 = 's2
- object inner {
- val s3 = 's3
- val s4 = 's4
- }
- }
-
- val compl = new Sub with Signs with Compl
- assert(compl.s1 == 's1)
- assert(compl.s2 == 's2)
- assert(compl.inner.s3 == 's3)
- assert(compl.inner.s4 == 's4)
- assert(compl.ind == 'indication)
- assert(compl.trace == 'trace)
- assert(compl.subsymbol == 'symsub)
- assert(compl.basesymbol == 'symbase)
-
- object Local extends Signs with Compl {
- val s5 = 's5
- def s6 = 's6
- object inner2 {
- val s7 = 's7
- def s8 = 's8
- }
- }
- assert(Local.s5 == 's5)
- assert(Local.s6 == 's6)
- assert(Local.inner2.s7 == 's7)
- assert(Local.inner2.s8 == 's8)
- assert(Local.inner.s3 == 's3)
- assert(Local.inner.s4 == 's4)
- assert(Local.s1 == 's1)
- assert(Local.s2 == 's2)
- assert(Local.trace == 'trace)
- assert(Local.ind == 'indication)
- assert(('s8).toString == "'s8")
- }
-
- def testLazyTraits {
- val l1 = new AnyRef with Lazy1
- val l2 = new AnyRef with Lazy2
- val l3 = new AnyRef with Lazy3
-
- l1.v1
- l2.v2
- l3.v3
- assert((l1.s1).toString == "'lazySymbol1")
- assert(l2.s2 == Symbol("lazySymbol" + 2))
- assert(l3.s3 == 'lazySymbol3)
- }
-
- def testLazyObjects {
- assert(SingletonOfLazyness.lazysym == 'lazySymbol)
- assert(SingletonOfLazyness.another == Symbol("ano" + "ther"))
- assert((SingletonOfLazyness.lastone).toString == "'lastone")
-
- object nested {
- lazy val sym1 = 'snested1
- lazy val sym2 = 'snested2
- }
-
- assert(nested.sym1 == 'snested1)
- assert(nested.sym2 == Symbol("snested" + "2"))
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+class Slazz {
+ val s1 = 'myFirstSymbol
+ val s2 = 'mySecondSymbol
+ def s3 = 'myThirdSymbol
+ var s4: Symbol = null
+
+ s4 = 'myFourthSymbol
+}
+
+class Base {
+ val basesymbol = 'symbase
+}
+
+class Sub extends Base {
+ val subsymbol = 'symsub
+}
+
+trait Signs {
+ val ind = 'indication
+ val trace = 'trace
+}
+
+trait Lazy1 {
+ lazy val v1 = "lazy v1"
+ lazy val s1 = 'lazySymbol1
+}
+
+trait Lazy2 {
+ lazy val v2 = "lazy v2"
+ lazy val s2 = 'lazySymbol2
+}
+
+trait Lazy3 {
+ lazy val v3 = "lazy v3"
+ lazy val s3 = 'lazySymbol3
+}
+
+object SingletonOfLazyness {
+ lazy val lazysym = 'lazySymbol
+ lazy val another = 'another
+ lazy val lastone = 'lastone
+}
+
+/*
+ * Tests symbols to see if they work correct.
+ */
+object Test {
+ class Inner {
+ val simba = 'smba
+ var mfs: Symbol = null
+ mfs = Symbol("mfsa")
+ }
+
+ object InnerObject {
+ val o1 = 'aaa
+ val o2 = 'ddd
+ }
+
+ def aSymbol = 'myFirstSymbol
+ val anotherSymbol = 'mySecondSymbol
+
+ def main(args: Array[String]) {
+ testLiterals
+ testForLoop
+ testInnerClasses
+ testInnerObjects
+ testWithHashMaps
+ testLists
+ testAnonymous
+ testNestedObject
+ testInheritance
+ testTraits
+ testLazyTraits
+ testLazyObjects
+ }
+
+ def testLiterals {
+ val scl = new Slazz
+ assert(scl.s1 == aSymbol)
+ assert(scl.s2 == anotherSymbol)
+ assert(scl.s3 == 'myThirdSymbol)
+ assert(scl.s4 == Symbol.apply("myFourthSymbol"))
+ assert(scl.s1 == Symbol("myFirstSymbol"))
+ }
+
+ def testForLoop {
+ for (i <- 0 until 100) List("Val" + i)
+ }
+
+ def testInnerClasses {
+ val innerPower = new Inner
+ assert(innerPower.simba == 'smba)
+ assert(innerPower.mfs == 'mfsa)
+ }
+
+ def testInnerObjects {
+ assert(InnerObject.o1 == 'aaa)
+ assert(InnerObject.o2 == 'ddd)
+ }
+
+ def testWithHashMaps {
+ val map = new collection.mutable.HashMap[Symbol, Symbol]
+ map.put(InnerObject.o1, 'smba)
+ map.put(InnerObject.o2, 'mfsa)
+ map.put(Symbol("WeirdKey" + 1), Symbol("Weird" + "Val" + 1))
+ assert(map('aaa) == 'smba)
+ assert(map('ddd) == 'mfsa)
+ assert(map('WeirdKey1) == Symbol("WeirdVal1"))
+
+ map.clear
+ for (i <- 0 until 100) map.put(Symbol("symKey" + i), Symbol("symVal" + i))
+ assert(map(Symbol("symKey15")) == Symbol("symVal15"))
+ assert(map('symKey22) == 'symVal22)
+ assert(map('symKey73) == 'symVal73)
+ assert(map('symKey56) == 'symVal56)
+ assert(map('symKey91) == 'symVal91)
+ }
+
+ def testLists {
+ var lst: List[Symbol] = Nil
+ for (i <- 0 until 100) lst ::= Symbol("lsym" + (99 - i))
+ assert(lst(0) == 'lsym0)
+ assert(lst(10) == 'lsym10)
+ assert(lst(30) == 'lsym30)
+ assert(lst(40) == 'lsym40)
+ assert(lst(65) == 'lsym65)
+ assert(lst(90) == 'lsym90)
+ }
+
+ def testAnonymous { // TODO complaints classdef can't be found for some reason, runs fine in my case
+ // val anon = () => {
+ // val simba = 'smba
+ // simba
+ // }
+ // val an2 = () => {
+ // object nested {
+ // val m = 'mfsa
+ // }
+ // nested.m
+ // }
+ // val an3 = () => {
+ // object nested {
+ // val f = () => {
+ // 'layered
+ // }
+ // def gets = f()
+ // }
+ // nested.gets
+ // }
+ // val inner = new Inner
+ // assert(anon() == inner.simba)
+ // assert(anon().toString == "'smba")
+ // assert(an2() == 'mfsa)
+ // assert(an3() == Symbol("layered" + ""))
+ }
+
+ def testNestedObject {
+ object nested {
+ def sign = 'sign
+ def insignia = 'insignia
+ }
+ assert(nested.sign == 'sign)
+ assert(nested.insignia == 'insignia)
+ assert(('insignia).toString == "'insignia")
+ }
+
+ def testInheritance {
+ val base = new Base
+ val sub = new Sub
+ assert(base.basesymbol == 'symbase)
+ assert(sub.subsymbol == 'symsub)
+ assert(sub.basesymbol == 'symbase)
+
+ val anon = new Sub {
+ def subsubsymbol = 'symsubsub
+ }
+ assert(anon.subsubsymbol == 'symsubsub)
+ assert(anon.subsymbol == 'symsub)
+ assert(anon.basesymbol == 'symbase)
+
+ object nested extends Sub {
+ def objsymbol = 'symobj
+ }
+ assert(nested.objsymbol == 'symobj)
+ assert(nested.subsymbol == 'symsub)
+ assert(nested.basesymbol == 'symbase)
+ assert(('symbase).toString == "'symbase")
+ }
+
+ def testTraits {
+ val fromTrait = new AnyRef with Signs {
+ def traitsymbol = 'traitSymbol
+ }
+
+ assert(fromTrait.traitsymbol == 'traitSymbol)
+ assert(fromTrait.ind == 'indication)
+ assert(fromTrait.trace == 'trace)
+ assert(('trace).toString == "'trace")
+
+ trait Compl {
+ val s1 = 's1
+ def s2 = 's2
+ object inner {
+ val s3 = 's3
+ val s4 = 's4
+ }
+ }
+
+ val compl = new Sub with Signs with Compl
+ assert(compl.s1 == 's1)
+ assert(compl.s2 == 's2)
+ assert(compl.inner.s3 == 's3)
+ assert(compl.inner.s4 == 's4)
+ assert(compl.ind == 'indication)
+ assert(compl.trace == 'trace)
+ assert(compl.subsymbol == 'symsub)
+ assert(compl.basesymbol == 'symbase)
+
+ object Local extends Signs with Compl {
+ val s5 = 's5
+ def s6 = 's6
+ object inner2 {
+ val s7 = 's7
+ def s8 = 's8
+ }
+ }
+ assert(Local.s5 == 's5)
+ assert(Local.s6 == 's6)
+ assert(Local.inner2.s7 == 's7)
+ assert(Local.inner2.s8 == 's8)
+ assert(Local.inner.s3 == 's3)
+ assert(Local.inner.s4 == 's4)
+ assert(Local.s1 == 's1)
+ assert(Local.s2 == 's2)
+ assert(Local.trace == 'trace)
+ assert(Local.ind == 'indication)
+ assert(('s8).toString == "'s8")
+ }
+
+ def testLazyTraits {
+ val l1 = new AnyRef with Lazy1
+ val l2 = new AnyRef with Lazy2
+ val l3 = new AnyRef with Lazy3
+
+ l1.v1
+ l2.v2
+ l3.v3
+ assert((l1.s1).toString == "'lazySymbol1")
+ assert(l2.s2 == Symbol("lazySymbol" + 2))
+ assert(l3.s3 == 'lazySymbol3)
+ }
+
+ def testLazyObjects {
+ assert(SingletonOfLazyness.lazysym == 'lazySymbol)
+ assert(SingletonOfLazyness.another == Symbol("ano" + "ther"))
+ assert((SingletonOfLazyness.lastone).toString == "'lastone")
+
+ object nested {
+ lazy val sym1 = 'snested1
+ lazy val sym2 = 'snested2
+ }
+
+ assert(nested.sym1 == 'snested1)
+ assert(nested.sym2 == Symbol("snested" + "2"))
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/files/run/WeakHashSetTest.scala b/test/files/run/WeakHashSetTest.scala
new file mode 100644
index 0000000..3c8f380
--- /dev/null
+++ b/test/files/run/WeakHashSetTest.scala
@@ -0,0 +1,174 @@
+object Test {
+ def main(args: Array[String]) {
+ val test = scala.reflect.internal.util.WeakHashSetTest
+ test.checkEmpty
+ test.checkPlusEquals
+ test.checkPlusEqualsCollisions
+ test.checkRehashing
+ test.checkRehashCollisions
+ test.checkFindOrUpdate
+ test.checkMinusEquals
+ test.checkMinusEqualsCollisions
+ test.checkClear
+ test.checkIterator
+ test.checkIteratorCollisions
+
+ // This test is commented out because it relies on gc behavior which isn't reliable enough in an automated environment
+ // test.checkRemoveUnreferencedObjects
+ }
+}
+
+// put the main test object in the same package as WeakHashSet because
+// it uses the package private "diagnostics" method
+package scala.reflect.internal.util {
+
+ object WeakHashSetTest {
+ // a class guaranteed to provide hash collisions
+ case class Collider(x : String) extends Comparable[Collider] with Serializable {
+ override def hashCode = 0
+ def compareTo(y : Collider) = this.x compareTo y.x
+ }
+
+ // basic emptiness check
+ def checkEmpty {
+ val hs = new WeakHashSet[String]()
+ assert(hs.size == 0)
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure += works
+ def checkPlusEquals {
+ val hs = new WeakHashSet[String]()
+ val elements = List("hello", "goodbye")
+ elements foreach (hs += _)
+ assert(hs.size == 2)
+ assert(hs contains "hello")
+ assert(hs contains "goodbye")
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure += works when there are collisions
+ def checkPlusEqualsCollisions {
+ val hs = new WeakHashSet[Collider]()
+ val elements = List("hello", "goodbye") map Collider
+ elements foreach (hs += _)
+ assert(hs.size == 2)
+ assert(hs contains Collider("hello"))
+ assert(hs contains Collider("goodbye"))
+ hs.diagnostics.fullyValidate
+ }
+
+ // add a large number of elements to force rehashing and then validate
+ def checkRehashing {
+ val size = 200
+ val hs = new WeakHashSet[String]()
+ val elements = (0 until size).toList map ("a" + _)
+ elements foreach (hs += _)
+ elements foreach {i => assert(hs contains i)}
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure rehashing works properly when the set is rehashed
+ def checkRehashCollisions {
+ val size = 200
+ val hs = new WeakHashSet[Collider]()
+ val elements = (0 until size).toList map {x => Collider("a" + x)}
+ elements foreach (hs += _)
+ elements foreach {i => assert(hs contains i)}
+ hs.diagnostics.fullyValidate
+ }
+
+ // test that unreferenced objects are removed
+ // not run in an automated environment because gc behavior can't be relied on
+ def checkRemoveUnreferencedObjects {
+ val size = 200
+ val hs = new WeakHashSet[Collider]()
+ val elements = (0 until size).toList map {x => Collider("a" + x)}
+ elements foreach (hs += _)
+ // don't throw the following into a retained collection so gc
+ // can remove them
+ for (i <- 0 until size) {
+ hs += Collider("b" + i)
+ }
+ System.gc()
+ Thread.sleep(1000)
+ assert(hs.size == 200)
+ elements foreach {i => assert(hs contains i)}
+ for (i <- 0 until size) {
+ assert(!(hs contains Collider("b" + i)))
+ }
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure findOrUpdate returns the originally entered element
+ def checkFindOrUpdate {
+ val size = 200
+ val hs = new WeakHashSet[Collider]()
+ val elements = (0 until size).toList map {x => Collider("a" + x)}
+ elements foreach {x => assert(hs findEntryOrUpdate x eq x)}
+ for (i <- 0 until size) {
+ // when we do a lookup the result should be the same reference we
+ // original put in
+ assert(hs findEntryOrUpdate(Collider("a" + i)) eq elements(i))
+ }
+ hs.diagnostics.fullyValidate
+ }
+
+ // check -= functionality
+ def checkMinusEquals {
+ val hs = new WeakHashSet[String]()
+ val elements = List("hello", "goodbye")
+ elements foreach (hs += _)
+ hs -= "goodbye"
+ assert(hs.size == 1)
+ assert(hs contains "hello")
+ assert(!(hs contains "goodbye"))
+ hs.diagnostics.fullyValidate
+ }
+
+ // check -= when there are collisions
+ def checkMinusEqualsCollisions {
+ val hs = new WeakHashSet[Collider]
+ val elements = List(Collider("hello"), Collider("goodbye"))
+ elements foreach (hs += _)
+ hs -= Collider("goodbye")
+ assert(hs.size == 1)
+ assert(hs contains Collider("hello"))
+ assert(!(hs contains Collider("goodbye")))
+ hs -= Collider("hello")
+ assert(hs.size == 0)
+ assert(!(hs contains Collider("hello")))
+ hs.diagnostics.fullyValidate
+ }
+
+ // check that the clear method actually cleans everything
+ def checkClear {
+ val size = 200
+ val hs = new WeakHashSet[String]()
+ val elements = (0 until size).toList map ("a" + _)
+ elements foreach (hs += _)
+ hs.clear()
+ assert(hs.size == 0)
+ elements foreach {i => assert(!(hs contains i))}
+ hs.diagnostics.fullyValidate
+ }
+
+ // check that the iterator covers all the contents
+ def checkIterator {
+ val hs = new WeakHashSet[String]()
+ val elements = (0 until 20).toList map ("a" + _)
+ elements foreach (hs += _)
+ assert(elements.iterator.toList.sorted == elements.sorted)
+ hs.diagnostics.fullyValidate
+ }
+
+ // check that the iterator covers all the contents even when there is a collision
+ def checkIteratorCollisions {
+ val hs = new WeakHashSet[Collider]
+ val elements = (0 until 20).toList map {x => Collider("a" + x)}
+ elements foreach (hs += _)
+ assert(elements.iterator.toList.sorted == elements.sorted)
+ hs.diagnostics.fullyValidate
+ }
+ }
+}
diff --git a/test/files/run/absoverride-msil.check b/test/files/run/absoverride-msil.check
deleted file mode 100644
index 938f622..0000000
--- a/test/files/run/absoverride-msil.check
+++ /dev/null
@@ -1,20 +0,0 @@
-<sync>
-next: m
-</sync>
-log: m
-m
-<sync>
-next: s
-</sync>
-log: s
-s
-<sync>
-next: i
-</sync>
-log: i
-i
-<sync>
-next: l
-</sync>
-log: l
-l
diff --git a/test/files/run/abstypetags_core.check b/test/files/run/abstypetags_core.check
new file mode 100644
index 0000000..980b471
--- /dev/null
+++ b/test/files/run/abstypetags_core.check
@@ -0,0 +1,30 @@
+true
+TypeTag[Byte]
+true
+TypeTag[Short]
+true
+TypeTag[Char]
+true
+TypeTag[Int]
+true
+TypeTag[Long]
+true
+TypeTag[Float]
+true
+TypeTag[Double]
+true
+TypeTag[Boolean]
+true
+TypeTag[Unit]
+true
+TypeTag[Any]
+true
+TypeTag[AnyVal]
+true
+TypeTag[AnyRef]
+true
+TypeTag[java.lang.Object]
+true
+TypeTag[Null]
+true
+TypeTag[Nothing]
diff --git a/test/files/run/abstypetags_core.scala b/test/files/run/abstypetags_core.scala
new file mode 100644
index 0000000..2692fec
--- /dev/null
+++ b/test/files/run/abstypetags_core.scala
@@ -0,0 +1,34 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ println(implicitly[WeakTypeTag[Byte]] eq WeakTypeTag.Byte)
+ println(implicitly[WeakTypeTag[Byte]])
+ println(implicitly[WeakTypeTag[Short]] eq WeakTypeTag.Short)
+ println(implicitly[WeakTypeTag[Short]])
+ println(implicitly[WeakTypeTag[Char]] eq WeakTypeTag.Char)
+ println(implicitly[WeakTypeTag[Char]])
+ println(implicitly[WeakTypeTag[Int]] eq WeakTypeTag.Int)
+ println(implicitly[WeakTypeTag[Int]])
+ println(implicitly[WeakTypeTag[Long]] eq WeakTypeTag.Long)
+ println(implicitly[WeakTypeTag[Long]])
+ println(implicitly[WeakTypeTag[Float]] eq WeakTypeTag.Float)
+ println(implicitly[WeakTypeTag[Float]])
+ println(implicitly[WeakTypeTag[Double]] eq WeakTypeTag.Double)
+ println(implicitly[WeakTypeTag[Double]])
+ println(implicitly[WeakTypeTag[Boolean]] eq WeakTypeTag.Boolean)
+ println(implicitly[WeakTypeTag[Boolean]])
+ println(implicitly[WeakTypeTag[Unit]] eq WeakTypeTag.Unit)
+ println(implicitly[WeakTypeTag[Unit]])
+ println(implicitly[WeakTypeTag[Any]] eq WeakTypeTag.Any)
+ println(implicitly[WeakTypeTag[Any]])
+ println(implicitly[WeakTypeTag[AnyVal]] eq WeakTypeTag.AnyVal)
+ println(implicitly[WeakTypeTag[AnyVal]])
+ println(implicitly[WeakTypeTag[AnyRef]] eq WeakTypeTag.AnyRef)
+ println(implicitly[WeakTypeTag[AnyRef]])
+ println(implicitly[WeakTypeTag[Object]] eq WeakTypeTag.Object)
+ println(implicitly[WeakTypeTag[Object]])
+ println(implicitly[WeakTypeTag[Null]] eq WeakTypeTag.Null)
+ println(implicitly[WeakTypeTag[Null]])
+ println(implicitly[WeakTypeTag[Nothing]] eq WeakTypeTag.Nothing)
+ println(implicitly[WeakTypeTag[Nothing]])
+}
\ No newline at end of file
diff --git a/test/files/run/abstypetags_serialize.check b/test/files/run/abstypetags_serialize.check
new file mode 100644
index 0000000..bddc452
--- /dev/null
+++ b/test/files/run/abstypetags_serialize.check
@@ -0,0 +1,2 @@
+java.io.NotSerializableException: Test$$typecreator1$1
+java.io.NotSerializableException: Test$$typecreator2$1
diff --git a/test/files/run/abstypetags_serialize.scala b/test/files/run/abstypetags_serialize.scala
new file mode 100644
index 0000000..93fb5dc
--- /dev/null
+++ b/test/files/run/abstypetags_serialize.scala
@@ -0,0 +1,33 @@
+import java.io._
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ def test(tag: WeakTypeTag[_]) =
+ try {
+ val fout = new ByteArrayOutputStream()
+ val out = new ObjectOutputStream(fout)
+ out.writeObject(tag)
+ out.close()
+ fout.close()
+
+ val fin = new ByteArrayInputStream(fout.toByteArray)
+ val in = new ObjectInputStream(fin)
+ val retag = in.readObject().asInstanceOf[ru.WeakTypeTag[_]].in(cm)
+ in.close()
+ fin.close()
+
+ println(retag)
+ } catch {
+ case ex: Exception =>
+ println(ex)
+ }
+
+ def qwe[T, U[_]] = {
+ test(implicitly[WeakTypeTag[T]])
+ test(implicitly[WeakTypeTag[U[String]]])
+ }
+
+ qwe
+}
\ No newline at end of file
diff --git a/test/files/run/adding-growing-set.scala b/test/files/run/adding-growing-set.scala
index 5903813..ab94b89 100644
--- a/test/files/run/adding-growing-set.scala
+++ b/test/files/run/adding-growing-set.scala
@@ -1,4 +1,4 @@
-/** This will run a a loooong time if Set's builder copies a
+/** This will run a loooong time if Set's builder copies a
* complete new Set for every element.
*/
object Test {
diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check
new file mode 100644
index 0000000..7d8d181
--- /dev/null
+++ b/test/files/run/analyzerPlugins.check
@@ -0,0 +1,196 @@
+adaptBoundsToAnnots(List( <: Int), List(type T), List(Int @testAnn)) [2]
+annotationsConform(Boolean @testAnn, Boolean) [1]
+annotationsConform(Boolean(false), Boolean @testAnn) [1]
+annotationsConform(Int @testAnn, ?A) [1]
+annotationsConform(Int @testAnn, Any) [1]
+annotationsConform(Int @testAnn, Int) [2]
+annotationsConform(Int(1) @testAnn, Int) [1]
+annotationsConform(Int(1), Int @testAnn) [1]
+annotationsConform(Nothing, Int @testAnn) [2]
+annotationsConform(String @testAnn, String) [1]
+canAdaptAnnotations(Trees$Ident, String) [1]
+canAdaptAnnotations(Trees$Select, ?) [1]
+canAdaptAnnotations(Trees$Select, Boolean @testAnn) [1]
+canAdaptAnnotations(Trees$Select, Boolean) [1]
+canAdaptAnnotations(Trees$Select, String @testAnn) [1]
+canAdaptAnnotations(Trees$TypeTree, ?) [10]
+canAdaptAnnotations(Trees$Typed, ?) [3]
+canAdaptAnnotations(Trees$Typed, Any) [1]
+canAdaptAnnotations(Trees$Typed, Int) [1]
+lub(List(Int @testAnn, Int)) [1]
+pluginsPt(?, Trees$Annotated) [7]
+pluginsPt(?, Trees$Apply) [8]
+pluginsPt(?, Trees$ApplyImplicitView) [2]
+pluginsPt(?, Trees$Assign) [7]
+pluginsPt(?, Trees$Block) [7]
+pluginsPt(?, Trees$ClassDef) [2]
+pluginsPt(?, Trees$DefDef) [14]
+pluginsPt(?, Trees$Ident) [49]
+pluginsPt(?, Trees$If) [2]
+pluginsPt(?, Trees$Literal) [20]
+pluginsPt(?, Trees$New) [5]
+pluginsPt(?, Trees$PackageDef) [1]
+pluginsPt(?, Trees$Return) [1]
+pluginsPt(?, Trees$Select) [51]
+pluginsPt(?, Trees$Super) [2]
+pluginsPt(?, Trees$This) [20]
+pluginsPt(?, Trees$TypeApply) [3]
+pluginsPt(?, Trees$TypeBoundsTree) [2]
+pluginsPt(?, Trees$TypeDef) [1]
+pluginsPt(?, Trees$TypeTree) [37]
+pluginsPt(?, Trees$Typed) [1]
+pluginsPt(?, Trees$ValDef) [23]
+pluginsPt(Any, Trees$Literal) [2]
+pluginsPt(Any, Trees$Typed) [1]
+pluginsPt(Array[Any], Trees$ArrayValue) [1]
+pluginsPt(Boolean @testAnn, Trees$Literal) [1]
+pluginsPt(Boolean @testAnn, Trees$Select) [1]
+pluginsPt(Boolean, Trees$Apply) [1]
+pluginsPt(Boolean, Trees$Ident) [1]
+pluginsPt(Boolean, Trees$Literal) [1]
+pluginsPt(Double, Trees$Select) [1]
+pluginsPt(Int @testAnn, Trees$Literal) [1]
+pluginsPt(Int, Trees$Apply) [1]
+pluginsPt(Int, Trees$Ident) [2]
+pluginsPt(Int, Trees$If) [1]
+pluginsPt(Int, Trees$Literal) [6]
+pluginsPt(Int, Trees$Select) [3]
+pluginsPt(List, Trees$Apply) [1]
+pluginsPt(List[Any], Trees$Select) [1]
+pluginsPt(String @testAnn, Trees$Select) [1]
+pluginsPt(String, Trees$Apply) [1]
+pluginsPt(String, Trees$Block) [2]
+pluginsPt(String, Trees$Ident) [4]
+pluginsPt(String, Trees$Literal) [1]
+pluginsPt(String, Trees$Select) [1]
+pluginsPt(String, Trees$Typed) [1]
+pluginsPt(Unit, Trees$Assign) [1]
+pluginsPt(scala.annotation.Annotation, Trees$Apply) [5]
+pluginsTypeSig(<none>, Trees$Template) [2]
+pluginsTypeSig(class A, Trees$ClassDef) [1]
+pluginsTypeSig(class testAnn, Trees$ClassDef) [1]
+pluginsTypeSig(constructor A, Trees$DefDef) [2]
+pluginsTypeSig(constructor testAnn, Trees$DefDef) [1]
+pluginsTypeSig(method foo, Trees$DefDef) [1]
+pluginsTypeSig(method method, Trees$DefDef) [1]
+pluginsTypeSig(method nested, Trees$DefDef) [1]
+pluginsTypeSig(type T, Trees$TypeDef) [2]
+pluginsTypeSig(value annotField, Trees$ValDef) [2]
+pluginsTypeSig(value f, Trees$ValDef) [1]
+pluginsTypeSig(value inferField, Trees$ValDef) [2]
+pluginsTypeSig(value lub1, Trees$ValDef) [2]
+pluginsTypeSig(value lub2, Trees$ValDef) [2]
+pluginsTypeSig(value param, Trees$ValDef) [2]
+pluginsTypeSig(value str, Trees$ValDef) [1]
+pluginsTypeSig(value x, Trees$ValDef) [5]
+pluginsTypeSig(value y, Trees$ValDef) [5]
+pluginsTypeSig(variable count, Trees$ValDef) [3]
+pluginsTypeSigAccessor(value annotField) [1]
+pluginsTypeSigAccessor(value inferField) [1]
+pluginsTypeSigAccessor(value lub1) [1]
+pluginsTypeSigAccessor(value lub2) [1]
+pluginsTypeSigAccessor(value x) [1]
+pluginsTypeSigAccessor(value y) [1]
+pluginsTypeSigAccessor(variable count) [2]
+pluginsTyped( <: Int, Trees$TypeBoundsTree) [2]
+pluginsTyped(()Object, Trees$Select) [1]
+pluginsTyped(()String, Trees$Ident) [1]
+pluginsTyped(()String, Trees$TypeApply) [1]
+pluginsTyped(()scala.annotation.Annotation, Trees$Select) [1]
+pluginsTyped(()testAnn, Trees$Select) [10]
+pluginsTyped((str: String)A <and> (param: Double)A, Trees$Select) [1]
+pluginsTyped((x$1: Any)Boolean <and> (x: Double)Boolean <and> (x: Float)Boolean <and> (x: Long)Boolean <and> (x: Int)Boolean <and> (x: Char)Boolean <and> (x: Short)Boolean <and> (x: Byte)Boolean, Trees$Select) [1]
+pluginsTyped((x$1: Int)Unit, Trees$Select) [1]
+pluginsTyped((x: Double)Double <and> (x: Float)Float <and> (x: Long)Long <and> (x: Int)Int <and> (x: Char)Int <and> (x: Short)Int <and> (x: Byte)Int <and> (x: String)String, Trees$Select) [1]
+pluginsTyped((x: String)scala.collection.immutable.StringOps, Trees$Select) [2]
+pluginsTyped((xs: Array[Any])scala.collection.mutable.WrappedArray[Any], Trees$TypeApply) [1]
+pluginsTyped(<empty>.type, Trees$Ident) [1]
+pluginsTyped(<error>, Trees$Select) [1]
+pluginsTyped(<notype>, Trees$ClassDef) [2]
+pluginsTyped(<notype>, Trees$DefDef) [14]
+pluginsTyped(<notype>, Trees$PackageDef) [1]
+pluginsTyped(<notype>, Trees$TypeDef) [1]
+pluginsTyped(<notype>, Trees$ValDef) [23]
+pluginsTyped(<root>, Trees$Ident) [1]
+pluginsTyped(=> Boolean @testAnn, Trees$Select) [1]
+pluginsTyped(=> Double, Trees$Select) [4]
+pluginsTyped(=> Int, Trees$Select) [5]
+pluginsTyped(=> Int, Trees$TypeApply) [1]
+pluginsTyped(=> String @testAnn, Trees$Select) [1]
+pluginsTyped(A, Trees$Apply) [1]
+pluginsTyped(A, Trees$Ident) [2]
+pluginsTyped(A, Trees$This) [8]
+pluginsTyped(A, Trees$TypeTree) [4]
+pluginsTyped(A.super.type, Trees$Super) [1]
+pluginsTyped(A.this.type, Trees$This) [11]
+pluginsTyped(Any, Trees$TypeTree) [1]
+pluginsTyped(AnyRef, Trees$Select) [2]
+pluginsTyped(Array[Any], Trees$ArrayValue) [1]
+pluginsTyped(Boolean @testAnn, Trees$Select) [1]
+pluginsTyped(Boolean @testAnn, Trees$TypeTree) [4]
+pluginsTyped(Boolean(false), Trees$Literal) [2]
+pluginsTyped(Boolean, Trees$Apply) [1]
+pluginsTyped(Boolean, Trees$Select) [4]
+pluginsTyped(Char('c'), Trees$Literal) [2]
+pluginsTyped(Double, Trees$Select) [6]
+pluginsTyped(Int @testAnn, Trees$TypeTree) [2]
+pluginsTyped(Int @testAnn, Trees$Typed) [2]
+pluginsTyped(Int(0), Trees$Literal) [3]
+pluginsTyped(Int(1) @testAnn, Trees$Typed) [1]
+pluginsTyped(Int(1), Trees$Literal) [9]
+pluginsTyped(Int(2), Trees$Literal) [1]
+pluginsTyped(Int, Trees$Apply) [1]
+pluginsTyped(Int, Trees$Ident) [2]
+pluginsTyped(Int, Trees$If) [2]
+pluginsTyped(Int, Trees$Select) [17]
+pluginsTyped(Int, Trees$TypeTree) [13]
+pluginsTyped(List, Trees$Apply) [1]
+pluginsTyped(List, Trees$Select) [1]
+pluginsTyped(List[Any], Trees$Apply) [1]
+pluginsTyped(List[Any], Trees$Select) [1]
+pluginsTyped(List[Any], Trees$TypeTree) [3]
+pluginsTyped(Nothing, Trees$Return) [1]
+pluginsTyped(Nothing, Trees$Select) [2]
+pluginsTyped(Object, Trees$Apply) [1]
+pluginsTyped(String @testAnn, Trees$Ident) [1]
+pluginsTyped(String @testAnn, Trees$Select) [1]
+pluginsTyped(String @testAnn, Trees$TypeTree) [4]
+pluginsTyped(String(""), Trees$Literal) [2]
+pluginsTyped(String("huhu"), Trees$Literal) [1]
+pluginsTyped(String("str") @testAnn, Trees$Typed) [1]
+pluginsTyped(String("str"), Trees$Literal) [1]
+pluginsTyped(String("str"), Trees$Typed) [1]
+pluginsTyped(String("two"), Trees$Literal) [3]
+pluginsTyped(String, Trees$Apply) [2]
+pluginsTyped(String, Trees$Block) [2]
+pluginsTyped(String, Trees$Ident) [1]
+pluginsTyped(String, Trees$Select) [9]
+pluginsTyped(String, Trees$TypeTree) [8]
+pluginsTyped(Unit, Trees$Apply) [2]
+pluginsTyped(Unit, Trees$Assign) [8]
+pluginsTyped(Unit, Trees$Block) [7]
+pluginsTyped(Unit, Trees$If) [1]
+pluginsTyped(Unit, Trees$Literal) [8]
+pluginsTyped(Unit, Trees$TypeTree) [1]
+pluginsTyped([A](xs: A*)List[A], Trees$Select) [1]
+pluginsTyped([T <: Int]=> Int, Trees$Select) [1]
+pluginsTyped([T0 >: ? <: ?]()T0, Trees$Select) [1]
+pluginsTyped([T](xs: Array[T])scala.collection.mutable.WrappedArray[T], Trees$Select) [1]
+pluginsTyped(annotation.type, Trees$Select) [2]
+pluginsTyped(math.type, Trees$Select) [9]
+pluginsTyped(scala.annotation.Annotation, Trees$Apply) [1]
+pluginsTyped(scala.annotation.TypeConstraint, Trees$Select) [4]
+pluginsTyped(scala.collection.immutable.List.type, Trees$Select) [2]
+pluginsTyped(scala.collection.immutable.StringOps, Trees$ApplyImplicitView) [2]
+pluginsTyped(scala.collection.mutable.WrappedArray[Any], Trees$Apply) [1]
+pluginsTyped(scala.type, Trees$Ident) [1]
+pluginsTyped(scala.type, Trees$Select) [1]
+pluginsTyped(str.type, Trees$Ident) [3]
+pluginsTyped(testAnn, Trees$Apply) [5]
+pluginsTyped(testAnn, Trees$Ident) [5]
+pluginsTyped(testAnn, Trees$New) [5]
+pluginsTyped(testAnn, Trees$This) [1]
+pluginsTyped(testAnn, Trees$TypeTree) [2]
+pluginsTyped(testAnn.super.type, Trees$Super) [1]
+pluginsTyped(type, Trees$Select) [1]
+pluginsTypedReturn(return f, String) [1]
diff --git a/test/files/run/analyzerPlugins.scala b/test/files/run/analyzerPlugins.scala
new file mode 100644
index 0000000..daef83f
--- /dev/null
+++ b/test/files/run/analyzerPlugins.scala
@@ -0,0 +1,121 @@
+import scala.tools.partest._
+import scala.tools.nsc._
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp"
+
+ def code = """
+ class testAnn extends annotation.TypeConstraint
+
+ class A(param: Double) extends { val x: Int = 1; val y = "two"; type T = A } with AnyRef {
+ val inferField = ("str": @testAnn)
+ val annotField: Boolean @testAnn = false
+
+ val lub1 = List('c', (1: Int @testAnn), "")
+ val lub2 = if (annotField) (1: @testAnn) else 2
+
+ def foo[T <: Int] = 0
+ foo[Int @testAnn]
+
+ var count = 0
+
+ math.random // some statement
+
+ def method: String = {
+ math.random
+ val f = inferField
+
+ def nested(): String = {
+ if(count == 1)
+ return f
+ "huhu"
+ }
+ nested()
+ }
+
+ def this(str: String) {
+ this(str.toDouble)
+ math.random
+ count += 1
+ }
+ }
+ """.trim
+
+
+ def show() {
+ val global = newCompiler()
+ import global._
+ import analyzer._
+
+ val output = collection.mutable.ListBuffer[String]()
+
+ object annotChecker extends AnnotationChecker {
+ def hasTestAnn(tps: Type*) = {
+ tps exists (_.annotations.map(_.toString) contains "testAnn")
+ }
+
+ def annotationsConform(tpe1: Type, tpe2: Type): Boolean = {
+ if (hasTestAnn(tpe1, tpe2))
+ output += s"annotationsConform($tpe1, $tpe2)"
+ true
+ }
+
+ override def annotationsLub(tp: Type, ts: List[Type]): Type = {
+ if (hasTestAnn(ts: _*))
+ output += s"lub($ts)"
+ tp
+ }
+
+ override def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = {
+ if (hasTestAnn(targs: _*))
+ output += s"adaptBoundsToAnnots($bounds, $tparams, $targs)"
+ bounds
+ }
+ }
+
+ object analyzerPlugin extends AnalyzerPlugin {
+ def treeClass(t: Tree) = t.getClass.toString.split('.').last
+
+ override def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = {
+ output += s"pluginsPt($pt, ${treeClass(tree)})"
+ pt
+ }
+
+ override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+ output += s"pluginsTyped($tpe, ${treeClass(tree)})"
+ tpe
+ }
+
+ override def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = {
+ output += s"pluginsTypeSig(${defTree.symbol}, ${treeClass(defTree)})"
+ tpe
+ }
+
+ override def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type = {
+ output += s"pluginsTypeSigAccessor(${tree.symbol})"
+ tpe
+ }
+
+
+ override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+ output += s"canAdaptAnnotations(${treeClass(tree)}, $pt)"
+ false
+ }
+
+ override def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = {
+ output += s"pluginsTypedReturn($tree, $pt)"
+ tpe
+ }
+
+ }
+
+ addAnnotationChecker(annotChecker)
+ addAnalyzerPlugin(analyzerPlugin)
+ compileString(global)(code)
+
+ val res = output.groupBy(identity).mapValues(_.size).map { case (k,v) => s"$k [$v]" }.toList.sorted
+ println(res.mkString("\n"))
+ }
+
+}
diff --git a/test/files/run/annotatedRetyping.check b/test/files/run/annotatedRetyping.check
new file mode 100644
index 0000000..b296a80
--- /dev/null
+++ b/test/files/run/annotatedRetyping.check
@@ -0,0 +1,6 @@
+typing List(1, 2).map(((x) => {
+ val another = scala.Tuple2(t.nt, t.tr): @testAnn match {
+ case scala.Tuple2(_, _) => 1
+ };
+ x
+}))
diff --git a/test/files/run/annotatedRetyping.scala b/test/files/run/annotatedRetyping.scala
new file mode 100644
index 0000000..cf1b0f2
--- /dev/null
+++ b/test/files/run/annotatedRetyping.scala
@@ -0,0 +1,62 @@
+import scala.tools.partest._
+import scala.tools.nsc._
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp"
+
+ def code = """
+ class testAnn extends annotation.Annotation
+
+ object t {
+ def nt = 1
+ def tr = "a"
+ }
+
+ class Test {
+ List(1,2).map(x => {
+ val another = ((t.nt, t.tr): @testAnn) match { case (_, _) => 1 }
+ x
+ })
+ }
+ """.trim
+
+
+ // point of this test: type-check the "Annotated" tree twice. first time the analyzer plugin types it,
+ // second time the typer.
+
+ // bug was that typedAnnotated assigned a type to the Annotated tree. The second type check would consider
+ // the tree as alreadyTyped, which is not cool, the Annotated needs to be transformed into a Typed tree.
+
+ def show() {
+ val global = newCompiler()
+ import global._
+ import analyzer._
+ import collection.{mutable => m}
+
+ object analyzerPlugin extends AnalyzerPlugin {
+ val templates: m.Map[Symbol, (Template, Typer)] = m.Map()
+ override def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = {
+ defTree match {
+ case impl: Template =>
+ templates += typer.context.owner -> (impl, typer)
+
+ case dd: DefDef if dd.symbol.isPrimaryConstructor && templates.contains(dd.symbol.owner) =>
+ val (impl, templTyper) = templates(dd.symbol.owner)
+ for (stat <- impl.body.filterNot(_.isDef)) {
+ println("typing "+ stat)
+ val statsOwner = impl.symbol orElse templTyper.context.owner.newLocalDummy(impl.pos)
+ val tpr = analyzer.newTyper(templTyper.context.make(stat, statsOwner))
+ tpr.typed(stat)
+ }
+
+ case _ =>
+ }
+ tpe
+ }
+ }
+
+ addAnalyzerPlugin(analyzerPlugin)
+ compileString(global)(code)
+ }
+}
diff --git a/test/files/run/applydynamic_sip.check b/test/files/run/applydynamic_sip.check
new file mode 100644
index 0000000..6d04dc4
--- /dev/null
+++ b/test/files/run/applydynamic_sip.check
@@ -0,0 +1,29 @@
+qual.applyDynamic(sel)()
+qual.applyDynamic(sel)(a)
+qual.applyDynamic(sel)(a)
+.apply(a2)
+qual.applyDynamic(sel)(a)
+qual.applyDynamic(sel)(a)
+.apply(a2)
+qual.applyDynamicNamed(sel)((arg,a))
+qual.applyDynamicNamed(sel)((arg,a))
+qual.applyDynamicNamed(sel)((,a), (arg2,a2))
+qual.updateDynamic(sel)(expr)
+qual.selectDynamic(sel)
+qual.selectDynamic(sel)
+qual.selectDynamic(sel)
+.update(1, expr)
+qual.selectDynamic(sel)
+.update(expr)
+qual.selectDynamic(sel)
+.apply(1)
+qual.selectDynamic(sel)
+.apply
+.update(1, 1)
+qual.applyDynamic(apply)(a)
+qual.applyDynamic(apply)(a)
+qual.applyDynamic(apply)(a)
+qual.applyDynamic(apply)(a)
+qual.applyDynamicNamed(apply)((arg,a))
+qual.applyDynamicNamed(apply)((,a), (arg2,a2))
+qual.applyDynamic(update)(a, a2)
diff --git a/test/files/run/applydynamic_sip.flags b/test/files/run/applydynamic_sip.flags
new file mode 100644
index 0000000..1141f97
--- /dev/null
+++ b/test/files/run/applydynamic_sip.flags
@@ -0,0 +1 @@
+-language:dynamics
diff --git a/test/files/run/applydynamic_sip.scala b/test/files/run/applydynamic_sip.scala
new file mode 100644
index 0000000..cf918a8
--- /dev/null
+++ b/test/files/run/applydynamic_sip.scala
@@ -0,0 +1,66 @@
+object Test extends App {
+ object stubUpdate {
+ def update(as: Any*) = println(".update"+as.toList.mkString("(",", ", ")"))
+ }
+
+ object stub {
+ def apply = {println(".apply"); stubUpdate}
+ def apply(as: Any*) = println(".apply"+as.toList.mkString("(",", ", ")"))
+ def update(as: Any*) = println(".update"+as.toList.mkString("(",", ", ")"))
+ }
+ class MyDynamic extends Dynamic {
+ def applyDynamic[T](n: String)(as: Any*) = {println("qual.applyDynamic("+ n +")"+ as.toList.mkString("(",", ", ")")); stub}
+ def applyDynamicNamed[T](n: String)(as: (String, Any)*) = {println("qual.applyDynamicNamed("+ n +")"+ as.toList.mkString("(",", ", ")")); stub}
+ def selectDynamic[T](n: String) = {println("qual.selectDynamic("+ n +")"); stub}
+ def updateDynamic(n: String)(x: Any): Unit = {println("qual.updateDynamic("+ n +")("+ x +")")}
+ }
+ val qual = new MyDynamic
+ val expr = "expr"
+ val a = "a"
+ val a2 = "a2"
+ type T = String
+
+ // If qual.sel is followed by a potential type argument list [Ts] and an argument list (arg1, …, argn) where none of the arguments argi are named:
+ // qual.applyDynamic(“sel”)(arg1, …, argn)
+ qual.sel()
+ qual.sel(a)
+ // qual.sel(a, a2: _*) -- should not accept varargs?
+ qual.sel(a)(a2)
+ qual.sel[T](a)
+ qual.sel[T](a)(a2)
+
+ // If qual.sel is followed by a potential type argument list [Ts]
+ // and a non-empty named argument list (x1 = arg1, …, xn = argn) where some name prefixes xi = might be missing:
+ // qual.applyDynamicNamed(“sel”)(xs1 -> arg1, …, xsn -> argn)
+ qual.sel(arg = a)
+ qual.sel[T](arg = a)
+ qual.sel(a, arg2 = "a2")
+ // qual.sel(a)(a2, arg2 = "a2")
+ // qual.sel[T](a)(a2, arg2 = "a2")
+ // qual.sel(arg = a, a2: _*)
+ // qual.sel(arg, arg2 = "a2", a2: _*)
+
+ // If qual.sel appears immediately on the left-hand side of an assigment
+ // qual.updateDynamic(“sel”)(expr)
+ qual.sel = expr
+
+ // If qual.sel, possibly applied to type arguments, but is
+ // not applied to explicit value arguments,
+ // nor immediately followed by an assignment operator:
+ // qual.selectDynamic[Ts](“sel”)
+ qual.sel
+ qual.sel[T]
+
+ qual.sel(1) = expr // parser turns this into qual.sel.update(1, expr)
+ qual.sel() = expr // parser turns this into qual.sel.update(expr)
+ qual.sel.apply(1)
+ qual.sel.apply(1) = 1
+
+ qual.apply(a)
+ qual.apply[String](a)
+ qual(a)
+ qual[String](a)
+ qual[T](arg = a)
+ qual(a, arg2 = "a2")
+ qual(a) = a2
+}
diff --git a/test/files/run/array-charSeq.check b/test/files/run/array-charSeq.check
new file mode 100644
index 0000000..f1f374f
--- /dev/null
+++ b/test/files/run/array-charSeq.check
@@ -0,0 +1,248 @@
+
+[check 'abcdefghi'] len = 9
+sub(0, 9) == 'abcdefghi'
+sub(0, 0) == ''
+sub(1, 9) == 'bcdefghi'
+sub(0, 1) == 'a'
+sub(2, 9) == 'cdefghi'
+sub(0, 2) == 'ab'
+sub(3, 9) == 'defghi'
+sub(0, 3) == 'abc'
+sub(4, 9) == 'efghi'
+sub(0, 4) == 'abcd'
+sub(5, 9) == 'fghi'
+sub(0, 5) == 'abcde'
+sub(6, 9) == 'ghi'
+sub(0, 6) == 'abcdef'
+sub(7, 9) == 'hi'
+sub(0, 7) == 'abcdefg'
+sub(8, 9) == 'i'
+sub(0, 8) == 'abcdefgh'
+
+[check 'bcdefgh'] len = 7
+sub(0, 7) == 'bcdefgh'
+sub(0, 0) == ''
+sub(1, 7) == 'cdefgh'
+sub(0, 1) == 'b'
+sub(2, 7) == 'defgh'
+sub(0, 2) == 'bc'
+sub(3, 7) == 'efgh'
+sub(0, 3) == 'bcd'
+sub(4, 7) == 'fgh'
+sub(0, 4) == 'bcde'
+sub(5, 7) == 'gh'
+sub(0, 5) == 'bcdef'
+sub(6, 7) == 'h'
+sub(0, 6) == 'bcdefg'
+
+[check 'cdefg'] len = 5
+sub(0, 5) == 'cdefg'
+sub(0, 0) == ''
+sub(1, 5) == 'defg'
+sub(0, 1) == 'c'
+sub(2, 5) == 'efg'
+sub(0, 2) == 'cd'
+sub(3, 5) == 'fg'
+sub(0, 3) == 'cde'
+sub(4, 5) == 'g'
+sub(0, 4) == 'cdef'
+
+[check 'def'] len = 3
+sub(0, 3) == 'def'
+sub(0, 0) == ''
+sub(1, 3) == 'ef'
+sub(0, 1) == 'd'
+sub(2, 3) == 'f'
+sub(0, 2) == 'de'
+
+[check 'e'] len = 1
+sub(0, 1) == 'e'
+sub(0, 0) == ''
+
+[check 'abcdefgh'] len = 8
+sub(0, 8) == 'abcdefgh'
+sub(0, 0) == ''
+sub(1, 8) == 'bcdefgh'
+sub(0, 1) == 'a'
+sub(2, 8) == 'cdefgh'
+sub(0, 2) == 'ab'
+sub(3, 8) == 'defgh'
+sub(0, 3) == 'abc'
+sub(4, 8) == 'efgh'
+sub(0, 4) == 'abcd'
+sub(5, 8) == 'fgh'
+sub(0, 5) == 'abcde'
+sub(6, 8) == 'gh'
+sub(0, 6) == 'abcdef'
+sub(7, 8) == 'h'
+sub(0, 7) == 'abcdefg'
+
+[check 'bcdefg'] len = 6
+sub(0, 6) == 'bcdefg'
+sub(0, 0) == ''
+sub(1, 6) == 'cdefg'
+sub(0, 1) == 'b'
+sub(2, 6) == 'defg'
+sub(0, 2) == 'bc'
+sub(3, 6) == 'efg'
+sub(0, 3) == 'bcd'
+sub(4, 6) == 'fg'
+sub(0, 4) == 'bcde'
+sub(5, 6) == 'g'
+sub(0, 5) == 'bcdef'
+
+[check 'cdef'] len = 4
+sub(0, 4) == 'cdef'
+sub(0, 0) == ''
+sub(1, 4) == 'def'
+sub(0, 1) == 'c'
+sub(2, 4) == 'ef'
+sub(0, 2) == 'cd'
+sub(3, 4) == 'f'
+sub(0, 3) == 'cde'
+
+[check 'de'] len = 2
+sub(0, 2) == 'de'
+sub(0, 0) == ''
+sub(1, 2) == 'e'
+sub(0, 1) == 'd'
+
+[check ''] len = 0
+
+[check 'abcdefg'] len = 7
+sub(0, 7) == 'abcdefg'
+sub(0, 0) == ''
+sub(1, 7) == 'bcdefg'
+sub(0, 1) == 'a'
+sub(2, 7) == 'cdefg'
+sub(0, 2) == 'ab'
+sub(3, 7) == 'defg'
+sub(0, 3) == 'abc'
+sub(4, 7) == 'efg'
+sub(0, 4) == 'abcd'
+sub(5, 7) == 'fg'
+sub(0, 5) == 'abcde'
+sub(6, 7) == 'g'
+sub(0, 6) == 'abcdef'
+
+[check 'bcdef'] len = 5
+sub(0, 5) == 'bcdef'
+sub(0, 0) == ''
+sub(1, 5) == 'cdef'
+sub(0, 1) == 'b'
+sub(2, 5) == 'def'
+sub(0, 2) == 'bc'
+sub(3, 5) == 'ef'
+sub(0, 3) == 'bcd'
+sub(4, 5) == 'f'
+sub(0, 4) == 'bcde'
+
+[check 'cde'] len = 3
+sub(0, 3) == 'cde'
+sub(0, 0) == ''
+sub(1, 3) == 'de'
+sub(0, 1) == 'c'
+sub(2, 3) == 'e'
+sub(0, 2) == 'cd'
+
+[check 'd'] len = 1
+sub(0, 1) == 'd'
+sub(0, 0) == ''
+
+[check 'abcdef'] len = 6
+sub(0, 6) == 'abcdef'
+sub(0, 0) == ''
+sub(1, 6) == 'bcdef'
+sub(0, 1) == 'a'
+sub(2, 6) == 'cdef'
+sub(0, 2) == 'ab'
+sub(3, 6) == 'def'
+sub(0, 3) == 'abc'
+sub(4, 6) == 'ef'
+sub(0, 4) == 'abcd'
+sub(5, 6) == 'f'
+sub(0, 5) == 'abcde'
+
+[check 'bcde'] len = 4
+sub(0, 4) == 'bcde'
+sub(0, 0) == ''
+sub(1, 4) == 'cde'
+sub(0, 1) == 'b'
+sub(2, 4) == 'de'
+sub(0, 2) == 'bc'
+sub(3, 4) == 'e'
+sub(0, 3) == 'bcd'
+
+[check 'cd'] len = 2
+sub(0, 2) == 'cd'
+sub(0, 0) == ''
+sub(1, 2) == 'd'
+sub(0, 1) == 'c'
+
+[check ''] len = 0
+
+[check 'abcde'] len = 5
+sub(0, 5) == 'abcde'
+sub(0, 0) == ''
+sub(1, 5) == 'bcde'
+sub(0, 1) == 'a'
+sub(2, 5) == 'cde'
+sub(0, 2) == 'ab'
+sub(3, 5) == 'de'
+sub(0, 3) == 'abc'
+sub(4, 5) == 'e'
+sub(0, 4) == 'abcd'
+
+[check 'bcd'] len = 3
+sub(0, 3) == 'bcd'
+sub(0, 0) == ''
+sub(1, 3) == 'cd'
+sub(0, 1) == 'b'
+sub(2, 3) == 'd'
+sub(0, 2) == 'bc'
+
+[check 'c'] len = 1
+sub(0, 1) == 'c'
+sub(0, 0) == ''
+
+[check 'abcd'] len = 4
+sub(0, 4) == 'abcd'
+sub(0, 0) == ''
+sub(1, 4) == 'bcd'
+sub(0, 1) == 'a'
+sub(2, 4) == 'cd'
+sub(0, 2) == 'ab'
+sub(3, 4) == 'd'
+sub(0, 3) == 'abc'
+
+[check 'bc'] len = 2
+sub(0, 2) == 'bc'
+sub(0, 0) == ''
+sub(1, 2) == 'c'
+sub(0, 1) == 'b'
+
+[check ''] len = 0
+
+[check 'abc'] len = 3
+sub(0, 3) == 'abc'
+sub(0, 0) == ''
+sub(1, 3) == 'bc'
+sub(0, 1) == 'a'
+sub(2, 3) == 'c'
+sub(0, 2) == 'ab'
+
+[check 'b'] len = 1
+sub(0, 1) == 'b'
+sub(0, 0) == ''
+
+[check 'ab'] len = 2
+sub(0, 2) == 'ab'
+sub(0, 0) == ''
+sub(1, 2) == 'b'
+sub(0, 1) == 'a'
+
+[check ''] len = 0
+
+[check 'a'] len = 1
+sub(0, 1) == 'a'
+sub(0, 0) == ''
diff --git a/test/files/run/array-charSeq.scala b/test/files/run/array-charSeq.scala
new file mode 100644
index 0000000..f7d0586
--- /dev/null
+++ b/test/files/run/array-charSeq.scala
@@ -0,0 +1,27 @@
+object Test {
+ val arr = Array[Char]('a' to 'i': _*)
+ var xs: CharSequence = arr
+ val hash = xs.hashCode
+
+ def check(chars: CharSequence) {
+ println("\n[check '" + chars + "'] len = " + chars.length)
+ chars match {
+ case x: runtime.ArrayCharSequence => assert(x.xs eq arr, ((x.xs, arr)))
+ case x => assert(false, x)
+ }
+
+ 0 until chars.length foreach { i =>
+ println("sub(%s, %s) == '%s'".format(i, chars.length, chars.subSequence(i, chars.length)))
+ println("sub(%s, %s) == '%s'".format(0, i, chars.subSequence(0, i)))
+ }
+ if (chars.length >= 2)
+ check(chars.subSequence(1, chars.length - 1))
+ }
+
+ def main(args: Array[String]): Unit = {
+ while (xs.length > 0) {
+ check(xs)
+ xs = xs.subSequence(0, xs.length - 1)
+ }
+ }
+}
diff --git a/test/files/run/array-existential-bound.check b/test/files/run/array-existential-bound.check
new file mode 100644
index 0000000..f5cca84
--- /dev/null
+++ b/test/files/run/array-existential-bound.check
@@ -0,0 +1,4 @@
+2
+1000
+1000
+26
diff --git a/test/files/run/array-existential-bound.scala b/test/files/run/array-existential-bound.scala
new file mode 100644
index 0000000..bc442d3
--- /dev/null
+++ b/test/files/run/array-existential-bound.scala
@@ -0,0 +1,17 @@
+trait Fooz[Q <: Array[_]] {
+ def f0(x: Q) = x.length
+}
+
+object Test extends Fooz[Array[Int]] {
+ val f1 = new Fooz[Array[String]] { }
+ val f2 = new Fooz[Array[Int]] { }
+ val f3 = new Fooz[Array[Any]] { }
+ val f4 = new Fooz[Array[_]] { }
+
+ def main(args: Array[String]): Unit = {
+ println(f1.f0(Array[String]("a", "b")))
+ println(f2.f0(1 to 1000 toArray))
+ println(f3.f0((1 to 1000).toArray[Any]))
+ println(f4.f0('a' to 'z' toArray))
+ }
+}
diff --git a/test/files/run/arrayclone-new.scala b/test/files/run/arrayclone-new.scala
new file mode 100644
index 0000000..506e4f5
--- /dev/null
+++ b/test/files/run/arrayclone-new.scala
@@ -0,0 +1,108 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App{
+ BooleanArrayClone;
+ ByteArrayClone;
+ ShortArrayClone;
+ CharArrayClone;
+ IntArrayClone;
+ LongArrayClone;
+ FloatArrayClone;
+ DoubleArrayClone;
+ ObjectArrayClone;
+ PolymorphicArrayClone;
+}
+
+object BooleanArrayClone{
+ val it : Array[Boolean] = Array(true, false);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = false;
+ assert(it(0) == true)
+}
+
+object ByteArrayClone{
+ val it : Array[Byte] = Array(1, 0);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = 0;
+ assert(it(0) == 1)
+}
+
+object ShortArrayClone{
+ val it : Array[Short] = Array(1, 0);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = 0;
+ assert(it(0) == 1)
+}
+
+object CharArrayClone{
+ val it : Array[Char] = Array(1, 0);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = 0;
+ assert(it(0) == 1)
+}
+
+object IntArrayClone{
+ val it : Array[Int] = Array(1, 0);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = 0;
+ assert(it(0) == 1)
+}
+
+object LongArrayClone{
+ val it : Array[Long] = Array(1, 0);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = 0;
+ assert(it(0) == 1)
+}
+
+object FloatArrayClone{
+ val it : Array[Float] = Array(1, 0);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = 0;
+ assert(it(0) == 1)
+}
+
+object DoubleArrayClone{
+ val it : Array[Double] = Array(1, 0);
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = 0;
+ assert(it(0) == 1)
+}
+
+object ObjectArrayClone{
+ val it : Array[String] = Array("1", "0");
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = "0";
+ assert(it(0) == "1")
+}
+
+object PolymorphicArrayClone{
+ def testIt[T](it : Array[T], one : T, zero : T) = {
+ val cloned = it.clone();
+ assert(cloned.sameElements(it));
+ cloned(0) = zero;
+ assert(it(0) == one)
+ }
+
+ testIt(Array("one", "two"), "one", "two");
+
+ class Mangler[T: ClassTag](ts : T*){
+ // this will always be a BoxedAnyArray even after we've unboxed its contents.
+ val it = ts.toArray[T];
+ }
+
+ val mangled = new Mangler[Int](0, 1);
+
+ val y : Array[Int] = mangled.it; // make sure it's unboxed
+
+ testIt(mangled.it, 0, 1);
+}
\ No newline at end of file
diff --git a/test/files/run/arrayclone.scala b/test/files/run/arrayclone-old.scala
similarity index 100%
rename from test/files/run/arrayclone.scala
rename to test/files/run/arrayclone-old.scala
diff --git a/test/files/run/arrays.scala b/test/files/run/arrays.scala
index cd1cf13..ecebc78 100644
--- a/test/files/run/arrays.scala
+++ b/test/files/run/arrays.scala
@@ -167,41 +167,39 @@ object Test {
//##########################################################################
// Values
- import Math._
-
val u0: Unit = ();
val u1: Unit = ();
val z0: Boolean = false;
val z1: Boolean = true;
- val b0: Byte = MIN_BYTE;
+ val b0: Byte = Byte.MinValue;
val b1: Byte = 1;
- val b2: Byte = MAX_BYTE;
+ val b2: Byte = Byte.MaxValue;
- val s0: Short = MIN_SHORT;
+ val s0: Short = Short.MinValue;
val s1: Short = 2;
- val s2: Short = MAX_SHORT;
+ val s2: Short = Short.MaxValue;
- val c0: Char = MIN_CHAR;
+ val c0: Char = Char.MinValue;
val c1: Char = '3';
- val c2: Char = MAX_CHAR;
+ val c2: Char = Char.MaxValue;
- val i0: Int = MIN_INT;
+ val i0: Int = Int.MinValue;
val i1: Int = 4;
- val i2: Int = MAX_INT;
+ val i2: Int = Int.MinValue;
- val l0: Long = MIN_LONG;
+ val l0: Long = Long.MinValue;
val l1: Int = 5;
- val l2: Long = MAX_LONG;
+ val l2: Long = Long.MaxValue;
- val f0: Float = MIN_FLOAT;
+ val f0: Float = Float.MinValue;
val f1: Int = 6;
- val f2: Float = MAX_FLOAT;
+ val f2: Float = Float.MaxValue;
- val d0: Double = MIN_DOUBLE;
+ val d0: Double = Double.MinValue;
val d1: Int = 7;
- val d2: Double = MAX_DOUBLE;
+ val d2: Double = Double.MaxValue;
val a0: Unit = ();
val a1: Boolean = false;
diff --git a/test/files/run/bug4062.check b/test/files/run/backreferences.check
similarity index 100%
copy from test/files/run/bug4062.check
copy to test/files/run/backreferences.check
diff --git a/test/files/run/backreferences.scala b/test/files/run/backreferences.scala
new file mode 100644
index 0000000..335cd6c
--- /dev/null
+++ b/test/files/run/backreferences.scala
@@ -0,0 +1,13 @@
+case class Elem[T](x: T, y: T)
+
+object Test {
+ def unrolled[T](x: Any, y: Any, z: Any) = (x, y, z) match {
+ case (el: Elem[_], el.x, el.y) => true
+ case _ => false
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(unrolled(Elem("bippy", 5), "bippy", 6))
+ println(unrolled(Elem("bippy", 5), "bippy", 5))
+ }
+}
diff --git a/test/files/run/bigDecimalTest.check b/test/files/run/bigDecimalTest.check
new file mode 100644
index 0000000..6d11c23
--- /dev/null
+++ b/test/files/run/bigDecimalTest.check
@@ -0,0 +1,6 @@
+34
+83
+0
+0
+0
+14
diff --git a/test/files/run/bigDecimalTest.scala b/test/files/run/bigDecimalTest.scala
new file mode 100644
index 0000000..07b524c
--- /dev/null
+++ b/test/files/run/bigDecimalTest.scala
@@ -0,0 +1,35 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+
+ // SI-4981: avoid being limited by math context when not needed
+ val big = BigDecimal("32432875832753287583275382753288325325328532875325")
+ val f = big % BigDecimal(scala.math.Pi)
+
+ // SI-1812: use math context to limit decimal expansion
+ val a = BigDecimal(1) / BigDecimal(3)
+ val b = BigDecimal(1) / big
+
+ // SI-2199: implicit conversions from java.math.BigDecimal to BigDecimal
+ val c = BigDecimal(1) + (new java.math.BigDecimal(3))
+
+ // SI-2024: correctly use BigDecimal.valueOf
+ assert(BigDecimal(123) + 1.1 == BigDecimal("124.1"))
+
+ // SI-3206: BigDecimal cache errors
+ val d = BigDecimal(2, new java.math.MathContext(33))
+ val e = BigDecimal(2, new java.math.MathContext(34))
+ assert(d.mc != e.mc)
+
+ // SI-921
+ assert(BigDecimal(2) / BigDecimal(0.5) == BigDecimal(4))
+
+ // SI-2304: enforce equals/hashCode contract
+ assert(BigDecimal("2").hashCode == BigDecimal("2.00").hashCode)
+
+ // SI-4547: implicit conversion
+ assert(5 + BigDecimal(3) == BigDecimal(8))
+
+ // meaningless sanity check
+ List[BigDecimal](a, b, c, d, e, f) map (_.scale) foreach println
+ }
+}
diff --git a/test/files/run/bitsets-msil.check b/test/files/run/bitsets-msil.check
deleted file mode 100644
index b187571..0000000
--- a/test/files/run/bitsets-msil.check
+++ /dev/null
@@ -1,33 +0,0 @@
-ms0 = BitSet(2)
-ms1 = BitSet(2)
-ms2 = BitSet(2)
-mb0 = False
-mb1 = True
-mb2 = False
-xs0 = List(2)
-xs1 = List(2)
-xs2 = List(2)
-ma0 = List(2)
-ma1 = List(2)
-ma2 = List(2)
-mi0 = BitSet(2)
-mi1 = BitSet(2)
-mi2 = BitSet(2)
-
-is0 = BitSet()
-is1 = BitSet()
-is2 = BitSet(2)
-is3 = BitSet()
-ib0 = False
-ib1 = False
-ib2 = True
-ib3 = False
-ys0 = List()
-ys1 = List()
-ys2 = List(2)
-ys3 = List()
-ia0 = List()
-ia1 = List()
-ia2 = List(2)
-ia3 = List()
-
diff --git a/test/files/run/bitsets.check b/test/files/run/bitsets.check
index 478de26..3f01d2a 100644
--- a/test/files/run/bitsets.check
+++ b/test/files/run/bitsets.check
@@ -14,6 +14,29 @@ mi0 = BitSet(2)
mi1 = BitSet(2)
mi2 = BitSet(2)
+m2_m0 = List(1010101010101010101010101)
+m2_m2 = List(ffffffffffffffff, ffffffffffffffff, ffffffffffffffff, ffffffffffffffff, 1, 0, 0, 0)
+m2_m0c = true
+m2_m1c = true
+m2_m2c = true
+m2_m3c = true
+m2_i0 = true
+m2_i1 = true
+m2_i2 = true
+m2_i3 = true
+m2_f0 = true
+m2_f1 = true
+m2_f2 = true
+m2_f3 = true
+m2_t0 = true
+m2_t1 = true
+m2_t2 = true
+m2_t3 = true
+m2_r0 = true
+m2_r1 = true
+m2_r2 = true
+m2_r3 = true
+
is0 = BitSet()
is1 = BitSet()
is2 = BitSet(2)
@@ -31,3 +54,26 @@ ia1 = List()
ia2 = List(2)
ia3 = List()
+i2_m0 = List(1010101010101010101010101)
+i2_m2 = List(ffffffffffffffff, ffffffffffffffff, ffffffffffffffff, ffffffffffffffff, 1)
+i2_m0c = true
+i2_m1c = true
+i2_m2c = true
+i2_m3c = true
+i2_i0 = true
+i2_i1 = true
+i2_i2 = true
+i2_i3 = true
+i2_f0 = true
+i2_f1 = true
+i2_f2 = true
+i2_f3 = true
+i2_t0 = true
+i2_t1 = true
+i2_t2 = true
+i2_t3 = true
+i2_r0 = true
+i2_r1 = true
+i2_r2 = true
+i2_r3 = true
+
diff --git a/test/files/run/bitsets.scala b/test/files/run/bitsets.scala
index a847c99..2739568 100644
--- a/test/files/run/bitsets.scala
+++ b/test/files/run/bitsets.scala
@@ -39,6 +39,48 @@ object TestMutable {
Console.println
}
+object TestMutable2 {
+ import scala.collection.mutable.BitSet
+ import scala.collection.immutable.TreeSet
+
+ val l0 = 0 to 24 by 2 toList
+ val l1 = (190 to 255 toList) reverse
+ val l2 = (0 to 256 toList)
+ val l3 = (1 to 200 by 2 toList) reverse
+ val t0 = TreeSet(l0: _*)
+ val t1 = TreeSet(l1: _*)
+ val t2 = TreeSet(l2: _*)
+ val t3 = TreeSet(l3: _*)
+ val b0 = BitSet(l0: _*)
+ val b1 = BitSet(l1: _*)
+ val b2 = BitSet(l2: _*)
+ val b3 = BitSet(l3: _*)
+
+ println("m2_m0 = " + b0.toBitMask.toList.map(_.toBinaryString))
+ println("m2_m2 = " + b2.toBitMask.toList.map(_.toHexString))
+ println("m2_m0c = " + (BitSet.fromBitMask(b0.toBitMask) == b0))
+ println("m2_m1c = " + (BitSet.fromBitMask(b1.toBitMask) == b1))
+ println("m2_m2c = " + (BitSet.fromBitMask(b2.toBitMask) == b2))
+ println("m2_m3c = " + (BitSet.fromBitMask(b3.toBitMask) == b3))
+ println("m2_i0 = " + (t0 == b0))
+ println("m2_i1 = " + (t1 == b1))
+ println("m2_i2 = " + (t2 == b2))
+ println("m2_i3 = " + (t3 == b3))
+ println("m2_f0 = " + (t0.from(42) == b0.from(42)))
+ println("m2_f1 = " + (t1.from(42) == b1.from(42)))
+ println("m2_f2 = " + (t2.from(42) == b2.from(42)))
+ println("m2_f3 = " + (t3.from(42) == b3.from(42)))
+ println("m2_t0 = " + (t0.to(195) == b0.to(195)))
+ println("m2_t1 = " + (t1.to(195) == b1.to(195)))
+ println("m2_t2 = " + (t2.to(195) == b2.to(195)))
+ println("m2_t3 = " + (t3.to(195) == b3.to(195)))
+ println("m2_r0 = " + (t0.range(43,194) == b0.range(43,194)))
+ println("m2_r1 = " + (t1.range(43,194) == b1.range(43,194)))
+ println("m2_r2 = " + (t2.range(43,194) == b2.range(43,194)))
+ println("m2_r3 = " + (t3.range(43,194) == b3.range(43,194)))
+ println
+}
+
object TestImmutable {
import scala.collection.immutable.BitSet
@@ -69,9 +111,52 @@ object TestImmutable {
Console.println
}
+object TestImmutable2 {
+ import scala.collection.immutable.{BitSet, TreeSet}
+
+ val l0 = 0 to 24 by 2 toList
+ val l1 = (190 to 255 toList) reverse
+ val l2 = (0 to 256 toList)
+ val l3 = (1 to 200 by 2 toList) reverse
+ val t0 = TreeSet(l0: _*)
+ val t1 = TreeSet(l1: _*)
+ val t2 = TreeSet(l2: _*)
+ val t3 = TreeSet(l3: _*)
+ val b0 = BitSet(l0: _*)
+ val b1 = BitSet(l1: _*)
+ val b2 = BitSet(l2: _*)
+ val b3 = BitSet(l3: _*)
+
+ println("i2_m0 = " + b0.toBitMask.toList.map(_.toBinaryString))
+ println("i2_m2 = " + b2.toBitMask.toList.map(_.toHexString))
+ println("i2_m0c = " + (BitSet.fromBitMask(b0.toBitMask) == b0))
+ println("i2_m1c = " + (BitSet.fromBitMask(b1.toBitMask) == b1))
+ println("i2_m2c = " + (BitSet.fromBitMask(b2.toBitMask) == b2))
+ println("i2_m3c = " + (BitSet.fromBitMask(b3.toBitMask) == b3))
+ println("i2_i0 = " + (t0 == b0))
+ println("i2_i1 = " + (t1 == b1))
+ println("i2_i2 = " + (t2 == b2))
+ println("i2_i3 = " + (t3 == b3))
+ println("i2_f0 = " + (t0.from(42) == b0.from(42)))
+ println("i2_f1 = " + (t1.from(42) == b1.from(42)))
+ println("i2_f2 = " + (t2.from(42) == b2.from(42)))
+ println("i2_f3 = " + (t3.from(42) == b3.from(42)))
+ println("i2_t0 = " + (t0.to(195) == b0.to(195)))
+ println("i2_t1 = " + (t1.to(195) == b1.to(195)))
+ println("i2_t2 = " + (t2.to(195) == b2.to(195)))
+ println("i2_t3 = " + (t3.to(195) == b3.to(195)))
+ println("i2_r0 = " + (t0.range(77,194) == b0.range(77,194)))
+ println("i2_r1 = " + (t1.range(77,194) == b1.range(77,194)))
+ println("i2_r2 = " + (t2.range(77,194) == b2.range(77,194)))
+ println("i2_r3 = " + (t3.range(77,194) == b3.range(77,194)))
+ println
+}
+
object Test extends App {
TestMutable
+ TestMutable2
TestImmutable
+ TestImmutable2
}
//############################################################################
diff --git a/test/files/run/boolord-msil.check b/test/files/run/boolord-msil.check
deleted file mode 100644
index e5f1daa..0000000
--- a/test/files/run/boolord-msil.check
+++ /dev/null
@@ -1,4 +0,0 @@
-false < false = False
-false < true = True
-true < false = False
-true < true = False
diff --git a/test/files/run/buffer-slice.check b/test/files/run/buffer-slice.check
new file mode 100644
index 0000000..5287aa9
--- /dev/null
+++ b/test/files/run/buffer-slice.check
@@ -0,0 +1 @@
+ArrayBuffer()
diff --git a/test/files/run/buffer-slice.scala b/test/files/run/buffer-slice.scala
new file mode 100644
index 0000000..ddd82e0
--- /dev/null
+++ b/test/files/run/buffer-slice.scala
@@ -0,0 +1,5 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(scala.collection.mutable.ArrayBuffer().slice(102450392, -2045033354))
+ }
+}
diff --git a/test/files/run/bug1044.scala b/test/files/run/bug1044.scala
deleted file mode 100644
index 7ac5058..0000000
--- a/test/files/run/bug1044.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object Test extends App {
- val ducks = Array[AnyRef]("Huey", "Dewey", "Louie");
- ducks.elements.asInstanceOf[Iterator[String]]
-}
diff --git a/test/files/run/bug1192.scala b/test/files/run/bug1192.scala
deleted file mode 100644
index a32cbf5..0000000
--- a/test/files/run/bug1192.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object Test extends App {
- val v1: Array[Array[Int]] = Array(Array(1, 2), Array(3, 4))
- def f[T](w: Array[Array[T]]) {
- for (val r <- w) println(r.deep.toString)
- }
- f(v1)
-}
diff --git a/test/files/run/bug1309.scala b/test/files/run/bug1309.scala
deleted file mode 100644
index 6b5167e..0000000
--- a/test/files/run/bug1309.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object Test {
- def f(ras: => RandomAccessSeq[Byte]): RandomAccessSeq[Byte] = ras
-
- def main(args: Array[String]): Unit = {
- f(new Array[Byte](0))
- }
-}
diff --git a/test/files/run/bug2005.scala b/test/files/run/bug2005.scala
deleted file mode 100644
index 4176709..0000000
--- a/test/files/run/bug2005.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-object Test {
- def main(args: Array[String]) {
- val a = new Array[Array[Int]](2,2)
- test(a)
- }
- def test[A](t: Array[Array[A]]) {
- val tmp = t(0)
- t(1) = tmp
- }
-}
diff --git a/test/files/run/bug2124.check b/test/files/run/bug2124.check
deleted file mode 100644
index 2b88402..0000000
--- a/test/files/run/bug2124.check
+++ /dev/null
@@ -1 +0,0 @@
-<p><lost></lost><q></q></p>
diff --git a/test/files/run/bug2125.check b/test/files/run/bug2125.check
deleted file mode 100644
index 2b88402..0000000
--- a/test/files/run/bug2125.check
+++ /dev/null
@@ -1 +0,0 @@
-<p><lost></lost><q></q></p>
diff --git a/test/files/run/bug3613.scala b/test/files/run/bug3613.scala
deleted file mode 100644
index c3b2495..0000000
--- a/test/files/run/bug3613.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-class Boopy {
- private val s = new Schnuck
- def observer : PartialFunction[ Any, Unit ] = s.observer
-
- private class Schnuck extends javax.swing.AbstractListModel {
- model =>
- val observer : PartialFunction[ Any, Unit ] = {
- case "Boopy" => fireIntervalAdded( model, 0, 1 )
- }
- def getSize = 0
- def getElementAt( idx: Int ) : AnyRef = "egal"
- }
-
-}
-
-object Test {
- def main(args: Array[String]): Unit = {
- val x = new Boopy
- val o = x.observer
- o( "Boopy" ) // --> throws runtime error
- }
-}
diff --git a/test/files/run/bug4110.scala b/test/files/run/bug4110.scala
deleted file mode 100644
index 4bd377b..0000000
--- a/test/files/run/bug4110.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-object Test extends App {
- def inferredType[T : Manifest](v : T) = println(manifest[T])
-
- trait A
- trait B
-
- inferredType(new A with B)
-
- val name = new A with B
- inferredType(name)
-}
\ No newline at end of file
diff --git a/test/files/run/bug4317/S_1.scala b/test/files/run/bug4317/S_1.scala
deleted file mode 100644
index 2de4082..0000000
--- a/test/files/run/bug4317/S_1.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object S_1 {
- def foo1(x: Class[_ <: AnyRef]) = 0
- def foo2(x: Class[_ <: AnyRef], y: Int) = 99
- def foo3[T](x: Int, y: Int) = x + y
- def foo4a(x: Unit): Unit = ()
- def foo4[T](x: Unit): Unit = ()
- def foo5[T <: Unit](x: T): T = sys.error("")
- def foo6[T](x: Class[_], y: Class[T], z: Class[_ <: T]) = ((x, y, z))
-}
diff --git a/test/files/run/bug4671.check b/test/files/run/bug4671.check
deleted file mode 100644
index dc92c9a..0000000
--- a/test/files/run/bug4671.check
+++ /dev/null
@@ -1,46 +0,0 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> object o { val file = sys.props("partest.cwd") + "/bug4671.scala" }
-defined module o
-
-scala> val s = scala.io.Source.fromFile(o.file)
-s: scala.io.BufferedSource = non-empty iterator
-
-scala> println(s.getLines.mkString("\n"))
-import scala.tools.partest.ReplTest
-
-object Test extends ReplTest {
- // My god...it's full of quines
- def code = """
-object o { val file = sys.props("partest.cwd") + "/bug4671.scala" }
-val s = scala.io.Source.fromFile(o.file)
-println(s.getLines.mkString("\n"))
-
-val s = scala.io.Source.fromFile(o.file)
-println(s.mkString(""))
-""".trim
-}
-
-scala>
-
-scala> val s = scala.io.Source.fromFile(o.file)
-s: scala.io.BufferedSource = non-empty iterator
-
-scala> println(s.mkString(""))
-import scala.tools.partest.ReplTest
-
-object Test extends ReplTest {
- // My god...it's full of quines
- def code = """
-object o { val file = sys.props("partest.cwd") + "/bug4671.scala" }
-val s = scala.io.Source.fromFile(o.file)
-println(s.getLines.mkString("\n"))
-
-val s = scala.io.Source.fromFile(o.file)
-println(s.mkString(""))
-""".trim
-}
-
-
-scala>
diff --git a/test/files/run/bug4671.scala b/test/files/run/bug4671.scala
deleted file mode 100644
index 86c38d6..0000000
--- a/test/files/run/bug4671.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-import scala.tools.partest.ReplTest
-
-object Test extends ReplTest {
- // My god...it's full of quines
- def code = """
-object o { val file = sys.props("partest.cwd") + "/bug4671.scala" }
-val s = scala.io.Source.fromFile(o.file)
-println(s.getLines.mkString("\n"))
-
-val s = scala.io.Source.fromFile(o.file)
-println(s.mkString(""))
-""".trim
-}
diff --git a/test/files/run/bug4710.check b/test/files/run/bug4710.check
deleted file mode 100644
index aa2f08d..0000000
--- a/test/files/run/bug4710.check
+++ /dev/null
@@ -1,7 +0,0 @@
-Type in expressions to have them evaluated.
-Type :help for more information.
-
-scala> def method : String = { implicit def f(s: Symbol) = "" ; 'symbol }
-method: String
-
-scala>
diff --git a/test/files/run/bug576.scala b/test/files/run/bug576.scala
deleted file mode 100644
index dc09d8d..0000000
--- a/test/files/run/bug576.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-class A {
- override def equals(other: Any) = other match {
- case _: this.type => true
- case _ => false
- }
-}
-
-object Dingus {
- def IamDingus = 5
-}
-
-object Test {
- val x1 = new A
- val x2 = new A
-
- val x3 = new { self =>
- override def equals(other : Any) = other match {
- case that: self.type => true
- case _ => false
- }
- }
- val x4 = new { self =>
- def f(x: Any) = x match {
- case _: x1.type => 1
- case _: x2.type => 2
- case _: x3.type => 3
- case _: self.type => 4
- case x: Dingus.type => x.IamDingus
- }
- }
-
- def main(args: Array[String]): Unit = {
-
- assert(x1 == x1)
- assert(x1 != x2)
- assert(x1 != ())
- assert(x2 != x1)
-
- assert(x3 == x3)
- assert(x3 != x2)
- assert(x2 != x3)
-
- List(x1, x2, x3, x4, Dingus) map x4.f foreach println
- }
-}
\ No newline at end of file
diff --git a/test/files/run/bugs-msil.check b/test/files/run/bugs-msil.check
deleted file mode 100644
index 4e15409..0000000
--- a/test/files/run/bugs-msil.check
+++ /dev/null
@@ -1,96 +0,0 @@
-<<< bug 98
-mycase
->>> bug 98
-
-<<< bug 120
-one
-A
-B
-C
->>> bug 120
-
-<<< bug 135
-Some(The answer)
->>> bug 135
-
-<<< bug 142
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
->>> bug 142
-
-<<< bug 166
->>> bug 166
-
-<<< bug 167
->>> bug 167
-
-<<< bug 168
->>> bug 168
-
-<<< bug 174
->>> bug 174
-
-<<< bug 176
-1
->>> bug 176
-
-<<< bug 199
->>> bug 199
-
-<<< bug 213
-Cannot cast unit to Nothing
-Cannot cast empty string to Null
->>> bug 213
-
-<<< bug 217
->>> bug 217
-
-<<< bug 222
->>> bug 222
-
-<<< bug 225
->>> bug 225
-
-<<< bug 226
->>> bug 226
-
-<<< bug 233
-True
->>> bug 233
-
-<<< bug 250
->>> bug 250
-
-<<< bug 257
-I should come 1st and 2nd
-I should come 1st and 2nd
-I should come last
->>> bug 257
-
-<<< bug 266
-hello
-4
->>> bug 266
-
-<<< bug 316
->>> bug 316
-
-<<< bug 328
->>> bug 328
-
-<<< bug 396
-A
-B
-C
->>> bug 396
-
-<<< bug 399
-a
->>> bug 399
-
diff --git a/test/files/run/bugs.scala b/test/files/run/bugs.scala
index d5905af..ca59860 100644
--- a/test/files/run/bugs.scala
+++ b/test/files/run/bugs.scala
@@ -445,8 +445,7 @@ object Test {
test;
} catch {
case exception =>
- val curr: String = currentThread.toString();
- Console.print("Exception in thread \"" + curr + "\" " + exception);
+ Console.print("Exception in thread \"" + Thread.currentThread + "\" " + exception);
Console.println;
errors += 1
}
diff --git a/test/files/run/byname.scala b/test/files/run/byname.scala
index 8aff1ec..1325552 100644
--- a/test/files/run/byname.scala
+++ b/test/files/run/byname.scala
@@ -36,13 +36,13 @@ def testVarargs(x: Int*) = x.reduceLeft((x: Int, y: Int) => x + y)
test("varargs", 4, testVarargs(1, 2, 1))
val testVarargsR = testVarargs _
-test("varargs r", 4, testVarargsR(1, 2, 1))
+test("varargs r", 4, testVarargsR(Seq(1, 2, 1)))
def testAll(x: Int, y: => Int, z: Int*) = x + y + z.size
test("all", 5, testAll(1, 2, 22, 23))
val testAllR = testAll _
-test("all r", 7, testAllR(2, 3, 34, 35))
+test("all r", 7, testAllR(2, 3, Seq(34, 35)))
val testAllS: (Int, =>Int, Int*) => Int = testAll _
test("all s", 8, testAllS(1, 5, 78, 89))
@@ -73,7 +73,7 @@ def testCVV(a: Int*)(z: String, b: Int*) = a.size + b.size
test("cvv", 3, testCVV(1, 2)("", 8))
val testCVVR = testCVV _
-test("cvv r", 3, testCVVR(1)("", 8, 9))
+test("cvv r", 3, testCVVR(Seq(1))("", Seq(8, 9)))
val testCVVRS: (String, Int*) => Int = testCVV(2, 3)
test("cvv rs", 4, testCVVRS("", 5, 6))
diff --git a/test/files/run/bytecodecs.scala b/test/files/run/bytecodecs.scala
index bf8a0f8..837be0e 100644
--- a/test/files/run/bytecodecs.scala
+++ b/test/files/run/bytecodecs.scala
@@ -1,4 +1,4 @@
-import scala.reflect.generic.ByteCodecs._
+import scala.reflect.internal.pickling.ByteCodecs._
object Test {
diff --git a/test/files/run/caseClassHash.check b/test/files/run/caseClassHash.check
new file mode 100644
index 0000000..b5a6f08
--- /dev/null
+++ b/test/files/run/caseClassHash.check
@@ -0,0 +1,9 @@
+Foo(true,-1,-1,d,-5,-10,500.0,500.0,List(),5.0)
+Foo(true,-1,-1,d,-5,-10,500.0,500.0,List(),5)
+1383698062
+1383698062
+true
+## method 1: 1383698062
+## method 2: 1383698062
+ Murmur 1: 1383698062
+ Murmur 2: 1383698062
diff --git a/test/files/run/caseClassHash.scala b/test/files/run/caseClassHash.scala
new file mode 100644
index 0000000..c5cb09c
--- /dev/null
+++ b/test/files/run/caseClassHash.scala
@@ -0,0 +1,37 @@
+case class Foo[T](a: Boolean, b: Byte, c: Short, d: Char, e: Int, f: Long, g: Double, h: Float, i: AnyRef, j: T) { }
+
+object Test {
+ def mkFoo[T](x: T) = Foo[T](true, -1, -1, 100, -5, -10, 500d, 500f, Nil, x)
+
+ def main(args: Array[String]): Unit = {
+ val foo1 = mkFoo[Double](5.0d)
+ val foo2 = mkFoo[Long](5l)
+
+ List(foo1, foo2, foo1.##, foo2.##, foo1 == foo2) foreach println
+
+ println("## method 1: " + foo1.##)
+ println("## method 2: " + foo2.##)
+ println(" Murmur 1: " + scala.util.hashing.MurmurHash3.productHash(foo1))
+ println(" Murmur 2: " + scala.util.hashing.MurmurHash3.productHash(foo2))
+ }
+}
+
+object Timing {
+ var hash = 0
+ def mkFoo(i: Int) = Foo(i % 2 == 0, i.toByte, i.toShort, i.toChar, i, i, 1.1, 1.1f, this, this)
+
+ def main(args: Array[String]): Unit = {
+ val reps = if (args.isEmpty) 100000000 else args(0).toInt
+ val start = System.nanoTime
+
+ println("Warmup.")
+ 1 to 10000 foreach mkFoo
+
+ hash = 0
+ 1 to reps foreach (i => hash += mkFoo(i).##)
+
+ val end = System.nanoTime
+ println("hash = " + hash)
+ println("Elapsed: " + ((end - start) / 1e6) + " ms.")
+ }
+}
diff --git a/test/files/run/class-symbol-contravariant.check b/test/files/run/class-symbol-contravariant.check
new file mode 100644
index 0000000..987f215
--- /dev/null
+++ b/test/files/run/class-symbol-contravariant.check
@@ -0,0 +1,36 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> :power
+** Power User mode enabled - BEEP WHIR GYVE **
+** :phase has been set to 'typer'. **
+** scala.tools.nsc._ has been imported **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
+
+scala> val u = rootMirror.universe
+u: $r.intp.global.type = <global>
+
+scala> import u._, scala.reflect.internal.Flags
+import u._
+import scala.reflect.internal.Flags
+
+scala> class C
+defined class C
+
+scala> val sym = u.typeOf[C].typeSymbol
+sym: u.Symbol = class C
+
+scala> sym.isContravariant
+res0: Boolean = false
+
+scala> sym setFlag Flags.INCONSTRUCTOR
+res1: sym.type = class C
+
+scala> sym.isClassLocalToConstructor
+res2: Boolean = true
+
+scala> sym.isContravariant // was true
+res3: Boolean = false
+
+scala>
diff --git a/test/files/run/class-symbol-contravariant.scala b/test/files/run/class-symbol-contravariant.scala
new file mode 100644
index 0000000..6a84944
--- /dev/null
+++ b/test/files/run/class-symbol-contravariant.scala
@@ -0,0 +1,15 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def code = """
+ |:power
+ |val u = rootMirror.universe
+ |import u._, scala.reflect.internal.Flags
+ |class C
+ |val sym = u.typeOf[C].typeSymbol
+ |sym.isContravariant
+ |sym setFlag Flags.INCONSTRUCTOR
+ |sym.isClassLocalToConstructor
+ |sym.isContravariant // was true
+ |""".stripMargin.trim
+}
\ No newline at end of file
diff --git a/test/files/run/classfile-format-51.scala b/test/files/run/classfile-format-51.scala
new file mode 100644
index 0000000..378caa7
--- /dev/null
+++ b/test/files/run/classfile-format-51.scala
@@ -0,0 +1,126 @@
+import java.io.{File, FileOutputStream}
+
+import scala.tools.nsc.settings.ScalaVersion
+import scala.tools.partest._
+import scala.tools.asm
+import asm.{AnnotationVisitor, ClassWriter, FieldVisitor, Handle, MethodVisitor, Opcodes}
+import Opcodes._
+
+// This test ensures that we can read JDK 7 (classfile format 51) files, including those
+// with invokeDynamic instructions and associated constant pool entries
+// to do that it first uses ASM to generate a class called DynamicInvoker. Then
+// it runs a normal compile on the source in the 'code' field that refers to
+// DynamicInvoker. Any failure will be dumped to std out.
+//
+// By it's nature the test can only work on JDK 7+ because under JDK 6 some of the
+// classes referred to by DynamicInvoker won't be available and DynamicInvoker won't
+// verify. So the test includes a version check that short-circuites the whole test
+// on JDK 6
+object Test extends DirectTest {
+ override def extraSettings: String = "-optimise -usejavacp -d " + testOutput.path + " -cp " + testOutput.path
+
+ def generateClass() {
+ val invokerClassName = "DynamicInvoker"
+ val bootstrapMethodName = "bootstrap"
+ val bootStrapMethodType = "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;"
+ val targetMethodName = "target"
+ val targetMethodType = "()Ljava/lang/String;"
+
+ val cw = new ClassWriter(0)
+ cw.visit(V1_7, ACC_PUBLIC + ACC_SUPER, invokerClassName, null, "java/lang/Object", null)
+
+ val constructor = cw.visitMethod(ACC_PUBLIC, "<init>", "()V", null, null)
+ constructor.visitCode()
+ constructor.visitVarInsn(ALOAD, 0)
+ constructor.visitMethodInsn(INVOKESPECIAL, "java/lang/Object", "<init>", "()V")
+ constructor.visitInsn(RETURN)
+ constructor.visitMaxs(1, 1)
+ constructor.visitEnd()
+
+ val target = cw.visitMethod(ACC_PUBLIC + ACC_STATIC, targetMethodName, targetMethodType, null, null)
+ target.visitCode()
+ target.visitLdcInsn("hello")
+ target.visitInsn(ARETURN)
+ target.visitMaxs(1, 1)
+ target.visitEnd()
+
+ val bootstrap = cw.visitMethod(ACC_PUBLIC + ACC_STATIC, bootstrapMethodName, bootStrapMethodType, null, null)
+ bootstrap.visitCode()
+// val lookup = MethodHandles.lookup();
+ bootstrap.visitMethodInsn(INVOKESTATIC, "java/lang/invoke/MethodHandles", "lookup", "()Ljava/lang/invoke/MethodHandles$Lookup;")
+ bootstrap.visitVarInsn(ASTORE, 3) // lookup
+
+// val clazz = lookup.lookupClass();
+ bootstrap.visitVarInsn(ALOAD, 3) // lookup
+ bootstrap.visitMethodInsn(INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "lookupClass", "()Ljava/lang/Class;")
+ bootstrap.visitVarInsn(ASTORE, 4) // clazz
+
+// val methodType = MethodType.fromMethodDescriptorString("()Ljava/lang/String, clazz.getClassLoader()")
+ bootstrap.visitLdcInsn("()Ljava/lang/String;")
+ bootstrap.visitVarInsn(ALOAD, 4) // CLAZZ
+ bootstrap.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Class", "getClassLoader", "()Ljava/lang/ClassLoader;")
+ bootstrap.visitMethodInsn(INVOKESTATIC, "java/lang/invoke/MethodType", "fromMethodDescriptorString", "(Ljava/lang/String;Ljava/lang/ClassLoader;)Ljava/lang/invoke/MethodType;")
+ bootstrap.visitVarInsn(ASTORE, 5) // methodType
+
+// val methodHandle = lookup.findStatic(thisClass, "target", methodType)
+ bootstrap.visitVarInsn(ALOAD, 3) // lookup
+ bootstrap.visitVarInsn(ALOAD, 4) // clazz
+ bootstrap.visitLdcInsn("target")
+ bootstrap.visitVarInsn(ALOAD, 5) // methodType
+ bootstrap.visitMethodInsn(INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "findStatic", "(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/MethodHandle;")
+ bootstrap.visitVarInsn(ASTORE, 6) // methodHandle
+
+// new ConstantCallSite(methodHandle)
+ bootstrap.visitTypeInsn(NEW, "java/lang/invoke/ConstantCallSite")
+ bootstrap.visitInsn(DUP)
+ bootstrap.visitVarInsn(ALOAD, 6) // methodHandle
+ bootstrap.visitMethodInsn(INVOKESPECIAL, "java/lang/invoke/ConstantCallSite", "<init>", "(Ljava/lang/invoke/MethodHandle;)V")
+ bootstrap.visitInsn(ARETURN)
+ bootstrap.visitMaxs(4,7)
+ bootstrap.visitEnd()
+
+ val test = cw.visitMethod(ACC_PUBLIC + ACC_FINAL, "test", s"()Ljava/lang/String;", null, null)
+ test.visitCode()
+ val bootstrapHandle = new Handle(H_INVOKESTATIC, invokerClassName, bootstrapMethodName, bootStrapMethodType)
+ test.visitInvokeDynamicInsn("invoke", targetMethodType, bootstrapHandle)
+ test.visitInsn(ARETURN)
+ test.visitMaxs(1, 1)
+ test.visitEnd()
+
+ cw.visitEnd()
+ val bytes = cw.toByteArray()
+
+ val fos = new FileOutputStream(new File(s"${testOutput.path}/$invokerClassName.class"))
+ try
+ fos write bytes
+ finally
+ fos.close()
+
+ }
+
+ def code =
+"""
+object Driver {
+ val invoker = new DynamicInvoker()
+ println(invoker.test())
+}
+"""
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ try {
+ // this test is only valid under JDK 1.7+
+ testUnderJavaAtLeast("1.7") {
+ generateClass()
+ compile()
+ ()
+ } otherwise {
+ ()
+ }
+ }
+ finally
+ System.setErr(prevErr)
+ }
+}
diff --git a/test/files/run/classfile-format-52.check b/test/files/run/classfile-format-52.check
new file mode 100644
index 0000000..5d24ef0
--- /dev/null
+++ b/test/files/run/classfile-format-52.check
@@ -0,0 +1,2 @@
+hello from publicMethod
+hello from staticMethod
diff --git a/test/files/run/classfile-format-52.scala b/test/files/run/classfile-format-52.scala
new file mode 100644
index 0000000..7afa09a
--- /dev/null
+++ b/test/files/run/classfile-format-52.scala
@@ -0,0 +1,77 @@
+import java.io.{File, FileOutputStream}
+
+import scala.tools.nsc.settings.ScalaVersion
+import scala.tools.partest._
+import scala.tools.asm
+import asm.{AnnotationVisitor, ClassWriter, FieldVisitor, Handle, MethodVisitor, Opcodes}
+import Opcodes._
+
+// This test ensures that we can read JDK 8 (classfile format 52) files, including those
+// with default methods. To do that it first uses ASM to generate an interface called
+// HasDefaultMethod. Then it runs a normal compile on Scala source that extends that
+// interface. Any failure will be dumped to std out.
+//
+// By it's nature the test can only work on JDK 8+ because under JDK 7- the
+// interface won't verify.
+object Test extends DirectTest {
+ override def extraSettings: String = "-optimise -usejavacp -d " + testOutput.path + " -cp " + testOutput.path
+
+ def generateInterface() {
+ val interfaceName = "HasDefaultMethod"
+ val methodType = "()Ljava/lang/String;"
+
+ val cw = new ClassWriter(0)
+ cw.visit(52, ACC_PUBLIC+ACC_ABSTRACT+ACC_INTERFACE, interfaceName, null, "java/lang/Object", null)
+
+ def createMethod(flags:Int, name: String) {
+ val method = cw.visitMethod(flags, name, methodType, null, null)
+ method.visitCode()
+ method.visitLdcInsn(s"hello from $name")
+ method.visitInsn(ARETURN)
+ method.visitMaxs(1, 1)
+ method.visitEnd()
+ }
+
+ createMethod(ACC_PUBLIC, "publicMethod")
+ createMethod(ACC_PUBLIC+ACC_STATIC, "staticMethod")
+ createMethod(ACC_PRIVATE, "privateMethod")
+
+ cw.visitEnd()
+ val bytes = cw.toByteArray()
+
+ val fos = new FileOutputStream(new File(s"${testOutput.path}/$interfaceName.class"))
+ try
+ fos write bytes
+ finally
+ fos.close()
+
+ }
+
+ def code =
+"""
+class Driver extends HasDefaultMethod {
+ println(publicMethod())
+ println(HasDefaultMethod.staticMethod())
+}
+"""
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ try {
+ // this test is only valid under JDK 1.8+
+ testUnderJavaAtLeast("1.8") {
+ generateInterface()
+ compile()
+ Class.forName("Driver").newInstance()
+ ()
+ } otherwise {
+ println("hello from publicMethod")
+ println("hello from staticMethod")
+ }
+ }
+ finally
+ System.setErr(prevErr)
+ }
+}
diff --git a/test/files/run/classmanifests_new_alias.check b/test/files/run/classmanifests_new_alias.check
new file mode 100644
index 0000000..032521a
--- /dev/null
+++ b/test/files/run/classmanifests_new_alias.check
@@ -0,0 +1,2 @@
+Int
+true
diff --git a/test/files/run/classmanifests_new_alias.scala b/test/files/run/classmanifests_new_alias.scala
new file mode 100644
index 0000000..12bd93b
--- /dev/null
+++ b/test/files/run/classmanifests_new_alias.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ type CM[T] = ClassManifest[T]
+ println(implicitly[CM[Int]])
+ println(implicitly[CM[Int]] eq Manifest.Int)
+}
\ No newline at end of file
diff --git a/test/files/run/classmanifests_new_core.check b/test/files/run/classmanifests_new_core.check
new file mode 100644
index 0000000..032521a
--- /dev/null
+++ b/test/files/run/classmanifests_new_core.check
@@ -0,0 +1,2 @@
+Int
+true
diff --git a/test/files/run/classmanifests_new_core.scala b/test/files/run/classmanifests_new_core.scala
new file mode 100644
index 0000000..63dbfab
--- /dev/null
+++ b/test/files/run/classmanifests_new_core.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println(classManifest[Int])
+ println(classManifest[Int] eq Manifest.Int)
+}
\ No newline at end of file
diff --git a/test/files/run/classtags_contextbound.check b/test/files/run/classtags_contextbound.check
new file mode 100644
index 0000000..6041228
--- /dev/null
+++ b/test/files/run/classtags_contextbound.check
@@ -0,0 +1 @@
+class [I
diff --git a/test/files/run/classtags_contextbound.scala b/test/files/run/classtags_contextbound.scala
new file mode 100644
index 0000000..2f12792
--- /dev/null
+++ b/test/files/run/classtags_contextbound.scala
@@ -0,0 +1,7 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ def mkArray[T: ClassTag] = Array[T]()
+ def foo[T: ClassTag] = mkArray[T]
+ println(foo[Int].getClass)
+}
\ No newline at end of file
diff --git a/test/files/run/classtags_core.check b/test/files/run/classtags_core.check
new file mode 100644
index 0000000..d5c4386
--- /dev/null
+++ b/test/files/run/classtags_core.check
@@ -0,0 +1,30 @@
+true
+Byte
+true
+Short
+true
+Char
+true
+Int
+true
+Long
+true
+Float
+true
+Double
+true
+Boolean
+true
+Unit
+true
+Any
+true
+AnyVal
+true
+Object
+true
+Object
+true
+Null
+true
+Nothing
diff --git a/test/files/run/classtags_core.scala b/test/files/run/classtags_core.scala
new file mode 100644
index 0000000..0e174d8
--- /dev/null
+++ b/test/files/run/classtags_core.scala
@@ -0,0 +1,34 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ println(implicitly[ClassTag[Byte]] eq ClassTag.Byte)
+ println(implicitly[ClassTag[Byte]])
+ println(implicitly[ClassTag[Short]] eq ClassTag.Short)
+ println(implicitly[ClassTag[Short]])
+ println(implicitly[ClassTag[Char]] eq ClassTag.Char)
+ println(implicitly[ClassTag[Char]])
+ println(implicitly[ClassTag[Int]] eq ClassTag.Int)
+ println(implicitly[ClassTag[Int]])
+ println(implicitly[ClassTag[Long]] eq ClassTag.Long)
+ println(implicitly[ClassTag[Long]])
+ println(implicitly[ClassTag[Float]] eq ClassTag.Float)
+ println(implicitly[ClassTag[Float]])
+ println(implicitly[ClassTag[Double]] eq ClassTag.Double)
+ println(implicitly[ClassTag[Double]])
+ println(implicitly[ClassTag[Boolean]] eq ClassTag.Boolean)
+ println(implicitly[ClassTag[Boolean]])
+ println(implicitly[ClassTag[Unit]] eq ClassTag.Unit)
+ println(implicitly[ClassTag[Unit]])
+ println(implicitly[ClassTag[Any]] eq ClassTag.Any)
+ println(implicitly[ClassTag[Any]])
+ println(implicitly[ClassTag[AnyVal]] eq ClassTag.AnyVal)
+ println(implicitly[ClassTag[AnyVal]])
+ println(implicitly[ClassTag[AnyRef]] eq ClassTag.AnyRef)
+ println(implicitly[ClassTag[AnyRef]])
+ println(implicitly[ClassTag[Object]] eq ClassTag.Object)
+ println(implicitly[ClassTag[Object]])
+ println(implicitly[ClassTag[Null]] eq ClassTag.Null)
+ println(implicitly[ClassTag[Null]])
+ println(implicitly[ClassTag[Nothing]] eq ClassTag.Nothing)
+ println(implicitly[ClassTag[Nothing]])
+}
\ No newline at end of file
diff --git a/test/files/run/classtags_multi.check b/test/files/run/classtags_multi.check
new file mode 100644
index 0000000..56da87e
--- /dev/null
+++ b/test/files/run/classtags_multi.check
@@ -0,0 +1,5 @@
+Int
+Array[int]
+Array[Array[int]]
+Array[Array[Array[int]]]
+Array[Array[Array[Array[int]]]]
diff --git a/test/files/run/classtags_multi.scala b/test/files/run/classtags_multi.scala
new file mode 100644
index 0000000..b4b47bc
--- /dev/null
+++ b/test/files/run/classtags_multi.scala
@@ -0,0 +1,9 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ println(classTag[Int])
+ println(classTag[Array[Int]])
+ println(classTag[Array[Array[Int]]])
+ println(classTag[Array[Array[Array[Int]]]])
+ println(classTag[Array[Array[Array[Array[Int]]]]])
+}
\ No newline at end of file
diff --git a/test/files/run/collection-conversions.check b/test/files/run/collection-conversions.check
new file mode 100644
index 0000000..5e43d25
--- /dev/null
+++ b/test/files/run/collection-conversions.check
@@ -0,0 +1,126 @@
+-- Testing iterator ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing Vector ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing List ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing Buffer ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing ParVector ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing ParArray ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing Set ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing SetView ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
+-- Testing BufferView ---
+ :[Direct] Vector : OK
+ :[Copy] Vector : OK
+ :[Direct] Buffer : OK
+ :[Copy] Buffer : OK
+ :[Direct] GenSeq : OK
+ :[Copy] GenSeq : OK
+ :[Copy] Seq : OK
+ :[Direct] Stream : OK
+ :[Copy] Stream : OK
+ :[Direct] Array : OK
+ :[Copy] Array : OK
+ :[Copy] ParVector: OK
+ :[Copy] ParArray : OK
\ No newline at end of file
diff --git a/test/files/run/collection-conversions.scala b/test/files/run/collection-conversions.scala
new file mode 100644
index 0000000..d842742
--- /dev/null
+++ b/test/files/run/collection-conversions.scala
@@ -0,0 +1,64 @@
+import collection._
+import mutable.Buffer
+import parallel.immutable.ParVector
+import parallel.mutable.ParArray
+import reflect.ClassTag
+
+object Test {
+
+ def printResult[A,B](msg: String, obj: A, expected: B)(implicit tag: ClassTag[A], tag2: ClassTag[B]) = {
+ print(" :" + msg +": ")
+ val isArray = obj match {
+ case x: Array[Int] => true
+ case _ => false
+ }
+ val expectedEquals =
+ if(isArray) obj.asInstanceOf[Array[Int]].toSeq == expected.asInstanceOf[Array[Int]].toSeq
+ else obj == expected
+ val tagEquals = tag == tag2
+ if(expectedEquals && tagEquals) print("OK")
+ else print("FAILED")
+ if(!expectedEquals) print(", " + obj + " != " + expected)
+ if(!tagEquals) print(", " + tag + " != " + tag2)
+ println("")
+ }
+
+ val testVector = Vector(1,2,3)
+ val testBuffer = Buffer(1,2,3)
+ val testGenSeq = GenSeq(1,2,3)
+ val testSeq = Seq(1,2,3)
+ val testStream = Stream(1,2,3)
+ val testArray = Array(1,2,3)
+ val testParVector = ParVector(1,2,3)
+ val testParArray = ParArray(1,2,3)
+
+ def testConversion[A: ClassTag](name: String, col: => GenTraversableOnce[A]): Unit = {
+ val tmp = col
+ println("-- Testing " + name + " ---")
+ printResult("[Direct] Vector ", col.toVector, testVector)
+ printResult("[Copy] Vector ", col.to[Vector], testVector)
+ printResult("[Direct] Buffer ", col.toBuffer, testBuffer)
+ printResult("[Copy] Buffer ", col.to[Buffer], testBuffer)
+ printResult("[Direct] GenSeq ", col.toSeq, testGenSeq)
+ printResult("[Copy] GenSeq ", col.to[GenSeq], testGenSeq)
+ printResult("[Copy] Seq ", col.to[Seq], testSeq)
+ printResult("[Direct] Stream ", col.toStream, testStream)
+ printResult("[Copy] Stream ", col.to[Stream], testStream)
+ printResult("[Direct] Array ", col.toArray, testArray)
+ printResult("[Copy] Array ", col.to[Array], testArray)
+ printResult("[Copy] ParVector", col.to[ParVector], testParVector)
+ printResult("[Copy] ParArray ", col.to[ParArray], testParArray)
+ }
+
+ def main(args: Array[String]): Unit = {
+ testConversion("iterator", (1 to 3).iterator)
+ testConversion("Vector", Vector(1,2,3))
+ testConversion("List", List(1,2,3))
+ testConversion("Buffer", Buffer(1,2,3))
+ testConversion("ParVector", ParVector(1,2,3))
+ testConversion("ParArray", ParArray(1,2,3))
+ testConversion("Set", Set(1,2,3))
+ testConversion("SetView", Set(1,2,3).view)
+ testConversion("BufferView", Buffer(1,2,3).view)
+ }
+}
diff --git a/test/files/run/collections.check b/test/files/run/collections.check
index b87a599..c24150b 100644
--- a/test/files/run/collections.check
+++ b/test/files/run/collections.check
@@ -2,6 +2,10 @@
test1: 14005
test2: 25005003, iters = 5000
test3: 25005003
+***** mutable.LinkedHashSet:
+test1: 14005
+test2: 25005003, iters = 5000
+test3: 25005003
***** immutable.Set:
test1: 14005
test2: 25005003, iters = 5000
@@ -18,6 +22,10 @@ test3: 25005003
test1: 14005
test2: 25005003, iters = 5000
test3: 25005003
+***** mutable.LinkedHashMap:
+test1: 14005
+test2: 25005003, iters = 5000
+test3: 25005003
***** immutable.Map:
test1: 14005
test2: 25005003, iters = 5000
diff --git a/test/files/run/collections.scala b/test/files/run/collections.scala
index 60f0765..69c40fa 100644
--- a/test/files/run/collections.scala
+++ b/test/files/run/collections.scala
@@ -106,10 +106,12 @@ object Test extends App {
}
test("mutable.HashSet", new mutable.HashSet[Int], 5000)
+ test("mutable.LinkedHashSet", new mutable.LinkedHashSet[Int], 5000)
test("immutable.Set", immutable.Set[Int](), 5000)
test("immutable.ListSet", new immutable.ListSet[Int], 5000)
test("immutable.TreeSet", new immutable.TreeSet[Int], 5000)
test("mutable.HashMap", new mutable.HashMap[Int, Int], 5000)
+ test("mutable.LinkedHashMap", new mutable.LinkedHashMap[Int, Int], 5000)
test("immutable.Map", immutable.Map[Int, Int](), 5000)
test("immutable.TreeMap", new immutable.TreeMap[Int, Int], 5000)
test("immutable.ListMap", new immutable.ListMap[Int, Int], 3000)
diff --git a/test/files/run/colltest.check b/test/files/run/colltest.check
index 1ad81a1..e5bb013 100644
--- a/test/files/run/colltest.check
+++ b/test/files/run/colltest.check
@@ -5,3 +5,4 @@ false
true
false
succeeded for 10 iterations.
+succeeded for 10 iterations.
diff --git a/test/files/run/colltest.scala b/test/files/run/colltest.scala
index ecd234b..703e94a 100644
--- a/test/files/run/colltest.scala
+++ b/test/files/run/colltest.scala
@@ -61,5 +61,6 @@ object Test extends App {
}
t3954
- new TestSet(HashSet.empty, new scala.collection.mutable.LinkedHashSet)
+ new TestSet(HashSet.empty, new LinkedHashSet)
+ new TestSet(new ImmutableSetAdaptor(collection.immutable.Set.empty[Int]), new LinkedHashSet)
}
diff --git a/test/files/run/colltest1.check b/test/files/run/colltest1.check
index 7377174..5ec6286 100644
--- a/test/files/run/colltest1.check
+++ b/test/files/run/colltest1.check
@@ -107,3 +107,5 @@ List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K)
List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
diff --git a/test/files/run/colltest1.scala b/test/files/run/colltest1.scala
index 1cbd932..54adeb7 100644
--- a/test/files/run/colltest1.scala
+++ b/test/files/run/colltest1.scala
@@ -226,6 +226,7 @@ object Test extends App {
setTest(mutable.Set())
setTest(immutable.Set())
setTest(mutable.HashSet())
+ setTest(mutable.LinkedHashSet())
setTest(immutable.HashSet())
mapTest(Map())
@@ -233,5 +234,6 @@ object Test extends App {
mapTest(immutable.Map())
mapTest(immutable.TreeMap())
mutableMapTest(mutable.HashMap())
+ mutableMapTest(mutable.LinkedHashMap())
mapTest(immutable.HashMap())
}
diff --git a/test/files/run/compiler-asSeenFrom.check b/test/files/run/compiler-asSeenFrom.check
new file mode 100644
index 0000000..47d40b0
--- /dev/null
+++ b/test/files/run/compiler-asSeenFrom.check
@@ -0,0 +1,323 @@
+class C {
+ type seen from prefix is
+ ---- ---------------- --
+ C[List[T3]]#I[T1] D[A1] C[List[T3]]#I[A1]
+ C[List[T3]]#I[T1] D[T3] C[List[T3]]#I[T3]
+ C[List[T3]]#J[T1] D[A1] C[List[T3]]#J[A1]
+ C[List[T3]]#J[T1] D[T3] C[List[T3]]#J[T3]
+ C[T1]#I[Int] C[List[T3]] C[List[T3]]#I[Int]
+ C[T1]#I[Int] D[A1] C[A1]#I[Int]
+ C[T1]#I[Int] D[T3] C[T3]#I[Int]
+ C[T1]#I[List[Int]] C[List[T3]] C[List[T3]]#I[List[Int]]
+ C[T1]#I[List[Int]] D[A1] C[A1]#I[List[Int]]
+ C[T1]#I[List[Int]] D[T3] C[T3]#I[List[Int]]
+ C[T1]#I[T1] C[List[T3]] C[List[T3]]#I[List[T3]]
+ C[T1]#I[T1] D[A1] C[A1]#I[A1]
+ C[T1]#I[T1] D[T3] C[T3]#I[T3]
+ C[T1]#I[T2] C[List[T3]] C[List[T3]]#I[T2]
+ C[T1]#I[T2] D[A1] C[A1]#I[T2]
+ C[T1]#I[T2] D[T3] C[T3]#I[T2]
+ C[T1]#I[T3] C[List[T3]] C[List[T3]]#I[T3]
+ C[T1]#I[T3] D[A1] C[A1]#I[T3]
+ C[T1]#I[T3] D[T3] C[T3]#I[T3]
+ C[T1]#I[T4] C[List[T3]] C[List[T3]]#I[T4]
+ C[T1]#I[T4] D[A1] C[A1]#I[T4]
+ C[T1]#I[T4] D[T3] C[T3]#I[T4]
+ C[T1]#J[Int] C[List[T3]] C[List[T3]]#J[Int]
+ C[T1]#J[Int] D[A1] C[A1]#J[Int]
+ C[T1]#J[Int] D[T3] C[T3]#J[Int]
+ C[T1]#J[List[Int]] C[List[T3]] C[List[T3]]#J[List[Int]]
+ C[T1]#J[List[Int]] D[A1] C[A1]#J[List[Int]]
+ C[T1]#J[List[Int]] D[T3] C[T3]#J[List[Int]]
+ C[T1]#J[T1] C[List[T3]] C[List[T3]]#J[List[T3]]
+ C[T1]#J[T1] D[A1] C[A1]#J[A1]
+ C[T1]#J[T1] D[T3] C[T3]#J[T3]
+ C[T1]#J[T2] C[List[T3]] C[List[T3]]#J[T2]
+ C[T1]#J[T2] D[A1] C[A1]#J[T2]
+ C[T1]#J[T2] D[T3] C[T3]#J[T2]
+ C[T1]#J[T3] C[List[T3]] C[List[T3]]#J[T3]
+ C[T1]#J[T3] D[A1] C[A1]#J[T3]
+ C[T1]#J[T3] D[T3] C[T3]#J[T3]
+ C[T1]#J[T4] C[List[T3]] C[List[T3]]#J[T4]
+ C[T1]#J[T4] D[A1] C[A1]#J[T4]
+ C[T1]#J[T4] D[T3] C[T3]#J[T4]
+ D[T3]#J[T1] C[List[T3]] D[T3]#J[List[T3]]
+ D[T3]#J[T1] D[A1] D[T3]#J[A1]
+ D[A1]#J[T1] C[List[T3]] D[A1]#J[List[T3]]
+ D[A1]#J[T1] D[T3] D[A1]#J[T3]
+}
+class D {
+ type seen from prefix is
+ ---- ---------------- --
+ C[List[T3]]#I[Int] D[A1] C[List[A1]]#I[Int]
+ C[List[T3]]#I[List[Int]] D[A1] C[List[A1]]#I[List[Int]]
+ C[List[T3]]#I[T1] D[A1] C[List[A1]]#I[T1]
+ C[List[T3]]#I[T2] D[A1] C[List[A1]]#I[T2]
+ C[List[T3]]#I[T3] D[A1] C[List[A1]]#I[A1]
+ C[List[T3]]#I[T4] D[A1] C[List[A1]]#I[T4]
+ C[List[T3]]#J[Int] D[A1] C[List[A1]]#J[Int]
+ C[List[T3]]#J[List[Int]] D[A1] C[List[A1]]#J[List[Int]]
+ C[List[T3]]#J[T1] D[A1] C[List[A1]]#J[T1]
+ C[List[T3]]#J[T2] D[A1] C[List[A1]]#J[T2]
+ C[List[T3]]#J[T3] D[A1] C[List[A1]]#J[A1]
+ C[List[T3]]#J[T4] D[A1] C[List[A1]]#J[T4]
+ C[T1]#I[T3] D[A1] C[T1]#I[A1]
+ C[T1]#J[T3] D[A1] C[T1]#J[A1]
+ D[T3]#J[Int] D[A1] D[A1]#J[Int]
+ D[T3]#J[List[Int]] D[A1] D[A1]#J[List[Int]]
+ D[T3]#J[T1] D[A1] D[A1]#J[T1]
+ D[T3]#J[T2] D[A1] D[A1]#J[T2]
+ D[T3]#J[T3] D[A1] D[A1]#J[A1]
+ D[T3]#J[T4] D[A1] D[A1]#J[T4]
+}
+class I {
+ type seen from prefix is
+ ---- ---------------- --
+ C[List[T3]]#I[T1] D.this.J[T4] C[List[T3]]#I[List[T3]]
+ C[List[T3]]#I[T1] Z.dZ.J[A2] C[List[T3]]#I[List[A1]]
+ C[List[T3]]#I[T1] Z.dZ.J[P] C[List[T3]]#I[List[A1]]
+ C[List[T3]]#I[T2] D.this.J[T4] C[List[T3]]#I[T4]
+ C[List[T3]]#I[T2] Z.dZ.J[A2] C[List[T3]]#I[A2]
+ C[List[T3]]#I[T2] Z.dZ.J[P] C[List[T3]]#I[P]
+ C[List[T3]]#J[T1] D.this.J[T4] C[List[T3]]#J[List[T3]]
+ C[List[T3]]#J[T1] Z.dZ.J[A2] C[List[T3]]#J[List[A1]]
+ C[List[T3]]#J[T1] Z.dZ.J[P] C[List[T3]]#J[List[A1]]
+ C[List[T3]]#J[T2] D.this.J[T4] C[List[T3]]#J[T4]
+ C[List[T3]]#J[T2] Z.dZ.J[A2] C[List[T3]]#J[A2]
+ C[List[T3]]#J[T2] Z.dZ.J[P] C[List[T3]]#J[P]
+ C[T1]#I[Int] D.this.J[T4] C[List[T3]]#I[Int]
+ C[T1]#I[Int] Z.dZ.J[A2] C[List[A1]]#I[Int]
+ C[T1]#I[Int] Z.dZ.J[P] C[List[A1]]#I[Int]
+ C[T1]#I[List[Int]] D.this.J[T4] C[List[T3]]#I[List[Int]]
+ C[T1]#I[List[Int]] Z.dZ.J[A2] C[List[A1]]#I[List[Int]]
+ C[T1]#I[List[Int]] Z.dZ.J[P] C[List[A1]]#I[List[Int]]
+ C[T1]#I[T1] D.this.J[T4] C[List[T3]]#I[List[T3]]
+ C[T1]#I[T1] Z.dZ.J[A2] C[List[A1]]#I[List[A1]]
+ C[T1]#I[T1] Z.dZ.J[P] C[List[A1]]#I[List[A1]]
+ C[T1]#I[T2] D.this.J[T4] C[List[T3]]#I[T4]
+ C[T1]#I[T2] Z.dZ.J[A2] C[List[A1]]#I[A2]
+ C[T1]#I[T2] Z.dZ.J[P] C[List[A1]]#I[P]
+ C[T1]#I[T3] D.this.J[T4] C[List[T3]]#I[T3]
+ C[T1]#I[T3] Z.dZ.J[A2] C[List[A1]]#I[T3]
+ C[T1]#I[T3] Z.dZ.J[P] C[List[A1]]#I[T3]
+ C[T1]#I[T4] D.this.J[T4] C[List[T3]]#I[T4]
+ C[T1]#I[T4] Z.dZ.J[A2] C[List[A1]]#I[T4]
+ C[T1]#I[T4] Z.dZ.J[P] C[List[A1]]#I[T4]
+ C[T1]#J[Int] D.this.J[T4] C[List[T3]]#J[Int]
+ C[T1]#J[Int] Z.dZ.J[A2] C[List[A1]]#J[Int]
+ C[T1]#J[Int] Z.dZ.J[P] C[List[A1]]#J[Int]
+ C[T1]#J[List[Int]] D.this.J[T4] C[List[T3]]#J[List[Int]]
+ C[T1]#J[List[Int]] Z.dZ.J[A2] C[List[A1]]#J[List[Int]]
+ C[T1]#J[List[Int]] Z.dZ.J[P] C[List[A1]]#J[List[Int]]
+ C[T1]#J[T1] D.this.J[T4] C[List[T3]]#J[List[T3]]
+ C[T1]#J[T1] Z.dZ.J[A2] C[List[A1]]#J[List[A1]]
+ C[T1]#J[T1] Z.dZ.J[P] C[List[A1]]#J[List[A1]]
+ C[T1]#J[T2] D.this.J[T4] C[List[T3]]#J[T4]
+ C[T1]#J[T2] Z.dZ.J[A2] C[List[A1]]#J[A2]
+ C[T1]#J[T2] Z.dZ.J[P] C[List[A1]]#J[P]
+ C[T1]#J[T3] D.this.J[T4] C[List[T3]]#J[T3]
+ C[T1]#J[T3] Z.dZ.J[A2] C[List[A1]]#J[T3]
+ C[T1]#J[T3] Z.dZ.J[P] C[List[A1]]#J[T3]
+ C[T1]#J[T4] D.this.J[T4] C[List[T3]]#J[T4]
+ C[T1]#J[T4] Z.dZ.J[A2] C[List[A1]]#J[T4]
+ C[T1]#J[T4] Z.dZ.J[P] C[List[A1]]#J[T4]
+ D[T3]#J[T1] D.this.J[T4] D[T3]#J[List[T3]]
+ D[T3]#J[T1] Z.dZ.J[A2] D[T3]#J[List[A1]]
+ D[T3]#J[T1] Z.dZ.J[P] D[T3]#J[List[A1]]
+ D[T3]#J[T2] D.this.J[T4] D[T3]#J[T4]
+ D[T3]#J[T2] Z.dZ.J[A2] D[T3]#J[A2]
+ D[T3]#J[T2] Z.dZ.J[P] D[T3]#J[P]
+ D[A1]#J[T1] D.this.J[T4] D[A1]#J[List[T3]]
+ D[A1]#J[T1] Z.dZ.J[A2] D[A1]#J[List[A1]]
+ D[A1]#J[T1] Z.dZ.J[P] D[A1]#J[List[A1]]
+ D[A1]#J[T2] D.this.J[T4] D[A1]#J[T4]
+ D[A1]#J[T2] Z.dZ.J[A2] D[A1]#J[A2]
+ D[A1]#J[T2] Z.dZ.J[P] D[A1]#J[P]
+}
+class J {
+ type seen from prefix is
+ ---- ---------------- --
+ C[List[T3]]#I[Int] Z.dZ.J[A2] C[List[A1]]#I[Int]
+ C[List[T3]]#I[Int] Z.dZ.J[P] C[List[A1]]#I[Int]
+ C[List[T3]]#I[List[Int]] Z.dZ.J[A2] C[List[A1]]#I[List[Int]]
+ C[List[T3]]#I[List[Int]] Z.dZ.J[P] C[List[A1]]#I[List[Int]]
+ C[List[T3]]#I[T1] Z.dZ.J[A2] C[List[A1]]#I[T1]
+ C[List[T3]]#I[T1] Z.dZ.J[P] C[List[A1]]#I[T1]
+ C[List[T3]]#I[T2] Z.dZ.J[A2] C[List[A1]]#I[T2]
+ C[List[T3]]#I[T2] Z.dZ.J[P] C[List[A1]]#I[T2]
+ C[List[T3]]#I[T3] Z.dZ.J[A2] C[List[A1]]#I[A1]
+ C[List[T3]]#I[T3] Z.dZ.J[P] C[List[A1]]#I[A1]
+ C[List[T3]]#I[T4] Z.dZ.J[A2] C[List[A1]]#I[A2]
+ C[List[T3]]#I[T4] Z.dZ.J[P] C[List[A1]]#I[P]
+ C[List[T3]]#J[Int] Z.dZ.J[A2] C[List[A1]]#J[Int]
+ C[List[T3]]#J[Int] Z.dZ.J[P] C[List[A1]]#J[Int]
+ C[List[T3]]#J[List[Int]] Z.dZ.J[A2] C[List[A1]]#J[List[Int]]
+ C[List[T3]]#J[List[Int]] Z.dZ.J[P] C[List[A1]]#J[List[Int]]
+ C[List[T3]]#J[T1] Z.dZ.J[A2] C[List[A1]]#J[T1]
+ C[List[T3]]#J[T1] Z.dZ.J[P] C[List[A1]]#J[T1]
+ C[List[T3]]#J[T2] Z.dZ.J[A2] C[List[A1]]#J[T2]
+ C[List[T3]]#J[T2] Z.dZ.J[P] C[List[A1]]#J[T2]
+ C[List[T3]]#J[T3] Z.dZ.J[A2] C[List[A1]]#J[A1]
+ C[List[T3]]#J[T3] Z.dZ.J[P] C[List[A1]]#J[A1]
+ C[List[T3]]#J[T4] Z.dZ.J[A2] C[List[A1]]#J[A2]
+ C[List[T3]]#J[T4] Z.dZ.J[P] C[List[A1]]#J[P]
+ C[T1]#I[T3] Z.dZ.J[A2] C[T1]#I[A1]
+ C[T1]#I[T3] Z.dZ.J[P] C[T1]#I[A1]
+ C[T1]#I[T4] Z.dZ.J[A2] C[T1]#I[A2]
+ C[T1]#I[T4] Z.dZ.J[P] C[T1]#I[P]
+ C[T1]#J[T3] Z.dZ.J[A2] C[T1]#J[A1]
+ C[T1]#J[T3] Z.dZ.J[P] C[T1]#J[A1]
+ C[T1]#J[T4] Z.dZ.J[A2] C[T1]#J[A2]
+ C[T1]#J[T4] Z.dZ.J[P] C[T1]#J[P]
+ D[T3]#J[Int] Z.dZ.J[A2] D[A1]#J[Int]
+ D[T3]#J[Int] Z.dZ.J[P] D[A1]#J[Int]
+ D[T3]#J[List[Int]] Z.dZ.J[A2] D[A1]#J[List[Int]]
+ D[T3]#J[List[Int]] Z.dZ.J[P] D[A1]#J[List[Int]]
+ D[T3]#J[T1] Z.dZ.J[A2] D[A1]#J[T1]
+ D[T3]#J[T1] Z.dZ.J[P] D[A1]#J[T1]
+ D[T3]#J[T2] Z.dZ.J[A2] D[A1]#J[T2]
+ D[T3]#J[T2] Z.dZ.J[P] D[A1]#J[T2]
+ D[T3]#J[T3] Z.dZ.J[A2] D[A1]#J[A1]
+ D[T3]#J[T3] Z.dZ.J[P] D[A1]#J[A1]
+ D[T3]#J[T4] Z.dZ.J[A2] D[A1]#J[A2]
+ D[T3]#J[T4] Z.dZ.J[P] D[A1]#J[P]
+ D[A1]#J[T3] Z.dZ.J[A2] D[A1]#J[A1]
+ D[A1]#J[T3] Z.dZ.J[P] D[A1]#J[A1]
+ D[A1]#J[T4] Z.dZ.J[A2] D[A1]#J[A2]
+ D[A1]#J[T4] Z.dZ.J[P] D[A1]#J[P]
+}
+class D { // after parser
+ private[this] val cD: ll.C[List[T3]]
+ val cD: ll.C[List[T3]]
+}
+
+class D { // after uncurry
+ private[this] val cD: ll.C[List[T3]]
+ val cD(): ll.C[List[T3]]
+}
+
+class D { // after erasure
+ private[this] val cD: ll.C
+ val cD(): ll.C
+}
+
+object Z { // after parser
+ def kz[P <: ll.Z.dZ.J[ll.A2]]: ll.Z.dZ.J[P]
+ private[this] val jZ: ll.Z.dZ.J[ll.A2]
+ val jZ: ll.Z.dZ.J[ll.A2]
+ private[this] val dZ: ll.D[ll.A1]
+ val dZ: ll.D[ll.A1]
+}
+
+object Z { // after uncurry
+ def kz[P <: ll.Z.dZ.J[ll.A2]](): ll.Z.dZ.J[P]
+ private[this] val jZ: ll.Z.dZ.J[ll.A2]
+ val jZ(): ll.Z.dZ.J[ll.A2]
+ private[this] val dZ: ll.D[ll.A1]
+ val dZ(): ll.D[ll.A1]
+}
+
+object Z { // after erasure
+ def kz(): ll.D#J
+ private[this] val jZ: ll.D#J
+ val jZ(): ll.D#J
+ private[this] val dZ: ll.D
+ val dZ(): ll.D
+}
+
+object Z { // after flatten
+ def kz(): ll.D#D$J
+ private[this] val jZ: ll.D#D$J
+ val jZ(): ll.D#D$J
+ private[this] val dZ: ll.D
+ val dZ(): ll.D
+}
+
+value dZ { // after parser
+ private[this] val cD: ll.C[List[T3]]
+ val cD: ll.C[List[T3]]
+}
+
+value dZ { // after parser
+ private[this] val cD: ll.C[List[T3]]
+ val cD: ll.C[List[T3]]
+}
+
+value dZ { // after uncurry
+ private[this] val cD: ll.C[List[T3]]
+ val cD(): ll.C[List[T3]]
+}
+
+value dZ { // after erasure
+ private[this] val cD: ll.C
+ val cD(): ll.C
+}
+
+value jZ { // after parser
+ def thisI(): I.this.type
+ def thisC(): C.this.type
+ def t2(): T2
+ def t1(): T1
+}
+
+value jZ { // after parser
+ def thisI(): I.this.type
+ def thisC(): C.this.type
+ def t2(): T2
+ def t1(): T1
+}
+
+value jZ { // after explicitouter
+ protected val $outer: D.this.type
+ val $outer(): D.this.type
+ val $outer(): C.this.type
+ def thisI(): I.this.type
+ def thisC(): C.this.type
+ def t2(): T2
+ def t1(): T1
+}
+
+value jZ { // after erasure
+ protected val $outer: ll.D
+ val $outer(): ll.D
+ protected val $outer: ll.C
+ val $outer(): ll.C
+ def thisI(): ll.C#I
+ def thisC(): ll.C
+ def t2(): Object
+ def t1(): Object
+}
+
+value jZ { // after flatten
+ protected val $outer: ll.D
+ val $outer(): ll.D
+ protected val $outer: ll.C
+ val $outer(): ll.C
+ def thisI(): ll.C#C$I
+ def thisC(): ll.C
+ def t2(): Object
+ def t1(): Object
+}
+
+method kz { // after parser
+ def thisI(): I.this.type
+ def thisC(): C.this.type
+ def t2(): T2
+ def t1(): T1
+}
+
+value $outer { // after parser
+ private[this] val cD: ll.C[List[T3]]
+ val cD: ll.C[List[T3]]
+}
+
+value $outer { // after uncurry
+ private[this] val cD: ll.C[List[T3]]
+ val cD(): ll.C[List[T3]]
+}
+
+value $outer { // after erasure
+ private[this] val cD: ll.C
+ val cD(): ll.C
+}
+
diff --git a/test/files/run/compiler-asSeenFrom.scala b/test/files/run/compiler-asSeenFrom.scala
new file mode 100644
index 0000000..19feb45
--- /dev/null
+++ b/test/files/run/compiler-asSeenFrom.scala
@@ -0,0 +1,122 @@
+import scala.tools.nsc._
+import scala.tools.partest.CompilerTest
+import scala.collection.{ mutable, immutable, generic }
+
+/** It's too messy but it's better than not having it.
+ */
+object Test extends CompilerTest {
+ import global._
+ import definitions._
+
+ override def sources = List(lambdaLift)
+ def lambdaLift = """
+package ll {
+ class A1
+ class A2
+ class X
+ class C[T1]() {
+ class I[T2]() {
+ def t1(): T1 = ???
+ def t2(): T2 = ???
+ def thisC(): C.this.type = ???
+ def thisI(): I.this.type = ???
+ }
+ }
+ class D[T3]() extends C[T3]() {
+ val cD: C[List[T3]] = ???
+ class J[T4]() extends cD.I[T4]()
+ }
+ object Z {
+ val dZ: D[A1] = ???
+ val jZ: dZ.J[A2] = ???
+
+ def kz[P <: dZ.J[A2]]: dZ.J[P] = ???
+ }
+}
+"""
+
+ object syms extends SymsInPackage("ll") {
+ def isPossibleEnclosure(encl: Symbol, sym: Symbol) = sym.enclClassChain drop 1 exists (_ isSubClass encl)
+ def isInterestingPrefix(pre: Type) = pre.typeConstructor.typeParams.nonEmpty && pre.members.exists(_.isType)
+
+ def asSeenPrefixes = tpes map (_.finalResultType) distinct
+ def typeRefPrefixes = asSeenPrefixes filter isInterestingPrefix
+
+ def nestsIn(outer: Symbol) = classes filter (c => c.enclClassChain drop 1 exists(_ isSubClass outer))
+ def typeRefs(targs: List[Type]) = (
+ for (p <- typeRefPrefixes ; c <- classes filter (isPossibleEnclosure(p.typeSymbol, _)) ; a <- targs) yield
+ typeRef(p, c, List(a))
+ )
+
+ val wfmt = "%-" + 25 + "s"
+ def to_s(x: Any): String = wfmt.format(x.toString.replaceAll("""\bll\.""", ""))
+
+ def fmt(args: Any*): String = {
+ (args map to_s mkString " ").replaceAll("""\s+$""", "")
+ }
+ def fname(sym: Symbol) = {
+ val p = "" + sym.owner.name
+ val x = if (sym.owner.isPackageClass || sym.owner.isModuleClass || sym.owner.isTerm) "." else "#"
+ sym.kindString + " " + p + x + sym.name
+ }
+
+ def permuteAsSeenFrom(targs: List[Type]) = (
+ for {
+ tp <- typeRefs(targs filterNot (_ eq NoType))
+ prefix <- asSeenPrefixes
+ if tp.prefix != prefix
+ site <- classes
+ seen = tp.asSeenFrom(prefix, site)
+ if tp != seen
+ if !seen.isInstanceOf[ExistentialType]
+ }
+ yield ((site, tp, prefix, seen))
+ )
+
+ def block(label: Any)(lines: List[String]): List[String] = {
+ val first = "" + label + " {"
+ val last = "}"
+
+ first +: lines.map(" " + _) :+ last
+ }
+
+ def permute(targs: List[Type]): List[String] = {
+ permuteAsSeenFrom(targs).groupBy(_._1).toList.sortBy(_._1.toString) flatMap {
+ case (site, xs) =>
+ block(fmt(site)) {
+ fmt("type", "seen from prefix", "is") ::
+ fmt("----", "----------------", "--") :: {
+ xs.groupBy(_._2).toList.sortBy(_._1.toString) flatMap {
+ case (tp, ys) =>
+ (ys map { case (_, _, prefix, seen) => fmt(tp, prefix, seen) }).sorted.distinct
+ }
+ }
+ }
+ }
+ }
+ }
+
+ def pretty(xs: List[_]) = if (xs.isEmpty) "" else xs.mkString("\n ", "\n ", "\n")
+
+ def signaturesIn(info: Type): List[String] = (
+ info.members.toList
+ filterNot (s => s.isType || s.owner == ObjectClass || s.owner == AnyClass || s.isConstructor)
+ map (_.defString)
+ )
+
+ def check(source: String, unit: global.CompilationUnit) = {
+ import syms._
+
+ afterTyper {
+ val typeArgs = List[Type](IntClass.tpe, ListClass[Int]) ++ tparams.map(_.tpe)
+ permute(typeArgs) foreach println
+ }
+ for (x <- classes ++ terms) {
+ afterEachPhase(signaturesIn(x.tpe)) collect {
+ case (ph, sigs) if sigs.nonEmpty =>
+ println(sigs.mkString(x + " { // after " + ph + "\n ", "\n ", "\n}\n"))
+ }
+ }
+ true
+ }
+}
diff --git a/test/files/run/concurrent-map-conversions.scala b/test/files/run/concurrent-map-conversions.scala
new file mode 100644
index 0000000..0350b69
--- /dev/null
+++ b/test/files/run/concurrent-map-conversions.scala
@@ -0,0 +1,36 @@
+
+
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ testConversions()
+ testConverters()
+ }
+
+ def needPackageConcurrentMap(map: collection.concurrent.Map[Int, Int]) {
+ }
+ def needJavaConcurrent(map: java.util.concurrent.ConcurrentMap[Int, Int]) {
+ }
+
+ def testConversions() {
+ import collection.JavaConversions._
+ val skiplist = new java.util.concurrent.ConcurrentSkipListMap[Int, Int]
+ val ctrie = new collection.concurrent.TrieMap[Int, Int]
+
+ needPackageConcurrentMap(skiplist)
+ needJavaConcurrent(ctrie)
+ }
+
+ def testConverters() {
+ import collection.JavaConverters._
+ val skiplist = new java.util.concurrent.ConcurrentSkipListMap[Int, Int]
+ val ctrie = new collection.concurrent.TrieMap[Int, Int]
+
+ needPackageConcurrentMap(skiplist.asScala)
+ needJavaConcurrent(ctrie.asJava)
+ }
+
+}
diff --git a/test/files/run/constant-type.check b/test/files/run/constant-type.check
new file mode 100644
index 0000000..dfd8be5
--- /dev/null
+++ b/test/files/run/constant-type.check
@@ -0,0 +1,30 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> :power
+** Power User mode enabled - BEEP WHIR GYVE **
+** :phase has been set to 'typer'. **
+** scala.tools.nsc._ has been imported **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
+
+scala> val s = transformedType(StringClass.toType).asInstanceOf[Type]
+s: $r.intp.global.Type = String
+
+scala> { println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))) }
+Class[String](classOf[java.lang.String])
+
+scala> { afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))) }
+Class(classOf[java.lang.String])
+
+scala> { ConstantType(Constant(s)); println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))); }
+Class[String](classOf[java.lang.String])
+
+scala> { ConstantType(Constant(s)); afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))); }
+Class(classOf[java.lang.String])
+
+scala>
+
+scala>
diff --git a/test/files/run/constant-type.scala b/test/files/run/constant-type.scala
new file mode 100644
index 0000000..84539e2
--- /dev/null
+++ b/test/files/run/constant-type.scala
@@ -0,0 +1,17 @@
+import scala.tools.partest.ReplTest
+
+// see the commit message to understand what this stuff is about
+// just a quick note:
+// transformedType returns an erased version of the type
+// as explained in the commit message, Type.erasure won't do for this test
+// because it does some postprocessing to the result of transformedType
+object Test extends ReplTest {
+ def code = """
+:power
+val s = transformedType(StringClass.toType).asInstanceOf[Type]
+{ println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))) }
+{ afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))) }
+{ ConstantType(Constant(s)); println(afterPhase(currentRun.erasurePhase)(ConstantType(Constant(s)))); }
+{ ConstantType(Constant(s)); afterPhase(currentRun.erasurePhase)(println(ConstantType(Constant(s)))); }
+ """
+}
diff --git a/test/files/run/constrained-types.check b/test/files/run/constrained-types.check
index 66580f0..85c4f41 100644
--- a/test/files/run/constrained-types.check
+++ b/test/files/run/constrained-types.check
@@ -66,19 +66,21 @@ m: (x: String)String @Annot(x)
scala>
scala> val three = "three"
-three: java.lang.String = three
+three: String = three
scala> val three2 = m(three:three.type) // should change x to three
three2: String @Annot(three) = three
scala> var four = "four"
-four: java.lang.String = four
+four: String = four
scala> val four2 = m(four) // should have an existential bound
-four2: java.lang.String @Annot(x) forSome { val x: java.lang.String } = four
+warning: there were 1 feature warning(s); re-run with -feature for details
+four2: String @Annot(x) forSome { val x: String } = four
scala> val four3 = four2 // should have the same type as four2
-four3: java.lang.String @Annot(x) forSome { val x: java.lang.String } = four
+warning: there were 1 feature warning(s); re-run with -feature for details
+four3: String @Annot(x) forSome { val x: String } = four
scala> val stuff = m("stuff") // should not crash
stuff: String @Annot("stuff") = stuff
@@ -100,7 +102,8 @@ scala> def m = {
val y : String @Annot(x) = x
y
} // x should not escape the local scope with a narrow type
-m: java.lang.String @Annot(x) forSome { val x: java.lang.String }
+warning: there were 1 feature warning(s); re-run with -feature for details
+m: String @Annot(x) forSome { val x: String }
scala>
@@ -113,7 +116,8 @@ scala> def n(y: String) = {
}
m("stuff".stripMargin)
} // x should be existentially bound
-n: (y: String)java.lang.String @Annot(x) forSome { val x: String }
+warning: there were 1 feature warning(s); re-run with -feature for details
+n: (y: String)String @Annot(x) forSome { val x: String }
scala>
@@ -130,7 +134,7 @@ Companions must be defined together; you may wish to use :paste mode for this.
scala>
scala> val y = a.x // should drop the annotation
-y: java.lang.String = hello
+y: String = hello
scala>
diff --git a/test/files/run/ctries-new/DumbHash.scala b/test/files/run/ctries-new/DumbHash.scala
new file mode 100644
index 0000000..8ef325b
--- /dev/null
+++ b/test/files/run/ctries-new/DumbHash.scala
@@ -0,0 +1,14 @@
+
+
+
+
+
+
+class DumbHash(val i: Int) {
+ override def equals(other: Any) = other match {
+ case that: DumbHash => that.i == this.i
+ case _ => false
+ }
+ override def hashCode = i % 5
+ override def toString = "DH(%s)".format(i)
+}
diff --git a/test/files/run/ctries-new/Wrap.scala b/test/files/run/ctries-new/Wrap.scala
new file mode 100644
index 0000000..7b645c1
--- /dev/null
+++ b/test/files/run/ctries-new/Wrap.scala
@@ -0,0 +1,9 @@
+
+
+
+
+
+
+case class Wrap(i: Int) {
+ override def hashCode = i * 0x9e3775cd
+}
diff --git a/test/files/run/ctries-new/concmap.scala b/test/files/run/ctries-new/concmap.scala
new file mode 100644
index 0000000..3ec0256
--- /dev/null
+++ b/test/files/run/ctries-new/concmap.scala
@@ -0,0 +1,188 @@
+
+
+
+import collection.concurrent.TrieMap
+
+
+object ConcurrentMapSpec extends Spec {
+
+ val initsz = 500
+ val secondsz = 750
+
+ def test() {
+ "support put" in {
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until initsz) assert(ct.put(new Wrap(i), i) == None)
+ for (i <- 0 until initsz) assert(ct.put(new Wrap(i), -i) == Some(i))
+ }
+
+ "support put if absent" in {
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until initsz) ct.update(new Wrap(i), i)
+ for (i <- 0 until initsz) assert(ct.putIfAbsent(new Wrap(i), -i) == Some(i))
+ for (i <- 0 until initsz) assert(ct.putIfAbsent(new Wrap(i), -i) == Some(i))
+ for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), -i) == None)
+ for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), i) == Some(-i))
+ }
+
+ "support remove if mapped to a specific value" in {
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until initsz) ct.update(new Wrap(i), i)
+ for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), -i - 1) == false)
+ for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == true)
+ for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == false)
+ }
+
+ "support replace if mapped to a specific value" in {
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until initsz) ct.update(new Wrap(i), i)
+ for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), -i - 1, -i - 2) == false)
+ for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i, -i - 2) == true)
+ for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i, -i - 2) == false)
+ for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i, 0) == false)
+ }
+
+ "support replace if present" in {
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until initsz) ct.update(new Wrap(i), i)
+ for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), -i) == Some(i))
+ for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i) == Some(-i))
+ for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i) == None)
+ }
+
+ def assertEqual(a: Any, b: Any) = {
+ if (a != b) println(a, b)
+ assert(a == b)
+ }
+
+ "support replace if mapped to a specific value, using several threads" in {
+ val ct = new TrieMap[Wrap, Int]
+ val sz = 55000
+ for (i <- 0 until sz) ct.update(new Wrap(i), i)
+
+ class Updater(index: Int, offs: Int) extends Thread {
+ override def run() {
+ var repeats = 0
+ for (i <- 0 until sz) {
+ val j = (offs + i) % sz
+ var k = Int.MaxValue
+ do {
+ if (k != Int.MaxValue) repeats += 1
+ k = ct.lookup(new Wrap(j))
+ } while (!ct.replace(new Wrap(j), k, -k))
+ }
+ //println("Thread %d repeats: %d".format(index, repeats))
+ }
+ }
+
+ val threads = for (i <- 0 until 16) yield new Updater(i, sz / 32 * i)
+ threads.foreach(_.start())
+ threads.foreach(_.join())
+
+ for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), i)
+
+ val threads2 = for (i <- 0 until 15) yield new Updater(i, sz / 32 * i)
+ threads2.foreach(_.start())
+ threads2.foreach(_.join())
+
+ for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), -i)
+ }
+
+ "support put if absent, several threads" in {
+ val ct = new TrieMap[Wrap, Int]
+ val sz = 110000
+
+ class Updater(offs: Int) extends Thread {
+ override def run() {
+ for (i <- 0 until sz) {
+ val j = (offs + i) % sz
+ ct.putIfAbsent(new Wrap(j), j)
+ assert(ct.lookup(new Wrap(j)) == j)
+ }
+ }
+ }
+
+ val threads = for (i <- 0 until 16) yield new Updater(sz / 32 * i)
+ threads.foreach(_.start())
+ threads.foreach(_.join())
+
+ for (i <- 0 until sz) assert(ct(new Wrap(i)) == i)
+ }
+
+ "support remove if mapped to a specific value, several threads" in {
+ val ct = new TrieMap[Wrap, Int]
+ val sz = 55000
+ for (i <- 0 until sz) ct.update(new Wrap(i), i)
+
+ class Remover(offs: Int) extends Thread {
+ override def run() {
+ for (i <- 0 until sz) {
+ val j = (offs + i) % sz
+ ct.remove(new Wrap(j), j)
+ assert(ct.get(new Wrap(j)) == None)
+ }
+ }
+ }
+
+ val threads = for (i <- 0 until 16) yield new Remover(sz / 32 * i)
+ threads.foreach(_.start())
+ threads.foreach(_.join())
+
+ for (i <- 0 until sz) assert(ct.get(new Wrap(i)) == None)
+ }
+
+ "have all or none of the elements depending on the oddity" in {
+ val ct = new TrieMap[Wrap, Int]
+ val sz = 65000
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+
+ class Modifier(index: Int, offs: Int) extends Thread {
+ override def run() {
+ for (j <- 0 until sz) {
+ val i = (offs + j) % sz
+ var success = false
+ do {
+ if (ct.contains(new Wrap(i))) {
+ success = ct.remove(new Wrap(i)) != None
+ } else {
+ success = ct.putIfAbsent(new Wrap(i), i) == None
+ }
+ } while (!success)
+ }
+ }
+ }
+
+ def modify(n: Int) = {
+ val threads = for (i <- 0 until n) yield new Modifier(i, sz / n * i)
+ threads.foreach(_.start())
+ threads.foreach(_.join())
+ }
+
+ modify(16)
+ for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), Some(i))
+ modify(15)
+ for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), None)
+ }
+
+ "compute size correctly" in {
+ val ct = new TrieMap[Wrap, Int]
+ val sz = 36450
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+
+ assertEqual(ct.size, sz)
+ assertEqual(ct.size, sz)
+ }
+
+ "compute size correctly in parallel" in {
+ val ct = new TrieMap[Wrap, Int]
+ val sz = 36450
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+ val pct = ct.par
+
+ assertEqual(pct.size, sz)
+ assertEqual(pct.size, sz)
+ }
+
+ }
+
+}
diff --git a/test/files/run/ctries-new/iterator.scala b/test/files/run/ctries-new/iterator.scala
new file mode 100644
index 0000000..b953a40
--- /dev/null
+++ b/test/files/run/ctries-new/iterator.scala
@@ -0,0 +1,289 @@
+
+
+
+
+import collection._
+import collection.concurrent.TrieMap
+
+
+
+object IteratorSpec extends Spec {
+
+ def test() {
+ "work for an empty trie" in {
+ val ct = new TrieMap
+ val it = ct.iterator
+
+ it.hasNext shouldEqual (false)
+ evaluating { it.next() }.shouldProduce [NoSuchElementException]
+ }
+
+ def nonEmptyIteratorCheck(sz: Int) {
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct.put(new Wrap(i), i)
+
+ val it = ct.iterator
+ val tracker = mutable.Map[Wrap, Int]()
+ for (i <- 0 until sz) {
+ assert(it.hasNext == true)
+ tracker += it.next
+ }
+
+ it.hasNext shouldEqual (false)
+ evaluating { it.next() }.shouldProduce [NoSuchElementException]
+ tracker.size shouldEqual (sz)
+ tracker shouldEqual (ct)
+ }
+
+ "work for a 1 element trie" in {
+ nonEmptyIteratorCheck(1)
+ }
+
+ "work for a 2 element trie" in {
+ nonEmptyIteratorCheck(2)
+ }
+
+ "work for a 3 element trie" in {
+ nonEmptyIteratorCheck(3)
+ }
+
+ "work for a 5 element trie" in {
+ nonEmptyIteratorCheck(5)
+ }
+
+ "work for a 10 element trie" in {
+ nonEmptyIteratorCheck(10)
+ }
+
+ "work for a 20 element trie" in {
+ nonEmptyIteratorCheck(20)
+ }
+
+ "work for a 50 element trie" in {
+ nonEmptyIteratorCheck(50)
+ }
+
+ "work for a 100 element trie" in {
+ nonEmptyIteratorCheck(100)
+ }
+
+ "work for a 1k element trie" in {
+ nonEmptyIteratorCheck(1000)
+ }
+
+ "work for a 5k element trie" in {
+ nonEmptyIteratorCheck(5000)
+ }
+
+ "work for a 75k element trie" in {
+ nonEmptyIteratorCheck(75000)
+ }
+
+ "work for a 250k element trie" in {
+ nonEmptyIteratorCheck(500000)
+ }
+
+ def nonEmptyCollideCheck(sz: Int) {
+ val ct = new TrieMap[DumbHash, Int]
+ for (i <- 0 until sz) ct.put(new DumbHash(i), i)
+
+ val it = ct.iterator
+ val tracker = mutable.Map[DumbHash, Int]()
+ for (i <- 0 until sz) {
+ assert(it.hasNext == true)
+ tracker += it.next
+ }
+
+ it.hasNext shouldEqual (false)
+ evaluating { it.next() }.shouldProduce [NoSuchElementException]
+ tracker.size shouldEqual (sz)
+ tracker shouldEqual (ct)
+ }
+
+ "work for colliding hashcodes, 2 element trie" in {
+ nonEmptyCollideCheck(2)
+ }
+
+ "work for colliding hashcodes, 3 element trie" in {
+ nonEmptyCollideCheck(3)
+ }
+
+ "work for colliding hashcodes, 5 element trie" in {
+ nonEmptyCollideCheck(5)
+ }
+
+ "work for colliding hashcodes, 10 element trie" in {
+ nonEmptyCollideCheck(10)
+ }
+
+ "work for colliding hashcodes, 100 element trie" in {
+ nonEmptyCollideCheck(100)
+ }
+
+ "work for colliding hashcodes, 500 element trie" in {
+ nonEmptyCollideCheck(500)
+ }
+
+ "work for colliding hashcodes, 5k element trie" in {
+ nonEmptyCollideCheck(5000)
+ }
+
+ def assertEqual(a: Map[Wrap, Int], b: Map[Wrap, Int]) {
+ if (a != b) {
+ println(a.size + " vs " + b.size)
+ // println(a)
+ // println(b)
+ // println(a.toSeq.sortBy((x: (Wrap, Int)) => x._1.i))
+ // println(b.toSeq.sortBy((x: (Wrap, Int)) => x._1.i))
+ }
+ assert(a == b)
+ }
+
+ "be consistent when taken with concurrent modifications" in {
+ val sz = 25000
+ val W = 15
+ val S = 5
+ val checks = 5
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct.put(new Wrap(i), i)
+
+ class Modifier extends Thread {
+ override def run() {
+ for (i <- 0 until sz) ct.putIfAbsent(new Wrap(i), i) match {
+ case Some(_) => ct.remove(new Wrap(i))
+ case None =>
+ }
+ }
+ }
+
+ def consistentIteration(ct: TrieMap[Wrap, Int], checks: Int) {
+ class Iter extends Thread {
+ override def run() {
+ val snap = ct.readOnlySnapshot()
+ val initial = mutable.Map[Wrap, Int]()
+ for (kv <- snap) initial += kv
+
+ for (i <- 0 until checks) {
+ assertEqual(snap.iterator.toMap, initial)
+ }
+ }
+ }
+
+ val iter = new Iter
+ iter.start()
+ iter.join()
+ }
+
+ val threads = for (_ <- 0 until W) yield new Modifier
+ threads.foreach(_.start())
+ for (_ <- 0 until S) consistentIteration(ct, checks)
+ threads.foreach(_.join())
+ }
+
+ "be consistent with a concurrent removal with a well defined order" in {
+ val sz = 150000
+ val sgroupsize = 10
+ val sgroupnum = 5
+ val removerslowdown = 50
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct.put(new Wrap(i), i)
+
+ class Remover extends Thread {
+ override def run() {
+ for (i <- 0 until sz) {
+ assert(ct.remove(new Wrap(i)) == Some(i))
+ for (i <- 0 until removerslowdown) ct.get(new Wrap(i)) // slow down, mate
+ }
+ //println("done removing")
+ }
+ }
+
+ def consistentIteration(it: Iterator[(Wrap, Int)]) = {
+ class Iter extends Thread {
+ override def run() {
+ val elems = it.toBuffer
+ if (elems.nonEmpty) {
+ val minelem = elems.minBy((x: (Wrap, Int)) => x._1.i)._1.i
+ assert(elems.forall(_._1.i >= minelem))
+ }
+ }
+ }
+ new Iter
+ }
+
+ val remover = new Remover
+ remover.start()
+ for (_ <- 0 until sgroupnum) {
+ val iters = for (_ <- 0 until sgroupsize) yield consistentIteration(ct.iterator)
+ iters.foreach(_.start())
+ iters.foreach(_.join())
+ }
+ //println("done with iterators")
+ remover.join()
+ }
+
+ "be consistent with a concurrent insertion with a well defined order" in {
+ val sz = 150000
+ val sgroupsize = 10
+ val sgroupnum = 10
+ val inserterslowdown = 50
+ val ct = new TrieMap[Wrap, Int]
+
+ class Inserter extends Thread {
+ override def run() {
+ for (i <- 0 until sz) {
+ assert(ct.put(new Wrap(i), i) == None)
+ for (i <- 0 until inserterslowdown) ct.get(new Wrap(i)) // slow down, mate
+ }
+ //println("done inserting")
+ }
+ }
+
+ def consistentIteration(it: Iterator[(Wrap, Int)]) = {
+ class Iter extends Thread {
+ override def run() {
+ val elems = it.toSeq
+ if (elems.nonEmpty) {
+ val maxelem = elems.maxBy((x: (Wrap, Int)) => x._1.i)._1.i
+ assert(elems.forall(_._1.i <= maxelem))
+ }
+ }
+ }
+ new Iter
+ }
+
+ val inserter = new Inserter
+ inserter.start()
+ for (_ <- 0 until sgroupnum) {
+ val iters = for (_ <- 0 until sgroupsize) yield consistentIteration(ct.iterator)
+ iters.foreach(_.start())
+ iters.foreach(_.join())
+ }
+ //println("done with iterators")
+ inserter.join()
+ }
+
+ "work on a yet unevaluated snapshot" in {
+ val sz = 50000
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct.update(new Wrap(i), i)
+
+ val snap = ct.snapshot()
+ val it = snap.iterator
+
+ while (it.hasNext) it.next()
+ }
+
+ "be duplicated" in {
+ val sz = 50
+ val ct = collection.parallel.mutable.ParTrieMap((0 until sz) zip (0 until sz): _*)
+ val it = ct.splitter
+ for (_ <- 0 until (sz / 2)) it.next()
+ val dupit = it.dup
+
+ it.toList shouldEqual dupit.toList
+ }
+
+ }
+
+}
diff --git a/test/files/run/ctries-new/lnode.scala b/test/files/run/ctries-new/lnode.scala
new file mode 100644
index 0000000..92a3108
--- /dev/null
+++ b/test/files/run/ctries-new/lnode.scala
@@ -0,0 +1,61 @@
+
+
+
+import collection.concurrent.TrieMap
+
+
+object LNodeSpec extends Spec {
+
+ val initsz = 1500
+ val secondsz = 1750
+
+ def test() {
+ "accept elements with the same hash codes" in {
+ val ct = new TrieMap[DumbHash, Int]
+ for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
+ }
+
+ "lookup elements with the same hash codes" in {
+ val ct = new TrieMap[DumbHash, Int]
+ for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
+ for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == Some(i))
+ for (i <- initsz until secondsz) assert(ct.get(new DumbHash(i)) == None)
+ }
+
+ "remove elements with the same hash codes" in {
+ val ct = new TrieMap[DumbHash, Int]
+ for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
+ for (i <- 0 until initsz) {
+ val remelem = ct.remove(new DumbHash(i))
+ assert(remelem == Some(i), "removing " + i + " yields " + remelem)
+ }
+ for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == None)
+ }
+
+ "put elements with the same hash codes if absent" in {
+ val ct = new TrieMap[DumbHash, Int]
+ for (i <- 0 until initsz) ct.put(new DumbHash(i), i)
+ for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i)
+ for (i <- 0 until initsz) assert(ct.putIfAbsent(new DumbHash(i), i) == Some(i))
+ for (i <- initsz until secondsz) assert(ct.putIfAbsent(new DumbHash(i), i) == None)
+ for (i <- initsz until secondsz) assert(ct.lookup(new DumbHash(i)) == i)
+ }
+
+ "replace elements with the same hash codes" in {
+ val ct = new TrieMap[DumbHash, Int]
+ for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None)
+ for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i)
+ for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i) == Some(i))
+ for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == -i)
+ for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i, i) == true)
+ }
+
+ "remove elements with the same hash codes if mapped to a specific value" in {
+ val ct = new TrieMap[DumbHash, Int]
+ for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None)
+ for (i <- 0 until initsz) assert(ct.remove(new DumbHash(i), i) == true)
+ }
+
+ }
+
+}
diff --git a/test/files/run/ctries-new/main.scala b/test/files/run/ctries-new/main.scala
new file mode 100644
index 0000000..d7fe087
--- /dev/null
+++ b/test/files/run/ctries-new/main.scala
@@ -0,0 +1,47 @@
+import scala.reflect.{ClassTag, classTag}
+
+
+
+
+
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ ConcurrentMapSpec.test()
+ IteratorSpec.test()
+ LNodeSpec.test()
+ SnapshotSpec.test()
+ }
+
+}
+
+
+trait Spec {
+
+ implicit def str2ops(s: String) = new {
+ def in[U](body: =>U) {
+ // just execute body
+ body
+ }
+ }
+
+ implicit def any2ops(a: Any) = new {
+ def shouldEqual(other: Any) = assert(a == other)
+ }
+
+ def evaluating[U](body: =>U) = new {
+ def shouldProduce[T <: Throwable: ClassTag]() = {
+ var produced = false
+ try body
+ catch {
+ case e => if (e.getClass == implicitly[ClassTag[T]].runtimeClass) produced = true
+ } finally {
+ assert(produced, "Did not produce exception of type: " + implicitly[ClassTag[T]])
+ }
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/test/files/run/ctries-new/snapshot.scala b/test/files/run/ctries-new/snapshot.scala
new file mode 100644
index 0000000..5fe77d4
--- /dev/null
+++ b/test/files/run/ctries-new/snapshot.scala
@@ -0,0 +1,267 @@
+
+
+
+
+import collection._
+import collection.concurrent.TrieMap
+
+
+
+object SnapshotSpec extends Spec {
+
+ def test() {
+ "support snapshots" in {
+ val ctn = new TrieMap
+ ctn.snapshot()
+ ctn.readOnlySnapshot()
+
+ val ct = new TrieMap[Int, Int]
+ for (i <- 0 until 100) ct.put(i, i)
+ ct.snapshot()
+ ct.readOnlySnapshot()
+ }
+
+ "empty 2 quiescent snapshots in isolation" in {
+ val sz = 4000
+
+ class Worker(trie: TrieMap[Wrap, Int]) extends Thread {
+ override def run() {
+ for (i <- 0 until sz) {
+ assert(trie.remove(new Wrap(i)) == Some(i))
+ for (j <- 0 until sz)
+ if (j <= i) assert(trie.get(new Wrap(j)) == None)
+ else assert(trie.get(new Wrap(j)) == Some(j))
+ }
+ }
+ }
+
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct.put(new Wrap(i), i)
+ val snapt = ct.snapshot()
+
+ val original = new Worker(ct)
+ val snapshot = new Worker(snapt)
+ original.start()
+ snapshot.start()
+ original.join()
+ snapshot.join()
+
+ for (i <- 0 until sz) {
+ assert(ct.get(new Wrap(i)) == None)
+ assert(snapt.get(new Wrap(i)) == None)
+ }
+ }
+
+ def consistentReadOnly(name: String, readonly: Map[Wrap, Int], sz: Int, N: Int) {
+ @volatile var e: Exception = null
+
+ // reads possible entries once and stores them
+ // then reads all these N more times to check if the
+ // state stayed the same
+ class Reader(trie: Map[Wrap, Int]) extends Thread {
+ setName("Reader " + name)
+
+ override def run() =
+ try check()
+ catch {
+ case ex: Exception => e = ex
+ }
+
+ def check() {
+ val initial = mutable.Map[Wrap, Int]()
+ for (i <- 0 until sz) trie.get(new Wrap(i)) match {
+ case Some(i) => initial.put(new Wrap(i), i)
+ case None => // do nothing
+ }
+
+ for (k <- 0 until N) {
+ for (i <- 0 until sz) {
+ val tres = trie.get(new Wrap(i))
+ val ires = initial.get(new Wrap(i))
+ if (tres != ires) println(i, "initially: " + ires, "traversal %d: %s".format(k, tres))
+ assert(tres == ires)
+ }
+ }
+ }
+ }
+
+ val reader = new Reader(readonly)
+ reader.start()
+ reader.join()
+
+ if (e ne null) {
+ e.printStackTrace()
+ throw e
+ }
+ }
+
+ // traverses the trie `rep` times and modifies each entry
+ class Modifier(trie: TrieMap[Wrap, Int], index: Int, rep: Int, sz: Int) extends Thread {
+ setName("Modifier %d".format(index))
+
+ override def run() {
+ for (k <- 0 until rep) {
+ for (i <- 0 until sz) trie.putIfAbsent(new Wrap(i), i) match {
+ case Some(_) => trie.remove(new Wrap(i))
+ case None => // do nothing
+ }
+ }
+ }
+ }
+
+ // removes all the elements from the trie
+ class Remover(trie: TrieMap[Wrap, Int], index: Int, totremovers: Int, sz: Int) extends Thread {
+ setName("Remover %d".format(index))
+
+ override def run() {
+ for (i <- 0 until sz) trie.remove(new Wrap((i + sz / totremovers * index) % sz))
+ }
+ }
+
+ "have a consistent quiescent read-only snapshot" in {
+ val sz = 10000
+ val N = 100
+ val W = 10
+
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+ val readonly = ct.readOnlySnapshot()
+ val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
+
+ threads.foreach(_.start())
+ consistentReadOnly("qm", readonly, sz, N)
+ threads.foreach(_.join())
+ }
+
+ // now, we check non-quiescent snapshots, as these permit situations
+ // where a thread is caught in the middle of the update when a snapshot is taken
+
+ "have a consistent non-quiescent read-only snapshot, concurrent with removes only" in {
+ val sz = 1250
+ val W = 100
+ val S = 5000
+
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+ val threads = for (i <- 0 until W) yield new Remover(ct, i, W, sz)
+
+ threads.foreach(_.start())
+ for (i <- 0 until S) consistentReadOnly("non-qr", ct.readOnlySnapshot(), sz, 5)
+ threads.foreach(_.join())
+ }
+
+ "have a consistent non-quiescent read-only snapshot, concurrent with modifications" in {
+ val sz = 1000
+ val N = 7000
+ val W = 10
+ val S = 7000
+
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+ val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
+
+ threads.foreach(_.start())
+ for (i <- 0 until S) consistentReadOnly("non-qm", ct.readOnlySnapshot(), sz, 5)
+ threads.foreach(_.join())
+ }
+
+ def consistentNonReadOnly(name: String, trie: TrieMap[Wrap, Int], sz: Int, N: Int) {
+ @volatile var e: Exception = null
+
+ // reads possible entries once and stores them
+ // then reads all these N more times to check if the
+ // state stayed the same
+ class Worker extends Thread {
+ setName("Worker " + name)
+
+ override def run() =
+ try check()
+ catch {
+ case ex: Exception => e = ex
+ }
+
+ def check() {
+ val initial = mutable.Map[Wrap, Int]()
+ for (i <- 0 until sz) trie.get(new Wrap(i)) match {
+ case Some(i) => initial.put(new Wrap(i), i)
+ case None => // do nothing
+ }
+
+ for (k <- 0 until N) {
+ // modify
+ for ((key, value) <- initial) {
+ val oldv = if (k % 2 == 0) value else -value
+ val newv = -oldv
+ trie.replace(key, oldv, newv)
+ }
+
+ // check
+ for (i <- 0 until sz) if (initial.contains(new Wrap(i))) {
+ val expected = if (k % 2 == 0) -i else i
+ //println(trie.get(new Wrap(i)))
+ assert(trie.get(new Wrap(i)) == Some(expected))
+ } else {
+ assert(trie.get(new Wrap(i)) == None)
+ }
+ }
+ }
+ }
+
+ val worker = new Worker
+ worker.start()
+ worker.join()
+
+ if (e ne null) {
+ e.printStackTrace()
+ throw e
+ }
+ }
+
+ "have a consistent non-quiescent snapshot, concurrent with modifications" in {
+ val sz = 9000
+ val N = 1000
+ val W = 10
+ val S = 400
+
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+ val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
+
+ threads.foreach(_.start())
+ for (i <- 0 until S) {
+ consistentReadOnly("non-qm", ct.snapshot(), sz, 5)
+ consistentNonReadOnly("non-qsnap", ct.snapshot(), sz, 5)
+ }
+ threads.foreach(_.join())
+ }
+
+ "work when many concurrent snapshots are taken, concurrent with modifications" in {
+ val sz = 12000
+ val W = 10
+ val S = 10
+ val modifytimes = 1200
+ val snaptimes = 600
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+
+ class Snapshooter extends Thread {
+ setName("Snapshooter")
+ override def run() {
+ for (k <- 0 until snaptimes) {
+ val snap = ct.snapshot()
+ for (i <- 0 until sz) snap.remove(new Wrap(i))
+ for (i <- 0 until sz) assert(!snap.contains(new Wrap(i)))
+ }
+ }
+ }
+
+ val mods = for (i <- 0 until W) yield new Modifier(ct, i, modifytimes, sz)
+ val shooters = for (i <- 0 until S) yield new Snapshooter
+ val threads = mods ++ shooters
+ threads.foreach(_.start())
+ threads.foreach(_.join())
+ }
+
+ }
+
+}
diff --git a/test/files/run/ctries-old/DumbHash.scala b/test/files/run/ctries-old/DumbHash.scala
new file mode 100644
index 0000000..8ef325b
--- /dev/null
+++ b/test/files/run/ctries-old/DumbHash.scala
@@ -0,0 +1,14 @@
+
+
+
+
+
+
+class DumbHash(val i: Int) {
+ override def equals(other: Any) = other match {
+ case that: DumbHash => that.i == this.i
+ case _ => false
+ }
+ override def hashCode = i % 5
+ override def toString = "DH(%s)".format(i)
+}
diff --git a/test/files/run/ctries-old/Wrap.scala b/test/files/run/ctries-old/Wrap.scala
new file mode 100644
index 0000000..7b645c1
--- /dev/null
+++ b/test/files/run/ctries-old/Wrap.scala
@@ -0,0 +1,9 @@
+
+
+
+
+
+
+case class Wrap(i: Int) {
+ override def hashCode = i * 0x9e3775cd
+}
diff --git a/test/files/run/ctries-old/concmap.scala b/test/files/run/ctries-old/concmap.scala
new file mode 100644
index 0000000..3ec0256
--- /dev/null
+++ b/test/files/run/ctries-old/concmap.scala
@@ -0,0 +1,188 @@
+
+
+
+import collection.concurrent.TrieMap
+
+
+object ConcurrentMapSpec extends Spec {
+
+ val initsz = 500
+ val secondsz = 750
+
+ def test() {
+ "support put" in {
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until initsz) assert(ct.put(new Wrap(i), i) == None)
+ for (i <- 0 until initsz) assert(ct.put(new Wrap(i), -i) == Some(i))
+ }
+
+ "support put if absent" in {
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until initsz) ct.update(new Wrap(i), i)
+ for (i <- 0 until initsz) assert(ct.putIfAbsent(new Wrap(i), -i) == Some(i))
+ for (i <- 0 until initsz) assert(ct.putIfAbsent(new Wrap(i), -i) == Some(i))
+ for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), -i) == None)
+ for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), i) == Some(-i))
+ }
+
+ "support remove if mapped to a specific value" in {
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until initsz) ct.update(new Wrap(i), i)
+ for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), -i - 1) == false)
+ for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == true)
+ for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == false)
+ }
+
+ "support replace if mapped to a specific value" in {
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until initsz) ct.update(new Wrap(i), i)
+ for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), -i - 1, -i - 2) == false)
+ for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i, -i - 2) == true)
+ for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i, -i - 2) == false)
+ for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i, 0) == false)
+ }
+
+ "support replace if present" in {
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until initsz) ct.update(new Wrap(i), i)
+ for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), -i) == Some(i))
+ for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i) == Some(-i))
+ for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i) == None)
+ }
+
+ def assertEqual(a: Any, b: Any) = {
+ if (a != b) println(a, b)
+ assert(a == b)
+ }
+
+ "support replace if mapped to a specific value, using several threads" in {
+ val ct = new TrieMap[Wrap, Int]
+ val sz = 55000
+ for (i <- 0 until sz) ct.update(new Wrap(i), i)
+
+ class Updater(index: Int, offs: Int) extends Thread {
+ override def run() {
+ var repeats = 0
+ for (i <- 0 until sz) {
+ val j = (offs + i) % sz
+ var k = Int.MaxValue
+ do {
+ if (k != Int.MaxValue) repeats += 1
+ k = ct.lookup(new Wrap(j))
+ } while (!ct.replace(new Wrap(j), k, -k))
+ }
+ //println("Thread %d repeats: %d".format(index, repeats))
+ }
+ }
+
+ val threads = for (i <- 0 until 16) yield new Updater(i, sz / 32 * i)
+ threads.foreach(_.start())
+ threads.foreach(_.join())
+
+ for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), i)
+
+ val threads2 = for (i <- 0 until 15) yield new Updater(i, sz / 32 * i)
+ threads2.foreach(_.start())
+ threads2.foreach(_.join())
+
+ for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), -i)
+ }
+
+ "support put if absent, several threads" in {
+ val ct = new TrieMap[Wrap, Int]
+ val sz = 110000
+
+ class Updater(offs: Int) extends Thread {
+ override def run() {
+ for (i <- 0 until sz) {
+ val j = (offs + i) % sz
+ ct.putIfAbsent(new Wrap(j), j)
+ assert(ct.lookup(new Wrap(j)) == j)
+ }
+ }
+ }
+
+ val threads = for (i <- 0 until 16) yield new Updater(sz / 32 * i)
+ threads.foreach(_.start())
+ threads.foreach(_.join())
+
+ for (i <- 0 until sz) assert(ct(new Wrap(i)) == i)
+ }
+
+ "support remove if mapped to a specific value, several threads" in {
+ val ct = new TrieMap[Wrap, Int]
+ val sz = 55000
+ for (i <- 0 until sz) ct.update(new Wrap(i), i)
+
+ class Remover(offs: Int) extends Thread {
+ override def run() {
+ for (i <- 0 until sz) {
+ val j = (offs + i) % sz
+ ct.remove(new Wrap(j), j)
+ assert(ct.get(new Wrap(j)) == None)
+ }
+ }
+ }
+
+ val threads = for (i <- 0 until 16) yield new Remover(sz / 32 * i)
+ threads.foreach(_.start())
+ threads.foreach(_.join())
+
+ for (i <- 0 until sz) assert(ct.get(new Wrap(i)) == None)
+ }
+
+ "have all or none of the elements depending on the oddity" in {
+ val ct = new TrieMap[Wrap, Int]
+ val sz = 65000
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+
+ class Modifier(index: Int, offs: Int) extends Thread {
+ override def run() {
+ for (j <- 0 until sz) {
+ val i = (offs + j) % sz
+ var success = false
+ do {
+ if (ct.contains(new Wrap(i))) {
+ success = ct.remove(new Wrap(i)) != None
+ } else {
+ success = ct.putIfAbsent(new Wrap(i), i) == None
+ }
+ } while (!success)
+ }
+ }
+ }
+
+ def modify(n: Int) = {
+ val threads = for (i <- 0 until n) yield new Modifier(i, sz / n * i)
+ threads.foreach(_.start())
+ threads.foreach(_.join())
+ }
+
+ modify(16)
+ for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), Some(i))
+ modify(15)
+ for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), None)
+ }
+
+ "compute size correctly" in {
+ val ct = new TrieMap[Wrap, Int]
+ val sz = 36450
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+
+ assertEqual(ct.size, sz)
+ assertEqual(ct.size, sz)
+ }
+
+ "compute size correctly in parallel" in {
+ val ct = new TrieMap[Wrap, Int]
+ val sz = 36450
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+ val pct = ct.par
+
+ assertEqual(pct.size, sz)
+ assertEqual(pct.size, sz)
+ }
+
+ }
+
+}
diff --git a/test/files/run/ctries-old/iterator.scala b/test/files/run/ctries-old/iterator.scala
new file mode 100644
index 0000000..b953a40
--- /dev/null
+++ b/test/files/run/ctries-old/iterator.scala
@@ -0,0 +1,289 @@
+
+
+
+
+import collection._
+import collection.concurrent.TrieMap
+
+
+
+object IteratorSpec extends Spec {
+
+ def test() {
+ "work for an empty trie" in {
+ val ct = new TrieMap
+ val it = ct.iterator
+
+ it.hasNext shouldEqual (false)
+ evaluating { it.next() }.shouldProduce [NoSuchElementException]
+ }
+
+ def nonEmptyIteratorCheck(sz: Int) {
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct.put(new Wrap(i), i)
+
+ val it = ct.iterator
+ val tracker = mutable.Map[Wrap, Int]()
+ for (i <- 0 until sz) {
+ assert(it.hasNext == true)
+ tracker += it.next
+ }
+
+ it.hasNext shouldEqual (false)
+ evaluating { it.next() }.shouldProduce [NoSuchElementException]
+ tracker.size shouldEqual (sz)
+ tracker shouldEqual (ct)
+ }
+
+ "work for a 1 element trie" in {
+ nonEmptyIteratorCheck(1)
+ }
+
+ "work for a 2 element trie" in {
+ nonEmptyIteratorCheck(2)
+ }
+
+ "work for a 3 element trie" in {
+ nonEmptyIteratorCheck(3)
+ }
+
+ "work for a 5 element trie" in {
+ nonEmptyIteratorCheck(5)
+ }
+
+ "work for a 10 element trie" in {
+ nonEmptyIteratorCheck(10)
+ }
+
+ "work for a 20 element trie" in {
+ nonEmptyIteratorCheck(20)
+ }
+
+ "work for a 50 element trie" in {
+ nonEmptyIteratorCheck(50)
+ }
+
+ "work for a 100 element trie" in {
+ nonEmptyIteratorCheck(100)
+ }
+
+ "work for a 1k element trie" in {
+ nonEmptyIteratorCheck(1000)
+ }
+
+ "work for a 5k element trie" in {
+ nonEmptyIteratorCheck(5000)
+ }
+
+ "work for a 75k element trie" in {
+ nonEmptyIteratorCheck(75000)
+ }
+
+ "work for a 250k element trie" in {
+ nonEmptyIteratorCheck(500000)
+ }
+
+ def nonEmptyCollideCheck(sz: Int) {
+ val ct = new TrieMap[DumbHash, Int]
+ for (i <- 0 until sz) ct.put(new DumbHash(i), i)
+
+ val it = ct.iterator
+ val tracker = mutable.Map[DumbHash, Int]()
+ for (i <- 0 until sz) {
+ assert(it.hasNext == true)
+ tracker += it.next
+ }
+
+ it.hasNext shouldEqual (false)
+ evaluating { it.next() }.shouldProduce [NoSuchElementException]
+ tracker.size shouldEqual (sz)
+ tracker shouldEqual (ct)
+ }
+
+ "work for colliding hashcodes, 2 element trie" in {
+ nonEmptyCollideCheck(2)
+ }
+
+ "work for colliding hashcodes, 3 element trie" in {
+ nonEmptyCollideCheck(3)
+ }
+
+ "work for colliding hashcodes, 5 element trie" in {
+ nonEmptyCollideCheck(5)
+ }
+
+ "work for colliding hashcodes, 10 element trie" in {
+ nonEmptyCollideCheck(10)
+ }
+
+ "work for colliding hashcodes, 100 element trie" in {
+ nonEmptyCollideCheck(100)
+ }
+
+ "work for colliding hashcodes, 500 element trie" in {
+ nonEmptyCollideCheck(500)
+ }
+
+ "work for colliding hashcodes, 5k element trie" in {
+ nonEmptyCollideCheck(5000)
+ }
+
+ def assertEqual(a: Map[Wrap, Int], b: Map[Wrap, Int]) {
+ if (a != b) {
+ println(a.size + " vs " + b.size)
+ // println(a)
+ // println(b)
+ // println(a.toSeq.sortBy((x: (Wrap, Int)) => x._1.i))
+ // println(b.toSeq.sortBy((x: (Wrap, Int)) => x._1.i))
+ }
+ assert(a == b)
+ }
+
+ "be consistent when taken with concurrent modifications" in {
+ val sz = 25000
+ val W = 15
+ val S = 5
+ val checks = 5
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct.put(new Wrap(i), i)
+
+ class Modifier extends Thread {
+ override def run() {
+ for (i <- 0 until sz) ct.putIfAbsent(new Wrap(i), i) match {
+ case Some(_) => ct.remove(new Wrap(i))
+ case None =>
+ }
+ }
+ }
+
+ def consistentIteration(ct: TrieMap[Wrap, Int], checks: Int) {
+ class Iter extends Thread {
+ override def run() {
+ val snap = ct.readOnlySnapshot()
+ val initial = mutable.Map[Wrap, Int]()
+ for (kv <- snap) initial += kv
+
+ for (i <- 0 until checks) {
+ assertEqual(snap.iterator.toMap, initial)
+ }
+ }
+ }
+
+ val iter = new Iter
+ iter.start()
+ iter.join()
+ }
+
+ val threads = for (_ <- 0 until W) yield new Modifier
+ threads.foreach(_.start())
+ for (_ <- 0 until S) consistentIteration(ct, checks)
+ threads.foreach(_.join())
+ }
+
+ "be consistent with a concurrent removal with a well defined order" in {
+ val sz = 150000
+ val sgroupsize = 10
+ val sgroupnum = 5
+ val removerslowdown = 50
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct.put(new Wrap(i), i)
+
+ class Remover extends Thread {
+ override def run() {
+ for (i <- 0 until sz) {
+ assert(ct.remove(new Wrap(i)) == Some(i))
+ for (i <- 0 until removerslowdown) ct.get(new Wrap(i)) // slow down, mate
+ }
+ //println("done removing")
+ }
+ }
+
+ def consistentIteration(it: Iterator[(Wrap, Int)]) = {
+ class Iter extends Thread {
+ override def run() {
+ val elems = it.toBuffer
+ if (elems.nonEmpty) {
+ val minelem = elems.minBy((x: (Wrap, Int)) => x._1.i)._1.i
+ assert(elems.forall(_._1.i >= minelem))
+ }
+ }
+ }
+ new Iter
+ }
+
+ val remover = new Remover
+ remover.start()
+ for (_ <- 0 until sgroupnum) {
+ val iters = for (_ <- 0 until sgroupsize) yield consistentIteration(ct.iterator)
+ iters.foreach(_.start())
+ iters.foreach(_.join())
+ }
+ //println("done with iterators")
+ remover.join()
+ }
+
+ "be consistent with a concurrent insertion with a well defined order" in {
+ val sz = 150000
+ val sgroupsize = 10
+ val sgroupnum = 10
+ val inserterslowdown = 50
+ val ct = new TrieMap[Wrap, Int]
+
+ class Inserter extends Thread {
+ override def run() {
+ for (i <- 0 until sz) {
+ assert(ct.put(new Wrap(i), i) == None)
+ for (i <- 0 until inserterslowdown) ct.get(new Wrap(i)) // slow down, mate
+ }
+ //println("done inserting")
+ }
+ }
+
+ def consistentIteration(it: Iterator[(Wrap, Int)]) = {
+ class Iter extends Thread {
+ override def run() {
+ val elems = it.toSeq
+ if (elems.nonEmpty) {
+ val maxelem = elems.maxBy((x: (Wrap, Int)) => x._1.i)._1.i
+ assert(elems.forall(_._1.i <= maxelem))
+ }
+ }
+ }
+ new Iter
+ }
+
+ val inserter = new Inserter
+ inserter.start()
+ for (_ <- 0 until sgroupnum) {
+ val iters = for (_ <- 0 until sgroupsize) yield consistentIteration(ct.iterator)
+ iters.foreach(_.start())
+ iters.foreach(_.join())
+ }
+ //println("done with iterators")
+ inserter.join()
+ }
+
+ "work on a yet unevaluated snapshot" in {
+ val sz = 50000
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct.update(new Wrap(i), i)
+
+ val snap = ct.snapshot()
+ val it = snap.iterator
+
+ while (it.hasNext) it.next()
+ }
+
+ "be duplicated" in {
+ val sz = 50
+ val ct = collection.parallel.mutable.ParTrieMap((0 until sz) zip (0 until sz): _*)
+ val it = ct.splitter
+ for (_ <- 0 until (sz / 2)) it.next()
+ val dupit = it.dup
+
+ it.toList shouldEqual dupit.toList
+ }
+
+ }
+
+}
diff --git a/test/files/run/ctries-old/lnode.scala b/test/files/run/ctries-old/lnode.scala
new file mode 100644
index 0000000..92a3108
--- /dev/null
+++ b/test/files/run/ctries-old/lnode.scala
@@ -0,0 +1,61 @@
+
+
+
+import collection.concurrent.TrieMap
+
+
+object LNodeSpec extends Spec {
+
+ val initsz = 1500
+ val secondsz = 1750
+
+ def test() {
+ "accept elements with the same hash codes" in {
+ val ct = new TrieMap[DumbHash, Int]
+ for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
+ }
+
+ "lookup elements with the same hash codes" in {
+ val ct = new TrieMap[DumbHash, Int]
+ for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
+ for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == Some(i))
+ for (i <- initsz until secondsz) assert(ct.get(new DumbHash(i)) == None)
+ }
+
+ "remove elements with the same hash codes" in {
+ val ct = new TrieMap[DumbHash, Int]
+ for (i <- 0 until initsz) ct.update(new DumbHash(i), i)
+ for (i <- 0 until initsz) {
+ val remelem = ct.remove(new DumbHash(i))
+ assert(remelem == Some(i), "removing " + i + " yields " + remelem)
+ }
+ for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == None)
+ }
+
+ "put elements with the same hash codes if absent" in {
+ val ct = new TrieMap[DumbHash, Int]
+ for (i <- 0 until initsz) ct.put(new DumbHash(i), i)
+ for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i)
+ for (i <- 0 until initsz) assert(ct.putIfAbsent(new DumbHash(i), i) == Some(i))
+ for (i <- initsz until secondsz) assert(ct.putIfAbsent(new DumbHash(i), i) == None)
+ for (i <- initsz until secondsz) assert(ct.lookup(new DumbHash(i)) == i)
+ }
+
+ "replace elements with the same hash codes" in {
+ val ct = new TrieMap[DumbHash, Int]
+ for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None)
+ for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i)
+ for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i) == Some(i))
+ for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == -i)
+ for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i, i) == true)
+ }
+
+ "remove elements with the same hash codes if mapped to a specific value" in {
+ val ct = new TrieMap[DumbHash, Int]
+ for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None)
+ for (i <- 0 until initsz) assert(ct.remove(new DumbHash(i), i) == true)
+ }
+
+ }
+
+}
diff --git a/test/files/run/ctries-old/main.scala b/test/files/run/ctries-old/main.scala
new file mode 100644
index 0000000..78ba7f0
--- /dev/null
+++ b/test/files/run/ctries-old/main.scala
@@ -0,0 +1,45 @@
+
+
+
+
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ ConcurrentMapSpec.test()
+ IteratorSpec.test()
+ LNodeSpec.test()
+ SnapshotSpec.test()
+ }
+
+}
+
+
+trait Spec {
+
+ implicit def str2ops(s: String) = new {
+ def in[U](body: =>U) {
+ // just execute body
+ body
+ }
+ }
+
+ implicit def any2ops(a: Any) = new {
+ def shouldEqual(other: Any) = assert(a == other)
+ }
+
+ def evaluating[U](body: =>U) = new {
+ def shouldProduce[T <: Throwable: ClassManifest]() = {
+ var produced = false
+ try body
+ catch {
+ case e => if (e.getClass == implicitly[ClassManifest[T]].erasure) produced = true
+ } finally {
+ assert(produced, "Did not produce exception of type: " + implicitly[ClassManifest[T]])
+ }
+ }
+ }
+
+}
diff --git a/test/files/run/ctries-old/snapshot.scala b/test/files/run/ctries-old/snapshot.scala
new file mode 100644
index 0000000..5fe77d4
--- /dev/null
+++ b/test/files/run/ctries-old/snapshot.scala
@@ -0,0 +1,267 @@
+
+
+
+
+import collection._
+import collection.concurrent.TrieMap
+
+
+
+object SnapshotSpec extends Spec {
+
+ def test() {
+ "support snapshots" in {
+ val ctn = new TrieMap
+ ctn.snapshot()
+ ctn.readOnlySnapshot()
+
+ val ct = new TrieMap[Int, Int]
+ for (i <- 0 until 100) ct.put(i, i)
+ ct.snapshot()
+ ct.readOnlySnapshot()
+ }
+
+ "empty 2 quiescent snapshots in isolation" in {
+ val sz = 4000
+
+ class Worker(trie: TrieMap[Wrap, Int]) extends Thread {
+ override def run() {
+ for (i <- 0 until sz) {
+ assert(trie.remove(new Wrap(i)) == Some(i))
+ for (j <- 0 until sz)
+ if (j <= i) assert(trie.get(new Wrap(j)) == None)
+ else assert(trie.get(new Wrap(j)) == Some(j))
+ }
+ }
+ }
+
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct.put(new Wrap(i), i)
+ val snapt = ct.snapshot()
+
+ val original = new Worker(ct)
+ val snapshot = new Worker(snapt)
+ original.start()
+ snapshot.start()
+ original.join()
+ snapshot.join()
+
+ for (i <- 0 until sz) {
+ assert(ct.get(new Wrap(i)) == None)
+ assert(snapt.get(new Wrap(i)) == None)
+ }
+ }
+
+ def consistentReadOnly(name: String, readonly: Map[Wrap, Int], sz: Int, N: Int) {
+ @volatile var e: Exception = null
+
+ // reads possible entries once and stores them
+ // then reads all these N more times to check if the
+ // state stayed the same
+ class Reader(trie: Map[Wrap, Int]) extends Thread {
+ setName("Reader " + name)
+
+ override def run() =
+ try check()
+ catch {
+ case ex: Exception => e = ex
+ }
+
+ def check() {
+ val initial = mutable.Map[Wrap, Int]()
+ for (i <- 0 until sz) trie.get(new Wrap(i)) match {
+ case Some(i) => initial.put(new Wrap(i), i)
+ case None => // do nothing
+ }
+
+ for (k <- 0 until N) {
+ for (i <- 0 until sz) {
+ val tres = trie.get(new Wrap(i))
+ val ires = initial.get(new Wrap(i))
+ if (tres != ires) println(i, "initially: " + ires, "traversal %d: %s".format(k, tres))
+ assert(tres == ires)
+ }
+ }
+ }
+ }
+
+ val reader = new Reader(readonly)
+ reader.start()
+ reader.join()
+
+ if (e ne null) {
+ e.printStackTrace()
+ throw e
+ }
+ }
+
+ // traverses the trie `rep` times and modifies each entry
+ class Modifier(trie: TrieMap[Wrap, Int], index: Int, rep: Int, sz: Int) extends Thread {
+ setName("Modifier %d".format(index))
+
+ override def run() {
+ for (k <- 0 until rep) {
+ for (i <- 0 until sz) trie.putIfAbsent(new Wrap(i), i) match {
+ case Some(_) => trie.remove(new Wrap(i))
+ case None => // do nothing
+ }
+ }
+ }
+ }
+
+ // removes all the elements from the trie
+ class Remover(trie: TrieMap[Wrap, Int], index: Int, totremovers: Int, sz: Int) extends Thread {
+ setName("Remover %d".format(index))
+
+ override def run() {
+ for (i <- 0 until sz) trie.remove(new Wrap((i + sz / totremovers * index) % sz))
+ }
+ }
+
+ "have a consistent quiescent read-only snapshot" in {
+ val sz = 10000
+ val N = 100
+ val W = 10
+
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+ val readonly = ct.readOnlySnapshot()
+ val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
+
+ threads.foreach(_.start())
+ consistentReadOnly("qm", readonly, sz, N)
+ threads.foreach(_.join())
+ }
+
+ // now, we check non-quiescent snapshots, as these permit situations
+ // where a thread is caught in the middle of the update when a snapshot is taken
+
+ "have a consistent non-quiescent read-only snapshot, concurrent with removes only" in {
+ val sz = 1250
+ val W = 100
+ val S = 5000
+
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+ val threads = for (i <- 0 until W) yield new Remover(ct, i, W, sz)
+
+ threads.foreach(_.start())
+ for (i <- 0 until S) consistentReadOnly("non-qr", ct.readOnlySnapshot(), sz, 5)
+ threads.foreach(_.join())
+ }
+
+ "have a consistent non-quiescent read-only snapshot, concurrent with modifications" in {
+ val sz = 1000
+ val N = 7000
+ val W = 10
+ val S = 7000
+
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+ val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
+
+ threads.foreach(_.start())
+ for (i <- 0 until S) consistentReadOnly("non-qm", ct.readOnlySnapshot(), sz, 5)
+ threads.foreach(_.join())
+ }
+
+ def consistentNonReadOnly(name: String, trie: TrieMap[Wrap, Int], sz: Int, N: Int) {
+ @volatile var e: Exception = null
+
+ // reads possible entries once and stores them
+ // then reads all these N more times to check if the
+ // state stayed the same
+ class Worker extends Thread {
+ setName("Worker " + name)
+
+ override def run() =
+ try check()
+ catch {
+ case ex: Exception => e = ex
+ }
+
+ def check() {
+ val initial = mutable.Map[Wrap, Int]()
+ for (i <- 0 until sz) trie.get(new Wrap(i)) match {
+ case Some(i) => initial.put(new Wrap(i), i)
+ case None => // do nothing
+ }
+
+ for (k <- 0 until N) {
+ // modify
+ for ((key, value) <- initial) {
+ val oldv = if (k % 2 == 0) value else -value
+ val newv = -oldv
+ trie.replace(key, oldv, newv)
+ }
+
+ // check
+ for (i <- 0 until sz) if (initial.contains(new Wrap(i))) {
+ val expected = if (k % 2 == 0) -i else i
+ //println(trie.get(new Wrap(i)))
+ assert(trie.get(new Wrap(i)) == Some(expected))
+ } else {
+ assert(trie.get(new Wrap(i)) == None)
+ }
+ }
+ }
+ }
+
+ val worker = new Worker
+ worker.start()
+ worker.join()
+
+ if (e ne null) {
+ e.printStackTrace()
+ throw e
+ }
+ }
+
+ "have a consistent non-quiescent snapshot, concurrent with modifications" in {
+ val sz = 9000
+ val N = 1000
+ val W = 10
+ val S = 400
+
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+ val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz)
+
+ threads.foreach(_.start())
+ for (i <- 0 until S) {
+ consistentReadOnly("non-qm", ct.snapshot(), sz, 5)
+ consistentNonReadOnly("non-qsnap", ct.snapshot(), sz, 5)
+ }
+ threads.foreach(_.join())
+ }
+
+ "work when many concurrent snapshots are taken, concurrent with modifications" in {
+ val sz = 12000
+ val W = 10
+ val S = 10
+ val modifytimes = 1200
+ val snaptimes = 600
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct(new Wrap(i)) = i
+
+ class Snapshooter extends Thread {
+ setName("Snapshooter")
+ override def run() {
+ for (k <- 0 until snaptimes) {
+ val snap = ct.snapshot()
+ for (i <- 0 until sz) snap.remove(new Wrap(i))
+ for (i <- 0 until sz) assert(!snap.contains(new Wrap(i)))
+ }
+ }
+ }
+
+ val mods = for (i <- 0 until W) yield new Modifier(ct, i, modifytimes, sz)
+ val shooters = for (i <- 0 until S) yield new Snapshooter
+ val threads = mods ++ shooters
+ threads.foreach(_.start())
+ threads.foreach(_.join())
+ }
+
+ }
+
+}
diff --git a/test/files/jvm/bug680.check b/test/files/run/dead-code-elimination.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/dead-code-elimination.check
diff --git a/test/files/run/dead-code-elimination.flags b/test/files/run/dead-code-elimination.flags
new file mode 100644
index 0000000..49d036a
--- /dev/null
+++ b/test/files/run/dead-code-elimination.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/run/dead-code-elimination.scala b/test/files/run/dead-code-elimination.scala
new file mode 100644
index 0000000..1af17c9
--- /dev/null
+++ b/test/files/run/dead-code-elimination.scala
@@ -0,0 +1,33 @@
+
+// This testcase is a snippet that did not compile correctly under
+// pre-release 2.10.x. The relevant discussion around it can be
+// found at:
+// https://groups.google.com/forum/?fromgroups#!topic/scala-internals/qcyTjk8euUI[1-25]
+//
+// The reason it did not compile is related to the fact that ICode
+// ops did not correctly define the stack entries they consumed and
+// the dead code elimination phase was unable to correctly reconstruct
+// the stack after code elimination.
+//
+// Originally, this did not compile, but I included it in the run
+// tests because this was ASM-dependand and did not happen for GenJVM.
+//
+// Thus, we run the code and force the loading of class B -- if the
+// bytecode is incorrect, it will fail the test.
+
+final class A {
+ def f1 = true
+ def f2 = true
+ @inline def f3 = f1 || f2
+ class B {
+ def f() = 1 to 10 foreach (_ => f3)
+ }
+ def f = (new B).f()
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ // force the loading of B
+ (new A).f
+ }
+}
diff --git a/test/files/run/delay-bad.check b/test/files/run/delay-bad.check
new file mode 100644
index 0000000..9d9c828
--- /dev/null
+++ b/test/files/run/delay-bad.check
@@ -0,0 +1,47 @@
+
+
+// new C { }
+-A -B -C
+
+// new C { 5 }
+-A -B -C
+ A+ B+ C+
+
+// new D()
+-A -B -C -D
+ A+ B+ C+ D+
+
+// new D() { }
+-A -B -C -D
+ A+ B+ C+ D+
+
+// new D() { val x = 5 }
+-A -B -C -D
+ A+ B+ C+ D+
+ A+ B+ C+ D+
+
+// new { val x = 5 } with D()
+-A -B -C -D
+ A+ B+ C+ D+
+
+// new E() { val x = 5 }
+-A -B -C -D
+ A+ B+ C+ D+ E+ -E
+ A+ B+ C+ D+ E+
+ A+ B+ C+ D+ E+
+
+// new { val x = 5 } with E()
+-A -B -C -D
+ A+ B+ C+ D+ E+ -E
+ A+ B+ C+ D+ E+
+
+// new { val x = 5 } with E() { }
+-A -B -C -D
+ A+ B+ C+ D+ E+ -E
+ A+ B+ C+ D+ E+
+
+// new { val x = 5 } with E() { 5 }
+-A -B -C -D
+ A+ B+ C+ D+ E+ -E
+ A+ B+ C+ D+ E+
+ A+ B+ C+ D+ E+
diff --git a/test/files/run/delay-bad.scala b/test/files/run/delay-bad.scala
new file mode 100644
index 0000000..43acc1e
--- /dev/null
+++ b/test/files/run/delay-bad.scala
@@ -0,0 +1,77 @@
+trait A extends DelayedInit
+{
+ print("-A")
+
+ def delayedInit(body: => Unit) = {
+ body
+ postConstructionCode
+ }
+ def postConstructionCode: Unit = {
+ print("\n A+")
+ }
+}
+trait B extends A {
+ print(" -B")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" B+")
+ }
+}
+
+trait C extends B {
+ print(" -C")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" C+")
+ }
+}
+
+class D() extends C {
+ print(" -D")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" D+")
+ }
+}
+class E() extends D() {
+ print(" -E")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" E+")
+ }
+}
+
+object Test {
+ def p(msg: String) = println("\n\n// " + msg)
+
+ def main(args: Array[String]) {
+ val f: A => Unit = _ => ()
+
+ p("new C { }")
+ f(new C { })
+ p("new C { 5 }")
+ f(new C { 5 })
+
+ p("new D()")
+ f(new D())
+ p("new D() { }")
+ f(new D() { })
+
+ p("new D() { val x = 5 }")
+ f(new D() { val x = 5 })
+ p("new { val x = 5 } with D()")
+ f(new { val x = 5 } with D())
+
+ p("new E() { val x = 5 }")
+ f(new E() { val x = 5 })
+ p("new { val x = 5 } with E()")
+ f(new { val x = 5 } with E())
+
+ p("new { val x = 5 } with E() { }")
+ f(new { val x = 5 } with E() { })
+ p("new { val x = 5 } with E() { 5 }")
+ f(new { val x = 5 } with E() { 5 })
+
+ println("")
+ }
+}
diff --git a/test/files/run/delay-good.check b/test/files/run/delay-good.check
new file mode 100644
index 0000000..8eb04c7
--- /dev/null
+++ b/test/files/run/delay-good.check
@@ -0,0 +1,41 @@
+
+
+// new C { }
+-A -B -C
+ A+ B+ C+
+
+// new C { 5 }
+-A -B -C
+ A+ B+ C+
+
+// new D()
+-A -B -C -D
+ A+ B+ C+ D+
+
+// new D() { }
+-A -B -C -D
+ A+ B+ C+ D+
+
+// new D() { val x = 5 }
+-A -B -C -D
+ A+ B+ C+ D+
+
+// new { val x = 5 } with D()
+-A -B -C -D
+ A+ B+ C+ D+
+
+// new E() { val x = 5 }
+-A -B -C -D -E
+ A+ B+ C+ D+ E+
+
+// new { val x = 5 } with E()
+-A -B -C -D -E
+ A+ B+ C+ D+ E+
+
+// new { val x = 5 } with E() { }
+-A -B -C -D -E
+ A+ B+ C+ D+ E+
+
+// new { val x = 5 } with E() { 5 }
+-A -B -C -D -E
+ A+ B+ C+ D+ E+
diff --git a/test/files/run/delay-good.scala b/test/files/run/delay-good.scala
new file mode 100644
index 0000000..2e4487b
--- /dev/null
+++ b/test/files/run/delay-good.scala
@@ -0,0 +1,77 @@
+trait A
+{
+ print("-A")
+
+ def delayedInit(body: => Unit) = {
+ body
+ postConstructionCode
+ }
+ def postConstructionCode: Unit = {
+ print("\n A+")
+ }
+}
+trait B extends A {
+ print(" -B")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" B+")
+ }
+}
+
+trait C extends B {
+ print(" -C")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" C+")
+ }
+}
+
+class D() extends C {
+ print(" -D")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" D+")
+ }
+}
+class E() extends D() {
+ print(" -E")
+ override def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print(" E+")
+ }
+}
+
+object Test {
+ def p(msg: String) = println("\n\n// " + msg)
+
+ def main(args: Array[String]) {
+ val f: A => Unit = _.postConstructionCode
+
+ p("new C { }")
+ f(new C { })
+ p("new C { 5 }")
+ f(new C { 5 })
+
+ p("new D()")
+ f(new D())
+ p("new D() { }")
+ f(new D() { })
+
+ p("new D() { val x = 5 }")
+ f(new D() { val x = 5 })
+ p("new { val x = 5 } with D()")
+ f(new { val x = 5 } with D())
+
+ p("new E() { val x = 5 }")
+ f(new E() { val x = 5 })
+ p("new { val x = 5 } with E()")
+ f(new { val x = 5 } with E())
+
+ p("new { val x = 5 } with E() { }")
+ f(new { val x = 5 } with E() { })
+ p("new { val x = 5 } with E() { 5 }")
+ f(new { val x = 5 } with E() { 5 })
+
+ println("")
+ }
+}
diff --git a/test/files/run/dynamic-anyval.check b/test/files/run/dynamic-anyval.check
new file mode 100644
index 0000000..dee7bef
--- /dev/null
+++ b/test/files/run/dynamic-anyval.check
@@ -0,0 +1,4 @@
+().dingo(bippy, 5)
+List(1, 2, 3).dingo(bippy, 5)
+().dingo(bippy, 5)
+List(1, 2, 3).dingo(bippy, 5)
diff --git a/test/files/run/dynamic-anyval.scala b/test/files/run/dynamic-anyval.scala
new file mode 100644
index 0000000..605503d
--- /dev/null
+++ b/test/files/run/dynamic-anyval.scala
@@ -0,0 +1,22 @@
+import scala.language.dynamics
+
+object Test {
+ implicit class DynamicValue[T](val value: T) extends AnyVal with Dynamic {
+ def applyDynamic(name: String)(args: Any*) = println(s"""$this.$name(${args mkString ", "})""")
+ override def toString = "" + value
+ }
+ implicit class DynamicValue2[T](val value: T) extends Dynamic {
+ def applyDynamic(name: String)(args: Any*) = println(s"""$this.$name(${args mkString ", "})""")
+ override def toString = "" + value
+ }
+
+ def f[T](x: DynamicValue[T]) = x.dingo("bippy", 5)
+ def g[T](x: DynamicValue2[T]) = x.dingo("bippy", 5)
+
+ def main(args: Array[String]): Unit = {
+ f(())
+ f(List(1, 2, 3))
+ g(())
+ g(List(1, 2, 3))
+ }
+}
diff --git a/test/files/run/dynamic-applyDynamic.check b/test/files/run/dynamic-applyDynamic.check
new file mode 100644
index 0000000..89a0d55
--- /dev/null
+++ b/test/files/run/dynamic-applyDynamic.check
@@ -0,0 +1,14 @@
+[[syntax trees at end of typer]] // newSource1.scala
+[0:67]package [0:0]<empty> {
+ [0:67]object X extends [9:67][67]scala.AnyRef {
+ [9]def <init>(): [9]X.type = [9]{
+ [9][9][9]X.super.<init>();
+ [9]()
+ };
+ [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D();
+ [21]<stable> <accessor> def d: [21]D = [21][21]X.this.d;
+ [37:49][37:38][37:38][37]X.this.d.applyDynamic(<39:45>"method")([46:48]10);
+ [56:61]<56:57><56:57>[56]X.this.d.applyDynamic(<56:57>"apply")([58:60]10)
+ }
+}
+
diff --git a/test/files/run/dynamic-applyDynamic.scala b/test/files/run/dynamic-applyDynamic.scala
new file mode 100644
index 0000000..b060411
--- /dev/null
+++ b/test/files/run/dynamic-applyDynamic.scala
@@ -0,0 +1,26 @@
+import scala.tools.partest.DirectTest
+
+object Test extends DirectTest {
+
+ override def extraSettings: String =
+ s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path}"
+
+ override def code = """
+ object X {
+ val d = new D
+ d.method(10)
+ d(10)
+ }
+ """.trim
+
+ override def show(): Unit = {
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+}
+
+import language.dynamics
+class D extends Dynamic {
+ def applyDynamic(name: String)(value: Any) = ???
+}
\ No newline at end of file
diff --git a/test/files/run/dynamic-applyDynamicNamed.check b/test/files/run/dynamic-applyDynamicNamed.check
new file mode 100644
index 0000000..17fa496
--- /dev/null
+++ b/test/files/run/dynamic-applyDynamicNamed.check
@@ -0,0 +1,14 @@
+[[syntax trees at end of typer]] // newSource1.scala
+[0:97]package [0:0]<empty> {
+ [0:97]object X extends [9:97][97]scala.AnyRef {
+ [9]def <init>(): [9]X.type = [9]{
+ [9][9][9]X.super.<init>();
+ [9]()
+ };
+ [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D();
+ [21]<stable> <accessor> def d: [21]D = [21][21]X.this.d;
+ [37:70][37:38][37:38][37]X.this.d.applyDynamicNamed(<39:43>"meth")([44:55][44][44][44]scala.this.Tuple2.apply[[44]String, [44]Int]([44:50]"value1", [53:55]10), [57:69][57][57][57]scala.this.Tuple2.apply[[57]String, [57]Int]([57:63]"value2", [66:69]100));
+ [77:91]<77:78><77:78>[77]X.this.d.applyDynamicNamed(<77:78>"apply")([79:90][79][79][79]scala.this.Tuple2.apply[[79]String, [79]Int]([79:85]"value1", [88:90]10))
+ }
+}
+
diff --git a/test/files/run/dynamic-applyDynamicNamed.scala b/test/files/run/dynamic-applyDynamicNamed.scala
new file mode 100644
index 0000000..cc59f90
--- /dev/null
+++ b/test/files/run/dynamic-applyDynamicNamed.scala
@@ -0,0 +1,26 @@
+import scala.tools.partest.DirectTest
+
+object Test extends DirectTest {
+
+ override def extraSettings: String =
+ s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path}"
+
+ override def code = """
+ object X {
+ val d = new D
+ d.meth(value1 = 10, value2 = 100)
+ d(value1 = 10)
+ }
+ """.trim
+
+ override def show(): Unit = {
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+}
+
+import language.dynamics
+class D extends Dynamic {
+ def applyDynamicNamed(name: String)(value: (String, Any)*) = ???
+}
diff --git a/test/files/run/dynamic-selectDynamic.check b/test/files/run/dynamic-selectDynamic.check
new file mode 100644
index 0000000..7f95ed3
--- /dev/null
+++ b/test/files/run/dynamic-selectDynamic.check
@@ -0,0 +1,13 @@
+[[syntax trees at end of typer]] // newSource1.scala
+[0:50]package [0:0]<empty> {
+ [0:50]object X extends [9:50][50]scala.AnyRef {
+ [9]def <init>(): [9]X.type = [9]{
+ [9][9][9]X.super.<init>();
+ [9]()
+ };
+ [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D();
+ [21]<stable> <accessor> def d: [21]D = [21][21]X.this.d;
+ [37:38][37:38][37]X.this.d.selectDynamic(<39:44>"field")
+ }
+}
+
diff --git a/test/files/run/dynamic-selectDynamic.scala b/test/files/run/dynamic-selectDynamic.scala
new file mode 100644
index 0000000..bd6c138
--- /dev/null
+++ b/test/files/run/dynamic-selectDynamic.scala
@@ -0,0 +1,25 @@
+import scala.tools.partest.DirectTest
+
+object Test extends DirectTest {
+
+ override def extraSettings: String =
+ s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path}"
+
+ override def code = """
+ object X {
+ val d = new D
+ d.field
+ }
+ """.trim
+
+ override def show(): Unit = {
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+}
+
+import language.dynamics
+class D extends Dynamic {
+ def selectDynamic(name: String) = ???
+}
diff --git a/test/files/run/dynamic-updateDynamic.check b/test/files/run/dynamic-updateDynamic.check
new file mode 100644
index 0000000..3e21b7d
--- /dev/null
+++ b/test/files/run/dynamic-updateDynamic.check
@@ -0,0 +1,14 @@
+[[syntax trees at end of typer]] // newSource1.scala
+[0:69]package [0:0]<empty> {
+ [0:69]object X extends [9:69][69]scala.AnyRef {
+ [9]def <init>(): [9]X.type = [9]{
+ [9][9][9]X.super.<init>();
+ [9]()
+ };
+ [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D();
+ [21]<stable> <accessor> def d: [21]D = [21][21]X.this.d;
+ [37:49][37:38][37:38][37]X.this.d.updateDynamic(<39:44>"field")([47:49]10);
+ [56:57][56:57][56]X.this.d.selectDynamic(<58:63>"field")
+ }
+}
+
diff --git a/test/files/run/dynamic-updateDynamic.scala b/test/files/run/dynamic-updateDynamic.scala
new file mode 100644
index 0000000..80fe0ea
--- /dev/null
+++ b/test/files/run/dynamic-updateDynamic.scala
@@ -0,0 +1,28 @@
+import scala.tools.partest.DirectTest
+
+object Test extends DirectTest {
+
+ override def extraSettings: String =
+ s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path}"
+
+ override def code = """
+ object X {
+ val d = new D
+ d.field = 10
+ d.field
+ }
+ """.trim
+
+ override def show(): Unit = {
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+}
+
+import language.dynamics
+class D extends Dynamic {
+ def selectDynamic(name: String): Any = ???
+ def updateDynamic(name: String)(value: Any): Unit = ???
+}
+
diff --git a/test/files/run/elidable-opt.check b/test/files/run/elidable-opt.check
new file mode 100644
index 0000000..88cf98e
--- /dev/null
+++ b/test/files/run/elidable-opt.check
@@ -0,0 +1,14 @@
+Good for me, I was not elided. Test.f3
+Good for me, I was not elided. O.f3
+Good for me, I was not elided. C.f1
+Good for me, I was not elided. C.f2
+()
+false
+0
+0
+0
+0
+0
+0.0
+0.0
+null
diff --git a/test/files/run/elidable-opt.flags b/test/files/run/elidable-opt.flags
new file mode 100644
index 0000000..62897ff
--- /dev/null
+++ b/test/files/run/elidable-opt.flags
@@ -0,0 +1 @@
+-optimise -Xelide-below 900
diff --git a/test/files/run/elidable-opt.scala b/test/files/run/elidable-opt.scala
new file mode 100644
index 0000000..a2f29d2
--- /dev/null
+++ b/test/files/run/elidable-opt.scala
@@ -0,0 +1,85 @@
+import annotation._
+import elidable._
+
+trait T {
+ @elidable(FINEST) def f1()
+ @elidable(SEVERE) def f2()
+ @elidable(FINEST) def f3() = assert(false, "Should have been elided.")
+ def f4()
+}
+
+class C extends T {
+ def f1() = println("Good for me, I was not elided. C.f1")
+ def f2() = println("Good for me, I was not elided. C.f2")
+ @elidable(FINEST) def f4() = assert(false, "Should have been elided.")
+}
+
+object O {
+ @elidable(FINEST) def f1() = assert(false, "Should have been elided.")
+ @elidable(INFO) def f2() = assert(false, "Should have been elided.")
+ @elidable(SEVERE) def f3() = println("Good for me, I was not elided. O.f3")
+ @elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).")
+}
+
+object Test {
+ @elidable(FINEST) def f1() = assert(false, "Should have been elided.")
+ @elidable(INFO) def f2() = assert(false, "Should have been elided.")
+ @elidable(SEVERE) def f3() = println("Good for me, I was not elided. Test.f3")
+ @elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).")
+
+ @elidable(FINEST) def f5() = {}
+ @elidable(FINEST) def f6() = true
+ @elidable(FINEST) def f7() = 1:Byte
+ @elidable(FINEST) def f8() = 1:Short
+ @elidable(FINEST) def f9() = 1:Char
+ @elidable(FINEST) def fa() = 1
+ @elidable(FINEST) def fb() = 1l
+ @elidable(FINEST) def fc() = 1.0f
+ @elidable(FINEST) def fd() = 1.0
+ @elidable(FINEST) def fe() = "s"
+
+ def main(args: Array[String]): Unit = {
+ f1()
+ f2()
+ f3()
+ f4
+ O.f1()
+ O.f2()
+ O.f3()
+ O.f4
+
+ val c = new C
+ c.f1()
+ c.f2()
+ c.f3()
+ c.f4()
+
+ // make sure a return value is still available when eliding a call
+ println(f5())
+ println(f6())
+ println(f7())
+ println(f8())
+ println(f9().toInt)
+ println(fa())
+ println(fb())
+ println(fc())
+ println(fd())
+ println(fe())
+
+ // this one won't show up in the output because a call to f1 is elidable when accessed through T
+ (c:T).f1()
+
+ // Test whether the method definitions are still available.
+ List("Test", "Test$", "O", "O$", "C", "T") foreach { className =>
+ List("f1", "f2", "f3", "f4") foreach { methodName =>
+ Class.forName(className).getMethod(methodName)
+ }
+ }
+ List("Test", "Test$") foreach { className =>
+ List("f5", "f6", "f7", "f8", "f9", "fa", "fb", "fc", "fd", "fe") foreach { methodName =>
+ Class.forName(className).getMethod(methodName)
+ }
+ }
+ Class.forName("T$class").getMethod("f3", classOf[T])
+ }
+}
diff --git a/test/files/run/elidable.check b/test/files/run/elidable.check
index 4ce04f0..88cf98e 100644
--- a/test/files/run/elidable.check
+++ b/test/files/run/elidable.check
@@ -1 +1,14 @@
-Good for me, I was not elided.
+Good for me, I was not elided. Test.f3
+Good for me, I was not elided. O.f3
+Good for me, I was not elided. C.f1
+Good for me, I was not elided. C.f2
+()
+false
+0
+0
+0
+0
+0
+0.0
+0.0
+null
diff --git a/test/files/run/elidable.scala b/test/files/run/elidable.scala
index 264efba..a2f29d2 100644
--- a/test/files/run/elidable.scala
+++ b/test/files/run/elidable.scala
@@ -1,16 +1,85 @@
import annotation._
import elidable._
+trait T {
+ @elidable(FINEST) def f1()
+ @elidable(SEVERE) def f2()
+ @elidable(FINEST) def f3() = assert(false, "Should have been elided.")
+ def f4()
+}
+
+class C extends T {
+ def f1() = println("Good for me, I was not elided. C.f1")
+ def f2() = println("Good for me, I was not elided. C.f2")
+ @elidable(FINEST) def f4() = assert(false, "Should have been elided.")
+}
+
+object O {
+ @elidable(FINEST) def f1() = assert(false, "Should have been elided.")
+ @elidable(INFO) def f2() = assert(false, "Should have been elided.")
+ @elidable(SEVERE) def f3() = println("Good for me, I was not elided. O.f3")
+ @elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).")
+}
+
object Test {
@elidable(FINEST) def f1() = assert(false, "Should have been elided.")
@elidable(INFO) def f2() = assert(false, "Should have been elided.")
- @elidable(SEVERE) def f3() = println("Good for me, I was not elided.")
+ @elidable(SEVERE) def f3() = println("Good for me, I was not elided. Test.f3")
@elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).")
-
+
+ @elidable(FINEST) def f5() = {}
+ @elidable(FINEST) def f6() = true
+ @elidable(FINEST) def f7() = 1:Byte
+ @elidable(FINEST) def f8() = 1:Short
+ @elidable(FINEST) def f9() = 1:Char
+ @elidable(FINEST) def fa() = 1
+ @elidable(FINEST) def fb() = 1l
+ @elidable(FINEST) def fc() = 1.0f
+ @elidable(FINEST) def fd() = 1.0
+ @elidable(FINEST) def fe() = "s"
+
def main(args: Array[String]): Unit = {
f1()
f2()
f3()
f4
+ O.f1()
+ O.f2()
+ O.f3()
+ O.f4
+
+ val c = new C
+ c.f1()
+ c.f2()
+ c.f3()
+ c.f4()
+
+ // make sure a return value is still available when eliding a call
+ println(f5())
+ println(f6())
+ println(f7())
+ println(f8())
+ println(f9().toInt)
+ println(fa())
+ println(fb())
+ println(fc())
+ println(fd())
+ println(fe())
+
+ // this one won't show up in the output because a call to f1 is elidable when accessed through T
+ (c:T).f1()
+
+ // Test whether the method definitions are still available.
+ List("Test", "Test$", "O", "O$", "C", "T") foreach { className =>
+ List("f1", "f2", "f3", "f4") foreach { methodName =>
+ Class.forName(className).getMethod(methodName)
+ }
+ }
+ List("Test", "Test$") foreach { className =>
+ List("f5", "f6", "f7", "f8", "f9", "fa", "fb", "fc", "fd", "fe") foreach { methodName =>
+ Class.forName(className).getMethod(methodName)
+ }
+ }
+ Class.forName("T$class").getMethod("f3", classOf[T])
}
}
diff --git a/test/files/run/empty-array.check b/test/files/run/empty-array.check
new file mode 100644
index 0000000..bb0b1cf
--- /dev/null
+++ b/test/files/run/empty-array.check
@@ -0,0 +1,3 @@
+0
+0
+0
diff --git a/test/files/run/empty-array.scala b/test/files/run/empty-array.scala
new file mode 100644
index 0000000..6e37dca
--- /dev/null
+++ b/test/files/run/empty-array.scala
@@ -0,0 +1,8 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(Array.emptyByteArray.length)
+ println(Array.emptyDoubleArray.length)
+ println(Array.emptyBooleanArray.length)
+ // okay okay okay
+ }
+}
diff --git a/test/files/run/emptypf.check b/test/files/run/emptypf.check
new file mode 100644
index 0000000..f6c3992
--- /dev/null
+++ b/test/files/run/emptypf.check
@@ -0,0 +1,3 @@
+100
+3
+false
diff --git a/test/files/run/emptypf.scala b/test/files/run/emptypf.scala
new file mode 100644
index 0000000..8aa0906
--- /dev/null
+++ b/test/files/run/emptypf.scala
@@ -0,0 +1,14 @@
+object Test {
+ val f: PartialFunction[String, Int] = {
+ PartialFunction.empty[String, Int] orElse {
+ case "abc" => 100
+ case s => s.length
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(f("abc"))
+ println(f("def"))
+ println(PartialFunction.empty[String, Int] isDefinedAt "abc")
+ }
+}
diff --git a/test/files/run/enrich-gentraversable.check b/test/files/run/enrich-gentraversable.check
new file mode 100644
index 0000000..94c66e3
--- /dev/null
+++ b/test/files/run/enrich-gentraversable.check
@@ -0,0 +1,8 @@
+List(2, 4)
+Array(2, 4)
+HW
+Vector(72, 108, 108, 32, 114, 108, 100)
+List(2, 4)
+Array(2, 4)
+HW
+Vector(72, 108, 108, 32, 114, 108, 100)
diff --git a/test/files/run/enrich-gentraversable.scala b/test/files/run/enrich-gentraversable.scala
new file mode 100644
index 0000000..52eded5
--- /dev/null
+++ b/test/files/run/enrich-gentraversable.scala
@@ -0,0 +1,67 @@
+object Test extends App {
+ import scala.collection.{GenTraversableOnce,GenTraversableLike}
+ import scala.collection.generic._
+
+ def typed[T](t : => T) {}
+ def testTraversableLike = {
+ class FilterMapImpl[A, Repr](val r: GenTraversableLike[A, Repr]) /* extends AnyVal */ {
+ final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That =
+ r.flatMap(f(_).toSeq)
+ }
+ implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableLike[Repr]): FilterMapImpl[fr.A,Repr] =
+ new FilterMapImpl[fr.A, Repr](fr.conversion(r))
+
+ val l = List(1, 2, 3, 4, 5)
+ val fml = l.filterMap(i => if(i % 2 == 0) Some(i) else None)
+ typed[List[Int]](fml)
+ println(fml)
+
+ val a = Array(1, 2, 3, 4, 5)
+ val fma = a.filterMap(i => if(i % 2 == 0) Some(i) else None)
+ typed[Array[Int]](fma)
+ println(fma.deep)
+
+ val s = "Hello World"
+ val fms1 = s.filterMap(c => if(c >= 'A' && c <= 'Z') Some(c) else None)
+ typed[String](fms1)
+ println(fms1)
+
+ val fms2 = s.filterMap(c =>if(c % 2 == 0) Some(c.toInt) else None)
+ typed[IndexedSeq[Int]](fms2)
+ println(fms2)
+ }
+ def testTraversableOnce = {
+ class FilterMapImpl[A, Repr](val r: GenTraversableOnce[A]) /* extends AnyVal */ {
+ final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = {
+ val b = cbf()
+ for(e <- r.seq) f(e) foreach (b +=)
+
+ b.result
+ }
+ }
+ implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsTraversableOnce[Repr]): FilterMapImpl[fr.A,Repr] =
+ new FilterMapImpl[fr.A, Repr](fr.conversion(r))
+
+ val l = List(1, 2, 3, 4, 5)
+ val fml = l.filterMap(i => if(i % 2 == 0) Some(i) else None)
+ typed[List[Int]](fml)
+ println(fml)
+
+ val a = Array(1, 2, 3, 4, 5)
+ val fma = a.filterMap(i => if(i % 2 == 0) Some(i) else None)
+ typed[Array[Int]](fma)
+ println(fma.deep)
+
+ val s = "Hello World"
+ val fms1 = s.filterMap(c => if(c >= 'A' && c <= 'Z') Some(c) else None)
+ typed[String](fms1)
+ println(fms1)
+
+ val fms2 = s.filterMap(c =>if(c % 2 == 0) Some(c.toInt) else None)
+ typed[IndexedSeq[Int]](fms2)
+ println(fms2)
+ }
+
+ testTraversableLike
+ testTraversableOnce
+}
diff --git a/test/files/run/enums.check b/test/files/run/enums.check
index f53aba8..93eadae 100644
--- a/test/files/run/enums.check
+++ b/test/files/run/enums.check
@@ -3,3 +3,13 @@ test Test2 was successful
test Test3 was successful
test Test4 was successful
+D1.ValueSet(North, East)
+D2.ValueSet(North, East)
+D1.ValueSet(North, East, West)
+D2.ValueSet(North, East, West)
+List(101)
+List(101)
+D1.ValueSet(North, East)
+D2.ValueSet(North, East)
+WeekDays.ValueSet(Tue, Wed, Thu, Fri)
+
diff --git a/test/files/run/enums.scala b/test/files/run/enums.scala
index 6dda8cb..9cdeed2 100644
--- a/test/files/run/enums.scala
+++ b/test/files/run/enums.scala
@@ -65,6 +65,58 @@ object Test4 {
}
}
+object Test5 {
+
+ object D1 extends Enumeration(0) {
+ val North, South, East, West = Value;
+ }
+
+ object D2 extends Enumeration(-2) {
+ val North, South, East, West = Value;
+ }
+
+ object WeekDays extends Enumeration {
+ val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
+ }
+
+ def run {
+ val s1 = D1.ValueSet(D1.North, D1.East)
+ val s2 = D2.North + D2.East
+ println(s1)
+ println(s2)
+ println(s1 + D1.West)
+ println(s2 + D2.West)
+ println(s1.toBitMask.map(_.toBinaryString).toList)
+ println(s2.toBitMask.map(_.toBinaryString).toList)
+ println(D1.ValueSet.fromBitMask(s1.toBitMask))
+ println(D2.ValueSet.fromBitMask(s2.toBitMask))
+ println(WeekDays.values.range(WeekDays.Tue, WeekDays.Sat))
+ }
+}
+
+object SerializationTest {
+ object Types extends Enumeration { val X, Y = Value }
+ class A extends java.io.Serializable { val types = Types.values }
+ class B extends java.io.Serializable { val types = Set(Types.X, Types.Y) }
+
+ def serialize(obj: AnyRef) = {
+ val baos = new java.io.ByteArrayOutputStream()
+ val oos = new java.io.ObjectOutputStream(baos)
+ oos.writeObject(obj)
+ oos.close()
+ val bais = new java.io.ByteArrayInputStream(baos.toByteArray)
+ val ois = new java.io.ObjectInputStream(bais)
+ val prime = ois.readObject()
+ ois.close()
+ prime
+ }
+
+ def run {
+ serialize(new B())
+ serialize(new A())
+ }
+}
+
//############################################################################
// Test code
@@ -94,6 +146,9 @@ object Test {
check_success("Test3", Test3.run, 1);
check_success("Test4", Test4.run, 0);
Console.println;
+ Test5.run;
+ Console.println;
+ SerializationTest.run;
}
}
diff --git a/test/files/run/eta-expand-star.check b/test/files/run/eta-expand-star.check
new file mode 100644
index 0000000..ce01362
--- /dev/null
+++ b/test/files/run/eta-expand-star.check
@@ -0,0 +1 @@
+hello
diff --git a/test/files/run/eta-expand-star.scala b/test/files/run/eta-expand-star.scala
new file mode 100644
index 0000000..7717c4b
--- /dev/null
+++ b/test/files/run/eta-expand-star.scala
@@ -0,0 +1,8 @@
+object Test {
+ def f[T](xs: T*): T = xs.head
+ def g[T] = f[T] _
+
+ def main(args: Array[String]): Unit = {
+ println(g("hello" +: args))
+ }
+}
diff --git a/test/files/run/eta-expand-star2.check b/test/files/run/eta-expand-star2.check
new file mode 100644
index 0000000..ce01362
--- /dev/null
+++ b/test/files/run/eta-expand-star2.check
@@ -0,0 +1 @@
+hello
diff --git a/test/files/run/eta-expand-star2.flags b/test/files/run/eta-expand-star2.flags
new file mode 100644
index 0000000..0402fe5
--- /dev/null
+++ b/test/files/run/eta-expand-star2.flags
@@ -0,0 +1 @@
+-Yeta-expand-keeps-star
\ No newline at end of file
diff --git a/test/files/run/eta-expand-star2.scala b/test/files/run/eta-expand-star2.scala
new file mode 100644
index 0000000..eb65078
--- /dev/null
+++ b/test/files/run/eta-expand-star2.scala
@@ -0,0 +1,8 @@
+object Test {
+ def f[T](xs: T*): T = xs.head
+ def g[T] = f[T] _
+
+ def main(args: Array[String]): Unit = {
+ println(g("hello"))
+ }
+}
diff --git a/test/files/run/existentials-in-compiler.check b/test/files/run/existentials-in-compiler.check
new file mode 100644
index 0000000..0d7a929
--- /dev/null
+++ b/test/files/run/existentials-in-compiler.check
@@ -0,0 +1,156 @@
+abstract trait Bippy[A <: AnyRef, B] extends AnyRef
+ extest.Bippy[_ <: AnyRef, _]
+
+abstract trait BippyBud[A <: AnyRef, B, C <: List[A]] extends AnyRef
+ extest.BippyBud[A,B,C] forSome { A <: AnyRef; B; C <: List[A] }
+
+abstract trait BippyLike[A <: AnyRef, B <: List[A], This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B]] extends AnyRef
+ extest.BippyLike[A,B,This] forSome { A <: AnyRef; B <: List[A]; This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B] }
+
+abstract trait Contra[-A >: AnyRef, -B] extends AnyRef
+ extest.Contra[_ >: AnyRef, _]
+
+abstract trait ContraLike[-A >: AnyRef, -B >: List[A]] extends AnyRef
+ extest.ContraLike[A,B] forSome { -A >: AnyRef; -B >: List[A] }
+
+abstract trait Cov01[+A <: AnyRef, +B] extends AnyRef
+ extest.Cov01[_ <: AnyRef, _]
+
+abstract trait Cov02[+A <: AnyRef, B] extends AnyRef
+ extest.Cov02[_ <: AnyRef, _]
+
+abstract trait Cov03[+A <: AnyRef, -B] extends AnyRef
+ extest.Cov03[_ <: AnyRef, _]
+
+abstract trait Cov04[A <: AnyRef, +B] extends AnyRef
+ extest.Cov04[_ <: AnyRef, _]
+
+abstract trait Cov05[A <: AnyRef, B] extends AnyRef
+ extest.Cov05[_ <: AnyRef, _]
+
+abstract trait Cov06[A <: AnyRef, -B] extends AnyRef
+ extest.Cov06[_ <: AnyRef, _]
+
+abstract trait Cov07[-A <: AnyRef, +B] extends AnyRef
+ extest.Cov07[_ <: AnyRef, _]
+
+abstract trait Cov08[-A <: AnyRef, B] extends AnyRef
+ extest.Cov08[_ <: AnyRef, _]
+
+abstract trait Cov09[-A <: AnyRef, -B] extends AnyRef
+ extest.Cov09[_ <: AnyRef, _]
+
+abstract trait Cov11[+A <: AnyRef, +B <: List[_]] extends AnyRef
+ extest.Cov11[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov12[+A <: AnyRef, B <: List[_]] extends AnyRef
+ extest.Cov12[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov13[+A <: AnyRef, -B <: List[_]] extends AnyRef
+ extest.Cov13[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov14[A <: AnyRef, +B <: List[_]] extends AnyRef
+ extest.Cov14[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov15[A <: AnyRef, B <: List[_]] extends AnyRef
+ extest.Cov15[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov16[A <: AnyRef, -B <: List[_]] extends AnyRef
+ extest.Cov16[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov17[-A <: AnyRef, +B <: List[_]] extends AnyRef
+ extest.Cov17[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov18[-A <: AnyRef, B <: List[_]] extends AnyRef
+ extest.Cov18[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov19[-A <: AnyRef, -B <: List[_]] extends AnyRef
+ extest.Cov19[_ <: AnyRef, _ <: List[_]]
+
+abstract trait Cov21[+A, +B] extends AnyRef
+ extest.Cov21[_, _]
+
+abstract trait Cov22[+A, B] extends AnyRef
+ extest.Cov22[_, _]
+
+abstract trait Cov23[+A, -B] extends AnyRef
+ extest.Cov23[_, _]
+
+abstract trait Cov24[A, +B] extends AnyRef
+ extest.Cov24[_, _]
+
+abstract trait Cov25[A, B] extends AnyRef
+ extest.Cov25[_, _]
+
+abstract trait Cov26[A, -B] extends AnyRef
+ extest.Cov26[_, _]
+
+abstract trait Cov27[-A, +B] extends AnyRef
+ extest.Cov27[_, _]
+
+abstract trait Cov28[-A, B] extends AnyRef
+ extest.Cov28[_, _]
+
+abstract trait Cov29[-A, -B] extends AnyRef
+ extest.Cov29[_, _]
+
+abstract trait Cov31[+A, +B, C <: (A, B)] extends AnyRef
+ extest.Cov31[A,B,C] forSome { +A; +B; C <: (A, B) }
+
+abstract trait Cov32[+A, B, C <: (A, B)] extends AnyRef
+ extest.Cov32[A,B,C] forSome { +A; B; C <: (A, B) }
+
+abstract trait Cov33[+A, -B, C <: Tuple2[A, _]] extends AnyRef
+ extest.Cov33[A,B,C] forSome { +A; -B; C <: Tuple2[A, _] }
+
+abstract trait Cov34[A, +B, C <: (A, B)] extends AnyRef
+ extest.Cov34[A,B,C] forSome { A; +B; C <: (A, B) }
+
+abstract trait Cov35[A, B, C <: (A, B)] extends AnyRef
+ extest.Cov35[A,B,C] forSome { A; B; C <: (A, B) }
+
+abstract trait Cov36[A, -B, C <: Tuple2[A, _]] extends AnyRef
+ extest.Cov36[A,B,C] forSome { A; -B; C <: Tuple2[A, _] }
+
+abstract trait Cov37[-A, +B, C <: Tuple2[_, B]] extends AnyRef
+ extest.Cov37[A,B,C] forSome { -A; +B; C <: Tuple2[_, B] }
+
+abstract trait Cov38[-A, B, C <: Tuple2[_, B]] extends AnyRef
+ extest.Cov38[A,B,C] forSome { -A; B; C <: Tuple2[_, B] }
+
+abstract trait Cov39[-A, -B, C <: Tuple2[_, _]] extends AnyRef
+ extest.Cov39[_, _, _ <: Tuple2[_, _]]
+
+abstract trait Cov41[+A >: Null, +B] extends AnyRef
+ extest.Cov41[_ >: Null, _]
+
+abstract trait Cov42[+A >: Null, B] extends AnyRef
+ extest.Cov42[_ >: Null, _]
+
+abstract trait Cov43[+A >: Null, -B] extends AnyRef
+ extest.Cov43[_ >: Null, _]
+
+abstract trait Cov44[A >: Null, +B] extends AnyRef
+ extest.Cov44[_ >: Null, _]
+
+abstract trait Cov45[A >: Null, B] extends AnyRef
+ extest.Cov45[_ >: Null, _]
+
+abstract trait Cov46[A >: Null, -B] extends AnyRef
+ extest.Cov46[_ >: Null, _]
+
+abstract trait Cov47[-A >: Null, +B] extends AnyRef
+ extest.Cov47[_ >: Null, _]
+
+abstract trait Cov48[-A >: Null, B] extends AnyRef
+ extest.Cov48[_ >: Null, _]
+
+abstract trait Cov49[-A >: Null, -B] extends AnyRef
+ extest.Cov49[_ >: Null, _]
+
+abstract trait Covariant[+A <: AnyRef, +B] extends AnyRef
+ extest.Covariant[_ <: AnyRef, _]
+
+abstract trait CovariantLike[+A <: AnyRef, +B <: List[A], +This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B]] extends AnyRef
+ extest.CovariantLike[A,B,This] forSome { +A <: AnyRef; +B <: List[A]; +This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B] }
+
diff --git a/test/files/run/existentials-in-compiler.scala b/test/files/run/existentials-in-compiler.scala
new file mode 100644
index 0000000..c69d121
--- /dev/null
+++ b/test/files/run/existentials-in-compiler.scala
@@ -0,0 +1,84 @@
+import scala.tools.nsc._
+import scala.tools.partest.CompilerTest
+import scala.collection.{ mutable, immutable, generic }
+
+object Test extends CompilerTest {
+ import global._
+ import rootMirror._
+ import definitions._
+
+ override def code = """
+package extest {
+ trait Bippy[A <: AnyRef, B] { } // wildcards
+ trait BippyLike[A <: AnyRef, B <: List[A], This <: BippyLike[A, B, This] with Bippy[A, B]] // no wildcards
+ trait BippyBud[A <: AnyRef, B, C <: List[A]]
+
+ trait Cov01[+A <: AnyRef, +B] { }
+ trait Cov02[+A <: AnyRef, B] { }
+ trait Cov03[+A <: AnyRef, -B] { }
+ trait Cov04[ A <: AnyRef, +B] { }
+ trait Cov05[ A <: AnyRef, B] { }
+ trait Cov06[ A <: AnyRef, -B] { }
+ trait Cov07[-A <: AnyRef, +B] { }
+ trait Cov08[-A <: AnyRef, B] { }
+ trait Cov09[-A <: AnyRef, -B] { }
+
+ trait Cov11[+A <: AnyRef, +B <: List[_]] { }
+ trait Cov12[+A <: AnyRef, B <: List[_]] { }
+ trait Cov13[+A <: AnyRef, -B <: List[_]] { }
+ trait Cov14[ A <: AnyRef, +B <: List[_]] { }
+ trait Cov15[ A <: AnyRef, B <: List[_]] { }
+ trait Cov16[ A <: AnyRef, -B <: List[_]] { }
+ trait Cov17[-A <: AnyRef, +B <: List[_]] { }
+ trait Cov18[-A <: AnyRef, B <: List[_]] { }
+ trait Cov19[-A <: AnyRef, -B <: List[_]] { }
+
+ trait Cov21[+A, +B] { }
+ trait Cov22[+A, B] { }
+ trait Cov23[+A, -B] { }
+ trait Cov24[ A, +B] { }
+ trait Cov25[ A, B] { }
+ trait Cov26[ A, -B] { }
+ trait Cov27[-A, +B] { }
+ trait Cov28[-A, B] { }
+ trait Cov29[-A, -B] { }
+
+ trait Cov31[+A, +B, C <: ((A, B))] { }
+ trait Cov32[+A, B, C <: ((A, B))] { }
+ trait Cov33[+A, -B, C <: ((A, _))] { }
+ trait Cov34[ A, +B, C <: ((A, B))] { }
+ trait Cov35[ A, B, C <: ((A, B))] { }
+ trait Cov36[ A, -B, C <: ((A, _))] { }
+ trait Cov37[-A, +B, C <: ((_, B))] { }
+ trait Cov38[-A, B, C <: ((_, B))] { }
+ trait Cov39[-A, -B, C <: ((_, _))] { }
+
+ trait Cov41[+A >: Null, +B] { }
+ trait Cov42[+A >: Null, B] { }
+ trait Cov43[+A >: Null, -B] { }
+ trait Cov44[ A >: Null, +B] { }
+ trait Cov45[ A >: Null, B] { }
+ trait Cov46[ A >: Null, -B] { }
+ trait Cov47[-A >: Null, +B] { }
+ trait Cov48[-A >: Null, B] { }
+ trait Cov49[-A >: Null, -B] { }
+
+ trait Covariant[+A <: AnyRef, +B] { }
+ trait CovariantLike[+A <: AnyRef, +B <: List[A], +This <: CovariantLike[A, B, This] with Covariant[A, B]]
+
+ trait Contra[-A >: AnyRef, -B] { }
+ trait ContraLike[-A >: AnyRef, -B >: List[A]]
+}
+ """
+
+ def check(source: String, unit: global.CompilationUnit) = {
+ getRequiredPackage("extest").moduleClass.info.decls.toList.filter(_.isType).map(_.initialize).sortBy(_.name.toString) foreach { clazz =>
+ afterTyper {
+ clazz.info
+ println(clazz.defString)
+ println(" " + classExistentialType(clazz) + "\n")
+ }
+ }
+ true
+ }
+}
diff --git a/test/files/run/existentials3-new.check b/test/files/run/existentials3-new.check
new file mode 100644
index 0000000..8f7dd70
--- /dev/null
+++ b/test/files/run/existentials3-new.check
@@ -0,0 +1,24 @@
+Bar.type, t=TypeRef, s=type Bar.type
+Bar, t=TypeRef, s=type Bar
+Test.ToS, t=RefinedType, s=f3
+Test.ToS, t=RefinedType, s=f4
+Test.ToS, t=RefinedType, s=f5
+() => Test.ToS, t=TypeRef, s=trait Function0
+() => Test.ToS, t=TypeRef, s=trait Function0
+$anon, t=TypeRef, s=type $anon
+$anon, t=TypeRef, s=type $anon
+List[AnyRef{type T1}#T1], t=TypeRef, s=class List
+List[Seq[Int]], t=TypeRef, s=class List
+List[Seq[U forSome { type U <: Int }]], t=TypeRef, s=class List
+Bar.type, t=TypeRef, s=type Bar.type
+Bar, t=TypeRef, s=type Bar
+Test.ToS, t=RefinedType, s=g3
+Test.ToS, t=RefinedType, s=g4
+Test.ToS, t=RefinedType, s=g5
+() => Test.ToS, t=TypeRef, s=trait Function0
+() => Test.ToS, t=TypeRef, s=trait Function0
+$anon, t=TypeRef, s=type $anon
+$anon, t=TypeRef, s=type $anon
+List[AnyRef{type T1}#T1], t=TypeRef, s=class List
+List[Seq[Int]], t=TypeRef, s=class List
+List[Seq[U forSome { type U <: Int }]], t=TypeRef, s=class List
diff --git a/test/files/run/existentials3-new.scala b/test/files/run/existentials3-new.scala
new file mode 100644
index 0000000..110c8ef
--- /dev/null
+++ b/test/files/run/existentials3-new.scala
@@ -0,0 +1,80 @@
+import scala.reflect.runtime.universe._
+
+object Test {
+ trait ToS { final override def toString = getClass.getName }
+
+ def f1 = { case class Bar() extends ToS; Bar }
+ def f2 = { case class Bar() extends ToS; Bar() }
+ def f3 = { class Bar() extends ToS; object Bar extends ToS; Bar }
+ def f4 = { class Bar() extends ToS; new Bar() }
+ def f5 = { object Bar extends ToS; Bar }
+ def f6 = { () => { object Bar extends ToS ; Bar } }
+ def f7 = { val f = { () => { object Bar extends ToS ; Bar } } ; f }
+
+ def f8 = { trait A ; trait B extends A ; class C extends B with ToS; new C { } }
+ def f9 = { trait A ; trait B ; class C extends B with A with ToS; new C { } }
+
+ def f10 = { class A { type T1 } ; List[A#T1]() }
+ def f11 = { abstract class A extends Seq[Int] ; List[A]() }
+ def f12 = { abstract class A extends Seq[U forSome { type U <: Int }] ; List[A]() }
+
+ val g1 = { case class Bar() extends ToS; Bar }
+ val g2 = { case class Bar() extends ToS; Bar() }
+ val g3 = { class Bar() extends ToS; object Bar extends ToS; Bar }
+ val g4 = { class Bar() extends ToS; new Bar() }
+ val g5 = { object Bar extends ToS; Bar }
+ val g6 = { () => { object Bar extends ToS ; Bar } }
+ val g7 = { val f = { () => { object Bar extends ToS ; Bar } } ; f }
+
+ val g8 = { trait A ; trait B extends A ; class C extends B with ToS; new C { } }
+ val g9 = { trait A ; trait B ; class C extends B with A with ToS; new C { } }
+
+ val g10 = { class A { type T1 } ; List[A#T1]() }
+ val g11 = { abstract class A extends Seq[Int] ; List[A]() }
+ val g12 = { abstract class A extends Seq[U forSome { type U <: Int }] ; List[A]() }
+
+ def printTpe(t: Type) = {
+ val s = if (t.typeSymbol.isFreeType) t.typeSymbol.typeSignature.toString else t.typeSymbol.toString
+ println("%s, t=%s, s=%s".format(t, t.asInstanceOf[Product].productPrefix, s))
+ }
+ def m[T: TypeTag](x: T) = printTpe(typeOf[T])
+ def m2[T: WeakTypeTag](x: T) = printTpe(implicitly[WeakTypeTag[T]].tpe)
+
+ // tags do work for f10/g10
+ def main(args: Array[String]): Unit = {
+ m2(f1)
+ m2(f2)
+ m(f3)
+ m(f4)
+ m(f5)
+ m(f6)
+ m(f7)
+ m2(f8)
+ m2(f9)
+ m2(f10)
+ m(f11)
+ m(f12)
+ m2(g1)
+ m2(g2)
+ m(g3)
+ m(g4)
+ m(g5)
+ m(g6)
+ m(g7)
+ m2(g8)
+ m2(g9)
+ m2(g10)
+ m(g11)
+ m(g12)
+ }
+}
+
+object Misc {
+ trait Bippy { def bippy = "I'm Bippy!" }
+ object o1 {
+ def f1 = { trait A extends Seq[U forSome { type U <: Bippy }] ; abstract class B extends A ; trait C extends B ; (null: C) }
+ def f2 = f1.head.bippy
+ }
+ def g1 = o1.f1 _
+ def g2 = o1.f2 _
+}
\ No newline at end of file
diff --git a/test/files/run/existentials3-old.check b/test/files/run/existentials3-old.check
new file mode 100644
index 0000000..36a458d
--- /dev/null
+++ b/test/files/run/existentials3-old.check
@@ -0,0 +1,22 @@
+_ <: scala.runtime.AbstractFunction0[_ <: Object with Test$ToS with scala.Product with scala.Serializable] with scala.Serializable with java.lang.Object
+_ <: Object with Test$ToS with scala.Product with scala.Serializable
+Object with Test$ToS
+Object with Test$ToS
+Object with Test$ToS
+scala.Function0[Object with Test$ToS]
+scala.Function0[Object with Test$ToS]
+_ <: Object with _ <: Object with Object with Test$ToS
+_ <: Object with _ <: Object with _ <: Object with Test$ToS
+scala.collection.immutable.List[Object with scala.collection.Seq[Int]]
+scala.collection.immutable.List[Object with scala.collection.Seq[_ <: Int]]
+_ <: scala.runtime.AbstractFunction0[_ <: Object with Test$ToS with scala.Product with scala.Serializable] with scala.Serializable with java.lang.Object
+_ <: Object with Test$ToS with scala.Product with scala.Serializable
+Object with Test$ToS
+Object with Test$ToS
+Object with Test$ToS
+scala.Function0[Object with Test$ToS]
+scala.Function0[Object with Test$ToS]
+_ <: Object with _ <: Object with Object with Test$ToS
+_ <: Object with _ <: Object with _ <: Object with Test$ToS
+scala.collection.immutable.List[Object with scala.collection.Seq[Int]]
+scala.collection.immutable.List[Object with scala.collection.Seq[_ <: Int]]
diff --git a/test/files/run/existentials3-old.scala b/test/files/run/existentials3-old.scala
new file mode 100644
index 0000000..944160f
--- /dev/null
+++ b/test/files/run/existentials3-old.scala
@@ -0,0 +1,73 @@
+object Test {
+ trait ToS { final override def toString = getClass.getName }
+
+ def f1 = { case class Bar() extends ToS; Bar }
+ def f2 = { case class Bar() extends ToS; Bar() }
+ def f3 = { class Bar() extends ToS; object Bar extends ToS; Bar }
+ def f4 = { class Bar() extends ToS; new Bar() }
+ def f5 = { object Bar extends ToS; Bar }
+ def f6 = { () => { object Bar extends ToS ; Bar } }
+ def f7 = { val f = { () => { object Bar extends ToS ; Bar } } ; f }
+
+ def f8 = { trait A ; trait B extends A ; class C extends B with ToS; new C { } }
+ def f9 = { trait A ; trait B ; class C extends B with A with ToS; new C { } }
+
+ def f10 = { class A { type T1 } ; List[A#T1]() }
+ def f11 = { abstract class A extends Seq[Int] ; List[A]() }
+ def f12 = { abstract class A extends Seq[U forSome { type U <: Int }] ; List[A]() }
+
+ val g1 = { case class Bar() extends ToS; Bar }
+ val g2 = { case class Bar() extends ToS; Bar() }
+ val g3 = { class Bar() extends ToS; object Bar extends ToS; Bar }
+ val g4 = { class Bar() extends ToS; new Bar() }
+ val g5 = { object Bar extends ToS; Bar }
+ val g6 = { () => { object Bar extends ToS ; Bar } }
+ val g7 = { val f = { () => { object Bar extends ToS ; Bar } } ; f }
+
+ val g8 = { trait A ; trait B extends A ; class C extends B with ToS; new C { } }
+ val g9 = { trait A ; trait B ; class C extends B with A with ToS; new C { } }
+
+ val g10 = { class A { type T1 } ; List[A#T1]() }
+ val g11 = { abstract class A extends Seq[Int] ; List[A]() }
+ val g12 = { abstract class A extends Seq[U forSome { type U <: Int }] ; List[A]() }
+
+ def m[T: Manifest](x: T) = println(manifest[T])
+
+ // manifests don't work for f10/g10
+ def main(args: Array[String]): Unit = {
+ m(f1)
+ m(f2)
+ m(f3)
+ m(f4)
+ m(f5)
+ m(f6)
+ m(f7)
+ m(f8)
+ m(f9)
+ // m(f10)
+ m(f11)
+ m(f12)
+ m(g1)
+ m(g2)
+ m(g3)
+ m(g4)
+ m(g5)
+ m(g6)
+ m(g7)
+ m(g8)
+ m(g9)
+ // m(g10)
+ m(g11)
+ m(g12)
+ }
+}
+
+object Misc {
+ trait Bippy { def bippy = "I'm Bippy!" }
+ object o1 {
+ def f1 = { trait A extends Seq[U forSome { type U <: Bippy }] ; abstract class B extends A ; trait C extends B ; (null: C) }
+ def f2 = f1.head.bippy
+ }
+ def g1 = o1.f1 _
+ def g2 = o1.f2 _
+}
diff --git a/test/files/run/exprs_serialize.check b/test/files/run/exprs_serialize.check
new file mode 100644
index 0000000..20ad6c1
--- /dev/null
+++ b/test/files/run/exprs_serialize.check
@@ -0,0 +1,2 @@
+java.io.NotSerializableException: Test$$treecreator1$1
+java.io.NotSerializableException: Test$$treecreator2$1
diff --git a/test/files/run/exprs_serialize.scala b/test/files/run/exprs_serialize.scala
new file mode 100644
index 0000000..c4310b0
--- /dev/null
+++ b/test/files/run/exprs_serialize.scala
@@ -0,0 +1,29 @@
+import java.io._
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ def test(expr: Expr[_]) =
+ try {
+ val fout = new ByteArrayOutputStream()
+ val out = new ObjectOutputStream(fout)
+ out.writeObject(expr)
+ out.close()
+ fout.close()
+
+ val fin = new ByteArrayInputStream(fout.toByteArray)
+ val in = new ObjectInputStream(fin)
+ val reexpr = in.readObject().asInstanceOf[ru.Expr[_]].in(cm)
+ in.close()
+ fin.close()
+
+ println(reexpr)
+ } catch {
+ case ex: Exception =>
+ println(ex)
+ }
+
+ test(reify(2))
+ test(reify{def foo = "hello"; foo + "world!"})
+}
\ No newline at end of file
diff --git a/test/files/run/fail-non-value-types.check b/test/files/run/fail-non-value-types.check
new file mode 100644
index 0000000..714dce2
--- /dev/null
+++ b/test/files/run/fail-non-value-types.check
@@ -0,0 +1,3 @@
+[B, That](f: A => B)(implicit cbf: ImaginaryCanBuildFrom[CompletelyIndependentList.this.Repr,B,That])That
+[B, That](f: Int => B)(implicit cbf: ImaginaryCanBuildFrom[CompletelyIndependentList[Int]#Repr,B,That])That
+()CompletelyIndependentList[A]
diff --git a/test/files/run/fail-non-value-types.scala b/test/files/run/fail-non-value-types.scala
new file mode 100644
index 0000000..51198a5
--- /dev/null
+++ b/test/files/run/fail-non-value-types.scala
@@ -0,0 +1,40 @@
+import scala.reflect.runtime.universe._
+
+class ImaginaryCanBuildFrom[-From, -Elem, +To]
+class CompletelyIndependentList[+A] {
+ type Repr <: CompletelyIndependentList[A]
+ def map[B, That](f: A => B)(implicit cbf: ImaginaryCanBuildFrom[Repr, B, That]): That = ???
+ def distinct(): CompletelyIndependentList[A] = ???
+}
+
+object Test {
+ var failed = false
+ def expectFailure[T](body: => T): Boolean = {
+ try { val res = body ; failed = true ; println(res + " failed to fail.") ; false }
+ catch { case _: AssertionError => true }
+ }
+
+ /** Attempt to use a method type as a type argument - expect failure. */
+ def tcon[T: TypeTag](args: Type*) = appliedType(typeOf[T].typeConstructor, args.toList)
+
+ def cil = typeOf[CompletelyIndependentList[Int]]
+ def map = cil.member("map": TermName).asMethod
+ def distinct = cil.member("distinct": TermName).asMethod
+
+ def main(args: Array[String]): Unit = {
+ // Need the assert in there to fail.
+ // expectFailure(println(tcon[CompletelyIndependentList[Int]](map)))
+ // expectFailure(tcon[CompletelyIndependentList[Int]](distinct))
+
+ // Why is the first map signature printing showing an
+ // uninitialized symbol?
+ //
+ // [B <: <?>, That <: <?>](f: <?>)(implicit cbf: <?>)That
+ //
+
+ println(map.typeSignature)
+ println(map.typeSignatureIn(cil))
+ println(distinct.typeSignature)
+ if (failed) sys.exit(1)
+ }
+}
diff --git a/test/files/run/finalvar.check b/test/files/run/finalvar.check
new file mode 100644
index 0000000..2496293
--- /dev/null
+++ b/test/files/run/finalvar.check
@@ -0,0 +1,6 @@
+(2,2,2,2,1)
+(2,2,2,2)
+(2,2,2,2,1001)
+(2,2,2,2)
+2
+10
diff --git a/test/files/run/finalvar.flags b/test/files/run/finalvar.flags
new file mode 100644
index 0000000..aee3039
--- /dev/null
+++ b/test/files/run/finalvar.flags
@@ -0,0 +1 @@
+-Yoverride-vars -Yinline
\ No newline at end of file
diff --git a/test/files/run/finalvar.scala b/test/files/run/finalvar.scala
new file mode 100644
index 0000000..010813e
--- /dev/null
+++ b/test/files/run/finalvar.scala
@@ -0,0 +1,37 @@
+object Final {
+ class X(final var x: Int) { }
+ def f = new X(0).x += 1
+}
+
+class A {
+ var x = 1
+ def y0 = x
+ def y1 = this.x
+ def y2 = (this: A).x
+}
+
+class B extends A {
+ override def x = 2
+ def z = super.x
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ Final.f
+ val a = new B
+ println((a.x, a.y0, a.y1, a.y2, a.z))
+ val a0: A = a
+ println((a0.x, a0.y0, a0.y1, a0.y2))
+ a.x = 1001
+ println((a.x, a.y0, a.y1, a.y2, a.z))
+ println((a0.x, a0.y0, a0.y1, a0.y2))
+
+ val d = new D
+ println(d.w)
+ d.ten
+ println(d.w)
+ }
+}
+
+class C { var w = 1 ; def ten = this.w = 10 }
+class D extends C { override var w = 2 }
\ No newline at end of file
diff --git a/test/files/run/fors.scala b/test/files/run/fors.scala
index c7682f5..54afdc7 100644
--- a/test/files/run/fors.scala
+++ b/test/files/run/fors.scala
@@ -76,10 +76,10 @@ object Test extends App {
for {x <- it
if x % 2 == 0} print(x + " "); println
for (x <- it;
- val y = 2
+ y = 2
if x % y == 0) print(x + " "); println
for {x <- it
- val y = 2
+ y = 2
if x % y == 0} print(x + " "); println
// arrays
diff --git a/test/files/run/freetypes_false_alarm1.check b/test/files/run/freetypes_false_alarm1.check
new file mode 100644
index 0000000..085b3ee
--- /dev/null
+++ b/test/files/run/freetypes_false_alarm1.check
@@ -0,0 +1 @@
+scala.List[Int]
diff --git a/test/files/run/freetypes_false_alarm1.scala b/test/files/run/freetypes_false_alarm1.scala
new file mode 100644
index 0000000..8d6797f
--- /dev/null
+++ b/test/files/run/freetypes_false_alarm1.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ val ru = scala.reflect.runtime.universe
+ val tpe: ru.Type = ru.typeOf[List[Int]]
+ println(tpe)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/freetypes_false_alarm2.check b/test/files/run/freetypes_false_alarm2.check
new file mode 100644
index 0000000..02e4a84
--- /dev/null
+++ b/test/files/run/freetypes_false_alarm2.check
@@ -0,0 +1 @@
+false
\ No newline at end of file
diff --git a/test/files/run/freetypes_false_alarm2.scala b/test/files/run/freetypes_false_alarm2.scala
new file mode 100644
index 0000000..3499f13
--- /dev/null
+++ b/test/files/run/freetypes_false_alarm2.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val tpe = typeOf[ru.Type]
+ println(tpe.typeSymbol.isFreeType)
+}
\ No newline at end of file
diff --git a/test/files/run/future-flatmap-exec-count.check b/test/files/run/future-flatmap-exec-count.check
new file mode 100644
index 0000000..dd9dce6
--- /dev/null
+++ b/test/files/run/future-flatmap-exec-count.check
@@ -0,0 +1,6 @@
+mapping
+execute()
+flatmapping
+execute()
+recovering
+execute()
diff --git a/test/files/run/future-flatmap-exec-count.scala b/test/files/run/future-flatmap-exec-count.scala
new file mode 100644
index 0000000..86c37be
--- /dev/null
+++ b/test/files/run/future-flatmap-exec-count.scala
@@ -0,0 +1,61 @@
+import scala.concurrent._
+import java.util.concurrent.atomic.AtomicInteger
+
+object Test {
+ def main(args: Array[String]) {
+ test()
+ }
+
+ def test() = {
+ def await(f: Future[Any]) =
+ Await.result(f, duration.Duration.Inf)
+
+ val ec = new TestExecutionContext(ExecutionContext.Implicits.global)
+
+ {
+ val p = Promise[Int]()
+ val fp = p.future
+ println("mapping")
+ val mapped = fp.map(x => x)(ec)
+ p.success(0)
+ await(mapped)
+ }
+
+ {
+ println("flatmapping")
+ val p = Promise[Int]()
+ val fp = p.future
+ val flatMapped = fp.flatMap({ (x: Int) =>
+ Future.successful(2 * x)
+ })(ec)
+ p.success(0)
+ await(flatMapped)
+ }
+
+ {
+ println("recovering")
+ val recovered = Future.failed(new Throwable()).recoverWith {
+ case _ => Future.successful(2)
+ }(ec)
+ await(recovered)
+ }
+ }
+
+ class TestExecutionContext(delegate: ExecutionContext) extends ExecutionContext {
+ def execute(runnable: Runnable): Unit = ???
+
+ def reportFailure(t: Throwable): Unit = ???
+
+ override def prepare(): ExecutionContext = {
+ val preparedDelegate = delegate.prepare()
+ return new ExecutionContext {
+ def execute(runnable: Runnable): Unit = {
+ println("execute()")
+ preparedDelegate.execute(runnable)
+ }
+
+ def reportFailure(t: Throwable): Unit = ???
+ }
+ }
+ }
+}
diff --git a/test/files/run/genericValueClass.check b/test/files/run/genericValueClass.check
new file mode 100644
index 0000000..ec3a41a
--- /dev/null
+++ b/test/files/run/genericValueClass.check
@@ -0,0 +1,2 @@
+(1,abc)
+(2,def)
diff --git a/test/files/run/genericValueClass.scala b/test/files/run/genericValueClass.scala
new file mode 100644
index 0000000..68162bb
--- /dev/null
+++ b/test/files/run/genericValueClass.scala
@@ -0,0 +1,17 @@
+final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal {
+ @inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
+ def →[B](y: B): Tuple2[A, B] = ->(y)
+}
+
+object Test extends App {
+ {
+ @inline implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x)
+ val x = 1 -> "abc"
+ println(x)
+ }
+
+ {
+ val y = 2 -> "def"
+ println(y)
+ }
+}
diff --git a/test/files/run/getClassTest.check b/test/files/run/getClassTest-new.check
similarity index 100%
copy from test/files/run/getClassTest.check
copy to test/files/run/getClassTest-new.check
diff --git a/test/files/run/getClassTest-new.scala b/test/files/run/getClassTest-new.scala
new file mode 100644
index 0000000..7d8ec93
--- /dev/null
+++ b/test/files/run/getClassTest-new.scala
@@ -0,0 +1,68 @@
+import scala.reflect.{ClassTag, classTag}
+
+class AnyVals {
+ def f1 = (5: Any).getClass
+ def f2 = (5: AnyVal).getClass
+ def f3 = 5.getClass
+ def f4 = (5: java.lang.Integer).getClass
+ def f5 = (5.asInstanceOf[AnyRef]).getClass
+
+ // scalap says:
+ //
+ // def f1 : java.lang.Class[?0] forSome {type ?0} = { /* compiled code */ }
+ // def f2 : java.lang.Class[?0] forSome {type ?0} = { /* compiled code */ }
+ // def f3 : java.lang.Class[scala.Int] = { /* compiled code */ }
+ // def f4 : java.lang.Class[?0] forSome {type ?0 <: java.lang.Integer} = { /* compiled code */ }
+ // def f5 : java.lang.Class[?0] forSome {type ?0 <: scala.AnyRef} = { /* compiled code */ }
+ //
+ // java generic signature says:
+ //
+ // f1: java.lang.Class<?>
+ // f2: java.lang.Class<?>
+ // f3: java.lang.Class<java.lang.Object>
+ // f4: java.lang.Class<? extends java.lang.Integer>
+ // f5: java.lang.Class<?>
+}
+
+class AnyRefs {
+ class A
+ class B extends A
+
+ def f1 = (new B: Any).getClass().newInstance()
+ def f2 = (new B: AnyRef).getClass().newInstance()
+ def f3 = (new B: A).getClass().newInstance()
+ def f4 = (new B: B).getClass().newInstance()
+
+ def f0[T >: B] = (new B: T).getClass().newInstance()
+
+ def f5 = f0[Any]
+ def f6 = f0[AnyRef]
+ def f7 = f0[A]
+ def f8 = f0[B]
+}
+
+class MoreAnyRefs {
+ trait A
+ trait B
+
+ // don't leak anon/refinements
+ def f1 = (new A with B { }).getClass()
+ def f2 = (new B with A { }).getClass()
+ def f3 = (new { def bippy() = 5 }).getClass()
+ def f4 = (new A { def bippy() = 5 }).getClass()
+}
+
+object Test {
+ def returnTypes[T: ClassTag] = (
+ classTag[T].runtimeClass.getMethods.toList
+ filter (_.getName startsWith "f")
+ sortBy (_.getName)
+ map (m => m.getName + ": " + m.getGenericReturnType.toString)
+ )
+
+ def main(args: Array[String]): Unit = {
+ returnTypes[AnyVals] foreach println
+ returnTypes[AnyRefs] foreach println
+ returnTypes[MoreAnyRefs] foreach println
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/getClassTest.check b/test/files/run/getClassTest-old.check
similarity index 100%
rename from test/files/run/getClassTest.check
rename to test/files/run/getClassTest-old.check
diff --git a/test/files/run/getClassTest-old.scala b/test/files/run/getClassTest-old.scala
new file mode 100644
index 0000000..951cc8d
--- /dev/null
+++ b/test/files/run/getClassTest-old.scala
@@ -0,0 +1,66 @@
+class AnyVals {
+ def f1 = (5: Any).getClass
+ def f2 = (5: AnyVal).getClass
+ def f3 = 5.getClass
+ def f4 = (5: java.lang.Integer).getClass
+ def f5 = (5.asInstanceOf[AnyRef]).getClass
+
+ // scalap says:
+ //
+ // def f1 : java.lang.Class[?0] forSome {type ?0} = { /* compiled code */ }
+ // def f2 : java.lang.Class[?0] forSome {type ?0} = { /* compiled code */ }
+ // def f3 : java.lang.Class[scala.Int] = { /* compiled code */ }
+ // def f4 : java.lang.Class[?0] forSome {type ?0 <: java.lang.Integer} = { /* compiled code */ }
+ // def f5 : java.lang.Class[?0] forSome {type ?0 <: scala.AnyRef} = { /* compiled code */ }
+ //
+ // java generic signature says:
+ //
+ // f1: java.lang.Class<?>
+ // f2: java.lang.Class<?>
+ // f3: java.lang.Class<java.lang.Object>
+ // f4: java.lang.Class<? extends java.lang.Integer>
+ // f5: java.lang.Class<?>
+}
+
+class AnyRefs {
+ class A
+ class B extends A
+
+ def f1 = (new B: Any).getClass().newInstance()
+ def f2 = (new B: AnyRef).getClass().newInstance()
+ def f3 = (new B: A).getClass().newInstance()
+ def f4 = (new B: B).getClass().newInstance()
+
+ def f0[T >: B] = (new B: T).getClass().newInstance()
+
+ def f5 = f0[Any]
+ def f6 = f0[AnyRef]
+ def f7 = f0[A]
+ def f8 = f0[B]
+}
+
+class MoreAnyRefs {
+ trait A
+ trait B
+
+ // don't leak anon/refinements
+ def f1 = (new A with B { }).getClass()
+ def f2 = (new B with A { }).getClass()
+ def f3 = (new { def bippy() = 5 }).getClass()
+ def f4 = (new A { def bippy() = 5 }).getClass()
+}
+
+object Test {
+ def returnTypes[T: Manifest] = (
+ manifest[T].erasure.getMethods.toList
+ filter (_.getName startsWith "f")
+ sortBy (_.getName)
+ map (m => m.getName + ": " + m.getGenericReturnType.toString)
+ )
+
+ def main(args: Array[String]): Unit = {
+ returnTypes[AnyVals] foreach println
+ returnTypes[AnyRefs] foreach println
+ returnTypes[MoreAnyRefs] foreach println
+ }
+}
diff --git a/test/files/run/getClassTest-valueClass.check b/test/files/run/getClassTest-valueClass.check
new file mode 100644
index 0000000..7608d92
--- /dev/null
+++ b/test/files/run/getClassTest-valueClass.check
@@ -0,0 +1,2 @@
+int
+class V
diff --git a/test/files/run/getClassTest-valueClass.scala b/test/files/run/getClassTest-valueClass.scala
new file mode 100644
index 0000000..05a116d
--- /dev/null
+++ b/test/files/run/getClassTest-valueClass.scala
@@ -0,0 +1,10 @@
+class V(val x: Int) extends AnyVal
+
+object Test {
+ def main(args: Array[String]) = {
+ val v = new V(2)
+ val s: Any = 2
+ println(2.getClass)
+ println(v.getClass)
+ }
+}
diff --git a/test/files/run/getClassTest.scala b/test/files/run/getClassTest.scala
deleted file mode 100644
index 2485cd2..0000000
--- a/test/files/run/getClassTest.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-class AnyVals {
- def f1 = (5: Any).getClass
- def f2 = (5: AnyVal).getClass
- def f3 = 5.getClass
- def f4 = (5: java.lang.Integer).getClass
- def f5 = (5.asInstanceOf[AnyRef]).getClass
-
- // scalap says:
- //
- // def f1 : java.lang.Class[?0] forSome {type ?0} = { /* compiled code */ }
- // def f2 : java.lang.Class[?0] forSome {type ?0} = { /* compiled code */ }
- // def f3 : java.lang.Class[scala.Int] = { /* compiled code */ }
- // def f4 : java.lang.Class[?0] forSome {type ?0 <: java.lang.Integer} = { /* compiled code */ }
- // def f5 : java.lang.Class[?0] forSome {type ?0 <: scala.AnyRef} = { /* compiled code */ }
- //
- // java generic signature says:
- //
- // f1: java.lang.Class<?>
- // f2: java.lang.Class<?>
- // f3: java.lang.Class<java.lang.Object>
- // f4: java.lang.Class<? extends java.lang.Integer>
- // f5: java.lang.Class<?>
-}
-
-class AnyRefs {
- class A
- class B extends A
-
- def f1 = (new B: Any).getClass().newInstance()
- def f2 = (new B: AnyRef).getClass().newInstance()
- def f3 = (new B: A).getClass().newInstance()
- def f4 = (new B: B).getClass().newInstance()
-
- def f0[T >: B] = (new B: T).getClass().newInstance()
-
- def f5 = f0[Any]
- def f6 = f0[AnyRef]
- def f7 = f0[A]
- def f8 = f0[B]
-}
-
-class MoreAnyRefs {
- trait A
- trait B
-
- // don't leak anon/refinements
- def f1 = (new A with B { }).getClass()
- def f2 = (new B with A { }).getClass()
- def f3 = (new { def bippy() = 5 }).getClass()
- def f4 = (new A { def bippy() = 5 }).getClass()
-}
-
-object Test {
- def returnTypes[T: Manifest] = (
- manifest[T].erasure.getMethods.toList
- filter (_.getName startsWith "f")
- sortBy (_.getName)
- map (m => m.getName + ": " + m.getGenericReturnType.toString)
- )
-
- def main(args: Array[String]): Unit = {
- returnTypes[AnyVals] foreach println
- returnTypes[AnyRefs] foreach println
- returnTypes[MoreAnyRefs] foreach println
- }
-}
diff --git a/test/files/run/global-showdef.check b/test/files/run/global-showdef.check
index 36d33f6..4c2fd41 100644
--- a/test/files/run/global-showdef.check
+++ b/test/files/run/global-showdef.check
@@ -9,6 +9,6 @@
<<-- class foo.bar.Bippy.Boppity.Boo after phase 'typer' -->>
def showdefTestMemberClass3: Int
<<-- object foo.bar.Bippy after phase 'typer' -->>
- def showdefTestMemberObject2: java.lang.String
+ def showdefTestMemberObject2: String
<<-- object foo.bar.Bippy.Boppity.Boo after phase 'typer' -->>
- def showdefTestMemberObject1: java.lang.String
+ def showdefTestMemberObject1: String
diff --git a/test/files/run/hashhash.scala b/test/files/run/hashhash.scala
index b9cec99..f9fc067 100644
--- a/test/files/run/hashhash.scala
+++ b/test/files/run/hashhash.scala
@@ -6,5 +6,18 @@ object Test {
/** Just a little sanity check, not to be confused with a unit test. */
List(5, 5.5f, "abc", new AnyRef, ()) foreach confirmSame
List(5.0f, 1.0d, -(5.0f), (-1.0d)) foreach confirmDifferent
+
+ val x = (BigInt(1) << 64).toDouble
+ val y: Any = x
+ val f: Float = x.toFloat
+ val jn: java.lang.Number = x
+ val jf: java.lang.Float = x.toFloat
+ val jd: java.lang.Double = x
+
+ assert(x.## == y.##, ((x, y)))
+ assert(x.## == f.##, ((x, f)))
+ assert(x.## == jn.##, ((x, jn)))
+ assert(x.## == jf.##, ((x, jf)))
+ assert(x.## == jd.##, ((x, jd)))
}
}
diff --git a/test/files/run/idempotency-case-classes.check b/test/files/run/idempotency-case-classes.check
new file mode 100644
index 0000000..e045388
--- /dev/null
+++ b/test/files/run/idempotency-case-classes.check
@@ -0,0 +1,55 @@
+C(2,3)
+()
+{
+ case class C extends AnyRef with Product with Serializable {
+ <caseaccessor> <paramaccessor> private[this] val x: Int = _;
+ <stable> <caseaccessor> <accessor> <paramaccessor> def x: Int = C.this.x;
+ <caseaccessor> <paramaccessor> private[this] val y: Int = _;
+ <stable> <caseaccessor> <accessor> <paramaccessor> def y: Int = C.this.y;
+ def <init>(x: Int, y: Int): C = {
+ C.super.<init>();
+ ()
+ };
+ <synthetic> def copy(x: Int = x, y: Int = y): C = new C(x, y);
+ <synthetic> def copy$default$1: Int = C.this.x;
+ <synthetic> def copy$default$2: Int = C.this.y;
+ override <synthetic> def productPrefix: String = "C";
+ <synthetic> def productArity: Int = 2;
+ <synthetic> def productElement(x$1: Int): Any = x$1 match {
+ case 0 => C.this.x
+ case 1 => C.this.y
+ case _ => throw new IndexOutOfBoundsException(x$1.toString())
+ };
+ override <synthetic> def productIterator: Iterator[Any] = runtime.this.ScalaRunTime.typedProductIterator[Any](C.this);
+ <synthetic> def canEqual(x$1: Any): Boolean = x$1.$isInstanceOf[C]();
+ override <synthetic> def hashCode(): Int = {
+ <synthetic> var acc: Int = -889275714;
+ acc = Statics.this.mix(acc, x);
+ acc = Statics.this.mix(acc, y);
+ Statics.this.finalizeHash(acc, 2)
+ };
+ override <synthetic> def toString(): String = ScalaRunTime.this._toString(C.this);
+ override <synthetic> def equals(x$1: Any): Boolean = C.this.eq(x$1.asInstanceOf[Object]).||(x$1 match {
+ case (_: C) => true
+ case _ => false
+}.&&({
+ <synthetic> val C$1: C = x$1.asInstanceOf[C];
+ C.this.x.==(C$1.x).&&(C.this.y.==(C$1.y)).&&(C$1.canEqual(C.this))
+ }))
+ };
+ <synthetic> object C extends scala.runtime.AbstractFunction2[Int,Int,C] with Serializable {
+ def <init>(): C.type = {
+ C.super.<init>();
+ ()
+ };
+ final override <synthetic> def toString(): String = "C";
+ case <synthetic> def apply(x: Int, y: Int): C = new C(x, y);
+ case <synthetic> def unapply(x$0: C): Option[(Int, Int)] = if (x$0.==(null))
+ scala.this.None
+ else
+ Some.apply[(Int, Int)](Tuple2.apply[Int, Int](x$0.x, x$0.y));
+ <synthetic> private def readResolve(): Object = C
+ };
+ Predef.println(C.apply(2, 3))
+}
+error!
diff --git a/test/files/run/idempotency-case-classes.scala b/test/files/run/idempotency-case-classes.scala
new file mode 100644
index 0000000..4da8393
--- /dev/null
+++ b/test/files/run/idempotency-case-classes.scala
@@ -0,0 +1,22 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, ToolBoxError}
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val casee = reify {
+ case class C(x: Int, y: Int)
+ println(C(2, 3))
+ }
+ println(casee.eval)
+ val tb = cm.mkToolBox()
+ val tcasee = tb.typeCheck(casee.tree)
+ println(tcasee)
+ val rtcasee = tb.resetAllAttrs(tcasee)
+ try {
+ println(tb.eval(rtcasee))
+ } catch {
+ // this is the current behaviour, rather than the desired behavior; see SI-5467
+ case _: ToolBoxError => println("error!")
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/idempotency-extractors.check b/test/files/run/idempotency-extractors.check
new file mode 100644
index 0000000..fcd50fa
--- /dev/null
+++ b/test/files/run/idempotency-extractors.check
@@ -0,0 +1,5 @@
+2
+2 match {
+ case Test.this.Extractor.unapply(<unapply-selector>) <unapply> ((x @ _)) => x
+}
+error!
diff --git a/test/files/run/idempotency-extractors.scala b/test/files/run/idempotency-extractors.scala
new file mode 100644
index 0000000..fe03329
--- /dev/null
+++ b/test/files/run/idempotency-extractors.scala
@@ -0,0 +1,22 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, ToolBoxError}
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ object Extractor { def unapply(x: Int): Option[Int] = Some(x) }
+ val extractor = reify {
+ 2 match { case Extractor(x) => x }
+ }
+ println(extractor.eval)
+ val tb = cm.mkToolBox()
+ val textractor = tb.typeCheck(extractor.tree)
+ println(textractor)
+ val rtextractor = tb.resetAllAttrs(textractor)
+ try {
+ println(tb.eval(rtextractor))
+ } catch {
+ // this is the current behaviour, rather than the desired behavior; see SI-5465
+ case _: ToolBoxError => println("error!")
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/idempotency-labels.check b/test/files/run/idempotency-labels.check
new file mode 100644
index 0000000..8709efe
--- /dev/null
+++ b/test/files/run/idempotency-labels.check
@@ -0,0 +1,15 @@
+2
+{
+ var x: Int = 0;
+ while$1(){
+ if (x.<(2))
+ {
+ x = x.+(1);
+ while$1()
+ }
+ else
+ ()
+ };
+ x
+}
+2
diff --git a/test/files/run/idempotency-labels.scala b/test/files/run/idempotency-labels.scala
new file mode 100644
index 0000000..82d0097
--- /dev/null
+++ b/test/files/run/idempotency-labels.scala
@@ -0,0 +1,22 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, ToolBoxError}
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val label = reify {
+ var x = 0
+ while (x < 2) { x += 1 }
+ x
+ }
+ println(label.eval)
+ val tb = cm.mkToolBox()
+ val tlabel = tb.typeCheck(label.tree)
+ println(tlabel)
+ val rtlabel = tb.resetAllAttrs(tlabel)
+ try {
+ println(tb.eval(rtlabel))
+ } catch {
+ case _: ToolBoxError => println("error!")
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/idempotency-lazy-vals.check b/test/files/run/idempotency-lazy-vals.check
new file mode 100644
index 0000000..15afa53
--- /dev/null
+++ b/test/files/run/idempotency-lazy-vals.check
@@ -0,0 +1,23 @@
+6
+{
+ class C extends AnyRef {
+ def <init>(): C = {
+ C.super.<init>();
+ ()
+ };
+ lazy private[this] val x: Int = _;
+ <stable> <accessor> lazy def x: Int = {
+ C.this.x = 2;
+ C.this.x
+ };
+ lazy private[this] val y: Int = _;
+ implicit <stable> <accessor> lazy def y: Int = {
+ C.this.y = 3;
+ C.this.y
+ }
+ };
+ val c: C = new C();
+ import c._;
+ c.x.*(Predef.implicitly[Int](c.y))
+}
+error!
diff --git a/test/files/run/idempotency-lazy-vals.scala b/test/files/run/idempotency-lazy-vals.scala
new file mode 100644
index 0000000..3531f9f
--- /dev/null
+++ b/test/files/run/idempotency-lazy-vals.scala
@@ -0,0 +1,27 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, ToolBoxError}
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val lazee = reify {
+ class C {
+ lazy val x = 2
+ implicit lazy val y = 3
+ }
+ val c = new C()
+ import c._
+ x * implicitly[Int]
+ }
+ println(lazee.eval)
+ val tb = cm.mkToolBox()
+ val tlazee = tb.typeCheck(lazee.tree)
+ println(tlazee)
+ val rtlazee = tb.resetAllAttrs(tlazee)
+ try {
+ println(tb.eval(rtlazee))
+ } catch {
+ // this is the current behaviour, rather than the desired behavior; see SI-5466
+ case _: ToolBoxError => println("error!")
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/idempotency-partial-functions.check b/test/files/run/idempotency-partial-functions.check
new file mode 100644
index 0000000..5c8a411
--- /dev/null
+++ b/test/files/run/idempotency-partial-functions.check
@@ -0,0 +1,2 @@
+error!!
+error!
diff --git a/test/files/run/idempotency-this.check b/test/files/run/idempotency-this.check
new file mode 100644
index 0000000..8faf703
--- /dev/null
+++ b/test/files/run/idempotency-this.check
@@ -0,0 +1,4 @@
+List()
+List.apply[String]("")
+Apply(TypeApply(Select(Ident(scala.collection.immutable.List), newTermName("apply")), List(TypeTree().setOriginal(Select(Ident(scala.Predef), newTypeName("String"))))), List(Literal(Constant(""))))
+List()
diff --git a/test/files/run/idempotency-this.scala b/test/files/run/idempotency-this.scala
new file mode 100644
index 0000000..5cd4226
--- /dev/null
+++ b/test/files/run/idempotency-this.scala
@@ -0,0 +1,22 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, ToolBoxError}
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val thiss = reify {
+ List[String]("")
+ }
+ println(thiss.eval)
+ val tb = cm.mkToolBox()
+ val tthiss = tb.typeCheck(thiss.tree)
+ println(tthiss)
+ println(showRaw(tthiss))
+ val rtthiss = tb.resetAllAttrs(tthiss)
+ try {
+ println(tb.eval(rtthiss))
+ } catch {
+ // this is the current behaviour, rather than the desired behavior; see SI-5705
+ case _: ToolBoxError => println("error!")
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/imain.check b/test/files/run/imain.check
new file mode 100644
index 0000000..76df308
--- /dev/null
+++ b/test/files/run/imain.check
@@ -0,0 +1 @@
+Some(246)
diff --git a/test/files/run/imain.scala b/test/files/run/imain.scala
new file mode 100644
index 0000000..c164fb5
--- /dev/null
+++ b/test/files/run/imain.scala
@@ -0,0 +1,17 @@
+object Test {
+ import scala.tools.nsc._
+ import interpreter._
+ import java.io.PrintWriter
+
+ class NullOutputStream extends OutputStream { def write(b: Int) { } }
+
+ def main(args: Array[String]) {
+ val settings = new Settings
+ settings.classpath.value = System.getProperty("java.class.path")
+
+ val intp = new IMain(settings, new PrintWriter(new NullOutputStream))
+ intp.interpret("def x0 = 123")
+ intp.interpret("val x1 = x0 * 2")
+ println(intp.valueOfTerm("x1"))
+ }
+}
diff --git a/test/files/run/impconvtimes-msil.check b/test/files/run/impconvtimes-msil.check
deleted file mode 100644
index 082377e..0000000
--- a/test/files/run/impconvtimes-msil.check
+++ /dev/null
@@ -1 +0,0 @@
-3.0 * Hour = Measure(3,Hour)
diff --git a/test/files/run/implicitclasses.scala b/test/files/run/implicitclasses.scala
new file mode 100644
index 0000000..886d4de
--- /dev/null
+++ b/test/files/run/implicitclasses.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+
+ implicit class C(s: String) {
+ def nElems = s.length
+ }
+
+ assert("abc".nElems == 3)
+
+}
+
diff --git a/test/files/run/bug216.check b/test/files/run/indexedSeq-apply.check
similarity index 100%
rename from test/files/run/bug216.check
rename to test/files/run/indexedSeq-apply.check
diff --git a/test/files/run/indexedSeq-apply.scala b/test/files/run/indexedSeq-apply.scala
new file mode 100644
index 0000000..39d4db2
--- /dev/null
+++ b/test/files/run/indexedSeq-apply.scala
@@ -0,0 +1,15 @@
+object Test extends App {
+ val empty = IndexedSeq()
+ assert(empty.isEmpty)
+
+ val single = IndexedSeq(1)
+ assert(List(1) == single.toList)
+
+ val two = IndexedSeq("a", "b")
+ assert("a" == two.head)
+ assert("b" == two.apply(1))
+
+ println("OK")
+}
+
+// vim: set ts=2 sw=2 et:
diff --git a/test/files/run/infix-msil.check b/test/files/run/infix-msil.check
deleted file mode 100644
index b37cdcd..0000000
--- a/test/files/run/infix-msil.check
+++ /dev/null
@@ -1,2 +0,0 @@
-op(op(op(,0,0),1,1),2,2)
-OK
diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check
new file mode 100644
index 0000000..50a9d87
--- /dev/null
+++ b/test/files/run/inline-ex-handlers.check
@@ -0,0 +1,490 @@
+--- a
++++ b
+@@ -171,5 +171,5 @@
+ def productElement(x$1: Int (INT)): Object {
+- locals: value x$1, value x1
++ locals: value x$1, value x1, variable boxed1
+ startBlock: 1
+- blocks: [1,2,3,4]
++ blocks: [1,3,4]
+
+@@ -186,2 +186,4 @@
+ 92 LOAD_LOCAL(value x$1)
++ 92 STORE_LOCAL(variable boxed1)
++ 92 LOAD_LOCAL(variable boxed1)
+ 92 BOX INT
+@@ -194,5 +196,2 @@
+ 92 CALL_METHOD MyException.message (dynamic)
+- 92 JUMP 2
+-
+- 2:
+ 92 RETURN(REF(class Object))
+@@ -246,3 +245,3 @@
+ startBlock: 1
+- blocks: [1,2,3,4,5,6,7,8,10,11,12,13,14,15,16,17,18]
++ blocks: [1,2,3,4,5,6,8,10,11,12,13,14,15,16,17,18]
+
+@@ -257,5 +256,2 @@
+ 92 SCOPE_ENTER value x1
+- 92 JUMP 7
+-
+- 7:
+ 92 LOAD_LOCAL(value x1)
+@@ -390,5 +386,5 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, variable result, value ex6, value x4, value x5, value message, value x
++ locals: value args, variable result, value ex6, value x4, value x5, value x
+ startBlock: 1
+- blocks: [1,2,3,4,5,8,10,11,13]
++ blocks: [1,2,3,5,8,10,11,13,14]
+
+@@ -416,4 +412,13 @@
+ 103 CALL_METHOD MyException.<init> (static-instance)
+- 103 THROW(MyException)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 14
+
++ 14:
++ 101 LOAD_LOCAL(value ex6)
++ 101 STORE_LOCAL(value x4)
++ 101 SCOPE_ENTER value x4
++ 106 LOAD_LOCAL(value x4)
++ 106 IS_INSTANCE REF(class MyException)
++ 106 CZJUMP (BOOL)NE ? 5 : 8
++
+ 13:
+@@ -429,5 +434,2 @@
+ 101 SCOPE_ENTER value x4
+- 101 JUMP 4
+-
+- 4:
+ 106 LOAD_LOCAL(value x4)
+@@ -441,8 +443,5 @@
+ 106 SCOPE_ENTER value x5
+- 106 LOAD_LOCAL(value x5)
+- 106 CALL_METHOD MyException.message (dynamic)
+- 106 STORE_LOCAL(value message)
+- 106 SCOPE_ENTER value message
+ 106 LOAD_MODULE object Predef
+- 106 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 106 CALL_METHOD MyException.message (dynamic)
+ 106 CALL_METHOD scala.Predef.println (dynamic)
+@@ -518,3 +517,3 @@
+ startBlock: 1
+- blocks: [1,2,3,4,6,7,8,9,10]
++ blocks: [1,2,3,4,6,7,8,9,10,11,12,13]
+
+@@ -547,4 +546,9 @@
+ 306 CALL_METHOD MyException.<init> (static-instance)
+- 306 THROW(MyException)
++ ? JUMP 11
+
++ 11:
++ ? LOAD_LOCAL(variable monitor4)
++ 305 MONITOR_EXIT
++ ? JUMP 12
++
+ 9:
+@@ -553,3 +557,3 @@
+ 305 MONITOR_EXIT
+- ? THROW(Throwable)
++ ? JUMP 12
+
+@@ -559,4 +563,11 @@
+ 304 MONITOR_EXIT
+- ? THROW(Throwable)
++ ? STORE_LOCAL(value t)
++ ? JUMP 13
+
++ 12:
++ ? LOAD_LOCAL(variable monitor3)
++ 304 MONITOR_EXIT
++ ? STORE_LOCAL(value t)
++ ? JUMP 13
++
+ 3:
+@@ -575,2 +586,14 @@
+
++ 13:
++ 310 LOAD_MODULE object Predef
++ 310 CALL_PRIMITIVE(StartConcat)
++ 310 CONSTANT("Caught crash: ")
++ 310 CALL_PRIMITIVE(StringConcat(REF(class String)))
++ 310 LOAD_LOCAL(value t)
++ 310 CALL_METHOD java.lang.Throwable.toString (dynamic)
++ 310 CALL_PRIMITIVE(StringConcat(REF(class String)))
++ 310 CALL_PRIMITIVE(EndConcat)
++ 310 CALL_METHOD scala.Predef.println (dynamic)
++ 310 JUMP 2
++
+ 2:
+@@ -583,6 +606,6 @@
+ with finalizer: null
+- catch (Throwable) in ArrayBuffer(7, 8, 9, 10) starting at: 6
++ catch (Throwable) in ArrayBuffer(7, 8, 9, 10, 11) starting at: 6
+ consisting of blocks: List(6)
+ with finalizer: null
+- catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10) starting at: 3
++ catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10, 11, 12) starting at: 3
+ consisting of blocks: List(3)
+@@ -618,3 +641,3 @@
+ startBlock: 1
+- blocks: [1,2,3,4,5,6,7,9,10]
++ blocks: [1,2,3,4,5,6,7,9,10,11,12]
+
+@@ -642,4 +665,10 @@
+ 78 CALL_METHOD java.lang.IllegalArgumentException.<init> (static-instance)
+- 78 THROW(IllegalArgumentException)
++ ? STORE_LOCAL(value e)
++ ? JUMP 11
+
++ 11:
++ 81 LOAD_LOCAL(value e)
++ ? STORE_LOCAL(variable exc1)
++ ? JUMP 12
++
+ 9:
+@@ -671,3 +700,4 @@
+ 81 LOAD_LOCAL(value e)
+- 81 THROW(Exception)
++ ? STORE_LOCAL(variable exc1)
++ ? JUMP 12
+
+@@ -688,2 +718,15 @@
+
++ 12:
++ 83 LOAD_MODULE object Predef
++ 83 CONSTANT("finally")
++ 83 CALL_METHOD scala.Predef.println (dynamic)
++ 84 LOAD_LOCAL(variable result)
++ 84 CONSTANT(1)
++ 84 CALL_PRIMITIVE(Arithmetic(SUB,INT))
++ 84 CONSTANT(2)
++ 84 CALL_PRIMITIVE(Arithmetic(DIV,INT))
++ 84 STORE_LOCAL(variable result)
++ 84 LOAD_LOCAL(variable exc1)
++ 84 THROW(Throwable)
++
+ }
+@@ -693,3 +736,3 @@
+ with finalizer: null
+- catch (<none>) in ArrayBuffer(4, 6, 7, 9) starting at: 3
++ catch (<none>) in ArrayBuffer(4, 6, 7, 9, 11) starting at: 3
+ consisting of blocks: List(3)
+@@ -717,5 +760,5 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value message, value x, value ex6, value x4, value x5, value message, value x
++ locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value x, value ex6, value x4, value x5, value x
+ startBlock: 1
+- blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25]
++ blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25,26,27,28]
+
+@@ -743,4 +786,11 @@
+ 172 CALL_METHOD MyException.<init> (static-instance)
+- 172 THROW(MyException)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 26
+
++ 26:
++ 170 LOAD_LOCAL(value ex6)
++ 170 STORE_LOCAL(value x4)
++ 170 SCOPE_ENTER value x4
++ 170 JUMP 15
++
+ 24:
+@@ -786,8 +836,5 @@
+ 175 SCOPE_ENTER value x5
+- 175 LOAD_LOCAL(value x5)
+- 175 CALL_METHOD MyException.message (dynamic)
+- 175 STORE_LOCAL(value message)
+- 175 SCOPE_ENTER value message
+ 176 LOAD_MODULE object Predef
+- 176 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 176 CALL_METHOD MyException.message (dynamic)
+ 176 CALL_METHOD scala.Predef.println (dynamic)
+@@ -795,5 +842,7 @@
+ 177 DUP(REF(class MyException))
+- 177 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 177 CALL_METHOD MyException.message (dynamic)
+ 177 CALL_METHOD MyException.<init> (static-instance)
+- 177 THROW(MyException)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 27
+
+@@ -801,3 +850,4 @@
+ 170 LOAD_LOCAL(value ex6)
+- 170 THROW(Throwable)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 27
+
+@@ -811,2 +861,8 @@
+
++ 27:
++ 169 LOAD_LOCAL(value ex6)
++ 169 STORE_LOCAL(value x4)
++ 169 SCOPE_ENTER value x4
++ 169 JUMP 5
++
+ 5:
+@@ -821,8 +877,5 @@
+ 180 SCOPE_ENTER value x5
+- 180 LOAD_LOCAL(value x5)
+- 180 CALL_METHOD MyException.message (dynamic)
+- 180 STORE_LOCAL(value message)
+- 180 SCOPE_ENTER value message
+ 181 LOAD_MODULE object Predef
+- 181 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 181 CALL_METHOD MyException.message (dynamic)
+ 181 CALL_METHOD scala.Predef.println (dynamic)
+@@ -830,5 +883,7 @@
+ 182 DUP(REF(class MyException))
+- 182 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 182 CALL_METHOD MyException.message (dynamic)
+ 182 CALL_METHOD MyException.<init> (static-instance)
+- 182 THROW(MyException)
++ ? STORE_LOCAL(variable exc2)
++ ? JUMP 28
+
+@@ -836,3 +891,4 @@
+ 169 LOAD_LOCAL(value ex6)
+- 169 THROW(Throwable)
++ ? STORE_LOCAL(variable exc2)
++ ? JUMP 28
+
+@@ -853,2 +909,15 @@
+
++ 28:
++ 184 LOAD_MODULE object Predef
++ 184 CONSTANT("finally")
++ 184 CALL_METHOD scala.Predef.println (dynamic)
++ 185 LOAD_LOCAL(variable result)
++ 185 CONSTANT(1)
++ 185 CALL_PRIMITIVE(Arithmetic(SUB,INT))
++ 185 CONSTANT(2)
++ 185 CALL_PRIMITIVE(Arithmetic(DIV,INT))
++ 185 STORE_LOCAL(variable result)
++ 185 LOAD_LOCAL(variable exc2)
++ 185 THROW(Throwable)
++
+ }
+@@ -858,6 +927,6 @@
+ with finalizer: null
+- catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24) starting at: 4
++ catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24, 26) starting at: 4
+ consisting of blocks: List(9, 6, 5, 4)
+ with finalizer: null
+- catch (<none>) in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24) starting at: 3
++ catch (<none>) in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24, 26, 27) starting at: 3
+ consisting of blocks: List(3)
+@@ -885,5 +954,5 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, variable result, value e, value ex6, value x4, value x5, value message, value x
++ locals: value args, variable result, value e, value ex6, value x4, value x5, value x
+ startBlock: 1
+- blocks: [1,2,3,6,7,8,11,13,14,16]
++ blocks: [1,2,3,6,7,8,11,13,14,16,17]
+
+@@ -911,4 +980,11 @@
+ 124 CALL_METHOD MyException.<init> (static-instance)
+- 124 THROW(MyException)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 17
+
++ 17:
++ 122 LOAD_LOCAL(value ex6)
++ 122 STORE_LOCAL(value x4)
++ 122 SCOPE_ENTER value x4
++ 122 JUMP 7
++
+ 16:
+@@ -936,8 +1012,5 @@
+ 127 SCOPE_ENTER value x5
+- 127 LOAD_LOCAL(value x5)
+- 127 CALL_METHOD MyException.message (dynamic)
+- 127 STORE_LOCAL(value message)
+- 127 SCOPE_ENTER value message
+ 127 LOAD_MODULE object Predef
+- 127 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 127 CALL_METHOD MyException.message (dynamic)
+ 127 CALL_METHOD scala.Predef.println (dynamic)
+@@ -970,3 +1043,3 @@
+ with finalizer: null
+- catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16) starting at: 3
++ catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3
+ consisting of blocks: List(3)
+@@ -994,5 +1067,5 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, variable result, value ex6, value x4, value x5, value message, value x, value e
++ locals: value args, variable result, value ex6, value x4, value x5, value x, value e
+ startBlock: 1
+- blocks: [1,2,3,4,5,8,12,13,14,16]
++ blocks: [1,2,3,5,8,12,13,14,16,17]
+
+@@ -1020,4 +1093,13 @@
+ 148 CALL_METHOD MyException.<init> (static-instance)
+- 148 THROW(MyException)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 17
+
++ 17:
++ 145 LOAD_LOCAL(value ex6)
++ 145 STORE_LOCAL(value x4)
++ 145 SCOPE_ENTER value x4
++ 154 LOAD_LOCAL(value x4)
++ 154 IS_INSTANCE REF(class MyException)
++ 154 CZJUMP (BOOL)NE ? 5 : 8
++
+ 16:
+@@ -1041,5 +1123,2 @@
+ 145 SCOPE_ENTER value x4
+- 145 JUMP 4
+-
+- 4:
+ 154 LOAD_LOCAL(value x4)
+@@ -1053,8 +1132,5 @@
+ 154 SCOPE_ENTER value x5
+- 154 LOAD_LOCAL(value x5)
+- 154 CALL_METHOD MyException.message (dynamic)
+- 154 STORE_LOCAL(value message)
+- 154 SCOPE_ENTER value message
+ 154 LOAD_MODULE object Predef
+- 154 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 154 CALL_METHOD MyException.message (dynamic)
+ 154 CALL_METHOD scala.Predef.println (dynamic)
+@@ -1275,3 +1351,3 @@
+ startBlock: 1
+- blocks: [1,2,3,4,5,7]
++ blocks: [1,2,3,4,5,7,8]
+
+@@ -1299,4 +1375,11 @@
+ 38 CALL_METHOD java.lang.IllegalArgumentException.<init> (static-instance)
+- 38 THROW(IllegalArgumentException)
++ ? STORE_LOCAL(value e)
++ ? JUMP 8
+
++ 8:
++ 42 LOAD_MODULE object Predef
++ 42 CONSTANT("IllegalArgumentException")
++ 42 CALL_METHOD scala.Predef.println (dynamic)
++ 42 JUMP 2
++
+ 7:
+@@ -1346,5 +1429,5 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, variable result, value ex6, value x4, value x5, value message, value x
++ locals: value args, variable result, value ex6, value x4, value x5, value x
+ startBlock: 1
+- blocks: [1,2,3,4,5,8,10,11,13,14,16]
++ blocks: [1,2,3,5,8,10,11,13,14,16,17]
+
+@@ -1372,3 +1455,4 @@
+ 203 CALL_METHOD MyException.<init> (static-instance)
+- 203 THROW(MyException)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 17
+
+@@ -1392,4 +1476,13 @@
+ 209 CALL_METHOD MyException.<init> (static-instance)
+- 209 THROW(MyException)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 17
+
++ 17:
++ 200 LOAD_LOCAL(value ex6)
++ 200 STORE_LOCAL(value x4)
++ 200 SCOPE_ENTER value x4
++ 212 LOAD_LOCAL(value x4)
++ 212 IS_INSTANCE REF(class MyException)
++ 212 CZJUMP (BOOL)NE ? 5 : 8
++
+ 16:
+@@ -1405,5 +1498,2 @@
+ 200 SCOPE_ENTER value x4
+- 200 JUMP 4
+-
+- 4:
+ 212 LOAD_LOCAL(value x4)
+@@ -1417,8 +1507,5 @@
+ 212 SCOPE_ENTER value x5
+- 212 LOAD_LOCAL(value x5)
+- 212 CALL_METHOD MyException.message (dynamic)
+- 212 STORE_LOCAL(value message)
+- 212 SCOPE_ENTER value message
+ 213 LOAD_MODULE object Predef
+- 213 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 213 CALL_METHOD MyException.message (dynamic)
+ 213 CALL_METHOD scala.Predef.println (dynamic)
+@@ -1466,3 +1553,3 @@
+ startBlock: 1
+- blocks: [1,2,3,4,5,7]
++ blocks: [1,2,3,4,5,7,8]
+
+@@ -1490,4 +1577,11 @@
+ 58 CALL_METHOD java.lang.IllegalArgumentException.<init> (static-instance)
+- 58 THROW(IllegalArgumentException)
++ ? STORE_LOCAL(value e)
++ ? JUMP 8
+
++ 8:
++ 62 LOAD_MODULE object Predef
++ 62 CONSTANT("RuntimeException")
++ 62 CALL_METHOD scala.Predef.println (dynamic)
++ 62 JUMP 2
++
+ 7:
+@@ -1539,3 +1633,3 @@
+ startBlock: 1
+- blocks: [1,2,3,4]
++ blocks: [1,2,3,4,5]
+
+@@ -1559,4 +1653,9 @@
+ 229 CALL_METHOD MyException.<init> (static-instance)
+- 229 THROW(MyException)
++ ? JUMP 5
+
++ 5:
++ ? LOAD_LOCAL(variable monitor1)
++ 228 MONITOR_EXIT
++ 228 THROW(Throwable)
++
+ 3:
+@@ -1565,3 +1664,3 @@
+ 228 MONITOR_EXIT
+- ? THROW(Throwable)
++ 228 THROW(Throwable)
+
+@@ -1593,5 +1692,5 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, variable result, variable monitor2, variable monitorResult1
++ locals: value exception$1, value args, variable result, variable monitor2, variable monitorResult1
+ startBlock: 1
+- blocks: [1,2,3,4]
++ blocks: [1,2,3,4,5]
+
+@@ -1618,4 +1717,12 @@
+ 245 CALL_METHOD MyException.<init> (static-instance)
+- 245 THROW(MyException)
++ ? STORE_LOCAL(value exception$1)
++ ? DROP ConcatClass
++ ? LOAD_LOCAL(value exception$1)
++ ? JUMP 5
+
++ 5:
++ ? LOAD_LOCAL(variable monitor2)
++ 244 MONITOR_EXIT
++ 244 THROW(Throwable)
++
+ 3:
+@@ -1624,3 +1731,3 @@
+ 244 MONITOR_EXIT
+- ? THROW(Throwable)
++ 244 THROW(Throwable)
+
diff --git a/test/files/run/inline-ex-handlers.scala b/test/files/run/inline-ex-handlers.scala
new file mode 100644
index 0000000..a96b938
--- /dev/null
+++ b/test/files/run/inline-ex-handlers.scala
@@ -0,0 +1,329 @@
+import scala.tools.partest.IcodeTest
+
+object Test extends IcodeTest {
+ override def printIcodeAfterPhase = "inlineExceptionHandlers"
+}
+
+import scala.util.Random._
+
+/** There should be no inlining taking place in this class */
+object TestInlineHandlersNoInline {
+
+ def main(args: Array[String]): Unit = {
+ println("TestInlineHandlersNoInline")
+ var result = -1
+
+ try {
+ if (nextInt % 2 == 0)
+ throw new IllegalArgumentException("something")
+ result = 1
+ } catch {
+ case e: StackOverflowError =>
+ println("Stack overflow")
+ }
+
+ result
+ }
+}
+
+/** Just a simple inlining should take place in this class */
+object TestInlineHandlersSimpleInline {
+
+ def main(args: Array[String]): Unit = {
+ println("TestInlineHandlersSimpleInline")
+ var result = -1
+
+ try {
+ if (nextInt % 2 == 0)
+ throw new IllegalArgumentException("something")
+ result = 1
+ } catch {
+ case e: IllegalArgumentException =>
+ println("IllegalArgumentException")
+ }
+
+ result
+ }
+}
+
+/** Inlining should take place because the handler is taking a superclass of the exception thrown */
+object TestInlineHandlersSubclassInline {
+
+ def main(args: Array[String]): Unit = {
+ println("TestInlineHandlersSubclassInline")
+ var result = -1
+
+ try {
+ if (nextInt % 2 == 0)
+ throw new IllegalArgumentException("something")
+ result = 1
+ } catch {
+ case e: RuntimeException =>
+ println("RuntimeException")
+ }
+
+ result
+ }
+}
+
+/** For this class, the finally handler should be inlined */
+object TestInlineHandlersFinallyInline {
+
+ def main(args: Array[String]): Unit = {
+ println("TestInlineHandlersFinallyInline")
+ var result = -1
+
+ try {
+ if (nextInt % 2 == 0)
+ throw new IllegalArgumentException("something")
+ result = 1
+ } catch {
+ case e: Exception => throw e
+ } finally {
+ println("finally")
+ result = (result - 1) / 2
+ }
+
+ result
+ }
+}
+
+
+case class MyException(message: String) extends RuntimeException(message)
+
+/** For this class, we test inlining for a case class error */
+object TestInlineHandlersCaseClassExceptionInline {
+
+ def main(args: Array[String]): Unit = {
+ println("TestInlineHandlersCaseClassExceptionInline")
+ var result = -1
+
+ try {
+ if (nextInt % 2 == 0)
+ throw new MyException("something")
+ result = 1
+ } catch {
+ case MyException(message) => println(message)
+ }
+
+ result
+ }
+}
+
+
+/** For this class, inline should take place in the inner handler */
+object TestInlineHandlersNestedHandlerInnerInline {
+
+ def main(args: Array[String]): Unit = {
+ println("TestInlineHandlersNestedHandlersInnerInline")
+ var result = -1
+
+ try {
+ try {
+ if (nextInt % 2 == 0)
+ throw new MyException("something")
+ result = 1
+ } catch {
+ case MyException(message) => println(message)
+ }
+ } catch {
+ case e: IllegalArgumentException => println("IllegalArgumentException")
+ }
+
+ result
+ }
+}
+
+
+/** For this class, inline should take place in the outer handler */
+object TestInlineHandlersNestedHandlerOuterInline {
+
+ def main(args: Array[String]): Unit = {
+ println("TestInlineHandlersNestedHandlersOuterInline")
+ var result = -1
+
+ try {
+ try {
+ if (nextInt % 2 == 0)
+ throw new MyException("something")
+ result = 1
+ } catch {
+ case e: IllegalArgumentException => println("IllegalArgumentException")
+ }
+ } catch {
+ case MyException(message) => println(message)
+ }
+
+ result
+ }
+}
+
+
+/** For this class, inline should take place in the all handlers (inner, outer and finally) */
+object TestInlineHandlersNestedHandlerAllInline {
+
+ def main(args: Array[String]): Unit = {
+ println("TestInlineHandlersNestedHandlersOuterInline")
+ var result = -1
+
+ try {
+ try {
+ if (nextInt % 2 == 0)
+ throw new MyException("something")
+ result = 1
+ } catch {
+ case MyException(message) =>
+ println(message)
+ throw MyException(message)
+ }
+ } catch {
+ case MyException(message) =>
+ println(message)
+ throw MyException(message)
+ } finally {
+ println("finally")
+ result = (result - 1) / 2
+ }
+
+ result
+ }
+}
+
+
+/** This class is meant to test whether the inline handler is copied only once for multiple inlines */
+object TestInlineHandlersSingleCopy {
+
+ def main(args: Array[String]): Unit = {
+ println("TestInlineHandlersSingleCopy")
+ var result = -1
+
+ try {
+
+ if (nextInt % 2 == 0)
+ throw new MyException("something")
+
+ println("A side effect in the middle")
+ result = 3 // another one
+
+ if (nextInt % 3 == 2)
+ throw new MyException("something else")
+ result = 1
+ } catch {
+ case MyException(message) =>
+ println(message)
+ }
+
+ result
+ }
+}
+
+/** This should test the special exception handler for synchronized blocks */
+object TestInlineHandlersSynchronized {
+
+ def main(args: Array[String]): Unit = {
+ println("TestInlineHandlersSynchronized")
+ var result = "hello"
+
+ // any exception thrown here will be caught by a default handler that does MONTIOR_EXIT on result :)
+ result.synchronized {
+ throw MyException(result)
+ }
+
+ result.length
+ }
+}
+
+/** This should test the special exception handler for synchronized blocks with stack */
+object TestInlineHandlersSynchronizedWithStack {
+
+ def main(args: Array[String]): Unit = {
+ println("TestInlineHandlersSynchronizedWithStack")
+ var result = "hello"
+
+ // any exception thrown here will be caught by a default handler that does MONTIOR_EXIT on result :)
+ result = "abc" + result.synchronized {
+ throw MyException(result)
+ }
+
+ result.length
+ }
+}
+
+/** This test should trigger a bug in the dead code elimination phase - it actually crashes ICodeCheckers
+object TestInlineHandlersSynchronizedWithStackDoubleThrow {
+
+ def main(args: Array[String]): Unit = {
+ println("TestInlineHandlersSynchronizedWithStackDoubleThrow")
+ var result = "a"
+
+ // any exception thrown here will be caught by a default handler that does MONTIOR_EXIT on result :)
+ result += result.synchronized { throw MyException(result) }
+ result += result.synchronized { throw MyException(result) }
+
+ result.length
+ }
+}
+*/
+
+/** This test should check the preciseness of the inliner: it should not do any inlining here
+* as it is not able to discern between the different exceptions
+*/
+object TestInlineHandlersPreciseness {
+
+ def main(args: Array[String]): Unit = {
+ println("TestInlineHandlersCorrectHandler")
+
+ try {
+ val exception: Throwable =
+ if (scala.util.Random.nextInt % 2 == 0)
+ new IllegalArgumentException("even")
+ else
+ new StackOverflowError("odd")
+ throw exception
+ } catch {
+ case e: IllegalArgumentException =>
+ println("Correct, IllegalArgumentException")
+ case e: StackOverflowError =>
+ println("Correct, StackOverflowException")
+ case t: Throwable =>
+ println("WROOOONG, not Throwable!")
+ }
+ }
+}
+
+/** This check should verify that the double no-local exception handler is duplicated correctly */
+object TestInlineHandlersDoubleNoLocal {
+
+ val a1: String = "a"
+ val a2: String = "b"
+
+ def main(args: Array[String]): Unit = {
+ println("TestInlineHandlersDoubleNoLocal")
+
+ try {
+ a1.synchronized {
+ a2. synchronized {
+ throw new MyException("crash")
+ }
+ }
+ } catch {
+ case t: Throwable => println("Caught crash: " + t.toString)
+ }
+
+ /* try {
+ val exception: Throwable =
+ if (scala.util.Random.nextInt % 2 == 0)
+ new IllegalArgumentException("even")
+ else
+ new StackOverflowError("odd")
+ throw exception
+ } catch {
+ case e: IllegalArgumentException =>
+ println("Correct, IllegalArgumentException")
+ case e: StackOverflowError =>
+ println("Correct, StackOverflowException")
+ case t: Throwable =>
+ println("WROOOONG, not Throwable!")
+ }*/
+ }
+}
diff --git a/test/files/run/inner-obj-auto.check b/test/files/run/inner-obj-auto.check
new file mode 100644
index 0000000..90f7e27
--- /dev/null
+++ b/test/files/run/inner-obj-auto.check
@@ -0,0 +1,65 @@
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
+ok
diff --git a/test/files/run/inner-obj-auto.scala b/test/files/run/inner-obj-auto.scala
new file mode 100644
index 0000000..aa2e293
--- /dev/null
+++ b/test/files/run/inner-obj-auto.scala
@@ -0,0 +1,2092 @@
+
+
+/* ================================================================================
+ Automatically generated on 2011-05-11. Do Not Edit (unless you have to).
+ (2-level nesting)
+ ================================================================================ */
+
+
+
+class Class2_1 {
+
+ class Class1_2 {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Class1_2).run }
+}
+
+
+object Object3_1 {
+
+ class Class1_2 {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Class1_2).run } // trigger
+}
+
+
+trait Trait4_1 {
+
+ class Class1_2 {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Class1_2).run }
+}
+
+
+class Class6_1 {
+
+ object Object5_2 {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest } // trigger
+ }
+
+ def run { Object5_2.run }
+}
+
+
+object Object7_1 {
+
+ object Object5_2 {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest } // trigger
+ }
+
+ def run { Object5_2.run } // trigger
+}
+
+
+trait Trait8_1 {
+
+ object Object5_2 {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest } // trigger
+ }
+
+ def run { Object5_2.run }
+}
+
+
+class Class10_1 {
+
+ trait Trait9_2 {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Trait9_2 {}).run }
+}
+
+
+object Object11_1 {
+
+ trait Trait9_2 {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Trait9_2 {}).run } // trigger
+}
+
+
+trait Trait12_1 {
+
+ trait Trait9_2 {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Trait9_2 {}).run }
+}
+
+
+class Class14_1 {
+
+ def method13_2 {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { method13_2 }
+}
+
+
+object Object15_1 {
+
+ def method13_2 {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { method13_2 } // trigger
+}
+
+
+trait Trait16_1 {
+
+ def method13_2 {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { method13_2 }
+}
+
+
+class Class18_1 {
+
+ private def method17_2 {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { method17_2 }
+}
+
+
+object Object19_1 {
+
+ private def method17_2 {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { method17_2 } // trigger
+}
+
+
+trait Trait20_1 {
+
+ private def method17_2 {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { method17_2 }
+}
+
+
+class Class22_1 {
+
+ val fun21_2 = () => {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { fun21_2() }
+}
+
+
+object Object23_1 {
+
+ val fun21_2 = () => {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { fun21_2() } // trigger
+}
+
+
+trait Trait24_1 {
+
+ val fun21_2 = () => {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { fun21_2() }
+}
+
+
+class Class26_1 {
+
+ class Class25_2 {
+ { // in primary constructor
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+ }
+
+ def run { (new Class25_2) }
+}
+
+
+object Object27_1 {
+
+ class Class25_2 {
+ { // in primary constructor
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+ }
+
+ def run { (new Class25_2) } // trigger
+}
+
+
+trait Trait28_1 {
+
+ class Class25_2 {
+ { // in primary constructor
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+ }
+
+ def run { (new Class25_2) }
+}
+
+
+class Class30_1 {
+
+ trait Trait29_2 {
+ { // in primary constructor
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+ }
+
+ def run { (new Trait29_2 {}) }
+}
+
+
+object Object31_1 {
+
+ trait Trait29_2 {
+ { // in primary constructor
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+ }
+
+ def run { (new Trait29_2 {}) } // trigger
+}
+
+
+trait Trait32_1 {
+
+ trait Trait29_2 {
+ { // in primary constructor
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+ }
+
+ def run { (new Trait29_2 {}) }
+}
+
+
+class Class34_1 {
+
+ lazy val lzvalue33_2 = {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { lzvalue33_2 }
+}
+
+
+object Object35_1 {
+
+ lazy val lzvalue33_2 = {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { lzvalue33_2 } // trigger
+}
+
+
+trait Trait36_1 {
+
+ lazy val lzvalue33_2 = {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { lzvalue33_2 }
+}
+
+
+class Class38_1 {
+
+ val value37_2 = {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { value37_2 }
+}
+
+
+object Object39_1 {
+
+ val value37_2 = {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { value37_2 } // trigger
+}
+
+
+trait Trait40_1 {
+
+ val value37_2 = {
+ var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { value37_2 }
+}
+
+
+class Class42_1 {
+
+ class Class41_2 {
+ var ObjCounter = 0
+
+ private object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Class41_2).run }
+}
+
+
+object Object43_1 {
+
+ class Class41_2 {
+ var ObjCounter = 0
+
+ private object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Class41_2).run } // trigger
+}
+
+
+trait Trait44_1 {
+
+ class Class41_2 {
+ var ObjCounter = 0
+
+ private object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Class41_2).run }
+}
+
+
+class Class46_1 {
+
+ object Object45_2 {
+ var ObjCounter = 0
+
+ private object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest } // trigger
+ }
+
+ def run { Object45_2.run }
+}
+
+
+object Object47_1 {
+
+ object Object45_2 {
+ var ObjCounter = 0
+
+ private object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest } // trigger
+ }
+
+ def run { Object45_2.run } // trigger
+}
+
+
+trait Trait48_1 {
+
+ object Object45_2 {
+ var ObjCounter = 0
+
+ private object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest } // trigger
+ }
+
+ def run { Object45_2.run }
+}
+
+
+class Class50_1 {
+
+ trait Trait49_2 {
+ var ObjCounter = 0
+
+ private object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Trait49_2 {}).run }
+}
+
+
+object Object51_1 {
+
+ trait Trait49_2 {
+ var ObjCounter = 0
+
+ private object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Trait49_2 {}).run } // trigger
+}
+
+
+trait Trait52_1 {
+
+ trait Trait49_2 {
+ var ObjCounter = 0
+
+ private object Obj { ObjCounter += 1}
+ Obj // one
+
+ def singleThreadedAccess(x: Any) = {
+ x == Obj
+ }
+
+ def runTest {
+ try {
+ assert(singleThreadedAccess(Obj))
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Trait49_2 {}).run }
+}
+
+
+class Class54_1 {
+
+ class Class53_2 {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Class53_2).run }
+}
+
+
+object Object55_1 {
+
+ class Class53_2 {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Class53_2).run } // trigger
+}
+
+
+trait Trait56_1 {
+
+ class Class53_2 {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Class53_2).run }
+}
+
+
+class Class58_1 {
+
+ object Object57_2 {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest } // trigger
+ }
+
+ def run { Object57_2.run }
+}
+
+
+object Object59_1 {
+
+ object Object57_2 {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest } // trigger
+ }
+
+ def run { Object57_2.run } // trigger
+}
+
+
+trait Trait60_1 {
+
+ object Object57_2 {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest } // trigger
+ }
+
+ def run { Object57_2.run }
+}
+
+
+class Class62_1 {
+
+ trait Trait61_2 {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Trait61_2 {}).run }
+}
+
+
+object Object63_1 {
+
+ trait Trait61_2 {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Trait61_2 {}).run } // trigger
+}
+
+
+trait Trait64_1 {
+
+ trait Trait61_2 {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ def run { runTest }
+ }
+
+ def run { (new Trait61_2 {}).run }
+}
+
+
+class Class66_1 {
+
+ def method65_2 {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { method65_2 }
+}
+
+
+object Object67_1 {
+
+ def method65_2 {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { method65_2 } // trigger
+}
+
+
+trait Trait68_1 {
+
+ def method65_2 {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { method65_2 }
+}
+
+
+class Class70_1 {
+
+ private def method69_2 {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { method69_2 }
+}
+
+
+object Object71_1 {
+
+ private def method69_2 {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { method69_2 } // trigger
+}
+
+
+trait Trait72_1 {
+
+ private def method69_2 {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { method69_2 }
+}
+
+
+class Class74_1 {
+
+ val fun73_2 = () => {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { fun73_2() }
+}
+
+
+object Object75_1 {
+
+ val fun73_2 = () => {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { fun73_2() } // trigger
+}
+
+
+trait Trait76_1 {
+
+ val fun73_2 = () => {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { fun73_2() }
+}
+
+
+class Class78_1 {
+
+ class Class77_2 {
+ { // in primary constructor
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+ }
+
+ def run { (new Class77_2) }
+}
+
+
+object Object79_1 {
+
+ class Class77_2 {
+ { // in primary constructor
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+ }
+
+ def run { (new Class77_2) } // trigger
+}
+
+
+trait Trait80_1 {
+
+ class Class77_2 {
+ { // in primary constructor
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+ }
+
+ def run { (new Class77_2) }
+}
+
+
+class Class82_1 {
+
+ trait Trait81_2 {
+ { // in primary constructor
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+ }
+
+ def run { (new Trait81_2 {}) }
+}
+
+
+object Object83_1 {
+
+ trait Trait81_2 {
+ { // in primary constructor
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+ }
+
+ def run { (new Trait81_2 {}) } // trigger
+}
+
+
+trait Trait84_1 {
+
+ trait Trait81_2 {
+ { // in primary constructor
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+ }
+
+ def run { (new Trait81_2 {}) }
+}
+
+
+class Class90_1 {
+
+ val value89_2 = {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { value89_2 }
+}
+
+
+trait Trait92_1 {
+
+ val value89_2 = {
+ @volatile var ObjCounter = 0
+
+ object Obj { ObjCounter += 1}
+
+ def multiThreadedAccess() {
+ val threads = for (i <- 1 to 5) yield new Thread(new Runnable {
+ def run = Obj
+ })
+
+ threads foreach (_.start())
+ threads foreach (_.join())
+ }
+
+ def runTest {
+ try {
+ multiThreadedAccess()
+ assert(ObjCounter == 1, "multiple instances: " + ObjCounter)
+ println("ok")
+ } catch {
+ case e => print("multi-threaded failed "); e.printStackTrace()
+ }
+ }
+
+ runTest // trigger
+ }
+
+ def run { value89_2 }
+}
+
+
+object Test {
+ def main(args: Array[String]) {
+ (new Class2_1).run
+ Object3_1.run
+ (new Trait4_1 {}).run
+ (new Class6_1).run
+ Object7_1.run
+ (new Trait8_1 {}).run
+ (new Class10_1).run
+ Object11_1.run
+ (new Trait12_1 {}).run
+ (new Class14_1).run
+ Object15_1.run
+ (new Trait16_1 {}).run
+ (new Class18_1).run
+ Object19_1.run
+ (new Trait20_1 {}).run
+ (new Class22_1).run
+ Object23_1.run
+ (new Trait24_1 {}).run
+ (new Class26_1).run
+ Object27_1.run
+ (new Trait28_1 {}).run
+ (new Class30_1).run
+ Object31_1.run
+ (new Trait32_1 {}).run
+ (new Class34_1).run
+ Object35_1.run
+ (new Trait36_1 {}).run
+ (new Class38_1).run
+ Object39_1.run
+ (new Trait40_1 {}).run
+ (new Class42_1).run
+ Object43_1.run
+ (new Trait44_1 {}).run
+ (new Class46_1).run
+ Object47_1.run
+ (new Trait48_1 {}).run
+ (new Class50_1).run
+ Object51_1.run
+ (new Trait52_1 {}).run
+ (new Class54_1).run
+ Object55_1.run
+ (new Trait56_1 {}).run
+ (new Class58_1).run
+ Object59_1.run
+ (new Trait60_1 {}).run
+ (new Class62_1).run
+ Object63_1.run
+ (new Trait64_1 {}).run
+ (new Class66_1).run
+ Object67_1.run
+ (new Trait68_1 {}).run
+ (new Class70_1).run
+ Object71_1.run
+ (new Trait72_1 {}).run
+ (new Class74_1).run
+ Object75_1.run
+ (new Trait76_1 {}).run
+ (new Class78_1).run
+ Object79_1.run
+ (new Trait80_1 {}).run
+ (new Class82_1).run
+ Object83_1.run
+ (new Trait84_1 {}).run
+ (new Class90_1).run
+ (new Trait92_1 {}).run
+ }
+}
+
diff --git a/test/files/run/interop_classtags_are_classmanifests.check b/test/files/run/interop_classtags_are_classmanifests.check
new file mode 100644
index 0000000..5a8fc2b
--- /dev/null
+++ b/test/files/run/interop_classtags_are_classmanifests.check
@@ -0,0 +1,3 @@
+Int
+java.lang.String
+Array[int]
diff --git a/test/files/run/interop_classtags_are_classmanifests.scala b/test/files/run/interop_classtags_are_classmanifests.scala
new file mode 100644
index 0000000..91b9d89
--- /dev/null
+++ b/test/files/run/interop_classtags_are_classmanifests.scala
@@ -0,0 +1,11 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ def classTagIsClassManifest[T: ClassTag] = {
+ println(classManifest[T])
+ }
+
+ classTagIsClassManifest[Int]
+ classTagIsClassManifest[String]
+ classTagIsClassManifest[Array[Int]]
+}
\ No newline at end of file
diff --git a/test/files/run/interop_manifests_are_abstypetags.check b/test/files/run/interop_manifests_are_abstypetags.check
new file mode 100644
index 0000000..19a35ad
--- /dev/null
+++ b/test/files/run/interop_manifests_are_abstypetags.check
@@ -0,0 +1,3 @@
+Int
+java.lang.String
+Array[Int]
diff --git a/test/files/run/interop_manifests_are_abstypetags.scala b/test/files/run/interop_manifests_are_abstypetags.scala
new file mode 100644
index 0000000..f2c2723
--- /dev/null
+++ b/test/files/run/interop_manifests_are_abstypetags.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def manifestIsWeakTypeTag[T: Manifest] = {
+ println(implicitly[WeakTypeTag[T]].tpe)
+ }
+
+ manifestIsWeakTypeTag[Int]
+ manifestIsWeakTypeTag[String]
+ manifestIsWeakTypeTag[Array[Int]]
+}
\ No newline at end of file
diff --git a/test/files/run/interop_manifests_are_classtags.check b/test/files/run/interop_manifests_are_classtags.check
new file mode 100644
index 0000000..f3f7041
--- /dev/null
+++ b/test/files/run/interop_manifests_are_classtags.check
@@ -0,0 +1,18 @@
+Int
+List()
+List(0, 0, 0, 0, 0)
+java.lang.String
+List()
+List(null, null, null, null, null)
+Array[Int]
+List()
+List(null, null, null, null, null)
+Int
+List()
+List(0, 0, 0, 0, 0)
+java.lang.String
+List()
+List(null, null, null, null, null)
+Array[Int]
+List()
+List(null, null, null, null, null)
diff --git a/test/files/run/interop_manifests_are_classtags.scala b/test/files/run/interop_manifests_are_classtags.scala
new file mode 100644
index 0000000..03479e5
--- /dev/null
+++ b/test/files/run/interop_manifests_are_classtags.scala
@@ -0,0 +1,23 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ def classManifestIsClassTag[T: ClassManifest] = {
+ println(classTag[T])
+ println(Array[T]().toList)
+ println(new Array[T](5).toList)
+ }
+
+ classManifestIsClassTag[Int]
+ classManifestIsClassTag[String]
+ classManifestIsClassTag[Array[Int]]
+
+ def manifestIsClassTag[T: Manifest] = {
+ println(classTag[T])
+ println(Array[T]().toList)
+ println(new Array[T](5).toList)
+ }
+
+ manifestIsClassTag[Int]
+ manifestIsClassTag[String]
+ manifestIsClassTag[Array[Int]]
+}
\ No newline at end of file
diff --git a/test/files/run/interop_manifests_are_typetags.check b/test/files/run/interop_manifests_are_typetags.check
new file mode 100644
index 0000000..19a35ad
--- /dev/null
+++ b/test/files/run/interop_manifests_are_typetags.check
@@ -0,0 +1,3 @@
+Int
+java.lang.String
+Array[Int]
diff --git a/test/files/run/interop_manifests_are_typetags.scala b/test/files/run/interop_manifests_are_typetags.scala
new file mode 100644
index 0000000..294d3c2
--- /dev/null
+++ b/test/files/run/interop_manifests_are_typetags.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def manifestIsTypeTag[T: Manifest] = {
+ println(typeOf[T])
+ }
+
+ manifestIsTypeTag[Int]
+ manifestIsTypeTag[String]
+ manifestIsTypeTag[Array[Int]]
+}
\ No newline at end of file
diff --git a/test/files/run/interop_typetags_are_manifests.check b/test/files/run/interop_typetags_are_manifests.check
new file mode 100644
index 0000000..e02de1f
--- /dev/null
+++ b/test/files/run/interop_typetags_are_manifests.check
@@ -0,0 +1,3 @@
+int
+java.lang.String
+Array[Int]
diff --git a/test/files/run/interop_typetags_are_manifests.scala b/test/files/run/interop_typetags_are_manifests.scala
new file mode 100644
index 0000000..1aca7f5
--- /dev/null
+++ b/test/files/run/interop_typetags_are_manifests.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.ClassTag
+
+object Test extends App {
+ def typeTagIsManifest[T: TypeTag : ClassTag] = {
+ println(manifest[T])
+ }
+
+ typeTagIsManifest[Int]
+ typeTagIsManifest[String]
+ typeTagIsManifest[Array[Int]]
+}
\ No newline at end of file
diff --git a/test/files/run/interpolation.check b/test/files/run/interpolation.check
new file mode 100644
index 0000000..997abb4
--- /dev/null
+++ b/test/files/run/interpolation.check
@@ -0,0 +1,32 @@
+Bob is 1 years old
+Bob is 1 years old
+Bob will be 2 years old
+Bob will be 2 years old
+1+1 = 2
+1+1 = 2
+Bob is 12 years old
+Bob is 12 years old
+Bob will be 13 years old
+Bob will be 13 years old
+12+1 = 13
+12+1 = 13
+Bob is 123 years old
+Bob is 123 years old
+Bob will be 124 years old
+Bob will be 124 years old
+123+1 = 124
+123+1 = 124
+Best price: 10.0
+Best price: 10.00
+10.0% discount included
+10.00% discount included
+Best price: 13.345
+Best price: 13.35
+13.345% discount included
+13.35% discount included
+
+0
+00
+
+0
+00
diff --git a/test/files/run/interpolation.flags b/test/files/run/interpolation.flags
new file mode 100644
index 0000000..48fd867
--- /dev/null
+++ b/test/files/run/interpolation.flags
@@ -0,0 +1 @@
+-Xexperimental
diff --git a/test/files/run/interpolation.scala b/test/files/run/interpolation.scala
new file mode 100644
index 0000000..14d9819
--- /dev/null
+++ b/test/files/run/interpolation.scala
@@ -0,0 +1,32 @@
+object Test extends App {
+
+ def test1(n: Int) = {
+ println(s"Bob is $n years old")
+ println(f"Bob is $n%2d years old")
+ println(s"Bob will be ${n+1} years old")
+ println(f"Bob will be ${n+1}%2d years old")
+ println(s"$n+1 = ${n+1}")
+ println(f"$n%d+1 = ${n+1}%d")
+ }
+
+ def test2(f: Float) = {
+ println(s"Best price: $f")
+ println(f"Best price: $f%.2f")
+ println(s"$f% discount included")
+ println(f"$f%3.2f%% discount included")
+ }
+
+ test1(1)
+ test1(12)
+ test1(123)
+
+ test2(10.0f)
+ test2(13.345f)
+
+ println(s"")
+ println(s"${0}")
+ println(s"${0}${0}")
+ println(f"")
+ println(f"${0}")
+ println(f"${0}${0}")
+}
diff --git a/test/files/run/interpolationArgs.check b/test/files/run/interpolationArgs.check
new file mode 100644
index 0000000..155991e
--- /dev/null
+++ b/test/files/run/interpolationArgs.check
@@ -0,0 +1,2 @@
+java.lang.IllegalArgumentException: wrong number of arguments for interpolated string
+java.lang.IllegalArgumentException: wrong number of arguments for interpolated string
diff --git a/test/files/pos/annotDepMethType.flags b/test/files/run/interpolationArgs.flags
similarity index 100%
copy from test/files/pos/annotDepMethType.flags
copy to test/files/run/interpolationArgs.flags
diff --git a/test/files/run/interpolationArgs.scala b/test/files/run/interpolationArgs.scala
new file mode 100644
index 0000000..eb13767
--- /dev/null
+++ b/test/files/run/interpolationArgs.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ try { scala.StringContext("p1", "p2", "p3").s("e1") } catch { case ex => println(ex) }
+ try { scala.StringContext("p1").s("e1") } catch { case ex => println(ex) }
+}
+
diff --git a/test/files/run/interpolationMultiline1.check b/test/files/run/interpolationMultiline1.check
new file mode 100644
index 0000000..09579a8
--- /dev/null
+++ b/test/files/run/interpolationMultiline1.check
@@ -0,0 +1,26 @@
+Bob is 1 years old
+Bob is 1 years old
+Bob will be 2 years old
+Bob will be 2 years old
+1+1 = 2
+1+1 = 2
+Bob is 12 years old
+Bob is 12 years old
+Bob will be 13 years old
+Bob will be 13 years old
+12+1 = 13
+12+1 = 13
+Bob is 123 years old
+Bob is 123 years old
+Bob will be 124 years old
+Bob will be 124 years old
+123+1 = 124
+123+1 = 124
+Best price: 10.0
+Best price: 10.00
+10.0% discount included
+10.00% discount included
+Best price: 13.345
+Best price: 13.35
+13.345% discount included
+13.35% discount included
diff --git a/test/files/run/interpolationMultiline1.flags b/test/files/run/interpolationMultiline1.flags
new file mode 100644
index 0000000..48fd867
--- /dev/null
+++ b/test/files/run/interpolationMultiline1.flags
@@ -0,0 +1 @@
+-Xexperimental
diff --git a/test/files/run/interpolationMultiline1.scala b/test/files/run/interpolationMultiline1.scala
new file mode 100644
index 0000000..db634e7
--- /dev/null
+++ b/test/files/run/interpolationMultiline1.scala
@@ -0,0 +1,26 @@
+object Test extends App {
+
+ def test1(n: Int) = {
+ println(s"""Bob is $n years old""")
+ println(f"""Bob is $n%2d years old""")
+ println(s"""Bob will be ${n+1} years old""")
+ println(f"""Bob will be ${n+1}%2d years old""")
+ println(s"""$n+1 = ${n+1}""")
+ println(f"""$n%d+1 = ${n+1}%d""")
+ }
+
+ def test2(f: Float) = {
+ println(s"""Best price: $f""")
+ println(f"""Best price: $f%.2f""")
+ println(s"""$f% discount included""")
+ println(f"""$f%3.2f%% discount included""")
+ }
+
+ test1(1)
+ test1(12)
+ test1(123)
+
+ test2(10.0f)
+ test2(13.345f)
+
+}
diff --git a/test/files/run/interpolationMultiline2.check b/test/files/run/interpolationMultiline2.check
new file mode 100644
index 0000000..2218c93
--- /dev/null
+++ b/test/files/run/interpolationMultiline2.check
@@ -0,0 +1,26 @@
+Bob is 1 years old!
+Bob is 1 years old!
+Bob is 1 years old!
+Bob is 1 years old!
+Bob is 1 years old!
+Bob is 1%2d years old!
+Bob is 1 years old!
+Bob is 1%2d years old!
+===============
+Bob is 12 years old!
+Bob is 12 years old!
+Bob is 12 years old!
+Bob is 12 years old!
+Bob is 12 years old!
+Bob is 12%2d years old!
+Bob is 12 years old!
+Bob is 12%2d years old!
+===============
+Bob is 123 years old!
+Bob is 123 years old!
+Bob is 123 years old!
+Bob is 123 years old!
+Bob is 123 years old!
+Bob is 123%2d years old!
+Bob is 123 years old!
+Bob is 123%2d years old!
diff --git a/test/files/pos/annotDepMethType.flags b/test/files/run/interpolationMultiline2.flags
similarity index 100%
copy from test/files/pos/annotDepMethType.flags
copy to test/files/run/interpolationMultiline2.flags
diff --git a/test/files/run/interpolationMultiline2.scala b/test/files/run/interpolationMultiline2.scala
new file mode 100644
index 0000000..f6a682c
--- /dev/null
+++ b/test/files/run/interpolationMultiline2.scala
@@ -0,0 +1,21 @@
+object Test extends App {
+
+ def test1(n: Int) = {
+ val old = "old"
+ try { println(s"""Bob is ${s"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ try { println(s"""Bob is ${f"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ try { println(f"""Bob is ${s"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ try { println(f"""Bob is ${f"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ try { println(f"""Bob is ${f"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ try { println(f"""Bob is ${s"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ try { println(s"""Bob is ${f"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ try { println(s"""Bob is ${s"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) }
+ }
+
+ test1(1)
+ println("===============")
+ test1(12)
+ println("===============")
+ test1(123)
+
+}
diff --git a/test/files/run/iq-msil.check b/test/files/run/iq-msil.check
deleted file mode 100644
index 08f9fc7..0000000
--- a/test/files/run/iq-msil.check
+++ /dev/null
@@ -1,12 +0,0 @@
-Empty
-Head: 42
-q5: Queue(0,1,2,3,4,5,6,7,8,9)
-q5[5]: 5
-q5 == q5c: True
-q5c == q5: True
-q8: Queue(2,3,4,5,6,7,8,9,10,11)
-q8 == q9: True
-Elements: 1 2 3 4 5 6 7 8 9
-String: <1-2-3-4-5-6-7-8-9>
-Length: 9
-Front: 1
diff --git a/test/files/run/iq.scala b/test/files/run/iq.scala
index 59baee1..e5f9e47 100644
--- a/test/files/run/iq.scala
+++ b/test/files/run/iq.scala
@@ -7,7 +7,7 @@ import scala.collection.immutable.Queue
object iq {
def main {
/* Create an empty queue. */
- val q: Queue[Int] = Queue.Empty
+ val q: Queue[Int] = Queue.empty
/* Test isEmpty.
* Expected: Empty
@@ -45,7 +45,7 @@ object iq {
*/
Console.println("q5[5]: " + q5(5))
- val q5c: Queue[Int] = Queue.Empty.enqueue(List(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))
+ val q5c: Queue[Int] = Queue.empty.enqueue(List(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))
/* Testing ==
* Expected: q5 == q9: true
diff --git a/test/files/run/is-valid-num.scala b/test/files/run/is-valid-num.scala
new file mode 100644
index 0000000..402eff9
--- /dev/null
+++ b/test/files/run/is-valid-num.scala
@@ -0,0 +1,312 @@
+object Test {
+ def x = BigInt("10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")
+ def y = BigDecimal("" + (Short.MaxValue + 1) + ".0")
+ def y1 = BigDecimal("0.1")
+ def y2 = BigDecimal("0.5")
+
+ def l1 = Int.MaxValue.toLong + 1
+ def l2 = Int.MinValue.toLong - 1
+
+ def main(args: Array[String]): Unit = {
+// assert(x.isWhole, x)
+ assert(!x.isValidDouble, x)
+ assert(!x.isValidFloat, x)
+ assert(!x.isValidLong, x)
+ assert(!x.isValidInt, x)
+ assert(!x.isValidChar, x)
+ assert(!x.isValidShort, x)
+ assert(!x.isValidByte, x)
+// assert(y.isWhole, y)
+ assert(!y.isValidShort, y)
+ assert(y.isValidChar, y)
+ assert(y.isValidInt, y)
+ assert(y.isValidFloat, y)
+ assert(y.isValidDouble, y)
+ assert(y.isValidLong, y)
+ assert(!y.isValidByte, y)
+// assert(!y1.isWhole)
+ assert(!y1.isValidLong, y1)
+ assert(!y1.isValidFloat, y1)
+ assert(!y1.isValidDouble, y1)
+ assert(!y1.isValidInt, y1)
+ assert(!y1.isValidChar, y1)
+ assert(!y1.isValidShort, y1)
+ assert(!y1.isValidByte, y1)
+ assert(!y2.isValidLong, y2)
+ assert(y2.isValidFloat, y2)
+ assert(y2.isValidDouble, y2)
+
+ assert(!l1.isValidInt && (l1 - 1).isValidInt, l1)
+ assert(!l2.isValidInt && (l2 + 1).isValidInt, l2)
+
+ testBigInts()
+ testNonWholeDoubles()
+ testNaNs()
+ }
+
+ def testBigInts() {
+ def biExp2(e: Int) = BigInt(1) << e
+ def checkBigInt2(bi: BigInt) { checkBigInt(-bi); checkBigInt(bi) }
+
+ val pf = 24
+ val pd = 53
+
+ checkBigInt(BigInt(0))
+ checkBigInt2(biExp2(0))
+
+ checkBigInt2(biExp2(7) - 1)
+ checkBigInt2(biExp2(7))
+ checkBigInt2(biExp2(7) + 1)
+
+ checkBigInt2(biExp2(8) - 1)
+ checkBigInt2(biExp2(8))
+ checkBigInt2(biExp2(8) + 1)
+
+ checkBigInt2(biExp2(15) - 1)
+ checkBigInt2(biExp2(15))
+ checkBigInt2(biExp2(15) + 1)
+
+ checkBigInt2(biExp2(16) - 1)
+ checkBigInt2(biExp2(16))
+ checkBigInt2(biExp2(16) + 1)
+
+ checkBigInt2(biExp2(pf) - 1)
+ checkBigInt2(biExp2(pf))
+ checkBigInt2(biExp2(pf) + 1)
+ checkBigInt2(biExp2(pf) + 2)
+ checkBigInt2(biExp2(pf) - 2)
+ checkBigInt2(biExp2(pf + 1) - 1)
+ checkBigInt2(biExp2(pf + 1))
+ checkBigInt2(biExp2(pf + 1) + 1)
+ checkBigInt2(biExp2(pf + 1) + 2)
+ checkBigInt2(biExp2(pf + 1) + 3)
+ checkBigInt2(biExp2(pf + 1) + 4)
+
+ checkBigInt2(biExp2(31) - 1)
+ checkBigInt2(biExp2(31))
+ checkBigInt2(biExp2(31) + 1)
+
+ checkBigInt2(biExp2(32) - 1)
+ checkBigInt2(biExp2(32))
+ checkBigInt2(biExp2(32) + 1)
+ checkBigInt2(biExp2(32) + biExp2(64 - pf))
+ checkBigInt2(biExp2(32) + biExp2(64 - pf + 1))
+
+ checkBigInt2(biExp2(pd) - 1)
+ checkBigInt2(biExp2(pd))
+ checkBigInt2(biExp2(pd) + 1)
+ checkBigInt2(biExp2(pd) + 2)
+ checkBigInt2(biExp2(pd + 1) - 2)
+ checkBigInt2(biExp2(pd + 1) - 1)
+ checkBigInt2(biExp2(pd + 1))
+ checkBigInt2(biExp2(pd + 1) + 1)
+ checkBigInt2(biExp2(pd + 1) + 2)
+ checkBigInt2(biExp2(pd + 1) + 3)
+ checkBigInt2(biExp2(pd + 1) + 4)
+
+ checkBigInt2(biExp2(63) - 1)
+ checkBigInt2(biExp2(63))
+ checkBigInt2(biExp2(63) + 1)
+ checkBigInt2(biExp2(63) + biExp2(63 - pd))
+ checkBigInt2(biExp2(63) + biExp2(63 - pd + 1))
+ checkBigInt2(biExp2(63) + biExp2(63 - pf))
+ checkBigInt2(biExp2(63) + biExp2(63 - pf + 1))
+
+ checkBigInt2(biExp2(64) - 1)
+ checkBigInt2(biExp2(64))
+ checkBigInt2(biExp2(64) + 1)
+ checkBigInt2(biExp2(64) + biExp2(64 - pd))
+ checkBigInt2(biExp2(64) + biExp2(64 - pd + 1))
+ checkBigInt2(biExp2(64) + biExp2(64 - pf))
+ checkBigInt2(biExp2(64) + biExp2(64 - pf + 1))
+
+ checkBigInt2(biExp2(127))
+ checkBigInt2(biExp2(128) - biExp2(128 - pf))
+ checkBigInt2(biExp2(128) - biExp2(128 - pf - 1))
+ checkBigInt2(biExp2(128))
+
+ checkBigInt2(biExp2(1023))
+ checkBigInt2(biExp2(1024) - biExp2(1024 - pd))
+ checkBigInt2(biExp2(1024) - biExp2(1024 - pd - 1))
+ checkBigInt2(biExp2(1024))
+ }
+
+ def testNonWholeDoubles() {
+ checkNonWholeDouble(0.5)
+ checkNonWholeDouble(-math.E)
+ checkNonWholeDouble((1L << 51).toDouble + 0.5)
+ checkNonWholeDouble((1L << 23).toDouble + 0.5)
+ checkNonWholeDouble(Double.PositiveInfinity)
+ checkNonWholeDouble(Double.NegativeInfinity)
+ }
+
+ def testNaNs() {
+ assert(!Double.NaN.isWhole, Double.NaN)
+// assert(!Double.NaN.isValidDouble, Double.NaN)
+// assert(!Double.NaN.isValidFloat, Double.NaN)
+// assert(!Double.NaN.isValidLong, Double.NaN)
+ assert(!Double.NaN.isValidInt, Double.NaN)
+ assert(!Double.NaN.isValidChar, Double.NaN)
+ assert(!Double.NaN.isValidShort, Double.NaN)
+ assert(!Double.NaN.isValidByte, Double.NaN)
+
+ assert(!Float.NaN.isWhole, Float.NaN)
+// assert(!Float.NaN.isValidDouble, Float.NaN)
+// assert(!Float.NaN.isValidFloat, Float.NaN)
+// assert(!Float.NaN.isValidLong, Float.NaN)
+ assert(!Float.NaN.isValidInt, Float.NaN)
+ assert(!Float.NaN.isValidChar, Float.NaN)
+ assert(!Float.NaN.isValidShort, Float.NaN)
+ assert(!Float.NaN.isValidByte, Float.NaN)
+ }
+
+ def checkNonWholeDouble(d: Double) {
+ val f = d.toFloat
+ val isFloat = f == d
+
+ if (!d.isInfinity) {
+ val bd = BigDecimal(new java.math.BigDecimal(d))
+// assert(!bd.isWhole, bd)
+ assert(bd.isValidDouble, bd)
+ assert(bd.isValidFloat == isFloat, bd)
+ assert(!bd.isValidLong, bd)
+ assert(!bd.isValidInt, bd)
+ assert(!bd.isValidChar, bd)
+ assert(!bd.isValidShort, bd)
+ assert(!bd.isValidByte, bd)
+ }
+
+ assert(!d.isWhole, d)
+// assert(d.isValidDouble, d)
+// assert(d.isValidFloat == isFloat, d)
+// assert(!d.isValidLong, d)
+ assert(!d.isValidInt, d)
+ assert(!d.isValidChar, d)
+ assert(!d.isValidShort, d)
+ assert(!d.isValidByte, d)
+
+ if (isFloat) {
+ assert(!f.isWhole, f)
+// assert(f.isValidDouble, f)
+// assert(f.isValidFloat == isFloat, f)
+// assert(!f.isValidLong, f)
+ assert(!f.isValidInt, f)
+ assert(!f.isValidChar, f)
+ assert(!f.isValidShort, f)
+ assert(!f.isValidByte, f)
+ }
+ }
+
+ def checkBigInt(bi: BigInt) {
+ val bd = BigDecimal(bi, java.math.MathContext.UNLIMITED)
+ val isByte = bi >= Byte.MinValue && bi <= Byte.MaxValue
+ val isShort = bi >= Short.MinValue && bi <= Short.MaxValue
+ val isChar = bi >= Char.MinValue && bi <= Char.MaxValue
+ val isInt = bi >= Int.MinValue && bi <= Int.MaxValue
+ val isLong = bi >= Long.MinValue && bi <= Long.MaxValue
+ val isFloat = !bi.toFloat.isInfinity && bd.compare(BigDecimal(new java.math.BigDecimal(bi.toFloat))) == 0
+ val isDouble = !bi.toDouble.isInfinity && bd.compare(BigDecimal(new java.math.BigDecimal(bi.toDouble))) == 0
+
+// assert(bd.isWhole, bd)
+ assert(bd.isValidDouble == isDouble, bd)
+ assert(bd.isValidFloat == isFloat, bd)
+ assert(bd.isValidLong == isLong, bd)
+ assert(bd.isValidInt == isInt, bd)
+ assert(bd.isValidChar == isChar, bd)
+ assert(bd.isValidShort == isShort, bd)
+ assert(bd.isValidByte == isByte, bd)
+
+// assert(bi.isWhole, bi)
+ assert(bi.isValidDouble == isDouble, bi)
+ assert(bi.isValidFloat == isFloat, bi)
+ assert(bi.isValidLong == isLong, bi)
+ assert(bi.isValidInt == isInt, bi)
+ assert(bi.isValidChar == isChar, bi)
+ assert(bi.isValidShort == isShort, bi)
+ assert(bi.isValidByte == isByte, bi)
+
+ if (isDouble) {
+ val d = bi.toDouble
+ assert(d.isWhole, d)
+// assert(d.isValidDouble == isDouble, d)
+// assert(d.isValidFloat == isFloat, d)
+// assert(d.isValidLong == isLong, d)
+ assert(d.isValidInt == isInt, d)
+ assert(d.isValidChar == isChar, d)
+ assert(d.isValidShort == isShort, d)
+ assert(d.isValidByte == isByte, d)
+ }
+
+ if (isFloat) {
+ val f = bi.toFloat
+ assert(f.isWhole, f)
+// assert(f.isValidDouble == isDouble, f)
+// assert(f.isValidFloat == isFloat, f)
+// assert(f.isValidLong == isLong, f)
+ assert(f.isValidInt == isInt, f)
+ assert(f.isValidChar == isChar, f)
+ assert(f.isValidShort == isShort, f)
+ assert(f.isValidByte == isByte, f)
+ }
+
+ if (isLong) {
+ val l = bi.toLong
+ assert(l.isWhole, l)
+// assert(l.isValidDouble == isDouble, l)
+// assert(l.isValidFloat == isFloat, l)
+// assert(l.isValidLong == isLong, l)
+ assert(l.isValidInt == isInt, l)
+ assert(l.isValidChar == isChar, l)
+ assert(l.isValidShort == isShort, l)
+ assert(l.isValidByte == isByte, l)
+ }
+
+ if (isInt) {
+ val i = bi.toInt
+ assert(i.isWhole, i)
+// assert(i.isValidDouble == isDouble, i)
+// assert(i.isValidFloat == isFloat, i)
+// assert(i.isValidLong == isLong, i)
+ assert(i.isValidInt == isInt, i)
+ assert(i.isValidChar == isChar, i)
+ assert(i.isValidShort == isShort, i)
+ assert(i.isValidByte == isByte, i)
+ }
+
+ if (isChar) {
+ val c = bi.toChar
+ assert(c.isWhole, c)
+// assert(c.isValidDouble == isDouble, c)
+// assert(c.isValidFloat == isFloat, c)
+// assert(c.isValidLong == isLong, c)
+ assert(c.isValidInt == isInt, c)
+ assert(c.isValidChar == isChar, c)
+ assert(c.isValidShort == isShort, c)
+ assert(c.isValidByte == isByte, c)
+ }
+
+ if (isShort) {
+ val s = bi.toShort
+ assert(s.isWhole, s)
+// assert(s.isValidDouble == isDouble, s)
+// assert(s.isValidFloat == isFloat, s)
+// assert(s.isValidLong == isLong, s)
+ assert(s.isValidInt == isInt, s)
+ assert(s.isValidChar == isChar, s)
+ assert(s.isValidShort == isShort, s)
+ assert(s.isValidByte == isByte, s)
+ }
+
+ if (isByte) {
+ val b = bi.toByte
+ assert(b.isWhole, b)
+// assert(b.isValidDouble == isDouble, b)
+// assert(b.isValidFloat == isFloat, b)
+// assert(b.isValidLong == isLong, b)
+ assert(b.isValidInt == isInt, b)
+ assert(b.isValidChar == isChar, b)
+ assert(b.isValidShort == isShort, b)
+ assert(b.isValidByte == isByte, b)
+ }
+ }
+}
diff --git a/test/files/run/iterators.scala b/test/files/run/iterators.scala
index f0f93f0..b85291c 100644
--- a/test/files/run/iterators.scala
+++ b/test/files/run/iterators.scala
@@ -75,11 +75,11 @@ object Test {
def check_fromArray: Int = { // ticket #429
val a = List(1, 2, 3, 4).toArray
- var xs0 = Iterator.fromArray(a).toList;
- var xs1 = Iterator.fromArray(a, 0, 1).toList;
- var xs2 = Iterator.fromArray(a, 0, 2).toList;
- var xs3 = Iterator.fromArray(a, 0, 3).toList;
- var xs4 = Iterator.fromArray(a, 0, 4).toList;
+ var xs0 = a.iterator.toList;
+ var xs1 = a.slice(0, 1).iterator.toList;
+ var xs2 = a.slice(0, 2).iterator.toList;
+ var xs3 = a.slice(0, 3).iterator.toList;
+ var xs4 = a.slice(0, 4).iterator.toList;
xs0.length + xs1.length + xs2.length + xs3.length + xs4.length
}
diff --git a/test/files/run/java-erasure.check b/test/files/run/java-erasure.check
new file mode 100644
index 0000000..f2ad6c7
--- /dev/null
+++ b/test/files/run/java-erasure.check
@@ -0,0 +1 @@
+c
diff --git a/test/files/run/java-erasure.scala b/test/files/run/java-erasure.scala
new file mode 100644
index 0000000..0441ad7
--- /dev/null
+++ b/test/files/run/java-erasure.scala
@@ -0,0 +1,10 @@
+object Test {
+ val list = new java.util.ArrayList[String] { };
+ list add "a"
+ list add "c"
+ list add "b"
+
+ def main(args: Array[String]): Unit = {
+ println(java.util.Collections.max(list))
+ }
+}
diff --git a/test/files/run/json.scala b/test/files/run/json.scala
index f342064..a81f125 100644
--- a/test/files/run/json.scala
+++ b/test/files/run/json.scala
@@ -7,8 +7,8 @@ object Test extends App {
* toString comparison. */
def jsonToString(in : Any) : String = in match {
case l : List[_] => "[" + l.map(jsonToString).mkString(", ") + "]"
- case m : Map[String,_] => "{" + m.elements.toList
- .sort({ (x,y) => x._1 < y._1 })
+ case m : Map[String,_] => "{" + m.iterator.toList
+ .sortWith({ (x,y) => x._1 < y._1 })
.map({ case (k,v) => "\"" + k + "\": " + jsonToString(v) })
.mkString(", ") + "}"
case s : String => "\"" + s + "\""
@@ -20,7 +20,7 @@ object Test extends App {
*/
def sortJSON(in : Any) : Any = in match {
case l : List[_] => l.map(sortJSON)
- case m : Map[String,_] => TreeMap(m.mapElements(sortJSON).elements.toSeq : _*)
+ case m : Map[String,_] => TreeMap(m.mapValues(sortJSON).iterator.toSeq : _*)
// For the object versions, sort their contents, ugly casts and all...
case JSONObject(data) => JSONObject(sortJSON(data).asInstanceOf[Map[String,Any]])
case JSONArray(data) => JSONArray(sortJSON(data).asInstanceOf[List[Any]])
@@ -62,7 +62,7 @@ object Test extends App {
def stringDiff (expected : String, actual : String) {
if (expected != actual) {
// Figure out where the Strings differ and generate a marker
- val mismatchPosition = expected.toList.zip(actual.toList).findIndexOf({case (x,y) => x != y}) match {
+ val mismatchPosition = expected.toList.zip(actual.toList).indexWhere({case (x,y) => x != y}) match {
case -1 => Math.min(expected.length, actual.length)
case x => x
}
diff --git a/test/files/run/kmpSliceSearch.check b/test/files/run/kmpSliceSearch.check
new file mode 100644
index 0000000..9ce0eba
--- /dev/null
+++ b/test/files/run/kmpSliceSearch.check
@@ -0,0 +1,4 @@
+6 6
+5 10
+-1 -1
+4 4
diff --git a/test/files/run/kmpSliceSearch.scala b/test/files/run/kmpSliceSearch.scala
new file mode 100644
index 0000000..0f7e052
--- /dev/null
+++ b/test/files/run/kmpSliceSearch.scala
@@ -0,0 +1,60 @@
+object Test {
+ import scala.collection.SeqLike
+ def slowSearch[A](xs: Seq[A], ys: Seq[A], start: Int = 0): Int = {
+ if (xs startsWith ys) start
+ else if (xs.isEmpty) -1
+ else slowSearch(xs.tail, ys, start+1)
+ }
+ def bkwSlowSearch[A](xs: Seq[A], ys: Seq[A]) = {
+ val i = slowSearch(xs.reverse, ys.reverse)
+ if (i<0) i
+ else xs.length - ys.length - i
+ }
+ def main(args: Array[String]) {
+ val rng = new scala.util.Random(java.lang.Integer.parseInt("kmp",36))
+
+ // Make sure we agree with naive implementation
+ for (h <- Array(2,5,1000)) {
+ for (i <- 0 to 100) {
+ for (j <- 0 to 10) {
+ val xs = (0 to j).map(_ => (rng.nextInt & 0x7FFFFFFF) % h)
+ val xsa = xs.toArray
+ val xsv = Vector() ++ xs
+ val xsl = xs.toList
+ val xss = Vector[Seq[Int]](xs,xsa,xsv,xsl)
+ for (k <- 0 to 5) {
+ val ys = (0 to k).map(_ => (rng.nextInt & 0x7FFFFFFF) % h)
+ val ysa = ys.toArray
+ val ysv = Vector() ++ ys
+ val ysl = ys.toList
+ val yss = Vector[Seq[Int]](ys,ysa,ysv,ysl)
+ val fwd_slow = slowSearch(xs,ys)
+ val bkw_slow = bkwSlowSearch(xs,ys)
+ val fwd_fast = xss.flatMap(xs => yss.map(ys => SeqLike.indexOf(xs,0,xs.length,ys,0,ys.length,0)))
+ val bkw_fast = xss.flatMap(xs => yss.map(ys => SeqLike.lastIndexOf(xs,0,xs.length,ys,0,ys.length,xs.length)))
+ assert(fwd_fast.forall(_ == fwd_slow))
+ assert(bkw_fast.forall(_ == bkw_slow))
+ }
+ }
+ }
+ }
+
+ // Check performance^Wcorrectness of common small test cases
+ val haystacks = List[Seq[Int]](
+ Array(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15),
+ Vector(99,2,99,99,2,99,99,99,2,99,99,99,99,2),
+ List(1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1),
+ 1 to 15
+ )
+ val needles = List[Seq[Int]](
+ Array(7,8,9,10),
+ Vector(99,99,99),
+ List(1,1,1,1,1,2),
+ 5 to 9
+ )
+ (haystacks zip needles) foreach {
+ case (hay, nee) =>
+ println(hay.indexOfSlice(nee,2) + " " + hay.lastIndexOfSlice(nee,13))
+ }
+ }
+}
diff --git a/test/files/run/large_code.check b/test/files/run/large_code.check
new file mode 100644
index 0000000..6ad5096
--- /dev/null
+++ b/test/files/run/large_code.check
@@ -0,0 +1,3 @@
+newSource1.scala:1: error: Could not write class BigEnoughToFail because it exceeds JVM code size limits. Method tooLong's code too large!
+class BigEnoughToFail {
+ ^
diff --git a/test/files/run/large_code.scala b/test/files/run/large_code.scala
new file mode 100644
index 0000000..f9d7f8c
--- /dev/null
+++ b/test/files/run/large_code.scala
@@ -0,0 +1,24 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+// a cold run of partest takes about 15s for this test on my laptop
+object Test extends DirectTest {
+ override def extraSettings: String = "-usejavacp -d " + testOutput.path
+
+ // test that we hit the code size limit and error out gracefully
+ // 5958 is the magic number (2^16/11 -- each `a(1,2,3,4,5,6)` is 11 bytes of bytecode)
+ override def code
+ = s"""
+ |class BigEnoughToFail {
+ | def a(a: Int, b: Int, c: Int, d: Int, e: Int, f: Int): Unit = {}
+ | def tooLong: Unit = {
+ | ${(1 to 5958) map (_ => "a(1,2,3,4,5,6)") mkString(";")}
+ | }
+ |}""".stripMargin.trim
+
+ override def show(): Unit = {
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+}
diff --git a/test/files/run/lazy-leaks.scala b/test/files/run/lazy-leaks.scala
index e3db55a..22a3770 100644
--- a/test/files/run/lazy-leaks.scala
+++ b/test/files/run/lazy-leaks.scala
@@ -9,7 +9,7 @@ object Test extends App
// This test requires 4 Mb of RAM if Lazy is discarding thunks
// It consumes 4 Gb of RAM if Lazy is not discarding thunks
- for (val idx <- Iterator.range(0, 1024)) {
+ for (idx <- Iterator.range(0, 1024)) {
val data = new Array[Int](1024*1024)
val lz: Lazy = new Lazy(data.length)
buffer += lz
diff --git a/test/files/run/lazy-locals.check b/test/files/run/lazy-locals.check
index 7d14da2..d1cc754 100644
--- a/test/files/run/lazy-locals.check
+++ b/test/files/run/lazy-locals.check
@@ -36,30 +36,19 @@ forced lazy val t02
forced lazy val t01
forced lazy val t00
Sum is: 496
-forced lazy val t32
-forced lazy val t31
-forced lazy val t30
-forced lazy val t29
-forced lazy val t28
-forced lazy val t27
-forced lazy val t26
-forced lazy val t25
-forced lazy val t24
-forced lazy val t23
-forced lazy val t22
-forced lazy val t21
-forced lazy val t20
-forced lazy val t19
-forced lazy val t18
-forced lazy val t17
-forced lazy val t16
-forced lazy val t15
-forced lazy val t14
-forced lazy val t13
-forced lazy val t12
-forced lazy val t11
-forced lazy val t10
-forced lazy val t09
+Sum again is: 496
+Sum again again is: 496
+forced lazy val t07
+forced lazy val t06
+forced lazy val t05
+forced lazy val t04
+forced lazy val t03
+forced lazy val t02
+forced lazy val t01
+forced lazy val t00
+Sum is: 28
+Sum again is: 28
+Sum again again is: 28
forced lazy val t08
forced lazy val t07
forced lazy val t06
@@ -69,7 +58,9 @@ forced lazy val t03
forced lazy val t02
forced lazy val t01
forced lazy val t00
-Sum is: 528
+Sum is: 36
+Sum again is: 36
+Sum again again is: 36
forced lazy val t at n = 0
42
forced lazy val t at n = 0
diff --git a/test/files/run/lazy-locals.scala b/test/files/run/lazy-locals.scala
index 696aeeb..aca15d0 100644
--- a/test/files/run/lazy-locals.scala
+++ b/test/files/run/lazy-locals.scala
@@ -13,7 +13,7 @@ object Test extends App {
1 + t + t
}
- /** test 32 lazy vals, which fills one bitmap int. */
+ /** test 32 lazy vals, which should spill over multiple byte bitmaps. */
def testLazy32 = {
lazy val t00 = { Console.println("forced lazy val t00"); 0 }
lazy val t01 = { Console.println("forced lazy val t01"); 1 }
@@ -51,12 +51,25 @@ object Test extends App {
val sum = t31 + t30 + t29 + t28 + t27 + t26 + t25 + t24 + t23 +
t22 + t21 + t20 + t19 + t18 + t17 + t16 + t15 + t14 +
t13 + t12 + t11 + t10 + t09 + t08 + t07 + t06 + t05 +
- t04 + t03 + t02 + t01 + t00;
- println("Sum is: " + sum);
+ t04 + t03 + t02 + t01 + t00
+ val sum2 = t31 + t30 + t29 + t28 + t27 + t26 + t25 + t24 + t23 +
+ t22 + t21 + t20 + t19 + t18 + t17 + t16 + t15 + t14 +
+ t13 + t12 + t11 + t10 + t09 + t08 + t07 + t06 + t05 +
+ t04 + t03 + t02 + t01 + t00
+ val sum3 = t00 + t01 + t02 + t03 + t04 + t05 + t06 + t07 + t08 +
+ t09 + t10 + t11 + t12 + t13 + t14 + t15 + t16 + t17 +
+ t18 + t19 + t20 + t21 + t22 + t23 + t24 + t25 + t26 +
+ t27 + t28 + t29 + t30 + t31
+
+
+
+ println("Sum is: " + sum)
+ println("Sum again is: " + sum2)
+ println("Sum again again is: " + sum3)
}
- /** test 32 lazy vals, which needs two bitmap ints. */
- def testLazy33 = {
+ /** test 8 lazy vals, which should fit one byte bitmap. */
+ def testLazy8 = {
lazy val t00 = { Console.println("forced lazy val t00"); 0 }
lazy val t01 = { Console.println("forced lazy val t01"); 1 }
lazy val t02 = { Console.println("forced lazy val t02"); 2 }
@@ -65,39 +78,38 @@ object Test extends App {
lazy val t05 = { Console.println("forced lazy val t05"); 5 }
lazy val t06 = { Console.println("forced lazy val t06"); 6 }
lazy val t07 = { Console.println("forced lazy val t07"); 7 }
- lazy val t08 = { Console.println("forced lazy val t08"); 8 }
- lazy val t09 = { Console.println("forced lazy val t09"); 9 }
- lazy val t10 = { Console.println("forced lazy val t10"); 10 }
- lazy val t11 = { Console.println("forced lazy val t11"); 11 }
- lazy val t12 = { Console.println("forced lazy val t12"); 12 }
- lazy val t13 = { Console.println("forced lazy val t13"); 13 }
- lazy val t14 = { Console.println("forced lazy val t14"); 14 }
- lazy val t15 = { Console.println("forced lazy val t15"); 15 }
- lazy val t16 = { Console.println("forced lazy val t16"); 16 }
- lazy val t17 = { Console.println("forced lazy val t17"); 17 }
- lazy val t18 = { Console.println("forced lazy val t18"); 18 }
- lazy val t19 = { Console.println("forced lazy val t19"); 19 }
- lazy val t20 = { Console.println("forced lazy val t20"); 20 }
- lazy val t21 = { Console.println("forced lazy val t21"); 21 }
- lazy val t22 = { Console.println("forced lazy val t22"); 22 }
- lazy val t23 = { Console.println("forced lazy val t23"); 23 }
- lazy val t24 = { Console.println("forced lazy val t24"); 24 }
- lazy val t25 = { Console.println("forced lazy val t25"); 25 }
- lazy val t26 = { Console.println("forced lazy val t26"); 26 }
- lazy val t27 = { Console.println("forced lazy val t27"); 27 }
- lazy val t28 = { Console.println("forced lazy val t28"); 28 }
- lazy val t29 = { Console.println("forced lazy val t29"); 29 }
- lazy val t30 = { Console.println("forced lazy val t30"); 30 }
- lazy val t31 = { Console.println("forced lazy val t31"); 31 }
- lazy val t32 = { Console.println("forced lazy val t32"); 32 }
- val sum = t32 + t31 + t30 + t29 + t28 + t27 + t26 + t25 + t24 + t23 +
- t22 + t21 + t20 + t19 + t18 + t17 + t16 + t15 + t14 +
- t13 + t12 + t11 + t10 + t09 + t08 + t07 + t06 + t05 +
- t04 + t03 + t02 + t01 + t00;
- println("Sum is: " + sum);
+ val sum = t07 + t06 + t05 + t04 + t03 + t02 + t01 + t00
+ val sum2 = t07 + t06 + t05 + t04 + t03 + t02 + t01 + t00
+ val sum3 = t00 + t01 + t02 + t03 + t04 + t05 + t06 + t07
+
+
+
+ println("Sum is: " + sum)
+ println("Sum again is: " + sum2)
+ println("Sum again again is: " + sum3)
}
+ /** test 9 lazy vals, which should spill over two bitmaps. */
+ def testLazy9 = {
+ lazy val t00 = { Console.println("forced lazy val t00"); 0 }
+ lazy val t01 = { Console.println("forced lazy val t01"); 1 }
+ lazy val t02 = { Console.println("forced lazy val t02"); 2 }
+ lazy val t03 = { Console.println("forced lazy val t03"); 3 }
+ lazy val t04 = { Console.println("forced lazy val t04"); 4 }
+ lazy val t05 = { Console.println("forced lazy val t05"); 5 }
+ lazy val t06 = { Console.println("forced lazy val t06"); 6 }
+ lazy val t07 = { Console.println("forced lazy val t07"); 7 }
+ lazy val t08 = { Console.println("forced lazy val t08"); 8 }
+
+ val sum = t08 + t07 + t06 + t05 + t04 + t03 + t02 + t01 + t00
+ val sum2 = t08 + t07 + t06 + t05 + t04 + t03 + t02 + t01 + t00
+ val sum3 = t00 + t01 + t02 + t03 + t04 + t05 + t06 + t07 + t08
+
+ println("Sum is: " + sum)
+ println("Sum again is: " + sum2)
+ println("Sum again again is: " + sum3)
+ }
/** test recursive method with lazy vals and a single forced */
def testLazyRec(n: Int): Int = {
@@ -185,7 +197,8 @@ object Test extends App {
println(testLazy)
testLazy32
- testLazy33
+ testLazy8
+ testLazy9
println(testLazyRec(5))
println(testLazyRecMany(5))
testRecVal
diff --git a/test/files/run/lift-and-unlift.scala b/test/files/run/lift-and-unlift.scala
index b944c70..a4a5d95 100644
--- a/test/files/run/lift-and-unlift.scala
+++ b/test/files/run/lift-and-unlift.scala
@@ -2,7 +2,7 @@ import Function.unlift
object Test {
def evens1(x: Int) = if (x % 2 == 0) Some(x) else None
- def evens2: PartialFunction[Int, Int] = {
+ val evens2: PartialFunction[Int, Int] = {
case x if x % 2 == 0 => x
}
@@ -21,7 +21,7 @@ object Test {
})
assert(f1 eq f3.lift)
- // Hmm, why is this not true:
- // assert(f2 eq f4.lift)
+ assert(f4 eq unlift(f2))
+ assert(f4 eq evens2)
}
}
diff --git a/test/files/run/lists-run.scala b/test/files/run/lists-run.scala
index dccc73f..ccfe5bc 100644
--- a/test/files/run/lists-run.scala
+++ b/test/files/run/lists-run.scala
@@ -1,99 +1,84 @@
-//############################################################################
-// Lists
-//############################################################################
-
-//############################################################################
-
-import testing.SUnit._
-
/** Test the Scala implementation of class <code>scala.List</code>.
*
* @author Stephane Micheloud
*/
-object Test extends TestConsoleMain {
- def suite = new TestSuite(
- Test_multiset, // multiset operations: union, intersect, diff
- Test1, //count, exists, filter, ..
- Test2, //#468
- Test3, //#1691
- Test4, //#1721
- Test5
- )
+object Test {
+ def main(args: Array[String]) {
+ Test_multiset.run() // multiset operations: union, intersect, diff
+ Test1.run() //count, exists, filter, ..
+ Test2.run() //#468
+ Test3.run() //#1691
+ Test4.run() //#1721
+ Test5.run()
+ }
}
-object Test_multiset extends TestCase("multiset") with Assert {
- override def enableStackTrace = false
- override def runTest {
+object Test_multiset {
+ def run() {
def isSubListOf[A](thiz: List[A], that: List[A]): Boolean =
thiz forall (that contains _)
val xs = List(1, 1, 2)
val ys = List(1, 2, 2, 3)
- assertEquals("xs_union_ys", List(1, 1, 2, 1, 2, 2, 3), xs union ys)
- assertEquals("ys_union_xs", List(1, 2, 2, 3, 1, 1, 2), ys union xs)
- assertEquals("xs_intersect_ys", List(1, 2), xs intersect ys)
- assertEquals("ys_intersect_xs", List(1, 2), ys intersect xs)
- assertEquals("xs_diff_ys", List(1), xs diff ys)
- assertEquals("ys_diff_xs", List(2, 3), ys diff xs)
- assertTrue("xs_subset_ys", isSubListOf(xs -- ys, xs diff ys))
+ assert(List(1, 1, 2, 1, 2, 2, 3) == (xs union ys), "xs_union_ys")
+ assert(List(1, 2, 2, 3, 1, 1, 2) == (ys union xs), "ys_union_xs")
+ assert(List(1, 2) == (xs intersect ys), "xs_intersect_ys")
+ assert(List(1, 2) == (ys intersect xs), "ys_intersect_xs")
+ assert(List(1) == (xs diff ys), "xs_diff_ys")
+ assert(List(2, 3) == (ys diff xs), "ys_diff_xs")
+ assert(isSubListOf(xs filterNot (ys contains), xs diff ys), "xs_subset_ys")
val zs = List(0, 1, 1, 2, 2, 2)
- assertEquals("zs_union_ys", List(0, 1, 1, 2, 2, 2, 1, 2, 2, 3), zs union ys)
- assertEquals("ys_union_zs", List(1, 2, 2, 3, 0, 1, 1, 2, 2, 2), ys union zs)
- assertEquals("zs_intersect_ys", List(1, 2, 2), zs intersect ys)
- assertEquals("ys_intersect_zs", List(1, 2, 2), ys intersect zs)
- assertEquals("zs_diff_ys", List(0, 1, 2), zs diff ys)
- assertEquals("ys_diff_zs", List(3), ys diff zs)
- assertTrue("xs_subset_ys", isSubListOf(zs -- ys, zs diff ys))
+ assert(List(0, 1, 1, 2, 2, 2, 1, 2, 2, 3) == (zs union ys), "zs_union_ys")
+ assert(List(1, 2, 2, 3, 0, 1, 1, 2, 2, 2) == (ys union zs), "ys_union_zs")
+ assert(List(1, 2, 2) == (zs intersect ys), "zs_intersect_ys")
+ assert(List(1, 2, 2) == (ys intersect zs), "ys_intersect_zs")
+ assert(List(0, 1, 2) == (zs diff ys), "zs_diff_ys")
+ assert(List(3) == (ys diff zs), "ys_diff_zs")
+ assert(isSubListOf(zs filterNot (ys contains), zs diff ys), "xs_subset_ys")
val ws = List(2)
- assertEquals("ws_union_ys", List(2, 1, 2, 2, 3), ws union ys)
- assertEquals("ys_union_ws", List(1, 2, 2, 3, 2), ys union ws)
- assertEquals("ws_intersect_ys", List(2), ws intersect ys)
- assertEquals("ys_intersect_ws", List(2), ys intersect ws)
- assertEquals("ws_diff_ys", List(), ws diff ys)
- assertEquals("ys_diff_ws", List(1, 2, 3), ys diff ws)
- assertTrue("ws_subset_ys", isSubListOf(ws -- ys, ws diff ys))
+ assert(List(2, 1, 2, 2, 3) == (ws union ys), "ws_union_ys")
+ assert(List(1, 2, 2, 3, 2) == (ys union ws), "ys_union_ws")
+ assert(List(2) == (ws intersect ys), "ws_intersect_ys")
+ assert(List(2) == (ys intersect ws), "ys_intersect_ws")
+ assert(List() == (ws diff ys), "ws_diff_ys")
+ assert(List(1, 2, 3) == (ys diff ws), "ys_diff_ws")
+ assert(isSubListOf(ws filterNot (ys contains), ws diff ys), "ws_subset_ys")
val vs = List(3, 2, 2, 1)
- assertEquals("xs_union_vs", List(1, 1, 2, 3, 2, 2, 1), xs union vs)
- assertEquals("vs_union_xs", List(3, 2, 2, 1, 1, 1, 2), vs union xs)
- assertEquals("xs_intersect_vs", List(1, 2), xs intersect vs)
- assertEquals("vs_intersect_xs", List(2, 1), vs intersect xs)
- assertEquals("xs_diff_vs", List(1), xs diff vs)
- assertEquals("vs_diff_xs", List(3, 2), vs diff xs)
- assertTrue("xs_subset_vs", isSubListOf(xs -- vs, xs diff vs))
+ assert(List(1, 1, 2, 3, 2, 2, 1) == (xs union vs), "xs_union_vs")
+ assert(List(3, 2, 2, 1, 1, 1, 2) == (vs union xs), "vs_union_xs")
+ assert(List(1, 2) == (xs intersect vs), "xs_intersect_vs")
+ assert(List(2, 1) == (vs intersect xs), "vs_intersect_xs")
+ assert(List(1) == (xs diff vs), "xs_diff_vs")
+ assert(List(3, 2) == (vs diff xs), "vs_diff_xs")
+ assert(isSubListOf(xs filterNot (vs contains), xs diff vs), "xs_subset_vs")
// tests adapted from Thomas Jung
- assertTrue(
- "be symmetric after sorting", {
- def sort(zs: List[Int]) = zs sort ( _ > _ )
+ assert({
+ def sort(zs: List[Int]) = zs sortWith ( _ > _ )
sort(xs intersect ys) == sort(ys intersect xs)
- })
- assertTrue(
- "obey min cardinality", {
+ }, "be symmetric after sorting")
+ assert({
def cardinality[A](zs: List[A], e: A): Int = zs count (e == _)
val intersection = xs intersect ys
xs forall (e => cardinality(intersection, e) == (cardinality(xs, e)
min cardinality(ys, e)))
- })
- assertTrue(
- "maintain order", {
+ }, "obey min cardinality")
+ assert({
val intersection = xs intersect ys
val unconsumed = xs.foldLeft(intersection){(rest, e) =>
if (! rest.isEmpty && e == rest.head) rest.tail else rest
}
unconsumed.isEmpty
- })
- assertTrue(
- "has the list as again intersection",
- xs == (xs intersect xs)
- )
+ }, "maintain order")
+ assert(xs == (xs intersect xs),
+ "has the list as again intersection")
}
}
-object Test1 extends TestCase("ctor") with Assert {
- override def enableStackTrace = false
- override def runTest {
+object Test1 {
+ def run() {
val xs1 = List(1, 2, 3)
val xs2 = List('a', 'b')
val xs3 = List(List(1, 2), List(4, 5))
@@ -103,100 +88,96 @@ object Test1 extends TestCase("ctor") with Assert {
{
val n1 = xs1 count { e => e % 2 != 0 }
val n2 = xs4 count { e => e < 5 }
- assertEquals("check_count", 4, n1 + n2)
+ assert(4 == (n1 + n2), "check_count")
}
{
val b1 = xs1 exists { e => e % 2 == 0 }
val b2 = xs4 exists { e => e == 5 }
- assertEquals("check_exists", false , b1 & b2)
+ assert(!(b1 & b2), "check_exists")
}
{
val ys1 = xs1 filter { e => e % 2 == 0 }
val ys2 = xs4 filter { e => e < 5 }
- assertEquals("check_filter", 3, ys1.length + ys2.length)
+ assert(3 == ys1.length + ys2.length, "check_filter")
}
{
val n1 = xs1.foldLeft(0)((e1, e2) => e1 + e2)
val ys1 = xs4.foldLeft(List[Int]())((e1, e2) => e2 :: e1)
- assertEquals("check_foldLeft", 10, n1 + ys1.length)
+ assert(10 == n1 + ys1.length, "check_foldLeft")
}
{
val b1 = xs1 forall { e => e < 10}
val b2 = xs4 forall { e => e % 2 == 0 }
- assertEquals("check_forall", true, b1 & b2)
+ assert(b1 & b2, "check_forall")
}
{
val ys1 = xs1 filterNot { e => e % 2 != 0 }
val ys2 = xs4 filterNot { e => e < 5 }
- assertEquals("check_remove", 3, ys1.length + ys2.length)
+ assert(3 == ys1.length + ys2.length, "check_remove")
}
{
val ys1 = xs1 zip xs2
val ys2 = xs1 zip xs3
- assertEquals("check_zip", 4, ys1.length + ys2.length)
+ assert(4 == ys1.length + ys2.length, "check_zip")
}
{
val ys1 = xs1.zipAll(xs2, 0, '_')
val ys2 = xs2.zipAll(xs1, '_', 0)
val ys3 = xs1.zipAll(xs3, 0, List(-1))
- assertEquals("check_zipAll", 9, ys1.length + ys2.length + ys3.length)
+ assert(9 == ys1.length + ys2.length + ys3.length, "check_zipAll")
}
}
}
-object Test2 extends TestCase("t0468") with Assert {
- override def enableStackTrace = false
- override def runTest {
+object Test2 {
+ def run() {
val xs1 = List(1, 2, 3)
val xs2 = List(0)
val ys1 = xs1 ::: List(4)
- assertEquals("check_:::", List(1, 2, 3, 4), ys1)
+ assert(List(1, 2, 3, 4) == ys1, "check_:::")
- val ys2 = ys1 - 4
- assertEquals("check_-", xs1, ys2)
+ val ys2 = ys1 filterNot (_ == 4)
+ assert(xs1 == ys2, "check_-")
val n2 = (xs1 ++ ys1).length
val n3 = (xs1 ++ Nil).length
- val n4 = (xs1 ++ ((new collection.mutable.ArrayBuffer[Int]) + 0)).length
- assertEquals("check_++", 14, n2 + n3 + n4)
+ val n4 = (xs1 ++ ((new collection.mutable.ArrayBuffer[Int]) += 0)).length
+ assert(14 == n2 + n3 + n4, "check_++")
}
}
-object Test3 extends TestCase("t1691") with Assert {
- override def enableStackTrace = false
- override def runTest {
+object Test3 {
+ def run() {
try {
List.range(1, 10, 0)
} catch {
case e: IllegalArgumentException => ()
case _ => throw new Error("List.range(1, 10, 0)")
}
- assertEquals(List.range(10, 0, -2),
- List(10, 8, 6, 4, 2))
+ assert(List.range(10, 0, -2) == List(10, 8, 6, 4, 2))
}
}
-object Test4 extends TestCase("t1721") with Assert {
- override def enableStackTrace = false
- override def runTest {
- assertTrue(List(1,2,3).endsWith(List(2,3)))
- assertFalse(List(1,2,3).endsWith(List(1,3)))
- assertTrue(List(1,2,3).endsWith(List()))
- assertFalse(List(1,2,3).endsWith(List(0,1,2,3)))
- assertTrue(List(1,2,3).endsWith(List(1,2,3)))
- assertFalse(List().endsWith(List(1,2,3)))
- assertTrue(List().endsWith(List()))
+object Test4 {
+ def run() {
+ assert(List(1,2,3).endsWith(List(2,3)))
+ assert(!List(1,2,3).endsWith(List(1,3)))
+ assert(List(1,2,3).endsWith(List()))
+ assert(!List(1,2,3).endsWith(List(0,1,2,3)))
+ assert(List(1,2,3).endsWith(List(1,2,3)))
+ assert(!List().endsWith(List(1,2,3)))
+ assert(List().endsWith(List()))
}
}
-object Test5 extends TestCase("list pattern matching") {
+object Test5 {
def show(xs: List[String]) = xs match {
case "foo" :: args => args.toString
case List(x) => x.toString
case Nil => "Nil"
}
- override def runTest {
+ def run() {
assert(show(List()) == "Nil")
assert(show(List("a")) == "a")
assert(show(List("foo", "b")) == "List(b)")
diff --git a/test/files/run/lub-visibility.check b/test/files/run/lub-visibility.check
index 359ccbb..3461d1b 100644
--- a/test/files/run/lub-visibility.check
+++ b/test/files/run/lub-visibility.check
@@ -8,8 +8,7 @@ scala> // should infer List[scala.collection.immutable.Seq[Nothing]]
scala> // but reverted that for SI-5534.
scala> val x = List(List(), Vector())
-x: List[scala.collection.immutable.Seq[Nothing]] = List(List(), Vector())
-
+x: List[scala.collection.immutable.Seq[Nothing] with scala.collection.AbstractSeq[Nothing]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq with scala.collection.AbstractSeq{def dropRight(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def takeRight(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.AbstractSeq[Any]; def drop(n: Int): scala.collection.immutable.Seq[Any] with scala.collection.Abstr [...]
scala>
scala>
diff --git a/test/files/run/macro-abort-fresh.check b/test/files/run/macro-abort-fresh.check
new file mode 100644
index 0000000..75ad5e7
--- /dev/null
+++ b/test/files/run/macro-abort-fresh.check
@@ -0,0 +1,6 @@
+$1$
+qwe1
+qwe2
+reflective compilation has failed:
+
+blargh
diff --git a/test/files/run/macro-abort-fresh.flags b/test/files/run/macro-abort-fresh.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-abort-fresh.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-abort-fresh/Macros_1.scala b/test/files/run/macro-abort-fresh/Macros_1.scala
new file mode 100644
index 0000000..af1e292
--- /dev/null
+++ b/test/files/run/macro-abort-fresh/Macros_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.macros.Context
+
+object Impls {
+ def impl(c: Context) = {
+ import c.universe._
+ println(c.fresh())
+ println(c.fresh("qwe"))
+ println(c.fresh(newTypeName("qwe")))
+ c.abort(NoPosition, "blargh")
+ }
+}
+
+object Macros {
+ def foo = macro Impls.impl
+}
\ No newline at end of file
diff --git a/test/files/run/macro-abort-fresh/Test_2.scala b/test/files/run/macro-abort-fresh/Test_2.scala
new file mode 100644
index 0000000..0b9986e
--- /dev/null
+++ b/test/files/run/macro-abort-fresh/Test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ val tree = Select(Ident(newTermName("Macros")), newTermName("foo"))
+ try cm.mkToolBox().eval(tree)
+ catch { case ex: Throwable => println(ex.getMessage) }
+}
\ No newline at end of file
diff --git a/test/files/run/syncchannel.check b/test/files/run/macro-auto-duplicate.check
similarity index 100%
copy from test/files/run/syncchannel.check
copy to test/files/run/macro-auto-duplicate.check
diff --git a/test/files/run/macro-auto-duplicate/Macros_1.scala b/test/files/run/macro-auto-duplicate/Macros_1.scala
new file mode 100644
index 0000000..e3df05b
--- /dev/null
+++ b/test/files/run/macro-auto-duplicate/Macros_1.scala
@@ -0,0 +1,17 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ val x = Ident(newTermName("x"))
+ def defAndUseX(rhs: Tree) = {
+ Block(List(ValDef(NoMods, newTermName("x"), TypeTree(), rhs)), x)
+ }
+ val xi4 = defAndUseX(Literal(Constant(4)))
+ val xs2 = defAndUseX(Literal(Constant("2")))
+ c.Expr[String](Apply(Select(xi4, newTermName("$plus")), List(xs2)))
+ }
+
+ def foo = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/macro-auto-duplicate/Test_2.scala b/test/files/run/macro-auto-duplicate/Test_2.scala
new file mode 100644
index 0000000..f697da6
--- /dev/null
+++ b/test/files/run/macro-auto-duplicate/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println(Macros.foo)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-basic-ma-md-mi.check b/test/files/run/macro-basic-ma-md-mi.check
new file mode 100644
index 0000000..b74e882
--- /dev/null
+++ b/test/files/run/macro-basic-ma-md-mi.check
@@ -0,0 +1 @@
+31
\ No newline at end of file
diff --git a/test/files/run/macro-basic-ma-md-mi.flags b/test/files/run/macro-basic-ma-md-mi.flags
new file mode 100644
index 0000000..5e5dd6c
--- /dev/null
+++ b/test/files/run/macro-basic-ma-md-mi.flags
@@ -0,0 +1 @@
+-language:experimental.macros
diff --git a/test/files/run/macro-basic-ma-md-mi/Impls_1.scala b/test/files/run/macro-basic-ma-md-mi/Impls_1.scala
new file mode 100644
index 0000000..646634c
--- /dev/null
+++ b/test/files/run/macro-basic-ma-md-mi/Impls_1.scala
@@ -0,0 +1,21 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+ c.Expr[Int](body)
+ }
+
+ def bar(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
+ c.Expr[Int](body)
+ }
+
+ def quux(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+ c.Expr[Int](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-basic-ma-md-mi/Macros_2.scala b/test/files/run/macro-basic-ma-md-mi/Macros_2.scala
new file mode 100644
index 0000000..5279043
--- /dev/null
+++ b/test/files/run/macro-basic-ma-md-mi/Macros_2.scala
@@ -0,0 +1,10 @@
+object Macros {
+ object Shmacros {
+ def foo(x: Int): Int = macro Impls.foo
+ }
+ def bar(x: Int): Int = macro Impls.bar
+}
+
+class Macros {
+ def quux(x: Int): Int = macro Impls.quux
+}
\ No newline at end of file
diff --git a/test/files/run/macro-basic-ma-md-mi/Test_3.scala b/test/files/run/macro-basic-ma-md-mi/Test_3.scala
new file mode 100644
index 0000000..e9a10e2
--- /dev/null
+++ b/test/files/run/macro-basic-ma-md-mi/Test_3.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros.Shmacros._
+ println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+}
\ No newline at end of file
diff --git a/test/files/run/macro-basic-ma-mdmi.check b/test/files/run/macro-basic-ma-mdmi.check
new file mode 100644
index 0000000..b74e882
--- /dev/null
+++ b/test/files/run/macro-basic-ma-mdmi.check
@@ -0,0 +1 @@
+31
\ No newline at end of file
diff --git a/test/files/run/macro-basic-ma-mdmi.flags b/test/files/run/macro-basic-ma-mdmi.flags
new file mode 100644
index 0000000..5e5dd6c
--- /dev/null
+++ b/test/files/run/macro-basic-ma-mdmi.flags
@@ -0,0 +1 @@
+-language:experimental.macros
diff --git a/test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala b/test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala
new file mode 100644
index 0000000..aa1e52e
--- /dev/null
+++ b/test/files/run/macro-basic-ma-mdmi/Impls_Macros_1.scala
@@ -0,0 +1,32 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+ c.Expr[Int](body)
+ }
+
+ def bar(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
+ c.Expr[Int](body)
+ }
+
+ def quux(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+ c.Expr[Int](body)
+ }
+}
+
+object Macros {
+ object Shmacros {
+ def foo(x: Int): Int = macro Impls.foo
+ }
+ def bar(x: Int): Int = macro Impls.bar
+}
+
+class Macros {
+ def quux(x: Int): Int = macro Impls.quux
+}
\ No newline at end of file
diff --git a/test/files/run/macro-basic-ma-mdmi/Test_2.scala b/test/files/run/macro-basic-ma-mdmi/Test_2.scala
new file mode 100644
index 0000000..e9a10e2
--- /dev/null
+++ b/test/files/run/macro-basic-ma-mdmi/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros.Shmacros._
+ println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+}
\ No newline at end of file
diff --git a/test/files/run/macro-basic-mamd-mi.check b/test/files/run/macro-basic-mamd-mi.check
new file mode 100644
index 0000000..b74e882
--- /dev/null
+++ b/test/files/run/macro-basic-mamd-mi.check
@@ -0,0 +1 @@
+31
\ No newline at end of file
diff --git a/test/files/run/macro-basic-mamd-mi.flags b/test/files/run/macro-basic-mamd-mi.flags
new file mode 100644
index 0000000..5e5dd6c
--- /dev/null
+++ b/test/files/run/macro-basic-mamd-mi.flags
@@ -0,0 +1 @@
+-language:experimental.macros
diff --git a/test/files/run/macro-basic-mamd-mi/Impls_1.scala b/test/files/run/macro-basic-mamd-mi/Impls_1.scala
new file mode 100644
index 0000000..061aa2d
--- /dev/null
+++ b/test/files/run/macro-basic-mamd-mi/Impls_1.scala
@@ -0,0 +1,19 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1)))))
+ }
+
+ def bar(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ c.Expr(Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2)))))
+ }
+
+ def quux(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+ c.Expr[Int](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-basic-mamd-mi/Macros_Test_2.scala b/test/files/run/macro-basic-mamd-mi/Macros_Test_2.scala
new file mode 100644
index 0000000..d374689
--- /dev/null
+++ b/test/files/run/macro-basic-mamd-mi/Macros_Test_2.scala
@@ -0,0 +1,15 @@
+object Macros {
+ object Shmacros {
+ def foo(x: Int): Int = macro Impls.foo
+ }
+ def bar(x: Int): Int = macro Impls.bar
+}
+
+class Macros {
+ def quux(x: Int): Int = macro Impls.quux
+}
+
+object Test extends App {
+ import Macros.Shmacros._
+ println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+}
\ No newline at end of file
diff --git a/test/files/run/macro-bodyexpandstoimpl.check b/test/files/run/macro-bodyexpandstoimpl.check
new file mode 100644
index 0000000..f70d7bb
--- /dev/null
+++ b/test/files/run/macro-bodyexpandstoimpl.check
@@ -0,0 +1 @@
+42
\ No newline at end of file
diff --git a/test/files/run/macro-bodyexpandstoimpl.flags b/test/files/run/macro-bodyexpandstoimpl.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-bodyexpandstoimpl.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala b/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala
new file mode 100644
index 0000000..0ca0be5
--- /dev/null
+++ b/test/files/run/macro-bodyexpandstoimpl/Impls_1.scala
@@ -0,0 +1,12 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int]) = x
+
+ def refToFoo(dummy: Int) = macro refToFoo_impl
+ def refToFoo_impl(c: Ctx)(dummy: c.Expr[Int]) = {
+ import c.universe._
+ val body = Select(Ident(newTermName("Impls")), newTermName("foo"))
+ c.Expr[Int](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-bodyexpandstoimpl/Macros_Test_2.scala b/test/files/run/macro-bodyexpandstoimpl/Macros_Test_2.scala
new file mode 100644
index 0000000..b589d4b
--- /dev/null
+++ b/test/files/run/macro-bodyexpandstoimpl/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Macros {
+ def foo(x: Int) = macro Impls.refToFoo(42)
+}
+
+object Test extends App {
+ import Macros._
+ println(foo(42))
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-annotation.check b/test/files/run/macro-declared-in-annotation.check
new file mode 100644
index 0000000..7658ad2
--- /dev/null
+++ b/test/files/run/macro-declared-in-annotation.check
@@ -0,0 +1 @@
+it works
diff --git a/test/files/run/macro-declared-in-annotation.flags b/test/files/run/macro-declared-in-annotation.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-declared-in-annotation.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-annotation/Impls_1.scala b/test/files/run/macro-declared-in-annotation/Impls_1.scala
new file mode 100644
index 0000000..a11ee29
--- /dev/null
+++ b/test/files/run/macro-declared-in-annotation/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Literal(Constant("this is deprecated")))
+ c.Expr[String](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-annotation/Macros_2.scala b/test/files/run/macro-declared-in-annotation/Macros_2.scala
new file mode 100644
index 0000000..40d71c6
--- /dev/null
+++ b/test/files/run/macro-declared-in-annotation/Macros_2.scala
@@ -0,0 +1,8 @@
+class foo(val bar: String) extends annotation.StaticAnnotation
+
+object Api {
+ // foo in ann must have a different name
+ // otherwise, we get bitten by https://issues.scala-lang.org/browse/SI-5544
+ @foo({def fooInAnn = macro Impls.foo; fooInAnn})
+ def foo = println("it works")
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-annotation/Test_3.scala b/test/files/run/macro-declared-in-annotation/Test_3.scala
new file mode 100644
index 0000000..866487f
--- /dev/null
+++ b/test/files/run/macro-declared-in-annotation/Test_3.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Api.foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-anonymous.check b/test/files/run/macro-declared-in-anonymous.check
new file mode 100644
index 0000000..09b8d01
--- /dev/null
+++ b/test/files/run/macro-declared-in-anonymous.check
@@ -0,0 +1,2 @@
+prefix = Expr[Nothing](Test.this.macros)
+it works
diff --git a/test/files/run/macro-declared-in-anonymous.flags b/test/files/run/macro-declared-in-anonymous.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-declared-in-anonymous.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-anonymous/Impls_1.scala b/test/files/run/macro-declared-in-anonymous/Impls_1.scala
new file mode 100644
index 0000000..6f06f6d
--- /dev/null
+++ b/test/files/run/macro-declared-in-anonymous/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-anonymous/Macros_Test_2.scala b/test/files/run/macro-declared-in-anonymous/Macros_Test_2.scala
new file mode 100644
index 0000000..8bd8c17
--- /dev/null
+++ b/test/files/run/macro-declared-in-anonymous/Macros_Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ val macros = new { def foo = macro Impls.foo }
+ macros.foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-block.check b/test/files/run/macro-declared-in-block.check
new file mode 100644
index 0000000..5e687db
--- /dev/null
+++ b/test/files/run/macro-declared-in-block.check
@@ -0,0 +1,2 @@
+prefix = Expr[Nothing](<empty>)
+it works
diff --git a/test/files/run/macro-declared-in-block.flags b/test/files/run/macro-declared-in-block.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-declared-in-block.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-block/Impls_1.scala b/test/files/run/macro-declared-in-block/Impls_1.scala
new file mode 100644
index 0000000..6f06f6d
--- /dev/null
+++ b/test/files/run/macro-declared-in-block/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-block/Macros_Test_2.scala b/test/files/run/macro-declared-in-block/Macros_Test_2.scala
new file mode 100644
index 0000000..69088e2
--- /dev/null
+++ b/test/files/run/macro-declared-in-block/Macros_Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ {
+ def foo = macro Impls.foo
+ foo
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-class.check b/test/files/run/macro-declared-in-class-class.check
new file mode 100644
index 0000000..47248d7
--- /dev/null
+++ b/test/files/run/macro-declared-in-class-class.check
@@ -0,0 +1,2 @@
+prefix = Expr[Nothing](new Test.this.outer.Macros())
+it works
diff --git a/test/files/run/macro-declared-in-class-class.flags b/test/files/run/macro-declared-in-class-class.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-declared-in-class-class.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-class/Impls_1.scala b/test/files/run/macro-declared-in-class-class/Impls_1.scala
new file mode 100644
index 0000000..6f06f6d
--- /dev/null
+++ b/test/files/run/macro-declared-in-class-class/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-class/Macros_Test_2.scala b/test/files/run/macro-declared-in-class-class/Macros_Test_2.scala
new file mode 100644
index 0000000..871857a
--- /dev/null
+++ b/test/files/run/macro-declared-in-class-class/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+class Macros {
+ class Macros {
+ def foo = macro Impls.foo
+ }
+}
+
+object Test extends App {
+ val outer = new Macros()
+ new outer.Macros().foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-object.check b/test/files/run/macro-declared-in-class-object.check
new file mode 100644
index 0000000..35af59e
--- /dev/null
+++ b/test/files/run/macro-declared-in-class-object.check
@@ -0,0 +1,2 @@
+prefix = Expr[Nothing](Test.this.outer.Macros)
+it works
diff --git a/test/files/run/macro-declared-in-class-object.flags b/test/files/run/macro-declared-in-class-object.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-declared-in-class-object.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-object/Impls_1.scala b/test/files/run/macro-declared-in-class-object/Impls_1.scala
new file mode 100644
index 0000000..6f06f6d
--- /dev/null
+++ b/test/files/run/macro-declared-in-class-object/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class-object/Macros_Test_2.scala b/test/files/run/macro-declared-in-class-object/Macros_Test_2.scala
new file mode 100644
index 0000000..994f9fe
--- /dev/null
+++ b/test/files/run/macro-declared-in-class-object/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+class Macros {
+ object Macros {
+ def foo = macro Impls.foo
+ }
+}
+
+object Test extends App {
+ val outer = new Macros()
+ outer.Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class.check b/test/files/run/macro-declared-in-class.check
new file mode 100644
index 0000000..a1c1d7a
--- /dev/null
+++ b/test/files/run/macro-declared-in-class.check
@@ -0,0 +1,2 @@
+prefix = Expr[Nothing](new Macros())
+it works
diff --git a/test/files/run/macro-declared-in-class.flags b/test/files/run/macro-declared-in-class.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-declared-in-class.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class/Impls_1.scala b/test/files/run/macro-declared-in-class/Impls_1.scala
new file mode 100644
index 0000000..6f06f6d
--- /dev/null
+++ b/test/files/run/macro-declared-in-class/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-class/Macros_Test_2.scala b/test/files/run/macro-declared-in-class/Macros_Test_2.scala
new file mode 100644
index 0000000..1b9d13e
--- /dev/null
+++ b/test/files/run/macro-declared-in-class/Macros_Test_2.scala
@@ -0,0 +1,7 @@
+class Macros {
+ def foo = macro Impls.foo
+}
+
+object Test extends App {
+ new Macros().foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-default-param.check b/test/files/run/macro-declared-in-default-param.check
new file mode 100644
index 0000000..6decd7a
--- /dev/null
+++ b/test/files/run/macro-declared-in-default-param.check
@@ -0,0 +1,5 @@
+prefix = Expr[Nothing](<empty>)
+it works
+it works
+prefix = Expr[Nothing](<empty>)
+it works
diff --git a/test/files/run/macro-declared-in-default-param.flags b/test/files/run/macro-declared-in-default-param.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-declared-in-default-param.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-default-param/Impls_1.scala b/test/files/run/macro-declared-in-default-param/Impls_1.scala
new file mode 100644
index 0000000..db1e5c7
--- /dev/null
+++ b/test/files/run/macro-declared-in-default-param/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Literal(Constant("it works")))
+ c.Expr[String](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-default-param/Macros_Test_2.scala b/test/files/run/macro-declared-in-default-param/Macros_Test_2.scala
new file mode 100644
index 0000000..356029e
--- /dev/null
+++ b/test/files/run/macro-declared-in-default-param/Macros_Test_2.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+ def foo(bar: String = { def foo = macro Impls.foo; foo }) = println(bar)
+
+ foo()
+ foo("it works")
+ foo()
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-implicit-class.check b/test/files/run/macro-declared-in-implicit-class.check
new file mode 100644
index 0000000..5dc968c
--- /dev/null
+++ b/test/files/run/macro-declared-in-implicit-class.check
@@ -0,0 +1,2 @@
+prefix = Expr[Nothing](Macros.foo("2"))
+Some(2)
diff --git a/test/files/run/macro-declared-in-implicit-class.flags b/test/files/run/macro-declared-in-implicit-class.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-declared-in-implicit-class.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-implicit-class/Impls_Macros_1.scala b/test/files/run/macro-declared-in-implicit-class/Impls_Macros_1.scala
new file mode 100644
index 0000000..837b306
--- /dev/null
+++ b/test/files/run/macro-declared-in-implicit-class/Impls_Macros_1.scala
@@ -0,0 +1,19 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def toOptionOfInt(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Ident(definitions.SomeModule), List(Select(Select(prefix.tree, newTermName("x")), newTermName("toInt")))))
+ c.Expr[Option[Int]](body)
+ }
+}
+
+object Macros {
+ implicit def foo(x: String): Foo = new Foo(x)
+
+ class Foo(val x: String) {
+ def toOptionOfInt = macro Impls.toOptionOfInt
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-implicit-class/Test_2.scala b/test/files/run/macro-declared-in-implicit-class/Test_2.scala
new file mode 100644
index 0000000..d0bc9cc
--- /dev/null
+++ b/test/files/run/macro-declared-in-implicit-class/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros._
+ println("2".toOptionOfInt)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-method.check b/test/files/run/macro-declared-in-method.check
new file mode 100644
index 0000000..5e687db
--- /dev/null
+++ b/test/files/run/macro-declared-in-method.check
@@ -0,0 +1,2 @@
+prefix = Expr[Nothing](<empty>)
+it works
diff --git a/test/files/run/macro-declared-in-method.flags b/test/files/run/macro-declared-in-method.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-declared-in-method.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-method/Impls_1.scala b/test/files/run/macro-declared-in-method/Impls_1.scala
new file mode 100644
index 0000000..6f06f6d
--- /dev/null
+++ b/test/files/run/macro-declared-in-method/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-method/Macros_Test_2.scala b/test/files/run/macro-declared-in-method/Macros_Test_2.scala
new file mode 100644
index 0000000..ed5c8b7
--- /dev/null
+++ b/test/files/run/macro-declared-in-method/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ def bar() = {
+ def foo = macro Impls.foo
+ foo
+ }
+
+ bar()
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-class.check b/test/files/run/macro-declared-in-object-class.check
new file mode 100644
index 0000000..47248d7
--- /dev/null
+++ b/test/files/run/macro-declared-in-object-class.check
@@ -0,0 +1,2 @@
+prefix = Expr[Nothing](new Test.this.outer.Macros())
+it works
diff --git a/test/files/run/macro-declared-in-object-class.flags b/test/files/run/macro-declared-in-object-class.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-declared-in-object-class.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-class/Impls_1.scala b/test/files/run/macro-declared-in-object-class/Impls_1.scala
new file mode 100644
index 0000000..6f06f6d
--- /dev/null
+++ b/test/files/run/macro-declared-in-object-class/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-class/Macros_Test_2.scala b/test/files/run/macro-declared-in-object-class/Macros_Test_2.scala
new file mode 100644
index 0000000..204deed
--- /dev/null
+++ b/test/files/run/macro-declared-in-object-class/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+object Macros {
+ class Macros {
+ def foo = macro Impls.foo
+ }
+}
+
+object Test extends App {
+ val outer = Macros
+ new outer.Macros().foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-object.check b/test/files/run/macro-declared-in-object-object.check
new file mode 100644
index 0000000..35af59e
--- /dev/null
+++ b/test/files/run/macro-declared-in-object-object.check
@@ -0,0 +1,2 @@
+prefix = Expr[Nothing](Test.this.outer.Macros)
+it works
diff --git a/test/files/run/macro-declared-in-object-object.flags b/test/files/run/macro-declared-in-object-object.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-declared-in-object-object.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-object/Impls_1.scala b/test/files/run/macro-declared-in-object-object/Impls_1.scala
new file mode 100644
index 0000000..6f06f6d
--- /dev/null
+++ b/test/files/run/macro-declared-in-object-object/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object-object/Macros_Test_2.scala b/test/files/run/macro-declared-in-object-object/Macros_Test_2.scala
new file mode 100644
index 0000000..e261a50
--- /dev/null
+++ b/test/files/run/macro-declared-in-object-object/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+object Macros {
+ object Macros {
+ def foo = macro Impls.foo
+ }
+}
+
+object Test extends App {
+ val outer = Macros
+ outer.Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object.check b/test/files/run/macro-declared-in-object.check
new file mode 100644
index 0000000..4d955a9
--- /dev/null
+++ b/test/files/run/macro-declared-in-object.check
@@ -0,0 +1,2 @@
+prefix = Expr[Nothing](Macros)
+it works
diff --git a/test/files/run/macro-declared-in-object.flags b/test/files/run/macro-declared-in-object.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-declared-in-object.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object/Impls_1.scala b/test/files/run/macro-declared-in-object/Impls_1.scala
new file mode 100644
index 0000000..6f06f6d
--- /dev/null
+++ b/test/files/run/macro-declared-in-object/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-object/Macros_Test_2.scala b/test/files/run/macro-declared-in-object/Macros_Test_2.scala
new file mode 100644
index 0000000..a5a4862
--- /dev/null
+++ b/test/files/run/macro-declared-in-object/Macros_Test_2.scala
@@ -0,0 +1,7 @@
+object Macros {
+ def foo = macro Impls.foo
+}
+
+object Test extends App {
+ Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-package-object.check b/test/files/run/macro-declared-in-package-object.check
new file mode 100644
index 0000000..bc00691
--- /dev/null
+++ b/test/files/run/macro-declared-in-package-object.check
@@ -0,0 +1,2 @@
+prefix = Expr[Nothing](Macros.`package`)
+it works
diff --git a/test/files/run/macro-declared-in-package-object.flags b/test/files/run/macro-declared-in-package-object.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-declared-in-package-object.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-package-object/Impls_1.scala b/test/files/run/macro-declared-in-package-object/Impls_1.scala
new file mode 100644
index 0000000..6f06f6d
--- /dev/null
+++ b/test/files/run/macro-declared-in-package-object/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-package-object/Macros_Test_2.scala b/test/files/run/macro-declared-in-package-object/Macros_Test_2.scala
new file mode 100644
index 0000000..54a5962
--- /dev/null
+++ b/test/files/run/macro-declared-in-package-object/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+package object Macros {
+ def foo = macro Impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-refinement.check b/test/files/run/macro-declared-in-refinement.check
new file mode 100644
index 0000000..09b8d01
--- /dev/null
+++ b/test/files/run/macro-declared-in-refinement.check
@@ -0,0 +1,2 @@
+prefix = Expr[Nothing](Test.this.macros)
+it works
diff --git a/test/files/run/macro-declared-in-refinement.flags b/test/files/run/macro-declared-in-refinement.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-declared-in-refinement.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-refinement/Impls_1.scala b/test/files/run/macro-declared-in-refinement/Impls_1.scala
new file mode 100644
index 0000000..6f06f6d
--- /dev/null
+++ b/test/files/run/macro-declared-in-refinement/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-refinement/Macros_Test_2.scala b/test/files/run/macro-declared-in-refinement/Macros_Test_2.scala
new file mode 100644
index 0000000..f746c2d
--- /dev/null
+++ b/test/files/run/macro-declared-in-refinement/Macros_Test_2.scala
@@ -0,0 +1,6 @@
+class Base
+
+object Test extends App {
+ val macros = new Base { def foo = macro Impls.foo }
+ macros.foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-trait.check b/test/files/run/macro-declared-in-trait.check
new file mode 100644
index 0000000..0d70ac7
--- /dev/null
+++ b/test/files/run/macro-declared-in-trait.check
@@ -0,0 +1,15 @@
+prefix = Expr[Nothing]({
+ final class $anon extends AnyRef with Base {
+ def <init>(): anonymous class $anon = {
+ $anon.super.<init>();
+ ()
+ };
+ <empty>
+ };
+ new $anon()
+})
+it works
+prefix = Expr[Nothing](Macros)
+it works
+prefix = Expr[Nothing](new Macros())
+it works
diff --git a/test/files/run/macro-declared-in-trait.flags b/test/files/run/macro-declared-in-trait.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-declared-in-trait.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-trait/Impls_1.scala b/test/files/run/macro-declared-in-trait/Impls_1.scala
new file mode 100644
index 0000000..6f06f6d
--- /dev/null
+++ b/test/files/run/macro-declared-in-trait/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val printPrefix = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("prefix = " + prefix))))
+ val body = Block(List(printPrefix), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works")))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-declared-in-trait/Macros_Test_2.scala b/test/files/run/macro-declared-in-trait/Macros_Test_2.scala
new file mode 100644
index 0000000..f75906b
--- /dev/null
+++ b/test/files/run/macro-declared-in-trait/Macros_Test_2.scala
@@ -0,0 +1,13 @@
+trait Base {
+ def foo = macro Impls.foo
+}
+
+object Macros extends Base
+
+class Macros extends Base
+
+object Test extends App {
+ (new Base {}).foo
+ Macros.foo
+ new Macros().foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-a.check b/test/files/run/macro-def-infer-return-type-a.check
new file mode 100644
index 0000000..f70d7bb
--- /dev/null
+++ b/test/files/run/macro-def-infer-return-type-a.check
@@ -0,0 +1 @@
+42
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-a.flags b/test/files/run/macro-def-infer-return-type-a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-def-infer-return-type-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-a/Impls_1.scala b/test/files/run/macro-def-infer-return-type-a/Impls_1.scala
new file mode 100644
index 0000000..52c9f9c
--- /dev/null
+++ b/test/files/run/macro-def-infer-return-type-a/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int]) = x
+}
diff --git a/test/files/run/macro-def-infer-return-type-a/Macros_Test_2.scala b/test/files/run/macro-def-infer-return-type-a/Macros_Test_2.scala
new file mode 100644
index 0000000..60fe9dc
--- /dev/null
+++ b/test/files/run/macro-def-infer-return-type-a/Macros_Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ def foo(x: Int) = macro Impls.foo
+ println(foo(42))
+}
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-b.check b/test/files/run/macro-def-infer-return-type-b.check
new file mode 100644
index 0000000..ae2dc7a
--- /dev/null
+++ b/test/files/run/macro-def-infer-return-type-b.check
@@ -0,0 +1,6 @@
+reflective compilation has failed:
+
+exception during macro expansion:
+java.lang.Error: an implementation is missing
+ at Impls$.foo(Impls_Macros_1.scala:5)
+
diff --git a/test/files/run/macro-def-infer-return-type-b.flags b/test/files/run/macro-def-infer-return-type-b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-def-infer-return-type-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-b/Impls_Macros_1.scala b/test/files/run/macro-def-infer-return-type-b/Impls_Macros_1.scala
new file mode 100644
index 0000000..8a0f18c
--- /dev/null
+++ b/test/files/run/macro-def-infer-return-type-b/Impls_Macros_1.scala
@@ -0,0 +1,10 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[T](c: Ctx)(x: c.Expr[T]) =
+ throw new Error("an implementation is missing")
+}
+
+object Macros {
+ def foo[T](x: T) = macro Impls.foo[T]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-b/Test_2.scala b/test/files/run/macro-def-infer-return-type-b/Test_2.scala
new file mode 100644
index 0000000..ea0fd4b
--- /dev/null
+++ b/test/files/run/macro-def-infer-return-type-b/Test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ val tree = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(42))))
+ try cm.mkToolBox().eval(tree)
+ catch { case ex: Throwable => println(ex.getMessage) }
+}
diff --git a/test/files/run/macro-def-infer-return-type-c.check b/test/files/run/macro-def-infer-return-type-c.check
new file mode 100644
index 0000000..f70d7bb
--- /dev/null
+++ b/test/files/run/macro-def-infer-return-type-c.check
@@ -0,0 +1 @@
+42
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-c.flags b/test/files/run/macro-def-infer-return-type-c.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-def-infer-return-type-c.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-infer-return-type-c/Impls_1.scala b/test/files/run/macro-def-infer-return-type-c/Impls_1.scala
new file mode 100644
index 0000000..78db67e
--- /dev/null
+++ b/test/files/run/macro-def-infer-return-type-c/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[T](c: Ctx)(x: c.Expr[T]): c.Expr[T] = x
+}
diff --git a/test/files/run/macro-def-infer-return-type-c/Macros_Test_2.scala b/test/files/run/macro-def-infer-return-type-c/Macros_Test_2.scala
new file mode 100644
index 0000000..967d16f
--- /dev/null
+++ b/test/files/run/macro-def-infer-return-type-c/Macros_Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ def foo[T](x: T) = macro Impls.foo[T]
+ println(foo(42))
+}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-a.check b/test/files/run/macro-def-path-dependent-a.check
new file mode 100644
index 0000000..7658ad2
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-a.check
@@ -0,0 +1 @@
+it works
diff --git a/test/files/run/macro-def-path-dependent-a.flags b/test/files/run/macro-def-path-dependent-a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-a/Impls_Macros_1.scala b/test/files/run/macro-def-path-dependent-a/Impls_Macros_1.scala
new file mode 100644
index 0000000..3a91e41
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-a/Impls_Macros_1.scala
@@ -0,0 +1,21 @@
+import scala.reflect.macros.{Context => Ctx}
+
+trait Exprs {
+ self: Universe =>
+
+ class Expr[T]
+}
+
+trait Reifiers {
+ self: Universe =>
+
+ type Expr[T]
+
+ def reify[T](expr: T) = macro Impls.reify[T]
+}
+
+trait Universe extends Exprs with Reifiers
+
+object Impls {
+ def reify[T](cc: Ctx{ type PrefixType = Reifiers })(expr: cc.Expr[T]): cc.Expr[cc.prefix.value.Expr[T]] = ???
+}
diff --git a/test/files/run/macro-def-path-dependent-a/Test_2.scala b/test/files/run/macro-def-path-dependent-a/Test_2.scala
new file mode 100644
index 0000000..7dffc51
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-a/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println("it works")
+}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-b.check b/test/files/run/macro-def-path-dependent-b.check
new file mode 100644
index 0000000..7658ad2
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-b.check
@@ -0,0 +1 @@
+it works
diff --git a/test/files/run/macro-def-path-dependent-b.flags b/test/files/run/macro-def-path-dependent-b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-b/Impls_Macros_1.scala b/test/files/run/macro-def-path-dependent-b/Impls_Macros_1.scala
new file mode 100644
index 0000000..cf9f9eb
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-b/Impls_Macros_1.scala
@@ -0,0 +1,20 @@
+import scala.reflect.macros.{Context => Ctx}
+
+trait Exprs {
+ self: Universe =>
+
+ class Expr[T]
+}
+
+trait Reifiers {
+ self: Universe =>
+
+}
+
+trait Universe extends Exprs with Reifiers {
+ def reify[T](expr: T) = macro Impls.reify[T]
+}
+
+object Impls {
+ def reify[T](cc: Ctx{ type PrefixType = Universe })(expr: cc.Expr[T]): cc.Expr[cc.prefix.value.Expr[T]] = ???
+}
diff --git a/test/files/run/macro-def-path-dependent-b/Test_2.scala b/test/files/run/macro-def-path-dependent-b/Test_2.scala
new file mode 100644
index 0000000..7dffc51
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-b/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println("it works")
+}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-c.check b/test/files/run/macro-def-path-dependent-c.check
new file mode 100644
index 0000000..7658ad2
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-c.check
@@ -0,0 +1 @@
+it works
diff --git a/test/files/run/macro-def-path-dependent-c.flags b/test/files/run/macro-def-path-dependent-c.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-c.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-c/Impls_Macros_1.scala b/test/files/run/macro-def-path-dependent-c/Impls_Macros_1.scala
new file mode 100644
index 0000000..6cb374d
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-c/Impls_Macros_1.scala
@@ -0,0 +1,20 @@
+import scala.reflect.macros.{Context => Ctx}
+
+trait Exprs {
+ self: Universe =>
+
+ class Expr[T]
+}
+
+trait Reifiers {
+ self: Universe =>
+
+}
+
+trait Universe extends Exprs with Reifiers {
+ def reify[T](expr: T): Expr[T] = macro Impls.reify[T]
+}
+
+object Impls {
+ def reify[T](cc: Ctx{ type PrefixType = Universe })(expr: cc.Expr[T]): cc.Expr[cc.prefix.value.Expr[T]] = ???
+}
diff --git a/test/files/run/macro-def-path-dependent-c/Test_2.scala b/test/files/run/macro-def-path-dependent-c/Test_2.scala
new file mode 100644
index 0000000..7dffc51
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-c/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println("it works")
+}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d1.check b/test/files/run/macro-def-path-dependent-d1.check
new file mode 100644
index 0000000..7658ad2
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-d1.check
@@ -0,0 +1 @@
+it works
diff --git a/test/files/run/macro-def-path-dependent-d1.flags b/test/files/run/macro-def-path-dependent-d1.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-d1.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d1/Impls_Macros_1.scala b/test/files/run/macro-def-path-dependent-d1/Impls_Macros_1.scala
new file mode 100644
index 0000000..69d9708
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-d1/Impls_Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.Context
+import scala.reflect.api.Universe
+
+object Test {
+ def materializeTypeTag[T](u: Universe)(e: T) = macro materializeTypeTag_impl[T]
+
+ def materializeTypeTag_impl[T: c.WeakTypeTag](c: Context)(u: c.Expr[Universe])(e: c.Expr[T]): c.Expr[u.value.TypeTag[T]] = ???
+}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d1/Test_2.scala b/test/files/run/macro-def-path-dependent-d1/Test_2.scala
new file mode 100644
index 0000000..7dffc51
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-d1/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println("it works")
+}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d2.check b/test/files/run/macro-def-path-dependent-d2.check
new file mode 100644
index 0000000..7658ad2
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-d2.check
@@ -0,0 +1 @@
+it works
diff --git a/test/files/run/macro-def-path-dependent-d2.flags b/test/files/run/macro-def-path-dependent-d2.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-d2.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d2/Impls_1.scala b/test/files/run/macro-def-path-dependent-d2/Impls_1.scala
new file mode 100644
index 0000000..7fa9c35
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-d2/Impls_1.scala
@@ -0,0 +1,7 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.Context
+import scala.reflect.api.Universe
+
+object Impls {
+ def materializeTypeTag_impl[T: c.WeakTypeTag](c: Context)(u: c.Expr[Universe])(e: c.Expr[T]): c.Expr[u.value.TypeTag[T]] = ???
+}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d2/Macros_2.scala b/test/files/run/macro-def-path-dependent-d2/Macros_2.scala
new file mode 100644
index 0000000..65ce4d8
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-d2/Macros_2.scala
@@ -0,0 +1,7 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.Context
+import scala.reflect.api.Universe
+
+object Macros {
+ def materializeTypeTag[T](u: Universe)(e: T) = macro Impls.materializeTypeTag_impl[T]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-def-path-dependent-d2/Test_3.scala b/test/files/run/macro-def-path-dependent-d2/Test_3.scala
new file mode 100644
index 0000000..7dffc51
--- /dev/null
+++ b/test/files/run/macro-def-path-dependent-d2/Test_3.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println("it works")
+}
\ No newline at end of file
diff --git a/test/files/run/macro-divergence-spurious.check b/test/files/run/macro-divergence-spurious.check
new file mode 100644
index 0000000..19765bd
--- /dev/null
+++ b/test/files/run/macro-divergence-spurious.check
@@ -0,0 +1 @@
+null
diff --git a/test/files/run/macro-divergence-spurious/Impls_Macros_1.scala b/test/files/run/macro-divergence-spurious/Impls_Macros_1.scala
new file mode 100644
index 0000000..bc4a9fd
--- /dev/null
+++ b/test/files/run/macro-divergence-spurious/Impls_Macros_1.scala
@@ -0,0 +1,23 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+trait Complex[T]
+
+class Foo(val bar: Bar)
+class Bar(val s: String)
+
+object Complex {
+ def impl[T: c.WeakTypeTag](c: Context): c.Expr[Complex[T]] = {
+ import c.universe._
+ val tpe = weakTypeOf[T]
+ for (f <- tpe.declarations.collect{case f: TermSymbol if f.isParamAccessor && !f.isMethod => f}) {
+ val trecur = appliedType(typeOf[Complex[_]], List(f.typeSignature))
+ val recur = c.inferImplicitValue(trecur, silent = true)
+ if (recur == EmptyTree) c.abort(c.enclosingPosition, s"couldn't synthesize $trecur")
+ }
+ c.literalNull
+ }
+
+ implicit object ComplexString extends Complex[String]
+ implicit def genComplex[T]: Complex[T] = macro impl[T]
+}
diff --git a/test/files/run/macro-divergence-spurious/Test_2.scala b/test/files/run/macro-divergence-spurious/Test_2.scala
new file mode 100644
index 0000000..dcc4593
--- /dev/null
+++ b/test/files/run/macro-divergence-spurious/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println(implicitly[Complex[Foo]])
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/macro-duplicate.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/macro-duplicate.check
diff --git a/test/files/run/macro-duplicate.flags b/test/files/run/macro-duplicate.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-duplicate.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-duplicate/Impls_Macros_1.scala b/test/files/run/macro-duplicate/Impls_Macros_1.scala
new file mode 100644
index 0000000..de81923
--- /dev/null
+++ b/test/files/run/macro-duplicate/Impls_Macros_1.scala
@@ -0,0 +1,29 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ val Expr(Block((cdef: ClassDef) :: Nil, _)) = reify { class C { def x = 2 } }
+ val cdef1 =
+ new Transformer {
+ override def transform(tree: Tree): Tree = tree match {
+ case Template(_, _, ctor :: defs) =>
+ val defs1 = defs collect {
+ case ddef @ DefDef(mods, name, tparams, vparamss, tpt, body) =>
+ val future = Select(Select(Select(Ident(newTermName("scala")), newTermName("concurrent")), newTermName("package")), newTermName("future"))
+ val Future = Select(Select(Ident(newTermName("scala")), newTermName("concurrent")), newTypeName("Future"))
+ val tpt1 = if (tpt.isEmpty) tpt else AppliedTypeTree(Future, List(tpt))
+ val body1 = Apply(future, List(body))
+ val name1 = newTermName("async" + name.toString.capitalize)
+ DefDef(mods, name1, tparams, vparamss, tpt1, body1)
+ }
+ Template(Nil, emptyValDef, ctor +: defs ::: defs1)
+ case _ =>
+ super.transform(tree)
+ }
+ } transform cdef
+ c.Expr[Unit](Block(cdef1 :: Nil, Literal(Constant(()))))
+ }
+
+ def foo = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/macro-duplicate/Test_2.scala b/test/files/run/macro-duplicate/Test_2.scala
new file mode 100644
index 0000000..6dbd438
--- /dev/null
+++ b/test/files/run/macro-duplicate/Test_2.scala
@@ -0,0 +1,6 @@
+import scala.concurrent._
+import ExecutionContext.Implicits.global
+
+object Test extends App {
+ Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/run/bug4656.check b/test/files/run/macro-expand-implicit-argument.check
similarity index 100%
copy from test/files/run/bug4656.check
copy to test/files/run/macro-expand-implicit-argument.check
diff --git a/test/files/run/macro-expand-implicit-argument.flags b/test/files/run/macro-expand-implicit-argument.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-argument.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-argument/Macros_1.scala b/test/files/run/macro-expand-implicit-argument/Macros_1.scala
new file mode 100644
index 0000000..d9fd5b8
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-argument/Macros_1.scala
@@ -0,0 +1,59 @@
+import annotation.tailrec
+import scala.math.{min, max}
+import scala.{specialized => spec}
+
+import language.experimental.macros
+
+import scala.reflect.ClassTag
+import scala.reflect.macros.Context
+
+object Macros {
+ def alloc[@spec A:ClassTag](src:Array[A], s1:Int, len:Int) = {
+ val as = Array.ofDim[A](len)
+ System.arraycopy(src, s1, as, 0, len)
+ as
+ }
+
+ /**
+ * Efficient alternative to Array.apply.
+ *
+ * "As seen on scala-internals!"
+ */
+ def array[A](as:A*)(implicit ct: ClassTag[A]) = macro arrayMacro[A]
+
+ /**
+ * Takes in something like:
+ * ArrayUtil.alloc[Int](11, 22, 33, 44)(ct)
+ *
+ * and builds a tree like:
+ * {
+ * val arr:Array[Int] = ct.newArray(4)
+ * arr.update(0, 11)
+ * arr.update(1, 22)
+ * arr.update(2, 33)
+ * arr.update(3, 44)
+ * arr
+ * }
+ */
+ def arrayMacro[A:c.WeakTypeTag](c:Context)(as:c.Expr[A]*)(ct: c.Expr[ClassTag[A]]): c.Expr[Array[A]] = {
+ import c.mirror._
+ import c.universe._
+ def const(x:Int) = Literal(Constant(x))
+
+ val n = as.length
+ val arr = newTermName("arr")
+
+ val create = Apply(Select(ct.tree, newTermName("newArray")), List(const(n)))
+ val arrtpe = TypeTree(implicitly[c.WeakTypeTag[Array[A]]].tpe)
+ val valdef = ValDef(Modifiers(), arr, arrtpe, create)
+
+ val updates = (0 until n).map {
+ i => Apply(Select(Ident(arr), newTermName("update")), List(const(i), as(i).tree))
+ }
+
+ val exprs = (Seq(valdef) ++ updates ++ Seq(Ident(arr))).toList
+ val block = Block(exprs.init, exprs.last)
+
+ c.Expr[Array[A]](block)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-argument/Test_2.scala b/test/files/run/macro-expand-implicit-argument/Test_2.scala
new file mode 100644
index 0000000..ce8a068
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-argument/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros._
+ println(array(1, 2, 3).toList)
+}
\ No newline at end of file
diff --git a/test/files/run/syncchannel.check b/test/files/run/macro-expand-implicit-macro-has-implicit.check
similarity index 100%
copy from test/files/run/syncchannel.check
copy to test/files/run/macro-expand-implicit-macro-has-implicit.check
diff --git a/test/files/run/macro-expand-implicit-macro-has-implicit.flags b/test/files/run/macro-expand-implicit-macro-has-implicit.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-macro-has-implicit.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala b/test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala
new file mode 100644
index 0000000..082e6b2
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-macro-has-implicit/Impls_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int]) = {
+ import c.universe._
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(x.tree))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-has-implicit/Macros_Test_2.scala b/test/files/run/macro-expand-implicit-macro-has-implicit/Macros_Test_2.scala
new file mode 100644
index 0000000..ffb04dc
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-macro-has-implicit/Macros_Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ implicit val x = 42
+ def foo(implicit x: Int) = macro Impls.foo
+ foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-implicit.check b/test/files/run/macro-expand-implicit-macro-is-implicit.check
new file mode 100644
index 0000000..c205945
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-macro-is-implicit.check
@@ -0,0 +1,2 @@
+Some(2)
+2
diff --git a/test/files/run/macro-expand-implicit-macro-is-implicit.flags b/test/files/run/macro-expand-implicit-macro-is-implicit.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-macro-is-implicit.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala b/test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala
new file mode 100644
index 0000000..cceb038
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-macro-is-implicit/Impls_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[String]): c.Expr[Option[Int]] = {
+ import c.universe._
+ val body = Apply(Ident(definitions.SomeModule), List(Select(x.tree, newTermName("toInt"))))
+ c.Expr[Option[Int]](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala b/test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala
new file mode 100644
index 0000000..81ebd63
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-macro-is-implicit/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+object Macros {
+ implicit def foo(x: String): Option[Int] = macro Impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ println("2": Option[Int])
+ val s: Int = "2" getOrElse 0
+ println(s)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-val.check b/test/files/run/macro-expand-implicit-macro-is-val.check
new file mode 100644
index 0000000..0cfbf08
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-macro-is-val.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/macro-expand-implicit-macro-is-val.flags b/test/files/run/macro-expand-implicit-macro-is-val.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-macro-is-val.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-val/Impls_1.scala b/test/files/run/macro-expand-implicit-macro-is-val/Impls_1.scala
new file mode 100644
index 0000000..fa717b2
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-macro-is-val/Impls_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.universe._
+ val body = Literal(Constant(2))
+ c.Expr[Int](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-val/Macros_Test_2.scala b/test/files/run/macro-expand-implicit-macro-is-val/Macros_Test_2.scala
new file mode 100644
index 0000000..b91b101
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-macro-is-val/Macros_Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ implicit def foo = macro Impls.foo
+ def bar(implicit x: Int) = println(x)
+ bar
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-view.check b/test/files/run/macro-expand-implicit-macro-is-view.check
new file mode 100644
index 0000000..0cfbf08
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-macro-is-view.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/macro-expand-implicit-macro-is-view.flags b/test/files/run/macro-expand-implicit-macro-is-view.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-macro-is-view.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala b/test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala
new file mode 100644
index 0000000..cceb038
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-macro-is-view/Impls_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[String]): c.Expr[Option[Int]] = {
+ import c.universe._
+ val body = Apply(Ident(definitions.SomeModule), List(Select(x.tree, newTermName("toInt"))))
+ c.Expr[Option[Int]](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala b/test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala
new file mode 100644
index 0000000..0ff1fb8
--- /dev/null
+++ b/test/files/run/macro-expand-implicit-macro-is-view/Macros_Test_2.scala
@@ -0,0 +1,9 @@
+object Macros {
+ implicit def foo(x: String): Option[Int] = macro Impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ def bar[T <% Option[Int]](x: T) = println(x)
+ println("2")
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-multiple-arglists.check b/test/files/run/macro-expand-multiple-arglists.check
new file mode 100644
index 0000000..c24b6ae
--- /dev/null
+++ b/test/files/run/macro-expand-multiple-arglists.check
@@ -0,0 +1 @@
+38
\ No newline at end of file
diff --git a/test/files/run/macro-expand-multiple-arglists.flags b/test/files/run/macro-expand-multiple-arglists.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-multiple-arglists.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-multiple-arglists/Impls_1.scala b/test/files/run/macro-expand-multiple-arglists/Impls_1.scala
new file mode 100644
index 0000000..11e0793
--- /dev/null
+++ b/test/files/run/macro-expand-multiple-arglists/Impls_1.scala
@@ -0,0 +1,10 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int])(y: c.Expr[Int]) = {
+ import c.universe._
+ val sum = Apply(Select(x.tree, newTermName("$minus")), List(y.tree))
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(sum))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-multiple-arglists/Macros_Test_2.scala b/test/files/run/macro-expand-multiple-arglists/Macros_Test_2.scala
new file mode 100644
index 0000000..fa4504b
--- /dev/null
+++ b/test/files/run/macro-expand-multiple-arglists/Macros_Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ def foo(x: Int)(y: Int) = macro Impls.foo
+ foo(40)(2)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-nullary-generic.check b/test/files/run/macro-expand-nullary-generic.check
new file mode 100644
index 0000000..42976f4
--- /dev/null
+++ b/test/files/run/macro-expand-nullary-generic.check
@@ -0,0 +1,6 @@
+it works WeakTypeTag[Int]
+it works WeakTypeTag[Int]
+it works WeakTypeTag[Int]
+it works WeakTypeTag[Int]
+it works WeakTypeTag[Int]
+kkthxbai
diff --git a/test/files/run/macro-expand-nullary-generic.flags b/test/files/run/macro-expand-nullary-generic.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-nullary-generic.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-nullary-generic/Impls_1.scala b/test/files/run/macro-expand-nullary-generic/Impls_1.scala
new file mode 100644
index 0000000..1180c83
--- /dev/null
+++ b/test/files/run/macro-expand-nullary-generic/Impls_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def impl[T: c.WeakTypeTag](c: Ctx) = {
+ import c.universe._
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works " + implicitly[c.WeakTypeTag[T]]))))
+ c.Expr[Unit](body)
+ }
+
+ def fooNullary[T: c.WeakTypeTag](c: Ctx) = impl[T](c)
+ def fooEmpty[T: c.WeakTypeTag](c: Ctx)() = impl[T](c)
+ def barNullary[T: c.WeakTypeTag](c: Ctx)(x: c.Expr[Int]) = impl[T](c)
+ def barEmpty[T: c.WeakTypeTag](c: Ctx)(x: c.Expr[Int])() = impl[T](c)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-nullary-generic/Macros_Test_2.scala b/test/files/run/macro-expand-nullary-generic/Macros_Test_2.scala
new file mode 100644
index 0000000..2d5cf53
--- /dev/null
+++ b/test/files/run/macro-expand-nullary-generic/Macros_Test_2.scala
@@ -0,0 +1,15 @@
+object Macros {
+ def foo1[T] = macro Impls.fooNullary[T]
+ def foo2[T]() = macro Impls.fooEmpty[T]
+ def bar1[T](x: Int) = macro Impls.barNullary[T]
+ def bar2[T](x: Int)() = macro Impls.barEmpty[T]
+}
+
+object Test extends App {
+ Macros.foo1[Int]
+ Macros.foo2[Int]
+ Macros.foo2[Int]()
+ Macros.bar1[Int](42)
+ Macros.bar2[Int](42)()
+ println("kkthxbai")
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-nullary-nongeneric.check b/test/files/run/macro-expand-nullary-nongeneric.check
new file mode 100644
index 0000000..9ab5f3a
--- /dev/null
+++ b/test/files/run/macro-expand-nullary-nongeneric.check
@@ -0,0 +1,6 @@
+it works
+it works
+it works
+it works
+it works
+kkthxbai
diff --git a/test/files/run/macro-expand-nullary-nongeneric.flags b/test/files/run/macro-expand-nullary-nongeneric.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-nullary-nongeneric.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala b/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala
new file mode 100644
index 0000000..c6bd1cd
--- /dev/null
+++ b/test/files/run/macro-expand-nullary-nongeneric/Impls_1.scala
@@ -0,0 +1,14 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def impl(c: Ctx) = {
+ import c.universe._
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works"))))
+ c.Expr[Unit](body)
+ }
+
+ def fooNullary(c: Ctx) = impl(c)
+ def fooEmpty(c: Ctx)() = impl(c)
+ def barNullary(c: Ctx)(x: c.Expr[Int]) = impl(c)
+ def barEmpty(c: Ctx)(x: c.Expr[Int])() = impl(c)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-nullary-nongeneric/Macros_Test_2.scala b/test/files/run/macro-expand-nullary-nongeneric/Macros_Test_2.scala
new file mode 100644
index 0000000..1f6d717
--- /dev/null
+++ b/test/files/run/macro-expand-nullary-nongeneric/Macros_Test_2.scala
@@ -0,0 +1,15 @@
+object Macros {
+ def foo1 = macro Impls.fooNullary
+ def foo2() = macro Impls.fooEmpty
+ def bar1(x: Int) = macro Impls.barNullary
+ def bar2(x: Int)() = macro Impls.barEmpty
+}
+
+object Test extends App {
+ Macros.foo1
+ Macros.foo2
+ Macros.foo2()
+ Macros.bar1(42)
+ Macros.bar2(42)()
+ println("kkthxbai")
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-overload.check b/test/files/run/macro-expand-overload.check
new file mode 100644
index 0000000..a2b7b0e
--- /dev/null
+++ b/test/files/run/macro-expand-overload.check
@@ -0,0 +1,6 @@
+(fooObjectString,Expr[Nothing](Macros),42)
+(fooObjectInt,Expr[Nothing](Macros),42)
+fooObjectBoolean
+(fooClassString,Expr[Nothing](new Macros()),42)
+(fooClassInt,Expr[Nothing](new Macros()),42)
+fooClassBoolean
diff --git a/test/files/run/macro-expand-overload.flags b/test/files/run/macro-expand-overload.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-overload.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-overload/Impls_1.scala b/test/files/run/macro-expand-overload/Impls_1.scala
new file mode 100644
index 0000000..f7c240d
--- /dev/null
+++ b/test/files/run/macro-expand-overload/Impls_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def impl(c: Ctx)(tag: String, x: c.Expr[_]) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(tag)), Literal(Constant(prefix.toString)), x.tree))
+ c.Expr[Unit](body)
+ }
+
+ def fooObjectString(c: Ctx)(x: c.Expr[_]) = impl(c)("fooObjectString", x)
+ def fooObjectInt(c: Ctx)(x: c.Expr[_]) = impl(c)("fooObjectInt", x)
+ def fooClassString(c: Ctx)(x: c.Expr[_]) = impl(c)("fooClassString", x)
+ def fooClassInt(c: Ctx)(x: c.Expr[_]) = impl(c)("fooClassInt", x)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-overload/Macros_Test_2.scala b/test/files/run/macro-expand-overload/Macros_Test_2.scala
new file mode 100644
index 0000000..7f61f85
--- /dev/null
+++ b/test/files/run/macro-expand-overload/Macros_Test_2.scala
@@ -0,0 +1,20 @@
+object Macros {
+ def foo(x: String) = macro Impls.fooObjectString
+ def foo(x: Int) = macro Impls.fooObjectInt
+ def foo(x: Boolean) = println("fooObjectBoolean")
+}
+
+class Macros {
+ def foo(x: String) = macro Impls.fooClassString
+ def foo(x: Int) = macro Impls.fooClassInt
+ def foo(x: Boolean) = println("fooClassBoolean")
+}
+
+object Test extends App {
+ Macros.foo("42")
+ Macros.foo(42)
+ Macros.foo(true)
+ new Macros().foo("42")
+ new Macros().foo(42)
+ new Macros().foo(true)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-override.check b/test/files/run/macro-expand-override.check
new file mode 100644
index 0000000..b41dc15
--- /dev/null
+++ b/test/files/run/macro-expand-override.check
@@ -0,0 +1,15 @@
+(fooBString,Expr[Nothing](Test.this.dd),42)
+(fooDInt,Expr[Nothing](Test.this.dd),42)
+fooBBoolean
+(fooBString,Expr[Nothing](Test.this.db),42)
+(fooBInt,Expr[Nothing](Test.this.db),42)
+fooBBoolean
+(fooZString,Expr[Nothing](Test.this.zz),42)
+(fooDInt,Expr[Nothing](Test.this.zz),42)
+fooZBoolean
+(fooBString,Expr[Nothing](Test.this.zd),42)
+(fooDInt,Expr[Nothing](Test.this.zd),42)
+fooZBoolean
+(fooBString,Expr[Nothing](Test.this.zb),42)
+(fooBInt,Expr[Nothing](Test.this.zb),42)
+fooZBoolean
diff --git a/test/files/run/macro-expand-override.flags b/test/files/run/macro-expand-override.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-override.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-override/Impls_1.scala b/test/files/run/macro-expand-override/Impls_1.scala
new file mode 100644
index 0000000..ec93dd4
--- /dev/null
+++ b/test/files/run/macro-expand-override/Impls_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def impl(c: Ctx)(tag: String, x: c.Expr[_]) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(tag)), Literal(Constant(prefix.toString)), x.tree))
+ c.Expr[Unit](body)
+ }
+
+ def fooBString(c: Ctx)(x: c.Expr[_]) = impl(c)("fooBString", x)
+ def fooBInt(c: Ctx)(x: c.Expr[_]) = impl(c)("fooBInt", x)
+ def fooDInt(c: Ctx)(x: c.Expr[_]) = impl(c)("fooDInt", x)
+ def fooZString(c: Ctx)(x: c.Expr[_]) = impl(c)("fooZString", x)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-override/Macros_Test_2.scala b/test/files/run/macro-expand-override/Macros_Test_2.scala
new file mode 100644
index 0000000..f162773
--- /dev/null
+++ b/test/files/run/macro-expand-override/Macros_Test_2.scala
@@ -0,0 +1,43 @@
+class B {
+ def foo(x: String) = macro Impls.fooBString
+ def foo(x: Int) = macro Impls.fooBInt
+ def foo(x: Boolean) = println("fooBBoolean")
+}
+
+class D extends B {
+ //override def foo(x: String) = println("fooDString") => method cannot override a macro
+ override def foo(x: Int) = macro Impls.fooDInt
+}
+
+class Z extends D {
+ override def foo(x: String) = macro Impls.fooZString
+ override def foo(x: Boolean) = println("fooZBoolean")
+}
+
+object Test extends App {
+
+ val dd: D = new D()
+ dd.foo("42")
+ dd.foo(42)
+ dd.foo(true)
+
+ val db: B = new D()
+ db.foo("42")
+ db.foo(42)
+ db.foo(true)
+
+ val zz: Z = new Z()
+ zz.foo("42")
+ zz.foo(42)
+ zz.foo(true)
+
+ val zd: D = new Z()
+ zd.foo("42")
+ zd.foo(42)
+ zd.foo(true)
+
+ val zb: B = new Z()
+ zb.foo("42")
+ zb.foo(42)
+ zb.foo(true)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-recursive.check b/test/files/run/macro-expand-recursive.check
new file mode 100644
index 0000000..7658ad2
--- /dev/null
+++ b/test/files/run/macro-expand-recursive.check
@@ -0,0 +1 @@
+it works
diff --git a/test/files/run/macro-expand-recursive.flags b/test/files/run/macro-expand-recursive.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-recursive.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-recursive/Impls_1.scala b/test/files/run/macro-expand-recursive/Impls_1.scala
new file mode 100644
index 0000000..61db5c4
--- /dev/null
+++ b/test/files/run/macro-expand-recursive/Impls_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.universe._
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("it works"))))
+ c.Expr[Unit](body)
+ }
+
+ def fooFoo(c: Ctx) = {
+ import c.universe._
+ val body = Select(Ident(newTermName("Macros")), newTermName("foo"))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-recursive/Macros_Test_2.scala b/test/files/run/macro-expand-recursive/Macros_Test_2.scala
new file mode 100644
index 0000000..6ff691b
--- /dev/null
+++ b/test/files/run/macro-expand-recursive/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo = macro Impls.foo
+ def fooFoo = macro Impls.fooFoo
+}
+
+object Test extends App {
+ Macros.fooFoo
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/macro-expand-tparams-bounds-a.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/macro-expand-tparams-bounds-a.check
diff --git a/test/files/run/macro-expand-tparams-bounds-a.flags b/test/files/run/macro-expand-tparams-bounds-a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-bounds-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-bounds-a/Impls_1.scala b/test/files/run/macro-expand-tparams-bounds-a/Impls_1.scala
new file mode 100644
index 0000000..9b8dafa
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-bounds-a/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U <: String](c: Ctx): c.Expr[Unit] = c.literalUnit
+}
diff --git a/test/files/run/macro-expand-tparams-bounds-a/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-bounds-a/Macros_Test_2.scala
new file mode 100644
index 0000000..b498e6f
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-bounds-a/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo[U <: String] = macro Impls.foo[U]
+}
+
+object Test extends App {
+ import Macros._
+ foo[String]
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/macro-expand-tparams-bounds-b.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/macro-expand-tparams-bounds-b.check
diff --git a/test/files/run/macro-expand-tparams-bounds-b.flags b/test/files/run/macro-expand-tparams-bounds-b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-bounds-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-bounds-b/Impls_1.scala b/test/files/run/macro-expand-tparams-bounds-b/Impls_1.scala
new file mode 100644
index 0000000..c11c891
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-bounds-b/Impls_1.scala
@@ -0,0 +1,7 @@
+import scala.reflect.macros.{Context => Ctx}
+
+class C
+
+object Impls {
+ def foo[U <: C](c: Ctx): c.Expr[Unit] = c.literalUnit
+}
diff --git a/test/files/run/macro-expand-tparams-bounds-b/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-bounds-b/Macros_Test_2.scala
new file mode 100644
index 0000000..1a261e9
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-bounds-b/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+class D extends C
+
+object Macros {
+ def foo[T <: D] = macro Impls.foo[T]
+}
+
+object Test extends App {
+ import Macros._
+ foo[D]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-explicit.check b/test/files/run/macro-expand-tparams-explicit.check
new file mode 100644
index 0000000..b6b4f6f
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-explicit.check
@@ -0,0 +1 @@
+WeakTypeTag[Int]
diff --git a/test/files/run/macro-expand-tparams-explicit.flags b/test/files/run/macro-expand-tparams-explicit.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-explicit.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-explicit/Impls_1.scala b/test/files/run/macro-expand-tparams-explicit/Impls_1.scala
new file mode 100644
index 0000000..72b420d
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-explicit/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U: c.WeakTypeTag](c: Ctx) = {
+ import c.universe._
+ val U = implicitly[c.WeakTypeTag[U]]
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(U.toString))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-explicit/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-explicit/Macros_Test_2.scala
new file mode 100644
index 0000000..e72c278
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-explicit/Macros_Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ def foo[U] = macro Impls.foo[U]
+ foo[Int]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-implicit.check b/test/files/run/macro-expand-tparams-implicit.check
new file mode 100644
index 0000000..a9bf554
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-implicit.check
@@ -0,0 +1,2 @@
+WeakTypeTag[Int]
+WeakTypeTag[String]
diff --git a/test/files/run/macro-expand-tparams-implicit.flags b/test/files/run/macro-expand-tparams-implicit.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-implicit.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-implicit/Impls_1.scala b/test/files/run/macro-expand-tparams-implicit/Impls_1.scala
new file mode 100644
index 0000000..3377051
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-implicit/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U: c.WeakTypeTag](c: Ctx)(x: c.Expr[U]) = {
+ import c.universe._
+ val U = implicitly[c.WeakTypeTag[U]]
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(U.toString))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-implicit/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-implicit/Macros_Test_2.scala
new file mode 100644
index 0000000..f8c573f
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-implicit/Macros_Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ def foo[U](x: U) = macro Impls.foo[U]
+ foo(42)
+ foo("42")
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-only-in-impl.flags b/test/files/run/macro-expand-tparams-only-in-impl.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-only-in-impl.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-only-in-impl/Impls_1.scala b/test/files/run/macro-expand-tparams-only-in-impl/Impls_1.scala
new file mode 100644
index 0000000..9b8dafa
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-only-in-impl/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U <: String](c: Ctx): c.Expr[Unit] = c.literalUnit
+}
diff --git a/test/files/run/macro-expand-tparams-only-in-impl/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-only-in-impl/Macros_Test_2.scala
new file mode 100644
index 0000000..218c7ae
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-only-in-impl/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo = macro Impls.foo[String]
+}
+
+object Test extends App {
+ import Macros._
+ foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-optional.check b/test/files/run/macro-expand-tparams-optional.check
new file mode 100644
index 0000000..b4a0f39
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-optional.check
@@ -0,0 +1 @@
+don't know U
diff --git a/test/files/run/macro-expand-tparams-optional.flags b/test/files/run/macro-expand-tparams-optional.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-optional.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-optional/Impls_1.scala b/test/files/run/macro-expand-tparams-optional/Impls_1.scala
new file mode 100644
index 0000000..3b829e2
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-optional/Impls_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U](c: Ctx) = {
+ import c.universe._
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("don't know U"))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-optional/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-optional/Macros_Test_2.scala
new file mode 100644
index 0000000..e72c278
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-optional/Macros_Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ def foo[U] = macro Impls.foo[U]
+ foo[Int]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-a.check b/test/files/run/macro-expand-tparams-prefix-a.check
new file mode 100644
index 0000000..ca44a4f
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-a.check
@@ -0,0 +1,4 @@
+WeakTypeTag[Int]
+WeakTypeTag[Int]
+WeakTypeTag[String]
+WeakTypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-a.flags b/test/files/run/macro-expand-tparams-prefix-a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala
new file mode 100644
index 0000000..3377051
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-a/Impls_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U: c.WeakTypeTag](c: Ctx)(x: c.Expr[U]) = {
+ import c.universe._
+ val U = implicitly[c.WeakTypeTag[U]]
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(U.toString))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-a/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-prefix-a/Macros_Test_2.scala
new file mode 100644
index 0000000..81ccb7f
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-a/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+ class C[T] {
+ def foo[U](x: U) = macro Impls.foo[U]
+ }
+
+ new C[Int]().foo(42)
+ new C[Boolean]().foo(42)
+ new C[Int]().foo("42")
+ new C[String]().foo(true)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-b.check b/test/files/run/macro-expand-tparams-prefix-b.check
new file mode 100644
index 0000000..2ff2ce4
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-b.check
@@ -0,0 +1,2 @@
+WeakTypeTag[Boolean] WeakTypeTag[Int]
+WeakTypeTag[Boolean] WeakTypeTag[String]
diff --git a/test/files/run/macro-expand-tparams-prefix-b.flags b/test/files/run/macro-expand-tparams-prefix-b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala
new file mode 100644
index 0000000..9378e67
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-b/Impls_1.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[T: c.WeakTypeTag, U: c.WeakTypeTag](c: Ctx)(x: c.Expr[U]) = {
+ import c.universe._
+ val T = implicitly[c.WeakTypeTag[T]]
+ val U = implicitly[c.WeakTypeTag[U]]
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString + " " + U.toString))))
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-b/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-prefix-b/Macros_Test_2.scala
new file mode 100644
index 0000000..a4a0acf
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-b/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+ class C[T] {
+ def foo[U](x: U) = macro Impls.foo[T, U]
+ }
+
+ object D extends C[Boolean]
+
+ D.foo(42)
+ D.foo("42")
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c1.check b/test/files/run/macro-expand-tparams-prefix-c1.check
new file mode 100644
index 0000000..0f24f74
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-c1.check
@@ -0,0 +1,3 @@
+WeakTypeTag[Int]
+WeakTypeTag[String]
+WeakTypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-c1.flags b/test/files/run/macro-expand-tparams-prefix-c1.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-c1.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala
new file mode 100644
index 0000000..afdd7d4
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-c1/Impls_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+ import c.universe._
+ c.Expr(Block(List(
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
+ Literal(Constant(()))))
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c1/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-prefix-c1/Macros_Test_2.scala
new file mode 100644
index 0000000..4fa0c8c
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-c1/Macros_Test_2.scala
@@ -0,0 +1,11 @@
+class D[T] {
+ class C[U] {
+ def foo[V] = macro Impls.foo[T, U, V]
+ }
+}
+
+object Test extends App {
+ val outer1 = new D[Int]
+ val outer2 = new outer1.C[String]
+ outer2.foo[Boolean]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c2.check b/test/files/run/macro-expand-tparams-prefix-c2.check
new file mode 100644
index 0000000..0f24f74
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-c2.check
@@ -0,0 +1,3 @@
+WeakTypeTag[Int]
+WeakTypeTag[String]
+WeakTypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-c2.flags b/test/files/run/macro-expand-tparams-prefix-c2.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-c2.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala b/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala
new file mode 100644
index 0000000..3c28382
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-c2/Impls_Macros_1.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+ import c.universe._
+ c.Expr(Block(List(
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
+ Literal(Constant(()))))
+ }
+}
+
+class D[T] {
+ class C[U] {
+ def foo[V] = macro Impls.foo[T, U, V]
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-c2/Test_2.scala b/test/files/run/macro-expand-tparams-prefix-c2/Test_2.scala
new file mode 100644
index 0000000..e729d4a
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-c2/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ val outer1 = new D[Int]
+ val outer2 = new outer1.C[String]
+ outer2.foo[Boolean]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-d1.check b/test/files/run/macro-expand-tparams-prefix-d1.check
new file mode 100644
index 0000000..7832503
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-d1.check
@@ -0,0 +1,3 @@
+WeakTypeTag[T]
+WeakTypeTag[U]
+WeakTypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-d1.flags b/test/files/run/macro-expand-tparams-prefix-d1.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-d1.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala
new file mode 100644
index 0000000..afdd7d4
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-d1/Impls_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+ import c.universe._
+ c.Expr(Block(List(
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
+ Literal(Constant(()))))
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-tparams-prefix-d1/Macros_Test_2.scala b/test/files/run/macro-expand-tparams-prefix-d1/Macros_Test_2.scala
new file mode 100644
index 0000000..8222a6d
--- /dev/null
+++ b/test/files/run/macro-expand-tparams-prefix-d1/Macros_Test_2.scala
@@ -0,0 +1,11 @@
+object Test extends App {
+ class D[T] {
+ class C[U] {
+ def foo[V] = macro Impls.foo[T, U, V]
+ foo[Boolean]
+ }
+ }
+
+ val outer1 = new D[Int]
+ new outer1.C[String]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check
new file mode 100644
index 0000000..1c3ecfd
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.check
@@ -0,0 +1,4 @@
+reflective compilation has failed:
+
+no `: _*' annotation allowed here
+(such annotations are only allowed in arguments to *-parameters)
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.flags b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala
new file mode 100644
index 0000000..2ef8f04
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Impls_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(xs: c.Expr[Int]*) = {
+ import c.universe._
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), xs.map(_.tree).toList)
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala
new file mode 100644
index 0000000..c832826
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-bad/Macros_Test_2.scala
@@ -0,0 +1,12 @@
+object Macros {
+ def foo(xs: Int*) = macro Impls.foo
+}
+
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ val tree = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Typed(Apply(Ident(definitions.ListModule), List(Literal(Constant(1)), Literal(Constant(2)))), Ident(tpnme.WILDCARD_STAR))))
+ try cm.mkToolBox().eval(tree)
+ catch { case ex: Throwable => println(ex.getMessage) }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good.check b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good.check
new file mode 100644
index 0000000..fe90cae
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good.check
@@ -0,0 +1 @@
+List(1, 2, 3, 4, 5)
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good.flags b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala
new file mode 100644
index 0000000..3c7f94f
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Impls_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(xs: c.Expr[Int]*) = {
+ import c.universe._
+ val stripped_xs = xs map (_.tree) toList match {
+ case List(Typed(stripped, Ident(wildstar))) if wildstar == tpnme.WILDCARD_STAR => List(stripped)
+ case _ => ???
+ }
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), stripped_xs)
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Macros_Test_2.scala
new file mode 100644
index 0000000..f127ebc
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-explicit-over-nonvarargs-good/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo(xs: Int*) = macro Impls.foo
+}
+
+object Test extends App {
+ val numbers = List(1, 2, 3, 4, 5)
+ Macros.foo(numbers: _*)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-varargs.check b/test/files/run/macro-expand-varargs-explicit-over-varargs.check
new file mode 100644
index 0000000..fe90cae
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-explicit-over-varargs.check
@@ -0,0 +1 @@
+List(1, 2, 3, 4, 5)
diff --git a/test/files/run/macro-expand-varargs-explicit-over-varargs.flags b/test/files/run/macro-expand-varargs-explicit-over-varargs.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-explicit-over-varargs.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala b/test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala
new file mode 100644
index 0000000..2066893
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-explicit-over-varargs/Impls_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def myprintln(xs: Int*) = {
+ println(xs)
+ }
+
+ def foo(c: Ctx)(xs: c.Expr[Int]*) = {
+ import c.universe._
+ val body = Apply(Select(Ident(newTermName("Impls")), newTermName("myprintln")), xs.map(_.tree).toList)
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-explicit-over-varargs/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-explicit-over-varargs/Macros_Test_2.scala
new file mode 100644
index 0000000..f127ebc
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-explicit-over-varargs/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo(xs: Int*) = macro Impls.foo
+}
+
+object Test extends App {
+ val numbers = List(1, 2, 3, 4, 5)
+ Macros.foo(numbers: _*)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs.check b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs.check
new file mode 100644
index 0000000..bcfab19
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs.check
@@ -0,0 +1 @@
+(1,2,3,4,5)
diff --git a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs.flags b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala
new file mode 100644
index 0000000..2ef8f04
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Impls_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(xs: c.Expr[Int]*) = {
+ import c.universe._
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), xs.map(_.tree).toList)
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Macros_Test_2.scala
new file mode 100644
index 0000000..2311ca0
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-implicit-over-nonvarargs/Macros_Test_2.scala
@@ -0,0 +1,7 @@
+object Macros {
+ def foo(xs: Int*) = macro Impls.foo
+}
+
+object Test extends App {
+ Macros.foo(1, 2, 3, 4, 5)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-implicit-over-varargs.check b/test/files/run/macro-expand-varargs-implicit-over-varargs.check
new file mode 100644
index 0000000..2c174a8
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-implicit-over-varargs.check
@@ -0,0 +1 @@
+WrappedArray(1, 2, 3, 4, 5)
diff --git a/test/files/run/macro-expand-varargs-implicit-over-varargs.flags b/test/files/run/macro-expand-varargs-implicit-over-varargs.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-implicit-over-varargs.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala b/test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala
new file mode 100644
index 0000000..2066893
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-implicit-over-varargs/Impls_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def myprintln(xs: Int*) = {
+ println(xs)
+ }
+
+ def foo(c: Ctx)(xs: c.Expr[Int]*) = {
+ import c.universe._
+ val body = Apply(Select(Ident(newTermName("Impls")), newTermName("myprintln")), xs.map(_.tree).toList)
+ c.Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-expand-varargs-implicit-over-varargs/Macros_Test_2.scala b/test/files/run/macro-expand-varargs-implicit-over-varargs/Macros_Test_2.scala
new file mode 100644
index 0000000..2311ca0
--- /dev/null
+++ b/test/files/run/macro-expand-varargs-implicit-over-varargs/Macros_Test_2.scala
@@ -0,0 +1,7 @@
+object Macros {
+ def foo(xs: Int*) = macro Impls.foo
+}
+
+object Test extends App {
+ Macros.foo(1, 2, 3, 4, 5)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-impl-default-params.check b/test/files/run/macro-impl-default-params.check
new file mode 100644
index 0000000..b32e345
--- /dev/null
+++ b/test/files/run/macro-impl-default-params.check
@@ -0,0 +1,5 @@
+foo_targs:
+invoking foo_targs...
+type of prefix is: Nothing
+type of prefix tree is: Macros[Int]
+U is: String
diff --git a/test/files/run/macro-impl-default-params.flags b/test/files/run/macro-impl-default-params.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-impl-default-params.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-impl-default-params/Impls_Macros_1.scala b/test/files/run/macro-impl-default-params/Impls_Macros_1.scala
new file mode 100644
index 0000000..7c40045
--- /dev/null
+++ b/test/files/run/macro-impl-default-params/Impls_Macros_1.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo_targs[T, U: c.WeakTypeTag](c: Ctx = null)(x: c.Expr[Int] = null) = {
+ import c.{prefix => prefix}
+ import c.universe._
+ val U = implicitly[c.WeakTypeTag[U]]
+ val body = Block(List(
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("invoking foo_targs...")))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("type of prefix is: " + prefix.staticType)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("type of prefix tree is: " + prefix.tree.tpe)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("U is: " + U.tpe))))),
+ Literal(Constant(())))
+ c.Expr[Unit](body)
+ }
+}
+
+class Macros[T] {
+ def foo_targs[U](x: Int) = macro Impls.foo_targs[T, U]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-impl-default-params/Test_2.scala b/test/files/run/macro-impl-default-params/Test_2.scala
new file mode 100644
index 0000000..90e850d
--- /dev/null
+++ b/test/files/run/macro-impl-default-params/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println("foo_targs:")
+ new Macros[Int]().foo_targs[String](42)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-impl-rename-context.check b/test/files/run/macro-impl-rename-context.check
new file mode 100644
index 0000000..6a34e5f
--- /dev/null
+++ b/test/files/run/macro-impl-rename-context.check
@@ -0,0 +1,2 @@
+foo
+invoking foo...
diff --git a/test/files/run/macro-impl-rename-context.flags b/test/files/run/macro-impl-rename-context.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-impl-rename-context.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala b/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala
new file mode 100644
index 0000000..56c23f5
--- /dev/null
+++ b/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(unconventionalName: Ctx)(x: unconventionalName.Expr[Int]) = {
+ import unconventionalName.universe._
+ val body = Block(List(
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant("invoking foo..."))))),
+ Literal(Constant(())))
+ unconventionalName.Expr[Unit](body)
+ }
+}
+
+object Macros {
+ def foo(x: Int) = macro Impls.foo
+}
diff --git a/test/files/run/macro-impl-rename-context/Test_2.scala b/test/files/run/macro-impl-rename-context/Test_2.scala
new file mode 100644
index 0000000..bd9c493
--- /dev/null
+++ b/test/files/run/macro-impl-rename-context/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println("foo")
+ Macros.foo(42)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype.check b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype.check
new file mode 100644
index 0000000..9161951
--- /dev/null
+++ b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype.check
@@ -0,0 +1,5 @@
+reflective compilation has failed:
+
+type mismatch;
+ found : String("42")
+ required: Int
diff --git a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype.flags b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Impls_Macros_1.scala b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Impls_Macros_1.scala
new file mode 100644
index 0000000..b3babd8
--- /dev/null
+++ b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Impls_Macros_1.scala
@@ -0,0 +1,12 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx): c.Expr[Int] = {
+ import c.universe._
+ c.Expr(Literal(Constant("42")))
+ }
+}
+
+object Macros {
+ def foo: Int = macro Impls.foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala
new file mode 100644
index 0000000..0b9986e
--- /dev/null
+++ b/test/files/run/macro-invalidret-doesnt-conform-to-def-rettype/Test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ val tree = Select(Ident(newTermName("Macros")), newTermName("foo"))
+ try cm.mkToolBox().eval(tree)
+ catch { case ex: Throwable => println(ex.getMessage) }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-invalidret-nontypeable.check b/test/files/run/macro-invalidret-nontypeable.check
new file mode 100644
index 0000000..cf7acb0
--- /dev/null
+++ b/test/files/run/macro-invalidret-nontypeable.check
@@ -0,0 +1,3 @@
+reflective compilation has failed:
+
+not found: value IDoNotExist
diff --git a/test/files/run/macro-invalidret-nontypeable.flags b/test/files/run/macro-invalidret-nontypeable.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-invalidret-nontypeable.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala b/test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala
new file mode 100644
index 0000000..fb0d552
--- /dev/null
+++ b/test/files/run/macro-invalidret-nontypeable/Impls_Macros_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx) = {
+ import c.universe._
+ val body = Ident(newTermName("IDoNotExist"))
+ c.Expr[Int](body)
+ }
+}
+
+object Macros {
+ def foo = macro Impls.foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-invalidret-nontypeable/Test_2.scala b/test/files/run/macro-invalidret-nontypeable/Test_2.scala
new file mode 100644
index 0000000..0daee49
--- /dev/null
+++ b/test/files/run/macro-invalidret-nontypeable/Test_2.scala
@@ -0,0 +1,8 @@
+ object Test extends App {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ val tree = Select(Ident(newTermName("Macros")), newTermName("foo"))
+ try cm.mkToolBox().eval(tree)
+ catch { case ex: Throwable => println(ex.getMessage) }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-invalidusage-badret.check b/test/files/run/macro-invalidusage-badret.check
new file mode 100644
index 0000000..221732e
--- /dev/null
+++ b/test/files/run/macro-invalidusage-badret.check
@@ -0,0 +1,5 @@
+reflective compilation has failed:
+
+type mismatch;
+ found : Int(42)
+ required: String
diff --git a/test/files/run/macro-invalidusage-badret.flags b/test/files/run/macro-invalidusage-badret.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-invalidusage-badret.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-invalidusage-badret/Impls_Macros_1.scala b/test/files/run/macro-invalidusage-badret/Impls_Macros_1.scala
new file mode 100644
index 0000000..0d840ee
--- /dev/null
+++ b/test/files/run/macro-invalidusage-badret/Impls_Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int]) = x
+}
+
+object Macros {
+ def foo(x: Int) = macro Impls.foo
+}
diff --git a/test/files/run/macro-invalidusage-badret/Test_2.scala b/test/files/run/macro-invalidusage-badret/Test_2.scala
new file mode 100644
index 0000000..5cb0be5
--- /dev/null
+++ b/test/files/run/macro-invalidusage-badret/Test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ val tree = Typed(Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(42)))), Ident(newTypeName("String")))
+ try cm.mkToolBox().eval(tree)
+ catch { case ex: Throwable => println(ex.getMessage) }
+}
diff --git a/test/files/run/macro-invalidusage-partialapplication-with-tparams.check b/test/files/run/macro-invalidusage-partialapplication-with-tparams.check
new file mode 100644
index 0000000..f1d5e92
--- /dev/null
+++ b/test/files/run/macro-invalidusage-partialapplication-with-tparams.check
@@ -0,0 +1,3 @@
+reflective compilation has failed:
+
+macros cannot be partially applied
diff --git a/test/files/run/macro-invalidusage-partialapplication-with-tparams.flags b/test/files/run/macro-invalidusage-partialapplication-with-tparams.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-invalidusage-partialapplication-with-tparams.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala
new file mode 100644
index 0000000..4583a72
--- /dev/null
+++ b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Impls_Macros_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[T: c.WeakTypeTag](c: Ctx)(x: c.Expr[T]) = {
+ import c.universe._
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(x.tree.toString))))
+ c.Expr[Unit](body)
+ }
+}
+
+object Macros {
+ def foo[T](x: T) = macro Impls.foo[T]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala
new file mode 100644
index 0000000..e453d0b
--- /dev/null
+++ b/test/files/run/macro-invalidusage-partialapplication-with-tparams/Test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ val tree = Select(Ident(newTermName("Macros")), newTermName("foo"))
+ try cm.mkToolBox().eval(tree)
+ catch { case ex: Throwable => println(ex.getMessage) }
+}
diff --git a/test/files/run/macro-invalidusage-partialapplication.check b/test/files/run/macro-invalidusage-partialapplication.check
new file mode 100644
index 0000000..f1d5e92
--- /dev/null
+++ b/test/files/run/macro-invalidusage-partialapplication.check
@@ -0,0 +1,3 @@
+reflective compilation has failed:
+
+macros cannot be partially applied
diff --git a/test/files/run/macro-invalidusage-partialapplication.flags b/test/files/run/macro-invalidusage-partialapplication.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-invalidusage-partialapplication.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala b/test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala
new file mode 100644
index 0000000..5866469
--- /dev/null
+++ b/test/files/run/macro-invalidusage-partialapplication/Impls_Macros_1.scala
@@ -0,0 +1,14 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int])(y: c.Expr[Int]) = {
+ import c.universe._
+ val sum = Apply(Select(x.tree, newTermName("$plus")), List(y.tree))
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(sum))
+ c.Expr[Unit](body)
+ }
+}
+
+object Macros {
+ def foo(x: Int)(y: Int) = macro Impls.foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-invalidusage-partialapplication/Test_2.scala b/test/files/run/macro-invalidusage-partialapplication/Test_2.scala
new file mode 100644
index 0000000..dc48c12
--- /dev/null
+++ b/test/files/run/macro-invalidusage-partialapplication/Test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ val tree = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(40))))
+ try cm.mkToolBox().eval(tree)
+ catch { case ex: Throwable => println(ex.getMessage) }
+}
diff --git a/test/files/run/macro-openmacros.check b/test/files/run/macro-openmacros.check
new file mode 100644
index 0000000..ba0ae3f
--- /dev/null
+++ b/test/files/run/macro-openmacros.check
@@ -0,0 +1,3 @@
+List(MacroContext(foo at source-Test_2.scala,line-2,offset=35 +0))
+List(MacroContext(foo at source-Test_2.scala,line-2,offset=35 +1), MacroContext(foo at source-Test_2.scala,line-2,offset=35 +0))
+List(MacroContext(foo at source-Test_2.scala,line-2,offset=35 +2), MacroContext(foo at source-Test_2.scala,line-2,offset=35 +1), MacroContext(foo at source-Test_2.scala,line-2,offset=35 +0))
diff --git a/test/files/run/macro-openmacros.flags b/test/files/run/macro-openmacros.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-openmacros.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-openmacros/Impls_Macros_1.scala b/test/files/run/macro-openmacros/Impls_Macros_1.scala
new file mode 100644
index 0000000..b863ac0
--- /dev/null
+++ b/test/files/run/macro-openmacros/Impls_Macros_1.scala
@@ -0,0 +1,25 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context): c.Expr[Unit] = {
+ // we're macros, so we can reflect against our source path
+ // so we don't need any partests to clean up after us!
+ val dir = c.enclosingUnit.source.file.file.getCanonicalFile.getParentFile
+ def normalizePaths(s: String) = {
+ val base = (dir.getCanonicalPath + java.io.File.separator).replace('\\', '/')
+ var regex = """\Q%s\E""" format base
+ val isWin = System.getProperty("os.name", "") startsWith "Windows"
+ if (isWin) regex = "(?i)" + regex
+ s.replace('\\', '/').replaceAll(regex, "")
+ }
+
+ import c.universe._
+ val next = if (c.enclosingMacros.length < 3) c.Expr[Unit](Select(Ident(c.mirror.staticModule("Macros")), newTermName("foo"))) else c.literalUnit
+ c.universe.reify {
+ println(c.literal(normalizePaths(c.enclosingMacros.toString)).splice)
+ next.splice
+ }
+ }
+
+ def foo = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/macro-openmacros/Test_2.scala b/test/files/run/macro-openmacros/Test_2.scala
new file mode 100644
index 0000000..5d19639
--- /dev/null
+++ b/test/files/run/macro-openmacros/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.foo
+}
diff --git a/test/files/run/macro-quasiinvalidbody-c.check b/test/files/run/macro-quasiinvalidbody-c.check
new file mode 100644
index 0000000..f70d7bb
--- /dev/null
+++ b/test/files/run/macro-quasiinvalidbody-c.check
@@ -0,0 +1 @@
+42
\ No newline at end of file
diff --git a/test/files/run/macro-quasiinvalidbody-c.flags b/test/files/run/macro-quasiinvalidbody-c.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-quasiinvalidbody-c.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-quasiinvalidbody-c/Impls_Macros_1.scala b/test/files/run/macro-quasiinvalidbody-c/Impls_Macros_1.scala
new file mode 100644
index 0000000..6c14428
--- /dev/null
+++ b/test/files/run/macro-quasiinvalidbody-c/Impls_Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Macros {
+ object Impls {
+ def foo(c: Ctx)(x: c.Expr[Any]) = x
+ }
+
+ def foo(x: Any) = macro Impls.foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-quasiinvalidbody-c/Test_2.scala b/test/files/run/macro-quasiinvalidbody-c/Test_2.scala
new file mode 100644
index 0000000..dec29aa
--- /dev/null
+++ b/test/files/run/macro-quasiinvalidbody-c/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros._
+ println(foo(42))
+}
\ No newline at end of file
diff --git a/test/files/run/macro-range.check b/test/files/run/macro-range.check
new file mode 100644
index 0000000..0719398
--- /dev/null
+++ b/test/files/run/macro-range.check
@@ -0,0 +1,9 @@
+1
+2
+3
+4
+5
+6
+7
+8
+9
diff --git a/test/files/run/macro-range.flags b/test/files/run/macro-range.flags
new file mode 100644
index 0000000..5e5dd6c
--- /dev/null
+++ b/test/files/run/macro-range.flags
@@ -0,0 +1 @@
+-language:experimental.macros
diff --git a/test/files/run/macro-range/Common_1.scala b/test/files/run/macro-range/Common_1.scala
new file mode 100644
index 0000000..5c4bc21
--- /dev/null
+++ b/test/files/run/macro-range/Common_1.scala
@@ -0,0 +1,47 @@
+import reflect.macros.Context
+
+abstract class RangeDefault {
+ val from, to: Int
+ def foreach(f: Int => Unit) = {
+ var i = from
+ while (i < to) { f(i); i += 1 }
+ }
+}
+
+/** This class should go into reflect.macro once it is a bit more stable. */
+abstract class Utils {
+ val context: Context
+ import context.universe._
+
+ class TreeSubstituter(from: List[Symbol], to: List[Tree]) extends Transformer {
+ override def transform(tree: Tree): Tree = tree match {
+ case Ident(_) =>
+ def subst(from: List[Symbol], to: List[Tree]): Tree =
+ if (from.isEmpty) tree
+ else if (tree.symbol == from.head) to.head.duplicate // TODO: does it ever make sense *not* to perform a shallowDuplicate on `to.head`?
+ else subst(from.tail, to.tail);
+ subst(from, to)
+ case _ =>
+ val tree1 = super.transform(tree)
+ if (tree1 ne tree) tree1.tpe = null
+ tree1
+ }
+ }
+ def makeApply(fn: Tree, args: List[Tree]): Tree = fn match {
+ case Function(vparams, body) =>
+ new TreeSubstituter(vparams map (_.symbol), args) transform body
+ case Block(stats, expr) =>
+ Block(stats, makeApply(expr, args))
+ case _ =>
+ // todo. read the compiler config and print if -Ydebug is set
+ //println("no beta on "+fn+" "+fn.getClass)
+ Apply(fn, args)
+ }
+ def makeWhile(lname: TermName, cond: Tree, body: Tree): Tree = {
+ val continu = Apply(Ident(lname), Nil)
+ val rhs = If(cond, Block(List(body), continu), Literal(Constant()))
+ LabelDef(lname, Nil, rhs)
+ }
+ def makeBinop(left: Tree, op: String, right: Tree): Tree =
+ Apply(Select(left, newTermName(op)), List(right))
+}
diff --git a/test/files/run/macro-range/Expansion_Impossible_2.scala b/test/files/run/macro-range/Expansion_Impossible_2.scala
new file mode 100644
index 0000000..57e0cee
--- /dev/null
+++ b/test/files/run/macro-range/Expansion_Impossible_2.scala
@@ -0,0 +1,53 @@
+import reflect.macros.Context
+
+object Impls {
+ def foreach(c: Context)(f: c.Expr[Int => Unit]): c.Expr[Unit] = {
+ // todo. read the compiler config and print if -Ydebug is set
+ //println("macro-expand, _this = "+ _this)
+ object utils extends Utils { val context: c.type = c }
+ import utils._
+ import c.universe._
+ import Flag._
+
+ val initName = nme.CONSTRUCTOR
+ // Either:
+ // scala"{ var i = $low; val h = $hi; while (i < h) { $f(i); i = i + 1 } }
+ // or:
+ // scala"($_this: RangeDefault).foreach($f)"
+ c.Expr(c.prefix.tree match {
+ case Apply(Select(New(tpt), initName), List(lo, hi)) if tpt.symbol.fullName == "Range" =>
+ val iname = newTermName("$i")
+ val hname = newTermName("$h")
+ def iref = Ident(iname)
+ def href = Ident(hname)
+ val labelname = newTermName("$while")
+ val cond = makeBinop(iref, "$less", href)
+ val body = Block(
+ List(makeApply(f.tree, List(iref))),
+ Assign(iref, makeBinop(iref, "$plus", Literal(Constant(1)))))
+ val generated =
+ Block(
+ List(
+ ValDef(Modifiers(MUTABLE), iname, TypeTree(), lo),
+ ValDef(Modifiers(), hname, TypeTree(), hi)),
+ makeWhile(labelname, cond, body))
+ // todo. read the compiler config and print if -Ydebug is set
+ //tools.nsc.util.trace("generated: ")(generated)
+ generated
+ case _ =>
+ Apply(
+ Select(
+ Typed(c.prefix.tree, Ident(newTypeName("RangeDefault"))),
+ newTermName("foreach")),
+ List(f.tree))
+ })
+ }
+}
+
+class Range(val from: Int, val to: Int) extends RangeDefault {
+ override def foreach(f: Int => Unit): Unit = macro Impls.foreach
+}
+
+object Test extends App {
+ new Range(1, 10) foreach println
+}
\ No newline at end of file
diff --git a/test/files/run/macro-range/Expansion_Possible_3.scala b/test/files/run/macro-range/Expansion_Possible_3.scala
new file mode 100644
index 0000000..e7ecbcc
--- /dev/null
+++ b/test/files/run/macro-range/Expansion_Possible_3.scala
@@ -0,0 +1,7 @@
+class Range(val from: Int, val to: Int) extends RangeDefault {
+ override def foreach(f: Int => Unit): Unit = macro Impls.foreach
+}
+
+object Test extends App {
+ new Range(1, 10) foreach println
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reflective-ma-normal-mdmi.check b/test/files/run/macro-reflective-ma-normal-mdmi.check
new file mode 100644
index 0000000..ac4213d
--- /dev/null
+++ b/test/files/run/macro-reflective-ma-normal-mdmi.check
@@ -0,0 +1 @@
+43
\ No newline at end of file
diff --git a/test/files/run/macro-reflective-ma-normal-mdmi.flags b/test/files/run/macro-reflective-ma-normal-mdmi.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-reflective-ma-normal-mdmi.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala b/test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala
new file mode 100644
index 0000000..fa55933
--- /dev/null
+++ b/test/files/run/macro-reflective-ma-normal-mdmi/Impls_Macros_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int]) = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+ c.Expr[Int](body)
+ }
+}
+
+object Macros {
+ def foo(x: Int) = macro Impls.foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala b/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala
new file mode 100644
index 0000000..2e64c01
--- /dev/null
+++ b/test/files/run/macro-reflective-ma-normal-mdmi/Test_2.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ val tree = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(42))))
+ println(cm.mkToolBox().eval(tree))
+}
diff --git a/test/files/run/macro-reflective-mamd-normal-mi.check b/test/files/run/macro-reflective-mamd-normal-mi.check
new file mode 100644
index 0000000..ac4213d
--- /dev/null
+++ b/test/files/run/macro-reflective-mamd-normal-mi.check
@@ -0,0 +1 @@
+43
\ No newline at end of file
diff --git a/test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala b/test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala
new file mode 100644
index 0000000..5d7e077
--- /dev/null
+++ b/test/files/run/macro-reflective-mamd-normal-mi/Impls_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int]) = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+ c.Expr[Int](body)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala b/test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala
new file mode 100644
index 0000000..7056000
--- /dev/null
+++ b/test/files/run/macro-reflective-mamd-normal-mi/Macros_Test_2.scala
@@ -0,0 +1,20 @@
+//object Macros {
+// def foo(x: Int) = macro Impls.foo
+//}
+
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.universe.Flag._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+
+ val macrobody = Select(Ident(newTermName("Impls")), newTermName("foo"))
+ val macroparam = ValDef(NoMods, newTermName("x"), TypeTree(definitions.IntClass.toType), EmptyTree)
+ val macrodef = DefDef(Modifiers(MACRO), newTermName("foo"), Nil, List(List(macroparam)), TypeTree(), macrobody)
+ val modulector = DefDef(NoMods, nme.CONSTRUCTOR, Nil, List(List()), TypeTree(), Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(()))))
+ val module = ModuleDef(NoMods, newTermName("Macros"), Template(Nil, emptyValDef, List(modulector, macrodef)))
+ val macroapp = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(42))))
+ val tree = Block(List(macrodef, module), macroapp)
+ val toolbox = cm.mkToolBox(options = "-language:experimental.macros")
+ println(toolbox.eval(tree))
+}
diff --git a/test/files/run/macro-reify-abstypetag-notypeparams.check b/test/files/run/macro-reify-abstypetag-notypeparams.check
new file mode 100644
index 0000000..7732c10
--- /dev/null
+++ b/test/files/run/macro-reify-abstypetag-notypeparams.check
@@ -0,0 +1,2 @@
+TypeTag[Int]
+TypeTag[List[Int]]
diff --git a/test/files/run/macro-reify-abstypetag-notypeparams/Test.scala b/test/files/run/macro-reify-abstypetag-notypeparams/Test.scala
new file mode 100644
index 0000000..73c2d05
--- /dev/null
+++ b/test/files/run/macro-reify-abstypetag-notypeparams/Test.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ println(implicitly[WeakTypeTag[Int]])
+ println(implicitly[WeakTypeTag[List[Int]]])
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-abstypetag-typeparams-notags.check b/test/files/run/macro-reify-abstypetag-typeparams-notags.check
new file mode 100644
index 0000000..a741d11
--- /dev/null
+++ b/test/files/run/macro-reify-abstypetag-typeparams-notags.check
@@ -0,0 +1,2 @@
+WeakTypeTag[T]
+WeakTypeTag[List[T]]
diff --git a/test/files/run/macro-reify-abstypetag-typeparams-notags/Test.scala b/test/files/run/macro-reify-abstypetag-typeparams-notags/Test.scala
new file mode 100644
index 0000000..4ba2231
--- /dev/null
+++ b/test/files/run/macro-reify-abstypetag-typeparams-notags/Test.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def fooNoTypeTag[T] = {
+ println(implicitly[WeakTypeTag[T]])
+ println(implicitly[WeakTypeTag[List[T]]])
+ }
+ fooNoTypeTag[Int]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-abstypetag-typeparams-tags.check b/test/files/run/macro-reify-abstypetag-typeparams-tags.check
new file mode 100644
index 0000000..e225e57
--- /dev/null
+++ b/test/files/run/macro-reify-abstypetag-typeparams-tags.check
@@ -0,0 +1,2 @@
+TypeTag[Int]
+WeakTypeTag[List[Int]]
diff --git a/test/files/run/macro-reify-abstypetag-typeparams-tags/Test.scala b/test/files/run/macro-reify-abstypetag-typeparams-tags/Test.scala
new file mode 100644
index 0000000..70ca615
--- /dev/null
+++ b/test/files/run/macro-reify-abstypetag-typeparams-tags/Test.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def fooTypeTag[T: WeakTypeTag] = {
+ println(implicitly[WeakTypeTag[T]])
+ println(implicitly[WeakTypeTag[List[T]]])
+ }
+ fooTypeTag[Int]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-abstypetag-usetypetag.check b/test/files/run/macro-reify-abstypetag-usetypetag.check
new file mode 100644
index 0000000..e225e57
--- /dev/null
+++ b/test/files/run/macro-reify-abstypetag-usetypetag.check
@@ -0,0 +1,2 @@
+TypeTag[Int]
+WeakTypeTag[List[Int]]
diff --git a/test/files/run/macro-reify-abstypetag-usetypetag/Test.scala b/test/files/run/macro-reify-abstypetag-usetypetag/Test.scala
new file mode 100644
index 0000000..ecae411
--- /dev/null
+++ b/test/files/run/macro-reify-abstypetag-usetypetag/Test.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def fooTypeTag[T: TypeTag] = {
+ println(implicitly[WeakTypeTag[T]])
+ println(implicitly[WeakTypeTag[List[T]]])
+ }
+ fooTypeTag[Int]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-basic.check b/test/files/run/macro-reify-basic.check
new file mode 100644
index 0000000..3b18e51
--- /dev/null
+++ b/test/files/run/macro-reify-basic.check
@@ -0,0 +1 @@
+hello world
diff --git a/test/files/run/macro-reify-basic.flags b/test/files/run/macro-reify-basic.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-reify-basic.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-reify-basic/Macros_1.scala b/test/files/run/macro-reify-basic/Macros_1.scala
new file mode 100644
index 0000000..3f6720f
--- /dev/null
+++ b/test/files/run/macro-reify-basic/Macros_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Macros {
+ def foo(s: String) = macro Impls.foo
+
+ object Impls {
+ def foo(c: Ctx)(s: c.Expr[String]) = c.universe.reify {
+ println("hello " + s.splice)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-basic/Test_2.scala b/test/files/run/macro-reify-basic/Test_2.scala
new file mode 100644
index 0000000..0a762f7
--- /dev/null
+++ b/test/files/run/macro-reify-basic/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.foo("world")
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-freevars.check b/test/files/run/macro-reify-freevars.check
new file mode 100644
index 0000000..f138068
--- /dev/null
+++ b/test/files/run/macro-reify-freevars.check
@@ -0,0 +1,3 @@
+reflective compilation has failed:
+
+Macro expansion contains free term variable code defined by map in Macros_1.scala:9:9. Have you forgotten to use splice when splicing this variable into a reifee? If you have troubles tracking free term variables, consider using -Xlog-free-terms
diff --git a/test/files/run/macro-reify-freevars.flags b/test/files/run/macro-reify-freevars.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-reify-freevars.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-reify-freevars/Macros_1.scala b/test/files/run/macro-reify-freevars/Macros_1.scala
new file mode 100644
index 0000000..20f80c0
--- /dev/null
+++ b/test/files/run/macro-reify-freevars/Macros_1.scala
@@ -0,0 +1,20 @@
+package scala.collection.slick
+
+object QueryableMacros{
+ def map[T:c.WeakTypeTag, S:c.WeakTypeTag]
+ (c: scala.reflect.macros.Context)
+ (projection: c.Expr[T => S])
+ : c.Expr[scala.collection.slick.Queryable[S]] = {
+ import c.universe._
+ val code = EmptyTree
+ c.universe.reify{
+ Queryable.factory[S]( code.asInstanceOf[reflect.runtime.universe.Tree] )
+ }
+ }
+}
+class Queryable[T]{
+ def map[S]( projection: T => S ) : Queryable[S] = macro QueryableMacros.map[T,S]
+}
+object Queryable{
+ def factory[S]( projection:reflect.runtime.universe.Tree ) : Queryable[S] = null
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-freevars/Test_2.scala b/test/files/run/macro-reify-freevars/Test_2.scala
new file mode 100644
index 0000000..7af9d89
--- /dev/null
+++ b/test/files/run/macro-reify-freevars/Test_2.scala
@@ -0,0 +1,11 @@
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ val q = New(AppliedTypeTree(Select(Select(Select(Ident(newTermName("scala")), newTermName("collection")), newTermName("slick")), newTypeName("Queryable")), List(Ident(newTermName("Int")))))
+ val x = ValDef(NoMods, newTermName("x"), Ident(newTermName("Int")), EmptyTree)
+ val fn = Function(List(x), Apply(Select(Ident(newTermName("x")), newTermName("$plus")), List(Literal(Constant("5")))))
+ val tree = Apply(Select(q, newTermName("map")), List(fn))
+ try cm.mkToolBox().eval(tree)
+ catch { case ex: Throwable => println(ex.getMessage) }
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/macro-reify-nested-a.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/macro-reify-nested-a.check
diff --git a/test/files/run/macro-reify-nested-a.flags b/test/files/run/macro-reify-nested-a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-reify-nested-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-a/Impls_Macros_1.scala b/test/files/run/macro-reify-nested-a/Impls_Macros_1.scala
new file mode 100644
index 0000000..b4351c2
--- /dev/null
+++ b/test/files/run/macro-reify-nested-a/Impls_Macros_1.scala
@@ -0,0 +1,46 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.macros.Context
+
+case class Utils[C <: Context]( c:C ) {
+ import c.universe._
+ import c.{Tree=>_}
+ object removeDoubleReify extends c.universe.Transformer {
+ def apply( tree:Tree ) = transform(tree)
+ override def transform(tree: Tree): Tree = {
+ super.transform {
+ tree match {
+ case Apply(TypeApply(Select(_this, termname), _), reification::Nil )
+ if termname.toString == "factory" => c.unreifyTree(reification)
+ case Apply(Select(_this, termname), reification::Nil )
+ if termname.toString == "factory" => c.unreifyTree(reification)
+ case _ => tree
+ }
+ }
+ }
+ }
+}
+object QueryableMacros{
+ def _helper[C <: Context,S:c.WeakTypeTag]( c:C )( name:String, projection:c.Expr[_] ) = {
+ import c.universe._
+ import treeBuild._
+ val element_type = implicitly[c.WeakTypeTag[S]].tpe
+ val foo = c.Expr[ru.Expr[Queryable[S]]](
+ c.reifyTree( mkRuntimeUniverseRef, EmptyTree, c.typeCheck(
+ Utils[c.type](c).removeDoubleReify(
+ Apply(Select(c.prefix.tree, newTermName( name )), List( projection.tree ))
+ ).asInstanceOf[Tree]
+ )))
+ c.universe.reify{ Queryable.factory[S]( foo.splice )}
+ }
+ def map[T:c.WeakTypeTag, S:c.WeakTypeTag]
+ (c: scala.reflect.macros.Context)
+ (projection: c.Expr[T => S]): c.Expr[Queryable[S]] = _helper[c.type,S]( c )( "_map", projection )
+}
+class Queryable[T]{
+ def _map[S]( projection: T => S ) : Queryable[S] = ???
+ def map[S]( projection: T => S ) : Queryable[S] = macro QueryableMacros.map[T,S]
+}
+object Queryable{
+ def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-a/Test_2.scala b/test/files/run/macro-reify-nested-a/Test_2.scala
new file mode 100644
index 0000000..fa0eb37
--- /dev/null
+++ b/test/files/run/macro-reify-nested-a/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App{
+ val q : Queryable[Any] = new Queryable[Any]
+ q.map(e1 => q.map(e2=>e1))
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/macro-reify-nested-b.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/macro-reify-nested-b.check
diff --git a/test/files/run/macro-reify-nested-b.flags b/test/files/run/macro-reify-nested-b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-reify-nested-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-b/Impls_Macros_1.scala b/test/files/run/macro-reify-nested-b/Impls_Macros_1.scala
new file mode 100644
index 0000000..b4351c2
--- /dev/null
+++ b/test/files/run/macro-reify-nested-b/Impls_Macros_1.scala
@@ -0,0 +1,46 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.macros.Context
+
+case class Utils[C <: Context]( c:C ) {
+ import c.universe._
+ import c.{Tree=>_}
+ object removeDoubleReify extends c.universe.Transformer {
+ def apply( tree:Tree ) = transform(tree)
+ override def transform(tree: Tree): Tree = {
+ super.transform {
+ tree match {
+ case Apply(TypeApply(Select(_this, termname), _), reification::Nil )
+ if termname.toString == "factory" => c.unreifyTree(reification)
+ case Apply(Select(_this, termname), reification::Nil )
+ if termname.toString == "factory" => c.unreifyTree(reification)
+ case _ => tree
+ }
+ }
+ }
+ }
+}
+object QueryableMacros{
+ def _helper[C <: Context,S:c.WeakTypeTag]( c:C )( name:String, projection:c.Expr[_] ) = {
+ import c.universe._
+ import treeBuild._
+ val element_type = implicitly[c.WeakTypeTag[S]].tpe
+ val foo = c.Expr[ru.Expr[Queryable[S]]](
+ c.reifyTree( mkRuntimeUniverseRef, EmptyTree, c.typeCheck(
+ Utils[c.type](c).removeDoubleReify(
+ Apply(Select(c.prefix.tree, newTermName( name )), List( projection.tree ))
+ ).asInstanceOf[Tree]
+ )))
+ c.universe.reify{ Queryable.factory[S]( foo.splice )}
+ }
+ def map[T:c.WeakTypeTag, S:c.WeakTypeTag]
+ (c: scala.reflect.macros.Context)
+ (projection: c.Expr[T => S]): c.Expr[Queryable[S]] = _helper[c.type,S]( c )( "_map", projection )
+}
+class Queryable[T]{
+ def _map[S]( projection: T => S ) : Queryable[S] = ???
+ def map[S]( projection: T => S ) : Queryable[S] = macro QueryableMacros.map[T,S]
+}
+object Queryable{
+ def factory[S]( projection:ru.Expr[Queryable[S]] ) : Queryable[S] = null
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-nested-b/Test_2.scala b/test/files/run/macro-reify-nested-b/Test_2.scala
new file mode 100644
index 0000000..fa13f57
--- /dev/null
+++ b/test/files/run/macro-reify-nested-b/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App{
+ val q : Queryable[Any] = new Queryable[Any]
+ q.map(e1 => q.map(e2=>e1).map(e2=>e1))
+}
\ No newline at end of file
diff --git a/test/files/run/syncchannel.check b/test/files/run/macro-reify-ref-to-packageless.check
similarity index 100%
copy from test/files/run/syncchannel.check
copy to test/files/run/macro-reify-ref-to-packageless.check
diff --git a/test/files/run/macro-reify-ref-to-packageless.flags b/test/files/run/macro-reify-ref-to-packageless.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-reify-ref-to-packageless.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-reify-ref-to-packageless/Impls_1.scala b/test/files/run/macro-reify-ref-to-packageless/Impls_1.scala
new file mode 100644
index 0000000..f19fd23
--- /dev/null
+++ b/test/files/run/macro-reify-ref-to-packageless/Impls_1.scala
@@ -0,0 +1,6 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ val `Answer to the Ultimate Question of Life, the Universe, and Everything` = 42
+ def foo(c: Ctx) = c.universe.reify { `Answer to the Ultimate Question of Life, the Universe, and Everything` }
+}
diff --git a/test/files/run/macro-reify-ref-to-packageless/Test_2.scala b/test/files/run/macro-reify-ref-to-packageless/Test_2.scala
new file mode 100644
index 0000000..9d475f7
--- /dev/null
+++ b/test/files/run/macro-reify-ref-to-packageless/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ def foo = macro Impls.foo
+ println(foo)
+}
\ No newline at end of file
diff --git a/test/files/run/syncchannel.check b/test/files/run/macro-reify-splice-outside-reify.check
similarity index 100%
copy from test/files/run/syncchannel.check
copy to test/files/run/macro-reify-splice-outside-reify.check
diff --git a/test/files/run/macro-reify-splice-outside-reify.flags b/test/files/run/macro-reify-splice-outside-reify.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-reify-splice-outside-reify.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
new file mode 100644
index 0000000..5330d0e
--- /dev/null
+++ b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
@@ -0,0 +1,13 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int]) = {
+ val x1 = c.Expr[Int](c.resetAllAttrs(x.tree))
+// was: c.literal(x1.splice)
+ c.literal(c.eval(x1))
+ }
+}
+
+object Macros {
+ def foo(x: Int) = macro Impls.foo
+}
diff --git a/test/files/run/macro-reify-splice-outside-reify/Test_2.scala b/test/files/run/macro-reify-splice-outside-reify/Test_2.scala
new file mode 100644
index 0000000..54bd03f
--- /dev/null
+++ b/test/files/run/macro-reify-splice-outside-reify/Test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ val tree = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant(42))))
+ try println(cm.mkToolBox().eval(tree))
+ catch { case ex: Throwable => println(ex.getMessage) }
+}
diff --git a/test/files/run/macro-reify-splice-splice.check b/test/files/run/macro-reify-splice-splice.check
new file mode 100644
index 0000000..3b18e51
--- /dev/null
+++ b/test/files/run/macro-reify-splice-splice.check
@@ -0,0 +1 @@
+hello world
diff --git a/test/files/run/macro-reify-splice-splice.flags b/test/files/run/macro-reify-splice-splice.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-reify-splice-splice.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-reify-splice-splice/Macros_1.scala b/test/files/run/macro-reify-splice-splice/Macros_1.scala
new file mode 100644
index 0000000..efdd5db
--- /dev/null
+++ b/test/files/run/macro-reify-splice-splice/Macros_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Macros {
+ def foo = macro Impls.foo
+
+ object Impls {
+ def foo(c: Ctx) = c.universe.reify {
+ { c.universe.reify(c.universe.reify("hello world")) }.splice.splice
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-splice-splice/Test_2.scala b/test/files/run/macro-reify-splice-splice/Test_2.scala
new file mode 100644
index 0000000..f697da6
--- /dev/null
+++ b/test/files/run/macro-reify-splice-splice/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println(Macros.foo)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-staticXXX.check b/test/files/run/macro-reify-staticXXX.check
new file mode 100644
index 0000000..2894fa5
--- /dev/null
+++ b/test/files/run/macro-reify-staticXXX.check
@@ -0,0 +1,12 @@
+object
+class
+object > object
+object > class
+package > object
+package > class
+object
+class
+object > object
+object > class
+package > object
+package > class
\ No newline at end of file
diff --git a/test/files/run/macro-reify-staticXXX.flags b/test/files/run/macro-reify-staticXXX.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-reify-staticXXX.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-reify-staticXXX/Macros_1.scala b/test/files/run/macro-reify-staticXXX/Macros_1.scala
new file mode 100644
index 0000000..f12c8f7
--- /dev/null
+++ b/test/files/run/macro-reify-staticXXX/Macros_1.scala
@@ -0,0 +1,48 @@
+import scala.reflect.macros.Context
+
+object B { override def toString = "object" }
+class C { override def toString = "class" }
+
+package foo {
+ object B { override def toString = "package > object" }
+ class C { override def toString = "package > class" }
+}
+
+object foo {
+ object B { override def toString = "object > object" }
+ class C { override def toString = "object > class" }
+}
+
+object packageless {
+ def impl(c: Context) = {
+ import c.universe._
+ reify {
+ println(B)
+ println(new C)
+ println(foo.B)
+ println(new foo.C)
+ println(_root_.foo.B)
+ println(new _root_.foo.C)
+ }
+ }
+
+ def test = macro impl
+}
+
+package packageful {
+ object Test {
+ def impl(c: Context) = {
+ import c.universe._
+ reify {
+ println(B)
+ println(new C)
+ println(foo.B)
+ println(new foo.C)
+ println(_root_.foo.B)
+ println(new _root_.foo.C)
+ }
+ }
+
+ def test = macro impl
+ }
+}
diff --git a/test/files/run/macro-reify-staticXXX/Test_2.scala b/test/files/run/macro-reify-staticXXX/Test_2.scala
new file mode 100644
index 0000000..6e8cc36
--- /dev/null
+++ b/test/files/run/macro-reify-staticXXX/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ packageless.test
+ packageful.Test.test
+}
diff --git a/test/files/run/macro-reify-tagful-a.check b/test/files/run/macro-reify-tagful-a.check
new file mode 100644
index 0000000..3f4c719
--- /dev/null
+++ b/test/files/run/macro-reify-tagful-a.check
@@ -0,0 +1 @@
+List(hello world)
diff --git a/test/files/run/macro-reify-tagful-a.flags b/test/files/run/macro-reify-tagful-a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-reify-tagful-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-reify-tagful-a/Macros_1.scala b/test/files/run/macro-reify-tagful-a/Macros_1.scala
new file mode 100644
index 0000000..f2512dc
--- /dev/null
+++ b/test/files/run/macro-reify-tagful-a/Macros_1.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.{Context => Ctx}
+
+object Macros {
+ def foo[T](s: T) = macro Impls.foo[T]
+
+ object Impls {
+ def foo[T: c.WeakTypeTag](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
+ List(s.splice)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-tagful-a/Test_2.scala b/test/files/run/macro-reify-tagful-a/Test_2.scala
new file mode 100644
index 0000000..4d27166
--- /dev/null
+++ b/test/files/run/macro-reify-tagful-a/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ val list: List[String] = Macros.foo("hello world")
+ println(list)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-tagless-a.check b/test/files/run/macro-reify-tagless-a.check
new file mode 100644
index 0000000..231741e
--- /dev/null
+++ b/test/files/run/macro-reify-tagless-a.check
@@ -0,0 +1,3 @@
+reflective compilation has failed:
+
+Macro expansion contains free type variable T defined by foo in Impls_Macros_1.scala:7:13. Have you forgotten to use c.WeakTypeTag annotation for this type parameter? If you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/macro-reify-tagless-a.flags b/test/files/run/macro-reify-tagless-a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-reify-tagless-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-reify-tagless-a/Impls_Macros_1.scala b/test/files/run/macro-reify-tagless-a/Impls_Macros_1.scala
new file mode 100644
index 0000000..96cfb75
--- /dev/null
+++ b/test/files/run/macro-reify-tagless-a/Impls_Macros_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Macros {
+ def foo[T](s: T) = macro Impls.foo[T]
+
+ object Impls {
+ def foo[T](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
+ List[T](s.splice)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-tagless-a/Test_2.scala b/test/files/run/macro-reify-tagless-a/Test_2.scala
new file mode 100644
index 0000000..584c4bd
--- /dev/null
+++ b/test/files/run/macro-reify-tagless-a/Test_2.scala
@@ -0,0 +1,14 @@
+object Test extends App {
+ //val list: List[String] = Macros.foo("hello world")
+ //println(list)
+
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ val tpt = AppliedTypeTree(Ident(definitions.ListClass), List(Ident(definitions.StringClass)))
+ val rhs = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant("hello world"))))
+ val list = ValDef(NoMods, newTermName("list"), tpt, rhs)
+ val tree = Block(List(list), Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Ident(list.name))))
+ try cm.mkToolBox().eval(tree)
+ catch { case ex: Throwable => println(ex.getMessage) }
+}
diff --git a/test/files/run/macro-reify-type.check b/test/files/run/macro-reify-type.check
new file mode 100644
index 0000000..ea5e70e
--- /dev/null
+++ b/test/files/run/macro-reify-type.check
@@ -0,0 +1 @@
+[B, That](f: Int => B)(implicit bf: scala.collection.generic.CanBuildFrom[List[Int],B,That])That
\ No newline at end of file
diff --git a/test/files/run/macro-reify-type.flags b/test/files/run/macro-reify-type.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-reify-type.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-reify-type/Macros_1.scala b/test/files/run/macro-reify-type/Macros_1.scala
new file mode 100644
index 0000000..06de057
--- /dev/null
+++ b/test/files/run/macro-reify-type/Macros_1.scala
@@ -0,0 +1,27 @@
+import scala.reflect.macros.Context
+import scala.reflect.runtime.{universe => ru}
+
+object StaticReflect {
+ def method[A](name: String): ru.Type = macro methodImpl[A]
+
+ def methodImpl[A: c.WeakTypeTag](c: Context)(name: c.Expr[String]): c.Expr[ru.Type] = {
+ import c.universe._
+
+ val nameName: TermName = name.tree match {
+ case Literal(Constant(str: String)) => newTermName(str)
+ case _ => c.error(c.enclosingPosition, s"Method name not constant.") ; return reify(ru.NoType)
+ }
+ val clazz = weakTypeOf[A]
+
+ clazz member nameName match {
+ case NoSymbol => c.error(c.enclosingPosition, s"No member called $nameName in $clazz.") ; reify(ru.NoType)
+ case member =>
+ val mtpe = member typeSignatureIn clazz
+ val mtag = c.reifyType(treeBuild.mkRuntimeUniverseRef, Select(treeBuild.mkRuntimeUniverseRef, newTermName("rootMirror")), mtpe)
+ val mtree = Select(mtag, newTermName("tpe"))
+
+ c.Expr[ru.Type](mtree)
+ }
+ }
+
+}
diff --git a/test/files/run/macro-reify-type/Test_2.scala b/test/files/run/macro-reify-type/Test_2.scala
new file mode 100644
index 0000000..9beaf98
--- /dev/null
+++ b/test/files/run/macro-reify-type/Test_2.scala
@@ -0,0 +1,21 @@
+import StaticReflect._
+
+object Test extends App {
+ //println(method[List[Int]]("distinct"))
+ println(method[List[Int]]("map"))
+ //val $u: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe;
+ //val $m: $u.Mirror = scala.reflect.runtime.universe.rootMirror;
+ //import $u._, $m._, Flag._
+ //val tpe = {
+ // val symdef$B2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), newTypeName("B"), NoPosition, DEFERRED | PARAM, false);
+ // val symdef$That2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), newTypeName("That"), NoPosition, DEFERRED | PARAM, false);
+ // val symdef$f2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), newTermName("f"), NoPosition, PARAM, false);
+ // val symdef$bf2 = build.newNestedSymbol(build.selectTerm(staticClass("scala.collection.TraversableLike"), "map"), newTermName("bf"), NoPosition, IMPLICIT | PARAM, false);
+ // build.setTypeSignature(symdef$B2, TypeBounds(staticClass("scala.Nothing").asType.toTypeConstructor, staticClass("scala.Any").asType.toTypeConstructor));
+ // build.setTypeSignature(symdef$That2, TypeBounds(staticClass("scala.Nothing").asType.toTypeConstructor, staticClass("scala.Any").asType.toTypeConstructor));
+ // build.setTypeSignature(symdef$f2, TypeRef(ThisType(staticPackage("scala").asModule.moduleClass), staticClass("scala.Function1"), List(staticClass("scala.Int").asType.toTypeConstructor, TypeRef(NoPrefix, symdef$B2, List()))));
+ // build.setTypeSignature(symdef$bf2, TypeRef(ThisType(staticPackage("scala.collection.generic").asModule.moduleClass), staticClass("scala.collection.generic.CanBuildFrom"), List(TypeRef(ThisType(staticPackage("scala.collection.immutable").asModule.moduleClass), staticClass("scala.collection.immutable.List"), List(staticClass("scala.Int").asType.toTypeConstructor)), TypeRef(NoPrefix, symdef$B2, List()), TypeRef(NoPrefix, symdef$That2, List()))));
+ // PolyType(List(symdef$B2, symdef$That2), MethodType(List(symdef$f2), MethodType(List(symdef$bf2), TypeRef(NoPrefix, symdef$That2, List()))))
+ //}
+ //println(tpe)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-typetag-notypeparams.check b/test/files/run/macro-reify-typetag-notypeparams.check
new file mode 100644
index 0000000..7732c10
--- /dev/null
+++ b/test/files/run/macro-reify-typetag-notypeparams.check
@@ -0,0 +1,2 @@
+TypeTag[Int]
+TypeTag[List[Int]]
diff --git a/test/files/run/macro-reify-typetag-notypeparams/Test.scala b/test/files/run/macro-reify-typetag-notypeparams/Test.scala
new file mode 100644
index 0000000..be9feac
--- /dev/null
+++ b/test/files/run/macro-reify-typetag-notypeparams/Test.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ println(implicitly[TypeTag[Int]])
+ println(implicitly[TypeTag[List[Int]]])
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-typetag-typeparams-tags.check b/test/files/run/macro-reify-typetag-typeparams-tags.check
new file mode 100644
index 0000000..7732c10
--- /dev/null
+++ b/test/files/run/macro-reify-typetag-typeparams-tags.check
@@ -0,0 +1,2 @@
+TypeTag[Int]
+TypeTag[List[Int]]
diff --git a/test/files/run/macro-reify-typetag-typeparams-tags/Test.scala b/test/files/run/macro-reify-typetag-typeparams-tags/Test.scala
new file mode 100644
index 0000000..be0ce33
--- /dev/null
+++ b/test/files/run/macro-reify-typetag-typeparams-tags/Test.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def fooTypeTag[T: TypeTag] = {
+ println(implicitly[TypeTag[T]])
+ println(implicitly[TypeTag[List[T]]])
+ }
+ fooTypeTag[Int]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-unreify.check b/test/files/run/macro-reify-unreify.check
new file mode 100644
index 0000000..7a6d53c
--- /dev/null
+++ b/test/files/run/macro-reify-unreify.check
@@ -0,0 +1 @@
+hello world = Expr[java.lang.String("hello world")]("hello world")
diff --git a/test/files/run/macro-reify-unreify.flags b/test/files/run/macro-reify-unreify.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-reify-unreify.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-reify-unreify/Macros_1.scala b/test/files/run/macro-reify-unreify/Macros_1.scala
new file mode 100644
index 0000000..9f04c13
--- /dev/null
+++ b/test/files/run/macro-reify-unreify/Macros_1.scala
@@ -0,0 +1,20 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Macros {
+ def foo(s: String) = macro Impls.foo
+
+ object Impls {
+ def foo(c: Ctx)(s: c.Expr[String]) = {
+ import c.universe._
+ import treeBuild._
+
+ val world = c.reifyTree(mkRuntimeUniverseRef, EmptyTree, s.tree)
+ val greeting = c.reifyTree(mkRuntimeUniverseRef, EmptyTree, c.typeCheck(Apply(Select(Literal(Constant("hello ")), newTermName("$plus")), List(c.unreifyTree(world)))))
+ val typedGreeting = c.Expr[String](greeting)
+
+ c.universe.reify {
+ println("hello " + s.splice + " = " + typedGreeting.splice)
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/macro-reify-unreify/Test_2.scala b/test/files/run/macro-reify-unreify/Test_2.scala
new file mode 100644
index 0000000..0a762f7
--- /dev/null
+++ b/test/files/run/macro-reify-unreify/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.foo("world")
+}
\ No newline at end of file
diff --git a/test/files/run/macro-repl-basic.check b/test/files/run/macro-repl-basic.check
new file mode 100644
index 0000000..7deed4a
--- /dev/null
+++ b/test/files/run/macro-repl-basic.check
@@ -0,0 +1,54 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import language.experimental.macros
+import language.experimental.macros
+
+scala> import scala.reflect.macros.{Context => Ctx}
+import scala.reflect.macros.{Context=>Ctx}
+
+scala>
+
+scala> object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int]) = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+ c.Expr[Int](body)
+ }
+
+ def bar(c: Ctx)(x: c.Expr[Int]) = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
+ c.Expr[Int](body)
+ }
+
+ def quux(c: Ctx)(x: c.Expr[Int]) = {
+ import c.universe._
+ val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+ c.Expr[Int](body)
+ }
+}
+defined module Impls
+
+scala> object Macros {
+ object Shmacros {
+ def foo(x: Int): Int = macro Impls.foo
+ }
+ def bar(x: Int): Int = macro Impls.bar
+}; class Macros {
+ def quux(x: Int): Int = macro Impls.quux
+}
+defined module Macros
+defined class Macros
+
+scala>
+
+scala> import Macros.Shmacros._
+import Macros.Shmacros._
+
+scala> println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+31
+
+scala>
diff --git a/test/files/run/macro-repl-basic.scala b/test/files/run/macro-repl-basic.scala
new file mode 100644
index 0000000..eae1feb
--- /dev/null
+++ b/test/files/run/macro-repl-basic.scala
@@ -0,0 +1,39 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |import language.experimental.macros
+ |import scala.reflect.macros.{Context => Ctx}
+ |
+ |object Impls {
+ | def foo(c: Ctx)(x: c.Expr[Int]) = {
+ | import c.universe._
+ | val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(1))))
+ | c.Expr[Int](body)
+ | }
+ |
+ | def bar(c: Ctx)(x: c.Expr[Int]) = {
+ | import c.universe._
+ | val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(2))))
+ | c.Expr[Int](body)
+ | }
+ |
+ | def quux(c: Ctx)(x: c.Expr[Int]) = {
+ | import c.universe._
+ | val body = Apply(Select(x.tree, newTermName("$plus")), List(Literal(Constant(3))))
+ | c.Expr[Int](body)
+ | }
+ |}
+ |object Macros {
+ | object Shmacros {
+ | def foo(x: Int): Int = macro Impls.foo
+ | }
+ | def bar(x: Int): Int = macro Impls.bar
+ |}; class Macros {
+ | def quux(x: Int): Int = macro Impls.quux
+ |}
+ |
+ |import Macros.Shmacros._
+ |println(foo(2) + Macros.bar(2) * new Macros().quux(4))
+ |""".stripMargin
+}
diff --git a/test/files/run/macro-repl-dontexpand.check b/test/files/run/macro-repl-dontexpand.check
new file mode 100644
index 0000000..628a914
--- /dev/null
+++ b/test/files/run/macro-repl-dontexpand.check
@@ -0,0 +1,12 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> def bar(c: scala.reflect.macros.Context) = ???
+bar: (c: scala.reflect.macros.Context)Nothing
+
+scala> def foo = macro bar
+foo: Any
+
+scala>
diff --git a/test/files/run/macro-repl-dontexpand.scala b/test/files/run/macro-repl-dontexpand.scala
new file mode 100644
index 0000000..f3422d8
--- /dev/null
+++ b/test/files/run/macro-repl-dontexpand.scala
@@ -0,0 +1,9 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def extraSettings = "-language:experimental.macros"
+ def code = """
+ |def bar(c: scala.reflect.macros.Context) = ???
+ |def foo = macro bar
+ |""".stripMargin
+}
diff --git a/test/files/run/macro-settings.check b/test/files/run/macro-settings.check
new file mode 100644
index 0000000..050d53c
--- /dev/null
+++ b/test/files/run/macro-settings.check
@@ -0,0 +1 @@
+List(hello=1)
diff --git a/test/files/run/macro-settings.flags b/test/files/run/macro-settings.flags
new file mode 100644
index 0000000..15479e3
--- /dev/null
+++ b/test/files/run/macro-settings.flags
@@ -0,0 +1 @@
+-language:experimental.macros -Xmacro-settings:hello=1
\ No newline at end of file
diff --git a/test/files/run/macro-settings/Impls_Macros_1.scala b/test/files/run/macro-settings/Impls_Macros_1.scala
new file mode 100644
index 0000000..83d80a5
--- /dev/null
+++ b/test/files/run/macro-settings/Impls_Macros_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.Context
+
+object Impls {
+ def impl(c: Context) = c.universe.reify {
+ println(c.literal(c.settings.toString).splice)
+ }
+}
+
+object Macros {
+ def foo = macro Impls.impl
+}
\ No newline at end of file
diff --git a/test/files/run/macro-settings/Test_2.scala b/test/files/run/macro-settings/Test_2.scala
new file mode 100644
index 0000000..acfddae
--- /dev/null
+++ b/test/files/run/macro-settings/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-sip19-revised.check b/test/files/run/macro-sip19-revised.check
new file mode 100644
index 0000000..86c3d81
--- /dev/null
+++ b/test/files/run/macro-sip19-revised.check
@@ -0,0 +1,5 @@
+hey, i've been called from SourceLocation1(null,Test_2.scala,11,251)
+hey, i've been called from SourceLocation1(SourceLocation1(null,Test_2.scala,11,251),Test_2.scala,8,222)
+hey, i've been called from SourceLocation1(SourceLocation1(SourceLocation1(null,Test_2.scala,11,251),Test_2.scala,8,222),Test_2.scala,8,222)
+hey, i've been called from SourceLocation1(SourceLocation1(SourceLocation1(SourceLocation1(null,Test_2.scala,11,251),Test_2.scala,8,222),Test_2.scala,8,222),Test_2.scala,6,180)
+2
diff --git a/test/files/run/macro-sip19-revised.flags b/test/files/run/macro-sip19-revised.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-sip19-revised.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-sip19-revised/Impls_Macros_1.scala b/test/files/run/macro-sip19-revised/Impls_Macros_1.scala
new file mode 100644
index 0000000..5f3f61c
--- /dev/null
+++ b/test/files/run/macro-sip19-revised/Impls_Macros_1.scala
@@ -0,0 +1,34 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+
+ val inscope = c.inferImplicitValue(c.mirror.staticClass("SourceLocation").toType)
+ val outer = c.Expr[SourceLocation](if (!inscope.isEmpty) inscope else Literal(Constant(null)))
+
+ val Apply(fun, args) = c.enclosingImplicits(0)._2
+ val fileName = fun.pos.source.file.file.getName
+ val line = fun.pos.line
+ val charOffset = fun.pos.point
+ c.universe.reify { SourceLocation1(outer.splice, c.literal(fileName).splice, c.literal(line).splice, c.literal(charOffset).splice) }
+ }
+
+ implicit def sourceLocation: SourceLocation1 = macro impl
+}
+
+trait SourceLocation {
+ /** Source location of the outermost call */
+ val outer: SourceLocation
+
+ /** The name of the source file */
+ val fileName: String
+
+ /** The line number */
+ val line: Int
+
+ /** The character offset */
+ val charOffset: Int
+}
+
+case class SourceLocation1(val outer: SourceLocation, val fileName: String, val line: Int, val charOffset: Int) extends SourceLocation
\ No newline at end of file
diff --git a/test/files/run/macro-sip19-revised/Test_2.scala b/test/files/run/macro-sip19-revised/Test_2.scala
new file mode 100644
index 0000000..d9a4d7d
--- /dev/null
+++ b/test/files/run/macro-sip19-revised/Test_2.scala
@@ -0,0 +1,12 @@
+import Macros._
+
+object Test extends App {
+ def foo(x: Int, y: Int)(implicit loc: SourceLocation): Int = {
+ println("hey, i've been called from %s".format(loc))
+ if (x < y) foo(y, x)
+ else if (y == 0) x
+ else foo(x - y, y)
+ }
+
+ println(foo(4, 2))
+}
diff --git a/test/files/run/macro-sip19.check b/test/files/run/macro-sip19.check
new file mode 100644
index 0000000..07cfd8c
--- /dev/null
+++ b/test/files/run/macro-sip19.check
@@ -0,0 +1,5 @@
+hey, i've been called from SourceLocation(Test_2.scala,15,366)
+hey, i've been called from SourceLocation(Test_2.scala,11,331)
+hey, i've been called from SourceLocation(Test_2.scala,11,331)
+hey, i've been called from SourceLocation(Test_2.scala,9,285)
+2
diff --git a/test/files/run/macro-sip19.flags b/test/files/run/macro-sip19.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-sip19.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-sip19/Impls_Macros_1.scala b/test/files/run/macro-sip19/Impls_Macros_1.scala
new file mode 100644
index 0000000..535ec2c
--- /dev/null
+++ b/test/files/run/macro-sip19/Impls_Macros_1.scala
@@ -0,0 +1,25 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ val Apply(fun, args) = c.enclosingImplicits(0)._2
+ val fileName = fun.pos.source.file.file.getName
+ val line = fun.pos.line
+ val charOffset = fun.pos.point
+ c.universe.reify { SourceLocation(c.literal(fileName).splice, c.literal(line).splice, c.literal(charOffset).splice) }
+ }
+
+ implicit def sourceLocation: SourceLocation = macro impl
+}
+
+case class SourceLocation(
+ /** The name of the source file */
+ val fileName: String,
+
+ /** The line number */
+ val line: Int,
+
+ /** The character offset */
+ val charOffset: Int
+)
\ No newline at end of file
diff --git a/test/files/run/macro-sip19/Test_2.scala b/test/files/run/macro-sip19/Test_2.scala
new file mode 100644
index 0000000..32326e6
--- /dev/null
+++ b/test/files/run/macro-sip19/Test_2.scala
@@ -0,0 +1,16 @@
+import Macros._
+
+object Test extends App {
+ def foo(x: Int, y: Int)(implicit loc0: SourceLocation): Int = {
+ var loc = loc0;
+ {
+ var loc0 = 0 // shadow loc0 to disambiguate with the implicit macro
+ println("hey, i've been called from %s".format(loc))
+ if (x < y) foo(y, x)
+ else if (y == 0) x
+ else foo(x - y, y)
+ }
+ }
+
+ println(foo(4, 2))
+}
diff --git a/test/files/run/macro-system-properties.check b/test/files/run/macro-system-properties.check
new file mode 100644
index 0000000..dce976d
--- /dev/null
+++ b/test/files/run/macro-system-properties.check
@@ -0,0 +1,26 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import language.experimental._, reflect.macros.Context
+import language.experimental._
+import reflect.macros.Context
+
+scala> object GrabContext {
+ def lastContext = Option(System.getProperties.get("lastContext").asInstanceOf[reflect.macros.runtime.Context])
+ // System.properties lets you stash true globals (unlike statics which are classloader scoped)
+ def impl(c: Context)() = { System.getProperties.put("lastContext", c); c.literalUnit }
+ def grab() = macro impl
+ }
+defined module GrabContext
+
+scala> object Test { class C(implicit a: Any) { GrabContext.grab } }
+defined module Test
+
+scala> object Test { class C(implicit a: Any) { GrabContext.grab } }
+defined module Test
+
+scala>
+
+scala>
diff --git a/test/files/run/macro-system-properties.scala b/test/files/run/macro-system-properties.scala
new file mode 100644
index 0000000..e182def
--- /dev/null
+++ b/test/files/run/macro-system-properties.scala
@@ -0,0 +1,16 @@
+import scala.tools.nsc._
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ import language.experimental._, reflect.macros.Context
+ object GrabContext {
+ def lastContext = Option(System.getProperties.get("lastContext").asInstanceOf[reflect.macros.runtime.Context])
+ // System.properties lets you stash true globals (unlike statics which are classloader scoped)
+ def impl(c: Context)() = { System.getProperties.put("lastContext", c); c.literalUnit }
+ def grab() = macro impl
+ }
+ object Test { class C(implicit a: Any) { GrabContext.grab } }
+ object Test { class C(implicit a: Any) { GrabContext.grab } }
+ """
+}
diff --git a/test/files/run/macro-typecheck-implicitsdisabled.check b/test/files/run/macro-typecheck-implicitsdisabled.check
new file mode 100644
index 0000000..c4fa2c5
--- /dev/null
+++ b/test/files/run/macro-typecheck-implicitsdisabled.check
@@ -0,0 +1,2 @@
+scala.this.Predef.any2ArrowAssoc[Int](1).->[Int](2)
+scala.reflect.macros.TypecheckException: value -> is not a member of Int
diff --git a/test/files/run/macro-typecheck-implicitsdisabled.flags b/test/files/run/macro-typecheck-implicitsdisabled.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-typecheck-implicitsdisabled.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala b/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala
new file mode 100644
index 0000000..633cb93
--- /dev/null
+++ b/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala
@@ -0,0 +1,28 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl_with_implicits_enabled(c: Context) = {
+ import c.universe._
+
+ val tree1 = Apply(Select(Literal(Constant(1)), newTermName("$minus$greater")), List(Literal(Constant(2))))
+ val ttree1 = c.typeCheck(tree1, withImplicitViewsDisabled = false)
+ c.literal(ttree1.toString)
+ }
+
+ def foo_with_implicits_enabled = macro impl_with_implicits_enabled
+
+ def impl_with_implicits_disabled(c: Context) = {
+ import c.universe._
+
+ try {
+ val tree2 = Apply(Select(Literal(Constant(1)), newTermName("$minus$greater")), List(Literal(Constant(2))))
+ val ttree2 = c.typeCheck(tree2, withImplicitViewsDisabled = true)
+ c.literal(ttree2.toString)
+ } catch {
+ case ex: Throwable =>
+ c.literal(ex.toString)
+ }
+ }
+
+ def foo_with_implicits_disabled = macro impl_with_implicits_disabled
+}
\ No newline at end of file
diff --git a/test/files/run/macro-typecheck-implicitsdisabled/Test_2.scala b/test/files/run/macro-typecheck-implicitsdisabled/Test_2.scala
new file mode 100644
index 0000000..127e955
--- /dev/null
+++ b/test/files/run/macro-typecheck-implicitsdisabled/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println(Macros.foo_with_implicits_enabled)
+ println(Macros.foo_with_implicits_disabled)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-typecheck-macrosdisabled.check b/test/files/run/macro-typecheck-macrosdisabled.check
new file mode 100644
index 0000000..29a881f
--- /dev/null
+++ b/test/files/run/macro-typecheck-macrosdisabled.check
@@ -0,0 +1,32 @@
+{
+ val $u: reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
+ val $m: $u.Mirror = scala.reflect.runtime.`package`.universe.runtimeMirror(this.getClass().getClassLoader());
+ $u.Expr.apply[Int(2)]($m, {
+ final class $treecreator1 extends TreeCreator {
+ def <init>(): $treecreator1 = {
+ $treecreator1.super.<init>();
+ ()
+ };
+ def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.Literal.apply($u.Constant.apply(2))
+ }
+ };
+ new $treecreator1()
+ })($u.TypeTag.apply[Int(2)]($m, {
+ final class $typecreator2 extends TypeCreator {
+ def <init>(): $typecreator2 = {
+ $typecreator2.super.<init>();
+ ()
+ };
+ def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.ConstantType.apply($u.Constant.apply(2))
+ }
+ };
+ new $typecreator2()
+ }))
+}
+ru.reify[Int](2)
diff --git a/test/files/run/macro-typecheck-macrosdisabled.flags b/test/files/run/macro-typecheck-macrosdisabled.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-typecheck-macrosdisabled.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala b/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala
new file mode 100644
index 0000000..f693ad7
--- /dev/null
+++ b/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala
@@ -0,0 +1,30 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl_with_macros_enabled(c: Context) = {
+ import c.universe._
+
+ val ru = Select(Select(Select(Select(Ident(newTermName("scala")), newTermName("reflect")), newTermName("runtime")), newTermName("package")), newTermName("universe"))
+ val tree1 = Apply(Select(ru, newTermName("reify")), List(Literal(Constant(2))))
+ val ttree1 = c.typeCheck(tree1, withMacrosDisabled = false)
+ c.literal(ttree1.toString)
+ }
+
+ def foo_with_macros_enabled = macro impl_with_macros_enabled
+
+ def impl_with_macros_disabled(c: Context) = {
+ import c.universe._
+
+ val rupkg = c.mirror.staticModule("scala.reflect.runtime.package")
+ val rusym = build.selectTerm(rupkg, "universe")
+ val NullaryMethodType(rutpe) = rusym.typeSignature
+ val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
+ build.setTypeSignature(ru, rutpe)
+
+ val tree2 = Apply(Select(Ident(ru), newTermName("reify")), List(Literal(Constant(2))))
+ val ttree2 = c.typeCheck(tree2, withMacrosDisabled = true)
+ c.literal(ttree2.toString)
+ }
+
+ def foo_with_macros_disabled = macro impl_with_macros_disabled
+}
\ No newline at end of file
diff --git a/test/files/run/macro-typecheck-macrosdisabled/Test_2.scala b/test/files/run/macro-typecheck-macrosdisabled/Test_2.scala
new file mode 100644
index 0000000..bdba391
--- /dev/null
+++ b/test/files/run/macro-typecheck-macrosdisabled/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println(Macros.foo_with_macros_enabled)
+ println(Macros.foo_with_macros_disabled)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-typecheck-macrosdisabled2.check b/test/files/run/macro-typecheck-macrosdisabled2.check
new file mode 100644
index 0000000..27d15d4
--- /dev/null
+++ b/test/files/run/macro-typecheck-macrosdisabled2.check
@@ -0,0 +1,32 @@
+{
+ val $u: reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
+ val $m: $u.Mirror = scala.reflect.runtime.`package`.universe.runtimeMirror(this.getClass().getClassLoader());
+ $u.Expr.apply[Array[Int]]($m, {
+ final class $treecreator1 extends TreeCreator {
+ def <init>(): $treecreator1 = {
+ $treecreator1.super.<init>();
+ ()
+ };
+ def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.Apply.apply($u.Select.apply($u.build.Ident($m.staticModule("scala.Array")), $u.newTermName("apply")), scala.collection.immutable.List.apply[$u.Literal]($u.Literal.apply($u.Constant.apply(2))))
+ }
+ };
+ new $treecreator1()
+ })($u.TypeTag.apply[Array[Int]]($m, {
+ final class $typecreator2 extends TypeCreator {
+ def <init>(): $typecreator2 = {
+ $typecreator2.super.<init>();
+ ()
+ };
+ def apply[U >: Nothing <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.TypeRef.apply($u.ThisType.apply($m.staticPackage("scala").asModule.moduleClass), $m.staticClass("scala.Array"), scala.collection.immutable.List.apply[$u.Type]($m.staticClass("scala.Int").asType.toTypeConstructor))
+ }
+ };
+ new $typecreator2()
+ }))
+}
+ru.reify[Array[Int]](scala.Array.apply(2))
diff --git a/test/files/run/macro-typecheck-macrosdisabled2.flags b/test/files/run/macro-typecheck-macrosdisabled2.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-typecheck-macrosdisabled2.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala b/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala
new file mode 100644
index 0000000..1dbf5a1
--- /dev/null
+++ b/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala
@@ -0,0 +1,30 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl_with_macros_enabled(c: Context) = {
+ import c.universe._
+
+ val ru = Select(Select(Select(Select(Ident(newTermName("scala")), newTermName("reflect")), newTermName("runtime")), newTermName("package")), newTermName("universe"))
+ val tree1 = Apply(Select(ru, newTermName("reify")), List(Apply(Select(Ident(newTermName("scala")), newTermName("Array")), List(Literal(Constant(2))))))
+ val ttree1 = c.typeCheck(tree1, withMacrosDisabled = false)
+ c.literal(ttree1.toString)
+ }
+
+ def foo_with_macros_enabled = macro impl_with_macros_enabled
+
+ def impl_with_macros_disabled(c: Context) = {
+ import c.universe._
+
+ val rupkg = c.mirror.staticModule("scala.reflect.runtime.package")
+ val rusym = build.selectTerm(rupkg, "universe")
+ val NullaryMethodType(rutpe) = rusym.typeSignature
+ val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
+ build.setTypeSignature(ru, rutpe)
+
+ val tree2 = Apply(Select(Ident(ru), newTermName("reify")), List(Apply(Select(Ident(newTermName("scala")), newTermName("Array")), List(Literal(Constant(2))))))
+ val ttree2 = c.typeCheck(tree2, withMacrosDisabled = true)
+ c.literal(ttree2.toString)
+ }
+
+ def foo_with_macros_disabled = macro impl_with_macros_disabled
+}
diff --git a/test/files/run/macro-typecheck-macrosdisabled2/Test_2.scala b/test/files/run/macro-typecheck-macrosdisabled2/Test_2.scala
new file mode 100644
index 0000000..bdba391
--- /dev/null
+++ b/test/files/run/macro-typecheck-macrosdisabled2/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println(Macros.foo_with_macros_enabled)
+ println(Macros.foo_with_macros_disabled)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-undetparams-consfromsls.check b/test/files/run/macro-undetparams-consfromsls.check
new file mode 100644
index 0000000..3fee58d
--- /dev/null
+++ b/test/files/run/macro-undetparams-consfromsls.check
@@ -0,0 +1,5 @@
+A = WeakTypeTag[Int]
+B = WeakTypeTag[Nothing]
+List(1)
+A = WeakTypeTag[Any]
+List(abc, 1)
diff --git a/test/files/run/macro-undetparams-consfromsls.flags b/test/files/run/macro-undetparams-consfromsls.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-undetparams-consfromsls.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala b/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala
new file mode 100644
index 0000000..bcbd128
--- /dev/null
+++ b/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.Context
+
+object Macros {
+ def cons_impl[A: c.WeakTypeTag](c: Context)(x: c.Expr[A], xs: c.Expr[List[A]]): c.Expr[List[A]] = c.universe.reify {
+ println("A = " + c.literal(implicitly[c.WeakTypeTag[A]].toString).splice)
+ x.splice :: xs.splice
+ }
+
+ def nil_impl[B: c.WeakTypeTag](c: Context): c.Expr[List[B]] = c.universe.reify {
+ println("B = " + c.literal(implicitly[c.WeakTypeTag[B]].toString).splice)
+ Nil
+ }
+
+ def cons[A](x: A, xs: List[A]): List[A] = macro cons_impl[A]
+
+ def nil[B]: List[B] = macro nil_impl[B]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-undetparams-consfromsls/Test_2.scala b/test/files/run/macro-undetparams-consfromsls/Test_2.scala
new file mode 100644
index 0000000..f2c2ce0
--- /dev/null
+++ b/test/files/run/macro-undetparams-consfromsls/Test_2.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+ import Macros._
+ val xs = cons(1, nil)
+ println(xs)
+ val ys = cons("abc", xs)
+ println(ys)
+}
\ No newline at end of file
diff --git a/test/files/run/macro-undetparams-implicitval.check b/test/files/run/macro-undetparams-implicitval.check
new file mode 100644
index 0000000..541b922
--- /dev/null
+++ b/test/files/run/macro-undetparams-implicitval.check
@@ -0,0 +1 @@
+TypeTag[Nothing]
diff --git a/test/files/run/macro-undetparams-implicitval.flags b/test/files/run/macro-undetparams-implicitval.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-undetparams-implicitval.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-undetparams-implicitval/Test.scala b/test/files/run/macro-undetparams-implicitval/Test.scala
new file mode 100644
index 0000000..72fd2f3
--- /dev/null
+++ b/test/files/run/macro-undetparams-implicitval/Test.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def foo[T: TypeTag] = println(implicitly[TypeTag[T]])
+ foo
+}
\ No newline at end of file
diff --git a/test/files/run/macro-undetparams-macroitself.check b/test/files/run/macro-undetparams-macroitself.check
new file mode 100644
index 0000000..a9bf554
--- /dev/null
+++ b/test/files/run/macro-undetparams-macroitself.check
@@ -0,0 +1,2 @@
+WeakTypeTag[Int]
+WeakTypeTag[String]
diff --git a/test/files/run/macro-undetparams-macroitself.flags b/test/files/run/macro-undetparams-macroitself.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/macro-undetparams-macroitself.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala b/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala
new file mode 100644
index 0000000..0244273
--- /dev/null
+++ b/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl[T: c.WeakTypeTag](c: Context)(foo: c.Expr[T]): c.Expr[Unit] = c.universe.reify { println(c.literal(implicitly[c.WeakTypeTag[T]].toString).splice) }
+
+ def foo[T](foo: T) = macro impl[T]
+}
\ No newline at end of file
diff --git a/test/files/run/macro-undetparams-macroitself/Test_2.scala b/test/files/run/macro-undetparams-macroitself/Test_2.scala
new file mode 100644
index 0000000..1a93ff1
--- /dev/null
+++ b/test/files/run/macro-undetparams-macroitself/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ Macros.foo(42)
+ Macros.foo("42")
+}
\ No newline at end of file
diff --git a/test/files/run/manifests-new.scala b/test/files/run/manifests-new.scala
new file mode 100644
index 0000000..f1596de
--- /dev/null
+++ b/test/files/run/manifests-new.scala
@@ -0,0 +1,149 @@
+import scala.reflect.runtime.universe._
+
+object Test
+{
+ object Variances extends Enumeration {
+ val CO, IN, CONTRA = Value
+ }
+ import Variances.{ CO, IN, CONTRA }
+
+ object SubtypeRelationship extends Enumeration {
+ val NONE, SAME, SUB, SUPER = Value
+ }
+ import SubtypeRelationship.{ NONE, SAME, SUB, SUPER }
+
+ class VarianceTester[T, U, CC[_]](expected: Variances.Value)(
+ implicit ev1: TypeTag[T], ev2: TypeTag[U], ev3: TypeTag[CC[T]], ev4: TypeTag[CC[U]]) {
+
+ def elements = List(ev1.tpe <:< ev2.tpe, ev2.tpe <:< ev1.tpe)
+ def containers = List(ev3.tpe <:< ev4.tpe, ev4.tpe <:< ev3.tpe)
+
+ def isUnrelated = typeCompare[T, U] == NONE
+ def isSame = typeCompare[T, U] == SAME
+ def isSub = typeCompare[T, U] == SUB
+ def isSuper = typeCompare[T, U] == SUPER
+
+ def showsCovariance = (elements == containers)
+ def showsContravariance = (elements == containers.reverse)
+ def showsInvariance = containers forall (_ == isSame)
+
+ def allContainerVariances = List(showsCovariance, showsInvariance, showsContravariance)
+
+ def showsExpectedVariance =
+ if (isUnrelated) allContainerVariances forall (_ == false)
+ else if (isSame) allContainerVariances forall (_ == true)
+ else expected match {
+ case CO => showsCovariance && !showsContravariance && !showsInvariance
+ case IN => showsInvariance && !showsCovariance && !showsContravariance
+ case CONTRA => showsContravariance && !showsCovariance && !showsInvariance
+ }
+ }
+
+ def showsCovariance[T, U, CC[_]](implicit ev1: TypeTag[T], ev2: TypeTag[U], ev3: TypeTag[CC[T]], ev4: TypeTag[CC[U]]) =
+ new VarianceTester[T, U, CC](CO) showsExpectedVariance
+
+ def showsInvariance[T, U, CC[_]](implicit ev1: TypeTag[T], ev2: TypeTag[U], ev3: TypeTag[CC[T]], ev4: TypeTag[CC[U]]) =
+ new VarianceTester[T, U, CC](IN) showsExpectedVariance
+
+ def showsContravariance[T, U, CC[_]](implicit ev1: TypeTag[T], ev2: TypeTag[U], ev3: TypeTag[CC[T]], ev4: TypeTag[CC[U]]) =
+ new VarianceTester[T, U, CC](CONTRA) showsExpectedVariance
+
+ def typeCompare[T, U](implicit ev1: TypeTag[T], ev2: TypeTag[U]) = (ev1.tpe <:< ev2.tpe, ev2.tpe <:< ev1.tpe) match {
+ case (true, true) => SAME
+ case (true, false) => SUB
+ case (false, true) => SUPER
+ case (false, false) => NONE
+ }
+
+ def assertAnyRef[T: TypeTag] = List(
+ typeOf[T] <:< typeOf[Any],
+ typeOf[T] <:< typeOf[AnyRef],
+ !(typeOf[T] <:< typeOf[AnyVal])
+ ) foreach (assert(_, "assertAnyRef"))
+
+ def assertAnyVal[T: TypeTag] = List(
+ typeOf[T] <:< typeOf[Any],
+ !(typeOf[T] <:< typeOf[AnyRef]),
+ typeOf[T] <:< typeOf[AnyVal]
+ ) foreach (assert(_, "assertAnyVal"))
+
+ def assertSameType[T: TypeTag, U: TypeTag] = assert(typeCompare[T, U] == SAME, "assertSameType")
+ def assertSuperType[T: TypeTag, U: TypeTag] = assert(typeCompare[T, U] == SUPER, "assertSuperType")
+ def assertSubType[T: TypeTag, U: TypeTag] = assert(typeCompare[T, U] == SUB, "assertSubType")
+ def assertNoRelationship[T: TypeTag, U: TypeTag] = assert(typeCompare[T, U] == NONE, "assertNoRelationship")
+
+ def testVariancesVia[T: TypeTag, U: TypeTag] = assert(
+ typeCompare[T, U] == SUB &&
+ showsCovariance[T, U, List] &&
+ showsInvariance[T, U, Set],
+ "testVariancesVia"
+ )
+
+ def runAllTests = {
+ assertAnyVal[AnyVal]
+ assertAnyVal[Unit]
+ assertAnyVal[Int]
+ assertAnyVal[Double]
+ assertAnyVal[Boolean]
+ assertAnyVal[Char]
+
+ assertAnyRef[AnyRef]
+ assertAnyRef[java.lang.Object]
+ assertAnyRef[java.lang.Integer]
+ assertAnyRef[java.lang.Double]
+ assertAnyRef[java.lang.Boolean]
+ assertAnyRef[java.lang.Character]
+ assertAnyRef[String]
+ assertAnyRef[scala.List[String]]
+ assertAnyRef[scala.List[_]]
+
+ // variance doesn't work yet
+ // testVariancesVia[String, Any]
+ // testVariancesVia[String, AnyRef]
+
+ assertSubType[List[String], List[Any]]
+ assertSubType[List[String], List[AnyRef]]
+ assertNoRelationship[List[String], List[AnyVal]]
+
+ assertSubType[List[Int], List[Any]]
+ assertSubType[List[Int], List[AnyVal]]
+ assertNoRelationship[List[Int], List[AnyRef]]
+
+ // Nothing
+ assertSubType[Nothing, Any]
+ assertSubType[Nothing, AnyVal]
+ assertSubType[Nothing, AnyRef]
+ assertSubType[Nothing, String]
+ assertSubType[Nothing, List[String]]
+ assertSubType[Nothing, Null]
+ assertSameType[Nothing, Nothing]
+
+ // Null
+ assertSubType[Null, Any]
+ assertNoRelationship[Null, AnyVal]
+ assertSubType[Null, AnyRef]
+ assertSubType[Null, String]
+ assertSubType[Null, List[String]]
+ assertSameType[Null, Null]
+ assertSuperType[Null, Nothing]
+
+ // Any
+ assertSameType[Any, Any]
+ assertSuperType[Any, AnyVal]
+ assertSuperType[Any, AnyRef]
+ assertSuperType[Any, String]
+ assertSuperType[Any, List[String]]
+ assertSuperType[Any, Null]
+ assertSuperType[Any, Nothing]
+
+ // Misc unrelated types
+ assertNoRelationship[Unit, AnyRef]
+ assertNoRelationship[Unit, Int]
+ assertNoRelationship[Int, Long]
+ assertNoRelationship[Boolean, String]
+ assertNoRelationship[List[Boolean], List[String]]
+ assertNoRelationship[Set[Boolean], Set[String]]
+ }
+
+ def main(args: Array[String]): Unit = runAllTests
+}
\ No newline at end of file
diff --git a/test/files/run/manifests-old.scala b/test/files/run/manifests-old.scala
new file mode 100644
index 0000000..621689a
--- /dev/null
+++ b/test/files/run/manifests-old.scala
@@ -0,0 +1,147 @@
+object Test
+{
+ object Variances extends Enumeration {
+ val CO, IN, CONTRA = Value
+ }
+ import Variances.{ CO, IN, CONTRA }
+
+ object SubtypeRelationship extends Enumeration {
+ val NONE, SAME, SUB, SUPER = Value
+ }
+ import SubtypeRelationship.{ NONE, SAME, SUB, SUPER }
+
+ class VarianceTester[T, U, CC[_]](expected: Variances.Value)(
+ implicit ev1: Manifest[T], ev2: Manifest[U], ev3: Manifest[CC[T]], ev4: Manifest[CC[U]]) {
+
+ def elements = List(ev1 <:< ev2, ev2 <:< ev1)
+ def containers = List(ev3 <:< ev4, ev4 <:< ev3)
+
+ def isUnrelated = typeCompare[T, U] == NONE
+ def isSame = typeCompare[T, U] == SAME
+ def isSub = typeCompare[T, U] == SUB
+ def isSuper = typeCompare[T, U] == SUPER
+
+ def showsCovariance = (elements == containers)
+ def showsContravariance = (elements == containers.reverse)
+ def showsInvariance = containers forall (_ == isSame)
+
+ def allContainerVariances = List(showsCovariance, showsInvariance, showsContravariance)
+
+ def showsExpectedVariance =
+ if (isUnrelated) allContainerVariances forall (_ == false)
+ else if (isSame) allContainerVariances forall (_ == true)
+ else expected match {
+ case CO => showsCovariance && !showsContravariance && !showsInvariance
+ case IN => showsInvariance && !showsCovariance && !showsContravariance
+ case CONTRA => showsContravariance && !showsCovariance && !showsInvariance
+ }
+ }
+
+ def showsCovariance[T, U, CC[_]](implicit ev1: Manifest[T], ev2: Manifest[U], ev3: Manifest[CC[T]], ev4: Manifest[CC[U]]) =
+ new VarianceTester[T, U, CC](CO) showsExpectedVariance
+
+ def showsInvariance[T, U, CC[_]](implicit ev1: Manifest[T], ev2: Manifest[U], ev3: Manifest[CC[T]], ev4: Manifest[CC[U]]) =
+ new VarianceTester[T, U, CC](IN) showsExpectedVariance
+
+ def showsContravariance[T, U, CC[_]](implicit ev1: Manifest[T], ev2: Manifest[U], ev3: Manifest[CC[T]], ev4: Manifest[CC[U]]) =
+ new VarianceTester[T, U, CC](CONTRA) showsExpectedVariance
+
+ def typeCompare[T, U](implicit ev1: Manifest[T], ev2: Manifest[U]) = (ev1 <:< ev2, ev2 <:< ev1) match {
+ case (true, true) => SAME
+ case (true, false) => SUB
+ case (false, true) => SUPER
+ case (false, false) => NONE
+ }
+
+ def assertAnyRef[T: Manifest] = List(
+ manifest[T] <:< manifest[Any],
+ manifest[T] <:< manifest[AnyRef],
+ !(manifest[T] <:< manifest[AnyVal])
+ ) foreach (assert(_, "assertAnyRef"))
+
+ def assertAnyVal[T: Manifest] = List(
+ manifest[T] <:< manifest[Any],
+ !(manifest[T] <:< manifest[AnyRef]),
+ manifest[T] <:< manifest[AnyVal]
+ ) foreach (assert(_, "assertAnyVal"))
+
+ def assertSameType[T: Manifest, U: Manifest] = assert(typeCompare[T, U] == SAME, "assertSameType")
+ def assertSuperType[T: Manifest, U: Manifest] = assert(typeCompare[T, U] == SUPER, "assertSuperType")
+ def assertSubType[T: Manifest, U: Manifest] = assert(typeCompare[T, U] == SUB, "assertSubType")
+ def assertNoRelationship[T: Manifest, U: Manifest] = assert(typeCompare[T, U] == NONE, "assertNoRelationship")
+
+ def testVariancesVia[T: Manifest, U: Manifest] = assert(
+ typeCompare[T, U] == SUB &&
+ showsCovariance[T, U, List] &&
+ showsInvariance[T, U, Set],
+ "testVariancesVia"
+ )
+
+ def runAllTests = {
+ assertAnyVal[AnyVal]
+ assertAnyVal[Unit]
+ assertAnyVal[Int]
+ assertAnyVal[Double]
+ assertAnyVal[Boolean]
+ assertAnyVal[Char]
+
+ assertAnyRef[AnyRef]
+ assertAnyRef[java.lang.Object]
+ assertAnyRef[java.lang.Integer]
+ assertAnyRef[java.lang.Double]
+ assertAnyRef[java.lang.Boolean]
+ assertAnyRef[java.lang.Character]
+ assertAnyRef[String]
+ assertAnyRef[scala.List[String]]
+ assertAnyRef[scala.List[_]]
+
+ // variance doesn't work yet
+ // testVariancesVia[String, Any]
+ // testVariancesVia[String, AnyRef]
+
+ assertSubType[List[String], List[Any]]
+ assertSubType[List[String], List[AnyRef]]
+ assertNoRelationship[List[String], List[AnyVal]]
+
+ assertSubType[List[Int], List[Any]]
+ assertSubType[List[Int], List[AnyVal]]
+ assertNoRelationship[List[Int], List[AnyRef]]
+
+ // Nothing
+ assertSubType[Nothing, Any]
+ assertSubType[Nothing, AnyVal]
+ assertSubType[Nothing, AnyRef]
+ assertSubType[Nothing, String]
+ assertSubType[Nothing, List[String]]
+ assertSubType[Nothing, Null]
+ assertSameType[Nothing, Nothing]
+
+ // Null
+ assertSubType[Null, Any]
+ assertNoRelationship[Null, AnyVal]
+ assertSubType[Null, AnyRef]
+ assertSubType[Null, String]
+ assertSubType[Null, List[String]]
+ assertSameType[Null, Null]
+ assertSuperType[Null, Nothing]
+
+ // Any
+ assertSameType[Any, Any]
+ assertSuperType[Any, AnyVal]
+ assertSuperType[Any, AnyRef]
+ assertSuperType[Any, String]
+ assertSuperType[Any, List[String]]
+ assertSuperType[Any, Null]
+ assertSuperType[Any, Nothing]
+
+ // Misc unrelated types
+ assertNoRelationship[Unit, AnyRef]
+ assertNoRelationship[Unit, Int]
+ assertNoRelationship[Int, Long]
+ assertNoRelationship[Boolean, String]
+ assertNoRelationship[List[Boolean], List[String]]
+ assertNoRelationship[Set[Boolean], Set[String]]
+ }
+
+ def main(args: Array[String]): Unit = runAllTests
+}
\ No newline at end of file
diff --git a/test/files/neg/caseinherit.flags b/test/files/run/manifests-undeprecated-in-2.10.0.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/run/manifests-undeprecated-in-2.10.0.flags
diff --git a/test/files/run/manifests-undeprecated-in-2.10.0.scala b/test/files/run/manifests-undeprecated-in-2.10.0.scala
new file mode 100644
index 0000000..82e90b3
--- /dev/null
+++ b/test/files/run/manifests-undeprecated-in-2.10.0.scala
@@ -0,0 +1,15 @@
+object Test extends App {
+ def m1a: scala.reflect.Manifest[Int] = scala.reflect.Manifest.Int
+ def m2a: scala.reflect.OptManifest[Int] = ???
+ def m3a = scala.reflect.NoManifest
+
+ def m1b: Manifest[Int] = Manifest.Int
+ def m2b: OptManifest[Int] = ???
+ def m3b = NoManifest
+
+ val m4a = manifest[Int]
+ val m5a = optManifest[Int]
+
+ val m4b = implicitly[Manifest[Int]]
+ val m5b = implicitly[OptManifest[Int]]
+}
\ No newline at end of file
diff --git a/test/files/run/manifests.scala b/test/files/run/manifests.scala
deleted file mode 100644
index 1da06b8..0000000
--- a/test/files/run/manifests.scala
+++ /dev/null
@@ -1,147 +0,0 @@
-object Test
-{
- object Variances extends Enumeration {
- val CO, IN, CONTRA = Value
- }
- import Variances.{ CO, IN, CONTRA }
-
- object SubtypeRelationship extends Enumeration {
- val NONE, SAME, SUB, SUPER = Value
- }
- import SubtypeRelationship.{ NONE, SAME, SUB, SUPER }
-
- class VarianceTester[T, U, CC[_]](expected: Variances.Value)(
- implicit ev1: Manifest[T], ev2: Manifest[U], ev3: Manifest[CC[T]], ev4: Manifest[CC[U]]) {
-
- def elements = List(ev1 <:< ev2, ev2 <:< ev1)
- def containers = List(ev3 <:< ev4, ev4 <:< ev3)
-
- def isUnrelated = typeCompare[T, U] == NONE
- def isSame = typeCompare[T, U] == SAME
- def isSub = typeCompare[T, U] == SUB
- def isSuper = typeCompare[T, U] == SUPER
-
- def showsCovariance = (elements == containers)
- def showsContravariance = (elements == containers.reverse)
- def showsInvariance = containers forall (_ == isSame)
-
- def allContainerVariances = List(showsCovariance, showsInvariance, showsContravariance)
-
- def showsExpectedVariance =
- if (isUnrelated) allContainerVariances forall (_ == false)
- else if (isSame) allContainerVariances forall (_ == true)
- else expected match {
- case CO => showsCovariance && !showsContravariance && !showsInvariance
- case IN => showsInvariance && !showsCovariance && !showsContravariance
- case CONTRA => showsContravariance && !showsCovariance && !showsInvariance
- }
- }
-
- def showsCovariance[T, U, CC[_]](implicit ev1: Manifest[T], ev2: Manifest[U], ev3: Manifest[CC[T]], ev4: Manifest[CC[U]]) =
- new VarianceTester[T, U, CC](CO) showsExpectedVariance
-
- def showsInvariance[T, U, CC[_]](implicit ev1: Manifest[T], ev2: Manifest[U], ev3: Manifest[CC[T]], ev4: Manifest[CC[U]]) =
- new VarianceTester[T, U, CC](IN) showsExpectedVariance
-
- def showsContravariance[T, U, CC[_]](implicit ev1: Manifest[T], ev2: Manifest[U], ev3: Manifest[CC[T]], ev4: Manifest[CC[U]]) =
- new VarianceTester[T, U, CC](CONTRA) showsExpectedVariance
-
- def typeCompare[T, U](implicit ev1: Manifest[T], ev2: Manifest[U]) = (ev1 <:< ev2, ev2 <:< ev1) match {
- case (true, true) => SAME
- case (true, false) => SUB
- case (false, true) => SUPER
- case (false, false) => NONE
- }
-
- def assertAnyRef[T: Manifest] = List(
- manifest[T] <:< manifest[Any],
- manifest[T] <:< manifest[AnyRef],
- !(manifest[T] <:< manifest[AnyVal])
- ) foreach (assert(_, "assertAnyRef"))
-
- def assertAnyVal[T: Manifest] = List(
- manifest[T] <:< manifest[Any],
- !(manifest[T] <:< manifest[AnyRef]),
- manifest[T] <:< manifest[AnyVal]
- ) foreach (assert(_, "assertAnyVal"))
-
- def assertSameType[T: Manifest, U: Manifest] = assert(typeCompare[T, U] == SAME, "assertSameType")
- def assertSuperType[T: Manifest, U: Manifest] = assert(typeCompare[T, U] == SUPER, "assertSuperType")
- def assertSubType[T: Manifest, U: Manifest] = assert(typeCompare[T, U] == SUB, "assertSubType")
- def assertNoRelationship[T: Manifest, U: Manifest] = assert(typeCompare[T, U] == NONE, "assertNoRelationship")
-
- def testVariancesVia[T: Manifest, U: Manifest] = assert(
- typeCompare[T, U] == SUB &&
- showsCovariance[T, U, List] &&
- showsInvariance[T, U, Set],
- "testVariancesVia"
- )
-
- def runAllTests = {
- assertAnyVal[AnyVal]
- assertAnyVal[Unit]
- assertAnyVal[Int]
- assertAnyVal[Double]
- assertAnyVal[Boolean]
- assertAnyVal[Char]
-
- assertAnyRef[AnyRef]
- assertAnyRef[java.lang.Object]
- assertAnyRef[java.lang.Integer]
- assertAnyRef[java.lang.Double]
- assertAnyRef[java.lang.Boolean]
- assertAnyRef[java.lang.Character]
- assertAnyRef[String]
- assertAnyRef[scala.List[String]]
- assertAnyRef[scala.List[_]]
-
- // variance doesn't work yet
- // testVariancesVia[String, Any]
- // testVariancesVia[String, AnyRef]
-
- assertSubType[List[String], List[Any]]
- assertSubType[List[String], List[AnyRef]]
- assertNoRelationship[List[String], List[AnyVal]]
-
- assertSubType[List[Int], List[Any]]
- assertSubType[List[Int], List[AnyVal]]
- assertNoRelationship[List[Int], List[AnyRef]]
-
- // Nothing
- assertSubType[Nothing, Any]
- assertSubType[Nothing, AnyVal]
- assertSubType[Nothing, AnyRef]
- assertSubType[Nothing, String]
- assertSubType[Nothing, List[String]]
- assertSubType[Nothing, Null]
- assertSameType[Nothing, Nothing]
-
- // Null
- assertSubType[Null, Any]
- assertNoRelationship[Null, AnyVal]
- assertSubType[Null, AnyRef]
- assertSubType[Null, String]
- assertSubType[Null, List[String]]
- assertSameType[Null, Null]
- assertSuperType[Null, Nothing]
-
- // Any
- assertSameType[Any, Any]
- assertSuperType[Any, AnyVal]
- assertSuperType[Any, AnyRef]
- assertSuperType[Any, String]
- assertSuperType[Any, List[String]]
- assertSuperType[Any, Null]
- assertSuperType[Any, Nothing]
-
- // Misc unrelated types
- assertNoRelationship[Unit, AnyRef]
- assertNoRelationship[Unit, Int]
- assertNoRelationship[Int, Long]
- assertNoRelationship[Boolean, String]
- assertNoRelationship[List[Boolean], List[String]]
- assertNoRelationship[Set[Boolean], Set[String]]
- }
-
- def main(args: Array[String]): Unit = runAllTests
-}
diff --git a/test/files/run/map_java_conversions.scala b/test/files/run/map_java_conversions.scala
index 58ff471..7714b2c 100644
--- a/test/files/run/map_java_conversions.scala
+++ b/test/files/run/map_java_conversions.scala
@@ -19,7 +19,7 @@ object Test {
val concMap = new java.util.concurrent.ConcurrentHashMap[String, String]
test(concMap)
- val cmap = asConcurrentMap(concMap)
+ val cmap = asScalaConcurrentMap(concMap)
cmap.putIfAbsent("absentKey", "absentValue")
cmap.put("somekey", "somevalue")
assert(cmap.remove("somekey", "somevalue") == true)
diff --git a/test/files/run/matchonseq.check b/test/files/run/matchonseq.check
new file mode 100644
index 0000000..3fe5540
--- /dev/null
+++ b/test/files/run/matchonseq.check
@@ -0,0 +1,2 @@
+It worked! head=1
+It worked! last=3
diff --git a/test/files/run/matchonseq.scala b/test/files/run/matchonseq.scala
new file mode 100644
index 0000000..49b406a
--- /dev/null
+++ b/test/files/run/matchonseq.scala
@@ -0,0 +1,8 @@
+object Test extends App{
+ Vector(1,2,3) match {
+ case head +: tail => println("It worked! head=" + head)
+ }
+ Vector(1,2,3) match {
+ case init :+ last => println("It worked! last=" + last)
+ }
+}
diff --git a/test/files/run/misc-msil.check b/test/files/run/misc-msil.check
deleted file mode 100644
index 480a840..0000000
--- a/test/files/run/misc-msil.check
+++ /dev/null
@@ -1,33 +0,0 @@
-### Hello
-### 17
-### Bye
-
-### fib(0) = 1
-### fib(1) = 1
-### fib(2) = 2
-### fib(3) = 3
-### fib(4) = 5
-=== MyClass::toString ===
-=== MySubclass::toString ===
-=== MyClass::test ===
-
-identity
-
-A.a = 1
-B.a = 5
-B.b = 2
-
-X.a = 4
-Y.a = 11
-Y.b = 5
-Y.b = 5
-
-X::foo
-
-Y::foo
-X::foo
-
-3
-3
-
-True
diff --git a/test/files/run/mock.check b/test/files/run/mock.check
deleted file mode 100644
index 967c4e2..0000000
--- a/test/files/run/mock.check
+++ /dev/null
@@ -1,3 +0,0 @@
-Hi, thanks for calling: that makes 1 times.
-Hi, thanks for calling: that makes 2 times.
-Hi, thanks for calling: that makes 3 times.
diff --git a/test/files/run/mock.scala b/test/files/run/mock.scala
deleted file mode 100644
index 8778e20..0000000
--- a/test/files/run/mock.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-import scala.tools.reflect._
-import java.util.concurrent.Callable
-import java.io.Closeable
-
-object Test {
- // It'd be really nice about now if functions had a common parent.
- implicit def interfaceify(x: AnyRef): UniversalFn = UniversalFn(x)
-
- def runner(x: Runnable) = x.run()
- def caller[T](x: Callable[T]): T = x.call()
- def closer(x: Closeable) = x.close()
-
- def main(args: Array[String]): Unit = {
- var counter = 0
- val closure = () => {
- counter += 1
- println("Hi, thanks for calling: that makes " + counter + " times.")
- counter
- }
-
- val int1 = closure.as[Runnable]
- val int2 = closure.as[Callable[Int]]
- val int3 = closure.as[Closeable]
-
- runner(int1)
- caller(int2)
- closer(int3)
- }
-}
diff --git a/test/files/run/multi-array.scala b/test/files/run/multi-array.scala
index 4f3a8d3..36e21ae 100644
--- a/test/files/run/multi-array.scala
+++ b/test/files/run/multi-array.scala
@@ -1,6 +1,6 @@
object Test extends App {
val a = Array(1, 2, 3)
- println(a.deepToString)
+ println(a.deep.toString)
val aaiIncomplete = new Array[Array[Array[Int]]](3)
println(aaiIncomplete(0))
@@ -9,6 +9,6 @@ object Test extends App {
println(aaiComplete.deep)
for (i <- 0 until 3; j <- 0 until 3)
aaiComplete(i)(j) = i + j
- println(aaiComplete.deepToString)
+ println(aaiComplete.deep.toString)
assert(aaiComplete.last.last == 4)
}
diff --git a/test/files/run/names-defaults.check b/test/files/run/names-defaults.check
index 5656d1a..f253de7 100644
--- a/test/files/run/names-defaults.check
+++ b/test/files/run/names-defaults.check
@@ -92,7 +92,7 @@ test5
test5
5
10: 2
-slkdfj1
+slkdfj2
1
lskfdjlk
11
diff --git a/test/files/run/names-defaults.scala b/test/files/run/names-defaults.scala
index 4a69842..220414f 100644
--- a/test/files/run/names-defaults.scala
+++ b/test/files/run/names-defaults.scala
@@ -176,7 +176,7 @@ object Test extends App {
println(Fact2()("jyp"))
println(Fact2(x = 1)())
- println(Fact2(10)().copy(y = "blabla")())
+ println(Fact2(10)().copy(y = "blabla")(3))
// assignment to var <-> named argument
@@ -195,7 +195,7 @@ object Test extends App {
// dependent types and copy method
val a11 = new A2
val b11 = a11.B2(new a11.C2)(1)
- println(b11.copy()())
+ println(b11.copy()(2))
@@ -392,6 +392,11 @@ object Test extends App {
}
println(""+ t4041._1 +", "+ t4041._2)
+ // #4441
+ case class C4441a()
+ case class C4441b()()
+ C4441a().copy()
+ C4441b()().copy()()
// DEFINITIONS
def test1(a: Int, b: String) = println(a +": "+ b)
diff --git a/test/files/run/newTags.check b/test/files/run/newTags.check
new file mode 100644
index 0000000..16be9b1
--- /dev/null
+++ b/test/files/run/newTags.check
@@ -0,0 +1,3 @@
+List[Int]
+Map[String,String]
+TypeTag[Map[String,String]]
diff --git a/test/files/run/newTags.scala b/test/files/run/newTags.scala
new file mode 100644
index 0000000..c5199d4
--- /dev/null
+++ b/test/files/run/newTags.scala
@@ -0,0 +1,11 @@
+import scala.reflect.api.{Universe => ApiUniverse}
+import scala.reflect.runtime.{universe => ru}
+
+object Test extends App {
+ println(ru.typeOf[List[Int]])
+ def foo[T: ru.TypeTag] = {
+ println(ru.typeOf[T])
+ println(implicitly[ApiUniverse#TypeTag[T]])
+ }
+ foo[Map[String, String]]
+}
\ No newline at end of file
diff --git a/test/files/run/nonlocalreturn.check b/test/files/run/nonlocalreturn.check
new file mode 100644
index 0000000..aeb2d5e
--- /dev/null
+++ b/test/files/run/nonlocalreturn.check
@@ -0,0 +1 @@
+Some(1)
diff --git a/test/files/run/nonlocalreturn.scala b/test/files/run/nonlocalreturn.scala
new file mode 100644
index 0000000..3c1e742
--- /dev/null
+++ b/test/files/run/nonlocalreturn.scala
@@ -0,0 +1,15 @@
+object Test {
+ def wrap[K](body: => K): K = body
+
+ def f(): Option[Int] = {
+ wrap({ return Some(1) ; None })
+ }
+
+ def main(args: Array[String]) {
+ println(f())
+ }
+}
+// java.lang.ClassCastException: scala.Some cannot be cast to scala.None$
+// at Test$$anonfun$f$1.apply(nonlocalreturn.scala:5)
+// at Test$$anonfun$f$1.apply(nonlocalreturn.scala:5)
+// at Test$.wrap(nonlocalreturn.scala:2)
diff --git a/test/files/run/nullable-lazyvals.check b/test/files/run/nullable-lazyvals.check
new file mode 100644
index 0000000..4db5783
--- /dev/null
+++ b/test/files/run/nullable-lazyvals.check
@@ -0,0 +1,3 @@
+
+param1: null
+param2: null
diff --git a/test/files/run/nullable-lazyvals.scala b/test/files/run/nullable-lazyvals.scala
new file mode 100644
index 0000000..c201e74
--- /dev/null
+++ b/test/files/run/nullable-lazyvals.scala
@@ -0,0 +1,36 @@
+
+/** Test that call-by-name parameters are set to null if
+ * they are used only to initialize a lazy value, after the
+ * value has been initialized.
+ */
+
+class Foo(param1: => Object, param2: => String) {
+ lazy val field1 = param1
+ lazy val field2 = try param2 finally println("")
+}
+
+object Test extends App {
+ val foo = new Foo(new Object, "abc")
+
+ foo.field1
+ foo.field2
+
+ for (f <- foo.getClass.getDeclaredFields) {
+ f.setAccessible(true)
+ if (f.getName.startsWith("param")) {
+ println("%s: %s".format(f.getName, f.get(foo)))
+ }
+ }
+
+ // test that try-finally does not generated a liftedTry
+ // helper. This would already fail the first part of the test,
+ // but this check will help diganose it (if the single access to a
+ // private field does not happen directly in the lazy val, it won't
+ // be nulled).
+ for (f <- foo.getClass.getDeclaredMethods) {
+ f.setAccessible(true)
+ if (f.getName.startsWith("lifted")) {
+ println("not expected: %s".format(f))
+ }
+ }
+}
diff --git a/test/files/run/numbereq.scala b/test/files/run/numbereq.scala
index 77a217d..a1f11da 100644
--- a/test/files/run/numbereq.scala
+++ b/test/files/run/numbereq.scala
@@ -16,7 +16,20 @@ object Test {
base ::: extras
}
-
+
+ def mkNumbers(x: BigInt): List[AnyRef] = {
+ List(
+ List(BigDecimal(x, java.math.MathContext.UNLIMITED)),
+ List(x),
+ if (x.isValidDouble) List(new java.lang.Double(x.toDouble)) else Nil,
+ if (x.isValidFloat) List(new java.lang.Float(x.toFloat)) else Nil,
+ if (x.isValidLong) List(new java.lang.Long(x.toLong)) else Nil,
+ if (x.isValidInt) List(new java.lang.Integer(x.toInt)) else Nil,
+ if (x.isValidShort) List(new java.lang.Short(x.toShort)) else Nil,
+ if (x.isValidByte) List(new java.lang.Byte(x.toByte)) else Nil,
+ if (x.isValidChar) List(new java.lang.Character(x.toChar)) else Nil
+ ).flatten
+ }
def main(args: Array[String]): Unit = {
val ints = (0 to 15).toList map (Short.MinValue >> _)
@@ -37,5 +50,23 @@ object Test {
assert(x == y, "%s/%s != %s/%s".format(x, x.getClass, y, y.getClass))
assert(x.## == y.##, "%s != %s".format(x.getClass, y.getClass))
}
+
+ val bigInts = (0 to 1024).toList map (BigInt(-1) << _)
+ val bigInts2 = bigInts map (x => -x)
+ val bigInts3 = bigInts map (_ + 1)
+ val bigInts4 = bigInts2 map (_ - 1)
+
+ val setneg1b = bigInts map mkNumbers
+ val setneg2b = bigInts3 map mkNumbers
+ val setpos1b = bigInts2 map mkNumbers
+ val setpos2b = bigInts4 map mkNumbers
+
+ val sets2 = setneg1 ++ setneg1b ++ setneg2 ++ setneg2b ++ List(zero) ++ setpos1 ++ setpos1b ++ setpos2 ++ setpos2b
+
+ for (set <- sets2 ; x <- set ; y <- set) {
+// println("'%s' == '%s' (%s == %s) (%s == %s)".format(x, y, x.hashCode, y.hashCode, x.##, y.##))
+ assert(x == y, "%s/%s != %s/%s".format(x, x.getClass, y, y.getClass))
+// assert(x.## == y.##, "%s != %s".format(x.getClass, y.getClass)) Disable until Double.## is fixed (SI-5640)
+ }
}
-}
\ No newline at end of file
+}
diff --git a/test/files/run/optimizer-array-load.check b/test/files/run/optimizer-array-load.check
new file mode 100644
index 0000000..e8371f0
--- /dev/null
+++ b/test/files/run/optimizer-array-load.check
@@ -0,0 +1,6 @@
+0
+1
+2
+3
+4
+5
diff --git a/test/files/pos/bug3252.flags b/test/files/run/optimizer-array-load.flags
similarity index 100%
copy from test/files/pos/bug3252.flags
copy to test/files/run/optimizer-array-load.flags
diff --git a/test/files/run/optimizer-array-load.scala b/test/files/run/optimizer-array-load.scala
new file mode 100644
index 0000000..a4d76f7
--- /dev/null
+++ b/test/files/run/optimizer-array-load.scala
@@ -0,0 +1,16 @@
+object Test {
+ def f() = {
+ val ar = Array.ofDim[Int](5)
+ var x = 0
+
+ while (x<=5) {
+ println(x)
+ val a = ar(x)
+ x+=1
+ }
+ }
+ def main(args: Array[String]): Unit = {
+ try { f() ; assert(false, "should have thrown exception") }
+ catch { case _: ArrayIndexOutOfBoundsException => () }
+ }
+}
diff --git a/test/files/run/option-fold.check b/test/files/run/option-fold.check
new file mode 100644
index 0000000..4e3fe99
--- /dev/null
+++ b/test/files/run/option-fold.check
@@ -0,0 +1,5 @@
+List()
+List(5)
+-1
+0
+1
diff --git a/test/files/run/option-fold.scala b/test/files/run/option-fold.scala
new file mode 100644
index 0000000..d554ba4
--- /dev/null
+++ b/test/files/run/option-fold.scala
@@ -0,0 +1,19 @@
+object Test {
+ sealed class A
+ case object B extends A
+ case class C(x: Int) extends A
+
+ def f[T](x: Option[T]) = x.fold(List.empty[T])(List(_))
+ def g(x: Option[A]) = x.fold(-1) {
+ case B => 0
+ case C(x) => x
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(f(None))
+ println(f(Some(5)))
+ println(g(None))
+ println(g(Some(B)))
+ println(g(Some(C(1))))
+ }
+}
diff --git a/test/files/run/origins.check b/test/files/run/origins.check
index ffbf1c1..b12cb6e 100644
--- a/test/files/run/origins.check
+++ b/test/files/run/origins.check
@@ -1,5 +1,5 @@
->> Origins goxbox.Socks.boop logged 65 calls from 3 distinguished sources.
+>> Origins tag 'boop' logged 65 calls from 3 distinguished sources.
50 Test$$anonfun$f3$1.apply(origins.scala:16)
10 Test$$anonfun$f2$1.apply(origins.scala:15)
diff --git a/test/files/run/origins.scala b/test/files/run/origins.scala
index ab873bc..0ad9229 100644
--- a/test/files/run/origins.scala
+++ b/test/files/run/origins.scala
@@ -1,8 +1,8 @@
-import scala.tools.nsc.util.Origins
+import scala.reflect.internal.util.Origins
package goxbox {
object Socks {
- val origins = Origins[Socks.type]("boop")
+ val origins = Origins("boop")
def boop(x: Int): Int = origins { 5 }
}
diff --git a/test/files/run/outertest.scala b/test/files/run/outertest.scala
new file mode 100644
index 0000000..fa0443f
--- /dev/null
+++ b/test/files/run/outertest.scala
@@ -0,0 +1,57 @@
+// A test for the case where the outer field of class B#J should be eliminated.
+
+import reflect.ClassTag
+
+abstract class A {
+ abstract class I
+
+ val foo = this
+}
+
+class B extends A {
+ class J extends I {
+ val bar = foo
+ }
+
+ type II = I
+ class K extends II {
+ val bar = foo
+ }
+
+ class L extends (I @annotation.tailrec) {
+ val bar = foo
+ }
+}
+
+
+class C extends A {
+ val c: C = this
+
+ class M extends c.I {
+ val bar = foo
+ }
+}
+
+
+object Test extends App {
+ val b = new B
+ val c0 = new C
+ val c = new { override val c = c0 } with C
+
+ assert((new b.J).bar eq b)
+ assert((new b.K).bar eq b)
+ assert((new b.L).bar eq b)
+ assert((new c.M).bar eq c)
+
+ def checkOuterFields[C: ClassTag](expected: Int) {
+ val cls = implicitly[ClassTag[C]].runtimeClass
+ val outerFields = cls.getDeclaredFields().filter(_.getName.contains("$outer"))
+ assert(outerFields.size == expected, outerFields.map(_.getName))
+ }
+
+ checkOuterFields[A#I](1) // the base class must have the $outer pointer
+ checkOuterFields[B#J](0) // reuse parent class' $outer pointer
+ checkOuterFields[B#K](0) // ... through an alias
+ checkOuterFields[B#L](0) // ... through the annotated type
+ checkOuterFields[C#M](1) // different prefix, can't share.
+}
diff --git a/test/files/run/packrat1.scala b/test/files/run/packrat1.scala
index 47e77da..b5a4687 100644
--- a/test/files/run/packrat1.scala
+++ b/test/files/run/packrat1.scala
@@ -2,7 +2,7 @@ import scala.util.parsing.combinator._
import scala.util.parsing.combinator.syntactical.StandardTokenParsers
import scala.util.parsing.input._
-import scala.util.parsing.syntax._
+import scala.util.parsing.combinator.token._
import scala.collection.mutable.HashMap
diff --git a/test/files/run/packrat2.scala b/test/files/run/packrat2.scala
index 1ea8285..f55021a 100644
--- a/test/files/run/packrat2.scala
+++ b/test/files/run/packrat2.scala
@@ -2,7 +2,7 @@ import scala.util.parsing.combinator._
import scala.util.parsing.combinator.syntactical.StandardTokenParsers
import scala.util.parsing.input._
-import scala.util.parsing.syntax._
+import scala.util.parsing.combinator.token._
import scala.collection.mutable.HashMap
diff --git a/test/files/run/packrat3.check b/test/files/run/packrat3.check
index 4d84623..8c10626 100644
--- a/test/files/run/packrat3.check
+++ b/test/files/run/packrat3.check
@@ -4,4 +4,4 @@
(((List(a, a, a, a, b, b, b, b)~())~List(a, a, a, a))~List(b, b, b, b, c, c, c, c))
Expected failure
``b'' expected but `c' found
-``c'' expected but EOF found
+end of input
diff --git a/test/files/run/packrat3.scala b/test/files/run/packrat3.scala
index e58d16a..216ef8f 100644
--- a/test/files/run/packrat3.scala
+++ b/test/files/run/packrat3.scala
@@ -2,7 +2,7 @@ import scala.util.parsing.combinator._
import scala.util.parsing.combinator.syntactical.StandardTokenParsers
import scala.util.parsing.input._
-import scala.util.parsing.syntax._
+import scala.util.parsing.combinator.token._
import scala.collection.mutable.HashMap
diff --git a/test/files/run/parmap-ops.scala b/test/files/run/parmap-ops.scala
new file mode 100644
index 0000000..f93bd7b
--- /dev/null
+++ b/test/files/run/parmap-ops.scala
@@ -0,0 +1,48 @@
+import collection._
+
+object Test {
+
+ def main(args: Array[String]) {
+ val gm: GenMap[Int, Int] = GenMap(0 -> 0, 1 -> 1).par
+
+ // ops
+ assert(gm.isDefinedAt(1))
+ assert(gm.contains(1))
+ assert(gm.getOrElse(1, 2) == 1)
+ assert(gm.getOrElse(2, 3) == 3)
+ assert(gm.keysIterator.toSet == Set(0, 1))
+ assert(gm.valuesIterator.toSet == Set(0, 1))
+ assert(gm.keySet == Set(0, 1))
+ assert(gm.keys.toSet == Set(0, 1))
+ assert(gm.values.toSet == Set(0, 1))
+ try {
+ gm.default(-1)
+ assert(false)
+ } catch {
+ case e: NoSuchElementException => // ok
+ }
+
+ assert(gm.filterKeys(_ % 2 == 0)(0) == 0)
+ assert(gm.filterKeys(_ % 2 == 0).get(1) == None)
+ assert(gm.mapValues(_ + 1)(0) == 1)
+
+ // with defaults
+ val pm = parallel.mutable.ParMap(0 -> 0, 1 -> 1)
+ val dm = pm.withDefault(x => -x)
+ assert(dm(0) == 0)
+ assert(dm(1) == 1)
+ assert(dm(2) == -2)
+ assert(dm.updated(2, 2) == parallel.ParMap(0 -> 0, 1 -> 1, 2 -> 2))
+ dm.put(3, 3)
+ assert(dm(3) == 3)
+ assert(pm(3) == 3)
+ assert(dm(4) == -4)
+
+ val imdm = parallel.immutable.ParMap(0 -> 0, 1 -> 1).withDefault(x => -x)
+ assert(imdm(0) == 0)
+ assert(imdm(1) == 1)
+ assert(imdm(2) == -2)
+ assert(imdm.updated(2, 2) == parallel.ParMap(0 -> 0, 1 -> 1, 2 -> 2))
+ }
+
+}
diff --git a/test/files/run/parserFilter.check b/test/files/run/parserFilter.check
new file mode 100644
index 0000000..be04454
--- /dev/null
+++ b/test/files/run/parserFilter.check
@@ -0,0 +1,9 @@
+[1.3] failure: Input doesn't match filter: false
+
+if false
+ ^
+[1.1] failure: Input doesn't match filter: not
+
+not true
+^
+[1.8] parsed: (if~true)
diff --git a/test/files/run/parserFilter.scala b/test/files/run/parserFilter.scala
new file mode 100644
index 0000000..d007d44
--- /dev/null
+++ b/test/files/run/parserFilter.scala
@@ -0,0 +1,15 @@
+object Test extends scala.util.parsing.combinator.RegexParsers {
+ val keywords = Set("if", "false")
+ def word: Parser[String] = "\\w+".r
+
+ def keyword: Parser[String] = word filter (keywords.contains)
+ def ident: Parser[String] = word filter(!keywords.contains(_))
+
+ def test = keyword ~ ident
+
+ def main(args: Array[String]) {
+ println(parseAll(test, "if false"))
+ println(parseAll(test, "not true"))
+ println(parseAll(test, "if true"))
+ }
+}
diff --git a/test/files/run/parserForFilter.check b/test/files/run/parserForFilter.check
new file mode 100644
index 0000000..a53c147
--- /dev/null
+++ b/test/files/run/parserForFilter.check
@@ -0,0 +1 @@
+[1.13] parsed: (second,first)
diff --git a/test/files/run/parserForFilter.scala b/test/files/run/parserForFilter.scala
new file mode 100644
index 0000000..1bc44f8
--- /dev/null
+++ b/test/files/run/parserForFilter.scala
@@ -0,0 +1,12 @@
+object Test extends scala.util.parsing.combinator.RegexParsers {
+ def word: Parser[String] = "\\w+".r
+
+ def twoWords = for {
+ (a ~ b) <- word ~ word
+ } yield (b, a)
+
+ def main(args: Array[String]) {
+ println(parseAll(twoWords, "first second"))
+ }
+}
+
diff --git a/test/files/run/parserJavaIdent.check b/test/files/run/parserJavaIdent.check
new file mode 100644
index 0000000..597ddbe
--- /dev/null
+++ b/test/files/run/parserJavaIdent.check
@@ -0,0 +1,26 @@
+[1.7] parsed: simple
+[1.8] parsed: with123
+[1.6] parsed: with$
+[1.10] parsed: withøßöèæ
+[1.6] parsed: with_
+[1.6] parsed: _with
+[1.1] failure: java identifier expected
+
+3start
+^
+[1.1] failure: java identifier expected
+
+-start
+^
+[1.5] failure: java identifier expected
+
+with-s
+ ^
+[1.3] failure: java identifier expected
+
+we♥scala
+ ^
+[1.6] failure: java identifier expected
+
+with space
+ ^
diff --git a/test/files/run/parserJavaIdent.scala b/test/files/run/parserJavaIdent.scala
new file mode 100644
index 0000000..c068075
--- /dev/null
+++ b/test/files/run/parserJavaIdent.scala
@@ -0,0 +1,26 @@
+object Test extends scala.util.parsing.combinator.JavaTokenParsers {
+
+ def test[A](s: String) {
+ val res = parseAll(ident, s) match {
+ case Failure(_, in) => Failure("java identifier expected", in)
+ case o => o
+ }
+ println(res)
+ }
+
+ def main(args: Array[String]) {
+ // Happy tests
+ test("simple")
+ test("with123")
+ test("with$")
+ test("withøßöèæ")
+ test("with_")
+ test("_with")
+ // Sad tests
+ test("3start")
+ test("-start")
+ test("with-s")
+ test("we♥scala")
+ test("with space")
+ }
+}
diff --git a/test/files/run/parserNoSuccessMessage.check b/test/files/run/parserNoSuccessMessage.check
new file mode 100644
index 0000000..fe00d2f
--- /dev/null
+++ b/test/files/run/parserNoSuccessMessage.check
@@ -0,0 +1,20 @@
+[1.2] failure: string matching regex `\d+' expected but `x' found
+
+-x
+ ^
+[1.1] failure: string matching regex `\d+' expected but `x' found
+
+x
+^
+[1.3] parsed: (Some(-)~5)
+[1.2] parsed: (None~5)
+[1.2] error: Number expected!
+
+-x
+ ^
+[1.1] error: Number expected!
+
+x
+^
+[1.3] parsed: (Some(-)~5)
+[1.2] parsed: (None~5)
diff --git a/test/files/run/parserNoSuccessMessage.scala b/test/files/run/parserNoSuccessMessage.scala
new file mode 100644
index 0000000..93aa252
--- /dev/null
+++ b/test/files/run/parserNoSuccessMessage.scala
@@ -0,0 +1,19 @@
+object Test extends scala.util.parsing.combinator.RegexParsers {
+ def sign = "-"
+ def number = "\\d+".r
+ def p = sign.? ~ number withErrorMessage "Number expected!"
+ def q = sign.? ~! number withErrorMessage "Number expected!"
+
+ def main(args: Array[String]) {
+ println(parseAll(p, "-x"))
+ println(parseAll(p, "x"))
+ println(parseAll(p, "-5"))
+ println(parseAll(p, "5"))
+ println(parseAll(q, "-x"))
+ println(parseAll(q, "x"))
+ println(parseAll(q, "-5"))
+ println(parseAll(q, "5"))
+ }
+}
+
+
diff --git a/test/files/run/partialfun.check b/test/files/run/partialfun.check
new file mode 100644
index 0000000..d4e9f49
--- /dev/null
+++ b/test/files/run/partialfun.check
@@ -0,0 +1,6 @@
+47
+147
+100
+0:isDefinedAt
+1:isDefinedAt
+2:apply
diff --git a/test/files/run/partialfun.scala b/test/files/run/partialfun.scala
new file mode 100644
index 0000000..f3c53b9
--- /dev/null
+++ b/test/files/run/partialfun.scala
@@ -0,0 +1,86 @@
+import collection._
+import collection.generic._
+
+object Test {
+ def collectIDA[A, B, Repr, That](_this: TraversableLike[A, Repr])(pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ val repr: Repr = _this.asInstanceOf[Repr]
+ val b = bf(repr)
+ _this foreach { x => if (pf isDefinedAt x) b += pf(x) }
+ b.result
+ }
+
+ def collectRW[A, B, Repr, That](_this: TraversableLike[A, Repr])(pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ val repr: Repr = _this.asInstanceOf[Repr]
+ val b = bf(repr)
+ val f = pf runWith { b += _ }
+ _this foreach f
+ b.result
+ }
+
+ var cnt = 0
+
+ object Ex1 {
+ def unapply(x: Int) : Option[Int] = {
+ cnt += 1
+ if ((x % 3) == 0) Some(-x) else None
+ }
+ }
+
+ object Ex2 {
+ def unapply(x: Int) : Option[Int] = {
+ //cnt += 1
+ if ((x % 5) == 0) Some(x) else None
+ }
+ }
+
+ def resetCnt() = { val r = cnt; cnt = 0; r }
+
+ val pf: PartialFunction[Int,Int] = {
+ case Ex1(result) => result
+ case Ex2(result) => result
+ }
+
+ def collectTest() {
+ val xs = 1 to 100
+ resetCnt()
+
+ val ysIDA = collectIDA(xs)(pf)
+ val cntIDA = resetCnt()
+
+ val ysRW = collectRW(xs)(pf)
+ val cntRW = resetCnt()
+
+ val ys = xs collect pf
+
+ assert(ys == ysIDA)
+ assert(ys == ysRW)
+ assert(cntIDA == xs.length + ys.length)
+ assert(cntRW == xs.length)
+ println(ys.length)
+ println(cntIDA)
+ println(cntRW)
+ }
+
+ def orElseTest() {
+ val pf0 = new PartialFunction[Unit, Unit] {
+ def apply(u: Unit) { println("0:apply") }
+ def isDefinedAt(u: Unit) = { println("0:isDefinedAt"); false }
+ }
+ val pf1 = new PartialFunction[Unit, Unit] {
+ def apply(u: Unit) { println("1:apply") }
+ def isDefinedAt(u: Unit) = { println("1:isDefinedAt"); false }
+ }
+ val pf2 = new PartialFunction[Unit, Unit] {
+ def apply(u: Unit) { println("2:apply") }
+ def isDefinedAt(u: Unit) = { println("2:isDefinedAt"); true }
+ }
+
+ val chained = pf0 orElse pf1 orElse pf2
+ chained()
+ }
+
+ def main(args: Array[String]): Unit = {
+ collectTest()
+ orElseTest()
+ }
+}
diff --git a/test/files/run/patmat-finally.scala b/test/files/run/patmat-finally.scala
new file mode 100644
index 0000000..6f769b3
--- /dev/null
+++ b/test/files/run/patmat-finally.scala
@@ -0,0 +1,25 @@
+/** Test pattern matching and finally, see SI-5929. */
+object Test extends App {
+ def bar(s1: Object, s2: Object) {
+ s1 match {
+ case _ =>
+ }
+
+ try {
+ ()
+ } finally {
+ s2 match {
+ case _ =>
+ }
+ }
+ }
+
+ def x = {
+ null match { case _ => }
+
+ try { 1 } finally { while(false) { } }
+ }
+
+ bar(null, null)
+ x
+}
diff --git a/test/files/run/patmat_unapp_abstype-new.check b/test/files/run/patmat_unapp_abstype-new.check
new file mode 100644
index 0000000..42c5463
--- /dev/null
+++ b/test/files/run/patmat_unapp_abstype-new.check
@@ -0,0 +1,4 @@
+TypeRef
+MethodType
+Bar
+Foo
diff --git a/test/files/run/patmat_unapp_abstype-new.scala b/test/files/run/patmat_unapp_abstype-new.scala
new file mode 100644
index 0000000..1141177
--- /dev/null
+++ b/test/files/run/patmat_unapp_abstype-new.scala
@@ -0,0 +1,76 @@
+import reflect.{ClassTag, classTag}
+
+// abstract types and extractors, oh my!
+trait TypesAPI {
+ trait Type
+
+ type TypeRef <: Type
+ val TypeRef: TypeRefExtractor; trait TypeRefExtractor {
+ def apply(x: Int): TypeRef
+ def unapply(x: TypeRef): Option[(Int)]
+ }
+
+ // just for illustration, should follow the same pattern as TypeRef
+ case class MethodType(n: Int) extends Type
+}
+
+// user should not be exposed to the implementation
+trait TypesUser extends TypesAPI {
+ def shouldNotCrash(tp: Type): Unit = {
+ tp match {
+ case TypeRef(x) => println("TypeRef")
+ case MethodType(x) => println("MethodType")
+ case _ => println("none of the above")
+ }
+ }
+}
+
+trait TypesImpl extends TypesAPI {
+ object TypeRef extends TypeRefExtractor // this will have a bridged unapply(x: Type) = unapply(x.asInstanceOf[TypeRef])
+ case class TypeRef(n: Int) extends Type // this has a bridge from TypesAPI#Type to TypesImpl#TypeRef
+ // --> the cast in the bridge will fail because the pattern matcher can't type test against the abstract types in TypesUser
+}
+
+trait Foos {
+ trait Bar
+ type Foo <: Bar
+ trait FooExtractor {
+ def unapply(foo: Foo): Option[Int]
+ }
+ val Foo: FooExtractor
+}
+
+trait RealFoos extends Foos {
+ class Foo(val x: Int) extends Bar
+ object Foo extends FooExtractor {
+ def unapply(foo: Foo): Option[Int] = Some(foo.x)
+ }
+}
+
+trait Intermed extends Foos {
+ def crash(bar: Bar): Unit =
+ bar match {
+ case Foo(x) => println("Foo")
+ case _ => println("Bar")
+ }
+}
+
+object TestUnappStaticallyKnownSynthetic extends TypesImpl with TypesUser {
+ def test() = {
+ shouldNotCrash(TypeRef(10)) // prints "TypeRef"
+ shouldNotCrash(MethodType(10)) // prints "MethodType"
+ }
+}
+
+object TestUnappDynamicSynth extends RealFoos with Intermed {
+ case class NotAFoo(n: Int) extends Bar
+ def test() = {
+ crash(NotAFoo(10))
+ crash(new Foo(5))
+ }
+}
+
+object Test extends App {
+ TestUnappStaticallyKnownSynthetic.test()
+ TestUnappDynamicSynth.test()
+}
diff --git a/test/files/run/patmat_unapp_abstype-old.check b/test/files/run/patmat_unapp_abstype-old.check
new file mode 100644
index 0000000..72239d1
--- /dev/null
+++ b/test/files/run/patmat_unapp_abstype-old.check
@@ -0,0 +1,4 @@
+TypeRef
+none of the above
+Bar
+Foo
diff --git a/test/files/run/patmat_unapp_abstype-old.flags b/test/files/run/patmat_unapp_abstype-old.flags
new file mode 100644
index 0000000..ba80cad
--- /dev/null
+++ b/test/files/run/patmat_unapp_abstype-old.flags
@@ -0,0 +1 @@
+-Xoldpatmat
diff --git a/test/files/run/patmat_unapp_abstype-old.scala b/test/files/run/patmat_unapp_abstype-old.scala
new file mode 100644
index 0000000..45496f0
--- /dev/null
+++ b/test/files/run/patmat_unapp_abstype-old.scala
@@ -0,0 +1,83 @@
+// abstract types and extractors, oh my!
+trait TypesAPI {
+ trait Type
+
+ // an alternative fix (implemented in the virtual pattern matcher, is to replace the isInstanceOf by a manifest-based run-time test)
+ // that's what typeRefMani is for
+ type TypeRef <: Type //; implicit def typeRefMani: Manifest[TypeRef]
+ val TypeRef: TypeRefExtractor; trait TypeRefExtractor {
+ def apply(x: Int): TypeRef
+ def unapply(x: TypeRef): Option[(Int)]
+ }
+
+ // just for illustration, should follow the same pattern as TypeRef
+ case class MethodType(n: Int) extends Type
+}
+
+// user should not be exposed to the implementation
+trait TypesUser extends TypesAPI {
+ def shouldNotCrash(tp: Type): Unit = {
+ tp match {
+ case TypeRef(x) => println("TypeRef")
+ // the above checks tp.isInstanceOf[TypeRef], which is erased to tp.isInstanceOf[Type]
+ // before calling TypeRef.unapply(tp), which will then crash unless tp.isInstanceOf[TypesImpl#TypeRef] (which is not implied by tp.isInstanceOf[Type])
+ // tp.isInstanceOf[TypesImpl#TypeRef] is equivalent to classOf[TypesImpl#TypeRef].isAssignableFrom(tp.getClass)
+ // this is equivalent to manifest
+ // it is NOT equivalent to manifest[Type] <:< typeRefMani
+ case MethodType(x) => println("MethodType")
+ case _ => println("none of the above")
+ }
+ }
+}
+
+trait TypesImpl extends TypesAPI {
+ object TypeRef extends TypeRefExtractor // this will have a bridged unapply(x: Type) = unapply(x.asInstanceOf[TypeRef])
+ case class TypeRef(n: Int) extends Type // this has a bridge from TypesAPI#Type to TypesImpl#TypeRef
+ // --> the cast in the bridge will fail because the pattern matcher can't type test against the abstract types in TypesUser
+ //lazy val typeRefMani = manifest[TypeRef]
+}
+
+trait Foos {
+ trait Bar
+ type Foo <: Bar
+ trait FooExtractor {
+ def unapply(foo: Foo): Option[Int]
+ }
+ val Foo: FooExtractor
+}
+
+trait RealFoos extends Foos {
+ class Foo(val x: Int) extends Bar
+ object Foo extends FooExtractor {
+ def unapply(foo: Foo): Option[Int] = Some(foo.x)
+ }
+}
+
+trait Intermed extends Foos {
+ def crash(bar: Bar): Unit =
+ bar match {
+ case Foo(x) => println("Foo")
+ case _ => println("Bar")
+ }
+}
+
+object TestUnappStaticallyKnownSynthetic extends TypesImpl with TypesUser {
+ def test() = {
+ shouldNotCrash(TypeRef(10)) // should and does print "TypeRef"
+ // once #1697/#2337 are fixed, this should generate the correct output
+ shouldNotCrash(MethodType(10)) // should print "MethodType" but prints "none of the above" -- good one, pattern matcher!
+ }
+}
+
+object TestUnappDynamicSynth extends RealFoos with Intermed {
+ case class FooToo(n: Int) extends Bar
+ def test() = {
+ crash(FooToo(10))
+ crash(new Foo(5))
+ }
+}
+
+object Test extends App {
+ TestUnappStaticallyKnownSynthetic.test()
+ TestUnappDynamicSynth.test()
+}
diff --git a/test/files/run/patmatnew.scala b/test/files/run/patmatnew.scala
index e2c95e3..a6f8199 100644
--- a/test/files/run/patmatnew.scala
+++ b/test/files/run/patmatnew.scala
@@ -1,102 +1,81 @@
-trait Treez { self: Shmeez =>
- abstract class Tree
- case class Beez(i:Int) extends Tree
- case object HagbardCeline extends Tree
-}
-
-trait Shmeez extends AnyRef with Treez {
- val tree: Tree
-
- def foo = tree match {
- case Beez(2) => 1
- case HagbardCeline => 0
- }
-}
-
-import scala.testing.SUnit._
-
-object Test extends TestConsoleMain {
-
- //just compilation
- def zipFun[a,b](xs:List[a], ys:List[b]):List[Pair[a,b]] = (Pair(xs,ys): @unchecked) match {
- // !!! case Pair(List(), _), Pair(_, List()) => List()
- case (x :: xs1, y :: ys1) => (x, y) :: zipFun(xs1, ys1)
- }
-
- def suite = new TestSuite(
- new TestSimpleIntSwitch,
- new SimpleUnapply,
- SeqUnapply,
- applyFromJcl,
- new Test717,
- TestGuards,
- TestEqualsPatternOpt,
- TestSequence01,
- TestSequence02,
- TestSequence03,
- TestSequence04,
- TestSequence05,
- TestSequence06,
- TestSequence07,
- TestSequence08,
- TestStream,
- new Test903,
- new Test1163_Order,
- new TestUnbox,
- Bug457,
- Bug508,
- Bug789,
- Bug995,
- Bug1093,
- Bug1094,
- ClassDefInGuard,
- Ticket2,
- Ticket11,
- Ticket37,
- Ticket44,
- Ticket346
- )
-
- class Foo(j:Int) {
- case class Bar(i:Int)
- }
- class SimpleUnapply extends TestCase("simpleUnapply") {
- override def runTest() { // from sortedmap, old version
+object Test {
+
+ def main(args: Array[String]) {
+ ApplyFromJcl.run()
+ Bug1093.run()
+ Bug1094.run()
+ Bug1270.run()
+ Bug1281.run()
+ Bug457.run()
+ Bug508.run()
+ Bug789.run()
+ Bug881.run()
+ Bug995.run()
+ ClassDefInGuard.run()
+ SeqUnapply.run()
+ SimpleUnapply.run()
+ Test1163_Order.run()
+ Test717.run()
+ Test903.run()
+ TestEqualsPatternOpt.run()
+ TestGuards.run()
+ TestSequence01.run()
+ TestSequence02.run()
+ TestSequence03.run()
+ TestSequence04.run()
+ TestSequence05.run()
+ TestSequence06.run()
+ TestSequence07.run()
+ TestSequence08.run()
+ TestSimpleIntSwitch.run()
+ TestStream.run()
+ TestUnbox.run()
+ Ticket11.run()
+ Ticket2.run()
+ Ticket346.run()
+ Ticket37.run()
+ Ticket44.run()
+ }
+
+ def assertEquals(a: Any, b: Any) { assert(a == b) }
+ def assertEquals(msg: String, a: Any, b: Any) { assert(a == b, msg) }
+
+ object SimpleUnapply {
+ def run() { // from sortedmap, old version
List((1, 2)).head match {
- case kv @ Pair(key, _) => kv.toString + " " + key.toString
+ case kv at Pair(key, _) => kv.toString + " " + key.toString
}
-
}
}
- object SeqUnapply extends TestCase("seqUnapply") {
+ object SeqUnapply {
case class SFB(i: Int, xs: List[Int])
- override def runTest() {
- List(1,2) match {
+ def run() {
+ List(1, 2) match {
case List(1) => assert(false, "wrong case")
- case List(1,2,xs @ _*) => assert(xs.isEmpty, "not empty")
+ case List(1, 2, xs at _*) => assert(xs.isEmpty, "not empty")
case Nil => assert(false, "wrong case")
}
- SFB(1,List(1)) match {
+ SFB(1, List(1)) match {
case SFB(_, List(x)) => assert(x == 1)
case SFB(_, _) => assert(false)
}
}
}
- object applyFromJcl extends TestCase("applyFromJcl") {
- override def runTest {
- val p = (1,2)
- Some(2) match {
- case Some(p._2) =>
- case _ => assert(false)
- }
+ object ApplyFromJcl {
+ def run() {
+ val p = (1, 2)
+ Some(2) match {
+ case Some(p._2) =>
+ case _ => assert(false)
+ }
}
}
- class TestSimpleIntSwitch extends TestCase("SimpleIntSwitch") {
- override def runTest() = {
+ object TestSimpleIntSwitch {
+ def run() {
assertEquals("s1", 1, 1 match {
case 3 => 3
case 2 => 2
@@ -104,15 +83,15 @@ object Test extends TestConsoleMain {
case 0 => 0
})
assertEquals("s2", 1, 1 match {
- case 1 => 1
+ case 1 => 1
case _ => 0
})
- assertEquals("s2boxed", 1, (1:Any) match {
- case 1 => 1
+ assertEquals("s2boxed", 1, (1: Any) match {
+ case 1 => 1
case _ => 0
})
assertEquals("s3", 1, ("hello") match {
- case s:String => 1
+ case s: String => 1
//case _ => 0 // unreachable!
})
val xyz: (Int, String, Boolean) = (1, "abc", true);
@@ -122,69 +101,93 @@ object Test extends TestConsoleMain {
})
}
}
- class Test717 extends TestCase("#717 test path of case classes") {
+
+ // #717 test path of case classes
+ object Test717 {
+ class Foo(j: Int) {
+ case class Bar(i: Int)
+ }
val foo1 = new Foo(1)
val foo2 = new Foo(2)
-
- override def runTest() = {
- val res = (foo1.Bar(2):Any) match {
- case foo2.Bar(2) => false
- case foo1.Bar(2) => true
+ def run() {
+ val res = (foo1.Bar(2): Any) match {
+ case foo2.Bar(2) => false
+ case foo1.Bar(2) => true
}
- assertTrue("ok", res);
+ assert(res)
}
}
- object TestGuards extends TestCase("multiple guards for same pattern") with Shmeez {
- val tree:Tree = Beez(2)
- override def runTest = {
+ ///
+
+ trait Treez { self: Shmeez =>
+ abstract class Tree
+ case class Beez(i: Int) extends Tree
+ case object HagbardCeline extends Tree
+ }
+
+ trait Shmeez extends AnyRef with Treez {
+ val tree: Tree
+
+ def foo = tree match {
+ case Beez(2) => 1
+ case HagbardCeline => 0
+ }
+ }
+
+ // multiple guards for same pattern
+ object TestGuards extends Shmeez {
+ val tree: Tree = Beez(2)
+ def run() {
val res = tree match {
case Beez(x) if x == 3 => false
case Beez(x) if x == 2 => true
}
- assertTrue("ok", res);
- val ret = (Beez(3):Tree) match {
+ assert(res)
+ val ret = (Beez(3): Tree) match {
case Beez(x) if x == 3 => true
case Beez(x) if x == 2 => false
}
- assertTrue("ok", ret);
+ assert(ret)
}
}
- object TestEqualsPatternOpt extends TestCase("test EqualsPatternClass in combination with MixTypes opt, bug #1276") {
+ // test EqualsPatternClass in combination with MixTypes opt, bug #1276
+ object TestEqualsPatternOpt {
val NoContext = new Object
- override def runTest {
- assertEquals(1,((NoContext:Any) match {
- case that : AnyRef if this eq that => 0
+ def run() {
+ assertEquals(1, ((NoContext: Any) match {
+ case that: AnyRef if this eq that => 0
case NoContext => 1
case _ => 2
}))
}
}
- object TestSequence01 extends TestCase("uno (all ignoring patterns on List)") {
+ // all ignoring patterns on List
+ object TestSequence01 {
def doMatch(xs: List[String]): String = xs match {
case List(_*) => "ok"
}
def doMatch2(xs: List[String]): List[String] = xs match {
- case List(_, rest @ _*) => rest.toList
+ case List(_, rest at _*) => rest.toList
}
- override def runTest() {
+ def run() {
val list1 = List()
assertEquals(doMatch(list1), "ok")
- val list2 = List("1","2","3")
+ val list2 = List("1", "2", "3")
assertEquals(doMatch(list2), "ok")
- val list3 = List("1","2","3")
- assertEquals(doMatch2(list3), List("2","3"))
+ val list3 = List("1", "2", "3")
+ assertEquals(doMatch2(list3), List("2", "3"))
}
}
-
- object TestSequence02 extends TestCase("due (all ignoring patterns on Seq)") {
+ // all ignoring patterns on Seq
+ object TestSequence02 {
def doMatch(l: Seq[String]): String = l match {
case Seq(_*) => "ok"
}
- override def runTest() {
+ def run() {
val list1 = List()
assertEquals(doMatch(list1), "ok")
val list2 = List("1", "2", "3")
@@ -196,112 +199,104 @@ object Test extends TestConsoleMain {
}
}
- object TestSequence03 extends TestCase("tre (right-ignoring patterns on List, defaults)") {
+ // right-ignoring patterns on List, defaults
+ object TestSequence03 {
def doMatch(xs: List[String]): String = xs match {
- case List(_,_,_,_*) => "ok"
+ case List(_, _, _, _*) => "ok"
case _ => "not ok"
}
- override def runTest() {
+ def run() {
val list1 = List()
assertEquals(doMatch(list1), "not ok")
- val list2 = List("1","2","3")
+ val list2 = List("1", "2", "3")
assertEquals(doMatch(list2), "ok")
- val list3 = List("1","2","3","4")
+ val list3 = List("1", "2", "3", "4")
assertEquals(doMatch(list3), "ok")
}
}
-
- object TestSequence04 extends TestCase("quattro (all- and right-ignoring pattern on case class w/ seq param)") {
+ // all- and right-ignoring pattern on case class w/ seq param
+ object TestSequence04 {
case class Foo(i: Int, chars: Char*)
- override def runTest() = {
+ def run() {
val a = Foo(0, 'a') match {
- case Foo(i, c, chars @ _*) => c
+ case Foo(i, c, chars at _*) => c
case _ => null
}
assertEquals(a, 'a')
val b = Foo(0, 'a') match {
- case Foo(i, chars @ _*) => 'b'
+ case Foo(i, chars at _*) => 'b'
case _ => null
}
assertEquals(b, 'b')
}
}
- object TestSequence05 extends TestCase("cinque (sealed case class with ignoring seq patterns)") {
+ // sealed case class with ignoring seq patterns
+ object TestSequence05 {
sealed abstract class Con;
case class Foo() extends Con
- case class Bar(xs:Con*) extends Con
-
- override def runTest() {
- val res = (Bar(Foo()):Con) match {
+ case class Bar(xs: Con*) extends Con
+
+ def run() {
+ val res = (Bar(Foo()): Con) match {
case Bar(xs at _*) => xs // this should be optimized away to a pattern Bar(xs)
case _ => Nil
}
- assertEquals("res instance"+res.isInstanceOf[Seq[Con] forSome { type Con }]+" res(0)="+res(0), true, res.isInstanceOf[Seq[Foo] forSome { type Foo}] && res(0) == Foo() )
+ assertEquals("res instance" + res.isInstanceOf[Seq[Con] forSome { type Con }] + " res(0)=" + res(0), true, res.isInstanceOf[Seq[Foo] forSome { type Foo }] && res(0) == Foo())
}
}
- object TestSequence06 extends TestCase("sei (not regular) fancy guards / bug#644 ") {
-
+ // (not regular) fancy guards / bug#644
+ object TestSequence06 {
+
case class A(i: Any)
-
+
def doMatch(x: Any, bla: Int) = x match {
- case x:A if (bla==1) => 0
+ case x: A if (bla == 1) => 0
case A(1) => 1
case A(A(1)) => 2
}
- override def runTest() {
- assertEquals(doMatch(A(null),1), 0)
- assertEquals(doMatch(A(1),2), 1)
- assertEquals(doMatch(A(A(1)),2), 2)
+ def run() {
+ assertEquals(doMatch(A(null), 1), 0)
+ assertEquals(doMatch(A(1), 2), 1)
+ assertEquals(doMatch(A(A(1)), 2), 2)
}
}
- object TestSequence07 extends TestCase("sette List of chars") {
+ // List of chars
+ object TestSequence07 {
def doMatch1(xs: List[Char]) = xs match {
- case List(x, y, _*) => x::y::Nil
+ case List(x, y, _*) => x :: y :: Nil
}
def doMatch2(xs: List[Char]) = xs match {
- case List(x, y, z, w) => List(z,w)
- }
- //def doMatch3(xs:List[char]) = xs match {
- // case List(_*, z, w) => w::Nil
- //}
- //
- // Since the second case should have been unreachable all along,
- // let's just comment this one out.
- //
- // def doMatch4(xs:Seq[Char]) = xs match {
- // case Seq(x, y, _*) => x::y::Nil
- // case Seq(x, y, z, w) => List(z,w) // redundant!
- // }
- def doMatch5(xs:Seq[Char]) = xs match {
- case Seq(x, y, 'c', w @ _*) => x::y::Nil
- case Seq(x, y, z @ _*) => z
- }
- def doMatch6(xs:Seq[Char]) = xs match {
- case Seq(x, 'b') => x::'b'::Nil
- case Seq(x, y, z @ _*) => z.toList
- }
-
- override def runTest() {
- assertEquals(List('a','b'), doMatch1(List('a','b','c','d')))
- assertEquals(List('c','d'), doMatch2(List('a','b','c','d')))
- // assertEquals(doMatch3(List('a','b','c','d')), List('d'))
- // assertEquals(List('a','b'), doMatch4(List('a','b','c','d')))
- assertEquals(List('a','b'), doMatch5(List('a','b','c','d')))
- assertEquals(List('c','d'), doMatch6(List('a','b','c','d')))
- }
- }
-
- object TestSequence08 extends TestCase("backquoted identifiers in pattern") {
- override def runTest() {
+ case List(x, y, z, w) => List(z, w)
+ }
+ def doMatch3(xs: Seq[Char]) = xs match {
+ case Seq(x, y, 'c', w at _*) => x :: y :: Nil
+ case Seq(x, y, z at _*) => z
+ }
+ def doMatch4(xs: Seq[Char]) = xs match {
+ case Seq(x, 'b') => x :: 'b' :: Nil
+ case Seq(x, y, z at _*) => z.toList
+ }
+
+ def run() {
+ assertEquals(List('a', 'b'), doMatch1(List('a', 'b', 'c', 'd')))
+ assertEquals(List('c', 'd'), doMatch2(List('a', 'b', 'c', 'd')))
+ assertEquals(List('a', 'b'), doMatch3(List('a', 'b', 'c', 'd')))
+ assertEquals(List('c', 'd'), doMatch4(List('a', 'b', 'c', 'd')))
+ }
+ }
+
+ // backquoted identifiers in pattern
+ object TestSequence08 {
+ def run() {
val xs = List(2, 3)
val ys = List(1, 2, 3) match {
case x :: `xs` => xs
@@ -311,24 +306,25 @@ object Test extends TestConsoleMain {
}
}
-
- object TestStream extends TestCase("unapply for Streams") {
+ // unapply for Streams
+ object TestStream {
def sum(stream: Stream[Int]): Int =
stream match {
case Stream.Empty => 0
case Stream.cons(hd, tl) => hd + sum(tl)
}
-
- val str: Stream[Int] = List(1,2,3).iterator.toStream
- def runTest() = assertEquals(sum(str), 6)
+ val str: Stream[Int] = List(1, 2, 3).iterator.toStream
+
+ def run() { assertEquals(sum(str), 6) }
}
-
- class Test1163_Order extends TestCase("bug#1163 order of temps must be preserved") {
+
+ // bug#1163 order of temps must be preserved
+ object Test1163_Order {
abstract class Function
case class Var(n: String) extends Function
case class Const(v: Double) extends Function
-
+
def f(): (Function, Function) = {
(Var("x"): Function, Var("y"): Function) match {
case (Const(v), Const(w)) => throw new Error
@@ -336,17 +332,17 @@ object Test extends TestConsoleMain {
case (leftTwo, rightTwo) => (leftTwo, rightTwo) // was giving "y","x"
}
}
-
- def flips(l: List[Int]): Int = (l: @unchecked) match {
+
+ def flips(l: List[Int]): Int = (l: @unchecked) match {
case 1 :: ls => 0
case n :: ls => flips((l take n reverse) ::: (l drop n)) + 1
}
- def runTest() = assertEquals("both", (Var("x"),Var("y")), f)
+ def run() { assertEquals("both", (Var("x"), Var("y")), f) }
}
- class TestUnbox extends TestCase("unbox") {
- override def runTest() {
+ object TestUnbox {
+ def run() {
val xyz: (Int, String, Boolean) = (1, "abc", true)
xyz._1 match {
case 1 => "OK"
@@ -356,82 +352,41 @@ object Test extends TestConsoleMain {
}
}
- class Test806_818 { // #806, #811 compile only -- type of bind
- // bug811
- trait Core {
- trait NodeImpl
- trait OtherImpl extends NodeImpl
- trait DoubleQuoteImpl extends NodeImpl
- def asDQ(node : OtherImpl) = node match {
- case dq : DoubleQuoteImpl => dq
- }
+ object Test903 {
+ class Person(_name: String, _father: Person) {
+ def name = _name
+ def father = _father
}
-
- trait IfElseMatcher {
- type Node <: NodeImpl
- trait NodeImpl
- trait IfImpl
- private def coerceIf(node: Node) = node match {
- case node : IfImpl => node // var node is of type Node with IfImpl!
- case _ => null
- }
- }
- }
-
- class Person(_name : String, _father : Person) {
- def name = _name
- def father = _father
- }
-
- object PersonFather {
- def unapply(p : Person) : Option[Person] =
- if (p.father == null)
- None
- else
- Some(p.father)
- }
-
- class Test903 extends TestCase("bug903") {
-
- override def runTest = {
- val p1 = new Person("p1",null)
- val p2 = new Person("p2",p1)
- assertEquals((p2.name, p1.name), p2 match {
- case aPerson at PersonFather(f) => (aPerson.name,f.name)
- case _ => "No father"
- })
- }
+ object PersonFather {
+ def unapply(p: Person): Option[Person] =
+ if (p.father == null)
+ None
+ else
+ Some(p.father)
+ }
+ def run() {
+ val p1 = new Person("p1", null)
+ val p2 = new Person("p2", p1)
+ assertEquals((p2.name, p1.name), p2 match {
+ case aPerson at PersonFather(f) => (aPerson.name, f.name)
+ case _ => "No father"
+ })
+ }
}
+ object Bug881 {
+ object Foo1 {
+ class Bar1(val x: String)
+ def p(b: Bar1) = b.x
- object Test1253 { // compile-only
- def foo(t : (Int, String)) = t match {
- case (1, "") => throw new Exception
- case (r, _) => throw new Exception(r.toString)
+ def unapply(s: String): Option[Bar1] =
+ Some(new Bar1(s))
}
- }
-
- object Foo1258 {
- case object baz
- def foo(bar : AnyRef) = {
- val Baz = baz
- bar match {
- case Baz => ()
- }
+ class Foo(j: Int) {
+ case class Bar(i: Int)
}
- }
-
- object Foo1 {
- class Bar1(val x : String)
- def p(b : Bar1) = Console.println(b.x)
-
- def unapply(s : String) : Option[Bar1] =
- Some(new Bar1(s))
- }
-
- object bug881 extends TestCase("881") {
- override def runTest = {
+ def run() {
"baz" match {
case Foo1(x) =>
Foo1.p(x)
@@ -439,32 +394,31 @@ object Test extends TestConsoleMain {
}
}
-
// these are exhaustive matches
// should not generate any warnings
- def f[A](z:(Option[A],Option[A])) = z match {
- case Pair(None,Some(x)) => 1
- case Pair(Some(x),None ) => 2
- case Pair(Some(x),Some(y)) => 3
+ def f[A](z: (Option[A], Option[A])) = z match {
+ case Pair(None, Some(x)) => 1
+ case Pair(Some(x), None) => 2
+ case Pair(Some(x), Some(y)) => 3
case _ => 4
}
-
- def g1[A](z:Option[List[A]]) = z match {
- case Some(Nil) => true
- case Some(x::Nil) => true
+
+ def g1[A](z: Option[List[A]]) = z match {
+ case Some(Nil) => true
+ case Some(x :: Nil) => true
case _ => true
}
-
- def g2[A](z:Option[List[A]]) = z match {
- case Some(x::Nil) => true
+
+ def g2[A](z: Option[List[A]]) = z match {
+ case Some(x :: Nil) => true
case Some(_) => false
case _ => true
}
-
- def h[A](x: (Option[A],Option[A])) = x match {
- case Pair(None,_:Some[_]) => 1
- case Pair(_:Some[_],None ) => 2
- case Pair(_:Some[_],_:Some[_]) => 3
+
+ def h[A](x: (Option[A], Option[A])) = x match {
+ case Pair(None, _: Some[_]) => 1
+ case Pair(_: Some[_], None) => 2
+ case Pair(_: Some[_], _: Some[_]) => 3
case _ => 4
}
@@ -474,7 +428,7 @@ object Test extends TestConsoleMain {
case (h1 :: t1, h2 :: t2) => 'c'
}
- def k (x:AnyRef) = x match {
+ def k(x: AnyRef) = x match {
case null => 1
case _ => 2
}
@@ -484,125 +438,59 @@ object Test extends TestConsoleMain {
case FooBar => true
}
- object Bug1270 { // unapply13
-
+ object Bug1270 { // unapply13
class Sync {
def apply(x: Int): Int = 42
def unapply(scrut: Any): Option[Int] = None
}
-
class Buffer {
object Get extends Sync
-
+
var ps: PartialFunction[Any, Any] = {
case Get(y) if y > 4 => // y gets a wildcard type for some reason?! hack
}
}
-
- println((new Buffer).ps.isDefinedAt(42))
- }
-
- object Bug1261 {
- sealed trait Elem
- case class Foo() extends Elem
- case class Bar() extends Elem
- trait Row extends Elem
- object Row {
- def unapply(r: Row) = true
-
- def f(elem: Elem) {
- elem match {
- case Bar() => ;
- case Row() => ;
- case Foo() => ; // used to give ERROR (unreachable code)
- }}}
- }
-/*
- object Feature1196 {
- def f(l: List[Int]) { }
-
- val l: Seq[Int] = List(1, 2, 3)
-
- l match {
- case x @ List(1, _) => f(x) // x needs to get better type List[int] here
- }
- }
-*/
- object TestIfOpt { //compile-only "test EqualsPatternClass in combination with MixTypes opt, bug #1278"
- trait Token {
- val offset : Int
- def matching : Option[Token]
- }
- def go(tok : Token) = (tok.matching: @unchecked) match {
- case Some(other) if true => Some(other)
- case _ if true => tok.matching match {
- case Some(other) => Some(other)
- case _ => None
- }
- }
- }
-
- object Go { // bug #1277 compile-only
- trait Core { def next : Position = null }
- trait Dir
- val NEXT = new Dir{}
-
- trait Position extends Core
-
- (null:Core, null:Dir) match {
- case (_, NEXT) if true => false // no matter whether NEXT test succeed, cannot throw column because of guard
- case (at2:Position,dir) => true
+ def run() {
+ assert(!(new Buffer).ps.isDefinedAt(42))
}
}
- trait Outer { // bug #1282 compile-only
- object No
- trait File {
- (null:AnyRef) match {
- case No => false
- }
- }
- }
-
- object cast2 { // #1281
-
+ object Bug1281 {
class Sync {
def unapplySeq(scrut: Int): Option[Seq[Int]] = {
- println("unapplySeq: "+scrut)
if (scrut == 42) Some(List(1, 2))
else None
}
}
-
class Buffer {
val Get = new Sync
-
val jp: PartialFunction[Any, Any] = {
- case Get(xs) => println(xs) // the argDummy <unapply-selector> should have proper arg.tpe (Int in this case)
+ case Get(xs) => // the argDummy <unapply-selector> should have proper arg.tpe (Int in this case)
}
}
-
- println((new Buffer).jp.isDefinedAt(40))
- println((new Buffer).jp.isDefinedAt(42))
+ def run() {
+ assert(!(new Buffer).jp.isDefinedAt(40))
+ assert(!(new Buffer).jp.isDefinedAt(42))
+ }
}
- object ClassDefInGuard extends TestCase("classdef in guard") { // compile-and-load only
- val z:PartialFunction[Any,Any] = {
- case x::xs if xs.forall { y => y.hashCode() > 0 } => 1
+ object ClassDefInGuard {
+ val z: PartialFunction[Any, Any] = {
+ case x :: xs if xs.forall { y => y.hashCode() > 0 } => 1
}
- override def runTest {
- val s:PartialFunction[Any,Any] = {
- case List(4::xs) => 1
- case List(5::xs) => 1
- case _ if false =>
- case List(3::xs) if List(3:Any).forall { g => g.hashCode() > 0 } => 1
- }
+ def run() {
+ val s: PartialFunction[Any, Any] = {
+ case List(4 :: xs) => 1
+ case List(5 :: xs) => 1
+ case _ if false =>
+ case List(3 :: xs) if List(3: Any).forall { g => g.hashCode() > 0 } => 1
+ }
z.isDefinedAt(42)
s.isDefinedAt(42)
// just load the thing, to see if the classes are found
- (None:Option[Boolean] @unchecked) match {
+ (None: Option[Boolean] @unchecked) match {
case x if x.map(x => x).isEmpty =>
}
}
@@ -610,33 +498,33 @@ object Test extends TestConsoleMain {
// bug#457
- object Bug457 extends TestCase("Bug457") {
+ object Bug457 {
def method1() = {
val x = "Hello, world"; val y = 100;
y match {
case _: Int if (x match { case t => t.trim().length() > 0 }) => false;
case _ => true;
- }}
+ }
+ }
def method2(): scala.Boolean = {
val x: String = "Hello, world"; val y: scala.Int = 100; {
var temp1: scala.Int = y
var result: scala.Boolean = false
- if (
- {
- var result1: scala.Boolean = true;
- if (y == 100)
- result1
- else
- throw new MatchError("crazybox.scala, line 11")
- } && (y > 90)
- )
+ if ({
+ var result1: scala.Boolean = true;
+ if (y == 100)
+ result1
+ else
+ throw new MatchError("crazybox.scala, line 11")
+ } && (y > 90))
result
- else
- throw new MatchError("crazybox.scala, line 9")
- }}
+ else
+ throw new MatchError("crazybox.scala, line 9")
+ }
+ }
- override def runTest {
+ def run() {
method1();
method2();
}
@@ -644,52 +532,52 @@ object Test extends TestConsoleMain {
// bug#508
- object Bug508 extends TestCase("aladdin #508") {
+ object Bug508 {
case class Operator(x: Int);
val EQ = new Operator(2);
-
+
def analyze(x: Pair[Operator, Int]) = x match {
case Pair(EQ, 0) => "0"
case Pair(EQ, 1) => "1"
case Pair(EQ, 2) => "2"
}
- override def runTest {
+ def run() {
val x = Pair(EQ, 0);
assertEquals("0", analyze(x)); // should print "0"
val y = Pair(EQ, 1);
assertEquals("1", analyze(y)); // should print "1"
val z = Pair(EQ, 2);
assertEquals("2", analyze(z)); // should print "2"
- }
+ }
}
// bug#789
-
- object Bug789 extends TestCase("aladdin #789") { // don't do this at home
+
+ object Bug789 { // don't do this at home
trait Impl
-
+
trait SizeImpl extends Impl { def size = 42 }
-
+
trait ColorImpl extends Impl { def color = "red" }
-
+
type Both = SizeImpl with ColorImpl
-
- def info(x:Impl) = x match {
- case x:Both => "size "+x.size+" color "+x.color // you wish
- case x:SizeImpl => "!size "+x.size
- case x:ColorImpl => "color "+x.color
- case _ => "n.a."
- }
-
- def info2(x:Impl) = x match {
- case x:SizeImpl with ColorImpl => "size "+x.size+" color "+x.color // you wish
- case x:SizeImpl => "!size "+x.size
- case x:ColorImpl => "color "+x.color
- case _ => "n.a."
- }
-
- override def runTest {
+
+ def info(x: Impl) = x match {
+ case x: Both => "size " + x.size + " color " + x.color // you wish
+ case x: SizeImpl => "!size " + x.size
+ case x: ColorImpl => "color " + x.color
+ case _ => "n.a."
+ }
+
+ def info2(x: Impl) = x match {
+ case x: SizeImpl with ColorImpl => "size " + x.size + " color " + x.color // you wish
+ case x: SizeImpl => "!size " + x.size
+ case x: ColorImpl => "color " + x.color
+ case _ => "n.a."
+ }
+
+ def run() {
// make up some class that has a size
class MyNode extends SizeImpl
assertEquals("!size 42", info(new MyNode))
@@ -699,34 +587,36 @@ object Test extends TestConsoleMain {
// bug#995
- object Bug995 extends TestCase("aladdin #995") {
+ object Bug995 {
def foo(v: Any): String = v match {
case s: Seq[_] => "Seq" // see hack in object Seq.unapplySeq
case a: AnyRef if runtime.ScalaRunTime.isArray(a) => "Array"
case _ => v.toString
}
- override def runTest { assertEquals("Array", foo(Array(0))) }
+ def run() { assertEquals("Array", foo(Array(0))) }
}
// bug#1093 (contribution #460)
- object Bug1093 extends TestCase("aladdin #1093") {
- override def runTest {assertTrue(Some(3) match {
- case Some(1 | 2) => false
- case Some(3) => true
- })}
+ object Bug1093 {
+ def run() {
+ assert(Some(3) match {
+ case Some(1 | 2) => false
+ case Some(3) => true
+ })
+ }
}
// bug#1094 (contribution #461)
- object Bug1094 extends TestCase("aladdin #1094") {
+ object Bug1094 {
def foo(ps: String*) = "Foo"
case class X(p: String, ps: String*)
def bar =
X("a", "b") match {
- case X(p, ps @ _*) => foo(ps : _*)
+ case X(p, ps at _*) => foo(ps: _*)
}
- override def runTest { assertEquals("Foo", bar) }
+ def run() { assertEquals("Foo", bar) }
}
// #2
@@ -739,18 +629,20 @@ object Test extends TestConsoleMain {
}
}
}
-
- object Ticket2 extends TestCase("#2") { override def runTest {
- val o1 = new Outer_2; val o2 = new Outer_2; val x: Any = o1.Foo(1, 2); val y: Any = o2.Foo(1, 2)
- assertFalse("equals test returns true (but should not)", x equals y)
- assertTrue("match enters wrong case", x match {
- case o2.Foo(x, y) => false
- case o1.Foo(x, y) => true
- case _ => false
- })
- }}
-// #11
+ object Ticket2 {
+ def run() {
+ val o1 = new Outer_2; val o2 = new Outer_2; val x: Any = o1.Foo(1, 2); val y: Any = o2.Foo(1, 2)
+ assert(x != y, "equals test returns true (but should not)")
+ assert(x match {
+ case o2.Foo(x, y) => false
+ case o1.Foo(x, y) => true
+ case _ => false
+ }, "match enters wrong case")
+ }
+ }
+
+ // #11
class MyException1 extends Exception
@@ -758,34 +650,34 @@ object Test extends TestConsoleMain {
// will cause the test to succeed.
trait SpecialException extends MyException1
// trait SpecialException
-
+
class MyException2 extends MyException1 with SpecialException
-
- object Ticket11 extends TestCase("#11") {
- override def runTest {
+
+ object Ticket11 {
+ def run() {
Array[Throwable](new Exception("abc"),
- new MyException1,
- new MyException2).foreach { e =>
- try {
- throw e
- } catch {
- case e : SpecialException => {
- assume(e.isInstanceOf[SpecialException])
- }
- case e => {
- assume(e.isInstanceOf[Throwable])
- }
- }
- }
+ new MyException1,
+ new MyException2).foreach { e =>
+ try {
+ throw e
+ } catch {
+ case e: SpecialException => {
+ assume(e.isInstanceOf[SpecialException])
+ }
+ case e => {
+ assume(e.isInstanceOf[Throwable])
+ }
+ }
+ }
}
}
// #37
-
- object Ticket37 extends TestCase("#37") {
+
+ object Ticket37 {
def foo() {}
- val (a,b) = { foo(); (2,3) }
- override def runTest { assertEquals(this.a, 2) }
+ val (a, b) = { foo(); (2, 3) }
+ def run() { assertEquals(this.a, 2) }
}
// #44
@@ -793,146 +685,78 @@ object Test extends TestConsoleMain {
trait _X {
case class _Foo();
object _Bar {
- def unapply(foo: _Foo):Boolean = true;
+ def unapply(foo: _Foo): Boolean = true;
}
}
object Y extends _X {
val foo = _Foo()
foo match {
- case _Bar() =>
- case _ => assert(false)
+ case _Bar() =>
+ case _ => assert(false)
}
}
- object Ticket44 extends TestCase("#44") {
- override def runTest { assert(Y.toString ne null) /*instantiate Y*/ }
+ object Ticket44 {
+ def run() { assert(Y.toString ne null) /*instantiate Y*/ }
}
- object Ticket211 extends TestCase("#211") {
- override def runTest {
- (Some(123):Option[Int]) match {
- case (x:Option[a]) if false => {};
- case (y:Option[b]) => {};
+ object Ticket211 {
+ def run() {
+ (Some(123): Option[Int]) match {
+ case (x: Option[a]) if false => {};
+ case (y: Option[b]) => {};
}
}
}
- sealed abstract class Tree
- case class Node(l: Tree, v: Int, r: Tree) extends Tree
- case object EmptyTree extends Tree
-
- object Ticket335 extends TestCase("#335") { // compile-only
- override def runTest {
- (EmptyTree: Tree @unchecked) match {
- case Node(_,v,_) if (v == 0) => 0
- case EmptyTree => 2
- }
- }
- }
-
-// this test case checks nothing more than whether
-// case N for object N is translated to a check scrutinee.equals(N)
-// (or the other way round)... for a long time, we got away with
-// scrutinee eq N, but those golden days are, apparently, over.
- object Ticket346 extends TestCase("#346") {
+ // this test case checks nothing more than whether
+ // case N for object N is translated to a check scrutinee.equals(N)
+ // (or the other way round)... for a long time, we got away with
+ // scrutinee eq N, but those golden days are, apparently, over.
+ object Ticket346 {
-class L(val content: List[Int]) {
+ class L(val content: List[Int]) {
- def isEmpty = content.isEmpty
- def head = content.head
- def tail = content.tail
+ def isEmpty = content.isEmpty
+ def head = content.head
+ def tail = content.tail
- override def equals(that: Any): Boolean = {
+ override def equals(that: Any): Boolean = {
val result = that.isInstanceOf[N.type]
- println("L("+content+").equals("+that+") returning "+result)
+ println("L(" + content + ").equals(" + that + ") returning " + result)
result
+ }
}
-}
-object N extends L(Nil) {
-
- override def equals(that: Any): Boolean = {
- val result = (that.isInstanceOf[L] && that.asInstanceOf[L].isEmpty)
- //println("N.equals("+that+") returning "+result)
- result
+ object N extends L(Nil) {
+ override def equals(that: Any): Boolean =
+ (that.isInstanceOf[L] && that.asInstanceOf[L].isEmpty)
}
-}
-object C {
+ object C {
- def unapply(xs: L): Option[(Int, L)] = {
- if (xs.isEmpty)
- { println("xs is empty"); None }
+ def unapply(xs: L): Option[(Int, L)] = {
+ if (xs.isEmpty) { println("xs is empty"); None }
else
- Some((xs.head, new L(xs.tail)))
- }
-
-}
-
+ Some((xs.head, new L(xs.tail)))
+ }
- def empty(xs : L) : Boolean = xs match {
- case N => true
- case _ => false
}
- def singleton(xs : L) : Boolean = xs match {
- case C(_, N) => true
- case _ => false
+ def empty(xs: L): Boolean = xs match {
+ case N => true
+ case _ => false
}
-override def runTest() {
- assertTrue(empty( new L(Nil) ))
- assertTrue(singleton( new L(List(1)) ))
-}
-
-} // end Ticket346
-
- object Ticket495bis { // compile-only
- def signum(x: Int): Int =
- x match {
- case 0 => 0
- case _ if x < 0 => -1
- case _ if x > 0 => 1
- }
- def pair_m(x: Int, y: Int) =
- (x,y) match {
- case (_, 0) => 0
- case (-1, _) => -1
- case (_, _) => 1
- }
- }
-
- object Ticket522 { // compile-only
- class Term[X]
- object App {
- // i'm hidden
- case class InternalApply[Y,Z](fun:Y=>Z, arg:Y) extends Term[Z]
-
- def apply[Y,Z](fun:Y=>Z, arg:Y): Term[Z] =
- new InternalApply[Y,Z](fun,arg)
-
- def unapply[X](arg: Term[X]): Option[(Y=>Z,Y)] forSome {type Y; type Z} =
- arg match {
- case i:InternalApply[y,z] => Some(i.fun, i.arg)
- case _ => None
- }
- }
-
- App({x: Int => x}, 5) match {
- case App(arg, a) =>
- }
- } // end Ticket522
-
+ def singleton(xs: L): Boolean = xs match {
+ case C(_, N) => true
+ case _ => false
+ }
- object Ticket710 { // compile-only
- def method {
- sealed class Parent()
- case object Child extends Parent()
- val x: Parent = Child
- x match {
- case Child => ()
- }
+ def run() {
+ assert(empty(new L(Nil)))
+ assert(singleton(new L(List(1))))
}
- }
-}
+ } // end Ticket346
+}
diff --git a/test/files/run/phantomValueClass.check b/test/files/run/phantomValueClass.check
new file mode 100644
index 0000000..323fae0
--- /dev/null
+++ b/test/files/run/phantomValueClass.check
@@ -0,0 +1 @@
+foobar
diff --git a/test/files/run/phantomValueClass.scala b/test/files/run/phantomValueClass.scala
new file mode 100644
index 0000000..f6509f2
--- /dev/null
+++ b/test/files/run/phantomValueClass.scala
@@ -0,0 +1,10 @@
+final class Phantom[A](val s: String) extends AnyVal {
+ def compose(p: Phantom[A]): Phantom[A] = new Phantom[A](s+p.s)
+}
+
+object Test extends App {
+ val x = new Phantom[Int]("foo")
+ val y = new Phantom[Int]("bar")
+ val z = x compose y
+ println(z.s)
+}
diff --git a/test/files/run/predef-cycle.scala b/test/files/run/predef-cycle.scala
new file mode 100644
index 0000000..ab14768
--- /dev/null
+++ b/test/files/run/predef-cycle.scala
@@ -0,0 +1,71 @@
+class Force {
+ val t1 = new Thread {
+ override def run() {
+ scala.`package`
+ }
+ }
+ val t2 = new Thread {
+ override def run() {
+ scala.Predef
+ }
+ }
+ t1.start()
+ t2.start()
+ t1.join()
+ t2.join()
+}
+
+object Test {
+ def main(args: Array[String]) {
+ new Force()
+ }
+}
+
+/* Was deadlocking:
+"Thread-2" prio=5 tid=7f9637268000 nid=0x119601000 in Object.wait() [119600000]
+ java.lang.Thread.State: RUNNABLE
+ at scala.Predef$.<init>(Predef.scala:90)
+ at scala.Predef$.<clinit>(Predef.scala)
+ at Force$$anon$2.run(predef-cycle.scala:10)
+
+"Thread-1" prio=5 tid=7f9637267800 nid=0x1194fe000 in Object.wait() [1194fb000]
+ java.lang.Thread.State: RUNNABLE
+ at scala.collection.immutable.Set$Set4.$plus(Set.scala:127)
+ at scala.collection.immutable.Set$Set4.$plus(Set.scala:121)
+ at scala.collection.mutable.SetBuilder.$plus$eq(SetBuilder.scala:24)
+ at scala.collection.mutable.SetBuilder.$plus$eq(SetBuilder.scala:22)
+ at scala.collection.generic.Growable$$anonfun$$plus$plus$eq$1.apply(Growable.scala:48)
+ at scala.collection.generic.Growable$$anonfun$$plus$plus$eq$1.apply(Growable.scala:48)
+ at scala.collection.immutable.List.foreach(List.scala:318)
+ at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
+ at scala.collection.mutable.SetBuilder.$plus$plus$eq(SetBuilder.scala:22)
+ at scala.collection.TraversableLike$class.to(TraversableLike.scala:629)
+ at scala.collection.AbstractTraversable.to(Traversable.scala:105)
+ at scala.collection.TraversableOnce$class.toSet(TraversableOnce.scala:267)
+ at scala.collection.AbstractTraversable.toSet(Traversable.scala:105)
+ at scala.runtime.ScalaRunTime$.<init>(ScalaRunTime.scala:50)
+ at scala.runtime.ScalaRunTime$.<clinit>(ScalaRunTime.scala)
+ at scala.collection.mutable.HashTable$HashUtils$class.elemHashCode(HashTable.scala)
+ at scala.collection.mutable.HashMap.elemHashCode(HashMap.scala:39)
+ at scala.collection.mutable.HashTable$class.findOrAddEntry(HashTable.scala:161)
+ at scala.collection.mutable.HashMap.findOrAddEntry(HashMap.scala:39)
+ at scala.collection.mutable.HashMap.put(HashMap.scala:75)
+ at scala.collection.mutable.HashMap.update(HashMap.scala:80)
+ at scala.sys.SystemProperties$.addHelp(SystemProperties.scala:64)
+ at scala.sys.SystemProperties$.bool(SystemProperties.scala:68)
+ at scala.sys.SystemProperties$.noTraceSupression$lzycompute(SystemProperties.scala:80)
+ - locked <7b8b0e228> (a scala.sys.SystemProperties$)
+ at scala.sys.SystemProperties$.noTraceSupression(SystemProperties.scala:80)
+ at scala.util.control.NoStackTrace$.<init>(NoStackTrace.scala:31)
+ at scala.util.control.NoStackTrace$.<clinit>(NoStackTrace.scala)
+ at scala.util.control.NoStackTrace$class.fillInStackTrace(NoStackTrace.scala:22)
+ at scala.util.control.BreakControl.fillInStackTrace(Breaks.scala:93)
+ at java.lang.Throwable.<init>(Throwable.java:181)
+ at scala.util.control.BreakControl.<init>(Breaks.scala:93)
+ at scala.util.control.Breaks.<init>(Breaks.scala:28)
+ at scala.collection.Traversable$.<init>(Traversable.scala:96)
+ at scala.collection.Traversable$.<clinit>(Traversable.scala)
+ at scala.package$.<init>(package.scala:46)
+ at scala.package$.<clinit>(package.scala)
+ at Force$$anon$1.run(predef-cycle.scala:4)
+ */
\ No newline at end of file
diff --git a/test/files/run/primitive-sigs-2-new.check b/test/files/run/primitive-sigs-2-new.check
new file mode 100644
index 0000000..59d8649
--- /dev/null
+++ b/test/files/run/primitive-sigs-2-new.check
@@ -0,0 +1,7 @@
+T<java.lang.Object>
+List(A, char, class java.lang.Object)
+a
+public <T> java.lang.Object Arr.arr4(java.lang.Object[],scala.reflect.ClassTag<T>)
+public float[] Arr.arr3(float[][])
+public scala.collection.immutable.List<java.lang.Character> Arr.arr2(java.lang.Character[])
+public scala.collection.immutable.List<java.lang.Object> Arr.arr1(int[])
diff --git a/test/files/run/primitive-sigs-2-new.scala b/test/files/run/primitive-sigs-2-new.scala
new file mode 100644
index 0000000..cf6de9c
--- /dev/null
+++ b/test/files/run/primitive-sigs-2-new.scala
@@ -0,0 +1,32 @@
+import scala.reflect.{ClassTag, classTag}
+import java.{ lang => jl }
+
+trait T[A] {
+ def f(): A
+}
+class C extends T[Char] {
+ def f(): Char = 'a'
+}
+class Arr {
+ def arr1(xs: Array[Int]): List[Int] = xs.toList
+ def arr2(xs: Array[jl.Character]): List[jl.Character] = xs.toList
+ def arr3(xss: Array[Array[Float]]): Array[Float] = xss map (_.sum)
+ def arr4[T: ClassTag](xss: Array[Array[T]]): Array[T] = xss map (_.head)
+}
+
+object Test {
+ val c1: Class[_] = classOf[T[_]]
+ val c2: Class[_] = classOf[C]
+ val c3: Class[_] = classOf[Arr]
+
+ val c1m = c1.getMethods.toList filter (_.getName == "f") map (_.getGenericReturnType.toString)
+ val c2m = c2.getMethods.toList filter (_.getName == "f") map (_.getGenericReturnType.toString)
+ val c3m = c3.getDeclaredMethods.toList map (_.toGenericString)
+
+ def main(args: Array[String]): Unit = {
+ println(c2.getGenericInterfaces.map(_.toString).sorted mkString " ")
+ println(c1m ++ c2m sorted)
+ println(new C f)
+ c3m.sorted foreach println
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/primitive-sigs-2-old.check b/test/files/run/primitive-sigs-2-old.check
new file mode 100644
index 0000000..feb0619
--- /dev/null
+++ b/test/files/run/primitive-sigs-2-old.check
@@ -0,0 +1,7 @@
+T<java.lang.Object>
+List(A, char, class java.lang.Object)
+a
+public <T> java.lang.Object Arr.arr4(java.lang.Object[],scala.reflect.Manifest<T>)
+public float[] Arr.arr3(float[][])
+public scala.collection.immutable.List<java.lang.Character> Arr.arr2(java.lang.Character[])
+public scala.collection.immutable.List<java.lang.Object> Arr.arr1(int[])
diff --git a/test/files/run/primitive-sigs-2-old.scala b/test/files/run/primitive-sigs-2-old.scala
new file mode 100644
index 0000000..b7152f7
--- /dev/null
+++ b/test/files/run/primitive-sigs-2-old.scala
@@ -0,0 +1,39 @@
+import java.{ lang => jl }
+
+trait T[A] {
+ def f(): A
+}
+class C extends T[Char] {
+ def f(): Char = 'a'
+}
+class Arr {
+ def arr1(xs: Array[Int]): List[Int] = xs.toList
+ def arr2(xs: Array[jl.Character]): List[jl.Character] = xs.toList
+ def arr3(xss: Array[Array[Float]]): Array[Float] = xss map (_.sum)
+ // This gets a signature like
+ // public <T> java.lang.Object Arr.arr4(java.lang.Object[],scala.reflect.Manifest<T>)
+ //
+ // instead of the more appealing version from the past
+ // public <T> T[] Arr.arr4(T[][],scala.reflect.Manifest<T>)
+ //
+ // because java inflict's its reference-only generic-arrays on us.
+ //
+ def arr4[T: Manifest](xss: Array[Array[T]]): Array[T] = xss map (_.head)
+}
+
+object Test {
+ val c1: Class[_] = classOf[T[_]]
+ val c2: Class[_] = classOf[C]
+ val c3: Class[_] = classOf[Arr]
+
+ val c1m = c1.getMethods.toList filter (_.getName == "f") map (_.getGenericReturnType.toString)
+ val c2m = c2.getMethods.toList filter (_.getName == "f") map (_.getGenericReturnType.toString)
+ val c3m = c3.getDeclaredMethods.toList map (_.toGenericString)
+
+ def main(args: Array[String]): Unit = {
+ println(c2.getGenericInterfaces.map(_.toString).sorted mkString " ")
+ println(c1m ++ c2m sorted)
+ println(new C f)
+ c3m.sorted foreach println
+ }
+}
diff --git a/test/files/run/primitive-sigs-2.check b/test/files/run/primitive-sigs-2.check
deleted file mode 100644
index 4ecec9f..0000000
--- a/test/files/run/primitive-sigs-2.check
+++ /dev/null
@@ -1,3 +0,0 @@
-T<java.lang.Object> interface scala.ScalaObject
-List(A, char, class java.lang.Object)
-a
diff --git a/test/files/run/primitive-sigs-2.scala b/test/files/run/primitive-sigs-2.scala
deleted file mode 100644
index 0c72b4a..0000000
--- a/test/files/run/primitive-sigs-2.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-trait T[A] {
- def f(): A
-}
-class C extends T[Char] {
- def f(): Char = 'a'
-}
-
-object Test {
- val c1: Class[_] = classOf[T[_]]
- val c2: Class[_] = classOf[C]
-
- val c1m = c1.getMethods.toList filter (_.getName == "f") map (_.getGenericReturnType.toString)
- val c2m = c2.getMethods.toList filter (_.getName == "f") map (_.getGenericReturnType.toString)
-
- def main(args: Array[String]): Unit = {
- println(c2.getGenericInterfaces.map(_.toString).sorted mkString " ")
- println(c1m ++ c2m sorted)
- println(new C f)
- }
-}
diff --git a/test/files/run/private-inline.check b/test/files/run/private-inline.check
new file mode 100644
index 0000000..209e3ef
--- /dev/null
+++ b/test/files/run/private-inline.check
@@ -0,0 +1 @@
+20
diff --git a/test/files/pos/bug3252.flags b/test/files/run/private-inline.flags
similarity index 100%
copy from test/files/pos/bug3252.flags
copy to test/files/run/private-inline.flags
diff --git a/test/files/run/private-inline.scala b/test/files/run/private-inline.scala
new file mode 100644
index 0000000..a620077
--- /dev/null
+++ b/test/files/run/private-inline.scala
@@ -0,0 +1,52 @@
+
+final class A {
+ private var x1 = false
+ var x2 = false
+
+ // manipulates private var
+ @inline private def wrapper1[T](body: => T): T = {
+ val saved = x1
+ x1 = true
+ try body
+ finally x1 = saved
+ }
+ // manipulates public var
+ @inline private def wrapper2[T](body: => T): T = {
+ val saved = x2
+ x2 = true
+ try body
+ finally x2 = saved
+ }
+
+ // not inlined
+ def f1a() = wrapper1(5)
+ // inlined!
+ def f1b() = identity(wrapper1(5))
+
+ // not inlined
+ def f2a() = wrapper2(5)
+ // inlined!
+ def f2b() = identity(wrapper2(5))
+}
+
+object Test {
+ def methodClasses = List("f1a", "f2a") map ("A$$anonfun$" + _ + "$1")
+
+ def main(args: Array[String]): Unit = {
+ val a = new A
+ import a._
+ println(f1a() + f1b() + f2a() + f2b())
+
+ // Don't know how else to test this: all these should have been
+ // inlined, so all should fail.
+ methodClasses foreach { clazz =>
+
+ val foundClass = (
+ try Class.forName(clazz)
+ catch { case _ => null }
+ )
+
+ assert(foundClass == null, foundClass)
+ }
+ }
+}
diff --git a/test/files/run/programmatic-main.check b/test/files/run/programmatic-main.check
index be446b5..bdf76dd 100644
--- a/test/files/run/programmatic-main.check
+++ b/test/files/run/programmatic-main.check
@@ -1,28 +1,31 @@
- phase name id description
- ---------- -- -----------
- parser 1 parse source into ASTs, perform simple desugaring
- namer 2 resolve names, attach symbols to named trees
-packageobjects 3 load package objects
- typer 4 the meat and potatoes: type the trees
-superaccessors 5 add super accessors in traits and nested classes
- pickler 6 serialize symbol tables
- refchecks 7 reference/override checking, translate nested objects
- liftcode 8 reify trees
- uncurry 9 uncurry, translate function values to anonymous classes
- tailcalls 10 replace tail calls by jumps
- specialize 11 @specialized-driven class and method specialization
- explicitouter 12 this refs to outer pointers, translate patterns
- erasure 13 erase types, add interfaces for traits
- lazyvals 14 allocate bitmaps, translate lazy vals into lazified defs
- lambdalift 15 move nested functions to top level
- constructors 16 move field definitions into constructors
- flatten 17 eliminate inner classes
- mixin 18 mixin composition
- cleanup 19 platform-specific cleanups, generate reflective calls
- icode 20 generate portable intermediate code
- inliner 21 optimization: do inlining
- closelim 22 optimization: eliminate uncalled closures
- dce 23 optimization: eliminate dead code
- jvm 24 generate JVM bytecode
- terminal 25 The last phase in the compiler chain
+ phase name id description
+ ---------- -- -----------
+ parser 1 parse source into ASTs, perform simple desugaring
+ namer 2 resolve names, attach symbols to named trees
+ packageobjects 3 load package objects
+ typer 4 the meat and potatoes: type the trees
+ patmat 5 translate match expressions
+ superaccessors 6 add super accessors in traits and nested classes
+ extmethods 7 add extension methods for inline classes
+ pickler 8 serialize symbol tables
+ refchecks 9 reference/override checking, translate nested objects
+ uncurry 10 uncurry, translate function values to anonymous classes
+ tailcalls 11 replace tail calls by jumps
+ specialize 12 @specialized-driven class and method specialization
+ explicitouter 13 this refs to outer pointers, translate patterns
+ erasure 14 erase types, add interfaces for traits
+ posterasure 15 clean up erased inline classes
+ lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
+ lambdalift 17 move nested functions to top level
+ constructors 18 move field definitions into constructors
+ flatten 19 eliminate inner classes
+ mixin 20 mixin composition
+ cleanup 21 platform-specific cleanups, generate reflective calls
+ icode 22 generate portable intermediate code
+ inliner 23 optimization: do inlining
+inlineExceptionHandlers 24 optimization: inline exception handlers
+ closelim 25 optimization: eliminate uncalled closures
+ dce 26 optimization: eliminate dead code
+ jvm 27 generate JVM bytecode
+ terminal 28 The last phase in the compiler chain
diff --git a/test/files/run/promotion-msil.check b/test/files/run/promotion-msil.check
deleted file mode 100644
index 41e36c3..0000000
--- a/test/files/run/promotion-msil.check
+++ /dev/null
@@ -1,4 +0,0 @@
-2
-6
-20
-30
diff --git a/test/files/run/proxy.check b/test/files/run/proxy.check
index 9eb68b1..c40b3db 100644
--- a/test/files/run/proxy.check
+++ b/test/files/run/proxy.check
@@ -2,3 +2,5 @@ false
true
false
false
+true
+true
diff --git a/test/files/run/proxy.scala b/test/files/run/proxy.scala
index 5f40397..ea222cb 100644
--- a/test/files/run/proxy.scala
+++ b/test/files/run/proxy.scala
@@ -6,4 +6,12 @@ object Test extends App {
println(p equals 2)
println(p equals 3)
println(p equals null)
+
+ case class Bippy(a: String) extends Proxy {
+ def self = a
+ }
+
+ val label = Bippy("bippy!")
+ println(label == label)
+ println(label == "bippy!")
}
diff --git a/test/files/run/pure-args-byname-noinline.check b/test/files/run/pure-args-byname-noinline.check
new file mode 100644
index 0000000..a39c61e
--- /dev/null
+++ b/test/files/run/pure-args-byname-noinline.check
@@ -0,0 +1,12 @@
+2
+2
+2
+2
+List(1)
+List()
+
+1
+1
+1
+1
+1
diff --git a/test/files/run/pure-args-byname-noinline.scala b/test/files/run/pure-args-byname-noinline.scala
new file mode 100644
index 0000000..5c5c8a7
--- /dev/null
+++ b/test/files/run/pure-args-byname-noinline.scala
@@ -0,0 +1,33 @@
+object Test {
+ //Were affected by SI-6306
+ def f[A](a: =>A) = println(a.toString)
+ def f1[A <: AnyVal](a: =>A) = println(a.toString)
+ def f1a[A <: AnyVal](a: =>A) = println(a.##)
+ def f2[A <: AnyRef](a: =>A) = println(a.toString)
+ def f2a[A <: String](a: =>A) = println(a.toString)
+ //Works
+ def f3[A](a: =>Seq[A]) = println(a.toString)
+
+ def foo() = println(2)
+ def client(f: () => Unit) = {f(); f()}
+ def attempt2() {
+ val bar: () => Unit = foo _
+ //The code causing SI-6306 was supposed to optimize code like this:
+ client(() => bar ())
+ //to:
+ client(bar)
+ }
+ def main(args: Array[String]) {
+ attempt2()
+ f3(Seq(1))
+ f3(Seq())
+ f("")
+ f((1).toString)
+ f((1).##)
+ f1((1).##)
+ f2((1).toString)
+ f2a((1).toString)
+ }
+}
+
+// vim: set ts=8 sw=2 et:
diff --git a/test/files/run/randomAccessSeq-apply.scala b/test/files/run/randomAccessSeq-apply.scala
deleted file mode 100644
index b8d6d54..0000000
--- a/test/files/run/randomAccessSeq-apply.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-object Test extends App {
- val empty = RandomAccessSeq()
- assert(empty.isEmpty)
-
- val single = RandomAccessSeq(1)
- assert(List(1) == single.toList)
-
- val two = RandomAccessSeq("a", "b")
- assert("a" == two.head)
- assert("b" == two.apply(1))
-
- println("OK")
-}
-
-// vim: set ts=2 sw=2 et:
diff --git a/test/files/run/range-unit.check b/test/files/run/range-unit.check
new file mode 100644
index 0000000..3daf91c
--- /dev/null
+++ b/test/files/run/range-unit.check
@@ -0,0 +1,4178 @@
+>>> Range.inclusive <<<
+
+start end step length/first/last
+-----------------------------------------
+0 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 0 -1 1/0/0
+0 0 1 1/0/0
+0 0 -2 1/0/0
+0 0 2 1/0/0
+0 0 -3 1/0/0
+0 0 3 1/0/0
+0 0 17 1/0/0
+0 0 127 1/0/0
+0 0 MIN+1 1/0/0
+0 0 MAX 1/0/0
+0 0 MIN 1/0/0
+0 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 -1 -1 2/0/-1
+0 -1 1 0
+0 -1 -2 1/0/0
+0 -1 2 0
+0 -1 -3 1/0/0
+0 -1 3 0
+0 -1 17 0
+0 -1 127 0
+0 -1 MIN+1 1/0/0
+0 -1 MAX 0
+0 -1 MIN 1/0/0
+0 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 1 -1 0
+0 1 1 2/0/1
+0 1 -2 0
+0 1 2 1/0/0
+0 1 -3 0
+0 1 3 1/0/0
+0 1 17 1/0/0
+0 1 127 1/0/0
+0 1 MIN+1 0
+0 1 MAX 1/0/0
+0 1 MIN 0
+0 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 3 -1 0
+0 3 1 4/0/3
+0 3 -2 0
+0 3 2 2/0/2
+0 3 -3 0
+0 3 3 2/0/3
+0 3 17 1/0/0
+0 3 127 1/0/0
+0 3 MIN+1 0
+0 3 MAX 1/0/0
+0 3 MIN 0
+0 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 MIN+1 -1 ---
+ java.lang.IllegalArgumentException: 0 to -2147483647 by -1: seqs cannot contain more than Int.MaxValue elements.
+0 MIN+1 1 0
+0 MIN+1 -2 1073741824/0/MIN+2
+0 MIN+1 2 0
+0 MIN+1 -3 715827883/0/MIN+2
+0 MIN+1 3 0
+0 MIN+1 17 0
+0 MIN+1 127 0
+0 MIN+1 MIN+1 2/0/MIN+1
+0 MIN+1 MAX 0
+0 MIN+1 MIN 1/0/0
+0 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 MAX -1 0
+0 MAX 1 ---
+ java.lang.IllegalArgumentException: 0 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+0 MAX -2 0
+0 MAX 2 1073741824/0/MAX-1
+0 MAX -3 0
+0 MAX 3 715827883/0/MAX-1
+0 MAX 17 126322568/0/MAX-8
+0 MAX 127 16909321/0/MAX-7
+0 MAX MIN+1 0
+0 MAX MAX 2/0/MAX
+0 MAX MIN 0
+0 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 MIN -1 ---
+ java.lang.IllegalArgumentException: 0 to -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+0 MIN 1 0
+0 MIN -2 1073741825/0/MIN
+0 MIN 2 0
+0 MIN -3 715827883/0/MIN+2
+0 MIN 3 0
+0 MIN 17 0
+0 MIN 127 0
+0 MIN MIN+1 2/0/MIN+1
+0 MIN MAX 0
+0 MIN MIN 2/0/MIN
+
+start end step length/first/last
+-----------------------------------------
+-1 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 0 -1 0
+-1 0 1 2/-1/0
+-1 0 -2 0
+-1 0 2 1/-1/-1
+-1 0 -3 0
+-1 0 3 1/-1/-1
+-1 0 17 1/-1/-1
+-1 0 127 1/-1/-1
+-1 0 MIN+1 0
+-1 0 MAX 1/-1/-1
+-1 0 MIN 0
+-1 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 -1 -1 1/-1/-1
+-1 -1 1 1/-1/-1
+-1 -1 -2 1/-1/-1
+-1 -1 2 1/-1/-1
+-1 -1 -3 1/-1/-1
+-1 -1 3 1/-1/-1
+-1 -1 17 1/-1/-1
+-1 -1 127 1/-1/-1
+-1 -1 MIN+1 1/-1/-1
+-1 -1 MAX 1/-1/-1
+-1 -1 MIN 1/-1/-1
+-1 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 1 -1 0
+-1 1 1 3/-1/1
+-1 1 -2 0
+-1 1 2 2/-1/1
+-1 1 -3 0
+-1 1 3 1/-1/-1
+-1 1 17 1/-1/-1
+-1 1 127 1/-1/-1
+-1 1 MIN+1 0
+-1 1 MAX 1/-1/-1
+-1 1 MIN 0
+-1 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 3 -1 0
+-1 3 1 5/-1/3
+-1 3 -2 0
+-1 3 2 3/-1/3
+-1 3 -3 0
+-1 3 3 2/-1/2
+-1 3 17 1/-1/-1
+-1 3 127 1/-1/-1
+-1 3 MIN+1 0
+-1 3 MAX 1/-1/-1
+-1 3 MIN 0
+-1 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 MIN+1 -1 MAX/-1/MIN+1
+-1 MIN+1 1 0
+-1 MIN+1 -2 1073741824/-1/MIN+1
+-1 MIN+1 2 0
+-1 MIN+1 -3 715827883/-1/MIN+1
+-1 MIN+1 3 0
+-1 MIN+1 17 0
+-1 MIN+1 127 0
+-1 MIN+1 MIN+1 1/-1/-1
+-1 MIN+1 MAX 0
+-1 MIN+1 MIN 1/-1/-1
+-1 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 MAX -1 0
+-1 MAX 1 ---
+ java.lang.IllegalArgumentException: -1 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX -2 0
+-1 MAX 2 1073741825/-1/MAX
+-1 MAX -3 0
+-1 MAX 3 715827883/-1/MAX-2
+-1 MAX 17 126322568/-1/MAX-9
+-1 MAX 127 16909321/-1/MAX-8
+-1 MAX MIN+1 0
+-1 MAX MAX 2/-1/MAX-1
+-1 MAX MIN 0
+-1 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 MIN -1 ---
+ java.lang.IllegalArgumentException: -1 to -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+-1 MIN 1 0
+-1 MIN -2 1073741824/-1/MIN+1
+-1 MIN 2 0
+-1 MIN -3 715827883/-1/MIN+1
+-1 MIN 3 0
+-1 MIN 17 0
+-1 MIN 127 0
+-1 MIN MIN+1 2/-1/MIN
+-1 MIN MAX 0
+-1 MIN MIN 1/-1/-1
+
+start end step length/first/last
+-----------------------------------------
+1 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 0 -1 2/1/0
+1 0 1 0
+1 0 -2 1/1/1
+1 0 2 0
+1 0 -3 1/1/1
+1 0 3 0
+1 0 17 0
+1 0 127 0
+1 0 MIN+1 1/1/1
+1 0 MAX 0
+1 0 MIN 1/1/1
+1 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 -1 -1 3/1/-1
+1 -1 1 0
+1 -1 -2 2/1/-1
+1 -1 2 0
+1 -1 -3 1/1/1
+1 -1 3 0
+1 -1 17 0
+1 -1 127 0
+1 -1 MIN+1 1/1/1
+1 -1 MAX 0
+1 -1 MIN 1/1/1
+1 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 1 -1 1/1/1
+1 1 1 1/1/1
+1 1 -2 1/1/1
+1 1 2 1/1/1
+1 1 -3 1/1/1
+1 1 3 1/1/1
+1 1 17 1/1/1
+1 1 127 1/1/1
+1 1 MIN+1 1/1/1
+1 1 MAX 1/1/1
+1 1 MIN 1/1/1
+1 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 3 -1 0
+1 3 1 3/1/3
+1 3 -2 0
+1 3 2 2/1/3
+1 3 -3 0
+1 3 3 1/1/1
+1 3 17 1/1/1
+1 3 127 1/1/1
+1 3 MIN+1 0
+1 3 MAX 1/1/1
+1 3 MIN 0
+1 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 MIN+1 -1 ---
+ java.lang.IllegalArgumentException: 1 to -2147483647 by -1: seqs cannot contain more than Int.MaxValue elements.
+1 MIN+1 1 0
+1 MIN+1 -2 1073741825/1/MIN+1
+1 MIN+1 2 0
+1 MIN+1 -3 715827883/1/MIN+3
+1 MIN+1 3 0
+1 MIN+1 17 0
+1 MIN+1 127 0
+1 MIN+1 MIN+1 2/1/MIN+2
+1 MIN+1 MAX 0
+1 MIN+1 MIN 2/1/MIN+1
+1 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 MAX -1 0
+1 MAX 1 MAX/1/MAX
+1 MAX -2 0
+1 MAX 2 1073741824/1/MAX
+1 MAX -3 0
+1 MAX 3 715827883/1/MAX
+1 MAX 17 126322568/1/MAX-7
+1 MAX 127 16909321/1/MAX-6
+1 MAX MIN+1 0
+1 MAX MAX 1/1/1
+1 MAX MIN 0
+1 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 MIN -1 ---
+ java.lang.IllegalArgumentException: 1 to -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+1 MIN 1 0
+1 MIN -2 1073741825/1/MIN+1
+1 MIN 2 0
+1 MIN -3 715827884/1/MIN
+1 MIN 3 0
+1 MIN 17 0
+1 MIN 127 0
+1 MIN MIN+1 2/1/MIN+2
+1 MIN MAX 0
+1 MIN MIN 2/1/MIN+1
+
+start end step length/first/last
+-----------------------------------------
+3 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 0 -1 4/3/0
+3 0 1 0
+3 0 -2 2/3/1
+3 0 2 0
+3 0 -3 2/3/0
+3 0 3 0
+3 0 17 0
+3 0 127 0
+3 0 MIN+1 1/3/3
+3 0 MAX 0
+3 0 MIN 1/3/3
+3 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 -1 -1 5/3/-1
+3 -1 1 0
+3 -1 -2 3/3/-1
+3 -1 2 0
+3 -1 -3 2/3/0
+3 -1 3 0
+3 -1 17 0
+3 -1 127 0
+3 -1 MIN+1 1/3/3
+3 -1 MAX 0
+3 -1 MIN 1/3/3
+3 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 1 -1 3/3/1
+3 1 1 0
+3 1 -2 2/3/1
+3 1 2 0
+3 1 -3 1/3/3
+3 1 3 0
+3 1 17 0
+3 1 127 0
+3 1 MIN+1 1/3/3
+3 1 MAX 0
+3 1 MIN 1/3/3
+3 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 3 -1 1/3/3
+3 3 1 1/3/3
+3 3 -2 1/3/3
+3 3 2 1/3/3
+3 3 -3 1/3/3
+3 3 3 1/3/3
+3 3 17 1/3/3
+3 3 127 1/3/3
+3 3 MIN+1 1/3/3
+3 3 MAX 1/3/3
+3 3 MIN 1/3/3
+3 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 MIN+1 -1 ---
+ java.lang.IllegalArgumentException: 3 to -2147483647 by -1: seqs cannot contain more than Int.MaxValue elements.
+3 MIN+1 1 0
+3 MIN+1 -2 1073741826/3/MIN+1
+3 MIN+1 2 0
+3 MIN+1 -3 715827884/3/MIN+2
+3 MIN+1 3 0
+3 MIN+1 17 0
+3 MIN+1 127 0
+3 MIN+1 MIN+1 2/3/MIN+4
+3 MIN+1 MAX 0
+3 MIN+1 MIN 2/3/MIN+3
+3 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 MAX -1 0
+3 MAX 1 MAX-2/3/MAX
+3 MAX -2 0
+3 MAX 2 1073741823/3/MAX
+3 MAX -3 0
+3 MAX 3 715827882/3/MAX-1
+3 MAX 17 126322568/3/MAX-5
+3 MAX 127 16909321/3/MAX-4
+3 MAX MIN+1 0
+3 MAX MAX 1/3/3
+3 MAX MIN 0
+3 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 MIN -1 ---
+ java.lang.IllegalArgumentException: 3 to -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+3 MIN 1 0
+3 MIN -2 1073741826/3/MIN+1
+3 MIN 2 0
+3 MIN -3 715827884/3/MIN+2
+3 MIN 3 0
+3 MIN 17 0
+3 MIN 127 0
+3 MIN MIN+1 2/3/MIN+4
+3 MIN MAX 0
+3 MIN MIN 2/3/MIN+3
+
+start end step length/first/last
+-----------------------------------------
+MIN+1 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 0 -1 0
+MIN+1 0 1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 0 -2 0
+MIN+1 0 2 1073741824/MIN+1/-1
+MIN+1 0 -3 0
+MIN+1 0 3 715827883/MIN+1/-1
+MIN+1 0 17 126322568/MIN+1/-8
+MIN+1 0 127 16909321/MIN+1/-7
+MIN+1 0 MIN+1 0
+MIN+1 0 MAX 2/MIN+1/0
+MIN+1 0 MIN 0
+MIN+1 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 -1 -1 0
+MIN+1 -1 1 MAX/MIN+1/-1
+MIN+1 -1 -2 0
+MIN+1 -1 2 1073741824/MIN+1/-1
+MIN+1 -1 -3 0
+MIN+1 -1 3 715827883/MIN+1/-1
+MIN+1 -1 17 126322568/MIN+1/-8
+MIN+1 -1 127 16909321/MIN+1/-7
+MIN+1 -1 MIN+1 0
+MIN+1 -1 MAX 1/MIN+1/MIN+1
+MIN+1 -1 MIN 0
+MIN+1 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 1 -1 0
+MIN+1 1 1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 -2 0
+MIN+1 1 2 1073741825/MIN+1/1
+MIN+1 1 -3 0
+MIN+1 1 3 715827883/MIN+1/-1
+MIN+1 1 17 126322568/MIN+1/-8
+MIN+1 1 127 16909321/MIN+1/-7
+MIN+1 1 MIN+1 0
+MIN+1 1 MAX 2/MIN+1/0
+MIN+1 1 MIN 0
+MIN+1 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 3 -1 0
+MIN+1 3 1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 -2 0
+MIN+1 3 2 1073741826/MIN+1/3
+MIN+1 3 -3 0
+MIN+1 3 3 715827884/MIN+1/2
+MIN+1 3 17 126322568/MIN+1/-8
+MIN+1 3 127 16909321/MIN+1/-7
+MIN+1 3 MIN+1 0
+MIN+1 3 MAX 2/MIN+1/0
+MIN+1 3 MIN 0
+MIN+1 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 MIN+1 -1 1/MIN+1/MIN+1
+MIN+1 MIN+1 1 1/MIN+1/MIN+1
+MIN+1 MIN+1 -2 1/MIN+1/MIN+1
+MIN+1 MIN+1 2 1/MIN+1/MIN+1
+MIN+1 MIN+1 -3 1/MIN+1/MIN+1
+MIN+1 MIN+1 3 1/MIN+1/MIN+1
+MIN+1 MIN+1 17 1/MIN+1/MIN+1
+MIN+1 MIN+1 127 1/MIN+1/MIN+1
+MIN+1 MIN+1 MIN+1 1/MIN+1/MIN+1
+MIN+1 MIN+1 MAX 1/MIN+1/MIN+1
+MIN+1 MIN+1 MIN 1/MIN+1/MIN+1
+MIN+1 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 MAX -1 0
+MIN+1 MAX 1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX -2 0
+MIN+1 MAX 2 ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 2: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX -3 0
+MIN+1 MAX 3 1431655765/MIN+1/MAX-2
+MIN+1 MAX 17 252645135/MIN+1/MAX-16
+MIN+1 MAX 127 33818641/MIN+1/MAX-14
+MIN+1 MAX MIN+1 0
+MIN+1 MAX MAX 3/MIN+1/MAX
+MIN+1 MAX MIN 0
+MIN+1 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 MIN -1 2/MIN+1/MIN
+MIN+1 MIN 1 0
+MIN+1 MIN -2 1/MIN+1/MIN+1
+MIN+1 MIN 2 0
+MIN+1 MIN -3 1/MIN+1/MIN+1
+MIN+1 MIN 3 0
+MIN+1 MIN 17 0
+MIN+1 MIN 127 0
+MIN+1 MIN MIN+1 1/MIN+1/MIN+1
+MIN+1 MIN MAX 0
+MIN+1 MIN MIN 1/MIN+1/MIN+1
+
+start end step length/first/last
+-----------------------------------------
+MAX 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX 0 -1 ---
+ java.lang.IllegalArgumentException: 2147483647 to 0 by -1: seqs cannot contain more than Int.MaxValue elements.
+MAX 0 1 0
+MAX 0 -2 1073741824/MAX/1
+MAX 0 2 0
+MAX 0 -3 715827883/MAX/1
+MAX 0 3 0
+MAX 0 17 0
+MAX 0 127 0
+MAX 0 MIN+1 2/MAX/0
+MAX 0 MAX 0
+MAX 0 MIN 1/MAX/MAX
+MAX -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX -1 -1 ---
+ java.lang.IllegalArgumentException: 2147483647 to -1 by -1: seqs cannot contain more than Int.MaxValue elements.
+MAX -1 1 0
+MAX -1 -2 1073741825/MAX/-1
+MAX -1 2 0
+MAX -1 -3 715827883/MAX/1
+MAX -1 3 0
+MAX -1 17 0
+MAX -1 127 0
+MAX -1 MIN+1 2/MAX/0
+MAX -1 MAX 0
+MAX -1 MIN 2/MAX/-1
+MAX 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX 1 -1 MAX/MAX/1
+MAX 1 1 0
+MAX 1 -2 1073741824/MAX/1
+MAX 1 2 0
+MAX 1 -3 715827883/MAX/1
+MAX 1 3 0
+MAX 1 17 0
+MAX 1 127 0
+MAX 1 MIN+1 1/MAX/MAX
+MAX 1 MAX 0
+MAX 1 MIN 1/MAX/MAX
+MAX 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX 3 -1 MAX-2/MAX/3
+MAX 3 1 0
+MAX 3 -2 1073741823/MAX/3
+MAX 3 2 0
+MAX 3 -3 715827882/MAX/4
+MAX 3 3 0
+MAX 3 17 0
+MAX 3 127 0
+MAX 3 MIN+1 1/MAX/MAX
+MAX 3 MAX 0
+MAX 3 MIN 1/MAX/MAX
+MAX MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX MIN+1 -1 ---
+ java.lang.IllegalArgumentException: 2147483647 to -2147483647 by -1: seqs cannot contain more than Int.MaxValue elements.
+MAX MIN+1 1 0
+MAX MIN+1 -2 ---
+ java.lang.IllegalArgumentException: 2147483647 to -2147483647 by -2: seqs cannot contain more than Int.MaxValue elements.
+MAX MIN+1 2 0
+MAX MIN+1 -3 1431655765/MAX/MIN+3
+MAX MIN+1 3 0
+MAX MIN+1 17 0
+MAX MIN+1 127 0
+MAX MIN+1 MIN+1 3/MAX/MIN+1
+MAX MIN+1 MAX 0
+MAX MIN+1 MIN 2/MAX/-1
+MAX MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX MAX -1 1/MAX/MAX
+MAX MAX 1 1/MAX/MAX
+MAX MAX -2 1/MAX/MAX
+MAX MAX 2 1/MAX/MAX
+MAX MAX -3 1/MAX/MAX
+MAX MAX 3 1/MAX/MAX
+MAX MAX 17 1/MAX/MAX
+MAX MAX 127 1/MAX/MAX
+MAX MAX MIN+1 1/MAX/MAX
+MAX MAX MAX 1/MAX/MAX
+MAX MAX MIN 1/MAX/MAX
+MAX MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX MIN -1 ---
+ java.lang.IllegalArgumentException: 2147483647 to -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+MAX MIN 1 0
+MAX MIN -2 ---
+ java.lang.IllegalArgumentException: 2147483647 to -2147483648 by -2: seqs cannot contain more than Int.MaxValue elements.
+MAX MIN 2 0
+MAX MIN -3 1431655766/MAX/MIN
+MAX MIN 3 0
+MAX MIN 17 0
+MAX MIN 127 0
+MAX MIN MIN+1 3/MAX/MIN+1
+MAX MIN MAX 0
+MAX MIN MIN 2/MAX/-1
+
+start end step length/first/last
+-----------------------------------------
+MIN 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN 0 -1 0
+MIN 0 1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 -2 0
+MIN 0 2 1073741825/MIN/0
+MIN 0 -3 0
+MIN 0 3 715827883/MIN/-2
+MIN 0 17 126322568/MIN/-9
+MIN 0 127 16909321/MIN/-8
+MIN 0 MIN+1 0
+MIN 0 MAX 2/MIN/-1
+MIN 0 MIN 0
+MIN -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN -1 -1 0
+MIN -1 1 ---
+ java.lang.IllegalArgumentException: -2147483648 to -1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN -1 -2 0
+MIN -1 2 1073741824/MIN/-2
+MIN -1 -3 0
+MIN -1 3 715827883/MIN/-2
+MIN -1 17 126322568/MIN/-9
+MIN -1 127 16909321/MIN/-8
+MIN -1 MIN+1 0
+MIN -1 MAX 2/MIN/-1
+MIN -1 MIN 0
+MIN 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN 1 -1 0
+MIN 1 1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 -2 0
+MIN 1 2 1073741825/MIN/0
+MIN 1 -3 0
+MIN 1 3 715827884/MIN/1
+MIN 1 17 126322568/MIN/-9
+MIN 1 127 16909321/MIN/-8
+MIN 1 MIN+1 0
+MIN 1 MAX 2/MIN/-1
+MIN 1 MIN 0
+MIN 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN 3 -1 0
+MIN 3 1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 -2 0
+MIN 3 2 1073741826/MIN/2
+MIN 3 -3 0
+MIN 3 3 715827884/MIN/1
+MIN 3 17 126322568/MIN/-9
+MIN 3 127 16909321/MIN/-8
+MIN 3 MIN+1 0
+MIN 3 MAX 2/MIN/-1
+MIN 3 MIN 0
+MIN MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN MIN+1 -1 0
+MIN MIN+1 1 2/MIN/MIN+1
+MIN MIN+1 -2 0
+MIN MIN+1 2 1/MIN/MIN
+MIN MIN+1 -3 0
+MIN MIN+1 3 1/MIN/MIN
+MIN MIN+1 17 1/MIN/MIN
+MIN MIN+1 127 1/MIN/MIN
+MIN MIN+1 MIN+1 0
+MIN MIN+1 MAX 1/MIN/MIN
+MIN MIN+1 MIN 0
+MIN MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN MAX -1 0
+MIN MAX 1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX -2 0
+MIN MAX 2 ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 2: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX -3 0
+MIN MAX 3 1431655766/MIN/MAX
+MIN MAX 17 252645136/MIN/MAX
+MIN MAX 127 33818641/MIN/MAX-15
+MIN MAX MIN+1 0
+MIN MAX MAX 3/MIN/MAX-1
+MIN MAX MIN 0
+MIN MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN MIN -1 1/MIN/MIN
+MIN MIN 1 1/MIN/MIN
+MIN MIN -2 1/MIN/MIN
+MIN MIN 2 1/MIN/MIN
+MIN MIN -3 1/MIN/MIN
+MIN MIN 3 1/MIN/MIN
+MIN MIN 17 1/MIN/MIN
+MIN MIN 127 1/MIN/MIN
+MIN MIN MIN+1 1/MIN/MIN
+MIN MIN MAX 1/MIN/MIN
+MIN MIN MIN 1/MIN/MIN
+
+>>> Range.apply <<<
+
+start end step length/first/last
+-----------------------------------------
+0 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 0 -1 0
+0 0 1 0
+0 0 -2 0
+0 0 2 0
+0 0 -3 0
+0 0 3 0
+0 0 17 0
+0 0 127 0
+0 0 MIN+1 0
+0 0 MAX 0
+0 0 MIN 0
+0 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 -1 -1 1/0/0
+0 -1 1 0
+0 -1 -2 1/0/0
+0 -1 2 0
+0 -1 -3 1/0/0
+0 -1 3 0
+0 -1 17 0
+0 -1 127 0
+0 -1 MIN+1 1/0/0
+0 -1 MAX 0
+0 -1 MIN 1/0/0
+0 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 1 -1 0
+0 1 1 1/0/0
+0 1 -2 0
+0 1 2 1/0/0
+0 1 -3 0
+0 1 3 1/0/0
+0 1 17 1/0/0
+0 1 127 1/0/0
+0 1 MIN+1 0
+0 1 MAX 1/0/0
+0 1 MIN 0
+0 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 3 -1 0
+0 3 1 3/0/2
+0 3 -2 0
+0 3 2 2/0/2
+0 3 -3 0
+0 3 3 1/0/0
+0 3 17 1/0/0
+0 3 127 1/0/0
+0 3 MIN+1 0
+0 3 MAX 1/0/0
+0 3 MIN 0
+0 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 MIN+1 -1 MAX/0/MIN+2
+0 MIN+1 1 0
+0 MIN+1 -2 1073741824/0/MIN+2
+0 MIN+1 2 0
+0 MIN+1 -3 715827883/0/MIN+2
+0 MIN+1 3 0
+0 MIN+1 17 0
+0 MIN+1 127 0
+0 MIN+1 MIN+1 1/0/0
+0 MIN+1 MAX 0
+0 MIN+1 MIN 1/0/0
+0 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 MAX -1 0
+0 MAX 1 MAX/0/MAX-1
+0 MAX -2 0
+0 MAX 2 1073741824/0/MAX-1
+0 MAX -3 0
+0 MAX 3 715827883/0/MAX-1
+0 MAX 17 126322568/0/MAX-8
+0 MAX 127 16909321/0/MAX-7
+0 MAX MIN+1 0
+0 MAX MAX 1/0/0
+0 MAX MIN 0
+0 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 MIN -1 ---
+ java.lang.IllegalArgumentException: 0 until -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+0 MIN 1 0
+0 MIN -2 1073741824/0/MIN+2
+0 MIN 2 0
+0 MIN -3 715827883/0/MIN+2
+0 MIN 3 0
+0 MIN 17 0
+0 MIN 127 0
+0 MIN MIN+1 2/0/MIN+1
+0 MIN MAX 0
+0 MIN MIN 1/0/0
+
+start end step length/first/last
+-----------------------------------------
+-1 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 0 -1 0
+-1 0 1 1/-1/-1
+-1 0 -2 0
+-1 0 2 1/-1/-1
+-1 0 -3 0
+-1 0 3 1/-1/-1
+-1 0 17 1/-1/-1
+-1 0 127 1/-1/-1
+-1 0 MIN+1 0
+-1 0 MAX 1/-1/-1
+-1 0 MIN 0
+-1 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 -1 -1 0
+-1 -1 1 0
+-1 -1 -2 0
+-1 -1 2 0
+-1 -1 -3 0
+-1 -1 3 0
+-1 -1 17 0
+-1 -1 127 0
+-1 -1 MIN+1 0
+-1 -1 MAX 0
+-1 -1 MIN 0
+-1 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 1 -1 0
+-1 1 1 2/-1/0
+-1 1 -2 0
+-1 1 2 1/-1/-1
+-1 1 -3 0
+-1 1 3 1/-1/-1
+-1 1 17 1/-1/-1
+-1 1 127 1/-1/-1
+-1 1 MIN+1 0
+-1 1 MAX 1/-1/-1
+-1 1 MIN 0
+-1 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 3 -1 0
+-1 3 1 4/-1/2
+-1 3 -2 0
+-1 3 2 2/-1/1
+-1 3 -3 0
+-1 3 3 2/-1/2
+-1 3 17 1/-1/-1
+-1 3 127 1/-1/-1
+-1 3 MIN+1 0
+-1 3 MAX 1/-1/-1
+-1 3 MIN 0
+-1 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 MIN+1 -1 MAX-1/-1/MIN+2
+-1 MIN+1 1 0
+-1 MIN+1 -2 1073741823/-1/MIN+3
+-1 MIN+1 2 0
+-1 MIN+1 -3 715827882/-1/MIN+4
+-1 MIN+1 3 0
+-1 MIN+1 17 0
+-1 MIN+1 127 0
+-1 MIN+1 MIN+1 1/-1/-1
+-1 MIN+1 MAX 0
+-1 MIN+1 MIN 1/-1/-1
+-1 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 MAX -1 0
+-1 MAX 1 ---
+ java.lang.IllegalArgumentException: -1 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX -2 0
+-1 MAX 2 1073741824/-1/MAX-2
+-1 MAX -3 0
+-1 MAX 3 715827883/-1/MAX-2
+-1 MAX 17 126322568/-1/MAX-9
+-1 MAX 127 16909321/-1/MAX-8
+-1 MAX MIN+1 0
+-1 MAX MAX 2/-1/MAX-1
+-1 MAX MIN 0
+-1 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 MIN -1 MAX/-1/MIN+1
+-1 MIN 1 0
+-1 MIN -2 1073741824/-1/MIN+1
+-1 MIN 2 0
+-1 MIN -3 715827883/-1/MIN+1
+-1 MIN 3 0
+-1 MIN 17 0
+-1 MIN 127 0
+-1 MIN MIN+1 1/-1/-1
+-1 MIN MAX 0
+-1 MIN MIN 1/-1/-1
+
+start end step length/first/last
+-----------------------------------------
+1 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 0 -1 1/1/1
+1 0 1 0
+1 0 -2 1/1/1
+1 0 2 0
+1 0 -3 1/1/1
+1 0 3 0
+1 0 17 0
+1 0 127 0
+1 0 MIN+1 1/1/1
+1 0 MAX 0
+1 0 MIN 1/1/1
+1 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 -1 -1 2/1/0
+1 -1 1 0
+1 -1 -2 1/1/1
+1 -1 2 0
+1 -1 -3 1/1/1
+1 -1 3 0
+1 -1 17 0
+1 -1 127 0
+1 -1 MIN+1 1/1/1
+1 -1 MAX 0
+1 -1 MIN 1/1/1
+1 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 1 -1 0
+1 1 1 0
+1 1 -2 0
+1 1 2 0
+1 1 -3 0
+1 1 3 0
+1 1 17 0
+1 1 127 0
+1 1 MIN+1 0
+1 1 MAX 0
+1 1 MIN 0
+1 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 3 -1 0
+1 3 1 2/1/2
+1 3 -2 0
+1 3 2 1/1/1
+1 3 -3 0
+1 3 3 1/1/1
+1 3 17 1/1/1
+1 3 127 1/1/1
+1 3 MIN+1 0
+1 3 MAX 1/1/1
+1 3 MIN 0
+1 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 MIN+1 -1 ---
+ java.lang.IllegalArgumentException: 1 until -2147483647 by -1: seqs cannot contain more than Int.MaxValue elements.
+1 MIN+1 1 0
+1 MIN+1 -2 1073741824/1/MIN+3
+1 MIN+1 2 0
+1 MIN+1 -3 715827883/1/MIN+3
+1 MIN+1 3 0
+1 MIN+1 17 0
+1 MIN+1 127 0
+1 MIN+1 MIN+1 2/1/MIN+2
+1 MIN+1 MAX 0
+1 MIN+1 MIN 1/1/1
+1 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 MAX -1 0
+1 MAX 1 MAX-1/1/MAX-1
+1 MAX -2 0
+1 MAX 2 1073741823/1/MAX-2
+1 MAX -3 0
+1 MAX 3 715827882/1/MAX-3
+1 MAX 17 126322568/1/MAX-7
+1 MAX 127 16909321/1/MAX-6
+1 MAX MIN+1 0
+1 MAX MAX 1/1/1
+1 MAX MIN 0
+1 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 MIN -1 ---
+ java.lang.IllegalArgumentException: 1 until -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+1 MIN 1 0
+1 MIN -2 1073741825/1/MIN+1
+1 MIN 2 0
+1 MIN -3 715827883/1/MIN+3
+1 MIN 3 0
+1 MIN 17 0
+1 MIN 127 0
+1 MIN MIN+1 2/1/MIN+2
+1 MIN MAX 0
+1 MIN MIN 2/1/MIN+1
+
+start end step length/first/last
+-----------------------------------------
+3 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 0 -1 3/3/1
+3 0 1 0
+3 0 -2 2/3/1
+3 0 2 0
+3 0 -3 1/3/3
+3 0 3 0
+3 0 17 0
+3 0 127 0
+3 0 MIN+1 1/3/3
+3 0 MAX 0
+3 0 MIN 1/3/3
+3 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 -1 -1 4/3/0
+3 -1 1 0
+3 -1 -2 2/3/1
+3 -1 2 0
+3 -1 -3 2/3/0
+3 -1 3 0
+3 -1 17 0
+3 -1 127 0
+3 -1 MIN+1 1/3/3
+3 -1 MAX 0
+3 -1 MIN 1/3/3
+3 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 1 -1 2/3/2
+3 1 1 0
+3 1 -2 1/3/3
+3 1 2 0
+3 1 -3 1/3/3
+3 1 3 0
+3 1 17 0
+3 1 127 0
+3 1 MIN+1 1/3/3
+3 1 MAX 0
+3 1 MIN 1/3/3
+3 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 3 -1 0
+3 3 1 0
+3 3 -2 0
+3 3 2 0
+3 3 -3 0
+3 3 3 0
+3 3 17 0
+3 3 127 0
+3 3 MIN+1 0
+3 3 MAX 0
+3 3 MIN 0
+3 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 MIN+1 -1 ---
+ java.lang.IllegalArgumentException: 3 until -2147483647 by -1: seqs cannot contain more than Int.MaxValue elements.
+3 MIN+1 1 0
+3 MIN+1 -2 1073741825/3/MIN+3
+3 MIN+1 2 0
+3 MIN+1 -3 715827884/3/MIN+2
+3 MIN+1 3 0
+3 MIN+1 17 0
+3 MIN+1 127 0
+3 MIN+1 MIN+1 2/3/MIN+4
+3 MIN+1 MAX 0
+3 MIN+1 MIN 2/3/MIN+3
+3 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 MAX -1 0
+3 MAX 1 MAX-3/3/MAX-1
+3 MAX -2 0
+3 MAX 2 1073741822/3/MAX-2
+3 MAX -3 0
+3 MAX 3 715827882/3/MAX-1
+3 MAX 17 126322568/3/MAX-5
+3 MAX 127 16909321/3/MAX-4
+3 MAX MIN+1 0
+3 MAX MAX 1/3/3
+3 MAX MIN 0
+3 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 MIN -1 ---
+ java.lang.IllegalArgumentException: 3 until -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+3 MIN 1 0
+3 MIN -2 1073741826/3/MIN+1
+3 MIN 2 0
+3 MIN -3 715827884/3/MIN+2
+3 MIN 3 0
+3 MIN 17 0
+3 MIN 127 0
+3 MIN MIN+1 2/3/MIN+4
+3 MIN MAX 0
+3 MIN MIN 2/3/MIN+3
+
+start end step length/first/last
+-----------------------------------------
+MIN+1 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 0 -1 0
+MIN+1 0 1 MAX/MIN+1/-1
+MIN+1 0 -2 0
+MIN+1 0 2 1073741824/MIN+1/-1
+MIN+1 0 -3 0
+MIN+1 0 3 715827883/MIN+1/-1
+MIN+1 0 17 126322568/MIN+1/-8
+MIN+1 0 127 16909321/MIN+1/-7
+MIN+1 0 MIN+1 0
+MIN+1 0 MAX 1/MIN+1/MIN+1
+MIN+1 0 MIN 0
+MIN+1 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 -1 -1 0
+MIN+1 -1 1 MAX-1/MIN+1/-2
+MIN+1 -1 -2 0
+MIN+1 -1 2 1073741823/MIN+1/-3
+MIN+1 -1 -3 0
+MIN+1 -1 3 715827882/MIN+1/-4
+MIN+1 -1 17 126322568/MIN+1/-8
+MIN+1 -1 127 16909321/MIN+1/-7
+MIN+1 -1 MIN+1 0
+MIN+1 -1 MAX 1/MIN+1/MIN+1
+MIN+1 -1 MIN 0
+MIN+1 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 1 -1 0
+MIN+1 1 1 ---
+ java.lang.IllegalArgumentException: -2147483647 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 -2 0
+MIN+1 1 2 1073741824/MIN+1/-1
+MIN+1 1 -3 0
+MIN+1 1 3 715827883/MIN+1/-1
+MIN+1 1 17 126322568/MIN+1/-8
+MIN+1 1 127 16909321/MIN+1/-7
+MIN+1 1 MIN+1 0
+MIN+1 1 MAX 2/MIN+1/0
+MIN+1 1 MIN 0
+MIN+1 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 3 -1 0
+MIN+1 3 1 ---
+ java.lang.IllegalArgumentException: -2147483647 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 -2 0
+MIN+1 3 2 1073741825/MIN+1/1
+MIN+1 3 -3 0
+MIN+1 3 3 715827884/MIN+1/2
+MIN+1 3 17 126322568/MIN+1/-8
+MIN+1 3 127 16909321/MIN+1/-7
+MIN+1 3 MIN+1 0
+MIN+1 3 MAX 2/MIN+1/0
+MIN+1 3 MIN 0
+MIN+1 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 MIN+1 -1 0
+MIN+1 MIN+1 1 0
+MIN+1 MIN+1 -2 0
+MIN+1 MIN+1 2 0
+MIN+1 MIN+1 -3 0
+MIN+1 MIN+1 3 0
+MIN+1 MIN+1 17 0
+MIN+1 MIN+1 127 0
+MIN+1 MIN+1 MIN+1 0
+MIN+1 MIN+1 MAX 0
+MIN+1 MIN+1 MIN 0
+MIN+1 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 MAX -1 0
+MIN+1 MAX 1 ---
+ java.lang.IllegalArgumentException: -2147483647 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX -2 0
+MIN+1 MAX 2 MAX/MIN+1/MAX-2
+MIN+1 MAX -3 0
+MIN+1 MAX 3 1431655765/MIN+1/MAX-2
+MIN+1 MAX 17 252645135/MIN+1/MAX-16
+MIN+1 MAX 127 33818641/MIN+1/MAX-14
+MIN+1 MAX MIN+1 0
+MIN+1 MAX MAX 2/MIN+1/0
+MIN+1 MAX MIN 0
+MIN+1 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 MIN -1 1/MIN+1/MIN+1
+MIN+1 MIN 1 0
+MIN+1 MIN -2 1/MIN+1/MIN+1
+MIN+1 MIN 2 0
+MIN+1 MIN -3 1/MIN+1/MIN+1
+MIN+1 MIN 3 0
+MIN+1 MIN 17 0
+MIN+1 MIN 127 0
+MIN+1 MIN MIN+1 1/MIN+1/MIN+1
+MIN+1 MIN MAX 0
+MIN+1 MIN MIN 1/MIN+1/MIN+1
+
+start end step length/first/last
+-----------------------------------------
+MAX 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX 0 -1 MAX/MAX/1
+MAX 0 1 0
+MAX 0 -2 1073741824/MAX/1
+MAX 0 2 0
+MAX 0 -3 715827883/MAX/1
+MAX 0 3 0
+MAX 0 17 0
+MAX 0 127 0
+MAX 0 MIN+1 1/MAX/MAX
+MAX 0 MAX 0
+MAX 0 MIN 1/MAX/MAX
+MAX -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX -1 -1 ---
+ java.lang.IllegalArgumentException: 2147483647 until -1 by -1: seqs cannot contain more than Int.MaxValue elements.
+MAX -1 1 0
+MAX -1 -2 1073741824/MAX/1
+MAX -1 2 0
+MAX -1 -3 715827883/MAX/1
+MAX -1 3 0
+MAX -1 17 0
+MAX -1 127 0
+MAX -1 MIN+1 2/MAX/0
+MAX -1 MAX 0
+MAX -1 MIN 1/MAX/MAX
+MAX 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX 1 -1 MAX-1/MAX/2
+MAX 1 1 0
+MAX 1 -2 1073741823/MAX/3
+MAX 1 2 0
+MAX 1 -3 715827882/MAX/4
+MAX 1 3 0
+MAX 1 17 0
+MAX 1 127 0
+MAX 1 MIN+1 1/MAX/MAX
+MAX 1 MAX 0
+MAX 1 MIN 1/MAX/MAX
+MAX 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX 3 -1 MAX-3/MAX/4
+MAX 3 1 0
+MAX 3 -2 1073741822/MAX/5
+MAX 3 2 0
+MAX 3 -3 715827882/MAX/4
+MAX 3 3 0
+MAX 3 17 0
+MAX 3 127 0
+MAX 3 MIN+1 1/MAX/MAX
+MAX 3 MAX 0
+MAX 3 MIN 1/MAX/MAX
+MAX MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX MIN+1 -1 ---
+ java.lang.IllegalArgumentException: 2147483647 until -2147483647 by -1: seqs cannot contain more than Int.MaxValue elements.
+MAX MIN+1 1 0
+MAX MIN+1 -2 MAX/MAX/MIN+3
+MAX MIN+1 2 0
+MAX MIN+1 -3 1431655765/MAX/MIN+3
+MAX MIN+1 3 0
+MAX MIN+1 17 0
+MAX MIN+1 127 0
+MAX MIN+1 MIN+1 2/MAX/0
+MAX MIN+1 MAX 0
+MAX MIN+1 MIN 2/MAX/-1
+MAX MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX MAX -1 0
+MAX MAX 1 0
+MAX MAX -2 0
+MAX MAX 2 0
+MAX MAX -3 0
+MAX MAX 3 0
+MAX MAX 17 0
+MAX MAX 127 0
+MAX MAX MIN+1 0
+MAX MAX MAX 0
+MAX MAX MIN 0
+MAX MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX MIN -1 ---
+ java.lang.IllegalArgumentException: 2147483647 until -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+MAX MIN 1 0
+MAX MIN -2 ---
+ java.lang.IllegalArgumentException: 2147483647 until -2147483648 by -2: seqs cannot contain more than Int.MaxValue elements.
+MAX MIN 2 0
+MAX MIN -3 1431655765/MAX/MIN+3
+MAX MIN 3 0
+MAX MIN 17 0
+MAX MIN 127 0
+MAX MIN MIN+1 3/MAX/MIN+1
+MAX MIN MAX 0
+MAX MIN MIN 2/MAX/-1
+
+start end step length/first/last
+-----------------------------------------
+MIN 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN 0 -1 0
+MIN 0 1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 -2 0
+MIN 0 2 1073741824/MIN/-2
+MIN 0 -3 0
+MIN 0 3 715827883/MIN/-2
+MIN 0 17 126322568/MIN/-9
+MIN 0 127 16909321/MIN/-8
+MIN 0 MIN+1 0
+MIN 0 MAX 2/MIN/-1
+MIN 0 MIN 0
+MIN -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN -1 -1 0
+MIN -1 1 MAX/MIN/-2
+MIN -1 -2 0
+MIN -1 2 1073741824/MIN/-2
+MIN -1 -3 0
+MIN -1 3 715827883/MIN/-2
+MIN -1 17 126322568/MIN/-9
+MIN -1 127 16909321/MIN/-8
+MIN -1 MIN+1 0
+MIN -1 MAX 1/MIN/MIN
+MIN -1 MIN 0
+MIN 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN 1 -1 0
+MIN 1 1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 -2 0
+MIN 1 2 1073741825/MIN/0
+MIN 1 -3 0
+MIN 1 3 715827883/MIN/-2
+MIN 1 17 126322568/MIN/-9
+MIN 1 127 16909321/MIN/-8
+MIN 1 MIN+1 0
+MIN 1 MAX 2/MIN/-1
+MIN 1 MIN 0
+MIN 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN 3 -1 0
+MIN 3 1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 -2 0
+MIN 3 2 1073741826/MIN/2
+MIN 3 -3 0
+MIN 3 3 715827884/MIN/1
+MIN 3 17 126322568/MIN/-9
+MIN 3 127 16909321/MIN/-8
+MIN 3 MIN+1 0
+MIN 3 MAX 2/MIN/-1
+MIN 3 MIN 0
+MIN MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN MIN+1 -1 0
+MIN MIN+1 1 1/MIN/MIN
+MIN MIN+1 -2 0
+MIN MIN+1 2 1/MIN/MIN
+MIN MIN+1 -3 0
+MIN MIN+1 3 1/MIN/MIN
+MIN MIN+1 17 1/MIN/MIN
+MIN MIN+1 127 1/MIN/MIN
+MIN MIN+1 MIN+1 0
+MIN MIN+1 MAX 1/MIN/MIN
+MIN MIN+1 MIN 0
+MIN MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN MAX -1 0
+MIN MAX 1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX -2 0
+MIN MAX 2 ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 2: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX -3 0
+MIN MAX 3 1431655765/MIN/MAX-3
+MIN MAX 17 252645135/MIN/MAX-17
+MIN MAX 127 33818641/MIN/MAX-15
+MIN MAX MIN+1 0
+MIN MAX MAX 3/MIN/MAX-1
+MIN MAX MIN 0
+MIN MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN MIN -1 0
+MIN MIN 1 0
+MIN MIN -2 0
+MIN MIN 2 0
+MIN MIN -3 0
+MIN MIN 3 0
+MIN MIN 17 0
+MIN MIN 127 0
+MIN MIN MIN+1 0
+MIN MIN MAX 0
+MIN MIN MIN 0
+
+>>> start to end <<<
+
+start end step length/first/last
+-----------------------------------------
+0 0 0 1/0/0
+0 0 -1 1/0/0
+0 0 1 1/0/0
+0 0 -2 1/0/0
+0 0 2 1/0/0
+0 0 -3 1/0/0
+0 0 3 1/0/0
+0 0 17 1/0/0
+0 0 127 1/0/0
+0 0 MIN+1 1/0/0
+0 0 MAX 1/0/0
+0 0 MIN 1/0/0
+0 -1 0 0
+0 -1 -1 0
+0 -1 1 0
+0 -1 -2 0
+0 -1 2 0
+0 -1 -3 0
+0 -1 3 0
+0 -1 17 0
+0 -1 127 0
+0 -1 MIN+1 0
+0 -1 MAX 0
+0 -1 MIN 0
+0 1 0 2/0/1
+0 1 -1 2/0/1
+0 1 1 2/0/1
+0 1 -2 2/0/1
+0 1 2 2/0/1
+0 1 -3 2/0/1
+0 1 3 2/0/1
+0 1 17 2/0/1
+0 1 127 2/0/1
+0 1 MIN+1 2/0/1
+0 1 MAX 2/0/1
+0 1 MIN 2/0/1
+0 3 0 4/0/3
+0 3 -1 4/0/3
+0 3 1 4/0/3
+0 3 -2 4/0/3
+0 3 2 4/0/3
+0 3 -3 4/0/3
+0 3 3 4/0/3
+0 3 17 4/0/3
+0 3 127 4/0/3
+0 3 MIN+1 4/0/3
+0 3 MAX 4/0/3
+0 3 MIN 4/0/3
+0 MIN+1 0 0
+0 MIN+1 -1 0
+0 MIN+1 1 0
+0 MIN+1 -2 0
+0 MIN+1 2 0
+0 MIN+1 -3 0
+0 MIN+1 3 0
+0 MIN+1 17 0
+0 MIN+1 127 0
+0 MIN+1 MIN+1 0
+0 MIN+1 MAX 0
+0 MIN+1 MIN 0
+0 MAX 0 ---
+ java.lang.IllegalArgumentException: 0 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+0 MAX -1 ---
+ java.lang.IllegalArgumentException: 0 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+0 MAX 1 ---
+ java.lang.IllegalArgumentException: 0 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+0 MAX -2 ---
+ java.lang.IllegalArgumentException: 0 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+0 MAX 2 ---
+ java.lang.IllegalArgumentException: 0 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+0 MAX -3 ---
+ java.lang.IllegalArgumentException: 0 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+0 MAX 3 ---
+ java.lang.IllegalArgumentException: 0 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+0 MAX 17 ---
+ java.lang.IllegalArgumentException: 0 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+0 MAX 127 ---
+ java.lang.IllegalArgumentException: 0 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+0 MAX MIN+1 ---
+ java.lang.IllegalArgumentException: 0 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+0 MAX MAX ---
+ java.lang.IllegalArgumentException: 0 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+0 MAX MIN ---
+ java.lang.IllegalArgumentException: 0 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+0 MIN 0 0
+0 MIN -1 0
+0 MIN 1 0
+0 MIN -2 0
+0 MIN 2 0
+0 MIN -3 0
+0 MIN 3 0
+0 MIN 17 0
+0 MIN 127 0
+0 MIN MIN+1 0
+0 MIN MAX 0
+0 MIN MIN 0
+
+start end step length/first/last
+-----------------------------------------
+-1 0 0 2/-1/0
+-1 0 -1 2/-1/0
+-1 0 1 2/-1/0
+-1 0 -2 2/-1/0
+-1 0 2 2/-1/0
+-1 0 -3 2/-1/0
+-1 0 3 2/-1/0
+-1 0 17 2/-1/0
+-1 0 127 2/-1/0
+-1 0 MIN+1 2/-1/0
+-1 0 MAX 2/-1/0
+-1 0 MIN 2/-1/0
+-1 -1 0 1/-1/-1
+-1 -1 -1 1/-1/-1
+-1 -1 1 1/-1/-1
+-1 -1 -2 1/-1/-1
+-1 -1 2 1/-1/-1
+-1 -1 -3 1/-1/-1
+-1 -1 3 1/-1/-1
+-1 -1 17 1/-1/-1
+-1 -1 127 1/-1/-1
+-1 -1 MIN+1 1/-1/-1
+-1 -1 MAX 1/-1/-1
+-1 -1 MIN 1/-1/-1
+-1 1 0 3/-1/1
+-1 1 -1 3/-1/1
+-1 1 1 3/-1/1
+-1 1 -2 3/-1/1
+-1 1 2 3/-1/1
+-1 1 -3 3/-1/1
+-1 1 3 3/-1/1
+-1 1 17 3/-1/1
+-1 1 127 3/-1/1
+-1 1 MIN+1 3/-1/1
+-1 1 MAX 3/-1/1
+-1 1 MIN 3/-1/1
+-1 3 0 5/-1/3
+-1 3 -1 5/-1/3
+-1 3 1 5/-1/3
+-1 3 -2 5/-1/3
+-1 3 2 5/-1/3
+-1 3 -3 5/-1/3
+-1 3 3 5/-1/3
+-1 3 17 5/-1/3
+-1 3 127 5/-1/3
+-1 3 MIN+1 5/-1/3
+-1 3 MAX 5/-1/3
+-1 3 MIN 5/-1/3
+-1 MIN+1 0 0
+-1 MIN+1 -1 0
+-1 MIN+1 1 0
+-1 MIN+1 -2 0
+-1 MIN+1 2 0
+-1 MIN+1 -3 0
+-1 MIN+1 3 0
+-1 MIN+1 17 0
+-1 MIN+1 127 0
+-1 MIN+1 MIN+1 0
+-1 MIN+1 MAX 0
+-1 MIN+1 MIN 0
+-1 MAX 0 ---
+ java.lang.IllegalArgumentException: -1 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX -1 ---
+ java.lang.IllegalArgumentException: -1 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX 1 ---
+ java.lang.IllegalArgumentException: -1 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX -2 ---
+ java.lang.IllegalArgumentException: -1 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX 2 ---
+ java.lang.IllegalArgumentException: -1 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX -3 ---
+ java.lang.IllegalArgumentException: -1 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX 3 ---
+ java.lang.IllegalArgumentException: -1 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX 17 ---
+ java.lang.IllegalArgumentException: -1 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX 127 ---
+ java.lang.IllegalArgumentException: -1 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX MIN+1 ---
+ java.lang.IllegalArgumentException: -1 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX MAX ---
+ java.lang.IllegalArgumentException: -1 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX MIN ---
+ java.lang.IllegalArgumentException: -1 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MIN 0 0
+-1 MIN -1 0
+-1 MIN 1 0
+-1 MIN -2 0
+-1 MIN 2 0
+-1 MIN -3 0
+-1 MIN 3 0
+-1 MIN 17 0
+-1 MIN 127 0
+-1 MIN MIN+1 0
+-1 MIN MAX 0
+-1 MIN MIN 0
+
+start end step length/first/last
+-----------------------------------------
+1 0 0 0
+1 0 -1 0
+1 0 1 0
+1 0 -2 0
+1 0 2 0
+1 0 -3 0
+1 0 3 0
+1 0 17 0
+1 0 127 0
+1 0 MIN+1 0
+1 0 MAX 0
+1 0 MIN 0
+1 -1 0 0
+1 -1 -1 0
+1 -1 1 0
+1 -1 -2 0
+1 -1 2 0
+1 -1 -3 0
+1 -1 3 0
+1 -1 17 0
+1 -1 127 0
+1 -1 MIN+1 0
+1 -1 MAX 0
+1 -1 MIN 0
+1 1 0 1/1/1
+1 1 -1 1/1/1
+1 1 1 1/1/1
+1 1 -2 1/1/1
+1 1 2 1/1/1
+1 1 -3 1/1/1
+1 1 3 1/1/1
+1 1 17 1/1/1
+1 1 127 1/1/1
+1 1 MIN+1 1/1/1
+1 1 MAX 1/1/1
+1 1 MIN 1/1/1
+1 3 0 3/1/3
+1 3 -1 3/1/3
+1 3 1 3/1/3
+1 3 -2 3/1/3
+1 3 2 3/1/3
+1 3 -3 3/1/3
+1 3 3 3/1/3
+1 3 17 3/1/3
+1 3 127 3/1/3
+1 3 MIN+1 3/1/3
+1 3 MAX 3/1/3
+1 3 MIN 3/1/3
+1 MIN+1 0 0
+1 MIN+1 -1 0
+1 MIN+1 1 0
+1 MIN+1 -2 0
+1 MIN+1 2 0
+1 MIN+1 -3 0
+1 MIN+1 3 0
+1 MIN+1 17 0
+1 MIN+1 127 0
+1 MIN+1 MIN+1 0
+1 MIN+1 MAX 0
+1 MIN+1 MIN 0
+1 MAX 0 MAX/1/MAX
+1 MAX -1 MAX/1/MAX
+1 MAX 1 MAX/1/MAX
+1 MAX -2 MAX/1/MAX
+1 MAX 2 MAX/1/MAX
+1 MAX -3 MAX/1/MAX
+1 MAX 3 MAX/1/MAX
+1 MAX 17 MAX/1/MAX
+1 MAX 127 MAX/1/MAX
+1 MAX MIN+1 MAX/1/MAX
+1 MAX MAX MAX/1/MAX
+1 MAX MIN MAX/1/MAX
+1 MIN 0 0
+1 MIN -1 0
+1 MIN 1 0
+1 MIN -2 0
+1 MIN 2 0
+1 MIN -3 0
+1 MIN 3 0
+1 MIN 17 0
+1 MIN 127 0
+1 MIN MIN+1 0
+1 MIN MAX 0
+1 MIN MIN 0
+
+start end step length/first/last
+-----------------------------------------
+3 0 0 0
+3 0 -1 0
+3 0 1 0
+3 0 -2 0
+3 0 2 0
+3 0 -3 0
+3 0 3 0
+3 0 17 0
+3 0 127 0
+3 0 MIN+1 0
+3 0 MAX 0
+3 0 MIN 0
+3 -1 0 0
+3 -1 -1 0
+3 -1 1 0
+3 -1 -2 0
+3 -1 2 0
+3 -1 -3 0
+3 -1 3 0
+3 -1 17 0
+3 -1 127 0
+3 -1 MIN+1 0
+3 -1 MAX 0
+3 -1 MIN 0
+3 1 0 0
+3 1 -1 0
+3 1 1 0
+3 1 -2 0
+3 1 2 0
+3 1 -3 0
+3 1 3 0
+3 1 17 0
+3 1 127 0
+3 1 MIN+1 0
+3 1 MAX 0
+3 1 MIN 0
+3 3 0 1/3/3
+3 3 -1 1/3/3
+3 3 1 1/3/3
+3 3 -2 1/3/3
+3 3 2 1/3/3
+3 3 -3 1/3/3
+3 3 3 1/3/3
+3 3 17 1/3/3
+3 3 127 1/3/3
+3 3 MIN+1 1/3/3
+3 3 MAX 1/3/3
+3 3 MIN 1/3/3
+3 MIN+1 0 0
+3 MIN+1 -1 0
+3 MIN+1 1 0
+3 MIN+1 -2 0
+3 MIN+1 2 0
+3 MIN+1 -3 0
+3 MIN+1 3 0
+3 MIN+1 17 0
+3 MIN+1 127 0
+3 MIN+1 MIN+1 0
+3 MIN+1 MAX 0
+3 MIN+1 MIN 0
+3 MAX 0 MAX-2/3/MAX
+3 MAX -1 MAX-2/3/MAX
+3 MAX 1 MAX-2/3/MAX
+3 MAX -2 MAX-2/3/MAX
+3 MAX 2 MAX-2/3/MAX
+3 MAX -3 MAX-2/3/MAX
+3 MAX 3 MAX-2/3/MAX
+3 MAX 17 MAX-2/3/MAX
+3 MAX 127 MAX-2/3/MAX
+3 MAX MIN+1 MAX-2/3/MAX
+3 MAX MAX MAX-2/3/MAX
+3 MAX MIN MAX-2/3/MAX
+3 MIN 0 0
+3 MIN -1 0
+3 MIN 1 0
+3 MIN -2 0
+3 MIN 2 0
+3 MIN -3 0
+3 MIN 3 0
+3 MIN 17 0
+3 MIN 127 0
+3 MIN MIN+1 0
+3 MIN MAX 0
+3 MIN MIN 0
+
+start end step length/first/last
+-----------------------------------------
+MIN+1 0 0 ---
+ java.lang.IllegalArgumentException: -2147483647 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 0 -1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 0 1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 0 -2 ---
+ java.lang.IllegalArgumentException: -2147483647 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 0 2 ---
+ java.lang.IllegalArgumentException: -2147483647 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 0 -3 ---
+ java.lang.IllegalArgumentException: -2147483647 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 0 3 ---
+ java.lang.IllegalArgumentException: -2147483647 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 0 17 ---
+ java.lang.IllegalArgumentException: -2147483647 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 0 127 ---
+ java.lang.IllegalArgumentException: -2147483647 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 0 MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 0 MAX ---
+ java.lang.IllegalArgumentException: -2147483647 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 0 MIN ---
+ java.lang.IllegalArgumentException: -2147483647 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 -1 0 MAX/MIN+1/-1
+MIN+1 -1 -1 MAX/MIN+1/-1
+MIN+1 -1 1 MAX/MIN+1/-1
+MIN+1 -1 -2 MAX/MIN+1/-1
+MIN+1 -1 2 MAX/MIN+1/-1
+MIN+1 -1 -3 MAX/MIN+1/-1
+MIN+1 -1 3 MAX/MIN+1/-1
+MIN+1 -1 17 MAX/MIN+1/-1
+MIN+1 -1 127 MAX/MIN+1/-1
+MIN+1 -1 MIN+1 MAX/MIN+1/-1
+MIN+1 -1 MAX MAX/MIN+1/-1
+MIN+1 -1 MIN MAX/MIN+1/-1
+MIN+1 1 0 ---
+ java.lang.IllegalArgumentException: -2147483647 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 -1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 -2 ---
+ java.lang.IllegalArgumentException: -2147483647 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 2 ---
+ java.lang.IllegalArgumentException: -2147483647 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 -3 ---
+ java.lang.IllegalArgumentException: -2147483647 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 3 ---
+ java.lang.IllegalArgumentException: -2147483647 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 17 ---
+ java.lang.IllegalArgumentException: -2147483647 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 127 ---
+ java.lang.IllegalArgumentException: -2147483647 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 MAX ---
+ java.lang.IllegalArgumentException: -2147483647 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 MIN ---
+ java.lang.IllegalArgumentException: -2147483647 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 0 ---
+ java.lang.IllegalArgumentException: -2147483647 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 -1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 -2 ---
+ java.lang.IllegalArgumentException: -2147483647 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 2 ---
+ java.lang.IllegalArgumentException: -2147483647 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 -3 ---
+ java.lang.IllegalArgumentException: -2147483647 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 3 ---
+ java.lang.IllegalArgumentException: -2147483647 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 17 ---
+ java.lang.IllegalArgumentException: -2147483647 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 127 ---
+ java.lang.IllegalArgumentException: -2147483647 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 MAX ---
+ java.lang.IllegalArgumentException: -2147483647 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 MIN ---
+ java.lang.IllegalArgumentException: -2147483647 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MIN+1 0 1/MIN+1/MIN+1
+MIN+1 MIN+1 -1 1/MIN+1/MIN+1
+MIN+1 MIN+1 1 1/MIN+1/MIN+1
+MIN+1 MIN+1 -2 1/MIN+1/MIN+1
+MIN+1 MIN+1 2 1/MIN+1/MIN+1
+MIN+1 MIN+1 -3 1/MIN+1/MIN+1
+MIN+1 MIN+1 3 1/MIN+1/MIN+1
+MIN+1 MIN+1 17 1/MIN+1/MIN+1
+MIN+1 MIN+1 127 1/MIN+1/MIN+1
+MIN+1 MIN+1 MIN+1 1/MIN+1/MIN+1
+MIN+1 MIN+1 MAX 1/MIN+1/MIN+1
+MIN+1 MIN+1 MIN 1/MIN+1/MIN+1
+MIN+1 MAX 0 ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX -1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX 1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX -2 ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX 2 ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX -3 ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX 3 ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX 17 ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX 127 ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX MAX ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX MIN ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MIN 0 0
+MIN+1 MIN -1 0
+MIN+1 MIN 1 0
+MIN+1 MIN -2 0
+MIN+1 MIN 2 0
+MIN+1 MIN -3 0
+MIN+1 MIN 3 0
+MIN+1 MIN 17 0
+MIN+1 MIN 127 0
+MIN+1 MIN MIN+1 0
+MIN+1 MIN MAX 0
+MIN+1 MIN MIN 0
+
+start end step length/first/last
+-----------------------------------------
+MAX 0 0 0
+MAX 0 -1 0
+MAX 0 1 0
+MAX 0 -2 0
+MAX 0 2 0
+MAX 0 -3 0
+MAX 0 3 0
+MAX 0 17 0
+MAX 0 127 0
+MAX 0 MIN+1 0
+MAX 0 MAX 0
+MAX 0 MIN 0
+MAX -1 0 0
+MAX -1 -1 0
+MAX -1 1 0
+MAX -1 -2 0
+MAX -1 2 0
+MAX -1 -3 0
+MAX -1 3 0
+MAX -1 17 0
+MAX -1 127 0
+MAX -1 MIN+1 0
+MAX -1 MAX 0
+MAX -1 MIN 0
+MAX 1 0 0
+MAX 1 -1 0
+MAX 1 1 0
+MAX 1 -2 0
+MAX 1 2 0
+MAX 1 -3 0
+MAX 1 3 0
+MAX 1 17 0
+MAX 1 127 0
+MAX 1 MIN+1 0
+MAX 1 MAX 0
+MAX 1 MIN 0
+MAX 3 0 0
+MAX 3 -1 0
+MAX 3 1 0
+MAX 3 -2 0
+MAX 3 2 0
+MAX 3 -3 0
+MAX 3 3 0
+MAX 3 17 0
+MAX 3 127 0
+MAX 3 MIN+1 0
+MAX 3 MAX 0
+MAX 3 MIN 0
+MAX MIN+1 0 0
+MAX MIN+1 -1 0
+MAX MIN+1 1 0
+MAX MIN+1 -2 0
+MAX MIN+1 2 0
+MAX MIN+1 -3 0
+MAX MIN+1 3 0
+MAX MIN+1 17 0
+MAX MIN+1 127 0
+MAX MIN+1 MIN+1 0
+MAX MIN+1 MAX 0
+MAX MIN+1 MIN 0
+MAX MAX 0 1/MAX/MAX
+MAX MAX -1 1/MAX/MAX
+MAX MAX 1 1/MAX/MAX
+MAX MAX -2 1/MAX/MAX
+MAX MAX 2 1/MAX/MAX
+MAX MAX -3 1/MAX/MAX
+MAX MAX 3 1/MAX/MAX
+MAX MAX 17 1/MAX/MAX
+MAX MAX 127 1/MAX/MAX
+MAX MAX MIN+1 1/MAX/MAX
+MAX MAX MAX 1/MAX/MAX
+MAX MAX MIN 1/MAX/MAX
+MAX MIN 0 0
+MAX MIN -1 0
+MAX MIN 1 0
+MAX MIN -2 0
+MAX MIN 2 0
+MAX MIN -3 0
+MAX MIN 3 0
+MAX MIN 17 0
+MAX MIN 127 0
+MAX MIN MIN+1 0
+MAX MIN MAX 0
+MAX MIN MIN 0
+
+start end step length/first/last
+-----------------------------------------
+MIN 0 0 ---
+ java.lang.IllegalArgumentException: -2147483648 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 -1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 -2 ---
+ java.lang.IllegalArgumentException: -2147483648 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 2 ---
+ java.lang.IllegalArgumentException: -2147483648 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 -3 ---
+ java.lang.IllegalArgumentException: -2147483648 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 3 ---
+ java.lang.IllegalArgumentException: -2147483648 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 17 ---
+ java.lang.IllegalArgumentException: -2147483648 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 127 ---
+ java.lang.IllegalArgumentException: -2147483648 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 MAX ---
+ java.lang.IllegalArgumentException: -2147483648 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 MIN ---
+ java.lang.IllegalArgumentException: -2147483648 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN -1 0 ---
+ java.lang.IllegalArgumentException: -2147483648 to -1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN -1 -1 ---
+ java.lang.IllegalArgumentException: -2147483648 to -1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN -1 1 ---
+ java.lang.IllegalArgumentException: -2147483648 to -1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN -1 -2 ---
+ java.lang.IllegalArgumentException: -2147483648 to -1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN -1 2 ---
+ java.lang.IllegalArgumentException: -2147483648 to -1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN -1 -3 ---
+ java.lang.IllegalArgumentException: -2147483648 to -1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN -1 3 ---
+ java.lang.IllegalArgumentException: -2147483648 to -1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN -1 17 ---
+ java.lang.IllegalArgumentException: -2147483648 to -1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN -1 127 ---
+ java.lang.IllegalArgumentException: -2147483648 to -1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN -1 MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483648 to -1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN -1 MAX ---
+ java.lang.IllegalArgumentException: -2147483648 to -1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN -1 MIN ---
+ java.lang.IllegalArgumentException: -2147483648 to -1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 0 ---
+ java.lang.IllegalArgumentException: -2147483648 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 -1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 -2 ---
+ java.lang.IllegalArgumentException: -2147483648 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 2 ---
+ java.lang.IllegalArgumentException: -2147483648 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 -3 ---
+ java.lang.IllegalArgumentException: -2147483648 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 3 ---
+ java.lang.IllegalArgumentException: -2147483648 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 17 ---
+ java.lang.IllegalArgumentException: -2147483648 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 127 ---
+ java.lang.IllegalArgumentException: -2147483648 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 MAX ---
+ java.lang.IllegalArgumentException: -2147483648 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 MIN ---
+ java.lang.IllegalArgumentException: -2147483648 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 0 ---
+ java.lang.IllegalArgumentException: -2147483648 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 -1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 -2 ---
+ java.lang.IllegalArgumentException: -2147483648 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 2 ---
+ java.lang.IllegalArgumentException: -2147483648 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 -3 ---
+ java.lang.IllegalArgumentException: -2147483648 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 3 ---
+ java.lang.IllegalArgumentException: -2147483648 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 17 ---
+ java.lang.IllegalArgumentException: -2147483648 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 127 ---
+ java.lang.IllegalArgumentException: -2147483648 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 MAX ---
+ java.lang.IllegalArgumentException: -2147483648 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 MIN ---
+ java.lang.IllegalArgumentException: -2147483648 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MIN+1 0 2/MIN/MIN+1
+MIN MIN+1 -1 2/MIN/MIN+1
+MIN MIN+1 1 2/MIN/MIN+1
+MIN MIN+1 -2 2/MIN/MIN+1
+MIN MIN+1 2 2/MIN/MIN+1
+MIN MIN+1 -3 2/MIN/MIN+1
+MIN MIN+1 3 2/MIN/MIN+1
+MIN MIN+1 17 2/MIN/MIN+1
+MIN MIN+1 127 2/MIN/MIN+1
+MIN MIN+1 MIN+1 2/MIN/MIN+1
+MIN MIN+1 MAX 2/MIN/MIN+1
+MIN MIN+1 MIN 2/MIN/MIN+1
+MIN MAX 0 ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX -1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX 1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX -2 ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX 2 ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX -3 ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX 3 ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX 17 ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX 127 ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX MAX ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX MIN ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MIN 0 1/MIN/MIN
+MIN MIN -1 1/MIN/MIN
+MIN MIN 1 1/MIN/MIN
+MIN MIN -2 1/MIN/MIN
+MIN MIN 2 1/MIN/MIN
+MIN MIN -3 1/MIN/MIN
+MIN MIN 3 1/MIN/MIN
+MIN MIN 17 1/MIN/MIN
+MIN MIN 127 1/MIN/MIN
+MIN MIN MIN+1 1/MIN/MIN
+MIN MIN MAX 1/MIN/MIN
+MIN MIN MIN 1/MIN/MIN
+
+>>> start to end by step <<<
+
+start end step length/first/last
+-----------------------------------------
+0 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 0 -1 1/0/0
+0 0 1 1/0/0
+0 0 -2 1/0/0
+0 0 2 1/0/0
+0 0 -3 1/0/0
+0 0 3 1/0/0
+0 0 17 1/0/0
+0 0 127 1/0/0
+0 0 MIN+1 1/0/0
+0 0 MAX 1/0/0
+0 0 MIN 1/0/0
+0 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 -1 -1 2/0/-1
+0 -1 1 0
+0 -1 -2 1/0/0
+0 -1 2 0
+0 -1 -3 1/0/0
+0 -1 3 0
+0 -1 17 0
+0 -1 127 0
+0 -1 MIN+1 1/0/0
+0 -1 MAX 0
+0 -1 MIN 1/0/0
+0 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 1 -1 0
+0 1 1 2/0/1
+0 1 -2 0
+0 1 2 1/0/0
+0 1 -3 0
+0 1 3 1/0/0
+0 1 17 1/0/0
+0 1 127 1/0/0
+0 1 MIN+1 0
+0 1 MAX 1/0/0
+0 1 MIN 0
+0 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 3 -1 0
+0 3 1 4/0/3
+0 3 -2 0
+0 3 2 2/0/2
+0 3 -3 0
+0 3 3 2/0/3
+0 3 17 1/0/0
+0 3 127 1/0/0
+0 3 MIN+1 0
+0 3 MAX 1/0/0
+0 3 MIN 0
+0 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 MIN+1 -1 ---
+ java.lang.IllegalArgumentException: 0 to -2147483647 by -1: seqs cannot contain more than Int.MaxValue elements.
+0 MIN+1 1 0
+0 MIN+1 -2 1073741824/0/MIN+2
+0 MIN+1 2 0
+0 MIN+1 -3 715827883/0/MIN+2
+0 MIN+1 3 0
+0 MIN+1 17 0
+0 MIN+1 127 0
+0 MIN+1 MIN+1 2/0/MIN+1
+0 MIN+1 MAX 0
+0 MIN+1 MIN 1/0/0
+0 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 MAX -1 0
+0 MAX 1 ---
+ java.lang.IllegalArgumentException: 0 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+0 MAX -2 0
+0 MAX 2 1073741824/0/MAX-1
+0 MAX -3 0
+0 MAX 3 715827883/0/MAX-1
+0 MAX 17 126322568/0/MAX-8
+0 MAX 127 16909321/0/MAX-7
+0 MAX MIN+1 0
+0 MAX MAX 2/0/MAX
+0 MAX MIN 0
+0 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 MIN -1 ---
+ java.lang.IllegalArgumentException: 0 to -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+0 MIN 1 0
+0 MIN -2 1073741825/0/MIN
+0 MIN 2 0
+0 MIN -3 715827883/0/MIN+2
+0 MIN 3 0
+0 MIN 17 0
+0 MIN 127 0
+0 MIN MIN+1 2/0/MIN+1
+0 MIN MAX 0
+0 MIN MIN 2/0/MIN
+
+start end step length/first/last
+-----------------------------------------
+-1 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 0 -1 0
+-1 0 1 2/-1/0
+-1 0 -2 0
+-1 0 2 1/-1/-1
+-1 0 -3 0
+-1 0 3 1/-1/-1
+-1 0 17 1/-1/-1
+-1 0 127 1/-1/-1
+-1 0 MIN+1 0
+-1 0 MAX 1/-1/-1
+-1 0 MIN 0
+-1 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 -1 -1 1/-1/-1
+-1 -1 1 1/-1/-1
+-1 -1 -2 1/-1/-1
+-1 -1 2 1/-1/-1
+-1 -1 -3 1/-1/-1
+-1 -1 3 1/-1/-1
+-1 -1 17 1/-1/-1
+-1 -1 127 1/-1/-1
+-1 -1 MIN+1 1/-1/-1
+-1 -1 MAX 1/-1/-1
+-1 -1 MIN 1/-1/-1
+-1 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 1 -1 0
+-1 1 1 3/-1/1
+-1 1 -2 0
+-1 1 2 2/-1/1
+-1 1 -3 0
+-1 1 3 1/-1/-1
+-1 1 17 1/-1/-1
+-1 1 127 1/-1/-1
+-1 1 MIN+1 0
+-1 1 MAX 1/-1/-1
+-1 1 MIN 0
+-1 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 3 -1 0
+-1 3 1 5/-1/3
+-1 3 -2 0
+-1 3 2 3/-1/3
+-1 3 -3 0
+-1 3 3 2/-1/2
+-1 3 17 1/-1/-1
+-1 3 127 1/-1/-1
+-1 3 MIN+1 0
+-1 3 MAX 1/-1/-1
+-1 3 MIN 0
+-1 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 MIN+1 -1 MAX/-1/MIN+1
+-1 MIN+1 1 0
+-1 MIN+1 -2 1073741824/-1/MIN+1
+-1 MIN+1 2 0
+-1 MIN+1 -3 715827883/-1/MIN+1
+-1 MIN+1 3 0
+-1 MIN+1 17 0
+-1 MIN+1 127 0
+-1 MIN+1 MIN+1 1/-1/-1
+-1 MIN+1 MAX 0
+-1 MIN+1 MIN 1/-1/-1
+-1 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 MAX -1 0
+-1 MAX 1 ---
+ java.lang.IllegalArgumentException: -1 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX -2 0
+-1 MAX 2 1073741825/-1/MAX
+-1 MAX -3 0
+-1 MAX 3 715827883/-1/MAX-2
+-1 MAX 17 126322568/-1/MAX-9
+-1 MAX 127 16909321/-1/MAX-8
+-1 MAX MIN+1 0
+-1 MAX MAX 2/-1/MAX-1
+-1 MAX MIN 0
+-1 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 MIN -1 ---
+ java.lang.IllegalArgumentException: -1 to -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+-1 MIN 1 0
+-1 MIN -2 1073741824/-1/MIN+1
+-1 MIN 2 0
+-1 MIN -3 715827883/-1/MIN+1
+-1 MIN 3 0
+-1 MIN 17 0
+-1 MIN 127 0
+-1 MIN MIN+1 2/-1/MIN
+-1 MIN MAX 0
+-1 MIN MIN 1/-1/-1
+
+start end step length/first/last
+-----------------------------------------
+1 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 0 -1 2/1/0
+1 0 1 0
+1 0 -2 1/1/1
+1 0 2 0
+1 0 -3 1/1/1
+1 0 3 0
+1 0 17 0
+1 0 127 0
+1 0 MIN+1 1/1/1
+1 0 MAX 0
+1 0 MIN 1/1/1
+1 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 -1 -1 3/1/-1
+1 -1 1 0
+1 -1 -2 2/1/-1
+1 -1 2 0
+1 -1 -3 1/1/1
+1 -1 3 0
+1 -1 17 0
+1 -1 127 0
+1 -1 MIN+1 1/1/1
+1 -1 MAX 0
+1 -1 MIN 1/1/1
+1 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 1 -1 1/1/1
+1 1 1 1/1/1
+1 1 -2 1/1/1
+1 1 2 1/1/1
+1 1 -3 1/1/1
+1 1 3 1/1/1
+1 1 17 1/1/1
+1 1 127 1/1/1
+1 1 MIN+1 1/1/1
+1 1 MAX 1/1/1
+1 1 MIN 1/1/1
+1 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 3 -1 0
+1 3 1 3/1/3
+1 3 -2 0
+1 3 2 2/1/3
+1 3 -3 0
+1 3 3 1/1/1
+1 3 17 1/1/1
+1 3 127 1/1/1
+1 3 MIN+1 0
+1 3 MAX 1/1/1
+1 3 MIN 0
+1 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 MIN+1 -1 ---
+ java.lang.IllegalArgumentException: 1 to -2147483647 by -1: seqs cannot contain more than Int.MaxValue elements.
+1 MIN+1 1 0
+1 MIN+1 -2 1073741825/1/MIN+1
+1 MIN+1 2 0
+1 MIN+1 -3 715827883/1/MIN+3
+1 MIN+1 3 0
+1 MIN+1 17 0
+1 MIN+1 127 0
+1 MIN+1 MIN+1 2/1/MIN+2
+1 MIN+1 MAX 0
+1 MIN+1 MIN 2/1/MIN+1
+1 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 MAX -1 0
+1 MAX 1 MAX/1/MAX
+1 MAX -2 0
+1 MAX 2 1073741824/1/MAX
+1 MAX -3 0
+1 MAX 3 715827883/1/MAX
+1 MAX 17 126322568/1/MAX-7
+1 MAX 127 16909321/1/MAX-6
+1 MAX MIN+1 0
+1 MAX MAX 1/1/1
+1 MAX MIN 0
+1 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 MIN -1 ---
+ java.lang.IllegalArgumentException: 1 to -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+1 MIN 1 0
+1 MIN -2 1073741825/1/MIN+1
+1 MIN 2 0
+1 MIN -3 715827884/1/MIN
+1 MIN 3 0
+1 MIN 17 0
+1 MIN 127 0
+1 MIN MIN+1 2/1/MIN+2
+1 MIN MAX 0
+1 MIN MIN 2/1/MIN+1
+
+start end step length/first/last
+-----------------------------------------
+3 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 0 -1 4/3/0
+3 0 1 0
+3 0 -2 2/3/1
+3 0 2 0
+3 0 -3 2/3/0
+3 0 3 0
+3 0 17 0
+3 0 127 0
+3 0 MIN+1 1/3/3
+3 0 MAX 0
+3 0 MIN 1/3/3
+3 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 -1 -1 5/3/-1
+3 -1 1 0
+3 -1 -2 3/3/-1
+3 -1 2 0
+3 -1 -3 2/3/0
+3 -1 3 0
+3 -1 17 0
+3 -1 127 0
+3 -1 MIN+1 1/3/3
+3 -1 MAX 0
+3 -1 MIN 1/3/3
+3 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 1 -1 3/3/1
+3 1 1 0
+3 1 -2 2/3/1
+3 1 2 0
+3 1 -3 1/3/3
+3 1 3 0
+3 1 17 0
+3 1 127 0
+3 1 MIN+1 1/3/3
+3 1 MAX 0
+3 1 MIN 1/3/3
+3 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 3 -1 1/3/3
+3 3 1 1/3/3
+3 3 -2 1/3/3
+3 3 2 1/3/3
+3 3 -3 1/3/3
+3 3 3 1/3/3
+3 3 17 1/3/3
+3 3 127 1/3/3
+3 3 MIN+1 1/3/3
+3 3 MAX 1/3/3
+3 3 MIN 1/3/3
+3 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 MIN+1 -1 ---
+ java.lang.IllegalArgumentException: 3 to -2147483647 by -1: seqs cannot contain more than Int.MaxValue elements.
+3 MIN+1 1 0
+3 MIN+1 -2 1073741826/3/MIN+1
+3 MIN+1 2 0
+3 MIN+1 -3 715827884/3/MIN+2
+3 MIN+1 3 0
+3 MIN+1 17 0
+3 MIN+1 127 0
+3 MIN+1 MIN+1 2/3/MIN+4
+3 MIN+1 MAX 0
+3 MIN+1 MIN 2/3/MIN+3
+3 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 MAX -1 0
+3 MAX 1 MAX-2/3/MAX
+3 MAX -2 0
+3 MAX 2 1073741823/3/MAX
+3 MAX -3 0
+3 MAX 3 715827882/3/MAX-1
+3 MAX 17 126322568/3/MAX-5
+3 MAX 127 16909321/3/MAX-4
+3 MAX MIN+1 0
+3 MAX MAX 1/3/3
+3 MAX MIN 0
+3 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 MIN -1 ---
+ java.lang.IllegalArgumentException: 3 to -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+3 MIN 1 0
+3 MIN -2 1073741826/3/MIN+1
+3 MIN 2 0
+3 MIN -3 715827884/3/MIN+2
+3 MIN 3 0
+3 MIN 17 0
+3 MIN 127 0
+3 MIN MIN+1 2/3/MIN+4
+3 MIN MAX 0
+3 MIN MIN 2/3/MIN+3
+
+start end step length/first/last
+-----------------------------------------
+MIN+1 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 0 -1 0
+MIN+1 0 1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 0 -2 0
+MIN+1 0 2 1073741824/MIN+1/-1
+MIN+1 0 -3 0
+MIN+1 0 3 715827883/MIN+1/-1
+MIN+1 0 17 126322568/MIN+1/-8
+MIN+1 0 127 16909321/MIN+1/-7
+MIN+1 0 MIN+1 0
+MIN+1 0 MAX 2/MIN+1/0
+MIN+1 0 MIN 0
+MIN+1 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 -1 -1 0
+MIN+1 -1 1 MAX/MIN+1/-1
+MIN+1 -1 -2 0
+MIN+1 -1 2 1073741824/MIN+1/-1
+MIN+1 -1 -3 0
+MIN+1 -1 3 715827883/MIN+1/-1
+MIN+1 -1 17 126322568/MIN+1/-8
+MIN+1 -1 127 16909321/MIN+1/-7
+MIN+1 -1 MIN+1 0
+MIN+1 -1 MAX 1/MIN+1/MIN+1
+MIN+1 -1 MIN 0
+MIN+1 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 1 -1 0
+MIN+1 1 1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 -2 0
+MIN+1 1 2 1073741825/MIN+1/1
+MIN+1 1 -3 0
+MIN+1 1 3 715827883/MIN+1/-1
+MIN+1 1 17 126322568/MIN+1/-8
+MIN+1 1 127 16909321/MIN+1/-7
+MIN+1 1 MIN+1 0
+MIN+1 1 MAX 2/MIN+1/0
+MIN+1 1 MIN 0
+MIN+1 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 3 -1 0
+MIN+1 3 1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 -2 0
+MIN+1 3 2 1073741826/MIN+1/3
+MIN+1 3 -3 0
+MIN+1 3 3 715827884/MIN+1/2
+MIN+1 3 17 126322568/MIN+1/-8
+MIN+1 3 127 16909321/MIN+1/-7
+MIN+1 3 MIN+1 0
+MIN+1 3 MAX 2/MIN+1/0
+MIN+1 3 MIN 0
+MIN+1 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 MIN+1 -1 1/MIN+1/MIN+1
+MIN+1 MIN+1 1 1/MIN+1/MIN+1
+MIN+1 MIN+1 -2 1/MIN+1/MIN+1
+MIN+1 MIN+1 2 1/MIN+1/MIN+1
+MIN+1 MIN+1 -3 1/MIN+1/MIN+1
+MIN+1 MIN+1 3 1/MIN+1/MIN+1
+MIN+1 MIN+1 17 1/MIN+1/MIN+1
+MIN+1 MIN+1 127 1/MIN+1/MIN+1
+MIN+1 MIN+1 MIN+1 1/MIN+1/MIN+1
+MIN+1 MIN+1 MAX 1/MIN+1/MIN+1
+MIN+1 MIN+1 MIN 1/MIN+1/MIN+1
+MIN+1 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 MAX -1 0
+MIN+1 MAX 1 ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX -2 0
+MIN+1 MAX 2 ---
+ java.lang.IllegalArgumentException: -2147483647 to 2147483647 by 2: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX -3 0
+MIN+1 MAX 3 1431655765/MIN+1/MAX-2
+MIN+1 MAX 17 252645135/MIN+1/MAX-16
+MIN+1 MAX 127 33818641/MIN+1/MAX-14
+MIN+1 MAX MIN+1 0
+MIN+1 MAX MAX 3/MIN+1/MAX
+MIN+1 MAX MIN 0
+MIN+1 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 MIN -1 2/MIN+1/MIN
+MIN+1 MIN 1 0
+MIN+1 MIN -2 1/MIN+1/MIN+1
+MIN+1 MIN 2 0
+MIN+1 MIN -3 1/MIN+1/MIN+1
+MIN+1 MIN 3 0
+MIN+1 MIN 17 0
+MIN+1 MIN 127 0
+MIN+1 MIN MIN+1 1/MIN+1/MIN+1
+MIN+1 MIN MAX 0
+MIN+1 MIN MIN 1/MIN+1/MIN+1
+
+start end step length/first/last
+-----------------------------------------
+MAX 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX 0 -1 ---
+ java.lang.IllegalArgumentException: 2147483647 to 0 by -1: seqs cannot contain more than Int.MaxValue elements.
+MAX 0 1 0
+MAX 0 -2 1073741824/MAX/1
+MAX 0 2 0
+MAX 0 -3 715827883/MAX/1
+MAX 0 3 0
+MAX 0 17 0
+MAX 0 127 0
+MAX 0 MIN+1 2/MAX/0
+MAX 0 MAX 0
+MAX 0 MIN 1/MAX/MAX
+MAX -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX -1 -1 ---
+ java.lang.IllegalArgumentException: 2147483647 to -1 by -1: seqs cannot contain more than Int.MaxValue elements.
+MAX -1 1 0
+MAX -1 -2 1073741825/MAX/-1
+MAX -1 2 0
+MAX -1 -3 715827883/MAX/1
+MAX -1 3 0
+MAX -1 17 0
+MAX -1 127 0
+MAX -1 MIN+1 2/MAX/0
+MAX -1 MAX 0
+MAX -1 MIN 2/MAX/-1
+MAX 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX 1 -1 MAX/MAX/1
+MAX 1 1 0
+MAX 1 -2 1073741824/MAX/1
+MAX 1 2 0
+MAX 1 -3 715827883/MAX/1
+MAX 1 3 0
+MAX 1 17 0
+MAX 1 127 0
+MAX 1 MIN+1 1/MAX/MAX
+MAX 1 MAX 0
+MAX 1 MIN 1/MAX/MAX
+MAX 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX 3 -1 MAX-2/MAX/3
+MAX 3 1 0
+MAX 3 -2 1073741823/MAX/3
+MAX 3 2 0
+MAX 3 -3 715827882/MAX/4
+MAX 3 3 0
+MAX 3 17 0
+MAX 3 127 0
+MAX 3 MIN+1 1/MAX/MAX
+MAX 3 MAX 0
+MAX 3 MIN 1/MAX/MAX
+MAX MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX MIN+1 -1 ---
+ java.lang.IllegalArgumentException: 2147483647 to -2147483647 by -1: seqs cannot contain more than Int.MaxValue elements.
+MAX MIN+1 1 0
+MAX MIN+1 -2 ---
+ java.lang.IllegalArgumentException: 2147483647 to -2147483647 by -2: seqs cannot contain more than Int.MaxValue elements.
+MAX MIN+1 2 0
+MAX MIN+1 -3 1431655765/MAX/MIN+3
+MAX MIN+1 3 0
+MAX MIN+1 17 0
+MAX MIN+1 127 0
+MAX MIN+1 MIN+1 3/MAX/MIN+1
+MAX MIN+1 MAX 0
+MAX MIN+1 MIN 2/MAX/-1
+MAX MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX MAX -1 1/MAX/MAX
+MAX MAX 1 1/MAX/MAX
+MAX MAX -2 1/MAX/MAX
+MAX MAX 2 1/MAX/MAX
+MAX MAX -3 1/MAX/MAX
+MAX MAX 3 1/MAX/MAX
+MAX MAX 17 1/MAX/MAX
+MAX MAX 127 1/MAX/MAX
+MAX MAX MIN+1 1/MAX/MAX
+MAX MAX MAX 1/MAX/MAX
+MAX MAX MIN 1/MAX/MAX
+MAX MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX MIN -1 ---
+ java.lang.IllegalArgumentException: 2147483647 to -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+MAX MIN 1 0
+MAX MIN -2 ---
+ java.lang.IllegalArgumentException: 2147483647 to -2147483648 by -2: seqs cannot contain more than Int.MaxValue elements.
+MAX MIN 2 0
+MAX MIN -3 1431655766/MAX/MIN
+MAX MIN 3 0
+MAX MIN 17 0
+MAX MIN 127 0
+MAX MIN MIN+1 3/MAX/MIN+1
+MAX MIN MAX 0
+MAX MIN MIN 2/MAX/-1
+
+start end step length/first/last
+-----------------------------------------
+MIN 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN 0 -1 0
+MIN 0 1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 -2 0
+MIN 0 2 1073741825/MIN/0
+MIN 0 -3 0
+MIN 0 3 715827883/MIN/-2
+MIN 0 17 126322568/MIN/-9
+MIN 0 127 16909321/MIN/-8
+MIN 0 MIN+1 0
+MIN 0 MAX 2/MIN/-1
+MIN 0 MIN 0
+MIN -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN -1 -1 0
+MIN -1 1 ---
+ java.lang.IllegalArgumentException: -2147483648 to -1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN -1 -2 0
+MIN -1 2 1073741824/MIN/-2
+MIN -1 -3 0
+MIN -1 3 715827883/MIN/-2
+MIN -1 17 126322568/MIN/-9
+MIN -1 127 16909321/MIN/-8
+MIN -1 MIN+1 0
+MIN -1 MAX 2/MIN/-1
+MIN -1 MIN 0
+MIN 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN 1 -1 0
+MIN 1 1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 -2 0
+MIN 1 2 1073741825/MIN/0
+MIN 1 -3 0
+MIN 1 3 715827884/MIN/1
+MIN 1 17 126322568/MIN/-9
+MIN 1 127 16909321/MIN/-8
+MIN 1 MIN+1 0
+MIN 1 MAX 2/MIN/-1
+MIN 1 MIN 0
+MIN 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN 3 -1 0
+MIN 3 1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 -2 0
+MIN 3 2 1073741826/MIN/2
+MIN 3 -3 0
+MIN 3 3 715827884/MIN/1
+MIN 3 17 126322568/MIN/-9
+MIN 3 127 16909321/MIN/-8
+MIN 3 MIN+1 0
+MIN 3 MAX 2/MIN/-1
+MIN 3 MIN 0
+MIN MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN MIN+1 -1 0
+MIN MIN+1 1 2/MIN/MIN+1
+MIN MIN+1 -2 0
+MIN MIN+1 2 1/MIN/MIN
+MIN MIN+1 -3 0
+MIN MIN+1 3 1/MIN/MIN
+MIN MIN+1 17 1/MIN/MIN
+MIN MIN+1 127 1/MIN/MIN
+MIN MIN+1 MIN+1 0
+MIN MIN+1 MAX 1/MIN/MIN
+MIN MIN+1 MIN 0
+MIN MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN MAX -1 0
+MIN MAX 1 ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX -2 0
+MIN MAX 2 ---
+ java.lang.IllegalArgumentException: -2147483648 to 2147483647 by 2: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX -3 0
+MIN MAX 3 1431655766/MIN/MAX
+MIN MAX 17 252645136/MIN/MAX
+MIN MAX 127 33818641/MIN/MAX-15
+MIN MAX MIN+1 0
+MIN MAX MAX 3/MIN/MAX-1
+MIN MAX MIN 0
+MIN MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN MIN -1 1/MIN/MIN
+MIN MIN 1 1/MIN/MIN
+MIN MIN -2 1/MIN/MIN
+MIN MIN 2 1/MIN/MIN
+MIN MIN -3 1/MIN/MIN
+MIN MIN 3 1/MIN/MIN
+MIN MIN 17 1/MIN/MIN
+MIN MIN 127 1/MIN/MIN
+MIN MIN MIN+1 1/MIN/MIN
+MIN MIN MAX 1/MIN/MIN
+MIN MIN MIN 1/MIN/MIN
+
+>>> start until end <<<
+
+start end step length/first/last
+-----------------------------------------
+0 0 0 0
+0 0 -1 0
+0 0 1 0
+0 0 -2 0
+0 0 2 0
+0 0 -3 0
+0 0 3 0
+0 0 17 0
+0 0 127 0
+0 0 MIN+1 0
+0 0 MAX 0
+0 0 MIN 0
+0 -1 0 0
+0 -1 -1 0
+0 -1 1 0
+0 -1 -2 0
+0 -1 2 0
+0 -1 -3 0
+0 -1 3 0
+0 -1 17 0
+0 -1 127 0
+0 -1 MIN+1 0
+0 -1 MAX 0
+0 -1 MIN 0
+0 1 0 1/0/0
+0 1 -1 1/0/0
+0 1 1 1/0/0
+0 1 -2 1/0/0
+0 1 2 1/0/0
+0 1 -3 1/0/0
+0 1 3 1/0/0
+0 1 17 1/0/0
+0 1 127 1/0/0
+0 1 MIN+1 1/0/0
+0 1 MAX 1/0/0
+0 1 MIN 1/0/0
+0 3 0 3/0/2
+0 3 -1 3/0/2
+0 3 1 3/0/2
+0 3 -2 3/0/2
+0 3 2 3/0/2
+0 3 -3 3/0/2
+0 3 3 3/0/2
+0 3 17 3/0/2
+0 3 127 3/0/2
+0 3 MIN+1 3/0/2
+0 3 MAX 3/0/2
+0 3 MIN 3/0/2
+0 MIN+1 0 0
+0 MIN+1 -1 0
+0 MIN+1 1 0
+0 MIN+1 -2 0
+0 MIN+1 2 0
+0 MIN+1 -3 0
+0 MIN+1 3 0
+0 MIN+1 17 0
+0 MIN+1 127 0
+0 MIN+1 MIN+1 0
+0 MIN+1 MAX 0
+0 MIN+1 MIN 0
+0 MAX 0 MAX/0/MAX-1
+0 MAX -1 MAX/0/MAX-1
+0 MAX 1 MAX/0/MAX-1
+0 MAX -2 MAX/0/MAX-1
+0 MAX 2 MAX/0/MAX-1
+0 MAX -3 MAX/0/MAX-1
+0 MAX 3 MAX/0/MAX-1
+0 MAX 17 MAX/0/MAX-1
+0 MAX 127 MAX/0/MAX-1
+0 MAX MIN+1 MAX/0/MAX-1
+0 MAX MAX MAX/0/MAX-1
+0 MAX MIN MAX/0/MAX-1
+0 MIN 0 0
+0 MIN -1 0
+0 MIN 1 0
+0 MIN -2 0
+0 MIN 2 0
+0 MIN -3 0
+0 MIN 3 0
+0 MIN 17 0
+0 MIN 127 0
+0 MIN MIN+1 0
+0 MIN MAX 0
+0 MIN MIN 0
+
+start end step length/first/last
+-----------------------------------------
+-1 0 0 1/-1/-1
+-1 0 -1 1/-1/-1
+-1 0 1 1/-1/-1
+-1 0 -2 1/-1/-1
+-1 0 2 1/-1/-1
+-1 0 -3 1/-1/-1
+-1 0 3 1/-1/-1
+-1 0 17 1/-1/-1
+-1 0 127 1/-1/-1
+-1 0 MIN+1 1/-1/-1
+-1 0 MAX 1/-1/-1
+-1 0 MIN 1/-1/-1
+-1 -1 0 0
+-1 -1 -1 0
+-1 -1 1 0
+-1 -1 -2 0
+-1 -1 2 0
+-1 -1 -3 0
+-1 -1 3 0
+-1 -1 17 0
+-1 -1 127 0
+-1 -1 MIN+1 0
+-1 -1 MAX 0
+-1 -1 MIN 0
+-1 1 0 2/-1/0
+-1 1 -1 2/-1/0
+-1 1 1 2/-1/0
+-1 1 -2 2/-1/0
+-1 1 2 2/-1/0
+-1 1 -3 2/-1/0
+-1 1 3 2/-1/0
+-1 1 17 2/-1/0
+-1 1 127 2/-1/0
+-1 1 MIN+1 2/-1/0
+-1 1 MAX 2/-1/0
+-1 1 MIN 2/-1/0
+-1 3 0 4/-1/2
+-1 3 -1 4/-1/2
+-1 3 1 4/-1/2
+-1 3 -2 4/-1/2
+-1 3 2 4/-1/2
+-1 3 -3 4/-1/2
+-1 3 3 4/-1/2
+-1 3 17 4/-1/2
+-1 3 127 4/-1/2
+-1 3 MIN+1 4/-1/2
+-1 3 MAX 4/-1/2
+-1 3 MIN 4/-1/2
+-1 MIN+1 0 0
+-1 MIN+1 -1 0
+-1 MIN+1 1 0
+-1 MIN+1 -2 0
+-1 MIN+1 2 0
+-1 MIN+1 -3 0
+-1 MIN+1 3 0
+-1 MIN+1 17 0
+-1 MIN+1 127 0
+-1 MIN+1 MIN+1 0
+-1 MIN+1 MAX 0
+-1 MIN+1 MIN 0
+-1 MAX 0 ---
+ java.lang.IllegalArgumentException: -1 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX -1 ---
+ java.lang.IllegalArgumentException: -1 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX 1 ---
+ java.lang.IllegalArgumentException: -1 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX -2 ---
+ java.lang.IllegalArgumentException: -1 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX 2 ---
+ java.lang.IllegalArgumentException: -1 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX -3 ---
+ java.lang.IllegalArgumentException: -1 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX 3 ---
+ java.lang.IllegalArgumentException: -1 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX 17 ---
+ java.lang.IllegalArgumentException: -1 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX 127 ---
+ java.lang.IllegalArgumentException: -1 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX MIN+1 ---
+ java.lang.IllegalArgumentException: -1 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX MAX ---
+ java.lang.IllegalArgumentException: -1 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX MIN ---
+ java.lang.IllegalArgumentException: -1 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MIN 0 0
+-1 MIN -1 0
+-1 MIN 1 0
+-1 MIN -2 0
+-1 MIN 2 0
+-1 MIN -3 0
+-1 MIN 3 0
+-1 MIN 17 0
+-1 MIN 127 0
+-1 MIN MIN+1 0
+-1 MIN MAX 0
+-1 MIN MIN 0
+
+start end step length/first/last
+-----------------------------------------
+1 0 0 0
+1 0 -1 0
+1 0 1 0
+1 0 -2 0
+1 0 2 0
+1 0 -3 0
+1 0 3 0
+1 0 17 0
+1 0 127 0
+1 0 MIN+1 0
+1 0 MAX 0
+1 0 MIN 0
+1 -1 0 0
+1 -1 -1 0
+1 -1 1 0
+1 -1 -2 0
+1 -1 2 0
+1 -1 -3 0
+1 -1 3 0
+1 -1 17 0
+1 -1 127 0
+1 -1 MIN+1 0
+1 -1 MAX 0
+1 -1 MIN 0
+1 1 0 0
+1 1 -1 0
+1 1 1 0
+1 1 -2 0
+1 1 2 0
+1 1 -3 0
+1 1 3 0
+1 1 17 0
+1 1 127 0
+1 1 MIN+1 0
+1 1 MAX 0
+1 1 MIN 0
+1 3 0 2/1/2
+1 3 -1 2/1/2
+1 3 1 2/1/2
+1 3 -2 2/1/2
+1 3 2 2/1/2
+1 3 -3 2/1/2
+1 3 3 2/1/2
+1 3 17 2/1/2
+1 3 127 2/1/2
+1 3 MIN+1 2/1/2
+1 3 MAX 2/1/2
+1 3 MIN 2/1/2
+1 MIN+1 0 0
+1 MIN+1 -1 0
+1 MIN+1 1 0
+1 MIN+1 -2 0
+1 MIN+1 2 0
+1 MIN+1 -3 0
+1 MIN+1 3 0
+1 MIN+1 17 0
+1 MIN+1 127 0
+1 MIN+1 MIN+1 0
+1 MIN+1 MAX 0
+1 MIN+1 MIN 0
+1 MAX 0 MAX-1/1/MAX-1
+1 MAX -1 MAX-1/1/MAX-1
+1 MAX 1 MAX-1/1/MAX-1
+1 MAX -2 MAX-1/1/MAX-1
+1 MAX 2 MAX-1/1/MAX-1
+1 MAX -3 MAX-1/1/MAX-1
+1 MAX 3 MAX-1/1/MAX-1
+1 MAX 17 MAX-1/1/MAX-1
+1 MAX 127 MAX-1/1/MAX-1
+1 MAX MIN+1 MAX-1/1/MAX-1
+1 MAX MAX MAX-1/1/MAX-1
+1 MAX MIN MAX-1/1/MAX-1
+1 MIN 0 0
+1 MIN -1 0
+1 MIN 1 0
+1 MIN -2 0
+1 MIN 2 0
+1 MIN -3 0
+1 MIN 3 0
+1 MIN 17 0
+1 MIN 127 0
+1 MIN MIN+1 0
+1 MIN MAX 0
+1 MIN MIN 0
+
+start end step length/first/last
+-----------------------------------------
+3 0 0 0
+3 0 -1 0
+3 0 1 0
+3 0 -2 0
+3 0 2 0
+3 0 -3 0
+3 0 3 0
+3 0 17 0
+3 0 127 0
+3 0 MIN+1 0
+3 0 MAX 0
+3 0 MIN 0
+3 -1 0 0
+3 -1 -1 0
+3 -1 1 0
+3 -1 -2 0
+3 -1 2 0
+3 -1 -3 0
+3 -1 3 0
+3 -1 17 0
+3 -1 127 0
+3 -1 MIN+1 0
+3 -1 MAX 0
+3 -1 MIN 0
+3 1 0 0
+3 1 -1 0
+3 1 1 0
+3 1 -2 0
+3 1 2 0
+3 1 -3 0
+3 1 3 0
+3 1 17 0
+3 1 127 0
+3 1 MIN+1 0
+3 1 MAX 0
+3 1 MIN 0
+3 3 0 0
+3 3 -1 0
+3 3 1 0
+3 3 -2 0
+3 3 2 0
+3 3 -3 0
+3 3 3 0
+3 3 17 0
+3 3 127 0
+3 3 MIN+1 0
+3 3 MAX 0
+3 3 MIN 0
+3 MIN+1 0 0
+3 MIN+1 -1 0
+3 MIN+1 1 0
+3 MIN+1 -2 0
+3 MIN+1 2 0
+3 MIN+1 -3 0
+3 MIN+1 3 0
+3 MIN+1 17 0
+3 MIN+1 127 0
+3 MIN+1 MIN+1 0
+3 MIN+1 MAX 0
+3 MIN+1 MIN 0
+3 MAX 0 MAX-3/3/MAX-1
+3 MAX -1 MAX-3/3/MAX-1
+3 MAX 1 MAX-3/3/MAX-1
+3 MAX -2 MAX-3/3/MAX-1
+3 MAX 2 MAX-3/3/MAX-1
+3 MAX -3 MAX-3/3/MAX-1
+3 MAX 3 MAX-3/3/MAX-1
+3 MAX 17 MAX-3/3/MAX-1
+3 MAX 127 MAX-3/3/MAX-1
+3 MAX MIN+1 MAX-3/3/MAX-1
+3 MAX MAX MAX-3/3/MAX-1
+3 MAX MIN MAX-3/3/MAX-1
+3 MIN 0 0
+3 MIN -1 0
+3 MIN 1 0
+3 MIN -2 0
+3 MIN 2 0
+3 MIN -3 0
+3 MIN 3 0
+3 MIN 17 0
+3 MIN 127 0
+3 MIN MIN+1 0
+3 MIN MAX 0
+3 MIN MIN 0
+
+start end step length/first/last
+-----------------------------------------
+MIN+1 0 0 MAX/MIN+1/-1
+MIN+1 0 -1 MAX/MIN+1/-1
+MIN+1 0 1 MAX/MIN+1/-1
+MIN+1 0 -2 MAX/MIN+1/-1
+MIN+1 0 2 MAX/MIN+1/-1
+MIN+1 0 -3 MAX/MIN+1/-1
+MIN+1 0 3 MAX/MIN+1/-1
+MIN+1 0 17 MAX/MIN+1/-1
+MIN+1 0 127 MAX/MIN+1/-1
+MIN+1 0 MIN+1 MAX/MIN+1/-1
+MIN+1 0 MAX MAX/MIN+1/-1
+MIN+1 0 MIN MAX/MIN+1/-1
+MIN+1 -1 0 MAX-1/MIN+1/-2
+MIN+1 -1 -1 MAX-1/MIN+1/-2
+MIN+1 -1 1 MAX-1/MIN+1/-2
+MIN+1 -1 -2 MAX-1/MIN+1/-2
+MIN+1 -1 2 MAX-1/MIN+1/-2
+MIN+1 -1 -3 MAX-1/MIN+1/-2
+MIN+1 -1 3 MAX-1/MIN+1/-2
+MIN+1 -1 17 MAX-1/MIN+1/-2
+MIN+1 -1 127 MAX-1/MIN+1/-2
+MIN+1 -1 MIN+1 MAX-1/MIN+1/-2
+MIN+1 -1 MAX MAX-1/MIN+1/-2
+MIN+1 -1 MIN MAX-1/MIN+1/-2
+MIN+1 1 0 ---
+ java.lang.IllegalArgumentException: -2147483647 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 -1 ---
+ java.lang.IllegalArgumentException: -2147483647 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 1 ---
+ java.lang.IllegalArgumentException: -2147483647 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 -2 ---
+ java.lang.IllegalArgumentException: -2147483647 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 2 ---
+ java.lang.IllegalArgumentException: -2147483647 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 -3 ---
+ java.lang.IllegalArgumentException: -2147483647 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 3 ---
+ java.lang.IllegalArgumentException: -2147483647 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 17 ---
+ java.lang.IllegalArgumentException: -2147483647 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 127 ---
+ java.lang.IllegalArgumentException: -2147483647 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483647 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 MAX ---
+ java.lang.IllegalArgumentException: -2147483647 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 MIN ---
+ java.lang.IllegalArgumentException: -2147483647 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 0 ---
+ java.lang.IllegalArgumentException: -2147483647 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 -1 ---
+ java.lang.IllegalArgumentException: -2147483647 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 1 ---
+ java.lang.IllegalArgumentException: -2147483647 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 -2 ---
+ java.lang.IllegalArgumentException: -2147483647 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 2 ---
+ java.lang.IllegalArgumentException: -2147483647 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 -3 ---
+ java.lang.IllegalArgumentException: -2147483647 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 3 ---
+ java.lang.IllegalArgumentException: -2147483647 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 17 ---
+ java.lang.IllegalArgumentException: -2147483647 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 127 ---
+ java.lang.IllegalArgumentException: -2147483647 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483647 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 MAX ---
+ java.lang.IllegalArgumentException: -2147483647 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 MIN ---
+ java.lang.IllegalArgumentException: -2147483647 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MIN+1 0 0
+MIN+1 MIN+1 -1 0
+MIN+1 MIN+1 1 0
+MIN+1 MIN+1 -2 0
+MIN+1 MIN+1 2 0
+MIN+1 MIN+1 -3 0
+MIN+1 MIN+1 3 0
+MIN+1 MIN+1 17 0
+MIN+1 MIN+1 127 0
+MIN+1 MIN+1 MIN+1 0
+MIN+1 MIN+1 MAX 0
+MIN+1 MIN+1 MIN 0
+MIN+1 MAX 0 ---
+ java.lang.IllegalArgumentException: -2147483647 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX -1 ---
+ java.lang.IllegalArgumentException: -2147483647 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX 1 ---
+ java.lang.IllegalArgumentException: -2147483647 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX -2 ---
+ java.lang.IllegalArgumentException: -2147483647 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX 2 ---
+ java.lang.IllegalArgumentException: -2147483647 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX -3 ---
+ java.lang.IllegalArgumentException: -2147483647 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX 3 ---
+ java.lang.IllegalArgumentException: -2147483647 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX 17 ---
+ java.lang.IllegalArgumentException: -2147483647 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX 127 ---
+ java.lang.IllegalArgumentException: -2147483647 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483647 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX MAX ---
+ java.lang.IllegalArgumentException: -2147483647 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX MIN ---
+ java.lang.IllegalArgumentException: -2147483647 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MIN 0 0
+MIN+1 MIN -1 0
+MIN+1 MIN 1 0
+MIN+1 MIN -2 0
+MIN+1 MIN 2 0
+MIN+1 MIN -3 0
+MIN+1 MIN 3 0
+MIN+1 MIN 17 0
+MIN+1 MIN 127 0
+MIN+1 MIN MIN+1 0
+MIN+1 MIN MAX 0
+MIN+1 MIN MIN 0
+
+start end step length/first/last
+-----------------------------------------
+MAX 0 0 0
+MAX 0 -1 0
+MAX 0 1 0
+MAX 0 -2 0
+MAX 0 2 0
+MAX 0 -3 0
+MAX 0 3 0
+MAX 0 17 0
+MAX 0 127 0
+MAX 0 MIN+1 0
+MAX 0 MAX 0
+MAX 0 MIN 0
+MAX -1 0 0
+MAX -1 -1 0
+MAX -1 1 0
+MAX -1 -2 0
+MAX -1 2 0
+MAX -1 -3 0
+MAX -1 3 0
+MAX -1 17 0
+MAX -1 127 0
+MAX -1 MIN+1 0
+MAX -1 MAX 0
+MAX -1 MIN 0
+MAX 1 0 0
+MAX 1 -1 0
+MAX 1 1 0
+MAX 1 -2 0
+MAX 1 2 0
+MAX 1 -3 0
+MAX 1 3 0
+MAX 1 17 0
+MAX 1 127 0
+MAX 1 MIN+1 0
+MAX 1 MAX 0
+MAX 1 MIN 0
+MAX 3 0 0
+MAX 3 -1 0
+MAX 3 1 0
+MAX 3 -2 0
+MAX 3 2 0
+MAX 3 -3 0
+MAX 3 3 0
+MAX 3 17 0
+MAX 3 127 0
+MAX 3 MIN+1 0
+MAX 3 MAX 0
+MAX 3 MIN 0
+MAX MIN+1 0 0
+MAX MIN+1 -1 0
+MAX MIN+1 1 0
+MAX MIN+1 -2 0
+MAX MIN+1 2 0
+MAX MIN+1 -3 0
+MAX MIN+1 3 0
+MAX MIN+1 17 0
+MAX MIN+1 127 0
+MAX MIN+1 MIN+1 0
+MAX MIN+1 MAX 0
+MAX MIN+1 MIN 0
+MAX MAX 0 0
+MAX MAX -1 0
+MAX MAX 1 0
+MAX MAX -2 0
+MAX MAX 2 0
+MAX MAX -3 0
+MAX MAX 3 0
+MAX MAX 17 0
+MAX MAX 127 0
+MAX MAX MIN+1 0
+MAX MAX MAX 0
+MAX MAX MIN 0
+MAX MIN 0 0
+MAX MIN -1 0
+MAX MIN 1 0
+MAX MIN -2 0
+MAX MIN 2 0
+MAX MIN -3 0
+MAX MIN 3 0
+MAX MIN 17 0
+MAX MIN 127 0
+MAX MIN MIN+1 0
+MAX MIN MAX 0
+MAX MIN MIN 0
+
+start end step length/first/last
+-----------------------------------------
+MIN 0 0 ---
+ java.lang.IllegalArgumentException: -2147483648 until 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 -1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 -2 ---
+ java.lang.IllegalArgumentException: -2147483648 until 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 2 ---
+ java.lang.IllegalArgumentException: -2147483648 until 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 -3 ---
+ java.lang.IllegalArgumentException: -2147483648 until 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 3 ---
+ java.lang.IllegalArgumentException: -2147483648 until 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 17 ---
+ java.lang.IllegalArgumentException: -2147483648 until 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 127 ---
+ java.lang.IllegalArgumentException: -2147483648 until 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 MAX ---
+ java.lang.IllegalArgumentException: -2147483648 until 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 MIN ---
+ java.lang.IllegalArgumentException: -2147483648 until 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN -1 0 MAX/MIN/-2
+MIN -1 -1 MAX/MIN/-2
+MIN -1 1 MAX/MIN/-2
+MIN -1 -2 MAX/MIN/-2
+MIN -1 2 MAX/MIN/-2
+MIN -1 -3 MAX/MIN/-2
+MIN -1 3 MAX/MIN/-2
+MIN -1 17 MAX/MIN/-2
+MIN -1 127 MAX/MIN/-2
+MIN -1 MIN+1 MAX/MIN/-2
+MIN -1 MAX MAX/MIN/-2
+MIN -1 MIN MAX/MIN/-2
+MIN 1 0 ---
+ java.lang.IllegalArgumentException: -2147483648 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 -1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 -2 ---
+ java.lang.IllegalArgumentException: -2147483648 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 2 ---
+ java.lang.IllegalArgumentException: -2147483648 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 -3 ---
+ java.lang.IllegalArgumentException: -2147483648 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 3 ---
+ java.lang.IllegalArgumentException: -2147483648 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 17 ---
+ java.lang.IllegalArgumentException: -2147483648 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 127 ---
+ java.lang.IllegalArgumentException: -2147483648 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 MAX ---
+ java.lang.IllegalArgumentException: -2147483648 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 MIN ---
+ java.lang.IllegalArgumentException: -2147483648 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 0 ---
+ java.lang.IllegalArgumentException: -2147483648 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 -1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 -2 ---
+ java.lang.IllegalArgumentException: -2147483648 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 2 ---
+ java.lang.IllegalArgumentException: -2147483648 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 -3 ---
+ java.lang.IllegalArgumentException: -2147483648 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 3 ---
+ java.lang.IllegalArgumentException: -2147483648 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 17 ---
+ java.lang.IllegalArgumentException: -2147483648 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 127 ---
+ java.lang.IllegalArgumentException: -2147483648 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 MAX ---
+ java.lang.IllegalArgumentException: -2147483648 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 MIN ---
+ java.lang.IllegalArgumentException: -2147483648 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MIN+1 0 1/MIN/MIN
+MIN MIN+1 -1 1/MIN/MIN
+MIN MIN+1 1 1/MIN/MIN
+MIN MIN+1 -2 1/MIN/MIN
+MIN MIN+1 2 1/MIN/MIN
+MIN MIN+1 -3 1/MIN/MIN
+MIN MIN+1 3 1/MIN/MIN
+MIN MIN+1 17 1/MIN/MIN
+MIN MIN+1 127 1/MIN/MIN
+MIN MIN+1 MIN+1 1/MIN/MIN
+MIN MIN+1 MAX 1/MIN/MIN
+MIN MIN+1 MIN 1/MIN/MIN
+MIN MAX 0 ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX -1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX 1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX -2 ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX 2 ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX -3 ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX 3 ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX 17 ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX 127 ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX MIN+1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX MAX ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX MIN ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MIN 0 0
+MIN MIN -1 0
+MIN MIN 1 0
+MIN MIN -2 0
+MIN MIN 2 0
+MIN MIN -3 0
+MIN MIN 3 0
+MIN MIN 17 0
+MIN MIN 127 0
+MIN MIN MIN+1 0
+MIN MIN MAX 0
+MIN MIN MIN 0
+
+>>> start until end by step <<<
+
+start end step length/first/last
+-----------------------------------------
+0 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 0 -1 0
+0 0 1 0
+0 0 -2 0
+0 0 2 0
+0 0 -3 0
+0 0 3 0
+0 0 17 0
+0 0 127 0
+0 0 MIN+1 0
+0 0 MAX 0
+0 0 MIN 0
+0 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 -1 -1 1/0/0
+0 -1 1 0
+0 -1 -2 1/0/0
+0 -1 2 0
+0 -1 -3 1/0/0
+0 -1 3 0
+0 -1 17 0
+0 -1 127 0
+0 -1 MIN+1 1/0/0
+0 -1 MAX 0
+0 -1 MIN 1/0/0
+0 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 1 -1 0
+0 1 1 1/0/0
+0 1 -2 0
+0 1 2 1/0/0
+0 1 -3 0
+0 1 3 1/0/0
+0 1 17 1/0/0
+0 1 127 1/0/0
+0 1 MIN+1 0
+0 1 MAX 1/0/0
+0 1 MIN 0
+0 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 3 -1 0
+0 3 1 3/0/2
+0 3 -2 0
+0 3 2 2/0/2
+0 3 -3 0
+0 3 3 1/0/0
+0 3 17 1/0/0
+0 3 127 1/0/0
+0 3 MIN+1 0
+0 3 MAX 1/0/0
+0 3 MIN 0
+0 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 MIN+1 -1 MAX/0/MIN+2
+0 MIN+1 1 0
+0 MIN+1 -2 1073741824/0/MIN+2
+0 MIN+1 2 0
+0 MIN+1 -3 715827883/0/MIN+2
+0 MIN+1 3 0
+0 MIN+1 17 0
+0 MIN+1 127 0
+0 MIN+1 MIN+1 1/0/0
+0 MIN+1 MAX 0
+0 MIN+1 MIN 1/0/0
+0 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 MAX -1 0
+0 MAX 1 MAX/0/MAX-1
+0 MAX -2 0
+0 MAX 2 1073741824/0/MAX-1
+0 MAX -3 0
+0 MAX 3 715827883/0/MAX-1
+0 MAX 17 126322568/0/MAX-8
+0 MAX 127 16909321/0/MAX-7
+0 MAX MIN+1 0
+0 MAX MAX 1/0/0
+0 MAX MIN 0
+0 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+0 MIN -1 ---
+ java.lang.IllegalArgumentException: 0 until -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+0 MIN 1 0
+0 MIN -2 1073741824/0/MIN+2
+0 MIN 2 0
+0 MIN -3 715827883/0/MIN+2
+0 MIN 3 0
+0 MIN 17 0
+0 MIN 127 0
+0 MIN MIN+1 2/0/MIN+1
+0 MIN MAX 0
+0 MIN MIN 1/0/0
+
+start end step length/first/last
+-----------------------------------------
+-1 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 0 -1 0
+-1 0 1 1/-1/-1
+-1 0 -2 0
+-1 0 2 1/-1/-1
+-1 0 -3 0
+-1 0 3 1/-1/-1
+-1 0 17 1/-1/-1
+-1 0 127 1/-1/-1
+-1 0 MIN+1 0
+-1 0 MAX 1/-1/-1
+-1 0 MIN 0
+-1 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 -1 -1 0
+-1 -1 1 0
+-1 -1 -2 0
+-1 -1 2 0
+-1 -1 -3 0
+-1 -1 3 0
+-1 -1 17 0
+-1 -1 127 0
+-1 -1 MIN+1 0
+-1 -1 MAX 0
+-1 -1 MIN 0
+-1 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 1 -1 0
+-1 1 1 2/-1/0
+-1 1 -2 0
+-1 1 2 1/-1/-1
+-1 1 -3 0
+-1 1 3 1/-1/-1
+-1 1 17 1/-1/-1
+-1 1 127 1/-1/-1
+-1 1 MIN+1 0
+-1 1 MAX 1/-1/-1
+-1 1 MIN 0
+-1 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 3 -1 0
+-1 3 1 4/-1/2
+-1 3 -2 0
+-1 3 2 2/-1/1
+-1 3 -3 0
+-1 3 3 2/-1/2
+-1 3 17 1/-1/-1
+-1 3 127 1/-1/-1
+-1 3 MIN+1 0
+-1 3 MAX 1/-1/-1
+-1 3 MIN 0
+-1 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 MIN+1 -1 MAX-1/-1/MIN+2
+-1 MIN+1 1 0
+-1 MIN+1 -2 1073741823/-1/MIN+3
+-1 MIN+1 2 0
+-1 MIN+1 -3 715827882/-1/MIN+4
+-1 MIN+1 3 0
+-1 MIN+1 17 0
+-1 MIN+1 127 0
+-1 MIN+1 MIN+1 1/-1/-1
+-1 MIN+1 MAX 0
+-1 MIN+1 MIN 1/-1/-1
+-1 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 MAX -1 0
+-1 MAX 1 ---
+ java.lang.IllegalArgumentException: -1 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+-1 MAX -2 0
+-1 MAX 2 1073741824/-1/MAX-2
+-1 MAX -3 0
+-1 MAX 3 715827883/-1/MAX-2
+-1 MAX 17 126322568/-1/MAX-9
+-1 MAX 127 16909321/-1/MAX-8
+-1 MAX MIN+1 0
+-1 MAX MAX 2/-1/MAX-1
+-1 MAX MIN 0
+-1 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+-1 MIN -1 MAX/-1/MIN+1
+-1 MIN 1 0
+-1 MIN -2 1073741824/-1/MIN+1
+-1 MIN 2 0
+-1 MIN -3 715827883/-1/MIN+1
+-1 MIN 3 0
+-1 MIN 17 0
+-1 MIN 127 0
+-1 MIN MIN+1 1/-1/-1
+-1 MIN MAX 0
+-1 MIN MIN 1/-1/-1
+
+start end step length/first/last
+-----------------------------------------
+1 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 0 -1 1/1/1
+1 0 1 0
+1 0 -2 1/1/1
+1 0 2 0
+1 0 -3 1/1/1
+1 0 3 0
+1 0 17 0
+1 0 127 0
+1 0 MIN+1 1/1/1
+1 0 MAX 0
+1 0 MIN 1/1/1
+1 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 -1 -1 2/1/0
+1 -1 1 0
+1 -1 -2 1/1/1
+1 -1 2 0
+1 -1 -3 1/1/1
+1 -1 3 0
+1 -1 17 0
+1 -1 127 0
+1 -1 MIN+1 1/1/1
+1 -1 MAX 0
+1 -1 MIN 1/1/1
+1 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 1 -1 0
+1 1 1 0
+1 1 -2 0
+1 1 2 0
+1 1 -3 0
+1 1 3 0
+1 1 17 0
+1 1 127 0
+1 1 MIN+1 0
+1 1 MAX 0
+1 1 MIN 0
+1 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 3 -1 0
+1 3 1 2/1/2
+1 3 -2 0
+1 3 2 1/1/1
+1 3 -3 0
+1 3 3 1/1/1
+1 3 17 1/1/1
+1 3 127 1/1/1
+1 3 MIN+1 0
+1 3 MAX 1/1/1
+1 3 MIN 0
+1 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 MIN+1 -1 ---
+ java.lang.IllegalArgumentException: 1 until -2147483647 by -1: seqs cannot contain more than Int.MaxValue elements.
+1 MIN+1 1 0
+1 MIN+1 -2 1073741824/1/MIN+3
+1 MIN+1 2 0
+1 MIN+1 -3 715827883/1/MIN+3
+1 MIN+1 3 0
+1 MIN+1 17 0
+1 MIN+1 127 0
+1 MIN+1 MIN+1 2/1/MIN+2
+1 MIN+1 MAX 0
+1 MIN+1 MIN 1/1/1
+1 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 MAX -1 0
+1 MAX 1 MAX-1/1/MAX-1
+1 MAX -2 0
+1 MAX 2 1073741823/1/MAX-2
+1 MAX -3 0
+1 MAX 3 715827882/1/MAX-3
+1 MAX 17 126322568/1/MAX-7
+1 MAX 127 16909321/1/MAX-6
+1 MAX MIN+1 0
+1 MAX MAX 1/1/1
+1 MAX MIN 0
+1 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+1 MIN -1 ---
+ java.lang.IllegalArgumentException: 1 until -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+1 MIN 1 0
+1 MIN -2 1073741825/1/MIN+1
+1 MIN 2 0
+1 MIN -3 715827883/1/MIN+3
+1 MIN 3 0
+1 MIN 17 0
+1 MIN 127 0
+1 MIN MIN+1 2/1/MIN+2
+1 MIN MAX 0
+1 MIN MIN 2/1/MIN+1
+
+start end step length/first/last
+-----------------------------------------
+3 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 0 -1 3/3/1
+3 0 1 0
+3 0 -2 2/3/1
+3 0 2 0
+3 0 -3 1/3/3
+3 0 3 0
+3 0 17 0
+3 0 127 0
+3 0 MIN+1 1/3/3
+3 0 MAX 0
+3 0 MIN 1/3/3
+3 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 -1 -1 4/3/0
+3 -1 1 0
+3 -1 -2 2/3/1
+3 -1 2 0
+3 -1 -3 2/3/0
+3 -1 3 0
+3 -1 17 0
+3 -1 127 0
+3 -1 MIN+1 1/3/3
+3 -1 MAX 0
+3 -1 MIN 1/3/3
+3 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 1 -1 2/3/2
+3 1 1 0
+3 1 -2 1/3/3
+3 1 2 0
+3 1 -3 1/3/3
+3 1 3 0
+3 1 17 0
+3 1 127 0
+3 1 MIN+1 1/3/3
+3 1 MAX 0
+3 1 MIN 1/3/3
+3 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 3 -1 0
+3 3 1 0
+3 3 -2 0
+3 3 2 0
+3 3 -3 0
+3 3 3 0
+3 3 17 0
+3 3 127 0
+3 3 MIN+1 0
+3 3 MAX 0
+3 3 MIN 0
+3 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 MIN+1 -1 ---
+ java.lang.IllegalArgumentException: 3 until -2147483647 by -1: seqs cannot contain more than Int.MaxValue elements.
+3 MIN+1 1 0
+3 MIN+1 -2 1073741825/3/MIN+3
+3 MIN+1 2 0
+3 MIN+1 -3 715827884/3/MIN+2
+3 MIN+1 3 0
+3 MIN+1 17 0
+3 MIN+1 127 0
+3 MIN+1 MIN+1 2/3/MIN+4
+3 MIN+1 MAX 0
+3 MIN+1 MIN 2/3/MIN+3
+3 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 MAX -1 0
+3 MAX 1 MAX-3/3/MAX-1
+3 MAX -2 0
+3 MAX 2 1073741822/3/MAX-2
+3 MAX -3 0
+3 MAX 3 715827882/3/MAX-1
+3 MAX 17 126322568/3/MAX-5
+3 MAX 127 16909321/3/MAX-4
+3 MAX MIN+1 0
+3 MAX MAX 1/3/3
+3 MAX MIN 0
+3 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+3 MIN -1 ---
+ java.lang.IllegalArgumentException: 3 until -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+3 MIN 1 0
+3 MIN -2 1073741826/3/MIN+1
+3 MIN 2 0
+3 MIN -3 715827884/3/MIN+2
+3 MIN 3 0
+3 MIN 17 0
+3 MIN 127 0
+3 MIN MIN+1 2/3/MIN+4
+3 MIN MAX 0
+3 MIN MIN 2/3/MIN+3
+
+start end step length/first/last
+-----------------------------------------
+MIN+1 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 0 -1 0
+MIN+1 0 1 MAX/MIN+1/-1
+MIN+1 0 -2 0
+MIN+1 0 2 1073741824/MIN+1/-1
+MIN+1 0 -3 0
+MIN+1 0 3 715827883/MIN+1/-1
+MIN+1 0 17 126322568/MIN+1/-8
+MIN+1 0 127 16909321/MIN+1/-7
+MIN+1 0 MIN+1 0
+MIN+1 0 MAX 1/MIN+1/MIN+1
+MIN+1 0 MIN 0
+MIN+1 -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 -1 -1 0
+MIN+1 -1 1 MAX-1/MIN+1/-2
+MIN+1 -1 -2 0
+MIN+1 -1 2 1073741823/MIN+1/-3
+MIN+1 -1 -3 0
+MIN+1 -1 3 715827882/MIN+1/-4
+MIN+1 -1 17 126322568/MIN+1/-8
+MIN+1 -1 127 16909321/MIN+1/-7
+MIN+1 -1 MIN+1 0
+MIN+1 -1 MAX 1/MIN+1/MIN+1
+MIN+1 -1 MIN 0
+MIN+1 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 1 -1 0
+MIN+1 1 1 ---
+ java.lang.IllegalArgumentException: -2147483647 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 1 -2 0
+MIN+1 1 2 1073741824/MIN+1/-1
+MIN+1 1 -3 0
+MIN+1 1 3 715827883/MIN+1/-1
+MIN+1 1 17 126322568/MIN+1/-8
+MIN+1 1 127 16909321/MIN+1/-7
+MIN+1 1 MIN+1 0
+MIN+1 1 MAX 2/MIN+1/0
+MIN+1 1 MIN 0
+MIN+1 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 3 -1 0
+MIN+1 3 1 ---
+ java.lang.IllegalArgumentException: -2147483647 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 3 -2 0
+MIN+1 3 2 1073741825/MIN+1/1
+MIN+1 3 -3 0
+MIN+1 3 3 715827884/MIN+1/2
+MIN+1 3 17 126322568/MIN+1/-8
+MIN+1 3 127 16909321/MIN+1/-7
+MIN+1 3 MIN+1 0
+MIN+1 3 MAX 2/MIN+1/0
+MIN+1 3 MIN 0
+MIN+1 MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 MIN+1 -1 0
+MIN+1 MIN+1 1 0
+MIN+1 MIN+1 -2 0
+MIN+1 MIN+1 2 0
+MIN+1 MIN+1 -3 0
+MIN+1 MIN+1 3 0
+MIN+1 MIN+1 17 0
+MIN+1 MIN+1 127 0
+MIN+1 MIN+1 MIN+1 0
+MIN+1 MIN+1 MAX 0
+MIN+1 MIN+1 MIN 0
+MIN+1 MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 MAX -1 0
+MIN+1 MAX 1 ---
+ java.lang.IllegalArgumentException: -2147483647 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN+1 MAX -2 0
+MIN+1 MAX 2 MAX/MIN+1/MAX-2
+MIN+1 MAX -3 0
+MIN+1 MAX 3 1431655765/MIN+1/MAX-2
+MIN+1 MAX 17 252645135/MIN+1/MAX-16
+MIN+1 MAX 127 33818641/MIN+1/MAX-14
+MIN+1 MAX MIN+1 0
+MIN+1 MAX MAX 2/MIN+1/0
+MIN+1 MAX MIN 0
+MIN+1 MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN+1 MIN -1 1/MIN+1/MIN+1
+MIN+1 MIN 1 0
+MIN+1 MIN -2 1/MIN+1/MIN+1
+MIN+1 MIN 2 0
+MIN+1 MIN -3 1/MIN+1/MIN+1
+MIN+1 MIN 3 0
+MIN+1 MIN 17 0
+MIN+1 MIN 127 0
+MIN+1 MIN MIN+1 1/MIN+1/MIN+1
+MIN+1 MIN MAX 0
+MIN+1 MIN MIN 1/MIN+1/MIN+1
+
+start end step length/first/last
+-----------------------------------------
+MAX 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX 0 -1 MAX/MAX/1
+MAX 0 1 0
+MAX 0 -2 1073741824/MAX/1
+MAX 0 2 0
+MAX 0 -3 715827883/MAX/1
+MAX 0 3 0
+MAX 0 17 0
+MAX 0 127 0
+MAX 0 MIN+1 1/MAX/MAX
+MAX 0 MAX 0
+MAX 0 MIN 1/MAX/MAX
+MAX -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX -1 -1 ---
+ java.lang.IllegalArgumentException: 2147483647 until -1 by -1: seqs cannot contain more than Int.MaxValue elements.
+MAX -1 1 0
+MAX -1 -2 1073741824/MAX/1
+MAX -1 2 0
+MAX -1 -3 715827883/MAX/1
+MAX -1 3 0
+MAX -1 17 0
+MAX -1 127 0
+MAX -1 MIN+1 2/MAX/0
+MAX -1 MAX 0
+MAX -1 MIN 1/MAX/MAX
+MAX 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX 1 -1 MAX-1/MAX/2
+MAX 1 1 0
+MAX 1 -2 1073741823/MAX/3
+MAX 1 2 0
+MAX 1 -3 715827882/MAX/4
+MAX 1 3 0
+MAX 1 17 0
+MAX 1 127 0
+MAX 1 MIN+1 1/MAX/MAX
+MAX 1 MAX 0
+MAX 1 MIN 1/MAX/MAX
+MAX 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX 3 -1 MAX-3/MAX/4
+MAX 3 1 0
+MAX 3 -2 1073741822/MAX/5
+MAX 3 2 0
+MAX 3 -3 715827882/MAX/4
+MAX 3 3 0
+MAX 3 17 0
+MAX 3 127 0
+MAX 3 MIN+1 1/MAX/MAX
+MAX 3 MAX 0
+MAX 3 MIN 1/MAX/MAX
+MAX MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX MIN+1 -1 ---
+ java.lang.IllegalArgumentException: 2147483647 until -2147483647 by -1: seqs cannot contain more than Int.MaxValue elements.
+MAX MIN+1 1 0
+MAX MIN+1 -2 MAX/MAX/MIN+3
+MAX MIN+1 2 0
+MAX MIN+1 -3 1431655765/MAX/MIN+3
+MAX MIN+1 3 0
+MAX MIN+1 17 0
+MAX MIN+1 127 0
+MAX MIN+1 MIN+1 2/MAX/0
+MAX MIN+1 MAX 0
+MAX MIN+1 MIN 2/MAX/-1
+MAX MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX MAX -1 0
+MAX MAX 1 0
+MAX MAX -2 0
+MAX MAX 2 0
+MAX MAX -3 0
+MAX MAX 3 0
+MAX MAX 17 0
+MAX MAX 127 0
+MAX MAX MIN+1 0
+MAX MAX MAX 0
+MAX MAX MIN 0
+MAX MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MAX MIN -1 ---
+ java.lang.IllegalArgumentException: 2147483647 until -2147483648 by -1: seqs cannot contain more than Int.MaxValue elements.
+MAX MIN 1 0
+MAX MIN -2 ---
+ java.lang.IllegalArgumentException: 2147483647 until -2147483648 by -2: seqs cannot contain more than Int.MaxValue elements.
+MAX MIN 2 0
+MAX MIN -3 1431655765/MAX/MIN+3
+MAX MIN 3 0
+MAX MIN 17 0
+MAX MIN 127 0
+MAX MIN MIN+1 3/MAX/MIN+1
+MAX MIN MAX 0
+MAX MIN MIN 2/MAX/-1
+
+start end step length/first/last
+-----------------------------------------
+MIN 0 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN 0 -1 0
+MIN 0 1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 0 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 0 -2 0
+MIN 0 2 1073741824/MIN/-2
+MIN 0 -3 0
+MIN 0 3 715827883/MIN/-2
+MIN 0 17 126322568/MIN/-9
+MIN 0 127 16909321/MIN/-8
+MIN 0 MIN+1 0
+MIN 0 MAX 2/MIN/-1
+MIN 0 MIN 0
+MIN -1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN -1 -1 0
+MIN -1 1 MAX/MIN/-2
+MIN -1 -2 0
+MIN -1 2 1073741824/MIN/-2
+MIN -1 -3 0
+MIN -1 3 715827883/MIN/-2
+MIN -1 17 126322568/MIN/-9
+MIN -1 127 16909321/MIN/-8
+MIN -1 MIN+1 0
+MIN -1 MAX 1/MIN/MIN
+MIN -1 MIN 0
+MIN 1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN 1 -1 0
+MIN 1 1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 1 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 1 -2 0
+MIN 1 2 1073741825/MIN/0
+MIN 1 -3 0
+MIN 1 3 715827883/MIN/-2
+MIN 1 17 126322568/MIN/-9
+MIN 1 127 16909321/MIN/-8
+MIN 1 MIN+1 0
+MIN 1 MAX 2/MIN/-1
+MIN 1 MIN 0
+MIN 3 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN 3 -1 0
+MIN 3 1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 3 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN 3 -2 0
+MIN 3 2 1073741826/MIN/2
+MIN 3 -3 0
+MIN 3 3 715827884/MIN/1
+MIN 3 17 126322568/MIN/-9
+MIN 3 127 16909321/MIN/-8
+MIN 3 MIN+1 0
+MIN 3 MAX 2/MIN/-1
+MIN 3 MIN 0
+MIN MIN+1 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN MIN+1 -1 0
+MIN MIN+1 1 1/MIN/MIN
+MIN MIN+1 -2 0
+MIN MIN+1 2 1/MIN/MIN
+MIN MIN+1 -3 0
+MIN MIN+1 3 1/MIN/MIN
+MIN MIN+1 17 1/MIN/MIN
+MIN MIN+1 127 1/MIN/MIN
+MIN MIN+1 MIN+1 0
+MIN MIN+1 MAX 1/MIN/MIN
+MIN MIN+1 MIN 0
+MIN MAX 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN MAX -1 0
+MIN MAX 1 ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 1: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX -2 0
+MIN MAX 2 ---
+ java.lang.IllegalArgumentException: -2147483648 until 2147483647 by 2: seqs cannot contain more than Int.MaxValue elements.
+MIN MAX -3 0
+MIN MAX 3 1431655765/MIN/MAX-3
+MIN MAX 17 252645135/MIN/MAX-17
+MIN MAX 127 33818641/MIN/MAX-15
+MIN MAX MIN+1 0
+MIN MAX MAX 3/MIN/MAX-1
+MIN MAX MIN 0
+MIN MIN 0 ---
+ java.lang.IllegalArgumentException: step cannot be 0.
+MIN MIN -1 0
+MIN MIN 1 0
+MIN MIN -2 0
+MIN MIN 2 0
+MIN MIN -3 0
+MIN MIN 3 0
+MIN MIN 17 0
+MIN MIN 127 0
+MIN MIN MIN+1 0
+MIN MIN MAX 0
+MIN MIN MIN 0
+
diff --git a/test/files/run/range-unit.scala b/test/files/run/range-unit.scala
new file mode 100644
index 0000000..ece0d98
--- /dev/null
+++ b/test/files/run/range-unit.scala
@@ -0,0 +1,55 @@
+import scala.collection.immutable.Range
+
+object Test {
+ // ha ha, I always forget math.abs(Int.MinValue) == Int.MinValue
+ val numbers = (
+ ( (-3 to 3) ++ List(17, 127, Int.MaxValue, Int.MinValue + 1)
+ ).distinct.sortBy(n => (math.abs(n), n))
+ ) :+ Int.MinValue
+
+ // reducing output a little
+ val endpoints = numbers filterNot Set(-3, -2, 2, 17, 127)
+
+ def num(n: Int) = {
+ val frommax = Int.MaxValue - n
+ val frommin = Int.MinValue - n
+
+ if (n > 0) {
+ if (frommax == 0) "MAX"
+ else if (frommax < 1000) "MAX-" + frommax
+ else "" + n
+ }
+ else {
+ if (frommin == 0) "MIN"
+ else if (frommin > -1000) "MIN+" + (-frommin)
+ else "" + n
+ }
+ }
+
+ def run[T](body: => Range): List[Any] = {
+ try { val r = body ; if (r.isEmpty) List(r.length) else List(num(r.length), num(r.head), num(r.last)) }
+ catch { case e: IllegalArgumentException => List("---\n " + e) }
+ }
+
+ def runGroup(label: String, f: (Int, Int, Int) => Range) {
+ println(">>> " + label + " <<<\n")
+ for (start <- endpoints) {
+ val s = "%-7s %-7s %-7s %s".format("start", "end", "step", "length/first/last")
+ println(s + "\n" + ("-" * s.length))
+ for (end <- endpoints ; step <- numbers) {
+ print("%-7s %-7s %-7s ".format(num(start), num(end), num(step)))
+ println(run(f(start, end, step)).mkString("/"))
+ }
+ println("")
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+ runGroup("Range.inclusive", Range.inclusive(_, _, _))
+ runGroup("Range.apply", Range.apply(_, _, _))
+ runGroup("start to end", (x, y, _) => x to y)
+ runGroup("start to end by step", _ to _ by _)
+ runGroup("start until end", (x, y, _) => x until y)
+ runGroup("start until end by step", _ until _ by _)
+ }
+}
diff --git a/test/files/run/rawstrings.check b/test/files/run/rawstrings.check
new file mode 100644
index 0000000..2b6c407
--- /dev/null
+++ b/test/files/run/rawstrings.check
@@ -0,0 +1 @@
+[\n\t'"$\n]
diff --git a/test/files/run/rawstrings.scala b/test/files/run/rawstrings.scala
new file mode 100644
index 0000000..b4d6e0c
--- /dev/null
+++ b/test/files/run/rawstrings.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println(raw"[\n\t'${'"'}$$\n]")
+}
diff --git a/test/files/run/reflection-allmirrors-tostring.check b/test/files/run/reflection-allmirrors-tostring.check
new file mode 100644
index 0000000..2a3be29
--- /dev/null
+++ b/test/files/run/reflection-allmirrors-tostring.check
@@ -0,0 +1,14 @@
+class mirror for C (bound to null)
+module mirror for M (bound to null)
+instance mirror for an instance of C
+field mirror for C.f1 (bound to an instance of C)
+field mirror for C.f2 (bound to an instance of C)
+method mirror for C.m1: Int (bound to an instance of C)
+method mirror for C.m2(): Int (bound to an instance of C)
+method mirror for C.m3[T >: String <: Int]: T (bound to an instance of C)
+method mirror for C.m4[A, B <: A[Int]](x: A[B])(implicit y: Int): Nothing (bound to an instance of C)
+method mirror for C.m5(x: => Int, y: Int*): String (bound to an instance of C)
+class mirror for C.C (bound to an instance of C)
+module mirror for C.M (bound to an instance of C)
+constructor mirror for C.<init>(): C (bound to null)
+constructor mirror for C.C.<init>(): C.this.C (bound to an instance of C)
diff --git a/test/files/run/reflection-allmirrors-tostring.scala b/test/files/run/reflection-allmirrors-tostring.scala
new file mode 100644
index 0000000..73afff2
--- /dev/null
+++ b/test/files/run/reflection-allmirrors-tostring.scala
@@ -0,0 +1,43 @@
+import scala.reflect.runtime.universe._
+
+class C {
+ val f1 = 2
+ var f2 = 3
+
+ def m1 = 4
+ def m2() = 5
+ def m3[T >: String <: Int]: T = ???
+ def m4[A[_], B <: A[Int]](x: A[B])(implicit y: Int) = ???
+ def m5(x: => Int, y: Int*): String = ???
+
+ class C
+ object M
+
+ override def toString = "an instance of C"
+}
+object M
+
+object Test extends App {
+ val cm = scala.reflect.runtime.currentMirror
+// println(cm)
+
+ println(cm.reflectClass(cm.staticClass("C")))
+ println(cm.reflectModule(cm.staticModule("M")))
+ println(cm.reflect(new C))
+
+ val im = cm.reflect(new C)
+ println(im.reflectField(typeOf[C].member(newTermName("f1")).asTerm))
+ println(im.reflectField(typeOf[C].member(newTermName("f2")).asTerm))
+ println(im.reflectMethod(typeOf[C].member(newTermName("m1")).asMethod))
+ println(im.reflectMethod(typeOf[C].member(newTermName("m2")).asMethod))
+ println(im.reflectMethod(typeOf[C].member(newTermName("m3")).asMethod))
+ println(im.reflectMethod(typeOf[C].member(newTermName("m4")).asMethod))
+ println(im.reflectMethod(typeOf[C].member(newTermName("m5")).asMethod))
+ println(im.reflectClass(typeOf[C].member(newTypeName("C")).asClass))
+ println(im.reflectModule(typeOf[C].member(newTermName("M")).asModule))
+
+ val c = cm.staticClass("C")
+ val cc = typeOf[C].member(newTypeName("C")).asClass
+ println(cm.reflectClass(c).reflectConstructor(c.typeSignature.member(nme.CONSTRUCTOR).asMethod))
+ println(im.reflectClass(cc).reflectConstructor(cc.typeSignature.member(nme.CONSTRUCTOR).asMethod))
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-constructormirror-inner-badpath.check b/test/files/run/reflection-constructormirror-inner-badpath.check
new file mode 100644
index 0000000..83852aa
--- /dev/null
+++ b/test/files/run/reflection-constructormirror-inner-badpath.check
@@ -0,0 +1,2 @@
+class R is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror
+()
diff --git a/test/files/run/reflection-constructormirror-inner-badpath.scala b/test/files/run/reflection-constructormirror-inner-badpath.scala
new file mode 100644
index 0000000..4bccff2
--- /dev/null
+++ b/test/files/run/reflection-constructormirror-inner-badpath.scala
@@ -0,0 +1,32 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.ClassTag
+
+class Foo{
+ case class R(
+ sales : Int,
+ name : String
+ )
+
+ def foo = {
+ val expectedType = implicitly[TypeTag[R]]
+ val classTag = implicitly[ClassTag[R]]
+ val cl = classTag.runtimeClass.getClassLoader
+ val cm = runtimeMirror(cl)
+ val constructor = expectedType.tpe.member( nme.CONSTRUCTOR ).asMethod
+ val sig = constructor.typeSignature
+ val sym = cm.classSymbol( classTag.runtimeClass )
+ try {
+ val cls = cm.reflectClass( sym )
+ cls.reflectConstructor( constructor )( 5,"test" ).asInstanceOf[R]
+ println("this indicates a failure")
+ } catch {
+ case ex: Throwable =>
+ println(ex.getMessage)
+ }
+ }
+
+}
+object Test extends App{
+ val foo = new Foo
+ println( foo.foo )
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-constructormirror-inner-good.check b/test/files/run/reflection-constructormirror-inner-good.check
new file mode 100644
index 0000000..d38fb33
--- /dev/null
+++ b/test/files/run/reflection-constructormirror-inner-good.check
@@ -0,0 +1 @@
+R(5,test)
diff --git a/test/files/run/reflection-constructormirror-inner-good.scala b/test/files/run/reflection-constructormirror-inner-good.scala
new file mode 100644
index 0000000..8613321
--- /dev/null
+++ b/test/files/run/reflection-constructormirror-inner-good.scala
@@ -0,0 +1,26 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.ClassTag
+
+class Foo{
+ case class R(
+ sales : Int,
+ name : String
+ )
+
+ def foo = {
+ val expectedType = implicitly[TypeTag[R]]
+ val classTag = implicitly[ClassTag[R]]
+ val cl = classTag.runtimeClass.getClassLoader
+ val cm = runtimeMirror(cl)
+ val constructor = expectedType.tpe.member( nme.CONSTRUCTOR ).asMethod
+ val sig = constructor.typeSignature
+ val sym = cm.classSymbol( classTag.runtimeClass )
+ val cls = cm.reflect( this ).reflectClass( sym )
+ cls.reflectConstructor( constructor )( 5,"test" ).asInstanceOf[R]
+ }
+
+}
+object Test extends App{
+ val foo = new Foo
+ println( foo.foo )
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-constructormirror-nested-badpath.check b/test/files/run/reflection-constructormirror-nested-badpath.check
new file mode 100644
index 0000000..4c65b8a
--- /dev/null
+++ b/test/files/run/reflection-constructormirror-nested-badpath.check
@@ -0,0 +1,2 @@
+class R is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror
+()
diff --git a/test/files/run/reflection-constructormirror-nested-badpath.scala b/test/files/run/reflection-constructormirror-nested-badpath.scala
new file mode 100644
index 0000000..2983f18
--- /dev/null
+++ b/test/files/run/reflection-constructormirror-nested-badpath.scala
@@ -0,0 +1,32 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.ClassTag
+
+class Foo{
+ import Test._
+ def foo = {
+ val expectedType = implicitly[TypeTag[R]]
+ val classTag = implicitly[ClassTag[R]]
+ val cl = classTag.runtimeClass.getClassLoader
+ val cm = runtimeMirror(cl)
+ val constructor = expectedType.tpe.member( nme.CONSTRUCTOR ).asMethod
+ val sig = constructor.typeSignature
+ val sym = cm.classSymbol( classTag.runtimeClass )
+ try {
+ val cls = cm.reflect( this ).reflectClass( sym )
+ cls.reflectConstructor( constructor )( 5,"test" ).asInstanceOf[R]
+ println("this indicates a failure")
+ } catch {
+ case ex: Throwable =>
+ println(ex.getMessage)
+ }
+ }
+
+}
+object Test extends App{
+ case class R(
+ sales : Int,
+ name : String
+ )
+ val foo = new Foo
+ println( foo.foo )
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-constructormirror-nested-good.check b/test/files/run/reflection-constructormirror-nested-good.check
new file mode 100644
index 0000000..d38fb33
--- /dev/null
+++ b/test/files/run/reflection-constructormirror-nested-good.check
@@ -0,0 +1 @@
+R(5,test)
diff --git a/test/files/run/reflection-constructormirror-nested-good.scala b/test/files/run/reflection-constructormirror-nested-good.scala
new file mode 100644
index 0000000..0b7c413
--- /dev/null
+++ b/test/files/run/reflection-constructormirror-nested-good.scala
@@ -0,0 +1,26 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.ClassTag
+
+class Foo{
+ import Test._
+ def foo = {
+ val expectedType = implicitly[TypeTag[R]]
+ val classTag = implicitly[ClassTag[R]]
+ val cl = classTag.runtimeClass.getClassLoader
+ val cm = runtimeMirror(cl)
+ val constructor = expectedType.tpe.member( nme.CONSTRUCTOR ).asMethod
+ val sig = constructor.typeSignature
+ val sym = cm.classSymbol( classTag.runtimeClass )
+ val cls = cm.reflectClass( sym )
+ cls.reflectConstructor( constructor )( 5,"test" ).asInstanceOf[R]
+ }
+}
+
+object Test extends App{
+ case class R(
+ sales : Int,
+ name : String
+ )
+ val foo = new Foo
+ println( foo.foo )
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-constructormirror-toplevel-badpath.check b/test/files/run/reflection-constructormirror-toplevel-badpath.check
new file mode 100644
index 0000000..4c65b8a
--- /dev/null
+++ b/test/files/run/reflection-constructormirror-toplevel-badpath.check
@@ -0,0 +1,2 @@
+class R is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror
+()
diff --git a/test/files/run/reflection-constructormirror-toplevel-badpath.scala b/test/files/run/reflection-constructormirror-toplevel-badpath.scala
new file mode 100644
index 0000000..cf92929
--- /dev/null
+++ b/test/files/run/reflection-constructormirror-toplevel-badpath.scala
@@ -0,0 +1,33 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.ClassTag
+
+case class R(
+ sales : Int,
+ name : String
+)
+
+class Foo{
+ import Test._
+ def foo = {
+ val expectedType = implicitly[TypeTag[R]]
+ val classTag = implicitly[ClassTag[R]]
+ val cl = classTag.runtimeClass.getClassLoader
+ val cm = runtimeMirror(cl)
+ val constructor = expectedType.tpe.member( nme.CONSTRUCTOR ).asMethod
+ val sig = constructor.typeSignature
+ val sym = cm.classSymbol( classTag.runtimeClass )
+ try {
+ val cls = cm.reflect( this ).reflectClass( sym )
+ cls.reflectConstructor( constructor )( 5,"test" ).asInstanceOf[R]
+ println("this indicates a failure")
+ } catch {
+ case ex: Throwable =>
+ println(ex.getMessage)
+ }
+ }
+}
+
+object Test extends App{
+ val foo = new Foo
+ println( foo.foo )
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-constructormirror-toplevel-good.check b/test/files/run/reflection-constructormirror-toplevel-good.check
new file mode 100644
index 0000000..d38fb33
--- /dev/null
+++ b/test/files/run/reflection-constructormirror-toplevel-good.check
@@ -0,0 +1 @@
+R(5,test)
diff --git a/test/files/run/reflection-constructormirror-toplevel-good.scala b/test/files/run/reflection-constructormirror-toplevel-good.scala
new file mode 100644
index 0000000..b68134b
--- /dev/null
+++ b/test/files/run/reflection-constructormirror-toplevel-good.scala
@@ -0,0 +1,27 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.ClassTag
+
+case class R(
+ sales : Int,
+ name : String
+)
+
+class Foo{
+ import Test._
+ def foo = {
+ val expectedType = implicitly[TypeTag[R]]
+ val classTag = implicitly[ClassTag[R]]
+ val cl = classTag.runtimeClass.getClassLoader
+ val cm = runtimeMirror(cl)
+ val constructor = expectedType.tpe.member( nme.CONSTRUCTOR ).asMethod
+ val sig = constructor.typeSignature
+ val sym = cm.classSymbol( classTag.runtimeClass )
+ val cls = cm.reflectClass( sym )
+ cls.reflectConstructor( constructor )( 5,"test" ).asInstanceOf[R]
+ }
+}
+
+object Test extends App{
+ val foo = new Foo
+ println( foo.foo )
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-enclosed-basic.check b/test/files/run/reflection-enclosed-basic.check
new file mode 100644
index 0000000..6210b42
--- /dev/null
+++ b/test/files/run/reflection-enclosed-basic.check
@@ -0,0 +1,18 @@
+class B1
+B1
+1
+class B2
+B2
+2
+object B3
+B3
+3
+object B4
+B4
+4
+object B5
+B5
+5
+object B6
+B6
+6
diff --git a/test/files/run/reflection-enclosed-basic.scala b/test/files/run/reflection-enclosed-basic.scala
new file mode 100644
index 0000000..1dcb6c2
--- /dev/null
+++ b/test/files/run/reflection-enclosed-basic.scala
@@ -0,0 +1,46 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.{classTag, ClassTag}
+
+class B1 { override def toString = "B1"; def foo = 1 }
+private class B2 { override def toString = "B2"; def foo = 2 }
+object B3 { override def toString = "B3"; def foo = 3 }
+private object B4 { override def toString = "B4"; def foo = 4 }
+object B5 extends B1 { override def toString = "B5"; override def foo = 5 }
+private object B6 extends B2 { override def toString = "B6"; override def foo = 6 }
+
+object Test extends App {
+ def testMethodInvocation(instance: Any) = {
+ val instanceMirror = cm.reflect(instance)
+ val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+ val methodMirror = instanceMirror.reflectMethod(method)
+ println(methodMirror())
+ }
+
+ def testNestedClass(name: String) = {
+ val sym = cm.staticClass(name)
+ println(sym)
+ val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+ val ctorMirror = cm.reflectClass(sym).reflectConstructor(ctor)
+ val instance = ctorMirror()
+ println(instance)
+ testMethodInvocation(instance)
+ }
+
+ testNestedClass("B1")
+ testNestedClass("B2")
+
+ def testNestedModule(name: String) = {
+ val sym = cm.staticModule(name)
+ println(sym)
+ val moduleMirror = cm.reflectModule(sym)
+ val instance = moduleMirror.instance
+ println(instance)
+ testMethodInvocation(instance)
+ }
+
+ testNestedModule("B3")
+ testNestedModule("B4")
+ testNestedModule("B5")
+ testNestedModule("B6")
+}
diff --git a/test/files/run/reflection-enclosed-inner-basic.check b/test/files/run/reflection-enclosed-inner-basic.check
new file mode 100644
index 0000000..2496ee4
--- /dev/null
+++ b/test/files/run/reflection-enclosed-inner-basic.check
@@ -0,0 +1,20 @@
+class B
+List(constructor B, class B1, class B2, object B3, object B4, object B5, object B6)
+class B1
+B1
+1
+class B2
+B2
+2
+object B3
+B3
+3
+object B4
+B4
+4
+object B5
+B5
+5
+object B6
+B6
+6
diff --git a/test/files/run/reflection-enclosed-inner-basic.scala b/test/files/run/reflection-enclosed-inner-basic.scala
new file mode 100644
index 0000000..2b2c701
--- /dev/null
+++ b/test/files/run/reflection-enclosed-inner-basic.scala
@@ -0,0 +1,52 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.{classTag, ClassTag}
+
+class B {
+ class B1 { override def toString = "B1"; def foo = 1 }
+ private class B2 { override def toString = "B2"; def foo = 2 }
+ object B3 { override def toString = "B3"; def foo = 3 }
+ private object B4 { override def toString = "B4"; def foo = 4 }
+ object B5 extends B1 { override def toString = "B5"; override def foo = 5 }
+ private object B6 extends B2 { override def toString = "B6"; override def foo = 6 }
+}
+
+object Test extends App {
+ val b = cm.classSymbol(classTag[B].runtimeClass)
+ println(b)
+ println(b.typeSignature.declarations.toList)
+
+ def testMethodInvocation(instance: Any) = {
+ val instanceMirror = cm.reflect(instance)
+ val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+ val methodMirror = instanceMirror.reflectMethod(method)
+ println(methodMirror())
+ }
+
+ def testInnerClass(name: String) = {
+ val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+ println(sym)
+ val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+ val ctorMirror = cm.reflect(new B).reflectClass(sym).reflectConstructor(ctor)
+ val instance = ctorMirror()
+ println(instance)
+ testMethodInvocation(instance)
+ }
+
+ testInnerClass("B1")
+ testInnerClass("B2")
+
+ def testInnerModule(name: String) = {
+ val sym = b.typeSignature.declaration(newTermName(name)).asModule
+ println(sym)
+ val moduleMirror = cm.reflect(new B).reflectModule(sym)
+ val instance = moduleMirror.instance
+ println(instance)
+ testMethodInvocation(instance)
+ }
+
+ testInnerModule("B3")
+ testInnerModule("B4")
+ testInnerModule("B5")
+ testInnerModule("B6")
+}
diff --git a/test/files/run/reflection-enclosed-inner-inner-basic.check b/test/files/run/reflection-enclosed-inner-inner-basic.check
new file mode 100644
index 0000000..add7a81
--- /dev/null
+++ b/test/files/run/reflection-enclosed-inner-inner-basic.check
@@ -0,0 +1,20 @@
+class BB
+List(constructor BB, class B1, class B2, object B3, object B4, object B5, object B6)
+class B1
+B1
+1
+class B2
+B2
+2
+object B3
+B3
+3
+object B4
+B4
+4
+object B5
+B5
+5
+object B6
+B6
+6
diff --git a/test/files/run/reflection-enclosed-inner-inner-basic.scala b/test/files/run/reflection-enclosed-inner-inner-basic.scala
new file mode 100644
index 0000000..1b9e19d
--- /dev/null
+++ b/test/files/run/reflection-enclosed-inner-inner-basic.scala
@@ -0,0 +1,58 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.{classTag, ClassTag}
+
+class B {
+ class BB {
+ class B1 { override def toString = "B1"; def foo = 1 }
+ private class B2 { override def toString = "B2"; def foo = 2 }
+ object B3 { override def toString = "B3"; def foo = 3 }
+ private object B4 { override def toString = "B4"; def foo = 4 }
+ object B5 extends B1 { override def toString = "B5"; override def foo = 5 }
+ private object B6 extends B2 { override def toString = "B6"; override def foo = 6 }
+ }
+}
+
+object Test extends App {
+ val b = cm.classSymbol(classTag[B#BB].runtimeClass)
+ println(b)
+ println(b.typeSignature.declarations.toList)
+
+ def testMethodInvocation(instance: Any) = {
+ val instanceMirror = cm.reflect(instance)
+ val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+ val methodMirror = instanceMirror.reflectMethod(method)
+ println(methodMirror())
+ }
+
+ def testInnerClass(name: String) = {
+ val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+ println(sym)
+ val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+ val outer1 = new B
+ val outer2 = new outer1.BB
+ val ctorMirror = cm.reflect(outer2).reflectClass(sym).reflectConstructor(ctor)
+ val instance = ctorMirror()
+ println(instance)
+ testMethodInvocation(instance)
+ }
+
+ testInnerClass("B1")
+ testInnerClass("B2")
+
+ def testInnerModule(name: String) = {
+ val sym = b.typeSignature.declaration(newTermName(name)).asModule
+ println(sym)
+ val outer1 = new B
+ val outer2 = new outer1.BB
+ val moduleMirror = cm.reflect(outer2).reflectModule(sym)
+ val instance = moduleMirror.instance
+ println(instance)
+ testMethodInvocation(instance)
+ }
+
+ testInnerModule("B3")
+ testInnerModule("B4")
+ testInnerModule("B5")
+ testInnerModule("B6")
+}
diff --git a/test/files/run/reflection-enclosed-inner-nested-basic.check b/test/files/run/reflection-enclosed-inner-nested-basic.check
new file mode 100644
index 0000000..0f5176a
--- /dev/null
+++ b/test/files/run/reflection-enclosed-inner-nested-basic.check
@@ -0,0 +1,20 @@
+object BB
+List(constructor BB, class B1, class B2, object B3, object B4, object B5, object B6)
+class B1
+B1
+1
+class B2
+B2
+2
+object B3
+B3
+3
+object B4
+B4
+4
+object B5
+B5
+5
+object B6
+B6
+6
diff --git a/test/files/run/reflection-enclosed-inner-nested-basic.scala b/test/files/run/reflection-enclosed-inner-nested-basic.scala
new file mode 100644
index 0000000..2800ee2
--- /dev/null
+++ b/test/files/run/reflection-enclosed-inner-nested-basic.scala
@@ -0,0 +1,55 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.{classTag, ClassTag}
+
+class B {
+ object BB {
+ class B1 { override def toString = "B1"; def foo = 1 }
+ private class B2 { override def toString = "B2"; def foo = 2 }
+ object B3 { override def toString = "B3"; def foo = 3 }
+ private object B4 { override def toString = "B4"; def foo = 4 }
+ object B5 extends B1 { override def toString = "B5"; override def foo = 5 }
+ private object B6 extends B2 { override def toString = "B6"; override def foo = 6 }
+ }
+}
+
+object Test extends App {
+ val outer1 = new B()
+ val b = cm.moduleSymbol(classTag[outer1.BB.type].runtimeClass)
+ println(b)
+ println(b.typeSignature.declarations.toList)
+
+ def testMethodInvocation(instance: Any) = {
+ val instanceMirror = cm.reflect(instance)
+ val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+ val methodMirror = instanceMirror.reflectMethod(method)
+ println(methodMirror())
+ }
+
+ def testNestedClass(name: String) = {
+ val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+ println(sym)
+ val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+ val ctorMirror = cm.reflect(outer1.BB).reflectClass(sym).reflectConstructor(ctor)
+ val instance = ctorMirror()
+ println(instance)
+ testMethodInvocation(instance)
+ }
+
+ testNestedClass("B1")
+ testNestedClass("B2")
+
+ def testNestedModule(name: String) = {
+ val sym = b.typeSignature.declaration(newTermName(name)).asModule
+ println(sym)
+ val moduleMirror = cm.reflect(outer1.BB).reflectModule(sym)
+ val instance = moduleMirror.instance
+ println(instance)
+ testMethodInvocation(instance)
+ }
+
+ testNestedModule("B3")
+ testNestedModule("B4")
+ testNestedModule("B5")
+ testNestedModule("B6")
+}
diff --git a/test/files/run/reflection-enclosed-nested-basic.check b/test/files/run/reflection-enclosed-nested-basic.check
new file mode 100644
index 0000000..b0e6114
--- /dev/null
+++ b/test/files/run/reflection-enclosed-nested-basic.check
@@ -0,0 +1,20 @@
+object B
+List(constructor B, class B1, class B2, object B3, object B4, object B5, object B6)
+class B1
+B1
+1
+class B2
+B2
+2
+object B3
+B3
+3
+object B4
+B4
+4
+object B5
+B5
+5
+object B6
+B6
+6
diff --git a/test/files/run/reflection-enclosed-nested-basic.scala b/test/files/run/reflection-enclosed-nested-basic.scala
new file mode 100644
index 0000000..8b740c2
--- /dev/null
+++ b/test/files/run/reflection-enclosed-nested-basic.scala
@@ -0,0 +1,52 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.{classTag, ClassTag}
+
+object B {
+ class B1 { override def toString = "B1"; def foo = 1 }
+ private class B2 { override def toString = "B2"; def foo = 2 }
+ object B3 { override def toString = "B3"; def foo = 3 }
+ private object B4 { override def toString = "B4"; def foo = 4 }
+ object B5 extends B1 { override def toString = "B5"; override def foo = 5 }
+ private object B6 extends B2 { override def toString = "B6"; override def foo = 6 }
+}
+
+object Test extends App {
+ val b = cm.moduleSymbol(classTag[B.type].runtimeClass)
+ println(b)
+ println(b.typeSignature.declarations.toList)
+
+ def testMethodInvocation(instance: Any) = {
+ val instanceMirror = cm.reflect(instance)
+ val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+ val methodMirror = instanceMirror.reflectMethod(method)
+ println(methodMirror())
+ }
+
+ def testNestedClass(name: String) = {
+ val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+ println(sym)
+ val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+ val ctorMirror = cm.reflectClass(sym).reflectConstructor(ctor)
+ val instance = ctorMirror()
+ println(instance)
+ testMethodInvocation(instance)
+ }
+
+ testNestedClass("B1")
+ testNestedClass("B2")
+
+ def testNestedModule(name: String) = {
+ val sym = b.typeSignature.declaration(newTermName(name)).asModule
+ println(sym)
+ val moduleMirror = cm.reflectModule(sym)
+ val instance = moduleMirror.instance
+ println(instance)
+ testMethodInvocation(instance)
+ }
+
+ testNestedModule("B3")
+ testNestedModule("B4")
+ testNestedModule("B5")
+ testNestedModule("B6")
+}
diff --git a/test/files/run/reflection-enclosed-nested-inner-basic.check b/test/files/run/reflection-enclosed-nested-inner-basic.check
new file mode 100644
index 0000000..add7a81
--- /dev/null
+++ b/test/files/run/reflection-enclosed-nested-inner-basic.check
@@ -0,0 +1,20 @@
+class BB
+List(constructor BB, class B1, class B2, object B3, object B4, object B5, object B6)
+class B1
+B1
+1
+class B2
+B2
+2
+object B3
+B3
+3
+object B4
+B4
+4
+object B5
+B5
+5
+object B6
+B6
+6
diff --git a/test/files/run/reflection-enclosed-nested-inner-basic.scala b/test/files/run/reflection-enclosed-nested-inner-basic.scala
new file mode 100644
index 0000000..7466733
--- /dev/null
+++ b/test/files/run/reflection-enclosed-nested-inner-basic.scala
@@ -0,0 +1,54 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.{classTag, ClassTag}
+
+object B {
+ class BB {
+ class B1 { override def toString = "B1"; def foo = 1 }
+ private class B2 { override def toString = "B2"; def foo = 2 }
+ object B3 { override def toString = "B3"; def foo = 3 }
+ private object B4 { override def toString = "B4"; def foo = 4 }
+ object B5 extends B1 { override def toString = "B5"; override def foo = 5 }
+ private object B6 extends B2 { override def toString = "B6"; override def foo = 6 }
+ }
+}
+
+object Test extends App {
+ val b = cm.classSymbol(classTag[B.BB].runtimeClass)
+ println(b)
+ println(b.typeSignature.declarations.toList)
+
+ def testMethodInvocation(instance: Any) = {
+ val instanceMirror = cm.reflect(instance)
+ val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+ val methodMirror = instanceMirror.reflectMethod(method)
+ println(methodMirror())
+ }
+
+ def testInnerClass(name: String) = {
+ val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+ println(sym)
+ val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+ val ctorMirror = cm.reflect(new B.BB).reflectClass(sym).reflectConstructor(ctor)
+ val instance = ctorMirror()
+ println(instance)
+ testMethodInvocation(instance)
+ }
+
+ testInnerClass("B1")
+ testInnerClass("B2")
+
+ def testInnerModule(name: String) = {
+ val sym = b.typeSignature.declaration(newTermName(name)).asModule
+ println(sym)
+ val moduleMirror = cm.reflect(new B.BB).reflectModule(sym)
+ val instance = moduleMirror.instance
+ println(instance)
+ testMethodInvocation(instance)
+ }
+
+ testInnerModule("B3")
+ testInnerModule("B4")
+ testInnerModule("B5")
+ testInnerModule("B6")
+}
diff --git a/test/files/run/reflection-enclosed-nested-nested-basic.check b/test/files/run/reflection-enclosed-nested-nested-basic.check
new file mode 100644
index 0000000..0f5176a
--- /dev/null
+++ b/test/files/run/reflection-enclosed-nested-nested-basic.check
@@ -0,0 +1,20 @@
+object BB
+List(constructor BB, class B1, class B2, object B3, object B4, object B5, object B6)
+class B1
+B1
+1
+class B2
+B2
+2
+object B3
+B3
+3
+object B4
+B4
+4
+object B5
+B5
+5
+object B6
+B6
+6
diff --git a/test/files/run/reflection-enclosed-nested-nested-basic.scala b/test/files/run/reflection-enclosed-nested-nested-basic.scala
new file mode 100644
index 0000000..8335ea4
--- /dev/null
+++ b/test/files/run/reflection-enclosed-nested-nested-basic.scala
@@ -0,0 +1,54 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.{classTag, ClassTag}
+
+object B {
+ object BB {
+ class B1 { override def toString = "B1"; def foo = 1 }
+ private class B2 { override def toString = "B2"; def foo = 2 }
+ object B3 { override def toString = "B3"; def foo = 3 }
+ private object B4 { override def toString = "B4"; def foo = 4 }
+ object B5 extends B1 { override def toString = "B5"; override def foo = 5 }
+ private object B6 extends B2 { override def toString = "B6"; override def foo = 6 }
+ }
+}
+
+object Test extends App {
+ val b = cm.moduleSymbol(classTag[B.BB.type].runtimeClass)
+ println(b)
+ println(b.typeSignature.declarations.toList)
+
+ def testMethodInvocation(instance: Any) = {
+ val instanceMirror = cm.reflect(instance)
+ val method = instanceMirror.symbol.typeSignature.declaration(newTermName("foo")).asMethod
+ val methodMirror = instanceMirror.reflectMethod(method)
+ println(methodMirror())
+ }
+
+ def testNestedClass(name: String) = {
+ val sym = b.typeSignature.declaration(newTypeName(name)).asClass
+ println(sym)
+ val ctor = sym.typeSignature.declaration(newTermName("<init>")).asMethod
+ val ctorMirror = cm.reflectClass(sym).reflectConstructor(ctor)
+ val instance = ctorMirror()
+ println(instance)
+ testMethodInvocation(instance)
+ }
+
+ testNestedClass("B1")
+ testNestedClass("B2")
+
+ def testNestedModule(name: String) = {
+ val sym = b.typeSignature.declaration(newTermName(name)).asModule
+ println(sym)
+ val moduleMirror = cm.reflectModule(sym)
+ val instance = moduleMirror.instance
+ println(instance)
+ testMethodInvocation(instance)
+ }
+
+ testNestedModule("B3")
+ testNestedModule("B4")
+ testNestedModule("B5")
+ testNestedModule("B6")
+}
diff --git a/test/files/run/reflection-equality.check b/test/files/run/reflection-equality.check
new file mode 100644
index 0000000..65b5257
--- /dev/null
+++ b/test/files/run/reflection-equality.check
@@ -0,0 +1,53 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> class X {
+ def methodIntIntInt(x: Int, y: Int) = x+y
+}
+defined class X
+
+scala>
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> import scala.reflect.runtime.{ currentMirror => cm }
+import scala.reflect.runtime.{currentMirror=>cm}
+
+scala> def im: InstanceMirror = cm.reflect(new X)
+im: reflect.runtime.universe.InstanceMirror
+
+scala> val cs: ClassSymbol = im.symbol
+cs: reflect.runtime.universe.ClassSymbol = class X
+
+scala> val ts: Type = cs.typeSignature
+ts: reflect.runtime.universe.Type =
+scala.AnyRef {
+ def <init>(): X
+ def methodIntIntInt(x: scala.Int,y: scala.Int): scala.Int
+}
+
+scala> val ms: MethodSymbol = ts.declaration(newTermName("methodIntIntInt")).asMethod
+ms: reflect.runtime.universe.MethodSymbol = method methodIntIntInt
+
+scala> val MethodType( _, t1 ) = ms.typeSignature
+t1: reflect.runtime.universe.Type = scala.Int
+
+scala> val t2 = typeOf[scala.Int]
+t2: reflect.runtime.universe.Type = Int
+
+scala> t1 == t2
+res0: Boolean = false
+
+scala> t1 =:= t2
+res1: Boolean = true
+
+scala> t1 <:< t2
+res2: Boolean = true
+
+scala> t2 <:< t1
+res3: Boolean = true
+
+scala>
diff --git a/test/files/run/reflection-equality.scala b/test/files/run/reflection-equality.scala
new file mode 100644
index 0000000..8fc8272
--- /dev/null
+++ b/test/files/run/reflection-equality.scala
@@ -0,0 +1,22 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |class X {
+ | def methodIntIntInt(x: Int, y: Int) = x+y
+ |}
+ |
+ |import scala.reflect.runtime.universe._
+ |import scala.reflect.runtime.{ currentMirror => cm }
+ |def im: InstanceMirror = cm.reflect(new X)
+ |val cs: ClassSymbol = im.symbol
+ |val ts: Type = cs.typeSignature
+ |val ms: MethodSymbol = ts.declaration(newTermName("methodIntIntInt")).asMethod
+ |val MethodType( _, t1 ) = ms.typeSignature
+ |val t2 = typeOf[scala.Int]
+ |t1 == t2
+ |t1 =:= t2
+ |t1 <:< t2
+ |t2 <:< t1
+ |""".stripMargin
+}
diff --git a/test/files/run/reflection-fieldmirror-accessorsareokay.check b/test/files/run/reflection-fieldmirror-accessorsareokay.check
new file mode 100644
index 0000000..e6936c8
--- /dev/null
+++ b/test/files/run/reflection-fieldmirror-accessorsareokay.check
@@ -0,0 +1,6 @@
+true
+42
+2
+true
+2
+2
diff --git a/test/files/run/reflection-fieldmirror-accessorsareokay.scala b/test/files/run/reflection-fieldmirror-accessorsareokay.scala
new file mode 100644
index 0000000..1635402
--- /dev/null
+++ b/test/files/run/reflection-fieldmirror-accessorsareokay.scala
@@ -0,0 +1,29 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ class A {
+ var x: Int = 42
+ }
+
+ val a = new A
+
+ val im: InstanceMirror = cm.reflect(a)
+ val cs = im.symbol
+
+ def test(f: Symbol) = {
+ try {
+ val fm: FieldMirror = im.reflectField(f.asTerm)
+ println(fm.symbol.isVar)
+ println(fm.get)
+ fm.set(2)
+ println(fm.get)
+ } catch {
+ case ex: Throwable =>
+ println(ex.getMessage)
+ }
+ }
+
+ test(cs.typeSignature.declaration(newTermName("x")).asTerm)
+ test(cs.typeSignature.declaration(newTermName("x_$eq")).asTerm)
+}
diff --git a/test/files/run/reflection-fieldmirror-ctorparam.check b/test/files/run/reflection-fieldmirror-ctorparam.check
new file mode 100644
index 0000000..e391e7c
--- /dev/null
+++ b/test/files/run/reflection-fieldmirror-ctorparam.check
@@ -0,0 +1,3 @@
+class scala.ScalaReflectionException: Scala field x isn't represented as a Java field, neither it has a Java accessor method
+note that private parameters of class constructors don't get mapped onto fields and/or accessors,
+unless they are used outside of their declaring constructors.
diff --git a/test/files/run/reflection-fieldmirror-ctorparam.scala b/test/files/run/reflection-fieldmirror-ctorparam.scala
new file mode 100644
index 0000000..b9d50fe
--- /dev/null
+++ b/test/files/run/reflection-fieldmirror-ctorparam.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ class A(x: Int) {
+ private[this] var xx = x
+ }
+
+ val a = new A(42)
+
+ val im: InstanceMirror = cm.reflect(a)
+ val cs = im.symbol
+ val f = cs.typeSignature.declaration(newTermName("x")).asTerm
+ try {
+ val fm: FieldMirror = im.reflectField(f)
+ println(fm.get)
+ } catch {
+ case ex: Throwable =>
+ println(s"${ex.getClass}: ${ex.getMessage}")
+ }
+}
diff --git a/test/files/run/reflection-fieldmirror-getsetval.check b/test/files/run/reflection-fieldmirror-getsetval.check
new file mode 100644
index 0000000..1e959a9
--- /dev/null
+++ b/test/files/run/reflection-fieldmirror-getsetval.check
@@ -0,0 +1,2 @@
+42
+2
diff --git a/test/files/run/reflection-fieldmirror-getsetval.scala b/test/files/run/reflection-fieldmirror-getsetval.scala
new file mode 100644
index 0000000..9022148
--- /dev/null
+++ b/test/files/run/reflection-fieldmirror-getsetval.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ class A {
+ val x: Int = 42
+ }
+
+ val a = new A
+
+ val im: InstanceMirror = cm.reflect(a)
+ val cs = im.symbol
+ val f = cs.typeSignature.declaration(newTermName("x" + nme.LOCAL_SUFFIX_STRING)).asTerm
+ val fm: FieldMirror = im.reflectField(f)
+ println(fm.get)
+ fm.set(2)
+ println(fm.get)
+}
diff --git a/test/files/run/reflection-fieldmirror-getsetvar.check b/test/files/run/reflection-fieldmirror-getsetvar.check
new file mode 100644
index 0000000..1e959a9
--- /dev/null
+++ b/test/files/run/reflection-fieldmirror-getsetvar.check
@@ -0,0 +1,2 @@
+42
+2
diff --git a/test/files/run/reflection-fieldmirror-getsetvar.scala b/test/files/run/reflection-fieldmirror-getsetvar.scala
new file mode 100644
index 0000000..abcf396
--- /dev/null
+++ b/test/files/run/reflection-fieldmirror-getsetvar.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ class A {
+ var x: Int = 42
+ }
+
+ val a = new A
+
+ val im: InstanceMirror = cm.reflect(a)
+ val cs = im.symbol
+ val f = cs.typeSignature.declaration(newTermName("x" + nme.LOCAL_SUFFIX_STRING)).asTerm
+ val fm: FieldMirror = im.reflectField(f)
+ println(fm.get)
+ fm.set(2)
+ println(fm.get)
+}
diff --git a/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.check b/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.check
new file mode 100644
index 0000000..27ba77d
--- /dev/null
+++ b/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.check
@@ -0,0 +1 @@
+true
diff --git a/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala b/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala
new file mode 100644
index 0000000..2b4a9bb
--- /dev/null
+++ b/test/files/run/reflection-fieldmirror-nmelocalsuffixstring.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ class A {
+ var x: Int = 42
+ }
+
+ val a = new A
+
+ val im: InstanceMirror = cm.reflect(a)
+ val cs = im.symbol
+ val f = cs.typeSignature.declaration(newTermName("x" + nme.LOCAL_SUFFIX_STRING)).asTerm
+ val fm: FieldMirror = im.reflectField(f)
+ println(fm.symbol.isVar)
+}
diff --git a/test/files/run/reflection-fieldmirror-privatethis.check b/test/files/run/reflection-fieldmirror-privatethis.check
new file mode 100644
index 0000000..1601011
--- /dev/null
+++ b/test/files/run/reflection-fieldmirror-privatethis.check
@@ -0,0 +1,3 @@
+true
+42
+2
diff --git a/test/files/run/reflection-fieldmirror-privatethis.scala b/test/files/run/reflection-fieldmirror-privatethis.scala
new file mode 100644
index 0000000..ab838db
--- /dev/null
+++ b/test/files/run/reflection-fieldmirror-privatethis.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ class A {
+ private[this] var x: Int = 42
+ }
+
+ val a = new A
+
+ val im: InstanceMirror = cm.reflect(a)
+ val cs = im.symbol
+ val f = cs.typeSignature.declaration(newTermName("x")).asTerm
+ val fm: FieldMirror = im.reflectField(f)
+ println(fm.symbol.isVar)
+ println(fm.get)
+ fm.set(2)
+ println(fm.get)
+}
diff --git a/test/files/run/reflection-fieldsymbol-navigation.check b/test/files/run/reflection-fieldsymbol-navigation.check
new file mode 100644
index 0000000..ae0597a
--- /dev/null
+++ b/test/files/run/reflection-fieldsymbol-navigation.check
@@ -0,0 +1,6 @@
+method x
+false
+variable x
+true
+method x
+method x_=
diff --git a/test/files/run/reflection-fieldsymbol-navigation.scala b/test/files/run/reflection-fieldsymbol-navigation.scala
new file mode 100644
index 0000000..4448724
--- /dev/null
+++ b/test/files/run/reflection-fieldsymbol-navigation.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+
+class C {
+ var x = 2
+}
+
+object Test extends App {
+ val x = typeOf[C].member(newTermName("x")).asTerm
+ println(x)
+ println(x.isVar)
+ println(x.accessed)
+ println(x.accessed.asTerm.isVar)
+ println(x.getter)
+ println(x.setter)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-implClass.scala b/test/files/run/reflection-implClass.scala
new file mode 100644
index 0000000..b3c0081
--- /dev/null
+++ b/test/files/run/reflection-implClass.scala
@@ -0,0 +1,40 @@
+/**
+ * Tries to load a symbol for the `Foo$class` using Scala reflection.
+ * Since trait implementation classes do not get pickling information
+ * symbol for them should be created using fallback mechanism
+ * that exposes Java reflection information dressed up in
+ * a Scala symbol.
+ */
+object Test extends App with Outer {
+ import scala.reflect.{ClassTag, classTag}
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+
+ assert(cm.classSymbol(classTag[Foo].runtimeClass).typeSignature.declaration(newTermName("bar")).typeSignature ==
+ cm.classSymbol(classTag[Bar].runtimeClass).typeSignature.declaration(newTermName("foo")).typeSignature)
+
+ val s1 = implClass(classTag[Foo].runtimeClass)
+ assert(s1 != NoSymbol)
+ assert(s1.typeSignature != NoType)
+ assert(s1.companionSymbol.typeSignature != NoType)
+ assert(s1.companionSymbol.typeSignature.declaration(newTermName("bar")) != NoSymbol)
+ val s2 = implClass(classTag[Bar].runtimeClass)
+ assert(s2 != NoSymbol)
+ assert(s2.typeSignature != NoType)
+ assert(s2.companionSymbol.typeSignature != NoType)
+ assert(s2.companionSymbol.typeSignature.declaration(newTermName("foo")) != NoSymbol)
+ def implClass(clazz: Class[_]) = {
+ val implClass = Class.forName(clazz.getName + "$class")
+ cm.classSymbol(implClass)
+ }
+}
+
+trait Foo {
+ def bar = 1
+}
+
+trait Outer {
+ trait Bar {
+ def foo = 1
+ }
+}
diff --git a/test/files/run/reflection-implicit.check b/test/files/run/reflection-implicit.check
new file mode 100644
index 0000000..5a88a46
--- /dev/null
+++ b/test/files/run/reflection-implicit.check
@@ -0,0 +1,2 @@
+List(true, true, true, true)
+true
diff --git a/test/files/run/reflection-implicit.scala b/test/files/run/reflection-implicit.scala
new file mode 100644
index 0000000..0bcb0bc
--- /dev/null
+++ b/test/files/run/reflection-implicit.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+
+class C {
+ implicit val v = new C
+ implicit def d(x: C)(implicit c: C): Int = ???
+ implicit class X(val x: Int)
+}
+
+object Test extends App {
+ val decls = typeOf[C].typeSymbol.typeSignature.declarations.sorted.toList.filter(sym => !sym.isTerm || (sym.isMethod && !sym.asMethod.isConstructor))
+ println(decls map (_.isImplicit))
+ val param = decls.find(_.name.toString == "d").get.asMethod.paramss.last.head
+ param.typeSignature
+ println(param.isImplicit)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-java-annotations.check b/test/files/run/reflection-java-annotations.check
new file mode 100644
index 0000000..2d37fff
--- /dev/null
+++ b/test/files/run/reflection-java-annotations.check
@@ -0,0 +1 @@
+List(JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = [101, 101], v102 = [102, 102], v103 = ['g', 'g'], v104 = [104, 104], v105 = [105L, 105L], v106 = [106.0, 106.0], v107 = [107.0, 107.0], v108 = [false, true], v11 = classOf[JavaAnnottee_1], v110 = ["hello", "world"], v111 = [classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]], v112 = [FOO, BAR], v113 = [JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = ' [...]
diff --git a/test/files/run/reflection-java-annotations/JavaAnnottee_1.java b/test/files/run/reflection-java-annotations/JavaAnnottee_1.java
new file mode 100644
index 0000000..b241f5d
--- /dev/null
+++ b/test/files/run/reflection-java-annotations/JavaAnnottee_1.java
@@ -0,0 +1,47 @@
+ at JavaComplexAnnotation_1(
+ v1 = (byte)1,
+ v2 = (short)2,
+ v3 = (char)3,
+ v4 = (int)4,
+ v5 = (long)5,
+ v6 = (float)6,
+ v7 = (double)7,
+ v10 = "hello",
+ v11 = JavaAnnottee_1.class,
+ v12 = JavaSimpleEnumeration_1.FOO,
+ v13 = @JavaSimpleAnnotation_1(
+ v1 = (byte)11,
+ v2 = (short)12,
+ v3 = (char)13,
+ v4 = (int)14,
+ v5 = (long)15,
+ v6 = (float)16,
+ v7 = (double)17,
+ v10 = "world1",
+ v11 = JavaSimpleAnnotation_1.class,
+ v12 = JavaSimpleEnumeration_1.FOO
+ ),
+ v101 = {(byte)101, (byte)101},
+ v102 = {(short)102, (short)102},
+ v103 = {(char)103, (char)103},
+ v104 = {(int)104, (int)104},
+ v105 = {(long)105, (long)105},
+ v106 = {(float)106, (float)106},
+ v107 = {(double)107, (double)107},
+ v108 = {false, true},
+ v110 = {"hello", "world"},
+ v111 = {JavaSimpleAnnotation_1.class, JavaComplexAnnotation_1.class},
+ v112 = {JavaSimpleEnumeration_1.FOO, JavaSimpleEnumeration_1.BAR},
+ v113 = {@JavaSimpleAnnotation_1(
+ v1 = (byte)21,
+ v2 = (short)22,
+ v3 = (char)23,
+ v4 = (int)24,
+ v5 = (long)25,
+ v6 = (float)26,
+ v7 = (double)27,
+ v10 = "world2",
+ v11 = JavaComplexAnnotation_1.class,
+ v12 = JavaSimpleEnumeration_1.BAR
+ )})
+public class JavaAnnottee_1 {}
\ No newline at end of file
diff --git a/test/files/run/reflection-java-annotations/JavaComplexAnnotation_1.java b/test/files/run/reflection-java-annotations/JavaComplexAnnotation_1.java
new file mode 100644
index 0000000..645eeb9
--- /dev/null
+++ b/test/files/run/reflection-java-annotations/JavaComplexAnnotation_1.java
@@ -0,0 +1,34 @@
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+ at Retention(RetentionPolicy.RUNTIME)
+ at Target({ElementType.FIELD, ElementType.METHOD, ElementType.TYPE})
+public @interface JavaComplexAnnotation_1 {
+ byte v1();
+ short v2();
+ char v3();
+ int v4();
+ long v5();
+ float v6();
+ double v7();
+ boolean v8() default false;
+ // void v9();
+ String v10();
+ Class<?> v11();
+ JavaSimpleEnumeration_1 v12();
+ JavaSimpleAnnotation_1 v13();
+ byte[] v101();
+ short[] v102();
+ char[] v103();
+ int[] v104();
+ long[] v105();
+ float[] v106();
+ double[] v107();
+ boolean[] v108();
+ String[] v110();
+ Class<?>[] v111();
+ JavaSimpleEnumeration_1[] v112();
+ JavaSimpleAnnotation_1[] v113();
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-java-annotations/JavaSimpleAnnotation_1.java b/test/files/run/reflection-java-annotations/JavaSimpleAnnotation_1.java
new file mode 100644
index 0000000..c0f92fa
--- /dev/null
+++ b/test/files/run/reflection-java-annotations/JavaSimpleAnnotation_1.java
@@ -0,0 +1,21 @@
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+ at Retention(RetentionPolicy.RUNTIME)
+ at Target({ElementType.FIELD, ElementType.METHOD, ElementType.TYPE})
+public @interface JavaSimpleAnnotation_1 {
+ byte v1();
+ short v2();
+ char v3();
+ int v4();
+ long v5();
+ float v6();
+ double v7();
+ boolean v8() default false;
+ // void v9();
+ String v10();
+ Class<?> v11();
+ JavaSimpleEnumeration_1 v12();
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-java-annotations/JavaSimpleEnumeration_1.java b/test/files/run/reflection-java-annotations/JavaSimpleEnumeration_1.java
new file mode 100644
index 0000000..5f4dcce
--- /dev/null
+++ b/test/files/run/reflection-java-annotations/JavaSimpleEnumeration_1.java
@@ -0,0 +1,4 @@
+public enum JavaSimpleEnumeration_1 {
+ FOO,
+ BAR
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-java-annotations/Test_2.scala b/test/files/run/reflection-java-annotations/Test_2.scala
new file mode 100644
index 0000000..d2c3157
--- /dev/null
+++ b/test/files/run/reflection-java-annotations/Test_2.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ val sym = typeOf[JavaAnnottee_1].typeSymbol
+ sym.typeSignature
+ sym.annotations foreach (_.javaArgs)
+ println(sym.annotations)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-java-crtp.check b/test/files/run/reflection-java-crtp.check
new file mode 100644
index 0000000..3e5a77e
--- /dev/null
+++ b/test/files/run/reflection-java-crtp.check
@@ -0,0 +1 @@
+(type E,type E,true)
diff --git a/test/files/run/reflection-java-crtp/JavaSimpleEnumeration_1.java b/test/files/run/reflection-java-crtp/JavaSimpleEnumeration_1.java
new file mode 100644
index 0000000..3924614
--- /dev/null
+++ b/test/files/run/reflection-java-crtp/JavaSimpleEnumeration_1.java
@@ -0,0 +1,4 @@
+enum JavaSimpleEnumeration_1 {
+ FOO,
+ BAR
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-java-crtp/Main_2.scala b/test/files/run/reflection-java-crtp/Main_2.scala
new file mode 100644
index 0000000..fb5668f
--- /dev/null
+++ b/test/files/run/reflection-java-crtp/Main_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ val enum = typeOf[JavaSimpleEnumeration_1].baseClasses(1).asClass
+ // make sure that the E's in Enum<E extends Enum<E>> are represented by the same symbol
+ val e1 = enum.typeParams(0).asType
+ val TypeBounds(_, TypeRef(_, _, List(TypeRef(_, e2: TypeSymbol, _)))) = e1.typeSignature
+ println(e1, e2, e1 eq e2)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-magicsymbols-invoke.check b/test/files/run/reflection-magicsymbols-invoke.check
new file mode 100644
index 0000000..352aefa
--- /dev/null
+++ b/test/files/run/reflection-magicsymbols-invoke.check
@@ -0,0 +1,124 @@
+============
+Any
+it's important to print the list of Any's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+method !=: (x$1: Any)Boolean
+method ##: ()Int
+method ==: (x$1: Any)Boolean
+method asInstanceOf: [T0]=> T0
+method equals: (x$1: Any)Boolean
+method getClass: ()java.lang.Class[_]
+method hashCode: ()Int
+method isInstanceOf: [T0]=> Boolean
+method toString: ()java.lang.String
+testing Any.!=: false
+testing Any.##: 50
+testing Any.==: true
+testing Any.asInstanceOf: class scala.ScalaReflectionException: Any.asInstanceOf requires a type argument, it cannot be invoked with mirrors
+testing Any.asInstanceOf: class scala.ScalaReflectionException: scala.Any.asInstanceOf[T0]: T0 takes 0 arguments
+testing Any.equals: true
+testing Any.getClass: class java.lang.String
+testing Any.hashCode: 50
+testing Any.isInstanceOf: class scala.ScalaReflectionException: Any.isInstanceOf requires a type argument, it cannot be invoked with mirrors
+testing Any.isInstanceOf: class scala.ScalaReflectionException: scala.Any.isInstanceOf[T0]: Boolean takes 0 arguments
+testing Any.toString: 2
+============
+AnyVal
+it's important to print the list of AnyVal's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+constructor AnyVal: ()AnyVal
+method getClass: ()Class[_ <: AnyVal]
+testing AnyVal.<init>: class java.lang.InstantiationException: null
+testing AnyVal.getClass: class scala.ScalaReflectionException: expected a member of class Integer, you provided method scala.AnyVal.getClass
+============
+AnyRef
+it's important to print the list of AnyRef's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+constructor Object: ()java.lang.Object
+method !=: (x$1: Any)Boolean
+method !=: (x$1: AnyRef)Boolean
+method ##: ()Int
+method $asInstanceOf: [T0]()T0
+method $isInstanceOf: [T0]()Boolean
+method ==: (x$1: Any)Boolean
+method ==: (x$1: AnyRef)Boolean
+method asInstanceOf: [T0]=> T0
+method clone: ()java.lang.Object
+method eq: (x$1: AnyRef)Boolean
+method equals: (x$1: Any)Boolean
+method finalize: ()Unit
+method getClass: ()java.lang.Class[_]
+method hashCode: ()Int
+method isInstanceOf: [T0]=> Boolean
+method ne: (x$1: AnyRef)Boolean
+method notify: ()Unit
+method notifyAll: ()Unit
+method synchronized: [T0](x$1: T0)T0
+method toString: ()java.lang.String
+method wait: ()Unit
+method wait: (x$1: Long)Unit
+method wait: (x$1: Long, x$2: Int)Unit
+testing Object.!=: false
+testing Object.##: 50
+testing Object.$asInstanceOf: class scala.ScalaReflectionException: AnyRef.$asInstanceOf is an internal method, it cannot be invoked with mirrors
+testing Object.$asInstanceOf: class scala.ScalaReflectionException: java.lang.Object.$asInstanceOf[T0](): T0 takes 0 arguments
+testing Object.$isInstanceOf: class scala.ScalaReflectionException: AnyRef.$isInstanceOf is an internal method, it cannot be invoked with mirrors
+testing Object.$isInstanceOf: class scala.ScalaReflectionException: java.lang.Object.$isInstanceOf[T0](): Boolean takes 0 arguments
+testing Object.==: true
+testing Object.clone: class java.lang.CloneNotSupportedException: java.lang.String
+testing Object.eq: true
+testing Object.equals: true
+testing Object.finalize: ()
+testing Object.getClass: class java.lang.String
+testing Object.hashCode: 50
+testing Object.ne: false
+testing Object.notify: class java.lang.IllegalMonitorStateException: null
+testing Object.notifyAll: class java.lang.IllegalMonitorStateException: null
+testing Object.synchronized: 2
+testing Object.toString: 2
+TODO: also test AnyRef.wait overloads
+============
+Array
+it's important to print the list of Array's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+constructor Array: (_length: Int)Array[T]
+constructor Cloneable: ()java.lang.Cloneable
+method !=: (x$1: Any)Boolean
+method !=: (x$1: AnyRef)Boolean
+method ##: ()Int
+method $asInstanceOf: [T0]()T0
+method $isInstanceOf: [T0]()Boolean
+method ==: (x$1: Any)Boolean
+method ==: (x$1: AnyRef)Boolean
+method apply: (i: Int)T
+method asInstanceOf: [T0]=> T0
+method clone: ()Array[T]
+method eq: (x$1: AnyRef)Boolean
+method equals: (x$1: Any)Boolean
+method finalize: ()Unit
+method getClass: ()java.lang.Class[_]
+method hashCode: ()Int
+method isInstanceOf: [T0]=> Boolean
+method length: => Int
+method ne: (x$1: AnyRef)Boolean
+method notify: ()Unit
+method notifyAll: ()Unit
+method synchronized: [T0](x$1: T0)T0
+method toString: ()java.lang.String
+method update: (i: Int, x: T)Unit
+method wait: ()Unit
+method wait: (x$1: Long)Unit
+method wait: (x$1: Long, x$2: Int)Unit
+value _length: Int
+testing Array.length: 2
+testing Array.apply: 1
+testing Array.update: ()
+testing Array.clone: List(1, 2)
+============
+Other
+testing String.+: 23
+============
+CTM
+testing Predef.classOf: class scala.ScalaReflectionException: Predef.classOf is a compile-time function, it cannot be invoked with mirrors
+testing Predef.classOf: class scala.ScalaReflectionException: scala.Predef.classOf[T]: Class[T] takes 0 arguments
+testing Universe.reify: class scala.ScalaReflectionException: scala.reflect.api.Universe.reify is a macro, i.e. a compile-time function, it cannot be invoked with mirrors
diff --git a/test/files/run/reflection-magicsymbols-invoke.scala b/test/files/run/reflection-magicsymbols-invoke.scala
new file mode 100644
index 0000000..5f39370
--- /dev/null
+++ b/test/files/run/reflection-magicsymbols-invoke.scala
@@ -0,0 +1,100 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe.definitions._
+import scala.reflect.runtime.{currentMirror => cm}
+
+package scala {
+ object ExceptionUtils {
+ def unwrapThrowable(ex: Throwable): Throwable = scala.reflect.runtime.ReflectionUtils.unwrapThrowable(ex)
+ }
+}
+
+object Test extends App {
+ def key(sym: Symbol) = sym + ": " + sym.typeSignature
+ def test(tpe: Type, receiver: Any, method: String, args: Any*) {
+ def wrap[T](op: => T) =
+ try {
+ var result = op.asInstanceOf[AnyRef]
+ if (scala.runtime.ScalaRunTime.isArray(result))
+ result = scala.runtime.ScalaRunTime.toObjectArray(result).toList
+ println(result)
+ } catch {
+ case ex: Throwable =>
+ val realex = scala.ExceptionUtils.unwrapThrowable(ex)
+ println(realex.getClass + ": " + realex.getMessage)
+ }
+ print(s"testing ${tpe.typeSymbol.name}.$method: ")
+ wrap({
+ if (method == nme.CONSTRUCTOR.toString) {
+ val ctor = tpe.declaration(nme.CONSTRUCTOR).asMethod
+ cm.reflectClass(ctor.owner.asClass).reflectConstructor(ctor)(args: _*)
+ } else {
+ val meth = tpe.declaration(newTermName(method).encodedName.toTermName).asMethod
+ cm.reflect(receiver).reflectMethod(meth)(args: _*)
+ }
+ })
+ }
+
+ println("============\nAny")
+ println("it's important to print the list of Any's members")
+ println("if some of them change (possibly, adding and/or removing magic symbols), we must update this test")
+ typeOf[Any].members.toList.sortBy(key).foreach(sym => println(key(sym)))
+ test(typeOf[Any], "2", "!=", "2")
+ test(typeOf[Any], "2", "##")
+ test(typeOf[Any], "2", "==", "2")
+ test(typeOf[Any], "2", "asInstanceOf")
+ test(typeOf[Any], "2", "asInstanceOf", typeOf[String])
+ test(typeOf[Any], "2", "equals", "2")
+ test(typeOf[Any], "2", "getClass")
+ test(typeOf[Any], "2", "hashCode")
+ test(typeOf[Any], "2", "isInstanceOf")
+ test(typeOf[Any], "2", "isInstanceOf", typeOf[String])
+ test(typeOf[Any], "2", "toString")
+
+ println("============\nAnyVal")
+ println("it's important to print the list of AnyVal's members")
+ println("if some of them change (possibly, adding and/or removing magic symbols), we must update this test")
+ typeOf[AnyVal].declarations.toList.sortBy(key).foreach(sym => println(key(sym)))
+ test(typeOf[AnyVal], null, "<init>")
+ test(typeOf[AnyVal], 2, "getClass")
+
+ println("============\nAnyRef")
+ println("it's important to print the list of AnyRef's members")
+ println("if some of them change (possibly, adding and/or removing magic symbols), we must update this test")
+ typeOf[AnyRef].members.toList.sortBy(key).foreach(sym => println(key(sym)))
+ test(typeOf[AnyRef], "2", "!=", "2")
+ test(typeOf[AnyRef], "2", "##")
+ test(typeOf[AnyRef], "2", "$asInstanceOf")
+ test(typeOf[AnyRef], "2", "$asInstanceOf", typeOf[String])
+ test(typeOf[AnyRef], "2", "$isInstanceOf")
+ test(typeOf[AnyRef], "2", "$isInstanceOf", typeOf[String])
+ test(typeOf[AnyRef], "2", "==", "2")
+ test(typeOf[AnyRef], "2", "clone")
+ test(typeOf[AnyRef], "2", "eq", "2")
+ test(typeOf[AnyRef], "2", "equals", "2")
+ test(typeOf[AnyRef], "2", "finalize")
+ test(typeOf[AnyRef], "2", "getClass")
+ test(typeOf[AnyRef], "2", "hashCode")
+ test(typeOf[AnyRef], "2", "ne", "2")
+ test(typeOf[AnyRef], "2", "notify")
+ test(typeOf[AnyRef], "2", "notifyAll")
+ test(typeOf[AnyRef], "2", "synchronized", "2")
+ test(typeOf[AnyRef], "2", "toString")
+ println("TODO: also test AnyRef.wait overloads")
+
+ println("============\nArray")
+ println("it's important to print the list of Array's members")
+ println("if some of them change (possibly, adding and/or removing magic symbols), we must update this test")
+ ArrayClass.typeSignature.members.toList.sortBy(key).foreach(sym => println(key(sym)))
+ test(ArrayClass.typeSignature, Array(1, 2), "length")
+ test(ArrayClass.typeSignature, Array(1, 2), "apply", 0)
+ test(ArrayClass.typeSignature, Array(1, 2), "update", 0, 0)
+ test(ArrayClass.typeSignature, Array(1, 2), "clone")
+
+ println("============\nOther")
+ test(typeOf[String], "2", "+", 3)
+
+ println("============\nCTM")
+ test(PredefModule.moduleClass.typeSignature, Predef, "classOf")
+ test(PredefModule.moduleClass.typeSignature, Predef, "classOf", typeOf[String])
+ test(typeOf[scala.reflect.api.Universe], scala.reflect.runtime.universe, "reify", "2")
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-magicsymbols-repl.check b/test/files/run/reflection-magicsymbols-repl.check
new file mode 100644
index 0000000..2535e3f
--- /dev/null
+++ b/test/files/run/reflection-magicsymbols-repl.check
@@ -0,0 +1,39 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> class A {
+ def foo1(x: Int*) = ???
+ def foo2(x: => Int) = ???
+ def foo3(x: Any) = ???
+ def foo4(x: AnyRef) = ???
+ def foo5(x: AnyVal) = ???
+ def foo6(x: Null) = ???
+ def foo7(x: Nothing) = ???
+ def foo8(x: Singleton) = ???
+}
+defined class A
+
+scala> def test(n: Int): Unit = {
+ val sig = typeOf[A] member newTermName("foo" + n) typeSignature
+ val x = sig.asInstanceOf[MethodType].params.head
+ println(x.typeSignature)
+}
+warning: there were 1 feature warning(s); re-run with -feature for details
+test: (n: Int)Unit
+
+scala> for (i <- 1 to 8) test(i)
+scala.Int*
+=> scala.Int
+scala.Any
+scala.AnyRef
+scala.AnyVal
+scala.Null
+scala.Nothing
+scala.Singleton
+
+scala>
diff --git a/test/files/run/reflection-magicsymbols-repl.scala b/test/files/run/reflection-magicsymbols-repl.scala
new file mode 100644
index 0000000..26127b8
--- /dev/null
+++ b/test/files/run/reflection-magicsymbols-repl.scala
@@ -0,0 +1,23 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |import scala.reflect.runtime.universe._
+ |class A {
+ | def foo1(x: Int*) = ???
+ | def foo2(x: => Int) = ???
+ | def foo3(x: Any) = ???
+ | def foo4(x: AnyRef) = ???
+ | def foo5(x: AnyVal) = ???
+ | def foo6(x: Null) = ???
+ | def foo7(x: Nothing) = ???
+ | def foo8(x: Singleton) = ???
+ |}
+ |def test(n: Int): Unit = {
+ | val sig = typeOf[A] member newTermName("foo" + n) typeSignature
+ | val x = sig.asInstanceOf[MethodType].params.head
+ | println(x.typeSignature)
+ |}
+ |for (i <- 1 to 8) test(i)
+ |""".stripMargin
+}
diff --git a/test/files/run/reflection-magicsymbols-vanilla.check b/test/files/run/reflection-magicsymbols-vanilla.check
new file mode 100644
index 0000000..d3ff152
--- /dev/null
+++ b/test/files/run/reflection-magicsymbols-vanilla.check
@@ -0,0 +1,8 @@
+Int*
+=> Int
+Any
+AnyRef
+AnyVal
+Null
+Nothing
+Singleton
diff --git a/test/files/run/reflection-magicsymbols-vanilla.scala b/test/files/run/reflection-magicsymbols-vanilla.scala
new file mode 100644
index 0000000..32819dc
--- /dev/null
+++ b/test/files/run/reflection-magicsymbols-vanilla.scala
@@ -0,0 +1,20 @@
+class A {
+ def foo1(x: Int*) = ???
+ def foo2(x: => Int) = ???
+ def foo3(x: Any) = ???
+ def foo4(x: AnyRef) = ???
+ def foo5(x: AnyVal) = ???
+ def foo6(x: Null) = ???
+ def foo7(x: Nothing) = ???
+ def foo8(x: Singleton) = ???
+}
+
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ def test(n: Int): Unit = {
+ val sig = typeOf[A] member newTermName("foo" + n) typeSignature
+ val x = sig.asInstanceOf[MethodType].params.head
+ println(x.typeSignature)
+ }
+ for (i <- 1 to 8) test(i)
+}
diff --git a/test/files/run/reflection-mem-glbs.scala b/test/files/run/reflection-mem-glbs.scala
new file mode 100644
index 0000000..3f29a91
--- /dev/null
+++ b/test/files/run/reflection-mem-glbs.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.MemoryTest
+
+trait A { type T <: A }
+trait B { type T <: B }
+
+object Test extends MemoryTest {
+ override def maxDelta = 10
+ override def calcsPerIter = 50000
+ override def calc() {
+ import scala.reflect.runtime.universe._
+ glb(List(typeOf[A], typeOf[B]))
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-mem-tags.scala b/test/files/run/reflection-mem-tags.scala
new file mode 100644
index 0000000..8815e7d
--- /dev/null
+++ b/test/files/run/reflection-mem-tags.scala
@@ -0,0 +1,17 @@
+import scala.tools.partest.MemoryTest
+
+trait A { type T <: A }
+trait B { type T <: B }
+
+object Test extends MemoryTest {
+ override def maxDelta = 10
+ override def calcsPerIter = 100000
+ override def calc() {
+ import scala.reflect.runtime.universe._
+ def foo = {
+ class A { def x = 2; def y: A = new A }
+ weakTypeOf[A { def z: Int }]
+ }
+ foo
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-mem-typecheck.scala b/test/files/run/reflection-mem-typecheck.scala
new file mode 100644
index 0000000..a312c2c
--- /dev/null
+++ b/test/files/run/reflection-mem-typecheck.scala
@@ -0,0 +1,26 @@
+import scala.tools.partest.MemoryTest
+
+trait A { type T <: A }
+trait B { type T <: B }
+
+object Test extends MemoryTest {
+ lazy val tb = {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ cm.mkToolBox()
+ }
+
+ override def maxDelta = 10
+ override def calcsPerIter = 8
+ override def calc() {
+ var snippet = """
+ trait A { type T <: A }
+ trait B { type T <: B }
+ def foo[T](x: List[T]) = x
+ foo(List(new A {}, new B {}))
+ """.trim
+ snippet = snippet + "\n" + (List.fill(50)(snippet.split("\n").last) mkString "\n")
+ tb.typeCheck(tb.parse(snippet))
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-methodsymbol-params.check b/test/files/run/reflection-methodsymbol-params.check
new file mode 100644
index 0000000..554e72d
--- /dev/null
+++ b/test/files/run/reflection-methodsymbol-params.check
@@ -0,0 +1,8 @@
+List()
+List(List())
+List(List(value x))
+List(List(value x), List(value y))
+List()
+List(List())
+List(List(value x))
+List(List(value x), List(value y))
diff --git a/test/files/run/reflection-methodsymbol-params.scala b/test/files/run/reflection-methodsymbol-params.scala
new file mode 100644
index 0000000..45b1f96
--- /dev/null
+++ b/test/files/run/reflection-methodsymbol-params.scala
@@ -0,0 +1,24 @@
+import scala.reflect.runtime.universe._
+
+class C {
+ def x1: Int = ???
+ def x2(): Int = ???
+ def x3(x: Int): Int = ???
+ def x4(x: Int)(y: Int): Int = ???
+
+ def y1[T]: Int = ???
+ def y2[T](): Int = ???
+ def y3[T](x: Int): Int = ???
+ def y4[T](x: Int)(y: Int): Int = ???
+}
+
+object Test extends App {
+ println(typeOf[C].member(newTermName("x1")).asMethod.paramss)
+ println(typeOf[C].member(newTermName("x2")).asMethod.paramss)
+ println(typeOf[C].member(newTermName("x3")).asMethod.paramss)
+ println(typeOf[C].member(newTermName("x4")).asMethod.paramss)
+ println(typeOf[C].member(newTermName("y1")).asMethod.paramss)
+ println(typeOf[C].member(newTermName("y2")).asMethod.paramss)
+ println(typeOf[C].member(newTermName("y3")).asMethod.paramss)
+ println(typeOf[C].member(newTermName("y4")).asMethod.paramss)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-methodsymbol-returntype.check b/test/files/run/reflection-methodsymbol-returntype.check
new file mode 100644
index 0000000..97ea029
--- /dev/null
+++ b/test/files/run/reflection-methodsymbol-returntype.check
@@ -0,0 +1,8 @@
+Int
+Int
+Int
+Int
+Int
+Int
+Int
+Int
diff --git a/test/files/run/reflection-methodsymbol-returntype.scala b/test/files/run/reflection-methodsymbol-returntype.scala
new file mode 100644
index 0000000..392754d
--- /dev/null
+++ b/test/files/run/reflection-methodsymbol-returntype.scala
@@ -0,0 +1,24 @@
+import scala.reflect.runtime.universe._
+
+class C {
+ def x1: Int = ???
+ def x2(): Int = ???
+ def x3(x: Int): Int = ???
+ def x4(x: Int)(y: Int): Int = ???
+
+ def y1[T]: Int = ???
+ def y2[T](): Int = ???
+ def y3[T](x: Int): Int = ???
+ def y4[T](x: Int)(y: Int): Int = ???
+}
+
+object Test extends App {
+ println(typeOf[C].member(newTermName("x1")).asMethod.returnType)
+ println(typeOf[C].member(newTermName("x2")).asMethod.returnType)
+ println(typeOf[C].member(newTermName("x3")).asMethod.returnType)
+ println(typeOf[C].member(newTermName("x4")).asMethod.returnType)
+ println(typeOf[C].member(newTermName("y1")).asMethod.returnType)
+ println(typeOf[C].member(newTermName("y2")).asMethod.returnType)
+ println(typeOf[C].member(newTermName("y3")).asMethod.returnType)
+ println(typeOf[C].member(newTermName("y4")).asMethod.returnType)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-methodsymbol-typeparams.check b/test/files/run/reflection-methodsymbol-typeparams.check
new file mode 100644
index 0000000..f041887
--- /dev/null
+++ b/test/files/run/reflection-methodsymbol-typeparams.check
@@ -0,0 +1,8 @@
+List()
+List()
+List()
+List()
+List(type T)
+List(type T)
+List(type T)
+List(type T)
diff --git a/test/files/run/reflection-methodsymbol-typeparams.scala b/test/files/run/reflection-methodsymbol-typeparams.scala
new file mode 100644
index 0000000..bb0a3c3
--- /dev/null
+++ b/test/files/run/reflection-methodsymbol-typeparams.scala
@@ -0,0 +1,24 @@
+import scala.reflect.runtime.universe._
+
+class C {
+ def x1: Int = ???
+ def x2(): Int = ???
+ def x3(x: Int): Int = ???
+ def x4(x: Int)(y: Int): Int = ???
+
+ def y1[T]: Int = ???
+ def y2[T](): Int = ???
+ def y3[T](x: Int): Int = ???
+ def y4[T](x: Int)(y: Int): Int = ???
+}
+
+object Test extends App {
+ println(typeOf[C].member(newTermName("x1")).asMethod.typeParams)
+ println(typeOf[C].member(newTermName("x2")).asMethod.typeParams)
+ println(typeOf[C].member(newTermName("x3")).asMethod.typeParams)
+ println(typeOf[C].member(newTermName("x4")).asMethod.typeParams)
+ println(typeOf[C].member(newTermName("y1")).asMethod.typeParams)
+ println(typeOf[C].member(newTermName("y2")).asMethod.typeParams)
+ println(typeOf[C].member(newTermName("y3")).asMethod.typeParams)
+ println(typeOf[C].member(newTermName("y4")).asMethod.typeParams)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-modulemirror-inner-badpath.check b/test/files/run/reflection-modulemirror-inner-badpath.check
new file mode 100644
index 0000000..a8e7397
--- /dev/null
+++ b/test/files/run/reflection-modulemirror-inner-badpath.check
@@ -0,0 +1,2 @@
+object R is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror
+()
diff --git a/test/files/run/reflection-modulemirror-inner-badpath.scala b/test/files/run/reflection-modulemirror-inner-badpath.scala
new file mode 100644
index 0000000..de200c5
--- /dev/null
+++ b/test/files/run/reflection-modulemirror-inner-badpath.scala
@@ -0,0 +1,24 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.ClassTag
+
+class Foo{
+ object R { override def toString = "R" }
+ def foo = {
+ val classTag = implicitly[ClassTag[R.type]]
+ val sym = cm.moduleSymbol(classTag.runtimeClass)
+ try {
+ val cls = cm.reflectModule(sym)
+ cls.instance
+ println("this indicates a failure")
+ } catch {
+ case ex: Throwable =>
+ println(ex.getMessage)
+ }
+ }
+}
+
+object Test extends App{
+ val foo = new Foo
+ println(foo.foo)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-modulemirror-inner-good.check b/test/files/run/reflection-modulemirror-inner-good.check
new file mode 100644
index 0000000..331bae0
--- /dev/null
+++ b/test/files/run/reflection-modulemirror-inner-good.check
@@ -0,0 +1 @@
+R
diff --git a/test/files/run/reflection-modulemirror-inner-good.scala b/test/files/run/reflection-modulemirror-inner-good.scala
new file mode 100644
index 0000000..b5540db
--- /dev/null
+++ b/test/files/run/reflection-modulemirror-inner-good.scala
@@ -0,0 +1,23 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.ClassTag
+
+class Foo{
+ object R { override def toString = "R" }
+ def foo = {
+ val classTag = implicitly[ClassTag[R.type]]
+ val sym = cm.moduleSymbol(classTag.runtimeClass)
+ val cls = cm.reflect(this).reflectModule(sym)
+ try {
+ cls.instance
+ } catch {
+ case ex: Throwable =>
+ println(ex.getMessage)
+ }
+ }
+}
+
+object Test extends App{
+ val foo = new Foo
+ println(foo.foo)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-modulemirror-nested-badpath.check b/test/files/run/reflection-modulemirror-nested-badpath.check
new file mode 100644
index 0000000..3ef94e1
--- /dev/null
+++ b/test/files/run/reflection-modulemirror-nested-badpath.check
@@ -0,0 +1,2 @@
+object R is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror
+()
diff --git a/test/files/run/reflection-modulemirror-nested-badpath.scala b/test/files/run/reflection-modulemirror-nested-badpath.scala
new file mode 100644
index 0000000..43cae00
--- /dev/null
+++ b/test/files/run/reflection-modulemirror-nested-badpath.scala
@@ -0,0 +1,26 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.ClassTag
+
+class Foo{
+ import Test._
+ def foo = {
+ val classTag = implicitly[ClassTag[R.type]]
+ val sym = cm.moduleSymbol(classTag.runtimeClass)
+ try {
+ val cls = cm.reflect(this).reflectModule(sym)
+ cls.instance
+ println("this indicates a failure")
+ } catch {
+ case ex: Throwable =>
+ println(ex.getMessage)
+ }
+ }
+
+}
+
+object Test extends App{
+ object R { override def toString = "R" }
+ val foo = new Foo
+ println(foo.foo)
+}
diff --git a/test/files/run/reflection-modulemirror-nested-good.check b/test/files/run/reflection-modulemirror-nested-good.check
new file mode 100644
index 0000000..331bae0
--- /dev/null
+++ b/test/files/run/reflection-modulemirror-nested-good.check
@@ -0,0 +1 @@
+R
diff --git a/test/files/run/reflection-modulemirror-nested-good.scala b/test/files/run/reflection-modulemirror-nested-good.scala
new file mode 100644
index 0000000..8390bba
--- /dev/null
+++ b/test/files/run/reflection-modulemirror-nested-good.scala
@@ -0,0 +1,24 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.ClassTag
+
+class Foo{
+ import Test._
+ def foo = {
+ val classTag = implicitly[ClassTag[R.type]]
+ val sym = cm.moduleSymbol(classTag.runtimeClass)
+ val cls = cm.reflectModule(sym)
+ try {
+ cls.instance
+ } catch {
+ case ex: Throwable =>
+ println(ex.getMessage)
+ }
+ }
+}
+
+object Test extends App{
+ object R { override def toString = "R" }
+ val foo = new Foo
+ println(foo.foo)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-modulemirror-toplevel-badpath.check b/test/files/run/reflection-modulemirror-toplevel-badpath.check
new file mode 100644
index 0000000..3ef94e1
--- /dev/null
+++ b/test/files/run/reflection-modulemirror-toplevel-badpath.check
@@ -0,0 +1,2 @@
+object R is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror
+()
diff --git a/test/files/run/reflection-modulemirror-toplevel-badpath.scala b/test/files/run/reflection-modulemirror-toplevel-badpath.scala
new file mode 100644
index 0000000..8b2994c
--- /dev/null
+++ b/test/files/run/reflection-modulemirror-toplevel-badpath.scala
@@ -0,0 +1,26 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.ClassTag
+
+object R { override def toString = "R" }
+
+class Foo{
+ import Test._
+ def foo = {
+ val classTag = implicitly[ClassTag[R.type]]
+ val sym = cm.moduleSymbol(classTag.runtimeClass)
+ try {
+ val cls = cm.reflect(this).reflectModule(sym)
+ cls.instance
+ println("this indicates a failure")
+ } catch {
+ case ex: Throwable =>
+ println(ex.getMessage)
+ }
+ }
+}
+
+object Test extends App{
+ val foo = new Foo
+ println(foo.foo)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-modulemirror-toplevel-good.check b/test/files/run/reflection-modulemirror-toplevel-good.check
new file mode 100644
index 0000000..ac044e5
--- /dev/null
+++ b/test/files/run/reflection-modulemirror-toplevel-good.check
@@ -0,0 +1 @@
+R
\ No newline at end of file
diff --git a/test/files/run/reflection-modulemirror-toplevel-good.scala b/test/files/run/reflection-modulemirror-toplevel-good.scala
new file mode 100644
index 0000000..0663136
--- /dev/null
+++ b/test/files/run/reflection-modulemirror-toplevel-good.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.ClassTag
+
+object R { override def toString = "R" }
+
+class Foo{
+ import Test._
+ def foo = {
+ val classTag = implicitly[ClassTag[R.type]]
+ val sym = cm.moduleSymbol(classTag.runtimeClass)
+ val cls = cm.reflectModule(sym)
+ cls.instance
+ }
+}
+
+object Test extends App{
+ val foo = new Foo
+ println(foo.foo)
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-names.check b/test/files/run/reflection-names.check
new file mode 100644
index 0000000..f8cb78c
--- /dev/null
+++ b/test/files/run/reflection-names.check
@@ -0,0 +1,4 @@
+(java.lang.String,bc)
+(scala.reflect.internal.Names$TermName_R,bc)
+(scala.reflect.internal.Names$TypeName_R,bc)
+(scala.reflect.internal.Names$TypeName_R,bc)
diff --git a/test/files/run/reflection-names.scala b/test/files/run/reflection-names.scala
new file mode 100644
index 0000000..2433c84
--- /dev/null
+++ b/test/files/run/reflection-names.scala
@@ -0,0 +1,15 @@
+import scala.tools.nsc._
+
+object Test {
+ val global = new Global(new Settings())
+ import global._
+
+ val x1 = "abc" drop 1 // "bc": String
+ val x2 = ("abc": TermName) drop 1 // "bc": TermName
+ val x3 = ("abc": TypeName) drop 1 // "bc": TypeName
+ val x4 = (("abc": TypeName): Name) drop 1 // "bc": Name
+
+ def main(args: Array[String]): Unit = {
+ List(x1, x2, x3, x4) foreach (x => println(x.getClass.getName, x))
+ }
+}
diff --git a/test/files/run/reflection-repl-classes.check b/test/files/run/reflection-repl-classes.check
new file mode 100644
index 0000000..2dd96a9
--- /dev/null
+++ b/test/files/run/reflection-repl-classes.check
@@ -0,0 +1,35 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> class A
+defined class A
+
+scala>
+
+scala> class B {
+ def foo(x: A) = 1
+}
+defined class B
+
+scala>
+
+scala> object defs {
+ val cm = reflect.runtime.currentMirror
+ val u = cm.universe
+ val im = cm.reflect(new B)
+ val method = im.symbol.typeSignature.member(u.newTermName("foo")).asMethod
+ val mm = im.reflectMethod(method)
+}
+defined module defs
+
+scala> import defs._
+import defs._
+
+scala>
+
+scala> mm(new A)
+res0: Any = 1
+
+scala>
diff --git a/test/files/run/reflection-repl-classes.scala b/test/files/run/reflection-repl-classes.scala
new file mode 100644
index 0000000..80e332c
--- /dev/null
+++ b/test/files/run/reflection-repl-classes.scala
@@ -0,0 +1,22 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |class A
+ |
+ |class B {
+ | def foo(x: A) = 1
+ |}
+ |
+ |object defs {
+ | val cm = reflect.runtime.currentMirror
+ | val u = cm.universe
+ | val im = cm.reflect(new B)
+ | val method = im.symbol.typeSignature.member(u.newTermName("foo")).asMethod
+ | val mm = im.reflectMethod(method)
+ |}
+ |import defs._
+ |
+ |mm(new A)
+ |""".stripMargin
+}
diff --git a/test/files/run/reflection-repl-elementary.check b/test/files/run/reflection-repl-elementary.check
new file mode 100644
index 0000000..d9133f7
--- /dev/null
+++ b/test/files/run/reflection-repl-elementary.check
@@ -0,0 +1,9 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> scala.reflect.runtime.universe.typeOf[List[Nothing]]
+res0: reflect.runtime.universe.Type = scala.List[Nothing]
+
+scala>
diff --git a/test/files/run/reflection-repl-elementary.scala b/test/files/run/reflection-repl-elementary.scala
new file mode 100644
index 0000000..72b65a1
--- /dev/null
+++ b/test/files/run/reflection-repl-elementary.scala
@@ -0,0 +1,7 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |scala.reflect.runtime.universe.typeOf[List[Nothing]]
+ |""".stripMargin
+}
diff --git a/test/files/run/reflection-sanitychecks.check b/test/files/run/reflection-sanitychecks.check
new file mode 100644
index 0000000..821457a
--- /dev/null
+++ b/test/files/run/reflection-sanitychecks.check
@@ -0,0 +1,30 @@
+=========members of C in a mirror of D=========
+field #1: 11
+method #1: 22
+field #2: 13
+method #2: 14
+constructor #1: scala.ScalaReflectionException: expected a constructor of class D, you provided method bar
+constructor #2: scala.ScalaReflectionException: expected a constructor of class D, you provided constructor C
+class: CC
+object: CO
+
+=========members of D in a mirror of D=========
+field #1: 21
+method #1: 22
+field #2: 13
+method #2: 14
+constructor #1: scala.ScalaReflectionException: expected a constructor of class D, you provided method bar
+constructor #2: an instance of class D
+class: CC
+object: CO
+
+=========members of E in a mirror of D=========
+field #1: scala.ScalaReflectionException: expected a member of class D, you provided value E.foo
+method #1: scala.ScalaReflectionException: expected a member of class D, you provided method E.bar
+field #2: scala.ScalaReflectionException: expected a member of class D, you provided value E.quux
+method #2: scala.ScalaReflectionException: expected a member of class D, you provided method E.baz
+constructor #1: scala.ScalaReflectionException: expected a constructor of class D, you provided method bar
+constructor #2: scala.ScalaReflectionException: expected a constructor of class D, you provided constructor E
+class: scala.ScalaReflectionException: expected a member of class D, you provided class E.C
+object: scala.ScalaReflectionException: expected a member of class D, you provided object E.O
+
diff --git a/test/files/run/reflection-sanitychecks.scala b/test/files/run/reflection-sanitychecks.scala
new file mode 100644
index 0000000..f817f23
--- /dev/null
+++ b/test/files/run/reflection-sanitychecks.scala
@@ -0,0 +1,49 @@
+class C {
+ val foo = 11
+ def bar = 12
+ val quux = 13
+ def baz = 14
+ class C { override def toString = "CC" }
+ object O { override def toString = "CO" }
+ override def toString = "an instance of class C"
+}
+
+class D extends C {
+ override val foo = 21
+ override def bar = 22
+ override def toString = "an instance of class D"
+}
+
+class E {
+ val foo = 31
+ def bar = 32
+ val quux = 33
+ def baz = 34
+ class C { override def toString = "EC" }
+ object O { override def toString = "EO" }
+ override def toString = "an instance of class E"
+}
+
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ val im = cm.reflect(new D)
+
+ def test(tpe: Type): Unit = {
+ def failsafe(action: => Any): Any = try action catch { case ex: Throwable => ex.toString }
+ println(s"=========members of ${tpe.typeSymbol.name} in a mirror of D=========")
+ println("field #1: " + failsafe(im.reflectField(tpe.member(newTermName("foo")).asTerm).get))
+ println("method #1: " + failsafe(im.reflectMethod(tpe.member(newTermName("bar")).asMethod)()))
+ println("field #2: " + failsafe(im.reflectField(tpe.member(newTermName("quux")).asTerm).get))
+ println("method #2: " + failsafe(im.reflectMethod(tpe.member(newTermName("baz")).asMethod)()))
+ println("constructor #1: " + failsafe(cm.reflectClass(im.symbol).reflectConstructor(tpe.member(newTermName("bar")).asMethod)()))
+ println("constructor #2: " + failsafe(cm.reflectClass(im.symbol).reflectConstructor(tpe.member(newTermName("<init>")).asMethod)()))
+ println("class: " + failsafe(im.reflectClass(tpe.member(newTypeName("C")).asClass).reflectConstructor(typeOf[C].member(newTypeName("C")).asClass.typeSignature.member(newTermName("<init>")).asMethod)()))
+ println("object: " + failsafe(im.reflectModule(tpe.member(newTermName("O")).asModule).instance))
+ println()
+ }
+
+ test(typeOf[C])
+ test(typeOf[D])
+ test(typeOf[E])
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-sorted-decls.check b/test/files/run/reflection-sorted-decls.check
new file mode 100644
index 0000000..415e073
--- /dev/null
+++ b/test/files/run/reflection-sorted-decls.check
@@ -0,0 +1,3 @@
+value a
+value b
+value c
diff --git a/test/files/run/reflection-sorted-decls.scala b/test/files/run/reflection-sorted-decls.scala
new file mode 100644
index 0000000..5616e10
--- /dev/null
+++ b/test/files/run/reflection-sorted-decls.scala
@@ -0,0 +1,8 @@
+object Test {
+ def main(args: Array[String]) {
+ class Foo(val a: Int, val b: Int, val c: Int)
+ import scala.reflect.runtime.{currentMirror => cm}
+ val decls = cm.classSymbol(classOf[Foo]).typeSignature.declarations
+ decls.sorted.toList.filter(!_.isMethod) foreach System.out.println
+ }
+}
diff --git a/test/files/run/reflection-sorted-members.check b/test/files/run/reflection-sorted-members.check
new file mode 100644
index 0000000..c148e19
--- /dev/null
+++ b/test/files/run/reflection-sorted-members.check
@@ -0,0 +1,4 @@
+value a
+value b
+value c
+value x
diff --git a/test/files/run/reflection-sorted-members.scala b/test/files/run/reflection-sorted-members.scala
new file mode 100644
index 0000000..a837923
--- /dev/null
+++ b/test/files/run/reflection-sorted-members.scala
@@ -0,0 +1,11 @@
+object Test {
+ def main(args: Array[String]) {
+ trait T1 { def a: Int; def c: Int }
+ trait T2 { def a: Int; def b: Int }
+ class Bar(val x: Int)
+ class Foo(val a: Int, val b: Int, val c: Int) extends Bar(a + b + c) with T1 with T2
+ import scala.reflect.runtime.{currentMirror => cm}
+ val members = cm.classSymbol(classOf[Foo]).typeSignature.members
+ members.sorted.toList.filter(!_.isMethod) foreach System.out.println
+ }
+}
diff --git a/test/files/run/reflection-valueclasses-derived.check b/test/files/run/reflection-valueclasses-derived.check
new file mode 100644
index 0000000..3382d41
--- /dev/null
+++ b/test/files/run/reflection-valueclasses-derived.check
@@ -0,0 +1,3 @@
+4
+class C
+C at 2
diff --git a/test/files/run/reflection-valueclasses-derived.scala b/test/files/run/reflection-valueclasses-derived.scala
new file mode 100644
index 0000000..6b08f98
--- /dev/null
+++ b/test/files/run/reflection-valueclasses-derived.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+class C(val x: Int) extends AnyVal {
+ def foo(y: Int) = x + y
+}
+
+object Test extends App {
+ println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(newTermName("foo")).asMethod)(2))
+ println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(newTermName("getClass")).asMethod)())
+ println(cm.reflect(new C(2)).reflectMethod(typeOf[C].member(newTermName("toString")).asMethod)())
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-valueclasses-magic.check b/test/files/run/reflection-valueclasses-magic.check
new file mode 100644
index 0000000..8ecad3e
--- /dev/null
+++ b/test/files/run/reflection-valueclasses-magic.check
@@ -0,0 +1,1456 @@
+============
+Byte
+it's important to print the list of Byte's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+constructor Byte: ()Byte
+method !=: (x$1: Any)Boolean
+method !=: (x: Byte)Boolean
+method !=: (x: Char)Boolean
+method !=: (x: Double)Boolean
+method !=: (x: Float)Boolean
+method !=: (x: Int)Boolean
+method !=: (x: Long)Boolean
+method !=: (x: Short)Boolean
+method ##: ()Int
+method %: (x: Byte)Int
+method %: (x: Char)Int
+method %: (x: Double)Double
+method %: (x: Float)Float
+method %: (x: Int)Int
+method %: (x: Long)Long
+method %: (x: Short)Int
+method &: (x: Byte)Int
+method &: (x: Char)Int
+method &: (x: Int)Int
+method &: (x: Long)Long
+method &: (x: Short)Int
+method *: (x: Byte)Int
+method *: (x: Char)Int
+method *: (x: Double)Double
+method *: (x: Float)Float
+method *: (x: Int)Int
+method *: (x: Long)Long
+method *: (x: Short)Int
+method +: (x: Byte)Int
+method +: (x: Char)Int
+method +: (x: Double)Double
+method +: (x: Float)Float
+method +: (x: Int)Int
+method +: (x: Long)Long
+method +: (x: Short)Int
+method +: (x: String)String
+method -: (x: Byte)Int
+method -: (x: Char)Int
+method -: (x: Double)Double
+method -: (x: Float)Float
+method -: (x: Int)Int
+method -: (x: Long)Long
+method -: (x: Short)Int
+method /: (x: Byte)Int
+method /: (x: Char)Int
+method /: (x: Double)Double
+method /: (x: Float)Float
+method /: (x: Int)Int
+method /: (x: Long)Long
+method /: (x: Short)Int
+method <: (x: Byte)Boolean
+method <: (x: Char)Boolean
+method <: (x: Double)Boolean
+method <: (x: Float)Boolean
+method <: (x: Int)Boolean
+method <: (x: Long)Boolean
+method <: (x: Short)Boolean
+method <<: (x: Int)Int
+method <<: (x: Long)Int
+method <=: (x: Byte)Boolean
+method <=: (x: Char)Boolean
+method <=: (x: Double)Boolean
+method <=: (x: Float)Boolean
+method <=: (x: Int)Boolean
+method <=: (x: Long)Boolean
+method <=: (x: Short)Boolean
+method ==: (x$1: Any)Boolean
+method ==: (x: Byte)Boolean
+method ==: (x: Char)Boolean
+method ==: (x: Double)Boolean
+method ==: (x: Float)Boolean
+method ==: (x: Int)Boolean
+method ==: (x: Long)Boolean
+method ==: (x: Short)Boolean
+method >: (x: Byte)Boolean
+method >: (x: Char)Boolean
+method >: (x: Double)Boolean
+method >: (x: Float)Boolean
+method >: (x: Int)Boolean
+method >: (x: Long)Boolean
+method >: (x: Short)Boolean
+method >=: (x: Byte)Boolean
+method >=: (x: Char)Boolean
+method >=: (x: Double)Boolean
+method >=: (x: Float)Boolean
+method >=: (x: Int)Boolean
+method >=: (x: Long)Boolean
+method >=: (x: Short)Boolean
+method >>: (x: Int)Int
+method >>: (x: Long)Int
+method >>>: (x: Int)Int
+method >>>: (x: Long)Int
+method ^: (x: Byte)Int
+method ^: (x: Char)Int
+method ^: (x: Int)Int
+method ^: (x: Long)Long
+method ^: (x: Short)Int
+method asInstanceOf: [T0]=> T0
+method equals: (x$1: Any)Boolean
+method getClass: ()Class[Byte]
+method hashCode: ()Int
+method isInstanceOf: [T0]=> Boolean
+method toByte: => Byte
+method toChar: => Char
+method toDouble: => Double
+method toFloat: => Float
+method toInt: => Int
+method toLong: => Long
+method toShort: => Short
+method toString: ()java.lang.String
+method unary_+: => Int
+method unary_-: => Int
+method unary_~: => Int
+method |: (x: Byte)Int
+method |: (x: Char)Int
+method |: (x: Int)Int
+method |: (x: Long)Long
+method |: (x: Short)Int
+testing Byte.toByte() with receiver = 2 and args = List(): [class java.lang.Byte] =======> 2
+testing Byte.toShort() with receiver = 2 and args = List(): [class java.lang.Short] =======> 2
+testing Byte.toChar() with receiver = 2 and args = List(): [class java.lang.Character] =======>
+testing Byte.toInt() with receiver = 2 and args = List(): [class java.lang.Integer] =======> 2
+testing Byte.toLong() with receiver = 2 and args = List(): [class java.lang.Long] =======> 2
+testing Byte.toFloat() with receiver = 2 and args = List(): [class java.lang.Float] =======> 2.0
+testing Byte.toDouble() with receiver = 2 and args = List(): [class java.lang.Double] =======> 2.0
+testing Byte.==(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Byte.==(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Byte.==(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Byte.==(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Byte.==(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Byte.==(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Byte.==(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Byte.!=(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Byte.!=(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Byte.!=(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Byte.!=(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Byte.!=(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Byte.!=(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Byte.!=(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Byte.<(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Byte.<(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Byte.<(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Byte.<(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Byte.<(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Byte.<(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Byte.<(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Byte.<=(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Byte.<=(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Byte.<=(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Byte.<=(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Byte.<=(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Byte.<=(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Byte.<=(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Byte.>(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Byte.>(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Byte.>(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Byte.>(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Byte.>(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Byte.>(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Byte.>(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Byte.>=(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Byte.>=(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Byte.>=(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Byte.>=(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Byte.>=(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Byte.>=(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Byte.>=(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Byte.+(String) with receiver = 2 and args = List(2 class java.lang.String): [class java.lang.String] =======> 22
+testing Byte.+(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 4
+testing Byte.+(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 4
+testing Byte.+(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Integer] =======> 4
+testing Byte.+(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 4
+testing Byte.+(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 4
+testing Byte.+(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 4.0
+testing Byte.+(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 4.0
+testing Byte.-(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 0
+testing Byte.-(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 0
+testing Byte.-(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Integer] =======> 0
+testing Byte.-(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 0
+testing Byte.-(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 0
+testing Byte.-(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 0.0
+testing Byte.-(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 0.0
+testing Byte.*(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 4
+testing Byte.*(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 4
+testing Byte.*(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Integer] =======> 4
+testing Byte.*(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 4
+testing Byte.*(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 4
+testing Byte.*(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 4.0
+testing Byte.*(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 4.0
+testing Byte./(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 1
+testing Byte./(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 1
+testing Byte./(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Integer] =======> 1
+testing Byte./(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 1
+testing Byte./(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 1
+testing Byte./(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 1.0
+testing Byte./(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 1.0
+testing Byte.%(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 0
+testing Byte.%(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 0
+testing Byte.%(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Integer] =======> 0
+testing Byte.%(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 0
+testing Byte.%(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 0
+testing Byte.%(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 0.0
+testing Byte.%(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 0.0
+============
+Short
+it's important to print the list of Byte's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+constructor Short: ()Short
+method !=: (x$1: Any)Boolean
+method !=: (x: Byte)Boolean
+method !=: (x: Char)Boolean
+method !=: (x: Double)Boolean
+method !=: (x: Float)Boolean
+method !=: (x: Int)Boolean
+method !=: (x: Long)Boolean
+method !=: (x: Short)Boolean
+method ##: ()Int
+method %: (x: Byte)Int
+method %: (x: Char)Int
+method %: (x: Double)Double
+method %: (x: Float)Float
+method %: (x: Int)Int
+method %: (x: Long)Long
+method %: (x: Short)Int
+method &: (x: Byte)Int
+method &: (x: Char)Int
+method &: (x: Int)Int
+method &: (x: Long)Long
+method &: (x: Short)Int
+method *: (x: Byte)Int
+method *: (x: Char)Int
+method *: (x: Double)Double
+method *: (x: Float)Float
+method *: (x: Int)Int
+method *: (x: Long)Long
+method *: (x: Short)Int
+method +: (x: Byte)Int
+method +: (x: Char)Int
+method +: (x: Double)Double
+method +: (x: Float)Float
+method +: (x: Int)Int
+method +: (x: Long)Long
+method +: (x: Short)Int
+method +: (x: String)String
+method -: (x: Byte)Int
+method -: (x: Char)Int
+method -: (x: Double)Double
+method -: (x: Float)Float
+method -: (x: Int)Int
+method -: (x: Long)Long
+method -: (x: Short)Int
+method /: (x: Byte)Int
+method /: (x: Char)Int
+method /: (x: Double)Double
+method /: (x: Float)Float
+method /: (x: Int)Int
+method /: (x: Long)Long
+method /: (x: Short)Int
+method <: (x: Byte)Boolean
+method <: (x: Char)Boolean
+method <: (x: Double)Boolean
+method <: (x: Float)Boolean
+method <: (x: Int)Boolean
+method <: (x: Long)Boolean
+method <: (x: Short)Boolean
+method <<: (x: Int)Int
+method <<: (x: Long)Int
+method <=: (x: Byte)Boolean
+method <=: (x: Char)Boolean
+method <=: (x: Double)Boolean
+method <=: (x: Float)Boolean
+method <=: (x: Int)Boolean
+method <=: (x: Long)Boolean
+method <=: (x: Short)Boolean
+method ==: (x$1: Any)Boolean
+method ==: (x: Byte)Boolean
+method ==: (x: Char)Boolean
+method ==: (x: Double)Boolean
+method ==: (x: Float)Boolean
+method ==: (x: Int)Boolean
+method ==: (x: Long)Boolean
+method ==: (x: Short)Boolean
+method >: (x: Byte)Boolean
+method >: (x: Char)Boolean
+method >: (x: Double)Boolean
+method >: (x: Float)Boolean
+method >: (x: Int)Boolean
+method >: (x: Long)Boolean
+method >: (x: Short)Boolean
+method >=: (x: Byte)Boolean
+method >=: (x: Char)Boolean
+method >=: (x: Double)Boolean
+method >=: (x: Float)Boolean
+method >=: (x: Int)Boolean
+method >=: (x: Long)Boolean
+method >=: (x: Short)Boolean
+method >>: (x: Int)Int
+method >>: (x: Long)Int
+method >>>: (x: Int)Int
+method >>>: (x: Long)Int
+method ^: (x: Byte)Int
+method ^: (x: Char)Int
+method ^: (x: Int)Int
+method ^: (x: Long)Long
+method ^: (x: Short)Int
+method asInstanceOf: [T0]=> T0
+method equals: (x$1: Any)Boolean
+method getClass: ()Class[Short]
+method hashCode: ()Int
+method isInstanceOf: [T0]=> Boolean
+method toByte: => Byte
+method toChar: => Char
+method toDouble: => Double
+method toFloat: => Float
+method toInt: => Int
+method toLong: => Long
+method toShort: => Short
+method toString: ()java.lang.String
+method unary_+: => Int
+method unary_-: => Int
+method unary_~: => Int
+method |: (x: Byte)Int
+method |: (x: Char)Int
+method |: (x: Int)Int
+method |: (x: Long)Long
+method |: (x: Short)Int
+testing Short.toByte() with receiver = 2 and args = List(): [class java.lang.Byte] =======> 2
+testing Short.toShort() with receiver = 2 and args = List(): [class java.lang.Short] =======> 2
+testing Short.toChar() with receiver = 2 and args = List(): [class java.lang.Character] =======>
+testing Short.toInt() with receiver = 2 and args = List(): [class java.lang.Integer] =======> 2
+testing Short.toLong() with receiver = 2 and args = List(): [class java.lang.Long] =======> 2
+testing Short.toFloat() with receiver = 2 and args = List(): [class java.lang.Float] =======> 2.0
+testing Short.toDouble() with receiver = 2 and args = List(): [class java.lang.Double] =======> 2.0
+testing Short.==(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Short.==(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Short.==(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Short.==(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Short.==(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Short.==(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Short.==(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Short.!=(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Short.!=(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Short.!=(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Short.!=(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Short.!=(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Short.!=(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Short.!=(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Short.<(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Short.<(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Short.<(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Short.<(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Short.<(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Short.<(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Short.<(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Short.<=(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Short.<=(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Short.<=(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Short.<=(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Short.<=(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Short.<=(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Short.<=(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Short.>(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Short.>(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Short.>(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Short.>(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Short.>(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Short.>(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Short.>(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Short.>=(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Short.>=(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Short.>=(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Short.>=(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Short.>=(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Short.>=(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Short.>=(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Short.+(String) with receiver = 2 and args = List(2 class java.lang.String): [class java.lang.String] =======> 22
+testing Short.+(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 4
+testing Short.+(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 4
+testing Short.+(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Integer] =======> 4
+testing Short.+(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 4
+testing Short.+(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 4
+testing Short.+(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 4.0
+testing Short.+(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 4.0
+testing Short.-(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 0
+testing Short.-(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 0
+testing Short.-(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Integer] =======> 0
+testing Short.-(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 0
+testing Short.-(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 0
+testing Short.-(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 0.0
+testing Short.-(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 0.0
+testing Short.*(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 4
+testing Short.*(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 4
+testing Short.*(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Integer] =======> 4
+testing Short.*(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 4
+testing Short.*(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 4
+testing Short.*(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 4.0
+testing Short.*(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 4.0
+testing Short./(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 1
+testing Short./(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 1
+testing Short./(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Integer] =======> 1
+testing Short./(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 1
+testing Short./(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 1
+testing Short./(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 1.0
+testing Short./(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 1.0
+testing Short.%(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 0
+testing Short.%(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 0
+testing Short.%(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Integer] =======> 0
+testing Short.%(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 0
+testing Short.%(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 0
+testing Short.%(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 0.0
+testing Short.%(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 0.0
+============
+Char
+it's important to print the list of Byte's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+constructor Char: ()Char
+method !=: (x$1: Any)Boolean
+method !=: (x: Byte)Boolean
+method !=: (x: Char)Boolean
+method !=: (x: Double)Boolean
+method !=: (x: Float)Boolean
+method !=: (x: Int)Boolean
+method !=: (x: Long)Boolean
+method !=: (x: Short)Boolean
+method ##: ()Int
+method %: (x: Byte)Int
+method %: (x: Char)Int
+method %: (x: Double)Double
+method %: (x: Float)Float
+method %: (x: Int)Int
+method %: (x: Long)Long
+method %: (x: Short)Int
+method &: (x: Byte)Int
+method &: (x: Char)Int
+method &: (x: Int)Int
+method &: (x: Long)Long
+method &: (x: Short)Int
+method *: (x: Byte)Int
+method *: (x: Char)Int
+method *: (x: Double)Double
+method *: (x: Float)Float
+method *: (x: Int)Int
+method *: (x: Long)Long
+method *: (x: Short)Int
+method +: (x: Byte)Int
+method +: (x: Char)Int
+method +: (x: Double)Double
+method +: (x: Float)Float
+method +: (x: Int)Int
+method +: (x: Long)Long
+method +: (x: Short)Int
+method +: (x: String)String
+method -: (x: Byte)Int
+method -: (x: Char)Int
+method -: (x: Double)Double
+method -: (x: Float)Float
+method -: (x: Int)Int
+method -: (x: Long)Long
+method -: (x: Short)Int
+method /: (x: Byte)Int
+method /: (x: Char)Int
+method /: (x: Double)Double
+method /: (x: Float)Float
+method /: (x: Int)Int
+method /: (x: Long)Long
+method /: (x: Short)Int
+method <: (x: Byte)Boolean
+method <: (x: Char)Boolean
+method <: (x: Double)Boolean
+method <: (x: Float)Boolean
+method <: (x: Int)Boolean
+method <: (x: Long)Boolean
+method <: (x: Short)Boolean
+method <<: (x: Int)Int
+method <<: (x: Long)Int
+method <=: (x: Byte)Boolean
+method <=: (x: Char)Boolean
+method <=: (x: Double)Boolean
+method <=: (x: Float)Boolean
+method <=: (x: Int)Boolean
+method <=: (x: Long)Boolean
+method <=: (x: Short)Boolean
+method ==: (x$1: Any)Boolean
+method ==: (x: Byte)Boolean
+method ==: (x: Char)Boolean
+method ==: (x: Double)Boolean
+method ==: (x: Float)Boolean
+method ==: (x: Int)Boolean
+method ==: (x: Long)Boolean
+method ==: (x: Short)Boolean
+method >: (x: Byte)Boolean
+method >: (x: Char)Boolean
+method >: (x: Double)Boolean
+method >: (x: Float)Boolean
+method >: (x: Int)Boolean
+method >: (x: Long)Boolean
+method >: (x: Short)Boolean
+method >=: (x: Byte)Boolean
+method >=: (x: Char)Boolean
+method >=: (x: Double)Boolean
+method >=: (x: Float)Boolean
+method >=: (x: Int)Boolean
+method >=: (x: Long)Boolean
+method >=: (x: Short)Boolean
+method >>: (x: Int)Int
+method >>: (x: Long)Int
+method >>>: (x: Int)Int
+method >>>: (x: Long)Int
+method ^: (x: Byte)Int
+method ^: (x: Char)Int
+method ^: (x: Int)Int
+method ^: (x: Long)Long
+method ^: (x: Short)Int
+method asInstanceOf: [T0]=> T0
+method equals: (x$1: Any)Boolean
+method getClass: ()Class[Char]
+method hashCode: ()Int
+method isInstanceOf: [T0]=> Boolean
+method toByte: => Byte
+method toChar: => Char
+method toDouble: => Double
+method toFloat: => Float
+method toInt: => Int
+method toLong: => Long
+method toShort: => Short
+method toString: ()java.lang.String
+method unary_+: => Int
+method unary_-: => Int
+method unary_~: => Int
+method |: (x: Byte)Int
+method |: (x: Char)Int
+method |: (x: Int)Int
+method |: (x: Long)Long
+method |: (x: Short)Int
+testing Char.toByte() with receiver = and args = List(): [class java.lang.Byte] =======> 2
+testing Char.toShort() with receiver = and args = List(): [class java.lang.Short] =======> 2
+testing Char.toChar() with receiver = and args = List(): [class java.lang.Character] =======>
+testing Char.toInt() with receiver = and args = List(): [class java.lang.Integer] =======> 2
+testing Char.toLong() with receiver = and args = List(): [class java.lang.Long] =======> 2
+testing Char.toFloat() with receiver = and args = List(): [class java.lang.Float] =======> 2.0
+testing Char.toDouble() with receiver = and args = List(): [class java.lang.Double] =======> 2.0
+testing Char.==(Byte) with receiver = and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Char.==(Short) with receiver = and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Char.==(Char) with receiver = and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Char.==(Int) with receiver = and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Char.==(Long) with receiver = and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Char.==(Float) with receiver = and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Char.==(Double) with receiver = and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Char.!=(Byte) with receiver = and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Char.!=(Short) with receiver = and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Char.!=(Char) with receiver = and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Char.!=(Int) with receiver = and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Char.!=(Long) with receiver = and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Char.!=(Float) with receiver = and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Char.!=(Double) with receiver = and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Char.<(Byte) with receiver = and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Char.<(Short) with receiver = and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Char.<(Char) with receiver = and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Char.<(Int) with receiver = and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Char.<(Long) with receiver = and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Char.<(Float) with receiver = and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Char.<(Double) with receiver = and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Char.<=(Byte) with receiver = and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Char.<=(Short) with receiver = and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Char.<=(Char) with receiver = and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Char.<=(Int) with receiver = and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Char.<=(Long) with receiver = and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Char.<=(Float) with receiver = and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Char.<=(Double) with receiver = and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Char.>(Byte) with receiver = and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Char.>(Short) with receiver = and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Char.>(Char) with receiver = and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Char.>(Int) with receiver = and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Char.>(Long) with receiver = and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Char.>(Float) with receiver = and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Char.>(Double) with receiver = and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Char.>=(Byte) with receiver = and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Char.>=(Short) with receiver = and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Char.>=(Char) with receiver = and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Char.>=(Int) with receiver = and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Char.>=(Long) with receiver = and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Char.>=(Float) with receiver = and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Char.>=(Double) with receiver = and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Char.+(String) with receiver = and args = List(2 class java.lang.String): [class java.lang.String] =======> 2
+testing Char.+(Byte) with receiver = and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 4
+testing Char.+(Short) with receiver = and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 4
+testing Char.+(Char) with receiver = and args = List( class java.lang.Character): [class java.lang.Integer] =======> 4
+testing Char.+(Int) with receiver = and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 4
+testing Char.+(Long) with receiver = and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 4
+testing Char.+(Float) with receiver = and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 4.0
+testing Char.+(Double) with receiver = and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 4.0
+testing Char.-(Byte) with receiver = and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 0
+testing Char.-(Short) with receiver = and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 0
+testing Char.-(Char) with receiver = and args = List( class java.lang.Character): [class java.lang.Integer] =======> 0
+testing Char.-(Int) with receiver = and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 0
+testing Char.-(Long) with receiver = and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 0
+testing Char.-(Float) with receiver = and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 0.0
+testing Char.-(Double) with receiver = and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 0.0
+testing Char.*(Byte) with receiver = and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 4
+testing Char.*(Short) with receiver = and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 4
+testing Char.*(Char) with receiver = and args = List( class java.lang.Character): [class java.lang.Integer] =======> 4
+testing Char.*(Int) with receiver = and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 4
+testing Char.*(Long) with receiver = and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 4
+testing Char.*(Float) with receiver = and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 4.0
+testing Char.*(Double) with receiver = and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 4.0
+testing Char./(Byte) with receiver = and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 1
+testing Char./(Short) with receiver = and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 1
+testing Char./(Char) with receiver = and args = List( class java.lang.Character): [class java.lang.Integer] =======> 1
+testing Char./(Int) with receiver = and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 1
+testing Char./(Long) with receiver = and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 1
+testing Char./(Float) with receiver = and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 1.0
+testing Char./(Double) with receiver = and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 1.0
+testing Char.%(Byte) with receiver = and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 0
+testing Char.%(Short) with receiver = and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 0
+testing Char.%(Char) with receiver = and args = List( class java.lang.Character): [class java.lang.Integer] =======> 0
+testing Char.%(Int) with receiver = and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 0
+testing Char.%(Long) with receiver = and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 0
+testing Char.%(Float) with receiver = and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 0.0
+testing Char.%(Double) with receiver = and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 0.0
+============
+Int
+it's important to print the list of Byte's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+constructor Int: ()Int
+method !=: (x$1: Any)Boolean
+method !=: (x: Byte)Boolean
+method !=: (x: Char)Boolean
+method !=: (x: Double)Boolean
+method !=: (x: Float)Boolean
+method !=: (x: Int)Boolean
+method !=: (x: Long)Boolean
+method !=: (x: Short)Boolean
+method ##: ()Int
+method %: (x: Byte)Int
+method %: (x: Char)Int
+method %: (x: Double)Double
+method %: (x: Float)Float
+method %: (x: Int)Int
+method %: (x: Long)Long
+method %: (x: Short)Int
+method &: (x: Byte)Int
+method &: (x: Char)Int
+method &: (x: Int)Int
+method &: (x: Long)Long
+method &: (x: Short)Int
+method *: (x: Byte)Int
+method *: (x: Char)Int
+method *: (x: Double)Double
+method *: (x: Float)Float
+method *: (x: Int)Int
+method *: (x: Long)Long
+method *: (x: Short)Int
+method +: (x: Byte)Int
+method +: (x: Char)Int
+method +: (x: Double)Double
+method +: (x: Float)Float
+method +: (x: Int)Int
+method +: (x: Long)Long
+method +: (x: Short)Int
+method +: (x: String)String
+method -: (x: Byte)Int
+method -: (x: Char)Int
+method -: (x: Double)Double
+method -: (x: Float)Float
+method -: (x: Int)Int
+method -: (x: Long)Long
+method -: (x: Short)Int
+method /: (x: Byte)Int
+method /: (x: Char)Int
+method /: (x: Double)Double
+method /: (x: Float)Float
+method /: (x: Int)Int
+method /: (x: Long)Long
+method /: (x: Short)Int
+method <: (x: Byte)Boolean
+method <: (x: Char)Boolean
+method <: (x: Double)Boolean
+method <: (x: Float)Boolean
+method <: (x: Int)Boolean
+method <: (x: Long)Boolean
+method <: (x: Short)Boolean
+method <<: (x: Int)Int
+method <<: (x: Long)Int
+method <=: (x: Byte)Boolean
+method <=: (x: Char)Boolean
+method <=: (x: Double)Boolean
+method <=: (x: Float)Boolean
+method <=: (x: Int)Boolean
+method <=: (x: Long)Boolean
+method <=: (x: Short)Boolean
+method ==: (x$1: Any)Boolean
+method ==: (x: Byte)Boolean
+method ==: (x: Char)Boolean
+method ==: (x: Double)Boolean
+method ==: (x: Float)Boolean
+method ==: (x: Int)Boolean
+method ==: (x: Long)Boolean
+method ==: (x: Short)Boolean
+method >: (x: Byte)Boolean
+method >: (x: Char)Boolean
+method >: (x: Double)Boolean
+method >: (x: Float)Boolean
+method >: (x: Int)Boolean
+method >: (x: Long)Boolean
+method >: (x: Short)Boolean
+method >=: (x: Byte)Boolean
+method >=: (x: Char)Boolean
+method >=: (x: Double)Boolean
+method >=: (x: Float)Boolean
+method >=: (x: Int)Boolean
+method >=: (x: Long)Boolean
+method >=: (x: Short)Boolean
+method >>: (x: Int)Int
+method >>: (x: Long)Int
+method >>>: (x: Int)Int
+method >>>: (x: Long)Int
+method ^: (x: Byte)Int
+method ^: (x: Char)Int
+method ^: (x: Int)Int
+method ^: (x: Long)Long
+method ^: (x: Short)Int
+method asInstanceOf: [T0]=> T0
+method equals: (x$1: Any)Boolean
+method getClass: ()Class[Int]
+method hashCode: ()Int
+method isInstanceOf: [T0]=> Boolean
+method toByte: => Byte
+method toChar: => Char
+method toDouble: => Double
+method toFloat: => Float
+method toInt: => Int
+method toLong: => Long
+method toShort: => Short
+method toString: ()java.lang.String
+method unary_+: => Int
+method unary_-: => Int
+method unary_~: => Int
+method |: (x: Byte)Int
+method |: (x: Char)Int
+method |: (x: Int)Int
+method |: (x: Long)Long
+method |: (x: Short)Int
+testing Int.toByte() with receiver = 2 and args = List(): [class java.lang.Byte] =======> 2
+testing Int.toShort() with receiver = 2 and args = List(): [class java.lang.Short] =======> 2
+testing Int.toChar() with receiver = 2 and args = List(): [class java.lang.Character] =======>
+testing Int.toInt() with receiver = 2 and args = List(): [class java.lang.Integer] =======> 2
+testing Int.toLong() with receiver = 2 and args = List(): [class java.lang.Long] =======> 2
+testing Int.toFloat() with receiver = 2 and args = List(): [class java.lang.Float] =======> 2.0
+testing Int.toDouble() with receiver = 2 and args = List(): [class java.lang.Double] =======> 2.0
+testing Int.==(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Int.==(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Int.==(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Int.==(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Int.==(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Int.==(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Int.==(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Int.!=(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Int.!=(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Int.!=(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Int.!=(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Int.!=(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Int.!=(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Int.!=(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Int.<(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Int.<(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Int.<(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Int.<(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Int.<(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Int.<(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Int.<(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Int.<=(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Int.<=(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Int.<=(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Int.<=(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Int.<=(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Int.<=(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Int.<=(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Int.>(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Int.>(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Int.>(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Int.>(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Int.>(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Int.>(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Int.>(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Int.>=(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Int.>=(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Int.>=(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Int.>=(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Int.>=(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Int.>=(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Int.>=(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Int.+(String) with receiver = 2 and args = List(2 class java.lang.String): [class java.lang.String] =======> 22
+testing Int.+(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 4
+testing Int.+(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 4
+testing Int.+(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Integer] =======> 4
+testing Int.+(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 4
+testing Int.+(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 4
+testing Int.+(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 4.0
+testing Int.+(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 4.0
+testing Int.-(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 0
+testing Int.-(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 0
+testing Int.-(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Integer] =======> 0
+testing Int.-(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 0
+testing Int.-(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 0
+testing Int.-(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 0.0
+testing Int.-(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 0.0
+testing Int.*(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 4
+testing Int.*(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 4
+testing Int.*(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Integer] =======> 4
+testing Int.*(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 4
+testing Int.*(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 4
+testing Int.*(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 4.0
+testing Int.*(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 4.0
+testing Int./(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 1
+testing Int./(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 1
+testing Int./(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Integer] =======> 1
+testing Int./(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 1
+testing Int./(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 1
+testing Int./(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 1.0
+testing Int./(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 1.0
+testing Int.%(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Integer] =======> 0
+testing Int.%(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Integer] =======> 0
+testing Int.%(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Integer] =======> 0
+testing Int.%(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Integer] =======> 0
+testing Int.%(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 0
+testing Int.%(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 0.0
+testing Int.%(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 0.0
+============
+Long
+it's important to print the list of Byte's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+constructor Long: ()Long
+method !=: (x$1: Any)Boolean
+method !=: (x: Byte)Boolean
+method !=: (x: Char)Boolean
+method !=: (x: Double)Boolean
+method !=: (x: Float)Boolean
+method !=: (x: Int)Boolean
+method !=: (x: Long)Boolean
+method !=: (x: Short)Boolean
+method ##: ()Int
+method %: (x: Byte)Long
+method %: (x: Char)Long
+method %: (x: Double)Double
+method %: (x: Float)Float
+method %: (x: Int)Long
+method %: (x: Long)Long
+method %: (x: Short)Long
+method &: (x: Byte)Long
+method &: (x: Char)Long
+method &: (x: Int)Long
+method &: (x: Long)Long
+method &: (x: Short)Long
+method *: (x: Byte)Long
+method *: (x: Char)Long
+method *: (x: Double)Double
+method *: (x: Float)Float
+method *: (x: Int)Long
+method *: (x: Long)Long
+method *: (x: Short)Long
+method +: (x: Byte)Long
+method +: (x: Char)Long
+method +: (x: Double)Double
+method +: (x: Float)Float
+method +: (x: Int)Long
+method +: (x: Long)Long
+method +: (x: Short)Long
+method +: (x: String)String
+method -: (x: Byte)Long
+method -: (x: Char)Long
+method -: (x: Double)Double
+method -: (x: Float)Float
+method -: (x: Int)Long
+method -: (x: Long)Long
+method -: (x: Short)Long
+method /: (x: Byte)Long
+method /: (x: Char)Long
+method /: (x: Double)Double
+method /: (x: Float)Float
+method /: (x: Int)Long
+method /: (x: Long)Long
+method /: (x: Short)Long
+method <: (x: Byte)Boolean
+method <: (x: Char)Boolean
+method <: (x: Double)Boolean
+method <: (x: Float)Boolean
+method <: (x: Int)Boolean
+method <: (x: Long)Boolean
+method <: (x: Short)Boolean
+method <<: (x: Int)Long
+method <<: (x: Long)Long
+method <=: (x: Byte)Boolean
+method <=: (x: Char)Boolean
+method <=: (x: Double)Boolean
+method <=: (x: Float)Boolean
+method <=: (x: Int)Boolean
+method <=: (x: Long)Boolean
+method <=: (x: Short)Boolean
+method ==: (x$1: Any)Boolean
+method ==: (x: Byte)Boolean
+method ==: (x: Char)Boolean
+method ==: (x: Double)Boolean
+method ==: (x: Float)Boolean
+method ==: (x: Int)Boolean
+method ==: (x: Long)Boolean
+method ==: (x: Short)Boolean
+method >: (x: Byte)Boolean
+method >: (x: Char)Boolean
+method >: (x: Double)Boolean
+method >: (x: Float)Boolean
+method >: (x: Int)Boolean
+method >: (x: Long)Boolean
+method >: (x: Short)Boolean
+method >=: (x: Byte)Boolean
+method >=: (x: Char)Boolean
+method >=: (x: Double)Boolean
+method >=: (x: Float)Boolean
+method >=: (x: Int)Boolean
+method >=: (x: Long)Boolean
+method >=: (x: Short)Boolean
+method >>: (x: Int)Long
+method >>: (x: Long)Long
+method >>>: (x: Int)Long
+method >>>: (x: Long)Long
+method ^: (x: Byte)Long
+method ^: (x: Char)Long
+method ^: (x: Int)Long
+method ^: (x: Long)Long
+method ^: (x: Short)Long
+method asInstanceOf: [T0]=> T0
+method equals: (x$1: Any)Boolean
+method getClass: ()Class[Long]
+method hashCode: ()Int
+method isInstanceOf: [T0]=> Boolean
+method toByte: => Byte
+method toChar: => Char
+method toDouble: => Double
+method toFloat: => Float
+method toInt: => Int
+method toLong: => Long
+method toShort: => Short
+method toString: ()java.lang.String
+method unary_+: => Long
+method unary_-: => Long
+method unary_~: => Long
+method |: (x: Byte)Long
+method |: (x: Char)Long
+method |: (x: Int)Long
+method |: (x: Long)Long
+method |: (x: Short)Long
+testing Long.toByte() with receiver = 2 and args = List(): [class java.lang.Byte] =======> 2
+testing Long.toShort() with receiver = 2 and args = List(): [class java.lang.Short] =======> 2
+testing Long.toChar() with receiver = 2 and args = List(): [class java.lang.Character] =======>
+testing Long.toInt() with receiver = 2 and args = List(): [class java.lang.Integer] =======> 2
+testing Long.toLong() with receiver = 2 and args = List(): [class java.lang.Long] =======> 2
+testing Long.toFloat() with receiver = 2 and args = List(): [class java.lang.Float] =======> 2.0
+testing Long.toDouble() with receiver = 2 and args = List(): [class java.lang.Double] =======> 2.0
+testing Long.==(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Long.==(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Long.==(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Long.==(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Long.==(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Long.==(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Long.==(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Long.!=(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Long.!=(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Long.!=(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Long.!=(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Long.!=(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Long.!=(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Long.!=(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Long.<(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Long.<(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Long.<(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Long.<(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Long.<(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Long.<(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Long.<(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Long.<=(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Long.<=(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Long.<=(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Long.<=(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Long.<=(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Long.<=(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Long.<=(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Long.>(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Long.>(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Long.>(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Long.>(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Long.>(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Long.>(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Long.>(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Long.>=(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Long.>=(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Long.>=(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Long.>=(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Long.>=(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Long.>=(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Long.>=(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Long.+(String) with receiver = 2 and args = List(2 class java.lang.String): [class java.lang.String] =======> 22
+testing Long.+(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Long] =======> 4
+testing Long.+(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Long] =======> 4
+testing Long.+(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Long] =======> 4
+testing Long.+(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Long] =======> 4
+testing Long.+(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 4
+testing Long.+(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 4.0
+testing Long.+(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 4.0
+testing Long.-(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Long] =======> 0
+testing Long.-(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Long] =======> 0
+testing Long.-(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Long] =======> 0
+testing Long.-(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Long] =======> 0
+testing Long.-(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 0
+testing Long.-(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 0.0
+testing Long.-(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 0.0
+testing Long.*(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Long] =======> 4
+testing Long.*(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Long] =======> 4
+testing Long.*(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Long] =======> 4
+testing Long.*(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Long] =======> 4
+testing Long.*(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 4
+testing Long.*(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 4.0
+testing Long.*(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 4.0
+testing Long./(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Long] =======> 1
+testing Long./(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Long] =======> 1
+testing Long./(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Long] =======> 1
+testing Long./(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Long] =======> 1
+testing Long./(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 1
+testing Long./(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 1.0
+testing Long./(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 1.0
+testing Long.%(Byte) with receiver = 2 and args = List(2 class java.lang.Byte): [class java.lang.Long] =======> 0
+testing Long.%(Short) with receiver = 2 and args = List(2 class java.lang.Short): [class java.lang.Long] =======> 0
+testing Long.%(Char) with receiver = 2 and args = List( class java.lang.Character): [class java.lang.Long] =======> 0
+testing Long.%(Int) with receiver = 2 and args = List(2 class java.lang.Integer): [class java.lang.Long] =======> 0
+testing Long.%(Long) with receiver = 2 and args = List(2 class java.lang.Long): [class java.lang.Long] =======> 0
+testing Long.%(Float) with receiver = 2 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 0.0
+testing Long.%(Double) with receiver = 2 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 0.0
+============
+Float
+it's important to print the list of Byte's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+constructor Float: ()Float
+method !=: (x$1: Any)Boolean
+method !=: (x: Byte)Boolean
+method !=: (x: Char)Boolean
+method !=: (x: Double)Boolean
+method !=: (x: Float)Boolean
+method !=: (x: Int)Boolean
+method !=: (x: Long)Boolean
+method !=: (x: Short)Boolean
+method ##: ()Int
+method %: (x: Byte)Float
+method %: (x: Char)Float
+method %: (x: Double)Double
+method %: (x: Float)Float
+method %: (x: Int)Float
+method %: (x: Long)Float
+method %: (x: Short)Float
+method *: (x: Byte)Float
+method *: (x: Char)Float
+method *: (x: Double)Double
+method *: (x: Float)Float
+method *: (x: Int)Float
+method *: (x: Long)Float
+method *: (x: Short)Float
+method +: (x: Byte)Float
+method +: (x: Char)Float
+method +: (x: Double)Double
+method +: (x: Float)Float
+method +: (x: Int)Float
+method +: (x: Long)Float
+method +: (x: Short)Float
+method +: (x: String)String
+method -: (x: Byte)Float
+method -: (x: Char)Float
+method -: (x: Double)Double
+method -: (x: Float)Float
+method -: (x: Int)Float
+method -: (x: Long)Float
+method -: (x: Short)Float
+method /: (x: Byte)Float
+method /: (x: Char)Float
+method /: (x: Double)Double
+method /: (x: Float)Float
+method /: (x: Int)Float
+method /: (x: Long)Float
+method /: (x: Short)Float
+method <: (x: Byte)Boolean
+method <: (x: Char)Boolean
+method <: (x: Double)Boolean
+method <: (x: Float)Boolean
+method <: (x: Int)Boolean
+method <: (x: Long)Boolean
+method <: (x: Short)Boolean
+method <=: (x: Byte)Boolean
+method <=: (x: Char)Boolean
+method <=: (x: Double)Boolean
+method <=: (x: Float)Boolean
+method <=: (x: Int)Boolean
+method <=: (x: Long)Boolean
+method <=: (x: Short)Boolean
+method ==: (x$1: Any)Boolean
+method ==: (x: Byte)Boolean
+method ==: (x: Char)Boolean
+method ==: (x: Double)Boolean
+method ==: (x: Float)Boolean
+method ==: (x: Int)Boolean
+method ==: (x: Long)Boolean
+method ==: (x: Short)Boolean
+method >: (x: Byte)Boolean
+method >: (x: Char)Boolean
+method >: (x: Double)Boolean
+method >: (x: Float)Boolean
+method >: (x: Int)Boolean
+method >: (x: Long)Boolean
+method >: (x: Short)Boolean
+method >=: (x: Byte)Boolean
+method >=: (x: Char)Boolean
+method >=: (x: Double)Boolean
+method >=: (x: Float)Boolean
+method >=: (x: Int)Boolean
+method >=: (x: Long)Boolean
+method >=: (x: Short)Boolean
+method asInstanceOf: [T0]=> T0
+method equals: (x$1: Any)Boolean
+method getClass: ()Class[Float]
+method hashCode: ()Int
+method isInstanceOf: [T0]=> Boolean
+method toByte: => Byte
+method toChar: => Char
+method toDouble: => Double
+method toFloat: => Float
+method toInt: => Int
+method toLong: => Long
+method toShort: => Short
+method toString: ()java.lang.String
+method unary_+: => Float
+method unary_-: => Float
+testing Float.toByte() with receiver = 2.0 and args = List(): [class java.lang.Byte] =======> 2
+testing Float.toShort() with receiver = 2.0 and args = List(): [class java.lang.Short] =======> 2
+testing Float.toChar() with receiver = 2.0 and args = List(): [class java.lang.Character] =======>
+testing Float.toInt() with receiver = 2.0 and args = List(): [class java.lang.Integer] =======> 2
+testing Float.toLong() with receiver = 2.0 and args = List(): [class java.lang.Long] =======> 2
+testing Float.toFloat() with receiver = 2.0 and args = List(): [class java.lang.Float] =======> 2.0
+testing Float.toDouble() with receiver = 2.0 and args = List(): [class java.lang.Double] =======> 2.0
+testing Float.==(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Float.==(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Float.==(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Float.==(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Float.==(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Float.==(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Float.==(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Float.!=(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Float.!=(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Float.!=(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Float.!=(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Float.!=(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Float.!=(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Float.!=(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Float.<(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Float.<(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Float.<(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Float.<(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Float.<(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Float.<(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Float.<(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Float.<=(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Float.<=(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Float.<=(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Float.<=(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Float.<=(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Float.<=(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Float.<=(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Float.>(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Float.>(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Float.>(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Float.>(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Float.>(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Float.>(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Float.>(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Float.>=(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Float.>=(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Float.>=(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Float.>=(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Float.>=(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Float.>=(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Float.>=(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Float.+(String) with receiver = 2.0 and args = List(2 class java.lang.String): [class java.lang.String] =======> 2.02
+testing Float.+(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Float] =======> 4.0
+testing Float.+(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Float] =======> 4.0
+testing Float.+(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Float] =======> 4.0
+testing Float.+(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Float] =======> 4.0
+testing Float.+(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Float] =======> 4.0
+testing Float.+(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 4.0
+testing Float.+(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 4.0
+testing Float.-(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Float] =======> 0.0
+testing Float.-(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Float] =======> 0.0
+testing Float.-(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Float] =======> 0.0
+testing Float.-(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Float] =======> 0.0
+testing Float.-(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Float] =======> 0.0
+testing Float.-(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 0.0
+testing Float.-(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 0.0
+testing Float.*(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Float] =======> 4.0
+testing Float.*(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Float] =======> 4.0
+testing Float.*(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Float] =======> 4.0
+testing Float.*(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Float] =======> 4.0
+testing Float.*(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Float] =======> 4.0
+testing Float.*(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 4.0
+testing Float.*(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 4.0
+testing Float./(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Float] =======> 1.0
+testing Float./(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Float] =======> 1.0
+testing Float./(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Float] =======> 1.0
+testing Float./(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Float] =======> 1.0
+testing Float./(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Float] =======> 1.0
+testing Float./(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 1.0
+testing Float./(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 1.0
+testing Float.%(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Float] =======> 0.0
+testing Float.%(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Float] =======> 0.0
+testing Float.%(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Float] =======> 0.0
+testing Float.%(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Float] =======> 0.0
+testing Float.%(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Float] =======> 0.0
+testing Float.%(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Float] =======> 0.0
+testing Float.%(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 0.0
+============
+Double
+it's important to print the list of Byte's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+constructor Double: ()Double
+method !=: (x$1: Any)Boolean
+method !=: (x: Byte)Boolean
+method !=: (x: Char)Boolean
+method !=: (x: Double)Boolean
+method !=: (x: Float)Boolean
+method !=: (x: Int)Boolean
+method !=: (x: Long)Boolean
+method !=: (x: Short)Boolean
+method ##: ()Int
+method %: (x: Byte)Double
+method %: (x: Char)Double
+method %: (x: Double)Double
+method %: (x: Float)Double
+method %: (x: Int)Double
+method %: (x: Long)Double
+method %: (x: Short)Double
+method *: (x: Byte)Double
+method *: (x: Char)Double
+method *: (x: Double)Double
+method *: (x: Float)Double
+method *: (x: Int)Double
+method *: (x: Long)Double
+method *: (x: Short)Double
+method +: (x: Byte)Double
+method +: (x: Char)Double
+method +: (x: Double)Double
+method +: (x: Float)Double
+method +: (x: Int)Double
+method +: (x: Long)Double
+method +: (x: Short)Double
+method +: (x: String)String
+method -: (x: Byte)Double
+method -: (x: Char)Double
+method -: (x: Double)Double
+method -: (x: Float)Double
+method -: (x: Int)Double
+method -: (x: Long)Double
+method -: (x: Short)Double
+method /: (x: Byte)Double
+method /: (x: Char)Double
+method /: (x: Double)Double
+method /: (x: Float)Double
+method /: (x: Int)Double
+method /: (x: Long)Double
+method /: (x: Short)Double
+method <: (x: Byte)Boolean
+method <: (x: Char)Boolean
+method <: (x: Double)Boolean
+method <: (x: Float)Boolean
+method <: (x: Int)Boolean
+method <: (x: Long)Boolean
+method <: (x: Short)Boolean
+method <=: (x: Byte)Boolean
+method <=: (x: Char)Boolean
+method <=: (x: Double)Boolean
+method <=: (x: Float)Boolean
+method <=: (x: Int)Boolean
+method <=: (x: Long)Boolean
+method <=: (x: Short)Boolean
+method ==: (x$1: Any)Boolean
+method ==: (x: Byte)Boolean
+method ==: (x: Char)Boolean
+method ==: (x: Double)Boolean
+method ==: (x: Float)Boolean
+method ==: (x: Int)Boolean
+method ==: (x: Long)Boolean
+method ==: (x: Short)Boolean
+method >: (x: Byte)Boolean
+method >: (x: Char)Boolean
+method >: (x: Double)Boolean
+method >: (x: Float)Boolean
+method >: (x: Int)Boolean
+method >: (x: Long)Boolean
+method >: (x: Short)Boolean
+method >=: (x: Byte)Boolean
+method >=: (x: Char)Boolean
+method >=: (x: Double)Boolean
+method >=: (x: Float)Boolean
+method >=: (x: Int)Boolean
+method >=: (x: Long)Boolean
+method >=: (x: Short)Boolean
+method asInstanceOf: [T0]=> T0
+method equals: (x$1: Any)Boolean
+method getClass: ()Class[Double]
+method hashCode: ()Int
+method isInstanceOf: [T0]=> Boolean
+method toByte: => Byte
+method toChar: => Char
+method toDouble: => Double
+method toFloat: => Float
+method toInt: => Int
+method toLong: => Long
+method toShort: => Short
+method toString: ()java.lang.String
+method unary_+: => Double
+method unary_-: => Double
+testing Double.toByte() with receiver = 2.0 and args = List(): [class java.lang.Byte] =======> 2
+testing Double.toShort() with receiver = 2.0 and args = List(): [class java.lang.Short] =======> 2
+testing Double.toChar() with receiver = 2.0 and args = List(): [class java.lang.Character] =======>
+testing Double.toInt() with receiver = 2.0 and args = List(): [class java.lang.Integer] =======> 2
+testing Double.toLong() with receiver = 2.0 and args = List(): [class java.lang.Long] =======> 2
+testing Double.toFloat() with receiver = 2.0 and args = List(): [class java.lang.Float] =======> 2.0
+testing Double.toDouble() with receiver = 2.0 and args = List(): [class java.lang.Double] =======> 2.0
+testing Double.==(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Double.==(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Double.==(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Double.==(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Double.==(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Double.==(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Double.==(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Double.!=(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Double.!=(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Double.!=(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Double.!=(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Double.!=(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Double.!=(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Double.!=(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Double.<(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Double.<(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Double.<(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Double.<(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Double.<(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Double.<(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Double.<(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Double.<=(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Double.<=(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Double.<=(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Double.<=(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Double.<=(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Double.<=(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Double.<=(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Double.>(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> false
+testing Double.>(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> false
+testing Double.>(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> false
+testing Double.>(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> false
+testing Double.>(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> false
+testing Double.>(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> false
+testing Double.>(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> false
+testing Double.>=(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Boolean] =======> true
+testing Double.>=(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Boolean] =======> true
+testing Double.>=(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Boolean] =======> true
+testing Double.>=(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Boolean] =======> true
+testing Double.>=(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Boolean] =======> true
+testing Double.>=(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Boolean] =======> true
+testing Double.>=(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Boolean] =======> true
+testing Double.+(String) with receiver = 2.0 and args = List(2 class java.lang.String): [class java.lang.String] =======> 2.02
+testing Double.+(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Double] =======> 4.0
+testing Double.+(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Double] =======> 4.0
+testing Double.+(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Double] =======> 4.0
+testing Double.+(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Double] =======> 4.0
+testing Double.+(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Double] =======> 4.0
+testing Double.+(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Double] =======> 4.0
+testing Double.+(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 4.0
+testing Double.-(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Double] =======> 0.0
+testing Double.-(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Double] =======> 0.0
+testing Double.-(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Double] =======> 0.0
+testing Double.-(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Double] =======> 0.0
+testing Double.-(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Double] =======> 0.0
+testing Double.-(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Double] =======> 0.0
+testing Double.-(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 0.0
+testing Double.*(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Double] =======> 4.0
+testing Double.*(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Double] =======> 4.0
+testing Double.*(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Double] =======> 4.0
+testing Double.*(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Double] =======> 4.0
+testing Double.*(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Double] =======> 4.0
+testing Double.*(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Double] =======> 4.0
+testing Double.*(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 4.0
+testing Double./(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Double] =======> 1.0
+testing Double./(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Double] =======> 1.0
+testing Double./(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Double] =======> 1.0
+testing Double./(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Double] =======> 1.0
+testing Double./(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Double] =======> 1.0
+testing Double./(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Double] =======> 1.0
+testing Double./(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 1.0
+testing Double.%(Byte) with receiver = 2.0 and args = List(2 class java.lang.Byte): [class java.lang.Double] =======> 0.0
+testing Double.%(Short) with receiver = 2.0 and args = List(2 class java.lang.Short): [class java.lang.Double] =======> 0.0
+testing Double.%(Char) with receiver = 2.0 and args = List( class java.lang.Character): [class java.lang.Double] =======> 0.0
+testing Double.%(Int) with receiver = 2.0 and args = List(2 class java.lang.Integer): [class java.lang.Double] =======> 0.0
+testing Double.%(Long) with receiver = 2.0 and args = List(2 class java.lang.Long): [class java.lang.Double] =======> 0.0
+testing Double.%(Float) with receiver = 2.0 and args = List(2.0 class java.lang.Float): [class java.lang.Double] =======> 0.0
+testing Double.%(Double) with receiver = 2.0 and args = List(2.0 class java.lang.Double): [class java.lang.Double] =======> 0.0
+============
+Boolean
+it's important to print the list of Byte's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+constructor Boolean: ()Boolean
+method !=: (x$1: Any)Boolean
+method !=: (x: Boolean)Boolean
+method ##: ()Int
+method &&: (x: Boolean)Boolean
+method &: (x: Boolean)Boolean
+method ==: (x$1: Any)Boolean
+method ==: (x: Boolean)Boolean
+method ^: (x: Boolean)Boolean
+method asInstanceOf: [T0]=> T0
+method equals: (x$1: Any)Boolean
+method getClass: ()Class[Boolean]
+method hashCode: ()Int
+method isInstanceOf: [T0]=> Boolean
+method toString: ()java.lang.String
+method unary_!: => Boolean
+method |: (x: Boolean)Boolean
+method ||: (x: Boolean)Boolean
+testing Boolean.unary_!() with receiver = true and args = List(): [class java.lang.Boolean] =======> false
+testing Boolean.==(Boolean) with receiver = true and args = List(true class java.lang.Boolean): [class java.lang.Boolean] =======> true
+testing Boolean.!=(Boolean) with receiver = true and args = List(true class java.lang.Boolean): [class java.lang.Boolean] =======> false
+testing Boolean.||(Boolean) with receiver = true and args = List(true class java.lang.Boolean): [class java.lang.Boolean] =======> true
+testing Boolean.&&(Boolean) with receiver = true and args = List(true class java.lang.Boolean): [class java.lang.Boolean] =======> true
+testing Boolean.|(Boolean) with receiver = true and args = List(true class java.lang.Boolean): [class java.lang.Boolean] =======> true
+testing Boolean.&(Boolean) with receiver = true and args = List(true class java.lang.Boolean): [class java.lang.Boolean] =======> true
+testing Boolean.^(Boolean) with receiver = true and args = List(true class java.lang.Boolean): [class java.lang.Boolean] =======> false
+============
+Unit
+it's important to print the list of Byte's members
+if some of them change (possibly, adding and/or removing magic symbols), we must update this test
+constructor Unit: ()Unit
+method !=: (x$1: Any)Boolean
+method ##: ()Int
+method ==: (x$1: Any)Boolean
+method asInstanceOf: [T0]=> T0
+method equals: (x$1: Any)Boolean
+method getClass: ()Class[Unit]
+method hashCode: ()Int
+method isInstanceOf: [T0]=> Boolean
+method toString: ()java.lang.String
diff --git a/test/files/run/reflection-valueclasses-magic.scala b/test/files/run/reflection-valueclasses-magic.scala
new file mode 100644
index 0000000..c4a26e4
--- /dev/null
+++ b/test/files/run/reflection-valueclasses-magic.scala
@@ -0,0 +1,116 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe.definitions._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.ClassTag
+
+package scala {
+ object ExceptionUtils {
+ def unwrapThrowable(ex: Throwable): Throwable = scala.reflect.runtime.ReflectionUtils.unwrapThrowable(ex)
+ }
+}
+
+object Test extends App {
+ def key(sym: Symbol) = {
+ sym match {
+ // initialize parameter symbols
+ case meth: MethodSymbol => meth.paramss.flatten.map(_.typeSignature)
+ }
+ sym + ": " + sym.typeSignature
+ }
+
+ def convert(value: Any, tpe: Type) = {
+ import scala.runtime.BoxesRunTime._
+ if (tpe =:= typeOf[Byte]) toByte(value)
+ else if (tpe =:= typeOf[Short]) toShort(value)
+ else if (tpe =:= typeOf[Char]) toCharacter(value)
+ else if (tpe =:= typeOf[Int]) toInteger(value)
+ else if (tpe =:= typeOf[Long]) toLong(value)
+ else if (tpe =:= typeOf[Float]) toFloat(value)
+ else if (tpe =:= typeOf[Double]) toDouble(value)
+ else if (tpe =:= typeOf[String]) value.toString
+ else if (tpe =:= typeOf[Boolean]) value.asInstanceOf[Boolean]
+ else throw new Exception(s"not supported: value = $value, tpe = $tpe")
+ }
+
+ def test[T: ClassTag](tpe: Type, receiver: T, method: String, args: Any*) {
+ def wrap[T](op: => T) =
+ try {
+ var result = op.asInstanceOf[AnyRef]
+ if (scala.runtime.ScalaRunTime.isArray(result))
+ result = scala.runtime.ScalaRunTime.toObjectArray(result).toList
+ println(s"[${result.getClass}] =======> $result")
+ } catch {
+ case ex: Throwable =>
+ val realex = scala.ExceptionUtils.unwrapThrowable(ex)
+ println(realex.getClass + ": " + realex.getMessage)
+ }
+ val meth = tpe.declaration(newTermName(method).encodedName.toTermName)
+ val testees = if (meth.isMethod) List(meth.asMethod) else meth.asTerm.alternatives.map(_.asMethod)
+ testees foreach (testee => {
+ val convertedArgs = args.zipWithIndex.map { case (arg, i) => convert(arg, testee.paramss.flatten.apply(i).typeSignature) }
+ print(s"testing ${tpe.typeSymbol.name}.$method(${testee.paramss.flatten.map(_.typeSignature).mkString(','.toString)}) with receiver = $receiver and args = ${convertedArgs.map(arg => arg + ' '.toString + arg.getClass).toList}: ")
+ wrap(cm.reflect(receiver).reflectMethod(testee)(convertedArgs: _*))
+ })
+ }
+ def header(tpe: Type) {
+ println(s"============\n$tpe")
+ println("it's important to print the list of Byte's members")
+ println("if some of them change (possibly, adding and/or removing magic symbols), we must update this test")
+ tpe.members.toList.sortBy(key).foreach(sym => println(key(sym)))
+ }
+
+ def testNumeric[T: ClassTag](tpe: Type, value: T) {
+ header(tpe)
+ List("toByte", "toShort", "toChar", "toInt", "toLong", "toFloat", "toDouble") foreach (meth => test(tpe, value, meth))
+ test(tpe, value, "==", 2)
+ test(tpe, value, "!=", 2)
+ test(tpe, value, "<", 2)
+ test(tpe, value, "<=", 2)
+ test(tpe, value, ">", 2)
+ test(tpe, value, ">=", 2)
+ test(tpe, value, "+", 2)
+ test(tpe, value, "-", 2)
+ test(tpe, value, "*", 2)
+ test(tpe, value, "/", 2)
+ test(tpe, value, "%", 2)
+ }
+
+ def testIntegral[T: ClassTag](tpe: Type, value: T) {
+ testNumeric(tpe, value)
+ test(tpe, value, "unary_~")
+ test(tpe, value, "unary_+")
+ test(tpe, value, "unary_-")
+ test(tpe, value, "<<", 2)
+ test(tpe, value, ">>", 2)
+ test(tpe, value, ">>>", 2)
+ test(tpe, value, "|", 2)
+ test(tpe, value, "&", 2)
+ test(tpe, value, "^", 2)
+ }
+
+ def testBoolean() {
+ header(typeOf[Boolean])
+ test(typeOf[Boolean], true, "unary_!")
+ test(typeOf[Boolean], true, "==", true)
+ test(typeOf[Boolean], true, "!=", true)
+ test(typeOf[Boolean], true, "||", true)
+ test(typeOf[Boolean], true, "&&", true)
+ test(typeOf[Boolean], true, "|", true)
+ test(typeOf[Boolean], true, "&", true)
+ test(typeOf[Boolean], true, "^", true)
+ }
+
+ def testUnit() {
+ header(typeOf[Unit])
+ }
+
+ testNumeric(typeOf[Byte], 2.toByte)
+ testNumeric(typeOf[Short], 2.toShort)
+ testNumeric(typeOf[Char], 2.toChar)
+ testNumeric(typeOf[Int], 2.toInt)
+ testNumeric(typeOf[Long], 2.toLong)
+ testNumeric(typeOf[Float], 2.toFloat)
+ testNumeric(typeOf[Double], 2.toDouble)
+ testBoolean()
+ testUnit()
+}
\ No newline at end of file
diff --git a/test/files/run/reflection-valueclasses-standard.check b/test/files/run/reflection-valueclasses-standard.check
new file mode 100644
index 0000000..643c3d0
--- /dev/null
+++ b/test/files/run/reflection-valueclasses-standard.check
@@ -0,0 +1,27 @@
+========byte========
+byte
+2
+========short========
+short
+2
+========int========
+int
+2
+========long========
+long
+2
+========float========
+float
+2.0
+========double========
+double
+2.0
+========char========
+char
+2
+========boolean========
+boolean
+true
+========void========
+void
+()
diff --git a/test/files/run/reflection-valueclasses-standard.scala b/test/files/run/reflection-valueclasses-standard.scala
new file mode 100644
index 0000000..18a3d1f
--- /dev/null
+++ b/test/files/run/reflection-valueclasses-standard.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ def test[T: ClassTag: TypeTag](x: T) = {
+ println(s"========${classTag[T].runtimeClass}========")
+ println(cm.reflect(x).reflectMethod(typeOf[T].member(newTermName("getClass")).asMethod)())
+ println(cm.reflect(x).reflectMethod(typeOf[T].member(newTermName("toString")).asMethod)())
+ }
+
+ test(2.toByte)
+ test(2.toShort)
+ test(2.toInt)
+ test(2.toLong)
+ test(2.toFloat)
+ test(2.toDouble)
+ test('2')
+ test(true)
+ test(())
+}
\ No newline at end of file
diff --git a/test/files/run/reflinit.check b/test/files/run/reflinit.check
new file mode 100644
index 0000000..a9df354
--- /dev/null
+++ b/test/files/run/reflinit.check
@@ -0,0 +1 @@
+List[Int]
diff --git a/test/files/run/reflinit.scala b/test/files/run/reflinit.scala
new file mode 100644
index 0000000..6d3ba3a
--- /dev/null
+++ b/test/files/run/reflinit.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tt2 = typeOf[List[Int]]
+ println(tt2)
+}
\ No newline at end of file
diff --git a/test/files/run/reify-aliases.check b/test/files/run/reify-aliases.check
new file mode 100644
index 0000000..aa846b9
--- /dev/null
+++ b/test/files/run/reify-aliases.check
@@ -0,0 +1 @@
+TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())
diff --git a/test/files/run/reify-aliases.scala b/test/files/run/reify-aliases.scala
new file mode 100644
index 0000000..45b1a34
--- /dev/null
+++ b/test/files/run/reify-aliases.scala
@@ -0,0 +1,5 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ println(showRaw(typeOf[String]))
+}
\ No newline at end of file
diff --git a/test/files/run/reify-repl-fail-gracefully.check b/test/files/run/reify-repl-fail-gracefully.check
new file mode 100644
index 0000000..1b0f3f2
--- /dev/null
+++ b/test/files/run/reify-repl-fail-gracefully.check
@@ -0,0 +1,21 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import language.experimental.macros
+import language.experimental.macros
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala>
+
+scala> reify
+<console>:12: error: macros cannot be partially applied
+ reify
+ ^
+
+scala>
+
+scala>
diff --git a/test/files/run/reify-repl-fail-gracefully.scala b/test/files/run/reify-repl-fail-gracefully.scala
new file mode 100644
index 0000000..ed6d6cb
--- /dev/null
+++ b/test/files/run/reify-repl-fail-gracefully.scala
@@ -0,0 +1,10 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |import language.experimental.macros
+ |import scala.reflect.runtime.universe._
+ |
+ |reify
+ """.stripMargin
+}
diff --git a/test/files/run/reify-staticXXX.check b/test/files/run/reify-staticXXX.check
new file mode 100644
index 0000000..37102b2
--- /dev/null
+++ b/test/files/run/reify-staticXXX.check
@@ -0,0 +1,24 @@
+object
+object
+class
+class
+object > object
+object > object
+object > class
+object > class
+package > object
+package > object
+package > class
+package > class
+object
+object
+class
+class
+object > object
+object > object
+object > class
+object > class
+package > object
+package > object
+package > class
+package > class
diff --git a/test/files/run/reify-staticXXX.scala b/test/files/run/reify-staticXXX.scala
new file mode 100644
index 0000000..e80157d
--- /dev/null
+++ b/test/files/run/reify-staticXXX.scala
@@ -0,0 +1,56 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object B { override def toString = "object" }
+class C { override def toString = "class" }
+
+package foo1 {
+ object B { override def toString = "package > object" }
+ class C { override def toString = "package > class" }
+}
+
+object Foo2 {
+ object B { override def toString = "object > object" }
+ class C { override def toString = "object > class" }
+}
+
+object packageless {
+ def test = {
+ println(B)
+ println(reify(B).eval)
+ println(new C)
+ println(reify(new C).eval)
+ println(Foo2.B)
+ println(reify(Foo2.B).eval)
+ println(new Foo2.C)
+ println(reify(new Foo2.C).eval)
+ println(_root_.foo1.B)
+ println(reify(_root_.foo1.B).eval)
+ println(new _root_.foo1.C)
+ println(reify(new _root_.foo1.C).eval)
+ }
+}
+
+package packageful {
+ object Test {
+ def test = {
+ println(B)
+ println(reify(B).eval)
+ println(new C)
+ println(reify(new C).eval)
+ println(Foo2.B)
+ println(reify(Foo2.B).eval)
+ println(new Foo2.C)
+ println(reify(new Foo2.C).eval)
+ println(_root_.foo1.B)
+ println(reify(_root_.foo1.B).eval)
+ println(new _root_.foo1.C)
+ println(reify(new _root_.foo1.C).eval)
+ }
+ }
+}
+
+object Test extends App {
+ packageless.test
+ packageful.Test.test
+}
diff --git a/test/files/run/reify_ann1a.check b/test/files/run/reify_ann1a.check
new file mode 100644
index 0000000..99a966f
--- /dev/null
+++ b/test/files/run/reify_ann1a.check
@@ -0,0 +1,30 @@
+{
+ @new ann(List.apply("1a")) @new ann(List.apply("1b")) class C[@new ann(List.apply("2a")) @new ann(List.apply("2b")) T >: Nothing <: Any] extends AnyRef {
+ @new ann(List.apply("3a")) @new ann(List.apply("3b")) <paramaccessor> private[this] val x: T @ann(List.apply("4a")) @ann(List.apply("4b")) = _;
+ def <init>(@new ann(List.apply("3a")) @new ann(List.apply("3b")) x: T @ann(List.apply("4a")) @ann(List.apply("4b"))) = {
+ super.<init>();
+ ()
+ };
+ @new ann(List.apply("5a")) @new ann(List.apply("5b")) def f(x: Int @ann(List.apply("6a")) @ann(List.apply("6b"))) = {
+ @new ann(List.apply("7a")) @new ann(List.apply("7b")) val r = x.$plus(3): @ann(List.apply("8a")): @ann(List.apply("8b"));
+ val s = (4: Int @ann(List.apply("9a")) @ann(List.apply("9b")));
+ r.$plus(s)
+ }
+ };
+ ()
+}
+{
+ @ann(List.apply[String]("1a")) @ann(List.apply[String]("1b")) class C[@ann(List.apply[String]("2a")) @ann(List.apply[String]("2b")) T] extends AnyRef {
+ @ann(List.apply[String]("3a")) @ann(List.apply[String]("3b")) <paramaccessor> private[this] val x: T @ann(List.apply[String]("4b")) @ann(List.apply[String]("4a")) = _;
+ def <init>(@ann(List.apply[String]("3a")) @ann(List.apply[String]("3b")) x: T @ann(List.apply[String]("4b")) @ann(List.apply[String]("4a"))): C[T] = {
+ C.super.<init>();
+ ()
+ };
+ @ann(List.apply[String]("5a")) @ann(List.apply[String]("5b")) def f(x: Int @ann(List.apply[String]("6b")) @ann(List.apply[String]("6a"))): Int = {
+ @ann(List.apply[String]("7a")) @ann(List.apply[String]("7b")) val r: Int @ann(List.apply[String]("8b")) @ann(List.apply[String]("8a")) = ((x.+(3): Int @ann(List.apply[String]("8a"))): Int @ann(List.apply[String]("8b")) @ann(List.apply[String]("8a")));
+ val s: Int @ann(List.apply[String]("9b")) @ann(List.apply[String]("9a")) = (4: Int @ann(List.apply[String]("9b")) @ann(List.apply[String]("9a")));
+ r.+(s)
+ }
+ };
+ ()
+}
diff --git a/test/files/run/reify_ann1a.scala b/test/files/run/reify_ann1a.scala
new file mode 100644
index 0000000..c23048e
--- /dev/null
+++ b/test/files/run/reify_ann1a.scala
@@ -0,0 +1,28 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+class ann(bar: List[String]) extends annotation.StaticAnnotation
+
+object Test extends App {
+ // test 1: reify
+ val tree = reify{
+ @ann(bar=List("1a")) @ann(bar=List("1b")) class C[@ann(bar=List("2a")) @ann(bar=List("2b")) T](@ann(bar=List("3a")) @ann(bar=List("3b")) x: T @ann(bar=List("4a")) @ann(bar=List("4b"))) {
+ @ann(bar=List("5a")) @ann(bar=List("5b")) def f(x: Int @ann(bar=List("6a")) @ann(bar=List("6b"))) = {
+ @ann(bar=List("7a")) @ann(bar=List("7b")) val r = (x + 3): @ann(bar=List("8a")) @ann(bar=List("8b"))
+ val s = 4: Int @ann(bar=List("9a")) @ann(bar=List("9b"))
+ r + s
+ }
+ }
+ }.tree
+ println(tree.toString)
+
+ // test 2: import and typecheck
+ val toolbox = cm.mkToolBox()
+ val ttree = toolbox.typeCheck(tree)
+ println(ttree.toString)
+
+ // test 3: import and compile
+ toolbox.eval(tree)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_ann1b.check b/test/files/run/reify_ann1b.check
new file mode 100644
index 0000000..6a5f32a
--- /dev/null
+++ b/test/files/run/reify_ann1b.check
@@ -0,0 +1,30 @@
+{
+ @new ann(bar = "1a") @new ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T >: Nothing <: Any] extends AnyRef {
+ @new ann(bar = "3a") @new ann(bar = "3b") <paramaccessor> private[this] val x: T @ann(bar = "4a") @ann(bar = "4b") = _;
+ def <init>(@new ann(bar = "3a") @new ann(bar = "3b") x: T @ann(bar = "4a") @ann(bar = "4b")) = {
+ super.<init>();
+ ()
+ };
+ @new ann(bar = "5a") @new ann(bar = "5b") def f(x: Int @ann(bar = "6a") @ann(bar = "6b")) = {
+ @new ann(bar = "7a") @new ann(bar = "7b") val r = x.$plus(3): @ann(bar = "8a"): @ann(bar = "8b");
+ val s = (4: Int @ann(bar = "9a") @ann(bar = "9b"));
+ r.$plus(s)
+ }
+ };
+ ()
+}
+{
+ @ann(bar = "1a") @ann(bar = "1b") class C[@ann(bar = "2a") @ann(bar = "2b") T] extends AnyRef {
+ @ann(bar = "3a") @ann(bar = "3b") <paramaccessor> private[this] val x: T @ann(bar = "4b") @ann(bar = "4a") = _;
+ def <init>(@ann(bar = "3a") @ann(bar = "3b") x: T @ann(bar = "4b") @ann(bar = "4a")): C[T] = {
+ C.super.<init>();
+ ()
+ };
+ @ann(bar = "5a") @ann(bar = "5b") def f(x: Int @ann(bar = "6b") @ann(bar = "6a")): Int = {
+ @ann(bar = "7a") @ann(bar = "7b") val r: Int @ann(bar = "8b") @ann(bar = "8a") = ((x.+(3): Int @ann(bar = "8a")): Int @ann(bar = "8b") @ann(bar = "8a"));
+ val s: Int @ann(bar = "9b") @ann(bar = "9a") = (4: Int @ann(bar = "9b") @ann(bar = "9a"));
+ r.+(s)
+ }
+ };
+ ()
+}
diff --git a/test/files/run/reify_ann1b.scala b/test/files/run/reify_ann1b.scala
new file mode 100644
index 0000000..29ce602
--- /dev/null
+++ b/test/files/run/reify_ann1b.scala
@@ -0,0 +1,28 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+class ann(bar: String) extends annotation.ClassfileAnnotation
+
+object Test extends App {
+ // test 1: reify
+ val tree = reify{
+ @ann(bar="1a") @ann(bar="1b") class C[@ann(bar="2a") @ann(bar="2b") T](@ann(bar="3a") @ann(bar="3b") x: T @ann(bar="4a") @ann(bar="4b")) {
+ @ann(bar="5a") @ann(bar="5b") def f(x: Int @ann(bar="6a") @ann(bar="6b")) = {
+ @ann(bar="7a") @ann(bar="7b") val r = (x + 3): @ann(bar="8a") @ann(bar="8b")
+ val s = 4: Int @ann(bar="9a") @ann(bar="9b")
+ r + s
+ }
+ }
+ }.tree
+ println(tree.toString)
+
+ // test 2: import and typecheck
+ val toolbox = cm.mkToolBox()
+ val ttree = toolbox.typeCheck(tree)
+ println(ttree.toString)
+
+ // test 3: import and compile
+ toolbox.eval(tree)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_ann2a.check b/test/files/run/reify_ann2a.check
new file mode 100644
index 0000000..ccbcb4c
--- /dev/null
+++ b/test/files/run/reify_ann2a.check
@@ -0,0 +1,44 @@
+{
+ class ann extends StaticAnnotation {
+ <paramaccessor> private[this] val bar: `package`.List[Predef.String] = _;
+ def <init>(bar: `package`.List[Predef.String]) = {
+ super.<init>();
+ ()
+ }
+ };
+ @new ann(List.apply("1a")) @new ann(List.apply("1b")) class C[@new ann(List.apply("2a")) @new ann(List.apply("2b")) T >: Nothing <: Any] extends AnyRef {
+ @new ann(List.apply("3a")) @new ann(List.apply("3b")) <paramaccessor> private[this] val x: T @ann(List.apply("4a")) @ann(List.apply("4b")) = _;
+ def <init>(@new ann(List.apply("3a")) @new ann(List.apply("3b")) x: T @ann(List.apply("4a")) @ann(List.apply("4b"))) = {
+ super.<init>();
+ ()
+ };
+ @new ann(List.apply("5a")) @new ann(List.apply("5b")) def f(x: Int @ann(List.apply("6a")) @ann(List.apply("6b"))) = {
+ @new ann(List.apply("7a")) @new ann(List.apply("7b")) val r = x.$plus(3): @ann(List.apply("8a")): @ann(List.apply("8b"));
+ val s = (4: Int @ann(List.apply("9a")) @ann(List.apply("9b")));
+ r.$plus(s)
+ }
+ };
+ ()
+}
+{
+ class ann extends scala.annotation.Annotation with scala.annotation.StaticAnnotation {
+ <paramaccessor> private[this] val bar: List[String] = _;
+ def <init>(bar: List[String]): ann = {
+ ann.super.<init>();
+ ()
+ }
+ };
+ @ann(List.apply[String]("1a")) @ann(List.apply[String]("1b")) class C[@ann(List.apply[String]("2a")) @ann(List.apply[String]("2b")) T] extends AnyRef {
+ @ann(List.apply[String]("3a")) @ann(List.apply[String]("3b")) <paramaccessor> private[this] val x: T @ann(List.apply[String]("4b")) @ann(List.apply[String]("4a")) = _;
+ def <init>(@ann(List.apply[String]("3a")) @ann(List.apply[String]("3b")) x: T @ann(List.apply[String]("4b")) @ann(List.apply[String]("4a"))): C[T] = {
+ C.super.<init>();
+ ()
+ };
+ @ann(List.apply[String]("5a")) @ann(List.apply[String]("5b")) def f(x: Int @ann(List.apply[String]("6b")) @ann(List.apply[String]("6a"))): Int = {
+ @ann(List.apply[String]("7a")) @ann(List.apply[String]("7b")) val r: Int @ann(List.apply[String]("8b")) @ann(List.apply[String]("8a")) = ((x.+(3): Int @ann(List.apply[String]("8a"))): Int @ann(List.apply[String]("8b")) @ann(List.apply[String]("8a")));
+ val s: Int @ann(List.apply[String]("9b")) @ann(List.apply[String]("9a")) = (4: Int @ann(List.apply[String]("9b")) @ann(List.apply[String]("9a")));
+ r.+(s)
+ }
+ };
+ ()
+}
diff --git a/test/files/run/reify_ann2a.scala b/test/files/run/reify_ann2a.scala
new file mode 100644
index 0000000..53423e1
--- /dev/null
+++ b/test/files/run/reify_ann2a.scala
@@ -0,0 +1,28 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ // test 1: reify
+ val tree = reify{
+ class ann(bar: List[String]) extends annotation.StaticAnnotation
+
+ @ann(bar=List("1a")) @ann(bar=List("1b")) class C[@ann(bar=List("2a")) @ann(bar=List("2b")) T](@ann(bar=List("3a")) @ann(bar=List("3b")) x: T @ann(bar=List("4a")) @ann(bar=List("4b"))) {
+ @ann(bar=List("5a")) @ann(bar=List("5b")) def f(x: Int @ann(bar=List("6a")) @ann(bar=List("6b"))) = {
+ @ann(bar=List("7a")) @ann(bar=List("7b")) val r = (x + 3): @ann(bar=List("8a")) @ann(bar=List("8b"))
+ val s = 4: Int @ann(bar=List("9a")) @ann(bar=List("9b"))
+ r + s
+ }
+ }
+ }.tree
+ println(tree.toString)
+
+ // test 2: import and typecheck
+ val toolbox = cm.mkToolBox()
+ val ttree = toolbox.typeCheck(tree)
+ println(ttree.toString)
+
+ // test 3: import and compile
+ toolbox.eval(tree)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_ann3.check b/test/files/run/reify_ann3.check
new file mode 100644
index 0000000..8caceb2
--- /dev/null
+++ b/test/files/run/reify_ann3.check
@@ -0,0 +1,21 @@
+{
+ class Tree[A >: Nothing <: Any, B >: Nothing <: Any] extends AnyRef {
+ @new inline @getter() final <paramaccessor> val key: A = _;
+ def <init>(key: A) = {
+ super.<init>();
+ ()
+ }
+ };
+ ()
+}
+{
+ class Tree[A, B] extends AnyRef {
+ final <paramaccessor> private[this] val key: A = _;
+ @inline @scala.annotation.meta.getter final <stable> <accessor> <paramaccessor> def key: A = Tree.this.key;
+ def <init>(key: A): Tree[A,B] = {
+ Tree.super.<init>();
+ ()
+ }
+ };
+ ()
+}
diff --git a/test/files/run/reify_ann3.scala b/test/files/run/reify_ann3.scala
new file mode 100644
index 0000000..4162fa5
--- /dev/null
+++ b/test/files/run/reify_ann3.scala
@@ -0,0 +1,22 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import scala.annotation._
+import scala.annotation.meta._
+
+object Test extends App {
+ // test 1: reify
+ val tree = reify{
+ class Tree[A, +B](@(inline @getter) final val key: A)
+ }.tree
+ println(tree.toString)
+
+ // test 2: import and typecheck
+ val toolbox = cm.mkToolBox()
+ val ttree = toolbox.typeCheck(tree)
+ println(ttree.toString)
+
+ // test 3: import and compile
+ toolbox.eval(tree)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_ann4.check b/test/files/run/reify_ann4.check
new file mode 100644
index 0000000..8bf5fe3
--- /dev/null
+++ b/test/files/run/reify_ann4.check
@@ -0,0 +1,32 @@
+{
+ class D extends StaticAnnotation {
+ def <init>() = {
+ super.<init>();
+ ()
+ }
+ };
+ class C extends AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ }
+ };
+ val c1 = new C @D();
+ ()
+}
+{
+ class D extends scala.annotation.Annotation with scala.annotation.StaticAnnotation {
+ def <init>(): D = {
+ D.super.<init>();
+ ()
+ }
+ };
+ class C extends AnyRef {
+ def <init>(): C = {
+ C.super.<init>();
+ ()
+ }
+ };
+ val c1: C = new C @D();
+ ()
+}
diff --git a/test/files/run/reify_ann4.scala b/test/files/run/reify_ann4.scala
new file mode 100644
index 0000000..0aedb77
--- /dev/null
+++ b/test/files/run/reify_ann4.scala
@@ -0,0 +1,26 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import scala.annotation._
+import scala.annotation.meta._
+
+object Test extends App {
+ // test 1: reify
+ val tree = reify{
+ class D extends StaticAnnotation
+ class C
+ val c1 = new C @D
+ //val c2 = (new C) @D // illegal syntax
+ //val c3 = c1 @D // illegal syntax
+ }.tree
+ println(tree.toString)
+
+ // test 2: import and typecheck
+ val toolbox = cm.mkToolBox()
+ val ttree = toolbox.typeCheck(tree)
+ println(ttree.toString)
+
+ // test 3: import and compile
+ toolbox.eval(tree)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_ann5.check b/test/files/run/reify_ann5.check
new file mode 100644
index 0000000..1ec0457
--- /dev/null
+++ b/test/files/run/reify_ann5.check
@@ -0,0 +1,22 @@
+{
+ class C extends AnyRef {
+ @new inline @beanGetter() @new BeanProperty() <paramaccessor> val x: Int = _;
+ def <init>(x: Int) = {
+ super.<init>();
+ ()
+ }
+ };
+ ()
+}
+{
+ class C extends AnyRef {
+ @scala.beans.BeanProperty <paramaccessor> private[this] val x: Int = _;
+ <stable> <accessor> <paramaccessor> def x: Int = C.this.x;
+ def <init>(x: Int): C = {
+ C.super.<init>();
+ ()
+ };
+ @inline @scala.annotation.meta.beanGetter def getX(): Int = C.this.x
+ };
+ ()
+}
diff --git a/test/files/run/reify_ann5.scala b/test/files/run/reify_ann5.scala
new file mode 100644
index 0000000..d27be3b
--- /dev/null
+++ b/test/files/run/reify_ann5.scala
@@ -0,0 +1,23 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import scala.annotation._
+import scala.annotation.meta._
+import scala.beans._
+
+object Test extends App {
+ // test 1: reify
+ val tree = reify{
+ class C(@BeanProperty @(inline @beanGetter) val x: Int)
+ }.tree
+ println(tree.toString)
+
+ // test 2: import and typecheck
+ val toolbox = cm.mkToolBox()
+ val ttree = toolbox.typeCheck(tree)
+ println(ttree.toString)
+
+ // test 3: import and compile
+ toolbox.eval(tree)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_anonymous.check b/test/files/run/reify_anonymous.check
new file mode 100644
index 0000000..b8626c4
--- /dev/null
+++ b/test/files/run/reify_anonymous.check
@@ -0,0 +1 @@
+4
diff --git a/test/files/run/reify_anonymous.scala b/test/files/run/reify_anonymous.scala
new file mode 100644
index 0000000..d743014
--- /dev/null
+++ b/test/files/run/reify_anonymous.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ println(new {def x = 2; def y = x * x}.y)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_classfileann_a.check b/test/files/run/reify_classfileann_a.check
new file mode 100644
index 0000000..0c91902
--- /dev/null
+++ b/test/files/run/reify_classfileann_a.check
@@ -0,0 +1,18 @@
+{
+ @new ann(bar = "1", quux = Array("2", "3"), baz = new ann(bar = "4")) class C extends AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ }
+ };
+ ()
+}
+{
+ @ann(bar = "1", quux = ["2", "3"], baz = ann(bar = "4")) class C extends AnyRef {
+ def <init>(): C = {
+ C.super.<init>();
+ ()
+ }
+ };
+ ()
+}
diff --git a/test/files/run/reify_classfileann_a.scala b/test/files/run/reify_classfileann_a.scala
new file mode 100644
index 0000000..1d51688
--- /dev/null
+++ b/test/files/run/reify_classfileann_a.scala
@@ -0,0 +1,22 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+class ann(bar: String, quux: Array[String] = Array(), baz: ann = null) extends annotation.ClassfileAnnotation
+
+object Test extends App {
+ // test 1: reify
+ val tree = reify{
+ @ann(bar="1", quux=Array("2", "3"), baz = new ann(bar = "4")) class C
+ }.tree
+ println(tree.toString)
+
+ // test 2: import and typecheck
+ val toolbox = cm.mkToolBox()
+ val ttree = toolbox.typeCheck(tree)
+ println(ttree.toString)
+
+ // test 3: import and compile
+ toolbox.eval(tree)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_classfileann_b.check b/test/files/run/reify_classfileann_b.check
new file mode 100644
index 0000000..c204fa8
--- /dev/null
+++ b/test/files/run/reify_classfileann_b.check
@@ -0,0 +1,20 @@
+{
+ class C extends AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ def x: Int = 2: @ann(bar = "1",quux = Array("2", "3"),baz = new ann(bar = "4"))
+ };
+ ()
+}
+{
+ class C extends AnyRef {
+ def <init>(): C = {
+ C.super.<init>();
+ ()
+ };
+ def x: Int = (2: Int(2) @ann(bar = "1", quux = ["2", "3"], baz = ann(bar = "4")))
+ };
+ ()
+}
diff --git a/test/files/run/reify_classfileann_b.scala b/test/files/run/reify_classfileann_b.scala
new file mode 100644
index 0000000..ef19e92
--- /dev/null
+++ b/test/files/run/reify_classfileann_b.scala
@@ -0,0 +1,26 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+class ann(bar: String, quux: Array[String] = Array(), baz: ann = null) extends annotation.ClassfileAnnotation
+
+object Test extends App {
+ // test 1: reify
+ val tree = reify{
+ class C {
+ def x: Int = {
+ 2: @ann(bar="1", quux=Array("2", "3"), baz = new ann(bar = "4"))
+ }
+ }
+ }.tree
+ println(tree.toString)
+
+ // test 2: import and typecheck
+ val toolbox = cm.mkToolBox()
+ val ttree = toolbox.typeCheck(tree)
+ println(ttree.toString)
+
+ // test 3: import and compile
+ toolbox.eval(tree)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_closure1.check b/test/files/run/reify_closure1.check
new file mode 100644
index 0000000..b2f7f08
--- /dev/null
+++ b/test/files/run/reify_closure1.check
@@ -0,0 +1,2 @@
+10
+10
diff --git a/test/files/run/reify_closure1.scala b/test/files/run/reify_closure1.scala
new file mode 100644
index 0000000..af24a4b
--- /dev/null
+++ b/test/files/run/reify_closure1.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ def foo[T](ys: List[T]): Int => Int = {
+ val fun = reify{(x: Int) => {
+ x
+ }}
+
+ val toolbox = cm.mkToolBox()
+ val dyn = toolbox.eval(fun.tree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(List(1, 2, 3))(10))
+ println(foo(List(1, 2, 3, 4))(10))
+}
\ No newline at end of file
diff --git a/test/files/run/reify_closure2a.check b/test/files/run/reify_closure2a.check
new file mode 100644
index 0000000..c1f3abd
--- /dev/null
+++ b/test/files/run/reify_closure2a.check
@@ -0,0 +1,2 @@
+11
+12
diff --git a/test/files/run/reify_closure2a.scala b/test/files/run/reify_closure2a.scala
new file mode 100644
index 0000000..7a2cdb5
--- /dev/null
+++ b/test/files/run/reify_closure2a.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ def foo(y: Int): Int => Int = {
+ val fun = reify{(x: Int) => {
+ x + y
+ }}
+
+ val toolbox = cm.mkToolBox()
+ val dyn = toolbox.eval(fun.tree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(1)(10))
+ println(foo(2)(10))
+}
\ No newline at end of file
diff --git a/test/files/run/reify_closure3a.check b/test/files/run/reify_closure3a.check
new file mode 100644
index 0000000..c1f3abd
--- /dev/null
+++ b/test/files/run/reify_closure3a.check
@@ -0,0 +1,2 @@
+11
+12
diff --git a/test/files/run/reify_closure3a.scala b/test/files/run/reify_closure3a.scala
new file mode 100644
index 0000000..cb17c89
--- /dev/null
+++ b/test/files/run/reify_closure3a.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ def foo(y: Int): Int => Int = {
+ def y1 = y
+
+ val fun = reify{(x: Int) => {
+ x + y1
+ }}
+
+ val toolbox = cm.mkToolBox()
+ val dyn = toolbox.eval(fun.tree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(1)(10))
+ println(foo(2)(10))
+}
\ No newline at end of file
diff --git a/test/files/run/reify_closure4a.check b/test/files/run/reify_closure4a.check
new file mode 100644
index 0000000..c1f3abd
--- /dev/null
+++ b/test/files/run/reify_closure4a.check
@@ -0,0 +1,2 @@
+11
+12
diff --git a/test/files/run/reify_closure4a.scala b/test/files/run/reify_closure4a.scala
new file mode 100644
index 0000000..23436e0
--- /dev/null
+++ b/test/files/run/reify_closure4a.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ def foo(y: Int): Int => Int = {
+ val y1 = y
+
+ val fun = reify{(x: Int) => {
+ x + y1
+ }}
+
+ val toolbox = cm.mkToolBox()
+ val dyn = toolbox.eval(fun.tree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(1)(10))
+ println(foo(2)(10))
+}
\ No newline at end of file
diff --git a/test/files/run/reify_closure5a.check b/test/files/run/reify_closure5a.check
new file mode 100644
index 0000000..df9e19c
--- /dev/null
+++ b/test/files/run/reify_closure5a.check
@@ -0,0 +1,2 @@
+13
+14
diff --git a/test/files/run/reify_closure5a.scala b/test/files/run/reify_closure5a.scala
new file mode 100644
index 0000000..6b5089a
--- /dev/null
+++ b/test/files/run/reify_closure5a.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ def foo[T: TypeTag](ys: List[T]): Int => Int = {
+ val fun = reify{(x: Int) => {
+ x + ys.length
+ }}
+
+ val toolbox = cm.mkToolBox()
+ val dyn = toolbox.eval(fun.tree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ var fun1 = foo(List(1, 2, 3))
+ println(fun1(10))
+ var fun2 = foo(List(1, 2, 3, 4))
+ println(fun2(10))
+}
\ No newline at end of file
diff --git a/test/files/run/reify_closure6.check b/test/files/run/reify_closure6.check
new file mode 100644
index 0000000..b9de4c6
--- /dev/null
+++ b/test/files/run/reify_closure6.check
@@ -0,0 +1,7 @@
+q = 1
+y = 1
+first invocation = 15
+q = 2
+y = 1
+second invocation = 17
+q after second invocation = 2
\ No newline at end of file
diff --git a/test/files/run/reify_closure6.scala b/test/files/run/reify_closure6.scala
new file mode 100644
index 0000000..cba0351
--- /dev/null
+++ b/test/files/run/reify_closure6.scala
@@ -0,0 +1,29 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ var q = 0
+ def foo[T: TypeTag](ys: List[T]): Int => Int = {
+ val z = 1
+ var y = 0
+ val fun = reify{(x: Int) => {
+ y += 1
+ q += 1
+ println("q = " + q)
+ println("y = " + y)
+ x + ys.length * z + q + y
+ }}
+
+ val toolbox = cm.mkToolBox()
+ val dyn = toolbox.eval(fun.tree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ val fun1 = foo(List(1, 2, 3))
+ println("first invocation = " + fun1(10))
+ val fun2 = foo(List(1, 2, 3, 4))
+ println("second invocation = " + fun2(10))
+ println("q after second invocation = " + q)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_closure7.check b/test/files/run/reify_closure7.check
new file mode 100644
index 0000000..bf58b52
--- /dev/null
+++ b/test/files/run/reify_closure7.check
@@ -0,0 +1,6 @@
+q = 1
+y = 1
+first invocation = 15
+q = 2
+y = 2
+second invocation = 17
diff --git a/test/files/run/reify_closure7.scala b/test/files/run/reify_closure7.scala
new file mode 100644
index 0000000..2a7ce25
--- /dev/null
+++ b/test/files/run/reify_closure7.scala
@@ -0,0 +1,33 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ var q = 0
+ var clo: Int => Int = null
+ def foo[T: TypeTag](ys: List[T]): Int => Int = {
+ val z = 1
+ var y = 0
+ val fun = reify{(x: Int) => {
+ y += 1
+ q += 1
+ println("q = " + q)
+ println("y = " + y)
+ x + ys.length * z + q + y
+ }}
+
+ if (clo == null) {
+ val toolbox = cm.mkToolBox()
+ val dyn = toolbox.eval(fun.tree)
+ clo = dyn.asInstanceOf[Int => Int]
+ }
+
+ clo
+ }
+
+ val fun1 = foo(List(1, 2, 3))
+ println("first invocation = " + fun1(10))
+ val fun2 = foo(List(1, 2, 3, 4))
+ println("second invocation = " + fun2(10))
+}
\ No newline at end of file
diff --git a/test/files/run/reify_closure8a.check b/test/files/run/reify_closure8a.check
new file mode 100644
index 0000000..9a03714
--- /dev/null
+++ b/test/files/run/reify_closure8a.check
@@ -0,0 +1 @@
+10
\ No newline at end of file
diff --git a/test/files/run/reify_closure8a.scala b/test/files/run/reify_closure8a.scala
new file mode 100644
index 0000000..f303a75
--- /dev/null
+++ b/test/files/run/reify_closure8a.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ class Foo(val y: Int) {
+ def fun = reify{y}
+ }
+
+ val toolbox = cm.mkToolBox()
+ val dyn = toolbox.eval(new Foo(10).fun.tree)
+ val foo = dyn.asInstanceOf[Int]
+ println(foo)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_closure8b.check b/test/files/run/reify_closure8b.check
new file mode 100644
index 0000000..5a7863f
--- /dev/null
+++ b/test/files/run/reify_closure8b.check
@@ -0,0 +1,3 @@
+scala.tools.reflect.ToolBoxError: reflective compilation has failed:
+
+value y is not a member of Test.Foo
diff --git a/test/files/run/reify_closure8b.scala b/test/files/run/reify_closure8b.scala
new file mode 100644
index 0000000..c693cb4
--- /dev/null
+++ b/test/files/run/reify_closure8b.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ // will fail because y is a private field
+ // reification doesn't magically make unavailable stuff available
+ class Foo(y: Int) {
+ def fun = reify{y}
+ }
+
+ try {
+ val dyn = cm.mkToolBox().eval(new Foo(10).fun.tree)
+ val foo = dyn.asInstanceOf[Int]
+ println(foo)
+ } catch {
+ case ex: Throwable =>
+ println(ex)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t4560.check b/test/files/run/reify_closures10.check
similarity index 100%
copy from test/files/run/t4560.check
copy to test/files/run/reify_closures10.check
diff --git a/test/files/run/reify_closures10.scala b/test/files/run/reify_closures10.scala
new file mode 100644
index 0000000..a2740c8
--- /dev/null
+++ b/test/files/run/reify_closures10.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val x = 2
+ val y = 3
+ val code = reify{println(x + y); x + y}
+
+ val toolbox = cm.mkToolBox()
+ println(toolbox.eval(code.tree))
+}
\ No newline at end of file
diff --git a/test/files/run/reify_complex.check b/test/files/run/reify_complex.check
new file mode 100644
index 0000000..7df35e3
--- /dev/null
+++ b/test/files/run/reify_complex.check
@@ -0,0 +1 @@
+3.0+4.0*i
diff --git a/test/files/run/reify_complex.scala b/test/files/run/reify_complex.scala
new file mode 100644
index 0000000..4abec39
--- /dev/null
+++ b/test/files/run/reify_complex.scala
@@ -0,0 +1,25 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ class Complex(val re: Double, val im: Double) {
+ def + (that: Complex) =
+ new Complex(re + that.re, im + that.im)
+ def - (that: Complex) =
+ new Complex(re - that.re, im - that.im)
+ def * (that: Complex) =
+ new Complex(re * that.re - im * that.im,
+ re * that.im + im * that.re)
+ def / (that: Complex) = {
+ val denom = that.re * that.re + that.im * that.im
+ new Complex((re * that.re + im * that.im) / denom,
+ (im * that.re - re * that.im) / denom)
+ }
+ override def toString =
+ re + (if (im < 0) "-" + (-im) else "+" + im) + "*i"
+ }
+ val x = new Complex(2, 1); val y = new Complex(1, 3)
+ println(x + y)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_copypaste1.check b/test/files/run/reify_copypaste1.check
new file mode 100644
index 0000000..b204f9c
--- /dev/null
+++ b/test/files/run/reify_copypaste1.check
@@ -0,0 +1,2 @@
+List(1, 2)
+
diff --git a/test/files/run/reify_copypaste1.scala b/test/files/run/reify_copypaste1.scala
new file mode 100644
index 0000000..c597b7a
--- /dev/null
+++ b/test/files/run/reify_copypaste1.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime._
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe.definitions._
+import scala.reflect.runtime.universe.Flag._
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val stdout = System.out
+ val output = new java.io.ByteArrayOutputStream()
+ System.setOut(new java.io.PrintStream(output))
+ val toolBox = currentMirror.mkToolBox(options = "-Yreify-copypaste")
+ val reify = Select(Select(Select(Select(Ident(ScalaPackage), newTermName("reflect")), newTermName("runtime")), newTermName("universe")), newTermName("reify"))
+ val reifee = Block(List(ValDef(Modifiers(LAZY), newTermName("x"), TypeTree(), Apply(Ident(ListModule), List(Literal(Constant(1)), Literal(Constant(2)))))), Ident(newTermName("x")))
+ toolBox.eval(Apply(reify, List(reifee)))
+ val Block(List(tpeCopypaste), exprCopypaste @ ModuleDef(_, _, Template(_, _, (_ :: stats) :+ expr))) = toolBox.parse(output.toString())
+ output.reset()
+ toolBox.eval(Block(stats, expr))
+ stdout.println(output.toString)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_copypaste2.check b/test/files/run/reify_copypaste2.check
new file mode 100644
index 0000000..f5c1076
--- /dev/null
+++ b/test/files/run/reify_copypaste2.check
@@ -0,0 +1 @@
+`package`.universe.reify(Test.this.x)
diff --git a/test/files/run/reify_copypaste2.scala b/test/files/run/reify_copypaste2.scala
new file mode 100644
index 0000000..12d08cf
--- /dev/null
+++ b/test/files/run/reify_copypaste2.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val x = 2
+ val outer = reify{reify{x}}
+ println(outer.tree)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_csv.check b/test/files/run/reify_csv.check
new file mode 100644
index 0000000..b56f4bb
--- /dev/null
+++ b/test/files/run/reify_csv.check
@@ -0,0 +1,10 @@
+List(phase name, id, description)
+record(parser,1,parse source into ASTs, perform simple desugaring)
+record(namer,2,resolve names, attach symbols to named trees)
+record(packageobjects,3,load package objects)
+record(typer,4,the meat and potatoes: type the trees)
+record(superaccessors,5,add super accessors in traits and nested classes)
+record(pickler,6,serialize symbol tables)
+record(refchecks,7,reference/override checking, translate nested objects)
+record(selectiveanf,8,)
+record(liftcode,9,reify trees)
diff --git a/test/files/run/reify_csv.scala b/test/files/run/reify_csv.scala
new file mode 100644
index 0000000..c356244
--- /dev/null
+++ b/test/files/run/reify_csv.scala
@@ -0,0 +1,36 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val csv = """
+ | phase name; id; description
+ | parser; 1; parse source into ASTs, perform simple desugaring
+ | namer; 2; resolve names, attach symbols to named trees
+ |packageobjects; 3; load package objects
+ | typer; 4; the meat and potatoes: type the trees
+ |superaccessors; 5; add super accessors in traits and nested classes
+ | pickler; 6; serialize symbol tables
+ | refchecks; 7; reference/override checking, translate nested objects
+ | selectiveanf; 8;
+ | liftcode; 9; reify trees""".stripMargin.split("\n").map{_.trim()}.drop(1).toList
+
+ val fields = csv.head.split(";").map{_.trim()}.toList
+ println(fields)
+
+ reify({
+ object Csv {
+ case class record(`phase name`: String, id: String, description: String)
+
+ object record {
+ def parse(lines: List[String]) = {
+ lines drop(1) map { line => line.split(";", -1).toList match {
+ case phase$whitespace$name :: id :: description :: _ => record(phase$whitespace$name.trim(), id.trim(), description.trim())
+ case _ => throw new Exception("format error")
+ }}
+ }
+ }
+ }
+
+ Csv.record.parse(csv) foreach println
+ }).eval
+}
diff --git a/test/files/run/reify_extendbuiltins.check b/test/files/run/reify_extendbuiltins.check
new file mode 100644
index 0000000..a48033a
--- /dev/null
+++ b/test/files/run/reify_extendbuiltins.check
@@ -0,0 +1 @@
+10! = 3628800
diff --git a/test/files/run/reify_extendbuiltins.scala b/test/files/run/reify_extendbuiltins.scala
new file mode 100644
index 0000000..a2d5465
--- /dev/null
+++ b/test/files/run/reify_extendbuiltins.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ def fact(n: Int): BigInt =
+ if (n == 0) 1 else fact(n-1) * n
+ class Factorizer(n: Int) {
+ def ! = fact(n)
+ }
+ implicit def int2fact(n: Int) = new Factorizer(n)
+
+ println("10! = " + (10!))
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/reify_for1.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/reify_for1.check
diff --git a/test/files/run/reify_for1.scala b/test/files/run/reify_for1.scala
new file mode 100644
index 0000000..e1f5347
--- /dev/null
+++ b/test/files/run/reify_for1.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ val sumOfSquares1 = (for (i <- 1 to 100; if (i % 3 == 0)) yield Math.pow(i, 2)).sum
+ val sumOfSquares2 = (1 to 100).filter(_ % 3 == 0).map(Math.pow(_, 2)).sum
+ assert(sumOfSquares1 == sumOfSquares2)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_fors_newpatmat.check b/test/files/run/reify_fors_newpatmat.check
new file mode 100644
index 0000000..eefdded
--- /dev/null
+++ b/test/files/run/reify_fors_newpatmat.check
@@ -0,0 +1,5 @@
+Persons over 20: John Richard
+divisors(34) = List(1, 2, 17, 34)
+findNums(15) = (4,1) (5,2) (6,1) (7,4) (8,3) (8,5) (9,2) (9,4) (10,1) (10,3) (10,7) (11,2) (11,6) (11,8) (12,1) (12,5) (12,7) (13,4) (13,6) (13,10) (14,3) (14,5) (14,9)
+average(List(3.5, 5.0, 4.5)) = 4.333333333333333
+scalProd(List(3.5, 5.0, 4.5), List(2.0, 1.0, 3.0)) = 25.5
diff --git a/test/files/run/reify_fors_newpatmat.scala b/test/files/run/reify_fors_newpatmat.scala
new file mode 100644
index 0000000..6bee953
--- /dev/null
+++ b/test/files/run/reify_fors_newpatmat.scala
@@ -0,0 +1,101 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ object Persons {
+ /** A list of persons. To create a list, we use Predef.List
+ * which takes a variable number of arguments and constructs
+ * a list out of them.
+ */
+ val persons = List(
+ new Person("Bob", 17),
+ new Person("John", 40),
+ new Person("Richard", 68)
+ )
+
+ /** A Person class. 'val' constructor parameters become
+ * public members of the class.
+ */
+ class Person(val name: String, val age: Int)
+
+ /** Return an iterator over persons that are older than 20.
+ */
+ def olderThan20(xs: Seq[Person]): Iterator[String] =
+ olderThan20(xs.iterator)
+
+ /** Return an iterator over persons older than 20, given
+ * an iterator over persons.
+ */
+ def olderThan20(xs: Iterator[Person]): Iterator[String] = {
+
+ // The first expression is called a 'generator' and makes
+ // 'p' take values from 'xs'. The second expression is
+ // called a 'filter' and it is a boolean expression which
+ // selects only persons older than 20. There can be more than
+ // one generator and filter. The 'yield' expression is evaluated
+ // for each 'p' which satisfies the filters and used to assemble
+ // the resulting iterator
+ for (p <- xs if p.age > 20) yield p.name
+ }
+ }
+
+ /** Some functions over lists of numbers which demonstrate
+ * the use of for comprehensions.
+ */
+ object Numeric {
+
+ /** Return the divisors of n. */
+ def divisors(n: Int): List[Int] =
+ for (i <- List.range(1, n+1) if n % i == 0) yield i
+
+ /** Is 'n' a prime number? */
+ def isPrime(n: Int) = divisors(n).length == 2
+
+ /** Return pairs of numbers whose sum is prime. */
+ def findNums(n: Int): Iterable[(Int, Int)] = {
+
+ // a for comprehension using two generators
+ for (i <- 1 until n;
+ j <- 1 until (i-1);
+ if isPrime(i + j)) yield (i, j)
+ }
+
+ /** Return the sum of the elements of 'xs'. */
+ def sum(xs: List[Double]): Double =
+ xs.foldLeft(0.0) { (x, y) => x + y }
+
+ /** Return the sum of pairwise product of the two lists. */
+ def scalProd(xs: List[Double], ys: List[Double]) =
+ sum(for((x, y) <- xs zip ys) yield x * y);
+
+ /** Remove duplicate elements in 'xs'. */
+ def removeDuplicates[A](xs: List[A]): List[A] =
+ if (xs.isEmpty)
+ xs
+ else
+ xs.head :: removeDuplicates(for (x <- xs.tail if x != xs.head) yield x)
+ }
+
+ // import all members of object 'persons' in the current scope
+ import Persons._
+
+ print("Persons over 20:")
+ olderThan20(persons) foreach { x => print(" " + x) }
+ println
+
+ import Numeric._
+
+ println("divisors(34) = " + divisors(34))
+
+ print("findNums(15) =")
+ findNums(15) foreach { x => print(" " + x) }
+ println
+
+ val xs = List(3.5, 5.0, 4.5)
+ println("average(" + xs + ") = " + sum(xs) / xs.length)
+
+ val ys = List(2.0, 1.0, 3.0)
+ println("scalProd(" + xs + ", " + ys +") = " + scalProd(xs, ys))
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_fors_oldpatmat.check b/test/files/run/reify_fors_oldpatmat.check
new file mode 100644
index 0000000..eefdded
--- /dev/null
+++ b/test/files/run/reify_fors_oldpatmat.check
@@ -0,0 +1,5 @@
+Persons over 20: John Richard
+divisors(34) = List(1, 2, 17, 34)
+findNums(15) = (4,1) (5,2) (6,1) (7,4) (8,3) (8,5) (9,2) (9,4) (10,1) (10,3) (10,7) (11,2) (11,6) (11,8) (12,1) (12,5) (12,7) (13,4) (13,6) (13,10) (14,3) (14,5) (14,9)
+average(List(3.5, 5.0, 4.5)) = 4.333333333333333
+scalProd(List(3.5, 5.0, 4.5), List(2.0, 1.0, 3.0)) = 25.5
diff --git a/test/files/jvm/bug680.check b/test/files/run/reify_fors_oldpatmat.flags
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/reify_fors_oldpatmat.flags
diff --git a/test/files/run/reify_fors_oldpatmat.scala b/test/files/run/reify_fors_oldpatmat.scala
new file mode 100644
index 0000000..6bee953
--- /dev/null
+++ b/test/files/run/reify_fors_oldpatmat.scala
@@ -0,0 +1,101 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ object Persons {
+ /** A list of persons. To create a list, we use Predef.List
+ * which takes a variable number of arguments and constructs
+ * a list out of them.
+ */
+ val persons = List(
+ new Person("Bob", 17),
+ new Person("John", 40),
+ new Person("Richard", 68)
+ )
+
+ /** A Person class. 'val' constructor parameters become
+ * public members of the class.
+ */
+ class Person(val name: String, val age: Int)
+
+ /** Return an iterator over persons that are older than 20.
+ */
+ def olderThan20(xs: Seq[Person]): Iterator[String] =
+ olderThan20(xs.iterator)
+
+ /** Return an iterator over persons older than 20, given
+ * an iterator over persons.
+ */
+ def olderThan20(xs: Iterator[Person]): Iterator[String] = {
+
+ // The first expression is called a 'generator' and makes
+ // 'p' take values from 'xs'. The second expression is
+ // called a 'filter' and it is a boolean expression which
+ // selects only persons older than 20. There can be more than
+ // one generator and filter. The 'yield' expression is evaluated
+ // for each 'p' which satisfies the filters and used to assemble
+ // the resulting iterator
+ for (p <- xs if p.age > 20) yield p.name
+ }
+ }
+
+ /** Some functions over lists of numbers which demonstrate
+ * the use of for comprehensions.
+ */
+ object Numeric {
+
+ /** Return the divisors of n. */
+ def divisors(n: Int): List[Int] =
+ for (i <- List.range(1, n+1) if n % i == 0) yield i
+
+ /** Is 'n' a prime number? */
+ def isPrime(n: Int) = divisors(n).length == 2
+
+ /** Return pairs of numbers whose sum is prime. */
+ def findNums(n: Int): Iterable[(Int, Int)] = {
+
+ // a for comprehension using two generators
+ for (i <- 1 until n;
+ j <- 1 until (i-1);
+ if isPrime(i + j)) yield (i, j)
+ }
+
+ /** Return the sum of the elements of 'xs'. */
+ def sum(xs: List[Double]): Double =
+ xs.foldLeft(0.0) { (x, y) => x + y }
+
+ /** Return the sum of pairwise product of the two lists. */
+ def scalProd(xs: List[Double], ys: List[Double]) =
+ sum(for((x, y) <- xs zip ys) yield x * y);
+
+ /** Remove duplicate elements in 'xs'. */
+ def removeDuplicates[A](xs: List[A]): List[A] =
+ if (xs.isEmpty)
+ xs
+ else
+ xs.head :: removeDuplicates(for (x <- xs.tail if x != xs.head) yield x)
+ }
+
+ // import all members of object 'persons' in the current scope
+ import Persons._
+
+ print("Persons over 20:")
+ olderThan20(persons) foreach { x => print(" " + x) }
+ println
+
+ import Numeric._
+
+ println("divisors(34) = " + divisors(34))
+
+ print("findNums(15) =")
+ findNums(15) foreach { x => print(" " + x) }
+ println
+
+ val xs = List(3.5, 5.0, 4.5)
+ println("average(" + xs + ") = " + sum(xs) / xs.length)
+
+ val ys = List(2.0, 1.0, 3.0)
+ println("scalProd(" + xs + ", " + ys +") = " + scalProd(xs, ys))
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_generic.check b/test/files/run/reify_generic.check
new file mode 100644
index 0000000..b8626c4
--- /dev/null
+++ b/test/files/run/reify_generic.check
@@ -0,0 +1 @@
+4
diff --git a/test/files/run/reify_generic.scala b/test/files/run/reify_generic.scala
new file mode 100644
index 0000000..7baffac
--- /dev/null
+++ b/test/files/run/reify_generic.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ val product = List(1, 2, 3).head * List[Any](4, 2, 0).head.asInstanceOf[Int]
+ println(product)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_generic2.check b/test/files/run/reify_generic2.check
new file mode 100644
index 0000000..b8626c4
--- /dev/null
+++ b/test/files/run/reify_generic2.check
@@ -0,0 +1 @@
+4
diff --git a/test/files/run/reify_generic2.scala b/test/files/run/reify_generic2.scala
new file mode 100644
index 0000000..36ab61e
--- /dev/null
+++ b/test/files/run/reify_generic2.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ class C
+ val product = List(new C, new C).length * List[C](new C, new C).length
+ println(product)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_getter.check b/test/files/run/reify_getter.check
new file mode 100644
index 0000000..5ef4ff4
--- /dev/null
+++ b/test/files/run/reify_getter.check
@@ -0,0 +1 @@
+evaluated = 2
diff --git a/test/files/run/reify_getter.scala b/test/files/run/reify_getter.scala
new file mode 100644
index 0000000..cb04ddf
--- /dev/null
+++ b/test/files/run/reify_getter.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = reify {
+ class C {
+ val x = 2
+ }
+
+ new C().x
+ }
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_inheritance.check b/test/files/run/reify_inheritance.check
new file mode 100644
index 0000000..25bf17f
--- /dev/null
+++ b/test/files/run/reify_inheritance.check
@@ -0,0 +1 @@
+18
\ No newline at end of file
diff --git a/test/files/run/reify_inheritance.scala b/test/files/run/reify_inheritance.scala
new file mode 100644
index 0000000..c732664
--- /dev/null
+++ b/test/files/run/reify_inheritance.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ class C {
+ def x = 2
+ def y = x * x
+ }
+
+ class D extends C {
+ override def x = 3
+ }
+
+ println(new D().y * new C().x)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_inner1.check b/test/files/run/reify_inner1.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/reify_inner1.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/reify_inner1.scala b/test/files/run/reify_inner1.scala
new file mode 100644
index 0000000..8da338e
--- /dev/null
+++ b/test/files/run/reify_inner1.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ class C {
+ class D {
+ val x = 2
+ }
+ }
+
+ val outer = new C()
+ val inner = new outer.D()
+ println(inner.x)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_inner2.check b/test/files/run/reify_inner2.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/reify_inner2.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/reify_inner2.scala b/test/files/run/reify_inner2.scala
new file mode 100644
index 0000000..f82eff8
--- /dev/null
+++ b/test/files/run/reify_inner2.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ class C {
+ object D {
+ val x = 2
+ }
+ }
+
+ val outer = new C()
+ val inner = outer.D
+ println(inner.x)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_inner3.check b/test/files/run/reify_inner3.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/reify_inner3.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/reify_inner3.scala b/test/files/run/reify_inner3.scala
new file mode 100644
index 0000000..72f8d9a
--- /dev/null
+++ b/test/files/run/reify_inner3.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ object C {
+ class D {
+ val x = 2
+ }
+ }
+
+ val outer = C
+ val inner = new outer.D
+ println(inner.x)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_inner4.check b/test/files/run/reify_inner4.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/reify_inner4.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/reify_inner4.scala b/test/files/run/reify_inner4.scala
new file mode 100644
index 0000000..ecbbf14
--- /dev/null
+++ b/test/files/run/reify_inner4.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ object C {
+ object D {
+ val x = 2
+ }
+ }
+
+ val outer = C
+ val inner = outer.D
+ println(inner.x)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_lazyevaluation.check b/test/files/run/reify_lazyevaluation.check
new file mode 100644
index 0000000..1c7f96c
--- /dev/null
+++ b/test/files/run/reify_lazyevaluation.check
@@ -0,0 +1,8 @@
+s = Susp(?)
+evaluating...
+s() = 3
+s = Susp(3)
+2 + s = 5
+sl2 = Susp(?)
+sl2() = Some(3)
+sl2 = Susp(Some(3))
diff --git a/test/files/run/reify_lazyevaluation.scala b/test/files/run/reify_lazyevaluation.scala
new file mode 100644
index 0000000..5b310d9
--- /dev/null
+++ b/test/files/run/reify_lazyevaluation.scala
@@ -0,0 +1,59 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ object lazyLib {
+
+ /** Delay the evaluation of an expression until it is needed. */
+ def delay[A](value: => A): Susp[A] = new SuspImpl[A](value)
+
+ /** Get the value of a delayed expression. */
+ implicit def force[A](s: Susp[A]): A = s()
+
+ /**
+ * Data type of suspended computations. (The name froms from ML.)
+ */
+ abstract class Susp[+A] extends Function0[A]
+
+ /**
+ * Implementation of suspended computations, separated from the
+ * abstract class so that the type parameter can be invariant.
+ */
+ class SuspImpl[A](lazyValue: => A) extends Susp[A] {
+ private var maybeValue: Option[A] = None
+
+ override def apply() = maybeValue match {
+ case None =>
+ val value = lazyValue
+ maybeValue = Some(value)
+ value
+ case Some(value) =>
+ value
+ }
+
+ override def toString() = maybeValue match {
+ case None => "Susp(?)"
+ case Some(value) => "Susp(" + value + ")"
+ }
+ }
+ }
+
+ import lazyLib._
+
+ val s: Susp[Int] = delay { println("evaluating..."); 3 }
+
+ println("s = " + s) // show that s is unevaluated
+ println("s() = " + s()) // evaluate s
+ println("s = " + s) // show that the value is saved
+ println("2 + s = " + (2 + s)) // implicit call to force()
+
+ val sl = delay { Some(3) }
+ val sl1: Susp[Some[Int]] = sl
+ val sl2: Susp[Option[Int]] = sl1 // the type is covariant
+
+ println("sl2 = " + sl2)
+ println("sl2() = " + sl2())
+ println("sl2 = " + sl2)
+ }.eval
+}
diff --git a/test/files/run/reify_lazyunit.check b/test/files/run/reify_lazyunit.check
new file mode 100644
index 0000000..1b46c90
--- /dev/null
+++ b/test/files/run/reify_lazyunit.check
@@ -0,0 +1,3 @@
+12
+one
+two
diff --git a/test/files/run/reify_lazyunit.scala b/test/files/run/reify_lazyunit.scala
new file mode 100644
index 0000000..78b00cd
--- /dev/null
+++ b/test/files/run/reify_lazyunit.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ lazy val x = { 0; println("12")}
+ x
+ println("one")
+ x
+ println("two")
+ }.eval
+}
+
diff --git a/test/files/run/reify_magicsymbols.check b/test/files/run/reify_magicsymbols.check
new file mode 100644
index 0000000..c9d892d
--- /dev/null
+++ b/test/files/run/reify_magicsymbols.check
@@ -0,0 +1,13 @@
+Any
+AnyVal
+AnyRef
+Null
+Nothing
+List[Any]
+List[AnyVal]
+List[AnyRef]
+List[Null]
+List[Nothing]
+AnyRef{def foo(x: Int): Int}
+Int* => Unit
+(=> Int) => Unit
diff --git a/test/files/run/reify_magicsymbols.scala b/test/files/run/reify_magicsymbols.scala
new file mode 100644
index 0000000..256ecbe
--- /dev/null
+++ b/test/files/run/reify_magicsymbols.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ println(typeOf[Any])
+ println(typeOf[AnyVal])
+ println(typeOf[AnyRef])
+ println(typeOf[Null])
+ println(typeOf[Nothing])
+ println(typeOf[List[Any]])
+ println(typeOf[List[AnyVal]])
+ println(typeOf[List[AnyRef]])
+ println(typeOf[List[Null]])
+ println(typeOf[List[Nothing]])
+ println(typeOf[{def foo(x: Int): Int}])
+ println(typeOf[(Int*) => Unit])
+ println(typeOf[(=> Int) => Unit])
+}
\ No newline at end of file
diff --git a/test/files/run/reify_maps_newpatmat.check b/test/files/run/reify_maps_newpatmat.check
new file mode 100644
index 0000000..08cbbb1
--- /dev/null
+++ b/test/files/run/reify_maps_newpatmat.check
@@ -0,0 +1,4 @@
+red has code: 16711680
+Unknown color: green
+Unknown color: blue
+turquoise has code: 65535
diff --git a/test/files/run/reify_maps_newpatmat.scala b/test/files/run/reify_maps_newpatmat.scala
new file mode 100644
index 0000000..b538355
--- /dev/null
+++ b/test/files/run/reify_maps_newpatmat.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ val colors = Map("red" -> 0xFF0000,
+ "turquoise" -> 0x00FFFF,
+ "black" -> 0x000000,
+ "orange" -> 0xFF8040,
+ "brown" -> 0x804000)
+ for (name <- List("red", "green", "blue", "turquoise")) println(
+ colors.get(name) match {
+ case Some(code) =>
+ name + " has code: " + code
+ case None =>
+ "Unknown color: " + name
+ }
+ )
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_maps_oldpatmat.check b/test/files/run/reify_maps_oldpatmat.check
new file mode 100644
index 0000000..08cbbb1
--- /dev/null
+++ b/test/files/run/reify_maps_oldpatmat.check
@@ -0,0 +1,4 @@
+red has code: 16711680
+Unknown color: green
+Unknown color: blue
+turquoise has code: 65535
diff --git a/test/files/jvm/bug680.check b/test/files/run/reify_maps_oldpatmat.flags
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/reify_maps_oldpatmat.flags
diff --git a/test/files/run/reify_maps_oldpatmat.scala b/test/files/run/reify_maps_oldpatmat.scala
new file mode 100644
index 0000000..b538355
--- /dev/null
+++ b/test/files/run/reify_maps_oldpatmat.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ val colors = Map("red" -> 0xFF0000,
+ "turquoise" -> 0x00FFFF,
+ "black" -> 0x000000,
+ "orange" -> 0xFF8040,
+ "brown" -> 0x804000)
+ for (name <- List("red", "green", "blue", "turquoise")) println(
+ colors.get(name) match {
+ case Some(code) =>
+ name + " has code: " + code
+ case None =>
+ "Unknown color: " + name
+ }
+ )
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_metalevel_breach_+0_refers_to_1.check b/test/files/run/reify_metalevel_breach_+0_refers_to_1.check
new file mode 100644
index 0000000..5bfed17
--- /dev/null
+++ b/test/files/run/reify_metalevel_breach_+0_refers_to_1.check
@@ -0,0 +1 @@
+evaluated = 2
\ No newline at end of file
diff --git a/test/files/run/reify_metalevel_breach_+0_refers_to_1.scala b/test/files/run/reify_metalevel_breach_+0_refers_to_1.scala
new file mode 100644
index 0000000..76f935e
--- /dev/null
+++ b/test/files/run/reify_metalevel_breach_+0_refers_to_1.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val code = reify{
+ val x = 2
+ val inner = reify{x}
+// was: inner.splice
+ inner.eval
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_metalevel_breach_-1_refers_to_0_a.check b/test/files/run/reify_metalevel_breach_-1_refers_to_0_a.check
new file mode 100644
index 0000000..5bfed17
--- /dev/null
+++ b/test/files/run/reify_metalevel_breach_-1_refers_to_0_a.check
@@ -0,0 +1 @@
+evaluated = 2
\ No newline at end of file
diff --git a/test/files/run/reify_metalevel_breach_-1_refers_to_0_a.scala b/test/files/run/reify_metalevel_breach_-1_refers_to_0_a.scala
new file mode 100644
index 0000000..e7c5cb7
--- /dev/null
+++ b/test/files/run/reify_metalevel_breach_-1_refers_to_0_a.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val x = 2
+ val outer = reify{reify{x}}
+// was: val code = reify{outer.splice.splice}
+ val code = reify{outer.eval.eval}
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_metalevel_breach_-1_refers_to_0_b.check b/test/files/run/reify_metalevel_breach_-1_refers_to_0_b.check
new file mode 100644
index 0000000..5bfed17
--- /dev/null
+++ b/test/files/run/reify_metalevel_breach_-1_refers_to_0_b.check
@@ -0,0 +1 @@
+evaluated = 2
\ No newline at end of file
diff --git a/test/files/run/reify_metalevel_breach_-1_refers_to_0_b.scala b/test/files/run/reify_metalevel_breach_-1_refers_to_0_b.scala
new file mode 100644
index 0000000..770fccc
--- /dev/null
+++ b/test/files/run/reify_metalevel_breach_-1_refers_to_0_b.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val x = 2
+ val code = reify{
+ {
+ val inner = reify{reify{x}}
+// was: inner.splice
+ inner.eval
+// was: }.splice
+ }.eval
+ }
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_metalevel_breach_-1_refers_to_1.check b/test/files/run/reify_metalevel_breach_-1_refers_to_1.check
new file mode 100644
index 0000000..5bfed17
--- /dev/null
+++ b/test/files/run/reify_metalevel_breach_-1_refers_to_1.check
@@ -0,0 +1 @@
+evaluated = 2
\ No newline at end of file
diff --git a/test/files/run/reify_metalevel_breach_-1_refers_to_1.scala b/test/files/run/reify_metalevel_breach_-1_refers_to_1.scala
new file mode 100644
index 0000000..32e7e90
--- /dev/null
+++ b/test/files/run/reify_metalevel_breach_-1_refers_to_1.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val code = reify{
+ val x = 2
+ val inner = reify{reify{x}}
+// was: inner.splice.splice
+ inner.eval.eval
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_nested_inner_refers_to_global.check b/test/files/run/reify_nested_inner_refers_to_global.check
new file mode 100644
index 0000000..5ef4ff4
--- /dev/null
+++ b/test/files/run/reify_nested_inner_refers_to_global.check
@@ -0,0 +1 @@
+evaluated = 2
diff --git a/test/files/run/reify_nested_inner_refers_to_global.scala b/test/files/run/reify_nested_inner_refers_to_global.scala
new file mode 100644
index 0000000..877222f
--- /dev/null
+++ b/test/files/run/reify_nested_inner_refers_to_global.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = {
+ val x = 2
+ reify{
+ reify{x}.splice
+ }
+ }
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_nested_inner_refers_to_local.check b/test/files/run/reify_nested_inner_refers_to_local.check
new file mode 100644
index 0000000..5bfed17
--- /dev/null
+++ b/test/files/run/reify_nested_inner_refers_to_local.check
@@ -0,0 +1 @@
+evaluated = 2
\ No newline at end of file
diff --git a/test/files/run/reify_nested_inner_refers_to_local.scala b/test/files/run/reify_nested_inner_refers_to_local.scala
new file mode 100644
index 0000000..703474e
--- /dev/null
+++ b/test/files/run/reify_nested_inner_refers_to_local.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val code = reify{
+ val x = 2
+// was: reify{x}.eval
+ reify{x}.eval
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_nested_outer_refers_to_global.check b/test/files/run/reify_nested_outer_refers_to_global.check
new file mode 100644
index 0000000..5ef4ff4
--- /dev/null
+++ b/test/files/run/reify_nested_outer_refers_to_global.check
@@ -0,0 +1 @@
+evaluated = 2
diff --git a/test/files/run/reify_nested_outer_refers_to_global.scala b/test/files/run/reify_nested_outer_refers_to_global.scala
new file mode 100644
index 0000000..e40c569
--- /dev/null
+++ b/test/files/run/reify_nested_outer_refers_to_global.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = {
+ val x = 2
+ val outer = reify{x}
+ reify{
+ val x = 42
+ outer.splice
+ };
+ }
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_nested_outer_refers_to_local.check b/test/files/run/reify_nested_outer_refers_to_local.check
new file mode 100644
index 0000000..5ef4ff4
--- /dev/null
+++ b/test/files/run/reify_nested_outer_refers_to_local.check
@@ -0,0 +1 @@
+evaluated = 2
diff --git a/test/files/run/reify_nested_outer_refers_to_local.scala b/test/files/run/reify_nested_outer_refers_to_local.scala
new file mode 100644
index 0000000..12147c5
--- /dev/null
+++ b/test/files/run/reify_nested_outer_refers_to_local.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val outer = {
+ val x = 2
+ reify{x}
+ }
+ val code = reify{
+ val x = 42
+ outer.splice
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_01.check b/test/files/run/reify_newimpl_01.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/reify_newimpl_01.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_01.scala b/test/files/run/reify_newimpl_01.scala
new file mode 100644
index 0000000..e4b46e4
--- /dev/null
+++ b/test/files/run/reify_newimpl_01.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ val x = 2
+ val code = reify {
+ x
+ }
+ println(code.eval)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_02.check b/test/files/run/reify_newimpl_02.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/reify_newimpl_02.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_02.scala b/test/files/run/reify_newimpl_02.scala
new file mode 100644
index 0000000..fa7ee17
--- /dev/null
+++ b/test/files/run/reify_newimpl_02.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ var x = 2
+ val code = reify {
+ x
+ }
+ println(code.eval)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_03.check b/test/files/run/reify_newimpl_03.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/reify_newimpl_03.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_03.scala b/test/files/run/reify_newimpl_03.scala
new file mode 100644
index 0000000..8d65425
--- /dev/null
+++ b/test/files/run/reify_newimpl_03.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ val code = reify {
+ val x = 2
+ reify{x}.eval
+ }
+ println(code.eval)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_04.check b/test/files/run/reify_newimpl_04.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/reify_newimpl_04.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_04.scala b/test/files/run/reify_newimpl_04.scala
new file mode 100644
index 0000000..21341ed
--- /dev/null
+++ b/test/files/run/reify_newimpl_04.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ val code = reify {
+ var x = 2
+ reify{x}.eval
+ }
+ println(code.eval)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_05.check b/test/files/run/reify_newimpl_05.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/reify_newimpl_05.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_05.scala b/test/files/run/reify_newimpl_05.scala
new file mode 100644
index 0000000..635eba3
--- /dev/null
+++ b/test/files/run/reify_newimpl_05.scala
@@ -0,0 +1,14 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ val code = reify {
+ var x = 2
+ def y = x // forcibly captures x
+ reify{x}.eval
+ }
+ println(code.eval)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_06.check b/test/files/run/reify_newimpl_06.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/reify_newimpl_06.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_06.scala b/test/files/run/reify_newimpl_06.scala
new file mode 100644
index 0000000..0bf37da
--- /dev/null
+++ b/test/files/run/reify_newimpl_06.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ class C(val y: Int) {
+ val code = reify {
+ reify{y}.eval
+ }
+ }
+
+ println(new C(2).code.eval)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_11.check b/test/files/run/reify_newimpl_11.check
new file mode 100644
index 0000000..2f5cb58
--- /dev/null
+++ b/test/files/run/reify_newimpl_11.check
@@ -0,0 +1,2 @@
+scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
+unresolved free type variables (namely: T defined by C in reify_newimpl_11.scala:6:11). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_newimpl_11.scala b/test/files/run/reify_newimpl_11.scala
new file mode 100644
index 0000000..e8ca664
--- /dev/null
+++ b/test/files/run/reify_newimpl_11.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ class C[T] {
+ val code = reify {
+ List[T](2.asInstanceOf[T])
+ }
+ println(code.eval)
+ }
+
+ try {
+ new C[Int]
+ } catch {
+ case ex: Throwable =>
+ println(ex)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_12.check b/test/files/run/reify_newimpl_12.check
new file mode 100644
index 0000000..220bd68
--- /dev/null
+++ b/test/files/run/reify_newimpl_12.check
@@ -0,0 +1 @@
+List(2)
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_12.scala b/test/files/run/reify_newimpl_12.scala
new file mode 100644
index 0000000..246d7b4
--- /dev/null
+++ b/test/files/run/reify_newimpl_12.scala
@@ -0,0 +1,14 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ class C[T: TypeTag] {
+ val code = reify {
+ List[T](2.asInstanceOf[T])
+ }
+ println(code.eval)
+ }
+
+ new C[Int]
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_13.check b/test/files/run/reify_newimpl_13.check
new file mode 100644
index 0000000..d518cd7
--- /dev/null
+++ b/test/files/run/reify_newimpl_13.check
@@ -0,0 +1,2 @@
+scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
+unresolved free type variables (namely: T defined by C in reify_newimpl_13.scala:7:13). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_newimpl_13.scala b/test/files/run/reify_newimpl_13.scala
new file mode 100644
index 0000000..1b2b8cb
--- /dev/null
+++ b/test/files/run/reify_newimpl_13.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ class C[T] {
+ val code = reify {
+ List[T](2.asInstanceOf[T])
+ }
+ println(code.eval)
+ }
+
+ try {
+ new C[Int]
+ } catch {
+ case ex: Throwable =>
+ println(ex)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_14.check b/test/files/run/reify_newimpl_14.check
new file mode 100644
index 0000000..220bd68
--- /dev/null
+++ b/test/files/run/reify_newimpl_14.check
@@ -0,0 +1 @@
+List(2)
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_14.scala b/test/files/run/reify_newimpl_14.scala
new file mode 100644
index 0000000..284e87a
--- /dev/null
+++ b/test/files/run/reify_newimpl_14.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ class C[T: TypeTag] {
+ val code = reify {
+ List[T](2.asInstanceOf[T])
+ }
+ println(code.eval)
+ }
+
+ new C[Int]
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_15.check b/test/files/run/reify_newimpl_15.check
new file mode 100644
index 0000000..220bd68
--- /dev/null
+++ b/test/files/run/reify_newimpl_15.check
@@ -0,0 +1 @@
+List(2)
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_15.scala b/test/files/run/reify_newimpl_15.scala
new file mode 100644
index 0000000..cb66e85
--- /dev/null
+++ b/test/files/run/reify_newimpl_15.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ class C {
+ type T = Int
+ val code = reify {
+ List[T](2)
+ }
+ println(code.eval)
+ }
+
+ new C
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_18.check b/test/files/run/reify_newimpl_18.check
new file mode 100644
index 0000000..c23af69
--- /dev/null
+++ b/test/files/run/reify_newimpl_18.check
@@ -0,0 +1 @@
+List(2)
diff --git a/test/files/run/reify_newimpl_18.scala b/test/files/run/reify_newimpl_18.scala
new file mode 100644
index 0000000..8456fd1
--- /dev/null
+++ b/test/files/run/reify_newimpl_18.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ class C[U: TypeTag] {
+ type T = U
+ val code = reify {
+ List[T](2.asInstanceOf[T])
+ }
+ println(code.eval)
+ }
+
+ new C[Int]
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_19.check b/test/files/run/reify_newimpl_19.check
new file mode 100644
index 0000000..8b8652f
--- /dev/null
+++ b/test/files/run/reify_newimpl_19.check
@@ -0,0 +1,2 @@
+scala.tools.reflect.ToolBoxError: reflective toolbox has failed:
+unresolved free type variables (namely: T defined by C in reify_newimpl_19.scala:7:10). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_newimpl_19.scala b/test/files/run/reify_newimpl_19.scala
new file mode 100644
index 0000000..ba2d39c
--- /dev/null
+++ b/test/files/run/reify_newimpl_19.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ class C {
+ type T
+ val code = reify {
+ List[T](2.asInstanceOf[T])
+ }
+ println(code.eval)
+ }
+
+ try {
+ new C { val T = Int }
+ } catch {
+ case ex: Throwable =>
+ println(ex)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_20.check b/test/files/run/reify_newimpl_20.check
new file mode 100644
index 0000000..c23af69
--- /dev/null
+++ b/test/files/run/reify_newimpl_20.check
@@ -0,0 +1 @@
+List(2)
diff --git a/test/files/run/reify_newimpl_20.scala b/test/files/run/reify_newimpl_20.scala
new file mode 100644
index 0000000..f8ddb53
--- /dev/null
+++ b/test/files/run/reify_newimpl_20.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ class C {
+ type T
+ implicit val tt: TypeTag[T] = implicitly[TypeTag[Int]].asInstanceOf[TypeTag[T]]
+ val code = reify {
+ List[T](2.asInstanceOf[T])
+ }
+ println(code.eval)
+ }
+
+ new C { type T = String } // this "mistake" is made for a reason!
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_21.check b/test/files/run/reify_newimpl_21.check
new file mode 100644
index 0000000..c23af69
--- /dev/null
+++ b/test/files/run/reify_newimpl_21.check
@@ -0,0 +1 @@
+List(2)
diff --git a/test/files/run/reify_newimpl_21.scala b/test/files/run/reify_newimpl_21.scala
new file mode 100644
index 0000000..97261b2
--- /dev/null
+++ b/test/files/run/reify_newimpl_21.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ trait C {
+ type T
+ implicit val tt: TypeTag[T]
+ lazy val code = reify {
+ List[T](2.asInstanceOf[T])
+ }
+ }
+
+ class D extends C {
+ type T = String // this "mistake" is made for a reason!
+ override val tt: TypeTag[T] = implicitly[TypeTag[Int]].asInstanceOf[TypeTag[T]]
+ }
+
+ println((new D).code.eval)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_22.check b/test/files/run/reify_newimpl_22.check
new file mode 100644
index 0000000..dcb3e28
--- /dev/null
+++ b/test/files/run/reify_newimpl_22.check
@@ -0,0 +1,29 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> import scala.tools.reflect.ToolBox
+import scala.tools.reflect.ToolBox
+
+scala> import scala.tools.reflect.Eval
+import scala.tools.reflect.Eval
+
+scala> {
+ val x = 2
+ val code = reify {
+ x
+ }
+ println(code.eval)
+}
+<console>:15: free term: Ident(newTermName("x")) defined by res0 in <console>:14:21
+ val code = reify {
+ ^
+2
+
+scala>
+
+scala>
diff --git a/test/files/run/reify_newimpl_22.scala b/test/files/run/reify_newimpl_22.scala
new file mode 100644
index 0000000..8512620
--- /dev/null
+++ b/test/files/run/reify_newimpl_22.scala
@@ -0,0 +1,17 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def extraSettings = "-Xlog-free-terms"
+ def code = """
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+{
+ val x = 2
+ val code = reify {
+ x
+ }
+ println(code.eval)
+}
+ """
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_23.check b/test/files/run/reify_newimpl_23.check
new file mode 100644
index 0000000..8821246
--- /dev/null
+++ b/test/files/run/reify_newimpl_23.check
@@ -0,0 +1,28 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> import scala.tools.reflect.ToolBox
+import scala.tools.reflect.ToolBox
+
+scala> import scala.tools.reflect.Eval
+import scala.tools.reflect.Eval
+
+scala> def foo[T]{
+ val code = reify {
+ List[T]()
+ }
+ println(code.eval)
+}
+<console>:13: free type: Ident(newTypeName("T")) defined by foo in <console>:12:16
+ val code = reify {
+ ^
+foo: [T]=> Unit
+
+scala>
+
+scala>
diff --git a/test/files/run/reify_newimpl_23.scala b/test/files/run/reify_newimpl_23.scala
new file mode 100644
index 0000000..d4c2a68
--- /dev/null
+++ b/test/files/run/reify_newimpl_23.scala
@@ -0,0 +1,16 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def extraSettings = "-Xlog-free-types"
+ def code = """
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+def foo[T]{
+ val code = reify {
+ List[T]()
+ }
+ println(code.eval)
+}
+ """
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_25.check b/test/files/run/reify_newimpl_25.check
new file mode 100644
index 0000000..d1028b9
--- /dev/null
+++ b/test/files/run/reify_newimpl_25.check
@@ -0,0 +1,19 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> {
+ import scala.reflect.runtime.universe._
+ val x = "2"
+ val tt = implicitly[TypeTag[x.type]]
+ println(tt)
+}
+<console>:11: free term: Ident(newTermName("x")) defined by res0 in <console>:10:21
+ val tt = implicitly[TypeTag[x.type]]
+ ^
+TypeTag[x.type]
+
+scala>
+
+scala>
diff --git a/test/files/run/reify_newimpl_25.scala b/test/files/run/reify_newimpl_25.scala
new file mode 100644
index 0000000..01cc04b
--- /dev/null
+++ b/test/files/run/reify_newimpl_25.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def extraSettings = "-Xlog-free-terms"
+ def code = """
+{
+ import scala.reflect.runtime.universe._
+ val x = "2"
+ val tt = implicitly[TypeTag[x.type]]
+ println(tt)
+}
+ """
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_26.check b/test/files/run/reify_newimpl_26.check
new file mode 100644
index 0000000..347f636
--- /dev/null
+++ b/test/files/run/reify_newimpl_26.check
@@ -0,0 +1,21 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> def foo[T]{
+ import scala.reflect.runtime.universe._
+ val tt = implicitly[WeakTypeTag[List[T]]]
+ println(tt)
+}
+<console>:9: free type: Ident(newTypeName("T")) defined by foo in <console>:7:16
+ val tt = implicitly[WeakTypeTag[List[T]]]
+ ^
+foo: [T]=> Unit
+
+scala> foo[Int]
+WeakTypeTag[scala.List[T]]
+
+scala>
+
+scala>
diff --git a/test/files/run/reify_newimpl_26.scala b/test/files/run/reify_newimpl_26.scala
new file mode 100644
index 0000000..af74d60
--- /dev/null
+++ b/test/files/run/reify_newimpl_26.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def extraSettings = "-Xlog-free-types"
+ def code = """
+def foo[T]{
+ import scala.reflect.runtime.universe._
+ val tt = implicitly[WeakTypeTag[List[T]]]
+ println(tt)
+}
+foo[Int]
+ """
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_27.check b/test/files/run/reify_newimpl_27.check
new file mode 100644
index 0000000..c23af69
--- /dev/null
+++ b/test/files/run/reify_newimpl_27.check
@@ -0,0 +1 @@
+List(2)
diff --git a/test/files/run/reify_newimpl_27.scala b/test/files/run/reify_newimpl_27.scala
new file mode 100644
index 0000000..db9ada3
--- /dev/null
+++ b/test/files/run/reify_newimpl_27.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ object C {
+ type T = Int
+ val code = reify {
+ List[T](2)
+ }
+ println(code.eval)
+ }
+
+ C
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_29.check b/test/files/run/reify_newimpl_29.check
new file mode 100644
index 0000000..c23af69
--- /dev/null
+++ b/test/files/run/reify_newimpl_29.check
@@ -0,0 +1 @@
+List(2)
diff --git a/test/files/run/reify_newimpl_29.scala b/test/files/run/reify_newimpl_29.scala
new file mode 100644
index 0000000..033c360
--- /dev/null
+++ b/test/files/run/reify_newimpl_29.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ class C {
+ type T = Int
+ val code = reify {
+ List[C#T](2)
+ }
+ println(code.eval)
+ }
+
+ new C
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_30.check b/test/files/run/reify_newimpl_30.check
new file mode 100644
index 0000000..29baac9
--- /dev/null
+++ b/test/files/run/reify_newimpl_30.check
@@ -0,0 +1,2 @@
+reflective toolbox has failed:
+unresolved free type variables (namely: C defined by <local Test> in reify_newimpl_30.scala:7:11). have you forgot to use TypeTag annotations for type parameters external to a reifee? if you have troubles tracking free type variables, consider using -Xlog-free-types
diff --git a/test/files/run/reify_newimpl_30.scala b/test/files/run/reify_newimpl_30.scala
new file mode 100644
index 0000000..bc34f1b
--- /dev/null
+++ b/test/files/run/reify_newimpl_30.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.{ ToolBox, ToolBoxError }
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ class C {
+ type T = Int
+ val code = reify {
+ List[C#T](2)
+ }
+ try { println(code.eval) }
+ catch { case e: ToolBoxError => println(e.getMessage) }
+ }
+
+ new C
+ }
+}
diff --git a/test/files/run/reify_newimpl_31.check b/test/files/run/reify_newimpl_31.check
new file mode 100644
index 0000000..c23af69
--- /dev/null
+++ b/test/files/run/reify_newimpl_31.check
@@ -0,0 +1 @@
+List(2)
diff --git a/test/files/run/reify_newimpl_31.scala b/test/files/run/reify_newimpl_31.scala
new file mode 100644
index 0000000..2e20aa0
--- /dev/null
+++ b/test/files/run/reify_newimpl_31.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ object C {
+ type T = Int
+ val code = reify {
+ List[C.T](2)
+ }
+ println(code.eval)
+ }
+
+ C
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_33.check b/test/files/run/reify_newimpl_33.check
new file mode 100644
index 0000000..c23af69
--- /dev/null
+++ b/test/files/run/reify_newimpl_33.check
@@ -0,0 +1 @@
+List(2)
diff --git a/test/files/run/reify_newimpl_33.scala b/test/files/run/reify_newimpl_33.scala
new file mode 100644
index 0000000..98bb2e5
--- /dev/null
+++ b/test/files/run/reify_newimpl_33.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ object C {
+ type T = Int
+ val c = C
+ val code = reify {
+ List[c.T](2)
+ }
+ println(code.eval)
+ }
+
+ C
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_35.check b/test/files/run/reify_newimpl_35.check
new file mode 100644
index 0000000..52aaa17
--- /dev/null
+++ b/test/files/run/reify_newimpl_35.check
@@ -0,0 +1,17 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> def foo[T: TypeTag] = reify{List[T]()}
+foo: [T](implicit evidence$1: reflect.runtime.universe.TypeTag[T])reflect.runtime.universe.Expr[List[T]]
+
+scala> println(foo)
+Expr[List[Nothing]](Nil)
+
+scala>
+
+scala>
diff --git a/test/files/run/reify_newimpl_35.scala b/test/files/run/reify_newimpl_35.scala
new file mode 100644
index 0000000..f2ebf51
--- /dev/null
+++ b/test/files/run/reify_newimpl_35.scala
@@ -0,0 +1,10 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def extraSettings = "-Xlog-free-types"
+ def code = """
+import scala.reflect.runtime.universe._
+def foo[T: TypeTag] = reify{List[T]()}
+println(foo)
+ """
+}
diff --git a/test/files/run/syncchannel.check b/test/files/run/reify_newimpl_36.check
similarity index 100%
copy from test/files/run/syncchannel.check
copy to test/files/run/reify_newimpl_36.check
diff --git a/test/files/run/reify_newimpl_36.scala b/test/files/run/reify_newimpl_36.scala
new file mode 100644
index 0000000..490e645
--- /dev/null
+++ b/test/files/run/reify_newimpl_36.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ val x = 42
+ def foo() = reify(reify(x));
+ {
+ val x = 2
+ val code1 = foo()
+ val code2 = code1.eval
+ println(code2.eval)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/syncchannel.check b/test/files/run/reify_newimpl_37.check
similarity index 100%
copy from test/files/run/syncchannel.check
copy to test/files/run/reify_newimpl_37.check
diff --git a/test/files/run/reify_newimpl_37.scala b/test/files/run/reify_newimpl_37.scala
new file mode 100644
index 0000000..7c4d4af
--- /dev/null
+++ b/test/files/run/reify_newimpl_37.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ val x = 42
+ def foo() = reify(reify(reify(x)));
+ {
+ val x = 2
+ val code1 = foo()
+ val code2 = code1.eval
+ val code3 = code2.eval
+ println(code3.eval)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/syncchannel.check b/test/files/run/reify_newimpl_38.check
similarity index 100%
copy from test/files/run/syncchannel.check
copy to test/files/run/reify_newimpl_38.check
diff --git a/test/files/run/reify_newimpl_38.scala b/test/files/run/reify_newimpl_38.scala
new file mode 100644
index 0000000..fd898b9
--- /dev/null
+++ b/test/files/run/reify_newimpl_38.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ val x = 42
+ def foo() = reify{ val y = x; reify(y) };
+ {
+ val x = 2
+ val code1 = foo()
+ val code2 = code1.eval
+ println(code2.eval)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/syncchannel.check b/test/files/run/reify_newimpl_39.check
similarity index 100%
copy from test/files/run/syncchannel.check
copy to test/files/run/reify_newimpl_39.check
diff --git a/test/files/run/reify_newimpl_39.scala b/test/files/run/reify_newimpl_39.scala
new file mode 100644
index 0000000..885c738
--- /dev/null
+++ b/test/files/run/reify_newimpl_39.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ val x = 42
+ def foo() = reify{ val y = x; reify{ val z = y; reify(z) } };
+ {
+ val x = 2
+ val code1 = foo()
+ val code2 = code1.eval
+ val code3 = code2.eval
+ println(code3.eval)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_40.check b/test/files/run/reify_newimpl_40.check
new file mode 100644
index 0000000..cc0001a
--- /dev/null
+++ b/test/files/run/reify_newimpl_40.check
@@ -0,0 +1 @@
+74088
diff --git a/test/files/run/reify_newimpl_40.scala b/test/files/run/reify_newimpl_40.scala
new file mode 100644
index 0000000..018bf72
--- /dev/null
+++ b/test/files/run/reify_newimpl_40.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ val x = 42
+ def foo() = reify{ val y = x; reify{ val z = y * x; reify(z * x) } };
+ {
+ val x = 2
+ val code1 = foo()
+ val code2 = code1.eval
+ val code3 = code2.eval
+ println(code3.eval)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_41.check b/test/files/run/reify_newimpl_41.check
new file mode 100644
index 0000000..0b427f2
--- /dev/null
+++ b/test/files/run/reify_newimpl_41.check
@@ -0,0 +1,3 @@
+42
+44
+43
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_41.scala b/test/files/run/reify_newimpl_41.scala
new file mode 100644
index 0000000..9bb79fb
--- /dev/null
+++ b/test/files/run/reify_newimpl_41.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ var _x = 42
+ def x = { val x0 = _x; _x += 1; x0 }
+ var _y = 1
+ def y = { val y0 = _y + _x; _y += y0; y0 }
+ val code = reify {
+ def foo = y // ensures that y is the first freevar we find
+ println(x)
+ println(y)
+ println(x)
+ }
+ code.eval
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_42.check b/test/files/run/reify_newimpl_42.check
new file mode 100644
index 0000000..0b427f2
--- /dev/null
+++ b/test/files/run/reify_newimpl_42.check
@@ -0,0 +1,3 @@
+42
+44
+43
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_42.scala b/test/files/run/reify_newimpl_42.scala
new file mode 100644
index 0000000..bd7dead
--- /dev/null
+++ b/test/files/run/reify_newimpl_42.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ var _x = 42
+ def x = { val x0 = _x; _x += 1; x0 }
+ var _y = 1
+ def y = { val y0 = _y + _x; _y += y0; y0 }
+ val code = reify {
+ println(x)
+ println(y)
+ println(x)
+ }
+ code.eval
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_43.check b/test/files/run/reify_newimpl_43.check
new file mode 100644
index 0000000..7a754f4
--- /dev/null
+++ b/test/files/run/reify_newimpl_43.check
@@ -0,0 +1,2 @@
+1
+2
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_43.scala b/test/files/run/reify_newimpl_43.scala
new file mode 100644
index 0000000..88ea224
--- /dev/null
+++ b/test/files/run/reify_newimpl_43.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ var counter = 0
+ lazy val x = { counter += 1; counter }
+ lazy val y = { counter += 1; counter }
+ val code = reify {
+ def foo = y // ensures that y is the first freevar we find
+ println(x)
+ println(y)
+ }
+ code.eval
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_44.check b/test/files/run/reify_newimpl_44.check
new file mode 100644
index 0000000..7a754f4
--- /dev/null
+++ b/test/files/run/reify_newimpl_44.check
@@ -0,0 +1,2 @@
+1
+2
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_44.scala b/test/files/run/reify_newimpl_44.scala
new file mode 100644
index 0000000..88ea224
--- /dev/null
+++ b/test/files/run/reify_newimpl_44.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ var counter = 0
+ lazy val x = { counter += 1; counter }
+ lazy val y = { counter += 1; counter }
+ val code = reify {
+ def foo = y // ensures that y is the first freevar we find
+ println(x)
+ println(y)
+ }
+ code.eval
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_45.check b/test/files/run/reify_newimpl_45.check
new file mode 100644
index 0000000..6e14f71
--- /dev/null
+++ b/test/files/run/reify_newimpl_45.check
@@ -0,0 +1,2 @@
+List(free type T)
+ima worx: 2
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_45.scala b/test/files/run/reify_newimpl_45.scala
new file mode 100644
index 0000000..2a6c68d
--- /dev/null
+++ b/test/files/run/reify_newimpl_45.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ class C[T >: Null] {
+ val code = reify{val x: T = "2".asInstanceOf[T]; println("ima worx: %s".format(x)); x}
+ println(code.tree.freeTypes)
+ val T = code.tree.freeTypes(0)
+ val tree = code.tree.substituteSymbols(List(T), List(definitions.StringClass))
+ cm.mkToolBox().eval(tree)
+ }
+
+ new C[String]
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_47.check b/test/files/run/reify_newimpl_47.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/reify_newimpl_47.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_47.scala b/test/files/run/reify_newimpl_47.scala
new file mode 100644
index 0000000..8740132
--- /dev/null
+++ b/test/files/run/reify_newimpl_47.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val outer = {
+ val x = 2
+ reify{x}
+ }
+
+ val code = reify{
+ val x = 42
+ outer.splice
+ }
+
+ println(code.eval)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_48.check b/test/files/run/reify_newimpl_48.check
new file mode 100644
index 0000000..f11c82a
--- /dev/null
+++ b/test/files/run/reify_newimpl_48.check
@@ -0,0 +1 @@
+9
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_48.scala b/test/files/run/reify_newimpl_48.scala
new file mode 100644
index 0000000..9899bc0
--- /dev/null
+++ b/test/files/run/reify_newimpl_48.scala
@@ -0,0 +1,22 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val outer1 = {
+ val x = 2
+ reify{x}
+ }
+
+ val outer2 = {
+ val x = 3
+ reify{x}
+ }
+
+ val code = reify{
+ val x = 4
+ x + outer1.splice + outer2.splice
+ }
+
+ println(code.eval)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_49.check b/test/files/run/reify_newimpl_49.check
new file mode 100644
index 0000000..d8a621d
--- /dev/null
+++ b/test/files/run/reify_newimpl_49.check
@@ -0,0 +1,3 @@
+3
+3
+5
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_49.scala b/test/files/run/reify_newimpl_49.scala
new file mode 100644
index 0000000..2222bd6
--- /dev/null
+++ b/test/files/run/reify_newimpl_49.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ var y = 1
+ def x = { y += 2; y }
+ val code = reify {
+ def foo = y // ensures that y is the first freevar we find
+ println(x)
+ println(y)
+ println(x)
+ }
+ code.eval
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_50.check b/test/files/run/reify_newimpl_50.check
new file mode 100644
index 0000000..d8a621d
--- /dev/null
+++ b/test/files/run/reify_newimpl_50.check
@@ -0,0 +1,3 @@
+3
+3
+5
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_50.scala b/test/files/run/reify_newimpl_50.scala
new file mode 100644
index 0000000..279cb16
--- /dev/null
+++ b/test/files/run/reify_newimpl_50.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ var y = 1
+ def x = { y += 2; y }
+ val code = reify {
+ println(x)
+ println(y)
+ println(x)
+ }
+ code.eval
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_51.check b/test/files/run/reify_newimpl_51.check
new file mode 100644
index 0000000..9a4ddea
--- /dev/null
+++ b/test/files/run/reify_newimpl_51.check
@@ -0,0 +1,3 @@
+2
+1
+2
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_51.scala b/test/files/run/reify_newimpl_51.scala
new file mode 100644
index 0000000..f823bf4
--- /dev/null
+++ b/test/files/run/reify_newimpl_51.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ var counter = 0
+ lazy val x = { counter += 1; counter }
+ lazy val y = { counter += 1; counter }
+ val code = reify {
+ def foo = y // ensures that y is the first freevar we find
+ val bar = reify { println(x * y) }
+ bar.eval
+ println(x)
+ println(y)
+ }
+ code.eval
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_52.check b/test/files/run/reify_newimpl_52.check
new file mode 100644
index 0000000..9359a2b
--- /dev/null
+++ b/test/files/run/reify_newimpl_52.check
@@ -0,0 +1,3 @@
+2
+2
+1
\ No newline at end of file
diff --git a/test/files/run/reify_newimpl_52.scala b/test/files/run/reify_newimpl_52.scala
new file mode 100644
index 0000000..f01199e
--- /dev/null
+++ b/test/files/run/reify_newimpl_52.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ var counter = 0
+ lazy val x = { counter += 1; counter }
+ lazy val y = { counter += 1; counter }
+ val code = reify {
+ def foo = y // ensures that y is the first freevar we find
+ val bar = reify { println(y * x) }
+ bar.eval
+ println(x)
+ println(y)
+ }
+ code.eval
+ }
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/reify_printf.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/reify_printf.check
diff --git a/test/files/run/reify_printf.scala b/test/files/run/reify_printf.scala
new file mode 100644
index 0000000..272856b
--- /dev/null
+++ b/test/files/run/reify_printf.scala
@@ -0,0 +1,71 @@
+import java.io.{ ByteArrayOutputStream, PrintStream }
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import scala.reflect.api._
+import scala.reflect.api.Trees
+import scala.reflect.internal.Types
+import scala.util.matching.Regex
+
+object Test extends App {
+ val output = new ByteArrayOutputStream()
+ Console.setOut(new PrintStream(output))
+ val toolbox = cm.mkToolBox()
+
+ val tree = tree_printf(reify("hello %s").tree, reify("world").tree)
+ val evaluated = toolbox.eval(tree)
+ assert(output.toString() == "hello world", output.toString() +" == hello world")
+
+ /*
+ // upd. Oh, good old times, our very-very first experiments with macros :)
+ macro def printf(format: String, params: Any*) : String = tree_printf(format: Tree, (params: Seq[Tree]): _*)
+ */
+
+ var i = 0
+ def gensym(name: String) = { i += 1; newTermName(name + i) }
+
+ def createTempValDef( value : Tree, tpe : Type ) : (Option[Tree],Tree) = {
+ val local = gensym("temp")
+ (
+ Some(
+ ValDef(
+ NoMods
+ , local
+ , TypeTree(tpe)
+ , value
+ )
+ )
+ , Ident(local)
+ )
+ }
+
+ def tree_printf(format: Tree, params: Tree*) = {
+ val Literal(Constant(s_format: String)) = format
+ val paramsStack = scala.collection.mutable.Stack(params: _*)
+ val parsed = s_format.split("(?<=%[\\w%])|(?=%[\\w%])") map {
+ case "%d" => createTempValDef( paramsStack.pop, typeOf[Int] )
+ case "%s" => createTempValDef( paramsStack.pop, typeOf[String] )
+ case "%%" => {
+ (None:Option[Tree], Literal(Constant("%")))
+ }
+ case part => {
+ (None:Option[Tree], Literal(Constant(part)))
+ }
+ }
+
+ val evals = for ((Some(eval), _) <- parsed if eval != None) yield (eval: Tree)
+ val prints = for ((_, ref) <- parsed) yield
+ Apply(
+ Select(
+ Select(
+ Ident( newTermName("scala") )
+ , newTermName("Predef")
+ )
+ , newTermName("print")
+ )
+ , List(ref)
+ ): Tree
+ Block((evals ++ prints).toList, Literal(Constant(())))
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/reify_properties.check b/test/files/run/reify_properties.check
new file mode 100644
index 0000000..d769bea
--- /dev/null
+++ b/test/files/run/reify_properties.check
@@ -0,0 +1,2 @@
+user1: MR. ROBERT <noname>
+user2: MR. BOB KUZ
diff --git a/test/files/run/reify_properties.scala b/test/files/run/reify_properties.scala
new file mode 100644
index 0000000..01a9b12
--- /dev/null
+++ b/test/files/run/reify_properties.scala
@@ -0,0 +1,57 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ /** A mutable property whose getter and setter may be customized. */
+ case class Property[T](init: T) {
+ private var value: T = init
+
+ /** The getter function, defaults to identity. */
+ private var setter: T => T = identity[T]
+
+ /** The setter function, defaults to identity. */
+ private var getter: T => T = identity[T]
+
+ /** Retrive the value held in this property. */
+ def apply(): T = getter(value)
+
+ /** Update the value held in this property, through the setter. */
+ def update(newValue: T) = value = setter(newValue)
+
+ /** Change the getter. */
+ def get(newGetter: T => T) = { getter = newGetter; this }
+
+ /** Change the setter */
+ def set(newSetter: T => T) = { setter = newSetter; this }
+ }
+
+ class User {
+ // Create a property with custom getter and setter
+ val firstname = Property("")
+ .get { v => v.toUpperCase() }
+ .set { v => "Mr. " + v }
+ val lastname = Property("<noname>")
+
+ /** Scala provides syntactic sugar for calling 'apply'. Simply
+ * adding a list of arguments between parenthesis (in this case,
+ * an empty list) is translated to a call to 'apply' with those
+ * arguments.
+ */
+ override def toString() = firstname() + " " + lastname()
+ }
+
+ val user1 = new User
+
+ // Syntactic sugar for 'update': an assignment is translated to a
+ // call to method 'update'
+ user1.firstname() = "Robert"
+
+ val user2 = new User
+ user2.firstname() = "bob"
+ user2.lastname() = "KUZ"
+
+ println("user1: " + user1)
+ println("user2: " + user2)
+ }.eval
+}
diff --git a/test/files/run/reify_renamed_term_basic.check b/test/files/run/reify_renamed_term_basic.check
new file mode 100644
index 0000000..e78f94f
--- /dev/null
+++ b/test/files/run/reify_renamed_term_basic.check
@@ -0,0 +1 @@
+((),(),())
diff --git a/test/files/run/reify_renamed_term_basic.scala b/test/files/run/reify_renamed_term_basic.scala
new file mode 100644
index 0000000..cd76def
--- /dev/null
+++ b/test/files/run/reify_renamed_term_basic.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object A {
+ object B {
+ val c = ()
+ }
+}
+
+object Test extends App {
+ import A.{B => X}
+ import A.B.{c => y}
+ import X.{c => z}
+
+ val expr = reify (
+ X.c, y, z
+ )
+
+ println(expr.eval)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_renamed_term_local_to_reifee.check b/test/files/run/reify_renamed_term_local_to_reifee.check
new file mode 100644
index 0000000..e78f94f
--- /dev/null
+++ b/test/files/run/reify_renamed_term_local_to_reifee.check
@@ -0,0 +1 @@
+((),(),())
diff --git a/test/files/run/reify_renamed_term_local_to_reifee.scala b/test/files/run/reify_renamed_term_local_to_reifee.scala
new file mode 100644
index 0000000..1860316
--- /dev/null
+++ b/test/files/run/reify_renamed_term_local_to_reifee.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object A {
+ object B {
+ val c = ()
+ }
+}
+
+object Test extends App {
+ val expr = reify {
+ import A.{B => X}
+ import A.B.{c => y}
+ import X.{c => z}
+
+ (X.c, y, z)
+ }
+
+ println(expr.eval)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_renamed_term_overloaded_method.check b/test/files/run/reify_renamed_term_overloaded_method.check
new file mode 100644
index 0000000..48082f7
--- /dev/null
+++ b/test/files/run/reify_renamed_term_overloaded_method.check
@@ -0,0 +1 @@
+12
diff --git a/test/files/run/reify_renamed_term_overloaded_method.scala b/test/files/run/reify_renamed_term_overloaded_method.scala
new file mode 100644
index 0000000..3ef442d
--- /dev/null
+++ b/test/files/run/reify_renamed_term_overloaded_method.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object O {
+ def show(i: Int) = i.toString
+ def show(s: String) = s
+}
+
+object Test extends App {
+ import O.{show => s}
+
+ val expr = reify {
+ s("1") + s(2)
+ }
+
+ println(expr.eval)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_renamed_term_si5841.check b/test/files/run/reify_renamed_term_si5841.check
new file mode 100644
index 0000000..6031277
--- /dev/null
+++ b/test/files/run/reify_renamed_term_si5841.check
@@ -0,0 +1 @@
+class scala.reflect.runtime.JavaUniverse
diff --git a/test/files/run/reify_renamed_term_si5841.scala b/test/files/run/reify_renamed_term_si5841.scala
new file mode 100644
index 0000000..ef18d65
--- /dev/null
+++ b/test/files/run/reify_renamed_term_si5841.scala
@@ -0,0 +1,7 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ println(reify{ru}.eval.getClass)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_renamed_type_basic.check b/test/files/run/reify_renamed_type_basic.check
new file mode 100644
index 0000000..6a452c1
--- /dev/null
+++ b/test/files/run/reify_renamed_type_basic.check
@@ -0,0 +1 @@
+()
diff --git a/test/files/run/reify_renamed_type_basic.scala b/test/files/run/reify_renamed_type_basic.scala
new file mode 100644
index 0000000..23729e5
--- /dev/null
+++ b/test/files/run/reify_renamed_type_basic.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object O {
+ type A = Unit
+}
+
+object Test extends App {
+ import O.{A => X}
+
+ def expr = reify {
+ val a: X = ()
+ }
+
+ println(expr.eval)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_renamed_type_local_to_reifee.check b/test/files/run/reify_renamed_type_local_to_reifee.check
new file mode 100644
index 0000000..6a452c1
--- /dev/null
+++ b/test/files/run/reify_renamed_type_local_to_reifee.check
@@ -0,0 +1 @@
+()
diff --git a/test/files/run/reify_renamed_type_local_to_reifee.scala b/test/files/run/reify_renamed_type_local_to_reifee.scala
new file mode 100644
index 0000000..ed1bad2
--- /dev/null
+++ b/test/files/run/reify_renamed_type_local_to_reifee.scala
@@ -0,0 +1,24 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object O {
+ type A = Unit
+}
+
+object Test extends App {
+ val expr = reify {
+ import O.{A => X}
+
+ val a: X = ()
+
+ object P {
+ type B = Unit
+ }
+
+ import P.{B => Y}
+
+ val b: Y = ()
+ }
+
+ println(expr.eval)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_renamed_type_spliceable.check b/test/files/run/reify_renamed_type_spliceable.check
new file mode 100644
index 0000000..6a452c1
--- /dev/null
+++ b/test/files/run/reify_renamed_type_spliceable.check
@@ -0,0 +1 @@
+()
diff --git a/test/files/run/reify_renamed_type_spliceable.scala b/test/files/run/reify_renamed_type_spliceable.scala
new file mode 100644
index 0000000..9c2cff5
--- /dev/null
+++ b/test/files/run/reify_renamed_type_spliceable.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+abstract class C {
+ type T >: Null
+}
+
+object Test extends App {
+ def foo(c: C) = {
+ import c.{T => U}
+ reify {
+ val x: U = null
+ }
+ }
+
+ val expr = foo(new C {
+ type T = AnyRef
+ })
+
+ println(expr.eval)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_sort.check b/test/files/run/reify_sort.check
new file mode 100644
index 0000000..375536c
--- /dev/null
+++ b/test/files/run/reify_sort.check
@@ -0,0 +1,2 @@
+[6,2,8,5,1]
+[1,2,5,6,8]
diff --git a/test/files/run/reify_sort.scala b/test/files/run/reify_sort.scala
new file mode 100644
index 0000000..17e3976
--- /dev/null
+++ b/test/files/run/reify_sort.scala
@@ -0,0 +1,51 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ /** Nested methods can use and even update everything
+ * visible in their scope (including local variables or
+ * arguments of enclosing methods).
+ */
+ def sort(a: Array[Int]) {
+
+ def swap(i: Int, j: Int) {
+ val t = a(i); a(i) = a(j); a(j) = t
+ }
+
+ def sort1(l: Int, r: Int) {
+ val pivot = a((l + r) / 2)
+ var i = l
+ var j = r
+ while (i <= j) {
+ while (a(i) < pivot) i += 1
+ while (a(j) > pivot) j -= 1
+ if (i <= j) {
+ swap(i, j)
+ i += 1
+ j -= 1
+ }
+ }
+ if (l < j) sort1(l, j)
+ if (j < r) sort1(i, r)
+ }
+
+ if (a.length > 0)
+ sort1(0, a.length - 1)
+ }
+
+ def println(ar: Array[Int]) {
+ def print1 = {
+ def iter(i: Int): String =
+ ar(i) + (if (i < ar.length-1) "," + iter(i+1) else "")
+ if (ar.length == 0) "" else iter(0)
+ }
+ Console.println("[" + print1 + "]")
+ }
+
+ val ar = Array(6, 2, 8, 5, 1)
+ println(ar)
+ sort(ar)
+ println(ar)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_sort1.check b/test/files/run/reify_sort1.check
new file mode 100644
index 0000000..0d30805
--- /dev/null
+++ b/test/files/run/reify_sort1.check
@@ -0,0 +1,2 @@
+List(6, 2, 8, 5, 1)
+List(1, 2, 5, 6, 8)
diff --git a/test/files/run/reify_sort1.scala b/test/files/run/reify_sort1.scala
new file mode 100644
index 0000000..6fb3cc5
--- /dev/null
+++ b/test/files/run/reify_sort1.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ def sort(a: List[Int]): List[Int] = {
+ if (a.length < 2)
+ a
+ else {
+ val pivot = a(a.length / 2)
+ sort(a.filter(_ < pivot)) :::
+ a.filter(_ == pivot) :::
+ sort(a.filter(_ > pivot))
+ }
+ }
+
+ val xs = List(6, 2, 8, 5, 1)
+ println(xs)
+ println(sort(xs))
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_this.check b/test/files/run/reify_this.check
new file mode 100644
index 0000000..af3d065
--- /dev/null
+++ b/test/files/run/reify_this.check
@@ -0,0 +1,5 @@
+foo
+false
+2
+bar
+2
\ No newline at end of file
diff --git a/test/files/run/reify_this.scala b/test/files/run/reify_this.scala
new file mode 100644
index 0000000..ecbf394
--- /dev/null
+++ b/test/files/run/reify_this.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+trait Eval {
+ def eval(tree: Expr[_]) = tree.eval
+}
+
+object Test extends App with Eval {
+ // select a value from package
+ eval(reify{println("foo")})
+ eval(reify{println((new Object).toString == (new Object).toString)})
+
+ // select a type from package
+ eval(reify{val x: Any = 2; println(x)})
+ eval(reify{val x: Object = "bar"; println(x)})
+
+ // select a value from module
+ val x = 2
+ eval(reify{println(x)})
+}
\ No newline at end of file
diff --git a/test/files/run/reify_timeofday.check b/test/files/run/reify_timeofday.check
new file mode 100644
index 0000000..3fd3b76
--- /dev/null
+++ b/test/files/run/reify_timeofday.check
@@ -0,0 +1 @@
+DateError
diff --git a/test/files/run/reify_timeofday.scala b/test/files/run/reify_timeofday.scala
new file mode 100644
index 0000000..efeb81d
--- /dev/null
+++ b/test/files/run/reify_timeofday.scala
@@ -0,0 +1,42 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ class DateError extends Exception
+
+ /** Simulating properties in Scala
+ * (example 4.2.1 in ScalaReference.pdf)
+ */
+ class TimeOfDayVar {
+ private var h, m, s: Int = 0
+
+ def hours = h
+
+ /** A method 'ident_=' is a setter for 'ident'. 'code.ident = ...' will
+ * be translated to a call to 'ident_='
+ */
+ def hours_= (h: Int) =
+ if (0 <= h && h < 24) this.h = h
+ else throw new DateError()
+
+ def minutes = m
+ def minutes_= (m: Int) =
+ if (0 <= m && m < 60) this.m = m
+ else throw new DateError()
+
+ def seconds = s
+ def seconds_= (s: Int) =
+ if (0 <= s && s < 60) this.s = s
+ else throw new DateError()
+ }
+
+ val d = new TimeOfDayVar
+ d.hours = 8; d.minutes = 30; d.seconds = 0
+ try { d.hours = 25 // throws a DateError exception
+ } catch {
+ case de: DateError => println("DateError")
+ case e: Exception => println("Exception")
+ }
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/reify_typerefs_1a.check b/test/files/run/reify_typerefs_1a.check
new file mode 100644
index 0000000..919c298
--- /dev/null
+++ b/test/files/run/reify_typerefs_1a.check
@@ -0,0 +1 @@
+evaluated = List(Expression, Expression)
\ No newline at end of file
diff --git a/test/files/run/reify_typerefs_1a.scala b/test/files/run/reify_typerefs_1a.scala
new file mode 100644
index 0000000..2e961f1
--- /dev/null
+++ b/test/files/run/reify_typerefs_1a.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+class Expression {
+ override def toString = "Expression"
+}
+
+object Test extends App {
+ val code = reify {
+ List(new Expression, new Expression)
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_typerefs_1b.check b/test/files/run/reify_typerefs_1b.check
new file mode 100644
index 0000000..919c298
--- /dev/null
+++ b/test/files/run/reify_typerefs_1b.check
@@ -0,0 +1 @@
+evaluated = List(Expression, Expression)
\ No newline at end of file
diff --git a/test/files/run/reify_typerefs_1b.scala b/test/files/run/reify_typerefs_1b.scala
new file mode 100644
index 0000000..88bb864
--- /dev/null
+++ b/test/files/run/reify_typerefs_1b.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Expression {
+ override def toString = "Expression"
+}
+
+object Test extends App {
+ val code = reify {
+ List(Expression, Expression)
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_typerefs_2a.check b/test/files/run/reify_typerefs_2a.check
new file mode 100644
index 0000000..919c298
--- /dev/null
+++ b/test/files/run/reify_typerefs_2a.check
@@ -0,0 +1 @@
+evaluated = List(Expression, Expression)
\ No newline at end of file
diff --git a/test/files/run/reify_typerefs_2a.scala b/test/files/run/reify_typerefs_2a.scala
new file mode 100644
index 0000000..3a1db1d
--- /dev/null
+++ b/test/files/run/reify_typerefs_2a.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+package foo {
+ class Expression {
+ override def toString = "Expression"
+ }
+}
+
+object Test extends App {
+ val code = reify {
+ List(new foo.Expression, new foo.Expression)
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_typerefs_2b.check b/test/files/run/reify_typerefs_2b.check
new file mode 100644
index 0000000..919c298
--- /dev/null
+++ b/test/files/run/reify_typerefs_2b.check
@@ -0,0 +1 @@
+evaluated = List(Expression, Expression)
\ No newline at end of file
diff --git a/test/files/run/reify_typerefs_2b.scala b/test/files/run/reify_typerefs_2b.scala
new file mode 100644
index 0000000..50082aa
--- /dev/null
+++ b/test/files/run/reify_typerefs_2b.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+package foo {
+ object Expression {
+ override def toString = "Expression"
+ }
+}
+
+object Test extends App {
+ val code = reify {
+ List(foo.Expression, foo.Expression)
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_typerefs_3a.check b/test/files/run/reify_typerefs_3a.check
new file mode 100644
index 0000000..919c298
--- /dev/null
+++ b/test/files/run/reify_typerefs_3a.check
@@ -0,0 +1 @@
+evaluated = List(Expression, Expression)
\ No newline at end of file
diff --git a/test/files/run/reify_typerefs_3a.scala b/test/files/run/reify_typerefs_3a.scala
new file mode 100644
index 0000000..682d6f0
--- /dev/null
+++ b/test/files/run/reify_typerefs_3a.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object foo {
+ class Expression {
+ override def toString = "Expression"
+ }
+}
+
+object Test extends App {
+ val code = reify {
+ List(new foo.Expression, new foo.Expression)
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_typerefs_3b.check b/test/files/run/reify_typerefs_3b.check
new file mode 100644
index 0000000..919c298
--- /dev/null
+++ b/test/files/run/reify_typerefs_3b.check
@@ -0,0 +1 @@
+evaluated = List(Expression, Expression)
\ No newline at end of file
diff --git a/test/files/run/reify_typerefs_3b.scala b/test/files/run/reify_typerefs_3b.scala
new file mode 100644
index 0000000..c85072f
--- /dev/null
+++ b/test/files/run/reify_typerefs_3b.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object foo {
+ object Expression {
+ override def toString = "Expression"
+ }
+}
+
+object Test extends App {
+ val code = reify {
+ List(foo.Expression, foo.Expression)
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/reify_varargs.check b/test/files/run/reify_varargs.check
new file mode 100644
index 0000000..e300a57
--- /dev/null
+++ b/test/files/run/reify_varargs.check
@@ -0,0 +1 @@
+Message=On the fifth of August there was a disturbance in the Force on planet Hoth.
diff --git a/test/files/run/reify_varargs.scala b/test/files/run/reify_varargs.scala
new file mode 100644
index 0000000..1cbc7c9
--- /dev/null
+++ b/test/files/run/reify_varargs.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ val msg = java.text.MessageFormat.format(
+ "On {1} there was {2} on planet {0}.",
+ "Hoth", "the fifth of August", "a disturbance in the Force")
+ println("Message="+msg)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/repl-backticks.scala b/test/files/run/repl-backticks.scala
index 11c58e1..5eaa1ec 100644
--- a/test/files/run/repl-backticks.scala
+++ b/test/files/run/repl-backticks.scala
@@ -11,7 +11,7 @@ object Test {
def main(args: Array[String]) = {
val settings = new Settings()
settings.classpath.value = System.getProperty("java.class.path")
- val repl = new Interpreter(settings)
+ val repl = new interpreter.IMain(settings)
repl.interpret(testCode)
}
}
diff --git a/test/files/run/repl-bare-expr.check b/test/files/run/repl-bare-expr.check
index 04daa48..8b6434e 100644
--- a/test/files/run/repl-bare-expr.check
+++ b/test/files/run/repl-bare-expr.check
@@ -4,15 +4,33 @@ Type :help for more information.
scala>
scala> 2 ; 3
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 2 ;;
+ ^
res0: Int = 3
scala> { 2 ; 3 }
+<console>:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ { 2 ; 3 }
+ ^
res1: Int = 3
scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
1 +
2 +
3 } ; bippy+88+11
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
+ ^
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
+ ^
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
+ ^
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = {
+ ^
defined module Cow
defined class Moo
bippy: Int
diff --git a/test/files/run/repl-colon-type.check b/test/files/run/repl-colon-type.check
index 66c2fcc..0cb18e9 100644
--- a/test/files/run/repl-colon-type.check
+++ b/test/files/run/repl-colon-type.check
@@ -4,7 +4,7 @@ Type :help for more information.
scala>
scala> :type List[1, 2, 3]
-<console>:2: error: identifier expected but integer literal found.
+<console>:1: error: identifier expected but integer literal found.
List[1, 2, 3]
^
@@ -25,7 +25,7 @@ scala> :type def f[T >: Null, U <: String](x: T, y: U) = Set(x, y)
[T >: Null, U <: String](x: T, y: U)scala.collection.immutable.Set[Any]
scala> :type def x = 1 ; def bar[T >: Null <: AnyRef](xyz: T) = 5
-[T >: Null <: AnyRef](xyz: T)Int
+=> Int <and> [T >: Null <: AnyRef](xyz: T)Int
scala>
@@ -39,10 +39,16 @@ scala> :type lazy val f = 5
Int
scala> :type protected lazy val f = 5
-Int
+<console>:5: error: lazy value f cannot be accessed in object $iw
+ Access to protected value f not permitted because
+ enclosing object $eval in package $line13 is not a subclass of
+ object $iw where target is defined
+ lazy val $result = `f`
+ ^
+
scala> :type def f = 5
-Int
+=> Int
scala> :type def f() = 5
()Int
@@ -54,4 +60,168 @@ scala> :type def g[T](xs: Set[_ <: T]) = Some(xs.head)
scala>
+scala> // verbose!
+
+scala> :type -v List(1,2,3) filter _
+// Type signature
+(Int => Boolean) => List[Int]
+
+// Internal Type structure
+TypeRef(
+ TypeSymbol(abstract trait Function1[-T1, +R] extends AnyRef)
+ args = List(
+ TypeRef(
+ TypeSymbol(abstract trait Function1[-T1, +R] extends AnyRef)
+ args = List(
+ TypeRef(TypeSymbol(final abstract class Int extends AnyVal))
+ TypeRef(
+ TypeSymbol(final abstract class Boolean extends AnyVal)
+ )
+ )
+ )
+ TypeRef(
+ TypeSymbol(
+ sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]]
+
+ )
+ args = List(
+ TypeRef(TypeSymbol(final abstract class Int extends AnyVal))
+ )
+ )
+ )
+)
+
+scala> :type -v def f[T >: Null, U <: String](x: T, y: U) = Set(x, y)
+// Type signature
+[T >: Null, U <: String](x: T, y: U)scala.collection.immutable.Set[Any]
+
+// Internal Type structure
+PolyType(
+ typeParams = List(TypeParam(T >: Null), TypeParam(U <: String))
+ resultType = MethodType(
+ params = List(TermSymbol(x: T), TermSymbol(y: U))
+ resultType = TypeRef(
+ TypeSymbol(
+ abstract trait Set[A] extends Iterable[A] with Set[A] with GenericSetTemplate[A,scala.collection.immutable.Set] with SetLike[A,scala.collection.immutable.Set[A]] with Parallelizable[A,scala.collection.parallel.immutable.ParSet[A]]
+
+ )
+ args = List(TypeRef(TypeSymbol(abstract class Any extends )))
+ )
+ )
+)
+
+scala> :type -v def x = 1 ; def bar[T >: Null <: AnyRef](xyz: T) = 5
+// Type signature
+=> Int <and> [T >: Null <: AnyRef](xyz: T)Int
+
+// Internal Type structure
+OverloadedType(
+ alts = List(
+ NullaryMethodType(
+ TypeRef(TypeSymbol(final abstract class Int extends AnyVal))
+ )
+ PolyType(
+ typeParams = List(TypeParam(T >: Null <: AnyRef))
+ resultType = MethodType(
+ params = List(TermSymbol(xyz: T))
+ resultType = TypeRef(
+ TypeSymbol(final abstract class Int extends AnyVal)
+ )
+ )
+ )
+ )
+)
+
+scala> :type -v Nil.combinations _
+// Type signature
+Int => Iterator[List[Nothing]]
+
+// Internal Type structure
+TypeRef(
+ TypeSymbol(abstract trait Function1[-T1, +R] extends AnyRef)
+ args = List(
+ TypeRef(TypeSymbol(final abstract class Int extends AnyVal))
+ TypeRef(
+ TypeSymbol(
+ abstract trait Iterator[+A] extends TraversableOnce[A]
+ )
+ args = List(
+ TypeRef(
+ TypeSymbol(
+ sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]]
+
+ )
+ args = List(
+ TypeRef(
+ TypeSymbol(final abstract class Nothing extends Any)
+ )
+ )
+ )
+ )
+ )
+ )
+)
+
+scala> :type -v def f[T <: AnyVal] = List[T]().combinations _
+// Type signature
+[T <: AnyVal]=> Int => Iterator[List[T]]
+
+// Internal Type structure
+PolyType(
+ typeParams = List(TypeParam(T <: AnyVal))
+ resultType = NullaryMethodType(
+ TypeRef(
+ TypeSymbol(abstract trait Function1[-T1, +R] extends AnyRef)
+ args = List(
+ TypeRef(TypeSymbol(final abstract class Int extends AnyVal))
+ TypeRef(
+ TypeSymbol(
+ abstract trait Iterator[+A] extends TraversableOnce[A]
+ )
+ args = List(
+ TypeRef(
+ TypeSymbol(
+ sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]]
+
+ )
+ args = List(TypeParamTypeRef(TypeParam(T <: AnyVal)))
+ )
+ )
+ )
+ )
+ )
+ )
+)
+
+scala> :type -v def f[T, U >: T](x: T, y: List[U]) = x :: y
+// Type signature
+[T, U >: T](x: T, y: List[U])List[U]
+
+// Internal Type structure
+PolyType(
+ typeParams = List(TypeParam(T), TypeParam(U >: T))
+ resultType = MethodType(
+ params = List(TermSymbol(x: T), TermSymbol(y: List[U]))
+ resultType = TypeRef(
+ TypeSymbol(
+ sealed abstract class List[+A] extends AbstractSeq[A] with LinearSeq[A] with Product with GenericTraversableTemplate[A,List] with LinearSeqOptimized[A,List[A]]
+
+ )
+ args = List(TypeParamTypeRef(TypeParam(U >: T)))
+ )
+ )
+)
+
+scala>
+
+scala> // SI-7132 - :type doesn't understand Unit
+
+scala> :type ()
+Unit
+
+scala> :type println("side effect!")
+Unit
+
+scala>
+
scala>
diff --git a/test/files/run/repl-colon-type.scala b/test/files/run/repl-colon-type.scala
index 39ab580..8cf81a6 100644
--- a/test/files/run/repl-colon-type.scala
+++ b/test/files/run/repl-colon-type.scala
@@ -18,6 +18,18 @@ object Test extends ReplTest {
|:type def f() = 5
|
|:type def g[T](xs: Set[_ <: T]) = Some(xs.head)
+ |
+ |// verbose!
+ |:type -v List(1,2,3) filter _
+ |:type -v def f[T >: Null, U <: String](x: T, y: U) = Set(x, y)
+ |:type -v def x = 1 ; def bar[T >: Null <: AnyRef](xyz: T) = 5
+ |:type -v Nil.combinations _
+ |:type -v def f[T <: AnyVal] = List[T]().combinations _
+ |:type -v def f[T, U >: T](x: T, y: List[U]) = x :: y
+ |
+ |// SI-7132 - :type doesn't understand Unit
+ |:type ()
+ |:type println("side effect!")
""".stripMargin
}
diff --git a/test/files/run/repl-parens.check b/test/files/run/repl-parens.check
index 2f56e5d..15f4b45 100644
--- a/test/files/run/repl-parens.check
+++ b/test/files/run/repl-parens.check
@@ -20,6 +20,12 @@ scala> ( (2 + 2 ) )
res5: Int = 4
scala> 5 ; ( (2 + 2 ) ) ; ((5))
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 5 ; ( (2 + 2 ) ) ;;
+ ^
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 5 ; ( (2 + 2 ) ) ;;
+ ^
res6: Int = 5
scala> (((2 + 2)), ((2 + 2)))
@@ -28,15 +34,24 @@ res7: (Int, Int) = (4,4)
scala> (((2 + 2)), ((2 + 2)), 2)
res8: (Int, Int, Int) = (4,4,2)
-scala> ((((2 + 2)), ((2 + 2)), 2).productIterator ++ Iterator(3) mkString)
+scala> (((((2 + 2)), ((2 + 2)), 2).productIterator ++ Iterator(3)).mkString)
res9: String = 4423
scala>
scala> 55 ; ((2 + 2)) ; (1, 2, 3)
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 55 ; ((2 + 2)) ;;
+ ^
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 55 ; ((2 + 2)) ;;
+ ^
res10: (Int, Int, Int) = (1,2,3)
scala> 55 ; (x: Int) => x + 1 ; () => ((5))
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 55 ; (x: Int) => x + 1 ;;
+ ^
res11: () => Int = <function0>
scala>
@@ -45,10 +60,13 @@ scala> () => 5
res12: () => Int = <function0>
scala> 55 ; () => 5
+<console>:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
+ 55 ;;
+ ^
res13: () => Int = <function0>
scala> () => { class X ; new X }
-res14: () => java.lang.Object with ScalaObject = <function0>
+res14: () => AnyRef = <function0>
scala>
@@ -59,3 +77,8 @@ scala> foo(5)(10)(15)+foo(5)(10)(15)
res15: Int = 60
scala>
+
+scala> List(1) ++ List('a')
+res16: List[AnyVal] = List(1, a)
+
+scala>
diff --git a/test/files/run/repl-parens.scala b/test/files/run/repl-parens.scala
index 1baa9c3..e25933b 100644
--- a/test/files/run/repl-parens.scala
+++ b/test/files/run/repl-parens.scala
@@ -11,7 +11,7 @@ object Test extends ReplTest {
5 ; ( (2 + 2 ) ) ; ((5))
(((2 + 2)), ((2 + 2)))
(((2 + 2)), ((2 + 2)), 2)
-((((2 + 2)), ((2 + 2)), 2).productIterator ++ Iterator(3) mkString)
+(((((2 + 2)), ((2 + 2)), 2).productIterator ++ Iterator(3)).mkString)
55 ; ((2 + 2)) ; (1, 2, 3)
55 ; (x: Int) => x + 1 ; () => ((5))
@@ -23,5 +23,7 @@ object Test extends ReplTest {
def foo(x: Int)(y: Int)(z: Int) = x+y+z
foo(5)(10)(15)+foo(5)(10)(15)
+List(1) ++ List('a')
+
""".trim
-}
\ No newline at end of file
+}
diff --git a/test/files/run/repl-paste-2.check b/test/files/run/repl-paste-2.check
index 18bd6d2..203b020 100644
--- a/test/files/run/repl-paste-2.check
+++ b/test/files/run/repl-paste-2.check
@@ -3,7 +3,7 @@ Type :help for more information.
scala>
-scala> scala> 0123
+scala> scala> 999l
// Detected repl transcript paste: ctrl-D to finish.
@@ -34,8 +34,8 @@ res10: Int = 12
// Replaying 8 commands from transcript.
-scala> 0123
-res0: Int = 83
+scala> 999l
+res0: Long = 999
scala> val res5 = { 123 }
res5: Int = 123
@@ -52,7 +52,7 @@ scala> val x = dingus
^
scala> val x = "dingus"
-x: java.lang.String = dingus
+x: String = dingus
scala> x.length
res2: Int = 6
diff --git a/test/files/run/repl-paste-2.scala b/test/files/run/repl-paste-2.scala
index f629277..65f9b25 100644
--- a/test/files/run/repl-paste-2.scala
+++ b/test/files/run/repl-paste-2.scala
@@ -2,7 +2,7 @@ import scala.tools.partest.ReplTest
object Test extends ReplTest {
def code = """
-scala> 0123
+scala> 999l
res4: Int = 0123
scala> 123
diff --git a/test/files/run/repl-paste-3.check b/test/files/run/repl-paste-3.check
new file mode 100644
index 0000000..2b4c941
--- /dev/null
+++ b/test/files/run/repl-paste-3.check
@@ -0,0 +1,14 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> println(3)
+3
+
+scala> List(1,2)
+res1: List[Int] = List(1, 2)
+
+scala>
+
+scala>
diff --git a/test/files/run/repl-paste-3.scala b/test/files/run/repl-paste-3.scala
new file mode 100644
index 0000000..3f26799
--- /dev/null
+++ b/test/files/run/repl-paste-3.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+println(3)
+ List(1,2)
+ """
+}
\ No newline at end of file
diff --git a/test/files/run/repl-power.check b/test/files/run/repl-power.check
index 38e7532..9d63ecd 100644
--- a/test/files/run/repl-power.check
+++ b/test/files/run/repl-power.check
@@ -2,15 +2,31 @@ Type in expressions to have them evaluated.
Type :help for more information.
scala> :power
-** Power User mode enabled - BEEP BOOP SPIZ **
+** Power User mode enabled - BEEP WHIR GYVE **
** :phase has been set to 'typer'. **
** scala.tools.nsc._ has been imported **
-** global._ and definitions._ also imported **
-** Try :help, vals.<tab>, power.<tab> **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
scala> // guarding against "error: reference to global is ambiguous"
scala> global.emptyValDef // "it is imported twice in the same scope by ..."
res0: $r.global.emptyValDef.type = private val _ = _
+scala> val tp = ArrayClass[scala.util.Random] // magic with tags
+tp: $r.global.Type = Array[scala.util.Random]
+
+scala> tp.memberType(Array_apply) // evidence
+res1: $r.global.Type = (i: Int)scala.util.Random
+
+scala> val m = LIT(10) MATCH (CASE(LIT(5)) ==> FALSE, DEFAULT ==> TRUE) // treedsl
+m: $r.treedsl.global.Match =
+10 match {
+ case 5 => false
+ case _ => true
+}
+
+scala> typed(m).tpe // typed is in scope
+res2: $r.treedsl.global.Type = Boolean
+
scala>
diff --git a/test/files/run/repl-power.scala b/test/files/run/repl-power.scala
index 9f70ac4..f7c88c6 100644
--- a/test/files/run/repl-power.scala
+++ b/test/files/run/repl-power.scala
@@ -5,6 +5,9 @@ object Test extends ReplTest {
:power
// guarding against "error: reference to global is ambiguous"
global.emptyValDef // "it is imported twice in the same scope by ..."
+val tp = ArrayClass[scala.util.Random] // magic with tags
+tp.memberType(Array_apply) // evidence
+val m = LIT(10) MATCH (CASE(LIT(5)) ==> FALSE, DEFAULT ==> TRUE) // treedsl
+typed(m).tpe // typed is in scope
""".trim
}
-
diff --git a/test/files/run/repl-reset.check b/test/files/run/repl-reset.check
new file mode 100644
index 0000000..7256b85
--- /dev/null
+++ b/test/files/run/repl-reset.check
@@ -0,0 +1,55 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> val x1 = 1
+x1: Int = 1
+
+scala> val x2 = 2
+x2: Int = 2
+
+scala> val x3 = 3
+x3: Int = 3
+
+scala> case class BippyBungus()
+defined class BippyBungus
+
+scala> x1 + x2 + x3
+res0: Int = 6
+
+scala> :reset
+Resetting interpreter state.
+Forgetting this session history:
+
+val x1 = 1
+val x2 = 2
+val x3 = 3
+case class BippyBungus()
+x1 + x2 + x3
+
+Forgetting all expression results and named terms: $intp, BippyBungus, x1, x2, x3
+Forgetting defined types: BippyBungus
+
+scala> x1 + x2 + x3
+<console>:8: error: not found: value x1
+ x1 + x2 + x3
+ ^
+
+scala> val x1 = 4
+x1: Int = 4
+
+scala> new BippyBungus
+<console>:8: error: not found: type BippyBungus
+ new BippyBungus
+ ^
+
+scala> class BippyBungus() { def f = 5 }
+defined class BippyBungus
+
+scala> { new BippyBungus ; x1 }
+res2: Int = 4
+
+scala>
+
+scala>
diff --git a/test/files/run/repl-reset.scala b/test/files/run/repl-reset.scala
new file mode 100644
index 0000000..ad1602b
--- /dev/null
+++ b/test/files/run/repl-reset.scala
@@ -0,0 +1,22 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |val x1 = 1
+ |val x2 = 2
+ |val x3 = 3
+ |case class BippyBungus()
+ |x1 + x2 + x3
+ |:reset
+ |x1 + x2 + x3
+ |val x1 = 4
+ |new BippyBungus
+ |class BippyBungus() { def f = 5 }
+ |{ new BippyBungus ; x1 }
+ """.stripMargin
+
+ // would be nice if we could count on javap being present
+ // but no
+ //
+ // |:javap BippyBungus
+}
diff --git a/test/files/run/repl-type-verbose.check b/test/files/run/repl-type-verbose.check
new file mode 100644
index 0000000..989c073
--- /dev/null
+++ b/test/files/run/repl-type-verbose.check
@@ -0,0 +1,194 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> // verbose!
+
+scala> :type -v def f = 5
+// Type signature
+=> Int
+
+// Internal Type structure
+NullaryMethodType(
+ TypeRef(TypeSymbol(final abstract class Int extends AnyVal))
+)
+
+scala> :type -v def f() = 5
+// Type signature
+()Int
+
+// Internal Type structure
+NullaryMethodType(
+ resultType = TypeRef(
+ TypeSymbol(final abstract class Int extends AnyVal)
+ )
+)
+
+scala> :type -v def f[T] = 5
+// Type signature
+[T]=> Int
+
+// Internal Type structure
+PolyType(
+ typeParams = List(TypeParam(T))
+ resultType = NullaryMethodType(
+ TypeRef(TypeSymbol(final abstract class Int extends AnyVal))
+ )
+)
+
+scala> :type -v def f[T >: Null] = 5
+// Type signature
+[T >: Null]=> Int
+
+// Internal Type structure
+PolyType(
+ typeParams = List(TypeParam(T >: Null))
+ resultType = NullaryMethodType(
+ TypeRef(TypeSymbol(final abstract class Int extends AnyVal))
+ )
+)
+
+scala> :type -v def f[T <: String] = 5
+// Type signature
+[T <: String]=> Int
+
+// Internal Type structure
+PolyType(
+ typeParams = List(TypeParam(T <: String))
+ resultType = NullaryMethodType(
+ TypeRef(TypeSymbol(final abstract class Int extends AnyVal))
+ )
+)
+
+scala> :type -v def f[T]() = 5
+// Type signature
+[T]()Int
+
+// Internal Type structure
+PolyType(
+ typeParams = List(TypeParam(T))
+ resultType = NullaryMethodType(
+ resultType = TypeRef(
+ TypeSymbol(final abstract class Int extends AnyVal)
+ )
+ )
+)
+
+scala> :type -v def f[T, U]() = 5
+// Type signature
+[T, U]()Int
+
+// Internal Type structure
+PolyType(
+ typeParams = List(TypeParam(T), TypeParam(U))
+ resultType = NullaryMethodType(
+ resultType = TypeRef(
+ TypeSymbol(final abstract class Int extends AnyVal)
+ )
+ )
+)
+
+scala> :type -v def f[T, U]()() = 5
+// Type signature
+[T, U]()()Int
+
+// Internal Type structure
+PolyType(
+ typeParams = List(TypeParam(T), TypeParam(U))
+ resultType = NullaryMethodType(
+ resultType = NullaryMethodType(
+ resultType = TypeRef(
+ TypeSymbol(final abstract class Int extends AnyVal)
+ )
+ )
+ )
+)
+
+scala> :type -v def f[T, U <: T] = 5
+// Type signature
+[T, U <: T]=> Int
+
+// Internal Type structure
+PolyType(
+ typeParams = List(TypeParam(T), TypeParam(U <: T))
+ resultType = NullaryMethodType(
+ TypeRef(TypeSymbol(final abstract class Int extends AnyVal))
+ )
+)
+
+scala> :type -v def f[T, U <: T](x: T)(y: U) = 5
+// Type signature
+[T, U <: T](x: T)(y: U)Int
+
+// Internal Type structure
+PolyType(
+ typeParams = List(TypeParam(T), TypeParam(U <: T))
+ resultType = MethodType(
+ params = List(TermSymbol(x: T))
+ resultType = MethodType(
+ params = List(TermSymbol(y: U))
+ resultType = TypeRef(
+ TypeSymbol(final abstract class Int extends AnyVal)
+ )
+ )
+ )
+)
+
+scala> :type -v def f[T: Ordering] = 5
+// Type signature
+[T](implicit evidence$1: Ordering[T])Int
+
+// Internal Type structure
+PolyType(
+ typeParams = List(TypeParam(T))
+ resultType = MethodType(
+ params = List(TermSymbol(implicit evidence$1: Ordering[T]))
+ resultType = TypeRef(
+ TypeSymbol(final abstract class Int extends AnyVal)
+ )
+ )
+)
+
+scala> :type -v def f[T: Ordering] = implicitly[Ordering[T]]
+// Type signature
+[T](implicit evidence$1: Ordering[T])Ordering[T]
+
+// Internal Type structure
+PolyType(
+ typeParams = List(TypeParam(T))
+ resultType = MethodType(
+ params = List(TermSymbol(implicit evidence$1: Ordering[T]))
+ resultType = AliasTypeRef(
+ Alias(type Ordering[T] = scala.math.Ordering[T])
+ args = List(TypeParamTypeRef(TypeParam(T)))
+ normalize = TypeRef(
+ TypeSymbol(
+ abstract trait Ordering[T] extends Comparator[T] with PartialOrdering[T] with Serializable
+
+ )
+ args = List(TypeParamTypeRef(TypeParam(T)))
+ )
+ )
+ )
+)
+
+scala> :type -v def f[T <: { type Bippy = List[Int] ; def g(): Bippy }] = 5
+// Type signature
+[T <: AnyRef{type Bippy = List[Int]; def g(): this.Bippy}]=> Int
+
+// Internal Type structure
+PolyType(
+ typeParams = List(
+ TypeParam(
+ T <: AnyRef{type Bippy = List[Int]; def g(): this.Bippy}
+ )
+ )
+ resultType = NullaryMethodType(
+ TypeRef(TypeSymbol(final abstract class Int extends AnyVal))
+ )
+)
+
+scala>
+
+scala>
diff --git a/test/files/run/repl-type-verbose.scala b/test/files/run/repl-type-verbose.scala
new file mode 100644
index 0000000..10c3905
--- /dev/null
+++ b/test/files/run/repl-type-verbose.scala
@@ -0,0 +1,20 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |// verbose!
+ |:type -v def f = 5
+ |:type -v def f() = 5
+ |:type -v def f[T] = 5
+ |:type -v def f[T >: Null] = 5
+ |:type -v def f[T <: String] = 5
+ |:type -v def f[T]() = 5
+ |:type -v def f[T, U]() = 5
+ |:type -v def f[T, U]()() = 5
+ |:type -v def f[T, U <: T] = 5
+ |:type -v def f[T, U <: T](x: T)(y: U) = 5
+ |:type -v def f[T: Ordering] = 5
+ |:type -v def f[T: Ordering] = implicitly[Ordering[T]]
+ |:type -v def f[T <: { type Bippy = List[Int] ; def g(): Bippy }] = 5
+ """.stripMargin
+}
diff --git a/test/files/run/resetattrs-this.check b/test/files/run/resetattrs-this.check
new file mode 100644
index 0000000..27ba77d
--- /dev/null
+++ b/test/files/run/resetattrs-this.check
@@ -0,0 +1 @@
+true
diff --git a/test/files/run/resetattrs-this.scala b/test/files/run/resetattrs-this.scala
new file mode 100644
index 0000000..12afa3d
--- /dev/null
+++ b/test/files/run/resetattrs-this.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val tree = Select(This(cm.staticPackage("scala").moduleClass), newTermName("Predef"))
+ val ttree = tb.typeCheck(tree)
+ val rttree = tb.resetAllAttrs(ttree)
+ println(tb.eval(rttree) == Predef)
+}
\ No newline at end of file
diff --git a/test/files/run/richWrapperEquals.scala b/test/files/run/richWrapperEquals.scala
index 44beb13..4a43617 100644
--- a/test/files/run/richWrapperEquals.scala
+++ b/test/files/run/richWrapperEquals.scala
@@ -1,10 +1,6 @@
object Test {
def main(args: Array[String]): Unit = {
- assert(intWrapper(5) == 5)
- assert(5 == intWrapper(5))
assert(5 == (5: java.lang.Integer))
assert((5: java.lang.Integer) == 5)
- assert((5: java.lang.Integer) == intWrapper(5))
- assert(intWrapper(5) == (5: java.lang.Integer))
}
}
diff --git a/test/files/run/richs-msil.check b/test/files/run/richs-msil.check
deleted file mode 100644
index e628ad3..0000000
--- a/test/files/run/richs-msil.check
+++ /dev/null
@@ -1,66 +0,0 @@
-
-RichCharTest1:
-True
-True
-True
-False
-
-RichIntTest:
-10
-11
-12
-13
-0
-0
-
-RichStringTest1:
-s1: abc
-s2: abc\txyz\n
-s3: abc
- xyz
-s4: abc
- |xyz
-s5: abc
- #xyz
-
-RichStringTest2:
-s1: abc
-s2: abc\txyz\n
-s3: abc
- xyz
-s4: abc
- |xyz
-s5: abc
- #xyz
-
-RichStringTest3:
-s1: abc
-s2: abc\txyz\n
-s3: abc
- xyz
-s4: abc
- |xyz
-s5: abc
- #xyz
-
-RichStringTest4:
-s1: abc
-s2: abc\txyz\n
-s3: abc
- xyz
-s4: abc
-xyz
-s5: abc
- #xyz
-
-RichStringTest5:
-s1: abc
- xyz
-s2: abc
- xyz
-s3: abc
- xyz
-s4: abc
- |xyz
-s5: abc
-xyz
diff --git a/test/files/run/runtime-msil.check b/test/files/run/runtime-msil.check
deleted file mode 100644
index 70e7608..0000000
--- a/test/files/run/runtime-msil.check
+++ /dev/null
@@ -1,64 +0,0 @@
-<<< Test0
-[False,True]
-[0,1,2]
-[3,4,5]
-[a,b,c]
-[6,7,8]
-[9,10,11]
-[12,13]
-[14,15]
-[string]
->>> Test0
-
-<<< Test1
-10
-14
-15
-16
-20
-23
-24
-25
-26
->>> Test1
-
-<<< Test2
-A
-M0
-N0
-
-A
-N0
-M0
-
-A
-M0
-M1
-N0
-
-A
-N0
-N1
-M0
-
->>> Test2
-
-<<< Test3
-Ok
-Ok
-Ok
-Ok
-Ok
-Ok
-Ok
-Ok
-Ok
-Ok
-Ok
-Ok
-Ok
-Ok
-Ok
-Ok
->>> Test3
-
diff --git a/test/files/run/runtimeEval1.check b/test/files/run/runtimeEval1.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/runtimeEval1.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/runtimeEval1.scala b/test/files/run/runtimeEval1.scala
new file mode 100644
index 0000000..9497b52
--- /dev/null
+++ b/test/files/run/runtimeEval1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ val x = 2
+ println(x)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/runtimeEval2.check b/test/files/run/runtimeEval2.check
new file mode 100644
index 0000000..0cfbf08
--- /dev/null
+++ b/test/files/run/runtimeEval2.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/runtimeEval2.scala b/test/files/run/runtimeEval2.scala
new file mode 100644
index 0000000..513b820
--- /dev/null
+++ b/test/files/run/runtimeEval2.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val x = 2
+ val outer = reify{reify{x}}
+ println(outer.eval.eval)
+}
\ No newline at end of file
diff --git a/test/files/run/sequenceComparisons.scala b/test/files/run/sequenceComparisons.scala
index c8e14e4..5d7958b 100644
--- a/test/files/run/sequenceComparisons.scala
+++ b/test/files/run/sequenceComparisons.scala
@@ -104,10 +104,10 @@ object Test {
val scrut = s1f(seq)
for (Method(f, (trueList, falseList), descr) <- methodList) {
- for (s <- trueList; val rhs = s2f(s))
+ for (s <- trueList; rhs = s2f(s))
assertOne(scrut, rhs, f(scrut, rhs), descr)
- for (s <- falseList; val rhs = s2f(s))
+ for (s <- falseList; rhs = s2f(s))
assertOne(scrut, rhs, !f(scrut, rhs), "!(" + descr + ")")
}
}
diff --git a/test/files/run/serialize-stream.check b/test/files/run/serialize-stream.check
new file mode 100644
index 0000000..e2a9f57
--- /dev/null
+++ b/test/files/run/serialize-stream.check
@@ -0,0 +1,6 @@
+Stream(1, ?)
+List(1, 2, 3)
+Stream(1, ?)
+List(1)
+Stream()
+List()
diff --git a/test/files/run/serialize-stream.scala b/test/files/run/serialize-stream.scala
new file mode 100644
index 0000000..e424d5b
--- /dev/null
+++ b/test/files/run/serialize-stream.scala
@@ -0,0 +1,20 @@
+
+
+object Test {
+ def ser[T](s: Stream[T]) {
+ val bos = new java.io.ByteArrayOutputStream()
+ val oos = new java.io.ObjectOutputStream(bos)
+ oos.writeObject(s)
+
+ val ois = new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(bos.toByteArray))
+ val obj = ois.readObject()
+ println(obj)
+ println(obj.asInstanceOf[Seq[T]].toList)
+ }
+
+ def main(args: Array[String]) {
+ ser(Stream(1, 2, 3))
+ ser(Stream(1))
+ ser(Stream())
+ }
+}
diff --git a/test/files/run/showraw_aliases.check b/test/files/run/showraw_aliases.check
new file mode 100644
index 0000000..aebd354
--- /dev/null
+++ b/test/files/run/showraw_aliases.check
@@ -0,0 +1,2 @@
+Block(List(Import(Select(Select(Ident(scala), scala.reflect), scala.reflect.runtime), List(ImportSelector(newTermName("universe"), <offset>, newTermName("ru"), <offset>)))), Select(Select(Select(Select(Ident(scala), scala.reflect), scala.reflect.runtime), scala.reflect.runtime.package), [newTermName("universe") aka newTermName("ru")]))
+Block(List(Import(Select(Select(Ident(scala#<id>), scala.reflect#<id>), scala.reflect.runtime#<id>), List(ImportSelector(newTermName("universe"), <offset>, newTermName("ru"), <offset>)))), Select(Select(Select(Select(Ident(scala#<id>), scala.reflect#<id>), scala.reflect.runtime#<id>), scala.reflect.runtime.package#<id>), [newTermName("universe")#<id> aka newTermName("ru")]))
diff --git a/test/files/run/showraw_aliases.scala b/test/files/run/showraw_aliases.scala
new file mode 100644
index 0000000..65b4fcb
--- /dev/null
+++ b/test/files/run/showraw_aliases.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = runtimeMirror(getClass.getClassLoader).mkToolBox()
+ val tree = tb.parse("""
+ import scala.reflect.runtime.{universe => ru}
+ ru
+ """)
+ val ttree = tb.typeCheck(tree)
+
+ def stabilizeIds(s: String) = """#\d+""".r.replaceAllIn(s, "#<id>")
+ def stabilizePositions(s: String) = """\d+""".r.replaceAllIn(s, "<offset>")
+ def stabilize(s: String) = stabilizePositions(stabilizeIds(s))
+ println(stabilize(showRaw(ttree)))
+ println(stabilize(showRaw(ttree, printIds = true)))
+}
\ No newline at end of file
diff --git a/test/files/run/showraw_mods.check b/test/files/run/showraw_mods.check
new file mode 100644
index 0000000..7fca027
--- /dev/null
+++ b/test/files/run/showraw_mods.check
@@ -0,0 +1 @@
+Block(List(ClassDef(Modifiers(ABSTRACT | DEFAULTPARAM/TRAIT), newTypeName("C"), List(), Template(List(Ident(newTypeName("AnyRef"))), emptyValDef, List(DefDef(Modifiers(), newTermName("$init$"), List(), List(List()), TypeTree(), Block(List(), Literal(Constant(())))), ValDef(Modifiers(PRIVATE | LOCAL), newTermName("x"), TypeTree(), Literal(Constant(2))), ValDef(Modifiers(MUTABLE), newTermName("y"), TypeTree(), Select(This(newTypeName("C")), newTermName("x"))), ValDef(Modifiers(LAZY), newTe [...]
diff --git a/test/files/run/showraw_mods.scala b/test/files/run/showraw_mods.scala
new file mode 100644
index 0000000..a10e482
--- /dev/null
+++ b/test/files/run/showraw_mods.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree = reify{trait C { private[this] val x = 2; var y = x; lazy val z = y }}
+ println(showRaw(tree.tree))
+}
\ No newline at end of file
diff --git a/test/files/run/showraw_nosymbol.check b/test/files/run/showraw_nosymbol.check
new file mode 100644
index 0000000..c54fe74
--- /dev/null
+++ b/test/files/run/showraw_nosymbol.check
@@ -0,0 +1 @@
+NoSymbol
diff --git a/test/files/run/showraw_nosymbol.scala b/test/files/run/showraw_nosymbol.scala
new file mode 100644
index 0000000..fbdc159
--- /dev/null
+++ b/test/files/run/showraw_nosymbol.scala
@@ -0,0 +1,5 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ println(showRaw(NoSymbol))
+}
\ No newline at end of file
diff --git a/test/files/run/showraw_tree.check b/test/files/run/showraw_tree.check
new file mode 100644
index 0000000..b71018d
--- /dev/null
+++ b/test/files/run/showraw_tree.check
@@ -0,0 +1,2 @@
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Select(Ident(scala.Predef), newTypeName("String")), Select(Ident(scala.Predef), newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Select(Ident(scala.Predef), newTypeName("String")), Select(Ident(scala.Predef), newTypeName("String"))))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree.scala b/test/files/run/showraw_tree.scala
new file mode 100644
index 0000000..3624a24
--- /dev/null
+++ b/test/files/run/showraw_tree.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tree1.tree))
+ println(showRaw(tree2.tree))
+}
\ No newline at end of file
diff --git a/test/files/run/showraw_tree_ids.check b/test/files/run/showraw_tree_ids.check
new file mode 100644
index 0000000..5835ffa
--- /dev/null
+++ b/test/files/run/showraw_tree_ids.check
@@ -0,0 +1,2 @@
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#<id>), List(Select(Ident(scala.Predef#<id>), newTypeName("String")), Select(Ident(scala.Predef#<id>), newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#<id>), List(Select(Ident(scala.Predef#<id>), newTypeName("String")), Select(Ident(scala.Predef#<id>), newTypeName("String"))))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_ids.scala b/test/files/run/showraw_tree_ids.scala
new file mode 100644
index 0000000..ea9a3cd
--- /dev/null
+++ b/test/files/run/showraw_tree_ids.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ def stabilize(s: String) = """#\d+""".r.replaceAllIn(s, "#<id>")
+ println(stabilize(showRaw(tree1.tree, printIds = true)))
+ println(stabilize(showRaw(tree2.tree, printIds = true)))
+}
\ No newline at end of file
diff --git a/test/files/run/showraw_tree_kinds.check b/test/files/run/showraw_tree_kinds.check
new file mode 100644
index 0000000..c4d6685
--- /dev/null
+++ b/test/files/run/showraw_tree_kinds.check
@@ -0,0 +1,2 @@
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap#CLS), List(Select(Ident(scala.Predef#MOD), newTypeName("String")), Select(Ident(scala.Predef#MOD), newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap#CLS), List(Select(Ident(scala.Predef#MOD), newTypeName("String")), Select(Ident(scala.Predef#MOD), newTypeName("String"))))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_kinds.scala b/test/files/run/showraw_tree_kinds.scala
new file mode 100644
index 0000000..0ca5a38
--- /dev/null
+++ b/test/files/run/showraw_tree_kinds.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tree1.tree, printKinds = true))
+ println(showRaw(tree2.tree, printKinds = true))
+}
\ No newline at end of file
diff --git a/test/files/run/showraw_tree_types_ids.check b/test/files/run/showraw_tree_types_ids.check
new file mode 100644
index 0000000..fccb81d
--- /dev/null
+++ b/test/files/run/showraw_tree_types_ids.check
@@ -0,0 +1,12 @@
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), newTypeName("String")#<id>)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), newTypeName("String")#<id>)))))), nme.CONSTRUCTOR#<id>), List())
+[1] TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()))))
+[3] TypeRef(ThisType(scala.collection.immutable#<id>), scala.collection.immutable.HashMap#<id>, List())
+[4] TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List())
+[5] SingleType(ThisType(scala#<id>), scala.Predef#<id>)
+Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap#<id>), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), newTypeName("String")#<id>)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>), newTypeName("String")#<id>)))))), nme.CONSTRUCTOR#<id>), List())
+[4] TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List())
+[5] SingleType(ThisType(scala#<id>), scala.Predef#<id>)
+[6] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List())))
+[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List(TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()), TypeRef(SingleType(ThisType(scala#<id>), scala.Predef#<id>), newTypeName("String")#<id>, List()))))
+[8] TypeRef(ThisType(scala.collection.mutable#<id>), scala.collection.mutable.HashMap#<id>, List())
diff --git a/test/files/run/showraw_tree_types_ids.scala b/test/files/run/showraw_tree_types_ids.scala
new file mode 100644
index 0000000..198729e
--- /dev/null
+++ b/test/files/run/showraw_tree_types_ids.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = runtimeMirror(getClass.getClassLoader).mkToolBox()
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ def stabilize(s: String) = """#\d+""".r.replaceAllIn(s, "#<id>")
+ println(stabilize(showRaw(tb.typeCheck(tree1.tree), printIds = true, printTypes = true)))
+ println(stabilize(showRaw(tb.typeCheck(tree2.tree), printIds = true, printTypes = true)))
+}
\ No newline at end of file
diff --git a/test/files/run/showraw_tree_types_typed.check b/test/files/run/showraw_tree_types_typed.check
new file mode 100644
index 0000000..f3e0f8c
--- /dev/null
+++ b/test/files/run/showraw_tree_types_typed.check
@@ -0,0 +1,12 @@
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), newTypeName("String"))), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), newTypeName("String"))))))), nme.CONSTRUCTOR), List())
+[1] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()))))
+[3] TypeRef(ThisType(scala.collection.immutable), scala.collection.immutable.HashMap, List())
+[4] TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())
+[5] SingleType(ThisType(scala), scala.Predef)
+Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), newTypeName("String"))), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef), newTypeName("String"))))))), nme.CONSTRUCTOR), List())
+[4] TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())
+[5] SingleType(ThisType(scala), scala.Predef)
+[6] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List())))
+[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List(TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()), TypeRef(SingleType(ThisType(scala), scala.Predef), newTypeName("String"), List()))))
+[8] TypeRef(ThisType(scala.collection.mutable), scala.collection.mutable.HashMap, List())
diff --git a/test/files/run/showraw_tree_types_typed.scala b/test/files/run/showraw_tree_types_typed.scala
new file mode 100644
index 0000000..d7ccc84
--- /dev/null
+++ b/test/files/run/showraw_tree_types_typed.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = runtimeMirror(getClass.getClassLoader).mkToolBox()
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tb.typeCheck(tree1.tree), printTypes = true))
+ println(showRaw(tb.typeCheck(tree2.tree), printTypes = true))
+}
\ No newline at end of file
diff --git a/test/files/run/showraw_tree_types_untyped.check b/test/files/run/showraw_tree_types_untyped.check
new file mode 100644
index 0000000..b71018d
--- /dev/null
+++ b/test/files/run/showraw_tree_types_untyped.check
@@ -0,0 +1,2 @@
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.immutable.HashMap), List(Select(Ident(scala.Predef), newTypeName("String")), Select(Ident(scala.Predef), newTypeName("String"))))), nme.CONSTRUCTOR), List())
+Apply(Select(New(AppliedTypeTree(Ident(scala.collection.mutable.HashMap), List(Select(Ident(scala.Predef), newTypeName("String")), Select(Ident(scala.Predef), newTypeName("String"))))), nme.CONSTRUCTOR), List())
diff --git a/test/files/run/showraw_tree_types_untyped.scala b/test/files/run/showraw_tree_types_untyped.scala
new file mode 100644
index 0000000..4df2eb6
--- /dev/null
+++ b/test/files/run/showraw_tree_types_untyped.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ println(showRaw(tree1.tree, printTypes = true))
+ println(showRaw(tree2.tree, printTypes = true))
+}
\ No newline at end of file
diff --git a/test/files/run/showraw_tree_ultimate.check b/test/files/run/showraw_tree_ultimate.check
new file mode 100644
index 0000000..a6286ba
--- /dev/null
+++ b/test/files/run/showraw_tree_ultimate.check
@@ -0,0 +1,12 @@
+Apply[1](Select[2](New[1](TypeTree[1]().setOriginal(AppliedTypeTree(Ident[3](scala.collection.immutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE)))))), nme.CONSTRUCTOR#<id>#PCTOR), List())
+[1] TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List())))
+[2] MethodType(List(), TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()))))
+[3] TypeRef(ThisType(scala.collection.immutable#<id>#PK), scala.collection.immutable.HashMap#<id>#CLS, List())
+[4] TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List())
+[5] SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD)
+Apply[6](Select[7](New[6](TypeTree[6]().setOriginal(AppliedTypeTree(Ident[8](scala.collection.mutable.HashMap#<id>#CLS), List(TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE)), TypeTree[4]().setOriginal(Select[4](Ident[5](scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE)))))), nme.CONSTRUCTOR#<id>#CTOR), List())
+[4] TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List())
+[5] SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD)
+[6] TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List())))
+[7] MethodType(List(), TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List(TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()), TypeRef(SingleType(ThisType(scala#<id>#PK), scala.Predef#<id>#MOD), newTypeName("String")#<id>#TPE, List()))))
+[8] TypeRef(ThisType(scala.collection.mutable#<id>#PK), scala.collection.mutable.HashMap#<id>#CLS, List())
diff --git a/test/files/run/showraw_tree_ultimate.scala b/test/files/run/showraw_tree_ultimate.scala
new file mode 100644
index 0000000..a850762
--- /dev/null
+++ b/test/files/run/showraw_tree_ultimate.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = runtimeMirror(getClass.getClassLoader).mkToolBox()
+ val tree1 = reify(new collection.immutable.HashMap[String, String])
+ val tree2 = reify(new collection.mutable.HashMap[String, String])
+ def stabilize(s: String) = """#\d+""".r.replaceAllIn(s, "#<id>")
+ println(stabilize(showRaw(tb.typeCheck(tree1.tree), printIds = true, printKinds = true, printTypes = true)))
+ println(stabilize(showRaw(tb.typeCheck(tree2.tree), printIds = true, printKinds = true, printTypes = true)))
+}
\ No newline at end of file
diff --git a/test/files/run/shutdownhooks.check b/test/files/run/shutdownhooks.check
new file mode 100644
index 0000000..2995695
--- /dev/null
+++ b/test/files/run/shutdownhooks.check
@@ -0,0 +1,3 @@
+Fooblitzky!
+main#shutdown.
+Test#shutdown.
diff --git a/test/files/run/shutdownhooks.scala b/test/files/run/shutdownhooks.scala
new file mode 100644
index 0000000..7fe5d12
--- /dev/null
+++ b/test/files/run/shutdownhooks.scala
@@ -0,0 +1,37 @@
+object Test {
+ scala.sys.addShutdownHook {
+ Thread.sleep(1000)
+ println("Test#shutdown.")
+ }
+
+ def daemon() = {
+ val t = new Thread {
+ override def run() {
+ Thread.sleep(10000)
+ println("Hallelujah!") // should not see this
+ }
+ }
+ t.setDaemon(true)
+ t.start()
+ t
+ }
+
+ def nonDaemon() = {
+ val t = new Thread {
+ override def run() {
+ Thread.sleep(100)
+ println("Fooblitzky!")
+ }
+ }
+ t.start()
+ t
+ }
+
+ def main(args: Array[String]): Unit = {
+ daemon()
+ nonDaemon()
+ scala.sys.addShutdownHook {
+ println("main#shutdown.")
+ }
+ }
+}
diff --git a/test/files/run/si5045.check b/test/files/run/si5045.check
new file mode 100644
index 0000000..7e9c196
--- /dev/null
+++ b/test/files/run/si5045.check
@@ -0,0 +1,6 @@
+ extract an exact match 2011-07-15 2011-07-15
+ extract from middle of string 2011-07-15 2011-07-15
+ extract from middle of string (P2) 2011-07-15 2011-07-15
+ extract from middle of string (P3) 2011-07-15 2011-07-15
+ copyright example has date Copyright 2011 Copyright 2011
+ copyright example missing date No copyright No copyright
diff --git a/test/files/run/si5045.scala b/test/files/run/si5045.scala
new file mode 100644
index 0000000..e198b10
--- /dev/null
+++ b/test/files/run/si5045.scala
@@ -0,0 +1,46 @@
+object Test extends App {
+
+ import scala.util.matching.{ Regex, UnanchoredRegex }
+
+ val dateP1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r.unanchored
+ val dateP2 = """(\d\d\d\d)-(\d\d)-(\d\d)""" r ("year", "month", "day") unanchored
+ val dateP3 = new Regex("""(\d\d\d\d)-(\d\d)-(\d\d)""", "year", "month", "day") with UnanchoredRegex
+
+ val yearStr = "2011"
+ val dateStr = List(yearStr,"07","15").mkString("-")
+
+ def test(msg: String)(strs: Seq[String]): Unit = println("%40s %s".format(msg, strs mkString " "))
+
+ test("extract an exact match") {
+ val dateP1(y,m,d) = dateStr
+ Seq(List(y,m,d).mkString("-"), dateStr)
+ }
+
+ test("extract from middle of string") {
+ val dateP1(y,m,d) = "Tested on "+dateStr+"."
+ Seq(List(y,m,d).mkString("-"), dateStr)
+ }
+
+ test("extract from middle of string (P2)") {
+ val dateP2(y,m,d) = "Tested on "+dateStr+"."
+ Seq(List(y,m,d).mkString("-"), dateStr)
+ }
+
+ test("extract from middle of string (P3)") {
+ val dateP2(y,m,d) = "Tested on "+dateStr+"."
+ Seq(List(y,m,d).mkString("-"), dateStr)
+ }
+
+ def copyright(in: String): String = in match {
+ case dateP1(year, month, day) => "Copyright "+year
+ case _ => "No copyright"
+ }
+
+ test("copyright example has date") {
+ Seq(copyright("Date of this document: "+dateStr), "Copyright "+yearStr)
+ }
+
+ test("copyright example missing date") {
+ Seq(copyright("Date of this document: unknown"), "No copyright")
+ }
+}
diff --git a/test/files/run/sm-interpolator.scala b/test/files/run/sm-interpolator.scala
new file mode 100644
index 0000000..7f7b9f0
--- /dev/null
+++ b/test/files/run/sm-interpolator.scala
@@ -0,0 +1,41 @@
+object Test extends App {
+ import scala.reflect.internal.util.StringContextStripMarginOps
+ def check(actual: Any, expected: Any) = if (actual != expected) sys.error(s"expected: [$expected], actual: [$actual])")
+
+ val bar = "|\n ||"
+
+ check(
+ sm"""|ab
+ |de
+ |${bar} | ${1}""",
+ "ab \nde\n|\n || | 1")
+
+ check(
+ sm"|",
+ "")
+
+ check(
+ sm"${0}",
+ "0")
+
+ check(
+ sm"${0}",
+ "0")
+
+ check(
+ sm"""${0}|${1}
+ |""",
+ "0|1\n")
+
+ check(
+ sm""" ||""",
+ "|")
+
+ check(
+ sm""" ${" "} ||""",
+ " ||")
+
+ check(
+ sm"\n",
+ raw"\n".stripMargin)
+}
diff --git a/test/files/run/sort.scala b/test/files/run/sort.scala
index b89ff22..eea3a2d 100644
--- a/test/files/run/sort.scala
+++ b/test/files/run/sort.scala
@@ -1,9 +1,9 @@
object Test extends App {
- println((1 to 100000).toList.sort(_<_).length)
- println(List(1, 5, 10, 3, 2).toList.sort(_<_))
- println(List(1, 5, 10, 3, 2).toList.sort(_>_))
- println(List(10).toList.sort(_<_))
- println(List(10,9).toList.sort(_<_))
- println(List[Int]().toList.sort(_<_))
+ println((1 to 100000).toList.sortWith(_<_).length)
+ println(List(1, 5, 10, 3, 2).toList.sortWith(_<_))
+ println(List(1, 5, 10, 3, 2).toList.sortWith(_>_))
+ println(List(10).toList.sortWith(_<_))
+ println(List(10,9).toList.sortWith(_<_))
+ println(List[Int]().toList.sortWith(_<_))
}
diff --git a/test/files/run/spec-nlreturn.check b/test/files/run/spec-nlreturn.check
new file mode 100644
index 0000000..26cff07
--- /dev/null
+++ b/test/files/run/spec-nlreturn.check
@@ -0,0 +1,2 @@
+scala.runtime.NonLocalReturnControl$mcI$sp
+16
diff --git a/test/files/run/spec-nlreturn.scala b/test/files/run/spec-nlreturn.scala
new file mode 100644
index 0000000..ec5e722
--- /dev/null
+++ b/test/files/run/spec-nlreturn.scala
@@ -0,0 +1,16 @@
+object Test {
+ def f(): Int = {
+ try {
+ val g = 1 to 10 map { i => return 16 ; i } sum;
+ g
+ }
+ catch { case x: runtime.NonLocalReturnControl[_] =>
+ println(x.getClass.getName)
+ x.value.asInstanceOf[Int]
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(f())
+ }
+}
diff --git a/test/files/run/spec-self.check b/test/files/run/spec-self.check
new file mode 100644
index 0000000..e981f45
--- /dev/null
+++ b/test/files/run/spec-self.check
@@ -0,0 +1,2 @@
+5.0
+5.0
diff --git a/test/files/run/spec-self.scala b/test/files/run/spec-self.scala
new file mode 100644
index 0000000..1c95e0a
--- /dev/null
+++ b/test/files/run/spec-self.scala
@@ -0,0 +1,14 @@
+class Foo0 extends (() => Double) {
+ def apply() = 5.0d
+}
+
+class Foo1 extends (Double => Double) {
+ def apply(x: Double) = x
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println((new Foo0)())
+ println((new Foo1)(5.0d))
+ }
+}
diff --git a/test/files/run/stream-stack-overflow-filter-map.scala b/test/files/run/stream-stack-overflow-filter-map.scala
new file mode 100644
index 0000000..f3a9dd4
--- /dev/null
+++ b/test/files/run/stream-stack-overflow-filter-map.scala
@@ -0,0 +1,44 @@
+import collection.generic.{FilterMonadic, CanBuildFrom}
+
+object Test extends App {
+ def mapSucc[Repr, That](s: FilterMonadic[Int, Repr])(implicit cbf: CanBuildFrom[Repr, Int, That]) = s map (_ + 1)
+ def flatMapId[T, Repr, That](s: FilterMonadic[T, Repr])(implicit cbf: CanBuildFrom[Repr, T, That]) = s flatMap (Seq(_))
+
+ def testStreamPred(s: Stream[Int])(p: Int => Boolean) {
+ val res1 = s withFilter p
+ val res2 = s filter p
+
+ val expected = s.toSeq filter p
+
+ val fMapped1 = flatMapId(res1)
+ val fMapped2 = flatMapId(res2)
+ assert(fMapped1 == fMapped2)
+ assert(fMapped1.toSeq == expected)
+
+ val mapped1 = mapSucc(res1)
+ val mapped2 = mapSucc(res2)
+ assert(mapped1 == mapped2)
+ assert(mapped1.toSeq == (expected map (_ + 1)))
+
+ assert((res1 map identity).toSeq == res2.toSeq)
+ }
+
+ def testStream(s: Stream[Int]) {
+ testStreamPred(s)(_ => false)
+ testStreamPred(s)(_ => true)
+ testStreamPred(s)(_ % 2 == 0)
+ testStreamPred(s)(_ % 3 == 0)
+ }
+
+ //Reduced version of the test case - either invocation used to cause a stack
+ //overflow before commit 80b3f433e5536d086806fa108ccdfacf10719cc2.
+ val resFMap = (1 to 10000).toStream withFilter (_ => false) flatMap (Seq(_))
+ val resMap = (1 to 10000).toStream withFilter (_ => false) map (_ + 1)
+
+ //Complete test case for withFilter + map/flatMap, as requested by @axel22.
+ for (j <- (0 to 3) :+ 10000) {
+ val stream = (1 to j).toStream
+ assert(stream.toSeq == (1 to j).toSeq)
+ testStream(stream)
+ }
+}
diff --git a/test/files/run/stream_length.scala b/test/files/run/stream_length.scala
index 68e9cad..2808fbc 100644
--- a/test/files/run/stream_length.scala
+++ b/test/files/run/stream_length.scala
@@ -5,7 +5,7 @@ object Test {
if (depth == 0)
Stream(bias)
else {
- Stream.concat(Stream.range(1, 100).map((x: Int) => walk(depth-1, bias + x)))
+ (Stream.iterate(1, 99)(_+1).map((x: Int) => walk(depth-1, bias + x))).flatten
}
}
diff --git a/test/files/run/streams.check b/test/files/run/streams.check
index 7f89405..db6d2ee 100644
--- a/test/files/run/streams.check
+++ b/test/files/run/streams.check
@@ -1,5 +1,8 @@
Stream()
Stream()
+true
+true
+true
Array(1)
Stream(1, ?)
@@ -8,12 +11,21 @@ Stream()
Stream()
Stream(1)
Stream()
+true
+true
+true
+true
Array(1, 2)
Stream(2)
Stream()
Stream(1, 2)
Stream()
+true
+true
+true
+true
+true
999
512
@@ -23,3 +35,5 @@ Stream(100001, ?)
true
true
705082704
+
+true
diff --git a/test/files/run/streams.scala b/test/files/run/streams.scala
index 51b4e5d..03b2622 100644
--- a/test/files/run/streams.scala
+++ b/test/files/run/streams.scala
@@ -2,6 +2,9 @@ object Test extends App {
val s0: Stream[Int] = Stream.empty
println(s0.take(1))
println(s0.takeWhile(_ > 0))
+ println(s0.lengthCompare(-5) > 0)
+ println(s0.lengthCompare(0) == 0)
+ println(s0.lengthCompare(5) < 0)
println
val s1 = Stream.cons(1, Stream.empty)
@@ -12,6 +15,10 @@ object Test extends App {
println(s1.drop(2))
println(s1.drop(-1))
println(s1.dropWhile(_ > 0))
+ println(s1.lengthCompare(-5) > 0)
+ println(s1.lengthCompare(0) > 0)
+ println(s1.lengthCompare(1) == 0)
+ println(s1.lengthCompare(5) < 0)
println
val s2 = s1.append(Stream.cons(2, Stream.empty))
@@ -20,6 +27,11 @@ object Test extends App {
println(s2.drop(2))
println(s2.drop(-1))
println(s2.dropWhile(_ > 0))
+ println(s2.lengthCompare(-5) > 0)
+ println(s2.lengthCompare(0) > 0)
+ println(s2.lengthCompare(1) > 0)
+ println(s2.lengthCompare(2) == 0)
+ println(s2.lengthCompare(5) < 0)
println
val s3 = Stream.range(1, 1000) //100000 (ticket #153: Stackoverflow)
@@ -43,4 +55,12 @@ object Test extends App {
println(Stream.from(1).take(size).foldLeft(0)(_ + _))
val arr = new Array[Int](size)
Stream.from(1).take(size).copyToArray(arr, 0)
+
+ println
+
+ // ticket #6415
+ lazy val x = { println("evaluated"); 1 }
+ val s4 = 0 #:: x #:: Stream.empty
+
+ println(s4.isDefinedAt(0))
}
diff --git a/test/files/run/stringinterpolation_macro-run.check b/test/files/run/stringinterpolation_macro-run.check
new file mode 100644
index 0000000..be62c57
--- /dev/null
+++ b/test/files/run/stringinterpolation_macro-run.check
@@ -0,0 +1,62 @@
+false
+false
+true
+false
+true
+FALSE
+FALSE
+TRUE
+FALSE
+TRUE
+true
+false
+null
+0
+80000000
+4c01926
+NULL
+4C01926
+null
+NULL
+Scala
+SCALA
+5
+x
+x
+x
+x
+x
+x
+x
+x
+x
+x
+x
+x
+S
+120
+120
+120
+120
+120
+120
+120
+120
+120
+120
+120
+120
+120
+42
+3.400000e+00
+3.400000e+00
+3.400000e+00
+3.400000e+00
+3.400000e+00
+3.400000e+00
+3.000000e+00
+3.000000e+00
+05/26/12
+05/26/12
+05/26/12
+05/26/12
diff --git a/test/files/run/stringinterpolation_macro-run.scala b/test/files/run/stringinterpolation_macro-run.scala
new file mode 100644
index 0000000..9c59c33
--- /dev/null
+++ b/test/files/run/stringinterpolation_macro-run.scala
@@ -0,0 +1,103 @@
+object Test extends App {
+
+// 'b' / 'B' (category: general)
+// -----------------------------
+println(f"${null}%b")
+println(f"${false}%b")
+println(f"${true}%b")
+println(f"${new java.lang.Boolean(false)}%b")
+println(f"${new java.lang.Boolean(true)}%b")
+
+println(f"${null}%B")
+println(f"${false}%B")
+println(f"${true}%B")
+println(f"${new java.lang.Boolean(false)}%B")
+println(f"${new java.lang.Boolean(true)}%B")
+
+implicit val stringToBoolean = java.lang.Boolean.parseBoolean(_: String)
+println(f"${"true"}%b")
+println(f"${"false"}%b")
+
+// 'h' | 'H' (category: general)
+// -----------------------------
+println(f"${null}%h")
+println(f"${0.0}%h")
+println(f"${-0.0}%h")
+println(f"${"Scala"}%h")
+
+println(f"${null}%H")
+println(f"${"Scala"}%H")
+
+// 's' | 'S' (category: general)
+// -----------------------------
+println(f"${null}%s")
+println(f"${null}%S")
+println(f"${"Scala"}%s")
+println(f"${"Scala"}%S")
+println(f"${5}")
+
+// 'c' | 'C' (category: character)
+// -------------------------------
+println(f"${120:Char}%c")
+println(f"${120:Byte}%c")
+println(f"${120:Short}%c")
+println(f"${120:Int}%c")
+println(f"${new java.lang.Character('x')}%c")
+println(f"${new java.lang.Byte(120:Byte)}%c")
+println(f"${new java.lang.Short(120:Short)}%c")
+println(f"${new java.lang.Integer(120)}%c")
+
+println(f"${'x' : java.lang.Character}%c")
+println(f"${(120:Byte) : java.lang.Byte}%c")
+println(f"${(120:Short) : java.lang.Short}%c")
+println(f"${120 : java.lang.Integer}%c")
+
+implicit val stringToChar = (x: String) => x(0)
+println(f"${"Scala"}%c")
+
+// 'd' | 'o' | 'x' | 'X' (category: integral)
+// ------------------------------------------
+println(f"${120:Byte}%d")
+println(f"${120:Short}%d")
+println(f"${120:Int}%d")
+println(f"${120:Long}%d")
+println(f"${new java.lang.Byte(120:Byte)}%d")
+println(f"${new java.lang.Short(120:Short)}%d")
+println(f"${new java.lang.Integer(120)}%d")
+println(f"${new java.lang.Long(120)}%d")
+println(f"${120 : java.lang.Integer}%d")
+println(f"${120 : java.lang.Long}%d")
+println(f"${BigInt(120)}%d")
+println(f"${new java.math.BigInteger("120")}%d")
+
+{
+ implicit val strToShort = (s: String) => java.lang.Short.parseShort(s)
+ println(f"${"120"}%d")
+ implicit val strToInt = (s: String) => 42
+ println(f"${"120"}%d")
+}
+
+// 'e' | 'E' | 'g' | 'G' | 'f' | 'a' | 'A' (category: floating point)
+// ------------------------------------------------------------------
+println(f"${3.4f}%e")
+println(f"${3.4}%e")
+println(f"${3.4f : java.lang.Float}%e")
+println(f"${3.4 : java.lang.Double}%e")
+println(f"${BigDecimal(3.4)}%e")
+println(f"${new java.math.BigDecimal(3.4)}%e")
+println(f"${3}%e")
+println(f"${3L}%e")
+
+// 't' | 'T' (category: date/time)
+// -------------------------------
+import java.util.Calendar
+import java.util.Locale
+val c = Calendar.getInstance(Locale.US)
+c.set(2012, Calendar.MAY, 26)
+println(f"${c}%TD")
+println(f"${c.getTime}%TD")
+println(f"${c.getTime.getTime}%TD")
+
+implicit val strToDate = (x: String) => c
+println(f"""${"1234"}%TD""")
+}
diff --git a/test/files/run/synchronized.check b/test/files/run/synchronized.check
new file mode 100644
index 0000000..dd9f4ef
--- /dev/null
+++ b/test/files/run/synchronized.check
@@ -0,0 +1,128 @@
+ .|. c1.f1: OK
+ .|. c1.fi: OK
+ .|... c1.fv: OK
+ .|... c1.ff: OK
+ .|. c1.fl: OK
+ .|... c1.fo: OK
+ |.. c1.g1: OK
+ |.. c1.gi: OK
+ |.... c1.gv: OK
+ |..... c1.gf: OK
+ .|... c1.c.f1: OK
+ .|... c1.c.fi: OK
+ .|..... c1.c.fv: OK
+ .|..... c1.c.ff: OK
+ .|... c1.c.fl: OK
+ .|..... c1.c.fo: OK
+ .|... c1.c.fn: OK
+ |.... c1.c.g1: OK
+ |.... c1.c.gi: OK
+ |...... c1.c.gv: OK
+ |...... c1.c.gf: OK
+ .|... c1.O.f1: OK
+ .|... c1.O.fi: OK
+ .|..... c1.O.fv: OK
+ .|..... c1.O.ff: OK
+ .|... c1.O.fl: OK
+ .|..... c1.O.fo: OK
+ .|... c1.O.fn: OK
+ |.... c1.O.g1: OK
+ |.... c1.O.gi: OK
+ |...... c1.O.gv: OK
+ |...... c1.O.gf: OK
+ .|. O1.f1: OK
+ .|. O1.fi: OK
+ .|... O1.fv: OK
+ .|... O1.ff: OK
+ .|. O1.fl: OK
+ .|... O1.fo: OK
+ |.. O1.g1: OK
+ |.. O1.gi: OK
+ |.... O1.gv: OK
+ |.... O1.gf: OK
+ .|... O1.c.f1: OK
+ .|... O1.c.fi: OK
+ .|..... O1.c.fv: OK
+ .|..... O1.c.ff: OK
+ .|... O1.c.fl: OK
+ .|..... O1.c.fo: OK
+ .|... O1.c.fn: OK
+ |.... O1.c.g1: OK
+ |.... O1.c.gi: OK
+ |...... O1.c.gv: OK
+ |...... O1.c.gf: OK
+ .|... O1.O.f1: OK
+ .|... O1.O.fi: OK
+ .|..... O1.O.fv: OK
+ .|..... O1.O.ff: OK
+ .|... O1.O.fl: OK
+ .|..... O1.O.fo: OK
+ .|... O1.O.fn: OK
+ |.... O1.O.g1: OK
+ |.... O1.O.gi: OK
+ |...... O1.O.gv: OK
+ |...... O1.O.gf: OK
+ .|..... c2.f1: OK
+ .|..... c2.fi: OK
+ .|....... c2.fv: OK
+ .|....... c2.ff: OK
+ .|..... c2.fl: OK
+ .|....... c2.fo: OK
+ |....... c2.g1: OK
+ |....... c2.gi: OK
+ |......... c2.gv: OK
+ |......... c2.gf: OK
+ .|........ c2.c.f1: OK
+ .|........ c2.c.fi: OK
+ .|.......... c2.c.fv: OK
+ .|.......... c2.c.ff: OK
+ .|........ c2.c.fl: OK
+ .|.......... c2.c.fo: OK
+ .|....... c2.c.fn: OK
+ |......... c2.c.g1: OK
+ |......... c2.c.gi: OK
+ |........... c2.c.gv: OK
+ |........... c2.c.gf: OK
+ .|........ c2.O.f1: OK
+ .|........ c2.O.fi: OK
+ .|.......... c2.O.fv: OK
+ .|.......... c2.O.ff: OK
+ .|........ c2.O.fl: OK
+ .|.......... c2.O.fo: OK
+ .|....... c2.O.fn: OK
+ |......... c2.O.g1: OK
+ |......... c2.O.gi: OK
+ |........... c2.O.gv: OK
+ |........... c2.O.gf: OK
+ .|..... O2.f1: OK
+ .|..... O2.fi: OK
+ .|....... O2.fv: OK
+ .|....... O2.ff: OK
+ .|..... O2.fl: OK
+ .|....... O2.fo: OK
+ |....... O2.g1: OK
+ |....... O2.gi: OK
+ |......... O2.gv: OK
+ |......... O2.gf: OK
+ .|........ O2.c.f1: OK
+ .|........ O2.c.fi: OK
+ .|.......... O2.c.fv: OK
+ .|.......... O2.c.ff: OK
+ .|........ O2.c.fl: OK
+ .|.......... O2.c.fo: OK
+ .|....... O2.c.fn: OK
+ |......... O2.c.g1: OK
+ |......... O2.c.gi: OK
+ |........... O2.c.gv: OK
+ |........... O2.c.gf: OK
+ .|........ O2.O.f1: OK
+ .|........ O2.O.fi: OK
+ .|.......... O2.O.fv: OK
+ .|.......... O2.O.ff: OK
+ .|........ O2.O.fl: OK
+ .|.......... O2.O.fo: OK
+ .|....... O2.O.fn: OK
+ |......... O2.O.g1: OK
+ |......... O2.O.gi: OK
+ |........... O2.O.gv: OK
+ |........... O2.O.gf: OK
diff --git a/test/files/run/synchronized.flags b/test/files/run/synchronized.flags
new file mode 100644
index 0000000..1182725
--- /dev/null
+++ b/test/files/run/synchronized.flags
@@ -0,0 +1 @@
+-optimize
\ No newline at end of file
diff --git a/test/files/run/synchronized.scala b/test/files/run/synchronized.scala
new file mode 100644
index 0000000..1f0e329
--- /dev/null
+++ b/test/files/run/synchronized.scala
@@ -0,0 +1,449 @@
+import java.lang.Thread.holdsLock
+import scala.collection.mutable.StringBuilder
+
+object Util {
+ def checkLocks(held: AnyRef*)(notHeld: AnyRef*) = {
+ val sb = new StringBuilder
+ for (lock <- held) {
+ sb.append(if (holdsLock(lock)) '.' else '!')
+ }
+ print("%5s|" format sb)
+
+ sb.clear()
+ for (lock <- notHeld) {
+ sb.append(if (holdsLock(lock)) '!' else '.')
+ }
+ print("%-15s " format sb)
+
+ (held forall holdsLock) && !(notHeld exists holdsLock)
+ }
+}
+
+class C1 {
+ import Util._
+
+ val lock = new AnyRef
+
+ def f1 = synchronized { checkLocks(this)(this.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(C1.this, gfv, gfv.getClass, lock, lock.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass)
+ glv
+ }
+
+ class C {
+ def f1 = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass, fv, fv.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, C1.this, C1.this.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, C1.this, C1.this.getClass) }
+ def fn = C1.this.synchronized { checkLocks(C1.this)(C1.this.getClass, this, this.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, C1.this, C1.this.getClass, gv, gv.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, C1.this, C1.this.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
+ glv
+ }
+ }
+ val c = new C
+
+ object O {
+ def f1 = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, C1.this, C1.this.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(lock.getClass, ffv, ffv.getClass, C1.this, C1.this.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, C1.this, C1.this.getClass) }
+ def fn = C1.this.synchronized { checkLocks(C1.this)(C1.this.getClass, this, this.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, C1.this, C1.this.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(lock, lock.getClass, gfv, gfv.getClass, C1.this, C1.this.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
+ glv
+ }
+ }
+}
+
+object O1 {
+ import Util._
+
+ val lock = new AnyRef
+
+ def f1 = synchronized { checkLocks(this)(this.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass)
+ glv
+ }
+
+ class C {
+ def f1 = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, O1, O1.getClass, fv, fv.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, O1, O1.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, O1, O1.getClass) }
+ def fn = O1.synchronized { checkLocks(O1)(O1.getClass, this, this.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass, O1, O1.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, O1, O1.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, O1, O1.getClass, gv, gv.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, O1, O1.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass, O1, O1.getClass)
+ glv
+ }
+ }
+ val c = new C
+
+ object O {
+ def f1 = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, O1, O1.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(lock.getClass, ffv, ffv.getClass, O1, O1.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, O1, O1.getClass) }
+ def fn = O1.synchronized { checkLocks(O1)(O1.getClass, this, this.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass, O1, O1.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, O1, O1.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, O1, O1.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(lock, lock.getClass, gfv, gfv.getClass, O1, O1.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass, O1, O1.getClass)
+ glv
+ }
+ }
+}
+
+trait T {
+ import Util._
+
+ val Tclass = Class.forName("T$class")
+
+ val lock = new AnyRef
+
+ def f1 = synchronized { checkLocks(this)(this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass)
+ glv
+ }
+
+ class C {
+ def f1 = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, fv, fv.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ def fn = T.this.synchronized { checkLocks(T.this)(T.this.getClass, this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, T.this, T.this.getClass, gv, gv.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ glv
+ }
+ }
+ val c = new C
+
+ object O {
+ def f1 = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ @inline final def fi = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ def ff = {
+ lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(lock.getClass, ffv, ffv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ ffv(this)
+ }
+ def fl = {
+ lazy val flv = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ flv
+ }
+ def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) }
+ def fn = T.this.synchronized { checkLocks(T.this)(T.this.getClass, this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) }
+
+ def g1 = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ @inline final def gi = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ def gf = {
+ lazy val gfv: AnyRef => Boolean = lock => checkLocks()(lock, lock.getClass, gfv, gfv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ gfv(this)
+ }
+ def gl = {
+ lazy val glv = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass)
+ glv
+ }
+ }
+}
+
+class C2 extends T
+object O2 extends T
+
+object Test extends App {
+ def check(name: String, result: Boolean) {
+ println("%-10s %s" format (name +":", if (result) "OK" else "FAILED"))
+ }
+
+ val c1 = new C1
+ check("c1.f1", c1.f1)
+ check("c1.fi", c1.fi)
+ check("c1.fv", c1.fv())
+ check("c1.ff", c1.ff)
+ check("c1.fl", c1.fl)
+ check("c1.fo", c1.fo)
+ check("c1.g1", c1.g1)
+ check("c1.gi", c1.gi)
+ check("c1.gv", c1.gv())
+ check("c1.gf", c1.gf)
+// check("c1.gl", c1.gl) // FIXME *.gl are failing because of the issue described in SUGGEST-11
+
+ check("c1.c.f1", c1.c.f1)
+ check("c1.c.fi", c1.c.fi)
+ check("c1.c.fv", c1.c.fv())
+ check("c1.c.ff", c1.c.ff)
+ check("c1.c.fl", c1.c.fl)
+ check("c1.c.fo", c1.c.fo)
+ check("c1.c.fn", c1.c.fn)
+ check("c1.c.g1", c1.c.g1)
+ check("c1.c.gi", c1.c.gi)
+ check("c1.c.gv", c1.c.gv())
+ check("c1.c.gf", c1.c.gf)
+// check("c1.c.gl", c1.c.gl)
+
+ check("c1.O.f1", c1.O.f1)
+ check("c1.O.fi", c1.O.fi)
+ check("c1.O.fv", c1.O.fv())
+ check("c1.O.ff", c1.O.ff)
+ check("c1.O.fl", c1.O.fl)
+ check("c1.O.fo", c1.O.fo)
+ check("c1.O.fn", c1.O.fn)
+ check("c1.O.g1", c1.O.g1)
+ check("c1.O.gi", c1.O.gi)
+ check("c1.O.gv", c1.O.gv())
+ check("c1.O.gf", c1.O.gf)
+// check("c1.O.gl", c1.O.gl)
+
+ check("O1.f1", O1.f1)
+ check("O1.fi", O1.fi)
+ check("O1.fv", O1.fv())
+ check("O1.ff", O1.ff)
+ check("O1.fl", O1.fl)
+ check("O1.fo", O1.fo)
+ check("O1.g1", O1.g1)
+ check("O1.gi", O1.gi)
+ check("O1.gv", O1.gv())
+ check("O1.gf", O1.gf)
+// check("O1.gl", O1.gl)
+
+ check("O1.c.f1", O1.c.f1)
+ check("O1.c.fi", O1.c.fi)
+ check("O1.c.fv", O1.c.fv())
+ check("O1.c.ff", O1.c.ff)
+ check("O1.c.fl", O1.c.fl)
+ check("O1.c.fo", O1.c.fo)
+ check("O1.c.fn", O1.c.fn)
+ check("O1.c.g1", O1.c.g1)
+ check("O1.c.gi", O1.c.gi)
+ check("O1.c.gv", O1.c.gv())
+ check("O1.c.gf", O1.c.gf)
+// check("O1.c.gl", O1.c.gl)
+
+ check("O1.O.f1", O1.O.f1)
+ check("O1.O.fi", O1.O.fi)
+ check("O1.O.fv", O1.O.fv())
+ check("O1.O.ff", O1.O.ff)
+ check("O1.O.fl", O1.O.fl)
+ check("O1.O.fo", O1.O.fo)
+ check("O1.O.fn", O1.O.fn)
+ check("O1.O.g1", O1.O.g1)
+ check("O1.O.gi", O1.O.gi)
+ check("O1.O.gv", O1.O.gv())
+ check("O1.O.gf", O1.O.gf)
+// check("O1.O.gl", O1.O.gl)
+
+ val c2 = new C2
+ check("c2.f1", c2.f1)
+ check("c2.fi", c2.fi)
+ check("c2.fv", c2.fv())
+ check("c2.ff", c2.ff)
+ check("c2.fl", c2.fl)
+ check("c2.fo", c2.fo)
+ check("c2.g1", c2.g1)
+ check("c2.gi", c2.gi)
+ check("c2.gv", c2.gv())
+ check("c2.gf", c2.gf)
+// check("c2.gl", c2.gl)
+
+ check("c2.c.f1", c2.c.f1)
+ check("c2.c.fi", c2.c.fi)
+ check("c2.c.fv", c2.c.fv())
+ check("c2.c.ff", c2.c.ff)
+ check("c2.c.fl", c2.c.fl)
+ check("c2.c.fo", c2.c.fo)
+ check("c2.c.fn", c2.c.fn)
+ check("c2.c.g1", c2.c.g1)
+ check("c2.c.gi", c2.c.gi)
+ check("c2.c.gv", c2.c.gv())
+ check("c2.c.gf", c2.c.gf)
+// check("c2.c.gl", c2.c.gl)
+
+ check("c2.O.f1", c2.O.f1)
+ check("c2.O.fi", c2.O.fi)
+ check("c2.O.fv", c2.O.fv())
+ check("c2.O.ff", c2.O.ff)
+ check("c2.O.fl", c2.O.fl)
+ check("c2.O.fo", c2.O.fo)
+ check("c2.O.fn", c2.O.fn)
+ check("c2.O.g1", c2.O.g1)
+ check("c2.O.gi", c2.O.gi)
+ check("c2.O.gv", c2.O.gv())
+ check("c2.O.gf", c2.O.gf)
+// check("c2.O.gl", c2.O.gl)
+
+ check("O2.f1", O2.f1)
+ check("O2.fi", O2.fi)
+ check("O2.fv", O2.fv())
+ check("O2.ff", O2.ff)
+ check("O2.fl", O2.fl)
+ check("O2.fo", O2.fo)
+ check("O2.g1", O2.g1)
+ check("O2.gi", O2.gi)
+ check("O2.gv", O2.gv())
+ check("O2.gf", O2.gf)
+// check("O2.gl", O2.gl)
+
+ check("O2.c.f1", O2.c.f1)
+ check("O2.c.fi", O2.c.fi)
+ check("O2.c.fv", O2.c.fv())
+ check("O2.c.ff", O2.c.ff)
+ check("O2.c.fl", O2.c.fl)
+ check("O2.c.fo", O2.c.fo)
+ check("O2.c.fn", O2.c.fn)
+ check("O2.c.g1", O2.c.g1)
+ check("O2.c.gi", O2.c.gi)
+ check("O2.c.gv", O2.c.gv())
+ check("O2.c.gf", O2.c.gf)
+// check("O2.c.gl", O2.c.gl)
+
+ check("O2.O.f1", O2.O.f1)
+ check("O2.O.fi", O2.O.fi)
+ check("O2.O.fv", O2.O.fv())
+ check("O2.O.ff", O2.O.ff)
+ check("O2.O.fl", O2.O.fl)
+ check("O2.O.fo", O2.O.fo)
+ check("O2.O.fn", O2.O.fn)
+ check("O2.O.g1", O2.O.g1)
+ check("O2.O.gi", O2.O.gi)
+ check("O2.O.gv", O2.O.gv())
+ check("O2.O.gf", O2.O.gf)
+// check("O2.O.gl", O2.O.gl)
+}
\ No newline at end of file
diff --git a/test/files/run/t0017.scala b/test/files/run/t0017.scala
index 103a089..e976f45 100644
--- a/test/files/run/t0017.scala
+++ b/test/files/run/t0017.scala
@@ -12,6 +12,6 @@ for (i <- Array.range(0, my_arr(0).length)) yield
val transposed = transpose(my_arr)
-println(transposed.deepToString)
+println(transposed.deep.toString)
}
diff --git a/test/files/run/bug0325.check b/test/files/run/t0325.check
similarity index 100%
rename from test/files/run/bug0325.check
rename to test/files/run/t0325.check
diff --git a/test/files/run/bug0325.scala b/test/files/run/t0325.scala
similarity index 100%
rename from test/files/run/bug0325.scala
rename to test/files/run/t0325.scala
diff --git a/test/files/run/t0421.check b/test/files/run/t0421-new.check
similarity index 100%
copy from test/files/run/t0421.check
copy to test/files/run/t0421-new.check
diff --git a/test/files/run/t0421-new.scala b/test/files/run/t0421-new.scala
new file mode 100644
index 0000000..8df5aa1
--- /dev/null
+++ b/test/files/run/t0421-new.scala
@@ -0,0 +1,32 @@
+import scala.reflect.{ClassTag, classTag}
+
+// ticket #421
+object Test extends App {
+
+ def transpose[A: ClassTag](xss: Array[Array[A]]) = {
+ for (i <- Array.range(0, xss(0).length)) yield
+ for (xs <- xss) yield xs(i)
+ }
+
+ def scalprod(xs: Array[Double], ys: Array[Double]) = {
+ var acc = 0.0
+ for ((x, y) <- xs zip ys) acc = acc + x * y
+ acc
+ }
+
+ def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = {
+ val ysst = transpose(yss)
+ val ysst1: Array[Array[Double]] = yss.transpose
+ assert(ysst.deep == ysst1.deep)
+ for (xs <- xss) yield
+ for (yst <- ysst) yield
+ scalprod(xs, yst)
+ }
+
+ val a1 = Array(Array(0, 2, 4), Array(1, 3, 5))
+ println(transpose(a1).deep.mkString("[", ",", "]"))
+
+ println(matmul(Array(Array(2, 3)), Array(Array(5), Array(7))).deep.mkString("[", ",", "]"))
+
+ println(matmul(Array(Array(4)), Array(Array(6, 8))).deep.mkString("[", ",", "]"))
+}
\ No newline at end of file
diff --git a/test/files/run/t0421.check b/test/files/run/t0421-old.check
similarity index 100%
rename from test/files/run/t0421.check
rename to test/files/run/t0421-old.check
diff --git a/test/files/run/t0421-old.scala b/test/files/run/t0421-old.scala
new file mode 100644
index 0000000..8d51013
--- /dev/null
+++ b/test/files/run/t0421-old.scala
@@ -0,0 +1,30 @@
+// ticket #421
+object Test extends App {
+
+ def transpose[A: ClassManifest](xss: Array[Array[A]]) = {
+ for (i <- Array.range(0, xss(0).length)) yield
+ for (xs <- xss) yield xs(i)
+ }
+
+ def scalprod(xs: Array[Double], ys: Array[Double]) = {
+ var acc = 0.0
+ for ((x, y) <- xs zip ys) acc = acc + x * y
+ acc
+ }
+
+ def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = {
+ val ysst = transpose(yss)
+ val ysst1: Array[Array[Double]] = yss.transpose
+ assert(ysst.deep == ysst1.deep)
+ for (xs <- xss) yield
+ for (yst <- ysst) yield
+ scalprod(xs, yst)
+ }
+
+ val a1 = Array(Array(0, 2, 4), Array(1, 3, 5))
+ println(transpose(a1).deep.mkString("[", ",", "]"))
+
+ println(matmul(Array(Array(2, 3)), Array(Array(5), Array(7))).deep.mkString("[", ",", "]"))
+
+ println(matmul(Array(Array(4)), Array(Array(6, 8))).deep.mkString("[", ",", "]"))
+}
diff --git a/test/files/run/t0421.scala b/test/files/run/t0421.scala
deleted file mode 100644
index 128bd1b..0000000
--- a/test/files/run/t0421.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-// ticket #421
-object Test extends App {
-
- def transpose[A: ClassManifest](xss: Array[Array[A]]) = {
- for (i <- Array.range(0, xss(0).length)) yield
- for (xs <- xss) yield xs(i)
- }
-
- def scalprod(xs: Array[Double], ys: Array[Double]) = {
- var acc = 0.0
- for ((x, y) <- xs zip ys) acc = acc + x * y
- acc
- }
-
- def matmul(xss: Array[Array[Double]], yss: Array[Array[Double]]) = {
- val ysst = transpose(yss)
- val ysst1: Array[Array[Double]] = yss.transpose
- assert(ysst.deep == ysst1.deep)
- for (xs <- xss) yield
- for (yst <- ysst) yield
- scalprod(xs, yst)
- }
-
- val a1 = Array(Array(0, 2, 4), Array(1, 3, 5))
- println(transpose(a1).deepMkString("[", ",", "]"))
-
- println(matmul(Array(Array(2, 3)), Array(Array(5), Array(7))).deepMkString("[", ",", "]"))
-
- println(matmul(Array(Array(4)), Array(Array(6, 8))).deepMkString("[", ",", "]"))
-}
diff --git a/test/files/run/t0663.check b/test/files/run/t0663.check
old mode 100644
new mode 100755
index 22b68b7..dd9be2a
--- a/test/files/run/t0663.check
+++ b/test/files/run/t0663.check
@@ -1 +1 @@
-<feed></feed>
+<feed/>
diff --git a/test/files/run/t0677-new.scala b/test/files/run/t0677-new.scala
new file mode 100644
index 0000000..15c8b4a
--- /dev/null
+++ b/test/files/run/t0677-new.scala
@@ -0,0 +1,10 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ class X[T: ClassTag] {
+ val a = Array.ofDim[T](3, 4)
+ }
+ val x = new X[String]
+ x.a(1)(2) = "hello"
+ assert(x.a(1)(2) == "hello")
+}
\ No newline at end of file
diff --git a/test/files/run/t0677-old.scala b/test/files/run/t0677-old.scala
new file mode 100644
index 0000000..6c8a3a7
--- /dev/null
+++ b/test/files/run/t0677-old.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ class X[T: ClassManifest] {
+ val a = Array.ofDim[T](3, 4)
+ }
+ val x = new X[String]
+ x.a(1)(2) = "hello"
+ assert(x.a(1)(2) == "hello")
+}
diff --git a/test/files/run/t0677.scala b/test/files/run/t0677.scala
deleted file mode 100644
index 764f8e8..0000000
--- a/test/files/run/t0677.scala
+++ /dev/null
@@ -1,9 +0,0 @@
-object Test extends App {
- class X[T: ClassManifest] {
- val a = new Array[Array[T]](3,4)
- val b = Array.ofDim[T](3, 4)
- }
- val x = new X[String]
- x.a(1)(2) = "hello"
- assert(x.a(1)(2) == "hello")
-}
diff --git a/test/files/run/bug1005.check b/test/files/run/t1005.check
similarity index 100%
rename from test/files/run/bug1005.check
rename to test/files/run/t1005.check
diff --git a/test/files/run/bug1005.scala b/test/files/run/t1005.scala
similarity index 100%
rename from test/files/run/bug1005.scala
rename to test/files/run/t1005.scala
diff --git a/test/files/run/t102.check b/test/files/run/t102.check
new file mode 100644
index 0000000..315f210
--- /dev/null
+++ b/test/files/run/t102.check
@@ -0,0 +1,2 @@
+(5,5)
+(10,10)
diff --git a/test/files/run/t102.scala b/test/files/run/t102.scala
new file mode 100644
index 0000000..6517d8a
--- /dev/null
+++ b/test/files/run/t102.scala
@@ -0,0 +1,24 @@
+trait Foo {
+ type Arg
+ type Prod
+ def makeProd(a: Arg): Prod
+}
+
+object Test {
+ def f1(x: Foo)(y: x.Arg) = x.makeProd(y)
+
+ case class f2[T <: Foo](x: T) {
+ def apply(y: x.Arg) = x.makeProd(y)
+ }
+
+ val myFoo = new Foo {
+ type Arg = Int
+ type Prod = (Int, Int)
+ def makeProd(i: Int) = (i, i)
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(f1(myFoo)(5))
+ println(f2(myFoo)(10))
+ }
+}
diff --git a/test/files/run/bug1042.check b/test/files/run/t1042.check
similarity index 100%
rename from test/files/run/bug1042.check
rename to test/files/run/t1042.check
diff --git a/test/files/run/bug1042.scala b/test/files/run/t1042.scala
similarity index 100%
rename from test/files/run/bug1042.scala
rename to test/files/run/t1042.scala
diff --git a/test/files/run/t1044.scala b/test/files/run/t1044.scala
new file mode 100644
index 0000000..4d37a40
--- /dev/null
+++ b/test/files/run/t1044.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ val ducks = Array[AnyRef]("Huey", "Dewey", "Louie");
+ ducks.iterator.asInstanceOf[Iterator[String]]
+}
diff --git a/test/files/run/bug1048.check b/test/files/run/t1048.check
similarity index 100%
rename from test/files/run/bug1048.check
rename to test/files/run/t1048.check
diff --git a/test/files/run/bug1048.scala b/test/files/run/t1048.scala
similarity index 100%
rename from test/files/run/bug1048.scala
rename to test/files/run/t1048.scala
diff --git a/test/files/run/bug1074.check b/test/files/run/t1074.check
similarity index 100%
rename from test/files/run/bug1074.check
rename to test/files/run/t1074.check
diff --git a/test/files/run/bug1074.scala b/test/files/run/t1074.scala
similarity index 100%
rename from test/files/run/bug1074.scala
rename to test/files/run/t1074.scala
diff --git a/test/files/run/bug1079.check b/test/files/run/t1079.check
similarity index 100%
rename from test/files/run/bug1079.check
rename to test/files/run/t1079.check
diff --git a/test/files/run/bug1079.scala b/test/files/run/t1079.scala
similarity index 100%
rename from test/files/run/bug1079.scala
rename to test/files/run/t1079.scala
diff --git a/test/files/run/t1100.check b/test/files/run/t1100.check
new file mode 100644
index 0000000..d3a49a4
--- /dev/null
+++ b/test/files/run/t1100.check
@@ -0,0 +1,4 @@
+[1.4] error: errors are propagated
+
+aaab
+ ^
diff --git a/test/files/run/t1100.scala b/test/files/run/t1100.scala
new file mode 100644
index 0000000..6b95fd6
--- /dev/null
+++ b/test/files/run/t1100.scala
@@ -0,0 +1,17 @@
+import scala.util.parsing.combinator.Parsers
+import scala.util.parsing.input.CharSequenceReader
+
+class TestParsers extends Parsers {
+ type Elem = Char
+
+ def p: Parser[List[Char]] = rep1(p1)
+ def p1: Parser[Char] = accept('a') | err("errors are propagated")
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val tstParsers = new TestParsers
+ val s = new CharSequenceReader("aaab")
+ println(tstParsers.p(s))
+ }
+}
diff --git a/test/files/run/bug1110.scala b/test/files/run/t1110.scala
similarity index 100%
rename from test/files/run/bug1110.scala
rename to test/files/run/t1110.scala
diff --git a/test/files/run/bug1141.check b/test/files/run/t1141.check
similarity index 100%
rename from test/files/run/bug1141.check
rename to test/files/run/t1141.check
diff --git a/test/files/run/bug1141.scala b/test/files/run/t1141.scala
similarity index 100%
rename from test/files/run/bug1141.scala
rename to test/files/run/t1141.scala
diff --git a/test/files/run/bug1192.check b/test/files/run/t1192.check
similarity index 100%
rename from test/files/run/bug1192.check
rename to test/files/run/t1192.check
diff --git a/test/files/run/t1192.scala b/test/files/run/t1192.scala
new file mode 100644
index 0000000..3222bb0
--- /dev/null
+++ b/test/files/run/t1192.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+ val v1: Array[Array[Int]] = Array(Array(1, 2), Array(3, 4))
+ def f[T](w: Array[Array[T]]) {
+ for (r <- w) println(r.deep.toString)
+ }
+ f(v1)
+}
diff --git a/test/files/run/t1195-new.check b/test/files/run/t1195-new.check
new file mode 100644
index 0000000..0a3f434
--- /dev/null
+++ b/test/files/run/t1195-new.check
@@ -0,0 +1,6 @@
+Bar.type, underlying = <: scala.runtime.AbstractFunction1[Int,Bar] with Serializable{case def unapply(x$0: Bar): Option[Int]} with Singleton
+Bar, underlying = <: Product with Serializable{val x: Int; def copy(x: Int): Bar; def copy$default$1: Int}
+Product with Serializable, underlying = Product with Serializable
+Bar.type, underlying = <: scala.runtime.AbstractFunction1[Int,Bar] with Serializable{case def unapply(x$0: Bar): Option[Int]} with Singleton
+Bar, underlying = <: Product with Serializable{val x: Int; def copy(x: Int): Bar; def copy$default$1: Int}
+Product with Serializable, underlying = Product with Serializable
diff --git a/test/files/run/t1195-new.scala b/test/files/run/t1195-new.scala
new file mode 100644
index 0000000..0f62b14
--- /dev/null
+++ b/test/files/run/t1195-new.scala
@@ -0,0 +1,28 @@
+import scala.reflect.runtime.universe._
+
+object Test {
+ def f() = { case class Bar(x: Int); Bar }
+ def g() = { case class Bar(x: Int); Bar(5) }
+ def h() = { case object Bar ; Bar }
+
+ val f1 = f()
+ val g1 = g()
+ val h1 = h()
+
+ def m[T: WeakTypeTag](x: T) = println(weakTypeOf[T] + ", underlying = " + weakTypeOf[T].typeSymbol.typeSignature)
+
+ def main(args: Array[String]): Unit = {
+ m(f)
+ m(g)
+ m(h)
+ m(f1)
+ m(g1)
+ m(h1)
+ }
+}
+
+class A1[T] {
+ class B1[U] {
+ def f = { case class D(x: Int) extends A1[String] ; new D(5) }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t1195-old.check b/test/files/run/t1195-old.check
new file mode 100644
index 0000000..d023bc9
--- /dev/null
+++ b/test/files/run/t1195-old.check
@@ -0,0 +1,6 @@
+_ <: scala.runtime.AbstractFunction1[Int, _ <: Object with scala.Product with scala.Serializable] with scala.Serializable with java.lang.Object
+_ <: Object with scala.Product with scala.Serializable
+Object with scala.Product with scala.Serializable
+_ <: scala.runtime.AbstractFunction1[Int, _ <: Object with scala.Product with scala.Serializable] with scala.Serializable with java.lang.Object
+_ <: Object with scala.Product with scala.Serializable
+Object with scala.Product with scala.Serializable
diff --git a/test/files/run/t1195-old.scala b/test/files/run/t1195-old.scala
new file mode 100644
index 0000000..b46a3b7
--- /dev/null
+++ b/test/files/run/t1195-old.scala
@@ -0,0 +1,26 @@
+object Test {
+ def f() = { case class Bar(x: Int); Bar }
+ def g() = { case class Bar(x: Int); Bar(5) }
+ def h() = { case object Bar ; Bar }
+
+ val f1 = f()
+ val g1 = g()
+ val h1 = h()
+
+ def m[T: Manifest](x: T) = println(manifest[T])
+
+ def main(args: Array[String]): Unit = {
+ m(f)
+ m(g)
+ m(h)
+ m(f1)
+ m(g1)
+ m(h1)
+ }
+}
+
+class A1[T] {
+ class B1[U] {
+ def f = { case class D(x: Int) extends A1[String] ; new D(5) }
+ }
+}
diff --git a/test/files/run/bug1220.scala b/test/files/run/t1220.scala
similarity index 100%
rename from test/files/run/bug1220.scala
rename to test/files/run/t1220.scala
diff --git a/test/files/run/t1247.check b/test/files/run/t1247.check
new file mode 100644
index 0000000..ce12303
--- /dev/null
+++ b/test/files/run/t1247.check
@@ -0,0 +1 @@
+Is same closure class: true is same closure: true
diff --git a/test/files/run/t1247.scala b/test/files/run/t1247.scala
new file mode 100644
index 0000000..c709b73
--- /dev/null
+++ b/test/files/run/t1247.scala
@@ -0,0 +1,11 @@
+object Test extends App {
+ val f = () => 5
+ def test(g: => Int) {
+ val gFunc = g _
+ val isSameClosureClass = gFunc.getClass == f.getClass
+ val isSame = gFunc eq f
+ println("Is same closure class: "+isSameClosureClass+" is same closure: "+isSame)
+ }
+
+ test(f())
+}
diff --git a/test/files/run/bug1300.check b/test/files/run/t1300.check
similarity index 100%
rename from test/files/run/bug1300.check
rename to test/files/run/t1300.check
diff --git a/test/files/run/bug1300.scala b/test/files/run/t1300.scala
similarity index 100%
rename from test/files/run/bug1300.scala
rename to test/files/run/t1300.scala
diff --git a/test/files/run/t1309.scala b/test/files/run/t1309.scala
new file mode 100644
index 0000000..b6a75fe
--- /dev/null
+++ b/test/files/run/t1309.scala
@@ -0,0 +1,7 @@
+object Test {
+ def f(ras: => IndexedSeq[Byte]): IndexedSeq[Byte] = ras
+
+ def main(args: Array[String]): Unit = {
+ f(new Array[Byte](0))
+ }
+}
diff --git a/test/files/run/bug1333.check b/test/files/run/t1333.check
similarity index 100%
rename from test/files/run/bug1333.check
rename to test/files/run/t1333.check
diff --git a/test/files/run/bug1333.scala b/test/files/run/t1333.scala
similarity index 100%
rename from test/files/run/bug1333.scala
rename to test/files/run/t1333.scala
diff --git a/test/files/run/bug1360.check b/test/files/run/t1360.check
similarity index 100%
rename from test/files/run/bug1360.check
rename to test/files/run/t1360.check
diff --git a/test/files/run/bug1360.scala b/test/files/run/t1360.scala
similarity index 100%
rename from test/files/run/bug1360.scala
rename to test/files/run/t1360.scala
diff --git a/test/files/run/bug1373.scala b/test/files/run/t1373.scala
similarity index 100%
rename from test/files/run/bug1373.scala
rename to test/files/run/t1373.scala
diff --git a/test/files/run/bug1427.scala b/test/files/run/t1427.scala
similarity index 100%
rename from test/files/run/bug1427.scala
rename to test/files/run/t1427.scala
diff --git a/test/files/run/t1430.check b/test/files/run/t1430.check
new file mode 100644
index 0000000..a688182
--- /dev/null
+++ b/test/files/run/t1430.check
@@ -0,0 +1 @@
+Baz
diff --git a/test/files/run/t1430/Bar_1.java b/test/files/run/t1430/Bar_1.java
new file mode 100644
index 0000000..4db2eaf
--- /dev/null
+++ b/test/files/run/t1430/Bar_1.java
@@ -0,0 +1,8 @@
+package j;
+
+interface Foo {
+ public void foo();
+}
+public interface Bar_1 extends Foo {
+ public void bar();
+}
diff --git a/test/files/run/t1430/Test_2.scala b/test/files/run/t1430/Test_2.scala
new file mode 100644
index 0000000..7af65de
--- /dev/null
+++ b/test/files/run/t1430/Test_2.scala
@@ -0,0 +1,16 @@
+package s {
+ object Boop extends j.Bar_1 {
+ def foo() {}
+ def bar() {}
+ }
+ class Baz(x: j.Bar_1) {
+ x.foo
+ override def toString = "Baz"
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(new s.Baz(s.Boop))
+ }
+}
diff --git a/test/files/run/bug1434.scala b/test/files/run/t1434.scala
similarity index 100%
rename from test/files/run/bug1434.scala
rename to test/files/run/t1434.scala
diff --git a/test/files/run/bug1466.scala b/test/files/run/t1466.scala
similarity index 100%
rename from test/files/run/bug1466.scala
rename to test/files/run/t1466.scala
diff --git a/test/files/run/t1500.scala b/test/files/run/t1500.scala
index c312a9a..ab132b7 100644
--- a/test/files/run/t1500.scala
+++ b/test/files/run/t1500.scala
@@ -1,4 +1,4 @@
-import scala.tools.nsc._
+import scala.tools.nsc._
object Test {
@@ -8,7 +8,7 @@ object Test {
val testCode = <code>
- class posingAs[A] extends TypeConstraint
+ class posingAs[A] extends annotation.TypeConstraint
def resolve[A,B](x: A @posingAs[B]): B = x.asInstanceOf[B]
@@ -20,7 +20,7 @@ object Test {
val settings = new Settings()
settings.classpath.value = System.getProperty("java.class.path")
- val tool = new Interpreter(settings)
+ val tool = new interpreter.IMain(settings)
val global = tool.compiler
import global._
diff --git a/test/files/run/t1501.scala b/test/files/run/t1501.scala
index 05e4da8..aba206b 100644
--- a/test/files/run/t1501.scala
+++ b/test/files/run/t1501.scala
@@ -8,7 +8,7 @@ object Test {
val testCode = <code>
- class xyz[A] extends TypeConstraint
+ class xyz[A] extends annotation.TypeConstraint
def loopWhile[T](cond: =>Boolean)(body: =>(Unit @xyz[T])): Unit @ xyz[T] = {{
if (cond) {{
@@ -30,7 +30,7 @@ object Test {
def main(args: Array[String]) = {
val settings = new Settings()
settings.classpath.value = System.getProperty("java.class.path")
- val tool = new Interpreter(settings)
+ val tool = new interpreter.IMain(settings)
val global = tool.compiler
import global._
diff --git a/test/files/run/t1620.check b/test/files/run/t1620.check
old mode 100644
new mode 100755
index 979efc8..afa1e6a
--- a/test/files/run/t1620.check
+++ b/test/files/run/t1620.check
@@ -1,6 +1,6 @@
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE foo PUBLIC "-//Foo Corp//DTD 1.0//EN" "foo.dtd">
-<foo></foo>
+<foo/>
<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE foo PUBLIC "-//Foo Corp//DTD 1.0//EN">
-<foo></foo>
+<foo/>
diff --git a/test/files/run/t1672.scala b/test/files/run/t1672.scala
new file mode 100644
index 0000000..ee025b9
--- /dev/null
+++ b/test/files/run/t1672.scala
@@ -0,0 +1,28 @@
+object Test {
+ @annotation.tailrec
+ def bar(i : Int) : Int = {
+ if (i == 0) 0
+ else try {
+ throw new RuntimeException
+ } catch {
+ case _: Throwable => bar(i - 1)
+ }
+ }
+
+ @annotation.tailrec
+ def nestedTry1(i : Int) : Int = {
+ if (i == 0) 0
+ else try {
+ throw new RuntimeException
+ } catch {
+ case _: Throwable =>
+ try { ??? } catch { case _: Throwable => nestedTry1(i - 1) }
+ }
+ }
+
+ def main(args: Array[String]) {
+ assert(bar(2) == 0)
+
+ assert(nestedTry1(2) == 0)
+ }
+}
diff --git a/test/pending/run/bug1697.scala b/test/files/run/t1697.scala
similarity index 100%
rename from test/pending/run/bug1697.scala
rename to test/files/run/t1697.scala
diff --git a/test/files/run/bug1766.scala b/test/files/run/t1766.scala
similarity index 100%
rename from test/files/run/bug1766.scala
rename to test/files/run/t1766.scala
diff --git a/test/files/run/t1987.check b/test/files/run/t1987.check
new file mode 100644
index 0000000..d2102a4
--- /dev/null
+++ b/test/files/run/t1987.check
@@ -0,0 +1,16 @@
+long
+long
+double
+double
+long
+long
+double
+double
+long
+long
+double
+double
+long
+long
+double
+double
diff --git a/test/files/neg/caseinherit.flags b/test/files/run/t1987.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/run/t1987.flags
diff --git a/test/files/run/t1987.scala b/test/files/run/t1987.scala
new file mode 100644
index 0000000..4c278ec
--- /dev/null
+++ b/test/files/run/t1987.scala
@@ -0,0 +1,62 @@
+// a.scala
+// Fri Jan 13 11:31:47 PST 2012
+
+package foo {
+ package object bar {
+ def duh(n: Long) = println("long")
+ def duh(n: Double) = println("double")
+
+ def duh2(n: Double) = println("double")
+ def duh2(n: Long) = println("long")
+ }
+ package bar {
+ object Main {
+ def main(args:Array[String]) {
+ duh(33L)
+ bip.bar.duh(33L)
+ duh(33d)
+ bip.bar.duh(33d)
+
+ duh2(33L)
+ bip.bar.duh2(33L)
+ duh2(33d)
+ bip.bar.duh2(33d)
+ }
+ }
+ }
+}
+
+package bip {
+ trait Duh {
+ def duh(n: Long) = println("long")
+ def duh(n: Double) = println("double")
+ }
+ trait Duh2 {
+ def duh2(n: Double) = println("double")
+ def duh2(n: Long) = println("long")
+ }
+
+ package object bar extends Duh with Duh2 { }
+ package bar {
+ object Main {
+ def main(args:Array[String]) {
+ duh(33L)
+ bip.bar.duh(33L)
+ duh(33d)
+ bip.bar.duh(33d)
+
+ duh2(33L)
+ bip.bar.duh2(33L)
+ duh2(33d)
+ bip.bar.duh2(33d)
+ }
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ foo.bar.Main.main(null)
+ bip.bar.Main.main(null)
+ }
+}
diff --git a/test/files/run/t1987b.check b/test/files/run/t1987b.check
new file mode 100644
index 0000000..68d4b10
--- /dev/null
+++ b/test/files/run/t1987b.check
@@ -0,0 +1 @@
+ok!
diff --git a/test/files/run/t1987b/PullIteratees.scala b/test/files/run/t1987b/PullIteratees.scala
new file mode 100644
index 0000000..a5a3e65
--- /dev/null
+++ b/test/files/run/t1987b/PullIteratees.scala
@@ -0,0 +1,17 @@
+package scales.xml
+
+trait PullType
+class QName
+trait RetUrn[T]
+
+/**
+ * Iteratees related to pull parsing
+ */
+trait PullIteratees {
+ /**
+ * Without the overload it doesn't trigger the CCE, even though its
+ * not used
+ */
+ def iterate(path: List[QName], xml: String): RetUrn[String] = null
+ def iterate(path: List[QName], xml: Iterator[PullType]): RetUrn[String] = null
+}
diff --git a/test/files/run/t1987b/a.scala b/test/files/run/t1987b/a.scala
new file mode 100644
index 0000000..c1be5fe
--- /dev/null
+++ b/test/files/run/t1987b/a.scala
@@ -0,0 +1,6 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ scales.xml.CCE_Test.main(args)
+ println("ok!")
+ }
+}
diff --git a/test/files/run/t1987b/cce_test.scala b/test/files/run/t1987b/cce_test.scala
new file mode 100644
index 0000000..4f9acf0
--- /dev/null
+++ b/test/files/run/t1987b/cce_test.scala
@@ -0,0 +1,15 @@
+package scales.xml
+//import scales.xml._ // using another pacakge and importing doesn't CCE
+
+object CCE_Test {
+ def main(args: Array[String]): Unit = {
+ // without the import it doesn't trigger the CCE
+ import scaley.funny._
+
+ val pull = null.asInstanceOf[Iterator[PullType]]
+ val LogEntries = null.asInstanceOf[List[QName]]
+ // fully qualify with scales.xml. and it won't trigger it
+ iterate(LogEntries,
+ pull)
+ }
+}
diff --git a/test/files/run/t1987b/pkg1.scala b/test/files/run/t1987b/pkg1.scala
new file mode 100644
index 0000000..6e749fc
--- /dev/null
+++ b/test/files/run/t1987b/pkg1.scala
@@ -0,0 +1,4 @@
+package scaley
+
+package object funny {
+}
diff --git a/test/files/run/t1987b/pkg2.scala b/test/files/run/t1987b/pkg2.scala
new file mode 100644
index 0000000..38056a1
--- /dev/null
+++ b/test/files/run/t1987b/pkg2.scala
@@ -0,0 +1,3 @@
+package scales
+
+package object xml extends PullIteratees
diff --git a/test/files/run/t2005.scala b/test/files/run/t2005.scala
new file mode 100644
index 0000000..45da9fe
--- /dev/null
+++ b/test/files/run/t2005.scala
@@ -0,0 +1,10 @@
+object Test {
+ def main(args: Array[String]) {
+ val a = Array.ofDim[Int](2,2)
+ test(a)
+ }
+ def test[A](t: Array[Array[A]]) {
+ val tmp = t(0)
+ t(1) = tmp
+ }
+}
diff --git a/test/files/run/bug2029.check b/test/files/run/t2029.check
similarity index 100%
rename from test/files/run/bug2029.check
rename to test/files/run/t2029.check
diff --git a/test/files/run/bug2029.scala b/test/files/run/t2029.scala
similarity index 100%
rename from test/files/run/bug2029.scala
rename to test/files/run/t2029.scala
diff --git a/test/files/run/bug2075.scala b/test/files/run/t2075.scala
similarity index 100%
rename from test/files/run/bug2075.scala
rename to test/files/run/t2075.scala
diff --git a/test/files/run/bugs2087-and-2400.scala b/test/files/run/t2087-and-2400.scala
similarity index 100%
rename from test/files/run/bugs2087-and-2400.scala
rename to test/files/run/t2087-and-2400.scala
diff --git a/test/files/pos/bug3252.flags b/test/files/run/t2106.flags
similarity index 100%
copy from test/files/pos/bug3252.flags
copy to test/files/run/t2106.flags
diff --git a/test/files/run/bug2106.scala b/test/files/run/t2106.scala
similarity index 100%
rename from test/files/run/bug2106.scala
rename to test/files/run/t2106.scala
diff --git a/test/files/run/t2124.check b/test/files/run/t2124.check
new file mode 100755
index 0000000..51b4046
--- /dev/null
+++ b/test/files/run/t2124.check
@@ -0,0 +1 @@
+<p><lost/><q/></p>
diff --git a/test/files/run/bug2124.scala b/test/files/run/t2124.scala
similarity index 100%
rename from test/files/run/bug2124.scala
rename to test/files/run/t2124.scala
diff --git a/test/files/run/t2125.check b/test/files/run/t2125.check
new file mode 100755
index 0000000..51b4046
--- /dev/null
+++ b/test/files/run/t2125.check
@@ -0,0 +1 @@
+<p><lost/><q/></p>
diff --git a/test/files/run/bug2125.scala b/test/files/run/t2125.scala
similarity index 100%
rename from test/files/run/bug2125.scala
rename to test/files/run/t2125.scala
diff --git a/test/files/run/t2127.scala b/test/files/run/t2127.scala
old mode 100644
new mode 100755
diff --git a/test/files/run/bug629.check b/test/files/run/t216.check
similarity index 100%
rename from test/files/run/bug629.check
rename to test/files/run/t216.check
diff --git a/test/files/run/bug216.scala b/test/files/run/t216.scala
similarity index 100%
rename from test/files/run/bug216.scala
rename to test/files/run/t216.scala
diff --git a/test/files/run/bug2162.check b/test/files/run/t2162.check
similarity index 100%
rename from test/files/run/bug2162.check
rename to test/files/run/t2162.check
diff --git a/test/files/run/bug2162.scala b/test/files/run/t2162.scala
similarity index 100%
rename from test/files/run/bug2162.scala
rename to test/files/run/t2162.scala
diff --git a/test/files/run/bug2175.scala b/test/files/run/t2175.scala
similarity index 100%
rename from test/files/run/bug2175.scala
rename to test/files/run/t2175.scala
diff --git a/test/files/run/t2236-new.scala b/test/files/run/t2236-new.scala
new file mode 100644
index 0000000..26d6945
--- /dev/null
+++ b/test/files/run/t2236-new.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime.universe._
+
+class T[A](implicit val m:TypeTag[A])
+class Foo
+class Bar extends T[Foo]
+object Test extends App {
+ new Bar
+}
+
+object EvidenceTest {
+ trait E[T]
+ trait A[T] { implicit val e: E[T] = null }
+ class B[T : E] extends A[T] { override val e = null }
+
+ def f[T] {
+ implicit val e: E[T] = null
+ new B[T]{}
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t2236.scala b/test/files/run/t2236-old.scala
similarity index 100%
rename from test/files/run/t2236.scala
rename to test/files/run/t2236-old.scala
diff --git a/test/files/run/bug2241.scala b/test/files/run/t2241.scala
similarity index 100%
rename from test/files/run/bug2241.scala
rename to test/files/run/t2241.scala
diff --git a/test/files/run/bug2250.scala b/test/files/run/t2250.scala
similarity index 100%
rename from test/files/run/bug2250.scala
rename to test/files/run/t2250.scala
diff --git a/test/files/run/bug2276.check b/test/files/run/t2276.check
similarity index 100%
rename from test/files/run/bug2276.check
rename to test/files/run/t2276.check
diff --git a/test/files/run/bug2276.scala b/test/files/run/t2276.scala
similarity index 100%
rename from test/files/run/bug2276.scala
rename to test/files/run/t2276.scala
diff --git a/test/files/run/t2296c.check b/test/files/run/t2296c.check
new file mode 100644
index 0000000..076e918
--- /dev/null
+++ b/test/files/run/t2296c.check
@@ -0,0 +1 @@
+RUNNING ACTION
diff --git a/test/files/run/t2296c/Action.java b/test/files/run/t2296c/Action.java
new file mode 100644
index 0000000..50ba9a4
--- /dev/null
+++ b/test/files/run/t2296c/Action.java
@@ -0,0 +1,21 @@
+package bug.action;
+
+import bug.Global;
+
+public abstract class Action {
+ protected Global m_glob;
+
+ public Action(Global glob0) {
+ m_glob = glob0;
+ }
+
+ public Action() {
+ this(null);
+ }
+
+ public abstract void run(int v);
+
+ public void setGlobal(Global g) {
+ m_glob = g;
+ }
+}
diff --git a/test/files/run/t2296c/Display.java b/test/files/run/t2296c/Display.java
new file mode 100644
index 0000000..7f7e6a7
--- /dev/null
+++ b/test/files/run/t2296c/Display.java
@@ -0,0 +1,9 @@
+package bug;
+
+public class Display {
+ protected Global m_glob;
+
+ public void start() {
+ m_glob.runActions();
+ }
+}
diff --git a/test/files/run/t2296c/Global.java b/test/files/run/t2296c/Global.java
new file mode 100644
index 0000000..7e5a762
--- /dev/null
+++ b/test/files/run/t2296c/Global.java
@@ -0,0 +1,29 @@
+package bug;
+
+import bug.action.Action;
+import java.util.List;
+import java.util.LinkedList;
+
+public class Global {
+ public int items() {
+ return 0;
+ }
+
+ public int items(int i) {
+ return i + ls.size();
+ }
+
+ private List<Action> ls = new LinkedList<Action>();
+
+ public void putAction(Action a) {
+ a.setGlobal(this);
+ ls.add(a);
+ }
+
+ public void runActions() {
+ for (Action action: ls) {
+ System.out.println("RUNNING ACTION");
+ action.run(0);
+ }
+ }
+}
diff --git a/test/files/run/t2296c/ScalaActivity.scala b/test/files/run/t2296c/ScalaActivity.scala
new file mode 100644
index 0000000..aa7648a
--- /dev/null
+++ b/test/files/run/t2296c/ScalaActivity.scala
@@ -0,0 +1,18 @@
+package test
+
+import bug.Display
+import bug.action.Action
+
+abstract class Outer extends Display {
+
+ def init() {
+ m_glob.putAction(ScalaActivity)
+ }
+
+ object ScalaActivity extends Action {
+ def run(v: Int) {
+ val testSet = List(1,2,3)
+ testSet.map(p => m_glob.items(p)) // crash with illegal access
+ }
+ }
+}
diff --git a/test/files/run/t2296c/Test.scala b/test/files/run/t2296c/Test.scala
new file mode 100644
index 0000000..1132beb
--- /dev/null
+++ b/test/files/run/t2296c/Test.scala
@@ -0,0 +1,15 @@
+package test
+
+import bug.Global
+
+object Test {
+ def main(args: Array[String]) {
+ val m = new Main()
+ m.init()
+ m.start()
+ }
+}
+
+class Main extends Outer {
+ m_glob = new Global()
+}
diff --git a/test/files/run/t2296c/a.scala b/test/files/run/t2296c/a.scala
new file mode 100644
index 0000000..fae32f4
--- /dev/null
+++ b/test/files/run/t2296c/a.scala
@@ -0,0 +1,5 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ test.Test main args
+ }
+}
diff --git a/test/files/run/bug2308a.check b/test/files/run/t2308a.check
similarity index 100%
rename from test/files/run/bug2308a.check
rename to test/files/run/t2308a.check
diff --git a/test/files/run/bug2308a.scala b/test/files/run/t2308a.scala
similarity index 100%
rename from test/files/run/bug2308a.scala
rename to test/files/run/t2308a.scala
diff --git a/test/files/run/t2337.check b/test/files/run/t2337.check
new file mode 100644
index 0000000..18f1f66
--- /dev/null
+++ b/test/files/run/t2337.check
@@ -0,0 +1,4 @@
+(Both Int,-1,-1)
+(Both Float,1,1)
+(Float then Int,0,0)
+(Int then Float,0,0)
diff --git a/test/files/run/t2337.scala b/test/files/run/t2337.scala
new file mode 100644
index 0000000..86a372c
--- /dev/null
+++ b/test/files/run/t2337.scala
@@ -0,0 +1,21 @@
+
+object Test {
+
+ def compare(first: Any, second: Any): Any = {
+ (first, second) match {
+ case (k: Int, o: Int) => k compare o
+ //why the next case matches (Float, Int) but does not match (Int, Float) ???
+ case (k: Number, o: Number) => k.doubleValue() compare o.doubleValue()
+ case _ => "BOGON"
+ // throw new Exception("Unsupported compare " + first + "; " + second)
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+ println("Both Int", -1, compare(0, 1))
+ println("Both Float", 1, compare(1.0, 0.0))
+ println("Float then Int", 0, compare(10.0, 10))
+ println("Int then Float", 0, compare(10, 10.0)) //this fails with an exception
+ }
+}
+
diff --git a/test/files/run/bug2354.scala b/test/files/run/t2354.scala
similarity index 100%
rename from test/files/run/bug2354.scala
rename to test/files/run/t2354.scala
diff --git a/test/files/run/bug2378.scala b/test/files/run/t2378.scala
similarity index 100%
rename from test/files/run/bug2378.scala
rename to test/files/run/t2378.scala
diff --git a/test/files/run/t2386-new.check b/test/files/run/t2386-new.check
new file mode 100644
index 0000000..8ed0ffd
--- /dev/null
+++ b/test/files/run/t2386-new.check
@@ -0,0 +1,2 @@
+a(0) = Array(1, 2)
+a(1) = Array("a", "b")
diff --git a/test/files/run/t2386-new.scala b/test/files/run/t2386-new.scala
new file mode 100644
index 0000000..15d1859
--- /dev/null
+++ b/test/files/run/t2386-new.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ val a = Array(Array(1, 2), Array("a","b"))
+ println("a(0) = Array(" + (a(0) mkString ", ") + ")")
+ println("a(1) = Array(" + (a(1) map (s => "\"" + s + "\"") mkString ", ") + ")")
+}
diff --git a/test/files/run/t2417.scala b/test/files/run/t2417.scala
index aeb61a7..2d0bc2d 100644
--- a/test/files/run/t2417.scala
+++ b/test/files/run/t2417.scala
@@ -3,7 +3,7 @@ object Test {
def parallel(numThreads: Int)(block: => Unit) {
var failure: Throwable = null
- val threads = Array.fromFunction(i => new Thread {
+ val threads = Array.tabulate(numThreads)(i => new Thread {
override def run {
try {
block
@@ -11,7 +11,7 @@ object Test {
case x => failure = x
}
}
- })(numThreads)
+ })
for (t <- threads) t.start
for (t <- threads) t.join
if (failure != null) println("FAILURE: " + failure)
@@ -74,4 +74,4 @@ object Test {
testSet(5, 2, 1000000)
println()
}
-}
\ No newline at end of file
+}
diff --git a/test/files/run/t2418.check b/test/files/run/t2418.check
new file mode 100644
index 0000000..f599e28
--- /dev/null
+++ b/test/files/run/t2418.check
@@ -0,0 +1 @@
+10
diff --git a/test/files/run/t2418.scala b/test/files/run/t2418.scala
new file mode 100644
index 0000000..f330bef
--- /dev/null
+++ b/test/files/run/t2418.scala
@@ -0,0 +1,10 @@
+class Foo {
+ @volatile final var x=10
+ override def toString = "" + x
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println((new Foo))
+ }
+}
diff --git a/test/files/run/t2488.check b/test/files/run/t2488.check
new file mode 100644
index 0000000..1af4bf8
--- /dev/null
+++ b/test/files/run/t2488.check
@@ -0,0 +1,4 @@
+1
+1
+1
+2
diff --git a/test/files/run/t2488.scala b/test/files/run/t2488.scala
new file mode 100644
index 0000000..22abdf8
--- /dev/null
+++ b/test/files/run/t2488.scala
@@ -0,0 +1,11 @@
+class C {
+ def f(a:Int, b:Int) = 1
+ def f() = 2
+}
+object Test extends App {
+ val c = new C()
+ println(c.f(a = 1,2))
+ println(c.f(a = 1, b = 2))
+ println(c.f(b = 2, a = 1))
+ println(c.f())
+}
diff --git a/test/files/run/t2503.scala b/test/files/run/t2503.scala
old mode 100644
new mode 100755
diff --git a/test/files/run/bug2512.scala b/test/files/run/t2512.scala
similarity index 100%
rename from test/files/run/bug2512.scala
rename to test/files/run/t2512.scala
diff --git a/test/files/run/bug2514.scala b/test/files/run/t2514.scala
similarity index 100%
rename from test/files/run/bug2514.scala
rename to test/files/run/t2514.scala
diff --git a/test/files/run/t2544.check b/test/files/run/t2544.check
index 716b146..d19538d 100644
--- a/test/files/run/t2544.check
+++ b/test/files/run/t2544.check
@@ -2,8 +2,8 @@
2
3
3
--2
--2
+-1
+-1
1
1
0
diff --git a/test/files/run/t2544.scala b/test/files/run/t2544.scala
index 7e7cfeb..6bee2f1 100644
--- a/test/files/run/t2544.scala
+++ b/test/files/run/t2544.scala
@@ -1,19 +1,25 @@
object Test {
object Foo extends Seq[Int] {
def apply(i: Int) = i
- def length = 4
+ def length = 5
def iterator = Iterator(0,1,2,3,4)
}
+ def lengthEquiv(result: Int) = println(
+ if (result < 0) -1
+ else if (result == 0) 0
+ else 1
+ )
+
def main(args: Array[String]) = {
println(Foo indexWhere(_ >= 2,1))
println(Foo.toList indexWhere(_ >= 2,1))
println(Foo segmentLength(_ <= 3,1))
println(Foo.toList segmentLength(_ <= 3,1))
- println(Foo lengthCompare 7)
- println(Foo.toList lengthCompare 7)
- println(Foo lengthCompare 2)
- println(Foo.toList lengthCompare 2)
- println(Foo lengthCompare 5)
- println(Foo.toList lengthCompare 5)
+ lengthEquiv(Foo lengthCompare 7)
+ lengthEquiv(Foo.toList lengthCompare 7)
+ lengthEquiv(Foo lengthCompare 2)
+ lengthEquiv(Foo.toList lengthCompare 2)
+ lengthEquiv(Foo lengthCompare 5)
+ lengthEquiv(Foo.toList lengthCompare 5)
}
-}
\ No newline at end of file
+}
diff --git a/test/files/run/bug2552.check b/test/files/run/t2552.check
similarity index 100%
rename from test/files/run/bug2552.check
rename to test/files/run/t2552.check
diff --git a/test/files/run/bug2552.scala b/test/files/run/t2552.scala
similarity index 100%
rename from test/files/run/bug2552.scala
rename to test/files/run/t2552.scala
diff --git a/test/files/run/bug2636.scala b/test/files/run/t2636.scala
similarity index 100%
rename from test/files/run/bug2636.scala
rename to test/files/run/t2636.scala
diff --git a/test/files/run/bug266.scala b/test/files/run/t266.scala
similarity index 100%
rename from test/files/run/bug266.scala
rename to test/files/run/t266.scala
diff --git a/test/files/run/bug2721.check b/test/files/run/t2721.check
similarity index 100%
rename from test/files/run/bug2721.check
rename to test/files/run/t2721.check
diff --git a/test/files/run/bug2721.scala b/test/files/run/t2721.scala
similarity index 100%
rename from test/files/run/bug2721.scala
rename to test/files/run/t2721.scala
diff --git a/test/files/run/bug2755.check b/test/files/run/t2755.check
similarity index 100%
rename from test/files/run/bug2755.check
rename to test/files/run/t2755.check
diff --git a/test/files/run/bug2755.scala b/test/files/run/t2755.scala
similarity index 100%
rename from test/files/run/bug2755.scala
rename to test/files/run/t2755.scala
diff --git a/test/files/run/bug2800.check b/test/files/run/t2800.check
similarity index 100%
rename from test/files/run/bug2800.check
rename to test/files/run/t2800.check
diff --git a/test/files/run/bug2800.scala b/test/files/run/t2800.scala
similarity index 100%
rename from test/files/run/bug2800.scala
rename to test/files/run/t2800.scala
diff --git a/test/files/run/t2813.2.scala b/test/files/run/t2813.2.scala
index e55cc6c..f41f645 100644
--- a/test/files/run/t2813.2.scala
+++ b/test/files/run/t2813.2.scala
@@ -1,39 +1,39 @@
-import java.util.LinkedList
-import collection.JavaConversions._
-
-object Test extends App {
- def assertListEquals[A](expected: List[A], actual: Seq[A]) {
- assert(expected.sameElements(actual),
- "Expected list to contain " + expected.mkString("[", ", ", "]") +
- ", but was " + actual.mkString("[", ", ", "]"))
- }
-
- def addAllOfNonCollectionWrapperAtZeroOnEmptyLinkedList() {
- val l = new LinkedList[Int]
- l.addAll(0, List(1, 2))
- assertListEquals(List(1, 2), l)
- }
-
- def addAllOfNonCollectionWrapperAtZeroOnLinkedList() {
- val l = new LinkedList[Int] + 1 + 2
- l.addAll(0, List(10, 11))
- assertListEquals((List(10, 11, 1, 2)), l)
- }
-
- def addAllOfCollectionWrapperAtZeroOnLinkedList() {
- val l = new LinkedList[Int] + 1 + 2
- l.addAll(0, new LinkedList[Int] + 10 + 11)
- assertListEquals((List(10, 11, 1, 2)), l)
- }
-
- def addAllOfCollectionWrapperAtZeroOnEmptyLinkedList() {
- val l = new LinkedList[Int]
- l.addAll(0, new LinkedList[Int] + 10 + 11)
- assertListEquals((List(10, 11)), l)
- }
-
- addAllOfNonCollectionWrapperAtZeroOnEmptyLinkedList
- addAllOfNonCollectionWrapperAtZeroOnLinkedList
- addAllOfCollectionWrapperAtZeroOnEmptyLinkedList
- addAllOfCollectionWrapperAtZeroOnLinkedList
-}
+import java.util.LinkedList
+import collection.JavaConversions._
+
+object Test extends App {
+ def assertListEquals[A](expected: List[A], actual: Seq[A]) {
+ assert(expected.sameElements(actual),
+ "Expected list to contain " + expected.mkString("[", ", ", "]") +
+ ", but was " + actual.mkString("[", ", ", "]"))
+ }
+
+ def addAllOfNonCollectionWrapperAtZeroOnEmptyLinkedList() {
+ val l = new LinkedList[Int]
+ l.addAll(0, List(1, 2))
+ assertListEquals(List(1, 2), l)
+ }
+
+ def addAllOfNonCollectionWrapperAtZeroOnLinkedList() {
+ val l = new LinkedList[Int] += 1 += 2
+ l.addAll(0, List(10, 11))
+ assertListEquals((List(10, 11, 1, 2)), l)
+ }
+
+ def addAllOfCollectionWrapperAtZeroOnLinkedList() {
+ val l = new LinkedList[Int] += 1 += 2
+ l.addAll(0, new LinkedList[Int] += 10 += 11)
+ assertListEquals((List(10, 11, 1, 2)), l)
+ }
+
+ def addAllOfCollectionWrapperAtZeroOnEmptyLinkedList() {
+ val l = new LinkedList[Int]
+ l.addAll(0, new LinkedList[Int] += 10 += 11)
+ assertListEquals((List(10, 11)), l)
+ }
+
+ addAllOfNonCollectionWrapperAtZeroOnEmptyLinkedList
+ addAllOfNonCollectionWrapperAtZeroOnLinkedList
+ addAllOfCollectionWrapperAtZeroOnEmptyLinkedList
+ addAllOfCollectionWrapperAtZeroOnLinkedList
+}
diff --git a/test/files/run/t2818.check b/test/files/run/t2818.check
new file mode 100644
index 0000000..31286c9
--- /dev/null
+++ b/test/files/run/t2818.check
@@ -0,0 +1,4 @@
+105
+499999500000
+0
+1
diff --git a/test/files/run/t2818.scala b/test/files/run/t2818.scala
new file mode 100644
index 0000000..19b67cb
--- /dev/null
+++ b/test/files/run/t2818.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ println((List.range(1L, 15L) :\ 0L) (_ + _))
+ println((List.range(1L, 1000000L) :\ 0L) (_ + _))
+ println((List.fill(5)(1) :\ 1) (_ - _))
+ println((List.fill(1000000)(1) :\ 1) (_ - _))
+}
diff --git a/test/files/run/t2873.check b/test/files/run/t2873.check
new file mode 100644
index 0000000..9198280
--- /dev/null
+++ b/test/files/run/t2873.check
@@ -0,0 +1 @@
+scala.collection.immutable.RedBlack<A>.Empty$
diff --git a/test/files/run/t2873.scala b/test/files/run/t2873.scala
new file mode 100644
index 0000000..8d48a8d
--- /dev/null
+++ b/test/files/run/t2873.scala
@@ -0,0 +1,5 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(classOf[scala.collection.immutable.RedBlack[_]].getMethod("Empty").getGenericReturnType)
+ }
+}
diff --git a/test/files/run/bug2876.scala b/test/files/run/t2876.scala
similarity index 100%
rename from test/files/run/bug2876.scala
rename to test/files/run/t2876.scala
diff --git a/test/files/run/t2886.check b/test/files/run/t2886.check
index 39ee46a..a70f993 100644
--- a/test/files/run/t2886.check
+++ b/test/files/run/t2886.check
@@ -1 +1,5 @@
-Function(List(LocalValue(NoSymbol,x,PrefixedType(SingleType(ThisType(Class(scala)),Field(scala.Predef,PrefixedType(ThisType(Class(scala)),Class(scala.Predef)))),TypeField(scala.Predef.String,PrefixedType(ThisType(Class(java.lang)),Class(java.lang.String)))))),Block(List(ValDef(LocalValue(NoSymbol,x$1,NoType),Ident(LocalValue(NoSymbol,x,PrefixedType(SingleType(ThisType(Class(scala)),Field(scala.Predef,PrefixedType(ThisType(Class(scala)),Class(scala.Predef)))),TypeField(scala.Predef.String [...]
\ No newline at end of file
+((x: Predef.String) => {
+ val x$1 = x;
+ val x$2 = x;
+ Test.this.test(x$2, x$1)
+})
diff --git a/test/files/run/t2886.scala b/test/files/run/t2886.scala
index eb392f0..b919f13 100644
--- a/test/files/run/t2886.scala
+++ b/test/files/run/t2886.scala
@@ -1,7 +1,9 @@
+import scala.reflect.runtime.universe._
+
object Test {
def test(name: String, address: String) = null
def main(args: Array[String]) = {
- val tree = scala.reflect.Code.lift((x:String) => test(address=x,name=x)).tree
+ val tree = reify((x:String) => test(address=x,name=x)).tree
println(tree)
}
-}
+}
\ No newline at end of file
diff --git a/test/files/run/bug2958.scala b/test/files/run/t2958.scala
similarity index 100%
rename from test/files/run/bug2958.scala
rename to test/files/run/t2958.scala
diff --git a/test/files/run/bug298.check b/test/files/run/t298.check
similarity index 100%
rename from test/files/run/bug298.check
rename to test/files/run/t298.check
diff --git a/test/files/run/bug298.scala b/test/files/run/t298.scala
similarity index 100%
rename from test/files/run/bug298.scala
rename to test/files/run/t298.scala
diff --git a/test/files/run/bug3004.scala b/test/files/run/t3004.scala
similarity index 100%
rename from test/files/run/bug3004.scala
rename to test/files/run/t3004.scala
diff --git a/test/files/run/t3026.scala b/test/files/run/t3026.scala
old mode 100644
new mode 100755
diff --git a/test/files/run/bug3050.scala b/test/files/run/t3050.scala
similarity index 100%
rename from test/files/run/bug3050.scala
rename to test/files/run/t3050.scala
diff --git a/test/files/run/bug3088.scala b/test/files/run/t3088.scala
similarity index 100%
rename from test/files/run/bug3088.scala
rename to test/files/run/t3088.scala
diff --git a/test/files/run/t3097.check b/test/files/run/t3097.check
new file mode 100644
index 0000000..63695f7
--- /dev/null
+++ b/test/files/run/t3097.check
@@ -0,0 +1 @@
+atomic
diff --git a/test/files/run/t3097.scala b/test/files/run/t3097.scala
new file mode 100644
index 0000000..4aaf805
--- /dev/null
+++ b/test/files/run/t3097.scala
@@ -0,0 +1,18 @@
+sealed trait ISimpleValue
+
+sealed trait IListValue extends ISimpleValue
+sealed trait IAtomicValue[O] extends ISimpleValue
+
+sealed trait IAbstractDoubleValue[O] extends IAtomicValue[O]
+sealed trait IDoubleValue extends IAbstractDoubleValue[Double]
+
+case class ListValue(val items: List[IAtomicValue[_]]) extends IListValue
+class DoubleValue(val data: Double) extends IDoubleValue
+
+object Test extends App {
+ // match is exhaustive
+ (new DoubleValue(1): ISimpleValue) match {
+ case m: IListValue => println("list")
+ case a: IAtomicValue[_] => println("atomic")
+ }
+}
diff --git a/test/files/run/bug3126.scala b/test/files/run/t3126.scala
similarity index 100%
rename from test/files/run/bug3126.scala
rename to test/files/run/t3126.scala
diff --git a/test/files/run/bug3150.scala b/test/files/run/t3150.scala
similarity index 100%
rename from test/files/run/bug3150.scala
rename to test/files/run/t3150.scala
diff --git a/test/files/run/bug3175.check b/test/files/run/t3175.check
similarity index 100%
rename from test/files/run/bug3175.check
rename to test/files/run/t3175.check
diff --git a/test/files/run/bug3175.scala b/test/files/run/t3175.scala
similarity index 100%
rename from test/files/run/bug3175.scala
rename to test/files/run/t3175.scala
diff --git a/test/files/run/bug3232.scala b/test/files/run/t3232.scala
similarity index 100%
rename from test/files/run/bug3232.scala
rename to test/files/run/t3232.scala
diff --git a/test/files/run/bug3269.check b/test/files/run/t3269.check
similarity index 100%
rename from test/files/run/bug3269.check
rename to test/files/run/t3269.check
diff --git a/test/files/run/bug3269.scala b/test/files/run/t3269.scala
similarity index 100%
rename from test/files/run/bug3269.scala
rename to test/files/run/t3269.scala
diff --git a/test/files/run/bug3273.scala b/test/files/run/t3273.scala
similarity index 100%
rename from test/files/run/bug3273.scala
rename to test/files/run/t3273.scala
diff --git a/test/files/run/t3326.check b/test/files/run/t3326.check
new file mode 100644
index 0000000..d0e11ce
--- /dev/null
+++ b/test/files/run/t3326.check
@@ -0,0 +1,8 @@
+Map(2 -> Hello, 1 -> World)
+Map(5 -> Foo, 4 -> Bar)
+Map(5 -> Foo, 4 -> Bar, 2 -> Hello, 1 -> World)
+Map(3 -> ?, 2 -> Hello, 1 -> World)
+Map(2 -> Hello, 1 -> World)
+Map(5 -> Foo, 4 -> Bar)
+Map(5 -> Foo, 4 -> Bar, 2 -> Hello, 1 -> World)
+Map(3 -> ?, 2 -> Hello, 1 -> World)
\ No newline at end of file
diff --git a/test/files/run/t3326.scala b/test/files/run/t3326.scala
new file mode 100644
index 0000000..f70cb01
--- /dev/null
+++ b/test/files/run/t3326.scala
@@ -0,0 +1,74 @@
+
+
+
+import scala.math.Ordering
+
+
+
+/** The heart of the problem - we want to retain the ordering when
+ * using `++` on sorted maps.
+ *
+ * There are 2 `++` overloads - a generic one in traversables and
+ * a map-specific one in `MapLike` - which knows about the ordering.
+ *
+ * The problem here is that the expected return type for the expression
+ * in which `++` appears drives the decision of the overload that needs
+ * to be taken.
+ * The `collection.SortedMap` does not have `++` overridden to return
+ * `SortedMap`, but `immutable.Map` instead.
+ * This is why `collection.SortedMap` used to resort to the generic
+ * `TraversableLike.++` which knows nothing about the ordering.
+ *
+ * To avoid `collection.SortedMap`s resort to the more generic `TraverableLike.++`,
+ * we override the `MapLike.++` overload in `collection.SortedMap` to return
+ * the proper type `SortedMap`.
+ */
+object Test {
+
+ def main(args: Array[String]) {
+ testCollectionSorted()
+ testImmutableSorted()
+ }
+
+ def testCollectionSorted() {
+ import collection._
+ val order = implicitly[Ordering[Int]].reverse
+ var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+ var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+
+ m1 += (1 -> "World")
+ m1 += (2 -> "Hello")
+
+ m2 += (4 -> "Bar")
+ m2 += (5 -> "Foo")
+
+ val m3: SortedMap[Int, String] = m1 ++ m2
+
+ println(m1)
+ println(m2)
+ println(m3)
+
+ println(m1 + (3 -> "?"))
+ }
+
+ def testImmutableSorted() {
+ import collection.immutable._
+ val order = implicitly[Ordering[Int]].reverse
+ var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+ var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order)
+
+ m1 += (1 -> "World")
+ m1 += (2 -> "Hello")
+
+ m2 += (4 -> "Bar")
+ m2 += (5 -> "Foo")
+
+ val m3: SortedMap[Int, String] = m1 ++ m2
+
+ println(m1)
+ println(m2)
+ println(m3)
+
+ println(m1 + (3 -> "?"))
+ }
+}
diff --git a/test/files/run/bug3327.check b/test/files/run/t3327.check
similarity index 100%
rename from test/files/run/bug3327.check
rename to test/files/run/t3327.check
diff --git a/test/files/run/bug3327.scala b/test/files/run/t3327.scala
similarity index 100%
rename from test/files/run/bug3327.scala
rename to test/files/run/t3327.scala
diff --git a/test/files/run/t3353.check b/test/files/run/t3353.check
new file mode 100644
index 0000000..8b4ae1f
--- /dev/null
+++ b/test/files/run/t3353.check
@@ -0,0 +1 @@
+Got: foo and None
diff --git a/test/files/run/t3353.scala b/test/files/run/t3353.scala
new file mode 100644
index 0000000..eeb63c1
--- /dev/null
+++ b/test/files/run/t3353.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+
+ "foo" match {
+ case Matcher(result) => println(result)
+ }
+
+ object Matcher{
+ def unapply(s: String)(implicit secondParam: Option[String] = None) = Some("Got: " + s + " and " + secondParam)
+ }
+}
diff --git a/test/files/run/bug3376.check b/test/files/run/t3376.check
similarity index 100%
rename from test/files/run/bug3376.check
rename to test/files/run/t3376.check
diff --git a/test/files/run/bug3376.scala b/test/files/run/t3376.scala
similarity index 100%
rename from test/files/run/bug3376.scala
rename to test/files/run/t3376.scala
diff --git a/test/files/run/bug3395.check b/test/files/run/t3395.check
similarity index 100%
rename from test/files/run/bug3395.check
rename to test/files/run/t3395.check
diff --git a/test/files/run/bug3395.scala b/test/files/run/t3395.scala
similarity index 100%
rename from test/files/run/bug3395.scala
rename to test/files/run/t3395.scala
diff --git a/test/files/run/bug3397.scala b/test/files/run/t3397.scala
similarity index 100%
rename from test/files/run/bug3397.scala
rename to test/files/run/t3397.scala
diff --git a/test/files/run/bug3487.scala b/test/files/run/t3487.scala
similarity index 100%
rename from test/files/run/bug3487.scala
rename to test/files/run/t3487.scala
diff --git a/test/files/run/t3488.check b/test/files/run/t3488.check
new file mode 100644
index 0000000..0d66ea1
--- /dev/null
+++ b/test/files/run/t3488.check
@@ -0,0 +1,2 @@
+0
+1
diff --git a/test/files/run/t3488.scala b/test/files/run/t3488.scala
new file mode 100644
index 0000000..20a1400
--- /dev/null
+++ b/test/files/run/t3488.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ def foo(p: => Unit)(x:Int = 0) = x
+
+ println(foo { val List(_*)=List(0); 1 } ())
+ println(foo { val List(_*)=List(0); 1 } (1))
+}
diff --git a/test/files/run/t3507-new.check b/test/files/run/t3507-new.check
new file mode 100644
index 0000000..208e093
--- /dev/null
+++ b/test/files/run/t3507-new.check
@@ -0,0 +1 @@
+_1.b.c.type
diff --git a/test/files/run/t3507-new.scala b/test/files/run/t3507-new.scala
new file mode 100644
index 0000000..f045755
--- /dev/null
+++ b/test/files/run/t3507-new.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+
+class A {
+ object b {
+ object c
+ }
+ def m = b.c
+}
+
+object Test extends App {
+ var a: A = new A // mutable
+ val c /*: object _1.b.c forSome { val _1: A } */ = a.m // widening using existential
+
+ def mani[T: TypeTag](x: T) = println(typeOf[T])
+ mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier
+ // --> _1 is not in scope here
+}
\ No newline at end of file
diff --git a/test/files/run/bug3509.flags b/test/files/run/t3509.flags
similarity index 100%
copy from test/files/run/bug3509.flags
copy to test/files/run/t3509.flags
diff --git a/test/files/run/bug3509.scala b/test/files/run/t3509.scala
similarity index 100%
rename from test/files/run/bug3509.scala
rename to test/files/run/t3509.scala
diff --git a/test/files/run/bug3516.check b/test/files/run/t3516.check
similarity index 100%
rename from test/files/run/bug3516.check
rename to test/files/run/t3516.check
diff --git a/test/files/run/bug3516.scala b/test/files/run/t3516.scala
similarity index 100%
rename from test/files/run/bug3516.scala
rename to test/files/run/t3516.scala
diff --git a/test/files/run/bug3518.scala b/test/files/run/t3518.scala
similarity index 100%
rename from test/files/run/bug3518.scala
rename to test/files/run/t3518.scala
diff --git a/test/files/run/bug3529.scala b/test/files/run/t3529.scala
similarity index 100%
rename from test/files/run/bug3529.scala
rename to test/files/run/t3529.scala
diff --git a/test/files/run/bug3530.check b/test/files/run/t3530.check
similarity index 100%
rename from test/files/run/bug3530.check
rename to test/files/run/t3530.check
diff --git a/test/files/run/bug3530.scala b/test/files/run/t3530.scala
similarity index 100%
rename from test/files/run/bug3530.scala
rename to test/files/run/t3530.scala
diff --git a/test/files/run/bug3540.scala b/test/files/run/t3540.scala
similarity index 100%
rename from test/files/run/bug3540.scala
rename to test/files/run/t3540.scala
diff --git a/test/files/run/bug3563.scala b/test/files/run/t3563.scala
similarity index 100%
rename from test/files/run/bug3563.scala
rename to test/files/run/t3563.scala
diff --git a/test/files/run/t3569.check b/test/files/run/t3569.check
new file mode 100644
index 0000000..a9fb5ff
--- /dev/null
+++ b/test/files/run/t3569.check
@@ -0,0 +1,16 @@
+1
+private final int Test$X.val1
+private final int Test$X.val2
+private final int Test$X.val3
+private int Test$X.lval1
+private int Test$X.lval2
+private int Test$X.lval3
+private int Test$X.var1
+private int Test$X.var2
+private int Test$X.var3
+private int Test$X.x
+private volatile byte Test$X.bitmap$0
+private final int Test$Y.z1
+private final int Test$Y.z2
+private int Test$Y.x
+private int Test$Y.y
diff --git a/test/files/run/bug3509.flags b/test/files/run/t3569.flags
similarity index 100%
rename from test/files/run/bug3509.flags
rename to test/files/run/t3569.flags
diff --git a/test/files/run/t3569.scala b/test/files/run/t3569.scala
new file mode 100644
index 0000000..4699aea
--- /dev/null
+++ b/test/files/run/t3569.scala
@@ -0,0 +1,32 @@
+object Test {
+ final val bippy1 = 1
+ final lazy val bippy2 = 2
+
+ lazy val lv = scala.util.Random.nextInt()
+
+ class X(final var x: Int) {
+ final var var1: Int = 0
+ final private var var2: Int = 0
+ final private[this] var var3: Int = 0
+
+ final val val1: Int = 1
+ final private val val2: Int = 1
+ final private[this] val val3: Int = 1
+
+ final lazy val lval1: Int = 2
+ final private lazy val lval2: Int = 2
+ final private[this] lazy val lval3: Int = 2
+ }
+ case class Y(final var x: Int, final private var y: Int, final val z1: Int, final private val z2: Int) { }
+
+ def f = new X(0).x += 1
+ def main(args: Array[String]) {
+ f
+ val s = new X(0)
+ s.x += 1
+ println(s.x)
+
+ (classOf[X].getDeclaredFields map ("" + _)).sorted foreach println
+ (classOf[Y].getDeclaredFields map ("" + _)).sorted foreach println
+ }
+}
diff --git a/test/files/run/t3575.check b/test/files/run/t3575.check
new file mode 100644
index 0000000..8b935ad
--- /dev/null
+++ b/test/files/run/t3575.check
@@ -0,0 +1,20 @@
+Two
+Two$mcIL$sp
+Two$mcLI$sp
+Two$mcII$sp
+TwoLong
+TwoLong$mcIL$sp
+TwoLong$mcLI$sp
+TwoLong$mcII$sp
+TwoCool
+TwoCool$mcIL$sp
+TwoCool$mcLI$sp
+TwoCool$mcII$sp
+TwoShort
+TwoShort$mcIL$sp
+TwoShort$mcLI$sp
+TwoShort$mcII$sp
+TwoMinimal
+TwoMinimal$mcIL$sp
+TwoMinimal$mcLI$sp
+TwoMinimal$mcII$sp
diff --git a/test/files/run/t3575.scala b/test/files/run/t3575.scala
new file mode 100644
index 0000000..7ede65b
--- /dev/null
+++ b/test/files/run/t3575.scala
@@ -0,0 +1,55 @@
+// This is here to tell me if the behavior changes, not because
+// the output is endorsed.
+case class Two[
+ @specialized(Specializable.Everything) A,
+ @specialized(Specializable.Everything) B
+](v: A, w: B)
+
+case class TwoLong[
+ @specialized(Char, Boolean, Byte, Short, Int, Long, Float, Double, Unit, AnyRef) A,
+ @specialized(Char, Boolean, Byte, Short, Int, Long, Float, Double, Unit, AnyRef) B
+](v: A, w: B)
+
+case class TwoCool[
+ @specialized(Specializable.Everything) A,
+ @specialized(Specializable.Everything) B
+](v: A, w: B)
+
+case class TwoShort[
+ @specialized(Specializable.Everything) A,
+ @specialized(Specializable.Everything) B
+](v: A, w: B)
+
+case class TwoMinimal[
+ @specialized(Int, AnyRef) A,
+ @specialized(Int, AnyRef) B
+](v: A, w: B)
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(Two("Hello", "World").getClass().getName());
+ println(Two(12, "Hello").getClass().getName());
+ println(Two("Hello", 12).getClass().getName());
+ println(Two(12, 12).getClass().getName());
+
+ println(TwoLong("Hello", "World").getClass().getName());
+ println(TwoLong(12, "Hello").getClass().getName());
+ println(TwoLong("Hello", 12).getClass().getName());
+ println(TwoLong(12, 12).getClass().getName());
+
+ println(TwoCool("Hello", "World").getClass().getName());
+ println(TwoCool(12, "Hello").getClass().getName());
+ println(TwoCool("Hello", 12).getClass().getName());
+ println(TwoCool(12, 12).getClass().getName());
+
+ println(TwoShort("Hello", "World").getClass().getName());
+ println(TwoShort(12, "Hello").getClass().getName());
+ println(TwoShort("Hello", 12).getClass().getName());
+ println(TwoShort(12, 12).getClass().getName());
+
+ println(TwoMinimal("Hello", "World").getClass().getName());
+ println(TwoMinimal(12, "Hello").getClass().getName());
+ println(TwoMinimal("Hello", 12).getClass().getName());
+ println(TwoMinimal(12, 12).getClass().getName());
+ }
+}
diff --git a/test/files/run/t3613.scala b/test/files/run/t3613.scala
new file mode 100644
index 0000000..171a6a2
--- /dev/null
+++ b/test/files/run/t3613.scala
@@ -0,0 +1,22 @@
+class Boopy {
+ private val s = new Schnuck
+ def observer : PartialFunction[ Any, Unit ] = s.observer
+
+ private class Schnuck extends javax.swing.AbstractListModel {
+ model =>
+ val observer : PartialFunction[ Any, Unit ] = {
+ case "Boopy" => fireIntervalAdded( model, 0, 1 )
+ }
+ def getSize = 0
+ def getElementAt( idx: Int ) = ???
+ }
+
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val x = new Boopy
+ val o = x.observer
+ o( "Boopy" ) // --> throws runtime error
+ }
+}
diff --git a/test/files/run/bug3616.check b/test/files/run/t3616.check
similarity index 100%
rename from test/files/run/bug3616.check
rename to test/files/run/t3616.check
diff --git a/test/files/run/bug3616.scala b/test/files/run/t3616.scala
similarity index 100%
rename from test/files/run/bug3616.scala
rename to test/files/run/t3616.scala
diff --git a/test/files/run/bug363.check b/test/files/run/t363.check
similarity index 100%
rename from test/files/run/bug363.check
rename to test/files/run/t363.check
diff --git a/test/files/run/bug363.scala b/test/files/run/t363.scala
similarity index 100%
rename from test/files/run/bug363.scala
rename to test/files/run/t363.scala
diff --git a/test/files/run/bug3699.scala b/test/files/run/t3699.scala
similarity index 100%
rename from test/files/run/bug3699.scala
rename to test/files/run/t3699.scala
diff --git a/test/files/run/t3702.check b/test/files/run/t3702.check
new file mode 100644
index 0000000..31c2ac4
--- /dev/null
+++ b/test/files/run/t3702.check
@@ -0,0 +1,2 @@
+()
+6
diff --git a/test/files/run/t3702.scala b/test/files/run/t3702.scala
new file mode 100644
index 0000000..021abcb
--- /dev/null
+++ b/test/files/run/t3702.scala
@@ -0,0 +1,11 @@
+object Test {
+ def foo(h: Any, t: List[Any]) = h match {
+ case 5 :: _ => ()
+ case List(from) => from
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(foo(5 :: Nil, List(1,2,3)))
+ println(foo(6 :: Nil, List(1,2,3)))
+ }
+}
diff --git a/test/files/run/t3705.scala b/test/files/run/t3705.scala
new file mode 100644
index 0000000..fcc020f
--- /dev/null
+++ b/test/files/run/t3705.scala
@@ -0,0 +1,17 @@
+// package foo
+
+import scala.xml._
+object Test {
+ def updateNodes(ns: Seq[Node]): Seq[Node] =
+ for(subnode <- ns) yield subnode match {
+ case <d>{_}</d> if true => <d>abc</d>
+ case Elem(prefix, label, attribs, scope, children @ _*) =>
+ Elem(prefix, label, attribs, scope, updateNodes(children) : _*)
+ case other => other
+ }
+ def main(args: Array[String]): Unit = {
+ updateNodes(<b />)
+
+ }
+}
+
diff --git a/test/files/run/bug3714.scala b/test/files/run/t3714.scala
similarity index 100%
rename from test/files/run/bug3714.scala
rename to test/files/run/t3714.scala
diff --git a/test/files/run/t3758-old.scala b/test/files/run/t3758-old.scala
new file mode 100644
index 0000000..f00254a
--- /dev/null
+++ b/test/files/run/t3758-old.scala
@@ -0,0 +1,10 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ assert(classManifest[Array[String]].typeArguments contains classManifest[String])
+ assert(classManifest[Array[Int]].typeArguments contains classManifest[Int])
+ assert(classManifest[Array[Float]].typeArguments contains classManifest[Float])
+ assert(manifest[Array[String]].typeArguments contains manifest[String])
+ assert(manifest[Array[Int]].typeArguments contains manifest[Int])
+ assert(manifest[Array[Float]].typeArguments contains manifest[Float])
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/bug3760.scala b/test/files/run/t3760.scala
similarity index 100%
rename from test/files/run/bug3760.scala
rename to test/files/run/t3760.scala
diff --git a/test/files/run/t3761-overload-byname.check b/test/files/run/t3761-overload-byname.check
new file mode 100644
index 0000000..ab7eff0
--- /dev/null
+++ b/test/files/run/t3761-overload-byname.check
@@ -0,0 +1,12 @@
+hello!
+hello working world
+goodnight!
+goodnight moon, nobody, noises everywhere
+0
+1
+0
+1
+0
+1
+0
+1
diff --git a/test/files/run/t3761-overload-byname.scala b/test/files/run/t3761-overload-byname.scala
new file mode 100644
index 0000000..a52d866
--- /dev/null
+++ b/test/files/run/t3761-overload-byname.scala
@@ -0,0 +1,39 @@
+
+class OverTheTop {
+ def info0(m: String) = m + "!"
+ def info0(m: String, args: Any*) = m +" "+ args.mkString(" ")
+
+ // as reported
+ def info1(m: =>String) = m + "!"
+ def info1(m: =>String, args: Any*) = m +" "+ args.mkString(", ")
+
+ // @lrytz
+ def m[A](x: => Int) = 0; def m[A](x: => Int, xs: Int*) = 1
+
+ def m1(x: => Int, s: String) = 0
+ def m1(x: => Int, s: Object) = 1
+
+ def m2(x: => Int, s: String) = 0
+ def m2(x: => AnyVal, s: Object) = 1
+
+ def m3(x: => Int, s: String) = 0
+ def m3(x: => Any, s: Object) = 1
+}
+
+object Test {
+ def main(args: Array[String]) {
+ val top = new OverTheTop
+ println(top.info0("hello"))
+ println(top.info0("hello","working","world"))
+ println(top.info1("goodnight"))
+ println(top.info1("goodnight", "moon", "nobody", "noises everywhere"))
+ println(top.m(17))
+ println(top.m(17,19))
+ println(top.m1(1, "two"))
+ println(top.m1(1, new Object()))
+ println(top.m2(1, ""))
+ println(top.m2(1d, ""))
+ println(top.m3(1, ""))
+ println(top.m3("", ""))
+ }
+}
diff --git a/test/files/run/t3798.check b/test/files/run/t3798.check
new file mode 100644
index 0000000..27ba77d
--- /dev/null
+++ b/test/files/run/t3798.check
@@ -0,0 +1 @@
+true
diff --git a/test/files/run/t3798.scala b/test/files/run/t3798.scala
new file mode 100644
index 0000000..3ede57b
--- /dev/null
+++ b/test/files/run/t3798.scala
@@ -0,0 +1,10 @@
+object Test {
+ def main(args: Array[String]) {
+ val seq: MySeq[Undefined] = new MySeq[Floats](new Array[Float](10))
+ println(10 == seq.array.length)
+ }
+}
+
+sealed trait Undefined { type ArrayType <: Array[_] }
+sealed trait Floats extends Undefined { type ArrayType = Array[Float] }
+class MySeq[+T <: Undefined](val array: T#ArrayType)
diff --git a/test/files/run/bug3822.scala b/test/files/run/t3822.scala
similarity index 100%
rename from test/files/run/bug3822.scala
rename to test/files/run/t3822.scala
diff --git a/test/files/run/t3835.scala b/test/files/run/t3835.scala
index 49e5911..c120a61 100644
--- a/test/files/run/t3835.scala
+++ b/test/files/run/t3835.scala
@@ -1,4 +1,9 @@
object Test extends App {
- println((1, 2, 3) match { case (r, \u03b8, \u03c6) => r + \u03b8 + \u03c6 })
- println(1 match { case \u00e9 => \u00e9 })
+ // work around optimizer bug SI-5672 -- generates wrong bytecode for switches in arguments
+ // virtpatmat happily emits a switch for a one-case switch, whereas -Xoldpatmat did not
+ // this is not the focus of this test, hence the temporary workaround
+ def a = (1, 2, 3) match { case (r, \u03b8, \u03c6) => r + \u03b8 + \u03c6 }
+ println(a)
+ def b = (1 match { case \u00e9 => \u00e9 })
+ println(b)
}
diff --git a/test/files/run/bug3855.scala b/test/files/run/t3855.scala
similarity index 100%
rename from test/files/run/bug3855.scala
rename to test/files/run/t3855.scala
diff --git a/test/files/run/bug3923.scala b/test/files/run/t3923.scala
similarity index 100%
rename from test/files/run/bug3923.scala
rename to test/files/run/t3923.scala
diff --git a/test/files/run/bug3964.check b/test/files/run/t3964.check
similarity index 100%
rename from test/files/run/bug3964.check
rename to test/files/run/t3964.check
diff --git a/test/files/run/bug3964.scala b/test/files/run/t3964.scala
similarity index 100%
rename from test/files/run/bug3964.scala
rename to test/files/run/t3964.scala
diff --git a/test/files/run/bug3984.scala b/test/files/run/t3984.scala
similarity index 100%
rename from test/files/run/bug3984.scala
rename to test/files/run/t3984.scala
diff --git a/test/files/run/t3994.scala b/test/files/run/t3994.scala
new file mode 100644
index 0000000..0ee1d9d
--- /dev/null
+++ b/test/files/run/t3994.scala
@@ -0,0 +1,20 @@
+trait T {
+ trait Default { def foo = this }
+ object Default extends Default
+}
+
+class Crash { // if you change this to a `trait` it keeps failing, though if it is an `object` it compiles just fine!
+ class Element
+
+ /* declare this as a class, and the crash goes away */
+ trait ElementOrdering extends Ordering[Element] {
+ def compare(a: Element, b: Element): Int = 0
+ }
+
+ implicit object ElementOrdering extends ElementOrdering
+}
+
+object Test extends App {
+ (new T {}).Default
+ (new Crash).ElementOrdering
+}
diff --git a/test/files/run/t4024.scala b/test/files/run/t4024.scala
index ef768be..7c62a3f 100644
--- a/test/files/run/t4024.scala
+++ b/test/files/run/t4024.scala
@@ -5,5 +5,16 @@ object Test extends App {
val m = x.getClass.getMethod("toString")
assert(m.invoke(x, (Nil: List[AnyRef]): _*) == "abc")
+
+ Test2.main(Array())
}
+
+object Test2 {
+ def main(args: Array[String]): Unit = {
+ val x = "abc"
+ val m = x.getClass.getMethod("toString")
+ m.invoke(x, Nil: _*)
+ m.invoke(x, Seq(): _*)
+ }
+}
diff --git a/test/files/run/t4025.check b/test/files/run/t4025.check
new file mode 100644
index 0000000..6715003
--- /dev/null
+++ b/test/files/run/t4025.check
@@ -0,0 +1,19 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> class Color(val red: Int)
+defined class Color
+
+scala>
+
+scala> case class Red(r:Int) extends Color(r)
+defined class Red
+
+scala>
+
+scala> def f(c: Any) = c match { case Red(_) => () }
+f: (c: Any)Unit
+
+scala>
diff --git a/test/files/run/t4025.scala b/test/files/run/t4025.scala
new file mode 100644
index 0000000..5db0093
--- /dev/null
+++ b/test/files/run/t4025.scala
@@ -0,0 +1,12 @@
+import scala.tools.nsc.Settings
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+class Color(val red: Int)
+
+case class Red(r:Int) extends Color(r)
+
+def f(c: Any) = c match { case Red(_) => () }
+"""
+}
diff --git a/test/files/run/t4027.check b/test/files/run/t4027.check
new file mode 100644
index 0000000..bdacfc1
--- /dev/null
+++ b/test/files/run/t4027.check
@@ -0,0 +1,12 @@
+Map(2 -> true, 4 -> true)
+Map(1 -> false!, 2 -> true!, 3 -> false!, 4 -> true!)
+Map(2 -> 4, 4 -> 4)
+Map(1 -> 6, 2 -> 5, 3 -> 6, 4 -> 5)
+Map()
+Map(1 -> false!)
+Map(2 -> true, 4 -> true)
+Map(1 -> false!, 2 -> true!, 3 -> false!, 4 -> true!)
+Map(2 -> 4, 4 -> 4)
+Map(1 -> 6, 2 -> 5, 3 -> 6, 4 -> 5)
+Map()
+Map(1 -> false!)
\ No newline at end of file
diff --git a/test/files/run/t4027.scala b/test/files/run/t4027.scala
new file mode 100644
index 0000000..d70ca0c
--- /dev/null
+++ b/test/files/run/t4027.scala
@@ -0,0 +1,27 @@
+
+
+import collection._
+
+
+/** Sorted maps should have `filterKeys` and `mapValues` which return sorted maps.
+ * Mapping, filtering, etc. on these views should return sorted maps again.
+ */
+object Test extends App {
+
+ val sortedmap = SortedMap(1 -> false, 2 -> true, 3 -> false, 4 -> true)
+ println(sortedmap.filterKeys(_ % 2 == 0): SortedMap[Int, Boolean])
+ println(sortedmap.mapValues(_ + "!"): SortedMap[Int, String])
+ println(sortedmap.filterKeys(_ % 2 == 0).map(t => (t._1, t._2.toString.length)): SortedMap[Int, Int])
+ println(sortedmap.mapValues(_ + "!").map(t => (t._1, t._2.toString.length)): SortedMap[Int, Int])
+ println(sortedmap.filterKeys(_ % 2 == 0).filter(t => t._1 < 2): SortedMap[Int, Boolean])
+ println(sortedmap.mapValues(_ + "!").filter(t => t._1 < 2): SortedMap[Int, String])
+
+ val immsortedmap = immutable.SortedMap(1 -> false, 2 -> true, 3 -> false, 4 -> true)
+ println(immsortedmap.filterKeys(_ % 2 == 0): immutable.SortedMap[Int, Boolean])
+ println(immsortedmap.mapValues(_ + "!"): immutable.SortedMap[Int, String])
+ println(immsortedmap.filterKeys(_ % 2 == 0).map(t => (t._1, t._2.toString.length)): immutable.SortedMap[Int, Int])
+ println(immsortedmap.mapValues(_ + "!").map(t => (t._1, t._2.toString.length)): immutable.SortedMap[Int, Int])
+ println(immsortedmap.filterKeys(_ % 2 == 0).filter(t => t._1 < 2): immutable.SortedMap[Int, Boolean])
+ println(immsortedmap.mapValues(_ + "!").filter(t => t._1 < 2): immutable.SortedMap[Int, String])
+
+}
diff --git a/test/files/run/bug405.scala b/test/files/run/t405.scala
similarity index 100%
rename from test/files/run/bug405.scala
rename to test/files/run/t405.scala
diff --git a/test/files/run/bug4062.check b/test/files/run/t4062.check
similarity index 100%
rename from test/files/run/bug4062.check
rename to test/files/run/t4062.check
diff --git a/test/files/run/bug4062.scala b/test/files/run/t4062.scala
similarity index 100%
rename from test/files/run/bug4062.scala
rename to test/files/run/t4062.scala
diff --git a/test/files/run/bug408.scala b/test/files/run/t408.scala
similarity index 100%
rename from test/files/run/bug408.scala
rename to test/files/run/t408.scala
diff --git a/test/files/run/bug4080.check b/test/files/run/t4080.check
similarity index 100%
rename from test/files/run/bug4080.check
rename to test/files/run/t4080.check
diff --git a/test/files/run/bug4080.scala b/test/files/run/t4080.scala
similarity index 100%
rename from test/files/run/bug4080.scala
rename to test/files/run/t4080.scala
diff --git a/test/files/run/t4110-new.check b/test/files/run/t4110-new.check
new file mode 100644
index 0000000..c0f646c
--- /dev/null
+++ b/test/files/run/t4110-new.check
@@ -0,0 +1,2 @@
+Test.A with Test.B
+Test.A with Test.B
diff --git a/test/files/run/t4110-new.scala b/test/files/run/t4110-new.scala
new file mode 100644
index 0000000..24ecd66
--- /dev/null
+++ b/test/files/run/t4110-new.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def inferredType[T : TypeTag](v : T) = println(typeOf[T])
+
+ trait A
+ trait B
+
+ inferredType(new A with B)
+
+ val name = new A with B
+ inferredType(name)
+}
\ No newline at end of file
diff --git a/test/files/run/bug4110.check b/test/files/run/t4110-old.check
similarity index 100%
rename from test/files/run/bug4110.check
rename to test/files/run/t4110-old.check
diff --git a/test/files/run/t4110-old.scala b/test/files/run/t4110-old.scala
new file mode 100644
index 0000000..a42646c
--- /dev/null
+++ b/test/files/run/t4110-old.scala
@@ -0,0 +1,11 @@
+object Test extends App {
+ def inferredType[T : Manifest](v : T) = println(manifest[T])
+
+ trait A
+ trait B
+
+ inferredType(new A with B)
+
+ val name = new A with B
+ inferredType(name)
+}
\ No newline at end of file
diff --git a/test/files/run/bug4119/J.java b/test/files/run/t4119/J.java
similarity index 100%
rename from test/files/run/bug4119/J.java
rename to test/files/run/t4119/J.java
diff --git a/test/files/run/bug4119/S.scala b/test/files/run/t4119/S.scala
similarity index 100%
rename from test/files/run/bug4119/S.scala
rename to test/files/run/t4119/S.scala
diff --git a/test/files/run/bug4122.scala b/test/files/run/t4122.scala
similarity index 100%
rename from test/files/run/bug4122.scala
rename to test/files/run/t4122.scala
diff --git a/test/files/run/t4124.check b/test/files/run/t4124.check
new file mode 100644
index 0000000..66a0092
--- /dev/null
+++ b/test/files/run/t4124.check
@@ -0,0 +1,4 @@
+hi
+hi
+bye
+bye
diff --git a/test/files/run/t4124.scala b/test/files/run/t4124.scala
new file mode 100644
index 0000000..9f35b57
--- /dev/null
+++ b/test/files/run/t4124.scala
@@ -0,0 +1,24 @@
+import xml.Node
+
+object Test extends App {
+ val body: Node = <elem>hi</elem>
+ println ((body: AnyRef, "foo") match {
+ case (node: Node, "bar") => "bye"
+ case (ser: Serializable, "foo") => "hi"
+ })
+
+ println ((body, "foo") match {
+ case (node: Node, "bar") => "bye"
+ case (ser: Serializable, "foo") => "hi"
+ })
+
+ println ((body: AnyRef, "foo") match {
+ case (node: Node, "foo") => "bye"
+ case (ser: Serializable, "foo") => "hi"
+ })
+
+ println ((body: AnyRef, "foo") match {
+ case (node: Node, "foo") => "bye"
+ case (ser: Serializable, "foo") => "hi"
+ })
+}
diff --git a/test/files/run/t4138.check b/test/files/run/t4138.check
new file mode 100644
index 0000000..f561b5e
--- /dev/null
+++ b/test/files/run/t4138.check
@@ -0,0 +1,2 @@
+[1.45] parsed: "lir 'de\' ' \\ \n / upa \"new\" \t parsing"
+[1.5] parsed: "s "
diff --git a/test/files/run/t4138.scala b/test/files/run/t4138.scala
new file mode 100644
index 0000000..131489e
--- /dev/null
+++ b/test/files/run/t4138.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ object p extends scala.util.parsing.combinator.JavaTokenParsers
+
+ println(p.parse(p.stringLiteral, """"lir 'de\' ' \\ \n / upa \"new\" \t parsing""""))
+ println(p.parse(p.stringLiteral, """"s " lkjse""""))
+}
diff --git a/test/files/run/t4147.scala b/test/files/run/t4147.scala
new file mode 100644
index 0000000..c1e2d74
--- /dev/null
+++ b/test/files/run/t4147.scala
@@ -0,0 +1,36 @@
+
+
+
+import scala.collection._
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ checkElementsAreSorted()
+ checkRangedImpl()
+ }
+
+ def checkElementsAreSorted() {
+ val tree = mutable.SortedSet[Int]()
+ tree ++= List(4, 3, 1, 6, 7, 5, 2)
+ assert(tree == immutable.SortedSet(1, 2, 3, 4, 5, 6, 7))
+ assert(tree.size == 7)
+ }
+
+ def checkRangedImpl() {
+ val tree = mutable.SortedSet[Int](3, 1, 6, 7, 5, 2)
+ val projection = tree.rangeImpl(Some(3), Some(6))
+ assert(projection == immutable.SortedSet(3, 5))
+ assert(projection.size == 2)
+
+ // Let's check that modification are taken into account
+ tree add 4
+ assert(tree == immutable.SortedSet(1, 2, 3, 4, 5, 6, 7))
+ assert(projection == immutable.SortedSet(3, 4, 5))
+ assert(tree.size == 7)
+ assert(projection.size == 3)
+ }
+
+}
diff --git a/test/files/run/bug4148.check b/test/files/run/t4148.check
similarity index 100%
rename from test/files/run/bug4148.check
rename to test/files/run/t4148.check
diff --git a/test/files/run/bug4148.scala b/test/files/run/t4148.scala
similarity index 100%
rename from test/files/run/bug4148.scala
rename to test/files/run/t4148.scala
diff --git a/test/files/run/t4171.check b/test/files/run/t4171.check
new file mode 100644
index 0000000..d72391a
--- /dev/null
+++ b/test/files/run/t4171.check
@@ -0,0 +1,3 @@
+1
+5
+class Test$B$1
diff --git a/test/files/run/t4171.scala b/test/files/run/t4171.scala
new file mode 100644
index 0000000..fba2fb5
--- /dev/null
+++ b/test/files/run/t4171.scala
@@ -0,0 +1,11 @@
+object Test {
+ val c = { class C; new C { def foo = 1 } }
+ val a = { class B { def bar = 5 }; class C extends B; new C }
+ val e = { class A; class B extends A; classOf[B] }
+
+ def main(args: Array[String]): Unit = {
+ println(c.foo)
+ println(a.bar)
+ println(e)
+ }
+}
diff --git a/test/files/run/t4172.check b/test/files/run/t4172.check
new file mode 100644
index 0000000..b48c9ca
--- /dev/null
+++ b/test/files/run/t4172.check
@@ -0,0 +1,12 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> val c = { class C { override def toString = "C" }; ((new C, new C { def f = 2 })) }
+warning: there were 1 feature warning(s); re-run with -feature for details
+c: (C, C{def f: Int}) forSome { type C <: AnyRef } = (C,C)
+
+scala>
+
+scala>
diff --git a/test/files/run/t4172.scala b/test/files/run/t4172.scala
new file mode 100644
index 0000000..3a08f2f
--- /dev/null
+++ b/test/files/run/t4172.scala
@@ -0,0 +1,7 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ val c = { class C { override def toString = "C" }; ((new C, new C { def f = 2 })) }
+ """
+}
diff --git a/test/files/run/t4190.check b/test/files/run/t4190.check
new file mode 100644
index 0000000..b8aae0c
--- /dev/null
+++ b/test/files/run/t4190.check
@@ -0,0 +1,3 @@
+a0
+b0
+c0
diff --git a/test/files/run/t4190.scala b/test/files/run/t4190.scala
new file mode 100644
index 0000000..aa88b87
--- /dev/null
+++ b/test/files/run/t4190.scala
@@ -0,0 +1,6 @@
+import collection.mutable._
+
+object Test extends App {
+ val x: ArrayBuffer[String] = ArrayBuffer("a", "b", "c")
+ x.view map (_ + "0") foreach println
+}
diff --git a/test/files/run/bug4201.scala b/test/files/run/t4201.scala
similarity index 100%
rename from test/files/run/bug4201.scala
rename to test/files/run/t4201.scala
diff --git a/test/files/run/t4216.check b/test/files/run/t4216.check
new file mode 100644
index 0000000..091e55a
--- /dev/null
+++ b/test/files/run/t4216.check
@@ -0,0 +1,37 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> import scala.reflect.ClassTag
+import scala.reflect.ClassTag
+
+scala> def f[A: ClassTag](a: A) = java.util.Arrays.asList(Array(a): _*)
+f: [A](a: A)(implicit evidence$1: scala.reflect.ClassTag[A])java.util.List[A]
+
+scala> f(".")
+res0: java.util.List[String] = [.]
+
+scala> f(0)
+res1: java.util.List[Int] = [0]
+
+scala> def i(a: Int) = java.util.Arrays.asList(Array(a): _*)
+i: (a: Int)java.util.List[Int]
+
+scala> i(0)
+res2: java.util.List[Int] = [0]
+
+scala> def o(a: Any) = java.util.Arrays.asList(Array(a): _*)
+o: (a: Any)java.util.List[Any]
+
+scala> o(".")
+res3: java.util.List[Any] = [.]
+
+scala> class V(val a: Int) extends AnyVal
+defined class V
+
+scala> f(new V(0))
+res4: java.util.List[V] = [V at 0]
+
+scala> o(new V(0))
+res5: java.util.List[Any] = [V at 0]
+
+scala>
diff --git a/test/files/run/t4216.scala b/test/files/run/t4216.scala
new file mode 100644
index 0000000..ecaae5b
--- /dev/null
+++ b/test/files/run/t4216.scala
@@ -0,0 +1,19 @@
+import scala.tools.partest.ReplTest
+
+// t4216
+object Test extends ReplTest {
+ def code =
+ """
+ |import scala.reflect.ClassTag
+ |def f[A: ClassTag](a: A) = java.util.Arrays.asList(Array(a): _*)
+ |f(".")
+ |f(0)
+ |def i(a: Int) = java.util.Arrays.asList(Array(a): _*)
+ |i(0)
+ |def o(a: Any) = java.util.Arrays.asList(Array(a): _*)
+ |o(".")
+ |class V(val a: Int) extends AnyVal
+ |f(new V(0))
+ |o(new V(0))
+ |""".stripMargin.trim
+}
diff --git a/test/files/run/bug4238/J_1.java b/test/files/run/t4238/J_1.java
similarity index 100%
rename from test/files/run/bug4238/J_1.java
rename to test/files/run/t4238/J_1.java
diff --git a/test/files/run/bug4238/s_2.scala b/test/files/run/t4238/s_2.scala
similarity index 100%
rename from test/files/run/bug4238/s_2.scala
rename to test/files/run/t4238/s_2.scala
diff --git a/test/files/run/t4283.check b/test/files/run/t4283.check
new file mode 100644
index 0000000..0d27989
--- /dev/null
+++ b/test/files/run/t4283.check
@@ -0,0 +1,5 @@
+2
+2
+1
+1
+1
diff --git a/test/pending/run/t4283/AbstractFoo.java b/test/files/run/t4283/AbstractFoo.java
similarity index 100%
rename from test/pending/run/t4283/AbstractFoo.java
rename to test/files/run/t4283/AbstractFoo.java
diff --git a/test/pending/run/t4283/ScalaBipp.scala b/test/files/run/t4283/ScalaBipp.scala
similarity index 100%
rename from test/pending/run/t4283/ScalaBipp.scala
rename to test/files/run/t4283/ScalaBipp.scala
diff --git a/test/files/run/t4283/Test.scala b/test/files/run/t4283/Test.scala
new file mode 100644
index 0000000..af72fa6
--- /dev/null
+++ b/test/files/run/t4283/Test.scala
@@ -0,0 +1,16 @@
+object Test {
+
+ def main(args: Array[String]) {
+ val x = (new test.ScalaBipp).make.get.asInstanceOf[test.ScalaBipp].f()
+ println(x)
+ val y = (new test.ScalaBipp).make.get.f()
+ println(y)
+ val u = (new test.ScalaBipp).make.get.asInstanceOf[test.ScalaBipp].t
+ println(u)
+ val v = (new test.ScalaBipp).make.get.t
+ println(v)
+ val sb: test.ScalaBipp = (new test.ScalaBipp).make.get
+ val z = sb.t
+ println(z)
+ }
+}
diff --git a/test/files/run/bug4285.check b/test/files/run/t4285.check
similarity index 100%
rename from test/files/run/bug4285.check
rename to test/files/run/t4285.check
diff --git a/test/files/pos/bug3252.flags b/test/files/run/t4285.flags
similarity index 100%
copy from test/files/pos/bug3252.flags
copy to test/files/run/t4285.flags
diff --git a/test/files/run/bug4285.scala b/test/files/run/t4285.scala
similarity index 100%
rename from test/files/run/bug4285.scala
rename to test/files/run/t4285.scala
diff --git a/test/files/run/bug4288.scala b/test/files/run/t4288.scala
similarity index 100%
rename from test/files/run/bug4288.scala
rename to test/files/run/t4288.scala
diff --git a/test/files/run/bug429.check b/test/files/run/t429.check
similarity index 100%
rename from test/files/run/bug429.check
rename to test/files/run/t429.check
diff --git a/test/files/run/bug429.scala b/test/files/run/t429.scala
similarity index 100%
rename from test/files/run/bug429.scala
rename to test/files/run/t429.scala
diff --git a/test/files/run/bug4297.scala b/test/files/run/t4297.scala
similarity index 100%
rename from test/files/run/bug4297.scala
rename to test/files/run/t4297.scala
diff --git a/test/files/run/bug4317.check b/test/files/run/t4317.check
similarity index 100%
rename from test/files/run/bug4317.check
rename to test/files/run/t4317.check
diff --git a/test/files/neg/caseinherit.flags b/test/files/run/t4317.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/run/t4317.flags
diff --git a/test/files/run/bug4317/J_2.java b/test/files/run/t4317/J_2.java
similarity index 100%
rename from test/files/run/bug4317/J_2.java
rename to test/files/run/t4317/J_2.java
diff --git a/test/files/run/t4317/S_1.scala b/test/files/run/t4317/S_1.scala
new file mode 100644
index 0000000..2756c87
--- /dev/null
+++ b/test/files/run/t4317/S_1.scala
@@ -0,0 +1,11 @@
+import language.existentials
+
+object S_1 {
+ def foo1(x: Class[_ <: AnyRef]) = 0
+ def foo2(x: Class[_ <: AnyRef], y: Int) = 99
+ def foo3[T](x: Int, y: Int) = x + y
+ def foo4a(x: Unit): Unit = ()
+ def foo4[T](x: Unit): Unit = ()
+ def foo5[T <: Unit](x: T): T = sys.error("")
+ def foo6[T](x: Class[_], y: Class[T], z: Class[_ <: T]) = ((x, y, z))
+}
diff --git a/test/files/run/bug4317/S_3.scala b/test/files/run/t4317/S_3.scala
similarity index 100%
rename from test/files/run/bug4317/S_3.scala
rename to test/files/run/t4317/S_3.scala
diff --git a/test/files/run/bug4387.scala b/test/files/run/t4387.scala
similarity index 100%
rename from test/files/run/bug4387.scala
rename to test/files/run/t4387.scala
diff --git a/test/files/run/t4398.scala b/test/files/run/t4398.scala
new file mode 100644
index 0000000..1d57eb6
--- /dev/null
+++ b/test/files/run/t4398.scala
@@ -0,0 +1,11 @@
+
+
+object Test {
+ def main(args: Array[String]) {
+ val x = 1 to 10 toSet
+ val y = x + 5
+ val z = y - 154321
+ assert(x eq y)
+ assert(x eq z)
+ }
+}
diff --git a/test/files/run/t4415.scala b/test/files/run/t4415.scala
new file mode 100644
index 0000000..f96031d
--- /dev/null
+++ b/test/files/run/t4415.scala
@@ -0,0 +1,86 @@
+/**
+ * Demonstration of issue with Extractors. If lines 15/16 are not present, get at runtime:
+ *
+ * Exception in thread "main" java.lang.VerifyError: (class: ExtractorIssue$$, method: convert signature: (LTopProperty;)LMyProp;) Accessing value from uninitialized register 5
+ * at ExtractorIssue.main(ExtractorIssue.scala)
+ * at com.intellij.rt.execution.application.AppMain.main(AppMain.java:115)]
+ *
+ * If lines 15/16 are present, the compiler crashes:
+ *
+ * fatal error (server aborted): not enough arguments for method body%3: (val p: MyProp[java.lang.String])MyProp[_33].
+ * Unspecified value parameter p.
+ */
+object Test {
+
+ def main(args: Array[String]) {
+ convert(new SubclassProperty)
+ }
+
+ def convert(prop: TopProperty): MyProp[_] = {
+ prop match {
+
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ //case SubclassSecondMatch(p) => p // if these lines are present, the compiler crashes. If commented, unsafe byte
+ //case SecondMatch(p) => p // byte code is generated, which causes a java.lang.VerifyError at runtime
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ case SubclassMatch(p) => p
+ case StandardMatch(p) => p
+ }
+ }
+}
+
+class TopProperty
+
+class StandardProperty extends TopProperty
+class SubclassProperty extends StandardProperty
+
+class SecondProperty extends TopProperty
+class SubclassSecondProperty extends StandardProperty
+
+trait MyProp[T]
+case class MyPropImpl[T] extends MyProp[T]
+
+object SubclassMatch {
+
+ def unapply(prop: SubclassProperty) : Option[MyProp[String]] = {
+ Some(new MyPropImpl)
+ }
+
+ def apply(prop: MyProp[String]) : SubclassProperty = {
+ new SubclassProperty()
+ }
+}
+
+object StandardMatch {
+
+ def unapply(prop: StandardProperty) : Option[MyProp[String]] = {
+ Some(new MyPropImpl)
+ }
+
+ def apply(prop: MyProp[String]) : StandardProperty = {
+ new StandardProperty()
+ }
+}
+
+object SubclassSecondMatch {
+
+ def unapply(prop: SubclassSecondProperty) : Option[MyProp[BigInt]] = {
+ Some(new MyPropImpl)
+ }
+
+ def apply(prop: MyProp[String]) : SubclassSecondProperty = {
+ new SubclassSecondProperty()
+ }
+}
+
+object SecondMatch {
+
+ def unapply(prop: SecondProperty) : Option[MyProp[BigInt]] = {
+ Some(new MyPropImpl)
+ }
+
+ def apply(prop: MyProp[String]) : SecondProperty = {
+ new SecondProperty()
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t4461.check b/test/files/run/t4461.check
new file mode 100644
index 0000000..e9c01e7
--- /dev/null
+++ b/test/files/run/t4461.check
@@ -0,0 +1,11 @@
+Include(End,1)
+Include(End,2)
+Include(End,3)
+Include(End,4)
+Include(End,5)
+Include(End,6)
+Include(End,7)
+Script([1] Include(Index(7),8), [2] Include(Index(8),9), [3] Include(Index(9),10))
+Include(Start,0)
+Script([1] Include(Index(0),-2), [2] Include(Index(1),-1))
+Remove(Index(0),-2)
\ No newline at end of file
diff --git a/test/files/run/t4461.scala b/test/files/run/t4461.scala
new file mode 100644
index 0000000..adc9201
--- /dev/null
+++ b/test/files/run/t4461.scala
@@ -0,0 +1,23 @@
+import scala.collection.mutable._
+import scala.collection.script._
+
+
+// #4461
+object Test {
+ def main(args: Array[String]) {
+ val buf = new ArrayBuffer[Int] with ObservableBuffer[Int]
+ buf.subscribe(new Subscriber[Message[Int], ObservableBuffer[Int]] {
+ def notify(pub: ObservableBuffer[Int], event: Message[Int]) = println(event)
+ })
+
+ buf += 1 // works
+ buf ++= Array(2) // works
+ buf ++= ArrayBuffer(3, 4) // works
+ buf ++= List(5) // works
+ buf ++= collection.immutable.Vector(6, 7) // works
+ buf.insertAll(7, List(8, 9, 10))
+ 0 +=: buf
+ List(-2, -1) ++=: buf
+ buf remove 0
+ }
+}
diff --git a/test/files/run/t4482.check b/test/files/run/t4482.check
new file mode 100644
index 0000000..0cfbf08
--- /dev/null
+++ b/test/files/run/t4482.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/t4482.scala b/test/files/run/t4482.scala
new file mode 100644
index 0000000..392861c
--- /dev/null
+++ b/test/files/run/t4482.scala
@@ -0,0 +1,15 @@
+trait Foo { def i: Int }
+trait Bar
+
+case class Spam(i: Int) extends Foo with Bar
+
+object Test {
+ def matchParent(p:Any) = p match {
+ case f:Foo if f.i == 1 => 1
+ case _:Bar => 2
+ case _:Foo => 3
+ }
+ def main(args: Array[String]): Unit = {
+ println(matchParent(Spam(3)))
+ }
+}
diff --git a/test/files/run/t4535.check b/test/files/run/t4535.check
new file mode 100644
index 0000000..9d4ce0d
--- /dev/null
+++ b/test/files/run/t4535.check
@@ -0,0 +1,3 @@
+ArrayStack(1, 2, 3)
+ArrayStack(1, 2, 3, 4, 5, 6)
+ArrayStack(6, 5, 4, 3, 2, 1)
\ No newline at end of file
diff --git a/test/files/run/t4535.scala b/test/files/run/t4535.scala
new file mode 100644
index 0000000..eba7943
--- /dev/null
+++ b/test/files/run/t4535.scala
@@ -0,0 +1,30 @@
+
+
+import collection._
+
+
+// #4535
+object Test {
+
+ def main(args: Array[String]) {
+ val as = new mutable.ArrayStack[Int]
+ as push 1
+ as push 2
+ as push 3
+ println(as.reverse)
+
+ as push 4
+ as push 5
+ as push 6
+ println(as.reverse)
+
+ println(as map { x => x })
+
+ for (i <- 0 until 100) {
+ as push i
+ assert(as == as.map(x => x))
+ assert(as == as.reverse.reverse)
+ }
+ }
+
+}
diff --git a/test/files/run/t4536.check b/test/files/run/t4536.check
new file mode 100644
index 0000000..0c5a72a
--- /dev/null
+++ b/test/files/run/t4536.check
@@ -0,0 +1,8 @@
+cls: bar
+obj: foo
+obj: bar
+cls: bar
+obj: bar
+trait: pili
+trait: mili
+trait: foo
\ No newline at end of file
diff --git a/test/files/run/t4536.flags b/test/files/run/t4536.flags
new file mode 100644
index 0000000..1141f97
--- /dev/null
+++ b/test/files/run/t4536.flags
@@ -0,0 +1 @@
+-language:dynamics
diff --git a/test/files/run/t4536.scala b/test/files/run/t4536.scala
new file mode 100644
index 0000000..acd91de
--- /dev/null
+++ b/test/files/run/t4536.scala
@@ -0,0 +1,46 @@
+
+
+
+
+
+
+object dynamicObject extends Dynamic {
+ def applyDynamic(m: String)() = println("obj: " + m);
+ this.foo()
+}
+
+
+class dynamicClass extends Dynamic {
+ def applyDynamic(m: String)() = println("cls: " + m);
+ this.bar()
+ dynamicObject.bar()
+}
+
+
+abstract class dynamicAbstractClass extends Dynamic {
+ def applyDynamic(m: String)(args: Any*): Unit
+ this.pili(1, new dynamicClass, "hello");
+}
+
+
+trait dynamicTrait extends Dynamic {
+ def applyDynamic(m: String)(args: Any*) = println("trait: " + m);
+ def two = 2
+ this.mili(1,2,3)
+ two
+}
+
+
+object dynamicMixin extends dynamicAbstractClass with dynamicTrait {
+ this.foo(None)
+}
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val cls = new dynamicClass
+ dynamicMixin
+ }
+
+}
diff --git a/test/files/run/t4542.check b/test/files/run/t4542.check
new file mode 100644
index 0000000..5c293a8
--- /dev/null
+++ b/test/files/run/t4542.check
@@ -0,0 +1,19 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> @deprecated("foooo", "ReplTest version 1.0-FINAL") class Foo() {
+ override def toString = "Bippy"
+}
+defined class Foo
+
+scala> val f = new Foo
+<console>:8: warning: class Foo is deprecated: foooo
+ val f = new Foo
+ ^
+f: Foo = Bippy
+
+scala>
+
+scala>
diff --git a/test/files/run/t4542.scala b/test/files/run/t4542.scala
new file mode 100644
index 0000000..5d6e8fe
--- /dev/null
+++ b/test/files/run/t4542.scala
@@ -0,0 +1,11 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def extraSettings = "-deprecation"
+ def code = """
+ |@deprecated("foooo", "ReplTest version 1.0-FINAL") class Foo() {
+ | override def toString = "Bippy"
+ |}
+ |val f = new Foo
+ """.stripMargin
+}
diff --git a/test/files/run/bug4558.scala b/test/files/run/t4558.scala
similarity index 100%
rename from test/files/run/bug4558.scala
rename to test/files/run/t4558.scala
diff --git a/test/files/run/t4560.check b/test/files/run/t4560.check
index fd3c81a..f8cb083 100644
--- a/test/files/run/t4560.check
+++ b/test/files/run/t4560.check
@@ -1,2 +1,6 @@
-5
-5
+'Test
+Success 1
+'Test
+Success 2
+'Test
+Success 3
diff --git a/test/files/run/t4560.scala b/test/files/run/t4560.scala
index 1392077..9979199 100644
--- a/test/files/run/t4560.scala
+++ b/test/files/run/t4560.scala
@@ -1,39 +1,66 @@
-object Pimper {
- implicit def pimp(i: Int) = new {
- def test: String = i.toString
- }
-}
+// SI-4560 (and SI-4601): Reflection caches are expected in the wrong classfiles
+// with various differing constellations of self-types. This leads to runtime exceptions
+// when the reflection caches are accessed. This tests both reflection cache accesses
+// for structural type method invocations (`y.f()`) (SI-4560) and accesses to symbols which are
+// handled similarly (SI-4601)
-trait A
+// TEST 1
+// self-type is other trait
-trait B {
- self: A =>
+trait Aa
+trait Ab
- def test {
- import Pimper.pimp
+trait B {
+ self: Aa with Ab =>
- println(5.test)
+ def y = new { def f() = println("Success 1") }
+ def fail() = {
+ println('Test)
+ y.f()
}
}
+object Test1 extends Aa with Ab with B
+
+// TEST 2
+// self-type is class
+
class A2
trait B2 {
self: A2 =>
- def test {
- import Pimper.pimp
+ def y = new { def f() = println("Success 2") }
+ def fail() = {
+ println('Test)
+ y.f()
+ }
+}
+
+object Test2 extends A2 with B2
+
+// TEST 3
+// self-type is singleton type
+
+trait B3 {
+ this: Test3.type =>
- println(5.test)
+ def y = new { def f() = println("Success 3") }
+ def fail() = {
+ println('Test)
+ y.f()
}
}
-object Test extends A with B {
+object Test3 extends B3 {
+ def test { fail() }
+}
+
+object Test {
def main(args: Array[String]) {
- test
- Test2.test
+ Test1.fail()
+ Test2.fail()
+ Test3.fail()
}
}
-object Test2 extends A2 with B2
-
diff --git a/test/files/run/t4560b.check b/test/files/run/t4560b.check
new file mode 100644
index 0000000..7ee6e19
--- /dev/null
+++ b/test/files/run/t4560b.check
@@ -0,0 +1,2 @@
+23
+SUCCESS
diff --git a/test/files/run/t4560b.scala b/test/files/run/t4560b.scala
new file mode 100644
index 0000000..97fe00c
--- /dev/null
+++ b/test/files/run/t4560b.scala
@@ -0,0 +1,28 @@
+object Outer {
+ class Tester
+ private[Outer] trait B4 { _: Tester =>
+ protected val FREQ = 23
+ def fail() = {
+ println(FREQ)
+ }
+ }
+ object C4 extends Tester with B4
+}
+
+object Outer2 {
+ abstract class A5
+ private[Outer2] trait C5 {
+ def impl() { println("SUCCESS") }
+ }
+ trait B5 extends C5 { self: A5 =>
+ def fail() { impl() }
+ }
+ object Test5 extends A5 with B5 with C5
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ Outer.C4.fail()
+ Outer2.Test5.fail()
+ }
+}
diff --git a/test/files/run/t4565_2.check b/test/files/run/t4565_2.check
deleted file mode 100644
index 96f3dbc..0000000
--- a/test/files/run/t4565_2.check
+++ /dev/null
@@ -1,189 +0,0 @@
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
-ok
diff --git a/test/files/run/t4565_2.scala b/test/files/run/t4565_2.scala
deleted file mode 100644
index 96f4f86..0000000
--- a/test/files/run/t4565_2.scala
+++ /dev/null
@@ -1,6717 +0,0 @@
-
-
-class Class3_1 {
-
- class Class2_2 {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Class1_3).run }
- }
-
- def run { (new Class2_2).run }
-}
-
-
-object Object4_1 {
-
- class Class2_2 {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Class1_3).run }
- }
-
- def run { (new Class2_2).run } // trigger
-}
-
-
-trait Trait5_1 {
-
- class Class2_2 {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Class1_3).run }
- }
-
- def run { (new Class2_2).run }
-}
-
-
-class Class7_1 {
-
- object Object6_2 {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Class1_3).run } // trigger
- }
-
- def run { Object6_2.run }
-}
-
-
-object Object8_1 {
-
- object Object6_2 {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Class1_3).run } // trigger
- }
-
- def run { Object6_2.run } // trigger
-}
-
-
-trait Trait9_1 {
-
- object Object6_2 {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Class1_3).run } // trigger
- }
-
- def run { Object6_2.run }
-}
-
-
-class Class11_1 {
-
- trait Trait10_2 {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Class1_3).run }
- }
-
- def run { (new Trait10_2 {}).run }
-}
-
-
-object Object12_1 {
-
- trait Trait10_2 {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Class1_3).run }
- }
-
- def run { (new Trait10_2 {}).run } // trigger
-}
-
-
-trait Trait13_1 {
-
- trait Trait10_2 {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Class1_3).run }
- }
-
- def run { (new Trait10_2 {}).run }
-}
-
-
-class Class15_1 {
-
- def method14_2 {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Class1_3).run // trigger
- }
-
- def run { method14_2 }
-}
-
-
-object Object16_1 {
-
- def method14_2 {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Class1_3).run // trigger
- }
-
- def run { method14_2 } // trigger
-}
-
-
-trait Trait17_1 {
-
- def method14_2 {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Class1_3).run // trigger
- }
-
- def run { method14_2 }
-}
-
-
-class Class19_1 {
-
- val fun18_2 = () => {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Class1_3).run // trigger
- }
-
- def run { fun18_2() }
-}
-
-
-object Object20_1 {
-
- val fun18_2 = () => {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Class1_3).run // trigger
- }
-
- def run { fun18_2() } // trigger
-}
-
-
-trait Trait21_1 {
-
- val fun18_2 = () => {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Class1_3).run // trigger
- }
-
- def run { fun18_2() }
-}
-
-
-class Class23_1 {
-
- class Class22_2 {
- { // in primary constructor
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Class1_3).run // trigger
- }
- }
-
- def run { (new Class22_2) }
-}
-
-
-object Object24_1 {
-
- class Class22_2 {
- { // in primary constructor
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Class1_3).run // trigger
- }
- }
-
- def run { (new Class22_2) } // trigger
-}
-
-
-trait Trait25_1 {
-
- class Class22_2 {
- { // in primary constructor
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Class1_3).run // trigger
- }
- }
-
- def run { (new Class22_2) }
-}
-
-
-class Class27_1 {
-
- lazy val lzvalue26_2 = {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Class1_3).run // trigger
- }
-
- def run { lzvalue26_2 }
-}
-
-
-object Object28_1 {
-
- lazy val lzvalue26_2 = {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Class1_3).run // trigger
- }
-
- def run { lzvalue26_2 } // trigger
-}
-
-
-trait Trait29_1 {
-
- lazy val lzvalue26_2 = {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Class1_3).run // trigger
- }
-
- def run { lzvalue26_2 }
-}
-
-
-class Class31_1 {
-
- val value30_2 = {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Class1_3).run // trigger
- }
-
- def run { value30_2 }
-}
-
-
-object Object32_1 {
-
- val value30_2 = {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Class1_3).run // trigger
- }
-
- def run { value30_2 } // trigger
-}
-
-
-trait Trait33_1 {
-
- val value30_2 = {
-
- class Class1_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Class1_3).run // trigger
- }
-
- def run { value30_2 }
-}
-
-
-class Class36_1 {
-
- class Class35_2 {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- def run { Object34_3.run }
- }
-
- def run { (new Class35_2).run }
-}
-
-
-object Object37_1 {
-
- class Class35_2 {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- def run { Object34_3.run }
- }
-
- def run { (new Class35_2).run } // trigger
-}
-
-
-trait Trait38_1 {
-
- class Class35_2 {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- def run { Object34_3.run }
- }
-
- def run { (new Class35_2).run }
-}
-
-
-class Class40_1 {
-
- object Object39_2 {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- def run { Object34_3.run } // trigger
- }
-
- def run { Object39_2.run }
-}
-
-
-object Object41_1 {
-
- object Object39_2 {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- def run { Object34_3.run } // trigger
- }
-
- def run { Object39_2.run } // trigger
-}
-
-
-trait Trait42_1 {
-
- object Object39_2 {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- def run { Object34_3.run } // trigger
- }
-
- def run { Object39_2.run }
-}
-
-
-class Class44_1 {
-
- trait Trait43_2 {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- def run { Object34_3.run }
- }
-
- def run { (new Trait43_2 {}).run }
-}
-
-
-object Object45_1 {
-
- trait Trait43_2 {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- def run { Object34_3.run }
- }
-
- def run { (new Trait43_2 {}).run } // trigger
-}
-
-
-trait Trait46_1 {
-
- trait Trait43_2 {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- def run { Object34_3.run }
- }
-
- def run { (new Trait43_2 {}).run }
-}
-
-
-class Class48_1 {
-
- def method47_2 {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- Object34_3.run // trigger
- }
-
- def run { method47_2 }
-}
-
-
-object Object49_1 {
-
- def method47_2 {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- Object34_3.run // trigger
- }
-
- def run { method47_2 } // trigger
-}
-
-
-trait Trait50_1 {
-
- def method47_2 {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- Object34_3.run // trigger
- }
-
- def run { method47_2 }
-}
-
-
-class Class52_1 {
-
- val fun51_2 = () => {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- Object34_3.run // trigger
- }
-
- def run { fun51_2() }
-}
-
-
-object Object53_1 {
-
- val fun51_2 = () => {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- Object34_3.run // trigger
- }
-
- def run { fun51_2() } // trigger
-}
-
-
-trait Trait54_1 {
-
- val fun51_2 = () => {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- Object34_3.run // trigger
- }
-
- def run { fun51_2() }
-}
-
-
-class Class56_1 {
-
- class Class55_2 {
- { // in primary constructor
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- Object34_3.run // trigger
- }
- }
-
- def run { (new Class55_2) }
-}
-
-
-object Object57_1 {
-
- class Class55_2 {
- { // in primary constructor
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- Object34_3.run // trigger
- }
- }
-
- def run { (new Class55_2) } // trigger
-}
-
-
-trait Trait58_1 {
-
- class Class55_2 {
- { // in primary constructor
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- Object34_3.run // trigger
- }
- }
-
- def run { (new Class55_2) }
-}
-
-
-class Class60_1 {
-
- lazy val lzvalue59_2 = {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- Object34_3.run // trigger
- }
-
- def run { lzvalue59_2 }
-}
-
-
-object Object61_1 {
-
- lazy val lzvalue59_2 = {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- Object34_3.run // trigger
- }
-
- def run { lzvalue59_2 } // trigger
-}
-
-
-trait Trait62_1 {
-
- lazy val lzvalue59_2 = {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- Object34_3.run // trigger
- }
-
- def run { lzvalue59_2 }
-}
-
-
-class Class64_1 {
-
- val value63_2 = {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- Object34_3.run // trigger
- }
-
- def run { value63_2 }
-}
-
-
-object Object65_1 {
-
- val value63_2 = {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- Object34_3.run // trigger
- }
-
- def run { value63_2 } // trigger
-}
-
-
-trait Trait66_1 {
-
- val value63_2 = {
-
- object Object34_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest } // trigger
- }
-
- Object34_3.run // trigger
- }
-
- def run { value63_2 }
-}
-
-
-class Class69_1 {
-
- class Class68_2 {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Trait67_3 {}).run }
- }
-
- def run { (new Class68_2).run }
-}
-
-
-object Object70_1 {
-
- class Class68_2 {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Trait67_3 {}).run }
- }
-
- def run { (new Class68_2).run } // trigger
-}
-
-
-trait Trait71_1 {
-
- class Class68_2 {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Trait67_3 {}).run }
- }
-
- def run { (new Class68_2).run }
-}
-
-
-class Class73_1 {
-
- object Object72_2 {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Trait67_3 {}).run } // trigger
- }
-
- def run { Object72_2.run }
-}
-
-
-object Object74_1 {
-
- object Object72_2 {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Trait67_3 {}).run } // trigger
- }
-
- def run { Object72_2.run } // trigger
-}
-
-
-trait Trait75_1 {
-
- object Object72_2 {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Trait67_3 {}).run } // trigger
- }
-
- def run { Object72_2.run }
-}
-
-
-class Class77_1 {
-
- trait Trait76_2 {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Trait67_3 {}).run }
- }
-
- def run { (new Trait76_2 {}).run }
-}
-
-
-object Object78_1 {
-
- trait Trait76_2 {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Trait67_3 {}).run }
- }
-
- def run { (new Trait76_2 {}).run } // trigger
-}
-
-
-trait Trait79_1 {
-
- trait Trait76_2 {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- def run { (new Trait67_3 {}).run }
- }
-
- def run { (new Trait76_2 {}).run }
-}
-
-
-class Class81_1 {
-
- def method80_2 {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Trait67_3 {}).run // trigger
- }
-
- def run { method80_2 }
-}
-
-
-object Object82_1 {
-
- def method80_2 {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Trait67_3 {}).run // trigger
- }
-
- def run { method80_2 } // trigger
-}
-
-
-trait Trait83_1 {
-
- def method80_2 {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Trait67_3 {}).run // trigger
- }
-
- def run { method80_2 }
-}
-
-
-class Class85_1 {
-
- val fun84_2 = () => {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Trait67_3 {}).run // trigger
- }
-
- def run { fun84_2() }
-}
-
-
-object Object86_1 {
-
- val fun84_2 = () => {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Trait67_3 {}).run // trigger
- }
-
- def run { fun84_2() } // trigger
-}
-
-
-trait Trait87_1 {
-
- val fun84_2 = () => {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Trait67_3 {}).run // trigger
- }
-
- def run { fun84_2() }
-}
-
-
-class Class89_1 {
-
- class Class88_2 {
- { // in primary constructor
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Trait67_3 {}).run // trigger
- }
- }
-
- def run { (new Class88_2) }
-}
-
-
-object Object90_1 {
-
- class Class88_2 {
- { // in primary constructor
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Trait67_3 {}).run // trigger
- }
- }
-
- def run { (new Class88_2) } // trigger
-}
-
-
-trait Trait91_1 {
-
- class Class88_2 {
- { // in primary constructor
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Trait67_3 {}).run // trigger
- }
- }
-
- def run { (new Class88_2) }
-}
-
-
-class Class93_1 {
-
- lazy val lzvalue92_2 = {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Trait67_3 {}).run // trigger
- }
-
- def run { lzvalue92_2 }
-}
-
-
-object Object94_1 {
-
- lazy val lzvalue92_2 = {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Trait67_3 {}).run // trigger
- }
-
- def run { lzvalue92_2 } // trigger
-}
-
-
-trait Trait95_1 {
-
- lazy val lzvalue92_2 = {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Trait67_3 {}).run // trigger
- }
-
- def run { lzvalue92_2 }
-}
-
-
-class Class97_1 {
-
- val value96_2 = {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Trait67_3 {}).run // trigger
- }
-
- def run { value96_2 }
-}
-
-
-object Object98_1 {
-
- val value96_2 = {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Trait67_3 {}).run // trigger
- }
-
- def run { value96_2 } // trigger
-}
-
-
-trait Trait99_1 {
-
- val value96_2 = {
-
- trait Trait67_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- def run { runTest }
- }
-
- (new Trait67_3 {}).run // trigger
- }
-
- def run { value96_2 }
-}
-
-
-class Class102_1 {
-
- class Class101_2 {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { method100_3 }
- }
-
- def run { (new Class101_2).run }
-}
-
-
-object Object103_1 {
-
- class Class101_2 {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { method100_3 }
- }
-
- def run { (new Class101_2).run } // trigger
-}
-
-
-trait Trait104_1 {
-
- class Class101_2 {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { method100_3 }
- }
-
- def run { (new Class101_2).run }
-}
-
-
-class Class106_1 {
-
- object Object105_2 {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { method100_3 } // trigger
- }
-
- def run { Object105_2.run }
-}
-
-
-object Object107_1 {
-
- object Object105_2 {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { method100_3 } // trigger
- }
-
- def run { Object105_2.run } // trigger
-}
-
-
-trait Trait108_1 {
-
- object Object105_2 {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { method100_3 } // trigger
- }
-
- def run { Object105_2.run }
-}
-
-
-class Class110_1 {
-
- trait Trait109_2 {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { method100_3 }
- }
-
- def run { (new Trait109_2 {}).run }
-}
-
-
-object Object111_1 {
-
- trait Trait109_2 {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { method100_3 }
- }
-
- def run { (new Trait109_2 {}).run } // trigger
-}
-
-
-trait Trait112_1 {
-
- trait Trait109_2 {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { method100_3 }
- }
-
- def run { (new Trait109_2 {}).run }
-}
-
-
-class Class114_1 {
-
- def method113_2 {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- method100_3 // trigger
- }
-
- def run { method113_2 }
-}
-
-
-object Object115_1 {
-
- def method113_2 {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- method100_3 // trigger
- }
-
- def run { method113_2 } // trigger
-}
-
-
-trait Trait116_1 {
-
- def method113_2 {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- method100_3 // trigger
- }
-
- def run { method113_2 }
-}
-
-
-class Class118_1 {
-
- val fun117_2 = () => {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- method100_3 // trigger
- }
-
- def run { fun117_2() }
-}
-
-
-object Object119_1 {
-
- val fun117_2 = () => {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- method100_3 // trigger
- }
-
- def run { fun117_2() } // trigger
-}
-
-
-trait Trait120_1 {
-
- val fun117_2 = () => {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- method100_3 // trigger
- }
-
- def run { fun117_2() }
-}
-
-
-class Class122_1 {
-
- class Class121_2 {
- { // in primary constructor
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- method100_3 // trigger
- }
- }
-
- def run { (new Class121_2) }
-}
-
-
-object Object123_1 {
-
- class Class121_2 {
- { // in primary constructor
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- method100_3 // trigger
- }
- }
-
- def run { (new Class121_2) } // trigger
-}
-
-
-trait Trait124_1 {
-
- class Class121_2 {
- { // in primary constructor
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- method100_3 // trigger
- }
- }
-
- def run { (new Class121_2) }
-}
-
-
-class Class126_1 {
-
- lazy val lzvalue125_2 = {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- method100_3 // trigger
- }
-
- def run { lzvalue125_2 }
-}
-
-
-object Object127_1 {
-
- lazy val lzvalue125_2 = {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- method100_3 // trigger
- }
-
- def run { lzvalue125_2 } // trigger
-}
-
-
-trait Trait128_1 {
-
- lazy val lzvalue125_2 = {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- method100_3 // trigger
- }
-
- def run { lzvalue125_2 }
-}
-
-
-class Class130_1 {
-
- val value129_2 = {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- method100_3 // trigger
- }
-
- def run { value129_2 }
-}
-
-
-object Object131_1 {
-
- val value129_2 = {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- method100_3 // trigger
- }
-
- def run { value129_2 } // trigger
-}
-
-
-trait Trait132_1 {
-
- val value129_2 = {
-
- def method100_3 {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- method100_3 // trigger
- }
-
- def run { value129_2 }
-}
-
-
-class Class135_1 {
-
- class Class134_2 {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { fun133_3() }
- }
-
- def run { (new Class134_2).run }
-}
-
-
-object Object136_1 {
-
- class Class134_2 {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { fun133_3() }
- }
-
- def run { (new Class134_2).run } // trigger
-}
-
-
-trait Trait137_1 {
-
- class Class134_2 {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { fun133_3() }
- }
-
- def run { (new Class134_2).run }
-}
-
-
-class Class139_1 {
-
- object Object138_2 {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { fun133_3() } // trigger
- }
-
- def run { Object138_2.run }
-}
-
-
-object Object140_1 {
-
- object Object138_2 {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { fun133_3() } // trigger
- }
-
- def run { Object138_2.run } // trigger
-}
-
-
-trait Trait141_1 {
-
- object Object138_2 {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { fun133_3() } // trigger
- }
-
- def run { Object138_2.run }
-}
-
-
-class Class143_1 {
-
- trait Trait142_2 {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { fun133_3() }
- }
-
- def run { (new Trait142_2 {}).run }
-}
-
-
-object Object144_1 {
-
- trait Trait142_2 {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { fun133_3() }
- }
-
- def run { (new Trait142_2 {}).run } // trigger
-}
-
-
-trait Trait145_1 {
-
- trait Trait142_2 {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { fun133_3() }
- }
-
- def run { (new Trait142_2 {}).run }
-}
-
-
-class Class147_1 {
-
- def method146_2 {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- fun133_3() // trigger
- }
-
- def run { method146_2 }
-}
-
-
-object Object148_1 {
-
- def method146_2 {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- fun133_3() // trigger
- }
-
- def run { method146_2 } // trigger
-}
-
-
-trait Trait149_1 {
-
- def method146_2 {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- fun133_3() // trigger
- }
-
- def run { method146_2 }
-}
-
-
-class Class151_1 {
-
- val fun150_2 = () => {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- fun133_3() // trigger
- }
-
- def run { fun150_2() }
-}
-
-
-object Object152_1 {
-
- val fun150_2 = () => {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- fun133_3() // trigger
- }
-
- def run { fun150_2() } // trigger
-}
-
-
-trait Trait153_1 {
-
- val fun150_2 = () => {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- fun133_3() // trigger
- }
-
- def run { fun150_2() }
-}
-
-
-class Class155_1 {
-
- class Class154_2 {
- { // in primary constructor
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- fun133_3() // trigger
- }
- }
-
- def run { (new Class154_2) }
-}
-
-
-object Object156_1 {
-
- class Class154_2 {
- { // in primary constructor
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- fun133_3() // trigger
- }
- }
-
- def run { (new Class154_2) } // trigger
-}
-
-
-trait Trait157_1 {
-
- class Class154_2 {
- { // in primary constructor
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- fun133_3() // trigger
- }
- }
-
- def run { (new Class154_2) }
-}
-
-
-class Class159_1 {
-
- lazy val lzvalue158_2 = {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- fun133_3() // trigger
- }
-
- def run { lzvalue158_2 }
-}
-
-
-object Object160_1 {
-
- lazy val lzvalue158_2 = {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- fun133_3() // trigger
- }
-
- def run { lzvalue158_2 } // trigger
-}
-
-
-trait Trait161_1 {
-
- lazy val lzvalue158_2 = {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- fun133_3() // trigger
- }
-
- def run { lzvalue158_2 }
-}
-
-
-class Class163_1 {
-
- val value162_2 = {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- fun133_3() // trigger
- }
-
- def run { value162_2 }
-}
-
-
-object Object164_1 {
-
- val value162_2 = {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- fun133_3() // trigger
- }
-
- def run { value162_2 } // trigger
-}
-
-
-trait Trait165_1 {
-
- val value162_2 = {
-
- val fun133_3 = () => {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- fun133_3() // trigger
- }
-
- def run { value162_2 }
-}
-
-
-class Class168_1 {
-
- class Class167_2 {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- def run { (new Class166_3) }
- }
-
- def run { (new Class167_2).run }
-}
-
-
-object Object169_1 {
-
- class Class167_2 {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- def run { (new Class166_3) }
- }
-
- def run { (new Class167_2).run } // trigger
-}
-
-
-trait Trait170_1 {
-
- class Class167_2 {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- def run { (new Class166_3) }
- }
-
- def run { (new Class167_2).run }
-}
-
-
-class Class172_1 {
-
- object Object171_2 {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- def run { (new Class166_3) } // trigger
- }
-
- def run { Object171_2.run }
-}
-
-
-object Object173_1 {
-
- object Object171_2 {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- def run { (new Class166_3) } // trigger
- }
-
- def run { Object171_2.run } // trigger
-}
-
-
-trait Trait174_1 {
-
- object Object171_2 {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- def run { (new Class166_3) } // trigger
- }
-
- def run { Object171_2.run }
-}
-
-
-class Class176_1 {
-
- trait Trait175_2 {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- def run { (new Class166_3) }
- }
-
- def run { (new Trait175_2 {}).run }
-}
-
-
-object Object177_1 {
-
- trait Trait175_2 {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- def run { (new Class166_3) }
- }
-
- def run { (new Trait175_2 {}).run } // trigger
-}
-
-
-trait Trait178_1 {
-
- trait Trait175_2 {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- def run { (new Class166_3) }
- }
-
- def run { (new Trait175_2 {}).run }
-}
-
-
-class Class180_1 {
-
- def method179_2 {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- (new Class166_3) // trigger
- }
-
- def run { method179_2 }
-}
-
-
-object Object181_1 {
-
- def method179_2 {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- (new Class166_3) // trigger
- }
-
- def run { method179_2 } // trigger
-}
-
-
-trait Trait182_1 {
-
- def method179_2 {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- (new Class166_3) // trigger
- }
-
- def run { method179_2 }
-}
-
-
-class Class184_1 {
-
- val fun183_2 = () => {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- (new Class166_3) // trigger
- }
-
- def run { fun183_2() }
-}
-
-
-object Object185_1 {
-
- val fun183_2 = () => {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- (new Class166_3) // trigger
- }
-
- def run { fun183_2() } // trigger
-}
-
-
-trait Trait186_1 {
-
- val fun183_2 = () => {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- (new Class166_3) // trigger
- }
-
- def run { fun183_2() }
-}
-
-
-class Class188_1 {
-
- class Class187_2 {
- { // in primary constructor
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- (new Class166_3) // trigger
- }
- }
-
- def run { (new Class187_2) }
-}
-
-
-object Object189_1 {
-
- class Class187_2 {
- { // in primary constructor
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- (new Class166_3) // trigger
- }
- }
-
- def run { (new Class187_2) } // trigger
-}
-
-
-trait Trait190_1 {
-
- class Class187_2 {
- { // in primary constructor
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- (new Class166_3) // trigger
- }
- }
-
- def run { (new Class187_2) }
-}
-
-
-class Class192_1 {
-
- lazy val lzvalue191_2 = {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- (new Class166_3) // trigger
- }
-
- def run { lzvalue191_2 }
-}
-
-
-object Object193_1 {
-
- lazy val lzvalue191_2 = {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- (new Class166_3) // trigger
- }
-
- def run { lzvalue191_2 } // trigger
-}
-
-
-trait Trait194_1 {
-
- lazy val lzvalue191_2 = {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- (new Class166_3) // trigger
- }
-
- def run { lzvalue191_2 }
-}
-
-
-class Class196_1 {
-
- val value195_2 = {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- (new Class166_3) // trigger
- }
-
- def run { value195_2 }
-}
-
-
-object Object197_1 {
-
- val value195_2 = {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- (new Class166_3) // trigger
- }
-
- def run { value195_2 } // trigger
-}
-
-
-trait Trait198_1 {
-
- val value195_2 = {
-
- class Class166_3 {
- { // in primary constructor
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
- }
-
- (new Class166_3) // trigger
- }
-
- def run { value195_2 }
-}
-
-
-class Class201_1 {
-
- class Class200_2 {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { lzvalue199_3 }
- }
-
- def run { (new Class200_2).run }
-}
-
-
-object Object202_1 {
-
- class Class200_2 {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { lzvalue199_3 }
- }
-
- def run { (new Class200_2).run } // trigger
-}
-
-
-trait Trait203_1 {
-
- class Class200_2 {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { lzvalue199_3 }
- }
-
- def run { (new Class200_2).run }
-}
-
-
-class Class205_1 {
-
- object Object204_2 {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { lzvalue199_3 } // trigger
- }
-
- def run { Object204_2.run }
-}
-
-
-object Object206_1 {
-
- object Object204_2 {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { lzvalue199_3 } // trigger
- }
-
- def run { Object204_2.run } // trigger
-}
-
-
-trait Trait207_1 {
-
- object Object204_2 {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { lzvalue199_3 } // trigger
- }
-
- def run { Object204_2.run }
-}
-
-
-class Class209_1 {
-
- trait Trait208_2 {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { lzvalue199_3 }
- }
-
- def run { (new Trait208_2 {}).run }
-}
-
-
-object Object210_1 {
-
- trait Trait208_2 {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { lzvalue199_3 }
- }
-
- def run { (new Trait208_2 {}).run } // trigger
-}
-
-
-trait Trait211_1 {
-
- trait Trait208_2 {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { lzvalue199_3 }
- }
-
- def run { (new Trait208_2 {}).run }
-}
-
-
-class Class213_1 {
-
- def method212_2 {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- lzvalue199_3 // trigger
- }
-
- def run { method212_2 }
-}
-
-
-object Object214_1 {
-
- def method212_2 {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- lzvalue199_3 // trigger
- }
-
- def run { method212_2 } // trigger
-}
-
-
-trait Trait215_1 {
-
- def method212_2 {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- lzvalue199_3 // trigger
- }
-
- def run { method212_2 }
-}
-
-
-class Class217_1 {
-
- val fun216_2 = () => {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- lzvalue199_3 // trigger
- }
-
- def run { fun216_2() }
-}
-
-
-object Object218_1 {
-
- val fun216_2 = () => {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- lzvalue199_3 // trigger
- }
-
- def run { fun216_2() } // trigger
-}
-
-
-trait Trait219_1 {
-
- val fun216_2 = () => {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- lzvalue199_3 // trigger
- }
-
- def run { fun216_2() }
-}
-
-
-class Class221_1 {
-
- class Class220_2 {
- { // in primary constructor
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- lzvalue199_3 // trigger
- }
- }
-
- def run { (new Class220_2) }
-}
-
-
-object Object222_1 {
-
- class Class220_2 {
- { // in primary constructor
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- lzvalue199_3 // trigger
- }
- }
-
- def run { (new Class220_2) } // trigger
-}
-
-
-trait Trait223_1 {
-
- class Class220_2 {
- { // in primary constructor
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- lzvalue199_3 // trigger
- }
- }
-
- def run { (new Class220_2) }
-}
-
-
-class Class225_1 {
-
- lazy val lzvalue224_2 = {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- lzvalue199_3 // trigger
- }
-
- def run { lzvalue224_2 }
-}
-
-
-object Object226_1 {
-
- lazy val lzvalue224_2 = {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- lzvalue199_3 // trigger
- }
-
- def run { lzvalue224_2 } // trigger
-}
-
-
-trait Trait227_1 {
-
- lazy val lzvalue224_2 = {
-
- lazy val lzvalue199_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- lzvalue199_3 // trigger
- }
-
- def run { lzvalue224_2 }
-}
-
-
-
-class Class234_1 {
-
- class Class233_2 {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { value232_3 }
- }
-
- def run { (new Class233_2).run }
-}
-
-
-object Object235_1 {
-
- class Class233_2 {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { value232_3 }
- }
-
- def run { (new Class233_2).run } // trigger
-}
-
-
-trait Trait236_1 {
-
- class Class233_2 {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { value232_3 }
- }
-
- def run { (new Class233_2).run }
-}
-
-
-class Class238_1 {
-
- object Object237_2 {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { value232_3 } // trigger
- }
-
- def run { Object237_2.run }
-}
-
-
-object Object239_1 {
-
- object Object237_2 {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { value232_3 } // trigger
- }
-
- def run { Object237_2.run } // trigger
-}
-
-
-trait Trait240_1 {
-
- object Object237_2 {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { value232_3 } // trigger
- }
-
- def run { Object237_2.run }
-}
-
-
-class Class242_1 {
-
- trait Trait241_2 {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { value232_3 }
- }
-
- def run { (new Trait241_2 {}).run }
-}
-
-
-object Object243_1 {
-
- trait Trait241_2 {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { value232_3 }
- }
-
- def run { (new Trait241_2 {}).run } // trigger
-}
-
-
-trait Trait244_1 {
-
- trait Trait241_2 {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- def run { value232_3 }
- }
-
- def run { (new Trait241_2 {}).run }
-}
-
-
-class Class246_1 {
-
- def method245_2 {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- value232_3 // trigger
- }
-
- def run { method245_2 }
-}
-
-
-object Object247_1 {
-
- def method245_2 {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- value232_3 // trigger
- }
-
- def run { method245_2 } // trigger
-}
-
-
-trait Trait248_1 {
-
- def method245_2 {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- value232_3 // trigger
- }
-
- def run { method245_2 }
-}
-
-
-class Class250_1 {
-
- val fun249_2 = () => {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- value232_3 // trigger
- }
-
- def run { fun249_2() }
-}
-
-
-object Object251_1 {
-
- val fun249_2 = () => {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- value232_3 // trigger
- }
-
- def run { fun249_2() } // trigger
-}
-
-
-trait Trait252_1 {
-
- val fun249_2 = () => {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- value232_3 // trigger
- }
-
- def run { fun249_2() }
-}
-
-
-class Class254_1 {
-
- class Class253_2 {
- { // in primary constructor
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- value232_3 // trigger
- }
- }
-
- def run { (new Class253_2) }
-}
-
-
-object Object255_1 {
-
- class Class253_2 {
- { // in primary constructor
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- value232_3 // trigger
- }
- }
-
- def run { (new Class253_2) } // trigger
-}
-
-
-trait Trait256_1 {
-
- class Class253_2 {
- { // in primary constructor
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- value232_3 // trigger
- }
- }
-
- def run { (new Class253_2) }
-}
-
-
-class Class258_1 {
-
- lazy val lzvalue257_2 = {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- value232_3 // trigger
- }
-
- def run { lzvalue257_2 }
-}
-
-
-object Object259_1 {
-
- lazy val lzvalue257_2 = {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- value232_3 // trigger
- }
-
- def run { lzvalue257_2 } // trigger
-}
-
-
-trait Trait260_1 {
-
- lazy val lzvalue257_2 = {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- value232_3 // trigger
- }
-
- def run { lzvalue257_2 }
-}
-
-
-class Class262_1 {
-
- val value261_2 = {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- value232_3 // trigger
- }
-
- def run { value261_2 }
-}
-
-
-object Object263_1 {
-
- val value261_2 = {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- value232_3 // trigger
- }
-
- def run { value261_2 } // trigger
-}
-
-
-trait Trait264_1 {
-
- val value261_2 = {
-
- val value232_3 = {
- var ObjCounter = 0
-
- object Obj { ObjCounter += 1}
- Obj // one
-
- def singleThreadedAccess(x: Any) = {
- x == Obj
- }
-
- def runTest {
- try {
- assert(singleThreadedAccess(Obj))
- assert(ObjCounter == 1)
- } catch {
- case e => print("failed "); e.printStackTrace()
- }
- println("ok")
- }
-
- runTest // trigger
- }
-
- value232_3 // trigger
- }
-
- def run { value261_2 }
-}
-
-
-object Test extends App {
- (new Class3_1).run
- Object4_1.run
- (new Trait5_1 {}).run
- (new Class7_1).run
- Object8_1.run
- (new Trait9_1 {}).run
- (new Class11_1).run
- Object12_1.run
- (new Trait13_1 {}).run
- (new Class15_1).run
- Object16_1.run
- (new Trait17_1 {}).run
- (new Class19_1).run
- Object20_1.run
- (new Trait21_1 {}).run
- (new Class23_1).run
- Object24_1.run
- (new Trait25_1 {}).run
- (new Class27_1).run
- Object28_1.run
- (new Trait29_1 {}).run
- (new Class31_1).run
- Object32_1.run
- (new Trait33_1 {}).run
- (new Class36_1).run
- Object37_1.run
- (new Trait38_1 {}).run
- (new Class40_1).run
- Object41_1.run
- (new Trait42_1 {}).run
- (new Class44_1).run
- Object45_1.run
- (new Trait46_1 {}).run
- (new Class48_1).run
- Object49_1.run
- (new Trait50_1 {}).run
- (new Class52_1).run
- Object53_1.run
- (new Trait54_1 {}).run
- (new Class56_1).run
- Object57_1.run
- (new Trait58_1 {}).run
- (new Class60_1).run
- Object61_1.run
- (new Trait62_1 {}).run
- (new Class64_1).run
- Object65_1.run
- (new Trait66_1 {}).run
- (new Class69_1).run
- Object70_1.run
- (new Trait71_1 {}).run
- (new Class73_1).run
- Object74_1.run
- (new Trait75_1 {}).run
- (new Class77_1).run
- Object78_1.run
- (new Trait79_1 {}).run
- (new Class81_1).run
- Object82_1.run
- (new Trait83_1 {}).run
- (new Class85_1).run
- Object86_1.run
- (new Trait87_1 {}).run
- (new Class89_1).run
- Object90_1.run
- (new Trait91_1 {}).run
- (new Class93_1).run
- Object94_1.run
- (new Trait95_1 {}).run
- (new Class97_1).run
- Object98_1.run
- (new Trait99_1 {}).run
- (new Class102_1).run
- Object103_1.run
- (new Trait104_1 {}).run
- (new Class106_1).run
- Object107_1.run
- (new Trait108_1 {}).run
- (new Class110_1).run
- Object111_1.run
- (new Trait112_1 {}).run
- (new Class114_1).run
- Object115_1.run
- (new Trait116_1 {}).run
- (new Class118_1).run
- Object119_1.run
- (new Trait120_1 {}).run
- (new Class122_1).run
- Object123_1.run
- (new Trait124_1 {}).run
- (new Class126_1).run
- Object127_1.run
- (new Trait128_1 {}).run
- (new Class130_1).run
- Object131_1.run
- (new Trait132_1 {}).run
- (new Class135_1).run
- Object136_1.run
- (new Trait137_1 {}).run
- (new Class139_1).run
- Object140_1.run
- (new Trait141_1 {}).run
- (new Class143_1).run
- Object144_1.run
- (new Trait145_1 {}).run
- (new Class147_1).run
- Object148_1.run
- (new Trait149_1 {}).run
- (new Class151_1).run
- Object152_1.run
- (new Trait153_1 {}).run
- (new Class155_1).run
- Object156_1.run
- (new Trait157_1 {}).run
- (new Class159_1).run
- Object160_1.run
- (new Trait161_1 {}).run
- (new Class163_1).run
- Object164_1.run
- (new Trait165_1 {}).run
- (new Class168_1).run
- Object169_1.run
- (new Trait170_1 {}).run
- (new Class172_1).run
- Object173_1.run
- (new Trait174_1 {}).run
- (new Class176_1).run
- Object177_1.run
- (new Trait178_1 {}).run
- (new Class180_1).run
- Object181_1.run
- (new Trait182_1 {}).run
- (new Class184_1).run
- Object185_1.run
- (new Trait186_1 {}).run
- (new Class188_1).run
- Object189_1.run
- (new Trait190_1 {}).run
- (new Class192_1).run
- Object193_1.run
- (new Trait194_1 {}).run
- (new Class196_1).run
- Object197_1.run
- (new Trait198_1 {}).run
- (new Class201_1).run
- Object202_1.run
- (new Trait203_1 {}).run
- (new Class205_1).run
- Object206_1.run
- (new Trait207_1 {}).run
- (new Class209_1).run
- Object210_1.run
- (new Trait211_1 {}).run
- (new Class213_1).run
- Object214_1.run
- (new Trait215_1 {}).run
- (new Class217_1).run
- Object218_1.run
- (new Trait219_1 {}).run
- (new Class221_1).run
- Object222_1.run
- (new Trait223_1 {}).run
- (new Class225_1).run
- Object226_1.run
- (new Trait227_1 {}).run
- (new Class234_1).run
- Object235_1.run
- (new Trait236_1 {}).run
- (new Class238_1).run
- Object239_1.run
- (new Trait240_1 {}).run
- (new Class242_1).run
- Object243_1.run
- (new Trait244_1 {}).run
- (new Class246_1).run
- Object247_1.run
- (new Trait248_1 {}).run
- (new Class250_1).run
- Object251_1.run
- (new Trait252_1 {}).run
- (new Class254_1).run
- Object255_1.run
- (new Trait256_1 {}).run
- (new Class258_1).run
- Object259_1.run
- (new Trait260_1 {}).run
- (new Class262_1).run
- Object263_1.run
- (new Trait264_1 {}).run
-}
-
diff --git a/test/files/run/bug4570.check b/test/files/run/t4570.check
similarity index 100%
copy from test/files/run/bug4570.check
copy to test/files/run/t4570.check
diff --git a/test/files/run/bug4570.scala b/test/files/run/t4570.scala
similarity index 100%
rename from test/files/run/bug4570.scala
rename to test/files/run/t4570.scala
diff --git a/test/files/run/t4574.check b/test/files/run/t4574.check
new file mode 100644
index 0000000..a4522ff
--- /dev/null
+++ b/test/files/run/t4574.check
@@ -0,0 +1,2 @@
+I hereby refute null!
+I denounce null as unListLike!
diff --git a/test/files/run/t4582.scala b/test/files/run/t4582.scala
new file mode 100644
index 0000000..f747791
--- /dev/null
+++ b/test/files/run/t4582.scala
@@ -0,0 +1,11 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ var flag = true
+ def it = {
+ flag = false
+ Iterator(2)
+ }
+ val flat = (Iterator(Iterator(1)) ++ Iterator(it)).flatten
+ assert(flag)
+ }
+}
diff --git a/test/files/run/t4592.check b/test/files/run/t4592.check
new file mode 100644
index 0000000..e133386
--- /dev/null
+++ b/test/files/run/t4592.check
@@ -0,0 +1,3 @@
+3.14
+3.14
+3.14
diff --git a/test/files/run/t4592.scala b/test/files/run/t4592.scala
new file mode 100644
index 0000000..d1666d8
--- /dev/null
+++ b/test/files/run/t4592.scala
@@ -0,0 +1,10 @@
+object Test {
+ def repeat[T](count: Int = 1, x: Boolean = true)(thunk: => T) : T = (0 until count).map(_ => thunk).last
+ def repeat[T](thunk: => T) : T = repeat()(thunk)
+
+ def main(args: Array[String]): Unit = {
+ println(repeat(3.14))
+ println(repeat(count=5)(3.14))
+ println(repeat(count=5,x=false)(3.14))
+ }
+}
diff --git a/test/files/run/t4601.check b/test/files/run/t4601.check
new file mode 100644
index 0000000..83b0b0b
--- /dev/null
+++ b/test/files/run/t4601.check
@@ -0,0 +1 @@
+'blubber
diff --git a/test/files/run/t4601.scala b/test/files/run/t4601.scala
new file mode 100644
index 0000000..fe40634
--- /dev/null
+++ b/test/files/run/t4601.scala
@@ -0,0 +1,15 @@
+class A
+
+trait B {
+ self: A =>
+
+ def test {
+ println('blubber)
+ }
+}
+
+object Test extends A with B {
+ def main(args: Array[String]) {
+ test
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t4608.scala b/test/files/run/t4608.scala
index 3601adc..2d43beb 100644
--- a/test/files/run/t4608.scala
+++ b/test/files/run/t4608.scala
@@ -1,6 +1,3 @@
-
-
-
// #4608
object Test {
diff --git a/test/files/run/bug4617.check b/test/files/run/t4617.check
similarity index 100%
rename from test/files/run/bug4617.check
rename to test/files/run/t4617.check
diff --git a/test/files/run/bug4617.scala b/test/files/run/t4617.scala
similarity index 100%
rename from test/files/run/bug4617.scala
rename to test/files/run/t4617.scala
diff --git a/test/files/run/bug4656.check b/test/files/run/t4656.check
similarity index 100%
rename from test/files/run/bug4656.check
rename to test/files/run/t4656.check
diff --git a/test/files/run/bug4656.scala b/test/files/run/t4656.scala
similarity index 100%
rename from test/files/run/bug4656.scala
rename to test/files/run/t4656.scala
diff --git a/test/files/run/t4658.check b/test/files/run/t4658.check
new file mode 100644
index 0000000..bb64051
--- /dev/null
+++ b/test/files/run/t4658.check
@@ -0,0 +1,80 @@
+Ranges:
+1073741824
+1073741824
+0
+0
+55
+25
+1
+-45
+-55
+0
+-24
+-30
+0
+-40
+-55
+-10
+-24
+-30
+-10
+IntRanges:
+-1073741824
+-1073741824
+0
+0
+55
+25
+1
+-45
+-55
+0
+-24
+-30
+0
+-40
+-55
+-10
+-24
+-30
+-10
+LongRanges:
+2305843008139952128
+-2305843008139952128
+0
+0
+55
+25
+1
+-45
+-55
+0
+-24
+-30
+0
+-40
+-55
+-10
+-24
+-30
+-10
+BigIntRanges:
+2305843008139952128
+-2305843008139952128
+0
+0
+55
+25
+1
+-45
+-55
+0
+-24
+-30
+0
+-40
+-55
+-10
+-24
+-30
+-10
diff --git a/test/files/run/t4658.scala b/test/files/run/t4658.scala
new file mode 100644
index 0000000..8c07c50
--- /dev/null
+++ b/test/files/run/t4658.scala
@@ -0,0 +1,33 @@
+import scala.collection.immutable.NumericRange
+//#4658
+object Test {
+
+ case class R(start: Int, end: Int, step: Int = 1, inclusive: Boolean = true)
+
+ val rangeData = Array(
+ R(1, Int.MaxValue), R(-Int.MaxValue, -1), R(0, 0), R(0,0, inclusive = false), R(1,10),
+ R(1,10,2), R(1,10,11), R(-10, -5), R(-10, 0), R(-10, 10), R(-10, -5, 2), R(-10, 0, 2), R(-10, 10, 2),
+ R(-10, -5, inclusive = false), R(-10, 0, inclusive = false), R(-10, 10, inclusive = false),
+ R(-10, -5, 2, inclusive = false), R(-10, 0, 2, inclusive = false), R(-10, 10, 2, inclusive = false)
+ )
+
+ def ranges = rangeData.map(r => if (r.inclusive) r.start to r.end by r.step else r.start until r.end by r.step)
+
+ def numericIntRanges = rangeData.map(r => if (r.inclusive) NumericRange.inclusive(r.start, r.end, r.step) else NumericRange(r.start, r.end, r.step))
+
+ def numericLongRanges = rangeData.map(r => if (r.inclusive) NumericRange.inclusive(r.start.toLong, r.end, r.step) else NumericRange(r.start.toLong, r.end, r.step))
+
+ def numericBigIntRanges = rangeData.map(r => if (r.inclusive) NumericRange.inclusive(BigInt(r.start), BigInt(r.end), BigInt(r.step)) else NumericRange(BigInt(r.start), BigInt(r.end), BigInt(r.step)))
+
+ def main(args: Array[String]) {
+ println("Ranges:")
+ ranges.foreach{range => println(range.sum)}
+ println("IntRanges:")
+ numericIntRanges.foreach{range => println(range.sum)}
+ println("LongRanges:")
+ numericLongRanges.foreach{range => println(range.sum)}
+ println("BigIntRanges:")
+ numericBigIntRanges.foreach{range => println(range.sum)}
+ }
+
+}
\ No newline at end of file
diff --git a/test/files/run/bug4660.scala b/test/files/run/t4660.scala
similarity index 100%
rename from test/files/run/bug4660.scala
rename to test/files/run/t4660.scala
diff --git a/test/files/run/t4671.check b/test/files/run/t4671.check
new file mode 100644
index 0000000..4699818
--- /dev/null
+++ b/test/files/run/t4671.check
@@ -0,0 +1,46 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> object o { val file = sys.props("partest.cwd") + "/t4671.scala" }
+defined module o
+
+scala> val s = scala.io.Source.fromFile(o.file)
+s: scala.io.BufferedSource = non-empty iterator
+
+scala> println(s.getLines.mkString("\n"))
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ // My god...it's full of quines
+ def code = """
+object o { val file = sys.props("partest.cwd") + "/t4671.scala" }
+val s = scala.io.Source.fromFile(o.file)
+println(s.getLines.mkString("\n"))
+
+val s = scala.io.Source.fromFile(o.file)
+println(s.mkString(""))
+""".trim
+}
+
+scala>
+
+scala> val s = scala.io.Source.fromFile(o.file)
+s: scala.io.BufferedSource = non-empty iterator
+
+scala> println(s.mkString(""))
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ // My god...it's full of quines
+ def code = """
+object o { val file = sys.props("partest.cwd") + "/t4671.scala" }
+val s = scala.io.Source.fromFile(o.file)
+println(s.getLines.mkString("\n"))
+
+val s = scala.io.Source.fromFile(o.file)
+println(s.mkString(""))
+""".trim
+}
+
+
+scala>
diff --git a/test/files/run/t4671.scala b/test/files/run/t4671.scala
new file mode 100644
index 0000000..aba0138
--- /dev/null
+++ b/test/files/run/t4671.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ // My god...it's full of quines
+ def code = """
+object o { val file = sys.props("partest.cwd") + "/t4671.scala" }
+val s = scala.io.Source.fromFile(o.file)
+println(s.getLines.mkString("\n"))
+
+val s = scala.io.Source.fromFile(o.file)
+println(s.mkString(""))
+""".trim
+}
diff --git a/test/files/run/t4680.check b/test/files/run/t4680.check
new file mode 100644
index 0000000..b5cfc65
--- /dev/null
+++ b/test/files/run/t4680.check
@@ -0,0 +1,60 @@
+
+
+// new C { }
+-A -B -C
+
+// new C { 5 }
+-A -B -C
+A+ B+ C+
+
+// new D()
+-A -B -C -D
+A+ B+ C+ D+
+
+// new D() { }
+-A -B -C -D
+A+ B+ C+ D+
+
+// new D() { val x = 5 }
+-A -B -C -D
+A+ B+ C+ D+
+A+ B+ C+ D+
+
+// new { val x = 5 } with D()
+-A -B -C -D
+A+ B+ C+ D+
+
+// new E() { val x = 5 }
+-A -B -C -D
+A+ B+ C+ D+ E+
+-E
+
+A+ B+ C+ D+ E+
+
+A+ B+ C+ D+ E+
+
+
+// new { val x = 5 } with E()
+-A -B -C -D
+A+ B+ C+ D+ E+
+-E
+
+A+ B+ C+ D+ E+
+
+
+// new { val x = 5 } with E() { }
+-A -B -C -D
+A+ B+ C+ D+ E+
+-E
+
+A+ B+ C+ D+ E+
+
+
+// new { val x = 5 } with E() { 5 }
+-A -B -C -D
+A+ B+ C+ D+ E+
+-E
+
+A+ B+ C+ D+ E+
+
+A+ B+ C+ D+ E+
diff --git a/test/files/run/t4680.scala b/test/files/run/t4680.scala
new file mode 100644
index 0000000..88611df
--- /dev/null
+++ b/test/files/run/t4680.scala
@@ -0,0 +1,71 @@
+trait A extends DelayedInit {
+ print("-A ")
+
+ def delayedInit(body: => Unit) = {
+ body
+ postConstructionCode
+ }
+ protected def postConstructionCode: Unit = {
+ print("\nA+ ")
+ }
+}
+trait B extends A {
+ print("-B ")
+ override protected def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print("B+ ")
+ }
+}
+
+trait C extends B {
+ print("-C ")
+ override protected def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print("C+ ")
+ }
+}
+
+class D() extends C {
+ print("-D ")
+ override protected def postConstructionCode: Unit = {
+ super.postConstructionCode
+ print("D+ ")
+ }
+}
+class E() extends D() {
+ println("-E")
+ override protected def postConstructionCode: Unit = {
+ super.postConstructionCode
+ println("E+")
+ }
+}
+
+
+object Test {
+ def p(msg: String) = println("\n\n// " + msg)
+
+ def main(args: Array[String]) {
+ p("new C { }")
+ new C { }
+ p("new C { 5 }")
+ new C { 5 }
+
+ p("new D()")
+ new D()
+ p("new D() { }")
+ new D() { }
+ p("new D() { val x = 5 }")
+ new D() { val x = 5 }
+ p("new { val x = 5 } with D()")
+ new { val x = 5 } with D()
+
+ p("new E() { val x = 5 }")
+ new E() { val x = 5 }
+ p("new { val x = 5 } with E()")
+ new { val x = 5 } with E()
+ p("new { val x = 5 } with E() { }")
+ new { val x = 5 } with E() { }
+ p("new { val x = 5 } with E() { 5 }")
+ new { val x = 5 } with E() { 5 }
+ }
+}
diff --git a/test/files/run/bug4697.check b/test/files/run/t4697.check
similarity index 100%
rename from test/files/run/bug4697.check
rename to test/files/run/t4697.check
diff --git a/test/files/run/bug4697.scala b/test/files/run/t4697.scala
similarity index 100%
rename from test/files/run/bug4697.scala
rename to test/files/run/t4697.scala
diff --git a/test/files/run/t4709.scala b/test/files/run/t4709.scala
new file mode 100644
index 0000000..c61a440
--- /dev/null
+++ b/test/files/run/t4709.scala
@@ -0,0 +1,10 @@
+
+
+import collection.GenSeq
+
+
+object Test {
+ def main(args: Array[String]) {
+ val Seq(1, 2) = Stream(1, 2)
+ }
+}
diff --git a/test/files/run/t4710.check b/test/files/run/t4710.check
new file mode 100644
index 0000000..f2335d1
--- /dev/null
+++ b/test/files/run/t4710.check
@@ -0,0 +1,8 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> def method : String = { implicit def f(s: Symbol) = "" ; 'symbol }
+warning: there were 1 feature warning(s); re-run with -feature for details
+method: String
+
+scala>
diff --git a/test/files/run/bug4710.scala b/test/files/run/t4710.scala
similarity index 100%
rename from test/files/run/bug4710.scala
rename to test/files/run/t4710.scala
diff --git a/test/files/run/t4723.scala b/test/files/run/t4723.scala
new file mode 100644
index 0000000..4278ff9
--- /dev/null
+++ b/test/files/run/t4723.scala
@@ -0,0 +1,9 @@
+
+
+
+object Test {
+ def main(args: Array[String]) {
+ assert(Nil == collection.parallel.ParSeq())
+ assert(collection.parallel.ParSeq() == Nil)
+ }
+}
diff --git a/test/files/run/t4729.check b/test/files/run/t4729.check
new file mode 100644
index 0000000..9a2aa56
--- /dev/null
+++ b/test/files/run/t4729.check
@@ -0,0 +1,4 @@
+WrappedArray(1, 2)
+WrappedArray(1, 2)
+WrappedArray(1, 2)
+WrappedArray(1, 2)
diff --git a/test/files/run/t4729/J_1.java b/test/files/run/t4729/J_1.java
new file mode 100644
index 0000000..2ffb5a8
--- /dev/null
+++ b/test/files/run/t4729/J_1.java
@@ -0,0 +1,4 @@
+// Java Interface:
+public interface J_1 {
+ public void method(String... s);
+}
diff --git a/test/files/run/t4729/S_2.scala b/test/files/run/t4729/S_2.scala
new file mode 100644
index 0000000..e34e3d3
--- /dev/null
+++ b/test/files/run/t4729/S_2.scala
@@ -0,0 +1,29 @@
+ // Scala class:
+class ScalaVarArgs extends J_1 {
+ // -- no problem on overriding it using ordinary class
+ def method(s: String*) { println(s) }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ //[1] Ok - no problem using inferred type
+ val varArgs = new J_1 {
+ def method(s: String*) { println(s) }
+ }
+ varArgs.method("1", "2")
+
+ //[2] Ok -- no problem when explicit set its type after construction
+ val b: J_1 = varArgs
+ b.method("1", "2")
+
+ //[3] Ok -- no problem on calling its method
+ (new ScalaVarArgs).method("1", "2")
+ (new ScalaVarArgs: J_1).method("1", "2")
+
+ //[4] Not Ok -- error when assigning anonymous class to a explictly typed val
+ // Compiler error: object creation impossible, since method method in trait VarArgs of type (s: <repeated...>[java.lang.String])Unit is not defined
+ val tagged: J_1 = new J_1 {
+ def method(s: String*) { println(s) }
+ }
+ }
+}
diff --git a/test/files/run/bug4752.scala b/test/files/run/t4752.scala
similarity index 100%
rename from test/files/run/bug4752.scala
rename to test/files/run/t4752.scala
diff --git a/test/files/run/t4753.check b/test/files/run/t4753.check
new file mode 100644
index 0000000..7b19ee8
--- /dev/null
+++ b/test/files/run/t4753.check
@@ -0,0 +1 @@
+boolean
diff --git a/test/files/run/t4753.scala b/test/files/run/t4753.scala
new file mode 100644
index 0000000..98f3e92
--- /dev/null
+++ b/test/files/run/t4753.scala
@@ -0,0 +1,12 @@
+trait A {
+ val actualType: Class[_]
+}
+trait B extends A {
+ final val actualType = classOf[Boolean]
+}
+
+object Test extends B {
+ def main(args: Array[String]): Unit = {
+ println(actualType)
+ }
+}
diff --git a/test/files/run/t4761.check b/test/files/run/t4761.check
new file mode 100644
index 0000000..1698a57
--- /dev/null
+++ b/test/files/run/t4761.check
@@ -0,0 +1,4 @@
+Vector(1, 1, 1, 1, 1)
+Vector(Vector(1, 1, 1, 1, 1))
+List(1, 2)
+List(List(1, 2))
diff --git a/test/files/run/t4761.scala b/test/files/run/t4761.scala
new file mode 100644
index 0000000..a9c245d
--- /dev/null
+++ b/test/files/run/t4761.scala
@@ -0,0 +1,11 @@
+object Test {
+ def main(args: Array[String]) {
+ val gs = for (x <- (1 to 5)) yield { if (x % 2 == 0) List(1).seq else List(1).par }
+ println(gs.flatten)
+ println(gs.transpose)
+
+ val s = Stream(Vector(1).par, Vector(2).par)
+ println(s.flatten.toList)
+ println(s.transpose.map(_.toList).toList)
+ }
+}
diff --git a/test/files/run/t4766.check b/test/files/run/t4766.check
new file mode 100644
index 0000000..d00491f
--- /dev/null
+++ b/test/files/run/t4766.check
@@ -0,0 +1 @@
+1
diff --git a/test/files/run/t4766.scala b/test/files/run/t4766.scala
new file mode 100644
index 0000000..c2a864d
--- /dev/null
+++ b/test/files/run/t4766.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+ val x = new {
+ def > = 1
+ }
+
+ println(x>)
+}
diff --git a/test/files/run/t4770.check b/test/files/run/t4770.check
new file mode 100644
index 0000000..38e5a83
--- /dev/null
+++ b/test/files/run/t4770.check
@@ -0,0 +1,2 @@
+(a,2)
+(2,a)
diff --git a/test/files/run/t4770.scala b/test/files/run/t4770.scala
new file mode 100644
index 0000000..25bf305
--- /dev/null
+++ b/test/files/run/t4770.scala
@@ -0,0 +1,15 @@
+package crasher {
+ class Z[@specialized A, @specialized(AnyRef) B](var a: A, var b: B) {
+ override def toString = "" + ((a, b))
+ }
+ object O {
+ def apply[@specialized A, @specialized(AnyRef) B](a0: A, b0: B) = new Z(a0, b0)
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(crasher.O("a", 2))
+ println(crasher.O(2, "a"))
+ }
+}
diff --git a/test/files/run/t4777.check b/test/files/run/t4777.check
new file mode 100644
index 0000000..11f1f59
--- /dev/null
+++ b/test/files/run/t4777.check
@@ -0,0 +1,2 @@
+28
+28
diff --git a/test/files/run/t4777.scala b/test/files/run/t4777.scala
new file mode 100644
index 0000000..4a811d3
--- /dev/null
+++ b/test/files/run/t4777.scala
@@ -0,0 +1,8 @@
+class A(val a: Int = 13)
+class DefaultsTest(x: Int = 25) extends A(28)
+object DefaultsTest extends DefaultsTest(12)
+
+object Test extends App {
+ println(new DefaultsTest() a)
+ println(DefaultsTest a)
+}
diff --git a/test/files/run/t4794.check b/test/files/run/t4794.check
new file mode 100644
index 0000000..f599e28
--- /dev/null
+++ b/test/files/run/t4794.check
@@ -0,0 +1 @@
+10
diff --git a/test/files/run/t4794.scala b/test/files/run/t4794.scala
new file mode 100644
index 0000000..afe89fa
--- /dev/null
+++ b/test/files/run/t4794.scala
@@ -0,0 +1,12 @@
+trait Mutable[@specialized A] { def a: A; def a_=(a0: A): Unit }
+trait NotSpecialized { }
+class Arr[@specialized A](val arr: Array[A]) {
+ def bippy(m: Mutable[A]) { m.a = arr(0) }
+ def quux(m: Mutable[A] with NotSpecialized) { m.a = arr(0) }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(classOf[Arr[_]].getMethods filter (_.getName contains "quux") size) // expect 10, not 1
+ }
+}
diff --git a/test/files/run/t4809.scala b/test/files/run/t4809.scala
new file mode 100644
index 0000000..b30d805
--- /dev/null
+++ b/test/files/run/t4809.scala
@@ -0,0 +1,34 @@
+
+
+import scala.util.control.Breaks._
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val x = tryBreakable {
+ break
+ 2
+ } catchBreak {
+ 3
+ }
+ assert(x == 3, x)
+
+ val y = tryBreakable {
+ 2
+ } catchBreak {
+ 3
+ }
+ assert(y == 2, y)
+
+ val z = tryBreakable {
+ break
+ 1.0
+ } catchBreak {
+ 2
+ }
+ assert(z == 2.0, z)
+ }
+
+}
diff --git a/test/files/run/t4813.scala b/test/files/run/t4813.scala
new file mode 100644
index 0000000..6d48ca8
--- /dev/null
+++ b/test/files/run/t4813.scala
@@ -0,0 +1,37 @@
+import collection.mutable._
+import reflect._
+
+
+object Test extends App {
+ def runTest[T, U](col: T)(clone: T => U)(mod: T => Unit)(implicit ct: ClassTag[T]): Unit = {
+ val cloned = clone(col)
+ assert(cloned == col, s"cloned should be equal to original. $cloned != $col")
+ mod(col)
+ assert(cloned != col, s"cloned should not modify when original does: $ct")
+ }
+
+ // Seqs
+ runTest(ArrayBuffer(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(ArraySeq(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(Buffer(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(DoubleLinkedList(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(IndexedSeq(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(LinearSeq(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(LinkedList(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(ListBuffer(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(MutableList(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(Queue(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+ runTest(Stack(1,2,3))(_.clone) { buf => buf transform (_ + 1) }
+
+ // Sets
+ runTest(BitSet(1,2,3))(_.clone) { buf => buf add 4 }
+ runTest(HashSet(1,2,3))(_.clone) { buf => buf add 4 }
+ runTest(Set(1,2,3))(_.clone) { buf => buf add 4 }
+ runTest(SortedSet(1,2,3))(_.clone) { buf => buf add 4 }
+ runTest(TreeSet(1,2,3))(_.clone) { buf => buf add 4 }
+
+ // Maps
+ runTest(HashMap(1->1,2->2,3->3))(_.clone) { buf => buf put (4,4) }
+ runTest(WeakHashMap(1->1,2->2,3->3))(_.clone) { buf => buf put (4,4) }
+}
+
diff --git a/test/files/run/t4827.scala b/test/files/run/t4827.scala
new file mode 100644
index 0000000..7270cf1
--- /dev/null
+++ b/test/files/run/t4827.scala
@@ -0,0 +1,15 @@
+object Test {
+ def main(args: Array[String]): Unit = Foo.foo()
+}
+
+trait CommonTrait {
+ def foo(): String = null
+}
+
+class Foo
+
+object Foo {
+ def goo() = new Foo() with CommonTrait
+
+ def foo(): String = null
+}
diff --git a/test/files/run/t4827b.scala b/test/files/run/t4827b.scala
new file mode 100644
index 0000000..84d6d90
--- /dev/null
+++ b/test/files/run/t4827b.scala
@@ -0,0 +1,18 @@
+package foo {
+ class Foo { }
+ object Foo {
+ def bippy(x: Int) = x
+ }
+}
+
+package bar {
+ class Bippy extends foo.Foo {
+ def bippy(x: Int) = x
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ new bar.Bippy bippy 5
+ }
+}
diff --git a/test/files/run/t4835.check b/test/files/run/t4835.check
index 531c3d7..0987722 100644
--- a/test/files/run/t4835.check
+++ b/test/files/run/t4835.check
@@ -1,7 +1,7 @@
--1 0 1 2 3 4 5 6 7 8 9
--1 1 3 5 7 9 11 13 15 17 19
-1 1
-2 1 2
-2 1 A 2
-3 1 2 3
-3 1 A 2 B 3
+-1 0 1 2 3 4 5 6 7 8 9
+-1 1 3 5 7 9 11 13 15 17 19
+1 1
+2 1 2
+2 1 A 2
+3 1 2 3
+3 1 A 2 B 3
diff --git a/test/files/run/t4835.scala b/test/files/run/t4835.scala
index 50d161b..c964e42 100644
--- a/test/files/run/t4835.scala
+++ b/test/files/run/t4835.scala
@@ -1,38 +1,38 @@
-/*
- * Test case for SI-4835. This tests confirm that the fix
- * doesn't break laziness. To test memory consumption,
- * I need to confirm that OutOfMemoryError doesn't occur.
- * I could create such tests. However, such tests consume
- * too much time and memory.
- */
-object Test {
- private final val INFINITE = -1
- def testStreamIterator(num: Int, stream: Stream[Int]): Unit = {
- val iter = stream.iterator
- print(num)
- // if num == -1, then steram is infinite sequence
- if (num == INFINITE) {
- for(i <- 0 until 10) {
- print(" " + iter.next())
- }
- } else {
- while(iter.hasNext) {
- print(" " + iter.next())
- }
- }
- println()
- }
-
- def main(args: Array[String]): Unit = {
- import Stream.{from, cons, empty}
- testStreamIterator(INFINITE, from(0))
- testStreamIterator(INFINITE, from(0).filter(_ % 2 == 1))
- testStreamIterator(1, Stream(1))
- testStreamIterator(2, Stream(1, 2))
- //Stream with side effect
- testStreamIterator(2, cons(1, cons({ print(" A"); 2}, empty)))
- testStreamIterator(3, Stream(1, 2, 3))
- //Stream with side effect
- testStreamIterator(3, cons(1, cons({ print(" A"); 2}, cons({ print(" B"); 3}, Stream.empty))))
- }
-}
+/*
+ * Test case for SI-4835. This tests confirm that the fix
+ * doesn't break laziness. To test memory consumption,
+ * I need to confirm that OutOfMemoryError doesn't occur.
+ * I could create such tests. However, such tests consume
+ * too much time and memory.
+ */
+object Test {
+ private final val INFINITE = -1
+ def testStreamIterator(num: Int, stream: Stream[Int]): Unit = {
+ val iter = stream.iterator
+ print(num)
+ // if num == -1, then steram is infinite sequence
+ if (num == INFINITE) {
+ for(i <- 0 until 10) {
+ print(" " + iter.next())
+ }
+ } else {
+ while(iter.hasNext) {
+ print(" " + iter.next())
+ }
+ }
+ println()
+ }
+
+ def main(args: Array[String]): Unit = {
+ import Stream.{from, cons, empty}
+ testStreamIterator(INFINITE, from(0))
+ testStreamIterator(INFINITE, from(0).filter(_ % 2 == 1))
+ testStreamIterator(1, Stream(1))
+ testStreamIterator(2, Stream(1, 2))
+ //Stream with side effect
+ testStreamIterator(2, cons(1, cons({ print(" A"); 2}, empty)))
+ testStreamIterator(3, Stream(1, 2, 3))
+ //Stream with side effect
+ testStreamIterator(3, cons(1, cons({ print(" A"); 2}, cons({ print(" B"); 3}, Stream.empty))))
+ }
+}
diff --git a/test/files/run/t4871.check b/test/files/run/t4871.check
new file mode 100644
index 0000000..a60526a
--- /dev/null
+++ b/test/files/run/t4871.check
@@ -0,0 +1,2 @@
+class Test$C
+class Test$D
diff --git a/test/files/run/t4871.scala b/test/files/run/t4871.scala
new file mode 100644
index 0000000..70d8b71
--- /dev/null
+++ b/test/files/run/t4871.scala
@@ -0,0 +1,12 @@
+object Test {
+ class C
+ class D
+
+ def main(args: Array[String]): Unit = {
+ val z: Class[C] = classOf
+ val z2: Class[D] = classOf[D]
+
+ println(z)
+ println(z2)
+ }
+}
diff --git a/test/files/run/t4891.check b/test/files/run/t4891.check
new file mode 100644
index 0000000..79fd7f6
--- /dev/null
+++ b/test/files/run/t4891.check
@@ -0,0 +1,7 @@
+test.generic.T1
+ (m) public abstract A test.generic.T1.t1(A)
+test.generic.C1
+ (m) public void test.generic.C1.m1()
+test.generic.C2
+ (m) public void test.generic.C1.m1()
+null
diff --git a/test/files/run/t4891/J_2.java b/test/files/run/t4891/J_2.java
new file mode 100644
index 0000000..db1cc52
--- /dev/null
+++ b/test/files/run/t4891/J_2.java
@@ -0,0 +1,13 @@
+import test.generic.*;
+
+public class J_2 {
+ public static <A> void foo(T1<A> x) {
+ // x.m1();
+ }
+
+ public static void main(String[] args) {
+ Bug4891.main(null);
+ T1<Object> x = new C2<Object>();
+ foo(x);
+ }
+}
diff --git a/test/files/run/t4891/S_1.scala b/test/files/run/t4891/S_1.scala
new file mode 100644
index 0000000..3309d22
--- /dev/null
+++ b/test/files/run/t4891/S_1.scala
@@ -0,0 +1,26 @@
+package test.generic {
+ class C1[A] {
+ def m1(): Unit = ()
+ }
+
+ trait T1[A] extends C1[A] {
+ def t1(x: A) = x
+ }
+
+ class C2[A] extends T1[A]
+}
+
+import scala.tools.partest._
+
+object Bug4891 extends SigTest {
+ import test.generic._
+
+ def main(args: Array[String]): Unit = {
+ show[T1[_]]()
+ show[C1[_]]()
+ show[C2[_]]("m1")
+
+ println(classOf[T1[_]].getGenericSuperclass)
+ classOf[T1[_]].getGenericInterfaces foreach println
+ }
+}
diff --git a/test/files/run/t4891/S_3.scala b/test/files/run/t4891/S_3.scala
new file mode 100644
index 0000000..0da4912
--- /dev/null
+++ b/test/files/run/t4891/S_3.scala
@@ -0,0 +1,5 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ J_2 main null
+ }
+}
diff --git a/test/files/run/t4894.scala b/test/files/run/t4894.scala
new file mode 100644
index 0000000..aa3b434
--- /dev/null
+++ b/test/files/run/t4894.scala
@@ -0,0 +1,27 @@
+
+
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ import collection._
+ val hs = mutable.HashSet[Int]()
+ hs ++= 1 to 10
+ hs --= 1 to 10
+
+ val phs = parallel.mutable.ParHashSet[Int]()
+ phs ++= 1 to 10
+ for (i <- 1 to 10) assert(phs(i))
+ phs --= 1 to 10
+ assert(phs.isEmpty)
+
+ val phm = parallel.mutable.ParHashMap[Int, Int]()
+ phm ++= ((1 to 10) zip (1 to 10))
+ for (i <- 1 to 10) assert(phm(i) == i)
+ phm --= 1 to 10
+ assert(phm.isEmpty)
+ }
+
+}
diff --git a/test/files/run/t4895.scala b/test/files/run/t4895.scala
index 14f6761..a0e8c19 100644
--- a/test/files/run/t4895.scala
+++ b/test/files/run/t4895.scala
@@ -1,20 +1,16 @@
-
-
-
-
object Test {
-
+
def checkPar(sz: Int) {
import collection._
val hs = mutable.HashSet[Int]() ++ (1 to sz)
assert(hs.par.map(_ + 1).seq.toSeq.sorted == (2 to (sz + 1)))
}
-
+
def main(args: Array[String]) {
for (i <- 0 until 100) checkPar(i)
for (i <- 100 until 1000 by 50) checkPar(i)
for (i <- 1000 until 10000 by 500) checkPar(i)
for (i <- 10000 until 100000 by 5000) checkPar(i)
}
-
+
}
diff --git a/test/files/run/t4897.check b/test/files/run/t4897.check
new file mode 100644
index 0000000..17dda56
--- /dev/null
+++ b/test/files/run/t4897.check
@@ -0,0 +1 @@
+joepie
diff --git a/test/files/run/t4897.scala b/test/files/run/t4897.scala
new file mode 100644
index 0000000..a2ec3de
--- /dev/null
+++ b/test/files/run/t4897.scala
@@ -0,0 +1,10 @@
+class CSuper {
+ object A
+}
+class C extends CSuper {
+ def f = (A: AnyRef) match { case _: A.type => "joepie" }
+}
+
+object Test extends C with App {
+ println(f)
+}
\ No newline at end of file
diff --git a/test/files/run/t4929.check b/test/files/run/t4929.check
new file mode 100644
index 0000000..0f0c913
--- /dev/null
+++ b/test/files/run/t4929.check
@@ -0,0 +1 @@
+success
\ No newline at end of file
diff --git a/test/files/run/t4929.scala b/test/files/run/t4929.scala
new file mode 100644
index 0000000..3208cd1
--- /dev/null
+++ b/test/files/run/t4929.scala
@@ -0,0 +1,42 @@
+import scala.util.parsing.json._
+import java.util.concurrent._
+import collection.JavaConversions._
+
+object Test extends App {
+
+ val LIMIT = 2000
+ val THREAD_COUNT = 20
+ val count = new java.util.concurrent.atomic.AtomicInteger(0)
+
+ val begin = new CountDownLatch(THREAD_COUNT)
+ val finish = new CountDownLatch(THREAD_COUNT)
+
+ val errors = new ConcurrentLinkedQueue[Throwable]
+
+ (1 to THREAD_COUNT) foreach { i =>
+ val thread = new Thread {
+ override def run() {
+ begin.await(1, TimeUnit.SECONDS)
+ try {
+ while (count.getAndIncrement() < LIMIT && errors.isEmpty) {
+ JSON.parseFull("""{"foo": [1,2,3,4]}""")
+ }
+ } catch {
+ case t: Throwable => errors.add(t)
+ }
+
+ finish.await(10, TimeUnit.SECONDS)
+ }
+ }
+
+ thread.setDaemon(true)
+ thread.start()
+
+ }
+
+
+ errors foreach { throw(_) }
+
+ println("success")
+
+}
diff --git a/test/files/run/t4930.check b/test/files/run/t4930.check
new file mode 100644
index 0000000..a58efd4
--- /dev/null
+++ b/test/files/run/t4930.check
@@ -0,0 +1,2 @@
+List(1)
+List(1)
diff --git a/test/files/run/t4930.scala b/test/files/run/t4930.scala
new file mode 100644
index 0000000..775f627
--- /dev/null
+++ b/test/files/run/t4930.scala
@@ -0,0 +1,11 @@
+import collection.immutable.SortedMap
+
+object Test {
+ implicit val ord: Ordering[Array[Byte]] = Ordering.by((_: Array[Byte]).toIterable)
+
+ def main(args: Array[String]): Unit = {
+ val m = SortedMap(Array[Byte](1) -> 0)
+ println(m.to(Array[Byte](1)) flatMap (_._1.mkString))
+ println(m.from(Array[Byte](1)) flatMap (_._1.mkString))
+ }
+}
diff --git a/test/files/run/t4935.check b/test/files/run/t4935.check
new file mode 100644
index 0000000..ce01362
--- /dev/null
+++ b/test/files/run/t4935.check
@@ -0,0 +1 @@
+hello
diff --git a/test/files/run/t4935.flags b/test/files/run/t4935.flags
new file mode 100644
index 0000000..ac14fe5
--- /dev/null
+++ b/test/files/run/t4935.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/run/t4935.scala b/test/files/run/t4935.scala
new file mode 100644
index 0000000..5940355
--- /dev/null
+++ b/test/files/run/t4935.scala
@@ -0,0 +1,9 @@
+object Test extends App {
+ for (i <- 0 to 1) {
+ val a = Foo
+ }
+}
+
+object Foo {
+ println("hello")
+}
diff --git a/test/files/run/t4954.scala b/test/files/run/t4954.scala
new file mode 100644
index 0000000..b4916e6
--- /dev/null
+++ b/test/files/run/t4954.scala
@@ -0,0 +1,45 @@
+
+
+import collection._
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val m = scala.collection.mutable.LinkedHashMap("one" -> 1, "two" -> 2, "three" -> 3, "four" -> 4, "five" -> 5)
+ val expected = List("one", "two", "three", "four", "five")
+ assert(m.keys.iterator.toList == expected)
+ assert(m.keys.drop(0).iterator.toList == expected)
+ assert(m.keys.drop(1).iterator.toList == expected.drop(1))
+ assert(m.keys.drop(2).iterator.toList == expected.drop(2))
+ assert(m.keys.drop(3).iterator.toList == expected.drop(3))
+ assert(m.keys.drop(4).iterator.toList == expected.drop(4))
+ assert(m.keys.drop(5).iterator.toList == expected.drop(5))
+
+ val expvals = List(1, 2, 3, 4, 5)
+ assert(m.values.iterator.toList == expvals)
+ assert(m.values.drop(0).iterator.toList == expvals)
+ assert(m.values.drop(1).iterator.toList == expvals.drop(1))
+ assert(m.values.drop(2).iterator.toList == expvals.drop(2))
+ assert(m.values.drop(3).iterator.toList == expvals.drop(3))
+ assert(m.values.drop(4).iterator.toList == expvals.drop(4))
+ assert(m.values.drop(5).iterator.toList == expvals.drop(5))
+
+ val pred = (x: String) => x.length < 6
+ val filtered = m.filterKeys(pred)
+ assert(filtered.drop(0).keys.toList == expected.filter(pred))
+ assert(filtered.drop(1).keys.toList == expected.filter(pred).drop(1))
+ assert(filtered.drop(2).keys.toList == expected.filter(pred).drop(2))
+ assert(filtered.drop(3).keys.toList == expected.filter(pred).drop(3))
+ assert(filtered.drop(4).keys.toList == expected.filter(pred).drop(4))
+
+ val mapped = m.mapValues(-_)
+ assert(mapped.drop(0).keys.toList == expected)
+ assert(mapped.drop(1).keys.toList == expected.drop(1))
+ assert(mapped.drop(2).keys.toList == expected.drop(2))
+ assert(mapped.drop(3).keys.toList == expected.drop(3))
+ assert(mapped.drop(4).keys.toList == expected.drop(4))
+ assert(mapped.drop(5).keys.toList == expected.drop(5))
+ }
+
+}
diff --git a/test/files/run/t5009.check b/test/files/run/t5009.check
new file mode 100644
index 0000000..6c56722
--- /dev/null
+++ b/test/files/run/t5009.check
@@ -0,0 +1,5 @@
+C(1,true)
+10
+C(7283,20)
+C(66,-3)
+100
diff --git a/test/files/run/t5009.scala b/test/files/run/t5009.scala
new file mode 100644
index 0000000..db12c0d
--- /dev/null
+++ b/test/files/run/t5009.scala
@@ -0,0 +1,14 @@
+object Test extends App {
+
+ case class C[T, U <: String, O >: Object](x: Int, y: T)(z: U, b: Boolean)(s: O, val l: Int)
+
+ val c = C(1, true)("dlkfj", true)("dlkfjlk", 10)
+ println(c)
+ println(c.l)
+
+ println(c.copy(y = 20, x = 7283)("enwa", b = false)(l = -1, s = new Object))
+
+ val res = c.copy[Int, String, Object](y = -3, x = 66)("lkdjen", false)(new Object, 100)
+ println(res)
+ println(res.l)
+}
diff --git a/test/files/run/t5018.scala b/test/files/run/t5018.scala
new file mode 100644
index 0000000..bb67a25
--- /dev/null
+++ b/test/files/run/t5018.scala
@@ -0,0 +1,37 @@
+
+
+
+import java.io._
+import collection._
+
+
+
+object Test {
+
+ def serializeDeserialize[T <: AnyRef](obj: T) = {
+ val buffer = new ByteArrayOutputStream
+ val out = new ObjectOutputStream(buffer)
+ out.writeObject(obj)
+ val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray))
+ in.readObject.asInstanceOf[T]
+ }
+
+ def main(args: Array[String]) {
+ val values = mutable.Map(1 -> 1).values
+ assert(serializeDeserialize(values).toList == values.toList)
+
+ val keyset = mutable.Map(1 -> 1).keySet
+ assert(serializeDeserialize(keyset) == keyset)
+
+ val imkeyset = immutable.Map(1 -> 1).keySet
+ assert(serializeDeserialize(imkeyset) == imkeyset)
+
+ val defaultmap = immutable.Map(1 -> 1).withDefaultValue(1)
+ assert(serializeDeserialize(defaultmap) == defaultmap)
+
+ val minusmap = mutable.Map(1 -> 1).withDefault(x => -x)
+ assert(serializeDeserialize(minusmap) == minusmap)
+ }
+
+}
+
diff --git a/test/files/run/t5037.check b/test/files/run/t5037.check
new file mode 100644
index 0000000..da29283
--- /dev/null
+++ b/test/files/run/t5037.check
@@ -0,0 +1,2 @@
+true
+false
diff --git a/test/files/run/t5037.scala b/test/files/run/t5037.scala
new file mode 100644
index 0000000..7b1fce7
--- /dev/null
+++ b/test/files/run/t5037.scala
@@ -0,0 +1,18 @@
+object Test {
+ def main(args: Array[String]) {
+ val t = new Test
+ t.inner.foo()
+ }
+}
+
+class Test {
+ class Inner {
+ def foo() {
+ println(bar)
+ bar = false
+ println(bar)
+ }
+ }
+ val inner = new Inner
+ private[this] final var bar = true
+}
diff --git a/test/files/run/t5040.check b/test/files/run/t5040.check
new file mode 100644
index 0000000..3f7b590
--- /dev/null
+++ b/test/files/run/t5040.check
@@ -0,0 +1 @@
+applyDynamic
diff --git a/test/files/run/t5040.flags b/test/files/run/t5040.flags
new file mode 100644
index 0000000..1141f97
--- /dev/null
+++ b/test/files/run/t5040.flags
@@ -0,0 +1 @@
+-language:dynamics
diff --git a/test/files/run/t5040.scala b/test/files/run/t5040.scala
new file mode 100644
index 0000000..6cd2c22
--- /dev/null
+++ b/test/files/run/t5040.scala
@@ -0,0 +1,11 @@
+abstract class Prova2 extends Dynamic {
+ def applyDynamic(m: String)(): Unit
+ private def privateMethod() = println("private method")
+}
+
+object Test extends App {
+ val prova= new Prova2 {
+ def applyDynamic(m: String)() = println("applyDynamic")
+ }
+ prova.privateMethod()
+}
diff --git a/test/files/run/t5053.check b/test/files/run/t5053.check
new file mode 100644
index 0000000..5ec39bb
--- /dev/null
+++ b/test/files/run/t5053.check
@@ -0,0 +1,6 @@
+true
+true
+true
+true
+true
+true
diff --git a/test/files/run/t5053.scala b/test/files/run/t5053.scala
new file mode 100644
index 0000000..e46dad5
--- /dev/null
+++ b/test/files/run/t5053.scala
@@ -0,0 +1,20 @@
+object Test extends App {
+ {
+ val (left, right) = Seq((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip
+ println(left.isInstanceOf[scala.collection.SeqViewLike[_,_,_]])
+ val (l, m, r) = Seq((1, 1.0, "a"), (1, 1.0, "a"), (1, 1.0, "a"), (3, 3.0, "c")).view.unzip3
+ println(l.isInstanceOf[scala.collection.SeqViewLike[_,_,_]])
+ }
+ {
+ val (left, right) = Iterable((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip
+ println(left.isInstanceOf[scala.collection.IterableViewLike[_,_,_]])
+ val (l, m, r) = Iterable((1, 1.0, "a"), (1, 1.0, "a"), (1, 1.0, "a"), (3, 3.0, "c")).view.unzip3
+ println(l.isInstanceOf[scala.collection.IterableViewLike[_,_,_]])
+ }
+ {
+ val (left, right) = Traversable((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip
+ println(left.isInstanceOf[scala.collection.TraversableViewLike[_,_,_]])
+ val (l, m, r) = Traversable((1, 1.0, "a"), (1, 1.0, "a"), (1, 1.0, "a"), (3, 3.0, "c")).view.unzip3
+ println(l.isInstanceOf[scala.collection.TraversableViewLike[_,_,_]])
+ }
+}
diff --git a/test/files/run/t5064.check b/test/files/run/t5064.check
new file mode 100644
index 0000000..077006a
--- /dev/null
+++ b/test/files/run/t5064.check
@@ -0,0 +1,25 @@
+[12] T5064.super.<init>()
+[12] T5064.super.<init>
+[12] this
+[16:23] immutable.this.List.apply(scala.this.Predef.wrapIntArray(Array[Int]{1}))
+[16:20] immutable.this.List.apply
+<16:20> immutable.this.List
+<16:20> immutable.this
+[16:23] scala.this.Predef.wrapIntArray(Array[Int]{1})
+[20] scala.this.Predef.wrapIntArray
+[20] scala.this.Predef
+[20] scala.this
+[26:32] collection.this.Seq.apply(scala.this.Predef.wrapIntArray(Array[Int]{1}))
+[26:29] collection.this.Seq.apply
+<26:29> collection.this.Seq
+<26:29> collection.this
+[26:32] scala.this.Predef.wrapIntArray(Array[Int]{1})
+[29] scala.this.Predef.wrapIntArray
+[29] scala.this.Predef
+[29] scala.this
+[35:39] immutable.this.List
+<35:39> immutable.this
+[42:45] collection.this.Seq
+<42:45> collection.this
+[48:51] immutable.this.Nil
+<48:51> immutable.this
diff --git a/test/files/run/t5064.scala b/test/files/run/t5064.scala
new file mode 100644
index 0000000..35f0951
--- /dev/null
+++ b/test/files/run/t5064.scala
@@ -0,0 +1,23 @@
+import scala.tools.partest._
+
+object Test extends CompilerTest {
+ import global._
+ override def extraSettings = super.extraSettings + " -Yrangepos"
+ override def sources = List(
+ """|class T5064 {
+ | List(1)
+ | Seq(1)
+ | List
+ | Seq
+ | Nil
+ |}""".stripMargin
+ )
+ def check(source: String, unit: CompilationUnit) {
+ for (ClassDef(_, _, _, Template(_, _, stats)) <- unit.body ; stat <- stats ; t <- stat) {
+ t match {
+ case _: Select | _: Apply | _: This => println("%-15s %s".format(t.pos.show, t))
+ case _ =>
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t5072.check b/test/files/run/t5072.check
new file mode 100644
index 0000000..8fe75f5
--- /dev/null
+++ b/test/files/run/t5072.check
@@ -0,0 +1,14 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> class C
+defined class C
+
+scala> Thread.currentThread.getContextClassLoader.loadClass(classOf[C].getName)
+res0: Class[_] = class C
+
+scala>
+
+scala>
diff --git a/test/files/run/t5072.scala b/test/files/run/t5072.scala
new file mode 100644
index 0000000..eef8604
--- /dev/null
+++ b/test/files/run/t5072.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+class C
+Thread.currentThread.getContextClassLoader.loadClass(classOf[C].getName)
+ """
+}
diff --git a/test/files/run/t5080.check b/test/files/run/t5080.check
new file mode 100644
index 0000000..1385f26
--- /dev/null
+++ b/test/files/run/t5080.check
@@ -0,0 +1 @@
+hey
diff --git a/test/files/run/t5080.scala b/test/files/run/t5080.scala
new file mode 100644
index 0000000..ce72d13
--- /dev/null
+++ b/test/files/run/t5080.scala
@@ -0,0 +1,24 @@
+object Test extends App {
+
+ abstract class Value {
+ }
+
+ case class Num(value: Int) extends Value {
+ override def toString = value.toString;
+ }
+
+ implicit def conversions(x: Value) = new {
+ def toInt =
+ x match {
+ case Num(n) => n
+ case _ => throw new RuntimeException
+ }
+ }
+
+ def eval(v: Value): Value = {
+ println("hey")
+ Num(1)
+ }
+
+ eval(Num(1)).toInt
+}
diff --git a/test/files/run/t5105.check b/test/files/run/t5105.check
new file mode 100644
index 0000000..1d4f6ef
--- /dev/null
+++ b/test/files/run/t5105.check
@@ -0,0 +1 @@
+You buttered your bread. Now sleep in it!
diff --git a/test/files/run/t5105.scala b/test/files/run/t5105.scala
new file mode 100644
index 0000000..f5a9a3c
--- /dev/null
+++ b/test/files/run/t5105.scala
@@ -0,0 +1,14 @@
+object Test {
+ def main(args: Array[String]) {
+ new foo.Bar
+ println("You buttered your bread. Now sleep in it!")
+ }
+}
+
+package foo {
+ trait Foo { def foo() {} }
+ class Bar extends Baz with Foo
+
+ abstract class Baz
+ object Baz extends Foo
+}
diff --git a/test/files/run/t5125.check b/test/files/run/t5125.check
new file mode 100644
index 0000000..d8a0565
--- /dev/null
+++ b/test/files/run/t5125.check
@@ -0,0 +1,4 @@
+public void O1$.f(java.lang.String[])
+public void O1$.f(scala.collection.Seq)
+public void O2$.f(java.lang.String[])
+public void O2$.f(scala.collection.Seq)
diff --git a/test/files/run/t5125.scala b/test/files/run/t5125.scala
new file mode 100644
index 0000000..7ec2b92
--- /dev/null
+++ b/test/files/run/t5125.scala
@@ -0,0 +1,24 @@
+object O1 {
+ def instance = this
+ @scala.annotation.varargs
+ def f(values:String*) = println("Calling O1.f(): " + values)
+}
+
+object O2 {
+ def instance = this
+ @scala.annotation.varargs
+ def f(values:String*) = println("Calling O2.f(): " + values)
+ // uncommenting g() results in errors in A.java
+ def g(): String => Int = s => s.hashCode
+}
+
+object Test extends App {
+ def check(c: Class[_]) {
+ val methodName = "f"
+ val methods = c.getDeclaredMethods.filter(_.getName == methodName)
+ println(methods.map(_.toString).sorted.mkString("\n"))
+ }
+
+ check(O1.getClass)
+ check(O2.getClass)
+}
\ No newline at end of file
diff --git a/test/files/run/t5125b.check b/test/files/run/t5125b.check
new file mode 100644
index 0000000..ddbf908
--- /dev/null
+++ b/test/files/run/t5125b.check
@@ -0,0 +1,7 @@
+public void C1.f(java.lang.String[])
+public void C1.f(scala.collection.Seq)
+public void C2.f(java.lang.String[])
+public void C2.f(scala.collection.Seq)
+public void C2$C3.f(java.lang.String[])
+public void C2$C3.f(scala.collection.Seq)
+public void C4.f(scala.collection.Seq)
diff --git a/test/files/run/t5125b.scala b/test/files/run/t5125b.scala
new file mode 100644
index 0000000..29c08fe
--- /dev/null
+++ b/test/files/run/t5125b.scala
@@ -0,0 +1,37 @@
+class C1 {
+ @scala.annotation.varargs
+ def f(values:String*) = println("Calling C1.f(): " + values)
+}
+
+class C2 {
+ @scala.annotation.varargs
+ def f(values:String*) = println("Calling C2.f(): " + values)
+ def g(): String => Int = s => s.hashCode
+
+ class C3 {
+ @scala.annotation.varargs
+ def f(values:String*) = println("Calling C3.f(): " + values)
+ }
+}
+
+class C4 {
+ def f(values: String*) = println("Calling C4.f(): " + values)
+
+ locally {
+ @scala.annotation.varargs
+ def f(values: String*) = println("Calling C4.<locally>.f(): " + values)
+ }
+}
+
+object Test extends App {
+ def check(c: Class[_]) {
+ val methodName = "f"
+ val methods = c.getDeclaredMethods.filter(_.getName == methodName)
+ println(methods.map(_.toString).sorted.mkString("\n"))
+ }
+
+ check(classOf[C1])
+ check(classOf[C2])
+ check(classOf[C2#C3])
+ check(classOf[C4])
+}
diff --git a/test/files/run/t5158.check b/test/files/run/t5158.check
new file mode 100644
index 0000000..573541a
--- /dev/null
+++ b/test/files/run/t5158.check
@@ -0,0 +1 @@
+0
diff --git a/test/files/run/t5158.scala b/test/files/run/t5158.scala
new file mode 100644
index 0000000..3028ffa
--- /dev/null
+++ b/test/files/run/t5158.scala
@@ -0,0 +1,17 @@
+case class B(var x: Int) {
+ def succ() {
+ x = x + 1
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ val b = B(0)
+ b match {
+ case B(x) =>
+ //println(x)
+ b.succ()
+ println(x)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t5162.scala b/test/files/run/t5162.scala
new file mode 100644
index 0000000..4f91932
--- /dev/null
+++ b/test/files/run/t5162.scala
@@ -0,0 +1,19 @@
+// In run, rather than pos, to check for problems like SI-4283
+object O1 {
+ private[O1] class Base {
+ def foo: Int = 0
+ }
+ class Mediator extends Base
+}
+
+object O2 {
+ class Derived extends O1.Mediator {
+ override def foo: Int = super.foo
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ new O2.Derived().foo
+ }
+}
diff --git a/test/files/run/t5171.check b/test/files/run/t5171.check
new file mode 100644
index 0000000..159606d
--- /dev/null
+++ b/test/files/run/t5171.check
@@ -0,0 +1 @@
+IsList
diff --git a/test/files/run/t5171.scala b/test/files/run/t5171.scala
new file mode 100644
index 0000000..eb8029d
--- /dev/null
+++ b/test/files/run/t5171.scala
@@ -0,0 +1,7 @@
+abstract sealed class ArgNumber
+case object IsList extends ArgNumber
+case object ArgNumber
+
+object Test extends App {
+ println(IsList)
+}
diff --git a/test/files/run/t5201.check b/test/files/run/t5201.check
new file mode 100644
index 0000000..27ba77d
--- /dev/null
+++ b/test/files/run/t5201.check
@@ -0,0 +1 @@
+true
diff --git a/test/files/run/t5201.scala b/test/files/run/t5201.scala
new file mode 100644
index 0000000..48aa7ba
--- /dev/null
+++ b/test/files/run/t5201.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ // First make sure specific types are preserved
+ val tmp: Vector[Int] = Vector(Vector(1,2), Vector(3,4)).view.flatten.force
+
+ // Now make sure we really get a view
+ val seq = Seq(Seq(1, 2), Seq(3, 4)).view.flatten
+ Console.println(seq.isInstanceOf[collection.SeqView[_,_]])
+}
diff --git a/test/files/run/t5224.check b/test/files/run/t5224.check
new file mode 100644
index 0000000..e15c1c9
--- /dev/null
+++ b/test/files/run/t5224.check
@@ -0,0 +1,9 @@
+{
+ @new Foo(bar = "qwe") class C extends AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ }
+ };
+ ()
+}
diff --git a/test/files/run/t5224.scala b/test/files/run/t5224.scala
new file mode 100644
index 0000000..600a420
--- /dev/null
+++ b/test/files/run/t5224.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+
+class Foo(bar: String) extends annotation.ClassfileAnnotation
+
+object Test extends App {
+ val tree = reify{@Foo(bar = "qwe") class C}.tree
+ println(tree.toString)
+}
\ No newline at end of file
diff --git a/test/files/run/t5225_1.check b/test/files/run/t5225_1.check
new file mode 100644
index 0000000..1a47aac
--- /dev/null
+++ b/test/files/run/t5225_1.check
@@ -0,0 +1,4 @@
+{
+ @new transient() @new volatile() var x = 2;
+ ()
+}
diff --git a/test/files/run/t5225_1.scala b/test/files/run/t5225_1.scala
new file mode 100644
index 0000000..917a239
--- /dev/null
+++ b/test/files/run/t5225_1.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree = reify{@transient @volatile var x = 2}.tree
+ println(tree.toString)
+}
\ No newline at end of file
diff --git a/test/files/run/t5225_2.check b/test/files/run/t5225_2.check
new file mode 100644
index 0000000..5faa365
--- /dev/null
+++ b/test/files/run/t5225_2.check
@@ -0,0 +1,4 @@
+{
+ def foo(@new `package`.cloneable() x: Int) = "";
+ ()
+}
diff --git a/test/files/run/t5225_2.scala b/test/files/run/t5225_2.scala
new file mode 100644
index 0000000..d1b6074
--- /dev/null
+++ b/test/files/run/t5225_2.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val tree = reify{def foo(@cloneable x: Int) = ""}.tree
+ println(tree.toString)
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/t5229_1.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t5229_1.check
diff --git a/test/files/run/t5229_1.scala b/test/files/run/t5229_1.scala
new file mode 100644
index 0000000..7e05b08
--- /dev/null
+++ b/test/files/run/t5229_1.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ object C
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5229_2.check b/test/files/run/t5229_2.check
new file mode 100644
index 0000000..43c25b9
--- /dev/null
+++ b/test/files/run/t5229_2.check
@@ -0,0 +1,2 @@
+2
+evaluated = ()
diff --git a/test/files/run/t5229_2.scala b/test/files/run/t5229_2.scala
new file mode 100644
index 0000000..f059b09
--- /dev/null
+++ b/test/files/run/t5229_2.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = reify {
+ object C {
+ val x = 2
+ }
+
+ println(C.x)
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/t5230.check b/test/files/run/t5230.check
new file mode 100644
index 0000000..43c25b9
--- /dev/null
+++ b/test/files/run/t5230.check
@@ -0,0 +1,2 @@
+2
+evaluated = ()
diff --git a/test/files/run/t5230.scala b/test/files/run/t5230.scala
new file mode 100644
index 0000000..f6a7817
--- /dev/null
+++ b/test/files/run/t5230.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = reify {
+ class C {
+ val x = 2
+ }
+
+ println(new C().x)
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/t5256a.check b/test/files/run/t5256a.check
new file mode 100644
index 0000000..09b5a02
--- /dev/null
+++ b/test/files/run/t5256a.check
@@ -0,0 +1,6 @@
+class A
+A
+AnyRef {
+ def <init>(): A
+ def foo: Nothing
+}
diff --git a/test/files/run/t5256a.scala b/test/files/run/t5256a.scala
new file mode 100644
index 0000000..84ef97b
--- /dev/null
+++ b/test/files/run/t5256a.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+class A { def foo = ??? }
+
+object Test extends App {
+ val c = cm.classSymbol(classOf[A])
+ println(c)
+ println(c.fullName)
+ println(c.typeSignature)
+}
\ No newline at end of file
diff --git a/test/files/run/t5256b.check b/test/files/run/t5256b.check
new file mode 100644
index 0000000..ca93aaa
--- /dev/null
+++ b/test/files/run/t5256b.check
@@ -0,0 +1,6 @@
+class A
+Test.A
+AnyRef {
+ def <init>(): Test.A
+ def foo: Nothing
+}
diff --git a/test/files/run/t5256b.scala b/test/files/run/t5256b.scala
new file mode 100644
index 0000000..0ffab8a
--- /dev/null
+++ b/test/files/run/t5256b.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ class A { def foo = ??? }
+ val c = cm.classSymbol(classOf[A])
+ println(c)
+ println(c.fullName)
+ println(c.typeSignature)
+}
\ No newline at end of file
diff --git a/test/files/run/t5256c.check b/test/files/run/t5256c.check
new file mode 100644
index 0000000..7fcd0eb
--- /dev/null
+++ b/test/files/run/t5256c.check
@@ -0,0 +1,6 @@
+class A$1
+Test.A$1
+java.lang.Object {
+ def foo(): Nothing
+ def <init>(): A$1
+}
diff --git a/test/files/run/t5256c.scala b/test/files/run/t5256c.scala
new file mode 100644
index 0000000..d56215f
--- /dev/null
+++ b/test/files/run/t5256c.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ {
+ class A { def foo = ??? }
+ val c = cm.classSymbol(classOf[A])
+ println(c)
+ println(c.fullName)
+ println(c.typeSignature)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t5256d.check b/test/files/run/t5256d.check
new file mode 100644
index 0000000..b7617e8
--- /dev/null
+++ b/test/files/run/t5256d.check
@@ -0,0 +1,32 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.runtime.{currentMirror=>cm}
+
+scala> class A { def foo = ??? }
+defined class A
+
+scala> val c = cm.classSymbol(classOf[A])
+c: reflect.runtime.universe.ClassSymbol = class A
+
+scala> println(c)
+class A
+
+scala> println(c.fullName)
+$line8.$read.$iw.$iw.$iw.$iw.A
+
+scala> println(c.typeSignature)
+scala.AnyRef {
+ def <init>(): A
+ def foo: scala.Nothing
+}
+
+scala>
+
+scala>
diff --git a/test/files/run/t5256d.scala b/test/files/run/t5256d.scala
new file mode 100644
index 0000000..24ac1eb
--- /dev/null
+++ b/test/files/run/t5256d.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+class A { def foo = ??? }
+val c = cm.classSymbol(classOf[A])
+println(c)
+println(c.fullName)
+println(c.typeSignature)
+ """
+}
\ No newline at end of file
diff --git a/test/files/run/t5256e.check b/test/files/run/t5256e.check
new file mode 100644
index 0000000..ed35131
--- /dev/null
+++ b/test/files/run/t5256e.check
@@ -0,0 +1,6 @@
+class A
+Test.C.A
+AnyRef {
+ def <init>(): C.this.A
+ def foo: Nothing
+}
diff --git a/test/files/run/t5256e.scala b/test/files/run/t5256e.scala
new file mode 100644
index 0000000..f83546f
--- /dev/null
+++ b/test/files/run/t5256e.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ class C { class A { def foo = ??? } }
+ val c = cm.classSymbol(classOf[C#A])
+ println(c)
+ println(c.fullName)
+ println(c.typeSignature)
+}
\ No newline at end of file
diff --git a/test/files/run/t5256f.check b/test/files/run/t5256f.check
new file mode 100644
index 0000000..6a89d0b
--- /dev/null
+++ b/test/files/run/t5256f.check
@@ -0,0 +1,12 @@
+class A1
+Test.A1
+AnyRef {
+ def <init>(): Test.A1
+ def foo: Nothing
+}
+class A2
+Test.A2
+AnyRef {
+ def <init>(): Test.this.A2
+ def foo: Nothing
+}
diff --git a/test/files/run/t5256f.scala b/test/files/run/t5256f.scala
new file mode 100644
index 0000000..80c7ad8
--- /dev/null
+++ b/test/files/run/t5256f.scala
@@ -0,0 +1,22 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ class A1 { def foo = ??? }
+
+ val c1 = cm.classSymbol(classOf[A1])
+ println(c1)
+ println(c1.fullName)
+ println(c1.typeSignature)
+
+ new Test
+}
+
+class Test {
+ class A2 { def foo = ??? }
+
+ val c2 = cm.classSymbol(classOf[A2])
+ println(c2)
+ println(c2.fullName)
+ println(c2.typeSignature)
+}
diff --git a/test/files/run/t5256g.check b/test/files/run/t5256g.check
new file mode 100644
index 0000000..c9c8d6e
--- /dev/null
+++ b/test/files/run/t5256g.check
@@ -0,0 +1,3 @@
+anonymous class $anon$1
+Test.$anon$1
+A with B{def <init>(): A with B}
diff --git a/test/files/run/t5256g.scala b/test/files/run/t5256g.scala
new file mode 100644
index 0000000..358c186
--- /dev/null
+++ b/test/files/run/t5256g.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+class A
+trait B
+
+object Test extends App {
+ val mutant = new A with B
+ val c = cm.classSymbol(mutant.getClass)
+ println(c)
+ println(c.fullName)
+ println(c.typeSignature)
+}
diff --git a/test/files/run/t5256h.check b/test/files/run/t5256h.check
new file mode 100644
index 0000000..1b23a71
--- /dev/null
+++ b/test/files/run/t5256h.check
@@ -0,0 +1,7 @@
+anonymous class $anon$1
+Test.$anon$1
+java.lang.Object {
+ final private val x: Int
+ def x(): Int
+ def <init>(): java.lang.Object{def x(): Int}
+}
diff --git a/test/files/run/t5256h.scala b/test/files/run/t5256h.scala
new file mode 100644
index 0000000..fd4ffd9
--- /dev/null
+++ b/test/files/run/t5256h.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ val mutant = new { val x = 2 }
+ val c = cm.classSymbol(mutant.getClass)
+ println(c)
+ println(c.fullName)
+ println(c.typeSignature)
+}
diff --git a/test/files/run/t5258a.check b/test/files/run/t5258a.check
new file mode 100644
index 0000000..4e0b2da
--- /dev/null
+++ b/test/files/run/t5258a.check
@@ -0,0 +1 @@
+int
\ No newline at end of file
diff --git a/test/files/run/t5258a.scala b/test/files/run/t5258a.scala
new file mode 100644
index 0000000..1b98b59
--- /dev/null
+++ b/test/files/run/t5258a.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ println(classOf[Int])
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5262.check b/test/files/run/t5262.check
new file mode 100644
index 0000000..4c7a875
--- /dev/null
+++ b/test/files/run/t5262.check
@@ -0,0 +1,2 @@
+List(1, 2, 3, 4)
+List(1, 2, null, 4)
\ No newline at end of file
diff --git a/test/files/run/t5262.scala b/test/files/run/t5262.scala
new file mode 100644
index 0000000..fc4e57a
--- /dev/null
+++ b/test/files/run/t5262.scala
@@ -0,0 +1,26 @@
+
+
+
+
+
+
+
+object Test {
+
+ def serializationDeserialization(obj : Any) {
+ val bos = new java.io.ByteArrayOutputStream()
+ val out = new java.io.ObjectOutputStream(bos)
+ out.writeObject(obj)
+
+ val arr = bos.toByteArray()
+ val in = new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(arr))
+ val o = in.readObject()
+ println(o)
+ }
+
+ def main(args : Array[String]) {
+ serializationDeserialization(List(1,2,3,4))
+ serializationDeserialization(List(1,2,null,4))
+ }
+
+}
diff --git a/test/files/run/t5266_1.check b/test/files/run/t5266_1.check
new file mode 100644
index 0000000..35f2080
--- /dev/null
+++ b/test/files/run/t5266_1.check
@@ -0,0 +1,2 @@
+2
+evaluated = ()
\ No newline at end of file
diff --git a/test/files/run/t5266_1.scala b/test/files/run/t5266_1.scala
new file mode 100644
index 0000000..7bf73ac
--- /dev/null
+++ b/test/files/run/t5266_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = reify {
+ def x = 2
+ println(x)
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/t5266_2.check b/test/files/run/t5266_2.check
new file mode 100644
index 0000000..35f2080
--- /dev/null
+++ b/test/files/run/t5266_2.check
@@ -0,0 +1,2 @@
+2
+evaluated = ()
\ No newline at end of file
diff --git a/test/files/run/t5266_2.scala b/test/files/run/t5266_2.scala
new file mode 100644
index 0000000..9b33910
--- /dev/null
+++ b/test/files/run/t5266_2.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = reify {
+ def x = 2
+ def y = x
+ println(y)
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/t5269.check b/test/files/run/t5269.check
new file mode 100644
index 0000000..0cfbf08
--- /dev/null
+++ b/test/files/run/t5269.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/t5269.scala b/test/files/run/t5269.scala
new file mode 100644
index 0000000..dfdabdd
--- /dev/null
+++ b/test/files/run/t5269.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ trait Z {
+ val z = 2
+ }
+
+ class X extends Z {
+ def println() = Predef.println(z)
+ }
+
+ new X().println()
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5270.check b/test/files/run/t5270.check
new file mode 100644
index 0000000..08839f6
--- /dev/null
+++ b/test/files/run/t5270.check
@@ -0,0 +1 @@
+200
diff --git a/test/files/run/t5270.scala b/test/files/run/t5270.scala
new file mode 100644
index 0000000..afd45a0
--- /dev/null
+++ b/test/files/run/t5270.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ class Y {
+ def y = 100
+ }
+
+ trait Z { this: Y =>
+ val z = 2 * y
+ }
+
+ class X extends Y with Z {
+ def println() = Predef.println(z)
+ }
+
+ new X().println()
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5271_1.check b/test/files/run/t5271_1.check
new file mode 100644
index 0000000..544b4d2
--- /dev/null
+++ b/test/files/run/t5271_1.check
@@ -0,0 +1,12 @@
+{
+ case class C extends Product with Serializable {
+ <caseaccessor> <paramaccessor> val foo: Int = _;
+ <caseaccessor> <paramaccessor> val bar: Int = _;
+ def <init>(foo: Int, bar: Int) = {
+ super.<init>();
+ ()
+ }
+ };
+ ()
+}
+()
diff --git a/test/files/run/t5271_1.scala b/test/files/run/t5271_1.scala
new file mode 100644
index 0000000..9e2c760
--- /dev/null
+++ b/test/files/run/t5271_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val code = reify {
+ case class C(foo: Int, bar: Int)
+ };
+
+ val toolbox = cm.mkToolBox()
+ println(code.tree)
+ println(code.eval)
+}
\ No newline at end of file
diff --git a/test/files/run/t5271_2.check b/test/files/run/t5271_2.check
new file mode 100644
index 0000000..1df8887
--- /dev/null
+++ b/test/files/run/t5271_2.check
@@ -0,0 +1,14 @@
+{
+ case class C extends Product with Serializable {
+ <caseaccessor> <paramaccessor> val foo: Int = _;
+ <caseaccessor> <paramaccessor> val bar: Int = _;
+ def <init>(foo: Int, bar: Int) = {
+ super.<init>();
+ ()
+ }
+ };
+ val c = C.apply(2, 2);
+ Predef.println(c.foo.$times(c.bar))
+}
+4
+()
diff --git a/test/files/run/t5271_2.scala b/test/files/run/t5271_2.scala
new file mode 100644
index 0000000..430738f
--- /dev/null
+++ b/test/files/run/t5271_2.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val code = reify {
+ case class C(foo: Int, bar: Int)
+ val c = C(2, 2)
+ println(c.foo * c.bar)
+ };
+
+ val toolbox = cm.mkToolBox()
+ println(code.tree)
+ println(code.eval)
+}
\ No newline at end of file
diff --git a/test/files/run/t5271_3.check b/test/files/run/t5271_3.check
new file mode 100644
index 0000000..99aacc2
--- /dev/null
+++ b/test/files/run/t5271_3.check
@@ -0,0 +1,21 @@
+{
+ object C extends AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ def qwe = 4
+ };
+ case class C extends Product with Serializable {
+ <caseaccessor> <paramaccessor> val foo: Int = _;
+ <caseaccessor> <paramaccessor> val bar: Int = _;
+ def <init>(foo: Int, bar: Int) = {
+ super.<init>();
+ ()
+ }
+ };
+ val c = C.apply(2, 2);
+ Predef.println(c.foo.$times(c.bar).$eq$eq(C.qwe))
+}
+true
+()
diff --git a/test/files/run/t5271_3.scala b/test/files/run/t5271_3.scala
new file mode 100644
index 0000000..f2ca2d4
--- /dev/null
+++ b/test/files/run/t5271_3.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val code = reify {
+ object C { def qwe = 4 }
+ case class C(foo: Int, bar: Int)
+ val c = C(2, 2)
+ println(c.foo * c.bar == C.qwe)
+ };
+
+ val toolbox = cm.mkToolBox()
+ println(code.tree)
+ println(code.eval)
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/t5271_4.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t5271_4.check
diff --git a/test/files/run/t5271_4.scala b/test/files/run/t5271_4.scala
new file mode 100644
index 0000000..f63e82b
--- /dev/null
+++ b/test/files/run/t5271_4.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ case object C
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5272_1_newpatmat.check b/test/files/run/t5272_1_newpatmat.check
new file mode 100644
index 0000000..9f8d6f2
--- /dev/null
+++ b/test/files/run/t5272_1_newpatmat.check
@@ -0,0 +1 @@
+okay
\ No newline at end of file
diff --git a/test/files/run/t5272_1_newpatmat.scala b/test/files/run/t5272_1_newpatmat.scala
new file mode 100644
index 0000000..e8bb013
--- /dev/null
+++ b/test/files/run/t5272_1_newpatmat.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ 2 match {
+ case 2 => println("okay")
+ case _ => println("not okay")
+ }
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5272_1_oldpatmat.check b/test/files/run/t5272_1_oldpatmat.check
new file mode 100644
index 0000000..9f8d6f2
--- /dev/null
+++ b/test/files/run/t5272_1_oldpatmat.check
@@ -0,0 +1 @@
+okay
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/t5272_1_oldpatmat.flags
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t5272_1_oldpatmat.flags
diff --git a/test/files/run/t5272_1_oldpatmat.scala b/test/files/run/t5272_1_oldpatmat.scala
new file mode 100644
index 0000000..e8bb013
--- /dev/null
+++ b/test/files/run/t5272_1_oldpatmat.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ 2 match {
+ case 2 => println("okay")
+ case _ => println("not okay")
+ }
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5272_2_newpatmat.check b/test/files/run/t5272_2_newpatmat.check
new file mode 100644
index 0000000..549f3f3
--- /dev/null
+++ b/test/files/run/t5272_2_newpatmat.check
@@ -0,0 +1 @@
+okay2
\ No newline at end of file
diff --git a/test/files/run/t5272_2_newpatmat.scala b/test/files/run/t5272_2_newpatmat.scala
new file mode 100644
index 0000000..be79cde
--- /dev/null
+++ b/test/files/run/t5272_2_newpatmat.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ 2 match {
+ case x => println("okay" + x)
+ }
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5272_2_oldpatmat.check b/test/files/run/t5272_2_oldpatmat.check
new file mode 100644
index 0000000..549f3f3
--- /dev/null
+++ b/test/files/run/t5272_2_oldpatmat.check
@@ -0,0 +1 @@
+okay2
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/t5272_2_oldpatmat.flags
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t5272_2_oldpatmat.flags
diff --git a/test/files/run/t5272_2_oldpatmat.scala b/test/files/run/t5272_2_oldpatmat.scala
new file mode 100644
index 0000000..be79cde
--- /dev/null
+++ b/test/files/run/t5272_2_oldpatmat.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ 2 match {
+ case x => println("okay" + x)
+ }
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5273_1_newpatmat.check b/test/files/run/t5273_1_newpatmat.check
new file mode 100644
index 0000000..0cfbf08
--- /dev/null
+++ b/test/files/run/t5273_1_newpatmat.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/t5273_1_newpatmat.scala b/test/files/run/t5273_1_newpatmat.scala
new file mode 100644
index 0000000..756f52e
--- /dev/null
+++ b/test/files/run/t5273_1_newpatmat.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ List(1, 2, 3) match {
+ case foo :: bar :: _ => println(foo * bar)
+ case _ => println("this is getting out of hand!")
+ }
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5273_1_oldpatmat.check b/test/files/run/t5273_1_oldpatmat.check
new file mode 100644
index 0000000..0cfbf08
--- /dev/null
+++ b/test/files/run/t5273_1_oldpatmat.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/jvm/bug680.check b/test/files/run/t5273_1_oldpatmat.flags
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t5273_1_oldpatmat.flags
diff --git a/test/files/run/t5273_1_oldpatmat.scala b/test/files/run/t5273_1_oldpatmat.scala
new file mode 100644
index 0000000..756f52e
--- /dev/null
+++ b/test/files/run/t5273_1_oldpatmat.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ List(1, 2, 3) match {
+ case foo :: bar :: _ => println(foo * bar)
+ case _ => println("this is getting out of hand!")
+ }
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5273_2a_newpatmat.check b/test/files/run/t5273_2a_newpatmat.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/t5273_2a_newpatmat.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/t5273_2a_newpatmat.scala b/test/files/run/t5273_2a_newpatmat.scala
new file mode 100644
index 0000000..c0d1549
--- /dev/null
+++ b/test/files/run/t5273_2a_newpatmat.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ val foo :: bar :: _ = List(1, 2, 3)
+ println(foo * bar)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5273_2a_oldpatmat.check b/test/files/run/t5273_2a_oldpatmat.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/t5273_2a_oldpatmat.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/t5273_2a_oldpatmat.flags
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t5273_2a_oldpatmat.flags
diff --git a/test/files/run/t5273_2a_oldpatmat.scala b/test/files/run/t5273_2a_oldpatmat.scala
new file mode 100644
index 0000000..c0d1549
--- /dev/null
+++ b/test/files/run/t5273_2a_oldpatmat.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ val foo :: bar :: _ = List(1, 2, 3)
+ println(foo * bar)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5273_2b_newpatmat.check b/test/files/run/t5273_2b_newpatmat.check
new file mode 100644
index 0000000..c551774
--- /dev/null
+++ b/test/files/run/t5273_2b_newpatmat.check
@@ -0,0 +1 @@
+name = American Dollar, shortname = USD, value = 2,8567
diff --git a/test/files/run/t5273_2b_newpatmat.scala b/test/files/run/t5273_2b_newpatmat.scala
new file mode 100644
index 0000000..31afd7e
--- /dev/null
+++ b/test/files/run/t5273_2b_newpatmat.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ val RegexParser = """(.*) \d+([A-Z]+) \| (.*) \|.*""".r
+ val RegexParser(name, shortname, value) = "American Dollar 1USD | 2,8567 | sometext"
+ println("name = %s, shortname = %s, value = %s".format(name, shortname, value))
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5273_2b_oldpatmat.check b/test/files/run/t5273_2b_oldpatmat.check
new file mode 100644
index 0000000..c551774
--- /dev/null
+++ b/test/files/run/t5273_2b_oldpatmat.check
@@ -0,0 +1 @@
+name = American Dollar, shortname = USD, value = 2,8567
diff --git a/test/files/jvm/bug680.check b/test/files/run/t5273_2b_oldpatmat.flags
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t5273_2b_oldpatmat.flags
diff --git a/test/files/run/t5273_2b_oldpatmat.scala b/test/files/run/t5273_2b_oldpatmat.scala
new file mode 100644
index 0000000..31afd7e
--- /dev/null
+++ b/test/files/run/t5273_2b_oldpatmat.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ val RegexParser = """(.*) \d+([A-Z]+) \| (.*) \|.*""".r
+ val RegexParser(name, shortname, value) = "American Dollar 1USD | 2,8567 | sometext"
+ println("name = %s, shortname = %s, value = %s".format(name, shortname, value))
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5274_1.check b/test/files/run/t5274_1.check
new file mode 100644
index 0000000..fca8bc3
--- /dev/null
+++ b/test/files/run/t5274_1.check
@@ -0,0 +1,3 @@
+50! = 30414093201713378043612608166064768844377641568960512000000000000
+49! = 608281864034267560872252163321295376887552831379210240000000000
+50!/49! = 50
diff --git a/test/files/run/t5274_1.scala b/test/files/run/t5274_1.scala
new file mode 100644
index 0000000..c1b842f
--- /dev/null
+++ b/test/files/run/t5274_1.scala
@@ -0,0 +1,14 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ def factorial(n: BigInt): BigInt =
+ if (n == 0) 1 else n * factorial(n-1)
+
+ val f50 = factorial(50); val f49 = factorial(49)
+ println("50! = " + f50)
+ println("49! = " + f49)
+ println("50!/49! = " + (f50 / f49))
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5274_2.check b/test/files/run/t5274_2.check
new file mode 100644
index 0000000..375536c
--- /dev/null
+++ b/test/files/run/t5274_2.check
@@ -0,0 +1,2 @@
+[6,2,8,5,1]
+[1,2,5,6,8]
diff --git a/test/files/run/t5274_2.scala b/test/files/run/t5274_2.scala
new file mode 100644
index 0000000..17e3976
--- /dev/null
+++ b/test/files/run/t5274_2.scala
@@ -0,0 +1,51 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ /** Nested methods can use and even update everything
+ * visible in their scope (including local variables or
+ * arguments of enclosing methods).
+ */
+ def sort(a: Array[Int]) {
+
+ def swap(i: Int, j: Int) {
+ val t = a(i); a(i) = a(j); a(j) = t
+ }
+
+ def sort1(l: Int, r: Int) {
+ val pivot = a((l + r) / 2)
+ var i = l
+ var j = r
+ while (i <= j) {
+ while (a(i) < pivot) i += 1
+ while (a(j) > pivot) j -= 1
+ if (i <= j) {
+ swap(i, j)
+ i += 1
+ j -= 1
+ }
+ }
+ if (l < j) sort1(l, j)
+ if (j < r) sort1(i, r)
+ }
+
+ if (a.length > 0)
+ sort1(0, a.length - 1)
+ }
+
+ def println(ar: Array[Int]) {
+ def print1 = {
+ def iter(i: Int): String =
+ ar(i) + (if (i < ar.length-1) "," + iter(i+1) else "")
+ if (ar.length == 0) "" else iter(0)
+ }
+ Console.println("[" + print1 + "]")
+ }
+
+ val ar = Array(6, 2, 8, 5, 1)
+ println(ar)
+ sort(ar)
+ println(ar)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5275.check b/test/files/run/t5275.check
new file mode 100644
index 0000000..0cfbf08
--- /dev/null
+++ b/test/files/run/t5275.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/t5275.scala b/test/files/run/t5275.scala
new file mode 100644
index 0000000..5c84df4
--- /dev/null
+++ b/test/files/run/t5275.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ class C(val foo: Int)
+ println(new C(2).foo)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5276_1a.check b/test/files/run/t5276_1a.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/t5276_1a.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/t5276_1a.scala b/test/files/run/t5276_1a.scala
new file mode 100644
index 0000000..7f4b6ec
--- /dev/null
+++ b/test/files/run/t5276_1a.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ lazy val x = 2
+ println(x)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5276_1b.check b/test/files/run/t5276_1b.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/t5276_1b.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/t5276_1b.scala b/test/files/run/t5276_1b.scala
new file mode 100644
index 0000000..56e7dc1
--- /dev/null
+++ b/test/files/run/t5276_1b.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ implicit lazy val x = 2
+ println(implicitly[Int])
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5276_2a.check b/test/files/run/t5276_2a.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/t5276_2a.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/t5276_2a.scala b/test/files/run/t5276_2a.scala
new file mode 100644
index 0000000..af9272c
--- /dev/null
+++ b/test/files/run/t5276_2a.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ class C {
+ lazy val x = 2
+ }
+
+ println(new C().x)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5276_2b.check b/test/files/run/t5276_2b.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/t5276_2b.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/t5276_2b.scala b/test/files/run/t5276_2b.scala
new file mode 100644
index 0000000..b211901
--- /dev/null
+++ b/test/files/run/t5276_2b.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ class C {
+ implicit lazy val x = 2
+ def y = implicitly[Int]
+ }
+
+ println(new C().y)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5277_1.check b/test/files/run/t5277_1.check
new file mode 100644
index 0000000..a48033a
--- /dev/null
+++ b/test/files/run/t5277_1.check
@@ -0,0 +1 @@
+10! = 3628800
diff --git a/test/files/run/t5277_1.scala b/test/files/run/t5277_1.scala
new file mode 100644
index 0000000..a2d5465
--- /dev/null
+++ b/test/files/run/t5277_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ def fact(n: Int): BigInt =
+ if (n == 0) 1 else fact(n-1) * n
+ class Factorizer(n: Int) {
+ def ! = fact(n)
+ }
+ implicit def int2fact(n: Int) = new Factorizer(n)
+
+ println("10! = " + (10!))
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5277_2.check b/test/files/run/t5277_2.check
new file mode 100644
index 0000000..ca017e2
--- /dev/null
+++ b/test/files/run/t5277_2.check
@@ -0,0 +1,2 @@
+2()
+1()
diff --git a/test/files/run/t5277_2.scala b/test/files/run/t5277_2.scala
new file mode 100644
index 0000000..dd72452
--- /dev/null
+++ b/test/files/run/t5277_2.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ def p(implicit i: Int) = print(i)
+ implicit val v = 2
+
+ println(p)
+ println(p(1))
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5279.check b/test/files/run/t5279.check
new file mode 100644
index 0000000..f599e28
--- /dev/null
+++ b/test/files/run/t5279.check
@@ -0,0 +1 @@
+10
diff --git a/test/files/run/t5279.scala b/test/files/run/t5279.scala
new file mode 100644
index 0000000..815c883
--- /dev/null
+++ b/test/files/run/t5279.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ println(new Integer(10))
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5284.check b/test/files/run/t5284.check
new file mode 100644
index 0000000..0cfbf08
--- /dev/null
+++ b/test/files/run/t5284.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/t5284.scala b/test/files/run/t5284.scala
new file mode 100644
index 0000000..ba0845f
--- /dev/null
+++ b/test/files/run/t5284.scala
@@ -0,0 +1,25 @@
+
+
+
+
+
+/** Here we have a situation where a normalized method parameter `W`
+ * is used in a position which accepts an instance of type `T` - we know we can
+ * safely cast `T` to `W` whenever type bounds on `W` hold.
+ */
+object Test {
+ def main(args: Array[String]) {
+ val a = Blarg(Array(1, 2, 3))
+ println(a.m((x: Int) => x + 1))
+ }
+}
+
+
+object Blarg {
+ def apply[T: Manifest](a: Array[T]) = new Blarg(a)
+}
+
+
+class Blarg[@specialized(Int) T: Manifest](val a: Array[T]) {
+ def m[@specialized(Int) W >: T, @specialized(Int) S](f: W => S) = f(a(0))
+}
diff --git a/test/files/run/t5284b.check b/test/files/run/t5284b.check
new file mode 100644
index 0000000..98d9bcb
--- /dev/null
+++ b/test/files/run/t5284b.check
@@ -0,0 +1 @@
+17
diff --git a/test/files/run/t5284b.scala b/test/files/run/t5284b.scala
new file mode 100644
index 0000000..a9282a8
--- /dev/null
+++ b/test/files/run/t5284b.scala
@@ -0,0 +1,28 @@
+
+
+
+
+
+
+/** Here we have a situation where a normalized method parameter `W`
+ * is used in a position which expects a type `T` - we know we can
+ * safely cast `W` to `T` whenever typebounds of `W` hold.
+ */
+object Test {
+ def main(args: Array[String]) {
+ val foo = Foo.createUnspecialized[Int]
+ println(foo.bar(17))
+ }
+}
+
+
+object Foo {
+ def createUnspecialized[T] = new Foo[T]
+}
+
+
+class Foo[@specialized(Int) T] {
+ val id: T => T = x => x
+
+ def bar[@specialized(Int) W <: T, @specialized(Int) S](w: W) = id(w)
+}
diff --git a/test/files/run/t5284c.check b/test/files/run/t5284c.check
new file mode 100644
index 0000000..00750ed
--- /dev/null
+++ b/test/files/run/t5284c.check
@@ -0,0 +1 @@
+3
diff --git a/test/files/run/t5284c.scala b/test/files/run/t5284c.scala
new file mode 100644
index 0000000..383b84c
--- /dev/null
+++ b/test/files/run/t5284c.scala
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+/** Here we have a compound type `List[W]` used in
+ * a position where `List[T]` is expected. The cast
+ * emitted in the normalized `bar` is safe because the
+ * normalized `bar` can only be called if the type
+ * bounds hold.
+ */
+object Test {
+ def main(args: Array[String]) {
+ val foo = Foo.createUnspecialized[Int]
+ println(foo.bar(List(1, 2, 3)))
+ }
+}
+
+
+object Foo {
+ def createUnspecialized[T] = new Foo[T]
+}
+
+
+class Foo[@specialized(Int) T] {
+ val len: List[T] => Int = xs => xs.length
+
+ def bar[@specialized(Int) W <: T](ws: List[W]) = len(ws)
+}
diff --git a/test/files/run/t5300.scala b/test/files/run/t5300.scala
new file mode 100644
index 0000000..073b296
--- /dev/null
+++ b/test/files/run/t5300.scala
@@ -0,0 +1,7 @@
+object Test {
+ val pf: PartialFunction[Any, Unit] = { case _ => () }
+
+ def main(args: Array[String]): Unit = {
+ pf orElse pf
+ }
+}
diff --git a/test/files/run/t5313.check b/test/files/run/t5313.check
new file mode 100644
index 0000000..7a48b2b
--- /dev/null
+++ b/test/files/run/t5313.check
@@ -0,0 +1,12 @@
+STORE_LOCAL(variable kept1)
+STORE_LOCAL(value result)
+STORE_LOCAL(variable kept1)
+STORE_LOCAL(variable kept2)
+STORE_LOCAL(value kept3)
+STORE_LOCAL(variable kept2)
+STORE_LOCAL(variable kept4)
+STORE_LOCAL(variable kept4)
+STORE_LOCAL(variable kept5)
+STORE_LOCAL(variable kept5)
+STORE_LOCAL(variable kept6)
+STORE_LOCAL(variable kept6)
diff --git a/test/files/run/t5313.scala b/test/files/run/t5313.scala
new file mode 100644
index 0000000..7da8726
--- /dev/null
+++ b/test/files/run/t5313.scala
@@ -0,0 +1,54 @@
+import scala.tools.partest.IcodeTest
+
+object Test extends IcodeTest {
+ override def printIcodeAfterPhase = "dce"
+
+ override def extraSettings: String = super.extraSettings + " -optimize"
+
+ override def code =
+ """class Foo {
+ def randomBoolean = util.Random.nextInt % 2 == 0
+ def bar = {
+ var kept1 = new Object
+ val result = new java.lang.ref.WeakReference(kept1)
+ kept1 = null // we can't eliminate this assigment because result can observe
+ // when the object has no more references. See SI-5313
+ kept1 = new Object // but we can eliminate this one because kept1 has already been clobbered
+ var erased2 = null // we can eliminate this store because it's never used
+ val erased3 = erased2 // and this
+ var erased4 = erased2 // and this
+ val erased5 = erased4 // and this
+ var kept2: Object = new Object // ultimately can't be eliminated
+ while(randomBoolean) {
+ val kept3 = kept2
+ kept2 = null // this can't, because it clobbers kept2, which is used
+ erased4 = null // safe to eliminate
+ println(kept3)
+ }
+ var kept4 = new Object // have to keep, it's used
+ try
+ println(kept4)
+ catch {
+ case _ : Throwable => kept4 = null // have to keep, it clobbers kept4 which is used
+ }
+ var kept5 = new Object
+ print(kept5)
+ kept5 = null // can't eliminate it's a clobber and it's used
+ print(kept5)
+ kept5 = null // can eliminate because we don't care about clobbers of nulls
+ while(randomBoolean) {
+ var kept6: AnyRef = null // not used, but have to keep because it clobbers the next used store
+ // on the back edge of the loop
+ kept6 = new Object // used
+ println(kept6)
+ }
+ result
+ }
+ }""".stripMargin
+
+ override def show() {
+ val storeLocal = "STORE_LOCAL"
+ val lines1 = collectIcode("") filter (_ contains storeLocal) map (x => x.drop(x.indexOf(storeLocal)))
+ println(lines1 mkString "\n")
+ }
+}
diff --git a/test/files/run/t5328.check b/test/files/run/t5328.check
new file mode 100644
index 0000000..77a4396
--- /dev/null
+++ b/test/files/run/t5328.check
@@ -0,0 +1,3 @@
+2
+1,2,8
+1,8,3
diff --git a/test/files/run/t5328.scala b/test/files/run/t5328.scala
new file mode 100644
index 0000000..12adf45
--- /dev/null
+++ b/test/files/run/t5328.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ println(Vector(1).view.updated(0,2).toList mkString ",")
+ println(Seq(1,2,3).view.updated(2,8).toList mkString ",")
+ println(List(1,2,3).view.updated(1,8).toList mkString ",")
+}
diff --git a/test/files/run/t5334_1.check b/test/files/run/t5334_1.check
new file mode 100644
index 0000000..96d80cd
--- /dev/null
+++ b/test/files/run/t5334_1.check
@@ -0,0 +1 @@
+C
\ No newline at end of file
diff --git a/test/files/run/t5334_1.scala b/test/files/run/t5334_1.scala
new file mode 100644
index 0000000..3aeb7e4
--- /dev/null
+++ b/test/files/run/t5334_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = reify {
+ class C { override def toString = "C" }
+ val ret = new C
+ ret.asInstanceOf[Object]
+ };
+
+ val toolbox = cm.mkToolBox()
+ println(toolbox.eval(code.tree))
+}
\ No newline at end of file
diff --git a/test/files/run/t5334_2.check b/test/files/run/t5334_2.check
new file mode 100644
index 0000000..613d286
--- /dev/null
+++ b/test/files/run/t5334_2.check
@@ -0,0 +1 @@
+List((C,C))
\ No newline at end of file
diff --git a/test/files/run/t5334_2.scala b/test/files/run/t5334_2.scala
new file mode 100644
index 0000000..64ee1e0
--- /dev/null
+++ b/test/files/run/t5334_2.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = reify {
+ class C { override def toString() = "C" }
+ val ret = List((new C, new C))
+ ret.asInstanceOf[List[Any]]
+ };
+
+ val toolbox = cm.mkToolBox()
+ println(toolbox.eval(code.tree))
+}
\ No newline at end of file
diff --git a/test/files/run/t5335.check b/test/files/run/t5335.check
new file mode 100644
index 0000000..0cfbf08
--- /dev/null
+++ b/test/files/run/t5335.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/t5335.scala b/test/files/run/t5335.scala
new file mode 100644
index 0000000..714846d
--- /dev/null
+++ b/test/files/run/t5335.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ println(new {def x = 2}.x)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/run/t5356.check b/test/files/run/t5356.check
new file mode 100644
index 0000000..7522e7e
--- /dev/null
+++ b/test/files/run/t5356.check
@@ -0,0 +1,6 @@
+1 java.lang.Integer
+1 java.lang.Integer
+1 scala.math.BigInt
+1 java.lang.Double
+1 java.lang.Float
+1
diff --git a/test/files/run/t5356.scala b/test/files/run/t5356.scala
new file mode 100644
index 0000000..ec17e03
--- /dev/null
+++ b/test/files/run/t5356.scala
@@ -0,0 +1,12 @@
+object Test {
+ def f(x: Any { def toInt: Int }) = println(x.toInt + " " + x.getClass.getName)
+
+ def main(args: Array[String]): Unit = {
+ f(1)
+ f(1.toInt)
+ f(BigInt(1))
+ f(1d)
+ f(1f)
+ println((1: (Any { def toInt: Int })).toInt)
+ }
+}
diff --git a/test/files/run/t5375.check b/test/files/run/t5375.check
new file mode 100644
index 0000000..7d3002f
--- /dev/null
+++ b/test/files/run/t5375.check
@@ -0,0 +1 @@
+Composite throwable
\ No newline at end of file
diff --git a/test/files/run/t5375.scala b/test/files/run/t5375.scala
new file mode 100644
index 0000000..e4b329d
--- /dev/null
+++ b/test/files/run/t5375.scala
@@ -0,0 +1,19 @@
+
+
+
+import collection.parallel.CompositeThrowable
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val foos = (1 to 1000) toSeq;
+ try {
+ foos.par.map(i => if (i % 37 == 0) sys.error("i div 37") else i)
+ } catch {
+ case CompositeThrowable(thr) => println("Composite throwable")
+ }
+ }
+
+}
diff --git a/test/files/run/t5377.check b/test/files/run/t5377.check
new file mode 100644
index 0000000..7bd0e29
--- /dev/null
+++ b/test/files/run/t5377.check
@@ -0,0 +1,18 @@
+1 List(1)
+1 List(1)
+2 List(1, 2) List(2, 1)
+2 List(1, 2) List(2, 1)
+2 List(2, 1) List(1, 2)
+2 List(2, 1) List(1, 2)
+3 List(1, 2, 3) List(1, 3, 2) List(2, 1, 3) List(2, 3, 1) List(3, 1, 2) List(3, 2, 1)
+3 List(1, 2, 3) List(1, 3, 2) List(2, 1, 3) List(2, 3, 1) List(3, 1, 2) List(3, 2, 1)
+3 List(1, 3, 2) List(1, 2, 3) List(3, 1, 2) List(3, 2, 1) List(2, 1, 3) List(2, 3, 1)
+3 List(1, 3, 2) List(1, 2, 3) List(3, 1, 2) List(3, 2, 1) List(2, 1, 3) List(2, 3, 1)
+3 List(2, 1, 3) List(2, 3, 1) List(1, 2, 3) List(1, 3, 2) List(3, 2, 1) List(3, 1, 2)
+3 List(2, 1, 3) List(2, 3, 1) List(1, 2, 3) List(1, 3, 2) List(3, 2, 1) List(3, 1, 2)
+3 List(2, 3, 1) List(2, 1, 3) List(3, 2, 1) List(3, 1, 2) List(1, 2, 3) List(1, 3, 2)
+3 List(2, 3, 1) List(2, 1, 3) List(3, 2, 1) List(3, 1, 2) List(1, 2, 3) List(1, 3, 2)
+3 List(3, 1, 2) List(3, 2, 1) List(1, 3, 2) List(1, 2, 3) List(2, 3, 1) List(2, 1, 3)
+3 List(3, 1, 2) List(3, 2, 1) List(1, 3, 2) List(1, 2, 3) List(2, 3, 1) List(2, 1, 3)
+3 List(3, 2, 1) List(3, 1, 2) List(2, 3, 1) List(2, 1, 3) List(1, 3, 2) List(1, 2, 3)
+3 List(3, 2, 1) List(3, 1, 2) List(2, 3, 1) List(2, 1, 3) List(1, 3, 2) List(1, 2, 3)
diff --git a/test/files/run/t5377.scala b/test/files/run/t5377.scala
new file mode 100644
index 0000000..2e8fb1a
--- /dev/null
+++ b/test/files/run/t5377.scala
@@ -0,0 +1,47 @@
+object Test {
+ def testPermutations1(num: Int, stream: Stream[Int]): Unit = {
+ val perm = stream.permutations
+ print(num)
+ while(perm.hasNext) {
+ print(" " + perm.next().toList)
+ }
+ println()
+ }
+ def testPermutations2(num: Int, stream: List[Int]): Unit = {
+ val perm = stream.permutations
+ print(num)
+ while(perm.hasNext) {
+ print(" " + perm.next().toList)
+ }
+ println()
+ }
+
+ def main(args: Array[String]): Unit = {
+ testPermutations1(1, Stream(1))
+ testPermutations2(1, List(1))
+
+ testPermutations1(2, Stream(1, 2))
+ testPermutations2(2, List(1, 2))
+
+ testPermutations1(2, Stream(2, 1))
+ testPermutations2(2, List(2, 1))
+
+ testPermutations1(3, Stream(1, 2, 3))
+ testPermutations2(3, List(1, 2, 3))
+
+ testPermutations1(3, Stream(1, 3, 2))
+ testPermutations2(3, List(1, 3, 2))
+
+ testPermutations1(3, Stream(2, 1, 3))
+ testPermutations2(3, List(2, 1, 3))
+
+ testPermutations1(3, Stream(2, 3, 1))
+ testPermutations2(3, List(2, 3, 1))
+
+ testPermutations1(3, Stream(3, 1, 2))
+ testPermutations2(3, List(3, 1, 2))
+
+ testPermutations1(3, Stream(3, 2, 1))
+ testPermutations2(3, List(3, 2, 1))
+ }
+}
diff --git a/test/files/run/si5380.scala b/test/files/run/t5380.scala
similarity index 100%
rename from test/files/run/si5380.scala
rename to test/files/run/t5380.scala
diff --git a/test/files/run/t5385.check b/test/files/run/t5385.check
new file mode 100644
index 0000000..1df74fc
--- /dev/null
+++ b/test/files/run/t5385.check
@@ -0,0 +1,8 @@
+[0:9] class Azz
+[0:9] class Bzz
+[0:9] class Czz
+[0:9] class Dzz
+[0:11] class Ezz
+[0:11] class Fzz
+[0:13] class Gzz
+[0:13] class Hzz
diff --git a/test/files/run/t5385.scala b/test/files/run/t5385.scala
new file mode 100644
index 0000000..b803897
--- /dev/null
+++ b/test/files/run/t5385.scala
@@ -0,0 +1,16 @@
+import scala.tools.partest._
+
+object Test extends CompilerTest {
+ import global._
+ override def extraSettings = super.extraSettings + " -Yrangepos"
+ override def sources = List(
+ "class Azz", "class Bzz ", "class Czz ", "class Dzz\n",
+ "class Ezz{}", "class Fzz{} ", "class Gzz { }", "class Hzz { } "
+ )
+ def check(source: String, unit: CompilationUnit) {
+ unit.body foreach {
+ case cdef: ClassDef => println("%-15s class %s".format(cdef.pos.show, cdef.name))
+ case _ =>
+ }
+ }
+}
diff --git a/test/files/run/t5387.scala b/test/files/run/t5387.scala
new file mode 100644
index 0000000..5d62a00
--- /dev/null
+++ b/test/files/run/t5387.scala
@@ -0,0 +1,15 @@
+/*
+ * This tests that the predicate of dropWhile is only evaluated as often as needed, see https://issues.scala-lang.org/browse/SI-5387
+ */
+import scala.collection.immutable.ListMap
+object Test extends App{
+ val subject = ListMap(1->1,2->2,3->3,4->4,5->5)
+ val result = ListMap(3->3,4->4,5->5)
+ assert( result == subject.dropWhile{
+ case (key, value) => {
+ assert( key <= 3, "predicate evaluated more often than needed, key "+key )
+ key < 3
+ }
+ }
+ )
+}
diff --git a/test/files/run/t5394.scala b/test/files/run/t5394.scala
new file mode 100644
index 0000000..1b39da3
--- /dev/null
+++ b/test/files/run/t5394.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ def f[T](l: List[T]): Int = l match { case x :: xs => f(xs) case Nil => 0 }
+ f(List.fill(10000)(0))
+}
\ No newline at end of file
diff --git a/test/files/run/t5407.check b/test/files/run/t5407.check
new file mode 100644
index 0000000..51993f0
--- /dev/null
+++ b/test/files/run/t5407.check
@@ -0,0 +1,2 @@
+2
+2
diff --git a/test/files/run/t5407.scala b/test/files/run/t5407.scala
new file mode 100644
index 0000000..35a8ec6
--- /dev/null
+++ b/test/files/run/t5407.scala
@@ -0,0 +1,17 @@
+case class Foo(private val x: Int, y: Option[Int], z: Boolean)
+
+object Test extends App {
+ def foo(x: Foo) = x match {
+ case Foo(x, Some(y), z) => y
+ case Foo(x, y, z) => 0
+ }
+ val x = Foo(1, Some(2), false)
+ println(foo(x))
+
+
+ def bar(x: Foo) = x match {
+ case Foo(x, Some(y), z) => y
+ case Foo(x, None, z) => 0
+ }
+ println(bar(x))
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/t5415.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t5415.check
diff --git a/test/files/run/t5415.scala b/test/files/run/t5415.scala
new file mode 100644
index 0000000..c12e209
--- /dev/null
+++ b/test/files/run/t5415.scala
@@ -0,0 +1,12 @@
+object Test extends App{
+ case class Queryable2[T]() { def filter(predicate: T => Boolean) = ??? }
+ trait CoffeesTable{ def sales : Int }
+ val q = Queryable2[CoffeesTable]()
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{universe => ru}
+ val code = reify{q.filter(_.sales > 5)}
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ val toolbox = cm.mkToolBox()
+ val ttree = toolbox.typeCheck(code.tree)
+}
diff --git a/test/files/run/t5418a.check b/test/files/run/t5418a.check
new file mode 100644
index 0000000..5270229
--- /dev/null
+++ b/test/files/run/t5418a.check
@@ -0,0 +1 @@
+Expr[Class[_ <: java.lang.Object]](new Object().getClass())
diff --git a/test/files/run/t5418a.scala b/test/files/run/t5418a.scala
new file mode 100644
index 0000000..90bc542
--- /dev/null
+++ b/test/files/run/t5418a.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println(scala.reflect.runtime.universe.reify(new Object().getClass))
+}
\ No newline at end of file
diff --git a/test/files/run/t5418b.check b/test/files/run/t5418b.check
new file mode 100644
index 0000000..48d82a2
--- /dev/null
+++ b/test/files/run/t5418b.check
@@ -0,0 +1,2 @@
+new Object().getClass()
+TypeRef(ThisType(java.lang), java.lang.Class, List(TypeRef(NoPrefix, newTypeName("?0"), List())))
diff --git a/test/files/run/t5418b.scala b/test/files/run/t5418b.scala
new file mode 100644
index 0000000..08e8bb1
--- /dev/null
+++ b/test/files/run/t5418b.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val untyped = reify(new Object().getClass).tree
+ val typed = tb.typeCheck(untyped)
+ println(typed)
+ println(showRaw(typed.tpe))
+}
\ No newline at end of file
diff --git a/test/files/run/t5419.check b/test/files/run/t5419.check
new file mode 100644
index 0000000..a9c0f26
--- /dev/null
+++ b/test/files/run/t5419.check
@@ -0,0 +1 @@
+5: @Foo.asInstanceOf[Int]
diff --git a/test/files/run/t5419.scala b/test/files/run/t5419.scala
new file mode 100644
index 0000000..686a79b
--- /dev/null
+++ b/test/files/run/t5419.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+
+class Foo extends annotation.StaticAnnotation
+
+object Test extends App {
+ val tree = reify{(5: @Foo).asInstanceOf[Int]}.tree
+ println(tree.toString)
+}
\ No newline at end of file
diff --git a/test/files/run/t5423.check b/test/files/run/t5423.check
new file mode 100644
index 0000000..ae3d3fb
--- /dev/null
+++ b/test/files/run/t5423.check
@@ -0,0 +1 @@
+List(table)
\ No newline at end of file
diff --git a/test/files/run/t5423.scala b/test/files/run/t5423.scala
new file mode 100644
index 0000000..c163212
--- /dev/null
+++ b/test/files/run/t5423.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+
+final class table extends annotation.StaticAnnotation
+ at table class A
+
+object Test extends App {
+ val s = cm.classSymbol(classOf[A])
+ println(s.annotations)
+}
\ No newline at end of file
diff --git a/test/files/run/t5428.check b/test/files/run/t5428.check
new file mode 100644
index 0000000..7b4b1d6
--- /dev/null
+++ b/test/files/run/t5428.check
@@ -0,0 +1 @@
+Stack(8, 7, 6, 5, 4, 3)
\ No newline at end of file
diff --git a/test/files/run/t5428.scala b/test/files/run/t5428.scala
new file mode 100644
index 0000000..106bb7f
--- /dev/null
+++ b/test/files/run/t5428.scala
@@ -0,0 +1,29 @@
+
+
+
+import collection.mutable.{Stack, StackProxy}
+
+
+
+class A extends StackProxy[Int] {
+ val self = Stack[Int]()
+}
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val a = new A
+
+ a push 3
+ a push 4
+ a push 5
+
+ a.push(6, 7, 8)
+
+ println(a)
+
+ a pop
+ }
+
+}
diff --git a/test/files/run/t5488-fn.check b/test/files/run/t5488-fn.check
new file mode 100644
index 0000000..18907d0
--- /dev/null
+++ b/test/files/run/t5488-fn.check
@@ -0,0 +1,17 @@
+B$mcII$sp
+B$mcIL$sp
+B$mcIV$sp
+B$mcLI$sp
+B
+B$mcLV$sp
+B$mcVI$sp
+B$mcVL$sp
+B$mcVV$sp
+C$mcIII$sp
+C$mcIIL$sp
+C$mcILI$sp
+C$mcILL$sp
+C$mcLII$sp
+C$mcLIL$sp
+C$mcLLI$sp
+C
diff --git a/test/files/run/t5488-fn.scala b/test/files/run/t5488-fn.scala
new file mode 100644
index 0000000..d17bcf9
--- /dev/null
+++ b/test/files/run/t5488-fn.scala
@@ -0,0 +1,27 @@
+class B[@specialized(Int, AnyRef, Unit) A, @specialized(Int, AnyRef, Unit) B](f: A => B)
+class C[@specialized(Int, AnyRef) A, @specialized(Int, AnyRef) B, @specialized(Int, AnyRef) C](f: (A, B) => C)
+
+object Test {
+ def main(args:Array[String]) {
+ def show(x: Any) = println(x.getClass.getName)
+
+ show(new B((x: Int) => 1))
+ show(new B((x: Int) => "abc"))
+ show(new B((x: Int) => ()))
+ show(new B((x: AnyRef) => 1))
+ show(new B((x: AnyRef) => "abc"))
+ show(new B((x: AnyRef) => ()))
+ show(new B((x: Unit) => 1))
+ show(new B((x: Unit) => "abc"))
+ show(new B((x: Unit) => ()))
+
+ show(new C((x: Int, y: Int) => 1))
+ show(new C((x: Int, y: Int) => "abc"))
+ show(new C((x: Int, y: AnyRef) => 1))
+ show(new C((x: Int, y: AnyRef) => "abc"))
+ show(new C((x: AnyRef, y: Int) => 1))
+ show(new C((x: AnyRef, y: Int) => "abc"))
+ show(new C((x: AnyRef, y: AnyRef) => 1))
+ show(new C((x: AnyRef, y: AnyRef) => "abc"))
+ }
+}
diff --git a/test/files/run/t5488.check b/test/files/run/t5488.check
new file mode 100644
index 0000000..ccd98c4
--- /dev/null
+++ b/test/files/run/t5488.check
@@ -0,0 +1,14 @@
+A0$mcI$sp
+A0
+B0$mcII$sp
+B0$mcIL$sp
+B0$mcLI$sp
+B0
+C0$mcIII$sp
+C0$mcIIL$sp
+C0$mcILI$sp
+C0$mcILL$sp
+C0$mcLII$sp
+C0$mcLIL$sp
+C0$mcLLI$sp
+C0
diff --git a/test/files/run/t5488.scala b/test/files/run/t5488.scala
new file mode 100644
index 0000000..7bab0cd
--- /dev/null
+++ b/test/files/run/t5488.scala
@@ -0,0 +1,26 @@
+class A0[@specialized(Int, AnyRef) A]()
+class B0[@specialized(Int, AnyRef) A, @specialized(Int, AnyRef) B]()
+class C0[@specialized(Int, AnyRef) A, @specialized(Int, AnyRef) B, @specialized(Int, AnyRef) C]()
+
+object Test {
+ def main(args:Array[String]) {
+ def show(x: Any) = println(x.getClass.getName)
+
+ show(new A0[Int]())
+ show(new A0[AnyRef]())
+
+ show(new B0[Int, Int]())
+ show(new B0[Int, AnyRef]())
+ show(new B0[AnyRef, Int]())
+ show(new B0[AnyRef, AnyRef]())
+
+ show(new C0[Int, Int, Int]())
+ show(new C0[Int, Int, AnyRef]())
+ show(new C0[Int, AnyRef, Int]())
+ show(new C0[Int, AnyRef, AnyRef]())
+ show(new C0[AnyRef, Int, Int]())
+ show(new C0[AnyRef, Int, AnyRef]())
+ show(new C0[AnyRef, AnyRef, Int]())
+ show(new C0[AnyRef, AnyRef, AnyRef]())
+ }
+}
diff --git a/test/files/run/t5500.check b/test/files/run/t5500.check
new file mode 100644
index 0000000..19c6dda
--- /dev/null
+++ b/test/files/run/t5500.check
@@ -0,0 +1,2 @@
+C1$mcLI$sp
+C2$mcLI$sp
diff --git a/test/files/run/t5500.scala b/test/files/run/t5500.scala
new file mode 100644
index 0000000..6fbe168
--- /dev/null
+++ b/test/files/run/t5500.scala
@@ -0,0 +1,12 @@
+import scala.{specialized => spec}
+
+class C1[@spec(Int, AnyRef) A, @spec(Int, AnyRef) B](v:A, w:B)
+
+class C2[@spec(Unit, Boolean, Byte, Char, Short, Int, Long, Float, Double, AnyRef) A, @spec(Unit, Boolean, Byte, Char, Short, Int, Long, Float, Double, AnyRef) B](v:A, w:B)
+
+object Test {
+ def main(args:Array[String]) {
+ println(new C1("abc", 123).getClass.getName)
+ println(new C2[String, Int]("abc", 123).getClass.getName)
+ }
+}
diff --git a/test/files/run/t5500b.check b/test/files/run/t5500b.check
new file mode 100644
index 0000000..4259b24
--- /dev/null
+++ b/test/files/run/t5500b.check
@@ -0,0 +1,28 @@
+C1A$mcLI$sp
+C1A$mcLD$sp
+C1A
+C1A$mcII$sp
+C1A$mcID$sp
+C1A$mcIL$sp
+C1A$mcDI$sp
+C1A$mcDD$sp
+C1A$mcDL$sp
+C1B$mcLI$sp
+C1B$mcLD$sp
+C1B
+C1B$mcII$sp
+C1B$mcID$sp
+C1B$mcIL$sp
+C1B$mcDI$sp
+C1B$mcDD$sp
+C1B$mcDL$sp
+C1C$mcLI$sp
+C1C$mcLI$sp
+C1C$mcLD$sp
+C1C
+C1C$mcII$sp
+C1C$mcID$sp
+C1C$mcIL$sp
+C1C$mcDI$sp
+C1C$mcDD$sp
+C1C$mcDL$sp
diff --git a/test/files/run/t5500b.scala b/test/files/run/t5500b.scala
new file mode 100644
index 0000000..32de858
--- /dev/null
+++ b/test/files/run/t5500b.scala
@@ -0,0 +1,51 @@
+import scala.{specialized => spec}
+
+class C1A[
+ @spec(Double, Int, AnyRef) A,
+ @spec(Double, Int, AnyRef) B
+]
+
+class C1B[
+ @spec(Double, Int, AnyRef) A,
+ @spec(Double, Int, AnyRef) B
+](v: A)
+
+class C1C[
+ @spec(Double, Int, AnyRef) A,
+ @spec(Double, Int, AnyRef) B
+](v:A, w:B)
+
+object Test {
+ def main(args:Array[String]) {
+ println(new C1A[String, Int].getClass.getName)
+ println(new C1A[String, Double].getClass.getName)
+ println(new C1A[String, String].getClass.getName)
+ println(new C1A[Int, Int].getClass.getName)
+ println(new C1A[Int, Double].getClass.getName)
+ println(new C1A[Int, String].getClass.getName)
+ println(new C1A[Double, Int].getClass.getName)
+ println(new C1A[Double, Double].getClass.getName)
+ println(new C1A[Double, String].getClass.getName)
+
+ println(new C1B[String, Int]("abc").getClass.getName)
+ println(new C1B[String, Double]("abc").getClass.getName)
+ println(new C1B[String, String]("abc").getClass.getName)
+ println(new C1B[Int, Int](1).getClass.getName)
+ println(new C1B[Int, Double](1).getClass.getName)
+ println(new C1B[Int, String](1).getClass.getName)
+ println(new C1B[Double, Int](1d).getClass.getName)
+ println(new C1B[Double, Double](1d).getClass.getName)
+ println(new C1B[Double, String](1d).getClass.getName)
+
+ println(new C1C("abc", 123).getClass.getName)
+ println(new C1C("abc", 123).getClass.getName)
+ println(new C1C("a", 1d).getClass.getName)
+ println(new C1C("a", "a").getClass.getName)
+ println(new C1C(1, 1).getClass.getName)
+ println(new C1C(1, 1d).getClass.getName)
+ println(new C1C(1, "a").getClass.getName)
+ println(new C1C(1d, 1).getClass.getName)
+ println(new C1C(1d, 1d).getClass.getName)
+ println(new C1C(1d, "a").getClass.getName)
+ }
+}
diff --git a/test/files/run/t5514.check b/test/files/run/t5514.check
new file mode 100644
index 0000000..c68f7c9
--- /dev/null
+++ b/test/files/run/t5514.check
@@ -0,0 +1,19 @@
+constructed reader: 10
+constructed reader: 9
+constructed reader: 8
+constructed reader: 7
+constructed reader: 6
+constructed reader: 5
+constructed reader: 4
+constructed reader: 3
+constructed reader: 2
+constructed reader: 1
+constructed reader: 0
+[0.0] parsed: List(s10, s9, s8, s7, s6, s5, s4, s3, s2, s1)
+constructed reader: 10
+constructed reader: 9
+constructed reader: 8
+constructed reader: 7
+constructed reader: 6
+constructed reader: 5
+[0.0] parsed: List(s10, s9, s8, s7, s6)
\ No newline at end of file
diff --git a/test/files/run/t5514.scala b/test/files/run/t5514.scala
new file mode 100644
index 0000000..efd5ba6
--- /dev/null
+++ b/test/files/run/t5514.scala
@@ -0,0 +1,35 @@
+
+
+
+import scala.io.Source
+import scala.util.parsing.combinator.Parsers
+import scala.util.parsing.input.Reader
+import scala.util.parsing.input.Position
+
+
+
+class DemoReader(n: Int) extends Reader[String] {
+ def atEnd = n == 0
+ def first = if (n >= 0) "s" + n else throw new IllegalArgumentException("No more input.")
+ def rest = new DemoReader(n - 1)
+ def pos = new Position {
+ def line = 0
+ def column = 0
+ def lineContents = first
+ }
+ println("constructed reader: " + n)
+}
+
+
+object Test extends App with Parsers {
+ type Elem = String
+ def startsWith(prefix: String) = acceptIf(_ startsWith prefix)("Error: " + _)
+
+ val resrep = startsWith("s").*(new DemoReader(10))
+ Console println resrep
+
+ val resrep5 = repN(5, startsWith("s"))(new DemoReader(10))
+ Console println resrep5
+}
+
+
diff --git a/test/files/run/t5527.check b/test/files/run/t5527.check
new file mode 100644
index 0000000..36bee9b
--- /dev/null
+++ b/test/files/run/t5527.check
@@ -0,0 +1,99 @@
+[[syntax trees at end of parser]] // newSource1.scala
+package <empty> {
+ object UselessComments extends scala.AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ var z = 0;
+ def test1 = {
+ object Maybe extends scala.AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ /** Some comment inside */
+ def nothing() = ()
+ };
+ ()
+ };
+ def test2 = {
+ var x = 4;
+ if (true)
+ {
+ x = 5;
+ val y = 6;
+ ()
+ }
+ else
+ ()
+ };
+ def test3 = {
+ if (true)
+ z = 3
+ else
+ ();
+ val t = 4;
+ 0.to(4).foreach(((i) => println(i)))
+ };
+ val test4 = 'a' match {
+ case ('0'| '1'| '2'| '3'| '4'| '5'| '6'| '7'| '8'| '9') => true
+ case _ => false
+ }
+ };
+ /** comments that we should keep */
+ object UsefulComments extends scala.AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ /** class A */
+ class A extends scala.AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ /** f */
+ def f(i: Int) = i;
+ /** v */
+ val v = 1;
+ /** u */
+ var u = 2
+ };
+ /** trait B */
+ abstract trait B extends scala.AnyRef {
+ def $init$() = {
+ ()
+ };
+ /** T */
+ type T >: _root_.scala.Nothing <: _root_.scala.Any;
+ /** f */
+ def f(i: Int): scala.Unit;
+ /** v */
+ val v = 1;
+ /** u */
+ var u = 2
+ };
+ /** object C */
+ object C extends scala.AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ };
+ /** f */
+ def f(i: Int) = i;
+ /** v */
+ val v = 1;
+ /** u */
+ var u = 2
+ };
+ /** class D */
+ @new deprecated("use ... instead", "2.10.0") class D extends scala.AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ }
+ }
+ }
+}
+
diff --git a/test/files/run/t5527.scala b/test/files/run/t5527.scala
new file mode 100644
index 0000000..2449ff6
--- /dev/null
+++ b/test/files/run/t5527.scala
@@ -0,0 +1,107 @@
+import scala.tools.partest._
+import java.io._
+import scala.tools.nsc._
+import scala.tools.nsc.util.CommandLineParser
+import scala.tools.nsc.doc.{Settings, DocFactory}
+import scala.tools.nsc.reporters.ConsoleReporter
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:parser -Yrangepos -Ystop-after:parser -d " + testOutput.path
+
+ override def code = """
+ // SI-5527
+ object UselessComments {
+
+ var z = 0
+
+ def test1 = {
+ /** Some comment here */
+ object Maybe {
+ /** Some comment inside */
+ def nothing() = ()
+ }
+ }
+
+ def test2 = {
+ var x = 4
+ if (true) {
+ /** Testing 123 */
+ x = 5
+ val y = 6
+ }
+ }
+
+ def test3 = {
+ if (true)
+ z = 3
+
+ /** Calculate this result. */
+ val t = 4
+ for (i <- 0 to 4)
+ println(i)
+ }
+
+ val test4 = ('a') match {
+ /** Another digit is a giveaway. */
+ case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
+ true
+ case _ =>
+ false
+ }
+ }
+
+ /** comments that we should keep */
+ object UsefulComments {
+ /** class A */
+ class A {
+ /** f */
+ def f(i: Int) = i
+ /** v */
+ val v = 1
+ /** u */
+ var u = 2
+ }
+ /** trait B */
+ trait B {
+ /** T */
+ type T
+ /** f */
+ def f(i: Int)
+ /** v */
+ val v = 1
+ /** u */
+ var u = 2
+ }
+ /** object C */
+ object C {
+ /** f */
+ def f(i: Int) = i
+ /** v */
+ val v = 1
+ /** u */
+ var u = 2
+ }
+ /** class D */
+ @deprecated("use ... instead", "2.10.0")
+ class D
+ }
+ """.trim
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ compile()
+ System.setErr(prevErr)
+ }
+
+ override def newCompiler(args: String*): Global = {
+ // we want the Scaladoc compiler here, because it keeps DocDef nodes in the tree
+ val settings = new Settings(_ => ())
+ val command = new ScalaDoc.Command((CommandLineParser tokenize extraSettings) ++ args.toList, settings)
+ new DocFactory(new ConsoleReporter(settings), settings).compiler
+ }
+
+ override def isDebug = false // so we don't get the newSettings warning
+}
diff --git a/test/files/run/t5530.check b/test/files/run/t5530.check
new file mode 100644
index 0000000..1013e33
--- /dev/null
+++ b/test/files/run/t5530.check
@@ -0,0 +1,2 @@
+something like this
+ 7 now works!.
diff --git a/test/files/pos/annotDepMethType.flags b/test/files/run/t5530.flags
similarity index 100%
copy from test/files/pos/annotDepMethType.flags
copy to test/files/run/t5530.flags
diff --git a/test/files/run/t5530.scala b/test/files/run/t5530.scala
new file mode 100644
index 0000000..c8109a4
--- /dev/null
+++ b/test/files/run/t5530.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println(s"""something like this
+ ${3+4} now works!.""")
+}
\ No newline at end of file
diff --git a/test/files/pos/annotDepMethType.flags b/test/files/run/t5532.flags
similarity index 100%
copy from test/files/pos/annotDepMethType.flags
copy to test/files/run/t5532.flags
diff --git a/test/files/run/t5532.scala b/test/files/run/t5532.scala
new file mode 100644
index 0000000..7500473
--- /dev/null
+++ b/test/files/run/t5532.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ val x = s"1"
+ val y = s"2"
+}
\ No newline at end of file
diff --git a/test/files/run/t5535.check b/test/files/run/t5535.check
new file mode 100644
index 0000000..8da9829
--- /dev/null
+++ b/test/files/run/t5535.check
@@ -0,0 +1,20 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> def h()(i: Int) = 1 + i
+h: ()(i: Int)Int
+
+scala> println(h()(5))
+6
+
+scala> val f = h() _
+f: Int => Int = <function1>
+
+scala> println(f(10))
+11
+
+scala>
+
+scala>
diff --git a/test/files/run/t5535.scala b/test/files/run/t5535.scala
new file mode 100644
index 0000000..7bc12f3
--- /dev/null
+++ b/test/files/run/t5535.scala
@@ -0,0 +1,10 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+def h()(i: Int) = 1 + i
+println(h()(5))
+val f = h() _
+println(f(10))
+ """
+}
diff --git a/test/files/run/t5537.check b/test/files/run/t5537.check
new file mode 100644
index 0000000..68c3ebf
--- /dev/null
+++ b/test/files/run/t5537.check
@@ -0,0 +1,20 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> List[Predef.type]()
+res0: List[scala.Predef.type] = List()
+
+scala> List[scala.`package`.type]()
+res1: List[scala.type] = List()
+
+scala> List[List.type]()
+res2: List[scala.collection.immutable.List.type] = List()
+
+scala> List[Set.type]()
+res3: List[Set.type] = List()
+
+scala>
+
+scala>
diff --git a/test/files/run/t5537.scala b/test/files/run/t5537.scala
new file mode 100644
index 0000000..ae88dcc
--- /dev/null
+++ b/test/files/run/t5537.scala
@@ -0,0 +1,10 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+List[Predef.type]()
+List[scala.`package`.type]()
+List[List.type]()
+List[Set.type]()
+ """
+}
diff --git a/test/files/run/t5543.check b/test/files/run/t5543.check
new file mode 100644
index 0000000..2ef2d51
--- /dev/null
+++ b/test/files/run/t5543.check
@@ -0,0 +1,9 @@
+Test, 7, 119
+m, 3, 19
+Test, 5, 85
+T
+C
+T
+T
+D
+T
diff --git a/test/files/run/t5543.scala b/test/files/run/t5543.scala
new file mode 100644
index 0000000..3684bf9
--- /dev/null
+++ b/test/files/run/t5543.scala
@@ -0,0 +1,45 @@
+
+object Test extends Function0[Int] {
+ // this and v resolve to Test.this, Test.v not A.this, A.v
+ class A(x: Function0[Int] = this)(val a: Int = v, val b: Int = v * x()) extends Function0[Int] {
+ val v = 3
+ override def toString = x.toString +", "+ a +", "+ b
+ // ordinary instance scope
+ def m(i: Int = v, y: Function0[Int] = this) = "m, "+ i +", "+ y()
+ def apply() = 19
+ }
+ object A {
+ val v = 5
+ // should happily coexist with default getters, in a happier world
+ def init(x: Function0[Int] = Test.this)(a: Int = v, b: Int = v * x()) = x.toString +", "+ a +", "+ b
+ override def toString = "A"
+ }
+ val v = 7
+ def apply() = 17
+ override def toString = "Test"
+ def main(args: Array[String]) {
+ val sut = new A()()
+ println(sut.toString)
+ println(sut.m())
+ println(A.init()())
+
+ println((new T.C()).x)
+ println((new T.D(0,0)).x)
+ }
+}
+
+object T {
+ override def toString = "T"
+
+ // `this` refers to T
+ class C(val x: Any = {println(this); this}) { // prints T
+ println(this) // prints C
+ override def toString() = "C"
+ }
+
+ class D(val x: Any) {
+ override def toString() = "D"
+ // `this` refers again to T
+ def this(a: Int, b: Int, c: Any = {println(this); this}) { this(c); println(this) } // prints T, then prints D
+ }
+}
diff --git a/test/files/run/bug4570.check b/test/files/run/t5544.check
similarity index 100%
copy from test/files/run/bug4570.check
copy to test/files/run/t5544.check
diff --git a/test/files/run/t5544/Api_1.scala b/test/files/run/t5544/Api_1.scala
new file mode 100644
index 0000000..b4c9286
--- /dev/null
+++ b/test/files/run/t5544/Api_1.scala
@@ -0,0 +1,8 @@
+import scala.annotation.StaticAnnotation
+
+class ann(val bar: Any) extends StaticAnnotation
+
+object Api {
+ @ann({def foo = "foo!!"})
+ def foo = println("foo")
+}
diff --git a/test/files/run/t5544/Test_2.scala b/test/files/run/t5544/Test_2.scala
new file mode 100644
index 0000000..285f895
--- /dev/null
+++ b/test/files/run/t5544/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Api.foo
+}
diff --git a/test/files/jvm/bug680.check b/test/files/run/t5545.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t5545.check
diff --git a/test/files/run/t5545.scala b/test/files/run/t5545.scala
new file mode 100644
index 0000000..7efa6d8
--- /dev/null
+++ b/test/files/run/t5545.scala
@@ -0,0 +1,27 @@
+import scala.tools.partest._
+import java.io._
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -d " + testOutput.path + " -cp " + testOutput.path
+
+ override def code = """
+ // SI-5545
+ trait F[@specialized(Int) T1, R] {
+ def f(v1: T1): R
+ def g = v1 => f(v1)
+ }
+ """.trim
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ compile()
+ // the bug manifests at the second compilation, when the bytecode is already there
+ compile()
+ System.setErr(prevErr)
+ }
+
+ override def isDebug = false // so we don't get the newSettings warning
+}
diff --git a/test/files/run/t5552.check b/test/files/run/t5552.check
new file mode 100644
index 0000000..a19a608
--- /dev/null
+++ b/test/files/run/t5552.check
@@ -0,0 +1,2 @@
+(3,3)
+(3.0,3.0)
diff --git a/test/files/run/t5552.scala b/test/files/run/t5552.scala
new file mode 100644
index 0000000..afb8a1f
--- /dev/null
+++ b/test/files/run/t5552.scala
@@ -0,0 +1,10 @@
+class C[@specialized(Int) A](a:A) {
+ lazy val b = (a, a)
+ def c = b
+}
+object Test {
+ def main(args:Array[String]) {
+ println(new C(3).c)
+ println(new C(3.0).c)
+ }
+}
diff --git a/test/files/run/t5568.check b/test/files/run/t5568.check
new file mode 100644
index 0000000..67aaf16
--- /dev/null
+++ b/test/files/run/t5568.check
@@ -0,0 +1,9 @@
+void
+int
+class scala.runtime.BoxedUnit
+class scala.runtime.BoxedUnit
+class java.lang.Integer
+class java.lang.Integer
+5
+5
+5
diff --git a/test/files/run/t5568.scala b/test/files/run/t5568.scala
new file mode 100644
index 0000000..14599d9
--- /dev/null
+++ b/test/files/run/t5568.scala
@@ -0,0 +1,16 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ // these should give unboxed results
+ println(().getClass)
+ println(5.getClass)
+ // these should give boxed results
+ println(().asInstanceOf[AnyRef with Unit].getClass)
+ println(().asInstanceOf[Unit with AnyRef].getClass)
+ println(5.asInstanceOf[AnyRef with Int].getClass)
+ println(5.asInstanceOf[Int with AnyRef].getClass)
+ //make sure ## wasn't broken
+ println(5.##)
+ println((5.asInstanceOf[AnyRef]).##)
+ println((5:Any).##)
+ }
+}
diff --git a/test/files/run/t5577.check b/test/files/run/t5577.check
new file mode 100644
index 0000000..3eca387
--- /dev/null
+++ b/test/files/run/t5577.check
@@ -0,0 +1,11 @@
+Received a size hint: 10
+0
+1
+2
+3
+4
+5
+6
+7
+8
+9
\ No newline at end of file
diff --git a/test/files/run/t5577.scala b/test/files/run/t5577.scala
new file mode 100644
index 0000000..b5d6d8c
--- /dev/null
+++ b/test/files/run/t5577.scala
@@ -0,0 +1,27 @@
+
+
+
+import collection._
+
+
+
+object Test {
+
+ class AlarmingBuffer[T] extends mutable.ArrayBuffer[T] {
+ override def sizeHint(x: Int) {
+ println("Received a size hint: " + x)
+ super.sizeHint(x)
+ }
+ }
+
+ def main(args: Array[String]) {
+ val iteratorBuilder = (new AlarmingBuffer[Int]) mapResult {
+ res => res.iterator
+ }
+
+ iteratorBuilder.sizeHint(10)
+ iteratorBuilder ++= (0 until 10)
+ iteratorBuilder.result.foreach(println)
+ }
+
+}
diff --git a/test/files/run/t5583.check b/test/files/run/t5583.check
new file mode 100644
index 0000000..39b969f
--- /dev/null
+++ b/test/files/run/t5583.check
@@ -0,0 +1,20 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> var s = 0
+s: Int = 0
+
+scala> for (i <- 1 to 10) {s += i}
+
+scala> for (i <- 1 to 10) {s += i}
+
+scala> for (i <- 1 to 10) {s += i}
+
+scala> println(s)
+165
+
+scala>
+
+scala>
diff --git a/test/files/run/t5583.scala b/test/files/run/t5583.scala
new file mode 100644
index 0000000..8561a59
--- /dev/null
+++ b/test/files/run/t5583.scala
@@ -0,0 +1,11 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+var s = 0
+for (i <- 1 to 10) {s += i}
+for (i <- 1 to 10) {s += i}
+for (i <- 1 to 10) {s += i}
+println(s)
+ """
+}
diff --git a/test/files/run/t5588.check b/test/files/run/t5588.check
new file mode 100644
index 0000000..bb101b6
--- /dev/null
+++ b/test/files/run/t5588.check
@@ -0,0 +1,2 @@
+true
+true
diff --git a/test/files/run/t5588.scala b/test/files/run/t5588.scala
new file mode 100644
index 0000000..f214d16
--- /dev/null
+++ b/test/files/run/t5588.scala
@@ -0,0 +1,14 @@
+object Test {
+ object MyEnum extends Enumeration {
+ val Foo = Value(2000000000)
+ val Bar = Value(-2000000000)
+ val X = Value(Integer.MAX_VALUE)
+ val Y = Value(Integer.MIN_VALUE)
+ }
+
+ import MyEnum._
+ def main(args: Array[String]) {
+ println(Foo > Bar)
+ println(X > Y)
+ }
+}
diff --git a/test/files/run/t5590.check b/test/files/run/t5590.check
new file mode 100644
index 0000000..ad4a2ee
--- /dev/null
+++ b/test/files/run/t5590.check
@@ -0,0 +1,4 @@
+Map(a -> a, b -> b, c -> c)
+Map(a -> a, b -> b, c -> c)
+Set(a, b, c, d, e)
+Set(a, b, c, d, e)
\ No newline at end of file
diff --git a/test/files/run/t5590.scala b/test/files/run/t5590.scala
new file mode 100644
index 0000000..9c806e0
--- /dev/null
+++ b/test/files/run/t5590.scala
@@ -0,0 +1,31 @@
+
+
+
+import java.io._
+import collection._
+
+
+
+object Test {
+
+ def check(obj: AnyRef) {
+ println(obj)
+
+ val bos = new ByteArrayOutputStream()
+ val out = new ObjectOutputStream(bos)
+ out.writeObject(obj)
+ val arr = bos.toByteArray()
+ val in = new ObjectInputStream(new ByteArrayInputStream(arr))
+ val deser = in.readObject()
+
+ println(deser)
+ }
+
+ def main(args: Array[String]) {
+ val lhm = mutable.LinkedHashMap("a" -> "a", "b" -> "b", "c" -> "c")
+ val lhs = mutable.LinkedHashSet("a", "b", "c", "d", "e")
+ check(lhm)
+ check(lhs)
+ }
+
+}
diff --git a/test/files/run/t5603.check b/test/files/run/t5603.check
new file mode 100644
index 0000000..3f19a0a
--- /dev/null
+++ b/test/files/run/t5603.check
@@ -0,0 +1,29 @@
+[[syntax trees at end of parser]] // newSource1.scala
+[0:241]package [0:0]<empty> {
+ [0:82]abstract trait Greeting extends [15:82][83]scala.AnyRef {
+ [15]def $init$() = [15]{
+ [15]()
+ };
+ [23:39]val name: [33:39]String;
+ [46:76]val msg = [56:76][56:72][56:71]"How are you, ".$plus([72:76]name)
+ };
+ [87:209]class C extends [94:209][151:159]Greeting {
+ [119:139]val nameElse = _;
+ [95:101]<paramaccessor> private[this] val i: [98:101]Int = _;
+ <119:139>def <init>([95]i: [98]Int) = <119:139>{
+ <119:139>val nameElse = <134:139>"Bob";
+ [94][94][94]super.<init>();
+ [94]()
+ };
+ [168:184]val name = [179:184]"avc";
+ [191:203][191:198]println([199:202]msg)
+ };
+ [215:241]object Test extends [227:241][235:238]App {
+ [227]def <init>() = [227]{
+ [227][227][227]super.<init>();
+ [227]()
+ };
+ [NoPosition]<empty>
+ }
+}
+
diff --git a/test/files/run/t5603.scala b/test/files/run/t5603.scala
new file mode 100644
index 0000000..60dfd01
--- /dev/null
+++ b/test/files/run/t5603.scala
@@ -0,0 +1,42 @@
+import scala.tools.partest._
+import java.io._
+import scala.tools.nsc._
+import scala.tools.nsc.util.CommandLineParser
+import scala.tools.nsc.{Global, Settings, CompilerCommand}
+import scala.tools.nsc.reporters.ConsoleReporter
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:parser -Ystop-after:parser -d " + testOutput.path
+
+ override def code = """
+ trait Greeting {
+ val name: String
+ val msg = "How are you, "+name
+ }
+ class C(i: Int) extends {
+ val nameElse = "Bob"
+ } with Greeting {
+ val name = "avc"
+ println(msg)
+ }
+
+ object Test extends App {}
+ """.trim
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ compile()
+ System.setErr(prevErr)
+ }
+
+ override def newCompiler(args: String*): Global = {
+
+ val settings = new Settings()
+ settings.Xprintpos.value = true
+ val command = new CompilerCommand((CommandLineParser tokenize extraSettings) ++ args.toList, settings)
+ new Global(command.settings, new ConsoleReporter(settings)) with interactive.RangePositions
+ }
+}
diff --git a/test/files/run/t5604.check b/test/files/run/t5604.check
new file mode 100644
index 0000000..53a2fc8
--- /dev/null
+++ b/test/files/run/t5604.check
@@ -0,0 +1,8 @@
+long
+double
+long
+double
+long
+double
+long
+double
diff --git a/test/files/run/t5604.scala b/test/files/run/t5604.scala
new file mode 100644
index 0000000..a06c8aa
--- /dev/null
+++ b/test/files/run/t5604.scala
@@ -0,0 +1,50 @@
+// a.scala
+// Fri Jan 13 11:31:47 PST 2012
+
+package foo {
+ object regular extends Duh {
+ def buh(n: Long) = println("long")
+ def buh(n: Double) = println("double")
+ }
+ class regular {
+ import regular._
+
+ duh(33L)
+ duh(3.0d)
+ foo.regular.duh(33L)
+ foo.regular.duh(3.0d)
+ buh(66L)
+ buh(6.0d)
+ foo.regular.buh(66L)
+ foo.regular.buh(6.0d)
+ }
+
+ trait Duh {
+ def duh(n: Long) = println("long")
+ def duh(n: Double) = println("double")
+ }
+ package object bar extends Duh {
+ def buh(n: Long) = println("long")
+ def buh(n: Double) = println("double")
+ }
+ package bar {
+ object Main {
+ def main(args:Array[String]) {
+ duh(33L)
+ duh(3.0d)
+ foo.bar.duh(33L)
+ foo.bar.duh(3.0d)
+ buh(66L)
+ buh(6.0d)
+ foo.bar.buh(66L)
+ foo.bar.buh(6.0d)
+ }
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ foo.bar.Main.main(null)
+ }
+}
diff --git a/test/files/run/t5608.check b/test/files/run/t5608.check
new file mode 100644
index 0000000..ba70d21
--- /dev/null
+++ b/test/files/run/t5608.check
@@ -0,0 +1 @@
+A at 6
diff --git a/test/files/run/t5608.scala b/test/files/run/t5608.scala
new file mode 100644
index 0000000..19b3681
--- /dev/null
+++ b/test/files/run/t5608.scala
@@ -0,0 +1,12 @@
+object Test {
+ def main(args:Array[String]) {
+ val ns = Array(3L, 3L, 3L)
+ val a1: A = new A(ns(0))
+ val a2: A = new A(ns(0))
+ println(a1 + a2)
+ }
+}
+
+class A(val u: Long) extends AnyVal {
+ def +(other: A) = new A(other.u + u)
+}
diff --git a/test/files/run/t5610.check b/test/files/run/t5610.check
new file mode 100644
index 0000000..023f18d
--- /dev/null
+++ b/test/files/run/t5610.check
@@ -0,0 +1,6 @@
+some string
+some string
+some string
+some string
+List(2, 3)
+List(5, 6)
diff --git a/test/files/run/t5610.scala b/test/files/run/t5610.scala
new file mode 100644
index 0000000..f62b2df
--- /dev/null
+++ b/test/files/run/t5610.scala
@@ -0,0 +1,30 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ var test: String = null
+ val fun1: Int => () => Unit = foo(test) _
+ val fun2: Int => () => Unit = foo(test)(_)
+ val fun3: Int => () => Unit = {
+ lazy val eta1: String = test
+ (dummy: Int) => foo(eta1)(dummy)
+ }
+ val fun4: Int => () => Unit = {
+ val eta1: () => String = () => test
+ (dummy: Int) => foo(eta1())(dummy)
+ }
+ test = "some string"
+ fun1(1)()
+ fun2(1)()
+ fun3(1)()
+ fun4(1)()
+
+ val f: (String, Int*) => Unit = m(2, 3)
+ f("", 5, 6)
+ }
+
+ def foo(s: => String)(dummy: Int) = () => println(s)
+
+ def m(a: Int*)(z: String, b: Int*) {
+ println(a.toList)
+ println(b.toList)
+ }
+}
diff --git a/test/files/run/t5612.check b/test/files/run/t5612.check
new file mode 100644
index 0000000..9d19cca
--- /dev/null
+++ b/test/files/run/t5612.check
@@ -0,0 +1,4 @@
+START for List(Two, Two, One, Three)
+TWO
+TWO
+ONE
diff --git a/test/files/run/t5612.scala b/test/files/run/t5612.scala
new file mode 100644
index 0000000..48b3093
--- /dev/null
+++ b/test/files/run/t5612.scala
@@ -0,0 +1,28 @@
+object L extends Enumeration {
+ val One, Two, Three = Value
+}
+
+class Foo {
+ def foo(xs: List[L.Value]) {
+ import scala.util.control.Breaks.{break, breakable}
+ println("START for " + xs)
+ breakable {
+ for (x <- xs) {
+ x match {
+ case L.One => println("ONE"); return
+ case L.Two => println("TWO")
+ case L.Three => println("THREE"); break
+ }
+ }
+ }
+ println("FINISH")
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ val f = new Foo()
+ val l = List(L.Two, L.Two, L.One, L.Three)
+ f.foo(l)
+ }
+}
diff --git a/test/files/run/t5614.check b/test/files/run/t5614.check
new file mode 100644
index 0000000..f659f2d
--- /dev/null
+++ b/test/files/run/t5614.check
@@ -0,0 +1,3 @@
+3
+a
+b
diff --git a/test/files/run/t5614.flags b/test/files/run/t5614.flags
new file mode 100644
index 0000000..48fd867
--- /dev/null
+++ b/test/files/run/t5614.flags
@@ -0,0 +1 @@
+-Xexperimental
diff --git a/test/files/run/t5614.scala b/test/files/run/t5614.scala
new file mode 100644
index 0000000..7c85c33
--- /dev/null
+++ b/test/files/run/t5614.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ val str = s"a\nb"
+ println(str.length)
+ println(str)
+}
diff --git a/test/files/run/t5629.check b/test/files/run/t5629.check
new file mode 100644
index 0000000..6a2d630
--- /dev/null
+++ b/test/files/run/t5629.check
@@ -0,0 +1,2 @@
+int child got: 33
+any child got: 33
diff --git a/test/files/run/t5629.scala b/test/files/run/t5629.scala
new file mode 100644
index 0000000..69feddd
--- /dev/null
+++ b/test/files/run/t5629.scala
@@ -0,0 +1,36 @@
+
+
+
+import scala.{specialized => spec}
+
+
+
+trait GrandParent[@spec(Int) -A] {
+ def foo(a: A): Unit
+ def bar[B <: A](b: B): Unit = println("grandparent got: %s" format b)
+}
+
+
+trait Parent[@spec(Int) -A] extends GrandParent[A] {
+ def foo(a: A) = bar(a)
+}
+
+
+class IntChild extends Parent[Int] {
+ override def bar[B <: Int](b: B): Unit = println("int child got: %s" format b)
+}
+
+
+class AnyChild extends Parent[Any] {
+ override def bar[B <: Any](b: B): Unit = println("any child got: %s" format b)
+}
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ new IntChild().foo(33)
+ new AnyChild().foo(33)
+ }
+
+}
diff --git a/test/files/run/t5629b.check b/test/files/run/t5629b.check
new file mode 100644
index 0000000..1bc0248
--- /dev/null
+++ b/test/files/run/t5629b.check
@@ -0,0 +1,10 @@
+=== pf(1):
+MySmartPF.apply entered...
+newPF.applyOrElse entered...
+default
+scala.MatchError: () (of class scala.runtime.BoxedUnit)
+=== pf(42):
+MySmartPF.apply entered...
+newPF.applyOrElse entered...
+ok
+=== done
diff --git a/test/files/run/t5629b.scala b/test/files/run/t5629b.scala
new file mode 100644
index 0000000..6c90808
--- /dev/null
+++ b/test/files/run/t5629b.scala
@@ -0,0 +1,41 @@
+
+
+
+
+
+object Test extends App {
+
+ trait MyPF[@specialized(Int) -A] extends (A => Unit) {
+ def isDefinedAt(x: A): Boolean
+ def applyOrElse[A1 <: A](x: A1, default: A1 => Unit): Unit = {
+ println("MyPF.applyOrElse entered...")
+ if (isDefinedAt(x)) apply(x) else default(x)
+ }
+ }
+
+ trait MySmartPF[@specialized(Int) -A] extends MyPF[A] {
+ def apply(x: A): Unit = {
+ println("MySmartPF.apply entered...")
+ applyOrElse(x, { _: Any => throw new MatchError })
+ }
+ }
+
+ type T = Int
+ //type T = Any
+
+ def newPF(test: T): MyPF[T] = new MySmartPF[T] {
+ def isDefinedAt(x: T): Boolean = x != test
+ override def applyOrElse[A1 <: T](x: A1, default: A1 => Unit): Unit = {
+ println("newPF.applyOrElse entered...")
+ if (x != test) { println("ok"); () } else { println("default"); default(x) }
+ }
+ }
+
+ val pf = newPF(1)
+ println("=== pf(1):")
+ try { pf(1) } catch { case x => println(x) }
+ println("=== pf(42):")
+ pf(42)
+ println("=== done")
+
+}
diff --git a/test/files/run/t5648.check b/test/files/run/t5648.check
new file mode 100644
index 0000000..1140ff5
--- /dev/null
+++ b/test/files/run/t5648.check
@@ -0,0 +1,4 @@
+true
+true
+true
+true
diff --git a/test/files/neg/caseinherit.flags b/test/files/run/t5648.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/run/t5648.flags
diff --git a/test/files/run/t5648.scala b/test/files/run/t5648.scala
new file mode 100644
index 0000000..c5cea9e
--- /dev/null
+++ b/test/files/run/t5648.scala
@@ -0,0 +1,10 @@
+case class C(val s: Int*)
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(new C(1, 3, 7) == new C(1, 3, 7))
+ println(new C(1, 3, 7) == C(1, 3, 7))
+ println(C(1, 3, 7) == new C(1, 3, 7))
+ println(C(1, 3, 7) == C(1, 3, 7))
+ }
+}
diff --git a/test/files/run/t5652.check b/test/files/run/t5652.check
new file mode 100644
index 0000000..11438ef
--- /dev/null
+++ b/test/files/run/t5652.check
@@ -0,0 +1,8 @@
+public static final int T1$class.g$1(T1)
+public static int T1$class.f0(T1)
+public static void T1$class.$init$(T1)
+public final int A1.A1$$g$2()
+public int A1.f1()
+public final int A2.A2$$g$1()
+public int A2.f0()
+public int A2.f2()
diff --git a/test/files/run/t5652/t5652_1.scala b/test/files/run/t5652/t5652_1.scala
new file mode 100644
index 0000000..5343f26
--- /dev/null
+++ b/test/files/run/t5652/t5652_1.scala
@@ -0,0 +1,6 @@
+trait T1 {
+ def f0 = { def g = 1 ; class A { def a = g } ; g ; new A().a }
+}
+class A1 {
+ def f1 = { def g = 1 ; class A { def a = g } ; g ; new A().a }
+}
diff --git a/test/files/run/t5652/t5652_2.scala b/test/files/run/t5652/t5652_2.scala
new file mode 100644
index 0000000..765d16f
--- /dev/null
+++ b/test/files/run/t5652/t5652_2.scala
@@ -0,0 +1,9 @@
+class A2 extends A1 with T1{
+ def f2 = { def g = 5 ; class A { def a = g }; g ; new A().a }
+}
+
+object Test extends A2 {
+ def main(args: Array[String]) {
+ println(Seq(Class.forName(classOf[T1].getName + "$class"), classOf[A1], classOf[A2]).flatMap(_.getDeclaredMethods.map(_.toString).sorted).mkString("\n"))
+ }
+}
diff --git a/test/files/run/t5652b.check b/test/files/run/t5652b.check
new file mode 100644
index 0000000..ca9d0a7
--- /dev/null
+++ b/test/files/run/t5652b.check
@@ -0,0 +1,4 @@
+private final int A1.g$1()
+public int A1.f1()
+private final int A2.g$1()
+public int A2.f2()
diff --git a/test/files/run/t5652b/t5652b_1.scala b/test/files/run/t5652b/t5652b_1.scala
new file mode 100644
index 0000000..72ba5dc
--- /dev/null
+++ b/test/files/run/t5652b/t5652b_1.scala
@@ -0,0 +1,3 @@
+class A1 {
+ def f1 = { def g = 5 ; class A { def a = 0 } ; new A; g }
+}
diff --git a/test/files/run/t5652b/t5652b_2.scala b/test/files/run/t5652b/t5652b_2.scala
new file mode 100644
index 0000000..113736a
--- /dev/null
+++ b/test/files/run/t5652b/t5652b_2.scala
@@ -0,0 +1,9 @@
+class A2 extends A1 {
+ def f2 = { def g = 5 ; class A { def a = 0 } ; new A; g }
+}
+
+object Test extends A2 {
+ def main(args: Array[String]) {
+ println(Seq(classOf[A1], classOf[A2]).flatMap(_.getDeclaredMethods.map(_.toString).sorted).mkString("\n"))
+ }
+}
diff --git a/test/files/run/t5652c.check b/test/files/run/t5652c.check
new file mode 100644
index 0000000..3b889e0
--- /dev/null
+++ b/test/files/run/t5652c.check
@@ -0,0 +1,6 @@
+public final int A1.A1$$g$1()
+public final int A1.A1$$g$2()
+public int A1.f1()
+public int A1.f2()
+1
+2
diff --git a/test/files/run/t5652c/t5652c.scala b/test/files/run/t5652c/t5652c.scala
new file mode 100644
index 0000000..c977483
--- /dev/null
+++ b/test/files/run/t5652c/t5652c.scala
@@ -0,0 +1,10 @@
+class A1 {
+ def f1 = { def g = 1 ; class A { def a = g } ; new A().a }
+ def f2 = { def g = 2 ; class A { def a = g } ; new A().a }
+}
+
+object Test extends App {
+ println(classOf[A1].getDeclaredMethods.map(_.toString).sorted.mkString("\n"))
+ println(new A1().f1)
+ println(new A1().f2)
+}
diff --git a/test/files/run/t5655.check b/test/files/run/t5655.check
new file mode 100644
index 0000000..43ebd50
--- /dev/null
+++ b/test/files/run/t5655.check
@@ -0,0 +1,30 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> object x { def x={} }
+defined module x
+
+scala> import x._
+import x._
+
+scala> x
+<console>:12: error: reference to x is ambiguous;
+it is imported twice in the same scope by
+import x._
+and import x
+ x
+ ^
+
+scala> x
+<console>:12: error: reference to x is ambiguous;
+it is imported twice in the same scope by
+import x._
+and import x
+ x
+ ^
+
+scala>
+
+scala>
diff --git a/test/files/run/t5655.scala b/test/files/run/t5655.scala
new file mode 100644
index 0000000..b15feb7
--- /dev/null
+++ b/test/files/run/t5655.scala
@@ -0,0 +1,10 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+object x { def x={} }
+import x._
+x
+x
+ """
+}
diff --git a/test/files/run/t5656.check b/test/files/run/t5656.check
new file mode 100644
index 0000000..9543ee7
--- /dev/null
+++ b/test/files/run/t5656.check
@@ -0,0 +1 @@
+List(1, 2, 3)_List(a, b, c)
\ No newline at end of file
diff --git a/test/files/run/t5656.scala b/test/files/run/t5656.scala
new file mode 100644
index 0000000..f5ea147
--- /dev/null
+++ b/test/files/run/t5656.scala
@@ -0,0 +1,11 @@
+
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ println(Seq(List('1', '2', '3'), List('a', 'b', 'c')).view.addString(new StringBuilder, "_"))
+ }
+
+}
diff --git a/test/files/run/t5676.check b/test/files/run/t5676.check
new file mode 100644
index 0000000..3b562d3
--- /dev/null
+++ b/test/files/run/t5676.check
@@ -0,0 +1,3 @@
+ok
+false
+true
diff --git a/test/files/pos/annotDepMethType.flags b/test/files/run/t5676.flags
similarity index 100%
copy from test/files/pos/annotDepMethType.flags
copy to test/files/run/t5676.flags
diff --git a/test/files/run/t5676.scala b/test/files/run/t5676.scala
new file mode 100644
index 0000000..b643c30
--- /dev/null
+++ b/test/files/run/t5676.scala
@@ -0,0 +1,24 @@
+import java.lang.reflect.Modifier
+
+class Bar[T]
+
+class Foo[T] {
+ object A extends Bar[T]
+}
+
+class Baz[S] extends Foo[S] {
+ override object A extends Bar[S] {
+ def foo(): String = "ok"
+ }
+}
+
+object Test {
+
+ def main(a: Array[String]) {
+ val b = new Baz[Any]
+ println(b.A.foo())
+ println(Modifier.isFinal(classOf[Baz[Any]].getModifiers()))
+ println(Modifier.isFinal(Test.getClass.getModifiers()))
+ }
+
+}
diff --git a/test/files/run/t5680.check b/test/files/run/t5680.check
new file mode 100644
index 0000000..0d825ab
--- /dev/null
+++ b/test/files/run/t5680.check
@@ -0,0 +1,3 @@
+[Lscala.runtime.BoxedUnit
+()
+()
diff --git a/test/files/run/t5680.scala b/test/files/run/t5680.scala
new file mode 100644
index 0000000..f61cbd6
--- /dev/null
+++ b/test/files/run/t5680.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+ val x = Array[Unit]((), ())
+ println(x.toString.substring(0, x.toString.indexOf(";")))
+ println(x(0))
+ x(1) = ()
+ println(x(1))
+}
\ No newline at end of file
diff --git a/test/files/run/t5688.check b/test/files/run/t5688.check
new file mode 100644
index 0000000..2c84f9e
--- /dev/null
+++ b/test/files/run/t5688.check
@@ -0,0 +1 @@
+Vector(ta, tb, tab)
diff --git a/test/files/run/t5688.scala b/test/files/run/t5688.scala
new file mode 100644
index 0000000..f99bfb4
--- /dev/null
+++ b/test/files/run/t5688.scala
@@ -0,0 +1,23 @@
+object Test extends App {
+ trait T
+
+ trait TA
+ trait TB
+
+ class A extends T with TA
+ class B extends T with TB
+ class AB extends T with TA with TB
+ // Matching on _: TA with TB
+
+ val li: Vector[T] = Vector(new A, new B, new AB)
+
+ val matched = (for (l <- li) yield {
+ l match {
+ case _: TA with TB => "tab"
+ case _: TA => "ta"
+ case _: TB => "tb"
+ }
+ })
+
+ println(matched)
+}
\ No newline at end of file
diff --git a/test/files/run/t5699.check b/test/files/run/t5699.check
new file mode 100755
index 0000000..df19644
--- /dev/null
+++ b/test/files/run/t5699.check
@@ -0,0 +1,11 @@
+[[syntax trees at end of parser]] // annodef.java
+package <empty> {
+ object MyAnnotation extends {
+ def <init>() = _
+ };
+ class MyAnnotation extends scala.annotation.Annotation with _root_.java.lang.annotation.Annotation with scala.annotation.ClassfileAnnotation {
+ def <init>() = _;
+ def value(): String
+ }
+}
+
diff --git a/test/files/run/t5699.scala b/test/files/run/t5699.scala
new file mode 100755
index 0000000..5cef67e
--- /dev/null
+++ b/test/files/run/t5699.scala
@@ -0,0 +1,24 @@
+import scala.tools.partest.DirectTest
+import scala.tools.nsc.util.BatchSourceFile
+
+object Test extends DirectTest {
+ // Java code
+ override def code = """
+ |public @interface MyAnnotation { String value(); }
+ """.stripMargin
+
+ override def extraSettings: String = "-usejavacp -Ystop-after:typer -Xprint:parser"
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ compile()
+ System.setErr(prevErr)
+ }
+
+ override def newSources(sourceCodes: String*) = {
+ assert(sourceCodes.size == 1)
+ List(new BatchSourceFile("annodef.java", sourceCodes(0)))
+ }
+}
diff --git a/test/files/run/t5704.check b/test/files/run/t5704.check
new file mode 100644
index 0000000..102e320
--- /dev/null
+++ b/test/files/run/t5704.check
@@ -0,0 +1 @@
+String
diff --git a/test/files/run/t5704.flags b/test/files/run/t5704.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/t5704.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/t5704.scala b/test/files/run/t5704.scala
new file mode 100644
index 0000000..ddcbcc2
--- /dev/null
+++ b/test/files/run/t5704.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ class MyQuerycollection{
+ def findUserByName( name:String ) = {
+ val tree = reify{ "test" == name }.tree
+ val toolbox = cm.mkToolBox()
+ toolbox.typeCheck(tree) match{
+ case Apply(Select(lhs,op),rhs::Nil) =>
+ println(rhs.tpe)
+ }
+ }
+ }
+ val qc = new MyQuerycollection
+ qc.findUserByName("some value")
+}
\ No newline at end of file
diff --git a/test/files/run/t5710-1.check b/test/files/run/t5710-1.check
new file mode 100644
index 0000000..eac2025
--- /dev/null
+++ b/test/files/run/t5710-1.check
@@ -0,0 +1 @@
+evaluated = (abc,abc)
diff --git a/test/files/run/t5710-1.scala b/test/files/run/t5710-1.scala
new file mode 100644
index 0000000..12bd858
--- /dev/null
+++ b/test/files/run/t5710-1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = reify {
+ val (x, y) = ("abc": Any) match { case x => (x, x) }
+ (x, y)
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/t5710-2.check b/test/files/run/t5710-2.check
new file mode 100644
index 0000000..eac2025
--- /dev/null
+++ b/test/files/run/t5710-2.check
@@ -0,0 +1 @@
+evaluated = (abc,abc)
diff --git a/test/files/run/t5710-2.scala b/test/files/run/t5710-2.scala
new file mode 100644
index 0000000..6d2129c
--- /dev/null
+++ b/test/files/run/t5710-2.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = reify {
+ val (x, y) = "abc" match { case x => (x, x) }
+ (x, y)
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+}
\ No newline at end of file
diff --git a/test/files/run/t5713.check b/test/files/run/t5713.check
new file mode 100644
index 0000000..1419eb9
--- /dev/null
+++ b/test/files/run/t5713.check
@@ -0,0 +1 @@
+err
diff --git a/test/files/run/t5713.flags b/test/files/run/t5713.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/t5713.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/t5713/Impls_Macros_1.scala b/test/files/run/t5713/Impls_Macros_1.scala
new file mode 100644
index 0000000..12c3da2
--- /dev/null
+++ b/test/files/run/t5713/Impls_Macros_1.scala
@@ -0,0 +1,28 @@
+package m
+
+import language.experimental.macros
+import scala.reflect.macros.Context
+
+object Level extends Enumeration {
+ val Error = Value(5)
+}
+
+object Logger {
+ def error(message: String): Unit = macro LoggerMacros.error
+}
+
+private object LoggerMacros {
+
+ type LoggerContext = Context { type PrefixType = Logger.type }
+
+ def error(c: LoggerContext)(message: c.Expr[String]): c.Expr[Unit] =
+ log(c)(c.universe.reify(Level.Error), message)
+
+ private def log(c: LoggerContext)(level: c.Expr[Level.Value], message: c.Expr[String]): c.Expr[Unit] =
+// was: if (level.splice.id < 4) // TODO Remove hack!
+ if (c.eval(level).id < 4) // TODO Remove hack!
+ c.universe.reify(())
+ else {
+ c.universe.reify(println(message.splice))
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t5713/Test_2.scala b/test/files/run/t5713/Test_2.scala
new file mode 100644
index 0000000..24f9e79
--- /dev/null
+++ b/test/files/run/t5713/Test_2.scala
@@ -0,0 +1,5 @@
+import m._
+
+object Test extends App {
+ Logger.error("err")
+}
\ No newline at end of file
diff --git a/test/files/run/t5733.check b/test/files/run/t5733.check
new file mode 100644
index 0000000..e697046
--- /dev/null
+++ b/test/files/run/t5733.check
@@ -0,0 +1,2 @@
+Running ABTest asserts
+Done
diff --git a/test/files/run/t5733.scala b/test/files/run/t5733.scala
new file mode 100644
index 0000000..360264e
--- /dev/null
+++ b/test/files/run/t5733.scala
@@ -0,0 +1,53 @@
+import scala.language.dynamics
+
+object A extends Dynamic {
+ var a = "a"
+
+ def selectDynamic(method:String): String = a
+
+ def updateDynamic(method:String)(v:String) { a = v }
+}
+
+class B extends Dynamic {
+ var b = "b"
+
+ def selectDynamic(method:String): String = b
+
+ def updateDynamic(method:String)(v:String) { b = v }
+}
+
+object Test extends App {
+ assert( A.foo == "a" )
+ assert( A.bar == "a" )
+ A.aaa = "aaa"
+ assert( A.bar == "aaa" )
+
+ val b = new B
+ assert( b.foo == "b" )
+ assert( b.bar == "b" )
+ b.bbb = "bbb"
+ assert( b.bar == "bbb" )
+
+ {
+ println("Running ABTest asserts")
+ A.a = "a"
+ (new ABTest).test()
+ }
+
+ println("Done")
+}
+
+class ABTest {
+ def test() {
+ assert( A.foo == "a" )
+ assert( A.bar == "a" )
+ A.aaa = "aaa"
+ assert( A.bar == "aaa" )
+
+ val b = new B
+ assert( b.foo == "b" )
+ assert( b.bar == "b" )
+ b.bbb = "bbb"
+ assert( b.bar == "bbb" )
+ }
+}
diff --git a/test/files/run/t5753_1.check b/test/files/run/t5753_1.check
new file mode 100644
index 0000000..f70d7bb
--- /dev/null
+++ b/test/files/run/t5753_1.check
@@ -0,0 +1 @@
+42
\ No newline at end of file
diff --git a/test/files/run/t5753_1.flags b/test/files/run/t5753_1.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/t5753_1.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/t5753_1/Impls_Macros_1.scala b/test/files/run/t5753_1/Impls_Macros_1.scala
new file mode 100644
index 0000000..1664301
--- /dev/null
+++ b/test/files/run/t5753_1/Impls_Macros_1.scala
@@ -0,0 +1,10 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+trait Impls {
+ def impl(c: Context)(x: c.Expr[Any]) = x
+}
+
+object Macros extends Impls {
+ def foo(x: Any) = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/t5753_1/Test_2.scala b/test/files/run/t5753_1/Test_2.scala
new file mode 100644
index 0000000..a277763
--- /dev/null
+++ b/test/files/run/t5753_1/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros._
+ println(foo(42))
+}
\ No newline at end of file
diff --git a/test/files/run/t5753_2.check b/test/files/run/t5753_2.check
new file mode 100644
index 0000000..f70d7bb
--- /dev/null
+++ b/test/files/run/t5753_2.check
@@ -0,0 +1 @@
+42
\ No newline at end of file
diff --git a/test/files/run/t5753_2.flags b/test/files/run/t5753_2.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/t5753_2.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/t5753_2/Impls_Macros_1.scala b/test/files/run/t5753_2/Impls_Macros_1.scala
new file mode 100644
index 0000000..e23c0b9
--- /dev/null
+++ b/test/files/run/t5753_2/Impls_Macros_1.scala
@@ -0,0 +1,10 @@
+import scala.reflect.macros.{Context => Ctx}
+
+trait Macro_T {
+ def foo[T](c: Ctx)(s: c.Expr[T]) = s
+}
+
+object Macros {
+ def foo[T](s: T) = macro Impls.foo[T]
+ object Impls extends Macro_T
+}
diff --git a/test/files/run/t5753_2/Test_2.scala b/test/files/run/t5753_2/Test_2.scala
new file mode 100644
index 0000000..a277763
--- /dev/null
+++ b/test/files/run/t5753_2/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ import Macros._
+ println(foo(42))
+}
\ No newline at end of file
diff --git a/test/files/run/bug576.check b/test/files/run/t576.check
similarity index 100%
rename from test/files/run/bug576.check
rename to test/files/run/t576.check
diff --git a/test/files/run/t576.scala b/test/files/run/t576.scala
new file mode 100644
index 0000000..756a241
--- /dev/null
+++ b/test/files/run/t576.scala
@@ -0,0 +1,45 @@
+class A {
+ override def equals(other: Any) = other match {
+ case _: this.type => true
+ case _ => false
+ }
+}
+
+object Dingus {
+ def IamDingus = 5
+}
+
+object Test {
+ val x1 = new A
+ val x2 = new A
+
+ val x3 = new { self =>
+ override def equals(other : Any) = other match {
+ case that: self.type => true
+ case _ => false
+ }
+ }
+ val x4 = new { self =>
+ def f(x: Any): Int = x match {
+ case _: x1.type => 1
+ case _: x2.type => 2
+ case _: x3.type => 3
+ case _: self.type => 4
+ case x: Dingus.type => x.IamDingus
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+
+ assert(x1 == x1)
+ assert(x1 != x2)
+ assert(x1 != ())
+ assert(x2 != x1)
+
+ assert(x3 == x3)
+ assert(x3 != x2)
+ assert(x2 != x3)
+
+ List(x1, x2, x3, x4, Dingus) map x4.f foreach println
+ }
+}
diff --git a/test/files/run/t5770.check b/test/files/run/t5770.check
new file mode 100644
index 0000000..f00c965
--- /dev/null
+++ b/test/files/run/t5770.check
@@ -0,0 +1,10 @@
+1
+2
+3
+4
+5
+6
+7
+8
+9
+10
diff --git a/test/files/run/t5770.scala b/test/files/run/t5770.scala
new file mode 100644
index 0000000..b6c9236
--- /dev/null
+++ b/test/files/run/t5770.scala
@@ -0,0 +1,25 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect._
+
+object Test extends App {
+ var i = 0
+ val action = reify { i += 1; println(i) }.tree
+
+ val tb1 = cm.mkToolBox()
+ tb1.eval(action)
+ tb1.eval(action)
+ tb1.eval(action)
+ tb1.frontEnd.reset()
+ tb1.eval(action)
+ tb1.eval(action)
+
+ val tb2 = cm.mkToolBox()
+ tb2.eval(action)
+ tb2.frontEnd.reset()
+ tb2.eval(action)
+ tb2.eval(action)
+ tb2.frontEnd.reset()
+ tb2.eval(action)
+ tb2.eval(action)
+}
diff --git a/test/files/run/t5789.check b/test/files/run/t5789.check
new file mode 100644
index 0000000..ea8d496
--- /dev/null
+++ b/test/files/run/t5789.check
@@ -0,0 +1,14 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> val n = 2
+n: Int = 2
+
+scala> () => n
+res0: () => Int = <function0>
+
+scala>
+
+scala>
diff --git a/test/files/run/t5789.scala b/test/files/run/t5789.scala
new file mode 100644
index 0000000..461f6a4
--- /dev/null
+++ b/test/files/run/t5789.scala
@@ -0,0 +1,14 @@
+
+import scala.tools.nsc._
+import interpreter.ILoop
+import scala.tools.partest.ReplTest
+
+
+object Test extends ReplTest {
+ override def extraSettings = "-Yinline"
+ def code = """
+ val n = 2
+ () => n
+ """
+}
+
diff --git a/test/files/run/t5804.check b/test/files/run/t5804.check
new file mode 100644
index 0000000..3ccc1c2
--- /dev/null
+++ b/test/files/run/t5804.check
@@ -0,0 +1,4 @@
+128
+16
+128
+32
\ No newline at end of file
diff --git a/test/files/run/t5804.scala b/test/files/run/t5804.scala
new file mode 100644
index 0000000..b96a736
--- /dev/null
+++ b/test/files/run/t5804.scala
@@ -0,0 +1,32 @@
+
+
+import collection.mutable._
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ class CustomHashMap extends HashMap[Int, Int] {
+ override def initialSize = 65
+
+ println(table.length)
+ }
+
+ new CustomHashMap
+ new HashMap {
+ println(table.length)
+ }
+
+ class CustomHashSet extends HashSet[Int] {
+ override def initialSize = 96
+
+ println(table.length)
+ }
+
+ new CustomHashSet
+ new HashSet {
+ println(table.length)
+ }
+ }
+
+}
diff --git a/test/files/run/t5816.check b/test/files/run/t5816.check
new file mode 100644
index 0000000..8e58ace
--- /dev/null
+++ b/test/files/run/t5816.check
@@ -0,0 +1 @@
+5.+(Test.this.y)
diff --git a/test/files/run/t5816.scala b/test/files/run/t5816.scala
new file mode 100644
index 0000000..f0279e5
--- /dev/null
+++ b/test/files/run/t5816.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val toolbox = cm.mkToolBox()
+
+ def printSource[T](expr: Expr[T]) {
+ val ttree = toolbox typeCheck expr.tree
+ println(ttree.toString)
+ }
+
+ var y = 3
+ printSource(reify {
+ 5 + y
+ })
+}
\ No newline at end of file
diff --git a/test/files/run/t5824.check b/test/files/run/t5824.check
new file mode 100644
index 0000000..3774da6
--- /dev/null
+++ b/test/files/run/t5824.check
@@ -0,0 +1 @@
+a b c
diff --git a/test/files/run/t5824.scala b/test/files/run/t5824.scala
new file mode 100644
index 0000000..2ad169e
--- /dev/null
+++ b/test/files/run/t5824.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ println("%s %s %s".format(List("a", "b", "c"): _*))
+ }.eval
+}
diff --git a/test/files/run/t5830.check b/test/files/run/t5830.check
new file mode 100644
index 0000000..675387e
--- /dev/null
+++ b/test/files/run/t5830.check
@@ -0,0 +1,6 @@
+a with oef
+a with oef
+a
+def with oef
+def
+default
diff --git a/test/files/neg/caseinherit.flags b/test/files/run/t5830.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/run/t5830.flags
diff --git a/test/files/run/t5830.scala b/test/files/run/t5830.scala
new file mode 100644
index 0000000..5d808bf
--- /dev/null
+++ b/test/files/run/t5830.scala
@@ -0,0 +1,56 @@
+import scala.annotation.switch
+
+object Test extends App {
+ // TODO: should not emit a switch
+ // def noSwitch(ch: Char, eof: Boolean) = (ch: @switch) match {
+ // case 'a' if eof => println("a with oef") // then branch
+ // }
+
+ def onlyThen(ch: Char, eof: Boolean) = (ch: @switch) match {
+ case 'a' if eof => println("a with oef") // then branch
+ case 'c' =>
+ }
+
+ def ifThenElse(ch: Char, eof: Boolean) = (ch: @switch) match {
+ case 'a' if eof => println("a with oef") // then branch
+ case 'a' if eof => println("a with oef2") // unreachable, but the analysis is not that sophisticated
+ case 'a' => println("a") // else-branch
+ case 'c' =>
+ }
+
+ def defaultUnguarded(ch: Char, eof: Boolean) = (ch: @switch) match {
+ case ' ' if eof => println("spacey oef")
+ case _ => println("default")
+ }
+
+ def defaults(ch: Char, eof: Boolean) = (ch: @switch) match {
+ case _ if eof => println("def with oef") // then branch
+ case _ if eof => println("def with oef2") // unreachable, but the analysis is not that sophisticated
+ case _ => println("def") // else-branch
+ }
+
+ // test binders in collapsed cases (no need to run, it's "enough" to know it doesn't crash the compiler)
+ def guard(x: Any): Boolean = true
+ def testBinders =
+ try { println("") } // work around SI-6015
+ catch {
+ case _ if guard(null) =>
+ case x if guard(x) => throw x
+ }
+
+ // def unreachable(ch: Char) = (ch: @switch) match {
+ // case 'a' => println("b") // ok
+ // case 'a' => println("b") // unreachable
+ // case 'c' =>
+ // }
+
+ // noSwitch('a', true)
+ onlyThen('a', true) // 'a with oef'
+ ifThenElse('a', true) // 'a with oef'
+ ifThenElse('a', false) // 'a'
+ defaults('a', true) // 'def with oef'
+ defaults('a', false) // 'def'
+
+ // test that it jumps to default case, no match error
+ defaultUnguarded(' ', false) // default
+}
\ No newline at end of file
diff --git a/test/files/run/t5840.scala b/test/files/run/t5840.scala
new file mode 100644
index 0000000..da036d5
--- /dev/null
+++ b/test/files/run/t5840.scala
@@ -0,0 +1,7 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ reify {
+ class C[T <: String with Singleton]
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t5843.check b/test/files/run/t5843.check
new file mode 100644
index 0000000..2bf97f4
--- /dev/null
+++ b/test/files/run/t5843.check
@@ -0,0 +1,9 @@
+ foo="1"
+ bar="2" foo="1"
+null
+ bar="2"
+ foo="1"
+ bar="2"
+ foo="1"
+ bar="2" foo="1"
+ bar="2" foo="1"
diff --git a/test/files/run/t5843.scala b/test/files/run/t5843.scala
new file mode 100644
index 0000000..43d588c
--- /dev/null
+++ b/test/files/run/t5843.scala
@@ -0,0 +1,15 @@
+object Test extends App {
+ val foo = scala.xml.Attribute(null, "foo", "1", scala.xml.Null)
+ val bar = scala.xml.Attribute(null, "bar", "2", foo)
+ println(foo)
+ println(bar)
+ println(scala.xml.TopScope.getURI(foo.pre))
+ println(bar remove "foo")
+ println(bar remove "bar")
+ println(bar remove (null, scala.xml.TopScope, "foo"))
+ println(bar remove (null, scala.xml.TopScope, "bar"))
+
+ val ns = scala.xml.NamespaceBinding(null, "uri", scala.xml.TopScope)
+ println(bar remove (null, ns, "foo"))
+ println(bar remove (null, ns, "bar"))
+}
diff --git a/test/files/run/t5856.scala b/test/files/run/t5856.scala
new file mode 100644
index 0000000..d1e9bd6
--- /dev/null
+++ b/test/files/run/t5856.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+ override def toString = "Test"
+
+ assert(s"$this" == "Test")
+ assert(s"$this$this" == "TestTest")
+ assert(s"$this$$" == "Test$")
+ assert(s"$this.##" == "Test.##")
+ assert(s"$this.toString" == "Test.toString")
+ assert(s"$this=THIS" == "Test=THIS")
+}
\ No newline at end of file
diff --git a/test/files/run/t5857.scala b/test/files/run/t5857.scala
new file mode 100644
index 0000000..bf67bed
--- /dev/null
+++ b/test/files/run/t5857.scala
@@ -0,0 +1,45 @@
+
+
+
+object Test {
+
+ def time[U](b: =>U): Long = {
+ val start = System.currentTimeMillis
+ b
+ val end = System.currentTimeMillis
+
+ end - start
+ }
+
+ def main(args: Array[String]) {
+ val sz = 1000000000
+
+ val range = 1 to sz
+ check { assert(range.min == 1, range.min) }
+ check { assert(range.max == sz, range.max) }
+
+ val descending = sz to 1 by -1
+ check { assert(descending.min == 1) }
+ check { assert(descending.max == sz) }
+
+ val numeric = 1.0 to sz.toDouble by 1
+ check { assert(numeric.min == 1.0) }
+ check { assert(numeric.max == sz.toDouble) }
+
+ val numdesc = sz.toDouble to 1.0 by -1
+ check { assert(numdesc.min == 1.0) }
+ check { assert(numdesc.max == sz.toDouble) }
+ }
+
+ def check[U](b: =>U) {
+ val exectime = time {
+ b
+ }
+
+ // whatever it is, it should be less than, say, 250ms
+ // if `max` involves traversal, it takes over 5 seconds on a 3.2GHz i7 CPU
+ //println(exectime)
+ assert(exectime < 250, exectime)
+ }
+
+}
diff --git a/test/files/run/t5866.check b/test/files/run/t5866.check
new file mode 100644
index 0000000..9f4ec72
--- /dev/null
+++ b/test/files/run/t5866.check
@@ -0,0 +1,2 @@
+0.0
+Foo(0.0)
diff --git a/test/files/run/t5866.scala b/test/files/run/t5866.scala
new file mode 100644
index 0000000..120773e
--- /dev/null
+++ b/test/files/run/t5866.scala
@@ -0,0 +1,11 @@
+class Foo(val d: Double) extends AnyVal {
+ override def toString = s"Foo($d)"
+}
+object Test {
+ def main(args: Array[String]): Unit = {
+ val d: Double = null.asInstanceOf[Double]
+ println(d)
+ val f: Foo = null.asInstanceOf[Foo]
+ println(f)
+ }
+}
diff --git a/test/files/run/t5867.check b/test/files/run/t5867.check
new file mode 100644
index 0000000..e1811ee
--- /dev/null
+++ b/test/files/run/t5867.check
@@ -0,0 +1 @@
+UnrolledBuffer(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50)
\ No newline at end of file
diff --git a/test/files/run/t5867.scala b/test/files/run/t5867.scala
new file mode 100644
index 0000000..6a86ac3
--- /dev/null
+++ b/test/files/run/t5867.scala
@@ -0,0 +1,14 @@
+import collection.mutable.UnrolledBuffer
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val buf = UnrolledBuffer(1 to 50: _*)
+ val dub = buf ++ buf
+
+ println(dub)
+ }
+
+}
diff --git a/test/files/run/t5879.check b/test/files/run/t5879.check
new file mode 100644
index 0000000..b6cbda3
--- /dev/null
+++ b/test/files/run/t5879.check
@@ -0,0 +1,16 @@
+Map(1 -> 1)
+1
+Map(1 -> 1)
+1
+(1,1)
+Map(1 -> 1)
+1
+(1,1)
+Map(1 -> 1)
+1
+(1,2)
+Map(1 -> 2)
+2
+(1,2)
+Map(1 -> 2)
+2
\ No newline at end of file
diff --git a/test/files/run/t5879.scala b/test/files/run/t5879.scala
new file mode 100644
index 0000000..e1c07fc
--- /dev/null
+++ b/test/files/run/t5879.scala
@@ -0,0 +1,74 @@
+import collection.immutable.HashMap
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ resolveDefault()
+ resolveFirst()
+ resolveSecond()
+ resolveMany()
+ }
+
+ def resolveDefault() {
+ val a = HashMap(1 -> "1")
+ val b = HashMap(1 -> "2")
+
+ val r = a.merged(b)(null)
+ println(r)
+ println(r(1))
+
+ val rold = a.merge(b)
+ println(rold)
+ println(rold(1))
+ }
+
+ def resolveFirst() {
+ val a = HashMap(1 -> "1")
+ val b = HashMap(1 -> "2")
+ def collision(a: (Int, String), b: (Int, String)) = {
+ println(a)
+ a
+ }
+
+ val r = a.merged(b) { collision }
+ println(r)
+ println(r(1))
+
+ val rold = a.merge(b, collision)
+ println(rold)
+ println(rold(1))
+ }
+
+ def resolveSecond() {
+ val a = HashMap(1 -> "1")
+ val b = HashMap(1 -> "2")
+ def collision(a: (Int, String), b: (Int, String)) = {
+ println(b)
+ b
+ }
+
+ val r = a.merged(b) { collision }
+ println(r)
+ println(r(1))
+
+ val rold = a.merge(b, collision)
+ println(rold)
+ println(rold(1))
+ }
+
+ def resolveMany() {
+ val a = HashMap((0 until 100) zip (0 until 100): _*)
+ val b = HashMap((0 until 100) zip (100 until 200): _*)
+ def collision(a: (Int, Int), b: (Int, Int)) = {
+ (a._1, a._2 + b._2)
+ }
+
+ val r = a.merged(b) { collision }
+ for ((k, v) <- r) assert(v == 100 + 2 * k, (k, v))
+
+ val rold = a.merge(b, collision)
+ for ((k, v) <- r) assert(v == 100 + 2 * k, (k, v))
+ }
+
+}
diff --git a/test/files/run/t5880.scala b/test/files/run/t5880.scala
new file mode 100644
index 0000000..4cda599
--- /dev/null
+++ b/test/files/run/t5880.scala
@@ -0,0 +1,41 @@
+
+
+import scala.collection.JavaConversions._
+
+
+
+object Test {
+
+ def main(args:Array[String]) = {
+ val tests = 5000
+ val jm: java.util.Map[Int, Int] = scala.collection.mutable.Map((0 until tests) zip (0 until tests).reverse: _*)
+ val es = jm.entrySet()
+ val it = es.iterator
+
+ // chi square test
+ val groups = 10
+ val hits = new Array[Int](groups)
+ def hit(hc: Int) {
+ val bucket = math.abs(hc) / (Int.MaxValue / groups)
+ hits(bucket) += 1
+ }
+ def expected = tests / groups
+ def Dstat = {
+ val diffs = for (i <- 0 until groups) yield math.abs(hits(i) - expected)
+ diffs.sum.toDouble / expected
+ }
+ def ChiSquare = {
+ val diffs = for (i <- 0 until groups) yield (hits(i) - expected) * (hits(i) - expected)
+ diffs.sum.toDouble / expected
+ }
+
+ while (it.hasNext) {
+ val x = it.next()
+ hit(x.##)
+ }
+ // println(hits.toBuffer)
+ // println(ChiSquare)
+ assert(ChiSquare < 4.0, ChiSquare + " -> " + hits.mkString(", "))
+ }
+
+}
diff --git a/test/files/run/t5881.check b/test/files/run/t5881.check
new file mode 100644
index 0000000..f4aeec6
--- /dev/null
+++ b/test/files/run/t5881.check
@@ -0,0 +1,2 @@
+scala.collection.immutable.List
+scala.collection.immutable.List
diff --git a/test/files/run/t5881.scala b/test/files/run/t5881.scala
new file mode 100644
index 0000000..01bee29
--- /dev/null
+++ b/test/files/run/t5881.scala
@@ -0,0 +1,6 @@
+import scala.reflect.ClassTag
+
+object Test extends App {
+ println(implicitly[ClassTag[List[T forSome {type T <: List[T]}]]])
+ println(implicitly[ClassTag[List[Any]]])
+}
\ No newline at end of file
diff --git a/test/files/run/t5894.scala b/test/files/run/t5894.scala
new file mode 100644
index 0000000..abeec32
--- /dev/null
+++ b/test/files/run/t5894.scala
@@ -0,0 +1,17 @@
+import language.experimental.macros
+
+class Test
+
+object Test {
+ def foo = macro fooImpl
+ def fooImpl(c: reflect.macros.Context) = c.literalUnit
+
+ def main(args: Array[String]) {
+ try {
+ val method = classOf[Test].getMethod("foo")
+ sys.error("Static forwarder generated for macro: " + method)
+ } catch {
+ case _: NoSuchMethodException => // okay
+ }
+ }
+}
diff --git a/test/files/run/t5907.check b/test/files/run/t5907.check
new file mode 100644
index 0000000..bc23692
--- /dev/null
+++ b/test/files/run/t5907.check
@@ -0,0 +1,31 @@
+c1: 2
+c1: 2873
+c2: 37
+c3: 1, 2, 27
+c3: 1, 22, 27
+c3: 11, 7, 27
+c4: 1
+c4: 23
+c5: 1, 2, 33, b
+c5: 1, 19, 33, b
+c5: 1, 2, 193, c
+c5: 1, 371, 193, c
+c5: -1, 2, -2, lken
+c6: 29, 18, -12
+c6: 1, 93, 2892
+c6: 1, 93, 761
+c7: 1, 22, 33, elkj
+c7: 1, 283, 29872, me
+c7: 37, 298, 899, ekjr
+c8: 172, 989, 77, eliurna
+c8: 1, 82, 2111, schtring
+c8: -1, 92, 29, lken
+c9: 1, 271, ehebab
+c9: 1, 299, enag
+c9: 1, 299, enag
+c9: 1, 299, enag
+c9: -42, 99, flae
+c9: 10, 298, 27
+c9: elkn, en, emn
+c9: ka, kb, kb
+c9: ka, kb, ka
diff --git a/test/files/run/t5907.scala b/test/files/run/t5907.scala
new file mode 100644
index 0000000..a005e9f
--- /dev/null
+++ b/test/files/run/t5907.scala
@@ -0,0 +1,118 @@
+object Test extends App {
+ t
+
+ def t {
+ val c1 = C1()(1)
+ println(c1.copy()(2))
+
+ {
+ implicit val i = 2873
+ println(c1.copy())
+ }
+
+ val c2 = C2()(1)
+ println(c2.copy()(37))
+
+ val c3 = C3(1,2)(3)
+ println(c3.copy()(27))
+ println(c3.copy(y = 22)(27))
+ println(c3.copy(y = 7, x = 11)(27))
+
+ val c4 = C4(1)
+ println(c4.copy())
+ println(c4.copy(x = 23))
+
+ val c5 = C5(1,2)(3,"a")
+ println(c5.copy()(33,"b"))
+ println(c5.copy(y = 19)(33,"b"))
+
+ {
+ implicit val i = 193
+ implicit val s = "c"
+ println(c5.copy())
+ println(c5.copy(y = 371))
+ println(c5.copy(x = -1)(-2, "lken"))
+ }
+
+ val c6 = C6(1)(2)(3)
+ println(c6.copy(29)(18)(-12))
+
+ {
+ implicit val i = 2892
+ println(c6.copy(x = 1)(93))
+ println(c6.copy(x = 1)(93)(761))
+ }
+
+ val c7 = C7(1)(2)(3)("h")
+ println(c7.copy()(22)(33)("elkj"))
+
+ {
+ implicit val s = "me"
+ println(c7.copy()(283)(29872))
+ println(c7.copy(37)(298)(899)("ekjr"))
+ }
+
+ val c8 = C8(1)(2,3)()("els")
+ println(c8.copy(x = 172)(989, 77)()("eliurna"))
+
+ {
+ implicit val s = "schtring"
+ println(c8.copy()(82,2111)())
+ println(c8.copy(x = -1)(92,29)()("lken"))
+ }
+
+ val c9 = C9(1)(2)()()("u")
+ println(c9.copy()(271)()()("ehebab"))
+
+ {
+ implicit val s = "enag"
+ println(c9.copy()(299))
+ println(c9.copy()(299)())
+ println(c9.copy()(299)()())
+ println(c9.copy(x = -42)(99)()()("flae"))
+ }
+
+ class KA { override def toString = "ka" }
+ class KB extends KA { override def toString = "kb" }
+ val c10 = C10(10)(3)(19)
+ println(c10.copy()(298)(27))
+ println(c10.copy("elkn")("en")("emn"))
+ println(c10.copy(new KA)(new KB)(new KB))
+
+ {
+ implicit val k = new KA
+ println(c10.copy(new KA)(new KB))
+ }
+ }
+}
+
+case class C1(implicit x: Int) {
+ override def toString = s"c1: $x"
+}
+case class C2()(y: Int) {
+ override def toString = s"c2: $y"
+}
+case class C3(x: Int, y: Int)(z: Int) {
+ override def toString = s"c3: $x, $y, $z"
+}
+case class C4(x: Int) {
+ override def toString = s"c4: $x"
+}
+case class C5(x: Int, y: Int)(implicit z: Int, s: String) {
+ override def toString = s"c5: $x, $y, $z, $s"
+}
+case class C6(x: Int)(y: Int)(implicit z: Int) {
+ override def toString = s"c6: $x, $y, $z"
+}
+case class C7(x: Int)(y: Int)(z: Int)(implicit s: String) {
+ override def toString = s"c7: $x, $y, $z, $s"
+}
+case class C8(x: Int)(y: Int, z: Int)()(implicit s: String) {
+ override def toString = s"c8: $x, $y, $z, $s"
+}
+case class C9(x: Int)(y: Int)()()(implicit s: String) {
+ override def toString = s"c9: $x, $y, $s"
+}
+case class C10[T,U <: T](x: T)(y: U)(implicit z: T) {
+ override def toString = s"c9: $x, $y, $z"
+}
diff --git a/test/files/run/t5912.scala b/test/files/run/t5912.scala
new file mode 100644
index 0000000..7710d04
--- /dev/null
+++ b/test/files/run/t5912.scala
@@ -0,0 +1,6 @@
+object Test extends App{
+ import scala.reflect.runtime.{currentMirror=>cm}
+ import scala.tools.reflect._
+ import scala.reflect.runtime.universe._
+ val tree = cm.mkToolBox().typeCheck( Literal(Constant("test")) )
+}
\ No newline at end of file
diff --git a/test/files/run/t5914.check b/test/files/run/t5914.check
new file mode 100644
index 0000000..818e321
--- /dev/null
+++ b/test/files/run/t5914.check
@@ -0,0 +1 @@
+correct
diff --git a/test/files/run/t5914.scala b/test/files/run/t5914.scala
new file mode 100644
index 0000000..45d8815
--- /dev/null
+++ b/test/files/run/t5914.scala
@@ -0,0 +1,23 @@
+import scala.reflect.ClassTag
+
+trait Trees {
+ class Tree
+ implicit val ttTag: ClassTag[TypeTree]
+ type TypeTree <: Tree
+ val TypeTree: TypeTreeExtractor
+ abstract class TypeTreeExtractor {
+ def unapply(t: TypeTree): Option[String]
+ }
+ def test(tree: Tree) =
+ tree match {
+ case TypeTree(_) => println("lolwut")
+ case null => println("correct")
+ }
+}
+
+object Test extends App with Trees {
+ val ttTag = implicitly[ClassTag[TypeTree]]
+ case class TypeTree(meh: String) extends Tree
+ object TypeTree extends TypeTreeExtractor
+ test(null) // should not crash
+}
\ No newline at end of file
diff --git a/test/files/run/t5923a.check b/test/files/run/t5923a.check
new file mode 100644
index 0000000..7165b73
--- /dev/null
+++ b/test/files/run/t5923a.check
@@ -0,0 +1,3 @@
+C(Int)
+C(String)
+C(Nothing)
diff --git a/test/files/run/t5923a/Macros_1.scala b/test/files/run/t5923a/Macros_1.scala
new file mode 100644
index 0000000..6d21362
--- /dev/null
+++ b/test/files/run/t5923a/Macros_1.scala
@@ -0,0 +1,14 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+case class C[T](t: String)
+object C {
+ implicit def foo[T]: C[T] = macro Macros.impl[T]
+}
+
+object Macros {
+ def impl[T: c.WeakTypeTag](c: Context) = {
+ import c.universe._
+ reify(C[T](c.literal(weakTypeOf[T].toString).splice))
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t5923a/Test_2.scala b/test/files/run/t5923a/Test_2.scala
new file mode 100644
index 0000000..001ff9a
--- /dev/null
+++ b/test/files/run/t5923a/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ println(implicitly[C[Int]])
+ println(implicitly[C[String]])
+ println(implicitly[C[Nothing]])
+}
\ No newline at end of file
diff --git a/test/files/run/t5923b.check b/test/files/run/t5923b.check
new file mode 100644
index 0000000..d56076f
--- /dev/null
+++ b/test/files/run/t5923b.check
@@ -0,0 +1,3 @@
+class [Ljava.lang.Object;
+class [Ljava.lang.Object;
+class [Ljava.lang.Object;
diff --git a/test/files/run/t5923b/Test.scala b/test/files/run/t5923b/Test.scala
new file mode 100644
index 0000000..7c26274
--- /dev/null
+++ b/test/files/run/t5923b/Test.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+ import scala.collection.generic.CanBuildFrom
+ val cbf = implicitly[CanBuildFrom[Nothing, Nothing, Array[Nothing]]]
+ println(cbf().result.getClass)
+ println(new Array[Nothing](0).getClass)
+ println(Array[Nothing]().getClass)
+}
\ No newline at end of file
diff --git a/test/files/run/t5937.scala b/test/files/run/t5937.scala
new file mode 100644
index 0000000..e5bf661
--- /dev/null
+++ b/test/files/run/t5937.scala
@@ -0,0 +1,12 @@
+
+
+
+import collection._
+
+
+
+object Test extends App {
+
+ val list: List[Int] = (immutable.Vector(1, 2, 3) :+ 4)(breakOut)
+
+}
diff --git a/test/files/run/bug594.check b/test/files/run/t594.check
similarity index 100%
rename from test/files/run/bug594.check
rename to test/files/run/t594.check
diff --git a/test/files/run/bug594.scala b/test/files/run/t594.scala
similarity index 100%
rename from test/files/run/bug594.scala
rename to test/files/run/t594.scala
diff --git a/test/files/run/t5940.scala b/test/files/run/t5940.scala
new file mode 100644
index 0000000..147ff38
--- /dev/null
+++ b/test/files/run/t5940.scala
@@ -0,0 +1,41 @@
+import scala.tools.partest._
+
+object Test extends DirectTest {
+ def code = ???
+
+ def macros_1 = """
+ import scala.reflect.macros.Context
+
+ object Impls {
+ def impl(c: Context) = c.literalUnit
+ }
+
+ object Macros {
+ //import Impls._
+ def impl(c: Context) = c.literalUnit
+ def foo = macro impl
+ }
+ """
+ def compileMacros() = {
+ val classpath = List(sys.props("partest.lib"), sys.props("partest.reflect")) mkString sys.props("path.separator")
+ compileString(newCompiler("-language:experimental.macros", "-cp", classpath, "-d", testOutput.path))(macros_1)
+ }
+
+ def test_2 = """
+ object Test extends App {
+ println(Macros.foo)
+ }
+ """
+ def compileTest() = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(test_2)
+ }
+
+ def show(): Unit = {
+ log("Compiling Macros_1...")
+ if (compileMacros()) {
+ log("Compiling Test_2...")
+ if (compileTest()) log("Success!") else log("Failed...")
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/t5942.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t5942.check
diff --git a/test/files/run/t5942.scala b/test/files/run/t5942.scala
new file mode 100644
index 0000000..44a8be9
--- /dev/null
+++ b/test/files/run/t5942.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect._
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ tb.parse("def x = {}")
+ try { tb.parse("def x = {") } catch { case _ => }
+ tb.parse("def x = {}")
+}
diff --git a/test/files/run/t5943a1.check b/test/files/run/t5943a1.check
new file mode 100644
index 0000000..9f4d160
--- /dev/null
+++ b/test/files/run/t5943a1.check
@@ -0,0 +1 @@
+scala.this.Predef.intWrapper(1).to(3).map[Int, scala.collection.immutable.IndexedSeq[Int]](((x$1: Int) => x$1.+(1)))(immutable.this.IndexedSeq.canBuildFrom[Int])
diff --git a/test/files/run/t5943a1.scala b/test/files/run/t5943a1.scala
new file mode 100644
index 0000000..00f4afa
--- /dev/null
+++ b/test/files/run/t5943a1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val expr = tb.parse("1 to 3 map (_+1)")
+ println(tb.typeCheck(expr))
+}
\ No newline at end of file
diff --git a/test/files/run/t5943a2.check b/test/files/run/t5943a2.check
new file mode 100644
index 0000000..29ad79c
--- /dev/null
+++ b/test/files/run/t5943a2.check
@@ -0,0 +1 @@
+Vector(2, 3, 4)
diff --git a/test/files/run/t5943a2.scala b/test/files/run/t5943a2.scala
new file mode 100644
index 0000000..fda8008
--- /dev/null
+++ b/test/files/run/t5943a2.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val expr = tb.parse("1 to 3 map (_+1)")
+ println(tb.eval(expr))
+}
\ No newline at end of file
diff --git a/test/files/run/t5966.check b/test/files/run/t5966.check
new file mode 100644
index 0000000..bfe8358
--- /dev/null
+++ b/test/files/run/t5966.check
@@ -0,0 +1,3 @@
+(o()_)("") = List()
+(o("a1")_)("") = WrappedArray(a1)
+(o("a1", "a2")_)("") = WrappedArray(a1, a2)
diff --git a/test/files/run/t5966.scala b/test/files/run/t5966.scala
new file mode 100644
index 0000000..bbe1a6e
--- /dev/null
+++ b/test/files/run/t5966.scala
@@ -0,0 +1,9 @@
+object o { def apply(i: AnyRef*)(j: String) = i }
+
+object Test {
+ def main(args: Array[String]) {
+ println("(o()_)(\"\") = " + (o()_)(""))
+ println("(o(\"a1\")_)(\"\") = " + (o("a1")_)(""))
+ println("(o(\"a1\", \"a2\")_)(\"\") = " + (o("a1", "a2")_)(""))
+ }
+}
diff --git a/test/files/run/t5971.check b/test/files/run/t5971.check
new file mode 100644
index 0000000..0c36a1f
--- /dev/null
+++ b/test/files/run/t5971.check
@@ -0,0 +1,4 @@
+r,b
+r
+a,b
+r,a,b
\ No newline at end of file
diff --git a/test/files/run/t5971.scala b/test/files/run/t5971.scala
new file mode 100644
index 0000000..dbd9bee
--- /dev/null
+++ b/test/files/run/t5971.scala
@@ -0,0 +1,23 @@
+
+
+
+
+
+/** When using `AbstractTransformed` abstract inner class in views in order
+ * to force generating bridges, one must take care to push the corresponding
+ * collection trait (such as `Iterable` or `Seq`) as far as possible to the
+ * left in the linearization order -- otherwise, overridden methods from these
+ * traits can override the already overridden methods in view. This was the
+ * case with `takeWhile`.
+ * Mind blowing, I know.
+ */
+object Test {
+
+ def main(args: Array[String]) {
+ println("bar".view.reverse.filter(_ > 'a').mkString(","))
+ println("bar".view.reverse.take(1).mkString(","))
+ println("bar".view.reverse.dropWhile(_ > 'a').mkString(","))
+ println("bar".view.reverse.takeWhile(_ => true).mkString(","))
+ }
+
+}
diff --git a/test/files/run/t5974.check b/test/files/run/t5974.check
new file mode 100644
index 0000000..9766475
--- /dev/null
+++ b/test/files/run/t5974.check
@@ -0,0 +1 @@
+ok
diff --git a/test/files/run/t5974.scala b/test/files/run/t5974.scala
new file mode 100644
index 0000000..5b99e9f
--- /dev/null
+++ b/test/files/run/t5974.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+ import scala.collection.JavaConverters._
+
+ def ser(a: AnyRef) =
+ (new java.io.ObjectOutputStream(new java.io.ByteArrayOutputStream())).writeObject(a)
+
+ val l = java.util.Arrays.asList("pigdog").asScala
+ ser(l)
+ println("ok")
+}
diff --git a/test/files/run/t5986.check b/test/files/run/t5986.check
new file mode 100644
index 0000000..4101770
--- /dev/null
+++ b/test/files/run/t5986.check
@@ -0,0 +1,15 @@
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
+Foo(bar, 1)
+Foo(bar, 1)
+Foo(bar, 1),Foo(baz, 3),Foo(bazz, 4)
\ No newline at end of file
diff --git a/test/files/run/t5986.scala b/test/files/run/t5986.scala
new file mode 100644
index 0000000..8cf7086
--- /dev/null
+++ b/test/files/run/t5986.scala
@@ -0,0 +1,36 @@
+
+
+
+import scala.collection._
+
+
+
+/** A sorted set should not replace elements when adding
+ * and the element already exists in the set.
+ */
+object Test {
+
+ class Foo(val name: String, val n: Int) {
+ override def equals(obj: Any): Boolean = obj match { case other: Foo => name == other.name; case _ => false }
+ override def hashCode = name.##
+ override def toString = "Foo(" + name + ", " + n + ")"
+ }
+
+ implicit val ordering: Ordering[Foo] = Ordering.fromLessThan[Foo] { (a, b) => a.name.compareTo(b.name) < 0 }
+
+ def check[S <: Set[Foo]](set: S) {
+ def output(s: Set[Foo]) = println(s.toList.sorted.mkString(","))
+ output(set + new Foo("bar", 2))
+ output(set ++ List(new Foo("bar", 2), new Foo("bar", 3), new Foo("bar", 4)))
+ output(set union Set(new Foo("bar", 2), new Foo("baz", 3), new Foo("bazz", 4)))
+ }
+
+ def main(args: Array[String]) {
+ check(Set(new Foo("bar", 1)))
+ check(immutable.Set(new Foo("bar", 1)))
+ check(mutable.Set(new Foo("bar", 1)))
+ check(immutable.SortedSet(new Foo("bar", 1)))
+ check(mutable.SortedSet(new Foo("bar", 1)))
+ }
+
+}
diff --git a/test/files/run/bug601.check b/test/files/run/t601.check
similarity index 100%
rename from test/files/run/bug601.check
rename to test/files/run/t601.check
diff --git a/test/files/run/bug601.scala b/test/files/run/t601.scala
similarity index 100%
rename from test/files/run/bug601.scala
rename to test/files/run/t601.scala
diff --git a/test/files/run/t6011b.check b/test/files/run/t6011b.check
new file mode 100644
index 0000000..00750ed
--- /dev/null
+++ b/test/files/run/t6011b.check
@@ -0,0 +1 @@
+3
diff --git a/test/files/run/t6011b.scala b/test/files/run/t6011b.scala
new file mode 100644
index 0000000..3d405e0
--- /dev/null
+++ b/test/files/run/t6011b.scala
@@ -0,0 +1,11 @@
+object Test extends App {
+ var cond = true
+
+ // should not generate a switch
+ def f(ch: Char): Int = ch match {
+ case 'a' if cond => 1
+ case 'z' | 'a' => 2
+ }
+
+ println(f('a') + f('z')) // 3
+}
\ No newline at end of file
diff --git a/test/files/run/t6011c.scala b/test/files/run/t6011c.scala
new file mode 100644
index 0000000..0647e3f
--- /dev/null
+++ b/test/files/run/t6011c.scala
@@ -0,0 +1,13 @@
+object Test extends App {
+ // A variation of SI-6011, which eluded the fix
+ // in 2.10.0.
+ //
+ // duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.
+ // at scala.reflect.internal.SymbolTable.abort(SymbolTable.scala:50)
+ // at scala.tools.nsc.Global.abort(Global.scala:249)
+ // at scala.tools.nsc.backend.jvm.GenASM$JPlainBuilder$jcode$.emitSWITCH(GenASM.scala:1850)
+ ((1: Byte): @unchecked @annotation.switch) match {
+ case 1 => 2
+ case 1 => 3 // crash
+ }
+}
diff --git a/test/files/run/t6023.check b/test/files/run/t6023.check
new file mode 100644
index 0000000..ee93565
--- /dev/null
+++ b/test/files/run/t6023.check
@@ -0,0 +1,12 @@
+{
+ abstract trait Foo extends AnyRef {
+ <stable> <accessor> def a: Int
+ };
+ ()
+}
+{
+ abstract trait Foo extends AnyRef {
+ <stable> <accessor> def a: Int
+ };
+ ()
+}
diff --git a/test/files/run/t6023.scala b/test/files/run/t6023.scala
new file mode 100644
index 0000000..07af368
--- /dev/null
+++ b/test/files/run/t6023.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ // test 1: reify
+ val tree = reify{ trait Foo { val a: Int } }.tree
+ println(tree.toString)
+
+ // test 2: import and typecheck
+ val toolbox = cm.mkToolBox()
+ val ttree = toolbox.typeCheck(tree)
+ println(ttree.toString)
+
+ // test 3: import and compile
+ toolbox.eval(tree)
+}
diff --git a/test/files/run/t6028.check b/test/files/run/t6028.check
new file mode 100644
index 0000000..613d25b
--- /dev/null
+++ b/test/files/run/t6028.check
@@ -0,0 +1,84 @@
+[[syntax trees at end of lambdalift]] // newSource1.scala
+package <empty> {
+ class T extends Object {
+ <paramaccessor> val T$$classParam: Int = _;
+ def <init>(classParam: Int): T = {
+ T.super.<init>();
+ ()
+ };
+ private[this] val field: Int = 0;
+ <stable> <accessor> def field(): Int = T.this.field;
+ def foo(methodParam: Int): Function0 = {
+ val methodLocal: Int = 0;
+ {
+ (new anonymous class $anonfun$foo$1(T.this, methodParam, methodLocal): Function0)
+ }
+ };
+ def bar(barParam: Int): Object = {
+ @volatile var MethodLocalObject$module: runtime.VolatileObjectRef = new runtime.VolatileObjectRef(null);
+ T.this.MethodLocalObject$1(barParam, MethodLocalObject$module)
+ };
+ def tryy(tryyParam: Int): Function0 = {
+ var tryyLocal: runtime.IntRef = new runtime.IntRef(0);
+ {
+ (new anonymous class $anonfun$tryy$1(T.this, tryyParam, tryyLocal): Function0)
+ }
+ };
+ @SerialVersionUID(0) final <synthetic> class $anonfun$foo$1 extends runtime.AbstractFunction0$mcI$sp with Serializable {
+ def <init>($outer: T, methodParam$1: Int, methodLocal$1: Int): anonymous class $anonfun$foo$1 = {
+ $anonfun$foo$1.super.<init>();
+ ()
+ };
+ final def apply(): Int = $anonfun$foo$1.this.apply$mcI$sp();
+ <specialized> def apply$mcI$sp(): Int = $anonfun$foo$1.this.$outer.T$$classParam.+($anonfun$foo$1.this.$outer.field()).+($anonfun$foo$1.this.methodParam$1).+($anonfun$foo$1.this.methodLocal$1);
+ <synthetic> <paramaccessor> private[this] val $outer: T = _;
+ <synthetic> <stable> def T$$anonfun$$$outer(): T = $anonfun$foo$1.this.$outer;
+ final <bridge> def apply(): Object = scala.Int.box($anonfun$foo$1.this.apply());
+ <synthetic> <paramaccessor> private[this] val methodParam$1: Int = _;
+ <synthetic> <paramaccessor> private[this] val methodLocal$1: Int = _
+ };
+ abstract trait MethodLocalTrait$1 extends Object {
+ <synthetic> <stable> def T$MethodLocalTrait$$$outer(): T
+ };
+ object MethodLocalObject$2 extends Object with T#MethodLocalTrait$1 {
+ def <init>($outer: T, barParam$1: Int): T#MethodLocalObject$2.type = {
+ MethodLocalObject$2.super.<init>();
+ MethodLocalObject$2.this.$asInstanceOf[T#MethodLocalTrait$1$class]()./*MethodLocalTrait$1$class*/$init$(barParam$1);
+ ()
+ };
+ <synthetic> <paramaccessor> private[this] val $outer: T = _;
+ <synthetic> <stable> def T$MethodLocalObject$$$outer(): T = MethodLocalObject$2.this.$outer;
+ <synthetic> <stable> def T$MethodLocalTrait$$$outer(): T = MethodLocalObject$2.this.$outer
+ };
+ final <stable> private[this] def MethodLocalObject$1(barParam$1: Int, MethodLocalObject$module$1: runtime.VolatileObjectRef): T#MethodLocalObject$2.type = {
+ MethodLocalObject$module$1.elem = new T#MethodLocalObject$2.type(T.this, barParam$1);
+ MethodLocalObject$module$1.elem.$asInstanceOf[T#MethodLocalObject$2.type]()
+ };
+ abstract trait MethodLocalTrait$1$class extends Object with T#MethodLocalTrait$1 {
+ def /*MethodLocalTrait$1$class*/$init$(barParam$1: Int): Unit = {
+ ()
+ };
+ scala.this.Predef.print(scala.Int.box(barParam$1))
+ };
+ @SerialVersionUID(0) final <synthetic> class $anonfun$tryy$1 extends runtime.AbstractFunction0$mcV$sp with Serializable {
+ def <init>($outer: T, tryyParam$1: Int, tryyLocal$1: runtime.IntRef): anonymous class $anonfun$tryy$1 = {
+ $anonfun$tryy$1.super.<init>();
+ ()
+ };
+ final def apply(): Unit = $anonfun$tryy$1.this.apply$mcV$sp();
+ <specialized> def apply$mcV$sp(): Unit = try {
+ $anonfun$tryy$1.this.tryyLocal$1.elem = $anonfun$tryy$1.this.tryyParam$1
+ } finally ();
+ <synthetic> <paramaccessor> private[this] val $outer: T = _;
+ <synthetic> <stable> def T$$anonfun$$$outer(): T = $anonfun$tryy$1.this.$outer;
+ final <bridge> def apply(): Object = {
+ $anonfun$tryy$1.this.apply();
+ scala.runtime.BoxedUnit.UNIT
+ };
+ <synthetic> <paramaccessor> private[this] val tryyParam$1: Int = _;
+ <synthetic> <paramaccessor> private[this] val tryyLocal$1: runtime.IntRef = _
+ }
+ }
+}
+
+warning: there were 1 feature warning(s); re-run with -feature for details
diff --git a/test/files/run/t6028.scala b/test/files/run/t6028.scala
new file mode 100644
index 0000000..cab1753
--- /dev/null
+++ b/test/files/run/t6028.scala
@@ -0,0 +1,21 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:lambdalift -d " + testOutput.path
+
+ override def code = """class T(classParam: Int) {
+ | val field: Int = 0
+ | def foo(methodParam: Int) = {val methodLocal = 0 ; () => classParam + field + methodParam + methodLocal }
+ | def bar(barParam: Int) = { trait MethodLocalTrait { print(barParam) }; object MethodLocalObject extends MethodLocalTrait; MethodLocalObject }
+ | def tryy(tryyParam: Int) = { var tryyLocal = 0; () => try { tryyLocal = tryyParam } finally () }
+ |}
+ |""".stripMargin.trim
+
+ override def show(): Unit = {
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+}
diff --git a/test/files/run/bug603.check b/test/files/run/t603.check
similarity index 100%
rename from test/files/run/bug603.check
rename to test/files/run/t603.check
diff --git a/test/files/run/bug603.scala b/test/files/run/t603.scala
similarity index 100%
rename from test/files/run/bug603.scala
rename to test/files/run/t603.scala
diff --git a/test/files/run/t6052.scala b/test/files/run/t6052.scala
new file mode 100644
index 0000000..385d539
--- /dev/null
+++ b/test/files/run/t6052.scala
@@ -0,0 +1,21 @@
+
+
+
+
+
+
+
+object Test extends App {
+ def seqarr(i: Int) = Array[Int]() ++ (0 until i)
+ def pararr(i: Int) = seqarr(i).par
+
+ def check[T](i: Int, f: Int => T) {
+ val gseq = seqarr(i).toSeq.groupBy(f)
+ val gpar = pararr(i).groupBy(f)
+ assert(gseq == gpar, (gseq, gpar))
+ }
+
+ for (i <- 0 until 20) check(i, _ > 0)
+ for (i <- 0 until 20) check(i, _ % 2)
+ for (i <- 0 until 20) check(i, _ % 4)
+}
diff --git a/test/files/run/t6063.check b/test/files/run/t6063.check
new file mode 100644
index 0000000..3934738
--- /dev/null
+++ b/test/files/run/t6063.check
@@ -0,0 +1 @@
+public static int foo.Ob.f5()
diff --git a/test/files/run/t6063/S_1.scala b/test/files/run/t6063/S_1.scala
new file mode 100644
index 0000000..69b1e91
--- /dev/null
+++ b/test/files/run/t6063/S_1.scala
@@ -0,0 +1,11 @@
+package foo
+
+abstract class Foo {
+ private[foo] def f1 = 1
+ private def f2 = 2
+ protected[foo] def f3 = 3
+ protected def f4 = 4
+ def f5 = 5
+}
+
+object Ob extends Foo
diff --git a/test/files/run/t6063/S_2.scala b/test/files/run/t6063/S_2.scala
new file mode 100644
index 0000000..a990cc7
--- /dev/null
+++ b/test/files/run/t6063/S_2.scala
@@ -0,0 +1,8 @@
+import java.lang.reflect.Modifier._
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val forwarders = Class.forName("foo.Ob").getMethods.toList filter (m => isStatic(m.getModifiers))
+ forwarders.sortBy(_.toString) foreach println
+ }
+}
diff --git a/test/files/run/t6070.check b/test/files/run/t6070.check
new file mode 100644
index 0000000..00750ed
--- /dev/null
+++ b/test/files/run/t6070.check
@@ -0,0 +1 @@
+3
diff --git a/test/files/run/t6070.scala b/test/files/run/t6070.scala
new file mode 100644
index 0000000..b6af48e
--- /dev/null
+++ b/test/files/run/t6070.scala
@@ -0,0 +1,36 @@
+abstract class Bomb {
+ type T
+ val x: T
+
+ def size(that: T): Int
+}
+
+class StringBomb extends Bomb {
+ type T = String
+ val x = "abc"
+ def size(that: String): Int = that.length
+}
+
+class IntBomb extends Bomb {
+ type T = Int
+ val x = 10
+
+ def size(that: Int) = x + that
+}
+
+case class Mean(var bomb: Bomb)
+
+object Test extends App {
+ def foo(x: Mean) = x match {
+ case Mean(b) =>
+ // BUG: b is assumed to be a stable identifier, but it can actually be mutated
+ println(b.size({ mutate(); b.x }))
+ }
+
+ def mutate() {
+ m.bomb = new IntBomb
+ }
+
+ val m = Mean(new StringBomb)
+ foo(m) // should print 3
+}
\ No newline at end of file
diff --git a/test/files/run/t6077_patmat_cse_irrefutable.check b/test/files/run/t6077_patmat_cse_irrefutable.check
new file mode 100644
index 0000000..9766475
--- /dev/null
+++ b/test/files/run/t6077_patmat_cse_irrefutable.check
@@ -0,0 +1 @@
+ok
diff --git a/test/files/run/t6077_patmat_cse_irrefutable.scala b/test/files/run/t6077_patmat_cse_irrefutable.scala
new file mode 100644
index 0000000..b130ae7
--- /dev/null
+++ b/test/files/run/t6077_patmat_cse_irrefutable.scala
@@ -0,0 +1,13 @@
+class LiteralNode(val value: Any)
+
+object LiteralNode {
+ // irrefutable
+ def unapply(n: LiteralNode) = Some(n.value)
+}
+
+object Test extends App {
+ ((new LiteralNode(false)): Any) match {
+ case LiteralNode(true) => println("uh-oh")
+ case LiteralNode(false) => println("ok")
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t6086-repl.check b/test/files/run/t6086-repl.check
new file mode 100644
index 0000000..97f20bd
--- /dev/null
+++ b/test/files/run/t6086-repl.check
@@ -0,0 +1,12 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> case class X(s: String)
+defined class X
+
+scala> scala.reflect.runtime.universe.typeOf[X]
+res0: reflect.runtime.universe.Type = X
+
+scala>
diff --git a/test/files/run/t6086-repl.scala b/test/files/run/t6086-repl.scala
new file mode 100644
index 0000000..87f94ec
--- /dev/null
+++ b/test/files/run/t6086-repl.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |case class X(s: String)
+ |scala.reflect.runtime.universe.typeOf[X]
+ |""".stripMargin
+}
diff --git a/test/files/run/t6086-vanilla.check b/test/files/run/t6086-vanilla.check
new file mode 100644
index 0000000..62d8fe9
--- /dev/null
+++ b/test/files/run/t6086-vanilla.check
@@ -0,0 +1 @@
+X
diff --git a/test/files/run/t6086-vanilla.scala b/test/files/run/t6086-vanilla.scala
new file mode 100644
index 0000000..b4de581
--- /dev/null
+++ b/test/files/run/t6086-vanilla.scala
@@ -0,0 +1,6 @@
+case class X(s: String)
+
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ println(typeOf[X])
+}
\ No newline at end of file
diff --git a/test/files/run/t6089.check b/test/files/run/t6089.check
new file mode 100644
index 0000000..a8d4424
--- /dev/null
+++ b/test/files/run/t6089.check
@@ -0,0 +1 @@
+scala.MatchError: Foo(0) (of class Foo)
diff --git a/test/files/run/t6089.scala b/test/files/run/t6089.scala
new file mode 100644
index 0000000..c72d7ba
--- /dev/null
+++ b/test/files/run/t6089.scala
@@ -0,0 +1,13 @@
+case class Foo(x: Int)
+
+object Test {
+ def bippo(result: Boolean): Boolean = result
+ def bungus(m: Foo): Boolean =
+ bippo(m match { case Foo(2) => bungus(m) })
+
+ def main(args: Array[String]): Unit = try {
+ bungus(Foo(0))
+ } catch {
+ case x: MatchError => println(x)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t6090.scala b/test/files/run/t6090.scala
new file mode 100644
index 0000000..e7dbb36
--- /dev/null
+++ b/test/files/run/t6090.scala
@@ -0,0 +1,6 @@
+class X { def ==(other: X) = true }
+class V(val x: X) extends AnyVal
+object Test extends {
+ def main(args: Array[String]) =
+ assert((new V(new X) == new V(new X)))
+}
diff --git a/test/files/run/t6102.check b/test/files/run/t6102.check
new file mode 100644
index 0000000..b6fc4c6
--- /dev/null
+++ b/test/files/run/t6102.check
@@ -0,0 +1 @@
+hello
\ No newline at end of file
diff --git a/test/files/run/t6102.flags b/test/files/run/t6102.flags
new file mode 100644
index 0000000..e35535c
--- /dev/null
+++ b/test/files/run/t6102.flags
@@ -0,0 +1 @@
+ -Ydead-code
diff --git a/test/files/run/t6102.scala b/test/files/run/t6102.scala
new file mode 100644
index 0000000..5358405
--- /dev/null
+++ b/test/files/run/t6102.scala
@@ -0,0 +1,13 @@
+// SI-6102 Wrong bytecode in lazyval + no-op finally clause
+
+object Test {
+
+ def main(args: Array[String]) {
+ try {
+ val x = 3
+ } finally {
+ print("hello")
+ }
+ }
+}
+
diff --git a/test/files/run/t6104.check b/test/files/run/t6104.check
new file mode 100644
index 0000000..9766475
--- /dev/null
+++ b/test/files/run/t6104.check
@@ -0,0 +1 @@
+ok
diff --git a/test/files/run/t6104.scala b/test/files/run/t6104.scala
new file mode 100644
index 0000000..8ab12c7
--- /dev/null
+++ b/test/files/run/t6104.scala
@@ -0,0 +1,8 @@
+class A { Self =>
+ val ok = "ok"
+ this match {
+ case me at Self => println(me.ok)
+ }
+}
+
+object Test extends A with App
\ No newline at end of file
diff --git a/test/files/run/t6111.check b/test/files/run/t6111.check
new file mode 100644
index 0000000..7fd2e33
--- /dev/null
+++ b/test/files/run/t6111.check
@@ -0,0 +1,2 @@
+(8,8)
+(x,x)
diff --git a/test/files/run/t6111.scala b/test/files/run/t6111.scala
new file mode 100644
index 0000000..7cceea1
--- /dev/null
+++ b/test/files/run/t6111.scala
@@ -0,0 +1,26 @@
+// slightly overkill, but a good test case for implicit resolution in extractor calls,
+// along with the real fix: an extractor pattern with 1 sub-pattern should type check for all extractors
+// that return Option[T], whatever T (even if it's a tuple)
+object Foo {
+ def unapply[S, T](scrutinee: S)(implicit witness: FooHasType[S, T]): Option[T] = scrutinee match {
+ case i: Int => Some((i, i).asInstanceOf[T])
+ }
+}
+
+class FooHasType[S, T]
+object FooHasType {
+ implicit object int extends FooHasType[Int, (Int, Int)]
+}
+
+// resurrected from neg/t997
+object Foo997 { def unapply(x : String): Option[(String, String)] = Some((x, x)) }
+
+object Test extends App {
+ val x = 8
+ println(x match {
+ case Foo(p) => p // p should be a pair of Int
+ })
+
+ // Prints '(x, x)'
+ "x" match { case Foo997(a) => println(a) }
+}
\ No newline at end of file
diff --git a/test/files/run/t6113.check b/test/files/run/t6113.check
new file mode 100644
index 0000000..65fb3cd
--- /dev/null
+++ b/test/files/run/t6113.check
@@ -0,0 +1 @@
+Foo[[X](Int, X)]
diff --git a/test/files/run/t6113.scala b/test/files/run/t6113.scala
new file mode 100644
index 0000000..321cae8
--- /dev/null
+++ b/test/files/run/t6113.scala
@@ -0,0 +1,6 @@
+trait Foo[C[_]]
+
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ println(typeOf[Foo[({type l[X] = (Int, X)})#l]])
+}
\ No newline at end of file
diff --git a/test/files/run/t6114.scala b/test/files/run/t6114.scala
new file mode 100644
index 0000000..cb880ec
--- /dev/null
+++ b/test/files/run/t6114.scala
@@ -0,0 +1,61 @@
+object Test extends App {
+ def testList = {
+ val list = new java.util.ArrayList[Int]
+ list.add(1)
+ list.add(2)
+ list.add(3)
+ import scala.collection.JavaConverters._
+ val next = list.asScala ++ List(4,5,6)
+ assert(next != list.asScala)
+
+ val raw = list.asScala
+ val cloned = raw.clone
+ list.add(1)
+ assert(raw != cloned)
+ }
+ def testSet = {
+ val set = new java.util.HashSet[Int]
+ set.add(1)
+ set.add(2)
+ set.add(3)
+ import scala.collection.JavaConverters._
+ val next = set.asScala ++ Set(4,5,6)
+ assert(next != set.asScala)
+
+ val raw = set.asScala
+ val cloned = raw.clone
+ set.add(4)
+ assert(raw != cloned)
+ }
+ def testMap = {
+ val map = new java.util.HashMap[Int,Int]
+ map.put(1,1)
+ map.put(2,2)
+ map.put(3,3)
+ import scala.collection.JavaConverters._
+ val next = map.asScala ++ Map(4->4,5->5,6->6)
+ assert(next != map.asScala)
+
+ val raw = map.asScala
+ val cloned = raw.clone
+ map.put(4,4)
+ assert(raw != cloned)
+ }
+
+ def testCollection = {
+ val list: java.util.Collection[Int] = new java.util.ArrayDeque[Int]
+ list.add(1)
+ list.add(2)
+ list.add(3)
+ import scala.collection.JavaConverters._
+ val next = list.asScala ++ List(4,5,6)
+ assert(next != list.asScala)
+
+ // Note: Clone is hidden at this level, so no overriden cloning.
+ }
+
+ testList
+ testSet
+ testMap
+ testCollection
+}
diff --git a/test/files/run/t6126.scala b/test/files/run/t6126.scala
new file mode 100644
index 0000000..d552d8e
--- /dev/null
+++ b/test/files/run/t6126.scala
@@ -0,0 +1,8 @@
+trait LogLevelType
+object Test {
+ type LogLevel = Int with LogLevelType
+ final val ErrorLevel = 1.asInstanceOf[Int with LogLevelType]
+ def main(args: Array[String]) {
+ List(ErrorLevel, ErrorLevel)
+ }
+}
diff --git a/test/files/run/t6135.scala b/test/files/run/t6135.scala
new file mode 100644
index 0000000..c0f8f3f
--- /dev/null
+++ b/test/files/run/t6135.scala
@@ -0,0 +1,13 @@
+object Test extends App {
+ class A { class V }
+
+ abstract class B[S] {
+ def foo(t: S, a: A)(v: a.V)
+ }
+
+ val b1 = new B[String] {
+ def foo(t: String, a: A)(v: a.V) = () // Bridge method required here!
+ }
+
+ b1.foo("", null)(null)
+}
diff --git a/test/files/run/t6146b.check b/test/files/run/t6146b.check
new file mode 100644
index 0000000..49ff706
--- /dev/null
+++ b/test/files/run/t6146b.check
@@ -0,0 +1,59 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> :power
+** Power User mode enabled - BEEP WHIR GYVE **
+** :phase has been set to 'typer'. **
+** scala.tools.nsc._ has been imported **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
+
+scala> val u = rootMirror.universe; import u._, language._
+u: $r.intp.global.type = <global>
+import u._
+import language._
+
+scala> val S1 = typeOf[c.X.S1.type forSome { val c: C[_] }].typeSymbol.tpeHK
+S1: u.Type = C.X.S1.type
+
+scala> val S2 = typeOf[O.S2].typeSymbol.tpeHK
+S2: u.Type = C.this.S2
+
+scala> val S3 = typeOf[O.S3].typeSymbol.tpeHK
+S3: u.Type = O.S3
+
+scala> val S4 = typeOf[S4].typeSymbol.tpeHK
+S4: u.Type = S4
+
+scala> val F = typeOf[c.F[_] forSome { val c: C[_] }].typeSymbol.tpeHK
+F: u.Type = C.this.F
+
+scala> val fTpe = typeOf[O.type].decl(newTermName("foo")).paramss.head.head.tpe
+fTpe: u.Type = O.F[Int]
+
+scala> def memType(sub: Type, scrut: Type): Type =
+ nestedMemberType(sub.typeSymbol, scrut.prefix, scrut.typeSymbol.owner)
+memType: (sub: u.Type, scrut: u.Type)u.Type
+
+scala>
+
+scala> val mt1 = memType(S1, fTpe)
+mt1: u.Type = O.X.S1.type
+
+scala> global.typeDeconstruct.show(mt1)
+res0: String =
+TypeRef(
+ pre = SingleType(pre = ThisType(object O), object X)
+ TypeSymbol(class S1 extends C.this.F[T])
+)
+
+scala> memType(S2, fTpe)
+res1: u.Type = O.S2
+
+scala> memType(S3, fTpe)
+res2: u.Type = O.S3
+
+scala> memType(S4, fTpe)
+res3: u.Type = S4
+
+scala>
diff --git a/test/files/run/t6146b.scala b/test/files/run/t6146b.scala
new file mode 100644
index 0000000..e63709a
--- /dev/null
+++ b/test/files/run/t6146b.scala
@@ -0,0 +1,40 @@
+import scala.tools.partest.ReplTest
+
+class A {
+ sealed trait F[A]
+}
+
+class C[T] extends A {
+ sealed trait F[A]
+ object X {
+ object S1 extends F[T]
+ }
+ class S2 extends F[T]
+}
+object O extends C[Int] {
+ def foo(f: F[Int]) = f match { case X.S1 => }
+
+ class S3 extends F[Int]
+}
+class S4 extends O.F[String]
+
+object Test extends ReplTest {
+ override def code = """
+:power
+val u = rootMirror.universe; import u._, language._
+val S1 = typeOf[c.X.S1.type forSome { val c: C[_] }].typeSymbol.tpeHK
+val S2 = typeOf[O.S2].typeSymbol.tpeHK
+val S3 = typeOf[O.S3].typeSymbol.tpeHK
+val S4 = typeOf[S4].typeSymbol.tpeHK
+val F = typeOf[c.F[_] forSome { val c: C[_] }].typeSymbol.tpeHK
+val fTpe = typeOf[O.type].decl(newTermName("foo")).paramss.head.head.tpe
+def memType(sub: Type, scrut: Type): Type =
+ nestedMemberType(sub.typeSymbol, scrut.prefix, scrut.typeSymbol.owner)
+
+val mt1 = memType(S1, fTpe)
+global.typeDeconstruct.show(mt1)
+memType(S2, fTpe)
+memType(S3, fTpe)
+memType(S4, fTpe)
+ """.stripMargin.trim
+}
\ No newline at end of file
diff --git a/test/files/run/t6150.scala b/test/files/run/t6150.scala
new file mode 100644
index 0000000..bd8af5d
--- /dev/null
+++ b/test/files/run/t6150.scala
@@ -0,0 +1,44 @@
+
+
+
+
+object Test {
+ import collection.{ immutable, mutable, generic }
+ def TheOneTrueCBF = collection.IndexedSeq.ReusableCBF
+
+ val cbf1 = implicitly[generic.CanBuildFrom[immutable.Vector[Int], Int, collection.IndexedSeq[Int]]]
+ val cbf2 = implicitly[generic.CanBuildFrom[immutable.IndexedSeq[Int], Int, collection.IndexedSeq[Int]]]
+ val cbf3 = implicitly[generic.CanBuildFrom[collection.IndexedSeq[Int], Int, collection.IndexedSeq[Int]]]
+
+ val cbf4 = implicitly[generic.CanBuildFrom[immutable.Vector[Int], Int, immutable.IndexedSeq[Int]]]
+ val cbf5 = implicitly[generic.CanBuildFrom[immutable.Vector[Int], Int, immutable.Vector[Int]]]
+ val cbf6 = implicitly[generic.CanBuildFrom[immutable.IndexedSeq[Int], Int, immutable.IndexedSeq[Int]]]
+
+ def check[C](v: C) = {
+ assert(v == Vector(1, 2, 3, 4))
+ assert(v.isInstanceOf[Vector[_]])
+ }
+ def checkRealMccoy(x: AnyRef) = {
+ assert(x eq TheOneTrueCBF, cbf1)
+ }
+
+ val v = immutable.Vector(1, 2, 3)
+ val iiv: immutable.IndexedSeq[Int] = immutable.Vector(1, 2, 3)
+ val iv: IndexedSeq[Int] = immutable.Vector(1, 2, 3)
+
+ def main(args: Array[String]): Unit = {
+ List(cbf1, cbf2, cbf3, cbf4, cbf5, cbf6) foreach checkRealMccoy
+ check(v.:+(4)(cbf1))
+ check(v.:+(4)(cbf2))
+ check(v.:+(4)(cbf3))
+
+ check(iiv.:+(4)(cbf2))
+ check(iiv.:+(4)(cbf3))
+
+ check(iv.:+(4)(cbf3))
+ }
+}
+
+
+
+
diff --git a/test/files/run/t6154.check b/test/files/run/t6154.check
new file mode 100644
index 0000000..9766475
--- /dev/null
+++ b/test/files/run/t6154.check
@@ -0,0 +1 @@
+ok
diff --git a/test/files/run/t6154.scala b/test/files/run/t6154.scala
new file mode 100644
index 0000000..02ef629
--- /dev/null
+++ b/test/files/run/t6154.scala
@@ -0,0 +1,10 @@
+object Test {
+ def foo(a: Int) {
+ var bar: Int = 0
+ bar = try { 0 } catch { case ex: Throwable => 0 }
+ new { foo(bar) }
+ }
+
+ def main(args: Array[String]): Unit =
+ try foo(0) catch { case _: java.lang.StackOverflowError => println("ok") }
+}
diff --git a/test/files/run/t6175.scala b/test/files/run/t6175.scala
new file mode 100644
index 0000000..69a0a71
--- /dev/null
+++ b/test/files/run/t6175.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ import reflect.runtime._
+ val m = universe.typeOf[List[_]].members.head.asMethod
+ currentMirror.reflect (List (2, 3, 1)).reflectMethod(m)
+}
\ No newline at end of file
diff --git a/test/files/run/t6178.check b/test/files/run/t6178.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/t6178.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/t6178.scala b/test/files/run/t6178.scala
new file mode 100644
index 0000000..0b4cf0b
--- /dev/null
+++ b/test/files/run/t6178.scala
@@ -0,0 +1,7 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ val plus = typeOf[java.lang.String].member(newTermName("$plus")).asMethod
+ println(cm.reflect("").reflectMethod(plus).apply("2"))
+}
\ No newline at end of file
diff --git a/test/files/run/t6181.check b/test/files/run/t6181.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/t6181.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/t6181.scala b/test/files/run/t6181.scala
new file mode 100644
index 0000000..fb23eaf
--- /dev/null
+++ b/test/files/run/t6181.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ class C { def test(x: => Int) = println(x) }
+ val mm = cm.reflect(new C).reflectMethod(typeOf[C].member(newTermName("test")).asMethod)
+ mm(2)
+}
\ No newline at end of file
diff --git a/test/files/run/t6187.check b/test/files/run/t6187.check
new file mode 100644
index 0000000..c0ca029
--- /dev/null
+++ b/test/files/run/t6187.check
@@ -0,0 +1,32 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> import language.experimental.macros, reflect.macros.Context
+import language.experimental.macros
+import reflect.macros.Context
+
+scala> def macroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[List[T]] = {
+ val r = c.universe.reify { List(t.splice) }
+ c.Expr[List[T]]( c.resetLocalAttrs(r.tree) )
+}
+macroImpl: [T](c: scala.reflect.macros.Context)(t: c.Expr[T])(implicit evidence$1: c.WeakTypeTag[T])c.Expr[List[T]]
+
+scala> def demo[T](t: T): List[T] = macro macroImpl[T]
+demo: [T](t: T)List[T]
+
+scala> def m[T](t: T): List[List[T]] =
+ demo( List((t,true)) collect { case (x,true) => x } )
+m: [T](t: T)List[List[T]]
+
+scala> m(List(1))
+res0: List[List[List[Int]]] = List(List(List(1)))
+
+scala> // Showing we haven't added unreachable warnings
+
+scala> List(1) collect { case x => x }
+res1: List[Int] = List(1)
+
+scala> List("") collect { case x => x }
+res2: List[String] = List("")
+
+scala>
diff --git a/test/files/run/t6187.scala b/test/files/run/t6187.scala
new file mode 100644
index 0000000..ae64291
--- /dev/null
+++ b/test/files/run/t6187.scala
@@ -0,0 +1,18 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def code = """
+import language.experimental.macros, reflect.macros.Context
+def macroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[List[T]] = {
+ val r = c.universe.reify { List(t.splice) }
+ c.Expr[List[T]]( c.resetLocalAttrs(r.tree) )
+}
+def demo[T](t: T): List[T] = macro macroImpl[T]
+def m[T](t: T): List[List[T]] =
+ demo( List((t,true)) collect { case (x,true) => x } )
+m(List(1))
+// Showing we haven't added unreachable warnings
+List(1) collect { case x => x }
+List("") collect { case x => x }
+ """.trim
+}
diff --git a/test/files/run/t6187b.scala b/test/files/run/t6187b.scala
new file mode 100644
index 0000000..d2d3e97
--- /dev/null
+++ b/test/files/run/t6187b.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ val x: PartialFunction[Int, Int] = { case 1 => 1 }
+ val o: Any = ""
+ assert(x.applyOrElse(0, (_: Int) => o) == "")
+}
diff --git a/test/files/run/t6188.check b/test/files/run/t6188.check
new file mode 100644
index 0000000..5d64afc
--- /dev/null
+++ b/test/files/run/t6188.check
@@ -0,0 +1 @@
+Failure(java.lang.Exception: this is an exception)
diff --git a/test/files/run/t6188.flags b/test/files/run/t6188.flags
new file mode 100644
index 0000000..0ebca3e
--- /dev/null
+++ b/test/files/run/t6188.flags
@@ -0,0 +1 @@
+ -optimize
diff --git a/test/files/run/t6188.scala b/test/files/run/t6188.scala
new file mode 100644
index 0000000..48180dd
--- /dev/null
+++ b/test/files/run/t6188.scala
@@ -0,0 +1,12 @@
+// SI-6188 Optimizer incorrectly removes method invocations containing throw expressions
+
+import scala.util.Success
+
+object Test {
+ def main(args: Array[String]) {
+ val e = new Exception("this is an exception")
+ val res = Success(1).flatMap[Int](x => throw e)
+ println(res)
+ }
+}
+
diff --git a/test/files/run/t6194.check b/test/files/run/t6194.check
new file mode 100644
index 0000000..b325f47
--- /dev/null
+++ b/test/files/run/t6194.check
@@ -0,0 +1 @@
+C:\FooBar\Java\includes\*.jar
diff --git a/test/files/run/t6194.scala b/test/files/run/t6194.scala
new file mode 100644
index 0000000..ced3259
--- /dev/null
+++ b/test/files/run/t6194.scala
@@ -0,0 +1,8 @@
+import scala.tools.nsc.util._
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val cp = ClassPath.expandPath("""C:\FooBar\Java\includes\*.jar""") mkString java.io.File.pathSeparator
+ println(cp)
+ }
+}
diff --git a/test/files/jvm/bug680.check b/test/files/run/t6197.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t6197.check
diff --git a/test/files/run/t6197.scala b/test/files/run/t6197.scala
new file mode 100644
index 0000000..5ab4b00
--- /dev/null
+++ b/test/files/run/t6197.scala
@@ -0,0 +1,21 @@
+import scala.collection.immutable._
+
+object Test extends App {
+
+ // test that a HashTrieSet with one leaf element is not created!
+ val x = HashSet.empty + 1 + 2
+ if(x.getClass.getSimpleName != "HashTrieSet")
+ println("A hash set containing two non-colliding values should be a HashTrieSet")
+
+ val y = x - 1
+ if(y.getClass.getSimpleName != "HashSet1")
+ println("A hash set containing one element should always use HashSet1")
+
+ // it is pretty hard to test that the case where a HashTrieSet has one element which
+ // is itself of type HashTrieS t. That is because the improve hash function makes it very difficult
+ // to find keys that will have hashes that are close together.
+ //
+ // However, it is also not necessary. Removing the ability of a HashTrieSet to have
+ // one child of type HashTrieSet completely breaks the HashSet, so that many other
+ // tests fail
+}
diff --git a/test/files/jvm/bug680.check b/test/files/run/t6198.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t6198.check
diff --git a/test/files/run/t6198.scala b/test/files/run/t6198.scala
new file mode 100644
index 0000000..5aa8f1c
--- /dev/null
+++ b/test/files/run/t6198.scala
@@ -0,0 +1,24 @@
+import scala.collection.immutable._
+
+object Test extends App {
+ // test that ListSet.tail does not use a builder
+ // we can't test for O(1) behavior, so the best we can do is to
+ // check that ls.tail always returns the same instance
+ val ls = ListSet.empty[Int] + 1 + 2
+
+ if(ls.tail ne ls.tail)
+ println("ListSet.tail should not use a builder!")
+
+ // class that always causes hash collisions
+ case class Collision(value:Int) { override def hashCode = 0 }
+
+ // create a set that should have a collison
+ val x = HashSet.empty + Collision(0) + Collision(1)
+ if(x.getClass.getSimpleName != "HashSetCollision1")
+ println("HashSet of size >1 with collisions should use HashSetCollision")
+
+ // remove the collision again by removing all but one element
+ val y = x - Collision(0)
+ if(y.getClass.getSimpleName != "HashSet1")
+ println("HashSet of size 1 should use HashSet1" + y.getClass)
+}
diff --git a/test/files/run/t6199-mirror.check b/test/files/run/t6199-mirror.check
new file mode 100644
index 0000000..6a452c1
--- /dev/null
+++ b/test/files/run/t6199-mirror.check
@@ -0,0 +1 @@
+()
diff --git a/test/files/run/t6199-mirror.scala b/test/files/run/t6199-mirror.scala
new file mode 100644
index 0000000..772a384
--- /dev/null
+++ b/test/files/run/t6199-mirror.scala
@@ -0,0 +1,7 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ class C { def foo = () }
+ println(cm.reflect(new C).reflectMethod(typeOf[C].member(newTermName("foo")).asMethod)())
+}
\ No newline at end of file
diff --git a/test/files/run/t6199-toolbox.check b/test/files/run/t6199-toolbox.check
new file mode 100644
index 0000000..6a452c1
--- /dev/null
+++ b/test/files/run/t6199-toolbox.check
@@ -0,0 +1 @@
+()
diff --git a/test/files/run/t6199-toolbox.scala b/test/files/run/t6199-toolbox.scala
new file mode 100644
index 0000000..89015f5
--- /dev/null
+++ b/test/files/run/t6199-toolbox.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ println(tb.eval(Literal(Constant(()))))
+}
\ No newline at end of file
diff --git a/test/files/run/t6206.check b/test/files/run/t6206.check
new file mode 100644
index 0000000..8064573
--- /dev/null
+++ b/test/files/run/t6206.check
@@ -0,0 +1,4 @@
+outer
+outer
+inner
+inner
diff --git a/test/files/run/t6206.scala b/test/files/run/t6206.scala
new file mode 100644
index 0000000..07ff246
--- /dev/null
+++ b/test/files/run/t6206.scala
@@ -0,0 +1,37 @@
+class Outer {
+ def apply( position : Inner ) {}
+ class Inner
+
+ this.apply(new Inner)
+ this (new Inner) // error,
+}
+
+
+class Outer1 {
+
+ self =>
+
+ def apply( position : Inner ) : String = "outer"
+
+ class Inner( ) {
+
+ def apply(arg: Inner): String = "inner"
+
+ def testMe = {
+ List(
+ self.apply( this ), // a) this works
+ self( this ), // b) this does not work!
+ this apply this,
+ this(this)
+ ) foreach println
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val o = new Outer1
+ val i = new o.Inner
+ i.testMe
+ }
+}
diff --git a/test/files/run/t6220.scala b/test/files/run/t6220.scala
new file mode 100644
index 0000000..834b692
--- /dev/null
+++ b/test/files/run/t6220.scala
@@ -0,0 +1,92 @@
+import scala.collection.immutable._
+
+object Test extends App {
+
+ // finds an int x such that improved(x) differs in the first bit to improved(0),
+ // which is the worst case for the HashTrieSet
+ def findWorstCaseInts() {
+ // copy of improve from HashSet
+ def improve(hcode: Int) = {
+ var h: Int = hcode + ~(hcode << 9)
+ h = h ^ (h >>> 14)
+ h = h + (h << 4)
+ h ^ (h >>> 10)
+ }
+
+ // find two hashes which have a large separation
+ val x = 0
+ var y = 1
+ val ix = improve(x)
+ while(y!=0 && improve(y)!=ix+(1<<31))
+ y+=1
+ printf("%s %s %x %x\n",x,y,improve(x), improve(y))
+ }
+ // this is not done every test run since it would slow down ant test.suite too much.
+ // findWorstCaseInts()
+
+ // two numbers that are immediately adiacent when fed through HashSet.improve
+ val h0 = 0
+ val h1 = 1270889724
+
+ // h is the hashcode, i is ignored for the hashcode but relevant for equality
+ case class Collision(h:Int, i:Int) {
+ override def hashCode = h
+ }
+ val a = Collision(h0,0)
+ val b = Collision(h0,1)
+ val c = Collision(h1,0)
+
+ // create a HashSetCollision1
+ val x = HashSet(a) + b
+ if(x.getClass.getSimpleName != "HashSetCollision1")
+ println("x should be a collision")
+ StructureTests.validate(x)
+ // StructureTests.printStructure(x)
+ require(x.size==2 && x.contains(a) && x.contains(b))
+
+ // go from a HashSetCollision1 to a HashTrieSet with maximum depth
+ val y = x + c
+ if(y.getClass.getSimpleName != "HashTrieSet")
+ println("y should be a HashTrieSet")
+ StructureTests.validate(y)
+ // StructureTests.printStructure(y)
+ require(y.size==3 && y.contains(a) && y.contains(b) && y.contains(c))
+
+ // go from a HashSet1 directly to a HashTrieSet with maximum depth
+ val z = HashSet(a) + c
+ if(y.getClass.getSimpleName != "HashTrieSet")
+ println("y should be a HashTrieSet")
+ StructureTests.validate(z)
+ // StructureTests.printStructure(z)
+ require(z.size == 2 && z.contains(a) && z.contains(c))
+}
+
+package scala.collection.immutable {
+ object StructureTests {
+ def printStructure(x:HashSet[_], prefix:String="") {
+ x match {
+ case m:HashSet.HashTrieSet[_] =>
+ println(prefix+m.getClass.getSimpleName + " " + m.size)
+ m.elems.foreach(child => printStructure(child, prefix + " "))
+ case m:HashSet.HashSetCollision1[_] =>
+ println(prefix+m.getClass.getSimpleName + " " + m.ks.size)
+ case m:HashSet.HashSet1[_] =>
+ println(prefix+m.getClass.getSimpleName + " " + m.head)
+ case _ =>
+ println(prefix+"empty")
+ }
+ }
+
+ def validate(x:HashSet[_]) {
+ x match {
+ case m:HashSet.HashTrieSet[_] =>
+ require(m.elems.size>1 || (m.elems.size==1 && m.elems(0).isInstanceOf[HashSet.HashTrieSet[_]]))
+ m.elems.foreach(validate _)
+ case m:HashSet.HashSetCollision1[_] =>
+ require(m.ks.size>1)
+ case m:HashSet.HashSet1[_] =>
+ case _ =>
+ }
+ }
+ }
+}
diff --git a/test/files/run/t6223.check b/test/files/run/t6223.check
new file mode 100644
index 0000000..f83799b
--- /dev/null
+++ b/test/files/run/t6223.check
@@ -0,0 +1,4 @@
+bar
+bar$mIc$sp
+bar$mIcI$sp
+bar$mcI$sp
\ No newline at end of file
diff --git a/test/files/run/t6223.scala b/test/files/run/t6223.scala
new file mode 100644
index 0000000..0996ea1
--- /dev/null
+++ b/test/files/run/t6223.scala
@@ -0,0 +1,11 @@
+class Foo[@specialized(Int) A](a:A) {
+ def bar[@specialized(Int) B](f:A => B) = new Foo(f(a))
+}
+
+object Test {
+ def main(args:Array[String]) {
+ val f = new Foo(333)
+ val ms = f.getClass().getDeclaredMethods().map(_.getName).sorted
+ ms.foreach(println)
+ }
+}
diff --git a/test/files/run/t6246.check b/test/files/run/t6246.check
new file mode 100644
index 0000000..9532185
--- /dev/null
+++ b/test/files/run/t6246.check
@@ -0,0 +1,90 @@
+runtimeClass = byte, toString = Byte
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = short, toString = Short
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = char, toString = Char
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = int, toString = Int
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = long, toString = Long
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = float, toString = Float
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = double, toString = Double
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = void, toString = Unit
+true
+true
+true
+false
+true
+false
+false
+false
+false
+runtimeClass = boolean, toString = Boolean
+true
+true
+true
+false
+true
+false
+false
+false
+false
\ No newline at end of file
diff --git a/test/files/run/t6246.scala b/test/files/run/t6246.scala
new file mode 100644
index 0000000..28765e1
--- /dev/null
+++ b/test/files/run/t6246.scala
@@ -0,0 +1,26 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ def testValueClass(tag: ClassTag[_]) {
+ println(s"runtimeClass = ${tag.runtimeClass}, toString = ${tag.toString}")
+ println(tag <:< tag)
+ println(tag <:< ClassTag.AnyVal)
+ println(tag <:< ClassTag.Any)
+ println(tag <:< ClassTag.Nothing)
+ println(ClassTag.Nothing <:< tag)
+ println(tag <:< ClassTag.Null)
+ println(ClassTag.Null <:< tag)
+ println(tag <:< ClassTag.Object)
+ println(ClassTag.Object <:< tag)
+ }
+
+ testValueClass(ClassTag.Byte)
+ testValueClass(ClassTag.Short)
+ testValueClass(ClassTag.Char)
+ testValueClass(ClassTag.Int)
+ testValueClass(ClassTag.Long)
+ testValueClass(ClassTag.Float)
+ testValueClass(ClassTag.Double)
+ testValueClass(ClassTag.Unit)
+ testValueClass(ClassTag.Boolean)
+}
\ No newline at end of file
diff --git a/test/files/run/t6259.scala b/test/files/run/t6259.scala
new file mode 100644
index 0000000..294c95e
--- /dev/null
+++ b/test/files/run/t6259.scala
@@ -0,0 +1,56 @@
+import scala.reflect.runtime.universe._
+
+class A[X](implicit val tt: TypeTag[X]) {}
+object B extends A[String]
+
+object C {
+ object D extends A[String]
+}
+
+trait E {
+ object F extends A[String]
+}
+
+class G {
+ object H extends A[String]
+}
+
+object HasX {
+ val x = {
+ object InVal extends A[String]
+ InVal
+ 5
+ }
+
+}
+
+trait NeedsEarly {
+ val x: AnyRef
+}
+
+object Early extends {
+ // Drops to this.getClass and is not ok...
+ val x = { object EarlyOk extends A[String]; EarlyOk }
+} with NeedsEarly
+
+
+class DoubleTrouble[X](x: AnyRef)(implicit override val tt: TypeTag[X]) extends A[X]
+
+object DoubleOk extends DoubleTrouble[String]({
+ // Drops to this.getClass and is an issue
+ object InnerTrouble extends A[String];
+ InnerTrouble
+})
+
+object Test extends App {
+ B
+ C.D
+ val e = new E {}; e.F
+ val g = new G; g.H
+
+ locally(HasX.x)
+ // locally(Early.x) TODO sort out VerifyError in Early$.<init>
+ // DoubleOk TODO sort out VerifyError in DoubleOk$.<init>
+}
+
+
diff --git a/test/files/run/t6260.check b/test/files/run/t6260.check
new file mode 100644
index 0000000..54f98a1
--- /dev/null
+++ b/test/files/run/t6260.check
@@ -0,0 +1 @@
+Box(abcabc)
diff --git a/test/files/run/t6260.scala b/test/files/run/t6260.scala
new file mode 100644
index 0000000..cfe9e1e
--- /dev/null
+++ b/test/files/run/t6260.scala
@@ -0,0 +1,12 @@
+class Box[X <: CharSequence](val x: X) extends AnyVal {
+ def map[Y <: CharSequence](f: X => Y): Box[Y] =
+ ((bx: Box[X]) => new Box(f(bx.x)))(this)
+ override def toString = s"Box($x)"
+}
+
+object Test {
+ def main(args: Array[String]) {
+ val g = (x: String) => x + x
+ println(new Box("abc") map g)
+ }
+}
diff --git a/test/files/run/t6261.scala b/test/files/run/t6261.scala
new file mode 100644
index 0000000..b446325
--- /dev/null
+++ b/test/files/run/t6261.scala
@@ -0,0 +1,130 @@
+import scala.collection.immutable._
+
+object Test extends App {
+
+ def test0() {
+ val m=ListMap(1->2,3->4)
+ if(m.tail ne m.tail)
+ println("ListMap.tail uses a builder, so it is not O(1)")
+ }
+
+ def test1() {
+ // test that a HashTrieMap with one leaf element is not created!
+ val x = HashMap.empty + (1->1) + (2->2)
+ if(x.getClass.getSimpleName != "HashTrieMap")
+ println("A hash map containing two non-colliding values should be a HashTrieMap")
+
+ val y = x - 1
+ if(y.getClass.getSimpleName != "HashMap1")
+ println("A hash map containing one element should always use HashMap1")
+ }
+
+ def test2() {
+ // class that always causes hash collisions
+ case class Collision(value:Int) { override def hashCode = 0 }
+
+ // create a set that should have a collison
+ val x = HashMap.empty + (Collision(0)->0) + (Collision(1) ->0)
+ if(x.getClass.getSimpleName != "HashMapCollision1")
+ println("HashMap of size >1 with collisions should use HashMapCollision")
+
+ // remove the collision again by removing all but one element
+ val y = x - Collision(0)
+ if(y.getClass.getSimpleName != "HashMap1")
+ println("HashMap of size 1 should use HashMap1" + y.getClass)
+ }
+ def test3() {
+ // finds an int x such that improved(x) differs in the first bit to improved(0),
+ // which is the worst case for the HashTrieSet
+ def findWorstCaseInts() {
+ // copy of improve from HashSet
+ def improve(hcode: Int) = {
+ var h: Int = hcode + ~(hcode << 9)
+ h = h ^ (h >>> 14)
+ h = h + (h << 4)
+ h ^ (h >>> 10)
+ }
+
+ // find two hashes which have a large separation
+ val x = 0
+ var y = 1
+ val ix = improve(x)
+ while(y!=0 && improve(y)!=ix+(1<<31))
+ y+=1
+ printf("%s %s %x %x\n",x,y,improve(x), improve(y))
+ }
+ // this is not done every test run since it would slow down ant test.suite too much.
+ // findWorstCaseInts()
+
+ // two numbers that are immediately adiacent when fed through HashSet.improve
+ val h0 = 0
+ val h1 = 1270889724
+
+ // h is the hashcode, i is ignored for the hashcode but relevant for equality
+ case class Collision(h:Int, i:Int) {
+ override def hashCode = h
+ }
+ val a = Collision(h0,0)->0
+ val b = Collision(h0,1)->0
+ val c = Collision(h1,0)->0
+
+ // create a HashSetCollision1
+ val x = HashMap(a) + b
+ if(x.getClass.getSimpleName != "HashMapCollision1")
+ println("x should be a HashMapCollision")
+ StructureTests.validate(x)
+ //StructureTests.printStructure(x)
+ require(x.size==2 && x.contains(a._1) && x.contains(b._1))
+
+ // go from a HashSetCollision1 to a HashTrieSet with maximum depth
+ val y = x + c
+ if(y.getClass.getSimpleName != "HashTrieMap")
+ println("y should be a HashTrieMap")
+ StructureTests.validate(y)
+ // StructureTests.printStructure(y)
+ require(y.size==3 && y.contains(a._1) && y.contains(b._1) && y.contains(c._1))
+
+ // go from a HashSet1 directly to a HashTrieSet with maximum depth
+ val z = HashMap(a) + c
+ if(y.getClass.getSimpleName != "HashTrieMap")
+ println("y should be a HashTrieMap")
+ StructureTests.validate(z)
+ // StructureTests.printStructure(z)
+ require(z.size == 2 && z.contains(a._1) && z.contains(c._1))
+ }
+ test0()
+ test1()
+ test2()
+ test3()
+}
+
+
+package scala.collection.immutable {
+ object StructureTests {
+ def printStructure(x:HashMap[_,_], prefix:String="") {
+ x match {
+ case m:HashMap.HashTrieMap[_,_] =>
+ println(prefix+m.getClass.getSimpleName + " " + m.size)
+ m.elems.foreach(child => printStructure(child, prefix + " "))
+ case m:HashMap.HashMapCollision1[_,_] =>
+ println(prefix+m.getClass.getSimpleName + " " + m.kvs.size)
+ case m:HashMap.HashMap1[_,_] =>
+ println(prefix+m.getClass.getSimpleName + " " + m.head)
+ case _ =>
+ println(prefix+"empty")
+ }
+ }
+
+ def validate(x:HashMap[_,_]) {
+ x match {
+ case m:HashMap.HashTrieMap[_,_] =>
+ require(m.elems.size>1 || (m.elems.size==1 && m.elems(0).isInstanceOf[HashMap.HashTrieMap[_,_]]))
+ m.elems.foreach(validate _)
+ case m:HashMap.HashMapCollision1[_,_] =>
+ require(m.kvs.size>1)
+ case m:HashMap.HashMap1[_,_] =>
+ case _ =>
+ }
+ }
+ }
+}
diff --git a/test/files/run/bug627.check b/test/files/run/t627.check
similarity index 100%
rename from test/files/run/bug627.check
rename to test/files/run/t627.check
diff --git a/test/files/run/bug627.scala b/test/files/run/t627.scala
similarity index 100%
rename from test/files/run/bug627.scala
rename to test/files/run/t627.scala
diff --git a/test/files/run/t6271.scala b/test/files/run/t6271.scala
new file mode 100644
index 0000000..8ebf7ad
--- /dev/null
+++ b/test/files/run/t6271.scala
@@ -0,0 +1,32 @@
+object Test extends App {
+ def filterIssue = {
+ val viewed : Iterable[Iterable[Int]] = List(List(0).view).view
+ val filtered = viewed flatMap { x => List( x filter (_ > 0) ) }
+ filtered.iterator.toIterable.flatten
+ }
+ def takenIssue = {
+ val viewed : Iterable[Iterable[Int]] = List(List(0).view).view
+ val filtered = viewed flatMap { x => List( x take 0 ) }
+ filtered.iterator.toIterable.flatten
+ }
+ def droppedIssue = {
+ val viewed : Iterable[Iterable[Int]] = List(List(0).view).view
+ val filtered = viewed flatMap { x => List( x drop 1 ) }
+ filtered.iterator.toIterable.flatten
+ }
+ def flatMappedIssue = {
+ val viewed : Iterable[Iterable[Int]] = List(List(0).view).view
+ val filtered = viewed flatMap { x => List( x flatMap (_ => List()) ) }
+ filtered.iterator.toIterable.flatten
+ }
+ def slicedIssue = {
+ val viewed : Iterable[Iterable[Int]] = List(List(0).view).view
+ val filtered = viewed flatMap { x => List( x slice (2,3) ) }
+ filtered.iterator.toIterable.flatten
+ }
+ filterIssue
+ takenIssue
+ droppedIssue
+ flatMappedIssue
+ slicedIssue
+}
diff --git a/test/files/run/t6272.check b/test/files/run/t6272.check
new file mode 100644
index 0000000..f00c965
--- /dev/null
+++ b/test/files/run/t6272.check
@@ -0,0 +1,10 @@
+1
+2
+3
+4
+5
+6
+7
+8
+9
+10
diff --git a/test/files/run/t6272.scala b/test/files/run/t6272.scala
new file mode 100644
index 0000000..1744369
--- /dev/null
+++ b/test/files/run/t6272.scala
@@ -0,0 +1,62 @@
+// x1, x2, and x3 resulted in: symbol variable bitmap$0 does not exist in A.<init>
+object A {
+
+ try {
+ lazy val x1 = 1
+ println(x1)
+ sys.error("!")
+ } catch {
+ case _: Throwable =>
+ lazy val x2 = 2
+ println(x2)
+ } finally {
+ lazy val x3 = 3
+ println(x3)
+ }
+
+ if ("".isEmpty) {
+ lazy val x4 = 4
+ println(x4)
+ }
+
+ var b = true
+ while(b) {
+ lazy val x5 = 5
+ println(x5)
+ b = false
+ }
+
+
+ def method {
+ try {
+ lazy val x6 = 6
+ println(x6)
+ sys.error("!")
+ } catch {
+ case _: Throwable =>
+ lazy val x7 = 7
+ println(x7)
+ } finally {
+ lazy val x8 = 8
+ println(x8)
+ }
+
+ if ("".isEmpty) {
+ lazy val x9 = 9
+ println(x9)
+ }
+
+ var b = true
+ while(b) {
+ lazy val x10 = 10
+ println(x10)
+ b = false
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ A.method
+ }
+}
diff --git a/test/files/run/t6273.check b/test/files/run/t6273.check
new file mode 100644
index 0000000..c1c18da
--- /dev/null
+++ b/test/files/run/t6273.check
@@ -0,0 +1,19 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> val y = 55
+y: Int = 55
+
+scala> val x = s"""
+ y = $y
+"""
+x: String =
+"
+ y = 55
+"
+
+scala>
+
+scala>
diff --git a/test/files/run/t6273.scala b/test/files/run/t6273.scala
new file mode 100644
index 0000000..ed0fd45
--- /dev/null
+++ b/test/files/run/t6273.scala
@@ -0,0 +1,11 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def tq = "\"\"\""
+ def code = s"""
+val y = 55
+val x = s$tq
+ y = $$y
+$tq
+ """
+}
diff --git a/test/files/run/t6277.check b/test/files/run/t6277.check
new file mode 100644
index 0000000..f32a580
--- /dev/null
+++ b/test/files/run/t6277.check
@@ -0,0 +1 @@
+true
\ No newline at end of file
diff --git a/test/files/run/t6277.scala b/test/files/run/t6277.scala
new file mode 100644
index 0000000..41feee8
--- /dev/null
+++ b/test/files/run/t6277.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ locally {
+ val sym = typeOf[List[_]].typeSymbol.asClass
+ val q = sym.isSealed
+ println(q)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t6287.check b/test/files/run/t6287.check
new file mode 100644
index 0000000..a86ecbe
--- /dev/null
+++ b/test/files/run/t6287.check
@@ -0,0 +1,3 @@
+Vector(2, 3, 4)
+Vector(2, 3, 4)
+Vector(2, 3, 4)
diff --git a/test/files/run/t6287.scala b/test/files/run/t6287.scala
new file mode 100644
index 0000000..0c75d10
--- /dev/null
+++ b/test/files/run/t6287.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect._
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val t1 = tb.parse("1 to 3 map (_+1)")
+ println(tb.eval(t1))
+ println(tb.eval(t1))
+ println(tb.eval(t1))
+}
\ No newline at end of file
diff --git a/test/files/run/t6288.check b/test/files/run/t6288.check
new file mode 100644
index 0000000..e940975
--- /dev/null
+++ b/test/files/run/t6288.check
@@ -0,0 +1,79 @@
+[[syntax trees at end of patmat]] // newSource1.scala
+[7]package [7]<empty> {
+ [7]object Case3 extends [13][106]scala.AnyRef {
+ [13]def <init>(): [13]Case3.type = [13]{
+ [13][13][13]Case3.super.<init>();
+ [13]()
+ };
+ [21]def unapply([29]z: [32]<type: [32]scala.Any>): [21]Option[Int] = [56][52][52]scala.Some.apply[[52]Int]([58]-1);
+ [64]{
+ [64]case <synthetic> val x1: [64]Any = [64]"";
+ [64]case5()[84]{
+ [84]<synthetic> val o7: [84]Option[Int] = [84][84]Case3.unapply([84]x1);
+ [84]if ([84]o7.isEmpty.unary_!)
+ [97][97]matchEnd4([97]())
+ else
+ [84][84]case6()
+ };
+ [64]case6(){
+ [64][64]matchEnd4([64]throw [64][64][64]new [64]MatchError([64]x1))
+ };
+ [64]matchEnd4(x: [NoPosition]Unit){
+ [64]x
+ }
+ }
+ };
+ [113]object Case4 extends [119][217]scala.AnyRef {
+ [119]def <init>(): [119]Case4.type = [119]{
+ [119][119][119]Case4.super.<init>();
+ [119]()
+ };
+ [127]def unapplySeq([138]z: [141]<type: [141]scala.Any>): [127]Option[List[Int]] = [167]scala.None;
+ [175]{
+ [175]case <synthetic> val x1: [175]Any = [175]"";
+ [175]case5()[195]{
+ [195]<synthetic> val o7: [195]Option[List[Int]] = [195][195]Case4.unapplySeq([195]x1);
+ [195]if ([195]o7.isEmpty.unary_!)
+ [195]if ([195][195][195][195]o7.get.!=([195]null).&&([195][195][195][195]o7.get.lengthCompare([195]1).==([195]0)))
+ [208][208]matchEnd4([208]())
+ else
+ [195][195]case6()
+ else
+ [195][195]case6()
+ };
+ [175]case6(){
+ [175][175]matchEnd4([175]throw [175][175][175]new [175]MatchError([175]x1))
+ };
+ [175]matchEnd4(x: [NoPosition]Unit){
+ [175]x
+ }
+ }
+ };
+ [224]object Case5 extends [230][312]scala.AnyRef {
+ [230]def <init>(): [230]Case5.type = [230]{
+ [230][230][230]Case5.super.<init>();
+ [230]()
+ };
+ [238]def unapply([246]z: [249]<type: [249]scala.Any>): [238]Boolean = [265]true;
+ [273]{
+ [273]case <synthetic> val x1: [273]Any = [273]"";
+ [273]case5()[293]{
+ [293]<synthetic> val o7: [293]Option[List[Int]] = [293][293]Case4.unapplySeq([293]x1);
+ [293]if ([293]o7.isEmpty.unary_!)
+ [293]if ([293][293][293][293]o7.get.!=([293]null).&&([293][293][293][293]o7.get.lengthCompare([293]0).==([195]0)))
+ [304][304]matchEnd4([304]())
+ else
+ [293][293]case6()
+ else
+ [293][293]case6()
+ };
+ [273]case6(){
+ [273][273]matchEnd4([273]throw [273][273][273]new [273]MatchError([273]x1))
+ };
+ [273]matchEnd4(x: [NoPosition]Unit){
+ [273]x
+ }
+ }
+ }
+}
+
diff --git a/test/files/run/t6288.scala b/test/files/run/t6288.scala
new file mode 100644
index 0000000..cf5865e
--- /dev/null
+++ b/test/files/run/t6288.scala
@@ -0,0 +1,41 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:patmat -Xprint-pos -d " + testOutput.path
+
+ override def code =
+ """
+ |object Case3 {
+ | def unapply(z: Any): Option[Int] = Some(-1)
+ |
+ | "" match {
+ | case Case3(nr) => ()
+ | }
+ |}
+ |object Case4 {
+ | def unapplySeq(z: Any): Option[List[Int]] = None
+ |
+ | "" match {
+ | case Case4(nr) => ()
+ | }
+ |}
+ |object Case5 {
+ | def unapply(z: Any): Boolean = true
+ |
+ | "" match {
+ | case Case4() => ()
+ | }
+ |}
+ |
+ |""".stripMargin.trim
+
+ override def show(): Unit = {
+ // Now: [84][84]Case3.unapply([84]x1);
+ // Was: [84][84]Case3.unapply([64]x1);
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+}
diff --git a/test/files/run/t6288b-jump-position.check b/test/files/run/t6288b-jump-position.check
new file mode 100644
index 0000000..83ba810
--- /dev/null
+++ b/test/files/run/t6288b-jump-position.check
@@ -0,0 +1,76 @@
+object Case3 extends Object {
+ // fields:
+
+ // methods
+ def unapply(z: Object (REF(class Object))): Option {
+ locals: value z
+ startBlock: 1
+ blocks: [1]
+
+ 1:
+ 2 NEW REF(class Some)
+ 2 DUP(REF(class Some))
+ 2 CONSTANT(-1)
+ 2 BOX INT
+ 2 CALL_METHOD scala.Some.<init> (static-instance)
+ 2 RETURN(REF(class Option))
+
+ }
+ Exception handlers:
+
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+ locals: value args, value x1, value x
+ startBlock: 1
+ blocks: [1,2,3,6,7]
+
+ 1:
+ 4 CONSTANT("")
+ 4 STORE_LOCAL(value x1)
+ 4 SCOPE_ENTER value x1
+ 4 JUMP 2
+
+ 2:
+ 5 LOAD_LOCAL(value x1)
+ 5 IS_INSTANCE REF(class String)
+ 5 CZJUMP (BOOL)NE ? 3 : 6
+
+ 3:
+ 6 LOAD_MODULE object Predef
+ 6 CONSTANT("case 0")
+ 6 CALL_METHOD scala.Predef.println (dynamic)
+ 6 LOAD_FIELD scala.runtime.BoxedUnit.UNIT
+ 6 STORE_LOCAL(value x)
+ 6 JUMP 7
+
+ 6:
+ 8 LOAD_MODULE object Predef
+ 8 CONSTANT("default")
+ 8 CALL_METHOD scala.Predef.println (dynamic)
+ 8 LOAD_FIELD scala.runtime.BoxedUnit.UNIT
+ 8 STORE_LOCAL(value x)
+ 8 JUMP 7
+
+ 7:
+ 10 LOAD_MODULE object Predef
+ 10 CONSTANT("done")
+ 10 CALL_METHOD scala.Predef.println (dynamic)
+ 10 RETURN(UNIT)
+
+ }
+ Exception handlers:
+
+ def <init>(): Case3.type {
+ locals:
+ startBlock: 1
+ blocks: [1]
+
+ 1:
+ 1 THIS(Case3)
+ 1 CALL_METHOD java.lang.Object.<init> (super())
+ 1 RETURN(UNIT)
+
+ }
+ Exception handlers:
+
+
+}
diff --git a/test/files/run/t6288b-jump-position.scala b/test/files/run/t6288b-jump-position.scala
new file mode 100644
index 0000000..e22a1ab
--- /dev/null
+++ b/test/files/run/t6288b-jump-position.scala
@@ -0,0 +1,22 @@
+import scala.tools.partest.IcodeTest
+
+object Test extends IcodeTest {
+ override def code =
+ """object Case3 { // 01
+ | def unapply(z: Any): Option[Int] = Some(-1) // 02
+ | def main(args: Array[String]) { // 03
+ | ("": Any) match { // 04
+ | case x : String => // 05 Read: <linenumber> JUMP <target basic block id>
+ | println("case 0") // 06 expecting "6 JUMP 7", was "8 JUMP 7"
+ | case _ => // 07
+ | println("default") // 08 expecting "8 JUMP 7"
+ | } // 09
+ | println("done") // 10
+ | }
+ |}""".stripMargin
+
+ override def show() {
+ val lines1 = collectIcode("")
+ println(lines1 mkString "\n")
+ }
+}
diff --git a/test/files/run/randomAccessSeq-apply.check b/test/files/run/t629.check
similarity index 100%
rename from test/files/run/randomAccessSeq-apply.check
rename to test/files/run/t629.check
diff --git a/test/files/run/bug629.scala b/test/files/run/t629.scala
similarity index 100%
rename from test/files/run/bug629.scala
rename to test/files/run/t629.scala
diff --git a/test/files/run/t6290.scala b/test/files/run/t6290.scala
new file mode 100644
index 0000000..9d05db0
--- /dev/null
+++ b/test/files/run/t6290.scala
@@ -0,0 +1,4 @@
+object Test {
+ implicit val foo = language.dynamics
+ def main(args: Array[String]): Unit = ()
+}
diff --git a/test/files/run/t6292.scala b/test/files/run/t6292.scala
new file mode 100644
index 0000000..51e31f9
--- /dev/null
+++ b/test/files/run/t6292.scala
@@ -0,0 +1,18 @@
+ import scala.collection.mutable.DoubleLinkedList
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ cloneAndtest(DoubleLinkedList[Int]())
+ cloneAndtest(DoubleLinkedList[Int](1))
+ cloneAndtest(DoubleLinkedList[Int](1,2,3,4))
+ }
+
+ def cloneAndtest(l: DoubleLinkedList[Int]): Unit =
+ testSame(l, l.clone.asInstanceOf[DoubleLinkedList[Int]])
+
+ def testSame(one: DoubleLinkedList[Int], two: DoubleLinkedList[Int]): Unit = {
+ def msg = s" for ${one} and ${two} !"
+ assert(one.size == two.size, s"Cloned sizes are not the same $msg!")
+ assert(one == two, s"Cloned lists are not equal $msg")
+ }
+}
diff --git a/test/files/run/t6318_derived.check b/test/files/run/t6318_derived.check
new file mode 100644
index 0000000..926f2a4
--- /dev/null
+++ b/test/files/run/t6318_derived.check
@@ -0,0 +1,3 @@
+Some(X)
+true
+Some(X)
diff --git a/test/files/run/t6318_derived.scala b/test/files/run/t6318_derived.scala
new file mode 100644
index 0000000..ccdc18d
--- /dev/null
+++ b/test/files/run/t6318_derived.scala
@@ -0,0 +1,15 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ def test[T: ClassTag](x: T) {
+ println(classTag[T].runtimeClass.isAssignableFrom(x.getClass))
+ println(classTag[T].unapply(x))
+ }
+
+ class X(val x: Int) extends AnyVal { override def toString = "X" }
+ val x = new X(1)
+ // the commented line crashes because of SI-6326
+ //println(classTag[X].runtimeClass.isAssignableFrom(x.getClass))
+ println(classTag[X].unapply(x))
+ test(x)
+}
\ No newline at end of file
diff --git a/test/files/run/t6318_primitives.check b/test/files/run/t6318_primitives.check
new file mode 100644
index 0000000..b330f91
--- /dev/null
+++ b/test/files/run/t6318_primitives.check
@@ -0,0 +1,36 @@
+true
+Some(1)
+false
+None
+true
+Some(1)
+false
+None
+true
+Some()
+false
+None
+true
+Some(1)
+false
+None
+true
+Some(1)
+false
+None
+true
+Some(1.0)
+false
+None
+true
+Some(1.0)
+false
+None
+true
+Some(true)
+false
+None
+true
+Some(())
+false
+None
diff --git a/test/files/run/t6318_primitives.scala b/test/files/run/t6318_primitives.scala
new file mode 100644
index 0000000..30f2712
--- /dev/null
+++ b/test/files/run/t6318_primitives.scala
@@ -0,0 +1,71 @@
+import scala.reflect.{ClassTag, classTag}
+
+object Test extends App {
+ def test[T: ClassTag](x: T) {
+ println(classTag[T].runtimeClass.isAssignableFrom(x.getClass))
+ println(classTag[T].unapply(x))
+ }
+
+ {
+ val x = 1.toByte
+ println(ClassTag.Byte.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Byte.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = 1.toShort
+ println(ClassTag.Short.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Short.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = 1.toChar
+ println(ClassTag.Char.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Char.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = 1.toInt
+ println(ClassTag.Int.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Int.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = 1.toLong
+ println(ClassTag.Long.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Long.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = 1.toFloat
+ println(ClassTag.Float.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Float.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = 1.toDouble
+ println(ClassTag.Double.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Double.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = true
+ println(ClassTag.Boolean.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Boolean.unapply(x))
+ test(x)
+ }
+
+ {
+ val x = ()
+ println(ClassTag.Unit.runtimeClass.isAssignableFrom(x.getClass))
+ println(ClassTag.Unit.unapply(x))
+ test(x)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t6320.check b/test/files/run/t6320.check
new file mode 100644
index 0000000..e56bacd
--- /dev/null
+++ b/test/files/run/t6320.check
@@ -0,0 +1,17 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.language.dynamics
+import scala.language.dynamics
+
+scala> class Dyn(m: Map[String, Any]) extends Dynamic { def selectDynamic[T](s: String): T = m(s).asInstanceOf[T] }
+defined class Dyn
+
+scala> new Dyn(Map("foo" -> 10)).foo[Int]
+res0: Int = 10
+
+scala>
+
+scala>
diff --git a/test/files/run/t6320.scala b/test/files/run/t6320.scala
new file mode 100644
index 0000000..26085a3
--- /dev/null
+++ b/test/files/run/t6320.scala
@@ -0,0 +1,9 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+import scala.language.dynamics
+class Dyn(m: Map[String, Any]) extends Dynamic { def selectDynamic[T](s: String): T = m(s).asInstanceOf[T] }
+new Dyn(Map("foo" -> 10)).foo[Int]
+ """
+}
diff --git a/test/files/run/t6323b.check b/test/files/run/t6323b.check
new file mode 100644
index 0000000..d6b1d1f
--- /dev/null
+++ b/test/files/run/t6323b.check
@@ -0,0 +1 @@
+cannot reflect value a, because it's a member of a weak type Test
diff --git a/test/files/run/t6323b.scala b/test/files/run/t6323b.scala
new file mode 100644
index 0000000..f530ac3
--- /dev/null
+++ b/test/files/run/t6323b.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => m}
+import scala.reflect.runtime.{universe => u}
+
+object Test extends App {
+ locally {
+ try {
+ case class Test(a:String,b:List[Int])
+
+ val lookAtMe = m.reflect(Test("a",List(5)))
+ val value = u.weakTypeOf[Test]
+ val members = value.members
+ val member = value.members.filter(_.name.encoded == "a")
+ val aAccessor = lookAtMe.reflectMethod(member.head.asMethod)
+ val thisShouldBeA = aAccessor.apply()
+ println(thisShouldBeA)
+ } catch {
+ case ScalaReflectionException(msg) => println(msg)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t6327.check b/test/files/run/t6327.check
new file mode 100644
index 0000000..f7bacac
--- /dev/null
+++ b/test/files/run/t6327.check
@@ -0,0 +1,4 @@
+A
+A
+A
+A
diff --git a/test/files/run/t6327.scala b/test/files/run/t6327.scala
new file mode 100644
index 0000000..7683101
--- /dev/null
+++ b/test/files/run/t6327.scala
@@ -0,0 +1,22 @@
+import language._
+
+object Test extends App {
+
+ case class R[+T](s: String) { def x() = println(s) }
+
+ // Implicits in contention; StringR is nested to avoid ambiguity
+ object R { implicit val StringR = R[String]("A") }
+ implicit val Default = R[Any]("B")
+
+ class B() extends Dynamic {
+ def selectDynamic[T](f: String)(implicit r: R[T]): Unit = r.x()
+ }
+
+ val b = new B()
+
+ // These should all produce the same output, but they don't
+ b.selectDynamic[String]("baz")
+ b.baz[String]
+ val c = b.selectDynamic[String]("baz")
+ val d = b.baz[String]
+}
diff --git a/test/files/run/t6329_repl.check b/test/files/run/t6329_repl.check
new file mode 100644
index 0000000..693263a
--- /dev/null
+++ b/test/files/run/t6329_repl.check
@@ -0,0 +1,13 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> classManifest[List[_]]
+warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[Any]
+
+scala> scala.reflect.classTag[List[_]]
+res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List
+
+scala>
diff --git a/test/files/run/t6329_repl.scala b/test/files/run/t6329_repl.scala
new file mode 100644
index 0000000..add6d64
--- /dev/null
+++ b/test/files/run/t6329_repl.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |classManifest[List[_]]
+ |scala.reflect.classTag[List[_]]
+ |""".stripMargin
+}
diff --git a/test/files/run/t6329_repl_bug.check b/test/files/run/t6329_repl_bug.check
new file mode 100644
index 0000000..8663184
--- /dev/null
+++ b/test/files/run/t6329_repl_bug.check
@@ -0,0 +1,13 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> classManifest[List[_]]
+warning: there were 1 deprecation warnings; re-run with -deprecation for details
+res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[Any]
+
+scala> scala.reflect.classTag[List[_]]
+res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List
+
+scala>
diff --git a/test/files/run/t6329_repl_bug.pending b/test/files/run/t6329_repl_bug.pending
new file mode 100644
index 0000000..9997d17
--- /dev/null
+++ b/test/files/run/t6329_repl_bug.pending
@@ -0,0 +1,10 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ |import scala.reflect.runtime.universe._
+ |import scala.reflect.runtime._
+ |classManifest[List[_]]
+ |scala.reflect.classTag[List[_]]
+ |""".stripMargin
+}
diff --git a/test/files/run/t6329_vanilla.check b/test/files/run/t6329_vanilla.check
new file mode 100644
index 0000000..8282afa
--- /dev/null
+++ b/test/files/run/t6329_vanilla.check
@@ -0,0 +1,2 @@
+scala.collection.immutable.List[Any]
+scala.collection.immutable.List
diff --git a/test/files/run/t6329_vanilla.scala b/test/files/run/t6329_vanilla.scala
new file mode 100644
index 0000000..a31cd5c
--- /dev/null
+++ b/test/files/run/t6329_vanilla.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println(classManifest[List[_]])
+ println(scala.reflect.classTag[List[_]])
+}
\ No newline at end of file
diff --git a/test/files/run/t6329_vanilla_bug.check b/test/files/run/t6329_vanilla_bug.check
new file mode 100644
index 0000000..8282afa
--- /dev/null
+++ b/test/files/run/t6329_vanilla_bug.check
@@ -0,0 +1,2 @@
+scala.collection.immutable.List[Any]
+scala.collection.immutable.List
diff --git a/test/files/run/t6329_vanilla_bug.pending b/test/files/run/t6329_vanilla_bug.pending
new file mode 100644
index 0000000..404f90b
--- /dev/null
+++ b/test/files/run/t6329_vanilla_bug.pending
@@ -0,0 +1,7 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime._
+
+object Test extends App {
+ println(classManifest[List[_]])
+ println(scala.reflect.classTag[List[_]])
+}
\ No newline at end of file
diff --git a/test/files/run/t6331.check b/test/files/run/t6331.check
new file mode 100644
index 0000000..9bf3f78
--- /dev/null
+++ b/test/files/run/t6331.check
@@ -0,0 +1,23 @@
+ () == ()
+ true == true
+ true != false
+ false != true
+ 0.toByte == 0.toByte
+ 0.toByte != 1.toByte
+ 0.toShort == 0.toShort
+ 0.toShort != 1.toShort
+ 0 == 0
+ 0 != 1
+ 0L == 0L
+ 0L != 1L
+ 0.0f == 0.0f
+ 0.0f != -0.0f
+ -0.0f != 0.0f
+ NaNf == NaNf
+ 0.0d == 0.0d
+ 0.0d != -0.0d
+ -0.0d != 0.0d
+ NaNd == NaNd
+ 0 != 0.0d
+ 0 != 0L
+ 0.0d != 0.0f
diff --git a/test/files/run/t6331.scala b/test/files/run/t6331.scala
new file mode 100644
index 0000000..4e43a76
--- /dev/null
+++ b/test/files/run/t6331.scala
@@ -0,0 +1,71 @@
+import scala.tools.partest._
+import java.io._
+import scala.tools.nsc._
+import scala.tools.nsc.util.CommandLineParser
+import scala.tools.nsc.{Global, Settings, CompilerCommand}
+import scala.tools.nsc.reporters.ConsoleReporter
+
+// Test of Constant#equals, which must must account for floating point intricacies.
+object Test extends DirectTest {
+
+ override def code = ""
+
+ override def show() {
+ val global = newCompiler()
+ import global._
+
+ def check(c1: Any, c2: Any): Unit = {
+ val const1 = Constant(c1)
+ val const2 = Constant(c2)
+ val equal = const1 == const2
+ def show(a: Any) = "" + a + (a match {
+ case _: Byte => ".toByte"
+ case _: Short => ".toShort"
+ case _: Long => "L"
+ case _: Float => "f"
+ case _: Double => "d"
+ case _ => ""
+ })
+ val op = if (equal) "==" else "!="
+ println(f"${show(c1)}%12s $op ${show(c2)}")
+
+ val hash1 = const1.hashCode
+ val hash2 = const2.hashCode
+ val hashesEqual = hash1 == hash2
+ val hashBroken = equal && !hashesEqual
+ if (hashBroken) println(f"$hash1%12s != $hash2 // hash codes differ for equal objects!!")
+ }
+
+ check((), ())
+
+ check(true, true)
+ check(true, false)
+ check(false, true)
+
+ check(0.toByte, 0.toByte)
+ check(0.toByte, 1.toByte)
+
+ check(0.toShort, 0.toShort)
+ check(0.toShort, 1.toShort)
+
+ check(0, 0)
+ check(0, 1)
+
+ check(0L, 0L)
+ check(0L, 1L)
+
+ check(0f, 0f)
+ check(0f, -0f)
+ check(-0f, 0f)
+ check(Float.NaN, Float.NaN)
+
+ check(0d, 0d)
+ check(0d, -0d)
+ check(-0d, 0d)
+ check(Double.NaN, Double.NaN)
+
+ check(0, 0d)
+ check(0, 0L)
+ check(0d, 0f)
+ }
+}
diff --git a/test/files/run/t6331b.check b/test/files/run/t6331b.check
new file mode 100644
index 0000000..6ca09e3
--- /dev/null
+++ b/test/files/run/t6331b.check
@@ -0,0 +1,30 @@
+trace> if (Test.this.t)
+ -0.0
+else
+ 0.0
+res: Double = -0.0
+
+trace> if (Test.this.t)
+ 0.0
+else
+ -0.0
+res: Double = 0.0
+
+trace> Test.this.intercept.apply[Any](if (scala.this.Predef.???)
+ -0.0
+else
+ 0.0)
+res: Any = class scala.NotImplementedError
+
+trace> Test.this.intercept.apply[Any](if (scala.this.Predef.???)
+ 0.0
+else
+ 0.0)
+res: Any = class scala.NotImplementedError
+
+trace> Test.this.intercept.apply[Any](if (scala.this.Predef.???)
+ ()
+else
+ ())
+res: Any = class scala.NotImplementedError
+
diff --git a/test/files/run/t6331b.scala b/test/files/run/t6331b.scala
new file mode 100644
index 0000000..f966abe
--- /dev/null
+++ b/test/files/run/t6331b.scala
@@ -0,0 +1,20 @@
+import scala.tools.partest._
+import java.io._
+import scala.tools.nsc._
+import scala.tools.nsc.util.CommandLineParser
+import scala.tools.nsc.{Global, Settings, CompilerCommand}
+import scala.tools.nsc.reporters.ConsoleReporter
+
+import scala.tools.partest.trace
+import scala.util.control.Exception._
+
+
+object Test extends App {
+ def intercept = allCatch.withApply(_.getClass)
+ val t: Boolean = true
+ trace(if (t) -0d else 0d)
+ trace(if (t) 0d else -0d)
+ trace(intercept(if (???) -0d else 0d))
+ trace(intercept(if (???) 0d else 0d))
+ trace(intercept(if (???) () else ()))
+}
diff --git a/test/files/run/t6333.scala b/test/files/run/t6333.scala
new file mode 100644
index 0000000..266d95c
--- /dev/null
+++ b/test/files/run/t6333.scala
@@ -0,0 +1,29 @@
+object Test extends App {
+ import util.Try
+
+ val a = "apple"
+ def fail: String = throw new Exception("Fail!")
+ def argh: Try[String] = throw new Exception("Argh!")
+
+ // No throw tests
+ def tryMethods(expr: => String): Unit = {
+ Try(expr) orElse argh
+ Try(expr).transform(_ => argh, _ => argh)
+ Try(expr).recoverWith { case e if (a == fail) => Try(a) }
+ Try(expr).recoverWith { case _ => argh }
+ Try(expr).getOrElse(a)
+ // TODO - Fail getOrElse?
+ Try(expr) orElse argh
+ Try(expr) orElse Try(a)
+ Try(expr) map (_ => fail)
+ Try(expr) map (_ => a)
+ Try(expr) flatMap (_ => argh)
+ Try(expr) flatMap (_ => Try(a))
+ Try(expr) filter (_ => throw new Exception("O NOES"))
+ Try(expr) filter (_ => true)
+ Try(expr) recover { case _ => fail }
+ Try(expr).failed
+ }
+ tryMethods(a)
+ tryMethods(fail)
+}
diff --git a/test/files/run/t6337a.scala b/test/files/run/t6337a.scala
new file mode 100644
index 0000000..f5490f5
--- /dev/null
+++ b/test/files/run/t6337a.scala
@@ -0,0 +1,16 @@
+object Test {
+ def main(args: Array[String]) {
+ val x = X(XX(3))
+ assert(x.q.x.x + 9 == 13)
+ }
+}
+trait Q extends Any {
+ def x: Int
+ def inc: XX
+}
+case class X(val x: Q) extends AnyVal {
+ def q = X(x.inc)
+}
+case class XX(val x: Int) extends AnyVal with Q {
+ def inc = XX(x + 1)
+}
diff --git a/test/files/run/t6344.check b/test/files/run/t6344.check
new file mode 100644
index 0000000..8d9adac
--- /dev/null
+++ b/test/files/run/t6344.check
@@ -0,0 +1,132 @@
+C0
+public int C0.v1(int)
+public <A> int C0.v1(int)
+public int C0.v3()
+public <A> int C0.v3()
+public int C0.v4(int,scala.collection.immutable.List)
+public <A> int C0.v4(int,scala.collection.immutable.List<Val<A>>)
+public scala.collection.immutable.List C0.v2()
+public <A> scala.collection.immutable.List<Val<A>> C0.v2()
+
+C1
+public java.lang.Object C1.v1(java.lang.Object)
+public <A> java.lang.Object C1.v1(java.lang.Object)
+public java.lang.Object C1.v3()
+public <A> java.lang.Object C1.v3()
+public java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List)
+public <A> java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List<java.lang.Object>)
+public scala.collection.immutable.List C1.v2()
+public <A> scala.collection.immutable.List<java.lang.Object> C1.v2()
+
+C2
+public java.lang.String C2.v1(java.lang.String)
+public <A> java.lang.String C2.v1(java.lang.String)
+public java.lang.String C2.v3()
+public <A> java.lang.String C2.v3()
+public java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List)
+public <A> java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List<java.lang.String>)
+public scala.collection.immutable.List C2.v2()
+public <A> scala.collection.immutable.List<java.lang.String> C2.v2()
+
+C3
+public java.lang.Object C3.v1(java.lang.Object)
+public A C3.v1(A)
+public java.lang.Object C3.v3()
+public A C3.v3()
+public java.lang.Object C3.v4(java.lang.Object,scala.collection.immutable.List)
+public A C3.v4(A,scala.collection.immutable.List<A>)
+public java.lang.Object C3.x()
+public A C3.x()
+public scala.collection.immutable.List C3.v2()
+public scala.collection.immutable.List<A> C3.v2()
+
+C4
+public java.lang.Integer C4.v1(java.lang.Integer)
+public int C4.v1(int)
+public java.lang.Integer C4.v3()
+public int C4.v3()
+public java.lang.Integer C4.v4(java.lang.Integer,scala.collection.immutable.List)
+public int C4.v4(int,scala.collection.immutable.List<ValA<java.lang.Object>>)
+public scala.collection.immutable.List C4.v2()
+public scala.collection.immutable.List<ValA<java.lang.Object>> C4.v2()
+
+C4B
+public java.lang.String C4B.v1(java.lang.String)
+public java.lang.String C4B.v1(java.lang.String)
+public java.lang.String C4B.v3()
+public java.lang.String C4B.v3()
+public java.lang.String C4B.v4(java.lang.String,scala.collection.immutable.List)
+public java.lang.String C4B.v4(java.lang.String,scala.collection.immutable.List<java.lang.String>)
+public scala.collection.immutable.List C4B.v2()
+public scala.collection.immutable.List<java.lang.String> C4B.v2()
+
+C5
+public double C5.f2(int,java.lang.Object,java.lang.String,double)
+public double C5.f2(int,java.lang.Object,java.lang.String,double)
+public int C5.f3(java.lang.Integer)
+public int C5.f3(int)
+public int C5.f4(java.lang.Integer)
+public int C5.f4(int)
+public int C5.f5(java.lang.Integer)
+public int C5.f5(int)
+public java.lang.Object C5.f1(int,java.lang.Object,java.lang.String,java.lang.Object)
+public <A> A C5.f1(int,java.lang.Object,java.lang.String,A)
+
+C6
+public java.lang.Object C6.f1(int,java.lang.Object,java.lang.String,java.lang.Object)
+public A C6.f1(int,java.lang.Object,java.lang.String,A)
+
+C7
+public java.lang.Integer C7.f1(int,java.lang.Object,java.lang.String,java.lang.Integer)
+public int C7.f1(int,java.lang.Object,java.lang.String,int)
+public java.lang.Object C7.f1(int,java.lang.Object,java.lang.String,java.lang.Object)
+public java.lang.Object C7.f1(int,java.lang.Object,java.lang.String,java.lang.Object)
+
+Gen
+public abstract Gen Gen.plus(Gen,Gen)
+public abstract Gen<A> Gen.plus(Gen<A>,Gen<A>)
+public abstract java.lang.Object Gen.x()
+public abstract A Gen.x()
+
+ValueInt
+public Gen ValueInt.plus(Gen,Gen)
+public Gen<java.lang.Object> ValueInt.plus(Gen<java.lang.Object>,Gen<java.lang.Object>)
+public boolean ValueInt.equals(java.lang.Object)
+public boolean ValueInt.equals(java.lang.Object)
+public int ValueInt.hashCode()
+public int ValueInt.hashCode()
+public int ValueInt.iplus(int,int)
+public int ValueInt.iplus(int,int)
+public int ValueInt.x()
+public int ValueInt.x()
+public java.lang.Object ValueInt.x()
+public java.lang.Object ValueInt.x()
+public static Gen ValueInt.plus$extension(int,Gen,Gen)
+public static Gen<java.lang.Object> ValueInt.plus$extension(int,Gen<java.lang.Object>,Gen<java.lang.Object>)
+public static boolean ValueInt.equals$extension(int,java.lang.Object)
+public static boolean ValueInt.equals$extension(int,java.lang.Object)
+public static int ValueInt.hashCode$extension(int)
+public static int ValueInt.hashCode$extension(int)
+public static int ValueInt.iplus$extension(int,int,int)
+public static int ValueInt.iplus$extension(int,int,int)
+
+RefInt
+public Gen RefInt.plus(Gen,Gen)
+public Gen<java.lang.Object> RefInt.plus(Gen<java.lang.Object>,Gen<java.lang.Object>)
+public RefInt RefInt.rplus(RefInt,RefInt)
+public RefInt RefInt.rplus(RefInt,RefInt)
+public int RefInt.x()
+public int RefInt.x()
+public java.lang.Object RefInt.x()
+public java.lang.Object RefInt.x()
+
+RefInteger
+public Gen RefInteger.plus(Gen,Gen)
+public Gen<java.lang.Integer> RefInteger.plus(Gen<java.lang.Integer>,Gen<java.lang.Integer>)
+public RefInteger RefInteger.bplus(RefInteger,RefInteger)
+public RefInteger RefInteger.bplus(RefInteger,RefInteger)
+public java.lang.Integer RefInteger.x()
+public java.lang.Integer RefInteger.x()
+public java.lang.Object RefInteger.x()
+public java.lang.Object RefInteger.x()
+
diff --git a/test/files/run/t6344.scala b/test/files/run/t6344.scala
new file mode 100644
index 0000000..6f82e4b
--- /dev/null
+++ b/test/files/run/t6344.scala
@@ -0,0 +1,106 @@
+import scala.reflect.{ClassTag, classTag}
+import java.lang.Integer
+
+trait Gen[A] extends Any {
+ def x: A
+ def plus(x1: Gen[A], x2: Gen[A]): Gen[A]
+}
+class ValueInt(val x: Int) extends AnyVal with Gen[Int] {
+ // Gen<java.lang.Object> ValueInt.extension$plus(int,Gen<java.lang.Object>,Gen<java.lang.Object>)
+ def plus(x1: Gen[Int], x2: Gen[Int]): Gen[Int] = new ValueInt(x + x1.x + x2.x)
+ // int ValueInt.extension$iplus(int,int,int)
+ def iplus(x1: ValueInt, x2: ValueInt): ValueInt = new ValueInt(x + x1.x + x2.x)
+}
+class RefInt(val x: Int) extends AnyRef with Gen[Int] {
+ def plus(x1: Gen[Int], x2: Gen[Int]): Gen[Int] = new RefInt(x + x1.x + x2.x)
+ def rplus(x1: RefInt, x2: RefInt): RefInt = new RefInt(x + x1.x + x2.x)
+}
+class RefInteger(val x: java.lang.Integer) extends AnyRef with Gen[Integer] {
+ def plus(x1: Gen[Integer], x2: Gen[Integer]): Gen[Integer] = new RefInteger(x + x1.x + x2.x)
+ def bplus(x1: RefInteger, x2: RefInteger): RefInteger = new RefInteger(x + x1.x + x2.x)
+}
+
+class Val[Q](val value: Int) extends AnyVal
+class ValAny[Q](val value: Any) extends AnyVal
+class ValStr[Q](val value: String) extends AnyVal
+class ValA[Q](val value: Q) extends AnyVal {
+ def f: Q = ???
+}
+class ValB[Q, Q0 <: Q](val value: Q) extends AnyVal {
+ def f: Q0 = ???
+}
+
+class C0 {
+ def v1[A](in: Val[A]) = in
+ def v2[A]: List[Val[A]] = Nil
+ def v3[A]: Val[A] = new Val[A](0)
+ def v4[A <: String](x: Val[A], ys: List[Val[A]]) = ys.head
+}
+class C1 {
+ def v1[A](in: ValAny[A]) = in
+ def v2[A]: List[ValAny[A]] = Nil
+ def v3[A]: ValAny[A] = new ValAny[A]("")
+ def v4[A <: String](x: ValAny[A], ys: List[ValAny[A]]) = ys.head
+}
+class C2 {
+ def v1[A](in: ValStr[A]) = in
+ def v2[A]: List[ValStr[A]] = Nil
+ def v3[A]: ValStr[A] = new ValStr[A]("")
+ def v4[A <: String](x: ValStr[A], ys: List[ValStr[A]]) = ys.head
+}
+class C3[A](val x: A) {
+ def v1(in: ValA[A]) = in
+ def v2: List[ValA[A]] = Nil
+ def v3: ValA[A] = new ValA[A](x)
+ def v4(x: ValA[A], ys: List[ValA[A]]) = ys.head
+}
+class C4 {
+ def v1(in: ValA[Int]) = in
+ def v2: List[ValA[Int]] = Nil
+ def v3: ValA[Int] = new ValA(1)
+ def v4(x: ValA[Int], ys: List[ValA[Int]]) = ys.head
+}
+class C4B {
+ def v1(in: ValA[String]) = in
+ def v2: List[ValA[String]] = Nil
+ def v3: ValA[String] = new ValA("")
+ def v4(x: ValA[String], ys: List[ValA[String]]) = ys.head
+}
+class C5 {
+ def f1[A](x1: Val[A], x2: ValAny[A], x3: ValStr[A], x4: ValA[A]) = x4
+ def f2(x1: Int, x2: Any, x3: String, x4: Double) = x4
+ def f3(x: ValA[Int]) = x.f
+ def f4(x: ValB[Int, Int]) = x.f
+ def f5(x: ValB[Int, _ <: Int]) = x.f
+}
+class C6[A] {
+ def f1(x1: Val[A], x2: ValAny[A], x3: ValStr[A], x4: ValA[A]) = x4
+}
+class C7 extends C6[Int] {
+ override def f1(x1: Val[Int], x2: ValAny[Int], x3: ValStr[Int], x4: ValA[Int]) =
+ super.f1(x1, x2, x3, x4)
+}
+
+object Test {
+ def show[A: ClassTag] = {
+ println(classTag[A].runtimeClass.getName)
+ classTag[A].runtimeClass.getDeclaredMethods.toList.sortBy(_.toString).flatMap(m => List(m.toString, m.toGenericString)) foreach println
+ println("")
+ }
+
+ def main(args: Array[String]): Unit = {
+ show[C0]
+ show[C1]
+ show[C2]
+ show[C3[_]]
+ show[C4]
+ show[C4B]
+ show[C5]
+ show[C6[_]]
+ show[C7]
+ show[Gen[_]]
+ show[ValueInt]
+ show[RefInt]
+ show[RefInteger]
+ }
+}
diff --git a/test/files/run/t6353.check b/test/files/run/t6353.check
new file mode 100644
index 0000000..5676bed
--- /dev/null
+++ b/test/files/run/t6353.check
@@ -0,0 +1 @@
+applyDynamic(apply)(9)
diff --git a/test/files/run/t6353.scala b/test/files/run/t6353.scala
new file mode 100644
index 0000000..112241a
--- /dev/null
+++ b/test/files/run/t6353.scala
@@ -0,0 +1,12 @@
+import language.dynamics
+
+object Test extends App {
+ val x = new X(3)
+ val y = x(9)
+ class X(i: Int) extends Dynamic {
+ def applyDynamic(name: String)(in: Int): Int = {
+ println(s"applyDynamic($name)($in)")
+ i + in
+ }
+ }
+}
diff --git a/test/files/run/t6370.scala b/test/files/run/t6370.scala
new file mode 100644
index 0000000..c86b87d
--- /dev/null
+++ b/test/files/run/t6370.scala
@@ -0,0 +1,12 @@
+object Test {
+
+ def main(args: Array[String]): Unit = {
+ val m = collection.immutable.ListMap( "x" -> 1 )
+ try {
+ m("y")
+ } catch {
+ case e : NoSuchElementException => assert(e.getMessage() == "key not found: y")
+ }
+
+ }
+}
diff --git a/test/files/run/t6380.check b/test/files/run/t6380.check
new file mode 100644
index 0000000..912525e
--- /dev/null
+++ b/test/files/run/t6380.check
@@ -0,0 +1,7 @@
+List(class java.lang.Exception)
+List(class java.lang.Throwable)
+List(class java.lang.RuntimeException)
+List(class java.lang.IllegalArgumentException, class java.util.NoSuchElementException)
+List(class java.lang.IndexOutOfBoundsException, class java.lang.IndexOutOfBoundsException)
+List(class java.lang.IllegalStateException, class java.lang.IllegalStateException)
+List(class java.lang.NullPointerException, class java.lang.NullPointerException)
diff --git a/test/files/run/t6380.scala b/test/files/run/t6380.scala
new file mode 100644
index 0000000..0e264d9
--- /dev/null
+++ b/test/files/run/t6380.scala
@@ -0,0 +1,20 @@
+object Test extends App {
+ classOf[Foo].getDeclaredMethods().sortBy(_.getName).map(_.getExceptionTypes.sortBy(_.getName).toList).toList.foreach(println)
+}
+
+class Foo {
+ @throws[Exception]
+ def bar1 = ???
+ @throws[Throwable]("always")
+ def bar2 = ???
+ @throws(classOf[RuntimeException])
+ def bar3 = ???
+ @throws[IllegalArgumentException] @throws[NoSuchElementException]
+ def bar4 = ???
+ @throws(classOf[IndexOutOfBoundsException]) @throws(classOf[IndexOutOfBoundsException])
+ def bar5 = ???
+ @throws[IllegalStateException]("Cause") @throws[IllegalStateException]
+ def bar6 = ???
+ @throws[NullPointerException]("Cause A") @throws[NullPointerException]("Cause B")
+ def bar7 = ???
+}
\ No newline at end of file
diff --git a/test/files/run/t6392a.check b/test/files/run/t6392a.check
new file mode 100644
index 0000000..6a452c1
--- /dev/null
+++ b/test/files/run/t6392a.check
@@ -0,0 +1 @@
+()
diff --git a/test/files/run/t6392a.scala b/test/files/run/t6392a.scala
new file mode 100644
index 0000000..3a4f9fd
--- /dev/null
+++ b/test/files/run/t6392a.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val c = tb.parse("object C")
+ println(tb.eval(c))
+}
\ No newline at end of file
diff --git a/test/files/run/t6392b.check b/test/files/run/t6392b.check
new file mode 100644
index 0000000..b7872f0
--- /dev/null
+++ b/test/files/run/t6392b.check
@@ -0,0 +1 @@
+ModuleDef(Modifiers(), newTermName("C")#MOD, Template(List(Select(Ident(scala#PK), newTypeName("AnyRef")#TPE)), emptyValDef, List(DefDef(Modifiers(), nme.CONSTRUCTOR#PCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(newTypeName("C")), tpnme.EMPTY), nme.CONSTRUCTOR#PCTOR), List())), Literal(Constant(())))))))
diff --git a/test/files/run/t6392b.scala b/test/files/run/t6392b.scala
new file mode 100644
index 0000000..f69a5aa
--- /dev/null
+++ b/test/files/run/t6392b.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val c = tb.parse("object C")
+ println(showRaw(tb.typeCheck(c), printKinds = true))
+}
\ No newline at end of file
diff --git a/test/files/run/t6394a.check b/test/files/run/t6394a.check
new file mode 100644
index 0000000..2a02d41
--- /dev/null
+++ b/test/files/run/t6394a.check
@@ -0,0 +1 @@
+TEST
diff --git a/test/files/run/t6394a.flags b/test/files/run/t6394a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/t6394a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/t6394a/Macros_1.scala b/test/files/run/t6394a/Macros_1.scala
new file mode 100644
index 0000000..3d39d3e
--- /dev/null
+++ b/test/files/run/t6394a/Macros_1.scala
@@ -0,0 +1,12 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c:Context): c.Expr[Any] = {
+ import c.universe._
+
+ val selfTree = This(c.enclosingClass.symbol.asModule.moduleClass)
+ c.Expr[AnyRef](selfTree)
+ }
+
+ def foo: Any = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/t6394a/Test_2.scala b/test/files/run/t6394a/Test_2.scala
new file mode 100644
index 0000000..75e84f0
--- /dev/null
+++ b/test/files/run/t6394a/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println(Macros.foo)
+ override def toString = "TEST"
+}
\ No newline at end of file
diff --git a/test/files/run/t6394b.check b/test/files/run/t6394b.check
new file mode 100644
index 0000000..2a02d41
--- /dev/null
+++ b/test/files/run/t6394b.check
@@ -0,0 +1 @@
+TEST
diff --git a/test/files/run/t6394b.flags b/test/files/run/t6394b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/files/run/t6394b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/files/run/t6394b/Macros_1.scala b/test/files/run/t6394b/Macros_1.scala
new file mode 100644
index 0000000..5d93e1c
--- /dev/null
+++ b/test/files/run/t6394b/Macros_1.scala
@@ -0,0 +1,12 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c:Context): c.Expr[Any] = {
+ import c.universe._
+
+ val selfTree = This(tpnme.EMPTY)
+ c.Expr[AnyRef](selfTree)
+ }
+
+ def foo: Any = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/t6394b/Test_2.scala b/test/files/run/t6394b/Test_2.scala
new file mode 100644
index 0000000..75e84f0
--- /dev/null
+++ b/test/files/run/t6394b/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ println(Macros.foo)
+ override def toString = "TEST"
+}
\ No newline at end of file
diff --git a/test/files/run/t6410.check b/test/files/run/t6410.check
new file mode 100644
index 0000000..051fe49
--- /dev/null
+++ b/test/files/run/t6410.check
@@ -0,0 +1,2 @@
+ParMap(0 -> 4, 1 -> 5)
+ParMap(0 -> 4, 1 -> 5)
\ No newline at end of file
diff --git a/test/files/run/t6410.scala b/test/files/run/t6410.scala
new file mode 100644
index 0000000..2a001b4
--- /dev/null
+++ b/test/files/run/t6410.scala
@@ -0,0 +1,9 @@
+
+
+
+object Test extends App {
+ val x = collection.parallel.mutable.ParArray.range(1,10) groupBy { _ % 2 } mapValues { _.size }
+ println(x)
+ val y = collection.parallel.immutable.ParVector.range(1,10) groupBy { _ % 2 } mapValues { _.size }
+ println(y)
+}
\ No newline at end of file
diff --git a/test/files/run/t6434.check b/test/files/run/t6434.check
new file mode 100644
index 0000000..f898b6b
--- /dev/null
+++ b/test/files/run/t6434.check
@@ -0,0 +1,10 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> def f(x: => Int): Int = x
+f: (x: => Int)Int
+
+scala> f _
+res0: (=> Int) => Int = <function1>
+
+scala>
diff --git a/test/files/run/t6434.scala b/test/files/run/t6434.scala
new file mode 100644
index 0000000..e4a4579
--- /dev/null
+++ b/test/files/run/t6434.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code =
+"""def f(x: => Int): Int = x
+f _
+"""
+}
diff --git a/test/files/run/t6439.check b/test/files/run/t6439.check
new file mode 100644
index 0000000..178ea73
--- /dev/null
+++ b/test/files/run/t6439.check
@@ -0,0 +1,66 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> class A
+defined class A
+
+scala> object A // warn
+defined module A
+warning: previously defined class A is not a companion to object A.
+Companions must be defined together; you may wish to use :paste mode for this.
+
+scala> trait B
+defined trait B
+
+scala> object B // warn
+defined module B
+warning: previously defined trait B is not a companion to object B.
+Companions must be defined together; you may wish to use :paste mode for this.
+
+scala> object C
+defined module C
+
+scala> object Bippy
+defined module Bippy
+
+scala> class C // warn
+defined class C
+warning: previously defined object C is not a companion to class C.
+Companions must be defined together; you may wish to use :paste mode for this.
+
+scala> class D
+defined class D
+
+scala> def D = 0 // no warn
+D: Int
+
+scala> val D = 0 // no warn
+D: Int = 0
+
+scala> object E
+defined module E
+
+scala> var E = 0 // no warn
+E: Int = 0
+
+scala> object F
+defined module F
+
+scala> type F = Int // no warn
+defined type alias F
+
+scala> :power
+** Power User mode enabled - BEEP WHIR GYVE **
+** :phase has been set to 'typer'. **
+** scala.tools.nsc._ has been imported **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
+
+scala> intp("F") // this now works as a result of changing .typeSymbol to .typeSymbolDirect in IMain#Request#definedSymbols
+res0: $r.intp.global.Symbol = type F
+
+scala>
+
+scala>
diff --git a/test/files/run/t6439.scala b/test/files/run/t6439.scala
new file mode 100644
index 0000000..70a2dba
--- /dev/null
+++ b/test/files/run/t6439.scala
@@ -0,0 +1,22 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+class A
+object A // warn
+trait B
+object B // warn
+object C
+object Bippy
+class C // warn
+class D
+def D = 0 // no warn
+val D = 0 // no warn
+object E
+var E = 0 // no warn
+object F
+type F = Int // no warn
+:power
+intp("F") // this now works as a result of changing .typeSymbol to .typeSymbolDirect in IMain#Request#definedSymbols
+ """
+}
diff --git a/test/files/run/t6440.check b/test/files/run/t6440.check
new file mode 100644
index 0000000..806279f
--- /dev/null
+++ b/test/files/run/t6440.check
@@ -0,0 +1,4 @@
+pos: source-newSource1.scala,line-9,offset=109 bad symbolic reference. A signature in U.class refers to term pack1
+in package <root> which is not available.
+It may be completely missing from the current classpath, or the version on
+the classpath might be incompatible with the version used when compiling U.class. ERROR
diff --git a/test/files/run/t6440.scala b/test/files/run/t6440.scala
new file mode 100644
index 0000000..5a3a415
--- /dev/null
+++ b/test/files/run/t6440.scala
@@ -0,0 +1,48 @@
+import scala.tools.partest._
+import java.io.File
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+
+ def library1 = """
+ package pack1
+ trait T
+ """
+
+ def library2 = """
+ package pack2
+ trait U extends pack1.T
+ """
+
+ def app = """
+ package pack3
+ object X {
+ trait U
+ }
+ import X._
+ import pack2._
+
+ trait V extends U
+ """
+
+ def show(): Unit = {
+ Seq(library1, library2) foreach compileCode
+ assert(filteredInfos.isEmpty, filteredInfos)
+
+ // blow away the entire package
+ val pack1 = new File(testOutput.path, "pack1")
+ val tClass = new File(pack1, "T.class")
+ assert(tClass.exists)
+ assert(tClass.delete())
+ assert(pack1.delete())
+
+ // bad symbolic reference error expected (but no stack trace!)
+ compileCode(app)
+ println(filteredInfos.mkString("\n"))
+ }
+}
diff --git a/test/files/run/t6440b.check b/test/files/run/t6440b.check
new file mode 100644
index 0000000..9771ce5
--- /dev/null
+++ b/test/files/run/t6440b.check
@@ -0,0 +1,4 @@
+pos: NoPosition bad symbolic reference. A signature in U.class refers to type T
+in package pack1 which is not available.
+It may be completely missing from the current classpath, or the version on
+the classpath might be incompatible with the version used when compiling U.class. ERROR
diff --git a/test/files/run/t6440b.scala b/test/files/run/t6440b.scala
new file mode 100644
index 0000000..974aca2
--- /dev/null
+++ b/test/files/run/t6440b.scala
@@ -0,0 +1,61 @@
+import scala.tools.partest._
+import java.io.File
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+
+ def library1 = """
+ package pack1
+ trait T
+ class U {
+ def t = new T {}
+ def one = 1
+ }
+ """
+
+ def library2 = """
+ package pack2
+ object V {
+ def u = new pack1.U
+ }
+ """
+
+ def app1 = """
+ package pack3
+ object Test {
+ pack2.V.u.one // okay
+ }
+ """
+
+ def app2 = """
+ package pack3
+ object Test {
+ pack2.V.u.t // we have to fail if T.class is misisng
+ }
+ """
+
+ def show(): Unit = {
+ compileCode(library1)
+ val pack1 = new File(testOutput.path, "pack1")
+ val tClass = new File(pack1, "T.class")
+ assert(tClass.exists)
+ assert(tClass.delete())
+
+ // allowed to compile, no direct reference to `T`
+ compileCode(library2)
+ assert(filteredInfos.isEmpty, filteredInfos)
+
+ // allowed to compile, no direct reference to `T`
+ compileCode(app1)
+ assert(filteredInfos.isEmpty, filteredInfos)
+
+ // bad symbolic reference error expected (but no stack trace!)
+ compileCode(app2)
+ println(filteredInfos.mkString("\n"))
+ }
+}
diff --git a/test/files/run/t6443-by-name.check b/test/files/run/t6443-by-name.check
new file mode 100644
index 0000000..6f98fa4
--- /dev/null
+++ b/test/files/run/t6443-by-name.check
@@ -0,0 +1,3 @@
+1
+foo
+foo
diff --git a/test/files/run/t6443-by-name.scala b/test/files/run/t6443-by-name.scala
new file mode 100644
index 0000000..bfd9bf9
--- /dev/null
+++ b/test/files/run/t6443-by-name.scala
@@ -0,0 +1,18 @@
+object Test {
+
+ def main(args: Array[String]) {
+ def foo = {println("foo"); 0}
+ lazyDep(X)(foo)
+ }
+
+ trait T {
+ type U
+ }
+ object X extends T { type U = Int }
+
+ def lazyDep(t: T)(u: => t.U) {
+ println("1")
+ u
+ u
+ }
+}
diff --git a/test/files/run/bug4570.check b/test/files/run/t6443-varargs.check
similarity index 100%
rename from test/files/run/bug4570.check
rename to test/files/run/t6443-varargs.check
diff --git a/test/files/run/t6443-varargs.scala b/test/files/run/t6443-varargs.scala
new file mode 100644
index 0000000..9cbae3e
--- /dev/null
+++ b/test/files/run/t6443-varargs.scala
@@ -0,0 +1,16 @@
+object Test {
+
+ def main(args: Array[String]) {
+ def foo = {println("foo"); 0}
+ lazyDep(X)(foo)
+ }
+
+ trait T {
+ type U
+ }
+ object X extends T { type U = Int }
+
+ def lazyDep(t: T)(us: t.U*) {
+ List(us: _*)
+ }
+}
diff --git a/test/files/run/t6443.scala b/test/files/run/t6443.scala
new file mode 100644
index 0000000..67fe2ca
--- /dev/null
+++ b/test/files/run/t6443.scala
@@ -0,0 +1,15 @@
+class Base
+class Derived extends Base
+
+trait A {
+ def foo(d: String)(d2: d.type): Base
+ val s = ""
+ def bar: Unit = foo(s)(s)
+}
+object B extends A {
+ def foo(d: String)(d2: d.type): D forSome { type D <: S; type S <: Derived } = {d2.isEmpty; null} // Bridge method required here!
+}
+
+object Test extends App {
+ B.bar
+}
diff --git a/test/files/run/t6443b.scala b/test/files/run/t6443b.scala
new file mode 100644
index 0000000..9320b1d
--- /dev/null
+++ b/test/files/run/t6443b.scala
@@ -0,0 +1,16 @@
+trait A {
+ type D >: Null <: C
+ def foo(d: D)(d2: d.type): Unit
+ trait C {
+ def bar: Unit = foo(null)(null)
+ }
+}
+object B extends A {
+ class D extends C
+
+ def foo(d: D)(d2: d.type): Unit = () // Bridge method required here!
+}
+
+object Test extends App {
+ new B.D().bar
+}
diff --git a/test/files/run/t6481.check b/test/files/run/t6481.check
new file mode 100644
index 0000000..7ec2963
--- /dev/null
+++ b/test/files/run/t6481.check
@@ -0,0 +1,4 @@
+delayed init
+new foo(1, 2)
+delayed init
+new foo(b = 2, a = 1)
diff --git a/test/files/run/t6481.scala b/test/files/run/t6481.scala
new file mode 100644
index 0000000..125da3b
--- /dev/null
+++ b/test/files/run/t6481.scala
@@ -0,0 +1,13 @@
+abstract class foo(a: Int, b: Int) extends scala.DelayedInit {
+ def delayedInit(x: => Unit) {
+ println("delayed init");
+ x
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ new foo(1, 2) { println("new foo(1, 2)") }
+ new foo(b = 2, a = 1) { println("new foo(b = 2, a = 1)") }
+ }
+}
diff --git a/test/files/run/t6488.check b/test/files/run/t6488.check
new file mode 100644
index 0000000..3582111
--- /dev/null
+++ b/test/files/run/t6488.check
@@ -0,0 +1 @@
+Success
diff --git a/test/files/run/t6488.scala b/test/files/run/t6488.scala
new file mode 100644
index 0000000..487614e
--- /dev/null
+++ b/test/files/run/t6488.scala
@@ -0,0 +1,11 @@
+import sys.process._
+object Test {
+ // Program that prints "Success" if the command was successfully run then destroyed
+ // It will silently pass if the command "/bin/ls" does not exist
+ // It will fail due to the uncatchable exception in t6488 race condition
+ def main(args: Array[String]) {
+ try Process("/bin/ls").run(ProcessLogger { _ => () }).destroy
+ catch { case _ => () }
+ println("Success")
+ }
+}
diff --git a/test/files/run/t6500.scala b/test/files/run/t6500.scala
new file mode 100644
index 0000000..03a68a3
--- /dev/null
+++ b/test/files/run/t6500.scala
@@ -0,0 +1,13 @@
+object Test extends App {
+ class Box(val value: Int) extends AnyVal
+
+ trait Foo {
+ def append(box: Box): Foo
+ }
+
+ class Bar extends Foo {
+ override def append(box: Box): Bar = this // produces bad forwarder
+ }
+
+ ((new Bar): Foo).append(new Box(0))
+}
diff --git a/test/files/run/t6506.scala b/test/files/run/t6506.scala
new file mode 100644
index 0000000..04d77c3
--- /dev/null
+++ b/test/files/run/t6506.scala
@@ -0,0 +1,8 @@
+object Test {
+ def main(args: Array[String]) {
+ new WL(new {} #:: S) with T
+ }
+ object S { def #::(a: Any): Any = () }
+ trait T
+ class WL(a: Any)
+}
diff --git a/test/files/run/t6534.scala b/test/files/run/t6534.scala
new file mode 100644
index 0000000..33df97e
--- /dev/null
+++ b/test/files/run/t6534.scala
@@ -0,0 +1,14 @@
+trait Foo extends Any { override def equals(x: Any) = false }
+trait Ding extends Any { override def hashCode = -1 }
+
+class Bippy1(val x: Int) extends AnyVal with Foo { } // warn
+class Bippy2(val x: Int) extends AnyVal with Ding { } // warn
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val b1 = new Bippy1(71)
+ val b2 = new Bippy2(71)
+ assert(b1 == b1 && b1.## == b1.x.##, ((b1, b1.##)))
+ assert(b2 == b2 && b2.## == b2.x.##, ((b2, b2.##)))
+ }
+}
diff --git a/test/files/pos/bug3252.flags b/test/files/run/t6546.flags
similarity index 100%
rename from test/files/pos/bug3252.flags
rename to test/files/run/t6546.flags
diff --git a/test/files/run/t6546/A_1.scala b/test/files/run/t6546/A_1.scala
new file mode 100644
index 0000000..bd086c0
--- /dev/null
+++ b/test/files/run/t6546/A_1.scala
@@ -0,0 +1,6 @@
+final class Opt {
+ @inline def getOrElse(x: => String): String = ""
+}
+class A_1 {
+ def f(x: Opt): String = x getOrElse null
+}
diff --git a/test/files/run/t6546/B_2.scala b/test/files/run/t6546/B_2.scala
new file mode 100644
index 0000000..64ec966
--- /dev/null
+++ b/test/files/run/t6546/B_2.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.BytecodeTest
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val node = loadClassNode("A_1")
+ assert(node.innerClasses.isEmpty, node.innerClasses)
+ }
+}
diff --git a/test/files/run/t6549.check b/test/files/run/t6549.check
new file mode 100644
index 0000000..bc78aac
--- /dev/null
+++ b/test/files/run/t6549.check
@@ -0,0 +1,32 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> case class `X"`(var xxx: Any)
+defined class X$u0022
+
+scala> val m = Map(("": Any) -> `X"`("\""), ('s: Any) -> `X"`("\""))
+m: scala.collection.immutable.Map[Any,X"] = Map("" -> X"("), 's -> X"("))
+
+scala> m("")
+res0: X" = X"(")
+
+scala> m("").xxx
+res1: Any = "
+
+scala> m("").xxx = 0
+m("").xxx: Any = 0
+
+scala> m("").xxx = "\""
+m("").xxx: Any = "
+
+scala> m('s).xxx = 's
+m(scala.Symbol("s")).xxx: Any = 's
+
+scala> val `"` = 0
+": Int = 0
+
+scala>
+
+scala>
diff --git a/test/files/run/t6549.scala b/test/files/run/t6549.scala
new file mode 100644
index 0000000..7335661
--- /dev/null
+++ b/test/files/run/t6549.scala
@@ -0,0 +1,22 @@
+import scala.tools.partest.ReplTest
+
+// Check that the fragments of code generated in
+// in the REPL correctly escape values added to
+// literal strings.
+//
+// Before, we saw:
+// scala> m("").x = 77
+// <console>:10: error: ')' expected but string literal found.
+// + "m("").x: Int = " + `$ires8` + "\n"
+object Test extends ReplTest {
+ def code = """
+ |case class `X"`(var xxx: Any)
+ |val m = Map(("": Any) -> `X"`("\""), ('s: Any) -> `X"`("\""))
+ |m("")
+ |m("").xxx
+ |m("").xxx = 0
+ |m("").xxx = "\""
+ |m('s).xxx = 's
+ |val `"` = 0
+ """.stripMargin
+}
diff --git a/test/files/run/t6555.check b/test/files/run/t6555.check
new file mode 100644
index 0000000..a18a8e8
--- /dev/null
+++ b/test/files/run/t6555.check
@@ -0,0 +1,22 @@
+[[syntax trees at end of specialize]] // newSource1.scala
+package <empty> {
+ class Foo extends Object {
+ def <init>(): Foo = {
+ Foo.super.<init>();
+ ()
+ };
+ private[this] val f: Int => Int = {
+ @SerialVersionUID(0) final <synthetic> class $anonfun extends scala.runtime.AbstractFunction1$mcII$sp with Serializable {
+ def <init>(): anonymous class $anonfun = {
+ $anonfun.super.<init>();
+ ()
+ };
+ final def apply(param: Int): Int = $anonfun.this.apply$mcII$sp(param);
+ <specialized> def apply$mcII$sp(param: Int): Int = param
+ };
+ (new anonymous class $anonfun(): Int => Int)
+ };
+ <stable> <accessor> def f(): Int => Int = Foo.this.f
+ }
+}
+
diff --git a/test/files/run/t6555.scala b/test/files/run/t6555.scala
new file mode 100644
index 0000000..b1a6137
--- /dev/null
+++ b/test/files/run/t6555.scala
@@ -0,0 +1,15 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:specialize -d " + testOutput.path
+
+ override def code = "class Foo { val f = (param: Int) => param } "
+
+ override def show(): Unit = {
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+}
diff --git a/test/files/run/t6559.scala b/test/files/run/t6559.scala
new file mode 100644
index 0000000..5c671f7
--- /dev/null
+++ b/test/files/run/t6559.scala
@@ -0,0 +1,17 @@
+
+object Test {
+
+ def main(args: Array[String]) = {
+ val one = "1"
+ val two = "2"
+
+ val raw = raw"\n$one\n$two\n"
+ val escaped = s"\n$one\n$two\n"
+ val buggy = "\\n1\n2\n"
+ val correct = "\\n1\\n2\\n"
+
+ assert(raw != escaped, "Raw strings should not be escaped.")
+ assert(raw != buggy, "Raw strings after variables should not be escaped.")
+ assert(raw == correct, "Raw strings should stay raw.")
+ }
+}
diff --git a/test/files/run/bug657.check b/test/files/run/t657.check
similarity index 100%
rename from test/files/run/bug657.check
rename to test/files/run/t657.check
diff --git a/test/files/run/bug657.scala b/test/files/run/t657.scala
similarity index 100%
rename from test/files/run/bug657.scala
rename to test/files/run/t657.scala
diff --git a/test/files/run/t6572/bar_1.scala b/test/files/run/t6572/bar_1.scala
new file mode 100644
index 0000000..5518ced
--- /dev/null
+++ b/test/files/run/t6572/bar_1.scala
@@ -0,0 +1,19 @@
+package bar
+
+abstract class IntBase[V] extends Base[Int, V]
+
+class DefaultIntBase[V <: IntProvider] extends IntBase[V] {
+ override protected def hashCode(key: Int) = key
+}
+
+trait IntProvider {
+ def int: Int
+}
+
+abstract class Base[@specialized K, V] {
+
+ protected def hashCode(key: K) = key.hashCode
+
+ def get(key: K): V = throw new RuntimeException
+
+}
\ No newline at end of file
diff --git a/test/files/run/t6572/foo_2.scala b/test/files/run/t6572/foo_2.scala
new file mode 100644
index 0000000..465f0b7
--- /dev/null
+++ b/test/files/run/t6572/foo_2.scala
@@ -0,0 +1,17 @@
+//package foo
+
+import bar._
+
+class FooProvider extends IntProvider {
+ def int = 3
+}
+
+class Wrapper(users: DefaultIntBase[FooProvider]) {
+ final def user(userId: Int) = users.get(userId)
+}
+
+object Test {
+ def main(args: Array[String]) {
+ new Wrapper(new DefaultIntBase)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t6584.check b/test/files/run/t6584.check
new file mode 100644
index 0000000..35c8688
--- /dev/null
+++ b/test/files/run/t6584.check
@@ -0,0 +1,8 @@
+Array: 102400
+Vector: 102400
+List: 102400
+Stream: 102400
+Array: 102400
+Vector: 102400
+List: 102400
+Stream: 102400
diff --git a/test/files/run/t6584.scala b/test/files/run/t6584.scala
new file mode 100644
index 0000000..24c236e
--- /dev/null
+++ b/test/files/run/t6584.scala
@@ -0,0 +1,16 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ val size = 100 * 1024
+ val doubled = (1 to size) ++ (1 to size)
+
+ println("Array: " + Array.tabulate(size)(x => x).distinct.size)
+ println("Vector: " + Vector.tabulate(size)(x => x).distinct.size)
+ println("List: " + List.tabulate(size)(x => x).distinct.size)
+ println("Stream: " + Stream.tabulate(size)(x => x).distinct.size)
+
+ println("Array: " + doubled.toArray.distinct.size)
+ println("Vector: " + doubled.toVector.distinct.size)
+ println("List: " + doubled.toList.distinct.size)
+ println("Stream: " + doubled.toStream.distinct.size)
+ }
+}
diff --git a/test/files/run/t6591_1.check b/test/files/run/t6591_1.check
new file mode 100644
index 0000000..b6cb6c2
--- /dev/null
+++ b/test/files/run/t6591_1.check
@@ -0,0 +1 @@
+Block(List(ValDef(Modifiers(), newTermName("v"), Select(Ident(A), newTypeName("I")), Select(Ident(A), newTermName("impl")))), Ident(newTermName("v")))
diff --git a/test/files/run/t6591_1.scala b/test/files/run/t6591_1.scala
new file mode 100644
index 0000000..6dd9a1d
--- /dev/null
+++ b/test/files/run/t6591_1.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+trait O { trait I }
+
+object A extends O {
+ val impl = new I {}
+}
+
+object Test extends App {
+ val code = reify {
+ val v: A.I = A.impl
+ v
+ }
+ println(showRaw(code.tree))
+
+ val v: A.I = code.eval
+}
diff --git a/test/files/run/t6591_2.check b/test/files/run/t6591_2.check
new file mode 100644
index 0000000..b2d5797
--- /dev/null
+++ b/test/files/run/t6591_2.check
@@ -0,0 +1 @@
+Block(List(ValDef(Modifiers(), newTermName("v"), SelectFromTypeTree(Ident(A), newTypeName("I")), Select(Apply(Select(New(Ident(A)), nme.CONSTRUCTOR), List()), newTermName("impl")))), Ident(newTermName("v")))
diff --git a/test/files/run/t6591_2.scala b/test/files/run/t6591_2.scala
new file mode 100644
index 0000000..6214308
--- /dev/null
+++ b/test/files/run/t6591_2.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+trait O { trait I }
+
+class A extends O {
+ val impl = new I {}
+}
+
+object Test extends App {
+ val code = reify {
+ val v: A#I = (new A).impl
+ v
+ }
+ println(showRaw(code.tree))
+
+ val v: A#I = code.eval
+}
diff --git a/test/files/run/t6591_3.check b/test/files/run/t6591_3.check
new file mode 100644
index 0000000..a7b594b
--- /dev/null
+++ b/test/files/run/t6591_3.check
@@ -0,0 +1 @@
+Block(List(ValDef(Modifiers(), newTermName("v"), Select(This(newTypeName("A")), newTypeName("I")), Apply(Select(New(Select(This(newTypeName("A")), newTypeName("I"))), nme.CONSTRUCTOR), List()))), Ident(newTermName("v")))
diff --git a/test/files/run/t6591_3.scala b/test/files/run/t6591_3.scala
new file mode 100644
index 0000000..b73a7ba
--- /dev/null
+++ b/test/files/run/t6591_3.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+class O { class I }
+
+object A extends O {
+ val code = reify {
+ val v: I = new I
+ v
+ }
+ println(showRaw(code.tree))
+}
+
+object Test extends App {
+ val v: A.I = A.code.eval
+}
diff --git a/test/files/run/t6591_5.check b/test/files/run/t6591_5.check
new file mode 100644
index 0000000..e0b6d06
--- /dev/null
+++ b/test/files/run/t6591_5.check
@@ -0,0 +1 @@
+Expr(Block(List(ValDef(Modifiers(), newTermName("v"), Select(Select(This(newTypeName("A")), newTermName("x")), newTypeName("I")), Select(Ident(scala.Predef), newTermName("$qmark$qmark$qmark")))), Ident(newTermName("v"))))
diff --git a/test/files/run/t6591_5.scala b/test/files/run/t6591_5.scala
new file mode 100644
index 0000000..18d6f90
--- /dev/null
+++ b/test/files/run/t6591_5.scala
@@ -0,0 +1,23 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+import java.lang.reflect.InvocationTargetException
+
+class O { class I }
+
+object A extends O {
+ val x = new O
+ val code = reify {
+ val v: x.I = ???
+ v
+ }
+ println(showRaw(code))
+}
+
+object Test extends App {
+ try {
+ val v: A.x.I = A.code.eval
+ } catch {
+ case ex: InvocationTargetException if ex.getCause.isInstanceOf[NotImplementedError] =>
+ }
+}
diff --git a/test/files/run/t6591_6.check b/test/files/run/t6591_6.check
new file mode 100644
index 0000000..0c4847b
--- /dev/null
+++ b/test/files/run/t6591_6.check
@@ -0,0 +1 @@
+Expr(Block(List(ValDef(Modifiers(), newTermName("v"), Select(Select(Ident(newTermName("A")), newTermName("x")), newTypeName("I")), Select(Ident(scala.Predef), newTermName("$qmark$qmark$qmark")))), Ident(newTermName("v"))))
diff --git a/test/files/run/t6591_6.scala b/test/files/run/t6591_6.scala
new file mode 100644
index 0000000..2eee879
--- /dev/null
+++ b/test/files/run/t6591_6.scala
@@ -0,0 +1,24 @@
+import scala.language.existentials
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+import java.lang.reflect.InvocationTargetException
+
+class O { class I }
+
+class A extends O {
+ val x = new O
+ val code = reify {
+ val v: x.I = ???
+ v
+ }
+ println(showRaw(code))
+}
+
+object Test extends App {
+ try {
+ val v = (new A).code.eval
+ } catch {
+ case ex: InvocationTargetException if ex.getCause.isInstanceOf[NotImplementedError] =>
+ }
+}
diff --git a/test/files/run/t6591_7.check b/test/files/run/t6591_7.check
new file mode 100644
index 0000000..e21a366
--- /dev/null
+++ b/test/files/run/t6591_7.check
@@ -0,0 +1,4 @@
+name = x, stable = true
+name = y, stable = true
+name = z, stable = false
+name = C, stable = true
diff --git a/test/files/run/t6591_7.scala b/test/files/run/t6591_7.scala
new file mode 100644
index 0000000..b6c8d39
--- /dev/null
+++ b/test/files/run/t6591_7.scala
@@ -0,0 +1,26 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ locally {
+ val x = 2
+ def y = 3
+ var z = 4
+ class C {
+ var w = 5
+ locally {
+ val expr = reify(x + y + z + w)
+ // blocked by SI-7103, though it's not the focus of this test
+ // therefore I'm just commenting out the evaluation
+ // println(expr.eval)
+ expr.tree.freeTerms foreach (ft => {
+ // blocked by SI-7104, though it's not the focus of this test
+ // therefore I'm just commenting out the call to typeSignature
+ // println(s"name = ${ft.name}, sig = ${ft.typeSignature}, stable = ${ft.isStable}")
+ println(s"name = ${ft.name}, stable = ${ft.isStable}")
+ })
+ }
+ }
+ new C()
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t6611.scala b/test/files/run/t6611.scala
new file mode 100644
index 0000000..0947a48
--- /dev/null
+++ b/test/files/run/t6611.scala
@@ -0,0 +1,61 @@
+object Test extends App {
+ locally {
+ val a = Array("1")
+ val a2 = Array(a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array("1": Object)
+ val a2 = Array[Object](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(true)
+ val a2 = Array[Boolean](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1: Short)
+ val a2 = Array[Short](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1: Byte)
+ val a2 = Array[Byte](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1)
+ val a2 = Array[Int](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1L)
+ val a2 = Array[Long](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1f)
+ val a2 = Array[Float](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1d)
+ val a2 = Array[Double](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(())
+ val a2 = Array[Unit](a: _*)
+ assert(a ne a2)
+ }
+}
diff --git a/test/files/run/t6614.check b/test/files/run/t6614.check
new file mode 100644
index 0000000..2e80ebd
--- /dev/null
+++ b/test/files/run/t6614.check
@@ -0,0 +1,11 @@
+(ArrayStack(),true)
+(ArrayStack(0),true)
+(ArrayStack(0, 1),true)
+(ArrayStack(0, 1, 2),true)
+(ArrayStack(0, 1, 2, 3),true)
+(ArrayStack(0, 1, 2, 3, 4),true)
+(ArrayStack(0, 1, 2, 3, 4, 5),true)
+(ArrayStack(0, 1, 2, 3, 4, 5, 6),true)
+(ArrayStack(0, 1, 2, 3, 4, 5, 6, 7),true)
+(ArrayStack(0, 1, 2, 3, 4, 5, 6, 7, 8),true)
+(ArrayStack(0, 1, 2, 3, 4, 5, 6, 7, 8, 9),true)
diff --git a/test/files/run/t6614.scala b/test/files/run/t6614.scala
new file mode 100644
index 0000000..3ad9f36
--- /dev/null
+++ b/test/files/run/t6614.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ import scala.collection.mutable.ArrayStack
+
+ println((for (i <- 0 to 10) yield {
+ val in = ArrayStack.tabulate(i)(_.toString)
+ (in, (in filter (_ => true)) == in)
+ }).mkString("\n"))
+}
diff --git a/test/files/run/t6628.check b/test/files/run/t6628.check
new file mode 100644
index 0000000..bb101b6
--- /dev/null
+++ b/test/files/run/t6628.check
@@ -0,0 +1,2 @@
+true
+true
diff --git a/test/files/run/t6628.scala b/test/files/run/t6628.scala
new file mode 100644
index 0000000..84524a7
--- /dev/null
+++ b/test/files/run/t6628.scala
@@ -0,0 +1,11 @@
+object Test {
+ def coll = new Traversable[String] {
+ override def foreach[U](f:String=>U) { f("1") }
+ }
+ val dropped = coll.view drop 1
+
+ def main(args: Array[String]): Unit = {
+ println(dropped.isEmpty)
+ println(dropped.force.isEmpty)
+ }
+}
diff --git a/test/files/run/t6631.scala b/test/files/run/t6631.scala
new file mode 100644
index 0000000..e472b83
--- /dev/null
+++ b/test/files/run/t6631.scala
@@ -0,0 +1,18 @@
+import reflect.ClassTag
+
+object Test extends App {
+ def intercept[T <: Throwable : ClassTag](act: => Any) = try {
+ act
+ } catch {
+ case x: Throwable =>
+ val cls = implicitly[ClassTag[T]].runtimeClass
+ assert(cls.isInstance(x), (x.getClass, x, cls).toString)
+ }
+ assert(s"""\f\r\n\t""" == "\f\r\n\t")
+
+ import StringContext.InvalidEscapeException
+ intercept[InvalidEscapeException](s"""\""")
+ intercept[InvalidEscapeException](s"""\x""")
+ intercept[InvalidEscapeException](s"\")
+
+}
diff --git a/test/files/run/t6632.check b/test/files/run/t6632.check
new file mode 100644
index 0000000..1f084b1
--- /dev/null
+++ b/test/files/run/t6632.check
@@ -0,0 +1,3 @@
+java.lang.IndexOutOfBoundsException: -1
+java.lang.IndexOutOfBoundsException: -2
+java.lang.IndexOutOfBoundsException: -3
diff --git a/test/files/run/t6632.scala b/test/files/run/t6632.scala
new file mode 100644
index 0000000..c1c8d4a
--- /dev/null
+++ b/test/files/run/t6632.scala
@@ -0,0 +1,29 @@
+object Test extends App {
+ import collection.mutable.ListBuffer
+
+ def newLB = ListBuffer('a, 'b, 'c, 'd, 'e)
+
+ val lb0 = newLB
+
+ try {
+ lb0.insert(-1, 'x)
+ } catch {
+ case ex: IndexOutOfBoundsException => println(ex)
+ }
+
+ val lb1 = newLB
+
+ try {
+ lb1.insertAll(-2, Array('x, 'y, 'z))
+ } catch {
+ case ex: IndexOutOfBoundsException => println(ex)
+ }
+
+ val lb2 = newLB
+
+ try {
+ lb2.update(-3, 'u)
+ } catch {
+ case ex: IndexOutOfBoundsException => println(ex)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t6633.check b/test/files/run/t6633.check
new file mode 100644
index 0000000..1ff8cdb
--- /dev/null
+++ b/test/files/run/t6633.check
@@ -0,0 +1,3 @@
+java.lang.IndexOutOfBoundsException: 9
+replStringOf OK
+length OK
diff --git a/test/files/run/t6633.scala b/test/files/run/t6633.scala
new file mode 100644
index 0000000..bd993c8
--- /dev/null
+++ b/test/files/run/t6633.scala
@@ -0,0 +1,33 @@
+object Test extends App {
+ import collection.mutable.ListBuffer
+
+ def newLB = ListBuffer('a, 'b, 'c, 'd, 'e)
+
+ val lb0 = newLB
+
+ try {
+ lb0.insert(9, 'x)
+ } catch {
+ case ex: IndexOutOfBoundsException => println(ex)
+ }
+
+ val lb1 = newLB
+
+ try {
+ lb1.insert(9, 'x)
+ } catch {
+ case ex: IndexOutOfBoundsException =>
+ }
+
+ val replStr = scala.runtime.ScalaRunTime.replStringOf(lb1, 100)
+ if (replStr == "ListBuffer('a, 'b, 'c, 'd, 'e)\n")
+ println("replStringOf OK")
+ else
+ println("replStringOf FAILED: " + replStr)
+
+ val len = lb1.length
+ if (len == 5)
+ println("length OK")
+ else
+ println("length FAILED: " + len)
+}
\ No newline at end of file
diff --git a/test/files/run/t6634.check b/test/files/run/t6634.check
new file mode 100644
index 0000000..f6cbb30
--- /dev/null
+++ b/test/files/run/t6634.check
@@ -0,0 +1,31 @@
+Trying lb0 ...
+Checking ...
+String OK.
+Length OK.
+
+Trying lb1 ...
+Checking ...
+String OK.
+Length OK.
+
+Trying lb2 ...
+Checking ...
+String OK.
+Length OK.
+
+Trying lb3 ...
+Checking ...
+String OK.
+Length OK.
+
+Trying lb4 ...
+Checking ...
+String OK.
+Length OK.
+
+Trying lb5 ...
+java.lang.IllegalArgumentException: removing negative number (-1) of elements
+Checking ...
+String OK.
+Length OK.
+
diff --git a/test/files/run/t6634.scala b/test/files/run/t6634.scala
new file mode 100644
index 0000000..759e6d5
--- /dev/null
+++ b/test/files/run/t6634.scala
@@ -0,0 +1,80 @@
+import collection.mutable.ListBuffer
+
+object Test extends App {
+ def newLB = ListBuffer('a, 'b, 'c, 'd, 'e)
+
+ val lb0 = newLB
+ println("Trying lb0 ...")
+ try {
+ lb0.remove(5, 0)
+ } catch {
+ // Not thrown in 2.10, will be thrown in 2.11
+ case ex: IndexOutOfBoundsException => println(ex)
+ }
+ checkNotCorrupted(lb0)
+
+ val lb1 = newLB
+ println("Trying lb1 ...")
+ try {
+ lb1.remove(6, 6)
+ } catch {
+ // Not thrown in 2.10, will be thrown in 2.11
+ case ex: IndexOutOfBoundsException => println(ex)
+ }
+ checkNotCorrupted(lb1)
+
+ val lb2 = newLB
+ println("Trying lb2 ...")
+ try {
+ lb2.remove(99, 6)
+ } catch {
+ // Not thrown in 2.10, will be thrown in 2.11
+ case ex: IndexOutOfBoundsException => println(ex)
+ }
+ checkNotCorrupted(lb2)
+
+ val lb3 = newLB
+ println("Trying lb3 ...")
+ try {
+ lb3.remove(1, 9)
+ } catch {
+ // Not thrown in 2.10, will be thrown in 2.11
+ case ex: IllegalArgumentException => println(ex)
+ }
+ checkNotCorrupted(lb3, "ListBuffer('a)", 1)
+
+ val lb4 = newLB
+ println("Trying lb4 ...")
+ try {
+ lb4.remove(-1, 1)
+ } catch {
+ // Not thrown in 2.10, will be thrown in 2.11
+ case ex: IndexOutOfBoundsException => println(ex)
+ }
+ checkNotCorrupted(lb4, "ListBuffer('b, 'c, 'd, 'e)", 4)
+
+ val lb5 = newLB
+ println("Trying lb5 ...")
+ try {
+ lb5.remove(1, -1)
+ } catch {
+ case ex: IllegalArgumentException => println(ex)
+ }
+ checkNotCorrupted(lb5)
+
+ // buffer should neither be changed nor corrupted after calling remove with invalid arguments
+ def checkNotCorrupted(
+ lb: ListBuffer[Symbol],
+ expectedString: String = "ListBuffer('a, 'b, 'c, 'd, 'e)",
+ expectedLength: Int = 5) = {
+ println("Checking ...")
+ val replStr = scala.runtime.ScalaRunTime.replStringOf(lb, 100)
+ if (replStr == expectedString + "\n") println("String OK.")
+ else println("!!! replStringOf FAILED: " + replStr)
+
+ val len = lb.length
+ if (len == expectedLength) println("Length OK.")
+ else println("!!! length FAILED: " + len)
+ println()
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t6637.check b/test/files/run/t6637.check
new file mode 100644
index 0000000..9766475
--- /dev/null
+++ b/test/files/run/t6637.check
@@ -0,0 +1 @@
+ok
diff --git a/test/files/run/t6637.scala b/test/files/run/t6637.scala
new file mode 100644
index 0000000..d3c3803
--- /dev/null
+++ b/test/files/run/t6637.scala
@@ -0,0 +1,8 @@
+
+object Test extends App {
+ try {
+ class A ; class B ; List().head.isInstanceOf[A with B]
+ } catch {
+ case _ :java.util.NoSuchElementException => println("ok")
+ }
+}
diff --git a/test/files/run/t6644.scala b/test/files/run/t6644.scala
new file mode 100644
index 0000000..b8b36f9
--- /dev/null
+++ b/test/files/run/t6644.scala
@@ -0,0 +1,8 @@
+class Testable(val c: String) extends AnyVal {
+ def matching(cases: Boolean*) = cases contains true
+}
+
+object Test extends App {
+ assert(new Testable("").matching(true, false))
+}
+
diff --git a/test/files/run/t6646.check b/test/files/run/t6646.check
new file mode 100644
index 0000000..b0b7ad3
--- /dev/null
+++ b/test/files/run/t6646.check
@@ -0,0 +1,5 @@
+Found NotNull
+Found lower
+Found 2
+A single ident is always a pattern
+A single ident is always a pattern
diff --git a/test/files/run/t6646.scala b/test/files/run/t6646.scala
new file mode 100644
index 0000000..150b0df
--- /dev/null
+++ b/test/files/run/t6646.scala
@@ -0,0 +1,19 @@
+sealed trait ColumnOption
+case object NotNull extends ColumnOption
+case object PrimaryKey extends ColumnOption
+case object lower extends ColumnOption
+
+object Test {
+ def main(args: Array[String]) {
+ val l = List(PrimaryKey, NotNull, lower)
+
+ // withFilter must be generated in these
+ for (option @ NotNull <- l) println("Found " + option)
+ for (option @ `lower` <- l) println("Found " + option)
+ for ((`lower`, i) <- l.zipWithIndex) println("Found " + i)
+
+ // no withFilter
+ for (X <- List("A single ident is always a pattern")) println(X)
+ for (`x` <- List("A single ident is always a pattern")) println(`x`)
+ }
+}
diff --git a/test/files/run/t6662.check b/test/files/run/t6662.check
new file mode 100644
index 0000000..6a452c1
--- /dev/null
+++ b/test/files/run/t6662.check
@@ -0,0 +1 @@
+()
diff --git a/test/files/run/t6662/Macro_1.scala b/test/files/run/t6662/Macro_1.scala
new file mode 100644
index 0000000..f373eaa
--- /dev/null
+++ b/test/files/run/t6662/Macro_1.scala
@@ -0,0 +1,8 @@
+import language.experimental.macros
+import scala.reflect.macros.Context
+
+object Demo {
+ def id[T](a: T): T = macro idImpl[T]
+
+ def idImpl[T: c.WeakTypeTag](c: Context)(a: c.Expr[T]): c.Expr[T] = a
+}
diff --git a/test/files/run/t6662/Test_2.scala b/test/files/run/t6662/Test_2.scala
new file mode 100644
index 0000000..03a80b6
--- /dev/null
+++ b/test/files/run/t6662/Test_2.scala
@@ -0,0 +1,8 @@
+// Macro usage:
+
+object Test {
+ def main(args: Array[String]) {
+ val s = Demo id ()
+ println(s)
+ }
+}
diff --git a/test/files/run/syncchannel.check b/test/files/run/t6663.check
similarity index 100%
copy from test/files/run/syncchannel.check
copy to test/files/run/t6663.check
diff --git a/test/files/run/t6663.scala b/test/files/run/t6663.scala
new file mode 100644
index 0000000..6818d28
--- /dev/null
+++ b/test/files/run/t6663.scala
@@ -0,0 +1,17 @@
+import language.dynamics
+
+class C(v: Any) extends Dynamic {
+ def selectDynamic[T](n: String): Option[T] = Option(v.asInstanceOf[T])
+ def applyDynamic[T](n: String)(): Option[T] = Option(v.asInstanceOf[T])
+}
+
+object Test extends App {
+ // this should be converted to
+ // C(42).selectDynamic[Int]("foo").get
+ // but, before fixing SI-6663, became
+ // C(42).selectDynamic[Nothing]("foo").get
+ // leading to a ClassCastException
+ var v = new C(42).foo[Int].get
+ println(v)
+}
+
diff --git a/test/files/run/t6666a.scala b/test/files/run/t6666a.scala
new file mode 100644
index 0000000..1d208a3
--- /dev/null
+++ b/test/files/run/t6666a.scala
@@ -0,0 +1,16 @@
+class A(a: Any)
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ }
+
+ val x: Unit = {
+ object InVal extends A({
+ new {} // okay
+ val o = {new {}} // nesting triggers a VerifyError.
+ null
+ });
+ InVal;
+ ()
+ };
+}
diff --git a/test/files/run/t6669.scala b/test/files/run/t6669.scala
new file mode 100644
index 0000000..b55718b
--- /dev/null
+++ b/test/files/run/t6669.scala
@@ -0,0 +1,26 @@
+import java.io.{ByteArrayOutputStream, PrintStream}
+
+object Test extends App {
+ val baos = new ByteArrayOutputStream()
+ val ps = new PrintStream(baos)
+
+ // first test with the default classpath
+ (scala.Console withOut ps) {
+ scala.tools.scalap.Main.main(Array("-verbose", "java.lang.Object"))
+ }
+
+ // now make sure we saw the '.' in the classpath
+ val msg1 = baos.toString()
+ assert(msg1 contains "directory classpath: .", s"Did not see '.' in the default class path. Full results were:\n$msg1")
+
+ // then test again with a user specified classpath
+ baos.reset
+
+ (scala.Console withOut ps) {
+ scala.tools.scalap.Main.main(Array("-verbose", "-cp", "whatever", "java.lang.Object"))
+ }
+
+ // now make sure we did not see the '.' in the classpath
+ val msg2 = baos.toString()
+ assert(!(msg2 contains "directory classpath: ."), s"Did saw '.' in the user specified class path. Full results were:\n$msg2")
+}
diff --git a/test/files/run/t6673.check b/test/files/run/t6673.check
new file mode 100644
index 0000000..ef2aa55
--- /dev/null
+++ b/test/files/run/t6673.check
@@ -0,0 +1 @@
+List(x)
diff --git a/test/files/run/t6673.scala b/test/files/run/t6673.scala
new file mode 100644
index 0000000..115bbdf
--- /dev/null
+++ b/test/files/run/t6673.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ def foo(f: String => Array[String])(s: String) = f(s)
+ val test = foo(Array(_)) _
+ println(test("x").toList)
+}
\ No newline at end of file
diff --git a/test/files/run/t6677.scala b/test/files/run/t6677.scala
new file mode 100644
index 0000000..e6eaf6a
--- /dev/null
+++ b/test/files/run/t6677.scala
@@ -0,0 +1,28 @@
+
+class Test {
+ val cm: reflect.runtime.universe.Mirror = reflect.runtime.currentMirror
+ def error {
+ new cm.universe.Traverser // java.lang.VerifyError: (class: Test, method: error signature: ()V) Incompatible object argument for function call
+
+ }
+
+ def okay1 {
+ val cm: reflect.runtime.universe.Mirror = reflect.runtime.currentMirror
+
+ new cm.universe.Traverser
+ }
+
+ def okay2 {
+ val cm: reflect.runtime.universe.Mirror = reflect.runtime.currentMirror
+ val u: reflect.runtime.universe.type = cm.universe
+ new u.Traverser
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ new Test().error
+ new Test().okay1
+ new Test().okay2
+ }
+}
diff --git a/test/files/run/t6677b.scala b/test/files/run/t6677b.scala
new file mode 100644
index 0000000..e4fe5e3
--- /dev/null
+++ b/test/files/run/t6677b.scala
@@ -0,0 +1,33 @@
+trait U {
+ trait U1 {
+ class X
+ }
+ type U11 <: U1
+ val u : U11 = null.asInstanceOf[U11]
+}
+trait A extends U
+
+trait B extends U {
+ def foo = ""
+ class U11 extends U1 { class X extends super.X { foo } } // refer to foo to add $outer pointer
+ override val u = new U11
+}
+class C {
+ val ab: A with B = new A with B // `B with A` works.
+
+ def foo {
+ // fails
+ new ab.u.X
+
+ // works:
+ val u = ab.u
+ new u.X
+ }
+}
+object Test {
+ def main(args: Array[String]) {
+ // java.lang.NoSuchMethodError: A.u()LB$U11;
+ // at C.foo(t6677b.scala:23)
+ new C().foo
+ }
+}
diff --git a/test/files/run/t6687.scala b/test/files/run/t6687.scala
new file mode 100644
index 0000000..ee44e5f
--- /dev/null
+++ b/test/files/run/t6687.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+
+class A { lazy val x = 1 }
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val vars = typeOf[A].members.toList filter (x => x.isTerm && x.asTerm.isVar)
+ assert(vars.isEmpty, vars)
+ }
+}
diff --git a/test/files/run/t6690.scala b/test/files/run/t6690.scala
new file mode 100644
index 0000000..43ede96
--- /dev/null
+++ b/test/files/run/t6690.scala
@@ -0,0 +1,62 @@
+import scala.collection.mutable
+
+object Test extends App {
+ def last0(ml: mutable.MutableList[Int]) =
+ ml.asInstanceOf[{def last0: mutable.LinkedList[Int]}].last0
+
+ def first0(ml: mutable.MutableList[Int]) =
+ ml.asInstanceOf[{def first0: mutable.LinkedList[Int]}].first0
+
+ val f = mutable.Queue[Int]()
+ def check(desc: String) {
+ assert(f.length == 0, s"$desc: non empty: $f")
+ assert(last0(f).isEmpty, s"$desc: last0 leak: ${last0(f)}")
+ assert(first0(f).isEmpty, s"$desc: first0 leak: ${last0(f)}")
+ }
+
+ f.enqueue(1)
+ f.dequeue()
+ check("dequeue 1")
+
+ f.enqueue(1)
+ f.enqueue(2)
+ f.dequeue()
+ assert(last0(f).toList == List(2), last0(f))
+ f.dequeue()
+ check("dequeue 2")
+
+ f.enqueue(1)
+ f.dequeueAll(_ => false)
+ f.dequeueAll(_ => true)
+ check("dequeueAll")
+
+ f.enqueue(1)
+ f.dequeueFirst(_ => true)
+ check("dequeueFirst")
+
+ {
+ f.enqueue(1)
+ val tail = f.tail
+ assert(last0(tail).isEmpty, last0(tail))
+ assert(first0(tail).isEmpty, first0(tail))
+ }
+
+ {
+ val ml = mutable.MutableList[Int]()
+ 1 +=: ml
+ val tail = ml.tail
+ assert(last0(tail).isEmpty, last0(tail))
+ assert(first0(tail).isEmpty, first0(tail))
+ }
+
+ {
+ val ml = mutable.MutableList[Int]()
+ 1 +=: ml
+ ml += 2
+ val tail = ml.tail
+ assert(last0(tail).toList == List(2), last0(tail))
+ assert(first0(tail) == last0(tail).toList, first0(tail))
+ assert(last0(tail.tail).toList == Nil, last0(tail.tail).toList)
+ assert(first0(tail.tail) == Nil, first0(tail.tail))
+ }
+}
diff --git a/test/files/run/t6695.scala b/test/files/run/t6695.scala
new file mode 100644
index 0000000..b527238
--- /dev/null
+++ b/test/files/run/t6695.scala
@@ -0,0 +1,18 @@
+object Test extends App {
+ try {
+ Array("a", "b", "c") match {
+ case Array("a", "x", "c") => println("x")
+ case Array("a", "b", "x") => println("a");
+ case Array("a", "d", _*) => println("wrongly positive")
+ }
+ assert(false, "match succeeded")
+ } catch {
+ case _: MatchError => // okay
+ }
+
+ Array("a", "b", "c") match {
+ case Array("a", "x", "c") => println("x")
+ case Array("a", "b", "x") => println("a");
+ case Array("a", "b", _*) => // okay
+ }
+}
diff --git a/test/files/run/t6706.scala b/test/files/run/t6706.scala
new file mode 100644
index 0000000..905494c
--- /dev/null
+++ b/test/files/run/t6706.scala
@@ -0,0 +1,14 @@
+object Test {
+ var name = "foo" + 1
+ var s1 = Symbol(name)
+ s1 = null
+ System.gc
+ val s2 = Symbol("foo1")
+ name = null
+ System.gc
+ val s3 = Symbol("foo1")
+
+ def main(args: Array[String]): Unit = {
+ assert(s2 eq s3, ((s2, System.identityHashCode(s2), s3, System.identityHashCode(s3))))
+ }
+}
diff --git a/test/files/run/t6715.scala b/test/files/run/t6715.scala
new file mode 100644
index 0000000..07ff342
--- /dev/null
+++ b/test/files/run/t6715.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+
+class A {
+ def $$ = 1
+ def $times = 1
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val memberSet: Set[String] = typeOf[A].members.map{ _.toString }.toSet
+ assert(memberSet contains "method *")
+ assert(memberSet contains "method $$")
+ assert(! (memberSet contains "method"))
+ }
+}
diff --git a/test/files/run/t6725-1.check b/test/files/run/t6725-1.check
new file mode 100644
index 0000000..6ed281c
--- /dev/null
+++ b/test/files/run/t6725-1.check
@@ -0,0 +1,2 @@
+1
+1
diff --git a/test/files/run/t6725-1.scala b/test/files/run/t6725-1.scala
new file mode 100644
index 0000000..a167ef8
--- /dev/null
+++ b/test/files/run/t6725-1.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ val a = 1
+ val s = f"$a%s%n$a%s"
+ println(s)
+}
\ No newline at end of file
diff --git a/test/files/run/t6725-2.check b/test/files/run/t6725-2.check
new file mode 100644
index 0000000..3496917
--- /dev/null
+++ b/test/files/run/t6725-2.check
@@ -0,0 +1,8 @@
+
+
+aaaa
+
+
+aaaa
+aaaa
+aaaa
diff --git a/test/files/run/t6725-2.scala b/test/files/run/t6725-2.scala
new file mode 100644
index 0000000..e033cf5
--- /dev/null
+++ b/test/files/run/t6725-2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ println(f"%n")
+ println(f"aaaa%n")
+ println(f"%naaaa")
+ println(f"aaaa%naaaa")
+}
\ No newline at end of file
diff --git a/test/files/run/t6731.check b/test/files/run/t6731.check
new file mode 100644
index 0000000..a5d59bd
--- /dev/null
+++ b/test/files/run/t6731.check
@@ -0,0 +1,40 @@
+Mono$.bar()
+Mono$.bar
+Mono$.bar()
+Mono$.bar
+Mono$.bar
+Mono$.baz
+Mono$.bar(bippy=1, boppy=2)
+Mono$.baz
+Poly.bar[Nothing]
+Poly.bar[Int]
+Poly.bar[Nothing]()
+Poly.bar[Int]()
+Poly.bar[Int](1, 2, 3)
+Poly.bar[Nothing]
+Poly.bar[Int]
+Poly.bar[Nothing]()
+Poly.bar[Int]()
+Poly.bar[Int](1, 2, 3)
+Updating.bar
+Updating.bar = b
+Nest1$Nest2$Nest3$.bippy(1, 2, 3)
+Nest1$Nest2$Nest3$.bippy
+Named.bippy(a=1, b=2)
+Named.boppy(c=3, d=4)
+Named.apply()
+Named.apply()
+Named.apply(e=5, f=6)
+Named2.bippy(1)(q0, c)
+Named2.bippy(1)(q0, c)
+Named2.bippy(1)(b, q0)
+Named2.bippy(1)(q0, c)
+Named2.bippy(1)(c, b)
+Named2.bippy(1)(b, c)
+Named2.bippy(1)(q0, c)
+Named2.bippy(2)(b, c)
+Named2.bippy(1)(q0, c)
+Named2.bippy(5)(b, c)
+Named2.dingus(100)(b, dong)
+Named2.bippy(1)(q0, q1)
+Named2.hello(100)(!!, !!)
diff --git a/test/files/run/t6731.scala b/test/files/run/t6731.scala
new file mode 100644
index 0000000..89d212e
--- /dev/null
+++ b/test/files/run/t6731.scala
@@ -0,0 +1,143 @@
+import scala.language.dynamics
+import scala.reflect.{ ClassTag, classTag }
+
+object Util {
+ def show[T](x: T): T = { println(x) ; x }
+ def mkArgs(xs: Any*) = xs map { case ((k, v)) => k + "=" + v ; case x => "" + x } mkString ("(", ", ", ")")
+}
+import Util._
+
+abstract class MonoDynamic extends Dynamic {
+ def selectDynamic(name: String): String = show(this + "." + name)
+ def applyDynamic(name: String)(args: Any*): String = show(this + "." + name + mkArgs(args: _*))
+ def applyDynamicNamed(name: String)(args: (String, Any)*): String = show(this + "." + name + mkArgs(args: _*))
+
+ override def toString = this.getClass.getName split '.' last
+}
+
+object Mono extends MonoDynamic {
+ def f(s: String): String = s
+
+ def f1 = this.bar()
+ def f2 = this.bar
+ def f3 = f(this.bar())
+ def f4 = f(this.bar)
+ def f5 = f(f(f(f(f(f(this.bar)))))) + f(f(f(f(f(f(this.baz))))))
+ def f6 = f(f(f(f(f(f(this.bar(bippy = 1, boppy = 2))))))) + f(f(f(f(f(f(this.baz))))))
+}
+
+object Poly extends Dynamic {
+ def selectDynamic[T: ClassTag](name: String): String = show(s"$this.$name[${classTag[T]}]")
+ def applyDynamic[T: ClassTag](name: String)(args: Any*): String = show(args.mkString(s"$this.$name[${classTag[T]}](", ", ", ")"))
+
+ def f(s: String): String = s
+
+ def f1 = this.bar
+ def f2 = this.bar[Int]
+ def f3 = this.bar()
+ def f4 = this.bar[Int]()
+ def f5 = this.bar[Int](1, 2, 3)
+
+ def f6 = f(f(this.bar))
+ def f7 = f(f(this.bar[Int]))
+ def f8 = f(f(this.bar()))
+ def f9 = f(f(this.bar[Int]()))
+ def f10 = f(f(this.bar[Int](1, 2, 3)))
+
+ override def toString = "Poly"
+}
+
+object Updating extends Dynamic {
+ def selectDynamic(name: String): String = show(s"$this.$name")
+ def updateDynamic(name: String)(value: Any): String = show(s"$this.$name = $value")
+
+ def f1 = this.bar
+ def f2 = this.bar = "b"
+
+ override def toString = "Updating"
+}
+
+object Nest1 extends Dynamic {
+ def applyDynamic(name: String)(args: Any*): Nest2.type = Nest2
+
+ object Nest2 extends Dynamic {
+ def applyDynamicNamed(name: String)(args: (String, Any)*): Nest3.type = Nest3
+
+ object Nest3 extends MonoDynamic {
+
+ }
+ }
+
+ def f1 = Nest1.bip().bop(foo = "bar").bippy(1, 2, 3)
+ def f2 = Nest1.bip("abc").bop(foo = 5).bippy
+}
+
+object Named extends Dynamic {
+ def applyDynamic(name: String)(args: Any*): Named.type = {
+ show(this + "." + name + mkArgs(args: _*))
+ this
+ }
+ def applyDynamicNamed(name: String)(args: (String, Any)*): Named.type = {
+ show(this + "." + name + mkArgs(args: _*))
+ this
+ }
+
+ def f1 = this.bippy(a = 1, b = 2).boppy(c = 3, d = 4)()()(e = 5, f = 6)
+ override def toString = "Named"
+}
+
+object Named2 extends Dynamic {
+ def applyDynamic(name: String)(a: Any)(b: Any = "b", c: Any = "c"): Named2.type = {
+ show(this + "." + name + mkArgs(a) + mkArgs(b, c))
+ this
+ }
+ def applyDynamicNamed(name: String)(a: (String, Any))(b: (String, Any), c: (String, Any)): Named2.type = {
+ show(this + "." + name + mkArgs(a) + mkArgs(b, c))
+ this
+ }
+
+ def f1 = this.bippy(1)(b = "q0")
+ def f2 = this.bippy(1)("q0")
+ def f3 = this.bippy(1)(c = "q0")
+ def f4 = this.bippy(1)("q0")
+ def f5 = this.bippy(1)(c = "b", b = "c")
+ def f6 = this.bippy(1)("b", "c")
+ def f7 = this.bippy(1)(b = "q0").bippy(2)()
+ def f8 = this.bippy(1)("q0").bippy(5)(c = "c").dingus(100)(c = "dong")
+ def f9 = this.bippy(1)(b = "q0", c = "q1").hello(100)("!!", "!!")
+
+ override def toString = "Named2"
+}
+
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ {
+ import Mono._
+ f1 ; f2 ; f3 ; f4 ; f5
+ f6
+ }
+ {
+ import Poly._
+ f1 ; f2 ; f3 ; f4 ; f5
+ f6 ; f7 ; f8 ; f9 ; f10
+ }
+ {
+ import Updating._
+ f1 ; f2
+ }
+ {
+ import Nest1._
+ f1 ; f2
+ }
+ {
+ import Named._
+ f1
+ }
+ {
+ import Named2._
+ f1 ; f2 ; f3 ; f4 ; f5
+ f6 ; f7 ; f8 ; f9
+ }
+ }
+}
diff --git a/test/files/run/t6793.scala b/test/files/run/t6793.scala
new file mode 100644
index 0000000..0b1f161
--- /dev/null
+++ b/test/files/run/t6793.scala
@@ -0,0 +1,9 @@
+package a { class C1(private[a] val v0: String) }
+package b { class C2(v1: String) extends a.C1(v1) { def foo = v1 } }
+
+object Test extends App {
+ new b.C2("x")
+
+ val c2Fields = classOf[b.C2].getDeclaredFields
+ assert(c2Fields.size == 1, c2Fields.map(_.getName).toList)
+}
diff --git a/test/files/run/t6793b.scala b/test/files/run/t6793b.scala
new file mode 100644
index 0000000..cb3f2fb
--- /dev/null
+++ b/test/files/run/t6793b.scala
@@ -0,0 +1,11 @@
+package a {
+ class C1(val v0: String)
+ class C2(v1: String) extends a.C1(v1) { def foo = v1 }
+}
+
+object Test extends App {
+ new a.C2("x")
+
+ val c2Fields = classOf[a.C2].getDeclaredFields
+ assert(c2Fields.isEmpty, c2Fields.map(_.getName).mkString(", "))
+}
diff --git a/test/files/run/t6793c.scala b/test/files/run/t6793c.scala
new file mode 100644
index 0000000..e28c7c8
--- /dev/null
+++ b/test/files/run/t6793c.scala
@@ -0,0 +1,11 @@
+package a {
+ class C1(private[a] val v0: String)
+ class C2(v1: String) extends a.C1(v1) { def foo = v1 }
+}
+
+object Test extends App {
+ new a.C2("x").foo
+
+ val c2Fields = classOf[a.C2].getDeclaredFields
+ assert(c2Fields.isEmpty, c2Fields.map(_.getName).toList)
+}
diff --git a/test/files/run/t6827.check b/test/files/run/t6827.check
new file mode 100644
index 0000000..3a3a71c
--- /dev/null
+++ b/test/files/run/t6827.check
@@ -0,0 +1,15 @@
+start at -5: java.lang.IllegalArgumentException: requirement failed: start -5 out of range 10
+start at -1: java.lang.IllegalArgumentException: requirement failed: start -1 out of range 10
+start at limit: java.lang.IllegalArgumentException: requirement failed: start 10 out of range 10
+start at limit-1: ok
+first 10: ok
+read all: ok
+test huge len: ok
+5 from 5: ok
+20 from 5: ok
+test len overflow: ok
+start beyond limit: java.lang.IllegalArgumentException: requirement failed: start 30 out of range 10
+read 0: ok
+read -1: ok
+invalid read 0: java.lang.IllegalArgumentException: requirement failed: start 30 out of range 10
+invalid read -1: java.lang.IllegalArgumentException: requirement failed: start 30 out of range 10
diff --git a/test/files/run/t6827.scala b/test/files/run/t6827.scala
new file mode 100644
index 0000000..8e17af0
--- /dev/null
+++ b/test/files/run/t6827.scala
@@ -0,0 +1,34 @@
+object Test extends App {
+ val ns = (0 until 20)
+ val arr = new Array[Int](10)
+
+ def tryit(label: String, start: Int, len: Int): Unit = {
+ val status = try {
+ val it = ns.toIterator
+ it.copyToArray(arr, start, len)
+ "ok"
+ } catch {
+ case e: Exception => e.toString
+ }
+ println("%s: %s" format (label, status))
+ }
+
+ tryit("start at -5", -5, 10)
+ tryit("start at -1", -1, 10)
+ tryit("start at limit", 10, 10)
+ tryit("start at limit-1", 9, 10)
+ tryit("first 10", 0, 10)
+ tryit("read all", 0, 20)
+ tryit("test huge len", 0, Int.MaxValue)
+ tryit("5 from 5", 5, 10)
+ tryit("20 from 5", 5, 20)
+ tryit("test len overflow", 5, Int.MaxValue)
+ tryit("start beyond limit", 30, 10)
+ tryit("read 0", 0, 0)
+ tryit("read -1", 0, -1)
+ tryit("invalid read 0", 30, 0)
+ tryit("invalid read -1", 30, -1)
+
+ // okay, see SI-7128
+ "...".toIterator.copyToArray(new Array[Char](0), 0, 0)
+}
diff --git a/test/files/run/t6853.scala b/test/files/run/t6853.scala
new file mode 100644
index 0000000..352375c
--- /dev/null
+++ b/test/files/run/t6853.scala
@@ -0,0 +1,18 @@
+// Test cases: the only place we can cut and paste without crying
+// ourself to sleep.
+object Test {
+
+ def main(args: Array[String]): Unit = {
+ // First testing the basic operations
+ val m = collection.mutable.ListMap[String, Int]()
+ var i = 0
+ while(i < 2) { m += ("foo" + i) -> i; i = i+1}
+ assert(m == Map("foo1"->1,"foo0"->0))
+ m-= "foo0"
+ assert(m == Map("foo1"->1))
+ // Now checking if it scales as described in SI-6853
+ i = 0
+ while(i < 80000) { m += ("foo" + i) -> i; i = i+1}
+ assert(m.size == 80000)
+ }
+}
diff --git a/test/files/run/t6863.scala b/test/files/run/t6863.scala
new file mode 100644
index 0000000..d77adb6
--- /dev/null
+++ b/test/files/run/t6863.scala
@@ -0,0 +1,114 @@
+/** Make sure that when a variable is captured its initialization expression is handled properly */
+object Test {
+ def lazyVal() = {
+ // internally lazy vals become vars which are initialized with "_", so they need to be tested just like vars do
+ lazy val x = "42"
+ assert({ () => x }.apply == "42")
+ }
+ def ident() = {
+ val y = "42"
+ var x = y
+ assert({ () => x }.apply == "42")
+ }
+ def apply() = {
+ def y(x : Int) = x.toString
+ var x = y(42)
+ assert({ () => x }.apply == "42")
+ }
+ def literal() = {
+ var x = "42"
+ assert({ () => x }.apply == "42")
+ }
+ def `new`() = {
+ var x = new String("42")
+ assert({ () => x }.apply == "42")
+ }
+ def select() = {
+ object Foo{val bar = "42"}
+ var x = Foo.bar
+ assert({ () => x }.apply == "42")
+ }
+ def `throw`() = {
+ var x = if (true) "42" else throw new Exception("42")
+ assert({ () => x }.apply == "42")
+ }
+ def assign() = {
+ var y = 1
+ var x = y = 42
+ assert({ () => x}.apply == ())
+ }
+ def valDef() = {
+ var x = {val y = 42}
+ assert({ () => x}.apply == ())
+ }
+ def `return`(): String = {
+ var x = if (true) return "42" else ()
+ assert({ () => x}.apply == ())
+ "42"
+ }
+ def tryFinally() = {
+ var x = try { "42" } finally ()
+ assert({ () => x }.apply == "42")
+ }
+ def tryCatch() = {
+ var x = try { "42" } catch { case _ => "43" }
+ assert({ () => x }.apply == "42")
+ }
+ def `if`() = {
+ var x = if (true) ()
+ assert({ () => x }.apply == ())
+ }
+ def ifElse() = {
+ var x = if(true) "42" else "43"
+ assert({ () => x }.apply == "42")
+ }
+ def matchCase() = {
+ var x = 100 match {
+ case 100 => "42"
+ case _ => "43"
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def block() = {
+ var x = {
+ val y = 42
+ "42"
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def labelDef() = {
+ var x = 100 match {
+ case 100 => try "42" finally ()
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def nested() = {
+ var x = {
+ val y = 42
+ if(true) try "42" catch {case _ => "43"}
+ else "44"
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def main(args: Array[String]) {
+ lazyVal()
+ ident()
+ apply()
+ literal()
+ `new`()
+ select()
+ `throw`()
+ assign()
+ valDef()
+ `return`()
+ tryFinally()
+ tryCatch()
+ ifElse()
+ `if`()
+ matchCase()
+ block()
+ labelDef()
+ nested()
+ }
+}
+
diff --git a/test/files/run/t6888.check b/test/files/run/t6888.check
new file mode 100644
index 0000000..4e8a2de
--- /dev/null
+++ b/test/files/run/t6888.check
@@ -0,0 +1,3 @@
+2
+3
+3
diff --git a/test/files/run/t6888.scala b/test/files/run/t6888.scala
new file mode 100644
index 0000000..0c64cbe
--- /dev/null
+++ b/test/files/run/t6888.scala
@@ -0,0 +1,19 @@
+class C {
+ val x = 1
+ object $ {
+ val y = x + x
+ class abc$ {
+ def xy = x + y
+ }
+ object abc$ {
+ def xy = x + y
+ }
+ }
+}
+
+object Test extends App {
+ val c = new C()
+ println(c.$.y)
+ println(c.$.abc$.xy)
+ println(new c.$.abc$().xy)
+}
diff --git a/test/files/run/t6900.scala b/test/files/run/t6900.scala
new file mode 100644
index 0000000..a29d388
--- /dev/null
+++ b/test/files/run/t6900.scala
@@ -0,0 +1,36 @@
+import annotation.tailrec
+
+trait Universe {
+ type T <: AnyRef
+}
+
+final class Bug {
+ var i = 1
+ def stop() = { i -= 1; i < 0 }
+ // the alias bypasses the fast path in erasures InfoTransformer
+ // predicated on `TypeMap.noChangeToSymbols`
+ type Alias = Any
+
+ @tailrec
+ // So we get two symbols for `universe`, the original on the ValDef
+ // and a clone in the MethodType of `f`.
+ def f(universe: Universe, l: Alias): universe.T = {
+ if (stop()) null.asInstanceOf[universe.T] else f(universe, null)
+ }
+
+ @tailrec
+ def g(universe: Universe)(l: Alias): universe.T = {
+ if (stop()) null.asInstanceOf[universe.T] else g(universe)(l)
+ }
+
+ @tailrec
+ def h(universe: Universe)(l: List[universe.T]): List[universe.T] = {
+ if (stop()) Nil else h(universe)(l)
+ }
+}
+
+object Test extends App {
+ assert(new Bug().f(null, null) == null)
+ assert(new Bug().g(null)(null) == null)
+ assert(new Bug().h(null)(null) == Nil)
+}
\ No newline at end of file
diff --git a/test/files/run/t6911.scala b/test/files/run/t6911.scala
new file mode 100644
index 0000000..dd81257
--- /dev/null
+++ b/test/files/run/t6911.scala
@@ -0,0 +1,24 @@
+trait K {
+ case class CC(name: String)
+ case class DD[+A1, A2](x1: A1, x2: A2)
+}
+
+object Test {
+ object Foo extends K
+ object Bar extends K
+
+ val b1 = Foo.CC("b")
+ val b2 = Bar.CC("b")
+ val b3 = Foo.CC("b")
+
+ val c1 = Foo.DD("a", 5)
+ val c2 = Bar.DD("a", 5)
+ val c3 = Foo.DD("a", 5)
+
+ def main(args: Array[String]): Unit = {
+ assert(b1 != b2, ((b1, b2))) // false under 2.9, true under 2.10-RC5
+ assert(b1 == b3, ((b1, b3)))
+ assert(c1 != c2, ((c1, c2)))
+ assert(c1 == c3, ((c1, c3)))
+ }
+}
diff --git a/test/files/run/t6928-run.check b/test/files/run/t6928-run.check
new file mode 100644
index 0000000..a640c3e
--- /dev/null
+++ b/test/files/run/t6928-run.check
@@ -0,0 +1 @@
+3 As
diff --git a/test/files/run/t6928-run.scala b/test/files/run/t6928-run.scala
new file mode 100644
index 0000000..87a8884
--- /dev/null
+++ b/test/files/run/t6928-run.scala
@@ -0,0 +1,10 @@
+abstract class A( val someAs: A* ) {
+ override def toString = someAs.length + " As"
+}
+object B extends A(null, null, null)
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(B)
+ }
+}
diff --git a/test/files/run/t6935.scala b/test/files/run/t6935.scala
new file mode 100644
index 0000000..dea2d7f
--- /dev/null
+++ b/test/files/run/t6935.scala
@@ -0,0 +1,14 @@
+object Test {
+
+ def main(args: Array[String]): Unit = {
+ import java.io._
+ val bytes = new ByteArrayOutputStream()
+ val out = new ObjectOutputStream(bytes)
+ out.writeObject(())
+ out.close()
+ val buf = bytes.toByteArray
+ val in = new ObjectInputStream(new ByteArrayInputStream(buf))
+ val unit = in.readObject()
+ assert(unit == ())
+ }
+}
diff --git a/test/files/run/t6937.check b/test/files/run/t6937.check
new file mode 100644
index 0000000..9a1fa4c
--- /dev/null
+++ b/test/files/run/t6937.check
@@ -0,0 +1,26 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{universe=>ru}
+
+scala> import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.runtime.{currentMirror=>cm}
+
+scala> import scala.reflect.api.{Universe => ApiUniverse}
+import scala.reflect.api.{Universe=>ApiUniverse}
+
+scala> class A
+defined class A
+
+scala> lazy val apiru = ru: ApiUniverse
+apiru: scala.reflect.api.Universe = <lazy>
+
+scala> apiru.typeTag[A].in(cm)
+res0: reflect.runtime.universe.TypeTag[A] = TypeTag[A]
+
+scala>
+
+scala>
diff --git a/test/files/run/t6937.scala b/test/files/run/t6937.scala
new file mode 100644
index 0000000..4b30894
--- /dev/null
+++ b/test/files/run/t6937.scala
@@ -0,0 +1,12 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ import scala.reflect.runtime.{universe => ru}
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.reflect.api.{Universe => ApiUniverse}
+ class A
+ lazy val apiru = ru: ApiUniverse
+ apiru.typeTag[A].in(cm)
+ """
+}
\ No newline at end of file
diff --git a/test/files/run/t6955.check b/test/files/run/t6955.check
new file mode 100644
index 0000000..0cfbf08
--- /dev/null
+++ b/test/files/run/t6955.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/t6955.scala b/test/files/run/t6955.scala
new file mode 100644
index 0000000..2610acd
--- /dev/null
+++ b/test/files/run/t6955.scala
@@ -0,0 +1,26 @@
+import scala.tools.partest.IcodeTest
+
+class Switches {
+ type Tag = Byte
+
+ def switchBad(i: Tag): Int = i match { // notice type of i is Tag = Byte
+ case 1 => 1
+ case 2 => 2
+ case 3 => 3
+ case _ => 0
+ }
+
+ def switchOkay(i: Byte): Int = i match { // notice type of i is Byte
+ case 1 => 1
+ case 2 => 2
+ case 3 => 3
+ case _ => 0
+ }
+}
+
+object Test extends IcodeTest {
+ // ensure we get two switches out of this -- ignore the rest of the output for robustness
+ // exclude the constant we emit for the "SWITCH ..." string below (we get the icode for all the code you see in this file)
+ override def show() = println(collectIcode("").filter(x => x.indexOf("SWITCH ...") >= 0 && x.indexOf("CONSTANT(") == -1).size)
+}
+
diff --git a/test/files/run/t6956.check b/test/files/run/t6956.check
new file mode 100644
index 0000000..0cfbf08
--- /dev/null
+++ b/test/files/run/t6956.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/t6956.scala b/test/files/run/t6956.scala
new file mode 100644
index 0000000..4a6583c
--- /dev/null
+++ b/test/files/run/t6956.scala
@@ -0,0 +1,26 @@
+import scala.tools.partest.IcodeTest
+
+class Switches {
+ private[this] final val ONE = 1
+
+ def switchBad(i: Byte): Int = i match {
+ case ONE => 1
+ case 2 => 2
+ case 3 => 3
+ case _ => 0
+ }
+
+ def switchOkay(i: Byte): Int = i match {
+ case 1 => 1
+ case 2 => 2
+ case 3 => 3
+ case _ => 0
+ }
+}
+
+object Test extends IcodeTest {
+ // ensure we get two switches out of this -- ignore the rest of the output for robustness
+ // exclude the constant we emit for the "SWITCH ..." string below (we get the icode for all the code you see in this file)
+ override def show() = println(collectIcode("").filter(x => x.indexOf("SWITCH ...") >= 0 && x.indexOf("CONSTANT(") == -1).size)
+}
+
diff --git a/test/files/run/t6957.scala b/test/files/run/t6957.scala
new file mode 100644
index 0000000..d0bf8e7
--- /dev/null
+++ b/test/files/run/t6957.scala
@@ -0,0 +1,8 @@
+object Test {
+ def main(args: Array[String]) {
+ class Foo
+ class Parent(f:Foo)
+ class Child extends Parent({val x=new Foo{}; x})
+ new Child
+ }
+}
diff --git a/test/files/run/t6968.check b/test/files/run/t6968.check
new file mode 100644
index 0000000..7a18941
--- /dev/null
+++ b/test/files/run/t6968.check
@@ -0,0 +1 @@
+1, 3, 5
diff --git a/test/files/run/t6968.scala b/test/files/run/t6968.scala
new file mode 100644
index 0000000..b5cadfd
--- /dev/null
+++ b/test/files/run/t6968.scala
@@ -0,0 +1,7 @@
+object Test {
+ def main(args: Array[String]) {
+ val mixedList = List(1,(1,2),4,(3,1),(5,4),6)
+ val as = for((a,b) <- mixedList) yield a
+ println(as.mkString(", "))
+ }
+}
diff --git a/test/files/run/t6969.check b/test/files/run/t6969.check
new file mode 100644
index 0000000..7829781
--- /dev/null
+++ b/test/files/run/t6969.check
@@ -0,0 +1 @@
+All threads completed.
diff --git a/test/files/run/t6969.scala b/test/files/run/t6969.scala
new file mode 100644
index 0000000..8cfc28c
--- /dev/null
+++ b/test/files/run/t6969.scala
@@ -0,0 +1,28 @@
+object Test {
+ private type Clearable = { def clear(): Unit }
+ private def choke() = {
+ try new Array[Object]((Runtime.getRuntime().maxMemory min Int.MaxValue).toInt)
+ catch {
+ case _: OutOfMemoryError => // what do you mean, out of memory?
+ case t: Throwable => println(t)
+ }
+ }
+ private def f(x: Clearable) = x.clear()
+ class Choker(id: Int) extends Thread {
+ private def g(iteration: Int) = {
+ val map = scala.collection.mutable.Map[Int, Int](1 -> 2)
+ try f(map) catch { case t: NullPointerException => println(s"Failed at $id/$iteration") ; throw t }
+ choke()
+ }
+ override def run() {
+ 1 to 50 foreach g
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+ val threads = 1 to 3 map (id => new Choker(id))
+ threads foreach (_.start())
+ threads foreach (_.join())
+ println("All threads completed.")
+ }
+}
diff --git a/test/files/run/t6989.check b/test/files/run/t6989.check
new file mode 100644
index 0000000..43d4bba
--- /dev/null
+++ b/test/files/run/t6989.check
@@ -0,0 +1,240 @@
+============
+sym = class PackagePrivateJavaClass, signature = ClassInfoType(...), owner = package foo
+isPrivate = false
+isProtected = false
+isPublic = false
+privateWithin = package foo
+============
+sym = constructor PackagePrivateJavaClass, signature = (x$1: Int, x$2: Int)foo.PackagePrivateJavaClass, owner = class PackagePrivateJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = variable privateField, signature = Int, owner = class PackagePrivateJavaClass
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = method privateMethod, signature = ()Unit, owner = class PackagePrivateJavaClass
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = variable protectedField, signature = Int, owner = class PackagePrivateJavaClass
+isPrivate = false
+isProtected = true
+isPublic = false
+privateWithin = package foo
+============
+sym = method protectedMethod, signature = ()Unit, owner = class PackagePrivateJavaClass
+isPrivate = false
+isProtected = true
+isPublic = false
+privateWithin = package foo
+============
+sym = variable publicField, signature = Int, owner = class PackagePrivateJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = method publicMethod, signature = ()Unit, owner = class PackagePrivateJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = object PackagePrivateJavaClass, signature = foo.PackagePrivateJavaClass.type, owner = package foo
+isPrivate = false
+isProtected = false
+isPublic = false
+privateWithin = package foo
+============
+sym = variable privateStaticField, signature = Int, owner = object PackagePrivateJavaClass
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = method privateStaticMethod, signature = ()Unit, owner = object PackagePrivateJavaClass
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = variable protectedStaticField, signature = Int, owner = object PackagePrivateJavaClass
+isPrivate = false
+isProtected = true
+isPublic = false
+privateWithin = package foo
+============
+sym = method protectedStaticMethod, signature = ()Unit, owner = object PackagePrivateJavaClass
+isPrivate = false
+isProtected = true
+isPublic = false
+privateWithin = package foo
+============
+sym = variable publicStaticField, signature = Int, owner = object PackagePrivateJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = method publicStaticMethod, signature = ()Unit, owner = object PackagePrivateJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = class JavaClass_1, signature = ClassInfoType(...), owner = package foo
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = class $PrivateJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = constructor $PrivateJavaClass, signature = ()JavaClass_1.this.$PrivateJavaClass, owner = class $PrivateJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = value this$0, signature = foo.JavaClass_1, owner = class $PrivateJavaClass
+isPrivate = false
+isProtected = false
+isPublic = false
+privateWithin = package foo
+============
+sym = object $PrivateJavaClass, signature = JavaClass_1.this.$PrivateJavaClass.type, owner = class JavaClass_1
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = class $ProtectedJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
+isPrivate = false
+isProtected = true
+isPublic = false
+privateWithin = package foo
+============
+sym = constructor $ProtectedJavaClass, signature = ()JavaClass_1.this.$ProtectedJavaClass, owner = class $ProtectedJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = value this$0, signature = foo.JavaClass_1, owner = class $ProtectedJavaClass
+isPrivate = false
+isProtected = false
+isPublic = false
+privateWithin = package foo
+============
+sym = object $ProtectedJavaClass, signature = JavaClass_1.this.$ProtectedJavaClass.type, owner = class JavaClass_1
+isPrivate = false
+isProtected = false
+isPublic = false
+privateWithin = package foo
+============
+sym = class $PublicJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = constructor $PublicJavaClass, signature = (x$1: foo.JavaClass_1)JavaClass_1.this.$PublicJavaClass, owner = class $PublicJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = value this$0, signature = foo.JavaClass_1, owner = class $PublicJavaClass
+isPrivate = false
+isProtected = false
+isPublic = false
+privateWithin = package foo
+============
+sym = object $PublicJavaClass, signature = JavaClass_1.this.$PublicJavaClass.type, owner = class JavaClass_1
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = constructor JavaClass_1, signature = ()foo.JavaClass_1, owner = class JavaClass_1
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = object JavaClass_1, signature = foo.JavaClass_1.type, owner = package foo
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = class PrivateStaticJavaClass, signature = ClassInfoType(...), owner = object JavaClass_1
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = constructor PrivateStaticJavaClass, signature = ()foo.JavaClass_1.PrivateStaticJavaClass, owner = class PrivateStaticJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = object PrivateStaticJavaClass, signature = foo.JavaClass_1.PrivateStaticJavaClass.type, owner = object JavaClass_1
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = class ProtectedStaticJavaClass, signature = ClassInfoType(...), owner = object JavaClass_1
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = constructor ProtectedStaticJavaClass, signature = ()foo.JavaClass_1.ProtectedStaticJavaClass, owner = class ProtectedStaticJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = object ProtectedStaticJavaClass, signature = foo.JavaClass_1.ProtectedStaticJavaClass.type, owner = object JavaClass_1
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = class PublicStaticJavaClass, signature = ClassInfoType(...), owner = object JavaClass_1
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = constructor PublicStaticJavaClass, signature = ()foo.JavaClass_1.PublicStaticJavaClass, owner = class PublicStaticJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = object PublicStaticJavaClass, signature = foo.JavaClass_1.PublicStaticJavaClass.type, owner = object JavaClass_1
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = variable staticField, signature = Int, owner = object JavaClass_1
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
diff --git a/test/files/run/t6989/JavaClass_1.java b/test/files/run/t6989/JavaClass_1.java
new file mode 100644
index 0000000..72ec4d6
--- /dev/null
+++ b/test/files/run/t6989/JavaClass_1.java
@@ -0,0 +1,43 @@
+package foo;
+
+// Originally composed to accommodate pull request feedback, this test has
+// uncovered a handful of bugs in FromJavaClassCompleter, namely:
+// * SI-7071 non-public ctors get lost
+// * SI-7072 inner classes are read incorrectly
+
+// I'm leaving the incorrect results of FromJavaClassCompleters in the check
+// file, so that we get notified when something changes there.
+// ^^^ It's not clear what those incorrect results were, but the fix for SI-7359
+// (backport of fix for SI-6548) has probably resolved some of these. OP, please revisit this comment.
+
+class PackagePrivateJavaClass {
+ private int privateField = 0;
+ protected int protectedField = 1;
+ public int publicField = 2;
+
+ private static int privateStaticField = 3;
+ protected static int protectedStaticField = 4;
+ public static int publicStaticField = 5;
+
+ private void privateMethod() {}
+ protected void protectedMethod() {}
+ public void publicMethod() {}
+
+ private static void privateStaticMethod() {}
+ protected static void protectedStaticMethod() {}
+ public static void publicStaticMethod() {}
+
+ private PackagePrivateJavaClass() {}
+ protected PackagePrivateJavaClass(int x) {}
+ public PackagePrivateJavaClass(int x, int y) {}
+}
+
+public class JavaClass_1 {
+ private class PrivateJavaClass {}
+ private static class PrivateStaticJavaClass {}
+ protected class ProtectedJavaClass {}
+ private static class ProtectedStaticJavaClass {}
+ public class PublicJavaClass {}
+ public static class PublicStaticJavaClass {}
+ private static int staticField = 0;
+}
\ No newline at end of file
diff --git a/test/files/run/t6989/Test_2.scala b/test/files/run/t6989/Test_2.scala
new file mode 100644
index 0000000..e48e824
--- /dev/null
+++ b/test/files/run/t6989/Test_2.scala
@@ -0,0 +1,42 @@
+import scala.reflect.runtime.universe._
+
+// Originally composed to accommodate pull request feedback, this test has
+// uncovered a handful of bugs in FromJavaClassCompleter, namely:
+// * SI-7071 non-public ctors get lost
+// * SI-7072 inner classes are read incorrectly
+
+// I'm leaving the incorrect results of FromJavaClassCompleters in the check
+// file, so that we get notified when something changes there.
+
+package object foo {
+ def testAll(): Unit = {
+ test(typeOf[foo.PackagePrivateJavaClass].typeSymbol)
+ test(typeOf[foo.PackagePrivateJavaClass].typeSymbol.companionSymbol)
+ test(typeOf[foo.JavaClass_1].typeSymbol)
+ test(typeOf[foo.JavaClass_1].typeSymbol.companionSymbol)
+ }
+
+ def test(sym: Symbol): Unit = {
+ printSymbolDetails(sym)
+ if (sym.isClass || sym.isModule) {
+ sym.typeSignature.declarations.toList.sortBy(_.name.toString) foreach test
+ }
+ }
+
+ def printSymbolDetails(sym: Symbol): Unit = {
+ def stableSignature(sym: Symbol) = sym.typeSignature match {
+ case ClassInfoType(_, _, _) => "ClassInfoType(...)"
+ case tpe => tpe.toString
+ }
+ println("============")
+ println(s"sym = $sym, signature = ${stableSignature(sym)}, owner = ${sym.owner}")
+ println(s"isPrivate = ${sym.isPrivate}")
+ println(s"isProtected = ${sym.isProtected}")
+ println(s"isPublic = ${sym.isPublic}")
+ println(s"privateWithin = ${sym.privateWithin}")
+ }
+}
+
+object Test extends App {
+ foo.testAll()
+}
\ No newline at end of file
diff --git a/test/files/run/t7008-scala-defined.check b/test/files/run/t7008-scala-defined.check
new file mode 100644
index 0000000..84ed626
--- /dev/null
+++ b/test/files/run/t7008-scala-defined.check
@@ -0,0 +1,7 @@
+<init>: List(throws[NullPointerException](""))
+bar: List(throws[E1](""))
+baz: List(throws[IllegalStateException](""))
+=============
+<init>: List(throws[NullPointerException](""))
+bar: List(throws[E1](""))
+baz: List(throws[IllegalStateException](""))
diff --git a/test/files/run/t7008-scala-defined/Impls_Macros_2.scala b/test/files/run/t7008-scala-defined/Impls_Macros_2.scala
new file mode 100644
index 0000000..94fd990
--- /dev/null
+++ b/test/files/run/t7008-scala-defined/Impls_Macros_2.scala
@@ -0,0 +1,12 @@
+import language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = {
+ val decls = c.typeOf[ScalaClassWithCheckedExceptions_1[_]].declarations.toList
+ val s = decls.sortBy(_.name.toString).map(decl => (s"${decl.name}: ${decl.annotations}")).mkString(scala.compat.Platform.EOL)
+ c.universe.reify(println(c.literal(s).splice))
+ }
+
+ def foo = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/t7008-scala-defined/ScalaClassWithCheckedExceptions_1.scala b/test/files/run/t7008-scala-defined/ScalaClassWithCheckedExceptions_1.scala
new file mode 100644
index 0000000..7783c87
--- /dev/null
+++ b/test/files/run/t7008-scala-defined/ScalaClassWithCheckedExceptions_1.scala
@@ -0,0 +1,6 @@
+class ScalaClassWithCheckedExceptions_1[E1 <: Exception] @throws[NullPointerException]("") () {
+ @throws[E1]("") def bar() {}
+ @throws[IllegalStateException]("") def baz(x: Int) {}
+ // FIXME: SI-7066
+ // @throws[E2]("") def foo[E2 <: Exception] {}
+}
\ No newline at end of file
diff --git a/test/files/run/t7008-scala-defined/Test_3.scala b/test/files/run/t7008-scala-defined/Test_3.scala
new file mode 100644
index 0000000..03bb79d
--- /dev/null
+++ b/test/files/run/t7008-scala-defined/Test_3.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ Macros.foo
+ println("=============")
+
+ val decls = typeOf[ScalaClassWithCheckedExceptions_1[_]].declarations.toList
+ decls sortBy (_.name.toString) foreach (decl => println(s"${decl.name}: ${decl.annotations}"))
+}
\ No newline at end of file
diff --git a/test/files/run/t7008.check b/test/files/run/t7008.check
new file mode 100644
index 0000000..ee077f9
--- /dev/null
+++ b/test/files/run/t7008.check
@@ -0,0 +1,9 @@
+<init>: List(throws[NullPointerException](classOf[java.lang.NullPointerException]))
+bar: List(throws[Exception](classOf[java.lang.Exception]))
+baz: List(throws[IllegalStateException](classOf[java.lang.IllegalStateException]))
+foo: List(throws[Exception](classOf[java.lang.Exception]))
+=============
+<init>: List(throws[java.lang.NullPointerException](classOf[java.lang.NullPointerException]))
+bar: List(throws[java.lang.Exception](classOf[java.lang.Exception]))
+baz: List(throws[java.lang.IllegalStateException](classOf[java.lang.IllegalStateException]))
+foo: List(throws[java.lang.Exception](classOf[java.lang.Exception]))
diff --git a/test/files/run/t7008/Impls_Macros_2.scala b/test/files/run/t7008/Impls_Macros_2.scala
new file mode 100644
index 0000000..7a17314
--- /dev/null
+++ b/test/files/run/t7008/Impls_Macros_2.scala
@@ -0,0 +1,12 @@
+import language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = {
+ val decls = c.typeOf[JavaClassWithCheckedExceptions_1[_]].declarations.toList
+ val s = decls.sortBy(_.name.toString).map(decl => (s"${decl.name}: ${decl.annotations}")).mkString(scala.compat.Platform.EOL)
+ c.universe.reify(println(c.literal(s).splice))
+ }
+
+ def foo = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/t7008/JavaClassWithCheckedExceptions_1.java b/test/files/run/t7008/JavaClassWithCheckedExceptions_1.java
new file mode 100644
index 0000000..dda2128
--- /dev/null
+++ b/test/files/run/t7008/JavaClassWithCheckedExceptions_1.java
@@ -0,0 +1,7 @@
+class JavaClassWithCheckedExceptions_1<E1 extends Exception> {
+ public JavaClassWithCheckedExceptions_1() throws NullPointerException {}
+
+ public void bar() throws E1 {}
+ public void baz(int x) throws IllegalStateException {}
+ public <E2 extends Exception> void foo() throws E2 {}
+}
\ No newline at end of file
diff --git a/test/files/run/t7008/Test_3.scala b/test/files/run/t7008/Test_3.scala
new file mode 100644
index 0000000..b2961a8
--- /dev/null
+++ b/test/files/run/t7008/Test_3.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ Macros.foo
+ println("=============")
+
+ val decls = typeOf[JavaClassWithCheckedExceptions_1[_]].declarations.toList
+ decls sortBy (_.name.toString) foreach (decl => println(s"${decl.name}: ${decl.annotations}"))
+}
\ No newline at end of file
diff --git a/test/files/run/t7039.check b/test/files/run/t7039.check
new file mode 100644
index 0000000..9549060
--- /dev/null
+++ b/test/files/run/t7039.check
@@ -0,0 +1 @@
+Matched!
diff --git a/test/files/run/t7039.scala b/test/files/run/t7039.scala
new file mode 100644
index 0000000..475c4ae
--- /dev/null
+++ b/test/files/run/t7039.scala
@@ -0,0 +1,11 @@
+object UnapplySeqTest {
+ def unapplySeq(any: Any): Option[(Int, Seq[Int])] = Some((5, List(1)))
+}
+
+object Test extends App {
+ null match {
+ case UnapplySeqTest(5) => println("uh-oh")
+ case UnapplySeqTest(5, 1) => println("Matched!") // compiles
+ case UnapplySeqTest(5, xs @ _*) => println("toooo long: "+ (xs: Seq[Int]))
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t7046.check b/test/files/run/t7046.check
new file mode 100644
index 0000000..427f1ce
--- /dev/null
+++ b/test/files/run/t7046.check
@@ -0,0 +1,2 @@
+Set(class D, class E)
+Set(class D, class E)
diff --git a/test/files/run/t7046.scala b/test/files/run/t7046.scala
new file mode 100644
index 0000000..647a15c
--- /dev/null
+++ b/test/files/run/t7046.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+sealed class C
+class D extends C
+class E extends C
+
+object Test extends App {
+ val c = cm.staticClass("C")
+ println(c.knownDirectSubclasses)
+ c.typeSignature
+ println(c.knownDirectSubclasses)
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/t7047.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t7047.check
diff --git a/test/files/run/t7047/Impls_Macros_1.scala b/test/files/run/t7047/Impls_Macros_1.scala
new file mode 100644
index 0000000..2992e3e
--- /dev/null
+++ b/test/files/run/t7047/Impls_Macros_1.scala
@@ -0,0 +1,19 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+class Foo
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ try {
+ c.inferImplicitValue(typeOf[Foo], silent = false)
+ c.abort(c.enclosingPosition, "silent=false is not working")
+ } catch {
+ case _: Exception =>
+ }
+ c.literalNull
+ }
+
+ def foo = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/t7047/Test_2.scala b/test/files/run/t7047/Test_2.scala
new file mode 100644
index 0000000..acfddae
--- /dev/null
+++ b/test/files/run/t7047/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/run/t7064-old-style-supercalls.check b/test/files/run/t7064-old-style-supercalls.check
new file mode 100644
index 0000000..0cfbf08
--- /dev/null
+++ b/test/files/run/t7064-old-style-supercalls.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/t7064-old-style-supercalls.scala b/test/files/run/t7064-old-style-supercalls.scala
new file mode 100644
index 0000000..cffa7b1
--- /dev/null
+++ b/test/files/run/t7064-old-style-supercalls.scala
@@ -0,0 +1,48 @@
+import scala.reflect.runtime.universe._
+import Flag._
+import definitions._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val PARAMACCESSOR = (1L << 29).asInstanceOf[FlagSet]
+
+ // these trees can be acquired by running the following incantation:
+ // echo 'class C(val x: Int); class D extends C(2)' > foo.scala
+ // ./scalac -Xprint:parser -Yshow-trees-stringified -Yshow-trees-compact foo.scala
+
+ val c = ClassDef(
+ Modifiers(), newTypeName("C"), List(),
+ Template(
+ List(Select(Ident(ScalaPackage), newTypeName("AnyRef"))),
+ emptyValDef,
+ List(
+ ValDef(Modifiers(PARAMACCESSOR), newTermName("x"), Ident(newTypeName("Int")), EmptyTree),
+ DefDef(
+ Modifiers(),
+ nme.CONSTRUCTOR,
+ List(),
+ List(List(ValDef(Modifiers(PARAM | PARAMACCESSOR), newTermName("x"), Ident(newTypeName("Int")), EmptyTree))),
+ TypeTree(),
+ Block(
+ List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())),
+ Literal(Constant(())))))))
+ val d = ClassDef(
+ Modifiers(), newTypeName("D"), List(),
+ Template(
+ List(Ident(newTypeName("C"))),
+ emptyValDef,
+ List(
+ DefDef(
+ Modifiers(),
+ nme.CONSTRUCTOR,
+ List(),
+ List(List()),
+ TypeTree(),
+ Block(
+ List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List(Literal(Constant(2))))),
+ Literal(Constant(())))))))
+ val result = Select(Apply(Select(New(Ident(newTypeName("D"))), nme.CONSTRUCTOR), List()), newTermName("x"))
+ println(cm.mkToolBox().eval(Block(List(c, d), result)))
+}
\ No newline at end of file
diff --git a/test/files/run/t7074.check b/test/files/run/t7074.check
new file mode 100644
index 0000000..ab9cf11
--- /dev/null
+++ b/test/files/run/t7074.check
@@ -0,0 +1,9 @@
+<a/>
+<a b="2" c="3" d="1"/>
+<a b="2" c="4" d="1" e="3" f="5"/>
+<a b="5" c="4" d="3" e="2" f="1"/>
+<a b="1" c="2" d="3" e="4" f="5"/>
+<a a:b="2" a:c="3" a:d="1"/>
+<a a:b="2" a:c="4" a:d="1" a:e="3" a:f="5"/>
+<a a:b="5" a:c="4" a:d="3" a:e="2" a:f="1"/>
+<a a:b="1" a:c="2" a:d="3" a:e="4" a:f="5"/>
diff --git a/test/files/run/t7074.scala b/test/files/run/t7074.scala
new file mode 100644
index 0000000..693a076
--- /dev/null
+++ b/test/files/run/t7074.scala
@@ -0,0 +1,15 @@
+import scala.xml.Utility.sort
+
+object Test extends App {
+ println(sort(<a/>))
+ println(sort(<a d="1" b="2" c="3"/>))
+ println(sort(<a d="1" b="2" e="3" c="4" f="5"/>))
+ println(sort(<a f="1" e="2" d="3" c="4" b="5"/>))
+ println(sort(<a b="1" c="2" d="3" e="4" f="5"/>))
+
+ println(sort(<a a:d="1" a:b="2" a:c="3"/>))
+ println(sort(<a a:d="1" a:b="2" a:e="3" a:c="4" a:f="5"/>))
+ println(sort(<a a:f="1" a:e="2" a:d="3" a:c="4" a:b="5"/>))
+ println(sort(<a a:b="1" a:c="2" a:d="3" a:e="4" a:f="5"/>))
+}
+
diff --git a/test/files/run/t7096.check b/test/files/run/t7096.check
new file mode 100644
index 0000000..6f1cef6
--- /dev/null
+++ b/test/files/run/t7096.check
@@ -0,0 +1,2 @@
+testing symbol List(method foo, class Base, package ano, package <root>), param value x, xRefs List(x)
+testing symbol List(method foo, class Sub, package ano, package <root>), param value x, xRefs List(x)
diff --git a/test/files/run/t7096.scala b/test/files/run/t7096.scala
new file mode 100644
index 0000000..e9c0323
--- /dev/null
+++ b/test/files/run/t7096.scala
@@ -0,0 +1,36 @@
+import scala.tools.partest._
+import scala.tools.nsc._
+
+object Test extends CompilerTest {
+ import global._
+ import definitions._
+
+ override def code = """
+package ano
+
+class ann(x: Any) extends annotation.TypeConstraint
+
+abstract class Base {
+ def foo(x: String): String @ann(x.trim())
+}
+
+class Sub extends Base {
+ def foo(x: String): String @ann(x.trim()) = x
+}
+ """
+
+ object syms extends SymsInPackage("ano")
+ import syms._
+
+ def check(source: String, unit: global.CompilationUnit) {
+ afterTyper {
+ terms.filter(_.name.toString == "foo").foreach(sym => {
+ val xParam = sym.tpe.paramss.flatten.head
+ val annot = sym.tpe.finalResultType.annotations.head
+ val xRefs = annot.args.head.filter(t => t.symbol == xParam)
+ println(s"testing symbol ${sym.ownerChain}, param $xParam, xRefs $xRefs")
+ assert(xRefs.length == 1, xRefs)
+ })
+ }
+ }
+}
diff --git a/test/files/run/t7106.check b/test/files/run/t7106.check
new file mode 100644
index 0000000..9a4bb43
--- /dev/null
+++ b/test/files/run/t7106.check
@@ -0,0 +1,6 @@
+[ok] q1 I private final
+[ok] q3 I private final
+[ok] <init> (III)V public
+[ok] bippy1 ()I public
+[ok] bippy2 ()I public
+[ok] bippy3 ()I public
diff --git a/test/files/run/t7106/Analyzed_1.scala b/test/files/run/t7106/Analyzed_1.scala
new file mode 100644
index 0000000..a2ddebc
--- /dev/null
+++ b/test/files/run/t7106/Analyzed_1.scala
@@ -0,0 +1,14 @@
+
+abstract class Base0 { def p2: Int }
+class Base(p1: Int, override val p2: Int) extends Base0
+
+abstract class Sub1(q1: Int, q2: Int, q3: Int) extends Base(q1, q2) {
+ def bippy1 = q1
+ def bippy2 = q2
+ def bippy3 = q3
+}
+abstract class Sub2(q1: Int, q2: Int, q3: Int) extends Base(q1, q2) {
+ def bippy1 = q1
+ def bippy2 = p2
+ def bippy3 = q3
+}
diff --git a/test/files/run/t7106/test.scala b/test/files/run/t7106/test.scala
new file mode 100644
index 0000000..3584a27
--- /dev/null
+++ b/test/files/run/t7106/test.scala
@@ -0,0 +1,10 @@
+import scala.tools.partest.BytecodeTest
+
+object Test extends BytecodeTest {
+ def show {
+ val node1 = loadClassNode("Sub1")
+ val node2 = loadClassNode("Sub2")
+
+ sameMethodAndFieldSignatures(node1, node2)
+ }
+}
diff --git a/test/files/run/t7171.scala b/test/files/run/t7171.scala
new file mode 100644
index 0000000..97585b9
--- /dev/null
+++ b/test/files/run/t7171.scala
@@ -0,0 +1,22 @@
+trait T {
+ final case class A()
+
+ // Was:
+ // error: scrutinee is incompatible with pattern type;
+ // found : T.this.A
+ // required: T#A
+ def foo(a: T#A) = a match {
+ case _: A => true; case _ => false
+ }
+}
+
+object Test extends App {
+ val t1 = new T {}
+ val t2 = new T {}
+ val a1 = new t1.A()
+ val a2 = new t1.A()
+ assert(t1.foo(a1))
+ // as noted in the unchecked warning (tested in the corresponding neg test),
+ // the outer pointer isn't checked
+ assert(t1.foo(a2))
+}
diff --git a/test/files/run/t7185.check b/test/files/run/t7185.check
new file mode 100644
index 0000000..455c1aa
--- /dev/null
+++ b/test/files/run/t7185.check
@@ -0,0 +1,34 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.tools.reflect.ToolBox
+import scala.tools.reflect.ToolBox
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> object O { def apply() = 0 }
+defined module O
+
+scala> val ORef = reify { O }.tree
+ORef: reflect.runtime.universe.Tree = $read.O
+
+scala> val tree = Apply(Block(Nil, Block(Nil, ORef)), Nil)
+tree: reflect.runtime.universe.Apply =
+{
+ {
+ $read.O
+ }
+}()
+
+scala> {val tb = reflect.runtime.currentMirror.mkToolBox(); tb.typeCheck(tree): Any}
+res0: Any =
+{
+ {
+ $read.O.apply()
+ }
+}
+
+scala>
diff --git a/test/files/run/t7185.scala b/test/files/run/t7185.scala
new file mode 100644
index 0000000..d9d913e
--- /dev/null
+++ b/test/files/run/t7185.scala
@@ -0,0 +1,12 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def code = """
+import scala.tools.reflect.ToolBox
+import scala.reflect.runtime.universe._
+object O { def apply() = 0 }
+val ORef = reify { O }.tree
+val tree = Apply(Block(Nil, Block(Nil, ORef)), Nil)
+{val tb = reflect.runtime.currentMirror.mkToolBox(); tb.typeCheck(tree): Any}
+"""
+}
diff --git a/test/files/run/t7200.scala b/test/files/run/t7200.scala
new file mode 100644
index 0000000..ba342df
--- /dev/null
+++ b/test/files/run/t7200.scala
@@ -0,0 +1,34 @@
+import language.higherKinds
+
+object Test extends App {
+
+ // Slice of comonad is where this came up
+ trait Foo[F[_]] {
+ def coflatMap[A, B](f: F[A] => B): F[A] => F[B]
+ }
+
+ // A non-empty list
+ case class Nel[A](head: A, tail: List[A])
+
+ object NelFoo extends Foo[Nel] {
+
+ // It appears that the return type for recursive calls is not inferred
+ // properly, yet no warning is issued. Providing a return type or
+ // type arguments for the recursive call fixes the problem.
+
+ def coflatMap[A, B](f: Nel[A] => B) = // ok w/ return type
+ l => Nel(f(l), l.tail match {
+ case Nil => Nil
+ case h :: t => {
+ val r = coflatMap(f)(Nel(h, t)) // ok w/ type args
+ r.head :: r.tail
+ }
+ })
+ }
+
+ // Without a recursive call all is well, but with recursion we get a
+ // ClassCastException from Integer to Nothing
+ NelFoo.coflatMap[Int, Int](_.head + 1)(Nel(1, Nil)) // Ok
+ NelFoo.coflatMap[Int, Int](_.head + 1)(Nel(1, List(2))) // CCE
+
+}
diff --git a/test/files/run/t7214.scala b/test/files/run/t7214.scala
new file mode 100644
index 0000000..ff1ea80
--- /dev/null
+++ b/test/files/run/t7214.scala
@@ -0,0 +1,57 @@
+// pattern matcher crashes here trying to synthesize an uneeded outer test.
+// no-symbol does not have an owner
+// at scala.reflect.internal.SymbolTable.abort(SymbolTable.scala:49)
+// at scala.tools.nsc.Global.abort(Global.scala:253)
+// at scala.reflect.internal.Symbols$NoSymbol.owner(Symbols.scala:3248)
+// at scala.reflect.internal.Symbols$Symbol.effectiveOwner(Symbols.scala:678)
+// at scala.reflect.internal.Symbols$Symbol.isDefinedInPackage(Symbols.scala:664)
+// at scala.reflect.internal.TreeGen.mkAttributedSelect(TreeGen.scala:188)
+// at scala.reflect.internal.TreeGen.mkAttributedRef(TreeGen.scala:124)
+// at scala.tools.nsc.ast.TreeDSL$CODE$.REF(TreeDSL.scala:308)
+// at scala.tools.nsc.typechecker.PatternMatching$TreeMakers$TypeTestTreeMaker$treeCondStrategy$.outerTest(PatternMatching.scala:1209)
+class Crash {
+ type Alias = C#T
+
+ val c = new C
+ val t = new c.T
+
+ // Crash via a Typed Pattern...
+ (t: Any) match {
+ case e: Alias =>
+ }
+
+ // ... or via a Typed Extractor Pattern.
+ object Extractor {
+ def unapply(a: Alias): Option[Any] = None
+ }
+ (t: Any) match {
+ case Extractor() =>
+ case _ =>
+ }
+
+ // checking that correct outer tests are applied when
+ // aliases for path dependent types are involved.
+ val c2 = new C
+ type CdotT = c.T
+ type C2dotT = c2.T
+
+ val outerField = t.getClass.getDeclaredFields.find(_.getName contains ("outer")).get
+ outerField.setAccessible(true)
+
+ (t: Any) match {
+ case _: C2dotT =>
+ println(s"!!! wrong match. t.outer=${outerField.get(t)} / c2 = $c2") // this matches on 2.10.0
+ case _: CdotT =>
+ case _ =>
+ println(s"!!! wrong match. t.outer=${outerField.get(t)} / c = $c")
+ }
+}
+
+class C {
+ class T
+}
+
+object Test extends App {
+ new Crash
+}
+
diff --git a/test/files/run/t7215.scala b/test/files/run/t7215.scala
new file mode 100644
index 0000000..c93e97f
--- /dev/null
+++ b/test/files/run/t7215.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ List[List[Any]]().transpose.isEmpty
+ Array[Array[Any]]().transpose.isEmpty
+ Vector[Vector[Any]]().transpose.isEmpty
+ Stream[Stream[Any]]().transpose.isEmpty
+}
diff --git a/test/files/run/t7235.check b/test/files/run/t7235.check
new file mode 100644
index 0000000..9cb9c55
--- /dev/null
+++ b/test/files/run/t7235.check
@@ -0,0 +1,4 @@
+C
+List(C)
+private val _ = _
+List()
diff --git a/test/files/run/t7235.scala b/test/files/run/t7235.scala
new file mode 100644
index 0000000..6039189
--- /dev/null
+++ b/test/files/run/t7235.scala
@@ -0,0 +1,14 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+class C
+
+object Test extends App {
+ val Block(List(ValDef(_, _, tpt: CompoundTypeTree, _)), _) = reify{ val x: C{} = ??? }.tree
+ println(tpt)
+ println(tpt.templ.parents)
+ println(tpt.templ.self)
+ println(tpt.templ.body)
+}
diff --git a/test/files/jvm/bug680.check b/test/files/run/t7240.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t7240.check
diff --git a/test/files/run/t7240/Macros_1.scala b/test/files/run/t7240/Macros_1.scala
new file mode 100644
index 0000000..6465e18
--- /dev/null
+++ b/test/files/run/t7240/Macros_1.scala
@@ -0,0 +1,48 @@
+package bakery
+
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+
+trait FailureCake {
+ implicit def liftAnyFails[T: Manifest]: Any = ???
+
+ // This works
+ // implicit def liftAny[T]: Any = ???
+}
+
+object Bakery {
+
+ def failure: Any = macro failureImpl
+ def failureImpl(c: Context): c.Expr[Any] = {
+ import c.universe._
+
+ def dslTrait(dslName: String) = {
+ val names = dslName.split("\\.").toList.reverse
+ assert(names.length >= 1, "DSL trait name must be in the valid format. DSL trait name is " + dslName)
+
+ val tpeName = newTypeName(names.head)
+ names.tail.reverse match {
+ case head :: tail ⇒
+ Select(tail.foldLeft[Tree](Ident(newTermName(head)))((tree, name) ⇒ Select(tree, newTermName(name))), tpeName)
+ case Nil ⇒
+ Ident(tpeName)
+ }
+ }
+
+ def composeDSL(transformedBody: Tree) =
+ ClassDef(Modifiers(), newTypeName("eval"), List(), Template(
+ List(dslTrait("bakery.FailureCake")),
+ emptyValDef,
+ List(
+ DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(),
+ Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(())))),
+ DefDef(Modifiers(), newTermName("main"), List(), List(List()), Ident(newTypeName("Any")), transformedBody))))
+
+ def constructor = Apply(Select(New(Ident(newTypeName("eval"))), nme.CONSTRUCTOR), List())
+
+ c.eval(c.Expr[Any](
+ c.resetAllAttrs(Block(composeDSL(Literal(Constant(1))), constructor))))
+
+ c.Expr[Any](Literal(Constant(1)))
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t7240/Test_2.scala b/test/files/run/t7240/Test_2.scala
new file mode 100644
index 0000000..2450bda
--- /dev/null
+++ b/test/files/run/t7240/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ bakery.Bakery.failure
+}
\ No newline at end of file
diff --git a/test/files/run/t7242.scala b/test/files/run/t7242.scala
new file mode 100644
index 0000000..c995336
--- /dev/null
+++ b/test/files/run/t7242.scala
@@ -0,0 +1,71 @@
+class CrashTest {
+ def foo = ()
+ trait CrashTestTable {
+ def cols = foo
+ }
+ // This was leading to a class between the mixed in
+ // outer accessor and the outer accessor of this object.
+ object CrashTestTable extends CrashTestTable {
+ foo
+ cols
+ }
+}
+
+class CrashTest1 {
+ def foo = ()
+ class CrashTestTable {
+ def cols = foo
+ }
+ object CrashTestTable extends CrashTestTable {
+ foo
+ cols
+ }
+}
+
+class CrashTest2 {
+ def foo = ()
+ trait CrashTestTable {
+ def cols = foo
+ }
+ object Obj extends CrashTestTable {
+ foo
+ cols
+ }
+}
+
+class CrashTest3 {
+ def foo = ()
+
+ def meth() {
+ trait CrashTestTable {
+ def cols = foo
+ }
+ object Obj extends CrashTestTable {
+ foo
+ cols
+ }
+ Obj
+ }
+}
+
+object Test extends App {
+ {
+ val c = new CrashTest
+ c.CrashTestTable
+ }
+
+ {
+ val c = new CrashTest1
+ c.CrashTestTable
+ }
+
+ {
+ val c = new CrashTest2
+ c.Obj
+ }
+
+ {
+ val c = new CrashTest3
+ c.meth()
+ }
+}
diff --git a/test/files/run/t7246.check b/test/files/run/t7246.check
new file mode 100755
index 0000000..ce01362
--- /dev/null
+++ b/test/files/run/t7246.check
@@ -0,0 +1 @@
+hello
diff --git a/test/files/run/t7246/Outer.java b/test/files/run/t7246/Outer.java
new file mode 100755
index 0000000..163276f
--- /dev/null
+++ b/test/files/run/t7246/Outer.java
@@ -0,0 +1,4 @@
+public class Outer {
+ public class Inner {
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t7246/Test.scala b/test/files/run/t7246/Test.scala
new file mode 100755
index 0000000..9f23ca8
--- /dev/null
+++ b/test/files/run/t7246/Test.scala
@@ -0,0 +1,16 @@
+object Test extends App {
+
+ val so = new SubOuter
+ val si = new so.SubInner
+ println(si.bar)
+}
+
+class SubOuter extends Outer {
+
+ val foo = "hello"
+
+ class SubInner extends Inner {
+ def bar = foo
+ }
+
+}
\ No newline at end of file
diff --git a/test/files/run/t7246b.check b/test/files/run/t7246b.check
new file mode 100755
index 0000000..5073bd8
--- /dev/null
+++ b/test/files/run/t7246b.check
@@ -0,0 +1,2 @@
+base
+sub
diff --git a/test/files/run/t7246b/Base.scala b/test/files/run/t7246b/Base.scala
new file mode 100755
index 0000000..4e71d33
--- /dev/null
+++ b/test/files/run/t7246b/Base.scala
@@ -0,0 +1,7 @@
+class Base {
+ val baseOuter = "base"
+
+ class BaseInner {
+ val baseInner = baseOuter
+ }
+}
diff --git a/test/files/run/t7246b/Outer.java b/test/files/run/t7246b/Outer.java
new file mode 100755
index 0000000..53a7931
--- /dev/null
+++ b/test/files/run/t7246b/Outer.java
@@ -0,0 +1,4 @@
+public class Outer extends Base {
+ public class Inner extends BaseInner {
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t7246b/Test.scala b/test/files/run/t7246b/Test.scala
new file mode 100755
index 0000000..f0982ea
--- /dev/null
+++ b/test/files/run/t7246b/Test.scala
@@ -0,0 +1,14 @@
+object Test extends App {
+
+ val so = new SubOuter
+ val si = new so.SubInner
+ println(si.baseInner)
+ println(si.subInner)
+}
+
+class SubOuter extends Outer {
+ val subOuter = "sub"
+ class SubInner extends Inner {
+ def subInner = subOuter
+ }
+}
diff --git a/test/files/run/t7249.check b/test/files/run/t7249.check
new file mode 100644
index 0000000..7777e0a
--- /dev/null
+++ b/test/files/run/t7249.check
@@ -0,0 +1 @@
+Yup!
diff --git a/test/files/run/t7249.scala b/test/files/run/t7249.scala
new file mode 100644
index 0000000..375df5c
--- /dev/null
+++ b/test/files/run/t7249.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+ def bnToLambda(s: => String): () => String = () => s
+ var x: () => String = () => sys.error("Nope")
+ val y = bnToLambda { x() }
+ x = () => "Yup!"
+ println(y())
+}
diff --git a/test/files/run/t7265.scala b/test/files/run/t7265.scala
new file mode 100644
index 0000000..c556930
--- /dev/null
+++ b/test/files/run/t7265.scala
@@ -0,0 +1,27 @@
+
+import scala.util.Properties._
+
+object Test extends App {
+
+ setProp("java.specification.version", "1.7")
+
+ assert( isJavaAtLeast("1.5"))
+ assert( isJavaAtLeast("1.6"))
+ assert( isJavaAtLeast("1.7"))
+ assert(!isJavaAtLeast("1.8"))
+ assert(!isJavaAtLeast("1.71"))
+
+ failing(isJavaAtLeast("1.a"))
+ failing(isJavaAtLeast("1"))
+ failing(isJavaAtLeast(""))
+ failing(isJavaAtLeast("."))
+ failing(isJavaAtLeast(".5"))
+ failing(isJavaAtLeast("1.7.1"))
+
+ def failing(u: =>Unit) = try {
+ u
+ assert(false, "Expected Exception")
+ } catch {
+ case _: NumberFormatException =>
+ }
+}
diff --git a/test/files/run/t7269.scala b/test/files/run/t7269.scala
new file mode 100644
index 0000000..d22e57d
--- /dev/null
+++ b/test/files/run/t7269.scala
@@ -0,0 +1,32 @@
+import scala.collection.JavaConversions._
+import scala.collection.mutable
+
+object Test extends App {
+
+ def testMap(): Unit = {
+ val mapJ = new java.util.HashMap[Int, String]
+ val mapS: mutable.Map[Int, String] = mapJ
+
+ (10 to 20).foreach(i => mapS += ((i, i.toString)))
+ assert(11 == mapS.size)
+
+ // ConcurrentModificationException thrown in the following line
+ mapS.retain((i, str) => i % 2 == 0)
+ assert(6 == mapS.size)
+ }
+
+ def testSet(): Unit = {
+ val mapJ = new java.util.HashSet[Int]
+ val mapS: mutable.Set[Int] = mapJ
+
+ (10 to 20).foreach(i => mapS += i)
+ assert(11 == mapS.size)
+
+ // ConcurrentModificationException thrown in the following line
+ mapS.retain((i) => i % 2 == 0)
+ assert(6 == mapS.size)
+ }
+
+ testSet()
+ testMap()
+}
diff --git a/test/files/run/t7271.check b/test/files/run/t7271.check
new file mode 100644
index 0000000..dcd828a
--- /dev/null
+++ b/test/files/run/t7271.check
@@ -0,0 +1,12 @@
+[[syntax trees at end of parser]] // newSource1.scala
+[0:91]package [0:0]<empty> {
+ [0:91]class C extends [8:91][91]scala.AnyRef {
+ [8]def <init>() = [8]{
+ [8][8][8]super.<init>();
+ [8]()
+ };
+ [16:44]def quote = [28:44]<28:44><28:44>[28]StringContext([30:34]"foo", [40:44]"baz").s([35:39]this);
+ [51:85]def tripleQuote = [69:85]<69:85><69:85>[69]StringContext([71:75]"foo", [81:85]"baz").s([76:80]this)
+ }
+}
+
diff --git a/test/files/run/t7271.scala b/test/files/run/t7271.scala
new file mode 100644
index 0000000..6fccf14
--- /dev/null
+++ b/test/files/run/t7271.scala
@@ -0,0 +1,34 @@
+import scala.tools.partest._
+import java.io._
+import scala.tools.nsc._
+import scala.tools.nsc.util.CommandLineParser
+import scala.tools.nsc.{Global, Settings, CompilerCommand}
+import scala.tools.nsc.reporters.ConsoleReporter
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:parser -Ystop-after:parser -d " + testOutput.path
+
+ override def code = """
+ class C {
+ def quote = s"foo${this}baz"
+ def tripleQuote = s"foo${this}baz"
+ }
+ """.trim
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ compile()
+ System.setErr(prevErr)
+ }
+
+ override def newCompiler(args: String*): Global = {
+
+ val settings = new Settings()
+ settings.Xprintpos.value = true
+ val command = new CompilerCommand((CommandLineParser tokenize extraSettings) ++ args.toList, settings)
+ new Global(command.settings, new ConsoleReporter(settings)) with interactive.RangePositions
+ }
+}
diff --git a/test/files/run/t7290.scala b/test/files/run/t7290.scala
new file mode 100644
index 0000000..01f7e8f
--- /dev/null
+++ b/test/files/run/t7290.scala
@@ -0,0 +1,9 @@
+object Test extends App {
+ val y = (0: Int) match {
+ case 1 => 1
+ case 0 | 0 => 0
+ case 2 | 2 | 2 | 3 | 2 | 3 => 0
+ case _ => -1
+ }
+ assert(y == 0, y)
+}
diff --git a/test/files/run/t7291a.check b/test/files/run/t7291a.check
new file mode 100644
index 0000000..126faa1
--- /dev/null
+++ b/test/files/run/t7291a.check
@@ -0,0 +1 @@
+conjure
diff --git a/test/files/jvm/bug680.check b/test/files/run/t7291a.flags
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t7291a.flags
diff --git a/test/files/run/t7291a.scala b/test/files/run/t7291a.scala
new file mode 100644
index 0000000..4b7c4a4
--- /dev/null
+++ b/test/files/run/t7291a.scala
@@ -0,0 +1,19 @@
+trait Fooable[T]
+object Fooable {
+ implicit def conjure[T]: Fooable[T] = {
+ println("conjure")
+ new Fooable[T]{}
+ }
+
+}
+
+object Test {
+ implicit def traversable[T, Coll[_] <: Traversable[_]](implicit
+elem: Fooable[T]): Fooable[Coll[T]] = {
+ println("traversable")
+ new Fooable[Coll[T]]{}
+ }
+ def main(args: Array[String]) {
+ implicitly[Fooable[List[Any]]]
+ }
+}
diff --git a/test/files/run/t7291b.check b/test/files/run/t7291b.check
new file mode 100644
index 0000000..c07ba98
--- /dev/null
+++ b/test/files/run/t7291b.check
@@ -0,0 +1,2 @@
+conjure
+traversable
diff --git a/test/files/run/t7291b.flags b/test/files/run/t7291b.flags
new file mode 100644
index 0000000..d564f2b
--- /dev/null
+++ b/test/files/run/t7291b.flags
@@ -0,0 +1 @@
+-Xdivergence211
\ No newline at end of file
diff --git a/test/files/run/t7291b.scala b/test/files/run/t7291b.scala
new file mode 100644
index 0000000..30c4261
--- /dev/null
+++ b/test/files/run/t7291b.scala
@@ -0,0 +1,19 @@
+trait Fooable[T]
+object Fooable {
+ implicit def conjure[T]: Fooable[T] = {
+ println("conjure")
+ new Fooable[T]{}
+ }
+
+}
+
+object Test {
+ implicit def traversable[T, Coll[_] <: Traversable[_]](implicit
+elem: Fooable[T]): Fooable[Coll[T]] = {
+ println("traversable")
+ new Fooable[Coll[T]]{}
+ }
+ def main(args: Array[String]) {
+ implicitly[Fooable[List[Any]]]
+ }
+}
diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check
new file mode 100644
index 0000000..9667369
--- /dev/null
+++ b/test/files/run/t7319.check
@@ -0,0 +1,38 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> class M[A]
+defined class M
+
+scala> implicit def ma0[A](a: A): M[A] = null
+warning: there were 1 feature warning(s); re-run with -feature for details
+ma0: [A](a: A)M[A]
+
+scala> implicit def ma1[A](a: A): M[A] = null
+warning: there were 1 feature warning(s); re-run with -feature for details
+ma1: [A](a: A)M[A]
+
+scala> def convert[F[X <: F[X]]](builder: F[_ <: F[_]]) = 0
+warning: there were 1 feature warning(s); re-run with -feature for details
+convert: [F[X <: F[X]]](builder: F[_ <: F[_]])Int
+
+scala> convert(Some[Int](0))
+<console>:12: error: no type parameters for method convert: (builder: F[_ <: F[_]])Int exist so that it can be applied to arguments (Some[Int])
+ --- because ---
+argument expression's type is not compatible with formal parameter type;
+ found : Some[Int]
+ required: ?F forSome { type _$1 <: ?F forSome { type _$2 } }
+ convert(Some[Int](0))
+ ^
+<console>:12: error: type mismatch;
+ found : Some[Int]
+ required: F[_ <: F[_]]
+ convert(Some[Int](0))
+ ^
+
+scala> 0
+res1: Int = 0
+
+scala>
diff --git a/test/files/run/t7319.scala b/test/files/run/t7319.scala
new file mode 100644
index 0000000..23ffeb9
--- /dev/null
+++ b/test/files/run/t7319.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ // so we can provide the ambiguities, rather than relying in Predef implicits
+ override def extraSettings = "-Yno-predef"
+ override def code = """
+class M[A]
+implicit def ma0[A](a: A): M[A] = null
+implicit def ma1[A](a: A): M[A] = null
+def convert[F[X <: F[X]]](builder: F[_ <: F[_]]) = 0
+convert(Some[Int](0))
+0""" // before the fix, this line, and all that followed, re-issued the implicit ambiguity error.
+}
diff --git a/test/files/run/t7325.check b/test/files/run/t7325.check
new file mode 100644
index 0000000..3c7652f
--- /dev/null
+++ b/test/files/run/t7325.check
@@ -0,0 +1,19 @@
+%
+%%
+%%%
+%n
+%
+
+%%n
+%%
+
+%%%n
+%%%
+
+0
+0%d
+0%%d
+0
+
+0
+
diff --git a/test/files/run/t7325.scala b/test/files/run/t7325.scala
new file mode 100644
index 0000000..26f6bc6
--- /dev/null
+++ b/test/files/run/t7325.scala
@@ -0,0 +1,25 @@
+object Test extends App {
+ // println(f"%")
+ println(f"%%")
+ // println(f"%%%")
+ println(f"%%%%")
+ // println(f"%%%%%")
+ println(f"%%%%%%")
+
+ println(f"%%n")
+ println(f"%%%n")
+ println(f"%%%%n")
+ println(f"%%%%%n")
+ println(f"%%%%%%n")
+ println(f"%%%%%%%n")
+
+ // println(f"${0}%")
+ println(f"${0}%d")
+ println(f"${0}%%d")
+ // println(f"${0}%%%d")
+ println(f"${0}%%%%d")
+ // println(f"${0}%%%%%d")
+
+ println(f"${0}%n")
+ println(f"${0}%d%n")
+}
\ No newline at end of file
diff --git a/test/files/run/t7331a.check b/test/files/run/t7331a.check
new file mode 100644
index 0000000..a59b400
--- /dev/null
+++ b/test/files/run/t7331a.check
@@ -0,0 +1,2 @@
+source-<toolbox>,line-1,offset=0
+2
\ No newline at end of file
diff --git a/test/files/run/t7331a.scala b/test/files/run/t7331a.scala
new file mode 100644
index 0000000..1851945
--- /dev/null
+++ b/test/files/run/t7331a.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val tree = tb.parse("x")
+ println(tree.pos)
+ println(tree.pos.source.content.length)
+}
\ No newline at end of file
diff --git a/test/files/run/t7331b.check b/test/files/run/t7331b.check
new file mode 100644
index 0000000..7034a95
--- /dev/null
+++ b/test/files/run/t7331b.check
@@ -0,0 +1,3 @@
+reflective compilation has failed:
+
+')' expected but eof found.
diff --git a/test/files/run/t7331b.scala b/test/files/run/t7331b.scala
new file mode 100644
index 0000000..052656d
--- /dev/null
+++ b/test/files/run/t7331b.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, ToolBoxError}
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ try tb.parse("f(x")
+ catch {
+ case ToolBoxError(msg, _) => println(msg)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t7331c.check b/test/files/run/t7331c.check
new file mode 100644
index 0000000..af9f1b1
--- /dev/null
+++ b/test/files/run/t7331c.check
@@ -0,0 +1,3 @@
+ClassDef(Modifiers(), newTypeName("C"), List(), Template(List(Select(Ident(scala), newTypeName("AnyRef"))), emptyValDef, List(DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(())))))))
+source-<toolbox>,line-1,offset=6
+NoPosition
diff --git a/test/files/run/t7331c.scala b/test/files/run/t7331c.scala
new file mode 100644
index 0000000..75873af
--- /dev/null
+++ b/test/files/run/t7331c.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val tree = tb.parse("class C").asInstanceOf[ClassDef]
+ println(showRaw(tree))
+ println(tree.pos)
+ println(tree.impl.self.pos)
+}
\ No newline at end of file
diff --git a/test/files/run/t7336.scala b/test/files/run/t7336.scala
new file mode 100644
index 0000000..ace83f2
--- /dev/null
+++ b/test/files/run/t7336.scala
@@ -0,0 +1,31 @@
+import scala.concurrent.Await
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.concurrent.Future
+import scala.concurrent.duration.Duration
+
+/** This test uses recursive calls to Future.flatMap to create arrays whose
+ * combined size is slightly greater than the JVM heap size. A previous
+ * implementation of Future.flatMap would retain references to each array,
+ * resulting in a speedy OutOfMemoryError. Now, each array should be freed soon
+ * after it is created and the test should complete without problems.
+ */
+object Test {
+ def main(args: Array[String]) {
+ def loop(i: Int, arraySize: Int): Future[Unit] = {
+ val array = new Array[Byte](arraySize)
+ Future.successful(i).flatMap { i =>
+ if (i == 0) {
+ Future.successful(())
+ } else {
+ array.size // Force closure to refer to array
+ loop(i - 1, arraySize)
+ }
+
+ }
+ }
+
+ val arraySize = 1000000
+ val tooManyArrays = (Runtime.getRuntime().totalMemory() / arraySize).toInt + 1
+ Await.ready(loop(tooManyArrays, arraySize), Duration.Inf)
+ }
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/t7341.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t7341.check
diff --git a/test/files/run/t7341.flags b/test/files/run/t7341.flags
new file mode 100755
index 0000000..ae08446
--- /dev/null
+++ b/test/files/run/t7341.flags
@@ -0,0 +1 @@
+-Xcheckinit
\ No newline at end of file
diff --git a/test/files/run/t7341.scala b/test/files/run/t7341.scala
new file mode 100755
index 0000000..dc526c6
--- /dev/null
+++ b/test/files/run/t7341.scala
@@ -0,0 +1,15 @@
+object Obj {
+ private var cache: Any = ()
+ def returning(f: () => Unit) = ()
+ def foo {
+ returning(() => cache = ())
+ }
+
+ def apply(): Any = {
+ cache
+ }
+}
+
+object Test extends App {
+ Obj()
+}
diff --git a/test/files/run/t7359.check b/test/files/run/t7359.check
new file mode 100644
index 0000000..9766475
--- /dev/null
+++ b/test/files/run/t7359.check
@@ -0,0 +1 @@
+ok
diff --git a/test/files/run/t7359/Cyclic_1.java b/test/files/run/t7359/Cyclic_1.java
new file mode 100644
index 0000000..42b46c1
--- /dev/null
+++ b/test/files/run/t7359/Cyclic_1.java
@@ -0,0 +1,3 @@
+abstract class Cyclic {
+ static interface Inner<T extends Inner> { }
+}
\ No newline at end of file
diff --git a/test/files/run/t7359/Test_2.scala b/test/files/run/t7359/Test_2.scala
new file mode 100644
index 0000000..bb6f4cb
--- /dev/null
+++ b/test/files/run/t7359/Test_2.scala
@@ -0,0 +1,6 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ typeOf[Cyclic].members
+ println("ok")
+}
\ No newline at end of file
diff --git a/test/files/run/t7375a.check b/test/files/run/t7375a.check
new file mode 100644
index 0000000..a0a15df
--- /dev/null
+++ b/test/files/run/t7375a.check
@@ -0,0 +1,4 @@
+C1
+C2
+C1
+C2
diff --git a/test/files/run/t7375a.scala b/test/files/run/t7375a.scala
new file mode 100644
index 0000000..e46ad08
--- /dev/null
+++ b/test/files/run/t7375a.scala
@@ -0,0 +1,16 @@
+import scala.reflect.ClassTag
+
+class C1(val n: Int) extends AnyVal
+class C2(val n: Int) extends AnyRef
+
+object Test {
+ type F1 = C1
+ type F2 = C2
+
+ def main(args: Array[String]): Unit = {
+ println(implicitly[ClassTag[C1]])
+ println(implicitly[ClassTag[C2]])
+ println(implicitly[ClassTag[F1]])
+ println(implicitly[ClassTag[F2]])
+ }
+}
diff --git a/test/files/run/t7375b.check b/test/files/run/t7375b.check
new file mode 100644
index 0000000..d7578e2
--- /dev/null
+++ b/test/files/run/t7375b.check
@@ -0,0 +1,4 @@
+Predef.this.classOf[C1]
+Predef.this.classOf[C2]
+Predef.this.classOf[C1]
+Predef.this.classOf[C2]
diff --git a/test/files/run/t7375b/Macros_1.scala b/test/files/run/t7375b/Macros_1.scala
new file mode 100644
index 0000000..70e79cc
--- /dev/null
+++ b/test/files/run/t7375b/Macros_1.scala
@@ -0,0 +1,18 @@
+import language.experimental.macros
+import scala.reflect.macros.Context
+
+class C1(val n: Int) extends AnyVal
+class C2(val n: Int) extends AnyRef
+
+object Macros {
+ type F1 = C1
+ type F2 = C2
+
+ def foo = macro impl
+ def impl(c: Context) = {
+ import c.universe._
+ def test[T: c.TypeTag] = reify(println(c.literal(c.reifyRuntimeClass(c.typeOf[T]).toString).splice)).tree
+ def tests = Block(List(test[C1], test[C2], test[F1], test[F2]), Literal(Constant(())))
+ c.Expr[Unit](tests)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t7375b/Test_2.scala b/test/files/run/t7375b/Test_2.scala
new file mode 100644
index 0000000..acfddae
--- /dev/null
+++ b/test/files/run/t7375b/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/run/t7398.scala b/test/files/run/t7398.scala
new file mode 100644
index 0000000..493c4dc
--- /dev/null
+++ b/test/files/run/t7398.scala
@@ -0,0 +1,26 @@
+import scala.tools.partest._
+
+object Test extends CompilerTest {
+ import global._
+
+ override lazy val units: List[CompilationUnit] = {
+ // This test itself does not depend on JDK8.
+ javaCompilationUnits(global)(defaultMethodSource)
+ }
+
+ private def defaultMethodSource = """
+public interface Iterator<E> {
+ boolean hasNext();
+ E next();
+ default void remove() {
+ throw new UnsupportedOperationException("remove");
+ }
+ default void forEachRemaining(Consumer<? super E> action) {
+ throw new UnsupportedOperationException("forEachRemaining");
+ }
+}
+ """
+
+ // We're only checking we can parse it.
+ def check(source: String, unit: global.CompilationUnit): Unit = ()
+}
diff --git a/test/files/run/t7439.check b/test/files/run/t7439.check
new file mode 100644
index 0000000..ce9e8b5
--- /dev/null
+++ b/test/files/run/t7439.check
@@ -0,0 +1 @@
+pos: NoPosition Class A_1 not found - continuing with a stub. WARNING
diff --git a/test/files/run/t7439/A_1.java b/test/files/run/t7439/A_1.java
new file mode 100644
index 0000000..4accd95
--- /dev/null
+++ b/test/files/run/t7439/A_1.java
@@ -0,0 +1,3 @@
+public class A_1 {
+
+}
\ No newline at end of file
diff --git a/test/files/run/t7439/B_1.java b/test/files/run/t7439/B_1.java
new file mode 100644
index 0000000..5dd3b93
--- /dev/null
+++ b/test/files/run/t7439/B_1.java
@@ -0,0 +1,3 @@
+public class B_1 {
+ public void b(A_1[] a) {}
+}
diff --git a/test/files/run/t7439/Test_2.scala b/test/files/run/t7439/Test_2.scala
new file mode 100644
index 0000000..e00e9d1
--- /dev/null
+++ b/test/files/run/t7439/Test_2.scala
@@ -0,0 +1,31 @@
+import scala.tools.partest._
+import java.io.File
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def compileCode(code: String) = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
+ }
+
+ def C = """
+ class C {
+ new B_1
+ }
+ """
+
+ def show(): Unit = {
+ //compileCode(C)
+ assert(filteredInfos.isEmpty, filteredInfos)
+
+ // blow away the entire package
+ val a1Class = new File(testOutput.path, "A_1.class")
+ assert(a1Class.exists)
+ assert(a1Class.delete())
+
+ // bad symbolic reference error expected (but no stack trace!)
+ compileCode(C)
+ println(storeReporter.infos.mkString("\n")) // Included a NullPointerException before.
+ }
+}
diff --git a/test/files/run/bug744.check b/test/files/run/t744.check
similarity index 100%
rename from test/files/run/bug744.check
rename to test/files/run/t744.check
diff --git a/test/files/run/bug744.scala b/test/files/run/t744.scala
similarity index 100%
rename from test/files/run/bug744.scala
rename to test/files/run/t744.scala
diff --git a/test/files/run/t7455.check b/test/files/run/t7455.check
new file mode 100644
index 0000000..0eb9342
--- /dev/null
+++ b/test/files/run/t7455.check
@@ -0,0 +1,4 @@
+private[package <empty>] def <init>(x$1: String): Outer[E]
+private[package <empty>] def <init>(): Outer$PrivateInner
+private[package <empty>] def <init>(): Outer$PrivateStaticInner
+private[package <empty>] def <init>(x$2: String): Outer$PublicInner
diff --git a/test/files/run/t7455/Outer.java b/test/files/run/t7455/Outer.java
new file mode 100644
index 0000000..10c97a9
--- /dev/null
+++ b/test/files/run/t7455/Outer.java
@@ -0,0 +1,31 @@
+public class Outer<E> {
+ public void elements() {
+ new C<E>() {
+ };
+ }
+
+ private Outer(String a) {}
+
+ static class SubSelf extends Outer<String> {
+ public SubSelf() { super(""); }
+ }
+
+ private class PrivateInner {
+ }
+ class SubPrivateInner extends PrivateInner {
+ }
+
+ private class PublicInner {
+ private PublicInner(String a) {}
+ }
+ class SubPublicInner extends PublicInner {
+ public SubPublicInner() { super(""); }
+ }
+
+ private static class PrivateStaticInner {
+ }
+ public static class SubPrivateStaticInner extends PrivateStaticInner {
+ }
+}
+
+class C<E> {}
diff --git a/test/files/run/t7455/Test.scala b/test/files/run/t7455/Test.scala
new file mode 100644
index 0000000..b23a724
--- /dev/null
+++ b/test/files/run/t7455/Test.scala
@@ -0,0 +1,30 @@
+import scala.tools.partest._
+
+// javac adds dummy parameters of type Outer$1 to synthetic access constructors
+// This test shows that we strip them from the signatures. If we don't, we trigger
+// parsing of Outer$1 which can fail if it references type parameters of the Outer.
+//
+// OLD OUTPUT:
+// private[package <empty>] def <init>(x$2: Outer$1): Outer$PrivateInner
+// error: error while loading Outer$1, class file 't7455-run.obj/Outer$1.class' is broken
+// (class java.util.NoSuchElementException/key not found: E)
+// ...
+object Test extends DirectTest {
+ override def code = ""
+
+ def show {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ val compiler = newCompiler("-cp", classpath, "-d", testOutput.path)
+ import compiler._, definitions._
+ new compiler.Run
+
+ for {
+ name <- Seq("Outer", "Outer$PrivateInner", "Outer$PrivateStaticInner", "Outer$PublicInner")
+ clazz = compiler.rootMirror.staticClass(name)
+ constr <- clazz.info.member(nme.CONSTRUCTOR).alternatives
+ } {
+ println(constr.defString)
+ fullyInitializeSymbol(constr)
+ }
+ }
+}
diff --git a/test/files/run/t7498.scala b/test/files/run/t7498.scala
new file mode 100644
index 0000000..1dbf059
--- /dev/null
+++ b/test/files/run/t7498.scala
@@ -0,0 +1,20 @@
+
+
+
+
+
+
+
+object Test extends App {
+ import scala.collection.concurrent.TrieMap
+
+ class Collision(val idx: Int) {
+ override def hashCode = idx % 10
+ }
+
+ val tm = TrieMap[Collision, Unit]()
+ for (i <- 0 until 1000) tm(new Collision(i)) = ()
+
+ tm.par.foreach(kv => ())
+}
+
diff --git a/test/files/run/t7507.scala b/test/files/run/t7507.scala
new file mode 100644
index 0000000..6c1959d
--- /dev/null
+++ b/test/files/run/t7507.scala
@@ -0,0 +1,31 @@
+trait Cake extends Slice
+
+// Minimization
+trait Slice { self: Cake => // must have self type that extends `Slice`
+ private[this] val bippy = () // must be private[this]
+ locally(bippy)
+}
+
+// Originally reported bug:
+trait Cake1 extends Slice1
+trait Slice1 { self: Cake1 =>
+ import java.lang.String // any import will do!
+ val Tuple2(x, y) = ((1, 2))
+}
+
+
+// Nesting
+trait Cake3 extends Outer.Slice3
+
+// Minimization
+object Outer {
+ private[this] val bippy = ()
+ trait Slice3 { self: Cake3 =>
+ locally(bippy)
+ }
+}
+
+object Test extends App {
+ val s1 = new Cake1 {}
+ assert((s1.x, s1.y) == (1, 2), (s1.x, s1.y))
+}
diff --git a/test/files/run/bug751.scala b/test/files/run/t751.scala
similarity index 100%
rename from test/files/run/bug751.scala
rename to test/files/run/t751.scala
diff --git a/test/files/jvm/bug680.check b/test/files/run/t7510.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/t7510.check
diff --git a/test/files/run/t7510/Ann_1.java b/test/files/run/t7510/Ann_1.java
new file mode 100644
index 0000000..c8c5b20
--- /dev/null
+++ b/test/files/run/t7510/Ann_1.java
@@ -0,0 +1,4 @@
+package foo;
+
+public @interface Ann_1 {
+}
\ No newline at end of file
diff --git a/test/files/run/t7510/Test_2.scala b/test/files/run/t7510/Test_2.scala
new file mode 100644
index 0000000..7d7a95e
--- /dev/null
+++ b/test/files/run/t7510/Test_2.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ tb.compile(tb.parse("@foo.Ann_1 class C"))
+}
+
diff --git a/test/files/run/t7556.check b/test/files/run/t7556.check
new file mode 100644
index 0000000..3328708
--- /dev/null
+++ b/test/files/run/t7556.check
@@ -0,0 +1,2 @@
+class annotations: List(scala.reflect.ScalaLongSignature)
+3001 decls via runtime reflection
diff --git a/test/files/run/t7556/Test_2.scala b/test/files/run/t7556/Test_2.scala
new file mode 100644
index 0000000..3184873
--- /dev/null
+++ b/test/files/run/t7556/Test_2.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+
+object Test {
+ def main(args: Array[String]) {
+ val mc = new MegaClass
+ val anns = mc.getClass.getAnnotations.map(_.annotationType.getName).toList.sorted
+ println(s"class annotations: $anns")
+ val N = typeTag[MegaClass].tpe.declarations.size // was: error reading Scala signature of MegaClass: 65935
+ println(s"$N decls via runtime reflection")
+ }
+}
diff --git a/test/files/run/t7556/mega-class_1.scala b/test/files/run/t7556/mega-class_1.scala
new file mode 100644
index 0000000..dcc9ba8
--- /dev/null
+++ b/test/files/run/t7556/mega-class_1.scala
@@ -0,0 +1,3002 @@
+class MegaClass {
+ def method0: Int = 0
+ def method1: Int = 0
+ def method2: Int = 0
+ def method3: Int = 0
+ def method4: Int = 0
+ def method5: Int = 0
+ def method6: Int = 0
+ def method7: Int = 0
+ def method8: Int = 0
+ def method9: Int = 0
+ def method10: Int = 0
+ def method11: Int = 0
+ def method12: Int = 0
+ def method13: Int = 0
+ def method14: Int = 0
+ def method15: Int = 0
+ def method16: Int = 0
+ def method17: Int = 0
+ def method18: Int = 0
+ def method19: Int = 0
+ def method20: Int = 0
+ def method21: Int = 0
+ def method22: Int = 0
+ def method23: Int = 0
+ def method24: Int = 0
+ def method25: Int = 0
+ def method26: Int = 0
+ def method27: Int = 0
+ def method28: Int = 0
+ def method29: Int = 0
+ def method30: Int = 0
+ def method31: Int = 0
+ def method32: Int = 0
+ def method33: Int = 0
+ def method34: Int = 0
+ def method35: Int = 0
+ def method36: Int = 0
+ def method37: Int = 0
+ def method38: Int = 0
+ def method39: Int = 0
+ def method40: Int = 0
+ def method41: Int = 0
+ def method42: Int = 0
+ def method43: Int = 0
+ def method44: Int = 0
+ def method45: Int = 0
+ def method46: Int = 0
+ def method47: Int = 0
+ def method48: Int = 0
+ def method49: Int = 0
+ def method50: Int = 0
+ def method51: Int = 0
+ def method52: Int = 0
+ def method53: Int = 0
+ def method54: Int = 0
+ def method55: Int = 0
+ def method56: Int = 0
+ def method57: Int = 0
+ def method58: Int = 0
+ def method59: Int = 0
+ def method60: Int = 0
+ def method61: Int = 0
+ def method62: Int = 0
+ def method63: Int = 0
+ def method64: Int = 0
+ def method65: Int = 0
+ def method66: Int = 0
+ def method67: Int = 0
+ def method68: Int = 0
+ def method69: Int = 0
+ def method70: Int = 0
+ def method71: Int = 0
+ def method72: Int = 0
+ def method73: Int = 0
+ def method74: Int = 0
+ def method75: Int = 0
+ def method76: Int = 0
+ def method77: Int = 0
+ def method78: Int = 0
+ def method79: Int = 0
+ def method80: Int = 0
+ def method81: Int = 0
+ def method82: Int = 0
+ def method83: Int = 0
+ def method84: Int = 0
+ def method85: Int = 0
+ def method86: Int = 0
+ def method87: Int = 0
+ def method88: Int = 0
+ def method89: Int = 0
+ def method90: Int = 0
+ def method91: Int = 0
+ def method92: Int = 0
+ def method93: Int = 0
+ def method94: Int = 0
+ def method95: Int = 0
+ def method96: Int = 0
+ def method97: Int = 0
+ def method98: Int = 0
+ def method99: Int = 0
+ def method100: Int = 0
+ def method101: Int = 0
+ def method102: Int = 0
+ def method103: Int = 0
+ def method104: Int = 0
+ def method105: Int = 0
+ def method106: Int = 0
+ def method107: Int = 0
+ def method108: Int = 0
+ def method109: Int = 0
+ def method110: Int = 0
+ def method111: Int = 0
+ def method112: Int = 0
+ def method113: Int = 0
+ def method114: Int = 0
+ def method115: Int = 0
+ def method116: Int = 0
+ def method117: Int = 0
+ def method118: Int = 0
+ def method119: Int = 0
+ def method120: Int = 0
+ def method121: Int = 0
+ def method122: Int = 0
+ def method123: Int = 0
+ def method124: Int = 0
+ def method125: Int = 0
+ def method126: Int = 0
+ def method127: Int = 0
+ def method128: Int = 0
+ def method129: Int = 0
+ def method130: Int = 0
+ def method131: Int = 0
+ def method132: Int = 0
+ def method133: Int = 0
+ def method134: Int = 0
+ def method135: Int = 0
+ def method136: Int = 0
+ def method137: Int = 0
+ def method138: Int = 0
+ def method139: Int = 0
+ def method140: Int = 0
+ def method141: Int = 0
+ def method142: Int = 0
+ def method143: Int = 0
+ def method144: Int = 0
+ def method145: Int = 0
+ def method146: Int = 0
+ def method147: Int = 0
+ def method148: Int = 0
+ def method149: Int = 0
+ def method150: Int = 0
+ def method151: Int = 0
+ def method152: Int = 0
+ def method153: Int = 0
+ def method154: Int = 0
+ def method155: Int = 0
+ def method156: Int = 0
+ def method157: Int = 0
+ def method158: Int = 0
+ def method159: Int = 0
+ def method160: Int = 0
+ def method161: Int = 0
+ def method162: Int = 0
+ def method163: Int = 0
+ def method164: Int = 0
+ def method165: Int = 0
+ def method166: Int = 0
+ def method167: Int = 0
+ def method168: Int = 0
+ def method169: Int = 0
+ def method170: Int = 0
+ def method171: Int = 0
+ def method172: Int = 0
+ def method173: Int = 0
+ def method174: Int = 0
+ def method175: Int = 0
+ def method176: Int = 0
+ def method177: Int = 0
+ def method178: Int = 0
+ def method179: Int = 0
+ def method180: Int = 0
+ def method181: Int = 0
+ def method182: Int = 0
+ def method183: Int = 0
+ def method184: Int = 0
+ def method185: Int = 0
+ def method186: Int = 0
+ def method187: Int = 0
+ def method188: Int = 0
+ def method189: Int = 0
+ def method190: Int = 0
+ def method191: Int = 0
+ def method192: Int = 0
+ def method193: Int = 0
+ def method194: Int = 0
+ def method195: Int = 0
+ def method196: Int = 0
+ def method197: Int = 0
+ def method198: Int = 0
+ def method199: Int = 0
+ def method200: Int = 0
+ def method201: Int = 0
+ def method202: Int = 0
+ def method203: Int = 0
+ def method204: Int = 0
+ def method205: Int = 0
+ def method206: Int = 0
+ def method207: Int = 0
+ def method208: Int = 0
+ def method209: Int = 0
+ def method210: Int = 0
+ def method211: Int = 0
+ def method212: Int = 0
+ def method213: Int = 0
+ def method214: Int = 0
+ def method215: Int = 0
+ def method216: Int = 0
+ def method217: Int = 0
+ def method218: Int = 0
+ def method219: Int = 0
+ def method220: Int = 0
+ def method221: Int = 0
+ def method222: Int = 0
+ def method223: Int = 0
+ def method224: Int = 0
+ def method225: Int = 0
+ def method226: Int = 0
+ def method227: Int = 0
+ def method228: Int = 0
+ def method229: Int = 0
+ def method230: Int = 0
+ def method231: Int = 0
+ def method232: Int = 0
+ def method233: Int = 0
+ def method234: Int = 0
+ def method235: Int = 0
+ def method236: Int = 0
+ def method237: Int = 0
+ def method238: Int = 0
+ def method239: Int = 0
+ def method240: Int = 0
+ def method241: Int = 0
+ def method242: Int = 0
+ def method243: Int = 0
+ def method244: Int = 0
+ def method245: Int = 0
+ def method246: Int = 0
+ def method247: Int = 0
+ def method248: Int = 0
+ def method249: Int = 0
+ def method250: Int = 0
+ def method251: Int = 0
+ def method252: Int = 0
+ def method253: Int = 0
+ def method254: Int = 0
+ def method255: Int = 0
+ def method256: Int = 0
+ def method257: Int = 0
+ def method258: Int = 0
+ def method259: Int = 0
+ def method260: Int = 0
+ def method261: Int = 0
+ def method262: Int = 0
+ def method263: Int = 0
+ def method264: Int = 0
+ def method265: Int = 0
+ def method266: Int = 0
+ def method267: Int = 0
+ def method268: Int = 0
+ def method269: Int = 0
+ def method270: Int = 0
+ def method271: Int = 0
+ def method272: Int = 0
+ def method273: Int = 0
+ def method274: Int = 0
+ def method275: Int = 0
+ def method276: Int = 0
+ def method277: Int = 0
+ def method278: Int = 0
+ def method279: Int = 0
+ def method280: Int = 0
+ def method281: Int = 0
+ def method282: Int = 0
+ def method283: Int = 0
+ def method284: Int = 0
+ def method285: Int = 0
+ def method286: Int = 0
+ def method287: Int = 0
+ def method288: Int = 0
+ def method289: Int = 0
+ def method290: Int = 0
+ def method291: Int = 0
+ def method292: Int = 0
+ def method293: Int = 0
+ def method294: Int = 0
+ def method295: Int = 0
+ def method296: Int = 0
+ def method297: Int = 0
+ def method298: Int = 0
+ def method299: Int = 0
+ def method300: Int = 0
+ def method301: Int = 0
+ def method302: Int = 0
+ def method303: Int = 0
+ def method304: Int = 0
+ def method305: Int = 0
+ def method306: Int = 0
+ def method307: Int = 0
+ def method308: Int = 0
+ def method309: Int = 0
+ def method310: Int = 0
+ def method311: Int = 0
+ def method312: Int = 0
+ def method313: Int = 0
+ def method314: Int = 0
+ def method315: Int = 0
+ def method316: Int = 0
+ def method317: Int = 0
+ def method318: Int = 0
+ def method319: Int = 0
+ def method320: Int = 0
+ def method321: Int = 0
+ def method322: Int = 0
+ def method323: Int = 0
+ def method324: Int = 0
+ def method325: Int = 0
+ def method326: Int = 0
+ def method327: Int = 0
+ def method328: Int = 0
+ def method329: Int = 0
+ def method330: Int = 0
+ def method331: Int = 0
+ def method332: Int = 0
+ def method333: Int = 0
+ def method334: Int = 0
+ def method335: Int = 0
+ def method336: Int = 0
+ def method337: Int = 0
+ def method338: Int = 0
+ def method339: Int = 0
+ def method340: Int = 0
+ def method341: Int = 0
+ def method342: Int = 0
+ def method343: Int = 0
+ def method344: Int = 0
+ def method345: Int = 0
+ def method346: Int = 0
+ def method347: Int = 0
+ def method348: Int = 0
+ def method349: Int = 0
+ def method350: Int = 0
+ def method351: Int = 0
+ def method352: Int = 0
+ def method353: Int = 0
+ def method354: Int = 0
+ def method355: Int = 0
+ def method356: Int = 0
+ def method357: Int = 0
+ def method358: Int = 0
+ def method359: Int = 0
+ def method360: Int = 0
+ def method361: Int = 0
+ def method362: Int = 0
+ def method363: Int = 0
+ def method364: Int = 0
+ def method365: Int = 0
+ def method366: Int = 0
+ def method367: Int = 0
+ def method368: Int = 0
+ def method369: Int = 0
+ def method370: Int = 0
+ def method371: Int = 0
+ def method372: Int = 0
+ def method373: Int = 0
+ def method374: Int = 0
+ def method375: Int = 0
+ def method376: Int = 0
+ def method377: Int = 0
+ def method378: Int = 0
+ def method379: Int = 0
+ def method380: Int = 0
+ def method381: Int = 0
+ def method382: Int = 0
+ def method383: Int = 0
+ def method384: Int = 0
+ def method385: Int = 0
+ def method386: Int = 0
+ def method387: Int = 0
+ def method388: Int = 0
+ def method389: Int = 0
+ def method390: Int = 0
+ def method391: Int = 0
+ def method392: Int = 0
+ def method393: Int = 0
+ def method394: Int = 0
+ def method395: Int = 0
+ def method396: Int = 0
+ def method397: Int = 0
+ def method398: Int = 0
+ def method399: Int = 0
+ def method400: Int = 0
+ def method401: Int = 0
+ def method402: Int = 0
+ def method403: Int = 0
+ def method404: Int = 0
+ def method405: Int = 0
+ def method406: Int = 0
+ def method407: Int = 0
+ def method408: Int = 0
+ def method409: Int = 0
+ def method410: Int = 0
+ def method411: Int = 0
+ def method412: Int = 0
+ def method413: Int = 0
+ def method414: Int = 0
+ def method415: Int = 0
+ def method416: Int = 0
+ def method417: Int = 0
+ def method418: Int = 0
+ def method419: Int = 0
+ def method420: Int = 0
+ def method421: Int = 0
+ def method422: Int = 0
+ def method423: Int = 0
+ def method424: Int = 0
+ def method425: Int = 0
+ def method426: Int = 0
+ def method427: Int = 0
+ def method428: Int = 0
+ def method429: Int = 0
+ def method430: Int = 0
+ def method431: Int = 0
+ def method432: Int = 0
+ def method433: Int = 0
+ def method434: Int = 0
+ def method435: Int = 0
+ def method436: Int = 0
+ def method437: Int = 0
+ def method438: Int = 0
+ def method439: Int = 0
+ def method440: Int = 0
+ def method441: Int = 0
+ def method442: Int = 0
+ def method443: Int = 0
+ def method444: Int = 0
+ def method445: Int = 0
+ def method446: Int = 0
+ def method447: Int = 0
+ def method448: Int = 0
+ def method449: Int = 0
+ def method450: Int = 0
+ def method451: Int = 0
+ def method452: Int = 0
+ def method453: Int = 0
+ def method454: Int = 0
+ def method455: Int = 0
+ def method456: Int = 0
+ def method457: Int = 0
+ def method458: Int = 0
+ def method459: Int = 0
+ def method460: Int = 0
+ def method461: Int = 0
+ def method462: Int = 0
+ def method463: Int = 0
+ def method464: Int = 0
+ def method465: Int = 0
+ def method466: Int = 0
+ def method467: Int = 0
+ def method468: Int = 0
+ def method469: Int = 0
+ def method470: Int = 0
+ def method471: Int = 0
+ def method472: Int = 0
+ def method473: Int = 0
+ def method474: Int = 0
+ def method475: Int = 0
+ def method476: Int = 0
+ def method477: Int = 0
+ def method478: Int = 0
+ def method479: Int = 0
+ def method480: Int = 0
+ def method481: Int = 0
+ def method482: Int = 0
+ def method483: Int = 0
+ def method484: Int = 0
+ def method485: Int = 0
+ def method486: Int = 0
+ def method487: Int = 0
+ def method488: Int = 0
+ def method489: Int = 0
+ def method490: Int = 0
+ def method491: Int = 0
+ def method492: Int = 0
+ def method493: Int = 0
+ def method494: Int = 0
+ def method495: Int = 0
+ def method496: Int = 0
+ def method497: Int = 0
+ def method498: Int = 0
+ def method499: Int = 0
+ def method500: Int = 0
+ def method501: Int = 0
+ def method502: Int = 0
+ def method503: Int = 0
+ def method504: Int = 0
+ def method505: Int = 0
+ def method506: Int = 0
+ def method507: Int = 0
+ def method508: Int = 0
+ def method509: Int = 0
+ def method510: Int = 0
+ def method511: Int = 0
+ def method512: Int = 0
+ def method513: Int = 0
+ def method514: Int = 0
+ def method515: Int = 0
+ def method516: Int = 0
+ def method517: Int = 0
+ def method518: Int = 0
+ def method519: Int = 0
+ def method520: Int = 0
+ def method521: Int = 0
+ def method522: Int = 0
+ def method523: Int = 0
+ def method524: Int = 0
+ def method525: Int = 0
+ def method526: Int = 0
+ def method527: Int = 0
+ def method528: Int = 0
+ def method529: Int = 0
+ def method530: Int = 0
+ def method531: Int = 0
+ def method532: Int = 0
+ def method533: Int = 0
+ def method534: Int = 0
+ def method535: Int = 0
+ def method536: Int = 0
+ def method537: Int = 0
+ def method538: Int = 0
+ def method539: Int = 0
+ def method540: Int = 0
+ def method541: Int = 0
+ def method542: Int = 0
+ def method543: Int = 0
+ def method544: Int = 0
+ def method545: Int = 0
+ def method546: Int = 0
+ def method547: Int = 0
+ def method548: Int = 0
+ def method549: Int = 0
+ def method550: Int = 0
+ def method551: Int = 0
+ def method552: Int = 0
+ def method553: Int = 0
+ def method554: Int = 0
+ def method555: Int = 0
+ def method556: Int = 0
+ def method557: Int = 0
+ def method558: Int = 0
+ def method559: Int = 0
+ def method560: Int = 0
+ def method561: Int = 0
+ def method562: Int = 0
+ def method563: Int = 0
+ def method564: Int = 0
+ def method565: Int = 0
+ def method566: Int = 0
+ def method567: Int = 0
+ def method568: Int = 0
+ def method569: Int = 0
+ def method570: Int = 0
+ def method571: Int = 0
+ def method572: Int = 0
+ def method573: Int = 0
+ def method574: Int = 0
+ def method575: Int = 0
+ def method576: Int = 0
+ def method577: Int = 0
+ def method578: Int = 0
+ def method579: Int = 0
+ def method580: Int = 0
+ def method581: Int = 0
+ def method582: Int = 0
+ def method583: Int = 0
+ def method584: Int = 0
+ def method585: Int = 0
+ def method586: Int = 0
+ def method587: Int = 0
+ def method588: Int = 0
+ def method589: Int = 0
+ def method590: Int = 0
+ def method591: Int = 0
+ def method592: Int = 0
+ def method593: Int = 0
+ def method594: Int = 0
+ def method595: Int = 0
+ def method596: Int = 0
+ def method597: Int = 0
+ def method598: Int = 0
+ def method599: Int = 0
+ def method600: Int = 0
+ def method601: Int = 0
+ def method602: Int = 0
+ def method603: Int = 0
+ def method604: Int = 0
+ def method605: Int = 0
+ def method606: Int = 0
+ def method607: Int = 0
+ def method608: Int = 0
+ def method609: Int = 0
+ def method610: Int = 0
+ def method611: Int = 0
+ def method612: Int = 0
+ def method613: Int = 0
+ def method614: Int = 0
+ def method615: Int = 0
+ def method616: Int = 0
+ def method617: Int = 0
+ def method618: Int = 0
+ def method619: Int = 0
+ def method620: Int = 0
+ def method621: Int = 0
+ def method622: Int = 0
+ def method623: Int = 0
+ def method624: Int = 0
+ def method625: Int = 0
+ def method626: Int = 0
+ def method627: Int = 0
+ def method628: Int = 0
+ def method629: Int = 0
+ def method630: Int = 0
+ def method631: Int = 0
+ def method632: Int = 0
+ def method633: Int = 0
+ def method634: Int = 0
+ def method635: Int = 0
+ def method636: Int = 0
+ def method637: Int = 0
+ def method638: Int = 0
+ def method639: Int = 0
+ def method640: Int = 0
+ def method641: Int = 0
+ def method642: Int = 0
+ def method643: Int = 0
+ def method644: Int = 0
+ def method645: Int = 0
+ def method646: Int = 0
+ def method647: Int = 0
+ def method648: Int = 0
+ def method649: Int = 0
+ def method650: Int = 0
+ def method651: Int = 0
+ def method652: Int = 0
+ def method653: Int = 0
+ def method654: Int = 0
+ def method655: Int = 0
+ def method656: Int = 0
+ def method657: Int = 0
+ def method658: Int = 0
+ def method659: Int = 0
+ def method660: Int = 0
+ def method661: Int = 0
+ def method662: Int = 0
+ def method663: Int = 0
+ def method664: Int = 0
+ def method665: Int = 0
+ def method666: Int = 0
+ def method667: Int = 0
+ def method668: Int = 0
+ def method669: Int = 0
+ def method670: Int = 0
+ def method671: Int = 0
+ def method672: Int = 0
+ def method673: Int = 0
+ def method674: Int = 0
+ def method675: Int = 0
+ def method676: Int = 0
+ def method677: Int = 0
+ def method678: Int = 0
+ def method679: Int = 0
+ def method680: Int = 0
+ def method681: Int = 0
+ def method682: Int = 0
+ def method683: Int = 0
+ def method684: Int = 0
+ def method685: Int = 0
+ def method686: Int = 0
+ def method687: Int = 0
+ def method688: Int = 0
+ def method689: Int = 0
+ def method690: Int = 0
+ def method691: Int = 0
+ def method692: Int = 0
+ def method693: Int = 0
+ def method694: Int = 0
+ def method695: Int = 0
+ def method696: Int = 0
+ def method697: Int = 0
+ def method698: Int = 0
+ def method699: Int = 0
+ def method700: Int = 0
+ def method701: Int = 0
+ def method702: Int = 0
+ def method703: Int = 0
+ def method704: Int = 0
+ def method705: Int = 0
+ def method706: Int = 0
+ def method707: Int = 0
+ def method708: Int = 0
+ def method709: Int = 0
+ def method710: Int = 0
+ def method711: Int = 0
+ def method712: Int = 0
+ def method713: Int = 0
+ def method714: Int = 0
+ def method715: Int = 0
+ def method716: Int = 0
+ def method717: Int = 0
+ def method718: Int = 0
+ def method719: Int = 0
+ def method720: Int = 0
+ def method721: Int = 0
+ def method722: Int = 0
+ def method723: Int = 0
+ def method724: Int = 0
+ def method725: Int = 0
+ def method726: Int = 0
+ def method727: Int = 0
+ def method728: Int = 0
+ def method729: Int = 0
+ def method730: Int = 0
+ def method731: Int = 0
+ def method732: Int = 0
+ def method733: Int = 0
+ def method734: Int = 0
+ def method735: Int = 0
+ def method736: Int = 0
+ def method737: Int = 0
+ def method738: Int = 0
+ def method739: Int = 0
+ def method740: Int = 0
+ def method741: Int = 0
+ def method742: Int = 0
+ def method743: Int = 0
+ def method744: Int = 0
+ def method745: Int = 0
+ def method746: Int = 0
+ def method747: Int = 0
+ def method748: Int = 0
+ def method749: Int = 0
+ def method750: Int = 0
+ def method751: Int = 0
+ def method752: Int = 0
+ def method753: Int = 0
+ def method754: Int = 0
+ def method755: Int = 0
+ def method756: Int = 0
+ def method757: Int = 0
+ def method758: Int = 0
+ def method759: Int = 0
+ def method760: Int = 0
+ def method761: Int = 0
+ def method762: Int = 0
+ def method763: Int = 0
+ def method764: Int = 0
+ def method765: Int = 0
+ def method766: Int = 0
+ def method767: Int = 0
+ def method768: Int = 0
+ def method769: Int = 0
+ def method770: Int = 0
+ def method771: Int = 0
+ def method772: Int = 0
+ def method773: Int = 0
+ def method774: Int = 0
+ def method775: Int = 0
+ def method776: Int = 0
+ def method777: Int = 0
+ def method778: Int = 0
+ def method779: Int = 0
+ def method780: Int = 0
+ def method781: Int = 0
+ def method782: Int = 0
+ def method783: Int = 0
+ def method784: Int = 0
+ def method785: Int = 0
+ def method786: Int = 0
+ def method787: Int = 0
+ def method788: Int = 0
+ def method789: Int = 0
+ def method790: Int = 0
+ def method791: Int = 0
+ def method792: Int = 0
+ def method793: Int = 0
+ def method794: Int = 0
+ def method795: Int = 0
+ def method796: Int = 0
+ def method797: Int = 0
+ def method798: Int = 0
+ def method799: Int = 0
+ def method800: Int = 0
+ def method801: Int = 0
+ def method802: Int = 0
+ def method803: Int = 0
+ def method804: Int = 0
+ def method805: Int = 0
+ def method806: Int = 0
+ def method807: Int = 0
+ def method808: Int = 0
+ def method809: Int = 0
+ def method810: Int = 0
+ def method811: Int = 0
+ def method812: Int = 0
+ def method813: Int = 0
+ def method814: Int = 0
+ def method815: Int = 0
+ def method816: Int = 0
+ def method817: Int = 0
+ def method818: Int = 0
+ def method819: Int = 0
+ def method820: Int = 0
+ def method821: Int = 0
+ def method822: Int = 0
+ def method823: Int = 0
+ def method824: Int = 0
+ def method825: Int = 0
+ def method826: Int = 0
+ def method827: Int = 0
+ def method828: Int = 0
+ def method829: Int = 0
+ def method830: Int = 0
+ def method831: Int = 0
+ def method832: Int = 0
+ def method833: Int = 0
+ def method834: Int = 0
+ def method835: Int = 0
+ def method836: Int = 0
+ def method837: Int = 0
+ def method838: Int = 0
+ def method839: Int = 0
+ def method840: Int = 0
+ def method841: Int = 0
+ def method842: Int = 0
+ def method843: Int = 0
+ def method844: Int = 0
+ def method845: Int = 0
+ def method846: Int = 0
+ def method847: Int = 0
+ def method848: Int = 0
+ def method849: Int = 0
+ def method850: Int = 0
+ def method851: Int = 0
+ def method852: Int = 0
+ def method853: Int = 0
+ def method854: Int = 0
+ def method855: Int = 0
+ def method856: Int = 0
+ def method857: Int = 0
+ def method858: Int = 0
+ def method859: Int = 0
+ def method860: Int = 0
+ def method861: Int = 0
+ def method862: Int = 0
+ def method863: Int = 0
+ def method864: Int = 0
+ def method865: Int = 0
+ def method866: Int = 0
+ def method867: Int = 0
+ def method868: Int = 0
+ def method869: Int = 0
+ def method870: Int = 0
+ def method871: Int = 0
+ def method872: Int = 0
+ def method873: Int = 0
+ def method874: Int = 0
+ def method875: Int = 0
+ def method876: Int = 0
+ def method877: Int = 0
+ def method878: Int = 0
+ def method879: Int = 0
+ def method880: Int = 0
+ def method881: Int = 0
+ def method882: Int = 0
+ def method883: Int = 0
+ def method884: Int = 0
+ def method885: Int = 0
+ def method886: Int = 0
+ def method887: Int = 0
+ def method888: Int = 0
+ def method889: Int = 0
+ def method890: Int = 0
+ def method891: Int = 0
+ def method892: Int = 0
+ def method893: Int = 0
+ def method894: Int = 0
+ def method895: Int = 0
+ def method896: Int = 0
+ def method897: Int = 0
+ def method898: Int = 0
+ def method899: Int = 0
+ def method900: Int = 0
+ def method901: Int = 0
+ def method902: Int = 0
+ def method903: Int = 0
+ def method904: Int = 0
+ def method905: Int = 0
+ def method906: Int = 0
+ def method907: Int = 0
+ def method908: Int = 0
+ def method909: Int = 0
+ def method910: Int = 0
+ def method911: Int = 0
+ def method912: Int = 0
+ def method913: Int = 0
+ def method914: Int = 0
+ def method915: Int = 0
+ def method916: Int = 0
+ def method917: Int = 0
+ def method918: Int = 0
+ def method919: Int = 0
+ def method920: Int = 0
+ def method921: Int = 0
+ def method922: Int = 0
+ def method923: Int = 0
+ def method924: Int = 0
+ def method925: Int = 0
+ def method926: Int = 0
+ def method927: Int = 0
+ def method928: Int = 0
+ def method929: Int = 0
+ def method930: Int = 0
+ def method931: Int = 0
+ def method932: Int = 0
+ def method933: Int = 0
+ def method934: Int = 0
+ def method935: Int = 0
+ def method936: Int = 0
+ def method937: Int = 0
+ def method938: Int = 0
+ def method939: Int = 0
+ def method940: Int = 0
+ def method941: Int = 0
+ def method942: Int = 0
+ def method943: Int = 0
+ def method944: Int = 0
+ def method945: Int = 0
+ def method946: Int = 0
+ def method947: Int = 0
+ def method948: Int = 0
+ def method949: Int = 0
+ def method950: Int = 0
+ def method951: Int = 0
+ def method952: Int = 0
+ def method953: Int = 0
+ def method954: Int = 0
+ def method955: Int = 0
+ def method956: Int = 0
+ def method957: Int = 0
+ def method958: Int = 0
+ def method959: Int = 0
+ def method960: Int = 0
+ def method961: Int = 0
+ def method962: Int = 0
+ def method963: Int = 0
+ def method964: Int = 0
+ def method965: Int = 0
+ def method966: Int = 0
+ def method967: Int = 0
+ def method968: Int = 0
+ def method969: Int = 0
+ def method970: Int = 0
+ def method971: Int = 0
+ def method972: Int = 0
+ def method973: Int = 0
+ def method974: Int = 0
+ def method975: Int = 0
+ def method976: Int = 0
+ def method977: Int = 0
+ def method978: Int = 0
+ def method979: Int = 0
+ def method980: Int = 0
+ def method981: Int = 0
+ def method982: Int = 0
+ def method983: Int = 0
+ def method984: Int = 0
+ def method985: Int = 0
+ def method986: Int = 0
+ def method987: Int = 0
+ def method988: Int = 0
+ def method989: Int = 0
+ def method990: Int = 0
+ def method991: Int = 0
+ def method992: Int = 0
+ def method993: Int = 0
+ def method994: Int = 0
+ def method995: Int = 0
+ def method996: Int = 0
+ def method997: Int = 0
+ def method998: Int = 0
+ def method999: Int = 0
+ def method1000: Int = 0
+ def method1001: Int = 0
+ def method1002: Int = 0
+ def method1003: Int = 0
+ def method1004: Int = 0
+ def method1005: Int = 0
+ def method1006: Int = 0
+ def method1007: Int = 0
+ def method1008: Int = 0
+ def method1009: Int = 0
+ def method1010: Int = 0
+ def method1011: Int = 0
+ def method1012: Int = 0
+ def method1013: Int = 0
+ def method1014: Int = 0
+ def method1015: Int = 0
+ def method1016: Int = 0
+ def method1017: Int = 0
+ def method1018: Int = 0
+ def method1019: Int = 0
+ def method1020: Int = 0
+ def method1021: Int = 0
+ def method1022: Int = 0
+ def method1023: Int = 0
+ def method1024: Int = 0
+ def method1025: Int = 0
+ def method1026: Int = 0
+ def method1027: Int = 0
+ def method1028: Int = 0
+ def method1029: Int = 0
+ def method1030: Int = 0
+ def method1031: Int = 0
+ def method1032: Int = 0
+ def method1033: Int = 0
+ def method1034: Int = 0
+ def method1035: Int = 0
+ def method1036: Int = 0
+ def method1037: Int = 0
+ def method1038: Int = 0
+ def method1039: Int = 0
+ def method1040: Int = 0
+ def method1041: Int = 0
+ def method1042: Int = 0
+ def method1043: Int = 0
+ def method1044: Int = 0
+ def method1045: Int = 0
+ def method1046: Int = 0
+ def method1047: Int = 0
+ def method1048: Int = 0
+ def method1049: Int = 0
+ def method1050: Int = 0
+ def method1051: Int = 0
+ def method1052: Int = 0
+ def method1053: Int = 0
+ def method1054: Int = 0
+ def method1055: Int = 0
+ def method1056: Int = 0
+ def method1057: Int = 0
+ def method1058: Int = 0
+ def method1059: Int = 0
+ def method1060: Int = 0
+ def method1061: Int = 0
+ def method1062: Int = 0
+ def method1063: Int = 0
+ def method1064: Int = 0
+ def method1065: Int = 0
+ def method1066: Int = 0
+ def method1067: Int = 0
+ def method1068: Int = 0
+ def method1069: Int = 0
+ def method1070: Int = 0
+ def method1071: Int = 0
+ def method1072: Int = 0
+ def method1073: Int = 0
+ def method1074: Int = 0
+ def method1075: Int = 0
+ def method1076: Int = 0
+ def method1077: Int = 0
+ def method1078: Int = 0
+ def method1079: Int = 0
+ def method1080: Int = 0
+ def method1081: Int = 0
+ def method1082: Int = 0
+ def method1083: Int = 0
+ def method1084: Int = 0
+ def method1085: Int = 0
+ def method1086: Int = 0
+ def method1087: Int = 0
+ def method1088: Int = 0
+ def method1089: Int = 0
+ def method1090: Int = 0
+ def method1091: Int = 0
+ def method1092: Int = 0
+ def method1093: Int = 0
+ def method1094: Int = 0
+ def method1095: Int = 0
+ def method1096: Int = 0
+ def method1097: Int = 0
+ def method1098: Int = 0
+ def method1099: Int = 0
+ def method1100: Int = 0
+ def method1101: Int = 0
+ def method1102: Int = 0
+ def method1103: Int = 0
+ def method1104: Int = 0
+ def method1105: Int = 0
+ def method1106: Int = 0
+ def method1107: Int = 0
+ def method1108: Int = 0
+ def method1109: Int = 0
+ def method1110: Int = 0
+ def method1111: Int = 0
+ def method1112: Int = 0
+ def method1113: Int = 0
+ def method1114: Int = 0
+ def method1115: Int = 0
+ def method1116: Int = 0
+ def method1117: Int = 0
+ def method1118: Int = 0
+ def method1119: Int = 0
+ def method1120: Int = 0
+ def method1121: Int = 0
+ def method1122: Int = 0
+ def method1123: Int = 0
+ def method1124: Int = 0
+ def method1125: Int = 0
+ def method1126: Int = 0
+ def method1127: Int = 0
+ def method1128: Int = 0
+ def method1129: Int = 0
+ def method1130: Int = 0
+ def method1131: Int = 0
+ def method1132: Int = 0
+ def method1133: Int = 0
+ def method1134: Int = 0
+ def method1135: Int = 0
+ def method1136: Int = 0
+ def method1137: Int = 0
+ def method1138: Int = 0
+ def method1139: Int = 0
+ def method1140: Int = 0
+ def method1141: Int = 0
+ def method1142: Int = 0
+ def method1143: Int = 0
+ def method1144: Int = 0
+ def method1145: Int = 0
+ def method1146: Int = 0
+ def method1147: Int = 0
+ def method1148: Int = 0
+ def method1149: Int = 0
+ def method1150: Int = 0
+ def method1151: Int = 0
+ def method1152: Int = 0
+ def method1153: Int = 0
+ def method1154: Int = 0
+ def method1155: Int = 0
+ def method1156: Int = 0
+ def method1157: Int = 0
+ def method1158: Int = 0
+ def method1159: Int = 0
+ def method1160: Int = 0
+ def method1161: Int = 0
+ def method1162: Int = 0
+ def method1163: Int = 0
+ def method1164: Int = 0
+ def method1165: Int = 0
+ def method1166: Int = 0
+ def method1167: Int = 0
+ def method1168: Int = 0
+ def method1169: Int = 0
+ def method1170: Int = 0
+ def method1171: Int = 0
+ def method1172: Int = 0
+ def method1173: Int = 0
+ def method1174: Int = 0
+ def method1175: Int = 0
+ def method1176: Int = 0
+ def method1177: Int = 0
+ def method1178: Int = 0
+ def method1179: Int = 0
+ def method1180: Int = 0
+ def method1181: Int = 0
+ def method1182: Int = 0
+ def method1183: Int = 0
+ def method1184: Int = 0
+ def method1185: Int = 0
+ def method1186: Int = 0
+ def method1187: Int = 0
+ def method1188: Int = 0
+ def method1189: Int = 0
+ def method1190: Int = 0
+ def method1191: Int = 0
+ def method1192: Int = 0
+ def method1193: Int = 0
+ def method1194: Int = 0
+ def method1195: Int = 0
+ def method1196: Int = 0
+ def method1197: Int = 0
+ def method1198: Int = 0
+ def method1199: Int = 0
+ def method1200: Int = 0
+ def method1201: Int = 0
+ def method1202: Int = 0
+ def method1203: Int = 0
+ def method1204: Int = 0
+ def method1205: Int = 0
+ def method1206: Int = 0
+ def method1207: Int = 0
+ def method1208: Int = 0
+ def method1209: Int = 0
+ def method1210: Int = 0
+ def method1211: Int = 0
+ def method1212: Int = 0
+ def method1213: Int = 0
+ def method1214: Int = 0
+ def method1215: Int = 0
+ def method1216: Int = 0
+ def method1217: Int = 0
+ def method1218: Int = 0
+ def method1219: Int = 0
+ def method1220: Int = 0
+ def method1221: Int = 0
+ def method1222: Int = 0
+ def method1223: Int = 0
+ def method1224: Int = 0
+ def method1225: Int = 0
+ def method1226: Int = 0
+ def method1227: Int = 0
+ def method1228: Int = 0
+ def method1229: Int = 0
+ def method1230: Int = 0
+ def method1231: Int = 0
+ def method1232: Int = 0
+ def method1233: Int = 0
+ def method1234: Int = 0
+ def method1235: Int = 0
+ def method1236: Int = 0
+ def method1237: Int = 0
+ def method1238: Int = 0
+ def method1239: Int = 0
+ def method1240: Int = 0
+ def method1241: Int = 0
+ def method1242: Int = 0
+ def method1243: Int = 0
+ def method1244: Int = 0
+ def method1245: Int = 0
+ def method1246: Int = 0
+ def method1247: Int = 0
+ def method1248: Int = 0
+ def method1249: Int = 0
+ def method1250: Int = 0
+ def method1251: Int = 0
+ def method1252: Int = 0
+ def method1253: Int = 0
+ def method1254: Int = 0
+ def method1255: Int = 0
+ def method1256: Int = 0
+ def method1257: Int = 0
+ def method1258: Int = 0
+ def method1259: Int = 0
+ def method1260: Int = 0
+ def method1261: Int = 0
+ def method1262: Int = 0
+ def method1263: Int = 0
+ def method1264: Int = 0
+ def method1265: Int = 0
+ def method1266: Int = 0
+ def method1267: Int = 0
+ def method1268: Int = 0
+ def method1269: Int = 0
+ def method1270: Int = 0
+ def method1271: Int = 0
+ def method1272: Int = 0
+ def method1273: Int = 0
+ def method1274: Int = 0
+ def method1275: Int = 0
+ def method1276: Int = 0
+ def method1277: Int = 0
+ def method1278: Int = 0
+ def method1279: Int = 0
+ def method1280: Int = 0
+ def method1281: Int = 0
+ def method1282: Int = 0
+ def method1283: Int = 0
+ def method1284: Int = 0
+ def method1285: Int = 0
+ def method1286: Int = 0
+ def method1287: Int = 0
+ def method1288: Int = 0
+ def method1289: Int = 0
+ def method1290: Int = 0
+ def method1291: Int = 0
+ def method1292: Int = 0
+ def method1293: Int = 0
+ def method1294: Int = 0
+ def method1295: Int = 0
+ def method1296: Int = 0
+ def method1297: Int = 0
+ def method1298: Int = 0
+ def method1299: Int = 0
+ def method1300: Int = 0
+ def method1301: Int = 0
+ def method1302: Int = 0
+ def method1303: Int = 0
+ def method1304: Int = 0
+ def method1305: Int = 0
+ def method1306: Int = 0
+ def method1307: Int = 0
+ def method1308: Int = 0
+ def method1309: Int = 0
+ def method1310: Int = 0
+ def method1311: Int = 0
+ def method1312: Int = 0
+ def method1313: Int = 0
+ def method1314: Int = 0
+ def method1315: Int = 0
+ def method1316: Int = 0
+ def method1317: Int = 0
+ def method1318: Int = 0
+ def method1319: Int = 0
+ def method1320: Int = 0
+ def method1321: Int = 0
+ def method1322: Int = 0
+ def method1323: Int = 0
+ def method1324: Int = 0
+ def method1325: Int = 0
+ def method1326: Int = 0
+ def method1327: Int = 0
+ def method1328: Int = 0
+ def method1329: Int = 0
+ def method1330: Int = 0
+ def method1331: Int = 0
+ def method1332: Int = 0
+ def method1333: Int = 0
+ def method1334: Int = 0
+ def method1335: Int = 0
+ def method1336: Int = 0
+ def method1337: Int = 0
+ def method1338: Int = 0
+ def method1339: Int = 0
+ def method1340: Int = 0
+ def method1341: Int = 0
+ def method1342: Int = 0
+ def method1343: Int = 0
+ def method1344: Int = 0
+ def method1345: Int = 0
+ def method1346: Int = 0
+ def method1347: Int = 0
+ def method1348: Int = 0
+ def method1349: Int = 0
+ def method1350: Int = 0
+ def method1351: Int = 0
+ def method1352: Int = 0
+ def method1353: Int = 0
+ def method1354: Int = 0
+ def method1355: Int = 0
+ def method1356: Int = 0
+ def method1357: Int = 0
+ def method1358: Int = 0
+ def method1359: Int = 0
+ def method1360: Int = 0
+ def method1361: Int = 0
+ def method1362: Int = 0
+ def method1363: Int = 0
+ def method1364: Int = 0
+ def method1365: Int = 0
+ def method1366: Int = 0
+ def method1367: Int = 0
+ def method1368: Int = 0
+ def method1369: Int = 0
+ def method1370: Int = 0
+ def method1371: Int = 0
+ def method1372: Int = 0
+ def method1373: Int = 0
+ def method1374: Int = 0
+ def method1375: Int = 0
+ def method1376: Int = 0
+ def method1377: Int = 0
+ def method1378: Int = 0
+ def method1379: Int = 0
+ def method1380: Int = 0
+ def method1381: Int = 0
+ def method1382: Int = 0
+ def method1383: Int = 0
+ def method1384: Int = 0
+ def method1385: Int = 0
+ def method1386: Int = 0
+ def method1387: Int = 0
+ def method1388: Int = 0
+ def method1389: Int = 0
+ def method1390: Int = 0
+ def method1391: Int = 0
+ def method1392: Int = 0
+ def method1393: Int = 0
+ def method1394: Int = 0
+ def method1395: Int = 0
+ def method1396: Int = 0
+ def method1397: Int = 0
+ def method1398: Int = 0
+ def method1399: Int = 0
+ def method1400: Int = 0
+ def method1401: Int = 0
+ def method1402: Int = 0
+ def method1403: Int = 0
+ def method1404: Int = 0
+ def method1405: Int = 0
+ def method1406: Int = 0
+ def method1407: Int = 0
+ def method1408: Int = 0
+ def method1409: Int = 0
+ def method1410: Int = 0
+ def method1411: Int = 0
+ def method1412: Int = 0
+ def method1413: Int = 0
+ def method1414: Int = 0
+ def method1415: Int = 0
+ def method1416: Int = 0
+ def method1417: Int = 0
+ def method1418: Int = 0
+ def method1419: Int = 0
+ def method1420: Int = 0
+ def method1421: Int = 0
+ def method1422: Int = 0
+ def method1423: Int = 0
+ def method1424: Int = 0
+ def method1425: Int = 0
+ def method1426: Int = 0
+ def method1427: Int = 0
+ def method1428: Int = 0
+ def method1429: Int = 0
+ def method1430: Int = 0
+ def method1431: Int = 0
+ def method1432: Int = 0
+ def method1433: Int = 0
+ def method1434: Int = 0
+ def method1435: Int = 0
+ def method1436: Int = 0
+ def method1437: Int = 0
+ def method1438: Int = 0
+ def method1439: Int = 0
+ def method1440: Int = 0
+ def method1441: Int = 0
+ def method1442: Int = 0
+ def method1443: Int = 0
+ def method1444: Int = 0
+ def method1445: Int = 0
+ def method1446: Int = 0
+ def method1447: Int = 0
+ def method1448: Int = 0
+ def method1449: Int = 0
+ def method1450: Int = 0
+ def method1451: Int = 0
+ def method1452: Int = 0
+ def method1453: Int = 0
+ def method1454: Int = 0
+ def method1455: Int = 0
+ def method1456: Int = 0
+ def method1457: Int = 0
+ def method1458: Int = 0
+ def method1459: Int = 0
+ def method1460: Int = 0
+ def method1461: Int = 0
+ def method1462: Int = 0
+ def method1463: Int = 0
+ def method1464: Int = 0
+ def method1465: Int = 0
+ def method1466: Int = 0
+ def method1467: Int = 0
+ def method1468: Int = 0
+ def method1469: Int = 0
+ def method1470: Int = 0
+ def method1471: Int = 0
+ def method1472: Int = 0
+ def method1473: Int = 0
+ def method1474: Int = 0
+ def method1475: Int = 0
+ def method1476: Int = 0
+ def method1477: Int = 0
+ def method1478: Int = 0
+ def method1479: Int = 0
+ def method1480: Int = 0
+ def method1481: Int = 0
+ def method1482: Int = 0
+ def method1483: Int = 0
+ def method1484: Int = 0
+ def method1485: Int = 0
+ def method1486: Int = 0
+ def method1487: Int = 0
+ def method1488: Int = 0
+ def method1489: Int = 0
+ def method1490: Int = 0
+ def method1491: Int = 0
+ def method1492: Int = 0
+ def method1493: Int = 0
+ def method1494: Int = 0
+ def method1495: Int = 0
+ def method1496: Int = 0
+ def method1497: Int = 0
+ def method1498: Int = 0
+ def method1499: Int = 0
+ def method1500: Int = 0
+ def method1501: Int = 0
+ def method1502: Int = 0
+ def method1503: Int = 0
+ def method1504: Int = 0
+ def method1505: Int = 0
+ def method1506: Int = 0
+ def method1507: Int = 0
+ def method1508: Int = 0
+ def method1509: Int = 0
+ def method1510: Int = 0
+ def method1511: Int = 0
+ def method1512: Int = 0
+ def method1513: Int = 0
+ def method1514: Int = 0
+ def method1515: Int = 0
+ def method1516: Int = 0
+ def method1517: Int = 0
+ def method1518: Int = 0
+ def method1519: Int = 0
+ def method1520: Int = 0
+ def method1521: Int = 0
+ def method1522: Int = 0
+ def method1523: Int = 0
+ def method1524: Int = 0
+ def method1525: Int = 0
+ def method1526: Int = 0
+ def method1527: Int = 0
+ def method1528: Int = 0
+ def method1529: Int = 0
+ def method1530: Int = 0
+ def method1531: Int = 0
+ def method1532: Int = 0
+ def method1533: Int = 0
+ def method1534: Int = 0
+ def method1535: Int = 0
+ def method1536: Int = 0
+ def method1537: Int = 0
+ def method1538: Int = 0
+ def method1539: Int = 0
+ def method1540: Int = 0
+ def method1541: Int = 0
+ def method1542: Int = 0
+ def method1543: Int = 0
+ def method1544: Int = 0
+ def method1545: Int = 0
+ def method1546: Int = 0
+ def method1547: Int = 0
+ def method1548: Int = 0
+ def method1549: Int = 0
+ def method1550: Int = 0
+ def method1551: Int = 0
+ def method1552: Int = 0
+ def method1553: Int = 0
+ def method1554: Int = 0
+ def method1555: Int = 0
+ def method1556: Int = 0
+ def method1557: Int = 0
+ def method1558: Int = 0
+ def method1559: Int = 0
+ def method1560: Int = 0
+ def method1561: Int = 0
+ def method1562: Int = 0
+ def method1563: Int = 0
+ def method1564: Int = 0
+ def method1565: Int = 0
+ def method1566: Int = 0
+ def method1567: Int = 0
+ def method1568: Int = 0
+ def method1569: Int = 0
+ def method1570: Int = 0
+ def method1571: Int = 0
+ def method1572: Int = 0
+ def method1573: Int = 0
+ def method1574: Int = 0
+ def method1575: Int = 0
+ def method1576: Int = 0
+ def method1577: Int = 0
+ def method1578: Int = 0
+ def method1579: Int = 0
+ def method1580: Int = 0
+ def method1581: Int = 0
+ def method1582: Int = 0
+ def method1583: Int = 0
+ def method1584: Int = 0
+ def method1585: Int = 0
+ def method1586: Int = 0
+ def method1587: Int = 0
+ def method1588: Int = 0
+ def method1589: Int = 0
+ def method1590: Int = 0
+ def method1591: Int = 0
+ def method1592: Int = 0
+ def method1593: Int = 0
+ def method1594: Int = 0
+ def method1595: Int = 0
+ def method1596: Int = 0
+ def method1597: Int = 0
+ def method1598: Int = 0
+ def method1599: Int = 0
+ def method1600: Int = 0
+ def method1601: Int = 0
+ def method1602: Int = 0
+ def method1603: Int = 0
+ def method1604: Int = 0
+ def method1605: Int = 0
+ def method1606: Int = 0
+ def method1607: Int = 0
+ def method1608: Int = 0
+ def method1609: Int = 0
+ def method1610: Int = 0
+ def method1611: Int = 0
+ def method1612: Int = 0
+ def method1613: Int = 0
+ def method1614: Int = 0
+ def method1615: Int = 0
+ def method1616: Int = 0
+ def method1617: Int = 0
+ def method1618: Int = 0
+ def method1619: Int = 0
+ def method1620: Int = 0
+ def method1621: Int = 0
+ def method1622: Int = 0
+ def method1623: Int = 0
+ def method1624: Int = 0
+ def method1625: Int = 0
+ def method1626: Int = 0
+ def method1627: Int = 0
+ def method1628: Int = 0
+ def method1629: Int = 0
+ def method1630: Int = 0
+ def method1631: Int = 0
+ def method1632: Int = 0
+ def method1633: Int = 0
+ def method1634: Int = 0
+ def method1635: Int = 0
+ def method1636: Int = 0
+ def method1637: Int = 0
+ def method1638: Int = 0
+ def method1639: Int = 0
+ def method1640: Int = 0
+ def method1641: Int = 0
+ def method1642: Int = 0
+ def method1643: Int = 0
+ def method1644: Int = 0
+ def method1645: Int = 0
+ def method1646: Int = 0
+ def method1647: Int = 0
+ def method1648: Int = 0
+ def method1649: Int = 0
+ def method1650: Int = 0
+ def method1651: Int = 0
+ def method1652: Int = 0
+ def method1653: Int = 0
+ def method1654: Int = 0
+ def method1655: Int = 0
+ def method1656: Int = 0
+ def method1657: Int = 0
+ def method1658: Int = 0
+ def method1659: Int = 0
+ def method1660: Int = 0
+ def method1661: Int = 0
+ def method1662: Int = 0
+ def method1663: Int = 0
+ def method1664: Int = 0
+ def method1665: Int = 0
+ def method1666: Int = 0
+ def method1667: Int = 0
+ def method1668: Int = 0
+ def method1669: Int = 0
+ def method1670: Int = 0
+ def method1671: Int = 0
+ def method1672: Int = 0
+ def method1673: Int = 0
+ def method1674: Int = 0
+ def method1675: Int = 0
+ def method1676: Int = 0
+ def method1677: Int = 0
+ def method1678: Int = 0
+ def method1679: Int = 0
+ def method1680: Int = 0
+ def method1681: Int = 0
+ def method1682: Int = 0
+ def method1683: Int = 0
+ def method1684: Int = 0
+ def method1685: Int = 0
+ def method1686: Int = 0
+ def method1687: Int = 0
+ def method1688: Int = 0
+ def method1689: Int = 0
+ def method1690: Int = 0
+ def method1691: Int = 0
+ def method1692: Int = 0
+ def method1693: Int = 0
+ def method1694: Int = 0
+ def method1695: Int = 0
+ def method1696: Int = 0
+ def method1697: Int = 0
+ def method1698: Int = 0
+ def method1699: Int = 0
+ def method1700: Int = 0
+ def method1701: Int = 0
+ def method1702: Int = 0
+ def method1703: Int = 0
+ def method1704: Int = 0
+ def method1705: Int = 0
+ def method1706: Int = 0
+ def method1707: Int = 0
+ def method1708: Int = 0
+ def method1709: Int = 0
+ def method1710: Int = 0
+ def method1711: Int = 0
+ def method1712: Int = 0
+ def method1713: Int = 0
+ def method1714: Int = 0
+ def method1715: Int = 0
+ def method1716: Int = 0
+ def method1717: Int = 0
+ def method1718: Int = 0
+ def method1719: Int = 0
+ def method1720: Int = 0
+ def method1721: Int = 0
+ def method1722: Int = 0
+ def method1723: Int = 0
+ def method1724: Int = 0
+ def method1725: Int = 0
+ def method1726: Int = 0
+ def method1727: Int = 0
+ def method1728: Int = 0
+ def method1729: Int = 0
+ def method1730: Int = 0
+ def method1731: Int = 0
+ def method1732: Int = 0
+ def method1733: Int = 0
+ def method1734: Int = 0
+ def method1735: Int = 0
+ def method1736: Int = 0
+ def method1737: Int = 0
+ def method1738: Int = 0
+ def method1739: Int = 0
+ def method1740: Int = 0
+ def method1741: Int = 0
+ def method1742: Int = 0
+ def method1743: Int = 0
+ def method1744: Int = 0
+ def method1745: Int = 0
+ def method1746: Int = 0
+ def method1747: Int = 0
+ def method1748: Int = 0
+ def method1749: Int = 0
+ def method1750: Int = 0
+ def method1751: Int = 0
+ def method1752: Int = 0
+ def method1753: Int = 0
+ def method1754: Int = 0
+ def method1755: Int = 0
+ def method1756: Int = 0
+ def method1757: Int = 0
+ def method1758: Int = 0
+ def method1759: Int = 0
+ def method1760: Int = 0
+ def method1761: Int = 0
+ def method1762: Int = 0
+ def method1763: Int = 0
+ def method1764: Int = 0
+ def method1765: Int = 0
+ def method1766: Int = 0
+ def method1767: Int = 0
+ def method1768: Int = 0
+ def method1769: Int = 0
+ def method1770: Int = 0
+ def method1771: Int = 0
+ def method1772: Int = 0
+ def method1773: Int = 0
+ def method1774: Int = 0
+ def method1775: Int = 0
+ def method1776: Int = 0
+ def method1777: Int = 0
+ def method1778: Int = 0
+ def method1779: Int = 0
+ def method1780: Int = 0
+ def method1781: Int = 0
+ def method1782: Int = 0
+ def method1783: Int = 0
+ def method1784: Int = 0
+ def method1785: Int = 0
+ def method1786: Int = 0
+ def method1787: Int = 0
+ def method1788: Int = 0
+ def method1789: Int = 0
+ def method1790: Int = 0
+ def method1791: Int = 0
+ def method1792: Int = 0
+ def method1793: Int = 0
+ def method1794: Int = 0
+ def method1795: Int = 0
+ def method1796: Int = 0
+ def method1797: Int = 0
+ def method1798: Int = 0
+ def method1799: Int = 0
+ def method1800: Int = 0
+ def method1801: Int = 0
+ def method1802: Int = 0
+ def method1803: Int = 0
+ def method1804: Int = 0
+ def method1805: Int = 0
+ def method1806: Int = 0
+ def method1807: Int = 0
+ def method1808: Int = 0
+ def method1809: Int = 0
+ def method1810: Int = 0
+ def method1811: Int = 0
+ def method1812: Int = 0
+ def method1813: Int = 0
+ def method1814: Int = 0
+ def method1815: Int = 0
+ def method1816: Int = 0
+ def method1817: Int = 0
+ def method1818: Int = 0
+ def method1819: Int = 0
+ def method1820: Int = 0
+ def method1821: Int = 0
+ def method1822: Int = 0
+ def method1823: Int = 0
+ def method1824: Int = 0
+ def method1825: Int = 0
+ def method1826: Int = 0
+ def method1827: Int = 0
+ def method1828: Int = 0
+ def method1829: Int = 0
+ def method1830: Int = 0
+ def method1831: Int = 0
+ def method1832: Int = 0
+ def method1833: Int = 0
+ def method1834: Int = 0
+ def method1835: Int = 0
+ def method1836: Int = 0
+ def method1837: Int = 0
+ def method1838: Int = 0
+ def method1839: Int = 0
+ def method1840: Int = 0
+ def method1841: Int = 0
+ def method1842: Int = 0
+ def method1843: Int = 0
+ def method1844: Int = 0
+ def method1845: Int = 0
+ def method1846: Int = 0
+ def method1847: Int = 0
+ def method1848: Int = 0
+ def method1849: Int = 0
+ def method1850: Int = 0
+ def method1851: Int = 0
+ def method1852: Int = 0
+ def method1853: Int = 0
+ def method1854: Int = 0
+ def method1855: Int = 0
+ def method1856: Int = 0
+ def method1857: Int = 0
+ def method1858: Int = 0
+ def method1859: Int = 0
+ def method1860: Int = 0
+ def method1861: Int = 0
+ def method1862: Int = 0
+ def method1863: Int = 0
+ def method1864: Int = 0
+ def method1865: Int = 0
+ def method1866: Int = 0
+ def method1867: Int = 0
+ def method1868: Int = 0
+ def method1869: Int = 0
+ def method1870: Int = 0
+ def method1871: Int = 0
+ def method1872: Int = 0
+ def method1873: Int = 0
+ def method1874: Int = 0
+ def method1875: Int = 0
+ def method1876: Int = 0
+ def method1877: Int = 0
+ def method1878: Int = 0
+ def method1879: Int = 0
+ def method1880: Int = 0
+ def method1881: Int = 0
+ def method1882: Int = 0
+ def method1883: Int = 0
+ def method1884: Int = 0
+ def method1885: Int = 0
+ def method1886: Int = 0
+ def method1887: Int = 0
+ def method1888: Int = 0
+ def method1889: Int = 0
+ def method1890: Int = 0
+ def method1891: Int = 0
+ def method1892: Int = 0
+ def method1893: Int = 0
+ def method1894: Int = 0
+ def method1895: Int = 0
+ def method1896: Int = 0
+ def method1897: Int = 0
+ def method1898: Int = 0
+ def method1899: Int = 0
+ def method1900: Int = 0
+ def method1901: Int = 0
+ def method1902: Int = 0
+ def method1903: Int = 0
+ def method1904: Int = 0
+ def method1905: Int = 0
+ def method1906: Int = 0
+ def method1907: Int = 0
+ def method1908: Int = 0
+ def method1909: Int = 0
+ def method1910: Int = 0
+ def method1911: Int = 0
+ def method1912: Int = 0
+ def method1913: Int = 0
+ def method1914: Int = 0
+ def method1915: Int = 0
+ def method1916: Int = 0
+ def method1917: Int = 0
+ def method1918: Int = 0
+ def method1919: Int = 0
+ def method1920: Int = 0
+ def method1921: Int = 0
+ def method1922: Int = 0
+ def method1923: Int = 0
+ def method1924: Int = 0
+ def method1925: Int = 0
+ def method1926: Int = 0
+ def method1927: Int = 0
+ def method1928: Int = 0
+ def method1929: Int = 0
+ def method1930: Int = 0
+ def method1931: Int = 0
+ def method1932: Int = 0
+ def method1933: Int = 0
+ def method1934: Int = 0
+ def method1935: Int = 0
+ def method1936: Int = 0
+ def method1937: Int = 0
+ def method1938: Int = 0
+ def method1939: Int = 0
+ def method1940: Int = 0
+ def method1941: Int = 0
+ def method1942: Int = 0
+ def method1943: Int = 0
+ def method1944: Int = 0
+ def method1945: Int = 0
+ def method1946: Int = 0
+ def method1947: Int = 0
+ def method1948: Int = 0
+ def method1949: Int = 0
+ def method1950: Int = 0
+ def method1951: Int = 0
+ def method1952: Int = 0
+ def method1953: Int = 0
+ def method1954: Int = 0
+ def method1955: Int = 0
+ def method1956: Int = 0
+ def method1957: Int = 0
+ def method1958: Int = 0
+ def method1959: Int = 0
+ def method1960: Int = 0
+ def method1961: Int = 0
+ def method1962: Int = 0
+ def method1963: Int = 0
+ def method1964: Int = 0
+ def method1965: Int = 0
+ def method1966: Int = 0
+ def method1967: Int = 0
+ def method1968: Int = 0
+ def method1969: Int = 0
+ def method1970: Int = 0
+ def method1971: Int = 0
+ def method1972: Int = 0
+ def method1973: Int = 0
+ def method1974: Int = 0
+ def method1975: Int = 0
+ def method1976: Int = 0
+ def method1977: Int = 0
+ def method1978: Int = 0
+ def method1979: Int = 0
+ def method1980: Int = 0
+ def method1981: Int = 0
+ def method1982: Int = 0
+ def method1983: Int = 0
+ def method1984: Int = 0
+ def method1985: Int = 0
+ def method1986: Int = 0
+ def method1987: Int = 0
+ def method1988: Int = 0
+ def method1989: Int = 0
+ def method1990: Int = 0
+ def method1991: Int = 0
+ def method1992: Int = 0
+ def method1993: Int = 0
+ def method1994: Int = 0
+ def method1995: Int = 0
+ def method1996: Int = 0
+ def method1997: Int = 0
+ def method1998: Int = 0
+ def method1999: Int = 0
+ def method2000: Int = 0
+ def method2001: Int = 0
+ def method2002: Int = 0
+ def method2003: Int = 0
+ def method2004: Int = 0
+ def method2005: Int = 0
+ def method2006: Int = 0
+ def method2007: Int = 0
+ def method2008: Int = 0
+ def method2009: Int = 0
+ def method2010: Int = 0
+ def method2011: Int = 0
+ def method2012: Int = 0
+ def method2013: Int = 0
+ def method2014: Int = 0
+ def method2015: Int = 0
+ def method2016: Int = 0
+ def method2017: Int = 0
+ def method2018: Int = 0
+ def method2019: Int = 0
+ def method2020: Int = 0
+ def method2021: Int = 0
+ def method2022: Int = 0
+ def method2023: Int = 0
+ def method2024: Int = 0
+ def method2025: Int = 0
+ def method2026: Int = 0
+ def method2027: Int = 0
+ def method2028: Int = 0
+ def method2029: Int = 0
+ def method2030: Int = 0
+ def method2031: Int = 0
+ def method2032: Int = 0
+ def method2033: Int = 0
+ def method2034: Int = 0
+ def method2035: Int = 0
+ def method2036: Int = 0
+ def method2037: Int = 0
+ def method2038: Int = 0
+ def method2039: Int = 0
+ def method2040: Int = 0
+ def method2041: Int = 0
+ def method2042: Int = 0
+ def method2043: Int = 0
+ def method2044: Int = 0
+ def method2045: Int = 0
+ def method2046: Int = 0
+ def method2047: Int = 0
+ def method2048: Int = 0
+ def method2049: Int = 0
+ def method2050: Int = 0
+ def method2051: Int = 0
+ def method2052: Int = 0
+ def method2053: Int = 0
+ def method2054: Int = 0
+ def method2055: Int = 0
+ def method2056: Int = 0
+ def method2057: Int = 0
+ def method2058: Int = 0
+ def method2059: Int = 0
+ def method2060: Int = 0
+ def method2061: Int = 0
+ def method2062: Int = 0
+ def method2063: Int = 0
+ def method2064: Int = 0
+ def method2065: Int = 0
+ def method2066: Int = 0
+ def method2067: Int = 0
+ def method2068: Int = 0
+ def method2069: Int = 0
+ def method2070: Int = 0
+ def method2071: Int = 0
+ def method2072: Int = 0
+ def method2073: Int = 0
+ def method2074: Int = 0
+ def method2075: Int = 0
+ def method2076: Int = 0
+ def method2077: Int = 0
+ def method2078: Int = 0
+ def method2079: Int = 0
+ def method2080: Int = 0
+ def method2081: Int = 0
+ def method2082: Int = 0
+ def method2083: Int = 0
+ def method2084: Int = 0
+ def method2085: Int = 0
+ def method2086: Int = 0
+ def method2087: Int = 0
+ def method2088: Int = 0
+ def method2089: Int = 0
+ def method2090: Int = 0
+ def method2091: Int = 0
+ def method2092: Int = 0
+ def method2093: Int = 0
+ def method2094: Int = 0
+ def method2095: Int = 0
+ def method2096: Int = 0
+ def method2097: Int = 0
+ def method2098: Int = 0
+ def method2099: Int = 0
+ def method2100: Int = 0
+ def method2101: Int = 0
+ def method2102: Int = 0
+ def method2103: Int = 0
+ def method2104: Int = 0
+ def method2105: Int = 0
+ def method2106: Int = 0
+ def method2107: Int = 0
+ def method2108: Int = 0
+ def method2109: Int = 0
+ def method2110: Int = 0
+ def method2111: Int = 0
+ def method2112: Int = 0
+ def method2113: Int = 0
+ def method2114: Int = 0
+ def method2115: Int = 0
+ def method2116: Int = 0
+ def method2117: Int = 0
+ def method2118: Int = 0
+ def method2119: Int = 0
+ def method2120: Int = 0
+ def method2121: Int = 0
+ def method2122: Int = 0
+ def method2123: Int = 0
+ def method2124: Int = 0
+ def method2125: Int = 0
+ def method2126: Int = 0
+ def method2127: Int = 0
+ def method2128: Int = 0
+ def method2129: Int = 0
+ def method2130: Int = 0
+ def method2131: Int = 0
+ def method2132: Int = 0
+ def method2133: Int = 0
+ def method2134: Int = 0
+ def method2135: Int = 0
+ def method2136: Int = 0
+ def method2137: Int = 0
+ def method2138: Int = 0
+ def method2139: Int = 0
+ def method2140: Int = 0
+ def method2141: Int = 0
+ def method2142: Int = 0
+ def method2143: Int = 0
+ def method2144: Int = 0
+ def method2145: Int = 0
+ def method2146: Int = 0
+ def method2147: Int = 0
+ def method2148: Int = 0
+ def method2149: Int = 0
+ def method2150: Int = 0
+ def method2151: Int = 0
+ def method2152: Int = 0
+ def method2153: Int = 0
+ def method2154: Int = 0
+ def method2155: Int = 0
+ def method2156: Int = 0
+ def method2157: Int = 0
+ def method2158: Int = 0
+ def method2159: Int = 0
+ def method2160: Int = 0
+ def method2161: Int = 0
+ def method2162: Int = 0
+ def method2163: Int = 0
+ def method2164: Int = 0
+ def method2165: Int = 0
+ def method2166: Int = 0
+ def method2167: Int = 0
+ def method2168: Int = 0
+ def method2169: Int = 0
+ def method2170: Int = 0
+ def method2171: Int = 0
+ def method2172: Int = 0
+ def method2173: Int = 0
+ def method2174: Int = 0
+ def method2175: Int = 0
+ def method2176: Int = 0
+ def method2177: Int = 0
+ def method2178: Int = 0
+ def method2179: Int = 0
+ def method2180: Int = 0
+ def method2181: Int = 0
+ def method2182: Int = 0
+ def method2183: Int = 0
+ def method2184: Int = 0
+ def method2185: Int = 0
+ def method2186: Int = 0
+ def method2187: Int = 0
+ def method2188: Int = 0
+ def method2189: Int = 0
+ def method2190: Int = 0
+ def method2191: Int = 0
+ def method2192: Int = 0
+ def method2193: Int = 0
+ def method2194: Int = 0
+ def method2195: Int = 0
+ def method2196: Int = 0
+ def method2197: Int = 0
+ def method2198: Int = 0
+ def method2199: Int = 0
+ def method2200: Int = 0
+ def method2201: Int = 0
+ def method2202: Int = 0
+ def method2203: Int = 0
+ def method2204: Int = 0
+ def method2205: Int = 0
+ def method2206: Int = 0
+ def method2207: Int = 0
+ def method2208: Int = 0
+ def method2209: Int = 0
+ def method2210: Int = 0
+ def method2211: Int = 0
+ def method2212: Int = 0
+ def method2213: Int = 0
+ def method2214: Int = 0
+ def method2215: Int = 0
+ def method2216: Int = 0
+ def method2217: Int = 0
+ def method2218: Int = 0
+ def method2219: Int = 0
+ def method2220: Int = 0
+ def method2221: Int = 0
+ def method2222: Int = 0
+ def method2223: Int = 0
+ def method2224: Int = 0
+ def method2225: Int = 0
+ def method2226: Int = 0
+ def method2227: Int = 0
+ def method2228: Int = 0
+ def method2229: Int = 0
+ def method2230: Int = 0
+ def method2231: Int = 0
+ def method2232: Int = 0
+ def method2233: Int = 0
+ def method2234: Int = 0
+ def method2235: Int = 0
+ def method2236: Int = 0
+ def method2237: Int = 0
+ def method2238: Int = 0
+ def method2239: Int = 0
+ def method2240: Int = 0
+ def method2241: Int = 0
+ def method2242: Int = 0
+ def method2243: Int = 0
+ def method2244: Int = 0
+ def method2245: Int = 0
+ def method2246: Int = 0
+ def method2247: Int = 0
+ def method2248: Int = 0
+ def method2249: Int = 0
+ def method2250: Int = 0
+ def method2251: Int = 0
+ def method2252: Int = 0
+ def method2253: Int = 0
+ def method2254: Int = 0
+ def method2255: Int = 0
+ def method2256: Int = 0
+ def method2257: Int = 0
+ def method2258: Int = 0
+ def method2259: Int = 0
+ def method2260: Int = 0
+ def method2261: Int = 0
+ def method2262: Int = 0
+ def method2263: Int = 0
+ def method2264: Int = 0
+ def method2265: Int = 0
+ def method2266: Int = 0
+ def method2267: Int = 0
+ def method2268: Int = 0
+ def method2269: Int = 0
+ def method2270: Int = 0
+ def method2271: Int = 0
+ def method2272: Int = 0
+ def method2273: Int = 0
+ def method2274: Int = 0
+ def method2275: Int = 0
+ def method2276: Int = 0
+ def method2277: Int = 0
+ def method2278: Int = 0
+ def method2279: Int = 0
+ def method2280: Int = 0
+ def method2281: Int = 0
+ def method2282: Int = 0
+ def method2283: Int = 0
+ def method2284: Int = 0
+ def method2285: Int = 0
+ def method2286: Int = 0
+ def method2287: Int = 0
+ def method2288: Int = 0
+ def method2289: Int = 0
+ def method2290: Int = 0
+ def method2291: Int = 0
+ def method2292: Int = 0
+ def method2293: Int = 0
+ def method2294: Int = 0
+ def method2295: Int = 0
+ def method2296: Int = 0
+ def method2297: Int = 0
+ def method2298: Int = 0
+ def method2299: Int = 0
+ def method2300: Int = 0
+ def method2301: Int = 0
+ def method2302: Int = 0
+ def method2303: Int = 0
+ def method2304: Int = 0
+ def method2305: Int = 0
+ def method2306: Int = 0
+ def method2307: Int = 0
+ def method2308: Int = 0
+ def method2309: Int = 0
+ def method2310: Int = 0
+ def method2311: Int = 0
+ def method2312: Int = 0
+ def method2313: Int = 0
+ def method2314: Int = 0
+ def method2315: Int = 0
+ def method2316: Int = 0
+ def method2317: Int = 0
+ def method2318: Int = 0
+ def method2319: Int = 0
+ def method2320: Int = 0
+ def method2321: Int = 0
+ def method2322: Int = 0
+ def method2323: Int = 0
+ def method2324: Int = 0
+ def method2325: Int = 0
+ def method2326: Int = 0
+ def method2327: Int = 0
+ def method2328: Int = 0
+ def method2329: Int = 0
+ def method2330: Int = 0
+ def method2331: Int = 0
+ def method2332: Int = 0
+ def method2333: Int = 0
+ def method2334: Int = 0
+ def method2335: Int = 0
+ def method2336: Int = 0
+ def method2337: Int = 0
+ def method2338: Int = 0
+ def method2339: Int = 0
+ def method2340: Int = 0
+ def method2341: Int = 0
+ def method2342: Int = 0
+ def method2343: Int = 0
+ def method2344: Int = 0
+ def method2345: Int = 0
+ def method2346: Int = 0
+ def method2347: Int = 0
+ def method2348: Int = 0
+ def method2349: Int = 0
+ def method2350: Int = 0
+ def method2351: Int = 0
+ def method2352: Int = 0
+ def method2353: Int = 0
+ def method2354: Int = 0
+ def method2355: Int = 0
+ def method2356: Int = 0
+ def method2357: Int = 0
+ def method2358: Int = 0
+ def method2359: Int = 0
+ def method2360: Int = 0
+ def method2361: Int = 0
+ def method2362: Int = 0
+ def method2363: Int = 0
+ def method2364: Int = 0
+ def method2365: Int = 0
+ def method2366: Int = 0
+ def method2367: Int = 0
+ def method2368: Int = 0
+ def method2369: Int = 0
+ def method2370: Int = 0
+ def method2371: Int = 0
+ def method2372: Int = 0
+ def method2373: Int = 0
+ def method2374: Int = 0
+ def method2375: Int = 0
+ def method2376: Int = 0
+ def method2377: Int = 0
+ def method2378: Int = 0
+ def method2379: Int = 0
+ def method2380: Int = 0
+ def method2381: Int = 0
+ def method2382: Int = 0
+ def method2383: Int = 0
+ def method2384: Int = 0
+ def method2385: Int = 0
+ def method2386: Int = 0
+ def method2387: Int = 0
+ def method2388: Int = 0
+ def method2389: Int = 0
+ def method2390: Int = 0
+ def method2391: Int = 0
+ def method2392: Int = 0
+ def method2393: Int = 0
+ def method2394: Int = 0
+ def method2395: Int = 0
+ def method2396: Int = 0
+ def method2397: Int = 0
+ def method2398: Int = 0
+ def method2399: Int = 0
+ def method2400: Int = 0
+ def method2401: Int = 0
+ def method2402: Int = 0
+ def method2403: Int = 0
+ def method2404: Int = 0
+ def method2405: Int = 0
+ def method2406: Int = 0
+ def method2407: Int = 0
+ def method2408: Int = 0
+ def method2409: Int = 0
+ def method2410: Int = 0
+ def method2411: Int = 0
+ def method2412: Int = 0
+ def method2413: Int = 0
+ def method2414: Int = 0
+ def method2415: Int = 0
+ def method2416: Int = 0
+ def method2417: Int = 0
+ def method2418: Int = 0
+ def method2419: Int = 0
+ def method2420: Int = 0
+ def method2421: Int = 0
+ def method2422: Int = 0
+ def method2423: Int = 0
+ def method2424: Int = 0
+ def method2425: Int = 0
+ def method2426: Int = 0
+ def method2427: Int = 0
+ def method2428: Int = 0
+ def method2429: Int = 0
+ def method2430: Int = 0
+ def method2431: Int = 0
+ def method2432: Int = 0
+ def method2433: Int = 0
+ def method2434: Int = 0
+ def method2435: Int = 0
+ def method2436: Int = 0
+ def method2437: Int = 0
+ def method2438: Int = 0
+ def method2439: Int = 0
+ def method2440: Int = 0
+ def method2441: Int = 0
+ def method2442: Int = 0
+ def method2443: Int = 0
+ def method2444: Int = 0
+ def method2445: Int = 0
+ def method2446: Int = 0
+ def method2447: Int = 0
+ def method2448: Int = 0
+ def method2449: Int = 0
+ def method2450: Int = 0
+ def method2451: Int = 0
+ def method2452: Int = 0
+ def method2453: Int = 0
+ def method2454: Int = 0
+ def method2455: Int = 0
+ def method2456: Int = 0
+ def method2457: Int = 0
+ def method2458: Int = 0
+ def method2459: Int = 0
+ def method2460: Int = 0
+ def method2461: Int = 0
+ def method2462: Int = 0
+ def method2463: Int = 0
+ def method2464: Int = 0
+ def method2465: Int = 0
+ def method2466: Int = 0
+ def method2467: Int = 0
+ def method2468: Int = 0
+ def method2469: Int = 0
+ def method2470: Int = 0
+ def method2471: Int = 0
+ def method2472: Int = 0
+ def method2473: Int = 0
+ def method2474: Int = 0
+ def method2475: Int = 0
+ def method2476: Int = 0
+ def method2477: Int = 0
+ def method2478: Int = 0
+ def method2479: Int = 0
+ def method2480: Int = 0
+ def method2481: Int = 0
+ def method2482: Int = 0
+ def method2483: Int = 0
+ def method2484: Int = 0
+ def method2485: Int = 0
+ def method2486: Int = 0
+ def method2487: Int = 0
+ def method2488: Int = 0
+ def method2489: Int = 0
+ def method2490: Int = 0
+ def method2491: Int = 0
+ def method2492: Int = 0
+ def method2493: Int = 0
+ def method2494: Int = 0
+ def method2495: Int = 0
+ def method2496: Int = 0
+ def method2497: Int = 0
+ def method2498: Int = 0
+ def method2499: Int = 0
+ def method2500: Int = 0
+ def method2501: Int = 0
+ def method2502: Int = 0
+ def method2503: Int = 0
+ def method2504: Int = 0
+ def method2505: Int = 0
+ def method2506: Int = 0
+ def method2507: Int = 0
+ def method2508: Int = 0
+ def method2509: Int = 0
+ def method2510: Int = 0
+ def method2511: Int = 0
+ def method2512: Int = 0
+ def method2513: Int = 0
+ def method2514: Int = 0
+ def method2515: Int = 0
+ def method2516: Int = 0
+ def method2517: Int = 0
+ def method2518: Int = 0
+ def method2519: Int = 0
+ def method2520: Int = 0
+ def method2521: Int = 0
+ def method2522: Int = 0
+ def method2523: Int = 0
+ def method2524: Int = 0
+ def method2525: Int = 0
+ def method2526: Int = 0
+ def method2527: Int = 0
+ def method2528: Int = 0
+ def method2529: Int = 0
+ def method2530: Int = 0
+ def method2531: Int = 0
+ def method2532: Int = 0
+ def method2533: Int = 0
+ def method2534: Int = 0
+ def method2535: Int = 0
+ def method2536: Int = 0
+ def method2537: Int = 0
+ def method2538: Int = 0
+ def method2539: Int = 0
+ def method2540: Int = 0
+ def method2541: Int = 0
+ def method2542: Int = 0
+ def method2543: Int = 0
+ def method2544: Int = 0
+ def method2545: Int = 0
+ def method2546: Int = 0
+ def method2547: Int = 0
+ def method2548: Int = 0
+ def method2549: Int = 0
+ def method2550: Int = 0
+ def method2551: Int = 0
+ def method2552: Int = 0
+ def method2553: Int = 0
+ def method2554: Int = 0
+ def method2555: Int = 0
+ def method2556: Int = 0
+ def method2557: Int = 0
+ def method2558: Int = 0
+ def method2559: Int = 0
+ def method2560: Int = 0
+ def method2561: Int = 0
+ def method2562: Int = 0
+ def method2563: Int = 0
+ def method2564: Int = 0
+ def method2565: Int = 0
+ def method2566: Int = 0
+ def method2567: Int = 0
+ def method2568: Int = 0
+ def method2569: Int = 0
+ def method2570: Int = 0
+ def method2571: Int = 0
+ def method2572: Int = 0
+ def method2573: Int = 0
+ def method2574: Int = 0
+ def method2575: Int = 0
+ def method2576: Int = 0
+ def method2577: Int = 0
+ def method2578: Int = 0
+ def method2579: Int = 0
+ def method2580: Int = 0
+ def method2581: Int = 0
+ def method2582: Int = 0
+ def method2583: Int = 0
+ def method2584: Int = 0
+ def method2585: Int = 0
+ def method2586: Int = 0
+ def method2587: Int = 0
+ def method2588: Int = 0
+ def method2589: Int = 0
+ def method2590: Int = 0
+ def method2591: Int = 0
+ def method2592: Int = 0
+ def method2593: Int = 0
+ def method2594: Int = 0
+ def method2595: Int = 0
+ def method2596: Int = 0
+ def method2597: Int = 0
+ def method2598: Int = 0
+ def method2599: Int = 0
+ def method2600: Int = 0
+ def method2601: Int = 0
+ def method2602: Int = 0
+ def method2603: Int = 0
+ def method2604: Int = 0
+ def method2605: Int = 0
+ def method2606: Int = 0
+ def method2607: Int = 0
+ def method2608: Int = 0
+ def method2609: Int = 0
+ def method2610: Int = 0
+ def method2611: Int = 0
+ def method2612: Int = 0
+ def method2613: Int = 0
+ def method2614: Int = 0
+ def method2615: Int = 0
+ def method2616: Int = 0
+ def method2617: Int = 0
+ def method2618: Int = 0
+ def method2619: Int = 0
+ def method2620: Int = 0
+ def method2621: Int = 0
+ def method2622: Int = 0
+ def method2623: Int = 0
+ def method2624: Int = 0
+ def method2625: Int = 0
+ def method2626: Int = 0
+ def method2627: Int = 0
+ def method2628: Int = 0
+ def method2629: Int = 0
+ def method2630: Int = 0
+ def method2631: Int = 0
+ def method2632: Int = 0
+ def method2633: Int = 0
+ def method2634: Int = 0
+ def method2635: Int = 0
+ def method2636: Int = 0
+ def method2637: Int = 0
+ def method2638: Int = 0
+ def method2639: Int = 0
+ def method2640: Int = 0
+ def method2641: Int = 0
+ def method2642: Int = 0
+ def method2643: Int = 0
+ def method2644: Int = 0
+ def method2645: Int = 0
+ def method2646: Int = 0
+ def method2647: Int = 0
+ def method2648: Int = 0
+ def method2649: Int = 0
+ def method2650: Int = 0
+ def method2651: Int = 0
+ def method2652: Int = 0
+ def method2653: Int = 0
+ def method2654: Int = 0
+ def method2655: Int = 0
+ def method2656: Int = 0
+ def method2657: Int = 0
+ def method2658: Int = 0
+ def method2659: Int = 0
+ def method2660: Int = 0
+ def method2661: Int = 0
+ def method2662: Int = 0
+ def method2663: Int = 0
+ def method2664: Int = 0
+ def method2665: Int = 0
+ def method2666: Int = 0
+ def method2667: Int = 0
+ def method2668: Int = 0
+ def method2669: Int = 0
+ def method2670: Int = 0
+ def method2671: Int = 0
+ def method2672: Int = 0
+ def method2673: Int = 0
+ def method2674: Int = 0
+ def method2675: Int = 0
+ def method2676: Int = 0
+ def method2677: Int = 0
+ def method2678: Int = 0
+ def method2679: Int = 0
+ def method2680: Int = 0
+ def method2681: Int = 0
+ def method2682: Int = 0
+ def method2683: Int = 0
+ def method2684: Int = 0
+ def method2685: Int = 0
+ def method2686: Int = 0
+ def method2687: Int = 0
+ def method2688: Int = 0
+ def method2689: Int = 0
+ def method2690: Int = 0
+ def method2691: Int = 0
+ def method2692: Int = 0
+ def method2693: Int = 0
+ def method2694: Int = 0
+ def method2695: Int = 0
+ def method2696: Int = 0
+ def method2697: Int = 0
+ def method2698: Int = 0
+ def method2699: Int = 0
+ def method2700: Int = 0
+ def method2701: Int = 0
+ def method2702: Int = 0
+ def method2703: Int = 0
+ def method2704: Int = 0
+ def method2705: Int = 0
+ def method2706: Int = 0
+ def method2707: Int = 0
+ def method2708: Int = 0
+ def method2709: Int = 0
+ def method2710: Int = 0
+ def method2711: Int = 0
+ def method2712: Int = 0
+ def method2713: Int = 0
+ def method2714: Int = 0
+ def method2715: Int = 0
+ def method2716: Int = 0
+ def method2717: Int = 0
+ def method2718: Int = 0
+ def method2719: Int = 0
+ def method2720: Int = 0
+ def method2721: Int = 0
+ def method2722: Int = 0
+ def method2723: Int = 0
+ def method2724: Int = 0
+ def method2725: Int = 0
+ def method2726: Int = 0
+ def method2727: Int = 0
+ def method2728: Int = 0
+ def method2729: Int = 0
+ def method2730: Int = 0
+ def method2731: Int = 0
+ def method2732: Int = 0
+ def method2733: Int = 0
+ def method2734: Int = 0
+ def method2735: Int = 0
+ def method2736: Int = 0
+ def method2737: Int = 0
+ def method2738: Int = 0
+ def method2739: Int = 0
+ def method2740: Int = 0
+ def method2741: Int = 0
+ def method2742: Int = 0
+ def method2743: Int = 0
+ def method2744: Int = 0
+ def method2745: Int = 0
+ def method2746: Int = 0
+ def method2747: Int = 0
+ def method2748: Int = 0
+ def method2749: Int = 0
+ def method2750: Int = 0
+ def method2751: Int = 0
+ def method2752: Int = 0
+ def method2753: Int = 0
+ def method2754: Int = 0
+ def method2755: Int = 0
+ def method2756: Int = 0
+ def method2757: Int = 0
+ def method2758: Int = 0
+ def method2759: Int = 0
+ def method2760: Int = 0
+ def method2761: Int = 0
+ def method2762: Int = 0
+ def method2763: Int = 0
+ def method2764: Int = 0
+ def method2765: Int = 0
+ def method2766: Int = 0
+ def method2767: Int = 0
+ def method2768: Int = 0
+ def method2769: Int = 0
+ def method2770: Int = 0
+ def method2771: Int = 0
+ def method2772: Int = 0
+ def method2773: Int = 0
+ def method2774: Int = 0
+ def method2775: Int = 0
+ def method2776: Int = 0
+ def method2777: Int = 0
+ def method2778: Int = 0
+ def method2779: Int = 0
+ def method2780: Int = 0
+ def method2781: Int = 0
+ def method2782: Int = 0
+ def method2783: Int = 0
+ def method2784: Int = 0
+ def method2785: Int = 0
+ def method2786: Int = 0
+ def method2787: Int = 0
+ def method2788: Int = 0
+ def method2789: Int = 0
+ def method2790: Int = 0
+ def method2791: Int = 0
+ def method2792: Int = 0
+ def method2793: Int = 0
+ def method2794: Int = 0
+ def method2795: Int = 0
+ def method2796: Int = 0
+ def method2797: Int = 0
+ def method2798: Int = 0
+ def method2799: Int = 0
+ def method2800: Int = 0
+ def method2801: Int = 0
+ def method2802: Int = 0
+ def method2803: Int = 0
+ def method2804: Int = 0
+ def method2805: Int = 0
+ def method2806: Int = 0
+ def method2807: Int = 0
+ def method2808: Int = 0
+ def method2809: Int = 0
+ def method2810: Int = 0
+ def method2811: Int = 0
+ def method2812: Int = 0
+ def method2813: Int = 0
+ def method2814: Int = 0
+ def method2815: Int = 0
+ def method2816: Int = 0
+ def method2817: Int = 0
+ def method2818: Int = 0
+ def method2819: Int = 0
+ def method2820: Int = 0
+ def method2821: Int = 0
+ def method2822: Int = 0
+ def method2823: Int = 0
+ def method2824: Int = 0
+ def method2825: Int = 0
+ def method2826: Int = 0
+ def method2827: Int = 0
+ def method2828: Int = 0
+ def method2829: Int = 0
+ def method2830: Int = 0
+ def method2831: Int = 0
+ def method2832: Int = 0
+ def method2833: Int = 0
+ def method2834: Int = 0
+ def method2835: Int = 0
+ def method2836: Int = 0
+ def method2837: Int = 0
+ def method2838: Int = 0
+ def method2839: Int = 0
+ def method2840: Int = 0
+ def method2841: Int = 0
+ def method2842: Int = 0
+ def method2843: Int = 0
+ def method2844: Int = 0
+ def method2845: Int = 0
+ def method2846: Int = 0
+ def method2847: Int = 0
+ def method2848: Int = 0
+ def method2849: Int = 0
+ def method2850: Int = 0
+ def method2851: Int = 0
+ def method2852: Int = 0
+ def method2853: Int = 0
+ def method2854: Int = 0
+ def method2855: Int = 0
+ def method2856: Int = 0
+ def method2857: Int = 0
+ def method2858: Int = 0
+ def method2859: Int = 0
+ def method2860: Int = 0
+ def method2861: Int = 0
+ def method2862: Int = 0
+ def method2863: Int = 0
+ def method2864: Int = 0
+ def method2865: Int = 0
+ def method2866: Int = 0
+ def method2867: Int = 0
+ def method2868: Int = 0
+ def method2869: Int = 0
+ def method2870: Int = 0
+ def method2871: Int = 0
+ def method2872: Int = 0
+ def method2873: Int = 0
+ def method2874: Int = 0
+ def method2875: Int = 0
+ def method2876: Int = 0
+ def method2877: Int = 0
+ def method2878: Int = 0
+ def method2879: Int = 0
+ def method2880: Int = 0
+ def method2881: Int = 0
+ def method2882: Int = 0
+ def method2883: Int = 0
+ def method2884: Int = 0
+ def method2885: Int = 0
+ def method2886: Int = 0
+ def method2887: Int = 0
+ def method2888: Int = 0
+ def method2889: Int = 0
+ def method2890: Int = 0
+ def method2891: Int = 0
+ def method2892: Int = 0
+ def method2893: Int = 0
+ def method2894: Int = 0
+ def method2895: Int = 0
+ def method2896: Int = 0
+ def method2897: Int = 0
+ def method2898: Int = 0
+ def method2899: Int = 0
+ def method2900: Int = 0
+ def method2901: Int = 0
+ def method2902: Int = 0
+ def method2903: Int = 0
+ def method2904: Int = 0
+ def method2905: Int = 0
+ def method2906: Int = 0
+ def method2907: Int = 0
+ def method2908: Int = 0
+ def method2909: Int = 0
+ def method2910: Int = 0
+ def method2911: Int = 0
+ def method2912: Int = 0
+ def method2913: Int = 0
+ def method2914: Int = 0
+ def method2915: Int = 0
+ def method2916: Int = 0
+ def method2917: Int = 0
+ def method2918: Int = 0
+ def method2919: Int = 0
+ def method2920: Int = 0
+ def method2921: Int = 0
+ def method2922: Int = 0
+ def method2923: Int = 0
+ def method2924: Int = 0
+ def method2925: Int = 0
+ def method2926: Int = 0
+ def method2927: Int = 0
+ def method2928: Int = 0
+ def method2929: Int = 0
+ def method2930: Int = 0
+ def method2931: Int = 0
+ def method2932: Int = 0
+ def method2933: Int = 0
+ def method2934: Int = 0
+ def method2935: Int = 0
+ def method2936: Int = 0
+ def method2937: Int = 0
+ def method2938: Int = 0
+ def method2939: Int = 0
+ def method2940: Int = 0
+ def method2941: Int = 0
+ def method2942: Int = 0
+ def method2943: Int = 0
+ def method2944: Int = 0
+ def method2945: Int = 0
+ def method2946: Int = 0
+ def method2947: Int = 0
+ def method2948: Int = 0
+ def method2949: Int = 0
+ def method2950: Int = 0
+ def method2951: Int = 0
+ def method2952: Int = 0
+ def method2953: Int = 0
+ def method2954: Int = 0
+ def method2955: Int = 0
+ def method2956: Int = 0
+ def method2957: Int = 0
+ def method2958: Int = 0
+ def method2959: Int = 0
+ def method2960: Int = 0
+ def method2961: Int = 0
+ def method2962: Int = 0
+ def method2963: Int = 0
+ def method2964: Int = 0
+ def method2965: Int = 0
+ def method2966: Int = 0
+ def method2967: Int = 0
+ def method2968: Int = 0
+ def method2969: Int = 0
+ def method2970: Int = 0
+ def method2971: Int = 0
+ def method2972: Int = 0
+ def method2973: Int = 0
+ def method2974: Int = 0
+ def method2975: Int = 0
+ def method2976: Int = 0
+ def method2977: Int = 0
+ def method2978: Int = 0
+ def method2979: Int = 0
+ def method2980: Int = 0
+ def method2981: Int = 0
+ def method2982: Int = 0
+ def method2983: Int = 0
+ def method2984: Int = 0
+ def method2985: Int = 0
+ def method2986: Int = 0
+ def method2987: Int = 0
+ def method2988: Int = 0
+ def method2989: Int = 0
+ def method2990: Int = 0
+ def method2991: Int = 0
+ def method2992: Int = 0
+ def method2993: Int = 0
+ def method2994: Int = 0
+ def method2995: Int = 0
+ def method2996: Int = 0
+ def method2997: Int = 0
+ def method2998: Int = 0
+ def method2999: Int = 0
+}
diff --git a/test/files/run/t7558.scala b/test/files/run/t7558.scala
new file mode 100644
index 0000000..bfcaaba
--- /dev/null
+++ b/test/files/run/t7558.scala
@@ -0,0 +1,9 @@
+object Test extends App {
+ val cm = reflect.runtime.currentMirror
+ val u = cm.universe
+ import scala.tools.reflect.ToolBox
+ val tb = cm.mkToolBox()
+ val t = { var x = "ab".toList; u.reify { x = x.reverse; x }.tree }
+ val evaluated = tb.eval(t)
+ assert(evaluated == "ba".toList, evaluated)
+}
diff --git a/test/files/run/t7569.check b/test/files/run/t7569.check
new file mode 100644
index 0000000..aade96d
--- /dev/null
+++ b/test/files/run/t7569.check
@@ -0,0 +1,12 @@
+source-newSource1.scala,line-3,offset=49 A.this.one
+source-newSource1.scala,line-3,offset=49 A.this
+source-newSource1.scala,line-2,offset=41 A.super.<init>()
+source-newSource1.scala,line-2,offset=41 A.super.<init>
+source-newSource1.scala,line-2,offset=41 this
+source-newSource1.scala,line-3,offset=49 A.this.one
+source-newSource1.scala,line-3,offset=49 A.this
+RangePosition(newSource1.scala, 55, 57, 65) scala.Int.box(1).toString()
+RangePosition(newSource1.scala, 55, 57, 65) scala.Int.box(1).toString
+RangePosition(newSource1.scala, 55, 55, 56) scala.Int.box(1)
+NoPosition scala.Int.box
+NoPosition scala.Int
diff --git a/test/files/run/t7569.scala b/test/files/run/t7569.scala
new file mode 100644
index 0000000..b1b1443
--- /dev/null
+++ b/test/files/run/t7569.scala
@@ -0,0 +1,19 @@
+import scala.tools.partest._
+object Test extends CompilerTest {
+ import global._
+ override def extraSettings = super.extraSettings + " -Yrangepos"
+ override def sources = List(
+ """|import scala.language.postfixOps
+ |class A {
+ | val one = 1 toString
+ |}""".stripMargin
+ )
+ def check(source: String, unit: CompilationUnit) {
+ for (ClassDef(_, _, _, Template(_, _, stats)) <- unit.body ; stat <- stats ; t <- stat) {
+ t match {
+ case _: Select | _ : Apply | _:This => println("%-15s %s".format(t.pos.toString, t))
+ case _ =>
+ }
+ }
+ }
+}
diff --git a/test/files/run/t7571.scala b/test/files/run/t7571.scala
new file mode 100644
index 0000000..00b9695
--- /dev/null
+++ b/test/files/run/t7571.scala
@@ -0,0 +1,12 @@
+class Foo(val a: Int) extends AnyVal {
+ def foo = { {case x => x + a}: PartialFunction[Int, Int]}
+
+ def bar = (new {}).toString
+}
+
+object Test extends App {
+ val x = new Foo(1).foo.apply(2)
+ assert(x == 3, x)
+ val s = new Foo(1).bar
+ assert(s.nonEmpty, s)
+}
diff --git a/test/files/run/t7617a.check b/test/files/run/t7617a.check
new file mode 100644
index 0000000..94954ab
--- /dev/null
+++ b/test/files/run/t7617a.check
@@ -0,0 +1,2 @@
+hello
+world
diff --git a/test/files/run/t7617a/Macros_1.scala b/test/files/run/t7617a/Macros_1.scala
new file mode 100644
index 0000000..f9772c8
--- /dev/null
+++ b/test/files/run/t7617a/Macros_1.scala
@@ -0,0 +1,22 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Macros {
+ def getValueImpl[T](c: Context): c.Expr[T] = {
+ import c.universe._
+ c.Expr[T](Apply(Select(c.prefix.tree, newTermName("getVal")), Nil))
+ }
+ def setValueImpl[T](c: Context)(value: c.Expr[T]): c.Expr[Unit] = {
+ import c.universe._
+ c.Expr[Unit](Apply(Select(c.prefix.tree, newTermName("setVal")), List(value.tree)))
+ }
+}
+
+object Module {
+ private var _val: String = "hello"
+ def setVal(value: String): Unit = this._val = value
+ def getVal(): String = this._val
+
+ def value: String = macro Macros.getValueImpl[String]
+ def value_=(value: String): Unit = macro Macros.setValueImpl[String]
+}
diff --git a/test/files/run/t7617a/Test_2.scala b/test/files/run/t7617a/Test_2.scala
new file mode 100644
index 0000000..da6e34e
--- /dev/null
+++ b/test/files/run/t7617a/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ println(Module.value)
+ Module.value = "world"
+ println(Module.value)
+}
\ No newline at end of file
diff --git a/test/files/run/t7617b.check b/test/files/run/t7617b.check
new file mode 100644
index 0000000..81ec7e8
--- /dev/null
+++ b/test/files/run/t7617b.check
@@ -0,0 +1 @@
+foo = 2
diff --git a/test/files/run/t7617b/Macros_1.scala b/test/files/run/t7617b/Macros_1.scala
new file mode 100644
index 0000000..bc91993
--- /dev/null
+++ b/test/files/run/t7617b/Macros_1.scala
@@ -0,0 +1,8 @@
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context)(name: c.Expr[String])(value: c.Expr[Any]) = {
+ import c.universe._
+ reify(println(s"${name.splice} = ${value.splice}"))
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t7617b/Test_2.scala b/test/files/run/t7617b/Test_2.scala
new file mode 100644
index 0000000..e27f650
--- /dev/null
+++ b/test/files/run/t7617b/Test_2.scala
@@ -0,0 +1,11 @@
+import scala.language.dynamics
+import language.experimental.macros
+
+class C extends Dynamic {
+ def updateDynamic(name: String)(value: Any) = macro Macros.impl
+}
+
+object Test extends App {
+ val c = new C
+ c.foo = 2
+}
\ No newline at end of file
diff --git a/test/files/run/t7657.check b/test/files/run/t7657.check
new file mode 100644
index 0000000..c25d8d1
--- /dev/null
+++ b/test/files/run/t7657.check
@@ -0,0 +1,3 @@
+()
+()
+()
diff --git a/test/files/run/t7657/Macros_1.scala b/test/files/run/t7657/Macros_1.scala
new file mode 100644
index 0000000..b1e31aa
--- /dev/null
+++ b/test/files/run/t7657/Macros_1.scala
@@ -0,0 +1,8 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+trait T { def t(): Unit }
+abstract class A extends T { override def t(): Unit = () }
+
+object Macro { def t(c: Context)(): c.Expr[Unit] = c.universe.reify(()) }
+class C extends A { override def t(): Unit = macro Macro.t }
diff --git a/test/files/run/t7657/Test_2.scala b/test/files/run/t7657/Test_2.scala
new file mode 100644
index 0000000..5cc46b6
--- /dev/null
+++ b/test/files/run/t7657/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ val c = new C()
+ println(c.t())
+ println((c: T).t())
+ println((c: A).t())
+}
\ No newline at end of file
diff --git a/test/files/run/t7733.check b/test/files/run/t7733.check
new file mode 100644
index 0000000..19765bd
--- /dev/null
+++ b/test/files/run/t7733.check
@@ -0,0 +1 @@
+null
diff --git a/test/files/run/t7733/Separate_1.scala b/test/files/run/t7733/Separate_1.scala
new file mode 100644
index 0000000..a326ecd
--- /dev/null
+++ b/test/files/run/t7733/Separate_1.scala
@@ -0,0 +1,5 @@
+package test
+
+class Separate {
+ for (i <- 1 to 10) println(i)
+}
\ No newline at end of file
diff --git a/test/files/run/t7733/Test_2.scala b/test/files/run/t7733/Test_2.scala
new file mode 100644
index 0000000..2835857
--- /dev/null
+++ b/test/files/run/t7733/Test_2.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val code = tb.parse("{ val x: test.Separate$$anonfun$1 = null; x }")
+ println(tb.eval(code))
+}
\ No newline at end of file
diff --git a/test/files/run/t7775.scala b/test/files/run/t7775.scala
new file mode 100644
index 0000000..5fb0327
--- /dev/null
+++ b/test/files/run/t7775.scala
@@ -0,0 +1,17 @@
+import scala.concurrent.{duration, future, Await, ExecutionContext}
+import scala.tools.nsc.Settings
+import ExecutionContext.Implicits.global
+
+// Was failing pretty regularly with a ConcurrentModificationException as
+// WrappedProperties#systemProperties iterated directly over the mutable
+// global system properties map.
+object Test {
+ def main(args: Array[String]) {
+ val tries = 1000 // YMMV
+ val compiler = future {
+ for(_ <- 1 to tries) new Settings(_ => {})
+ }
+ for(i <- 1 to tries * 10) System.setProperty(s"foo$i", i.toString)
+ Await.result(compiler, duration.Duration.Inf)
+ }
+}
diff --git a/test/files/run/t7779.scala b/test/files/run/t7779.scala
new file mode 100644
index 0000000..db32cb7
--- /dev/null
+++ b/test/files/run/t7779.scala
@@ -0,0 +1,67 @@
+// -Xmax-classfile-length doesn't compress top-level classes.
+// class :::::::::::::::::::::::::::::::::::::::::::::::::
+
+trait Marker
+
+class Short extends Marker
+
+// We just test with member classes
+object O {
+ object ::::::::::::::::::::::::::::::::::::::::::::::::: extends Marker
+}
+class C {
+ class D {
+ class ::::::::::::::::::::::::::::::::::::::::::::::::: extends Marker
+ }
+}
+
+package pack {
+ // abbreviates to: $colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon to $read$$iw$$iw$$colon$colon$colon$colon$colon$colon$colon$colon$$$$c39b3f245029fbed9732fc888d44231b$$$$on$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon
+ // class :::::::::::::::::::::::::::::::::::::::::::::::::
+
+ class Short extends Marker
+
+ // We just test with member classes
+ object O {
+ object ::::::::::::::::::::::::::::::::::::::::::::::::: extends Marker
+ }
+ class C {
+ class D {
+ class ::::::::::::::::::::::::::::::::::::::::::::::::: extends Marker
+ }
+ }
+ package p2 {
+ class Short extends Marker
+
+ object O {
+ object ::::::::::::::::::::::::::::::::::::::::::::::::: extends Marker
+ }
+ class C {
+ class D {
+ class ::::::::::::::::::::::::::::::::::::::::::::::::: extends Marker
+ }
+ }
+ }
+}
+
+
+object Test extends App {
+ import reflect.runtime.universe._
+ def test[T: TypeTag] = {
+ val tt = typeTag[T]
+ val clz = tt.mirror.runtimeClass(tt.tpe)
+ assert(classOf[Marker].isAssignableFrom(clz), clz.toString)
+ }
+
+ test[Short]
+ test[O.:::::::::::::::::::::::::::::::::::::::::::::::::.type]
+ test[C#D#`:::::::::::::::::::::::::::::::::::::::::::::::::`]
+
+ test[pack.Short]
+ test[pack.O.:::::::::::::::::::::::::::::::::::::::::::::::::.type]
+ test[pack.C#D#`:::::::::::::::::::::::::::::::::::::::::::::::::`]
+
+ test[pack.p2.Short]
+ test[pack.p2.O.:::::::::::::::::::::::::::::::::::::::::::::::::.type]
+ test[pack.p2.C#D#`:::::::::::::::::::::::::::::::::::::::::::::::::`]
+}
diff --git a/test/files/run/t7825.scala b/test/files/run/t7825.scala
new file mode 100644
index 0000000..65ca06f
--- /dev/null
+++ b/test/files/run/t7825.scala
@@ -0,0 +1,34 @@
+import scala.tools.partest._
+
+object Test extends CompilerTest {
+ import global._
+
+ override lazy val units: List[CompilationUnit] = {
+ // We can test this on JDK6.
+ javaCompilationUnits(global)(defaultMethodSource) ++ compilationUnits(global)(scalaExtendsDefault)
+ }
+
+ private def defaultMethodSource = """
+public interface Iterator<E> {
+ boolean hasNext();
+ E next();
+ default void remove() {
+ throw new UnsupportedOperationException("remove");
+ }
+}
+ """
+
+ private def scalaExtendsDefault = """
+object Test {
+ object X extends Iterator[String] {
+ def hasNext = true
+ def next = "!"
+ }
+}
+ """
+
+ // We're only checking we that the Scala compilation unit passes refchecks
+ // No further checks are needed here.
+ def check(source: String, unit: global.CompilationUnit): Unit = {
+ }
+}
diff --git a/test/files/run/t7912.scala b/test/files/run/t7912.scala
new file mode 100644
index 0000000..3d603e0
--- /dev/null
+++ b/test/files/run/t7912.scala
@@ -0,0 +1,16 @@
+case object A { override def toString = ??? }
+
+object Test {
+ def foo: Int = (A: Any) match {
+ case 0 => 0
+ }
+ def main(args: Array[String]): Unit = {
+ try {
+ foo
+ sys.error("no exception")
+ } catch {
+ case me: MatchError => assert(me.getMessage == "an instance of class A$", me.getMessage)
+ case ex: Throwable => sys.error("not a match error: " + ex.getClass)
+ }
+ }
+}
diff --git a/test/files/run/t8010.scala b/test/files/run/t8010.scala
new file mode 100644
index 0000000..8636bbd
--- /dev/null
+++ b/test/files/run/t8010.scala
@@ -0,0 +1,22 @@
+trait Base {
+ def t = 1
+ def t(n: Int) = n
+ def bt = 2
+ def bt(n: Int) = n
+}
+trait Derived extends Base {
+ // was: double defintion error
+ override def t = 1 + super.t
+ override def t(n: Int) = 1 + super.t(n)
+ override def bt = 1 + super.bt
+ override def bt(n: Int) = 1 + super.bt(n)
+}
+
+object Test extends App {
+ val d = new Derived {}
+ // not the focus of thie bug, but let's just check the runtime behaviour while we're here.
+ assert(d.t == 2)
+ assert(d.t(1) == 2)
+ assert(d.bt == 3)
+ assert(d.bt(1) == 2)
+}
diff --git a/test/files/run/t8029.scala b/test/files/run/t8029.scala
new file mode 100644
index 0000000..dbd5c41
--- /dev/null
+++ b/test/files/run/t8029.scala
@@ -0,0 +1,57 @@
+import scala.tools.partest._
+import scala.tools.nsc._
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -nowarn -Ystop-after:typer"
+
+ override def code = "" // not used
+
+ def code1 = """
+package object p1 {
+ trait A
+ object A
+}
+ """
+
+ def code2 = """
+package object p2 {
+ class A
+ object A
+}
+ """
+
+ def code3 = """
+package object p3 {
+ object A
+ trait A
+}
+ """
+
+ def code4 = """
+package object p4 {
+ object A
+ trait A
+}
+ """
+
+ def show() {
+ val global = newCompiler()
+ import global._
+ def typecheck(code: String): Unit = {
+ val r = new Run
+ val sourceFile = newSources(code).head
+ global.reporter.reset()
+ r.compileSources(sourceFile :: Nil)
+ assert(!global.reporter.hasErrors)
+ }
+
+ def typecheckTwice(code: String): Unit = {
+ typecheck(code)
+ typecheck(code)
+ }
+
+ // was: illegal cyclic reference involving package ...
+ Seq(code1, code2, code3, code4) foreach typecheckTwice
+ }
+}
diff --git a/test/files/run/t8114.scala b/test/files/run/t8114.scala
new file mode 100644
index 0000000..ecbca37
--- /dev/null
+++ b/test/files/run/t8114.scala
@@ -0,0 +1,15 @@
+class AbstractTable[T] { type TableElementType }
+class Table[T] extends AbstractTable[T] { type TableElementType = T }
+
+class Query[E, U]
+class TableQuery[E <: AbstractTable[_]] extends Query[E, E#TableElementType]
+
+object Test extends App {
+ object MyTable extends TableQuery[Table[Long]]
+
+ def list[R](q: Query[_, R]): List[R] = Nil
+ list/*[Long]*/(MyTable) collect { case x => x }
+
+ // Generates a redundant bridge method (double definition error)
+ // in 2.10.x due to (at least) the bug in erasure fixed in SI-7120
+}
diff --git a/test/files/run/bug874.check b/test/files/run/t874.check
similarity index 100%
rename from test/files/run/bug874.check
rename to test/files/run/t874.check
diff --git a/test/files/run/bug874.scala b/test/files/run/t874.scala
similarity index 100%
rename from test/files/run/bug874.scala
rename to test/files/run/t874.scala
diff --git a/test/files/run/bug889.check b/test/files/run/t889.check
similarity index 100%
rename from test/files/run/bug889.check
rename to test/files/run/t889.check
diff --git a/test/files/run/bug889.scala b/test/files/run/t889.scala
similarity index 100%
rename from test/files/run/bug889.scala
rename to test/files/run/t889.scala
diff --git a/test/files/run/bug920.check b/test/files/run/t920.check
similarity index 100%
rename from test/files/run/bug920.check
rename to test/files/run/t920.check
diff --git a/test/files/run/bug920.scala b/test/files/run/t920.scala
similarity index 100%
rename from test/files/run/bug920.scala
rename to test/files/run/t920.scala
diff --git a/test/files/run/bug949.scala b/test/files/run/t949.scala
similarity index 100%
rename from test/files/run/bug949.scala
rename to test/files/run/t949.scala
diff --git a/test/files/run/bug978.scala b/test/files/run/t978.scala
similarity index 100%
rename from test/files/run/bug978.scala
rename to test/files/run/t978.scala
diff --git a/test/files/run/tailcalls.check b/test/files/run/tailcalls.check
index 7670962..f123bc8 100644
--- a/test/files/run/tailcalls.check
+++ b/test/files/run/tailcalls.check
@@ -45,8 +45,7 @@ test TailCall.g3 was successful
test TailCall.h1 was successful
test NonTailCall.f1 0 1 2 was successful
-test NonTailCall.f2 was successful
-
+test NonTailCall.f2
test TailCall.b1 was successful
test TailCall.b2 was successful
test FancyTailCalls.tcTryLocal was successful
diff --git a/test/files/run/tailcalls.scala b/test/files/run/tailcalls.scala
index 8830e7c..04a1a8b 100644
--- a/test/files/run/tailcalls.scala
+++ b/test/files/run/tailcalls.scala
@@ -295,7 +295,7 @@ object Test {
while (!stop) {
try {
calibrator.f(n, n);
- if (n >= Math.MAX_INT / 2) error("calibration failure");
+ if (n >= Int.MaxValue / 2) error("calibration failure");
n = 2 * n;
} catch {
case exception: compat.Platform.StackOverflowError => stop = true
diff --git a/test/files/run/test-cpp.check b/test/files/run/test-cpp.check
new file mode 100644
index 0000000..13f4c64
--- /dev/null
+++ b/test/files/run/test-cpp.check
@@ -0,0 +1,81 @@
+--- a
++++ b
+@@ -36,3 +36,3 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, value x, value y
++ locals: value args
+ startBlock: 1
+@@ -41,10 +41,6 @@
+ 1:
+- 52 CONSTANT(2)
+- 52 STORE_LOCAL(value x)
+ 52 SCOPE_ENTER value x
+- 53 LOAD_LOCAL(value x)
+- 53 STORE_LOCAL(value y)
+ 53 SCOPE_ENTER value y
+ 54 LOAD_MODULE object Predef
+- 54 LOAD_LOCAL(value y)
++ 54 CONSTANT(2)
+ 54 BOX INT
+@@ -91,3 +87,3 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, value x, value y
++ locals: value args, value x
+ startBlock: 1
+@@ -100,7 +96,5 @@
+ 81 SCOPE_ENTER value x
+- 82 LOAD_LOCAL(value x)
+- 82 STORE_LOCAL(value y)
+ 82 SCOPE_ENTER value y
+ 83 LOAD_MODULE object Predef
+- 83 LOAD_LOCAL(value y)
++ 83 LOAD_LOCAL(value x)
+ 83 BOX INT
+@@ -134,3 +128,3 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, value x, value y
++ locals: value args
+ startBlock: 1
+@@ -139,10 +133,6 @@
+ 1:
+- 66 THIS(TestAliasChainDerefThis)
+- 66 STORE_LOCAL(value x)
+ 66 SCOPE_ENTER value x
+- 67 LOAD_LOCAL(value x)
+- 67 STORE_LOCAL(value y)
+ 67 SCOPE_ENTER value y
+ 68 LOAD_MODULE object Predef
+- 68 LOAD_LOCAL(value y)
++ 68 THIS(Object)
+ 68 CALL_METHOD scala.Predef.println (dynamic)
+@@ -175,3 +165,3 @@
+ def test(x: Int (INT)): Unit {
+- locals: value x, value y
++ locals: value x
+ startBlock: 1
+@@ -180,7 +170,5 @@
+ 1:
+- 29 LOAD_LOCAL(value x)
+- 29 STORE_LOCAL(value y)
+ 29 SCOPE_ENTER value y
+ 30 LOAD_MODULE object Predef
+- 30 LOAD_LOCAL(value y)
++ 30 LOAD_LOCAL(value x)
+ 30 BOX INT
+@@ -222,7 +210,5 @@
+ 96 SCOPE_ENTER variable x
+- 97 LOAD_LOCAL(variable x)
+- 97 STORE_LOCAL(variable y)
+ 97 SCOPE_ENTER variable y
+ 98 LOAD_MODULE object Predef
+- 98 LOAD_LOCAL(variable y)
++ 98 LOAD_LOCAL(variable x)
+ 98 BOX INT
+@@ -232,6 +218,4 @@
+ 100 STORE_LOCAL(variable y)
+- 101 LOAD_LOCAL(variable y)
+- 101 STORE_LOCAL(variable x)
+ 102 LOAD_MODULE object Predef
+- 102 LOAD_LOCAL(variable x)
++ 102 LOAD_LOCAL(variable y)
+ 102 BOX INT
diff --git a/test/files/run/test-cpp.scala b/test/files/run/test-cpp.scala
new file mode 100644
index 0000000..5b3bc7b
--- /dev/null
+++ b/test/files/run/test-cpp.scala
@@ -0,0 +1,104 @@
+/**
+ * The only change is in the decision to replace a LOAD_LOCAL(l)
+ * in the copy-propagation performed before ClosureElimination.
+ *
+ * In the general case, the local variable 'l' is connected through
+ * a alias chain with other local variables and at the end of the
+ * alias chain there may be a Value, call it 'v'.
+ *
+ * If 'v' is cheaper to access (it is a Deref(This) or Const(_)), then
+ * replace the instruction to load it from the cheaper place.
+ * Otherwise, we use the local variable at the end of the alias chain
+ * instead of 'l'.
+ */
+
+import scala.tools.partest.IcodeTest
+
+object Test extends IcodeTest {
+ override def printIcodeAfterPhase = "dce"
+}
+
+import scala.util.Random._
+
+/**
+ * The example in the bug report (Issue-5321): an alias chain which store
+ * an Unknown. Should remove local variable 'y'.
+ */
+object TestBugReport {
+ def test(x: Int) = {
+ val y = x
+ println(y)
+ }
+}
+
+/**
+ * The code taken from scala.tools.nsc.settings.Settings:
+ * After inlining of the setter is performed, there is an opportunity for
+ * copy-propagation to eliminate some local variables.
+ */
+object TestSetterInline {
+ private var _postSetHook: this.type => Unit = (x: this.type) => ()
+ def withPostSetHook(f: this.type => Unit): this.type = { _postSetHook = f ; this }
+}
+
+
+/**
+ * The access of the local variable 'y' should be replaced by the
+ * constant.
+ */
+object TestAliasChainConstat {
+
+ def main(args: Array[String]): Unit = {
+ val x = 2
+ val y = x
+ println(y)
+ }
+}
+
+/**
+ * At the end of the alias chain we have a reference to 'this'.
+ * The local variables should be all discarded and replace by a
+ * direct reference to this
+ */
+class TestAliasChainDerefThis {
+
+ def main(args: Array[String]): Unit = {
+ val x = this
+ val y = x
+ println(y)
+ }
+}
+
+/**
+ * At the end of the alias chain, there is the value of a field.
+ * The use of variable 'y' should be replaced by 'x', not by an access
+ * to the field 'f' since it is more costly.
+ */
+object TestAliasChainDerefField {
+ def f = nextInt
+
+ def main(args: Array[String]): Unit = {
+ val x = f
+ val y = x
+ println(y)
+ }
+}
+
+
+/**
+ * The first time 'println' is called, 'x' is replaced by 'y'
+ * and the second time, 'y' is replaced by 'x'. But none of them
+ * can be removed.
+ */
+object TestDifferentBindings {
+
+ def main(args: Array[String]): Unit = {
+ var x = nextInt
+ var y = x
+ println(y)
+
+ y = nextInt
+ x = y
+ println(x)
+ }
+}
diff --git a/test/files/run/toolbox_console_reporter.check b/test/files/run/toolbox_console_reporter.check
new file mode 100644
index 0000000..1395c68
--- /dev/null
+++ b/test/files/run/toolbox_console_reporter.check
@@ -0,0 +1,8 @@
+hello
+============compiler console=============
+warning: method foo in object Utils is deprecated: test
+
+=========================================
+============compiler messages============
+Info(NoPosition,method foo in object Utils is deprecated: test,WARNING)
+=========================================
diff --git a/test/files/run/toolbox_console_reporter.scala b/test/files/run/toolbox_console_reporter.scala
new file mode 100644
index 0000000..d672ccb
--- /dev/null
+++ b/test/files/run/toolbox_console_reporter.scala
@@ -0,0 +1,29 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, mkConsoleFrontEnd}
+
+object Test extends App {
+ val oldErr = Console.err;
+ val baos = new java.io.ByteArrayOutputStream();
+ Console.setErr(new java.io.PrintStream(baos));
+ try {
+ val toolbox = cm.mkToolBox(frontEnd = mkConsoleFrontEnd(), options = "-deprecation")
+ toolbox.eval(reify{
+ object Utils {
+ @deprecated("test", "2.10.0")
+ def foo { println("hello") }
+ }
+
+ Utils.foo
+ }.tree)
+ println("============compiler console=============")
+ println(baos.toString);
+ println("=========================================")
+ println("============compiler messages============")
+ toolbox.frontEnd.infos.foreach(println(_))
+ println("=========================================")
+ } finally {
+ Console.setErr(oldErr);
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/toolbox_current_run_compiles.check b/test/files/run/toolbox_current_run_compiles.check
new file mode 100644
index 0000000..da29283
--- /dev/null
+++ b/test/files/run/toolbox_current_run_compiles.check
@@ -0,0 +1,2 @@
+true
+false
diff --git a/test/files/run/toolbox_current_run_compiles.scala b/test/files/run/toolbox_current_run_compiles.scala
new file mode 100644
index 0000000..b48c998
--- /dev/null
+++ b/test/files/run/toolbox_current_run_compiles.scala
@@ -0,0 +1,28 @@
+package pkg {
+ import scala.reflect.macros.Context
+ import scala.language.experimental.macros
+
+ object Macros {
+ def impl[T: c.WeakTypeTag](c: Context) = {
+ import c.universe._
+ val sym = c.weakTypeOf[T].typeSymbol
+ val g = c.universe.asInstanceOf[scala.tools.nsc.Global]
+ c.Expr[Boolean](Literal(Constant(g.currentRun.compiles(sym.asInstanceOf[g.Symbol]))))
+ }
+ def compiles[T] = macro impl[T]
+ }
+}
+
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val cm = ru.runtimeMirror(getClass.getClassLoader)
+ val toolbox = cm.mkToolBox()
+ toolbox.eval(toolbox.parse("""{
+ class C
+ println(pkg.Macros.compiles[C])
+ println(pkg.Macros.compiles[Object])
+ }"""))
+}
\ No newline at end of file
diff --git a/test/files/run/toolbox_default_reporter_is_silent.check b/test/files/run/toolbox_default_reporter_is_silent.check
new file mode 100644
index 0000000..ce01362
--- /dev/null
+++ b/test/files/run/toolbox_default_reporter_is_silent.check
@@ -0,0 +1 @@
+hello
diff --git a/test/files/run/toolbox_default_reporter_is_silent.scala b/test/files/run/toolbox_default_reporter_is_silent.scala
new file mode 100644
index 0000000..4bd7a64
--- /dev/null
+++ b/test/files/run/toolbox_default_reporter_is_silent.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val toolbox = cm.mkToolBox()
+ toolbox.eval(reify{
+ object Utils {
+ @deprecated("test", "2.10.0")
+ def foo { println("hello") }
+ }
+
+ Utils.foo
+ }.tree)
+}
\ No newline at end of file
diff --git a/test/files/run/toolbox_rangepos.check b/test/files/run/toolbox_rangepos.check
new file mode 100644
index 0000000..b536d3f
--- /dev/null
+++ b/test/files/run/toolbox_rangepos.check
@@ -0,0 +1 @@
+RangePosition(<toolbox>, 0, 2, 5)
diff --git a/test/files/run/toolbox_rangepos.scala b/test/files/run/toolbox_rangepos.scala
new file mode 100644
index 0000000..41fe6da
--- /dev/null
+++ b/test/files/run/toolbox_rangepos.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val toolbox = cm.mkToolBox(options = "-Yrangepos")
+ val tree = toolbox.parse("2 + 2")
+ println(tree.pos)
+}
diff --git a/test/files/run/toolbox_silent_reporter.check b/test/files/run/toolbox_silent_reporter.check
new file mode 100644
index 0000000..2d05b1e
--- /dev/null
+++ b/test/files/run/toolbox_silent_reporter.check
@@ -0,0 +1,4 @@
+hello
+============compiler messages============
+Info(NoPosition,method foo in object Utils is deprecated: test,WARNING)
+=========================================
diff --git a/test/files/run/toolbox_silent_reporter.scala b/test/files/run/toolbox_silent_reporter.scala
new file mode 100644
index 0000000..03b1d6d
--- /dev/null
+++ b/test/files/run/toolbox_silent_reporter.scala
@@ -0,0 +1,19 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, mkSilentFrontEnd}
+
+object Test extends App {
+ val toolbox = cm.mkToolBox(options = "-deprecation", frontEnd = mkSilentFrontEnd())
+ toolbox.eval(reify{
+ object Utils {
+ @deprecated("test", "2.10.0")
+ def foo { println("hello") }
+ }
+
+ Utils.foo
+ }.tree)
+ println("============compiler messages============")
+ toolbox.frontEnd.infos.foreach(println(_))
+ println("=========================================")
+}
\ No newline at end of file
diff --git a/test/files/run/toolbox_typecheck_implicitsdisabled.check b/test/files/run/toolbox_typecheck_implicitsdisabled.check
new file mode 100644
index 0000000..db64e11
--- /dev/null
+++ b/test/files/run/toolbox_typecheck_implicitsdisabled.check
@@ -0,0 +1,5 @@
+{
+ import scala.Predef._;
+ scala.Predef.any2ArrowAssoc[Int](1).->[Int](2)
+}
+scala.tools.reflect.ToolBoxError: reflective typecheck has failed: value -> is not a member of Int
diff --git a/test/files/run/toolbox_typecheck_implicitsdisabled.scala b/test/files/run/toolbox_typecheck_implicitsdisabled.scala
new file mode 100644
index 0000000..8a3d433
--- /dev/null
+++ b/test/files/run/toolbox_typecheck_implicitsdisabled.scala
@@ -0,0 +1,27 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val toolbox = cm.mkToolBox()
+
+ val tree1 = Block(List(
+ Import(Select(Ident(newTermName("scala")), newTermName("Predef")), List(ImportSelector(nme.WILDCARD, -1, null, -1)))),
+ Apply(Select(Literal(Constant(1)), newTermName("$minus$greater")), List(Literal(Constant(2))))
+ )
+ val ttree1 = toolbox.typeCheck(tree1, withImplicitViewsDisabled = false)
+ println(ttree1)
+
+ try {
+ val tree2 = Block(List(
+ Import(Select(Ident(newTermName("scala")), newTermName("Predef")), List(ImportSelector(nme.WILDCARD, -1, null, -1)))),
+ Apply(Select(Literal(Constant(1)), newTermName("$minus$greater")), List(Literal(Constant(2))))
+ )
+ val ttree2 = toolbox.typeCheck(tree2, withImplicitViewsDisabled = true)
+ println(ttree2)
+ } catch {
+ case ex: Throwable =>
+ println(ex)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/toolbox_typecheck_inferimplicitvalue.check b/test/files/run/toolbox_typecheck_inferimplicitvalue.check
new file mode 100644
index 0000000..ec17b42
--- /dev/null
+++ b/test/files/run/toolbox_typecheck_inferimplicitvalue.check
@@ -0,0 +1 @@
+C.MC
diff --git a/test/files/run/toolbox_typecheck_inferimplicitvalue.scala b/test/files/run/toolbox_typecheck_inferimplicitvalue.scala
new file mode 100644
index 0000000..3c5c994
--- /dev/null
+++ b/test/files/run/toolbox_typecheck_inferimplicitvalue.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+class C
+object C {
+ implicit object MC extends C
+}
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ println(tb.inferImplicitValue(typeOf[C]))
+}
\ No newline at end of file
diff --git a/test/files/run/toolbox_typecheck_macrosdisabled.check b/test/files/run/toolbox_typecheck_macrosdisabled.check
new file mode 100644
index 0000000..688f379
--- /dev/null
+++ b/test/files/run/toolbox_typecheck_macrosdisabled.check
@@ -0,0 +1,41 @@
+{
+ val $u: ru.type = ru;
+ val $m: $u.Mirror = ru.runtimeMirror({
+ final class $anon extends scala.AnyRef {
+ def <init>(): anonymous class $anon = {
+ $anon.super.<init>();
+ ()
+ };
+ ()
+ };
+ new $anon()
+}.getClass().getClassLoader());
+ $u.Expr.apply[Int(2)]($m, {
+ final class $treecreator1 extends TreeCreator {
+ def <init>(): $treecreator1 = {
+ $treecreator1.super.<init>();
+ ()
+ };
+ def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.Literal.apply($u.Constant.apply(2))
+ }
+ };
+ new $treecreator1()
+ })($u.TypeTag.apply[Int(2)]($m, {
+ final class $typecreator2 extends TypeCreator {
+ def <init>(): $typecreator2 = {
+ $typecreator2.super.<init>();
+ ()
+ };
+ def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.ConstantType.apply($u.Constant.apply(2))
+ }
+ };
+ new $typecreator2()
+ }))
+}
+ru.reify[Int](2)
diff --git a/test/files/run/toolbox_typecheck_macrosdisabled.scala b/test/files/run/toolbox_typecheck_macrosdisabled.scala
new file mode 100644
index 0000000..51eb63f
--- /dev/null
+++ b/test/files/run/toolbox_typecheck_macrosdisabled.scala
@@ -0,0 +1,25 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+// Note: If you're looking at this test and you don't know why, you may
+// have accidentally changed the way type tags reify. If so, validate
+// that your changes are accurate and update the check file.
+
+object Test extends App {
+ val toolbox = cm.mkToolBox()
+ val rupkg = cm.staticModule("scala.reflect.runtime.package")
+ val rusym = build.selectTerm(rupkg, "universe")
+ val NullaryMethodType(rutpe) = rusym.typeSignature
+ val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
+ build.setTypeSignature(ru, rutpe)
+
+ val tree1 = Apply(Select(Ident(ru), newTermName("reify")), List(Literal(Constant(2))))
+ val ttree1 = toolbox.typeCheck(tree1, withMacrosDisabled = false)
+ println(ttree1)
+
+ val tree2 = Apply(Select(Ident(ru), newTermName("reify")), List(Literal(Constant(2))))
+ val ttree2 = toolbox.typeCheck(tree2, withMacrosDisabled = true)
+ println(ttree2)
+}
diff --git a/test/files/run/toolbox_typecheck_macrosdisabled2.check b/test/files/run/toolbox_typecheck_macrosdisabled2.check
new file mode 100644
index 0000000..e7011d1
--- /dev/null
+++ b/test/files/run/toolbox_typecheck_macrosdisabled2.check
@@ -0,0 +1,41 @@
+{
+ val $u: ru.type = ru;
+ val $m: $u.Mirror = ru.runtimeMirror({
+ final class $anon extends scala.AnyRef {
+ def <init>(): anonymous class $anon = {
+ $anon.super.<init>();
+ ()
+ };
+ ()
+ };
+ new $anon()
+}.getClass().getClassLoader());
+ $u.Expr.apply[Array[Int]]($m, {
+ final class $treecreator1 extends TreeCreator {
+ def <init>(): $treecreator1 = {
+ $treecreator1.super.<init>();
+ ()
+ };
+ def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Tree = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.Apply.apply($u.Select.apply($u.build.Ident($m.staticModule("scala.Array")), $u.newTermName("apply")), scala.collection.immutable.List.apply[$u.Literal]($u.Literal.apply($u.Constant.apply(2))))
+ }
+ };
+ new $treecreator1()
+ })($u.TypeTag.apply[Array[Int]]($m, {
+ final class $typecreator2 extends TypeCreator {
+ def <init>(): $typecreator2 = {
+ $typecreator2.super.<init>();
+ ()
+ };
+ def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = {
+ val $u: U = $m$untyped.universe;
+ val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror];
+ $u.TypeRef.apply($u.ThisType.apply($m.staticPackage("scala").asModule.moduleClass), $m.staticClass("scala.Array"), scala.collection.immutable.List.apply[$u.Type]($m.staticClass("scala.Int").asType.toTypeConstructor))
+ }
+ };
+ new $typecreator2()
+ }))
+}
+ru.reify[Array[Int]](scala.Array.apply(2))
diff --git a/test/files/run/toolbox_typecheck_macrosdisabled2.scala b/test/files/run/toolbox_typecheck_macrosdisabled2.scala
new file mode 100644
index 0000000..74fd09d
--- /dev/null
+++ b/test/files/run/toolbox_typecheck_macrosdisabled2.scala
@@ -0,0 +1,25 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+// Note: If you're looking at this test and you don't know why, you may
+// have accidentally changed the way type tags reify. If so, validate
+// that your changes are accurate and update the check file.
+
+object Test extends App {
+ val toolbox = cm.mkToolBox()
+ val rupkg = cm.staticModule("scala.reflect.runtime.package")
+ val rusym = build.selectTerm(rupkg, "universe")
+ val NullaryMethodType(rutpe) = rusym.typeSignature
+ val ru = build.newFreeTerm("ru", scala.reflect.runtime.universe)
+ build.setTypeSignature(ru, rutpe)
+
+ val tree1 = Apply(Select(Ident(ru), newTermName("reify")), List(Apply(Select(Ident(newTermName("scala")), newTermName("Array")), List(Literal(Constant(2))))))
+ val ttree1 = toolbox.typeCheck(tree1, withMacrosDisabled = false)
+ println(ttree1)
+
+ val tree2 = Apply(Select(Ident(ru), newTermName("reify")), List(Apply(Select(Ident(newTermName("scala")), newTermName("Array")), List(Literal(Constant(2))))))
+ val ttree2 = toolbox.typeCheck(tree2, withMacrosDisabled = true)
+ println(ttree2)
+}
diff --git a/test/files/run/trait-renaming.check b/test/files/run/trait-renaming.check
new file mode 100644
index 0000000..b2e5aff
--- /dev/null
+++ b/test/files/run/trait-renaming.check
@@ -0,0 +1,2 @@
+public static int bippy.A$B$1$class.f(bippy.A$B$1)
+public static void bippy.A$B$1$class.$init$(bippy.A$B$1)
diff --git a/test/files/run/trait-renaming/A_1.scala b/test/files/run/trait-renaming/A_1.scala
new file mode 100644
index 0000000..2c3d4f5
--- /dev/null
+++ b/test/files/run/trait-renaming/A_1.scala
@@ -0,0 +1,15 @@
+package bippy {
+ class A {
+ def f = {
+ trait B {
+ def f = 5
+ }
+ trait C {
+ def g = 10
+ }
+ new B with C { }
+ }
+
+ def g = Class.forName("bippy.A$B$1$class")
+ }
+}
diff --git a/test/files/run/trait-renaming/B_2.scala b/test/files/run/trait-renaming/B_2.scala
new file mode 100644
index 0000000..174e929
--- /dev/null
+++ b/test/files/run/trait-renaming/B_2.scala
@@ -0,0 +1,5 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ (new bippy.A).g.getDeclaredMethods.map(_.toString).sorted foreach println
+ }
+}
diff --git a/test/files/run/treePrint.scala b/test/files/run/treePrint.scala
index 745c215..4a80e28 100644
--- a/test/files/run/treePrint.scala
+++ b/test/files/run/treePrint.scala
@@ -4,7 +4,7 @@ object Test {
import scala.tools.nsc._
import interpreter._
import java.io.{ OutputStream, BufferedReader, StringReader, PrintWriter, Writer, OutputStreamWriter}
-
+
val code = """
def foo = {
var q: Boolean = false
@@ -22,11 +22,11 @@ object Test {
else 20
}
else 30
-
+
(x == 5) || !q || true
}
- """
-
+ """
+
class NullOutputStream extends OutputStream { def write(b: Int) { } }
def main(args: Array[String]) {
@@ -35,7 +35,8 @@ object Test {
settings.Ycompacttrees.value = true
val intp = new IMain(settings, new PrintWriter(new NullOutputStream))
- val power = Power(intp)
+ val vals = new ReplVals { }
+ val power = new Power(intp, vals)
intp.interpret("""def initialize = "Have to interpret something or we get errors." """)
power trees code foreach println
}
diff --git a/test/files/run/triemap-hash.scala b/test/files/run/triemap-hash.scala
new file mode 100644
index 0000000..7f19997
--- /dev/null
+++ b/test/files/run/triemap-hash.scala
@@ -0,0 +1,46 @@
+
+
+
+import util.hashing.Hashing
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ hashing()
+ equality()
+ }
+
+ def hashing() {
+ import collection._
+
+ val tm = new concurrent.TrieMap[String, String](Hashing.fromFunction(x => x.length + x(0).toInt), Equiv.universal)
+ tm.put("a", "b")
+ tm.put("c", "d")
+
+ assert(tm("a") == "b")
+ assert(tm("c") == "d")
+
+ for (i <- 0 until 1000) tm(i.toString) = i.toString
+ for (i <- 0 until 1000) assert(tm(i.toString) == i.toString)
+ }
+
+ def equality() {
+ import collection._
+
+ val tm = new concurrent.TrieMap[String, String](Hashing.fromFunction(x => x(0).toInt), Equiv.fromFunction(_(0) == _(0)))
+ tm.put("a", "b")
+ tm.put("a1", "d")
+ tm.put("b", "c")
+
+ assert(tm("a") == "d", tm)
+ assert(tm("b") == "c", tm)
+
+ for (i <- 0 until 1000) tm(i.toString) = i.toString
+ assert(tm.size == 12, tm)
+ assert(tm("0") == "0", tm)
+ for (i <- 1 to 9) assert(tm(i.toString) == i.toString + "99", tm)
+ }
+
+}
diff --git a/test/files/run/triple-quoted-expr.scala b/test/files/run/triple-quoted-expr.scala
index 6d91ac5..0b30946 100644
--- a/test/files/run/triple-quoted-expr.scala
+++ b/test/files/run/triple-quoted-expr.scala
@@ -1,18 +1,18 @@
class A {
def f1 = {
val x = 5
-
+
"""
hi"""
}
def f2 = {
val x = 5
-
+
"""hi"""
- }
+ }
def f3 = {
val x = 5
-
+
"\nhi"
}
}
diff --git a/test/files/run/try-catch-unify.check b/test/files/run/try-catch-unify.check
new file mode 100644
index 0000000..67a8c64
--- /dev/null
+++ b/test/files/run/try-catch-unify.check
@@ -0,0 +1,4 @@
+Failure(java.lang.NumberFormatException: For input string: "Hi")
+Success(5.0)
+O NOES
+Failure(java.lang.NumberFormatException: For input string: "Hi")
diff --git a/test/files/run/try-catch-unify.scala b/test/files/run/try-catch-unify.scala
new file mode 100644
index 0000000..8cb14d0
--- /dev/null
+++ b/test/files/run/try-catch-unify.scala
@@ -0,0 +1,16 @@
+import util._
+
+import control.Exception._
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(catching(classOf[NumberFormatException]) withTry ("Hi".toDouble))
+ println(catching(classOf[NumberFormatException]) withTry ("5".toDouble))
+ try {
+ catching(classOf[NumberFormatException]) withTry (sys.error("O NOES"))
+ } catch {
+ case t => println(t.getMessage)
+ }
+ println(nonFatalCatch withTry ("Hi".toDouble))
+ }
+}
diff --git a/test/files/run/tuple-zipped.scala b/test/files/run/tuple-zipped.scala
index a985134..b197183 100644
--- a/test/files/run/tuple-zipped.scala
+++ b/test/files/run/tuple-zipped.scala
@@ -15,14 +15,14 @@ object Test {
def main(args: Array[String]): Unit = {
for (cc1 <- xss1 ; cc2 <- xss2) {
- val sum1 = (cc1, cc2).zip map { case (x, y) => x + y } sum
+ val sum1 = (cc1, cc2).zipped map { case (x, y) => x + y } sum
val sum2 = (cc1, cc2).zipped map (_ + _) sum
assert(sum1 == sum2)
}
for (cc1 <- xss1 ; cc2 <- xss2 ; cc3 <- xss3) {
- val sum1 = (cc1, cc2, cc3).zip map { case (x, y, z) => x + y + z } sum
+ val sum1 = (cc1, cc2, cc3).zipped map { case (x, y, z) => x + y + z } sum
val sum2 = (cc1, cc2, cc3).zipped map (_ + _ + _) sum
assert(sum1 == sum2)
diff --git a/test/files/run/tuples-msil.check b/test/files/run/tuples-msil.check
deleted file mode 100644
index e56a796..0000000
--- a/test/files/run/tuples-msil.check
+++ /dev/null
@@ -1,2 +0,0 @@
-(1,abc,True)
-OK
diff --git a/test/files/run/type-currying.check b/test/files/run/type-currying.check
new file mode 100644
index 0000000..e5db238
--- /dev/null
+++ b/test/files/run/type-currying.check
@@ -0,0 +1,27 @@
+Map(abc -> 55)
+(a,0)
+(b,1)
+(c,2)
+(d,3)
+(e,4)
+(f,5)
+(g,6)
+(h,7)
+(i,8)
+(j,9)
+(k,10)
+(l,11)
+(m,12)
+(n,13)
+(o,14)
+(p,15)
+(q,16)
+(r,17)
+(s,18)
+(t,19)
+(u,20)
+(v,21)
+(w,22)
+(x,23)
+(y,24)
+(z,25)
diff --git a/test/files/run/type-currying.scala b/test/files/run/type-currying.scala
new file mode 100644
index 0000000..f9764c6
--- /dev/null
+++ b/test/files/run/type-currying.scala
@@ -0,0 +1,58 @@
+import scala.collection.{ mutable, immutable, generic }
+import generic.CanBuildFrom
+
+object Partial {
+ type KnownContainer[CC[K, V] <: collection.Map[K, V]] = {
+ def values[V] : KnownValues[CC, V]
+ def apply[K] : KnownKeys[CC, K]
+ }
+ type KnownKeys[CC[K, V] <: collection.Map[K, V], K] = {
+ def apply[V](implicit cbf: CanBuildFrom[_, (K, V), CC[K, V]]): CC[K, V]
+ }
+ type KnownValues[CC[K, V] <: collection.Map[K, V], V] = {
+ def apply[K](implicit cbf: CanBuildFrom[_, (K, V), CC[K, V]]): CC[K, V]
+ }
+
+ def apply[CC[K, V] <: collection.Map[K, V]] : KnownContainer[CC] = new {
+ def values[V] : KnownValues[CC, V] = new {
+ def apply[K](implicit cbf: CanBuildFrom[_, (K, V), CC[K, V]]) = cbf().result
+ }
+ def apply[K] = new {
+ def apply[V](implicit cbf: CanBuildFrom[_, (K, V), CC[K, V]]) = cbf().result
+ }
+ }
+}
+
+object Test {
+ val m = Partial[immutable.TreeMap]
+ val m1 = m[String]
+ val m2 = m[Int][Int]
+
+ val mutableBippy = Partial[mutable.HashMap][String][Int]
+ mutableBippy("abc") = 55
+
+ val immutableBippy = Partial[immutable.HashMap].values[Int]
+ def make[T](xs: T*) = immutableBippy[T] ++ xs.zipWithIndex
+
+ val n0 = Partial[immutable.HashMap][String][Int] ++ Seq(("a", 1))
+ val n1 = Partial.apply[immutable.HashMap].apply[String].apply[Int] ++ Seq(("a", 1))
+
+ def main(args: Array[String]): Unit = {
+ println(mutableBippy)
+ make('a' to 'z': _*).toList.sorted foreach println
+ assert(n0 == n1)
+ }
+}
+
+class A {
+ object Foo {
+ def apply[T] = Bar
+ }
+ object Bar {
+ def apply() = Foo
+ }
+
+ def f() = Foo
+ def g = f()[Int]()[String]()
+ def h = Foo[Foo.type]()[Foo.type]()
+}
diff --git a/test/files/run/syncchannel.check b/test/files/run/typed-annotated.check
similarity index 100%
copy from test/files/run/syncchannel.check
copy to test/files/run/typed-annotated.check
diff --git a/test/files/run/typed-annotated/Macros_1.scala b/test/files/run/typed-annotated/Macros_1.scala
new file mode 100644
index 0000000..dd18c63
--- /dev/null
+++ b/test/files/run/typed-annotated/Macros_1.scala
@@ -0,0 +1,17 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+class ann extends scala.annotation.StaticAnnotation
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ // val tpt = Annotated(Apply(Select(New(Ident(newTypeName("ann"))), nme.CONSTRUCTOR), List()), Ident(newTypeName("Int")))
+ val tpt = Annotated(Apply(Select(New(Ident(newTypeName("ann"))), nme.CONSTRUCTOR), List()), TypeTree(weakTypeOf[Int]))
+ c.Expr[Unit](Block(
+ List(ValDef(Modifiers(), newTermName("x"), tpt, Literal(Constant(42)))),
+ Apply(Ident(newTermName("println")), List(Ident(newTermName("x"))))))
+ }
+
+ def foo = macro impl
+}
\ No newline at end of file
diff --git a/test/files/run/typed-annotated/Test_2.scala b/test/files/run/typed-annotated/Test_2.scala
new file mode 100644
index 0000000..acfddae
--- /dev/null
+++ b/test/files/run/typed-annotated/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.foo
+}
\ No newline at end of file
diff --git a/test/files/run/typetags_core.check b/test/files/run/typetags_core.check
new file mode 100644
index 0000000..980b471
--- /dev/null
+++ b/test/files/run/typetags_core.check
@@ -0,0 +1,30 @@
+true
+TypeTag[Byte]
+true
+TypeTag[Short]
+true
+TypeTag[Char]
+true
+TypeTag[Int]
+true
+TypeTag[Long]
+true
+TypeTag[Float]
+true
+TypeTag[Double]
+true
+TypeTag[Boolean]
+true
+TypeTag[Unit]
+true
+TypeTag[Any]
+true
+TypeTag[AnyVal]
+true
+TypeTag[AnyRef]
+true
+TypeTag[java.lang.Object]
+true
+TypeTag[Null]
+true
+TypeTag[Nothing]
diff --git a/test/files/run/typetags_core.scala b/test/files/run/typetags_core.scala
new file mode 100644
index 0000000..5257d55
--- /dev/null
+++ b/test/files/run/typetags_core.scala
@@ -0,0 +1,34 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ println(implicitly[TypeTag[Byte]] eq TypeTag.Byte)
+ println(implicitly[TypeTag[Byte]])
+ println(implicitly[TypeTag[Short]] eq TypeTag.Short)
+ println(implicitly[TypeTag[Short]])
+ println(implicitly[TypeTag[Char]] eq TypeTag.Char)
+ println(implicitly[TypeTag[Char]])
+ println(implicitly[TypeTag[Int]] eq TypeTag.Int)
+ println(implicitly[TypeTag[Int]])
+ println(implicitly[TypeTag[Long]] eq TypeTag.Long)
+ println(implicitly[TypeTag[Long]])
+ println(implicitly[TypeTag[Float]] eq TypeTag.Float)
+ println(implicitly[TypeTag[Float]])
+ println(implicitly[TypeTag[Double]] eq TypeTag.Double)
+ println(implicitly[TypeTag[Double]])
+ println(implicitly[TypeTag[Boolean]] eq TypeTag.Boolean)
+ println(implicitly[TypeTag[Boolean]])
+ println(implicitly[TypeTag[Unit]] eq TypeTag.Unit)
+ println(implicitly[TypeTag[Unit]])
+ println(implicitly[TypeTag[Any]] eq TypeTag.Any)
+ println(implicitly[TypeTag[Any]])
+ println(implicitly[TypeTag[AnyVal]] eq TypeTag.AnyVal)
+ println(implicitly[TypeTag[AnyVal]])
+ println(implicitly[TypeTag[AnyRef]] eq TypeTag.AnyRef)
+ println(implicitly[TypeTag[AnyRef]])
+ println(implicitly[TypeTag[Object]] eq TypeTag.Object)
+ println(implicitly[TypeTag[Object]])
+ println(implicitly[TypeTag[Null]] eq TypeTag.Null)
+ println(implicitly[TypeTag[Null]])
+ println(implicitly[TypeTag[Nothing]] eq TypeTag.Nothing)
+ println(implicitly[TypeTag[Nothing]])
+}
\ No newline at end of file
diff --git a/test/files/run/typetags_multi.check b/test/files/run/typetags_multi.check
new file mode 100644
index 0000000..6110252
--- /dev/null
+++ b/test/files/run/typetags_multi.check
@@ -0,0 +1,5 @@
+TypeTag[Int]
+TypeTag[Array[Int]]
+TypeTag[Array[Array[Int]]]
+TypeTag[Array[Array[Array[Int]]]]
+TypeTag[Array[Array[Array[Array[Int]]]]]
diff --git a/test/files/run/typetags_multi.scala b/test/files/run/typetags_multi.scala
new file mode 100644
index 0000000..b30aac8
--- /dev/null
+++ b/test/files/run/typetags_multi.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ println(implicitly[TypeTag[Int]])
+ println(implicitly[TypeTag[Array[Int]]])
+ println(implicitly[TypeTag[Array[Array[Int]]]])
+ println(implicitly[TypeTag[Array[Array[Array[Int]]]]])
+ println(implicitly[TypeTag[Array[Array[Array[Array[Int]]]]]])
+}
\ No newline at end of file
diff --git a/test/files/run/typetags_serialize.check b/test/files/run/typetags_serialize.check
new file mode 100644
index 0000000..f79436e
--- /dev/null
+++ b/test/files/run/typetags_serialize.check
@@ -0,0 +1,2 @@
+java.io.NotSerializableException: scala.reflect.api.TypeTags$PredefTypeCreator
+java.io.NotSerializableException: Test$$typecreator1$1
diff --git a/test/files/run/typetags_serialize.scala b/test/files/run/typetags_serialize.scala
new file mode 100644
index 0000000..3c842e6
--- /dev/null
+++ b/test/files/run/typetags_serialize.scala
@@ -0,0 +1,29 @@
+import java.io._
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ def test(tag: TypeTag[_]) =
+ try {
+ val fout = new ByteArrayOutputStream()
+ val out = new ObjectOutputStream(fout)
+ out.writeObject(tag)
+ out.close()
+ fout.close()
+
+ val fin = new ByteArrayInputStream(fout.toByteArray)
+ val in = new ObjectInputStream(fin)
+ val retag = in.readObject().asInstanceOf[ru.TypeTag[_]].in(cm)
+ in.close()
+ fin.close()
+
+ println(retag)
+ } catch {
+ case ex: Exception =>
+ println(ex)
+ }
+
+ test(implicitly[TypeTag[Int]])
+ test(implicitly[TypeTag[String]])
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/files/run/typetags_without_scala_reflect_manifest_lookup.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/files/run/typetags_without_scala_reflect_manifest_lookup.check
diff --git a/test/files/run/typetags_without_scala_reflect_manifest_lookup.scala b/test/files/run/typetags_without_scala_reflect_manifest_lookup.scala
new file mode 100644
index 0000000..6fd3d2d
--- /dev/null
+++ b/test/files/run/typetags_without_scala_reflect_manifest_lookup.scala
@@ -0,0 +1,29 @@
+import scala.tools.partest._
+import scala.tools.nsc.Settings
+
+object Test extends DirectTest {
+ override def extraSettings = "-cp " + sys.props("partest.lib") + " -d \"" + testOutput.path + "\""
+
+ def code = """
+ object Test extends App {
+ // manifest lookup also involves type tag lookup
+ // because we support manifest <-> typetag convertability
+ //
+ // however when scala-reflect.jar (the home of type tags) is not on the classpath
+ // we need to omit the type tag lookup, because we lack the necessary symbols
+ // to do implicit search and tag materialization
+ // (such missing symbols are e.g. ApiUniverseClass and TypeTagsClass)
+ //
+ // the test case you're looking at checks exactly this
+ // we establish a classpath that only includes scala-library.jar
+ // and then force scalac to perform implicit search for a manifest
+ // if type tag lookup is not disabled, the compiler will crash
+ // if it is disabled, then the compilation will succeed
+ // http://groups.google.com/group/scala-internals/browse_thread/thread/166ce4b71b7c46bb
+ def foo[T: Manifest] = ()
+ foo[List[Int]]
+ }
+ """
+
+ def show = compile()
+}
\ No newline at end of file
diff --git a/test/files/run/typetags_without_scala_reflect_typetag_lookup.check b/test/files/run/typetags_without_scala_reflect_typetag_lookup.check
new file mode 100644
index 0000000..84e5435
--- /dev/null
+++ b/test/files/run/typetags_without_scala_reflect_typetag_lookup.check
@@ -0,0 +1,2 @@
+
+pos: source-newSource1.scala,line-9,offset=466 could not find implicit value for evidence parameter of type reflect.runtime.package.universe.TypeTag[Int] ERROR
diff --git a/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala b/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala
new file mode 100644
index 0000000..1fbdc62
--- /dev/null
+++ b/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala
@@ -0,0 +1,43 @@
+import scala.tools.partest._
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def library = """
+ import scala.reflect.runtime.universe._
+
+ object Library {
+ def foo[T: TypeTag] = ()
+ }
+ """
+ def compileLibrary() = {
+ val classpath = List(sys.props("partest.lib"), sys.props("partest.reflect")) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(library)
+ }
+
+ def app = """
+ object Test extends App {
+ // tries to materialize a type tag not having scala-reflect.jar on the classpath
+ // even though it's easy to materialize a type tag of Int, this line will fail
+ // because materialization involves classes from scala-reflect.jar
+ //
+ // in this test we make sure that the compiler doesn't crash
+ // but just displays several missing class file errors and an unavailable implicit message
+ Library.foo[Int]
+ }
+ """
+ def compileApp() = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(app)
+ }
+
+ def show(): Unit = {
+ compileLibrary();
+ println(filteredInfos.mkString("\n"))
+ storeReporter.infos.clear()
+ compileApp();
+ // we should get bad symbolic reference errors, because we're trying to use an implicit that can't be unpickled
+ // but we don't know the number of these errors and their order, so I just ignore them all
+ println(filteredInfos.filterNot(_.msg.contains("bad symbolic reference")).mkString("\n"))
+ }
+}
diff --git a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check
new file mode 100644
index 0000000..8c9d07d
--- /dev/null
+++ b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check
@@ -0,0 +1,2 @@
+
+pos: source-newSource1.scala,line-9,offset=479 No Manifest available for App.this.T. ERROR
diff --git a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala
new file mode 100644
index 0000000..6804baa
--- /dev/null
+++ b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala
@@ -0,0 +1,47 @@
+import scala.tools.partest._
+import scala.tools.nsc.Settings
+
+object Test extends StoreReporterDirectTest {
+ def code = ???
+
+ def library = """
+ import scala.reflect.runtime.universe._
+
+ trait Library {
+ type T
+ implicit val tt: TypeTag[T]
+ }
+ """
+ def compileLibrary() = {
+ val classpath = List(sys.props("partest.lib"), sys.props("partest.reflect")) mkString sys.props("path.separator")
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(library)
+ }
+
+ def app = """
+ trait App extends Library {
+ // tries to create a manifest from a type tag without having scala-reflect.jar on the classpath
+ // even though it's possible to convert a type tag into a manifest, this will fail
+ // because conversion requires classes from scala-reflect.jar
+ //
+ // in this test we make sure that the compiler doesn't crash
+ // but just displays several missing class file errors and an unavailable implicit message
+ manifest[T]
+ }
+ """
+ def compileApp() = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ val global = newCompiler("-cp", classpath, "-d", testOutput.path)
+ compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(app)
+ //global.reporter.ERROR.foreach(println)
+ }
+
+ def show(): Unit = {
+ compileLibrary();
+ println(filteredInfos.mkString("\n"))
+ storeReporter.infos.clear()
+ compileApp();
+ // we should get bad symbolic reference errors, because we're trying to use an implicit that can't be unpickled
+ // but we don't know the number of these errors and their order, so I just ignore them all
+ println(filteredInfos.filterNot (_.msg.contains("bad symbolic reference")).mkString("\n"))
+ }
+}
diff --git a/test/files/run/unapply.scala b/test/files/run/unapply.scala
index 9b60681..90dd4fa 100644
--- a/test/files/run/unapply.scala
+++ b/test/files/run/unapply.scala
@@ -1,12 +1,11 @@
-import scala.testing.SUnit._
-
-object Test extends TestConsoleMain {
- def suite = new TestSuite(
- Foo,
- Mas,
- LisSeqArr,
- StreamFoo
- )
+object Test {
+ def main(args: Array[String]) {
+ Foo.run()
+ Mas.run()
+ LisSeqArr.run()
+ StreamFoo.run()
+ Test1256.run()
+ }
}
// this class is used for representation
@@ -31,9 +30,10 @@ object FaaPreciseSome {
object VarFoo {
def unapply(a : Int)(implicit b : Int) : Option[Int] = Some(a + b)
}
-object Foo extends TestCase("Foo") with Assert {
+
+object Foo {
def unapply(x: Any): Option[Product2[Int, String]] = x match {
- case y: Bar => Some(Tuple(y.size, y.name))
+ case y: Bar => Some(y.size, y.name)
case _ => None
}
def doMatch1(b:Bar) = b match {
@@ -51,25 +51,25 @@ object Foo extends TestCase("Foo") with Assert {
def doMatch5(b:Bar) = (b:Any) match {
case FaaPreciseSome(n:String) => n
}
- override def runTest {
+ def run() {
val b = new Bar
- assertEquals(doMatch1(b),(50,"medium"))
- assertEquals(doMatch2(b),null)
- assertEquals(doMatch3(b),"medium")
- assertEquals(doMatch4(b),"medium")
- assertEquals(doMatch5(b),"medium")
+ assert(doMatch1(b) == (50,"medium"))
+ assert(doMatch2(b) == null)
+ assert(doMatch3(b) == "medium")
+ assert(doMatch4(b) == "medium")
+ assert(doMatch5(b) == "medium")
implicit val bc: Int = 3
- assertEquals(4 match {
+ assert(7 == (4 match {
case VarFoo(x) => x
- }, 7)
+ }))
}
}
// same, but now object is not top-level
-object Mas extends TestCase("Mas") with Assert {
+object Mas {
object Gaz {
def unapply(x: Any): Option[Product2[Int, String]] = x match {
- case y: Baz => Some(Tuple(y.size, y.name))
+ case y: Baz => Some(y.size, y.name)
case _ => None
}
}
@@ -77,57 +77,46 @@ object Mas extends TestCase("Mas") with Assert {
var size: Int = 60
var name: String = "too large"
}
- def runTest {
+ def run() {
val b = new Baz
- assertEquals(b match {
+ assert((60,"too large") == (b match {
case Gaz(s:Int, n:String) => (s,n)
- }, (60,"too large"))
+ }))
}
}
-object LisSeqArr extends TestCase("LisSeqArr") with Assert {
-// def foo[A](x:List[A]) {}
- def runTest {
- assertEquals((List(1,2,3): Any) match { case List(x,y,_*) => (x,y)}, (1,2))
- assertEquals((List(1,2,3): Any) match { case Seq(x,y,_*) => (x,y)}, (1,2))
- //assertEquals((Array(1,2,3): Any) match { case Seq(x,y,_*) => (x,y)}, (1,2))
- //assertEquals((Array(1,2,3): Any) match { case Array(x,y,_*) => {x,y}}, {1,2})
-
- // just compile, feature request #1196
-// (List(1,2,3): Any) match {
-// case a @ List(x,y,_*) => foo(a)
-// }
-
+object LisSeqArr {
+ def run() {
+ assert((1,2) == ((List(1,2,3): Any) match { case List(x,y,_*) => (x,y)}))
+ assert((1,2) == ((List(1,2,3): Any) match { case Seq(x,y,_*) => (x,y)}))
}
}
-
-object StreamFoo extends TestCase("unapply for Streams") with Assert {
- //val x:Stream[Int] = Stream.cons(1,x)
-
+object StreamFoo {
def sum(stream: Stream[Int]): Int =
stream match {
case Stream.Empty => 0
case Stream.cons(hd, tl) => hd + sum(tl)
}
- override def runTest {
+ def run() {
val str: Stream[Int] = List(1,2,3).toStream
- assertEquals(sum(str), 6)
+ assert(6 == sum(str))
}
}
-object Test1256 extends TestCase("1256") {
+object Test1256 {
class Sync {
def unapply(scrut: Any): Boolean = false
}
class Buffer {
val Get = new Sync
-
val jp: PartialFunction[Any, Any] = {
case Get() =>
}
}
- override def runTest { assertFalse((new Buffer).jp.isDefinedAt(42)) }
+ def run() {
+ assert(!(new Buffer).jp.isDefinedAt(42))
+ }
}
diff --git a/test/files/run/unittest_collection.scala b/test/files/run/unittest_collection.scala
index dd95540..3639b49 100644
--- a/test/files/run/unittest_collection.scala
+++ b/test/files/run/unittest_collection.scala
@@ -1,103 +1,58 @@
-
object Test {
- import scala.testing.SUnit._
import scala.collection.mutable.{ArrayBuffer, Buffer, BufferProxy, ListBuffer}
- trait BufferTest extends Assert {
- def doTest(x:Buffer[String]) = {
- // testing method +=
- x += "one"
- assertEquals("retrieving 'one'", x(0), "one")
- assertEquals("length A ", x.length, 1)
- x += "two"
- assertEquals("retrieving 'two'", x(1), "two")
- assertEquals("length B ", x.length, 2)
-
- // testing method -= (removing last element)
- x -= "two"
-
- assertEquals("length C ", x.length, 1)
-
- try { x(1); fail("no exception for removed element") }
- catch { case i:IndexOutOfBoundsException => }
-
- try { x.remove(1); fail("no exception for removed element") }
- catch { case i:IndexOutOfBoundsException => }
-
- x += "two2"
- assertEquals("length D ", x.length, 2)
-
- // removing first element
- x.remove(0)
- assertEquals("length E ", x.length, 1)
-
- // toList
- assertEquals("toList ", x.toList, List("two2"))
-
- // clear
- x.clear
- assertEquals("length F ", x.length, 0)
-
- // copyToBuffer
- x += "a"
- x += "b"
- val dest = new ArrayBuffer[String]
- x copyToBuffer dest
- assertEquals("dest", List("a", "b"), dest.toList)
- assertEquals("source", List("a", "b"), x.toList)
+ def main(args: Array[String]) {
+ test(collection.mutable.ArrayBuffer[String]())
+ test(collection.mutable.ListBuffer[String]())
+ class BBuf(z:ListBuffer[String]) extends BufferProxy[String] {
+ def self = z
}
+ test(new BBuf(collection.mutable.ListBuffer[String]()))
}
- class TArrayBuffer extends TestCase("collection.mutable.ArrayBuffer") with Assert with BufferTest {
-
- var x: ArrayBuffer[String] = _
-
- override def runTest = { setUp; doTest(x); tearDown }
-
- override def setUp = { x = new scala.collection.mutable.ArrayBuffer }
-
- override def tearDown = { x.clear; x = null }
- }
-
- class TListBuffer extends TestCase("collection.mutable.ListBuffer") with Assert with BufferTest {
-
- var x: ListBuffer[String] = _
-
- override def runTest = { setUp; doTest(x); tearDown }
-
- override def setUp = { x = new scala.collection.mutable.ListBuffer }
+ def test(x: Buffer[String]) {
+ // testing method +=
+ x += "one"
+ assert(x(0) == "one", "retrieving 'one'")
+ assert(x.length == 1, "length A")
+ x += "two"
+ assert(x(1) == "two", "retrieving 'two'")
+ assert(x.length == 2, "length B")
- override def tearDown = { x.clear; x = null }
+ // testing method -= (removing last element)
+ x -= "two"
- }
+ assert(x.length == 1, "length C")
- class TBufferProxy extends TestCase("collection.mutable.BufferProxy") with Assert with BufferTest {
+ try { x(1); sys.error("no exception for removed element") }
+ catch { case i:IndexOutOfBoundsException => }
- class BBuf(z:ListBuffer[String]) extends BufferProxy[String] {
- def self = z
- }
+ try { x.remove(1); sys.error("no exception for removed element") }
+ catch { case i:IndexOutOfBoundsException => }
- var x: BufferProxy[String] = _
+ x += "two2"
+ assert(x.length == 2, "length D")
- override def runTest = { setUp; doTest(x); tearDown }
+ // removing first element
+ x.remove(0)
+ assert(x.length == 1, "length E")
- override def setUp = { x = new BBuf(new scala.collection.mutable.ListBuffer) }
+ // toList
+ assert(x.toList == List("two2"), "toList")
- override def tearDown = { x.clear; x = null }
+ // clear
+ x.clear()
+ assert(x.length == 0, "length 0")
+ assert(x.isEmpty, "isEmpty")
+ // copyToBuffer
+ x += "a"
+ x += "b"
+ val dest = new ArrayBuffer[String]
+ x.copyToBuffer(dest)
+ assert(List("a", "b") == dest.toList, "dest")
+ assert(List("a", "b") == x.toList, "source")
}
- def main(args:Array[String]) = {
- val ts = new TestSuite(
- //new TArrayBuffer,
- new TListBuffer//,
- //new TBufferProxy
- )
- val tr = new TestResult()
- ts.run(tr)
- for (failure <- tr.failures) {
- Console.println(failure)
- }
- }
}
diff --git a/test/files/run/unittest_io.scala b/test/files/run/unittest_io.scala
index c2d95a3..2cadb9b 100644
--- a/test/files/run/unittest_io.scala
+++ b/test/files/run/unittest_io.scala
@@ -1,42 +1,40 @@
-import testing.SUnit._
+object Test {
-object Test extends TestConsoleMain {
- def suite = new TestSuite(new UTF8Tests, new SourceTest)
+ def main(args: Array[String]) {
+ UTF8Tests.run()
+ SourceTest.run()
+ }
- class UTF8Tests extends TestCase("UTF8Codec") {
- import io.UTF8Codec.encode
-
- def runTest {
- assertEquals(new String( encode(0x004D), "utf8"), new String(Array(0x004D.asInstanceOf[Char])))
- assertEquals(new String( encode(0x0430), "utf8"), new String(Array(0x0430.asInstanceOf[Char])))
- assertEquals(new String( encode(0x4E8C), "utf8"), new String(Array(0x4E8C.asInstanceOf[Char])))
- assertEquals(new String(encode(0x10302), "utf8"), new String(Array(0xD800.asInstanceOf[Char],
- 0xDF02.asInstanceOf[Char])))
+ object UTF8Tests {
+ def decode(ch: Int) = new String(Array(ch), 0, 1).getBytes("UTF-8")
+ def run() {
+ assert(new String( decode(0x004D), "utf8") == new String(Array(0x004D.asInstanceOf[Char])))
+ assert(new String( decode(0x0430), "utf8") == new String(Array(0x0430.asInstanceOf[Char])))
+ assert(new String( decode(0x4E8C), "utf8") == new String(Array(0x4E8C.asInstanceOf[Char])))
+ assert(new String(decode(0x10302), "utf8") == new String(Array(0xD800.asInstanceOf[Char],
+ 0xDF02.asInstanceOf[Char])))
// a client
val test = "{\"a\":\"\\u0022\"}"
- val Expected = ("a","\"")
- assertTrue(scala.util.parsing.json.JSON.parse(test) match {
- case Some(List(Expected)) => true
- case z => Console.println(z); false
- })
+ val expected = "a" -> "\""
+
+ val parsed = scala.util.parsing.json.JSON.parseFull(test)
+ val result = parsed == Some(Map(expected))
+ if(result)
+ assert(result)
+ else {
+ Console.println(parsed); assert(result)
+ }
}
}
- class SourceTest extends TestCase("Source") {
- def runTest {
- val s = "Here is a test string"
+ object SourceTest {
+ def run() {
+ val s = "Here is a test string"
val f = io.Source.fromBytes(s.getBytes("utf-8"))
val b = new collection.mutable.ArrayBuffer[Char]()
f.copyToBuffer(b)
- assertEquals(s, new String(b.toArray))
-
- /* todo: same factories for BufferedSource and Source
- val g = io.BufferedSource.fromBytes(s.getBytes("utf-8"))
- val c = new collection.mutable.ArrayBuffer[Char]()
- g.copyToBuffer(c)
- assertEquals(s, new String(c.toArray))
- */
+ assert(s == new String(b.toArray))
}
}
}
diff --git a/test/files/run/valueclasses-classmanifest-basic.check b/test/files/run/valueclasses-classmanifest-basic.check
new file mode 100644
index 0000000..bc56c4d
--- /dev/null
+++ b/test/files/run/valueclasses-classmanifest-basic.check
@@ -0,0 +1 @@
+Foo
diff --git a/test/files/run/valueclasses-classmanifest-basic.scala b/test/files/run/valueclasses-classmanifest-basic.scala
new file mode 100644
index 0000000..c2aa08e
--- /dev/null
+++ b/test/files/run/valueclasses-classmanifest-basic.scala
@@ -0,0 +1,5 @@
+class Foo(val x: Int) extends AnyVal
+
+object Test extends App {
+ println(classManifest[Foo])
+}
\ No newline at end of file
diff --git a/test/files/run/valueclasses-classmanifest-existential.check b/test/files/run/valueclasses-classmanifest-existential.check
new file mode 100644
index 0000000..4577aac
--- /dev/null
+++ b/test/files/run/valueclasses-classmanifest-existential.check
@@ -0,0 +1 @@
+Foo[<?>]
diff --git a/test/files/run/valueclasses-classmanifest-existential.scala b/test/files/run/valueclasses-classmanifest-existential.scala
new file mode 100644
index 0000000..11999df
--- /dev/null
+++ b/test/files/run/valueclasses-classmanifest-existential.scala
@@ -0,0 +1,5 @@
+class Foo[T](val x: T) extends AnyVal
+
+object Test extends App {
+ println(classManifest[Foo[_]])
+}
\ No newline at end of file
diff --git a/test/files/run/valueclasses-classmanifest-generic.check b/test/files/run/valueclasses-classmanifest-generic.check
new file mode 100644
index 0000000..c6be42d
--- /dev/null
+++ b/test/files/run/valueclasses-classmanifest-generic.check
@@ -0,0 +1 @@
+Foo[java.lang.String]
diff --git a/test/files/run/valueclasses-classmanifest-generic.scala b/test/files/run/valueclasses-classmanifest-generic.scala
new file mode 100644
index 0000000..280152d
--- /dev/null
+++ b/test/files/run/valueclasses-classmanifest-generic.scala
@@ -0,0 +1,5 @@
+class Foo[T](val x: T) extends AnyVal
+
+object Test extends App {
+ println(classManifest[Foo[String]])
+}
\ No newline at end of file
diff --git a/test/files/run/valueclasses-classtag-basic.check b/test/files/run/valueclasses-classtag-basic.check
new file mode 100644
index 0000000..bc56c4d
--- /dev/null
+++ b/test/files/run/valueclasses-classtag-basic.check
@@ -0,0 +1 @@
+Foo
diff --git a/test/files/run/valueclasses-classtag-basic.scala b/test/files/run/valueclasses-classtag-basic.scala
new file mode 100644
index 0000000..912a4bb
--- /dev/null
+++ b/test/files/run/valueclasses-classtag-basic.scala
@@ -0,0 +1,5 @@
+class Foo(val x: Int) extends AnyVal
+
+object Test extends App {
+ println(scala.reflect.classTag[Foo])
+}
\ No newline at end of file
diff --git a/test/files/run/valueclasses-classtag-existential.check b/test/files/run/valueclasses-classtag-existential.check
new file mode 100644
index 0000000..9e2b9e1
--- /dev/null
+++ b/test/files/run/valueclasses-classtag-existential.check
@@ -0,0 +1 @@
+Object
diff --git a/test/files/run/valueclasses-classtag-existential.scala b/test/files/run/valueclasses-classtag-existential.scala
new file mode 100644
index 0000000..e0db9cd
--- /dev/null
+++ b/test/files/run/valueclasses-classtag-existential.scala
@@ -0,0 +1,5 @@
+class Foo[T](val x: T) extends AnyVal
+
+object Test extends App {
+ println(scala.reflect.classTag[Foo[_]])
+}
\ No newline at end of file
diff --git a/test/files/run/valueclasses-classtag-generic.check b/test/files/run/valueclasses-classtag-generic.check
new file mode 100644
index 0000000..bc56c4d
--- /dev/null
+++ b/test/files/run/valueclasses-classtag-generic.check
@@ -0,0 +1 @@
+Foo
diff --git a/test/files/run/valueclasses-classtag-generic.scala b/test/files/run/valueclasses-classtag-generic.scala
new file mode 100644
index 0000000..bd1f213
--- /dev/null
+++ b/test/files/run/valueclasses-classtag-generic.scala
@@ -0,0 +1,5 @@
+class Foo[T](val x: T) extends AnyVal
+
+object Test extends App {
+ println(scala.reflect.classTag[Foo[String]])
+}
\ No newline at end of file
diff --git a/test/files/run/valueclasses-constr.check b/test/files/run/valueclasses-constr.check
new file mode 100644
index 0000000..785e6fa
--- /dev/null
+++ b/test/files/run/valueclasses-constr.check
@@ -0,0 +1,10 @@
+16
+00:16:40
+16
+00:16:40
+16
+00:16:40
+16
+00:16:40
+16
+00:16:40
diff --git a/test/files/run/valueclasses-constr.scala b/test/files/run/valueclasses-constr.scala
new file mode 100644
index 0000000..652d8d8
--- /dev/null
+++ b/test/files/run/valueclasses-constr.scala
@@ -0,0 +1,79 @@
+package test1 {
+ object TOD {
+ final val SecondsPerDay = 86400
+
+ def apply(seconds: Int) = {
+ val n = seconds % SecondsPerDay
+ new TOD(if (n >= 0) n else n + SecondsPerDay)
+ }
+ }
+
+ final class TOD (val secondsOfDay: Int) extends AnyVal {
+ def hours = secondsOfDay / 3600
+ def minutes = (secondsOfDay / 60) % 60
+ def seconds = secondsOfDay % 60
+
+ override def toString = "%02d:%02d:%02d".format(hours, minutes, seconds)
+ }
+}
+package test2 {
+ object TOD {
+ final val SecondsPerDay = 86400
+
+ def apply(seconds: Int) = {
+ val n = seconds % SecondsPerDay
+ new TOD(if (n >= 0) n else n + SecondsPerDay)
+ }
+ }
+
+ final class TOD private[test2] (val secondsOfDay: Int) extends AnyVal {
+ def hours = secondsOfDay / 3600
+ def minutes = (secondsOfDay / 60) % 60
+ def seconds = secondsOfDay % 60
+
+ override def toString = "%02d:%02d:%02d".format(hours, minutes, seconds)
+ }
+
+ object Client {
+ def newTOD(x: Int) = new TOD(x)
+ }
+}
+
+package test3 {
+ object TOD {
+ final val SecondsPerDay = 86400
+
+ def apply(seconds: Int) = {
+ val n = seconds % SecondsPerDay
+ new TOD(if (n >= 0) n else n + SecondsPerDay)
+ }
+ }
+
+ final class TOD private (val secondsOfDay: Int) extends AnyVal {
+ def hours = secondsOfDay / 3600
+ def minutes = (secondsOfDay / 60) % 60
+ def seconds = secondsOfDay % 60
+
+ override def toString = "%02d:%02d:%02d".format(hours, minutes, seconds)
+ }
+}
+
+object Test extends App {
+
+ val y1: test1.TOD = new test1.TOD(1000)
+ val y2: test2.TOD = test2.Client.newTOD(1000)
+ val x1: test1.TOD = test1.TOD(1000)
+ val x2: test2.TOD = test2.TOD(1000)
+ val x3: test3.TOD = test3.TOD(1000)
+ println(y1.minutes)
+ println(y1)
+ println(y2.minutes)
+ println(y2)
+ println(x1.minutes)
+ println(x1)
+ println(x2.minutes)
+ println(x2)
+ println(x3.minutes)
+ println(x3)
+}
+
diff --git a/test/files/run/valueclasses-manifest-basic.check b/test/files/run/valueclasses-manifest-basic.check
new file mode 100644
index 0000000..bc56c4d
--- /dev/null
+++ b/test/files/run/valueclasses-manifest-basic.check
@@ -0,0 +1 @@
+Foo
diff --git a/test/files/run/valueclasses-manifest-basic.scala b/test/files/run/valueclasses-manifest-basic.scala
new file mode 100644
index 0000000..eefab20
--- /dev/null
+++ b/test/files/run/valueclasses-manifest-basic.scala
@@ -0,0 +1,5 @@
+class Foo(val x: Int) extends AnyVal
+
+object Test extends App {
+ println(manifest[Foo])
+}
\ No newline at end of file
diff --git a/test/files/run/valueclasses-manifest-existential.check b/test/files/run/valueclasses-manifest-existential.check
new file mode 100644
index 0000000..f91a575
--- /dev/null
+++ b/test/files/run/valueclasses-manifest-existential.check
@@ -0,0 +1 @@
+Foo[_ <: Any]
diff --git a/test/files/run/valueclasses-manifest-existential.scala b/test/files/run/valueclasses-manifest-existential.scala
new file mode 100644
index 0000000..47eb6d6
--- /dev/null
+++ b/test/files/run/valueclasses-manifest-existential.scala
@@ -0,0 +1,5 @@
+class Foo[T](val x: T) extends AnyVal
+
+object Test extends App {
+ println(manifest[Foo[_]])
+}
\ No newline at end of file
diff --git a/test/files/run/valueclasses-manifest-generic.check b/test/files/run/valueclasses-manifest-generic.check
new file mode 100644
index 0000000..c6be42d
--- /dev/null
+++ b/test/files/run/valueclasses-manifest-generic.check
@@ -0,0 +1 @@
+Foo[java.lang.String]
diff --git a/test/files/run/valueclasses-manifest-generic.scala b/test/files/run/valueclasses-manifest-generic.scala
new file mode 100644
index 0000000..18313fb
--- /dev/null
+++ b/test/files/run/valueclasses-manifest-generic.scala
@@ -0,0 +1,5 @@
+class Foo[T](val x: T) extends AnyVal
+
+object Test extends App {
+ println(manifest[Foo[String]])
+}
\ No newline at end of file
diff --git a/test/files/run/valueclasses-pavlov.check b/test/files/run/valueclasses-pavlov.check
new file mode 100644
index 0000000..b112e55
--- /dev/null
+++ b/test/files/run/valueclasses-pavlov.check
@@ -0,0 +1,2 @@
+box1: ok
+box2: ok
diff --git a/test/files/run/valueclasses-pavlov.scala b/test/files/run/valueclasses-pavlov.scala
new file mode 100644
index 0000000..e73897f
--- /dev/null
+++ b/test/files/run/valueclasses-pavlov.scala
@@ -0,0 +1,26 @@
+trait Foo extends Any {
+ def box1(x: Box1): String
+ def box2(x: Box2): String
+}
+
+class Box1(val value: String) extends AnyVal
+
+class Box2(val value: String) extends AnyVal with Foo {
+ def box1(x: Box1) = "box1: ok"
+ def box2(x: Box2) = "box2: ok"
+}
+
+class C(x: String) {
+ def this() = this("")
+}
+
+object Test {
+
+ def main(args: Array[String]) {
+ val b1 = new Box1("")
+ val b2 = new Box2("")
+ val f: Foo = b2
+ println(f.box1(b1))
+ println(f.box2(b2))
+ }
+}
diff --git a/test/files/run/valueclasses-typetag-basic.check b/test/files/run/valueclasses-typetag-basic.check
new file mode 100644
index 0000000..bc56c4d
--- /dev/null
+++ b/test/files/run/valueclasses-typetag-basic.check
@@ -0,0 +1 @@
+Foo
diff --git a/test/files/run/valueclasses-typetag-basic.scala b/test/files/run/valueclasses-typetag-basic.scala
new file mode 100644
index 0000000..d0243f7
--- /dev/null
+++ b/test/files/run/valueclasses-typetag-basic.scala
@@ -0,0 +1,5 @@
+class Foo(val x: Int) extends AnyVal
+
+object Test extends App {
+ println(scala.reflect.runtime.universe.typeOf[Foo])
+}
\ No newline at end of file
diff --git a/test/files/run/valueclasses-typetag-existential.check b/test/files/run/valueclasses-typetag-existential.check
new file mode 100644
index 0000000..d166a13
--- /dev/null
+++ b/test/files/run/valueclasses-typetag-existential.check
@@ -0,0 +1 @@
+Foo[_]
diff --git a/test/files/run/valueclasses-typetag-existential.scala b/test/files/run/valueclasses-typetag-existential.scala
new file mode 100644
index 0000000..4cdaa44
--- /dev/null
+++ b/test/files/run/valueclasses-typetag-existential.scala
@@ -0,0 +1,5 @@
+class Foo[T](val x: T) extends AnyVal
+
+object Test extends App {
+ println(scala.reflect.runtime.universe.typeOf[Foo[_]])
+}
\ No newline at end of file
diff --git a/test/files/run/valueclasses-typetag-generic.check b/test/files/run/valueclasses-typetag-generic.check
new file mode 100644
index 0000000..534d1b3
--- /dev/null
+++ b/test/files/run/valueclasses-typetag-generic.check
@@ -0,0 +1 @@
+Foo[String]
diff --git a/test/files/run/valueclasses-typetag-generic.scala b/test/files/run/valueclasses-typetag-generic.scala
new file mode 100644
index 0000000..eb32dfc
--- /dev/null
+++ b/test/files/run/valueclasses-typetag-generic.scala
@@ -0,0 +1,5 @@
+class Foo[T](val x: T) extends AnyVal
+
+object Test extends App {
+ println(scala.reflect.runtime.universe.typeOf[Foo[String]])
+}
\ No newline at end of file
diff --git a/test/files/run/viewtest.scala b/test/files/run/viewtest.scala
old mode 100644
new mode 100755
diff --git a/test/files/run/virtpatmat_alts.check b/test/files/run/virtpatmat_alts.check
new file mode 100644
index 0000000..7a4ad0a
--- /dev/null
+++ b/test/files/run/virtpatmat_alts.check
@@ -0,0 +1 @@
+OK 5
diff --git a/test/files/run/virtpatmat_alts.flags b/test/files/run/virtpatmat_alts.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/run/virtpatmat_alts.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/run/virtpatmat_alts.scala b/test/files/run/virtpatmat_alts.scala
new file mode 100644
index 0000000..b771752
--- /dev/null
+++ b/test/files/run/virtpatmat_alts.scala
@@ -0,0 +1,12 @@
+object Test extends App {
+ (true, true) match {
+ case (true, true) | (false, false) => 1
+ }
+
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil => println("FAILED")
+ case (x@(4 | 5 | 6)) :: Nil => println("OK "+ x)
+ case 7 :: Nil => println("FAILED")
+ case Nil => println("FAILED")
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_apply.check b/test/files/run/virtpatmat_apply.check
new file mode 100644
index 0000000..e8e3b29
--- /dev/null
+++ b/test/files/run/virtpatmat_apply.check
@@ -0,0 +1 @@
+OK 2
diff --git a/test/files/run/virtpatmat_apply.flags b/test/files/run/virtpatmat_apply.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/run/virtpatmat_apply.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/run/virtpatmat_apply.scala b/test/files/run/virtpatmat_apply.scala
new file mode 100644
index 0000000..34cb80e
--- /dev/null
+++ b/test/files/run/virtpatmat_apply.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+ List(1, 2, 3) match {
+ case Nil => println("FAIL")
+ case x :: y :: xs if xs.length == 2 => println("FAIL")
+ case x :: y :: xs if xs.length == 1 => println("OK "+ y)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_casting.check b/test/files/run/virtpatmat_casting.check
new file mode 100644
index 0000000..b11425e
--- /dev/null
+++ b/test/files/run/virtpatmat_casting.check
@@ -0,0 +1 @@
+List(1)
diff --git a/test/files/run/virtpatmat_casting.flags b/test/files/run/virtpatmat_casting.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/run/virtpatmat_casting.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/run/virtpatmat_casting.scala b/test/files/run/virtpatmat_casting.scala
new file mode 100644
index 0000000..d970aba
--- /dev/null
+++ b/test/files/run/virtpatmat_casting.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ println(List(1,2,3) match {
+ case Nil => List(0)
+// since the :: extractor's argument must be a ::, there has to be a cast before its unapply is invoked
+ case x :: y :: z :: a :: xs => xs ++ List(x)
+ case x :: y :: z :: xs => xs ++ List(x)
+ })
+}
diff --git a/test/files/run/virtpatmat_extends_product.check b/test/files/run/virtpatmat_extends_product.check
new file mode 100644
index 0000000..c07e838
--- /dev/null
+++ b/test/files/run/virtpatmat_extends_product.check
@@ -0,0 +1 @@
+AnnotationInfo(a,1)
diff --git a/test/files/run/virtpatmat_extends_product.flags b/test/files/run/virtpatmat_extends_product.flags
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/test/files/run/virtpatmat_extends_product.flags
@@ -0,0 +1 @@
+
diff --git a/test/files/run/virtpatmat_extends_product.scala b/test/files/run/virtpatmat_extends_product.scala
new file mode 100644
index 0000000..4b4bc63
--- /dev/null
+++ b/test/files/run/virtpatmat_extends_product.scala
@@ -0,0 +1,14 @@
+object Test extends App {
+ case class AnnotationInfo(a: String, b: Int) extends Product2[String, Int] {
+ def _1 = a
+ def _2 = b
+ }
+
+ // if we're not careful in unapplyTypeListFromReturnType, the generated unapply is
+ // thought to return two components instead of one, since AnnotationInfo (the result of the unapply) is a Product2
+ case class NestedAnnotArg(ai: AnnotationInfo)
+
+ NestedAnnotArg(AnnotationInfo("a", 1)) match {
+ case NestedAnnotArg(x) => println(x)
+ }
+}
diff --git a/test/files/run/virtpatmat_literal.check b/test/files/run/virtpatmat_literal.check
new file mode 100644
index 0000000..0eabe36
--- /dev/null
+++ b/test/files/run/virtpatmat_literal.check
@@ -0,0 +1,3 @@
+OK
+OK
+OK
diff --git a/test/files/run/virtpatmat_literal.flags b/test/files/run/virtpatmat_literal.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/run/virtpatmat_literal.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/run/virtpatmat_literal.scala b/test/files/run/virtpatmat_literal.scala
new file mode 100644
index 0000000..5bd6b30
--- /dev/null
+++ b/test/files/run/virtpatmat_literal.scala
@@ -0,0 +1,22 @@
+object Test extends App {
+ val a = 1
+ 1 match {
+ case 2 => println("FAILED")
+ case 1 => println("OK")
+ case `a` => println("FAILED")
+ }
+
+ val one = 1
+ 1 match {
+ case 2 => println("FAILED")
+ case `one` => println("OK")
+ case 1 => println("FAILED")
+ }
+
+ 1 match {
+ case 2 => println("FAILED")
+ case Test.one => println("OK")
+ case 1 => println("FAILED")
+ }
+
+}
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_nested_lists.check b/test/files/run/virtpatmat_nested_lists.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/files/run/virtpatmat_nested_lists.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_nested_lists.flags b/test/files/run/virtpatmat_nested_lists.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/run/virtpatmat_nested_lists.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/run/virtpatmat_nested_lists.scala b/test/files/run/virtpatmat_nested_lists.scala
new file mode 100644
index 0000000..fef74ce
--- /dev/null
+++ b/test/files/run/virtpatmat_nested_lists.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ List(List(1), List(2)) match { case x :: (y :: Nil) :: Nil => println(y) }
+}
diff --git a/test/files/run/virtpatmat_npe.check b/test/files/run/virtpatmat_npe.check
new file mode 100644
index 0000000..a0aba93
--- /dev/null
+++ b/test/files/run/virtpatmat_npe.check
@@ -0,0 +1 @@
+OK
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_npe.flags b/test/files/run/virtpatmat_npe.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/run/virtpatmat_npe.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/run/virtpatmat_npe.scala b/test/files/run/virtpatmat_npe.scala
new file mode 100644
index 0000000..84a9276
--- /dev/null
+++ b/test/files/run/virtpatmat_npe.scala
@@ -0,0 +1,10 @@
+class C {
+ class D
+ val values = new Array[AnyRef](10)
+ values(0) match {
+ case name: D => println("NOK: "+ name) // the outer check on D's outer should not cause a NPE
+ case null => println("OK")
+ }
+}
+
+object Test extends C with App
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_opt_sharing.check b/test/files/run/virtpatmat_opt_sharing.check
new file mode 100644
index 0000000..d00491f
--- /dev/null
+++ b/test/files/run/virtpatmat_opt_sharing.check
@@ -0,0 +1 @@
+1
diff --git a/test/files/run/virtpatmat_opt_sharing.flags b/test/files/run/virtpatmat_opt_sharing.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/run/virtpatmat_opt_sharing.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/run/virtpatmat_opt_sharing.scala b/test/files/run/virtpatmat_opt_sharing.scala
new file mode 100644
index 0000000..119e305
--- /dev/null
+++ b/test/files/run/virtpatmat_opt_sharing.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+ virtMatch()
+ def virtMatch() = {
+ List(1, 3, 4, 7) match {
+ case 1 :: 3 :: 4 :: 5 :: x => println("nope")
+ case 1 :: 3 :: 4 :: 6 :: x => println("nope")
+ case 1 :: 3 :: 4 :: 7 :: x => println(1)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_partial.check b/test/files/run/virtpatmat_partial.check
new file mode 100644
index 0000000..137d16d
--- /dev/null
+++ b/test/files/run/virtpatmat_partial.check
@@ -0,0 +1,17 @@
+Map(a -> Some(1), b -> None)
+Map(a -> 1)
+a
+undefined
+a
+undefined
+a
+undefined
+a
+undefined
+hai!
+hai!
+2
+hai!
+undefined
+1
+undefined
diff --git a/test/files/run/virtpatmat_partial.flags b/test/files/run/virtpatmat_partial.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/run/virtpatmat_partial.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/run/virtpatmat_partial.scala b/test/files/run/virtpatmat_partial.scala
new file mode 100644
index 0000000..a235314
--- /dev/null
+++ b/test/files/run/virtpatmat_partial.scala
@@ -0,0 +1,181 @@
+object Test extends App {
+ val a = Map("a" -> Some(1), "b" -> None)
+ println(a)
+
+// inferred type should be Map[String, Int]
+ val res = a collect {case (p, Some(a)) => (p, a)}
+
+// variations: const target -> switch, non-const -> normal match, char target --> scrut needs toInt,
+// eta-expanded --> work is done by typedFunction, non-eta-expanded --> typedMatch
+
+ object nonConstCharEta {
+ final val GT : Char = 'a'
+ final val GTGT : Char = 'b'
+ final val GTGTGT : Char = 'c'
+ final val GTEQ : Char = 'd'
+ final val GTGTEQ : Char = 'e'
+ final val GTGTGTEQ: Char = 'f'
+ final val ASSIGN : Char = 'g'
+
+ def acceptClosingAngle(in: Char) {
+ val closers: PartialFunction[Char, Char] = {
+ case GTGTGTEQ => GTGTEQ
+ case GTGTGT => GTGT
+ case GTGTEQ => GTEQ
+ case GTGT => GT
+ case GTEQ => ASSIGN
+ }
+ if (closers isDefinedAt in) println(closers(in))
+ else println("undefined")
+ }
+
+ def test() = {
+ acceptClosingAngle(GTGT)
+ acceptClosingAngle(ASSIGN)
+ }
+ }
+
+ object nonConstChar {
+ final val GT : Char = 'a'
+ final val GTGT : Char = 'b'
+ final val GTGTGT : Char = 'c'
+ final val GTEQ : Char = 'd'
+ final val GTGTEQ : Char = 'e'
+ final val GTGTGTEQ: Char = 'f'
+ final val ASSIGN : Char = 'g'
+
+ def acceptClosingAngle(in: Char) {
+ val closers: PartialFunction[Char, Char] = x => x match {
+ case GTGTGTEQ => GTGTEQ
+ case GTGTGT => GTGT
+ case GTGTEQ => GTEQ
+ case GTGT => GT
+ case GTEQ => ASSIGN
+ }
+ if (closers isDefinedAt in) println(closers(in))
+ else println("undefined")
+ }
+
+ def test() = {
+ acceptClosingAngle(GTGT)
+ acceptClosingAngle(ASSIGN)
+ }
+ }
+
+ object constCharEta {
+ final val GT = 'a'
+ final val GTGT = 'b'
+ final val GTGTGT = 'c'
+ final val GTEQ = 'd'
+ final val GTGTEQ = 'e'
+ final val GTGTGTEQ= 'f'
+ final val ASSIGN = 'g'
+
+ def acceptClosingAngle(in: Char) {
+ val closers: PartialFunction[Char, Char] = x => x match {
+ case GTGTGTEQ => GTGTEQ
+ case GTGTGT => GTGT
+ case GTGTEQ => GTEQ
+ case GTGT => GT
+ case GTEQ => ASSIGN
+ }
+ if (closers isDefinedAt in) println(closers(in))
+ else println("undefined")
+ }
+
+ def test() = {
+ acceptClosingAngle(GTGT)
+ acceptClosingAngle(ASSIGN)
+ }
+ }
+
+ object constChar {
+ final val GT = 'a'
+ final val GTGT = 'b'
+ final val GTGTGT = 'c'
+ final val GTEQ = 'd'
+ final val GTGTEQ = 'e'
+ final val GTGTGTEQ= 'f'
+ final val ASSIGN = 'g'
+
+ def acceptClosingAngle(in: Char) {
+ val closers: PartialFunction[Char, Char] = {
+ case GTGTGTEQ => GTGTEQ
+ case GTGTGT => GTGT
+ case GTGTEQ => GTEQ
+ case GTGT => GT
+ case GTEQ => ASSIGN
+ }
+ if (closers isDefinedAt in) println(closers(in))
+ else println("undefined")
+ }
+
+ def test() = {
+ acceptClosingAngle(GTGT)
+ acceptClosingAngle(ASSIGN)
+ }
+ }
+
+ object constIntEta {
+ final val GT = 1
+ final val GTGT = 2
+ final val GTGTGT = 3
+ final val GTEQ = 4
+ final val GTGTEQ = 5
+ final val GTGTGTEQ = 6
+ final val ASSIGN = 7
+
+ def acceptClosingAngle(in: Int) {
+ val closers: PartialFunction[Int, Int] = x => {println("hai!"); (x + 1)} match {
+ case GTGTGTEQ => GTGTEQ
+ case GTGTGT => GTGT
+ case GTGTEQ => GTEQ
+ case GTGT => GT
+ case GTEQ => ASSIGN
+ }
+ if (closers isDefinedAt in) println(closers(in))
+ else println("undefined")
+ }
+
+ def test() = {
+ acceptClosingAngle(GTGT)
+ acceptClosingAngle(ASSIGN)
+ }
+ }
+
+ object constInt {
+ final val GT = 1
+ final val GTGT = 2
+ final val GTGTGT = 3
+ final val GTEQ = 4
+ final val GTGTEQ = 5
+ final val GTGTGTEQ = 6
+ final val ASSIGN = 7
+
+ def acceptClosingAngle(in: Int) {
+ val closers: PartialFunction[Int, Int] = {
+ case GTGTGTEQ => GTGTEQ
+ case GTGTGT => GTGT
+ case GTGTEQ => GTEQ
+ case GTGT => GT
+ case GTEQ => ASSIGN
+ }
+ if (closers isDefinedAt in) println(closers(in))
+ else println("undefined")
+ }
+
+ def test() = {
+ acceptClosingAngle(GTGT)
+ acceptClosingAngle(ASSIGN)
+ }
+ }
+
+ println(res) // prints "Map(a -> 1)"
+
+ nonConstCharEta.test()
+ nonConstChar.test()
+ constCharEta.test()
+ constChar.test()
+ constIntEta.test()
+ constInt.test()
+}
diff --git a/test/files/run/virtpatmat_partial_backquoted.check b/test/files/run/virtpatmat_partial_backquoted.check
new file mode 100644
index 0000000..8ab8f29
--- /dev/null
+++ b/test/files/run/virtpatmat_partial_backquoted.check
@@ -0,0 +1 @@
+Set(You got me!)
diff --git a/test/files/run/virtpatmat_partial_backquoted.scala b/test/files/run/virtpatmat_partial_backquoted.scala
new file mode 100644
index 0000000..6d92229
--- /dev/null
+++ b/test/files/run/virtpatmat_partial_backquoted.scala
@@ -0,0 +1,12 @@
+object Test extends App {
+ class Region { override def toString = "You got me!" }
+ class SymbolType
+ case class SymbolInfo(tp: SymbolType, regions: List[Region], x: Any)
+
+ def findRegionsWithSymbolType(rawSymbolInfos: Seq[SymbolInfo], symbolType: SymbolType): Set[Region] =
+ rawSymbolInfos.collect { case SymbolInfo(`symbolType`, regions, _) => regions }.flatten.toSet
+
+ val stp = new SymbolType
+ val stp2 = new SymbolType
+ println(findRegionsWithSymbolType(List(SymbolInfo(stp2, List(), null), SymbolInfo(stp, List(new Region), null)), stp))
+}
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_staging.check b/test/files/run/virtpatmat_staging.check
new file mode 100644
index 0000000..106ae40
--- /dev/null
+++ b/test/files/run/virtpatmat_staging.check
@@ -0,0 +1 @@
+runOrElse(7, ?guard(false,?).flatMap(? =>one(foo)).orElse(one(bar)))
diff --git a/test/files/run/virtpatmat_staging.flags b/test/files/run/virtpatmat_staging.flags
new file mode 100644
index 0000000..48fd867
--- /dev/null
+++ b/test/files/run/virtpatmat_staging.flags
@@ -0,0 +1 @@
+-Xexperimental
diff --git a/test/files/run/virtpatmat_staging.scala b/test/files/run/virtpatmat_staging.scala
new file mode 100644
index 0000000..c17b450
--- /dev/null
+++ b/test/files/run/virtpatmat_staging.scala
@@ -0,0 +1,52 @@
+trait Intf {
+ type Rep[+T]
+ type M[+T] = Rep[Maybe[T]]
+
+ val __match: Matcher
+ abstract class Matcher {
+ // runs the matcher on the given input
+ def runOrElse[T, U](in: Rep[T])(matcher: Rep[T] => M[U]): Rep[U]
+
+ def zero: M[Nothing]
+ def one[T](x: Rep[T]): M[T]
+ def guard[T](cond: Rep[Boolean], then: => Rep[T]): M[T]
+ def isSuccess[T, U](x: Rep[T])(f: Rep[T] => M[U]): Rep[Boolean] // used for isDefinedAt
+ }
+
+ abstract class Maybe[+A] {
+ def flatMap[B](f: Rep[A] => M[B]): M[B]
+ def orElse[B >: A](alternative: => M[B]): M[B]
+ }
+
+ implicit def proxyMaybe[A](m: M[A]): Maybe[A]
+ implicit def repInt(x: Int): Rep[Int]
+ implicit def repBoolean(x: Boolean): Rep[Boolean]
+ implicit def repString(x: String): Rep[String]
+
+ def test = 7 match { case 5 => "foo" case _ => "bar" }
+}
+
+trait Impl extends Intf {
+ type Rep[+T] = String
+
+ object __match extends Matcher {
+ def runOrElse[T, U](in: Rep[T])(matcher: Rep[T] => M[U]): Rep[U] = ("runOrElse("+ in +", ?" + matcher("?") + ")")
+ def zero: M[Nothing] = "zero"
+ def one[T](x: Rep[T]): M[T] = "one("+x.toString+")"
+ def guard[T](cond: Rep[Boolean], then: => Rep[T]): M[T] = "guard("+cond+","+then+")"
+ def isSuccess[T, U](x: Rep[T])(f: Rep[T] => M[U]): Rep[Boolean] = ("isSuccess("+x+", ?" + f("?") + ")")
+ }
+
+ implicit def proxyMaybe[A](m: M[A]): Maybe[A] = new Maybe[A] {
+ def flatMap[B](f: Rep[A] => M[B]): M[B] = m + ".flatMap(? =>"+ f("?") +")"
+ def orElse[B >: A](alternative: => M[B]): M[B] = m + ".orElse("+ alternative +")"
+ }
+
+ def repInt(x: Int): Rep[Int] = x.toString
+ def repBoolean(x: Boolean): Rep[Boolean] = x.toString
+ def repString(x: String): Rep[String] = x
+}
+
+object Test extends Impl with Intf with App {
+ println(test)
+}
diff --git a/test/files/run/virtpatmat_stringinterp.check b/test/files/run/virtpatmat_stringinterp.check
new file mode 100644
index 0000000..7927f4f
--- /dev/null
+++ b/test/files/run/virtpatmat_stringinterp.check
@@ -0,0 +1 @@
+Node(1)
diff --git a/test/files/pos/annotDepMethType.flags b/test/files/run/virtpatmat_stringinterp.flags
similarity index 100%
rename from test/files/pos/annotDepMethType.flags
rename to test/files/run/virtpatmat_stringinterp.flags
diff --git a/test/files/run/virtpatmat_stringinterp.scala b/test/files/run/virtpatmat_stringinterp.scala
new file mode 100644
index 0000000..213712f
--- /dev/null
+++ b/test/files/run/virtpatmat_stringinterp.scala
@@ -0,0 +1,13 @@
+object Test extends App {
+ case class Node(x: Int)
+
+ implicit def sc2xml(sc: StringContext): XMLContext = new XMLContext(sc)
+ class XMLContext(sc: StringContext) {
+ object xml {
+ def unapplySeq(xml: Node): Option[Seq[Node]] = Some(List(Node(1)))
+ }
+ }
+
+ val x: Node = Node(0)
+ x match { case xml"""<foo arg=$a/>""" => println(a) }
+}
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_switch.check b/test/files/run/virtpatmat_switch.check
new file mode 100644
index 0000000..6ded95c
--- /dev/null
+++ b/test/files/run/virtpatmat_switch.check
@@ -0,0 +1,7 @@
+zero
+one
+many
+got a
+got b
+got some letter
+scala.MatchError: 5 (of class java.lang.Integer)
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_switch.flags b/test/files/run/virtpatmat_switch.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/run/virtpatmat_switch.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/run/virtpatmat_switch.scala b/test/files/run/virtpatmat_switch.scala
new file mode 100644
index 0000000..1329c19
--- /dev/null
+++ b/test/files/run/virtpatmat_switch.scala
@@ -0,0 +1,38 @@
+object Test extends App {
+ def intSwitch(x: Int) = x match {
+ case 0 => "zero"
+ case 1 => "one"
+ case _ => "many"
+ }
+
+ println(intSwitch(0))
+ println(intSwitch(1))
+ println(intSwitch(10))
+
+ def charSwitch(x: Char) = x match {
+ case 'a' => "got a"
+ case 'b' => "got b"
+ case _ => "got some letter"
+ }
+
+ def byteSwitch(x: Byte) = x match {
+ case 'a' => "got a"
+ case 'b' => "got b"
+ case _ => "got some letter"
+ }
+
+ println(charSwitch('a'))
+ println(byteSwitch('b'))
+ println(charSwitch('z'))
+
+ def implicitDefault(x: Int) = x match {
+ case 0 => 0
+ }
+
+ try {
+ implicitDefault(5)
+ } catch {
+ case e: MatchError => println(e)
+ }
+
+}
diff --git a/test/pending/jvm/t1464.check b/test/files/run/virtpatmat_tailcalls_verifyerror.check
similarity index 100%
rename from test/pending/jvm/t1464.check
rename to test/files/run/virtpatmat_tailcalls_verifyerror.check
diff --git a/test/files/run/virtpatmat_tailcalls_verifyerror.flags b/test/files/run/virtpatmat_tailcalls_verifyerror.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/run/virtpatmat_tailcalls_verifyerror.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/run/virtpatmat_tailcalls_verifyerror.scala b/test/files/run/virtpatmat_tailcalls_verifyerror.scala
new file mode 100644
index 0000000..5ce91e8
--- /dev/null
+++ b/test/files/run/virtpatmat_tailcalls_verifyerror.scala
@@ -0,0 +1,14 @@
+// shouldn't result in a verify error when run...
+object Test extends App {
+ @annotation.tailrec
+ final def test(meh: Boolean): Boolean = {
+ Some("a") match {
+ case x =>
+ x match {
+ case Some(_) => if(meh) test(false) else false
+ case _ => test(false)
+ }
+ }
+ }
+ println(test(true))
+}
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_try.check b/test/files/run/virtpatmat_try.check
new file mode 100644
index 0000000..80ebbf4
--- /dev/null
+++ b/test/files/run/virtpatmat_try.check
@@ -0,0 +1,2 @@
+meh
+B
diff --git a/test/files/run/virtpatmat_try.flags b/test/files/run/virtpatmat_try.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/run/virtpatmat_try.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/run/virtpatmat_try.scala b/test/files/run/virtpatmat_try.scala
new file mode 100644
index 0000000..46e67cb
--- /dev/null
+++ b/test/files/run/virtpatmat_try.scala
@@ -0,0 +1,47 @@
+object Test extends App {
+ case class A(val x: String) extends Throwable
+ class B extends Exception { override def toString = "B" }
+ def bla = 0
+
+ try {
+ throw new A("meh")
+ } catch { // this should emit a "catch-switch"
+ case y: A => println(y.x)
+ case (_ : A | _ : B) => println("B")
+ case _ => println("other")
+ }
+
+ try {
+ throw new B()
+ } catch { // case classes and alternative flattening aren't supported yet, but could be in principle
+ // case A(x) => println(x)
+ case y: A => println(y.x)
+ case x@((_ : A) | (_ : B)) => println(x)
+ case _ => println("other")
+ }
+
+ def simpleTry {
+ try {
+ bla
+ } catch {
+ case x: Exception if x.getMessage == "test" => println("first case " + x)
+ case x: Exception => println("second case " + x)
+ }
+ }
+
+ def typedWildcardTry {
+ try { bla } catch { case _: ClassCastException => bla }
+ }
+
+ def wildcardTry {
+ try { bla } catch { case _ => bla }
+ }
+
+ def tryPlusFinally {
+ try { bla } finally { println("finally") }
+ }
+
+ def catchAndPassToLambda {
+ try { bla } catch { case ex: Exception => val f = () => ex }
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_typed.check b/test/files/run/virtpatmat_typed.check
new file mode 100644
index 0000000..cec2740
--- /dev/null
+++ b/test/files/run/virtpatmat_typed.check
@@ -0,0 +1 @@
+OK foo
diff --git a/test/files/run/virtpatmat_typed.flags b/test/files/run/virtpatmat_typed.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/run/virtpatmat_typed.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/run/virtpatmat_typed.scala b/test/files/run/virtpatmat_typed.scala
new file mode 100644
index 0000000..a9863cc
--- /dev/null
+++ b/test/files/run/virtpatmat_typed.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+ ("foo": Any) match {
+ case x: Int => println("FAILED")
+ case x: String => println("OK "+ x)
+ case x: String => println("FAILED")
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_typetag.check b/test/files/run/virtpatmat_typetag.check
new file mode 100644
index 0000000..cac9d9a
--- /dev/null
+++ b/test/files/run/virtpatmat_typetag.check
@@ -0,0 +1,10 @@
+1 is not a Int; it's a class java.lang.Integer
+1 is a java.lang.Integer
+1 is not a java.lang.String; it's a class java.lang.Integer
+true is a Any
+woele is a java.lang.String
+1 is not a Int; it's a class java.lang.Integer
+1 is a java.lang.Integer
+1 is not a java.lang.String; it's a class java.lang.Integer
+true is a Any
+woele is a java.lang.String
diff --git a/test/files/neg/caseinherit.flags b/test/files/run/virtpatmat_typetag.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/files/run/virtpatmat_typetag.flags
diff --git a/test/files/run/virtpatmat_typetag.scala b/test/files/run/virtpatmat_typetag.scala
new file mode 100644
index 0000000..c1b1fd8
--- /dev/null
+++ b/test/files/run/virtpatmat_typetag.scala
@@ -0,0 +1,36 @@
+import reflect.{ClassTag, classTag}
+
+trait Extractors {
+ type T
+ implicit val tTag: ClassTag[T]
+ object ExtractT {
+ def unapply(x: T) = Some(x)
+ }
+ def apply(a: Any) = a match {
+ case ExtractT(x) => println(x +" is a "+ implicitly[ClassTag[T]])
+ case _ => println(a+ " is not a "+ implicitly[ClassTag[T]] +"; it's a "+ a.getClass)
+ }
+}
+
+object Test extends App {
+ def typeMatch[T: ClassTag](a: Any) = a match {
+ case x : T => println(x +" is a "+ implicitly[ClassTag[T]])
+ case _ => println(a+ " is not a "+ implicitly[ClassTag[T]] +"; it's a "+ a.getClass)
+ }
+
+ // the same match as typeMatch, but using an extractor
+ def extractorMatch[S: ClassTag](a: Any) =
+ (new Extractors { type T = S; val tTag = classTag[T] })(a)
+
+ typeMatch[Int](1)
+ typeMatch[Integer](1)
+ typeMatch[String](1)
+ typeMatch[Any](true)
+ typeMatch[String]("woele")
+
+ extractorMatch[Int](1)
+ extractorMatch[Integer](1)
+ extractorMatch[String](1)
+ extractorMatch[Any](true)
+ extractorMatch[String]("woele")
+}
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_unapply.check b/test/files/run/virtpatmat_unapply.check
new file mode 100644
index 0000000..2b89b77
--- /dev/null
+++ b/test/files/run/virtpatmat_unapply.check
@@ -0,0 +1,2 @@
+1
+6
diff --git a/test/files/run/virtpatmat_unapply.flags b/test/files/run/virtpatmat_unapply.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/run/virtpatmat_unapply.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/run/virtpatmat_unapply.scala b/test/files/run/virtpatmat_unapply.scala
new file mode 100644
index 0000000..6d7b4db
--- /dev/null
+++ b/test/files/run/virtpatmat_unapply.scala
@@ -0,0 +1,32 @@
+class IntList(val hd: Int, val tl: IntList)
+object NilIL extends IntList(0, null)
+object IntList {
+ def unapply(il: IntList): Option[(Int, IntList)] = if(il eq NilIL) None else Some(il.hd, il.tl)
+ def apply(x: Int, xs: IntList) = new IntList(x, xs)
+}
+
+object Test extends App {
+ IntList(1, IntList(2, NilIL)) match {
+ case IntList(a1, IntList(a2, IntList(a3, y))) => println(a1 + a2 + a3)
+ case IntList(x, y) => println(x)
+ }
+
+ IntList(1, IntList(2, IntList(3, NilIL))) match {
+ case IntList(a1, IntList(a2, IntList(a3, y))) => println(a1 + a2 + a3)
+ case IntList(x, y) => println(x)
+ }
+}
+
+// ((x1: IntList) => IntList.unapply(x1).flatMap(((x4: (Int, IntList)) => IntList.unapply(x4._2).flatMap(((x5: (Int, IntList)) => IntList.unapply(x5._2).flatMap(((x6: (Int, IntList)) => implicitly[Predef.MatchingStrategy[Option]].success(Predef.println(x4._1.+(x5._1).+(x6._1))))))))).orElse(IntList.unapply(x1).flatMap(((x7: (Int, IntList)) => implicitly[scala.Predef.MatchingStrategy[Option]].success(Predef.println(x7._1))))).orElse(implicitly[scala.Predef.MatchingStrategy[Option]].fail)) [...]
+
+/*
+ ((x1: IntList) =>
+ IntList.this.unapply(x1).flatMap[Int](((x4: (Int, IntList)) =>
+ IntList.this.unapply(x4._2).flatMap[Int](((x5: (Int, IntList)) =>
+ IntList.this.unapply(x5._2).flatMap[Int](((x6: (Int, IntList)) =>
+ Predef.this.implicitly[scala.Predef.MatchingStrategy[Option]](scala.this.Predef.OptionMatching).success[Int](x6._1))))))).orElse[Int](
+ IntList.this.unapply(x1).flatMap[Int](((x7: (Int, IntList)) =>
+ Predef.this.implicitly[scala.Predef.MatchingStrategy[Option]](scala.this.Predef.OptionMatching).success[Int](x7._1)))).orElse[Int](
+ Predef.this.implicitly[scala.Predef.MatchingStrategy[Option]](scala.this.Predef.OptionMatching).fail)
+ ).apply(IntList.apply(1, null))
+*/
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_unapplyprod.check b/test/files/run/virtpatmat_unapplyprod.check
new file mode 100644
index 0000000..2660ff8
--- /dev/null
+++ b/test/files/run/virtpatmat_unapplyprod.check
@@ -0,0 +1,4 @@
+(2,3)
+(2,3)
+(2,3)
+List(true, false, true)
diff --git a/test/files/run/virtpatmat_unapplyprod.flags b/test/files/run/virtpatmat_unapplyprod.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/run/virtpatmat_unapplyprod.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/run/virtpatmat_unapplyprod.scala b/test/files/run/virtpatmat_unapplyprod.scala
new file mode 100644
index 0000000..441e5e3
--- /dev/null
+++ b/test/files/run/virtpatmat_unapplyprod.scala
@@ -0,0 +1,23 @@
+object Test extends App {
+ case class Foo(x: Int, y: String)
+
+ Foo(2, "3") match {
+ case Foo(x, y) => println((x, y))
+ }
+
+ case class FooSeq(x: Int, y: String, z: Boolean*)
+
+ FooSeq(2, "3") match {
+ case FooSeq(x, y) => println((x, y))
+ }
+
+ FooSeq(2, "3", true, false, true) match {
+ case FooSeq(x, y) => println("nope")
+ case FooSeq(x, y, true, false, true) => println((x, y))
+ }
+
+ FooSeq(1, "a", true, false, true) match {
+ case FooSeq(1, "a") => println("nope")
+ case FooSeq(1, "a", x at _* ) => println(x.toList)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_unapplyseq.check b/test/files/run/virtpatmat_unapplyseq.check
new file mode 100644
index 0000000..62f9457
--- /dev/null
+++ b/test/files/run/virtpatmat_unapplyseq.check
@@ -0,0 +1 @@
+6
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_unapplyseq.flags b/test/files/run/virtpatmat_unapplyseq.flags
new file mode 100644
index 0000000..3f5a310
--- /dev/null
+++ b/test/files/run/virtpatmat_unapplyseq.flags
@@ -0,0 +1 @@
+ -Xexperimental
diff --git a/test/files/run/virtpatmat_unapplyseq.scala b/test/files/run/virtpatmat_unapplyseq.scala
new file mode 100644
index 0000000..270fa90
--- /dev/null
+++ b/test/files/run/virtpatmat_unapplyseq.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ List(1,2,3) match {
+ case Seq(x, y, z) => println(x * y * z)
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/virtpatmat_valdef.check b/test/files/run/virtpatmat_valdef.check
new file mode 100644
index 0000000..1a45335
--- /dev/null
+++ b/test/files/run/virtpatmat_valdef.check
@@ -0,0 +1 @@
+meh(true,null)
diff --git a/test/files/run/virtpatmat_valdef.scala b/test/files/run/virtpatmat_valdef.scala
new file mode 100644
index 0000000..f1a9b46
--- /dev/null
+++ b/test/files/run/virtpatmat_valdef.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ // patterns in valdefs...
+ // TODO: irrefutability should indicate we don't actually need to test, just deconstruct
+ val (modified, result) : (Boolean, String) = (true, null)
+ println("meh"+ (modified, result))
+}
\ No newline at end of file
diff --git a/test/files/run/weakconform.scala b/test/files/run/weakconform.scala
old mode 100644
new mode 100755
diff --git a/test/files/run/xml-attribute.check b/test/files/run/xml-attribute.check
new file mode 100644
index 0000000..3cfe377
--- /dev/null
+++ b/test/files/run/xml-attribute.check
@@ -0,0 +1,12 @@
+<t/>
+<t/>
+<t/>
+<t/>
+<t/>
+<t b="1" d="2"/>
+<t b="1" d="2"/>
+<t b="1" d="2"/>
+<t a="1" d="2"/>
+<t b="1" d="2"/>
+<t a="1" b="2" c="3"/>
+<t g="1" e="2" p:a="3" f:e="4" mgruhu:ji="5"/>
diff --git a/test/files/run/xml-attribute.scala b/test/files/run/xml-attribute.scala
new file mode 100644
index 0000000..eb3956c
--- /dev/null
+++ b/test/files/run/xml-attribute.scala
@@ -0,0 +1,37 @@
+import xml.Node
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val noAttr = <t/>
+ val attrNull = <t a={ null: String }/>
+ val attrNone = <t a={ None: Option[Seq[Node]] }/>
+ val preAttrNull = <t p:a={ null: String }/>
+ val preAttrNone = <t p:a={ None: Option[Seq[Node]] }/>
+ assert(noAttr == attrNull)
+ assert(noAttr == attrNone)
+ assert(noAttr == preAttrNull)
+ assert(noAttr == preAttrNone)
+
+ println(noAttr)
+ println(attrNull)
+ println(attrNone)
+ println(preAttrNull)
+ println(preAttrNone)
+
+ val xml1 = <t b="1" d="2"/>
+ val xml2 = <t a={ null: String } p:a={ null: String } b="1" c={ null: String } d="2"/>
+ val xml3 = <t b="1" c={ null: String } d="2" a={ null: String } p:a={ null: String }/>
+ assert(xml1 == xml2)
+ assert(xml1 == xml3)
+
+ println(xml1)
+ println(xml2)
+ println(xml3)
+
+ // Check if attribute order is retained
+ println(<t a="1" d="2"/>)
+ println(<t b="1" d="2"/>)
+ println(<t a="1" b="2" c="3"/>)
+ println(<t g="1" e="2" p:a="3" f:e="4" mgruhu:ji="5"/>)
+ }
+}
diff --git a/test/files/run/xml-loop-bug.scala b/test/files/run/xml-loop-bug.scala
index 378ae9b..6763767 100644
--- a/test/files/run/xml-loop-bug.scala
+++ b/test/files/run/xml-loop-bug.scala
@@ -1,6 +1,8 @@
object Test {
def main(args: Array[String]): Unit = {
- scala.tools.nsc.io.NullPrintStream.setOutAndErr()
+ val sink = new java.io.PrintStream(new java.io.ByteArrayOutputStream())
+ Console setOut sink
+ Console setErr sink
scala.xml.parsing.ConstructingParser.fromSource(scala.io.Source.fromString("<!DOCTYPE xmeml SYSTEM> <xmeml> <sequence> </sequence> </xmeml> "), true).document.docElem
}
}
diff --git a/test/files/scalacheck/CheckEither.scala b/test/files/scalacheck/CheckEither.scala
index a7e5087..4e8480d 100644
--- a/test/files/scalacheck/CheckEither.scala
+++ b/test/files/scalacheck/CheckEither.scala
@@ -3,12 +3,11 @@ import org.scalacheck.Arbitrary.{arbitrary, arbThrowable}
import org.scalacheck.Gen.oneOf
import org.scalacheck.util.StdRand
import org.scalacheck.Prop._
-import org.scalacheck.ConsoleReporter.{testReport, propReport}
import org.scalacheck.Test.{Params, check}
import org.scalacheck.ConsoleReporter.testStatsEx
import Function.tupled
-object CheckEither extends Properties("Either") {
+object Test extends Properties("Either") {
implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] =
Arbitrary[Either[X, Y]](oneOf(arbitrary[X].map(Left(_)), arbitrary[Y].map(Right(_))))
@@ -186,9 +185,3 @@ object CheckEither extends Properties("Either") {
STest.checkProperties(STest.Params(testCallback = ConsoleReporter(0)), this)
}
}
-
-object Test {
- def main(args: Array[String]): Unit = {
- CheckEither.runTests()
- }
-}
diff --git a/test/files/scalacheck/Ctrie.scala b/test/files/scalacheck/Ctrie.scala
new file mode 100644
index 0000000..736bf93
--- /dev/null
+++ b/test/files/scalacheck/Ctrie.scala
@@ -0,0 +1,199 @@
+
+
+
+import org.scalacheck._
+import Prop._
+import org.scalacheck.Gen._
+import collection._
+import collection.concurrent.TrieMap
+
+
+
+case class Wrap(i: Int) {
+ override def hashCode = i // * 0x9e3775cd
+}
+
+
+/** A check mainly oriented towards checking snapshot correctness.
+ */
+object Test extends Properties("concurrent.TrieMap") {
+
+ /* generators */
+
+ val sizes = choose(0, 200000)
+
+ val threadCounts = choose(2, 16)
+
+ val threadCountsAndSizes = for {
+ p <- threadCounts
+ sz <- sizes
+ } yield (p, sz);
+
+
+ /* helpers */
+
+ def inParallel[T](totalThreads: Int)(body: Int => T): Seq[T] = {
+ val threads = for (idx <- 0 until totalThreads) yield new Thread {
+ setName("ParThread-" + idx)
+ private var res: T = _
+ override def run() {
+ res = body(idx)
+ }
+ def result = {
+ this.join()
+ res
+ }
+ }
+
+ threads foreach (_.start())
+ threads map (_.result)
+ }
+
+ def spawn[T](body: =>T): { def get: T } = {
+ val t = new Thread {
+ setName("SpawnThread")
+ private var res: T = _
+ override def run() {
+ res = body
+ }
+ def result = res
+ }
+ t.start()
+ new {
+ def get: T = {
+ t.join()
+ t.result
+ }
+ }
+ }
+
+ def elementRange(threadIdx: Int, totalThreads: Int, totalElems: Int): Range = {
+ val sz = totalElems
+ val idx = threadIdx
+ val p = totalThreads
+ val start = (sz / p) * idx + math.min(idx, sz % p)
+ val elems = (sz / p) + (if (idx < sz % p) 1 else 0)
+ val end = start + elems
+ (start until end)
+ }
+
+ def hasGrown[K, V](last: Map[K, V], current: Map[K, V]) = {
+ (last.size <= current.size) && {
+ last forall {
+ case (k, v) => current.get(k) == Some(v)
+ }
+ }
+ }
+
+ object err {
+ var buffer = new StringBuilder
+ def println(a: AnyRef) = buffer.append(a.toString).append("\n")
+ def clear() = buffer.clear()
+ def flush() = {
+ Console.out.println(buffer)
+ clear()
+ }
+ }
+
+
+ /* properties */
+
+ property("concurrent growing snapshots") = forAll(threadCounts, sizes) {
+ (numThreads, numElems) =>
+ val p = 3 //numThreads
+ val sz = 102 //numElems
+ val ct = new TrieMap[Wrap, Int]
+
+ // checker
+ val checker = spawn {
+ def check(last: Map[Wrap, Int], iterationsLeft: Int): Boolean = {
+ val current = ct.readOnlySnapshot()
+ if (!hasGrown(last, current)) false
+ else if (current.size >= sz) true
+ else if (iterationsLeft < 0) false
+ else check(current, iterationsLeft - 1)
+ }
+ check(ct.readOnlySnapshot(), 500)
+ }
+
+ // fillers
+ inParallel(p) {
+ idx =>
+ elementRange(idx, p, sz) foreach (i => ct.update(Wrap(i), i))
+ }
+
+ // wait for checker to finish
+ val growing = true//checker.get
+
+ val ok = growing && ((0 until sz) forall {
+ case i => ct.get(Wrap(i)) == Some(i)
+ })
+
+ ok
+ }
+
+ property("update") = forAll(sizes) {
+ (n: Int) =>
+ val ct = new TrieMap[Int, Int]
+ for (i <- 0 until n) ct(i) = i
+ (0 until n) forall {
+ case i => ct(i) == i
+ }
+ }
+
+ property("concurrent update") = forAll(threadCountsAndSizes) {
+ case (p, sz) =>
+ val ct = new TrieMap[Wrap, Int]
+
+ inParallel(p) {
+ idx =>
+ for (i <- elementRange(idx, p, sz)) ct(Wrap(i)) = i
+ }
+
+ (0 until sz) forall {
+ case i => ct(Wrap(i)) == i
+ }
+ }
+
+
+ property("concurrent remove") = forAll(threadCounts, sizes) {
+ (p, sz) =>
+ val ct = new TrieMap[Wrap, Int]
+ for (i <- 0 until sz) ct(Wrap(i)) = i
+
+ inParallel(p) {
+ idx =>
+ for (i <- elementRange(idx, p, sz)) ct.remove(Wrap(i))
+ }
+
+ (0 until sz) forall {
+ case i => ct.get(Wrap(i)) == None
+ }
+ }
+
+
+ property("concurrent putIfAbsent") = forAll(threadCounts, sizes) {
+ (p, sz) =>
+ val ct = new TrieMap[Wrap, Int]
+
+ val results = inParallel(p) {
+ idx =>
+ elementRange(idx, p, sz) find (i => ct.putIfAbsent(Wrap(i), i) != None)
+ }
+
+ (results forall (_ == None)) && ((0 until sz) forall {
+ case i => ct.get(Wrap(i)) == Some(i)
+ })
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
diff --git a/test/files/scalacheck/array-new.scala b/test/files/scalacheck/array-new.scala
new file mode 100644
index 0000000..e13a47a
--- /dev/null
+++ b/test/files/scalacheck/array-new.scala
@@ -0,0 +1,37 @@
+import scala.reflect.{ClassTag, classTag}
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+import util._
+import Buildable._
+import scala.collection.mutable.ArraySeq
+
+object Test extends Properties("Array") {
+ /** At this moment the authentic scalacheck Array Builder/Arb bits are commented out.
+ */
+ implicit def arbArray[T](implicit a: Arbitrary[T], m: ClassTag[T]): Arbitrary[Array[T]] =
+ Arbitrary(containerOf[List,T](arbitrary[T]) map (_.toArray))
+
+ val arrGen: Gen[Array[_]] = oneOf(
+ arbitrary[Array[Int]],
+ arbitrary[Array[Array[Int]]],
+ arbitrary[Array[List[String]]],
+ arbitrary[Array[String]],
+ arbitrary[Array[Boolean]],
+ arbitrary[Array[AnyVal]]
+ )
+
+ // inspired by #1857 and #2352
+ property("eq/ne") = forAll(arrGen, arrGen) { (c1, c2) =>
+ (c1 eq c2) || (c1 ne c2)
+ }
+
+ // inspired by #2299
+ def smallInt = choose(1, 10)
+ property("ofDim") = forAll(smallInt, smallInt, smallInt) { (i1, i2, i3) =>
+ val arr = Array.ofDim[String](i1, i2, i3)
+ val flattened = arr flatMap (x => x) flatMap (x => x)
+ flattened.length == i1 * i2 * i3
+ }
+}
\ No newline at end of file
diff --git a/test/files/scalacheck/array.scala b/test/files/scalacheck/array-old.scala
similarity index 100%
rename from test/files/scalacheck/array.scala
rename to test/files/scalacheck/array-old.scala
diff --git a/test/files/scalacheck/avl.scala b/test/files/scalacheck/avl.scala
new file mode 100644
index 0000000..af79ad4
--- /dev/null
+++ b/test/files/scalacheck/avl.scala
@@ -0,0 +1,114 @@
+import org.scalacheck.Gen
+import org.scalacheck.Prop.forAll
+import org.scalacheck.Properties
+
+import util.logging.ConsoleLogger
+
+package scala.collection.mutable {
+
+ /**
+ * Property of an AVL Tree : Any node of the tree has a balance value beetween in [-1; 1]
+ */
+ abstract class AVLTreeTest(name: String) extends Properties(name) with ConsoleLogger {
+
+ def `2^`(n: Int) = (1 to n).fold(1)((a, b) => b*2)
+
+ def capacityMax(depth: Int): Int = `2^`(depth+1) - 1
+
+ def minDepthForCapacity(x: Int): Int = {
+ var depth = 0
+ while(capacityMax(depth) < x)
+ depth += 1
+ depth
+ }
+
+ def numberOfElementsInLeftSubTree(n: Int): collection.immutable.IndexedSeq[Int] = {
+ val mid = n/2 + n%2
+ ((1 until mid)
+ .filter { i => math.abs(minDepthForCapacity(i) - minDepthForCapacity(n-i)) < 2 }
+ .flatMap { i => Seq(i, n-(i+1)) }).toIndexedSeq.distinct
+ }
+
+ def makeAllBalancedTree[A](elements: List[A]): List[AVLTree[A]] = elements match {
+ case Nil => Leaf::Nil
+ case first::Nil => Node(first, Leaf, Leaf)::Nil
+ case first::second::Nil => Node(second, Node(first, Leaf, Leaf), Leaf)::Node(first, Leaf, Node(second, Leaf, Leaf))::Nil
+ case first::second::third::Nil => Node(second, Node(first, Leaf, Leaf), Node(third, Leaf, Leaf))::Nil
+ case _ => {
+ val combinations = for {
+ left <- numberOfElementsInLeftSubTree(elements.size)
+ root = elements(left)
+ right = elements.size - (left + 1)
+ } yield (root, left, right)
+ (combinations.flatMap(triple => for {
+ l <- makeAllBalancedTree(elements.take(triple._2))
+ r <- makeAllBalancedTree(elements.takeRight(triple._3))
+ } yield Node(triple._1, l, r))).toList
+ }
+ }
+
+ def genInput: org.scalacheck.Gen[(Int, List[AVLTree[Int]])] = for {
+ size <- org.scalacheck.Gen.choose(20, 25)
+ elements <- org.scalacheck.Gen.listOfN(size, org.scalacheck.Gen.choose(0, 1000))
+ selected <- org.scalacheck.Gen.choose(0, 1000)
+ } yield {
+ // selected mustn't be in elements already
+ val list = makeAllBalancedTree(elements.sorted.distinct.map(_*2))
+ (selected*2+1, list)
+ }
+
+ def genInputDelete: org.scalacheck.Gen[(Int, List[AVLTree[Int]])] = for {
+ size <- org.scalacheck.Gen.choose(20, 25)
+ elements <- org.scalacheck.Gen.listOfN(size, org.scalacheck.Gen.choose(0, 1000))
+ e = elements.sorted.distinct
+ selected <- org.scalacheck.Gen.choose(0, e.size-1)
+ } yield {
+ // selected must be in elements already
+ val list = makeAllBalancedTree(e)
+ (e(selected), list)
+ }
+ }
+
+ trait AVLInvariants {
+ self: AVLTreeTest =>
+
+ def isBalanced[A](t: AVLTree[A]): Boolean = t match {
+ case node: Node[A] => math.abs(node.balance) < 2 && (List(node.left, node.right) forall isBalanced)
+ case Leaf => true
+ }
+
+ def setup(invariant: AVLTree[Int] => Boolean) = forAll(genInput) {
+ case (selected: Int, trees: List[AVLTree[Int]]) =>
+ trees.map(tree => invariant(tree)).fold(true)((a, b) => a && b)
+ }
+
+ property("Every tree is initially balanced.") = setup(isBalanced)
+ }
+
+ object TestInsert extends AVLTreeTest("Insert") with AVLInvariants {
+ import math.Ordering.Int
+ property("`insert` creates a new tree containing the given element. The tree remains balanced.") = forAll(genInput) {
+ case (selected: Int, trees: List[AVLTree[Int]]) =>
+ trees.map(tree => {
+ val modifiedTree = tree.insert(selected, Int)
+ modifiedTree.contains(selected, Int) && isBalanced(modifiedTree)
+ }).fold(true)((a, b) => a && b)
+ }
+ }
+
+ object TestRemove extends AVLTreeTest("Remove") with AVLInvariants {
+ import math.Ordering.Int
+ property("`remove` creates a new tree without the given element. The tree remains balanced.") = forAll(genInputDelete) {
+ case (selected: Int, trees: List[AVLTree[Int]]) =>
+ trees.map(tree => {
+ val modifiedTree = tree.remove(selected, Int)
+ tree.contains(selected, Int) && !modifiedTree.contains(selected, Int) && isBalanced(modifiedTree)
+ }).fold(true)((a, b) => a && b)
+ }
+ }
+}
+
+object Test extends Properties("AVL") {
+ include(scala.collection.mutable.TestInsert)
+ include(scala.collection.mutable.TestRemove)
+}
diff --git a/test/files/scalacheck/duration.scala b/test/files/scalacheck/duration.scala
new file mode 100644
index 0000000..5e93638
--- /dev/null
+++ b/test/files/scalacheck/duration.scala
@@ -0,0 +1,69 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+import math._
+import concurrent.duration.Duration.fromNanos
+
+object Test extends Properties("Division of Duration by Long") {
+
+ val weightedLong =
+ frequency(
+ 1 -> choose(-128L, 127L),
+ 1 -> (arbitrary[Byte] map (_.toLong << 8)),
+ 1 -> (arbitrary[Byte] map (_.toLong << 16)),
+ 1 -> (arbitrary[Byte] map (_.toLong << 24)),
+ 1 -> (arbitrary[Byte] map (_.toLong << 32)),
+ 1 -> (arbitrary[Byte] map (_.toLong << 40)),
+ 1 -> (arbitrary[Byte] map (_.toLong << 48)),
+ 1 -> (choose(-127L, 127L) map (_ << 56))
+ )
+
+ val genTwoSmall = for {
+ a <- weightedLong
+ b <- choose(-(Long.MaxValue / max(1, abs(a))), Long.MaxValue / max(1, abs(a)))
+ } yield (a, b)
+
+ val genTwoLarge = for {
+ a <- weightedLong
+ b <- arbitrary[Long] suchThat (b => (abs(b) > Long.MaxValue / max(1, abs(a))))
+ } yield (a, b)
+
+ val genClose = for {
+ a <- weightedLong
+ if a != 0
+ b <- choose(Long.MaxValue / a - 10, Long.MaxValue / a + 10)
+ } yield (a, b)
+
+ val genBorderline =
+ frequency(
+ 1 -> (Long.MinValue, 0L),
+ 1 -> (Long.MinValue, 1L),
+ 1 -> (Long.MinValue, -1L),
+ 1 -> (0L, Long.MinValue),
+ 1 -> (1L, Long.MinValue),
+ 1 -> (-1L, Long.MinValue),
+ 90 -> genClose
+ )
+
+ def mul(a: Long, b: Long): Long = {
+ (fromNanos(a) * b).toNanos
+ }
+
+ property("without overflow") = forAll(genTwoSmall) { case (a, b) =>
+ a * b == mul(a, b)
+ }
+
+ property("with overflow") = forAll(genTwoLarge) { case (a, b) =>
+ try { mul(a, b); false } catch { case _: IllegalArgumentException => true }
+ }
+
+ property("on overflow edge cases") = forAll(genBorderline) { case (a, b) =>
+ val shouldFit =
+ a != Long.MinValue && // must fail due to illegal duration length
+ (b != Long.MinValue || a == 0) && // Long factor may only be MinValue if the duration is zero, otherwise the result will be illegal
+ (abs(b) <= Long.MaxValue / max(1, abs(a))) // check the rest against the “safe” division method
+ try { mul(a, b); shouldFit }
+ catch { case _: IllegalArgumentException => !shouldFit }
+ }
+}
diff --git a/test/files/scalacheck/nan-ordering.scala b/test/files/scalacheck/nan-ordering.scala
new file mode 100644
index 0000000..2094a46
--- /dev/null
+++ b/test/files/scalacheck/nan-ordering.scala
@@ -0,0 +1,130 @@
+import org.scalacheck._
+import Gen._
+import Prop._
+
+object Test extends Properties("NaN-Ordering") {
+
+ val specFloats: Gen[Float] = oneOf(
+ Float.MaxValue,
+ Float.MinPositiveValue,
+ Float.MinValue,
+ Float.NaN,
+ Float.NegativeInfinity,
+ Float.PositiveInfinity,
+ -0.0f,
+ +0.0f
+ )
+
+ property("Float min") = forAll(specFloats, specFloats) { (d1, d2) => {
+ val mathmin = math.min(d1, d2)
+ val numericmin = d1 min d2
+ mathmin == numericmin || mathmin.isNaN && numericmin.isNaN
+ }
+ }
+
+ property("Float max") = forAll(specFloats, specFloats) { (d1, d2) => {
+ val mathmax = math.max(d1, d2)
+ val numericmax = d1 max d2
+ mathmax == numericmax || mathmax.isNaN && numericmax.isNaN
+ }
+ }
+
+ val numFloat = implicitly[Numeric[Float]]
+
+ property("Float lt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.lt(d1, d2) == d1 < d2 }
+
+ property("Float lteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.lteq(d1, d2) == d1 <= d2 }
+
+ property("Float gt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.gt(d1, d2) == d1 > d2 }
+
+ property("Float gteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.gteq(d1, d2) == d1 >= d2 }
+
+ property("Float equiv") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.equiv(d1, d2) == (d1 == d2) }
+
+ property("Float reverse.min") = forAll(specFloats, specFloats) { (d1, d2) => {
+ val mathmin = math.min(d1, d2)
+ val numericmin = numFloat.reverse.min(d1, d2)
+ mathmin == numericmin || mathmin.isNaN && numericmin.isNaN
+ }
+ }
+
+ property("Float reverse.max") = forAll(specFloats, specFloats) { (d1, d2) => {
+ val mathmax = math.max(d1, d2)
+ val numericmax = numFloat.reverse.max(d1, d2)
+ mathmax == numericmax || mathmax.isNaN && numericmax.isNaN
+ }
+ }
+
+ property("Float reverse.lt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.lt(d1, d2) == d2 < d1 }
+
+ property("Float reverse.lteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.lteq(d1, d2) == d2 <= d1 }
+
+ property("Float reverse.gt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.gt(d1, d2) == d2 > d1 }
+
+ property("Float reverse.gteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.gteq(d1, d2) == d2 >= d1 }
+
+ property("Float reverse.equiv") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.equiv(d1, d2) == (d1 == d2) }
+
+
+ val specDoubles: Gen[Double] = oneOf(
+ Double.MaxValue,
+ Double.MinPositiveValue,
+ Double.MinValue,
+ Double.NaN,
+ Double.NegativeInfinity,
+ Double.PositiveInfinity,
+ -0.0,
+ +0.0
+ )
+
+ // ticket #5104
+ property("Double min") = forAll(specDoubles, specDoubles) { (d1, d2) => {
+ val mathmin = math.min(d1, d2)
+ val numericmin = d1 min d2
+ mathmin == numericmin || mathmin.isNaN && numericmin.isNaN
+ }
+ }
+
+ property("Double max") = forAll(specDoubles, specDoubles) { (d1, d2) => {
+ val mathmax = math.max(d1, d2)
+ val numericmax = d1 max d2
+ mathmax == numericmax || mathmax.isNaN && numericmax.isNaN
+ }
+ }
+
+ val numDouble = implicitly[Numeric[Double]]
+
+ property("Double lt") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.lt(d1, d2) == d1 < d2 }
+
+ property("Double lteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.lteq(d1, d2) == d1 <= d2 }
+
+ property("Double gt") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.gt(d1, d2) == d1 > d2 }
+
+ property("Double gteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.gteq(d1, d2) == d1 >= d2 }
+
+ property("Double equiv") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.equiv(d1, d2) == (d1 == d2) }
+
+ property("Double reverse.min") = forAll(specDoubles, specDoubles) { (d1, d2) => {
+ val mathmin = math.min(d1, d2)
+ val numericmin = numDouble.reverse.min(d1, d2)
+ mathmin == numericmin || mathmin.isNaN && numericmin.isNaN
+ }
+ }
+
+ property("Double reverse.max") = forAll(specDoubles, specDoubles) { (d1, d2) => {
+ val mathmax = math.max(d1, d2)
+ val numericmax = numDouble.reverse.max(d1, d2)
+ mathmax == numericmax || mathmax.isNaN && numericmax.isNaN
+ }
+ }
+
+ property("Double reverse.lt") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.lt(d1, d2) == d2 < d1 }
+
+ property("Double reverse.lteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.lteq(d1, d2) == d2 <= d1 }
+
+ property("Double reverse.gt") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.gt(d1, d2) == d2 > d1 }
+
+ property("Double reverse.gteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.gteq(d1, d2) == d2 >= d1 }
+
+ property("Double reverse.equiv") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.equiv(d1, d2) == (d1 == d2) }
+}
diff --git a/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
new file mode 100644
index 0000000..e141c39
--- /dev/null
+++ b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
@@ -0,0 +1,98 @@
+package scala.collection.parallel
+package mutable
+
+
+
+import org.scalacheck._
+import org.scalacheck.Gen
+import org.scalacheck.Gen._
+import org.scalacheck.Prop._
+import org.scalacheck.Properties
+import org.scalacheck.Arbitrary._
+
+import scala.collection._
+import scala.collection.parallel.ops._
+
+
+
+abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParConcurrentTrieMap[" + tp + "]") {
+ // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
+ // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
+
+ type CollType = ParTrieMap[K, V]
+
+ def isCheckingViews = false
+
+ def hasStrictOrder = false
+
+ def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
+ val ct = new concurrent.TrieMap[K, V]
+ val gen = vals(rnd.nextInt(vals.size))
+ for (i <- 0 until sz) ct += sample(gen)
+ ct
+ }
+
+ def fromTraversable(t: Traversable[(K, V)]) = {
+ val pct = new ParTrieMap[K, V]
+ var i = 0
+ for (kv <- t.toList) {
+ pct += kv
+ i += 1
+ }
+ pct
+ }
+
+}
+
+
+object IntIntParallelConcurrentTrieMapCheck extends ParallelConcurrentTrieMapCheck[Int, Int]("Int, Int")
+with PairOperators[Int, Int]
+with PairValues[Int, Int]
+{
+ def intvalues = new IntValues {}
+ def kvalues = intvalues.values
+ def vvalues = intvalues.values
+
+ val intoperators = new IntOperators {}
+ def voperators = intoperators
+ def koperators = intoperators
+
+ override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
+ case pm: ParTrieMap[k, v] =>
+ println("Mutable parallel ctrie")
+ case _ =>
+ println("could not match data structure type: " + ds.getClass)
+ }
+
+ override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match {
+ // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster
+ // val invs = pm.brokenInvariants
+
+ // val containsall = (for ((k, v) <- orig) yield {
+ // if (pm.asInstanceOf[ParHashMap[Int, Int]].get(k) == Some(v)) true
+ // else {
+ // println("Does not contain original element: " + (k, v))
+ // false
+ // }
+ // }).foldLeft(true)(_ && _)
+
+
+ // if (invs.isEmpty) containsall
+ // else {
+ // println("Invariants broken:\n" + invs.mkString("\n"))
+ // false
+ // }
+ case _ => true
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
index fbacb9f..e3f8778 100644
--- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
@@ -86,7 +86,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
println("Collection debug info: ")
coll.printDebugBuffer
println("Task debug info: ")
- println(tasksupport.debugMessages.mkString("\n"))
+ println(coll.tasksupport.debugMessages.mkString("\n"))
}
def printComparison(t: Traversable[_], coll: ParIterable[_], tf: Traversable[_], cf: ParIterable[_], ind: Int) {
@@ -414,21 +414,21 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
}).reduceLeft(_ && _)
}
- // property("groupBy must be equal") = forAll(collectionPairs) {
- // case (t, coll) =>
- // (for ((f, ind) <- groupByFunctions.zipWithIndex) yield {
- // val tgroup = t.groupBy(f)
- // val cgroup = coll.groupBy(f)
- // if (tgroup != cgroup || cgroup != tgroup) {
- // println("from: " + t)
- // println("and: " + coll)
- // println("groups are: ")
- // println(tgroup)
- // println(cgroup)
- // }
- // ("operator " + ind) |: tgroup == cgroup && cgroup == tgroup
- // }).reduceLeft(_ && _)
- // }
+ property("groupBy must be equal") = forAll(collectionPairs) {
+ case (t, coll) =>
+ (for ((f, ind) <- groupByFunctions.zipWithIndex) yield {
+ val tgroup = t.groupBy(f)
+ val cgroup = coll.groupBy(f)
+ if (tgroup != cgroup || cgroup != tgroup) {
+ println("from: " + t)
+ println("and: " + coll)
+ println("groups are: ")
+ println(tgroup)
+ println(cgroup)
+ }
+ ("operator " + ind) |: tgroup == cgroup && cgroup == tgroup
+ }).reduceLeft(_ && _)
+ }
}
diff --git a/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala b/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
index 05237ba..b6af8f4 100644
--- a/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
@@ -20,7 +20,7 @@ abstract class ParallelMapCheck[K, V](collname: String) extends ParallelIterable
property("gets iterated keys") = forAll(collectionPairs) {
case (t, coll) =>
val containsT = for ((k, v) <- t) yield (coll.get(k) == Some(v))
- val containsSelf = for ((k, v) <- coll) yield (coll.get(k) == Some(v))
+ val containsSelf = coll.map { case (k, v) => coll.get(k) == Some(v) }
("Par contains elements of seq map" |: containsT.forall(_ == true)) &&
("Par contains elements of itself" |: containsSelf.forall(_ == true))
}
diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala
index cc03823..0a91977 100644
--- a/test/files/scalacheck/parallel-collections/pc.scala
+++ b/test/files/scalacheck/parallel-collections/pc.scala
@@ -25,6 +25,9 @@ class ParCollProperties extends Properties("Parallel collections") {
// parallel mutable hash maps (tables)
include(mutable.IntIntParallelHashMapCheck)
+ // parallel ctrie
+ include(mutable.IntIntParallelConcurrentTrieMapCheck)
+
// parallel mutable hash sets (tables)
include(mutable.IntParallelHashSetCheck)
diff --git a/test/files/scalacheck/range.scala b/test/files/scalacheck/range.scala
index 3c1b1ac..7297911 100644
--- a/test/files/scalacheck/range.scala
+++ b/test/files/scalacheck/range.scala
@@ -1,228 +1,257 @@
-import org.scalacheck._
-import Prop._
-import Gen._
-import Arbitrary._
-
-class Counter(r: Range) {
- var cnt = 0L
- var last: Option[Int] = None
- val str = "Range["+r.start+", "+r.end+", "+r.step+(if (r.isInclusive) "]" else ")")
- def apply(x: Int) = {
- cnt += 1L
- if (cnt % 500000000L == 0L) {
- println("Working: %s %d %d" format (str, cnt, x))
- }
- if (cnt > (Int.MaxValue.toLong + 1) * 2)
- error("Count exceeds maximum possible for an Int Range")
- if ((r.step > 0 && last.exists(_ > x)) || (r.step < 0 && last.exists(_ < x)))
- error("Range wrapped: %d %s" format (x, last.toString))
- last = Some(x)
- }
-}
-
-abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
- def myGen: Gen[Range]
-
- val genRange = for {
- start <- arbitrary[Int]
- end <- arbitrary[Int]
- step <- Gen.choose(1, (start - end).abs + 1)
- } yield if (start < end) Range(start, end, step) else Range(start, end, -step)
-
- val genReasonableSizeRange = for {
- start <- choose(-Int.MinValue, Int.MaxValue)
- end <- choose(-Int.MinValue, Int.MaxValue)
- step <- choose(-Int.MaxValue, Int.MaxValue)
- } yield Range(start, end, if (step == 0) 100 else step)
-
- val genSmallRange = for {
- start <- choose(-100, 100)
- end <- choose(-100, 100)
- step <- choose(1, 1)
- } yield if (start < end) Range(start, end, step) else Range(start, end, -step)
-
- val genRangeByOne = for {
- start <- arbitrary[Int]
- end <- arbitrary[Int]
- if (end.toLong - start.toLong).abs <= 10000000L
- } yield if (start < end) Range(start, end) else Range(end, start)
-
- def str(r: Range) = "Range["+r.start+", "+r.end+", "+r.step+(if (r.isInclusive) "]" else ")")
-
- def expectedSize(r: Range): Long = if (r.isInclusive) {
- (r.end.toLong - r.start.toLong < 0, r.step < 0) match {
- case (true, true) | (false, false) => (r.end.toLong - r.start.toLong).abs / r.step.abs.toLong + 1L
- case _ => if (r.start == r.end) 1L else 0L
- }
- } else {
- (r.end.toLong - r.start.toLong < 0, r.step < 0) match {
- case (true, true) | (false, false) => (
- (r.end.toLong - r.start.toLong).abs / r.step.abs.toLong
- + (if ((r.end.toLong - r.start.toLong).abs % r.step.abs.toLong > 0L) 1L else 0L)
- )
- case _ => 0L
- }
- }
-
- def within(r: Range, x: Int) = if (r.step > 0)
- r.start <= x && (if (r.isInclusive) x <= r.end else x < r.end)
- else
- r.start >= x && (if (r.isInclusive) x >= r.end else x > r.end)
-
- def multiple(r: Range, x: Int) = (x.toLong - r.start) % r.step == 0
-
- property("foreach.step") = forAll(myGen) { r =>
- var allValid = true
- val cnt = new Counter(r)
-// println("--------------------")
-// println(r)
- r foreach { x => cnt(x)
-// println(x + ", " + (x - r.start) + ", " + (x.toLong - r.start) + ", " + ((x.toLong - r.start) % r.step))
- allValid &&= multiple(r, x)
- }
- allValid :| str(r)
- }
-
- property("foreach.inside.range") = forAll(myGen) { r =>
- var allValid = true
- var last: Option[Int] = None
- val cnt = new Counter(r)
- r foreach { x => cnt(x)
- allValid &&= within(r, x)
- }
- allValid :| str(r)
- }
-
- property("foreach.visited.size") = forAll(myGen) { r =>
- var visited = 0L
- val cnt = new Counter(r)
- r foreach { x => cnt(x)
- visited += 1L
- }
-// println("----------")
-// println(str(r))
-// println("size: " + r.size)
-// println("expected: " + expectedSize(r))
-// println("visited: " + visited)
- (visited == expectedSize(r)) :| str(r)
- }
-
- property("length") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
- (r.length == expectedSize(r)) :| str(r)
- }
-
- property("isEmpty") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
- (r.isEmpty == (expectedSize(r) == 0L)) :| str(r)
- }
-
- property("contains") = forAll(myGen, arbInt.arbitrary) { (r, x) =>
-// println("----------------")
-// println(str(r))
-// println(x)
-// println("within: " + within(r, x))
-// println("multiple: " + multiple(r, x))
-// println("contains: " + r.contains(x))
- ((within(r, x) && multiple(r, x)) == r.contains(x)) :| str(r)+": "+x
- }
-
- property("take") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) =>
- val t = r take x
- (t.size == (0 max x min r.size) && t.start == r.start && t.step == r.step) :| str(r)+" / "+str(t)+": "+x
- }
-
- property("init") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
- (r.size == 0) || {
- val t = r.init
- (t.size + 1 == r.size) && (t.isEmpty || t.head == r.head)
- }
- }
-
- property("takeWhile") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) =>
- val t = (if (r.step > 0) r takeWhile (_ <= x) else r takeWhile(_ >= x))
- if (r.size == 0) {
- (t.size == 0) :| str(r)+" / "+str(t)+": "+x
- } else {
- val t2 = (if (r.step > 0) Range(r.start, x min r.last, r.step).inclusive else Range(r.start, x max r.last, r.step).inclusive)
- (t.start == r.start && t.size == t2.size && t.step == r.step) :| str(r)+" / "+str(t)+" / "+str(t2)+": "+x
- }
- }
-
- property("reverse.toSet.equal") = forAll(myGen) { r =>
- val reversed = r.reverse
- val aresame = r.toSet == reversed.toSet
- if (!aresame) {
- println(str(r))
- println(r)
- println(reversed)
- println(r.toSet)
- println(reversed.toSet)
- }
- aresame
- }
-}
-
-object NormalRangeTest extends RangeTest("normal") {
- override def myGen = genReasonableSizeRange
- def genOne = for {
- start <- arbitrary[Int]
- end <- arbitrary[Int]
- if (start.toLong - end.toLong).abs < Int.MaxValue.toLong
- } yield Range(start, end, if (start < end) 1 else - 1)
- property("by 1.size + 1 == inclusive.size") = forAll(genOne) { r =>
- (r.size + 1 == r.inclusive.size) :| str(r)
- }
-}
-
-object InclusiveRangeTest extends RangeTest("inclusive") {
- override def myGen = for (r <- genReasonableSizeRange) yield r.inclusive
-}
-
-object ByOneRangeTest extends RangeTest("byOne") {
- override def myGen = genSmallRange
-}
-
-object InclusiveByOneRangeTest extends RangeTest("inclusiveByOne") {
- override def myGen = for (r <- genSmallRange) yield r.inclusive
-}
-
-object SmallValuesRange extends RangeTest("smallValues") {
- override def myGen = genSmallRange
-}
-
-object TooLargeRange extends Properties("Too Large Range") {
- val genTooLargeStart = for {
- start <- choose(-Int.MinValue, 0)
- } yield start
-
- property("Too large range throws exception") = forAll(genTooLargeStart) { start =>
- try {
- val r = Range.inclusive(start, Int.MaxValue, 1)
- println("how here? r = " + r.toString)
- false
- }
- catch { case _: IllegalArgumentException => true }
- }
-}
-
-object Test extends Properties("Range") {
- import org.scalacheck.{ Test => STest }
-
- List(NormalRangeTest, InclusiveRangeTest, ByOneRangeTest, InclusiveByOneRangeTest, TooLargeRange) foreach { ps =>
- STest.checkProperties(STest.Params(testCallback = ConsoleReporter(0)), ps)
- }
-}
-
-/* Mini-benchmark
-def testRange(i: Int, j: Int, k: Int) = {
- var count = 0
- for {
- vi <- 0 to i
- vj <- 0 to j
- vk <- 0 to k
- } { count += 1 }
-}
-
-testRange(10, 1000, 10000)
-testRange(10000, 1000, 10)
-*/
-
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+class Counter(r: Range) {
+ var cnt = 0L
+ var last: Option[Int] = None
+ val str = "Range["+r.start+", "+r.end+", "+r.step+(if (r.isInclusive) "]" else ")")
+ def apply(x: Int) = {
+ cnt += 1L
+ if (cnt % 500000000L == 0L) {
+ println("Working: %s %d %d" format (str, cnt, x))
+ }
+ if (cnt > (Int.MaxValue.toLong + 1) * 2) {
+ val msg = "Count exceeds maximum possible for an Int Range: %s" format str
+ println(msg) // exception is likely to be eaten by an out of memory error
+ sys error msg
+ }
+ if ((r.step > 0 && last.exists(_ > x)) || (r.step < 0 && last.exists(_ < x))) {
+ val msg = "Range %s wrapped: %d %s" format (str, x, last.toString)
+ println(msg) // exception is likely to be eaten by an out of memory error
+ sys error msg
+ }
+ last = Some(x)
+ }
+}
+
+abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
+ def myGen: Gen[Range]
+
+ def genReasonableSizeRange = oneOf(genArbitraryRange, genBoundaryRange)
+
+ def genArbitraryRange = for {
+ start <- choose(Int.MinValue, Int.MaxValue)
+ end <- choose(Int.MinValue, Int.MaxValue)
+ step <- choose(-Int.MaxValue, Int.MaxValue)
+ } yield Range(start, end, if (step == 0) 100 else step)
+
+ def genBoundaryRange = for {
+ boundary <- oneOf(Int.MinValue, -1, 0, 1, Int.MaxValue)
+ isStart <- arbitrary[Boolean]
+ size <- choose(1, 100)
+ step <- choose(1, 101)
+ } yield {
+ val signum = if (boundary == 0) 1 else boundary.signum
+ if (isStart) Range(boundary, boundary - size * boundary.signum, - step * signum)
+ else Range(boundary - size * boundary.signum, boundary, step * signum)
+ }
+
+
+ def genSmallRange = for {
+ start <- choose(-100, 100)
+ end <- choose(-100, 100)
+ step <- choose(1, 1)
+ } yield if (start < end) Range(start, end, step) else Range(start, end, -step)
+
+ def genRangeByOne = oneOf(genRangeOpenByOne, genRangeClosedByOne)
+
+ def genRangeOpenByOne = for {
+ r <- oneOf(genSmallRange, genBoundaryRange)
+ if (r.end.toLong - r.start.toLong).abs <= 10000000L
+ } yield if (r.start < r.end) Range(r.start, r.end) else Range(r.end, r.start)
+
+ def genRangeClosedByOne = for (r <- genRangeOpenByOne) yield r.start to r.end
+
+ def str(r: Range) = "Range["+r.start+", "+r.end+", "+r.step+(if (r.isInclusive) "]" else ")")
+
+ def expectedSize(r: Range): Long = if (r.isInclusive) {
+ (r.end.toLong - r.start.toLong < 0, r.step < 0) match {
+ case (true, true) | (false, false) => (r.end.toLong - r.start.toLong).abs / r.step.abs.toLong + 1L
+ case _ => if (r.start == r.end) 1L else 0L
+ }
+ } else {
+ (r.end.toLong - r.start.toLong < 0, r.step < 0) match {
+ case (true, true) | (false, false) => (
+ (r.end.toLong - r.start.toLong).abs / r.step.abs.toLong
+ + (if ((r.end.toLong - r.start.toLong).abs % r.step.abs.toLong > 0L) 1L else 0L)
+ )
+ case _ => 0L
+ }
+ }
+
+ def within(r: Range, x: Int) = if (r.step > 0)
+ r.start <= x && (if (r.isInclusive) x <= r.end else x < r.end)
+ else
+ r.start >= x && (if (r.isInclusive) x >= r.end else x > r.end)
+
+ def multiple(r: Range, x: Int) = (x.toLong - r.start) % r.step == 0
+
+ property("foreach.step") = forAllNoShrink(myGen) { r =>
+// println("foreach.step "+str(r))
+ var allValid = true
+ val cnt = new Counter(r)
+// println("--------------------")
+// println(r)
+ r foreach { x => cnt(x)
+// println(x + ", " + (x - r.start) + ", " + (x.toLong - r.start) + ", " + ((x.toLong - r.start) % r.step))
+ allValid &&= multiple(r, x)
+ }
+ allValid :| str(r)
+ }
+
+ property("foreach.inside.range") = forAll(myGen) { r =>
+// println("foreach.inside.range "+str(r))
+ var allValid = true
+ var last: Option[Int] = None
+ val cnt = new Counter(r)
+ r foreach { x => cnt(x)
+ allValid &&= within(r, x)
+ }
+ allValid :| str(r)
+ }
+
+ property("foreach.visited.size") = forAll(myGen) { r =>
+// println("foreach.visited.size "+str(r))
+ var visited = 0L
+ val cnt = new Counter(r)
+ r foreach { x => cnt(x)
+ visited += 1L
+ }
+// println("----------")
+// println(str(r))
+// println("size: " + r.size)
+// println("expected: " + expectedSize(r))
+// println("visited: " + visited)
+ (visited == expectedSize(r)) :| str(r)
+ }
+
+ property("length") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
+// println("length "+str(r))
+ (r.length == expectedSize(r)) :| str(r)
+ }
+
+ property("isEmpty") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
+// println("isEmpty "+str(r))
+ (r.isEmpty == (expectedSize(r) == 0L)) :| str(r)
+ }
+
+ property("contains") = forAll(myGen, arbInt.arbitrary) { (r, x) =>
+// println("contains "+str(r))
+// println("----------------")
+// println(str(r))
+// println(x)
+// println("within: " + within(r, x))
+// println("multiple: " + multiple(r, x))
+// println("contains: " + r.contains(x))
+ ((within(r, x) && multiple(r, x)) == r.contains(x)) :| str(r)+": "+x
+ }
+
+ property("take") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) =>
+// println("take "+str(r))
+ val t = r take x
+ (t.size == (0 max x min r.size) && t.start == r.start && t.step == r.step) :| str(r)+" / "+str(t)+": "+x
+ }
+
+ property("init") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
+// println("init "+str(r))
+ (r.size == 0) || {
+ val t = r.init
+ (t.size + 1 == r.size) && (t.isEmpty || t.head == r.head)
+ }
+ }
+
+ property("takeWhile") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) =>
+// println("takeWhile "+str(r))
+ val t = (if (r.step > 0) r takeWhile (_ <= x) else r takeWhile(_ >= x))
+ if (r.size == 0) {
+ (t.size == 0) :| str(r)+" / "+str(t)+": "+x
+ } else {
+ val t2 = (if (r.step > 0) Range(r.start, x min r.last, r.step).inclusive else Range(r.start, x max r.last, r.step).inclusive)
+ (t.start == r.start && t.size == t2.size && t.step == r.step) :| str(r)+" / "+str(t)+" / "+str(t2)+": "+x
+ }
+ }
+
+ property("reverse.toSet.equal") = forAll(myGen) { r =>
+// println("reverse.toSet.equal "+str(r))
+ val reversed = r.reverse
+ val aresame = r.toSet == reversed.toSet
+ if (!aresame) {
+ println(str(r))
+ println(r)
+ println(reversed)
+ println(r.toSet)
+ println(reversed.toSet)
+ }
+ aresame :| str(r)
+ }
+}
+
+object NormalRangeTest extends RangeTest("normal") {
+ override def myGen = genReasonableSizeRange
+ def genOne = for {
+ start <- arbitrary[Int]
+ end <- arbitrary[Int]
+ if (start.toLong - end.toLong).abs < Int.MaxValue.toLong
+ } yield Range(start, end, if (start < end) 1 else - 1)
+ property("by 1.size + 1 == inclusive.size") = forAll(genOne) { r =>
+ (r.size + 1 == r.inclusive.size) :| str(r)
+ }
+}
+
+object InclusiveRangeTest extends RangeTest("inclusive") {
+ override def myGen = for (r <- genReasonableSizeRange) yield r.inclusive
+}
+
+object ByOneRangeTest extends RangeTest("byOne") {
+ override def myGen = genRangeByOne
+}
+
+object InclusiveByOneRangeTest extends RangeTest("inclusiveByOne") {
+ override def myGen = for (r <- genRangeByOne) yield r.inclusive
+}
+
+object SmallValuesRange extends RangeTest("smallValues") {
+ override def myGen = genSmallRange
+}
+
+object TooLargeRange extends Properties("Too Large Range") {
+ val genTooLargeStart = for {
+ start <- choose(-Int.MinValue, 0)
+ } yield start
+
+ property("Too large range throws exception") = forAll(genTooLargeStart) { start =>
+ try {
+ val r = Range.inclusive(start, Int.MaxValue, 1)
+ println("how here? r = " + r.toString)
+ false
+ }
+ catch { case _: IllegalArgumentException => true }
+ }
+}
+
+object Test extends Properties("Range") {
+ import org.scalacheck.{ Test => STest }
+
+ include(NormalRangeTest)
+ include(InclusiveRangeTest)
+ include(ByOneRangeTest)
+ include(InclusiveByOneRangeTest)
+ include(TooLargeRange)
+}
+
+/* Mini-benchmark
+def testRange(i: Int, j: Int, k: Int) = {
+ var count = 0
+ for {
+ vi <- 0 to i
+ vj <- 0 to j
+ vk <- 0 to k
+ } { count += 1 }
+}
+
+testRange(10, 1000, 10000)
+testRange(10000, 1000, 10)
+*/
+
diff --git a/test/files/scalacheck/redblack.scala b/test/files/scalacheck/redblack.scala
new file mode 100644
index 0000000..bbc6504
--- /dev/null
+++ b/test/files/scalacheck/redblack.scala
@@ -0,0 +1,213 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+
+/*
+Properties of a Red & Black Tree:
+
+A node is either red or black.
+The root is black. (This rule is used in some definitions and not others. Since the
+root can always be changed from red to black but not necessarily vice-versa this
+rule has little effect on analysis.)
+All leaves are black.
+Both children of every red node are black.
+Every simple path from a given node to any of its descendant leaves contains the same number of black nodes.
+*/
+
+abstract class RedBlackTest extends Properties("RedBlack") {
+ def minimumSize = 0
+ def maximumSize = 5
+
+ object RedBlackTest extends scala.collection.immutable.RedBlack[String] {
+ def isSmaller(x: String, y: String) = x < y
+ }
+
+ import RedBlackTest._
+
+ def nodeAt[A](tree: Tree[A], n: Int): Option[(String, A)] = if (n < tree.iterator.size && n >= 0)
+ Some(tree.iterator.drop(n).next)
+ else
+ None
+
+ def treeContains[A](tree: Tree[A], key: String) = tree.iterator.map(_._1) contains key
+
+ def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[Int]] =
+ if (level == 0) {
+ value(Empty)
+ } else {
+ for {
+ oddOrEven <- choose(0, 2)
+ tryRed = oddOrEven.sample.get % 2 == 0 // work around arbitrary[Boolean] bug
+ isRed = parentIsBlack && tryRed
+ nextLevel = if (isRed) level else level - 1
+ left <- mkTree(nextLevel, !isRed, label + "L")
+ right <- mkTree(nextLevel, !isRed, label + "R")
+ } yield {
+ if (isRed)
+ RedTree(label + "N", 0, left, right)
+ else
+ BlackTree(label + "N", 0, left, right)
+ }
+ }
+
+ def genTree = for {
+ depth <- choose(minimumSize, maximumSize + 1)
+ tree <- mkTree(depth)
+ } yield tree
+
+ type ModifyParm
+ def genParm(tree: Tree[Int]): Gen[ModifyParm]
+ def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int]
+
+ def genInput: Gen[(Tree[Int], ModifyParm, Tree[Int])] = for {
+ tree <- genTree
+ parm <- genParm(tree)
+ } yield (tree, parm, modify(tree, parm))
+}
+
+trait RedBlackInvariants {
+ self: RedBlackTest =>
+
+ import RedBlackTest._
+
+ def rootIsBlack[A](t: Tree[A]) = t.isBlack
+
+ def areAllLeavesBlack[A](t: Tree[A]): Boolean = t match {
+ case Empty => t.isBlack
+ case ne: NonEmpty[_] => List(ne.left, ne.right) forall areAllLeavesBlack
+ }
+
+ def areRedNodeChildrenBlack[A](t: Tree[A]): Boolean = t match {
+ case RedTree(_, _, left, right) => List(left, right) forall (t => t.isBlack && areRedNodeChildrenBlack(t))
+ case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack
+ case Empty => true
+ }
+
+ def blackNodesToLeaves[A](t: Tree[A]): List[Int] = t match {
+ case Empty => List(1)
+ case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1)
+ case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves
+ }
+
+ def areBlackNodesToLeavesEqual[A](t: Tree[A]): Boolean = t match {
+ case Empty => true
+ case ne: NonEmpty[_] =>
+ (
+ blackNodesToLeaves(ne).distinct.size == 1
+ && areBlackNodesToLeavesEqual(ne.left)
+ && areBlackNodesToLeavesEqual(ne.right)
+ )
+ }
+
+ def orderIsPreserved[A](t: Tree[A]): Boolean =
+ t.iterator zip t.iterator.drop(1) forall { case (x, y) => isSmaller(x._1, y._1) }
+
+ def setup(invariant: Tree[Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) =>
+ invariant(newTree)
+ }
+
+ property("root is black") = setup(rootIsBlack)
+ property("all leaves are black") = setup(areAllLeavesBlack)
+ property("children of red nodes are black") = setup(areRedNodeChildrenBlack)
+ property("black nodes are balanced") = setup(areBlackNodesToLeavesEqual)
+ property("ordering of keys is preserved") = setup(orderIsPreserved)
+}
+
+object TestInsert extends RedBlackTest with RedBlackInvariants {
+ import RedBlackTest._
+
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size + 1)
+ override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = tree update (generateKey(tree, parm), 0)
+
+ def generateKey(tree: Tree[Int], parm: ModifyParm): String = nodeAt(tree, parm) match {
+ case Some((key, _)) => key.init.mkString + "MN"
+ case None => nodeAt(tree, parm - 1) match {
+ case Some((key, _)) => key.init.mkString + "RN"
+ case None => "N"
+ }
+ }
+
+ property("update adds elements") = forAll(genInput) { case (tree, parm, newTree) =>
+ treeContains(newTree, generateKey(tree, parm))
+ }
+}
+
+object TestModify extends RedBlackTest {
+ import RedBlackTest._
+
+ def newValue = 1
+ override def minimumSize = 1
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size)
+ override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map {
+ case (key, _) => tree update (key, newValue)
+ } getOrElse tree
+
+ property("update modifies values") = forAll(genInput) { case (tree, parm, newTree) =>
+ nodeAt(tree,parm) forall { case (key, _) =>
+ newTree.iterator contains (key, newValue)
+ }
+ }
+}
+
+object TestDelete extends RedBlackTest with RedBlackInvariants {
+ import RedBlackTest._
+
+ override def minimumSize = 1
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size)
+ override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map {
+ case (key, _) => tree delete key
+ } getOrElse tree
+
+ property("delete removes elements") = forAll(genInput) { case (tree, parm, newTree) =>
+ nodeAt(tree, parm) forall { case (key, _) =>
+ !treeContains(newTree, key)
+ }
+ }
+}
+
+object TestRange extends RedBlackTest with RedBlackInvariants {
+ import RedBlackTest._
+
+ override type ModifyParm = (Option[Int], Option[Int])
+ override def genParm(tree: Tree[Int]): Gen[ModifyParm] = for {
+ from <- choose(0, tree.iterator.size)
+ to <- choose(0, tree.iterator.size) suchThat (from <=)
+ optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug
+ optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug
+ } yield (optionalFrom, optionalTo)
+
+ override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = {
+ val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
+ val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
+ tree range (from, to)
+ }
+
+ property("range boundaries respected") = forAll(genInput) { case (tree, parm, newTree) =>
+ val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
+ val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
+ ("lower boundary" |: (from forall ( key => newTree.iterator.map(_._1) forall (key <=)))) &&
+ ("upper boundary" |: (to forall ( key => newTree.iterator.map(_._1) forall (key >))))
+ }
+
+ property("range returns all elements") = forAll(genInput) { case (tree, parm, newTree) =>
+ val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
+ val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
+ val filteredTree = (tree.iterator
+ .map(_._1)
+ .filter(key => from forall (key >=))
+ .filter(key => to forall (key <))
+ .toList)
+ filteredTree == newTree.iterator.map(_._1).toList
+ }
+}
+
+object Test extends Properties("RedBlack") {
+ include(TestInsert)
+ include(TestModify)
+ include(TestDelete)
+ include(TestRange)
+}
+
diff --git a/test/files/scalacheck/redblacktree.scala b/test/files/scalacheck/redblacktree.scala
new file mode 100644
index 0000000..bc7f92a
--- /dev/null
+++ b/test/files/scalacheck/redblacktree.scala
@@ -0,0 +1,258 @@
+import collection.immutable.{RedBlackTree => RB}
+import org.scalacheck._
+import Prop._
+import Gen._
+
+/*
+Properties of a Red & Black Tree:
+
+A node is either red or black.
+The root is black. (This rule is used in some definitions and not others. Since the
+root can always be changed from red to black but not necessarily vice-versa this
+rule has little effect on analysis.)
+All leaves are black.
+Both children of every red node are black.
+Every simple path from a given node to any of its descendant leaves contains the same number of black nodes.
+*/
+
+package scala.collection.immutable.redblacktree {
+ abstract class RedBlackTreeTest extends Properties("RedBlackTree") {
+ def minimumSize = 0
+ def maximumSize = 5
+
+ import RB._
+
+ def nodeAt[A](tree: Tree[String, A], n: Int): Option[(String, A)] = if (n < iterator(tree).size && n >= 0)
+ Some(iterator(tree).drop(n).next)
+ else
+ None
+
+ def treeContains[A](tree: Tree[String, A], key: String) = iterator(tree).map(_._1) contains key
+
+ def height(tree: Tree[_, _]): Int = if (tree eq null) 0 else (1 + math.max(height(tree.left), height(tree.right)))
+
+ def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[String, Int]] =
+ if (level == 0) {
+ value(null)
+ } else {
+ for {
+ oddOrEven <- choose(0, 2)
+ tryRed = oddOrEven.sample.get % 2 == 0 // work around arbitrary[Boolean] bug
+ isRed = parentIsBlack && tryRed
+ nextLevel = if (isRed) level else level - 1
+ left <- mkTree(nextLevel, !isRed, label + "L")
+ right <- mkTree(nextLevel, !isRed, label + "R")
+ } yield {
+ if (isRed)
+ RedTree(label + "N", 0, left, right)
+ else
+ BlackTree(label + "N", 0, left, right)
+ }
+ }
+
+ def genTree = for {
+ depth <- choose(minimumSize, maximumSize + 1)
+ tree <- mkTree(depth)
+ } yield tree
+
+ type ModifyParm
+ def genParm(tree: Tree[String, Int]): Gen[ModifyParm]
+ def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int]
+
+ def genInput: Gen[(Tree[String, Int], ModifyParm, Tree[String, Int])] = for {
+ tree <- genTree
+ parm <- genParm(tree)
+ } yield (tree, parm, modify(tree, parm))
+ }
+
+ trait RedBlackTreeInvariants {
+ self: RedBlackTreeTest =>
+
+ import RB._
+
+ def rootIsBlack[A](t: Tree[String, A]) = isBlack(t)
+
+ def areAllLeavesBlack[A](t: Tree[String, A]): Boolean = t match {
+ case null => isBlack(t)
+ case ne => List(ne.left, ne.right) forall areAllLeavesBlack
+ }
+
+ def areRedNodeChildrenBlack[A](t: Tree[String, A]): Boolean = t match {
+ case RedTree(_, _, left, right) => List(left, right) forall (t => isBlack(t) && areRedNodeChildrenBlack(t))
+ case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack
+ case null => true
+ }
+
+ def blackNodesToLeaves[A](t: Tree[String, A]): List[Int] = t match {
+ case null => List(1)
+ case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1)
+ case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves
+ }
+
+ def areBlackNodesToLeavesEqual[A](t: Tree[String, A]): Boolean = t match {
+ case null => true
+ case ne =>
+ (
+ blackNodesToLeaves(ne).distinct.size == 1
+ && areBlackNodesToLeavesEqual(ne.left)
+ && areBlackNodesToLeavesEqual(ne.right)
+ )
+ }
+
+ def orderIsPreserved[A](t: Tree[String, A]): Boolean =
+ iterator(t) zip iterator(t).drop(1) forall { case (x, y) => x._1 < y._1 }
+
+ def heightIsBounded(t: Tree[_, _]): Boolean = height(t) <= (2 * (32 - Integer.numberOfLeadingZeros(count(t) + 2)) - 2)
+
+ def setup(invariant: Tree[String, Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) =>
+ invariant(newTree)
+ }
+
+ property("root is black") = setup(rootIsBlack)
+ property("all leaves are black") = setup(areAllLeavesBlack)
+ property("children of red nodes are black") = setup(areRedNodeChildrenBlack)
+ property("black nodes are balanced") = setup(areBlackNodesToLeavesEqual)
+ property("ordering of keys is preserved") = setup(orderIsPreserved)
+ property("height is bounded") = setup(heightIsBounded)
+ }
+
+ object TestInsert extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size + 1)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = update(tree, generateKey(tree, parm), 0, true)
+
+ def generateKey(tree: Tree[String, Int], parm: ModifyParm): String = nodeAt(tree, parm) match {
+ case Some((key, _)) => key.init.mkString + "MN"
+ case None => nodeAt(tree, parm - 1) match {
+ case Some((key, _)) => key.init.mkString + "RN"
+ case None => "N"
+ }
+ }
+
+ property("update adds elements") = forAll(genInput) { case (tree, parm, newTree) =>
+ treeContains(newTree, generateKey(tree, parm))
+ }
+ }
+
+ object TestModify extends RedBlackTreeTest {
+ import RB._
+
+ def newValue = 1
+ override def minimumSize = 1
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map {
+ case (key, _) => update(tree, key, newValue, true)
+ } getOrElse tree
+
+ property("update modifies values") = forAll(genInput) { case (tree, parm, newTree) =>
+ nodeAt(tree,parm) forall { case (key, _) =>
+ iterator(newTree) contains (key, newValue)
+ }
+ }
+ }
+
+ object TestDelete extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override def minimumSize = 1
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map {
+ case (key, _) => delete(tree, key)
+ } getOrElse tree
+
+ property("delete removes elements") = forAll(genInput) { case (tree, parm, newTree) =>
+ nodeAt(tree, parm) forall { case (key, _) =>
+ !treeContains(newTree, key)
+ }
+ }
+ }
+
+ object TestRange extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = (Option[Int], Option[Int])
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for {
+ from <- choose(0, iterator(tree).size)
+ to <- choose(0, iterator(tree).size) suchThat (from <=)
+ optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug
+ optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug
+ } yield (optionalFrom, optionalTo)
+
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = {
+ val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
+ val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
+ rangeImpl(tree, from, to)
+ }
+
+ property("range boundaries respected") = forAll(genInput) { case (tree, parm, newTree) =>
+ val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
+ val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
+ ("lower boundary" |: (from forall ( key => keysIterator(newTree) forall (key <=)))) &&
+ ("upper boundary" |: (to forall ( key => keysIterator(newTree) forall (key >))))
+ }
+
+ property("range returns all elements") = forAll(genInput) { case (tree, parm, newTree) =>
+ val from = parm._1 flatMap (nodeAt(tree, _) map (_._1))
+ val to = parm._2 flatMap (nodeAt(tree, _) map (_._1))
+ val filteredTree = (keysIterator(tree)
+ .filter(key => from forall (key >=))
+ .filter(key => to forall (key <))
+ .toList)
+ filteredTree == keysIterator(newTree).toList
+ }
+ }
+
+ object TestDrop extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = drop(tree, parm)
+
+ property("drop") = forAll(genInput) { case (tree, parm, newTree) =>
+ iterator(tree).drop(parm).toList == iterator(newTree).toList
+ }
+ }
+
+ object TestTake extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = Int
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = take(tree, parm)
+
+ property("take") = forAll(genInput) { case (tree, parm, newTree) =>
+ iterator(tree).take(parm).toList == iterator(newTree).toList
+ }
+ }
+
+ object TestSlice extends RedBlackTreeTest with RedBlackTreeInvariants {
+ import RB._
+
+ override type ModifyParm = (Int, Int)
+ override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for {
+ from <- choose(0, iterator(tree).size)
+ to <- choose(from, iterator(tree).size)
+ } yield (from, to)
+ override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = slice(tree, parm._1, parm._2)
+
+ property("slice") = forAll(genInput) { case (tree, parm, newTree) =>
+ iterator(tree).slice(parm._1, parm._2).toList == iterator(newTree).toList
+ }
+ }
+}
+
+object Test extends Properties("RedBlackTree") {
+ import collection.immutable.redblacktree._
+ include(TestInsert)
+ include(TestModify)
+ include(TestDelete)
+ include(TestRange)
+ include(TestDrop)
+ include(TestTake)
+ include(TestSlice)
+}
diff --git a/test/files/scalacheck/si4147.scala b/test/files/scalacheck/si4147.scala
new file mode 100644
index 0000000..1453440
--- /dev/null
+++ b/test/files/scalacheck/si4147.scala
@@ -0,0 +1,67 @@
+import org.scalacheck.Prop.forAll
+import org.scalacheck.Properties
+import org.scalacheck.ConsoleReporter.testStatsEx
+import org.scalacheck.Gen
+import org.scalacheck.ConsoleReporter
+
+
+import collection.mutable
+
+
+object Test extends Properties("Mutable TreeSet") {
+
+ val generator = Gen.listOfN(1000, Gen.chooseNum(0, 1000))
+
+ val denseGenerator = Gen.listOfN(1000, Gen.chooseNum(0, 200))
+
+ property("Insertion doesn't allow duplicates values.") = forAll(generator) { (s: List[Int]) =>
+ {
+ val t = mutable.TreeSet[Int](s: _*)
+ t == s.toSet
+ }
+ }
+
+ property("Verification of size method validity") = forAll(generator) { (s: List[Int]) =>
+ {
+ val t = mutable.TreeSet[Int](s: _*)
+ for (a <- s) {
+ t -= a
+ }
+ t.size == 0
+ }
+ }
+
+ property("All inserted elements are removed") = forAll(generator) { (s: List[Int]) =>
+ {
+ val t = mutable.TreeSet[Int](s: _*)
+ for (a <- s) {
+ t -= a
+ }
+ t == Set()
+ }
+ }
+
+ property("Elements are sorted.") = forAll(generator) { (s: List[Int]) =>
+ {
+ val t = mutable.TreeSet[Int](s: _*)
+ t.toList == s.distinct.sorted
+ }
+ }
+
+ property("Implicit CanBuildFrom resolution succeeds as well as the \"same-result-type\" principle.") =
+ forAll(generator) { (s: List[Int]) =>
+ {
+ val t = mutable.TreeSet[Int](s: _*)
+ val t2 = t.map(_ * 2)
+ t2.isInstanceOf[collection.mutable.TreeSet[Int]]
+ }
+ }
+
+ property("A view doesn't expose off bounds elements") = forAll(denseGenerator) { (s: List[Int]) =>
+ {
+ val t = mutable.TreeSet[Int](s: _*)
+ val view = t.rangeImpl(Some(50), Some(150))
+ view.filter(_ < 50) == Set[Int]() && view.filter(_ >= 150) == Set[Int]()
+ }
+ }
+}
diff --git a/test/files/scalacheck/t2460.scala b/test/files/scalacheck/t2460.scala
new file mode 100644
index 0000000..196b437
--- /dev/null
+++ b/test/files/scalacheck/t2460.scala
@@ -0,0 +1,32 @@
+import org.scalacheck.Prop.forAll
+import org.scalacheck.Properties
+import org.scalacheck.ConsoleReporter.testStatsEx
+import org.scalacheck.{Test => SCTest}
+import org.scalacheck.Gen
+
+object Test extends Properties("Regex : Ticket 2460") {
+
+ val vowel = Gen.oneOf("a", "z")
+
+ val numberOfMatch = forAll(vowel) {
+ (s: String) => "\\s*([a-z])\\s*".r("data").findAllMatchIn((1 to 20).map(_ => s).mkString).size == 20
+ }
+
+ val numberOfGroup = forAll(vowel) {
+ (s: String) => "\\s*([a-z])\\s*([a-z])\\s*".r("data").findAllMatchIn((1 to 20).map(_ => s).mkString).next.groupCount == 2
+ }
+
+ val nameOfGroup = forAll(vowel) {
+ (s: String) => "([a-z])".r("data").findAllMatchIn(s).next.group("data") == s
+ }
+
+ val tests = List(
+ ("numberOfMatch", numberOfMatch),
+ ("numberOfGroup", numberOfGroup),
+ ("nameOfGroup", nameOfGroup)
+ )
+
+ /*tests foreach {
+ case (name, p) => testStatsEx(name, SCTest.check(p))
+ }*/
+}
diff --git a/test/files/scalacheck/treemap.scala b/test/files/scalacheck/treemap.scala
new file mode 100644
index 0000000..f672637
--- /dev/null
+++ b/test/files/scalacheck/treemap.scala
@@ -0,0 +1,154 @@
+import collection.immutable._
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+import util._
+import Buildable._
+
+object Test extends Properties("TreeMap") {
+ def genTreeMap[A: Arbitrary: Ordering, B: Arbitrary]: Gen[TreeMap[A, B]] =
+ for {
+ keys <- listOf(arbitrary[A])
+ values <- listOfN(keys.size, arbitrary[B])
+ } yield TreeMap(keys zip values: _*)
+ implicit def arbTreeMap[A : Arbitrary : Ordering, B : Arbitrary] = Arbitrary(genTreeMap[A, B])
+
+ property("foreach/iterator consistency") = forAll { (subject: TreeMap[Int, String]) =>
+ val it = subject.iterator
+ var consistent = true
+ subject.foreach { element =>
+ consistent &&= it.hasNext && element == it.next
+ }
+ consistent
+ }
+
+ property("worst-case tree height is iterable") = forAll(choose(0, 10), arbitrary[Boolean]) { (n: Int, even: Boolean) =>
+ /*
+ * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5]
+ * you can construct a skinny tree of height 2n by inserting the elements [1 .. 2^(n+1) - 2] and a tree of height
+ * 2n+1 by inserting the elements [1 .. 3 * 2^n - 2], both in reverse order.
+ *
+ * Since we allocate a fixed size buffer in the iterator (based on the tree size) we need to ensure
+ * it is big enough for these worst-case trees.
+ */
+ val highest = if (even) (1 << (n+1)) - 2 else 3*(1 << n) - 2
+ val values = (1 to highest).reverse
+ val subject = TreeMap(values zip values: _*)
+ val it = subject.iterator
+ try { while (it.hasNext) it.next; true } catch { case _ => false }
+ }
+
+ property("sorted") = forAll { (subject: TreeMap[Int, String]) => (subject.size >= 3) ==> {
+ subject.zip(subject.tail).forall { case (x, y) => x._1 < y._1 }
+ }}
+
+ property("contains all") = forAll { (arr: List[(Int, String)]) =>
+ val subject = TreeMap(arr: _*)
+ arr.map(_._1).forall(subject.contains(_))
+ }
+
+ property("size") = forAll { (elements: List[(Int, Int)]) =>
+ val subject = TreeMap(elements: _*)
+ elements.map(_._1).distinct.size == subject.size
+ }
+
+ property("toSeq") = forAll { (elements: List[(Int, Int)]) =>
+ val subject = TreeMap(elements: _*)
+ elements.map(_._1).distinct.sorted == subject.toSeq.map(_._1)
+ }
+
+ property("head") = forAll { (elements: List[Int]) => elements.nonEmpty ==> {
+ val subject = TreeMap(elements zip elements: _*)
+ elements.min == subject.head._1
+ }}
+
+ property("last") = forAll { (elements: List[Int]) => elements.nonEmpty ==> {
+ val subject = TreeMap(elements zip elements: _*)
+ elements.max == subject.last._1
+ }}
+
+ property("head/tail identity") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> {
+ subject == (subject.tail + subject.head)
+ }}
+
+ property("init/last identity") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> {
+ subject == (subject.init + subject.last)
+ }}
+
+ property("take") = forAll { (subject: TreeMap[Int, String]) =>
+ val n = choose(0, subject.size).sample.get
+ n == subject.take(n).size && subject.take(n).forall(elt => subject.get(elt._1) == Some(elt._2))
+ }
+
+ property("drop") = forAll { (subject: TreeMap[Int, String]) =>
+ val n = choose(0, subject.size).sample.get
+ (subject.size - n) == subject.drop(n).size && subject.drop(n).forall(elt => subject.get(elt._1) == Some(elt._2))
+ }
+
+ property("take/drop identity") = forAll { (subject: TreeMap[Int, String]) =>
+ val n = choose(-1, subject.size + 1).sample.get
+ subject == subject.take(n) ++ subject.drop(n)
+ }
+
+ property("splitAt") = forAll { (subject: TreeMap[Int, String]) =>
+ val n = choose(-1, subject.size + 1).sample.get
+ val (prefix, suffix) = subject.splitAt(n)
+ prefix == subject.take(n) && suffix == subject.drop(n)
+ }
+
+ def genSliceParms = for {
+ tree <- genTreeMap[Int, String]
+ from <- choose(0, tree.size)
+ until <- choose(from, tree.size)
+ } yield (tree, from, until)
+
+ property("slice") = forAll(genSliceParms) { case (subject, from, until) =>
+ val slice = subject.slice(from, until)
+ slice.size == until - from && subject.toSeq == subject.take(from).toSeq ++ slice ++ subject.drop(until)
+ }
+
+ property("takeWhile") = forAll { (subject: TreeMap[Int, String]) =>
+ val result = subject.takeWhile(_._1 < 0)
+ result.forall(_._1 < 0) && result == subject.take(result.size)
+ }
+
+ property("dropWhile") = forAll { (subject: TreeMap[Int, String]) =>
+ val result = subject.dropWhile(_._1 < 0)
+ result.forall(_._1 >= 0) && result == subject.takeRight(result.size)
+ }
+
+ property("span identity") = forAll { (subject: TreeMap[Int, String]) =>
+ val (prefix, suffix) = subject.span(_._1 < 0)
+ prefix.forall(_._1 < 0) && suffix.forall(_._1 >= 0) && subject == prefix ++ suffix
+ }
+
+ property("from is inclusive") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> {
+ val n = choose(0, subject.size - 1).sample.get
+ val from = subject.drop(n).firstKey
+ subject.from(from).firstKey == from && subject.from(from).forall(_._1 >= from)
+ }}
+
+ property("to is inclusive") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> {
+ val n = choose(0, subject.size - 1).sample.get
+ val to = subject.drop(n).firstKey
+ subject.to(to).lastKey == to && subject.to(to).forall(_._1 <= to)
+ }}
+
+ property("until is exclusive") = forAll { (subject: TreeMap[Int, String]) => subject.size > 1 ==> {
+ val n = choose(1, subject.size - 1).sample.get
+ val until = subject.drop(n).firstKey
+ subject.until(until).lastKey == subject.take(n).lastKey && subject.until(until).forall(_._1 <= until)
+ }}
+
+ property("remove single") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> {
+ val key = oneOf(subject.keys.toSeq).sample.get
+ val removed = subject - key
+ subject.contains(key) && !removed.contains(key) && subject.size - 1 == removed.size
+ }}
+
+ property("remove all") = forAll { (subject: TreeMap[Int, String]) =>
+ val result = subject.foldLeft(subject)((acc, elt) => acc - elt._1)
+ result.isEmpty
+ }
+}
diff --git a/test/files/scalacheck/treeset.scala b/test/files/scalacheck/treeset.scala
new file mode 100644
index 0000000..98e38c8
--- /dev/null
+++ b/test/files/scalacheck/treeset.scala
@@ -0,0 +1,152 @@
+import collection.immutable._
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+import util._
+
+object Test extends Properties("TreeSet") {
+ def genTreeSet[A: Arbitrary: Ordering]: Gen[TreeSet[A]] =
+ for {
+ elements <- listOf(arbitrary[A])
+ } yield TreeSet(elements: _*)
+ implicit def arbTreeSet[A : Arbitrary : Ordering]: Arbitrary[TreeSet[A]] = Arbitrary(genTreeSet)
+
+ property("foreach/iterator consistency") = forAll { (subject: TreeSet[Int]) =>
+ val it = subject.iterator
+ var consistent = true
+ subject.foreach { element =>
+ consistent &&= it.hasNext && element == it.next
+ }
+ consistent
+ }
+
+ property("worst-case tree height is iterable") = forAll(choose(0, 10), arbitrary[Boolean]) { (n: Int, even: Boolean) =>
+ /*
+ * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5]
+ * you can construct a skinny tree of height 2n by inserting the elements [1 .. 2^(n+1) - 2] and a tree of height
+ * 2n+1 by inserting the elements [1 .. 3 * 2^n - 2], both in reverse order.
+ *
+ * Since we allocate a fixed size buffer in the iterator (based on the tree size) we need to ensure
+ * it is big enough for these worst-case trees.
+ */
+ val highest = if (even) (1 << (n+1)) - 2 else 3*(1 << n) - 2
+ val values = (1 to highest).reverse
+ val subject = TreeSet(values: _*)
+ val it = subject.iterator
+ try { while (it.hasNext) it.next; true } catch { case _ => false }
+ }
+
+ property("sorted") = forAll { (subject: TreeSet[Int]) => (subject.size >= 3) ==> {
+ subject.zip(subject.tail).forall { case (x, y) => x < y }
+ }}
+
+ property("contains all") = forAll { (elements: List[Int]) =>
+ val subject = TreeSet(elements: _*)
+ elements.forall(subject.contains)
+ }
+
+ property("size") = forAll { (elements: List[Int]) =>
+ val subject = TreeSet(elements: _*)
+ elements.distinct.size == subject.size
+ }
+
+ property("toSeq") = forAll { (elements: List[Int]) =>
+ val subject = TreeSet(elements: _*)
+ elements.distinct.sorted == subject.toSeq
+ }
+
+ property("head") = forAll { (elements: List[Int]) => elements.nonEmpty ==> {
+ val subject = TreeSet(elements: _*)
+ elements.min == subject.head
+ }}
+
+ property("last") = forAll { (elements: List[Int]) => elements.nonEmpty ==> {
+ val subject = TreeSet(elements: _*)
+ elements.max == subject.last
+ }}
+
+ property("head/tail identity") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> {
+ subject == (subject.tail + subject.head)
+ }}
+
+ property("init/last identity") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> {
+ subject == (subject.init + subject.last)
+ }}
+
+ property("take") = forAll { (subject: TreeSet[Int]) =>
+ val n = choose(0, subject.size).sample.get
+ n == subject.take(n).size && subject.take(n).forall(subject.contains)
+ }
+
+ property("drop") = forAll { (subject: TreeSet[Int]) =>
+ val n = choose(0, subject.size).sample.get
+ (subject.size - n) == subject.drop(n).size && subject.drop(n).forall(subject.contains)
+ }
+
+ property("take/drop identity") = forAll { (subject: TreeSet[Int]) =>
+ val n = choose(-1, subject.size + 1).sample.get
+ subject == subject.take(n) ++ subject.drop(n)
+ }
+
+ property("splitAt") = forAll { (subject: TreeSet[Int]) =>
+ val n = choose(-1, subject.size + 1).sample.get
+ val (prefix, suffix) = subject.splitAt(n)
+ prefix == subject.take(n) && suffix == subject.drop(n)
+ }
+
+ def genSliceParms = for {
+ tree <- genTreeSet[Int]
+ from <- choose(0, tree.size)
+ until <- choose(from, tree.size)
+ } yield (tree, from, until)
+
+ property("slice") = forAll(genSliceParms) { case (subject, from, until) =>
+ val slice = subject.slice(from, until)
+ slice.size == until - from && subject.toSeq == subject.take(from).toSeq ++ slice ++ subject.drop(until)
+ }
+
+ property("takeWhile") = forAll { (subject: TreeSet[Int]) =>
+ val result = subject.takeWhile(_ < 0)
+ result.forall(_ < 0) && result == subject.take(result.size)
+ }
+
+ property("dropWhile") = forAll { (subject: TreeSet[Int]) =>
+ val result = subject.dropWhile(_ < 0)
+ result.forall(_ >= 0) && result == subject.takeRight(result.size)
+ }
+
+ property("span identity") = forAll { (subject: TreeSet[Int]) =>
+ val (prefix, suffix) = subject.span(_ < 0)
+ prefix.forall(_ < 0) && suffix.forall(_ >= 0) && subject == prefix ++ suffix
+ }
+
+ property("from is inclusive") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> {
+ val n = choose(0, subject.size - 1).sample.get
+ val from = subject.drop(n).firstKey
+ subject.from(from).firstKey == from && subject.from(from).forall(_ >= from)
+ }}
+
+ property("to is inclusive") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> {
+ val n = choose(0, subject.size - 1).sample.get
+ val to = subject.drop(n).firstKey
+ subject.to(to).lastKey == to && subject.to(to).forall(_ <= to)
+ }}
+
+ property("until is exclusive") = forAll { (subject: TreeSet[Int]) => subject.size > 1 ==> {
+ val n = choose(1, subject.size - 1).sample.get
+ val until = subject.drop(n).firstKey
+ subject.until(until).lastKey == subject.take(n).lastKey && subject.until(until).forall(_ <= until)
+ }}
+
+ property("remove single") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> {
+ val element = oneOf(subject.toSeq).sample.get
+ val removed = subject - element
+ subject.contains(element) && !removed.contains(element) && subject.size - 1 == removed.size
+ }}
+
+ property("remove all") = forAll { (subject: TreeSet[Int]) =>
+ val result = subject.foldLeft(subject)((acc, elt) => acc - elt)
+ result.isEmpty
+ }
+}
diff --git a/test/files/scalap/abstractClass/result.test b/test/files/scalap/abstractClass/result.test
index 9b8fc4d..ef1daac 100644
--- a/test/files/scalap/abstractClass/result.test
+++ b/test/files/scalap/abstractClass/result.test
@@ -1,4 +1,4 @@
-abstract class AbstractClass extends java.lang.Object with scala.ScalaObject {
+abstract class AbstractClass extends scala.AnyRef {
def this() = { /* compiled code */ }
def foo : scala.Predef.String
}
diff --git a/test/files/scalap/abstractMethod/result.test b/test/files/scalap/abstractMethod/result.test
index a1bd378..40fa02d 100644
--- a/test/files/scalap/abstractMethod/result.test
+++ b/test/files/scalap/abstractMethod/result.test
@@ -1,4 +1,4 @@
-trait AbstractMethod extends java.lang.Object with scala.ScalaObject {
+trait AbstractMethod extends scala.AnyRef {
def $init$() : scala.Unit = { /* compiled code */ }
def arity : scala.Int
def isCool : scala.Boolean = { /* compiled code */ }
diff --git a/test/files/scalap/caseClass/result.test b/test/files/scalap/caseClass/result.test
index 9b65be4..7d7aa4f 100644
--- a/test/files/scalap/caseClass/result.test
+++ b/test/files/scalap/caseClass/result.test
@@ -1,13 +1,20 @@
-case class CaseClass[A <: scala.Seq[scala.Int]](i : A, s : scala.Predef.String) extends java.lang.Object with scala.ScalaObject with scala.Product with scala.Serializable {
+case class CaseClass[A <: scala.Seq[scala.Int]](i : A, s : scala.Predef.String) extends scala.AnyRef with scala.Product with scala.Serializable {
val i : A = { /* compiled code */ }
val s : scala.Predef.String = { /* compiled code */ }
def foo : scala.Int = { /* compiled code */ }
def copy[A <: scala.Seq[scala.Int]](i : A, s : scala.Predef.String) : CaseClass[A] = { /* compiled code */ }
+ override def productPrefix : java.lang.String = { /* compiled code */ }
+ def productArity : scala.Int = { /* compiled code */ }
+ def productElement(x$1 : scala.Int) : scala.Any = { /* compiled code */ }
+ override def productIterator : scala.collection.Iterator[scala.Any] = { /* compiled code */ }
+ def canEqual(x$1 : scala.Any) : scala.Boolean = { /* compiled code */ }
override def hashCode() : scala.Int = { /* compiled code */ }
- override def toString() : scala.Predef.String = { /* compiled code */ }
+ override def toString() : java.lang.String = { /* compiled code */ }
override def equals(x$1 : scala.Any) : scala.Boolean = { /* compiled code */ }
- override def productPrefix : java.lang.String = { /* compiled code */ }
- override def productArity : scala.Int = { /* compiled code */ }
- override def productElement(x$1 : scala.Int) : scala.Any = { /* compiled code */ }
- override def canEqual(x$1 : scala.Any) : scala.Boolean = { /* compiled code */ }
+}
+object CaseClass extends scala.AnyRef with scala.Serializable {
+ def this() = { /* compiled code */ }
+ final override def toString() : java.lang.String = { /* compiled code */ }
+ def apply[A <: scala.Seq[scala.Int]](i : A, s : scala.Predef.String) : CaseClass[A] = { /* compiled code */ }
+ def unapply[A <: scala.Seq[scala.Int]](x$0 : CaseClass[A]) : scala.Option[scala.Tuple2[A, scala.Predef.String]] = { /* compiled code */ }
}
diff --git a/test/files/scalap/caseObject/result.test b/test/files/scalap/caseObject/result.test
index 556c555..867a4b2 100644
--- a/test/files/scalap/caseObject/result.test
+++ b/test/files/scalap/caseObject/result.test
@@ -1,10 +1,10 @@
-case object CaseObject extends java.lang.Object with scala.ScalaObject with scala.Product with scala.Serializable {
+case object CaseObject extends scala.AnyRef with scala.Product with scala.Serializable {
def bar : scala.Int = { /* compiled code */ }
- final override def hashCode() : scala.Int = { /* compiled code */ }
- final override def toString() : java.lang.String = { /* compiled code */ }
override def productPrefix : java.lang.String = { /* compiled code */ }
- override def productArity : scala.Int = { /* compiled code */ }
- override def productElement(x$1 : scala.Int) : scala.Any = { /* compiled code */ }
- override def canEqual(x$1 : scala.Any) : scala.Boolean = { /* compiled code */ }
- protected def readResolve() : java.lang.Object = { /* compiled code */ }
+ def productArity : scala.Int = { /* compiled code */ }
+ def productElement(x$1 : scala.Int) : scala.Any = { /* compiled code */ }
+ override def productIterator : scala.collection.Iterator[scala.Any] = { /* compiled code */ }
+ def canEqual(x$1 : scala.Any) : scala.Boolean = { /* compiled code */ }
+ override def hashCode() : scala.Int = { /* compiled code */ }
+ override def toString() : java.lang.String = { /* compiled code */ }
}
diff --git a/test/files/scalap/cbnParam/result.test b/test/files/scalap/cbnParam/result.test
index c6b2f4c..52ecb6a 100644
--- a/test/files/scalap/cbnParam/result.test
+++ b/test/files/scalap/cbnParam/result.test
@@ -1,3 +1,3 @@
-class CbnParam extends java.lang.Object with scala.ScalaObject {
+class CbnParam extends scala.AnyRef {
def this(s : => scala.Predef.String) = { /* compiled code */ }
}
diff --git a/test/files/scalap/classPrivate/result.test b/test/files/scalap/classPrivate/result.test
index 0d12b77..ab2d40c 100644
--- a/test/files/scalap/classPrivate/result.test
+++ b/test/files/scalap/classPrivate/result.test
@@ -1,10 +1,10 @@
-class ClassPrivate extends java.lang.Object with scala.ScalaObject {
+class ClassPrivate extends scala.AnyRef {
def this() = { /* compiled code */ }
def baz : scala.Int = { /* compiled code */ }
- class Outer extends java.lang.Object with scala.ScalaObject {
+ class Outer extends scala.AnyRef {
def this() = { /* compiled code */ }
private[ClassPrivate] def qux : scala.Int = { /* compiled code */ }
}
protected def quux : scala.Int = { /* compiled code */ }
private[ClassPrivate] def bar : scala.Int = { /* compiled code */ }
-}
\ No newline at end of file
+}
diff --git a/test/files/scalap/classWithExistential/result.test b/test/files/scalap/classWithExistential/result.test
index 91afdda..caee3fd 100644
--- a/test/files/scalap/classWithExistential/result.test
+++ b/test/files/scalap/classWithExistential/result.test
@@ -1,4 +1,4 @@
-class ClassWithExistential extends java.lang.Object with scala.ScalaObject {
+class ClassWithExistential extends scala.AnyRef {
def this() = { /* compiled code */ }
def foo[A, B] : scala.Function1[A, B forSome {type A <: scala.Seq[scala.Int]; type B >: scala.Predef.String}] = { /* compiled code */ }
-}
\ No newline at end of file
+}
diff --git a/test/files/scalap/classWithSelfAnnotation/result.test b/test/files/scalap/classWithSelfAnnotation/result.test
index 326437c..82bbd9e 100644
--- a/test/files/scalap/classWithSelfAnnotation/result.test
+++ b/test/files/scalap/classWithSelfAnnotation/result.test
@@ -1,4 +1,4 @@
-class ClassWithSelfAnnotation extends java.lang.Object with scala.ScalaObject {
+class ClassWithSelfAnnotation extends scala.AnyRef {
this : ClassWithSelfAnnotation with java.lang.CharSequence =>
def this() = { /* compiled code */ }
def foo : scala.Int = { /* compiled code */ }
diff --git a/test/files/scalap/covariantParam/result.test b/test/files/scalap/covariantParam/result.test
index 8acd9b4..f7a3c98 100644
--- a/test/files/scalap/covariantParam/result.test
+++ b/test/files/scalap/covariantParam/result.test
@@ -1,4 +1,4 @@
-class CovariantParam[+A] extends java.lang.Object with scala.ScalaObject {
+class CovariantParam[+A] extends scala.AnyRef {
def this() = { /* compiled code */ }
def foo[A](a : A) : scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/defaultParameter/result.test b/test/files/scalap/defaultParameter/result.test
index 38bf6ac..0c775ea 100644
--- a/test/files/scalap/defaultParameter/result.test
+++ b/test/files/scalap/defaultParameter/result.test
@@ -1,3 +1,3 @@
-trait DefaultParameter extends java.lang.Object {
+trait DefaultParameter extends scala.AnyRef {
def foo(s : scala.Predef.String) : scala.Unit
-}
\ No newline at end of file
+}
diff --git a/test/files/scalap/implicitParam/result.test b/test/files/scalap/implicitParam/result.test
index 11d678d..a2cfd60 100644
--- a/test/files/scalap/implicitParam/result.test
+++ b/test/files/scalap/implicitParam/result.test
@@ -1,4 +1,4 @@
-class ImplicitParam extends java.lang.Object with scala.ScalaObject {
+class ImplicitParam extends scala.AnyRef {
def this() = { /* compiled code */ }
def foo(i : scala.Int)(implicit f : scala.Float, d : scala.Double) : scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/packageObject/result.test b/test/files/scalap/packageObject/result.test
index 6a8d6ae..5732d92 100644
--- a/test/files/scalap/packageObject/result.test
+++ b/test/files/scalap/packageObject/result.test
@@ -1,4 +1,4 @@
-package object PackageObject extends java.lang.Object with scala.ScalaObject {
+package object PackageObject extends scala.AnyRef {
def this() = { /* compiled code */ }
type A = scala.Predef.String
def foo(i : scala.Int) : scala.Int = { /* compiled code */ }
diff --git a/test/files/scalap/paramClauses/result.test b/test/files/scalap/paramClauses/result.test
index 9ef93d2..3a141e8 100644
--- a/test/files/scalap/paramClauses/result.test
+++ b/test/files/scalap/paramClauses/result.test
@@ -1,4 +1,4 @@
-class ParamClauses extends java.lang.Object with scala.ScalaObject {
+class ParamClauses extends scala.AnyRef {
def this() = { /* compiled code */ }
def foo(i : scala.Int)(s : scala.Predef.String)(t : scala.Double) : scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/paramNames/result.test b/test/files/scalap/paramNames/result.test
index f9d98d9..85e37f8 100644
--- a/test/files/scalap/paramNames/result.test
+++ b/test/files/scalap/paramNames/result.test
@@ -1,4 +1,4 @@
-class ParamNames extends java.lang.Object with scala.ScalaObject {
+class ParamNames extends scala.AnyRef {
def this() = { /* compiled code */ }
def foo(s : => scala.Seq[scala.Int], s2 : => scala.Seq[scala.Any]) : scala.Unit = { /* compiled code */ }
}
diff --git a/test/files/scalap/sequenceParam/result.test b/test/files/scalap/sequenceParam/result.test
index 4b9d784..142d92f 100644
--- a/test/files/scalap/sequenceParam/result.test
+++ b/test/files/scalap/sequenceParam/result.test
@@ -1,3 +1,3 @@
-class SequenceParam extends java.lang.Object with scala.ScalaObject {
+class SequenceParam extends scala.AnyRef {
def this(s : scala.Predef.String, i : scala.Int*) = { /* compiled code */ }
}
diff --git a/test/files/scalap/simpleClass/result.test b/test/files/scalap/simpleClass/result.test
index d10b633..4fdf25d 100644
--- a/test/files/scalap/simpleClass/result.test
+++ b/test/files/scalap/simpleClass/result.test
@@ -1,4 +1,4 @@
-class SimpleClass extends java.lang.Object with scala.ScalaObject {
+class SimpleClass extends scala.AnyRef {
def this() = { /* compiled code */ }
def foo : scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/traitObject/result.test b/test/files/scalap/traitObject/result.test
index 0d7de15..104ba14 100644
--- a/test/files/scalap/traitObject/result.test
+++ b/test/files/scalap/traitObject/result.test
@@ -1,8 +1,8 @@
-trait TraitObject extends java.lang.Object with scala.ScalaObject {
+trait TraitObject extends scala.AnyRef {
def $init$() : scala.Unit = { /* compiled code */ }
def foo : scala.Int = { /* compiled code */ }
}
-object TraitObject extends java.lang.Object with scala.ScalaObject {
+object TraitObject extends scala.AnyRef {
def this() = { /* compiled code */ }
def bar : scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/typeAnnotations/result.test b/test/files/scalap/typeAnnotations/result.test
index b565d61..407b023 100644
--- a/test/files/scalap/typeAnnotations/result.test
+++ b/test/files/scalap/typeAnnotations/result.test
@@ -1,8 +1,8 @@
-abstract class TypeAnnotations[@scala.specialized R] extends java.lang.Object with scala.ScalaObject {
+abstract class TypeAnnotations[@scala.specialized R] extends scala.AnyRef {
def this() = { /* compiled code */ }
@scala.specialized
val x : scala.Int = { /* compiled code */ }
@scala.specialized
type T
def compose[@scala.specialized A](x : A, y : R) : A = { /* compiled code */ }
-}
\ No newline at end of file
+}
diff --git a/test/files/scalap/valAndVar/result.test b/test/files/scalap/valAndVar/result.test
index 934ad0a..e940da9 100644
--- a/test/files/scalap/valAndVar/result.test
+++ b/test/files/scalap/valAndVar/result.test
@@ -1,4 +1,4 @@
-class ValAndVar extends java.lang.Object with scala.ScalaObject {
+class ValAndVar extends scala.AnyRef {
def this() = { /* compiled code */ }
val foo : java.lang.String = { /* compiled code */ }
var bar : scala.Int = { /* compiled code */ }
diff --git a/test/files/scalap/wildcardType/result.test b/test/files/scalap/wildcardType/result.test
index aa3d5d5..e43261d 100644
--- a/test/files/scalap/wildcardType/result.test
+++ b/test/files/scalap/wildcardType/result.test
@@ -1,3 +1,3 @@
-class WildcardType extends java.lang.Object with scala.ScalaObject {
+class WildcardType extends scala.AnyRef {
def this(f : scala.Function1[scala.Int, _]) = { /* compiled code */ }
}
diff --git a/test/files/specialized/SI-7343.scala b/test/files/specialized/SI-7343.scala
new file mode 100644
index 0000000..5ee6830
--- /dev/null
+++ b/test/files/specialized/SI-7343.scala
@@ -0,0 +1,55 @@
+class Parent[@specialized(Int) T]
+
+object Test extends App {
+
+ /**
+ * This method will check if specialization is correctly rewiring parents
+ * for classes defined inside methods. The pattern is important since this
+ * is how closures are currently represented: as locally-defined anonymous
+ * classes, which usually end up inside methods. For these closures we do
+ * want their parents rewired correctly:
+ *
+ * ```
+ * def checkSuperClass$mIc$sp[T](t: T, ...) = {
+ * class X extends Parent$mcI$sp // instead of just Parent
+ * ...
+ * }
+ */
+ def checkSuperClass[@specialized(Int) T](t: T, expectedXSuper: String) = {
+ // test target:
+ // - in checkSuperClass, X should extend Parent
+ // - in checkSuperClass$mIc$sp, X should extend Parent$mcI$sp
+ class X extends Parent[T]()
+
+ // get the superclass for X and make sure it's correct
+ val actualXSuper = (new X).getClass().getSuperclass().getSimpleName()
+ assert(actualXSuper == expectedXSuper, actualXSuper + " != " + expectedXSuper)
+ }
+
+ checkSuperClass("x", "Parent")
+ checkSuperClass(101, "Parent$mcI$sp")
+
+ /**
+ * This is the same check, but in value. It should work exactly the same
+ * as its method counterpart.
+ */
+ class Val[@specialized(Int) T](t: T, expectedXSuper: String) {
+ val check: T = {
+ class X extends Parent[T]()
+
+ // get the superclass for X and make sure it's correct
+ val actualXSuper = (new X).getClass().getSuperclass().getSimpleName()
+ assert(actualXSuper == expectedXSuper, actualXSuper + " != " + expectedXSuper)
+ t
+ }
+ }
+
+ new Val("x", "Parent")
+ new Val(101, "Parent$mcI$sp")
+
+ /**
+ * NOTE: The the same check, only modified to affect constructors, won't
+ * work since the class X definition will always be lifted to become a
+ * member of the class, making it impossible to force its duplication.
+ */
+}
diff --git a/test/files/specialized/SI-7344.scala b/test/files/specialized/SI-7344.scala
new file mode 100644
index 0000000..1040460
--- /dev/null
+++ b/test/files/specialized/SI-7344.scala
@@ -0,0 +1,53 @@
+/* Test for SI-7344, where specialized methods inside the bodies of other
+ * methods are not specialized, although they might as well be. The name
+ * for the specialized method should not be different depending on the
+ * outside method/class' specialization. */
+
+class Test[@specialized(Int, Double) X](val x: X) {
+
+ def checkSpecialization[Y](@specialized(Int, Double) y: Y): X = {
+
+ // checking the specialization using the method name, which we can
+ // extract from an exception's stack trace. We can match just the
+ // prefix, since the compiler will add a suffix to the method name
+ // during lambdalift, when it lifts the local methods outside.
+ def specMe[@specialized(Int, Double) T, N](t: T, n: N): Unit = checkNameStartsWith(n.toString)
+
+ // expected to specialize:
+ specMe("x", "specMe")
+ specMe(123, "specMe$mIc$sp")
+ specMe(1.3, new { override def toString = "specMe$mDc$sp" })
+
+ x
+ }
+
+ // name matching:
+ private[this] def checkNameStartsWith(prefix: String): Unit = {
+ val method = (new Exception).getStackTrace()(1).getMethodName()
+ assert(method.startsWith(prefix), method + ".startsWith(" + prefix + ") should be true")
+ }
+}
+
+object Test extends App {
+ val t1 = new Test("x")
+ val t2 = new Test(123)
+ val t3 = new Test(1.3)
+
+ // we want specialization to rewire these,
+ // that's why they're not in a for loop:
+ t1.checkSpecialization("x")
+
+ // Prevented by SI-7579:
+ // The duplicator loses the @specialized annotation,
+ // so our tree transformation doesn't know it needs to
+ // specialize specMe inside the duplicated (and specialized)
+ // variants of the `checkSpecialization` method
+ // t1.checkSpecialization(123)
+ // t1.checkSpecialization(1.3)
+ // t2.checkSpecialization("x")
+ // t2.checkSpecialization(123)
+ // t2.checkSpecialization(1.3)
+ // t3.checkSpecialization("x")
+ // t3.checkSpecialization(123)
+ // t3.checkSpecialization(1.3)
+}
diff --git a/test/files/specialized/arrays-traits.check b/test/files/specialized/arrays-traits.check
index 92af4f1..40687a7 100644
--- a/test/files/specialized/arrays-traits.check
+++ b/test/files/specialized/arrays-traits.check
@@ -1,6 +1,6 @@
-0
-0
-0
1
2
-1
\ No newline at end of file
+1
+3
+4
+2
diff --git a/test/files/specialized/arrays-traits.scala b/test/files/specialized/arrays-traits.scala
index de54d22..34a1c37 100644
--- a/test/files/specialized/arrays-traits.scala
+++ b/test/files/specialized/arrays-traits.scala
@@ -1,20 +1,12 @@
-
-
-
import runtime.ScalaRunTime._
-
-
trait SuperS[@specialized(AnyRef) T] {
def arr: Array[T]
def foo() = arr(0)
def bar(b: Array[T]) = b(0) = arr(0)
}
-
-class BaseS[@specialized(AnyRef) T](val arr: Array[T]) extends SuperS[T] {
-}
-
+class BaseS[@specialized(AnyRef) T](val arr: Array[T]) extends SuperS[T] { }
trait SuperG[T] {
def arr: Array[T]
@@ -22,13 +14,9 @@ trait SuperG[T] {
def bar(b: Array[T]) = b(0) = arr(0)
}
-
-class BaseG[T](val arr: Array[T]) extends SuperG[T] {
-}
-
+class BaseG[T](val arr: Array[T]) extends SuperG[T] { }
object Test {
-
def main(args: Array[String]) {
(new BaseS(new Array[String](1)): SuperS[String]).foo
println(arrayApplyCount)
@@ -42,5 +30,4 @@ object Test {
println(arrayApplyCount)
println(arrayUpdateCount)
}
-
}
diff --git a/test/files/specialized/arrays.check b/test/files/specialized/arrays.check
index d37dfb7..8df790f 100644
--- a/test/files/specialized/arrays.check
+++ b/test/files/specialized/arrays.check
@@ -1,4 +1,4 @@
-0
-0
50
-51
\ No newline at end of file
+51
+101
+102
diff --git a/test/files/specialized/fft.check b/test/files/specialized/fft.check
index 8457290..74cb9bb 100644
--- a/test/files/specialized/fft.check
+++ b/test/files/specialized/fft.check
@@ -1,4 +1,4 @@
Processing 65536 items
Boxed doubles: 0
Boxed ints: 2
-Boxed longs: 1245366
+Boxed longs: 1179811
diff --git a/test/files/specialized/spec-ame.check b/test/files/specialized/spec-ame.check
index 9c1713c..cf18c01 100644
--- a/test/files/specialized/spec-ame.check
+++ b/test/files/specialized/spec-ame.check
@@ -1,3 +1,3 @@
abc
10
-3
\ No newline at end of file
+2
diff --git a/test/files/specialized/spec-ame.scala b/test/files/specialized/spec-ame.scala
index 79ee421..129fb9f 100644
--- a/test/files/specialized/spec-ame.scala
+++ b/test/files/specialized/spec-ame.scala
@@ -13,6 +13,9 @@ object Test {
def main(args: Array[String]) {
println((new A("abc")).foo.value)
println((new A(10)).foo.value)
+ // before fixing SI-7343, this was printing 3. Now it's printing 2,
+ // since the anonymous class created by doing new B[T] { ... } when
+ // T = Int is now rewired to B$mcI$sp instead of just B[Int]
println(runtime.BoxesRunTime.integerBoxCount)
}
}
diff --git a/test/files/specialized/spec-hlists.check b/test/files/specialized/spec-hlists.check
new file mode 100644
index 0000000..0ab3339
--- /dev/null
+++ b/test/files/specialized/spec-hlists.check
@@ -0,0 +1,2 @@
+class HCons$mcI$sp
+class HCons$mcI$sp
diff --git a/test/files/specialized/spec-hlists.scala b/test/files/specialized/spec-hlists.scala
new file mode 100644
index 0000000..8c4ac8f
--- /dev/null
+++ b/test/files/specialized/spec-hlists.scala
@@ -0,0 +1,29 @@
+/** Test contributed by Stefan Zeiger showing that HLists can be
+ * specialized.
+ */
+
+sealed trait HList {
+ type Self <: HList
+
+ type |: [E] = HCons[E, Self]
+
+ final def |: [@specialized E](elem: E): |: [E] = new HCons[E, Self](elem, this.asInstanceOf[Self])
+
+ def m[@specialized E, T <: AnyRef](x: E): T = null.asInstanceOf[T]
+}
+
+final class HCons[@specialized H, T <: HList](val head: H, val tail: T) extends HList {
+ type Self = HCons[H, T]
+}
+
+final object HNil extends HList {
+ type Self = HNil.type
+}
+
+object Test extends App {
+ val l1 = new HCons(42, "foo" |: HNil)
+ println(l1.getClass)
+
+ val l2 = 42 |: "abc" |: HNil
+ println(l2.getClass)
+}
diff --git a/test/files/specialized/spec-matrix.check b/test/files/specialized/spec-matrix-new.check
similarity index 100%
copy from test/files/specialized/spec-matrix.check
copy to test/files/specialized/spec-matrix-new.check
diff --git a/test/files/specialized/spec-matrix-new.scala b/test/files/specialized/spec-matrix-new.scala
new file mode 100644
index 0000000..e9a6e35
--- /dev/null
+++ b/test/files/specialized/spec-matrix-new.scala
@@ -0,0 +1,82 @@
+import scala.reflect.{ClassTag, classTag}
+
+/** Test matrix multiplication with specialization.
+ */
+
+class Matrix[@specialized A: ClassTag](val rows: Int, val cols: Int) {
+ private val arr: Array[Array[A]] = Array.ofDim[A](rows, cols)
+
+ def apply(i: Int, j: Int): A = {
+ if (i < 0 || i >= rows || j < 0 || j >= cols)
+ throw new NoSuchElementException("Indexes out of bounds: " + (i, j))
+
+ arr(i)(j)
+ }
+
+ def update(i: Int, j: Int, e: A) {
+ arr(i)(j) = e
+ }
+
+ def rowsIterator: Iterator[Array[A]] = new Iterator[Array[A]] {
+ var idx = 0;
+ def hasNext = idx < rows
+ def next = {
+ idx += 1
+ arr(idx - 1)
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ val m = randomMatrix(200, 100)
+ val n = randomMatrix(100, 200)
+
+ val p = mult(m, n)
+ println(p(0, 0))
+ println("Boxed doubles: " + runtime.BoxesRunTime.doubleBoxCount)
+// println("Boxed integers: " + runtime.BoxesRunTime.integerBoxCount)
+ }
+
+ def randomMatrix(n: Int, m: Int) = {
+ val r = new util.Random(10)
+ val x = new Matrix[Double](n, m)
+ for (i <- 0 until n; j <- 0 until m)
+ x(i, j) = (r.nextInt % 1000).toDouble
+ x
+ }
+
+ def printMatrix[Double](m: Matrix[Double]) {
+ for (i <- 0 until m.rows) {
+ for (j <- 0 until m.cols)
+ print("%5.3f ".format(m(i, j)))
+ println
+ }
+ }
+
+ def multTag[@specialized(Int) T](m: Matrix[T], n: Matrix[T])(implicit at: ClassTag[T], num: Numeric[T]) {
+ val p = new Matrix[T](m.rows, n.cols)
+ import num._
+
+ for (i <- 0 until m.rows)
+ for (j <- 0 until n.cols) {
+ var sum = num.zero
+ for (k <- 0 until n.rows)
+ sum += m(i, k) * n(k, j)
+ p(i, j) = sum
+ }
+ }
+
+ def mult(m: Matrix[Double], n: Matrix[Double]) = {
+ val p = new Matrix[Double](m.rows, n.cols)
+
+ for (i <- 0 until m.rows)
+ for (j <- 0 until n.cols) {
+ var sum = 0.0
+ for (k <- 0 until n.rows)
+ sum += m(i, k) * n(k, j)
+ p(i, j) = sum
+ }
+ p
+ }
+}
\ No newline at end of file
diff --git a/test/files/specialized/spec-matrix.check b/test/files/specialized/spec-matrix-old.check
similarity index 100%
rename from test/files/specialized/spec-matrix.check
rename to test/files/specialized/spec-matrix-old.check
diff --git a/test/files/specialized/spec-matrix-old.scala b/test/files/specialized/spec-matrix-old.scala
new file mode 100644
index 0000000..98735c8
--- /dev/null
+++ b/test/files/specialized/spec-matrix-old.scala
@@ -0,0 +1,80 @@
+/** Test matrix multiplication with specialization.
+ */
+
+class Matrix[@specialized A: ClassManifest](val rows: Int, val cols: Int) {
+ private val arr: Array[Array[A]] = Array.ofDim[A](rows, cols)
+
+ def apply(i: Int, j: Int): A = {
+ if (i < 0 || i >= rows || j < 0 || j >= cols)
+ throw new NoSuchElementException("Indexes out of bounds: " + (i, j))
+
+ arr(i)(j)
+ }
+
+ def update(i: Int, j: Int, e: A) {
+ arr(i)(j) = e
+ }
+
+ def rowsIterator: Iterator[Array[A]] = new Iterator[Array[A]] {
+ var idx = 0;
+ def hasNext = idx < rows
+ def next = {
+ idx += 1
+ arr(idx - 1)
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ val m = randomMatrix(200, 100)
+ val n = randomMatrix(100, 200)
+
+ val p = mult(m, n)
+ println(p(0, 0))
+ println("Boxed doubles: " + runtime.BoxesRunTime.doubleBoxCount)
+// println("Boxed integers: " + runtime.BoxesRunTime.integerBoxCount)
+ }
+
+ def randomMatrix(n: Int, m: Int) = {
+ val r = new util.Random(10)
+ val x = new Matrix[Double](n, m)
+ for (i <- 0 until n; j <- 0 until m)
+ x(i, j) = (r.nextInt % 1000).toDouble
+ x
+ }
+
+ def printMatrix[Double](m: Matrix[Double]) {
+ for (i <- 0 until m.rows) {
+ for (j <- 0 until m.cols)
+ print("%5.3f ".format(m(i, j)))
+ println
+ }
+ }
+
+ def multManifest[@specialized(Int) T](m: Matrix[T], n: Matrix[T])(implicit cm: ClassManifest[T], num: Numeric[T]) {
+ val p = new Matrix[T](m.rows, n.cols)
+ import num._
+
+ for (i <- 0 until m.rows)
+ for (j <- 0 until n.cols) {
+ var sum = num.zero
+ for (k <- 0 until n.rows)
+ sum += m(i, k) * n(k, j)
+ p(i, j) = sum
+ }
+ }
+
+ def mult(m: Matrix[Double], n: Matrix[Double]) = {
+ val p = new Matrix[Double](m.rows, n.cols)
+
+ for (i <- 0 until m.rows)
+ for (j <- 0 until n.cols) {
+ var sum = 0.0
+ for (k <- 0 until n.rows)
+ sum += m(i, k) * n(k, j)
+ p(i, j) = sum
+ }
+ p
+ }
+}
diff --git a/test/files/specialized/spec-matrix.scala b/test/files/specialized/spec-matrix.scala
deleted file mode 100644
index 9e04019..0000000
--- a/test/files/specialized/spec-matrix.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-/** Test matrix multiplication with specialization.
- */
-
-class Matrix[@specialized A: ClassManifest](val rows: Int, val cols: Int) {
- private val arr: Array[Array[A]] = new Array[Array[A]](rows, cols)
-
- def apply(i: Int, j: Int): A = {
- if (i < 0 || i >= rows || j < 0 || j >= cols)
- throw new NoSuchElementException("Indexes out of bounds: " + (i, j))
-
- arr(i)(j)
- }
-
- def update(i: Int, j: Int, e: A) {
- arr(i)(j) = e
- }
-
- def rowsIterator: Iterator[Array[A]] = new Iterator[Array[A]] {
- var idx = 0;
- def hasNext = idx < rows
- def next = {
- idx += 1
- arr(idx - 1)
- }
- }
-}
-
-object Test {
- def main(args: Array[String]) {
- val m = randomMatrix(200, 100)
- val n = randomMatrix(100, 200)
-
- val p = mult(m, n)
- println(p(0, 0))
- println("Boxed doubles: " + runtime.BoxesRunTime.doubleBoxCount)
-// println("Boxed integers: " + runtime.BoxesRunTime.integerBoxCount)
- }
-
- def randomMatrix(n: Int, m: Int) = {
- val r = new util.Random(10)
- val x = new Matrix[Double](n, m)
- for (i <- 0 until n; j <- 0 until m)
- x(i, j) = (r.nextInt % 1000).toDouble
- x
- }
-
- def printMatrix[Double](m: Matrix[Double]) {
- for (i <- 0 until m.rows) {
- for (j <- 0 until m.cols)
- print("%5.3f ".format(m(i, j)))
- println
- }
- }
-
- def multManifest[@specialized(Int) T](m: Matrix[T], n: Matrix[T])(implicit cm: ClassManifest[T], num: Numeric[T]) {
- val p = new Matrix[T](m.rows, n.cols)
- import num._
-
- for (i <- 0 until m.rows)
- for (j <- 0 until n.cols) {
- var sum = num.zero
- for (k <- 0 until n.rows)
- sum += m(i, k) * n(k, j)
- p(i, j) = sum
- }
- }
-
- def mult(m: Matrix[Double], n: Matrix[Double]) = {
- val p = new Matrix[Double](m.rows, n.cols)
-
- for (i <- 0 until m.rows)
- for (j <- 0 until n.cols) {
- var sum = 0.0
- for (k <- 0 until n.rows)
- sum += m(i, k) * n(k, j)
- p(i, j) = sum
- }
- p
- }
-}
diff --git a/test/files/specialized/spec-patmatch.check b/test/files/specialized/spec-patmatch.check
index 33306ab..a2746c0 100644
--- a/test/files/specialized/spec-patmatch.check
+++ b/test/files/specialized/spec-patmatch.check
@@ -17,4 +17,4 @@ long
double
float
default
-2
\ No newline at end of file
+10
diff --git a/test/files/specialized/t6035.check b/test/files/specialized/t6035.check
new file mode 100644
index 0000000..573541a
--- /dev/null
+++ b/test/files/specialized/t6035.check
@@ -0,0 +1 @@
+0
diff --git a/test/files/specialized/t6035/first_1.scala b/test/files/specialized/t6035/first_1.scala
new file mode 100644
index 0000000..1289e9f
--- /dev/null
+++ b/test/files/specialized/t6035/first_1.scala
@@ -0,0 +1,5 @@
+trait Foo[@specialized(Int) A] {
+ def foo(x: A): A
+}
+
+abstract class Inter extends Foo[Int]
diff --git a/test/files/specialized/t6035/second_2.scala b/test/files/specialized/t6035/second_2.scala
new file mode 100644
index 0000000..fb317e2
--- /dev/null
+++ b/test/files/specialized/t6035/second_2.scala
@@ -0,0 +1,13 @@
+class Baz extends Inter {
+ def foo(x: Int) = x + 1
+}
+
+object Test {
+ def main(args: Array[String]) {
+ // it's important that the type is Inter so we do not call Baz.foo(I)I directly!
+ val baz: Inter = new Baz
+ // here we should go through specialized version of foo and thus have zero boxing
+ baz.foo(1)
+ println(runtime.BoxesRunTime.integerBoxCount)
+ }
+}
diff --git a/test/files/speclib/instrumented.jar b/test/files/speclib/instrumented.jar
deleted file mode 100644
index ab4d76a..0000000
Binary files a/test/files/speclib/instrumented.jar and /dev/null differ
diff --git a/test/files/speclib/instrumented.jar.desired.sha1 b/test/files/speclib/instrumented.jar.desired.sha1
index 68114c2..9dd5771 100644
--- a/test/files/speclib/instrumented.jar.desired.sha1
+++ b/test/files/speclib/instrumented.jar.desired.sha1
@@ -1 +1 @@
-2546f965f6718b000c4e6ef73559c11084177bd8 ?instrumented.jar
+1b11ac773055c1e942c6b5eb4aabdf02292a7194 ?instrumented.jar
diff --git a/test/files/pos/t2868.cmds b/test/flaky/pos/t2868.cmds
similarity index 100%
rename from test/files/pos/t2868.cmds
rename to test/flaky/pos/t2868.cmds
diff --git a/test/files/pos/t2868/Jann.java b/test/flaky/pos/t2868/Jann.java
similarity index 100%
rename from test/files/pos/t2868/Jann.java
rename to test/flaky/pos/t2868/Jann.java
diff --git a/test/files/pos/t2868/Nest.java b/test/flaky/pos/t2868/Nest.java
similarity index 100%
rename from test/files/pos/t2868/Nest.java
rename to test/flaky/pos/t2868/Nest.java
diff --git a/test/flaky/pos/t2868/pick_1.scala b/test/flaky/pos/t2868/pick_1.scala
new file mode 100644
index 0000000..a211687
--- /dev/null
+++ b/test/flaky/pos/t2868/pick_1.scala
@@ -0,0 +1,7 @@
+class ann(s: String) extends annotation.StaticAnnotation
+class pick {
+ final val s = "bang!"
+ @ann("bang!") def foo = 1
+ @Jann(str = "bang!", inn = new Nest(1), arr = Array(1, 2)) def bar = 2
+ @Jann(str = "bang!", inn = new Nest(1), arr = Array(1, 2)) def baz = 3
+}
diff --git a/test/files/pos/t2868/t2868_src_2.scala b/test/flaky/pos/t2868/t2868_src_2.scala
similarity index 100%
rename from test/files/pos/t2868/t2868_src_2.scala
rename to test/flaky/pos/t2868/t2868_src_2.scala
diff --git a/test/instrumented/boxes.patch b/test/instrumented/boxes.patch
new file mode 100644
index 0000000..2bb3243
--- /dev/null
+++ b/test/instrumented/boxes.patch
@@ -0,0 +1,29 @@
+9c9
+<
+---
+> /* INSTRUMENTED VERSION */
+51a52,59
+> public static int booleanBoxCount = 0;
+> public static int characterBoxCount = 0;
+> public static int byteBoxCount = 0;
+> public static int shortBoxCount = 0;
+> public static int integerBoxCount = 0;
+> public static int longBoxCount = 0;
+> public static int floatBoxCount = 0;
+> public static int doubleBoxCount = 0;
+53a62
+> booleanBoxCount += 1;
+57a67
+> characterBoxCount += 1;
+61a72
+> byteBoxCount += 1;
+65a77
+> shortBoxCount += 1;
+69a82
+> integerBoxCount += 1;
+73a87
+> longBoxCount += 1;
+77a92
+> floatBoxCount += 1;
+83a99
+> doubleBoxCount += 1;
diff --git a/test/instrumented/library/scala/runtime/BoxesRunTime.java b/test/instrumented/library/scala/runtime/BoxesRunTime.java
index 797e9f8..57799bd 100644
--- a/test/instrumented/library/scala/runtime/BoxesRunTime.java
+++ b/test/instrumented/library/scala/runtime/BoxesRunTime.java
@@ -1,15 +1,13 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
/* INSTRUMENTED VERSION */
-
package scala.runtime;
import java.io.*;
@@ -30,26 +28,27 @@ import scala.math.ScalaNumber;
* @contributor Stepan Koltsov
* @version 2.0 */
public final class BoxesRunTime
-{
+{
private static final int CHAR = 0, BYTE = 1, SHORT = 2, INT = 3, LONG = 4, FLOAT = 5, DOUBLE = 6, OTHER = 7;
-
+
+ /** We don't need to return BYTE and SHORT, as everything which might
+ * care widens to INT.
+ */
private static int typeCode(Object a) {
if (a instanceof java.lang.Integer) return INT;
- if (a instanceof java.lang.Byte) return BYTE;
- if (a instanceof java.lang.Character) return CHAR;
- if (a instanceof java.lang.Long) return LONG;
if (a instanceof java.lang.Double) return DOUBLE;
- if (a instanceof java.lang.Short) return SHORT;
+ if (a instanceof java.lang.Long) return LONG;
+ if (a instanceof java.lang.Character) return CHAR;
if (a instanceof java.lang.Float) return FLOAT;
+ if ((a instanceof java.lang.Byte) || (a instanceof java.lang.Short)) return INT;
return OTHER;
}
-
+
private static String boxDescription(Object a) {
return "" + a.getClass().getSimpleName() + "(" + a + ")";
}
-
+
/* BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING */
-
public static int booleanBoxCount = 0;
public static int characterBoxCount = 0;
public static int byteBoxCount = 0;
@@ -58,63 +57,63 @@ public final class BoxesRunTime
public static int longBoxCount = 0;
public static int floatBoxCount = 0;
public static int doubleBoxCount = 0;
-
+
public static java.lang.Boolean boxToBoolean(boolean b) {
- booleanBoxCount++;
+ booleanBoxCount += 1;
return java.lang.Boolean.valueOf(b);
}
-
+
public static java.lang.Character boxToCharacter(char c) {
- characterBoxCount++;
+ characterBoxCount += 1;
return java.lang.Character.valueOf(c);
}
-
+
public static java.lang.Byte boxToByte(byte b) {
- byteBoxCount++;
+ byteBoxCount += 1;
return java.lang.Byte.valueOf(b);
}
-
+
public static java.lang.Short boxToShort(short s) {
- shortBoxCount++;
+ shortBoxCount += 1;
return java.lang.Short.valueOf(s);
}
-
+
public static java.lang.Integer boxToInteger(int i) {
- integerBoxCount++;
+ integerBoxCount += 1;
return java.lang.Integer.valueOf(i);
}
-
+
public static java.lang.Long boxToLong(long l) {
- longBoxCount++;
+ longBoxCount += 1;
return java.lang.Long.valueOf(l);
}
-
+
public static java.lang.Float boxToFloat(float f) {
- floatBoxCount++;
+ floatBoxCount += 1;
return java.lang.Float.valueOf(f);
}
-
+
public static java.lang.Double boxToDouble(double d) {
- doubleBoxCount++;
// System.out.println("box " + d);
// (new Throwable()).printStackTrace();
+ doubleBoxCount += 1;
return java.lang.Double.valueOf(d);
}
-
+
/* UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING */
-
+
public static boolean unboxToBoolean(Object b) {
return b == null ? false : ((java.lang.Boolean)b).booleanValue();
}
-
+
public static char unboxToChar(Object c) {
return c == null ? 0 : ((java.lang.Character)c).charValue();
}
-
+
public static byte unboxToByte(Object b) {
return b == null ? 0 : ((java.lang.Byte)b).byteValue();
}
-
+
public static short unboxToShort(Object s) {
return s == null ? 0 : ((java.lang.Short)s).shortValue();
}
@@ -122,32 +121,23 @@ public final class BoxesRunTime
public static int unboxToInt(Object i) {
return i == null ? 0 : ((java.lang.Integer)i).intValue();
}
-
+
public static long unboxToLong(Object l) {
return l == null ? 0 : ((java.lang.Long)l).longValue();
}
-
+
public static float unboxToFloat(Object f) {
return f == null ? 0.0f : ((java.lang.Float)f).floatValue();
}
-
+
public static double unboxToDouble(Object d) {
// System.out.println("unbox " + d);
return d == null ? 0.0d : ((java.lang.Double)d).doubleValue();
}
/* COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON */
-
- private static int eqTypeCode(Number a) {
- if ((a instanceof java.lang.Integer) || (a instanceof java.lang.Byte)) return INT;
- if (a instanceof java.lang.Long) return LONG;
- if (a instanceof java.lang.Double) return DOUBLE;
- if (a instanceof java.lang.Short) return INT;
- if (a instanceof java.lang.Float) return FLOAT;
- return OTHER;
- }
-
- public static boolean equals(Object x, Object y) {
+
+ public static boolean equals(Object x, Object y) {
if (x == y) return true;
return equals2(x, y);
}
@@ -162,10 +152,10 @@ public final class BoxesRunTime
return equalsCharObject((java.lang.Character)x, y);
if (x == null)
return y == null;
-
+
return x.equals(y);
}
-
+
public static boolean equalsNumObject(java.lang.Number xn, Object y) {
if (y instanceof java.lang.Number)
return equalsNumNum(xn, (java.lang.Number)y);
@@ -173,13 +163,13 @@ public final class BoxesRunTime
return equalsNumChar(xn, (java.lang.Character)y);
if (xn == null)
return y == null;
-
+
return xn.equals(y);
}
-
+
public static boolean equalsNumNum(java.lang.Number xn, java.lang.Number yn) {
- int xcode = eqTypeCode(xn);
- int ycode = eqTypeCode(yn);
+ int xcode = typeCode(xn);
+ int ycode = typeCode(yn);
switch (ycode > xcode ? ycode : xcode) {
case INT:
return xn.intValue() == yn.intValue();
@@ -195,10 +185,10 @@ public final class BoxesRunTime
}
if (xn == null)
return yn == null;
-
+
return xn.equals(yn);
}
-
+
public static boolean equalsCharObject(java.lang.Character xc, Object y) {
if (y instanceof java.lang.Character)
return xc.charValue() == ((java.lang.Character)y).charValue();
@@ -206,13 +196,16 @@ public final class BoxesRunTime
return equalsNumChar((java.lang.Number)y, xc);
if (xc == null)
return y == null;
-
+
return xc.equals(y);
}
private static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) {
+ if (yc == null)
+ return xn == null;
+
char ch = yc.charValue();
- switch (eqTypeCode(xn)) {
+ switch (typeCode(xn)) {
case INT:
return xn.intValue() == ch;
case LONG:
@@ -222,13 +215,10 @@ public final class BoxesRunTime
case DOUBLE:
return xn.doubleValue() == ch;
default:
- if (xn == null)
- return yc == null;
-
return xn.equals(yc);
}
}
-
+
/** Hashcode algorithm is driven by the requirements imposed
* by primitive equality semantics, namely that equal objects
* have equal hashCodes. The first priority are the integral/char
@@ -254,7 +244,7 @@ public final class BoxesRunTime
* as yet have not.
*
* Note: Among primitives, Float.NaN != Float.NaN, but the boxed
- * verisons are equal. This still needs reconciliation.
+ * versions are equal. This still needs reconciliation.
*/
public static int hashFromLong(java.lang.Long n) {
int iv = n.intValue();
@@ -262,16 +252,19 @@ public final class BoxesRunTime
else return n.hashCode();
}
public static int hashFromDouble(java.lang.Double n) {
- int iv = n.intValue();
+ int iv = n.intValue();
double dv = n.doubleValue();
if (iv == dv) return iv;
-
+
long lv = n.longValue();
if (lv == dv) return java.lang.Long.valueOf(lv).hashCode();
+
+ float fv = n.floatValue();
+ if (fv == dv) return java.lang.Float.valueOf(fv).hashCode();
else return n.hashCode();
}
public static int hashFromFloat(java.lang.Float n) {
- int iv = n.intValue();
+ int iv = n.intValue();
float fv = n.floatValue();
if (iv == fv) return iv;
@@ -289,33 +282,50 @@ public final class BoxesRunTime
if (a instanceof Number) return hashFromNumber((Number)a);
else return a.hashCode();
}
-
+
+ private static int unboxCharOrInt(Object arg1, int code) {
+ if (code == CHAR)
+ return ((java.lang.Character) arg1).charValue();
+ else
+ return ((java.lang.Number) arg1).intValue();
+ }
+ private static long unboxCharOrLong(Object arg1, int code) {
+ if (code == CHAR)
+ return ((java.lang.Character) arg1).charValue();
+ else
+ return ((java.lang.Number) arg1).longValue();
+ }
+ private static float unboxCharOrFloat(Object arg1, int code) {
+ if (code == CHAR)
+ return ((java.lang.Character) arg1).charValue();
+ else
+ return ((java.lang.Number) arg1).floatValue();
+ }
+ private static double unboxCharOrDouble(Object arg1, int code) {
+ if (code == CHAR)
+ return ((java.lang.Character) arg1).charValue();
+ else
+ return ((java.lang.Number) arg1).doubleValue();
+ }
+
/* OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS */
-
+
/** arg1 + arg2 */
public static Object add(Object arg1, Object arg2) throws NoSuchMethodException {
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 + val2);
+ return boxToInteger(unboxCharOrInt(arg1, code1) + unboxCharOrInt(arg2, code2));
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 + val2);
+ return boxToLong(unboxCharOrLong(arg1, code1) + unboxCharOrLong(arg2, code2));
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
- return boxToFloat(val1 + val2);
+ return boxToFloat(unboxCharOrFloat(arg1, code1) + unboxCharOrFloat(arg2, code2));
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
- return boxToDouble(val1 + val2);
+ return boxToDouble(unboxCharOrDouble(arg1, code1) + unboxCharOrDouble(arg2, code2));
}
throw new NoSuchMethodException();
}
@@ -326,24 +336,16 @@ public final class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 - val2);
+ return boxToInteger(unboxCharOrInt(arg1, code1) - unboxCharOrInt(arg2, code2));
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 - val2);
+ return boxToLong(unboxCharOrLong(arg1, code1) - unboxCharOrLong(arg2, code2));
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
- return boxToFloat(val1 - val2);
+ return boxToFloat(unboxCharOrFloat(arg1, code1) - unboxCharOrFloat(arg2, code2));
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
- return boxToDouble(val1 - val2);
+ return boxToDouble(unboxCharOrDouble(arg1, code1) - unboxCharOrDouble(arg2, code2));
}
throw new NoSuchMethodException();
}
@@ -354,24 +356,16 @@ public final class BoxesRunTime
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 * val2);
+ return boxToInteger(unboxCharOrInt(arg1, code1) * unboxCharOrInt(arg2, code2));
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 * val2);
+ return boxToLong(unboxCharOrLong(arg1, code1) * unboxCharOrLong(arg2, code2));
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
- return boxToFloat(val1 * val2);
+ return boxToFloat(unboxCharOrFloat(arg1, code1) * unboxCharOrFloat(arg2, code2));
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
- return boxToDouble(val1 * val2);
+ return boxToDouble(unboxCharOrDouble(arg1, code1) * unboxCharOrDouble(arg2, code2));
}
throw new NoSuchMethodException();
}
@@ -381,26 +375,16 @@ public final class BoxesRunTime
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
- if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 / val2);
- }
- if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 / val2);
- }
- if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
- return boxToFloat(val1 / val2);
- }
- if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
- return boxToDouble(val1 / val2);
- }
+
+ if (maxcode <= INT)
+ return boxToInteger(unboxCharOrInt(arg1, code1) / unboxCharOrInt(arg2, code2));
+ if (maxcode <= LONG)
+ return boxToLong(unboxCharOrLong(arg1, code1) / unboxCharOrLong(arg2, code2));
+ if (maxcode <= FLOAT)
+ return boxToFloat(unboxCharOrFloat(arg1, code1) / unboxCharOrFloat(arg2, code2));
+ if (maxcode <= DOUBLE)
+ return boxToDouble(unboxCharOrDouble(arg1, code1) / unboxCharOrDouble(arg2, code2));
+
throw new NoSuchMethodException();
}
@@ -409,26 +393,16 @@ public final class BoxesRunTime
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
- if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 % val2);
- }
- if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 % val2);
- }
- if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
- return boxToFloat(val1 % val2);
- }
- if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
- return boxToDouble(val1 % val2);
- }
+
+ if (maxcode <= INT)
+ return boxToInteger(unboxCharOrInt(arg1, code1) % unboxCharOrInt(arg2, code2));
+ if (maxcode <= LONG)
+ return boxToLong(unboxCharOrLong(arg1, code1) % unboxCharOrLong(arg2, code2));
+ if (maxcode <= FLOAT)
+ return boxToFloat(unboxCharOrFloat(arg1, code1) % unboxCharOrFloat(arg2, code2));
+ if (maxcode <= DOUBLE)
+ return boxToDouble(unboxCharOrDouble(arg1, code1) % unboxCharOrDouble(arg2, code2));
+
throw new NoSuchMethodException();
}
@@ -437,24 +411,24 @@ public final class BoxesRunTime
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
if (code1 <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val1 = unboxCharOrInt(arg1, code1);
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToInteger(val1 >> val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToInteger(val1 >> val2);
}
}
if (code1 <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val1 = unboxCharOrLong(arg1, code1);
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToLong(val1 >> val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToLong(val1 >> val2);
}
}
@@ -466,24 +440,24 @@ public final class BoxesRunTime
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
if (code1 <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val1 = unboxCharOrInt(arg1, code1);
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToInteger(val1 << val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToInteger(val1 << val2);
}
}
if (code1 <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val1 = unboxCharOrLong(arg1, code1);
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToLong(val1 << val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToLong(val1 << val2);
}
}
@@ -495,70 +469,66 @@ public final class BoxesRunTime
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
if (code1 <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
+ int val1 = unboxCharOrInt(arg1, code1);
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToInteger(val1 >>> val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToInteger(val1 >>> val2);
}
}
if (code1 <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
+ long val1 = unboxCharOrLong(arg1, code1);
if (code2 <= INT) {
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToLong(val1 >>> val2);
}
if (code2 <= LONG) {
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToLong(val1 >>> val2);
}
}
throw new NoSuchMethodException();
}
-
+
/** -arg */
public static Object negate(Object arg) throws NoSuchMethodException {
int code = typeCode(arg);
if (code <= INT) {
- int val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).intValue();
+ int val = unboxCharOrInt(arg, code);
return boxToInteger(-val);
}
if (code <= LONG) {
- long val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).longValue();
+ long val = unboxCharOrLong(arg, code);
return boxToLong(-val);
}
if (code <= FLOAT) {
- float val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).floatValue();
+ float val = unboxCharOrFloat(arg, code);
return boxToFloat(-val);
}
if (code <= DOUBLE) {
- double val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).doubleValue();
+ double val = unboxCharOrDouble(arg, code);
return boxToDouble(-val);
}
throw new NoSuchMethodException();
}
-
+
/** +arg */
public static Object positive(Object arg) throws NoSuchMethodException {
int code = typeCode(arg);
if (code <= INT) {
- int val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).intValue();
- return boxToInteger(+val);
+ return boxToInteger(+unboxCharOrInt(arg, code));
}
if (code <= LONG) {
- long val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).longValue();
- return boxToLong(+val);
+ return boxToLong(+unboxCharOrLong(arg, code));
}
if (code <= FLOAT) {
- float val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).floatValue();
- return boxToFloat(+val);
+ return boxToFloat(+unboxCharOrFloat(arg, code));
}
if (code <= DOUBLE) {
- double val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).doubleValue();
- return boxToDouble(+val);
+ return boxToDouble(+unboxCharOrDouble(arg, code));
}
throw new NoSuchMethodException();
}
@@ -566,72 +536,60 @@ public final class BoxesRunTime
/** arg1 & arg2 */
public static Object takeAnd(Object arg1, Object arg2) throws NoSuchMethodException {
if ((arg1 instanceof Boolean) || (arg2 instanceof Boolean)) {
- if (!((arg1 instanceof Boolean) && (arg2 instanceof Boolean))) {
+ if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean))
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() & ((java.lang.Boolean) arg2).booleanValue());
+ else
throw new NoSuchMethodException();
- }
- return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() & ((java.lang.Boolean) arg2).booleanValue());
}
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
- if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 & val2);
- }
- if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 & val2);
- }
+
+ if (maxcode <= INT)
+ return boxToInteger(unboxCharOrInt(arg1, code1) & unboxCharOrInt(arg2, code2));
+ if (maxcode <= LONG)
+ return boxToLong(unboxCharOrLong(arg1, code1) & unboxCharOrLong(arg2, code2));
+
throw new NoSuchMethodException();
}
/** arg1 | arg2 */
public static Object takeOr(Object arg1, Object arg2) throws NoSuchMethodException {
if ((arg1 instanceof Boolean) || (arg2 instanceof Boolean)) {
- if (!((arg1 instanceof Boolean) && (arg2 instanceof Boolean))) {
+ if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean))
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() | ((java.lang.Boolean) arg2).booleanValue());
+ else
throw new NoSuchMethodException();
- }
- return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() | ((java.lang.Boolean) arg2).booleanValue());
}
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
- if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 | val2);
- }
- if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 | val2);
- }
+
+ if (maxcode <= INT)
+ return boxToInteger(unboxCharOrInt(arg1, code1) | unboxCharOrInt(arg2, code2));
+ if (maxcode <= LONG)
+ return boxToLong(unboxCharOrLong(arg1, code1) | unboxCharOrLong(arg2, code2));
+
throw new NoSuchMethodException();
}
/** arg1 ^ arg2 */
public static Object takeXor(Object arg1, Object arg2) throws NoSuchMethodException {
if ((arg1 instanceof Boolean) || (arg2 instanceof Boolean)) {
- if (!((arg1 instanceof Boolean) && (arg2 instanceof Boolean))) {
+ if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean))
+ return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() ^ ((java.lang.Boolean) arg2).booleanValue());
+ else
throw new NoSuchMethodException();
- }
- return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() ^ ((java.lang.Boolean) arg2).booleanValue());
}
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
- if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
- return boxToInteger(val1 ^ val2);
- }
- if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
- return boxToLong(val1 ^ val2);
- }
+
+ if (maxcode <= INT)
+ return boxToInteger(unboxCharOrInt(arg1, code1) ^ unboxCharOrInt(arg2, code2));
+ if (maxcode <= LONG)
+ return boxToLong(unboxCharOrLong(arg1, code1) ^ unboxCharOrLong(arg2, code2));
+
throw new NoSuchMethodException();
}
@@ -650,21 +608,19 @@ public final class BoxesRunTime
}
throw new NoSuchMethodException();
}
-
+
/** ~arg */
public static Object complement(Object arg) throws NoSuchMethodException {
int code = typeCode(arg);
if (code <= INT) {
- int val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).intValue();
- return boxToInteger(~val);
+ return boxToInteger(~unboxCharOrInt(arg, code));
}
if (code <= LONG) {
- long val = (code == CHAR) ? ((java.lang.Character) arg).charValue() : ((java.lang.Number) arg).longValue();
- return boxToLong(~val);
+ return boxToLong(~unboxCharOrLong(arg, code));
}
throw new NoSuchMethodException();
}
-
+
/** !arg */
public static Object takeNot(Object arg) throws NoSuchMethodException {
if (arg instanceof Boolean) {
@@ -672,123 +628,138 @@ public final class BoxesRunTime
}
throw new NoSuchMethodException();
}
-
+
public static Object testEqual(Object arg1, Object arg2) throws NoSuchMethodException {
return boxToBoolean(arg1 == arg2);
}
-
+
public static Object testNotEqual(Object arg1, Object arg2) throws NoSuchMethodException {
return boxToBoolean(arg1 != arg2);
}
-
+
public static Object testLessThan(Object arg1, Object arg2) throws NoSuchMethodException {
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val1 = unboxCharOrInt(arg1, code1);
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToBoolean(val1 < val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val1 = unboxCharOrLong(arg1, code1);
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToBoolean(val1 < val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
+ float val1 = unboxCharOrFloat(arg1, code1);
+ float val2 = unboxCharOrFloat(arg2, code2);
return boxToBoolean(val1 < val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
+ double val1 = unboxCharOrDouble(arg1, code1);
+ double val2 = unboxCharOrDouble(arg2, code2);
return boxToBoolean(val1 < val2);
}
throw new NoSuchMethodException();
}
-
+
public static Object testLessOrEqualThan(Object arg1, Object arg2) throws NoSuchMethodException {
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val1 = unboxCharOrInt(arg1, code1);
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToBoolean(val1 <= val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val1 = unboxCharOrLong(arg1, code1);
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToBoolean(val1 <= val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
+ float val1 = unboxCharOrFloat(arg1, code1);
+ float val2 = unboxCharOrFloat(arg2, code2);
return boxToBoolean(val1 <= val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
+ double val1 = unboxCharOrDouble(arg1, code1);
+ double val2 = unboxCharOrDouble(arg2, code2);
return boxToBoolean(val1 <= val2);
}
throw new NoSuchMethodException();
}
-
+
public static Object testGreaterOrEqualThan(Object arg1, Object arg2) throws NoSuchMethodException {
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val1 = unboxCharOrInt(arg1, code1);
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToBoolean(val1 >= val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val1 = unboxCharOrLong(arg1, code1);
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToBoolean(val1 >= val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
+ float val1 = unboxCharOrFloat(arg1, code1);
+ float val2 = unboxCharOrFloat(arg2, code2);
return boxToBoolean(val1 >= val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
+ double val1 = unboxCharOrDouble(arg1, code1);
+ double val2 = unboxCharOrDouble(arg2, code2);
return boxToBoolean(val1 >= val2);
}
throw new NoSuchMethodException();
}
-
+
public static Object testGreaterThan(Object arg1, Object arg2) throws NoSuchMethodException {
int code1 = typeCode(arg1);
int code2 = typeCode(arg2);
int maxcode = (code1 < code2) ? code2 : code1;
if (maxcode <= INT) {
- int val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).intValue();
- int val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).intValue();
+ int val1 = unboxCharOrInt(arg1, code1);
+ int val2 = unboxCharOrInt(arg2, code2);
return boxToBoolean(val1 > val2);
}
if (maxcode <= LONG) {
- long val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).longValue();
- long val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).longValue();
+ long val1 = unboxCharOrLong(arg1, code1);
+ long val2 = unboxCharOrLong(arg2, code2);
return boxToBoolean(val1 > val2);
}
if (maxcode <= FLOAT) {
- float val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).floatValue();
- float val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).floatValue();
+ float val1 = unboxCharOrFloat(arg1, code1);
+ float val2 = unboxCharOrFloat(arg2, code2);
return boxToBoolean(val1 > val2);
}
if (maxcode <= DOUBLE) {
- double val1 = (code1 == CHAR) ? ((java.lang.Character) arg1).charValue() : ((java.lang.Number) arg1).doubleValue();
- double val2 = (code2 == CHAR) ? ((java.lang.Character) arg2).charValue() : ((java.lang.Number) arg2).doubleValue();
+ double val1 = unboxCharOrDouble(arg1, code1);
+ double val2 = unboxCharOrDouble(arg2, code2);
return boxToBoolean(val1 > val2);
}
throw new NoSuchMethodException();
}
-
+
+ public static boolean isBoxedNumberOrBoolean(Object arg) {
+ return (arg instanceof java.lang.Boolean) || isBoxedNumber(arg);
+ }
+ public static boolean isBoxedNumber(Object arg) {
+ return (
+ (arg instanceof java.lang.Integer)
+ || (arg instanceof java.lang.Long)
+ || (arg instanceof java.lang.Double)
+ || (arg instanceof java.lang.Float)
+ || (arg instanceof java.lang.Short)
+ || (arg instanceof java.lang.Character)
+ || (arg instanceof java.lang.Byte)
+ );
+ }
+
/** arg.toChar */
public static java.lang.Character toCharacter(Object arg) throws NoSuchMethodException {
if (arg instanceof java.lang.Integer) return boxToCharacter((char)unboxToInt(arg));
@@ -872,5 +843,5 @@ public final class BoxesRunTime
if (arg instanceof java.lang.Short) return boxToDouble((double)unboxToShort(arg));
throw new NoSuchMethodException();
}
-
+
}
diff --git a/test/instrumented/library/scala/runtime/ScalaRunTime.scala b/test/instrumented/library/scala/runtime/ScalaRunTime.scala
index a8a74dd..e474ae7 100644
--- a/test/instrumented/library/scala/runtime/ScalaRunTime.scala
+++ b/test/instrumented/library/scala/runtime/ScalaRunTime.scala
@@ -1,41 +1,75 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
/* INSTRUMENTED VERSION */
+package scala
+package runtime
-package scala.runtime
-
-import scala.reflect.ClassManifest
-import scala.collection.{ Seq, IndexedSeq, TraversableView }
+import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator }
import scala.collection.mutable.WrappedArray
-import scala.collection.immutable.{ NumericRange, List, Stream, Nil, :: }
+import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: }
import scala.collection.generic.{ Sorted }
-import scala.xml.{ Node, MetaData }
+import scala.reflect.{ ClassTag, classTag }
import scala.util.control.ControlThrowable
+import scala.xml.{ Node, MetaData }
+import java.lang.{ Class => jClass }
+
+import java.lang.Double.doubleToLongBits
import java.lang.reflect.{ Modifier, Method => JMethod }
-/* The object <code>ScalaRunTime</code> provides ...
+/** The object ScalaRunTime provides support methods required by
+ * the scala runtime. All these methods should be considered
+ * outside the API and subject to change or removal without notice.
*/
object ScalaRunTime {
def isArray(x: AnyRef): Boolean = isArray(x, 1)
- def isArray(x: Any, atLevel: Int): Boolean =
- x != null && isArrayClass(x.asInstanceOf[AnyRef].getClass, atLevel)
+ def isArray(x: Any, atLevel: Int): Boolean =
+ x != null && isArrayClass(x.getClass, atLevel)
- private def isArrayClass(clazz: Class[_], atLevel: Int): Boolean =
+ private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean =
clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1))
- def isValueClass(clazz: Class[_]) = clazz.isPrimitive()
-
+ def isValueClass(clazz: jClass[_]) = clazz.isPrimitive()
+
+ // includes specialized subclasses and future proofed against hypothetical TupleN (for N > 22)
+ def isTuple(x: Any) = x != null && x.getClass.getName.startsWith("scala.Tuple")
+ def isAnyVal(x: Any) = x match {
+ case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true
+ case _ => false
+ }
+
+ /** Return the class object representing an array with element class `clazz`.
+ */
+ def arrayClass(clazz: jClass[_]): jClass[_] = {
+ // newInstance throws an exception if the erasure is Void.TYPE. see SI-5680
+ if (clazz == java.lang.Void.TYPE) classOf[Array[Unit]]
+ else java.lang.reflect.Array.newInstance(clazz, 0).getClass
+ }
+
+ /** Return the class object representing elements in arrays described by a given schematic.
+ */
+ def arrayElementClass(schematic: Any): jClass[_] = schematic match {
+ case cls: jClass[_] => cls.getComponentType
+ case tag: ClassTag[_] => tag.runtimeClass
+ case _ =>
+ throw new UnsupportedOperationException(s"unsupported schematic $schematic (${schematic.getClass})")
+ }
+
+ /** Return the class object representing an unboxed value type,
+ * e.g. classOf[int], not classOf[java.lang.Integer]. The compiler
+ * rewrites expressions like 5.getClass to come here.
+ */
+ def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] =
+ classTag[T].runtimeClass.asInstanceOf[jClass[T]]
+
var arrayApplyCount = 0
- var arrayUpdateCount = 0
-
+
/** Retrieve generic array element */
def array_apply(xs: AnyRef, idx: Int): Any = {
arrayApplyCount += 1
@@ -56,7 +90,7 @@ object ScalaRunTime {
/** update generic array element */
def array_update(xs: AnyRef, idx: Int, value: Any): Unit = {
- arrayUpdateCount += 1
+ arrayApplyCount += 1
xs match {
case x: Array[AnyRef] => x(idx) = value.asInstanceOf[AnyRef]
case x: Array[Int] => x(idx) = value.asInstanceOf[Int]
@@ -85,7 +119,7 @@ object ScalaRunTime {
case x: Array[Boolean] => x.length
case x: Array[Unit] => x.length
case null => throw new NullPointerException
- }
+ }
def array_clone(xs: AnyRef): AnyRef = xs match {
case x: Array[AnyRef] => ArrayRuntime.cloneArray(x)
@@ -101,19 +135,21 @@ object ScalaRunTime {
case null => throw new NullPointerException
}
- /** Convert a numeric value array to an object array.
+ /** Convert an array to an object array.
* Needed to deal with vararg arguments of primitive types that are passed
* to a generic Java vararg parameter T ...
*/
- def toObjectArray(src: AnyRef): Array[Object] = {
- val length = array_length(src)
- val dest = new Array[Object](length)
- for (i <- 0 until length)
- array_update(dest, i, array_apply(src, i))
- dest
+ def toObjectArray(src: AnyRef): Array[Object] = src match {
+ case x: Array[AnyRef] => x
+ case _ =>
+ val length = array_length(src)
+ val dest = new Array[Object](length)
+ for (i <- 0 until length)
+ array_update(dest, i, array_apply(src, i))
+ dest
}
- def toArray[T](xs: collection.Seq[T]) = {
+ def toArray[T](xs: scala.collection.Seq[T]) = {
val arr = new Array[AnyRef](xs.length)
var i = 0
for (x <- xs) {
@@ -122,7 +158,7 @@ object ScalaRunTime {
}
arr
}
-
+
// Java bug: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4071957
// More background at ticket #2318.
def ensureAccessible(m: JMethod): JMethod = {
@@ -130,64 +166,34 @@ object ScalaRunTime {
try m setAccessible true
catch { case _: SecurityException => () }
}
- m
+ m
}
- def checkInitialized[T <: AnyRef](x: T): T =
+ def checkInitialized[T <: AnyRef](x: T): T =
if (x == null) throw new UninitializedError else x
- abstract class Try[+A] {
- def Catch[B >: A](handler: PartialFunction[Throwable, B]): B
- def Finally(fin: => Unit): A
- }
-
- def Try[A](block: => A): Try[A] = new Try[A] with Runnable {
- private var result: A = _
- private var exception: Throwable =
- try { run() ; null }
- catch {
- case e: ControlThrowable => throw e // don't catch non-local returns etc
- case e: Throwable => e
- }
-
- def run() { result = block }
-
- def Catch[B >: A](handler: PartialFunction[Throwable, B]): B =
- if (exception == null) result
- else if (handler isDefinedAt exception) handler(exception)
- else throw exception
-
- def Finally(fin: => Unit): A = {
- fin
-
- if (exception == null) result
- else throw exception
- }
- }
-
def _toString(x: Product): String =
x.productIterator.mkString(x.productPrefix + "(", ",", ")")
-
- def _hashCode(x: Product): Int = {
- import scala.util.MurmurHash._
- val arr = x.productArity
- var h = startHash(arr)
- var c = startMagicA
- var k = startMagicB
- var i = 0
- while (i < arr) {
- val elem = x.productElement(i)
- h = extendHash(h, if (elem == null) 0 else elem.##, c, k)
- c = nextMagicA(c)
- k = nextMagicB(k)
- i += 1
+
+ def _hashCode(x: Product): Int = scala.util.hashing.MurmurHash3.productHash(x)
+
+ /** A helper for case classes. */
+ def typedProductIterator[T](x: Product): Iterator[T] = {
+ new AbstractIterator[T] {
+ private var c: Int = 0
+ private val cmax = x.productArity
+ def hasNext = c < cmax
+ def next() = {
+ val result = x.productElement(c)
+ c += 1
+ result.asInstanceOf[T]
+ }
}
- finalizeHash(h)
}
/** Fast path equality method for inlining; used when -optimise is set.
*/
- @inline def inlinedEquals(x: Object, y: Object): Boolean =
+ @inline def inlinedEquals(x: Object, y: Object): Boolean =
if (x eq y) true
else if (x eq null) false
else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.equalsNumObject(x.asInstanceOf[java.lang.Number], y)
@@ -198,61 +204,58 @@ object ScalaRunTime {
case y: Product if x.productArity == y.productArity => x.productIterator sameElements y.productIterator
case _ => false
}
-
+
// hashcode -----------------------------------------------------------
//
// Note that these are the implementations called by ##, so they
// must not call ## themselves.
-
- @inline def hash(x: Any): Int =
- if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.hashFromNumber(x.asInstanceOf[java.lang.Number])
+
+ def hash(x: Any): Int =
+ if (x == null) 0
+ else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.hashFromNumber(x.asInstanceOf[java.lang.Number])
else x.hashCode
-
- @inline def hash(dv: Double): Int = {
+
+ def hash(dv: Double): Int = {
val iv = dv.toInt
if (iv == dv) return iv
-
+
val lv = dv.toLong
if (lv == dv) return lv.hashCode
val fv = dv.toFloat
if (fv == dv) fv.hashCode else dv.hashCode
}
- @inline def hash(fv: Float): Int = {
+ def hash(fv: Float): Int = {
val iv = fv.toInt
if (iv == fv) return iv
-
+
val lv = fv.toLong
- if (lv == fv) return lv.hashCode
+ if (lv == fv) return hash(lv)
else fv.hashCode
}
- @inline def hash(lv: Long): Int = {
- val iv = lv.toInt
- if (iv == lv) iv else lv.hashCode
- }
- @inline def hash(x: Int): Int = x
- @inline def hash(x: Short): Int = x.toInt
- @inline def hash(x: Byte): Int = x.toInt
- @inline def hash(x: Char): Int = x.toInt
- @inline def hash(x: Boolean): Int = x.hashCode
- @inline def hash(x: Unit): Int = 0
-
- @inline def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x)
-
- /** XXX Why is there one boxed implementation in here? It would seem
- * we should have all the numbers or none of them.
- */
- @inline def hash(x: java.lang.Long): Int = {
- val iv = x.intValue
- if (iv == x.longValue) iv else x.hashCode
+ def hash(lv: Long): Int = {
+ val low = lv.toInt
+ val lowSign = low >>> 31
+ val high = (lv >>> 32).toInt
+ low ^ (high + lowSign)
}
+ def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x)
+
+ // The remaining overloads are here for completeness, but the compiler
+ // inlines these definitions directly so they're not generally used.
+ def hash(x: Int): Int = x
+ def hash(x: Short): Int = x.toInt
+ def hash(x: Byte): Int = x.toInt
+ def hash(x: Char): Int = x.toInt
+ def hash(x: Boolean): Int = if (x) true.hashCode else false.hashCode
+ def hash(x: Unit): Int = 0
/** A helper method for constructing case class equality methods,
* because existential types get in the way of a clean outcome and
* it's performing a series of Any/Any equals comparisons anyway.
* See ticket #2867 for specifics.
*/
- def sameElements(xs1: collection.Seq[Any], xs2: collection.Seq[Any]) = xs1 sameElements xs2
+ def sameElements(xs1: scala.collection.Seq[Any], xs2: scala.collection.Seq[Any]) = xs1 sameElements xs2
/** Given any Scala value, convert it to a String.
*
@@ -263,17 +266,17 @@ object ScalaRunTime {
* called on null and (b) depending on the apparent type of an
* array, toString may or may not print it in a human-readable form.
*
- * @param arg the value to stringify
- * @return a string representation of <code>arg</code>
- *
- */
+ * @param arg the value to stringify
+ * @return a string representation of arg.
+ */
def stringOf(arg: Any): String = stringOf(arg, scala.Int.MaxValue)
- def stringOf(arg: Any, maxElements: Int): String = {
- def isScalaClass(x: AnyRef) =
- Option(x.getClass.getPackage) exists (_.getName startsWith "scala.")
-
- def isTuple(x: AnyRef) =
- x.getClass.getName matches """^scala\.Tuple(\d+).*"""
+ def stringOf(arg: Any, maxElements: Int): String = {
+ def packageOf(x: AnyRef) = x.getClass.getPackage match {
+ case null => ""
+ case p => p.getName
+ }
+ def isScalaClass(x: AnyRef) = packageOf(x) startsWith "scala."
+ def isScalaCompilerClass(x: AnyRef) = packageOf(x) startsWith "scala.tools.nsc."
// When doing our own iteration is dangerous
def useOwnToString(x: Any) = x match {
@@ -283,13 +286,14 @@ object ScalaRunTime {
case _: Range | _: NumericRange[_] => true
// Sorted collections to the wrong thing (for us) on iteration - ticket #3493
case _: Sorted[_, _] => true
- // StringBuilder(a, b, c) is not so attractive
- case _: StringBuilder => true
+ // StringBuilder(a, b, c) and similar not so attractive
+ case _: StringLike[_] => true
// Don't want to evaluate any elements in a view
case _: TraversableView[_, _] => true
// Don't want to a) traverse infinity or b) be overly helpful with peoples' custom
// collections which may have useful toString methods - ticket #3710
- case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x)
+ // or c) print AbstractFiles which are somehow also Iterable[AbstractFile]s.
+ case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x) || isScalaCompilerClass(x)
// Otherwise, nothing could possibly go wrong
case _ => false
}
@@ -299,29 +303,60 @@ object ScalaRunTime {
case (k, v) => inner(k) + " -> " + inner(v)
case _ => inner(arg)
}
- // The recursively applied attempt to prettify Array printing
+
+ // Special casing Unit arrays, the value class which uses a reference array type.
+ def arrayToString(x: AnyRef) = {
+ if (x.getClass.getComponentType == classOf[BoxedUnit])
+ 0 until (array_length(x) min maxElements) map (_ => "()") mkString ("Array(", ", ", ")")
+ else
+ WrappedArray make x take maxElements map inner mkString ("Array(", ", ", ")")
+ }
+
+ // The recursively applied attempt to prettify Array printing.
+ // Note that iterator is used if possible and foreach is used as a
+ // last resort, because the parallel collections "foreach" in a
+ // random order even on sequences.
def inner(arg: Any): String = arg match {
case null => "null"
case "" => "\"\""
case x: String => if (x.head.isWhitespace || x.last.isWhitespace) "\"" + x + "\"" else x
case x if useOwnToString(x) => x.toString
- case x: AnyRef if isArray(x) => WrappedArray make x take maxElements map inner mkString ("Array(", ", ", ")")
- case x: collection.Map[_, _] => x take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")")
+ case x: AnyRef if isArray(x) => arrayToString(x)
+ case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")")
+ case x: Iterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma
case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")")
- case x => x toString
+ case x => x.toString
}
// The try/catch is defense against iterables which aren't actually designed
// to be iterated, such as some scala.tools.nsc.io.AbstractFile derived classes.
- val s =
- try inner(arg)
- catch {
- case _: StackOverflowError | _: UnsupportedOperationException => arg.toString
- }
-
+ try inner(arg)
+ catch {
+ case _: StackOverflowError | _: UnsupportedOperationException | _: AssertionError => "" + arg
+ }
+ }
+
+ /** stringOf formatted for use in a repl result. */
+ def replStringOf(arg: Any, maxElements: Int): String = {
+ val s = stringOf(arg, maxElements)
val nl = if (s contains "\n") "\n" else ""
- nl + s + "\n"
+
+ nl + s + "\n"
+ }
+ private[scala] def checkZip(what: String, coll1: TraversableOnce[_], coll2: TraversableOnce[_]) {
+ if (sys.props contains "scala.debug.zip") {
+ val xs = coll1.toIndexedSeq
+ val ys = coll2.toIndexedSeq
+ if (xs.length != ys.length) {
+ Console.err.println(
+ "Mismatched zip in " + what + ":\n" +
+ " this: " + xs.mkString(", ") + "\n" +
+ " that: " + ys.mkString(", ")
+ )
+ (new Exception).getStackTrace.drop(2).take(10).foreach(println)
+ }
+ }
}
}
diff --git a/test/instrumented/mkinstrumented b/test/instrumented/mkinstrumented
deleted file mode 100644
index a87e8cb..0000000
--- a/test/instrumented/mkinstrumented
+++ /dev/null
@@ -1,46 +0,0 @@
-#
-#
-# Used to compile a jar with instrumented versions of certain classes.
-#
-
-
-
-
-if [ $# -ne 1 ]
-then
- echo "Must provide build dir ('target' or 'build')."
- exit 1
-fi
-
-
-BUILDDIR=$1
-TOPDIR=../..
-SCALAC=$TOPDIR/$BUILDDIR/pack/bin/scalac
-SRC_DIR=library/
-SCALALIB=$TOPDIR/$BUILDDIR/pack/lib/scala-library.jar
-CLASSDIR=classes/
-ARTIFACT=instrumented.jar
-
-
-# compile it
-rm -rf $CLASSDIR
-mkdir $CLASSDIR
-JSOURCES=`find $SRC_DIR -name "*.java" -print`
-SOURCES=`find $SRC_DIR \( -name "*.scala" -o -name "*.java" \) -print`
-echo $SOURCES
-$SCALAC -d $CLASSDIR $SOURCES
-javac -cp $SCALALIB -d $CLASSDIR $JSOURCES
-
-
-# jar it up
-rm $ARTIFACT
-cd $CLASSDIR
-jar cf $ARTIFACT .
-mv $ARTIFACT ../
-cd ..
-
-
-
-
-
-
diff --git a/test/instrumented/mkinstrumented.sh b/test/instrumented/mkinstrumented.sh
new file mode 100755
index 0000000..d734dd2
--- /dev/null
+++ b/test/instrumented/mkinstrumented.sh
@@ -0,0 +1,51 @@
+#/bin/sh
+#
+# Used to compile a jar with instrumented versions of certain classes.
+#
+
+set -e
+
+run () {
+ echo "% $@"
+ "$@"
+}
+
+if [ $# -ne 1 ]
+then
+ echo "Must provide build dir ('target' or 'build')."
+ exit 1
+fi
+
+scriptDir=$(cd $(dirname $0) && pwd)
+
+TOPDIR="$scriptDir/../.."
+RUNTIME="$TOPDIR/src/library/scala/runtime"
+SOURCES="$RUNTIME/BoxesRunTime.java $RUNTIME/ScalaRunTime.scala"
+SCALAC=$TOPDIR/$1/pack/bin/scalac
+SRC_DIR="$scriptDir/library/scala/runtime"
+SCALALIB=$TOPDIR/$1/pack/lib/scala-library.jar
+CLASSDIR="$scriptDir/classes"
+ARTIFACT=instrumented.jar
+DESTINATION="$TOPDIR/test/files/speclib"
+
+[[ -x "$SCALAC" ]] || exit 1;
+
+# compile it
+run rm -rf $CLASSDIR && mkdir $CLASSDIR
+run cp $SOURCES $SRC_DIR
+( cd $SRC_DIR && run patch BoxesRunTime.java $scriptDir/boxes.patch && run patch ScalaRunTime.scala $scriptDir/srt.patch )
+
+ORIG=$(find $SRC_DIR -name '*.orig')
+[[ -z "$ORIG" ]] || rm -f $ORIG
+
+JSOURCES=$(find $SRC_DIR -name "*.java" -print)
+SOURCES=$(find $SRC_DIR -type f -print)
+# echo $SOURCES
+run $SCALAC -d $CLASSDIR $SOURCES
+run javac -cp $SCALALIB -d $CLASSDIR $JSOURCES
+
+# jar it up
+run cd $CLASSDIR
+run jar cf $ARTIFACT .
+run mv -f $ARTIFACT "$DESTINATION"
+echo "$(cd "$DESTINATION" && pwd)/$ARTIFACT has been created."
\ No newline at end of file
diff --git a/test/instrumented/srt.patch b/test/instrumented/srt.patch
new file mode 100644
index 0000000..ee619b2
--- /dev/null
+++ b/test/instrumented/srt.patch
@@ -0,0 +1,10 @@
+8a9,10
+> /* INSTRUMENTED VERSION */
+>
+68a71,72
+> var arrayApplyCount = 0
+>
+70a75
+> arrayApplyCount += 1
+87a93
+> arrayApplyCount += 1
diff --git a/test/junit/scala/concurrent/impl/DefaultPromiseTest.scala b/test/junit/scala/concurrent/impl/DefaultPromiseTest.scala
new file mode 100644
index 0000000..f3a75e2
--- /dev/null
+++ b/test/junit/scala/concurrent/impl/DefaultPromiseTest.scala
@@ -0,0 +1,344 @@
+package scala.concurrent.impl
+
+import java.util.concurrent.ConcurrentLinkedQueue
+import java.util.concurrent.CountDownLatch
+import org.junit.Assert._
+import org.junit.{ After, Before, Test }
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import scala.annotation.tailrec
+import scala.concurrent.ExecutionContext
+import scala.concurrent.impl.Promise.DefaultPromise
+import scala.util.{ Failure, Success, Try }
+import scala.util.control.NonFatal
+
+/** Tests for the private class DefaultPromise */
+ at RunWith(classOf[JUnit4])
+class DefaultPromiseTest {
+
+ // Many tests in this class use a helper class, Tester, to track the state of
+ // promises and to ensure they behave correctly, particularly the complex behaviour
+ // of linking.
+
+ type Result = Int
+ type PromiseId = Int
+ type HandlerId = Int
+ type ChainId = Int
+
+ /** The state of a set of set of linked promises. */
+ case class Chain(
+ promises: Set[PromiseId],
+ state: Either[Set[HandlerId],Try[Result]]
+ )
+
+ /** A helper class that provides methods for creating, linking, completing and
+ * adding handlers to promises. With each operation it verifies that handlers
+ * are called, any expected exceptions are thrown, and that all promises have
+ * the expected value.
+ *
+ * The links between promises are not tracked precisely. Instead, linked promises
+ * are placed in the same Chain object. Each link in the same chain will share
+ * the same value.
+ */
+ class Tester {
+ var promises = Map.empty[PromiseId, DefaultPromise[Result]]
+ var chains = Map.empty[ChainId, Chain]
+
+ private var counter = 0
+ private def freshId(): Int = {
+ val id = counter
+ counter += 1
+ id
+ }
+
+ /** Handlers report their activity on this queue */
+ private val handlerQueue = new ConcurrentLinkedQueue[(Try[Result], HandlerId)]()
+
+ /** Get the chain for a given promise */
+ private def promiseChain(p: PromiseId): Option[(ChainId, Chain)] = {
+ val found: Iterable[(ChainId, Chain)] = for ((cid, c) <- chains; p0 <- c.promises; if (p0 == p)) yield ((cid, c))
+ found.toList match {
+ case Nil => None
+ case x::Nil => Some(x)
+ case _ => throw new IllegalStateException(s"Promise $p found in more than one chain")
+ }
+ }
+
+ /** Passed to `checkEffect` to indicate the expected effect of an operation */
+ sealed trait Effect
+ case object NoEffect extends Effect
+ case class HandlersFired(result: Try[Result], handlers: Set[HandlerId]) extends Effect
+ case object MaybeIllegalThrown extends Effect
+ case object IllegalThrown extends Effect
+
+ /** Runs an operation while verifying that the operation has the expected effect */
+ private def checkEffect(expected: Effect)(f: => Any) {
+ assert(handlerQueue.isEmpty()) // Should have been cleared by last usage
+ val result = Try(f)
+
+ var fireCounts = Map.empty[(Try[Result], HandlerId), Int]
+ while (!handlerQueue.isEmpty()) {
+ val key = handlerQueue.poll()
+ val newCount = fireCounts.getOrElse(key, 0) + 1
+ fireCounts = fireCounts.updated(key, newCount)
+ }
+
+ def assertIllegalResult = result match {
+ case Failure(e: IllegalStateException) => ()
+ case _ => fail(s"Expected IllegalStateException: $result")
+ }
+
+ expected match {
+ case NoEffect =>
+ assertTrue(s"Shouldn't throw exception: $result", result.isSuccess)
+ assertEquals(Map.empty[(Try[Result], HandlerId), Int], fireCounts)
+ case HandlersFired(firingResult, handlers) =>
+ assert(result.isSuccess)
+ val expectedCounts = handlers.foldLeft(Map.empty[(Try[Result], HandlerId), Int]) {
+ case (map, hid) => map.updated((firingResult, hid), 1)
+ }
+ assertEquals(expectedCounts, fireCounts)
+ case MaybeIllegalThrown =>
+ if (result.isFailure) assertIllegalResult
+ assertEquals(Map.empty, fireCounts)
+ case IllegalThrown =>
+ assertIllegalResult
+ assertEquals(Map.empty, fireCounts)
+ }
+ }
+
+ /** Check each promise has the expected value. */
+ private def assertPromiseValues() {
+ for ((cid, chain) <- chains; p <- chain.promises) {
+ chain.state match {
+ case Right(result) => assertEquals(Some(result), promises(p).value)
+ case Left(_) => ()
+ }
+ }
+ }
+
+ /** Create a promise, returning a handle. */
+ def newPromise(): PromiseId = {
+ val pid = freshId()
+ val cid = freshId()
+ promises = promises.updated(pid, new DefaultPromise[Result]())
+ chains = chains.updated(cid, Chain(Set(pid), Left(Set.empty)))
+ assertPromiseValues()
+ pid
+ }
+
+ /** Complete a promise */
+ def complete(p: PromiseId) {
+ val r = Success(freshId())
+ val (cid, chain) = promiseChain(p).get
+ val (completionEffect, newState) = chain.state match {
+ case Left(handlers) => (HandlersFired(r, handlers), Right(r))
+ case Right(completion) => (IllegalThrown, chain.state)
+ }
+ checkEffect(completionEffect) { promises(p).complete(r) }
+ chains = chains.updated(cid, chain.copy(state = newState))
+ assertPromiseValues()
+ }
+
+ /** Attempt to link two promises together */
+ def link(a: PromiseId, b: PromiseId): (ChainId, ChainId) = {
+ val promiseA = promises(a)
+ val promiseB = promises(b)
+ val (cidA, chainA) = promiseChain(a).get
+ val (cidB, chainB) = promiseChain(b).get
+
+ // Examine the state of each promise's chain to work out
+ // the effect of linking the promises, and to work out
+ // if the two chains should be merged.
+
+ sealed trait MergeOp
+ case object NoMerge extends MergeOp
+ case class Merge(state: Either[Set[HandlerId],Try[Result]]) extends MergeOp
+
+ val (linkEffect, mergeOp) = (chainA.state, chainB.state) match {
+ case (Left(handlers1), Left(handlers2)) =>
+ (NoEffect, Merge(Left(handlers1 ++ handlers2)))
+ case (Left(handlers), Right(result)) =>
+ (HandlersFired(result, handlers), Merge(Right(result)))
+ case (Right(result), Left(handlers)) =>
+ (HandlersFired(result, handlers), Merge(Right(result)))
+ case (Right(_), Right(_)) if (cidA == cidB) =>
+ (MaybeIllegalThrown, NoMerge) // Won't be thrown if happen to link a promise to itself
+ case (Right(_), Right(_)) =>
+ (IllegalThrown, NoMerge)
+ }
+
+ // Perform the linking and merge the chains, if appropriate
+
+ checkEffect(linkEffect) { promiseA.linkRootOf(promiseB) }
+
+ val (newCidA, newCidB) = mergeOp match {
+ case NoMerge => (cidA, cidB)
+ case Merge(newState) => {
+ chains = chains - cidA
+ chains = chains - cidB
+ val newCid = freshId()
+ chains = chains.updated(newCid, Chain(chainA.promises ++ chainB.promises, newState))
+ (newCid, newCid)
+ }
+ }
+ assertPromiseValues()
+ (newCidA, newCidB)
+ }
+
+ /** Attach an onComplete handler. When called, the handler will
+ * place an entry into `handlerQueue` with the handler's identity.
+ * This allows verification of handler calling semantics.
+ */
+ def attachHandler(p: PromiseId): HandlerId = {
+ val hid = freshId()
+ val promise = promises(p)
+ val (cid, chain) = promiseChain(p).get
+ val (attachEffect, newState) = chain.state match {
+ case Left(handlers) =>
+ (NoEffect, Left(handlers + hid))
+ case Right(result) =>
+ (HandlersFired(result, Set(hid)), Right(result))
+ }
+ implicit val ec = new ExecutionContext {
+ def execute(r: Runnable) { r.run() }
+ def reportFailure(t: Throwable) { t.printStackTrace() }
+ }
+
+ checkEffect(attachEffect) { promise.onComplete(result => handlerQueue.add((result, hid))) }
+ chains = chains.updated(cid, chain.copy(state = newState))
+ assertPromiseValues()
+ hid
+ }
+ }
+
+ // Some methods and objects that build a list of promise
+ // actions to test and then execute them
+
+ type PromiseKey = Int
+
+ sealed trait Action
+ case class Complete(p: PromiseKey) extends Action
+ case class Link(a: PromiseKey, b: PromiseKey) extends Action
+ case class AttachHandler(p: PromiseKey) extends Action
+
+ /** Tests a sequence of actions on a Tester. Creates promises as needed. */
+ private def testActions(actions: Seq[Action]) {
+ val t = new Tester()
+ var pMap = Map.empty[PromiseKey, PromiseId]
+ def byKey(key: PromiseKey): PromiseId = {
+ if (!pMap.contains(key)) {
+ pMap = pMap.updated(key, t.newPromise())
+ }
+ pMap(key)
+ }
+
+ actions foreach { action =>
+ action match {
+ case Complete(p) => t.complete(byKey(p))
+ case Link(a, b) => t.link(byKey(a), byKey(b))
+ case AttachHandler(p) => t.attachHandler(byKey(p))
+ }
+ }
+ }
+
+ /** Tests all permutations of actions for `count` promises */
+ private def testPermutations(count: Int) {
+ val ps = (0 until count).toList
+ val pPairs = for (a <- ps; b <- ps) yield (a, b)
+
+ var allActions = ps.map(Complete(_)) ++ pPairs.map { case (a, b) => Link(a, b) } ++ ps.map(AttachHandler(_))
+ for ((permutation, i) <- allActions.permutations.zipWithIndex) {
+ testActions(permutation)
+ }
+ }
+
+ /** Test all permutations of actions with a single promise */
+ @Test
+ def testPermutations1 {
+ testPermutations(1)
+ }
+
+ /** Test all permutations of actions with two promises - about 40 thousand */
+ @Test
+ def testPermutations2 {
+ testPermutations(2)
+ }
+
+ /** Link promises in different orders, using the same link structure as is
+ * used in Future.flatMap */
+ @Test
+ def simulateFlatMapLinking {
+ val random = new scala.util.Random(1)
+ for (_ <- 0 until 10) {
+ val t = new Tester()
+ val flatMapCount = 100
+
+ sealed trait FlatMapEvent
+ case class Link(a: PromiseId, b: PromiseId) extends FlatMapEvent
+ case class Complete(p: PromiseId) extends FlatMapEvent
+
+ @tailrec
+ def flatMapEvents(count: Int, p1: PromiseId, acc: List[FlatMapEvent]): List[FlatMapEvent] = {
+ if (count == 0) {
+ Complete(p1)::acc
+ } else {
+ val p2 = t.newPromise()
+ flatMapEvents(count - 1, p2, Link(p2, p1)::acc)
+ }
+ }
+
+ val events = flatMapEvents(flatMapCount, t.newPromise(), Nil)
+ assertEquals(flatMapCount + 1, t.chains.size) // All promises are unlinked
+ val shuffled = random.shuffle(events)
+ shuffled foreach {
+ case Link(a, b) => t.link(a, b)
+ case Complete(p) => t.complete(p)
+ }
+ // All promises should be linked together, no matter the order of their linking
+ assertEquals(1, t.chains.size)
+ }
+ }
+
+ /** Link promises together on more than one thread, using the same link
+ * structure as is used in Future.flatMap */
+ @Test
+ def testFlatMapLinking {
+ for (_ <- 0 until 100) {
+ val flatMapCount = 100
+ val startLatch = new CountDownLatch(1)
+ val doneLatch = new CountDownLatch(flatMapCount + 1)
+ def execute(f: => Unit) {
+ val ec = ExecutionContext.global
+ ec.execute(new Runnable {
+ def run() {
+ try {
+ startLatch.await()
+ f
+ doneLatch.countDown()
+ } catch {
+ case NonFatal(e) => ec.reportFailure(e)
+ }
+ }
+ })
+ }
+ @tailrec
+ def flatMapTimes(count: Int, p1: DefaultPromise[Int]) {
+ if (count == 0) {
+ execute { p1.success(1) }
+ } else {
+ val p2 = new DefaultPromise[Int]()
+ execute { p2.linkRootOf(p1) }
+ flatMapTimes(count - 1, p2)
+ }
+ }
+
+ val p = new DefaultPromise[Int]()
+ flatMapTimes(flatMapCount, p)
+ startLatch.countDown()
+ doneLatch.await()
+ assertEquals(Some(Success(1)), p.value)
+ }
+ }
+
+}
diff --git a/test/junit/scala/reflect/internal/util/SourceFileTest.scala b/test/junit/scala/reflect/internal/util/SourceFileTest.scala
new file mode 100644
index 0000000..fd24d0a
--- /dev/null
+++ b/test/junit/scala/reflect/internal/util/SourceFileTest.scala
@@ -0,0 +1,58 @@
+package scala.reflect.internal.util
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+ at RunWith(classOf[JUnit4])
+class SourceFileTest {
+ def lineContentOf(code: String, offset: Int) =
+ new OffsetPosition(new BatchSourceFile("", code), offset).lineContent
+ //Position.offset(new BatchSourceFile("", code), offset).lineContent
+
+ /*
+ @Test
+ def si8205_overflow(): Unit = {
+ val file = new BatchSourceFile("", "code no newline")
+ // the bug in lineToString counted until MaxValue, and the AIOOBE came from here
+ assertFalse(file.isEndOfLine(Int.MaxValue))
+ }
+ */
+
+ @Test
+ def si8205_lineToString(): Unit = {
+ assertEquals("", lineContentOf("", 0))
+ assertEquals("abc", lineContentOf("abc", 0))
+ assertEquals("abc", lineContentOf("abc", 3))
+ assertEquals("code no newline", lineContentOf("code no newline", 1))
+ assertEquals("", lineContentOf("\n", 0))
+ assertEquals("abc", lineContentOf("abc\ndef", 0))
+ assertEquals("abc", lineContentOf("abc\ndef", 3))
+ assertEquals("def", lineContentOf("abc\ndef", 4))
+ assertEquals("def", lineContentOf("abc\ndef", 6))
+ assertEquals("def", lineContentOf("abc\ndef\n", 7))
+ }
+
+ @Test
+ def CRisEOL(): Unit = {
+ assertEquals("", lineContentOf("\r", 0))
+ assertEquals("abc", lineContentOf("abc\rdef", 0))
+ assertEquals("abc", lineContentOf("abc\rdef", 3))
+ assertEquals("def", lineContentOf("abc\rdef", 4))
+ assertEquals("def", lineContentOf("abc\rdef", 6))
+ assertEquals("def", lineContentOf("abc\rdef\r", 7))
+ }
+
+ @Test
+ def CRNLisEOL(): Unit = {
+ assertEquals("", lineContentOf("\r\n", 0))
+ assertEquals("abc", lineContentOf("abc\r\ndef", 0))
+ assertEquals("abc", lineContentOf("abc\r\ndef", 3))
+ assertEquals("abc", lineContentOf("abc\r\ndef", 4))
+ assertEquals("def", lineContentOf("abc\r\ndef", 5))
+ assertEquals("def", lineContentOf("abc\r\ndef", 7))
+ assertEquals("def", lineContentOf("abc\r\ndef", 8))
+ assertEquals("def", lineContentOf("abc\r\ndef\r\n", 9))
+ }
+}
diff --git a/test/junit/scala/runtime/ScalaRunTimeTest.scala b/test/junit/scala/runtime/ScalaRunTimeTest.scala
new file mode 100644
index 0000000..9da197c
--- /dev/null
+++ b/test/junit/scala/runtime/ScalaRunTimeTest.scala
@@ -0,0 +1,70 @@
+package scala.runtime
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+/** Tests for the private class DefaultPromise */
+ at RunWith(classOf[JUnit4])
+class ScalaRunTimeTest {
+ @Test
+ def testIsTuple() {
+ import ScalaRunTime.isTuple
+ def check(v: Any) = {
+ assertTrue(v.toString, isTuple(v))
+ }
+
+ val s = ""
+ check(Tuple1(s))
+ check((s, s))
+ check((s, s, s))
+ check((s, s, s, s))
+ check((s, s, s, s, s))
+ check((s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s, s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s, s, s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
+ check((s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s, s))
+
+ // some specialized variants will have mangled classnames
+ check(Tuple1(0))
+ check((0, 0))
+ check((0, 0, 0))
+ check((0, 0, 0, 0))
+ check((0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
+ check((0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
+
+ case class C()
+ val c = new C()
+ assertFalse(c.toString, isTuple(c))
+ }
+}
diff --git a/test/junit/scala/tools/nsc/SampleTest.scala b/test/junit/scala/tools/nsc/SampleTest.scala
new file mode 100644
index 0000000..8e026da
--- /dev/null
+++ b/test/junit/scala/tools/nsc/SampleTest.scala
@@ -0,0 +1,17 @@
+package scala.tools.nsc
+package test
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+/** Sample JUnit test that shows that all pieces
+ of JUnit infrastructure work correctly */
+ at RunWith(classOf[JUnit4])
+class SampleTest {
+ @Test
+ def testMath: Unit = {
+ assert(2+2 == 4, "you didn't get the math right fellow")
+ }
+}
diff --git a/test/osgi/src/BasicLibrary.scala b/test/osgi/src/BasicLibrary.scala
new file mode 100644
index 0000000..6618f02
--- /dev/null
+++ b/test/osgi/src/BasicLibrary.scala
@@ -0,0 +1,37 @@
+package tools.test.osgi
+package libonly
+
+import org.junit.Assert._
+import org.ops4j.pax.exam.CoreOptions._
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.ops4j.pax.exam
+import org.ops4j.pax.exam.junit.{
+ Configuration,
+ ExamReactorStrategy,
+ JUnit4TestRunner
+}
+import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory
+import org.ops4j.pax.swissbox.framework.ServiceLookup
+import org.osgi.framework.BundleContext
+
+
+
+ at RunWith(classOf[JUnit4TestRunner])
+ at ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory]))
+class BasicLibraryTest extends ScalaOsgiHelper {
+ @Configuration
+ def config(): Array[exam.Option] =
+ justCoreLibraryOptions
+
+ @Test
+ def everythingLoads(): Unit = {
+ // Note - This tests sun.misc usage.
+ import scala.concurrent._
+ import scala.concurrent.duration.Duration.Inf
+ import ExecutionContext.Implicits._
+ val x = Future(2) map (_ + 1)
+ assertEquals(3, Await.result(x, Inf))
+ }
+}
diff --git a/test/osgi/src/BasicReflection.scala b/test/osgi/src/BasicReflection.scala
new file mode 100644
index 0000000..8a0a05d
--- /dev/null
+++ b/test/osgi/src/BasicReflection.scala
@@ -0,0 +1,66 @@
+package tools.test.osgi
+package reflection
+package basic
+
+import org.junit.Assert._
+import org.ops4j.pax.exam.CoreOptions._
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.ops4j.pax.exam
+import org.ops4j.pax.exam.junit.{
+ Configuration,
+ ExamReactorStrategy,
+ JUnit4TestRunner
+}
+import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory
+import org.ops4j.pax.swissbox.framework.ServiceLookup
+import org.osgi.framework.BundleContext
+
+
+class C {
+ val f1 = 2
+ var f2 = 3
+
+ def m1 = 4
+ def m2() = 5
+ def m3[T >: String <: Int]: T = ???
+ def m4[A[_], B <: A[Int]](x: A[B])(implicit y: Int) = ???
+ def m5(x: => Int, y: Int*): String = ???
+
+ class C
+ object M
+
+ override def toString = "an instance of C"
+}
+object M
+
+
+ at RunWith(classOf[JUnit4TestRunner])
+ at ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory]))
+class BasicReflectionTest extends ScalaOsgiHelper {
+
+ @Configuration
+ def config(): Array[exam.Option] =
+ justReflectionOptions
+
+ // Ensure Pax-exam requires C/M in our module
+ def dummy = {
+ new C
+ M.toString
+ }
+
+ @Test
+ def basicMirrorThroughOsgi(): Unit = {
+ // Note for now just assert that we can do this stuff.
+ import scala.reflect.runtime.universe._
+ val cm = runtimeMirror(classOf[C].getClassLoader)
+ val im = cm.reflect(new C)
+ assertEquals("Unable to reflect field name!",
+ "value f1",
+ im.reflectField(typeOf[C].member(newTermName("f1")).asTerm).symbol.toString)
+ assertEquals("Unable to reflect value!",
+ 2,
+ im.reflectField(typeOf[C].member(newTermName("f1")).asTerm).get)
+ }
+}
diff --git a/test/osgi/src/BasicTest.scala b/test/osgi/src/BasicTest.scala
new file mode 100644
index 0000000..109b7b9
--- /dev/null
+++ b/test/osgi/src/BasicTest.scala
@@ -0,0 +1,33 @@
+package tools.test.osgi
+
+import org.junit.Assert._
+import org.ops4j.pax.exam.CoreOptions._
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.ops4j.pax.exam
+import org.ops4j.pax.exam.junit.{
+ Configuration,
+ ExamReactorStrategy,
+ JUnit4TestRunner
+}
+import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory
+import org.ops4j.pax.swissbox.framework.ServiceLookup
+import org.osgi.framework.BundleContext
+
+
+
+
+
+ at RunWith(classOf[JUnit4TestRunner])
+ at ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory]))
+class BasicTest extends ScalaOsgiHelper {
+ @Configuration
+ def config(): Array[exam.Option] = {
+ // TODO - Find scala bundles.
+ standardOptions
+ }
+
+ @Test
+ def everythingLoads(): Unit = ()
+}
diff --git a/test/osgi/src/ReflectionToolboxTest.scala b/test/osgi/src/ReflectionToolboxTest.scala
new file mode 100644
index 0000000..bb48078
--- /dev/null
+++ b/test/osgi/src/ReflectionToolboxTest.scala
@@ -0,0 +1,49 @@
+package tools.test.osgi
+package reflection
+package toolbox
+
+import org.junit.Assert._
+import org.ops4j.pax.exam.CoreOptions._
+
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.ops4j.pax.exam
+import org.ops4j.pax.exam.junit.{
+ Configuration,
+ ExamReactorStrategy,
+ JUnit4TestRunner
+}
+import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory
+import org.ops4j.pax.swissbox.framework.ServiceLookup
+import org.osgi.framework.BundleContext
+
+
+class C {
+ val f1 = 2
+}
+
+ at RunWith(classOf[JUnit4TestRunner])
+ at ExamReactorStrategy(Array(classOf[AllConfinedStagedReactorFactory]))
+class ReflectionToolBoxTest extends ScalaOsgiHelper {
+
+ @Configuration
+ def config(): Array[exam.Option] =
+ standardOptions
+
+
+ @Test
+ def basicMirrorThroughOsgi(): Unit = {
+ // Note - this tries to make sure when pulling a toolbox, we get the compiler.
+ import scala.reflect.runtime.universe._
+ import scala.tools.reflect.ToolBox
+ val cm = runtimeMirror(classOf[C].getClassLoader)
+ val tb = cm.mkToolBox()
+ val im = cm.reflect(new C)
+ val tree = tb.parse("1 to 3 map (_+1)")
+ val eval = tb.eval(tree)
+ assertEquals(Vector(2, 3, 4), eval)
+ assertEquals("Evaluate expression using local class.",
+ 2,
+ tb.eval(tb.parse("(new tools.test.osgi.reflection.toolbox.C).f1")))
+ }
+}
diff --git a/test/osgi/src/ScalaOsgiHelper.scala b/test/osgi/src/ScalaOsgiHelper.scala
new file mode 100644
index 0000000..bcdc5c0
--- /dev/null
+++ b/test/osgi/src/ScalaOsgiHelper.scala
@@ -0,0 +1,36 @@
+package tools.test.osgi
+
+import org.ops4j.pax.exam.CoreOptions._
+import org.ops4j.pax.exam
+import java.io.File
+
+trait ScalaOsgiHelper {
+
+ private def allBundleFiles = {
+ def bundleLocation = new File(sys.props.getOrElse("scala.bundle.dir", "build/osgi"))
+ bundleLocation.listFiles filter (_.getName endsWith ".jar")
+ }
+
+ private def filteredBundleFiles(names: String*): Array[exam.Option] =
+ for(bundle <- allBundleFiles; if names exists (bundle.getName contains))
+ yield makeBundle(bundle)
+
+ private def makeBundle(file: File): exam.Option =
+ bundle(file.toURI.toASCIIString)
+
+ def standardOptions: Array[exam.Option] = {
+ val bundles = (allBundleFiles map makeBundle)
+ bundles ++ Array[exam.Option](felix(), equinox(), junitBundles())
+ }
+
+ def justReflectionOptions: Array[exam.Option] = {
+ val bundles = filteredBundleFiles("scala-library", "scala-reflect")
+ bundles ++ Array[exam.Option](felix(), equinox(), junitBundles())
+ }
+
+ def justCoreLibraryOptions: Array[exam.Option] = {
+ val bundles = filteredBundleFiles("scala-library")
+ bundles ++ Array[exam.Option](felix(), equinox(), junitBundles())
+ }
+
+}
diff --git a/test/partest b/test/partest
index 44f7130..842d290 100755
--- a/test/partest
+++ b/test/partest
@@ -53,8 +53,8 @@ if [ -z "$EXT_CLASSPATH" ] ; then
fi
done
elif [ -f "$SCALA_HOME/build/pack/lib/scala-partest.jar" ] ; then
- for lib in `echo "partest library compiler"`; do
- ext="$SCALA_HOME/build/pack/lib/scala-$lib.jar"
+ for lib in `echo "scala-partest scala-library scala-reflect scala-compiler diffutils"`; do
+ ext="$SCALA_HOME/build/pack/lib/$lib.jar"
if [ -z "$EXT_CLASSPATH" ] ; then
EXT_CLASSPATH="$ext"
else
@@ -74,12 +74,22 @@ if $cygwin; then
EXT_CLASSPATH=`cygpath --path --$format "$EXT_CLASSPATH"`
fi
-[ -n "$JAVA_OPTS" ] || JAVA_OPTS="-Xmx1024M -Xms16M"
-[ -n "$SCALAC_OPTS" ] || SCALAC_OPTS="-deprecation"
+# last arg wins, so if JAVA_OPTS already contains one of these options
+# the supplied argument will be used.
+# At this writing it is reported test/partest --all requires 108m permgen.
+JAVA_OPTS="-Xmx1024M -Xms64M -XX:MaxPermSize=128M $JAVA_OPTS"
partestDebugStr=""
if [ ! -z "${PARTEST_DEBUG}" ] ; then
partestDebugStr="-Dpartest.debug=${PARTEST_DEBUG}"
fi
-${JAVACMD:=java} $JAVA_OPTS -cp "$EXT_CLASSPATH" ${partestDebugStr} -Dscala.home="${SCALA_HOME}" -Dpartest.javacmd="${JAVACMD}" -Dpartest.java_opts="${JAVA_OPTS}" -Dpartest.scalac_opts="${SCALAC_OPTS}" -Dpartest.javac_cmd="${JAVA_HOME}/bin/javac" scala.tools.partest.nest.NestRunner "$@"
+${JAVACMD:=java} \
+ $JAVA_OPTS -cp "$EXT_CLASSPATH" \
+ ${partestDebugStr} \
+ -Dscala.home="${SCALA_HOME}" \
+ -Dpartest.javacmd="${JAVACMD}" \
+ -Dpartest.java_opts="${JAVA_OPTS}" \
+ -Dpartest.scalac_opts="${SCALAC_OPTS}" \
+ -Dpartest.javac_cmd="${JAVA_HOME}/bin/javac" \
+ scala.tools.partest.nest.NestRunner "$@"
diff --git a/test/partest.bat b/test/partest.bat
old mode 100644
new mode 100755
index 9ffed9b..b64347c
--- a/test/partest.bat
+++ b/test/partest.bat
@@ -1,99 +1,104 @@
- at echo off
-
-rem ##########################################################################
-rem # Scala code runner 2.7.0-final
-rem ##########################################################################
-rem # (c) 2002-2011 LAMP/EPFL
-rem #
-rem # This is free software; see the distribution for copying conditions.
-rem # There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
-rem # PARTICULAR PURPOSE.
-rem ##########################################################################
-
-rem We adopt the following conventions:
-rem - System/user environment variables start with a letter
-rem - Local batch variables start with an underscore ('_')
-
-if "%OS%"=="Windows_NT" (
- @setlocal
- call :set_home
- set _ARGS=%*
-) else (
- set _SCALA_HOME="%SCALA_HOME%"
- rem The following line tests SCALA_HOME instead of _SCALA_HOME, because
- rem the above change to _SCALA_HOME is not visible within this block.
- if "%SCALA_HOME%"=="" goto error1
- call :set_args
-)
-
-rem We use the value of the JAVACMD environment variable if defined
-set _JAVACMD=%JAVACMD%
-if "%_JAVACMD%"=="" set _JAVACMD=java
-
-rem We use the value of the JAVA_OPTS environment variable if defined
-set _JAVA_OPTS="%JAVA_OPTS%"
-if "%_JAVA_OPTS%"=="" set _JAVA_OPTS=-Xmx256M -Xms16M
-
-rem We use the value of the SCALAC_OPTS environment variable if defined
-set _SCALAC_OPTS="%SCALAC_OPTS%"
-if "%_SCALAC_OPTS%"=="" set _SCALAC_OPTS=-deprecation
-
-set _EXTENSION_CLASSPATH=
-if "%_EXTENSION_CLASSPATH%"=="" (
- if exist "%_SCALA_HOME%\lib\scala-partest.jar" (
- for %%f in ("%_SCALA_HOME%\lib\*") do call :add_cpath "%%f"
- if "%OS%"=="Windows_NT" (
- for /d %%f in ("%_SCALA_HOME%\lib\*") do call :add_cpath "%%f"
- )
- ) else if exist "%_SCALA_HOME%\build\pack\lib\scala-partest.jar" (
- for %%f in ("%_SCALA_HOME%\build\pack\lib\*") do call :add_cpath "%%f"
- if "%OS%"=="Windows_NT" (
- for /d %%f in ("%_SCALA_HOME%\build\pack\lib\*") do call :add_cpath "%%f"
- )
- )
-)
-
-set _PROPS=-Dscala.home="%_SCALA_HOME%" -Dpartest.javacmd="%_JAVACMD%" -Dpartest.java_options="%_JAVA_OPTS%" -Dpartest.scalac_options="%_SCALAC_OPTS%" -Dpartest.javac_cmd="%JAVA_HOME%\bin\javac"
-
-rem echo %_JAVACMD% %_JAVA_OPTS% %_PROPS% -cp "%_EXTENSION_CLASSPATH%" scala.tools.partest.nest.NestRunner %_ARGS%
-%_JAVACMD% %_JAVA_OPTS% %_PROPS% -cp "%_EXTENSION_CLASSPATH%" scala.tools.partest.nest.NestRunner %_ARGS%
-goto end
-
-rem ##########################################################################
-rem # subroutines
-
-:add_cpath
- if "%_EXTENSION_CLASSPATH%"=="" (
- set _EXTENSION_CLASSPATH=%~1
- ) else (
- set _EXTENSION_CLASSPATH=%_EXTENSION_CLASSPATH%;%~1
- )
-goto :eof
-
-rem Variable "%~dps0" works on WinXP SP2 or newer
-rem (see http://support.microsoft.com/?kbid=833431)
-rem set _SCALA_HOME=%~dps0..
-:set_home
- set _BIN_DIR=
- for %%i in (%~sf0) do set _BIN_DIR=%_BIN_DIR%%%~dpsi
- set _SCALA_HOME=%_BIN_DIR%..
-goto :eof
-
-:set_args
- set _ARGS=
- :loop
- rem Argument %1 may contain quotes so we use parentheses here
- if (%1)==() goto :eof
- set _ARGS=%_ARGS% %1
- shift
- goto loop
-
-rem ##########################################################################
-rem # errors
-
-:error1
-echo ERROR: environment variable SCALA_HOME is undefined. It should point to your installation directory.
-goto end
-
-:end
-if "%OS%"=="Windows_NT" @endlocal
+ at echo off
+
+rem ##########################################################################
+rem # Scala code runner 2.9.1.final
+rem ##########################################################################
+rem # (c) 2002-2011 LAMP/EPFL
+rem #
+rem # This is free software; see the distribution for copying conditions.
+rem # There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A
+rem # PARTICULAR PURPOSE.
+rem ##########################################################################
+
+rem We adopt the following conventions:
+rem - System/user environment variables start with a letter
+rem - Local batch variables start with an underscore ('_')
+
+if "%OS%"=="Windows_NT" (
+ @setlocal
+ call :set_home
+ set _ARGS=%*
+) else (
+ set _SCALA_HOME="%SCALA_HOME%"
+ rem The following line tests SCALA_HOME instead of _SCALA_HOME, because
+ rem the above change to _SCALA_HOME is not visible within this block.
+ if "%SCALA_HOME%"=="" goto error1
+ call :set_args
+)
+
+rem We use the value of the JAVACMD environment variable if defined
+set _JAVACMD=%JAVACMD%
+if "%_JAVACMD%"=="" set _JAVACMD=java
+
+rem We use the value of the JAVACCMD environment variable if defined
+set _JAVACCMD=%JAVACCMD%
+if "%_JAVACCMD%"=="" set _JAVACCMD=javac
+
+rem We use the value of the JAVA_OPTS environment variable if defined
+set _JAVA_OPTS=%JAVA_OPTS%
+if "%_JAVA_OPTS%"=="" set _JAVA_OPTS=-Xmx1024M -Xms64M
+
+rem We use the value of the SCALAC_OPTS environment variable if defined
+set _SCALAC_OPTS=%SCALAC_OPTS%
+if "%_SCALAC_OPTS%"=="" set _SCALAC_OPTS=-deprecation
+
+set _EXTENSION_CLASSPATH=
+if "%_EXTENSION_CLASSPATH%"=="" (
+ if exist "%_SCALA_HOME%\lib\scala-partest.jar" (
+ for %%f in ("%_SCALA_HOME%\lib\*") do call :add_cpath "%%f"
+ if "%OS%"=="Windows_NT" (
+ for /d %%f in ("%_SCALA_HOME%\lib\*") do call :add_cpath "%%f"
+ )
+ ) else if exist "%_SCALA_HOME%\build\pack\lib\scala-partest.jar" (
+ for %%f in ("%_SCALA_HOME%\build\pack\lib\*") do call :add_cpath "%%f"
+ if "%OS%"=="Windows_NT" (
+ for /d %%f in ("%_SCALA_HOME%\build\pack\lib\*") do call :add_cpath "%%f"
+ )
+ )
+)
+
+set _PROPS=-Dscala.home="%_SCALA_HOME%" -Dpartest.javacmd="%_JAVACMD%" -Dpartest.java_options="%_JAVA_OPTS%" -Dpartest.scalac_options="%_SCALAC_OPTS%" -Dpartest.javac_cmd="%_JAVACCMD%"
+
+rem echo %_JAVACMD% %_JAVA_OPTS% %_PROPS% -cp "%_EXTENSION_CLASSPATH%" scala.tools.partest.nest.NestRunner %_ARGS%
+%_JAVACMD% %_JAVA_OPTS% %_PROPS% -cp "%_EXTENSION_CLASSPATH%" scala.tools.partest.nest.NestRunner %_ARGS%
+goto end
+
+rem ##########################################################################
+rem # subroutines
+
+:add_cpath
+ if "%_EXTENSION_CLASSPATH%"=="" (
+ set _EXTENSION_CLASSPATH=%~1
+ ) else (
+ set _EXTENSION_CLASSPATH=%_EXTENSION_CLASSPATH%;%~1
+ )
+goto :eof
+
+rem Variable "%~dps0" works on WinXP SP2 or newer
+rem (see http://support.microsoft.com/?kbid=833431)
+rem set _SCALA_HOME=%~dps0..
+:set_home
+ set _BIN_DIR=
+ for %%i in (%~sf0) do set _BIN_DIR=%_BIN_DIR%%%~dpsi
+ set _SCALA_HOME=%_BIN_DIR%..
+goto :eof
+
+:set_args
+ set _ARGS=
+ :loop
+ rem Argument %1 may contain quotes so we use parentheses here
+ if (%1)==() goto :eof
+ set _ARGS=%_ARGS% %1
+ shift
+ goto loop
+
+rem ##########################################################################
+rem # errors
+
+:error1
+echo ERROR: environment variable SCALA_HOME is undefined. It should point to your installation directory.
+goto end
+
+:end
+if "%OS%"=="Windows_NT" @endlocal
+exit /b %errorlevel%
diff --git a/test/pending/continuations-neg/t3628.check b/test/pending/continuations-neg/t3628.check
deleted file mode 100644
index 4df94cd..0000000
--- a/test/pending/continuations-neg/t3628.check
+++ /dev/null
@@ -1,3 +0,0 @@
-ending/continuations-neg/t3628.scala:4: error: not found: type Actor
- val impl: Actor = actor {
- ^
diff --git a/test/pending/junit/scala/util/t7265.scala b/test/pending/junit/scala/util/t7265.scala
new file mode 100644
index 0000000..3b8fa80
--- /dev/null
+++ b/test/pending/junit/scala/util/t7265.scala
@@ -0,0 +1,46 @@
+
+package scala.util
+package test
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+import scala.util.PropertiesTrait
+
+/** The java version property uses the spec version
+ * and must work for all "major.minor" and fail otherwise.
+ */
+ at RunWith(classOf[JUnit4])
+class SpecVersionTest {
+ val sut = new PropertiesTrait {
+ override def javaSpecVersion = "1.7"
+
+ override protected def pickJarBasedOn: Class[_] = ???
+ override protected def propCategory: String = "test"
+
+ // override because of vals like releaseVersion
+ override lazy val scalaProps = new java.util.Properties
+ }
+
+ @Test
+ def comparesCorrectly(): Unit = {
+ assert(sut isJavaAtLeast "1.5")
+ assert(sut isJavaAtLeast "1.6")
+ assert(sut isJavaAtLeast "1.7")
+ assert(!(sut isJavaAtLeast "1.8"))
+ }
+ @Test(expected = classOf[NumberFormatException])
+ def badVersion(): Unit = {
+ sut isJavaAtLeast "1.a"
+ }
+ @Test(expected = classOf[NumberFormatException])
+ def missingVersion(): Unit = {
+ sut isJavaAtLeast "1"
+ }
+ @Test(expected = classOf[NumberFormatException])
+ def notASpec(): Unit = {
+ sut isJavaAtLeast "1.7.1"
+ }
+}
diff --git a/test/pending/jvm/interpreter.scala b/test/pending/jvm/interpreter.scala
deleted file mode 100644
index f0bc8b5..0000000
--- a/test/pending/jvm/interpreter.scala
+++ /dev/null
@@ -1,158 +0,0 @@
-import scala.tools.nsc._
-import scala.tools.partest.ReplTest
-
-object Test extends ReplTest {
- def code = <code>
-// basics
-3+4
-def gcd(x: Int, y: Int): Int = {{
- if (x == 0) y
- else if (y == 0) x
- else gcd(y%x, x)
-}}
-val five = gcd(15,35)
-var x = 1
-x = 2
-val three = x+1
-type anotherint = Int
-val four: anotherint = 4
-val bogus: anotherint = "hello"
-trait PointlessTrait
-val (x,y) = (2,3)
-println("hello")
-
-// ticket #1513
-val t1513 = Array(null)
-// ambiguous toString problem from #547
-val atom = new scala.xml.Atom()
-// overriding toString problem from #1404
-class S(override val toString : String)
-val fish = new S("fish")
-// Test that arrays pretty print nicely.
-val arr = Array("What's", "up", "doc?")
-// Test that arrays pretty print nicely, even when we give them type Any
-val arrInt : Any = Array(1,2,3)
-// Test that nested arrays are pretty-printed correctly
-val arrArrInt : Any = Array(Array(1, 2), Array(3, 4))
-
-// implicit conversions
-case class Foo(n: Int)
-case class Bar(n: Int)
-implicit def foo2bar(foo: Foo) = Bar(foo.n)
-val bar: Bar = Foo(3)
-
-// importing from a previous result
-import bar._
-val m = n
-
-// stressing the imports mechanism
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-val one = 1
-
-
-val x1 = 1
-val x2 = 1
-val x3 = 1
-val x4 = 1
-val x5 = 1
-val x6 = 1
-val x7 = 1
-val x8 = 1
-val x9 = 1
-val x10 = 1
-val x11 = 1
-val x12 = 1
-val x13 = 1
-val x14 = 1
-val x15 = 1
-val x16 = 1
-val x17 = 1
-val x18 = 1
-val x19 = 1
-val x20 = 1
-
-val two = one + x5
-
-// handling generic wildcard arrays (#2386)
-// It's put here because type feedback is an important part of it.
-val xs: Array[_] = Array(1, 2)
-xs.size
-xs.head
-xs filter (_ == 2)
-xs map (_ => "abc")
-xs map (x => x)
-xs map (x => (x, x))
-
-// interior syntax errors should *not* go into multi-line input mode.
-// both of the following should abort immediately:
-def x => y => z
-[1,2,3]
-
-
-// multi-line XML
-<a>
-<b
- c="c"
- d="dd"
-/></a>
-
-
-/*
- /*
- multi-line comment
- */
-*/
-
-
-// multi-line string
-"""
-hello
-there
-"""
-
-(1 + // give up early by typing two blank lines
-
-
-// defining and using quoted names should work (ticket #323)
-def `match` = 1
-val x = `match`
-
-// multiple classes defined on one line
-sealed class Exp; class Fact extends Exp; class Term extends Exp
-def f(e: Exp) = e match {{ // non-exhaustive warning here
- case _:Fact => 3
-}}
-
-</code>.text
-
- def appendix() = {
- val settings = new Settings
- settings.classpath.value = sys.props("java.class.path")
- val interp = new Interpreter(settings)
- interp.interpret("def plusOne(x: Int) = x + 1")
- interp.interpret("plusOne(5)")
- interp.reset()
- interp.interpret("\"after reset\"")
- interp.interpret("plusOne(5) // should be undefined now")
- }
-
- appendix()
-}
diff --git a/test/pending/neg/bug3189.check b/test/pending/neg/bug3189.check
deleted file mode 100644
index 520644f..0000000
--- a/test/pending/neg/bug3189.check
+++ /dev/null
@@ -1,7 +0,0 @@
-bug3189.scala:2: error: illegal start of simple pattern
- val Array(a,b*) = ("": Any)
- ^
-bug3189.scala:3: error: ')' expected but '}' found.
-}
-^
-two errors found
diff --git a/test/pending/neg/bug963.scala b/test/pending/neg/bug963.scala
deleted file mode 100644
index 3be0be1..0000000
--- a/test/pending/neg/bug963.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-// Soundness bug, at #963 and dup at #2079.
-trait A {
- type T
- var v : T
-}
-
-object B {
- def f(x : { val y : A }) { x.y.v = x.y.v }
-
- var a : A = _
- var b : Boolean = false
- def y : A = {
- if(b) {
- a = new A { type T = Int; var v = 1 }
- a
- } else {
- a = new A { type T = String; var v = "" }
- b = true
- a
- }
- }
-}
-
-object Test extends Application {
- B.f(B)
-}
diff --git a/test/pending/neg/dot-classpath.flags b/test/pending/neg/dot-classpath.flags
new file mode 100644
index 0000000..5af7a81
--- /dev/null
+++ b/test/pending/neg/dot-classpath.flags
@@ -0,0 +1 @@
+-Ylog-classpath
\ No newline at end of file
diff --git a/test/pending/neg/dot-classpath/S_1.scala b/test/pending/neg/dot-classpath/S_1.scala
new file mode 100644
index 0000000..f8bd124
--- /dev/null
+++ b/test/pending/neg/dot-classpath/S_1.scala
@@ -0,0 +1,3 @@
+package foo {
+ class Bippy
+}
diff --git a/test/pending/neg/dot-classpath/S_2.scala b/test/pending/neg/dot-classpath/S_2.scala
new file mode 100644
index 0000000..e44c1a5
--- /dev/null
+++ b/test/pending/neg/dot-classpath/S_2.scala
@@ -0,0 +1,3 @@
+class A {
+ def f = new foo.Bippy
+}
\ No newline at end of file
diff --git a/test/pending/neg/macro-invalidusage-badbounds-b.check b/test/pending/neg/macro-invalidusage-badbounds-b.check
new file mode 100644
index 0000000..277f407
--- /dev/null
+++ b/test/pending/neg/macro-invalidusage-badbounds-b.check
@@ -0,0 +1,4 @@
+Macros_Test_2.scala:7: error: type arguments [Int] do not conform to macro method foo's type parameter bounds [U <: String]
+ foo[Int]
+ ^
+one error found
diff --git a/test/pending/neg/macro-invalidusage-badbounds-b.flags b/test/pending/neg/macro-invalidusage-badbounds-b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/pending/neg/macro-invalidusage-badbounds-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/pending/neg/macro-invalidusage-badbounds-b/Impls_1.scala b/test/pending/neg/macro-invalidusage-badbounds-b/Impls_1.scala
new file mode 100644
index 0000000..89020de
--- /dev/null
+++ b/test/pending/neg/macro-invalidusage-badbounds-b/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U <: String](c: Ctx) = ???
+}
diff --git a/test/pending/neg/macro-invalidusage-badbounds-b/Macros_Test_2.scala b/test/pending/neg/macro-invalidusage-badbounds-b/Macros_Test_2.scala
new file mode 100644
index 0000000..3139599
--- /dev/null
+++ b/test/pending/neg/macro-invalidusage-badbounds-b/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Macros {
+ def foo[U <: String] = macro Impls.foo[U]
+}
+
+object Test extends App {
+ import Macros._
+ foo[Int]
+}
\ No newline at end of file
diff --git a/test/pending/neg/plugin-after-terminal/misc/build.sh b/test/pending/neg/plugin-after-terminal/misc/build.sh
old mode 100644
new mode 100755
diff --git a/test/pending/neg/plugin-before-parser/misc/build.sh b/test/pending/neg/plugin-before-parser/misc/build.sh
old mode 100644
new mode 100755
diff --git a/test/pending/neg/plugin-cyclic-dependency/misc/build.sh b/test/pending/neg/plugin-cyclic-dependency/misc/build.sh
old mode 100644
new mode 100755
diff --git a/test/pending/neg/plugin-multiple-rafter/misc/build.sh b/test/pending/neg/plugin-multiple-rafter/misc/build.sh
old mode 100644
new mode 100755
diff --git a/test/pending/neg/plugin-rafter-before-1/misc/build.sh b/test/pending/neg/plugin-rafter-before-1/misc/build.sh
old mode 100644
new mode 100755
diff --git a/test/pending/neg/plugin-rightafter-terminal/misc/build.sh b/test/pending/neg/plugin-rightafter-terminal/misc/build.sh
old mode 100644
new mode 100755
diff --git a/test/pending/neg/reify_packed.check b/test/pending/neg/reify_packed.check
new file mode 100644
index 0000000..f26b902
--- /dev/null
+++ b/test/pending/neg/reify_packed.check
@@ -0,0 +1,4 @@
+reify_packed.scala:6: error: implementation restriction: cannot reify block of type List[_$1] that involves a type declared inside the block being reified. consider casting the return value to a suitable type.
+ reify {
+ ^
+one error found
diff --git a/test/pending/neg/reify_packed.scala b/test/pending/neg/reify_packed.scala
new file mode 100644
index 0000000..7bdaa41
--- /dev/null
+++ b/test/pending/neg/reify_packed.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ reify {
+ class C { override def toString() = "C" }
+ val ret = List((new C, new C))
+ ret.asInstanceOf[List[_]]
+ };
+
+ val toolbox = cm.mkToolBox()
+ println(toolbox.eval(code.tree))
+}
\ No newline at end of file
diff --git a/test/pending/neg/t1557.scala b/test/pending/neg/t1557.scala
new file mode 100644
index 0000000..ba93b45
--- /dev/null
+++ b/test/pending/neg/t1557.scala
@@ -0,0 +1,18 @@
+object Test extends App {
+ trait A
+ trait B extends A
+
+ trait C {
+ trait D { type T >: B <: A }
+ val y: (D with this.type)#T = new B { }
+ }
+
+ class D extends C {
+ trait E
+ type T = E
+ def frob(arg : E) : E = arg
+ frob(y)
+ }
+
+ new D
+}
\ No newline at end of file
diff --git a/test/pending/neg/t2066.scala b/test/pending/neg/t2066.scala
new file mode 100644
index 0000000..46177b1
--- /dev/null
+++ b/test/pending/neg/t2066.scala
@@ -0,0 +1,16 @@
+object Test extends App {
+ trait A {
+ def f[T[_]](x : T[Int]) : T[Any]
+ }
+
+ class B extends A {
+ def f[T[+_]](x : T[Int]) : T[Any] = x
+ }
+
+ class P[Y](var y : Y)
+
+ val p = new P(1)
+ val palias = (new B():A).f[P](p)
+ palias.y = "hello"
+ val z: Int = p.y
+}
\ No newline at end of file
diff --git a/test/pending/neg/t3633/test/Test.scala b/test/pending/neg/t3633/test/Test.scala
index 2c54e7b..395a6be 100644
--- a/test/pending/neg/t3633/test/Test.scala
+++ b/test/pending/neg/t3633/test/Test.scala
@@ -6,18 +6,18 @@ final class Test extends PackageProtected {
package another {
object Main {
- def bug1(t: Test) {
+ def t1(t: Test) {
// Can always be replicated.
println(t.foo)
}
- def bug2(t: Test) {
+ def t2(t: Test) {
// Conditions to replicate: must use -optimise, class Test must be final
println(t.bar)
//@noinline is a usable workaround
}
def main(args: Array[String]) {
- bug1(new Test)
- bug2(new Test)
+ t1(new Test)
+ t2(new Test)
}
}
}
diff --git a/test/pending/neg/t5008.scala b/test/pending/neg/t5008.scala
new file mode 100644
index 0000000..2b20bcf
--- /dev/null
+++ b/test/pending/neg/t5008.scala
@@ -0,0 +1,165 @@
+// These are members of class bar.C, completely unrelated to class foo.A.
+// The types shown below include types defined within foo.A which are:
+//
+// - qualified private
+// - qualified protected
+// - object protected
+//
+// val a : foo.A = { /* compiled code */ }
+// val xprot1 : java.lang.Object with foo.A.FooProt1 = { /* compiled code */ }
+// val xprot2 : java.lang.Object with foo.A.FooProt2 = { /* compiled code */ }
+// val xprot3 : java.lang.Object with foo.A.FooProt3 = { /* compiled code */ }
+// val xprot4 : java.lang.Object with foo.A.FooProt4 = { /* compiled code */ }
+// val xpriv3 : java.lang.Object with foo.A.FooPriv3 = { /* compiled code */ }
+// val xpriv4 : java.lang.Object with foo.A.FooPriv4 = { /* compiled code */ }
+//
+// Indeed it will tell me a type which I cannot access:
+//
+// scala> new bar.C
+// res0: bar.C = bar.C at 1339a0dc
+//
+// scala> res0.xpriv3
+// res1: java.lang.Object with res0.a.FooPriv3 = bar.C$$anon$29 at 39556aec
+//
+// scala> new res0.a.FooPriv3
+// <console>:9: error: trait FooPriv3 in class A cannot be accessed in foo.A
+// new res0.a.FooPriv3
+// ^
+// Looking at how the compiler prints the types of those vals, one
+// develops a suspicion how some of it is being allowed:
+//
+// val xpriv4: C.this.a.FooPriv4
+// val xpriv3: C.this.a.FooPriv3
+// val xprot4: C.this.a.FooProt4
+// val xprot3: C.this.a.FooProt3
+// val xprot2: C.this.a.FooProt2
+// val xprot1: C.this.a.FooProt1
+//
+// That is, "this" is in the prefix somewhere, it's just not a "this"
+// which has any bearing.
+
+package foo {
+ class A {
+ trait Foo
+
+ protected trait FooProt1
+ protected[this] trait FooProt2
+ protected[foo] trait FooProt3
+ protected[A] trait FooProt4
+
+ private trait FooPriv1
+ private[this] trait FooPriv2
+ private[foo] trait FooPriv3
+ private[A] trait FooPriv4
+
+ type BarProt1 = FooProt1
+ type BarProt2 = FooProt2
+ type BarProt3 = FooProt3
+ type BarProt4 = FooProt4
+
+ // type BarPriv1 = FooPriv1
+ // type BarPriv2 = FooPriv2
+ type BarPriv3 = FooPriv3
+ type BarPriv4 = FooPriv4
+
+ def fprot1(x: FooProt1) = x
+ def fprot2(x: FooProt2) = x
+ def fprot3(x: FooProt3) = x
+ def fprot4(x: FooProt4) = x
+
+ // def fpriv1(x: FooPriv1) = x
+ // def fpriv2(x: FooPriv2) = x
+ def fpriv3(x: FooPriv3) = x
+ def fpriv4(x: FooPriv4) = x
+
+ val yprot1 = new FooProt1 { }
+ val yprot2 = new FooProt2 { }
+ val yprot3 = new FooProt3 { }
+ val yprot4 = new FooProt4 { }
+
+ // val ypriv1 = new FooPriv1 { }
+ // val ypriv2 = new FooPriv2 { }
+ val ypriv3 = new FooPriv3 { }
+ val ypriv4 = new FooPriv4 { }
+
+ def fpriv_alt1(x: FooPriv1) = 0 // !!! isn't the private type now in the signature of the (public) method?
+ def fpriv_alt2(x: FooPriv2) = 0 // !!! isn't the private[this] type now in the signature of the (public) method?
+ }
+ // Same package, subclass
+ class B extends A {
+ val xprot1 = new BarProt1 { }
+ val xprot2 = new BarProt2 { }
+ val xprot3 = new BarProt3 { }
+ val xprot4 = new BarProt4 { }
+
+ // val xpriv1 = new BarPriv1 { }
+ // val xpriv2 = new BarPriv2 { }
+ val xpriv3 = new BarPriv3 { }
+ val xpriv4 = new BarPriv4 { }
+
+ override def fprot1(x: BarProt1) = x
+ override def fprot2(x: BarProt2) = x
+ override def fprot3(x: BarProt3) = x
+ override def fprot4(x: BarProt4) = x
+
+ // override def fpriv1(x: BarPriv1) = x
+ // override def fpriv2(x: BarPriv2) = x
+ override def fpriv3(x: BarPriv3) = x
+ override def fpriv4(x: BarPriv4) = x
+ }
+ // Same package, unrelated class
+ class C {
+ val a = new A
+ import a._
+
+ val xprot1 = new BarProt1 { }
+ val xprot2 = new BarProt2 { }
+ val xprot3 = new BarProt3 { }
+ val xprot4 = new BarProt4 { }
+
+ // val xpriv1 = new BarPriv1 { }
+ // val xpriv2 = new BarPriv2 { }
+ val xpriv3 = new BarPriv3 { }
+ val xpriv4 = new BarPriv4 { }
+ }
+}
+
+package bar {
+ // Different package, subclass
+ class B extends foo.A {
+ val xprot1 = new BarProt1 { }
+ val xprot2 = new BarProt2 { }
+ val xprot3 = new BarProt3 { }
+ val xprot4 = new BarProt4 { }
+
+ // val xpriv1 = new BarPriv1 { }
+ // val xpriv2 = new BarPriv2 { }
+ val xpriv3 = new BarPriv3 { }
+ val xpriv4 = new BarPriv4 { }
+
+ override def fprot1(x: BarProt1) = x
+ override def fprot2(x: BarProt2) = x
+ override def fprot3(x: BarProt3) = x
+ override def fprot4(x: BarProt4) = x
+
+ // override def fpriv1(x: BarPriv1) = x
+ // override def fpriv2(x: BarPriv2) = x
+ override def fpriv3(x: BarPriv3) = x
+ override def fpriv4(x: BarPriv4) = x
+ }
+ // Different package, unrelated class
+ class C {
+ val a = new foo.A
+ import a._
+
+ val xprot1 = new BarProt1 { }
+ val xprot2 = new BarProt2 { }
+ val xprot3 = new BarProt3 { }
+ val xprot4 = new BarProt4 { }
+
+ // val xpriv1 = new BarPriv1 { }
+ // val xpriv2 = new BarPriv2 { }
+ val xpriv3 = new BarPriv3 { }
+ val xpriv4 = new BarPriv4 { }
+ }
+}
diff --git a/test/pending/neg/t5353.check b/test/pending/neg/t5353.check
new file mode 100644
index 0000000..75e2435
--- /dev/null
+++ b/test/pending/neg/t5353.check
@@ -0,0 +1,4 @@
+t5353.scala:2: error: this type parameter must be specified
+ def f(x: Boolean) = if (x) Array("abc") else Array()
+ ^
+one error found
diff --git a/test/pending/neg/t5353.scala b/test/pending/neg/t5353.scala
new file mode 100644
index 0000000..1ee869a
--- /dev/null
+++ b/test/pending/neg/t5353.scala
@@ -0,0 +1,3 @@
+class A {
+ def f(x: Boolean) = if (x) Array("abc") else Array()
+}
diff --git a/test/pending/neg/t5589neg.check b/test/pending/neg/t5589neg.check
new file mode 100644
index 0000000..f1dad94
--- /dev/null
+++ b/test/pending/neg/t5589neg.check
@@ -0,0 +1,37 @@
+t5589neg.scala:2: warning: `withFilter' method does not yet exist on scala.util.Either.RightProjection[Int,String], using `filter' method instead
+ def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:2: error: constructor cannot be instantiated to expected type;
+ found : (T1, T2)
+ required: String
+ def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:3: warning: `withFilter' method does not yet exist on scala.util.Either.RightProjection[Int,String], using `filter' method instead
+ def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:3: error: constructor cannot be instantiated to expected type;
+ found : (T1, T2)
+ required: String
+ def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:4: error: constructor cannot be instantiated to expected type;
+ found : (T1,)
+ required: (String, Int)
+ def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:4: error: not found: value y2
+ def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:5: error: constructor cannot be instantiated to expected type;
+ found : (T1, T2, T3)
+ required: (String, Int)
+ def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:5: error: not found: value y1
+ def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2))
+ ^
+t5589neg.scala:5: error: not found: value y2
+ def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2))
+ ^
+two warnings found
+7 errors found
diff --git a/test/pending/neg/t5589neg.scala b/test/pending/neg/t5589neg.scala
new file mode 100644
index 0000000..31ff2c3
--- /dev/null
+++ b/test/pending/neg/t5589neg.scala
@@ -0,0 +1,6 @@
+class A {
+ def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2))
+ def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2))
+ def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2))
+ def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2))
+}
diff --git a/test/pending/neg/t5589neg2.scala b/test/pending/neg/t5589neg2.scala
new file mode 100644
index 0000000..b7c7ab7
--- /dev/null
+++ b/test/pending/neg/t5589neg2.scala
@@ -0,0 +1,13 @@
+class A {
+ def f1(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = {
+ for (((((a, (b, (c, d))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // ok
+ }
+
+ def f2(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = {
+ for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok
+ }
+
+ def f3(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = {
+ for (((((a, (b, _)), es), fs), gs) <- x) yield (es ::: fs).mkString(", ") // ok
+ }
+}
\ No newline at end of file
diff --git a/test/pending/neg/t5618.check b/test/pending/neg/t5618.check
new file mode 100644
index 0000000..118e812
--- /dev/null
+++ b/test/pending/neg/t5618.check
@@ -0,0 +1,7 @@
+t5618.scala:12: error: could not find implicit value for parameter class1: Class1
+ val class2 = new Class2
+ ^
+t5618.scala:18: error: could not find implicit value for parameter class1: Class1
+ val class2 = new Class2
+ ^
+two errors found
\ No newline at end of file
diff --git a/test/pending/neg/t5618.scala b/test/pending/neg/t5618.scala
new file mode 100644
index 0000000..66e0678
--- /dev/null
+++ b/test/pending/neg/t5618.scala
@@ -0,0 +1,27 @@
+
+
+
+
+case class Class1
+
+
+case class Class2(implicit class1: Class1)
+
+
+object Test1 {
+ val class2 = new Class2
+ implicit val class1 = new Class1
+}
+
+
+object Test2 {
+ val class2 = new Class2
+ implicit val class1: Class1 = new Class1
+}
+
+
+object Test3 {
+ implicit val class1 = new Class1
+ val class2 = new Class2
+}
+
diff --git a/test/pending/neg/t7441.check b/test/pending/neg/t7441.check
new file mode 100644
index 0000000..f259457
--- /dev/null
+++ b/test/pending/neg/t7441.check
@@ -0,0 +1,6 @@
+t7441.scala:4: error: type mismatch;
+ found : Int(1)
+ required: List[Any]
+ def test = apply(1)
+ ^
+one error found
diff --git a/test/pending/neg/t7441.scala b/test/pending/neg/t7441.scala
new file mode 100644
index 0000000..dad7421
--- /dev/null
+++ b/test/pending/neg/t7441.scala
@@ -0,0 +1,7 @@
+object Test {
+ object Bar {
+ def apply(xs: List[Any]): Int = 0
+ def test = apply(1)
+ }
+ implicit def foo = 1
+}
diff --git a/test/pending/neg/t796.scala b/test/pending/neg/t796.scala
deleted file mode 100644
index c013f49..0000000
--- a/test/pending/neg/t796.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-case class CaseClass( value: Int );
-
-object PatternMatchBug {
- def matcher( a: AnyRef, b: Any ) {
- (a, b) match {
- case ( instance: CaseClass, instance.value ) =>
- System.out.println( "Match succeeded!" );
- case _ =>
- System.out.println( "Match failed!" );
- }
- }
-
- def main( args : Array[String] ) {
- val caseClassInstance = CaseClass( 42 )
-
- matcher( caseClassInstance, 13 )
- matcher( caseClassInstance, 42 )
- }
-}
-
diff --git a/test/pending/pos/bug1987/bug1987a.scala b/test/pending/pos/bug1987/bug1987a.scala
deleted file mode 100644
index 7a62877..0000000
--- a/test/pending/pos/bug1987/bug1987a.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-package bug
-
-// goes with bug1987b.scala
-package object packageb {
- def func(a: Int) = ()
- def func(a: String) = ()
-}
diff --git a/test/pending/pos/bug1987/bug1987b.scala b/test/pending/pos/bug1987/bug1987b.scala
deleted file mode 100644
index 875f512..0000000
--- a/test/pending/pos/bug1987/bug1987b.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-// compile with bug1987a.scala
-
-package bug.packageb
-// Note that the overloading works if instead of being in the package we import it:
-// replace the above line with import bug.packageb._
-
-class Client {
- val x = func(1) // doesn't compile: type mismatch; found: Int(1) required: String
- val y = func("1") // compiles
-}
diff --git a/test/pending/pos/bug4704.scala b/test/pending/pos/bug4704.scala
new file mode 100644
index 0000000..6af719a
--- /dev/null
+++ b/test/pending/pos/bug4704.scala
@@ -0,0 +1,36 @@
+trait Bar {
+ def f1 = super.hashCode
+ def f2 = super[Object].hashCode
+ def f3 = super[ScalaObject].hashCode
+
+ override def hashCode = 1
+}
+trait Barzoo {
+ def g1 = super.hashCode
+ def g2 = super[Object].hashCode
+ def g3 = super[ScalaObject].hashCode
+
+ override def hashCode = 2
+}
+
+trait Foo extends Bar with Barzoo {
+ def f4 = super.hashCode
+ def f5 = super[Object].hashCode
+ def f6 = super[ScalaObject].hashCode
+ def f6b = super[Bar].hashCode
+ def g4 = super[Barzoo].hashCode
+
+ override def hashCode = super[Bar].hashCode + super[Barzoo].hashCode
+}
+
+class Quux extends Foo {
+ override def hashCode = super.hashCode + super[Object].hashCode + super[ScalaObject].hashCode + super[Foo].hashCode
+}
+
+trait Borp extends Quux {
+ def f12 = super.hashCode
+ def f14 = super[ScalaObject].hashCode
+ def f15 = super[Quux].hashCode
+ override def hashCode = super[Quux].hashCode
+}
+
diff --git a/test/pending/pos/exhaust_2.scala b/test/pending/pos/exhaust_2.scala
new file mode 100644
index 0000000..4f4e47c
--- /dev/null
+++ b/test/pending/pos/exhaust_2.scala
@@ -0,0 +1,54 @@
+object ExhaustivityWarnBugReportMinimal {
+ //sealed is needed for the warning
+ sealed trait FoundNode[T]/*presence of parameters is irrelevant*/
+ // This also causes a warning:
+ // sealed abstract class FoundNode[T]/*presence of parameters is irrelevant*/
+ case class FoundFilter[T](/*presence of parameters is irrelevant*/) extends FoundNode[T]
+ case class FoundTypeCase[T](/*presence of parameters is irrelevant*/) extends FoundNode[T]
+ val f: Some[_] = ???
+ f match {
+ case x: Some[t] => //no warning
+ }
+ //With these variants, no warnings:
+ //val v: (Some[Int], FoundNode[_]) = (???, ???)
+ //val v: (Some[AnyRef], FoundNode[_]) = (???, ???)
+ //val v: (Some[String], FoundNode[_]) = (???, ???)
+
+ val v: (Some[_], FoundNode[_]) = (???, ???)
+ //Warning here:
+ v match {
+ case (x: Some[t], _: FoundNode[_]) =>
+ }
+ v match {
+ case (x: Some[t], _) =>
+ }
+
+ v match {
+ case (x: Some[_], _) =>
+ }
+ case class Foo[T]()
+
+ val vp: (Foo[_], FoundNode[_]) = (???, ???)
+ vp match {
+ case (x: Foo[_], _) =>
+ }
+
+ //No warning here:
+ v match {
+ case (Some(y), _) =>
+ }
+
+ v match {
+ case (x, _) =>
+ }
+
+ val v2: (Some[_], Int) = (???, ???)
+ v2 match {
+ case (x: Some[t], _) =>
+ }
+
+ val v3: (Option[_], FoundNode[_]) = (???, ???)
+ v match {
+ case (x: Option[_], _) =>
+ }
+}
diff --git a/test/pending/pos/inference.scala b/test/pending/pos/inference.scala
new file mode 100644
index 0000000..ee462b6
--- /dev/null
+++ b/test/pending/pos/inference.scala
@@ -0,0 +1,41 @@
+import scala.reflect.runtime.universe._
+
+// inference illuminator
+object Test {
+ class D1[T1 : TypeTag, T2 <: T1 : TypeTag](x: T1) { println(typeOf[(T1, T2)]) }
+ class D2[T1 : TypeTag, T2 >: T1 : TypeTag](x: T1) { println(typeOf[(T1, T2)]) }
+ class D3[+T1 : TypeTag, T2 <: T1 : TypeTag](x: T1) { println(typeOf[(T1, T2)]) }
+ class D4[-T1 : TypeTag, T2 >: T1 : TypeTag](x: T1) { println(typeOf[(T1, T2)]) }
+
+ class E1[T1 : TypeTag, T2 <: T1 : TypeTag](x: D1[T1, T2]) { println(typeOf[(T1, T2)]) }
+ class E2[T1 : TypeTag, T2 >: T1 : TypeTag](x: D2[T1, T2]) { println(typeOf[(T1, T2)]) }
+ class E3[+T1 : TypeTag, T2 <: T1 : TypeTag](x: D3[T1, T2]) { println(typeOf[(T1, T2)]) }
+ class E4[-T1 : TypeTag, T2 >: T1 : TypeTag](x: D4[T1, T2]) { println(typeOf[(T1, T2)]) }
+
+ def main(args: Array[String]): Unit = {
+ // WHY YOU NO LIKE NOTHING SO MUCH SCALAC?
+ val d1 = new D1(5)
+ val d2 = new D2(5)
+ val d3 = new D3(5)
+ val d4 = new D4(5)
+
+ new E1(d1) // fails
+ new E2(d2)
+ new E3(d3) // fails
+ new E4(d4)
+ }
+ // found : Test.D1[Int,Nothing]
+ // required: Test.D1[Int,T2]
+ // Note: Nothing <: T2, but class D1 is invariant in type T2.
+ // You may wish to define T2 as +T2 instead. (SLS 4.5)
+ // new E1(d1)
+ // ^
+ // test/pending/pos/inference.scala:22: error: type mismatch;
+ // found : Test.D3[Int,Nothing]
+ // required: Test.D3[Int,T2]
+ // Note: Nothing <: T2, but class D3 is invariant in type T2.
+ // You may wish to define T2 as +T2 instead. (SLS 4.5)
+ // new E3(d3)
+ // ^
+ // two errors found
+}
\ No newline at end of file
diff --git a/test/pending/pos/nothing.scala b/test/pending/pos/nothing.scala
new file mode 100644
index 0000000..f76017f
--- /dev/null
+++ b/test/pending/pos/nothing.scala
@@ -0,0 +1,24 @@
+// More shoddy treatment for nothing.
+class A {
+ class Q3A[+T1, T2 <: T1](x: T1)
+ class Q3B[+T1, T2 <: T1](x: Q3A[T1, T2])
+
+ val x1 = new Q3B(new Q3A("a"))
+ val x2 = new Q3B(new Q3A[String, Nothing]("a"))
+ val x3 = new Q3B(new Q3A[String, Null]("a"))
+ // test/pending/pos/nothing.scala:5: error: type mismatch;
+ // found : A.this.Q3A[String,Nothing]
+ // required: A.this.Q3A[String,T2]
+ // Note: Nothing <: T2, but class Q3A is invariant in type T2.
+ // You may wish to define T2 as +T2 instead. (SLS 4.5)
+ // val x1 = new Q3B(new Q3A("a"))
+ // ^
+ // test/pending/pos/nothing.scala:6: error: type mismatch;
+ // found : A.this.Q3A[String,Nothing]
+ // required: A.this.Q3A[String,T2]
+ // Note: Nothing <: T2, but class Q3A is invariant in type T2.
+ // You may wish to define T2 as +T2 instead. (SLS 4.5)
+ // val x2 = new Q3B(new Q3A[String, Nothing]("a"))
+ // ^
+ // two errors found
+}
diff --git a/test/pending/pos/t1336.scala b/test/pending/pos/t1336.scala
new file mode 100644
index 0000000..6396798
--- /dev/null
+++ b/test/pending/pos/t1336.scala
@@ -0,0 +1,10 @@
+object Foo {
+ def foreach( f : ((Int,Int)) => Unit ) {
+ println("foreach")
+ f(1,2)
+ }
+
+ for( (a,b) <- this ) {
+ println((a,b))
+ }
+}
diff --git a/test/pending/pos/t1380/gnujaxp.jar.desired.sha1 b/test/pending/pos/t1380/gnujaxp.jar.desired.sha1
deleted file mode 100644
index c155c2a..0000000
--- a/test/pending/pos/t1380/gnujaxp.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ee000286d00c5209d5644462c1cfea87fc8b1342 ?gnujaxp.jar
diff --git a/test/pending/pos/t1380/hallo.scala b/test/pending/pos/t1380/hallo.scala
deleted file mode 100644
index bb8fff2..0000000
--- a/test/pending/pos/t1380/hallo.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object hallo {
- def main(args:Array[String]) = println("hallo")
-}
diff --git a/test/pending/pos/t1476.scala b/test/pending/pos/t1476.scala
new file mode 100644
index 0000000..1f8e95c
--- /dev/null
+++ b/test/pending/pos/t1476.scala
@@ -0,0 +1,23 @@
+abstract class Module {
+ def moduleDemands(): List[Module]
+}
+
+object Test {
+ new Module { owner: Module =>
+ def moduleDemands() = Nil
+
+ val a = new Module { def moduleDemands(): List[Module] = Nil }
+ val b = new Module { def moduleDemands(): List[Module] = owner :: c :: Nil }
+ val c = new Module { def moduleDemands(): List[Module] = owner :: a :: Nil }
+ }
+}
+
+object Test2 {
+ new Module { owner =>
+ def moduleDemands() = Nil
+
+ val a = new Module { def moduleDemands(): List[Module] = Nil }
+ val b = new Module { def moduleDemands(): List[Module] = owner :: c :: Nil }
+ val c = new Module { def moduleDemands(): List[Module] = owner :: a :: Nil }
+ }
+}
diff --git a/test/files/pos/t1751.cmds b/test/pending/pos/t1751.cmds
similarity index 100%
rename from test/files/pos/t1751.cmds
rename to test/pending/pos/t1751.cmds
diff --git a/test/files/pos/t1751/A1_2.scala b/test/pending/pos/t1751/A1_2.scala
similarity index 100%
rename from test/files/pos/t1751/A1_2.scala
rename to test/pending/pos/t1751/A1_2.scala
diff --git a/test/files/pos/t1751/A2_1.scala b/test/pending/pos/t1751/A2_1.scala
similarity index 100%
rename from test/files/pos/t1751/A2_1.scala
rename to test/pending/pos/t1751/A2_1.scala
diff --git a/test/files/pos/t1751/SuiteClasses.java b/test/pending/pos/t1751/SuiteClasses.java
similarity index 100%
rename from test/files/pos/t1751/SuiteClasses.java
rename to test/pending/pos/t1751/SuiteClasses.java
diff --git a/test/files/pos/t1782.cmds b/test/pending/pos/t1782.cmds
similarity index 100%
rename from test/files/pos/t1782.cmds
rename to test/pending/pos/t1782.cmds
diff --git a/test/files/pos/t1782/Ann.java b/test/pending/pos/t1782/Ann.java
similarity index 100%
rename from test/files/pos/t1782/Ann.java
rename to test/pending/pos/t1782/Ann.java
diff --git a/test/files/pos/t1782/Days.java b/test/pending/pos/t1782/Days.java
similarity index 100%
rename from test/files/pos/t1782/Days.java
rename to test/pending/pos/t1782/Days.java
diff --git a/test/files/pos/t1782/ImplementedBy.java b/test/pending/pos/t1782/ImplementedBy.java
similarity index 100%
rename from test/files/pos/t1782/ImplementedBy.java
rename to test/pending/pos/t1782/ImplementedBy.java
diff --git a/test/pending/pos/t1782/Test_1.scala b/test/pending/pos/t1782/Test_1.scala
new file mode 100644
index 0000000..6467a74
--- /dev/null
+++ b/test/pending/pos/t1782/Test_1.scala
@@ -0,0 +1,16 @@
+ at ImplementedBy(classOf[Provider])
+trait Service {
+ def someMethod()
+}
+
+class Provider
+ extends Service
+{
+ // test enumeration java annotations
+ @Ann(Days.Friday) def someMethod() = ()
+
+ // #2103
+ @scala.beans.BeanProperty
+ @Ann(value = Days.Sunday)
+ val t2103 = "test"
+}
diff --git a/test/pending/pos/t1832.scala b/test/pending/pos/t1832.scala
new file mode 100644
index 0000000..bca863f
--- /dev/null
+++ b/test/pending/pos/t1832.scala
@@ -0,0 +1,10 @@
+// Edit by paulp: reduced.
+trait Cloning {
+ trait Foo
+ def fn(g: Int => Unit): Foo
+
+ implicit def mkStar(i: Int) = new { def *(a: Foo): Foo = null }
+
+ val pool1 = 4 * fn { case i => i * 2 }
+ val pool2 = 4 * fn { case i: Int => i * 2 }
+}
diff --git a/test/files/pos/t294.cmds b/test/pending/pos/t294.cmds
similarity index 100%
rename from test/files/pos/t294.cmds
rename to test/pending/pos/t294.cmds
diff --git a/test/files/pos/t294/Ann.java b/test/pending/pos/t294/Ann.java
similarity index 100%
rename from test/files/pos/t294/Ann.java
rename to test/pending/pos/t294/Ann.java
diff --git a/test/files/pos/t294/Ann2.java b/test/pending/pos/t294/Ann2.java
similarity index 100%
rename from test/files/pos/t294/Ann2.java
rename to test/pending/pos/t294/Ann2.java
diff --git a/test/files/pos/t294/Test_1.scala b/test/pending/pos/t294/Test_1.scala
similarity index 100%
rename from test/files/pos/t294/Test_1.scala
rename to test/pending/pos/t294/Test_1.scala
diff --git a/test/files/pos/t294/Test_2.scala b/test/pending/pos/t294/Test_2.scala
similarity index 100%
rename from test/files/pos/t294/Test_2.scala
rename to test/pending/pos/t294/Test_2.scala
diff --git a/test/pending/pos/t3439.scala b/test/pending/pos/t3439.scala
new file mode 100644
index 0000000..425f1ae
--- /dev/null
+++ b/test/pending/pos/t3439.scala
@@ -0,0 +1,2 @@
+abstract class ParametricMessage[M: Manifest](msg: M) { def message = msg }
+case class ParametricMessage1[M: Manifest](msg: M, p1: Class[_]) extends ParametricMessage(msg)
diff --git a/test/pending/pos/t3943/Outer_1.java b/test/pending/pos/t3943/Outer_1.java
new file mode 100644
index 0000000..56c8cc7
--- /dev/null
+++ b/test/pending/pos/t3943/Outer_1.java
@@ -0,0 +1,14 @@
+public class Outer_1<E> {
+ abstract class Inner {
+ abstract public void foo(E e);
+ }
+}
+
+class Child extends Outer_1<String> {
+ // the implicit prefix for Inner is Outer<E> instead of Outer<String>
+ public Inner getInner() {
+ return new Inner() {
+ public void foo(String e) { System.out.println("meh "+e); }
+ };
+ }
+}
diff --git a/test/pending/pos/t3943/test_2.scala b/test/pending/pos/t3943/test_2.scala
new file mode 100644
index 0000000..a19db8b
--- /dev/null
+++ b/test/pending/pos/t3943/test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ val x: Child = new Child
+ x.getInner.foo("meh")
+// ^
+// error: type mismatch;
+// found : java.lang.String("meh")
+// required: E
+}
diff --git a/test/pending/pos/t4012.scala b/test/pending/pos/t4012.scala
new file mode 100644
index 0000000..9b8a1b0
--- /dev/null
+++ b/test/pending/pos/t4012.scala
@@ -0,0 +1,7 @@
+trait C1[+A] {
+ def head: A = sys.error("")
+}
+trait C2[@specialized +A] extends C1[A] {
+ override def head: A = super.head
+}
+class C3 extends C2[Char]
\ No newline at end of file
diff --git a/test/pending/pos/t4123.scala b/test/pending/pos/t4123.scala
new file mode 100644
index 0000000..82ab16b
--- /dev/null
+++ b/test/pending/pos/t4123.scala
@@ -0,0 +1,14 @@
+// /scala/trac/4123/a.scala
+// Sun Feb 19 00:08:53 PST 2012
+
+trait Iter[@specialized(Byte) +A] extends Iterator[A] {
+ self =>
+
+ override def map[B](f: (A) => B) = super.map(f)
+}
+
+class ByteIter extends Iter[Byte] {
+ var i = 0
+ def hasNext = i < 3
+ def next = { i += 1 ; i.toByte }
+}
\ No newline at end of file
diff --git a/test/pending/pos/t4436.scala b/test/pending/pos/t4436.scala
new file mode 100644
index 0000000..acbf0be
--- /dev/null
+++ b/test/pending/pos/t4436.scala
@@ -0,0 +1,3 @@
+trait Chunk[@specialized +A] {
+ def bippy[@specialized B >: A](e: B): Chunk[B]
+}
\ No newline at end of file
diff --git a/test/pending/pos/t4541.scala b/test/pending/pos/t4541.scala
new file mode 100644
index 0000000..c6d9672
--- /dev/null
+++ b/test/pending/pos/t4541.scala
@@ -0,0 +1,10 @@
+ at SerialVersionUID(1L)
+final class SparseArray[@specialized T](private var data : Array[T]) extends Serializable {
+ def use(inData : Array[T]) = {
+ data = inData;
+ }
+
+ def set(that : SparseArray[T]) = {
+ use(that.data.clone)
+ }
+}
\ No newline at end of file
diff --git a/test/pending/pos/t4606.scala b/test/pending/pos/t4606.scala
new file mode 100644
index 0000000..f4e5058
--- /dev/null
+++ b/test/pending/pos/t4606.scala
@@ -0,0 +1,29 @@
+object t4606 {
+ class A(var x: Int)
+ class B(x: Int) extends A(x)
+ trait C { self: B =>
+ def foo = x
+ def bar = self.x
+ def baz = {
+ val b: B = self
+ b.x
+ }
+ }
+
+ object Toto extends App {
+ val x = new B(10) with C
+ println(x.foo) // 10
+ println(x.bar) // 10
+ println(x.baz) // 10
+ println(x.x) // 10
+ }
+}
+
+object t3194 {
+ class A(var x: Int)
+ class B(x: Int) extends A(x) {
+ self: A =>
+
+ def update(z: Int) = this.x = z
+ }
+}
\ No newline at end of file
diff --git a/test/files/neg/caseinherit.flags b/test/pending/pos/t4649.flags
similarity index 100%
copy from test/files/neg/caseinherit.flags
copy to test/pending/pos/t4649.flags
diff --git a/test/pending/pos/t4649.scala b/test/pending/pos/t4649.scala
new file mode 100644
index 0000000..0d6caa8
--- /dev/null
+++ b/test/pending/pos/t4649.scala
@@ -0,0 +1,6 @@
+object Test {
+ // @annotation.tailrec
+ def lazyFilter[E](s: Stream[E], p: E => Boolean): Stream[E] = s match {
+ case h #:: t => if (p(h)) h #:: lazyFilter(t, p) else lazyFilter(t, p)
+ }
+}
diff --git a/test/pending/pos/t4683.scala b/test/pending/pos/t4683.scala
new file mode 100644
index 0000000..7af7024
--- /dev/null
+++ b/test/pending/pos/t4683.scala
@@ -0,0 +1,11 @@
+
+
+
+
+class DelayedInitTest {
+ def a = ()
+ class B extends DelayedInit {
+ a
+ def delayedInit(body: => Unit) = ()
+ }
+}
diff --git a/test/pending/pos/t4717.scala b/test/pending/pos/t4717.scala
new file mode 100644
index 0000000..7eaa3dd
--- /dev/null
+++ b/test/pending/pos/t4717.scala
@@ -0,0 +1,7 @@
+trait Bounds[@specialized A] {
+ // okay without `>: A`
+ def x[B >: A]: Unit = new Bounds[B] {
+ lazy val it = ??? // def or val okay
+ it
+ }
+}
\ No newline at end of file
diff --git a/test/pending/pos/t4786.scala b/test/pending/pos/t4786.scala
new file mode 100644
index 0000000..f057914
--- /dev/null
+++ b/test/pending/pos/t4786.scala
@@ -0,0 +1,24 @@
+trait Matrix[@specialized A, Repr[C] <: Matrix[C, Repr]] { // crash goes away if @specialize is removed
+ def duplicate(mb: MatrixBuilder[A, Repr]): Repr[A] = {
+ mb.zeros
+ }
+}
+trait DenseMatrix[@specialized A] extends Matrix[A, DenseMatrix]
+trait DenseMatrixFlt extends DenseMatrix[Float]
+
+trait MatrixBuilder[@specialized A, Repr[C] <: Matrix[C, Repr]] {
+ def zeros: Repr[A]
+}
+object DenseFloatBuilder extends MatrixBuilder[Float, DenseMatrix] {
+ val zeros = new Object with DenseMatrixFlt
+ // Note:
+ // - in 2.9 crash goes away if the explicit type "DenseMatrixFlt" is assigned to "zeros"
+ // - in 2.9 crash goes away if DenseMatrixFlt is a class instead of a trait:
+ // val zeros = new DenseMatrixFlt
+}
+
+object Test extends App {
+ val m1 = DenseFloatBuilder.zeros // in 2.9 crash goes away if explicit type "DenseMatrixFlt" is assigned to m1
+ val m2 = m1.duplicate(DenseFloatBuilder)
+}
+
diff --git a/test/pending/pos/t4787.scala b/test/pending/pos/t4787.scala
new file mode 100644
index 0000000..cf3fe93
--- /dev/null
+++ b/test/pending/pos/t4787.scala
@@ -0,0 +1,4 @@
+trait MatrixImpl[@specialized A, @specialized B] {
+ def mapTo[ A2, B2, That <: MatrixImpl[A2, B2]](that: That)(f: A => A2) {
+ }
+}
diff --git a/test/pending/pos/t4790.scala b/test/pending/pos/t4790.scala
new file mode 100644
index 0000000..e451fe8
--- /dev/null
+++ b/test/pending/pos/t4790.scala
@@ -0,0 +1,4 @@
+package spectest {
+ class Sp[@specialized A, B](val a: A, val b: B) { }
+ class Fsp[@specialized A, B](a: A, b: B) extends Sp(a,b) { def ab = (a,b) }
+}
diff --git a/test/pending/pos/t4859.scala b/test/pending/pos/t4859.scala
new file mode 100644
index 0000000..ec5abd9
--- /dev/null
+++ b/test/pending/pos/t4859.scala
@@ -0,0 +1,15 @@
+object O {
+ C().CC()
+ D().DD()
+}
+
+case class C() {
+ case class CC()
+}
+
+case class D() {
+ class DD()
+ object DD {
+ def apply() = new DD()
+ }
+}
diff --git a/test/pending/pos/t5091.scala b/test/pending/pos/t5091.scala
new file mode 100644
index 0000000..217e83f
--- /dev/null
+++ b/test/pending/pos/t5091.scala
@@ -0,0 +1,11 @@
+object RecursiveValueNeedsType {
+
+ def foo(param: String) = 42
+ def bar(n: Int) = 42
+
+ {
+ val xxx = foo(param = null)
+ val param = bar(xxx)
+ }
+
+}
diff --git a/test/pending/pos/t5231.scala b/test/pending/pos/t5231.scala
new file mode 100644
index 0000000..77e6631
--- /dev/null
+++ b/test/pending/pos/t5231.scala
@@ -0,0 +1,18 @@
+object Client {
+ sealed trait ConfigLike {
+ def clientID: Int
+ }
+
+ object Config {
+ def apply() : ConfigBuilder = new ConfigBuilder()
+ implicit def build( cb: ConfigBuilder ) : Config = cb.build
+ }
+
+ final class Config private[Client]( val clientID: Int )
+ extends ConfigLike
+
+ final class ConfigBuilder private () extends ConfigLike {
+ var clientID: Int = 0
+ def build : Config = new Config( clientID )
+ }
+}
diff --git a/test/pending/pos/t5259.scala b/test/pending/pos/t5259.scala
new file mode 100644
index 0000000..317e28a
--- /dev/null
+++ b/test/pending/pos/t5259.scala
@@ -0,0 +1,14 @@
+object DefaultArgBogusTypeMismatch {
+
+ class A[T]
+ class B {
+ type T = this.type
+ def m(implicit a : A[T] = new A[T]) = a
+ }
+
+ def newB = new B
+ val a1 = newB.m // Bogus type mismatch
+
+ val stableB = new B
+ val a2 = stableB.m // OK
+}
diff --git a/test/pending/pos/t5265.scala b/test/pending/pos/t5265.scala
new file mode 100644
index 0000000..3be7d21
--- /dev/null
+++ b/test/pending/pos/t5265.scala
@@ -0,0 +1,21 @@
+import java.util.Date
+
+trait TDate
+
+trait TT[A1,T1]
+
+trait TTFactory[F,G] {
+ def create(f: F) : TT[F,G]
+ def sample: F
+}
+
+object Impls {
+
+ // If the c1 is declared before c2, it compiles fine
+ implicit def c2(s: Date) = c1.create(s)
+
+ implicit val c1 = new TTFactory[Date,TDate] {
+ def create(v: Date): TT[Date,TDate] = sys.error("")
+ def sample = new Date
+ }
+}
\ No newline at end of file
diff --git a/test/pending/pos/t5399.scala b/test/pending/pos/t5399.scala
new file mode 100644
index 0000000..89caba3
--- /dev/null
+++ b/test/pending/pos/t5399.scala
@@ -0,0 +1,8 @@
+class Test {
+ class A[T]
+ class B[T](val a: A[T])
+
+ case class CaseClass[T](x: T)
+
+ def break(existB: B[_]) = CaseClass(existB.a) match { case CaseClass(_) => }
+}
diff --git a/test/pending/pos/t5399a.scala b/test/pending/pos/t5399a.scala
new file mode 100644
index 0000000..4ebd85a
--- /dev/null
+++ b/test/pending/pos/t5399a.scala
@@ -0,0 +1,19 @@
+class Foo {
+ trait Init[T]
+ class ScopedKey[T] extends Init[T]
+
+ trait Setting[T] {
+ val key: ScopedKey[T]
+ }
+
+ case class ScopedKey1[T](val foo: Init[T]) extends ScopedKey[T]
+
+ val scalaHome: Setting[Option[String]] = null
+ val scalaVersion: Setting[String] = null
+
+ def testPatternMatch(s: Setting[_]) {
+ s.key match {
+ case ScopedKey1(scalaHome.key | scalaVersion.key) => ()
+ }
+ }
+}
diff --git a/test/pending/pos/t5400.scala b/test/pending/pos/t5400.scala
new file mode 100644
index 0000000..cb4be4b
--- /dev/null
+++ b/test/pending/pos/t5400.scala
@@ -0,0 +1,14 @@
+trait TFn1B {
+ type In
+ type Out
+ type Apply[T <: In] <: Out
+}
+
+trait TFn1[I, O] extends TFn1B {
+ type In = I
+ type Out = O
+}
+
+trait >>[F1 <: TFn1[_, _], F2 <: TFn1[_, _]] extends TFn1[F1#In, F2#Out] {
+ type Apply[T] = F2#Apply[F1#Apply[T]]
+}
diff --git a/test/files/neg/caseinherit.flags b/test/pending/pos/t5503.flags
similarity index 100%
rename from test/files/neg/caseinherit.flags
rename to test/pending/pos/t5503.flags
diff --git a/test/pending/pos/t5503.scala b/test/pending/pos/t5503.scala
new file mode 100644
index 0000000..8a1925d
--- /dev/null
+++ b/test/pending/pos/t5503.scala
@@ -0,0 +1,18 @@
+trait A {
+ type Type
+ type MethodType <: Type
+
+ val MethodType: MethodTypeExtractor = null
+
+ abstract class MethodTypeExtractor {
+ def unapply(tpe: MethodType): Option[(Any, Any)]
+ }
+}
+
+object Test {
+ val a: A = null
+
+ def foo(tpe: a.Type) = tpe match {
+ case a.MethodType(_, _) =>
+ }
+}
\ No newline at end of file
diff --git a/test/pending/pos/t5521.scala b/test/pending/pos/t5521.scala
new file mode 100644
index 0000000..dc025d0
--- /dev/null
+++ b/test/pending/pos/t5521.scala
@@ -0,0 +1,3 @@
+class Foo { type Bar }
+
+class Quux(val foo: Foo)(val bar: foo.Bar)
\ No newline at end of file
diff --git a/test/pending/pos/t5534.scala b/test/pending/pos/t5534.scala
new file mode 100644
index 0000000..834c4fd
--- /dev/null
+++ b/test/pending/pos/t5534.scala
@@ -0,0 +1,11 @@
+object Phrase extends Enumeration {
+ type Phrase = Value
+ val PHRASE1 = Value("My phrase 1")
+ val PHRASE2 = Value("My phrase 2")
+}
+
+class Entity(text:String)
+
+object Test {
+ val myMapWithPhrases = Phrase.values.map(p => (p -> new Entity(p.toString))).toMap
+}
\ No newline at end of file
diff --git a/test/pending/pos/t5559.scala b/test/pending/pos/t5559.scala
new file mode 100644
index 0000000..586e52c
--- /dev/null
+++ b/test/pending/pos/t5559.scala
@@ -0,0 +1,23 @@
+
+
+
+
+object Test {
+
+ class Inv[T]
+
+ def foo[S](interface: Inv[_ >: S], implementation: Inv[S]) {}
+
+ def bar[R, T <: R](interface: Inv[R], impl: Inv[T]) {
+ //foo[T](interface, impl)
+ foo(interface, impl) // Compilation Error
+ // Inv[R] <: Inv[_ >: S]
+ // Inv[T] <: Inv[S]
+ // ----------------------
+ // R >: S
+ // T == S
+ }
+
+}
+
+
diff --git a/test/pending/pos/t5564.scala b/test/pending/pos/t5564.scala
new file mode 100644
index 0000000..1783a90
--- /dev/null
+++ b/test/pending/pos/t5564.scala
@@ -0,0 +1,5 @@
+trait C
+
+class Foo[@specialized(Int) T, A] {
+ def bar[B >: A <: C]: T = ???
+}
diff --git a/test/pending/pos/t5579.scala b/test/pending/pos/t5579.scala
new file mode 100644
index 0000000..a1ee077
--- /dev/null
+++ b/test/pending/pos/t5579.scala
@@ -0,0 +1,29 @@
+import language.existentials
+
+class Result[+A]
+
+case class Success[A](x: A) extends Result[A]
+
+class Apply[A]
+
+object Apply {
+ def apply[A](f: Int => Result[A]): Apply[A] = new Apply[A]
+}
+
+object TestUnit {
+ //Error is here:
+ def goo = Apply { i =>
+ i match {
+ case 1 => Success(Some(1))
+ case _ => Success(None)
+ }
+ }
+
+ //If type is defined explicitly (which I wanted from compiler to infer), then all is ok
+ def foo = Apply[t forSome { type t >: Some[Int] with None.type <: Option[Int] }] { i =>
+ i match {
+ case 1 => Success(Some(1))
+ case _ => Success(None)
+ }
+ }
+}
diff --git a/test/pending/pos/t5585.scala b/test/pending/pos/t5585.scala
new file mode 100644
index 0000000..5d3eb86
--- /dev/null
+++ b/test/pending/pos/t5585.scala
@@ -0,0 +1,18 @@
+class Result[+A]
+
+case class Success[A](x: A) extends Result[A]
+
+class Apply[A]
+
+object Apply {
+ def apply[A](f: Int => Result[A]): Apply[A] = new Apply[A]
+}
+
+object TestUnit {
+ def goo : Apply[Option[Int]] = Apply { i =>
+ val p = i match {
+ case 1 => Success(Some(1))
+ case _ => Success(None)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/pending/pos/t5589.scala b/test/pending/pos/t5589.scala
new file mode 100644
index 0000000..69cbb20
--- /dev/null
+++ b/test/pending/pos/t5589.scala
@@ -0,0 +1,22 @@
+class A {
+ // First three compile.
+ def f1(x: Either[Int, String]) = x.right map (y => y)
+ def f2(x: Either[Int, String]) = for (y <- x.right) yield y
+ def f3(x: Either[Int, (String, Int)]) = x.right map { case (y1, y2) => (y1, y2) }
+ // Last one fails.
+ def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2))
+/**
+./a.scala:5: error: constructor cannot be instantiated to expected type;
+ found : (T1, T2)
+ required: Either[Nothing,(String, Int)]
+ def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2))
+ ^
+./a.scala:5: error: not found: value y1
+ def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2))
+ ^
+./a.scala:5: error: not found: value y2
+ def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2))
+ ^
+three errors found
+**/
+}
diff --git a/test/pending/pos/t5606.scala b/test/pending/pos/t5606.scala
new file mode 100644
index 0000000..2545271
--- /dev/null
+++ b/test/pending/pos/t5606.scala
@@ -0,0 +1,9 @@
+
+
+
+
+
+
+
+
+case class CaseTest[_](someData:String)
diff --git a/test/pending/pos/t5626.scala b/test/pending/pos/t5626.scala
new file mode 100644
index 0000000..7ab3881
--- /dev/null
+++ b/test/pending/pos/t5626.scala
@@ -0,0 +1,12 @@
+object Test {
+ val blob0 = new {
+ case class Foo(i : Int)
+ }
+ val foo0 = blob0.Foo(22)
+
+ val blob1 = new {
+ class Foo(i: Int)
+ object Foo { def apply(i: Int): Foo = new Foo(i) }
+ }
+ val foo1 = blob1.Foo(22)
+}
diff --git a/test/pending/pos/t5639/Bar.scala b/test/pending/pos/t5639/Bar.scala
new file mode 100644
index 0000000..f577500
--- /dev/null
+++ b/test/pending/pos/t5639/Bar.scala
@@ -0,0 +1,7 @@
+package pack.age
+
+import pack.age.Implicits._
+
+object Quux {
+ def baz : Baz = 1
+}
diff --git a/test/pending/pos/t5639/Foo.scala b/test/pending/pos/t5639/Foo.scala
new file mode 100644
index 0000000..6602150
--- /dev/null
+++ b/test/pending/pos/t5639/Foo.scala
@@ -0,0 +1,7 @@
+package pack.age
+
+class Baz
+
+object Implicits {
+ implicit def Baz(n: Int): Baz = new Baz
+}
diff --git a/test/pending/pos/t5654.scala b/test/pending/pos/t5654.scala
new file mode 100644
index 0000000..eb711a5
--- /dev/null
+++ b/test/pending/pos/t5654.scala
@@ -0,0 +1,4 @@
+case class Bomb(a: Array[_])
+case class Bomb2(a: Array[T] forSome { type T })
+class Okay1(a: Array[_])
+case class Okay2(s: Seq[_])
\ No newline at end of file
diff --git a/test/pending/pos/t5712.scala b/test/pending/pos/t5712.scala
new file mode 100644
index 0000000..31f3650
--- /dev/null
+++ b/test/pending/pos/t5712.scala
@@ -0,0 +1,14 @@
+import scala.tools.nsc._
+
+object Test {
+
+ // works
+ def mkReifier(global: Global)(typer: global.analyzer.Typer) = typer
+
+/*
+<console>:10: error: not found: value global
+ class Reifier(global: Global)(typer: global.analyzer.Typer) { }
+*/
+ class Reifier(global: Global)(typer: global.analyzer.Typer) { }
+
+}
diff --git a/test/pending/pos/t7234.scala b/test/pending/pos/t7234.scala
new file mode 100644
index 0000000..59a233d
--- /dev/null
+++ b/test/pending/pos/t7234.scala
@@ -0,0 +1,15 @@
+trait Main {
+ trait A {
+ type B
+ }
+ trait C {
+ def c(a: A, x: Int = 0)(b: a.B)
+ }
+ def c: C
+ def d(a: A, x: Int = 0)(b: a.B)
+
+ def ok1(a: A)(b: a.B) = c.c(a, 42)(b)
+ def ok2(a: A)(b: a.B) = d(a)(b)
+
+ def fail(a: A)(b: a.B) = c.c(a)(b)
+}
diff --git a/test/pending/pos/t7234b.scala b/test/pending/pos/t7234b.scala
new file mode 100644
index 0000000..fee98e8
--- /dev/null
+++ b/test/pending/pos/t7234b.scala
@@ -0,0 +1,20 @@
+trait Main {
+ trait A {
+ type B
+ def b: B
+ }
+ trait C {
+ def c(a: A, x: Int = 0)(b: => a.B, bs: a.B*)
+ def d(a: A = null, x: Int = 0)(b1: => a.B = a.b, b2: a.B = a.b)
+ }
+ def c: C
+ def ok(a: A)(b: a.B) = c.c(a, 42)(b)
+ def fail(a: A)(b: a.B) = c.c(a)(b)
+ def fail2(a: A)(b: a.B) = c.c(a)(b, b)
+ def fail3(a: A)(b: a.B) = c.c(a)(b, Seq[a.B](b): _*)
+
+ def fail4(a: A)(b: a.B) = c.d(a)()
+ def fail5(a: A)(b: a.B) = c.d(a)(b1 = a.b)
+ def fail6(a: A)(b: a.B) = c.d(a)(b2 = a.b)
+ def fail7(a: A)(b: a.B) = c.d()()
+}
diff --git a/test/pending/pos/t7778/Foo_1.java b/test/pending/pos/t7778/Foo_1.java
new file mode 100644
index 0000000..65431ff
--- /dev/null
+++ b/test/pending/pos/t7778/Foo_1.java
@@ -0,0 +1,6 @@
+import java.util.concurrent.Callable;
+
+public abstract class Foo_1<T> implements Callable<Foo_1<Object>.Inner> {
+ public abstract class Inner {
+ }
+}
diff --git a/test/pending/pos/t7778/Test_2.scala b/test/pending/pos/t7778/Test_2.scala
new file mode 100644
index 0000000..306303a
--- /dev/null
+++ b/test/pending/pos/t7778/Test_2.scala
@@ -0,0 +1,3 @@
+class Test {
+ null: Foo_1[_]
+}
diff --git a/test/pending/pos/those-kinds-are-high.scala b/test/pending/pos/those-kinds-are-high.scala
index 3012e72..434e64c 100644
--- a/test/pending/pos/those-kinds-are-high.scala
+++ b/test/pending/pos/those-kinds-are-high.scala
@@ -27,11 +27,27 @@ class A {
//
// List[Container[String] with Template[Container[Any] with Template[Container[Any] with Template[Any]]]
//
+ // *** Update 2/24/2012
+ //
+ // Hey, now there are polytypes in the inferred type.
+ // Not sure if that is progress or regress.
+ //
+ // test/pending/pos/those-kinds-are-high.scala:36: error: type mismatch;
+ // found : C1[String]
+ // required: ScalaObject with Container[String] with Template[ScalaObject with Container with Template[ScalaObject with Container with Template[[X]Container[X]]]]
+ // def fFail = List(new C1[String], new C2[String])
+ // ^
+ // test/pending/pos/those-kinds-are-high.scala:36: error: type mismatch;
+ // found : C2[String]
+ // required: ScalaObject with Container[String] with Template[ScalaObject with Container with Template[ScalaObject with Container with Template[[X]Container[X]]]]
+ // def fFail = List(new C1[String], new C2[String])
+ // ^
+ // two errors found
/** Working version explicitly typed.
*/
def fExplicit = List[Template[Container] with Container[String]](new C1[String], new C2[String])
// nope
- // def fFail = List(new C1[String], new C2[String])
+ def fFail = List(new C1[String], new C2[String])
}
diff --git a/test/pending/pos/unapplyGeneric.scala b/test/pending/pos/unapplyGeneric.scala
deleted file mode 100644
index bf88816..0000000
--- a/test/pending/pos/unapplyGeneric.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-object Bar {
- def unapply[A,B](bar:Bar[A,B]) = Some(bar)
-}
-
-class Bar[A,B](val _1:A, val _2:B) extends Product2[A,B]
-
-object Test {
- new Bar(2, 'a') match {
- case Bar(x,y) =>
- }
-}
diff --git a/test/pending/pos/z1720.scala b/test/pending/pos/z1720.scala
new file mode 100644
index 0000000..6050f3f
--- /dev/null
+++ b/test/pending/pos/z1720.scala
@@ -0,0 +1,16 @@
+package test
+
+class Thing {
+ def info: Info[this.type] = InfoRepository.getInfo(this)
+ def info2: Info[this.type] = {
+ def self: this.type = this
+ InfoRepository.getInfo(self)
+ }
+}
+
+trait Info[T]
+case class InfoImpl[T](thing: T) extends Info[T]
+
+object InfoRepository {
+ def getInfo(t: Thing): Info[t.type] = InfoImpl(t)
+}
\ No newline at end of file
diff --git a/test/pending/run/bug2318.scala b/test/pending/run/bug2318.scala
deleted file mode 100644
index cf81bab..0000000
--- a/test/pending/run/bug2318.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-import java.security._
-
-object Test {
- trait Bar { def bar: Unit }
-
- object Mgr extends SecurityManager {
- override def checkPermission(perm: Permission) = perm match {
- case _: java.lang.RuntimePermission => ()
- case _: java.io.FilePermission => ()
- case x: java.security.AccessControlException if x.getName contains ".networkaddress." => () // generality ftw
- case _ => super.checkPermission(perm)
- }
- }
-
- def bug1() = {
- val p = Runtime.getRuntime().exec("ls");
- type Destroyable = { def destroy() : Unit }
- def doDestroy( obj : Destroyable ) : Unit = obj.destroy();
- doDestroy( p );
- }
- def bug2() = {
- System.setSecurityManager(Mgr)
-
- val b = new Bar { def bar = println("bar") }
- b.bar
-
- val structural = b.asInstanceOf[{ def bar: Unit }]
- structural.bar
- }
-
- def main(args: Array[String]) {
- // figuring this will otherwise break on windows
- try bug1()
- catch { case _: java.io.IOException => () }
-
- bug2()
- }
-}
diff --git a/test/pending/run/bug4704run.scala b/test/pending/run/bug4704run.scala
new file mode 100644
index 0000000..af488a5
--- /dev/null
+++ b/test/pending/run/bug4704run.scala
@@ -0,0 +1,10 @@
+trait MM {
+ protected def method = "bip"
+}
+trait NN {
+ protected def method = "bop"
+}
+trait OOOOO extends MM with NN {
+ override protected def method = super[MM].method + super[NN].method
+ override def hashCode = super[MM].hashCode + super[NN].hashCode
+}
diff --git a/test/pending/run/hk-lub-fail.scala b/test/pending/run/hk-lub-fail.scala
new file mode 100644
index 0000000..b58a86e
--- /dev/null
+++ b/test/pending/run/hk-lub-fail.scala
@@ -0,0 +1,37 @@
+// Tue Jul 12 16:38:23 PDT 2011
+
+class Bip[T1]
+class Foo[T2] extends Bip[T2]
+class Bar[T3] extends Bip[T3]
+
+abstract class Factory[CC[X] <: Bip[X]] { }
+
+object Quux1 extends Factory[Foo]
+object Quux2 extends Factory[Bar]
+
+object Test {
+ // FAIL
+ val xs = List(Quux1, Quux2)
+ // error: type mismatch;
+ // found : Quux1.type (with underlying type object Quux1)
+ // required: Factory[_ >: Bar with Foo <: Bip]
+ // ^^ ^^ ^^ ^^ <-- QUIZ: what is missing from these types?
+
+ // The type it should figure out, come on scalac
+ type F = Factory[CC] forSome { type X ; type CC[X] >: Bar[X] with Foo[X] <: Bip[X] }
+
+ // No problem
+ val ys = List[F](Quux1, Quux2)
+
+ // A repl session to get you started.
+/*
+ val quux1 = EmptyPackageClass.tpe.member(newTermName("Quux1"))
+ val quux2 = EmptyPackageClass.tpe.member(newTermName("Quux2"))
+ val tps = List(quux1, quux2) map (_.tpe)
+ val test = EmptyPackageClass.tpe.member(newTermName("Test"))
+ val f = test.tpe.member(newTypeName("F")).tpe
+
+ val fn = f.normalize.asInstanceOf[ExistentialType]
+ val fn2 = fn.underlying.asInstanceOf[TypeRef]
+*/
+}
diff --git a/test/pending/run/idempotency-partial-functions.scala b/test/pending/run/idempotency-partial-functions.scala
new file mode 100644
index 0000000..bc0ca70
--- /dev/null
+++ b/test/pending/run/idempotency-partial-functions.scala
@@ -0,0 +1,28 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, ToolBoxError}
+import scala.tools.reflect.Eval
+
+// Related to SI-6187
+//
+// Moved to pending as we are currently blocked by the inability
+// to reify the parent types of the anoymous function class,
+// which are not part of the tree, but rather only part of the
+// ClassInfoType.
+object Test extends App {
+ val partials = reify {
+ List((false,true)) collect { case (x,true) => x }
+ }
+ println(Seq(show(partials), showRaw(partials)).mkString("\n\n"))
+ try {
+ println(partials.eval)
+ } catch {
+ case e: ToolBoxError => println(e)
+ }
+ val tb = cm.mkToolBox()
+ val tpartials = tb.typeCheck(partials.tree)
+ println(tpartials)
+ val rtpartials = tb.resetAllAttrs(tpartials)
+ println(tb.eval(rtpartials))
+}
+Test.main(null)
\ No newline at end of file
diff --git a/test/pending/run/implicit-classes.scala b/test/pending/run/implicit-classes.scala
new file mode 100644
index 0000000..02b74de
--- /dev/null
+++ b/test/pending/run/implicit-classes.scala
@@ -0,0 +1,17 @@
+object O {
+ implicit class C(s: String) {
+ def twice = s + s
+ }
+}
+
+/**
+//
+// We'd like to augment object O in Namers so that it also has an implicit method
+object O {
+ implicit class C(s: String) {
+ def twice = s + s
+ }
+ implicit def C(s: String): C = new C(s)
+}
+
+**/
diff --git a/test/pending/run/jar-version.scala b/test/pending/run/jar-version.scala
new file mode 100644
index 0000000..b79dfe7
--- /dev/null
+++ b/test/pending/run/jar-version.scala
@@ -0,0 +1,11 @@
+import scala.util.Properties._
+import scala.tools.nsc.util.ClassPath._
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ infoFor(this).jarManifestMainAttrs get ScalaCompilerVersion match {
+ case Some(v) => println("I was built by scala compiler version " + v)
+ case _ => println("I was not apprised of which scala compiler version built me.")
+ }
+ }
+}
diff --git a/test/pending/run/macro-expand-default.flags b/test/pending/run/macro-expand-default.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/pending/run/macro-expand-default.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-default/Impls_1.scala b/test/pending/run/macro-expand-default/Impls_1.scala
new file mode 100644
index 0000000..7cf8d59
--- /dev/null
+++ b/test/pending/run/macro-expand-default/Impls_1.scala
@@ -0,0 +1,10 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int], y: c.Expr[Int]) = {
+ import c.universe._
+ val sum = Apply(Select(x.tree, newTermName("$minus")), List(y.tree))
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(sum))
+ Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-default/Macros_Test_2.scala b/test/pending/run/macro-expand-default/Macros_Test_2.scala
new file mode 100644
index 0000000..92fe84d
--- /dev/null
+++ b/test/pending/run/macro-expand-default/Macros_Test_2.scala
@@ -0,0 +1,8 @@
+object Test extends App {
+ def foo(x: Int = 2, y: Int = -40) = macro Impls.foo
+ foo(y = -40, x = 2)
+ foo(x = 2, y = -40)
+ foo(x = 100)
+ foo(y = 100)
+ foo()
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.check b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.check
new file mode 100644
index 0000000..e7cb9c3
--- /dev/null
+++ b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.check
@@ -0,0 +1,6 @@
+openImplicits are: List()
+enclosingImplicits are: List((List[Int],scala.this.Predef.implicitly[List[Int]]))
+typetag is: TypeTag[Nothing]
+openImplicits are: List()
+enclosingImplicits are: List((List[String],Test.this.bar[String]))
+typetag is: TypeTag[Nothing]
diff --git a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.flags b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala
new file mode 100644
index 0000000..15bcb58
--- /dev/null
+++ b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Impls_1.scala
@@ -0,0 +1,10 @@
+import scala.reflect.macros.Context
+
+object Impls {
+ def foo[T: c.WeakTypeTag](c: Context): c.Expr[List[T]] = c.universe.reify {
+ println("openImplicits are: " + c.literal(c.openImplicits.toString).splice)
+ println("enclosingImplicits are: " + c.literal(c.enclosingImplicits.toString).splice)
+ println("typetag is: " + c.literal(c.tag[T].toString).splice)
+ null
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Macros_Test_2.scala b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Macros_Test_2.scala
new file mode 100644
index 0000000..27d0662
--- /dev/null
+++ b/test/pending/run/macro-expand-implicit-macro-defeats-type-inference/Macros_Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ implicit def foo[T]: List[T] = macro Impls.foo[T]
+ def bar[T](implicit foo: List[T]) {}
+ implicitly[List[Int]]
+ bar[String]
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-macro-has-context-bound.check b/test/pending/run/macro-expand-macro-has-context-bound.check
new file mode 100644
index 0000000..ac4213d
--- /dev/null
+++ b/test/pending/run/macro-expand-macro-has-context-bound.check
@@ -0,0 +1 @@
+43
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-macro-has-context-bound.flags b/test/pending/run/macro-expand-macro-has-context-bound.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/pending/run/macro-expand-macro-has-context-bound.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala b/test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala
new file mode 100644
index 0000000..be00fd0
--- /dev/null
+++ b/test/pending/run/macro-expand-macro-has-context-bound/Impls_1.scala
@@ -0,0 +1,10 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[U](c: Ctx)(x: c.Expr[U])(evidence: c.Expr[Numeric[U]]) = {
+ import c.universe._
+ val plusOne = Apply(Select(evidence.tree, newTermName("plus")), List(x.tree, Literal(Constant(1))))
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(plusOne))
+ Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-macro-has-context-bound/Macros_Test_2.scala b/test/pending/run/macro-expand-macro-has-context-bound/Macros_Test_2.scala
new file mode 100644
index 0000000..7d16b77
--- /dev/null
+++ b/test/pending/run/macro-expand-macro-has-context-bound/Macros_Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ def foo[U: Numeric](x: U) = macro Impls.foo[U]
+ foo(42)
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-named.flags b/test/pending/run/macro-expand-named.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/pending/run/macro-expand-named.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-named/Impls_1.scala b/test/pending/run/macro-expand-named/Impls_1.scala
new file mode 100644
index 0000000..7cf8d59
--- /dev/null
+++ b/test/pending/run/macro-expand-named/Impls_1.scala
@@ -0,0 +1,10 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo(c: Ctx)(x: c.Expr[Int], y: c.Expr[Int]) = {
+ import c.universe._
+ val sum = Apply(Select(x.tree, newTermName("$minus")), List(y.tree))
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(sum))
+ Expr[Unit](body)
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-named/Macros_Test_2.scala b/test/pending/run/macro-expand-named/Macros_Test_2.scala
new file mode 100644
index 0000000..abebcf8
--- /dev/null
+++ b/test/pending/run/macro-expand-named/Macros_Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ def foo(x: Int, y: Int) = macro Impls.foo
+ foo(y = -40, x = 2)
+ foo(x = 2, y = -40)
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-e1.check b/test/pending/run/macro-expand-tparams-prefix-e1.check
new file mode 100644
index 0000000..4fa05a7
--- /dev/null
+++ b/test/pending/run/macro-expand-tparams-prefix-e1.check
@@ -0,0 +1,3 @@
+TypeTag(List[Int])
+TypeTag(String)
+TypeTag(Boolean)
diff --git a/test/pending/run/macro-expand-tparams-prefix-e1.flags b/test/pending/run/macro-expand-tparams-prefix-e1.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/pending/run/macro-expand-tparams-prefix-e1.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala b/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala
new file mode 100644
index 0000000..26de70c
--- /dev/null
+++ b/test/pending/run/macro-expand-tparams-prefix-e1/Impls_1.scala
@@ -0,0 +1,12 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+ import c.universe._
+ Block(List(
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
+ Literal(Constant(())))
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-e1/Macros_Test_2.scala b/test/pending/run/macro-expand-tparams-prefix-e1/Macros_Test_2.scala
new file mode 100644
index 0000000..d4fc52f
--- /dev/null
+++ b/test/pending/run/macro-expand-tparams-prefix-e1/Macros_Test_2.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ class D[T: TypeTag] {
+ class C[U: TypeTag] {
+ def foo[V] = macro Impls.foo[List[T], U, V]
+ foo[Boolean]
+ }
+ }
+
+ val outer1 = new D[Int]
+ new outer1.C[String]
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-f1.check b/test/pending/run/macro-expand-tparams-prefix-f1.check
new file mode 100644
index 0000000..d152261
--- /dev/null
+++ b/test/pending/run/macro-expand-tparams-prefix-f1.check
@@ -0,0 +1,3 @@
+TypeTag(List[T])
+TypeTag(U)
+TypeTag(Boolean)
diff --git a/test/pending/run/macro-expand-tparams-prefix-f1.flags b/test/pending/run/macro-expand-tparams-prefix-f1.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/pending/run/macro-expand-tparams-prefix-f1.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala b/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala
new file mode 100644
index 0000000..26de70c
--- /dev/null
+++ b/test/pending/run/macro-expand-tparams-prefix-f1/Impls_1.scala
@@ -0,0 +1,12 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Impls {
+ def foo[T, U: c.WeakTypeTag, V](c: Ctx)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+ import c.universe._
+ Block(List(
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(T.toString)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
+ Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Literal(Constant(V.toString))))),
+ Literal(Constant(())))
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-expand-tparams-prefix-f1/Macros_Test_2.scala b/test/pending/run/macro-expand-tparams-prefix-f1/Macros_Test_2.scala
new file mode 100644
index 0000000..9417cf6
--- /dev/null
+++ b/test/pending/run/macro-expand-tparams-prefix-f1/Macros_Test_2.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ class D[T] {
+ class C[U] {
+ def foo[V] = macro Impls.foo[List[T], U, V]
+ foo[Boolean]
+ }
+ }
+
+ val outer1 = new D[Int]
+ new outer1.C[String]
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-a.check b/test/pending/run/macro-quasiinvalidbody-a.check
new file mode 100644
index 0000000..f70d7bb
--- /dev/null
+++ b/test/pending/run/macro-quasiinvalidbody-a.check
@@ -0,0 +1 @@
+42
\ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-a.flags b/test/pending/run/macro-quasiinvalidbody-a.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/pending/run/macro-quasiinvalidbody-a.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-a/Impls_1.scala b/test/pending/run/macro-quasiinvalidbody-a/Impls_1.scala
new file mode 100644
index 0000000..daedde4
--- /dev/null
+++ b/test/pending/run/macro-quasiinvalidbody-a/Impls_1.scala
@@ -0,0 +1,5 @@
+import scala.reflect.macros.{Context => Ctx}
+
+trait Impls {
+ def impl(c: Ctx)(x: c.Expr[Any]) = x
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-a/Macros_Test_2.scala b/test/pending/run/macro-quasiinvalidbody-a/Macros_Test_2.scala
new file mode 100644
index 0000000..27140a7
--- /dev/null
+++ b/test/pending/run/macro-quasiinvalidbody-a/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Macros extends Impls {
+ def foo(x: Any) = macro impl
+}
+
+object Test extends App {
+ import Macros._
+ println(foo(42))
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-b.check b/test/pending/run/macro-quasiinvalidbody-b.check
new file mode 100644
index 0000000..f70d7bb
--- /dev/null
+++ b/test/pending/run/macro-quasiinvalidbody-b.check
@@ -0,0 +1 @@
+42
\ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-b.flags b/test/pending/run/macro-quasiinvalidbody-b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/pending/run/macro-quasiinvalidbody-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-b/Impls_1.scala b/test/pending/run/macro-quasiinvalidbody-b/Impls_1.scala
new file mode 100644
index 0000000..246fc9f
--- /dev/null
+++ b/test/pending/run/macro-quasiinvalidbody-b/Impls_1.scala
@@ -0,0 +1,7 @@
+import scala.reflect.macros.{Context => Ctx}
+
+trait ImplContainer {
+ object Impls {
+ def foo(c: Ctx)(x: c.Expr[Any]) = x
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-quasiinvalidbody-b/Macros_Test_2.scala b/test/pending/run/macro-quasiinvalidbody-b/Macros_Test_2.scala
new file mode 100644
index 0000000..da9445a
--- /dev/null
+++ b/test/pending/run/macro-quasiinvalidbody-b/Macros_Test_2.scala
@@ -0,0 +1,10 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Macros extends ImplContainer {
+ def foo(x: Any) = macro Impls.foo
+}
+
+object Test extends App {
+ import Macros._
+ println(foo(42))
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-reify-array.flags b/test/pending/run/macro-reify-array.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/pending/run/macro-reify-array.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/pending/run/macro-reify-array/Macros_1.scala b/test/pending/run/macro-reify-array/Macros_1.scala
new file mode 100644
index 0000000..f970be5
--- /dev/null
+++ b/test/pending/run/macro-reify-array/Macros_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Macros {
+ def foo[T](s: String) = macro Impls.foo[T]
+
+ object Impls {
+ def foo[T: c.WeakTypeTag](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
+ Array(s.splice)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-reify-array/Test_2.scala b/test/pending/run/macro-reify-array/Test_2.scala
new file mode 100644
index 0000000..e40d5b4
--- /dev/null
+++ b/test/pending/run/macro-reify-array/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ val arr = Macros.foo("hello", "world")
+ println(arr.getClass)
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-reify-groundtypetag-hktypeparams-tags.check b/test/pending/run/macro-reify-groundtypetag-hktypeparams-tags.check
new file mode 100644
index 0000000..7e4b000
--- /dev/null
+++ b/test/pending/run/macro-reify-groundtypetag-hktypeparams-tags.check
@@ -0,0 +1,2 @@
+TypeTag(List[Int])
+TypeTag(List[List[Int]])
diff --git a/test/pending/run/macro-reify-groundtypetag-hktypeparams-tags/Test.scala b/test/pending/run/macro-reify-groundtypetag-hktypeparams-tags/Test.scala
new file mode 100644
index 0000000..3252423
--- /dev/null
+++ b/test/pending/run/macro-reify-groundtypetag-hktypeparams-tags/Test.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def fooTypeTagHK[C[_]: TypeTag, T: TypeTag] = {
+ println(implicitly[TypeTag[C[T]]])
+ println(implicitly[TypeTag[List[C[T]]]])
+ }
+ fooTypeTagHK[List, Int]
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-reify-tagful-b.check b/test/pending/run/macro-reify-tagful-b.check
new file mode 100644
index 0000000..5bd9fe2
--- /dev/null
+++ b/test/pending/run/macro-reify-tagful-b.check
@@ -0,0 +1 @@
+List(List(hello world))
diff --git a/test/pending/run/macro-reify-tagful-b.flags b/test/pending/run/macro-reify-tagful-b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/pending/run/macro-reify-tagful-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/pending/run/macro-reify-tagful-b/Macros_1.scala b/test/pending/run/macro-reify-tagful-b/Macros_1.scala
new file mode 100644
index 0000000..59dbe71
--- /dev/null
+++ b/test/pending/run/macro-reify-tagful-b/Macros_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Macros {
+ def foo[T](s: T) = macro Impls.foo[List[T]]
+
+ object Impls {
+ def foo[T: c.WeakTypeTag](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
+ List(s.splice)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-reify-tagful-b/Test_2.scala b/test/pending/run/macro-reify-tagful-b/Test_2.scala
new file mode 100644
index 0000000..1422349
--- /dev/null
+++ b/test/pending/run/macro-reify-tagful-b/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ val list: List[List[String]] = Macros.foo(List("hello world"))
+ println(list)
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-reify-tagless-b.check b/test/pending/run/macro-reify-tagless-b.check
new file mode 100644
index 0000000..61ebb4e
--- /dev/null
+++ b/test/pending/run/macro-reify-tagless-b.check
@@ -0,0 +1,3 @@
+error: macro must not return an expr that contains free type variables (namely: T). have you forgot to use c.TypeTag annotations for type parameters external to a reifee?
+
+java.lang.Error: reflective compilation has failed
diff --git a/test/pending/run/macro-reify-tagless-b.flags b/test/pending/run/macro-reify-tagless-b.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/pending/run/macro-reify-tagless-b.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/pending/run/macro-reify-tagless-b/Impls_Macros_1.scala b/test/pending/run/macro-reify-tagless-b/Impls_Macros_1.scala
new file mode 100644
index 0000000..a581c47
--- /dev/null
+++ b/test/pending/run/macro-reify-tagless-b/Impls_Macros_1.scala
@@ -0,0 +1,11 @@
+import scala.reflect.macros.{Context => Ctx}
+
+object Macros {
+ def foo[T](s: T) = macro Impls.foo[List[T]]
+
+ object Impls {
+ def foo[T](c: Ctx)(s: c.Expr[T]) = c.universe.reify {
+ List(s.splice)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-reify-tagless-b/Test_2.scala b/test/pending/run/macro-reify-tagless-b/Test_2.scala
new file mode 100644
index 0000000..10487b1
--- /dev/null
+++ b/test/pending/run/macro-reify-tagless-b/Test_2.scala
@@ -0,0 +1,13 @@
+object Test extends App {
+ //val list: List[String] = Macros.foo("hello world")
+ //println(list)
+
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ val tpt = AppliedTypeTree(Ident(definitions.ListClass), List(Ident(definitions.StringClass)))
+ val rhs = Apply(Select(Ident(newTermName("Macros")), newTermName("foo")), List(Literal(Constant("hello world"))))
+ val list = ValDef(NoMods, newTermName("list"), tpt, rhs)
+ val tree = Block(list, Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(Ident(list.name))))
+ println(cm.mkToolBox().eval(tree))
+}
diff --git a/test/pending/run/macro-reify-typetag-hktypeparams-notags.check b/test/pending/run/macro-reify-typetag-hktypeparams-notags.check
new file mode 100644
index 0000000..53acc91
--- /dev/null
+++ b/test/pending/run/macro-reify-typetag-hktypeparams-notags.check
@@ -0,0 +1,2 @@
+TypeTag(C[T])
+TypeTag(List[C[T]])
diff --git a/test/pending/run/macro-reify-typetag-hktypeparams-notags/Test.scala b/test/pending/run/macro-reify-typetag-hktypeparams-notags/Test.scala
new file mode 100644
index 0000000..c7b1ced
--- /dev/null
+++ b/test/pending/run/macro-reify-typetag-hktypeparams-notags/Test.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def fooNoTypeTagHK[C[_], T] = {
+ println(implicitly[TypeTag[C[T]]])
+ println(implicitly[TypeTag[List[C[T]]]])
+ }
+ fooNoTypeTagHK[List, Int]
+}
\ No newline at end of file
diff --git a/test/pending/run/macro-reify-typetag-hktypeparams-tags.check b/test/pending/run/macro-reify-typetag-hktypeparams-tags.check
new file mode 100644
index 0000000..7e4b000
--- /dev/null
+++ b/test/pending/run/macro-reify-typetag-hktypeparams-tags.check
@@ -0,0 +1,2 @@
+TypeTag(List[Int])
+TypeTag(List[List[Int]])
diff --git a/test/pending/run/macro-reify-typetag-hktypeparams-tags/Test.scala b/test/pending/run/macro-reify-typetag-hktypeparams-tags/Test.scala
new file mode 100644
index 0000000..3252423
--- /dev/null
+++ b/test/pending/run/macro-reify-typetag-hktypeparams-tags/Test.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ def fooTypeTagHK[C[_]: TypeTag, T: TypeTag] = {
+ println(implicitly[TypeTag[C[T]]])
+ println(implicitly[TypeTag[List[C[T]]]])
+ }
+ fooTypeTagHK[List, Int]
+}
\ No newline at end of file
diff --git a/test/pending/run/partial-anyref-spec.check b/test/pending/run/partial-anyref-spec.check
new file mode 100644
index 0000000..26e4193
--- /dev/null
+++ b/test/pending/run/partial-anyref-spec.check
@@ -0,0 +1,13 @@
+Fn$mcII$sp
+Fn$mcLI$sp
+Fn$mcLI$sp
+Fn$mcIL$sp
+Fn
+Fn
+Fn$mcIL$sp
+Fn
+Fn
+Fn3
+Fn3$mcLIDF$sp
+Fn3$mcBIDF$sp
+Fn3
diff --git a/test/pending/run/partial-anyref-spec.scala b/test/pending/run/partial-anyref-spec.scala
new file mode 100644
index 0000000..49ed514
--- /dev/null
+++ b/test/pending/run/partial-anyref-spec.scala
@@ -0,0 +1,31 @@
+class Fn[@specialized(Int, AnyRef) -T, @specialized(Int, AnyRef) +R] {
+ override def toString = getClass.getName
+}
+
+class Fn3[
+ @specialized(Int, AnyRef) -T1,
+ @specialized(Double, AnyRef) -T2,
+ @specialized(Float) -T3,
+ @specialized(Byte, AnyRef) +R
+] {
+ override def toString = getClass.getName
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println(new Fn[Int, Int])
+ println(new Fn[Int, Byte])
+ println(new Fn[Int, AnyRef])
+ println(new Fn[Byte, Int])
+ println(new Fn[Byte, Byte])
+ println(new Fn[Byte, AnyRef])
+ println(new Fn[AnyRef, Int])
+ println(new Fn[AnyRef, Byte])
+ println(new Fn[AnyRef, AnyRef])
+
+ println(new Fn3[Int, Int, Int, Int])
+ println(new Fn3[Int, Double, Float, Int])
+ println(new Fn3[Int, Double, Float, Byte])
+ println(new Fn3[AnyRef, Double, AnyRef, Int])
+ }
+}
diff --git a/test/pending/run/reflection-mem-eval.scala b/test/pending/run/reflection-mem-eval.scala
new file mode 100644
index 0000000..9045c44
--- /dev/null
+++ b/test/pending/run/reflection-mem-eval.scala
@@ -0,0 +1,26 @@
+import scala.tools.partest.MemoryTest
+
+trait A { type T <: A }
+trait B { type T <: B }
+
+object Test extends MemoryTest {
+ lazy val tb = {
+ import scala.reflect.runtime.universe._
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.tools.reflect.ToolBox
+ cm.mkToolBox()
+ }
+
+ override def maxDelta = 10
+ override def calcsPerIter = 3
+ override def calc() {
+ var snippet = """
+ trait A { type T <: A }
+ trait B { type T <: B }
+ def foo[T](x: List[T]) = x
+ foo(List(new A {}, new B {}))
+ """.trim
+ snippet = snippet + "\n" + (List.fill(50)(snippet.split("\n").last) mkString "\n")
+ tb.eval(tb.parse(snippet))
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_addressbook.check b/test/pending/run/reify_addressbook.check
new file mode 100644
index 0000000..4e12f87
--- /dev/null
+++ b/test/pending/run/reify_addressbook.check
@@ -0,0 +1,30 @@
+<html>
+ <head>
+ <title>
+ My Address Book
+ </title>
+ <style type="text/css"> table { border-right: 1px solid #cccccc; }
+ th { background-color: #cccccc; }
+ td { border-left: 1px solid #acacac; }
+ td { border-bottom: 1px solid #acacac;
+ </style>
+ </head>
+ <body>
+ <table cellspacing="0" cellpadding="2">
+ <tr>
+ <th>Name</th>
+ <th>Age</th>
+ </tr>
+ <tr>
+ <td> Tom </td>
+ <td> 20 </td>
+ </tr><tr>
+ <td> Bob </td>
+ <td> 22 </td>
+ </tr><tr>
+ <td> James </td>
+ <td> 19 </td>
+ </tr>
+ </table>
+ </body>
+ </html>
diff --git a/test/pending/run/reify_addressbook.scala b/test/pending/run/reify_addressbook.scala
new file mode 100644
index 0000000..d53a0f7
--- /dev/null
+++ b/test/pending/run/reify_addressbook.scala
@@ -0,0 +1,65 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ case class Person(name: String, age: Int)
+
+ /** An AddressBook takes a variable number of arguments
+ * which are accessed as a Sequence
+ */
+ class AddressBook(a: Person*) {
+ private val people: List[Person] = a.toList
+
+ /** Serialize to XHTML. Scala supports XML literals
+ * which may contain Scala expressions between braces,
+ * which are replaced by their evaluation
+ */
+ def toXHTML =
+ <table cellpadding="2" cellspacing="0">
+ <tr>
+ <th>Name</th>
+ <th>Age</th>
+ </tr>
+ { for (p <- people) yield
+ <tr>
+ <td> { p.name } </td>
+ <td> { p.age.toString() } </td>
+ </tr>
+ }
+ </table>;
+ }
+
+ /** We introduce CSS using raw strings (between triple
+ * quotes). Raw strings may contain newlines and special
+ * characters (like \) are not interpreted.
+ */
+ val header =
+ <head>
+ <title>
+ { "My Address Book" }
+ </title>
+ <style type="text/css"> {
+ """table { border-right: 1px solid #cccccc; }
+ th { background-color: #cccccc; }
+ td { border-left: 1px solid #acacac; }
+ td { border-bottom: 1px solid #acacac;"""}
+ </style>
+ </head>;
+
+ val people = new AddressBook(
+ Person("Tom", 20),
+ Person("Bob", 22),
+ Person("James", 19));
+
+ val page =
+ <html>
+ { header }
+ <body>
+ { people.toXHTML }
+ </body>
+ </html>;
+
+ println(page)
+ }.eval
+}
diff --git a/test/pending/run/reify_brainf_ck.check b/test/pending/run/reify_brainf_ck.check
new file mode 100644
index 0000000..702bb18
--- /dev/null
+++ b/test/pending/run/reify_brainf_ck.check
@@ -0,0 +1,4 @@
+---running---
+Hello World!
+
+---done---
diff --git a/test/pending/run/reify_brainf_ck.scala b/test/pending/run/reify_brainf_ck.scala
new file mode 100644
index 0000000..2af3bca
--- /dev/null
+++ b/test/pending/run/reify_brainf_ck.scala
@@ -0,0 +1,79 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ import scala.annotation._
+
+ trait Func[T] {
+ val zero: T
+ def inc(t: T): T
+ def dec(t: T): T
+ def in: T
+ def out(t: T): Unit
+ }
+
+ object ByteFunc extends Func[Byte] {
+ override val zero: Byte = 0
+ override def inc(t: Byte) = ((t + 1) & 0xFF).toByte
+ override def dec(t: Byte) = ((t - 1) & 0xFF).toByte
+ override def in: Byte = readByte
+ override def out(t: Byte) { print(t.toChar) }
+ }
+
+ case class Tape[T](left: List[T], cell: T, right: List[T])(implicit func: Func[T]) {
+ private def headOf(list:List[T]) = if (list.isEmpty) func.zero else list.head
+ private def tailOf(list:List[T]) = if (list.isEmpty) Nil else list.tail
+ def isZero = cell == func.zero
+ def execute(ch: Char) = (ch: @switch) match {
+ case '+' => copy(cell = func.inc(cell))
+ case '-' => copy(cell = func.dec(cell))
+ case '<' => Tape(tailOf(left), headOf(left), cell :: right)
+ case '>' => Tape(cell :: left, headOf(right), tailOf(right))
+ case '.' => func.out(cell); this
+ case ',' => copy(cell = func.in)
+ case '[' | ']' => this
+ case _ => error("Unexpected token: " + ch)
+ }
+ }
+
+ object Tape {
+ def empty[T](func: Func[T]) = Tape(Nil, func.zero, Nil)(func)
+ }
+
+ class Brainfuck[T](func:Func[T]) {
+
+ def execute(p: String) {
+ val prog = p.replaceAll("[^\\+\\-\\[\\]\\.\\,\\>\\<]", "")
+
+ @tailrec def braceMatcher(pos: Int, stack: List[Int], o2c: Map[Int, Int]): Map[Int,Int] =
+ if(pos == prog.length) o2c else (prog(pos): @switch) match {
+ case '[' => braceMatcher(pos + 1, pos :: stack, o2c)
+ case ']' => braceMatcher(pos + 1, stack.tail, o2c + (stack.head -> pos))
+ case _ => braceMatcher(pos + 1, stack, o2c)
+ }
+
+ val open2close = braceMatcher(0, Nil, Map())
+ val close2open = open2close.map(_.swap)
+
+ @tailrec def ex(pos:Int, tape:Tape[T]): Unit =
+ if(pos < prog.length) ex((prog(pos): @switch) match {
+ case '[' if tape.isZero => open2close(pos)
+ case ']' if ! tape.isZero => close2open(pos)
+ case _ => pos + 1
+ }, tape.execute(prog(pos)))
+
+ println("---running---")
+ ex(0, Tape.empty(func))
+ println("\n---done---")
+ }
+ }
+
+ val bf = new Brainfuck(ByteFunc)
+ bf.execute(""">+++++++++[<++++++++>-]<.>+++++++[<++
+ ++>-]<+.+++++++..+++.[-]>++++++++[<++++>-]
+ <.#>+++++++++++[<+++++>-]<.>++++++++[<++
+ +>-]<.+++.------.--------.[-]>++++++++[<++++>
+ -]<+.[-]++++++++++.""")
+ }.eval
+}
diff --git a/test/pending/run/reify_callccinterpreter.check b/test/pending/run/reify_callccinterpreter.check
new file mode 100644
index 0000000..ef8fc12
--- /dev/null
+++ b/test/pending/run/reify_callccinterpreter.check
@@ -0,0 +1,3 @@
+42
+wrong
+5
diff --git a/test/pending/run/reify_callccinterpreter.scala b/test/pending/run/reify_callccinterpreter.scala
new file mode 100644
index 0000000..d9f7736
--- /dev/null
+++ b/test/pending/run/reify_callccinterpreter.scala
@@ -0,0 +1,88 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ type Answer = Value;
+
+ /**
+ * A continuation monad.
+ */
+ case class M[A](in: (A => Answer) => Answer) {
+ def bind[B](k: A => M[B]) = M[B](c => in (a => k(a) in c))
+ def map[B](f: A => B): M[B] = bind(x => unitM(f(x)))
+ def flatMap[B](f: A => M[B]): M[B] = bind(f)
+ }
+
+ def unitM[A](a: A) = M[A](c => c(a))
+
+ def id[A] = (x: A) => x
+ def showM(m: M[Value]): String = (m in id).toString()
+
+ def callCC[A](h: (A => M[A]) => M[A]) =
+ M[A](c => h(a => M[A](d => c(a))) in c)
+
+ type Name = String
+
+ trait Term
+ case class Var(x: Name) extends Term
+ case class Con(n: Int) extends Term
+ case class Add(l: Term, r: Term) extends Term
+ case class Lam(x: Name, body: Term) extends Term
+ case class App(fun: Term, arg: Term) extends Term
+ case class Ccc(x: Name, t: Term) extends Term
+
+ trait Value
+ case object Wrong extends Value {
+ override def toString() = "wrong"
+ }
+ case class Num(n: Int) extends Value {
+ override def toString() = n.toString()
+ }
+ case class Fun(f: Value => M[Value]) extends Value {
+ override def toString() = "<function>"
+ }
+
+ type Environment = List[Pair[Name, Value]];
+
+ def lookup(x: Name, e: Environment): M[Value] = e match {
+ case List() => unitM(Wrong)
+ case Pair(y, b) :: e1 => if (x == y) unitM(b) else lookup(x, e1)
+ }
+
+ def add(a: Value, b: Value): M[Value] = Pair(a, b) match {
+ case Pair(Num(m), Num(n)) => unitM(Num(m + n))
+ case _ => unitM(Wrong)
+ }
+
+ def apply(a: Value, b: Value): M[Value] = a match {
+ case Fun(k) => k(b)
+ case _ => unitM(Wrong)
+ }
+
+ def interp(t: Term, e: Environment): M[Value] = t match {
+ case Var(x) => lookup(x, e)
+ case Con(n) => unitM(Num(n))
+ case Add(l, r) => for (a <- interp(l, e);
+ b <- interp(r, e);
+ c <- add(a, b))
+ yield c
+ case Lam(x, t) => unitM(Fun(a => interp(t, Pair(x, a) :: e)))
+ case App(f, t) => for (a <- interp(f, e);
+ b <- interp(t, e);
+ c <- apply(a, b))
+ yield c
+ case Ccc(x, t) => callCC(k => interp(t, Pair(x, Fun(k)) :: e))
+ }
+
+ def test(t: Term): String = showM(interp(t, List()))
+
+ val term0 = App(Lam("x", Add(Var("x"), Var("x"))), Add(Con(10), Con(11)))
+ val term1 = App(Con(1), Con(2))
+ val term2 = Add(Con(1), Ccc("k", Add(Con(2), App(Var("k"), Con(4)))))
+
+ println(test(term0))
+ println(test(term1))
+ println(test(term2))
+ }.eval
+}
diff --git a/test/pending/run/reify_closure2b.check b/test/pending/run/reify_closure2b.check
new file mode 100644
index 0000000..c1f3abd
--- /dev/null
+++ b/test/pending/run/reify_closure2b.check
@@ -0,0 +1,2 @@
+11
+12
diff --git a/test/pending/run/reify_closure2b.scala b/test/pending/run/reify_closure2b.scala
new file mode 100644
index 0000000..0f126c8
--- /dev/null
+++ b/test/pending/run/reify_closure2b.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ def foo(y: Int): Int => Int = {
+ class Foo(y: Int) {
+ val fun = reify{(x: Int) => {
+ x + y
+ }}
+ }
+
+ val toolbox = cm.mkToolBox()
+ val dyn = toolbox.eval(new Foo(y).fun.tree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(1)(10))
+ println(foo(2)(10))
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_closure3b.check b/test/pending/run/reify_closure3b.check
new file mode 100644
index 0000000..c1f3abd
--- /dev/null
+++ b/test/pending/run/reify_closure3b.check
@@ -0,0 +1,2 @@
+11
+12
diff --git a/test/pending/run/reify_closure3b.scala b/test/pending/run/reify_closure3b.scala
new file mode 100644
index 0000000..54ac52b
--- /dev/null
+++ b/test/pending/run/reify_closure3b.scala
@@ -0,0 +1,23 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ def foo(y: Int): Int => Int = {
+ class Foo(y: Int) {
+ def y1 = y
+
+ val fun = reify{(x: Int) => {
+ x + y1
+ }}
+ }
+
+ val toolbox = cm.mkToolBox()
+ val dyn = toolbox.eval(new Foo(y).fun.tree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(1)(10))
+ println(foo(2)(10))
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_closure4b.check b/test/pending/run/reify_closure4b.check
new file mode 100644
index 0000000..c1f3abd
--- /dev/null
+++ b/test/pending/run/reify_closure4b.check
@@ -0,0 +1,2 @@
+11
+12
diff --git a/test/pending/run/reify_closure4b.scala b/test/pending/run/reify_closure4b.scala
new file mode 100644
index 0000000..34f707e
--- /dev/null
+++ b/test/pending/run/reify_closure4b.scala
@@ -0,0 +1,23 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ def foo(y: Int): Int => Int = {
+ class Foo(y: Int) {
+ val y1 = y
+
+ val fun = reify{(x: Int) => {
+ x + y1
+ }}
+ }
+
+ val toolbox = cm.mkToolBox()
+ val dyn = toolbox.eval(new Foo(y).fun.tree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(1)(10))
+ println(foo(2)(10))
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_closure5b.check b/test/pending/run/reify_closure5b.check
new file mode 100644
index 0000000..df9e19c
--- /dev/null
+++ b/test/pending/run/reify_closure5b.check
@@ -0,0 +1,2 @@
+13
+14
diff --git a/test/pending/run/reify_closure5b.scala b/test/pending/run/reify_closure5b.scala
new file mode 100644
index 0000000..0e506bf
--- /dev/null
+++ b/test/pending/run/reify_closure5b.scala
@@ -0,0 +1,21 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ def foo[T](ys: List[T]): Int => Int = {
+ class Foo[T](ys: List[T]) {
+ val fun = reify{(x: Int) => {
+ x + ys.length
+ }}
+ }
+
+ val toolbox = cm.mkToolBox()
+ val dyn = toolbox.eval(new Foo(ys).fun.tree)
+ dyn.asInstanceOf[Int => Int]
+ }
+
+ println(foo(List(1, 2, 3))(10))
+ println(foo(List(1, 2, 3, 4))(10))
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_closure9a.check b/test/pending/run/reify_closure9a.check
new file mode 100644
index 0000000..9a03714
--- /dev/null
+++ b/test/pending/run/reify_closure9a.check
@@ -0,0 +1 @@
+10
\ No newline at end of file
diff --git a/test/pending/run/reify_closure9a.scala b/test/pending/run/reify_closure9a.scala
new file mode 100644
index 0000000..f39ff1e
--- /dev/null
+++ b/test/pending/run/reify_closure9a.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ def foo(y: Int) = {
+ class Foo(val y: Int) {
+ def fun = reify{y}
+ }
+
+ val toolbox = cm.mkToolBox()
+ val dyn = toolbox.eval(new Foo(y).fun.tree)
+ dyn.asInstanceOf[Int]
+ }
+
+ println(foo(10))
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_closure9b.check b/test/pending/run/reify_closure9b.check
new file mode 100644
index 0000000..9a03714
--- /dev/null
+++ b/test/pending/run/reify_closure9b.check
@@ -0,0 +1 @@
+10
\ No newline at end of file
diff --git a/test/pending/run/reify_closure9b.scala b/test/pending/run/reify_closure9b.scala
new file mode 100644
index 0000000..a6920b4
--- /dev/null
+++ b/test/pending/run/reify_closure9b.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ def foo(y: Int) = {
+ class Foo(y: Int) {
+ def fun = reify{y}
+ }
+
+ val toolbox = cm.mkToolBox()
+ val dyn = toolbox.eval(new Foo(y).fun.tree)
+ dyn.asInstanceOf[Int]
+ }
+
+ println(foo(10))
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_closures11.check b/test/pending/run/reify_closures11.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/pending/run/reify_closures11.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/pending/run/reify_closures11.scala b/test/pending/run/reify_closures11.scala
new file mode 100644
index 0000000..9156208
--- /dev/null
+++ b/test/pending/run/reify_closures11.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ def fun() = {
+ def z() = 2
+ reify{z}
+ }
+
+ val toolbox = cm.mkToolBox()
+ val dyn = toolbox.eval(fun().tree)
+ val foo = dyn.asInstanceOf[Int]
+ println(foo)
+}
\ No newline at end of file
diff --git a/test/files/run/syncchannel.check b/test/pending/run/reify_gadts.check
similarity index 100%
rename from test/files/run/syncchannel.check
rename to test/pending/run/reify_gadts.check
diff --git a/test/pending/run/reify_gadts.scala b/test/pending/run/reify_gadts.scala
new file mode 100644
index 0000000..582c080
--- /dev/null
+++ b/test/pending/run/reify_gadts.scala
@@ -0,0 +1,39 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ /* The syntax tree of a toy language */
+ abstract class Term[T]
+
+ /* An integer literal */
+ case class Lit(x: Int) extends Term[Int]
+
+ /* Successor of a number */
+ case class Succ(t: Term[Int]) extends Term[Int]
+
+ /* Is 't' equal to zero? */
+ case class IsZero(t: Term[Int]) extends Term[Boolean]
+
+ /* An 'if' expression. */
+ case class If[T](c: Term[Boolean],
+ t1: Term[T],
+ t2: Term[T]) extends Term[T]
+
+ /** A type-safe eval function. The right-hand sides can
+ * make use of the fact that 'T' is a more precise type,
+ * constraint by the pattern type.
+ */
+ def eval[T](t: Term[T]): T = t match {
+ case Lit(n) => n
+
+ // the right hand side makes use of the fact
+ // that T = Int and so it can use '+'
+ case Succ(u) => eval(u) + 1
+ case IsZero(u) => eval(u) == 0
+ case If(c, u1, u2) => eval(if (eval(c)) u1 else u2)
+ }
+ println(
+ eval(If(IsZero(Lit(1)), Lit(41), Succ(Lit(41)))))
+ }.eval
+}
diff --git a/test/pending/run/reify_implicits-new.check b/test/pending/run/reify_implicits-new.check
new file mode 100644
index 0000000..e3aeb20
--- /dev/null
+++ b/test/pending/run/reify_implicits-new.check
@@ -0,0 +1 @@
+x = List(1, 2, 3, 4)
diff --git a/test/pending/run/reify_implicits-new.scala b/test/pending/run/reify_implicits-new.scala
new file mode 100644
index 0000000..42a1dee
--- /dev/null
+++ b/test/pending/run/reify_implicits-new.scala
@@ -0,0 +1,16 @@
+import scala.reflect.{ClassTag, classTag}
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ implicit def arrayWrapper[A : ClassTag](x: Array[A]) =
+ new {
+ def sort(p: (A, A) => Boolean) = {
+ util.Sorting.stableSort(x, p); x
+ }
+ }
+ val x = Array(2, 3, 1, 4)
+ println("x = "+ x.sort((x: Int, y: Int) => x < y).toList)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_implicits-old.check b/test/pending/run/reify_implicits-old.check
new file mode 100644
index 0000000..e3aeb20
--- /dev/null
+++ b/test/pending/run/reify_implicits-old.check
@@ -0,0 +1 @@
+x = List(1, 2, 3, 4)
diff --git a/test/pending/run/reify_implicits-old.scala b/test/pending/run/reify_implicits-old.scala
new file mode 100644
index 0000000..8ff256d
--- /dev/null
+++ b/test/pending/run/reify_implicits-old.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ implicit def arrayWrapper[A : ClassManifest](x: Array[A]) =
+ new {
+ def sort(p: (A, A) => Boolean) = {
+ util.Sorting.stableSort(x, p); x
+ }
+ }
+ val x = Array(2, 3, 1, 4)
+ println("x = "+ x.sort((x: Int, y: Int) => x < y).toList)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_07.scala b/test/pending/run/reify_newimpl_07.scala
new file mode 100644
index 0000000..b6886b8
--- /dev/null
+++ b/test/pending/run/reify_newimpl_07.scala
@@ -0,0 +1,14 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ class C(val y: Int) {
+ val code = reify {
+ reify{y}.splice
+ }
+ }
+
+ println(new C(2).code.eval)
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_08.scala b/test/pending/run/reify_newimpl_08.scala
new file mode 100644
index 0000000..6caa33f
--- /dev/null
+++ b/test/pending/run/reify_newimpl_08.scala
@@ -0,0 +1,16 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val code = reify {
+ class C(val y: Int) {
+ val code = reify {
+ reify{y}.splice
+ }
+ }
+
+ new C(2).code.splice
+ }
+
+ println(code.eval)
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_09.scala b/test/pending/run/reify_newimpl_09.scala
new file mode 100644
index 0000000..27fbd37
--- /dev/null
+++ b/test/pending/run/reify_newimpl_09.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ type T = Int
+ val code = reify {
+ List[T](2)
+ }
+ println(code.eval)
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_09a.scala b/test/pending/run/reify_newimpl_09a.scala
new file mode 100644
index 0000000..27fbd37
--- /dev/null
+++ b/test/pending/run/reify_newimpl_09a.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ type T = Int
+ val code = reify {
+ List[T](2)
+ }
+ println(code.eval)
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_09b.scala b/test/pending/run/reify_newimpl_09b.scala
new file mode 100644
index 0000000..9e86dd5
--- /dev/null
+++ b/test/pending/run/reify_newimpl_09b.scala
@@ -0,0 +1,14 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ type U = Int
+ type T = U
+ val code = reify {
+ List[T](2)
+ }
+ println(code.eval)
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_09c.scala b/test/pending/run/reify_newimpl_09c.scala
new file mode 100644
index 0000000..6bde363
--- /dev/null
+++ b/test/pending/run/reify_newimpl_09c.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ def foo[W] = {
+ type U = W
+ type T = U
+ reify {
+ List[T](2)
+ }
+ }
+ val code = foo[Int]
+ println(code.tree.freeTypes)
+ val W = code.tree.freeTypes(2)
+ cm.mkToolBox().eval(code.tree, Map(W -> definitions.IntTpe))
+ println(code.eval)
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_10.scala b/test/pending/run/reify_newimpl_10.scala
new file mode 100644
index 0000000..791e529
--- /dev/null
+++ b/test/pending/run/reify_newimpl_10.scala
@@ -0,0 +1,14 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ type T = Int
+ implicit val tt = implicitly[TypeTag[String]].asInstanceOf[TypeTag[T]] // this "mistake" is made for a reason!
+ val code = reify {
+ List[T](2)
+ }
+ println(code.eval)
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_16.scala b/test/pending/run/reify_newimpl_16.scala
new file mode 100644
index 0000000..a0cadf4
--- /dev/null
+++ b/test/pending/run/reify_newimpl_16.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ class C {
+ type T = Int
+ val code = reify {
+ List[T](2)
+ }
+ println(code.eval)
+ }
+
+ new C
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_17.scala b/test/pending/run/reify_newimpl_17.scala
new file mode 100644
index 0000000..8fbcd52
--- /dev/null
+++ b/test/pending/run/reify_newimpl_17.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ class C[U] {
+ type T = U
+ val code = reify {
+ List[T](2.asInstanceOf[T])
+ }
+ println(code.eval)
+ }
+
+ try {
+ new C[Int]
+ } catch {
+ case ex: Throwable =>
+ println(ex)
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_28.scala b/test/pending/run/reify_newimpl_28.scala
new file mode 100644
index 0000000..524a110
--- /dev/null
+++ b/test/pending/run/reify_newimpl_28.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ object C {
+ type T = Int
+ val code = reify {
+ List[T](2)
+ }
+ println(code.eval)
+ }
+
+ C
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_32.scala b/test/pending/run/reify_newimpl_32.scala
new file mode 100644
index 0000000..095e59d
--- /dev/null
+++ b/test/pending/run/reify_newimpl_32.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ object C {
+ type T = Int
+ val code = reify {
+ List[C.T](2)
+ }
+ println(code.eval)
+ }
+
+ C
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_34.scala b/test/pending/run/reify_newimpl_34.scala
new file mode 100644
index 0000000..a0a575e
--- /dev/null
+++ b/test/pending/run/reify_newimpl_34.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ {
+ object C {
+ type T = Int
+ lazy val c = C
+ val code = reify {
+ List[c.T](2)
+ }
+ println(code.eval)
+ }
+
+ C
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_46.scala b/test/pending/run/reify_newimpl_46.scala
new file mode 100644
index 0000000..d063be0
--- /dev/null
+++ b/test/pending/run/reify_newimpl_46.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ class C[T[_] >: Null] {
+ val code = reify{val x: T[String] = null; println("ima worx"); x}.tree
+ println(code.freeTypes)
+ val T = code.freeTypes(0)
+ cm.mkToolBox().eval(code, Map(T -> definitions.ListClass.asType))
+ }
+
+ new C[List]
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_newimpl_53.scala b/test/pending/run/reify_newimpl_53.scala
new file mode 100644
index 0000000..54fa4be
--- /dev/null
+++ b/test/pending/run/reify_newimpl_53.scala
@@ -0,0 +1,18 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ class C[T >: Null] {
+ val code = reify{
+ val tt = implicitly[TypeTag[T]]
+ println("mah typetag is: %s".format(tt))
+ }.tree
+ println(code.freeTypes)
+ val T = code.freeTypes(0)
+ cm.mkToolBox().eval(code, Map(T -> definitions.StringClass.asType))
+ }
+
+ new C[String]
+}
\ No newline at end of file
diff --git a/test/pending/run/reify_simpleinterpreter.check b/test/pending/run/reify_simpleinterpreter.check
new file mode 100644
index 0000000..4344dc9
--- /dev/null
+++ b/test/pending/run/reify_simpleinterpreter.check
@@ -0,0 +1,2 @@
+42
+wrong
diff --git a/test/pending/run/reify_simpleinterpreter.scala b/test/pending/run/reify_simpleinterpreter.scala
new file mode 100644
index 0000000..6cf87ea
--- /dev/null
+++ b/test/pending/run/reify_simpleinterpreter.scala
@@ -0,0 +1,75 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ reify {
+ case class M[A](value: A) {
+ def bind[B](k: A => M[B]): M[B] = k(value)
+ def map[B](f: A => B): M[B] = bind(x => unitM(f(x)))
+ def flatMap[B](f: A => M[B]): M[B] = bind(f)
+ }
+
+ def unitM[A](a: A): M[A] = M(a)
+
+ def showM(m: M[Value]): String = m.value.toString();
+
+ type Name = String
+
+ trait Term;
+ case class Var(x: Name) extends Term
+ case class Con(n: Int) extends Term
+ case class Add(l: Term, r: Term) extends Term
+ case class Lam(x: Name, body: Term) extends Term
+ case class App(fun: Term, arg: Term) extends Term
+
+ trait Value
+ case object Wrong extends Value {
+ override def toString() = "wrong"
+ }
+ case class Num(n: Int) extends Value {
+ override def toString() = n.toString()
+ }
+ case class Fun(f: Value => M[Value]) extends Value {
+ override def toString() = "<function>"
+ }
+
+ type Environment = List[Pair[Name, Value]]
+
+ def lookup(x: Name, e: Environment): M[Value] = e match {
+ case List() => unitM(Wrong)
+ case Pair(y, b) :: e1 => if (x == y) unitM(b) else lookup(x, e1)
+ }
+
+ def add(a: Value, b: Value): M[Value] = Pair(a, b) match {
+ case Pair(Num(m), Num(n)) => unitM(Num(m + n))
+ case _ => unitM(Wrong)
+ }
+
+ def apply(a: Value, b: Value): M[Value] = a match {
+ case Fun(k) => k(b)
+ case _ => unitM(Wrong)
+ }
+
+ def interp(t: Term, e: Environment): M[Value] = t match {
+ case Var(x) => lookup(x, e)
+ case Con(n) => unitM(Num(n))
+ case Add(l, r) => for (a <- interp(l, e);
+ b <- interp(r, e);
+ c <- add(a, b))
+ yield c
+ case Lam(x, t) => unitM(Fun(a => interp(t, Pair(x, a) :: e)))
+ case App(f, t) => for (a <- interp(f, e);
+ b <- interp(t, e);
+ c <- apply(a, b))
+ yield c
+ }
+
+ def test(t: Term): String =
+ showM(interp(t, List()))
+
+ val term0 = App(Lam("x", Add(Var("x"), Var("x"))), Add(Con(10), Con(11)))
+ val term1 = App(Con(1), Con(2))
+
+ println(test(term0))
+ println(test(term1))
+ }.eval
+}
diff --git a/test/pending/run/structural-types-vs-anon-classes.scala b/test/pending/run/structural-types-vs-anon-classes.scala
new file mode 100644
index 0000000..23410e3
--- /dev/null
+++ b/test/pending/run/structural-types-vs-anon-classes.scala
@@ -0,0 +1,17 @@
+object Test {
+ class Arm
+ class Leg
+ class Tail
+ class Monkey(arms: List[Arm], legs :List[Leg], tail: Tail)
+
+ def makeAwesomeMonkey(arms: List[Arm], legs: List[Leg], tail: Tail) = {
+ object m extends Monkey(arms, legs, tail) {
+ def beAwesome () = "I can fly! I can fly!"
+ }
+ m
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(makeAwesomeMonkey(Nil, Nil, new Tail) beAwesome)
+ }
+}
diff --git a/test/pending/run/subarray.check b/test/pending/run/subarray.check
deleted file mode 100644
index 814f4a4..0000000
--- a/test/pending/run/subarray.check
+++ /dev/null
@@ -1,2 +0,0 @@
-one
-two
diff --git a/test/pending/run/t0446.check b/test/pending/run/t0446.check
deleted file mode 100644
index 7c41a48..0000000
--- a/test/pending/run/t0446.check
+++ /dev/null
@@ -1,2 +0,0 @@
-List(1)
-List(3)
diff --git a/test/pending/run/bug2318.check b/test/pending/run/t2318.check
similarity index 100%
rename from test/pending/run/bug2318.check
rename to test/pending/run/t2318.check
diff --git a/test/pending/run/t2318.scala b/test/pending/run/t2318.scala
new file mode 100644
index 0000000..e42cbb9
--- /dev/null
+++ b/test/pending/run/t2318.scala
@@ -0,0 +1,38 @@
+import java.security._
+
+object Test {
+ trait Bar { def bar: Unit }
+
+ object Mgr extends SecurityManager {
+ override def checkPermission(perm: Permission) = perm match {
+ case _: java.lang.RuntimePermission => ()
+ case _: java.io.FilePermission => ()
+ case x: java.security.AccessControlException if x.getName contains ".networkaddress." => () // generality ftw
+ case _ => super.checkPermission(perm)
+ }
+ }
+
+ def t1() = {
+ val p = Runtime.getRuntime().exec("ls");
+ type Destroyable = { def destroy() : Unit }
+ def doDestroy( obj : Destroyable ) : Unit = obj.destroy();
+ doDestroy( p );
+ }
+ def t2() = {
+ System.setSecurityManager(Mgr)
+
+ val b = new Bar { def bar = println("bar") }
+ b.bar
+
+ val structural = b.asInstanceOf[{ def bar: Unit }]
+ structural.bar
+ }
+
+ def main(args: Array[String]) {
+ // figuring this will otherwise break on windows
+ try t1()
+ catch { case _: java.io.IOException => () }
+
+ t2()
+ }
+}
diff --git a/test/pending/run/bug2364.check b/test/pending/run/t2364.check
similarity index 100%
rename from test/pending/run/bug2364.check
rename to test/pending/run/t2364.check
diff --git a/test/pending/run/bug2364.scala b/test/pending/run/t2364.scala
similarity index 100%
rename from test/pending/run/bug2364.scala
rename to test/pending/run/t2364.scala
diff --git a/test/pending/run/t2897.scala b/test/pending/run/t2897.scala
new file mode 100644
index 0000000..40fd3c2
--- /dev/null
+++ b/test/pending/run/t2897.scala
@@ -0,0 +1,22 @@
+class A {
+ def f1(t: String) = {
+ trait T {
+ def xs = Nil map (_ => t)
+ }
+ }
+ def f2(t: String) = {
+ def xs = Nil map (_ => t)
+ }
+ def f3(t: String) = {
+ var t1 = 5
+ trait T {
+ def xs = { t1 = 10 ; t }
+ }
+ }
+ def f4() = {
+ var u = 5
+ trait T {
+ def xs = Nil map (_ => u = 10)
+ }
+ }
+}
diff --git a/test/pending/run/t3609.scala b/test/pending/run/t3609.scala
old mode 100644
new mode 100755
diff --git a/test/pending/run/t3669.scala b/test/pending/run/t3669.scala
new file mode 100644
index 0000000..c60ba98
--- /dev/null
+++ b/test/pending/run/t3669.scala
@@ -0,0 +1,22 @@
+trait MyTrait[T <: { var id: U }, U] {
+ def test(t: T): T = {
+ val v: U = t.id
+ t.id = v
+ t
+ }
+}
+
+class C (var id: String){
+ // uncommenting this fixes it
+ // def id_=(x: AnyRef) { id = x.asInstanceOf[String] }
+}
+
+class Test extends MyTrait[C, String]
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val t = new Test()
+ val c1 = new C("a")
+ val c2 = t.test(c1)
+ }
+}
diff --git a/test/pending/run/t3832.scala b/test/pending/run/t3832.scala
new file mode 100644
index 0000000..f081d5b
--- /dev/null
+++ b/test/pending/run/t3832.scala
@@ -0,0 +1,7 @@
+class Test {
+ def this(un: Int) = {
+ this()
+ def test(xs: List[Int]) = xs map (x => x)
+ ()
+ }
+}
\ No newline at end of file
diff --git a/test/files/run/t3897.check b/test/pending/run/t3897.check
similarity index 100%
rename from test/files/run/t3897.check
rename to test/pending/run/t3897.check
diff --git a/test/files/run/t3897/J_2.java b/test/pending/run/t3897/J_2.java
similarity index 100%
rename from test/files/run/t3897/J_2.java
rename to test/pending/run/t3897/J_2.java
diff --git a/test/files/run/t3897/a_1.scala b/test/pending/run/t3897/a_1.scala
similarity index 100%
rename from test/files/run/t3897/a_1.scala
rename to test/pending/run/t3897/a_1.scala
diff --git a/test/files/run/t3897/a_2.scala b/test/pending/run/t3897/a_2.scala
similarity index 100%
rename from test/files/run/t3897/a_2.scala
rename to test/pending/run/t3897/a_2.scala
diff --git a/test/pending/run/t3899.check b/test/pending/run/t3899.check
new file mode 100644
index 0000000..c317608
--- /dev/null
+++ b/test/pending/run/t3899.check
@@ -0,0 +1,4 @@
+a,b
+a,b
+a,b
+a,b
diff --git a/test/pending/run/t3899/Base_1.java b/test/pending/run/t3899/Base_1.java
new file mode 100644
index 0000000..114cc0b
--- /dev/null
+++ b/test/pending/run/t3899/Base_1.java
@@ -0,0 +1,5 @@
+public class Base_1 {
+ public String[] varargs1(String... as) {
+ return as;
+ }
+}
diff --git a/test/pending/run/t3899/Derived_2.scala b/test/pending/run/t3899/Derived_2.scala
new file mode 100644
index 0000000..bb4e537
--- /dev/null
+++ b/test/pending/run/t3899/Derived_2.scala
@@ -0,0 +1,30 @@
+trait T extends Base_1 {
+ def t1(as: String*): Array[String] = {
+ varargs1(as: _*)
+ }
+ def t2(as: String*): Array[String] = {
+ // This is the bug reported in the ticket.
+ super.varargs1(as: _*)
+ }
+}
+
+class C extends Base_1 {
+ def c1(as: String*): Array[String] = {
+ varargs1(as: _*)
+ }
+ def c2(as: String*): Array[String] = {
+ super.varargs1(as: _*)
+ }
+}
+
+
+object Test extends App {
+ val t = new T {}
+ println(t.t1("a", "b").mkString(","))
+ println(t.t2("a", "b").mkString(","))
+
+ val c = new C {}
+ println(c.c1("a", "b").mkString(","))
+ println(c.c2("a", "b").mkString(","))
+
+}
diff --git a/test/pending/run/t4098.scala b/test/pending/run/t4098.scala
new file mode 100644
index 0000000..b74ccf9
--- /dev/null
+++ b/test/pending/run/t4098.scala
@@ -0,0 +1,9 @@
+class A(a: Any) {
+ def this() = { this(b) ; def b = new {} }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ new A ("")
+ }
+}
diff --git a/test/pending/run/t4283/IllegalAccess.scala b/test/pending/run/t4283/IllegalAccess.scala
deleted file mode 100644
index 33039c9..0000000
--- a/test/pending/run/t4283/IllegalAccess.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-package other
-
-object IllegalAccess {
- def main(args: Array[String]) {
- val x = (new test.ScalaBipp).make.get.asInstanceOf[test.ScalaBipp].f()
- println(x)
- val y = (new test.ScalaBipp).make.get.f()
- println(y)
- val u = (new test.ScalaBipp).make.get.asInstanceOf[test.ScalaBipp].t
- println(u)
- val v = (new test.ScalaBipp).make.get.t
- println(v)
- val sb: test.ScalaBipp = (new test.ScalaBipp).make.get
- val z = sb.t
- println(z)
- }
-}
diff --git a/test/pending/run/bug4291.check b/test/pending/run/t4291.check
similarity index 100%
rename from test/pending/run/bug4291.check
rename to test/pending/run/t4291.check
diff --git a/test/pending/run/bug4291.scala b/test/pending/run/t4291.scala
similarity index 100%
rename from test/pending/run/bug4291.scala
rename to test/pending/run/t4291.scala
diff --git a/test/pending/run/t4460.scala b/test/pending/run/t4460.scala
new file mode 100644
index 0000000..324e2f5
--- /dev/null
+++ b/test/pending/run/t4460.scala
@@ -0,0 +1,12 @@
+trait A
+
+class B(val x: Int) {
+ self: A =>
+
+ def this() = this()
+}
+
+object Test extends B(2) with A {
+ def main(args: Array[String]) { }
+}
+
diff --git a/test/pending/run/t4511.scala b/test/pending/run/t4511.scala
new file mode 100644
index 0000000..58d4e0c
--- /dev/null
+++ b/test/pending/run/t4511.scala
@@ -0,0 +1,10 @@
+class Interval[@specialized T](val high: T)
+class Node[@specialized T](val interval: Interval[T]) {
+ val x1 = Some(interval.high)
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ new Node(new Interval(5)).x1
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/t4511b.scala b/test/pending/run/t4511b.scala
new file mode 100644
index 0000000..3337fb3
--- /dev/null
+++ b/test/pending/run/t4511b.scala
@@ -0,0 +1,25 @@
+import scala.{specialized => spec}
+
+class Interval[@spec(Int) T](high:T)
+
+class X1[@spec(Int) T](interval:Interval[T]) { val x = interval }
+class Y1[@spec(Int) T](interval:Interval[T]) { val y = Some(interval) }
+
+class X2[T](val interval:Interval[T]) { val x = interval }
+class Y2[T](val interval:Interval[T]) { val y = Some(interval) }
+
+class X3[@spec(Int) T](val interval:Interval[T]) { val x = interval }
+class Y3[@spec(Int) T](val interval:Interval[T]) { val y = Some(interval) }
+
+object Test {
+ def tryit(o: => Any) = println(try { "ok: " + o.getClass.getName } catch { case e => "FAIL: " + e + "\n" + e.getStackTrace.mkString("\n ") })
+
+ def main(args: Array[String]) {
+ tryit(new X1(new Interval(3)))
+ tryit(new X2(new Interval(3)))
+ tryit(new X3(new Interval(3)))
+ tryit(new Y1(new Interval(3)))
+ tryit(new Y2(new Interval(3)))
+ tryit(new Y3(new Interval(3)))
+ }
+}
diff --git a/test/pending/run/t4574.scala b/test/pending/run/t4574.scala
new file mode 100644
index 0000000..1dde496
--- /dev/null
+++ b/test/pending/run/t4574.scala
@@ -0,0 +1,13 @@
+object Test {
+ val xs: List[(Int, Int)] = List((2, 2), null)
+
+ def expectMatchError[T](msg: String)(body: => T) {
+ try { body ; assert(false, "Should not succeed.") }
+ catch { case _: MatchError => println(msg) }
+ }
+
+ def main(args: Array[String]): Unit = {
+ expectMatchError("I hereby refute null!")( for ((x, y) <- xs) yield x )
+ expectMatchError("I denounce null as unListLike!")( (null: Any) match { case List(_*) => true } )
+ }
+}
diff --git a/test/pending/run/t4713/JavaAnnots.java b/test/pending/run/t4713/JavaAnnots.java
new file mode 100644
index 0000000..29541b1
--- /dev/null
+++ b/test/pending/run/t4713/JavaAnnots.java
@@ -0,0 +1,14 @@
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.util.List;
+
+public abstract class JavaAnnots {
+ @Retention(RetentionPolicy.RUNTIME)
+ @Target(ElementType.FIELD)
+ public @interface Book {
+ }
+
+ public static final List<String> Book = null;
+}
\ No newline at end of file
diff --git a/test/pending/run/t4713/Problem.scala b/test/pending/run/t4713/Problem.scala
new file mode 100644
index 0000000..e87f657
--- /dev/null
+++ b/test/pending/run/t4713/Problem.scala
@@ -0,0 +1,5 @@
+object Problem {
+ def d() {
+ val v: java.util.List[String] = JavaAnnots.Book
+ }
+}
diff --git a/test/pending/run/t4728.check b/test/pending/run/t4728.check
new file mode 100644
index 0000000..7a754f4
--- /dev/null
+++ b/test/pending/run/t4728.check
@@ -0,0 +1,2 @@
+1
+2
\ No newline at end of file
diff --git a/test/pending/run/t4728.scala b/test/pending/run/t4728.scala
new file mode 100644
index 0000000..36f7860
--- /dev/null
+++ b/test/pending/run/t4728.scala
@@ -0,0 +1,11 @@
+class X
+class Y extends X
+object Ambiguous {
+ def f(x: X) = 1
+ def f(ys: Y*) = 2
+}
+
+object Test extends App {
+ println(Ambiguous.f(new X))
+ println(Ambiguous.f(new Y))
+}
\ No newline at end of file
diff --git a/test/pending/run/t4971.scala b/test/pending/run/t4971.scala
new file mode 100644
index 0000000..c9b6d6f
--- /dev/null
+++ b/test/pending/run/t4971.scala
@@ -0,0 +1,16 @@
+trait A[@specialized(Int) K, @specialized(Double) V] {
+ def doStuff(k: K, v: V): Unit = sys.error("I am overridden, you cannot call me")
+}
+
+trait B[@specialized(Double) V] extends A[Int, V] {
+ override def doStuff(k: Int, v: V): Unit = println("Hi - I'm calling doStuff in B")
+}
+
+object Test {
+ def main(args: Array[String]): Unit = delegate(new B[Double]() {}, 1, 0.1)
+
+ def delegate[@specialized(Int) K, @specialized(Double) V](a: A[K, V], k: K, v: V) {
+ a.doStuff(k, v)
+ }
+}
+
diff --git a/test/pending/run/t4996.scala b/test/pending/run/t4996.scala
new file mode 100644
index 0000000..58a8fe1
--- /dev/null
+++ b/test/pending/run/t4996.scala
@@ -0,0 +1,15 @@
+object SpecializationAbstractOverride {
+
+ trait A[@specialized(Int) T] { def foo(t: T) }
+ trait B extends A[Int] { def foo(t: Int) { println("B.foo") } }
+ trait M extends B { abstract override def foo(t: Int) { super.foo(t) ; println ("M.foo") } }
+ object C extends B with M
+
+ object D extends B { override def foo(t: Int) { super.foo(t); println("M.foo") } }
+
+ def main(args: Array[String]) {
+ D.foo(42) // OK, prints B.foo M.foo
+ C.foo(42) // StackOverflowError
+ }
+}
+
diff --git a/test/pending/run/t5258b.check b/test/pending/run/t5258b.check
new file mode 100644
index 0000000..283b422
--- /dev/null
+++ b/test/pending/run/t5258b.check
@@ -0,0 +1 @@
+TBI
\ No newline at end of file
diff --git a/test/pending/run/t5258b.scala b/test/pending/run/t5258b.scala
new file mode 100644
index 0000000..a280513
--- /dev/null
+++ b/test/pending/run/t5258b.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ class C
+ println(classOf[C])
+ }.eval
+}
\ No newline at end of file
diff --git a/test/pending/run/t5258c.check b/test/pending/run/t5258c.check
new file mode 100644
index 0000000..283b422
--- /dev/null
+++ b/test/pending/run/t5258c.check
@@ -0,0 +1 @@
+TBI
\ No newline at end of file
diff --git a/test/pending/run/t5258c.scala b/test/pending/run/t5258c.scala
new file mode 100644
index 0000000..4a65669
--- /dev/null
+++ b/test/pending/run/t5258c.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ object E extends Enumeration { val foo, bar = Value }
+ println(E.foo)
+ }.eval
+}
\ No newline at end of file
diff --git a/test/pending/run/t5284.scala b/test/pending/run/t5284.scala
new file mode 100644
index 0000000..b43afed
--- /dev/null
+++ b/test/pending/run/t5284.scala
@@ -0,0 +1,14 @@
+object Test {
+ def main(args:Array[String]) {
+ val a = Blarg(Array(1,2,3))
+ println(a.m((x:Int) => x+1))
+ }
+}
+
+object Blarg {
+ def apply[T:Manifest](a:Array[T]) = new Blarg(a)
+}
+class Blarg [@specialized T:Manifest](val a:Array[T]) {
+ def m[@specialized W>:T, at specialized S](f:W=>S) = f(a(0))
+}
+
diff --git a/test/pending/run/t5293-map.scala b/test/pending/run/t5293-map.scala
new file mode 100644
index 0000000..2707aed
--- /dev/null
+++ b/test/pending/run/t5293-map.scala
@@ -0,0 +1,88 @@
+
+
+
+import scala.collection.JavaConverters._
+
+
+
+object Test extends App {
+
+ def bench(label: String)(body: => Unit): Long = {
+ val start = System.nanoTime
+
+ 0.until(10).foreach(_ => body)
+
+ val end = System.nanoTime
+
+ //println("%s: %s ms".format(label, (end - start) / 1000.0 / 1000.0))
+
+ end - start
+ }
+
+ def benchJava(values: java.util.Map[Int, Int]) = {
+ bench("Java Map") {
+ val m = new java.util.HashMap[Int, Int]
+
+ m.putAll(values)
+ }
+ }
+
+ def benchScala(values: Iterable[(Int, Int)]) = {
+ bench("Scala Map") {
+ val m = new scala.collection.mutable.HashMap[Int, Int]
+
+ m ++= values
+ }
+ }
+
+ def benchScalaSorted(values: Iterable[(Int, Int)]) = {
+ bench("Scala Map sorted") {
+ val m = new scala.collection.mutable.HashMap[Int, Int]
+
+ m ++= values.toArray.sorted
+ }
+ }
+
+ def benchScalaPar(values: Iterable[(Int, Int)]) = {
+ bench("Scala ParMap") {
+ val m = new scala.collection.parallel.mutable.ParHashMap[Int, Int] map { x => x }
+
+ m ++= values
+ }
+ }
+
+ val total = 50000
+ val values = (0 until total) zip (0 until total)
+ val map = scala.collection.mutable.HashMap.empty[Int, Int]
+
+ map ++= values
+
+ // warmup
+ for (x <- 0 until 5) {
+ benchJava(map.asJava)
+ benchScala(map)
+ benchScalaPar(map)
+ benchJava(map.asJava)
+ benchScala(map)
+ benchScalaPar(map)
+ }
+
+ val javamap = benchJava(map.asJava)
+ val scalamap = benchScala(map)
+ val scalaparmap = benchScalaPar(map)
+
+ // println(javamap)
+ // println(scalamap)
+ // println(scalaparmap)
+
+ assert(scalamap < (javamap * 10), "scalamap: " + scalamap + " vs. javamap: " + javamap)
+ assert(scalaparmap < (javamap * 10), "scalaparmap: " + scalaparmap + " vs. javamap: " + javamap)
+}
+
+
+
+
+
+
+
+
diff --git a/test/pending/run/t5293.scala b/test/pending/run/t5293.scala
new file mode 100644
index 0000000..01ead45
--- /dev/null
+++ b/test/pending/run/t5293.scala
@@ -0,0 +1,83 @@
+
+
+
+import scala.collection.JavaConverters._
+
+
+
+object Test extends App {
+
+ def bench(label: String)(body: => Unit): Long = {
+ val start = System.nanoTime
+
+ 0.until(10).foreach(_ => body)
+
+ val end = System.nanoTime
+
+ //println("%s: %s ms".format(label, (end - start) / 1000.0 / 1000.0))
+
+ end - start
+ }
+
+ def benchJava(values: java.util.Collection[Int]) = {
+ bench("Java Set") {
+ val set = new java.util.HashSet[Int]
+
+ set.addAll(values)
+ }
+ }
+
+ def benchScala(values: Iterable[Int]) = {
+ bench("Scala Set") {
+ val set = new scala.collection.mutable.HashSet[Int]
+
+ set ++= values
+ }
+ }
+
+ def benchScalaSorted(values: Iterable[Int]) = {
+ bench("Scala Set sorted") {
+ val set = new scala.collection.mutable.HashSet[Int]
+
+ set ++= values.toArray.sorted
+ }
+ }
+
+ def benchScalaPar(values: Iterable[Int]) = {
+ bench("Scala ParSet") {
+ val set = new scala.collection.parallel.mutable.ParHashSet[Int] map { x => x }
+
+ set ++= values
+ }
+ }
+
+ val values = 0 until 50000
+ val set = scala.collection.mutable.HashSet.empty[Int]
+
+ set ++= values
+
+ // warmup
+ for (x <- 0 until 5) {
+ benchJava(set.asJava)
+ benchScala(set)
+ benchScalaPar(set)
+ benchJava(set.asJava)
+ benchScala(set)
+ benchScalaPar(set)
+ }
+
+ val javaset = benchJava(set.asJava)
+ val scalaset = benchScala(set)
+ val scalaparset = benchScalaPar(set)
+
+ assert(scalaset < (javaset * 8), "scalaset: " + scalaset + " vs. javaset: " + javaset)
+ assert(scalaparset < (javaset * 8), "scalaparset: " + scalaparset + " vs. javaset: " + javaset)
+}
+
+
+
+
+
+
+
+
diff --git a/test/pending/run/t5334_1.scala b/test/pending/run/t5334_1.scala
new file mode 100644
index 0000000..b75badb
--- /dev/null
+++ b/test/pending/run/t5334_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ class C { override def toString = "C" }
+ new C
+ }.eval
+}
\ No newline at end of file
diff --git a/test/pending/run/t5334_2.scala b/test/pending/run/t5334_2.scala
new file mode 100644
index 0000000..e082e3b
--- /dev/null
+++ b/test/pending/run/t5334_2.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ class C { override def toString() = "C" }
+ List((new C, new C))
+ }.eval
+}
\ No newline at end of file
diff --git a/test/files/jvm/bug680.check b/test/pending/run/t5418.check
similarity index 100%
copy from test/files/jvm/bug680.check
copy to test/pending/run/t5418.check
diff --git a/test/pending/run/t5418.scala b/test/pending/run/t5418.scala
new file mode 100644
index 0000000..e3cb20c
--- /dev/null
+++ b/test/pending/run/t5418.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ new Object().getClass
+ }.eval
+}
\ No newline at end of file
diff --git a/test/pending/run/t5427a.check b/test/pending/run/t5427a.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/pending/run/t5427a.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/pending/run/t5427a.scala b/test/pending/run/t5427a.scala
new file mode 100644
index 0000000..f7cd051
--- /dev/null
+++ b/test/pending/run/t5427a.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+
+object Foo { val bar = 2 }
+
+object Test extends App {
+ val tpe = getType(Foo)
+ val bar = tpe.nonPrivateMember(newTermName("bar"))
+ val value = getValue(Foo, bar)
+ println(value)
+}
\ No newline at end of file
diff --git a/test/pending/run/t5427b.check b/test/pending/run/t5427b.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/pending/run/t5427b.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/pending/run/t5427b.scala b/test/pending/run/t5427b.scala
new file mode 100644
index 0000000..e80bd12
--- /dev/null
+++ b/test/pending/run/t5427b.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+
+class Foo { val bar = 2 }
+
+object Test extends App {
+ val foo = new Foo
+ val tpe = getType(foo)
+ val bar = tpe.nonPrivateMember(newTermName("bar"))
+ val value = getValue(foo, bar)
+ println(value)
+}
\ No newline at end of file
diff --git a/test/pending/run/t5427c.check b/test/pending/run/t5427c.check
new file mode 100644
index 0000000..32c91ab
--- /dev/null
+++ b/test/pending/run/t5427c.check
@@ -0,0 +1 @@
+no public member
\ No newline at end of file
diff --git a/test/pending/run/t5427c.scala b/test/pending/run/t5427c.scala
new file mode 100644
index 0000000..7095158
--- /dev/null
+++ b/test/pending/run/t5427c.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+
+class Foo(bar: Int)
+
+object Test extends App {
+ val foo = new Foo(2)
+ val tpe = getType(foo)
+ val bar = tpe.nonPrivateMember(newTermName("bar"))
+ bar match {
+ case NoSymbol => println("no public member")
+ case _ => println("i'm screwed")
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/t5427d.check b/test/pending/run/t5427d.check
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/test/pending/run/t5427d.check
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/test/pending/run/t5427d.scala b/test/pending/run/t5427d.scala
new file mode 100644
index 0000000..f0cc07d
--- /dev/null
+++ b/test/pending/run/t5427d.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+
+class Foo(val bar: Int)
+
+object Test extends App {
+ val foo = new Foo(2)
+ val tpe = getType(foo)
+ val bar = tpe.nonPrivateMember(newTermName("bar"))
+ val value = getValue(foo, bar)
+ println(value)
+}
\ No newline at end of file
diff --git a/test/pending/run/t5610a.check b/test/pending/run/t5610a.check
new file mode 100644
index 0000000..2aa46b3
--- /dev/null
+++ b/test/pending/run/t5610a.check
@@ -0,0 +1 @@
+Stroke a kitten
diff --git a/test/pending/run/t5610a.scala b/test/pending/run/t5610a.scala
new file mode 100644
index 0000000..f20b295
--- /dev/null
+++ b/test/pending/run/t5610a.scala
@@ -0,0 +1,19 @@
+object Test extends App {
+ class Result(_str: => String) {
+ lazy val str = _str
+ }
+
+ def foo(str: => String)(i: Int) = new Result(str)
+
+ def bar(f: Int => Result) = f(42)
+
+ var test: String = null
+ val result = bar(foo(test))
+ test = "bar"
+
+ if (result.str == null) {
+ println("Destroy ALL THE THINGS!!!")
+ } else {
+ println("Stroke a kitten")
+ }
+}
\ No newline at end of file
diff --git a/test/pending/run/t5610b.check b/test/pending/run/t5610b.check
new file mode 100644
index 0000000..2aa46b3
--- /dev/null
+++ b/test/pending/run/t5610b.check
@@ -0,0 +1 @@
+Stroke a kitten
diff --git a/test/pending/run/t5610b.scala b/test/pending/run/t5610b.scala
new file mode 100644
index 0000000..d922d63
--- /dev/null
+++ b/test/pending/run/t5610b.scala
@@ -0,0 +1,21 @@
+object Bug {
+ def main(args: Array[String]) {
+ var test: String = null
+ val result = bar(foo(test))
+ test = "bar"
+
+ if (result.str == null) {
+ println("Destroy ALL THE THINGS!!!")
+ } else {
+ println("Stroke a kitten")
+ }
+ }
+
+ class Result(_str: => String) {
+ lazy val str = _str
+ }
+
+ def foo(str: => String)(i: Int) = new Result(str)
+
+ def bar(f: Int => Result) = f(42)
+}
\ No newline at end of file
diff --git a/test/pending/run/t5692.flags b/test/pending/run/t5692.flags
new file mode 100644
index 0000000..cd66464
--- /dev/null
+++ b/test/pending/run/t5692.flags
@@ -0,0 +1 @@
+-language:experimental.macros
\ No newline at end of file
diff --git a/test/pending/run/t5692/Impls_Macros_1.scala b/test/pending/run/t5692/Impls_Macros_1.scala
new file mode 100644
index 0000000..94bcffb
--- /dev/null
+++ b/test/pending/run/t5692/Impls_Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.reflect.macros.Context
+
+object Impls {
+ def impl[A](c: reflect.macros.Context) = c.universe.reify(())
+}
+
+object Macros {
+ def decl[A] = macro Impls.impl[A]
+}
\ No newline at end of file
diff --git a/test/pending/run/t5692/Test_2.scala b/test/pending/run/t5692/Test_2.scala
new file mode 100644
index 0000000..29251a5
--- /dev/null
+++ b/test/pending/run/t5692/Test_2.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ val x = Macros.decl
+ def y() { Macros.decl(); }
+}
\ No newline at end of file
diff --git a/test/pending/run/t5698/client.scala b/test/pending/run/t5698/client.scala
new file mode 100644
index 0000000..de672c1
--- /dev/null
+++ b/test/pending/run/t5698/client.scala
@@ -0,0 +1,9 @@
+package client
+
+
+
+object Client extends App {
+ val peer = actors.remote.Node("localhost", 23456)
+ val a = actors.remote.RemoteActor.select(peer, 'test)
+ a ! server.TestMsg
+}
diff --git a/test/pending/run/t5698/server.scala b/test/pending/run/t5698/server.scala
new file mode 100644
index 0000000..e8f3cea
--- /dev/null
+++ b/test/pending/run/t5698/server.scala
@@ -0,0 +1,22 @@
+package server
+
+
+
+object Server extends App {
+
+ class ServerActor extends actors.Actor {
+ def act() {
+ actors.remote.RemoteActor.alive(23456)
+ actors.remote.RemoteActor.register('test, actors.Actor.self)
+ loop {
+ react {
+ case TestMsg => println("Yay!")
+ }
+ }
+ }
+ }
+
+ val a = new ServerActor
+ a.start()
+
+}
diff --git a/test/pending/run/t5698/testmsg.scala b/test/pending/run/t5698/testmsg.scala
new file mode 100644
index 0000000..004ff0b
--- /dev/null
+++ b/test/pending/run/t5698/testmsg.scala
@@ -0,0 +1,5 @@
+package server
+
+
+
+case object TestMsg
diff --git a/test/pending/run/t5722.scala b/test/pending/run/t5722.scala
new file mode 100644
index 0000000..21ace06
--- /dev/null
+++ b/test/pending/run/t5722.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ def foo[T: ClassTag] = println(classOf[T])
+ foo[Int]
+ foo[Array[Int]]
+ foo[List[Int]]
+}
\ No newline at end of file
diff --git a/test/pending/run/t5726a.scala b/test/pending/run/t5726a.scala
new file mode 100644
index 0000000..24d828a
--- /dev/null
+++ b/test/pending/run/t5726a.scala
@@ -0,0 +1,17 @@
+import language.dynamics
+
+class DynamicTest extends Dynamic {
+ def selectDynamic(name: String) = s"value of $name"
+ def updateDynamic(name: String)(value: Any) {
+ println(s"You have just updated property '$name' with value: $value")
+ }
+}
+
+object MyApp extends App {
+ def testing() {
+ val test = new DynamicTest
+ test.firstName = "John"
+ }
+
+ testing()
+}
\ No newline at end of file
diff --git a/test/pending/run/t5726b.scala b/test/pending/run/t5726b.scala
new file mode 100644
index 0000000..839dcf4
--- /dev/null
+++ b/test/pending/run/t5726b.scala
@@ -0,0 +1,16 @@
+import language.dynamics
+
+class DynamicTest extends Dynamic {
+ def updateDynamic(name: String)(value: Any) {
+ println(s"You have just updated property '$name' with value: $value")
+ }
+}
+
+object MyApp extends App {
+ def testing() {
+ val test = new DynamicTest
+ test.firstName = "John"
+ }
+
+ testing()
+}
\ No newline at end of file
diff --git a/test/pending/run/t5882.scala b/test/pending/run/t5882.scala
new file mode 100644
index 0000000..47996d3
--- /dev/null
+++ b/test/pending/run/t5882.scala
@@ -0,0 +1,14 @@
+// SIP-15 was revised to allow nested classes in value classes.
+// This test checks that their basic functionality.
+
+class NodeOps(val n: Any) extends AnyVal { self =>
+ class Foo() { def show = self.show(n) }
+ def show(x: Any) = x.toString
+}
+
+
+object Test extends App {
+
+ val n = new NodeOps("abc")
+ assert(new n.Foo().show == "abc")
+}
diff --git a/test/pending/run/t5943b1.scala b/test/pending/run/t5943b1.scala
new file mode 100644
index 0000000..0d54718
--- /dev/null
+++ b/test/pending/run/t5943b1.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+// pending until https://issues.scala-lang.org/browse/SI-6393 is fixed
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val expr = tb.parse("math.sqrt(4.0)")
+ println(tb.typeCheck(expr))
+}
\ No newline at end of file
diff --git a/test/pending/run/t5943b2.scala b/test/pending/run/t5943b2.scala
new file mode 100644
index 0000000..85299d9
--- /dev/null
+++ b/test/pending/run/t5943b2.scala
@@ -0,0 +1,10 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+// pending until https://issues.scala-lang.org/browse/SI-6393 is fixed
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val expr = tb.parse("math.sqrt(4.0)")
+ println(tb.eval(expr))
+}
\ No newline at end of file
diff --git a/test/pending/run/t6387.check b/test/pending/run/t6387.check
new file mode 100644
index 0000000..83b33d2
--- /dev/null
+++ b/test/pending/run/t6387.check
@@ -0,0 +1 @@
+1000
diff --git a/test/pending/run/t6387.scala b/test/pending/run/t6387.scala
new file mode 100644
index 0000000..bbebb5f
--- /dev/null
+++ b/test/pending/run/t6387.scala
@@ -0,0 +1,16 @@
+trait A {
+ def foo: Long
+}
+
+object Test {
+ def a(): A = new A {
+ var foo: Long = 1000L
+
+ val test = () => {
+ foo = 28
+ }
+ }
+ def main(args: Array[String]) {
+ println(a().foo)
+ }
+}
diff --git a/test/pending/run/t6408.scala b/test/pending/run/t6408.scala
new file mode 100644
index 0000000..ff17480
--- /dev/null
+++ b/test/pending/run/t6408.scala
@@ -0,0 +1,11 @@
+class X(val i: Int) extends AnyVal {
+ class Inner(val q: Int) {
+ def plus = i + q
+ }
+}
+
+object Test extends App {
+ val x = new X(11)
+ val i = new x.Inner(22)
+ assert(i.plus == 33)
+}
diff --git a/test/pending/run/t6591_4.check b/test/pending/run/t6591_4.check
new file mode 100644
index 0000000..0f1c048
--- /dev/null
+++ b/test/pending/run/t6591_4.check
@@ -0,0 +1 @@
+Expr(Block(List(ValDef(Modifiers(), newTermName("v"), Select(Ident(newTermName("A")), newTypeName("I")), Apply(Select(New(Select(Ident(newTermName("A")), newTypeName("I"))), nme.CONSTRUCTOR), List()))), Ident(newTermName("v"))))
diff --git a/test/pending/run/t6591_4.scala b/test/pending/run/t6591_4.scala
new file mode 100644
index 0000000..f20c8e6
--- /dev/null
+++ b/test/pending/run/t6591_4.scala
@@ -0,0 +1,17 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+class O { class I }
+
+class A extends O {
+ val code = reify {
+ val v: I = new I
+ v
+ }
+ println(showRaw(code))
+}
+
+object Test extends App {
+ val v: A#I = (new A).code.eval
+}
diff --git a/test/files/jvm/bug680.check b/test/pending/run/virtpatmat_anonfun_underscore.check
similarity index 100%
rename from test/files/jvm/bug680.check
rename to test/pending/run/virtpatmat_anonfun_underscore.check
diff --git a/test/pending/run/virtpatmat_anonfun_underscore.flags b/test/pending/run/virtpatmat_anonfun_underscore.flags
new file mode 100644
index 0000000..23e3dc7
--- /dev/null
+++ b/test/pending/run/virtpatmat_anonfun_underscore.flags
@@ -0,0 +1 @@
+-Yvirtpatmat
\ No newline at end of file
diff --git a/test/pending/run/virtpatmat_anonfun_underscore.scala b/test/pending/run/virtpatmat_anonfun_underscore.scala
new file mode 100644
index 0000000..db6705d
--- /dev/null
+++ b/test/pending/run/virtpatmat_anonfun_underscore.scala
@@ -0,0 +1,4 @@
+object Test extends App {
+ List(1,2,3) map (_ match { case x => x + 1} ) // `_ match` is redundant but shouldn't crash the compiler
+ List((1,2)) map (_ match { case (x, z) => x + z})
+}
\ No newline at end of file
diff --git a/test/pending/script/dashi.check b/test/pending/script/dashi.check
new file mode 100644
index 0000000..c3cf137
--- /dev/null
+++ b/test/pending/script/dashi.check
@@ -0,0 +1 @@
+test.bippy = dingus
diff --git a/test/pending/script/dashi.flags b/test/pending/script/dashi.flags
new file mode 100644
index 0000000..5b46a61
--- /dev/null
+++ b/test/pending/script/dashi.flags
@@ -0,0 +1 @@
+-i dashi/a.scala -e 'setBippy ; getBippy'
diff --git a/test/pending/script/dashi/a.scala b/test/pending/script/dashi/a.scala
new file mode 100644
index 0000000..c4a07bf
--- /dev/null
+++ b/test/pending/script/dashi/a.scala
@@ -0,0 +1,2 @@
+def setBippy = sys.props("test.bippy") = "dingus"
+def getBippy = println("test.bippy = " + sys.props("test.bippy"))
diff --git a/test/pending/script/bug2365.javaopts b/test/pending/script/t2365.javaopts
similarity index 100%
rename from test/pending/script/bug2365.javaopts
rename to test/pending/script/t2365.javaopts
diff --git a/test/pending/script/bug2365.sh b/test/pending/script/t2365.sh
old mode 100644
new mode 100755
similarity index 100%
rename from test/pending/script/bug2365.sh
rename to test/pending/script/t2365.sh
diff --git a/test/pending/script/bug2365/Test.scala b/test/pending/script/t2365/Test.scala
similarity index 100%
rename from test/pending/script/bug2365/Test.scala
rename to test/pending/script/t2365/Test.scala
diff --git a/test/pending/script/bug2365/bug2365.scala b/test/pending/script/t2365/runner.scala
old mode 100644
new mode 100755
similarity index 100%
rename from test/pending/script/bug2365/bug2365.scala
rename to test/pending/script/t2365/runner.scala
diff --git a/test/pending/shootout/meteor.scala b/test/pending/shootout/meteor.scala
index 2fd7027..6dbd3cf 100644
--- a/test/pending/shootout/meteor.scala
+++ b/test/pending/shootout/meteor.scala
@@ -1,3 +1,5 @@
+import scala.reflect.{ClassTag, classTag}
+
/* The Computer Language Shootout
http://shootout.alioth.debian.org/
contributed by Isaac Gouy
@@ -19,7 +21,7 @@ object meteor {
-// Solver.scala
+// Solver.scala
// import scala.collection.mutable._
final class Solver (n: Int) {
@@ -29,8 +31,8 @@ final class Solver (n: Int) {
private val board = new Board()
- val pieces = Array(
- new Piece(0), new Piece(1), new Piece(2), new Piece(3), new Piece(4),
+ val pieces = Array(
+ new Piece(0), new Piece(1), new Piece(2), new Piece(3), new Piece(4),
new Piece(5), new Piece(6), new Piece(7), new Piece(8), new Piece(9) )
val unplaced = new BitSet(pieces.length)
@@ -71,8 +73,8 @@ final class Solver (n: Int) {
private def puzzleSolved() = {
val b = board.asString
- if (first == null){
- first = b; last = b
+ if (first == null){
+ first = b; last = b
} else {
if (b < first){ first = b } else { if (b > last){ last = b } }
}
@@ -81,7 +83,7 @@ final class Solver (n: Int) {
private def shouldPrune() = {
board.unmark
- !board.cells.forall(c => c.contiguousEmptyCells % Piece.size == 0)
+ !board.cells.forall(c => c.contiguousEmptyCells % Piece.size == 0)
}
@@ -108,8 +110,8 @@ final class Solver (n: Int) {
}
/*
- def printPieces() =
- for (i <- Iterator.range(0,Board.pieces)) pieces(i).print
+ def printPieces() =
+ for (i <- Iterator.range(0,Board.pieces)) pieces(i).print
*/
}
@@ -126,7 +128,7 @@ object Board {
val size = rows * cols
}
-final class Board {
+final class Board {
val cells = boardCells()
val cellsPieceWillFill = new Array[BoardCell](Piece.size)
@@ -134,9 +136,9 @@ final class Board {
def unmark() = for (c <- cells) c.unmark
- def asString() =
- new String( cells map(
- c => if (c.piece == null) '-'.toByte
+ def asString() =
+ new String( cells map(
+ c => if (c.piece == null) '-'.toByte
else (c.piece.number + 48).toByte ))
def firstEmptyCellIndex() = cells.findIndexOf(c => c.isEmpty)
@@ -144,13 +146,13 @@ final class Board {
def add(pieceIndex: Int, boardIndex: Int, p: Piece) = {
cellCount = 0
p.unmark
-
+
find( p.cells(pieceIndex), cells(boardIndex))
- val boardHasSpace = cellCount == Piece.size &&
- cellsPieceWillFill.forall(c => c.isEmpty)
+ val boardHasSpace = cellCount == Piece.size &&
+ cellsPieceWillFill.forall(c => c.isEmpty)
- if (boardHasSpace) cellsPieceWillFill.foreach(c => c.piece = p)
+ if (boardHasSpace) cellsPieceWillFill.foreach(c => c.piece = p)
boardHasSpace
}
@@ -180,10 +182,10 @@ final class Board {
if (row % 2 == 1) {
if (!isLast) c.next(Cell.NE) = a(i-(Board.cols-1))
- c.next(Cell.NW) = a(i-Board.cols)
+ c.next(Cell.NW) = a(i-Board.cols)
if (row != m) {
if (!isLast) c.next(Cell.SE) = a(i+(Board.cols+1))
- c.next(Cell.SW) = a(i+Board.cols)
+ c.next(Cell.SW) = a(i+Board.cols)
}
} else {
if (row != 0) {
@@ -212,9 +214,9 @@ final class Board {
Console.print(i + "\t")
for (j <- Iterator.range(0,Cell.sides)){
val c = cells(i).next(j)
- if (c == null)
- Console.print("-- ")
- else
+ if (c == null)
+ Console.print("-- ")
+ else
Console.printf("{0,number,00} ")(c.number)
}
Console.println("")
@@ -241,7 +243,7 @@ final class Piece(_number: Int) {
val number = _number
val cells = for (i <- Array.range(0,Piece.size)) yield new PieceCell()
- {
+ {
number match {
case 0 => make0
case 1 => make1
@@ -252,7 +254,7 @@ final class Piece(_number: Int) {
case 6 => make6
case 7 => make7
case 8 => make8
- case 9 => make9
+ case 9 => make9
}
}
@@ -395,12 +397,12 @@ final class Piece(_number: Int) {
Console.print(i + "\t")
for (j <- Iterator.range(0,Cell.sides)){
val c = cells(i).next(j)
- if (c == null)
- Console.print("-- ")
- else
+ if (c == null)
+ Console.print("-- ")
+ else
for (k <- Iterator.range(0,Piece.size)){
if (cells(k) == c) Console.printf(" {0,number,0} ")(k)
- }
+ }
}
Console.println("")
}
@@ -418,13 +420,13 @@ final class Piece(_number: Int) {
object Cell {
val NW = 0; val NE = 1
val W = 2; val E = 3
- val SW = 4; val SE = 5
+ val SW = 4; val SE = 5
val sides = 6
}
abstract class Cell {
- implicit def m: Manifest[T]
+ implicit def t: ClassTag[T]
type T
val next = new Array[T](Cell.sides)
var marked = false
@@ -437,7 +439,7 @@ abstract class Cell {
final class BoardCell(_number: Int) extends {
type T = BoardCell
- implicit val m = manifest[BoardCell]
+ implicit val t = classTag[BoardCell]
} with Cell {
val number = _number
var piece: Piece = _
@@ -448,10 +450,10 @@ final class BoardCell(_number: Int) extends {
def contiguousEmptyCells(): Int = {
if (!marked && isEmpty){
mark
- var count = 1
+ var count = 1
for (neighbour <- next)
- if (neighbour != null && neighbour.isEmpty)
+ if (neighbour != null && neighbour.isEmpty)
count = count + neighbour.contiguousEmptyCells
count } else { 0 }
@@ -493,4 +495,3 @@ final class PieceCell extends Cell {
-
diff --git a/test/pending/specialized/SI-5005.check b/test/pending/specialized/SI-5005.check
new file mode 100644
index 0000000..81e8342
--- /dev/null
+++ b/test/pending/specialized/SI-5005.check
@@ -0,0 +1,33 @@
+[[syntax trees at end of specialize]] // newSource1
+package <empty> {
+ class C2[@specialized(scala.Boolean) U >: Nothing <: Any] extends Object {
+ def <init>(): C2[U] = {
+ C2.super.<init>();
+ ()
+ };
+ def apply(x: U): U = x;
+ <specialized> def apply$mcZ$sp(x: Boolean): Boolean = C2.this.apply(x.asInstanceOf[U]()).asInstanceOf[Boolean]()
+ };
+ class B extends Object {
+ def <init>(): B = {
+ B.super.<init>();
+ ()
+ };
+ new C2$mcZ$sp().apply$mcZ$sp(true)
+ };
+ <specialized> class C2$mcZ$sp extends C2[Boolean] {
+ <specialized> def <init>(): C2$mcZ$sp = {
+ C2$mcZ$sp.super.<init>();
+ ()
+ };
+ @inline final override <specialized> def apply(x: Boolean): Boolean = C2$mcZ$sp.this.apply$mcZ$sp(x);
+ @inline final override <specialized> def apply$mcZ$sp(x: Boolean): Boolean = x
+ }
+}
+
+[log inliner] Analyzing C2.apply count 0 with 1 blocks
+[log inliner] C2.apply blocks before inlining: 1 (2) after: 1 (2)
+[log inliner] Analyzing C2.apply$mcZ$sp count 0 with 1 blocks
+[log inliner] C2.apply$mcZ$sp blocks before inlining: 1 (8) after: 1 (8)
+[log inliner] Not inlining into apply because it is marked @inline.
+[log inliner] Not inlining into apply$mcZ$sp because it is marked @inline.
diff --git a/test/pending/specialized/SI-5005.scala b/test/pending/specialized/SI-5005.scala
new file mode 100644
index 0000000..280bf0a
--- /dev/null
+++ b/test/pending/specialized/SI-5005.scala
@@ -0,0 +1,36 @@
+import scala.tools.partest._
+import java.io._
+
+
+
+// I think this may be due to a bug in partest where it uses some other version
+// of the scala-library.jar - _hashCode is in line 202 currently, not 212!
+//
+// [partest] testing: [...]/files/specialized/SI-5005.scala [FAILED]
+// [partest] java.lang.NoClassDefFoundError: scala/util/MurmurHash3$
+// [partest] java.lang.NoClassDefFoundError: scala/util/MurmurHash3$
+// [partest] at scala.runtime.ScalaRunTime$._hashCode(ScalaRunTime.scala:212)
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:spec -optimize -Ylog:inliner -d " + testOutput.path
+
+ override def code = """
+ class C2[@specialized(Boolean) U]() {
+ @inline final def apply(x: U): U = x
+ }
+
+ class B {
+ (new C2[Boolean]())(true)
+ }
+ """
+
+ override def show(): Unit = {
+ // redirect err to out, for inliner log
+ val prevErr = System.err
+ System.setErr(System.out)
+ compile()
+ System.setErr(prevErr)
+ }
+
+ override def isDebug = false // so we don't get the newSettings warning
+}
diff --git a/test/review b/test/review
old mode 100644
new mode 100755
diff --git a/test/scaladoc/resources/SI-3314-diagrams.scala b/test/scaladoc/resources/SI-3314-diagrams.scala
new file mode 100644
index 0000000..b80a97b
--- /dev/null
+++ b/test/scaladoc/resources/SI-3314-diagrams.scala
@@ -0,0 +1,78 @@
+package scala.test.scaladoc {
+
+ /** Check the interaction between SI-3314 and diagrams
+ * - the three enumerations below should get valid content diagrams:
+ * Value
+ * __________/|\__________
+ * / / / | \ \ \
+ * Mon Tue Wed Thu Fri Sat Sun
+ *
+ * - each member should receive an inhertiance diagram:
+ * Value
+ * |
+ * |
+ * {Mon,Tue,Wed,Thu,Fri,Sat,Sun}
+ */
+ package diagrams {
+
+ /** @contentDiagram
+ * @inheritanceDiagram hideDiagram */
+ trait WeekDayTraitWithDiagram extends Enumeration {
+ type WeekDay = Value
+ /** @inheritanceDiagram */
+ object Mon extends WeekDay
+ /** @inheritanceDiagram */
+ object Tue extends WeekDay
+ /** @inheritanceDiagram */
+ object Wed extends WeekDay
+ /** @inheritanceDiagram */
+ object Thu extends WeekDay
+ /** @inheritanceDiagram */
+ object Fri extends WeekDay
+ /** @inheritanceDiagram */
+ object Sat extends WeekDay
+ /** @inheritanceDiagram */
+ object Sun extends WeekDay
+ }
+
+ /** @contentDiagram
+ * @inheritanceDiagram hideDiagram */
+ class WeekDayClassWithDiagram extends Enumeration {
+ type WeekDay = Value
+ /** @inheritanceDiagram */
+ object Mon extends WeekDay
+ /** @inheritanceDiagram */
+ object Tue extends WeekDay
+ /** @inheritanceDiagram */
+ object Wed extends WeekDay
+ /** @inheritanceDiagram */
+ object Thu extends WeekDay
+ /** @inheritanceDiagram */
+ object Fri extends WeekDay
+ /** @inheritanceDiagram */
+ object Sat extends WeekDay
+ /** @inheritanceDiagram */
+ object Sun extends WeekDay
+ }
+
+ /** @contentDiagram
+ * @inheritanceDiagram hideDiagram */
+ object WeekDayObjectWithDiagram extends Enumeration {
+ type WeekDay = Value
+ /** @inheritanceDiagram */
+ object Mon extends WeekDay
+ /** @inheritanceDiagram */
+ object Tue extends WeekDay
+ /** @inheritanceDiagram */
+ object Wed extends WeekDay
+ /** @inheritanceDiagram */
+ object Thu extends WeekDay
+ /** @inheritanceDiagram */
+ object Fri extends WeekDay
+ /** @inheritanceDiagram */
+ object Sat extends WeekDay
+ /** @inheritanceDiagram */
+ object Sun extends WeekDay
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/resources/SI-3314.scala b/test/scaladoc/resources/SI-3314.scala
new file mode 100644
index 0000000..9e0afdc
--- /dev/null
+++ b/test/scaladoc/resources/SI-3314.scala
@@ -0,0 +1,85 @@
+package scala.test.scaladoc {
+
+ // testing inherited <documented> templates (Enum.Value is included in the source, thus is documented in scaladoc)
+ package test1 {
+ class Enum {
+ abstract class Value
+ class Val extends Value
+ def Value(): Value = new Val
+ }
+
+ object Constants extends Enum {
+ def a = Value
+ }
+ }
+
+ // testing inherited <not documented> templates (scala.Enumeration.Value is taken from the library, thus is not
+ // documented in the scaladoc pages -- but should be inherited to make things clear!)
+ package test2 {
+ trait WeekDayTrait extends Enumeration {
+ type WeekDay = Value
+ val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
+ }
+
+ class WeekDayClass extends Enumeration {
+ type WeekDay = Value
+ val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
+ }
+
+ object WeekDayObject extends Enumeration {
+ type WeekDay = Value
+ val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
+ }
+
+ object UserObject {
+ def isWorkingDay1(d: scala.test.scaladoc.test2.WeekDayClass#Value) = false
+ def isWorkingDay2(d: scala.test.scaladoc.test2.WeekDayClass#WeekDay) = false
+ def isWorkingDay3(d: scala.test.scaladoc.test2.WeekDayTrait#Value) = false
+ def isWorkingDay4(d: scala.test.scaladoc.test2.WeekDayTrait#WeekDay) = false
+ def isWorkingDay5(d: scala.test.scaladoc.test2.WeekDayObject.Value) = false
+ def isWorkingDay6(d: scala.test.scaladoc.test2.WeekDayObject.WeekDay) = false
+ import WeekDayObject._
+ def isWorkingDay7(d: Value) = ! (d == Sat || d == Sun)
+ def isWorkingDay8(d: WeekDay) = ! (d == Sat || d == Sun)
+ def isWorkingDay9(d: WeekDayObject.Value) = ! (d == Sat || d == Sun)
+ }
+
+ class UserClass {
+ def isWorkingDay1(d: scala.test.scaladoc.test2.WeekDayClass#Value) = false
+ def isWorkingDay2(d: scala.test.scaladoc.test2.WeekDayClass#WeekDay) = false
+ def isWorkingDay3(d: scala.test.scaladoc.test2.WeekDayTrait#Value) = false
+ def isWorkingDay4(d: scala.test.scaladoc.test2.WeekDayTrait#WeekDay) = false
+ def isWorkingDay5(d: scala.test.scaladoc.test2.WeekDayObject.Value) = false
+ def isWorkingDay6(d: scala.test.scaladoc.test2.WeekDayObject.WeekDay) = false
+ import WeekDayObject._
+ def isWorkingDay7(d: Value) = ! (d == Sat || d == Sun)
+ def isWorkingDay8(d: WeekDay) = ! (d == Sat || d == Sun)
+ def isWorkingDay9(d: WeekDayObject.Value) = ! (d == Sat || d == Sun)
+ }
+
+ trait UserTrait {
+ def isWorkingDay1(d: scala.test.scaladoc.test2.WeekDayClass#Value) = false
+ def isWorkingDay2(d: scala.test.scaladoc.test2.WeekDayClass#WeekDay) = false
+ def isWorkingDay3(d: scala.test.scaladoc.test2.WeekDayTrait#Value) = false
+ def isWorkingDay4(d: scala.test.scaladoc.test2.WeekDayTrait#WeekDay) = false
+ def isWorkingDay5(d: scala.test.scaladoc.test2.WeekDayObject.Value) = false
+ def isWorkingDay6(d: scala.test.scaladoc.test2.WeekDayObject.WeekDay) = false
+ import WeekDayObject._
+ def isWorkingDay7(d: Value) = ! (d == Sat || d == Sun)
+ def isWorkingDay8(d: WeekDay) = ! (d == Sat || d == Sun)
+ def isWorkingDay9(d: WeekDayObject.Value) = ! (d == Sat || d == Sun)
+ }
+ }
+
+ // testing type lambdas and useless prefixes (should be referenced as T instead of foo.T in the first example)
+ package test3 {
+ import language.higherKinds
+ object `package` {
+ trait T
+ trait A
+ trait X
+ def foo[T](x: T) = 7
+ def bar[A](x: ({type Lambda[X] <: Either[A, X]})#Lambda[String]) = 5
+ }
+ }
+}
diff --git a/test/scaladoc/resources/SI-4360.scala b/test/scaladoc/resources/SI-4360.scala
new file mode 100644
index 0000000..8e8b96a
--- /dev/null
+++ b/test/scaladoc/resources/SI-4360.scala
@@ -0,0 +1,42 @@
+package scala.test.scaladoc.prefix {
+ package pack1 {
+
+ class A {
+ class Z
+ }
+
+ class B extends A
+
+ package a {
+ class C
+ }
+
+ package b {
+ class C
+ }
+
+ package c {
+ class C
+
+ class L extends pack2.Z
+
+ class TEST {
+ // test inherited classes
+ def fooCA(x: pack1.A#Z) = 1
+ def fooCB(x: pack1.B#Z) = 1
+ def fooCS(x: pack2.Z#Z) = 1
+ def fooCL(x: L#Z) = 1
+ // test in packages
+ def fooPA(x: pack1.a.C) = 1
+ def fooPB(x: pack1.b.C) = 1
+ def fooPC(x: pack1.c.C) = 1
+ }
+
+ class A extends pack1.A
+ }
+ }
+
+ package pack2 {
+ class Z extends pack1.A
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/resources/SI-5558.scala b/test/scaladoc/resources/SI-5558.scala
new file mode 100644
index 0000000..6523438
--- /dev/null
+++ b/test/scaladoc/resources/SI-5558.scala
@@ -0,0 +1,6 @@
+package test {
+ class T
+ object `package` {
+ def foo = ???
+ }
+}
diff --git a/test/scaladoc/resources/SI-5784.scala b/test/scaladoc/resources/SI-5784.scala
new file mode 100644
index 0000000..3731d49
--- /dev/null
+++ b/test/scaladoc/resources/SI-5784.scala
@@ -0,0 +1,28 @@
+package test.templates {
+ object `package` {
+ /** @template */
+ type String = java.lang.String
+ val String = new StringCompanion
+ class StringCompanion { def boo = ??? }
+ }
+
+ /** @contentDiagram */
+ trait Base {
+ /** @documentable */
+ type String = test.templates.String
+ /** @template
+ * @inheritanceDiagram */
+ type T <: Foo
+ val T: FooExtractor
+ trait Foo { def foo: Int }
+ trait FooExtractor { def apply(foo: Int); def unapply(t: Foo): Option[Int] }
+ }
+
+ /** @contentDiagram */
+ trait Api extends Base {
+ /** @documentable
+ * @inheritanceDiagram */
+ override type T <: FooApi
+ trait FooApi extends Foo { def bar: String }
+ }
+}
diff --git a/test/scaladoc/resources/SI-6509.scala b/test/scaladoc/resources/SI-6509.scala
new file mode 100644
index 0000000..540ba24
--- /dev/null
+++ b/test/scaladoc/resources/SI-6509.scala
@@ -0,0 +1,24 @@
+package test.scaladoc.template.owners
+
+trait X {
+ /** @template */
+ type Symbol >: Null <: SymbolApi
+
+ /** @template */
+ type TypeSymbol >: Null <: Symbol with TypeSymbolApi
+
+ /** @template */
+ type TermSymbol >: Null <: Symbol with TermSymbolApi
+
+ /** @template */
+ type MethodSymbol >: Null <: TermSymbol with MethodSymbolApi
+
+ trait SymbolApi { this: Symbol => def x: Int}
+ trait TermSymbolApi extends SymbolApi { this: TermSymbol => def y: Int}
+ trait TypeSymbolApi extends SymbolApi { this: TypeSymbol => def z: Int}
+ trait MethodSymbolApi extends TermSymbolApi { this: MethodSymbol => def t: Int }
+}
+
+trait Y extends X
+trait Z extends Y
+trait T extends Z
diff --git a/test/scaladoc/resources/SI-6511.scala b/test/scaladoc/resources/SI-6511.scala
new file mode 100644
index 0000000..1f153ca
--- /dev/null
+++ b/test/scaladoc/resources/SI-6511.scala
@@ -0,0 +1,24 @@
+package test.scaladoc.template.diagrams
+
+/** @contentDiagram hideNodes "*Api" */
+trait X {
+ /** @template */
+ type Symbol >: Null <: SymbolApi
+
+ /** @template */
+ type TypeSymbol >: Null <: Symbol with TypeSymbolApi
+
+ /** @template */
+ type TermSymbol >: Null <: Symbol with TermSymbolApi
+
+ /** @template */
+ type MethodSymbol >: Null <: TermSymbol with MethodSymbolApi
+
+ trait SymbolApi { this: Symbol => def x: Int}
+ trait TermSymbolApi extends SymbolApi { this: TermSymbol => def y: Int}
+ trait TypeSymbolApi extends SymbolApi { this: TypeSymbol => def z: Int}
+ trait MethodSymbolApi extends TermSymbolApi { this: MethodSymbol => def t: Int }
+}
+
+/** @contentDiagram hideNodes "*Api" */
+trait Y extends X
diff --git a/test/scaladoc/resources/SI_4676.scala b/test/scaladoc/resources/SI_4676.scala
deleted file mode 100644
index 00c0fc7..0000000
--- a/test/scaladoc/resources/SI_4676.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-class SI_4676 {
- type SS = (String,String)
- def x(ss: SS): Int = 3
-}
diff --git a/test/scaladoc/resources/SI_4898.scala b/test/scaladoc/resources/SI_4898.scala
new file mode 100644
index 0000000..40461d1
--- /dev/null
+++ b/test/scaladoc/resources/SI_4898.scala
@@ -0,0 +1,9 @@
+class SI_4898 {
+
+ /**
+ * A link to [[__root__
+ *
+ * @usecase def test(): Int
+ */
+ def test(implicit param: Int): Int = param
+}
\ No newline at end of file
diff --git a/test/scaladoc/resources/SI_5054_q1.scala b/test/scaladoc/resources/SI_5054_q1.scala
new file mode 100644
index 0000000..02d9be8
--- /dev/null
+++ b/test/scaladoc/resources/SI_5054_q1.scala
@@ -0,0 +1,9 @@
+class SI_5054_q1 {
+ /**
+ * A simple comment
+ *
+ * @param lost a lost parameter
+ * @usecase def test(): Int
+ */
+ def test(implicit lost: Int): Int = lost
+}
diff --git a/test/scaladoc/resources/SI_5054_q2.scala b/test/scaladoc/resources/SI_5054_q2.scala
new file mode 100644
index 0000000..c873731
--- /dev/null
+++ b/test/scaladoc/resources/SI_5054_q2.scala
@@ -0,0 +1,9 @@
+class SI_5054_q2 {
+ /**
+ * A simple comment
+ *
+ * @param lost a lost parameter
+ * @usecase def test(): Int
+ */
+ final def test(implicit lost: Int): Int = lost
+}
diff --git a/test/scaladoc/resources/SI_5054_q3.scala b/test/scaladoc/resources/SI_5054_q3.scala
new file mode 100644
index 0000000..be5d22f
--- /dev/null
+++ b/test/scaladoc/resources/SI_5054_q3.scala
@@ -0,0 +1,9 @@
+class SI_5054_q3 {
+ /**
+ * A simple comment
+ *
+ * @param lost a lost parameter
+ * @usecase def test(): Int
+ */
+ implicit def test(implicit lost: Int): Int = lost
+}
diff --git a/test/scaladoc/resources/SI_5054_q4.scala b/test/scaladoc/resources/SI_5054_q4.scala
new file mode 100644
index 0000000..4e5e486
--- /dev/null
+++ b/test/scaladoc/resources/SI_5054_q4.scala
@@ -0,0 +1,9 @@
+abstract class SI_5054_q4 {
+ /**
+ * A simple comment
+ *
+ * @param lost a lost parameter
+ * @usecase def test(): Int
+ */
+ def test(implicit lost: Int): Int
+}
diff --git a/test/scaladoc/resources/SI_5054_q5.scala b/test/scaladoc/resources/SI_5054_q5.scala
new file mode 100644
index 0000000..05ba748
--- /dev/null
+++ b/test/scaladoc/resources/SI_5054_q5.scala
@@ -0,0 +1,9 @@
+trait SI_5054_q5 {
+ /**
+ * A simple comment
+ *
+ * @param lost a lost parameter
+ * @usecase def test(): Int
+ */
+ def test(implicit lost: Int): Int = lost
+}
diff --git a/test/scaladoc/resources/SI_5054_q6.scala b/test/scaladoc/resources/SI_5054_q6.scala
new file mode 100644
index 0000000..607be65
--- /dev/null
+++ b/test/scaladoc/resources/SI_5054_q6.scala
@@ -0,0 +1,9 @@
+trait SI_5054_q6 {
+ /**
+ * A simple comment
+ *
+ * @param lost a lost parameter
+ * @usecase def test(): Int
+ */
+ def test(implicit lost: Int): Int
+}
diff --git a/test/scaladoc/resources/SI_5054_q7.scala b/test/scaladoc/resources/SI_5054_q7.scala
new file mode 100644
index 0000000..1bd120e
--- /dev/null
+++ b/test/scaladoc/resources/SI_5054_q7.scala
@@ -0,0 +1,22 @@
+trait SI_5054_q7 {
+ /**
+ * The full definition, either used with an implicit value or with an explicit one.
+ *
+ * Some more explanation on implicits...
+ *
+ * @param lost a lost parameter
+ * @return some integer
+ * @usecase def test1(): Int
+ *
+ * This takes the implicit value in scope.
+ *
+ * Example: `test1()`
+ *
+ * @usecase def test2(explicit: Int): Int
+ *
+ * This takes the explicit value passed.
+ *
+ * Example: `test2(3)`
+ */
+ def test(implicit lost: Int): Int
+}
diff --git a/test/scaladoc/resources/SI_5287.scala b/test/scaladoc/resources/SI_5287.scala
new file mode 100644
index 0000000..141ab15
--- /dev/null
+++ b/test/scaladoc/resources/SI_5287.scala
@@ -0,0 +1,17 @@
+trait SI_5287_A {
+ def method(implicit a: Int): Int = a
+}
+
+trait SI_5287_B extends SI_5287_A {
+ override def method(implicit a: Int): Int = a + 1
+}
+
+trait SI_5287 extends SI_5287_B{
+ /**
+ * Some explanation
+ *
+ * @usecase def method(): Int
+ * The usecase explanation
+ */
+ override def method(implicit a: Int): Int = a + 3
+}
\ No newline at end of file
diff --git a/test/scaladoc/resources/Trac3484.scala b/test/scaladoc/resources/Trac3484.scala
deleted file mode 100644
index 9656ec2..0000000
--- a/test/scaladoc/resources/Trac3484.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-class cbf[A, B, C]
-
-/**
- * @define Coll Traversable
- * @define bfreturn $Coll
- */
-class Collection[A] {
- /** What map does...
- *
- * $bfreturn
- * @usecase def map[B](f: A => B): $bfreturn[B]
- *
- */
- def map[B, That](f: A => B)(implicit fact: cbf[Collection[A], B, That]) =
- null
-}
-
-/**
- * @define b John
- * @define a Mister $b
- */
-class SR704 {
- /**
- * Hello $a.
- */
- def foo = 123
-}
diff --git a/test/scaladoc/resources/basic.scala b/test/scaladoc/resources/basic.scala
new file mode 100644
index 0000000..00db666
--- /dev/null
+++ b/test/scaladoc/resources/basic.scala
@@ -0,0 +1,27 @@
+package com.example {
+ package object p1 {
+ def packageObjectMethod = 0
+ }
+}
+
+package com.example.p1 {
+ class Clazz {
+ def foo = packageObjectMethod
+ implicit def intToClass1(n: Int) = new Clazz
+
+ @deprecated("This method is deprecated.")
+ def depracatedMethod = 0
+
+ // Google Token for Scala
+ def :: = 0
+ @deprecated("This method is deprecated.") def :::: = 0
+ }
+
+ class UpperBound[T <: Int]
+
+ class LowerBound[T >: Int]
+
+ class ExistentialType {
+ def foo(array: Array[T] forSome { type T <: AnyVal }) = 0
+ }
+}
diff --git a/test/scaladoc/resources/code-indent.scala b/test/scaladoc/resources/code-indent.scala
new file mode 100644
index 0000000..88946ff
--- /dev/null
+++ b/test/scaladoc/resources/code-indent.scala
@@ -0,0 +1,37 @@
+/**
+ * This is an example of indented comments:
+ * {{{
+ * a typicial indented
+ * comment on multiple
+ * comment lines
+ * }}}
+ * {{{ one liner }}}
+ * {{{ two lines, one useful
+ * }}}
+ * {{{
+ * line1
+ * line2
+ * line3
+ * line4}}}
+ * {{{
+ * a ragged example
+ * a (condition)
+ * the t h e n branch
+ * an alternative
+ * the e l s e branch
+ * }}}
+ * NB: Trailing spaces are necessary for this test!
+ * {{{
+ * l1
+ *
+ * l2
+ *
+ * l3
+ *
+ * l4
+ *
+ * l5
+ * }}}
+
+ */
+class C
diff --git a/test/scaladoc/resources/doc-root/Any.scala b/test/scaladoc/resources/doc-root/Any.scala
new file mode 100644
index 0000000..031b7d9
--- /dev/null
+++ b/test/scaladoc/resources/doc-root/Any.scala
@@ -0,0 +1,114 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** Class `Any` is the root of the Scala class hierarchy. Every class in a Scala
+ * execution environment inherits directly or indirectly from this class.
+ */
+abstract class Any {
+ /** Compares the receiver object (`this`) with the argument object (`that`) for equivalence.
+ *
+ * Any implementation of this method should be an [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]]:
+ *
+ * - It is reflexive: for any instance `x` of type `Any`, `x.equals(x)` should return `true`.
+ * - It is symmetric: for any instances `x` and `y` of type `Any`, `x.equals(y)` should return `true` if and
+ * only if `y.equals(x)` returns `true`.
+ * - It is transitive: for any instances `x`, `y`, and `z` of type `AnyRef` if `x.equals(y)` returns `true` and
+ * `y.equals(z)` returns `true`, then `x.equals(z)` should return `true`.
+ *
+ * If you override this method, you should verify that your implementation remains an equivalence relation.
+ * Additionally, when overriding this method it is usually necessary to override `hashCode` to ensure that
+ * objects which are "equal" (`o1.equals(o2)` returns `true`) hash to the same [[scala.Int]].
+ * (`o1.hashCode.equals(o2.hashCode)`).
+ *
+ * @param that the object to compare against this object for equality.
+ * @return `true` if the receiver object is equivalent to the argument; `false` otherwise.
+ */
+ def equals(that: Any): Boolean
+
+ /** Calculate a hash code value for the object.
+ *
+ * The default hashing algorithm is platform dependent.
+ *
+ * Note that it is allowed for two objects to have identical hash codes (`o1.hashCode.equals(o2.hashCode)`) yet
+ * not be equal (`o1.equals(o2)` returns `false`). A degenerate implementation could always return `0`.
+ * However, it is required that if two objects are equal (`o1.equals(o2)` returns `true`) that they have
+ * identical hash codes (`o1.hashCode.equals(o2.hashCode)`). Therefore, when overriding this method, be sure
+ * to verify that the behavior is consistent with the `equals` method.
+ *
+ * @return the hash code value for this object.
+ */
+ def hashCode(): Int
+
+ /** Returns a string representation of the object.
+ *
+ * The default representation is platform dependent.
+ *
+ * @return a string representation of the object.
+ */
+ def toString(): String
+
+ /** Returns the runtime class representation of the object.
+ *
+ * @return a class object corresponding to the runtime type of the receiver.
+ */
+ def getClass(): Class[_]
+
+ /** Test two objects for equality.
+ * The expression `x == that` is equivalent to `if (x eq null) that eq null else x.equals(that)`.
+ *
+ * @param that the object to compare against this object for equality.
+ * @return `true` if the receiver object is equivalent to the argument; `false` otherwise.
+ */
+ final def ==(that: Any): Boolean = this equals that
+
+ /** Test two objects for inequality.
+ *
+ * @param that the object to compare against this object for equality.
+ * @return `true` if !(this == that), false otherwise.
+ */
+ final def != (that: Any): Boolean = !(this == that)
+
+ /** Equivalent to `x.hashCode` except for boxed numeric types and `null`.
+ * For numerics, it returns a hash value which is consistent
+ * with value equality: if two value type instances compare
+ * as true, then ## will produce the same hash value for each
+ * of them.
+ * For `null` returns a hashcode where `null.hashCode` throws a
+ * `NullPointerException`.
+ *
+ * @return a hash value consistent with ==
+ */
+ final def ##(): Int = sys.error("##")
+
+ /** Test whether the dynamic type of the receiver object is `T0`.
+ *
+ * Note that the result of the test is modulo Scala's erasure semantics.
+ * Therefore the expression `1.isInstanceOf[String]` will return `false`, while the
+ * expression `List(1).isInstanceOf[List[String]]` will return `true`.
+ * In the latter example, because the type argument is erased as part of compilation it is
+ * not possible to check whether the contents of the list are of the specified type.
+ *
+ * @return `true` if the receiver object is an instance of erasure of type `T0`; `false` otherwise.
+ */
+ def isInstanceOf[T0]: Boolean = sys.error("isInstanceOf")
+
+ /** Cast the receiver object to be of type `T0`.
+ *
+ * Note that the success of a cast at runtime is modulo Scala's erasure semantics.
+ * Therefore the expression `1.asInstanceOf[String]` will throw a `ClassCastException` at
+ * runtime, while the expression `List(1).asInstanceOf[List[String]]` will not.
+ * In the latter example, because the type argument is erased as part of compilation it is
+ * not possible to check whether the contents of the list are of the requested type.
+ *
+ * @throws ClassCastException if the receiver object is not an instance of the erasure of type `T0`.
+ * @return the receiver object.
+ */
+ def asInstanceOf[T0]: T0 = sys.error("asInstanceOf")
+}
diff --git a/test/scaladoc/resources/doc-root/AnyRef.scala b/test/scaladoc/resources/doc-root/AnyRef.scala
new file mode 100644
index 0000000..7d8b9f9
--- /dev/null
+++ b/test/scaladoc/resources/doc-root/AnyRef.scala
@@ -0,0 +1,132 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** Class `AnyRef` is the root class of all ''reference types''.
+ * All types except the value types descend from this class.
+ * @template
+ */
+trait AnyRef extends Any {
+
+ /** The equality method for reference types. Default implementation delegates to `eq`.
+ *
+ * See also `equals` in [[scala.Any]].
+ *
+ * @param that the object to compare against this object for equality.
+ * @return `true` if the receiver object is equivalent to the argument; `false` otherwise.
+ */
+ def equals(that: Any): Boolean = this eq that
+
+ /** The hashCode method for reference types. See hashCode in [[scala.Any]].
+ *
+ * @return the hash code value for this object.
+ */
+ def hashCode: Int = sys.error("hashCode")
+
+ /** Creates a String representation of this object. The default
+ * representation is platform dependent. On the java platform it
+ * is the concatenation of the class name, "@", and the object's
+ * hashcode in hexadecimal.
+ *
+ * @return a String representation of the object.
+ */
+ def toString: String = sys.error("toString")
+
+ /** Executes the code in `body` with an exclusive lock on `this`.
+ *
+ * @param body the code to execute
+ * @return the result of `body`
+ */
+ def synchronized[T](body: => T): T
+
+ /** Tests whether the argument (`arg0`) is a reference to the receiver object (`this`).
+ *
+ * The `eq` method implements an [[http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] on
+ * non-null instances of `AnyRef`, and has three additional properties:
+ *
+ * - It is consistent: for any non-null instances `x` and `y` of type `AnyRef`, multiple invocations of
+ * `x.eq(y)` consistently returns `true` or consistently returns `false`.
+ * - For any non-null instance `x` of type `AnyRef`, `x.eq(null)` and `null.eq(x)` returns `false`.
+ * - `null.eq(null)` returns `true`.
+ *
+ * When overriding the `equals` or `hashCode` methods, it is important to ensure that their behavior is
+ * consistent with reference equality. Therefore, if two objects are references to each other (`o1 eq o2`), they
+ * should be equal to each other (`o1 == o2`) and they should hash to the same value (`o1.hashCode == o2.hashCode`).
+ *
+ * @param that the object to compare against this object for reference equality.
+ * @return `true` if the argument is a reference to the receiver object; `false` otherwise.
+ */
+ final def eq(that: AnyRef): Boolean = sys.error("eq")
+
+ /** Equivalent to `!(this eq that)`.
+ *
+ * @param that the object to compare against this object for reference equality.
+ * @return `true` if the argument is not a reference to the receiver object; `false` otherwise.
+ */
+ final def ne(that: AnyRef): Boolean = !(this eq that)
+
+ /** The expression `x == that` is equivalent to `if (x eq null) that eq null else x.equals(that)`.
+ *
+ * @param arg0 the object to compare against this object for equality.
+ * @return `true` if the receiver object is equivalent to the argument; `false` otherwise.
+ */
+ final def ==(that: AnyRef): Boolean =
+ if (this eq null) that eq null
+ else this equals that
+
+ /** Create a copy of the receiver object.
+ *
+ * The default implementation of the `clone` method is platform dependent.
+ *
+ * @note not specified by SLS as a member of AnyRef
+ * @return a copy of the receiver object.
+ */
+ protected def clone(): AnyRef
+
+ /** Called by the garbage collector on the receiver object when there
+ * are no more references to the object.
+ *
+ * The details of when and if the `finalize` method is invoked, as
+ * well as the interaction between `finalize` and non-local returns
+ * and exceptions, are all platform dependent.
+ *
+ * @note not specified by SLS as a member of AnyRef
+ */
+ protected def finalize(): Unit
+
+ /** A representation that corresponds to the dynamic class of the receiver object.
+ *
+ * The nature of the representation is platform dependent.
+ *
+ * @note not specified by SLS as a member of AnyRef
+ * @return a representation that corresponds to the dynamic class of the receiver object.
+ */
+ def getClass(): Class[_]
+
+ /** Wakes up a single thread that is waiting on the receiver object's monitor.
+ *
+ * @note not specified by SLS as a member of AnyRef
+ */
+ def notify(): Unit
+
+ /** Wakes up all threads that are waiting on the receiver object's monitor.
+ *
+ * @note not specified by SLS as a member of AnyRef
+ */
+ def notifyAll(): Unit
+
+ /** Causes the current Thread to wait until another Thread invokes
+ * the notify() or notifyAll() methods.
+ *
+ * @note not specified by SLS as a member of AnyRef
+ */
+ def wait (): Unit
+ def wait (timeout: Long, nanos: Int): Unit
+ def wait (timeout: Long): Unit
+}
diff --git a/test/scaladoc/resources/doc-root/Nothing.scala b/test/scaladoc/resources/doc-root/Nothing.scala
new file mode 100644
index 0000000..eed6066
--- /dev/null
+++ b/test/scaladoc/resources/doc-root/Nothing.scala
@@ -0,0 +1,23 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** `Nothing` is - together with [[scala.Null]] - at the bottom of Scala's type hierarchy.
+ *
+ * `Nothing` is a subtype of every other type (including [[scala.Null]]); there exist
+ * ''no instances'' of this type. Although type `Nothing` is uninhabited, it is
+ * nevertheless useful in several ways. For instance, the Scala library defines a value
+ * [[scala.collection.immutable.Nil]] of type `List[Nothing]`. Because lists are covariant in Scala,
+ * this makes [[scala.collection.immutable.Nil]] an instance of `List[T]`, for any element of type `T`.
+ *
+ * Another usage for Nothing is the return type for methods which never return normally.
+ * One example is method error in [[scala.sys]], which always throws an exception.
+ */
+sealed trait Nothing
+
diff --git a/test/scaladoc/resources/doc-root/Null.scala b/test/scaladoc/resources/doc-root/Null.scala
new file mode 100644
index 0000000..7455e78
--- /dev/null
+++ b/test/scaladoc/resources/doc-root/Null.scala
@@ -0,0 +1,17 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** `Null` is - together with [[scala.Nothing]] - at the bottom of the Scala type hierarchy.
+ *
+ * `Null` is a subtype of all reference types; its only instance is the `null` reference.
+ * Since `Null` is not a subtype of value types, `null` is not a member of any such type. For instance,
+ * it is not possible to assign `null` to a variable of type [[scala.Int]].
+ */
+sealed trait Null
diff --git a/test/scaladoc/resources/explicit-inheritance-override.scala b/test/scaladoc/resources/explicit-inheritance-override.scala
new file mode 100644
index 0000000..62ce653
--- /dev/null
+++ b/test/scaladoc/resources/explicit-inheritance-override.scala
@@ -0,0 +1,48 @@
+// This tests the implicit comment inheritance capabilities of scaladoc for class inheritance (no $super, no @inheritdoc)
+class InheritDocBase {
+ /**
+ * The base comment. And another sentence...
+ *
+ * @param arg1 The T term comment
+ * @param arg2 The string comment
+ * @tparam T the type of the first argument
+ * @throws SomeException if the function is not called with correct parameters
+ * @return The return comment
+ * @see The Manual
+ * @note Be careful!
+ * @example function[Int](3, "something")
+ * @author a Scala developer
+ * @version 0.0.2
+ * @since 0.0.1
+ * @todo Call mom.
+ */
+ def function[T](arg1: T, arg2: String): Double = 0.0d
+}
+
+class InheritDocDerived extends InheritDocBase {
+ /**
+ * Starting line
+ *
+ * @inheritdoc
+ * @inheritdoc
+ *
+ * Ending line
+ *
+ * @param arg1 Start1 @inheritdoc End1
+ * @param arg2 Start2 @inheritdoc End2
+ * @param arg3 Start3 ShouldWarn @inheritdoc End3
+ * @tparam T StartT @inheritdoc EndT
+ * @tparam ShouldWarn StartSW @inheritdoc EndSW
+ * @throws SomeException StartEx @inheritdoc EndEx
+ * @throws SomeOtherException StartSOE Should Warn @inheritdoc EndSOE
+ * @return StartRet @inheritdoc EndRet
+ * @see StartSee @inheritdoc EndSee
+ * @note StartNote @inheritdoc EndNote
+ * @example StartExample @inheritdoc EndExample
+ * @author StartAuthor @inheritdoc EndAuthor
+ * @version StartVer @inheritdoc EndVer
+ * @since StartSince @inheritdoc EndSince
+ * @todo StartTodo @inheritdoc And dad! EndTodo
+ */
+ override def function[T](arg1: T, arg2: String): Double = 1.0d
+}
\ No newline at end of file
diff --git a/test/scaladoc/resources/explicit-inheritance-usecase.scala b/test/scaladoc/resources/explicit-inheritance-usecase.scala
new file mode 100644
index 0000000..e10cec4
--- /dev/null
+++ b/test/scaladoc/resources/explicit-inheritance-usecase.scala
@@ -0,0 +1,47 @@
+// This tests the implicit comment inheritance capabilities of scaladoc for usecases (no $super, no @inheritdoc)
+/** Testing use case inheritance */
+class UseCaseInheritDoc {
+ /**
+ * The base comment. And another sentence...
+ *
+ * @param arg1 The T term comment
+ * @param arg2 The string comment
+ * @tparam T the type of the first argument
+ * @throws SomeException if the function is not called with correct parameters
+ * @return The return comment
+ * @see The Manual
+ * @note Be careful!
+ * @example function[Int](3, "something")
+ * @author a Scala developer
+ * @version 0.0.2
+ * @since 0.0.1
+ * @todo Call mom.
+ *
+ * @usecase def function[T](arg1: T, arg2: String): Double
+ *
+ * Starting line
+ *
+ * @inheritdoc
+ * @inheritdoc
+ *
+ * Ending line
+ *
+ * @param arg1 Start1 @inheritdoc End1
+ * @param arg2 Start2 @inheritdoc End2
+ * @param arg3 Start3 ShouldWarn @inheritdoc End3
+ * @tparam T StartT @inheritdoc EndT
+ * @tparam ShouldWarn StartSW @inheritdoc EndSW
+ * @throws SomeException StartEx @inheritdoc EndEx
+ * @throws SomeOtherException StartSOE Should Warn @inheritdoc EndSOE
+ * @return StartRet @inheritdoc EndRet
+ * @see StartSee @inheritdoc EndSee
+ * @note StartNote @inheritdoc EndNote
+ * @example StartExample @inheritdoc EndExample
+ * @author StartAuthor @inheritdoc EndAuthor
+ * @version StartVer @inheritdoc EndVer
+ * @since StartSince @inheritdoc EndSince
+ * @todo StartTodo @inheritdoc And dad! EndTodo
+ */
+ def function[T](implicit arg1: T, arg2: String): Double = 0.0d
+}
+
diff --git a/test/scaladoc/resources/implicit-inheritance-override.scala b/test/scaladoc/resources/implicit-inheritance-override.scala
new file mode 100644
index 0000000..5d692f5
--- /dev/null
+++ b/test/scaladoc/resources/implicit-inheritance-override.scala
@@ -0,0 +1,41 @@
+// This tests the implicit comment inheritance capabilities of scaladoc for class inheritance (no $super, no @inheritdoc)
+class Base {
+ /**
+ * The base comment. And another sentence...
+ *
+ * @param arg1 The T term comment
+ * @param arg2 The string comment
+ * @tparam T the type of the first argument
+ * @return The return comment
+ */
+ def function[T](arg1: T, arg2: String): Double = 0.0d
+}
+
+class DerivedA extends Base {
+ /**
+ * Overriding the comment, the params and returns comments should stay the same.
+ */
+ override def function[T](arg1: T, arg2: String): Double = 1.0d
+}
+
+class DerivedB extends Base {
+ /**
+ * @param arg1 The overridden T term comment
+ * @param arg2 The overridden string comment
+ */
+ override def function[T](arg1: T, arg2: String): Double = 2.0d
+}
+
+class DerivedC extends Base {
+ /**
+ * @return The overridden return comment
+ */
+ override def function[T](arg1: T, arg2: String): Double = 3.0d
+}
+
+class DerivedD extends Base {
+ /**
+ * @tparam T The overriden type parameter comment
+ */
+ override def function[T](arg1: T, arg2: String): Double = 3.0d
+}
\ No newline at end of file
diff --git a/test/scaladoc/resources/implicit-inheritance-usecase.scala b/test/scaladoc/resources/implicit-inheritance-usecase.scala
new file mode 100644
index 0000000..8dd1262
--- /dev/null
+++ b/test/scaladoc/resources/implicit-inheritance-usecase.scala
@@ -0,0 +1,57 @@
+// This tests the implicit comment inheritance capabilities of scaladoc for usecases (no $super, no @inheritdoc)
+/** Testing use case inheritance */
+class UseCaseInheritance {
+ /**
+ * The base comment. And another sentence...
+ *
+ * @param arg1 The T term comment
+ * @param arg2 The string comment
+ * @tparam T The type parameter
+ * @return The return comment
+ *
+ * @usecase def missing_arg[T](arg1: T): Double
+ *
+ * @usecase def missing_targ(arg1: Int, arg2: String): Double
+ *
+ * @usecase def overridden_arg1[T](implicit arg1: T, arg2: String): Double
+ * @param arg1 The overridden T term comment
+ *
+ * @usecase def overridden_targ[T](implicit arg1: T, arg2: String): Double
+ * @tparam T The overridden type parameter comment
+ *
+ * @usecase def overridden_return[T](implicit arg1: T, arg2: String): Double
+ * @return The overridden return comment
+ *
+ * @usecase def added_arg[T](implicit arg1: T, arg2: String, arg3: Float): Double
+ * @param arg3 The added float comment
+ *
+ * @usecase def overridden_comment[T](implicit arg1: T, arg2: String): Double
+ * The overridden comment.
+ */
+ def function[T](implicit arg1: T, arg2: String): Double = 0.0d
+}
+
+/** Testing the override-use case interaction */
+class UseCaseOverrideInheritance extends UseCaseInheritance {
+ /**
+ * @usecase def missing_arg[T](arg1: T): Double
+ *
+ * @usecase def missing_targ(arg1: Int, arg2: String): Double
+ *
+ * @usecase def overridden_arg1[T](implicit arg1: T, arg2: String): Double
+ * @param arg1 The overridden T term comment
+ *
+ * @usecase def overridden_targ[T](implicit arg1: T, arg2: String): Double
+ * @tparam T The overridden type parameter comment
+ *
+ * @usecase def overridden_return[T](implicit arg1: T, arg2: String): Double
+ * @return The overridden return comment
+ *
+ * @usecase def added_arg[T](implicit arg1: T, arg2: String, arg3: Float): Double
+ * @param arg3 The added float comment
+ *
+ * @usecase def overridden_comment[T](implicit arg1: T, arg2: String): Double
+ * The overridden comment.
+ */
+ override def function[T](implicit arg1: T, arg2: String): Double = 0.0d
+}
diff --git a/test/scaladoc/resources/implicits-ambiguating-res.scala b/test/scaladoc/resources/implicits-ambiguating-res.scala
new file mode 100644
index 0000000..6ed5136
--- /dev/null
+++ b/test/scaladoc/resources/implicits-ambiguating-res.scala
@@ -0,0 +1,72 @@
+/**
+ * Test scaladoc implicits distinguishing -- supress all members by implicit conversion that are shadowed by the
+ * class' own members
+ *
+ * {{{
+ * scala> class A { def foo(t: String) = 4 }
+ * defined class A
+ *
+ * scala> class B { def foo(t: Any) = 5 }
+ * defined class B
+ *
+ * scala> implicit def AtoB(a:A) = new B
+ * AtoB: (a: A)B
+ *
+ * scala> val a = new A
+ * a: A = A at 28f553e3
+ *
+ * scala> a.foo("T")
+ * res1: Int = 4
+ *
+ * scala> a.foo(4)
+ * res2: Int = 5
+ * }}}
+ */
+package scala.test.scaladoc.implicits.ambiguating
+import language.implicitConversions // according to SIP18
+
+/** - conv1-5 should be ambiguous
+ * - conv6-7 should not be ambiguous
+ * - conv8 should be ambiguous
+ * - conv9 should be ambiguous
+ * - conv10 and conv11 should not be ambiguous */
+class A[T]
+/** conv1-9 should be the same, conv10 should be ambiguous, conv11 should be okay */
+class B extends A[Int]
+/** conv1-9 should be the same, conv10 and conv11 should not be ambiguous */
+class C extends A[Double]
+ /** conv1-9 should be the same, conv10 should not be ambiguous while conv11 should be ambiguous */
+class D extends A[AnyRef]
+
+class X[T] {
+ def conv1: AnyRef = ???
+ def conv2: T = ???
+ def conv3(l: Int): AnyRef = ???
+ def conv4(l: AnyRef): AnyRef = ???
+ def conv5(l: AnyRef): String = ???
+ def conv6(l: String)(m: String): AnyRef = ???
+ def conv7(l: AnyRef)(m: AnyRef): AnyRef = ???
+ def conv8(l: AnyRef): AnyRef = ???
+ def conv9(l: String): AnyRef = ???
+ def conv10(l: T): T = ???
+ def conv11(l: T): T = ???
+}
+
+class Z[T] {
+ def conv1: AnyRef = ???
+ def conv2: T = ???
+ def conv3(p: Int): AnyRef = ???
+ def conv4(p: AnyRef): String = ???
+ def conv5(p: AnyRef): AnyRef = ???
+ def conv6(p: String, q: String): AnyRef = ???
+ def conv7(p: AnyRef, q: AnyRef): AnyRef = ???
+ def conv8(p: String): AnyRef = ???
+ def conv9(p: AnyRef): AnyRef = ???
+ def conv10(p: Int): T = ???
+ def conv11(p: String): T = ???
+}
+
+object A {
+ implicit def AtoX[T](a: A[T]) = new X[T]
+ implicit def AtoZ[T](a: A[T]) = new Z[T]
+}
diff --git a/test/scaladoc/resources/implicits-base-res.scala b/test/scaladoc/resources/implicits-base-res.scala
new file mode 100644
index 0000000..d6c0332
--- /dev/null
+++ b/test/scaladoc/resources/implicits-base-res.scala
@@ -0,0 +1,152 @@
+/**
+ * Test scaladoc implicits - the bread and butter of the testsuite :)
+ */
+package scala.test.scaladoc.implicits.base
+
+class Foo[T]
+class Bar[T]
+trait MyNumeric[R]
+
+/** Class A
+ * - tests the complete type inference
+ * - the following inherited methods should appear:
+ * {{{
+ * def convToGtColonDoubleA(x: Double) // pimpA3: with a constraint that T <: Double
+ * def convToIntA(x: Int) // pimpA2: with a constraint that T = Int
+ * def convToManifestA(x: T) // pimpA7: with 2 constraints: T: Manifest and T <: Double
+ * def convToMyNumericA(x: T) // pimpA6: with a constraint that there is x: MyNumeric[T] implicit in scope
+ * def convToNumericA(x: T) // pimpA1: with a constraint that there is x: Numeric[T] implicit in scope
+ * def convToPimpedA(x: Bar[Foo[T]]) // pimpA5: no constraints, SHADOWED
+ * def convToPimpedA(x: S) // pimpA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar, SHADOWED
+ * def convToPimpedA(x: T) // pimpA0: with no constraints, SHADOWED
+ * def convToTraversableOps(x: T) // pimpA7: with 2 constraints: T: Manifest and T <: Double
+ * // should not be abstract!
+ * }}}
+ */
+class A[T] {
+ /** This should prevent the implicitly inherited `def convToPimpedA: T` from `pimpA0` from showing up */
+ def convToPimpedA(x: T): T = sys.error("Let's check it out!")
+ /** This should check implicit member elimination in the case of subtyping */
+ def foo(a: T, b: AnyRef): T
+}
+/** Companion object with implicit transformations */
+object A {
+ import language.implicitConversions // according to SIP18
+
+ implicit def pimpA0[V](a: A[V]) = new PimpedA(a)
+ implicit def pimpA1[ZBUR: Numeric](a: A[ZBUR]) = new NumericA[ZBUR](a)
+ implicit def pimpA2(a: A[Int]) = new IntA(a)
+ implicit def pimpA3(a: A[T] forSome { type T <: Double }) = new GtColonDoubleA(a)
+ implicit def pimpA4[S](a: A[Foo[Bar[S]]])(implicit foo: Foo[S], bar: Bar[S]): PimpedA[S] = sys.error("not implemented")
+ implicit def pimpA5[Z](a: A[Z]): PimpedA[Bar[Foo[Z]]] = sys.error("not implemented")
+ implicit def pimpA6[Z: MyNumeric](a: A[Z]) = new MyNumericA[Z](a)
+ // TODO: Add H <: Double and see why it crashes for C and D -- context bounds, need to check!
+ implicit def pimpA7[H <: Double : Manifest](a: A[H]) = new ManifestA[H](a) with MyTraversableOps[H] { def convToTraversableOps(x: H): H = sys.error("no") }
+}
+
+
+/** Class B
+ * - tests the existential type solving
+ * - the following inherited methods should appear:
+ * {{{
+ * def convToGtColonDoubleA(x: Double) // pimpA3: no constraints
+ * def convToManifestA(x: Double) // pimpA7: no constraints
+ * def convToMyNumericA(x: Double) // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
+ * def convToNumericA(x: Double) // pimpA1: no constraintsd
+ * def convToPimpedA(x: Bar[Foo[Double]]) // pimpA5: no constraints, SHADOWED
+ * def convToPimpedA(x: Double) // pimpA0: no constraints, SHADOWED
+ * def convToTraversableOps(x: Double) // pimpA7: no constraints
+ * // should not be abstract!
+ * }}}
+ */
+class B extends A[Double]
+object B extends A
+
+
+/** Class C
+ * - tests asSeenFrom
+ * - the following inherited methods should appear:
+ * {{{
+ * def convToIntA(x: Int) // pimpA2: no constraints
+ * def convToMyNumericA(x: Int) // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
+ * def convToNumericA(x: Int) // pimpA1: no constraints
+ * def convToPimpedA(x: Int) // pimpA0: no constraints, SHADOWED
+ * def convToPimpedA(x: Bar[Foo[Int]]) // pimpA5: no constraints, SHADOWED
+ * }}}
+ */
+class C extends A[Int]
+object C extends A
+
+
+/** Class D
+ * - tests implicit elimination
+ * - the following inherited methods should appear:
+ * {{{
+ * def convToMyNumericA(x: String) // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
+ * def convToNumericA(x: String) // pimpA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
+ * def convToPimpedA(x: Bar[Foo[String]]) // pimpA5: no constraints, SHADOWED
+ * def convToPimpedA(x: String) // pimpA0: no constraints, SHADOWED
+ * }}}
+ */
+class D extends A[String]
+/** Companion object with implicit transformations */
+object D extends A
+
+
+/** PimpedA class <br/>
+ * - tests simple inheritance and asSeenFrom
+ * - A, B and C should be implicitly converted to this */
+class PimpedA[V](a: A[V]) {
+ /** The convToPimpedA: V documentation... */
+ def convToPimpedA(x: V): V = sys.error("Not implemented")
+}
+
+/** NumericA class <br/>
+ * - tests the implicit conversion between parametric and fixed types
+ * - A, B and C should be implicitly converted to this */
+class NumericA[U: Numeric](a: A[U]) {
+ /** The convToNumericA: U documentation... */
+ def convToNumericA(x: U): U = implicitly[Numeric[U]].zero
+}
+
+/** IntA class <br/>
+ * - tests the interaction between implicit conversion and specific types
+ * - A and C should be implicitly converted to this */
+class IntA(a: A[Int]) {
+ /** The convToIntA: Int documentation... */
+ def convToIntA(x: Int): Int = 0
+}
+
+/** GtColonDoubleA class <br/>
+ * - tests the interaction between implicit conversion and existential types
+ * - A and B should be implicitly converted to this */
+class GtColonDoubleA(a: A[T] forSome { type T <: Double }) {
+ /** The convToGtColonDoubleA: Double documentation... */
+ def convToGtColonDoubleA(x: Double): Double = 0
+}
+
+/** MyNumericA class <br/>
+ * - tests the implicit conversion between parametric and fixed types
+ * - A should be implicitly converted to this */
+class MyNumericA[U: MyNumeric](a: A[U]) {
+ /** The convToMyNumericA: U documentation... */
+ def convToMyNumericA(x: U): U = sys.error("dunno")
+}
+
+/** ManifestA class <br/>
+ * - tests the manifest recognition
+ * - A, B, C, D should be implicitly converted to this */
+class ManifestA[W: Manifest](a: A[W]) {
+ /** The convToManifestA: W documentation... */
+ def convToManifestA(x: W): W = sys.error("dunno")
+}
+
+// [Eugene to Vlad] how do I test typetags here?
+
+/** MyTraversableOps class <br/>
+ * - checks if any abstract members are added - should not happen!
+ */
+trait MyTraversableOps[S] {
+ /** The convToTraversableOps: S documentation... */
+ def convToTraversableOps(x: S): S
+}
diff --git a/test/scaladoc/resources/implicits-chaining-res.scala b/test/scaladoc/resources/implicits-chaining-res.scala
new file mode 100644
index 0000000..c005d5f
--- /dev/null
+++ b/test/scaladoc/resources/implicits-chaining-res.scala
@@ -0,0 +1,50 @@
+/**
+ * Testing scaladoc implicits chaining
+ */
+package scala.test.scaladoc.implicits {
+
+ import language.implicitConversions // according to SIP18
+
+ // the classes involved
+ case class Z[U](a: U)
+ case class Intermediate[T, U](t: T, u: U)
+ class Implicit1[T](b: Implicit2[T])
+ class Implicit2[T](c: Implicit3[T])
+ class Implicit3[T](/* and so on */)
+
+ object chaining {
+
+ // the base conversion
+ implicit def convertToZ[T](a: A[T])(implicit b: Implicit1[T]): Z[A[T]] = Z(a)
+
+ // and the implicit chaining, don't you just love it? :D
+ // implicit1, with one alternative
+ implicit def implicit1[T <: Intermediate[_, _]](implicit b: Implicit2[T]) = new Implicit1[T](b)
+ // implicit2, with two alternatives
+ implicit def implicit2alt1[T <: Intermediate[_ <: String, _]](implicit c: Implicit3[T]) = new Implicit2[T](c)
+ implicit def implicit2alt2[T <: Intermediate[_ <: Double, _]](implicit c: Implicit3[T]) = new Implicit2[T](c)
+ // implicit3, with two alternatives
+ implicit def implicit3alt1[T <: Intermediate[_, _ <: Int]] = new Implicit3[T]()
+ implicit def implicit3alt2[T <: Intermediate[_ <: Double, _ <: AnyRef],X] = new Implicit3[T]()
+
+ // and our targets
+ /** conversion here, with constraints */
+ class A[T]()
+ /** conversion here, no constraints */
+ class B extends A[Intermediate[String, Int]]
+ /** no conversion */
+ class C extends A[Intermediate[String, String]]
+ /** conversion here, no constraints */
+ class D extends A[Intermediate[Double, Int]]
+ /** conversion here, no constraints */
+ class E extends A[Intermediate[Double, String]]
+ /** no conversion */
+ class F extends A[Intermediate[String, Double]]
+
+ object scalacTest {
+ (new B).a
+ (new D).a
+ (new E).a
+ }
+ }
+}
diff --git a/test/scaladoc/resources/implicits-elimination-res.scala b/test/scaladoc/resources/implicits-elimination-res.scala
new file mode 100644
index 0000000..5f7135c
--- /dev/null
+++ b/test/scaladoc/resources/implicits-elimination-res.scala
@@ -0,0 +1,14 @@
+/**
+ * Testing scaladoc implicits elimination
+ */
+package scala.test.scaladoc.implicits.elimination {
+
+ import language.implicitConversions // according to SIP18
+
+ /** No conversion, as B doesn't bring any member */
+ class A
+ class B { class C; trait V; type T; }
+ object A {
+ implicit def toB(a: A): B = null
+ }
+}
diff --git a/test/scaladoc/resources/implicits-known-type-classes-res.scala b/test/scaladoc/resources/implicits-known-type-classes-res.scala
new file mode 100644
index 0000000..77c91aa
--- /dev/null
+++ b/test/scaladoc/resources/implicits-known-type-classes-res.scala
@@ -0,0 +1,39 @@
+/** Tests the "known type classes" feature of scaladoc implicits
+ * if the test fails, please update the correct qualified name of
+ * the type class in src/compiler/scala/tools/nsc/doc/Settings.scala
+ * in the knownTypeClasses map. Thank you! */
+package scala.test.scaladoc.implicits.typeclasses {
+ class A[T]
+ object A {
+ import language.implicitConversions
+ import scala.reflect.ClassTag
+ import scala.reflect.runtime.universe.TypeTag
+ implicit def convertNumeric [T: Numeric] (a: A[T]) = new B(implicitly[Numeric[T]])
+ implicit def convertIntegral [T: Integral] (a: A[T]) = new B(implicitly[Integral[T]])
+ implicit def convertFractional [T: Fractional] (a: A[T]) = new B(implicitly[Fractional[T]])
+ implicit def convertManifest [T: Manifest] (a: A[T]) = new B(implicitly[Manifest[T]])
+ implicit def convertClassManifest [T: ClassManifest] (a: A[T]) = new B(implicitly[ClassManifest[T]])
+ implicit def convertOptManifest [T: OptManifest] (a: A[T]) = new B(implicitly[OptManifest[T]])
+ implicit def convertClassTag [T: ClassTag] (a: A[T]) = new B(implicitly[ClassTag[T]])
+ implicit def convertTypeTag [T: TypeTag] (a: A[T]) = new B(implicitly[TypeTag[T]])
+ type K[X] = Numeric[X]
+ type L[X] = Integral[X]
+ type M[X] = Fractional[X]
+ type N[X] = Manifest[X]
+ type O[X] = ClassManifest[X]
+ type P[X] = OptManifest[X]
+ type Q[X] = ClassTag[X]
+ type R[X] = TypeTag[X]
+ implicit def convertK [T: K] (a: A[T]) = new B(implicitly[K[T]])
+ implicit def convertL [T: L] (a: A[T]) = new B(implicitly[L[T]])
+ implicit def convertM [T: M] (a: A[T]) = new B(implicitly[M[T]])
+ implicit def convertN [T: N] (a: A[T]) = new B(implicitly[N[T]])
+ implicit def convertO [T: O] (a: A[T]) = new B(implicitly[O[T]])
+ implicit def convertP [T: P] (a: A[T]) = new B(implicitly[P[T]])
+ implicit def convertQ [T: Q] (a: A[T]) = new B(implicitly[Q[T]])
+ implicit def convertR [T: R] (a: A[T]) = new B(implicitly[R[T]])
+ }
+ class B[T](t: T) {
+ def typeClass: T = t
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/resources/implicits-scopes-res.scala b/test/scaladoc/resources/implicits-scopes-res.scala
new file mode 100644
index 0000000..c675a64
--- /dev/null
+++ b/test/scaladoc/resources/implicits-scopes-res.scala
@@ -0,0 +1,52 @@
+/**
+ * Testing scaladoc implicit scopes - looking for implicits in the right places
+ */
+package scala.test.scaladoc.implicits.scopes
+import language.implicitConversions // according to SIP18
+
+// TEST1 - In package object
+package object test1 {
+ implicit def toB(a: A): B = null
+}
+package test1 {
+ class A
+ class B { def b = "" }
+}
+
+// TEST2 - In enclosing package - doesn't seem to work even in scalac
+package object test2 {
+ import classes._
+ implicit def toB(a: A): B = null
+}
+package test2 {
+ package classes {
+ class A
+ class B { def b = "" }
+ object test { (new A).b }
+ }
+}
+
+// TEST3 - In companion object
+package test3 {
+ class A
+ object A { implicit def toB(a: A): B = null }
+ class B { def b = "" }
+}
+
+// TEST4 - Nested type's companion object
+package test4 {
+ class U[V]
+ class S
+ object S { implicit def toB(a: A): B = null }
+ class A extends U[S]
+ class B { def b = "" }
+}
+
+// TEST5 - In scope
+package test5 {
+ object scope {
+ class A
+ class B { def b = "" }
+ implicit def toB(a: A): B = null
+ }
+}
diff --git a/test/scaladoc/resources/implicits-shadowing-res.scala b/test/scaladoc/resources/implicits-shadowing-res.scala
new file mode 100644
index 0000000..c5e9493
--- /dev/null
+++ b/test/scaladoc/resources/implicits-shadowing-res.scala
@@ -0,0 +1,64 @@
+/**
+ * Test scaladoc implicits distinguishing -- supress all members by implicit conversion that are shadowed by the
+ * class' own members
+ *
+ * {{{
+ * scala> class A { def foo(t: String) = 4 }
+ * defined class A
+ *
+ * scala> class B { def foo(t: Any) = 5 }
+ * defined class B
+ *
+ * scala> implicit def AtoB(a:A) = new B
+ * AtoB: (a: A)B
+ *
+ * scala> val a = new A
+ * a: A = A at 28f553e3
+ *
+ * scala> a.foo("T")
+ * res1: Int = 4
+ *
+ * scala> a.foo(4)
+ * res2: Int = 5
+ * }}}
+ */
+package scala.test.scaladoc.implicits.shadowing
+import language.implicitConversions // according to SIP18
+
+/** conv5, conv8, conv9, conv10, conv11 should be visible */
+class A[T] {
+ def conv1: AnyRef = ???
+ def conv2: T = ???
+ def conv3(l: Int): AnyRef = ???
+ def conv4(l: AnyRef): AnyRef = ???
+ def conv5(l: String): AnyRef = ???
+ def conv6(l: AnyRef): AnyRef = ???
+ def conv7(l: AnyRef): String = ???
+ def conv8(l: String)(m: String): AnyRef = ???
+ def conv9(l: AnyRef)(m: AnyRef): AnyRef = ???
+ def conv10(l: T): T = ???
+ def conv11(l: T): T = ???
+}
+/** conv5, conv8, conv9, conv11 should be visible */
+class B extends A[Int]
+/** conv5, conv8, conv9, conv10, conv11 should be visible */
+class C extends A[Double]
+/** conv5, conv8, conv9, conv10 should be visible */
+class D extends A[AnyRef]
+
+class Z[T] {
+ def conv1: AnyRef = ???
+ def conv2: T = ???
+ def conv3(p: Int): AnyRef = ???
+ def conv4(p: String): AnyRef = ???
+ def conv5(p: AnyRef): AnyRef = ???
+ def conv6(p: AnyRef): String = ???
+ def conv7(p: AnyRef): AnyRef = ???
+ def conv8(p: String, q: String): AnyRef = ???
+ def conv9(p: AnyRef, q: AnyRef): AnyRef = ???
+ def conv10(p: Int): T = ???
+ def conv11(p: String): T = ???
+}
+object A {
+ implicit def AtoZ[T](a: A[T]) = new Z[T]
+}
diff --git a/test/scaladoc/resources/inheritdoc-corner-cases.scala b/test/scaladoc/resources/inheritdoc-corner-cases.scala
new file mode 100644
index 0000000..8cd995e
--- /dev/null
+++ b/test/scaladoc/resources/inheritdoc-corner-cases.scala
@@ -0,0 +1,78 @@
+// TEST1: Inherit from multiple classes
+trait A {
+ /**
+ * Hello 1 comment
+ */
+ def hello1 = 0
+}
+
+trait B {
+ /**
+ * Hello 2 comment
+ */
+ def hello2 = 1
+}
+
+trait C extends B
+
+class D extends A with C {
+ /**
+ * Inherited: @inheritdoc
+ */
+ override def hello1 = super.hello2
+
+ /**
+ * Inherited: @inheritdoc
+ */
+ override def hello2 = super.hello1
+}
+
+// TEST2: Invalid inherit: no parents
+trait E {
+ /**
+ * @inheritdoc
+ */
+ def whereDidThisComeFrom
+}
+
+// TEST3: Invalid inherit, but other parents present
+trait F extends E {
+ /**
+ * @inheritdoc
+ */
+ def howAboutThis
+}
+
+
+// TEST4: Inherit from something that inherits: inherit should propagate
+trait G extends D {
+ /**
+ * @inheritdoc
+ */
+ override def hello1 = 13
+
+ /**
+ * @inheritdoc
+ */
+ override def hello2 = 14
+}
+
+// TEST5: Inherit missing parameters
+trait H extends G {
+ /**
+ * Missing params
+ * @throws HelloException @inheritdoc
+ * @todo @inheritdoc
+ */
+ override def hello1 = 15
+}
+
+// TEST6: Inherit from something that inherits in the usecase
+trait I extends G {
+ /**
+ * @inheritdoc
+ * @usecase def hello1(i: Int)
+ * @inheritdoc
+ */
+ override def hello1 = 13
+}
\ No newline at end of file
diff --git a/test/scaladoc/resources/links.scala b/test/scaladoc/resources/links.scala
new file mode 100644
index 0000000..bd69665
--- /dev/null
+++ b/test/scaladoc/resources/links.scala
@@ -0,0 +1,74 @@
+// that would be:
+// SI-5079 "Scaladoc can't link to an object (only a class or trait)"
+// SI-4497 "Links in ScalaDoc - Spec and implementation unsufficient"
+// SI-4224 "Wiki-links should support method targets"
+// SI-3695 "support non-fully-qualified type links in scaladoc comments"
+// SI-6487 "Scaladoc can't link to inner classes"
+// SI-6495 "Scaladoc won't pick up group name, priority and description from owner chain"
+// SI-6501 "Scaladoc won't link to a @template type T as a template but as a member"
+package scala.test.scaladoc.links {
+ import language.higherKinds
+ class C
+
+ trait Target {
+ type T
+ type S = String
+ class C
+ def foo(i: Int) = 2
+ def foo(s: String) = 3
+ def foo[A[_]](x: A[String]) = 5
+ def foo[A[_[_]]](x: A[List]) = 6
+ val bar: Boolean
+ def baz(c: scala.test.scaladoc.links.C) = 7
+ }
+
+ object Target {
+ type T = Int => Int
+ type S = Int
+ class C
+ def foo(i: Int) = 2
+ def foo(z: String) = 3
+ def foo[A[_]](x: A[String]) = 5
+ def foo[A[_[_]]](x: A[List]) = 6
+ val bar: Boolean = false
+ val onlyInObject = 1
+ def baz(c: scala.test.scaladoc.links.C) = 7
+ }
+
+ /**
+ * Links to the trait:
+ * - [[scala.test.scaladoc.links.Target$ object Test]]
+ * - [[scala.test package scala.test]]
+ * - [[scala.test.scaladoc.links.Target!.T trait Target -> type T]]
+ * - [[test.scaladoc.links.Target!.S trait Target -> type S]]
+ * - [[scaladoc.links.Target!.foo(i:Int)* trait Target -> def foo]]
+ * - [[links.Target!.bar trait Target -> def bar]]
+ * - [[[[Target!.foo[A[_[_]]]* trait Target -> def foo with 3 nested tparams]]]] (should exercise nested parens)
+ * - [[Target$.T object Target -> type T]]
+ * - [[Target$.S object Target -> type S]]
+ * - [[Target$.foo(z:Str* object Target -> def foo]]
+ * - [[Target$.bar object Target -> def bar]]
+ * - [[[[Target$.foo[A[_[_]]]* trait Target -> def foo with 3 nested tparams]]]] (should exercise nested parens)
+ * - [[Target.onlyInObject object Target -> def foo]] (should find the object)
+ * - [[Target$.C object Target -> class C]] (should link directly to C, not as a member)
+ * - [[Target!.C trait Target -> class C]] (should link directly to C, not as a member)
+ * - [[Target$.baz(c:scala\.test\.scaladoc\.links\.C)* object Target -> def baz]] (should use dots in prefix)
+ * - [[Target!.baz(c:scala\.test\.scaladoc\.links\.C)* trait Target -> def baz]] (should use dots in prefix)
+ * - [[localMethod object TEST -> localMethod]] (should use the current template to resolve link instead of inTpl, that's the package)
+ * - [[#localMethod object TEST -> localMethod]] (should exercise Java-style links to empty members)
+ * - [[ImOutside class ImOutside (check correct lookup in EmptyPackage)]]
+ * - [[ImOutside.Inner#foo class ImOutside#class Inner#method foo (check correct lookup in EmptyPackage)]]
+ * - [[ImOutside.T class ImOutside#type T (check correct linking to templates)]]
+ * - [[ImOutside.T#foo class ImOutside#type T#method foo (check correct interaction between @template and links)]]
+ */
+ object TEST {
+ def localMethod = 3
+ }
+}
+trait ImOutside {
+ /** @template */
+ type T <: Inner
+ class Inner {
+ def foo: Any
+ }
+}
diff --git a/test/scaladoc/resources/package-object-res.scala b/test/scaladoc/resources/package-object-res.scala
new file mode 100644
index 0000000..f1f714d
--- /dev/null
+++ b/test/scaladoc/resources/package-object-res.scala
@@ -0,0 +1,14 @@
+/** This package has A and B.
+ */
+package test {
+ trait A { def hi = "hello" }
+ trait B { def bye = "bye!" }
+}
+
+/** This package object extends A and B.
+ */
+package object test extends A with B {
+ override def hi = "good morning!"
+ override def bye = "good bye!"
+ protected def thank = "thank you!"
+}
diff --git a/test/scaladoc/run/SI-191-deprecated.check b/test/scaladoc/run/SI-191-deprecated.check
new file mode 100755
index 0000000..3925a0d
--- /dev/null
+++ b/test/scaladoc/run/SI-191-deprecated.check
@@ -0,0 +1 @@
+Done.
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-191-deprecated.scala b/test/scaladoc/run/SI-191-deprecated.scala
new file mode 100755
index 0000000..4ed24ff
--- /dev/null
+++ b/test/scaladoc/run/SI-191-deprecated.scala
@@ -0,0 +1,72 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.base._
+import scala.tools.nsc.doc.base.comment._
+import scala.tools.partest.ScaladocModelTest
+import java.net.{URI, URL}
+import java.io.File
+
+object Test extends ScaladocModelTest {
+
+ override def code =
+ """
+ /** See:
+ * - [[scala.collection.Map]] Simple linking
+ * - [[scala.collection.immutable.::]] Linking with symbolic name
+ * - [[scala.Int]].toLong Linking to a class
+ * - [[scala.Predef]] Linking to an object
+ * - [[scala.Int.toLong]] Linking to a method
+ * - [[scala]] Linking to a package
+ * - [[scala.AbstractMethodError]] Linking to a member in the package object
+ * - [[scala.Predef.String]] Linking to a member in an object
+ *
+ * Don't look at:
+ * - [[scala.NoLink]] Not linking :)
+ */
+ object Test {
+ def foo(param: Any) {}
+ def barr(l: scala.collection.immutable.List[Any]) {}
+ def bar(l: List[String]) {} // TODO: Should be able to link to type aliases
+ def baz(d: java.util.Date) {} // Should not be resolved
+ }
+ """
+
+ def scalaURL = "http://bog.us"
+
+ override def scaladocSettings = "-no-link-warnings -external-urls scala=" + scalaURL
+
+ def testModel(rootPackage: Package) {
+ import access._
+ val test = rootPackage._object("Test")
+
+ def check(memberDef: Def, expected: Int) {
+ val externals = memberDef.valueParams(0)(0).resultType.refEntity collect {
+ case (_, (LinkToExternal(name, url), _)) => assert(url.contains(scalaURL)); name
+ }
+ assert(externals.size == expected)
+ }
+
+ check(test._method("foo"), 1)
+ check(test._method("bar"), 0)
+ check(test._method("barr"), 2)
+ check(test._method("baz"), 0)
+
+ val expectedUrls = collection.mutable.Set[String](
+ "scala.collection.Map",
+ "scala.collection.immutable.::",
+ "scala.Int",
+ "scala.Predef$",
+ "scala.Int at toLong:Long",
+ "scala.package",
+ "scala.package at AbstractMethodError=AbstractMethodError",
+ "scala.Predef$@String=String"
+ ).map(scalaURL + "/index.html#" + _)
+
+ def isExpectedExternalLink(l: EntityLink) = l.link match {
+ case LinkToExternal(name, url) => assert(expectedUrls contains url, url); true
+ case _ => false
+ }
+
+ assert(countLinks(test.comment.get, isExpectedExternalLink) == 8,
+ countLinks(test.comment.get, isExpectedExternalLink) + " == 8")
+ }
+}
diff --git a/test/scaladoc/run/SI-191.check b/test/scaladoc/run/SI-191.check
new file mode 100755
index 0000000..3925a0d
--- /dev/null
+++ b/test/scaladoc/run/SI-191.check
@@ -0,0 +1 @@
+Done.
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-191.scala b/test/scaladoc/run/SI-191.scala
new file mode 100755
index 0000000..29b1e7d
--- /dev/null
+++ b/test/scaladoc/run/SI-191.scala
@@ -0,0 +1,77 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.base._
+import scala.tools.nsc.doc.base.comment._
+import scala.tools.partest.ScaladocModelTest
+import java.net.{URI, URL}
+import java.io.File
+
+object Test extends ScaladocModelTest {
+
+ override def code =
+ """
+ /** See:
+ * - [[scala.collection.Map]] Simple linking
+ * - [[scala.collection.immutable.::]] Linking with symbolic name
+ * - [[scala.Int]].toLong Linking to a class
+ * - [[scala.Predef]] Linking to an object
+ * - [[scala.Int.toLong]] Linking to a method
+ * - [[scala]] Linking to a package
+ * - [[scala.AbstractMethodError]] Linking to a member in the package object
+ * - [[scala.Predef.String]] Linking to a member in an object
+ *
+ * Don't look at:
+ * - [[scala.NoLink]] Not linking :)
+ */
+ object Test {
+ def foo(param: Any) {}
+ def barr(l: scala.collection.immutable.List[Any]) {}
+ def bar(l: List[String]) {} // TODO: Should be able to link to type aliases
+ def baz(d: java.util.Date) {} // Should not be resolved
+ }
+ """
+
+ def scalaURL = "http://bog.us"
+
+ override def scaladocSettings = {
+ val scalaLibUri = getClass.getClassLoader.getResource("scala/Function1.class").getPath.split("!")(0)
+ val scalaLibPath = new URI(scalaLibUri).getPath
+ val externalArg = s"$scalaLibPath#$scalaURL"
+ "-no-link-warnings -doc-external-doc " + externalArg
+ }
+
+ def testModel(rootPackage: Package) {
+ import access._
+ val test = rootPackage._object("Test")
+
+ def check(memberDef: Def, expected: Int) {
+ val externals = memberDef.valueParams(0)(0).resultType.refEntity collect {
+ case (_, (LinkToExternal(name, url), _)) => assert(url.contains(scalaURL)); name
+ }
+ assert(externals.size == expected)
+ }
+
+ check(test._method("foo"), 1)
+ check(test._method("bar"), 0)
+ check(test._method("barr"), 2)
+ check(test._method("baz"), 0)
+
+ val expectedUrls = collection.mutable.Set[String](
+ "scala.collection.Map",
+ "scala.collection.immutable.::",
+ "scala.Int",
+ "scala.Predef$",
+ "scala.Int at toLong:Long",
+ "scala.package",
+ "scala.package at AbstractMethodError=AbstractMethodError",
+ "scala.Predef$@String=String"
+ ).map(scalaURL + "/index.html#" + _)
+
+ def isExpectedExternalLink(l: EntityLink) = l.link match {
+ case LinkToExternal(name, url) => assert(expectedUrls contains url, url); true
+ case _ => false
+ }
+
+ assert(countLinks(test.comment.get, isExpectedExternalLink) == 8,
+ countLinks(test.comment.get, isExpectedExternalLink) + " == 8")
+ }
+}
diff --git a/test/scaladoc/run/SI-3314-diagrams.check b/test/scaladoc/run/SI-3314-diagrams.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-3314-diagrams.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-3314-diagrams.scala b/test/scaladoc/run/SI-3314-diagrams.scala
new file mode 100644
index 0000000..050c6e7
--- /dev/null
+++ b/test/scaladoc/run/SI-3314-diagrams.scala
@@ -0,0 +1,28 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def resourceFile = "SI-3314-diagrams.scala"
+
+ // no need for special settings
+ def scaladocSettings = "-diagrams"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // just need to check the member exists, access methods will throw an error if there's a problem
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")
+
+ val diagrams = base._package("diagrams")
+ val templates = List(diagrams._trait("WeekDayTraitWithDiagram"), diagrams._class("WeekDayClassWithDiagram"), diagrams._object("WeekDayObjectWithDiagram"))
+
+ for (template <- templates) {
+ testDiagram(template, template.contentDiagram, 8, 7)
+ val subtemplates = List("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun").map(template._object(_))
+ for (subtemplate <- subtemplates)
+ testDiagram(subtemplate, subtemplate.inheritanceDiagram, 2, 1)
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-3314.check b/test/scaladoc/run/SI-3314.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-3314.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-3314.scala b/test/scaladoc/run/SI-3314.scala
new file mode 100644
index 0000000..c856fe4
--- /dev/null
+++ b/test/scaladoc/run/SI-3314.scala
@@ -0,0 +1,92 @@
+import scala.tools.nsc.doc.base._
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def resourceFile = "SI-3314.scala"
+
+ // no need for special settings
+ def scaladocSettings = "-feature"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // just need to check the member exists, access methods will throw an error if there's a problem
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")
+
+
+
+ // test1
+
+ val test1 = base._package("test1")
+ val test1Value = test1._class("Enum")._method("Value").resultType
+ assert(test1Value.name == "Value", test1Value.name + " == Value")
+ assert(test1Value.refEntity.size == 1, test1Value.refEntity.size + " == 1")
+
+ val test1Constants = test1._object("Constants")._method("a").resultType
+ assert(test1Constants.name == "Value", test1Constants.name + " == Value")
+ assert(test1Constants.refEntity.size == 1, test1Constants.refEntity.size + " == 1")
+ assert(test1Constants.refEntity(0)._1 == LinkToMember(test1._object("Constants")._class("Value"), test1._object("Constants")),
+ test1Constants.refEntity(0)._1 + " == LinkToMember(test1.Enum.Value)")
+
+
+ // test2
+
+ val test2 = base._package("test2")
+ def testDefinition(doc: DocTemplateEntity) = {
+ for (day <- List("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")) {
+ assert(doc._value(day).resultType.name == "Value",
+ doc._value(day).resultType.name + " == Value")
+ assert(doc._value(day).resultType.refEntity.size == 1,
+ doc._value(day).resultType.refEntity.size + " == 1")
+ assert(doc._value(day).resultType.refEntity(0)._1 == LinkToMember(doc._classMbr("Value"), doc),
+ doc._value(day).resultType.refEntity(0)._1 + " == LinkToMember(" + doc.qualifiedName + ".Value)")
+ }
+ }
+ testDefinition(test2._trait("WeekDayTrait"))
+ testDefinition(test2._class("WeekDayClass"))
+ testDefinition(test2._object("WeekDayObject"))
+
+ def testUsage(doc: DocTemplateEntity) = {
+ val ValueInClass = test2._class("WeekDayClass")._classMbr("Value")
+ val ValueInTrait = test2._trait("WeekDayTrait")._classMbr("Value")
+ val ValueInObject = test2._object("WeekDayObject")._classMbr("Value")
+ val WeekDayInObject = test2._object("WeekDayObject")._member("WeekDay")
+
+ val expected = List(
+ ("isWorkingDay1", "WeekDayClass.Value", ValueInClass),
+ ("isWorkingDay2", "WeekDayClass.Value", ValueInClass),
+ ("isWorkingDay3", "WeekDayTrait.Value", ValueInTrait),
+ ("isWorkingDay4", "WeekDayTrait.Value", ValueInTrait),
+ ("isWorkingDay5", "WeekDayObject.Value", ValueInObject),
+ ("isWorkingDay6", "WeekDay", WeekDayInObject),
+ ("isWorkingDay7", "WeekDayObject.Value", ValueInObject),
+ ("isWorkingDay8", "WeekDay", WeekDayInObject),
+ ("isWorkingDay9", "WeekDayObject.Value", ValueInObject))
+
+ for ((method, name, ref) <- expected) {
+ assert(doc._method(method).valueParams(0)(0).resultType.name == name,
+ doc._method(method).valueParams(0)(0).resultType.name + " == " + name + " (in " + doc + "." + method + ")")
+ assert(doc._method(method).valueParams(0)(0).resultType.refEntity.size == 1,
+ doc._method(method).valueParams(0)(0).resultType.refEntity.size + " == " + 1 + " (in " + doc + "." + method + ")")
+ assert(doc._method(method).valueParams(0)(0).resultType.refEntity(0)._1 == LinkToMember(ref, ref.inTemplate),
+ doc._method(method).valueParams(0)(0).resultType.refEntity(0)._1 + " == LinkToMember(" + ref.qualifiedName + ") (in " + doc + "." + method + ")")
+ }
+ }
+ testUsage(test2._object("UserObject"))
+ testUsage(test2._class("UserClass"))
+ testUsage(test2._trait("UserTrait"))
+
+
+ // test3
+ val test3 = base._package("test3")
+ val foo = test3._method("foo")
+ assert(foo.valueParams(0)(0).resultType.name == "T",
+ foo.valueParams(0)(0).resultType.name + " == T")
+ val bar = test3._method("bar")
+ assert(bar.valueParams(0)(0).resultType.name == "(AnyRef { type Lambda[X] <: Either[A,X] })#Lambda[String]",
+ bar.valueParams(0)(0).resultType.name + " == (AnyRef { type Lambda[X] <: Either[A,X] })#Lambda[String]")
+ }
+}
diff --git a/test/scaladoc/run/SI-3448.check b/test/scaladoc/run/SI-3448.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-3448.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-3448.scala b/test/scaladoc/run/SI-3448.scala
new file mode 100644
index 0000000..a2d3f59
--- /dev/null
+++ b/test/scaladoc/run/SI-3448.scala
@@ -0,0 +1,38 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ // Working around the fact that usecases have the form Coll[T] and not Coll[T, U], as required by Map
+ override def code = """
+ /**
+ * @define Coll C[T]
+ */
+ class C[T] {
+ /**
+ * @usecase def foo[T]: $Coll[T]
+ */
+ def foo[T: Numeric]: C[T]
+ }
+
+
+ /**
+ * @define Coll D1[T]
+ */
+ class D[U, T] extends C[T] {
+ protected type D1[Z] = D[U, Z]
+ }
+ """
+
+ // no need for special settings
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // just need to check the member exists, access methods will throw an error if there's a problem
+ assert(rootPackage._class("D")._method("foo").resultType.name == "D[U, T]",
+ rootPackage._class("D")._method("foo").resultType.name + " == D[U, T]")
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-3484.check b/test/scaladoc/run/SI-3484.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-3484.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-3484.scala b/test/scaladoc/run/SI-3484.scala
new file mode 100644
index 0000000..297aebe
--- /dev/null
+++ b/test/scaladoc/run/SI-3484.scala
@@ -0,0 +1,52 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ class cbf[A, B, C]
+
+ /**
+ * @define Coll Collection
+ * @define bfreturn $Coll
+ */
+ class Collection[A] {
+ /** What map does...
+ *
+ * $bfreturn
+ * @usecase def map[B](f: A => B): $bfreturn[B]
+ *
+ */
+ def map[B, That](f: A => B)(implicit fact: cbf[Collection[A], B, That]) =
+ null
+ }
+
+ /**
+ * @define b John
+ * @define a Mister $b
+ */
+ class SR704 {
+ /**
+ * Hello $a.
+ */
+ def foo = 123
+ }
+ """
+
+ // diagrams must be started. In case there's an error with dot, it should not report anything
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // check correct expansion of the use case signature
+ val map = rootPackage._class("Collection")._method("map")
+ assert(map.resultType.name == "Collection[B]", map.resultType.name + " == Traversable[B]")
+
+ val foo = rootPackage._class("SR704")._method("foo")
+ assert(extractCommentText(foo.comment.get).contains("Hello Mister John."),
+ extractCommentText(foo.comment.get) + ".contains(Hello Mister John.)")
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-4324.check b/test/scaladoc/run/SI-4324.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-4324.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-4324.scala b/test/scaladoc/run/SI-4324.scala
new file mode 100644
index 0000000..686a133
--- /dev/null
+++ b/test/scaladoc/run/SI-4324.scala
@@ -0,0 +1,24 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ case class Test4324(arg11: String, arg12: Int)(arg21: String, arg22: Int)(arg31: Int, arg32: String)
+ """
+
+ // no need for special settings
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // just need to check the member exists, access methods will throw an error if there's a problem
+ rootPackage._class("Test4324").asInstanceOf[Class].valueParams match {
+ case List(List(arg11, arg12), List(arg21, arg22), List(arg31, arg32)) => //yeeey, do nothing
+ case other =>
+ assert(false, "Incorrect valueParams generated: " + other + " instead of (arg11, arg12)(arg21, arg22)(arg31, arg32)")
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-4360.check b/test/scaladoc/run/SI-4360.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-4360.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-4360.scala b/test/scaladoc/run/SI-4360.scala
new file mode 100644
index 0000000..3abc61c
--- /dev/null
+++ b/test/scaladoc/run/SI-4360.scala
@@ -0,0 +1,48 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def resourceFile = "SI-4360.scala"
+
+ // no need for special settings
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // just need to check the member exists, access methods will throw an error if there's a problem
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("prefix")
+
+ val TEST = base._package("pack1")._package("c")._class("TEST")
+ val fooCA = TEST._method("fooCA")
+ val fooCB = TEST._method("fooCB")
+ val fooCS = TEST._method("fooCS")
+ val fooCL = TEST._method("fooCL")
+ val fooPA = TEST._method("fooPA")
+ val fooPB = TEST._method("fooPB")
+ val fooPC = TEST._method("fooPC")
+
+ val expected = List(
+ (fooCA, "Z", 1),
+ (fooCB, "B.Z", 1),
+ (fooCS, "pack2.Z.Z", 1),
+ (fooCL, "L.Z", 1),
+ (fooPA, "a.C", 1),
+ (fooPB, "b.C", 1),
+ (fooPC, "C", 1)
+ )
+
+ for ((method, name, refs) <- expected) {
+ assert(method.valueParams(0)(0).resultType.name == name,
+ method.valueParams(0)(0).resultType.name + " == " + name + " (in " + method.qualifiedName + ")")
+ assert(method.valueParams(0)(0).resultType.refEntity.size == refs,
+ method.valueParams(0)(0).resultType.refEntity.size + " == " + refs + " (in " + method.qualifiedName + ")")
+ }
+
+ val A = base._package("pack1")._package("c")._class("A")
+ assert(A.linearizationTypes(0).name == "pack1.A", A.linearizationTypes(0).name + " == pack1.A")
+ assert(A.linearizationTypes(0).refEntity.size == 1, A.linearizationTypes(0).refEntity.size + " == 1")
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-4676.check b/test/scaladoc/run/SI-4676.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-4676.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-4676.scala b/test/scaladoc/run/SI-4676.scala
new file mode 100644
index 0000000..b83a59a
--- /dev/null
+++ b/test/scaladoc/run/SI-4676.scala
@@ -0,0 +1,26 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ class SI_4676 {
+ type SS = (String,String)
+ def x(ss: SS): Int = 3
+ }
+ class cbf[A, B, C]
+ """
+
+ // diagrams must be started. In case there's an error with dot, it should not report anything
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // check correct expansion of the use case signature
+ val x = rootPackage._class("SI_4676")._method("x")
+ assert(x.valueParams(0)(0).resultType.name == "(String, String)", "parameter ss of method x has type (String, String")
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-4887.check b/test/scaladoc/run/SI-4887.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-4887.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-4887.scala b/test/scaladoc/run/SI-4887.scala
new file mode 100644
index 0000000..af83344
--- /dev/null
+++ b/test/scaladoc/run/SI-4887.scala
@@ -0,0 +1,46 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc.existentials {
+ import language.higherKinds
+ import language.existentials
+
+ class X[T, U, V]
+
+ trait TEST {
+ type T
+ type U
+ type A
+ def foo1(x: X[T, U, _]) = 3
+ def foo2(x: X[Z[_], U, z.type] forSome {type Z[_] <: { def z: String }; val z: Z[_ <: Int]}) = 4
+ def foo3(x: X[Z, Z, V] forSome { type Z <: T; type V <: T }) = 6
+ }
+ }
+ """
+
+ // no need for special settings
+ def scaladocSettings = "-feature"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("existentials")
+ val TEST = base._trait("TEST")
+
+ val foo1 = TEST._method("foo1")
+ assert(foo1.valueParams(0)(0).resultType.name == "X[T, U, _]",
+ foo1.valueParams(0)(0).resultType.name + " == X[T, U, _]")
+
+ val foo2 = TEST._method("foo2")
+ assert(foo2.valueParams(0)(0).resultType.name == "X[Z[_], U, _ <: [_]AnyRef { def z: String } with Singleton]",
+ foo2.valueParams(0)(0).resultType.name + " == X[Z[_], U, _ <: [_]AnyRef { def z: String } with Singleton]")
+
+ val foo3 = TEST._method("foo3")
+ assert(foo3.valueParams(0)(0).resultType.name == "X[Z, Z, V] forSome {type Z <: T, type V <: T}",
+ foo3.valueParams(0)(0).resultType.name + " == X[Z, Z, V] forSome {type Z <: T, type V <: T}")
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-5235.check b/test/scaladoc/run/SI-5235.check
new file mode 100644
index 0000000..d9acfd0
--- /dev/null
+++ b/test/scaladoc/run/SI-5235.check
@@ -0,0 +1,4 @@
+newSource:10: warning: Could not find the type $Coll points to while expanding it for the usecase signature of method reverse in trait SpecificColl.In this context, $Coll = "BullSh".
+ * @usecase def reverse(): $Coll
+ ^
+Done.
diff --git a/test/scaladoc/run/SI-5235.scala b/test/scaladoc/run/SI-5235.scala
new file mode 100644
index 0000000..c6b7922
--- /dev/null
+++ b/test/scaladoc/run/SI-5235.scala
@@ -0,0 +1,88 @@
+import scala.tools.nsc.doc.base._
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc.SI5235 {
+ trait Builder[From, To]
+
+ /**
+ * @define Coll `GenericColl`
+ */
+ class GenericColl {
+ /**
+ * @usecase def reverse(): $Coll
+ * Returns the reversed $Coll.
+ */
+ def reverse[T](implicit something: Builder[GenericColl, T]): T
+ def foo1: GenericColl = ???
+ }
+
+ /** Nooo, don't point to this */
+ trait MyCollection
+
+ package specific {
+ /**
+ * @define Coll `BullSh`
+ */
+ trait SpecificColl extends GenericColl {
+ def foo2: SpecificColl = ???
+ }
+ }
+
+ package mycoll {
+ /**
+ * @define Coll `mycoll.MyCollection`
+ */
+ class MyCollection extends specific.SpecificColl {
+ def foo3: MyCollection = ???
+ }
+ }
+ }
+ """
+
+ // diagrams must be started. In case there's an error with dot, it should not report anything
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("SI5235")
+
+ val GenericColl = base._class("GenericColl")
+ val SpecificColl = base._package("specific")._trait("SpecificColl")
+ val MyCollection = base._package("mycoll")._class("MyCollection")
+
+ // check comment text
+ val gcComment = extractCommentText(GenericColl._method("reverse").comment.get)
+ val scComment = extractCommentText(SpecificColl._method("reverse").comment.get)
+ val mcComment = extractCommentText(MyCollection._method("reverse").comment.get)
+ assert(gcComment.contains("Returns the reversed GenericColl."),
+ gcComment + ".contains(\"Returns the reversed GenericColl.\")")
+ assert(scComment.contains("Returns the reversed BullSh."),
+ scComment + ".contains(\"Returns the reversed BullSh.\")")
+ assert(mcComment.contains("Returns the reversed mycoll.MyCollection."),
+ mcComment + ".contains(\"Returns the reversed mycoll.MyCollection.\")")
+
+ // check signatures
+ val gcReverse = GenericColl._method("reverse")
+ val scReverse = SpecificColl._method("reverse")
+ val mcReverse = MyCollection._method("reverse")
+ val gcReverseType = gcReverse.resultType
+ val scReverseType = scReverse.resultType
+ val mcReverseType = mcReverse.resultType
+ assert(gcReverseType.name == "GenericColl", gcReverseType.name + " == GenericColl")
+ assert(scReverseType.name == "BullSh", scReverseType.name + " == BullSh")
+ assert(mcReverseType.name == "MyCollection",mcReverseType.name + " == MyCollection")
+ assert(gcReverseType.refEntity(0)._1 == LinkToTpl(GenericColl),
+ gcReverse.qualifiedName + "'s return type has a link to " + GenericColl.qualifiedName)
+ assert(scReverseType.refEntity(0)._1 == Tooltip("BullSh"),
+ scReverseType.refEntity(0)._1 + " == Tooltip(\"BullSh\")")
+ assert(mcReverseType.refEntity(0)._1 == LinkToTpl(MyCollection),
+ mcReverse.qualifiedName + "'s return type has a link to " + MyCollection.qualifiedName)
+ }
+}
diff --git a/test/scaladoc/run/SI-5373.check b/test/scaladoc/run/SI-5373.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-5373.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-5373.scala b/test/scaladoc/run/SI-5373.scala
new file mode 100644
index 0000000..65cf8ba
--- /dev/null
+++ b/test/scaladoc/run/SI-5373.scala
@@ -0,0 +1,34 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ import scala.annotation.bridge
+
+ package scala.test {
+
+ trait A {
+ def foo = ()
+ }
+
+ trait B extends A {
+ @bridge()
+ def foo = ()
+ }
+
+ class C extends B
+ }
+ """
+
+ // no need for special settings
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // just need to check the member exists, access methods will throw an error if there's a problem
+ rootPackage._package("scala")._package("test")._class("C")._method("foo")
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-5533.check b/test/scaladoc/run/SI-5533.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-5533.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-5533.scala b/test/scaladoc/run/SI-5533.scala
new file mode 100644
index 0000000..989d9aa
--- /dev/null
+++ b/test/scaladoc/run/SI-5533.scala
@@ -0,0 +1,39 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ // Working around the fact that usecases have the form Coll[T] and not Coll[T, U], as required by Map
+ override def code = """
+ package a {
+ class A { class Z }
+ class C extends b.B { class X extends Y }
+ }
+
+ package b {
+ /** @contentDiagram */
+ class B extends a.A { class Y extends Z }
+ /** @contentDiagram */
+ class D extends a.C { class V extends X }
+ }
+ """
+
+ // no need for special settings
+ def scaladocSettings = "-diagrams -skip-packages a"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // just need to check the member exists, access methods will throw an error if there's a problem
+ assert(!rootPackage.templates.exists(_.name == "a"), "package a should not exist in the root package")
+ assert(rootPackage.templates.exists(_.name == "b"), "package b should exist in the root package")
+ val b = rootPackage._package("b")
+ val B = b._class("B")
+ val D = b._class("D")
+ testDiagram(B, B.contentDiagram, 2, 1)
+ // unfortunately not all packages, as B1 extends A.this.A1 and it gets the wrong member -- maybe we should model
+ // things as we do for symbols?
+ testDiagram(D, D.contentDiagram, 3, 2)
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-5780.check b/test/scaladoc/run/SI-5780.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-5780.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-5780.scala b/test/scaladoc/run/SI-5780.scala
new file mode 100644
index 0000000..809567f
--- /dev/null
+++ b/test/scaladoc/run/SI-5780.scala
@@ -0,0 +1,25 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc.SI5780
+
+ object `package` { def foo: AnyRef = "hello"; class T /* so the package is not dropped */ }
+ """
+
+ // diagrams must be started. In case there's an error with dot, it should not report anything
+ def scaladocSettings = "-doc-root-content " + resourcePath + "/doc-root"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val foo = rootPackage._package("scala")._package("test")._package("scaladoc")._package("SI5780")._method("foo")
+ // check that AnyRef is properly linked to its template:
+ assert(foo.resultType.name == "AnyRef", foo.resultType.name + " == AnyRef")
+ assert(foo.resultType.refEntity.size == 1, foo.resultType.refEntity + ".size == 1")
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-5784.check b/test/scaladoc/run/SI-5784.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-5784.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-5784.scala b/test/scaladoc/run/SI-5784.scala
new file mode 100644
index 0000000..318eb78
--- /dev/null
+++ b/test/scaladoc/run/SI-5784.scala
@@ -0,0 +1,44 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def resourceFile: String = "SI-5784.scala"
+
+ // no need for special settings
+ def scaladocSettings = "-diagrams"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val main = rootPackage._package("test")._package("templates")
+
+ val String = main._aliasTypeTpl("String")
+ assert(String.companion.isDefined, "test.templates.String should have a pseudo-companion object")
+
+ val Base = main._trait("Base")
+ assert(Base.members.filter(_.inDefinitionTemplates.head == Base).length == 5, Base.members.filter(_.inDefinitionTemplates.head == Base).length + " == 5")
+ assert(Base.members.collect{case d: DocTemplateEntity => d}.length == 4, Base.members.collect{case d: DocTemplateEntity => d}.length == 4)
+ testDiagram(Base, Base.contentDiagram, 2, 1)
+
+ val BaseT = Base._absTypeTpl("T")
+ val Foo = Base._trait("Foo")
+ assert(BaseT.members.filter(_.inDefinitionTemplates.head == Base).length == 0, BaseT.members.filter(_.inDefinitionTemplates.head == Base).length + " == 0")
+ assert(BaseT.members.map(_.name).sorted == Foo.members.map(_.name).sorted, BaseT.members.map(_.name).sorted + " == " + Foo.members.map(_.name).sorted)
+ assert(BaseT.companion.isDefined, "test.templates.Base.T should have a pseudo-companion object")
+ testDiagram(BaseT, BaseT.inheritanceDiagram, 2, 1)
+
+ val Api = main._trait("Api")
+ assert(Api.members.filter(_.inDefinitionTemplates.head == Api).length == 2, Api.members.filter(_.inDefinitionTemplates.head == Api).length + " == 2") // FooApi and override type T
+ assert(Api.members.collect{case d: DocTemplateEntity => d}.length == 5, Api.members.collect{case d: DocTemplateEntity => d}.length == 5)
+ testDiagram(Api, Api.contentDiagram, 3, 2)
+
+ val ApiT = Api._absTypeTpl("T")
+ val FooApi = Api._trait("FooApi")
+ assert(ApiT.members.filter(_.inDefinitionTemplates.head == Api).length == 0, ApiT.members.filter(_.inDefinitionTemplates.head == Api).length + " == 0")
+ assert(ApiT.members.map(_.name).sorted == FooApi.members.map(_.name).sorted, ApiT.members.map(_.name).sorted + " == " + FooApi.members.map(_.name).sorted)
+ assert(ApiT.companion.isDefined, "test.templates.Api.T should have a pseudo-companion object")
+ testDiagram(ApiT, ApiT.inheritanceDiagram, 2, 1)
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-5933.check b/test/scaladoc/run/SI-5933.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-5933.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-5933.scala b/test/scaladoc/run/SI-5933.scala
new file mode 100644
index 0000000..087116f
--- /dev/null
+++ b/test/scaladoc/run/SI-5933.scala
@@ -0,0 +1,43 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ // Test code
+ override def code = """
+ // This example should compile without errors, and the pattern match should be correctly displayed
+
+ import language.higherKinds
+
+ abstract class Base[M[_, _]] {
+ def foo[A, B]: M[(A, B), Any]
+ }
+
+ class Derived extends Base[PartialFunction] {
+ def foo[A, B] /*: PartialFunction[(A, B) => Any]*/ = { case (a, b) => (a: A, b: B) }
+ }
+
+ object Test {
+ lazy val lx = { println("hello"); 3 }
+ def test1(x: Int = lx) = ???
+ def test2(x: Int = lx match { case 0 => 1; case 3 => 4 }) = ???
+ }
+ """
+
+ // no need for special settings
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val Test = rootPackage._object("Test")
+ val test1 = Test._method("test1")
+ val test2 = Test._method("test2")
+
+ def assertEqual(s1: String, s2: String) = assert(s1 == s2, s1 + " == " + s2)
+
+ assertEqual(test1.valueParams(0)(0).defaultValue.get.expression, "lx")
+ assertEqual(test2.valueParams(0)(0).defaultValue.get.expression, "lx match { case 0 => 1; case 3 => 4 }")
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-5965.check b/test/scaladoc/run/SI-5965.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-5965.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-5965.scala b/test/scaladoc/run/SI-5965.scala
new file mode 100644
index 0000000..6f4540d
--- /dev/null
+++ b/test/scaladoc/run/SI-5965.scala
@@ -0,0 +1,24 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ abstract class Param
+ class Test
+ object Test {
+ def apply(i: Int): Test = new Test
+ def apply(i: Int, p: Param = new Param { }): Test = new Test
+ }
+ """
+
+ // no need for special settings
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ import access._
+
+ // just need to make sure the model exists
+ val base = rootPackage._object("Test")
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-6017.check b/test/scaladoc/run/SI-6017.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-6017.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-6017.scala b/test/scaladoc/run/SI-6017.scala
new file mode 100644
index 0000000..9951534
--- /dev/null
+++ b/test/scaladoc/run/SI-6017.scala
@@ -0,0 +1,28 @@
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.html.page.{Index, ReferenceIndex}
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def scaladocSettings = ""
+ override def code = """
+ class STAR
+ class Star
+ """
+
+ def testModel(rootPackage: Package) {
+ model match {
+ case Some(universe) => {
+ val index = IndexModelFactory.makeIndex(universe)
+ // Because "STAR" and "Star" are different
+ assert(index.firstLetterIndex('s').keys.toSeq.length == 2)
+
+ val indexPage = new Index(universe, index)
+ val letters = indexPage.letters
+ assert(letters.length > 1)
+ assert(letters(0).toString == "<span>#</span>")
+ }
+ case _ => assert(false)
+ }
+ }
+}
diff --git a/test/scaladoc/run/SI-6140.check b/test/scaladoc/run/SI-6140.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-6140.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-6140.scala b/test/scaladoc/run/SI-6140.scala
new file mode 100644
index 0000000..4bb9a4d
--- /dev/null
+++ b/test/scaladoc/run/SI-6140.scala
@@ -0,0 +1,18 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ // This caused an infinite recursion in method inline() in CommentFactory.scala
+ override def code = """
+ /** {{ code? }} */
+ class C
+ """
+
+ // no need for special settings
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ // if it doesn't hang, the test is passed
+ }
+}
diff --git a/test/scaladoc/run/SI-6509.check b/test/scaladoc/run/SI-6509.check
new file mode 100644
index 0000000..3925a0d
--- /dev/null
+++ b/test/scaladoc/run/SI-6509.check
@@ -0,0 +1 @@
+Done.
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-6509.scala b/test/scaladoc/run/SI-6509.scala
new file mode 100644
index 0000000..3857949
--- /dev/null
+++ b/test/scaladoc/run/SI-6509.scala
@@ -0,0 +1,30 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def resourceFile: String = "SI-6509.scala"
+
+ // no need for special settings
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val main = rootPackage._package("test")._package("scaladoc")._package("template")._package("owners")
+ val X = main._trait("X")
+ val Y = main._trait("Y")
+ val Z = main._trait("Z")
+ val T = main._trait("T")
+
+ def checkTemplateOwner(d: DocTemplateEntity) =
+ for (mbr <- List("Symbol", "TypeSymbol", "TermSymbol", "MethodSymbol")) {
+ val tpl = d._absTypeTpl(mbr).inTemplate
+ assert(tpl == X, tpl + " == X")
+ }
+
+ for (tpl <- List(X, Y, Z, T))
+ checkTemplateOwner(tpl)
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-6511.check b/test/scaladoc/run/SI-6511.check
new file mode 100644
index 0000000..3925a0d
--- /dev/null
+++ b/test/scaladoc/run/SI-6511.check
@@ -0,0 +1 @@
+Done.
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-6511.scala b/test/scaladoc/run/SI-6511.scala
new file mode 100644
index 0000000..cc950a9
--- /dev/null
+++ b/test/scaladoc/run/SI-6511.scala
@@ -0,0 +1,22 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def resourceFile: String = "SI-6511.scala"
+
+ // no need for special settings
+ def scaladocSettings = "-diagrams"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val main = rootPackage._package("test")._package("scaladoc")._package("template")._package("diagrams")
+ val X = main._trait("X")
+ val Y = main._trait("Y")
+
+ testDiagram(X, X.contentDiagram, nodes = 4, edges = 3)
+ testDiagram(Y, Y.contentDiagram, nodes = 4, edges = 3)
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-6580.check b/test/scaladoc/run/SI-6580.check
new file mode 100644
index 0000000..2fb6ec3
--- /dev/null
+++ b/test/scaladoc/run/SI-6580.check
@@ -0,0 +1,11 @@
+Chain(List(Chain(List(Text(Here z(1) is defined as follows:), Text(
+), HtmlTag(<br>), Text(
+), Text( ), HtmlTag(<img src='http://example.com/fig1.png'>), Text(
+), HtmlTag(<br>), Text(
+), Text(plus z(1) times), Text(
+), HtmlTag(<br>), Text(
+), Text( ), HtmlTag(<img src='http://example.com/fig2.png'>), Text(
+), HtmlTag(<br>), Text(
+), Text(equals QL of something
+)))))
+Done.
diff --git a/test/scaladoc/run/SI-6580.scala b/test/scaladoc/run/SI-6580.scala
new file mode 100644
index 0000000..c544138
--- /dev/null
+++ b/test/scaladoc/run/SI-6580.scala
@@ -0,0 +1,32 @@
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.html.page.{Index, ReferenceIndex}
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def scaladocSettings = ""
+ override def code = """
+
+ object Test {
+ /** Here z(1) is defined as follows:
+ * <br>
+ * <img src='http://example.com/fig1.png'>
+ * <br>
+ * plus z(1) times
+ * <br>
+ * <img src='http://example.com/fig2.png'>
+ * <br>
+ * equals QL of something
+ */
+ def f = 1
+ }
+
+ """
+
+ def testModel(rootPackage: Package) {
+ import access._
+
+ val f = rootPackage._object("Test")._method("f")
+ println(f.comment.get.short)
+ }
+}
diff --git a/test/scaladoc/run/SI-6715.check b/test/scaladoc/run/SI-6715.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-6715.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-6715.scala b/test/scaladoc/run/SI-6715.scala
new file mode 100644
index 0000000..92d3376
--- /dev/null
+++ b/test/scaladoc/run/SI-6715.scala
@@ -0,0 +1,15 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ def scaladocSettings = ""
+
+ override def code = "object A { def $$ = 123 }"
+
+ def testModel(rootPackage: Package) = {
+ import access._
+
+ val method = rootPackage._object("A")._method("$$")
+ assert(method != null)
+ }
+}
diff --git a/test/scaladoc/run/SI-6812.check b/test/scaladoc/run/SI-6812.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/SI-6812.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-6812.scala b/test/scaladoc/run/SI-6812.scala
new file mode 100644
index 0000000..fbd9588
--- /dev/null
+++ b/test/scaladoc/run/SI-6812.scala
@@ -0,0 +1,24 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+import language._
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ import scala.reflect.macros.Context
+ import language.experimental.macros
+
+ object Macros {
+ def impl(c: Context) = c.literalUnit
+ def foo = macro impl
+ }
+
+ class C {
+ def bar = Macros.foo
+ }
+ """
+
+ def scaladocSettings = ""
+ override def extraSettings = super.extraSettings + " -Ymacro-no-expand"
+ def testModel(root: Package) = ()
+}
diff --git a/test/scaladoc/run/SI-7367.check b/test/scaladoc/run/SI-7367.check
new file mode 100755
index 0000000..3925a0d
--- /dev/null
+++ b/test/scaladoc/run/SI-7367.check
@@ -0,0 +1 @@
+Done.
\ No newline at end of file
diff --git a/test/scaladoc/run/SI-7367.scala b/test/scaladoc/run/SI-7367.scala
new file mode 100755
index 0000000..6e5a317
--- /dev/null
+++ b/test/scaladoc/run/SI-7367.scala
@@ -0,0 +1,25 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def code = """
+ class annot() extends annotation.StaticAnnotation {
+ def this(a: Any) = this()
+ }
+
+ @annot(0)
+ class B
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = {
+ import access._
+ val annotations = root._class("B").annotations
+ assert(annotations.size == 1)
+ assert(annotations(0).annotationClass == root._class("annot"))
+ val args = annotations(0).arguments
+ assert(args.size == 1)
+ assert(args(0).value.expression == "0")
+ }
+}
diff --git a/test/scaladoc/run/diagrams-base.check b/test/scaladoc/run/diagrams-base.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/diagrams-base.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/diagrams-base.scala b/test/scaladoc/run/diagrams-base.scala
new file mode 100644
index 0000000..b7aeed5
--- /dev/null
+++ b/test/scaladoc/run/diagrams-base.scala
@@ -0,0 +1,73 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc.diagrams
+
+ import language.implicitConversions
+
+ trait A
+ trait B
+ trait C
+ class E extends A with B with C
+ object E { implicit def eToT(e: E) = new T }
+
+ class F extends E
+ class G extends E
+ private class H extends E /* since it's private, it won't go into the diagram */
+ class T { def t = true }
+
+ class X
+ object X { implicit def xToE(x: X) = new E}
+ class Y extends X
+ class Z
+ object Z { implicit def zToE(z: Z) = new E}
+ """
+
+ // diagrams must be started. In case there's an error with dot, it should not report anything
+ def scaladocSettings = "-diagrams -implicits"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("diagrams")
+ val E = base._class("E")
+ val diag = E.inheritanceDiagram.get
+
+ // there must be a single this node
+ assert(diag.nodes.filter(_.isThisNode).length == 1)
+
+ // 1. check class E diagram
+ assert(diag.isInheritanceDiagram)
+
+ val (incoming, outgoing) = diag.edges.partition(!_._1.isThisNode)
+ assert(incoming.length == 5)
+ assert(outgoing.head._2.length == 4)
+
+ val (outgoingSuperclass, outgoingImplicit) = outgoing.head._2.partition(_.isNormalNode)
+ assert(outgoingSuperclass.length == 3)
+ assert(outgoingImplicit.length == 1)
+
+ val (incomingSubclass, incomingImplicit) = incoming.partition(_._1.isNormalNode)
+ assert(incomingSubclass.length == 2)
+ assert(incomingImplicit.length == 3)
+
+ val classDiag = diag.asInstanceOf[InheritanceDiagram]
+ assert(classDiag.incomingImplicits.length == 3)
+ assert(classDiag.outgoingImplicits.length == 1)
+
+ // 2. check package diagram
+ // NOTE: Z should be eliminated because it's isolated
+ val packDiag = base.contentDiagram.get
+ assert(packDiag.isContentDiagram)
+ assert(packDiag.nodes.length == 8) // check singular object removal
+ assert(packDiag.edges.length == 4)
+ assert(packDiag.edges.foldLeft(0)(_ + _._2.length) == 6)
+
+ // TODO: Should check numbering
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/diagrams-determinism.check b/test/scaladoc/run/diagrams-determinism.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/diagrams-determinism.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/diagrams-determinism.scala b/test/scaladoc/run/diagrams-determinism.scala
new file mode 100644
index 0000000..2b6f8ee
--- /dev/null
+++ b/test/scaladoc/run/diagrams-determinism.scala
@@ -0,0 +1,67 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc.diagrams
+
+ trait A
+ trait B extends A
+ trait C extends B
+ trait D extends C with A
+ trait E extends C with A with D
+ """
+
+ // diagrams must be started. In case there's an error with dot, it should not report anything
+ def scaladocSettings = "-diagrams -implicits"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ def diagramString(rootPackage: Package) = {
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("diagrams")
+ val A = base._trait("A")
+ val B = base._trait("B")
+ val C = base._trait("C")
+ val D = base._trait("D")
+ val E = base._trait("E")
+
+ base.contentDiagram.get.toString + "\n" +
+ A.inheritanceDiagram.get.toString + "\n" +
+ B.inheritanceDiagram.get.toString + "\n" +
+ C.inheritanceDiagram.get.toString + "\n" +
+ D.inheritanceDiagram.get.toString + "\n" +
+ E.inheritanceDiagram.get.toString
+ }
+
+ // 1. check that several runs produce the same output
+ val run0 = diagramString(rootPackage)
+ val run1 = diagramString(model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")}).rootPackage)
+ val run2 = diagramString(model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")}).rootPackage)
+ val run3 = diagramString(model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")}).rootPackage)
+
+ // any variance in the order of the diagram elements should crash the following tests:
+ assert(run0 == run1)
+ assert(run1 == run2)
+ assert(run2 == run3)
+
+ // 2. check the order in the diagram: this node, subclasses, and then implicit conversions
+ def assertRightOrder(template: DocTemplateEntity, diagram: Diagram) =
+ for ((node, subclasses) <- diagram.edges)
+ assert(subclasses == subclasses.filter(_.isThisNode) :::
+ subclasses.filter(node => node.isNormalNode || node.isOutsideNode) :::
+ subclasses.filter(_.isImplicitNode),
+ "Diagram order for " + template + ": " + subclasses)
+
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("diagrams")
+ assertRightOrder(base, base.contentDiagram.get)
+ assertRightOrder(base._trait("A"), base._trait("A").inheritanceDiagram.get)
+ assertRightOrder(base._trait("B"), base._trait("B").inheritanceDiagram.get)
+ assertRightOrder(base._trait("C"), base._trait("C").inheritanceDiagram.get)
+ assertRightOrder(base._trait("D"), base._trait("D").inheritanceDiagram.get)
+ assertRightOrder(base._trait("E"), base._trait("E").inheritanceDiagram.get)
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/diagrams-filtering.check b/test/scaladoc/run/diagrams-filtering.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/diagrams-filtering.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/diagrams-filtering.scala b/test/scaladoc/run/diagrams-filtering.scala
new file mode 100644
index 0000000..54e3e9a
--- /dev/null
+++ b/test/scaladoc/run/diagrams-filtering.scala
@@ -0,0 +1,93 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc
+
+ /** @contentDiagram hideNodes "scala.test.*.A" "java.*", hideEdges ("*G" -> "*E") */
+ package object diagrams {
+ def foo = 4
+ }
+
+ package diagrams {
+ import language.implicitConversions
+
+ /** @inheritanceDiagram hideIncomingImplicits, hideNodes "*E" */
+ trait A
+ trait AA extends A
+ trait B
+ trait AAA extends B
+
+ /** @inheritanceDiagram hideDiagram */
+ trait C
+ trait AAAA extends C
+
+ /** @inheritanceDiagram hideEdges("*E" -> "*A") */
+ class E extends A with B with C
+ class F extends E
+ /** @inheritanceDiagram hideNodes "*G" "G" */
+ class G extends E
+ private class H extends E /* since it's private, it won't go into the diagram */
+ class T { def t = true }
+ object E {
+ implicit def eToT(e: E) = new T
+ implicit def eToA(e: E) = new A { }
+ }
+ }
+ """
+
+ // diagrams must be started. In case there's an error with dot, it should not report anything
+ def scaladocSettings = "-diagrams -implicits"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // base package
+ // Assert we have 7 nodes and 6 edges
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("diagrams")
+ val packDiag = base.contentDiagram.get
+ assert(packDiag.nodes.length == 6)
+ assert(packDiag.edges.map(_._2.length).sum == 5)
+
+ // trait A
+ // Assert we have just 3 nodes and 2 edges
+ val A = base._trait("A")
+ val ADiag = A.inheritanceDiagram.get
+ assert(ADiag.nodes.length == 3)
+ assert(ADiag.edges.map(_._2.length).sum == 2)
+
+ // trait C
+ val C = base._trait("C")
+ assert(!C.inheritanceDiagram.isDefined)
+
+ // trait G
+ val G = base._class("G")
+ assert(!G.inheritanceDiagram.isDefined)
+
+ // trait E
+ val E = base._class("E")
+ val EDiag = E.inheritanceDiagram.get
+
+ // there must be a single this node
+ assert(EDiag.nodes.filter(_.isThisNode).length == 1)
+
+ // 1. check class E diagram
+ val (incoming, outgoing) = EDiag.edges.partition(!_._1.isThisNode)
+ assert(incoming.length == 2) // F and G
+ assert(outgoing.head._2.length == 3) // B, C and T
+
+ val (outgoingSuperclass, outgoingImplicit) = outgoing.head._2.partition(_.isNormalNode)
+ assert(outgoingSuperclass.length == 2) // B and C
+ assert(outgoingImplicit.length == 1, outgoingImplicit) // T
+
+ val (incomingSubclass, incomingImplicit) = incoming.partition(_._1.isNormalNode)
+ assert(incomingSubclass.length == 2) // F and G
+ assert(incomingImplicit.length == 0)
+
+ assert(EDiag.nodes.length == 6) // E, B and C, F and G and the implicit conversion to T
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/diagrams-inherited-nodes.check b/test/scaladoc/run/diagrams-inherited-nodes.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/diagrams-inherited-nodes.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/diagrams-inherited-nodes.scala b/test/scaladoc/run/diagrams-inherited-nodes.scala
new file mode 100644
index 0000000..8ac382a
--- /dev/null
+++ b/test/scaladoc/run/diagrams-inherited-nodes.scala
@@ -0,0 +1,69 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc.diagrams.inherited.nodes {
+
+ /** @contentDiagram
+ * @inheritanceDiagram hideDiagram */
+ trait T1 {
+ trait A1
+ trait A2 extends A1
+ trait A3 extends A2
+ }
+
+ /** @contentDiagram
+ * @inheritanceDiagram hideDiagram */
+ trait T2 extends T1 {
+ trait B1 extends A1
+ trait B2 extends A2 with B1
+ trait B3 extends A3 with B2
+ }
+
+ /** @contentDiagram
+ * @inheritanceDiagram hideDiagram */
+ trait T3 {
+ self: T1 with T2 =>
+ trait C1 extends B1
+ trait C2 extends B2 with C1
+ trait C3 extends B3 with C2
+ }
+
+ /** @contentDiagram
+ * @inheritanceDiagram hideDiagram */
+ trait T4 extends T3 with T2 with T1 {
+ trait D1 extends C1
+ trait D2 extends C2 with D1
+ trait D3 extends C3 with D2
+ }
+ }
+ """
+
+ // diagrams must be started. In case there's an error with dot, it should not report anything
+ def scaladocSettings = "-diagrams"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // base package
+ // Assert we have 7 nodes and 6 edges
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("diagrams")._package("inherited")._package("nodes")
+
+ def checkDiagram(t: String, nodes: Int, edges: Int) = {
+ // trait T1
+ val T = base._trait(t)
+ val TDiag = T.contentDiagram.get
+ assert(TDiag.nodes.length == nodes, t + ": " + TDiag.nodes + ".length == " + nodes)
+ assert(TDiag.edges.map(_._2.length).sum == edges, t + ": " + TDiag.edges.mkString("List(\n", ",\n", "\n)") + ".map(_._2.length).sum == " + edges)
+ }
+
+ checkDiagram("T1", 3, 2)
+ checkDiagram("T2", 6, 7)
+ checkDiagram("T3", 3, 2)
+ checkDiagram("T4", 12, 17)
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/groups.check b/test/scaladoc/run/groups.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/groups.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/groups.scala b/test/scaladoc/run/groups.scala
new file mode 100644
index 0000000..c9e4a86
--- /dev/null
+++ b/test/scaladoc/run/groups.scala
@@ -0,0 +1,127 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package test.scaladoc {
+
+ /** @groupname Z From owner chain */
+ package object `groups`
+
+ package groups {
+ /**
+ * The trait A
+ * @groupdesc A Group A is the group that contains functions starting with f
+ * For example:
+ * {{{
+ * this is an example
+ * }}}
+ * @groupdesc B Group B is the group that contains functions starting with b
+ * @groupname B Group B has a nice new name and a high priority
+ * @groupprio B -10
+ * @group Traits
+ * @note This is a note
+ */
+ trait A {
+ /** foo description
+ * @group A */
+ def foo = 1
+
+ /** bar description
+ * @group B */
+ def bar = 2
+ }
+
+ /** The trait B
+ * @group Traits
+ * @groupdesc C Group C is introduced by B
+ */
+ trait B {
+ /** baz descriptopn
+ * @group C */
+ def baz = 3
+ }
+
+ /** The class C which inherits from both A and B
+ * @group Classes
+ * @groupdesc B Look ma, I'm overriding group descriptions!!!
+ * @groupname B And names
+ */
+ class C extends A with B {
+ /** Oh noes, I lost my group -- or did I?!? */
+ override def baz = 4
+ }
+ }
+ }
+ """
+
+ // no need for special settings
+ def scaladocSettings = "-feature"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // just need to check the member exists, access methods will throw an error if there's a problem
+ val base = rootPackage._package("test")._package("scaladoc")._package("groups")
+
+ def checkGroup(mbr: MemberEntity, grp: String) =
+ assert(mbr.group == grp, "Incorrect group for " + mbr.qualifiedName + ": " + mbr.group + " instead of " + grp)
+
+ def checkGroupDesc(dtpl: DocTemplateEntity, grp: String, grpDesc: String) = {
+ assert(dtpl.groupDescription(grp).isDefined,
+ "Group description for " + grp + " not defined in " + dtpl.qualifiedName)
+ assert(extractCommentText(dtpl.groupDescription(grp).get).contains(grpDesc),
+ "Group description for " + grp + " in " + dtpl.qualifiedName + " does not contain \"" + grpDesc + "\": \"" +
+ extractCommentText(dtpl.groupDescription(grp).get) + "\"")
+ }
+
+ def checkGroupName(dtpl: DocTemplateEntity, grp: String, grpName: String) =
+ // TODO: See why we need trim here, we already do trimming in the CommentFactory
+ assert(dtpl.groupName(grp) == grpName,
+ "Group name for " + grp + " in " + dtpl.qualifiedName + " does not equal \"" + grpName + "\": \"" + dtpl.groupName(grp) + "\"")
+
+ def checkGroupPrio(dtpl: DocTemplateEntity, grp: String, grpPrio: Int) =
+ assert(dtpl.groupPriority(grp) == grpPrio,
+ "Group priority for " + grp + " in " + dtpl.qualifiedName + " does not equal " + grpPrio + ": " + dtpl.groupPriority(grp))
+
+
+ val A = base._trait("A")
+ val B = base._trait("B")
+ val C = base._class("C")
+ checkGroup(A, "Traits")
+ checkGroup(B, "Traits")
+ checkGroup(C, "Classes")
+ checkGroup(A._method("foo"), "A")
+ checkGroup(A._method("bar"), "B")
+ checkGroup(B._method("baz"), "C")
+ checkGroup(C._method("foo"), "A")
+ checkGroup(C._method("bar"), "B")
+ checkGroup(C._method("baz"), "C")
+
+ checkGroupDesc(A, "A", "Group A is the group that contains functions starting with f")
+ checkGroupName(A, "A", "A")
+ checkGroupPrio(A, "A", 0)
+ checkGroupDesc(A, "B", "Group B is the group that contains functions starting with b")
+ checkGroupName(A, "B", "Group B has a nice new name and a high priority")
+ checkGroupPrio(A, "B", -10)
+ checkGroupName(A, "Z", "From owner chain")
+
+ checkGroupDesc(B, "C", "Group C is introduced by B")
+ checkGroupName(B, "C", "C")
+ checkGroupPrio(B, "C", 0)
+ checkGroupName(B, "Z", "From owner chain")
+
+ checkGroupDesc(C, "A", "Group A is the group that contains functions starting with f")
+ checkGroupName(C, "A", "A")
+ checkGroupPrio(C, "A", 0)
+ checkGroupDesc(C, "B", "Look ma, I'm overriding group descriptions!!!")
+ checkGroupName(C, "B", "And names")
+ checkGroupPrio(C, "B", -10)
+ checkGroupDesc(C, "C", "Group C is introduced by B")
+ checkGroupName(C, "C", "C")
+ checkGroupPrio(C, "C", 0)
+ checkGroupName(C, "Z", "From owner chain")
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/implicits-ambiguating.check b/test/scaladoc/run/implicits-ambiguating.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/implicits-ambiguating.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/implicits-ambiguating.scala b/test/scaladoc/run/implicits-ambiguating.scala
new file mode 100644
index 0000000..05daf1f
--- /dev/null
+++ b/test/scaladoc/run/implicits-ambiguating.scala
@@ -0,0 +1,114 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ // test a file instead of a piece of code
+ override def resourceFile = "implicits-ambiguating-res.scala"
+
+ // start implicits
+ def scaladocSettings = "-implicits"
+
+ def testModel(root: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ def isAmbiguous(mbr: MemberEntity): Boolean =
+ mbr.byConversion.map(_.source.implicitsShadowing.get(mbr).map(_.isAmbiguous).getOrElse(false)).getOrElse(false)
+
+ // SEE THE test/resources/implicits-chaining-res.scala FOR THE EXPLANATION OF WHAT'S CHECKED HERE:
+ val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._package("ambiguating")
+ var conv1: ImplicitConversion = null
+ var conv2: ImplicitConversion = null
+
+//// class A ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val A = base._class("A")
+
+ conv1 = A._conversion(base._object("A").qualifiedName + ".AtoX")
+ conv2 = A._conversion(base._object("A").qualifiedName + ".AtoZ")
+ assert(conv1.members.length == 11)
+ assert(conv2.members.length == 11)
+ assert(conv1.constraints.length == 0)
+ assert(conv2.constraints.length == 0)
+
+ /** - conv1-5 should be ambiguous
+ * - conv6-7 should not be ambiguous
+ * - conv8 should be ambiguous
+ * - conv9 should be ambiguous
+ * - conv10 and conv11 should not be ambiguous */
+ def check1to9(cls: String): Unit = {
+ for (conv <- (1 to 5).map("conv" + _)) {
+ assert(isAmbiguous(conv1._member(conv)), cls + " - AtoX." + conv + " is ambiguous")
+ assert(isAmbiguous(conv2._member(conv)), cls + " - AtoZ." + conv + " is ambiguous")
+ }
+ for (conv <- (6 to 7).map("conv" + _)) {
+ assert(!isAmbiguous(conv1._member(conv)), cls + " - AtoX." + conv + " is not ambiguous")
+ assert(!isAmbiguous(conv2._member(conv)), cls + " - AtoZ." + conv + " is not ambiguous")
+ }
+ assert(isAmbiguous(conv1._member("conv8")), cls + " - AtoX.conv8 is ambiguous")
+ assert(isAmbiguous(conv2._member("conv8")), cls + " - AtoZ.conv8 is ambiguous")
+ assert(isAmbiguous(conv1._member("conv9")), cls + " - AtoX.conv9 is ambiguous")
+ assert(isAmbiguous(conv2._member("conv9")), cls + " - AtoZ.conv9 is ambiguous")
+ }
+ check1to9("A")
+ assert(!isAmbiguous(conv1._member("conv10")), "A - AtoX.conv10 is not ambiguous")
+ assert(!isAmbiguous(conv2._member("conv10")), "A - AtoZ.conv10 is not ambiguous")
+ assert(!isAmbiguous(conv1._member("conv11")), "A - AtoX.conv11 is not ambiguous")
+ assert(!isAmbiguous(conv2._member("conv11")), "A - AtoZ.conv11 is not ambiguous")
+
+//// class B ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val B = base._class("B")
+
+ conv1 = B._conversion(base._object("A").qualifiedName + ".AtoX")
+ conv2 = B._conversion(base._object("A").qualifiedName + ".AtoZ")
+ assert(conv1.members.length == 11)
+ assert(conv2.members.length == 11)
+ assert(conv1.constraints.length == 0)
+ assert(conv2.constraints.length == 0)
+
+ /** conv1-9 should be the same, conv10 should be ambiguous, conv11 should be okay */
+ check1to9("B")
+ assert(isAmbiguous(conv1._member("conv10")), "B - AtoX.conv10 is ambiguous")
+ assert(isAmbiguous(conv2._member("conv10")), "B - AtoZ.conv10 is ambiguous")
+ assert(!isAmbiguous(conv1._member("conv11")), "B - AtoX.conv11 is not ambiguous")
+ assert(!isAmbiguous(conv2._member("conv11")), "B - AtoZ.conv11 is not ambiguous")
+
+//// class C ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val C = base._class("C")
+
+ conv1 = C._conversion(base._object("A").qualifiedName + ".AtoX")
+ conv2 = C._conversion(base._object("A").qualifiedName + ".AtoZ")
+ assert(conv1.members.length == 11)
+ assert(conv2.members.length == 11)
+ assert(conv1.constraints.length == 0)
+ assert(conv2.constraints.length == 0)
+
+ /** conv1-9 should be the same, conv10 and conv11 should not be ambiguous */
+ check1to9("C")
+ assert(!isAmbiguous(conv1._member("conv10")), "C - AtoX.conv10 is not ambiguous")
+ assert(!isAmbiguous(conv2._member("conv10")), "C - AtoZ.conv10 is not ambiguous")
+ assert(!isAmbiguous(conv1._member("conv11")), "C - AtoX.conv11 is not ambiguous")
+ assert(!isAmbiguous(conv2._member("conv11")), "C - AtoZ.conv11 is not ambiguous")
+
+//// class D ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val D = base._class("D")
+
+ conv1 = D._conversion(base._object("A").qualifiedName + ".AtoX")
+ conv2 = D._conversion(base._object("A").qualifiedName + ".AtoZ")
+ assert(conv1.members.length == 11)
+ assert(conv2.members.length == 11)
+ assert(conv1.constraints.length == 0)
+ assert(conv2.constraints.length == 0)
+
+ /** conv1-9 should be the same, conv10 should not be ambiguous while conv11 should be ambiguous */
+ check1to9("D")
+ assert(!isAmbiguous(conv1._member("conv10")), "D - AtoX.conv10 is not ambiguous")
+ assert(!isAmbiguous(conv2._member("conv10")), "D - AtoZ.conv10 is not ambiguous")
+ assert(isAmbiguous(conv1._member("conv11")), "D - AtoX.conv11 is ambiguous")
+ assert(isAmbiguous(conv2._member("conv11")), "D - AtoZ.conv11 is ambiguous")
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/implicits-base.check b/test/scaladoc/run/implicits-base.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/implicits-base.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/implicits-base.scala b/test/scaladoc/run/implicits-base.scala
new file mode 100644
index 0000000..3d57306
--- /dev/null
+++ b/test/scaladoc/run/implicits-base.scala
@@ -0,0 +1,209 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+import language._
+
+object Test extends ScaladocModelTest {
+
+ // test a file instead of a piece of code
+ override def resourceFile = "implicits-base-res.scala"
+
+ // start implicits
+ def scaladocSettings = "-implicits -implicits-show-all"
+
+ def testModel(root: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ def isShadowed(mbr: MemberEntity): Boolean =
+ mbr.byConversion.map(_.source.implicitsShadowing.get(mbr).map(_.isShadowed).getOrElse(false)).getOrElse(false)
+
+ // SEE THE test/resources/implicits-base-res.scala FOR THE EXPLANATION OF WHAT'S CHECKED HERE:
+ val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._package("base")
+ var conv: ImplicitConversion = null
+
+//// class A ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val A = base._class("A")
+
+ // def convToPimpedA(x: T) // pimpA0: with no constraints, SHADOWED
+ conv = A._conversion(A.qualifiedName + ".pimpA0")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
+ assert(conv._member("convToPimpedA").resultType.name == "T")
+
+ // def convToNumericA: T // pimpA1: with a constraint that there is x: Numeric[T] implicit in scope
+ conv = A._conversion(A.qualifiedName + ".pimpA1")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToNumericA").resultType.name == "T")
+
+ // def convToIntA: Int // pimpA2: with a constraint that T = Int
+ conv = A._conversion(A.qualifiedName + ".pimpA2")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToIntA").resultType.name == "Int")
+
+ // def convToGtColonDoubleA: Double // pimpA3: with a constraint that T <: Double
+ conv = A._conversion(A.qualifiedName + ".pimpA3")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToGtColonDoubleA").resultType.name == "Double")
+
+ // def convToPimpedA: S // pimpA4: with 3 constraints: T = Foo[Bar[S]], S: Foo and S: Bar
+ conv = A._conversion(A.qualifiedName + ".pimpA4")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 3)
+ assert(conv._member("convToPimpedA").resultType.name == "S")
+
+ // def convToPimpedA: Bar[Foo[T]] // pimpA5: no constraints
+ conv = A._conversion(A.qualifiedName + ".pimpA5")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
+ assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[T]]")
+
+ // def convToMyNumericA: T // pimpA6: with a constraint that there is x: MyNumeric[T] implicit in scope
+ conv = A._conversion(A.qualifiedName + ".pimpA6")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToMyNumericA").resultType.name == "T")
+
+ // def convToManifestA: T // pimpA7: with 2 constraints: T: Manifest and T <: Double
+ // def convToTraversableOps: T // pimpA7: with 2 constraints: T: Manifest and T <: Double
+ // should not be abstract!
+ conv = A._conversion(A.qualifiedName + ".pimpA7")
+ assert(conv.members.length == 2)
+ assert(conv.constraints.length == 2)
+ assert(conv._member("convToManifestA").resultType.name == "T")
+ assert(conv._member("convToTraversableOps").resultType.name == "T")
+ assert(conv._member("convToTraversableOps").flags.toString.indexOf("abstract") == -1)
+
+//// class B ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val B = base._class("B")
+
+ // these conversions should not affect B
+ assert(B._conversions(A.qualifiedName + ".pimpA2").isEmpty)
+ assert(B._conversions(A.qualifiedName + ".pimpA4").isEmpty)
+
+ // def convToPimpedA(x: Double) // pimpA0: no constraints, SHADOWED
+ conv = B._conversion(A.qualifiedName + ".pimpA0")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
+ assert(conv._member("convToPimpedA").resultType.name == "Double")
+
+ // def convToNumericA: Double // pimpA1: no constraintsd
+ conv = B._conversion(A.qualifiedName + ".pimpA1")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(conv._member("convToNumericA").resultType.name == "Double")
+
+ // def convToGtColonDoubleA: Double // pimpA3: no constraints
+ conv = B._conversion(A.qualifiedName + ".pimpA3")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(conv._member("convToGtColonDoubleA").resultType.name == "Double")
+
+ // def convToPimpedA: Bar[Foo[Double]] // pimpA5: no constraints
+ conv = B._conversion(A.qualifiedName + ".pimpA5")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
+ assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[Double]]")
+
+ // def convToMyNumericA: Double // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope
+ conv = B._conversion(A.qualifiedName + ".pimpA6")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToMyNumericA").resultType.name == "Double")
+
+ // def convToManifestA: Double // pimpA7: no constraints
+ // def convToTraversableOps: Double // pimpA7: no constraints
+ // // should not be abstract!
+ conv = B._conversion(A.qualifiedName + ".pimpA7")
+ assert(conv.members.length == 2)
+ assert(conv.constraints.length == 0)
+ assert(conv._member("convToManifestA").resultType.name == "Double")
+ assert(conv._member("convToTraversableOps").resultType.name == "Double")
+ assert(conv._member("convToTraversableOps").flags.toString.indexOf("abstract") == -1)
+
+//// class C ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val C = base._class("C")
+
+ // these conversions should not affect C
+ assert(C._conversions(A.qualifiedName + ".pimpA3").isEmpty)
+ assert(C._conversions(A.qualifiedName + ".pimpA4").isEmpty)
+ assert(C._conversions(A.qualifiedName + ".pimpA7").isEmpty)
+
+ // def convToPimpedA(x: Int) // pimpA0: no constraints, SHADOWED
+ conv = C._conversion(A.qualifiedName + ".pimpA0")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
+ assert(conv._member("convToPimpedA").resultType.name == "Int")
+
+ // def convToNumericA: Int // pimpA1: no constraints
+ conv = C._conversion(A.qualifiedName + ".pimpA1")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(conv._member("convToNumericA").resultType.name == "Int")
+
+ // def convToIntA: Int // pimpA2: no constraints
+ conv = C._conversion(A.qualifiedName + ".pimpA2")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(conv._member("convToIntA").resultType.name == "Int")
+
+ // def convToPimpedA: Bar[Foo[Int]] // pimpA5: no constraints
+ conv = C._conversion(A.qualifiedName + ".pimpA5")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
+ assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[Int]]")
+
+ // def convToMyNumericA: Int // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[Int] implicit in scope
+ conv = C._conversion(A.qualifiedName + ".pimpA6")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToMyNumericA").resultType.name == "Int")
+
+//// class D ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val D = base._class("D")
+
+ // these conversions should not affect D
+ assert(D._conversions(A.qualifiedName + ".pimpA2").isEmpty)
+ assert(D._conversions(A.qualifiedName + ".pimpA3").isEmpty)
+ assert(D._conversions(A.qualifiedName + ".pimpA4").isEmpty)
+ assert(D._conversions(A.qualifiedName + ".pimpA7").isEmpty)
+
+ // def convToPimpedA(x: String) // pimpA0: no constraints, SHADOWED
+ conv = D._conversion(A.qualifiedName + ".pimpA0")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
+ assert(conv._member("convToPimpedA").resultType.name == "String")
+
+ // def convToNumericA: String // pimpA1: (if showAll is set) with a constraint that there is x: Numeric[String] implicit in scope
+ conv = D._conversion(A.qualifiedName + ".pimpA1")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToNumericA").resultType.name == "String")
+
+ // def convToPimpedA: Bar[Foo[String]] // pimpA5: no constraints
+ conv = D._conversion(A.qualifiedName + ".pimpA5")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ assert(isShadowed(conv._member("convToPimpedA")))
+ assert(conv._member("convToPimpedA").resultType.name == "Bar[Foo[String]]")
+
+ // def convToMyNumericA: String // pimpA6: (if showAll is set) with a constraint that there is x: MyNumeric[String] implicit in scope
+ conv = D._conversion(A.qualifiedName + ".pimpA6")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+ assert(conv._member("convToMyNumericA").resultType.name == "String")
+ }
+}
diff --git a/test/scaladoc/run/implicits-chaining.check b/test/scaladoc/run/implicits-chaining.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/implicits-chaining.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/implicits-chaining.scala b/test/scaladoc/run/implicits-chaining.scala
new file mode 100644
index 0000000..858ca9c
--- /dev/null
+++ b/test/scaladoc/run/implicits-chaining.scala
@@ -0,0 +1,65 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+import language._
+
+object Test extends ScaladocModelTest {
+
+ // test a file instead of a piece of code
+ override def resourceFile = "implicits-chaining-res.scala"
+
+ // start implicits
+ def scaladocSettings = "-implicits"
+
+ def testModel(root: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // SEE THE test/resources/implicits-chaining-res.scala FOR THE EXPLANATION OF WHAT'S CHECKED HERE:
+ val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._object("chaining")
+ var conv: ImplicitConversion = null
+
+//// class A ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val A = base._class("A")
+
+ conv = A._conversion(base.qualifiedName + ".convertToZ")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 1)
+
+//// class B ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val B = base._class("B")
+
+ conv = B._conversion(base.qualifiedName + ".convertToZ")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+
+//// class C ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val C = base._class("C")
+
+ assert(C._conversions(base.qualifiedName + ".convertToZ").isEmpty)
+
+//// class D ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val D = base._class("D")
+
+ conv = D._conversion(base.qualifiedName + ".convertToZ")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+
+//// class E ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val E = base._class("E")
+
+ conv = E._conversion(base.qualifiedName + ".convertToZ")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+
+//// class F ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val F = base._class("F")
+
+ assert(F._conversions(base.qualifiedName + ".convertToZ").isEmpty)
+ }
+}
diff --git a/test/scaladoc/run/implicits-known-type-classes.check b/test/scaladoc/run/implicits-known-type-classes.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/implicits-known-type-classes.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/implicits-known-type-classes.scala b/test/scaladoc/run/implicits-known-type-classes.scala
new file mode 100644
index 0000000..471a1a2
--- /dev/null
+++ b/test/scaladoc/run/implicits-known-type-classes.scala
@@ -0,0 +1,33 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+import language._
+
+object Test extends ScaladocModelTest {
+
+ // test a file instead of a piece of code
+ override def resourceFile = "implicits-known-type-classes-res.scala"
+
+ // start implicits
+ def scaladocSettings = "-implicits"
+
+ def testModel(root: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ /** Tests the "known type classes" feature of scaladoc implicits
+ * if the test fails, please update the correct qualified name of
+ * the type class in src/compiler/scala/tools/nsc/doc/Settings.scala
+ * in the knownTypeClasses map. Thank you! */
+
+ val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._package("typeclasses")
+ var conv: ImplicitConversion = null
+
+ val A = base._class("A")
+
+ for (conversion <- A.conversions if !conversion.isHiddenConversion) {
+ assert(conversion.constraints.length == 1, conversion.constraints.length + " == 1 (in " + conversion + ")")
+ assert(conversion.constraints.head.isInstanceOf[KnownTypeClassConstraint],
+ conversion.constraints.head + " is not a known type class constraint!")
+ }
+ }
+}
diff --git a/test/scaladoc/run/implicits-scopes.check b/test/scaladoc/run/implicits-scopes.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/implicits-scopes.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/implicits-scopes.scala b/test/scaladoc/run/implicits-scopes.scala
new file mode 100644
index 0000000..d91deba
--- /dev/null
+++ b/test/scaladoc/run/implicits-scopes.scala
@@ -0,0 +1,79 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+import language._
+
+object Test extends ScaladocModelTest {
+
+ // test a file instead of a piece of code
+ override def resourceFile = "implicits-scopes-res.scala"
+
+ // start implicits
+ def scaladocSettings = "-implicits"
+
+ def testModel(root: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+ var conv: ImplicitConversion = null
+
+ // SEE THE test/resources/implicits-chaining-res.scala FOR THE EXPLANATION OF WHAT'S CHECKED HERE:
+ val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._package("scopes")
+
+//// test1 /////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val doTest1 = {
+ val test1 = base._package("test1")
+ val A = test1._class("A")
+
+ conv = A._conversion(test1.qualifiedName + ".toB")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ }
+
+//// test2 /////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val doTest2 = {
+ val test2 = base._package("test2")
+ val classes = test2._package("classes")
+ val A = classes._class("A")
+
+ conv = A._conversion(test2.qualifiedName + ".toB")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ }
+
+//// test3 /////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val doTest3 = {
+ val test3 = base._package("test3")
+ val A = test3._class("A")
+
+ conv = A._conversion(A.qualifiedName + ".toB")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ }
+
+//// test4 /////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val doTest4 = {
+ val test4 = base._package("test4")
+ val A = test4._class("A")
+ val S = test4._object("S")
+
+ conv = A._conversion(S.qualifiedName + ".toB")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ }
+
+//// test5 /////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val doTest5 = {
+ val test5 = base._package("test5")
+ val scope = test5._object("scope")
+ val A = scope._class("A")
+
+ conv = A._conversion(scope.qualifiedName + ".toB")
+ assert(conv.members.length == 1)
+ assert(conv.constraints.length == 0)
+ }
+ }
+}
diff --git a/test/scaladoc/run/implicits-shadowing.check b/test/scaladoc/run/implicits-shadowing.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/implicits-shadowing.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/implicits-shadowing.scala b/test/scaladoc/run/implicits-shadowing.scala
new file mode 100644
index 0000000..6869b12
--- /dev/null
+++ b/test/scaladoc/run/implicits-shadowing.scala
@@ -0,0 +1,59 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ // test a file instead of a piece of code
+ override def resourceFile = "implicits-shadowing-res.scala"
+
+ // start implicits
+ def scaladocSettings = "-implicits"
+
+ def testModel(root: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ def isShadowed(mbr: MemberEntity): Boolean =
+ mbr.byConversion.map(_.source.implicitsShadowing.get(mbr).map(_.isShadowed).getOrElse(false)).getOrElse(false)
+
+ // SEE THE test/resources/implicits-chaining-res.scala FOR THE EXPLANATION OF WHAT'S CHECKED HERE:
+ val base = root._package("scala")._package("test")._package("scaladoc")._package("implicits")._package("shadowing")
+ var conv: ImplicitConversion = null
+
+//// class A ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val A = base._class("A")
+
+ conv = A._conversion(base._object("A").qualifiedName + ".AtoZ")
+ assert(conv.members.length == 11)
+ assert(conv.members.forall(isShadowed(_)))
+ assert(conv.constraints.length == 0)
+
+//// class B ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val B = base._class("B")
+
+ conv = B._conversion(base._object("A").qualifiedName + ".AtoZ")
+ assert(conv.members.length == 11)
+ assert(conv.members.forall(isShadowed(_)))
+ assert(conv.constraints.length == 0)
+
+//// class C ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val C = base._class("C")
+
+ conv = C._conversion(base._object("A").qualifiedName + ".AtoZ")
+ assert(conv.members.length == 11)
+ assert(conv.members.forall(isShadowed(_)))
+ assert(conv.constraints.length == 0)
+
+//// class D ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ val D = base._class("D")
+
+ conv = D._conversion(base._object("A").qualifiedName + ".AtoZ")
+ assert(conv.members.length == 11)
+ assert(conv.members.forall(isShadowed(_)))
+ assert(conv.constraints.length == 0)
+ }
+}
diff --git a/test/scaladoc/run/implicits-var-exp.check b/test/scaladoc/run/implicits-var-exp.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/implicits-var-exp.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/implicits-var-exp.scala b/test/scaladoc/run/implicits-var-exp.scala
new file mode 100644
index 0000000..94d2990
--- /dev/null
+++ b/test/scaladoc/run/implicits-var-exp.scala
@@ -0,0 +1,56 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ package scala.test.scaladoc.variable.expansion {
+ /** @define coll WROOOONG-A */
+ class A
+
+ object A {
+ import language.implicitConversions
+ implicit def aToC(a: A) = new C
+ implicit def aToE(a: A) = new E with F
+ }
+
+ /** @define coll WROOOONG-B */
+ class B {
+ /** foo returns a $coll */
+ def foo: Nothing = ???
+ }
+
+ /** @define coll collection */
+ class C extends B
+
+ /** @define coll WROOOONG-D */
+ trait D {
+ /** bar returns a $coll */
+ def bar: Nothing = ???
+ }
+
+ /** @define coll result */
+ //trait E { self: D => override def bar: Nothing = ??? }
+ trait E extends D { override def bar: Nothing = ??? }
+
+ /** @define coll WROOOONG-F */
+ trait F
+ }
+ """
+
+ // diagrams must be started. In case there's an error with dot, it should not report anything
+ def scaladocSettings = "-implicits"
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("variable")._package("expansion")
+ val foo = base._class("A")._method("foo")
+ assert(foo.comment.get.body.toString.contains("foo returns a collection"), "\"" + foo.comment.get.body.toString + "\".contains(\"foo returns a collection\")")
+
+ val bar = base._class("A")._method("bar")
+ assert(bar.comment.get.body.toString.contains("bar returns a result"), "\"" + bar.comment.get.body.toString + "\".contains(\"bar returns a result\")")
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/links.check b/test/scaladoc/run/links.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/links.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/links.scala b/test/scaladoc/run/links.scala
new file mode 100644
index 0000000..fde24ed
--- /dev/null
+++ b/test/scaladoc/run/links.scala
@@ -0,0 +1,32 @@
+import scala.tools.nsc.doc.base._
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+// SI-5079 "Scaladoc can't link to an object (only a class or trait)"
+// SI-4497 "Links in ScalaDoc - Spec and implementation unsufficient"
+// SI-4224 "Wiki-links should support method targets"
+// SI-3695 "support non-fully-qualified type links in scaladoc comments"
+// SI-6487 "Scaladoc can't link to inner classes"
+// SI-6495 "Scaladoc won't pick up group name, priority and description from owner chain"
+// SI-6501 "Scaladoc won't link to a @template type T as a template but as a member"
+object Test extends ScaladocModelTest {
+
+ override def resourceFile = "links.scala"
+
+ // no need for special settings
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ // just need to check the member exists, access methods will throw an error if there's a problem
+ val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("links")
+ val TEST = base._object("TEST")
+
+ val memberLinks = countLinks(TEST.comment.get, _.link.isInstanceOf[LinkToMember[_, _]])
+ val templateLinks = countLinks(TEST.comment.get, _.link.isInstanceOf[LinkToTpl[_]])
+ assert(memberLinks == 17, memberLinks + " == 17 (the member links in object TEST)")
+ assert(templateLinks == 6, templateLinks + " == 6 (the template links in object TEST)")
+ }
+}
diff --git a/test/scaladoc/run/package-object.check b/test/scaladoc/run/package-object.check
new file mode 100644
index 0000000..7da897a
--- /dev/null
+++ b/test/scaladoc/run/package-object.check
@@ -0,0 +1,4 @@
+List(test.B, test.A, scala.AnyRef, scala.Any)
+List(B, A, AnyRef, Any)
+Some((newSource,10))
+Done.
diff --git a/test/scaladoc/run/package-object.scala b/test/scaladoc/run/package-object.scala
new file mode 100644
index 0000000..f5c79b1
--- /dev/null
+++ b/test/scaladoc/run/package-object.scala
@@ -0,0 +1,17 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+import language._
+
+object Test extends ScaladocModelTest {
+ override def resourceFile = "package-object-res.scala"
+ override def scaladocSettings = ""
+ def testModel(root: Package) = {
+ import access._
+
+ val p = root._package("test")
+ println(p.linearizationTemplates)
+ println(p.linearizationTypes)
+ println(p.inSource)
+ }
+}
+
diff --git a/test/scaladoc/run/t4922.check b/test/scaladoc/run/t4922.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/t4922.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/t4922.scala b/test/scaladoc/run/t4922.scala
new file mode 100644
index 0000000..bce87ac
--- /dev/null
+++ b/test/scaladoc/run/t4922.scala
@@ -0,0 +1,32 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ // Test code
+ override def code = """
+ // This the default values should be displayed
+
+ object Test {
+ def f (a: Any = "".isEmpty) = ()
+ def g[A](b: A = null) = ()
+ }
+ """
+
+ // no need for special settings
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
+ import access._
+
+ val Test = rootPackage._object("Test")
+ val f = Test._method("f")
+ val g = Test._method("g")
+
+ def assertEqual(s1: String, s2: String) = assert(s1 == s2, s1 + " == " + s2)
+
+ assertEqual(f.valueParams(0)(0).defaultValue.get.expression, "\"\".isEmpty")
+ assertEqual(g.valueParams(0)(0).defaultValue.get.expression, "null")
+ }
+}
\ No newline at end of file
diff --git a/test/scaladoc/run/t7767.check b/test/scaladoc/run/t7767.check
new file mode 100644
index 0000000..619c561
--- /dev/null
+++ b/test/scaladoc/run/t7767.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/t7767.scala b/test/scaladoc/run/t7767.scala
new file mode 100644
index 0000000..6c9ceb5
--- /dev/null
+++ b/test/scaladoc/run/t7767.scala
@@ -0,0 +1,18 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ class Docable extends { /**Doc*/ val foo = 0 } with AnyRef
+ """
+
+ // no need for special settings
+ def scaladocSettings = ""
+
+ def testModel(rootPackage: Package) = {
+ import access._
+ val comment = rootPackage._class("Docable")._value("foo").comment.map(_.body.toString.trim).getOrElse("")
+ assert(comment.contains("Doc"), comment)
+ }
+}
diff --git a/test/scaladoc/run/usecase-var-expansion.check b/test/scaladoc/run/usecase-var-expansion.check
new file mode 100644
index 0000000..3faa473
--- /dev/null
+++ b/test/scaladoc/run/usecase-var-expansion.check
@@ -0,0 +1,4 @@
+newSource:8: error: Incorrect variable expansion for $Coll in use case. Does the variable expand to wiki syntax when documenting class Test2?
+ * @usecase def foo: $Coll[T]
+ ^
+Done.
diff --git a/test/scaladoc/run/usecase-var-expansion.scala b/test/scaladoc/run/usecase-var-expansion.scala
new file mode 100644
index 0000000..e86ea4a
--- /dev/null
+++ b/test/scaladoc/run/usecase-var-expansion.scala
@@ -0,0 +1,26 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+import language._
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ /**
+ * @define Coll `Test`
+ */
+ class Test[T] {
+ /**
+ * member $Coll
+ * @usecase def foo: $Coll[T]
+ * usecase $Coll
+ */
+ def foo(implicit err: String): Test[T] = sys.error(err)
+ }
+
+ /** @define Coll {{{some `really` < !! >> invalid $$$ thing}}} */
+ class Test2[T] extends Test[Int]
+ """
+
+ def scaladocSettings = ""
+ def testModel(root: Package) = ()
+}
diff --git a/test/scaladoc/scala/IndexScriptTest.scala b/test/scaladoc/scala/IndexScriptTest.scala
deleted file mode 100644
index 991491c..0000000
--- a/test/scaladoc/scala/IndexScriptTest.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-import org.scalacheck._
-import org.scalacheck.Prop._
-
-import scala.tools.nsc.doc
-import scala.tools.nsc.doc.html.page.IndexScript
-import java.net.URLClassLoader
-
-object Test extends Properties("IndexScript") {
-
- def getClasspath = {
- val loader = Thread.currentThread.getContextClassLoader
- val paths = loader.asInstanceOf[URLClassLoader].getURLs
- val morepaths = loader.getParent.asInstanceOf[URLClassLoader].getURLs
- (paths ++ morepaths).map(_.getPath).mkString(java.io.File.pathSeparator)
- }
-
- val docFactory = {
- val settings = new doc.Settings({Console.err.println(_)})
- settings.classpath.value = getClasspath
- val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
- new doc.DocFactory(reporter, settings)
- }
-
- val indexModelFactory = doc.model.IndexModelFactory
-
- def createIndexScript(path: String) =
- docFactory.makeUniverse(List(path)) match {
- case Some(universe) => {
- val index = new IndexScript(universe,
- indexModelFactory.makeIndex(universe))
- Some(index)
- }
- case _ =>
- None
- }
-
- property("allPackages") = {
- createIndexScript("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
- case Some(index) =>
- index.allPackages.map(_.toString) == List(
- "scala",
- "scala.tools",
- "scala.tools.nsc",
- "scala.tools.nsc.doc",
- "scala.tools.nsc.doc.html",
- "scala.tools.nsc.doc.html.page"
- )
- case None =>
- false
- }
- }
-}
diff --git a/test/scaladoc/scala/IndexTest.scala b/test/scaladoc/scala/IndexTest.scala
deleted file mode 100644
index 2d73164..0000000
--- a/test/scaladoc/scala/IndexTest.scala
+++ /dev/null
@@ -1,82 +0,0 @@
-import org.scalacheck._
-import org.scalacheck.Prop._
-
-import scala.tools.nsc.doc
-import scala.tools.nsc.doc.html.page.Index
-import java.net.URLClassLoader
-
-object Test extends Properties("Index") {
-
- def getClasspath = {
- // these things can be tricky
- // this test previously relied on the assumption that the current thread's classloader is an url classloader and contains all the classpaths
- // does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no.
- // this test _will_ fail again some time in the future.
- val paths = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader].getURLs.map(_.getPath)
- val morepaths = Thread.currentThread.getContextClassLoader.getParent.asInstanceOf[URLClassLoader].getURLs.map(_.getPath)
- (paths ++ morepaths).mkString(java.io.File.pathSeparator)
- }
-
- val docFactory = {
- val settings = new doc.Settings({Console.err.println(_)})
-
- settings.classpath.value = getClasspath
- println(settings.classpath.value)
-
- val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
-
- new doc.DocFactory(reporter, settings)
- }
-
- val indexModelFactory = doc.model.IndexModelFactory
-
- def createIndex(path: String): Option[Index] = {
-
- val maybeUniverse = {
- //val stream = new java.io.ByteArrayOutputStream
- //val original = Console.out
- //Console.setOut(stream)
-
- val result = docFactory.makeUniverse(List(path))
-
- // assert(stream.toString == "model contains 2 documentable templates\n")
- //Console.setOut(original)
-
- result
- }
-
- maybeUniverse match {
- case Some(universe) => {
- val index = new Index(universe, indexModelFactory.makeIndex(universe))
- return Some(index)
- }
- case _ => return None
- }
-
- }
-
- property("path") = {
- createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
- case Some(index) =>
- index.path == List("index.html")
- case None => false
- }
- }
-
- property("title") = {
- createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
- case Some(index) =>
- index.title == ""
-
- case None => false
- }
- }
- property("browser contants a script element") = {
- createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
- case Some(index) =>
- (index.browser \ "script").size == 1
-
- case None => false
- }
- }
-}
diff --git a/test/scaladoc/scala/html/HtmlFactoryTest.scala b/test/scaladoc/scala/html/HtmlFactoryTest.scala
deleted file mode 100644
index fcdacc3..0000000
--- a/test/scaladoc/scala/html/HtmlFactoryTest.scala
+++ /dev/null
@@ -1,375 +0,0 @@
-import org.scalacheck._
-import org.scalacheck.Prop._
-
-import java.net.URLClassLoader
-
-object XMLUtil {
- import scala.xml._
-
- def stripGroup(seq: Node): Node = {
- seq match {
- case group: Group => {
- <div class="group">{ group.nodes.map(stripGroup _) }</div>
- }
- case e: Elem => {
- val child = e.child.map(stripGroup _)
- Elem(e.prefix, e.label, e.attributes, e.scope, child : _*)
- }
- case _ => seq
- }
- }
-}
-
-object Test extends Properties("HtmlFactory") {
- import scala.tools.nsc.doc.{DocFactory, Settings}
- import scala.tools.nsc.doc.model.IndexModelFactory
- import scala.tools.nsc.doc.html.HtmlFactory
- import scala.tools.nsc.doc.html.page.ReferenceIndex
-
- def getClasspath = {
- // these things can be tricky
- // this test previously relied on the assumption that the current thread's classloader is an url classloader and contains all the classpaths
- // does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no.
- // this test _will_ fail again some time in the future.
- val paths = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader].getURLs.map(_.getPath)
- val morepaths = Thread.currentThread.getContextClassLoader.getParent.asInstanceOf[URLClassLoader].getURLs.map(_.getPath)
- (paths ++ morepaths).mkString(java.io.File.pathSeparator)
- }
-
- def createFactory = {
- val settings = new Settings({Console.err.println(_)})
- settings.classpath.value = getClasspath
-
- val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
- new DocFactory(reporter, settings)
- }
-
- def createTemplates(basename: String) = {
- val result = scala.collection.mutable.Map[String, scala.xml.NodeSeq]()
-
- createFactory.makeUniverse(List("test/scaladoc/resources/"+basename)) match {
- case Some(universe) => {
- val index = IndexModelFactory.makeIndex(universe)
- (new HtmlFactory(universe, index)).writeTemplates((page) => {
- result += (page.absoluteLinkTo(page.path) -> page.body)
- })
- }
- case _ => ;
- }
-
- result
- }
-
- def createReferenceIndex(basename: String) = {
- createFactory.makeUniverse(List("test/scaladoc/resources/"+basename)) match {
- case Some(universe) => {
- val index = IndexModelFactory.makeIndex(universe)
- val pages = index.firstLetterIndex.map({
- case (key, value) => {
- val page = new ReferenceIndex(key, index, universe)
- page.absoluteLinkTo(page.path) -> page.body
- }
- })
- Some(pages)
- }
- case _ =>
- None
- }
- }
-
- def createTemplate(scala: String) = {
- val html = scala.stripSuffix(".scala") + ".html"
- createTemplates(scala)(html)
- }
-
- def shortComments(root: scala.xml.Node) =
- XMLUtil.stripGroup(root).descendant.flatMap {
- case e: scala.xml.Elem => {
- if (e.attribute("class").toString.contains("shortcomment")) {
- Some(e)
- } else {
- None
- }
- }
- case _ => None
- }
-
- property("Trac #3790") = {
- createTemplate("Trac3790.scala") match {
- case node: scala.xml.Node => {
- val comments = shortComments(node)
-
- comments.exists { _.toString.contains(">A lazy String\n</p>") } &&
- comments.exists { _.toString.contains(">A non-lazy String\n</p>") }
- }
- case _ => false
- }
- }
-
- property("Trac #4306") = {
- val files = createTemplates("Trac4306.scala")
- files("com/example/trac4306/foo/package$$Bar.html") != None
- }
-
- property("Trac #4366") = {
- createTemplate("Trac4366.scala") match {
- case node: scala.xml.Node => {
- shortComments(node).exists { n => {
- val str = n.toString
- str.contains("<code>foo</code>") && str.contains("</strong>")
- } }
- }
- case _ => false
- }
- }
-
- property("Trac #4358") = {
- createTemplate("Trac4358.scala") match {
- case node: scala.xml.Node =>
- ! shortComments(node).exists {
- _.toString.contains("<em>i.</em>")
- }
- case _ => false
- }
- }
-
- property("Trac #4180") = {
- createTemplate("Trac4180.scala") != None
- }
-
- property("Trac #4372") = {
- createTemplate("Trac4372.scala") match {
- case node: scala.xml.Node => {
- val html = node.toString
- html.contains("<span class=\"name\" title=\"gt4s: $plus$colon\">+:</span>") &&
- html.contains("<span class=\"name\" title=\"gt4s: $minus$colon\">-:</span>") &&
- html.contains("""<span class="params">(<span name="n">n: <span name="scala.Int" class="extype">Int</span></span>)</span><span class="result">: <span name="scala.Int" class="extype">Int</span></span>""")
- }
- case _ => false
- }
- }
-
- property("Trac #4374 - public") = {
- val files = createTemplates("Trac4374.scala")
- files("WithPublic.html") match {
- case node: scala.xml.Node => {
- val s = node.toString
- s.contains("""href="WithPublic$.html"""") &&
- files.get("WithPublic$.html") != None
- }
- case _ => false
- }
- }
-
- property("Trac #4374 - private") = {
- val files = createTemplates("Trac4374.scala")
- files("WithPrivate.html") match {
- case node: scala.xml.Node => {
- val s = node.toString
- ! s.contains("""href="WithPrivate$.html"""") &&
- files.get("WithPrivate$.html") == None
- }
- case _ => false
- }
- }
-
- property("Trac #3484") = {
- val files = createTemplates("Trac3484.scala")
-
- files("Collection.html") match {
- case node: scala.xml.Node => {
- val s = node.toString
- s.contains("""<span class="result">: Traversable[B]</span>""")
- }
- case _ => false
- }
- }
-
- property("Trac #3484 - SR704") = {
- val files = createTemplates("Trac3484.scala")
-
- files("SR704.html") match {
- case node: scala.xml.Node => {
- val s = node.toString
- s.contains("Hello Mister John.")
- }
- case _ => false
- }
- }
-
- property("Trac #4325 - files") = {
- val files = createTemplates("Trac4325.scala")
-
- files.get("WithSynthetic.html") != None &&
- files.get("WithSynthetic$.html") == None &&
- files.get("WithObject.html") != None &&
- files.get("WithObject$.html") != None
- }
-
- property("Trac #4325 - Don't link to syntetic companion") = {
- val files = createTemplates("Trac4325.scala")
-
- files("WithSynthetic.html") match {
- case node: scala.xml.Node => {
- val s = node.toString
- ! s.contains("""href="WithSynthetic$.html"""")
- }
- case _ => false
- }
- }
-
- property("Trac #4325 - Link to companion") = {
- val files = createTemplates("Trac4325.scala")
-
- files("WithObject.html") match {
- case node: scala.xml.Node => {
- val s = node.toString
- s.contains("""href="WithObject$.html"""")
- }
- case _ => false
- }
- }
-
- property("Trac #4420 - no whitespace at end of line") = {
- val files = createTemplates("Trac4420.scala")
-
- files("TestA.html") match {
- case node: scala.xml.Node => {
- val s = node.toString
- s.contains("""See YYY for more details""")
- }
- case _ => false
- }
- }
-
- property("Trac #484 - refinements and existentials") = {
- val files = createTemplates("Trac484.scala")
- val lines = """
- |type Bar = AnyRef { type Dingus <: T forSome { type T <: String } }
- |type Foo = AnyRef { ... /* 3 definitions in type refinement */ }
- |def g(x: T forSome { type T <: String }): String
- |def h(x: Float): AnyRef { def quux(x: Int,y: Int): Int }
- |def hh(x: Float): AnyRef { def quux(x: Int,y: Int): Int }
- |def j(x: Int): Bar
- |def k(): AnyRef { type Dingus <: T forSome { type T <: String } }
- """.stripMargin.trim.lines map (_.trim)
-
- files("RefinementAndExistentials.html") match {
- case node: scala.xml.Node => {
- val s = node.text.replaceAll("\\s+", " ")
- lines forall (s contains _)
- }
- case _ => false
- }
- }
-
- property("Trac #4289") = {
- val files = createTemplates("Trac4289.scala")
-
- files("Subclass.html") match {
- case node: scala.xml.Node => {
- node.toString.contains {
- """<dt>returns</dt><dd class="cmt"><p>123</p></dd>"""
- }
- }
- case _ => false
- }
- }
-
- property("Trac #4409") = {
- createTemplate("Trac4409.scala") match {
- case node: scala.xml.Node => {
- ! node.toString.contains("""<div class="block"><ol>since""")
- }
- case _ => false
- }
- }
-
- property("Trac #4452") = {
- createTemplate("Trac4452.scala") match {
- case node: scala.xml.Node =>
- ! node.toString.contains(">*")
- case _ => false
- }
- }
-
- property("Trac #4471") = {
- createReferenceIndex("Trac4471.scala") match {
- case Some(pages) =>
- (pages.get("index/index-f.html") match {
- case Some(node) => node.toString.contains(">A</a></strike>")
- case _ => false
- }) && (pages.get("index/index-b.html") match {
- case Some(node) => node.toString.contains(">bar</strike>")
- case _ => false
- })
- case _ => false
- }
- }
-
- property("SI-4641") = {
- createReferenceIndex("SI_4641.scala") match {
- case Some(pages) => pages.contains("index/index-_.html")
- case _ => false
- }
- }
-
- property("SI-4421") = {
- createTemplate("SI_4421.scala") match {
- case node: scala.xml.Node => {
- val html = node.toString
- html.contains(">Example:") && html.contains(">Note<")
- }
- case _ => false
- }
- }
-
- property("SI-4589") = {
- createTemplate("SI_4589.scala") match {
- case node: scala.xml.Node => {
- val html = node.toString
- html.contains(">x0123456789: <") &&
- html.contains(">x012345678901234567890123456789: <")
- }
- case _ => false
- }
- }
-
- property("Should decode symbolic type alias name.") = {
- createTemplate("SI_4715.scala") match {
- case node: scala.xml.Node => {
- val html = node.toString
- html.contains(">: :+:[<")
- }
- case _ => false
- }
- }
-
- property("Shouldn't drop type arguments to aliased tuple.") = {
- createTemplate("SI_4676.scala") match {
- case node: scala.xml.Node => {
- node.toString.contains(">ss: (String, String)<")
- }
- case _ => false
- }
- }
-
- property("Default arguments of synthesized constructor") = {
- val files = createTemplates("SI_4287.scala")
-
- files("ClassWithSugar.html") match {
- case node: scala.xml.Node => {
- node.toString.contains(">123<")
- }
- case _ => false
- }
- }
-
- property("Default arguments of synthesized constructor") = {
- createTemplate("SI_4507.scala") match {
- case node: scala.xml.Node =>
- ! node.toString.contains("<li>returns silently when evaluating true and true</li>")
- case _ => false
- }
- }
-}
diff --git a/test/scaladoc/scala/model/CommentFactoryTest.scala b/test/scaladoc/scala/model/CommentFactoryTest.scala
deleted file mode 100644
index 69c314a..0000000
--- a/test/scaladoc/scala/model/CommentFactoryTest.scala
+++ /dev/null
@@ -1,155 +0,0 @@
-import org.scalacheck._
-import org.scalacheck.Prop._
-
-import scala.tools.nsc.Global
-import scala.tools.nsc.doc
-import scala.tools.nsc.doc.model.comment._
-
-class Factory(val g: Global, val s: doc.Settings)
- extends doc.model.ModelFactory(g, s) {
- thisFactory: Factory with CommentFactory with doc.model.TreeFactory =>
-
- def strip(c: Comment): Option[Inline] = {
- c.body match {
- case Body(List(Paragraph(Chain(List(Summary(inner)))))) => Some(inner)
- case _ => None
- }
- }
-
- def parseComment(s: String): Option[Inline] =
- strip(parse(s, "", scala.tools.nsc.util.NoPosition))
-
- def createBody(s: String) =
- parse(s, "", scala.tools.nsc.util.NoPosition).body
-}
-
-object Test extends Properties("CommentFactory") {
- val factory = {
- val settings = new doc.Settings((str: String) => {})
- val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
- val g = new Global(settings, reporter)
- (new Factory(g, settings) with CommentFactory with doc.model.TreeFactory)
- }
-
- def parse(src: String, dst: Inline) = {
- factory.parseComment(src) match {
- case Some(inline) =>
- inline == dst
- case _ =>
- false
- }
- }
-
- property("parse") = parse(
- "/** One two three */",
- Text("One two three")
- )
- property("parse") = parse(
- "/** One `two` three */",
- Chain(List(Text("One "), Monospace(Text("two")), Text(" three")))
- )
-
- property("parse") = parse(
- """
-/** One two
- * three */""",
- Text("One two\nthree")
- )
- property("parse") = parse(
- """
-/** One `two`
- * three */""",
- Chain(List(Text("One "), Monospace(Text("two")), Text("\n"), Text("three")))
- )
-
- property("parse") = parse(
- """
-/** One `two`
- * three */""",
- Chain(List(Text("One "), Monospace(Text("two")), Text("\n"), Text(" three")))
- )
-
- property("parse") = parse(
- """
-/** One
- * `two` three */""",
- Chain(List(Text("One"), Text("\n"), Monospace(Text("two")), Text(" three")))
- )
-
- property("Trac #4361 - ^...^") = parse(
- """
-/**
- * hello ^world^ */""",
- Chain(List(Text("hello "), Superscript(Text("world"))))
- )
-
- property("Trac #4361 - single ^ symbol") = parse(
- """
-/**
- * <pre>
- * hello ^world
- * </pre>
- *
- */""",
- Chain(List(Text(""), Text("\n"),
-
-
- HtmlTag("<pre>\nhello ^world\n</pre>")))
- )
-
- property("Trac #4366 - body") = {
- val body = factory.createBody(
- """
- /**
- * <strong><code>foo</code> has been deprecated and will be removed in a future version. Please call <code>bar</code> instead.</strong>
- */
- """
- )
-
- body == Body(List(Paragraph(Chain(List(
- Summary(Chain(List(HtmlTag("<strong><code>foo</code> has been deprecated and will be removed in a future version. Please call <code>bar</code> instead.</strong>"), Text("\n"), Text(""))))
- )))))
- }
-
- property("Trac #4366 - summary") = {
- val body = factory.createBody(
- """
- /**
- * <strong><code>foo</code> has been deprecated and will be removed in a future version. Please call <code>bar</code> instead.</strong>
- */
- """
- )
- body.summary == Some(Chain(List(HtmlTag("<strong><code>foo</code> has been deprecated and will be removed in a future version. Please call <code>bar</code> instead.</strong>"), Text("\n"), Text(""))))
- }
-
- property("Trac #4358 - body") = {
- factory.createBody(
- """
- /**
- * Implicit conversion that invokes the <code>expect</code> method on the <code>EasyMock</code> companion object (<em>i.e.</em>, the
- * static <code>expect</code> method in Java class <code>org.easymock.EasyMock</code>).
- */
- """
- ) match {
- case Body(List(Paragraph(Chain(List(Summary(Chain(List(Chain(List(
- Text("Implicit conversion that invokes the "),
- HtmlTag("<code>expect</code>"),
- Text(" method on the "),
- HtmlTag("<code>EasyMock</code>"),
- Text(" companion object ("),
- HtmlTag("<em>i.e.</em>"),
- Text(", the\nstatic "),
- HtmlTag("<code>expect</code>"),
- Text(" method in Java class "),
- HtmlTag("<code>org.easymock.EasyMock</code>"),
- Text(")")
- )), Text(".")))), Text("\n")))))) =>
- true
- case other => {
- println(other)
- false
- }
- }
- }
-
-}
diff --git a/test/scaladoc/scalacheck/CommentFactoryTest.scala b/test/scaladoc/scalacheck/CommentFactoryTest.scala
new file mode 100644
index 0000000..96174d2
--- /dev/null
+++ b/test/scaladoc/scalacheck/CommentFactoryTest.scala
@@ -0,0 +1,169 @@
+import org.scalacheck._
+import org.scalacheck.Prop._
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.base.comment._
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.model.diagram._
+
+class Factory(val g: Global, val s: doc.Settings)
+ extends doc.model.ModelFactory(g, s) {
+ thisFactory: Factory
+ with ModelFactoryImplicitSupport
+ with ModelFactoryTypeSupport
+ with DiagramFactory
+ with CommentFactory
+ with doc.model.TreeFactory
+ with MemberLookup =>
+
+ def strip(c: Comment): Option[Inline] = {
+ c.body match {
+ case Body(List(Paragraph(Chain(List(Summary(inner)))))) => Some(inner)
+ case _ => None
+ }
+ }
+
+ def parseComment(s: String): Option[Inline] =
+ strip(parse(s, "", scala.tools.nsc.util.NoPosition))
+
+ def createBody(s: String) =
+ parse(s, "", scala.tools.nsc.util.NoPosition).body
+}
+
+object Test extends Properties("CommentFactory") {
+ val factory = {
+ val settings = new doc.Settings((str: String) => {})
+ val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
+ val g = new Global(settings, reporter)
+ (new Factory(g, settings)
+ with ModelFactoryImplicitSupport
+ with ModelFactoryTypeSupport
+ with DiagramFactory
+ with CommentFactory
+ with doc.model.TreeFactory
+ with MemberLookup)
+ }
+
+ def parse(src: String, dst: Inline) = {
+ factory.parseComment(src) match {
+ case Some(inline) =>
+ inline == dst
+ case _ =>
+ false
+ }
+ }
+
+ property("parse") = parse(
+ "/** One two three */",
+ Text("One two three")
+ )
+ property("parse") = parse(
+ "/** One `two` three */",
+ Chain(List(Text("One "), Monospace(Text("two")), Text(" three")))
+ )
+
+ property("parse") = parse(
+ """
+/** One two
+ * three */""",
+ Text("One two\nthree")
+ )
+ property("parse") = parse(
+ """
+/** One `two`
+ * three */""",
+ Chain(List(Text("One "), Monospace(Text("two")), Text("\n"), Text("three")))
+ )
+
+ property("parse") = parse(
+ """
+/** One `two`
+ * three */""",
+ Chain(List(Text("One "), Monospace(Text("two")), Text("\n"), Text(" three")))
+ )
+
+ property("parse") = parse(
+ """
+/** One
+ * `two` three */""",
+ Chain(List(Text("One"), Text("\n"), Monospace(Text("two")), Text(" three")))
+ )
+
+ property("Trac #4361 - ^...^") = parse(
+ """
+/**
+ * hello ^world^ */""",
+ Chain(List(Text("hello "), Superscript(Text("world"))))
+ )
+
+ property("Trac #4361 - single ^ symbol") = parse(
+ """
+/**
+ * <pre>
+ * hello ^world
+ * </pre>
+ *
+ */""",
+ Chain(List(Text(""), Text("\n"),
+
+
+ HtmlTag("<pre>\nhello ^world\n</pre>")))
+ )
+
+ property("Trac #4366 - body") = {
+ val body = factory.createBody(
+ """
+ /**
+ * <strong><code>foo</code> has been deprecated and will be removed in a future version. Please call <code>bar</code> instead.</strong>
+ */
+ """
+ )
+
+ body == Body(List(Paragraph(Chain(List(
+ Summary(Chain(List(HtmlTag("<strong><code>foo</code> has been deprecated and will be removed in a future version. Please call <code>bar</code> instead.</strong>"), Text("\n"), Text(""))))
+ )))))
+ }
+
+ property("Trac #4366 - summary") = {
+ val body = factory.createBody(
+ """
+ /**
+ * <strong><code>foo</code> has been deprecated and will be removed in a future version. Please call <code>bar</code> instead.</strong>
+ */
+ """
+ )
+ body.summary == Some(Chain(List(HtmlTag("<strong><code>foo</code> has been deprecated and will be removed in a future version. Please call <code>bar</code> instead.</strong>"), Text("\n"), Text(""))))
+ }
+
+ property("Trac #4358 - body") = {
+ factory.createBody(
+ """
+ /**
+ * Implicit conversion that invokes the <code>expect</code> method on the <code>EasyMock</code> companion object (<em>i.e.</em>, the
+ * static <code>expect</code> method in Java class <code>org.easymock.EasyMock</code>).
+ */
+ """
+ ) match {
+ case Body(List(Paragraph(Chain(List(Summary(Chain(List(Chain(List(
+ Text("Implicit conversion that invokes the "),
+ HtmlTag("<code>expect</code>"),
+ Text(" method on the "),
+ HtmlTag("<code>EasyMock</code>"),
+ Text(" companion object ("),
+ HtmlTag("<em>i.e.</em>"),
+ Text(", the\nstatic "),
+ HtmlTag("<code>expect</code>"),
+ Text(" method in Java class "),
+ HtmlTag("<code>org.easymock.EasyMock</code>"),
+ Text(")")
+ )), Text(".")))), Text("\n")))))) =>
+ true
+ case other => {
+ println(other)
+ false
+ }
+ }
+ }
+
+}
diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.flags b/test/scaladoc/scalacheck/HtmlFactoryTest.flags
new file mode 100644
index 0000000..b2264ec
--- /dev/null
+++ b/test/scaladoc/scalacheck/HtmlFactoryTest.flags
@@ -0,0 +1 @@
+-encoding UTF-8
\ No newline at end of file
diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.scala b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
new file mode 100644
index 0000000..13eacf7
--- /dev/null
+++ b/test/scaladoc/scalacheck/HtmlFactoryTest.scala
@@ -0,0 +1,697 @@
+import org.scalacheck._
+import org.scalacheck.Prop._
+
+import java.net.{URLClassLoader, URLDecoder}
+
+object XMLUtil {
+ import scala.xml._
+
+ def stripGroup(seq: Node): Node = {
+ seq match {
+ case group: Group => {
+ <div class="group">{ group.nodes.map(stripGroup _) }</div>
+ }
+ case e: Elem => {
+ val child = e.child.map(stripGroup _)
+ Elem(e.prefix, e.label, e.attributes, e.scope, child : _*)
+ }
+ case _ => seq
+ }
+ }
+}
+
+object Test extends Properties("HtmlFactory") {
+
+ final val RESOURCES = "test/scaladoc/resources/"
+
+ import scala.tools.nsc.doc.{DocFactory, Settings}
+ import scala.tools.nsc.doc.model.IndexModelFactory
+ import scala.tools.nsc.doc.html.HtmlFactory
+ import scala.tools.nsc.doc.html.page.ReferenceIndex
+
+ def getClasspath = {
+ // these things can be tricky
+ // this test previously relied on the assumption that the current thread's classloader is an url classloader and contains all the classpaths
+ // does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no.
+ // this test _will_ fail again some time in the future.
+ val paths = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader].getURLs.map(u => URLDecoder.decode(u.getPath))
+ val morepaths = Thread.currentThread.getContextClassLoader.getParent.asInstanceOf[URLClassLoader].getURLs.map(u => URLDecoder.decode(u.getPath))
+ (paths ++ morepaths).mkString(java.io.File.pathSeparator)
+ }
+
+ def createFactory = {
+ val settings = new Settings({Console.err.println(_)})
+ settings.classpath.value = getClasspath
+
+ val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
+ new DocFactory(reporter, settings)
+ }
+
+ def createTemplates(basename: String) = {
+ val result = scala.collection.mutable.Map[String, scala.xml.NodeSeq]()
+
+ createFactory.makeUniverse(Left(List(RESOURCES+basename))) match {
+ case Some(universe) => {
+ val index = IndexModelFactory.makeIndex(universe)
+ (new HtmlFactory(universe, index)).writeTemplates((page) => {
+ result += (page.absoluteLinkTo(page.path) -> page.body)
+ })
+ }
+ case _ => ;
+ }
+
+ result
+ }
+
+ def createReferenceIndex(basename: String) = {
+ createFactory.makeUniverse(Left(List(RESOURCES+basename))) match {
+ case Some(universe) => {
+ val index = IndexModelFactory.makeIndex(universe)
+ val pages = index.firstLetterIndex.map({
+ case (key, value) => {
+ val page = new ReferenceIndex(key, index, universe)
+ page.absoluteLinkTo(page.path) -> page.body
+ }
+ })
+ Some(pages)
+ }
+ case _ =>
+ None
+ }
+ }
+
+ def createTemplate(scala: String) = {
+ val html = scala.stripSuffix(".scala") + ".html"
+ createTemplates(scala)(html)
+ }
+
+ /**
+ * This tests the text without the markup - ex:
+ *
+ * <h4 class="signature">
+ * <span class="modifier_kind">
+ * <span class="modifier">implicit</span>
+ * <span class="kind">def</span>
+ * </span>
+ * <span class="symbol">
+ * <span class="name">test</span><span class="params">()</span><span class="result">: <span name="scala.Int" class="extype">Int</span></span>
+ * </span>
+ * </h4>
+ *
+ * becomes:
+ *
+ * implicit def test(): Int
+ *
+ * and is required to contain the text in the given checks
+ *
+ * NOTE: Comparison is done ignoring all whitespace
+ */
+ def checkText(scalaFile: String, debug: Boolean = true)(checks: (Option[String], String, Boolean)*): Boolean = {
+ val htmlFile = scalaFile.stripSuffix(".scala") + ".html"
+ val htmlAllFiles = createTemplates(scalaFile)
+ var result = true
+
+ for ((fileHint, check, expected) <- checks) {
+ // resolve the file to be checked
+ val fileName = fileHint match {
+ case Some(file) =>
+ if (file endsWith ".html")
+ file
+ else
+ file + ".html"
+ case None =>
+ htmlFile
+ }
+ val fileTextPretty = htmlAllFiles(fileName).text.replace('→',' ').replaceAll("\\s+"," ")
+ val fileText = fileTextPretty.replaceAll(" ", "")
+
+ val checkTextPretty = check.replace('→',' ').replaceAll("\\s+"," ")
+ val checkText = checkTextPretty.replaceAll(" ", "")
+
+ val checkValue = fileText.contains(checkText) == expected
+ if (debug && (!checkValue)) {
+ Console.err.println("")
+ Console.err.println("HTML Check failed for resource file " + scalaFile + ":")
+ Console.err.println("Could not match: \n" + checkTextPretty)
+ Console.err.println("In the extracted HTML text: \n" + fileTextPretty)
+ Console.err.println("NOTE: The whitespaces are eliminated before matching!")
+ Console.err.println("")
+ }
+ result &&= checkValue
+ }
+
+ result
+ }
+
+
+ def shortComments(root: scala.xml.Node) =
+ XMLUtil.stripGroup(root).descendant.flatMap {
+ case e: scala.xml.Elem => {
+ if (e.attribute("class").toString.contains("shortcomment")) {
+ Some(e)
+ } else {
+ None
+ }
+ }
+ case _ => None
+ }
+
+ property("Trac #3790") = {
+ createTemplate("Trac3790.scala") match {
+ case node: scala.xml.Node => {
+ val comments = shortComments(node)
+
+ comments.exists { _.toString.contains(">A lazy String\n</p>") } &&
+ comments.exists { _.toString.contains(">A non-lazy String\n</p>") }
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4306") = {
+ val files = createTemplates("Trac4306.scala")
+ files("com/example/trac4306/foo/package$$Bar.html") != None
+ }
+
+ property("Trac #4366") = {
+ createTemplate("Trac4366.scala") match {
+ case node: scala.xml.Node => {
+ shortComments(node).exists { n => {
+ val str = n.toString
+ str.contains("<code>foo</code>") && str.contains("</strong>")
+ } }
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4358") = {
+ createTemplate("Trac4358.scala") match {
+ case node: scala.xml.Node =>
+ ! shortComments(node).exists {
+ _.toString.contains("<em>i.</em>")
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4180") = {
+ createTemplate("Trac4180.scala") != None
+ }
+
+ property("Trac #4372") = {
+ createTemplate("Trac4372.scala") match {
+ case node: scala.xml.Node => {
+ val html = node.toString
+ html.contains("<span title=\"gt4s: $plus$colon\" class=\"name\">+:</span>") &&
+ html.contains("<span title=\"gt4s: $minus$colon\" class=\"name\">-:</span>") &&
+ html.contains("""<span class="params">(<span name="n">n: <span class="extype" name="scala.Int">Int</span></span>)</span><span class="result">: <span class="extype" name="scala.Int">Int</span></span>""")
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4374 - public") = {
+ val files = createTemplates("Trac4374.scala")
+ files("WithPublic.html") match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ s.contains("""href="WithPublic$.html"""") &&
+ files.get("WithPublic$.html") != None
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4374 - private") = {
+ val files = createTemplates("Trac4374.scala")
+ files("WithPrivate.html") match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ ! s.contains("""href="WithPrivate$.html"""") &&
+ files.get("WithPrivate$.html") == None
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4325 - files") = {
+ val files = createTemplates("Trac4325.scala")
+
+ files.get("WithSynthetic.html") != None &&
+ files.get("WithSynthetic$.html") == None &&
+ files.get("WithObject.html") != None &&
+ files.get("WithObject$.html") != None
+ }
+
+ property("Trac #4325 - Don't link to syntetic companion") = {
+ val files = createTemplates("Trac4325.scala")
+
+ files("WithSynthetic.html") match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ ! s.contains("""href="WithSynthetic$.html"""")
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4325 - Link to companion") = {
+ val files = createTemplates("Trac4325.scala")
+
+ files("WithObject.html") match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ s.contains("""href="WithObject$.html"""")
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4420 - no whitespace at end of line") = {
+ val files = createTemplates("Trac4420.scala")
+
+ files("TestA.html") match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ s.contains("""See YYY for more details""")
+ }
+ case _ => false
+ }
+ }
+ //
+ // property("Trac #484 - refinements and existentials") = {
+ // val files = createTemplates("Trac484.scala")
+ // val lines = """
+ // |type Bar = AnyRef { type Dingus <: T forSome { type T <: String } }
+ // |type Foo = AnyRef { ... /* 3 definitions in type refinement */ }
+ // |def g(x: T forSome { type T <: String }): String
+ // |def h(x: Float): AnyRef { def quux(x: Int,y: Int): Int }
+ // |def hh(x: Float): AnyRef { def quux(x: Int,y: Int): Int }
+ // |def j(x: Int): Bar
+ // |def k(): AnyRef { type Dingus <: T forSome { type T <: String } }
+ // """.stripMargin.trim.lines map (_.trim)
+ //
+ // files("RefinementAndExistentials.html") match {
+ // case node: scala.xml.Node => {
+ // val s = node.text.replaceAll("\\s+", " ")
+ // lines forall (s contains _)
+ // }
+ // case _ => false
+ // }
+ // }
+
+ property("Trac #4289") = {
+ val files = createTemplates("Trac4289.scala")
+
+ files("Subclass.html") match {
+ case node: scala.xml.Node => {
+ node.toString.contains {
+ """<dt>returns</dt><dd class="cmt"><p>123</p></dd>"""
+ }
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4409") = {
+ createTemplate("Trac4409.scala") match {
+ case node: scala.xml.Node => {
+ ! node.toString.contains("""<div class="block"><ol>since""")
+ }
+ case _ => false
+ }
+ }
+
+ property("Trac #4452") = {
+ createTemplate("Trac4452.scala") match {
+ case node: scala.xml.Node =>
+ ! node.toString.contains(">*")
+ case _ => false
+ }
+ }
+
+ property("Trac #4471") = {
+ createReferenceIndex("Trac4471.scala") match {
+ case Some(pages) =>
+ (pages.get("index/index-f.html") match {
+ case Some(node) => node.toString.contains(">A</a></strike>")
+ case _ => false
+ }) && (pages.get("index/index-b.html") match {
+ case Some(node) => node.toString.contains(">bar</strike>")
+ case _ => false
+ })
+ case _ => false
+ }
+ }
+
+ property("SI-4641") = {
+ createReferenceIndex("SI_4641.scala") match {
+ case Some(pages) => pages.contains("index/index-_.html")
+ case _ => false
+ }
+ }
+
+ property("SI-4421") = {
+ createTemplate("SI_4421.scala") match {
+ case node: scala.xml.Node => {
+ val html = node.toString
+ html.contains(">Example:") && html.contains(">Note<")
+ }
+ case _ => false
+ }
+ }
+
+ property("SI-4589") = {
+ createTemplate("SI_4589.scala") match {
+ case node: scala.xml.Node => {
+ val html = node.toString
+ html.contains(">x0123456789: <") &&
+ html.contains(">x012345678901234567890123456789: <")
+ }
+ case _ => false
+ }
+ }
+
+ property("SI-4714: Should decode symbolic type alias name.") = {
+ createTemplate("SI_4715.scala") match {
+ case node: scala.xml.Node => {
+ val html = node.toString
+ html.contains(">:+:<")
+ }
+ case _ => false
+ }
+ }
+
+ property("SI-4287: Default arguments of synthesized constructor") = {
+ val files = createTemplates("SI_4287.scala")
+
+ files("ClassWithSugar.html") match {
+ case node: scala.xml.Node => {
+ node.toString.contains(">123<")
+ }
+ case _ => false
+ }
+ }
+
+ property("SI-4507: Default arguments of synthesized constructor") = {
+ createTemplate("SI_4507.scala") match {
+ case node: scala.xml.Node =>
+ ! node.toString.contains("<li>returns silently when evaluating true and true</li>")
+ case _ => false
+ }
+ }
+
+ property("SI-4898: Use cases and links should not crash scaladoc") = {
+ createTemplate("SI_4898.scala")
+ true
+ }
+
+ property("SI-5054: Use cases should override their original members") =
+ checkText("SI_5054_q1.scala")(
+ (None,"""def test(): Int""", true)
+ //Disabled because the full signature is now displayed
+ //(None,"""def test(implicit lost: Int): Int""", false)
+ )
+
+ property("SI-5054: Use cases should keep their flags - final should not be lost") =
+ checkText("SI_5054_q2.scala")((None, """final def test(): Int""", true))
+
+ property("SI-5054: Use cases should keep their flags - implicit should not be lost") =
+ checkText("SI_5054_q3.scala")((None, """implicit def test(): Int""", true))
+
+ property("SI-5054: Use cases should keep their flags - real abstract should not be lost") =
+ checkText("SI_5054_q4.scala")((None, """abstract def test(): Int""", true))
+
+ property("SI-5054: Use cases should keep their flags - traits should not be affected") =
+ checkText("SI_5054_q5.scala")((None, """def test(): Int""", true))
+
+ property("SI-5054: Use cases should keep their flags - traits should not be affected") =
+ checkText("SI_5054_q6.scala")((None, """abstract def test(): Int""", true))
+
+ property("SI-5054: Use case individual signature test") =
+ checkText("SI_5054_q7.scala")(
+ (None, """abstract def test2(explicit: Int): Int [use case] This takes the explicit value passed.""", true),
+ (None, """abstract def test1(): Int [use case] This takes the implicit value in scope.""", true)
+ )
+
+ property("SI-5287: Display correct \"Definition classes\"") =
+ checkText("SI_5287.scala")(
+ (None,
+ """def method(): Int
+ [use case] The usecase explanation
+ [use case] The usecase explanation
+ Definition Classes SI_5287 SI_5287_B SI_5287_A""", true)
+ ) // the explanation appears twice, as small comment and full comment
+
+ property("Comment inheritance: Correct comment inheritance for overriding") =
+ checkText("implicit-inheritance-override.scala")(
+ (Some("Base"),
+ """def function[T](arg1: T, arg2: String): Double
+ The base comment.
+ The base comment. And another sentence...
+ T the type of the first argument
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true),
+ (Some("DerivedA"),
+ """def function[T](arg1: T, arg2: String): Double
+ Overriding the comment, the params and returns comments should stay the same.
+ Overriding the comment, the params and returns comments should stay the same.
+ T the type of the first argument
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true),
+ (Some("DerivedB"),
+ """def function[T](arg1: T, arg2: String): Double
+ T the type of the first argument
+ arg1 The overridden T term comment
+ arg2 The overridden string comment
+ returns The return comment
+ """, true),
+ (Some("DerivedC"),
+ """def function[T](arg1: T, arg2: String): Double
+ T the type of the first argument
+ arg1 The T term comment
+ arg2 The string comment
+ returns The overridden return comment
+ """, true),
+ (Some("DerivedD"),
+ """def function[T](arg1: T, arg2: String): Double
+ T The overriden type parameter comment
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true)
+ )
+
+ for (useCaseFile <- List("UseCaseInheritance", "UseCaseOverrideInheritance")) {
+ property("Comment inheritance: Correct comment inheritance for usecases") =
+ checkText("implicit-inheritance-usecase.scala")(
+ (Some(useCaseFile),
+ """def missing_arg[T](arg1: T): Double
+ [use case]
+ [use case]
+ T The type parameter
+ arg1 The T term comment
+ returns The return comment
+ """, true),
+ (Some(useCaseFile),
+ """def missing_targ(arg1: Int, arg2: String): Double
+ [use case]
+ [use case]
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true),
+ (Some(useCaseFile),
+ """def overridden_arg1[T](implicit arg1: T, arg2: String): Double
+ [use case]
+ [use case]
+ T The type parameter
+ arg1 The overridden T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true),
+ (Some(useCaseFile),
+ """def overridden_targ[T](implicit arg1: T, arg2: String): Double
+ [use case]
+ [use case]
+ T The overridden type parameter comment
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true),
+ (Some(useCaseFile),
+ """def overridden_return[T](implicit arg1: T, arg2: String): Double
+ [use case]
+ [use case]
+ T The type parameter
+ arg1 The T term comment
+ arg2 The string comment
+ returns The overridden return comment
+ """, true),
+ (Some(useCaseFile),
+ """def added_arg[T](implicit arg1: T, arg2: String, arg3: Float): Double
+ [use case]
+ [use case]
+ T The type parameter
+ arg1 The T term comment
+ arg2 The string comment
+ arg3 The added float comment
+ returns The return comment
+ """, true),
+ (Some(useCaseFile),
+ """def overridden_comment[T](implicit arg1: T, arg2: String): Double
+ [use case] The overridden comment.
+ [use case] The overridden comment.
+ T The type parameter
+ arg1 The T term comment
+ arg2 The string comment
+ returns The return comment
+ """, true)
+ )
+ }
+
+ property("Comment inheritance: Correct explicit inheritance for override") =
+ checkText("explicit-inheritance-override.scala")(
+ (Some("InheritDocDerived"),
+ """def function[T](arg1: T, arg2: String): Double
+ Starting line
+ Starting line
+ The base comment. And another sentence...
+ The base comment. And another sentence...
+ Ending line
+ T StartT the type of the first argument EndT
+ arg1 Start1 The T term comment End1
+ arg2 Start2 The string comment End2
+ returns StartRet The return comment EndRet""", true),
+ (Some("InheritDocDerived"),
+ """Definition Classes InheritDocDerived → InheritDocBase
+ Example: StartExample function[Int](3, "something") EndExample
+ Version StartVer 0.0.2 EndVer
+ Since StartSince 0.0.1 EndSince
+ Exceptions thrown
+ SomeException StartEx if the function is not called with correct parameters EndEx
+ SomeOtherException StartSOE Should Warn <invalid inheritdoc annotation> EndSOE
+ To do StartTodo Call mom. And dad! EndTodo
+ Note StartNote Be careful! EndNote
+ See also StartSee The Manual EndSee
+ """, true))
+
+ property("Comment inheritance: Correct explicit inheritance for usecase") =
+ checkText("explicit-inheritance-usecase.scala")(
+ (Some("UseCaseInheritDoc"),
+ """def function[T](arg1: T, arg2: String): Double
+ [use case] Starting line
+ [use case] Starting line
+ The base comment. And another sentence...
+ The base comment. And another sentence...
+ Ending line
+ T StartT the type of the first argument EndT
+ arg1 Start1 The T term comment End1
+ arg2 Start2 The string comment End2
+ returns StartRet The return comment EndRet""", true),
+ (Some("UseCaseInheritDoc"),
+ """Example: StartExample function[Int](3,"something") EndExample
+ Version StartVer 0.0.2 EndVer
+ Since StartSince 0.0.1 EndSince
+ Exceptions thrown
+ SomeException StartEx if the function is not called with correct parameters EndEx
+ SomeOtherException StartSOE Should Warn <invalid inheritdoc annotation> EndSOE
+ To do StartTodo Call mom. And dad! EndTodo
+ Note StartNote Be careful! EndNote
+ See also StartSee The Manual EndSee
+ """, true))
+
+ property("Comment inheritance: Correct explicit inheritance in corner cases") =
+ checkText("inheritdoc-corner-cases.scala")(
+ (Some("D"),
+ """def hello1: Int
+ Inherited: Hello 1 comment
+ Inherited: Hello 1 comment
+ Definition Classes D → A
+ """, true),
+ (Some("D"),
+ """def hello2: Int
+ Inherited: Hello 2 comment
+ Inherited: Hello 2 comment
+ Definition Classes D → B
+ """, true),
+ (Some("G"),
+ """def hello1: Int
+ Inherited: Hello 1 comment
+ Inherited: Hello 1 comment
+ Definition Classes G → D → A
+ """, true),
+ (Some("G"),
+ """def hello2: Int
+ Inherited: Hello 2 comment
+ Inherited: Hello 2 comment
+ Definition Classes G → D → B
+ """, true),
+ (Some("I"),
+ """def hello1(i: Int): Unit
+ [use case] Inherited: Hello 1 comment
+ [use case] Inherited: Hello 1 comment
+ Definition Classes I → G → D → A
+ """, true)
+ // traits E, F and H shouldn't crash scaladoc but we don't need to check the output
+ )
+
+ property("Indentation normalization for code blocks") = {
+ val files = createTemplates("code-indent.scala")
+
+ files("C.html") match {
+ case node: scala.xml.Node => {
+ val s = node.toString
+ s.contains("<pre>a typicial indented\ncomment on multiple\ncomment lines</pre>") &&
+ s.contains("<pre>one liner</pre>") &&
+ s.contains("<pre>two lines, one useful</pre>") &&
+ s.contains("<pre>line1\nline2\nline3\nline4</pre>") &&
+ s.contains("<pre>a ragged example\na (condition)\n the t h e n branch\nan alternative\n the e l s e branch</pre>") &&
+ s.contains("<pre>l1\n\nl2\n\nl3\n\nl4\n\nl5</pre>")
+ }
+ case _ => false
+ }
+ }
+
+ {
+ val files = createTemplates("basic.scala")
+ //println(files)
+
+ property("class") = files.get("com/example/p1/Clazz.html") match {
+ case Some(node: scala.xml.Node) => {
+ property("implicit convertion") =
+ node.toString contains "<span class=\"modifier\">implicit </span>"
+
+ property("gt4s") =
+ node.toString contains "title=\"gt4s: $colon$colon\""
+
+ property("gt4s of a deprecated method") =
+ node.toString contains "title=\"gt4s: $colon$colon$colon$colon. Deprecated: "
+ true
+ }
+ case _ => false
+ }
+ property("package") = files.get("com/example/p1/package.html") != None
+
+ property("package object") = files("com/example/p1/package.html") match {
+ case node: scala.xml.Node =>
+ node.toString contains "com.example.p1.package#packageObjectMethod"
+ case _ => false
+ }
+
+ property("lower bound") = files("com/example/p1/LowerBound.html") match {
+ case node: scala.xml.Node => true
+ case _ => false
+ }
+
+ property("upper bound") = files("com/example/p1/UpperBound.html") match {
+ case node: scala.xml.Node => true
+ case _ => false
+ }
+ }
+}
diff --git a/test/scaladoc/scalacheck/IndexScriptTest.scala b/test/scaladoc/scalacheck/IndexScriptTest.scala
new file mode 100644
index 0000000..5aef38e
--- /dev/null
+++ b/test/scaladoc/scalacheck/IndexScriptTest.scala
@@ -0,0 +1,52 @@
+import org.scalacheck._
+import org.scalacheck.Prop._
+
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.html.page.IndexScript
+import java.net.{URLClassLoader, URLDecoder}
+
+object Test extends Properties("IndexScript") {
+
+ def getClasspath = {
+ val loader = Thread.currentThread.getContextClassLoader
+ val paths = loader.asInstanceOf[URLClassLoader].getURLs
+ val morepaths = loader.getParent.asInstanceOf[URLClassLoader].getURLs
+ (paths ++ morepaths).map(u => URLDecoder.decode(u.getPath)).mkString(java.io.File.pathSeparator)
+ }
+
+ val docFactory = {
+ val settings = new doc.Settings({Console.err.println(_)})
+ settings.classpath.value = getClasspath
+ val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
+ new doc.DocFactory(reporter, settings)
+ }
+
+ val indexModelFactory = doc.model.IndexModelFactory
+
+ def createIndexScript(path: String) =
+ docFactory.makeUniverse(Left(List(path))) match {
+ case Some(universe) => {
+ val index = new IndexScript(universe,
+ indexModelFactory.makeIndex(universe))
+ Some(index)
+ }
+ case _ =>
+ None
+ }
+
+ property("allPackages") = {
+ createIndexScript("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
+ case Some(index) =>
+ index.allPackages.map(_.toString) == List(
+ "scala",
+ "scala.tools",
+ "scala.tools.nsc",
+ "scala.tools.nsc.doc",
+ "scala.tools.nsc.doc.html",
+ "scala.tools.nsc.doc.html.page"
+ )
+ case None =>
+ false
+ }
+ }
+}
diff --git a/test/scaladoc/scalacheck/IndexTest.scala b/test/scaladoc/scalacheck/IndexTest.scala
new file mode 100644
index 0000000..e114767
--- /dev/null
+++ b/test/scaladoc/scalacheck/IndexTest.scala
@@ -0,0 +1,82 @@
+import org.scalacheck._
+import org.scalacheck.Prop._
+
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.html.page.Index
+import java.net.{URLClassLoader, URLDecoder}
+
+object Test extends Properties("Index") {
+
+ def getClasspath = {
+ // these things can be tricky
+ // this test previously relied on the assumption that the current thread's classloader is an url classloader and contains all the classpaths
+ // does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no.
+ // this test _will_ fail again some time in the future.
+ val paths = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader].getURLs.map(u => URLDecoder.decode(u.getPath))
+ val morepaths = Thread.currentThread.getContextClassLoader.getParent.asInstanceOf[URLClassLoader].getURLs.map(u => URLDecoder.decode(u.getPath))
+ (paths ++ morepaths).mkString(java.io.File.pathSeparator)
+ }
+
+ val docFactory = {
+ val settings = new doc.Settings({Console.err.println(_)})
+
+ settings.classpath.value = getClasspath
+ println(settings.classpath.value)
+
+ val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings)
+
+ new doc.DocFactory(reporter, settings)
+ }
+
+ val indexModelFactory = doc.model.IndexModelFactory
+
+ def createIndex(path: String): Option[Index] = {
+
+ val maybeUniverse = {
+ //val stream = new java.io.ByteArrayOutputStream
+ //val original = Console.out
+ //Console.setOut(stream)
+
+ val result = docFactory.makeUniverse(Left(List(path)))
+
+ // assert(stream.toString == "model contains 2 documentable templates\n")
+ //Console.setOut(original)
+
+ result
+ }
+
+ maybeUniverse match {
+ case Some(universe) => {
+ val index = new Index(universe, indexModelFactory.makeIndex(universe))
+ return Some(index)
+ }
+ case _ => return None
+ }
+
+ }
+
+ property("path") = {
+ createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
+ case Some(index) =>
+ index.path == List("index.html")
+ case None => false
+ }
+ }
+
+ property("title") = {
+ createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
+ case Some(index) =>
+ index.title == ""
+
+ case None => false
+ }
+ }
+
+ property("package objects in index") = {
+ createIndex("test/scaladoc/resources/SI-5558.scala") match {
+ case Some(index) =>
+ index.index.firstLetterIndex('f') isDefinedAt "foo"
+ case None => false
+ }
+ }
+}
diff --git a/test/script-tests/README b/test/script-tests/README
new file mode 100755
index 0000000..3f5c2ce
--- /dev/null
+++ b/test/script-tests/README
@@ -0,0 +1,8 @@
+This is a fresh start for script tests. The fact that windows exists can
+no longer be allowed to stand in the way of testing the wide range of
+functionality which currently goes completely untested. So I'll just be
+putting self-contained script tests in here to run some way that doesn't
+depend on all the platform stars aligning all the time. Feel free to
+join me.
+
+-- extempore, Nov 21 2011
\ No newline at end of file
diff --git a/test/script-tests/jar-manifest/resources/MANIFEST.MF b/test/script-tests/jar-manifest/resources/MANIFEST.MF
new file mode 100644
index 0000000..93c54c3
--- /dev/null
+++ b/test/script-tests/jar-manifest/resources/MANIFEST.MF
@@ -0,0 +1,3 @@
+Manifest-Version: 1.0
+Main-Class: bippy.Runner
+Class-Path: bippy.jar dingus.jar http://mirrors.ibiblio.org/pub/mirrors/maven2/com/thoughtworks/paranamer/paranamer/2.4/paranamer-2.4.jar
diff --git a/test/script-tests/jar-manifest/run-test b/test/script-tests/jar-manifest/run-test
new file mode 100755
index 0000000..2c6d587
--- /dev/null
+++ b/test/script-tests/jar-manifest/run-test
@@ -0,0 +1,41 @@
+#!/usr/bin/env bash
+#
+
+set -e
+
+paranamerjar="http://mirrors.ibiblio.org/pub/mirrors/maven2/com/thoughtworks/paranamer/paranamer/2.4/paranamer-2.4.jar"
+build=$(dirname $0)/../../../build/pack
+
+if [[ -n "$SCALA_HOME" ]]; then
+ scala="$SCALA_HOME/bin/scala"
+elif [[ -d $build ]]; then
+ scala=$(cd $build && pwd -P)/bin/scala
+else
+ scala="scala"
+fi
+
+echo "$($scala -version 2>&1)"
+scalac="${scala}c"
+
+[[ -d lib ]] || mkdir lib
+[[ -f lib/paranamer-2.4.jar ]] || ( printf >&2 "Grabbing paranamer jar\n\n" && cd lib && wget --quiet "$paranamerjar" )
+
+rm -rf target && mkdir target
+"$scalac" -d target -cp lib/'*' src/*.scala
+cd target
+
+jar cmf ../resources/MANIFEST.MF bippy.jar bippy
+jar cf dingus.jar dingus
+
+run () {
+ echo ""
+ echo "% $@"
+ "$@"
+}
+
+cat <<EOM
+$(run pwd)
+$(run jar tf bippy.jar)
+$(run jar tf dingus.jar)
+$(run $scala $@ bippy.jar)
+EOM
diff --git a/test/script-tests/jar-manifest/run-test.check b/test/script-tests/jar-manifest/run-test.check
new file mode 100644
index 0000000..ef59a6c
--- /dev/null
+++ b/test/script-tests/jar-manifest/run-test.check
@@ -0,0 +1,30 @@
+Scala code runner version 2.10.0.r26038-b20111121102734 -- Copyright 2002-2011, LAMP/EPFL
+
+% pwd
+/scala/trunk/test/script-tests/jar-manifest/target
+
+% jar tf bippy.jar
+META-INF/
+META-INF/MANIFEST.MF
+bippy/
+bippy/Runner$$anonfun$main$1.class
+bippy/Runner$$anonfun$main$2.class
+bippy/Runner$$anonfun$main$3.class
+bippy/Runner$.class
+bippy/Runner.class
+
+% jar tf dingus.jar
+META-INF/
+META-INF/MANIFEST.MF
+dingus/
+dingus/Printable.class
+
+% /scala/trunk/build/pack/bin/scala bippy.jar
+1 "Greetings from dingus.jar!"
+2 bippyBingle has parameters: imparametorium, antidisestablish, x
+3 bippyBoo has parameters: quuxParameter
+4
+5 Urls exposed through the classloader:
+6 file:/scala/trunk/test/script-tests/jar-manifest/target/./bippy.jar
+7 file:/scala/trunk/test/script-tests/jar-manifest/target/./dingus.jar
+8 http://mirrors.ibiblio.org/pub/mirrors/maven2/com/thoughtworks/paranamer/paranamer/2.4/paranamer-2.4.jar
diff --git a/test/script-tests/jar-manifest/src/jar-test.scala b/test/script-tests/jar-manifest/src/jar-test.scala
new file mode 100644
index 0000000..80e3aaf
--- /dev/null
+++ b/test/script-tests/jar-manifest/src/jar-test.scala
@@ -0,0 +1,34 @@
+import scala.tools.nsc.util.HasClassPath
+
+package bippy {
+ object Runner {
+ var line = 0
+ def echo(msgs: Any*) = {
+ line += 1
+ Console.println("%-2s %s".format(line, msgs mkString " "))
+ }
+
+ def bippyBoo(quuxParameter: Int) = 5
+ def bippyBingle(imparametorium: String, antidisestablish: Int, x: Float) = ()
+
+ def main(args: Array[String]): Unit = {
+ echo(new dingus.Printable)
+ val namer = new com.thoughtworks.paranamer.BytecodeReadingParanamer
+ getClass.getMethods filter (_.getName startsWith "bippy") foreach { m =>
+ echo(m.getName, "has parameters:", namer.lookupParameterNames(m).mkString(", "))
+ }
+ echo("")
+ echo("Urls exposed through the classloader:")
+ getClass.getClassLoader match {
+ case x: HasClassPath => x.classPathURLs foreach (x => echo(x))
+ case _ => echo("None! Seems unlikely we'd get this far then.")
+ }
+ }
+ }
+}
+
+package dingus {
+ class Printable {
+ override def toString = "\"Greetings from dingus.jar!\""
+ }
+}
\ No newline at end of file
diff --git a/test/support/annotations/mkAnnotationsJar.sh b/test/support/annotations/mkAnnotationsJar.sh
old mode 100644
new mode 100755
diff --git a/tools/abspath b/tools/abspath
old mode 100644
new mode 100755
diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh
old mode 100644
new mode 100755
index 3a75593..92ef3a0
--- a/tools/binary-repo-lib.sh
+++ b/tools/binary-repo-lib.sh
@@ -3,10 +3,12 @@
# Library to push and pull binary artifacts from a remote repository using CURL.
-remote_urlbase="http://typesafe.artifactoryonline.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
+remote_urlget="http://repo.typesafe.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
+remote_urlpush="http://private-repo.typesafe.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
libraryJar="$(pwd)/lib/scala-library.jar"
desired_ext=".desired.sha1"
push_jar="$(pwd)/tools/push.jar"
+if [[ "$OSTYPE" == *Cygwin* || "$OSTYPE" == *cygwin* ]]; then push_jar="$(cygpath -m "$push_jar")"; fi
# Cache dir has .sbt in it to line up with SBT build.
cache_dir="${HOME}/.sbt/cache/scala"
@@ -34,8 +36,8 @@ curlUpload() {
local data=$2
local user=$3
local password=$4
- local url="${remote_urlbase}/${remote_location}"
- java -jar $push_jar "$data" "$remote_location" "$user" "$password"
+ local url="${remote_urlpush}/${remote_location}"
+ java -jar $push_jar "$data" "$url" "$user" "$password"
if (( $? != 0 )); then
echo "Error uploading $data to $url"
echo "$url"
@@ -50,7 +52,7 @@ curlDownload() {
checkCurl
local jar=$1
local url=$2
- if [[ "$OSTYPE" == *Cygwin* ]]; then
+ if [[ "$OSTYPE" == *Cygwin* || "$OSTYPE" == *cygwin* ]]; then
jar=$(cygpath -m $1)
fi
http_code=$(curl --write-out '%{http_code}' --silent --fail --output "$jar" "$url")
@@ -74,25 +76,47 @@ pushJarFile() {
local jar_dir=$(dirname $jar)
local jar_name=${jar#$jar_dir/}
pushd $jar_dir >/dev/null
- local jar_sha1=$(shasum -p $jar_name)
- local version=${jar_sha1% ?$jar_name}
+ local version=$(makeJarSha $jar_name)
local remote_uri=${version}${jar#$basedir}
- echo " Pushing to ${remote_urlbase}/${remote_uri} ..."
+ echo " Pushing to ${remote_urlpush}/${remote_uri} ..."
echo " $curl"
curlUpload $remote_uri $jar_name $user $pw
echo " Making new sha1 file ...."
- echo "$jar_sha1" > "${jar_name}${desired_ext}"
+ echo "$version ?$jar_name" > "${jar_name}${desired_ext}"
popd >/dev/null
# TODO - Git remove jar and git add jar.desired.sha1
# rm $jar
}
+makeJarSha() {
+ local jar=$1
+ if which sha1sum 2>/dev/null >/dev/null; then
+ shastring=$(sha1sum "$jar")
+ echo "$shastring" | sed 's/ .*//'
+ elif which shasum 2>/dev/null >/dev/null; then
+ shastring=$(shasum "$jar")
+ echo "$shastring" | sed 's/ .*//'
+ else
+ shastring=$(openssl sha1 "$jar")
+ echo "$shastring" | sed 's/^.*= //'
+ fi
+}
+
+getJarSha() {
+ local jar=$1
+ if [[ ! -f "$jar" ]]; then
+ echo ""
+ else
+ echo $(makeJarSha $jar)
+ fi
+}
+
# Tests whether or not the .desired.sha1 hash matches a given file.
# Arugment 1 - The jar file to test validity.
# Returns: Empty string on failure, "OK" on success.
isJarFileValid() {
local jar=$1
- if [[ ! -f $jar ]]; then
+ if [[ ! -f "$jar" ]]; then
echo ""
else
local jar_dir=$(dirname $jar)
@@ -113,7 +137,7 @@ pushJarFiles() {
local user=$2
local password=$3
# TODO - ignore target/ and build/
- local jarFiles="$(find ${basedir}/lib -name "*.jar") $(find ${basedir}/test/files -name "*.jar")"
+ local jarFiles="$(find ${basedir}/lib -name "*.jar") $(find ${basedir}/test/files -name "*.jar") $(find ${basedir}/tools -name "*.jar")"
local changed="no"
for jar in $jarFiles; do
local valid=$(isJarFileValid $jar)
@@ -130,6 +154,27 @@ pushJarFiles() {
fi
}
+
+checkJarSha() {
+ local jar=$1
+ local sha=$2
+ local testsha=$(getJarSha "$jar")
+ if test "$sha" == "$testsha"; then
+ echo "OK"
+ fi
+}
+
+makeCacheLocation() {
+ local uri=$1
+ local sha=$2
+ local cache_loc="$cache_dir/$uri"
+ local cdir=$(dirname $cache_loc)
+ if [[ ! -d "$cdir" ]]; then
+ mkdir -p "$cdir"
+ fi
+ echo "$cache_loc"
+}
+
# Pulls a single binary artifact from a remote repository.
# Argument 1 - The uri to the file that should be downloaded.
# Argument 2 - SHA of the file...
@@ -137,16 +182,21 @@ pushJarFiles() {
pullJarFileToCache() {
local uri=$1
local sha=$2
- local cache_loc=$cache_dir/$uri
- local cdir=$(dirname $cache_loc)
- if [[ ! -d $cdir ]]; then
- mkdir -p $cdir
- fi
+ local cache_loc="$(makeCacheLocation $uri)"
# TODO - Check SHA of local cache is accurate.
- if [[ ! -f $cache_loc ]]; then
- curlDownload $cache_loc ${remote_urlbase}/${uri}
+ if test -f "$cache_loc" && test "$(checkJarSha "$cache_loc" "$sha")" != "OK"; then
+ echo "Found bad cached file: $cache_loc"
+ rm -f "$cache_loc"
+ fi
+ if [[ ! -f "$cache_loc" ]]; then
+ # Note: After we follow up with JFrog, we should check the more stable raw file server first
+ # before hitting the more flaky artifactory.
+ curlDownload $cache_loc ${remote_urlget}/${uri}
+ if test "$(checkJarSha "$cache_loc" "$sha")" != "OK"; then
+ echo "Trouble downloading $uri. Please try pull-binary-libs again when your internet connection is stable."
+ exit 2
+ fi
fi
- echo "$cache_loc"
}
# Pulls a single binary artifact from a remote repository.
@@ -161,7 +211,8 @@ pullJarFile() {
local version=${sha1% ?$jar_name}
local remote_uri=${version}/${jar#$basedir/}
echo "Resolving [${remote_uri}]"
- local cached_file=$(pullJarFileToCache $remote_uri $version)
+ pullJarFileToCache $remote_uri $version
+ local cached_file=$(makeCacheLocation $remote_uri)
cp $cached_file $jar
}
diff --git a/tools/buildcp b/tools/buildcp
new file mode 100755
index 0000000..766ab81
--- /dev/null
+++ b/tools/buildcp
@@ -0,0 +1,11 @@
+#!/bin/sh
+#
+
+[[ $# -eq 1 ]] || { echo "Usage: $0 <locker|quick|...>"; exit 0; }
+
+dir=$(dirname $0)
+lib=$($dir/abspath $dir/../lib)
+build=$($dir/abspath $dir/../build)
+cp=$($dir/cpof $build/$1/classes):$build/asm/classes
+
+echo $cp:$lib/fjbg.jar:$lib/msil.jar:$lib/forkjoin.jar:$lib/jline.jar:$lib/extra/'*'
diff --git a/tools/class-dump b/tools/class-dump
new file mode 100755
index 0000000..06a7e5a
--- /dev/null
+++ b/tools/class-dump
@@ -0,0 +1,6 @@
+#!/bin/sh
+#
+
+JAVAP_OPTS="-private"
+
+[[ -n "$1" ]] && ( cd "$(dirname "$1")" && javap $JAVAP_OPTS "$(basename "${1%%.class}")" )
diff --git a/tools/cleanup-commit b/tools/cleanup-commit
new file mode 100755
index 0000000..400d434
--- /dev/null
+++ b/tools/cleanup-commit
@@ -0,0 +1,130 @@
+#!/bin/bash
+
+##
+## The cleanup-commit script
+## -------------------------
+## This little script will cleanup your commit before you send it. You need to add the files to the staged area and
+## run this script. It will automatically cleanup tabs and trailing spaces for the files you added and then add the
+## clean versions to the staging area.
+##
+## Use at your own risk, I spent some time making the script error-proof so it will abort if sees any inconsistency,
+## but of course playing around with your commit might break things. Btw, it saves the original file to file.bak.
+##
+## Happy hacking!
+##
+
+ABORT="Ab0rT0p3r4+|0n"
+
+#
+# Cleanup function
+#
+function cleanup {
+ echo Cleaning up $1...
+ # prepare the ground
+ rm -rf $1.bak
+ # compress <TAB> into double <BLANK> and eliminate trailing <BLANC>s
+ sed -i.bak -e 's/\t/ /g' -e 's/ *$//' $1
+}
+
+
+#
+# Get the git status for the current staged commit
+#
+FULLSTATUS=`git status --porcelain`
+
+if [ $? -ne 0 ]
+then
+ echo "Unable to run git. Check if:"
+ echo " -- git is installed (you can run git in the command line)"
+ echo " -- the current directory is a valid git repository"
+ exit 1
+fi
+
+echo
+
+#
+# Based on the status decide what files will get cleaned up
+#
+CLEANUP_FILES=`echo "$FULLSTATUS" | while read LINE
+do
+
+ STATUS=$(echo $LINE | sed 's/^\(..\).*$/\1/')
+ if [ $? -ne 0 ]
+ then
+ echo "Could not get the status for line: $LINE"
+ echo " -- you have the basic unix tools installed (grep, cut, sed)"
+ echo $ABORT # This goes to CLEANUP_FILES
+ exit 1
+ fi
+
+ FILES=$(echo $LINE | sed 's/^..//')
+ FILE1=$(echo $FILES | cut -d ' ' -f 1)
+ FILE2=$(echo $FILES | cut -d ' ' -f 3)
+
+ case "$STATUS" in
+ [AMRDC]" ")
+ case "$STATUS" in
+ "A "|"M ")
+ echo $FILE1
+ ;;
+ "R ")
+ echo $FILE2
+ ;;
+ "D ")
+ #nothing to do
+ ;;
+ "C ")
+ echo $FILE1
+ echo $FILE2
+ ;;
+ esac
+ ;;
+ "??")
+ # File is not tracked, no need to do anything about it
+ # echo Untracked: $FILE1
+ ;;
+ *)
+ echo "Unstable status of file $FILE1 (\"$STATUS\")" >&2
+ echo "Aborting cleanup!" >&2
+ echo $ABORT # This goes to CLEANUP_FILES
+ exit 1
+ esac
+done; echo $CLEANUP_FILES`
+
+
+#
+# Perform actual cleanup
+#
+case $CLEANUP_FILES in
+*"$ABORT")
+ echo
+ exit 1
+ ;;
+"")
+ echo Nothing to do!
+ ;;
+*)
+ cd $(git rev-parse --show-toplevel)
+
+ if [ $? -ne 0 ]
+ then
+ echo Unexpected error: cannot cd to the repository root
+ echo Aborting cleanup!
+ exit 1
+ fi
+
+ echo "$CLEANUP_FILES" | while read FILE
+ do
+ cleanup $FILE
+ done
+
+ cd - &>/dev/null
+
+ echo
+ echo "Cleanup done: "
+ echo " - original files saved as .bak"
+ echo " - you can do \"git diff\" to see the changes the script did"
+ echo " - you can do \"git commit -a\" to commit the cleaned up files"
+ echo
+ ;;
+esac
diff --git a/tools/codegen b/tools/codegen
old mode 100644
new mode 100755
index 734235a..35c93fb
--- a/tools/codegen
+++ b/tools/codegen
@@ -3,6 +3,6 @@
THISDIR=`dirname $0`
SCALALIB=$THISDIR/../src/library/scala
-BINDIR=$THISDIR/../build/pack/bin
+BINDIR=$THISDIR/../build/quick/bin
$BINDIR/scala scala.tools.cmd.gen.Codegen "$@"
diff --git a/tools/codegen-anyvals b/tools/codegen-anyvals
old mode 100644
new mode 100755
diff --git a/tools/cpof b/tools/cpof
old mode 100644
new mode 100755
index ab5a42b..c09ed20
--- a/tools/cpof
+++ b/tools/cpof
@@ -3,28 +3,16 @@
# Creates a classpath out of the contents of each directory
# given as an argument.
-if [ $# == 0 ] ; then
- echo "Usage: $0 [dir1 dir2 ...]"
- exit 1
-fi
+[[ $# == 0 ]] && { echo "Usage: $0 [dir1 dir2 ...]" && exit 0; }
-THISDIR=`dirname $0`
-ABSCMD="${THISDIR}/abspath"
-CPRES=""
+sdir=$(dirname $0)
+cp=""
-for dir in $* ; do
- absdir=`${ABSCMD} $dir`
- LS=`ls -1 ${absdir}`
-
- for x in $LS ; do
- ABS=`${ABSCMD} "${absdir}/${x}"`
- CPRES="${CPRES}:${ABS}"
+for dir in "$@" ; do
+ for x in $($sdir/abspath $dir)/* ; do
+ cp="$cp:$($sdir/abspath $x)"
done
done
-# shaving the : off the beginning. Applause to /bin/sh for
-# keeping us humble about how far we've come.
-LEN=$(( ${#CPRES} - 1 ))
-result=${CPRES:1:${LEN}}
-
-echo $result
+# shaving the : off the beginning.
+echo ${cp#:}
diff --git a/tools/deploy-local-maven-snapshot b/tools/deploy-local-maven-snapshot
old mode 100644
new mode 100755
diff --git a/tools/diffPickled b/tools/diffPickled
old mode 100644
new mode 100755
diff --git a/tools/epfl-build b/tools/epfl-build
old mode 100644
new mode 100755
diff --git a/tools/epfl-build-2.x.x b/tools/epfl-build-2.x.x
deleted file mode 100644
index 7bc884c..0000000
--- a/tools/epfl-build-2.x.x
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env bash
-#
-
-[[ $# -gt 0 ]] || {
- echo "Usage: $0 <version> [publish destination]"
- echo ""
- exit 0
-}
-
-version="$1"
-shift
-rsyncDest="$1"
-
-# should not be hardcoded
-mavenSettings="/home/linuxsoft/apps/hudson-maven-settings/settings.xml"
-
-# main build sequence
-ant all.clean
-./pull-binary-libs.sh
-ant nightly
-ant docscomp
-
-# publish nightly build
-if [ -n "$rsyncDest" ]; then
- echo "Copying nightly build to $rsyncDest"
- # Archive Scala nightly distribution
- rsync -az dists/archives/ "$rsyncDest/distributions"
- # SKIP PUBLISHING DOCS IN 2.8.X BRANCH
- if [[ $version != "2.8.x" ]]; then
- rsync -az build/scaladoc/ "$rsyncDest/docs"
- fi
- rsync -az dists/sbaz/ "$rsyncDest/sbaz"
- # Deploy the maven artifacts on scala-tools.org
- ( cd dists/maven/latest && ant deploy.snapshot -Dsettings.file="$mavenSettings" )
-fi
diff --git a/tools/epfl-publish b/tools/epfl-publish
old mode 100644
new mode 100755
index de5e17b..cdf1882
--- a/tools/epfl-publish
+++ b/tools/epfl-publish
@@ -24,7 +24,7 @@ if [[ -z $publish_to ]]; then
else
echo "Publishing nightly build to $publish_to"
# Archive Scala nightly distribution
- rsync -az dists/archives/ "$publish_to/distributions"
+ rsync -az --exclude scala-latest-sources.tgz dists/archives/ "$publish_to/distributions"
# only publish scaladoc nightly for trunk
[[ $version == "master" ]] && rsync -az build/scaladoc/ "$publish_to/docs"
# sbaz
diff --git a/tools/get-scala-commit-date b/tools/get-scala-commit-date
old mode 100644
new mode 100755
diff --git a/tools/get-scala-commit-date.bat b/tools/get-scala-commit-date.bat
index a071555..e169de1 100644
--- a/tools/get-scala-commit-date.bat
+++ b/tools/get-scala-commit-date.bat
@@ -1,24 +1,9 @@
@echo off
-rem
-rem Usage: get-scala-revison.bat [dir]
-rem Figures out current scala commit date of a git clone.
-rem
-rem If no dir is given, current working dir is used.
-
- at setlocal
-set _DIR=
-if "%*"=="" (
- for /f "delims=;" %%i in ('cd') do set "_DIR=%%i"
+for %%X in (bash.exe) do (set FOUND=%%~$PATH:X)
+if defined FOUND (
+ bash "%~dp0\get-scala-commit-date"
) else (
- set "_DIR=%~1"
-)
-cd %_DIR%
-
-rem TODO - Check with a real windows user that this works!
-if exist .git\NUL (
- for /f "tokens=1delims= " in ('git log --format="%ci" -1') do set commitdate=%%a
- echo %commitdate%
-)
-
-:end
- at endlocal
+ rem echo this script does not work with cmd.exe. please, install bash
+ echo unknown
+ exit 1
+)
\ No newline at end of file
diff --git a/tools/get-scala-commit-sha b/tools/get-scala-commit-sha
old mode 100644
new mode 100755
diff --git a/tools/get-scala-commit-sha.bat b/tools/get-scala-commit-sha.bat
index 7a5afa1..1eaffc0 100644
--- a/tools/get-scala-commit-sha.bat
+++ b/tools/get-scala-commit-sha.bat
@@ -1,21 +1,9 @@
@echo off
-rem
-rem Usage: get-scala-commit-drift.bat [dir]
-rem Figures out current scala commit drift, of a clone.
-rem
-rem If no dir is given, current working dir is used.
-
- at setlocal
-set _DIR=
-if "%*"=="" (
- for /f "delims=;" %%i in ('cd') do set "_DIR=%%i"
+for %%X in (bash.exe) do (set FOUND=%%~$PATH:X)
+if defined FOUND (
+ bash "%~dp0\get-scala-commit-sha"
) else (
- set "_DIR=%~1"
-)
-cd %_DIR%
-
-rem TODO - truncate chars.
-git log -1 --format="%H
-
-:end
- at endlocal
+ rem echo this script does not work with cmd.exe. please, install bash
+ echo unknown
+ exit 1
+)
\ No newline at end of file
diff --git a/tools/jar-dump b/tools/jar-dump
new file mode 100755
index 0000000..166441b
--- /dev/null
+++ b/tools/jar-dump
@@ -0,0 +1,4 @@
+#!/bin/sh
+#
+
+jar tf "$1" | sort
diff --git a/tools/locker_scala b/tools/locker_scala
old mode 100644
new mode 100755
index 4434c94..02d2efc
--- a/tools/locker_scala
+++ b/tools/locker_scala
@@ -1,8 +1,6 @@
-#!/bin/bash
+#!/bin/sh
#
-THISDIR=`dirname $0`
-CP=`$THISDIR/lockercp`
-CLASS="scala.tools.nsc.MainGenericRunner"
+CP=$($(dirname $BASH_SOURCE)/lockercp)
-java -classpath "$CP" $CLASS -usejavacp "$@"
+java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.MainGenericRunner -usejavacp "$@"
diff --git a/tools/locker_scalac b/tools/locker_scalac
old mode 100644
new mode 100755
index 2ad153e..c4b28b7
--- a/tools/locker_scalac
+++ b/tools/locker_scalac
@@ -1,8 +1,6 @@
-#!/bin/bash
+#!/bin/sh
#
-THISDIR=`dirname $0`
-CP=`$THISDIR/lockercp`
-CLASS="scala.tools.nsc.Main"
+CP=$($(dirname $BASH_SOURCE)/lockercp)
-java -classpath "$CP" $CLASS -usejavacp "$@"
+java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.Main -usejavacp "$@"
diff --git a/tools/lockercp b/tools/lockercp
old mode 100644
new mode 100755
index 3e87995..43c72dd
--- a/tools/lockercp
+++ b/tools/lockercp
@@ -1,10 +1,4 @@
#!/bin/sh
#
-THISDIR=`dirname $0`
-ABS=${THISDIR}/abspath
-LIBDIR=`$ABS $THISDIR/../lib`
-
-cp=`${THISDIR}/cpof ${THISDIR}/../build/locker/classes`
-
-echo ${cp}:$LIBDIR/fjbg.jar:$LIBDIR/msil.jar:$LIBDIR/forkjoin.jar:$LIBDIR/jline.jar:$LIBDIR/extra/'*'
+$(dirname $0)/buildcp locker
diff --git a/tools/make-release-notes b/tools/make-release-notes
old mode 100644
new mode 100755
diff --git a/tools/new-starr b/tools/new-starr
new file mode 100755
index 0000000..5f00cc7
--- /dev/null
+++ b/tools/new-starr
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+ant -Dscalac.args.optimise="-optimise" locker.done
+cp -R src/starr/* src/
+ant build-opt
+ant starr.done
diff --git a/tools/packcp b/tools/packcp
old mode 100644
new mode 100755
index 42bce9e..ecc7ee1
--- a/tools/packcp
+++ b/tools/packcp
@@ -1,5 +1,5 @@
#!/bin/sh
#
-THISDIR=`dirname $0`
-${THISDIR}/cpof ${THISDIR}/../build/pack/lib
+dir=$(dirname $0)
+$dir/cpof $dir/../build/pack/lib
diff --git a/tools/pathResolver b/tools/pathResolver
old mode 100644
new mode 100755
diff --git a/tools/profile_scala b/tools/profile_scala
deleted file mode 100644
index 037fc32..0000000
--- a/tools/profile_scala
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-#
-
-# Uses quick by default
-CLASSPATH=`tools/quickcp`
-
-AGENT=${YOURKIT_PATH:-/Applications/YourKit.app/bin/mac/libyjpagent.jnilib}
-
-java $JAVA_OPTS \
- -classpath $CLASSPATH \
- -agentpath:$AGENT=$YNP_STARTUP_OPTIONS \
- scala.tools.nsc.MainGenericRunner -usejavacp \
- -i <(cat <<EOF
-lazy val profiler = new scala.tools.util.YourkitProfiling { }
-import profiler._
-EOF
-) "$@"
diff --git a/tools/profile_scalac b/tools/profile_scalac
deleted file mode 100644
index f29b5b6..0000000
--- a/tools/profile_scalac
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-#
-# To influence behavior, you can set:
-#
-# YOURKIT_PATH
-# YOURKIT_PROFILE_PHASES
-# YNP_STARTUP_OPTIONS
-#
-
-# Start cpu sampling immediately
-DEFAULT_OPTS="sampling,onexit=snapshot"
-
-# Uses quick by default
-CLASSPATH=`tools/quickcp`
-
-AGENT=${YOURKIT_PATH:-/Applications/YourKit.app/bin/mac/libyjpagent.jnilib}
-OPTS=${YNP_STARTUP_OPTIONS:-$DEFAULT_OPTS}
-PHASES=${YOURKIT_PROFILE_PHASES:-all}
-
-java $JAVA_OPTS \
- -classpath $CLASSPATH \
- -agentpath:$AGENT=$OPTS \
- scala.tools.nsc.Main -usejavacp \
- -Yprofile:$PHASES \
- "$@"
diff --git a/tools/push.jar b/tools/push.jar
deleted file mode 100644
index 8fe9346..0000000
Binary files a/tools/push.jar and /dev/null differ
diff --git a/tools/push.jar.desired.sha1 b/tools/push.jar.desired.sha1
index 53d566f..63e6a47 100644
--- a/tools/push.jar.desired.sha1
+++ b/tools/push.jar.desired.sha1
@@ -1 +1 @@
-de5d3eb21a732e4bce44c283ccfbd1ed94bfeaed ?push.jar
+a1883f4304d5aa65e1f6ee6aad5900c62dd81079 ?push.jar
diff --git a/tools/quick_scala b/tools/quick_scala
new file mode 100755
index 0000000..16938dd
--- /dev/null
+++ b/tools/quick_scala
@@ -0,0 +1,6 @@
+#!/bin/sh
+#
+
+CP=$($(dirname $BASH_SOURCE)/quickcp)
+
+java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.MainGenericRunner -usejavacp "$@"
diff --git a/tools/quick_scalac b/tools/quick_scalac
new file mode 100755
index 0000000..1b9a036
--- /dev/null
+++ b/tools/quick_scalac
@@ -0,0 +1,6 @@
+#!/bin/sh
+#
+
+CP=$($(dirname $BASH_SOURCE)/quickcp)
+
+java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.Main -usejavacp "$@"
diff --git a/tools/quickcp b/tools/quickcp
old mode 100644
new mode 100755
index dd5251d..25d46e5
--- a/tools/quickcp
+++ b/tools/quickcp
@@ -1,10 +1,4 @@
#!/bin/sh
#
-THISDIR=`dirname $0`
-ABS=${THISDIR}/abspath
-LIBDIR=`$ABS $THISDIR/../lib`
-
-cp=`${THISDIR}/cpof ${THISDIR}/../build/quick/classes`
-
-echo ${cp}:$LIBDIR/fjbg.jar:$LIBDIR/msil.jar:$LIBDIR/forkjoin.jar:$LIBDIR/jline.jar:$LIBDIR/extra/'*'
+$(dirname $0)/buildcp quick
diff --git a/tools/remotetest b/tools/remotetest
old mode 100644
new mode 100755
diff --git a/tools/rm-orphan-checkfiles b/tools/rm-orphan-checkfiles
new file mode 100755
index 0000000..ca0a3f2
--- /dev/null
+++ b/tools/rm-orphan-checkfiles
@@ -0,0 +1,18 @@
+#!/bin/sh
+#
+# Scans for and removes .check and .flags files under test/
+# which don't have an accompanying test.
+
+shopt -s nullglob
+
+echo "Scanning for orphan check files..."
+for f in $(ls -1d test/{files,pending,disabled}/{jvm,neg,pos,run}/*.check); do
+ base=${f%%.check}
+ [[ -d $base ]] || [[ -f $base.scala ]] || git rm -f $f
+done
+
+echo "Scanning for orphan flags files..."
+for f in $(ls -1d test/{files,pending,disabled}/{jvm,neg,pos,run}/*.flags); do
+ base=${f%%.flags}
+ [[ -d $base ]] || [[ -f $base.scala ]] || git rm -f $f
+done
diff --git a/tools/scaladoc-compare b/tools/scaladoc-compare
new file mode 100755
index 0000000..74fbfd1
--- /dev/null
+++ b/tools/scaladoc-compare
@@ -0,0 +1,50 @@
+#!/bin/bash
+#
+# Script to compare scaladoc raw files. For an explanation read the next echos.
+#
+
+if [ $# -ne 2 ]
+then
+ echo
+ echo "scaladoc-compare will compare the scaladoc-generated pages in two different locations and output the diff"
+ echo "it's main purpose is to track changes to scaladoc and prevent updates that break things."
+ echo
+ echo "This script is meant to be used with the scaladoc -raw-output option, as it compares .html.raw files "
+ echo "instead of markup-heavy .html files."
+ echo
+ echo "Script usage $0 <new api files path> <old api files path>"
+ echo " eg: $0 build/scaladoc/library build/scaladoc-prev/library | less"
+ echo
+ exit 1
+fi
+
+NEW_PATH=$1
+OLD_PATH=$2
+
+FILES=`find $NEW_PATH -name '*.html.raw'`
+if [ "$FILES" == "" ]
+then
+ echo "No .html.raw files found in $NEW_PATH!"
+ exit 1
+fi
+
+for NEW_FILE in $FILES
+do
+ OLD_FILE=${NEW_FILE/$NEW_PATH/$OLD_PATH}
+ if [ -f $OLD_FILE ]
+ then
+ #echo $NEW_FILE" => "$OLD_FILE
+ DIFF=`diff -q -w $NEW_FILE $OLD_FILE 2>&1`
+ if [ "$DIFF" != "" ]
+ then
+ # Redo the full diff
+ echo "$NEW_FILE:"
+ diff -w $NEW_FILE $OLD_FILE 2>&1
+ echo -e "\n\n"
+ fi
+ else
+ echo -e "$NEW_FILE: No corresponding file (expecting $OLD_FILE)\n\n"
+ fi
+done
+
+echo Done.
diff --git a/tools/scalawhich b/tools/scalawhich
deleted file mode 100644
index 6a4b178..0000000
--- a/tools/scalawhich
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-#
-
-scala scala.tools.util.Which $*
diff --git a/tools/scmp b/tools/scmp
deleted file mode 100644
index f6acea5..0000000
--- a/tools/scmp
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-#
-
-scala scala.tools.cmd.program.Scmp "$@"
diff --git a/tools/showPickled b/tools/showPickled
old mode 100644
new mode 100755
diff --git a/tools/stability-test.sh b/tools/stability-test.sh
new file mode 100755
index 0000000..f017ac0
--- /dev/null
+++ b/tools/stability-test.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+#
+
+declare failed
+
+echo "Comparing build/quick/classes and build/strap/classes"
+for dir in library reflect compiler; do
+ # feel free to replace by a more elegant approach -- don't know how
+ if diff -rw -x '*.css' \
+ -x '*.custom' \
+ -x '*.gif' \
+ -x '*.js' \
+ -x '*.layout' \
+ -x '*.png' \
+ -x '*.properties' \
+ -x '*.tmpl' \
+ -x '*.tooltip' \
+ -x '*.txt' \
+ -x '*.xml' \
+ build/{quick,strap}/classes/$dir
+ then
+ classes=$(find build/quick/classes/$dir -name '*.class' | wc -l)
+ printf "%8s: %5d classfiles verified identical\n" $dir $classes
+ else
+ failed=true
+ fi
+done
+
+[[ -z $failed ]] || exit 127
diff --git a/tools/starr_scala b/tools/starr_scala
new file mode 100755
index 0000000..9b0fb60
--- /dev/null
+++ b/tools/starr_scala
@@ -0,0 +1,6 @@
+#!/bin/sh
+#
+
+CP=$($(dirname $BASH_SOURCE)/starrcp)
+
+java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.MainGenericRunner -usejavacp "$@"
diff --git a/tools/starr_scalac b/tools/starr_scalac
new file mode 100755
index 0000000..972eeaf
--- /dev/null
+++ b/tools/starr_scalac
@@ -0,0 +1,6 @@
+#!/bin/sh
+#
+
+CP=$($(dirname $BASH_SOURCE)/starrcp)
+
+java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.Main -usejavacp "$@"
diff --git a/tools/starrcp b/tools/starrcp
old mode 100644
new mode 100755
index 6add566..76f40fd
--- a/tools/starrcp
+++ b/tools/starrcp
@@ -1,5 +1,5 @@
#!/bin/sh
#
-THISDIR=`dirname $0`
-${THISDIR}/cpof ${THISDIR}/../lib
\ No newline at end of file
+dir=$(dirname $0)
+$dir/cpof $dir/../lib
diff --git a/tools/strapcp b/tools/strapcp
old mode 100644
new mode 100755
index 61e4a61..6a46b4e
--- a/tools/strapcp
+++ b/tools/strapcp
@@ -1,8 +1,12 @@
#!/bin/sh
#
-THISDIR=`dirname $0`
-cp=`${THISDIR}/cpof ${THISDIR}/../build/strap/classes`
-fjbg=`${THISDIR}/abspath ${THISDIR}/../lib/fjbg.jar`
+dir=$(dirname $0)
+strap="$dir/../build/strap/classes"
+[[ -d $strap ]] || { echo "Error: no directory at $strap"; exit 1; }
-echo ${cp}:${fjbg}
+cp=$($dir/cpof $strap)
+fjbg=$($dir/abspath $dir/../lib/fjbg.jar)
+asm=$($dir/abspath $dir/../build/asm/classes)
+
+echo $cp:$fjbg:$asm
diff --git a/tools/test-renamer b/tools/test-renamer
new file mode 100755
index 0000000..5a7fc3d
--- /dev/null
+++ b/tools/test-renamer
@@ -0,0 +1,82 @@
+#!/usr/bin/env bash
+#
+# Despite its shameful hackiness, checked in for posterity.
+# It'll live on forever in the git history; then I can remove it.
+
+# set -e
+shopt -s nullglob
+
+cd $(dirname $0)/../test
+
+rename_pattern='^.*/bug[0-9]+.*?(\.scala)?$'
+
+targets ()
+{
+ ls -d pending/*/* disabled/*/* | egrep "$rename_pattern"
+}
+
+showRun ()
+{
+ echo "$@"
+ "$@"
+}
+
+for path in $(targets); do
+ if [[ -f "$path" ]]; then
+ # echo "$path"
+ dir=$(dirname "$path")
+ file=$(basename "$path")
+ base=${file%%.scala}
+ num=${base##bug}
+
+ (cd "$dir" &&
+ for file in ${base}.*; do
+ ext=${file##*.}
+ newname="t${num}.${ext}"
+
+ if [[ -e "$newname" ]]; then
+ echo "Hey, $newname already exists."
+ else
+ showRun perl -pi -e "'s/bug$num\b/t$num/g;'" "$file"
+ showRun mv "$file" "$newname"
+ fi
+ done
+ )
+ fi
+
+ if [[ -d "$path" ]]; then
+ dir=$(dirname "$path")
+ file=$(basename "$path")
+ base="$file"
+ num=${base##bug}
+
+ (cd "$dir" &&
+ for file in $file ${file}.*; do
+ ext=${file##*.}
+ if [[ "$ext" != "$file" ]]; then
+ newname="t${num}.${ext}"
+ else
+ newname="t${num}"
+ for file0 in ${file}/*; do
+ showRun perl -pi -e "'s/bug$num\b/t$num/g;'" "$file0"
+ done
+ fi
+
+ if [[ -e "$newname" ]]; then
+ echo "Hey, $newname already exists."
+ else
+ if [[ -f "$file" ]]; then
+ showRun perl -pi -e "'s/bug$num\b/t$num/g;'" "$file"
+ fi
+ showRun mv "$file" "$newname"
+ fi
+ done
+ )
+
+ fi
+
+done
+#
+# for d in files/*/*; do
+# [[ -d "$d" ]] && do_dir "$d"
+# done
diff --git a/tools/tokens b/tools/tokens
deleted file mode 100644
index b910fb2..0000000
--- a/tools/tokens
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-#
-
-scala scala.tools.cmd.program.Tokens "$@"
diff --git a/tools/truncate b/tools/truncate
deleted file mode 100644
index b7f410e..0000000
--- a/tools/truncate
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-find . -type f -not -path "*.svn*" -name "*.scala" -exec sed -i "" -e 's/$ *Id.*$/$Id$/g' {} \;
-find . -type f -not -path "*.svn*" -name "*.java" -exec sed -i "" -e 's/$ *Id.*$/$Id$/g' {} \;
-find . -type f -not -path "*.svn*" -name "*.cs" -exec sed -i "" -e 's/$ *Id.*$/$Id$/g' {} \;
-find . -type f -not -path "*.svn*" -name "*.js" -exec sed -i "" -e 's/$ *Id.*$/$Id$/g' {} \;
-find . -type f -not -path "*.svn*" -name "*.scala.disabled" -exec sed -i "" -e 's/$ *Id.*$/$Id$/g' {} \;
diff --git a/tools/updatescalacheck b/tools/updatescalacheck
old mode 100644
new mode 100755
diff --git a/tools/verify-jar-cache b/tools/verify-jar-cache
new file mode 100755
index 0000000..8a376a6
--- /dev/null
+++ b/tools/verify-jar-cache
@@ -0,0 +1,33 @@
+#!/bin/bash
+#
+# Discovers files whose sha sum does not match the
+# sha embedded in their directory name from ~/.sbt/cache/scala.
+# Pass -f to remove them, otherwise it just prints them.
+
+set -e
+cd ~/.sbt/cache/scala
+unset failed
+
+unset removal
+[[ "$1" == "-f" ]] && removal=true
+
+for file in $(find . -type f); do
+ sha=$(echo "${file:2}" | sed 's/\/.*$//')
+ sum=$(shasum "$file" | sed 's/ .*$//')
+ if [[ $sum != $sha ]]; then
+ failed=true
+ if [[ -n "$removal" ]]; then
+ echo "Removing corrupt file $file, shasum=$sum"
+ rm -rf $sha
+ else
+ echo "Found corrupt file $file, shasum=$sum."
+ fi
+ fi
+done
+
+if [[ -z "$failed" ]]; then
+ echo "All cached files match their shas."
+elif [[ -z "$removal" ]]; then
+ echo ""
+ echo "Run again with -f to remove the corrupt files."
+fi
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-java/scala-2.10.git
More information about the pkg-java-commits
mailing list